{"text":"2019: Day 8, part 2.use std::io;\n\nmod digits;\nmod errors;\n\nconst IMAGE_WIDTH: usize = 25;\nconst IMAGE_HEIGHT: usize = 6;\n\ntype Pixels = Vec;\n\n#[derive(Debug)]\nstruct Image {\n width: usize,\n height: usize,\n pixels: Pixels,\n}\n\nimpl Image {\n fn of_size(width: usize, height: usize) -> Self {\n let size = width * height;\n let mut pixels = Vec::new();\n pixels.resize(size, Color::Transparent);\n Image {\n width,\n height,\n pixels,\n }\n }\n\n fn print(&self) {\n self.pixels.chunks(self.width).for_each(|row| {\n row.iter().for_each(|pixel| pixel.print());\n println!();\n });\n }\n}\n\nstruct Layer<'a>(&'a [Color]);\n\nimpl Layer<'_> {\n fn apply_to(&self, image: Image) -> Image {\n let pixels = self\n .0\n .iter()\n .zip(image.pixels.into_iter())\n .map(|(layer_pixel, image_pixel)| layer_pixel.paint_onto(image_pixel))\n .collect();\n Image { pixels, ..image }\n }\n}\n\n#[derive(Debug, Clone, Copy)]\nenum Color {\n Black,\n White,\n Transparent,\n}\n\nimpl Color {\n fn decode(value: u32) -> Self {\n match value {\n 0 => Self::Black,\n 1 => Self::White,\n 2 => Self::Transparent,\n _ => panic!(\"Invalid color value: {}\", value),\n }\n }\n\n fn paint_onto(&self, other: Self) -> Self {\n match (self, other) {\n (Self::Black, _) => Self::Black,\n (Self::White, _) => Self::White,\n (Self::Transparent, other) => other,\n }\n }\n\n fn print(&self) {\n match self {\n Self::Black => print!(\" \"),\n Self::White => print!(\"✦\"),\n Self::Transparent => panic!(\"Can't print transparent colors!\"),\n }\n }\n}\n\nfn main() -> io::Result<()> {\n let mut input = String::new();\n io::stdin().read_line(&mut input)?;\n\n let image_size = IMAGE_WIDTH * IMAGE_HEIGHT;\n\n let pixels: Pixels = input\n .trim()\n .chars()\n .map(|c| {\n c.to_digit(10)\n .map(Color::decode)\n .ok_or(errors::io(&format!(\"Invalid digit: {}\", c)))\n })\n .collect::>()?;\n\n let image = pixels\n .chunks(image_size)\n .rev()\n .fold(Image::of_size(IMAGE_WIDTH, IMAGE_HEIGHT), |image, layer| {\n Layer(layer).apply_to(image)\n });\n\n image.print();\n\n Ok(())\n}\n<|endoftext|>"} {"text":"use std::collections::HashMap;\nuse std::sync::Arc;\nuse std::fmt::Write;\nuse super::command::InternalCommand;\nuse super::{Command, CommandGroup, CommandOrAlias};\nuse ::client::Context;\nuse ::model::Message;\nuse ::utils::Colour;\n\nfn error_embed(ctx: &mut Context, message: &Message, input: &str) {\n let _ = ctx.send_message(message.channel_id, |m| m\n .embed(|e| e\n .colour(Colour::dark_red())\n .description(input)));\n}\n\nfn remove_aliases(cmds: &HashMap) -> HashMap<&String, &InternalCommand> {\n let mut result = HashMap::new();\n\n for (n, v) in cmds {\n if let CommandOrAlias::Command(ref cmd) = *v {\n result.insert(n, cmd);\n }\n }\n\n result\n}\n\npub fn with_embeds(ctx: &mut Context,\n message: &Message,\n groups: HashMap>,\n args: Vec) -> Result<(), String> {\n if !args.is_empty() {\n let name = args.join(\" \");\n\n for (group_name, group) in groups {\n let mut found: Option<(&String, &InternalCommand)> = None;\n\n for (command_name, command) in &group.commands {\n let with_prefix = if let Some(ref prefix) = group.prefix {\n format!(\"{} {}\", prefix, command_name)\n } else {\n command_name.to_owned()\n };\n\n if name == with_prefix || name == *command_name {\n match *command {\n CommandOrAlias::Command(ref cmd) => {\n found = Some((command_name, cmd));\n },\n CommandOrAlias::Alias(ref name) => {\n error_embed(ctx, message, &format!(\"Did you mean \\\"{}\\\"?\", name));\n return Ok(());\n }\n }\n }\n }\n\n if let 
Some((command_name, command)) = found {\n if !command.help_available {\n error_embed(ctx, message, \"**Error**: No help available.\");\n\n return Ok(());\n }\n\n let _ = ctx.send_message(message.channel_id, |m| {\n m.embed(|e| {\n let mut embed = e.colour(Colour::rosewater())\n .title(command_name);\n if let Some(ref desc) = command.desc {\n embed = embed.description(desc);\n }\n\n if let Some(ref usage) = command.usage {\n embed = embed.field(|f| f\n .name(\"Usage\")\n .value(&format!(\"`{} {}`\", command_name, usage)));\n }\n\n if let Some(ref example) = command.example {\n embed = embed.field(|f| f\n .name(\"Sample usage\")\n .value(&format!(\"`{} {}`\", command_name, example)));\n }\n\n if group_name != \"Ungrouped\" {\n embed = embed.field(|f| f\n .name(\"Group\")\n .value(&group_name));\n }\n\n let available = if command.dm_only {\n \"Only in DM\"\n } else if command.guild_only {\n \"Only in guilds\"\n } else {\n \"In DM and guilds\"\n };\n\n embed = embed.field(|f| f\n .name(\"Available\")\n .value(available));\n\n embed\n })\n });\n\n return Ok(());\n }\n }\n\n let error_msg = format!(\"**Error**: Command `{}` not found.\", name);\n error_embed(ctx, message, &error_msg);\n\n return Ok(());\n }\n\n let _ = ctx.send_message(message.channel_id, |m| {\n m.embed(|mut e| {\n e = e.colour(Colour::rosewater())\n .description(\"To get help with an individual command, pass its \\\n name as an argument to this command.\");\n\n for (group_name, group) in groups {\n let mut desc = String::new();\n\n if let Some(ref x) = group.prefix {\n let _ = write!(desc, \"Prefix: {}\\n\", x);\n }\n\n desc.push_str(\"Commands:\\n\");\n\n let mut no_commands = true;\n\n for (n, cmd) in remove_aliases(&group.commands) {\n if cmd.help_available {\n let _ = write!(desc, \"`{}`\\n\", n);\n\n no_commands = false;\n }\n }\n\n if no_commands {\n let _ = write!(desc, \"*[No commands]*\");\n }\n\n e = e.field(|f| f.name(&group_name).value(&desc));\n }\n\n e\n })\n });\n\n Ok(())\n}\n\npub fn plain(ctx: &mut Context,\n _: &Message,\n groups: HashMap>,\n args: Vec) -> Result<(), String> {\n if !args.is_empty() {\n let name = args.join(\" \");\n\n for (group_name, group) in groups {\n let mut found: Option<(&String, &Command)> = None;\n\n for (command_name, command) in &group.commands {\n let with_prefix = if let Some(ref prefix) = group.prefix {\n format!(\"{} {}\", prefix, command_name)\n } else {\n command_name.to_owned()\n };\n\n if name == with_prefix || name == *command_name {\n match *command {\n CommandOrAlias::Command(ref cmd) => {\n found = Some((command_name, cmd));\n },\n CommandOrAlias::Alias(ref name) => {\n let _ = ctx.say(&format!(\"Did you mean {:?}?\", name));\n return Ok(());\n }\n }\n }\n }\n\n if let Some((command_name, command)) = found {\n if !command.help_available {\n let _ = ctx.say(\"**Error**: No help available.\");\n return Ok(());\n }\n\n let mut result = format!(\"**{}**\\n\", command_name);\n\n if let Some(ref desc) = command.desc {\n let _ = write!(result, \"**Description:** {}\\n\", desc);\n }\n\n if let Some(ref usage) = command.usage {\n let _ = write!(result, \"**Usage:** `{} {}`\\n\", command_name, usage);\n }\n\n if let Some(ref example) = command.example {\n let _ = write!(result, \"**Sample usage:** `{} {}`\\n\", command_name, example);\n }\n\n if group_name != \"Ungrouped\" {\n let _ = write!(result, \"**Group:** {}\\n\", group_name);\n }\n\n result.push_str(\"**Available:** \");\n result.push_str(if command.dm_only {\n \"Only in DM\"\n } else if command.guild_only {\n \"Only in 
guilds\"\n } else {\n \"In DM and guilds\"\n });\n result.push_str(\"\\n\");\n\n let _ = ctx.say(&result);\n\n return Ok(());\n }\n }\n\n let _ = ctx.say(&format!(\"**Error**: Command `{}` not found.\", name));\n\n return Ok(());\n }\n\n let mut result = \"**Commands**\\nTo get help with an individual command, pass its \\\n name as an argument to this command.\\n\\n\"\n .to_string();\n\n for (group_name, group) in groups {\n let _ = write!(result, \"**{}:** \", group_name);\n\n if let Some(ref x) = group.prefix {\n let _ = write!(result, \"(prefix: `{}`): \", x);\n }\n\n let mut no_commands = true;\n\n for (n, cmd) in remove_aliases(&group.commands) {\n if cmd.help_available {\n let _ = write!(result, \"`{}` \", n);\n\n no_commands = false;\n }\n }\n\n if no_commands {\n result.push_str(\"*[No Commands]*\");\n }\n\n result.push('\\n');\n }\n\n let _ = ctx.say(&result);\n\n Ok(())\n}\nRelisting 'Commands:' is redundant in embed helpuse std::collections::HashMap;\nuse std::sync::Arc;\nuse std::fmt::Write;\nuse super::command::InternalCommand;\nuse super::{Command, CommandGroup, CommandOrAlias};\nuse ::client::Context;\nuse ::model::Message;\nuse ::utils::Colour;\n\nfn error_embed(ctx: &mut Context, message: &Message, input: &str) {\n let _ = ctx.send_message(message.channel_id, |m| m\n .embed(|e| e\n .colour(Colour::dark_red())\n .description(input)));\n}\n\nfn remove_aliases(cmds: &HashMap) -> HashMap<&String, &InternalCommand> {\n let mut result = HashMap::new();\n\n for (n, v) in cmds {\n if let CommandOrAlias::Command(ref cmd) = *v {\n result.insert(n, cmd);\n }\n }\n\n result\n}\n\npub fn with_embeds(ctx: &mut Context,\n message: &Message,\n groups: HashMap>,\n args: Vec) -> Result<(), String> {\n if !args.is_empty() {\n let name = args.join(\" \");\n\n for (group_name, group) in groups {\n let mut found: Option<(&String, &InternalCommand)> = None;\n\n for (command_name, command) in &group.commands {\n let with_prefix = if let Some(ref prefix) = group.prefix {\n format!(\"{} {}\", prefix, command_name)\n } else {\n command_name.to_owned()\n };\n\n if name == with_prefix || name == *command_name {\n match *command {\n CommandOrAlias::Command(ref cmd) => {\n found = Some((command_name, cmd));\n },\n CommandOrAlias::Alias(ref name) => {\n error_embed(ctx, message, &format!(\"Did you mean \\\"{}\\\"?\", name));\n return Ok(());\n }\n }\n }\n }\n\n if let Some((command_name, command)) = found {\n if !command.help_available {\n error_embed(ctx, message, \"**Error**: No help available.\");\n\n return Ok(());\n }\n\n let _ = ctx.send_message(message.channel_id, |m| {\n m.embed(|e| {\n let mut embed = e.colour(Colour::rosewater())\n .title(command_name);\n if let Some(ref desc) = command.desc {\n embed = embed.description(desc);\n }\n\n if let Some(ref usage) = command.usage {\n embed = embed.field(|f| f\n .name(\"Usage\")\n .value(&format!(\"`{} {}`\", command_name, usage)));\n }\n\n if let Some(ref example) = command.example {\n embed = embed.field(|f| f\n .name(\"Sample usage\")\n .value(&format!(\"`{} {}`\", command_name, example)));\n }\n\n if group_name != \"Ungrouped\" {\n embed = embed.field(|f| f\n .name(\"Group\")\n .value(&group_name));\n }\n\n let available = if command.dm_only {\n \"Only in DM\"\n } else if command.guild_only {\n \"Only in guilds\"\n } else {\n \"In DM and guilds\"\n };\n\n embed = embed.field(|f| f\n .name(\"Available\")\n .value(available));\n\n embed\n })\n });\n\n return Ok(());\n }\n }\n\n let error_msg = format!(\"**Error**: Command `{}` not found.\", name);\n 
error_embed(ctx, message, &error_msg);\n\n return Ok(());\n }\n\n let _ = ctx.send_message(message.channel_id, |m| {\n m.embed(|mut e| {\n e = e.colour(Colour::rosewater())\n .description(\"To get help with an individual command, pass its \\\n name as an argument to this command.\");\n\n for (group_name, group) in groups {\n let mut desc = String::new();\n\n if let Some(ref x) = group.prefix {\n let _ = write!(desc, \"Prefix: {}\\n\", x);\n }\n\n let mut no_commands = true;\n\n for (n, cmd) in remove_aliases(&group.commands) {\n if cmd.help_available {\n let _ = write!(desc, \"`{}`\\n\", n);\n\n no_commands = false;\n }\n }\n\n if no_commands {\n let _ = write!(desc, \"*[No commands]*\");\n }\n\n e = e.field(|f| f.name(&group_name).value(&desc));\n }\n\n e\n })\n });\n\n Ok(())\n}\n\npub fn plain(ctx: &mut Context,\n _: &Message,\n groups: HashMap>,\n args: Vec) -> Result<(), String> {\n if !args.is_empty() {\n let name = args.join(\" \");\n\n for (group_name, group) in groups {\n let mut found: Option<(&String, &Command)> = None;\n\n for (command_name, command) in &group.commands {\n let with_prefix = if let Some(ref prefix) = group.prefix {\n format!(\"{} {}\", prefix, command_name)\n } else {\n command_name.to_owned()\n };\n\n if name == with_prefix || name == *command_name {\n match *command {\n CommandOrAlias::Command(ref cmd) => {\n found = Some((command_name, cmd));\n },\n CommandOrAlias::Alias(ref name) => {\n let _ = ctx.say(&format!(\"Did you mean {:?}?\", name));\n return Ok(());\n }\n }\n }\n }\n\n if let Some((command_name, command)) = found {\n if !command.help_available {\n let _ = ctx.say(\"**Error**: No help available.\");\n return Ok(());\n }\n\n let mut result = format!(\"**{}**\\n\", command_name);\n\n if let Some(ref desc) = command.desc {\n let _ = write!(result, \"**Description:** {}\\n\", desc);\n }\n\n if let Some(ref usage) = command.usage {\n let _ = write!(result, \"**Usage:** `{} {}`\\n\", command_name, usage);\n }\n\n if let Some(ref example) = command.example {\n let _ = write!(result, \"**Sample usage:** `{} {}`\\n\", command_name, example);\n }\n\n if group_name != \"Ungrouped\" {\n let _ = write!(result, \"**Group:** {}\\n\", group_name);\n }\n\n result.push_str(\"**Available:** \");\n result.push_str(if command.dm_only {\n \"Only in DM\"\n } else if command.guild_only {\n \"Only in guilds\"\n } else {\n \"In DM and guilds\"\n });\n result.push_str(\"\\n\");\n\n let _ = ctx.say(&result);\n\n return Ok(());\n }\n }\n\n let _ = ctx.say(&format!(\"**Error**: Command `{}` not found.\", name));\n\n return Ok(());\n }\n\n let mut result = \"**Commands**\\nTo get help with an individual command, pass its \\\n name as an argument to this command.\\n\\n\"\n .to_string();\n\n for (group_name, group) in groups {\n let _ = write!(result, \"**{}:** \", group_name);\n\n if let Some(ref x) = group.prefix {\n let _ = write!(result, \"(prefix: `{}`): \", x);\n }\n\n let mut no_commands = true;\n\n for (n, cmd) in remove_aliases(&group.commands) {\n if cmd.help_available {\n let _ = write!(result, \"`{}` \", n);\n\n no_commands = false;\n }\n }\n\n if no_commands {\n result.push_str(\"*[No Commands]*\");\n }\n\n result.push('\\n');\n }\n\n let _ = ctx.say(&result);\n\n Ok(())\n}\n<|endoftext|>"} {"text":"delete<|endoftext|>"} {"text":"chore: associated function<|endoftext|>"} {"text":"\/\/ Copyright 2012-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A graph module for use in dataflow, region resolution, and elsewhere.\n\/\/!\n\/\/! # Interface details\n\/\/!\n\/\/! You customize the graph by specifying a \"node data\" type `N` and an\n\/\/! \"edge data\" type `E`. You can then later gain access (mutable or\n\/\/! immutable) to these \"user-data\" bits. Currently, you can only add\n\/\/! nodes or edges to the graph. You cannot remove or modify them once\n\/\/! added. This could be changed if we have a need.\n\/\/!\n\/\/! # Implementation details\n\/\/!\n\/\/! The main tricky thing about this code is the way that edges are\n\/\/! stored. The edges are stored in a central array, but they are also\n\/\/! threaded onto two linked lists for each node, one for incoming edges\n\/\/! and one for outgoing edges. Note that every edge is a member of some\n\/\/! incoming list and some outgoing list. Basically you can load the\n\/\/! first index of the linked list from the node data structures (the\n\/\/! field `first_edge`) and then, for each edge, load the next index from\n\/\/! the field `next_edge`). Each of those fields is an array that should\n\/\/! be indexed by the direction (see the type `Direction`).\n\nuse bitvec::BitVector;\nuse std::fmt::{Formatter, Error, Debug};\nuse std::usize;\nuse snapshot_vec::{SnapshotVec, SnapshotVecDelegate};\n\n#[cfg(test)]\nmod tests;\n\npub struct Graph {\n nodes: SnapshotVec>,\n edges: SnapshotVec>,\n}\n\npub struct Node {\n first_edge: [EdgeIndex; 2], \/\/ see module comment\n pub data: N,\n}\n\npub struct Edge {\n next_edge: [EdgeIndex; 2], \/\/ see module comment\n source: NodeIndex,\n target: NodeIndex,\n pub data: E,\n}\n\nimpl SnapshotVecDelegate for Node {\n type Value = Node;\n type Undo = ();\n\n fn reverse(_: &mut Vec>, _: ()) {}\n}\n\nimpl SnapshotVecDelegate for Edge {\n type Value = Edge;\n type Undo = ();\n\n fn reverse(_: &mut Vec>, _: ()) {}\n}\n\nimpl Debug for Edge {\n fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {\n write!(f,\n \"Edge {{ next_edge: [{:?}, {:?}], source: {:?}, target: {:?}, data: {:?} }}\",\n self.next_edge[0],\n self.next_edge[1],\n self.source,\n self.target,\n self.data)\n }\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\npub struct NodeIndex(pub usize);\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\npub struct EdgeIndex(pub usize);\n\npub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);\n\n\/\/ Use a private field here to guarantee no more instances are created:\n#[derive(Copy, Clone, Debug, PartialEq)]\npub struct Direction {\n repr: usize,\n}\n\npub const OUTGOING: Direction = Direction { repr: 0 };\n\npub const INCOMING: Direction = Direction { repr: 1 };\n\nimpl NodeIndex {\n \/\/\/ Returns unique id (unique with respect to the graph holding associated node).\n pub fn node_id(&self) -> usize {\n self.0\n }\n}\n\nimpl EdgeIndex {\n \/\/\/ Returns unique id (unique with respect to the graph holding associated edge).\n pub fn edge_id(&self) -> usize {\n self.0\n }\n}\n\nimpl Graph {\n pub fn new() -> Graph {\n Graph {\n nodes: SnapshotVec::new(),\n edges: SnapshotVec::new(),\n }\n }\n\n \/\/ # Simple accessors\n\n #[inline]\n pub fn all_nodes(&self) -> &[Node] {\n &self.nodes\n }\n\n #[inline]\n pub 
fn len_nodes(&self) -> usize {\n self.nodes.len()\n }\n\n #[inline]\n pub fn all_edges(&self) -> &[Edge] {\n &self.edges\n }\n\n #[inline]\n pub fn len_edges(&self) -> usize {\n self.edges.len()\n }\n\n \/\/ # Node construction\n\n pub fn next_node_index(&self) -> NodeIndex {\n NodeIndex(self.nodes.len())\n }\n\n pub fn add_node(&mut self, data: N) -> NodeIndex {\n let idx = self.next_node_index();\n self.nodes.push(Node {\n first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],\n data: data,\n });\n idx\n }\n\n pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {\n &mut self.nodes[idx.0].data\n }\n\n pub fn node_data(&self, idx: NodeIndex) -> &N {\n &self.nodes[idx.0].data\n }\n\n pub fn node(&self, idx: NodeIndex) -> &Node {\n &self.nodes[idx.0]\n }\n\n \/\/ # Edge construction and queries\n\n pub fn next_edge_index(&self) -> EdgeIndex {\n EdgeIndex(self.edges.len())\n }\n\n pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {\n debug!(\"graph: add_edge({:?}, {:?}, {:?})\", source, target, data);\n\n let idx = self.next_edge_index();\n\n \/\/ read current first of the list of edges from each node\n let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];\n let target_first = self.nodes[target.0].first_edge[INCOMING.repr];\n\n \/\/ create the new edge, with the previous firsts from each node\n \/\/ as the next pointers\n self.edges.push(Edge {\n next_edge: [source_first, target_first],\n source: source,\n target: target,\n data: data,\n });\n\n \/\/ adjust the firsts for each node target be the next object.\n self.nodes[source.0].first_edge[OUTGOING.repr] = idx;\n self.nodes[target.0].first_edge[INCOMING.repr] = idx;\n\n return idx;\n }\n\n pub fn mut_edge_data(&mut self, idx: EdgeIndex) -> &mut E {\n &mut self.edges[idx.0].data\n }\n\n pub fn edge_data(&self, idx: EdgeIndex) -> &E {\n &self.edges[idx.0].data\n }\n\n pub fn edge(&self, idx: EdgeIndex) -> &Edge {\n &self.edges[idx.0]\n }\n\n pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {\n \/\/! Accesses the index of the first edge adjacent to `node`.\n \/\/! This is useful if you wish to modify the graph while walking\n \/\/! the linked list of edges.\n\n self.nodes[node.0].first_edge[dir.repr]\n }\n\n pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {\n \/\/! Accesses the next edge in a given direction.\n \/\/! This is useful if you wish to modify the graph while walking\n \/\/! the linked list of edges.\n\n self.edges[edge.0].next_edge[dir.repr]\n }\n\n \/\/ # Iterating over nodes, edges\n\n pub fn each_node<'a, F>(&'a self, mut f: F) -> bool\n where F: FnMut(NodeIndex, &'a Node) -> bool\n {\n \/\/! Iterates over all edges defined in the graph.\n self.nodes.iter().enumerate().all(|(i, node)| f(NodeIndex(i), node))\n }\n\n pub fn each_edge<'a, F>(&'a self, mut f: F) -> bool\n where F: FnMut(EdgeIndex, &'a Edge) -> bool\n {\n \/\/! 
Iterates over all edges defined in the graph\n self.edges.iter().enumerate().all(|(i, edge)| f(EdgeIndex(i), edge))\n }\n\n pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges {\n self.adjacent_edges(source, OUTGOING)\n }\n\n pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges {\n self.adjacent_edges(source, INCOMING)\n }\n\n pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges {\n let first_edge = self.node(source).first_edge[direction.repr];\n AdjacentEdges {\n graph: self,\n direction: direction,\n next: first_edge,\n }\n }\n\n pub fn successor_nodes(&self, source: NodeIndex) -> AdjacentTargets {\n self.outgoing_edges(source).targets()\n }\n\n pub fn predecessor_nodes(&self, target: NodeIndex) -> AdjacentSources {\n self.incoming_edges(target).sources()\n }\n\n \/\/ # Fixed-point iteration\n \/\/\n \/\/ A common use for graphs in our compiler is to perform\n \/\/ fixed-point iteration. In this case, each edge represents a\n \/\/ constraint, and the nodes themselves are associated with\n \/\/ variables or other bitsets. This method facilitates such a\n \/\/ computation.\n\n pub fn iterate_until_fixed_point<'a, F>(&'a self, mut op: F)\n where F: FnMut(usize, EdgeIndex, &'a Edge) -> bool\n {\n let mut iteration = 0;\n let mut changed = true;\n while changed {\n changed = false;\n iteration += 1;\n for (i, edge) in self.edges.iter().enumerate() {\n changed |= op(iteration, EdgeIndex(i), edge);\n }\n }\n }\n\n pub fn depth_traverse<'a>(&'a self,\n start: NodeIndex,\n direction: Direction)\n -> DepthFirstTraversal<'a, N, E> {\n DepthFirstTraversal::with_start_node(self, start, direction)\n }\n}\n\n\/\/ # Iterators\n\npub struct AdjacentEdges<'g, N, E>\n where N: 'g,\n E: 'g\n{\n graph: &'g Graph,\n direction: Direction,\n next: EdgeIndex,\n}\n\nimpl<'g, N, E> AdjacentEdges<'g, N, E> {\n fn targets(self) -> AdjacentTargets<'g, N, E> {\n AdjacentTargets { edges: self }\n }\n\n fn sources(self) -> AdjacentSources<'g, N, E> {\n AdjacentSources { edges: self }\n }\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {\n type Item = (EdgeIndex, &'g Edge);\n\n fn next(&mut self) -> Option<(EdgeIndex, &'g Edge)> {\n let edge_index = self.next;\n if edge_index == INVALID_EDGE_INDEX {\n return None;\n }\n\n let edge = self.graph.edge(edge_index);\n self.next = edge.next_edge[self.direction.repr];\n Some((edge_index, edge))\n }\n}\n\npub struct AdjacentTargets<'g, N: 'g, E: 'g>\n where N: 'g,\n E: 'g\n{\n edges: AdjacentEdges<'g, N, E>,\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentTargets<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n self.edges.next().map(|(_, edge)| edge.target)\n }\n}\n\npub struct AdjacentSources<'g, N: 'g, E: 'g>\n where N: 'g,\n E: 'g\n{\n edges: AdjacentEdges<'g, N, E>,\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentSources<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n self.edges.next().map(|(_, edge)| edge.source)\n }\n}\n\npub struct DepthFirstTraversal<'g, N: 'g, E: 'g> {\n graph: &'g Graph,\n stack: Vec,\n visited: BitVector,\n direction: Direction,\n}\n\nimpl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {\n pub fn new(graph: &'g Graph, direction: Direction) -> Self {\n let visited = BitVector::new(graph.len_nodes());\n DepthFirstTraversal {\n graph: graph,\n stack: vec![],\n visited: visited,\n direction: direction\n }\n }\n\n pub fn with_start_node(graph: &'g Graph,\n start_node: NodeIndex,\n direction: 
Direction)\n -> Self {\n let mut visited = BitVector::new(graph.len_nodes());\n visited.insert(start_node.node_id());\n DepthFirstTraversal {\n graph: graph,\n stack: vec![start_node],\n visited: visited,\n direction: direction\n }\n }\n\n pub fn reset(&mut self, start_node: NodeIndex) {\n self.stack.truncate(0);\n self.stack.push(start_node);\n self.visited.clear();\n self.visited.insert(start_node.node_id());\n }\n\n fn visit(&mut self, node: NodeIndex) {\n if self.visited.insert(node.node_id()) {\n self.stack.push(node);\n }\n }\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n let next = self.stack.pop();\n if let Some(idx) = next {\n for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {\n let target = edge.source_or_target(self.direction);\n self.visit(target);\n }\n }\n next\n }\n}\n\npub fn each_edge_index(max_edge_index: EdgeIndex, mut f: F)\n where F: FnMut(EdgeIndex) -> bool\n{\n let mut i = 0;\n let n = max_edge_index.0;\n while i < n {\n if !f(EdgeIndex(i)) {\n return;\n }\n i += 1;\n }\n}\n\nimpl Edge {\n pub fn source(&self) -> NodeIndex {\n self.source\n }\n\n pub fn target(&self) -> NodeIndex {\n self.target\n }\n\n pub fn source_or_target(&self, direction: Direction) -> NodeIndex {\n if direction == OUTGOING {\n self.target\n } else {\n self.source\n }\n }\n}\nRollup merge of #37286 - srinivasreddy:graph, r=nrc\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A graph module for use in dataflow, region resolution, and elsewhere.\n\/\/!\n\/\/! # Interface details\n\/\/!\n\/\/! You customize the graph by specifying a \"node data\" type `N` and an\n\/\/! \"edge data\" type `E`. You can then later gain access (mutable or\n\/\/! immutable) to these \"user-data\" bits. Currently, you can only add\n\/\/! nodes or edges to the graph. You cannot remove or modify them once\n\/\/! added. This could be changed if we have a need.\n\/\/!\n\/\/! # Implementation details\n\/\/!\n\/\/! The main tricky thing about this code is the way that edges are\n\/\/! stored. The edges are stored in a central array, but they are also\n\/\/! threaded onto two linked lists for each node, one for incoming edges\n\/\/! and one for outgoing edges. Note that every edge is a member of some\n\/\/! incoming list and some outgoing list. Basically you can load the\n\/\/! first index of the linked list from the node data structures (the\n\/\/! field `first_edge`) and then, for each edge, load the next index from\n\/\/! the field `next_edge`). Each of those fields is an array that should\n\/\/! 
be indexed by the direction (see the type `Direction`).\n\nuse bitvec::BitVector;\nuse std::fmt::{Formatter, Error, Debug};\nuse std::usize;\nuse snapshot_vec::{SnapshotVec, SnapshotVecDelegate};\n\n#[cfg(test)]\nmod tests;\n\npub struct Graph {\n nodes: SnapshotVec>,\n edges: SnapshotVec>,\n}\n\npub struct Node {\n first_edge: [EdgeIndex; 2], \/\/ see module comment\n pub data: N,\n}\n\npub struct Edge {\n next_edge: [EdgeIndex; 2], \/\/ see module comment\n source: NodeIndex,\n target: NodeIndex,\n pub data: E,\n}\n\nimpl SnapshotVecDelegate for Node {\n type Value = Node;\n type Undo = ();\n\n fn reverse(_: &mut Vec>, _: ()) {}\n}\n\nimpl SnapshotVecDelegate for Edge {\n type Value = Edge;\n type Undo = ();\n\n fn reverse(_: &mut Vec>, _: ()) {}\n}\n\nimpl Debug for Edge {\n fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {\n write!(f,\n \"Edge {{ next_edge: [{:?}, {:?}], source: {:?}, target: {:?}, data: {:?} }}\",\n self.next_edge[0],\n self.next_edge[1],\n self.source,\n self.target,\n self.data)\n }\n}\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\npub struct NodeIndex(pub usize);\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]\npub struct EdgeIndex(pub usize);\n\npub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);\n\n\/\/ Use a private field here to guarantee no more instances are created:\n#[derive(Copy, Clone, Debug, PartialEq)]\npub struct Direction {\n repr: usize,\n}\n\npub const OUTGOING: Direction = Direction { repr: 0 };\n\npub const INCOMING: Direction = Direction { repr: 1 };\n\nimpl NodeIndex {\n \/\/\/ Returns unique id (unique with respect to the graph holding associated node).\n pub fn node_id(&self) -> usize {\n self.0\n }\n}\n\nimpl EdgeIndex {\n \/\/\/ Returns unique id (unique with respect to the graph holding associated edge).\n pub fn edge_id(&self) -> usize {\n self.0\n }\n}\n\nimpl Graph {\n pub fn new() -> Graph {\n Graph {\n nodes: SnapshotVec::new(),\n edges: SnapshotVec::new(),\n }\n }\n\n \/\/ # Simple accessors\n\n #[inline]\n pub fn all_nodes(&self) -> &[Node] {\n &self.nodes\n }\n\n #[inline]\n pub fn len_nodes(&self) -> usize {\n self.nodes.len()\n }\n\n #[inline]\n pub fn all_edges(&self) -> &[Edge] {\n &self.edges\n }\n\n #[inline]\n pub fn len_edges(&self) -> usize {\n self.edges.len()\n }\n\n \/\/ # Node construction\n\n pub fn next_node_index(&self) -> NodeIndex {\n NodeIndex(self.nodes.len())\n }\n\n pub fn add_node(&mut self, data: N) -> NodeIndex {\n let idx = self.next_node_index();\n self.nodes.push(Node {\n first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],\n data: data,\n });\n idx\n }\n\n pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {\n &mut self.nodes[idx.0].data\n }\n\n pub fn node_data(&self, idx: NodeIndex) -> &N {\n &self.nodes[idx.0].data\n }\n\n pub fn node(&self, idx: NodeIndex) -> &Node {\n &self.nodes[idx.0]\n }\n\n \/\/ # Edge construction and queries\n\n pub fn next_edge_index(&self) -> EdgeIndex {\n EdgeIndex(self.edges.len())\n }\n\n pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {\n debug!(\"graph: add_edge({:?}, {:?}, {:?})\", source, target, data);\n\n let idx = self.next_edge_index();\n\n \/\/ read current first of the list of edges from each node\n let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];\n let target_first = self.nodes[target.0].first_edge[INCOMING.repr];\n\n \/\/ create the new edge, with the previous firsts from each node\n \/\/ as the next pointers\n self.edges.push(Edge {\n next_edge: [source_first, 
target_first],\n source: source,\n target: target,\n data: data,\n });\n\n \/\/ adjust the firsts for each node target be the next object.\n self.nodes[source.0].first_edge[OUTGOING.repr] = idx;\n self.nodes[target.0].first_edge[INCOMING.repr] = idx;\n\n return idx;\n }\n\n pub fn mut_edge_data(&mut self, idx: EdgeIndex) -> &mut E {\n &mut self.edges[idx.0].data\n }\n\n pub fn edge_data(&self, idx: EdgeIndex) -> &E {\n &self.edges[idx.0].data\n }\n\n pub fn edge(&self, idx: EdgeIndex) -> &Edge {\n &self.edges[idx.0]\n }\n\n pub fn first_adjacent(&self, node: NodeIndex, dir: Direction) -> EdgeIndex {\n \/\/! Accesses the index of the first edge adjacent to `node`.\n \/\/! This is useful if you wish to modify the graph while walking\n \/\/! the linked list of edges.\n\n self.nodes[node.0].first_edge[dir.repr]\n }\n\n pub fn next_adjacent(&self, edge: EdgeIndex, dir: Direction) -> EdgeIndex {\n \/\/! Accesses the next edge in a given direction.\n \/\/! This is useful if you wish to modify the graph while walking\n \/\/! the linked list of edges.\n\n self.edges[edge.0].next_edge[dir.repr]\n }\n\n \/\/ # Iterating over nodes, edges\n\n pub fn each_node<'a, F>(&'a self, mut f: F) -> bool\n where F: FnMut(NodeIndex, &'a Node) -> bool\n {\n \/\/! Iterates over all edges defined in the graph.\n self.nodes.iter().enumerate().all(|(i, node)| f(NodeIndex(i), node))\n }\n\n pub fn each_edge<'a, F>(&'a self, mut f: F) -> bool\n where F: FnMut(EdgeIndex, &'a Edge) -> bool\n {\n \/\/! Iterates over all edges defined in the graph\n self.edges.iter().enumerate().all(|(i, edge)| f(EdgeIndex(i), edge))\n }\n\n pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges {\n self.adjacent_edges(source, OUTGOING)\n }\n\n pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges {\n self.adjacent_edges(source, INCOMING)\n }\n\n pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges {\n let first_edge = self.node(source).first_edge[direction.repr];\n AdjacentEdges {\n graph: self,\n direction: direction,\n next: first_edge,\n }\n }\n\n pub fn successor_nodes(&self, source: NodeIndex) -> AdjacentTargets {\n self.outgoing_edges(source).targets()\n }\n\n pub fn predecessor_nodes(&self, target: NodeIndex) -> AdjacentSources {\n self.incoming_edges(target).sources()\n }\n\n \/\/ # Fixed-point iteration\n \/\/\n \/\/ A common use for graphs in our compiler is to perform\n \/\/ fixed-point iteration. In this case, each edge represents a\n \/\/ constraint, and the nodes themselves are associated with\n \/\/ variables or other bitsets. 
This method facilitates such a\n \/\/ computation.\n\n pub fn iterate_until_fixed_point<'a, F>(&'a self, mut op: F)\n where F: FnMut(usize, EdgeIndex, &'a Edge) -> bool\n {\n let mut iteration = 0;\n let mut changed = true;\n while changed {\n changed = false;\n iteration += 1;\n for (i, edge) in self.edges.iter().enumerate() {\n changed |= op(iteration, EdgeIndex(i), edge);\n }\n }\n }\n\n pub fn depth_traverse<'a>(&'a self,\n start: NodeIndex,\n direction: Direction)\n -> DepthFirstTraversal<'a, N, E> {\n DepthFirstTraversal::with_start_node(self, start, direction)\n }\n}\n\n\/\/ # Iterators\n\npub struct AdjacentEdges<'g, N, E>\n where N: 'g,\n E: 'g\n{\n graph: &'g Graph,\n direction: Direction,\n next: EdgeIndex,\n}\n\nimpl<'g, N, E> AdjacentEdges<'g, N, E> {\n fn targets(self) -> AdjacentTargets<'g, N, E> {\n AdjacentTargets { edges: self }\n }\n\n fn sources(self) -> AdjacentSources<'g, N, E> {\n AdjacentSources { edges: self }\n }\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {\n type Item = (EdgeIndex, &'g Edge);\n\n fn next(&mut self) -> Option<(EdgeIndex, &'g Edge)> {\n let edge_index = self.next;\n if edge_index == INVALID_EDGE_INDEX {\n return None;\n }\n\n let edge = self.graph.edge(edge_index);\n self.next = edge.next_edge[self.direction.repr];\n Some((edge_index, edge))\n }\n}\n\npub struct AdjacentTargets<'g, N: 'g, E: 'g>\n where N: 'g,\n E: 'g\n{\n edges: AdjacentEdges<'g, N, E>,\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentTargets<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n self.edges.next().map(|(_, edge)| edge.target)\n }\n}\n\npub struct AdjacentSources<'g, N: 'g, E: 'g>\n where N: 'g,\n E: 'g\n{\n edges: AdjacentEdges<'g, N, E>,\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for AdjacentSources<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n self.edges.next().map(|(_, edge)| edge.source)\n }\n}\n\npub struct DepthFirstTraversal<'g, N: 'g, E: 'g> {\n graph: &'g Graph,\n stack: Vec,\n visited: BitVector,\n direction: Direction,\n}\n\nimpl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {\n pub fn new(graph: &'g Graph, direction: Direction) -> Self {\n let visited = BitVector::new(graph.len_nodes());\n DepthFirstTraversal {\n graph: graph,\n stack: vec![],\n visited: visited,\n direction: direction,\n }\n }\n\n pub fn with_start_node(graph: &'g Graph,\n start_node: NodeIndex,\n direction: Direction)\n -> Self {\n let mut visited = BitVector::new(graph.len_nodes());\n visited.insert(start_node.node_id());\n DepthFirstTraversal {\n graph: graph,\n stack: vec![start_node],\n visited: visited,\n direction: direction,\n }\n }\n\n pub fn reset(&mut self, start_node: NodeIndex) {\n self.stack.truncate(0);\n self.stack.push(start_node);\n self.visited.clear();\n self.visited.insert(start_node.node_id());\n }\n\n fn visit(&mut self, node: NodeIndex) {\n if self.visited.insert(node.node_id()) {\n self.stack.push(node);\n }\n }\n}\n\nimpl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {\n type Item = NodeIndex;\n\n fn next(&mut self) -> Option {\n let next = self.stack.pop();\n if let Some(idx) = next {\n for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {\n let target = edge.source_or_target(self.direction);\n self.visit(target);\n }\n }\n next\n }\n}\n\npub fn each_edge_index(max_edge_index: EdgeIndex, mut f: F)\n where F: FnMut(EdgeIndex) -> bool\n{\n let mut i = 0;\n let n = max_edge_index.0;\n while i < n {\n if !f(EdgeIndex(i)) {\n return;\n }\n 
i += 1;\n }\n}\n\nimpl Edge {\n pub fn source(&self) -> NodeIndex {\n self.source\n }\n\n pub fn target(&self) -> NodeIndex {\n self.target\n }\n\n pub fn source_or_target(&self, direction: Direction) -> NodeIndex {\n if direction == OUTGOING {\n self.target\n } else {\n self.source\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::HashSet;\nuse std::collections::hash_map::RandomState;\nuse std::iter::FromIterator;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\n\nuse devicemapper::consts::SECTOR_SIZE;\nuse devicemapper::DM;\nuse devicemapper::{DataBlocks, Sectors, Segment};\nuse devicemapper::LinearDev;\nuse devicemapper::{ThinPoolDev, ThinPoolStatus, ThinPoolWorkingStatus};\nuse time::now;\nuse uuid::Uuid;\nuse serde_json;\n\nuse super::super::consts::IEC::Mi;\nuse super::super::engine::{Filesystem, HasName, HasUuid, Pool};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::structures::Table;\nuse super::super::types::{FilesystemUuid, RenameAction, Redundancy};\n\nuse super::blockdevmgr::BlockDevMgr;\nuse super::device::wipe_sectors;\nuse super::dmdevice::{FlexRole, ThinPoolRole, format_flex_name, format_thinpool_name};\nuse super::filesystem::{StratFilesystem, FilesystemStatus};\nuse super::mdv::MetadataVol;\nuse super::metadata::MIN_MDA_SECTORS;\nuse super::serde_structs::{FlexDevsSave, PoolSave, Recordable, ThinPoolDevSave};\n\nconst DATA_BLOCK_SIZE: Sectors = Sectors(2048);\nconst META_LOWATER: u64 = 512;\nconst DATA_LOWATER: DataBlocks = DataBlocks(512);\n\nconst INITIAL_META_SIZE: Sectors = Sectors(16 * Mi \/ SECTOR_SIZE as u64);\nconst INITIAL_DATA_SIZE: Sectors = Sectors(512 * Mi \/ SECTOR_SIZE as u64);\nconst INITIAL_MDV_SIZE: Sectors = Sectors(16 * Mi \/ SECTOR_SIZE as u64);\n\n#[derive(Debug)]\npub struct StratPool {\n name: String,\n pool_uuid: Uuid,\n pub block_devs: BlockDevMgr,\n pub filesystems: Table,\n redundancy: Redundancy,\n thin_pool: ThinPoolDev,\n mdv: MetadataVol,\n}\n\nimpl StratPool {\n \/\/\/ Initialize a Stratis Pool.\n \/\/\/ 1. Initialize the block devices specified by paths.\n \/\/\/ 2. Set up thinpool device to back filesystems.\n pub fn initialize(name: &str,\n dm: &DM,\n paths: &[&Path],\n redundancy: Redundancy,\n force: bool)\n -> EngineResult {\n let pool_uuid = Uuid::new_v4();\n\n let mut block_mgr =\n try!(BlockDevMgr::initialize(&pool_uuid, paths, MIN_MDA_SECTORS, force));\n\n if block_mgr.avail_space() < StratPool::min_initial_size() {\n let avail_size = block_mgr.avail_space().bytes();\n try!(block_mgr.destroy_all());\n return Err(EngineError::Engine(ErrorEnum::Invalid,\n format!(\"Space on pool must be at least {} bytes, \\\n available space is only {} bytes\",\n StratPool::min_initial_size().bytes(),\n avail_size)));\n\n\n }\n\n let meta_regions = block_mgr\n .alloc_space(INITIAL_META_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::ThinMeta);\n let meta_dev = try!(LinearDev::new(&device_name, dm, meta_regions));\n\n \/\/ When constructing a thin-pool, Stratis reserves the first N\n \/\/ sectors on a block device by creating a linear device with a\n \/\/ starting offset. 
DM writes the super block in the first block.\n \/\/ DM requires this first block to be zeros when the meta data for\n \/\/ the thin-pool is initially created. If we don't zero the\n \/\/ superblock DM issue error messages because it triggers code paths\n \/\/ that are trying to re-adopt the device with the attributes that\n \/\/ have been passed.\n try!(wipe_sectors(&try!(meta_dev.devnode()), Sectors(0), INITIAL_META_SIZE));\n\n let data_regions = block_mgr\n .alloc_space(INITIAL_DATA_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::ThinData);\n let data_dev = try!(LinearDev::new(&device_name, dm, data_regions));\n let length = try!(data_dev.size());\n\n let device_name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n \/\/ TODO Fix hard coded data blocksize and low water mark.\n let thinpool_dev = try!(ThinPoolDev::new(&device_name,\n dm,\n length,\n DATA_BLOCK_SIZE,\n DataBlocks(256000),\n meta_dev,\n data_dev));\n\n let mdv_regions = block_mgr\n .alloc_space(INITIAL_MDV_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::MetadataVolume);\n let mdv_dev = try!(LinearDev::new(&device_name, dm, mdv_regions));\n let mdv = try!(MetadataVol::initialize(&pool_uuid, mdv_dev));\n\n let mut pool = StratPool {\n name: name.to_owned(),\n pool_uuid: pool_uuid,\n block_devs: block_mgr,\n filesystems: Table::new(),\n redundancy: redundancy,\n thin_pool: thinpool_dev,\n mdv: mdv,\n };\n\n try!(pool.write_metadata());\n\n Ok(pool)\n }\n\n \/\/\/ Minimum initial size for a pool.\n pub fn min_initial_size() -> Sectors {\n INITIAL_META_SIZE + INITIAL_DATA_SIZE + INITIAL_MDV_SIZE\n }\n\n \/\/ TODO: Check current time against global last updated, and use\n \/\/ alternate time value if earlier, as described in SWDD\n fn write_metadata(&mut self) -> EngineResult<()> {\n let data = try!(serde_json::to_string(&try!(self.record())));\n self.block_devs\n .save_state(&now().to_timespec(), data.as_bytes())\n }\n\n pub fn check(&mut self) -> () {\n let dm = DM::new().expect(\"Could not get DM handle\");\n\n if let Err(e) = self.mdv.check() {\n error!(\"MDV error: {}\", e);\n return;\n }\n\n let result = match self.thin_pool.status(&dm) {\n Ok(r) => r,\n Err(_) => {\n error!(\"Could not get thinpool status\");\n \/\/ TODO: Take pool offline?\n return;\n }\n };\n\n match result {\n ThinPoolStatus::Good(wstatus, usage) => {\n match wstatus {\n ThinPoolWorkingStatus::Good => {}\n ThinPoolWorkingStatus::ReadOnly => {\n \/\/ TODO: why is pool r\/o and how do we get it\n \/\/ rw again?\n }\n ThinPoolWorkingStatus::OutOfSpace => {\n \/\/ TODO: Add more space if possible, or\n \/\/ prevent further usage\n \/\/ Should never happen -- we should be extending first!\n }\n ThinPoolWorkingStatus::NeedsCheck => {\n \/\/ TODO: Take pool offline?\n \/\/ TODO: run thin_check\n }\n }\n\n if usage.used_meta > usage.total_meta - META_LOWATER {\n \/\/ TODO: Extend meta device\n }\n\n if usage.used_data > usage.total_data - DATA_LOWATER {\n \/\/ TODO: Extend data device\n }\n }\n ThinPoolStatus::Fail => {\n \/\/ TODO: Take pool offline?\n \/\/ TODO: Run thin_check\n }\n }\n\n for fs in &mut self.filesystems {\n match fs.check(&dm) {\n Ok(f_status) => {\n if let FilesystemStatus::Failed = f_status {\n \/\/ TODO: recover fs? 
(how?)\n }\n }\n Err(_e) => error!(\"fs.check() failed\"),\n }\n }\n }\n\n \/\/\/ Teardown a pool.\n \/\/\/ Take down the device mapper devices belonging to the pool.\n \/\/\/ This method and destroy() must keep their DM teardown operations\n \/\/\/ in sync.\n pub fn teardown(self) -> EngineResult<()> {\n \/\/ TODO: any necessary clean up of filesystems\n if !self.filesystems.is_empty() {\n return Err(EngineError::Engine(ErrorEnum::Busy,\n format!(\"May be unsynced files on device.\")));\n }\n let dm = try!(DM::new());\n try!(self.thin_pool.teardown(&dm));\n try!(self.mdv.teardown(&dm));\n Ok(())\n }\n}\n\nimpl Pool for StratPool {\n fn create_filesystems<'a, 'b>(&'a mut self,\n specs: &[&'b str])\n -> EngineResult> {\n let names: HashSet<_, RandomState> = HashSet::from_iter(specs);\n for name in names.iter() {\n if self.filesystems.contains_name(name) {\n return Err(EngineError::Engine(ErrorEnum::AlreadyExists, name.to_string()));\n }\n }\n\n \/\/ TODO: Roll back on filesystem initialization failure.\n let dm = try!(DM::new());\n let mut result = Vec::new();\n for name in names.iter() {\n let uuid = Uuid::new_v4();\n let new_filesystem = try!(StratFilesystem::initialize(&self.pool_uuid,\n uuid,\n name,\n &dm,\n &mut self.thin_pool));\n try!(self.mdv.save_fs(&new_filesystem));\n self.filesystems.insert(new_filesystem);\n result.push((**name, uuid));\n }\n\n Ok(result)\n }\n\n fn add_blockdevs(&mut self, paths: &[&Path], force: bool) -> EngineResult> {\n let bdev_paths = try!(self.block_devs.add(&self.pool_uuid, paths, force));\n try!(self.write_metadata());\n Ok(bdev_paths)\n }\n\n fn destroy(self) -> EngineResult<()> {\n \/\/ Ensure that DM teardown operations in this method are in sync\n \/\/ with operations in teardown().\n let dm = try!(DM::new());\n try!(self.thin_pool.teardown(&dm));\n try!(self.mdv.teardown(&dm));\n try!(self.block_devs.destroy_all());\n\n Ok(())\n }\n\n fn destroy_filesystems<'a, 'b>(&'a mut self,\n fs_uuids: &[&'b FilesystemUuid])\n -> EngineResult> {\n for fsid in fs_uuids {\n try!(self.mdv.rm_fs(fsid));\n }\n destroy_filesystems!{self; fs_uuids}\n }\n\n fn rename_filesystem(&mut self,\n uuid: &FilesystemUuid,\n new_name: &str)\n -> EngineResult {\n rename_filesystem!{self; uuid; new_name}\n }\n\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn get_filesystem(&mut self, uuid: &FilesystemUuid) -> Option<&mut Filesystem> {\n get_filesystem!(self; uuid)\n }\n}\n\nimpl HasUuid for StratPool {\n fn uuid(&self) -> &FilesystemUuid {\n &self.pool_uuid\n }\n}\n\nimpl HasName for StratPool {\n fn name(&self) -> &str {\n &self.name\n }\n}\n\nimpl Recordable for StratPool {\n fn record(&self) -> EngineResult {\n\n let mapper = |seg: &Segment| -> EngineResult<(String, Sectors, Sectors)> {\n let bd = try!(self.block_devs\n .get_by_device(seg.device)\n .ok_or(EngineError::Engine(ErrorEnum::NotFound,\n format!(\"no block device found for device {:?}\",\n seg.device))));\n Ok((bd.uuid().simple().to_string(), seg.start, seg.length))\n };\n\n let mut meta_dev = vec![];\n for item in self.mdv.segments().iter().map(&mapper) {\n match item {\n Ok(seg) => meta_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n let mut thin_meta_dev = vec![];\n for item in self.thin_pool\n .meta_dev()\n .segments()\n .iter()\n .map(&mapper) {\n match item {\n Ok(seg) => thin_meta_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n let mut thin_data_dev = vec![];\n for item in self.thin_pool\n .data_dev()\n .segments()\n .iter()\n .map(&mapper) {\n match item {\n 
Ok(seg) => thin_data_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n Ok(PoolSave {\n name: self.name.clone(),\n block_devs: try!(self.block_devs.record()),\n flex_devs: FlexDevsSave {\n meta_dev: meta_dev,\n thin_meta_dev: thin_meta_dev,\n thin_data_dev: thin_data_dev,\n },\n thinpool_dev: ThinPoolDevSave { data_block_size: *self.thin_pool.data_block_size() },\n })\n }\n}\nupdate pool::uuid() to return a PoolUuid\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::HashSet;\nuse std::collections::hash_map::RandomState;\nuse std::iter::FromIterator;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\n\nuse devicemapper::consts::SECTOR_SIZE;\nuse devicemapper::DM;\nuse devicemapper::{DataBlocks, Sectors, Segment};\nuse devicemapper::LinearDev;\nuse devicemapper::{ThinPoolDev, ThinPoolStatus, ThinPoolWorkingStatus};\nuse time::now;\nuse uuid::Uuid;\nuse serde_json;\n\nuse super::super::consts::IEC::Mi;\nuse super::super::engine::{Filesystem, HasName, HasUuid, Pool};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::structures::Table;\nuse super::super::types::{FilesystemUuid, PoolUuid, RenameAction, Redundancy};\n\nuse super::blockdevmgr::BlockDevMgr;\nuse super::device::wipe_sectors;\nuse super::dmdevice::{FlexRole, ThinPoolRole, format_flex_name, format_thinpool_name};\nuse super::filesystem::{StratFilesystem, FilesystemStatus};\nuse super::mdv::MetadataVol;\nuse super::metadata::MIN_MDA_SECTORS;\nuse super::serde_structs::{FlexDevsSave, PoolSave, Recordable, ThinPoolDevSave};\n\nconst DATA_BLOCK_SIZE: Sectors = Sectors(2048);\nconst META_LOWATER: u64 = 512;\nconst DATA_LOWATER: DataBlocks = DataBlocks(512);\n\nconst INITIAL_META_SIZE: Sectors = Sectors(16 * Mi \/ SECTOR_SIZE as u64);\nconst INITIAL_DATA_SIZE: Sectors = Sectors(512 * Mi \/ SECTOR_SIZE as u64);\nconst INITIAL_MDV_SIZE: Sectors = Sectors(16 * Mi \/ SECTOR_SIZE as u64);\n\n#[derive(Debug)]\npub struct StratPool {\n name: String,\n pool_uuid: PoolUuid,\n pub block_devs: BlockDevMgr,\n pub filesystems: Table,\n redundancy: Redundancy,\n thin_pool: ThinPoolDev,\n mdv: MetadataVol,\n}\n\nimpl StratPool {\n \/\/\/ Initialize a Stratis Pool.\n \/\/\/ 1. Initialize the block devices specified by paths.\n \/\/\/ 2. 
Set up thinpool device to back filesystems.\n pub fn initialize(name: &str,\n dm: &DM,\n paths: &[&Path],\n redundancy: Redundancy,\n force: bool)\n -> EngineResult {\n let pool_uuid = Uuid::new_v4();\n\n let mut block_mgr =\n try!(BlockDevMgr::initialize(&pool_uuid, paths, MIN_MDA_SECTORS, force));\n\n if block_mgr.avail_space() < StratPool::min_initial_size() {\n let avail_size = block_mgr.avail_space().bytes();\n try!(block_mgr.destroy_all());\n return Err(EngineError::Engine(ErrorEnum::Invalid,\n format!(\"Space on pool must be at least {} bytes, \\\n available space is only {} bytes\",\n StratPool::min_initial_size().bytes(),\n avail_size)));\n\n\n }\n\n let meta_regions = block_mgr\n .alloc_space(INITIAL_META_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::ThinMeta);\n let meta_dev = try!(LinearDev::new(&device_name, dm, meta_regions));\n\n \/\/ When constructing a thin-pool, Stratis reserves the first N\n \/\/ sectors on a block device by creating a linear device with a\n \/\/ starting offset. DM writes the super block in the first block.\n \/\/ DM requires this first block to be zeros when the meta data for\n \/\/ the thin-pool is initially created. If we don't zero the\n \/\/ superblock DM issue error messages because it triggers code paths\n \/\/ that are trying to re-adopt the device with the attributes that\n \/\/ have been passed.\n try!(wipe_sectors(&try!(meta_dev.devnode()), Sectors(0), INITIAL_META_SIZE));\n\n let data_regions = block_mgr\n .alloc_space(INITIAL_DATA_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::ThinData);\n let data_dev = try!(LinearDev::new(&device_name, dm, data_regions));\n let length = try!(data_dev.size());\n\n let device_name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n \/\/ TODO Fix hard coded data blocksize and low water mark.\n let thinpool_dev = try!(ThinPoolDev::new(&device_name,\n dm,\n length,\n DATA_BLOCK_SIZE,\n DataBlocks(256000),\n meta_dev,\n data_dev));\n\n let mdv_regions = block_mgr\n .alloc_space(INITIAL_MDV_SIZE)\n .expect(\"blockmgr must not fail, already checked for space\");\n let device_name = format_flex_name(&pool_uuid, FlexRole::MetadataVolume);\n let mdv_dev = try!(LinearDev::new(&device_name, dm, mdv_regions));\n let mdv = try!(MetadataVol::initialize(&pool_uuid, mdv_dev));\n\n let mut pool = StratPool {\n name: name.to_owned(),\n pool_uuid: pool_uuid,\n block_devs: block_mgr,\n filesystems: Table::new(),\n redundancy: redundancy,\n thin_pool: thinpool_dev,\n mdv: mdv,\n };\n\n try!(pool.write_metadata());\n\n Ok(pool)\n }\n\n \/\/\/ Minimum initial size for a pool.\n pub fn min_initial_size() -> Sectors {\n INITIAL_META_SIZE + INITIAL_DATA_SIZE + INITIAL_MDV_SIZE\n }\n\n \/\/ TODO: Check current time against global last updated, and use\n \/\/ alternate time value if earlier, as described in SWDD\n fn write_metadata(&mut self) -> EngineResult<()> {\n let data = try!(serde_json::to_string(&try!(self.record())));\n self.block_devs\n .save_state(&now().to_timespec(), data.as_bytes())\n }\n\n pub fn check(&mut self) -> () {\n let dm = DM::new().expect(\"Could not get DM handle\");\n\n if let Err(e) = self.mdv.check() {\n error!(\"MDV error: {}\", e);\n return;\n }\n\n let result = match self.thin_pool.status(&dm) {\n Ok(r) => r,\n Err(_) => {\n error!(\"Could not get thinpool status\");\n \/\/ TODO: Take pool offline?\n return;\n }\n };\n\n match 
result {\n ThinPoolStatus::Good(wstatus, usage) => {\n match wstatus {\n ThinPoolWorkingStatus::Good => {}\n ThinPoolWorkingStatus::ReadOnly => {\n \/\/ TODO: why is pool r\/o and how do we get it\n \/\/ rw again?\n }\n ThinPoolWorkingStatus::OutOfSpace => {\n \/\/ TODO: Add more space if possible, or\n \/\/ prevent further usage\n \/\/ Should never happen -- we should be extending first!\n }\n ThinPoolWorkingStatus::NeedsCheck => {\n \/\/ TODO: Take pool offline?\n \/\/ TODO: run thin_check\n }\n }\n\n if usage.used_meta > usage.total_meta - META_LOWATER {\n \/\/ TODO: Extend meta device\n }\n\n if usage.used_data > usage.total_data - DATA_LOWATER {\n \/\/ TODO: Extend data device\n }\n }\n ThinPoolStatus::Fail => {\n \/\/ TODO: Take pool offline?\n \/\/ TODO: Run thin_check\n }\n }\n\n for fs in &mut self.filesystems {\n match fs.check(&dm) {\n Ok(f_status) => {\n if let FilesystemStatus::Failed = f_status {\n \/\/ TODO: recover fs? (how?)\n }\n }\n Err(_e) => error!(\"fs.check() failed\"),\n }\n }\n }\n\n \/\/\/ Teardown a pool.\n \/\/\/ Take down the device mapper devices belonging to the pool.\n \/\/\/ This method and destroy() must keep their DM teardown operations\n \/\/\/ in sync.\n pub fn teardown(self) -> EngineResult<()> {\n \/\/ TODO: any necessary clean up of filesystems\n if !self.filesystems.is_empty() {\n return Err(EngineError::Engine(ErrorEnum::Busy,\n format!(\"May be unsynced files on device.\")));\n }\n let dm = try!(DM::new());\n try!(self.thin_pool.teardown(&dm));\n try!(self.mdv.teardown(&dm));\n Ok(())\n }\n}\n\nimpl Pool for StratPool {\n fn create_filesystems<'a, 'b>(&'a mut self,\n specs: &[&'b str])\n -> EngineResult> {\n let names: HashSet<_, RandomState> = HashSet::from_iter(specs);\n for name in names.iter() {\n if self.filesystems.contains_name(name) {\n return Err(EngineError::Engine(ErrorEnum::AlreadyExists, name.to_string()));\n }\n }\n\n \/\/ TODO: Roll back on filesystem initialization failure.\n let dm = try!(DM::new());\n let mut result = Vec::new();\n for name in names.iter() {\n let uuid = Uuid::new_v4();\n let new_filesystem = try!(StratFilesystem::initialize(&self.pool_uuid,\n uuid,\n name,\n &dm,\n &mut self.thin_pool));\n try!(self.mdv.save_fs(&new_filesystem));\n self.filesystems.insert(new_filesystem);\n result.push((**name, uuid));\n }\n\n Ok(result)\n }\n\n fn add_blockdevs(&mut self, paths: &[&Path], force: bool) -> EngineResult> {\n let bdev_paths = try!(self.block_devs.add(&self.pool_uuid, paths, force));\n try!(self.write_metadata());\n Ok(bdev_paths)\n }\n\n fn destroy(self) -> EngineResult<()> {\n \/\/ Ensure that DM teardown operations in this method are in sync\n \/\/ with operations in teardown().\n let dm = try!(DM::new());\n try!(self.thin_pool.teardown(&dm));\n try!(self.mdv.teardown(&dm));\n try!(self.block_devs.destroy_all());\n\n Ok(())\n }\n\n fn destroy_filesystems<'a, 'b>(&'a mut self,\n fs_uuids: &[&'b FilesystemUuid])\n -> EngineResult> {\n for fsid in fs_uuids {\n try!(self.mdv.rm_fs(fsid));\n }\n destroy_filesystems!{self; fs_uuids}\n }\n\n fn rename_filesystem(&mut self,\n uuid: &FilesystemUuid,\n new_name: &str)\n -> EngineResult {\n rename_filesystem!{self; uuid; new_name}\n }\n\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn get_filesystem(&mut self, uuid: &FilesystemUuid) -> Option<&mut Filesystem> {\n get_filesystem!(self; uuid)\n }\n}\n\nimpl HasUuid for StratPool {\n fn uuid(&self) -> &PoolUuid {\n &self.pool_uuid\n }\n}\n\nimpl HasName for StratPool {\n fn name(&self) -> &str {\n 
&self.name\n }\n}\n\nimpl Recordable for StratPool {\n fn record(&self) -> EngineResult {\n\n let mapper = |seg: &Segment| -> EngineResult<(String, Sectors, Sectors)> {\n let bd = try!(self.block_devs\n .get_by_device(seg.device)\n .ok_or(EngineError::Engine(ErrorEnum::NotFound,\n format!(\"no block device found for device {:?}\",\n seg.device))));\n Ok((bd.uuid().simple().to_string(), seg.start, seg.length))\n };\n\n let mut meta_dev = vec![];\n for item in self.mdv.segments().iter().map(&mapper) {\n match item {\n Ok(seg) => meta_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n let mut thin_meta_dev = vec![];\n for item in self.thin_pool\n .meta_dev()\n .segments()\n .iter()\n .map(&mapper) {\n match item {\n Ok(seg) => thin_meta_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n let mut thin_data_dev = vec![];\n for item in self.thin_pool\n .data_dev()\n .segments()\n .iter()\n .map(&mapper) {\n match item {\n Ok(seg) => thin_data_dev.push(seg),\n Err(err) => return Err(err),\n }\n }\n\n Ok(PoolSave {\n name: self.name.clone(),\n block_devs: try!(self.block_devs.record()),\n flex_devs: FlexDevsSave {\n meta_dev: meta_dev,\n thin_meta_dev: thin_meta_dev,\n thin_data_dev: thin_data_dev,\n },\n thinpool_dev: ThinPoolDevSave { data_block_size: *self.thin_pool.data_block_size() },\n })\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n#![feature(plugin_registrar, quote, box_syntax)]\n#![deny(missing_copy_implementations)]\n#![allow(unstable)]\n\n\/\/! Macro extensions crate.\n\/\/! Implements `shaders!` macro as well as `#[shader_param]` and\n\/\/! `#[vertex_format]` attributes.\n\nextern crate rustc;\nextern crate syntax;\n\nuse syntax::{ast, attr, ext, codemap};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token;\nuse syntax::fold::Folder;\nuse syntax::ptr::P;\n\npub mod shader_param;\npub mod vertex_format;\n\n\/\/\/ Entry point for the plugin phase\n#[plugin_registrar]\npub fn registrar(reg: &mut rustc::plugin::Registry) {\n use syntax::parse::token::intern;\n use syntax::ext::base;\n \/\/ Register the `#[shader_param]` attribute.\n reg.register_syntax_extension(intern(\"shader_param\"),\n base::Decorator(box shader_param::ShaderParam));\n \/\/ Register the `#[vertex_format]` attribute.\n reg.register_syntax_extension(intern(\"vertex_format\"),\n base::Decorator(box vertex_format::VertexFormat));\n}\n\n\/\/\/ Scan through the field's attributes and extract the field vertex name. 
If\n\/\/\/ multiple names are found, use the first name and emit a warning.\nfn find_name(cx: &mut ext::base::ExtCtxt, span: codemap::Span,\n attributes: &[ast::Attribute]) -> Option {\n attributes.iter().fold(None, |name, attribute| {\n match attribute.node.value.node {\n ast::MetaNameValue(ref attr_name, ref attr_value) => {\n match (attr_name.get(), &attr_value.node) {\n (\"name\", &ast::LitStr(ref new_name, _)) => {\n attr::mark_used(attribute);\n name.map_or(Some(new_name.clone()), |name| {\n cx.span_warn(span, &format!(\n \"Extra field name detected: {:?} - \\\n ignoring in favour of: {:?}\", new_name, name\n )[]);\n None\n })\n }\n _ => None,\n }\n }\n _ => name,\n }\n })\n}\n\n\/\/\/ Marker string to base the unique identifier generated by `extern_crate_hack()` on\nstatic EXTERN_CRATE_HACK: &'static str = \"__gfx_extern_crate_hack\";\n\n\/\/\/ Inserts a module with a unique identifier that reexports\n\/\/\/ The `gfx` crate, and returns that identifier\nfn extern_crate_hack(context: &mut ext::base::ExtCtxt,\n span: codemap::Span,\n mut push: F) -> ast::Ident where F: FnMut(P) {\n let extern_crate_hack = token::gensym_ident(EXTERN_CRATE_HACK);\n \/\/ mod $EXTERN_CRATE_HACK {\n \/\/ extern crate gfx_ = \"gfx\";\n \/\/ pub use gfx_ as gfx;\n \/\/ }\n let item = context.item_mod(\n span,\n span,\n extern_crate_hack,\n vec![],\n vec![\n P(ast::Item {\n span: span,\n vis: ast::Inherited,\n attrs: vec![],\n node: ast::ItemExternCrate(\n Some((\n token::InternedString::new(\"gfx\"),\n ast::CookedStr\n )),\n ),\n id: ast::DUMMY_NODE_ID,\n ident: token::str_to_ident(\"gfx_\")\n }),\n context.item_use_simple_(\n span,\n ast::Public,\n context.ident_of(\"gfx\"),\n context.path(span, vec![\n context.ident_of(\"self\"),\n context.ident_of(\"gfx_\")\n ])\n )\n ]\n );\n push(item);\n extern_crate_hack\n}\n\n\/\/\/ This Folder gets used to fixup all paths generated by the\n\/\/\/ #[derive trait impl to point to the unique module\n\/\/\/ containing the `gfx` reexport.\nstruct ExternCrateHackFolder {\n path_root: ast::Ident\n}\n\nimpl Folder for ExternCrateHackFolder {\n fn fold_path(&mut self, p: ast::Path) -> ast::Path {\n let p = syntax::fold::noop_fold_path(p, self);\n let needs_fix = (&p.segments[]).get(0)\n .map(|s| s.identifier.as_str() == EXTERN_CRATE_HACK)\n .unwrap_or(false);\n let needs_fix_self = (&p.segments[]).get(0)\n .map(|s| s.identifier.as_str() == \"self\")\n .unwrap_or(false) &&\n (&p.segments[]).get(1)\n .map(|s| s.identifier.as_str() == EXTERN_CRATE_HACK)\n .unwrap_or(false);\n\n if needs_fix {\n let mut p = p.clone();\n p.segments[0].identifier = self.path_root;\n p.global = false;\n p\n } else if needs_fix_self {\n let mut p = p.clone();\n p.segments[1].identifier = self.path_root;\n p.global = false;\n p\n } else {\n p\n }\n\n }\n}\n\n\/\/\/ Simply applies the `ExternCrateHackFolder`\nfn fixup_extern_crate_paths(item: P, path_root: ast::Ident) -> P {\n ExternCrateHackFolder {\n path_root: path_root\n }.fold_item(item).into_iter().next().unwrap()\n}\n\n\/\/ The `gfx` reexport module here does not need a unique name,\n\/\/ as it gets inserted in a new block and thus doesn't conflict with\n\/\/ any names outside its lexical scope.\n#[macro_export]\nmacro_rules! 
shaders {\n (targets : $v:expr) => {\n {\n use gfx;\n gfx::ShaderSource {\n targets: $v,\n ..shaders!()\n }\n }\n };\n (targets : $v:expr, $($t:tt)*) => {\n {\n use gfx;\n gfx::ShaderSource {\n targets: $v,\n ..shaders!($($t)*)\n }\n }\n };\n ($i:ident : $v:expr) => {\n {\n use gfx;\n gfx::ShaderSource {\n $i: Some($v),\n ..shaders!()\n }\n }\n };\n ($i:ident : $v:expr, $($t:tt)*) => {\n {\n use gfx;\n gfx::ShaderSource {\n $i: Some($v),\n ..shaders!($($t)*)\n }\n }\n };\n () => {\n {\n use gfx;\n gfx::ShaderSource {\n glsl_120: None,\n glsl_130: None,\n glsl_140: None,\n glsl_150: None,\n targets: &[],\n }\n }\n }\n}\nSilenced warnings in gfx_macros, related to unstable rust items\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n#![feature(core, plugin_registrar, quote, box_syntax, rustc_private)]\n#![deny(missing_copy_implementations)]\n\n\/\/! Macro extensions crate.\n\/\/! Implements `shaders!` macro as well as `#[shader_param]` and\n\/\/! `#[vertex_format]` attributes.\n\nextern crate rustc;\nextern crate syntax;\n\nuse syntax::{ast, attr, ext, codemap};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token;\nuse syntax::fold::Folder;\nuse syntax::ptr::P;\n\npub mod shader_param;\npub mod vertex_format;\n\n\/\/\/ Entry point for the plugin phase\n#[plugin_registrar]\npub fn registrar(reg: &mut rustc::plugin::Registry) {\n use syntax::parse::token::intern;\n use syntax::ext::base;\n \/\/ Register the `#[shader_param]` attribute.\n reg.register_syntax_extension(intern(\"shader_param\"),\n base::Decorator(box shader_param::ShaderParam));\n \/\/ Register the `#[vertex_format]` attribute.\n reg.register_syntax_extension(intern(\"vertex_format\"),\n base::Decorator(box vertex_format::VertexFormat));\n}\n\n\/\/\/ Scan through the field's attributes and extract the field vertex name. 
If\n\/\/\/ multiple names are found, use the first name and emit a warning.\nfn find_name(cx: &mut ext::base::ExtCtxt, span: codemap::Span,\n attributes: &[ast::Attribute]) -> Option {\n attributes.iter().fold(None, |name, attribute| {\n match attribute.node.value.node {\n ast::MetaNameValue(ref attr_name, ref attr_value) => {\n match (attr_name.get(), &attr_value.node) {\n (\"name\", &ast::LitStr(ref new_name, _)) => {\n attr::mark_used(attribute);\n name.map_or(Some(new_name.clone()), |name| {\n cx.span_warn(span, &format!(\n \"Extra field name detected: {:?} - \\\n ignoring in favour of: {:?}\", new_name, name\n )[]);\n None\n })\n }\n _ => None,\n }\n }\n _ => name,\n }\n })\n}\n\n\/\/\/ Marker string to base the unique identifier generated by `extern_crate_hack()` on\nstatic EXTERN_CRATE_HACK: &'static str = \"__gfx_extern_crate_hack\";\n\n\/\/\/ Inserts a module with a unique identifier that reexports\n\/\/\/ The `gfx` crate, and returns that identifier\nfn extern_crate_hack(context: &mut ext::base::ExtCtxt,\n span: codemap::Span,\n mut push: F) -> ast::Ident where F: FnMut(P) {\n let extern_crate_hack = token::gensym_ident(EXTERN_CRATE_HACK);\n \/\/ mod $EXTERN_CRATE_HACK {\n \/\/ extern crate gfx_ = \"gfx\";\n \/\/ pub use gfx_ as gfx;\n \/\/ }\n let item = context.item_mod(\n span,\n span,\n extern_crate_hack,\n vec![],\n vec![\n P(ast::Item {\n span: span,\n vis: ast::Inherited,\n attrs: vec![],\n node: ast::ItemExternCrate(\n Some((\n token::InternedString::new(\"gfx\"),\n ast::CookedStr\n )),\n ),\n id: ast::DUMMY_NODE_ID,\n ident: token::str_to_ident(\"gfx_\")\n }),\n context.item_use_simple_(\n span,\n ast::Public,\n context.ident_of(\"gfx\"),\n context.path(span, vec![\n context.ident_of(\"self\"),\n context.ident_of(\"gfx_\")\n ])\n )\n ]\n );\n push(item);\n extern_crate_hack\n}\n\n\/\/\/ This Folder gets used to fixup all paths generated by the\n\/\/\/ #[derive trait impl to point to the unique module\n\/\/\/ containing the `gfx` reexport.\nstruct ExternCrateHackFolder {\n path_root: ast::Ident\n}\n\nimpl Folder for ExternCrateHackFolder {\n fn fold_path(&mut self, p: ast::Path) -> ast::Path {\n let p = syntax::fold::noop_fold_path(p, self);\n let needs_fix = (&p.segments[]).get(0)\n .map(|s| s.identifier.as_str() == EXTERN_CRATE_HACK)\n .unwrap_or(false);\n let needs_fix_self = (&p.segments[]).get(0)\n .map(|s| s.identifier.as_str() == \"self\")\n .unwrap_or(false) &&\n (&p.segments[]).get(1)\n .map(|s| s.identifier.as_str() == EXTERN_CRATE_HACK)\n .unwrap_or(false);\n\n if needs_fix {\n let mut p = p.clone();\n p.segments[0].identifier = self.path_root;\n p.global = false;\n p\n } else if needs_fix_self {\n let mut p = p.clone();\n p.segments[1].identifier = self.path_root;\n p.global = false;\n p\n } else {\n p\n }\n\n }\n}\n\n\/\/\/ Simply applies the `ExternCrateHackFolder`\nfn fixup_extern_crate_paths(item: P, path_root: ast::Ident) -> P {\n ExternCrateHackFolder {\n path_root: path_root\n }.fold_item(item).into_iter().next().unwrap()\n}\n\n\/\/ The `gfx` reexport module here does not need a unique name,\n\/\/ as it gets inserted in a new block and thus doesn't conflict with\n\/\/ any names outside its lexical scope.\n#[macro_export]\nmacro_rules! 
shaders {\n (targets : $v:expr) => {\n {\n use gfx;\n gfx::ShaderSource {\n targets: $v,\n ..shaders!()\n }\n }\n };\n (targets : $v:expr, $($t:tt)*) => {\n {\n use gfx;\n gfx::ShaderSource {\n targets: $v,\n ..shaders!($($t)*)\n }\n }\n };\n ($i:ident : $v:expr) => {\n {\n use gfx;\n gfx::ShaderSource {\n $i: Some($v),\n ..shaders!()\n }\n }\n };\n ($i:ident : $v:expr, $($t:tt)*) => {\n {\n use gfx;\n gfx::ShaderSource {\n $i: Some($v),\n ..shaders!($($t)*)\n }\n }\n };\n () => {\n {\n use gfx;\n gfx::ShaderSource {\n glsl_120: None,\n glsl_130: None,\n glsl_140: None,\n glsl_150: None,\n targets: &[],\n }\n }\n }\n}\n<|endoftext|>"} {"text":"entry<|endoftext|>"} {"text":"\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[link(name = \"rustc\",\n vers = \"0.8-pre\",\n uuid = \"0ce89b41-2f92-459e-bbc1-8f5fe32f16cf\",\n url = \"https:\/\/github.com\/mozilla\/rust\/tree\/master\/src\/rustc\")];\n\n#[comment = \"The Rust compiler\"];\n#[license = \"MIT\/ASL2\"];\n#[crate_type = \"lib\"];\n\n\/\/ Rustc tasks always run on a fixed_stack_segment, so code in this\n\/\/ module can call C functions (in particular, LLVM functions) with\n\/\/ impunity.\n#[allow(cstack)];\n\nextern mod extra;\nextern mod syntax;\n\nuse driver::driver::{host_triple, optgroups, early_error};\nuse driver::driver::{str_input, file_input, build_session_options};\nuse driver::driver::{build_session, build_configuration, parse_pretty};\nuse driver::driver::{PpMode, pretty_print_input, list_metadata};\nuse driver::driver::{compile_input};\nuse driver::session;\nuse middle::lint;\n\nuse std::io;\nuse std::num;\nuse std::os;\nuse std::result;\nuse std::str;\nuse std::task;\nuse std::vec;\nuse extra::getopts::{groups, opt_present};\nuse extra::getopts;\nuse syntax::codemap;\nuse syntax::diagnostic;\n\npub mod middle {\n pub mod trans;\n pub mod ty;\n pub mod subst;\n pub mod resolve;\n pub mod typeck;\n pub mod check_loop;\n pub mod check_match;\n pub mod check_const;\n pub mod lint;\n pub mod borrowck;\n pub mod dataflow;\n pub mod mem_categorization;\n pub mod liveness;\n pub mod kind;\n pub mod freevars;\n pub mod pat_util;\n pub mod region;\n pub mod const_eval;\n pub mod astencode;\n pub mod lang_items;\n pub mod privacy;\n pub mod moves;\n pub mod entry;\n pub mod effect;\n pub mod reachable;\n pub mod graph;\n pub mod cfg;\n pub mod stack_check;\n}\n\npub mod front {\n pub mod config;\n pub mod test;\n pub mod std_inject;\n pub mod assign_node_ids;\n}\n\npub mod back {\n pub mod link;\n pub mod abi;\n pub mod upcall;\n pub mod arm;\n pub mod mips;\n pub mod x86;\n pub mod x86_64;\n pub mod rpath;\n pub mod target_strs;\n}\n\npub mod metadata;\n\npub mod driver;\n\npub mod util {\n pub mod common;\n pub mod ppaux;\n}\n\npub mod lib {\n pub mod llvm;\n}\n\n\/\/ A curious inner module that allows ::std::foo to be available in here for\n\/\/ macros.\n\/*\nmod std {\n pub use std::clone;\n pub use std::cmp;\n pub use std::os;\n pub use std::str;\n pub use std::sys;\n pub use std::to_bytes;\n pub use std::unstable;\n pub use extra::serialize;\n}\n*\/\n\npub fn version(argv0: &str) {\n let vers = match option_env!(\"CFG_VERSION\") {\n Some(vers) => vers,\n None => \"unknown version\"\n };\n 
printfln!(\"%s %s\", argv0, vers);\n printfln!(\"host: %s\", host_triple());\n}\n\npub fn usage(argv0: &str) {\n let message = fmt!(\"Usage: %s [OPTIONS] INPUT\", argv0);\n printfln!(\"%s\\\nAdditional help:\n -W help Print 'lint' options and default settings\n -Z help Print internal options for debugging rustc\\n\",\n groups::usage(message, optgroups()));\n}\n\npub fn describe_warnings() {\n use extra::sort::Sort;\n println(\"\nAvailable lint options:\n -W Warn about \n -A Allow \n -D Deny \n -F Forbid (deny, and deny all overrides)\n\");\n\n let lint_dict = lint::get_lint_dict();\n let mut lint_dict = lint_dict.move_iter()\n .map(|(k, v)| (v, k))\n .collect::<~[(lint::LintSpec, &'static str)]>();\n lint_dict.qsort();\n\n let mut max_key = 0;\n for &(_, name) in lint_dict.iter() {\n max_key = num::max(name.len(), max_key);\n }\n fn padded(max: uint, s: &str) -> ~str {\n str::from_utf8(vec::from_elem(max - s.len(), ' ' as u8)) + s\n }\n println(\"\\nAvailable lint checks:\\n\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, \"name\"), \"default\", \"meaning\");\n printfln!(\" %s %7.7s %s\\n\",\n padded(max_key, \"----\"), \"-------\", \"-------\");\n for (spec, name) in lint_dict.move_iter() {\n let name = name.replace(\"_\", \"-\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, name),\n lint::level_to_str(spec.default),\n spec.desc);\n }\n io::println(\"\");\n}\n\npub fn describe_debug_flags() {\n println(\"\\nAvailable debug options:\\n\");\n let r = session::debugging_opts_map();\n for tuple in r.iter() {\n match *tuple {\n (ref name, ref desc, _) => {\n printfln!(\" -Z %-20s -- %s\", *name, *desc);\n }\n }\n }\n}\n\npub fn run_compiler(args: &[~str], demitter: diagnostic::Emitter) {\n \/\/ Don't display log spew by default. Can override with RUST_LOG.\n ::std::logging::console_off();\n\n let mut args = args.to_owned();\n let binary = args.shift().to_managed();\n\n if args.is_empty() { usage(binary); return; }\n\n let matches =\n &match getopts::groups::getopts(args, optgroups()) {\n Ok(m) => m,\n Err(f) => {\n early_error(demitter, getopts::fail_str(f));\n }\n };\n\n if opt_present(matches, \"h\") || opt_present(matches, \"help\") {\n usage(binary);\n return;\n }\n\n \/\/ Display the available lint options if \"-W help\" or only \"-W\" is given.\n let lint_flags = vec::append(getopts::opt_strs(matches, \"W\"),\n getopts::opt_strs(matches, \"warn\"));\n\n let show_lint_options = lint_flags.iter().any(|x| x == &~\"help\") ||\n (opt_present(matches, \"W\") && lint_flags.is_empty());\n\n if show_lint_options {\n describe_warnings();\n return;\n }\n\n let r = getopts::opt_strs(matches, \"Z\");\n if r.iter().any(|x| x == &~\"help\") {\n describe_debug_flags();\n return;\n }\n\n if getopts::opt_maybe_str(matches, \"passes\") == Some(~\"list\") {\n unsafe { lib::llvm::llvm::LLVMRustPrintPasses(); }\n return;\n }\n\n if opt_present(matches, \"v\") || opt_present(matches, \"version\") {\n version(binary);\n return;\n }\n let input = match matches.free.len() {\n 0u => early_error(demitter, ~\"no input filename given\"),\n 1u => {\n let ifile = matches.free[0].as_slice();\n if \"-\" == ifile {\n let src = str::from_utf8(io::stdin().read_whole_stream());\n str_input(src.to_managed())\n } else {\n file_input(Path(ifile))\n }\n }\n _ => early_error(demitter, ~\"multiple input filenames provided\")\n };\n\n let sopts = build_session_options(binary, matches, demitter);\n let sess = build_session(sopts, demitter);\n let odir = getopts::opt_maybe_str(matches, \"out-dir\").map_move(|o| Path(o));\n let 
ofile = getopts::opt_maybe_str(matches, \"o\").map_move(|o| Path(o));\n let cfg = build_configuration(sess);\n let pretty = do getopts::opt_default(matches, \"pretty\", \"normal\").map_move |a| {\n parse_pretty(sess, a)\n };\n match pretty {\n Some::(ppm) => {\n pretty_print_input(sess, cfg, &input, ppm);\n return;\n }\n None:: => {\/* continue *\/ }\n }\n let ls = opt_present(matches, \"ls\");\n if ls {\n match input {\n file_input(ref ifile) => {\n list_metadata(sess, &(*ifile), io::stdout());\n }\n str_input(_) => {\n early_error(demitter, ~\"can not list metadata for stdin\");\n }\n }\n return;\n }\n\n compile_input(sess, cfg, &input, &odir, &ofile);\n}\n\n#[deriving(Eq)]\npub enum monitor_msg {\n fatal,\n done,\n}\n\n\/*\nThis is a sanity check that any failure of the compiler is performed\nthrough the diagnostic module and reported properly - we shouldn't be calling\nplain-old-fail on any execution path that might be taken. Since we have\nconsole logging off by default, hitting a plain fail statement would make the\ncompiler silently exit, which would be terrible.\n\nThis method wraps the compiler in a subtask and injects a function into the\ndiagnostic emitter which records when we hit a fatal error. If the task\nfails without recording a fatal error then we've encountered a compiler\nbug and need to present an error.\n*\/\npub fn monitor(f: ~fn(diagnostic::Emitter)) {\n use std::comm::*;\n\n \/\/ XXX: This is a hack for newsched since it doesn't support split stacks.\n \/\/ rustc needs a lot of stack!\n static STACK_SIZE: uint = 6000000;\n\n let (p, ch) = stream();\n let ch = SharedChan::new(ch);\n let ch_capture = ch.clone();\n let mut task_builder = task::task();\n task_builder.supervised();\n\n \/\/ XXX: Hacks on hacks. If the env is trying to override the stack size\n \/\/ then *don't* set it explicitly.\n if os::getenv(\"RUST_MIN_STACK\").is_none() {\n task_builder.opts.stack_size = Some(STACK_SIZE);\n }\n\n match do task_builder.try {\n let ch = ch_capture.clone();\n let ch_capture = ch.clone();\n \/\/ The 'diagnostics emitter'. Every error, warning, etc. should\n \/\/ go through this function.\n let demitter: @fn(Option<(@codemap::CodeMap, codemap::Span)>,\n &str,\n diagnostic::level) =\n |cmsp, msg, lvl| {\n if lvl == diagnostic::fatal {\n ch_capture.send(fatal);\n }\n diagnostic::emit(cmsp, msg, lvl);\n };\n\n struct finally {\n ch: SharedChan,\n }\n\n impl Drop for finally {\n fn drop(&mut self) { self.ch.send(done); }\n }\n\n let _finally = finally { ch: ch };\n\n f(demitter);\n\n \/\/ Due reasons explain in #7732, if there was a jit execution context it\n \/\/ must be consumed and passed along to our parent task.\n back::link::jit::consume_engine()\n } {\n result::Ok(_) => { \/* fallthrough *\/ }\n result::Err(_) => {\n \/\/ Task failed without emitting a fatal diagnostic\n if p.recv() == done {\n diagnostic::emit(\n None,\n diagnostic::ice_msg(\"unexpected failure\"),\n diagnostic::error);\n\n let xs = [\n ~\"the compiler hit an unexpected failure path. 
\\\n this is a bug\",\n ~\"try running with RUST_LOG=rustc=1 \\\n to get further details and report the results \\\n to github.com\/mozilla\/rust\/issues\"\n ];\n for note in xs.iter() {\n diagnostic::emit(None, *note, diagnostic::note)\n }\n }\n \/\/ Fail so the process returns a failure code\n fail!();\n }\n }\n}\n\npub fn main() {\n let args = os::args();\n main_args(args);\n}\n\npub fn main_args(args: &[~str]) {\n let owned_args = args.to_owned();\n do monitor |demitter| {\n run_compiler(owned_args, demitter);\n }\n}\nauto merge of #9271 : brson\/rust\/extra-help, r=catamorphism\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[link(name = \"rustc\",\n vers = \"0.8-pre\",\n uuid = \"0ce89b41-2f92-459e-bbc1-8f5fe32f16cf\",\n url = \"https:\/\/github.com\/mozilla\/rust\/tree\/master\/src\/rustc\")];\n\n#[comment = \"The Rust compiler\"];\n#[license = \"MIT\/ASL2\"];\n#[crate_type = \"lib\"];\n\n\/\/ Rustc tasks always run on a fixed_stack_segment, so code in this\n\/\/ module can call C functions (in particular, LLVM functions) with\n\/\/ impunity.\n#[allow(cstack)];\n\nextern mod extra;\nextern mod syntax;\n\nuse driver::driver::{host_triple, optgroups, early_error};\nuse driver::driver::{str_input, file_input, build_session_options};\nuse driver::driver::{build_session, build_configuration, parse_pretty};\nuse driver::driver::{PpMode, pretty_print_input, list_metadata};\nuse driver::driver::{compile_input};\nuse driver::session;\nuse middle::lint;\n\nuse std::io;\nuse std::num;\nuse std::os;\nuse std::result;\nuse std::str;\nuse std::task;\nuse std::vec;\nuse extra::getopts::{groups, opt_present};\nuse extra::getopts;\nuse syntax::codemap;\nuse syntax::diagnostic;\n\npub mod middle {\n pub mod trans;\n pub mod ty;\n pub mod subst;\n pub mod resolve;\n pub mod typeck;\n pub mod check_loop;\n pub mod check_match;\n pub mod check_const;\n pub mod lint;\n pub mod borrowck;\n pub mod dataflow;\n pub mod mem_categorization;\n pub mod liveness;\n pub mod kind;\n pub mod freevars;\n pub mod pat_util;\n pub mod region;\n pub mod const_eval;\n pub mod astencode;\n pub mod lang_items;\n pub mod privacy;\n pub mod moves;\n pub mod entry;\n pub mod effect;\n pub mod reachable;\n pub mod graph;\n pub mod cfg;\n pub mod stack_check;\n}\n\npub mod front {\n pub mod config;\n pub mod test;\n pub mod std_inject;\n pub mod assign_node_ids;\n}\n\npub mod back {\n pub mod link;\n pub mod abi;\n pub mod upcall;\n pub mod arm;\n pub mod mips;\n pub mod x86;\n pub mod x86_64;\n pub mod rpath;\n pub mod target_strs;\n}\n\npub mod metadata;\n\npub mod driver;\n\npub mod util {\n pub mod common;\n pub mod ppaux;\n}\n\npub mod lib {\n pub mod llvm;\n}\n\n\/\/ A curious inner module that allows ::std::foo to be available in here for\n\/\/ macros.\n\/*\nmod std {\n pub use std::clone;\n pub use std::cmp;\n pub use std::os;\n pub use std::str;\n pub use std::sys;\n pub use std::to_bytes;\n pub use std::unstable;\n pub use extra::serialize;\n}\n*\/\n\npub fn version(argv0: &str) {\n let vers = match option_env!(\"CFG_VERSION\") {\n Some(vers) => vers,\n None => \"unknown version\"\n };\n printfln!(\"%s %s\", argv0, vers);\n printfln!(\"host: %s\", host_triple());\n}\n\npub fn usage(argv0: 
&str) {\n let message = fmt!(\"Usage: %s [OPTIONS] INPUT\", argv0);\n printfln!(\"%s\\n\\\nAdditional help:\n -W help Print 'lint' options and default settings\n -Z help Print internal options for debugging rustc\\n\",\n groups::usage(message, optgroups()));\n}\n\npub fn describe_warnings() {\n use extra::sort::Sort;\n println(\"\nAvailable lint options:\n -W Warn about \n -A Allow \n -D Deny \n -F Forbid (deny, and deny all overrides)\n\");\n\n let lint_dict = lint::get_lint_dict();\n let mut lint_dict = lint_dict.move_iter()\n .map(|(k, v)| (v, k))\n .collect::<~[(lint::LintSpec, &'static str)]>();\n lint_dict.qsort();\n\n let mut max_key = 0;\n for &(_, name) in lint_dict.iter() {\n max_key = num::max(name.len(), max_key);\n }\n fn padded(max: uint, s: &str) -> ~str {\n str::from_utf8(vec::from_elem(max - s.len(), ' ' as u8)) + s\n }\n println(\"\\nAvailable lint checks:\\n\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, \"name\"), \"default\", \"meaning\");\n printfln!(\" %s %7.7s %s\\n\",\n padded(max_key, \"----\"), \"-------\", \"-------\");\n for (spec, name) in lint_dict.move_iter() {\n let name = name.replace(\"_\", \"-\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, name),\n lint::level_to_str(spec.default),\n spec.desc);\n }\n io::println(\"\");\n}\n\npub fn describe_debug_flags() {\n println(\"\\nAvailable debug options:\\n\");\n let r = session::debugging_opts_map();\n for tuple in r.iter() {\n match *tuple {\n (ref name, ref desc, _) => {\n printfln!(\" -Z %-20s -- %s\", *name, *desc);\n }\n }\n }\n}\n\npub fn run_compiler(args: &[~str], demitter: diagnostic::Emitter) {\n \/\/ Don't display log spew by default. Can override with RUST_LOG.\n ::std::logging::console_off();\n\n let mut args = args.to_owned();\n let binary = args.shift().to_managed();\n\n if args.is_empty() { usage(binary); return; }\n\n let matches =\n &match getopts::groups::getopts(args, optgroups()) {\n Ok(m) => m,\n Err(f) => {\n early_error(demitter, getopts::fail_str(f));\n }\n };\n\n if opt_present(matches, \"h\") || opt_present(matches, \"help\") {\n usage(binary);\n return;\n }\n\n \/\/ Display the available lint options if \"-W help\" or only \"-W\" is given.\n let lint_flags = vec::append(getopts::opt_strs(matches, \"W\"),\n getopts::opt_strs(matches, \"warn\"));\n\n let show_lint_options = lint_flags.iter().any(|x| x == &~\"help\") ||\n (opt_present(matches, \"W\") && lint_flags.is_empty());\n\n if show_lint_options {\n describe_warnings();\n return;\n }\n\n let r = getopts::opt_strs(matches, \"Z\");\n if r.iter().any(|x| x == &~\"help\") {\n describe_debug_flags();\n return;\n }\n\n if getopts::opt_maybe_str(matches, \"passes\") == Some(~\"list\") {\n unsafe { lib::llvm::llvm::LLVMRustPrintPasses(); }\n return;\n }\n\n if opt_present(matches, \"v\") || opt_present(matches, \"version\") {\n version(binary);\n return;\n }\n let input = match matches.free.len() {\n 0u => early_error(demitter, ~\"no input filename given\"),\n 1u => {\n let ifile = matches.free[0].as_slice();\n if \"-\" == ifile {\n let src = str::from_utf8(io::stdin().read_whole_stream());\n str_input(src.to_managed())\n } else {\n file_input(Path(ifile))\n }\n }\n _ => early_error(demitter, ~\"multiple input filenames provided\")\n };\n\n let sopts = build_session_options(binary, matches, demitter);\n let sess = build_session(sopts, demitter);\n let odir = getopts::opt_maybe_str(matches, \"out-dir\").map_move(|o| Path(o));\n let ofile = getopts::opt_maybe_str(matches, \"o\").map_move(|o| Path(o));\n let cfg = 
build_configuration(sess);\n let pretty = do getopts::opt_default(matches, \"pretty\", \"normal\").map_move |a| {\n parse_pretty(sess, a)\n };\n match pretty {\n Some::(ppm) => {\n pretty_print_input(sess, cfg, &input, ppm);\n return;\n }\n None:: => {\/* continue *\/ }\n }\n let ls = opt_present(matches, \"ls\");\n if ls {\n match input {\n file_input(ref ifile) => {\n list_metadata(sess, &(*ifile), io::stdout());\n }\n str_input(_) => {\n early_error(demitter, ~\"can not list metadata for stdin\");\n }\n }\n return;\n }\n\n compile_input(sess, cfg, &input, &odir, &ofile);\n}\n\n#[deriving(Eq)]\npub enum monitor_msg {\n fatal,\n done,\n}\n\n\/*\nThis is a sanity check that any failure of the compiler is performed\nthrough the diagnostic module and reported properly - we shouldn't be calling\nplain-old-fail on any execution path that might be taken. Since we have\nconsole logging off by default, hitting a plain fail statement would make the\ncompiler silently exit, which would be terrible.\n\nThis method wraps the compiler in a subtask and injects a function into the\ndiagnostic emitter which records when we hit a fatal error. If the task\nfails without recording a fatal error then we've encountered a compiler\nbug and need to present an error.\n*\/\npub fn monitor(f: ~fn(diagnostic::Emitter)) {\n use std::comm::*;\n\n \/\/ XXX: This is a hack for newsched since it doesn't support split stacks.\n \/\/ rustc needs a lot of stack!\n static STACK_SIZE: uint = 6000000;\n\n let (p, ch) = stream();\n let ch = SharedChan::new(ch);\n let ch_capture = ch.clone();\n let mut task_builder = task::task();\n task_builder.supervised();\n\n \/\/ XXX: Hacks on hacks. If the env is trying to override the stack size\n \/\/ then *don't* set it explicitly.\n if os::getenv(\"RUST_MIN_STACK\").is_none() {\n task_builder.opts.stack_size = Some(STACK_SIZE);\n }\n\n match do task_builder.try {\n let ch = ch_capture.clone();\n let ch_capture = ch.clone();\n \/\/ The 'diagnostics emitter'. Every error, warning, etc. should\n \/\/ go through this function.\n let demitter: @fn(Option<(@codemap::CodeMap, codemap::Span)>,\n &str,\n diagnostic::level) =\n |cmsp, msg, lvl| {\n if lvl == diagnostic::fatal {\n ch_capture.send(fatal);\n }\n diagnostic::emit(cmsp, msg, lvl);\n };\n\n struct finally {\n ch: SharedChan,\n }\n\n impl Drop for finally {\n fn drop(&mut self) { self.ch.send(done); }\n }\n\n let _finally = finally { ch: ch };\n\n f(demitter);\n\n \/\/ Due reasons explain in #7732, if there was a jit execution context it\n \/\/ must be consumed and passed along to our parent task.\n back::link::jit::consume_engine()\n } {\n result::Ok(_) => { \/* fallthrough *\/ }\n result::Err(_) => {\n \/\/ Task failed without emitting a fatal diagnostic\n if p.recv() == done {\n diagnostic::emit(\n None,\n diagnostic::ice_msg(\"unexpected failure\"),\n diagnostic::error);\n\n let xs = [\n ~\"the compiler hit an unexpected failure path. \\\n this is a bug\",\n ~\"try running with RUST_LOG=rustc=1 \\\n to get further details and report the results \\\n to github.com\/mozilla\/rust\/issues\"\n ];\n for note in xs.iter() {\n diagnostic::emit(None, *note, diagnostic::note)\n }\n }\n \/\/ Fail so the process returns a failure code\n fail!();\n }\n }\n}\n\npub fn main() {\n let args = os::args();\n main_args(args);\n}\n\npub fn main_args(args: &[~str]) {\n let owned_args = args.to_owned();\n do monitor |demitter| {\n run_compiler(owned_args, demitter);\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2013-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Interfaces to the operating system provided random number\n\/\/! generators.\n\npub use self::imp::OsRng;\n\n#[cfg(all(unix, not(target_os = \"ios\")))]\nmod imp {\n use prelude::v1::*;\n use self::OsRngInner::*;\n\n use fs::File;\n use io;\n use libc;\n use mem;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n extern \"C\" {\n fn syscall(number: libc::c_long, ...) -> libc::c_long;\n }\n\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(any(target_arch = \"arm\", target_arch = \"aarch64\"))]\n const NR_GETRANDOM: libc::c_long = 384;\n #[cfg(target_arch = \"powerpc\")]\n const NR_GETRANDOM: libc::c_long = 384;\n\n unsafe {\n syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n let len = v.len();\n while read < len {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n fn getrandom_next_u32() -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n }\n\n fn getrandom_next_u64() -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};\n\n static GETRANDOM_CHECKED: AtomicBool = ATOMIC_BOOL_INIT;\n static GETRANDOM_AVAILABLE: AtomicBool = ATOMIC_BOOL_INIT;\n\n if !GETRANDOM_CHECKED.load(Ordering::Relaxed) {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = errno() as libc::c_int;\n err != libc::ENOSYS\n } else {\n true\n };\n GETRANDOM_AVAILABLE.store(available, Ordering::Relaxed);\n GETRANDOM_CHECKED.store(true, Ordering::Relaxed);\n available\n } else {\n GETRANDOM_AVAILABLE.load(Ordering::Relaxed)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn is_getrandom_available() -> bool { false }\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = try!(File::open(\"\/dev\/urandom\"));\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u32(),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u64(),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n #[repr(C)]\n struct SecRandom;\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;\n\n #[link(name = \"Security\", kind = \"framework\")]\n extern \"C\" {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t,\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::types::os::arch::extra::{LONG_PTR};\n use libc::{DWORD, BYTE, LPCSTR, BOOL};\n\n type HCRYPTPROV = LONG_PTR;\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n hcryptprov: HCRYPTPROV\n }\n\n const PROV_RSA_FULL: DWORD = 1;\n const CRYPT_SILENT: DWORD = 64;\n const CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000;\n\n #[allow(non_snake_case)]\n extern \"system\" {\n fn CryptAcquireContextA(phProv: *mut HCRYPTPROV,\n pszContainer: LPCSTR,\n pszProvider: LPCSTR,\n dwProvType: DWORD,\n dwFlags: DWORD) -> BOOL;\n fn CryptGenRandom(hProv: HCRYPTPROV,\n dwLen: DWORD,\n pbBuffer: *mut BYTE) -> BOOL;\n fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n let mut hcp = 0;\n let ret = unsafe {\n CryptAcquireContextA(&mut hcp, 0 as LPCSTR, 0 as LPCSTR,\n PROV_RSA_FULL,\n CRYPT_VERIFYCONTEXT | CRYPT_SILENT)\n };\n\n if ret == 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(OsRng { hcryptprov: hcp })\n }\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n CryptGenRandom(self.hcryptprov, v.len() as DWORD,\n v.as_mut_ptr())\n };\n if ret == 0 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n\n impl Drop for OsRng {\n fn drop(&mut self) {\n let ret = unsafe {\n CryptReleaseContext(self.hcryptprov, 0)\n };\n if ret == 0 {\n panic!(\"couldn't release context: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n\n use sync::mpsc::channel;\n use rand::Rng;\n use super::OsRng;\n use thread;\n\n #[test]\n fn test_os_rng() {\n let mut r = OsRng::new().unwrap();\n\n r.next_u32();\n r.next_u64();\n\n let mut v = [0; 1000];\n r.fill_bytes(&mut v);\n }\n\n #[test]\n fn test_os_rng_tasks() {\n\n let mut txs = vec!();\n for _ in 0..20 {\n let (tx, rx) = channel();\n txs.push(tx);\n\n thread::spawn(move|| {\n \/\/ wait until all the tasks are ready to go.\n rx.recv().unwrap();\n\n \/\/ deschedule to attempt to interleave things as much\n \/\/ as possible (XXX: is this a good test?)\n let mut r = OsRng::new().unwrap();\n thread::yield_now();\n let mut v = [0; 1000];\n\n for _ in 0..100 {\n r.next_u32();\n thread::yield_now();\n r.next_u64();\n thread::yield_now();\n r.fill_bytes(&mut v);\n thread::yield_now();\n }\n });\n }\n\n \/\/ start all the tasks\n for tx in &txs {\n tx.send(()).unwrap();\n }\n }\n}\nAuto merge of #24526 - klutzy:getrandom-fix, r=alexcrichton\/\/ Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Interfaces to the operating system provided random number\n\/\/! 
generators.\n\npub use self::imp::OsRng;\n\n#[cfg(all(unix, not(target_os = \"ios\")))]\nmod imp {\n use prelude::v1::*;\n use self::OsRngInner::*;\n\n use fs::File;\n use io;\n use libc;\n use mem;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n extern \"C\" {\n fn syscall(number: libc::c_long, ...) -> libc::c_long;\n }\n\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(any(target_arch = \"arm\", target_arch = \"aarch64\"))]\n const NR_GETRANDOM: libc::c_long = 384;\n #[cfg(target_arch = \"powerpc\")]\n const NR_GETRANDOM: libc::c_long = 384;\n\n unsafe {\n syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n let len = v.len();\n while read < len {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n fn getrandom_next_u32() -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n }\n\n fn getrandom_next_u64() -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};\n use sync::{Once, ONCE_INIT};\n\n static CHECKER: Once = ONCE_INIT;\n static AVAILABLE: AtomicBool = ATOMIC_BOOL_INIT;\n\n CHECKER.call_once(|| {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = io::Error::last_os_error().raw_os_error();\n err != Some(libc::ENOSYS)\n } else {\n true\n };\n AVAILABLE.store(available, Ordering::Relaxed);\n });\n\n AVAILABLE.load(Ordering::Relaxed)\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn is_getrandom_available() -> bool { false }\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = try!(File::open(\"\/dev\/urandom\"));\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u32(),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u64(),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n #[repr(C)]\n struct SecRandom;\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;\n\n #[link(name = \"Security\", kind = \"framework\")]\n extern \"C\" {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t,\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::types::os::arch::extra::{LONG_PTR};\n use libc::{DWORD, BYTE, LPCSTR, BOOL};\n\n type HCRYPTPROV = LONG_PTR;\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n hcryptprov: HCRYPTPROV\n }\n\n const PROV_RSA_FULL: DWORD = 1;\n const CRYPT_SILENT: DWORD = 64;\n const CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000;\n\n #[allow(non_snake_case)]\n extern \"system\" {\n fn CryptAcquireContextA(phProv: *mut HCRYPTPROV,\n pszContainer: LPCSTR,\n pszProvider: LPCSTR,\n dwProvType: DWORD,\n dwFlags: DWORD) -> BOOL;\n fn CryptGenRandom(hProv: HCRYPTPROV,\n dwLen: DWORD,\n pbBuffer: *mut BYTE) -> BOOL;\n fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result {\n let mut hcp = 0;\n let ret = unsafe {\n CryptAcquireContextA(&mut hcp, 0 as LPCSTR, 0 as LPCSTR,\n PROV_RSA_FULL,\n CRYPT_VERIFYCONTEXT | CRYPT_SILENT)\n };\n\n if ret == 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(OsRng { hcryptprov: hcp })\n }\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n CryptGenRandom(self.hcryptprov, v.len() as DWORD,\n v.as_mut_ptr())\n };\n if ret == 0 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n\n impl Drop for OsRng {\n fn drop(&mut self) {\n let ret = unsafe {\n CryptReleaseContext(self.hcryptprov, 0)\n };\n if ret == 0 {\n panic!(\"couldn't release context: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n\n use sync::mpsc::channel;\n use rand::Rng;\n use super::OsRng;\n use thread;\n\n #[test]\n fn test_os_rng() {\n let mut r = OsRng::new().unwrap();\n\n r.next_u32();\n r.next_u64();\n\n let mut v = [0; 1000];\n r.fill_bytes(&mut v);\n }\n\n #[test]\n fn test_os_rng_tasks() {\n\n let mut txs = vec!();\n for _ in 0..20 {\n let (tx, rx) = channel();\n txs.push(tx);\n\n thread::spawn(move|| {\n \/\/ wait until all the tasks are ready to go.\n rx.recv().unwrap();\n\n \/\/ deschedule to attempt to interleave things as much\n \/\/ as possible (XXX: is this a good test?)\n let mut r = OsRng::new().unwrap();\n thread::yield_now();\n let mut v = [0; 1000];\n\n for _ in 0..100 {\n r.next_u32();\n thread::yield_now();\n r.next_u64();\n thread::yield_now();\n r.fill_bytes(&mut v);\n thread::yield_now();\n }\n });\n }\n\n \/\/ start all the tasks\n for tx in &txs {\n tx.send(()).unwrap();\n }\n }\n}\n<|endoftext|>"} {"text":"\/*!\n * A key,value store that works on anything.\n *\n * This works using a binary search tree. 
In the first version, it's a\n * very naive algorithm, but it will probably be updated to be a\n * red-black tree or something else.\n *\/\n#[forbid(deprecated_mode)];\n\nuse core::cmp::{Eq, Ord};\nuse core::option::{Some, None};\nuse Option = core::Option;\n\npub type TreeMap = @mut TreeEdge;\n\ntype TreeEdge = Option<@TreeNode>;\n\nstruct TreeNode {\n key: K,\n mut value: V,\n mut left: TreeEdge,\n mut right: TreeEdge\n}\n\n\/\/\/ Create a treemap\npub fn TreeMap() -> TreeMap { @mut None }\n\n\/\/\/ Insert a value into the map\npub fn insert(m: &mut TreeEdge, k: K, v: V) {\n match copy *m {\n None => {\n *m = Some(@TreeNode {key: k,\n mut value: v,\n mut left: None,\n mut right: None});\n return;\n }\n Some(node) => {\n if k == node.key {\n node.value = v;\n } else if k < node.key {\n insert(&mut node.left, k, v);\n } else {\n insert(&mut node.right, k, v);\n }\n }\n };\n}\n\n\/\/\/ Find a value based on the key\npub fn find(m: &const TreeEdge, k: K)\n -> Option {\n match copy *m {\n None => None,\n\n \/\/ FIXME (#2808): was that an optimization?\n Some(node) => {\n if k == node.key {\n Some(node.value)\n } else if k < node.key {\n find(&const node.left, k)\n } else {\n find(&const node.right, k)\n }\n }\n }\n}\n\n\/\/\/ Visit all pairs in the map in order.\npub fn traverse(m: &const TreeEdge, \n f: fn((&K), (&V))) {\n match copy *m {\n None => (),\n Some(node) => {\n traverse(&const node.left, f);\n \/\/ copy of value is req'd as f() requires an immutable ptr\n f(&node.key, © node.value);\n traverse(&const node.right, f);\n }\n }\n}\n\n\/\/\/ Compare two treemaps and return true iff \n\/\/\/ they contain same keys and values\npub fn equals(t1: &const TreeEdge,\n t2: &const TreeEdge) \n -> bool {\n let mut v1 = ~[];\n let mut v2 = ~[];\n traverse(t1, |k,v| { v1.push((copy *k, copy *v)) });\n traverse(t2, |k,v| { v2.push((copy *k, copy *v)) });\n return v1 == v2;\n}\n\n\n#[cfg(test)]\nmod tests {\n #[legacy_exports];\n\n #[test]\n fn init_treemap() { let _m = TreeMap::(); }\n\n #[test]\n fn insert_one() { let m = TreeMap(); insert(m, 1, 2); }\n\n #[test]\n fn insert_two() { let m = TreeMap(); insert(m, 1, 2); insert(m, 3, 4); }\n\n #[test]\n fn insert_find() {\n let m = TreeMap();\n insert(m, 1, 2);\n assert (find(m, 1) == Some(2));\n }\n\n #[test]\n fn find_empty() {\n let m = TreeMap::(); assert (find(m, 1) == None);\n }\n\n #[test]\n fn find_not_found() {\n let m = TreeMap();\n insert(m, 1, 2);\n assert (find(m, 2) == None);\n }\n\n #[test]\n fn traverse_in_order() {\n let m = TreeMap();\n insert(m, 3, ());\n insert(m, 0, ());\n insert(m, 4, ());\n insert(m, 2, ());\n insert(m, 1, ());\n\n let n = @mut 0;\n fn t(n: @mut int, k: int, _v: ()) {\n assert (*n == k); *n += 1;\n }\n traverse(m, |x,y| t(n, *x, *y));\n }\n\n #[test]\n fn equality() {\n let m1 = TreeMap();\n insert(m1, 3, ());\n insert(m1, 0, ());\n insert(m1, 4, ());\n insert(m1, 2, ());\n insert(m1, 1, ());\n let m2 = TreeMap();\n insert(m2, 2, ());\n insert(m2, 1, ());\n insert(m2, 3, ());\n insert(m2, 0, ());\n insert(m2, 4, ());\n\n assert equals(m1, m2);\n\n let m3 = TreeMap();\n assert !equals(m1,m3);\n\n }\n\n #[test]\n fn u8_map() {\n let m = TreeMap();\n\n let k1 = str::to_bytes(~\"foo\");\n let k2 = str::to_bytes(~\"bar\");\n\n insert(m, k1, ~\"foo\");\n insert(m, k2, ~\"bar\");\n\n assert (find(m, k2) == Some(~\"bar\"));\n assert (find(m, k1) == Some(~\"foo\"));\n }\n}\nFix whitespace\/*!\n * A key,value store that works on anything.\n *\n * This works using a binary search tree. 
In the first version, it's a\n * very naive algorithm, but it will probably be updated to be a\n * red-black tree or something else.\n *\/\n#[forbid(deprecated_mode)];\n\nuse core::cmp::{Eq, Ord};\nuse core::option::{Some, None};\nuse Option = core::Option;\n\npub type TreeMap = @mut TreeEdge;\n\ntype TreeEdge = Option<@TreeNode>;\n\nstruct TreeNode {\n key: K,\n mut value: V,\n mut left: TreeEdge,\n mut right: TreeEdge\n}\n\n\/\/\/ Create a treemap\npub fn TreeMap() -> TreeMap { @mut None }\n\n\/\/\/ Insert a value into the map\npub fn insert(m: &mut TreeEdge, k: K, v: V) {\n match copy *m {\n None => {\n *m = Some(@TreeNode {key: k,\n mut value: v,\n mut left: None,\n mut right: None});\n return;\n }\n Some(node) => {\n if k == node.key {\n node.value = v;\n } else if k < node.key {\n insert(&mut node.left, k, v);\n } else {\n insert(&mut node.right, k, v);\n }\n }\n };\n}\n\n\/\/\/ Find a value based on the key\npub fn find(m: &const TreeEdge, k: K)\n -> Option {\n match copy *m {\n None => None,\n\n \/\/ FIXME (#2808): was that an optimization?\n Some(node) => {\n if k == node.key {\n Some(node.value)\n } else if k < node.key {\n find(&const node.left, k)\n } else {\n find(&const node.right, k)\n }\n }\n }\n}\n\n\/\/\/ Visit all pairs in the map in order.\npub fn traverse(m: &const TreeEdge,\n f: fn((&K), (&V))) {\n match copy *m {\n None => (),\n Some(node) => {\n traverse(&const node.left, f);\n \/\/ copy of value is req'd as f() requires an immutable ptr\n f(&node.key, © node.value);\n traverse(&const node.right, f);\n }\n }\n}\n\n\/\/\/ Compare two treemaps and return true iff\n\/\/\/ they contain same keys and values\npub fn equals(t1: &const TreeEdge,\n t2: &const TreeEdge)\n -> bool {\n let mut v1 = ~[];\n let mut v2 = ~[];\n traverse(t1, |k,v| { v1.push((copy *k, copy *v)) });\n traverse(t2, |k,v| { v2.push((copy *k, copy *v)) });\n return v1 == v2;\n}\n\n\n#[cfg(test)]\nmod tests {\n #[legacy_exports];\n\n #[test]\n fn init_treemap() { let _m = TreeMap::(); }\n\n #[test]\n fn insert_one() { let m = TreeMap(); insert(m, 1, 2); }\n\n #[test]\n fn insert_two() { let m = TreeMap(); insert(m, 1, 2); insert(m, 3, 4); }\n\n #[test]\n fn insert_find() {\n let m = TreeMap();\n insert(m, 1, 2);\n assert (find(m, 1) == Some(2));\n }\n\n #[test]\n fn find_empty() {\n let m = TreeMap::(); assert (find(m, 1) == None);\n }\n\n #[test]\n fn find_not_found() {\n let m = TreeMap();\n insert(m, 1, 2);\n assert (find(m, 2) == None);\n }\n\n #[test]\n fn traverse_in_order() {\n let m = TreeMap();\n insert(m, 3, ());\n insert(m, 0, ());\n insert(m, 4, ());\n insert(m, 2, ());\n insert(m, 1, ());\n\n let n = @mut 0;\n fn t(n: @mut int, k: int, _v: ()) {\n assert (*n == k); *n += 1;\n }\n traverse(m, |x,y| t(n, *x, *y));\n }\n\n #[test]\n fn equality() {\n let m1 = TreeMap();\n insert(m1, 3, ());\n insert(m1, 0, ());\n insert(m1, 4, ());\n insert(m1, 2, ());\n insert(m1, 1, ());\n let m2 = TreeMap();\n insert(m2, 2, ());\n insert(m2, 1, ());\n insert(m2, 3, ());\n insert(m2, 0, ());\n insert(m2, 4, ());\n\n assert equals(m1, m2);\n\n let m3 = TreeMap();\n assert !equals(m1,m3);\n\n }\n\n #[test]\n fn u8_map() {\n let m = TreeMap();\n\n let k1 = str::to_bytes(~\"foo\");\n let k2 = str::to_bytes(~\"bar\");\n\n insert(m, k1, ~\"foo\");\n insert(m, k2, ~\"bar\");\n\n assert (find(m, k2) == Some(~\"bar\"));\n assert (find(m, k1) == Some(~\"foo\"));\n }\n}\n<|endoftext|>"} {"text":"Fixed build on non-x86 architectures<|endoftext|>"} {"text":"add account mod<|endoftext|>"} {"text":"Add missing source 
fileuse aabb::{AABB, Bounded};\nuse std::boxed::Box;\nuse std::f32;\nuse std::iter::repeat;\n\nenum BVHNode {\n Leaf { aabb: AABB, shapes: Vec },\n Node {\n aabb: AABB,\n init: Box,\n tail: Box,\n },\n}\n\nimpl BVHNode {\n pub fn new(shapes: &[T], indices: Vec) -> BVHNode {\n let (aabb_bounds, centroid_bounds) =\n indices.iter().fold((AABB::empty(), AABB::empty()),\n |(aabb_bounds, centroid_bounds), idx| {\n let aabb = &shapes[*idx].aabb();\n (aabb_bounds.union_aabb(aabb),\n centroid_bounds.union_point(&aabb.center()))\n });\n\n if indices.len() < 5 {\n return BVHNode::Leaf {\n aabb: aabb_bounds,\n shapes: indices,\n };\n }\n\n \/\/ Find the axis along which the shapes are spread the most\n let split_axis = centroid_bounds.largest_axis();\n let split_axis_size = centroid_bounds.max[split_axis] - centroid_bounds.min[split_axis];\n\n \/\/\/ Defines a Bucket utility object\n #[derive(Copy, Clone)]\n struct Bucket {\n size: usize,\n aabb: AABB,\n }\n\n impl Bucket {\n \/\/\/ Returns an empty bucket\n fn empty() -> Bucket {\n Bucket {\n size: 0,\n aabb: AABB::empty(),\n }\n }\n\n \/\/\/ Extends this `Bucket` by the given `AABB`.\n fn add_aabb(&mut self, aabb: &AABB) {\n self.size += 1;\n self.aabb = self.aabb.union_aabb(aabb);\n }\n }\n\n \/\/\/ Returns the union of two `Bucket`s.\n fn bucket_union(a: Bucket, b: &Bucket) -> Bucket {\n Bucket {\n size: a.size + b.size,\n aabb: a.aabb.union_aabb(&b.aabb),\n }\n }\n\n \/\/ Create twelve buckets, and twelve index assignment vectors\n let mut buckets = [Bucket::empty(); 12];\n let mut bucket_assignments: [Vec; 12] = Default::default();\n\n \/\/ Iterate through all shapes\n for idx in &indices {\n let shape = &shapes[*idx];\n let shape_aabb = shape.aabb();\n let shape_center = shape_aabb.center();\n\n \/\/ Get the relative position of the shape centroid [0.0..1.0]\n let bucket_num_relative = (shape_center[split_axis] - centroid_bounds.min[split_axis]) \/\n split_axis_size;\n\n \/\/ Convert that to the actual `Bucket` number\n let bucket_num = (bucket_num_relative * 11.99) as usize;\n\n \/\/ Extend the selected `Bucket` and add the index to the actual bucket\n buckets[bucket_num].add_aabb(&shape_aabb);\n bucket_assignments[bucket_num].push(*idx);\n }\n\n \/\/ Compute the costs for each configuration\n let costs = (0..11).map(|i| {\n let init = buckets.iter().take(i + 1).fold(Bucket::empty(), bucket_union);\n let tail = buckets.iter().skip(i + 1).fold(Bucket::empty(), bucket_union);\n\n 0.125 +\n (init.size as f32 * init.aabb.surface_area() +\n tail.size as f32 * tail.aabb.surface_area()) \/ aabb_bounds.surface_area()\n });\n\n \/\/ Select the configuration with the minimal costs\n let (min_bucket, _) = costs.enumerate().fold((0, f32::INFINITY), |(min_bucket, min_cost),\n (bucket_num, bucket_cost)| {\n if bucket_cost < min_cost {\n (bucket_num, bucket_cost)\n } else {\n (min_bucket, min_cost)\n }\n });\n\n \/\/ Join together all index buckets, and proceed recursively\n let mut init_indices = Vec::new();\n for mut indices in bucket_assignments.iter_mut().take(min_bucket + 1) {\n init_indices.append(&mut indices);\n }\n let mut tail_indices = Vec::new();\n for mut indices in bucket_assignments.iter_mut().skip(min_bucket + 1) {\n tail_indices.append(&mut indices);\n }\n\n \/\/ Construct the actual data structure\n BVHNode::Node {\n aabb: aabb_bounds,\n init: Box::new(BVHNode::new(shapes, init_indices)),\n tail: Box::new(BVHNode::new(shapes, tail_indices)),\n }\n }\n\n fn print(&self, depth: usize) {\n let padding: String = repeat(\" 
\").take(depth).collect();\n match *self {\n BVHNode::Node { ref aabb, ref init, ref tail } => {\n println!(\"{}AABB\\t{:?}\", padding, aabb);\n println!(\"{}init\", padding);\n init.print(depth + 1);\n println!(\"{}tail\", padding);\n tail.print(depth + 1);\n }\n BVHNode::Leaf { ref aabb, ref shapes } => {\n println!(\"{}AABB\\t{:?}\", padding, aabb);\n println!(\"{}shapes\\t{:?}\", padding, shapes);\n }\n }\n }\n}\n\npub struct BVH {\n shapes: Vec,\n root: BVHNode,\n}\n\nimpl BVH {\n pub fn new(shapes: Vec) -> BVH {\n let indices = (0..shapes.len()).collect::>();\n let root = BVHNode::new(&shapes, indices);\n BVH {\n shapes: shapes,\n root: root,\n }\n }\n\n pub fn print(&self) {\n self.root.print(0);\n }\n}\n<|endoftext|>"} {"text":"Change API some more, cookie setting can now happen from non-HTTP API.<|endoftext|>"} {"text":"rand range<|endoftext|>"} {"text":"\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\n\/\/ The crate store - a central repo for information collected about external\n\/\/ crates and libraries\n\nuse back::svh::Svh;\nuse metadata::decoder;\nuse metadata::loader;\n\nuse std::cell::RefCell;\nuse std::c_vec::CVec;\nuse std::rc::Rc;\nuse collections::HashMap;\nuse syntax::ast;\nuse syntax::parse::token::IdentInterner;\nuse syntax::crateid::CrateId;\n\n\/\/ A map from external crate numbers (as decoded from some crate file) to\n\/\/ local crate numbers (as generated during this session). Each external\n\/\/ crate may refer to types in other external crates, and each has their\n\/\/ own crate numbers.\npub type cnum_map = @RefCell>;\n\npub enum MetadataBlob {\n MetadataVec(CVec),\n MetadataArchive(loader::ArchiveMetadata),\n}\n\npub struct crate_metadata {\n pub name: ~str,\n pub data: MetadataBlob,\n pub cnum_map: cnum_map,\n pub cnum: ast::CrateNum,\n}\n\n#[deriving(Eq)]\npub enum LinkagePreference {\n RequireDynamic,\n RequireStatic,\n}\n\n#[deriving(Eq, FromPrimitive)]\npub enum NativeLibaryKind {\n NativeStatic, \/\/ native static library (.a archive)\n NativeFramework, \/\/ OSX-specific\n NativeUnknown, \/\/ default way to specify a dynamic library\n}\n\n\/\/ Where a crate came from on the local filesystem. 
One of these two options\n\/\/ must be non-None.\n#[deriving(Eq, Clone)]\npub struct CrateSource {\n pub dylib: Option,\n pub rlib: Option,\n pub cnum: ast::CrateNum,\n}\n\npub struct CStore {\n metas: RefCell>,\n extern_mod_crate_map: RefCell,\n used_crate_sources: RefCell>,\n used_libraries: RefCell>,\n used_link_args: RefCell>,\n pub intr: Rc,\n}\n\n\/\/ Map from NodeId's of local extern crate statements to crate numbers\ntype extern_mod_crate_map = HashMap;\n\nimpl CStore {\n pub fn new(intr: Rc) -> CStore {\n CStore {\n metas: RefCell::new(HashMap::new()),\n extern_mod_crate_map: RefCell::new(HashMap::new()),\n used_crate_sources: RefCell::new(Vec::new()),\n used_libraries: RefCell::new(Vec::new()),\n used_link_args: RefCell::new(Vec::new()),\n intr: intr\n }\n }\n\n pub fn get_crate_data(&self, cnum: ast::CrateNum) -> @crate_metadata {\n *self.metas.borrow().get(&cnum)\n }\n\n pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> Svh {\n let cdata = self.get_crate_data(cnum);\n decoder::get_crate_hash(cdata.data())\n }\n\n pub fn get_crate_id(&self, cnum: ast::CrateNum) -> CrateId {\n let cdata = self.get_crate_data(cnum);\n decoder::get_crate_id(cdata.data())\n }\n\n pub fn set_crate_data(&self, cnum: ast::CrateNum, data: @crate_metadata) {\n self.metas.borrow_mut().insert(cnum, data);\n }\n\n pub fn have_crate_data(&self, cnum: ast::CrateNum) -> bool {\n self.metas.borrow().contains_key(&cnum)\n }\n\n pub fn iter_crate_data(&self, i: |ast::CrateNum, @crate_metadata|) {\n for (&k, &v) in self.metas.borrow().iter() {\n i(k, v);\n }\n }\n\n pub fn add_used_crate_source(&self, src: CrateSource) {\n let mut used_crate_sources = self.used_crate_sources.borrow_mut();\n if !used_crate_sources.contains(&src) {\n used_crate_sources.push(src);\n }\n }\n\n pub fn get_used_crate_source(&self, cnum: ast::CrateNum)\n -> Option {\n self.used_crate_sources.borrow_mut()\n .iter().find(|source| source.cnum == cnum)\n .map(|source| source.clone())\n }\n\n pub fn reset(&self) {\n self.metas.borrow_mut().clear();\n self.extern_mod_crate_map.borrow_mut().clear();\n self.used_crate_sources.borrow_mut().clear();\n self.used_libraries.borrow_mut().clear();\n self.used_link_args.borrow_mut().clear();\n }\n\n \/\/ This method is used when generating the command line to pass through to\n \/\/ system linker. The linker expects undefined symbols on the left of the\n \/\/ command line to be defined in libraries on the right, not the other way\n \/\/ around. 
For more info, see some comments in the add_used_library function\n \/\/ below.\n \/\/\n \/\/ In order to get this left-to-right dependency ordering, we perform a\n \/\/ topological sort of all crates putting the leaves at the right-most\n \/\/ positions.\n pub fn get_used_crates(&self, prefer: LinkagePreference)\n -> Vec<(ast::CrateNum, Option)> {\n let mut ordering = Vec::new();\n fn visit(cstore: &CStore, cnum: ast::CrateNum,\n ordering: &mut Vec) {\n if ordering.as_slice().contains(&cnum) { return }\n let meta = cstore.get_crate_data(cnum);\n for (_, &dep) in meta.cnum_map.borrow().iter() {\n visit(cstore, dep, ordering);\n }\n ordering.push(cnum);\n };\n for (&num, _) in self.metas.borrow().iter() {\n visit(self, num, &mut ordering);\n }\n ordering.as_mut_slice().reverse();\n let ordering = ordering.as_slice();\n let mut libs = self.used_crate_sources.borrow()\n .iter()\n .map(|src| (src.cnum, match prefer {\n RequireDynamic => src.dylib.clone(),\n RequireStatic => src.rlib.clone(),\n }))\n .collect::)>>();\n libs.sort_by(|&(a, _), &(b, _)| {\n ordering.position_elem(&a).cmp(&ordering.position_elem(&b))\n });\n libs\n }\n\n pub fn add_used_library(&self, lib: ~str, kind: NativeLibaryKind) {\n assert!(!lib.is_empty());\n self.used_libraries.borrow_mut().push((lib, kind));\n }\n\n pub fn get_used_libraries<'a>(&'a self)\n -> &'a RefCell > {\n &self.used_libraries\n }\n\n pub fn add_used_link_args(&self, args: &str) {\n for s in args.split(' ') {\n self.used_link_args.borrow_mut().push(s.to_owned());\n }\n }\n\n pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell > {\n &self.used_link_args\n }\n\n pub fn add_extern_mod_stmt_cnum(&self,\n emod_id: ast::NodeId,\n cnum: ast::CrateNum) {\n self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum);\n }\n\n pub fn find_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId)\n -> Option {\n self.extern_mod_crate_map.borrow().find(&emod_id).map(|x| *x)\n }\n}\n\nimpl crate_metadata {\n pub fn data<'a>(&'a self) -> &'a [u8] { self.data.as_slice() }\n}\n\nimpl MetadataBlob {\n pub fn as_slice<'a>(&'a self) -> &'a [u8] {\n match *self {\n MetadataVec(ref vec) => vec.as_slice(),\n MetadataArchive(ref ar) => ar.as_slice(),\n }\n }\n}\nmetadata: cstore: remove dead code\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\n\/\/ The crate store - a central repo for information collected about external\n\/\/ crates and libraries\n\nuse back::svh::Svh;\nuse metadata::decoder;\nuse metadata::loader;\n\nuse std::cell::RefCell;\nuse std::c_vec::CVec;\nuse std::rc::Rc;\nuse collections::HashMap;\nuse syntax::ast;\nuse syntax::parse::token::IdentInterner;\n\n\/\/ A map from external crate numbers (as decoded from some crate file) to\n\/\/ local crate numbers (as generated during this session). 
Each external\n\/\/ crate may refer to types in other external crates, and each has their\n\/\/ own crate numbers.\npub type cnum_map = @RefCell>;\n\npub enum MetadataBlob {\n MetadataVec(CVec),\n MetadataArchive(loader::ArchiveMetadata),\n}\n\npub struct crate_metadata {\n pub name: ~str,\n pub data: MetadataBlob,\n pub cnum_map: cnum_map,\n pub cnum: ast::CrateNum,\n}\n\n#[deriving(Eq)]\npub enum LinkagePreference {\n RequireDynamic,\n RequireStatic,\n}\n\n#[deriving(Eq, FromPrimitive)]\npub enum NativeLibaryKind {\n NativeStatic, \/\/ native static library (.a archive)\n NativeFramework, \/\/ OSX-specific\n NativeUnknown, \/\/ default way to specify a dynamic library\n}\n\n\/\/ Where a crate came from on the local filesystem. One of these two options\n\/\/ must be non-None.\n#[deriving(Eq, Clone)]\npub struct CrateSource {\n pub dylib: Option,\n pub rlib: Option,\n pub cnum: ast::CrateNum,\n}\n\npub struct CStore {\n metas: RefCell>,\n extern_mod_crate_map: RefCell,\n used_crate_sources: RefCell>,\n used_libraries: RefCell>,\n used_link_args: RefCell>,\n pub intr: Rc,\n}\n\n\/\/ Map from NodeId's of local extern crate statements to crate numbers\ntype extern_mod_crate_map = HashMap;\n\nimpl CStore {\n pub fn new(intr: Rc) -> CStore {\n CStore {\n metas: RefCell::new(HashMap::new()),\n extern_mod_crate_map: RefCell::new(HashMap::new()),\n used_crate_sources: RefCell::new(Vec::new()),\n used_libraries: RefCell::new(Vec::new()),\n used_link_args: RefCell::new(Vec::new()),\n intr: intr\n }\n }\n\n pub fn get_crate_data(&self, cnum: ast::CrateNum) -> @crate_metadata {\n *self.metas.borrow().get(&cnum)\n }\n\n pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> Svh {\n let cdata = self.get_crate_data(cnum);\n decoder::get_crate_hash(cdata.data())\n }\n\n pub fn set_crate_data(&self, cnum: ast::CrateNum, data: @crate_metadata) {\n self.metas.borrow_mut().insert(cnum, data);\n }\n\n pub fn iter_crate_data(&self, i: |ast::CrateNum, @crate_metadata|) {\n for (&k, &v) in self.metas.borrow().iter() {\n i(k, v);\n }\n }\n\n pub fn add_used_crate_source(&self, src: CrateSource) {\n let mut used_crate_sources = self.used_crate_sources.borrow_mut();\n if !used_crate_sources.contains(&src) {\n used_crate_sources.push(src);\n }\n }\n\n pub fn get_used_crate_source(&self, cnum: ast::CrateNum)\n -> Option {\n self.used_crate_sources.borrow_mut()\n .iter().find(|source| source.cnum == cnum)\n .map(|source| source.clone())\n }\n\n pub fn reset(&self) {\n self.metas.borrow_mut().clear();\n self.extern_mod_crate_map.borrow_mut().clear();\n self.used_crate_sources.borrow_mut().clear();\n self.used_libraries.borrow_mut().clear();\n self.used_link_args.borrow_mut().clear();\n }\n\n \/\/ This method is used when generating the command line to pass through to\n \/\/ system linker. The linker expects undefined symbols on the left of the\n \/\/ command line to be defined in libraries on the right, not the other way\n \/\/ around. 
For more info, see some comments in the add_used_library function\n \/\/ below.\n \/\/\n \/\/ In order to get this left-to-right dependency ordering, we perform a\n \/\/ topological sort of all crates putting the leaves at the right-most\n \/\/ positions.\n pub fn get_used_crates(&self, prefer: LinkagePreference)\n -> Vec<(ast::CrateNum, Option)> {\n let mut ordering = Vec::new();\n fn visit(cstore: &CStore, cnum: ast::CrateNum,\n ordering: &mut Vec) {\n if ordering.as_slice().contains(&cnum) { return }\n let meta = cstore.get_crate_data(cnum);\n for (_, &dep) in meta.cnum_map.borrow().iter() {\n visit(cstore, dep, ordering);\n }\n ordering.push(cnum);\n };\n for (&num, _) in self.metas.borrow().iter() {\n visit(self, num, &mut ordering);\n }\n ordering.as_mut_slice().reverse();\n let ordering = ordering.as_slice();\n let mut libs = self.used_crate_sources.borrow()\n .iter()\n .map(|src| (src.cnum, match prefer {\n RequireDynamic => src.dylib.clone(),\n RequireStatic => src.rlib.clone(),\n }))\n .collect::)>>();\n libs.sort_by(|&(a, _), &(b, _)| {\n ordering.position_elem(&a).cmp(&ordering.position_elem(&b))\n });\n libs\n }\n\n pub fn add_used_library(&self, lib: ~str, kind: NativeLibaryKind) {\n assert!(!lib.is_empty());\n self.used_libraries.borrow_mut().push((lib, kind));\n }\n\n pub fn get_used_libraries<'a>(&'a self)\n -> &'a RefCell > {\n &self.used_libraries\n }\n\n pub fn add_used_link_args(&self, args: &str) {\n for s in args.split(' ') {\n self.used_link_args.borrow_mut().push(s.to_owned());\n }\n }\n\n pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell > {\n &self.used_link_args\n }\n\n pub fn add_extern_mod_stmt_cnum(&self,\n emod_id: ast::NodeId,\n cnum: ast::CrateNum) {\n self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum);\n }\n\n pub fn find_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId)\n -> Option {\n self.extern_mod_crate_map.borrow().find(&emod_id).map(|x| *x)\n }\n}\n\nimpl crate_metadata {\n pub fn data<'a>(&'a self) -> &'a [u8] { self.data.as_slice() }\n}\n\nimpl MetadataBlob {\n pub fn as_slice<'a>(&'a self) -> &'a [u8] {\n match *self {\n MetadataVec(ref vec) => vec.as_slice(),\n MetadataArchive(ref ar) => ar.as_slice(),\n }\n }\n}\n<|endoftext|>"} {"text":"Solution for the LZW compression algorithm\/\/ Implements http:\/\/rosettacode.org\/wiki\/LZW_compression\n\nuse std::hashmap::HashMap;\nuse std::vec;\nuse std::str;\n\nfn compress(original_str: &str) -> ~[int] {\n let original = original_str.as_bytes();\n let mut dict_size = 256;\n let mut dictionary = HashMap::new();\n \n for i in range(0, dict_size) {\n dictionary.insert(~[i as u8], i);\n }\n\n let mut result = ~[];\n let mut w:~[u8] = ~[];\n for &c in original.iter() {\n let mut wc = w.clone();\n wc.push(c);\n\n match dictionary.find(&wc) {\n Some(_) => w = wc,\n None => {\n result.push(*dictionary.get(&w));\n dictionary.insert(wc, dict_size);\n dict_size += 1;\n w = ~[c];\n }\n }\n }\n\n if w.len() > 0 {\n result.push(*dictionary.get(&w));\n }\n\n result\n}\n\nfn decompress(compressed: &[int]) -> ~str {\n let mut dict_size = 256;\n let mut dictionary = HashMap::new();\n \n for i in range(0, dict_size) {\n dictionary.insert(i, ~[i as u8]);\n }\n\n let mut w = ~[compressed[0] as u8];\n let compressed = compressed.slice(1, compressed.len());\n let mut result = w.clone();\n for &k in compressed.iter() {\n let entry = \n match dictionary.find(&k) {\n Some(v) => v.clone(),\n None if k == dict_size => vec::append_one(w.clone(), w[0]),\n None => fail!(\"Invalid compressed string\")\n };\n \n result 
= vec::append(result, entry);\n dictionary.insert(dict_size, vec::append_one(w, entry[0]));\n dict_size += 1;\n w = entry; \n }\n\n str::from_utf8(result)\n}\n\nfn main() {\n let original = \"TOBEORNOTTOBEORTOBEORNOT\";\n let compressed = compress(original);\n \n for &v in compressed.iter() {\n print!(\"{} \", v);\n }\n\n let decompressed = decompress(compressed);\n println(decompressed);\n\n \/\/ Check if the decompressed string corresponds to the original string\n assert!(original == decompressed);\n}\n<|endoftext|>"} {"text":"use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\n\n#[derive(Debug, PartialEq, Clone, Copy)]\npub enum RuntimeErrorKind {\n Instantiate,\n IOError,\n ProcessExitFailure,\n\n \/\/ more?\n}\n\n#[derive(Debug)]\npub struct RuntimeError {\n kind: RuntimeErrorKind,\n cause: Option>,\n}\n\nimpl RuntimeError {\n\n pub fn new(kind: RuntimeErrorKind, cause: Option>) -> RuntimeError {\n RuntimeError {\n kind: kind,\n cause: cause,\n }\n }\n\n}\n\nfn runtime_error_kind_as_str(e: &RuntimeErrorKind) -> &'static str {\n match e {\n &RuntimeErrorKind::Instantiate => \"Could not instantiate\",\n &RuntimeErrorKind::IOError => \"IO Error\",\n &RuntimeErrorKind::ProcessExitFailure => \"Process exited with failure\",\n }\n}\n\nimpl Display for RuntimeError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"{}\", runtime_error_kind_as_str(&self.kind)));\n Ok(())\n }\n\n}\n\nimpl Error for RuntimeError {\n\n fn description(&self) -> &str {\n runtime_error_kind_as_str(&self.kind)\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n}\n\nImpl From for RuntimeErroruse std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\nuse std::io::Error as IOError;\n\n#[derive(Debug, PartialEq, Clone, Copy)]\npub enum RuntimeErrorKind {\n Instantiate,\n IOError,\n ProcessExitFailure,\n\n \/\/ more?\n}\n\n#[derive(Debug)]\npub struct RuntimeError {\n kind: RuntimeErrorKind,\n cause: Option>,\n}\n\nimpl RuntimeError {\n\n pub fn new(kind: RuntimeErrorKind, cause: Option>) -> RuntimeError {\n RuntimeError {\n kind: kind,\n cause: cause,\n }\n }\n\n}\n\nfn runtime_error_kind_as_str(e: &RuntimeErrorKind) -> &'static str {\n match e {\n &RuntimeErrorKind::Instantiate => \"Could not instantiate\",\n &RuntimeErrorKind::IOError => \"IO Error\",\n &RuntimeErrorKind::ProcessExitFailure => \"Process exited with failure\",\n }\n}\n\nimpl Display for RuntimeError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"{}\", runtime_error_kind_as_str(&self.kind)));\n Ok(())\n }\n\n}\n\nimpl Error for RuntimeError {\n\n fn description(&self) -> &str {\n runtime_error_kind_as_str(&self.kind)\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl From for RuntimeError {\n\n fn from(ioe: IOError) -> RuntimeError {\n RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(ioe)))\n }\n\n}\n\n<|endoftext|>"} {"text":"Added binary to decimal converter.fn to_binary(num :int) {\n println!(\"{:t}\", num)\n}\n\nfn to_decimal(num :int) {\n println!(\"{}\", num);\n}\n\nfn main() {\n to_binary(200);\n to_decimal(0b11001000);\n}\n<|endoftext|>"} {"text":"test drop order for parameters when a future is dropped part-way through execution\/\/ aux-build:arc_wake.rs\n\/\/ edition:2018\n\/\/ run-pass\n\n#![allow(unused_variables)]\n#![feature(async_await)]\n\n\/\/ Test that the drop order 
for parameters in a fn and async fn matches up. Also test that\n\/\/ parameters (used or unused) are not dropped until the async fn is cancelled.\n\/\/ This file is mostly copy-pasted from drop-order-for-async-fn-parameters.rs\n\nextern crate arc_wake;\n\nuse arc_wake::ArcWake;\nuse std::cell::RefCell;\nuse std::future::Future;\nuse std::marker::PhantomData;\nuse std::pin::Pin;\nuse std::rc::Rc;\nuse std::sync::Arc;\nuse std::task::{Context, Poll};\n\nstruct EmptyWaker;\n\nimpl ArcWake for EmptyWaker {\n fn wake(self: Arc) {}\n}\n\n#[derive(Debug, Eq, PartialEq)]\nenum DropOrder {\n Function,\n Val(&'static str),\n}\n\ntype DropOrderListPtr = Rc>>;\n\nstruct D(&'static str, DropOrderListPtr);\n\nimpl Drop for D {\n fn drop(&mut self) {\n self.1.borrow_mut().push(DropOrder::Val(self.0));\n }\n}\n\nstruct NeverReady;\n\nimpl Future for NeverReady {\n type Output = ();\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll {\n Poll::Pending\n }\n}\n\n\/\/\/ Check that unused bindings are dropped after the function is polled.\nasync fn foo_async(x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n}\n\nfn foo_sync(x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\n\/\/\/ Check that underscore patterns are dropped after the function is polled.\nasync fn bar_async(x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n}\n\nfn bar_sync(x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\n\/\/\/ Check that underscore patterns within more complex patterns are dropped after the function\n\/\/\/ is polled.\nasync fn baz_async((x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n}\n\nfn baz_sync((x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\n\/\/\/ Check that underscore and unused bindings within and outwith more complex patterns are dropped\n\/\/\/ after the function is polled.\nasync fn foobar_async(x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n}\n\nfn foobar_sync(x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\nstruct Foo;\n\nimpl Foo {\n \/\/\/ Check that unused bindings are dropped after the method is polled.\n async fn foo_async(x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn foo_sync(x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore patterns are dropped after the method is polled.\n async fn bar_async(x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn bar_sync(x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore patterns within more complex patterns are dropped after the method\n \/\/\/ is polled.\n async fn baz_async((x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn baz_sync((x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore and unused bindings within and outwith more complex patterns are\n \/\/\/ dropped after the method is polled.\n async fn foobar_async(x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn foobar_sync(x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n}\n\nstruct Bar<'a>(PhantomData<&'a ()>);\n\nimpl<'a> Bar<'a> {\n \/\/\/ Check that unused 
bindings are dropped after the method with self is polled.\n async fn foo_async(&'a self, x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn foo_sync(&'a self, x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore patterns are dropped after the method with self is polled.\n async fn bar_async(&'a self, x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn bar_sync(&'a self, x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore patterns within more complex patterns are dropped after the method\n \/\/\/ with self is polled.\n async fn baz_async(&'a self, (x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn baz_sync(&'a self, (x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n\n \/\/\/ Check that underscore and unused bindings within and outwith more complex patterns are\n \/\/\/ dropped after the method with self is polled.\n async fn foobar_async(&'a self, x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n NeverReady.await;\n }\n\n fn foobar_sync(&'a self, x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n }\n}\n\nfn assert_drop_order_after_cancel>(\n f: impl FnOnce(DropOrderListPtr) -> Fut,\n g: impl FnOnce(DropOrderListPtr),\n) {\n let empty = Arc::new(EmptyWaker);\n let waker = ArcWake::into_waker(empty);\n let mut cx = Context::from_waker(&waker);\n\n let actual_order = Rc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(f(actual_order.clone()));\n let _ = fut.as_mut().poll(&mut cx);\n\n \/\/ Parameters are never dropped until the future completes.\n assert_eq!(*actual_order.borrow(), vec![DropOrder::Function]);\n \n drop(fut);\n\n let expected_order = Rc::new(RefCell::new(Vec::new()));\n g(expected_order.clone());\n assert_eq!(*actual_order.borrow(), *expected_order.borrow());\n}\n\nfn main() {\n \/\/ Free functions (see doc comment on function for what it tests).\n assert_drop_order_after_cancel(\n |l| foo_async(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n |l| foo_sync(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| bar_async(D(\"x\", l.clone()), D(\"_\", l.clone())),\n |l| bar_sync(D(\"x\", l.clone()), D(\"_\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| baz_async((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n |l| baz_sync((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n );\n assert_drop_order_after_cancel(\n |l| {\n foobar_async(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n |l| {\n foobar_sync(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n );\n\n \/\/ Methods w\/out self (see doc comment on function for what it tests).\n assert_drop_order_after_cancel(\n |l| Foo::foo_async(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n |l| Foo::foo_sync(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| Foo::bar_async(D(\"x\", l.clone()), D(\"_\", l.clone())),\n |l| Foo::bar_sync(D(\"x\", l.clone()), D(\"_\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| Foo::baz_async((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n |l| Foo::baz_sync((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n );\n 
assert_drop_order_after_cancel(\n |l| {\n Foo::foobar_async(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n |l| {\n Foo::foobar_sync(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n );\n\n \/\/ Methods (see doc comment on function for what it tests).\n let b = Bar(Default::default());\n assert_drop_order_after_cancel(\n |l| b.foo_async(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n |l| b.foo_sync(D(\"x\", l.clone()), D(\"_y\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| b.bar_async(D(\"x\", l.clone()), D(\"_\", l.clone())),\n |l| b.bar_sync(D(\"x\", l.clone()), D(\"_\", l.clone())),\n );\n assert_drop_order_after_cancel(\n |l| b.baz_async((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n |l| b.baz_sync((D(\"x\", l.clone()), D(\"_\", l.clone()))),\n );\n assert_drop_order_after_cancel(\n |l| {\n b.foobar_async(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n |l| {\n b.foobar_sync(\n D(\"x\", l.clone()),\n (D(\"a\", l.clone()), D(\"_\", l.clone()), D(\"_c\", l.clone())),\n D(\"_\", l.clone()),\n D(\"_y\", l.clone()),\n )\n },\n );\n}\n<|endoftext|>"} {"text":"Add helper function to be more flexible<|endoftext|>"} {"text":"ChunkData 0x21 packet manual impl.<|endoftext|>"} {"text":"various ways of destructuring struct in match added<|endoftext|>"} {"text":"use core::*;\nuse core::hash::*;\nuse common::*;\n\npub struct Entry {\n pub keys: Vec<(T, U)>,\n cur: usize,\n}\n\nimpl Iterator for Entry {\n fn next(&mut self) -> Option<(T, U)> {\n self.usize += 1;\n self.keys.get(self.usize)\n }\n}\n\n\/\/\/ A hashmap (a simple implementation)\npub struct HashMap\n where T: Hash\n{\n values: [Entry; 247],\n}\n\nimpl HashMap\n where T: Hash {\n \/\/\/ Get the position of an entry\n pub fn get_pos(key: &T) -> u8 {\n let hash = SipHasher::new();\n\n key.hash(hash);\n hash.finish() % 248\n }\n\n \/\/\/ Get a refference to an entry\n pub fn get(&self, key: &T) -> Option<&U> {\n &self.values[self.get_pos(key)].find(|(k, v)| key == k)\n }\n \/\/\/ Get a mutable refference to an entry\n pub fn get_mut(&mut self, key: &T) -> Option<&mut U> {\n &mut self.values[self.get_pos(key)].find(|(k, v)| key == k)\n }\n \/\/\/ Set the value of an entry\n pub fn set(&mut self, key: &T, val: &U) {\n match self.get_mut(key) {\n Some(e) => e,\n None => {\n self.values[self.get_pos(key)]\n .keys\n .push((*key, *val));\n }\n }\n }\n}\nRemove hashmap in favor of libcollection<|endoftext|>"} {"text":"\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\nThe Finally trait provides a method, `finally` on\nstack closures that emulates Java-style try\/finally blocks.\n\n# Example\n\n~~~\ndo || {\n ...\n}.finally {\n alway_run_this();\n}\n~~~\n*\/\n\nuse ops::Drop;\n\n#[cfg(test)] use task::failing;\n\npub trait Finally {\n fn finally(&self, dtor: &fn()) -> T;\n}\n\nimpl<'self,T> Finally for &'self fn() -> T {\n fn finally(&self, dtor: &fn()) -> T {\n let _d = Finallyalizer {\n dtor: dtor\n };\n\n (*self)()\n }\n}\n\nstruct Finallyalizer<'self> {\n dtor: &'self fn()\n}\n\n#[unsafe_destructor]\nimpl<'self> Drop for Finallyalizer<'self> {\n fn finalize(&self) {\n (self.dtor)();\n }\n}\n\n#[test]\nfn test_success() {\n let mut i = 0;\n do (|| {\n i = 10;\n }).finally {\n assert!(!failing());\n assert!(i == 10);\n i = 20;\n }\n assert!(i == 20);\n}\n\n#[test]\n#[ignore(cfg(windows))]\n#[should_fail]\nfn test_fail() {\n let mut i = 0;\n do (|| {\n i = 10;\n fail!();\n }).finally {\n assert!(failing());\n assert!(i == 10);\n }\n}\n\n#[test]\nfn test_retval() {\n let closure: &fn() -> int = || 10;\n let i = do closure.finally { };\n assert!(i == 10);\n}\n\n#[test]\nfn test_compact() {\n \/\/ FIXME #4727: Should be able to use a fn item instead\n \/\/ of a closure for do_some_fallible_work,\n \/\/ but it's a type error.\n let do_some_fallible_work: &fn() = || { };\n fn but_always_run_this_function() { }\n do_some_fallible_work.finally(\n but_always_run_this_function);\n}\nauto merge of #5839 : bjz\/rust\/master, r=brson\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\nThe Finally trait provides a method, `finally` on\nstack closures that emulates Java-style try\/finally blocks.\n\n# Example\n\n~~~\ndo || {\n ...\n}.finally {\n alway_run_this();\n}\n~~~\n*\/\n\nuse ops::Drop;\n\n#[cfg(test)] use task::{failing, spawn};\n\npub trait Finally {\n fn finally(&self, dtor: &fn()) -> T;\n}\n\nimpl<'self,T> Finally for &'self fn() -> T {\n fn finally(&self, dtor: &fn()) -> T {\n let _d = Finallyalizer {\n dtor: dtor\n };\n\n (*self)()\n }\n}\n\nimpl Finally for ~fn() -> T {\n fn finally(&self, dtor: &fn()) -> T {\n let _d = Finallyalizer {\n dtor: dtor\n };\n\n (*self)()\n }\n}\n\nimpl Finally for @fn() -> T {\n fn finally(&self, dtor: &fn()) -> T {\n let _d = Finallyalizer {\n dtor: dtor\n };\n\n (*self)()\n }\n}\n\nstruct Finallyalizer<'self> {\n dtor: &'self fn()\n}\n\n#[unsafe_destructor]\nimpl<'self> Drop for Finallyalizer<'self> {\n fn finalize(&self) {\n (self.dtor)();\n }\n}\n\n#[test]\nfn test_success() {\n let mut i = 0;\n do (|| {\n i = 10;\n }).finally {\n assert!(!failing());\n assert!(i == 10);\n i = 20;\n }\n assert!(i == 20);\n}\n\n#[test]\n#[ignore(cfg(windows))]\n#[should_fail]\nfn test_fail() {\n let mut i = 0;\n do (|| {\n i = 10;\n fail!();\n }).finally {\n assert!(failing());\n assert!(i == 10);\n }\n}\n\n#[test]\nfn test_retval() {\n let closure: &fn() -> int = || 10;\n let i = do closure.finally { };\n assert!(i == 10);\n}\n\n#[test]\nfn test_compact() {\n \/\/ FIXME #4727: Should be able to use a fn item instead\n \/\/ of a closure for do_some_fallible_work,\n \/\/ but it's a type error.\n let do_some_fallible_work: &fn() = || { };\n fn but_always_run_this_function() { }\n do_some_fallible_work.finally(\n but_always_run_this_function);\n}\n\n#[test]\nfn test_owned() {\n fn spawn_with_finalizer(f: ~fn()) {\n do spawn { do f.finally { } }\n }\n let owned: ~fn() = || { };\n spawn_with_finalizer(owned);\n}\n\n#[test]\nfn test_managed() {\n let i = @mut 10;\n let managed: @fn() -> int = || {\n let r = *i;\n *i += 10;\n r\n };\n assert!(do managed.finally {} == 10);\n assert!(*i == 20);\n}<|endoftext|>"} {"text":"add mutable vector iterator<|endoftext|>"} {"text":"Add test for #2989use std;\n\ntrait methods {\n fn to_bytes() -> ~[u8];\n}\n\nimpl (): methods {\n fn to_bytes() -> ~[u8] {\n vec::from_elem(0, 0)\n }\n}\n\n\/\/ the position of this function is significant! 
- if it comes before methods\n\/\/ then it works, if it comes after it then it doesnt!\nfn to_bools(bitv: {storage: ~[u64]}) -> ~[bool] {\n vec::from_fn(8, |i| {\n let w = i \/ 64;\n let b = i % 64;\n let x = 1u64 & (bitv.storage[w] >> b);\n x == 1u64\n })\n}\n\nfn main() {\n let bools = ~[false, false, true, false, false, true, true, false];\n let bools2 = to_bools({storage: ~[0b01100100]});\n\n for uint::range(0, 8) |i| {\n io::println(#fmt(\"%u => %u vs %u\", i, bools[i] as uint, bools2[i] as uint));\n }\n\n assert bools == bools2;\n}<|endoftext|>"} {"text":"Problem 04enum List {\n Cons(T, ~List),\n Nil\n}\n\nfn length(list: &List) -> uint {\n match *list {\n Nil => 0,\n Cons(_, ~ref rest) => length(rest)+1\n }\n}\n\nfn main() {\n let list: List = Cons('a', ~Cons('b', ~Cons('c', ~Cons('d', ~Nil))));\n println!(\"{}\", length(&list));\n}\n<|endoftext|>"} {"text":"#[doc = \"Prunes branches of the document tree that contain no documentation\"];\n\nexport mk_pass;\n\nfn mk_pass() -> pass {\n run\n}\n\ntype ctxt = {\n mutable have_docs: bool\n};\n\nfn run(\n _srv: astsrv::srv,\n doc: doc::cratedoc\n) -> doc::cratedoc {\n let ctxt = {\n mutable have_docs: true\n };\n let fold = fold::fold({\n fold_mod: fold_mod,\n fold_fn: fold_fn,\n fold_const: fold_const,\n fold_modlist: fold_modlist,\n fold_fnlist: fold_fnlist,\n fold_constlist: fold_constlist\n with *fold::default_seq_fold(ctxt)\n });\n fold.fold_crate(fold, doc)\n}\n\nfn fold_mod(\n fold: fold::fold,\n doc: doc::moddoc\n) -> doc::moddoc {\n let doc = fold::default_seq_fold_mod(fold, doc);\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none\n || vec::is_not_empty(*doc.mods)\n || vec::is_not_empty(*doc.fns);\n ret doc;\n}\n\nfn fold_fn(\n fold: fold::fold,\n doc: doc::fndoc\n) -> doc::fndoc {\n let have_arg_docs = false;\n let doc = ~{\n args: vec::filter_map(doc.args) {|doc|\n if option::is_some(doc.desc) {\n have_arg_docs = true;\n some(doc)\n } else {\n none\n }\n },\n return: {\n ty: if option::is_some(doc.return.desc) {\n doc.return.ty\n } else {\n none\n }\n with doc.return\n }\n with *doc\n };\n\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none\n || have_arg_docs\n || doc.return.desc != none;\n ret doc;\n}\n\n#[test]\nfn should_elide_undocumented_arguments() {\n let source = \"#[doc = \\\"hey\\\"] fn a(b: int) { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_empty(doc.topmod.fns[0].args);\n}\n\n#[test]\nfn should_not_elide_fns_with_documented_arguments() {\n let source = \"#[doc(args(a = \\\"b\\\"))] fn a(a: int) { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.fns);\n}\n\n#[test]\nfn should_elide_undocumented_return_values() {\n let source = \"#[doc = \\\"fonz\\\"] fn a() -> int { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = tystr_pass::mk_pass()(srv, doc);\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert doc.topmod.fns[0].return.ty == none;\n}\n\nfn fold_modlist(\n fold: fold::fold,\n list: doc::modlist\n) -> doc::modlist {\n doc::modlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_mod(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_mods() {\n let 
source = \"mod a { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.mods);\n}\n\n#[test]\nfn should_not_elide_undocument_mods_with_documented_mods() {\n let source = \"mod a { #[doc = \\\"b\\\"] mod b { } }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.mods);\n}\n\n#[test]\nfn should_not_elide_undocument_mods_with_documented_fns() {\n let source = \"mod a { #[doc = \\\"b\\\"] fn b() { } }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.mods);\n}\n\nfn fold_fnlist(\n fold: fold::fold,\n list: doc::fnlist\n) -> doc::fnlist {\n doc::fnlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_fn(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_fns() {\n let source = \"fn a() { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.fns);\n}\n\nfn fold_const(\n fold: fold::fold,\n doc: doc::constdoc\n) -> doc::constdoc {\n let doc = fold::default_seq_fold_const(fold, doc);\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none;\n ret doc;\n}\n\nfn fold_constlist(\n fold: fold::fold,\n list: doc::constlist\n) -> doc::constlist {\n doc::constlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_const(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_consts() {\n let source = \"const a: bool = true;\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.consts);\n}\nrustdoc: Don't prune functions with documented failure conditions#[doc = \"Prunes branches of the document tree that contain no documentation\"];\n\nexport mk_pass;\n\nfn mk_pass() -> pass {\n run\n}\n\ntype ctxt = {\n mutable have_docs: bool\n};\n\nfn run(\n _srv: astsrv::srv,\n doc: doc::cratedoc\n) -> doc::cratedoc {\n let ctxt = {\n mutable have_docs: true\n };\n let fold = fold::fold({\n fold_mod: fold_mod,\n fold_fn: fold_fn,\n fold_const: fold_const,\n fold_modlist: fold_modlist,\n fold_fnlist: fold_fnlist,\n fold_constlist: fold_constlist\n with *fold::default_seq_fold(ctxt)\n });\n fold.fold_crate(fold, doc)\n}\n\nfn fold_mod(\n fold: fold::fold,\n doc: doc::moddoc\n) -> doc::moddoc {\n let doc = fold::default_seq_fold_mod(fold, doc);\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none\n || vec::is_not_empty(*doc.mods)\n || vec::is_not_empty(*doc.fns);\n ret doc;\n}\n\nfn fold_fn(\n fold: fold::fold,\n doc: doc::fndoc\n) -> doc::fndoc {\n let have_arg_docs = false;\n let doc = ~{\n args: vec::filter_map(doc.args) {|doc|\n if option::is_some(doc.desc) {\n have_arg_docs = true;\n some(doc)\n } else {\n none\n }\n },\n return: {\n ty: if option::is_some(doc.return.desc) {\n doc.return.ty\n } else {\n none\n }\n with doc.return\n }\n with *doc\n };\n\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none\n || have_arg_docs\n || doc.return.desc != none\n || doc.failure != none;\n ret doc;\n}\n\n#[test]\nfn should_elide_undocumented_arguments() {\n let source 
= \"#[doc = \\\"hey\\\"] fn a(b: int) { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_empty(doc.topmod.fns[0].args);\n}\n\n#[test]\nfn should_not_elide_fns_with_documented_arguments() {\n let source = \"#[doc(args(a = \\\"b\\\"))] fn a(a: int) { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.fns);\n}\n\n#[test]\nfn should_elide_undocumented_return_values() {\n let source = \"#[doc = \\\"fonz\\\"] fn a() -> int { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = tystr_pass::mk_pass()(srv, doc);\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert doc.topmod.fns[0].return.ty == none;\n}\n\n#[test]\nfn should_not_elide_fns_with_documented_failure_conditions() {\n let source = \"#[doc(failure = \\\"yup\\\")] fn a() { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.fns);\n}\n\nfn fold_modlist(\n fold: fold::fold,\n list: doc::modlist\n) -> doc::modlist {\n doc::modlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_mod(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_mods() {\n let source = \"mod a { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.mods);\n}\n\n#[test]\nfn should_not_elide_undocument_mods_with_documented_mods() {\n let source = \"mod a { #[doc = \\\"b\\\"] mod b { } }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.mods);\n}\n\n#[test]\nfn should_not_elide_undocument_mods_with_documented_fns() {\n let source = \"mod a { #[doc = \\\"b\\\"] fn b() { } }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass()(srv, doc);\n let doc = run(srv, doc);\n assert vec::is_not_empty(*doc.topmod.mods);\n}\n\nfn fold_fnlist(\n fold: fold::fold,\n list: doc::fnlist\n) -> doc::fnlist {\n doc::fnlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_fn(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_fns() {\n let source = \"fn a() { }\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, \"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.fns);\n}\n\nfn fold_const(\n fold: fold::fold,\n doc: doc::constdoc\n) -> doc::constdoc {\n let doc = fold::default_seq_fold_const(fold, doc);\n fold.ctxt.have_docs =\n doc.brief != none\n || doc.desc != none;\n ret doc;\n}\n\nfn fold_constlist(\n fold: fold::fold,\n list: doc::constlist\n) -> doc::constlist {\n doc::constlist(vec::filter_map(*list) {|doc|\n let doc = fold.fold_const(fold, doc);\n if fold.ctxt.have_docs {\n some(doc)\n } else {\n none\n }\n })\n}\n\n#[test]\nfn should_elide_undocumented_consts() {\n let source = \"const a: bool = true;\";\n let srv = astsrv::mk_srv_from_str(source);\n let doc = extract::from_srv(srv, 
\"\");\n let doc = run(srv, doc);\n assert vec::is_empty(*doc.topmod.consts);\n}\n<|endoftext|>"} {"text":"use alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler;\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\n\/\/\/ Excecute an excecutable\npub fn execute(url: Url, mut args: Vec) {\n unsafe {\n\n\n\n if let Some(current) = Context::current_mut() {\n\n let reenable = scheduler::start_no_ints();\n let cptr = current.deref_mut();\n scheduler::end_no_ints(reenable);\n\n Context::spawn(\"kexec \".to_string() + &url.string, box move || {\n if let Some(mut resource) = url.open() {\n let mut vec: Vec = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n let entry = executable.entry();\n let mut memory = Vec::new();\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n let physical_address = memory::alloc(virtual_size);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy(physical_address as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n ::memset((physical_address + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n\n if entry > 0 && ! 
memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut context_args: Vec = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..args.len() {\n if let Some(arg) = args.get(args.len() - i - 1) {\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n let reenable = scheduler::start_no_ints();\n\n let context = cptr;\n\n context.name = url.to_string();\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: memory::alloc(CONTEXT_STACK_SIZE),\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new((*context.cwd.get()).clone()));\n context.memory = Arc::new(UnsafeCell::new(memory));\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::();\n ptr::write(sp as *mut usize, *arg);\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(1 << 9);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n\n scheduler::end_no_ints(reenable);\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n });\n\n loop {\n context_switch(false);\n }\n }\n }\n}\nFix a typouse alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler;\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\n\/\/\/ Execute an executable\npub fn execute(url: Url, mut args: Vec) {\n unsafe {\n\n\n\n if let Some(current) = Context::current_mut() {\n\n let reenable = scheduler::start_no_ints();\n let cptr = current.deref_mut();\n scheduler::end_no_ints(reenable);\n\n Context::spawn(\"kexec \".to_string() + &url.string, box move || {\n if let Some(mut resource) = url.open() {\n let mut vec: Vec = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n let entry = executable.entry();\n let mut memory = Vec::new();\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n let physical_address = memory::alloc(virtual_size);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy(physical_address as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n ::memset((physical_address + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n\n if entry > 0 && ! 
memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut context_args: Vec = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..args.len() {\n if let Some(arg) = args.get(args.len() - i - 1) {\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n let reenable = scheduler::start_no_ints();\n\n let context = cptr;\n\n context.name = url.to_string();\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: memory::alloc(CONTEXT_STACK_SIZE),\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new((*context.cwd.get()).clone()));\n context.memory = Arc::new(UnsafeCell::new(memory));\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::();\n ptr::write(sp as *mut usize, *arg);\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(1 << 9);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n\n scheduler::end_no_ints(reenable);\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n });\n\n loop {\n context_switch(false);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"[add] currency converteruse std::collections::HashMap;\n\nstruct Converter {\n val: HashMap\n}\n\nimpl Converter {\n fn init() -> Converter {\n Converter { \n val: HashMap::new()\n }\n }\n\n fn add_new, T: Into>(&mut self, fname: U, fvalue: T) {\n self.val.insert(fname.into(), fvalue.into());\n }\n\n fn convert, T: Into>(&self, fvalue: T, fname: U, tname: U) -> Option {\n let p1 = match self.val.get(&fname.into()) {\n Some(value) => value,\n None => return None\n };\n let p2 = match self.val.get(&tname.into()) {\n Some(value) => value,\n None => return None\n };\n Some(fvalue.into() * p1 \/ p2)\n }\n}\n\nfn main() {\n let mut converter = Converter::init();\n converter.add_new(\"rub\", 1);\n converter.add_new(\"eur\", 68);\n converter.add_new(\"usd\", 59);\n converter.add_new(\"btc\", 155763);\n let from_v = 391.30882352941177;\n let r = converter.convert(from_v, \"eur\", \"usd\").unwrap();\n println!(\"{} eur -> {} usd\", from_v, r);\n let from_v = 110882.13559322034;\n let r = converter.convert(from_v, \"usd\", \"btc\").unwrap();\n println!(\"{} usd -> {} btc\", from_v, r);\n}<|endoftext|>"} {"text":"Still working on word2vec<|endoftext|>"} {"text":"add a scary test casefn main() {\n \/\/ If we are careful, we can exploit data layout...\n let raw = unsafe {\n std::mem::transmute::<&[u8], [usize; 2]>(&[42])\n };\n let ptr = raw[0] + raw[1];\n let ptr = ptr as *const u8;\n \/\/ The pointer is one-past-the end, but we decrement it into bounds before using it\n assert_eq!(unsafe { *ptr.offset(-1) }, 42);\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\nThe `Ord` and `Eq` comparison traits\n\nThis module contains the definition of both `Ord` and `Eq` which define\nthe common interfaces for doing comparison. Both are language items\nthat the compiler uses to implement the comparison operators. Rust code\nmay implement `Ord` to overload the `<`, `<=`, `>`, and `>=` operators,\nand `Eq` to overload the `==` and `!=` operators.\n\n*\/\n\n#[allow(missing_doc)];\n\n\/**\n* Trait for values that can be compared for equality and inequality.\n*\n* This trait allows partial equality, where types can be unordered instead of strictly equal or\n* unequal. For example, with the built-in floating-point types `a == b` and `a != b` will both\n* evaluate to false if either `a` or `b` is NaN (cf. IEEE 754-2008 section 5.11).\n*\n* Eq only requires the `eq` method to be implemented; `ne` is its negation by default.\n*\n* Eventually, this will be implemented by default for types that implement `TotalEq`.\n*\/\n#[lang=\"eq\"]\npub trait Eq {\n fn eq(&self, other: &Self) -> bool;\n\n #[inline]\n fn ne(&self, other: &Self) -> bool { !self.eq(other) }\n}\n\n\/\/\/ Trait for equality comparisons where `a == b` and `a != b` are strict inverses.\npub trait TotalEq: Eq {\n \/\/\/ This method must return the same value as `eq`. It exists to prevent\n \/\/\/ deriving `TotalEq` from fields not implementing the `TotalEq` trait.\n fn equals(&self, other: &Self) -> bool {\n self.eq(other)\n }\n}\n\nmacro_rules! totaleq_impl(\n ($t:ty) => {\n impl TotalEq for $t {\n #[inline]\n fn equals(&self, other: &$t) -> bool { *self == *other }\n }\n }\n)\n\ntotaleq_impl!(bool)\n\ntotaleq_impl!(u8)\ntotaleq_impl!(u16)\ntotaleq_impl!(u32)\ntotaleq_impl!(u64)\n\ntotaleq_impl!(i8)\ntotaleq_impl!(i16)\ntotaleq_impl!(i32)\ntotaleq_impl!(i64)\n\ntotaleq_impl!(int)\ntotaleq_impl!(uint)\n\ntotaleq_impl!(char)\n\n#[deriving(Clone, Eq, Show)]\npub enum Ordering { Less = -1, Equal = 0, Greater = 1 }\n\n\/\/\/ Trait for types that form a total order\npub trait TotalOrd: TotalEq + Ord {\n fn cmp(&self, other: &Self) -> Ordering;\n}\n\nimpl TotalEq for Ordering {\n #[inline]\n fn equals(&self, other: &Ordering) -> bool {\n *self == *other\n }\n}\nimpl TotalOrd for Ordering {\n #[inline]\n fn cmp(&self, other: &Ordering) -> Ordering {\n (*self as int).cmp(&(*other as int))\n }\n}\n\nimpl Ord for Ordering {\n #[inline]\n fn lt(&self, other: &Ordering) -> bool { (*self as int) < (*other as int) }\n}\n\nmacro_rules! totalord_impl(\n ($t:ty) => {\n impl TotalOrd for $t {\n #[inline]\n fn cmp(&self, other: &$t) -> Ordering {\n if *self < *other { Less }\n else if *self > *other { Greater }\n else { Equal }\n }\n }\n }\n)\n\ntotalord_impl!(u8)\ntotalord_impl!(u16)\ntotalord_impl!(u32)\ntotalord_impl!(u64)\n\ntotalord_impl!(i8)\ntotalord_impl!(i16)\ntotalord_impl!(i32)\ntotalord_impl!(i64)\n\ntotalord_impl!(int)\ntotalord_impl!(uint)\n\ntotalord_impl!(char)\n\n\/\/\/ Compares (a1, b1) against (a2, b2), where the a values are more significant.\npub fn cmp2(\n a1: &A, b1: &B,\n a2: &A, b2: &B) -> Ordering\n{\n match a1.cmp(a2) {\n Less => Less,\n Greater => Greater,\n Equal => b1.cmp(b2)\n }\n}\n\n\/**\nReturn `o1` if it is not `Equal`, otherwise `o2`. 
Simulates the\nlexical ordering on a type `(int, int)`.\n*\/\n#[inline]\npub fn lexical_ordering(o1: Ordering, o2: Ordering) -> Ordering {\n match o1 {\n Equal => o2,\n _ => o1\n }\n}\n\n\/**\n* Trait for values that can be compared for a sort-order.\n*\n* Ord only requires implementation of the `lt` method,\n* with the others generated from default implementations.\n*\n* However it remains possible to implement the others separately,\n* for compatibility with floating-point NaN semantics\n* (cf. IEEE 754-2008 section 5.11).\n*\/\n#[lang=\"ord\"]\npub trait Ord: Eq {\n fn lt(&self, other: &Self) -> bool;\n #[inline]\n fn le(&self, other: &Self) -> bool { !other.lt(self) }\n #[inline]\n fn gt(&self, other: &Self) -> bool { other.lt(self) }\n #[inline]\n fn ge(&self, other: &Self) -> bool { !self.lt(other) }\n}\n\n\/\/\/ The equivalence relation. Two values may be equivalent even if they are\n\/\/\/ of different types. The most common use case for this relation is\n\/\/\/ container types; e.g. it is often desirable to be able to use `&str`\n\/\/\/ values to look up entries in a container with `~str` keys.\npub trait Equiv {\n fn equiv(&self, other: &T) -> bool;\n}\n\n#[inline]\npub fn min(v1: T, v2: T) -> T {\n if v1 < v2 { v1 } else { v2 }\n}\n\n#[inline]\npub fn max(v1: T, v2: T) -> T {\n if v1 > v2 { v1 } else { v2 }\n}\n\n#[cfg(test)]\nmod test {\n use super::lexical_ordering;\n\n #[test]\n fn test_int_totalord() {\n assert_eq!(5.cmp(&10), Less);\n assert_eq!(10.cmp(&5), Greater);\n assert_eq!(5.cmp(&5), Equal);\n assert_eq!((-5).cmp(&12), Less);\n assert_eq!(12.cmp(-5), Greater);\n }\n\n #[test]\n fn test_cmp2() {\n assert_eq!(cmp2(1, 2, 3, 4), Less);\n assert_eq!(cmp2(3, 2, 3, 4), Less);\n assert_eq!(cmp2(5, 2, 3, 4), Greater);\n assert_eq!(cmp2(5, 5, 5, 4), Greater);\n }\n\n #[test]\n fn test_int_totaleq() {\n assert!(5.equals(&5));\n assert!(!2.equals(&17));\n }\n\n #[test]\n fn test_ordering_order() {\n assert!(Less < Equal);\n assert_eq!(Greater.cmp(&Less), Greater);\n }\n\n #[test]\n fn test_lexical_ordering() {\n fn t(o1: Ordering, o2: Ordering, e: Ordering) {\n assert_eq!(lexical_ordering(o1, o2), e);\n }\n\n let xs = [Less, Equal, Greater];\n for &o in xs.iter() {\n t(Less, o, Less);\n t(Equal, o, o);\n t(Greater, o, Greater);\n }\n }\n}\nRemove std::cmp::cmp2.\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\nThe `Ord` and `Eq` comparison traits\n\nThis module contains the definition of both `Ord` and `Eq` which define\nthe common interfaces for doing comparison. Both are language items\nthat the compiler uses to implement the comparison operators. Rust code\nmay implement `Ord` to overload the `<`, `<=`, `>`, and `>=` operators,\nand `Eq` to overload the `==` and `!=` operators.\n\n*\/\n\n#[allow(missing_doc)];\n\n\/**\n* Trait for values that can be compared for equality and inequality.\n*\n* This trait allows partial equality, where types can be unordered instead of strictly equal or\n* unequal. For example, with the built-in floating-point types `a == b` and `a != b` will both\n* evaluate to false if either `a` or `b` is NaN (cf. 
IEEE 754-2008 section 5.11).\n*\n* Eq only requires the `eq` method to be implemented; `ne` is its negation by default.\n*\n* Eventually, this will be implemented by default for types that implement `TotalEq`.\n*\/\n#[lang=\"eq\"]\npub trait Eq {\n fn eq(&self, other: &Self) -> bool;\n\n #[inline]\n fn ne(&self, other: &Self) -> bool { !self.eq(other) }\n}\n\n\/\/\/ Trait for equality comparisons where `a == b` and `a != b` are strict inverses.\npub trait TotalEq: Eq {\n \/\/\/ This method must return the same value as `eq`. It exists to prevent\n \/\/\/ deriving `TotalEq` from fields not implementing the `TotalEq` trait.\n fn equals(&self, other: &Self) -> bool {\n self.eq(other)\n }\n}\n\nmacro_rules! totaleq_impl(\n ($t:ty) => {\n impl TotalEq for $t {\n #[inline]\n fn equals(&self, other: &$t) -> bool { *self == *other }\n }\n }\n)\n\ntotaleq_impl!(bool)\n\ntotaleq_impl!(u8)\ntotaleq_impl!(u16)\ntotaleq_impl!(u32)\ntotaleq_impl!(u64)\n\ntotaleq_impl!(i8)\ntotaleq_impl!(i16)\ntotaleq_impl!(i32)\ntotaleq_impl!(i64)\n\ntotaleq_impl!(int)\ntotaleq_impl!(uint)\n\ntotaleq_impl!(char)\n\n#[deriving(Clone, Eq, Show)]\npub enum Ordering { Less = -1, Equal = 0, Greater = 1 }\n\n\/\/\/ Trait for types that form a total order\npub trait TotalOrd: TotalEq + Ord {\n fn cmp(&self, other: &Self) -> Ordering;\n}\n\nimpl TotalEq for Ordering {\n #[inline]\n fn equals(&self, other: &Ordering) -> bool {\n *self == *other\n }\n}\nimpl TotalOrd for Ordering {\n #[inline]\n fn cmp(&self, other: &Ordering) -> Ordering {\n (*self as int).cmp(&(*other as int))\n }\n}\n\nimpl Ord for Ordering {\n #[inline]\n fn lt(&self, other: &Ordering) -> bool { (*self as int) < (*other as int) }\n}\n\nmacro_rules! totalord_impl(\n ($t:ty) => {\n impl TotalOrd for $t {\n #[inline]\n fn cmp(&self, other: &$t) -> Ordering {\n if *self < *other { Less }\n else if *self > *other { Greater }\n else { Equal }\n }\n }\n }\n)\n\ntotalord_impl!(u8)\ntotalord_impl!(u16)\ntotalord_impl!(u32)\ntotalord_impl!(u64)\n\ntotalord_impl!(i8)\ntotalord_impl!(i16)\ntotalord_impl!(i32)\ntotalord_impl!(i64)\n\ntotalord_impl!(int)\ntotalord_impl!(uint)\n\ntotalord_impl!(char)\n\n\/**\nReturn `o1` if it is not `Equal`, otherwise `o2`. Simulates the\nlexical ordering on a type `(int, int)`.\n*\/\n#[inline]\npub fn lexical_ordering(o1: Ordering, o2: Ordering) -> Ordering {\n match o1 {\n Equal => o2,\n _ => o1\n }\n}\n\n\/**\n* Trait for values that can be compared for a sort-order.\n*\n* Ord only requires implementation of the `lt` method,\n* with the others generated from default implementations.\n*\n* However it remains possible to implement the others separately,\n* for compatibility with floating-point NaN semantics\n* (cf. IEEE 754-2008 section 5.11).\n*\/\n#[lang=\"ord\"]\npub trait Ord: Eq {\n fn lt(&self, other: &Self) -> bool;\n #[inline]\n fn le(&self, other: &Self) -> bool { !other.lt(self) }\n #[inline]\n fn gt(&self, other: &Self) -> bool { other.lt(self) }\n #[inline]\n fn ge(&self, other: &Self) -> bool { !self.lt(other) }\n}\n\n\/\/\/ The equivalence relation. Two values may be equivalent even if they are\n\/\/\/ of different types. The most common use case for this relation is\n\/\/\/ container types; e.g. 
it is often desirable to be able to use `&str`\n\/\/\/ values to look up entries in a container with `~str` keys.\npub trait Equiv {\n fn equiv(&self, other: &T) -> bool;\n}\n\n#[inline]\npub fn min(v1: T, v2: T) -> T {\n if v1 < v2 { v1 } else { v2 }\n}\n\n#[inline]\npub fn max(v1: T, v2: T) -> T {\n if v1 > v2 { v1 } else { v2 }\n}\n\n#[cfg(test)]\nmod test {\n use super::lexical_ordering;\n\n #[test]\n fn test_int_totalord() {\n assert_eq!(5.cmp(&10), Less);\n assert_eq!(10.cmp(&5), Greater);\n assert_eq!(5.cmp(&5), Equal);\n assert_eq!((-5).cmp(&12), Less);\n assert_eq!(12.cmp(-5), Greater);\n }\n\n #[test]\n fn test_int_totaleq() {\n assert!(5.equals(&5));\n assert!(!2.equals(&17));\n }\n\n #[test]\n fn test_ordering_order() {\n assert!(Less < Equal);\n assert_eq!(Greater.cmp(&Less), Greater);\n }\n\n #[test]\n fn test_lexical_ordering() {\n fn t(o1: Ordering, o2: Ordering, e: Ordering) {\n assert_eq!(lexical_ordering(o1, o2), e);\n }\n\n let xs = [Less, Equal, Greater];\n for &o in xs.iter() {\n t(Less, o, Less);\n t(Equal, o, o);\n t(Greater, o, Greater);\n }\n }\n}\n<|endoftext|>"} {"text":"#![crate_name = \"rustful_macros\"]\n\n#![crate_type = \"dylib\"]\n\n#![doc(html_root_url = \"http:\/\/ogeon.github.io\/rustful\/doc\/\")]\n\n#![feature(plugin_registrar, quote)]\n\n#![allow(unstable)]\n\n\/\/!This crate provides some helpful macros for rustful, including `insert_routes!` and `content_type!`.\n\/\/!\n\/\/!#`insert_routes!`\n\/\/!The `insert_routes!` macro generates routes from the provided handlers and routing tree and\n\/\/!adds them to the provided router. The router is then returned.\n\/\/!\n\/\/!This can be useful to lower the risk of typing errors, among other things.\n\/\/!\n\/\/!##Example 1\n\/\/!\n\/\/!```rust ignore\n\/\/!#![feature(plugin)]\n\/\/!#[plugin]\n\/\/!#[no_link]\n\/\/!extern crate rustful_macros;\n\/\/!\n\/\/!extern crate rustful;\n\/\/!\n\/\/!...\n\/\/!\n\/\/!let router = insert_routes!{\n\/\/! TreeRouter::new(): {\n\/\/! \"\/about\" => Get: about_us,\n\/\/! \"\/user\/:user\" => Get: show_user,\n\/\/! \"\/product\/:name\" => Get: show_product,\n\/\/! \"\/*\" => Get: show_error,\n\/\/! \"\/\" => Get: show_welcome\n\/\/! }\n\/\/!};\n\/\/!```\n\/\/!\n\/\/!##Example 2\n\/\/!\n\/\/!```rust ignore\n\/\/!#![feature(plugin)]\n\/\/!#[plugin]\n\/\/!#[no_link]\n\/\/!extern crate rustful_macros;\n\/\/!\n\/\/!extern crate rustful;\n\/\/!\n\/\/!...\n\/\/!\n\/\/!let mut router = TreeRouter::new();\n\/\/!insert_routes!{\n\/\/! &mut router: {\n\/\/! \"\/\" => Get: show_home,\n\/\/! \"home\" => Get: show_home,\n\/\/! \"user\/:username\" => {Get: show_user, Post: save_user},\n\/\/! \"product\" => {\n\/\/! Get: show_all_products,\n\/\/!\n\/\/! \"json\" => Get: send_all_product_data\n\/\/! \":id\" => {\n\/\/! Get: show_product,\n\/\/! Post | Delete: edit_product,\n\/\/!\n\/\/! \"json\" => Get: send_product_data\n\/\/! }\n\/\/! }\n\/\/! 
}\n\/\/!};\n\/\/!```\n\nextern crate syntax;\nextern crate rustc;\n\nuse std::path::BytesContainer;\n\nuse syntax::{ast, codemap};\nuse syntax::ext::base::{\n ExtCtxt, MacResult, MacExpr,\n NormalTT, TTMacroExpander\n};\nuse syntax::ext::build::AstBuilder;\nuse syntax::ext::quote::rt::ExtParseUtils;\nuse syntax::parse;\nuse syntax::parse::token;\nuse syntax::parse::parser;\nuse syntax::parse::parser::Parser;\nuse syntax::ptr::P;\n\/\/use syntax::print::pprust;\n\nuse rustc::plugin::Registry;\n\n#[plugin_registrar]\n#[doc(hidden)]\npub fn macro_registrar(reg: &mut Registry) {\n let expander = Box::new(expand_routes) as Box;\n reg.register_syntax_extension(token::intern(\"insert_routes\"), NormalTT(expander, None));\n}\n\nfn expand_routes<'cx>(cx: &'cx mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box {\n let insert_method = cx.ident_of(\"insert\");\n let router_ident = cx.ident_of(\"router\");\n let router_var = cx.expr_ident(sp, router_ident);\n let router_trait_path = cx.path_global(sp, vec![cx.ident_of(\"rustful\"), cx.ident_of(\"Router\")]);\n let router_trait_use = cx.view_use_simple(sp, ast::Inherited, router_trait_path);\n\n let mut parser = parse::new_parser_from_tts(\n cx.parse_sess(), cx.cfg(), tts.to_vec()\n );\n\n let mut calls = vec![cx.stmt_let(sp, true, router_ident, parser.parse_expr())];\n parser.expect(&token::Colon);\n\n for (path, method, handler) in parse_routes(cx, &mut parser).into_iter() {\n let path_expr = cx.parse_expr(format!(\"\\\"{}\\\"\", path));\n let method_expr = cx.expr_path(method);\n calls.push(cx.stmt_expr(cx.expr_method_call(sp, router_var.clone(), insert_method, vec![method_expr, path_expr, handler])));\n }\n\n let block = cx.expr_block(cx.block_all(sp, vec![router_trait_use], calls, Some(router_var)));\n\n MacExpr::new(block)\n}\n\nfn parse_routes(cx: &mut ExtCtxt, parser: &mut Parser) -> Vec<(String, ast::Path, P)> {\n parse_subroutes(\"\", cx, parser)\n}\n\nfn parse_subroutes(base: &str, cx: &mut ExtCtxt, parser: &mut Parser) -> Vec<(String, ast::Path, P)> {\n let mut routes = Vec::new();\n\n parser.eat(&token::OpenDelim(token::Brace));\n\n while !parser.eat(&token::Eof) {\n match parser.parse_optional_str() {\n Some((ref s, _, _)) => {\n if !parser.eat(&token::FatArrow) {\n parser.expect(&token::FatArrow);\n break;\n }\n\n let mut new_base = base.to_string();\n match s.container_as_str() {\n Some(s) => {\n new_base.push_str(s.trim_matches('\/'));\n new_base.push_str(\"\/\");\n },\n None => cx.span_err(parser.span, \"invalid path\")\n }\n\n if parser.eat(&token::Eof) {\n cx.span_err(parser.span, \"unexpected end of routing tree\");\n }\n\n if parser.eat(&token::OpenDelim(token::Brace)) {\n let subroutes = parse_subroutes(new_base.as_slice(), cx, parser);\n routes.push_all(subroutes.as_slice());\n\n if parser.eat(&token::CloseDelim(token::Brace)) {\n if !parser.eat(&token::Comma) {\n break;\n }\n } else {\n parser.expect_one_of(&[token::Comma, token::CloseDelim(token::Brace)], &[]);\n }\n } else {\n for (method, handler) in parse_handler(parser).into_iter() {\n routes.push((new_base.clone(), method, handler))\n }\n\n if !parser.eat(&token::Comma) {\n break;\n }\n }\n },\n None => {\n for (method, handler) in parse_handler(parser).into_iter() {\n routes.push((base.to_string(), method, handler))\n }\n\n if !parser.eat(&token::Comma) {\n break;\n }\n }\n }\n }\n\n routes\n}\n\nfn parse_handler(parser: &mut Parser) -> Vec<(ast::Path, P)> {\n let mut methods = Vec::new();\n\n loop {\n 
methods.push(parser.parse_path(parser::NoTypesAllowed));\n\n if parser.eat(&token::Colon) {\n break;\n }\n\n if !parser.eat(&token::BinOp(token::Or)) {\n parser.expect_one_of(&[token::Colon, token::BinOp(token::Or)], &[]);\n }\n }\n\n let handler = parser.parse_expr();\n\n methods.into_iter().map(|m| (m, handler.clone())).collect()\n}\n\n\n\/**\nA macro for assigning content types.\n\nIt takes a main type, a sub type and a parameter list. Instead of this:\n\n```\nresponse.headers.content_type = Some(MediaType {\n type_: String::from_str(\"text\"),\n subtype: String::from_str(\"html\"),\n parameters: vec!((String::from_str(\"charset\"), String::from_str(\"UTF-8\")))\n});\n```\n\nit can be written like this:\n\n```\nresponse.headers.content_type = content_type!(\"text\", \"html\", \"charset\": \"UTF-8\");\n```\n\nThe `\"charset\": \"UTF-8\"` part defines the parameter list for the content type.\nIt may contain more than one parameter, or be omitted:\n\n```\nresponse.headers.content_type = content_type!(\"application\", \"octet-stream\", \"type\": \"image\/gif\", \"padding\": \"4\");\n```\n\n```\nresponse.headers.content_type = content_type!(\"image\", \"png\");\n```\n**\/\n#[macro_export]\nmacro_rules! content_type {\n ($main_type:expr, $sub_type:expr) => ({\n ::rustful::mime::Mime (\n std::str::FromStr::from_str($main_type).unwrap(),\n std::str::FromStr::from_str($sub_type).unwrap(),\n Vec::new()\n )\n });\n\n ($main_type:expr, $sub_type:expr, $(($param:expr, $value:expr)),+) => ({\n ::rustful::mime::Mime (\n std::str::FromStr::from_str($main_type).unwrap(),\n std::str::FromStr::from_str($sub_type).unwrap(),\n vec!( $( (std::str::FromStr::from_str($param).unwrap(), std::str::FromStr::from_str($value).unwrap()) ),+ )\n )\n });\n}\n\n\n\/**\nA macro for callig `send` in response and aborting the handle function if it fails.\n\nThis macro will print an error to `stdout`.\n**\/\n#[macro_export]\nmacro_rules! try_send {\n ($writer:expr, $content:expr) => (\n match $writer.send($content) {\n Ok(v) => v,\n Err(::rustful::ResponseError::IoError(e)) => {\n println!(\"IO error: {}\", e);\n return;\n },\n Err(::rustful::ResponseError::PluginError(e)) => {\n println!(\"plugin error: {}\", e);\n return;\n }\n }\n );\n\n ($writer:expr, $content:expr, $what:expr) => (\n match $writer.send($content) {\n Ok(v) => v,\n Err(::rustful::ResponseError::IoError(e)) => {\n println!(\"IO error while {}: {}\", $what, e);\n return;\n },\n Err(::rustful::ResponseError::PluginError(e)) => {\n println!(\"plugin error while {}: {}\", $what, e);\n return;\n }\n }\n )\n}Update for libsyntax changes#![crate_name = \"rustful_macros\"]\n\n#![crate_type = \"dylib\"]\n\n#![doc(html_root_url = \"http:\/\/ogeon.github.io\/rustful\/doc\/\")]\n\n#![feature(plugin_registrar, quote)]\n\n#![allow(unstable)]\n\n\/\/!This crate provides some helpful macros for rustful, including `insert_routes!` and `content_type!`.\n\/\/!\n\/\/!#`insert_routes!`\n\/\/!The `insert_routes!` macro generates routes from the provided handlers and routing tree and\n\/\/!adds them to the provided router. The router is then returned.\n\/\/!\n\/\/!This can be useful to lower the risk of typing errors, among other things.\n\/\/!\n\/\/!##Example 1\n\/\/!\n\/\/!```rust ignore\n\/\/!#![feature(plugin)]\n\/\/!#[plugin]\n\/\/!#[no_link]\n\/\/!extern crate rustful_macros;\n\/\/!\n\/\/!extern crate rustful;\n\/\/!\n\/\/!...\n\/\/!\n\/\/!let router = insert_routes!{\n\/\/! TreeRouter::new(): {\n\/\/! \"\/about\" => Get: about_us,\n\/\/! 
\"\/user\/:user\" => Get: show_user,\n\/\/! \"\/product\/:name\" => Get: show_product,\n\/\/! \"\/*\" => Get: show_error,\n\/\/! \"\/\" => Get: show_welcome\n\/\/! }\n\/\/!};\n\/\/!```\n\/\/!\n\/\/!##Example 2\n\/\/!\n\/\/!```rust ignore\n\/\/!#![feature(plugin)]\n\/\/!#[plugin]\n\/\/!#[no_link]\n\/\/!extern crate rustful_macros;\n\/\/!\n\/\/!extern crate rustful;\n\/\/!\n\/\/!...\n\/\/!\n\/\/!let mut router = TreeRouter::new();\n\/\/!insert_routes!{\n\/\/! &mut router: {\n\/\/! \"\/\" => Get: show_home,\n\/\/! \"home\" => Get: show_home,\n\/\/! \"user\/:username\" => {Get: show_user, Post: save_user},\n\/\/! \"product\" => {\n\/\/! Get: show_all_products,\n\/\/!\n\/\/! \"json\" => Get: send_all_product_data\n\/\/! \":id\" => {\n\/\/! Get: show_product,\n\/\/! Post | Delete: edit_product,\n\/\/!\n\/\/! \"json\" => Get: send_product_data\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!};\n\/\/!```\n\nextern crate syntax;\nextern crate rustc;\n\nuse std::path::BytesContainer;\n\nuse syntax::{ast, codemap};\nuse syntax::ext::base::{\n ExtCtxt, MacResult, MacExpr,\n NormalTT, TTMacroExpander\n};\nuse syntax::ext::build::AstBuilder;\nuse syntax::ext::quote::rt::ExtParseUtils;\nuse syntax::parse;\nuse syntax::parse::token;\nuse syntax::parse::parser;\nuse syntax::parse::parser::Parser;\nuse syntax::ptr::P;\n\/\/use syntax::print::pprust;\n\nuse rustc::plugin::Registry;\n\n#[plugin_registrar]\n#[doc(hidden)]\npub fn macro_registrar(reg: &mut Registry) {\n let expander = Box::new(expand_routes) as Box;\n reg.register_syntax_extension(token::intern(\"insert_routes\"), NormalTT(expander, None));\n}\n\nfn expand_routes<'cx>(cx: &'cx mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box {\n let insert_method = cx.ident_of(\"insert\");\n let router_ident = cx.ident_of(\"router\");\n let router_var = cx.expr_ident(sp, router_ident);\n let router_trait_path = cx.path_global(sp, vec![cx.ident_of(\"rustful\"), cx.ident_of(\"Router\")]);\n let router_trait_use = cx.item_use_simple(sp, ast::Inherited, router_trait_path);\n\n let mut parser = parse::new_parser_from_tts(\n cx.parse_sess(), cx.cfg(), tts.to_vec()\n );\n\n let mut calls = vec![cx.stmt_item(sp, router_trait_use), cx.stmt_let(sp, true, router_ident, parser.parse_expr())];\n parser.expect(&token::Colon);\n\n for (path, method, handler) in parse_routes(cx, &mut parser).into_iter() {\n let path_expr = cx.parse_expr(format!(\"\\\"{}\\\"\", path));\n let method_expr = cx.expr_path(method);\n calls.push(cx.stmt_expr(cx.expr_method_call(sp, router_var.clone(), insert_method, vec![method_expr, path_expr, handler])));\n }\n\n let block = cx.expr_block(cx.block_all(sp, calls, Some(router_var)));\n\n MacExpr::new(block)\n}\n\nfn parse_routes(cx: &mut ExtCtxt, parser: &mut Parser) -> Vec<(String, ast::Path, P)> {\n parse_subroutes(\"\", cx, parser)\n}\n\nfn parse_subroutes(base: &str, cx: &mut ExtCtxt, parser: &mut Parser) -> Vec<(String, ast::Path, P)> {\n let mut routes = Vec::new();\n\n parser.eat(&token::OpenDelim(token::Brace));\n\n while !parser.eat(&token::Eof) {\n match parser.parse_optional_str() {\n Some((ref s, _, _)) => {\n if !parser.eat(&token::FatArrow) {\n parser.expect(&token::FatArrow);\n break;\n }\n\n let mut new_base = base.to_string();\n match s.container_as_str() {\n Some(s) => {\n new_base.push_str(s.trim_matches('\/'));\n new_base.push_str(\"\/\");\n },\n None => cx.span_err(parser.span, \"invalid path\")\n }\n\n if parser.eat(&token::Eof) {\n cx.span_err(parser.span, \"unexpected end of routing tree\");\n }\n\n if 
parser.eat(&token::OpenDelim(token::Brace)) {\n let subroutes = parse_subroutes(new_base.as_slice(), cx, parser);\n routes.push_all(subroutes.as_slice());\n\n if parser.eat(&token::CloseDelim(token::Brace)) {\n if !parser.eat(&token::Comma) {\n break;\n }\n } else {\n parser.expect_one_of(&[token::Comma, token::CloseDelim(token::Brace)], &[]);\n }\n } else {\n for (method, handler) in parse_handler(parser).into_iter() {\n routes.push((new_base.clone(), method, handler))\n }\n\n if !parser.eat(&token::Comma) {\n break;\n }\n }\n },\n None => {\n for (method, handler) in parse_handler(parser).into_iter() {\n routes.push((base.to_string(), method, handler))\n }\n\n if !parser.eat(&token::Comma) {\n break;\n }\n }\n }\n }\n\n routes\n}\n\nfn parse_handler(parser: &mut Parser) -> Vec<(ast::Path, P)> {\n let mut methods = Vec::new();\n\n loop {\n methods.push(parser.parse_path(parser::NoTypesAllowed));\n\n if parser.eat(&token::Colon) {\n break;\n }\n\n if !parser.eat(&token::BinOp(token::Or)) {\n parser.expect_one_of(&[token::Colon, token::BinOp(token::Or)], &[]);\n }\n }\n\n let handler = parser.parse_expr();\n\n methods.into_iter().map(|m| (m, handler.clone())).collect()\n}\n\n\n\/**\nA macro for assigning content types.\n\nIt takes a main type, a sub type and a parameter list. Instead of this:\n\n```\nresponse.headers.content_type = Some(MediaType {\n type_: String::from_str(\"text\"),\n subtype: String::from_str(\"html\"),\n parameters: vec!((String::from_str(\"charset\"), String::from_str(\"UTF-8\")))\n});\n```\n\nit can be written like this:\n\n```\nresponse.headers.content_type = content_type!(\"text\", \"html\", \"charset\": \"UTF-8\");\n```\n\nThe `\"charset\": \"UTF-8\"` part defines the parameter list for the content type.\nIt may contain more than one parameter, or be omitted:\n\n```\nresponse.headers.content_type = content_type!(\"application\", \"octet-stream\", \"type\": \"image\/gif\", \"padding\": \"4\");\n```\n\n```\nresponse.headers.content_type = content_type!(\"image\", \"png\");\n```\n**\/\n#[macro_export]\nmacro_rules! content_type {\n ($main_type:expr, $sub_type:expr) => ({\n ::rustful::mime::Mime (\n std::str::FromStr::from_str($main_type).unwrap(),\n std::str::FromStr::from_str($sub_type).unwrap(),\n Vec::new()\n )\n });\n\n ($main_type:expr, $sub_type:expr, $(($param:expr, $value:expr)),+) => ({\n ::rustful::mime::Mime (\n std::str::FromStr::from_str($main_type).unwrap(),\n std::str::FromStr::from_str($sub_type).unwrap(),\n vec!( $( (std::str::FromStr::from_str($param).unwrap(), std::str::FromStr::from_str($value).unwrap()) ),+ )\n )\n });\n}\n\n\n\/**\nA macro for callig `send` in response and aborting the handle function if it fails.\n\nThis macro will print an error to `stdout`.\n**\/\n#[macro_export]\nmacro_rules! try_send {\n ($writer:expr, $content:expr) => (\n match $writer.send($content) {\n Ok(v) => v,\n Err(::rustful::ResponseError::IoError(e)) => {\n println!(\"IO error: {}\", e);\n return;\n },\n Err(::rustful::ResponseError::PluginError(e)) => {\n println!(\"plugin error: {}\", e);\n return;\n }\n }\n );\n\n ($writer:expr, $content:expr, $what:expr) => (\n match $writer.send($content) {\n Ok(v) => v,\n Err(::rustful::ResponseError::IoError(e)) => {\n println!(\"IO error while {}: {}\", $what, e);\n return;\n },\n Err(::rustful::ResponseError::PluginError(e)) => {\n println!(\"plugin error while {}: {}\", $what, e);\n return;\n }\n }\n )\n}<|endoftext|>"} {"text":"add basic slice functions<|endoftext|>"} {"text":"ups. add evaluate_expression.rs\/\/! 
The `evaluate_expressions` module...\n\n\nuse backend::zcode::zfile::{ZOP, Operand, Variable, Constant, LargeConstant};\nuse frontend::ast::{ASTNode};\nuse frontend::codegen;\nuse frontend::codegen::{CodeGenManager};\nuse frontend::lexer::Token::{TokNumOp, TokCompOp, TokLogOp, TokInt, TokBoolean, TokVariable, TokFunction};\n\n\n\npub fn evaluate_expression<'a>(node: &'a ASTNode, code: &mut Vec, mut manager: &mut CodeGenManager<'a>) -> Operand {\n let mut temp_ids = CodeGenManager::new_temp_var_vec();\n evaluate_expression_internal(node, code, &mut temp_ids, manager)\n}\n\n\/\/\/ Evaluates an expression node to zCode.\nfn evaluate_expression_internal<'a>(node: &'a ASTNode, code: &mut Vec,\n temp_ids: &mut Vec, mut manager: &mut CodeGenManager<'a>) -> Operand {\n let n = node.as_default();\n\n match n.category {\n TokNumOp { ref op_name, .. } => {\n if n.childs.len() != 2 {\n panic!(\"Numeric operators need two arguments!\")\n }\n let eval0 = evaluate_expression_internal(&n.childs[0], code, temp_ids, manager);\n let eval1 = evaluate_expression_internal(&n.childs[1], code, temp_ids, manager);\n eval_num_op(&eval0, &eval1, &**op_name, code, temp_ids)\n },\n TokCompOp { ref op_name, .. } => {\n if n.childs.len() != 2 {\n panic!(\"Numeric operators need two arguments!\")\n }\n let eval0 = evaluate_expression_internal(&n.childs[0], code, temp_ids, manager);\n let eval1 = evaluate_expression_internal(&n.childs[1], code, temp_ids, manager);\n eval_comp_op(&eval0, &eval1, &**op_name, code, temp_ids, manager)\n },\n TokLogOp { ref op_name, .. } => {\n let eval0 = evaluate_expression_internal(&n.childs[0], code, temp_ids, manager);\n \n match &**op_name {\n \"and\" | \"or\" => {\n let eval1 = evaluate_expression_internal(&n.childs[1], code, temp_ids, manager);\n eval_and_or(&eval0, &eval1, &**op_name, code, temp_ids)\n },\n \"not\" => {\n eval_not(&eval0, code, temp_ids, manager)\n }, \n _ => panic!(\"unhandled op\")\n }\n }\n TokInt { ref value, .. } => {\n Operand::new_large_const(*value as i16)\n },\n TokBoolean { ref value, .. } => {\n boolstr_to_const(&**value)\n },\n TokVariable { ref name, .. } => {\n Operand::Var(manager.symbol_table.get_symbol_id(name))\n },\n TokFunction { ref name, .. 
} => {\n match &**name {\n \"random\" => {\n let args = &node.as_default().childs;\n if args.len() != 2 {\n panic!(\"Function random only supports 2 args\");\n }\n\n if args[0].as_default().childs.len() != 1 || args[1].as_default().childs.len() != 1 {\n panic!(\"Unsupported Expression\");\n }\n\n let from = &args[0].as_default().childs[0];\n let to = &args[1].as_default().childs[0];\n\n let from_value = evaluate_expression_internal(from, code, temp_ids, manager);\n let to_value = evaluate_expression_internal(to, code, temp_ids, manager);\n codegen::function_random(&from_value, &to_value, code, temp_ids)\n },\n _ => { panic!(\"Unsupported function: {}\", name)}\n }\n },\n _ => panic!(\"unhandled token {:?}\", n.category)\n }\n}\n\nfn eval_num_op<'a>(eval0: &Operand, eval1: &Operand, op_name: &str, code: &mut Vec, temp_ids: &mut Vec) -> Operand {\n if count_constants(eval0, eval1) == 2 {\n return direct_eval_num_op(eval0, eval1, op_name);\n }\n let save_var = determine_save_var(eval0, eval1, temp_ids);\n match op_name {\n \"+\" => {\n code.push(ZOP::Add{operand1: eval0.clone(), operand2: eval1.clone(), save_variable: save_var.clone()});\n },\n \"-\" => {\n code.push(ZOP::Sub{operand1: eval0.clone(), operand2: eval1.clone(), save_variable: save_var.clone()});\n },\n \"*\" => {\n\n },\n \"\/\" => {\n\n },\n \"%\" => {\n\n },\n _ => panic!(\"unhandled op\")\n };\n\n free_var_if_both_temp(eval0, eval1, temp_ids);\n\n Operand::Var(save_var)\n}\n\n\n\nfn direct_eval_num_op(eval0: &Operand, eval1: &Operand, op_name: &str) -> Operand {\n let mut out_large = false;\n let val0 = eval0.const_value();\n let val1 = eval1.const_value();\n match eval0 { &Operand::LargeConst(_) => {out_large = true; }, _ => {} };\n match eval1 { &Operand::LargeConst(_) => {out_large = true; }, _ => {} };\n let result = match op_name {\n \"+\" => {\n val0 + val1\n },\n \"-\" => {\n val0 - val1\n },\n \"*\" => {\n val0 * val1\n },\n \"\/\" => {\n val0 \/ val1\n },\n \"%\" => {\n val0 % val1\n },\n _ => panic!(\"unhandled op\")\n };\n if out_large {\n Operand::LargeConst(LargeConstant { value: result })\n } else {\n Operand::Const(Constant { value: result as u8 })\n }\n}\n\nfn eval_comp_op<'a>(eval0: &Operand, eval1: &Operand, op_name: &str, code: &mut Vec, \n temp_ids: &mut Vec, mut manager: &mut CodeGenManager<'a>) -> Operand {\n if count_constants(eval0, eval1) == 2 {\n return direct_eval_comp_op(eval0, eval1, op_name);\n }\n let save_var = Variable { id: temp_ids.pop().unwrap() };\n let label = format!(\"expr_{}\", manager.ids_expr.start_next());\n let const_true = Operand::new_const(1);\n let const_false = Operand::new_const(0);\n match op_name {\n \"is\" | \"==\" | \"eq\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true});\n code.push(ZOP::JE{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n },\n \"neq\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n code.push(ZOP::JE{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true});\n },\n \"<\" | \"lt\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true });\n code.push(ZOP::JL{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n 
},\n \"<=\" | \"lte\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n code.push(ZOP::JG{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true});\n },\n \">=\" | \"gte\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n code.push(ZOP::JL{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true});\n },\n \">\" | \"gt\" => {\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_true});\n code.push(ZOP::JG{operand1: eval0.clone(), operand2: eval1.clone(), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: const_false});\n },\n _ => panic!(\"unhandled op\")\n };\n code.push(ZOP::Label {name: label.to_string()});\n free_var_if_temp(eval0, temp_ids);\n free_var_if_temp(eval1, temp_ids);\n Operand::Var(save_var)\n}\n\n\/\/\/ Directly evaluates the given compare operation.\n\/\/\/ Both operands must be constants.\nfn direct_eval_comp_op(eval0: &Operand, eval1: &Operand, op_name: &str) -> Operand {\n let val0 = eval0.const_value();\n let val1 = eval1.const_value();\n let result = match op_name {\n \"is\" | \"==\" | \"eq\" => { val0 == val1 },\n \"neq\" => { val0 != val1 },\n \"<\" | \"lt\" => { val0 < val1 },\n \"<=\" | \"lte\" => { val0 <= val1 },\n \">=\" | \"gte\" => { val0 >= val1 },\n \">\" | \"gt\" => { val0 > val1 },\n _ => panic!(\"unhandled op\")\n };\n if result {\n Operand::Const(Constant {value: 1})\n } else {\n Operand::Const(Constant {value: 0})\n }\n}\n\nfn eval_and_or(eval0: &Operand, eval1: &Operand, op_name: &str, code: &mut Vec, \n temp_ids: &mut Vec) -> Operand {\n if count_constants(&eval0, &eval1) == 2 {\n let mut out_large = false;\n let val0 = eval0.const_value();\n let val1 = eval1.const_value();\n match eval0 { &Operand::LargeConst(_) => {out_large = true; }, _ => {} };\n match eval1 { &Operand::LargeConst(_) => {out_large = true; }, _ => {} };\n let result = if op_name == \"or\" {\n val0 | val1\n } else {\n val0 & val1\n };\n if out_large {\n return Operand::LargeConst(LargeConstant { value: result })\n } else {\n return Operand::Const(Constant { value: result as u8 })\n }\n }\n \n let save_var = determine_save_var(eval0, eval1, temp_ids);\n if op_name == \"or\" {\n code.push(ZOP::Or{operand1: eval0.clone(), operand2: eval1.clone(), save_variable: save_var.clone()});\n } else {\n code.push(ZOP::And{operand1: eval0.clone(), operand2: eval1.clone(), save_variable: save_var.clone()});\n }\n free_var_if_both_temp(eval0, eval1, temp_ids);\n Operand::Var(save_var)\n}\n\nfn eval_not<'a>(eval: &Operand, code: &mut Vec,\n temp_ids: &mut Vec, mut manager: &mut CodeGenManager<'a>) -> Operand {\n if eval.is_const() {\n let val = eval.const_value();\n let result: u8 = if val > 0 { 0 } else { 1 };\n return Operand::Const(Constant { value: result });\n }\n let save_var = Variable { id: temp_ids.pop().unwrap() };\n let label = format!(\"expr_{}\", manager.ids_expr.start_next());\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: Operand::new_const(0)});\n code.push(ZOP::JG{operand1: eval.clone(), operand2: Operand::new_const(0), jump_to_label: label.to_string()});\n code.push(ZOP::StoreVariable{ variable: save_var.clone(), value: Operand::new_const(1)});\n code.push(ZOP::Label {name: label.to_string()});\n 
free_var_if_temp(eval, temp_ids);\n Operand::Var(save_var)\n}\n\nfn free_var_if_both_temp (eval0: &Operand, eval1: &Operand, temp_ids: &mut Vec) {\n match eval0 { \n &Operand::Var(ref var1) => {\n if CodeGenManager::is_temp_var(var1) {\n match eval1 {\n &Operand::Var(ref var2)=> {\n if CodeGenManager::is_temp_var(var2) {\n temp_ids.push(var2.id);\n }\n }, _ => {}\n }\n }\n }, _ => {}\n };\n}\n\nfn free_var_if_temp (operand: &Operand, temp_ids: &mut Vec) {\n match operand {\n &Operand::Var(ref var) => {\n if CodeGenManager::is_temp_var(var){\n temp_ids.push(var.id);\n }\n }, _ => {}\n }\n}\n\nfn determine_save_var(operand1: &Operand, operand2: &Operand, temp_ids: &mut Vec) -> Variable {\n match operand1 {\n &Operand::Var(ref var) => {\n if CodeGenManager::is_temp_var(var) {\n return var.clone();\n }\n }, _ => {}\n };\n match operand2 {\n &Operand::Var(ref var) => {\n if CodeGenManager::is_temp_var(var) {\n return var.clone();\n }\n }, _ => {}\n };\n return Variable { id: temp_ids.pop().unwrap() };\n}\n\nfn count_constants(operand1: &Operand, operand2: &Operand) -> u8 {\n let mut const_count: u8 = 0;\n if operand1.is_const() {\n const_count += 1;\n }\n if operand2.is_const() {\n const_count += 1;\n }\n const_count\n}\n\nfn boolstr_to_const(string: &str) -> Operand {\n match string {\n \"true\" => Operand::Const(Constant { value: 1 }),\n _ => Operand::Const(Constant { value: 0 })\n }\n}<|endoftext|>"} {"text":"[algo] fizzbuzz solution with one trick\/\/ Mathematical approach for the fizzbuzz\n\/\/\n\/\/ http:\/\/philcrissman.net\/posts\/eulers-fizzbuzz\/\nfn main() {\n (0..100).for_each(|number: i32| {\n let value = match number.pow(4) % 15 {\n 0 => \"FizzBuzz\".to_string(),\n 6 => \"Fizz\".to_string(),\n 10 => \"Buzz\".to_string(),\n _ => number.to_string(),\n };\n\n println!(\"For {} Value is {}\", number, value);\n })\n}\n<|endoftext|>"} {"text":"Add missing `simple.rs`\/\/ parng\/lib.rs\n\/\/\n\/\/ Copyright (c) 2016 Mozilla Foundation\n\n\/\/! A simple API that allocates an in-memory buffer and decodes into it.\n\nuse PngError;\nuse imageloader::{self, DataProvider, ImageLoader, InterlacingInfo, LevelOfDetail, LoadProgress};\nuse imageloader::{ScanlinesForPrediction, ScanlinesForRgbaConversion, UninitializedExtension};\nuse metadata::ColorType;\nuse std::io::{Read, Seek};\nuse std::mem;\nuse std::sync::mpsc::{self, Receiver, Sender};\n\nstruct MemoryDataProvider {\n rgba_pixels: Vec,\n indexed_pixels: Vec,\n rgba_aligned_stride: usize,\n indexed_aligned_stride: usize,\n data_sender: Sender>,\n}\n\nimpl MemoryDataProvider {\n #[inline(never)]\n pub fn new(width: u32, height: u32, indexed: bool) -> (MemoryDataProvider, Receiver>) {\n let rgba_aligned_stride = imageloader::align(width as usize * 4);\n let indexed_aligned_stride = imageloader::align(width as usize * 4);\n let (data_sender, data_receiver) = mpsc::channel();\n\n \/\/ We make room for eight pixels past the end in case the final scanline consists of a\n \/\/ level of detail with a nonzero offset. 
Tricky!\n let rgba_length = rgba_aligned_stride * (height as usize) + 8 * 4;\n let indexed_length = if indexed {\n indexed_aligned_stride * (height as usize) + 8 + 1\n } else {\n 0\n };\n\n let (mut rgba_pixels, mut indexed_pixels) = (vec![], vec![]);\n unsafe {\n rgba_pixels.extend_with_uninitialized(rgba_length);\n indexed_pixels.extend_with_uninitialized(indexed_length)\n }\n\n let data_provider = MemoryDataProvider {\n rgba_pixels: rgba_pixels,\n indexed_pixels: indexed_pixels,\n rgba_aligned_stride: rgba_aligned_stride,\n indexed_aligned_stride: indexed_aligned_stride,\n data_sender: data_sender,\n };\n (data_provider, data_receiver)\n }\n}\n\nimpl DataProvider for MemoryDataProvider {\n fn fetch_scanlines_for_prediction<'a>(&'a mut self,\n reference_scanline: Option,\n current_scanline: u32,\n lod: LevelOfDetail,\n indexed: bool)\n -> ScanlinesForPrediction {\n let buffer_color_depth = buffer_color_depth(indexed);\n let reference_scanline = reference_scanline.map(|reference_scanline| {\n InterlacingInfo::new(reference_scanline, buffer_color_depth, lod)\n });\n let current_scanline = InterlacingInfo::new(current_scanline, buffer_color_depth, lod);\n\n let aligned_stride = if indexed {\n self.indexed_aligned_stride\n } else {\n self.rgba_aligned_stride\n };\n\n let split_point = aligned_stride * (current_scanline.y as usize);\n let dest_pixels = if indexed {\n &mut self.indexed_pixels\n } else {\n &mut self.rgba_pixels\n };\n let (head, tail) = dest_pixels.split_at_mut(split_point);\n let head_length = head.len();\n let reference_scanline_data = match reference_scanline {\n None => None,\n Some(reference_scanline) => {\n debug_assert!(current_scanline.stride == reference_scanline.stride);\n let start = (reference_scanline.y as usize) * aligned_stride +\n (reference_scanline.offset as usize);\n let end = start + aligned_stride;\n let slice = &mut head[start..end];\n Some(slice)\n }\n };\n let start = (current_scanline.y as usize) * aligned_stride +\n (current_scanline.offset as usize) - head_length;\n let end = start + aligned_stride;\n let current_scanline_data = &mut tail[start..end];\n ScanlinesForPrediction {\n reference_scanline: reference_scanline_data,\n current_scanline: current_scanline_data,\n stride: current_scanline.stride,\n }\n }\n\n fn prediction_complete_for_scanline(&mut self, _: u32, _: LevelOfDetail) {}\n\n fn fetch_scanlines_for_rgba_conversion<'a>(&'a mut self,\n scanline: u32,\n lod: LevelOfDetail,\n indexed: bool)\n -> ScanlinesForRgbaConversion<'a> {\n let rgba_scanline = InterlacingInfo::new(scanline, 32, lod);\n let indexed_scanline = if indexed {\n Some(InterlacingInfo::new(scanline, 8, lod))\n } else {\n None\n };\n let rgba_aligned_stride = self.rgba_aligned_stride;\n let indexed_aligned_stride = self.indexed_aligned_stride;\n ScanlinesForRgbaConversion {\n rgba_scanline: &mut self.rgba_pixels[(rgba_aligned_stride * scanline as usize)..],\n indexed_scanline: if indexed_scanline.is_some() {\n Some(&self.indexed_pixels[(indexed_aligned_stride * scanline as usize)..])\n } else {\n None\n },\n rgba_stride: rgba_scanline.stride,\n indexed_stride: indexed_scanline.map(|indexed_scanline| indexed_scanline.stride),\n }\n }\n\n fn rgba_conversion_complete_for_scanline(&mut self, _: u32, _: LevelOfDetail) {}\n\n fn finished(&mut self) {\n self.data_sender.send(mem::replace(&mut self.rgba_pixels, vec![])).unwrap()\n }\n}\n\n\/\/\/ An in-memory decoded image in big-endian RGBA format, 32 bits per pixel.\npub struct Image {\n \/\/\/ The width of the image, in pixels.\n 
pub width: u32,\n \/\/\/ The height of the image, in pixels.\n pub height: u32,\n \/\/\/ The number of bytes between successive scanlines. This may be any value greater than or\n \/\/\/ equal to `4 * width`.\n \/\/\/\n \/\/\/ Because of SIMD alignment restrictions, `parng` may well choose a value greater than `4 *\n \/\/\/ width` here.\n pub stride: usize,\n \/\/\/ The actual pixels.\n pub pixels: Vec,\n}\n\nimpl Image {\n \/\/\/ Allocates space for and loads a PNG image stream from a reader into memory.\n \/\/\/\n \/\/\/ The returned image is big-endian, 32 bits per pixel RGBA.\n \/\/\/\n \/\/\/ This method does not return until the image is fully loaded. If you need a different\n \/\/\/ in-memory representation, or you need to display the image before it's fully loaded,\n \/\/\/ consider using the `imageloader::ImageLoader` API instead.\n pub fn load(input: &mut I) -> Result where I: Read + Seek {\n let mut image = ImageLoader::new();\n loop {\n match try!(image.add_data(input)) {\n LoadProgress::NeedDataProviderAndMoreData => break,\n LoadProgress::NeedMoreData => {}\n LoadProgress::Finished => panic!(\"Image ended before metadata was read!\"),\n }\n }\n\n let (dimensions, indexed) = {\n let metadata = image.metadata().as_ref().unwrap();\n (metadata.dimensions, metadata.color_type == ColorType::Indexed)\n };\n let (data_provider, data_receiver) = MemoryDataProvider::new(dimensions.width,\n dimensions.height,\n indexed);\n let aligned_stride = data_provider.rgba_aligned_stride;\n image.set_data_provider(Box::new(data_provider));\n\n while let LoadProgress::NeedMoreData = try!(image.add_data(input)) {}\n try!(image.wait_until_finished());\n\n let pixels = data_receiver.recv().unwrap();\n Ok(Image {\n width: dimensions.width,\n height: dimensions.height,\n stride: aligned_stride,\n pixels: pixels,\n })\n }\n}\n\nfn buffer_color_depth(indexed: bool) -> u8 {\n if indexed {\n 8\n } else {\n 32\n }\n}\n\n<|endoftext|>"} {"text":"Added Pi file and numbers folderuse std::f32::to_str_digits;\nuse std::num::atan;\n\nfn get_pi(num :uint) ->~str {\n to_str_digits(16f32*atan(1f32\/5f32)-4f32*atan(1f32\/239f32), num)\n}\n\n#[test]\nfn get_pi_test() {\n assert!(get_pi(12) == to_str_digits(3.1415927410125732421875f32, 12), \"{} {}\", get_pi(12), 3.141592741013);\n}\n\n#[test]\nfn get_pi_test2() {\n assert!(get_pi(0) == to_str_digits(3.1415927410125732421875f32, 0), \"{} {}\", get_pi(0), 3);\n}\n\nfn main() {\n let pi = get_pi(12);\n println!(\"{}\", pi);\n}<|endoftext|>"} {"text":"stop considering `Rc` cycles (leaks) as unsafe<|endoftext|>"} {"text":"Got it working!!!<|endoftext|>"} {"text":"\/\/ EBML enum definitions and utils shared by the encoder and decoder\n\nconst tag_paths: uint = 0x01u;\n\nconst tag_items: uint = 0x02u;\n\nconst tag_paths_data: uint = 0x03u;\n\nconst tag_paths_data_name: uint = 0x04u;\n\nconst tag_paths_data_item: uint = 0x05u;\n\nconst tag_paths_data_mod: uint = 0x06u;\n\nconst tag_def_id: uint = 0x07u;\n\nconst tag_items_data: uint = 0x08u;\n\nconst tag_items_data_item: uint = 0x09u;\n\nconst tag_items_data_item_family: uint = 0x0au;\n\nconst tag_items_data_item_ty_param_bounds: uint = 0x0bu;\n\nconst tag_items_data_item_type: uint = 0x0cu;\n\nconst tag_items_data_item_symbol: uint = 0x0du;\n\nconst tag_items_data_item_variant: uint = 0x0eu;\n\nconst tag_items_data_parent_item: uint = 0x0fu;\n\nconst tag_index: uint = 0x11u;\n\nconst tag_index_buckets: uint = 0x12u;\n\nconst tag_index_buckets_bucket: uint = 0x13u;\n\nconst tag_index_buckets_bucket_elt: uint = 0x14u;\n\nconst 
tag_index_table: uint = 0x15u;\n\nconst tag_meta_item_name_value: uint = 0x18u;\n\nconst tag_meta_item_name: uint = 0x19u;\n\nconst tag_meta_item_value: uint = 0x20u;\n\nconst tag_attributes: uint = 0x21u;\n\nconst tag_attribute: uint = 0x22u;\n\nconst tag_meta_item_word: uint = 0x23u;\n\nconst tag_meta_item_list: uint = 0x24u;\n\n\/\/ The list of crates that this crate depends on\nconst tag_crate_deps: uint = 0x25u;\n\n\/\/ A single crate dependency\nconst tag_crate_dep: uint = 0x26u;\n\nconst tag_crate_hash: uint = 0x28u;\n\nconst tag_parent_item: uint = 0x29u;\n\nconst tag_crate_dep_name: uint = 0x2au;\nconst tag_crate_dep_hash: uint = 0x2bu;\nconst tag_crate_dep_vers: uint = 0x2cu;\n\nconst tag_mod_impl: uint = 0x30u;\n\nconst tag_item_trait_method: uint = 0x31u;\nconst tag_impl_trait: uint = 0x32u;\n\n\/\/ discriminator value for variants\nconst tag_disr_val: uint = 0x34u;\n\n\/\/ used to encode ast_map::path and ast_map::path_elt\nconst tag_path: uint = 0x40u;\nconst tag_path_len: uint = 0x41u;\nconst tag_path_elt_mod: uint = 0x42u;\nconst tag_path_elt_name: uint = 0x43u;\nconst tag_item_field: uint = 0x44u;\nconst tag_class_mut: uint = 0x45u;\n\nconst tag_region_param: uint = 0x46u;\nconst tag_mod_impl_trait: uint = 0x47u;\n\/*\n trait items contain tag_item_trait_method elements,\n impl items contain tag_item_impl_method elements, and classes\n have both. That's because some code treats classes like traits,\n and other code treats them like impls. Because classes can contain\n both, tag_item_trait_method and tag_item_impl_method have to be two\n different tags.\n *\/\nconst tag_item_impl_method: uint = 0x48u;\nconst tag_item_dtor: uint = 0x49u;\nconst tag_paths_foreign_path: uint = 0x4a;\nconst tag_item_trait_method_self_ty: uint = 0x4b;\nconst tag_item_trait_method_self_ty_region: uint = 0x4c;\n\n\/\/ Reexports are found within module tags. 
Each reexport contains def_ids\n\/\/ and names.\nconst tag_items_data_item_reexport: uint = 0x4d;\nconst tag_items_data_item_reexport_def_id: uint = 0x4e;\nconst tag_items_data_item_reexport_name: uint = 0x4f;\n\n\/\/ used to encode crate_ctxt side tables\nenum astencode_tag { \/\/ Reserves 0x50 -- 0x6f\n tag_ast = 0x50,\n\n tag_tree = 0x51,\n\n tag_id_range = 0x52,\n\n tag_table = 0x53,\n tag_table_id = 0x54,\n tag_table_val = 0x55,\n tag_table_def = 0x56,\n tag_table_node_type = 0x57,\n tag_table_node_type_subst = 0x58,\n tag_table_freevars = 0x59,\n tag_table_tcache = 0x5a,\n tag_table_param_bounds = 0x5b,\n tag_table_inferred_modes = 0x5c,\n tag_table_mutbl = 0x5d,\n tag_table_last_use = 0x5e,\n tag_table_spill = 0x5f,\n tag_table_method_map = 0x60,\n tag_table_vtable_map = 0x61,\n tag_table_borrowings = 0x62\n}\n\n\/\/ djb's cdb hashes.\nfn hash_node_id(&&node_id: int) -> uint {\n return 177573u ^ (node_id as uint);\n}\n\nfn hash_path(&&s: ~str) -> uint {\n let mut h = 5381u;\n for str::each(s) |ch| { h = (h << 5u) + h ^ (ch as uint); }\n return h;\n}\n\ntype link_meta = {name: @~str, vers: @~str, extras_hash: ~str};\n\nrustc: Remove a bunch of unused metadata tags from common\/\/ EBML enum definitions and utils shared by the encoder and decoder\n\nconst tag_items: uint = 0x02u;\n\nconst tag_paths_data_name: uint = 0x04u;\n\nconst tag_def_id: uint = 0x07u;\n\nconst tag_items_data: uint = 0x08u;\n\nconst tag_items_data_item: uint = 0x09u;\n\nconst tag_items_data_item_family: uint = 0x0au;\n\nconst tag_items_data_item_ty_param_bounds: uint = 0x0bu;\n\nconst tag_items_data_item_type: uint = 0x0cu;\n\nconst tag_items_data_item_symbol: uint = 0x0du;\n\nconst tag_items_data_item_variant: uint = 0x0eu;\n\nconst tag_items_data_parent_item: uint = 0x0fu;\n\nconst tag_index: uint = 0x11u;\n\nconst tag_index_buckets: uint = 0x12u;\n\nconst tag_index_buckets_bucket: uint = 0x13u;\n\nconst tag_index_buckets_bucket_elt: uint = 0x14u;\n\nconst tag_index_table: uint = 0x15u;\n\nconst tag_meta_item_name_value: uint = 0x18u;\n\nconst tag_meta_item_name: uint = 0x19u;\n\nconst tag_meta_item_value: uint = 0x20u;\n\nconst tag_attributes: uint = 0x21u;\n\nconst tag_attribute: uint = 0x22u;\n\nconst tag_meta_item_word: uint = 0x23u;\n\nconst tag_meta_item_list: uint = 0x24u;\n\n\/\/ The list of crates that this crate depends on\nconst tag_crate_deps: uint = 0x25u;\n\n\/\/ A single crate dependency\nconst tag_crate_dep: uint = 0x26u;\n\nconst tag_crate_hash: uint = 0x28u;\n\nconst tag_parent_item: uint = 0x29u;\n\nconst tag_crate_dep_name: uint = 0x2au;\nconst tag_crate_dep_hash: uint = 0x2bu;\nconst tag_crate_dep_vers: uint = 0x2cu;\n\nconst tag_mod_impl: uint = 0x30u;\n\nconst tag_item_trait_method: uint = 0x31u;\nconst tag_impl_trait: uint = 0x32u;\n\n\/\/ discriminator value for variants\nconst tag_disr_val: uint = 0x34u;\n\n\/\/ used to encode ast_map::path and ast_map::path_elt\nconst tag_path: uint = 0x40u;\nconst tag_path_len: uint = 0x41u;\nconst tag_path_elt_mod: uint = 0x42u;\nconst tag_path_elt_name: uint = 0x43u;\nconst tag_item_field: uint = 0x44u;\nconst tag_class_mut: uint = 0x45u;\n\nconst tag_region_param: uint = 0x46u;\nconst tag_mod_impl_trait: uint = 0x47u;\n\/*\n trait items contain tag_item_trait_method elements,\n impl items contain tag_item_impl_method elements, and classes\n have both. That's because some code treats classes like traits,\n and other code treats them like impls. 
Because classes can contain\n both, tag_item_trait_method and tag_item_impl_method have to be two\n different tags.\n *\/\nconst tag_item_impl_method: uint = 0x48u;\nconst tag_item_dtor: uint = 0x49u;\nconst tag_item_trait_method_self_ty: uint = 0x4b;\nconst tag_item_trait_method_self_ty_region: uint = 0x4c;\n\n\/\/ Reexports are found within module tags. Each reexport contains def_ids\n\/\/ and names.\nconst tag_items_data_item_reexport: uint = 0x4d;\nconst tag_items_data_item_reexport_def_id: uint = 0x4e;\nconst tag_items_data_item_reexport_name: uint = 0x4f;\n\n\/\/ used to encode crate_ctxt side tables\nenum astencode_tag { \/\/ Reserves 0x50 -- 0x6f\n tag_ast = 0x50,\n\n tag_tree = 0x51,\n\n tag_id_range = 0x52,\n\n tag_table = 0x53,\n tag_table_id = 0x54,\n tag_table_val = 0x55,\n tag_table_def = 0x56,\n tag_table_node_type = 0x57,\n tag_table_node_type_subst = 0x58,\n tag_table_freevars = 0x59,\n tag_table_tcache = 0x5a,\n tag_table_param_bounds = 0x5b,\n tag_table_inferred_modes = 0x5c,\n tag_table_mutbl = 0x5d,\n tag_table_last_use = 0x5e,\n tag_table_spill = 0x5f,\n tag_table_method_map = 0x60,\n tag_table_vtable_map = 0x61,\n tag_table_borrowings = 0x62\n}\n\n\/\/ djb's cdb hashes.\nfn hash_node_id(&&node_id: int) -> uint {\n return 177573u ^ (node_id as uint);\n}\n\nfn hash_path(&&s: ~str) -> uint {\n let mut h = 5381u;\n for str::each(s) |ch| { h = (h << 5u) + h ^ (ch as uint); }\n return h;\n}\n\ntype link_meta = {name: @~str, vers: @~str, extras_hash: ~str};\n\n<|endoftext|>"} {"text":"Set all dictionary names to lowercase, as querying won't work otherwise.<|endoftext|>"} {"text":"Remove the `Datum::Json` variant<|endoftext|>"} {"text":"use std::url::Url;\nuse std::{cmp, mem, ptr};\nuse std::get_slice::GetSlice;\nuse std::io::*;\nuse std::process::Command;\nuse std::ops::DerefMut;\nuse std::syscall::SysError;\nuse std::syscall::ENOENT;\nuse std::to_num::ToNum;\n\nuse orbital::event::Event;\nuse orbital::Point;\nuse orbital::Size;\n\nuse self::display::Display;\nuse self::session::Session;\nuse self::window::Window;\n\npub mod display;\npub mod package;\npub mod session;\npub mod window;\n\npub static mut session_ptr: *mut Session = 0 as *mut Session;\n\n\/\/\/ A window resource\npub struct Resource {\n \/\/\/ The window\n pub window: Box,\n \/\/\/ Seek point\n pub seek: usize,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Result> {\n Ok(box Resource {\n window: Window::new(self.window.point,\n self.window.size,\n self.window.title.clone()),\n seek: self.seek,\n })\n }\n\n \/\/\/ Return the url of this resource\n pub fn path(&self) -> Result {\n Ok(format!(\"orbital:\/\/\/{}\/{}\/{}\/{}\/{}\",\n self.window.point.x,\n self.window.point.y,\n self.window.size.width,\n self.window.size.height,\n self.window.title))\n }\n\n \/\/\/ Read data to buffer\n pub fn read(&mut self, buf: &mut [u8]) -> Result {\n \/\/ Read events from window\n let mut i = 0;\n while buf.len() - i >= mem::size_of::() {\n match self.window.poll() {\n Some(event) => {\n unsafe { ptr::write(buf.as_ptr().offset(i as isize) as *mut Event, event) };\n i += mem::size_of::();\n }\n None => break,\n }\n }\n\n Ok(i)\n }\n\n \/\/\/ Write to resource\n pub fn write(&mut self, buf: &[u8]) -> Result {\n let content = &mut self.window.content;\n\n let size = cmp::min(content.size - self.seek, buf.len());\n unsafe {\n Display::copy_run(buf.as_ptr() as usize, content.offscreen + self.seek, size);\n }\n self.seek += size;\n\n Ok(size)\n }\n\n \/\/\/ Seek\n pub fn seek(&mut self, pos: SeekFrom) -> Result {\n let end = 
self.window.content.size;\n\n self.seek = match pos {\n SeekFrom::Start(offset) => cmp::min(end as u64, cmp::max(0, offset)) as usize,\n SeekFrom::Current(offset) =>\n cmp::min(end as i64, cmp::max(0, self.seek as i64 + offset)) as usize,\n SeekFrom::End(offset) =>\n cmp::min(end as i64, cmp::max(0, end as i64 + offset)) as usize,\n };\n\n Ok(self.seek as u64)\n }\n\n \/\/\/ Sync the resource, should flip\n pub fn sync(&mut self) -> Result<()> {\n self.window.redraw();\n Ok(())\n }\n}\n\n\/\/\/ A window scheme\npub struct Scheme {\n pub session: Box,\n pub next_x: isize,\n pub next_y: isize,\n}\n\nimpl Scheme {\n pub fn new() -> Box {\n println!(\"- Starting Orbital\");\n println!(\" Console: Press F1\");\n println!(\" Desktop: Press F2\");\n let mut ret = box Scheme {\n session: Session::new(),\n next_x: 0,\n next_y: 0,\n };\n unsafe { session_ptr = ret.session.deref_mut() };\n ret\n }\n\n pub fn open(&mut self, url_str: &str, _: usize) -> Result> {\n \/\/ window:\/\/host\/path\/path\/path is the path type we're working with.\n let url = Url::from_str(url_str);\n\n let host = url.host();\n if host.is_empty() {\n let path = url.path_parts();\n let mut pointx = match path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n let mut pointy = match path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n let size_width = match path.get(2) {\n Some(w) => w.to_num(),\n None => 100,\n };\n let size_height = match path.get(3) {\n Some(h) => h.to_num(),\n None => 100,\n };\n\n let mut title = match path.get(4) {\n Some(t) => t.clone(),\n None => String::new(),\n };\n for i in 5..path.len() {\n if let Some(t) = path.get(i) {\n title = title + \"\/\" + t;\n }\n }\n\n if pointx <= 0 || pointy <= 0 {\n if self.next_x > self.session.display.width as isize - size_width as isize {\n self.next_x = 0;\n }\n self.next_x += 32;\n pointx = self.next_x as i32;\n\n if self.next_y > self.session.display.height as isize - size_height as isize {\n self.next_y = 0;\n }\n self.next_y += 32;\n pointy = self.next_y as i32;\n }\n\n Ok(box Resource {\n window: Window::new(Point::new(pointx, pointy),\n Size::new(size_width, size_height),\n title),\n seek: 0,\n })\n } else if host == \"launch\" {\n let path = url.path();\n\n for package in self.session.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n\n Err(SysError::new(ENOENT))\n } else {\n Err(SysError::new(ENOENT))\n }\n }\n\n pub fn event(&mut self, event: &Event) {\n self.session.event(event);\n\n unsafe { self.session.redraw() };\n }\n}\n\n\/\/ TODO: This is a hack and it will go away\n#[cold]\n#[inline(never)]\n#[no_mangle]\npub unsafe extern \"C\" fn _event(scheme: *mut Scheme, event: *const Event) {\n (*scheme).event(&*event);\n}\nFixes #439 - Launch binaries from file manager if they end in .binuse std::url::Url;\nuse std::{cmp, mem, ptr};\nuse std::get_slice::GetSlice;\nuse std::io::*;\nuse std::process::Command;\nuse std::ops::DerefMut;\nuse std::syscall::SysError;\nuse std::syscall::ENOENT;\nuse std::to_num::ToNum;\n\nuse orbital::event::Event;\nuse orbital::Point;\nuse orbital::Size;\n\nuse self::display::Display;\nuse 
self::session::Session;\nuse self::window::Window;\n\npub mod display;\npub mod package;\npub mod session;\npub mod window;\n\npub static mut session_ptr: *mut Session = 0 as *mut Session;\n\n\/\/\/ A window resource\npub struct Resource {\n \/\/\/ The window\n pub window: Box,\n \/\/\/ Seek point\n pub seek: usize,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Result> {\n Ok(box Resource {\n window: Window::new(self.window.point,\n self.window.size,\n self.window.title.clone()),\n seek: self.seek,\n })\n }\n\n \/\/\/ Return the url of this resource\n pub fn path(&self) -> Result {\n Ok(format!(\"orbital:\/\/\/{}\/{}\/{}\/{}\/{}\",\n self.window.point.x,\n self.window.point.y,\n self.window.size.width,\n self.window.size.height,\n self.window.title))\n }\n\n \/\/\/ Read data to buffer\n pub fn read(&mut self, buf: &mut [u8]) -> Result {\n \/\/ Read events from window\n let mut i = 0;\n while buf.len() - i >= mem::size_of::() {\n match self.window.poll() {\n Some(event) => {\n unsafe { ptr::write(buf.as_ptr().offset(i as isize) as *mut Event, event) };\n i += mem::size_of::();\n }\n None => break,\n }\n }\n\n Ok(i)\n }\n\n \/\/\/ Write to resource\n pub fn write(&mut self, buf: &[u8]) -> Result {\n let content = &mut self.window.content;\n\n let size = cmp::min(content.size - self.seek, buf.len());\n unsafe {\n Display::copy_run(buf.as_ptr() as usize, content.offscreen + self.seek, size);\n }\n self.seek += size;\n\n Ok(size)\n }\n\n \/\/\/ Seek\n pub fn seek(&mut self, pos: SeekFrom) -> Result {\n let end = self.window.content.size;\n\n self.seek = match pos {\n SeekFrom::Start(offset) => cmp::min(end as u64, cmp::max(0, offset)) as usize,\n SeekFrom::Current(offset) =>\n cmp::min(end as i64, cmp::max(0, self.seek as i64 + offset)) as usize,\n SeekFrom::End(offset) =>\n cmp::min(end as i64, cmp::max(0, end as i64 + offset)) as usize,\n };\n\n Ok(self.seek as u64)\n }\n\n \/\/\/ Sync the resource, should flip\n pub fn sync(&mut self) -> Result<()> {\n self.window.redraw();\n Ok(())\n }\n}\n\n\/\/\/ A window scheme\npub struct Scheme {\n pub session: Box,\n pub next_x: isize,\n pub next_y: isize,\n}\n\nimpl Scheme {\n pub fn new() -> Box {\n println!(\"- Starting Orbital\");\n println!(\" Console: Press F1\");\n println!(\" Desktop: Press F2\");\n let mut ret = box Scheme {\n session: Session::new(),\n next_x: 0,\n next_y: 0,\n };\n unsafe { session_ptr = ret.session.deref_mut() };\n ret\n }\n\n pub fn open(&mut self, url_str: &str, _: usize) -> Result> {\n \/\/ window:\/\/host\/path\/path\/path is the path type we're working with.\n let url = Url::from_str(url_str);\n\n let host = url.host();\n if host.is_empty() {\n let path = url.path_parts();\n let mut pointx = match path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n let mut pointy = match path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n let size_width = match path.get(2) {\n Some(w) => w.to_num(),\n None => 100,\n };\n let size_height = match path.get(3) {\n Some(h) => h.to_num(),\n None => 100,\n };\n\n let mut title = match path.get(4) {\n Some(t) => t.clone(),\n None => String::new(),\n };\n for i in 5..path.len() {\n if let Some(t) = path.get(i) {\n title = title + \"\/\" + t;\n }\n }\n\n if pointx <= 0 || pointy <= 0 {\n if self.next_x > self.session.display.width as isize - size_width as isize {\n self.next_x = 0;\n }\n self.next_x += 32;\n pointx = self.next_x as i32;\n\n if self.next_y > self.session.display.height as isize - size_height as isize {\n self.next_y = 0;\n }\n self.next_y += 32;\n 
pointy = self.next_y as i32;\n }\n\n Ok(box Resource {\n window: Window::new(Point::new(pointx, pointy),\n Size::new(size_width, size_height),\n title),\n seek: 0,\n })\n } else if host == \"launch\" {\n let path = url.path();\n\n if path.ends_with(\".bin\") {\n if Command::new(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", path);\n }\n } else {\n for package in self.session.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n }\n\n Err(SysError::new(ENOENT))\n } else {\n Err(SysError::new(ENOENT))\n }\n }\n\n pub fn event(&mut self, event: &Event) {\n self.session.event(event);\n\n unsafe { self.session.redraw() };\n }\n}\n\n\/\/ TODO: This is a hack and it will go away\n#[cold]\n#[inline(never)]\n#[no_mangle]\npub unsafe extern \"C\" fn _event(scheme: *mut Scheme, event: *const Event) {\n (*scheme).event(&*event);\n}\n<|endoftext|>"} {"text":"an empy message<|endoftext|>"} {"text":"Remove a few warnings from the Vulkan validation layers installation method<|endoftext|>"} {"text":"Add rust code for ocr-numbers.#![feature(str_checked_slicing)]\n\nconst DIGITS: [&str; 10] = [\n \" _ | ||_| \", \/\/ 0\n \" | | \", \/\/ 1\n \" _ _||_ \", \/\/ 2\n \" _ _| _| \", \/\/ 3\n \" |_| | \", \/\/ 4\n \" _ |_ _| \", \/\/ 5\n \" _ |_ |_| \", \/\/ 6\n \" _ | | \", \/\/ 7\n \" _ |_||_| \", \/\/ 8\n \" _ |_| _| \", \/\/ 9\n];\n\nconst DIGITS_ROWS: usize = 3;\nconst DIGITS_LINES: usize = 4;\n\npub fn convert(input: &str) -> Result {\n let v = input.split(|x| x == '\\n').collect::>();\n\n let lines = v.len();\n if lines == 0 || lines % DIGITS_LINES != 0 {\n return Err(\"Invalid lines\".to_string());\n }\n\n let rows = v[0].len();\n if rows == 0 || rows % DIGITS_ROWS != 0 {\n return Err(format!(\"Invalid {} row at line 1\", rows));\n }\n if v.iter().any(|x| x.len() != rows) {\n return Err(\"Invalid rows\".to_string());\n }\n\n let mut result = String::new();\n for i in 0..lines \/ DIGITS_LINES {\n for j in 0..rows \/ DIGITS_ROWS {\n let row = j * DIGITS_ROWS;\n let line = i * DIGITS_LINES;\n\n let number = format!(\n \"{}{}{}{}\",\n v[line].get(row..row + DIGITS_ROWS).unwrap(),\n v[line + 1].get(row..row + DIGITS_ROWS).unwrap(),\n v[line + 2].get(row..row + DIGITS_ROWS).unwrap(),\n v[line + 3].get(row..row + DIGITS_ROWS).unwrap()\n );\n\n let index: u8 = DIGITS\n .iter()\n .position(|&x| x == number)\n .map_or_else(|| b'?', |v| v as u8 + b'0');\n\n result.push(index as char);\n }\n\n result.push(',');\n }\n\n result.pop();\n\n Ok(result)\n}\n<|endoftext|>"} {"text":"use inflector::Inflector;\n\nuse botocore::{Service, Shape, Operation};\nuse std::ascii::AsciiExt;\nuse self::ec2::Ec2Generator;\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod ec2;\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self, service: &Service) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option {\n None\n }\n\n fn 
generate_error_types(&self, _service: &Service) -> Option {\n None\n }\n\n fn timestamp_type(&self) -> &'static str;\n}\n\npub fn generate_source(service: &Service) -> String {\n match &service.metadata.protocol[..] {\n \"json\" => generate(service, JsonGenerator),\n \"ec2\" => generate(service, Ec2Generator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate
<P>
(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n {error_types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(service),\n types = generate_types(service, &protocol_generator),\n error_types = protocol_generator.generate_error_types(service).unwrap_or(\"\".to_string()),\n )\n}\n\nfn generate_client
<P>
(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}
<P>
where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: region::Region,\n }}\n\n impl
<P>
{type_name}
<P>
where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: region::Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => service.metadata.service_full_name.as_ref()\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, capitalize_first(shape.member().to_string()))\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n capitalize_first(shape.key().to_string()),\n capitalize_first(shape.value().to_string()),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str, for_timestamps: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec\",\n \"boolean\" => \"bool\",\n \"double\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n \"timestamp\" => for_timestamps,\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types
<P>
(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n let type_name = &capitalize_first(name.to_string());\n\n if type_name == \"String\" {\n return protocol_generator.generate_support_types(type_name, shape, &service);\n }\n\n if shape.exception() && service.typed_errors() {\n return None;\n }\n\n\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\\\",\"\\\\\\\\\").replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] {\n \"structure\" => parts.push(generate_struct(service, type_name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(type_name, shape)),\n \"list\" => parts.push(generate_list(type_name, shape)),\n shape_type => parts.push(generate_primitive_type(type_name, shape_type, protocol_generator.timestamp_type())),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(type_name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::>().join(\"\\n\")\n}\n\n\n\nfn generate_struct
<P>
(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\npub fn generate_field_name(member_name: &str) -> String {\n let name = member_name.to_snake_case();\n if name == \"return\" || name == \"type\" {\n name + \"_\"\n } else {\n name\n }\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = generate_field_name(member_name);\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\\\",\"\\\\\\\\\").replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n default,\n )]\".to_owned()\n );\n }\n }\n\n let type_name = capitalize_first(member.shape.to_string());\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, type_name));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, type_name));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, type_name));\n }\n\n lines.join(\"\\n\")\n }).collect::>().join(\"\\n\")\n}\n\nimpl Operation {\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.name)\n }\n}\n\nfn capitalize_first(word: String) -> String {\n assert!(word.is_ascii());\n\n let mut result = word.into_bytes();\n result[0] = result[0].to_ascii_uppercase();\n\n String::from_utf8(result).unwrap()\n}Don't serialize Option to `\"foo\": null` if the Option is None. AWS hates that.use inflector::Inflector;\n\nuse botocore::{Service, Shape, Operation};\nuse std::ascii::AsciiExt;\nuse self::ec2::Ec2Generator;\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod ec2;\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self, service: &Service) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option {\n None\n }\n\n fn generate_error_types(&self, _service: &Service) -> Option {\n None\n }\n\n fn timestamp_type(&self) -> &'static str;\n}\n\npub fn generate_source(service: &Service) -> String {\n match &service.metadata.protocol[..] {\n \"json\" => generate(service, JsonGenerator),\n \"ec2\" => generate(service, Ec2Generator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate
<P>
(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n {error_types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(service),\n types = generate_types(service, &protocol_generator),\n error_types = protocol_generator.generate_error_types(service).unwrap_or(\"\".to_string()),\n )\n}\n\nfn generate_client
<P>
(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}
<P>
where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: region::Region,\n }}\n\n impl
<P>
{type_name}
<P>
where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: region::Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => service.metadata.service_full_name.as_ref()\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, capitalize_first(shape.member().to_string()))\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n capitalize_first(shape.key().to_string()),\n capitalize_first(shape.value().to_string()),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str, for_timestamps: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec\",\n \"boolean\" => \"bool\",\n \"double\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n \"timestamp\" => for_timestamps,\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types
<P>
(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n let type_name = &capitalize_first(name.to_string());\n\n if type_name == \"String\" {\n return protocol_generator.generate_support_types(type_name, shape, &service);\n }\n\n if shape.exception() && service.typed_errors() {\n return None;\n }\n\n\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\\\",\"\\\\\\\\\").replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] {\n \"structure\" => parts.push(generate_struct(service, type_name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(type_name, shape)),\n \"list\" => parts.push(generate_list(type_name, shape)),\n shape_type => parts.push(generate_primitive_type(type_name, shape_type, protocol_generator.timestamp_type())),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(type_name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::>().join(\"\\n\")\n}\n\n\n\nfn generate_struct
<P>
(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\npub fn generate_field_name(member_name: &str) -> String {\n let name = member_name.to_snake_case();\n if name == \"return\" || name == \"type\" {\n name + \"_\"\n } else {\n name\n }\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = generate_field_name(member_name);\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\\\",\"\\\\\\\\\").replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n default,\n )]\".to_owned()\n );\n } else if shape_type == \"boolean\" && !shape.required(member_name) {\n lines.push(\"#[serde(skip_serializing_if=\\\"::std::option::Option::is_none\\\")]\".to_owned());\n }\n }\n\n let type_name = capitalize_first(member.shape.to_string());\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, type_name));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, type_name));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, type_name));\n }\n\n lines.join(\"\\n\")\n }).collect::>().join(\"\\n\")\n}\n\nimpl Operation {\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.name)\n }\n}\n\nfn capitalize_first(word: String) -> String {\n assert!(word.is_ascii());\n\n let mut result = word.into_bytes();\n result[0] = result[0].to_ascii_uppercase();\n\n String::from_utf8(result).unwrap()\n}<|endoftext|>"} {"text":"\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\npub use self::FileMatch::*;\n\nuse rustc_data_structures::fx::FxHashSet;\nuse std::borrow::Cow;\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse session::search_paths::{SearchPaths, PathKind};\nuse rustc_fs_util::fix_windows_verbatim_for_gcc;\n\n#[derive(Copy, Clone)]\npub enum FileMatch {\n FileMatches,\n FileDoesntMatch,\n}\n\n\/\/ A module for searching for libraries\n\npub struct FileSearch<'a> {\n pub sysroot: &'a Path,\n pub search_paths: &'a SearchPaths,\n pub triple: &'a str,\n pub kind: PathKind,\n}\n\nimpl<'a> FileSearch<'a> {\n pub fn for_each_lib_search_path(&self, mut f: F) where\n F: FnMut(&Path, PathKind)\n {\n let mut visited_dirs = FxHashSet::default();\n visited_dirs.reserve(self.search_paths.paths.len() + 1);\n for (path, kind) in self.search_paths.iter(self.kind) {\n f(path, kind);\n visited_dirs.insert(path.to_path_buf());\n }\n\n debug!(\"filesearch: searching lib path\");\n let tlib_path = make_target_lib_path(self.sysroot,\n self.triple);\n if !visited_dirs.contains(&tlib_path) {\n f(&tlib_path, PathKind::All);\n }\n\n visited_dirs.insert(tlib_path);\n }\n\n pub fn get_lib_path(&self) -> PathBuf {\n make_target_lib_path(self.sysroot, self.triple)\n }\n\n pub fn search(&self, mut pick: F)\n where F: FnMut(&Path, PathKind) -> FileMatch\n {\n self.for_each_lib_search_path(|lib_search_path, kind| {\n debug!(\"searching {}\", lib_search_path.display());\n let files = match fs::read_dir(lib_search_path) {\n Ok(files) => files,\n Err(..) => return,\n };\n let files = files.filter_map(|p| p.ok().map(|s| s.path()))\n .collect::>();\n fn is_rlib(p: &Path) -> bool {\n p.extension() == Some(\"rlib\".as_ref())\n }\n \/\/ Reading metadata out of rlibs is faster, and if we find both\n \/\/ an rlib and a dylib we only read one of the files of\n \/\/ metadata, so in the name of speed, bring all rlib files to\n \/\/ the front of the search list.\n let files1 = files.iter().filter(|p| is_rlib(p));\n let files2 = files.iter().filter(|p| !is_rlib(p));\n for path in files1.chain(files2) {\n debug!(\"testing {}\", path.display());\n let maybe_picked = pick(path, kind);\n match maybe_picked {\n FileMatches => {\n debug!(\"picked {}\", path.display());\n }\n FileDoesntMatch => {\n debug!(\"rejected {}\", path.display());\n }\n }\n }\n });\n }\n\n pub fn new(sysroot: &'a Path,\n triple: &'a str,\n search_paths: &'a SearchPaths,\n kind: PathKind) -> FileSearch<'a> {\n debug!(\"using sysroot = {}, triple = {}\", sysroot.display(), triple);\n FileSearch {\n sysroot,\n search_paths,\n triple,\n kind,\n }\n }\n\n \/\/ Returns a list of directories where target-specific dylibs might be located.\n pub fn get_dylib_search_paths(&self) -> Vec {\n let mut paths = Vec::new();\n self.for_each_lib_search_path(|lib_search_path, _| {\n paths.push(lib_search_path.to_path_buf());\n });\n paths\n }\n\n \/\/ Returns a list of directories where target-specific tool binaries are located.\n pub fn get_tools_search_paths(&self) -> Vec {\n let mut p = PathBuf::from(self.sysroot);\n p.push(find_libdir(self.sysroot).as_ref());\n p.push(RUST_LIB_DIR);\n p.push(&self.triple);\n p.push(\"bin\");\n vec![p]\n }\n}\n\npub fn relative_target_lib_path(sysroot: &Path, target_triple: &str) -> PathBuf {\n let mut p = PathBuf::from(find_libdir(sysroot).as_ref());\n assert!(p.is_relative());\n p.push(RUST_LIB_DIR);\n p.push(target_triple);\n p.push(\"lib\");\n p\n}\n\nfn 
make_target_lib_path(sysroot: &Path,\n target_triple: &str) -> PathBuf {\n sysroot.join(&relative_target_lib_path(sysroot, target_triple))\n}\n\npub fn get_or_default_sysroot() -> PathBuf {\n \/\/ Follow symlinks. If the resolved path is relative, make it absolute.\n fn canonicalize(path: Option) -> Option {\n path.and_then(|path| {\n match fs::canonicalize(&path) {\n \/\/ See comments on this target function, but the gist is that\n \/\/ gcc chokes on verbatim paths which fs::canonicalize generates\n \/\/ so we try to avoid those kinds of paths.\n Ok(canon) => Some(fix_windows_verbatim_for_gcc(&canon)),\n Err(e) => bug!(\"failed to get realpath: {}\", e),\n }\n })\n }\n\n match env::current_exe() {\n Ok(exe) => {\n match canonicalize(Some(exe)) {\n Some(mut p) => { p.pop(); p.pop(); p },\n None => bug!(\"can't determine value for sysroot\")\n }\n }\n Err(ref e) => panic!(format!(\"failed to get current_exe: {}\", e))\n }\n}\n\n\/\/ The name of the directory rustc expects libraries to be located.\nfn find_libdir(sysroot: &Path) -> Cow<'static, str> {\n \/\/ FIXME: This is a quick hack to make the rustc binary able to locate\n \/\/ Rust libraries in Linux environments where libraries might be installed\n \/\/ to lib64\/lib32. This would be more foolproof by basing the sysroot off\n \/\/ of the directory where librustc is located, rather than where the rustc\n \/\/ binary is.\n \/\/ If --libdir is set during configuration to the value other than\n \/\/ \"lib\" (i.e. non-default), this value is used (see issue #16552).\n\n match option_env!(\"CFG_LIBDIR_RELATIVE\") {\n Some(libdir) if libdir != \"lib\" => return libdir.into(),\n _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() {\n return PRIMARY_LIB_DIR.into();\n } else {\n return SECONDARY_LIB_DIR.into();\n }\n }\n\n #[cfg(target_pointer_width = \"64\")]\n const PRIMARY_LIB_DIR: &'static str = \"lib64\";\n\n #[cfg(target_pointer_width = \"32\")]\n const PRIMARY_LIB_DIR: &'static str = \"lib32\";\n\n const SECONDARY_LIB_DIR: &'static str = \"lib\";\n}\n\n\/\/ The name of rustc's own place to organize libraries.\n\/\/ Used to be \"rustc\", now the default is \"rustlib\"\nconst RUST_LIB_DIR: &'static str = \"rustlib\";\nrustc\/session: move consts up to improve readability\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\npub use self::FileMatch::*;\n\nuse rustc_data_structures::fx::FxHashSet;\nuse std::borrow::Cow;\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse session::search_paths::{SearchPaths, PathKind};\nuse rustc_fs_util::fix_windows_verbatim_for_gcc;\n\n#[derive(Copy, Clone)]\npub enum FileMatch {\n FileMatches,\n FileDoesntMatch,\n}\n\n\/\/ A module for searching for libraries\n\npub struct FileSearch<'a> {\n pub sysroot: &'a Path,\n pub search_paths: &'a SearchPaths,\n pub triple: &'a str,\n pub kind: PathKind,\n}\n\nimpl<'a> FileSearch<'a> {\n pub fn for_each_lib_search_path(&self, mut f: F) where\n F: FnMut(&Path, PathKind)\n {\n let mut visited_dirs = FxHashSet::default();\n visited_dirs.reserve(self.search_paths.paths.len() + 1);\n for (path, kind) in self.search_paths.iter(self.kind) {\n f(path, kind);\n visited_dirs.insert(path.to_path_buf());\n }\n\n debug!(\"filesearch: searching lib path\");\n let tlib_path = make_target_lib_path(self.sysroot,\n self.triple);\n if !visited_dirs.contains(&tlib_path) {\n f(&tlib_path, PathKind::All);\n }\n\n visited_dirs.insert(tlib_path);\n }\n\n pub fn get_lib_path(&self) -> PathBuf {\n make_target_lib_path(self.sysroot, self.triple)\n }\n\n pub fn search(&self, mut pick: F)\n where F: FnMut(&Path, PathKind) -> FileMatch\n {\n self.for_each_lib_search_path(|lib_search_path, kind| {\n debug!(\"searching {}\", lib_search_path.display());\n let files = match fs::read_dir(lib_search_path) {\n Ok(files) => files,\n Err(..) => return,\n };\n let files = files.filter_map(|p| p.ok().map(|s| s.path()))\n .collect::>();\n fn is_rlib(p: &Path) -> bool {\n p.extension() == Some(\"rlib\".as_ref())\n }\n \/\/ Reading metadata out of rlibs is faster, and if we find both\n \/\/ an rlib and a dylib we only read one of the files of\n \/\/ metadata, so in the name of speed, bring all rlib files to\n \/\/ the front of the search list.\n let files1 = files.iter().filter(|p| is_rlib(p));\n let files2 = files.iter().filter(|p| !is_rlib(p));\n for path in files1.chain(files2) {\n debug!(\"testing {}\", path.display());\n let maybe_picked = pick(path, kind);\n match maybe_picked {\n FileMatches => {\n debug!(\"picked {}\", path.display());\n }\n FileDoesntMatch => {\n debug!(\"rejected {}\", path.display());\n }\n }\n }\n });\n }\n\n pub fn new(sysroot: &'a Path,\n triple: &'a str,\n search_paths: &'a SearchPaths,\n kind: PathKind) -> FileSearch<'a> {\n debug!(\"using sysroot = {}, triple = {}\", sysroot.display(), triple);\n FileSearch {\n sysroot,\n search_paths,\n triple,\n kind,\n }\n }\n\n \/\/ Returns a list of directories where target-specific dylibs might be located.\n pub fn get_dylib_search_paths(&self) -> Vec {\n let mut paths = Vec::new();\n self.for_each_lib_search_path(|lib_search_path, _| {\n paths.push(lib_search_path.to_path_buf());\n });\n paths\n }\n\n \/\/ Returns a list of directories where target-specific tool binaries are located.\n pub fn get_tools_search_paths(&self) -> Vec {\n let mut p = PathBuf::from(self.sysroot);\n p.push(find_libdir(self.sysroot).as_ref());\n p.push(RUST_LIB_DIR);\n p.push(&self.triple);\n p.push(\"bin\");\n vec![p]\n }\n}\n\npub fn relative_target_lib_path(sysroot: &Path, target_triple: &str) -> PathBuf {\n let mut p = PathBuf::from(find_libdir(sysroot).as_ref());\n assert!(p.is_relative());\n p.push(RUST_LIB_DIR);\n p.push(target_triple);\n p.push(\"lib\");\n p\n}\n\nfn 
make_target_lib_path(sysroot: &Path,\n target_triple: &str) -> PathBuf {\n sysroot.join(&relative_target_lib_path(sysroot, target_triple))\n}\n\npub fn get_or_default_sysroot() -> PathBuf {\n \/\/ Follow symlinks. If the resolved path is relative, make it absolute.\n fn canonicalize(path: Option) -> Option {\n path.and_then(|path| {\n match fs::canonicalize(&path) {\n \/\/ See comments on this target function, but the gist is that\n \/\/ gcc chokes on verbatim paths which fs::canonicalize generates\n \/\/ so we try to avoid those kinds of paths.\n Ok(canon) => Some(fix_windows_verbatim_for_gcc(&canon)),\n Err(e) => bug!(\"failed to get realpath: {}\", e),\n }\n })\n }\n\n match env::current_exe() {\n Ok(exe) => {\n match canonicalize(Some(exe)) {\n Some(mut p) => { p.pop(); p.pop(); p },\n None => bug!(\"can't determine value for sysroot\")\n }\n }\n Err(ref e) => panic!(format!(\"failed to get current_exe: {}\", e))\n }\n}\n\n\/\/ The name of the directory rustc expects libraries to be located.\nfn find_libdir(sysroot: &Path) -> Cow<'static, str> {\n \/\/ FIXME: This is a quick hack to make the rustc binary able to locate\n \/\/ Rust libraries in Linux environments where libraries might be installed\n \/\/ to lib64\/lib32. This would be more foolproof by basing the sysroot off\n \/\/ of the directory where librustc is located, rather than where the rustc\n \/\/ binary is.\n \/\/ If --libdir is set during configuration to the value other than\n \/\/ \"lib\" (i.e. non-default), this value is used (see issue #16552).\n\n #[cfg(target_pointer_width = \"64\")]\n const PRIMARY_LIB_DIR: &'static str = \"lib64\";\n\n #[cfg(target_pointer_width = \"32\")]\n const PRIMARY_LIB_DIR: &'static str = \"lib32\";\n\n const SECONDARY_LIB_DIR: &'static str = \"lib\";\n\n match option_env!(\"CFG_LIBDIR_RELATIVE\") {\n Some(libdir) if libdir != \"lib\" => libdir.into(),\n _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() {\n PRIMARY_LIB_DIR.into()\n } else {\n SECONDARY_LIB_DIR.into()\n }\n }\n}\n\n\/\/ The name of rustc's own place to organize libraries.\n\/\/ Used to be \"rustc\", now the default is \"rustlib\"\nconst RUST_LIB_DIR: &'static str = \"rustlib\";\n<|endoftext|>"} {"text":"Rework the disabling of the SPIR-V optimiser<|endoftext|>"} {"text":"use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = 
Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, m: &Module) -> Result, StorageBackendError>\n {\n glob(&self.prefix_of_files_for_module(m)[..])\n .and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::>()\n .into_iter();\n Ok(v)\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser)\n -> Result>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::>()\n .into_iter())\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file(&self, f: File, p: &Parser) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file(&self, f: File, p: &Parser) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. 
Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser) -> Option>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] 
+\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option, \/\/ Data dump, if any\n pub caused_by: Option>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new(action: S, desc: S, data: Option) -> StorageBackendError\n where S: Into\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser) -> Result\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\nFix wording in debug messageuse std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, m: &Module) -> Result, StorageBackendError>\n {\n glob(&self.prefix_of_files_for_module(m)[..])\n .and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::>()\n .into_iter();\n Ok(v)\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser)\n -> Result>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::>()\n .into_iter())\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as 
the file won't be edited afterwards\n *\/\n pub fn put_file(&self, f: File, p: &Parser) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file(&self, f: File, p: &Parser) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser) -> Option>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success opening file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. 
I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] +\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option, \/\/ Data dump, if any\n pub caused_by: Option>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new(action: S, desc: S, data: Option) -> StorageBackendError\n where S: Into\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser) -> Result\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<|endoftext|>"} {"text":"Added client state machine per section 3.2<|endoftext|>"} {"text":"Implement `Show` for the `EmailAddress` type<|endoftext|>"} {"text":"Changed one call to clone() to a dereference.<|endoftext|>"} {"text":"Defined and documented request sequenceruse super::app::App;\n\n\/\/\/ Ensures that request handling is sequentially consistent\n\/\/\/\n\/\/\/ Requests come in to the server in a certain order. Each request has a certain set of\n\/\/\/ \"dependencies\". These dependencies are the parts of the state that the request wants to\n\/\/\/ modify. We want to avoid partially updating state or ending up in an inconsistent state, so it\n\/\/\/ is important that each request waits until **all** of the data it needs is available. Another\n\/\/\/ part of this is that requests that do not dependent on any of the same data should be able to\n\/\/\/ run concurrently. 
For example, if two turtles are drawing separate lines, they should be able\n\/\/\/ to draw those lines at the same time.\n\/\/\/\n\/\/\/ More precisely:\n\/\/\/ * This type enforces the property that requests are executed in the order in which they arrive.\n\/\/\/ * Requests that arrive later are only allowed to execute before prior requests if they do not\n\/\/\/ share any data dependencies with the prior requests.\n\/\/\/\n\/\/\/ Example: Suppose there are N = 4 turtles and you have the following requests:\n\/\/\/ - Request R1 depends on turtles: 1, 2, 3\n\/\/\/ - Request R2 depends on turtles: 4\n\/\/\/ - Request R3 depends on turtles: 3, 4\n\/\/\/ - Request R4 depends on turtles: 1, 2, 3, 4\n\/\/\/ - Request R5 depends on turtles: 1\n\/\/\/\n\/\/\/ Expected behaviour:\n\/\/\/ 1. R1 and R2 execute concurrently, no shared dependencies\n\/\/\/ 2. R3 waits on both R1 and R2\n\/\/\/ 3. R4 waits on R3 (and implicitly R1 and R2)\n\/\/\/ 4. R5 waits on R4\n\/\/\/\n\/\/\/ Conceptually, you can imagine that there is a queue for each turtle's data. The requests can\n\/\/\/ be sorted into those queues like so:\n\/\/\/\n\/\/\/ 1: R1, R4, R5\n\/\/\/ 2: R1, R4\n\/\/\/ 3: R1, R3, R4\n\/\/\/ 4: R2, R3, R4\n\/\/\/\n\/\/\/ The key here is that each command is listed in the order that it was in the original queue. A\n\/\/\/ command cannot execute until the command before it is done. A command can't be done until it's\n\/\/\/ at the front of all the queues it is in.\n\/\/\/\n\/\/\/ Note that commands can take a non-instant amount of time to execute. (That is, a command can\n\/\/\/ `await` during its execution.) That means that any locks need to be held across await points so\n\/\/\/ that a command completely finishes executing before the next command is notified that the lock\n\/\/\/ is available.\n#[derive(Debug)]\npub struct RequestSequencer<'a> {\n app: &'a App,\n}\n\nimpl<'a> RequestSequencer<'a> {\n pub fn new(app: &'a App) -> Self {\n Self {\n app,\n }\n }\n\n \/\/\/ Requests the opportunity to potentially read or modify all turtles\n \/\/\/\n \/\/\/ This request is guaranteed to be fulfilled in FIFO order as soon as all of the turtles are\n \/\/\/ available.\n pub async fn request_all_turtles(&self) -> Vec>> {\n todo!()\n }\n\n \/\/\/ Requests the opportunity to potentially read or modify all turtles\n pub async fn request_all_turtles(&self) -> Vec>> {\n todo!()\n }\n}\n<|endoftext|>"} {"text":"Neighbor accessor tests<|endoftext|>"} {"text":"use std::io::net::ip::{Port, IpAddr};\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware, Action };\nuse into_middleware::IntoMiddleware;\nuse into_error_handler::IntoErrorHandler;\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\nuse server::Server;\n\nuse http::method::{ Method, Get, Post, Put, Delete };\nuse http::status::NotFound;\nuse request::Request;\nuse response::Response;\n\n\n\/\/pre defined middleware\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\nuse default_error_handler::DefaultErrorHandler;\n\n\/\/\/ Nickel is the application object. 
It's the surface that\n\/\/\/ holds all public APIs.\n\npub struct Nickel{\n middleware_stack: MiddlewareStack,\n server: Option,\n}\n\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let mut middleware_stack = MiddlewareStack::new();\n\n \/\/ Hook up the default error handler by default. Users are\n \/\/ free to cancel it out from their custom error handler if\n \/\/ they don't like the default behaviour.\n middleware_stack.add_error_handler(DefaultErrorHandler);\n\n Nickel {\n middleware_stack: middleware_stack,\n server: None\n }\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a return value of boolean. That is to indicate whether\n \/\/\/ other middleware handler (if any) further down the stack should continue or if the\n \/\/\/ middleware invocation should be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn logger (req: &Request, res: &mut Response) -> Result{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Ok(Continue)\n \/\/\/ }\n \/\/\/ ```\n pub fn utilize(&mut self, handler: T){\n self.middleware_stack.add_middleware(handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards. A handler added through this API will\n \/\/\/ be attached to the default router. 
Consider creating the router\n \/\/\/ middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example without variables and wildcards\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", handler);\n \/\/\/ ```\n \/\/\/ # Example with variables\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.params.get(&\"userid\".to_string()));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with simple wildcard\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with double wildcard\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Get, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get()` for a more detailed description.\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Post, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get(..)` for a more detailed description.\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Put, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to 
\/a\/delete\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Delete, uri, handler);\n }\n\n fn register_route_with_new_router(&mut self, method: Method, uri: &str, handler: fn(request: &Request, response: &mut Response)) {\n let mut router = Router::new();\n router.add_route(method, String::from_str(uri), handler);\n self.utilize(router);\n }\n\n \/\/\/ Registers an error handler which will be invoked among other error handler\n \/\/\/ as soon as any regular handler returned an error\n \/\/\/\n \/\/\/ A error handler is nearly identical to a regular middleware handler with the only\n \/\/\/ difference that it takes an additional error parameter or type `NickelError.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ # extern crate http;\n \/\/\/ # extern crate nickel;\n \/\/\/ # fn main() {\n \/\/\/ use nickel::{Nickel, Request, Response, Action, Continue, Halt};\n \/\/\/ use nickel::{NickelError, ErrorWithStatusCode, get_media_type};\n \/\/\/ use http::status::NotFound;\n \/\/\/\n \/\/\/ fn error_handler(err: &NickelError, req: &Request, response: &mut Response)\n \/\/\/ -> Result{\n \/\/\/ match err.kind {\n \/\/\/ ErrorWithStatusCode(NotFound) => {\n \/\/\/ response.origin.headers.content_type = get_media_type(\"html\");\n \/\/\/ response.origin.status = NotFound;\n \/\/\/ response.send(\"
<h1>
Call the police!
</h1>
\");\n \/\/\/ Ok(Halt)\n \/\/\/ },\n \/\/\/ _ => Ok(Continue)\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.handle_error(error_handler)\n \/\/\/ # }\n \/\/\/ ```\n pub fn handle_error(&mut self, handler: fn(err: &NickelError,\n req: &Request,\n res: &mut Response)\n -> Result){\n let handler = IntoErrorHandler::from_fn(handler);\n self.middleware_stack.add_error_handler(handler);\n }\n\n \/\/\/ Create a new middleware to serve as a router.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ let mut router = Nickel::router();\n \/\/\/\n \/\/\/ fn foo_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"Hi from \/foo\");\n \/\/\/ };\n \/\/\/\n \/\/\/ router.get(\"\/foo\", foo_handler);\n \/\/\/ server.utilize(router);\n \/\/\/ ```\n pub fn router() -> Router {\n Router::new()\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate serialize;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::JsonBody;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ first_name: String,\n \/\/\/ last_name: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let router = router! {\n \/\/\/ post \"\/a\/post\/request\" => |request, response| {\n \/\/\/ let person = request.json_as::().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.first_name, person.last_name);\n \/\/\/ response.send(text);\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the json_body_parser middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::json_body_parser());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::QueryString;\n \/\/\/ # fn main() {\n \/\/\/ let router = router! 
{\n \/\/\/ get \"\/a\/get\/request\" => |request, response| {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo[0].as_slice());\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the query_string middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::query_string());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n }\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n fn not_found_handler(_request: &Request, _response: &mut Response) -> Result {\n Err(NickelError::new(\"File Not Found\", ErrorWithStatusCode(NotFound)))\n }\n\n self.middleware_stack.add_middleware(IntoMiddleware::from_fn(not_found_handler));\n self.server = Some(Server::new(self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\ndocs(nickel): fix docs for putuse std::io::net::ip::{Port, IpAddr};\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware, Action };\nuse into_middleware::IntoMiddleware;\nuse into_error_handler::IntoErrorHandler;\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\nuse server::Server;\n\nuse http::method::{ Method, Get, Post, Put, Delete };\nuse http::status::NotFound;\nuse request::Request;\nuse response::Response;\n\n\n\/\/pre defined middleware\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\nuse default_error_handler::DefaultErrorHandler;\n\n\/\/\/ Nickel is the application object. It's the surface that\n\/\/\/ holds all public APIs.\n\npub struct Nickel{\n middleware_stack: MiddlewareStack,\n server: Option,\n}\n\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let mut middleware_stack = MiddlewareStack::new();\n\n \/\/ Hook up the default error handler by default. Users are\n \/\/ free to cancel it out from their custom error handler if\n \/\/ they don't like the default behaviour.\n middleware_stack.add_error_handler(DefaultErrorHandler);\n\n Nickel {\n middleware_stack: middleware_stack,\n server: None\n }\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a return value of boolean. 
That is to indicate whether\n \/\/\/ other middleware handler (if any) further down the stack should continue or if the\n \/\/\/ middleware invocation should be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn logger (req: &Request, res: &mut Response) -> Result{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Ok(Continue)\n \/\/\/ }\n \/\/\/ ```\n pub fn utilize(&mut self, handler: T){\n self.middleware_stack.add_middleware(handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards. A handler added through this API will\n \/\/\/ be attached to the default router. Consider creating the router\n \/\/\/ middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example without variables and wildcards\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", handler);\n \/\/\/ ```\n \/\/\/ # Example with variables\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.params.get(&\"userid\".to_string()));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with simple wildcard\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with double wildcard\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Get, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get()` for a more detailed description.\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Post, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This 
matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Put, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to \/a\/delete\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Delete, uri, handler);\n }\n\n fn register_route_with_new_router(&mut self, method: Method, uri: &str, handler: fn(request: &Request, response: &mut Response)) {\n let mut router = Router::new();\n router.add_route(method, String::from_str(uri), handler);\n self.utilize(router);\n }\n\n \/\/\/ Registers an error handler which will be invoked among other error handler\n \/\/\/ as soon as any regular handler returned an error\n \/\/\/\n \/\/\/ A error handler is nearly identical to a regular middleware handler with the only\n \/\/\/ difference that it takes an additional error parameter or type `NickelError.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ # extern crate http;\n \/\/\/ # extern crate nickel;\n \/\/\/ # fn main() {\n \/\/\/ use nickel::{Nickel, Request, Response, Action, Continue, Halt};\n \/\/\/ use nickel::{NickelError, ErrorWithStatusCode, get_media_type};\n \/\/\/ use http::status::NotFound;\n \/\/\/\n \/\/\/ fn error_handler(err: &NickelError, req: &Request, response: &mut Response)\n \/\/\/ -> Result{\n \/\/\/ match err.kind {\n \/\/\/ ErrorWithStatusCode(NotFound) => {\n \/\/\/ response.origin.headers.content_type = get_media_type(\"html\");\n \/\/\/ response.origin.status = NotFound;\n \/\/\/ response.send(\"
<h1>Call the police!<\/h1>
\");\n \/\/\/ Ok(Halt)\n \/\/\/ },\n \/\/\/ _ => Ok(Continue)\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.handle_error(error_handler)\n \/\/\/ # }\n \/\/\/ ```\n pub fn handle_error(&mut self, handler: fn(err: &NickelError,\n req: &Request,\n res: &mut Response)\n -> Result){\n let handler = IntoErrorHandler::from_fn(handler);\n self.middleware_stack.add_error_handler(handler);\n }\n\n \/\/\/ Create a new middleware to serve as a router.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ let mut router = Nickel::router();\n \/\/\/\n \/\/\/ fn foo_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"Hi from \/foo\");\n \/\/\/ };\n \/\/\/\n \/\/\/ router.get(\"\/foo\", foo_handler);\n \/\/\/ server.utilize(router);\n \/\/\/ ```\n pub fn router() -> Router {\n Router::new()\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate serialize;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::JsonBody;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ first_name: String,\n \/\/\/ last_name: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let router = router! {\n \/\/\/ post \"\/a\/post\/request\" => |request, response| {\n \/\/\/ let person = request.json_as::().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.first_name, person.last_name);\n \/\/\/ response.send(text);\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the json_body_parser middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::json_body_parser());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::QueryString;\n \/\/\/ # fn main() {\n \/\/\/ let router = router! 
{\n \/\/\/ get \"\/a\/get\/request\" => |request, response| {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo[0].as_slice());\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the query_string middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::query_string());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n }\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n fn not_found_handler(_request: &Request, _response: &mut Response) -> Result {\n Err(NickelError::new(\"File Not Found\", ErrorWithStatusCode(NotFound)))\n }\n\n self.middleware_stack.add_middleware(IntoMiddleware::from_fn(not_found_handler));\n self.server = Some(Server::new(self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\n<|endoftext|>"} {"text":"\/\/! AWS Regions and helper functions.\n\/\/!\n\/\/! Mostly used for translating the Region enum to a string AWS accepts.\n\/\/!\n\/\/! For example: `UsEast1` to \"us-east-1\"\n\nuse std::error::Error;\nuse std::str::FromStr;\nuse std::fmt::{Display, Error as FmtError, Formatter};\n\n\/\/\/ An AWS region.\n\/\/\/ `CnNorth1` is currently untested due to Rusoto maintainers not having access to AWS China.\n#[derive(Copy, Clone, Debug, PartialEq)]\npub enum Region {\n ApNortheast1,\n ApNortheast2,\n ApSouth1,\n ApSoutheast1,\n ApSoutheast2,\n EuCentral1,\n EuWest1,\n SaEast1,\n UsEast1,\n UsWest1,\n UsWest2,\n CnNorth1,\n}\n\n\/\/\/ An error produced when attempting to convert a `str` into a `Region` fails.\n#[derive(Debug,PartialEq)]\npub struct ParseRegionError {\n message: String,\n}\n\nimpl Display for Region {\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n let region_str = match *self {\n Region::ApNortheast1 => \"ap-northeast-1\",\n Region::ApNortheast2 => \"ap-northeast-2\",\n Region::ApSouth1 => \"ap-south-1\",\n Region::ApSoutheast1 => \"ap-southeast-1\",\n Region::ApSoutheast2 => \"ap-southeast-2\",\n Region::EuCentral1 => \"eu-central-1\",\n Region::EuWest1 => \"eu-west-1\",\n Region::SaEast1 => \"sa-east-1\",\n Region::UsEast1 => \"us-east-1\",\n Region::UsWest1 => \"us-west-1\",\n Region::UsWest2 => \"us-west-2\",\n Region::CnNorth1 => \"cn-north-1\",\n };\n\n write!(f, \"{}\", region_str)\n }\n}\n\nimpl FromStr for Region {\n type Err = ParseRegionError;\n\n fn from_str(s: &str) -> Result {\n match s {\n \"ap-northeast-1\" => Ok(Region::ApNortheast1),\n \"ap-northeast-2\" => Ok(Region::ApNortheast2),\n \"ap-south-1\" => Ok(Region::ApSouth1),\n \"ap-southeast-1\" => Ok(Region::ApSoutheast1),\n \"ap-southeast-2\" => Ok(Region::ApSoutheast2),\n \"eu-central-1\" => Ok(Region::EuCentral1),\n \"eu-west-1\" => Ok(Region::EuWest1),\n \"sa-east-1\" => Ok(Region::SaEast1),\n \"us-east-1\" => Ok(Region::UsEast1),\n \"us-west-1\" => Ok(Region::UsWest1),\n \"us-west-2\" => Ok(Region::UsWest2),\n \"cn-north-1\" => Ok(Region::CnNorth1),\n s => Err(ParseRegionError::new(s))\n }\n }\n}\n\nimpl ParseRegionError {\n pub fn new(input: &str) -> Self {\n ParseRegionError {\n message: format!(\"Not a valid AWS region: {}\", input)\n }\n }\n}\n\nimpl Error 
for ParseRegionError {\n fn description(&self) -> &str {\n &self.message\n }\n}\n\nimpl Display for ParseRegionError {\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n write!(f, \"{}\", self.message)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn from_str() {\n assert_eq!(\n \"foo\".parse::().err().expect(\n \"Parsing foo as a Region was not an error\"\n ).to_string(),\n \"Not a valid AWS region: foo\".to_owned()\n );\n assert_eq!(\"ap-northeast-1\".parse(), Ok(Region::ApNortheast1));\n assert_eq!(\"ap-northeast-2\".parse(), Ok(Region::ApNortheast2));\n assert_eq!(\"ap-south-1\".parse(), Ok(Region::ApSouth1));\n assert_eq!(\"ap-southeast-1\".parse(), Ok(Region::ApSoutheast1));\n assert_eq!(\"ap-southeast-2\".parse(), Ok(Region::ApSoutheast2));\n assert_eq!(\"eu-central-1\".parse(), Ok(Region::EuCentral1));\n assert_eq!(\"eu-west-1\".parse(), Ok(Region::EuWest1));\n assert_eq!(\"sa-east-1\".parse(), Ok(Region::SaEast1));\n assert_eq!(\"us-east-1\".parse(), Ok(Region::UsEast1));\n assert_eq!(\"us-west-1\".parse(), Ok(Region::UsWest1));\n assert_eq!(\"us-west-2\".parse(), Ok(Region::UsWest2));\n assert_eq!(\"cn-north-1\".parse(), Ok(Region::CnNorth1));\n }\n\n #[test]\n fn region_display() {\n assert_eq!(Region::ApNortheast1.to_string(), \"ap-northeast-1\".to_owned());\n assert_eq!(Region::ApNortheast2.to_string(), \"ap-northeast-2\".to_owned());\n assert_eq!(Region::ApSouth1.to_string(), \"ap-south-1\".to_owned());\n assert_eq!(Region::ApSoutheast1.to_string(), \"ap-southeast-1\".to_owned());\n assert_eq!(Region::ApSoutheast2.to_string(), \"ap-southeast-2\".to_owned());\n assert_eq!(Region::EuCentral1.to_string(), \"eu-central-1\".to_owned());\n assert_eq!(Region::EuWest1.to_string(), \"eu-west-1\".to_owned());\n assert_eq!(Region::SaEast1.to_string(), \"sa-east-1\".to_owned());\n assert_eq!(Region::UsEast1.to_string(), \"us-east-1\".to_owned());\n assert_eq!(Region::UsWest1.to_string(), \"us-west-1\".to_owned());\n assert_eq!(Region::UsWest2.to_string(), \"us-west-2\".to_owned());\n assert_eq!(Region::CnNorth1.to_string(), \"cn-north-1\".to_owned());\n }\n}\nimpr: add ca-central-1 region\/\/! AWS Regions and helper functions.\n\/\/!\n\/\/! Mostly used for translating the Region enum to a string AWS accepts.\n\/\/!\n\/\/! 
For example: `UsEast1` to \"us-east-1\"\n\nuse std::error::Error;\nuse std::str::FromStr;\nuse std::fmt::{Display, Error as FmtError, Formatter};\n\n\/\/\/ An AWS region.\n\/\/\/ `CnNorth1` is currently untested due to Rusoto maintainers not having access to AWS China.\n#[derive(Copy, Clone, Debug, PartialEq)]\npub enum Region {\n ApNortheast1,\n ApNortheast2,\n ApSouth1,\n ApSoutheast1,\n ApSoutheast2,\n CaCentral1,\n EuCentral1,\n EuWest1,\n SaEast1,\n UsEast1,\n UsWest1,\n UsWest2,\n CnNorth1,\n}\n\n\/\/\/ An error produced when attempting to convert a `str` into a `Region` fails.\n#[derive(Debug,PartialEq)]\npub struct ParseRegionError {\n message: String,\n}\n\nimpl Display for Region {\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n let region_str = match *self {\n Region::ApNortheast1 => \"ap-northeast-1\",\n Region::ApNortheast2 => \"ap-northeast-2\",\n Region::ApSouth1 => \"ap-south-1\",\n Region::ApSoutheast1 => \"ap-southeast-1\",\n Region::ApSoutheast2 => \"ap-southeast-2\",\n Region::CaCentral1 => \"ca-central-1\",\n Region::EuCentral1 => \"eu-central-1\",\n Region::EuWest1 => \"eu-west-1\",\n Region::SaEast1 => \"sa-east-1\",\n Region::UsEast1 => \"us-east-1\",\n Region::UsWest1 => \"us-west-1\",\n Region::UsWest2 => \"us-west-2\",\n Region::CnNorth1 => \"cn-north-1\",\n };\n\n write!(f, \"{}\", region_str)\n }\n}\n\nimpl FromStr for Region {\n type Err = ParseRegionError;\n\n fn from_str(s: &str) -> Result {\n match s {\n \"ap-northeast-1\" => Ok(Region::ApNortheast1),\n \"ap-northeast-2\" => Ok(Region::ApNortheast2),\n \"ap-south-1\" => Ok(Region::ApSouth1),\n \"ap-southeast-1\" => Ok(Region::ApSoutheast1),\n \"ap-southeast-2\" => Ok(Region::ApSoutheast2),\n \"ca-central-1\" => Ok(Region::CaCentral1),\n \"eu-central-1\" => Ok(Region::EuCentral1),\n \"eu-west-1\" => Ok(Region::EuWest1),\n \"sa-east-1\" => Ok(Region::SaEast1),\n \"us-east-1\" => Ok(Region::UsEast1),\n \"us-west-1\" => Ok(Region::UsWest1),\n \"us-west-2\" => Ok(Region::UsWest2),\n \"cn-north-1\" => Ok(Region::CnNorth1),\n s => Err(ParseRegionError::new(s))\n }\n }\n}\n\nimpl ParseRegionError {\n pub fn new(input: &str) -> Self {\n ParseRegionError {\n message: format!(\"Not a valid AWS region: {}\", input)\n }\n }\n}\n\nimpl Error for ParseRegionError {\n fn description(&self) -> &str {\n &self.message\n }\n}\n\nimpl Display for ParseRegionError {\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n write!(f, \"{}\", self.message)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn from_str() {\n assert_eq!(\n \"foo\".parse::().err().expect(\n \"Parsing foo as a Region was not an error\"\n ).to_string(),\n \"Not a valid AWS region: foo\".to_owned()\n );\n assert_eq!(\"ap-northeast-1\".parse(), Ok(Region::ApNortheast1));\n assert_eq!(\"ap-northeast-2\".parse(), Ok(Region::ApNortheast2));\n assert_eq!(\"ap-south-1\".parse(), Ok(Region::ApSouth1));\n assert_eq!(\"ap-southeast-1\".parse(), Ok(Region::ApSoutheast1));\n assert_eq!(\"ap-southeast-2\".parse(), Ok(Region::ApSoutheast2));\n assert_eq!(\"ca-central-1\".parse(), Ok(Region::CaCentral1));\n assert_eq!(\"eu-central-1\".parse(), Ok(Region::EuCentral1));\n assert_eq!(\"eu-west-1\".parse(), Ok(Region::EuWest1));\n assert_eq!(\"sa-east-1\".parse(), Ok(Region::SaEast1));\n assert_eq!(\"us-east-1\".parse(), Ok(Region::UsEast1));\n assert_eq!(\"us-west-1\".parse(), Ok(Region::UsWest1));\n assert_eq!(\"us-west-2\".parse(), Ok(Region::UsWest2));\n assert_eq!(\"cn-north-1\".parse(), Ok(Region::CnNorth1));\n }\n\n #[test]\n fn 
region_display() {\n assert_eq!(Region::ApNortheast1.to_string(), \"ap-northeast-1\".to_owned());\n assert_eq!(Region::ApNortheast2.to_string(), \"ap-northeast-2\".to_owned());\n assert_eq!(Region::ApSouth1.to_string(), \"ap-south-1\".to_owned());\n assert_eq!(Region::ApSoutheast1.to_string(), \"ap-southeast-1\".to_owned());\n assert_eq!(Region::ApSoutheast2.to_string(), \"ap-southeast-2\".to_owned());\n assert_eq!(Region::CaCentral1.to_string(), \"ca-central-1\".to_owned());\n assert_eq!(Region::EuCentral1.to_string(), \"eu-central-1\".to_owned());\n assert_eq!(Region::EuWest1.to_string(), \"eu-west-1\".to_owned());\n assert_eq!(Region::SaEast1.to_string(), \"sa-east-1\".to_owned());\n assert_eq!(Region::UsEast1.to_string(), \"us-east-1\".to_owned());\n assert_eq!(Region::UsWest1.to_string(), \"us-west-1\".to_owned());\n assert_eq!(Region::UsWest2.to_string(), \"us-west-2\".to_owned());\n assert_eq!(Region::CnNorth1.to_string(), \"cn-north-1\".to_owned());\n }\n}\n<|endoftext|>"} {"text":"fix bug: lit_fixnum does not match zero.<|endoftext|>"} {"text":"Make all 'conditionals_short' tests pass.<|endoftext|>"} {"text":"Don't use unify::Eq for fun since we have UFCS now<|endoftext|>"} {"text":"center of gravity<|endoftext|>"} {"text":"use std::f64;\nuse std::io::{IoError, MemWriter};\nuse std::io;\nuse std::num::{FPNaN, FPInfinite};\n\nuse ser;\nuse ser::Serializer;\n\n\/\/\/ A structure for implementing serialization to JSON.\npub struct Writer {\n writer: W,\n}\n\nimpl Writer {\n \/\/\/ Creates a new JSON visitr whose output will be written to the writer\n \/\/\/ specified.\n #[inline]\n pub fn new(writer: W) -> Writer {\n Writer {\n writer: writer,\n }\n }\n\n \/\/\/ Unwrap the Writer from the Serializer.\n #[inline]\n pub fn unwrap(self) -> W {\n self.writer\n }\n}\n\nimpl ser::Serializer for Writer {\n fn visit<\n T: ser::Serialize,\n >(&mut self, value: &T) -> Result<(), IoError> {\n value.visit(&mut self.writer, Visitor)\n }\n}\n\nstruct Visitor;\n\nimpl ser::Visitor for Visitor {\n #[inline]\n fn visit_null(&self, writer: &mut W) -> Result<(), IoError> {\n writer.write_str(\"null\")\n }\n\n #[inline]\n fn visit_bool(&self, writer: &mut W, value: bool) -> Result<(), IoError> {\n if value {\n writer.write_str(\"true\")\n } else {\n writer.write_str(\"false\")\n }\n }\n\n #[inline]\n fn visit_int(&self, writer: &mut W, value: int) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i8(&self, writer: &mut W, value: i8) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i16(&self, writer: &mut W, value: i16) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i32(&self, writer: &mut W, value: i32) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i64(&self, writer: &mut W, value: i64) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_uint(&self, writer: &mut W, value: uint) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u8(&self, writer: &mut W, value: u8) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u16(&self, writer: &mut W, value: u16) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u32(&self, writer: &mut W, value: u32) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u64(&self, writer: &mut W, value: u64) -> Result<(), IoError> {\n write!(writer, \"{}\", 
value)\n }\n\n #[inline]\n fn visit_f64(&self, writer: &mut W, value: f64) -> Result<(), IoError> {\n fmt_f64_or_null(writer, value)\n }\n\n #[inline]\n fn visit_char(&self, writer: &mut W, v: char) -> Result<(), IoError> {\n escape_char(writer, v)\n }\n\n #[inline]\n fn visit_str(&self, writer: &mut W, value: &str) -> Result<(), IoError> {\n escape_str(writer, value)\n }\n\n #[inline]\n fn visit_seq<\n V: ser::SeqVisitor\n >(&self, writer: &mut W, mut visitor: V) -> Result<(), IoError> {\n try!(writer.write_str(\"[\"));\n\n loop {\n match try!(visitor.visit(writer, Visitor)) {\n Some(()) => { }\n None => { break; }\n }\n }\n\n writer.write_str(\"]\")\n }\n\n #[inline]\n fn visit_seq_elt<\n T: ser::Serialize,\n >(&self, writer: &mut W, first: bool, value: T) -> Result<(), IoError> {\n if !first {\n try!(writer.write_str(\",\"));\n }\n\n value.visit(writer, Visitor)\n }\n\n #[inline]\n fn visit_map<\n V: ser::MapVisitor\n >(&self, writer: &mut W, mut visitor: V) -> Result<(), IoError> {\n try!(writer.write_str(\"{{\"));\n\n loop {\n match try!(visitor.visit(writer, Visitor)) {\n Some(()) => { }\n None => { break; }\n }\n }\n\n writer.write_str(\"}}\")\n }\n\n #[inline]\n fn visit_map_elt<\n K: ser::Serialize,\n V: ser::Serialize,\n >(&self, writer: &mut W, first: bool, key: K, value: V) -> Result<(), IoError> {\n if !first {\n try!(writer.write_str(\",\"));\n }\n\n try!(key.visit(writer, Visitor));\n try!(writer.write_str(\":\"));\n value.visit(writer, Visitor)\n }\n}\n\n#[inline]\npub fn escape_bytes(wr: &mut W, bytes: &[u8]) -> Result<(), IoError> {\n try!(wr.write_str(\"\\\"\"));\n\n let mut start = 0;\n\n for (i, byte) in bytes.iter().enumerate() {\n let escaped = match *byte {\n b'\"' => \"\\\\\\\"\",\n b'\\\\' => \"\\\\\\\\\",\n b'\\x08' => \"\\\\b\",\n b'\\x0c' => \"\\\\f\",\n b'\\n' => \"\\\\n\",\n b'\\r' => \"\\\\r\",\n b'\\t' => \"\\\\t\",\n _ => { continue; }\n };\n\n if start < i {\n try!(wr.write(bytes.slice(start, i)));\n }\n\n try!(wr.write_str(escaped));\n\n start = i + 1;\n }\n\n if start != bytes.len() {\n try!(wr.write(bytes.slice_from(start)));\n }\n\n wr.write_str(\"\\\"\")\n}\n\n#[inline]\npub fn escape_str(wr: &mut W, value: &str) -> Result<(), IoError> {\n escape_bytes(wr, value.as_bytes())\n}\n\n#[inline]\npub fn escape_char(wr: &mut W, value: char) -> Result<(), IoError> {\n let mut buf = [0, .. 
4];\n value.encode_utf8(buf);\n escape_bytes(wr, buf)\n}\n\nfn fmt_f64_or_null(wr: &mut W, value: f64) -> Result<(), IoError> {\n match value.classify() {\n FPNaN | FPInfinite => wr.write_str(\"null\"),\n _ => wr.write_str(f64::to_str_digits(value, 6).as_slice()),\n }\n}\n\n#[inline]\npub fn to_vec<\n T: ser::Serialize,\n>(value: &T) -> Result, IoError> {\n let writer = MemWriter::with_capacity(1024);\n let mut writer = Writer::new(writer);\n try!(writer.visit(value));\n Ok(writer.unwrap().unwrap())\n}\n\n#[inline]\npub fn to_string<\n T: ser::Serialize,\n>(value: &T) -> Result>, IoError> {\n let vec = try!(to_vec(value));\n Ok(String::from_utf8(vec))\n}\nInline the visitoruse std::f64;\nuse std::io::{IoError, MemWriter};\nuse std::io;\nuse std::num::{FPNaN, FPInfinite};\n\nuse ser;\nuse ser::Serializer;\n\n\/\/\/ A structure for implementing serialization to JSON.\npub struct Writer {\n writer: W,\n}\n\nimpl Writer {\n \/\/\/ Creates a new JSON visitr whose output will be written to the writer\n \/\/\/ specified.\n #[inline]\n pub fn new(writer: W) -> Writer {\n Writer {\n writer: writer,\n }\n }\n\n \/\/\/ Unwrap the Writer from the Serializer.\n #[inline]\n pub fn unwrap(self) -> W {\n self.writer\n }\n}\n\nimpl ser::Serializer for Writer {\n #[inline]\n fn visit<\n T: ser::Serialize,\n >(&mut self, value: &T) -> Result<(), IoError> {\n value.visit(&mut self.writer, Visitor)\n }\n}\n\nstruct Visitor;\n\nimpl ser::Visitor for Visitor {\n #[inline]\n fn visit_null(&self, writer: &mut W) -> Result<(), IoError> {\n writer.write_str(\"null\")\n }\n\n #[inline]\n fn visit_bool(&self, writer: &mut W, value: bool) -> Result<(), IoError> {\n if value {\n writer.write_str(\"true\")\n } else {\n writer.write_str(\"false\")\n }\n }\n\n #[inline]\n fn visit_int(&self, writer: &mut W, value: int) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i8(&self, writer: &mut W, value: i8) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i16(&self, writer: &mut W, value: i16) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i32(&self, writer: &mut W, value: i32) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_i64(&self, writer: &mut W, value: i64) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_uint(&self, writer: &mut W, value: uint) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u8(&self, writer: &mut W, value: u8) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u16(&self, writer: &mut W, value: u16) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u32(&self, writer: &mut W, value: u32) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_u64(&self, writer: &mut W, value: u64) -> Result<(), IoError> {\n write!(writer, \"{}\", value)\n }\n\n #[inline]\n fn visit_f64(&self, writer: &mut W, value: f64) -> Result<(), IoError> {\n fmt_f64_or_null(writer, value)\n }\n\n #[inline]\n fn visit_char(&self, writer: &mut W, v: char) -> Result<(), IoError> {\n escape_char(writer, v)\n }\n\n #[inline]\n fn visit_str(&self, writer: &mut W, value: &str) -> Result<(), IoError> {\n escape_str(writer, value)\n }\n\n #[inline]\n fn visit_seq<\n V: ser::SeqVisitor\n >(&self, writer: &mut W, mut visitor: V) -> Result<(), IoError> {\n try!(writer.write_str(\"[\"));\n\n loop {\n match 
try!(visitor.visit(writer, Visitor)) {\n Some(()) => { }\n None => { break; }\n }\n }\n\n writer.write_str(\"]\")\n }\n\n #[inline]\n fn visit_seq_elt<\n T: ser::Serialize,\n >(&self, writer: &mut W, first: bool, value: T) -> Result<(), IoError> {\n if !first {\n try!(writer.write_str(\",\"));\n }\n\n value.visit(writer, Visitor)\n }\n\n #[inline]\n fn visit_map<\n V: ser::MapVisitor\n >(&self, writer: &mut W, mut visitor: V) -> Result<(), IoError> {\n try!(writer.write_str(\"{{\"));\n\n loop {\n match try!(visitor.visit(writer, Visitor)) {\n Some(()) => { }\n None => { break; }\n }\n }\n\n writer.write_str(\"}}\")\n }\n\n #[inline]\n fn visit_map_elt<\n K: ser::Serialize,\n V: ser::Serialize,\n >(&self, writer: &mut W, first: bool, key: K, value: V) -> Result<(), IoError> {\n if !first {\n try!(writer.write_str(\",\"));\n }\n\n try!(key.visit(writer, Visitor));\n try!(writer.write_str(\":\"));\n value.visit(writer, Visitor)\n }\n}\n\n#[inline]\npub fn escape_bytes(wr: &mut W, bytes: &[u8]) -> Result<(), IoError> {\n try!(wr.write_str(\"\\\"\"));\n\n let mut start = 0;\n\n for (i, byte) in bytes.iter().enumerate() {\n let escaped = match *byte {\n b'\"' => \"\\\\\\\"\",\n b'\\\\' => \"\\\\\\\\\",\n b'\\x08' => \"\\\\b\",\n b'\\x0c' => \"\\\\f\",\n b'\\n' => \"\\\\n\",\n b'\\r' => \"\\\\r\",\n b'\\t' => \"\\\\t\",\n _ => { continue; }\n };\n\n if start < i {\n try!(wr.write(bytes.slice(start, i)));\n }\n\n try!(wr.write_str(escaped));\n\n start = i + 1;\n }\n\n if start != bytes.len() {\n try!(wr.write(bytes.slice_from(start)));\n }\n\n wr.write_str(\"\\\"\")\n}\n\n#[inline]\npub fn escape_str(wr: &mut W, value: &str) -> Result<(), IoError> {\n escape_bytes(wr, value.as_bytes())\n}\n\n#[inline]\npub fn escape_char(wr: &mut W, value: char) -> Result<(), IoError> {\n let mut buf = [0, .. 4];\n value.encode_utf8(buf);\n escape_bytes(wr, buf)\n}\n\nfn fmt_f64_or_null(wr: &mut W, value: f64) -> Result<(), IoError> {\n match value.classify() {\n FPNaN | FPInfinite => wr.write_str(\"null\"),\n _ => wr.write_str(f64::to_str_digits(value, 6).as_slice()),\n }\n}\n\n#[inline]\npub fn to_vec<\n T: ser::Serialize,\n>(value: &T) -> Result, IoError> {\n let writer = MemWriter::with_capacity(1024);\n let mut writer = Writer::new(writer);\n try!(writer.visit(value));\n Ok(writer.unwrap().unwrap())\n}\n\n#[inline]\npub fn to_string<\n T: ser::Serialize,\n>(value: &T) -> Result>, IoError> {\n let vec = try!(to_vec(value));\n Ok(String::from_utf8(vec))\n}\n<|endoftext|>"} {"text":"add regression test for #32505\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z parse-only -Z continue-parse-after-error\n\npub fn test() {\n foo(|_|) \/\/~ ERROR unexpected token: `)`\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"Skeleton for problem 2017\/\/ https:\/\/leetcode.com\/problems\/grid-game\/\npub fn grid_game(grid: Vec>) -> i64 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", grid_game(vec![vec![2, 5, 4], vec![1, 5, 1]])); \/\/ 4\n println!(\"{}\", grid_game(vec![vec![3, 3, 1], vec![8, 5, 2]])); \/\/ 4\n println!(\"{}\", grid_game(vec![vec![1, 3, 1, 15], vec![1, 3, 3, 1]])); \/\/ 7\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\nuse std::cell::{RefCell, Cell};\nuse std::collections::HashMap;\nuse std::ffi::CString;\nuse std::fmt::Debug;\nuse std::hash::{Hash, BuildHasher};\nuse std::iter::repeat;\nuse std::panic;\nuse std::path::Path;\nuse std::time::{Duration, Instant};\n\nuse std::sync::mpsc::{Sender};\nuse syntax_pos::{SpanData};\nuse ty::maps::{QueryMsg};\nuse dep_graph::{DepNode};\nuse proc_macro;\nuse lazy_static;\nuse session::Session;\n\n\/\/ The name of the associated type for `Fn` return types\npub const FN_OUTPUT_NAME: &'static str = \"Output\";\n\n\/\/ Useful type to use with `Result<>` indicate that an error has already\n\/\/ been reported to the user, so no need to continue checking.\n#[derive(Clone, Copy, Debug, RustcEncodable, RustcDecodable)]\npub struct ErrorReported;\n\nthread_local!(static TIME_DEPTH: Cell = Cell::new(0));\n\nlazy_static! {\n static ref DEFAULT_HOOK: Box = {\n let hook = panic::take_hook();\n panic::set_hook(Box::new(panic_hook));\n hook\n };\n}\n\nfn panic_hook(info: &panic::PanicInfo) {\n if !proc_macro::__internal::in_sess() {\n (*DEFAULT_HOOK)(info)\n }\n}\n\npub fn install_panic_hook() {\n lazy_static::initialize(&DEFAULT_HOOK);\n}\n\n\/\/\/ Parameters to the `Dump` variant of type `ProfileQueriesMsg`.\n#[derive(Clone,Debug)]\npub struct ProfQDumpParams {\n \/\/\/ A base path for the files we will dump\n pub path:String,\n \/\/\/ To ensure that the compiler waits for us to finish our dumps\n pub ack:Sender<()>,\n \/\/\/ toggle dumping a log file with every `ProfileQueriesMsg`\n pub dump_profq_msg_log:bool,\n}\n\n\/\/\/ A sequence of these messages induce a trace of query-based incremental compilation.\n\/\/\/ FIXME(matthewhammer): Determine whether we should include cycle detection here or not.\n#[derive(Clone,Debug)]\npub enum ProfileQueriesMsg {\n \/\/\/ begin a timed pass\n TimeBegin(String),\n \/\/\/ end a timed pass\n TimeEnd,\n \/\/\/ begin a task (see dep_graph::graph::with_task)\n TaskBegin(DepNode),\n \/\/\/ end a task\n TaskEnd,\n \/\/\/ begin a new query\n \/\/\/ can't use `Span` because queries are sent to other thread\n QueryBegin(SpanData, QueryMsg),\n \/\/\/ query is satisfied by using an already-known value for the given key\n CacheHit,\n \/\/\/ query requires running a provider; providers may nest, permitting queries to nest.\n ProviderBegin,\n \/\/\/ query is satisfied by a provider terminating with a value\n ProviderEnd,\n \/\/\/ dump a record of the queries to the given path\n Dump(ProfQDumpParams),\n \/\/\/ halt the profiling\/monitoring background thread\n Halt\n}\n\n\/\/\/ If enabled, send a message to the profile-queries thread\npub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) {\n if let Some(s) = sess.profile_channel.borrow().as_ref() {\n s.send(msg).unwrap()\n } else {\n \/\/ Do nothing\n }\n}\n\n\/\/\/ Set channel for profile queries channel\npub fn profq_set_chan(sess: &Session, s: Sender) -> bool {\n let mut channel = sess.profile_channel.borrow_mut();\n if channel.is_none() {\n *channel = Some(s);\n true\n } else {\n false\n }\n}\n\n\/\/\/ Read the current depth of `time()` calls. 
This is used to\n\/\/\/ encourage indentation across threads.\npub fn time_depth() -> usize {\n TIME_DEPTH.with(|slot| slot.get())\n}\n\n\/\/\/ Set the current depth of `time()` calls. The idea is to call\n\/\/\/ `set_time_depth()` with the result from `time_depth()` in the\n\/\/\/ parent thread.\npub fn set_time_depth(depth: usize) {\n TIME_DEPTH.with(|slot| slot.set(depth));\n}\n\npub fn time(sess: &Session, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n time_ext(sess.time_passes(), Some(sess), what, f)\n}\n\npub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n if !do_it { return f(); }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n if let Some(sess) = sess {\n if cfg!(debug_assertions) {\n profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string()))\n }\n }\n let start = Instant::now();\n let rv = f();\n let dur = start.elapsed();\n if let Some(sess) = sess {\n if cfg!(debug_assertions) {\n profq_msg(sess, ProfileQueriesMsg::TimeEnd)\n }\n }\n\n print_time_passes_entry_internal(what, dur);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n\n rv\n}\n\npub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) {\n if !do_it {\n return\n }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n print_time_passes_entry_internal(what, dur);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n}\n\nfn print_time_passes_entry_internal(what: &str, dur: Duration) {\n let indentation = TIME_DEPTH.with(|slot| slot.get());\n\n let mem_string = match get_resident() {\n Some(n) => {\n let mb = n as f64 \/ 1_000_000.0;\n format!(\"; rss: {}MB\", mb.round() as usize)\n }\n None => \"\".to_owned(),\n };\n println!(\"{}time: {}{}\\t{}\",\n repeat(\" \").take(indentation).collect::(),\n duration_to_secs_str(dur),\n mem_string,\n what);\n}\n\n\/\/ Hack up our own formatting for the duration to make it easier for scripts\n\/\/ to parse (always use the same number of decimal places and the same unit).\npub fn duration_to_secs_str(dur: Duration) -> String {\n const NANOS_PER_SEC: f64 = 1_000_000_000.0;\n let secs = dur.as_secs() as f64 +\n dur.subsec_nanos() as f64 \/ NANOS_PER_SEC;\n\n format!(\"{:.3}\", secs)\n}\n\npub fn to_readable_str(mut val: usize) -> String {\n let mut groups = vec![];\n loop {\n let group = val % 1000;\n\n val \/= 1000;\n\n if val == 0 {\n groups.push(format!(\"{}\", group));\n break;\n } else {\n groups.push(format!(\"{:03}\", group));\n }\n }\n\n groups.reverse();\n\n groups.join(\"_\")\n}\n\npub fn record_time(accu: &Cell, f: F) -> T where\n F: FnOnce() -> T,\n{\n let start = Instant::now();\n let rv = f();\n let duration = start.elapsed();\n accu.set(duration + accu.get());\n rv\n}\n\n\/\/ Memory reporting\n#[cfg(unix)]\nfn get_resident() -> Option {\n use std::fs;\n\n let field = 1;\n let contents = fs::read_string(\"\/proc\/self\/statm\").ok()?;\n let s = contents.split_whitespace().nth(field)?;\n let npages = s.parse::().ok()?;\n Some(npages * 4096)\n}\n\n#[cfg(windows)]\nfn get_resident() -> Option {\n type BOOL = i32;\n type DWORD = u32;\n type HANDLE = *mut u8;\n use libc::size_t;\n use std::mem;\n #[repr(C)]\n #[allow(non_snake_case)]\n struct PROCESS_MEMORY_COUNTERS {\n cb: DWORD,\n PageFaultCount: DWORD,\n PeakWorkingSetSize: size_t,\n WorkingSetSize: size_t,\n QuotaPeakPagedPoolUsage: size_t,\n QuotaPagedPoolUsage: size_t,\n QuotaPeakNonPagedPoolUsage: size_t,\n QuotaNonPagedPoolUsage: size_t,\n PagefileUsage: size_t,\n 
PeakPagefileUsage: size_t,\n }\n type PPROCESS_MEMORY_COUNTERS = *mut PROCESS_MEMORY_COUNTERS;\n #[link(name = \"psapi\")]\n extern \"system\" {\n fn GetCurrentProcess() -> HANDLE;\n fn GetProcessMemoryInfo(Process: HANDLE,\n ppsmemCounters: PPROCESS_MEMORY_COUNTERS,\n cb: DWORD) -> BOOL;\n }\n let mut pmc: PROCESS_MEMORY_COUNTERS = unsafe { mem::zeroed() };\n pmc.cb = mem::size_of_val(&pmc) as DWORD;\n match unsafe { GetProcessMemoryInfo(GetCurrentProcess(), &mut pmc, pmc.cb) } {\n 0 => None,\n _ => Some(pmc.WorkingSetSize as usize),\n }\n}\n\npub fn indent(op: F) -> R where\n R: Debug,\n F: FnOnce() -> R,\n{\n \/\/ Use in conjunction with the log post-processor like `src\/etc\/indenter`\n \/\/ to make debug output more readable.\n debug!(\">>\");\n let r = op();\n debug!(\"<< (Result = {:?})\", r);\n r\n}\n\npub struct Indenter {\n _cannot_construct_outside_of_this_module: (),\n}\n\nimpl Drop for Indenter {\n fn drop(&mut self) { debug!(\"<<\"); }\n}\n\npub fn indenter() -> Indenter {\n debug!(\">>\");\n Indenter { _cannot_construct_outside_of_this_module: () }\n}\n\npub trait MemoizationMap {\n type Key: Clone;\n type Value: Clone;\n\n \/\/\/ If `key` is present in the map, return the value,\n \/\/\/ otherwise invoke `op` and store the value in the map.\n \/\/\/\n \/\/\/ NB: if the receiver is a `DepTrackingMap`, special care is\n \/\/\/ needed in the `op` to ensure that the correct edges are\n \/\/\/ added into the dep graph. See the `DepTrackingMap` impl for\n \/\/\/ more details!\n fn memoize(&self, key: Self::Key, op: OP) -> Self::Value\n where OP: FnOnce() -> Self::Value;\n}\n\nimpl MemoizationMap for RefCell>\n where K: Hash+Eq+Clone, V: Clone, S: BuildHasher\n{\n type Key = K;\n type Value = V;\n\n fn memoize(&self, key: K, op: OP) -> V\n where OP: FnOnce() -> V\n {\n let result = self.borrow().get(&key).cloned();\n match result {\n Some(result) => result,\n None => {\n let result = op();\n self.borrow_mut().insert(key, result.clone());\n result\n }\n }\n }\n}\n\n#[cfg(unix)]\npub fn path2cstr(p: &Path) -> CString {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n let p: &OsStr = p.as_ref();\n CString::new(p.as_bytes()).unwrap()\n}\n#[cfg(windows)]\npub fn path2cstr(p: &Path) -> CString {\n CString::new(p.to_str().unwrap()).unwrap()\n}\n\n\n#[test]\nfn test_to_readable_str() {\n assert_eq!(\"0\", to_readable_str(0));\n assert_eq!(\"1\", to_readable_str(1));\n assert_eq!(\"99\", to_readable_str(99));\n assert_eq!(\"999\", to_readable_str(999));\n assert_eq!(\"1_000\", to_readable_str(1_000));\n assert_eq!(\"1_001\", to_readable_str(1_001));\n assert_eq!(\"999_999\", to_readable_str(999_999));\n assert_eq!(\"1_000_000\", to_readable_str(1_000_000));\n assert_eq!(\"1_234_567\", to_readable_str(1_234_567));\n}\nadd handy helper for Cell, used for perf stats\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\nuse std::cell::{RefCell, Cell};\nuse std::collections::HashMap;\nuse std::ffi::CString;\nuse std::fmt::Debug;\nuse std::hash::{Hash, BuildHasher};\nuse std::iter::repeat;\nuse std::panic;\nuse std::path::Path;\nuse std::time::{Duration, Instant};\n\nuse std::sync::mpsc::{Sender};\nuse syntax_pos::{SpanData};\nuse ty::maps::{QueryMsg};\nuse dep_graph::{DepNode};\nuse proc_macro;\nuse lazy_static;\nuse session::Session;\n\n\/\/ The name of the associated type for `Fn` return types\npub const FN_OUTPUT_NAME: &'static str = \"Output\";\n\n\/\/ Useful type to use with `Result<>` indicate that an error has already\n\/\/ been reported to the user, so no need to continue checking.\n#[derive(Clone, Copy, Debug, RustcEncodable, RustcDecodable)]\npub struct ErrorReported;\n\nthread_local!(static TIME_DEPTH: Cell = Cell::new(0));\n\nlazy_static! {\n static ref DEFAULT_HOOK: Box = {\n let hook = panic::take_hook();\n panic::set_hook(Box::new(panic_hook));\n hook\n };\n}\n\nfn panic_hook(info: &panic::PanicInfo) {\n if !proc_macro::__internal::in_sess() {\n (*DEFAULT_HOOK)(info)\n }\n}\n\npub fn install_panic_hook() {\n lazy_static::initialize(&DEFAULT_HOOK);\n}\n\n\/\/\/ Parameters to the `Dump` variant of type `ProfileQueriesMsg`.\n#[derive(Clone,Debug)]\npub struct ProfQDumpParams {\n \/\/\/ A base path for the files we will dump\n pub path:String,\n \/\/\/ To ensure that the compiler waits for us to finish our dumps\n pub ack:Sender<()>,\n \/\/\/ toggle dumping a log file with every `ProfileQueriesMsg`\n pub dump_profq_msg_log:bool,\n}\n\n\/\/\/ A sequence of these messages induce a trace of query-based incremental compilation.\n\/\/\/ FIXME(matthewhammer): Determine whether we should include cycle detection here or not.\n#[derive(Clone,Debug)]\npub enum ProfileQueriesMsg {\n \/\/\/ begin a timed pass\n TimeBegin(String),\n \/\/\/ end a timed pass\n TimeEnd,\n \/\/\/ begin a task (see dep_graph::graph::with_task)\n TaskBegin(DepNode),\n \/\/\/ end a task\n TaskEnd,\n \/\/\/ begin a new query\n \/\/\/ can't use `Span` because queries are sent to other thread\n QueryBegin(SpanData, QueryMsg),\n \/\/\/ query is satisfied by using an already-known value for the given key\n CacheHit,\n \/\/\/ query requires running a provider; providers may nest, permitting queries to nest.\n ProviderBegin,\n \/\/\/ query is satisfied by a provider terminating with a value\n ProviderEnd,\n \/\/\/ dump a record of the queries to the given path\n Dump(ProfQDumpParams),\n \/\/\/ halt the profiling\/monitoring background thread\n Halt\n}\n\n\/\/\/ If enabled, send a message to the profile-queries thread\npub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) {\n if let Some(s) = sess.profile_channel.borrow().as_ref() {\n s.send(msg).unwrap()\n } else {\n \/\/ Do nothing\n }\n}\n\n\/\/\/ Set channel for profile queries channel\npub fn profq_set_chan(sess: &Session, s: Sender) -> bool {\n let mut channel = sess.profile_channel.borrow_mut();\n if channel.is_none() {\n *channel = Some(s);\n true\n } else {\n false\n }\n}\n\n\/\/\/ Read the current depth of `time()` calls. This is used to\n\/\/\/ encourage indentation across threads.\npub fn time_depth() -> usize {\n TIME_DEPTH.with(|slot| slot.get())\n}\n\n\/\/\/ Set the current depth of `time()` calls. 
The idea is to call\n\/\/\/ `set_time_depth()` with the result from `time_depth()` in the\n\/\/\/ parent thread.\npub fn set_time_depth(depth: usize) {\n TIME_DEPTH.with(|slot| slot.set(depth));\n}\n\npub fn time(sess: &Session, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n time_ext(sess.time_passes(), Some(sess), what, f)\n}\n\npub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n if !do_it { return f(); }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n if let Some(sess) = sess {\n if cfg!(debug_assertions) {\n profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string()))\n }\n }\n let start = Instant::now();\n let rv = f();\n let dur = start.elapsed();\n if let Some(sess) = sess {\n if cfg!(debug_assertions) {\n profq_msg(sess, ProfileQueriesMsg::TimeEnd)\n }\n }\n\n print_time_passes_entry_internal(what, dur);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n\n rv\n}\n\npub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) {\n if !do_it {\n return\n }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n print_time_passes_entry_internal(what, dur);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n}\n\nfn print_time_passes_entry_internal(what: &str, dur: Duration) {\n let indentation = TIME_DEPTH.with(|slot| slot.get());\n\n let mem_string = match get_resident() {\n Some(n) => {\n let mb = n as f64 \/ 1_000_000.0;\n format!(\"; rss: {}MB\", mb.round() as usize)\n }\n None => \"\".to_owned(),\n };\n println!(\"{}time: {}{}\\t{}\",\n repeat(\" \").take(indentation).collect::(),\n duration_to_secs_str(dur),\n mem_string,\n what);\n}\n\n\/\/ Hack up our own formatting for the duration to make it easier for scripts\n\/\/ to parse (always use the same number of decimal places and the same unit).\npub fn duration_to_secs_str(dur: Duration) -> String {\n const NANOS_PER_SEC: f64 = 1_000_000_000.0;\n let secs = dur.as_secs() as f64 +\n dur.subsec_nanos() as f64 \/ NANOS_PER_SEC;\n\n format!(\"{:.3}\", secs)\n}\n\npub fn to_readable_str(mut val: usize) -> String {\n let mut groups = vec![];\n loop {\n let group = val % 1000;\n\n val \/= 1000;\n\n if val == 0 {\n groups.push(format!(\"{}\", group));\n break;\n } else {\n groups.push(format!(\"{:03}\", group));\n }\n }\n\n groups.reverse();\n\n groups.join(\"_\")\n}\n\npub fn record_time(accu: &Cell, f: F) -> T where\n F: FnOnce() -> T,\n{\n let start = Instant::now();\n let rv = f();\n let duration = start.elapsed();\n accu.set(duration + accu.get());\n rv\n}\n\n\/\/ Memory reporting\n#[cfg(unix)]\nfn get_resident() -> Option {\n use std::fs;\n\n let field = 1;\n let contents = fs::read_string(\"\/proc\/self\/statm\").ok()?;\n let s = contents.split_whitespace().nth(field)?;\n let npages = s.parse::().ok()?;\n Some(npages * 4096)\n}\n\n#[cfg(windows)]\nfn get_resident() -> Option {\n type BOOL = i32;\n type DWORD = u32;\n type HANDLE = *mut u8;\n use libc::size_t;\n use std::mem;\n #[repr(C)]\n #[allow(non_snake_case)]\n struct PROCESS_MEMORY_COUNTERS {\n cb: DWORD,\n PageFaultCount: DWORD,\n PeakWorkingSetSize: size_t,\n WorkingSetSize: size_t,\n QuotaPeakPagedPoolUsage: size_t,\n QuotaPagedPoolUsage: size_t,\n QuotaPeakNonPagedPoolUsage: size_t,\n QuotaNonPagedPoolUsage: size_t,\n PagefileUsage: size_t,\n PeakPagefileUsage: size_t,\n }\n type PPROCESS_MEMORY_COUNTERS = *mut PROCESS_MEMORY_COUNTERS;\n #[link(name = \"psapi\")]\n extern \"system\" {\n fn GetCurrentProcess() -> HANDLE;\n fn 
GetProcessMemoryInfo(Process: HANDLE,\n ppsmemCounters: PPROCESS_MEMORY_COUNTERS,\n cb: DWORD) -> BOOL;\n }\n let mut pmc: PROCESS_MEMORY_COUNTERS = unsafe { mem::zeroed() };\n pmc.cb = mem::size_of_val(&pmc) as DWORD;\n match unsafe { GetProcessMemoryInfo(GetCurrentProcess(), &mut pmc, pmc.cb) } {\n 0 => None,\n _ => Some(pmc.WorkingSetSize as usize),\n }\n}\n\npub fn indent(op: F) -> R where\n R: Debug,\n F: FnOnce() -> R,\n{\n \/\/ Use in conjunction with the log post-processor like `src\/etc\/indenter`\n \/\/ to make debug output more readable.\n debug!(\">>\");\n let r = op();\n debug!(\"<< (Result = {:?})\", r);\n r\n}\n\npub struct Indenter {\n _cannot_construct_outside_of_this_module: (),\n}\n\nimpl Drop for Indenter {\n fn drop(&mut self) { debug!(\"<<\"); }\n}\n\npub fn indenter() -> Indenter {\n debug!(\">>\");\n Indenter { _cannot_construct_outside_of_this_module: () }\n}\n\npub trait MemoizationMap {\n type Key: Clone;\n type Value: Clone;\n\n \/\/\/ If `key` is present in the map, return the value,\n \/\/\/ otherwise invoke `op` and store the value in the map.\n \/\/\/\n \/\/\/ NB: if the receiver is a `DepTrackingMap`, special care is\n \/\/\/ needed in the `op` to ensure that the correct edges are\n \/\/\/ added into the dep graph. See the `DepTrackingMap` impl for\n \/\/\/ more details!\n fn memoize(&self, key: Self::Key, op: OP) -> Self::Value\n where OP: FnOnce() -> Self::Value;\n}\n\nimpl MemoizationMap for RefCell>\n where K: Hash+Eq+Clone, V: Clone, S: BuildHasher\n{\n type Key = K;\n type Value = V;\n\n fn memoize(&self, key: K, op: OP) -> V\n where OP: FnOnce() -> V\n {\n let result = self.borrow().get(&key).cloned();\n match result {\n Some(result) => result,\n None => {\n let result = op();\n self.borrow_mut().insert(key, result.clone());\n result\n }\n }\n }\n}\n\n#[cfg(unix)]\npub fn path2cstr(p: &Path) -> CString {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n let p: &OsStr = p.as_ref();\n CString::new(p.as_bytes()).unwrap()\n}\n#[cfg(windows)]\npub fn path2cstr(p: &Path) -> CString {\n CString::new(p.to_str().unwrap()).unwrap()\n}\n\n\n#[test]\nfn test_to_readable_str() {\n assert_eq!(\"0\", to_readable_str(0));\n assert_eq!(\"1\", to_readable_str(1));\n assert_eq!(\"99\", to_readable_str(99));\n assert_eq!(\"999\", to_readable_str(999));\n assert_eq!(\"1_000\", to_readable_str(1_000));\n assert_eq!(\"1_001\", to_readable_str(1_001));\n assert_eq!(\"999_999\", to_readable_str(999_999));\n assert_eq!(\"1_000_000\", to_readable_str(1_000_000));\n assert_eq!(\"1_234_567\", to_readable_str(1_234_567));\n}\n\npub trait CellUsizeExt {\n fn increment(&self);\n}\n\nimpl CellUsizeExt for Cell {\n fn increment(&self) {\n self.set(self.get() + 1);\n }\n}\n<|endoftext|>"} {"text":"Add a simple integration testextern crate specinfra;\n\nuse specinfra::backend;\n\n#[test]\n#[cfg(target_os=\"macos\")]\nfn it_works() {\n let b = backend::direct::Direct::new();\n let s = specinfra::new(&b).unwrap();\n let file = s.file(\"\/etc\/passwd\");\n\n assert_eq!(0o644, file.mode().unwrap())\n}\n<|endoftext|>"} {"text":"Guessing game.use std::io::stdin;\nuse std::rand;\nuse std::rand::RngUtil;\nuse std::num::abs;\nuse std::int;\n\nfn generate_secret_number() -> int {\n return abs(rand::rng().gen::() % 100) + 1;\n}\n\nfn process_guess(secret: int, guess: int, guesses: &mut int) {\n println(fmt!(\"You guessed: %d\", guess));\n\n if guess > secret {\n println(\"Your guess was too high!\");\n } else if guess < secret {\n println(\"Your guess was too low!\");\n } else if guess == 
secret {\n println(\"You got it!\");\n *guesses = 4;\n }\n\n *guesses += 1;\n}\n\nfn main() {\n let secret = generate_secret_number();\n\n let guesses = @mut 1;\n\n println(\"--- N U M B E R - G A M E ---\");\n println(\"\");\n println(\"Guess a number from 1-100 (you get five tries):\");\n\n loop {\n println(fmt!(\"Guess #%d\", *guesses));\n\n let input = stdin().read_line();\n\n match int::from_str(input) {\n Some(number) => process_guess(secret, number, guesses),\n None => println(\"Hey, put in a number.\")\n }\n if *guesses == 5 { break; }\n }\n\n println(\"Done!\");\n}\n<|endoftext|>"} {"text":"Add regression test for issue #17441\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n let _foo = &[1u, 2] as [uint];\n \/\/~^ ERROR cast to unsized type: `&[uint, .. 2]` as `[uint]`\n \/\/~^^ NOTE consider using an implicit coercion to `&[uint]` instead\n let _bar = box 1u as std::fmt::Show;\n \/\/~^ ERROR cast to unsized type: `Box` as `core::fmt::Show`\n \/\/~^^ NOTE did you mean `Box`?\n let _baz = 1u as std::fmt::Show;\n \/\/~^ ERROR cast to unsized type: `uint` as `core::fmt::Show`\n \/\/~^^ NOTE consider using a box or reference as appropriate\n let _quux = [1u, 2] as [uint];\n \/\/~^ ERROR cast to unsized type: `[uint, .. 2]` as `[uint]`\n \/\/~^^ NOTE consider using a box or reference as appropriate\n}\n<|endoftext|>"} {"text":"use crate::beacon_processor::{\n BeaconProcessor, WorkEvent as BeaconWorkEvent, MAX_WORK_EVENT_QUEUE_LEN,\n};\nuse crate::service::{NetworkMessage, RequestId};\nuse crate::status::status_message;\nuse crate::sync::manager::RequestId as SyncId;\nuse crate::sync::SyncMessage;\nuse beacon_chain::{BeaconChain, BeaconChainTypes};\nuse lighthouse_network::rpc::*;\nuse lighthouse_network::{\n Client, MessageId, NetworkGlobals, PeerId, PeerRequestId, Request, Response,\n};\nuse slog::{debug, error, o, trace, warn};\nuse std::cmp;\nuse std::sync::Arc;\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\nuse store::SyncCommitteeMessage;\nuse tokio::sync::mpsc;\nuse types::{\n Attestation, AttesterSlashing, EthSpec, ProposerSlashing, SignedAggregateAndProof,\n SignedBeaconBlock, SignedContributionAndProof, SignedVoluntaryExit, SubnetId, SyncSubnetId,\n};\n\n\/\/\/ Processes validated messages from the network. 
It relays necessary data to the syncing thread\n\/\/\/ and processes blocks from the pubsub network.\npub struct Processor {\n \/\/\/ A reference to the underlying beacon chain.\n chain: Arc>,\n \/\/\/ A channel to the syncing thread.\n sync_send: mpsc::UnboundedSender>,\n \/\/\/ A network context to return and handle RPC requests.\n network: HandlerNetworkContext,\n \/\/\/ A multi-threaded, non-blocking processor for applying messages to the beacon chain.\n beacon_processor_send: mpsc::Sender>,\n \/\/\/ The `RPCHandler` logger.\n log: slog::Logger,\n}\n\nimpl Processor {\n \/\/\/ Instantiate a `Processor` instance\n pub fn new(\n executor: task_executor::TaskExecutor,\n beacon_chain: Arc>,\n network_globals: Arc>,\n network_send: mpsc::UnboundedSender>,\n log: &slog::Logger,\n ) -> Self {\n let sync_logger = log.new(o!(\"service\"=> \"sync\"));\n let (beacon_processor_send, beacon_processor_receive) =\n mpsc::channel(MAX_WORK_EVENT_QUEUE_LEN);\n\n \/\/ spawn the sync thread\n let sync_send = crate::sync::manager::spawn(\n executor.clone(),\n beacon_chain.clone(),\n network_globals.clone(),\n network_send.clone(),\n beacon_processor_send.clone(),\n sync_logger,\n );\n\n BeaconProcessor {\n beacon_chain: Arc::downgrade(&beacon_chain),\n network_tx: network_send.clone(),\n sync_tx: sync_send.clone(),\n network_globals,\n executor,\n max_workers: cmp::max(1, num_cpus::get()),\n current_workers: 0,\n importing_blocks: Default::default(),\n log: log.clone(),\n }\n .spawn_manager(beacon_processor_receive, None);\n\n Processor {\n chain: beacon_chain,\n sync_send,\n network: HandlerNetworkContext::new(network_send, log.clone()),\n beacon_processor_send,\n log: log.new(o!(\"service\" => \"router\")),\n }\n }\n\n fn send_to_sync(&mut self, message: SyncMessage) {\n self.sync_send.send(message).unwrap_or_else(|e| {\n warn!(\n self.log,\n \"Could not send message to the sync service\";\n \"error\" => %e,\n )\n });\n }\n\n \/\/\/ Handle a peer disconnect.\n \/\/\/\n \/\/\/ Removes the peer from the manager.\n pub fn on_disconnect(&mut self, peer_id: PeerId) {\n self.send_to_sync(SyncMessage::Disconnect(peer_id));\n }\n\n \/\/\/ An error occurred during an RPC request. 
The state is maintained by the sync manager, so\n \/\/\/ this function notifies the sync manager of the error.\n pub fn on_rpc_error(&mut self, peer_id: PeerId, request_id: RequestId) {\n \/\/ Check if the failed RPC belongs to sync\n if let RequestId::Sync(request_id) = request_id {\n self.send_to_sync(SyncMessage::RpcError {\n peer_id,\n request_id,\n });\n }\n }\n\n \/\/\/ Sends a `Status` message to the peer.\n \/\/\/\n \/\/\/ Called when we first connect to a peer, or when the PeerManager determines we need to\n \/\/\/ re-status.\n pub fn send_status(&mut self, peer_id: PeerId) {\n let status_message = status_message(&self.chain);\n debug!(self.log, \"Sending Status Request\"; \"peer\" => %peer_id, &status_message);\n self.network\n .send_processor_request(peer_id, Request::Status(status_message));\n }\n\n \/\/\/ Handle a `Status` request.\n \/\/\/\n \/\/\/ Processes the `Status` from the remote peer and sends back our `Status`.\n pub fn on_status_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n status: StatusMessage,\n ) {\n debug!(self.log, \"Received Status Request\"; \"peer_id\" => %peer_id, &status);\n\n \/\/ Say status back.\n self.network.send_response(\n peer_id,\n Response::Status(status_message(&self.chain)),\n request_id,\n );\n\n self.send_beacon_processor_work(BeaconWorkEvent::status_message(peer_id, status))\n }\n\n \/\/\/ Process a `Status` response from a peer.\n pub fn on_status_response(&mut self, peer_id: PeerId, status: StatusMessage) {\n debug!(self.log, \"Received Status Response\"; \"peer_id\" => %peer_id, &status);\n self.send_beacon_processor_work(BeaconWorkEvent::status_message(peer_id, status))\n }\n\n \/\/\/ Handle a `BlocksByRoot` request from the peer.\n pub fn on_blocks_by_root_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n request: BlocksByRootRequest,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::blocks_by_roots_request(\n peer_id, request_id, request,\n ))\n }\n\n \/\/\/ Handle a `BlocksByRange` request from the peer.\n pub fn on_blocks_by_range_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n req: BlocksByRangeRequest,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::blocks_by_range_request(\n peer_id, request_id, req,\n ))\n }\n\n \/\/\/ Handle a `BlocksByRange` response from the peer.\n \/\/\/ A `beacon_block` behaves as a stream which is terminated on a `None` response.\n pub fn on_blocks_by_range_response(\n &mut self,\n peer_id: PeerId,\n request_id: RequestId,\n beacon_block: Option>>,\n ) {\n let request_id = match request_id {\n RequestId::Sync(sync_id) => match sync_id {\n SyncId::SingleBlock { .. } | SyncId::ParentLookup { .. } => {\n unreachable!(\"Block lookups do not request BBRange requests\")\n }\n id @ (SyncId::BackFillSync { .. } | SyncId::RangeSync { .. }) => id,\n },\n RequestId::Router => unreachable!(\"All BBRange requests belong to sync\"),\n };\n\n trace!(\n self.log,\n \"Received BlocksByRange Response\";\n \"peer\" => %peer_id,\n );\n\n self.send_to_sync(SyncMessage::RpcBlock {\n peer_id,\n request_id,\n beacon_block,\n seen_timestamp: timestamp_now(),\n });\n }\n\n \/\/\/ Handle a `BlocksByRoot` response from the peer.\n pub fn on_blocks_by_root_response(\n &mut self,\n peer_id: PeerId,\n request_id: RequestId,\n beacon_block: Option>>,\n ) {\n let request_id = match request_id {\n RequestId::Sync(sync_id) => match sync_id {\n id @ (SyncId::SingleBlock { .. } | SyncId::ParentLookup { .. }) => id,\n SyncId::BackFillSync { .. 
} | SyncId::RangeSync { .. } => {\n unreachable!(\"Batch syncing do not request BBRoot requests\")\n }\n },\n RequestId::Router => unreachable!(\"All BBRoot requests belong to sync\"),\n };\n\n trace!(\n self.log,\n \"Received BlocksByRoot Response\";\n \"peer\" => %peer_id,\n );\n self.send_to_sync(SyncMessage::RpcBlock {\n peer_id,\n request_id,\n beacon_block,\n seen_timestamp: timestamp_now(),\n });\n }\n\n \/\/\/ Process a gossip message declaring a new block.\n \/\/\/\n \/\/\/ Attempts to apply to block to the beacon chain. May queue the block for later processing.\n \/\/\/\n \/\/\/ Returns a `bool` which, if `true`, indicates we should forward the block to our peers.\n pub fn on_block_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n peer_client: Client,\n block: Arc>,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_beacon_block(\n message_id,\n peer_id,\n peer_client,\n block,\n timestamp_now(),\n ))\n }\n\n pub fn on_unaggregated_attestation_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n unaggregated_attestation: Attestation,\n subnet_id: SubnetId,\n should_process: bool,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::unaggregated_attestation(\n message_id,\n peer_id,\n unaggregated_attestation,\n subnet_id,\n should_process,\n timestamp_now(),\n ))\n }\n\n pub fn on_aggregated_attestation_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n aggregate: SignedAggregateAndProof,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::aggregated_attestation(\n message_id,\n peer_id,\n aggregate,\n timestamp_now(),\n ))\n }\n\n pub fn on_voluntary_exit_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n voluntary_exit: Box,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_voluntary_exit(\n message_id,\n peer_id,\n voluntary_exit,\n ))\n }\n\n pub fn on_proposer_slashing_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n proposer_slashing: Box,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_proposer_slashing(\n message_id,\n peer_id,\n proposer_slashing,\n ))\n }\n\n pub fn on_attester_slashing_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n attester_slashing: Box>,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_attester_slashing(\n message_id,\n peer_id,\n attester_slashing,\n ))\n }\n\n pub fn on_sync_committee_signature_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n sync_signature: SyncCommitteeMessage,\n subnet_id: SyncSubnetId,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_sync_signature(\n message_id,\n peer_id,\n sync_signature,\n subnet_id,\n timestamp_now(),\n ))\n }\n\n pub fn on_sync_committee_contribution_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n sync_contribution: SignedContributionAndProof,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_sync_contribution(\n message_id,\n peer_id,\n sync_contribution,\n timestamp_now(),\n ))\n }\n\n fn send_beacon_processor_work(&mut self, work: BeaconWorkEvent) {\n self.beacon_processor_send\n .try_send(work)\n .unwrap_or_else(|e| {\n let work_type = match &e {\n mpsc::error::TrySendError::Closed(work)\n | mpsc::error::TrySendError::Full(work) => work.work_type(),\n };\n error!(&self.log, \"Unable to send message to the beacon processor\";\n \"error\" => %e, \"type\" => work_type)\n })\n }\n}\n\n\/\/\/ Wraps a Network Channel to employ various RPC related network functionality for 
the\n\/\/\/ processor.\n#[derive(Clone)]\npub struct HandlerNetworkContext {\n \/\/\/ The network channel to relay messages to the Network service.\n network_send: mpsc::UnboundedSender>,\n \/\/\/ Logger for the `NetworkContext`.\n log: slog::Logger,\n}\n\nimpl HandlerNetworkContext {\n pub fn new(network_send: mpsc::UnboundedSender>, log: slog::Logger) -> Self {\n Self { network_send, log }\n }\n\n \/\/\/ Sends a message to the network task.\n fn inform_network(&mut self, msg: NetworkMessage) {\n self.network_send.send(msg).unwrap_or_else(\n |e| warn!(self.log, \"Could not send message to the network service\"; \"error\" => %e),\n )\n }\n\n \/\/\/ Sends a request to the network task.\n pub fn send_processor_request(&mut self, peer_id: PeerId, request: Request) {\n self.inform_network(NetworkMessage::SendRequest {\n peer_id,\n request_id: RequestId::Router,\n request,\n })\n }\n\n \/\/\/ Sends a response to the network task.\n pub fn send_response(&mut self, peer_id: PeerId, response: Response, id: PeerRequestId) {\n self.inform_network(NetworkMessage::SendResponse {\n peer_id,\n id,\n response,\n })\n }\n\n \/\/\/ Sends an error response to the network task.\n pub fn _send_error_response(\n &mut self,\n peer_id: PeerId,\n id: PeerRequestId,\n error: RPCResponseErrorCode,\n reason: String,\n ) {\n self.inform_network(NetworkMessage::SendErrorResponse {\n peer_id,\n error,\n id,\n reason,\n })\n }\n}\n\nfn timestamp_now() -> Duration {\n SystemTime::now()\n .duration_since(UNIX_EPOCH)\n .unwrap_or_else(|_| Duration::from_secs(0))\n}\nRemove unused method in HandlerNetworkContext (#3299)use crate::beacon_processor::{\n BeaconProcessor, WorkEvent as BeaconWorkEvent, MAX_WORK_EVENT_QUEUE_LEN,\n};\nuse crate::service::{NetworkMessage, RequestId};\nuse crate::status::status_message;\nuse crate::sync::manager::RequestId as SyncId;\nuse crate::sync::SyncMessage;\nuse beacon_chain::{BeaconChain, BeaconChainTypes};\nuse lighthouse_network::rpc::*;\nuse lighthouse_network::{\n Client, MessageId, NetworkGlobals, PeerId, PeerRequestId, Request, Response,\n};\nuse slog::{debug, error, o, trace, warn};\nuse std::cmp;\nuse std::sync::Arc;\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\nuse store::SyncCommitteeMessage;\nuse tokio::sync::mpsc;\nuse types::{\n Attestation, AttesterSlashing, EthSpec, ProposerSlashing, SignedAggregateAndProof,\n SignedBeaconBlock, SignedContributionAndProof, SignedVoluntaryExit, SubnetId, SyncSubnetId,\n};\n\n\/\/\/ Processes validated messages from the network. 
It relays necessary data to the syncing thread\n\/\/\/ and processes blocks from the pubsub network.\npub struct Processor {\n \/\/\/ A reference to the underlying beacon chain.\n chain: Arc>,\n \/\/\/ A channel to the syncing thread.\n sync_send: mpsc::UnboundedSender>,\n \/\/\/ A network context to return and handle RPC requests.\n network: HandlerNetworkContext,\n \/\/\/ A multi-threaded, non-blocking processor for applying messages to the beacon chain.\n beacon_processor_send: mpsc::Sender>,\n \/\/\/ The `RPCHandler` logger.\n log: slog::Logger,\n}\n\nimpl Processor {\n \/\/\/ Instantiate a `Processor` instance\n pub fn new(\n executor: task_executor::TaskExecutor,\n beacon_chain: Arc>,\n network_globals: Arc>,\n network_send: mpsc::UnboundedSender>,\n log: &slog::Logger,\n ) -> Self {\n let sync_logger = log.new(o!(\"service\"=> \"sync\"));\n let (beacon_processor_send, beacon_processor_receive) =\n mpsc::channel(MAX_WORK_EVENT_QUEUE_LEN);\n\n \/\/ spawn the sync thread\n let sync_send = crate::sync::manager::spawn(\n executor.clone(),\n beacon_chain.clone(),\n network_globals.clone(),\n network_send.clone(),\n beacon_processor_send.clone(),\n sync_logger,\n );\n\n BeaconProcessor {\n beacon_chain: Arc::downgrade(&beacon_chain),\n network_tx: network_send.clone(),\n sync_tx: sync_send.clone(),\n network_globals,\n executor,\n max_workers: cmp::max(1, num_cpus::get()),\n current_workers: 0,\n importing_blocks: Default::default(),\n log: log.clone(),\n }\n .spawn_manager(beacon_processor_receive, None);\n\n Processor {\n chain: beacon_chain,\n sync_send,\n network: HandlerNetworkContext::new(network_send, log.clone()),\n beacon_processor_send,\n log: log.new(o!(\"service\" => \"router\")),\n }\n }\n\n fn send_to_sync(&mut self, message: SyncMessage) {\n self.sync_send.send(message).unwrap_or_else(|e| {\n warn!(\n self.log,\n \"Could not send message to the sync service\";\n \"error\" => %e,\n )\n });\n }\n\n \/\/\/ Handle a peer disconnect.\n \/\/\/\n \/\/\/ Removes the peer from the manager.\n pub fn on_disconnect(&mut self, peer_id: PeerId) {\n self.send_to_sync(SyncMessage::Disconnect(peer_id));\n }\n\n \/\/\/ An error occurred during an RPC request. 
The state is maintained by the sync manager, so\n \/\/\/ this function notifies the sync manager of the error.\n pub fn on_rpc_error(&mut self, peer_id: PeerId, request_id: RequestId) {\n \/\/ Check if the failed RPC belongs to sync\n if let RequestId::Sync(request_id) = request_id {\n self.send_to_sync(SyncMessage::RpcError {\n peer_id,\n request_id,\n });\n }\n }\n\n \/\/\/ Sends a `Status` message to the peer.\n \/\/\/\n \/\/\/ Called when we first connect to a peer, or when the PeerManager determines we need to\n \/\/\/ re-status.\n pub fn send_status(&mut self, peer_id: PeerId) {\n let status_message = status_message(&self.chain);\n debug!(self.log, \"Sending Status Request\"; \"peer\" => %peer_id, &status_message);\n self.network\n .send_processor_request(peer_id, Request::Status(status_message));\n }\n\n \/\/\/ Handle a `Status` request.\n \/\/\/\n \/\/\/ Processes the `Status` from the remote peer and sends back our `Status`.\n pub fn on_status_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n status: StatusMessage,\n ) {\n debug!(self.log, \"Received Status Request\"; \"peer_id\" => %peer_id, &status);\n\n \/\/ Say status back.\n self.network.send_response(\n peer_id,\n Response::Status(status_message(&self.chain)),\n request_id,\n );\n\n self.send_beacon_processor_work(BeaconWorkEvent::status_message(peer_id, status))\n }\n\n \/\/\/ Process a `Status` response from a peer.\n pub fn on_status_response(&mut self, peer_id: PeerId, status: StatusMessage) {\n debug!(self.log, \"Received Status Response\"; \"peer_id\" => %peer_id, &status);\n self.send_beacon_processor_work(BeaconWorkEvent::status_message(peer_id, status))\n }\n\n \/\/\/ Handle a `BlocksByRoot` request from the peer.\n pub fn on_blocks_by_root_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n request: BlocksByRootRequest,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::blocks_by_roots_request(\n peer_id, request_id, request,\n ))\n }\n\n \/\/\/ Handle a `BlocksByRange` request from the peer.\n pub fn on_blocks_by_range_request(\n &mut self,\n peer_id: PeerId,\n request_id: PeerRequestId,\n req: BlocksByRangeRequest,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::blocks_by_range_request(\n peer_id, request_id, req,\n ))\n }\n\n \/\/\/ Handle a `BlocksByRange` response from the peer.\n \/\/\/ A `beacon_block` behaves as a stream which is terminated on a `None` response.\n pub fn on_blocks_by_range_response(\n &mut self,\n peer_id: PeerId,\n request_id: RequestId,\n beacon_block: Option>>,\n ) {\n let request_id = match request_id {\n RequestId::Sync(sync_id) => match sync_id {\n SyncId::SingleBlock { .. } | SyncId::ParentLookup { .. } => {\n unreachable!(\"Block lookups do not request BBRange requests\")\n }\n id @ (SyncId::BackFillSync { .. } | SyncId::RangeSync { .. }) => id,\n },\n RequestId::Router => unreachable!(\"All BBRange requests belong to sync\"),\n };\n\n trace!(\n self.log,\n \"Received BlocksByRange Response\";\n \"peer\" => %peer_id,\n );\n\n self.send_to_sync(SyncMessage::RpcBlock {\n peer_id,\n request_id,\n beacon_block,\n seen_timestamp: timestamp_now(),\n });\n }\n\n \/\/\/ Handle a `BlocksByRoot` response from the peer.\n pub fn on_blocks_by_root_response(\n &mut self,\n peer_id: PeerId,\n request_id: RequestId,\n beacon_block: Option>>,\n ) {\n let request_id = match request_id {\n RequestId::Sync(sync_id) => match sync_id {\n id @ (SyncId::SingleBlock { .. } | SyncId::ParentLookup { .. }) => id,\n SyncId::BackFillSync { .. 
} | SyncId::RangeSync { .. } => {\n unreachable!(\"Batch syncing do not request BBRoot requests\")\n }\n },\n RequestId::Router => unreachable!(\"All BBRoot requests belong to sync\"),\n };\n\n trace!(\n self.log,\n \"Received BlocksByRoot Response\";\n \"peer\" => %peer_id,\n );\n self.send_to_sync(SyncMessage::RpcBlock {\n peer_id,\n request_id,\n beacon_block,\n seen_timestamp: timestamp_now(),\n });\n }\n\n \/\/\/ Process a gossip message declaring a new block.\n \/\/\/\n \/\/\/ Attempts to apply to block to the beacon chain. May queue the block for later processing.\n \/\/\/\n \/\/\/ Returns a `bool` which, if `true`, indicates we should forward the block to our peers.\n pub fn on_block_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n peer_client: Client,\n block: Arc>,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_beacon_block(\n message_id,\n peer_id,\n peer_client,\n block,\n timestamp_now(),\n ))\n }\n\n pub fn on_unaggregated_attestation_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n unaggregated_attestation: Attestation,\n subnet_id: SubnetId,\n should_process: bool,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::unaggregated_attestation(\n message_id,\n peer_id,\n unaggregated_attestation,\n subnet_id,\n should_process,\n timestamp_now(),\n ))\n }\n\n pub fn on_aggregated_attestation_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n aggregate: SignedAggregateAndProof,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::aggregated_attestation(\n message_id,\n peer_id,\n aggregate,\n timestamp_now(),\n ))\n }\n\n pub fn on_voluntary_exit_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n voluntary_exit: Box,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_voluntary_exit(\n message_id,\n peer_id,\n voluntary_exit,\n ))\n }\n\n pub fn on_proposer_slashing_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n proposer_slashing: Box,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_proposer_slashing(\n message_id,\n peer_id,\n proposer_slashing,\n ))\n }\n\n pub fn on_attester_slashing_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n attester_slashing: Box>,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_attester_slashing(\n message_id,\n peer_id,\n attester_slashing,\n ))\n }\n\n pub fn on_sync_committee_signature_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n sync_signature: SyncCommitteeMessage,\n subnet_id: SyncSubnetId,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_sync_signature(\n message_id,\n peer_id,\n sync_signature,\n subnet_id,\n timestamp_now(),\n ))\n }\n\n pub fn on_sync_committee_contribution_gossip(\n &mut self,\n message_id: MessageId,\n peer_id: PeerId,\n sync_contribution: SignedContributionAndProof,\n ) {\n self.send_beacon_processor_work(BeaconWorkEvent::gossip_sync_contribution(\n message_id,\n peer_id,\n sync_contribution,\n timestamp_now(),\n ))\n }\n\n fn send_beacon_processor_work(&mut self, work: BeaconWorkEvent) {\n self.beacon_processor_send\n .try_send(work)\n .unwrap_or_else(|e| {\n let work_type = match &e {\n mpsc::error::TrySendError::Closed(work)\n | mpsc::error::TrySendError::Full(work) => work.work_type(),\n };\n error!(&self.log, \"Unable to send message to the beacon processor\";\n \"error\" => %e, \"type\" => work_type)\n })\n }\n}\n\n\/\/\/ Wraps a Network Channel to employ various RPC related network functionality for 
the\n\/\/\/ processor.\n#[derive(Clone)]\npub struct HandlerNetworkContext {\n \/\/\/ The network channel to relay messages to the Network service.\n network_send: mpsc::UnboundedSender>,\n \/\/\/ Logger for the `NetworkContext`.\n log: slog::Logger,\n}\n\nimpl HandlerNetworkContext {\n pub fn new(network_send: mpsc::UnboundedSender>, log: slog::Logger) -> Self {\n Self { network_send, log }\n }\n\n \/\/\/ Sends a message to the network task.\n fn inform_network(&mut self, msg: NetworkMessage) {\n self.network_send.send(msg).unwrap_or_else(\n |e| warn!(self.log, \"Could not send message to the network service\"; \"error\" => %e),\n )\n }\n\n \/\/\/ Sends a request to the network task.\n pub fn send_processor_request(&mut self, peer_id: PeerId, request: Request) {\n self.inform_network(NetworkMessage::SendRequest {\n peer_id,\n request_id: RequestId::Router,\n request,\n })\n }\n\n \/\/\/ Sends a response to the network task.\n pub fn send_response(&mut self, peer_id: PeerId, response: Response, id: PeerRequestId) {\n self.inform_network(NetworkMessage::SendResponse {\n peer_id,\n id,\n response,\n })\n }\n}\n\nfn timestamp_now() -> Duration {\n SystemTime::now()\n .duration_since(UNIX_EPOCH)\n .unwrap_or_else(|_| Duration::from_secs(0))\n}\n<|endoftext|>"} {"text":" removed filter for testing<|endoftext|>"} {"text":"an empy message<|endoftext|>"} {"text":"Adding the chapter on the pointer's dereferencing.<|endoftext|>"} {"text":"pub mod ast;\npub mod symbols;\n\nuse std::fs::DirBuilder;\nuse std::path::PathBuf;\n\npub struct Config {\n output_dir: PathBuf,\n pub dump_tokens: bool,\n pub dump_ast: bool,\n pub dump_symbol_table: bool,\n}\n\nimpl Config {\n pub fn new(output_dir: &str) -> Config {\n let output_dir = PathBuf::from(output_dir);\n\n \/\/ create output directory\n DirBuilder::new().create(output_dir.as_path()).expect(\"Diagnostics Directory\");\n\n Config {\n output_dir: PathBuf::from(output_dir),\n dump_tokens: true,\n dump_ast: true,\n dump_symbol_table: true,\n }\n }\n\n pub fn output_dir(&self) -> PathBuf {\n self.output_dir.clone()\n }\n}\n\nimpl Default for Config {\n fn default() -> Config {\n Config::new(\"diag\")\n }\n}\n\npub mod dot {\n use std::path::Path;\n use std::process::Command;\n\n \/\/\/ Try to run the `dot` command, fails silently.\n pub fn run(filepath: &Path) {\n Command::new(\"dot\")\n .arg(\"-Tpng\")\n .arg(\"-O\")\n .arg(filepath.to_str().unwrap())\n .spawn()\n .ok();\n }\n}\nDiag: Do not explode if diagnostics output dir existspub mod ast;\npub mod symbols;\n\nuse std::fs::DirBuilder;\nuse std::path::PathBuf;\n\npub struct Config {\n output_dir: PathBuf,\n pub dump_tokens: bool,\n pub dump_ast: bool,\n pub dump_symbol_table: bool,\n}\n\nimpl Config {\n pub fn new(output_dir: &str) -> Config {\n let output_dir = PathBuf::from(output_dir);\n\n \/\/ create output directory\n DirBuilder::new()\n .recursive(true)\n .create(output_dir.as_path())\n .expect(\"Diagnostics Directory\");\n\n Config {\n output_dir: PathBuf::from(output_dir),\n dump_tokens: true,\n dump_ast: true,\n dump_symbol_table: true,\n }\n }\n\n pub fn output_dir(&self) -> PathBuf {\n self.output_dir.clone()\n }\n}\n\nimpl Default for Config {\n fn default() -> Config {\n Config::new(\"diag\")\n }\n}\n\npub mod dot {\n use std::path::Path;\n use std::process::Command;\n\n \/\/\/ Try to run the `dot` command, fails silently.\n pub fn run(filepath: &Path) {\n Command::new(\"dot\")\n .arg(\"-Tpng\")\n .arg(\"-O\")\n .arg(filepath.to_str().unwrap())\n .spawn()\n .ok();\n }\n}\n<|endoftext|>"} 
{"text":"zguide: add version.rsextern mod zmq;\n\nfn main() {\n let (major, minor, patch) = zmq::version();\n printfln!(\"Current 0MQ version is %d.%d.%d\", major, minor, patch);\n}\n<|endoftext|>"} {"text":"Remove calls to exit() and replace them with error propagation up to main()<|endoftext|>"} {"text":"\/\/! Build indexes as appropriate for the markdown pass\n\nexport mk_pass;\n\nfn mk_pass(config: config::config) -> pass {\n {\n name: \"markdown_index\",\n f: fn~(srv: astsrv::srv, doc: doc::doc) -> doc::doc {\n run(srv, doc, config)\n }\n }\n}\n\nfn run(\n _srv: astsrv::srv,\n doc: doc::doc,\n config: config::config\n) -> doc::doc {\n let fold = fold::fold({\n fold_mod: fold_mod,\n fold_nmod: fold_nmod\n with *fold::default_any_fold(config)\n });\n fold.fold_doc(fold, doc)\n}\n\nfn fold_mod(\n fold: fold::fold,\n doc: doc::moddoc\n) -> doc::moddoc {\n\n let doc = fold::default_any_fold_mod(fold, doc);\n\n {\n index: some(build_mod_index(doc, fold.ctxt))\n with doc\n }\n}\n\nfn fold_nmod(\n fold: fold::fold,\n doc: doc::nmoddoc\n) -> doc::nmoddoc {\n\n let doc = fold::default_any_fold_nmod(fold, doc);\n\n {\n index: some(build_nmod_index(doc, fold.ctxt))\n with doc\n }\n}\n\nfn build_mod_index(\n doc: doc::moddoc,\n config: config::config\n) -> doc::index {\n {\n entries: par::anymap(doc.items, |doc| {\n item_to_entry(doc, config)\n })\n }\n}\n\nfn build_nmod_index(\n doc: doc::nmoddoc,\n config: config::config\n) -> doc::index {\n {\n entries: par::anymap(doc.fns, |doc| {\n item_to_entry(doc::fntag(doc), config)\n })\n }\n}\n\nfn item_to_entry(\n doc: doc::itemtag,\n config: config::config\n) -> doc::index_entry {\n let link = alt doc {\n doc::modtag(_) | doc::nmodtag(_)\n if config.output_style == config::doc_per_mod {\n markdown_writer::make_filename(config, doc::itempage(doc))\n }\n _ {\n \"#\" + pandoc_header_id(markdown_pass::header_text(doc))\n }\n };\n\n {\n kind: markdown_pass::header_kind(doc),\n name: markdown_pass::header_name(doc),\n brief: doc.brief(),\n link: link\n }\n}\n\nfn pandoc_header_id(header: str) -> str {\n\n \/\/ http:\/\/johnmacfarlane.net\/pandoc\/README.html#headers\n\n let header = remove_formatting(header);\n let header = remove_punctuation(header);\n let header = replace_with_hyphens(header);\n let header = convert_to_lowercase(header);\n let header = remove_up_to_first_letter(header);\n let header = maybe_use_section_id(header);\n ret header;\n\n fn remove_formatting(s: str) -> str {\n str::replace(s, \"`\", \"\")\n }\n fn remove_punctuation(s: str) -> str {\n let s = str::replace(s, \"<\", \"\");\n let s = str::replace(s, \">\", \"\");\n let s = str::replace(s, \"[\", \"\");\n let s = str::replace(s, \"]\", \"\");\n let s = str::replace(s, \"(\", \"\");\n let s = str::replace(s, \")\", \"\");\n let s = str::replace(s, \"@\", \"\");\n let s = str::replace(s, \"~\", \"\");\n let s = str::replace(s, \"\/\", \"\");\n let s = str::replace(s, \":\", \"\");\n let s = str::replace(s, \"&\", \"\");\n ret s;\n }\n fn replace_with_hyphens(s: str) -> str {\n str::replace(s, \" \", \"-\")\n }\n fn convert_to_lowercase(s: str) -> str { str::to_lower(s) }\n fn remove_up_to_first_letter(s: str) -> str { s }\n fn maybe_use_section_id(s: str) -> str { s }\n}\n\n#[test]\nfn should_remove_punctuation_from_headers() {\n assert pandoc_header_id(\"impl foo of bar\") == \"impl-foo-of-bara\";\n assert pandoc_header_id(\"fn@(~[~A])\") == \"fna\";\n assert pandoc_header_id(\"impl of num::num for int\")\n == \"impl-of-numnum-for-int\";\n assert pandoc_header_id(\"impl of num::num 
for int\/&\")\n == \"impl-of-numnum-for-int\";\n}\n\n#[test]\nfn should_index_mod_contents() {\n let doc = test::mk_doc(\n config::doc_per_crate,\n \"mod a { } fn b() { }\"\n );\n assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Module\",\n name: \"a\",\n brief: none,\n link: \"#module-a\"\n };\n assert option::get(doc.cratemod().index).entries[1] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[test]\nfn should_index_mod_contents_multi_page() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"mod a { } fn b() { }\"\n );\n assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Module\",\n name: \"a\",\n brief: none,\n link: \"a.html\"\n };\n assert option::get(doc.cratemod().index).entries[1] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[test]\nfn should_index_foreign_mod_pages() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"extern mod a { }\"\n );\n assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Foreign module\",\n name: \"a\",\n brief: none,\n link: \"a.html\"\n };\n}\n\n#[test]\nfn should_add_brief_desc_to_index() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"#[doc = \\\"test\\\"] mod a { }\"\n );\n assert option::get(doc.cratemod().index).entries[0].brief == some(\"test\");\n}\n\n#[test]\nfn should_index_foreign_mod_contents() {\n let doc = test::mk_doc(\n config::doc_per_crate,\n \"extern mod a { fn b(); }\"\n );\n assert option::get(doc.cratemod().nmods()[0].index).entries[0] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[cfg(test)]\nmod test {\n fn mk_doc(output_style: config::output_style, source: str) -> doc::doc {\n do astsrv::from_str(source) |srv| {\n let config = {\n output_style: output_style\n with config::default_config(\"whatever\")\n };\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass().f(srv, doc);\n let doc = desc_to_brief_pass::mk_pass().f(srv, doc);\n let doc = path_pass::mk_pass().f(srv, doc);\n run(srv, doc, config)\n }\n }\n}\nrustdoc: Filter out another invalid id character\/\/! 
Build indexes as appropriate for the markdown pass\n\nexport mk_pass;\n\nfn mk_pass(config: config::config) -> pass {\n {\n name: \"markdown_index\",\n f: fn~(srv: astsrv::srv, doc: doc::doc) -> doc::doc {\n run(srv, doc, config)\n }\n }\n}\n\nfn run(\n _srv: astsrv::srv,\n doc: doc::doc,\n config: config::config\n) -> doc::doc {\n let fold = fold::fold({\n fold_mod: fold_mod,\n fold_nmod: fold_nmod\n with *fold::default_any_fold(config)\n });\n fold.fold_doc(fold, doc)\n}\n\nfn fold_mod(\n fold: fold::fold,\n doc: doc::moddoc\n) -> doc::moddoc {\n\n let doc = fold::default_any_fold_mod(fold, doc);\n\n {\n index: some(build_mod_index(doc, fold.ctxt))\n with doc\n }\n}\n\nfn fold_nmod(\n fold: fold::fold,\n doc: doc::nmoddoc\n) -> doc::nmoddoc {\n\n let doc = fold::default_any_fold_nmod(fold, doc);\n\n {\n index: some(build_nmod_index(doc, fold.ctxt))\n with doc\n }\n}\n\nfn build_mod_index(\n doc: doc::moddoc,\n config: config::config\n) -> doc::index {\n {\n entries: par::anymap(doc.items, |doc| {\n item_to_entry(doc, config)\n })\n }\n}\n\nfn build_nmod_index(\n doc: doc::nmoddoc,\n config: config::config\n) -> doc::index {\n {\n entries: par::anymap(doc.fns, |doc| {\n item_to_entry(doc::fntag(doc), config)\n })\n }\n}\n\nfn item_to_entry(\n doc: doc::itemtag,\n config: config::config\n) -> doc::index_entry {\n let link = alt doc {\n doc::modtag(_) | doc::nmodtag(_)\n if config.output_style == config::doc_per_mod {\n markdown_writer::make_filename(config, doc::itempage(doc))\n }\n _ {\n \"#\" + pandoc_header_id(markdown_pass::header_text(doc))\n }\n };\n\n {\n kind: markdown_pass::header_kind(doc),\n name: markdown_pass::header_name(doc),\n brief: doc.brief(),\n link: link\n }\n}\n\nfn pandoc_header_id(header: str) -> str {\n\n \/\/ http:\/\/johnmacfarlane.net\/pandoc\/README.html#headers\n\n let header = remove_formatting(header);\n let header = remove_punctuation(header);\n let header = replace_with_hyphens(header);\n let header = convert_to_lowercase(header);\n let header = remove_up_to_first_letter(header);\n let header = maybe_use_section_id(header);\n ret header;\n\n fn remove_formatting(s: str) -> str {\n str::replace(s, \"`\", \"\")\n }\n fn remove_punctuation(s: str) -> str {\n let s = str::replace(s, \"<\", \"\");\n let s = str::replace(s, \">\", \"\");\n let s = str::replace(s, \"[\", \"\");\n let s = str::replace(s, \"]\", \"\");\n let s = str::replace(s, \"(\", \"\");\n let s = str::replace(s, \")\", \"\");\n let s = str::replace(s, \"@\", \"\");\n let s = str::replace(s, \"~\", \"\");\n let s = str::replace(s, \"\/\", \"\");\n let s = str::replace(s, \":\", \"\");\n let s = str::replace(s, \"&\", \"\");\n let s = str::replace(s, \"^\", \"\");\n ret s;\n }\n fn replace_with_hyphens(s: str) -> str {\n str::replace(s, \" \", \"-\")\n }\n fn convert_to_lowercase(s: str) -> str { str::to_lower(s) }\n fn remove_up_to_first_letter(s: str) -> str { s }\n fn maybe_use_section_id(s: str) -> str { s }\n}\n\n#[test]\nfn should_remove_punctuation_from_headers() {\n assert pandoc_header_id(\"impl foo of bar\") == \"impl-foo-of-bara\";\n assert pandoc_header_id(\"fn@(~[~A])\") == \"fna\";\n assert pandoc_header_id(\"impl of num::num for int\")\n == \"impl-of-numnum-for-int\";\n assert pandoc_header_id(\"impl of num::num for int\/&\")\n == \"impl-of-numnum-for-int\";\n assert pandoc_header_id(\"impl of num::num for ^int\")\n == \"impl-of-numnum-for-int\";\n}\n\n#[test]\nfn should_index_mod_contents() {\n let doc = test::mk_doc(\n config::doc_per_crate,\n \"mod a { } fn b() { }\"\n );\n 
assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Module\",\n name: \"a\",\n brief: none,\n link: \"#module-a\"\n };\n assert option::get(doc.cratemod().index).entries[1] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[test]\nfn should_index_mod_contents_multi_page() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"mod a { } fn b() { }\"\n );\n assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Module\",\n name: \"a\",\n brief: none,\n link: \"a.html\"\n };\n assert option::get(doc.cratemod().index).entries[1] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[test]\nfn should_index_foreign_mod_pages() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"extern mod a { }\"\n );\n assert option::get(doc.cratemod().index).entries[0] == {\n kind: \"Foreign module\",\n name: \"a\",\n brief: none,\n link: \"a.html\"\n };\n}\n\n#[test]\nfn should_add_brief_desc_to_index() {\n let doc = test::mk_doc(\n config::doc_per_mod,\n \"#[doc = \\\"test\\\"] mod a { }\"\n );\n assert option::get(doc.cratemod().index).entries[0].brief == some(\"test\");\n}\n\n#[test]\nfn should_index_foreign_mod_contents() {\n let doc = test::mk_doc(\n config::doc_per_crate,\n \"extern mod a { fn b(); }\"\n );\n assert option::get(doc.cratemod().nmods()[0].index).entries[0] == {\n kind: \"Function\",\n name: \"b\",\n brief: none,\n link: \"#function-b\"\n };\n}\n\n#[cfg(test)]\nmod test {\n fn mk_doc(output_style: config::output_style, source: str) -> doc::doc {\n do astsrv::from_str(source) |srv| {\n let config = {\n output_style: output_style\n with config::default_config(\"whatever\")\n };\n let doc = extract::from_srv(srv, \"\");\n let doc = attr_pass::mk_pass().f(srv, doc);\n let doc = desc_to_brief_pass::mk_pass().f(srv, doc);\n let doc = path_pass::mk_pass().f(srv, doc);\n run(srv, doc, config)\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nuse std::process::{Command, Stdio};\nuse std::path::{Path, PathBuf};\n\npub fn run(cmd: &mut Command) {\n println!(\"running: {:?}\", cmd);\n run_silent(cmd);\n}\n\npub fn run_silent(cmd: &mut Command) {\n let status = match cmd.status() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n status));\n }\n}\n\npub fn gnu_target(target: &str) -> String {\n match target {\n \"i686-pc-windows-msvc\" => \"i686-pc-win32\".to_string(),\n \"x86_64-pc-windows-msvc\" => \"x86_64-pc-win32\".to_string(),\n \"i686-pc-windows-gnu\" => \"i686-w64-mingw32\".to_string(),\n \"x86_64-pc-windows-gnu\" => \"x86_64-w64-mingw32\".to_string(),\n s => s.to_string(),\n }\n}\n\npub fn cc2ar(cc: &Path, target: &str) -> Option {\n if target.contains(\"msvc\") {\n None\n } else if target.contains(\"musl\") {\n Some(PathBuf::from(\"ar\"))\n } else {\n let parent = cc.parent().unwrap();\n let file = cc.file_name().unwrap().to_str().unwrap();\n for suffix in &[\"gcc\", \"cc\", \"clang\"] {\n if let Some(idx) = file.rfind(suffix) {\n let mut file = file[..idx].to_owned();\n file.push_str(\"ar\");\n return Some(parent.join(&file));\n }\n }\n Some(parent.join(file))\n }\n}\n\npub fn output(cmd: &mut Command) -> String {\n let output = match cmd.stderr(Stdio::inherit()).output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n panic!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n output.status);\n }\n String::from_utf8(output.stdout).unwrap()\n}\n\nfn fail(s: &str) -> ! {\n println!(\"\\n\\n{}\\n\\n\", s);\n std::process::exit(1);\n}\nDisconnect ar from cc on OpenBSD\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nuse std::process::{Command, Stdio};\nuse std::path::{Path, PathBuf};\n\npub fn run(cmd: &mut Command) {\n println!(\"running: {:?}\", cmd);\n run_silent(cmd);\n}\n\npub fn run_silent(cmd: &mut Command) {\n let status = match cmd.status() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n status));\n }\n}\n\npub fn gnu_target(target: &str) -> String {\n match target {\n \"i686-pc-windows-msvc\" => \"i686-pc-win32\".to_string(),\n \"x86_64-pc-windows-msvc\" => \"x86_64-pc-win32\".to_string(),\n \"i686-pc-windows-gnu\" => \"i686-w64-mingw32\".to_string(),\n \"x86_64-pc-windows-gnu\" => \"x86_64-w64-mingw32\".to_string(),\n s => s.to_string(),\n }\n}\n\npub fn cc2ar(cc: &Path, target: &str) -> Option {\n if target.contains(\"msvc\") {\n None\n } else if target.contains(\"musl\") {\n Some(PathBuf::from(\"ar\"))\n } else if target.contains(\"openbsd\") {\n Some(PathBuf::from(\"ar\"))\n } else {\n let parent = cc.parent().unwrap();\n let file = cc.file_name().unwrap().to_str().unwrap();\n for suffix in &[\"gcc\", \"cc\", \"clang\"] {\n if let Some(idx) = file.rfind(suffix) {\n let mut file = file[..idx].to_owned();\n file.push_str(\"ar\");\n return Some(parent.join(&file));\n }\n }\n Some(parent.join(file))\n }\n}\n\npub fn output(cmd: &mut Command) -> String {\n let output = match cmd.stderr(Stdio::inherit()).output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n panic!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n output.status);\n }\n String::from_utf8(output.stdout).unwrap()\n}\n\nfn fail(s: &str) -> ! {\n println!(\"\\n\\n{}\\n\\n\", s);\n std::process::exit(1);\n}\n<|endoftext|>"} {"text":"appname fix<|endoftext|>"} {"text":"Refactor message loop<|endoftext|>"} {"text":"\/\/\/! Basic CSS block layout.\n\nuse style::{StyledNode, Inline, Block, DisplayNone};\nuse css::{Keyword, Length, Px};\nuse std::default::Default;\nuse std::iter::AdditiveIterator; \/\/ for `sum`\n\n\/\/ CSS box model. 
All sizes are in px.\n\n#[deriving(Default, Show)]\npub struct Dimensions {\n \/\/ Position of the content area relative to the document origin:\n pub x: f32,\n pub y: f32,\n\n \/\/ Content area size:\n pub width: f32,\n pub height: f32,\n\n \/\/ Surrounding edges:\n pub padding: EdgeSizes,\n pub border: EdgeSizes,\n pub margin: EdgeSizes,\n}\n\n\/\/\/ A node in the layout tree.\npub struct LayoutBox<'a> {\n pub box_type: BoxType<'a>,\n pub dimensions: Dimensions,\n pub children: Vec>,\n}\n\npub enum BoxType<'a> {\n BlockNode(&'a StyledNode<'a>),\n InlineNode(&'a StyledNode<'a>),\n InlineContainer,\n}\n\nimpl<'a> LayoutBox<'a> {\n fn new(box_type: BoxType) -> LayoutBox {\n LayoutBox {\n box_type: box_type,\n dimensions: Default::default(),\n children: Vec::new(),\n }\n }\n\n fn get_style_node(&self) -> &'a StyledNode<'a> {\n match self.box_type {\n BlockNode(node) => node,\n InlineNode(node) => node,\n InlineContainer => fail!(\"Inline container has no style node\")\n }\n }\n}\n\n#[deriving(Default, Show)]\nstruct EdgeSizes { left: f32, right: f32, top: f32, bottom: f32 }\n\n\/\/\/ Transform a style tree into a layout tree.\npub fn layout_tree<'a>(node: &'a StyledNode<'a>, containing_block: Dimensions) -> LayoutBox<'a> {\n let mut root_box = build_layout_tree(node);\n root_box.layout(containing_block);\n return root_box;\n}\n\n\/\/\/ Build the tree of LayoutBoxes, but don't perform any layout calculations yet.\nfn build_layout_tree<'a>(style_node: &'a StyledNode<'a>) -> LayoutBox<'a> {\n \/\/ Create the root box.\n let mut root = LayoutBox::new(match style_node.display() {\n Block => BlockNode(style_node),\n Inline => InlineNode(style_node),\n DisplayNone => fail!(\"Root node has display: none.\")\n });\n\n \/\/ Create the descendant boxes.\n for child in style_node.children.iter() {\n match child.display() {\n Block => root.children.push(build_layout_tree(child)),\n Inline => root.get_inline_container().children.push(build_layout_tree(child)),\n DisplayNone => {} \/\/ Don't lay out nodes with `display: none;`\n }\n }\n return root;\n}\n\nimpl<'a> LayoutBox<'a> {\n \/\/\/ Lay out a box and its descendants.\n fn layout(&mut self, containing_block: Dimensions) {\n match self.box_type {\n BlockNode(_) => self.layout_block(containing_block),\n InlineNode(_) => {} \/\/ TODO\n InlineContainer => {} \/\/ TODO\n }\n }\n\n \/\/\/ Lay out a block-level element and its descendants.\n fn layout_block(&mut self, containing_block: Dimensions) {\n \/\/ Child width can depend on parent width, so we need to calculate this node's width before\n \/\/ laying out its children.\n self.calculate_block_width(containing_block);\n\n \/\/ Recursively lay out the children of this node within its content area.\n let content_height = self.layout_block_content(containing_block);\n\n \/\/ Parent height can depend on child height, so `calculate_height` must be called after the\n \/\/ content layout is finished.\n self.calculate_block_height(content_height);\n }\n\n \/\/\/ Calculate the width of a block-level non-replaced element in normal flow.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#blockwidth\n fn calculate_block_width(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n\n \/\/ `width` has initial value `auto`.\n let auto = Keyword(\"auto\".to_string());\n let mut width = style.value(\"width\").unwrap_or(auto.clone());\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n let mut margin_left = style.lookup(\"margin-left\", 
\"margin\", &zero);\n let mut margin_right = style.lookup(\"margin-right\", \"margin\", &zero);\n\n let border_left = style.lookup(\"border-left-width\", \"border-width\", &zero);\n let border_right = style.lookup(\"border-right-width\", \"border-width\", &zero);\n\n let padding_left = style.lookup(\"padding-left\", \"padding\", &zero);\n let padding_right = style.lookup(\"padding-right\", \"padding\", &zero);\n\n let total = [&margin_left, &margin_right, &border_left, &border_right,\n &padding_left, &padding_right, &width].iter().map(|v| v.to_px()).sum();\n\n \/\/ If width is not auto and the total is wider than the container, treat auto margins as 0.\n if width != auto && total > containing_block.width {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n }\n\n \/\/ Adjust used values so that the above sum equals `containing_block.width`.\n \/\/ Each arm of the `match` should increase the total width by exactly `underflow`,\n \/\/ and afterward all values should be absolute lengths in px.\n let underflow = containing_block.width - total;\n match (width == auto, margin_left == auto, margin_right == auto) {\n \/\/ If the values are overconstrained, calculate margin_right.\n (false, false, false) => {\n margin_right = Length(margin_right.to_px() + underflow, Px);\n }\n \/\/ If exactly one value is auto, its used value follows from the equality.\n (false, false, true) => {\n margin_right = Length(underflow, Px);\n }\n (false, true, false) => {\n margin_left = Length(underflow, Px);\n }\n \/\/ If width is set to auto, any other auto values become 0.\n (true, _, _) => {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n width = Length(underflow, Px);\n }\n (false, true, true) => {\n \/\/ If margin-left and margin-right are both auto, their used values are equal.\n margin_left = Length(underflow \/ 2.0, Px);\n margin_right = Length(underflow \/ 2.0, Px);\n }\n }\n\n let d = &mut self.dimensions;\n d.width = width.to_px();\n\n d.padding.left = padding_left.to_px();\n d.padding.right = padding_right.to_px();\n\n d.border.left = border_left.to_px();\n d.border.right = border_right.to_px();\n\n d.margin.left = margin_left.to_px();\n d.margin.right = margin_right.to_px();\n\n d.x = containing_block.x + d.margin.left + d.border.left + d.padding.left;\n }\n\n \/\/\/ Lay out the node's children within its content area and return the content height.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#normal-block\n fn layout_block_content(&mut self, containing_block: Dimensions) -> f32 {\n \/\/ First we need to find the position of the content area...\n let style = self.get_style_node();\n let d = &mut self.dimensions;\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n \/\/ If margin-top or margin-bottom is `auto`, the used value is zero.\n d.margin.top = style.lookup(\"margin-top\", \"margin\", &zero).to_px();\n d.margin.bottom = style.lookup(\"margin-bottom\", \"margin\", &zero).to_px();\n\n d.border.top = style.lookup(\"border-top-width\", \"border-width\", &zero).to_px();\n d.border.bottom = style.lookup(\"border-bottom-width\", \"border-width\", &zero).to_px();\n\n d.padding.top = style.lookup(\"padding-top\", \"padding\", &zero).to_px();\n d.padding.bottom = style.lookup(\"padding-bottom\", \"padding\", &zero).to_px();\n\n d.y = containing_block.y + d.margin.top + d.border.top + 
d.padding.top;\n\n \/\/ Now we can lay out the children within the content area.\n let mut content_height = 0.0;\n for child in self.children.mut_iter() {\n child.layout(*d);\n\n \/\/ Position each child below the previous one. TODO: margin collapsing\n child.dimensions.y = d.y + content_height;\n content_height = content_height + child.dimensions.margin_box_height();\n }\n return content_height;\n }\n\n \/\/\/ Height of a block-level non-replaced element in normal flow with overflow visible.\n fn calculate_block_height(&mut self, content_height: f32) {\n let height = self.get_style_node().value(\"height\");\n self.dimensions.height = match height {\n Some(Length(h, Px)) => h,\n _ => content_height \/\/ In the default (`auto`) case, use the content height.\n };\n }\n\n \/\/\/ Where a new inline child should go.\n fn get_inline_container(&mut self) -> &mut LayoutBox<'a> {\n match self.box_type {\n InlineNode(_) | InlineContainer => self,\n BlockNode(_) => {\n \/\/ If we're in the middle of a series of inline nodes, keep using the existing\n \/\/ container. Otherwise, create a new inline container.\n match self.children.last() {\n Some(&LayoutBox { box_type: InlineContainer,..}) => {}\n _ => self.children.push(LayoutBox::new(InlineContainer))\n }\n self.children.mut_last().unwrap()\n }\n }\n }\n}\n\nimpl Dimensions {\n \/\/\/ Total height of a box including its margins, border, and padding.\n fn margin_box_height(&self) -> f32 {\n self.height + self.padding.top + self.padding.bottom\n + self.border.top + self.border.bottom\n + self.margin.top + self.margin.bottom\n }\n}\nMove EdgeSizes back by Dimensions\/\/\/! Basic CSS block layout.\n\nuse style::{StyledNode, Inline, Block, DisplayNone};\nuse css::{Keyword, Length, Px};\nuse std::default::Default;\nuse std::iter::AdditiveIterator; \/\/ for `sum`\n\n\/\/ CSS box model. 
All sizes are in px.\n\n#[deriving(Default, Show)]\npub struct Dimensions {\n \/\/ Position of the content area relative to the document origin:\n pub x: f32,\n pub y: f32,\n\n \/\/ Content area size:\n pub width: f32,\n pub height: f32,\n\n \/\/ Surrounding edges:\n pub padding: EdgeSizes,\n pub border: EdgeSizes,\n pub margin: EdgeSizes,\n}\n\n#[deriving(Default, Show)]\nstruct EdgeSizes { left: f32, right: f32, top: f32, bottom: f32 }\n\n\/\/\/ A node in the layout tree.\npub struct LayoutBox<'a> {\n pub box_type: BoxType<'a>,\n pub dimensions: Dimensions,\n pub children: Vec>,\n}\n\npub enum BoxType<'a> {\n BlockNode(&'a StyledNode<'a>),\n InlineNode(&'a StyledNode<'a>),\n InlineContainer,\n}\n\nimpl<'a> LayoutBox<'a> {\n fn new(box_type: BoxType) -> LayoutBox {\n LayoutBox {\n box_type: box_type,\n dimensions: Default::default(),\n children: Vec::new(),\n }\n }\n\n fn get_style_node(&self) -> &'a StyledNode<'a> {\n match self.box_type {\n BlockNode(node) => node,\n InlineNode(node) => node,\n InlineContainer => fail!(\"Inline container has no style node\")\n }\n }\n}\n\n\/\/\/ Transform a style tree into a layout tree.\npub fn layout_tree<'a>(node: &'a StyledNode<'a>, containing_block: Dimensions) -> LayoutBox<'a> {\n let mut root_box = build_layout_tree(node);\n root_box.layout(containing_block);\n return root_box;\n}\n\n\/\/\/ Build the tree of LayoutBoxes, but don't perform any layout calculations yet.\nfn build_layout_tree<'a>(style_node: &'a StyledNode<'a>) -> LayoutBox<'a> {\n \/\/ Create the root box.\n let mut root = LayoutBox::new(match style_node.display() {\n Block => BlockNode(style_node),\n Inline => InlineNode(style_node),\n DisplayNone => fail!(\"Root node has display: none.\")\n });\n\n \/\/ Create the descendant boxes.\n for child in style_node.children.iter() {\n match child.display() {\n Block => root.children.push(build_layout_tree(child)),\n Inline => root.get_inline_container().children.push(build_layout_tree(child)),\n DisplayNone => {} \/\/ Don't lay out nodes with `display: none;`\n }\n }\n return root;\n}\n\nimpl<'a> LayoutBox<'a> {\n \/\/\/ Lay out a box and its descendants.\n fn layout(&mut self, containing_block: Dimensions) {\n match self.box_type {\n BlockNode(_) => self.layout_block(containing_block),\n InlineNode(_) => {} \/\/ TODO\n InlineContainer => {} \/\/ TODO\n }\n }\n\n \/\/\/ Lay out a block-level element and its descendants.\n fn layout_block(&mut self, containing_block: Dimensions) {\n \/\/ Child width can depend on parent width, so we need to calculate this node's width before\n \/\/ laying out its children.\n self.calculate_block_width(containing_block);\n\n \/\/ Recursively lay out the children of this node within its content area.\n let content_height = self.layout_block_content(containing_block);\n\n \/\/ Parent height can depend on child height, so `calculate_height` must be called after the\n \/\/ content layout is finished.\n self.calculate_block_height(content_height);\n }\n\n \/\/\/ Calculate the width of a block-level non-replaced element in normal flow.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#blockwidth\n fn calculate_block_width(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n\n \/\/ `width` has initial value `auto`.\n let auto = Keyword(\"auto\".to_string());\n let mut width = style.value(\"width\").unwrap_or(auto.clone());\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n let mut margin_left = style.lookup(\"margin-left\", 
\"margin\", &zero);\n let mut margin_right = style.lookup(\"margin-right\", \"margin\", &zero);\n\n let border_left = style.lookup(\"border-left-width\", \"border-width\", &zero);\n let border_right = style.lookup(\"border-right-width\", \"border-width\", &zero);\n\n let padding_left = style.lookup(\"padding-left\", \"padding\", &zero);\n let padding_right = style.lookup(\"padding-right\", \"padding\", &zero);\n\n let total = [&margin_left, &margin_right, &border_left, &border_right,\n &padding_left, &padding_right, &width].iter().map(|v| v.to_px()).sum();\n\n \/\/ If width is not auto and the total is wider than the container, treat auto margins as 0.\n if width != auto && total > containing_block.width {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n }\n\n \/\/ Adjust used values so that the above sum equals `containing_block.width`.\n \/\/ Each arm of the `match` should increase the total width by exactly `underflow`,\n \/\/ and afterward all values should be absolute lengths in px.\n let underflow = containing_block.width - total;\n match (width == auto, margin_left == auto, margin_right == auto) {\n \/\/ If the values are overconstrained, calculate margin_right.\n (false, false, false) => {\n margin_right = Length(margin_right.to_px() + underflow, Px);\n }\n \/\/ If exactly one value is auto, its used value follows from the equality.\n (false, false, true) => {\n margin_right = Length(underflow, Px);\n }\n (false, true, false) => {\n margin_left = Length(underflow, Px);\n }\n \/\/ If width is set to auto, any other auto values become 0.\n (true, _, _) => {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n width = Length(underflow, Px);\n }\n (false, true, true) => {\n \/\/ If margin-left and margin-right are both auto, their used values are equal.\n margin_left = Length(underflow \/ 2.0, Px);\n margin_right = Length(underflow \/ 2.0, Px);\n }\n }\n\n let d = &mut self.dimensions;\n d.width = width.to_px();\n\n d.padding.left = padding_left.to_px();\n d.padding.right = padding_right.to_px();\n\n d.border.left = border_left.to_px();\n d.border.right = border_right.to_px();\n\n d.margin.left = margin_left.to_px();\n d.margin.right = margin_right.to_px();\n\n d.x = containing_block.x + d.margin.left + d.border.left + d.padding.left;\n }\n\n \/\/\/ Lay out the node's children within its content area and return the content height.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#normal-block\n fn layout_block_content(&mut self, containing_block: Dimensions) -> f32 {\n \/\/ First we need to find the position of the content area...\n let style = self.get_style_node();\n let d = &mut self.dimensions;\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n \/\/ If margin-top or margin-bottom is `auto`, the used value is zero.\n d.margin.top = style.lookup(\"margin-top\", \"margin\", &zero).to_px();\n d.margin.bottom = style.lookup(\"margin-bottom\", \"margin\", &zero).to_px();\n\n d.border.top = style.lookup(\"border-top-width\", \"border-width\", &zero).to_px();\n d.border.bottom = style.lookup(\"border-bottom-width\", \"border-width\", &zero).to_px();\n\n d.padding.top = style.lookup(\"padding-top\", \"padding\", &zero).to_px();\n d.padding.bottom = style.lookup(\"padding-bottom\", \"padding\", &zero).to_px();\n\n d.y = containing_block.y + d.margin.top + d.border.top + 
d.padding.top;\n\n \/\/ Now we can lay out the children within the content area.\n let mut content_height = 0.0;\n for child in self.children.mut_iter() {\n child.layout(*d);\n\n \/\/ Position each child below the previous one. TODO: margin collapsing\n child.dimensions.y = d.y + content_height;\n content_height = content_height + child.dimensions.margin_box_height();\n }\n return content_height;\n }\n\n \/\/\/ Height of a block-level non-replaced element in normal flow with overflow visible.\n fn calculate_block_height(&mut self, content_height: f32) {\n let height = self.get_style_node().value(\"height\");\n self.dimensions.height = match height {\n Some(Length(h, Px)) => h,\n _ => content_height \/\/ In the default (`auto`) case, use the content height.\n };\n }\n\n \/\/\/ Where a new inline child should go.\n fn get_inline_container(&mut self) -> &mut LayoutBox<'a> {\n match self.box_type {\n InlineNode(_) | InlineContainer => self,\n BlockNode(_) => {\n \/\/ If we're in the middle of a series of inline nodes, keep using the existing\n \/\/ container. Otherwise, create a new inline container.\n match self.children.last() {\n Some(&LayoutBox { box_type: InlineContainer,..}) => {}\n _ => self.children.push(LayoutBox::new(InlineContainer))\n }\n self.children.mut_last().unwrap()\n }\n }\n }\n}\n\nimpl Dimensions {\n \/\/\/ Total height of a box including its margins, border, and padding.\n fn margin_box_height(&self) -> f32 {\n self.height + self.padding.top + self.padding.bottom\n + self.border.top + self.border.bottom\n + self.margin.top + self.margin.bottom\n }\n}\n<|endoftext|>"} {"text":"Buffer stdout<|endoftext|>"} {"text":"Properly resolve nested template names on Windows.<|endoftext|>"} {"text":"added single-threaded triangles\/\/ extern crate mmap;\n\/\/ extern crate time;\n\/\/ extern crate dataflow_join;\n\nextern crate graph_map;\n\nuse std::cmp::Ordering::*;\n\nuse graph_map::GraphMMap;\n\n\/\/ use dataflow_join::graph::{GraphTrait, GraphMMap, gallop};\n\nfn main () {\n if let Some(source) = std::env::args().skip(1).next() {\n println!(\"triangles: {:?}\", raw_triangles(&GraphMMap::new(&source)));\n }\n else {\n println!(\"usage: \");\n }\n}\n\nfn raw_triangles(graph: &GraphMMap) -> u64 {\n\n let mut count = 0;\n for a in 0..graph.nodes() {\n if graph.edges(a).len() > 0 {\n count += 1;\n }\n }\n\n println!(\"count: {}\", count);\n\n let mut count = 0;\n for a in 0..graph.nodes() {\n let aaa = graph.edges(a);\n \/\/ println!(\"degr[{}]: {}\", a, aaa.len());\n for &b in aaa {\n let bbb = graph.edges(b as usize);\n count += if aaa.len() < bbb.len() { intersect(aaa, bbb) }\n else { intersect(bbb, aaa) };\n }\n }\n count\n}\n\nfn intersect(mut aaa: &[u32], mut bbb: &[u32]) -> u64 {\n let mut count = 0;\n \/\/ magic gallop overhead # is 4\n if aaa.len() < bbb.len() \/ 4 {\n for a in aaa {\n bbb = gallop(bbb, a);\n if bbb.len() > 0 && &bbb[0] == a { count += 1; }\n }\n }\n else {\n while aaa.len() > 0 && bbb.len() > 0 {\n match aaa[0].cmp(&bbb[0]) {\n Greater => { bbb = &bbb[1..]; },\n Less => { aaa = &aaa[1..]; },\n Equal => { aaa = &aaa[1..];\n bbb = &bbb[1..];\n count += 1;\n },\n }\n }\n }\n count\n}\n\n\n#[inline(always)]\npub fn gallop<'a, T: Ord>(mut slice: &'a [T], value: &T) -> &'a [T] {\n \/\/ if empty slice, or already >= element, return\n if slice.len() > 0 && &slice[0] < value {\n let mut step = 1;\n while step < slice.len() && &slice[step] < value {\n slice = &slice[step..];\n step = step << 1;\n }\n\n step = step >> 1;\n while step > 0 {\n if step < slice.len() && 
&slice[step] < value {\n slice = &slice[step..];\n }\n step = step >> 1;\n }\n\n slice = &slice[1..]; \/\/ advance one, as we always stayed < value\n }\n\n return slice;\n}<|endoftext|>"} {"text":"Add speculative TODO to make Port.Socket private.<|endoftext|>"} {"text":"LT should evaluate to True (not just be impl'd)<|endoftext|>"} {"text":"use std::io;\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str) {\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let mut args: Vec = Vec::new();\n for arg in line.split(' ') {\n args.push(arg.to_string());\n }\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n if command == \"panic\" {\n panic!(\"Test panic\");\n } else {\n println!(\"Commands: panic\");\n }\n }\n }\n}\nAdding `ls` command for test.rsuse std::{io, fs};\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str) {\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n match &command[..]\n {\n \"panic\" => panic!(\"Test panic\"),\n \"ls\" => {\n \/\/ TODO: when libredox is completed\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n _ => println!(\"Commands: panic\"),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"Add tests<|endoftext|>"} {"text":"Add problem 136\/\/! [Problem 136](https:\/\/projecteuler.net\/problem=136) solver.\n\/\/!\n\/\/! 
Using the same algorithm as p135.\n\n#![warn(bad_style,\n unused, unused_extern_crates, unused_import_braces,\n unused_qualifications, unused_results, unused_typecasts)]\n\n#![feature(phase)]\n\n#[phase(plugin, link)] extern crate common;\n\nuse std::iter;\n\nfn num_solutions(limit: uint) -> Vec {\n let mut cnt = Vec::from_elem(limit, 0);\n for q in range(1, limit) {\n let r = (4 - (q % 4)) % 4;\n if q * r >= limit { continue }\n for p in iter::range_step(r, q * 3, 4) {\n let n = q * p;\n if n >= limit { break }\n cnt[n] += 1;\n }\n }\n cnt\n}\n\nfn solve() -> String {\n let limit = 50000000;\n let cnt = 1;\n num_solutions(limit)\n .iter()\n .filter(|&&n| n == cnt)\n .count()\n .to_string()\n}\n\nproblem!(\"2544559\", solve);\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn first_sol() {\n let pos = super::num_solutions(2000)\n .iter()\n .position(|&n| n == 10)\n .unwrap();\n assert_eq!(1155, pos);\n }\n}\n<|endoftext|>"} {"text":"Regex based url matching<|endoftext|>"} {"text":"use std::cell::UnsafeCell;\nuse std::fmt;\nuse std::io::{self, Read};\nuse std::ptr;\nuse std::sync::Arc;\n\npub struct MemBuf {\n buf: Arc>>,\n start: usize,\n end: usize,\n}\n\nimpl MemBuf {\n pub fn new() -> MemBuf {\n MemBuf::with_capacity(0)\n }\n\n pub fn with_capacity(cap: usize) -> MemBuf {\n MemBuf {\n buf: Arc::new(UnsafeCell::new(vec![0; cap])),\n start: 0,\n end: 0,\n }\n }\n\n pub fn bytes(&self) -> &[u8] {\n &self.buf()[self.start..self.end]\n }\n\n pub fn is_empty(&self) -> bool {\n self.len() == 0\n }\n\n pub fn len(&self) -> usize {\n self.end - self.start\n }\n\n pub fn capacity(&self) -> usize {\n self.buf().len()\n }\n\n pub fn read_from(&mut self, io: &mut R) -> io::Result {\n let start = self.end - self.start;\n let n = try!(io.read(&mut self.buf_mut()[start..]));\n self.end += n;\n Ok(n)\n }\n\n pub fn slice(&mut self, len: usize) -> MemSlice {\n assert!(self.end - self.start >= len);\n let start = self.start;\n self.start += len;\n MemSlice {\n buf: self.buf.clone(),\n start: start,\n end: self.start,\n }\n }\n\n pub fn reserve(&mut self, needed: usize) {\n let orig_cap = self.capacity();\n let remaining = orig_cap - self.end;\n if remaining >= needed {\n \/\/ all done\n return\n }\n let is_unique = Arc::get_mut(&mut self.buf).is_some();\n trace!(\"MemBuf::reserve {} access\", if is_unique { \"unique\" } else { \"shared\" });\n if is_unique && remaining + self.start >= needed {\n \/\/ we have unique access, we can mutate this vector\n trace!(\"MemBuf::reserve unique access, shifting\");\n unsafe {\n let mut buf = &mut *self.buf.get();\n let len = self.len();\n ptr::copy(\n buf.as_ptr().offset(self.start as isize),\n buf.as_mut_ptr(),\n len\n );\n self.start = 0;\n self.end = len;\n }\n } else if is_unique {\n \/\/ we have unique access, we can mutate this vector\n trace!(\"MemBuf::reserve unique access, growing\");\n unsafe {\n let mut vec = &mut *self.buf.get();\n grow_zerofill(vec, needed);\n }\n } else {\n \/\/ we need to allocate more space, but dont have unique\n \/\/ access, so we need to make a new buffer\n trace!(\"MemBuf::reserve shared buffer, creating new\");\n let mut new = MemBuf::with_capacity(needed);\n unsafe {\n ptr::copy_nonoverlapping(\n self.bytes().as_ptr(),\n new.buf_mut().as_mut_ptr(),\n self.len()\n );\n }\n new.end = self.len();\n *self = new;\n }\n }\n\n pub fn reset(&mut self) {\n match Arc::get_mut(&mut self.buf) {\n Some(_) => {\n trace!(\"MemBuf::reset was unique, re-using\");\n self.start = 0;\n self.end = 0;\n },\n None => {\n trace!(\"MemBuf::reset not unique, 
creating new MemBuf\");\n *self = MemBuf::with_capacity(self.buf().len());\n }\n }\n }\n\n fn buf_mut(&mut self) -> &mut [u8] {\n \/\/ The contract here is that we NEVER have a MemSlice that exists\n \/\/ with slice.end > self.start.\n \/\/ In other words, we should *ALWAYS* be the only instance that can\n \/\/ look at the bytes on the right side of self.start.\n unsafe {\n &mut (*self.buf.get())[self.start..]\n }\n }\n\n fn buf(&self) -> &Vec {\n unsafe {\n &*self.buf.get()\n }\n }\n}\n\n#[inline]\nunsafe fn grow_zerofill(buf: &mut Vec, additional: usize) {\n let orig_cap = buf.capacity();\n buf.reserve(additional);\n let new_cap = buf.capacity();\n let reserved = new_cap - orig_cap;\n let orig_len = buf.len();\n zero(buf, orig_len, reserved);\n buf.set_len(orig_len + reserved);\n\n\n unsafe fn zero(buf: &mut Vec, offset: usize, len: usize) {\n assert!(buf.capacity() >= len + offset,\n \"offset of {} with len of {} is bigger than capacity of {}\",\n offset, len, buf.capacity());\n ptr::write_bytes(buf.as_mut_ptr().offset(offset as isize), 0, len);\n }\n}\n\n#[test]\nfn test_grow_zerofill() {\n for init in 0..100 {\n for reserve in (0..100).rev() {\n let mut vec = vec![0; init];\n unsafe { grow_zerofill(&mut vec, reserve) }\n assert_eq!(vec.len(), vec.capacity());\n }\n }\n}\n\nimpl fmt::Debug for MemBuf {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"MemBuf\")\n .field(\"start\", &self.start)\n .field(\"end\", &self.end)\n .field(\"buf\", &&self.buf()[self.start..self.end])\n .finish()\n }\n}\n\npub struct MemSlice {\n buf: Arc>>,\n start: usize,\n end: usize,\n}\n\nimpl MemSlice {\n pub fn empty() -> MemSlice {\n MemSlice {\n buf: Arc::new(UnsafeCell::new(Vec::new())),\n start: 0,\n end: 0,\n }\n }\n}\n\n\n#[cfg(test)]\nimpl ::http::io::MemRead for ::mock::AsyncIo {\n fn read_mem(&mut self, len: usize) -> io::Result {\n let mut v = vec![0; len];\n let n = try!(self.read(v.as_mut_slice()));\n v.truncate(n);\n Ok(MemSlice {\n buf: Arc::new(UnsafeCell::new(v)),\n start: 0,\n end: n,\n })\n }\n}\n\nimpl fmt::Debug for MemSlice {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(&**self, f)\n }\n}\n\nimpl ::std::ops::Deref for MemSlice {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n &(*self.buf.get())[self.start..self.end]\n }\n }\n}\n\nunsafe impl Send for MemBuf {}\nunsafe impl Send for MemSlice {}\n\n\/*\n#[cfg(test)]\nmod tests {\n use super::{MemBuf};\n\n #[test]\n fn test_\n}\n*\/\nrefactor(http): allow MemSlice to be sliced to make copiesuse std::cell::UnsafeCell;\nuse std::fmt;\nuse std::io::{self, Read};\nuse std::ops::{Deref, Range, RangeFrom, RangeTo, RangeFull};\nuse std::ptr;\nuse std::sync::Arc;\n\npub struct MemBuf {\n buf: Arc>>,\n start: usize,\n end: usize,\n}\n\nimpl MemBuf {\n pub fn new() -> MemBuf {\n MemBuf::with_capacity(0)\n }\n\n pub fn with_capacity(cap: usize) -> MemBuf {\n MemBuf {\n buf: Arc::new(UnsafeCell::new(vec![0; cap])),\n start: 0,\n end: 0,\n }\n }\n\n pub fn bytes(&self) -> &[u8] {\n &self.buf()[self.start..self.end]\n }\n\n pub fn is_empty(&self) -> bool {\n self.len() == 0\n }\n\n pub fn len(&self) -> usize {\n self.end - self.start\n }\n\n pub fn capacity(&self) -> usize {\n self.buf().len()\n }\n\n pub fn read_from(&mut self, io: &mut R) -> io::Result {\n let start = self.end - self.start;\n let n = try!(io.read(&mut self.buf_mut()[start..]));\n self.end += n;\n Ok(n)\n }\n\n pub fn slice(&mut self, len: usize) -> MemSlice {\n assert!(self.end - self.start >= len);\n let start 
= self.start;\n self.start += len;\n MemSlice {\n buf: self.buf.clone(),\n start: start,\n end: self.start,\n }\n }\n\n pub fn reserve(&mut self, needed: usize) {\n let orig_cap = self.capacity();\n let remaining = orig_cap - self.end;\n if remaining >= needed {\n \/\/ all done\n return\n }\n let is_unique = Arc::get_mut(&mut self.buf).is_some();\n trace!(\"MemBuf::reserve {} access\", if is_unique { \"unique\" } else { \"shared\" });\n if is_unique && remaining + self.start >= needed {\n \/\/ we have unique access, we can mutate this vector\n trace!(\"MemBuf::reserve unique access, shifting\");\n unsafe {\n let mut buf = &mut *self.buf.get();\n let len = self.len();\n ptr::copy(\n buf.as_ptr().offset(self.start as isize),\n buf.as_mut_ptr(),\n len\n );\n self.start = 0;\n self.end = len;\n }\n } else if is_unique {\n \/\/ we have unique access, we can mutate this vector\n trace!(\"MemBuf::reserve unique access, growing\");\n unsafe {\n let mut vec = &mut *self.buf.get();\n grow_zerofill(vec, needed);\n }\n } else {\n \/\/ we need to allocate more space, but dont have unique\n \/\/ access, so we need to make a new buffer\n trace!(\"MemBuf::reserve shared buffer, creating new\");\n let mut new = MemBuf::with_capacity(needed);\n unsafe {\n ptr::copy_nonoverlapping(\n self.bytes().as_ptr(),\n new.buf_mut().as_mut_ptr(),\n self.len()\n );\n }\n new.end = self.len();\n *self = new;\n }\n }\n\n pub fn reset(&mut self) {\n match Arc::get_mut(&mut self.buf) {\n Some(_) => {\n trace!(\"MemBuf::reset was unique, re-using\");\n self.start = 0;\n self.end = 0;\n },\n None => {\n trace!(\"MemBuf::reset not unique, creating new MemBuf\");\n *self = MemBuf::with_capacity(self.buf().len());\n }\n }\n }\n\n fn buf_mut(&mut self) -> &mut [u8] {\n \/\/ The contract here is that we NEVER have a MemSlice that exists\n \/\/ with slice.end > self.start.\n \/\/ In other words, we should *ALWAYS* be the only instance that can\n \/\/ look at the bytes on the right side of self.start.\n unsafe {\n &mut (*self.buf.get())[self.start..]\n }\n }\n\n fn buf(&self) -> &Vec {\n unsafe {\n &*self.buf.get()\n }\n }\n}\n\n#[inline]\nunsafe fn grow_zerofill(buf: &mut Vec, additional: usize) {\n let orig_cap = buf.capacity();\n buf.reserve(additional);\n let new_cap = buf.capacity();\n let reserved = new_cap - orig_cap;\n let orig_len = buf.len();\n zero(buf, orig_len, reserved);\n buf.set_len(orig_len + reserved);\n\n\n unsafe fn zero(buf: &mut Vec, offset: usize, len: usize) {\n assert!(buf.capacity() >= len + offset,\n \"offset of {} with len of {} is bigger than capacity of {}\",\n offset, len, buf.capacity());\n ptr::write_bytes(buf.as_mut_ptr().offset(offset as isize), 0, len);\n }\n}\n\n#[test]\nfn test_grow_zerofill() {\n for init in 0..100 {\n for reserve in (0..100).rev() {\n let mut vec = vec![0; init];\n unsafe { grow_zerofill(&mut vec, reserve) }\n assert_eq!(vec.len(), vec.capacity());\n }\n }\n}\n\nimpl fmt::Debug for MemBuf {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"MemBuf\")\n .field(\"start\", &self.start)\n .field(\"end\", &self.end)\n .field(\"buf\", &&self.buf()[self.start..self.end])\n .finish()\n }\n}\n\npub struct MemSlice {\n buf: Arc>>,\n start: usize,\n end: usize,\n}\n\nimpl MemSlice {\n pub fn empty() -> MemSlice {\n MemSlice {\n buf: Arc::new(UnsafeCell::new(Vec::new())),\n start: 0,\n end: 0,\n }\n }\n\n pub fn slice(&self, range: S) -> MemSlice {\n range.slice(self)\n }\n}\n\n\nimpl fmt::Debug for MemSlice {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result 
{\n fmt::Debug::fmt(&**self, f)\n }\n}\n\nimpl Deref for MemSlice {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n &(*self.buf.get())[self.start..self.end]\n }\n }\n}\n\npub trait Slice {\n fn slice(self, subject: &MemSlice) -> MemSlice;\n}\n\n\nimpl Slice for Range {\n fn slice(self, subject: &MemSlice) -> MemSlice {\n assert!(subject.start + self.start <= subject.end);\n assert!(subject.start + self.end <= subject.end);\n MemSlice {\n buf: subject.buf.clone(),\n start: subject.start + self.start,\n end: subject.start + self.end,\n }\n }\n}\n\nimpl Slice for RangeFrom {\n fn slice(self, subject: &MemSlice) -> MemSlice {\n assert!(subject.start + self.start <= subject.end);\n MemSlice {\n buf: subject.buf.clone(),\n start: subject.start + self.start,\n end: subject.end,\n }\n }\n}\n\nimpl Slice for RangeTo {\n fn slice(self, subject: &MemSlice) -> MemSlice {\n assert!(subject.start + self.end <= subject.end);\n MemSlice {\n buf: subject.buf.clone(),\n start: subject.start,\n end: subject.start + self.end,\n }\n }\n}\n\nimpl Slice for RangeFull {\n fn slice(self, subject: &MemSlice) -> MemSlice {\n MemSlice {\n buf: subject.buf.clone(),\n start: subject.start,\n end: subject.end,\n }\n }\n}\n\nunsafe impl Send for MemBuf {}\nunsafe impl Send for MemSlice {}\n\n#[cfg(test)]\nimpl ::http::io::MemRead for ::mock::AsyncIo {\n fn read_mem(&mut self, len: usize) -> io::Result {\n let mut v = vec![0; len];\n let n = try!(self.read(v.as_mut_slice()));\n v.truncate(n);\n Ok(MemSlice {\n buf: Arc::new(UnsafeCell::new(v)),\n start: 0,\n end: n,\n })\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{MemBuf};\n\n #[test]\n fn test_mem_slice_slice() {\n let mut buf = MemBuf::with_capacity(32);\n buf.read_from(&mut &b\"Hello World\"[..]).unwrap();\n\n let len = buf.len();\n let full = buf.slice(len);\n\n assert_eq!(&*full, b\"Hello World\");\n assert_eq!(&*full.slice(6..), b\"World\");\n assert_eq!(&*full.slice(..5), b\"Hello\");\n assert_eq!(&*full.slice(..), b\"Hello World\");\n for a in 0..len {\n for b in a..len {\n assert_eq!(&*full.slice(a..b), &b\"Hello World\"[a..b], \"{}..{}\", a, b);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"Create ppipe_example.rs \n<|endoftext|>"} {"text":"fetch\/artifact: fix md5 hash format<|endoftext|>"} {"text":"Syncfile: move header logic into own function, create syncdir if needed<|endoftext|>"} {"text":"Solved first problem for Day 1<|endoftext|>"} {"text":"Moving elapsed to record time needed to read data<|endoftext|>"} {"text":"#![feature(globs)]\n#![feature(if_let)]\n\nextern crate neovim;\nextern crate rgtk;\nextern crate serialize;\n\n\/\/use neovim::*;\nuse rgtk::*;\nuse std::collections::HashSet;\n\nmod projects;\nmod ui;\nmod utils;\n\nfn main() {\n gtk::init();\n\n \/\/ constants\n\n let width = 1242;\n let height = 768;\n let editor_height = ((height as f32) * 0.8) as i32;\n\n \/\/ create the window\n\n let mut window = gtk::Window::new(gtk::WindowType::TopLevel).unwrap();\n window.set_title(\"SolidOak\");\n window.set_window_position(gtk::WindowPosition::Center);\n window.set_default_size(width, height);\n\n window.connect(gtk::signals::DeleteEvent::new(|_| {\n gtk::main_quit();\n true\n }));\n\n \/\/ create the panes\n\n let new_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n\n let mut proj_btns = 
gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n proj_btns.set_size_request(-1, -1);\n proj_btns.add(&new_button);\n proj_btns.add(&import_button);\n proj_btns.add(&rename_button);\n proj_btns.add(&remove_button);\n\n let mut proj_tree = gtk::TreeView::new().unwrap();\n let selection = proj_tree.get_selection().unwrap();\n let column_types = [glib::ffi::g_type_string, glib::ffi::g_type_string];\n let store = gtk::TreeStore::new(&column_types).unwrap();\n let model = store.get_model().unwrap();\n proj_tree.set_model(&model);\n proj_tree.set_headers_visible(false);\n\n let mut scroll_pane = gtk::ScrolledWindow::new(None, None).unwrap();\n scroll_pane.add(&proj_tree);\n\n let column = gtk::TreeViewColumn::new().unwrap();\n let cell = gtk::CellRendererText::new().unwrap();\n column.pack_start(&cell, true);\n column.add_attribute(&cell, \"text\", 0);\n proj_tree.append_column(&column);\n\n let mut proj_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n proj_pane.set_size_request(-1, -1);\n proj_pane.pack_start(&proj_btns, false, true, 0);\n proj_pane.pack_start(&scroll_pane, true, true, 0);\n\n let editor_pane = gtk::VteTerminal::new().unwrap();\n editor_pane.set_size_request(-1, editor_height);\n\n let run_button = gtk::Button::new_with_label(\"Run\").unwrap();\n let build_button = gtk::Button::new_with_label(\"Build\").unwrap();\n let test_button = gtk::Button::new_with_label(\"Test\").unwrap();\n let clean_button = gtk::Button::new_with_label(\"Clean\").unwrap();\n let stop_button = gtk::Button::new_with_label(\"Stop\").unwrap();\n\n let mut build_btns = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n build_btns.set_size_request(-1, -1);\n build_btns.add(&run_button);\n build_btns.add(&build_button);\n build_btns.add(&test_button);\n build_btns.add(&clean_button);\n build_btns.add(&stop_button);\n\n let build_term = gtk::VteTerminal::new().unwrap();\n\n let mut build_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n build_pane.pack_start(&build_btns, false, true, 0);\n build_pane.pack_start(&build_term, true, true, 0);\n\n let mut content = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n content.pack_start(&editor_pane, false, true, 0);\n content.pack_start(&build_pane, true, true, 0);\n\n let mut hbox = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n hbox.pack_start(&proj_pane, false, true, 0);\n hbox.pack_start(&content, true, true, 0);\n window.add(&hbox);\n\n \/\/ populate the project tree\n\n let mut state = ::utils::State{\n projects: HashSet::new(),\n expansions: HashSet::new(),\n selection: None,\n tree_model: &model,\n tree_store: &store,\n tree_selection: &selection,\n rename_button: &rename_button,\n remove_button: &remove_button,\n };\n\n ::utils::create_data_dir();\n ::utils::read_prefs(&mut state);\n ::ui::update_project_tree(&mut state, &mut proj_tree);\n\n \/\/ connect to the signals\n\n new_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::new_project(&mut state, &mut proj_tree);\n }));\n import_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::import_project(&mut state, &mut proj_tree);\n }));\n rename_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::rename_file(&mut state);\n }));\n remove_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::remove_item(&mut state);\n }));\n selection.connect(gtk::signals::Changed::new(|| {\n ::projects::update_selection(&mut state);\n }));\n proj_tree.connect(gtk::signals::RowCollapsed::new(|iter_raw, _| {\n let iter = 
gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::remove_expansion(&mut state, &iter);\n }));\n proj_tree.connect(gtk::signals::RowExpanded::new(|iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::add_expansion(&mut state, &iter);\n }));\n\n \/\/ show the window\n\n window.show_all();\n gtk::main();\n}\nMake build pane larger#![feature(globs)]\n#![feature(if_let)]\n\nextern crate neovim;\nextern crate rgtk;\nextern crate serialize;\n\n\/\/use neovim::*;\nuse rgtk::*;\nuse std::collections::HashSet;\n\nmod projects;\nmod ui;\nmod utils;\n\nfn main() {\n gtk::init();\n\n \/\/ constants\n\n let width = 1242;\n let height = 768;\n let editor_height = ((height as f32) * 0.75) as i32;\n\n \/\/ create the window\n\n let mut window = gtk::Window::new(gtk::WindowType::TopLevel).unwrap();\n window.set_title(\"SolidOak\");\n window.set_window_position(gtk::WindowPosition::Center);\n window.set_default_size(width, height);\n\n window.connect(gtk::signals::DeleteEvent::new(|_| {\n gtk::main_quit();\n true\n }));\n\n \/\/ create the panes\n\n let new_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n\n let mut proj_btns = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n proj_btns.set_size_request(-1, -1);\n proj_btns.add(&new_button);\n proj_btns.add(&import_button);\n proj_btns.add(&rename_button);\n proj_btns.add(&remove_button);\n\n let mut proj_tree = gtk::TreeView::new().unwrap();\n let selection = proj_tree.get_selection().unwrap();\n let column_types = [glib::ffi::g_type_string, glib::ffi::g_type_string];\n let store = gtk::TreeStore::new(&column_types).unwrap();\n let model = store.get_model().unwrap();\n proj_tree.set_model(&model);\n proj_tree.set_headers_visible(false);\n\n let mut scroll_pane = gtk::ScrolledWindow::new(None, None).unwrap();\n scroll_pane.add(&proj_tree);\n\n let column = gtk::TreeViewColumn::new().unwrap();\n let cell = gtk::CellRendererText::new().unwrap();\n column.pack_start(&cell, true);\n column.add_attribute(&cell, \"text\", 0);\n proj_tree.append_column(&column);\n\n let mut proj_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n proj_pane.set_size_request(-1, -1);\n proj_pane.pack_start(&proj_btns, false, true, 0);\n proj_pane.pack_start(&scroll_pane, true, true, 0);\n\n let editor_pane = gtk::VteTerminal::new().unwrap();\n editor_pane.set_size_request(-1, editor_height);\n\n let run_button = gtk::Button::new_with_label(\"Run\").unwrap();\n let build_button = gtk::Button::new_with_label(\"Build\").unwrap();\n let test_button = gtk::Button::new_with_label(\"Test\").unwrap();\n let clean_button = gtk::Button::new_with_label(\"Clean\").unwrap();\n let stop_button = gtk::Button::new_with_label(\"Stop\").unwrap();\n\n let mut build_btns = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n build_btns.set_size_request(-1, -1);\n build_btns.add(&run_button);\n build_btns.add(&build_button);\n build_btns.add(&test_button);\n build_btns.add(&clean_button);\n build_btns.add(&stop_button);\n\n let build_term = gtk::VteTerminal::new().unwrap();\n\n let mut build_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n build_pane.pack_start(&build_btns, false, true, 0);\n build_pane.pack_start(&build_term, true, true, 0);\n\n let mut content = gtk::Box::new(gtk::Orientation::Vertical, 
0).unwrap();\n content.pack_start(&editor_pane, false, true, 0);\n content.pack_start(&build_pane, true, true, 0);\n\n let mut hbox = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n hbox.pack_start(&proj_pane, false, true, 0);\n hbox.pack_start(&content, true, true, 0);\n window.add(&hbox);\n\n \/\/ populate the project tree\n\n let mut state = ::utils::State{\n projects: HashSet::new(),\n expansions: HashSet::new(),\n selection: None,\n tree_model: &model,\n tree_store: &store,\n tree_selection: &selection,\n rename_button: &rename_button,\n remove_button: &remove_button,\n };\n\n ::utils::create_data_dir();\n ::utils::read_prefs(&mut state);\n ::ui::update_project_tree(&mut state, &mut proj_tree);\n\n \/\/ connect to the signals\n\n new_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::new_project(&mut state, &mut proj_tree);\n }));\n import_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::import_project(&mut state, &mut proj_tree);\n }));\n rename_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::rename_file(&mut state);\n }));\n remove_button.connect(gtk::signals::Clicked::new(|| {\n ::projects::remove_item(&mut state);\n }));\n selection.connect(gtk::signals::Changed::new(|| {\n ::projects::update_selection(&mut state);\n }));\n proj_tree.connect(gtk::signals::RowCollapsed::new(|iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::remove_expansion(&mut state, &iter);\n }));\n proj_tree.connect(gtk::signals::RowExpanded::new(|iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::add_expansion(&mut state, &iter);\n }));\n\n \/\/ show the window\n\n window.show_all();\n gtk::main();\n}\n<|endoftext|>"} {"text":"\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. 
If not, see .\n\n\/*!\n\nBindings and wrappers for __GTK+__, __GLib__ and __Cairo__.\n\nThe various parts of __rgtk__ can be found in the submodules __gtk__, __gdk__, __glib__ and __cairo__.\n\nTrait reexports\n===============\n\nFor you're conveniance the various traits of `rgtk` are reexported in the `rgtk::*`\nnamespace as `{Gtk\/Gdk\/Glib\/Cairo}{trait_name}Trait` so you can just use...\n\n```Rust\nextern mod rgtk;\nuse rgtk::*;\n```\n\n...to easily access all the traits methods:\n\n```Rust\nlet button = gtk::Button:new(); \/\/ trait gtk::traits::Button reexported as GtkButtonTrait,\n \/\/ it's trait methods can be accessed here.\n```\n*\/\n\n#![crate_name = \"rgtk\"]\n#![crate_type = \"lib\"]\n#![crate_type = \"rlib\"]\n#![feature(macro_rules)]\n#![allow(dead_code)] \/\/ TODO: drop this\n#![feature(unsafe_destructor)]\n\nextern crate libc;\n\npub use gtk::traits::Widget as GtkWidgetTrait;\npub use gtk::traits::Container as GtkContainerTrait;\npub use gtk::traits::Window as GtkWindowTrait;\npub use gtk::traits::Misc as GtkMiscTrait;\npub use gtk::traits::Button as GtkButtonTrait;\npub use gtk::traits::Label as GtkLabelTrait;\npub use gtk::traits::Box as GtkBoxTrait;\npub use gtk::traits::Orientable as GtkOrientableTrait;\npub use gtk::traits::Frame as GtkFrameTrait;\npub use gtk::traits::ToggleButton as GtkToggleButtonTrait;\npub use gtk::traits::ScaleButton as GtkScaleButtonTrait;\npub use gtk::traits::Entry as GtkEntryTrait;\npub use gtk::traits::Bin as GtkBinTrait;\npub use gtk::traits::ToolShell as GtkToolShellTrait;\npub use gtk::traits::ToolItem as GtkToolItemTrait;\npub use gtk::traits::ToolButton as GtkToolButtonTrait;\npub use gtk::traits::ToggleToolButton as GtkToggleToolButtonTrait;\npub use gtk::traits::Dialog as GtkDialogTrait;\npub use gtk::traits::ColorChooser as GtkColorChooserTrait;\npub use gtk::traits::Scrollable as GtkScrollableTrait;\npub use gtk::traits::FileChooser as GtkFileChooserTrait;\npub use gtk::traits::FontChooser as GtkFontChooserTrait;\npub use gtk::traits::AppChooser as GtkAppChooserTrait;\npub use gtk::traits::Range as GtkRangeTrait;\npub use gtk::traits::Editable as GtkEditableTrait;\npub use gtk::traits::MenuShell as GtkMenuShellTrait;\npub use gtk::traits::MenuItem as GtkMenuItemTrait;\npub use gtk::traits::CheckMenuItem as GtkCheckMenuItemTrait;\npub use gtk::traits::CellEditable as GtkCellEditable;\n\n#[doc(hidden)]\n#[cfg(target_os=\"macos\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = \"gtk-3.0\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3.0\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\n#[doc(hidden)]\n#[cfg(target_os=\"linux\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = \"gtk-3\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\n#[doc(hidden)]\n#[cfg(target_os=\"windows\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = \"gtk-3\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\npub mod gtk;\npub mod cairo;\npub mod gdk;\npub mod glib;\nReexport CellRenderer trait\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 
3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. If not, see .\n\n\/*!\n\nBindings and wrappers for __GTK+__, __GLib__ and __Cairo__.\n\nThe various parts of __rgtk__ can be found in the submodules __gtk__, __gdk__, __glib__ and __cairo__.\n\nTrait reexports\n===============\n\nFor you're conveniance the various traits of `rgtk` are reexported in the `rgtk::*`\nnamespace as `{Gtk\/Gdk\/Glib\/Cairo}{trait_name}Trait` so you can just use...\n\n```Rust\nextern mod rgtk;\nuse rgtk::*;\n```\n\n...to easily access all the traits methods:\n\n```Rust\nlet button = gtk::Button:new(); \/\/ trait gtk::traits::Button reexported as GtkButtonTrait,\n \/\/ it's trait methods can be accessed here.\n```\n*\/\n\n#![crate_name = \"rgtk\"]\n#![crate_type = \"lib\"]\n#![crate_type = \"rlib\"]\n#![feature(macro_rules)]\n#![allow(dead_code)] \/\/ TODO: drop this\n#![feature(unsafe_destructor)]\n\nextern crate libc;\n\npub use gtk::traits::Widget as GtkWidgetTrait;\npub use gtk::traits::Container as GtkContainerTrait;\npub use gtk::traits::Window as GtkWindowTrait;\npub use gtk::traits::Misc as GtkMiscTrait;\npub use gtk::traits::Button as GtkButtonTrait;\npub use gtk::traits::Label as GtkLabelTrait;\npub use gtk::traits::Box as GtkBoxTrait;\npub use gtk::traits::Orientable as GtkOrientableTrait;\npub use gtk::traits::Frame as GtkFrameTrait;\npub use gtk::traits::ToggleButton as GtkToggleButtonTrait;\npub use gtk::traits::ScaleButton as GtkScaleButtonTrait;\npub use gtk::traits::Entry as GtkEntryTrait;\npub use gtk::traits::Bin as GtkBinTrait;\npub use gtk::traits::ToolShell as GtkToolShellTrait;\npub use gtk::traits::ToolItem as GtkToolItemTrait;\npub use gtk::traits::ToolButton as GtkToolButtonTrait;\npub use gtk::traits::ToggleToolButton as GtkToggleToolButtonTrait;\npub use gtk::traits::Dialog as GtkDialogTrait;\npub use gtk::traits::ColorChooser as GtkColorChooserTrait;\npub use gtk::traits::Scrollable as GtkScrollableTrait;\npub use gtk::traits::FileChooser as GtkFileChooserTrait;\npub use gtk::traits::FontChooser as GtkFontChooserTrait;\npub use gtk::traits::AppChooser as GtkAppChooserTrait;\npub use gtk::traits::Range as GtkRangeTrait;\npub use gtk::traits::Editable as GtkEditableTrait;\npub use gtk::traits::MenuShell as GtkMenuShellTrait;\npub use gtk::traits::MenuItem as GtkMenuItemTrait;\npub use gtk::traits::CheckMenuItem as GtkCheckMenuItemTrait;\npub use gtk::traits::CellEditable as GtkCellEditable;\npub use gtk::traits::CellRenderer as GtkCellRenderer;\n\n#[doc(hidden)]\n#[cfg(target_os=\"macos\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = \"gtk-3.0\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3.0\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\n#[doc(hidden)]\n#[cfg(target_os=\"linux\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = \"gtk-3\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\n#[doc(hidden)]\n#[cfg(target_os=\"windows\")]\nmod platform {\n #[link(name = \"glib-2.0\")]\n #[link(name = 
\"gtk-3\")]\n #[link(name = \"gio-2.0\")]\n #[link(name = \"gobject-2.0\")]\n #[link(name = \"gdk-3\")]\n #[link(name = \"rgtk_glue\", kind = \"static\")]\n extern{}\n}\n\npub mod gtk;\npub mod cairo;\npub mod gdk;\npub mod glib;\n<|endoftext|>"} {"text":"Spiff up status message on send.<|endoftext|>"} {"text":"Add mutable access to cells in canvas<|endoftext|>"} {"text":"use std::iter::repeat;\n\nuse ansi_term::Style;\n\nuse options::{SizeFormat, TimeType};\n\n#[derive(PartialEq, Debug, Copy)]\npub enum Column {\n Permissions,\n FileSize(SizeFormat),\n Timestamp(TimeType, i64),\n Blocks,\n User,\n Group,\n HardLinks,\n Inode,\n\n GitStatus,\n}\n\n\/\/\/ Each column can pick its own **Alignment**. Usually, numbers are\n\/\/\/ right-aligned, and text is left-aligned.\n#[derive(Copy)]\npub enum Alignment {\n Left, Right,\n}\n\nimpl Column {\n\n \/\/\/ Get the alignment this column should use.\n pub fn alignment(&self) -> Alignment {\n match *self {\n Column::FileSize(_) => Alignment::Right,\n Column::HardLinks => Alignment::Right,\n Column::Inode => Alignment::Right,\n Column::Blocks => Alignment::Right,\n Column::GitStatus => Alignment::Right,\n _ => Alignment::Left,\n }\n }\n\n \/\/\/ Get the text that should be printed at the top, when the user elects\n \/\/\/ to have a header row printed.\n pub fn header(&self) -> &'static str {\n match *self {\n Column::Permissions => \"Permissions\",\n Column::FileSize(_) => \"Size\",\n Column::Timestamp(t, _) => t.header(),\n Column::Blocks => \"Blocks\",\n Column::User => \"User\",\n Column::Group => \"Group\",\n Column::HardLinks => \"Links\",\n Column::Inode => \"inode\",\n Column::GitStatus => \"Git\",\n }\n }\n}\n\n\/\/\/ Pad a string with the given number of spaces.\nfn spaces(length: usize) -> String {\n repeat(\" \").take(length).collect()\n}\n\nimpl Alignment {\n \/\/\/ Pad a string with the given alignment and number of spaces.\n \/\/\/\n \/\/\/ This doesn't take the width the string *should* be, rather the number\n \/\/\/ of spaces to add: this is because the strings are usually full of\n \/\/\/ invisible control characters, so getting the displayed width of the\n \/\/\/ string is not as simple as just getting its length.\n pub fn pad_string(&self, string: &String, padding: usize) -> String {\n match *self {\n Alignment::Left => format!(\"{}{}\", string, spaces(padding).as_slice()),\n Alignment::Right => format!(\"{}{}\", spaces(padding), string.as_slice()),\n }\n }\n}\n\n#[derive(PartialEq, Debug)]\npub struct Cell {\n pub length: usize,\n pub text: String,\n}\n\nimpl Cell {\n pub fn paint(style: Style, string: &str) -> Cell {\n Cell {\n text: style.paint(string).to_string(),\n length: string.len(),\n }\n }\n}\nReplace &String with &struse std::iter::repeat;\n\nuse ansi_term::Style;\n\nuse options::{SizeFormat, TimeType};\n\n#[derive(PartialEq, Debug, Copy)]\npub enum Column {\n Permissions,\n FileSize(SizeFormat),\n Timestamp(TimeType, i64),\n Blocks,\n User,\n Group,\n HardLinks,\n Inode,\n\n GitStatus,\n}\n\n\/\/\/ Each column can pick its own **Alignment**. 
Usually, numbers are\n\/\/\/ right-aligned, and text is left-aligned.\n#[derive(Copy)]\npub enum Alignment {\n Left, Right,\n}\n\nimpl Column {\n\n \/\/\/ Get the alignment this column should use.\n pub fn alignment(&self) -> Alignment {\n match *self {\n Column::FileSize(_) => Alignment::Right,\n Column::HardLinks => Alignment::Right,\n Column::Inode => Alignment::Right,\n Column::Blocks => Alignment::Right,\n Column::GitStatus => Alignment::Right,\n _ => Alignment::Left,\n }\n }\n\n \/\/\/ Get the text that should be printed at the top, when the user elects\n \/\/\/ to have a header row printed.\n pub fn header(&self) -> &'static str {\n match *self {\n Column::Permissions => \"Permissions\",\n Column::FileSize(_) => \"Size\",\n Column::Timestamp(t, _) => t.header(),\n Column::Blocks => \"Blocks\",\n Column::User => \"User\",\n Column::Group => \"Group\",\n Column::HardLinks => \"Links\",\n Column::Inode => \"inode\",\n Column::GitStatus => \"Git\",\n }\n }\n}\n\n\/\/\/ Pad a string with the given number of spaces.\nfn spaces(length: usize) -> String {\n repeat(\" \").take(length).collect()\n}\n\nimpl Alignment {\n \/\/\/ Pad a string with the given alignment and number of spaces.\n \/\/\/\n \/\/\/ This doesn't take the width the string *should* be, rather the number\n \/\/\/ of spaces to add: this is because the strings are usually full of\n \/\/\/ invisible control characters, so getting the displayed width of the\n \/\/\/ string is not as simple as just getting its length.\n pub fn pad_string(&self, string: &str, padding: usize) -> String {\n match *self {\n Alignment::Left => format!(\"{}{}\", string, spaces(padding).as_slice()),\n Alignment::Right => format!(\"{}{}\", spaces(padding), string.as_slice()),\n }\n }\n}\n\n#[derive(PartialEq, Debug)]\npub struct Cell {\n pub length: usize,\n pub text: String,\n}\n\nimpl Cell {\n pub fn paint(style: Style, string: &str) -> Cell {\n Cell {\n text: style.paint(string).to_string(),\n length: string.len(),\n }\n }\n}\n<|endoftext|>"} {"text":"Generate `AmbigGt` only when '>' followed by '>' or '='. 
Fix.<|endoftext|>"} {"text":"Iterator::to_owned_vec is no more.<|endoftext|>"} {"text":"Updated documentation<|endoftext|>"} {"text":"Make search_dbpage's argument to be SearchCond enum, dispatch to different subroutines based on it's a key or command<|endoftext|>"} {"text":"Add more tests and fix a bug in from_code.<|endoftext|>"} {"text":"Switch from compare to is_less<|endoftext|>"} {"text":"Allow using VAR= and setting the value to empty string<|endoftext|>"} {"text":"Add missing details; print code along with result<|endoftext|>"} {"text":"Remove Singleton<|endoftext|>"} {"text":"rub updates<|endoftext|>"} {"text":"Don't fail horribly on empty strings and newlines<|endoftext|>"} {"text":"Add re-exporting library part to ilc, for easier reuseextern crate ilc_base;\nextern crate ilc_ops;\nextern crate ilc_cli;\n\nextern crate ilc_format_weechat;\nextern crate ilc_format_energymech;\n\npub use ilc_base::{Context, Decode, Encode, Event, context, dummy, error, event, format};\npub use ilc_ops::{convert, dedup, freq, parse, seen, sort};\n\npub use ilc_cli::{decoder, encoder, force_decoder, force_encoder, open_files};\n\npub use convert::convert;\npub use dedup::dedup;\npub use freq::freq;\npub use parse::parse;\npub use seen::seen;\npub use sort::sort;\n<|endoftext|>"} {"text":"Feature add serde_yaml output frameworkuse std::collections::BTreeMap;\nuse rusqlite::Connection;\nuse dbVariableName;\nuse dbJob;\nuse dbFsFile;\nuse serde_yaml;\nuse std::collections::HashSet;\n\n\nfn job_provide_list_by_job(conn: &Connection) -> Vec {\n let mut items = Vec::::new();\n let provide_list_prep_rc = conn.prepare(\n \"SELECT DISTINCT\n VARIABLE_NAME.id,\n VARIABLE_NAME.name\n FROM JOBPROVIDE , VARIABLE_NAME\n WHERE\n VARIABLE_NAME.id = JOBPROVIDE.fk_variable\n order by VARIABLE_NAME.name\",\n );\n if provide_list_prep_rc.is_err() {\n error!(\"SQL issue in job_provide_list\");\n return items;\n }\n let mut provide_list_prep = provide_list_prep_rc.unwrap();\n\n let result_row = provide_list_prep.query(&[]);\n let mut rox = result_row.unwrap();\n\n while let Some(row_query) = rox.next() {\n let row = row_query.unwrap();\n let bill = dbVariableName::VariableName {\n id: row.get(0),\n name: row.get(1),\n };\n debug!(\"bill:{}\", bill.id);\n items.push(bill);\n }\n return items;\n}\n\n\n\nfn job_depend_list_by_job(conn: &Connection) -> Vec {\n let mut items = Vec::::new();\n let provide_list_prep_rc = conn.prepare(\n \"SELECT DISTINCT\n VARIABLE_NAME.id,\n VARIABLE_NAME.name\n FROM JOBDEPEND , VARIABLE_NAME\n WHERE\n VARIABLE_NAME.id = JOBDEPEND.fk_variable\n order by VARIABLE_NAME.name\",\n );\n if provide_list_prep_rc.is_err() {\n error!(\"SQL issue in job_provide_list\");\n return items;\n }\n let mut provide_list_prep = provide_list_prep_rc.unwrap();\n\n let result_row = provide_list_prep.query(&[]);\n let mut rox = result_row.unwrap();\n\n while let Some(row_query) = rox.next() {\n let row = row_query.unwrap();\n let bill = dbVariableName::VariableName {\n id: row.get(0),\n name: row.get(1),\n };\n debug!(\"bill:{}\", bill.id);\n items.push(bill);\n }\n return items;\n}\n\n\nfn job_targets_list_by_job(conn: &Connection) -> Vec {\n let mut items = Vec::::new();\n\n let mut hs_depend: HashSet = HashSet::new();\n let list_dep = job_depend_list_by_job(&conn);\n for item in list_dep {\n hs_depend.insert(item.id);\n }\n for item in job_provide_list_by_job(&conn) {\n let foo = hs_depend.contains(&item.id);\n if foo == false {\n items.push(item)\n }\n }\n return items;\n}\n\nfn list_job(conn: &Connection, 
fk_variable_name: &i32) -> Vec {\n let mut items = Vec::::new();\n let provide_list_prep_rc = conn.prepare(\n \"SELECT DISTINCT\n JOB.id,\n JOB.name,\n JOB.fk_file\n FROM JOB, JOBPROVIDE , VARIABLE_NAME\n WHERE\n JOB.id = JOBPROVIDE.fk_job\n AND\n VARIABLE_NAME.id = JOBPROVIDE.fk_variable\n AND\n VARIABLE_NAME.id = ?1\n order by JOB.name\",\n );\n if provide_list_prep_rc.is_err() {\n error!(\"SQL issue in job_provide_list\");\n return items;\n }\n let mut provide_list_prep = provide_list_prep_rc.unwrap();\n\n let result_row = provide_list_prep.query(&[fk_variable_name]);\n let mut rox = result_row.unwrap();\n\n while let Some(row_query) = rox.next() {\n let row = row_query.unwrap();\n let bill = dbJob::Job {\n id: row.get(0),\n name: row.get(1),\n fk_file: row.get(2),\n };\n debug!(\"bill:{}\", bill.id);\n items.push(bill);\n }\n return items;\n}\n\n\nfn list_file(conn: &Connection, fk_variable_name: &i32) -> Vec {\n let mut items = Vec::::new();\n let provide_list_prep_rc = conn.prepare(\n \"SELECT DISTINCT\n FS_FILE.id,\n FS_FILE.fk_fs_dir,\n FS_FILE.name\n FROM\n FS_FILE\n WHERE\n FS_FILE.id = ?1\n order by FS_FILE.name\",\n );\n if provide_list_prep_rc.is_err() {\n error!(\"SQL issue in job_provide_list\");\n return items;\n }\n let mut provide_list_prep = provide_list_prep_rc.unwrap();\n\n let result_row = provide_list_prep.query(&[fk_variable_name]);\n let mut rox = result_row.unwrap();\n\n while let Some(row_query) = rox.next() {\n let row = row_query.unwrap();\n let bill = dbFsFile::FsFile {\n id: row.get(0),\n fk_fs_dir: row.get(1),\n name: row.get(2),\n };\n debug!(\"bill:{}\", bill.id);\n items.push(bill);\n }\n return items;\n}\n\n\n\nfn shared_foo(conn: &Connection, variables: &Vec) -> String {\n let mut map = BTreeMap::new();\n for var_name in variables {\n let mut v_new = Vec::>::new();\n let doop = list_job(&conn, &var_name.id);\n for item in doop {\n let mut map2 = BTreeMap::new();\n map2.insert(\"name\", item.name.to_string());\n let woop = list_file(&conn, &item.fk_file);\n for op in woop {\n map2.insert(\"file_name\", op.name.to_string());\n }\n v_new.push(map2);\n }\n map.insert(var_name.name.to_string(), v_new);\n }\n return serde_yaml::to_string(&map).unwrap();\n}\n\npub fn process_list_provides(conn: &Connection) {\n let henry = job_provide_list_by_job(&conn);\n println!(\"{}\", shared_foo(&conn, &henry));\n}\n\n\npub fn process_list_targets(conn: &Connection) {\n let henry = job_targets_list_by_job(&conn);\n println!(\"{}\", shared_foo(&conn, &henry));\n}\n<|endoftext|>"} {"text":"Add accepts function.<|endoftext|>"} {"text":"Auto merge of #270 - frewsxcv:doc-examples, r=Hoverbear<|endoftext|>"} {"text":"fix warnings<|endoftext|>"} {"text":"Add an IO variant to LibError that encompasses std::io::Error<|endoftext|>"} {"text":"\/\/! A collection of traits abstracting over Listeners and Streams.\nuse std::any::{Any, TypeId};\nuse std::fmt;\nuse std::old_io::{IoResult, IoError, ConnectionAborted, InvalidInput, OtherIoError,\n Stream, Listener, Acceptor};\nuse std::old_io::net::ip::{SocketAddr, ToSocketAddr, Port};\nuse std::old_io::net::tcp::{TcpStream, TcpListener, TcpAcceptor};\nuse std::mem;\nuse std::raw::{self, TraitObject};\nuse std::sync::Arc;\n\nuse uany::UnsafeAnyExt;\nuse openssl::ssl::{Ssl, SslStream, SslContext};\nuse openssl::ssl::SslVerifyMode::SslVerifyNone;\nuse openssl::ssl::SslMethod::Sslv23;\nuse openssl::ssl::error::{SslError, StreamError, OpenSslErrors, SslSessionClosed};\nuse openssl::x509::X509FileType;\n\nmacro_rules! 
try_some {\n ($expr:expr) => (match $expr {\n Some(val) => { return Err(val); },\n _ => {}\n })\n}\n\n\/\/\/ The write-status indicating headers have not been written.\n#[allow(missing_copy_implementations)]\npub struct Fresh;\n\n\/\/\/ The write-status indicating headers have been written.\n#[allow(missing_copy_implementations)]\npub struct Streaming;\n\n\/\/\/ An abstraction to listen for connections on a certain port.\npub trait NetworkListener {\n type Acceptor: NetworkAcceptor;\n \/\/\/ Listens on a socket.\n fn listen(&mut self, addr: To) -> IoResult;\n}\n\n\/\/\/ An abstraction to receive `NetworkStream`s.\npub trait NetworkAcceptor: Clone + Send {\n type Stream: NetworkStream + Send + Clone;\n\n \/\/\/ Returns an iterator of streams.\n fn accept(&mut self) -> IoResult;\n\n \/\/\/ Get the address this Listener ended up listening on.\n fn socket_name(&self) -> IoResult;\n\n \/\/\/ Closes the Acceptor, so no more incoming connections will be handled.\n fn close(&mut self) -> IoResult<()>;\n\n \/\/\/ Returns an iterator over incoming connections.\n fn incoming(&mut self) -> NetworkConnections {\n NetworkConnections(self)\n }\n}\n\n\/\/\/ An iterator wrapper over a NetworkAcceptor.\npub struct NetworkConnections<'a, N: NetworkAcceptor>(&'a mut N);\n\nimpl<'a, N: NetworkAcceptor> Iterator for NetworkConnections<'a, N> {\n type Item = IoResult;\n fn next(&mut self) -> Option> {\n Some(self.0.accept())\n }\n}\n\n\n\/\/\/ An abstraction over streams that a Server can utilize.\npub trait NetworkStream: Stream + Any + StreamClone + Send {\n \/\/\/ Get the remote address of the underlying connection.\n fn peer_name(&mut self) -> IoResult;\n}\n\n\n#[doc(hidden)]\npub trait StreamClone {\n fn clone_box(&self) -> Box;\n}\n\nimpl StreamClone for T {\n #[inline]\n fn clone_box(&self) -> Box {\n Box::new(self.clone())\n }\n}\n\n\/\/\/ A connector creates a NetworkStream.\npub trait NetworkConnector {\n type Stream: NetworkStream + Send;\n \/\/\/ Connect to a remote address.\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult;\n}\n\nimpl fmt::Debug for Box {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.pad(\"Box\")\n }\n}\n\nimpl Clone for Box {\n #[inline]\n fn clone(&self) -> Box { self.clone_box() }\n}\n\nimpl Reader for Box {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult { (**self).read(buf) }\n}\n\nimpl Writer for Box {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write_all(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl<'a> Reader for &'a mut NetworkStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult { (**self).read(buf) }\n}\n\nimpl<'a> Writer for &'a mut NetworkStream {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write_all(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl UnsafeAnyExt for NetworkStream {\n unsafe fn downcast_ref_unchecked(&self) -> &T {\n mem::transmute(mem::transmute::<&NetworkStream,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_mut_unchecked(&mut self) -> &mut T {\n mem::transmute(mem::transmute::<&mut NetworkStream,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_unchecked(self: Box) -> Box {\n mem::transmute(mem::transmute::,\n raw::TraitObject>(self).data)\n }\n}\n\nimpl NetworkStream {\n \/\/\/ Is the underlying type in this trait object a T?\n #[inline]\n pub fn is(&self) -> bool {\n self.get_type_id() == TypeId::of::()\n 
}\n\n \/\/\/ If the underlying type is T, get a reference to the contained data.\n #[inline]\n pub fn downcast_ref(&self) -> Option<&T> {\n if self.is::() {\n Some(unsafe { self.downcast_ref_unchecked() })\n } else {\n None\n }\n }\n\n \/\/\/ If the underlying type is T, get a mutable reference to the contained\n \/\/\/ data.\n #[inline]\n pub fn downcast_mut(&mut self) -> Option<&mut T> {\n if self.is::() {\n Some(unsafe { self.downcast_mut_unchecked() })\n } else {\n None\n }\n }\n\n \/\/\/ If the underlying type is T, extract it.\n pub fn downcast(self: Box)\n -> Result, Box> {\n if self.is::() {\n Ok(unsafe { self.downcast_unchecked() })\n } else {\n Err(self)\n }\n }\n}\n\n\/\/\/ A `NetworkListener` for `HttpStream`s.\n#[allow(missing_copy_implementations)]\npub enum HttpListener {\n \/\/\/ Http variant.\n Http,\n \/\/\/ Https variant. The two paths point to the certificate and key PEM files, in that order.\n Https(Path, Path),\n}\n\nimpl NetworkListener for HttpListener {\n type Acceptor = HttpAcceptor;\n\n #[inline]\n fn listen(&mut self, addr: To) -> IoResult {\n let mut tcp = try!(TcpListener::bind(addr));\n let addr = try!(tcp.socket_name());\n Ok(match *self {\n HttpListener::Http => HttpAcceptor::Http(try!(tcp.listen()), addr),\n HttpListener::Https(ref cert, ref key) => {\n let mut ssl_context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n try_some!(ssl_context.set_cipher_list(\"DEFAULT\").map(lift_ssl_error));\n try_some!(ssl_context.set_certificate_file(\n cert, X509FileType::PEM).map(lift_ssl_error));\n try_some!(ssl_context.set_private_key_file(\n key, X509FileType::PEM).map(lift_ssl_error));\n ssl_context.set_verify(SslVerifyNone, None);\n HttpAcceptor::Https(try!(tcp.listen()), addr, Arc::new(ssl_context))\n }\n })\n }\n}\n\n\/\/\/ A `NetworkAcceptor` for `HttpStream`s.\n#[derive(Clone)]\npub enum HttpAcceptor {\n \/\/\/ Http variant.\n Http(TcpAcceptor, SocketAddr),\n \/\/\/ Https variant.\n Https(TcpAcceptor, SocketAddr, Arc),\n}\n\nimpl NetworkAcceptor for HttpAcceptor {\n type Stream = HttpStream;\n\n #[inline]\n fn accept(&mut self) -> IoResult {\n Ok(match *self {\n HttpAcceptor::Http(ref mut tcp, _) => HttpStream::Http(try!(tcp.accept())),\n HttpAcceptor::Https(ref mut tcp, _, ref ssl_context) => {\n let stream = try!(tcp.accept());\n let ssl_stream = try!(SslStream::::new_server(&**ssl_context, stream).\n map_err(lift_ssl_error));\n HttpStream::Https(ssl_stream)\n }\n })\n }\n\n #[inline]\n fn close(&mut self) -> IoResult<()> {\n match *self {\n HttpAcceptor::Http(ref mut tcp, _) => tcp.close_accept(),\n HttpAcceptor::Https(ref mut tcp, _, _) => tcp.close_accept(),\n }\n }\n\n #[inline]\n fn socket_name(&self) -> IoResult {\n match *self {\n HttpAcceptor::Http(_, addr) => Ok(addr),\n HttpAcceptor::Https(_, addr, _) => Ok(addr),\n }\n }\n}\n\n\/\/\/ A wrapper around a TcpStream.\n#[derive(Clone)]\npub enum HttpStream {\n \/\/\/ A stream over the HTTP protocol.\n Http(TcpStream),\n \/\/\/ A stream over the HTTP protocol, protected by SSL.\n Https(SslStream),\n}\n\nimpl Reader for HttpStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult {\n match *self {\n HttpStream::Http(ref mut inner) => inner.read(buf),\n HttpStream::Https(ref mut inner) => inner.read(buf)\n }\n }\n}\n\nimpl Writer for HttpStream {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) -> IoResult<()> {\n match *self {\n HttpStream::Http(ref mut inner) => inner.write_all(msg),\n HttpStream::Https(ref mut inner) => inner.write_all(msg)\n }\n }\n #[inline]\n fn flush(&mut 
self) -> IoResult<()> {\n match *self {\n HttpStream::Http(ref mut inner) => inner.flush(),\n HttpStream::Https(ref mut inner) => inner.flush(),\n }\n }\n}\n\nimpl NetworkStream for HttpStream {\n fn peer_name(&mut self) -> IoResult {\n match *self {\n HttpStream::Http(ref mut inner) => inner.peer_name(),\n HttpStream::Https(ref mut inner) => inner.get_mut().peer_name()\n }\n }\n}\n\n\/\/\/ A connector that will produce HttpStreams.\n#[allow(missing_copy_implementations)]\npub struct HttpConnector<'v>(pub Option>);\n\n\/\/\/ A method that can set verification methods on an SSL context\npub type ContextVerifier<'v> = Box ()+'v>;\n\nimpl<'v> NetworkConnector for HttpConnector<'v> {\n type Stream = HttpStream;\n\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult {\n let addr = (host, port);\n match scheme {\n \"http\" => {\n debug!(\"http scheme\");\n Ok(HttpStream::Http(try!(TcpStream::connect(addr))))\n },\n \"https\" => {\n debug!(\"https scheme\");\n let stream = try!(TcpStream::connect(addr));\n let mut context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n if let Some(ref mut verifier) = self.0 {\n verifier(&mut context);\n }\n let ssl = try!(Ssl::new(&context).map_err(lift_ssl_error));\n try!(ssl.set_hostname(host).map_err(lift_ssl_error));\n let stream = try!(SslStream::new(&context, stream).map_err(lift_ssl_error));\n Ok(HttpStream::Https(stream))\n },\n _ => {\n Err(IoError {\n kind: InvalidInput,\n desc: \"Invalid scheme for Http\",\n detail: None\n })\n }\n }\n }\n}\n\nfn lift_ssl_error(ssl: SslError) -> IoError {\n debug!(\"lift_ssl_error: {:?}\", ssl);\n match ssl {\n StreamError(err) => err,\n SslSessionClosed => IoError {\n kind: ConnectionAborted,\n desc: \"SSL Connection Closed\",\n detail: None\n },\n \/\/ Unfortunately throw this away. No way to support this\n \/\/ detail without a better Error abstraction.\n OpenSslErrors(errs) => IoError {\n kind: OtherIoError,\n desc: \"Error in OpenSSL\",\n detail: Some(format!(\"{:?}\", errs))\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use uany::UnsafeAnyExt;\n\n use mock::MockStream;\n use super::NetworkStream;\n\n #[test]\n fn test_downcast_box_stream() {\n let stream = box MockStream::new() as Box;\n\n let mock = stream.downcast::().ok().unwrap();\n assert_eq!(mock, box MockStream::new());\n\n }\n\n #[test]\n fn test_downcast_unchecked_box_stream() {\n let stream = box MockStream::new() as Box;\n\n let mock = unsafe { stream.downcast_unchecked::() };\n assert_eq!(mock, box MockStream::new());\n\n }\n\n}\nfix(net): don't stop the server when an SSL handshake fails with EOF\/\/! A collection of traits abstracting over Listeners and Streams.\nuse std::any::{Any, TypeId};\nuse std::fmt;\nuse std::old_io::{IoResult, IoError, ConnectionAborted, InvalidInput, OtherIoError,\n Stream, Listener, Acceptor};\nuse std::old_io::net::ip::{SocketAddr, ToSocketAddr, Port};\nuse std::old_io::net::tcp::{TcpStream, TcpListener, TcpAcceptor};\nuse std::mem;\nuse std::raw::{self, TraitObject};\nuse std::sync::Arc;\n\nuse uany::UnsafeAnyExt;\nuse openssl::ssl::{Ssl, SslStream, SslContext};\nuse openssl::ssl::SslVerifyMode::SslVerifyNone;\nuse openssl::ssl::SslMethod::Sslv23;\nuse openssl::ssl::error::{SslError, StreamError, OpenSslErrors, SslSessionClosed};\nuse openssl::x509::X509FileType;\n\nmacro_rules! 
try_some {\n ($expr:expr) => (match $expr {\n Some(val) => { return Err(val); },\n _ => {}\n })\n}\n\n\/\/\/ The write-status indicating headers have not been written.\n#[allow(missing_copy_implementations)]\npub struct Fresh;\n\n\/\/\/ The write-status indicating headers have been written.\n#[allow(missing_copy_implementations)]\npub struct Streaming;\n\n\/\/\/ An abstraction to listen for connections on a certain port.\npub trait NetworkListener {\n type Acceptor: NetworkAcceptor;\n \/\/\/ Listens on a socket.\n fn listen(&mut self, addr: To) -> IoResult;\n}\n\n\/\/\/ An abstraction to receive `NetworkStream`s.\npub trait NetworkAcceptor: Clone + Send {\n type Stream: NetworkStream + Send + Clone;\n\n \/\/\/ Returns an iterator of streams.\n fn accept(&mut self) -> IoResult;\n\n \/\/\/ Get the address this Listener ended up listening on.\n fn socket_name(&self) -> IoResult;\n\n \/\/\/ Closes the Acceptor, so no more incoming connections will be handled.\n fn close(&mut self) -> IoResult<()>;\n\n \/\/\/ Returns an iterator over incoming connections.\n fn incoming(&mut self) -> NetworkConnections {\n NetworkConnections(self)\n }\n}\n\n\/\/\/ An iterator wrapper over a NetworkAcceptor.\npub struct NetworkConnections<'a, N: NetworkAcceptor>(&'a mut N);\n\nimpl<'a, N: NetworkAcceptor> Iterator for NetworkConnections<'a, N> {\n type Item = IoResult;\n fn next(&mut self) -> Option> {\n Some(self.0.accept())\n }\n}\n\n\n\/\/\/ An abstraction over streams that a Server can utilize.\npub trait NetworkStream: Stream + Any + StreamClone + Send {\n \/\/\/ Get the remote address of the underlying connection.\n fn peer_name(&mut self) -> IoResult;\n}\n\n\n#[doc(hidden)]\npub trait StreamClone {\n fn clone_box(&self) -> Box;\n}\n\nimpl StreamClone for T {\n #[inline]\n fn clone_box(&self) -> Box {\n Box::new(self.clone())\n }\n}\n\n\/\/\/ A connector creates a NetworkStream.\npub trait NetworkConnector {\n type Stream: NetworkStream + Send;\n \/\/\/ Connect to a remote address.\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult;\n}\n\nimpl fmt::Debug for Box {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.pad(\"Box\")\n }\n}\n\nimpl Clone for Box {\n #[inline]\n fn clone(&self) -> Box { self.clone_box() }\n}\n\nimpl Reader for Box {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult { (**self).read(buf) }\n}\n\nimpl Writer for Box {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write_all(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl<'a> Reader for &'a mut NetworkStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult { (**self).read(buf) }\n}\n\nimpl<'a> Writer for &'a mut NetworkStream {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write_all(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl UnsafeAnyExt for NetworkStream {\n unsafe fn downcast_ref_unchecked(&self) -> &T {\n mem::transmute(mem::transmute::<&NetworkStream,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_mut_unchecked(&mut self) -> &mut T {\n mem::transmute(mem::transmute::<&mut NetworkStream,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_unchecked(self: Box) -> Box {\n mem::transmute(mem::transmute::,\n raw::TraitObject>(self).data)\n }\n}\n\nimpl NetworkStream {\n \/\/\/ Is the underlying type in this trait object a T?\n #[inline]\n pub fn is(&self) -> bool {\n self.get_type_id() == TypeId::of::()\n 
}\n\n \/\/\/ If the underlying type is T, get a reference to the contained data.\n #[inline]\n pub fn downcast_ref(&self) -> Option<&T> {\n if self.is::() {\n Some(unsafe { self.downcast_ref_unchecked() })\n } else {\n None\n }\n }\n\n \/\/\/ If the underlying type is T, get a mutable reference to the contained\n \/\/\/ data.\n #[inline]\n pub fn downcast_mut(&mut self) -> Option<&mut T> {\n if self.is::() {\n Some(unsafe { self.downcast_mut_unchecked() })\n } else {\n None\n }\n }\n\n \/\/\/ If the underlying type is T, extract it.\n pub fn downcast(self: Box)\n -> Result, Box> {\n if self.is::() {\n Ok(unsafe { self.downcast_unchecked() })\n } else {\n Err(self)\n }\n }\n}\n\n\/\/\/ A `NetworkListener` for `HttpStream`s.\n#[allow(missing_copy_implementations)]\npub enum HttpListener {\n \/\/\/ Http variant.\n Http,\n \/\/\/ Https variant. The two paths point to the certificate and key PEM files, in that order.\n Https(Path, Path),\n}\n\nimpl NetworkListener for HttpListener {\n type Acceptor = HttpAcceptor;\n\n #[inline]\n fn listen(&mut self, addr: To) -> IoResult {\n let mut tcp = try!(TcpListener::bind(addr));\n let addr = try!(tcp.socket_name());\n Ok(match *self {\n HttpListener::Http => HttpAcceptor::Http(try!(tcp.listen()), addr),\n HttpListener::Https(ref cert, ref key) => {\n let mut ssl_context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n try_some!(ssl_context.set_cipher_list(\"DEFAULT\").map(lift_ssl_error));\n try_some!(ssl_context.set_certificate_file(\n cert, X509FileType::PEM).map(lift_ssl_error));\n try_some!(ssl_context.set_private_key_file(\n key, X509FileType::PEM).map(lift_ssl_error));\n ssl_context.set_verify(SslVerifyNone, None);\n HttpAcceptor::Https(try!(tcp.listen()), addr, Arc::new(ssl_context))\n }\n })\n }\n}\n\n\/\/\/ A `NetworkAcceptor` for `HttpStream`s.\n#[derive(Clone)]\npub enum HttpAcceptor {\n \/\/\/ Http variant.\n Http(TcpAcceptor, SocketAddr),\n \/\/\/ Https variant.\n Https(TcpAcceptor, SocketAddr, Arc),\n}\n\nimpl NetworkAcceptor for HttpAcceptor {\n type Stream = HttpStream;\n\n #[inline]\n fn accept(&mut self) -> IoResult {\n Ok(match *self {\n HttpAcceptor::Http(ref mut tcp, _) => HttpStream::Http(try!(tcp.accept())),\n HttpAcceptor::Https(ref mut tcp, _, ref ssl_context) => {\n let stream = try!(tcp.accept());\n match SslStream::::new_server(&**ssl_context, stream) {\n Ok(ssl_stream) => HttpStream::Https(ssl_stream),\n Err(StreamError(ref e)) => {\n return Err(IoError {\n kind: ConnectionAborted,\n desc: \"SSL Handshake Interrupted\",\n detail: Some(e.desc.to_string())\n });\n },\n Err(e) => return Err(lift_ssl_error(e))\n }\n }\n })\n }\n\n #[inline]\n fn close(&mut self) -> IoResult<()> {\n match *self {\n HttpAcceptor::Http(ref mut tcp, _) => tcp.close_accept(),\n HttpAcceptor::Https(ref mut tcp, _, _) => tcp.close_accept(),\n }\n }\n\n #[inline]\n fn socket_name(&self) -> IoResult {\n match *self {\n HttpAcceptor::Http(_, addr) => Ok(addr),\n HttpAcceptor::Https(_, addr, _) => Ok(addr),\n }\n }\n}\n\n\/\/\/ A wrapper around a TcpStream.\n#[derive(Clone)]\npub enum HttpStream {\n \/\/\/ A stream over the HTTP protocol.\n Http(TcpStream),\n \/\/\/ A stream over the HTTP protocol, protected by SSL.\n Https(SslStream),\n}\n\nimpl Reader for HttpStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult {\n match *self {\n HttpStream::Http(ref mut inner) => inner.read(buf),\n HttpStream::Https(ref mut inner) => inner.read(buf)\n }\n }\n}\n\nimpl Writer for HttpStream {\n #[inline]\n fn write_all(&mut self, msg: &[u8]) 
-> IoResult<()> {\n match *self {\n HttpStream::Http(ref mut inner) => inner.write_all(msg),\n HttpStream::Https(ref mut inner) => inner.write_all(msg)\n }\n }\n #[inline]\n fn flush(&mut self) -> IoResult<()> {\n match *self {\n HttpStream::Http(ref mut inner) => inner.flush(),\n HttpStream::Https(ref mut inner) => inner.flush(),\n }\n }\n}\n\nimpl NetworkStream for HttpStream {\n fn peer_name(&mut self) -> IoResult {\n match *self {\n HttpStream::Http(ref mut inner) => inner.peer_name(),\n HttpStream::Https(ref mut inner) => inner.get_mut().peer_name()\n }\n }\n}\n\n\/\/\/ A connector that will produce HttpStreams.\n#[allow(missing_copy_implementations)]\npub struct HttpConnector<'v>(pub Option>);\n\n\/\/\/ A method that can set verification methods on an SSL context\npub type ContextVerifier<'v> = Box ()+'v>;\n\nimpl<'v> NetworkConnector for HttpConnector<'v> {\n type Stream = HttpStream;\n\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult {\n let addr = (host, port);\n match scheme {\n \"http\" => {\n debug!(\"http scheme\");\n Ok(HttpStream::Http(try!(TcpStream::connect(addr))))\n },\n \"https\" => {\n debug!(\"https scheme\");\n let stream = try!(TcpStream::connect(addr));\n let mut context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n if let Some(ref mut verifier) = self.0 {\n verifier(&mut context);\n }\n let ssl = try!(Ssl::new(&context).map_err(lift_ssl_error));\n try!(ssl.set_hostname(host).map_err(lift_ssl_error));\n let stream = try!(SslStream::new(&context, stream).map_err(lift_ssl_error));\n Ok(HttpStream::Https(stream))\n },\n _ => {\n Err(IoError {\n kind: InvalidInput,\n desc: \"Invalid scheme for Http\",\n detail: None\n })\n }\n }\n }\n}\n\nfn lift_ssl_error(ssl: SslError) -> IoError {\n debug!(\"lift_ssl_error: {:?}\", ssl);\n match ssl {\n StreamError(err) => err,\n SslSessionClosed => IoError {\n kind: ConnectionAborted,\n desc: \"SSL Connection Closed\",\n detail: None\n },\n \/\/ Unfortunately throw this away. 
No way to support this\n \/\/ detail without a better Error abstraction.\n OpenSslErrors(errs) => IoError {\n kind: OtherIoError,\n desc: \"Error in OpenSSL\",\n detail: Some(format!(\"{:?}\", errs))\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use uany::UnsafeAnyExt;\n\n use mock::MockStream;\n use super::NetworkStream;\n\n #[test]\n fn test_downcast_box_stream() {\n let stream = box MockStream::new() as Box;\n\n let mock = stream.downcast::().ok().unwrap();\n assert_eq!(mock, box MockStream::new());\n\n }\n\n #[test]\n fn test_downcast_unchecked_box_stream() {\n let stream = box MockStream::new() as Box;\n\n let mock = unsafe { stream.downcast_unchecked::() };\n assert_eq!(mock, box MockStream::new());\n\n }\n\n}\n<|endoftext|>"} {"text":"Make percent-{en,de}coding public.<|endoftext|>"} {"text":"\/\/ Copyright 2019 Twitter, Inc.\n\/\/ Licensed under the Apache License, Version 2.0\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nuse logger::*;\n\nuse core::fmt::Debug;\nuse std::collections::{HashMap, HashSet};\nuse std::hash::Hash;\n\npub struct Wheel {\n tick: usize,\n buckets: Vec>,\n timers: HashMap>,\n}\n\nimpl Wheel\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n pub fn new(buckets: usize) -> Self {\n let mut wheel = Self {\n tick: 0,\n buckets: Vec::with_capacity(buckets),\n timers: HashMap::new(),\n };\n for _ in 0..buckets {\n wheel.buckets.push(Bucket::new());\n }\n wheel\n }\n\n pub fn tick(&mut self, ticks: usize) -> Vec {\n let mut timers = Vec::new();\n for _ in 0..ticks {\n timers.extend(self.do_tick());\n }\n if !timers.is_empty() {\n debug!(\"timeouts: {}\", timers.len());\n }\n timers\n }\n\n pub fn do_tick(&mut self) -> Vec {\n let mut expired = Vec::with_capacity(self.buckets[self.tick].timers.len());\n let mut remaining = HashSet::new();\n for token in &self.buckets[self.tick].timers {\n if self.timers[&token].remaining == 0 {\n expired.push(*token);\n self.timers.remove(&token);\n } else {\n remaining.insert(*token);\n self.timers.get_mut(token).unwrap().remaining -= 1;\n }\n }\n self.buckets[self.tick].timers = remaining;\n if self.tick == (self.buckets.len() - 1) {\n self.tick = 0;\n } else {\n self.tick += 1;\n }\n expired\n }\n\n pub fn add(&mut self, token: T, ticks: usize) {\n trace!(\"Add timer for {:?} in {} ticks\", token, ticks);\n if self.timers.contains_key(&token) {\n self.cancel(token);\n }\n let bucket = (ticks + self.tick) % self.buckets.len();\n let remaining = ticks \/ self.buckets.len();\n let timer = Timer {\n token,\n remaining,\n bucket,\n };\n self.timers.insert(token, timer);\n self.buckets[bucket].timers.insert(token);\n }\n\n pub fn pending(&self) -> usize {\n self.timers.len()\n }\n\n pub fn cancel(&mut self, token: T) {\n if let Some(timer) = self.timers.remove(&token) {\n self.buckets[timer.bucket].timers.remove(&token);\n }\n self.timers.shrink_to_fit();\n }\n\n pub fn next_timeout(&self) -> Option {\n if self.timers.is_empty() {\n None\n } else {\n let mut remaining = 0;\n loop {\n for offset in 0..self.buckets.len() {\n let mut tick = self.tick + offset;\n if tick >= self.buckets.len() {\n tick -= self.buckets.len();\n }\n for timer in &self.buckets[tick].timers {\n if self.timers[&timer].remaining == remaining {\n return Some(offset + remaining * self.buckets.len());\n }\n }\n }\n remaining += 1;\n }\n }\n }\n}\n\npub struct Bucket {\n timers: HashSet,\n}\n\nimpl Bucket\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n pub fn new() -> Self {\n Self {\n timers: HashSet::new(),\n }\n }\n}\n\nimpl Default for Bucket\nwhere\n T: 
Copy + Clone + Eq + Hash + Debug,\n{\n fn default() -> Bucket {\n Bucket::new()\n }\n}\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash)]\npub struct Timer {\n bucket: usize,\n remaining: usize,\n token: T,\n}\n\nimpl Timer\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n pub fn token(&self) -> T {\n self.token\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn new() {\n let mut wheel = Wheel::::new(1000);\n assert!(wheel.tick(1000).is_empty());\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn add() {\n let mut wheel = Wheel::new(1000);\n let _id = wheel.add(0, 0);\n assert_eq!(wheel.pending(), 1);\n assert_eq!(wheel.next_timeout(), Some(0));\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn cancel() {\n let mut wheel = Wheel::new(1000);\n wheel.add(0, 0);\n assert_eq!(wheel.pending(), 1);\n assert_eq!(wheel.next_timeout(), Some(0));\n wheel.cancel(0);\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn tick() {\n let mut wheel = Wheel::new(1000);\n for i in 0..1000 {\n wheel.add(i, i);\n }\n assert_eq!(wheel.pending(), 1000);\n for i in 0..1000 {\n assert_eq!(wheel.next_timeout(), Some(0));\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n assert_eq!(timers[0], i);\n }\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn wrap() {\n let mut wheel = Wheel::new(1000);\n for i in 0..2000 {\n wheel.add(i, i);\n }\n assert_eq!(wheel.pending(), 2000);\n for _ in 0..1000 {\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n }\n assert_eq!(wheel.pending(), 1000);\n for _ in 0..1000 {\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n }\n assert_eq!(wheel.pending(), 0);\n }\n\n #[test]\n fn next_timeout() {\n let mut wheel = Wheel::new(1000);\n wheel.add(1, 5000);\n assert_eq!(wheel.next_timeout(), Some(5000));\n wheel.add(2, 1000);\n assert_eq!(wheel.next_timeout(), Some(1000));\n wheel.add(3, 1);\n assert_eq!(wheel.next_timeout(), Some(1));\n }\n}\ntimer: add rustdoc\/\/ Copyright 2019 Twitter, Inc.\n\/\/ Licensed under the Apache License, Version 2.0\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n\/\/! A basic implementation of a hashed wheel timer\n\nuse logger::*;\n\nuse core::fmt::Debug;\nuse std::collections::{HashMap, HashSet};\nuse std::hash::Hash;\n\npub struct Wheel {\n tick: usize,\n buckets: Vec>,\n timers: HashMap>,\n}\n\nimpl Wheel\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n \/\/\/ Create a new timer `Wheel` with a given number of `buckets`. Higher\n \/\/\/ bucket count reduces collisions and results in more efficient\n \/\/\/ bookkeeping at the expense of additional memory.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/\n \/\/\/ let timer = Wheel::::new(1000);\n \/\/\/ ```\n pub fn new(buckets: usize) -> Self {\n let mut wheel = Self {\n tick: 0,\n buckets: Vec::with_capacity(buckets),\n timers: HashMap::new(),\n };\n for _ in 0..buckets {\n wheel.buckets.push(Bucket::new());\n }\n wheel\n }\n\n \/\/\/ Moves the timer forward by a set number of ticks. 
Any timers that expire\n \/\/\/ within the provided number of ticks will be returned in a `Vec`\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/ use std::time::{Duration, Instant};\n \/\/\/\n \/\/\/ let mut timer = Wheel::new(1000);\n \/\/\/ timer.add(1, 100);\n \/\/\/\n \/\/\/ let mut last_tick = Instant::now();\n \/\/\/\n \/\/\/ loop {\n \/\/\/ \/\/ do something here\n \/\/\/ let elapsed = Instant::now() - last_tick;\n \/\/\/ let ticks = elapsed.subsec_millis();\n \/\/\/ let expired = timer.tick(ticks as usize);\n \/\/\/ if expired.len() > 0 {\n \/\/\/ break;\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n pub fn tick(&mut self, ticks: usize) -> Vec {\n let mut timers = Vec::new();\n for _ in 0..ticks {\n timers.extend(self.do_tick());\n }\n if !timers.is_empty() {\n debug!(\"timeouts: {}\", timers.len());\n }\n timers\n }\n\n \/\/ internal function to advance by a single tick\n fn do_tick(&mut self) -> Vec {\n let mut expired = Vec::with_capacity(self.buckets[self.tick].timers.len());\n let mut remaining = HashSet::new();\n for token in &self.buckets[self.tick].timers {\n if self.timers[&token].remaining == 0 {\n expired.push(*token);\n self.timers.remove(&token);\n } else {\n remaining.insert(*token);\n self.timers.get_mut(token).unwrap().remaining -= 1;\n }\n }\n self.buckets[self.tick].timers = remaining;\n if self.tick == (self.buckets.len() - 1) {\n self.tick = 0;\n } else {\n self.tick += 1;\n }\n expired\n }\n\n \/\/\/ Adds a new timer for the given token for a number of ticks in the future\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/\n \/\/\/ let mut timer = Wheel::new(1000);\n \/\/\/\n \/\/\/ timer.add(1, 0); \/\/ will expire on next tick\n \/\/\/ let expired = timer.tick(1);\n \/\/\/ assert_eq!(expired.len(), 1);\n \/\/\/ ```\n pub fn add(&mut self, token: T, ticks: usize) {\n trace!(\"Add timer for {:?} in {} ticks\", token, ticks);\n if self.timers.contains_key(&token) {\n self.cancel(token);\n }\n let bucket = (ticks + self.tick) % self.buckets.len();\n let remaining = ticks \/ self.buckets.len();\n let timer = Timer {\n token,\n remaining,\n bucket,\n };\n self.timers.insert(token, timer);\n self.buckets[bucket].timers.insert(token);\n }\n\n \/\/\/ Return the number of timers registered\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/\n \/\/\/ let mut timer = Wheel::new(1000);\n \/\/\/\n \/\/\/ assert_eq!(timer.pending(), 0);\n \/\/\/\n \/\/\/ timer.add(1, 1);\n \/\/\/ assert_eq!(timer.pending(), 1);\n \/\/\/ ```\n pub fn pending(&self) -> usize {\n self.timers.len()\n }\n\n \/\/\/ Cancel a pending timer\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/\n \/\/\/ let mut timer = Wheel::new(1000);\n \/\/\/\n \/\/\/ timer.add(1, 1);\n \/\/\/ assert_eq!(timer.pending(), 1);\n \/\/\/\n \/\/\/ timer.cancel(1);\n \/\/\/ assert_eq!(timer.pending(), 0);\n \/\/\/ ```\n pub fn cancel(&mut self, token: T) {\n if let Some(timer) = self.timers.remove(&token) {\n self.buckets[timer.bucket].timers.remove(&token);\n }\n self.timers.shrink_to_fit();\n }\n\n \/\/\/ Return the number of ticks until the next timeout would occur\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use timer::*;\n \/\/\/\n \/\/\/ let mut timer = Wheel::new(1000);\n \/\/\/\n \/\/\/ timer.add(1, 100);\n \/\/\/ assert_eq!(timer.next_timeout(), Some(100));\n \/\/\/ ```\n pub fn next_timeout(&self) -> Option {\n if self.timers.is_empty() {\n None\n } else {\n let mut remaining = 0;\n 
loop {\n for offset in 0..self.buckets.len() {\n let mut tick = self.tick + offset;\n if tick >= self.buckets.len() {\n tick -= self.buckets.len();\n }\n for timer in &self.buckets[tick].timers {\n if self.timers[&timer].remaining == remaining {\n return Some(offset + remaining * self.buckets.len());\n }\n }\n }\n remaining += 1;\n }\n }\n }\n}\n\npub struct Bucket {\n timers: HashSet,\n}\n\nimpl Bucket\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n pub fn new() -> Self {\n Self {\n timers: HashSet::new(),\n }\n }\n}\n\nimpl Default for Bucket\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n fn default() -> Bucket {\n Bucket::new()\n }\n}\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash)]\npub struct Timer {\n bucket: usize,\n remaining: usize,\n token: T,\n}\n\nimpl Timer\nwhere\n T: Copy + Clone + Eq + Hash + Debug,\n{\n pub fn token(&self) -> T {\n self.token\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn new() {\n let mut wheel = Wheel::::new(1000);\n assert!(wheel.tick(1000).is_empty());\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn add() {\n let mut wheel = Wheel::new(1000);\n let _id = wheel.add(0, 0);\n assert_eq!(wheel.pending(), 1);\n assert_eq!(wheel.next_timeout(), Some(0));\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn cancel() {\n let mut wheel = Wheel::new(1000);\n wheel.add(0, 0);\n assert_eq!(wheel.pending(), 1);\n assert_eq!(wheel.next_timeout(), Some(0));\n wheel.cancel(0);\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn tick() {\n let mut wheel = Wheel::new(1000);\n for i in 0..1000 {\n wheel.add(i, i);\n }\n assert_eq!(wheel.pending(), 1000);\n for i in 0..1000 {\n assert_eq!(wheel.next_timeout(), Some(0));\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n assert_eq!(timers[0], i);\n }\n assert_eq!(wheel.pending(), 0);\n assert_eq!(wheel.next_timeout(), None);\n }\n\n #[test]\n fn wrap() {\n let mut wheel = Wheel::new(1000);\n for i in 0..2000 {\n wheel.add(i, i);\n }\n assert_eq!(wheel.pending(), 2000);\n for _ in 0..1000 {\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n }\n assert_eq!(wheel.pending(), 1000);\n for _ in 0..1000 {\n let timers = wheel.tick(1);\n assert_eq!(timers.len(), 1);\n }\n assert_eq!(wheel.pending(), 0);\n }\n\n #[test]\n fn next_timeout() {\n let mut wheel = Wheel::new(1000);\n wheel.add(1, 5000);\n assert_eq!(wheel.next_timeout(), Some(5000));\n wheel.add(2, 1000);\n assert_eq!(wheel.next_timeout(), Some(1000));\n wheel.add(3, 1);\n assert_eq!(wheel.next_timeout(), Some(1));\n }\n}\n<|endoftext|>"} {"text":"#![crate_id(name=\"sum\", vers=\"1.0.0\", author=\"T. Jameson Little\")]\n#![feature(macro_rules)]\n\n\/*\n* This file is part of the uutils coreutils package.\n*\n* (c) T. Jameson Little \n*\n* For the full copyright and license information, please view the LICENSE file\n* that was distributed with this source code.\n*\/\n\nextern crate getopts;\nextern crate libc;\n\nuse std::os;\nuse std::io::{File, IoResult, print};\nuse std::io::stdio::{stdin_raw};\n\n#[path=\"..\/common\/util.rs\"]\nmod util;\n\nstatic VERSION: &'static str = \"1.0.0\";\nstatic NAME: &'static str = \"sum\";\n\nfn bsd_sum(mut reader: Box) -> (uint, u16) {\n let mut buf = [0, .. 
1024];\n let mut blocks_read = 0;\n let mut checksum: u16 = 0;\n loop {\n match reader.read(buf) {\n Ok(n) if n != 0 => {\n blocks_read += 1;\n for &byte in buf.slice_to(n).iter() {\n checksum = (checksum >> 1) + ((checksum & 1) << 15);\n checksum += byte as u16;\n }\n },\n _ => break,\n }\n }\n\n (blocks_read, checksum)\n}\n\nfn sysv_sum(mut reader: Box) -> (uint, u16) {\n let mut buf = [0, .. 512];\n let mut blocks_read = 0;\n let mut ret = 0;\n\n loop {\n match reader.read(buf) {\n Ok(n) if n != 0 => {\n blocks_read += 1;\n for &byte in buf.slice_to(n).iter() {\n ret += byte as u32;\n }\n },\n _ => break,\n }\n }\n\n ret = (ret & 0xffff) + (ret >> 16);\n ret = (ret & 0xffff) + (ret >> 16);\n\n (blocks_read, ret as u16)\n}\n\nfn open(name: &str) -> IoResult> {\n\tmatch name {\n\t\t\"-\" => Ok(box stdin_raw() as Box),\n\t\t_ => {\n\t\t\tlet f = try!(File::open(&Path::new(name)));\n\t\t\tOk(box f as Box)\n\t\t}\n\t}\n}\n\n#[allow(dead_code)]\nfn main() { os::set_exit_status(uumain(os::args())); }\n\npub fn uumain(args: Vec) -> int {\n let program = args.get(0).as_slice();\n let opts = [\n getopts::optflag(\"r\", \"\", \"use the BSD compatible algorithm (default)\"),\n getopts::optflag(\"s\", \"sysv\", \"use System V compatible algorithm\"),\n getopts::optflag(\"h\", \"help\", \"show this help message\"),\n getopts::optflag(\"v\", \"version\", \"print the version and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n\n if matches.opt_present(\"help\") {\n println!(\"{} {}\", program, VERSION);\n println!(\"\");\n println!(\"Usage:\");\n println!(\" {0:s} [OPTION]... [FILE]...\", program);\n println!(\"\");\n print(getopts::usage(\"checksum and count the blocks in a file\", opts).as_slice());\n println!(\"\");\n println!(\"With no FILE, or when FILE is -, read standard input.\");\n return 0;\n }\n if matches.opt_present(\"version\") {\n println!(\"{} {}\", program, VERSION);\n return 0;\n }\n\n let sysv = matches.opt_present(\"sysv\");\n\n let file = if matches.free.is_empty() {\n \"-\"\n } else {\n matches.free.get(0).as_slice()\n };\n\n let reader = match open(file) {\n Ok(f) => f,\n _ => crash!(1, \"unable to open file\")\n };\n let (blocks, sum) = if sysv {\n sysv_sum(reader)\n } else {\n bsd_sum(reader)\n };\n\n println!(\"{} {}\", sum, blocks);\n\n 0\n}\nsum: handle multiple file args#![crate_id(name=\"sum\", vers=\"1.0.0\", author=\"T. Jameson Little\")]\n#![feature(macro_rules)]\n\n\/*\n* This file is part of the uutils coreutils package.\n*\n* (c) T. Jameson Little \n*\n* For the full copyright and license information, please view the LICENSE file\n* that was distributed with this source code.\n*\/\n\nextern crate getopts;\nextern crate libc;\n\nuse std::os;\nuse std::io::{File, IoResult, print};\nuse std::io::stdio::{stdin_raw};\n\n#[path=\"..\/common\/util.rs\"]\nmod util;\n\nstatic VERSION: &'static str = \"1.0.0\";\nstatic NAME: &'static str = \"sum\";\n\nfn bsd_sum(mut reader: Box) -> (uint, u16) {\n let mut buf = [0, .. 1024];\n let mut blocks_read = 0;\n let mut checksum: u16 = 0;\n loop {\n match reader.read(buf) {\n Ok(n) if n != 0 => {\n blocks_read += 1;\n for &byte in buf.slice_to(n).iter() {\n checksum = (checksum >> 1) + ((checksum & 1) << 15);\n checksum += byte as u16;\n }\n },\n _ => break,\n }\n }\n\n (blocks_read, checksum)\n}\n\nfn sysv_sum(mut reader: Box) -> (uint, u16) {\n let mut buf = [0, .. 
512];\n let mut blocks_read = 0;\n let mut ret = 0;\n\n loop {\n match reader.read(buf) {\n Ok(n) if n != 0 => {\n blocks_read += 1;\n for &byte in buf.slice_to(n).iter() {\n ret += byte as u32;\n }\n },\n _ => break,\n }\n }\n\n ret = (ret & 0xffff) + (ret >> 16);\n ret = (ret & 0xffff) + (ret >> 16);\n\n (blocks_read, ret as u16)\n}\n\nfn open(name: &str) -> IoResult> {\n\tmatch name {\n\t\t\"-\" => Ok(box stdin_raw() as Box),\n\t\t_ => {\n\t\t\tlet f = try!(File::open(&Path::new(name)));\n\t\t\tOk(box f as Box)\n\t\t}\n\t}\n}\n\n#[allow(dead_code)]\nfn main() { os::set_exit_status(uumain(os::args())); }\n\npub fn uumain(args: Vec) -> int {\n let program = args.get(0).as_slice();\n let opts = [\n getopts::optflag(\"r\", \"\", \"use the BSD compatible algorithm (default)\"),\n getopts::optflag(\"s\", \"sysv\", \"use System V compatible algorithm\"),\n getopts::optflag(\"h\", \"help\", \"show this help message\"),\n getopts::optflag(\"v\", \"version\", \"print the version and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n\n if matches.opt_present(\"help\") {\n println!(\"{} {}\", program, VERSION);\n println!(\"\");\n println!(\"Usage:\");\n println!(\" {0:s} [OPTION]... [FILE]...\", program);\n println!(\"\");\n print(getopts::usage(\"checksum and count the blocks in a file\", opts).as_slice());\n println!(\"\");\n println!(\"With no FILE, or when FILE is -, read standard input.\");\n return 0;\n }\n if matches.opt_present(\"version\") {\n println!(\"{} {}\", program, VERSION);\n return 0;\n }\n\n let sysv = matches.opt_present(\"sysv\");\n\n let files = if matches.free.is_empty() {\n Vec::from_elem(1, \"-\".to_string())\n } else {\n matches.free\n };\n\n for file in files.iter() {\n let reader = match open(file.as_slice()) {\n Ok(f) => f,\n _ => crash!(1, \"unable to open file\")\n };\n let (blocks, sum) = if sysv {\n sysv_sum(reader)\n } else {\n bsd_sum(reader)\n };\n\n println!(\"{} {}\", sum, blocks);\n }\n\n 0\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::HashSet;\n\nuse build::{Build, Compiler};\n\n#[derive(Hash, Eq, PartialEq, Clone, Debug)]\npub struct Step<'a> {\n pub src: Source<'a>,\n pub target: &'a str,\n}\n\nmacro_rules! targets {\n ($m:ident) => {\n $m! {\n (rustc, Rustc { stage: u32 }),\n (libstd, Libstd { stage: u32, compiler: Compiler<'a> }),\n (librustc, Librustc { stage: u32, compiler: Compiler<'a> }),\n (llvm, Llvm { _dummy: () }),\n (compiler_rt, CompilerRt { _dummy: () }),\n }\n }\n}\n\nmacro_rules! item { ($a:item) => ($a) }\n\nmacro_rules! define_source {\n ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {\n item! 
{\n #[derive(Hash, Eq, PartialEq, Clone, Debug)]\n pub enum Source<'a> {\n $($name { $($args)* }),*\n }\n }\n }\n}\n\ntargets!(define_source);\n\npub fn all(build: &Build) -> Vec {\n let mut ret = Vec::new();\n let mut all = HashSet::new();\n for target in top_level(build) {\n fill(build, &target, &mut ret, &mut all);\n }\n return ret;\n\n fn fill<'a>(build: &'a Build,\n target: &Step<'a>,\n ret: &mut Vec>,\n set: &mut HashSet>) {\n if set.insert(target.clone()) {\n for dep in target.deps(build) {\n fill(build, &dep, ret, set);\n }\n ret.push(target.clone());\n }\n }\n}\n\nfn top_level(build: &Build) -> Vec {\n let mut targets = Vec::new();\n let stage = build.flags.stage.unwrap_or(2);\n\n let host = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.host.iter().next()\n .unwrap_or(&build.config.build),\n };\n let target = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.target.iter().next().map(|x| &x[..])\n .unwrap_or(host.target)\n };\n\n add_steps(build, stage, &host, &target, &mut targets);\n\n if targets.len() == 0 {\n let t = Step {\n src: Source::Llvm { _dummy: () },\n target: &build.config.build,\n };\n for host in build.config.host.iter() {\n if !build.flags.host.contains(host) {\n continue\n }\n let host = t.target(host);\n targets.push(host.librustc(stage, host.compiler(stage)));\n for target in build.config.target.iter() {\n if !build.flags.target.contains(target) {\n continue\n }\n targets.push(host.target(target)\n .libstd(stage, host.compiler(stage)));\n }\n }\n }\n\n return targets\n\n}\n\nfn add_steps<'a>(build: &'a Build,\n stage: u32,\n host: &Step<'a>,\n target: &Step<'a>,\n targets: &mut Vec>) {\n for step in build.flags.step.iter() {\n let compiler = host.compiler(stage);\n match &step[..] {\n \"libstd\" => targets.push(target.libstd(stage, compiler)),\n \"librustc\" => targets.push(target.librustc(stage, compiler)),\n \"rustc\" => targets.push(host.rustc(stage)),\n \"llvm\" => targets.push(target.llvm(())),\n \"compiler-rt\" => targets.push(target.compiler_rt(())),\n _ => panic!(\"unknown build target: `{}`\", step),\n }\n }\n}\n\nmacro_rules! constructors {\n ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(\n fn $short(&self, $($arg: $t),*) -> Step<'a> {\n Step {\n src: Source::$name { $($arg: $arg),* },\n target: self.target,\n }\n }\n )*}\n}\n\nimpl<'a> Step<'a> {\n fn compiler(&self, stage: u32) -> Compiler<'a> {\n Compiler::new(stage, self.target)\n }\n\n fn target(&self, target: &'a str) -> Step<'a> {\n Step { target: target, src: self.src.clone() }\n }\n\n targets!(constructors);\n\n pub fn deps(&self, build: &'a Build) -> Vec> {\n match self.src {\n Source::Rustc { stage: 0 } => {\n assert!(self.target == build.config.build);\n Vec::new()\n }\n Source::Rustc { stage } => {\n let compiler = Compiler::new(stage - 1, &build.config.build);\n vec![self.librustc(stage - 1, compiler)]\n }\n Source::Librustc { stage, compiler } => {\n vec![self.libstd(stage, compiler), self.llvm(())]\n }\n Source::Libstd { stage: _, compiler } => {\n vec![self.compiler_rt(()),\n self.rustc(compiler.stage).target(compiler.host)]\n }\n Source::CompilerRt { _dummy } => {\n vec![self.llvm(()).target(&build.config.build)]\n }\n Source::Llvm { _dummy } => Vec::new(),\n }\n }\n}\nrustbuild: Compile with the build compiler\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::HashSet;\n\nuse build::{Build, Compiler};\n\n#[derive(Hash, Eq, PartialEq, Clone, Debug)]\npub struct Step<'a> {\n pub src: Source<'a>,\n pub target: &'a str,\n}\n\nmacro_rules! targets {\n ($m:ident) => {\n $m! {\n (rustc, Rustc { stage: u32 }),\n (libstd, Libstd { stage: u32, compiler: Compiler<'a> }),\n (librustc, Librustc { stage: u32, compiler: Compiler<'a> }),\n (llvm, Llvm { _dummy: () }),\n (compiler_rt, CompilerRt { _dummy: () }),\n }\n }\n}\n\nmacro_rules! item { ($a:item) => ($a) }\n\nmacro_rules! define_source {\n ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {\n item! {\n #[derive(Hash, Eq, PartialEq, Clone, Debug)]\n pub enum Source<'a> {\n $($name { $($args)* }),*\n }\n }\n }\n}\n\ntargets!(define_source);\n\npub fn all(build: &Build) -> Vec {\n let mut ret = Vec::new();\n let mut all = HashSet::new();\n for target in top_level(build) {\n fill(build, &target, &mut ret, &mut all);\n }\n return ret;\n\n fn fill<'a>(build: &'a Build,\n target: &Step<'a>,\n ret: &mut Vec>,\n set: &mut HashSet>) {\n if set.insert(target.clone()) {\n for dep in target.deps(build) {\n fill(build, &dep, ret, set);\n }\n ret.push(target.clone());\n }\n }\n}\n\nfn top_level(build: &Build) -> Vec {\n let mut targets = Vec::new();\n let stage = build.flags.stage.unwrap_or(2);\n\n let host = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.host.iter().next()\n .unwrap_or(&build.config.build),\n };\n let target = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.target.iter().next().map(|x| &x[..])\n .unwrap_or(host.target)\n };\n\n add_steps(build, stage, &host, &target, &mut targets);\n\n if targets.len() == 0 {\n let t = Step {\n src: Source::Llvm { _dummy: () },\n target: &build.config.build,\n };\n for host in build.config.host.iter() {\n if !build.flags.host.contains(host) {\n continue\n }\n let host = t.target(host);\n targets.push(host.librustc(stage, t.compiler(stage)));\n for target in build.config.target.iter() {\n if !build.flags.target.contains(target) {\n continue\n }\n targets.push(host.target(target)\n .libstd(stage, t.compiler(stage)));\n }\n }\n }\n\n return targets\n\n}\n\nfn add_steps<'a>(build: &'a Build,\n stage: u32,\n host: &Step<'a>,\n target: &Step<'a>,\n targets: &mut Vec>) {\n for step in build.flags.step.iter() {\n let compiler = host.compiler(stage);\n match &step[..] {\n \"libstd\" => targets.push(target.libstd(stage, compiler)),\n \"librustc\" => targets.push(target.librustc(stage, compiler)),\n \"rustc\" => targets.push(host.rustc(stage)),\n \"llvm\" => targets.push(target.llvm(())),\n \"compiler-rt\" => targets.push(target.compiler_rt(())),\n _ => panic!(\"unknown build target: `{}`\", step),\n }\n }\n}\n\nmacro_rules! 
constructors {\n ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(\n fn $short(&self, $($arg: $t),*) -> Step<'a> {\n Step {\n src: Source::$name { $($arg: $arg),* },\n target: self.target,\n }\n }\n )*}\n}\n\nimpl<'a> Step<'a> {\n fn compiler(&self, stage: u32) -> Compiler<'a> {\n Compiler::new(stage, self.target)\n }\n\n fn target(&self, target: &'a str) -> Step<'a> {\n Step { target: target, src: self.src.clone() }\n }\n\n targets!(constructors);\n\n pub fn deps(&self, build: &'a Build) -> Vec> {\n match self.src {\n Source::Rustc { stage: 0 } => {\n assert!(self.target == build.config.build);\n Vec::new()\n }\n Source::Rustc { stage } => {\n let compiler = Compiler::new(stage - 1, &build.config.build);\n vec![self.librustc(stage - 1, compiler)]\n }\n Source::Librustc { stage, compiler } => {\n vec![self.libstd(stage, compiler), self.llvm(())]\n }\n Source::Libstd { stage: _, compiler } => {\n vec![self.compiler_rt(()),\n self.rustc(compiler.stage).target(compiler.host)]\n }\n Source::CompilerRt { _dummy } => {\n vec![self.llvm(()).target(&build.config.build)]\n }\n Source::Llvm { _dummy } => Vec::new(),\n }\n }\n}\n<|endoftext|>"} {"text":"Add a TCP example to showcase model-view separation.use cursive::traits::*;\nuse cursive::views;\n\nuse std::io::Read as _;\nuse std::io::Write as _;\nuse std::sync::{Arc, Mutex};\n\n\/\/ This example builds a simple TCP server with some parameters and some output.\n\/\/ It then builds a TUI to control the parameters and display the output.\n\nfn main() {\n let mut siv = cursive::Cursive::default();\n\n \/\/ Build a shared model\n let model = Arc::new(Mutex::new(Model {\n offset: 0,\n logs: Vec::new(),\n cb_sink: siv.cb_sink().clone(),\n }));\n\n \/\/ Start the TCP server in a thread\n start_server(Arc::clone(&model));\n\n \/\/ Build the UI from the model\n siv.add_layer(\n views::Dialog::around(build_ui(Arc::clone(&model)))\n .button(\"Quit\", |s| s.quit()),\n );\n\n siv.run();\n}\n\nstruct Model {\n offset: u8,\n logs: Vec<(u8, u8)>,\n cb_sink: cursive::CbSink,\n}\n\nfn start_server(model: Arc>) {\n std::thread::spawn(move || {\n if let Err(err) = serve(Arc::clone(&model)) {\n model\n .lock()\n .unwrap()\n .cb_sink\n .send(Box::new(move |s: &mut cursive::Cursive| {\n s.add_layer(\n views::Dialog::text(&format!(\"{:?}\", err))\n .title(\"Error in TCP server\")\n .button(\"Quit\", |s| s.quit()),\n );\n }))\n .unwrap();\n }\n });\n}\n\nfn serve(model: Arc>) -> std::io::Result<()> {\n let listener = std::net::TcpListener::bind(\"localhost:1234\")?;\n\n for stream in listener.incoming() {\n let stream = stream?;\n\n for byte in (&stream).bytes() {\n let byte = byte?;\n let mut model = model.lock().unwrap();\n let response = byte.wrapping_add(model.offset);\n model.logs.push((byte, response));\n (&stream).write_all(&[response])?;\n model\n .cb_sink\n .send(Box::new(cursive::Cursive::noop))\n .unwrap();\n }\n }\n\n Ok(())\n}\n\nfn readable_char(byte: u8) -> char {\n if byte.is_ascii_control() {\n '�'\n } else {\n byte as char\n }\n}\n\nfn build_log_viewer(model: Arc>) -> impl cursive::view::View {\n views::Canvas::new(model)\n .with_draw(|model, printer| {\n let model = model.lock().unwrap();\n for (i, &(byte, answer)) in model.logs.iter().enumerate() {\n printer.print(\n (0, i),\n &format!(\n \"{:3} '{}' -> {:3} '{}'\",\n byte,\n readable_char(byte),\n answer,\n readable_char(answer),\n ),\n );\n }\n })\n .with_required_size(|model, _req| {\n let model = model.lock().unwrap();\n cursive::Vec2::new(10, model.logs.len())\n })\n}\n\nfn 
build_selector(model: Arc>) -> impl cursive::view::View {\n views::LinearLayout::horizontal()\n .child(\n views::EditView::new()\n .content(\"0\")\n .with_id(\"edit\")\n .min_width(5),\n )\n .child(views::DummyView.fixed_width(1))\n .child(views::Button::new(\"Update\", move |s| {\n if let Some(n) = s\n .call_on_id(\"edit\", |edit: &mut views::EditView| {\n edit.get_content()\n })\n .and_then(|content| content.parse().ok())\n {\n model.lock().unwrap().offset = n;\n } else {\n s.add_layer(views::Dialog::info(\n \"Could not parse offset as u8\",\n ));\n }\n }))\n .child(views::DummyView.fixed_width(1))\n .child(views::Button::new(\"Test\", |s| {\n if let Err(err) = test_server() {\n s.add_layer(\n views::Dialog::info(&format!(\"{:?}\", err))\n .title(\"Error running test.\"),\n );\n }\n }))\n}\n\nfn test_server() -> std::io::Result<()> {\n let mut stream = std::net::TcpStream::connect(\"localhost:1234\")?;\n for &byte in &[1, 2, 3, b'a', b'c', b'd'] {\n let mut buf = [0];\n stream.write_all(&[byte])?;\n stream.read_exact(&mut buf)?;\n }\n Ok(())\n}\n\nfn build_ui(model: Arc>) -> impl cursive::view::View {\n views::LinearLayout::vertical()\n .child(build_selector(Arc::clone(&model)))\n .child(build_log_viewer(Arc::clone(&model)))\n}\n<|endoftext|>"} {"text":"Bitmask disp9 in bcond ops<|endoftext|>"} {"text":"Add unary and binary tests for incr-comp\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for struct definitions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph -Z force-overflow-checks=off\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\n\n\/\/ Change constant operand of negation -----------------------------------------\n#[cfg(cfail1)]\npub fn const_negation() -> i32 {\n -10\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn const_negation() -> i32 {\n -1\n}\n\n\n\/\/ Change constant operand of bitwise not --------------------------------------\n#[cfg(cfail1)]\npub fn const_bitwise_not() -> i32 {\n !100\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn const_bitwise_not() -> i32 {\n !99\n}\n\n\n\/\/ Change variable operand of negation -----------------------------------------\n#[cfg(cfail1)]\npub fn var_negation(x: i32) -> i32 {\n -x\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn var_negation(y: i32) -> i32 {\n -y\n}\n\n\n\/\/ Change variable operand of bitwise not 
--------------------------------------\n#[cfg(cfail1)]\npub fn var_bitwise_not(x: i32) -> i32 {\n !x\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn var_bitwise_not(y: i32) -> i32 {\n !y\n}\n\n\n\/\/ Change variable operand of deref --------------------------------------------\n#[cfg(cfail1)]\npub fn var_deref(x: &i32) -> i32 {\n *x\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn var_deref(y: &i32) -> i32 {\n *y\n}\n\n\n\/\/ Change first constant operand of addition -----------------------------------\n#[cfg(cfail1)]\npub fn first_const_add() -> i32 {\n 1 + 3\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn first_const_add() -> i32 {\n 2 + 3\n}\n\n\n\/\/ Change second constant operand of addition -----------------------------------\n#[cfg(cfail1)]\npub fn second_const_add() -> i32 {\n 1 + 2\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn second_const_add() -> i32 {\n 1 + 3\n}\n\n\n\/\/ Change first variable operand of addition -----------------------------------\n#[cfg(cfail1)]\npub fn first_var_add(a: i32) -> i32 {\n a + 2\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn first_var_add(b: i32) -> i32 {\n b + 3\n}\n\n\n\/\/ Change second variable operand of addition ----------------------------------\n#[cfg(cfail1)]\npub fn second_var_add(a: i32) -> i32 {\n 1 + a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn second_var_add(b: i32) -> i32 {\n 1 + b\n}\n\n\n\/\/ Change operator from + to - -------------------------------------------------\n#[cfg(cfail1)]\npub fn plus_to_minus(a: i32) -> i32 {\n 1 + a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn plus_to_minus(a: i32) -> i32 {\n 1 - a\n}\n\n\n\/\/ Change operator from + to * -------------------------------------------------\n#[cfg(cfail1)]\npub fn plus_to_mult(a: i32) -> i32 {\n 1 + a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn plus_to_mult(a: i32) -> i32 {\n 1 * a\n}\n\n\n\/\/ Change operator from + to \/ -------------------------------------------------\n#[cfg(cfail1)]\npub fn plus_to_div(a: i32) -> i32 {\n 1 + a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn plus_to_div(a: i32) -> i32 
{\n 1 \/ a\n}\n\n\n\/\/ Change operator from + to % -------------------------------------------------\n#[cfg(cfail1)]\npub fn plus_to_mod(a: i32) -> i32 {\n 1 + a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn plus_to_mod(a: i32) -> i32 {\n 1 % a\n}\n\n\n\/\/ Change operator from && to || -----------------------------------------------\n#[cfg(cfail1)]\npub fn and_to_or(a: bool, b: bool) -> bool {\n a && b\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn and_to_or(a: bool, b: bool) -> bool {\n a || b\n}\n\n\n\n\/\/ Change operator from & to | -------------------------------------------------\n#[cfg(cfail1)]\npub fn bitwise_and_to_bitwise_or(a: i32) -> i32 {\n 1 & a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn bitwise_and_to_bitwise_or(a: i32) -> i32 {\n 1 | a\n}\n\n\n\n\/\/ Change operator from & to ^ -------------------------------------------------\n#[cfg(cfail1)]\npub fn bitwise_and_to_bitwise_xor(a: i32) -> i32 {\n 1 & a\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn bitwise_and_to_bitwise_xor(a: i32) -> i32 {\n 1 ^ a\n}\n\n\n\n\/\/ Change operator from & to << ------------------------------------------------\n#[cfg(cfail1)]\npub fn bitwise_and_to_lshift(a: i32) -> i32 {\n a & 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn bitwise_and_to_lshift(a: i32) -> i32 {\n a << 1\n}\n\n\n\n\/\/ Change operator from & to >> ------------------------------------------------\n#[cfg(cfail1)]\npub fn bitwise_and_to_rshift(a: i32) -> i32 {\n a & 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn bitwise_and_to_rshift(a: i32) -> i32 {\n a >> 1\n}\n\n\n\n\/\/ Change operator from == to != -----------------------------------------------\n#[cfg(cfail1)]\npub fn eq_to_uneq(a: i32) -> bool {\n a == 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn eq_to_uneq(a: i32) -> bool {\n a != 1\n}\n\n\n\n\/\/ Change operator from == to < ------------------------------------------------\n#[cfg(cfail1)]\npub fn eq_to_lt(a: i32) -> bool {\n a == 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn eq_to_lt(a: i32) -> bool {\n a < 1\n}\n\n\n\n\/\/ Change operator from == to > ------------------------------------------------\n#[cfg(cfail1)]\npub fn eq_to_gt(a: i32) -> bool {\n a == 
1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn eq_to_gt(a: i32) -> bool {\n a > 1\n}\n\n\n\n\/\/ Change operator from == to <= -----------------------------------------------\n#[cfg(cfail1)]\npub fn eq_to_le(a: i32) -> bool {\n a == 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn eq_to_le(a: i32) -> bool {\n a <= 1\n}\n\n\n\n\/\/ Change operator from == to >= -----------------------------------------------\n#[cfg(cfail1)]\npub fn eq_to_ge(a: i32) -> bool {\n a == 1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn eq_to_ge(a: i32) -> bool {\n a >= 1\n}\n\n\n\n\/\/ Change type in cast expression ----------------------------------------------\n#[cfg(cfail1)]\npub fn type_cast(a: u8) -> u64 {\n let b = a as i32;\n let c = b as u64;\n c\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn type_cast(a: u8) -> u64 {\n let b = a as u32;\n let c = b as u64;\n c\n}\n\n\n\n\/\/ Change value in cast expression ---------------------------------------------\n#[cfg(cfail1)]\npub fn value_cast(a: u32) -> i32 {\n 1 as i32\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn value_cast(a: u32) -> i32 {\n 2 as i32\n}\n\n\n\n\/\/ Change l-value in assignment ------------------------------------------------\n#[cfg(cfail1)]\npub fn lvalue() -> i32 {\n let x = 10;\n x\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn lvalue() -> i32 {\n let y = 10;\n y\n}\n\n\n\n\/\/ Change r-value in assignment ------------------------------------------------\n#[cfg(cfail1)]\npub fn rvalue() -> i32 {\n let x = 10;\n x\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn rvalue() -> i32 {\n let x = 11;\n x\n}\n\n\n\n\/\/ Change index into slice -----------------------------------------------------\n#[cfg(cfail1)]\npub fn index_to_slice() -> i32 {\n let xs = [1,2,3,4,5];\n xs[1]\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfails2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfails3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn index_to_slice() -> i32 {\n let xs = &[1,2,3,4,5];\n xs[1]\n}\n<|endoftext|>"} {"text":"Add `cp` example with manual struct definition.extern crate serialize;\nextern crate docopt;\n\nuse docopt::Docopt;\n\n\/\/ Write the Docopt usage string.\nstatic USAGE: &'static str = \"\nUsage: cp [-a] \n cp [-a] ... 
\n\nOptions:\n -a, --archive Copy everything.\n\";\n\n#[deriving(Decodable, Show)]\nstruct Args {\n arg_source: Vec,\n arg_dest: String,\n arg_dir: String,\n flag_archive: bool,\n}\n\nfn main() {\n let args: Args = Docopt::new(USAGE)\n .and_then(|d| d.decode())\n .unwrap_or_else(|e| e.exit());\n println!(\"{}\", args);\n}\n<|endoftext|>"} {"text":"Add serve-root exampleextern crate httpd = \"tiny-http\";\n\nfn get_content_type(path: &Path) -> &'static str {\n let extension = match path.extension_str() {\n None => return \"text\/plain\",\n Some(e) => e\n };\n\n match extension {\n \"gif\" => \"image\/gif\",\n \"jpg\" => \"image\/jpeg\",\n \"jpeg\" => \"image\/jpeg\",\n \"png\" => \"image\/png\",\n \"pdf\" => \"application\/pdf\",\n \"htm\" => \"text\/html\",\n \"html\" => \"text\/html\",\n _ => \"text\/plain\"\n }\n}\n\nfn main() {\n let (server, port) = httpd::Server::new_with_random_port().unwrap();\n println!(\"Now listening on port {}\", port);\n\n loop {\n let rq = match server.recv() {\n Ok(rq) => rq,\n Err(_) => break\n };\n\n println!(\"{}\", rq);\n\n let response = match httpd::Response::from_file(&Path::new(rq.get_url().path.clone())) {\n Ok(res) => res,\n Err(err) => {\n let rep = httpd::Response::empty().with_status_code(httpd::StatusCode(404));\n rq.respond(rep);\n continue\n }\n };\n\n let response = response.with_header(\n httpd::Header{\n field: from_str(\"Content-Type\").unwrap(),\n value: get_content_type(&Path::new(rq.get_url().path.clone())).to_string()\n }\n );\n\n rq.respond(response);\n }\n}\n<|endoftext|>"} {"text":"Allow Generator to have !Send inputs and outputs<|endoftext|>"} {"text":"Remove the `Show` impl from `hl::RethinkDB`<|endoftext|>"} {"text":"extern crate orbital;\n\nextern crate system;\n\nuse orbital::Color;\n\nuse std::fs::File;\nuse std::io::{Read, Write};\nuse std::ops::Deref;\nuse std::sync::Arc;\nuse std::thread;\n\nuse system::error::Error;\nuse system::syscall::*;\n\nuse window::ConsoleWindow;\n\nmod window;\n\nmacro_rules! 
readln {\n () => ({\n let mut buffer = String::new();\n match std::io::stdin().read_line(&mut buffer) {\n Ok(_) => Some(buffer),\n Err(_) => None\n }\n });\n}\n\npub fn pipe() -> [usize; 2] {\n let mut fds = [0; 2];\n Error::demux(unsafe { sys_pipe2(fds.as_mut_ptr(), 0) }).unwrap();\n fds\n}\n\nfn main() {\n let to_shell_fds = pipe();\n let from_shell_fds = pipe();\n\n unsafe {\n if Error::demux(sys_clone(0)).unwrap() == 0 {\n \/\/ Close STDIO\n sys_close(2);\n sys_close(1);\n sys_close(0);\n\n \/\/ Create piped STDIO\n sys_dup(to_shell_fds[0]);\n sys_dup(from_shell_fds[1]);\n sys_dup(from_shell_fds[1]);\n\n \/\/ Close extra pipes\n sys_close(to_shell_fds[0]);\n sys_close(to_shell_fds[1]);\n sys_close(from_shell_fds[0]);\n sys_close(from_shell_fds[1]);\n\n \/\/ Execute the shell\n let shell = \"file:\/apps\/shell\/main.bin\\0\";\n sys_execve(shell.as_ptr(), 0 as *const *const u8);\n panic!(\"Shell not found\");\n } else {\n \/\/ Close extra pipes\n sys_close(to_shell_fds[0]);\n sys_close(from_shell_fds[1]);\n }\n };\n\n let window = Arc::new(ConsoleWindow::new(0, 0, 576, 400, \"Terminal\"));\n\n let window_weak = Arc::downgrade(&window);\n thread::spawn(move || {\n let mut from_shell = unsafe { File::from_fd(from_shell_fds[0]).unwrap() };\n loop {\n let mut output = String::new();\n if let Ok(_) = from_shell.read_to_string(&mut output) {\n if let Some(window) = window_weak.upgrade() {\n let window_ptr =\n (window.deref() as *const Box) as *mut Box;\n unsafe { &mut *window_ptr }.print(&output, Color::rgb(255, 255, 255));\n unsafe { &mut *window_ptr }.sync();\n } else {\n break;\n }\n } else {\n break;\n }\n }\n });\n\n {\n let mut to_shell = unsafe { File::from_fd(to_shell_fds[1]).unwrap() };\n let window_ptr = (window.deref() as *const Box) as *mut Box;\n while let Some(mut string) = unsafe { &mut *window_ptr }.read() {\n string.push('\\n');\n if let Ok(_) = to_shell.write(&string.into_bytes()) {\n\n } else {\n break;\n }\n }\n }\n}\nFix terminal pathextern crate orbital;\n\nextern crate system;\n\nuse orbital::Color;\n\nuse std::fs::File;\nuse std::io::{Read, Write};\nuse std::ops::Deref;\nuse std::sync::Arc;\nuse std::thread;\n\nuse system::error::Error;\nuse system::syscall::*;\n\nuse window::ConsoleWindow;\n\nmod window;\n\nmacro_rules! 
readln {\n () => ({\n let mut buffer = String::new();\n match std::io::stdin().read_line(&mut buffer) {\n Ok(_) => Some(buffer),\n Err(_) => None\n }\n });\n}\n\npub fn pipe() -> [usize; 2] {\n let mut fds = [0; 2];\n Error::demux(unsafe { sys_pipe2(fds.as_mut_ptr(), 0) }).unwrap();\n fds\n}\n\nfn main() {\n let to_shell_fds = pipe();\n let from_shell_fds = pipe();\n\n unsafe {\n if Error::demux(sys_clone(0)).unwrap() == 0 {\n \/\/ Close STDIO\n sys_close(2);\n sys_close(1);\n sys_close(0);\n\n \/\/ Create piped STDIO\n sys_dup(to_shell_fds[0]);\n sys_dup(from_shell_fds[1]);\n sys_dup(from_shell_fds[1]);\n\n \/\/ Close extra pipes\n sys_close(to_shell_fds[0]);\n sys_close(to_shell_fds[1]);\n sys_close(from_shell_fds[0]);\n sys_close(from_shell_fds[1]);\n\n \/\/ Execute the shell\n let shell = \"ion\\0\";\n sys_execve(shell.as_ptr(), 0 as *const *const u8);\n panic!(\"Shell not found\");\n } else {\n \/\/ Close extra pipes\n sys_close(to_shell_fds[0]);\n sys_close(from_shell_fds[1]);\n }\n };\n\n let window = Arc::new(ConsoleWindow::new(0, 0, 576, 400, \"Terminal\"));\n\n let window_weak = Arc::downgrade(&window);\n thread::spawn(move || {\n let mut from_shell = unsafe { File::from_fd(from_shell_fds[0]).unwrap() };\n loop {\n let mut output = String::new();\n if let Ok(_) = from_shell.read_to_string(&mut output) {\n if let Some(window) = window_weak.upgrade() {\n let window_ptr =\n (window.deref() as *const Box) as *mut Box;\n unsafe { &mut *window_ptr }.print(&output, Color::rgb(255, 255, 255));\n unsafe { &mut *window_ptr }.sync();\n } else {\n break;\n }\n } else {\n break;\n }\n }\n });\n\n {\n let mut to_shell = unsafe { File::from_fd(to_shell_fds[1]).unwrap() };\n let window_ptr = (window.deref() as *const Box) as *mut Box;\n while let Some(mut string) = unsafe { &mut *window_ptr }.read() {\n string.push('\\n');\n if let Ok(_) = to_shell.write(&string.into_bytes()) {\n\n } else {\n break;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"Add statistik.\/\/use std::os;\nuse std::io;\nuse std::num::from_str_radix;\n\nfn main() {\n let mut ns: Vec = io::stdin()\n .lines()\n .map(|l| from_str_radix::(l.unwrap().as_slice().trim(), 10))\n .filter(|o| match *o {\n Some(_) => true,\n None => false,\n })\n .map(|n| match n {\n Some(x) => x,\n None => fail!(\"borkage\")\n })\n .collect();\n\n ns.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Equal));\n\n let count = ns.len();\n\n let mut sum: f64 = 0.0; \/\/ no reduce in rust? 
searched, failed to find\n for n in ns.iter() {\n sum = sum + *n\n }\n\n println!(\"count: {}\", count);\n println!(\"sum: {}\", sum);\n\n if (ns.is_empty()) {\n println!(\"(empty set of numbers, remaining stats not available)\");\n } else {\n println!(\"avg: {}\", sum\/count as f64);\n println!(\"max: {}\", ns[count - 1]);\n println!(\"p999: {}\", ns[ns.len() * 999\/1000]);\n println!(\"p99: {}\", ns[ns.len() * 99\/100]);\n println!(\"p90: {}\", ns[ns.len() * 90\/100]);\n println!(\"p50: {}\", ns[ns.len() * 50\/100]);\n println!(\"min: {}\", ns[0]);\n }\n}\n<|endoftext|>"} {"text":"implement cpu halting<|endoftext|>"} {"text":"[compiler] Architecture of the symbolic execution of a spacetime program to extract every possible instant.\/\/ Copyright 2018 Pierre Talbot (IRCAM)\n\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/\/ Given a process P, we iterate over all the possible execution paths of a spacetime program.\n\nuse context::*;\nuse session::*;\nuse gcollections::VectorStack;\nuse gcollections::ops::*;\nuse std::collections::HashSet;\n\n#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)]\nenum CompletionCode {\n Terminate = 0,\n Pause = 1,\n PauseUp = 2,\n Stop = 3\n}\n\n#[derive(Clone, Debug)]\npub struct SymbolicInstant {\n pub program: Stmt,\n pub current_process: ProcessUID,\n pub state: HashSet\n}\n\nimpl SymbolicInstant {\n pub fn new(program: Stmt, current_process: ProcessUID, state: HashSet) -> Self {\n SymbolicInstant { program, current_process, state }\n }\n}\n\npub struct SymbolicExecution {\n session: Session,\n context: Context,\n visited_states: Vec>,\n next_instants: VectorStack\n}\n\nimpl SymbolicExecution {\n pub fn new(session: Session, context: Context) -> Self {\n SymbolicExecution {\n session: session,\n context: context,\n visited_states: vec![],\n next_instants: VectorStack::empty()\n }\n }\n\n pub fn for_each(mut self, f: F) -> Env\n where F: Fn(Env<(Context, SymbolicInstant)>) -> Env\n {\n let mut fake = false;\n self.initialize();\n while let Some(instant) = self.next() {\n let env = f(Env::value(self.session, (self.context, instant)));\n let (session, context) = env.decompose();\n if context.is_value() || context.is_fake() {\n fake = fake || context.is_fake();\n self.context = context.unwrap_all();\n self.session = session;\n }\n else { return Env::nothing(session) }\n }\n if fake { Env::fake(self.session, self.context) }\n else { Env::value(self.session, self.context) }\n }\n\n fn push_instant(&mut self, instant: SymbolicInstant) {\n if !self.visited_states.iter().any(|s| s == &instant.state) {\n self.visited_states.push(instant.state.clone());\n self.next_instants.insert(instant);\n }\n }\n\n fn initialize(&mut self) {\n for (i, uid) in self.context.entry_points.clone().into_iter().enumerate() {\n let process = self.context.find_proc(uid.clone());\n let mut state = HashSet::new();\n state.insert(i);\n let instant = SymbolicInstant::new(process.body, uid, state);\n self.push_instant(instant);\n }\n }\n\n fn 
next(&mut self) -> Option {\n let instant = self.next_instants.pop();\n if let Some(instant) = instant.clone() {\n self.compute_residual(instant);\n }\n instant\n }\n\n fn compute_residual(&mut self, instant: SymbolicInstant) {\n\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::types::os::arch::extra::LPWCH;\nuse libc::{self, c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::c;\nuse sys::handle::Handle;\n\nuse libc::funcs::extra::kernel32::{\n GetEnvironmentStringsW,\n FreeEnvironmentStringsW\n};\n\npub fn errno() -> i32 {\n unsafe { libc::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n use libc::types::os::arch::extra::DWORD;\n use libc::types::os::arch::extra::LPWSTR;\n use libc::types::os::arch::extra::LPVOID;\n use libc::types::os::arch::extra::WCHAR;\n\n #[link_name = \"kernel32\"]\n extern \"system\" {\n fn FormatMessageW(flags: DWORD,\n lpSrc: LPVOID,\n msgId: DWORD,\n langId: DWORD,\n buf: LPWSTR,\n nsize: DWORD,\n args: *const c_void)\n -> DWORD;\n }\n\n const FORMAT_MESSAGE_FROM_SYSTEM: DWORD = 0x00001000;\n const FORMAT_MESSAGE_IGNORE_INSERTS: DWORD = 0x00000200;\n\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as DWORD;\n\n let mut buf = [0 as WCHAR; 2048];\n\n unsafe {\n let res = FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM |\n FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as DWORD,\n ptr::null());\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n let b = buf.iter().position(|&b| b == 0).unwrap_or(buf.len());\n match String::from_utf16(&buf[..b]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) 
=> format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: LPWCH,\n cur: LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur;\n let mut len = 0;\n while *(p as *const u16).offset(len) != 0 {\n len += 1;\n }\n let p = p as *const u16;\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n let (k, v) = match s.iter().position(|&b| b == '=' as u16) {\n Some(n) => (&s[..n], &s[n+1..]),\n None => (s, &[][..]),\n };\n Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v)))\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths(paths: I) -> Result\n where I: Iterator, T: AsRef\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.push_all(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.push_all(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result {\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result {\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn 
chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::>();\n p.push(0);\n\n unsafe {\n match libc::SetCurrentDirectoryW(p.as_ptr()) != (0 as libc::BOOL) {\n true => Ok(()),\n false => Err(io::Error::last_os_error()),\n }\n }\n}\n\npub fn getenv(k: &OsStr) -> Option {\n let k = super::to_utf16_os(k);\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n }).ok()\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) {\n let k = super::to_utf16_os(k);\n let v = super::to_utf16_os(v);\n\n unsafe {\n if libc::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr()) == 0 {\n panic!(\"failed to set env: {}\", io::Error::last_os_error());\n }\n }\n}\n\npub fn unsetenv(n: &OsStr) {\n let v = super::to_utf16_os(n);\n unsafe {\n if libc::SetEnvironmentVariableW(v.as_ptr(), ptr::null()) == 0 {\n panic!(\"failed to unset env: {}\", io::Error::last_os_error());\n }\n }\n}\n\npub struct Args {\n range: Range,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option {\n getenv(\"HOME\".as_ref()).or_else(|| {\n getenv(\"USERPROFILE\".as_ref())\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if libc::GetLastError() != 0 => 0,\n 0 => sz,\n n => n as libc::DWORD,\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as libc::c_uint) }\n}\nAuto merge of #27995 - nagisa:windows-error-message, r=alexcrichton\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::types::os::arch::extra::LPWCH;\nuse libc::{self, c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::c;\nuse sys::handle::Handle;\n\nuse libc::funcs::extra::kernel32::{\n GetEnvironmentStringsW,\n FreeEnvironmentStringsW\n};\n\npub fn errno() -> i32 {\n unsafe { libc::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n use libc::types::os::arch::extra::DWORD;\n use libc::types::os::arch::extra::LPWSTR;\n use libc::types::os::arch::extra::LPVOID;\n use libc::types::os::arch::extra::WCHAR;\n\n #[link_name = \"kernel32\"]\n extern \"system\" {\n fn FormatMessageW(flags: DWORD,\n lpSrc: LPVOID,\n msgId: DWORD,\n langId: DWORD,\n buf: LPWSTR,\n nsize: DWORD,\n args: *const c_void)\n -> DWORD;\n }\n\n const FORMAT_MESSAGE_FROM_SYSTEM: DWORD = 0x00001000;\n const FORMAT_MESSAGE_IGNORE_INSERTS: DWORD = 0x00000200;\n\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as DWORD;\n\n let mut buf = [0 as WCHAR; 2048];\n\n unsafe {\n let res = FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM |\n FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as DWORD,\n ptr::null()) as usize;\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n match String::from_utf16(&buf[..res]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) 
=> format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: LPWCH,\n cur: LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur;\n let mut len = 0;\n while *(p as *const u16).offset(len) != 0 {\n len += 1;\n }\n let p = p as *const u16;\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n let (k, v) = match s.iter().position(|&b| b == '=' as u16) {\n Some(n) => (&s[..n], &s[n+1..]),\n None => (s, &[][..]),\n };\n Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v)))\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths(paths: I) -> Result\n where I: Iterator, T: AsRef\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.push_all(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.push_all(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result {\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result {\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn 
chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::>();\n p.push(0);\n\n unsafe {\n match libc::SetCurrentDirectoryW(p.as_ptr()) != (0 as libc::BOOL) {\n true => Ok(()),\n false => Err(io::Error::last_os_error()),\n }\n }\n}\n\npub fn getenv(k: &OsStr) -> Option {\n let k = super::to_utf16_os(k);\n super::fill_utf16_buf(|buf, sz| unsafe {\n libc::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n }).ok()\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) {\n let k = super::to_utf16_os(k);\n let v = super::to_utf16_os(v);\n\n unsafe {\n if libc::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr()) == 0 {\n panic!(\"failed to set env: {}\", io::Error::last_os_error());\n }\n }\n}\n\npub fn unsetenv(n: &OsStr) {\n let v = super::to_utf16_os(n);\n unsafe {\n if libc::SetEnvironmentVariableW(v.as_ptr(), ptr::null()) == 0 {\n panic!(\"failed to unset env: {}\", io::Error::last_os_error());\n }\n }\n}\n\npub struct Args {\n range: Range,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option {\n getenv(\"HOME\".as_ref()).or_else(|| {\n getenv(\"USERPROFILE\".as_ref())\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if libc::GetLastError() != 0 => 0,\n 0 => sz,\n n => n as libc::DWORD,\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as libc::c_uint) }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"alloc_jemalloc\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![allocator]\n#![unstable(feature = \"alloc_jemalloc\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(staged_api)]\n\nextern crate libc;\n\nuse libc::{c_int, c_void, size_t};\n\n\/\/ Linkage directives to pull in jemalloc and its dependencies.\n\/\/\n\/\/ On some platforms we need to be sure to link in `pthread` which jemalloc\n\/\/ depends on, and specifically on android we need to also link to libgcc.\n\/\/ Currently jemalloc is compiled with gcc which will generate calls to\n\/\/ intrinsics that are libgcc specific (e.g. those intrinsics aren't present in\n\/\/ libcompiler-rt), so link that in to get that support.\n#[link(name = \"jemalloc\", kind = \"static\")]\n#[cfg_attr(target_os = \"android\", link(name = \"gcc\"))]\n#[cfg_attr(all(not(windows),\n not(target_os = \"android\"),\n not(target_env = \"musl\")),\n link(name = \"pthread\"))]\nextern \"C\" {\n fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;\n fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;\n fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;\n fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);\n fn je_nallocx(size: size_t, flags: c_int) -> size_t;\n}\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc64\",\n target_arch = \"powerpc64le\")))]\nconst MIN_ALIGN: usize = 16;\n\n\/\/ MALLOCX_ALIGN(a) macro\nfn mallocx_align(a: usize) -> c_int {\n a.trailing_zeros() as c_int\n}\n\nfn align_to_flags(align: usize) -> c_int {\n if align <= MIN_ALIGN {\n 0\n } else {\n mallocx_align(align)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { je_mallocx(size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate_inplace(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n let flags = align_to_flags(align);\n unsafe { je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n let flags = align_to_flags(align);\n unsafe { je_sdallocx(ptr as *mut c_void, old_size as size_t, flags) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_usable_size(size: usize, align: usize) -> usize {\n let flags = align_to_flags(align);\n unsafe { je_nallocx(size as size_t, flags) as usize }\n}\n\n\/\/ These symbols are used by jemalloc on android but the really old android\n\/\/ we're building on doesn't have 
them defined, so just make sure the symbols\n\/\/ are available.\n#[no_mangle]\n#[cfg(target_os = \"android\")]\npub extern fn pthread_atfork(_prefork: *mut u8,\n _postfork_parent: *mut u8,\n _postfork_child: *mut u8) -> i32 {\n 0\n}\ntarget_arch is always powerpc64, remove powerpc64le check\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"alloc_jemalloc\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![allocator]\n#![unstable(feature = \"alloc_jemalloc\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(staged_api)]\n\nextern crate libc;\n\nuse libc::{c_int, c_void, size_t};\n\n\/\/ Linkage directives to pull in jemalloc and its dependencies.\n\/\/\n\/\/ On some platforms we need to be sure to link in `pthread` which jemalloc\n\/\/ depends on, and specifically on android we need to also link to libgcc.\n\/\/ Currently jemalloc is compiled with gcc which will generate calls to\n\/\/ intrinsics that are libgcc specific (e.g. those intrinsics aren't present in\n\/\/ libcompiler-rt), so link that in to get that support.\n#[link(name = \"jemalloc\", kind = \"static\")]\n#[cfg_attr(target_os = \"android\", link(name = \"gcc\"))]\n#[cfg_attr(all(not(windows),\n not(target_os = \"android\"),\n not(target_env = \"musl\")),\n link(name = \"pthread\"))]\nextern \"C\" {\n fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;\n fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;\n fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;\n fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);\n fn je_nallocx(size: size_t, flags: c_int) -> size_t;\n}\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc64\")))]\nconst MIN_ALIGN: usize = 16;\n\n\/\/ MALLOCX_ALIGN(a) macro\nfn mallocx_align(a: usize) -> c_int {\n a.trailing_zeros() as c_int\n}\n\nfn align_to_flags(align: usize) -> c_int {\n if align <= MIN_ALIGN {\n 0\n } else {\n mallocx_align(align)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { je_mallocx(size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate_inplace(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n let flags = align_to_flags(align);\n unsafe { je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n let flags = align_to_flags(align);\n unsafe { je_sdallocx(ptr as *mut c_void, old_size as size_t, flags) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_usable_size(size: usize, align: usize) -> usize {\n let flags = align_to_flags(align);\n unsafe { je_nallocx(size as size_t, flags) as usize }\n}\n\n\/\/ These symbols are used by jemalloc on android but the really old android\n\/\/ we're building on doesn't have them defined, so just make sure the symbols\n\/\/ are available.\n#[no_mangle]\n#[cfg(target_os = \"android\")]\npub extern fn pthread_atfork(_prefork: *mut u8,\n _postfork_parent: *mut u8,\n _postfork_child: *mut u8) -> i32 {\n 0\n}\n<|endoftext|>"} {"text":"add unicode_chars, checking unicode chars which looks same to but different from ascii codes.\/\/ Characters and their corresponding confusables were collected from\n\/\/ http:\/\/www.unicode.org\/Public\/security\/revision-06\/confusables.txt\n\nuse codemap::mk_span as make_span;\nuse error_handler::DiagnosticBuilder;\nuse lexer::StringReader;\n\nconst UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[\n (' ', \"No-Break Space\", ' '),\n (' ', \"Ogham Space Mark\", ' '),\n (' ', \"En Quad\", ' '),\n (' ', \"Em Quad\", ' '),\n (' ', \"En Space\", ' '),\n (' ', \"Em Space\", ' '),\n (' ', \"Three-Per-Em Space\", ' '),\n (' ', \"Four-Per-Em Space\", ' '),\n (' ', \"Six-Per-Em Space\", ' '),\n (' ', \"Figure Space\", ' '),\n (' ', \"Punctuation Space\", ' '),\n (' ', \"Thin Space\", ' '),\n (' ', \"Hair Space\", ' '),\n (' ', \"Narrow No-Break Space\", ' '),\n (' ', \"Medium Mathematical Space\", ' '),\n (' ', \"Ideographic Space\", ' '),\n ('ߺ', \"Nko Lajanyalan\", '_'),\n ('﹍', \"Dashed Low Line\", '_'),\n ('﹎', \"Centreline Low Line\", '_'),\n ('﹏', \"Wavy Low Line\", '_'),\n ('‐', \"Hyphen\", '-'),\n ('‑', \"Non-Breaking Hyphen\", '-'),\n ('‒', \"Figure Dash\", '-'),\n ('–', \"En Dash\", '-'),\n ('—', \"Em Dash\", '-'),\n ('﹘', \"Small Em Dash\", '-'),\n ('⁃', \"Hyphen Bullet\", '-'),\n ('˗', \"Modifier Letter Minus Sign\", '-'),\n ('−', \"Minus Sign\", '-'),\n ('ー', \"Katakana-Hiragana Prolonged Sound Mark\", '-'),\n ('٫', 
\"Arabic Decimal Separator\", ','),\n ('‚', \"Single Low-9 Quotation Mark\", ','),\n ('ꓹ', \"Lisu Letter Tone Na Po\", ','),\n (',', \"Fullwidth Comma\", ','),\n (';', \"Greek Question Mark\", ';'),\n (';', \"Fullwidth Semicolon\", ';'),\n ('ः', \"Devanagari Sign Visarga\", ':'),\n ('ઃ', \"Gujarati Sign Visarga\", ':'),\n (':', \"Fullwidth Colon\", ':'),\n ('։', \"Armenian Full Stop\", ':'),\n ('܃', \"Syriac Supralinear Colon\", ':'),\n ('܄', \"Syriac Sublinear Colon\", ':'),\n ('︰', \"Presentation Form For Vertical Two Dot Leader\", ':'),\n ('᠃', \"Mongolian Full Stop\", ':'),\n ('᠉', \"Mongolian Manchu Full Stop\", ':'),\n ('⁚', \"Two Dot Punctuation\", ':'),\n ('׃', \"Hebrew Punctuation Sof Pasuq\", ':'),\n ('˸', \"Modifier Letter Raised Colon\", ':'),\n ('꞉', \"Modifier Letter Colon\", ':'),\n ('∶', \"Ratio\", ':'),\n ('ː', \"Modifier Letter Triangular Colon\", ':'),\n ('ꓽ', \"Lisu Letter Tone Mya Jeu\", ':'),\n ('!', \"Fullwidth Exclamation Mark\", '!'),\n ('ǃ', \"Latin Letter Retroflex Click\", '!'),\n ('ʔ', \"Latin Letter Glottal Stop\", '?'),\n ('ॽ', \"Devanagari Letter Glottal Stop\", '?'),\n ('Ꭾ', \"Cherokee Letter He\", '?'),\n ('?', \"Fullwidth Question Mark\", '?'),\n ('𝅭', \"Musical Symbol Combining Augmentation Dot\", '.'),\n ('․', \"One Dot Leader\", '.'),\n ('۔', \"Arabic Full Stop\", '.'),\n ('܁', \"Syriac Supralinear Full Stop\", '.'),\n ('܂', \"Syriac Sublinear Full Stop\", '.'),\n ('꘎', \"Vai Full Stop\", '.'),\n ('𐩐', \"Kharoshthi Punctuation Dot\", '.'),\n ('·', \"Middle Dot\", '.'),\n ('٠', \"Arabic-Indic Digit Zero\", '.'),\n ('۰', \"Extended Arabic-Indic Digit Zero\", '.'),\n ('ꓸ', \"Lisu Letter Tone Mya Ti\", '.'),\n ('。', \"Ideographic Full Stop\", '.'),\n ('・', \"Katakana Middle Dot\", '.'),\n ('՝', \"Armenian Comma\", '\\''),\n (''', \"Fullwidth Apostrophe\", '\\''),\n ('‘', \"Left Single Quotation Mark\", '\\''),\n ('’', \"Right Single Quotation Mark\", '\\''),\n ('‛', \"Single High-Reversed-9 Quotation Mark\", '\\''),\n ('′', \"Prime\", '\\''),\n ('‵', \"Reversed Prime\", '\\''),\n ('՚', \"Armenian Apostrophe\", '\\''),\n ('׳', \"Hebrew Punctuation Geresh\", '\\''),\n ('`', \"Greek Varia\", '\\''),\n ('`', \"Fullwidth Grave Accent\", '\\''),\n ('΄', \"Greek Tonos\", '\\''),\n ('´', \"Greek Oxia\", '\\''),\n ('᾽', \"Greek Koronis\", '\\''),\n ('᾿', \"Greek Psili\", '\\''),\n ('῾', \"Greek Dasia\", '\\''),\n ('ʹ', \"Modifier Letter Prime\", '\\''),\n ('ʹ', \"Greek Numeral Sign\", '\\''),\n ('ˊ', \"Modifier Letter Acute Accent\", '\\''),\n ('ˋ', \"Modifier Letter Grave Accent\", '\\''),\n ('˴', \"Modifier Letter Middle Grave Accent\", '\\''),\n ('ʻ', \"Modifier Letter Turned Comma\", '\\''),\n ('ʽ', \"Modifier Letter Reversed Comma\", '\\''),\n ('ʼ', \"Modifier Letter Apostrophe\", '\\''),\n ('ʾ', \"Modifier Letter Right Half Ring\", '\\''),\n ('ꞌ', \"Latin Small Letter Saltillo\", '\\''),\n ('י', \"Hebrew Letter Yod\", '\\''),\n ('ߴ', \"Nko High Tone Apostrophe\", '\\''),\n ('ߵ', \"Nko Low Tone Apostrophe\", '\\''),\n ('"', \"Fullwidth Quotation Mark\", '\"'),\n ('“', \"Left Double Quotation Mark\", '\"'),\n ('”', \"Right Double Quotation Mark\", '\"'),\n ('‟', \"Double High-Reversed-9 Quotation Mark\", '\"'),\n ('″', \"Double Prime\", '\"'),\n ('‶', \"Reversed Double Prime\", '\"'),\n ('〃', \"Ditto Mark\", '\"'),\n ('״', \"Hebrew Punctuation Gershayim\", '\"'),\n ('˝', \"Double Acute Accent\", '\"'),\n ('ʺ', \"Modifier Letter Double Prime\", '\"'),\n ('˶', \"Modifier Letter Middle Double Acute Accent\", '\"'),\n ('˵', \"Modifier Letter Middle Double Grave 
Accent\", '\"'),\n ('ˮ', \"Modifier Letter Double Apostrophe\", '\"'),\n ('ײ', \"Hebrew Ligature Yiddish Double Yod\", '\"'),\n ('❞', \"Heavy Double Comma Quotation Mark Ornament\", '\"'),\n ('❝', \"Heavy Double Turned Comma Quotation Mark Ornament\", '\"'),\n ('❨', \"Medium Left Parenthesis Ornament\", '('),\n ('﴾', \"Ornate Left Parenthesis\", '('),\n ('(', \"Fullwidth Left Parenthesis\", '('),\n ('❩', \"Medium Right Parenthesis Ornament\", ')'),\n ('﴿', \"Ornate Right Parenthesis\", ')'),\n (')', \"Fullwidth Right Parenthesis\", ')'),\n ('[', \"Fullwidth Left Square Bracket\", '['),\n ('❲', \"Light Left Tortoise Shell Bracket Ornament\", '['),\n ('「', \"Left Corner Bracket\", '['),\n ('『', \"Left White Corner Bracket\", '['),\n ('【', \"Left Black Lenticular Bracket\", '['),\n ('〔', \"Left Tortoise Shell Bracket\", '['),\n ('〖', \"Left White Lenticular Bracket\", '['),\n ('〘', \"Left White Tortoise Shell Bracket\", '['),\n ('〚', \"Left White Square Bracket\", '['),\n (']', \"Fullwidth Right Square Bracket\", ']'),\n ('❳', \"Light Right Tortoise Shell Bracket Ornament\", ']'),\n ('」', \"Right Corner Bracket\", ']'),\n ('』', \"Right White Corner Bracket\", ']'),\n ('】', \"Right Black Lenticular Bracket\", ']'),\n ('〕', \"Right Tortoise Shell Bracket\", ']'),\n ('〗', \"Right White Lenticular Bracket\", ']'),\n ('〙', \"Right White Tortoise Shell Bracket\", ']'),\n ('〛', \"Right White Square Bracket\", ']'),\n ('❴', \"Medium Left Curly Bracket Ornament\", '{'),\n ('❵', \"Medium Right Curly Bracket Ornament\", '}'),\n ('⁎', \"Low Asterisk\", '*'),\n ('٭', \"Arabic Five Pointed Star\", '*'),\n ('∗', \"Asterisk Operator\", '*'),\n ('᜵', \"Philippine Single Punctuation\", '\/'),\n ('⁁', \"Caret Insertion Point\", '\/'),\n ('∕', \"Division Slash\", '\/'),\n ('⁄', \"Fraction Slash\", '\/'),\n ('╱', \"Box Drawings Light Diagonal Upper Right To Lower Left\", '\/'),\n ('⟋', \"Mathematical Rising Diagonal\", '\/'),\n ('⧸', \"Big Solidus\", '\/'),\n ('㇓', \"Cjk Stroke Sp\", '\/'),\n ('〳', \"Vertical Kana Repeat Mark Upper Half\", '\/'),\n ('丿', \"Cjk Unified Ideograph-4E3F\", '\/'),\n ('⼃', \"Kangxi Radical Slash\", '\/'),\n ('\', \"Fullwidth Reverse Solidus\", '\\\\'),\n ('﹨', \"Small Reverse Solidus\", '\\\\'),\n ('∖', \"Set Minus\", '\\\\'),\n ('⟍', \"Mathematical Falling Diagonal\", '\\\\'),\n ('⧵', \"Reverse Solidus Operator\", '\\\\'),\n ('⧹', \"Big Reverse Solidus\", '\\\\'),\n ('、', \"Ideographic Comma\", '\\\\'),\n ('ヽ', \"Katakana Iteration Mark\", '\\\\'),\n ('㇔', \"Cjk Stroke D\", '\\\\'),\n ('丶', \"Cjk Unified Ideograph-4E36\", '\\\\'),\n ('⼂', \"Kangxi Radical Dot\", '\\\\'),\n ('ꝸ', \"Latin Small Letter Um\", '&'),\n ('﬩', \"Hebrew Letter Alternative Plus Sign\", '+'),\n ('‹', \"Single Left-Pointing Angle Quotation Mark\", '<'),\n ('❮', \"Heavy Left-Pointing Angle Quotation Mark Ornament\", '<'),\n ('˂', \"Modifier Letter Left Arrowhead\", '<'),\n ('〈', \"Left Angle Bracket\", '<'),\n ('《', \"Left Double Angle Bracket\", '<'),\n ('꓿', \"Lisu Punctuation Full Stop\", '='),\n ('›', \"Single Right-Pointing Angle Quotation Mark\", '>'),\n ('❯', \"Heavy Right-Pointing Angle Quotation Mark Ornament\", '>'),\n ('˃', \"Modifier Letter Right Arrowhead\", '>'),\n ('〉', \"Right Angle Bracket\", '>'),\n ('》', \"Right Double Angle Bracket\", '>'),\n ('Ⲻ', \"Coptic Capital Letter Dialect-P Ni\", '-'),\n ('Ɂ', \"Latin Capital Letter Glottal Stop\", '?'),\n ('Ⳇ', \"Coptic Capital Letter Old Coptic Esh\", '\/'), ];\n\nconst ASCII_ARRAY: &'static [(char, &'static str)] = &[\n (' ', \"Space\"),\n ('_', 
\"Underscore\"),\n ('-', \"Minus\/Hyphen\"),\n (',', \"Comma\"),\n (';', \"Semicolon\"),\n (':', \"Colon\"),\n ('!', \"Exclamation Mark\"),\n ('?', \"Question Mark\"),\n ('.', \"Period\"),\n ('\\'', \"Single Quote\"),\n ('\"', \"Quotation Mark\"),\n ('(', \"Left Parenthesis\"),\n (')', \"Right Parenthesis\"),\n ('[', \"Left Square Bracket\"),\n (']', \"Right Square Bracket\"),\n ('{', \"Left Curly Brace\"),\n ('}', \"Right Curly Brace\"),\n ('*', \"Asterisk\"),\n ('\/', \"Slash\"),\n ('\\\\', \"Backslash\"),\n ('&', \"Ampersand\"),\n ('+', \"Plus Sign\"),\n ('<', \"Less-Than Sign\"),\n ('=', \"Equals Sign\"),\n ('>', \"Greater-Than Sign\"), ];\n\npub fn check_for_substitution<'a>(reader: &StringReader<'a>,\n ch: char,\n err: &mut DiagnosticBuilder) {\n UNICODE_ARRAY\n .iter()\n .find(|&& (c, _, _)| c == ch)\n .map(|&(_, u_name, ascii_char)| {\n let span = make_span(reader.last_pos, reader.pos);\n match ASCII_ARRAY.iter().find(|&& (c, _)| c == ascii_char) {\n Some(&(ascii_char, ascii_name)) => {\n let msg =\n format!(\"unicode character '{}' ({}) looks much like '{}' ({}), but it's not\",\n ch, u_name, ascii_char, ascii_name);\n err.span_help(span, &msg);\n },\n None => {\n reader\n .span_diagnostic\n .span_bug_no_panic(span,\n &format!(\"substitution character not found for '{}'\", ch));\n }\n }\n });\n}\n<|endoftext|>"} {"text":"Auto merge of #28067 - apasel422:issue-20803, r=alexcrichton\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ops::Add;\n\nfn foo(x: T) -> >::Output where i32: Add {\n 42i32 + x\n}\n\nfn main() {\n println!(\"{}\", foo(0i32));\n}\n<|endoftext|>"} {"text":"add test for tydesc name\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-fast: check-fast screws up repr paths\n\nuse std::unstable::intrinsics::get_tydesc;\n\nstruct Foo {\n x: T\n}\n\nfn main() {\n unsafe {\n assert_eq!((*get_tydesc::()).name, \"int\");\n assert_eq!((*get_tydesc::<~[int]>()).name, \"~[int]\");\n assert_eq!((*get_tydesc::>()).name, \"Foo\");\n }\n}\n<|endoftext|>"} {"text":"Missed util.rs in previous commituse std::error::Error;\nuse std::ffi::CString;\nuse std::fs::{File, remove_file, remove_dir_all};\nuse std::path::{Path, PathBuf};\nuse libc::chown;\n\npub fn own_path(path: &Path, uid: u32) -> Result<(), Box> {\n let path_str = match path.to_str() {\n Some(s) => s,\n None => return Err(From::from(\"empty CString from path\".to_string())),\n };\n unsafe {\n let path_ptr = try!(CString::new(path_str)).as_ptr();\n chown(path_ptr, uid, uid); \/\/ FIXME: check result\n }\n Ok(())\n\n \/\/ TODO: recurse for dirs? 
different method?\n}\n\npub struct Lock {\n path: PathBuf,\n pub uid: u32,\n}\n\nimpl Lock {\n pub fn new(path: &Path, uid: u32) -> Result> {\n try!(File::create(path));\n try!(own_path(path, uid));\n Ok(Lock {\n path: path.to_path_buf(),\n uid: uid,\n })\n }\n}\n\nimpl Drop for Lock {\n fn drop(&mut self) {\n remove_file(self.path.as_path());\n }\n}\n\npub struct OwnedDir {\n path: PathBuf,\n}\n\nimpl OwnedDir {\n pub fn new(path: &Path, uid: u32) -> Result> {\n try!(own_path(path, uid)); \/\/ XXX: own_dir?\n Ok(OwnedDir {\n path: path.to_path_buf(),\n })\n }\n}\n\nimpl Drop for OwnedDir {\n fn drop(&mut self) {\n remove_dir_all(self.path.as_path());\n }\n}\n<|endoftext|>"} {"text":"Add an example to encodeextern crate base58;\n\nuse base58::encode_base58;\n\nfn main() {\n let encoded = encode_base58(String::from_str(\"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\"));\n\tprintln!(\"encoded: {}\",encoded);\n}\n\n<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks if the \"sysv64\" calling convention behaves the same as the\n\/\/ \"C\" calling convention on platforms where both should be the same\n\n\/\/ This file contains versions of the following run-pass tests with\n\/\/ the calling convention changed to \"sysv64\"\n\n\/\/ cabi-int-widening\n\/\/ extern-pass-char\n\/\/ extern-pass-u32\n\/\/ extern-pass-u64\n\/\/ extern-pass-double\n\/\/ extern-pass-empty\n\/\/ extern-pass-TwoU8s\n\/\/ extern-pass-TwoU16s\n\/\/ extern-pass-TwoU32s\n\/\/ extern-pass-TwoU64s\n\/\/ extern-return-TwoU8s\n\/\/ extern-return-TwoU16s\n\/\/ extern-return-TwoU32s\n\/\/ extern-return-TwoU64s\n\/\/ foreign-fn-with-byval\n\/\/ issue-28676\n\/\/ struct-return\n\n\/\/ ignore-android\n\/\/ ignore-arm\n\/\/ ignore-aarch64\n\/\/ ignore-msvc\n\n\/\/ note: msvc is ignored as rust_test_helpers does not have the sysv64 abi on msvc\n\n#![feature(abi_sysv64)]\n#[allow(dead_code)]\n#[allow(improper_ctypes)]\n\n#[cfg(target_arch = \"x86_64\")]\nmod tests {\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU8s {\n one: u8, two: u8\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU16s {\n one: u16, two: u16\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU32s {\n one: u32, two: u32\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU64s {\n one: u64, two: u64\n }\n\n #[repr(C)]\n pub struct ManyInts {\n arg1: i8,\n arg2: i16,\n arg3: i32,\n arg4: i16,\n arg5: i8,\n arg6: TwoU8s,\n }\n\n #[repr(C)]\n pub struct Empty;\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct S {\n x: u64,\n y: u64,\n z: u64,\n }\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct Quad { a: u64, b: u64, c: u64, d: u64 }\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct Floats { a: f64, b: u8, c: 
f64 }\n\n #[link(name = \"rust_test_helpers\")]\n extern \"sysv64\" {\n pub fn rust_int8_to_int32(_: i8) -> i32;\n pub fn rust_dbg_extern_identity_u8(v: u8) -> u8;\n pub fn rust_dbg_extern_identity_u32(v: u32) -> u32;\n pub fn rust_dbg_extern_identity_u64(v: u64) -> u64;\n pub fn rust_dbg_extern_identity_double(v: f64) -> f64;\n pub fn rust_dbg_extern_empty_struct(v1: ManyInts, e: Empty, v2: ManyInts);\n pub fn rust_dbg_extern_identity_TwoU8s(v: TwoU8s) -> TwoU8s;\n pub fn rust_dbg_extern_identity_TwoU16s(v: TwoU16s) -> TwoU16s;\n pub fn rust_dbg_extern_identity_TwoU32s(v: TwoU32s) -> TwoU32s;\n pub fn rust_dbg_extern_identity_TwoU64s(v: TwoU64s) -> TwoU64s;\n pub fn rust_dbg_extern_return_TwoU8s() -> TwoU8s;\n pub fn rust_dbg_extern_return_TwoU16s() -> TwoU16s;\n pub fn rust_dbg_extern_return_TwoU32s() -> TwoU32s;\n pub fn rust_dbg_extern_return_TwoU64s() -> TwoU64s;\n pub fn get_x(x: S) -> u64;\n pub fn get_y(x: S) -> u64;\n pub fn get_z(x: S) -> u64;\n pub fn get_c_many_params(_: *const (), _: *const (),\n _: *const (), _: *const (), f: Quad) -> u64;\n pub fn rust_dbg_abi_1(q: Quad) -> Quad;\n pub fn rust_dbg_abi_2(f: Floats) -> Floats;\n }\n\n pub fn cabi_int_widening() {\n let x = unsafe {\n rust_int8_to_int32(-1)\n };\n\n assert!(x == -1);\n }\n\n pub fn extern_pass_char() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u8(22));\n }\n }\n\n pub fn extern_pass_u32() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u32(22));\n }\n }\n\n pub fn extern_pass_u64() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u64(22));\n }\n }\n\n pub fn extern_pass_double() {\n unsafe {\n assert_eq!(22.0_f64, rust_dbg_extern_identity_double(22.0_f64));\n }\n }\n\n pub fn extern_pass_empty() {\n unsafe {\n let x = ManyInts {\n arg1: 2,\n arg2: 3,\n arg3: 4,\n arg4: 5,\n arg5: 6,\n arg6: TwoU8s { one: 7, two: 8, }\n };\n let y = ManyInts {\n arg1: 1,\n arg2: 2,\n arg3: 3,\n arg4: 4,\n arg5: 5,\n arg6: TwoU8s { one: 6, two: 7, }\n };\n let empty = Empty;\n rust_dbg_extern_empty_struct(x, empty, y);\n }\n }\n\n pub fn extern_pass_twou8s() {\n unsafe {\n let x = TwoU8s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU8s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou16s() {\n unsafe {\n let x = TwoU16s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU16s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou32s() {\n unsafe {\n let x = TwoU32s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU32s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou64s() {\n unsafe {\n let x = TwoU64s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU64s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_return_twou8s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU8s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou16s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU16s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou32s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU32s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou64s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU64s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n #[inline(never)]\n fn indirect_call(func: unsafe extern \"sysv64\" fn(s: S) -> u64, s: S) -> u64 {\n unsafe {\n func(s)\n }\n }\n\n pub fn foreign_fn_with_byval() {\n let s = S { x: 1, y: 2, z: 3 };\n assert_eq!(s.x, indirect_call(get_x, s));\n assert_eq!(s.y, 
indirect_call(get_y, s));\n assert_eq!(s.z, indirect_call(get_z, s));\n }\n\n fn test() {\n use std::ptr;\n unsafe {\n let null = ptr::null();\n let q = Quad {\n a: 1,\n b: 2,\n c: 3,\n d: 4\n };\n assert_eq!(get_c_many_params(null, null, null, null, q), q.c);\n }\n }\n\n pub fn issue_28676() {\n test();\n }\n\n fn test1() {\n unsafe {\n let q = Quad { a: 0xaaaa_aaaa_aaaa_aaaa,\n b: 0xbbbb_bbbb_bbbb_bbbb,\n c: 0xcccc_cccc_cccc_cccc,\n d: 0xdddd_dddd_dddd_dddd };\n let qq = rust_dbg_abi_1(q);\n println!(\"a: {:x}\", qq.a as usize);\n println!(\"b: {:x}\", qq.b as usize);\n println!(\"c: {:x}\", qq.c as usize);\n println!(\"d: {:x}\", qq.d as usize);\n assert_eq!(qq.a, q.c + 1);\n assert_eq!(qq.b, q.d - 1);\n assert_eq!(qq.c, q.a + 1);\n assert_eq!(qq.d, q.b - 1);\n }\n }\n\n fn test2() {\n unsafe {\n let f = Floats { a: 1.234567890e-15_f64,\n b: 0b_1010_1010,\n c: 1.0987654321e-15_f64 };\n let ff = rust_dbg_abi_2(f);\n println!(\"a: {}\", ff.a as f64);\n println!(\"b: {}\", ff.b as usize);\n println!(\"c: {}\", ff.c as f64);\n assert_eq!(ff.a, f.c + 1.0f64);\n assert_eq!(ff.b, 0xff);\n assert_eq!(ff.c, f.a - 1.0f64);\n }\n }\n\n pub fn struct_return() {\n test1();\n test2();\n }\n}\n\n#[cfg(target_arch = \"x86_64\")]\nfn main() {\n use tests::*;\n cabi_int_widening();\n extern_pass_char();\n extern_pass_u32();\n extern_pass_u64();\n extern_pass_double();\n extern_pass_empty();\n extern_pass_twou8s();\n extern_pass_twou16s();\n extern_pass_twou32s();\n extern_pass_twou64s();\n extern_return_twou8s();\n extern_return_twou16s();\n extern_return_twou32s();\n extern_return_twou64s();\n foreign_fn_with_byval();\n issue_28676();\n struct_return();\n}\n\n#[cfg(not(target_arch = \"x86_64\"))]\nfn main() {\n\n}\nthe win64 calling convention is also used on x86_64-pc-windows-gnu, so ignore windows entirely instead of just msvc\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks if the \"sysv64\" calling convention behaves the same as the\n\/\/ \"C\" calling convention on platforms where both should be the same\n\n\/\/ This file contains versions of the following run-pass tests with\n\/\/ the calling convention changed to \"sysv64\"\n\n\/\/ cabi-int-widening\n\/\/ extern-pass-char\n\/\/ extern-pass-u32\n\/\/ extern-pass-u64\n\/\/ extern-pass-double\n\/\/ extern-pass-empty\n\/\/ extern-pass-TwoU8s\n\/\/ extern-pass-TwoU16s\n\/\/ extern-pass-TwoU32s\n\/\/ extern-pass-TwoU64s\n\/\/ extern-return-TwoU8s\n\/\/ extern-return-TwoU16s\n\/\/ extern-return-TwoU32s\n\/\/ extern-return-TwoU64s\n\/\/ foreign-fn-with-byval\n\/\/ issue-28676\n\/\/ struct-return\n\n\/\/ ignore-android\n\/\/ ignore-arm\n\/\/ ignore-aarch64\n\/\/ ignore-windows\n\n\/\/ note: windows is ignored as rust_test_helpers does not have the sysv64 abi on windows\n\n#![feature(abi_sysv64)]\n#[allow(dead_code)]\n#[allow(improper_ctypes)]\n\n#[cfg(target_arch = \"x86_64\")]\nmod tests {\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU8s {\n one: u8, two: u8\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU16s {\n one: u16, two: u16\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU32s {\n one: u32, two: u32\n }\n\n #[repr(C)]\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub struct TwoU64s {\n one: u64, two: u64\n }\n\n #[repr(C)]\n pub struct ManyInts {\n arg1: i8,\n arg2: i16,\n arg3: i32,\n arg4: i16,\n arg5: i8,\n arg6: TwoU8s,\n }\n\n #[repr(C)]\n pub struct Empty;\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct S {\n x: u64,\n y: u64,\n z: u64,\n }\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct Quad { a: u64, b: u64, c: u64, d: u64 }\n\n #[repr(C)]\n #[derive(Copy, Clone)]\n pub struct Floats { a: f64, b: u8, c: f64 }\n\n #[link(name = \"rust_test_helpers\")]\n extern \"sysv64\" {\n pub fn rust_int8_to_int32(_: i8) -> i32;\n pub fn rust_dbg_extern_identity_u8(v: u8) -> u8;\n pub fn rust_dbg_extern_identity_u32(v: u32) -> u32;\n pub fn rust_dbg_extern_identity_u64(v: u64) -> u64;\n pub fn rust_dbg_extern_identity_double(v: f64) -> f64;\n pub fn rust_dbg_extern_empty_struct(v1: ManyInts, e: Empty, v2: ManyInts);\n pub fn rust_dbg_extern_identity_TwoU8s(v: TwoU8s) -> TwoU8s;\n pub fn rust_dbg_extern_identity_TwoU16s(v: TwoU16s) -> TwoU16s;\n pub fn rust_dbg_extern_identity_TwoU32s(v: TwoU32s) -> TwoU32s;\n pub fn rust_dbg_extern_identity_TwoU64s(v: TwoU64s) -> TwoU64s;\n pub fn rust_dbg_extern_return_TwoU8s() -> TwoU8s;\n pub fn rust_dbg_extern_return_TwoU16s() -> TwoU16s;\n pub fn rust_dbg_extern_return_TwoU32s() -> TwoU32s;\n pub fn rust_dbg_extern_return_TwoU64s() -> TwoU64s;\n pub fn get_x(x: S) -> u64;\n pub fn get_y(x: S) -> u64;\n pub fn get_z(x: S) -> u64;\n pub fn get_c_many_params(_: *const (), _: *const (),\n _: *const (), _: *const (), f: Quad) -> u64;\n pub fn rust_dbg_abi_1(q: Quad) -> Quad;\n pub fn rust_dbg_abi_2(f: Floats) -> Floats;\n }\n\n pub fn cabi_int_widening() {\n let x = unsafe {\n rust_int8_to_int32(-1)\n };\n\n assert!(x == -1);\n }\n\n pub fn extern_pass_char() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u8(22));\n }\n }\n\n pub fn extern_pass_u32() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u32(22));\n }\n }\n\n pub fn extern_pass_u64() {\n unsafe {\n assert_eq!(22, rust_dbg_extern_identity_u64(22));\n }\n }\n\n pub fn extern_pass_double() {\n unsafe {\n 
assert_eq!(22.0_f64, rust_dbg_extern_identity_double(22.0_f64));\n }\n }\n\n pub fn extern_pass_empty() {\n unsafe {\n let x = ManyInts {\n arg1: 2,\n arg2: 3,\n arg3: 4,\n arg4: 5,\n arg5: 6,\n arg6: TwoU8s { one: 7, two: 8, }\n };\n let y = ManyInts {\n arg1: 1,\n arg2: 2,\n arg3: 3,\n arg4: 4,\n arg5: 5,\n arg6: TwoU8s { one: 6, two: 7, }\n };\n let empty = Empty;\n rust_dbg_extern_empty_struct(x, empty, y);\n }\n }\n\n pub fn extern_pass_twou8s() {\n unsafe {\n let x = TwoU8s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU8s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou16s() {\n unsafe {\n let x = TwoU16s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU16s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou32s() {\n unsafe {\n let x = TwoU32s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU32s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_pass_twou64s() {\n unsafe {\n let x = TwoU64s {one: 22, two: 23};\n let y = rust_dbg_extern_identity_TwoU64s(x);\n assert_eq!(x, y);\n }\n }\n\n pub fn extern_return_twou8s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU8s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou16s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU16s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou32s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU32s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n pub fn extern_return_twou64s() {\n unsafe {\n let y = rust_dbg_extern_return_TwoU64s();\n assert_eq!(y.one, 10);\n assert_eq!(y.two, 20);\n }\n }\n\n #[inline(never)]\n fn indirect_call(func: unsafe extern \"sysv64\" fn(s: S) -> u64, s: S) -> u64 {\n unsafe {\n func(s)\n }\n }\n\n pub fn foreign_fn_with_byval() {\n let s = S { x: 1, y: 2, z: 3 };\n assert_eq!(s.x, indirect_call(get_x, s));\n assert_eq!(s.y, indirect_call(get_y, s));\n assert_eq!(s.z, indirect_call(get_z, s));\n }\n\n fn test() {\n use std::ptr;\n unsafe {\n let null = ptr::null();\n let q = Quad {\n a: 1,\n b: 2,\n c: 3,\n d: 4\n };\n assert_eq!(get_c_many_params(null, null, null, null, q), q.c);\n }\n }\n\n pub fn issue_28676() {\n test();\n }\n\n fn test1() {\n unsafe {\n let q = Quad { a: 0xaaaa_aaaa_aaaa_aaaa,\n b: 0xbbbb_bbbb_bbbb_bbbb,\n c: 0xcccc_cccc_cccc_cccc,\n d: 0xdddd_dddd_dddd_dddd };\n let qq = rust_dbg_abi_1(q);\n println!(\"a: {:x}\", qq.a as usize);\n println!(\"b: {:x}\", qq.b as usize);\n println!(\"c: {:x}\", qq.c as usize);\n println!(\"d: {:x}\", qq.d as usize);\n assert_eq!(qq.a, q.c + 1);\n assert_eq!(qq.b, q.d - 1);\n assert_eq!(qq.c, q.a + 1);\n assert_eq!(qq.d, q.b - 1);\n }\n }\n\n fn test2() {\n unsafe {\n let f = Floats { a: 1.234567890e-15_f64,\n b: 0b_1010_1010,\n c: 1.0987654321e-15_f64 };\n let ff = rust_dbg_abi_2(f);\n println!(\"a: {}\", ff.a as f64);\n println!(\"b: {}\", ff.b as usize);\n println!(\"c: {}\", ff.c as f64);\n assert_eq!(ff.a, f.c + 1.0f64);\n assert_eq!(ff.b, 0xff);\n assert_eq!(ff.c, f.a - 1.0f64);\n }\n }\n\n pub fn struct_return() {\n test1();\n test2();\n }\n}\n\n#[cfg(target_arch = \"x86_64\")]\nfn main() {\n use tests::*;\n cabi_int_widening();\n extern_pass_char();\n extern_pass_u32();\n extern_pass_u64();\n extern_pass_double();\n extern_pass_empty();\n extern_pass_twou8s();\n extern_pass_twou16s();\n extern_pass_twou32s();\n extern_pass_twou64s();\n extern_return_twou8s();\n extern_return_twou16s();\n extern_return_twou32s();\n extern_return_twou64s();\n foreign_fn_with_byval();\n 
issue_28676();\n struct_return();\n}\n\n#[cfg(not(target_arch = \"x86_64\"))]\nfn main() {\n\n}\n<|endoftext|>"} {"text":"Add cube example#[macro_use]\nextern crate glium;\nextern crate arcball;\nextern crate cgmath;\n\nuse glium::Surface;\nuse glium::index::PrimitiveType;\nuse glium::glutin::{self, ElementState, Event, VirtualKeyCode, MouseButton, MouseScrollDelta};\nuse cgmath::{Point3, Vector3, Vector2, Matrix4};\nuse arcball::ArcballCamera;\n\n#[derive(Copy, Clone)]\nstruct Vertex {\n pos: [f32; 3],\n color: [f32; 3],\n}\nimplement_vertex!(Vertex, pos, color);\n\nfn main() {\n use glium::DisplayBuild;\n\n let display = glutin::WindowBuilder::new()\n .with_title(\"Arcball Camera Cube Example\")\n .build_glium()\n .unwrap();\n\n \/\/ Hard-coded cube triangle strip\n let vertex_buffer = glium::VertexBuffer::new(&display,\n &[Vertex { pos: [1.0, 1.0, -1.0], color: [1.0, 0.0, 0.0] },\n Vertex { pos: [-1.0, 1.0, -1.0], color: [1.0, 0.0, 0.0] },\n Vertex { pos: [1.0, 1.0, 1.0], color: [1.0, 0.0, 0.0] },\n\n Vertex { pos: [-1.0, 1.0, 1.0], color: [0.0, 1.0, 0.0] },\n Vertex { pos: [-1.0, -1.0, 1.0], color: [0.0, 1.0, 0.0] },\n Vertex { pos: [-1.0, 1.0, -1.0], color: [0.0, 1.0, 0.0] },\n\n Vertex { pos: [-1.0, -1.0, -1.0], color: [0.0, 0.0, 1.0] },\n Vertex { pos: [1.0, 1.0, -1.0], color: [0.0, 0.0, 1.0] },\n Vertex { pos: [1.0, -1.0, -1.0], color: [0.0, 0.0, 1.0] },\n\n Vertex { pos: [1.0, 1.0, 1.0], color: [1.0, 1.0, 0.0] },\n Vertex { pos: [1.0, -1.0, 1.0], color: [1.0, 1.0, 0.0] },\n Vertex { pos: [-1.0, -1.0, 1.0], color: [1.0, 1.0, 0.0] },\n\n Vertex { pos: [1.0, -1.0, -1.0], color: [1.0, 0.0, 1.0] },\n Vertex { pos: [-1.0, -1.0, -1.0], color: [1.0, 0.0, 1.0] }\n ]\n ).unwrap();\n let index_buffer = glium::index::NoIndices(PrimitiveType::TriangleStrip);\n\n let program = program!(&display,\n 140 => {\n vertex: \"\n #version 140\n\n uniform mat4 proj_view;\n\n in vec3 pos;\n in vec3 color;\n\n out vec3 vcolor;\n\n void main(void) {\n gl_Position = proj_view * vec4(pos, 1.0);\n vcolor = color;\n }\n \",\n fragment: \"\n #version 140\n\n in vec3 vcolor;\n out vec4 color;\n\n void main(void) {\n color = vec4(vcolor, 1.0);\n }\n \"\n },\n ).unwrap();\n\n let display_dims = display.get_framebuffer_dimensions();\n let mut persp_proj = cgmath::perspective(cgmath::Deg(65.0), display_dims.0 as f32 \/ display_dims.1 as f32, 1.0, 200.0);\n let mut arcball_camera = {\n let look_at = Matrix4::::look_at(Point3::new(0.0, 0.0, 6.0),\n Point3::new(0.0, 0.0, 0.0),\n Vector3::new(0.0, 1.0, 0.0));\n ArcballCamera::new(&look_at, 0.05, 4.0, [display_dims.0 as f32, display_dims.1 as f32])\n };\n\n \/\/ Track if left\/right mouse is down\n let mut mouse_pressed = [false, false];\n let mut prev_mouse = None;\n 'outer: loop {\n for e in display.poll_events() {\n match e {\n glutin::Event::Closed => break 'outer,\n Event::KeyboardInput(state, _, code) => {\n let pressed = state == ElementState::Pressed;\n match code {\n Some(VirtualKeyCode::Escape) if pressed => break 'outer,\n _ => {}\n }\n },\n Event::MouseMoved(x, y) if prev_mouse.is_none() => {\n prev_mouse = Some((x, y));\n },\n Event::MouseMoved(x, y) => {\n let prev = prev_mouse.unwrap();\n if mouse_pressed[0] {\n arcball_camera.rotate(Vector2::new(prev.0 as f32, prev.1 as f32), Vector2::new(x as f32, y as f32));\n } else if mouse_pressed[1] {\n let mouse_delta = Vector2::new((x - prev.0) as f32, -(y - prev.1) as f32);\n arcball_camera.pan(mouse_delta, 0.16);\n }\n prev_mouse = Some((x, y));\n },\n Event::MouseInput(state, button) => {\n if button == 
MouseButton::Left {\n mouse_pressed[0] = state == ElementState::Pressed;\n } else if button == MouseButton::Right {\n mouse_pressed[1] = state == ElementState::Pressed;\n }\n },\n Event::MouseWheel(delta, _) => {\n let y = match delta {\n MouseScrollDelta::LineDelta(_, y) => y,\n MouseScrollDelta::PixelDelta(_, y) => y,\n };\n arcball_camera.zoom(y, 0.16);\n },\n Event::Resized(w, h) => {\n persp_proj = cgmath::perspective(cgmath::Deg(65.0), w as f32 \/ h as f32, 1.0, 1000.0);\n arcball_camera.update_screen(w as f32, h as f32);\n },\n _ => {}\n }\n }\n let proj_view: [[f32; 4]; 4] = (persp_proj * arcball_camera.get_mat4()).into();\n let uniforms = uniform! {\n proj_view: proj_view,\n };\n let draw_params = glium::DrawParameters {\n depth: glium::Depth {\n test: glium::draw_parameters::DepthTest::IfLess,\n write: true,\n .. Default::default()\n },\n .. Default::default()\n };\n\n let mut target = display.draw();\n target.clear_color(0.1, 0.1, 0.1, 0.0);\n target.clear_depth(1.0);\n target.draw(&vertex_buffer, &index_buffer, &program, &uniforms, &draw_params).unwrap();\n target.finish().unwrap();\n }\n}\n\n<|endoftext|>"} {"text":"use ide_db::{base_db::FileId, defs::Definition, search::FileReference};\nuse syntax::{\n algo::find_node_at_range,\n ast::{self, ArgListOwner},\n AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, T,\n};\n\nuse SyntaxKind::WHITESPACE;\n\nuse crate::{\n assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists,\n};\n\n\/\/ Assist: remove_unused_param\n\/\/\n\/\/ Removes unused function parameter.\n\/\/\n\/\/ ```\n\/\/ fn frobnicate(x: i32$0) {}\n\/\/\n\/\/ fn main() {\n\/\/ frobnicate(92);\n\/\/ }\n\/\/ ```\n\/\/ ->\n\/\/ ```\n\/\/ fn frobnicate() {}\n\/\/\n\/\/ fn main() {\n\/\/ frobnicate();\n\/\/ }\n\/\/ ```\npub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {\n let param: ast::Param = ctx.find_node_at_offset()?;\n let ident_pat = match param.pat()? {\n ast::Pat::IdentPat(it) => it,\n _ => return None,\n };\n let func = param.syntax().ancestors().find_map(ast::Fn::cast)?;\n\n \/\/ check if fn is in impl Trait for ..\n if func\n .syntax()\n .parent() \/\/ AssocItemList\n .and_then(|x| x.parent())\n .and_then(ast::Impl::cast)\n .map_or(false, |imp| imp.trait_().is_some())\n {\n cov_mark::hit!(trait_impl);\n return None;\n }\n\n let param_position = func.param_list()?.params().position(|it| it == param)?;\n let fn_def = {\n let func = ctx.sema.to_def(&func)?;\n Definition::ModuleDef(func.into())\n };\n\n let param_def = {\n let local = ctx.sema.to_def(&ident_pat)?;\n Definition::Local(local)\n };\n if param_def.usages(&ctx.sema).at_least_one() {\n cov_mark::hit!(keep_used);\n return None;\n }\n acc.add(\n AssistId(\"remove_unused_param\", AssistKind::Refactor),\n \"Remove unused parameter\",\n param.syntax().text_range(),\n |builder| {\n builder.delete(range_to_remove(param.syntax()));\n for (file_id, references) in fn_def.usages(&ctx.sema).all() {\n process_usages(ctx, builder, file_id, references, param_position);\n }\n },\n )\n}\n\nfn process_usages(\n ctx: &AssistContext,\n builder: &mut AssistBuilder,\n file_id: FileId,\n references: Vec,\n arg_to_remove: usize,\n) {\n let source_file = ctx.sema.parse(file_id);\n builder.edit_file(file_id);\n for usage in references {\n if let Some(text_range) = process_usage(&source_file, usage, arg_to_remove) {\n builder.delete(text_range);\n }\n }\n}\n\nfn process_usage(\n source_file: &SourceFile,\n FileReference { range, .. 
}: FileReference,\n arg_to_remove: usize,\n) -> Option {\n let call_expr: ast::CallExpr = find_node_at_range(source_file.syntax(), range)?;\n let call_expr_range = call_expr.expr()?.syntax().text_range();\n if !call_expr_range.contains_range(range) {\n return None;\n }\n let arg = call_expr.arg_list()?.args().nth(arg_to_remove)?;\n Some(range_to_remove(arg.syntax()))\n}\n\nfn range_to_remove(node: &SyntaxNode) -> TextRange {\n let up_to_comma = next_prev().find_map(|dir| {\n node.siblings_with_tokens(dir)\n .filter_map(|it| it.into_token())\n .find(|it| it.kind() == T![,])\n .map(|it| (dir, it))\n });\n if let Some((dir, token)) = up_to_comma {\n if node.next_sibling().is_some() {\n let up_to_space = token\n .siblings_with_tokens(dir)\n .skip(1)\n .take_while(|it| it.kind() == WHITESPACE)\n .last()\n .and_then(|it| it.into_token());\n return node\n .text_range()\n .cover(up_to_space.map_or(token.text_range(), |it| it.text_range()));\n }\n node.text_range().cover(token.text_range())\n } else {\n node.text_range()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use crate::tests::{check_assist, check_assist_not_applicable};\n\n use super::*;\n\n #[test]\n fn remove_unused() {\n check_assist(\n remove_unused_param,\n r#\"\nfn a() { foo(9, 2) }\nfn foo(x: i32, $0y: i32) { x; }\nfn b() { foo(9, 2,) }\n\"#,\n r#\"\nfn a() { foo(9) }\nfn foo(x: i32) { x; }\nfn b() { foo(9, ) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_first_param() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo($0x: i32, y: i32) { y; }\nfn a() { foo(1, 2) }\nfn b() { foo(1, 2,) }\n\"#,\n r#\"\nfn foo(y: i32) { y; }\nfn a() { foo(2) }\nfn b() { foo(2,) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_single_param() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo($0x: i32) { 0; }\nfn a() { foo(1) }\nfn b() { foo(1, ) }\n\"#,\n r#\"\nfn foo() { 0; }\nfn a() { foo() }\nfn b() { foo( ) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_surrounded_by_parms() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo(x: i32, $0y: i32, z: i32) { x; }\nfn a() { foo(1, 2, 3) }\nfn b() { foo(1, 2, 3,) }\n\"#,\n r#\"\nfn foo(x: i32, z: i32) { x; }\nfn a() { foo(1, 3) }\nfn b() { foo(1, 3,) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_qualified_call() {\n check_assist(\n remove_unused_param,\n r#\"\nmod bar { pub fn foo(x: i32, $0y: i32) { x; } }\nfn b() { bar::foo(9, 2) }\n\"#,\n r#\"\nmod bar { pub fn foo(x: i32) { x; } }\nfn b() { bar::foo(9) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_turbofished_func() {\n check_assist(\n remove_unused_param,\n r#\"\npub fn foo(x: T, $0y: i32) { x; }\nfn b() { foo::(9, 2) }\n\"#,\n r#\"\npub fn foo(x: T) { x; }\nfn b() { foo::(9) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_generic_unused_param_func() {\n check_assist(\n remove_unused_param,\n r#\"\npub fn foo(x: i32, $0y: T) { x; }\nfn b() { foo::(9, 2) }\nfn b2() { foo(9, 2) }\n\"#,\n r#\"\npub fn foo(x: i32) { x; }\nfn b() { foo::(9) }\nfn b2() { foo(9) }\n\"#,\n );\n }\n\n #[test]\n fn keep_used() {\n cov_mark::check!(keep_used);\n check_assist_not_applicable(\n remove_unused_param,\n r#\"\nfn foo(x: i32, $0y: i32) { y; }\nfn main() { foo(9, 2) }\n\"#,\n );\n }\n\n #[test]\n fn trait_impl() {\n cov_mark::check!(trait_impl);\n check_assist_not_applicable(\n remove_unused_param,\n r#\"\ntrait Trait {\n fn foo(x: i32);\n}\nimpl Trait for () {\n fn foo($0x: i32) {}\n}\n\"#,\n );\n }\n\n #[test]\n fn remove_across_files() {\n check_assist(\n remove_unused_param,\n r#\"\n\/\/- \/main.rs\nfn foo(x: i32, $0y: i32) { x; }\n\nmod foo;\n\n\/\/- 
\/foo.rs\nuse super::foo;\n\nfn bar() {\n let _ = foo(1, 2);\n}\n\"#,\n r#\"\n\/\/- \/main.rs\nfn foo(x: i32) { x; }\n\nmod foo;\n\n\/\/- \/foo.rs\nuse super::foo;\n\nfn bar() {\n let _ = foo(1);\n}\n\"#,\n )\n }\n\n #[test]\n fn remove_method_param() {\n \/\/ FIXME: This is completely wrong:\n \/\/ * method call expressions are not handled\n \/\/ * assoc function syntax removes the wrong argument.\n check_assist(\n remove_unused_param,\n r#\"\nstruct S;\nimpl S { fn f(&self, $0_unused: i32) {} }\nfn main() {\n S.f(92);\n S.f();\n S.f(92, 92);\n S::f(&S, 92);\n}\n\"#,\n r#\"\nstruct S;\nimpl S { fn f(&self) {} }\nfn main() {\n S.f(92);\n S.f();\n S.f(92, 92);\n S::f(92);\n}\n\"#,\n )\n }\n}\nadd: fix: Adding remove_unused_param for method and fixing same for associative funcuse ide_db::{base_db::FileId, defs::Definition, search::FileReference};\nuse syntax::{\n algo::find_node_at_range,\n ast::{self, ArgListOwner},\n AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, T,\n};\n\nuse SyntaxKind::WHITESPACE;\n\nuse crate::{\n assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists,\n};\n\n\/\/ Assist: remove_unused_param\n\/\/\n\/\/ Removes unused function parameter.\n\/\/\n\/\/ ```\n\/\/ fn frobnicate(x: i32$0) {}\n\/\/\n\/\/ fn main() {\n\/\/ frobnicate(92);\n\/\/ }\n\/\/ ```\n\/\/ ->\n\/\/ ```\n\/\/ fn frobnicate() {}\n\/\/\n\/\/ fn main() {\n\/\/ frobnicate();\n\/\/ }\n\/\/ ```\npub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {\n let param: ast::Param = ctx.find_node_at_offset()?;\n let ident_pat = match param.pat()? {\n ast::Pat::IdentPat(it) => it,\n _ => return None,\n };\n let func = param.syntax().ancestors().find_map(ast::Fn::cast)?;\n let is_self_present =\n param.syntax().parent()?.children().find_map(ast::SelfParam::cast).is_some();\n\n \/\/ check if fn is in impl Trait for ..\n if func\n .syntax()\n .parent() \/\/ AssocItemList\n .and_then(|x| x.parent())\n .and_then(ast::Impl::cast)\n .map_or(false, |imp| imp.trait_().is_some())\n {\n cov_mark::hit!(trait_impl);\n return None;\n }\n\n let mut param_position = func.param_list()?.params().position(|it| it == param)?;\n \/\/ param_list() does not take self param into consideration, hence this additional check is\n \/\/ added. There are two cases to handle in this scenario, where functions are\n \/\/ associative(functions not associative and not containting contain self, are not allowed), in\n \/\/ this case param position is rightly set. 
If a method call is present which has self param,\n \/\/ that needs to be handled and is added below in process_usage function to reduce this increment and\n \/\/ not consider self param.\n if is_self_present {\n param_position += 1;\n }\n let fn_def = {\n let func = ctx.sema.to_def(&func)?;\n Definition::ModuleDef(func.into())\n };\n\n let param_def = {\n let local = ctx.sema.to_def(&ident_pat)?;\n Definition::Local(local)\n };\n if param_def.usages(&ctx.sema).at_least_one() {\n cov_mark::hit!(keep_used);\n return None;\n }\n acc.add(\n AssistId(\"remove_unused_param\", AssistKind::Refactor),\n \"Remove unused parameter\",\n param.syntax().text_range(),\n |builder| {\n builder.delete(range_to_remove(param.syntax()));\n for (file_id, references) in fn_def.usages(&ctx.sema).all() {\n process_usages(ctx, builder, file_id, references, param_position, is_self_present);\n }\n },\n )\n}\n\nfn process_usages(\n ctx: &AssistContext,\n builder: &mut AssistBuilder,\n file_id: FileId,\n references: Vec,\n arg_to_remove: usize,\n is_self_present: bool,\n) {\n let source_file = ctx.sema.parse(file_id);\n builder.edit_file(file_id);\n for usage in references {\n if let Some(text_range) = process_usage(&source_file, usage, arg_to_remove, is_self_present)\n {\n builder.delete(text_range);\n }\n }\n}\n\nfn process_usage(\n source_file: &SourceFile,\n FileReference { range, .. }: FileReference,\n mut arg_to_remove: usize,\n is_self_present: bool,\n) -> Option {\n let call_expr_opt: Option = find_node_at_range(source_file.syntax(), range);\n if let Some(call_expr) = call_expr_opt {\n let call_expr_range = call_expr.expr()?.syntax().text_range();\n if !call_expr_range.contains_range(range) {\n return None;\n }\n\n let arg = call_expr.arg_list()?.args().nth(arg_to_remove)?;\n return Some(range_to_remove(arg.syntax()));\n }\n\n let method_call_expr_opt: Option =\n find_node_at_range(source_file.syntax(), range);\n if let Some(method_call_expr) = method_call_expr_opt {\n let method_call_expr_range = method_call_expr.name_ref()?.syntax().text_range();\n if !method_call_expr_range.contains_range(range) {\n return None;\n }\n\n if is_self_present {\n arg_to_remove -= 1;\n }\n\n let arg = method_call_expr.arg_list()?.args().nth(arg_to_remove)?;\n return Some(range_to_remove(arg.syntax()));\n }\n\n return None;\n}\n\nfn range_to_remove(node: &SyntaxNode) -> TextRange {\n let up_to_comma = next_prev().find_map(|dir| {\n node.siblings_with_tokens(dir)\n .filter_map(|it| it.into_token())\n .find(|it| it.kind() == T![,])\n .map(|it| (dir, it))\n });\n if let Some((dir, token)) = up_to_comma {\n if node.next_sibling().is_some() {\n let up_to_space = token\n .siblings_with_tokens(dir)\n .skip(1)\n .take_while(|it| it.kind() == WHITESPACE)\n .last()\n .and_then(|it| it.into_token());\n return node\n .text_range()\n .cover(up_to_space.map_or(token.text_range(), |it| it.text_range()));\n }\n node.text_range().cover(token.text_range())\n } else {\n node.text_range()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use crate::tests::{check_assist, check_assist_not_applicable};\n\n use super::*;\n\n #[test]\n fn remove_unused() {\n check_assist(\n remove_unused_param,\n r#\"\nfn a() { foo(9, 2) }\nfn foo(x: i32, $0y: i32) { x; }\nfn b() { foo(9, 2,) }\n\"#,\n r#\"\nfn a() { foo(9) }\nfn foo(x: i32) { x; }\nfn b() { foo(9, ) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_first_param() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo($0x: i32, y: i32) { y; }\nfn a() { foo(1, 2) }\nfn b() { foo(1, 2,) }\n\"#,\n r#\"\nfn foo(y: i32) { 
y; }\nfn a() { foo(2) }\nfn b() { foo(2,) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_single_param() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo($0x: i32) { 0; }\nfn a() { foo(1) }\nfn b() { foo(1, ) }\n\"#,\n r#\"\nfn foo() { 0; }\nfn a() { foo() }\nfn b() { foo( ) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_surrounded_by_parms() {\n check_assist(\n remove_unused_param,\n r#\"\nfn foo(x: i32, $0y: i32, z: i32) { x; }\nfn a() { foo(1, 2, 3) }\nfn b() { foo(1, 2, 3,) }\n\"#,\n r#\"\nfn foo(x: i32, z: i32) { x; }\nfn a() { foo(1, 3) }\nfn b() { foo(1, 3,) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_qualified_call() {\n check_assist(\n remove_unused_param,\n r#\"\nmod bar { pub fn foo(x: i32, $0y: i32) { x; } }\nfn b() { bar::foo(9, 2) }\n\"#,\n r#\"\nmod bar { pub fn foo(x: i32) { x; } }\nfn b() { bar::foo(9) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_turbofished_func() {\n check_assist(\n remove_unused_param,\n r#\"\npub fn foo(x: T, $0y: i32) { x; }\nfn b() { foo::(9, 2) }\n\"#,\n r#\"\npub fn foo(x: T) { x; }\nfn b() { foo::(9) }\n\"#,\n );\n }\n\n #[test]\n fn remove_unused_generic_unused_param_func() {\n check_assist(\n remove_unused_param,\n r#\"\npub fn foo(x: i32, $0y: T) { x; }\nfn b() { foo::(9, 2) }\nfn b2() { foo(9, 2) }\n\"#,\n r#\"\npub fn foo(x: i32) { x; }\nfn b() { foo::(9) }\nfn b2() { foo(9) }\n\"#,\n );\n }\n\n #[test]\n fn keep_used() {\n cov_mark::check!(keep_used);\n check_assist_not_applicable(\n remove_unused_param,\n r#\"\nfn foo(x: i32, $0y: i32) { y; }\nfn main() { foo(9, 2) }\n\"#,\n );\n }\n\n #[test]\n fn trait_impl() {\n cov_mark::check!(trait_impl);\n check_assist_not_applicable(\n remove_unused_param,\n r#\"\ntrait Trait {\n fn foo(x: i32);\n}\nimpl Trait for () {\n fn foo($0x: i32) {}\n}\n\"#,\n );\n }\n\n #[test]\n fn remove_across_files() {\n check_assist(\n remove_unused_param,\n r#\"\n\/\/- \/main.rs\nfn foo(x: i32, $0y: i32) { x; }\n\nmod foo;\n\n\/\/- \/foo.rs\nuse super::foo;\n\nfn bar() {\n let _ = foo(1, 2);\n}\n\"#,\n r#\"\n\/\/- \/main.rs\nfn foo(x: i32) { x; }\n\nmod foo;\n\n\/\/- \/foo.rs\nuse super::foo;\n\nfn bar() {\n let _ = foo(1);\n}\n\"#,\n )\n }\n\n #[test]\n fn test_remove_method_param() {\n check_assist(\n remove_unused_param,\n r#\"\nstruct S;\nimpl S { fn f(&self, $0_unused: i32) {} }\nfn main() {\n S.f(92);\n S.f();\n S.f(93, 92);\n S::f(&S, 92);\n}\n\"#,\n r#\"\nstruct S;\nimpl S { fn f(&self) {} }\nfn main() {\n S.f();\n S.f();\n S.f(92);\n S::f(&S);\n}\n\"#,\n )\n }\n}\n<|endoftext|>"} {"text":"Import and tidy up macro from Gist\/*!\nMacro to declare lazily evaluated statics, which allows runtime initialization.\n\nThe macro works by defining a custom type with a `Deref` impl, which delegates the dereferencing\nto a hidden `static mut` that gets lazily initialized on first access.\n\n# Examples\n\nTo load the extension and use it:\n\n```rust,ignore\n\n}\n```\n*\/\n\n#![crate_id = \"lazy_static\"]\n#![crate_type = \"dylib\"]\n#![license = \"MIT\"]\n\n#![feature(macro_rules)]\n\n#[macro_export]\nmacro_rules! 
lazy_static {\n ($(static ref $N:ident : $T:ty = $e:expr;)*) => {\n $(\n #[allow(non_camel_case_types)]\n #[allow(dead_code)]\n struct $N {__unit__: ()}\n static $N: $N = $N {__unit__: ()};\n impl Deref<$T> for $N {\n fn deref<'a>(&'a self) -> &'a $T {\n use std::sync::{Once, ONCE_INIT};\n use std::mem::transmute;\n\n #[inline(always)]\n fn require_share(_: &T) { }\n\n unsafe {\n static mut s: *$T = 0 as *$T;\n static mut ONCE: Once = ONCE_INIT;\n ONCE.doit(|| {\n s = transmute::, *$T>(box() ($e));\n });\n let static_ref = &*s;\n require_share(static_ref);\n static_ref\n }\n }\n }\n\n )*\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::collections::HashMap;\n\n lazy_static! {\n static ref NUMBER: uint = times_two(3);\n static ref VEC: [Box, ..3] = [box 1, box 2, box 3];\n static ref OWNED_STRING: String = \"hello\".to_string();\n static ref HASHMAP: HashMap = {\n let mut m = HashMap::new();\n m.insert(0u, \"abc\");\n m.insert(1, \"def\");\n m.insert(2, \"ghi\");\n m\n };\n static ref UNUSED: () = ();\n }\n\n fn times_two(n: uint) -> uint {\n n * 2\n }\n\n #[test]\n fn test_basic() {\n assert_eq!(OWNED_STRING.as_slice(), \"hello\");\n assert_eq!(*NUMBER, 6);\n assert!(HASHMAP.find(&1).is_some());\n assert!(HASHMAP.find(&3).is_none());\n assert_eq!(VEC.as_slice(), &[box 1, box 2, box 3]);\n }\n\n #[test]\n fn test_repeat() {\n assert_eq!(*NUMBER, 6);\n assert_eq!(*NUMBER, 6);\n assert_eq!(*NUMBER, 6);\n }\n}\n<|endoftext|>"} {"text":"Codeforces 762Ause std::collections::LinkedList;\n\nmacro_rules! read_line {\n ($v:ident) => {\n let mut temp = String::new();\n std::io::stdin().read_line(&mut temp).unwrap();\n let $v = temp;\n };\n (var, $t:ty, $($v:ident), *) => {\n read_line!(input_line);\n let mut iter = parse_token!($t, input_line);\n $(\n let $v = iter.next().unwrap();\n )*\n };\n (vec, $t:ty, $v:ident) => {\n read_line!(input_line);\n let iter = parse_token!($t, input_line);\n let $v: Vec<$t> = iter.collect();\n };\n ($($v:ident; $t:ty), *) => {\n read_line!(input_line);\n let mut iter = input_line.split_whitespace();\n $(\n let $v: $t = iter.next().unwrap().parse().unwrap();\n )*\n };\n}\n\nmacro_rules! parse_token {\n ($t:ty, $e:expr) => {\n $e.split_whitespace().map(|x| x.parse::<$t>().unwrap());\n };\n}\n\nfn main() {\n read_line!(k;u64, n;u64);\n let mut divisors = LinkedList::new();\n let mut i = (k as f64).sqrt() as u64;\n if i * i == k {\n divisors.push_back(i);\n i -= 1;\n }\n while i >= 1 {\n if k % i == 0 {\n divisors.push_front(i);\n divisors.push_back(k \/ i);\n }\n i -= 1;\n }\n if n > divisors.len() as u64 {\n println!(\"-1\");\n } else {\n for (idx, divisor) in divisors.iter().enumerate() {\n if idx as u64 == n - 1 {\n println!(\"{}\", divisor);\n }\n }\n }\n}<|endoftext|>"} {"text":"Fix: Set isflag when creating a mail in the store<|endoftext|>"} {"text":"\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::nll::constraints::OutlivesConstraint;\nuse borrow_check::nll::type_check::{BorrowCheckContext, Locations};\nuse rustc::infer::nll_relate::{TypeRelating, TypeRelatingDelegate};\nuse rustc::infer::{InferCtxt, NLLRegionVariableOrigin};\nuse rustc::mir::{ConstraintCategory, UserTypeAnnotation};\nuse rustc::traits::query::Fallible;\nuse rustc::ty::relate::TypeRelation;\nuse rustc::ty::subst::UserSubsts;\nuse rustc::ty::{self, Ty};\nuse syntax_pos::DUMMY_SP;\n\n\/\/\/ Adds sufficient constraints to ensure that `a <: b`.\npub(super) fn sub_types<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n b: Ty<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible<()> {\n debug!(\"sub_types(a={:?}, b={:?}, locations={:?})\", a, b, locations);\n TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n ty::Variance::Covariant,\n ).relate(&a, &b)?;\n Ok(())\n}\n\n\/\/\/ Adds sufficient constraints to ensure that `a == b`.\npub(super) fn eq_types<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n b: Ty<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible<()> {\n debug!(\"eq_types(a={:?}, b={:?}, locations={:?})\", a, b, locations);\n TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n ty::Variance::Invariant,\n ).relate(&a, &b)?;\n Ok(())\n}\n\n\/\/\/ Adds sufficient constraints to ensure that `a <: b`, where `b` is\n\/\/\/ a user-given type (which means it may have canonical variables\n\/\/\/ encoding things like `_`).\npub(super) fn relate_type_and_user_type<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n v: ty::Variance,\n user_ty: UserTypeAnnotation<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible> {\n debug!(\n \"relate_type_and_user_type(a={:?}, v={:?}, b={:?}, locations={:?})\",\n a, v, user_ty, locations\n );\n\n let b = match user_ty {\n UserTypeAnnotation::Ty(canonical_ty) => {\n let (ty, _) =\n infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_ty);\n ty\n }\n UserTypeAnnotation::FnDef(def_id, canonical_substs) => {\n let (UserSubsts { substs, user_self_ty }, _) =\n infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);\n assert!(user_self_ty.is_none()); \/\/ TODO for now\n infcx.tcx.mk_fn_def(def_id, substs)\n }\n UserTypeAnnotation::AdtDef(adt_def, canonical_substs) => {\n let (UserSubsts { substs, user_self_ty }, _) =\n infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);\n assert!(user_self_ty.is_none()); \/\/ TODO for now\n infcx.tcx.mk_adt(adt_def, substs)\n }\n };\n\n \/\/ The `TypeRelating` code assumes that the \"canonical variables\"\n \/\/ appear in the \"a\" side, so flip `Contravariant` ambient\n \/\/ variance to get the right relationship.\n let v1 = ty::Contravariant.xform(v);\n\n let mut type_relating = TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n v1,\n );\n type_relating.relate(&b, &a)?;\n\n Ok(b)\n}\n\nstruct NllTypeRelatingDelegate<'me, 'bccx: 'me, 'gcx: 'tcx, 'tcx: 'bccx> {\n infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,\n 
borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,\n\n \/\/\/ Where (and why) is this relation taking place?\n locations: Locations,\n\n \/\/\/ What category do we assign the resulting `'a: 'b` relationships?\n category: ConstraintCategory,\n}\n\nimpl NllTypeRelatingDelegate<'me, 'bccx, 'gcx, 'tcx> {\n fn new(\n infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,\n borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,\n locations: Locations,\n category: ConstraintCategory,\n ) -> Self {\n Self {\n infcx,\n borrowck_context,\n locations,\n category,\n }\n }\n}\n\nimpl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, '_, 'tcx> {\n fn create_next_universe(&mut self) -> ty::UniverseIndex {\n self.infcx.create_next_universe()\n }\n\n fn next_existential_region_var(&mut self) -> ty::Region<'tcx> {\n let origin = NLLRegionVariableOrigin::Existential;\n self.infcx.next_nll_region_var(origin)\n }\n\n fn next_placeholder_region(&mut self, placeholder: ty::Placeholder) -> ty::Region<'tcx> {\n let origin = NLLRegionVariableOrigin::Placeholder(placeholder);\n if let Some(borrowck_context) = &mut self.borrowck_context {\n borrowck_context.placeholder_indices.insert(placeholder);\n }\n self.infcx.next_nll_region_var(origin)\n }\n\n fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {\n self.infcx\n .next_nll_region_var_in_universe(NLLRegionVariableOrigin::Existential, universe)\n }\n\n fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>) {\n if let Some(borrowck_context) = &mut self.borrowck_context {\n let sub = borrowck_context.universal_regions.to_region_vid(sub);\n let sup = borrowck_context.universal_regions.to_region_vid(sup);\n borrowck_context\n .constraints\n .outlives_constraints\n .push(OutlivesConstraint {\n sup,\n sub,\n locations: self.locations,\n category: self.category,\n });\n }\n }\n}\npull the common code across user-ty variants up top\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::nll::constraints::OutlivesConstraint;\nuse borrow_check::nll::type_check::{BorrowCheckContext, Locations};\nuse rustc::infer::nll_relate::{TypeRelating, TypeRelatingDelegate};\nuse rustc::infer::{InferCtxt, NLLRegionVariableOrigin};\nuse rustc::mir::{ConstraintCategory, UserTypeAnnotation};\nuse rustc::traits::query::Fallible;\nuse rustc::ty::relate::TypeRelation;\nuse rustc::ty::subst::UserSubsts;\nuse rustc::ty::{self, Ty};\nuse syntax_pos::DUMMY_SP;\n\n\/\/\/ Adds sufficient constraints to ensure that `a <: b`.\npub(super) fn sub_types<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n b: Ty<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible<()> {\n debug!(\"sub_types(a={:?}, b={:?}, locations={:?})\", a, b, locations);\n TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n ty::Variance::Covariant,\n ).relate(&a, &b)?;\n Ok(())\n}\n\n\/\/\/ Adds sufficient constraints to ensure that `a == b`.\npub(super) fn eq_types<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n b: Ty<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible<()> {\n debug!(\"eq_types(a={:?}, b={:?}, locations={:?})\", a, b, locations);\n TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n ty::Variance::Invariant,\n ).relate(&a, &b)?;\n Ok(())\n}\n\n\/\/\/ Adds sufficient constraints to ensure that `a <: b`, where `b` is\n\/\/\/ a user-given type (which means it may have canonical variables\n\/\/\/ encoding things like `_`).\npub(super) fn relate_type_and_user_type<'tcx>(\n infcx: &InferCtxt<'_, '_, 'tcx>,\n a: Ty<'tcx>,\n v: ty::Variance,\n user_ty: UserTypeAnnotation<'tcx>,\n locations: Locations,\n category: ConstraintCategory,\n borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,\n) -> Fallible> {\n debug!(\n \"relate_type_and_user_type(a={:?}, v={:?}, b={:?}, locations={:?})\",\n a, v, user_ty, locations\n );\n\n \/\/ The `TypeRelating` code assumes that the \"canonical variables\"\n \/\/ appear in the \"a\" side, so flip `Contravariant` ambient\n \/\/ variance to get the right relationship.\n let v1 = ty::Contravariant.xform(v);\n\n let mut type_relating = TypeRelating::new(\n infcx,\n NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),\n v1,\n );\n\n match user_ty {\n UserTypeAnnotation::Ty(canonical_ty) => {\n let (ty, _) =\n infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_ty);\n type_relating.relate(&ty, &a)?;\n Ok(ty)\n }\n UserTypeAnnotation::FnDef(def_id, canonical_substs) => {\n let (\n UserSubsts {\n substs,\n user_self_ty,\n },\n _,\n ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);\n assert!(user_self_ty.is_none()); \/\/ TODO for now\n let ty = infcx.tcx.mk_fn_def(def_id, substs);\n type_relating.relate(&ty, &a)?;\n Ok(ty)\n }\n UserTypeAnnotation::AdtDef(adt_def, canonical_substs) => {\n let (\n UserSubsts {\n substs,\n user_self_ty,\n },\n _,\n ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);\n assert!(user_self_ty.is_none()); \/\/ TODO for now\n let ty = infcx.tcx.mk_adt(adt_def, substs);\n type_relating.relate(&ty, &a)?;\n Ok(ty)\n }\n }\n}\n\nstruct 
NllTypeRelatingDelegate<'me, 'bccx: 'me, 'gcx: 'tcx, 'tcx: 'bccx> {\n infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,\n borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,\n\n \/\/\/ Where (and why) is this relation taking place?\n locations: Locations,\n\n \/\/\/ What category do we assign the resulting `'a: 'b` relationships?\n category: ConstraintCategory,\n}\n\nimpl NllTypeRelatingDelegate<'me, 'bccx, 'gcx, 'tcx> {\n fn new(\n infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,\n borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,\n locations: Locations,\n category: ConstraintCategory,\n ) -> Self {\n Self {\n infcx,\n borrowck_context,\n locations,\n category,\n }\n }\n}\n\nimpl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, '_, 'tcx> {\n fn create_next_universe(&mut self) -> ty::UniverseIndex {\n self.infcx.create_next_universe()\n }\n\n fn next_existential_region_var(&mut self) -> ty::Region<'tcx> {\n let origin = NLLRegionVariableOrigin::Existential;\n self.infcx.next_nll_region_var(origin)\n }\n\n fn next_placeholder_region(&mut self, placeholder: ty::Placeholder) -> ty::Region<'tcx> {\n let origin = NLLRegionVariableOrigin::Placeholder(placeholder);\n if let Some(borrowck_context) = &mut self.borrowck_context {\n borrowck_context.placeholder_indices.insert(placeholder);\n }\n self.infcx.next_nll_region_var(origin)\n }\n\n fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {\n self.infcx\n .next_nll_region_var_in_universe(NLLRegionVariableOrigin::Existential, universe)\n }\n\n fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>) {\n if let Some(borrowck_context) = &mut self.borrowck_context {\n let sub = borrowck_context.universal_regions.to_region_vid(sub);\n let sup = borrowck_context.universal_regions.to_region_vid(sup);\n borrowck_context\n .constraints\n .outlives_constraints\n .push(OutlivesConstraint {\n sup,\n sub,\n locations: self.locations,\n category: self.category,\n });\n }\n }\n}\n<|endoftext|>"} {"text":"fuchsia: fix regression in fmt::Debug for Events (#699)<|endoftext|>"} {"text":"[Auto] bin\/core\/bookmark: Fix Clippy warnings<|endoftext|>"} {"text":"Add test coverage for verifier_cache\/\/ Copyright 2018 The Grin Developers\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate chrono;\nextern crate grin_core;\nextern crate grin_keychain as keychain;\nextern crate grin_util as util;\nextern crate grin_wallet as wallet;\n\nuse std::sync::{Arc, RwLock};\n\npub mod common;\n\nuse grin_core::core::verifier_cache::{LruVerifierCache, VerifierCache};\nuse grin_core::core::{Output, OutputFeatures};\nuse keychain::{ExtKeychain, Keychain};\nuse wallet::libtx::proof;\n\nfn verifier_cache() -> Arc> {\n\tArc::new(RwLock::new(LruVerifierCache::new()))\n}\n\n#[test]\nfn test_verifier_cache_rangeproofs() {\n\tlet cache = verifier_cache();\n\n\tlet keychain = ExtKeychain::from_random_seed().unwrap();\n\tlet key_id = 
keychain.derive_key_id(1).unwrap();\n\tlet commit = keychain.commit(5, &key_id).unwrap();\n\tlet proof = proof::create(&keychain, 5, &key_id, commit, None).unwrap();\n\n\tlet out = Output {\n\t\tfeatures: OutputFeatures::DEFAULT_OUTPUT,\n\t\tcommit: commit,\n\t\tproof: proof,\n\t};\n\n\t\/\/ Check our output is not verified according to the cache.\n\t{\n\t\tlet mut cache = cache.write().unwrap();\n\t\tlet unverified = cache.filter_rangeproof_unverified(&vec![out]);\n\t\tassert_eq!(unverified, vec![out]);\n\t}\n\n\t\/\/ Add our output to the cache.\n\t{\n\t\tlet mut cache = cache.write().unwrap();\n\t\tcache.add_rangeproof_verified(vec![out]);\n\t}\n\n\t\/\/ Check it shows as verified according to the cache.\n\t{\n\t\tlet mut cache = cache.write().unwrap();\n\t\tlet unverified = cache.filter_rangeproof_unverified(&vec![out]);\n\t\tassert_eq!(unverified, vec![]);\n\t}\n}\n<|endoftext|>"} {"text":"Add benchmark\/\/ cargo bench\n\n#![feature(test)]\n\nextern crate ryu;\nextern crate test;\n\nmacro_rules! benches {\n ($($name:ident($value:expr),)*) => {\n mod bench_ryu {\n use test::{Bencher, black_box};\n $(\n #[bench]\n fn $name(b: &mut Bencher) {\n use ryu;\n\n let mut buf = ryu::Buffer::new();\n\n b.iter(move || {\n let value = black_box($value);\n let formatted = buf.format(value);\n black_box(formatted);\n });\n }\n )*\n }\n\n mod bench_std_fmt {\n use test::{Bencher, black_box};\n $(\n #[bench]\n fn $name(b: &mut Bencher) {\n use std::io::Write;\n\n let mut buf = Vec::with_capacity(20);\n\n b.iter(|| {\n buf.clear();\n let value = black_box($value);\n write!(&mut buf, \"{}\", value).unwrap();\n black_box(buf.as_slice());\n });\n }\n )*\n }\n }\n}\n\nbenches!(\n bench_0_f64(0f64),\n bench_short_f64(0.1234f64),\n bench_e_f64(2.718281828459045f64),\n bench_max_f64(::std::f64::MAX),\n\n bench_0_f32(0f32),\n bench_short_f32(0.1234f32),\n bench_e_f32(2.718281828459045f32),\n bench_max_f32(::std::f32::MAX),\n);\n<|endoftext|>"} {"text":"Began abstracting quadtree_diffusion\nmacro_rules! 
printlnerr(\n ($($arg:tt)*) => { {use std::io::Write;\n let r = writeln!(&mut ::std::io::stderr(), $($arg)*);\n r.expect(\"failed printing to stderr\");\n } }\n);\n\n\ntrait TreeContinuumPhysics {\n const DIMENSIONS: usize;\n type Steward: TimeSteward;\n type NodeVarying: QueryResult;\n type BoundaryVarying: QueryResult;\n}\n\n\nuse std::cmp::{min, max};\nuse std::collections::HashSet;\n\nuse time_steward::{DeterministicRandomId};\nuse time_steward::{PersistentTypeId, PersistentlyIdentifiedType, ListedType, DataHandleTrait, DataTimelineCellTrait, Basics as BasicsTrait};\nuse time_steward::stewards::{simple_full as steward_module};\nuse steward_module::{TimeSteward, ConstructibleTimeSteward, IncrementalTimeSteward, Event, DataHandle, DataTimelineCell, EventHandle, Accessor, EventAccessor, FutureCleanupAccessor, simple_timeline};\nuse simple_timeline::{SimpleTimeline, query, set, destroy, just_destroyed};\n\nconst DIMENSIONS: usize = 2;\n\nenum Face {\n WorldEdge,\n SingleBoundary (BoundaryHandle),\n SplitBoundary ([BoundaryHandle; 1<<(DIMENSIONS-1)]\n}\n\n#[derive (Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]\nstruct NodeData {\n width: Distance,\n center: [Distance ; Physics::DIMENSIONS],\n parent: Option >,\n varying: DataTimelineCell , Physics::Steward>>,\n}\n#[derive (Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]\nstruct NodeVarying {\n children: Vec>,\n boundaries: [[Face; 2]; DIMENSIONS],\n data: Physics::NodeVarying,\n}\ntype NodeHandle = DataHandle >;\nimpl PersistentlyIdentifiedType for NodeData {\n const ID: PersistentTypeId = PersistentTypeId(Physics::ID ^ 0x0d838bdd804f48d7);\n}\n\n#[derive (Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]\nstruct BoundaryData {\n length: Distance,\n center: [Distance ; 2],\n nodes: [NodeHandle; 2],\n varying: DataTimelineCell , Physics::Steward>>,\n}\n#[derive (Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]\nstruct BoundaryVarying {\n data: Physics::BoundaryVarying,\n}\ntype BoundaryHandle = DataHandle >;\nimpl PersistentlyIdentifiedType for BoundaryData {\n const ID: PersistentTypeId = PersistentTypeId(Physics::ID ^ 0x4913f629aef09374);\n}\n\nmacro_rules! get {\n ($accessor: expr, $cell: expr) => {\n query ($accessor, $cell)\n }\n}\nmacro_rules! set {\n ($accessor: expr, $cell: expr, $field: ident, $value: expr) => {\n {\n let mut value = query ($accessor, $cell);\n value.$field = $value;\n set ($accessor, $cell, value);\n }\n }\n}\nmacro_rules! set_with {\n ($accessor: expr, $cell: expr, | $varying: ident | $actions: expr) => {\n {\n let mut $varying = query ($accessor, $cell);\n $actions;\n set ($accessor, $cell, $varying);\n }\n }\n}\nmacro_rules! 
exists {\n ($accessor: expr, $cell: expr) => {\n $accessor.query ($cell, &GetVarying).is_some()\n }\n}\n\n\nfn audit > (accessor: &A) {\n audit_node (accessor, &accessor.globals().root);\n}\nfn audit_node > (accessor: &A, node: &NodeHandle) {\n let varying = get!(accessor, &node.varying);\n for dimension in 0..2 {\n for direction in 0..2 {\n for boundary in varying.boundaries [dimension] [direction].iter() {\n assert!(&boundary.nodes[(direction+1)&1] == node);\n audit_boundary (accessor, boundary);\n }\n }\n }\n for child in varying.children.iter() {\n audit_node (accessor, child);\n }\n}\nfn audit_boundary > (accessor: &A, boundary: &BoundaryHandle) {\n get!(accessor, &boundary.varying);\n for direction in 0..2 {\n assert!(get!(accessor, &boundary.nodes [direction].varying).boundaries.iter().any(|whatever| whatever[(direction+1)&1].iter().any(| other_boundary| other_boundary == boundary)));\n }\n}\n\nfn split > (accessor: &A, node: &NodeHandle) {\n let mut varying = get!(accessor, &node.varying);\n \/\/printlnerr!(\"{:?}\", (node.width, node.center, varying.children.len()));\n \/\/if !varying.children.is_empty() {\n \/\/printlnerr!(\"{:?}\", (node.width, node.center, varying.children.len()));\n \/\/}\n assert!(varying.children.is_empty());\n let mut velocities = [[[0;2];2];2];\n let mut old_boundaries = [[[None,None],[None,None]],[[None,None],[None,None]]];\n let mut middle_velocities = [[0;2];2];\n for dimension in 0..2 {\n for direction in 0..2 {\n \/\/let direction_signum = (direction*2)-1;\n let other_direction = (direction + 1) & 1;\n \n match varying.boundaries [dimension] [direction].len() {\n 1 => {\n loop {\n {\n let boundary = &varying.boundaries [dimension] [direction] [0];\n let neighbor = &boundary.nodes [direction];\n if neighbor.width <= node.width {\n break;\n }\n split (accessor, &neighbor);\n }\n varying = get!(accessor, &node.varying);\n }\n let boundary = &varying.boundaries [dimension] [direction] [0];\n assert!(&boundary.nodes[other_direction] == node);\n velocities [dimension] [direction] = [\n get!(accessor, &boundary.varying).transfer_velocity,\n get!(accessor, &boundary.varying).transfer_velocity\n ];\n old_boundaries [dimension] [direction] = [\n Some(boundary.clone()),\n Some(boundary.clone()),\n ];\n },\n 2 => {\n let boundaries = &varying.boundaries [dimension] [direction];\n assert!(&boundaries[0].nodes[other_direction] == node);\n assert!(&boundaries[1].nodes[other_direction] == node);\n velocities [dimension] [direction] = [\n get!(accessor, &boundaries [0].varying).transfer_velocity,\n get!(accessor, &boundaries [1].varying).transfer_velocity,\n ];\n old_boundaries [dimension] [direction] = [\n Some(boundaries [0].clone()),\n Some(boundaries [1].clone()),\n ];\n },\n 0 => (),\n _ => unreachable!(),\n };\n }\n middle_velocities [dimension] = [\n (velocities [dimension] [0] [0] + velocities [dimension] [1] [0]) \/ 2,\n (velocities [dimension] [0] [1] + velocities [dimension] [1] [1]) \/ 2,\n ];\n }\n \n \/\/ TODO: more accurate computation of ideal middle velocities and ink amounts,\n \/\/ and conserve ink correctly\n let mut new_children = [[[None,None],[None,None]],[[None,None],[None,None]]];\n for x in 0..2 {\n let x_signum = (x as Distance*2)-1;\n for y in 0..2 {\n let y_signum = (y as Distance*2)-1;\n let center = [\n node.center[0] + (node.width >> 2)*x_signum,\n node.center[1] + (node.width >> 2)*y_signum,\n ];\n let new_child = accessor.new_handle (NodeData {\n width: node.width >> 1,\n center: center,\n parent: Some (node.clone()),\n varying: 
DataTimelineCell::new (SimpleTimeline::new ()),\n });\n set (accessor, &new_child.varying, NodeVarying {\n last_change: *accessor.now(),\n ink_at_last_change: varying.ink_at_last_change >> 2,\n children: Vec::new(),\n boundaries: [[Vec::new(), Vec::new()], [Vec::new(), Vec::new()]],\n accumulation_rate: 0,\n slope: [0; 2],\n });\n varying.children.push (new_child.clone());\n new_children [0][x][y] = Some(new_child.clone());\n new_children [1][y][x] = Some(new_child.clone());\n }\n }\n for dimension in 0..2 {\n for direction in 0..2 {\n let other_direction = (direction + 1) & 1;\n for which in 0..2 {\n if let Some(boundary) = old_boundaries [dimension] [direction] [which].as_ref() {\n let neighbor = &boundary.nodes [direction];\n assert!(neighbor.width <= node.width);\n set_with!(accessor, &neighbor.varying, | neighbor_varying | {\n for b in neighbor_varying.boundaries [dimension] [other_direction].iter() {\n assert!(&b.nodes[other_direction] == node);\n }\n neighbor_varying.boundaries [dimension] [other_direction].clear();\n });\n }\n }\n }\n }\n let mut new_boundaries = Vec::new();\n for dimension in 0..2 {\n let other_dimension = (dimension + 1) & 1;\n for direction in 0..2 {\n let other_direction = (direction + 1) & 1;\n let direction_signum = (direction as Distance*2)-1;\n for which in 0..2 {\n let which_signum = (which as Distance*2)-1;\n if let Some(boundary) = old_boundaries [dimension] [direction] [which].as_ref() {\n let child = new_children [dimension] [direction] [which].as_ref().unwrap();\n let neighbor = &boundary.nodes [direction];\n let mut center = node.center;\n center [dimension] += (node.width >> 2)*direction_signum;\n center [other_dimension] += (node.width >> 4)*which_signum;\n let new_boundary = accessor.new_handle (BoundaryData {\n length: node.width >> 1,\n center: center,\n nodes: if direction == 0 { [neighbor.clone(), child.clone()] } else { [child.clone(), neighbor.clone()] },\n varying: DataTimelineCell::new (SimpleTimeline::new ()),\n });\n set (accessor, &new_boundary.varying, BoundaryVarying {\n transfer_velocity: velocities [dimension] [direction] [which],\n next_change: None,\n });\n set_with!(accessor, &child.varying, | child_varying | {\n child_varying.boundaries [dimension] [direction].push(new_boundary.clone());\n });\n set_with!(accessor, &neighbor.varying, | neighbor_varying | {\n neighbor_varying.boundaries [dimension] [other_direction].push(new_boundary.clone());\n });\n new_boundaries.push (new_boundary);\n }\n }\n }\n }\n for dimension in 0..2 {\n let other_dimension = (dimension + 1) & 1;\n for which in 0..2 {\n let which_signum = (which as Distance*2)-1;\n let child0 = new_children [dimension] [0] [which].as_ref().unwrap();\n let child1 = new_children [dimension] [1] [which].as_ref().unwrap();\n let mut center = node.center;\n center [other_dimension] += (node.width >> 4)*which_signum;\n let new_boundary = accessor.new_handle (BoundaryData {\n length: node.width >> 1,\n center: center,\n nodes: [child0.clone(), child1.clone()],\n varying: DataTimelineCell::new (SimpleTimeline::new ()),\n });\n set (accessor, &new_boundary.varying, BoundaryVarying {\n transfer_velocity: middle_velocities [dimension] [which],\n next_change: None,\n });\n set_with!(accessor, &child0.varying, | child0_varying | {\n child0_varying.boundaries [dimension] [1].push(new_boundary.clone());\n });\n set_with!(accessor, &child1.varying, | child1_varying | {\n child1_varying.boundaries [dimension] [0].push(new_boundary.clone());\n });\n new_boundaries.push (new_boundary);\n 
}\n }\n for dimension in 0..2 {\n for direction in 0..2 {\n for which in 0..2 {\n if let Some(boundary) = old_boundaries [dimension] [direction] [which].as_ref() {\n if !just_destroyed (accessor, & boundary.varying) {\n let boundary_varying = query (accessor, &boundary.varying);\n destroy (accessor, &boundary.varying);\n }\n }\n }\n }\n }\n \n varying.boundaries = [[Vec::new(), Vec::new()], [Vec::new(), Vec::new()]];\n \n set (accessor, &node.varying, varying);\n \n for boundary in new_boundaries {\n update_transfer_change_prediction (accessor, &boundary);\n }\n \n}\n\n\nfn merge > (accessor: &A, node: &NodeHandle) {\n let mut varying = get!(accessor, &node.varying);\n assert!(!varying.children.is_empty());\n \n varying.last_change =*accessor.now();\n varying.ink_at_last_change = 0;\n let mut prior_boundaries = Vec::new();\n let mut discovered_prior_boundaries = HashSet::new();\n let mut prior_children = HashSet::new();\n let mut neighbors = Vec::new();\n let mut discovered_neighbors = HashSet::new();\n for child in varying.children.iter() {\n update_node(accessor, child);\n let child_varying = get!(accessor, &child.varying);\n assert!(child_varying.children.is_empty());\n assert!(child_varying.last_change == *accessor.now());\n varying.ink_at_last_change += child_varying.ink_at_last_change;\n for (dimension, whatever) in child_varying.boundaries.iter().enumerate() {for (direction, something) in whatever.iter().enumerate() {for other_boundary in something.iter() {\n if discovered_prior_boundaries.insert (other_boundary.clone()) {\n prior_boundaries.push ((dimension, direction, other_boundary.clone()));\n }\n }}}\n prior_children.insert (child.clone()) ;\n\n destroy (accessor, &child.varying);\n }\n \n let mut new_boundaries = Vec::new();\n for (dimension, direction, boundary) in prior_boundaries {\n let boundary_varying = get!(accessor, &boundary.varying);\n let neighbor = if !prior_children.contains (&boundary.nodes [1]) {\n Some (boundary.nodes [1].clone())\n } else if !prior_children.contains (&boundary.nodes [0]) {\n Some (boundary.nodes [0].clone())\n } else {None};\n if let Some(neighbor) = neighbor {\n if discovered_neighbors.insert (neighbor.clone()) {\n neighbors.push (neighbor.clone());\n let other_direction = (direction+1)&1;\n set_with!(accessor, &neighbor.varying, | neighbor_varying | {\n for b in neighbor_varying.boundaries [dimension] [other_direction].iter() {\n assert!(prior_children.contains(&b.nodes[other_direction]));\n }\n neighbor_varying.boundaries [dimension] [other_direction].clear();\n });\n let mut center = boundary.center;\n if neighbor.width == node.width {\n center = [\n (node.center[0] + neighbor.center [0]) >> 1,\n (node.center[1] + neighbor.center [1]) >> 1,\n ];\n }\n let nodes = if direction == 1 {[node.clone(), neighbor.clone()]}else{[neighbor.clone(), node.clone()]};\n let new_boundary = accessor.new_handle (BoundaryData {\n length: neighbor.width,\n center: center,\n nodes: nodes.clone(),\n varying: DataTimelineCell::new (SimpleTimeline::new ()),\n });\n set (accessor, &new_boundary.varying, BoundaryVarying {\n transfer_velocity: boundary_varying.transfer_velocity,\n next_change: None,\n });\n set_with!(accessor, &neighbor.varying, | neighbor_varying | {\n neighbor_varying.boundaries [dimension] [other_direction].push(new_boundary.clone());\n });\n varying.boundaries [dimension] [direction].push(new_boundary.clone());\n new_boundaries.push (new_boundary);\n }\n }\n destroy (accessor, &boundary.varying);\n }\n \n varying.children.clear();\n set 
(accessor, &node.varying, varying);\n update_inferred_node_properties (accessor, node);\n \n for boundary in new_boundaries {\n update_transfer_change_prediction (accessor, &boundary);\n }\n\n if let Some(parent) = node.parent.as_ref() {\n \/\/audit(accessor);\n maybe_merge (accessor, parent);\n \/\/audit(accessor);\n }\n \n for neighbor in neighbors {\n maybe_merge (accessor, &neighbor);\n }\n}\n<|endoftext|>"} {"text":"docs: Add basic example (#216)fn main() {\n const CARGO_MANIFEST: &str = r#\"\n[package]\nname = \"cargo\"\nversion = \"0.57.0\"\nedition = \"2018\"\nauthors = [\"Yehuda Katz \",\n \"Carl Lerche \",\n \"Alex Crichton \"]\nlicense = \"MIT OR Apache-2.0\"\nhomepage = \"https:\/\/crates.io\"\nrepository = \"https:\/\/github.com\/rust-lang\/cargo\"\ndocumentation = \"https:\/\/docs.rs\/cargo\"\nreadme = \"README.md\"\ndescription = \"\"\"\nCargo, a package manager for Rust.\n\"\"\"\n\n[lib]\nname = \"cargo\"\npath = \"src\/cargo\/lib.rs\"\n\n[dependencies]\natty = \"0.2\"\nbytesize = \"1.0\"\ncargo-platform = { path = \"crates\/cargo-platform\", version = \"0.1.2\" }\ncargo-util = { path = \"crates\/cargo-util\", version = \"0.1.1\" }\ncrates-io = { path = \"crates\/crates-io\", version = \"0.33.0\" }\ncrossbeam-utils = \"0.8\"\ncurl = { version = \"0.4.38\", features = [\"http2\"] }\ncurl-sys = \"0.4.45\"\nenv_logger = \"0.9.0\"\npretty_env_logger = { version = \"0.4\", optional = true }\nanyhow = \"1.0\"\nfiletime = \"0.2.9\"\nflate2 = { version = \"1.0.3\", default-features = false, features = [\"zlib\"] }\ngit2 = \"0.13.16\"\ngit2-curl = \"0.14.1\"\nglob = \"0.3.0\"\nhex = \"0.4\"\nhome = \"0.5\"\nhumantime = \"2.0.0\"\nignore = \"0.4.7\"\nlazy_static = \"1.2.0\"\njobserver = \"0.1.24\"\nlazycell = \"1.2.0\"\nlibc = \"0.2\"\nlog = \"0.4.6\"\nlibgit2-sys = \"0.12.18\"\nmemchr = \"2.1.3\"\nnum_cpus = \"1.0\"\nopener = \"0.5\"\npercent-encoding = \"2.0\"\nrustfix = \"0.6.0\"\nsemver = { version = \"1.0.3\", features = [\"serde\"] }\nserde = { version = \"1.0.123\", features = [\"derive\"] }\nserde_ignored = \"0.1.0\"\nserde_json = { version = \"1.0.30\", features = [\"raw_value\"] }\nshell-escape = \"0.1.4\"\nstrip-ansi-escapes = \"0.1.0\"\ntar = { version = \"0.4.35\", default-features = false }\ntempfile = \"3.0\"\ntermcolor = \"1.1\"\ntoml = \"0.5.7\"\nunicode-xid = \"0.2.0\"\nurl = \"2.2.2\"\nwalkdir = \"2.2\"\nclap = \"2.31.2\"\nunicode-width = \"0.1.5\"\nopenssl = { version = '0.10.11', optional = true }\nim-rc = \"15.0.0\"\nitertools = \"0.10.0\"\n\n# A noop dependency that changes in the Rust repository, it's a bit of a hack.\n# See the `src\/tools\/rustc-workspace-hack\/README.md` file in `rust-lang\/rust`\n# for more information.\nrustc-workspace-hack = \"1.0.0\"\n\n[target.'cfg(windows)'.dependencies]\nfwdansi = \"1.1.0\"\n\n[target.'cfg(windows)'.dependencies.winapi]\nversion = \"0.3\"\nfeatures = [\n \"basetsd\",\n \"handleapi\",\n \"jobapi\",\n \"jobapi2\",\n \"memoryapi\",\n \"minwindef\",\n \"ntdef\",\n \"ntstatus\",\n \"processenv\",\n \"processthreadsapi\",\n \"psapi\",\n \"synchapi\",\n \"winerror\",\n \"winbase\",\n \"wincon\",\n \"winnt\",\n]\n\n[dev-dependencies]\ncargo-test-macro = { path = \"crates\/cargo-test-macro\" }\ncargo-test-support = { path = \"crates\/cargo-test-support\" }\n\n[build-dependencies]\nflate2 = { version = \"1.0.3\", default-features = false, features = [\"zlib\"] }\ntar = { version = \"0.4.26\", default-features = false }\n\n[[bin]]\nname = \"cargo\"\ntest = false\ndoc = false\n\n[features]\ndeny-warnings = []\nvendored-openssl = 
[\"openssl\/vendored\"]\npretty-env-logger = [\"pretty_env_logger\"]\n\"#;\n let manifest = CARGO_MANIFEST.parse::().unwrap();\n println!(\"{:#?}\", manifest);\n}\n<|endoftext|>"} {"text":"Add an example displaying the current number of CPUsextern crate num_cpus;\n\nfn main() {\n println!(\"Logical CPUs: {}\", num_cpus::get());\n println!(\"Physical CPUs: {}\", num_cpus::get_physical());\n}\n<|endoftext|>"} {"text":"Added more examples<|endoftext|>"} {"text":"impl addr messageuse std;\nextern crate time;\nuse super::Address;\nuse ::serialize::{self, Serializable};\n\n#[derive(Debug,Default)]\npub struct AddrMessage {\n pub addrs : Vec
,\n}\nimpl std::fmt::Display for AddrMessage {\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n write!(f, \"Addr(len={})\", self.addrs.len())\n }\n}\n\nimpl Serializable for AddrMessage {\n fn get_serialize_size(&self) -> usize {\n self.addrs.get_serialize_size()\n }\n fn serialize(&self, io:&mut std::io::Write) -> serialize::Result {\n self.addrs.serialize(io)\n }\n fn unserialize(&mut self, io:&mut std::io::Read) -> serialize::Result {\n self.addrs.unserialize(io)\n }\n}\n\n<|endoftext|>"} {"text":"Fixing daemon<|endoftext|>"} {"text":"add test for #14229\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Foo: Sized {\n fn foo(self) {}\n}\n\ntrait Bar: Sized {\n fn bar(self) {}\n}\n\nstruct S;\n\nimpl<'l> Foo for &'l S {}\n\nimpl Bar for T {}\n\nfn main() {\n let s = S;\n s.foo();\n (&s).bar();\n s.bar();\n}\n<|endoftext|>"} {"text":"Add test for #24533\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::slice::Iter;\nuse std::io::{Error, ErrorKind, Result};\nuse std::vec::*;\n\nfn foo(it: &mut Iter) -> Result {\n Ok(*it.next().unwrap())\n}\n\nfn bar() -> Result {\n let data: Vec = Vec::new();\n\n if true {\n return Err(Error::new(ErrorKind::NotFound, \"msg\"));\n }\n\n let mut it = data.iter();\n foo(&mut it)\n}\n\nfn main() {\n bar();\n}\n<|endoftext|>"} {"text":"Rollup merge of #27140 - dotdash:test-26468, r=luqmana\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nenum FooMode {\n Check = 0x1001,\n}\n\nenum BarMode {\n Check = 0x2001,\n}\n\nenum Mode {\n Foo(FooMode),\n Bar(BarMode),\n}\n\n#[inline(never)]\nfn broken(mode: &Mode) -> u32 {\n for _ in 0..1 {\n if let Mode::Foo(FooMode::Check) = *mode { return 17 }\n if let Mode::Bar(BarMode::Check) = *mode { return 19 }\n }\n return 42;\n}\n\nfn main() {\n let mode = Mode::Bar(BarMode::Check);\n assert_eq!(broken(&mode), 19);\n}\n<|endoftext|>"} {"text":"Added version to handler\/mod.rs<|endoftext|>"} {"text":"Unhide Archive::from_bytes from docs<|endoftext|>"} {"text":"Fixed mistake in previous album<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Command-line interface of the rustbuild build system.\n\/\/!\n\/\/! 
This module implements the command-line parsing of the build system which\n\/\/! has various flags to configure how it's run.\n\nuse std::env;\nuse std::fs;\nuse std::path::PathBuf;\nuse std::process;\n\nuse getopts::Options;\n\nuse Build;\nuse config::Config;\nuse metadata;\nuse builder::Builder;\n\nuse cache::{Interned, INTERNER};\n\n\/\/\/ Deserialized version of all flags for this compile.\npub struct Flags {\n pub verbose: usize, \/\/ verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose\n pub on_fail: Option,\n pub stage: Option,\n pub keep_stage: Option,\n pub build: Option>,\n\n pub host: Vec>,\n pub target: Vec>,\n pub config: Option,\n pub src: PathBuf,\n pub jobs: Option,\n pub cmd: Subcommand,\n pub incremental: bool,\n}\n\npub enum Subcommand {\n Build {\n paths: Vec,\n },\n Doc {\n paths: Vec,\n },\n Test {\n paths: Vec,\n test_args: Vec,\n fail_fast: bool,\n },\n Bench {\n paths: Vec,\n test_args: Vec,\n },\n Clean {\n all: bool,\n },\n Dist {\n paths: Vec,\n },\n Install {\n paths: Vec,\n },\n}\n\nimpl Default for Subcommand {\n fn default() -> Subcommand {\n Subcommand::Build {\n paths: vec![PathBuf::from(\"nowhere\")],\n }\n }\n}\n\nimpl Flags {\n pub fn parse(args: &[String]) -> Flags {\n let mut extra_help = String::new();\n let mut subcommand_help = format!(\"\\\nUsage: x.py [options] [...]\n\nSubcommands:\n build Compile either the compiler or libraries\n test Build and run some test suites\n bench Build and run some benchmarks\n doc Build documentation\n clean Clean out build directories\n dist Build distribution artifacts\n install Install distribution artifacts\n\nTo learn more about a subcommand, run `.\/x.py -h`\");\n\n let mut opts = Options::new();\n \/\/ Options common to all subcommands\n opts.optflagmulti(\"v\", \"verbose\", \"use verbose output (-vv for very verbose)\");\n opts.optflag(\"i\", \"incremental\", \"use incremental compilation\");\n opts.optopt(\"\", \"config\", \"TOML configuration file for build\", \"FILE\");\n opts.optopt(\"\", \"build\", \"build target of the stage0 compiler\", \"BUILD\");\n opts.optmulti(\"\", \"host\", \"host targets to build\", \"HOST\");\n opts.optmulti(\"\", \"target\", \"target targets to build\", \"TARGET\");\n opts.optopt(\"\", \"on-fail\", \"command to run on failure\", \"CMD\");\n opts.optopt(\"\", \"stage\", \"stage to build\", \"N\");\n opts.optopt(\"\", \"keep-stage\", \"stage to keep without recompiling\", \"N\");\n opts.optopt(\"\", \"src\", \"path to the root of the rust checkout\", \"DIR\");\n opts.optopt(\"j\", \"jobs\", \"number of jobs to run in parallel\", \"JOBS\");\n opts.optflag(\"h\", \"help\", \"print this help message\");\n\n \/\/ fn usage()\n let usage = |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! {\n println!(\"{}\", opts.usage(subcommand_help));\n if !extra_help.is_empty() {\n println!(\"{}\", extra_help);\n }\n process::exit(exit_code);\n };\n\n \/\/ We can't use getopt to parse the options until we have completed specifying which\n \/\/ options are valid, but under the current implementation, some options are conditional on\n \/\/ the subcommand. Therefore we must manually identify the subcommand first, so that we can\n \/\/ complete the definition of the options. 
Then we can use the getopt::Matches object from\n \/\/ there on out.\n let subcommand = args.iter().find(|&s|\n (s == \"build\")\n || (s == \"test\")\n || (s == \"bench\")\n || (s == \"doc\")\n || (s == \"clean\")\n || (s == \"dist\")\n || (s == \"install\"));\n let subcommand = match subcommand {\n Some(s) => s,\n None => {\n \/\/ No subcommand -- show the general usage and subcommand help\n println!(\"{}\\n\", subcommand_help);\n process::exit(1);\n }\n };\n\n \/\/ Some subcommands get extra options\n match subcommand.as_str() {\n \"test\" => {\n opts.optflag(\"\", \"no-fail-fast\", \"Run all tests regardless of failure\");\n opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\");\n },\n \"bench\" => { opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\"); },\n \"clean\" => { opts.optflag(\"\", \"all\", \"clean all build artifacts\"); },\n _ => { },\n };\n\n \/\/ Done specifying what options are possible, so do the getopts parsing\n let matches = opts.parse(&args[..]).unwrap_or_else(|e| {\n \/\/ Invalid argument\/option format\n println!(\"\\n{}\\n\", e);\n usage(1, &opts, &subcommand_help, &extra_help);\n });\n \/\/ Extra sanity check to make sure we didn't hit this crazy corner case:\n \/\/\n \/\/ .\/x.py --frobulate clean build\n \/\/ ^-- option ^ ^- actual subcommand\n \/\/ \\_ arg to option could be mistaken as subcommand\n let mut pass_sanity_check = true;\n match matches.free.get(0) {\n Some(check_subcommand) => {\n if check_subcommand != subcommand {\n pass_sanity_check = false;\n }\n },\n None => {\n pass_sanity_check = false;\n }\n }\n if !pass_sanity_check {\n println!(\"{}\\n\", subcommand_help);\n println!(\"Sorry, I couldn't figure out which subcommand you were trying to specify.\\n\\\n You may need to move some options to after the subcommand.\\n\");\n process::exit(1);\n }\n \/\/ Extra help text for some commands\n match subcommand.as_str() {\n \"build\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to the crates\n and\/or artifacts to compile. For example:\n\n .\/x.py build src\/libcore\n .\/x.py build src\/libcore src\/libproc_macro\n .\/x.py build src\/libstd --stage 1\n\n If no arguments are passed then the complete artifacts for that stage are\n also compiled.\n\n .\/x.py build\n .\/x.py build --stage 1\n\n For a quick build of a usable compiler, you can pass:\n\n .\/x.py build --stage 1 src\/libtest\n\n This will first build everything once (like --stage 0 without further\n arguments would), and then use the compiler built in stage 0 to build\n src\/libtest and its dependencies.\n Once this is done, build\/$ARCH\/stage1 contains a usable compiler.\");\n }\n \"test\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to tests that\n should be compiled and run. For example:\n\n .\/x.py test src\/test\/run-pass\n .\/x.py test src\/libstd --test-args hash_map\n .\/x.py test src\/libstd --stage 0\n\n If no arguments are passed then the complete artifacts for that stage are\n compiled and tested.\n\n .\/x.py test\n .\/x.py test --stage 1\");\n }\n \"doc\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories of documentation\n to build. 
For example:\n\n .\/x.py doc src\/doc\/book\n .\/x.py doc src\/doc\/nomicon\n .\/x.py doc src\/doc\/book src\/libstd\n\n If no arguments are passed then everything is documented:\n\n .\/x.py doc\n .\/x.py doc --stage 1\");\n }\n _ => { }\n };\n \/\/ Get any optional paths which occur after the subcommand\n let cwd = t!(env::current_dir());\n let paths = matches.free[1..].iter().map(|p| cwd.join(p)).collect::>();\n\n let cfg_file = matches.opt_str(\"config\").map(PathBuf::from).or_else(|| {\n if fs::metadata(\"config.toml\").is_ok() {\n Some(PathBuf::from(\"config.toml\"))\n } else {\n None\n }\n });\n\n \/\/ All subcommands can have an optional \"Available paths\" section\n if matches.opt_present(\"verbose\") {\n let config = Config::parse(&[\"build\".to_string()]);\n let mut build = Build::new(config);\n metadata::build(&mut build);\n\n let maybe_rules_help = Builder::get_help(&build, subcommand.as_str());\n extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str());\n } else {\n extra_help.push_str(format!(\"Run `.\/x.py {} -h -v` to see a list of available paths.\",\n subcommand).as_str());\n }\n\n \/\/ User passed in -h\/--help?\n if matches.opt_present(\"help\") {\n usage(0, &opts, &subcommand_help, &extra_help);\n }\n\n let cmd = match subcommand.as_str() {\n \"build\" => {\n Subcommand::Build { paths: paths }\n }\n \"test\" => {\n Subcommand::Test {\n paths,\n test_args: matches.opt_strs(\"test-args\"),\n fail_fast: !matches.opt_present(\"no-fail-fast\"),\n }\n }\n \"bench\" => {\n Subcommand::Bench {\n paths,\n test_args: matches.opt_strs(\"test-args\"),\n }\n }\n \"doc\" => {\n Subcommand::Doc { paths: paths }\n }\n \"clean\" => {\n if paths.len() > 0 {\n println!(\"\\nclean takes no arguments\\n\");\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n\n Subcommand::Clean {\n all: matches.opt_present(\"all\"),\n }\n }\n \"dist\" => {\n Subcommand::Dist {\n paths,\n }\n }\n \"install\" => {\n Subcommand::Install {\n paths,\n }\n }\n _ => {\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n };\n\n\n let mut stage = matches.opt_str(\"stage\").map(|j| j.parse().unwrap());\n\n if matches.opt_present(\"incremental\") && stage.is_none() {\n stage = Some(1);\n }\n\n let cwd = t!(env::current_dir());\n let src = matches.opt_str(\"src\").map(PathBuf::from)\n .or_else(|| env::var_os(\"SRC\").map(PathBuf::from))\n .unwrap_or(cwd);\n\n Flags {\n verbose: matches.opt_count(\"verbose\"),\n stage,\n on_fail: matches.opt_str(\"on-fail\"),\n keep_stage: matches.opt_str(\"keep-stage\").map(|j| j.parse().unwrap()),\n build: matches.opt_str(\"build\").map(|s| INTERNER.intern_string(s)),\n host: split(matches.opt_strs(\"host\"))\n .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(),\n target: split(matches.opt_strs(\"target\"))\n .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(),\n config: cfg_file,\n src,\n jobs: matches.opt_str(\"jobs\").map(|j| j.parse().unwrap()),\n cmd,\n incremental: matches.opt_present(\"incremental\"),\n }\n }\n}\n\nimpl Subcommand {\n pub fn test_args(&self) -> Vec<&str> {\n match *self {\n Subcommand::Test { ref test_args, .. } |\n Subcommand::Bench { ref test_args, .. } => {\n test_args.iter().flat_map(|s| s.split_whitespace()).collect()\n }\n _ => Vec::new(),\n }\n }\n\n pub fn fail_fast(&self) -> bool {\n match *self {\n Subcommand::Test { fail_fast, .. 
} => fail_fast,\n _ => false,\n }\n }\n}\n\nfn split(s: Vec) -> Vec {\n s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()\n}\nDo not show \"available paths\" help in .\/x.py clean\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Command-line interface of the rustbuild build system.\n\/\/!\n\/\/! This module implements the command-line parsing of the build system which\n\/\/! has various flags to configure how it's run.\n\nuse std::env;\nuse std::fs;\nuse std::path::PathBuf;\nuse std::process;\n\nuse getopts::Options;\n\nuse Build;\nuse config::Config;\nuse metadata;\nuse builder::Builder;\n\nuse cache::{Interned, INTERNER};\n\n\/\/\/ Deserialized version of all flags for this compile.\npub struct Flags {\n pub verbose: usize, \/\/ verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose\n pub on_fail: Option,\n pub stage: Option,\n pub keep_stage: Option,\n pub build: Option>,\n\n pub host: Vec>,\n pub target: Vec>,\n pub config: Option,\n pub src: PathBuf,\n pub jobs: Option,\n pub cmd: Subcommand,\n pub incremental: bool,\n}\n\npub enum Subcommand {\n Build {\n paths: Vec,\n },\n Doc {\n paths: Vec,\n },\n Test {\n paths: Vec,\n test_args: Vec,\n fail_fast: bool,\n },\n Bench {\n paths: Vec,\n test_args: Vec,\n },\n Clean {\n all: bool,\n },\n Dist {\n paths: Vec,\n },\n Install {\n paths: Vec,\n },\n}\n\nimpl Default for Subcommand {\n fn default() -> Subcommand {\n Subcommand::Build {\n paths: vec![PathBuf::from(\"nowhere\")],\n }\n }\n}\n\nimpl Flags {\n pub fn parse(args: &[String]) -> Flags {\n let mut extra_help = String::new();\n let mut subcommand_help = format!(\"\\\nUsage: x.py [options] [...]\n\nSubcommands:\n build Compile either the compiler or libraries\n test Build and run some test suites\n bench Build and run some benchmarks\n doc Build documentation\n clean Clean out build directories\n dist Build distribution artifacts\n install Install distribution artifacts\n\nTo learn more about a subcommand, run `.\/x.py -h`\");\n\n let mut opts = Options::new();\n \/\/ Options common to all subcommands\n opts.optflagmulti(\"v\", \"verbose\", \"use verbose output (-vv for very verbose)\");\n opts.optflag(\"i\", \"incremental\", \"use incremental compilation\");\n opts.optopt(\"\", \"config\", \"TOML configuration file for build\", \"FILE\");\n opts.optopt(\"\", \"build\", \"build target of the stage0 compiler\", \"BUILD\");\n opts.optmulti(\"\", \"host\", \"host targets to build\", \"HOST\");\n opts.optmulti(\"\", \"target\", \"target targets to build\", \"TARGET\");\n opts.optopt(\"\", \"on-fail\", \"command to run on failure\", \"CMD\");\n opts.optopt(\"\", \"stage\", \"stage to build\", \"N\");\n opts.optopt(\"\", \"keep-stage\", \"stage to keep without recompiling\", \"N\");\n opts.optopt(\"\", \"src\", \"path to the root of the rust checkout\", \"DIR\");\n opts.optopt(\"j\", \"jobs\", \"number of jobs to run in parallel\", \"JOBS\");\n opts.optflag(\"h\", \"help\", \"print this help message\");\n\n \/\/ fn usage()\n let usage = |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! 
{\n println!(\"{}\", opts.usage(subcommand_help));\n if !extra_help.is_empty() {\n println!(\"{}\", extra_help);\n }\n process::exit(exit_code);\n };\n\n \/\/ We can't use getopt to parse the options until we have completed specifying which\n \/\/ options are valid, but under the current implementation, some options are conditional on\n \/\/ the subcommand. Therefore we must manually identify the subcommand first, so that we can\n \/\/ complete the definition of the options. Then we can use the getopt::Matches object from\n \/\/ there on out.\n let subcommand = args.iter().find(|&s|\n (s == \"build\")\n || (s == \"test\")\n || (s == \"bench\")\n || (s == \"doc\")\n || (s == \"clean\")\n || (s == \"dist\")\n || (s == \"install\"));\n let subcommand = match subcommand {\n Some(s) => s,\n None => {\n \/\/ No subcommand -- show the general usage and subcommand help\n println!(\"{}\\n\", subcommand_help);\n process::exit(1);\n }\n };\n\n \/\/ Some subcommands get extra options\n match subcommand.as_str() {\n \"test\" => {\n opts.optflag(\"\", \"no-fail-fast\", \"Run all tests regardless of failure\");\n opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\");\n },\n \"bench\" => { opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\"); },\n \"clean\" => { opts.optflag(\"\", \"all\", \"clean all build artifacts\"); },\n _ => { },\n };\n\n \/\/ Done specifying what options are possible, so do the getopts parsing\n let matches = opts.parse(&args[..]).unwrap_or_else(|e| {\n \/\/ Invalid argument\/option format\n println!(\"\\n{}\\n\", e);\n usage(1, &opts, &subcommand_help, &extra_help);\n });\n \/\/ Extra sanity check to make sure we didn't hit this crazy corner case:\n \/\/\n \/\/ .\/x.py --frobulate clean build\n \/\/ ^-- option ^ ^- actual subcommand\n \/\/ \\_ arg to option could be mistaken as subcommand\n let mut pass_sanity_check = true;\n match matches.free.get(0) {\n Some(check_subcommand) => {\n if check_subcommand != subcommand {\n pass_sanity_check = false;\n }\n },\n None => {\n pass_sanity_check = false;\n }\n }\n if !pass_sanity_check {\n println!(\"{}\\n\", subcommand_help);\n println!(\"Sorry, I couldn't figure out which subcommand you were trying to specify.\\n\\\n You may need to move some options to after the subcommand.\\n\");\n process::exit(1);\n }\n \/\/ Extra help text for some commands\n match subcommand.as_str() {\n \"build\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to the crates\n and\/or artifacts to compile. For example:\n\n .\/x.py build src\/libcore\n .\/x.py build src\/libcore src\/libproc_macro\n .\/x.py build src\/libstd --stage 1\n\n If no arguments are passed then the complete artifacts for that stage are\n also compiled.\n\n .\/x.py build\n .\/x.py build --stage 1\n\n For a quick build of a usable compiler, you can pass:\n\n .\/x.py build --stage 1 src\/libtest\n\n This will first build everything once (like --stage 0 without further\n arguments would), and then use the compiler built in stage 0 to build\n src\/libtest and its dependencies.\n Once this is done, build\/$ARCH\/stage1 contains a usable compiler.\");\n }\n \"test\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to tests that\n should be compiled and run. 
For example:\n\n .\/x.py test src\/test\/run-pass\n .\/x.py test src\/libstd --test-args hash_map\n .\/x.py test src\/libstd --stage 0\n\n If no arguments are passed then the complete artifacts for that stage are\n compiled and tested.\n\n .\/x.py test\n .\/x.py test --stage 1\");\n }\n \"doc\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories of documentation\n to build. For example:\n\n .\/x.py doc src\/doc\/book\n .\/x.py doc src\/doc\/nomicon\n .\/x.py doc src\/doc\/book src\/libstd\n\n If no arguments are passed then everything is documented:\n\n .\/x.py doc\n .\/x.py doc --stage 1\");\n }\n _ => { }\n };\n \/\/ Get any optional paths which occur after the subcommand\n let cwd = t!(env::current_dir());\n let paths = matches.free[1..].iter().map(|p| cwd.join(p)).collect::>();\n\n let cfg_file = matches.opt_str(\"config\").map(PathBuf::from).or_else(|| {\n if fs::metadata(\"config.toml\").is_ok() {\n Some(PathBuf::from(\"config.toml\"))\n } else {\n None\n }\n });\n\n \/\/ All subcommands except `clean` can have an optional \"Available paths\" section\n if matches.opt_present(\"verbose\") {\n let config = Config::parse(&[\"build\".to_string()]);\n let mut build = Build::new(config);\n metadata::build(&mut build);\n\n let maybe_rules_help = Builder::get_help(&build, subcommand.as_str());\n extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str());\n } else if subcommand.as_str() != \"clean\" {\n extra_help.push_str(format!(\n \"Run `.\/x.py {} -h -v` to see a list of available paths.\",\n subcommand).as_str());\n }\n\n \/\/ User passed in -h\/--help?\n if matches.opt_present(\"help\") {\n usage(0, &opts, &subcommand_help, &extra_help);\n }\n\n let cmd = match subcommand.as_str() {\n \"build\" => {\n Subcommand::Build { paths: paths }\n }\n \"test\" => {\n Subcommand::Test {\n paths,\n test_args: matches.opt_strs(\"test-args\"),\n fail_fast: !matches.opt_present(\"no-fail-fast\"),\n }\n }\n \"bench\" => {\n Subcommand::Bench {\n paths,\n test_args: matches.opt_strs(\"test-args\"),\n }\n }\n \"doc\" => {\n Subcommand::Doc { paths: paths }\n }\n \"clean\" => {\n if paths.len() > 0 {\n println!(\"\\nclean does not take a path argument\\n\");\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n\n Subcommand::Clean {\n all: matches.opt_present(\"all\"),\n }\n }\n \"dist\" => {\n Subcommand::Dist {\n paths,\n }\n }\n \"install\" => {\n Subcommand::Install {\n paths,\n }\n }\n _ => {\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n };\n\n\n let mut stage = matches.opt_str(\"stage\").map(|j| j.parse().unwrap());\n\n if matches.opt_present(\"incremental\") && stage.is_none() {\n stage = Some(1);\n }\n\n let cwd = t!(env::current_dir());\n let src = matches.opt_str(\"src\").map(PathBuf::from)\n .or_else(|| env::var_os(\"SRC\").map(PathBuf::from))\n .unwrap_or(cwd);\n\n Flags {\n verbose: matches.opt_count(\"verbose\"),\n stage,\n on_fail: matches.opt_str(\"on-fail\"),\n keep_stage: matches.opt_str(\"keep-stage\").map(|j| j.parse().unwrap()),\n build: matches.opt_str(\"build\").map(|s| INTERNER.intern_string(s)),\n host: split(matches.opt_strs(\"host\"))\n .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(),\n target: split(matches.opt_strs(\"target\"))\n .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(),\n config: cfg_file,\n src,\n jobs: matches.opt_str(\"jobs\").map(|j| j.parse().unwrap()),\n cmd,\n incremental: matches.opt_present(\"incremental\"),\n }\n }\n}\n\nimpl Subcommand {\n pub fn 
test_args(&self) -> Vec<&str> {\n match *self {\n Subcommand::Test { ref test_args, .. } |\n Subcommand::Bench { ref test_args, .. } => {\n test_args.iter().flat_map(|s| s.split_whitespace()).collect()\n }\n _ => Vec::new(),\n }\n }\n\n pub fn fail_fast(&self) -> bool {\n match *self {\n Subcommand::Test { fail_fast, .. } => fail_fast,\n _ => false,\n }\n }\n}\n\nfn split(s: Vec) -> Vec {\n s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()\n}\n<|endoftext|>"} {"text":"Add xfailed test for #3707\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-test\nstruct Obj {\n member: uint\n}\n\nimpl Obj {\n static pure fn boom() -> bool {\n return 1+1 == 2\n }\n pure fn chirp() {\n self.boom(); \/\/~ ERROR wat\n }\n}\n\nfn main() {\n let o = Obj { member: 0 };\n o.chirp();\n 1 + 1;\n}\n<|endoftext|>"} {"text":"extern crate cards;\nextern crate libthyme;\nextern crate ncurses;\n\nuse super::{Action,UI};\nuse libthyme::board::{Position,HPosition,VPosition};\nuse libthyme::game::Game;\nuse libthyme::score::Scorer;\n\nconst CARD_WIDTH: i32 = 7;\nconst CARD_HEIGHT: i32 = 5;\nconst CARD_MARGIN: i32 = 2;\nconst BOARD_MARGIN: i32 = 2;\nconst STATUS_HEIGHT: i32 = 2;\n\nconst CARD_COLOR_BLACK: i16 = 1; \/\/ black on white\nconst CARD_COLOR_RED: i16 = 2; \/\/ red on white\nconst CARD_COLOR_EMPTY: i16 = 4; \/\/ white on black\nconst SELECTED_COLOR: i16 = 5; \/\/ yellow on black\nconst CURSOR_INFO_COLOR: i16 = 6; \/\/ cyan on black\nconst GAME_INFO_COLOR: i16 = 7; \/\/ green on black\nconst BG_COLOR: i16 = ncurses::COLOR_BLACK;\nconst CARD_BG_COLOR: i16 = ncurses::COLOR_WHITE;\n\n\/\/\/ Set up the UI\npub fn initialize_screen() {\n ncurses::setlocale(ncurses::LcCategory::ctype, \"\");\n ncurses::initscr();\n ncurses::noecho();\n ncurses::start_color();\n ncurses::keypad(ncurses::stdscr, true);\n ncurses::curs_set(ncurses::CURSOR_VISIBILITY::CURSOR_INVISIBLE);\n ncurses::init_pair(CARD_COLOR_BLACK, ncurses::COLOR_BLACK, CARD_BG_COLOR);\n ncurses::init_pair(CARD_COLOR_RED, ncurses::COLOR_RED, CARD_BG_COLOR);\n ncurses::init_pair(CARD_COLOR_EMPTY, ncurses::COLOR_WHITE, BG_COLOR);\n ncurses::init_pair(SELECTED_COLOR, ncurses::COLOR_YELLOW, BG_COLOR);\n ncurses::init_pair(CURSOR_INFO_COLOR, ncurses::COLOR_CYAN, BG_COLOR);\n ncurses::init_pair(GAME_INFO_COLOR, ncurses::COLOR_GREEN, BG_COLOR);\n}\n\n\/\/\/ Redraw a UI in the current screen\npub fn redraw(ui: &UI, game: &mut Game, refresh: bool) {\n if refresh {\n ncurses::clear();\n ncurses::bkgd(ncurses::COLOR_PAIR(CARD_COLOR_EMPTY));\n }\n if validate_screen_size() {\n write_title(game);\n draw_cards(ui, game);\n write_cursor_message(ui, game);\n write_message(&ui.message);\n }\n ncurses::refresh();\n}\n\n\/\/\/ Tear down the UI\npub fn cleanup() {\n ncurses::endwin();\n}\n\n\/\/\/ Process input from the user\n\/\/\/\n\/\/\/ Known inputs:\n\/\/\/ - Arrow keys: Move cursor between various positions on the board\n\/\/\/ - Q: Quit game\n\/\/\/ - H: Hint (unimplemented)\n\/\/\/ - ?: Help\n\/\/\/ - Space: Toggle position selection\n\/\/\/ - Return: Play move, clear selection\npub fn get_action() -> Action {\n return match ncurses::getch() {\n ncurses::KEY_LEFT => Action::CursorLeft,\n ncurses::KEY_RIGHT => 
Action::CursorRight,\n ncurses::KEY_UP => Action::CursorUp,\n ncurses::KEY_DOWN => Action::CursorDown,\n ncurses::KEY_RESIZE => Action::Resize,\n ncurses::KEY_ENTER | 13 | 10 => Action::Play,\n 32 => Action::ToggleSelection, \/\/ Space\n 113 => Action::Quit, \/\/ Q\n 104 => Action::Hint, \/\/ H\n 63 => Action::Help, \/\/ ?\n _ => Action::Unknown\n }\n}\n\n\/\/\/ Check that the content can fit\nfn validate_screen_size() -> bool {\n let min_height = BOARD_MARGIN*2 + CARD_MARGIN*4 + STATUS_HEIGHT + CARD_HEIGHT*3;\n if ncurses::LINES < min_height || ncurses::COLS < 50 {\n write_message(&format!(\n \"Please resize your terminal to be at least 50x{}\",\n min_height));\n return false\n }\n return true\n}\n\n\/\/\/ Print the game title and status info\nfn write_title(game: &mut Game) {\n printw_margin(0, 0);\n ncurses::attron(ncurses::A_BOLD());\n ncurses::printw(\"Thyme\");\n ncurses::attroff(ncurses::A_BOLD());\n ncurses::printw(&format!(\" - Score: {}\", game.score()));\n printw_margin(0, 1);\n let (_, suit) = layout_suit(game.board.lucky_card);\n let info = ncurses::COLOR_PAIR(GAME_INFO_COLOR);\n ncurses::attron(info);\n ncurses::printw(&format!(\"Lucky Suit: {} Discards Left: {}\/{}\", suit,\n game.discards_allowed, game.discards_allowed_max));\n ncurses::clrtoeol();\n ncurses::attroff(info);\n}\n\n\/\/\/ Print the message at the bottom of the window\nfn write_message(message: &str) {\n printw_margin(0, ncurses::LINES - 1);\n ncurses::printw(message);\n ncurses::clrtoeol();\n}\n\nfn write_cursor_message(ui: &UI, game: &Game) {\n let stacked_cards = game.board.count_cards(ui.cursor_position);\n let all_cards = game.board.count_all_cards();\n let message = format!(\"*{}\/{} cards in the stack\", stacked_cards, all_cards);\n let color = ncurses::COLOR_PAIR(CURSOR_INFO_COLOR);\n printw_margin(0, ncurses::LINES - 2);\n ncurses::attron(color);\n ncurses::printw(&message);\n ncurses::clrtoeol();\n ncurses::attroff(color);\n}\n\nfn printw_margin(x: i32, y: i32) {\n ncurses::mv(y, x);\n printw_repeat(\" \", BOARD_MARGIN, ncurses::COLOR_PAIR(CARD_COLOR_EMPTY));\n}\n\nfn draw_cards(ui: &UI, game: &mut Game) {\n for position in game.board.positions() {\n let card = game.board.top(position);\n if card.is_some() {\n draw_card(position, card.unwrap());\n } else {\n draw_empty(position);\n }\n let (x, y) = card_location(position);\n toggle_highlight_card(x, y, ui.selection.contains(&position));\n if position == ui.cursor_position {\n let offset = (CARD_WIDTH as f32 \/2.0).floor() as i32;\n ncurses::mvprintw(y + CARD_HEIGHT, x + offset, \"*\");\n }\n }\n}\n\n\/\/\/ Draw a card on the board at a position\nfn draw_card(position: Position, card: cards::card::Card) {\n let (x, y) = card_location(position);\n let (color, suit) = layout_suit(card);\n let value = layout_value(card);\n let black = ncurses::COLOR_PAIR(CARD_COLOR_BLACK);\n ncurses::attron(black);\n ncurses::mvprintw(y, x, &value);\n ncurses::attroff(black);\n ncurses::attron(color);\n ncurses::mvprintw(y, x + value.len() as i32, &suit);\n ncurses::attroff(color);\n let spacing = CARD_WIDTH - value.len() as i32 - 1;\n printw_repeat(\" \", spacing, black);\n for i in 1..CARD_HEIGHT - 1 {\n ncurses::mv(y + i, x);\n printw_repeat(\" \", CARD_WIDTH, black);\n }\n ncurses::mv(y + CARD_HEIGHT - 1, x);\n printw_repeat(\" \", spacing, black);\n ncurses::attron(black);\n ncurses::printw(&value);\n ncurses::attroff(black);\n ncurses::attron(color);\n ncurses::printw(&suit);\n ncurses::attroff(color);\n}\n\n\/\/\/ Draw empty slot for a card\nfn 
draw_empty(position: Position) {\n let color = ncurses::COLOR_PAIR(CARD_COLOR_EMPTY);\n let (x, y) = card_location(position);\n ncurses::attron(color);\n ncurses::mvprintw(y, x, \"┌\");\n printw_repeat(\"─\", CARD_WIDTH - 2, color);\n ncurses::mvprintw(y, x + CARD_WIDTH - 1, \"┐\");\n ncurses::attroff(color);\n for i in 1..CARD_HEIGHT - 1 {\n ncurses::attron(color);\n ncurses::mvprintw(y + i, x , \"│\");\n printw_repeat(\" \", CARD_WIDTH - 2, color);\n ncurses::attroff(color);\n ncurses::attron(color);\n ncurses::mvprintw(y + i, x + CARD_WIDTH - 1, \"│\");\n ncurses::attroff(color);\n }\n ncurses::attron(color);\n ncurses::mvprintw(y + CARD_HEIGHT - 1, x, \"└\");\n printw_repeat(\"─\", CARD_WIDTH - 2, color);\n ncurses::mvprintw(y + CARD_HEIGHT - 1, x + CARD_WIDTH - 1, \"┘\");\n ncurses::attroff(color);\n}\n\nfn printw_repeat(content: &str, len: i32, color: u64) {\n ncurses::attron(color);\n for _ in 0..len {\n ncurses::printw(content);\n }\n ncurses::attroff(color);\n}\n\nfn toggle_highlight_card(x: i32, y: i32, on: bool) {\n let color = ncurses::COLOR_PAIR(SELECTED_COLOR);\n ncurses::attron(color);\n ncurses::mvprintw(y - 1, x - 1, if on {\"┌\"} else {\" \"});\n ncurses::attroff(color);\n ncurses::mv(y - 1, x);\n printw_repeat(if on {\"─\"} else {\" \"}, CARD_WIDTH, color);\n ncurses::mv(y + CARD_HEIGHT, x);\n printw_repeat(if on {\"─\"} else {\" \"}, CARD_WIDTH, color);\n ncurses::attron(color);\n ncurses::mvprintw(y - 1, x + CARD_WIDTH, if on {\"┐\"} else {\" \"});\n for i in 0..CARD_HEIGHT {\n ncurses::mvprintw(y + i, x - 1, if on {\"│\"} else {\" \"});\n ncurses::mvprintw(y + i, x + CARD_WIDTH, if on {\"│\"} else {\" \"});\n }\n ncurses::mvprintw(y + CARD_HEIGHT, x - 1, if on {\"└\"} else {\" \"});\n ncurses::mvprintw(y + CARD_HEIGHT, x + CARD_WIDTH, if on {\"┘\"} else {\" \"});\n ncurses::attroff(color);\n}\n\nfn layout_suit(card: cards::card::Card) -> (u64, String) {\n let black = ncurses::COLOR_PAIR(CARD_COLOR_BLACK);\n let red = ncurses::COLOR_PAIR(CARD_COLOR_RED);\n return match card.suit {\n cards::card::Suit::Diamonds => (red, \"\\u{2666}\".to_string()),\n cards::card::Suit::Clubs => (black, \"\\u{2663}\".to_string()),\n cards::card::Suit::Spades => (black, \"\\u{2660}\".to_string()),\n cards::card::Suit::Hearts => (red, \"\\u{2665}\".to_string()),\n }\n}\n\nfn layout_value(card: cards::card::Card) -> String {\n return match card.value {\n cards::card::Value::Ace => \"A\",\n cards::card::Value::Two => \"2\",\n cards::card::Value::Three => \"3\",\n cards::card::Value::Four => \"4\",\n cards::card::Value::Five => \"5\",\n cards::card::Value::Six => \"6\",\n cards::card::Value::Seven => \"7\",\n cards::card::Value::Eight => \"8\",\n cards::card::Value::Nine => \"9\",\n cards::card::Value::Ten => \"10\",\n cards::card::Value::Jack => \"J\",\n cards::card::Value::Queen => \"Q\",\n cards::card::Value::King => \"K\",\n }.to_string()\n}\n\n\/\/\/ Location (x, y) for a card position\nfn card_location(position: Position) -> (i32, i32) {\n let left = BOARD_MARGIN + CARD_MARGIN;\n let center = BOARD_MARGIN + CARD_MARGIN*2 + CARD_WIDTH;\n let right = BOARD_MARGIN + CARD_MARGIN*3 + CARD_WIDTH*2;\n let top = BOARD_MARGIN + CARD_MARGIN;\n let middle = BOARD_MARGIN + CARD_MARGIN*2 + CARD_HEIGHT;\n let bottom = BOARD_MARGIN + CARD_MARGIN*3 + CARD_HEIGHT*2;\n match (position.x, position.y) {\n (HPosition::Left, VPosition::Top) => (left, top),\n (HPosition::Left, VPosition::Middle) => (left, middle),\n (HPosition::Left, VPosition::Bottom) => (left, bottom),\n (HPosition::Center, VPosition::Top) 
=> (center, top),\n (HPosition::Center, VPosition::Middle) => (center, middle),\n (HPosition::Center, VPosition::Bottom) => (center, bottom),\n (HPosition::Right, VPosition::Top) => (right, top),\n (HPosition::Right, VPosition::Middle) => (right, middle),\n (HPosition::Right, VPosition::Bottom) => (right, bottom),\n }\n}\nPrint clear bonuses in empty stacksextern crate cards;\nextern crate libthyme;\nextern crate ncurses;\n\nuse super::{Action,UI};\nuse libthyme::board::{Position,HPosition,VPosition};\nuse libthyme::game::Game;\nuse libthyme::score::Scorer;\nuse std::cmp;\n\nconst CARD_WIDTH: i32 = 7;\nconst CARD_HEIGHT: i32 = 5;\nconst CARD_MARGIN: i32 = 2;\nconst BOARD_MARGIN: i32 = 2;\nconst STATUS_HEIGHT: i32 = 2;\n\nconst CARD_COLOR_BLACK: i16 = 1; \/\/ black on white\nconst CARD_COLOR_RED: i16 = 2; \/\/ red on white\nconst CARD_COLOR_EMPTY: i16 = 4; \/\/ white on black\nconst SELECTED_COLOR: i16 = 5; \/\/ yellow on black\nconst CURSOR_INFO_COLOR: i16 = 6; \/\/ cyan on black\nconst GAME_INFO_COLOR: i16 = 7; \/\/ green on black\nconst BG_COLOR: i16 = ncurses::COLOR_BLACK;\nconst CARD_BG_COLOR: i16 = ncurses::COLOR_WHITE;\n\n\/\/\/ Set up the UI\npub fn initialize_screen() {\n ncurses::setlocale(ncurses::LcCategory::ctype, \"\");\n ncurses::initscr();\n ncurses::noecho();\n ncurses::start_color();\n ncurses::keypad(ncurses::stdscr, true);\n ncurses::curs_set(ncurses::CURSOR_VISIBILITY::CURSOR_INVISIBLE);\n ncurses::init_pair(CARD_COLOR_BLACK, ncurses::COLOR_BLACK, CARD_BG_COLOR);\n ncurses::init_pair(CARD_COLOR_RED, ncurses::COLOR_RED, CARD_BG_COLOR);\n ncurses::init_pair(CARD_COLOR_EMPTY, ncurses::COLOR_WHITE, BG_COLOR);\n ncurses::init_pair(SELECTED_COLOR, ncurses::COLOR_YELLOW, BG_COLOR);\n ncurses::init_pair(CURSOR_INFO_COLOR, ncurses::COLOR_CYAN, BG_COLOR);\n ncurses::init_pair(GAME_INFO_COLOR, ncurses::COLOR_GREEN, BG_COLOR);\n}\n\n\/\/\/ Redraw a UI in the current screen\npub fn redraw(ui: &UI, game: &mut Game, refresh: bool) {\n if refresh {\n ncurses::clear();\n ncurses::bkgd(ncurses::COLOR_PAIR(CARD_COLOR_EMPTY));\n }\n if validate_screen_size() {\n write_title(game);\n draw_cards(ui, game);\n write_cursor_message(ui, game);\n write_message(&ui.message);\n }\n ncurses::refresh();\n}\n\n\/\/\/ Tear down the UI\npub fn cleanup() {\n ncurses::endwin();\n}\n\n\/\/\/ Process input from the user\n\/\/\/\n\/\/\/ Known inputs:\n\/\/\/ - Arrow keys: Move cursor between various positions on the board\n\/\/\/ - Q: Quit game\n\/\/\/ - H: Hint (unimplemented)\n\/\/\/ - ?: Help\n\/\/\/ - Space: Toggle position selection\n\/\/\/ - Return: Play move, clear selection\npub fn get_action() -> Action {\n return match ncurses::getch() {\n ncurses::KEY_LEFT => Action::CursorLeft,\n ncurses::KEY_RIGHT => Action::CursorRight,\n ncurses::KEY_UP => Action::CursorUp,\n ncurses::KEY_DOWN => Action::CursorDown,\n ncurses::KEY_RESIZE => Action::Resize,\n ncurses::KEY_ENTER | 13 | 10 => Action::Play,\n 32 => Action::ToggleSelection, \/\/ Space\n 113 => Action::Quit, \/\/ Q\n 104 => Action::Hint, \/\/ H\n 63 => Action::Help, \/\/ ?\n _ => Action::Unknown\n }\n}\n\n\/\/\/ Check that the content can fit\nfn validate_screen_size() -> bool {\n let min_height = BOARD_MARGIN*2 + CARD_MARGIN*4 + STATUS_HEIGHT + CARD_HEIGHT*3;\n if ncurses::LINES < min_height || ncurses::COLS < 50 {\n write_message(&format!(\n \"Please resize your terminal to be at least 50x{}\",\n min_height));\n return false\n }\n return true\n}\n\n\/\/\/ Print the game title and status info\nfn write_title(game: &mut Game) {\n printw_margin(0, 
0);\n ncurses::attron(ncurses::A_BOLD());\n ncurses::printw(\"Thyme\");\n ncurses::attroff(ncurses::A_BOLD());\n ncurses::printw(&format!(\" - Score: {}\", game.score()));\n printw_margin(0, 1);\n let (_, suit) = layout_suit(game.board.lucky_card);\n let info = ncurses::COLOR_PAIR(GAME_INFO_COLOR);\n ncurses::attron(info);\n ncurses::printw(&format!(\"Lucky Suit: {} Discards Left: {}\/{}\", suit,\n game.discards_allowed, game.discards_allowed_max));\n ncurses::clrtoeol();\n ncurses::attroff(info);\n}\n\n\/\/\/ Print the message at the bottom of the window\nfn write_message(message: &str) {\n printw_margin(0, ncurses::LINES - 1);\n ncurses::printw(message);\n ncurses::clrtoeol();\n}\n\nfn write_cursor_message(ui: &UI, game: &Game) {\n let stacked_cards = game.board.count_cards(ui.cursor_position);\n let all_cards = game.board.count_all_cards();\n let message = format!(\"*{}\/{} cards in the stack\", stacked_cards, all_cards);\n let color = ncurses::COLOR_PAIR(CURSOR_INFO_COLOR);\n printw_margin(0, ncurses::LINES - 2);\n ncurses::attron(color);\n ncurses::printw(&message);\n ncurses::clrtoeol();\n ncurses::attroff(color);\n}\n\nfn printw_margin(x: i32, y: i32) {\n ncurses::mv(y, x);\n printw_repeat(\" \", BOARD_MARGIN, ncurses::COLOR_PAIR(CARD_COLOR_EMPTY));\n}\n\nfn draw_cards(ui: &UI, game: &mut Game) {\n for position in game.board.positions() {\n let card = game.board.top(position);\n if card.is_some() {\n draw_card(position, card.unwrap());\n } else {\n draw_empty(game, position);\n }\n let (x, y) = card_location(position);\n toggle_highlight_card(x, y, ui.selection.contains(&position));\n if position == ui.cursor_position {\n let offset = (CARD_WIDTH as f32 \/2.0).floor() as i32;\n ncurses::mvprintw(y + CARD_HEIGHT, x + offset, \"*\");\n }\n }\n}\n\n\/\/\/ Draw a card on the board at a position\nfn draw_card(position: Position, card: cards::card::Card) {\n let (x, y) = card_location(position);\n let (color, suit) = layout_suit(card);\n let value = layout_value(card);\n let black = ncurses::COLOR_PAIR(CARD_COLOR_BLACK);\n ncurses::attron(black);\n ncurses::mvprintw(y, x, &value);\n ncurses::attroff(black);\n ncurses::attron(color);\n ncurses::mvprintw(y, x + value.len() as i32, &suit);\n ncurses::attroff(color);\n let spacing = CARD_WIDTH - value.len() as i32 - 1;\n printw_repeat(\" \", spacing, black);\n for i in 1..CARD_HEIGHT - 1 {\n ncurses::mv(y + i, x);\n printw_repeat(\" \", CARD_WIDTH, black);\n }\n ncurses::mv(y + CARD_HEIGHT - 1, x);\n printw_repeat(\" \", spacing, black);\n ncurses::attron(black);\n ncurses::printw(&value);\n ncurses::attroff(black);\n ncurses::attron(color);\n ncurses::printw(&suit);\n ncurses::attroff(color);\n}\n\n\/\/\/ Draw empty slot for a card\nfn draw_empty(game: &Game, position: Position) {\n let color = ncurses::COLOR_PAIR(CARD_COLOR_EMPTY);\n let (x, y) = card_location(position);\n ncurses::attron(color);\n ncurses::mvprintw(y, x, \"┌\");\n printw_repeat(\"─\", CARD_WIDTH - 2, color);\n ncurses::mvprintw(y, x + CARD_WIDTH - 1, \"┐\");\n ncurses::attroff(color);\n let gap_height = CARD_HEIGHT - 1;\n let bonus_height = gap_height\/2;\n for i in 1..gap_height {\n ncurses::attron(color);\n ncurses::mvprintw(y + i, x , \"│\");\n if i == bonus_height {\n let bonus = format!(\"{}\", game.scorer.bonus(position));\n let available_width = cmp::max(0, CARD_WIDTH - 2 - bonus.len() as i32);\n let lede = available_width\/2;\n printw_repeat(\" \", lede, color);\n ncurses::printw(&bonus);\n printw_repeat(\" \", lede, color);\n } else {\n printw_repeat(\" \", 
CARD_WIDTH - 2, color);\n }\n ncurses::attroff(color);\n ncurses::attron(color);\n ncurses::mvprintw(y + i, x + CARD_WIDTH - 1, \"│\");\n ncurses::attroff(color);\n }\n ncurses::attron(color);\n ncurses::mvprintw(y + CARD_HEIGHT - 1, x, \"└\");\n printw_repeat(\"─\", CARD_WIDTH - 2, color);\n ncurses::mvprintw(y + CARD_HEIGHT - 1, x + CARD_WIDTH - 1, \"┘\");\n ncurses::attroff(color);\n}\n\nfn printw_repeat(content: &str, len: i32, color: u64) {\n ncurses::attron(color);\n for _ in 0..len {\n ncurses::printw(content);\n }\n ncurses::attroff(color);\n}\n\nfn toggle_highlight_card(x: i32, y: i32, on: bool) {\n let color = ncurses::COLOR_PAIR(SELECTED_COLOR);\n ncurses::attron(color);\n ncurses::mvprintw(y - 1, x - 1, if on {\"┌\"} else {\" \"});\n ncurses::attroff(color);\n ncurses::mv(y - 1, x);\n printw_repeat(if on {\"─\"} else {\" \"}, CARD_WIDTH, color);\n ncurses::mv(y + CARD_HEIGHT, x);\n printw_repeat(if on {\"─\"} else {\" \"}, CARD_WIDTH, color);\n ncurses::attron(color);\n ncurses::mvprintw(y - 1, x + CARD_WIDTH, if on {\"┐\"} else {\" \"});\n for i in 0..CARD_HEIGHT {\n ncurses::mvprintw(y + i, x - 1, if on {\"│\"} else {\" \"});\n ncurses::mvprintw(y + i, x + CARD_WIDTH, if on {\"│\"} else {\" \"});\n }\n ncurses::mvprintw(y + CARD_HEIGHT, x - 1, if on {\"└\"} else {\" \"});\n ncurses::mvprintw(y + CARD_HEIGHT, x + CARD_WIDTH, if on {\"┘\"} else {\" \"});\n ncurses::attroff(color);\n}\n\nfn layout_suit(card: cards::card::Card) -> (u64, String) {\n let black = ncurses::COLOR_PAIR(CARD_COLOR_BLACK);\n let red = ncurses::COLOR_PAIR(CARD_COLOR_RED);\n return match card.suit {\n cards::card::Suit::Diamonds => (red, \"\\u{2666}\".to_string()),\n cards::card::Suit::Clubs => (black, \"\\u{2663}\".to_string()),\n cards::card::Suit::Spades => (black, \"\\u{2660}\".to_string()),\n cards::card::Suit::Hearts => (red, \"\\u{2665}\".to_string()),\n }\n}\n\nfn layout_value(card: cards::card::Card) -> String {\n return match card.value {\n cards::card::Value::Ace => \"A\",\n cards::card::Value::Two => \"2\",\n cards::card::Value::Three => \"3\",\n cards::card::Value::Four => \"4\",\n cards::card::Value::Five => \"5\",\n cards::card::Value::Six => \"6\",\n cards::card::Value::Seven => \"7\",\n cards::card::Value::Eight => \"8\",\n cards::card::Value::Nine => \"9\",\n cards::card::Value::Ten => \"10\",\n cards::card::Value::Jack => \"J\",\n cards::card::Value::Queen => \"Q\",\n cards::card::Value::King => \"K\",\n }.to_string()\n}\n\n\/\/\/ Location (x, y) for a card position\nfn card_location(position: Position) -> (i32, i32) {\n let left = BOARD_MARGIN + CARD_MARGIN;\n let center = BOARD_MARGIN + CARD_MARGIN*2 + CARD_WIDTH;\n let right = BOARD_MARGIN + CARD_MARGIN*3 + CARD_WIDTH*2;\n let top = BOARD_MARGIN + CARD_MARGIN;\n let middle = BOARD_MARGIN + CARD_MARGIN*2 + CARD_HEIGHT;\n let bottom = BOARD_MARGIN + CARD_MARGIN*3 + CARD_HEIGHT*2;\n match (position.x, position.y) {\n (HPosition::Left, VPosition::Top) => (left, top),\n (HPosition::Left, VPosition::Middle) => (left, middle),\n (HPosition::Left, VPosition::Bottom) => (left, bottom),\n (HPosition::Center, VPosition::Top) => (center, top),\n (HPosition::Center, VPosition::Middle) => (center, middle),\n (HPosition::Center, VPosition::Bottom) => (center, bottom),\n (HPosition::Right, VPosition::Top) => (right, top),\n (HPosition::Right, VPosition::Middle) => (right, middle),\n (HPosition::Right, VPosition::Bottom) => (right, bottom),\n }\n}\n<|endoftext|>"} {"text":"Initialise package_version.rs<|endoftext|>"} {"text":"easage-pack: Make kind 
optional and BIGF default<|endoftext|>"} {"text":"stops2mimir addedextern crate rustc_serialize;\nextern crate docopt;\nextern crate csv;\nextern crate mimir;\nuse mimir::rubber::Rubber;\n#[macro_use]\nextern crate log;\n\nuse docopt::Docopt;\n\nconst USAGE: &'static str =\n \"\nUsage:\n stops2mimir --help\n stops2mimir --input= \\\n [--connection-string=] [--dataset=]\n\nOptions:\n -h, --help \\\n Show this message.\n -i, --input= NTFS stops.txt file.\n -c, \\\n --connection-string=\n Elasticsearch \\\n parameters, [default: http:\/\/localhost:9200\/munin]\n -d, --dataset=\n\";\n\n#[derive(Debug, RustcDecodable)]\nstruct Args {\n flag_input: String,\n flag_dataset: String,\n flag_connection_string: String,\n}\n\nstruct StopPointIter<'a, R: std::io::Read + 'a> {\n iter: csv::StringRecords<'a, R>,\n stop_id_pos: usize,\n stop_lat_pos: usize,\n stop_lon_pos: usize,\n stop_name_pos: usize,\n location_type_pos: Option,\n stop_visible_pos: Option,\n}\nimpl<'a, R: std::io::Read + 'a> StopPointIter<'a, R> {\n fn new(r: &'a mut csv::Reader) -> Option {\n let headers = if let Ok(hs) = r.headers() {\n hs\n } else {\n return None;\n };\n let get = |name| headers.iter().position(|s| s == name);\n let stop_id_pos = if let Some(pos) = get(\"stop_id\") {\n pos\n } else {\n return None;\n };\n let stop_lat_pos = if let Some(pos) = get(\"stop_lat\") {\n pos\n } else {\n return None;\n };\n let stop_lon_pos = if let Some(pos) = get(\"stop_lon\") {\n pos\n } else {\n return None;\n };\n let stop_name_pos = if let Some(pos) = get(\"stop_name\") {\n pos\n } else {\n return None;\n };\n\n Some(StopPointIter {\n iter: r.records(),\n stop_id_pos: stop_id_pos,\n stop_lat_pos: stop_lat_pos,\n stop_lon_pos: stop_lon_pos,\n stop_name_pos: stop_name_pos,\n location_type_pos: get(\"location_type\"),\n stop_visible_pos: get(\"visible\"),\n })\n }\n fn get_location_type(&self, record: &[String]) -> Option {\n self.location_type_pos.and_then(|pos| record.get(pos).and_then(|s| s.parse().ok()))\n }\n fn get_visible(&self, record: &[String]) -> Option {\n self.stop_visible_pos.and_then(|pos| record.get(pos).and_then(|s| s.parse().ok()))\n }\n}\nimpl<'a, R: std::io::Read + 'a> Iterator for StopPointIter<'a, R> {\n type Item = csv::Result;\n fn next(&mut self) -> Option {\n fn get(record: &[String], pos: usize) -> csv::Result<&str> {\n match record.get(pos) {\n Some(s) => Ok(&s),\n None => Err(csv::Error::Decode(format!(\"Failed accessing record '{}'.\", pos))),\n }\n }\n fn parse_f64(s: &str) -> csv::Result {\n s.parse()\n .map_err(|_| csv::Error::Decode(format!(\"Failed converting '{}' from str.\", s)))\n }\n fn is_stop_area(location_type: &Option, visible: &Option) -> csv::Result {\n if (*location_type == Some(1)) && (*visible == Some(0)) {\n Ok(true)\n } else {\n Err(csv::Error::Decode(\"Not stop_area.\".to_string()))\n }\n }\n self.iter.next().map(|r| {\n r.and_then(|r| {\n let stop_id = try!(get(&r, self.stop_id_pos));\n let stop_lat = try!(get(&r, self.stop_lat_pos));\n let stop_lat = try!(parse_f64(stop_lat));\n let stop_lon = try!(get(&r, self.stop_lon_pos));\n let stop_lon = try!(parse_f64(stop_lon));\n let stop_name = try!(get(&r, self.stop_name_pos));\n let location_type = self.get_location_type(&r);\n let visible = self.get_visible(&r);\n try!(is_stop_area(&location_type, &visible));\n Ok(mimir::Stop {\n id: stop_id.to_string(),\n coord: mimir::Coord::new(stop_lat, stop_lon),\n label: stop_name.to_string(),\n weight: 1,\n zip_codes: vec![],\n administrative_regions: vec![],\n name: stop_name.to_string(),\n })\n 
})\n })\n }\n}\n\nfn main() {\n println!(\"Launching stops2mimir...\");\n\n let args: Args = Docopt::new(USAGE)\n .and_then(|dopt| dopt.decode())\n .unwrap_or_else(|e| e.exit());\n\tprintln!(\"args: {:?}\", args);\n println!(\"creation of indexes\");\n let mut rubber = Rubber::new(&args.flag_connection_string);\n\n let mut rdr = csv::Reader::from_file(args.flag_input)\n .unwrap()\n .double_quote(true);\n\n let stops: Vec = StopPointIter::new(&mut rdr)\n .expect(\"Can't find needed fields in the header.\")\n .filter_map(|rc| {\n rc.map_err(|e| println!(\"error at csv line decoding : {}\", e))\n .ok()\n })\n .collect();\n\n println!(\"Importing stops into Mimir\");\n let nb_stops = rubber.index(\"stops\", &args.flag_dataset, stops.iter())\n .unwrap();\n\n println!(\"Nb of indexed stops: {}\", nb_stops);\n\n}\n<|endoftext|>"} {"text":"rm unneeded macro<|endoftext|>"} {"text":"Added constants from prctl.h<|endoftext|>"} {"text":"multi bytes<|endoftext|>"} {"text":"Fix #557 Closes #557<|endoftext|>"} {"text":"added TPCH Q17-like queryextern crate rand;\nextern crate time;\nextern crate columnar;\nextern crate timely;\nextern crate differential_dataflow;\n\nextern crate docopt;\nuse docopt::Docopt;\n\nuse std::thread;\nuse std::fs::File;\nuse std::io::{BufRead, BufReader};\n\nuse timely::example_shared::*;\nuse timely::example_shared::operators::*;\nuse timely::communication::*;\nuse timely::networking::initialize_networking;\nuse timely::networking::initialize_networking_from_file;\n\nuse timely::drain::DrainExt;\n\n\/\/ use differential_dataflow::collection_trace::lookup::UnsignedInt;\n\/\/ use differential_dataflow::collection_trace::LeastUpperBound;\n\nuse differential_dataflow::operators::*;\n\n\/\/ The typical differential dataflow vertex receives updates of the form (key, time, value, update),\n\/\/ where the data are logically partitioned by key, and are then subject to various aggregations by time,\n\/\/ accumulating for each value the update integers. The resulting multiset is the subjected to computation.\n\n\/\/ The implementation I am currently most comfortable with is *conservative* in the sense that it will defer updates\n\/\/ until it has received all updates for a time, at which point it commits these updates permanently. This is done\n\/\/ to avoid issues with running logic on partially formed data, but should also simplify our data management story.\n\/\/ Rather than requiring random access to diffs, we can store them as flat arrays (possibly sorted) and integrate\n\/\/ them using merge techniques. 
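// A minimal sketch (not from the original example) of the update shape described
// above, assuming u32 keys and values, u64 times and i32 signed weights, as used
// elsewhere in this file. It only illustrates the semantics of accumulating weighted
// updates into a multiset; the implementation discussed above keeps diffs in flat
// (possibly sorted) arrays and integrates them by merging, not with a hash map.
fn accumulate(updates: &[(u32, u64, u32, i32)]) -> ::std::collections::HashMap<(u32, u32), i32> {
    use std::collections::HashMap;
    let mut acc: HashMap<(u32, u32), i32> = HashMap::new();
    for &(key, _time, value, weight) in updates {
        // sum the signed weights contributed to each (key, value) pair
        *acc.entry((key, value)).or_insert(0) += weight;
    }
    // records whose weights cancel to zero are no longer part of the multiset
    acc.into_iter().filter(|&(_, weight)| weight != 0).collect()
}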
Updating cached accumulations seems maybe harder w\/o hashmaps, but we'll see...\n\nstatic USAGE: &'static str = \"\nUsage: tpch17 [options] [...]\n\nOptions:\n -w , --workers number of workers per process [default: 1]\n -p , --processid identity of this process [default: 0]\n -n , --processes number of processes involved [default: 1]\n -h , --hosts list of host:port for workers\n\";\n\nfn main() {\n\n let args = Docopt::new(USAGE).and_then(|dopt| dopt.parse()).unwrap_or_else(|e| e.exit());\n\n let workers: u64 = if let Ok(threads) = args.get_str(\"-w\").parse() { threads }\n else { panic!(\"invalid setting for --workers: {}\", args.get_str(\"-t\")) };\n let process_id: u64 = if let Ok(proc_id) = args.get_str(\"-p\").parse() { proc_id }\n else { panic!(\"invalid setting for --processid: {}\", args.get_str(\"-p\")) };\n let processes: u64 = if let Ok(processes) = args.get_str(\"-n\").parse() { processes }\n else { panic!(\"invalid setting for --processes: {}\", args.get_str(\"-n\")) };\n\n println!(\"Starting timely with\");\n println!(\"\\tworkers:\\t{}\", workers);\n println!(\"\\tprocesses:\\t{}\", processes);\n println!(\"\\tprocessid:\\t{}\", process_id);\n\n \/\/ vector holding communicators to use; one per local worker.\n if processes > 1 {\n println!(\"Initializing BinaryCommunicator\");\n\n let hosts = args.get_str(\"-h\");\n let communicators = if hosts != \"\" {\n initialize_networking_from_file(hosts, process_id, workers).ok().expect(\"error initializing networking\")\n }\n else {\n let addresses = (0..processes).map(|index| format!(\"localhost:{}\", 2101 + index).to_string()).collect();\n initialize_networking(addresses, process_id, workers).ok().expect(\"error initializing networking\")\n };\n\n start_main(communicators);\n }\n else if workers > 1 {\n println!(\"Initializing ProcessCommunicator\");\n start_main(ProcessCommunicator::new_vector(workers));\n }\n else {\n println!(\"Initializing ThreadCommunicator\");\n start_main(vec![ThreadCommunicator]);\n };\n}\n\nfn start_main(communicators: Vec) {\n \/\/ let communicators = ProcessCommunicator::new_vector(1);\n let mut guards = Vec::new();\n for communicator in communicators.into_iter() {\n guards.push(thread::Builder::new().name(format!(\"worker thread {}\", communicator.index()))\n .spawn(move || test_dataflow(communicator))\n .unwrap());\n }\n\n for guard in guards { guard.join().unwrap(); }\n}\n\nfn test_dataflow(communicator: C) {\n\n let start = time::precise_time_s();\n let mut computation = GraphRoot::new(communicator);\n\n let (mut parts, mut items) = computation.subcomputation(|builder| {\n\n let (part_input, parts) = builder.new_input::<((u32, String, String), i32)>();\n let (item_input, items) = builder.new_input::<((u32, u32, u64), i32)>();\n\n \/\/ compute the average quantities\n let average = items.group_by_u(|(x,y,_)| (x,y), |k,v| (*k,*v), |_,s,t| {\n let mut sum = 0;\n let mut cnt = 0;\n for &(val,wgt) in s.iter() {\n cnt += wgt;\n sum += val;\n }\n t.push((sum \/ cnt as u32, 1));\n });\n\n \/\/ filter parts by brand and container\n let parts = parts.filter(|x| (x.0).1 == \"Brand#33\" && (x.0).2 == \"WRAP PACK\")\n .map(|((key, _, _), wgt)| (key, wgt));\n\n \/\/ join items against their averages, filter by quantity, remove filter coordinate\n let items = items.join_u(&average, |x| (x.0, (x.1, x.2)), |y| y, |k, x, f| (*k, x.0, x.1, *f))\n .filter(|&((_, q, _, avg),_)| q < avg \/ 5)\n .map(|((key,_,price,_), wgt)| ((key,price), wgt));\n\n \/\/ semi-join against the part keys we retained. 
think of a better way to produce sum ...\n parts.join_u(&items, |k| (k,()), |(k,p)| (k,p), |_,_,p| *p)\n .inspect_batch(|_t,x| println!(\"results: {:?}\", x.len()));\n\n (part_input, item_input)\n });\n\n let mut parts_buffer = Vec::new();\n let parts_reader = BufReader::new(File::open(format!(\"\/Users\/mcsherry\/Desktop\/tpch-sf-10\/part-{}.tbl\", computation.index())).unwrap());\n for line in parts_reader.lines() {\n let text = line.ok().expect(\"read error\");\n let mut fields = text.split(\"|\");\n\n let part_id = fields.next().unwrap().parse::().unwrap();\n fields.next();\n fields.next();\n let brand = fields.next().unwrap().to_owned();\n fields.next();\n fields.next();\n let container = fields.next().unwrap().to_owned();\n\n parts_buffer.push(((part_id, brand, container), 1));\n if parts_buffer.len() == 1024 {\n parts.send_at(0u64, parts_buffer.drain_temp());\n computation.step();\n\n }\n }\n\n parts.send_at(0u64, parts_buffer.drain_temp());\n computation.step();\n\n let mut items_buffer = Vec::new();\n let items_reader = BufReader::new(File::open(format!(\"\/Users\/mcsherry\/Desktop\/tpch-sf-10\/lineitem-{}.tbl\", computation.index())).unwrap());\n for line in items_reader.lines() {\n let text = line.ok().expect(\"read error\");\n let mut fields = text.split(\"|\");\n\n fields.next();\n let item_id = fields.next().unwrap().parse::().unwrap();\n fields.next();\n fields.next();\n let quantity = fields.next().unwrap().parse::().unwrap();\n let extended_price = fields.next().unwrap().parse::().unwrap() as u64;\n\n items_buffer.push(((item_id, quantity, extended_price), 1));\n if items_buffer.len() == 1024 {\n items.send_at(0u64, items_buffer.drain_temp());\n computation.step();\n }\n }\n\n items.send_at(0u64, items_buffer.drain_temp());\n computation.step();\n\n println!(\"data loaded at {}\", time::precise_time_s() - start);\n\n parts.close();\n items.close();\n\n while computation.step() { std::thread::yield_now(); }\n computation.step(); \/\/ shut down\n\n println!(\"computation finished at {}\", time::precise_time_s() - start);\n}\n<|endoftext|>"} {"text":"set1 challenge5: addedextern crate rustc_serialize;\nuse rustc_serialize::hex::ToHex;\n\nfn main() {\n\tlet string = \"Burning 'em, if you ain't quick and nimble\\nI go crazy when I hear a cymbal\".as_bytes();\n\tlet key = \"ICE\".as_bytes();\n\n\tlet mut encrypted: Vec = Vec::new();\n\n\tfor i in 0..string.len() {\n\t\tencrypted.push(string[i] ^ key[i % 3])\n\t}\n\n\tprintln!(\"{}\", encrypted.to_hex());\n}\n<|endoftext|>"} {"text":"docs(examples): SARSALambda|Fourier|MountainCarextern crate rsrl;\n#[macro_use] extern crate slog;\n\nuse rsrl::{run, logging, Parameter, SerialExperiment, Evaluation};\nuse rsrl::agents::memory::Trace;\nuse rsrl::agents::control::td::SARSALambda;\nuse rsrl::domains::{Domain, MountainCar};\nuse rsrl::fa::{Linear, Projector};\nuse rsrl::fa::projection::Fourier;\nuse rsrl::geometry::Space;\nuse rsrl::policies::EpsilonGreedy;\n\n\nfn main() {\n let domain = MountainCar::default();\n let mut agent = {\n let n_actions = domain.action_space().span().into();\n\n \/\/ Build the linear value function using a fourier basis projection and the appropriate\n \/\/ eligibility trace.\n let bases = Fourier::from_space(3, domain.state_space());\n let trace = Trace::replacing(0.7, bases.activation());\n let q_func = Linear::new(bases, n_actions);\n\n \/\/ Build a stochastic behaviour policy with exponential epsilon.\n let eps = Parameter::exponential(0.99, 0.05, 0.99);\n let policy = EpsilonGreedy::new(eps);\n\n 
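        // Assemble the SARSA(lambda) agent from the replacing trace, the linear
        // Q-function and the epsilon-greedy policy; the two trailing constants
        // are presumably the step size and the discount factor.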
SARSALambda::new(trace, q_func, policy, 0.1, 0.99)\n };\n\n let logger = logging::root(logging::stdout());\n let domain_builder = Box::new(MountainCar::default);\n\n \/\/ Training phase:\n let _training_result = {\n \/\/ Start a serial learning experiment up to 1000 steps per episode.\n let e = SerialExperiment::new(&mut agent, domain_builder.clone(), 1000);\n\n \/\/ Realise 1000 episodes of the experiment generator.\n run(e, 1000, Some(logger.clone()))\n };\n\n \/\/ Testing phase:\n let testing_result =\n Evaluation::new(&mut agent, domain_builder).next().unwrap();\n\n info!(logger, \"solution\"; testing_result);\n}\n<|endoftext|>"} {"text":"use std::error::Error;\nuse std::{fmt, env};\n\nuse mir;\nuse ty::{FnSig, Ty, layout};\n\nuse super::{\n MemoryPointer, Lock, AccessKind\n};\n\nuse rustc_const_math::ConstMathErr;\nuse syntax::codemap::Span;\nuse backtrace::Backtrace;\n\n#[derive(Debug, Clone)]\npub struct EvalError<'tcx> {\n pub kind: EvalErrorKind<'tcx>,\n pub backtrace: Option,\n}\n\nimpl<'tcx> From> for EvalError<'tcx> {\n fn from(kind: EvalErrorKind<'tcx>) -> Self {\n let backtrace = match env::var(\"RUST_BACKTRACE\") {\n Ok(ref val) if !val.is_empty() => Some(Backtrace::new_unresolved()),\n _ => None\n };\n EvalError {\n kind,\n backtrace,\n }\n }\n}\n\n#[derive(Debug, Clone)]\npub enum EvalErrorKind<'tcx> {\n \/\/\/ This variant is used by machines to signal their own errors that do not\n \/\/\/ match an existing variant\n MachineError(String),\n FunctionPointerTyMismatch(FnSig<'tcx>, FnSig<'tcx>),\n NoMirFor(String),\n UnterminatedCString(MemoryPointer),\n DanglingPointerDeref,\n DoubleFree,\n InvalidMemoryAccess,\n InvalidFunctionPointer,\n InvalidBool,\n InvalidDiscriminant,\n PointerOutOfBounds {\n ptr: MemoryPointer,\n access: bool,\n allocation_size: u64,\n },\n InvalidNullPointerUsage,\n ReadPointerAsBytes,\n ReadBytesAsPointer,\n InvalidPointerMath,\n ReadUndefBytes,\n DeadLocal,\n InvalidBoolOp(mir::BinOp),\n Unimplemented(String),\n DerefFunctionPointer,\n ExecuteMemory,\n ArrayIndexOutOfBounds(Span, u64, u64),\n Math(Span, ConstMathErr),\n Intrinsic(String),\n OverflowingMath,\n InvalidChar(u128),\n ExecutionTimeLimitReached,\n StackFrameLimitReached,\n OutOfTls,\n TlsOutOfBounds,\n AbiViolation(String),\n AlignmentCheckFailed {\n required: u64,\n has: u64,\n },\n MemoryLockViolation {\n ptr: MemoryPointer,\n len: u64,\n frame: usize,\n access: AccessKind,\n lock: Lock,\n },\n MemoryAcquireConflict {\n ptr: MemoryPointer,\n len: u64,\n kind: AccessKind,\n lock: Lock,\n },\n InvalidMemoryLockRelease {\n ptr: MemoryPointer,\n len: u64,\n frame: usize,\n lock: Lock,\n },\n DeallocatedLockedMemory {\n ptr: MemoryPointer,\n lock: Lock,\n },\n ValidationFailure(String),\n CalledClosureAsFunction,\n VtableForArgumentlessMethod,\n ModifiedConstantMemory,\n AssumptionNotHeld,\n InlineAsm,\n TypeNotPrimitive(Ty<'tcx>),\n ReallocatedWrongMemoryKind(String, String),\n DeallocatedWrongMemoryKind(String, String),\n ReallocateNonBasePtr,\n DeallocateNonBasePtr,\n IncorrectAllocationInformation(u64, usize, u64, u64),\n Layout(layout::LayoutError<'tcx>),\n HeapAllocZeroBytes,\n HeapAllocNonPowerOfTwoAlignment(u64),\n Unreachable,\n Panic,\n ReadFromReturnPointer,\n PathNotFound(Vec),\n UnimplementedTraitSelection,\n \/\/\/ Abort in case type errors are reached\n TypeckError,\n \/\/\/ Cannot compute this constant because it depends on another one\n \/\/\/ which already produced an error\n ReferencedConstant,\n}\n\npub type EvalResult<'tcx, T = ()> = Result>;\n\nimpl<'tcx> Error for 
EvalError<'tcx> {\n fn description(&self) -> &str {\n use self::EvalErrorKind::*;\n match self.kind {\n MachineError(ref inner) => inner,\n FunctionPointerTyMismatch(..) =>\n \"tried to call a function through a function pointer of a different type\",\n InvalidMemoryAccess =>\n \"tried to access memory through an invalid pointer\",\n DanglingPointerDeref =>\n \"dangling pointer was dereferenced\",\n DoubleFree =>\n \"tried to deallocate dangling pointer\",\n InvalidFunctionPointer =>\n \"tried to use a function pointer after offsetting it\",\n InvalidBool =>\n \"invalid boolean value read\",\n InvalidDiscriminant =>\n \"invalid enum discriminant value read\",\n PointerOutOfBounds { .. } =>\n \"pointer offset outside bounds of allocation\",\n InvalidNullPointerUsage =>\n \"invalid use of NULL pointer\",\n MemoryLockViolation { .. } =>\n \"memory access conflicts with lock\",\n MemoryAcquireConflict { .. } =>\n \"new memory lock conflicts with existing lock\",\n ValidationFailure(..) =>\n \"type validation failed\",\n InvalidMemoryLockRelease { .. } =>\n \"invalid attempt to release write lock\",\n DeallocatedLockedMemory { .. } =>\n \"tried to deallocate memory in conflict with a lock\",\n ReadPointerAsBytes =>\n \"a raw memory access tried to access part of a pointer value as raw bytes\",\n ReadBytesAsPointer =>\n \"a memory access tried to interpret some bytes as a pointer\",\n InvalidPointerMath =>\n \"attempted to do invalid arithmetic on pointers that would leak base addresses, e.g. comparing pointers into different allocations\",\n ReadUndefBytes =>\n \"attempted to read undefined bytes\",\n DeadLocal =>\n \"tried to access a dead local variable\",\n InvalidBoolOp(_) =>\n \"invalid boolean operation\",\n Unimplemented(ref msg) => msg,\n DerefFunctionPointer =>\n \"tried to dereference a function pointer\",\n ExecuteMemory =>\n \"tried to treat a memory pointer as a function pointer\",\n ArrayIndexOutOfBounds(..) =>\n \"array index out of bounds\",\n Math(..) =>\n \"mathematical operation failed\",\n Intrinsic(..) =>\n \"intrinsic failed\",\n OverflowingMath =>\n \"attempted to do overflowing math\",\n NoMirFor(..) =>\n \"mir not found\",\n InvalidChar(..) 
=>\n \"tried to interpret an invalid 32-bit value as a char\",\n ExecutionTimeLimitReached =>\n \"the expression was too complex to be evaluated or resulted in an infinite loop\",\n StackFrameLimitReached =>\n \"reached the configured maximum number of stack frames\",\n OutOfTls =>\n \"reached the maximum number of representable TLS keys\",\n TlsOutOfBounds =>\n \"accessed an invalid (unallocated) TLS key\",\n AbiViolation(ref msg) => msg,\n AlignmentCheckFailed{..} =>\n \"tried to execute a misaligned read or write\",\n CalledClosureAsFunction =>\n \"tried to call a closure through a function pointer\",\n VtableForArgumentlessMethod =>\n \"tried to call a vtable function without arguments\",\n ModifiedConstantMemory =>\n \"tried to modify constant memory\",\n AssumptionNotHeld =>\n \"`assume` argument was false\",\n InlineAsm =>\n \"miri does not support inline assembly\",\n TypeNotPrimitive(_) =>\n \"expected primitive type, got nonprimitive\",\n ReallocatedWrongMemoryKind(_, _) =>\n \"tried to reallocate memory from one kind to another\",\n DeallocatedWrongMemoryKind(_, _) =>\n \"tried to deallocate memory of the wrong kind\",\n ReallocateNonBasePtr =>\n \"tried to reallocate with a pointer not to the beginning of an existing object\",\n DeallocateNonBasePtr =>\n \"tried to deallocate with a pointer not to the beginning of an existing object\",\n IncorrectAllocationInformation(..) =>\n \"tried to deallocate or reallocate using incorrect alignment or size\",\n Layout(_) =>\n \"rustc layout computation failed\",\n UnterminatedCString(_) =>\n \"attempted to get length of a null terminated string, but no null found before end of allocation\",\n HeapAllocZeroBytes =>\n \"tried to re-, de- or allocate zero bytes on the heap\",\n HeapAllocNonPowerOfTwoAlignment(_) =>\n \"tried to re-, de-, or allocate heap memory with alignment that is not a power of two\",\n Unreachable =>\n \"entered unreachable code\",\n Panic =>\n \"the evaluated program panicked\",\n ReadFromReturnPointer =>\n \"tried to read from the return pointer\",\n EvalErrorKind::PathNotFound(_) =>\n \"a path could not be resolved, maybe the crate is not loaded\",\n UnimplementedTraitSelection =>\n \"there were unresolved type arguments during trait selection\",\n TypeckError =>\n \"encountered constants with type errors, stopping evaluation\",\n ReferencedConstant =>\n \"referenced constant has errors\",\n }\n }\n}\n\nimpl<'tcx> fmt::Display for EvalError<'tcx> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n use self::EvalErrorKind::*;\n match self.kind {\n PointerOutOfBounds { ptr, access, allocation_size } => {\n write!(f, \"{} at offset {}, outside bounds of allocation {} which has size {}\",\n if access { \"memory access\" } else { \"pointer computed\" },\n ptr.offset, ptr.alloc_id, allocation_size)\n },\n MemoryLockViolation { ptr, len, frame, access, ref lock } => {\n write!(f, \"{:?} access by frame {} at {:?}, size {}, is in conflict with lock {:?}\",\n access, frame, ptr, len, lock)\n }\n MemoryAcquireConflict { ptr, len, kind, ref lock } => {\n write!(f, \"new {:?} lock at {:?}, size {}, is in conflict with lock {:?}\",\n kind, ptr, len, lock)\n }\n InvalidMemoryLockRelease { ptr, len, frame, ref lock } => {\n write!(f, \"frame {} tried to release memory write lock at {:?}, size {}, but cannot release lock {:?}\",\n frame, ptr, len, lock)\n }\n DeallocatedLockedMemory { ptr, ref lock } => {\n write!(f, \"tried to deallocate memory at {:?} in conflict with lock {:?}\",\n ptr, lock)\n }\n ValidationFailure(ref 
err) => {\n write!(f, \"type validation failed: {}\", err)\n }\n NoMirFor(ref func) => write!(f, \"no mir for `{}`\", func),\n FunctionPointerTyMismatch(sig, got) =>\n write!(f, \"tried to call a function with sig {} through a function pointer of type {}\", sig, got),\n ArrayIndexOutOfBounds(span, len, index) =>\n write!(f, \"index out of bounds: the len is {} but the index is {} at {:?}\", len, index, span),\n ReallocatedWrongMemoryKind(ref old, ref new) =>\n write!(f, \"tried to reallocate memory from {} to {}\", old, new),\n DeallocatedWrongMemoryKind(ref old, ref new) =>\n write!(f, \"tried to deallocate {} memory but gave {} as the kind\", old, new),\n Math(_, ref err) =>\n write!(f, \"{}\", err.description()),\n Intrinsic(ref err) =>\n write!(f, \"{}\", err),\n InvalidChar(c) =>\n write!(f, \"tried to interpret an invalid 32-bit value as a char: {}\", c),\n AlignmentCheckFailed { required, has } =>\n write!(f, \"tried to access memory with alignment {}, but alignment {} is required\",\n has, required),\n TypeNotPrimitive(ty) =>\n write!(f, \"expected primitive type, got {}\", ty),\n Layout(ref err) =>\n write!(f, \"rustc layout computation failed: {:?}\", err),\n PathNotFound(ref path) =>\n write!(f, \"Cannot find path {:?}\", path),\n MachineError(ref inner) =>\n write!(f, \"{}\", inner),\n IncorrectAllocationInformation(size, size2, align, align2) =>\n write!(f, \"incorrect alloc info: expected size {} and align {}, got size {} and align {}\", size, align, size2, align2),\n _ => write!(f, \"{}\", self.description()),\n }\n }\n}\nRollup merge of #49083 - oli-obk:mopsgeschwindigkeit, r=michaelwoeristeruse std::error::Error;\nuse std::{fmt, env};\n\nuse mir;\nuse ty::{FnSig, Ty, layout};\n\nuse super::{\n MemoryPointer, Lock, AccessKind\n};\n\nuse rustc_const_math::ConstMathErr;\nuse syntax::codemap::Span;\nuse backtrace::Backtrace;\n\n#[derive(Debug, Clone)]\npub struct EvalError<'tcx> {\n pub kind: EvalErrorKind<'tcx>,\n pub backtrace: Option,\n}\n\nimpl<'tcx> From> for EvalError<'tcx> {\n fn from(kind: EvalErrorKind<'tcx>) -> Self {\n let backtrace = match env::var(\"MIRI_BACKTRACE\") {\n Ok(ref val) if !val.is_empty() => Some(Backtrace::new_unresolved()),\n _ => None\n };\n EvalError {\n kind,\n backtrace,\n }\n }\n}\n\n#[derive(Debug, Clone)]\npub enum EvalErrorKind<'tcx> {\n \/\/\/ This variant is used by machines to signal their own errors that do not\n \/\/\/ match an existing variant\n MachineError(String),\n FunctionPointerTyMismatch(FnSig<'tcx>, FnSig<'tcx>),\n NoMirFor(String),\n UnterminatedCString(MemoryPointer),\n DanglingPointerDeref,\n DoubleFree,\n InvalidMemoryAccess,\n InvalidFunctionPointer,\n InvalidBool,\n InvalidDiscriminant,\n PointerOutOfBounds {\n ptr: MemoryPointer,\n access: bool,\n allocation_size: u64,\n },\n InvalidNullPointerUsage,\n ReadPointerAsBytes,\n ReadBytesAsPointer,\n InvalidPointerMath,\n ReadUndefBytes,\n DeadLocal,\n InvalidBoolOp(mir::BinOp),\n Unimplemented(String),\n DerefFunctionPointer,\n ExecuteMemory,\n ArrayIndexOutOfBounds(Span, u64, u64),\n Math(Span, ConstMathErr),\n Intrinsic(String),\n OverflowingMath,\n InvalidChar(u128),\n ExecutionTimeLimitReached,\n StackFrameLimitReached,\n OutOfTls,\n TlsOutOfBounds,\n AbiViolation(String),\n AlignmentCheckFailed {\n required: u64,\n has: u64,\n },\n MemoryLockViolation {\n ptr: MemoryPointer,\n len: u64,\n frame: usize,\n access: AccessKind,\n lock: Lock,\n },\n MemoryAcquireConflict {\n ptr: MemoryPointer,\n len: u64,\n kind: AccessKind,\n lock: Lock,\n },\n InvalidMemoryLockRelease {\n 
ptr: MemoryPointer,\n len: u64,\n frame: usize,\n lock: Lock,\n },\n DeallocatedLockedMemory {\n ptr: MemoryPointer,\n lock: Lock,\n },\n ValidationFailure(String),\n CalledClosureAsFunction,\n VtableForArgumentlessMethod,\n ModifiedConstantMemory,\n AssumptionNotHeld,\n InlineAsm,\n TypeNotPrimitive(Ty<'tcx>),\n ReallocatedWrongMemoryKind(String, String),\n DeallocatedWrongMemoryKind(String, String),\n ReallocateNonBasePtr,\n DeallocateNonBasePtr,\n IncorrectAllocationInformation(u64, usize, u64, u64),\n Layout(layout::LayoutError<'tcx>),\n HeapAllocZeroBytes,\n HeapAllocNonPowerOfTwoAlignment(u64),\n Unreachable,\n Panic,\n ReadFromReturnPointer,\n PathNotFound(Vec),\n UnimplementedTraitSelection,\n \/\/\/ Abort in case type errors are reached\n TypeckError,\n \/\/\/ Cannot compute this constant because it depends on another one\n \/\/\/ which already produced an error\n ReferencedConstant,\n}\n\npub type EvalResult<'tcx, T = ()> = Result>;\n\nimpl<'tcx> Error for EvalError<'tcx> {\n fn description(&self) -> &str {\n use self::EvalErrorKind::*;\n match self.kind {\n MachineError(ref inner) => inner,\n FunctionPointerTyMismatch(..) =>\n \"tried to call a function through a function pointer of a different type\",\n InvalidMemoryAccess =>\n \"tried to access memory through an invalid pointer\",\n DanglingPointerDeref =>\n \"dangling pointer was dereferenced\",\n DoubleFree =>\n \"tried to deallocate dangling pointer\",\n InvalidFunctionPointer =>\n \"tried to use a function pointer after offsetting it\",\n InvalidBool =>\n \"invalid boolean value read\",\n InvalidDiscriminant =>\n \"invalid enum discriminant value read\",\n PointerOutOfBounds { .. } =>\n \"pointer offset outside bounds of allocation\",\n InvalidNullPointerUsage =>\n \"invalid use of NULL pointer\",\n MemoryLockViolation { .. } =>\n \"memory access conflicts with lock\",\n MemoryAcquireConflict { .. } =>\n \"new memory lock conflicts with existing lock\",\n ValidationFailure(..) =>\n \"type validation failed\",\n InvalidMemoryLockRelease { .. } =>\n \"invalid attempt to release write lock\",\n DeallocatedLockedMemory { .. } =>\n \"tried to deallocate memory in conflict with a lock\",\n ReadPointerAsBytes =>\n \"a raw memory access tried to access part of a pointer value as raw bytes\",\n ReadBytesAsPointer =>\n \"a memory access tried to interpret some bytes as a pointer\",\n InvalidPointerMath =>\n \"attempted to do invalid arithmetic on pointers that would leak base addresses, e.g. comparing pointers into different allocations\",\n ReadUndefBytes =>\n \"attempted to read undefined bytes\",\n DeadLocal =>\n \"tried to access a dead local variable\",\n InvalidBoolOp(_) =>\n \"invalid boolean operation\",\n Unimplemented(ref msg) => msg,\n DerefFunctionPointer =>\n \"tried to dereference a function pointer\",\n ExecuteMemory =>\n \"tried to treat a memory pointer as a function pointer\",\n ArrayIndexOutOfBounds(..) =>\n \"array index out of bounds\",\n Math(..) =>\n \"mathematical operation failed\",\n Intrinsic(..) =>\n \"intrinsic failed\",\n OverflowingMath =>\n \"attempted to do overflowing math\",\n NoMirFor(..) =>\n \"mir not found\",\n InvalidChar(..) 
=>\n \"tried to interpret an invalid 32-bit value as a char\",\n ExecutionTimeLimitReached =>\n \"the expression was too complex to be evaluated or resulted in an infinite loop\",\n StackFrameLimitReached =>\n \"reached the configured maximum number of stack frames\",\n OutOfTls =>\n \"reached the maximum number of representable TLS keys\",\n TlsOutOfBounds =>\n \"accessed an invalid (unallocated) TLS key\",\n AbiViolation(ref msg) => msg,\n AlignmentCheckFailed{..} =>\n \"tried to execute a misaligned read or write\",\n CalledClosureAsFunction =>\n \"tried to call a closure through a function pointer\",\n VtableForArgumentlessMethod =>\n \"tried to call a vtable function without arguments\",\n ModifiedConstantMemory =>\n \"tried to modify constant memory\",\n AssumptionNotHeld =>\n \"`assume` argument was false\",\n InlineAsm =>\n \"miri does not support inline assembly\",\n TypeNotPrimitive(_) =>\n \"expected primitive type, got nonprimitive\",\n ReallocatedWrongMemoryKind(_, _) =>\n \"tried to reallocate memory from one kind to another\",\n DeallocatedWrongMemoryKind(_, _) =>\n \"tried to deallocate memory of the wrong kind\",\n ReallocateNonBasePtr =>\n \"tried to reallocate with a pointer not to the beginning of an existing object\",\n DeallocateNonBasePtr =>\n \"tried to deallocate with a pointer not to the beginning of an existing object\",\n IncorrectAllocationInformation(..) =>\n \"tried to deallocate or reallocate using incorrect alignment or size\",\n Layout(_) =>\n \"rustc layout computation failed\",\n UnterminatedCString(_) =>\n \"attempted to get length of a null terminated string, but no null found before end of allocation\",\n HeapAllocZeroBytes =>\n \"tried to re-, de- or allocate zero bytes on the heap\",\n HeapAllocNonPowerOfTwoAlignment(_) =>\n \"tried to re-, de-, or allocate heap memory with alignment that is not a power of two\",\n Unreachable =>\n \"entered unreachable code\",\n Panic =>\n \"the evaluated program panicked\",\n ReadFromReturnPointer =>\n \"tried to read from the return pointer\",\n EvalErrorKind::PathNotFound(_) =>\n \"a path could not be resolved, maybe the crate is not loaded\",\n UnimplementedTraitSelection =>\n \"there were unresolved type arguments during trait selection\",\n TypeckError =>\n \"encountered constants with type errors, stopping evaluation\",\n ReferencedConstant =>\n \"referenced constant has errors\",\n }\n }\n}\n\nimpl<'tcx> fmt::Display for EvalError<'tcx> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n use self::EvalErrorKind::*;\n match self.kind {\n PointerOutOfBounds { ptr, access, allocation_size } => {\n write!(f, \"{} at offset {}, outside bounds of allocation {} which has size {}\",\n if access { \"memory access\" } else { \"pointer computed\" },\n ptr.offset, ptr.alloc_id, allocation_size)\n },\n MemoryLockViolation { ptr, len, frame, access, ref lock } => {\n write!(f, \"{:?} access by frame {} at {:?}, size {}, is in conflict with lock {:?}\",\n access, frame, ptr, len, lock)\n }\n MemoryAcquireConflict { ptr, len, kind, ref lock } => {\n write!(f, \"new {:?} lock at {:?}, size {}, is in conflict with lock {:?}\",\n kind, ptr, len, lock)\n }\n InvalidMemoryLockRelease { ptr, len, frame, ref lock } => {\n write!(f, \"frame {} tried to release memory write lock at {:?}, size {}, but cannot release lock {:?}\",\n frame, ptr, len, lock)\n }\n DeallocatedLockedMemory { ptr, ref lock } => {\n write!(f, \"tried to deallocate memory at {:?} in conflict with lock {:?}\",\n ptr, lock)\n }\n ValidationFailure(ref 
err) => {\n write!(f, \"type validation failed: {}\", err)\n }\n NoMirFor(ref func) => write!(f, \"no mir for `{}`\", func),\n FunctionPointerTyMismatch(sig, got) =>\n write!(f, \"tried to call a function with sig {} through a function pointer of type {}\", sig, got),\n ArrayIndexOutOfBounds(span, len, index) =>\n write!(f, \"index out of bounds: the len is {} but the index is {} at {:?}\", len, index, span),\n ReallocatedWrongMemoryKind(ref old, ref new) =>\n write!(f, \"tried to reallocate memory from {} to {}\", old, new),\n DeallocatedWrongMemoryKind(ref old, ref new) =>\n write!(f, \"tried to deallocate {} memory but gave {} as the kind\", old, new),\n Math(_, ref err) =>\n write!(f, \"{}\", err.description()),\n Intrinsic(ref err) =>\n write!(f, \"{}\", err),\n InvalidChar(c) =>\n write!(f, \"tried to interpret an invalid 32-bit value as a char: {}\", c),\n AlignmentCheckFailed { required, has } =>\n write!(f, \"tried to access memory with alignment {}, but alignment {} is required\",\n has, required),\n TypeNotPrimitive(ty) =>\n write!(f, \"expected primitive type, got {}\", ty),\n Layout(ref err) =>\n write!(f, \"rustc layout computation failed: {:?}\", err),\n PathNotFound(ref path) =>\n write!(f, \"Cannot find path {:?}\", path),\n MachineError(ref inner) =>\n write!(f, \"{}\", inner),\n IncorrectAllocationInformation(size, size2, align, align2) =>\n write!(f, \"incorrect alloc info: expected size {} and align {}, got size {} and align {}\", size, align, size2, align2),\n _ => write!(f, \"{}\", self.description()),\n }\n }\n}\n<|endoftext|>"} {"text":"We now check stuff inside struct literals.<|endoftext|>"} {"text":"Test format hygiene\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub const arg0: u8 = 1;\n\npub fn main() {\n format!(\"{}\", 1);\n}\n<|endoftext|>"} {"text":"Create regex_cache.rs<|endoftext|>"} {"text":"use quote::{ToTokens, Tokens};\nuse syn::punctuated::Punctuated;\nuse syn::synom::Synom;\nuse syn::{Field, FieldValue, Ident, Meta};\n\nmod metadata;\nmod request;\nmod response;\n\nuse self::metadata::Metadata;\nuse self::request::Request;\nuse self::response::Response;\n\npub fn strip_serde_attrs(field: &Field) -> Field {\n let mut field = field.clone();\n\n field.attrs = field.attrs.into_iter().filter(|attr| {\n let meta = attr.interpret_meta()\n .expect(\"ruma_api! 
could not parse field attributes\");\n\n let meta_list = match meta {\n Meta::List(meta_list) => meta_list,\n _ => panic!(\"expected Meta::List\"),\n };\n\n if meta_list.ident.as_ref() != \"serde\" {\n return true;\n }\n\n false\n }).collect();\n\n field\n}\n\npub struct Api {\n metadata: Metadata,\n request: Request,\n response: Response,\n}\n\nimpl From for Api {\n fn from(raw_api: RawApi) -> Self {\n Api {\n metadata: raw_api.metadata.into(),\n request: raw_api.request.into(),\n response: raw_api.response.into(),\n }\n }\n}\n\nimpl ToTokens for Api {\n fn to_tokens(&self, tokens: &mut Tokens) {\n let description = &self.metadata.description;\n let method = Ident::from(self.metadata.method.as_ref());\n let name = &self.metadata.name;\n let path = &self.metadata.path;\n let rate_limited = &self.metadata.rate_limited;\n let requires_authentication = &self.metadata.requires_authentication;\n\n let request = &self.request;\n let request_types = quote! { #request };\n let response = &self.response;\n let response_types = quote! { #response };\n\n let set_request_path = if self.request.has_path_fields() {\n let path_str = path.as_str();\n\n assert!(path_str.starts_with('\/'), \"path needs to start with '\/'\");\n assert!(\n path_str.chars().filter(|c| *c == ':').count() == self.request.path_field_count(),\n \"number of declared path parameters needs to match amount of placeholders in path\"\n );\n\n let request_path_init_fields = self.request.request_path_init_fields();\n\n let mut tokens = quote! {\n let request_path = RequestPath {\n #request_path_init_fields\n };\n\n \/\/ This `unwrap()` can only fail when the url is a\n \/\/ cannot-be-base url like `mailto:` or `data:`, which is not\n \/\/ the case for our placeholder url.\n let mut path_segments = url.path_segments_mut().unwrap();\n };\n\n for segment in path_str[1..].split('\/') {\n tokens.append_all(quote! {\n path_segments.push\n });\n\n if segment.starts_with(':') {\n let path_var = &segment[1..];\n let path_var_ident = Ident::from(path_var);\n\n tokens.append_all(quote! {\n (&request_path.#path_var_ident.to_string());\n });\n } else {\n tokens.append_all(quote! {\n (#segment);\n });\n }\n }\n\n tokens\n } else {\n quote! {\n url.set_path(metadata.path);\n }\n };\n\n let set_request_query = if self.request.has_query_fields() {\n let request_query_init_fields = self.request.request_query_init_fields();\n\n quote! {\n let request_query = RequestQuery {\n #request_query_init_fields\n };\n\n url.set_query(Some(&::serde_urlencoded::to_string(request_query)?));\n }\n } else {\n Tokens::new()\n };\n\n let add_headers_to_request = if self.request.has_header_fields() {\n let mut header_tokens = quote! {\n let headers = http_request.headers_mut();\n };\n\n header_tokens.append_all(self.request.add_headers_to_request());\n\n header_tokens\n } else {\n Tokens::new()\n };\n\n let add_body_to_request = if let Some(field) = self.request.newtype_body_field() {\n let field_name = field.ident.expect(\"expected field to have an identifier\");\n\n quote! {\n let request_body = RequestBody(request.#field_name);\n\n http_request.set_body(::serde_json::to_vec(&request_body)?);\n }\n } else if self.request.has_body_fields() {\n let request_body_init_fields = self.request.request_body_init_fields();\n\n quote! 
{\n let request_body = RequestBody {\n #request_body_init_fields\n };\n\n http_request.set_body(::serde_json::to_vec(&request_body)?);\n }\n } else {\n Tokens::new()\n };\n\n let deserialize_response_body = if let Some(field) = self.response.newtype_body_field() {\n let field_type = &field.ty;\n\n quote! {\n let future_response = http_response.body()\n .fold::<_, _, Result<_, ::std::io::Error>>(Vec::new(), |mut bytes, chunk| {\n bytes.write_all(&chunk)?;\n\n Ok(bytes)\n })\n .map_err(::ruma_api::Error::from)\n .and_then(|bytes| {\n ::serde_json::from_slice::<#field_type>(bytes.as_slice())\n .map_err(::ruma_api::Error::from)\n })\n }\n } else if self.response.has_body_fields() {\n quote! {\n let future_response = http_response.body()\n .fold::<_, _, Result<_, ::std::io::Error>>(Vec::new(), |mut bytes, chunk| {\n bytes.write_all(&chunk)?;\n\n Ok(bytes)\n })\n .map_err(::ruma_api::Error::from)\n .and_then(|bytes| {\n ::serde_json::from_slice::(bytes.as_slice())\n .map_err(::ruma_api::Error::from)\n })\n }\n } else {\n quote! {\n let future_response = ::futures::future::ok(())\n }\n };\n\n let extract_headers = if self.response.has_header_fields() {\n quote! {\n let mut headers = http_response.headers().clone();\n }\n } else {\n Tokens::new()\n };\n\n let response_init_fields = if self.response.has_fields() {\n self.response.init_fields()\n } else {\n Tokens::new()\n };\n\n tokens.append_all(quote! {\n #[allow(unused_imports)]\n use std::io::Write as _Write;\n\n #[allow(unused_imports)]\n use ::futures::{Future as _Future, Stream as _Stream};\n use ::ruma_api::Endpoint as _RumaApiEndpoint;\n\n \/\/\/ The API endpoint.\n #[derive(Debug)]\n pub struct Endpoint;\n\n #request_types\n\n impl ::std::convert::TryFrom for ::http::Request {\n type Error = ::ruma_api::Error;\n\n #[allow(unused_mut, unused_variables)]\n fn try_from(request: Request) -> Result {\n let metadata = Endpoint::METADATA;\n\n \/\/ Use dummy homeserver url which has to be overwritten in\n \/\/ the calling code. 
Previously (with http::Uri) this was\n \/\/ not required, but Url::parse only accepts absolute urls.\n let mut url = ::url::Url::parse(\"http:\/\/invalid-host-please-change\/\").unwrap();\n\n { #set_request_path }\n { #set_request_query }\n\n let mut http_request = ::http::Request::new(\n ::http::Method::#method,\n \/\/ Every valid URL is a valid URI\n url.into_string().parse().unwrap(),\n );\n\n { #add_headers_to_request }\n\n { #add_body_to_request }\n\n Ok(http_request)\n }\n }\n\n #response_types\n\n impl ::futures::future::FutureFrom<::http::Response> for Response {\n type Future = Box<_Future>;\n type Error = ::ruma_api::Error;\n\n #[allow(unused_variables)]\n fn future_from(http_response: ::http::Response)\n -> Box<_Future> {\n #extract_headers\n\n #deserialize_response_body\n .and_then(move |response_body| {\n let response = Response {\n #response_init_fields\n };\n\n Ok(response)\n });\n\n Box::new(future_response)\n }\n }\n\n impl ::ruma_api::Endpoint for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: ::ruma_api::Metadata = ::ruma_api::Metadata {\n description: #description,\n method: ::http::Method::#method,\n name: #name,\n path: #path,\n rate_limited: #rate_limited,\n requires_authentication: #requires_authentication,\n };\n }\n });\n }\n}\n\ntype ParseMetadata = Punctuated;\ntype ParseFields = Punctuated;\n\npub struct RawApi {\n pub metadata: Vec,\n pub request: Vec,\n pub response: Vec,\n}\n\nimpl Synom for RawApi {\n named!(parse -> Self, do_parse!(\n custom_keyword!(metadata) >>\n metadata: braces!(ParseMetadata::parse_terminated) >>\n custom_keyword!(request) >>\n request: braces!(call!(ParseFields::parse_terminated_with, Field::parse_named)) >>\n custom_keyword!(response) >>\n response: braces!(call!(ParseFields::parse_terminated_with, Field::parse_named)) >>\n (RawApi {\n metadata: metadata.1.into_iter().collect(),\n request: request.1.into_iter().collect(),\n response: response.1.into_iter().collect(),\n })\n ));\n}\nUse Vec for request and response bodies. Use http's API for creating requests and responses.use quote::{ToTokens, Tokens};\nuse syn::punctuated::Punctuated;\nuse syn::synom::Synom;\nuse syn::{Field, FieldValue, Ident, Meta};\n\nmod metadata;\nmod request;\nmod response;\n\nuse self::metadata::Metadata;\nuse self::request::Request;\nuse self::response::Response;\n\npub fn strip_serde_attrs(field: &Field) -> Field {\n let mut field = field.clone();\n\n field.attrs = field.attrs.into_iter().filter(|attr| {\n let meta = attr.interpret_meta()\n .expect(\"ruma_api! could not parse field attributes\");\n\n let meta_list = match meta {\n Meta::List(meta_list) => meta_list,\n _ => panic!(\"expected Meta::List\"),\n };\n\n if meta_list.ident.as_ref() != \"serde\" {\n return true;\n }\n\n false\n }).collect();\n\n field\n}\n\npub struct Api {\n metadata: Metadata,\n request: Request,\n response: Response,\n}\n\nimpl From for Api {\n fn from(raw_api: RawApi) -> Self {\n Api {\n metadata: raw_api.metadata.into(),\n request: raw_api.request.into(),\n response: raw_api.response.into(),\n }\n }\n}\n\nimpl ToTokens for Api {\n fn to_tokens(&self, tokens: &mut Tokens) {\n let description = &self.metadata.description;\n let method = Ident::from(self.metadata.method.as_ref());\n let name = &self.metadata.name;\n let path = &self.metadata.path;\n let rate_limited = &self.metadata.rate_limited;\n let requires_authentication = &self.metadata.requires_authentication;\n\n let request = &self.request;\n let request_types = quote! 
{ #request };\n let response = &self.response;\n let response_types = quote! { #response };\n\n let set_request_path = if self.request.has_path_fields() {\n let path_str = path.as_str();\n\n assert!(path_str.starts_with('\/'), \"path needs to start with '\/'\");\n assert!(\n path_str.chars().filter(|c| *c == ':').count() == self.request.path_field_count(),\n \"number of declared path parameters needs to match amount of placeholders in path\"\n );\n\n let request_path_init_fields = self.request.request_path_init_fields();\n\n let mut tokens = quote! {\n let request_path = RequestPath {\n #request_path_init_fields\n };\n\n \/\/ This `unwrap()` can only fail when the url is a\n \/\/ cannot-be-base url like `mailto:` or `data:`, which is not\n \/\/ the case for our placeholder url.\n let mut path_segments = url.path_segments_mut().unwrap();\n };\n\n for segment in path_str[1..].split('\/') {\n tokens.append_all(quote! {\n path_segments.push\n });\n\n if segment.starts_with(':') {\n let path_var = &segment[1..];\n let path_var_ident = Ident::from(path_var);\n\n tokens.append_all(quote! {\n (&request_path.#path_var_ident.to_string());\n });\n } else {\n tokens.append_all(quote! {\n (#segment);\n });\n }\n }\n\n tokens\n } else {\n quote! {\n url.set_path(metadata.path);\n }\n };\n\n let set_request_query = if self.request.has_query_fields() {\n let request_query_init_fields = self.request.request_query_init_fields();\n\n quote! {\n let request_query = RequestQuery {\n #request_query_init_fields\n };\n\n url.set_query(Some(&::serde_urlencoded::to_string(request_query)?));\n }\n } else {\n Tokens::new()\n };\n\n let add_headers_to_request = if self.request.has_header_fields() {\n let mut header_tokens = quote! {\n let headers = http_request.headers_mut();\n };\n\n header_tokens.append_all(self.request.add_headers_to_request());\n\n header_tokens\n } else {\n Tokens::new()\n };\n\n let create_http_request = if let Some(field) = self.request.newtype_body_field() {\n let field_name = field.ident.expect(\"expected field to have an identifier\");\n\n quote! {\n let request_body = RequestBody(request.#field_name);\n\n let mut http_request = ::http::Request::new(::serde_json::to_vec(&request_body)?);\n }\n } else if self.request.has_body_fields() {\n let request_body_init_fields = self.request.request_body_init_fields();\n\n quote! {\n let request_body = RequestBody {\n #request_body_init_fields\n };\n\n let mut http_request = ::http::Request::new(::serde_json::to_vec(&request_body)?);\n }\n } else {\n quote! {\n let mut http_request = ::http::Request::new(());\n }\n };\n\n let deserialize_response_body = if let Some(field) = self.response.newtype_body_field() {\n let field_type = &field.ty;\n\n quote! {\n let future_response = http_response.body()\n .fold::<_, _, Result<_, ::std::io::Error>>(Vec::new(), |mut bytes, chunk| {\n bytes.write_all(&chunk)?;\n\n Ok(bytes)\n })\n .map_err(::ruma_api::Error::from)\n .and_then(|bytes| {\n ::serde_json::from_slice::<#field_type>(bytes.as_slice())\n .map_err(::ruma_api::Error::from)\n })\n }\n } else if self.response.has_body_fields() {\n quote! {\n let future_response = http_response.body()\n .fold::<_, _, Result<_, ::std::io::Error>>(Vec::new(), |mut bytes, chunk| {\n bytes.write_all(&chunk)?;\n\n Ok(bytes)\n })\n .map_err(::ruma_api::Error::from)\n .and_then(|bytes| {\n ::serde_json::from_slice::(bytes.as_slice())\n .map_err(::ruma_api::Error::from)\n })\n }\n } else {\n quote! 
{\n let future_response = ::futures::future::ok(())\n }\n };\n\n let extract_headers = if self.response.has_header_fields() {\n quote! {\n let mut headers = http_response.headers().clone();\n }\n } else {\n Tokens::new()\n };\n\n let response_init_fields = if self.response.has_fields() {\n self.response.init_fields()\n } else {\n Tokens::new()\n };\n\n tokens.append_all(quote! {\n #[allow(unused_imports)]\n use std::io::Write as _Write;\n\n #[allow(unused_imports)]\n use ::futures::{Future as _Future, Stream as _Stream};\n use ::ruma_api::Endpoint as _RumaApiEndpoint;\n\n \/\/\/ The API endpoint.\n #[derive(Debug)]\n pub struct Endpoint;\n\n #request_types\n\n impl ::std::convert::TryFrom for ::http::Request> {\n type Error = ::ruma_api::Error;\n\n #[allow(unused_mut, unused_variables)]\n fn try_from(request: Request) -> Result {\n let metadata = Endpoint::METADATA;\n\n \/\/ Use dummy homeserver url which has to be overwritten in\n \/\/ the calling code. Previously (with http::Uri) this was\n \/\/ not required, but Url::parse only accepts absolute urls.\n let mut url = ::url::Url::parse(\"http:\/\/invalid-host-please-change\/\").unwrap();\n\n { #set_request_path }\n { #set_request_query }\n\n #create_http_request\n\n *http_request.method_mut() = ::http::Method::#method;\n *http_request.uri_mut() = url.into_string().parse().unwrap();\n\n { #add_headers_to_request }\n\n Ok(http_request)\n }\n }\n\n #response_types\n\n impl ::futures::future::FutureFrom<::http::Response>> for Response {\n type Future = Box<_Future>;\n type Error = ::ruma_api::Error;\n\n #[allow(unused_variables)]\n fn future_from(http_response: ::http::Response>)\n -> Box<_Future> {\n #extract_headers\n\n #deserialize_response_body\n .and_then(move |response_body| {\n let response = Response {\n #response_init_fields\n };\n\n Ok(response)\n });\n\n Box::new(future_response)\n }\n }\n\n impl ::ruma_api::Endpoint, Vec> for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: ::ruma_api::Metadata = ::ruma_api::Metadata {\n description: #description,\n method: ::http::Method::#method,\n name: #name,\n path: #path,\n rate_limited: #rate_limited,\n requires_authentication: #requires_authentication,\n };\n }\n });\n }\n}\n\ntype ParseMetadata = Punctuated;\ntype ParseFields = Punctuated;\n\npub struct RawApi {\n pub metadata: Vec,\n pub request: Vec,\n pub response: Vec,\n}\n\nimpl Synom for RawApi {\n named!(parse -> Self, do_parse!(\n custom_keyword!(metadata) >>\n metadata: braces!(ParseMetadata::parse_terminated) >>\n custom_keyword!(request) >>\n request: braces!(call!(ParseFields::parse_terminated_with, Field::parse_named)) >>\n custom_keyword!(response) >>\n response: braces!(call!(ParseFields::parse_terminated_with, Field::parse_named)) >>\n (RawApi {\n metadata: metadata.1.into_iter().collect(),\n request: request.1.into_iter().collect(),\n response: response.1.into_iter().collect(),\n })\n ));\n}\n<|endoftext|>"} {"text":"add codegen test\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -O\n#![crate_type=\"lib\"]\n#![feature(maybe_uninit)]\n\nuse std::mem::MaybeUninit;\n\n\/\/ Boxing a `MaybeUninit` value should not copy junk from the stack\n#[no_mangle]\npub fn box_uninitialized() -> Box> {\n \/\/ CHECK-LABEL: @box_uninitialized\n \/\/ CHECK-NOT: store\n Box::new(MaybeUninit::uninitialized())\n}\n<|endoftext|>"} {"text":"Add a test written by jdm that tests repeated vector self appending.\/\/ xfail-stage0\n\nfn main()\n{\n \/\/ Make sure we properly handle repeated self-appends.\n let vec[int] a = [0];\n auto i = 20;\n while (i > 0) {\n a += a;\n i -= 1;\n }\n}\n<|endoftext|>"} {"text":"Remove duplicated documentation<|endoftext|>"} {"text":"auto merge of #529 : eschweic\/servo\/master, r=pcwalton\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/ Implements a Quadtree data structure to keep track of which tiles have\n\/\/ been rasterized and which have not.\n\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse geom::rect::Rect;\n\npriv enum Quadtype {\n Empty,\n Base,\n Branch,\n}\n\npriv enum Quadrant {\n TL = 0,\n TR = 1,\n BL = 2,\n BR = 3,\n}\n\n\npub struct Quadtree {\n quadtype: Quadtype,\n rect: Rect,\n quadrants: [Option<~Quadtree>, ..4],\n}\n\n\nimpl Quadtree {\n pub fn new(x: uint, y: uint, width: uint, height: uint) -> Quadtree {\n Quadtree {\n quadtype: Empty,\n rect: Rect {\n origin: Point2D(x, y),\n size: Size2D(width, height),\n },\n\n quadrants: [None, None, None, None],\n }\n }\n \n \/\/\/ Determine which child contains a given point\n priv fn get_quadrant(&self, x: uint, y: uint) -> Quadrant {\n let self_width = self.rect.size.width;\n let self_height = self.rect.size.height;\n let self_x = self.rect.origin.x;\n let self_y = self.rect.origin.y;\n match (self_width, self_height) {\n (1, _) => {\n if y < self_y + self_height \/ 2 { \n TL\n } else { \n BR\n }\n }\n (_, 1) => {\n if x < self_x + self_width \/ 2 {\n TL\n } else {\n BR\n }\n }\n _ => {\n if x < self_x + self_width \/ 2 {\n if y < self_y + self_height \/ 2 { \n TL\n } else { \n BL\n }\n } else if y < self_y + self_height \/ 2 { \n TR\n } else { \n BR\n }\n }\n }\n }\n \n \/\/\/ Change a point from Empty to Base\n pub fn add_region(&mut self, x: uint, y: uint) {\n let self_x = self.rect.origin.x;\n let self_y = self.rect.origin.y;\n let self_width = self.rect.size.width;\n let self_height = self.rect.size.height;\n\n debug!(\"Quadtree: adding: (%?, %?) 
w:%?, h:%?\", self_x, self_y, self_width, self_height);\n\n if x >= self_x + self_width || x < self_x\n || y >= self_y + self_height || y < self_y {\n return; \/\/ Out of bounds\n }\n match self.quadtype {\n Base => return,\n Empty => {\n if self_width == 1 && self_height == 1 {\n self.quadtype = Base;\n return;\n }\n self.quadtype = Branch;\n\n \/\/ Initialize children\n self.quadrants[TL as int] = Some(~Quadtree::new(self_x,\n self_y,\n (self_width \/ 2).max(&1),\n (self_height \/ 2).max(&1)));\n if self_width > 1 && self_height > 1 {\n self.quadrants[TR as int] = Some(~Quadtree::new(self_x + self_width \/ 2,\n self_y,\n self_width - self_width \/ 2,\n self_height \/ 2));\n self.quadrants[BL as int] = Some(~Quadtree::new(self_x,\n self_y + self_height \/ 2,\n self_width \/ 2,\n self_height - self_height \/ 2));\n }\n self.quadrants[BR as int] = Some(~Quadtree::new(self_x + self_width \/ 2,\n self_y + self_height \/ 2,\n self_width - self_width \/ 2,\n self_height - self_height \/ 2));\n }\n Branch => {} \/\/ Fall through\n }\n\n \/\/ If we've made it this far, we know we are a branch and therefore have children\n let index = self.get_quadrant(x, y) as int;\n \n match self.quadrants[index] {\n None => fail!(\"Quadtree: child query failure\"),\n Some(ref mut region) => {\n \/\/ Recurse if necessary\n match region.quadtype {\n Empty | Branch => {\n region.add_region(x, y);\n }\n Base => {} \/\/ nothing to do\n }\n }\n }\n \n \/\/ FIXME: ideally we could make the assignments in the match,\n \/\/ but borrowed pointers prevent that. So here's a flag instead.\n let mut base_flag = 0;\n \n \/\/ If all children are Bases, convert self to Base\n match (&self.quadrants, self_width, self_height) {\n (&[Some(ref tl_q), _, _, Some(ref br_q)], 1, _) |\n (&[Some(ref tl_q), _, _, Some(ref br_q)], _, 1) => {\n match(tl_q.quadtype, br_q.quadtype) {\n (Base, Base) => {\n base_flag = 1;\n }\n _ => {} \/\/ nothing to do\n }\n }\n (&[Some(ref tl_q), Some(ref tr_q), Some(ref bl_q), Some(ref br_q)], _, _) => {\n match (tl_q.quadtype, tr_q.quadtype, bl_q.quadtype, br_q.quadtype) {\n (Base, Base, Base, Base) => {\n base_flag = 2;\n }\n _ => {} \/\/ nothing to do\n }\n }\n _ => {} \/\/ nothing to do\n }\n \n match base_flag {\n 0 => {}\n 1 => {\n self.quadtype = Base;\n self.quadrants[TL as int] = None;\n self.quadrants[BR as int] = None;\n }\n 2 => {\n self.quadtype = Base;\n self.quadrants[TL as int] = None;\n self.quadrants[TR as int] = None;\n self.quadrants[BL as int] = None;\n self.quadrants[BR as int] = None;\n }\n _ => fail!(\"Quadtree: Unknown flag type\"),\n }\n }\n \n \/\/\/ Check if a point is a Base or Empty.\n pub fn check_region(&self, x: uint, y: uint) -> bool {\n let self_x = self.rect.origin.x;\n let self_y = self.rect.origin.y;\n let self_width = self.rect.size.width;\n let self_height = self.rect.size.height;\n\n if x >= self_x + self_width || x < self_x\n || y >= self_y + self_height || y < self_y {\n return false; \/\/ out of bounds\n }\n\n match self.quadtype {\n Empty => false,\n Base => true,\n Branch => {\n let index = self.get_quadrant(x,y) as int;\n match self.quadrants[index] {\n None => fail!(\"Quadtree: child query failed\"),\n Some(ref region) => region.check_region(x, y)\n }\n }\n }\n }\n \n}\n\n\n#[test]\nfn test_add_region() {\n let mut t = Quadtree::new(50, 50, 3, 4);\n assert!(!t.check_region(50, 50));\n t.add_region(50, 50);\n assert!(t.check_region(50, 50));\n assert!(!t.check_region(51, 50));\n assert!(!t.check_region(50, 51));\n t.add_region(53, 50);\n 
assert!(!t.check_region(53, 50));\n\n}<|endoftext|>"} {"text":"Cubes - resize support<|endoftext|>"} {"text":"disallow range for now<|endoftext|>"} {"text":"Remove superfluous debug println.<|endoftext|>"} {"text":"add option for sprites onto tiles<|endoftext|>"} {"text":"Implement LED brightness regs<|endoftext|>"} {"text":"Use Ar1 alias in Map impls<|endoftext|>"} {"text":"Implement sound disable reg and wave mem lockout<|endoftext|>"} {"text":"use ansi_term::Style;\nuse ansi_term::Style::Plain;\nuse ansi_term::Colour::{Red, Green, Yellow, Blue, Cyan, Purple, Fixed};\n\nuse std::default::Default;\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Colours {\n pub filetypes: FileTypes,\n pub perms: Permissions,\n pub size: Size,\n pub users: Users,\n pub links: Links,\n pub git: Git,\n\n pub punctuation: Style,\n pub date: Style,\n pub inode: Style,\n pub blocks: Style,\n pub header: Style,\n\n pub symlink_path: Style,\n pub broken_arrow: Style,\n pub broken_filename: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct FileTypes {\n pub normal: Style,\n pub directory: Style,\n pub symlink: Style,\n pub special: Style,\n pub executable: Style,\n pub image: Style,\n pub video: Style,\n pub music: Style,\n pub lossless: Style,\n pub crypto: Style,\n pub document: Style,\n pub compressed: Style,\n pub temp: Style,\n pub immediate: Style,\n pub compiled: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Permissions {\n pub user_read: Style,\n pub user_write: Style,\n pub user_execute_file: Style,\n pub user_execute_other: Style,\n\n pub group_read: Style,\n pub group_write: Style,\n pub group_execute: Style,\n\n pub other_read: Style,\n pub other_write: Style,\n pub other_execute: Style,\n\n pub attribute: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Size {\n pub numbers: Style,\n pub unit: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Users {\n pub user_you: Style,\n pub user_someone_else: Style,\n pub group_yours: Style,\n pub group_not_yours: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Links {\n pub normal: Style,\n pub multi_link_file: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Git {\n pub new: Style,\n pub modified: Style,\n pub deleted: Style,\n pub renamed: Style,\n pub typechange: Style,\n}\n\nimpl Colours {\n pub fn plain() -> Colours {\n Colours::default()\n }\n\n pub fn colourful() -> Colours {\n Colours {\n filetypes: FileTypes {\n normal: Plain,\n directory: Blue.bold(),\n symlink: Cyan.normal(),\n special: Yellow.normal(),\n executable: Green.bold(),\n image: Fixed(133).normal(),\n video: Fixed(135).normal(),\n music: Fixed(92).normal(),\n lossless: Fixed(93).normal(),\n crypto: Fixed(109).normal(),\n document: Fixed(105).normal(),\n compressed: Red.normal(),\n temp: Fixed(244).normal(),\n immediate: Yellow.bold().underline(),\n compiled: Fixed(137).normal(),\n },\n\n perms: Permissions {\n user_read: Yellow.bold(),\n user_write: Red.bold(),\n user_execute_file: Green.bold().underline(),\n user_execute_other: Green.bold(),\n group_read: Yellow.normal(),\n group_write: Red.normal(),\n group_execute: Green.normal(),\n other_read: Yellow.normal(),\n other_write: Red.normal(),\n other_execute: Green.normal(),\n attribute: Plain,\n },\n\n size: Size {\n numbers: Green.bold(),\n unit: Green.normal(),\n },\n\n users: Users {\n user_you: Yellow.bold(),\n user_someone_else: Plain,\n group_yours: 
Yellow.bold(),\n group_not_yours: Plain,\n },\n\n links: Links {\n normal: Red.bold(),\n multi_link_file: Red.on(Yellow),\n },\n\n git: Git {\n new: Green.normal(),\n modified: Blue.normal(),\n deleted: Red.normal(),\n renamed: Yellow.normal(),\n typechange: Purple.normal(),\n },\n\n punctuation: Fixed(244).normal(),\n date: Blue.normal(),\n inode: Purple.normal(),\n blocks: Cyan.normal(),\n header: Plain.underline(),\n\n symlink_path: Cyan.normal(),\n broken_arrow: Red.normal(),\n broken_filename: Red.underline()\n }\n }\n}\nUse new ansi-termuse ansi_term::Style;\nuse ansi_term::Colour::{Red, Green, Yellow, Blue, Cyan, Purple, Fixed};\n\nuse std::default::Default;\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Colours {\n pub filetypes: FileTypes,\n pub perms: Permissions,\n pub size: Size,\n pub users: Users,\n pub links: Links,\n pub git: Git,\n\n pub punctuation: Style,\n pub date: Style,\n pub inode: Style,\n pub blocks: Style,\n pub header: Style,\n\n pub symlink_path: Style,\n pub broken_arrow: Style,\n pub broken_filename: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct FileTypes {\n pub normal: Style,\n pub directory: Style,\n pub symlink: Style,\n pub special: Style,\n pub executable: Style,\n pub image: Style,\n pub video: Style,\n pub music: Style,\n pub lossless: Style,\n pub crypto: Style,\n pub document: Style,\n pub compressed: Style,\n pub temp: Style,\n pub immediate: Style,\n pub compiled: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Permissions {\n pub user_read: Style,\n pub user_write: Style,\n pub user_execute_file: Style,\n pub user_execute_other: Style,\n\n pub group_read: Style,\n pub group_write: Style,\n pub group_execute: Style,\n\n pub other_read: Style,\n pub other_write: Style,\n pub other_execute: Style,\n\n pub attribute: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Size {\n pub numbers: Style,\n pub unit: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Users {\n pub user_you: Style,\n pub user_someone_else: Style,\n pub group_yours: Style,\n pub group_not_yours: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Links {\n pub normal: Style,\n pub multi_link_file: Style,\n}\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\npub struct Git {\n pub new: Style,\n pub modified: Style,\n pub deleted: Style,\n pub renamed: Style,\n pub typechange: Style,\n}\n\nimpl Colours {\n pub fn plain() -> Colours {\n Colours::default()\n }\n\n pub fn colourful() -> Colours {\n Colours {\n filetypes: FileTypes {\n normal: Style::default(),\n directory: Blue.bold(),\n symlink: Cyan.normal(),\n special: Yellow.normal(),\n executable: Green.bold(),\n image: Fixed(133).normal(),\n video: Fixed(135).normal(),\n music: Fixed(92).normal(),\n lossless: Fixed(93).normal(),\n crypto: Fixed(109).normal(),\n document: Fixed(105).normal(),\n compressed: Red.normal(),\n temp: Fixed(244).normal(),\n immediate: Yellow.bold().underline(),\n compiled: Fixed(137).normal(),\n },\n\n perms: Permissions {\n user_read: Yellow.bold(),\n user_write: Red.bold(),\n user_execute_file: Green.bold().underline(),\n user_execute_other: Green.bold(),\n group_read: Yellow.normal(),\n group_write: Red.normal(),\n group_execute: Green.normal(),\n other_read: Yellow.normal(),\n other_write: Red.normal(),\n other_execute: Green.normal(),\n attribute: Style::default(),\n },\n\n size: Size {\n numbers: Green.bold(),\n unit: Green.normal(),\n },\n\n users: 
Users {\n user_you: Yellow.bold(),\n user_someone_else: Style::default(),\n group_yours: Yellow.bold(),\n group_not_yours: Style::default(),\n },\n\n links: Links {\n normal: Red.bold(),\n multi_link_file: Red.on(Yellow),\n },\n\n git: Git {\n new: Green.normal(),\n modified: Blue.normal(),\n deleted: Red.normal(),\n renamed: Yellow.normal(),\n typechange: Purple.normal(),\n },\n\n punctuation: Fixed(244).normal(),\n date: Blue.normal(),\n inode: Purple.normal(),\n blocks: Cyan.normal(),\n header: Style::default().underline(),\n\n symlink_path: Cyan.normal(),\n broken_arrow: Red.normal(),\n broken_filename: Red.underline()\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/! Yet more cli test cases. These are testing that the output\n\/\/! is exactly as expected.\n\nextern crate multirust_dist;\nextern crate multirust_mock;\n\nuse multirust_mock::clitools::{self, Config, Scenario,\n expect_ok, expect_ok_ex,\n expect_err_ex,\n this_host_triple};\nuse std::env;\n\nfn setup(f: &Fn(&Config)) {\n clitools::setup(Scenario::SimpleV2, f);\n}\n\n#[test]\nfn update() {\n setup(&|config| {\n expect_ok_ex(config, &[\"multirust\", \"update\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: installing toolchain 'nightly'\ninfo: downloading toolchain manifest\ninfo: downloading component 'rust-std'\ninfo: downloading component 'rustc'\ninfo: downloading component 'cargo'\ninfo: downloading component 'rust-docs'\ninfo: installing component 'rust-std'\ninfo: installing component 'rustc'\ninfo: installing component 'cargo'\ninfo: installing component 'rust-docs'\ninfo: toolchain 'nightly' installed\n\");\n });\n}\n\n#[test]\nfn update_again() {\n setup(&|config| {\n expect_ok(config, &[\"multirust\", \"update\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"update\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: updating existing install for 'nightly'\ninfo: downloading toolchain manifest\ninfo: toolchain is already up to date\n\");\n });\n}\n\n#[test]\nfn default() {\n setup(&|config| {\n expect_ok_ex(config, &[\"multirust\", \"update\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: installing toolchain 'nightly'\ninfo: downloading toolchain manifest\ninfo: downloading component 'rust-std'\ninfo: downloading component 'rustc'\ninfo: downloading component 'cargo'\ninfo: downloading component 'rust-docs'\ninfo: installing component 'rust-std'\ninfo: installing component 'rustc'\ninfo: installing component 'cargo'\ninfo: installing component 'rust-docs'\ninfo: toolchain 'nightly' installed\n\");\n });\n}\n\n#[test]\nfn override_again() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok(config, &[\"multirust\", \"override\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"override\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\n&format!(\nr\"info: using existing install for 'nightly'\ninfo: override toolchain for '{}' set to 'nightly'\n\", cwd.display()));\n });\n}\n\n#[test]\nfn remove_override() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok(config, &[\"multirust\", \"override\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"remove-override\"],\nr\"\",\n&format!(r\"info: override toolchain for '{}' removed\n\", cwd.display()));\n });\n}\n\n#[test]\nfn remove_override_none() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok_ex(config, 
&[\"multirust\", \"remove-override\"],\nr\"\",\n&format!(r\"info: no override toolchain for '{}'\n\", cwd.display()));\n });\n}\n\n#[test]\nfn update_no_manifest() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"update\", \"nightly-2016-01-01\"],\nr\"\",\nr\"info: installing toolchain 'nightly-2016-01-01'\ninfo: downloading toolchain manifest\nerror: no release found for 'nightly-2016-01-01'\n\");\n });\n}\n\n#[test]\nfn delete_data() {\n setup(&|config| {\n expect_ok(config, &[\"multirust\", \"default\", \"nightly\"]);\n assert!(config.homedir.path().exists());\n expect_ok_ex(config, &[\"multirust\", \"delete-data\", \"-y\"],\nr\"\",\n&format!(\nr\"info: deleted directory '{}'\n\", config.homedir.path().display()));\n });\n}\n\n\/\/ Issue #111\n\/\/ multirust update nightly-2016-03-1\n#[test]\nfn update_invalid_toolchain() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"update\", \"nightly-2016-03-1\"],\nr\"\",\nr\"error: toolchain 'nightly-2016-03-1' is not installed\n\");\n });\n }\n\n#[test]\nfn default_invalid_toolchain() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"default\", \"nightly-2016-03-1\"],\nr\"\",\nr\"error: toolchain 'nightly-2016-03-1' is not installed\n\");\n });\n}\n\n#[test]\nfn list_targets() {\n setup(&|config| {\n let trip = this_host_triple();\n let mut sorted = vec![format!(\"{} (default)\", &*trip),\n format!(\"{} (installed)\", clitools::CROSS_ARCH1),\n clitools::CROSS_ARCH2.to_string()];\n sorted.sort();\n\n let expected = format!(\"{}\\n{}\\n{}\\n\", sorted[0], sorted[1], sorted[2]);\n\n expect_ok(config, &[\"multirust\", \"update\", \"nightly\"]);\n expect_ok(config, &[\"multirust\", \"add-target\", \"nightly\",\n clitools::CROSS_ARCH1]);\n expect_ok_ex(config, &[\"multirust\", \"list-targets\", \"nightly\"],\n&expected,\nr\"\");\n });\n}\nFix the test for 'default'\/\/! Yet more cli test cases. These are testing that the output\n\/\/! 
is exactly as expected.\n\nextern crate multirust_dist;\nextern crate multirust_mock;\n\nuse multirust_mock::clitools::{self, Config, Scenario,\n expect_ok, expect_ok_ex,\n expect_err_ex,\n this_host_triple};\nuse std::env;\n\nfn setup(f: &Fn(&Config)) {\n clitools::setup(Scenario::SimpleV2, f);\n}\n\n#[test]\nfn update() {\n setup(&|config| {\n expect_ok_ex(config, &[\"multirust\", \"update\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: installing toolchain 'nightly'\ninfo: downloading toolchain manifest\ninfo: downloading component 'rust-std'\ninfo: downloading component 'rustc'\ninfo: downloading component 'cargo'\ninfo: downloading component 'rust-docs'\ninfo: installing component 'rust-std'\ninfo: installing component 'rustc'\ninfo: installing component 'cargo'\ninfo: installing component 'rust-docs'\ninfo: toolchain 'nightly' installed\n\");\n });\n}\n\n#[test]\nfn update_again() {\n setup(&|config| {\n expect_ok(config, &[\"multirust\", \"update\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"update\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: updating existing install for 'nightly'\ninfo: downloading toolchain manifest\ninfo: toolchain is already up to date\n\");\n });\n}\n\n#[test]\nfn default() {\n setup(&|config| {\n expect_ok_ex(config, &[\"multirust\", \"default\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\nr\"info: installing toolchain 'nightly'\ninfo: downloading toolchain manifest\ninfo: downloading component 'rust-std'\ninfo: downloading component 'rustc'\ninfo: downloading component 'cargo'\ninfo: downloading component 'rust-docs'\ninfo: installing component 'rust-std'\ninfo: installing component 'rustc'\ninfo: installing component 'cargo'\ninfo: installing component 'rust-docs'\ninfo: toolchain 'nightly' installed\ninfo: default toolchain set to 'nightly'\n\");\n });\n}\n\n#[test]\nfn override_again() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok(config, &[\"multirust\", \"override\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"override\", \"nightly\"],\nr\"\nnightly revision:\n\n1.3.0 (hash-n-2)\n1.3.0 (hash-n-2)\n\n\",\n&format!(\nr\"info: using existing install for 'nightly'\ninfo: override toolchain for '{}' set to 'nightly'\n\", cwd.display()));\n });\n}\n\n#[test]\nfn remove_override() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok(config, &[\"multirust\", \"override\", \"nightly\"]);\n expect_ok_ex(config, &[\"multirust\", \"remove-override\"],\nr\"\",\n&format!(r\"info: override toolchain for '{}' removed\n\", cwd.display()));\n });\n}\n\n#[test]\nfn remove_override_none() {\n setup(&|config| {\n let cwd = env::current_dir().unwrap();\n expect_ok_ex(config, &[\"multirust\", \"remove-override\"],\nr\"\",\n&format!(r\"info: no override toolchain for '{}'\n\", cwd.display()));\n });\n}\n\n#[test]\nfn update_no_manifest() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"update\", \"nightly-2016-01-01\"],\nr\"\",\nr\"info: installing toolchain 'nightly-2016-01-01'\ninfo: downloading toolchain manifest\nerror: no release found for 'nightly-2016-01-01'\n\");\n });\n}\n\n#[test]\nfn delete_data() {\n setup(&|config| {\n expect_ok(config, &[\"multirust\", \"default\", \"nightly\"]);\n assert!(config.homedir.path().exists());\n expect_ok_ex(config, &[\"multirust\", \"delete-data\", \"-y\"],\nr\"\",\n&format!(\nr\"info: deleted directory 
'{}'\n\", config.homedir.path().display()));\n });\n}\n\n\/\/ Issue #111\n\/\/ multirust update nightly-2016-03-1\n#[test]\nfn update_invalid_toolchain() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"update\", \"nightly-2016-03-1\"],\nr\"\",\nr\"error: toolchain 'nightly-2016-03-1' is not installed\n\");\n });\n }\n\n#[test]\nfn default_invalid_toolchain() {\n setup(&|config| {\n expect_err_ex(config, &[\"multirust\", \"default\", \"nightly-2016-03-1\"],\nr\"\",\nr\"error: toolchain 'nightly-2016-03-1' is not installed\n\");\n });\n}\n\n#[test]\nfn list_targets() {\n setup(&|config| {\n let trip = this_host_triple();\n let mut sorted = vec![format!(\"{} (default)\", &*trip),\n format!(\"{} (installed)\", clitools::CROSS_ARCH1),\n clitools::CROSS_ARCH2.to_string()];\n sorted.sort();\n\n let expected = format!(\"{}\\n{}\\n{}\\n\", sorted[0], sorted[1], sorted[2]);\n\n expect_ok(config, &[\"multirust\", \"update\", \"nightly\"]);\n expect_ok(config, &[\"multirust\", \"add-target\", \"nightly\",\n clitools::CROSS_ARCH1]);\n expect_ok_ex(config, &[\"multirust\", \"list-targets\", \"nightly\"],\n&expected,\nr\"\");\n });\n}\n<|endoftext|>"} {"text":"Derive Debug trait for PacketSet in define_packet_set macro<|endoftext|>"} {"text":"Use resiter::IterInnerOkOrElse instead of libimagerror version<|endoftext|>"} {"text":"Completed Implementation of Cat<|endoftext|>"} {"text":"fixed comment at top of tut-03<|endoftext|>"} {"text":"add tests for invalid UTF-16 on Windows\/\/! These Windows-only tests are ported from the Unix-only tests in\n\/\/! tests\/utf16.rs. The tests that use StrictUtf8 mode are omitted here,\n\/\/! because that's a Unix-only feature.\n\n#![cfg(windows)]\n\nuse clap::{App, Arg};\nuse std::ffi::OsString;\nuse std::os::windows::ffi::OsStringExt;\n\n\/\/ Take a slice of ASCII bytes, convert them to UTF-16, and then append a\n\/\/ dangling surrogate character to make the result invalid UTF-16.\nfn bad_osstring(ascii: &[u8]) -> OsString {\n let mut wide_chars: Vec = ascii.iter().map(|&c| c as u16).collect();\n \/\/ UTF-16 surrogate characters are only valid in pairs.\n let surrogate_char: u16 = 0xDC00;\n wide_chars.push(surrogate_char);\n let os = OsString::from_wide(&wide_chars);\n assert!(os.to_str().is_none(), \"invalid Unicode\");\n os\n}\n\n#[test]\nfn invalid_utf16_lossy_positional() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\" 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_lossy_option_short_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![\n OsString::from(\"\"),\n OsString::from(\"-a\"),\n bad_osstring(b\"\"),\n ]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_lossy_option_short_equals() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"-a=\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_lossy_option_short_no_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n 
.try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"-a\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_lossy_option_long_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![\n OsString::from(\"\"),\n OsString::from(\"--arg\"),\n bad_osstring(b\"\"),\n ]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_lossy_option_long_equals() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"--arg=\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_lossy(\"arg\").unwrap(), \"\\u{FFFD}\");\n}\n\n#[test]\nfn invalid_utf16_positional() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\" 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n\n#[test]\nfn invalid_utf16_option_short_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![\n OsString::from(\"\"),\n OsString::from(\"-a\"),\n bad_osstring(b\"\"),\n ]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n\n#[test]\nfn invalid_utf16_option_short_equals() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"-a=\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n\n#[test]\nfn invalid_utf16_option_short_no_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"-a\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n\n#[test]\nfn invalid_utf16_option_long_space() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![\n OsString::from(\"\"),\n OsString::from(\"--arg\"),\n bad_osstring(b\"\"),\n ]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n\n#[test]\nfn invalid_utf16_option_long_equals() {\n let r = App::new(\"bad_utf16\")\n .arg(Arg::from(\"-a, --arg 'some arg'\"))\n .try_get_matches_from(vec![OsString::from(\"\"), bad_osstring(b\"--arg=\")]);\n assert!(r.is_ok());\n let m = r.unwrap();\n assert!(m.is_present(\"arg\"));\n assert_eq!(&*m.value_of_os(\"arg\").unwrap(), &*bad_osstring(b\"\"));\n}\n<|endoftext|>"} {"text":"Manage notification error correctly<|endoftext|>"} {"text":"Add ID reporting in imag-tag<|endoftext|>"} {"text":"updated build.rs to use 'git describe --all'<|endoftext|>"} {"text":"Add Session::new function.<|endoftext|>"} {"text":"Update pattern.rs<|endoftext|>"} {"text":"adds state commit in updates kicks<|endoftext|>"} 
{"text":"\/\/ Extracting metadata from crate files\n\nimport driver::session;\nimport syntax::{ast, ast_util};\nimport lib::llvm::{False, llvm, mk_object_file, mk_section_iter};\nimport front::attr;\nimport middle::resolve;\nimport syntax::visit;\nimport syntax::codemap::span;\nimport back::x86;\nimport util::{common, filesearch};\nimport std::{vec, str, fs, io, option};\nimport std::option::{none, some};\nimport std::map::{hashmap, new_int_hash};\nimport syntax::print::pprust;\nimport common::*;\n\nexport read_crates;\nexport list_file_metadata;\n\n\/\/ Traverses an AST, reading all the information about use'd crates and native\n\/\/ libraries necessary for later resolving, typechecking, linking, etc.\nfn read_crates(sess: session::session, crate: ast::crate) {\n let e =\n @{sess: sess,\n crate_cache: @std::map::new_str_hash::(),\n mutable next_crate_num: 1};\n let v =\n visit::mk_simple_visitor(@{visit_view_item:\n bind visit_view_item(e, _),\n visit_item: bind visit_item(e, _)\n with *visit::default_simple_visitor()});\n visit::visit_crate(crate, (), v);\n}\n\ntype env =\n @{sess: session::session,\n crate_cache: @hashmap,\n mutable next_crate_num: ast::crate_num};\n\nfn visit_view_item(e: env, i: @ast::view_item) {\n alt i.node {\n ast::view_item_use(ident, meta_items, id) {\n let cnum = resolve_crate(e, ident, meta_items, i.span);\n cstore::add_use_stmt_cnum(e.sess.get_cstore(), id, cnum);\n }\n _ { }\n }\n}\n\nfn visit_item(e: env, i: @ast::item) {\n alt i.node {\n ast::item_native_mod(m) {\n if m.abi != ast::native_abi_rust && m.abi != ast::native_abi_cdecl &&\n m.abi != ast::native_abi_c_stack_cdecl &&\n m.abi != ast::native_abi_c_stack_stdcall {\n ret;\n }\n let cstore = e.sess.get_cstore();\n if !cstore::add_used_library(cstore, m.native_name) { ret; }\n for a: ast::attribute in\n attr::find_attrs_by_name(i.attrs, \"link_args\") {\n\n alt attr::get_meta_item_value_str(attr::attr_meta(a)) {\n some(linkarg) { cstore::add_used_link_args(cstore, linkarg); }\n none. {\/* fallthrough *\/ }\n }\n }\n }\n _ { }\n }\n}\n\n\/\/ A diagnostic function for dumping crate metadata to an output stream\nfn list_file_metadata(path: str, out: io::writer) {\n alt get_metadata_section(path) {\n option::some(bytes) { decoder::list_crate_metadata(bytes, out); }\n option::none. {\n out.write_str(\"Could not find metadata in \" + path + \".\\n\");\n }\n }\n}\n\nfn metadata_matches(crate_data: @[u8], metas: [@ast::meta_item]) -> bool {\n let attrs = decoder::get_crate_attributes(crate_data);\n let linkage_metas = attr::find_linkage_metas(attrs);\n\n log #fmt[\"matching %u metadata requirements against %u items\",\n vec::len(metas), vec::len(linkage_metas)];\n\n log #fmt(\"crate metadata:\");\n for have: @ast::meta_item in linkage_metas {\n log #fmt(\" %s\", pprust::meta_item_to_str(*have));\n }\n\n for needed: @ast::meta_item in metas {\n log #fmt[\"looking for %s\", pprust::meta_item_to_str(*needed)];\n if !attr::contains(linkage_metas, needed) {\n log #fmt[\"missing %s\", pprust::meta_item_to_str(*needed)];\n ret false;\n }\n }\n ret true;\n}\n\nfn default_native_lib_naming(sess: session::session, static: bool) ->\n {prefix: str, suffix: str} {\n if static { ret {prefix: \"lib\", suffix: \".rlib\"}; }\n alt sess.get_targ_cfg().os {\n session::os_win32. { ret {prefix: \"\", suffix: \".dll\"}; }\n session::os_macos. { ret {prefix: \"lib\", suffix: \".dylib\"}; }\n session::os_linux. 
{ ret {prefix: \"lib\", suffix: \".so\"}; }\n }\n}\n\nfn find_library_crate(sess: session::session, ident: ast::ident,\n metas: [@ast::meta_item])\n -> option::t<{ident: str, data: @[u8]}> {\n\n attr::require_unique_names(sess, metas);\n\n let crate_name =\n {\n let name_items = attr::find_meta_items_by_name(metas, \"name\");\n alt vec::last(name_items) {\n some(i) {\n alt attr::get_meta_item_value_str(i) {\n some(n) { n }\n \/\/ FIXME: Probably want a warning here since the user\n \/\/ is using the wrong type of meta item\n _ { ident }\n }\n }\n none. { ident }\n }\n };\n\n let nn = default_native_lib_naming(sess, sess.get_opts().static);\n let x =\n find_library_crate_aux(nn, crate_name, metas,\n sess.filesearch());\n if x != none || sess.get_opts().static { ret x; }\n let nn2 = default_native_lib_naming(sess, true);\n ret find_library_crate_aux(nn2, crate_name, metas,\n sess.filesearch());\n}\n\nfn find_library_crate_aux(nn: {prefix: str, suffix: str}, crate_name: str,\n metas: [@ast::meta_item],\n filesearch: filesearch::filesearch) ->\n option::t<{ident: str, data: @[u8]}> {\n let prefix: str = nn.prefix + crate_name;\n let suffix: str = nn.suffix;\n\n ret filesearch::search(filesearch, { |path|\n log #fmt(\"inspecting file %s\", path);\n let f: str = fs::basename(path);\n if !(str::starts_with(f, prefix) && str::ends_with(f, suffix)) {\n log #fmt[\"skipping %s, doesn't look like %s*%s\", path, prefix,\n suffix];\n option::none\n } else {\n log #fmt(\"%s is a candidate\", path);\n alt get_metadata_section(path) {\n option::some(cvec) {\n if !metadata_matches(cvec, metas) {\n log #fmt[\"skipping %s, metadata doesn't match\", path];\n option::none\n } else {\n log #fmt[\"found %s with matching metadata\", path];\n option::some({ident: path, data: cvec})\n }\n }\n _ {\n log #fmt(\"could not load metadata for %s\", path);\n option::none\n }\n }\n }\n });\n}\n\nfn get_metadata_section(filename: str) -> option::t<@[u8]> unsafe {\n let mb = str::as_buf(filename, {|buf|\n llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)\n });\n if mb as int == 0 { ret option::none::<@[u8]>; }\n let of = mk_object_file(mb);\n let si = mk_section_iter(of.llof);\n while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {\n let name_buf = llvm::LLVMGetSectionName(si.llsi);\n let name = str::str_from_cstr(name_buf);\n if str::eq(name, x86::get_meta_sect_name()) {\n let cbuf = llvm::LLVMGetSectionContents(si.llsi);\n let csz = llvm::LLVMGetSectionSize(si.llsi);\n let cvbuf: *u8 = std::unsafe::reinterpret_cast(cbuf);\n ret option::some::<@[u8]>(@vec::unsafe::from_buf(cvbuf, csz));\n }\n llvm::LLVMMoveToNextSection(si.llsi);\n }\n ret option::none::<@[u8]>;\n}\n\nfn load_library_crate(sess: session::session, span: span, ident: ast::ident,\n metas: [@ast::meta_item])\n -> {ident: str, data: @[u8]} {\n\n\n alt find_library_crate(sess, ident, metas) {\n some(t) { ret t; }\n none. 
{\n sess.span_fatal(span, #fmt[\"can't find crate for '%s'\", ident]);\n }\n }\n}\n\nfn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item],\n span: span) -> ast::crate_num {\n if !e.crate_cache.contains_key(ident) {\n let cinfo =\n load_library_crate(e.sess, span, ident, metas);\n\n let cfilename = cinfo.ident;\n let cdata = cinfo.data;\n\n \/\/ Claim this crate number and cache it\n let cnum = e.next_crate_num;\n e.crate_cache.insert(ident, cnum);\n e.next_crate_num += 1;\n\n \/\/ Now resolve the crates referenced by this crate\n let cnum_map = resolve_crate_deps(e, cdata);\n\n let cmeta = {name: ident, data: cdata, cnum_map: cnum_map};\n\n let cstore = e.sess.get_cstore();\n cstore::set_crate_data(cstore, cnum, cmeta);\n cstore::add_used_crate_file(cstore, cfilename);\n ret cnum;\n } else { ret e.crate_cache.get(ident); }\n}\n\n\/\/ Go through the crate metadata and load any crates that it references\nfn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {\n log \"resolving deps of external crate\";\n \/\/ The map from crate numbers in the crate we're resolving to local crate\n \/\/ numbers\n let cnum_map = new_int_hash::();\n for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {\n let extrn_cnum = dep.cnum;\n let cname = dep.ident;\n log #fmt[\"resolving dep %s\", cname];\n if e.crate_cache.contains_key(cname) {\n log \"already have it\";\n \/\/ We've already seen this crate\n let local_cnum = e.crate_cache.get(cname);\n cnum_map.insert(extrn_cnum, local_cnum);\n } else {\n log \"need to load it\";\n \/\/ This is a new one so we've got to load it\n \/\/ FIXME: Need better error reporting than just a bogus span\n let fake_span = ast_util::dummy_sp();\n let local_cnum = resolve_crate(e, cname, [], fake_span);\n cnum_map.insert(extrn_cnum, local_cnum);\n }\n }\n ret cnum_map;\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C $RBUILD 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\nrustc: Find crates by matching the name metadata\/\/ Extracting metadata from crate files\n\nimport driver::session;\nimport syntax::{ast, ast_util};\nimport lib::llvm::{False, llvm, mk_object_file, mk_section_iter};\nimport front::attr;\nimport middle::resolve;\nimport syntax::visit;\nimport syntax::codemap::span;\nimport back::x86;\nimport util::{common, filesearch};\nimport std::{vec, str, fs, io, option};\nimport std::option::{none, some};\nimport std::map::{hashmap, new_int_hash};\nimport syntax::print::pprust;\nimport common::*;\n\nexport read_crates;\nexport list_file_metadata;\n\n\/\/ Traverses an AST, reading all the information about use'd crates and native\n\/\/ libraries necessary for later resolving, typechecking, linking, etc.\nfn read_crates(sess: session::session, crate: ast::crate) {\n let e =\n @{sess: sess,\n crate_cache: @std::map::new_str_hash::(),\n mutable next_crate_num: 1};\n let v =\n visit::mk_simple_visitor(@{visit_view_item:\n bind visit_view_item(e, _),\n visit_item: bind visit_item(e, _)\n with *visit::default_simple_visitor()});\n visit::visit_crate(crate, (), v);\n}\n\ntype env =\n @{sess: session::session,\n crate_cache: @hashmap,\n mutable next_crate_num: ast::crate_num};\n\nfn visit_view_item(e: env, i: @ast::view_item) {\n alt i.node {\n ast::view_item_use(ident, meta_items, id) {\n let cnum = resolve_crate(e, ident, meta_items, i.span);\n 
cstore::add_use_stmt_cnum(e.sess.get_cstore(), id, cnum);\n }\n _ { }\n }\n}\n\nfn visit_item(e: env, i: @ast::item) {\n alt i.node {\n ast::item_native_mod(m) {\n if m.abi != ast::native_abi_rust && m.abi != ast::native_abi_cdecl &&\n m.abi != ast::native_abi_c_stack_cdecl &&\n m.abi != ast::native_abi_c_stack_stdcall {\n ret;\n }\n let cstore = e.sess.get_cstore();\n if !cstore::add_used_library(cstore, m.native_name) { ret; }\n for a: ast::attribute in\n attr::find_attrs_by_name(i.attrs, \"link_args\") {\n\n alt attr::get_meta_item_value_str(attr::attr_meta(a)) {\n some(linkarg) { cstore::add_used_link_args(cstore, linkarg); }\n none. {\/* fallthrough *\/ }\n }\n }\n }\n _ { }\n }\n}\n\n\/\/ A diagnostic function for dumping crate metadata to an output stream\nfn list_file_metadata(path: str, out: io::writer) {\n alt get_metadata_section(path) {\n option::some(bytes) { decoder::list_crate_metadata(bytes, out); }\n option::none. {\n out.write_str(\"Could not find metadata in \" + path + \".\\n\");\n }\n }\n}\n\nfn metadata_matches(crate_data: @[u8], metas: [@ast::meta_item]) -> bool {\n let attrs = decoder::get_crate_attributes(crate_data);\n let linkage_metas = attr::find_linkage_metas(attrs);\n\n log #fmt[\"matching %u metadata requirements against %u items\",\n vec::len(metas), vec::len(linkage_metas)];\n\n log #fmt(\"crate metadata:\");\n for have: @ast::meta_item in linkage_metas {\n log #fmt(\" %s\", pprust::meta_item_to_str(*have));\n }\n\n for needed: @ast::meta_item in metas {\n log #fmt[\"looking for %s\", pprust::meta_item_to_str(*needed)];\n if !attr::contains(linkage_metas, needed) {\n log #fmt[\"missing %s\", pprust::meta_item_to_str(*needed)];\n ret false;\n }\n }\n ret true;\n}\n\nfn default_native_lib_naming(sess: session::session, static: bool) ->\n {prefix: str, suffix: str} {\n if static { ret {prefix: \"lib\", suffix: \".rlib\"}; }\n alt sess.get_targ_cfg().os {\n session::os_win32. { ret {prefix: \"\", suffix: \".dll\"}; }\n session::os_macos. { ret {prefix: \"lib\", suffix: \".dylib\"}; }\n session::os_linux. { ret {prefix: \"lib\", suffix: \".so\"}; }\n }\n}\n\nfn find_library_crate(sess: session::session, ident: ast::ident,\n metas: [@ast::meta_item])\n -> option::t<{ident: str, data: @[u8]}> {\n\n attr::require_unique_names(sess, metas);\n\n \/\/ Metadata \"name\" will be used to find the crate. 
Use `ident'\n \/\/ as \"name\" if the attribute is not explicitly specified\n if !attr::contains_name(metas, \"name\") {\n metas += [attr::mk_name_value_item_str(\"name\", ident)];\n }\n let nn = default_native_lib_naming(sess, sess.get_opts().static);\n let x =\n find_library_crate_aux(nn, metas, sess.filesearch());\n if x != none || sess.get_opts().static { ret x; }\n let nn2 = default_native_lib_naming(sess, true);\n ret find_library_crate_aux(nn2, metas, sess.filesearch());\n}\n\nfn find_library_crate_aux(nn: {prefix: str, suffix: str},\n metas: [@ast::meta_item],\n filesearch: filesearch::filesearch) ->\n option::t<{ident: str, data: @[u8]}> {\n let prefix: str = nn.prefix;\n let suffix: str = nn.suffix;\n\n ret filesearch::search(filesearch, { |path|\n log #fmt(\"inspecting file %s\", path);\n let f: str = fs::basename(path);\n if !(str::starts_with(f, prefix) && str::ends_with(f, suffix)) {\n log #fmt[\"skipping %s, doesn't look like %s*%s\", path, prefix,\n suffix];\n option::none\n } else {\n log #fmt(\"%s is a candidate\", path);\n alt get_metadata_section(path) {\n option::some(cvec) {\n if !metadata_matches(cvec, metas) {\n log #fmt[\"skipping %s, metadata doesn't match\", path];\n option::none\n } else {\n log #fmt[\"found %s with matching metadata\", path];\n option::some({ident: path, data: cvec})\n }\n }\n _ {\n log #fmt(\"could not load metadata for %s\", path);\n option::none\n }\n }\n }\n });\n}\n\nfn get_metadata_section(filename: str) -> option::t<@[u8]> unsafe {\n let mb = str::as_buf(filename, {|buf|\n llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)\n });\n if mb as int == 0 { ret option::none::<@[u8]>; }\n let of = mk_object_file(mb);\n let si = mk_section_iter(of.llof);\n while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {\n let name_buf = llvm::LLVMGetSectionName(si.llsi);\n let name = str::str_from_cstr(name_buf);\n if str::eq(name, x86::get_meta_sect_name()) {\n let cbuf = llvm::LLVMGetSectionContents(si.llsi);\n let csz = llvm::LLVMGetSectionSize(si.llsi);\n let cvbuf: *u8 = std::unsafe::reinterpret_cast(cbuf);\n ret option::some::<@[u8]>(@vec::unsafe::from_buf(cvbuf, csz));\n }\n llvm::LLVMMoveToNextSection(si.llsi);\n }\n ret option::none::<@[u8]>;\n}\n\nfn load_library_crate(sess: session::session, span: span, ident: ast::ident,\n metas: [@ast::meta_item])\n -> {ident: str, data: @[u8]} {\n\n\n alt find_library_crate(sess, ident, metas) {\n some(t) { ret t; }\n none. 
{\n sess.span_fatal(span, #fmt[\"can't find crate for '%s'\", ident]);\n }\n }\n}\n\nfn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item],\n span: span) -> ast::crate_num {\n if !e.crate_cache.contains_key(ident) {\n let cinfo =\n load_library_crate(e.sess, span, ident, metas);\n\n let cfilename = cinfo.ident;\n let cdata = cinfo.data;\n\n \/\/ Claim this crate number and cache it\n let cnum = e.next_crate_num;\n e.crate_cache.insert(ident, cnum);\n e.next_crate_num += 1;\n\n \/\/ Now resolve the crates referenced by this crate\n let cnum_map = resolve_crate_deps(e, cdata);\n\n let cmeta = {name: ident, data: cdata, cnum_map: cnum_map};\n\n let cstore = e.sess.get_cstore();\n cstore::set_crate_data(cstore, cnum, cmeta);\n cstore::add_used_crate_file(cstore, cfilename);\n ret cnum;\n } else { ret e.crate_cache.get(ident); }\n}\n\n\/\/ Go through the crate metadata and load any crates that it references\nfn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {\n log \"resolving deps of external crate\";\n \/\/ The map from crate numbers in the crate we're resolving to local crate\n \/\/ numbers\n let cnum_map = new_int_hash::();\n for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {\n let extrn_cnum = dep.cnum;\n let cname = dep.ident;\n log #fmt[\"resolving dep %s\", cname];\n if e.crate_cache.contains_key(cname) {\n log \"already have it\";\n \/\/ We've already seen this crate\n let local_cnum = e.crate_cache.get(cname);\n cnum_map.insert(extrn_cnum, local_cnum);\n } else {\n log \"need to load it\";\n \/\/ This is a new one so we've got to load it\n \/\/ FIXME: Need better error reporting than just a bogus span\n let fake_span = ast_util::dummy_sp();\n let local_cnum = resolve_crate(e, cname, [], fake_span);\n cnum_map.insert(extrn_cnum, local_cnum);\n }\n }\n ret cnum_map;\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C $RBUILD 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\n<|endoftext|>"} {"text":"issue\/28: first test<|endoftext|>"} {"text":"Cancellations.<|endoftext|>"} {"text":"Fix spacing<|endoftext|>"} {"text":"Create main.rsuse std::io;\nuse std::fs::{self, DirEntry};\nuse std::path::Path;\n\nfn visit_dirs(dir: &Path, cb: &Fn(&DirEntry) -> io::Result<()> ) -> io::Result<()> {\n if dir.is_dir() {\n for entry in fs::read_dir(dir)? 
{\n let entry = entry?;\n let path = entry.path();\n if path.is_dir() {\n visit_dirs(&path, cb)?;\n } else {\n cb(&entry)?;\n }\n }\n }\n Ok(())\n}\n\nfn visit_files(cb: &DirEntry) -> io::Result<()> {\n\tprintln!(\"{:?}\",cb.path()); \/\/file_name());\n\tOk(())\t\n}\n\n\n\n\nfn main() {\n let path = Path::new(\".\");\n visit_dirs(path, &visit_files);\n}\n<|endoftext|>"} {"text":"Fixed main.rs<|endoftext|>"} {"text":"Setup command arguments with getoptsextern crate getopts;\nuse getopts::Options;\nuse std::env;\n\nfn print_usage(opts: Options) {\n let brief = format!(\"Usage: listbotto DIR [options]\");\n print!(\"{}\", opts.usage(&brief));\n}\n\nfn main() {\n let args: Vec = env::args().collect();\n\n let mut opts = Options::new();\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => { m }\n Err(f) => { panic!(f.to_string()) }\n };\n\n if matches.opt_present(\"h\") {\n print_usage(opts);\n return;\n }\n\n print_usage(opts);\n return;\n}\n\n<|endoftext|>"} {"text":"Implements resolve_uri to also open Files<|endoftext|>"} {"text":"Formatting<|endoftext|>"} {"text":"Update main.rs<|endoftext|>"} {"text":"add convolution to buildpub mod convolution;\npub use self::convolution::*;\n<|endoftext|>"} {"text":"grab item population for terrain map<|endoftext|>"} {"text":"Improve textbox\/cursor logic with return, backspace, left_key<|endoftext|>"} {"text":"Add a few encoding testsextern crate opus;\n\n#[test]\nfn version_ascii() {\n\tprintln!(\"Opus version: {}\", opus::version());\n}\n\n\/\/ 48000Hz * 1 channel * 20 ms \/ 1000\nconst MONO_20MS: usize = 48000 * 1 * 20 \/ 1000;\n\n#[test]\nfn encode_mono() {\n\tlet mut encoder = opus::Encoder::new(48000, opus::Channels::Mono, opus::CodingMode::Audio).unwrap();\n\t\n\tlet mut output = [0; 256];\n\tlet len = encoder.encode(&[0_i16; MONO_20MS], &mut output).unwrap();\n\tassert_eq!(&output[..len], &[244, 255, 254]);\n\n\tlet len = encoder.encode(&[0_i16; MONO_20MS], &mut output).unwrap();\n\tassert_eq!(&output[..len], &[244, 255, 254]);\n\n\tlet len = encoder.encode(&[1_i16; MONO_20MS], &mut output).unwrap();\n\tassert!(len > 190 && len < 220);\n\n\tlet len = encoder.encode(&[0_i16; MONO_20MS], &mut output).unwrap();\n\tassert!(len > 170 && len < 190);\n}\n\n#[test]\nfn encode_stereo() {\n\tlet mut encoder = opus::Encoder::new(48000, opus::Channels::Stereo, opus::CodingMode::Audio).unwrap();\n\n\tlet mut output = [0; 512];\n\tlet len = encoder.encode(&[0_i16; 2 * MONO_20MS], &mut output).unwrap();\n\tassert_eq!(&output[..len], &[249, 255, 254, 255, 254]);\n\n\tlet len = encoder.encode(&[0_i16; 2 * MONO_20MS], &mut output).unwrap();\n\tassert_eq!(&output[..len], &[249, 255, 254, 255, 254]);\n\n\tlet len = encoder.encode(&[17_i16; 2 * MONO_20MS], &mut output).unwrap();\n\tassert!(len > 240);\n\n\tlet len = encoder.encode(&[0_i16; 2 * MONO_20MS], &mut output).unwrap();\n\tassert!(len > 240);\n\n\t\/\/ Very small buffer should still succeed\n\tlet len = encoder.encode(&[95_i16; 2 * MONO_20MS], &mut [0; 20]).unwrap();\n\tassert!(len < 20);\n}\n\n#[test]\nfn encode_bad_rate() {\n\tmatch opus::Encoder::new(48001, opus::Channels::Mono, opus::CodingMode::Audio) {\n\t\tOk(_) => panic!(\"Encoder::new did not return BadArg\"),\n\t\tErr(err) => assert_eq!(err.code(), opus::ErrorCode::BadArg),\n\t}\n}\n\n#[test]\nfn encode_bad_buffer() {\n\tlet mut encoder = opus::Encoder::new(48000, opus::Channels::Stereo, opus::CodingMode::Audio).unwrap();\n\tmatch encoder.encode(&[1_i16; 2 * MONO_20MS], &mut [0; 0]) 
{\n\t\tOk(_) => panic!(\"encode with 0-length buffer did not return BadArg\"),\n\t\tErr(err) => assert_eq!(err.code(), opus::ErrorCode::BadArg),\n\t}\n}\n<|endoftext|>"} {"text":"Remove `to_u32` method from `Kind`<|endoftext|>"} {"text":"output songs in extended m3u format<|endoftext|>"} {"text":"extern crate rusoto_codegen;\n\nuse std::env;\nuse std::path::Path;\n\nuse rusoto_codegen::{Service, generate};\n\n\/*\ngamelift\/2015-10-01\/service-2.json: \"protocol\":\"json\"\nsupport\/2013-04-15\/service-2.json: \"protocol\":\"json\"\n*\/\n\n\/\/ expand to use cfg!() so codegen only gets run for services\n\/\/ in the feautres list\nmacro_rules! services {\n ( $( [$name:expr, $date:expr] ),* ) => {\n {\n let mut services = Vec::new();\n $(\n if cfg!(feature = $name) {\n services.push(Service::new($name, $date));\n }\n )*\n services\n }\n }\n}\n\nfn main() {\n let out_dir = env::var_os(\"OUT_DIR\").expect(\"OUT_DIR not specified\");\n let out_path = Path::new(&out_dir);\n\n let services = services! {\n [\"acm\", \"2015-12-08\"],\n [\"cloudhsm\", \"2014-05-30\"],\n [\"cloudtrail\", \"2013-11-01\"],\n [\"codecommit\", \"2015-04-13\"],\n [\"codedeploy\", \"2014-10-06\"],\n [\"codepipeline\", \"2015-07-09\"],\n [\"cognito-identity\", \"2014-06-30\"],\n [\"config\", \"2014-11-12\"],\n [\"datapipeline\", \"2012-10-29\"],\n [\"devicefarm\", \"2015-06-23\"],\n [\"directconnect\", \"2012-10-25\"],\n [\"ds\", \"2015-04-16\"],\n [\"dynamodb\", \"2012-08-10\"],\n [\"dynamodbstreams\", \"2012-08-10\"],\n [\"ec2\", \"2015-10-01\"],\n [\"ecr\", \"2015-09-21\"],\n [\"ecs\", \"2014-11-13\"],\n [\"elastictranscoder\", \"2012-09-25\"],\n [\"emr\", \"2009-03-31\"],\n [\"events\", \"2014-02-03\"],\n [\"firehose\", \"2015-08-04\"],\n [\"inspector\", \"2016-02-16\"],\n [\"kinesis\", \"2013-12-02\"],\n [\"kms\", \"2014-11-01\"],\n [\"logs\", \"2014-03-28\"],\n [\"machinelearning\", \"2014-12-12\"],\n [\"marketplacecommerceanalytics\", \"2015-07-01\"],\n [\"opsworks\", \"2013-02-18\"],\n [\"route53domains\", \"2014-05-15\"],\n [\"sqs\", \"2012-11-05\"],\n [\"ssm\", \"2014-11-06\"],\n [\"storagegateway\", \"2013-06-30\"],\n [\"swf\", \"2012-01-25\"],\n [\"waf\", \"2015-08-24\"],\n [\"workspaces\", \"2015-04-08\"]\n };\n\n for service in services {\n generate(service, out_path);\n }\n\n\n println!(\"cargo:rerun-if-changed=codegen\");\n}\nfix typoextern crate rusoto_codegen;\n\nuse std::env;\nuse std::path::Path;\n\nuse rusoto_codegen::{Service, generate};\n\n\/*\ngamelift\/2015-10-01\/service-2.json: \"protocol\":\"json\"\nsupport\/2013-04-15\/service-2.json: \"protocol\":\"json\"\n*\/\n\n\/\/ expand to use cfg!() so codegen only gets run for services\n\/\/ in the features list\nmacro_rules! services {\n ( $( [$name:expr, $date:expr] ),* ) => {\n {\n let mut services = Vec::new();\n $(\n if cfg!(feature = $name) {\n services.push(Service::new($name, $date));\n }\n )*\n services\n }\n }\n}\n\nfn main() {\n let out_dir = env::var_os(\"OUT_DIR\").expect(\"OUT_DIR not specified\");\n let out_path = Path::new(&out_dir);\n\n let services = services! 
{\n [\"acm\", \"2015-12-08\"],\n [\"cloudhsm\", \"2014-05-30\"],\n [\"cloudtrail\", \"2013-11-01\"],\n [\"codecommit\", \"2015-04-13\"],\n [\"codedeploy\", \"2014-10-06\"],\n [\"codepipeline\", \"2015-07-09\"],\n [\"cognito-identity\", \"2014-06-30\"],\n [\"config\", \"2014-11-12\"],\n [\"datapipeline\", \"2012-10-29\"],\n [\"devicefarm\", \"2015-06-23\"],\n [\"directconnect\", \"2012-10-25\"],\n [\"ds\", \"2015-04-16\"],\n [\"dynamodb\", \"2012-08-10\"],\n [\"dynamodbstreams\", \"2012-08-10\"],\n [\"ec2\", \"2015-10-01\"],\n [\"ecr\", \"2015-09-21\"],\n [\"ecs\", \"2014-11-13\"],\n [\"elastictranscoder\", \"2012-09-25\"],\n [\"emr\", \"2009-03-31\"],\n [\"events\", \"2014-02-03\"],\n [\"firehose\", \"2015-08-04\"],\n [\"inspector\", \"2016-02-16\"],\n [\"kinesis\", \"2013-12-02\"],\n [\"kms\", \"2014-11-01\"],\n [\"logs\", \"2014-03-28\"],\n [\"machinelearning\", \"2014-12-12\"],\n [\"marketplacecommerceanalytics\", \"2015-07-01\"],\n [\"opsworks\", \"2013-02-18\"],\n [\"route53domains\", \"2014-05-15\"],\n [\"sqs\", \"2012-11-05\"],\n [\"ssm\", \"2014-11-06\"],\n [\"storagegateway\", \"2013-06-30\"],\n [\"swf\", \"2012-01-25\"],\n [\"waf\", \"2015-08-24\"],\n [\"workspaces\", \"2015-04-08\"]\n };\n\n for service in services {\n generate(service, out_path);\n }\n\n println!(\"cargo:rerun-if-changed=codegen\");\n}\n<|endoftext|>"} {"text":"add example task_workermod async_helpers;\n\nuse futures::FutureExt;\nuse std::io::Write;\nuse std::{error::Error, time::Duration};\nuse zeromq::{Socket, SocketRecv, SocketSend};\n\n#[async_helpers::main]\nasync fn main() -> Result<(), Box> {\n \/\/ Socket to receive messages on\n let mut receiver = zeromq::PullSocket::new();\n receiver.connect(\"tcp:\/\/127.0.0.1:5557\").await?;\n\n \/\/ Socket to send messages to\n let mut sender = zeromq::PushSocket::new();\n sender.connect(\"tcp:\/\/127.0.0.1:5558\").await?;\n\n \/\/ Socket for control input\n let mut controller = zeromq::SubSocket::new();\n controller.connect(\"tcp:\/\/127.0.0.1:5559\").await?;\n controller.subscribe(\"\").await?;\n\n \/\/ Process messages from receiver and controller\n loop {\n futures::select! 
{\n message = receiver.recv().fuse() => {\n \/\/ Process task\n let message = message.unwrap();\n let workload = String::from_utf8(message.get(0).unwrap().to_vec())?\n .parse()\n .expect(\"Couldn't parse u64 from data\");\n\n \/\/ Do the work\n async_helpers::sleep(Duration::from_millis(workload)).await;\n\n \/\/ Send results to sink\n sender.send(message).await?;\n\n \/\/ Simple progress indicator for the viewer\n print!(\".\");\n std::io::stdout().flush()?;\n },\n \/\/ Any waiting controller command acts as 'KILL'\n _kill = controller.recv().fuse() => {\n break\n }\n };\n }\n\n println!(\"Done\");\n receiver.close().await;\n sender.close().await;\n controller.close().await;\n Ok(())\n}\n<|endoftext|>"} {"text":"Enrich error types<|endoftext|>"} {"text":"Remove unnecessary clone().to_string() calls<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate cgmath;\n#[macro_use]\nextern crate gfx;\nextern crate gfx_core;\nextern crate gfx_window_glutin;\nextern crate glutin;\nextern crate gfx_gl as gl;\nextern crate gfx_device_gl;\n\nuse cgmath::{SquareMatrix, Matrix, Point3, Vector3, Matrix3, Matrix4};\nuse cgmath::{Transform, Vector4};\npub use gfx::format::{DepthStencil, Rgba8 as ColorFormat};\n\/\/use glfw::Context;\nuse gl::Gl;\nuse gl::types::*;\nuse std::mem;\nuse std::ptr;\nuse std::str;\nuse std::env;\nuse std::str::FromStr;\nuse std::iter::repeat;\nuse std::ffi::CString;\nuse std::time::{Duration, Instant};\nuse gfx_device_gl::{Resources as R, CommandBuffer as CB};\nuse gfx_core::Device;\n\ngfx_defines!{\n vertex Vertex {\n pos: [f32; 3] = \"a_Pos\",\n }\n\n pipeline pipe {\n vbuf: gfx::VertexBuffer = (),\n transform: gfx::Global<[[f32; 4]; 4]> = \"u_Transform\",\n out_color: gfx::RenderTarget = \"o_Color\",\n }\n}\n\nstatic VERTEX_SRC: &'static [u8] = b\"\n #version 150 core\n in vec3 a_Pos;\n uniform mat4 u_Transform;\n\n void main() {\n gl_Position = u_Transform * vec4(a_Pos, 1.0); \n }\n\";\n\nstatic FRAGMENT_SRC: &'static [u8] = b\"\n #version 150 core\n out vec4 o_Color;\n\n void main() {\n o_Color = vec4(1.0, 0.0, 0.0, 1.0);\n }\n\";\n\nstatic VERTEX_DATA: &'static [Vertex] = &[\n Vertex { pos: [-1.0, 0.0, -1.0] },\n Vertex { pos: [ 1.0, 0.0, -1.0] },\n Vertex { pos: [-1.0, 0.0, 1.0] },\n];\n\nconst CLEAR_COLOR: (f32, f32, f32, f32) = (0.3, 0.3, 0.3, 1.0);\n\n\/\/----------------------------------------\n\nfn transform(x: i16, y: i16, proj_view: &Matrix4) -> Matrix4 {\n let mut model = Matrix4::from(Matrix3::identity() * 0.05);\n model.w = Vector4::new(x as f32 * 0.10,\n 0f32,\n y as f32 * 0.10,\n 1f32);\n proj_view * model\n}\n\ntrait Renderer: Drop {\n fn render(&mut self, proj_view: &Matrix4);\n fn window(&mut self) -> &glutin::Window;\n}\n\n\n\nstruct GFX {\n dimension: i16,\n window: glutin::Window,\n device:gfx_device_gl::Device,\n encoder: gfx::Encoder,\n data: pipe::Data,\n pso: gfx::PipelineState,\n slice: gfx::Slice,\n}\n\nstruct GL {\n dimension: 
i16,\n window: glutin::Window,\n gl:Gl,\n trans_uniform:GLint,\n vs:GLuint,\n fs:GLuint,\n program:GLuint,\n vbo:GLuint,\n vao:GLuint,\n}\n\n\nimpl GFX {\n fn new(builder: glutin::WindowBuilder, dimension: i16) -> Self {\n use gfx::traits::FactoryExt;\n\n let (window, device, mut factory, main_color, _) =\n gfx_window_glutin::init::(builder);\n let encoder: gfx::Encoder<_,_> = factory.create_command_buffer().into();\n\n let pso = factory.create_pipeline_simple(\n VERTEX_SRC, FRAGMENT_SRC,\n pipe::new()\n ).unwrap();\n\n let (vbuf, slice) = factory.create_vertex_buffer_with_slice(VERTEX_DATA,());\n\n let data = pipe::Data {\n vbuf: vbuf,\n transform: cgmath::Matrix4::identity().into(),\n out_color: main_color,\n };\n\n GFX {\n window: window,\n dimension: dimension,\n device: device,\n encoder: encoder,\n data: data,\n pso: pso,\n slice: slice,\n }\n }\n}\n\nfn duration_to_f64(dur: Duration) -> f64 {\n dur.as_secs() as f64 + dur.subsec_nanos() as f64 \/ 1000_000_000.0\n}\n\nimpl Renderer for GFX {\n fn render(&mut self, proj_view: &Matrix4) {\n let start = Instant::now();\n self.encoder.clear(&self.data.out_color, [CLEAR_COLOR.0,\n CLEAR_COLOR.1,\n CLEAR_COLOR.2,\n CLEAR_COLOR.3]);\n\n for x in (-self.dimension) ..self.dimension {\n for y in (-self.dimension) ..self.dimension {\n self.data.transform = transform(x, y, proj_view).into();\n self.encoder.draw(&self.slice, &self.pso, &self.data);\n }\n }\n\n let pre_submit = start.elapsed();\n self.encoder.flush(&mut self.device);\n let post_submit = start.elapsed();\n self.window.swap_buffers().unwrap();\n self.device.cleanup();\n let swap = start.elapsed();\n\n println!(\"total time:\\t\\t{0:4.2}ms\", duration_to_f64(swap));\n println!(\"\\tcreate list:\\t{0:4.2}ms\", duration_to_f64(pre_submit));\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", duration_to_f64(post_submit - pre_submit));\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", duration_to_f64(swap - post_submit));\n }\n fn window(&mut self) -> &glutin::Window { &self.window }\n}\n\nimpl Drop for GFX {\n fn drop(&mut self) {\n }\n}\n\n\n\nimpl GL {\n fn new(builder: glutin::WindowBuilder, dimension: i16) -> Self {\n fn compile_shader (gl:&Gl, src: &[u8], ty: GLenum) -> GLuint {\n unsafe {\n let shader = gl.CreateShader(ty);\n \/\/ Attempt to compile the shader\n gl.ShaderSource(shader, 1,\n &(src.as_ptr() as *const i8),\n &(src.len() as GLint));\n gl.CompileShader(shader);\n\n \/\/ Get the compile status\n let mut status = gl::FALSE as GLint;\n gl.GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n \/\/ Fail on error\n if status != (gl::TRUE as GLint) {\n let mut len: GLint = 0;\n gl.GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n \/\/ allocate a buffer of size (len - 1) to skip the trailing null character\n let mut buf: Vec = repeat(0u8).take((len as usize).saturating_sub(1)).collect();\n gl.GetShaderInfoLog(shader, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ShaderInfoLog not valid utf8\"));\n }\n shader\n }\n };\n\n let window = builder.build().unwrap();\n unsafe { window.make_current().unwrap() };\n let gl = Gl::load_with(|s| window.get_proc_address(s) as *const _);\n \n \/\/ Create GLSL shaders\n let vs = compile_shader(&gl, VERTEX_SRC, gl::VERTEX_SHADER);\n let fs = compile_shader(&gl, FRAGMENT_SRC, gl::FRAGMENT_SHADER);\n\n \/\/ Link program\n let program;\n unsafe {\n program = gl.CreateProgram();\n gl.AttachShader(program, vs);\n gl.AttachShader(program, fs);\n gl.LinkProgram(program);\n \/\/ Get the link status\n 
let mut status = gl::FALSE as GLint;\n gl.GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n \/\/ Fail on error\n if status != (gl::TRUE as GLint) {\n let mut len: GLint = 0;\n gl.GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n \/\/ allocate a buffer of size (len - 1) to skip the trailing null character\n let mut buf: Vec = repeat(0u8).take((len as usize).saturating_sub(1)).collect();\n gl.GetProgramInfoLog(program, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ProgramInfoLog not valid utf8\"));\n }\n }\n\n let mut vao = 0;\n let mut vbo = 0;\n\n let trans_uniform;\n unsafe {\n \/\/ Create Vertex Array Object\n gl.GenVertexArrays(1, &mut vao);\n gl.BindVertexArray(vao);\n\n \/\/ Create a Vertex Buffer Object and copy the vertex data to it\n gl.GenBuffers(1, &mut vbo);\n gl.BindBuffer(gl::ARRAY_BUFFER, vbo);\n\n gl.BufferData(gl::ARRAY_BUFFER,\n (VERTEX_DATA.len() * mem::size_of::()) as GLsizeiptr,\n mem::transmute(&VERTEX_DATA[0]),\n gl::STATIC_DRAW);\n\n \/\/ Use shader program\n gl.UseProgram(program);\n let o_color = CString::new(\"o_Color\").unwrap();\n gl.BindFragDataLocation(program, 0, o_color.as_bytes_with_nul().as_ptr() as *const i8);\n\n \/\/ Specify the layout of the vertex data\n let a_pos = CString::new(\"a_Pos\").unwrap();\n gl.BindFragDataLocation(program, 0, a_pos.as_bytes_with_nul().as_ptr() as *const i8);\n\n let pos_attr = gl.GetAttribLocation(program, a_pos.as_ptr());\n gl.EnableVertexAttribArray(pos_attr as GLuint);\n gl.VertexAttribPointer(pos_attr as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n\n let u_transform = CString::new(\"u_Transform\").unwrap();\n trans_uniform = gl.GetUniformLocation(program, u_transform.as_bytes_with_nul().as_ptr() as *const i8)\n };\n\n GL {\n window: window, \n dimension: dimension,\n gl: gl,\n vs: vs,\n fs: fs,\n program: program,\n vbo: vbo,\n vao: vao,\n trans_uniform: trans_uniform,\n }\n }\n}\n\nfn duration_to_ms(dur: Duration) -> f64 {\n (dur.as_secs() * 1000) as f64 + dur.subsec_nanos() as f64 \/ 1000_000.0\n}\n\nimpl Renderer for GL {\n fn render(&mut self, proj_view: &Matrix4) {\n let start = Instant::now();\n\n \/\/ Clear the screen to black\n unsafe {\n self.gl.ClearColor(CLEAR_COLOR.0, CLEAR_COLOR.1, CLEAR_COLOR.2, CLEAR_COLOR.3);\n self.gl.Clear(gl::COLOR_BUFFER_BIT);\n }\n \n for x in (-self.dimension) ..self.dimension {\n for y in (-self.dimension) ..self.dimension {\n let mat:Matrix4 = transform(x, y, proj_view).into();\n\n unsafe {\n self.gl.UniformMatrix4fv(self.trans_uniform,\n 1,\n gl::FALSE,\n mat.as_ptr());\n self.gl.DrawArrays(gl::TRIANGLES, 0, 3);\n }\n\n }\n }\n\n let submit = start.elapsed();\n\n \/\/ Swap buffers\n self.window.swap_buffers().unwrap();\n let swap = start.elapsed();\n\n println!(\"total time:\\t\\t{0:4.2}ms\", duration_to_ms(swap));\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", duration_to_ms(submit));\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", duration_to_ms(swap - submit));\n }\n fn window(&mut self) -> &glutin::Window { &self.window }\n}\n\nimpl Drop for GL {\n fn drop(&mut self) {\n unsafe {\n self.gl.DeleteProgram(self.program);\n self.gl.DeleteShader(self.fs);\n self.gl.DeleteShader(self.vs);\n self.gl.DeleteBuffers(1, &self.vbo);\n self.gl.DeleteVertexArrays(1, &self.vao);\n }\n }\n}\n\nfn main() {\n let ref mut args = env::args();\n let args_count = env::args().count();\n if args_count == 1 {\n println!(\"gfx-perf [gl|gfx] \");\n return;\n }\n\n let mode = args.nth(1).unwrap();\n let count: i32 = 
if args_count == 3 {\n FromStr::from_str(&args.next().unwrap()).ok()\n } else {\n None\n }.unwrap_or(10000);\n\n let count = ((count as f64).sqrt() \/ 2.) as i16;\n\n let builder = glutin::WindowBuilder::new()\n .with_title(\"Performance example\".to_string())\n .with_dimensions(800, 600)\n .with_vsync();\n\n let mut r: Box;\n match mode.as_ref() {\n \"gfx\" => r = Box::new(GFX::new(builder, count)),\n \"gl\" => r = Box::new(GL::new(builder, count)),\n x => {\n panic!(\"{} is not a known mode\", x)\n }\n }\n\n let proj_view = {\n let view = Matrix4::look_at(\n Point3::new(0f32, 5.0, -5.0),\n Point3::new(0f32, 0.0, 0.0),\n Vector3::unit_z(),\n );\n\n let proj = {\n let aspect = {\n let (w, h) = r.window().get_inner_size().unwrap();\n w as f32 \/ h as f32\n };\n cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0)\n };\n proj * view\n };\n\n println!(\"count is {}\", count*count*4);\n\n 'main: loop {\n for event in r.window().poll_events() {\n match event {\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) |\n glutin::Event::Closed => break 'main,\n _ => {},\n }\n }\n r.render(&proj_view);\n }\n}\nAuto merge of #1013 - Mattbazooka:patch-1, r=kvark\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate cgmath;\n#[macro_use]\nextern crate gfx;\nextern crate gfx_core;\nextern crate gfx_window_glutin;\nextern crate glutin;\nextern crate gfx_gl as gl;\nextern crate gfx_device_gl;\n\nuse cgmath::{SquareMatrix, Matrix, Point3, Vector3, Matrix3, Matrix4};\nuse cgmath::{Transform, Vector4};\npub use gfx::format::{DepthStencil, Rgba8 as ColorFormat};\n\/\/use glfw::Context;\nuse gl::Gl;\nuse gl::types::*;\nuse std::mem;\nuse std::ptr;\nuse std::str;\nuse std::env;\nuse std::str::FromStr;\nuse std::iter::repeat;\nuse std::ffi::CString;\nuse std::time::{Duration, Instant};\nuse gfx_device_gl::{Resources as R, CommandBuffer as CB};\nuse gfx_core::Device;\n\ngfx_defines!{\n vertex Vertex {\n pos: [f32; 3] = \"a_Pos\",\n }\n\n pipeline pipe {\n vbuf: gfx::VertexBuffer = (),\n transform: gfx::Global<[[f32; 4]; 4]> = \"u_Transform\",\n out_color: gfx::RenderTarget = \"o_Color\",\n }\n}\n\nstatic VERTEX_SRC: &'static [u8] = b\"\n #version 150 core\n in vec3 a_Pos;\n uniform mat4 u_Transform;\n\n void main() {\n gl_Position = u_Transform * vec4(a_Pos, 1.0); \n }\n\";\n\nstatic FRAGMENT_SRC: &'static [u8] = b\"\n #version 150 core\n out vec4 o_Color;\n\n void main() {\n o_Color = vec4(1.0, 0.0, 0.0, 1.0);\n }\n\";\n\nstatic VERTEX_DATA: &'static [Vertex] = &[\n Vertex { pos: [-1.0, 0.0, -1.0] },\n Vertex { pos: [ 1.0, 0.0, -1.0] },\n Vertex { pos: [-1.0, 0.0, 1.0] },\n];\n\nconst CLEAR_COLOR: (f32, f32, f32, f32) = (0.3, 0.3, 0.3, 1.0);\n\n\/\/----------------------------------------\n\nfn transform(x: i16, y: i16, proj_view: &Matrix4) -> Matrix4 {\n let mut model = Matrix4::from(Matrix3::identity() * 0.05);\n model.w = Vector4::new(x as f32 * 0.10,\n 0f32,\n y as f32 * 0.10,\n 1f32);\n 
proj_view * model\n}\n\ntrait Renderer: Drop {\n fn render(&mut self, proj_view: &Matrix4);\n fn window(&mut self) -> &glutin::Window;\n}\n\n\n\nstruct GFX {\n dimension: i16,\n window: glutin::Window,\n device:gfx_device_gl::Device,\n encoder: gfx::Encoder,\n data: pipe::Data,\n pso: gfx::PipelineState,\n slice: gfx::Slice,\n}\n\nstruct GL {\n dimension: i16,\n window: glutin::Window,\n gl:Gl,\n trans_uniform:GLint,\n vs:GLuint,\n fs:GLuint,\n program:GLuint,\n vbo:GLuint,\n vao:GLuint,\n}\n\n\nimpl GFX {\n fn new(builder: glutin::WindowBuilder, dimension: i16) -> Self {\n use gfx::traits::FactoryExt;\n\n let (window, device, mut factory, main_color, _) =\n gfx_window_glutin::init::(builder);\n let encoder: gfx::Encoder<_,_> = factory.create_command_buffer().into();\n\n let pso = factory.create_pipeline_simple(\n VERTEX_SRC, FRAGMENT_SRC,\n pipe::new()\n ).unwrap();\n\n let (vbuf, slice) = factory.create_vertex_buffer_with_slice(VERTEX_DATA,());\n\n let data = pipe::Data {\n vbuf: vbuf,\n transform: cgmath::Matrix4::identity().into(),\n out_color: main_color,\n };\n\n GFX {\n window: window,\n dimension: dimension,\n device: device,\n encoder: encoder,\n data: data,\n pso: pso,\n slice: slice,\n }\n }\n}\n\nfn duration_to_f64(dur: Duration) -> f64 {\n dur.as_secs() as f64 + dur.subsec_nanos() as f64 \/ 1000_000_000.0\n}\n\nimpl Renderer for GFX {\n fn render(&mut self, proj_view: &Matrix4) {\n let start = Instant::now();\n self.encoder.clear(&self.data.out_color, [CLEAR_COLOR.0,\n CLEAR_COLOR.1,\n CLEAR_COLOR.2,\n CLEAR_COLOR.3]);\n\n for x in (-self.dimension) ..self.dimension {\n for y in (-self.dimension) ..self.dimension {\n self.data.transform = transform(x, y, proj_view).into();\n self.encoder.draw(&self.slice, &self.pso, &self.data);\n }\n }\n\n let pre_submit = start.elapsed();\n self.encoder.flush(&mut self.device);\n let post_submit = start.elapsed();\n self.window.swap_buffers().unwrap();\n self.device.cleanup();\n let swap = start.elapsed();\n\n println!(\"total time:\\t\\t{0:4.2}ms\", duration_to_ms(swap));\n println!(\"\\tcreate list:\\t{0:4.2}ms\", duration_to_ms(pre_submit));\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", duration_to_ms(post_submit - pre_submit));\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", duration_to_ms(swap - post_submit));\n }\n fn window(&mut self) -> &glutin::Window { &self.window }\n}\n\nimpl Drop for GFX {\n fn drop(&mut self) {\n }\n}\n\n\n\nimpl GL {\n fn new(builder: glutin::WindowBuilder, dimension: i16) -> Self {\n fn compile_shader (gl:&Gl, src: &[u8], ty: GLenum) -> GLuint {\n unsafe {\n let shader = gl.CreateShader(ty);\n \/\/ Attempt to compile the shader\n gl.ShaderSource(shader, 1,\n &(src.as_ptr() as *const i8),\n &(src.len() as GLint));\n gl.CompileShader(shader);\n\n \/\/ Get the compile status\n let mut status = gl::FALSE as GLint;\n gl.GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n \/\/ Fail on error\n if status != (gl::TRUE as GLint) {\n let mut len: GLint = 0;\n gl.GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n \/\/ allocate a buffer of size (len - 1) to skip the trailing null character\n let mut buf: Vec = repeat(0u8).take((len as usize).saturating_sub(1)).collect();\n gl.GetShaderInfoLog(shader, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ShaderInfoLog not valid utf8\"));\n }\n shader\n }\n };\n\n let window = builder.build().unwrap();\n unsafe { window.make_current().unwrap() };\n let gl = Gl::load_with(|s| window.get_proc_address(s) as *const _);\n \n 
\/\/ Create GLSL shaders\n let vs = compile_shader(&gl, VERTEX_SRC, gl::VERTEX_SHADER);\n let fs = compile_shader(&gl, FRAGMENT_SRC, gl::FRAGMENT_SHADER);\n\n \/\/ Link program\n let program;\n unsafe {\n program = gl.CreateProgram();\n gl.AttachShader(program, vs);\n gl.AttachShader(program, fs);\n gl.LinkProgram(program);\n \/\/ Get the link status\n let mut status = gl::FALSE as GLint;\n gl.GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n \/\/ Fail on error\n if status != (gl::TRUE as GLint) {\n let mut len: GLint = 0;\n gl.GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n \/\/ allocate a buffer of size (len - 1) to skip the trailing null character\n let mut buf: Vec = repeat(0u8).take((len as usize).saturating_sub(1)).collect();\n gl.GetProgramInfoLog(program, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ProgramInfoLog not valid utf8\"));\n }\n }\n\n let mut vao = 0;\n let mut vbo = 0;\n\n let trans_uniform;\n unsafe {\n \/\/ Create Vertex Array Object\n gl.GenVertexArrays(1, &mut vao);\n gl.BindVertexArray(vao);\n\n \/\/ Create a Vertex Buffer Object and copy the vertex data to it\n gl.GenBuffers(1, &mut vbo);\n gl.BindBuffer(gl::ARRAY_BUFFER, vbo);\n\n gl.BufferData(gl::ARRAY_BUFFER,\n (VERTEX_DATA.len() * mem::size_of::()) as GLsizeiptr,\n mem::transmute(&VERTEX_DATA[0]),\n gl::STATIC_DRAW);\n\n \/\/ Use shader program\n gl.UseProgram(program);\n let o_color = CString::new(\"o_Color\").unwrap();\n gl.BindFragDataLocation(program, 0, o_color.as_bytes_with_nul().as_ptr() as *const i8);\n\n \/\/ Specify the layout of the vertex data\n let a_pos = CString::new(\"a_Pos\").unwrap();\n gl.BindFragDataLocation(program, 0, a_pos.as_bytes_with_nul().as_ptr() as *const i8);\n\n let pos_attr = gl.GetAttribLocation(program, a_pos.as_ptr());\n gl.EnableVertexAttribArray(pos_attr as GLuint);\n gl.VertexAttribPointer(pos_attr as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n\n let u_transform = CString::new(\"u_Transform\").unwrap();\n trans_uniform = gl.GetUniformLocation(program, u_transform.as_bytes_with_nul().as_ptr() as *const i8)\n };\n\n GL {\n window: window, \n dimension: dimension,\n gl: gl,\n vs: vs,\n fs: fs,\n program: program,\n vbo: vbo,\n vao: vao,\n trans_uniform: trans_uniform,\n }\n }\n}\n\nfn duration_to_ms(dur: Duration) -> f64 {\n (dur.as_secs() * 1000) as f64 + dur.subsec_nanos() as f64 \/ 1000_000.0\n}\n\nimpl Renderer for GL {\n fn render(&mut self, proj_view: &Matrix4) {\n let start = Instant::now();\n\n \/\/ Clear the screen to black\n unsafe {\n self.gl.ClearColor(CLEAR_COLOR.0, CLEAR_COLOR.1, CLEAR_COLOR.2, CLEAR_COLOR.3);\n self.gl.Clear(gl::COLOR_BUFFER_BIT);\n }\n \n for x in (-self.dimension) ..self.dimension {\n for y in (-self.dimension) ..self.dimension {\n let mat:Matrix4 = transform(x, y, proj_view).into();\n\n unsafe {\n self.gl.UniformMatrix4fv(self.trans_uniform,\n 1,\n gl::FALSE,\n mat.as_ptr());\n self.gl.DrawArrays(gl::TRIANGLES, 0, 3);\n }\n\n }\n }\n\n let submit = start.elapsed();\n\n \/\/ Swap buffers\n self.window.swap_buffers().unwrap();\n let swap = start.elapsed();\n\n println!(\"total time:\\t\\t{0:4.2}ms\", duration_to_ms(swap));\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", duration_to_ms(submit));\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", duration_to_ms(swap - submit));\n }\n fn window(&mut self) -> &glutin::Window { &self.window }\n}\n\nimpl Drop for GL {\n fn drop(&mut self) {\n unsafe {\n self.gl.DeleteProgram(self.program);\n 
self.gl.DeleteShader(self.fs);\n self.gl.DeleteShader(self.vs);\n self.gl.DeleteBuffers(1, &self.vbo);\n self.gl.DeleteVertexArrays(1, &self.vao);\n }\n }\n}\n\nfn main() {\n let ref mut args = env::args();\n let args_count = env::args().count();\n if args_count == 1 {\n println!(\"gfx-perf [gl|gfx] \");\n return;\n }\n\n let mode = args.nth(1).unwrap();\n let count: i32 = if args_count == 3 {\n FromStr::from_str(&args.next().unwrap()).ok()\n } else {\n None\n }.unwrap_or(10000);\n\n let count = ((count as f64).sqrt() \/ 2.) as i16;\n\n let builder = glutin::WindowBuilder::new()\n .with_title(\"Performance example\".to_string())\n .with_dimensions(800, 600)\n .with_vsync();\n\n let mut r: Box;\n match mode.as_ref() {\n \"gfx\" => r = Box::new(GFX::new(builder, count)),\n \"gl\" => r = Box::new(GL::new(builder, count)),\n x => {\n panic!(\"{} is not a known mode\", x)\n }\n }\n\n let proj_view = {\n let view = Matrix4::look_at(\n Point3::new(0f32, 5.0, -5.0),\n Point3::new(0f32, 0.0, 0.0),\n Vector3::unit_z(),\n );\n\n let proj = {\n let aspect = {\n let (w, h) = r.window().get_inner_size().unwrap();\n w as f32 \/ h as f32\n };\n cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0)\n };\n proj * view\n };\n\n println!(\"count is {}\", count*count*4);\n\n 'main: loop {\n for event in r.window().poll_events() {\n match event {\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) |\n glutin::Event::Closed => break 'main,\n _ => {},\n }\n }\n r.render(&proj_view);\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Methods for the various MIR types. These are intended for use after\n * building is complete.\n *\/\n\nuse mir::*;\nuse ty::subst::{Subst, Substs};\nuse ty::{self, AdtDef, Ty, TyCtxt};\nuse ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};\nuse hir;\nuse ty::util::IntTypeExt;\n\n#[derive(Copy, Clone, Debug)]\npub enum LvalueTy<'tcx> {\n \/\/\/ Normal type.\n Ty { ty: Ty<'tcx> },\n\n \/\/\/ Downcast to a particular variant of an enum.\n Downcast { adt_def: &'tcx AdtDef,\n substs: &'tcx Substs<'tcx>,\n variant_index: usize },\n}\n\nimpl<'a, 'gcx, 'tcx> LvalueTy<'tcx> {\n pub fn from_ty(ty: Ty<'tcx>) -> LvalueTy<'tcx> {\n LvalueTy::Ty { ty: ty }\n }\n\n pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {\n match *self {\n LvalueTy::Ty { ty } =>\n ty,\n LvalueTy::Downcast { adt_def, substs, variant_index: _ } =>\n tcx.mk_adt(adt_def, substs),\n }\n }\n\n pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>,\n elem: &LvalueElem<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *elem {\n ProjectionElem::Deref => {\n let ty = self.to_ty(tcx)\n .builtin_deref(true, ty::LvaluePreference::NoPreference)\n .unwrap_or_else(|| {\n bug!(\"deref projection of non-dereferencable ty {:?}\", self)\n })\n .ty;\n LvalueTy::Ty {\n ty,\n }\n }\n ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_index().unwrap()\n },\n ProjectionElem::Subslice { from, to } => {\n let ty = self.to_ty(tcx);\n LvalueTy::Ty {\n ty: match ty.sty {\n ty::TyArray(inner, size) => {\n tcx.mk_array(inner, size-(from as usize)-(to as usize))\n }\n ty::TySlice(..) 
=> ty,\n _ => {\n bug!(\"cannot subslice non-array type: `{:?}`\", self)\n }\n }\n }\n }\n ProjectionElem::Downcast(adt_def1, index) =>\n match self.to_ty(tcx).sty {\n ty::TyAdt(adt_def, substs) => {\n assert!(adt_def.is_enum());\n assert!(index < adt_def.variants.len());\n assert_eq!(adt_def, adt_def1);\n LvalueTy::Downcast { adt_def,\n substs,\n variant_index: index }\n }\n _ => {\n bug!(\"cannot downcast non-ADT type: `{:?}`\", self)\n }\n },\n ProjectionElem::Field(_, fty) => LvalueTy::Ty { ty: fty }\n }\n }\n}\n\nimpl<'tcx> TypeFoldable<'tcx> for LvalueTy<'tcx> {\n fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {\n match *self {\n LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.fold_with(folder) },\n LvalueTy::Downcast { adt_def, substs, variant_index } => {\n LvalueTy::Downcast {\n adt_def,\n substs: substs.fold_with(folder),\n variant_index,\n }\n }\n }\n }\n\n fn super_visit_with>(&self, visitor: &mut V) -> bool {\n match *self {\n LvalueTy::Ty { ty } => ty.visit_with(visitor),\n LvalueTy::Downcast { substs, .. } => substs.visit_with(visitor)\n }\n }\n}\n\nimpl<'tcx> Lvalue<'tcx> {\n pub fn ty<'a, 'gcx, D: HasLocalDecls<'tcx>>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> LvalueTy<'tcx> {\n match *self {\n Lvalue::Local(index) =>\n LvalueTy::Ty { ty: local_decls.local_decls()[index].ty },\n Lvalue::Static(ref data) =>\n LvalueTy::Ty { ty: data.ty },\n Lvalue::Projection(ref proj) =>\n proj.base.ty(local_decls, tcx).projection_ty(tcx, &proj.elem),\n }\n }\n}\n\nimpl<'tcx> Rvalue<'tcx> {\n pub fn ty<'a, 'gcx, D: HasLocalDecls<'tcx>>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>\n {\n match *self {\n Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),\n Rvalue::Repeat(ref operand, ref count) => {\n let op_ty = operand.ty(local_decls, tcx);\n let count = count.as_u64(tcx.sess.target.uint_type);\n assert_eq!(count as usize as u64, count);\n tcx.mk_array(op_ty, count as usize)\n }\n Rvalue::Ref(reg, bk, ref lv) => {\n let lv_ty = lv.ty(local_decls, tcx).to_ty(tcx);\n tcx.mk_ref(reg,\n ty::TypeAndMut {\n ty: lv_ty,\n mutbl: bk.to_mutbl_lossy()\n }\n )\n }\n Rvalue::Len(..) 
=> tcx.types.usize,\n Rvalue::Cast(.., ty) => ty,\n Rvalue::BinaryOp(op, ref lhs, ref rhs) => {\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n op.ty(tcx, lhs_ty, rhs_ty)\n }\n Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n let ty = op.ty(tcx, lhs_ty, rhs_ty);\n tcx.intern_tup(&[ty, tcx.types.bool], false)\n }\n Rvalue::UnaryOp(UnOp::Not, ref operand) |\n Rvalue::UnaryOp(UnOp::Neg, ref operand) => {\n operand.ty(local_decls, tcx)\n }\n Rvalue::Discriminant(ref lval) => {\n let ty = lval.ty(local_decls, tcx).to_ty(tcx);\n if let ty::TyAdt(adt_def, _) = ty.sty {\n adt_def.repr.discr_type().to_ty(tcx)\n } else {\n \/\/ Undefined behaviour, bug for now; may want to return something for\n \/\/ the `discriminant` intrinsic later.\n bug!(\"Rvalue::Discriminant on Lvalue of type {:?}\", ty);\n }\n }\n Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),\n Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,\n Rvalue::Aggregate(ref ak, ref ops) => {\n match **ak {\n AggregateKind::Array(ty) => {\n tcx.mk_array(ty, ops.len())\n }\n AggregateKind::Tuple => {\n tcx.mk_tup(\n ops.iter().map(|op| op.ty(local_decls, tcx)),\n false\n )\n }\n AggregateKind::Adt(def, _, substs, _) => {\n tcx.type_of(def.did).subst(tcx, substs)\n }\n AggregateKind::Closure(did, substs) => {\n tcx.mk_closure_from_closure_substs(did, substs)\n }\n }\n }\n }\n }\n}\n\nimpl<'tcx> Operand<'tcx> {\n pub fn ty<'a, 'gcx, D: HasLocalDecls<'tcx>>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {\n match self {\n &Operand::Consume(ref l) => l.ty(local_decls, tcx).to_ty(tcx),\n &Operand::Constant(ref c) => c.ty,\n }\n }\n}\n\nimpl<'tcx> BinOp {\n pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,\n lhs_ty: Ty<'tcx>,\n rhs_ty: Ty<'tcx>)\n -> Ty<'tcx> {\n \/\/ FIXME: handle SIMD correctly\n match self {\n &BinOp::Add | &BinOp::Sub | &BinOp::Mul | &BinOp::Div | &BinOp::Rem |\n &BinOp::BitXor | &BinOp::BitAnd | &BinOp::BitOr => {\n \/\/ these should be integers or floats of the same size.\n assert_eq!(lhs_ty, rhs_ty);\n lhs_ty\n }\n &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {\n lhs_ty \/\/ lhs_ty can be != rhs_ty\n }\n &BinOp::Eq | &BinOp::Lt | &BinOp::Le |\n &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {\n tcx.types.bool\n }\n }\n }\n}\n\nimpl BorrowKind {\n pub fn to_mutbl_lossy(self) -> hir::Mutability {\n match self {\n BorrowKind::Mut => hir::MutMutable,\n BorrowKind::Shared => hir::MutImmutable,\n\n \/\/ We have no type corresponding to a unique imm borrow, so\n \/\/ use `&mut`. It gives all the capabilities of an `&uniq`\n \/\/ and hence is a safe \"over approximation\".\n BorrowKind::Unique => hir::MutMutable,\n }\n }\n}\n\nimpl BinOp {\n pub fn to_hir_binop(self) -> hir::BinOp_ {\n match self {\n BinOp::Add => hir::BinOp_::BiAdd,\n BinOp::Sub => hir::BinOp_::BiSub,\n BinOp::Mul => hir::BinOp_::BiMul,\n BinOp::Div => hir::BinOp_::BiDiv,\n BinOp::Rem => hir::BinOp_::BiRem,\n BinOp::BitXor => hir::BinOp_::BiBitXor,\n BinOp::BitAnd => hir::BinOp_::BiBitAnd,\n BinOp::BitOr => hir::BinOp_::BiBitOr,\n BinOp::Shl => hir::BinOp_::BiShl,\n BinOp::Shr => hir::BinOp_::BiShr,\n BinOp::Eq => hir::BinOp_::BiEq,\n BinOp::Ne => hir::BinOp_::BiNe,\n BinOp::Lt => hir::BinOp_::BiLt,\n BinOp::Gt => hir::BinOp_::BiGt,\n BinOp::Le => hir::BinOp_::BiLe,\n BinOp::Ge => hir::BinOp_::BiGe,\n BinOp::Offset => unreachable!()\n }\n }\n}\nplease tidy by shortening lines\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Methods for the various MIR types. These are intended for use after\n * building is complete.\n *\/\n\nuse mir::*;\nuse ty::subst::{Subst, Substs};\nuse ty::{self, AdtDef, Ty, TyCtxt};\nuse ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};\nuse hir;\nuse ty::util::IntTypeExt;\n\n#[derive(Copy, Clone, Debug)]\npub enum LvalueTy<'tcx> {\n \/\/\/ Normal type.\n Ty { ty: Ty<'tcx> },\n\n \/\/\/ Downcast to a particular variant of an enum.\n Downcast { adt_def: &'tcx AdtDef,\n substs: &'tcx Substs<'tcx>,\n variant_index: usize },\n}\n\nimpl<'a, 'gcx, 'tcx> LvalueTy<'tcx> {\n pub fn from_ty(ty: Ty<'tcx>) -> LvalueTy<'tcx> {\n LvalueTy::Ty { ty: ty }\n }\n\n pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {\n match *self {\n LvalueTy::Ty { ty } =>\n ty,\n LvalueTy::Downcast { adt_def, substs, variant_index: _ } =>\n tcx.mk_adt(adt_def, substs),\n }\n }\n\n pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>,\n elem: &LvalueElem<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *elem {\n ProjectionElem::Deref => {\n let ty = self.to_ty(tcx)\n .builtin_deref(true, ty::LvaluePreference::NoPreference)\n .unwrap_or_else(|| {\n bug!(\"deref projection of non-dereferencable ty {:?}\", self)\n })\n .ty;\n LvalueTy::Ty {\n ty,\n }\n }\n ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_index().unwrap()\n },\n ProjectionElem::Subslice { from, to } => {\n let ty = self.to_ty(tcx);\n LvalueTy::Ty {\n ty: match ty.sty {\n ty::TyArray(inner, size) => {\n tcx.mk_array(inner, size-(from as usize)-(to as usize))\n }\n ty::TySlice(..) => ty,\n _ => {\n bug!(\"cannot subslice non-array type: `{:?}`\", self)\n }\n }\n }\n }\n ProjectionElem::Downcast(adt_def1, index) =>\n match self.to_ty(tcx).sty {\n ty::TyAdt(adt_def, substs) => {\n assert!(adt_def.is_enum());\n assert!(index < adt_def.variants.len());\n assert_eq!(adt_def, adt_def1);\n LvalueTy::Downcast { adt_def,\n substs,\n variant_index: index }\n }\n _ => {\n bug!(\"cannot downcast non-ADT type: `{:?}`\", self)\n }\n },\n ProjectionElem::Field(_, fty) => LvalueTy::Ty { ty: fty }\n }\n }\n}\n\nimpl<'tcx> TypeFoldable<'tcx> for LvalueTy<'tcx> {\n fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {\n match *self {\n LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.fold_with(folder) },\n LvalueTy::Downcast { adt_def, substs, variant_index } => {\n LvalueTy::Downcast {\n adt_def,\n substs: substs.fold_with(folder),\n variant_index,\n }\n }\n }\n }\n\n fn super_visit_with>(&self, visitor: &mut V) -> bool {\n match *self {\n LvalueTy::Ty { ty } => ty.visit_with(visitor),\n LvalueTy::Downcast { substs, .. 
} => substs.visit_with(visitor)\n }\n }\n}\n\nimpl<'tcx> Lvalue<'tcx> {\n pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> LvalueTy<'tcx>\n where D: HasLocalDecls<'tcx>\n {\n match *self {\n Lvalue::Local(index) =>\n LvalueTy::Ty { ty: local_decls.local_decls()[index].ty },\n Lvalue::Static(ref data) =>\n LvalueTy::Ty { ty: data.ty },\n Lvalue::Projection(ref proj) =>\n proj.base.ty(local_decls, tcx).projection_ty(tcx, &proj.elem),\n }\n }\n}\n\nimpl<'tcx> Rvalue<'tcx> {\n pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>\n where D: HasLocalDecls<'tcx>\n {\n match *self {\n Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),\n Rvalue::Repeat(ref operand, ref count) => {\n let op_ty = operand.ty(local_decls, tcx);\n let count = count.as_u64(tcx.sess.target.uint_type);\n assert_eq!(count as usize as u64, count);\n tcx.mk_array(op_ty, count as usize)\n }\n Rvalue::Ref(reg, bk, ref lv) => {\n let lv_ty = lv.ty(local_decls, tcx).to_ty(tcx);\n tcx.mk_ref(reg,\n ty::TypeAndMut {\n ty: lv_ty,\n mutbl: bk.to_mutbl_lossy()\n }\n )\n }\n Rvalue::Len(..) => tcx.types.usize,\n Rvalue::Cast(.., ty) => ty,\n Rvalue::BinaryOp(op, ref lhs, ref rhs) => {\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n op.ty(tcx, lhs_ty, rhs_ty)\n }\n Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n let ty = op.ty(tcx, lhs_ty, rhs_ty);\n tcx.intern_tup(&[ty, tcx.types.bool], false)\n }\n Rvalue::UnaryOp(UnOp::Not, ref operand) |\n Rvalue::UnaryOp(UnOp::Neg, ref operand) => {\n operand.ty(local_decls, tcx)\n }\n Rvalue::Discriminant(ref lval) => {\n let ty = lval.ty(local_decls, tcx).to_ty(tcx);\n if let ty::TyAdt(adt_def, _) = ty.sty {\n adt_def.repr.discr_type().to_ty(tcx)\n } else {\n \/\/ Undefined behaviour, bug for now; may want to return something for\n \/\/ the `discriminant` intrinsic later.\n bug!(\"Rvalue::Discriminant on Lvalue of type {:?}\", ty);\n }\n }\n Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),\n Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,\n Rvalue::Aggregate(ref ak, ref ops) => {\n match **ak {\n AggregateKind::Array(ty) => {\n tcx.mk_array(ty, ops.len())\n }\n AggregateKind::Tuple => {\n tcx.mk_tup(\n ops.iter().map(|op| op.ty(local_decls, tcx)),\n false\n )\n }\n AggregateKind::Adt(def, _, substs, _) => {\n tcx.type_of(def.did).subst(tcx, substs)\n }\n AggregateKind::Closure(did, substs) => {\n tcx.mk_closure_from_closure_substs(did, substs)\n }\n }\n }\n }\n }\n}\n\nimpl<'tcx> Operand<'tcx> {\n pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>\n where D: HasLocalDecls<'tcx>\n {\n match self {\n &Operand::Consume(ref l) => l.ty(local_decls, tcx).to_ty(tcx),\n &Operand::Constant(ref c) => c.ty,\n }\n }\n}\n\nimpl<'tcx> BinOp {\n pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,\n lhs_ty: Ty<'tcx>,\n rhs_ty: Ty<'tcx>)\n -> Ty<'tcx> {\n \/\/ FIXME: handle SIMD correctly\n match self {\n &BinOp::Add | &BinOp::Sub | &BinOp::Mul | &BinOp::Div | &BinOp::Rem |\n &BinOp::BitXor | &BinOp::BitAnd | &BinOp::BitOr => {\n \/\/ these should be integers or floats of the same size.\n assert_eq!(lhs_ty, rhs_ty);\n lhs_ty\n }\n &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {\n lhs_ty \/\/ lhs_ty can be != rhs_ty\n }\n &BinOp::Eq | &BinOp::Lt | &BinOp::Le |\n &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {\n tcx.types.bool\n }\n }\n }\n}\n\nimpl BorrowKind {\n pub fn 
to_mutbl_lossy(self) -> hir::Mutability {\n match self {\n BorrowKind::Mut => hir::MutMutable,\n BorrowKind::Shared => hir::MutImmutable,\n\n \/\/ We have no type corresponding to a unique imm borrow, so\n \/\/ use `&mut`. It gives all the capabilities of an `&uniq`\n \/\/ and hence is a safe \"over approximation\".\n BorrowKind::Unique => hir::MutMutable,\n }\n }\n}\n\nimpl BinOp {\n pub fn to_hir_binop(self) -> hir::BinOp_ {\n match self {\n BinOp::Add => hir::BinOp_::BiAdd,\n BinOp::Sub => hir::BinOp_::BiSub,\n BinOp::Mul => hir::BinOp_::BiMul,\n BinOp::Div => hir::BinOp_::BiDiv,\n BinOp::Rem => hir::BinOp_::BiRem,\n BinOp::BitXor => hir::BinOp_::BiBitXor,\n BinOp::BitAnd => hir::BinOp_::BiBitAnd,\n BinOp::BitOr => hir::BinOp_::BiBitOr,\n BinOp::Shl => hir::BinOp_::BiShl,\n BinOp::Shr => hir::BinOp_::BiShr,\n BinOp::Eq => hir::BinOp_::BiEq,\n BinOp::Ne => hir::BinOp_::BiNe,\n BinOp::Lt => hir::BinOp_::BiLt,\n BinOp::Gt => hir::BinOp_::BiGt,\n BinOp::Le => hir::BinOp_::BiLe,\n BinOp::Ge => hir::BinOp_::BiGe,\n BinOp::Offset => unreachable!()\n }\n }\n}\n<|endoftext|>"} {"text":"Not sure this is an error but this doesn't compile; eol problem?\n\t\t\r\n\/\/ țĝŖᚤ\n\/\/ ĭǿưrƒͷᛄđ\n\/* ᚠƂǜ *\/ \/* \/* ŪŖŔWⱬ *\/ *\/ \/\/ .ǕĈⱰȠÙâ\n\/* \/* \/* \/* DZæļƽŢȥɋ¥ *\/ *\/ *\/ *\/ \/* \/* Ȏ *\/ *\/ \/\/ sɋĜ\n\/* \/* \/* ᚾᚷçᚴᚣȑ¶ *\/ *\/ *\/ \/* \/* \/* ȵŌIJň *\/ *\/ *\/ \/* ɁȧƏœɀğƬ *\/ \/\/ ǏᚩɁżAȸ(Īĥ\n\/* ƬǞJ *\/ \/\/ YɎPÎᚭƗáƣǭ\n\/\/ \n\r\/\/ \nOⱰᛜã\n mod cvreuz { mod okax { } } mod pbsh5tu0q { mod a_ { mod iznremg9w6 { mod qo_gmsdfq2 { mod dh { } } } } } \/\/ ƞⱿƂƨ(Ⱦ\n \/\/ ţŦMŰƒ\n\/\/ ƖrŦųǤ\n\/* *\/ \/* ɉƸ *\/ \n\t mod aza6hu_jk { } \n<|endoftext|>"} {"text":"use core::ops::DerefMut;\n\nuse redox::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd);\n }\n })\n}\n\/* } Magic Macros *\/\n\npub struct Command {\n pub name: String,\n pub main: Box)>\n}\n\nimpl Command {\n pub fn vec() -> Vec {\n let mut commands: Vec = Vec::new();\n\n commands.push(Command {\n name: \"break\".to_string(),\n main: box |args: &Vec|{\n unsafe{\n asm!(\"int 3\" : : : : \"intel\");\n }\n }\n });\n\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec|{\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if first {\n first = false\n }else{\n echo = echo + \" \";\n }\n echo = echo + arg;\n },\n Option::None => ()\n }\n }\n println!(echo);\n }\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec|{\n match args.get(1) {\n Option::Some(arg) => OpenEvent{ url_string: arg.clone() }.trigger(),\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec|{\n match args.get(1) {\n Option::Some(arg) => {\n let path = arg.clone();\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n resource.read_to_end(&mut vec);\n\n let commands = String::from_utf8(&vec);\n for command in commands.split(\"\\n\".to_string()) {\n exec!(command);\n }\n },\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n for i in 2..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if i == 2 {\n vec.push_all(&arg.to_utf8())\n }else{\n vec.push_all(&(\" \".to_string() + arg).to_utf8())\n }\n },\n Option::None => vec = Vec::new()\n }\n }\n vec.push_all(&\"\\r\\n\\r\\n\".to_string().to_utf8());\n\n match resource.write(&vec.as_slice()) {\n Option::Some(size) => println!(\"Wrote \".to_string() + size + \" bytes\"),\n Option::None => println!(\"Failed to write\".to_string())\n }\n\n vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(size) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + ' ' + String::from_num_radix(*byte as usize, 16);\n }\n println!(line);\n },\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n 
});\n\n return commands;\n }\n}\n\npub struct Variable {\n pub name: String,\n pub value: String\n}\n\npub struct Mode {\n value: bool\n}\n\npub struct Application {\n commands: Vec,\n variables: Vec,\n modes: Vec\n}\n\nimpl Application {\n pub fn new() -> Application {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new()\n };\n }\n\n fn on_command(&mut self, command_string: &String){\n \/\/Comment\n if command_string[0] == '#' {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\".to_string() {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + '\\n' + &variable.name + \"=\" + &variable.value;\n }\n println!(&variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec = Vec::::new();\n for arg in command_string.split(\" \".to_string()) {\n if arg.len() > 0 {\n if arg[0] == '$' {\n let name = arg.substr(1, arg.len() - 1);\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n }else{\n args.push(arg);\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Option::Some(cmd) => {\n if *cmd == \"if\".to_string() {\n let mut value = false;\n\n match args.get(1) {\n Option::Some(left) => match args.get(2) {\n Option::Some(cmp) => match args.get(3) {\n Option::Some(right) => {\n if *cmp == \"==\".to_string() {\n value = *left == *right;\n }else if *cmp == \"!=\".to_string() {\n value = *left != *right;\n }else if *cmp == \">\".to_string() {\n value = left.to_num_signed() > right.to_num_signed();\n }else if *cmp == \">=\".to_string() {\n value = left.to_num_signed() >= right.to_num_signed();\n }else if *cmp == \"<\".to_string() {\n value = left.to_num_signed() < right.to_num_signed();\n }else if *cmp == \"<=\".to_string() {\n value = left.to_num_signed() <= right.to_num_signed();\n }else{\n println!(&(\"Unknown comparison: \".to_string() + cmp));\n }\n },\n Option::None => ()\n },\n Option::None => ()\n },\n Option::None => ()\n }\n\n self.modes.insert(0, Mode{\n value: value\n });\n return;\n }\n\n if *cmd == \"else\".to_string() {\n let mut syntax_error = false;\n match self.modes.get(0) {\n Option::Some(mode) => mode.value = !mode.value,\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: else found with no previous if\".to_string());\n }\n return;\n }\n\n if *cmd == \"fi\".to_string() {\n let mut syntax_error = false;\n match self.modes.remove(0) {\n Option::Some(_) => (),\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: fi found with no previous if\".to_string());\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if ! 
mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find(\"=\".to_string()) {\n Option::Some(i) => {\n let name = cmd.substr(0, i);\n let mut value = cmd.substr(i + 1, cmd.len() - i - 1);\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => value = value + ' ' + arg.clone(),\n Option::None => ()\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Option::Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n Option::None => break\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n }else{\n for variable in self.variables.iter() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable{\n name: name,\n value: value\n });\n }\n return;\n },\n Option::None => ()\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if command.name == *cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n let mut help = \"Commands:\".to_string();\n for command in self.commands.iter() {\n help = help + ' ' + &command.name;\n }\n println!(&help);\n },\n Option::None => ()\n }\n }\n\n fn main(&mut self){\n console_title(&\"Terminal\".to_string());\n\n while let Option::Some(command) = readln!() {\n println!(\"# \".to_string() + &command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main(){\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\nTerminal will run given scriptsuse core::ops::DerefMut;\n\nuse redox::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd);\n }\n })\n}\n\/* } Magic Macros *\/\n\npub struct Command {\n pub name: String,\n pub main: Box)>\n}\n\nimpl Command {\n pub fn vec() -> Vec {\n let mut commands: Vec = Vec::new();\n\n commands.push(Command {\n name: \"break\".to_string(),\n main: box |args: &Vec|{\n unsafe{\n asm!(\"int 3\" : : : : \"intel\");\n }\n }\n });\n\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec|{\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if first {\n first = false\n }else{\n echo = echo + \" \";\n }\n echo = echo + arg;\n },\n Option::None => ()\n }\n }\n println!(echo);\n }\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec|{\n match args.get(1) {\n Option::Some(arg) => OpenEvent{ url_string: arg.clone() }.trigger(),\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec|{\n match args.get(1) {\n Option::Some(arg) => {\n let path = arg.clone();\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n resource.read_to_end(&mut vec);\n\n let commands = String::from_utf8(&vec);\n for command in commands.split(\"\\n\".to_string()) {\n exec!(command);\n }\n },\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n for i in 
2..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if i == 2 {\n vec.push_all(&arg.to_utf8())\n }else{\n vec.push_all(&(\" \".to_string() + arg).to_utf8())\n }\n },\n Option::None => vec = Vec::new()\n }\n }\n vec.push_all(&\"\\r\\n\\r\\n\".to_string().to_utf8());\n\n match resource.write(&vec.as_slice()) {\n Option::Some(size) => println!(\"Wrote \".to_string() + size + \" bytes\"),\n Option::None => println!(\"Failed to write\".to_string())\n }\n\n vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(size) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + ' ' + String::from_num_radix(*byte as usize, 16);\n }\n println!(line);\n },\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n return commands;\n }\n}\n\npub struct Variable {\n pub name: String,\n pub value: String\n}\n\npub struct Mode {\n value: bool\n}\n\npub struct Application {\n commands: Vec,\n variables: Vec,\n modes: Vec\n}\n\nimpl Application {\n pub fn new() -> Application {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new()\n };\n }\n\n fn on_command(&mut self, command_string: &String){\n \/\/Comment\n if command_string[0] == '#' {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\".to_string() {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + '\\n' + &variable.name + \"=\" + &variable.value;\n }\n println!(&variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec = Vec::::new();\n for arg in command_string.split(\" \".to_string()) {\n if arg.len() > 0 {\n if arg[0] == '$' {\n let name = arg.substr(1, arg.len() - 1);\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n }else{\n args.push(arg);\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Option::Some(cmd) => {\n if *cmd == \"if\".to_string() {\n let mut value = false;\n\n match args.get(1) {\n Option::Some(left) => match args.get(2) {\n Option::Some(cmp) => match args.get(3) {\n Option::Some(right) => {\n if *cmp == \"==\".to_string() {\n value = *left == *right;\n }else if *cmp == \"!=\".to_string() {\n value = *left != *right;\n }else if *cmp == \">\".to_string() {\n value = left.to_num_signed() > right.to_num_signed();\n }else if *cmp == \">=\".to_string() {\n value = left.to_num_signed() >= right.to_num_signed();\n }else if *cmp == \"<\".to_string() {\n value = 
left.to_num_signed() < right.to_num_signed();\n }else if *cmp == \"<=\".to_string() {\n value = left.to_num_signed() <= right.to_num_signed();\n }else{\n println!(&(\"Unknown comparison: \".to_string() + cmp));\n }\n },\n Option::None => ()\n },\n Option::None => ()\n },\n Option::None => ()\n }\n\n self.modes.insert(0, Mode{\n value: value\n });\n return;\n }\n\n if *cmd == \"else\".to_string() {\n let mut syntax_error = false;\n match self.modes.get(0) {\n Option::Some(mode) => mode.value = !mode.value,\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: else found with no previous if\".to_string());\n }\n return;\n }\n\n if *cmd == \"fi\".to_string() {\n let mut syntax_error = false;\n match self.modes.remove(0) {\n Option::Some(_) => (),\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: fi found with no previous if\".to_string());\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if ! mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find(\"=\".to_string()) {\n Option::Some(i) => {\n let name = cmd.substr(0, i);\n let mut value = cmd.substr(i + 1, cmd.len() - i - 1);\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => value = value + ' ' + arg.clone(),\n Option::None => ()\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Option::Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n Option::None => break\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n }else{\n for variable in self.variables.iter() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable{\n name: name,\n value: value\n });\n }\n return;\n },\n Option::None => ()\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if command.name == *cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n let mut help = \"Commands:\".to_string();\n for command in self.commands.iter() {\n help = help + ' ' + &command.name;\n }\n println!(&help);\n },\n Option::None => ()\n }\n }\n\n fn main(&mut self){\n console_title(&\"Terminal\".to_string());\n \n if let Option::Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# \".to_string() + &command);\n self.on_command(&command);\n }\n\n while let Option::Some(command) = readln!() {\n println!(\"# \".to_string() + &command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main(){\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"Missing import.use std::io;\nuse std::fs;\nuse std::path::Path;\nuse std::collections::HashMap;\n\nuse csv;\nuse itertools::Itertools;\nuse bio::alignment::distance;\n\n#[derive(RustcDecodable)]\npub struct Record {\n pub feature: String,\n pub codeword: String,\n pub expressed: u8\n}\n\n\npub struct Codebook {\n inner: HashMap\n}\n\n\nimpl Codebook {\n pub fn neighbors(&self, feature: &str, dist: u8) -> u32 {\n self.inner.get(feature)\n .expect(&format!(\"Error: Feature {} not in codebook.\", feature))[dist as usize - 1]\n }\n}\n\n\npub struct Reader {\n inner: csv::Reader,\n dist: u8\n}\n\n\nimpl Reader {\n \/\/\/ Read from a given file path.\n pub fn from_file>(path: P, dist: u8) -> io::Result {\n fs::File::open(path).map(|f| Reader::from_reader(f, dist))\n }\n}\n\n\nimpl 
Reader {\n pub fn from_reader(rdr: R, dist: u8) -> Self {\n Reader {\n inner: csv::Reader::from_reader(rdr).delimiter(b'\\t'),\n dist: dist\n }\n }\n\n pub fn codebook(&mut self) -> Codebook {\n let records = self.inner.decode::().map(|record| record.unwrap()).collect_vec();\n let mut inner = HashMap::new();\n for record in records.iter() {\n inner.insert(record.feature.clone(), [0; 4]);\n }\n for a in records.iter() {\n let codeword = a.codeword.as_bytes();\n let neighbors = inner.get_mut(&a.feature).unwrap();\n for b in records.iter() {\n if b.feature != a.feature {\n let dist = distance::hamming(codeword, b.codeword.as_bytes()).unwrap();\n assert!(dist >= self.dist as u32, \"Unexpected hamming distance {} (>={} allowed).\", dist, self.dist);\n if dist <= 4 && b.expressed == 1 {\n neighbors[(dist - 1) as usize] += 1;\n }\n }\n }\n }\n Codebook { inner: inner }\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Traits for dynamic typing of any `'static` type (through runtime reflection)\n\/\/!\n\/\/! This module implements the `Any` trait, which enables dynamic typing\n\/\/! of any `'static` type through runtime reflection.\n\/\/!\n\/\/! `Any` itself can be used to get a `TypeId`, and has more features when used\n\/\/! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and\n\/\/! `as_ref` methods, to test if the contained value is of a given type, and to\n\/\/! get a reference to the inner value as a type. As`&mut Any`, there is also\n\/\/! the `as_mut` method, for getting a mutable reference to the inner value.\n\/\/! `Box` adds the `move` method, which will unwrap a `Box` from the\n\/\/! object. See the extension traits (`*Ext`) for the full details.\n\/\/!\n\/\/! Note that &Any is limited to testing whether a value is of a specified\n\/\/! concrete type, and cannot be used to test whether a type implements a trait.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! Consider a situation where we want to log out a value passed to a function.\n\/\/! We know the value we're working on implements Show, but we don't know its\n\/\/! concrete type. We want to give special treatment to certain types: in this\n\/\/! case printing out the length of String values prior to their value.\n\/\/! We don't know the concrete type of our value at compile time, so we need to\n\/\/! use runtime reflection instead.\n\/\/!\n\/\/! ```rust\n\/\/! use std::fmt::Show;\n\/\/! use std::any::{Any, AnyRefExt};\n\/\/!\n\/\/! \/\/ Logger function for any type that implements Show.\n\/\/! fn log(value: &T) {\n\/\/! let value_any = value as &Any;\n\/\/!\n\/\/! \/\/ try to convert our value to a String. If successful, we want to\n\/\/! \/\/ output the String's length as well as its value. If not, it's a\n\/\/! \/\/ different type: just print it out unadorned.\n\/\/! match value_any.downcast_ref::() {\n\/\/! Some(as_string) => {\n\/\/! println!(\"String ({}): {}\", as_string.len(), as_string);\n\/\/! }\n\/\/! None => {\n\/\/! println!(\"{}\", value);\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ This function wants to log its parameter out prior to doing work with it.\n\/\/! fn do_work(value: &T) {\n\/\/! log(value);\n\/\/! 
\/\/ ...do some other work\n\/\/! }\n\/\/!\n\/\/! fn main() {\n\/\/! let my_string = \"Hello World\".to_string();\n\/\/! do_work(&my_string);\n\/\/!\n\/\/! let my_i8: i8 = 100;\n\/\/! do_work(&my_i8);\n\/\/! }\n\/\/! ```\n\n#![stable]\n\nuse mem::{transmute, transmute_copy};\nuse option::{Option, Some, None};\nuse raw::TraitObject;\nuse intrinsics::TypeId;\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Any trait\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ The `Any` trait is implemented by all `'static` types, and can be used for\n\/\/\/ dynamic typing\n\/\/\/\n\/\/\/ Every type with no non-`'static` references implements `Any`, so `Any` can\n\/\/\/ be used as a trait object to emulate the effects dynamic typing.\n#[stable]\npub trait Any: 'static {\n \/\/\/ Get the `TypeId` of `self`\n #[stable]\n fn get_type_id(&self) -> TypeId;\n}\n\nimpl Any for T {\n fn get_type_id(&self) -> TypeId { TypeId::of::() }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Extension methods for Any trait objects.\n\/\/ Implemented as three extension traits so that the methods can be generic.\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ Extension methods for a referenced `Any` trait object\n#[unstable = \"this trait will not be necessary once DST lands, it will be a \\\n part of `impl Any`\"]\npub trait AnyRefExt<'a> {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable]\n fn is(self) -> bool;\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[unstable = \"naming conventions around acquiring references may change\"]\n fn downcast_ref(self) -> Option<&'a T>;\n}\n\n#[stable]\nimpl<'a> AnyRefExt<'a> for &'a Any {\n #[inline]\n fn is(self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.get_type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n #[inline]\n fn downcast_ref(self) -> Option<&'a T> {\n if self.is::() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\n\n\/\/\/ Extension methods for a mutable referenced `Any` trait object\n#[unstable = \"this trait will not be necessary once DST lands, it will be a \\\n part of `impl Any`\"]\npub trait AnyMutRefExt<'a> {\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[unstable = \"naming conventions around acquiring references may change\"]\n fn downcast_mut(self) -> Option<&'a mut T>;\n}\n\n#[stable]\nimpl<'a> AnyMutRefExt<'a> for &'a mut Any {\n #[inline]\n fn downcast_mut(self) -> Option<&'a mut T> {\n if self.is::() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\nMark Any::get_type_id 
as experimental\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Traits for dynamic typing of any `'static` type (through runtime reflection)\n\/\/!\n\/\/! This module implements the `Any` trait, which enables dynamic typing\n\/\/! of any `'static` type through runtime reflection.\n\/\/!\n\/\/! `Any` itself can be used to get a `TypeId`, and has more features when used\n\/\/! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and\n\/\/! `as_ref` methods, to test if the contained value is of a given type, and to\n\/\/! get a reference to the inner value as a type. As`&mut Any`, there is also\n\/\/! the `as_mut` method, for getting a mutable reference to the inner value.\n\/\/! `Box` adds the `move` method, which will unwrap a `Box` from the\n\/\/! object. See the extension traits (`*Ext`) for the full details.\n\/\/!\n\/\/! Note that &Any is limited to testing whether a value is of a specified\n\/\/! concrete type, and cannot be used to test whether a type implements a trait.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! Consider a situation where we want to log out a value passed to a function.\n\/\/! We know the value we're working on implements Show, but we don't know its\n\/\/! concrete type. We want to give special treatment to certain types: in this\n\/\/! case printing out the length of String values prior to their value.\n\/\/! We don't know the concrete type of our value at compile time, so we need to\n\/\/! use runtime reflection instead.\n\/\/!\n\/\/! ```rust\n\/\/! use std::fmt::Show;\n\/\/! use std::any::{Any, AnyRefExt};\n\/\/!\n\/\/! \/\/ Logger function for any type that implements Show.\n\/\/! fn log(value: &T) {\n\/\/! let value_any = value as &Any;\n\/\/!\n\/\/! \/\/ try to convert our value to a String. If successful, we want to\n\/\/! \/\/ output the String's length as well as its value. If not, it's a\n\/\/! \/\/ different type: just print it out unadorned.\n\/\/! match value_any.downcast_ref::() {\n\/\/! Some(as_string) => {\n\/\/! println!(\"String ({}): {}\", as_string.len(), as_string);\n\/\/! }\n\/\/! None => {\n\/\/! println!(\"{}\", value);\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ This function wants to log its parameter out prior to doing work with it.\n\/\/! fn do_work(value: &T) {\n\/\/! log(value);\n\/\/! \/\/ ...do some other work\n\/\/! }\n\/\/!\n\/\/! fn main() {\n\/\/! let my_string = \"Hello World\".to_string();\n\/\/! do_work(&my_string);\n\/\/!\n\/\/! let my_i8: i8 = 100;\n\/\/! do_work(&my_i8);\n\/\/! }\n\/\/! 
```\n\n#![stable]\n\nuse mem::{transmute, transmute_copy};\nuse option::{Option, Some, None};\nuse raw::TraitObject;\nuse intrinsics::TypeId;\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Any trait\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ The `Any` trait is implemented by all `'static` types, and can be used for\n\/\/\/ dynamic typing\n\/\/\/\n\/\/\/ Every type with no non-`'static` references implements `Any`, so `Any` can\n\/\/\/ be used as a trait object to emulate the effects dynamic typing.\n#[stable]\npub trait Any: 'static {\n \/\/\/ Get the `TypeId` of `self`\n #[experimental = \"this method will likely be replaced by an associated static\"]\n fn get_type_id(&self) -> TypeId;\n}\n\nimpl Any for T {\n fn get_type_id(&self) -> TypeId { TypeId::of::() }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Extension methods for Any trait objects.\n\/\/ Implemented as three extension traits so that the methods can be generic.\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ Extension methods for a referenced `Any` trait object\n#[unstable = \"this trait will not be necessary once DST lands, it will be a \\\n part of `impl Any`\"]\npub trait AnyRefExt<'a> {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable]\n fn is(self) -> bool;\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[unstable = \"naming conventions around acquiring references may change\"]\n fn downcast_ref(self) -> Option<&'a T>;\n}\n\n#[stable]\nimpl<'a> AnyRefExt<'a> for &'a Any {\n #[inline]\n fn is(self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.get_type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n #[inline]\n fn downcast_ref(self) -> Option<&'a T> {\n if self.is::() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\n\n\/\/\/ Extension methods for a mutable referenced `Any` trait object\n#[unstable = \"this trait will not be necessary once DST lands, it will be a \\\n part of `impl Any`\"]\npub trait AnyMutRefExt<'a> {\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[unstable = \"naming conventions around acquiring references may change\"]\n fn downcast_mut(self) -> Option<&'a mut T>;\n}\n\n#[stable]\nimpl<'a> AnyMutRefExt<'a> for &'a mut Any {\n #[inline]\n fn downcast_mut(self) -> Option<&'a mut T> {\n if self.is::() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"use crate::command_prelude::*;\n\nuse cargo::core::{GitReference, SourceId};\nuse cargo::ops;\nuse cargo::util::IntoUrl;\n\npub fn cli() -> 
App {\n subcommand(\"install\")\n .about(\"Install a Rust binary. Default location is $HOME\/.cargo\/bin\")\n .arg(opt(\"quiet\", \"No output printed to stdout\").short(\"q\"))\n .arg(Arg::with_name(\"crate\").empty_values(false).multiple(true))\n .arg(\n opt(\"version\", \"Specify a version to install\")\n .alias(\"vers\")\n .value_name(\"VERSION\")\n .requires(\"crate\"),\n )\n .arg(\n opt(\"git\", \"Git URL to install the specified crate from\")\n .value_name(\"URL\")\n .conflicts_with_all(&[\"path\", \"index\", \"registry\"]),\n )\n .arg(\n opt(\"branch\", \"Branch to use when installing from git\")\n .value_name(\"BRANCH\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"tag\", \"Tag to use when installing from git\")\n .value_name(\"TAG\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"rev\", \"Specific commit to use when installing from git\")\n .value_name(\"SHA\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"path\", \"Filesystem path to local crate to install\")\n .value_name(\"PATH\")\n .conflicts_with_all(&[\"git\", \"index\", \"registry\"]),\n )\n .arg(opt(\n \"list\",\n \"list all installed packages and their versions\",\n ))\n .arg_jobs()\n .arg(opt(\"force\", \"Force overwriting existing crates or binaries\").short(\"f\"))\n .arg(opt(\"no-track\", \"Do not save tracking information\"))\n .arg_features()\n .arg_profile(\"Install artifacts with the specified profile\")\n .arg(opt(\"debug\", \"Build in debug mode instead of release mode\"))\n .arg_targets_bins_examples(\n \"Install only the specified binary\",\n \"Install all binaries\",\n \"Install only the specified example\",\n \"Install all examples\",\n )\n .arg_target_triple(\"Build for the target triple\")\n .arg_target_dir()\n .arg(opt(\"root\", \"Directory to install packages into\").value_name(\"DIR\"))\n .arg(\n opt(\"index\", \"Registry index to install from\")\n .value_name(\"INDEX\")\n .requires(\"crate\")\n .conflicts_with_all(&[\"git\", \"path\", \"registry\"]),\n )\n .arg(\n opt(\"registry\", \"Registry to use\")\n .value_name(\"REGISTRY\")\n .requires(\"crate\")\n .conflicts_with_all(&[\"git\", \"path\", \"index\"]),\n )\n .after_help(\"Run `cargo help install` for more detailed information.\\n\")\n}\n\npub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {\n let workspace;\n if let Some(path) = args.value_of_path(\"path\", config) {\n config.reload_rooted_at(path)?;\n \/\/ Only provide worksapce information for local crate installation\n workspace = args.workspace(config).ok();\n } else {\n config.reload_rooted_at(config.home().clone().into_path_unlocked())?;\n workspace = None;\n }\n\n let mut compile_opts = args.compile_options(\n config,\n CompileMode::Build,\n workspace.as_ref(),\n ProfileChecking::Checked,\n )?;\n\n compile_opts.build_config.requested_profile =\n args.get_profile_name(config, \"release\", ProfileChecking::Checked)?;\n\n let krates = args\n .values_of(\"crate\")\n .unwrap_or_default()\n .collect::>();\n\n let mut from_cwd = false;\n\n let source = if let Some(url) = args.value_of(\"git\") {\n let url = url.into_url()?;\n let gitref = if let Some(branch) = args.value_of(\"branch\") {\n GitReference::Branch(branch.to_string())\n } else if let Some(tag) = args.value_of(\"tag\") {\n GitReference::Tag(tag.to_string())\n } else if let Some(rev) = args.value_of(\"rev\") {\n GitReference::Rev(rev.to_string())\n } else {\n GitReference::DefaultBranch\n };\n SourceId::for_git(&url, gitref)?\n } else if let Some(path) = args.value_of_path(\"path\", config) {\n SourceId::for_path(&path)?\n } else 
if krates.is_empty() {\n from_cwd = true;\n SourceId::for_path(config.cwd())?\n } else if let Some(registry) = args.registry(config)? {\n SourceId::alt_registry(config, ®istry)?\n } else if let Some(index) = args.value_of(\"index\") {\n SourceId::for_registry(&index.into_url()?)?\n } else {\n SourceId::crates_io(config)?\n };\n\n let version = args.value_of(\"version\");\n let root = args.value_of(\"root\");\n\n if args.is_present(\"list\") {\n ops::install_list(root, config)?;\n } else {\n ops::install(\n config,\n root,\n krates,\n source,\n from_cwd,\n version,\n &compile_opts,\n args.is_present(\"force\"),\n args.is_present(\"no-track\"),\n )?;\n }\n Ok(())\n}\nOnly `--path` and cwd crate installation need workspace infouse crate::command_prelude::*;\n\nuse cargo::core::{GitReference, SourceId};\nuse cargo::ops;\nuse cargo::util::IntoUrl;\n\npub fn cli() -> App {\n subcommand(\"install\")\n .about(\"Install a Rust binary. Default location is $HOME\/.cargo\/bin\")\n .arg(opt(\"quiet\", \"No output printed to stdout\").short(\"q\"))\n .arg(Arg::with_name(\"crate\").empty_values(false).multiple(true))\n .arg(\n opt(\"version\", \"Specify a version to install\")\n .alias(\"vers\")\n .value_name(\"VERSION\")\n .requires(\"crate\"),\n )\n .arg(\n opt(\"git\", \"Git URL to install the specified crate from\")\n .value_name(\"URL\")\n .conflicts_with_all(&[\"path\", \"index\", \"registry\"]),\n )\n .arg(\n opt(\"branch\", \"Branch to use when installing from git\")\n .value_name(\"BRANCH\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"tag\", \"Tag to use when installing from git\")\n .value_name(\"TAG\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"rev\", \"Specific commit to use when installing from git\")\n .value_name(\"SHA\")\n .requires(\"git\"),\n )\n .arg(\n opt(\"path\", \"Filesystem path to local crate to install\")\n .value_name(\"PATH\")\n .conflicts_with_all(&[\"git\", \"index\", \"registry\"]),\n )\n .arg(opt(\n \"list\",\n \"list all installed packages and their versions\",\n ))\n .arg_jobs()\n .arg(opt(\"force\", \"Force overwriting existing crates or binaries\").short(\"f\"))\n .arg(opt(\"no-track\", \"Do not save tracking information\"))\n .arg_features()\n .arg_profile(\"Install artifacts with the specified profile\")\n .arg(opt(\"debug\", \"Build in debug mode instead of release mode\"))\n .arg_targets_bins_examples(\n \"Install only the specified binary\",\n \"Install all binaries\",\n \"Install only the specified example\",\n \"Install all examples\",\n )\n .arg_target_triple(\"Build for the target triple\")\n .arg_target_dir()\n .arg(opt(\"root\", \"Directory to install packages into\").value_name(\"DIR\"))\n .arg(\n opt(\"index\", \"Registry index to install from\")\n .value_name(\"INDEX\")\n .requires(\"crate\")\n .conflicts_with_all(&[\"git\", \"path\", \"registry\"]),\n )\n .arg(\n opt(\"registry\", \"Registry to use\")\n .value_name(\"REGISTRY\")\n .requires(\"crate\")\n .conflicts_with_all(&[\"git\", \"path\", \"index\"]),\n )\n .after_help(\"Run `cargo help install` for more detailed information.\\n\")\n}\n\npub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {\n if let Some(path) = args.value_of_path(\"path\", config) {\n config.reload_rooted_at(path)?;\n } else {\n config.reload_rooted_at(config.home().clone().into_path_unlocked())?;\n }\n\n let krates = args\n .values_of(\"crate\")\n .unwrap_or_default()\n .collect::>();\n\n let mut from_cwd = false;\n\n let source = if let Some(url) = args.value_of(\"git\") {\n let url = url.into_url()?;\n let gitref 
= if let Some(branch) = args.value_of(\"branch\") {\n GitReference::Branch(branch.to_string())\n } else if let Some(tag) = args.value_of(\"tag\") {\n GitReference::Tag(tag.to_string())\n } else if let Some(rev) = args.value_of(\"rev\") {\n GitReference::Rev(rev.to_string())\n } else {\n GitReference::DefaultBranch\n };\n SourceId::for_git(&url, gitref)?\n } else if let Some(path) = args.value_of_path(\"path\", config) {\n SourceId::for_path(&path)?\n } else if krates.is_empty() {\n from_cwd = true;\n SourceId::for_path(config.cwd())?\n } else if let Some(registry) = args.registry(config)? {\n SourceId::alt_registry(config, ®istry)?\n } else if let Some(index) = args.value_of(\"index\") {\n SourceId::for_registry(&index.into_url()?)?\n } else {\n SourceId::crates_io(config)?\n };\n\n let version = args.value_of(\"version\");\n let root = args.value_of(\"root\");\n\n \/\/ We only provide worksapce information for local crate installation from\n \/\/ one of the following sources:\n \/\/ - From current working directory (only work for edition 2015).\n \/\/ - From a specific local file path.\n let workspace = if from_cwd || args.is_present(\"path\") {\n args.workspace(config).ok()\n } else {\n None\n };\n\n let mut compile_opts = args.compile_options(\n config,\n CompileMode::Build,\n workspace.as_ref(),\n ProfileChecking::Checked,\n )?;\n\n compile_opts.build_config.requested_profile =\n args.get_profile_name(config, \"release\", ProfileChecking::Checked)?;\n\n if args.is_present(\"list\") {\n ops::install_list(root, config)?;\n } else {\n ops::install(\n config,\n root,\n krates,\n source,\n from_cwd,\n version,\n &compile_opts,\n args.is_present(\"force\"),\n args.is_present(\"no-track\"),\n )?;\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"#[doc = \"A standard linked list\"];\n\nimport core::option;\nimport option::*;\nimport option::{some, none};\n\nenum list {\n cons(T, @list),\n nil,\n}\n\n#[doc = \"Create a list from a vector\"]\nfn from_vec(v: [const T]) -> list {\n *vec::foldr(v, @nil::, { |h, t| @cons(h, t) })\n}\n\n#[doc = \"\nLeft fold\n\nApplies `f` to `u` and the first element in the list, then applies `f` to the\nresult of the previous call and the second element, and so on, returning the\naccumulated result.\n\n# Arguments\n\n* ls - The list to fold\n* z - The initial value\n* f - The function to apply\n\"]\nfn foldl(z: T, ls: list, f: fn(T, U) -> T) -> T {\n let mut accum: T = z;\n iter(ls) {|elt| accum = f(accum, elt);}\n accum\n}\n\n#[doc = \"\nSearch for an element that matches a given predicate\n\nApply function `f` to each element of `v`, starting from the first.\nWhen function `f` returns true then an option containing the element\nis returned. 
If `f` matches no elements then none is returned.\n\"]\nfn find(ls: list, f: fn(T) -> bool) -> option {\n let mut ls = ls;\n loop {\n ls = alt ls {\n cons(hd, tl) {\n if f(hd) { ret some(hd); }\n *tl\n }\n nil { ret none; }\n }\n };\n}\n\n#[doc = \"Returns true if a list contains an element with the given value\"]\nfn has(ls: list, elt: T) -> bool {\n for each(ls) { |e|\n if e == elt { ret true; }\n }\n ret false;\n}\n\n#[doc = \"Returns true if the list is empty\"]\npure fn is_empty(ls: list) -> bool {\n alt ls {\n nil { true }\n _ { false }\n }\n}\n\n#[doc = \"Returns true if the list is not empty\"]\npure fn is_not_empty(ls: list) -> bool {\n ret !is_empty(ls);\n}\n\n#[doc = \"Returns the length of a list\"]\nfn len(ls: list) -> uint {\n let mut count = 0u;\n iter(ls) {|_e| count += 1u;}\n count\n}\n\n#[doc = \"Returns all but the first element of a list\"]\npure fn tail(ls: list) -> list {\n alt ls {\n cons(_, tl) { ret *tl; }\n nil { fail \"list empty\" }\n }\n}\n\n#[doc = \"Returns the first element of a list\"]\npure fn head(ls: list) -> T {\n alt check ls { cons(hd, _) { hd } }\n}\n\n#[doc = \"Appends one list to another\"]\npure fn append(l: list, m: list) -> list {\n alt l {\n nil { ret m; }\n cons(x, xs) { let rest = append(*xs, m); ret cons(x, @rest); }\n }\n}\n\n#[doc = \"Iterate over a list\"]\nfn iter(l: list, f: fn(T)) {\n alt l {\n cons(hd, tl) {\n f(hd);\n let mut cur = tl;\n loop {\n alt *cur {\n cons(hd, tl) {\n f(hd);\n cur = tl;\n }\n nil { break; }\n }\n }\n }\n nil {}\n }\n}\n\n#[doc = \"Iterate over a list\"]\nfn each(l: list, f: fn(T) -> bool) {\n alt l {\n cons(hd, tl) {\n if !f(hd) { ret; }\n let mut cur = tl;\n loop {\n alt *cur {\n cons(hd, tl) {\n if !f(hd) { ret; }\n cur = tl;\n }\n nil { break; }\n }\n }\n }\n nil {}\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn test_is_empty() {\n let empty : list::list = from_vec([]);\n let full1 = from_vec([1]);\n let full2 = from_vec(['r', 'u']);\n\n assert is_empty(empty);\n assert !is_empty(full1);\n assert !is_empty(full2);\n\n assert !is_not_empty(empty);\n assert is_not_empty(full1);\n assert is_not_empty(full2);\n }\n\n #[test]\n fn test_from_vec() {\n let l = from_vec([0, 1, 2]);\n\n assert (head(l) == 0);\n\n let tail_l = tail(l);\n assert (head(tail_l) == 1);\n\n let tail_tail_l = tail(tail_l);\n assert (head(tail_tail_l) == 2);\n }\n\n #[test]\n fn test_from_vec_empty() {\n let empty : list::list = from_vec([]);\n assert (empty == list::nil::);\n }\n\n #[test]\n fn test_from_vec_mut() {\n let l = from_vec([mut 0, 1, 2]);\n\n assert (head(l) == 0);\n\n let tail_l = tail(l);\n assert (head(tail_l) == 1);\n\n let tail_tail_l = tail(tail_l);\n assert (head(tail_tail_l) == 2);\n }\n\n #[test]\n fn test_foldl() {\n fn add(&&a: uint, &&b: int) -> uint { ret a + (b as uint); }\n let l = from_vec([0, 1, 2, 3, 4]);\n let empty = list::nil::;\n assert (list::foldl(0u, l, add) == 10u);\n assert (list::foldl(0u, empty, add) == 0u);\n }\n\n #[test]\n fn test_foldl2() {\n fn sub(&&a: int, &&b: int) -> int {\n a - b\n }\n let l = from_vec([1, 2, 3, 4]);\n assert (list::foldl(0, l, sub) == -10);\n }\n\n #[test]\n fn test_find_success() {\n fn match(&&i: int) -> bool { ret i == 2; }\n let l = from_vec([0, 1, 2]);\n assert (list::find(l, match) == option::some(2));\n }\n\n #[test]\n fn test_find_fail() {\n fn match(&&_i: int) -> bool { ret false; }\n let l = from_vec([0, 1, 2]);\n let empty = list::nil::;\n assert (list::find(l, match) == option::none::);\n assert (list::find(empty, match) == option::none::);\n }\n\n 
#[test]\n fn test_has() {\n let l = from_vec([5, 8, 6]);\n let empty = list::nil::;\n assert (list::has(l, 5));\n assert (!list::has(l, 7));\n assert (list::has(l, 8));\n assert (!list::has(empty, 5));\n }\n\n #[test]\n fn test_len() {\n let l = from_vec([0, 1, 2]);\n let empty = list::nil::;\n assert (list::len(l) == 3u);\n assert (list::len(empty) == 0u);\n }\n\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\nmake list based on boxes#[doc = \"A standard linked list\"];\n\nimport core::option;\nimport option::*;\nimport option::{some, none};\n\nenum list {\n cons(T, @list),\n nil,\n}\n\n#[doc = \"Create a list from a vector\"]\nfn from_vec(v: [const T]) -> @list {\n @vec::foldr(v, @nil::, { |h, t| @cons(h, t) })\n}\n\n#[doc = \"\nLeft fold\n\nApplies `f` to `u` and the first element in the list, then applies `f` to the\nresult of the previous call and the second element, and so on, returning the\naccumulated result.\n\n# Arguments\n\n* ls - The list to fold\n* z - The initial value\n* f - The function to apply\n\"]\nfn foldl(z: T, ls: @list, f: fn(T, U) -> T) -> T {\n let mut accum: T = z;\n iter(ls) {|elt| accum = f(accum, elt);}\n accum\n}\n\n#[doc = \"\nSearch for an element that matches a given predicate\n\nApply function `f` to each element of `v`, starting from the first.\nWhen function `f` returns true then an option containing the element\nis returned. If `f` matches no elements then none is returned.\n\"]\nfn find(ls: @list, f: fn(T) -> bool) -> option {\n let mut ls = ls;\n loop {\n ls = alt ls {\n cons(hd, tl) {\n if f(hd) { ret some(hd); }\n tl\n }\n nil { ret none; }\n }\n };\n}\n\n#[doc = \"Returns true if a list contains an element with the given value\"]\nfn has(ls: @list, elt: T) -> bool {\n for each(ls) { |e|\n if e == elt { ret true; }\n }\n ret false;\n}\n\n#[doc = \"Returns true if the list is empty\"]\npure fn is_empty(ls: @list) -> bool {\n alt *ls {\n nil { true }\n _ { false }\n }\n}\n\n#[doc = \"Returns true if the list is not empty\"]\npure fn is_not_empty(ls: @list) -> bool {\n ret !is_empty(ls);\n}\n\n#[doc = \"Returns the length of a list\"]\nfn len(ls: @list) -> uint {\n let mut count = 0u;\n iter(ls) {|_e| count += 1u;}\n count\n}\n\n#[doc = \"Returns all but the first element of a list\"]\npure fn tail(ls: @list) -> list {\n alt *ls {\n cons(_, tl) { ret tl; }\n nil { fail \"list empty\" }\n }\n}\n\n#[doc = \"Returns the first element of a list\"]\npure fn head(ls: @list) -> T {\n alt check *ls { cons(hd, _) { hd } }\n}\n\n#[doc = \"Appends one list to another\"]\npure fn append(l: @list, m: @list) -> @list {\n alt *l {\n nil { ret m; }\n cons(x, xs) { let rest = append(*xs, m); ret @cons(x, @rest); }\n }\n}\n\n#[doc = \"Iterate over a list\"]\nfn iter(l: @list, f: fn(T)) {\n let mut cur = l;\n loop {\n cur = alt *cur {\n cons(hd, tl) {\n f(hd);\n tl\n }\n nil { break; }\n }\n }\n}\n\n#[doc = \"Iterate over a list\"]\nfn each(l: list, f: fn(T) -> bool) {\n let mut cur = l;\n loop {\n cur = alt *cur {\n cons(hd, tl) {\n if !f(hd) { ret; }\n }\n nil { break; }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn test_is_empty() {\n let empty : @list::list = from_vec([]);\n let full1 = from_vec([1]);\n let full2 = from_vec(['r', 'u']);\n\n assert is_empty(empty);\n assert !is_empty(full1);\n assert !is_empty(full2);\n\n assert !is_not_empty(empty);\n assert is_not_empty(full1);\n assert is_not_empty(full2);\n }\n\n #[test]\n fn 
test_from_vec() {\n let l = from_vec([0, 1, 2]);\n\n assert (head(l) == 0);\n\n let tail_l = tail(l);\n assert (head(tail_l) == 1);\n\n let tail_tail_l = tail(tail_l);\n assert (head(tail_tail_l) == 2);\n }\n\n #[test]\n fn test_from_vec_empty() {\n let empty : @list::list = from_vec([]);\n assert (empty == list::nil::);\n }\n\n #[test]\n fn test_from_vec_mut() {\n let l = from_vec([mut 0, 1, 2]);\n\n assert (head(l) == 0);\n\n let tail_l = tail(l);\n assert (head(tail_l) == 1);\n\n let tail_tail_l = tail(tail_l);\n assert (head(tail_tail_l) == 2);\n }\n\n #[test]\n fn test_foldl() {\n fn add(&&a: uint, &&b: int) -> uint { ret a + (b as uint); }\n let l = from_vec([0, 1, 2, 3, 4]);\n let empty = @list::nil::;\n assert (list::foldl(0u, l, add) == 10u);\n assert (list::foldl(0u, empty, add) == 0u);\n }\n\n #[test]\n fn test_foldl2() {\n fn sub(&&a: int, &&b: int) -> int {\n a - b\n }\n let l = from_vec([1, 2, 3, 4]);\n assert (list::foldl(0, l, sub) == -10);\n }\n\n #[test]\n fn test_find_success() {\n fn match(&&i: int) -> bool { ret i == 2; }\n let l = from_vec([0, 1, 2]);\n assert (list::find(l, match) == option::some(2));\n }\n\n #[test]\n fn test_find_fail() {\n fn match(&&_i: int) -> bool { ret false; }\n let l = from_vec([0, 1, 2]);\n let empty = list::nil::;\n assert (list::find(l, match) == option::none::);\n assert (list::find(empty, match) == option::none::);\n }\n\n #[test]\n fn test_has() {\n let l = from_vec([5, 8, 6]);\n let empty = @list::nil::;\n assert (list::has(l, 5));\n assert (!list::has(l, 7));\n assert (list::has(l, 8));\n assert (!list::has(empty, 5));\n }\n\n #[test]\n fn test_len() {\n let l = from_vec([0, 1, 2]);\n let empty = @list::nil::;\n assert (list::len(l) == 3u);\n assert (list::len(empty) == 0u);\n }\n\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<|endoftext|>"} {"text":"\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Terminal formatting library.\n\/\/!\n\/\/! This crate provides the `Terminal` trait, which abstracts over an [ANSI\n\/\/! Termina][ansi] to provide color printing, among other things. There are two implementations,\n\/\/! the `TerminfoTerminal`, which uses control characters from a\n\/\/! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console\n\/\/! API][win].\n\/\/!\n\/\/! ## Example\n\/\/!\n\/\/! ```rust\n\/\/! extern crate term;\n\/\/!\n\/\/! fn main() {\n\/\/! let mut t = term::stdout();\n\/\/! t.fg(term::color::GREEN);\n\/\/! println!(\"hello, \");\n\/\/! t.fg(term::color::RED);\n\/\/! println!(\"world!\");\n\/\/! t.reset();\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! [ansi]: https:\/\/en.wikipedia.org\/wiki\/ANSI_escape_code\n\/\/! [win]: http:\/\/msdn.microsoft.com\/en-us\/library\/windows\/desktop\/ms682010%28v=vs.85%29.aspx\n\/\/! 
[ti]: https:\/\/en.wikipedia.org\/wiki\/Terminfo\n\n#![crate_id = \"term#0.11.0-pre\"]\n#![comment = \"Simple ANSI color library\"]\n#![license = \"MIT\/ASL2\"]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"http:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"http:\/\/www.rust-lang.org\/favicon.ico\",\n html_root_url = \"http:\/\/static.rust-lang.org\/doc\/master\")]\n\n#![feature(macro_rules, phase)]\n\n#![deny(missing_doc)]\n\n#[phase(syntax, link)] extern crate log;\nextern crate collections;\n\npub use terminfo::TerminfoTerminal;\n#[cfg(windows)]\npub use win::WinConsole;\n\nuse std::io::IoResult;\n\npub mod terminfo;\n\n#[cfg(windows)]\nmod win;\n\n#[cfg(not(windows))]\n\/\/\/ Return a Terminal wrapping stdout, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stdout() -> Option>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n ti.map(|t| box t as Box:Send>:Send>)\n}\n\n#[cfg(windows)]\n\/\/\/ Return a Terminal wrapping stdout, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stdout() -> Option:Send>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n\n match ti {\n Some(t) => Some(box t as Box:Send>:Send>),\n None => {\n let wc: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n wc.map(|w| box w as Box:Send>:Send>)\n }\n }\n}\n\n#[cfg(not(windows))]\n\/\/\/ Return a Terminal wrapping stderr, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stderr() -> Option:Send>:Send>:Send> {\n let ti: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n ti.map(|t| box t as Box:Send>:Send>)\n}\n\n#[cfg(windows)]\n\/\/\/ Return a Terminal wrapping stderr, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stderr() -> Option:Send>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n\n match ti {\n Some(t) => Some(box t as Box:Send>:Send>),\n None => {\n let wc: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n wc.map(|w| box w as Box:Send>:Send>)\n }\n }\n}\n\n\n\/\/\/ Terminal color definitions\npub mod color {\n \/\/\/ Number for a terminal color\n pub type Color = u16;\n\n pub static BLACK: Color = 0u16;\n pub static RED: Color = 1u16;\n pub static GREEN: Color = 2u16;\n pub static YELLOW: Color = 3u16;\n pub static BLUE: Color = 4u16;\n pub static MAGENTA: Color = 5u16;\n pub static CYAN: Color = 6u16;\n pub static WHITE: Color = 7u16;\n\n pub static BRIGHT_BLACK: Color = 8u16;\n pub static BRIGHT_RED: Color = 9u16;\n pub static BRIGHT_GREEN: Color = 10u16;\n pub static BRIGHT_YELLOW: Color = 11u16;\n pub static BRIGHT_BLUE: Color = 12u16;\n pub static BRIGHT_MAGENTA: Color = 13u16;\n pub static BRIGHT_CYAN: Color = 14u16;\n pub static BRIGHT_WHITE: Color = 15u16;\n}\n\n\/\/\/ Terminal attributes\npub mod attr {\n \/\/\/ Terminal attributes for use with term.attr().\n \/\/\/\n \/\/\/ Most attributes can only be turned on and must be turned off with term.reset().\n \/\/\/ The ones that can be turned off explicitly take a boolean value.\n \/\/\/ Color is also represented as an attribute for convenience.\n pub enum Attr {\n \/\/\/ Bold (or possibly bright) mode\n Bold,\n \/\/\/ Dim mode, also called faint or half-bright. Often not supported\n Dim,\n \/\/\/ Italics mode. Often not supported\n Italic(bool),\n \/\/\/ Underline mode\n Underline(bool),\n \/\/\/ Blink mode\n Blink,\n \/\/\/ Standout mode. 
Often implemented as Reverse, sometimes coupled with Bold\n Standout(bool),\n \/\/\/ Reverse mode, inverts the foreground and background colors\n Reverse,\n \/\/\/ Secure mode, also called invis mode. Hides the printed text\n Secure,\n \/\/\/ Convenience attribute to set the foreground color\n ForegroundColor(super::color::Color),\n \/\/\/ Convenience attribute to set the background color\n BackgroundColor(super::color::Color)\n }\n}\n\n\/\/\/ A terminal with similar capabilities to an ANSI Terminal\n\/\/\/ (foreground\/background colors etc).\npub trait Terminal: Writer {\n \/\/\/ Returns `None` whenever the terminal cannot be created for some\n \/\/\/ reason.\n fn new(out: T) -> Option;\n\n \/\/\/ Sets the foreground color to the given color.\n \/\/\/\n \/\/\/ If the color is a bright color, but the terminal only supports 8 colors,\n \/\/\/ the corresponding normal color will be used instead.\n \/\/\/\n \/\/\/ Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`\n \/\/\/ if there was an I\/O error.\n fn fg(&mut self, color: color::Color) -> IoResult;\n\n \/\/\/ Sets the background color to the given color.\n \/\/\/\n \/\/\/ If the color is a bright color, but the terminal only supports 8 colors,\n \/\/\/ the corresponding normal color will be used instead.\n \/\/\/\n \/\/\/ Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`\n \/\/\/ if there was an I\/O error.\n fn bg(&mut self, color: color::Color) -> IoResult;\n\n \/\/\/ Sets the given terminal attribute, if supported. Returns `Ok(true)`\n \/\/\/ if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if\n \/\/\/ there was an I\/O error.\n fn attr(&mut self, attr: attr::Attr) -> IoResult;\n\n \/\/\/ Returns whether the given terminal attribute is supported.\n fn supports_attr(&self, attr: attr::Attr) -> bool;\n\n \/\/\/ Resets all terminal attributes and color to the default.\n \/\/\/ Returns `Ok()`.\n fn reset(&mut self) -> IoResult<()>;\n\n \/\/\/ Returns the contained stream, destroying the `Terminal`\n fn unwrap(self) -> T;\n\n \/\/\/ Gets an immutable reference to the stream inside\n fn get_ref<'a>(&'a self) -> &'a T;\n\n \/\/\/ Gets a mutable reference to the stream inside\n fn get_mut<'a>(&'a mut self) -> &'a mut T;\n}\nFix example\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Terminal formatting library.\n\/\/!\n\/\/! This crate provides the `Terminal` trait, which abstracts over an [ANSI\n\/\/! Termina][ansi] to provide color printing, among other things. There are two implementations,\n\/\/! the `TerminfoTerminal`, which uses control characters from a\n\/\/! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console\n\/\/! API][win].\n\/\/!\n\/\/! ## Example\n\/\/!\n\/\/! ```rust\n\/\/! extern crate term;\n\/\/!\n\/\/! fn main() {\n\/\/! let mut t = term::stdout().unwrap();\n\/\/! t.fg(term::color::GREEN).unwrap();\n\/\/! println!(\"hello, \");\n\/\/! t.fg(term::color::RED).unwrap();\n\/\/! println!(\"world!\");\n\/\/! t.reset().unwrap();\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! [ansi]: https:\/\/en.wikipedia.org\/wiki\/ANSI_escape_code\n\/\/! 
[win]: http:\/\/msdn.microsoft.com\/en-us\/library\/windows\/desktop\/ms682010%28v=vs.85%29.aspx\n\/\/! [ti]: https:\/\/en.wikipedia.org\/wiki\/Terminfo\n\n#![crate_id = \"term#0.11.0-pre\"]\n#![comment = \"Simple ANSI color library\"]\n#![license = \"MIT\/ASL2\"]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"http:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"http:\/\/www.rust-lang.org\/favicon.ico\",\n html_root_url = \"http:\/\/static.rust-lang.org\/doc\/master\")]\n\n#![feature(macro_rules, phase)]\n\n#![deny(missing_doc)]\n\n#[phase(syntax, link)] extern crate log;\nextern crate collections;\n\npub use terminfo::TerminfoTerminal;\n#[cfg(windows)]\npub use win::WinConsole;\n\nuse std::io::IoResult;\n\npub mod terminfo;\n\n#[cfg(windows)]\nmod win;\n\n#[cfg(not(windows))]\n\/\/\/ Return a Terminal wrapping stdout, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stdout() -> Option>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n ti.map(|t| box t as Box:Send>:Send>)\n}\n\n#[cfg(windows)]\n\/\/\/ Return a Terminal wrapping stdout, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stdout() -> Option:Send>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n\n match ti {\n Some(t) => Some(box t as Box:Send>:Send>),\n None => {\n let wc: Option>>\n = Terminal::new(box std::io::stdout() as Box);\n wc.map(|w| box w as Box:Send>:Send>)\n }\n }\n}\n\n#[cfg(not(windows))]\n\/\/\/ Return a Terminal wrapping stderr, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stderr() -> Option:Send>:Send>:Send> {\n let ti: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n ti.map(|t| box t as Box:Send>:Send>)\n}\n\n#[cfg(windows)]\n\/\/\/ Return a Terminal wrapping stderr, or None if a terminal couldn't be\n\/\/\/ opened.\npub fn stderr() -> Option:Send>:Send>> {\n let ti: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n\n match ti {\n Some(t) => Some(box t as Box:Send>:Send>),\n None => {\n let wc: Option>>\n = Terminal::new(box std::io::stderr() as Box);\n wc.map(|w| box w as Box:Send>:Send>)\n }\n }\n}\n\n\n\/\/\/ Terminal color definitions\npub mod color {\n \/\/\/ Number for a terminal color\n pub type Color = u16;\n\n pub static BLACK: Color = 0u16;\n pub static RED: Color = 1u16;\n pub static GREEN: Color = 2u16;\n pub static YELLOW: Color = 3u16;\n pub static BLUE: Color = 4u16;\n pub static MAGENTA: Color = 5u16;\n pub static CYAN: Color = 6u16;\n pub static WHITE: Color = 7u16;\n\n pub static BRIGHT_BLACK: Color = 8u16;\n pub static BRIGHT_RED: Color = 9u16;\n pub static BRIGHT_GREEN: Color = 10u16;\n pub static BRIGHT_YELLOW: Color = 11u16;\n pub static BRIGHT_BLUE: Color = 12u16;\n pub static BRIGHT_MAGENTA: Color = 13u16;\n pub static BRIGHT_CYAN: Color = 14u16;\n pub static BRIGHT_WHITE: Color = 15u16;\n}\n\n\/\/\/ Terminal attributes\npub mod attr {\n \/\/\/ Terminal attributes for use with term.attr().\n \/\/\/\n \/\/\/ Most attributes can only be turned on and must be turned off with term.reset().\n \/\/\/ The ones that can be turned off explicitly take a boolean value.\n \/\/\/ Color is also represented as an attribute for convenience.\n pub enum Attr {\n \/\/\/ Bold (or possibly bright) mode\n Bold,\n \/\/\/ Dim mode, also called faint or half-bright. Often not supported\n Dim,\n \/\/\/ Italics mode. 
Often not supported\n Italic(bool),\n \/\/\/ Underline mode\n Underline(bool),\n \/\/\/ Blink mode\n Blink,\n \/\/\/ Standout mode. Often implemented as Reverse, sometimes coupled with Bold\n Standout(bool),\n \/\/\/ Reverse mode, inverts the foreground and background colors\n Reverse,\n \/\/\/ Secure mode, also called invis mode. Hides the printed text\n Secure,\n \/\/\/ Convenience attribute to set the foreground color\n ForegroundColor(super::color::Color),\n \/\/\/ Convenience attribute to set the background color\n BackgroundColor(super::color::Color)\n }\n}\n\n\/\/\/ A terminal with similar capabilities to an ANSI Terminal\n\/\/\/ (foreground\/background colors etc).\npub trait Terminal: Writer {\n \/\/\/ Returns `None` whenever the terminal cannot be created for some\n \/\/\/ reason.\n fn new(out: T) -> Option;\n\n \/\/\/ Sets the foreground color to the given color.\n \/\/\/\n \/\/\/ If the color is a bright color, but the terminal only supports 8 colors,\n \/\/\/ the corresponding normal color will be used instead.\n \/\/\/\n \/\/\/ Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`\n \/\/\/ if there was an I\/O error.\n fn fg(&mut self, color: color::Color) -> IoResult;\n\n \/\/\/ Sets the background color to the given color.\n \/\/\/\n \/\/\/ If the color is a bright color, but the terminal only supports 8 colors,\n \/\/\/ the corresponding normal color will be used instead.\n \/\/\/\n \/\/\/ Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`\n \/\/\/ if there was an I\/O error.\n fn bg(&mut self, color: color::Color) -> IoResult;\n\n \/\/\/ Sets the given terminal attribute, if supported. Returns `Ok(true)`\n \/\/\/ if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if\n \/\/\/ there was an I\/O error.\n fn attr(&mut self, attr: attr::Attr) -> IoResult;\n\n \/\/\/ Returns whether the given terminal attribute is supported.\n fn supports_attr(&self, attr: attr::Attr) -> bool;\n\n \/\/\/ Resets all terminal attributes and color to the default.\n \/\/\/ Returns `Ok()`.\n fn reset(&mut self) -> IoResult<()>;\n\n \/\/\/ Returns the contained stream, destroying the `Terminal`\n fn unwrap(self) -> T;\n\n \/\/\/ Gets an immutable reference to the stream inside\n fn get_ref<'a>(&'a self) -> &'a T;\n\n \/\/\/ Gets a mutable reference to the stream inside\n fn get_mut<'a>(&'a mut self) -> &'a mut T;\n}\n<|endoftext|>"} {"text":"Communicator crate updates<|endoftext|>"} {"text":"\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Share\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. 
\"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\n\nuse driver::session::Session;\nuse metadata::csearch::each_lang_item;\nuse middle::ty;\nuse syntax::ast;\nuse syntax::ast_util::local_def;\nuse syntax::attr::AttrMetaMethods;\nuse syntax::parse::token::InternedString;\nuse syntax::visit::Visitor;\nuse syntax::visit;\n\nuse collections::HashMap;\nuse std::iter::Enumerate;\nuse std::slice;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! lets_do_this {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n#[deriving(FromPrimitive)]\npub enum LangItem {\n $($variant),*\n}\n\npub struct LanguageItems {\n pub items: Vec> ,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option { None }\n\n LanguageItems {\n items: vec!($(foo($variant)),*)\n }\n }\n\n pub fn items<'a>(&'a self) -> Enumerate>> {\n self.items.iter().enumerate()\n }\n\n pub fn item_name(index: uint) -> &'static str {\n let item: Option = FromPrimitive::from_uint(index);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result {\n match self.items.get(it as uint) {\n &Some(id) => Ok(id),\n &None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as uint)))\n }\n }\n }\n\n pub fn to_builtin_kind(&self, id: ast::DefId) -> Option {\n if Some(id) == self.send_trait() {\n Some(ty::BoundSend)\n } else if Some(id) == self.sized_trait() {\n Some(ty::BoundSized)\n } else if Some(id) == self.copy_trait() {\n Some(ty::BoundCopy)\n } else if Some(id) == self.share_trait() {\n Some(ty::BoundShare)\n } else {\n None\n }\n }\n\n $(\n pub fn $method(&self) -> Option {\n *self.items.get($variant as uint)\n }\n )*\n}\n\nstruct LanguageItemCollector<'a> {\n items: LanguageItems,\n\n session: &'a Session,\n\n item_refs: HashMap<&'static str, uint>,\n}\n\nimpl<'a> Visitor<()> for LanguageItemCollector<'a> {\n fn visit_item(&mut self, item: &ast::Item, _: ()) {\n match extract(item.attrs.as_slice()) {\n Some(value) => {\n let item_index = self.item_refs.find_equiv(&value).map(|x| *x);\n\n match item_index {\n Some(item_index) => {\n self.collect_item(item_index, local_def(item.id))\n }\n None => {}\n }\n }\n None => {}\n }\n\n visit::walk_item(self, item, ());\n }\n}\n\nimpl<'a> LanguageItemCollector<'a> {\n pub fn new(session: &'a Session) -> LanguageItemCollector<'a> {\n let mut item_refs = HashMap::new();\n\n $( item_refs.insert($name, $variant as uint); )*\n\n LanguageItemCollector {\n session: session,\n items: LanguageItems::new(),\n item_refs: item_refs\n }\n }\n\n pub fn collect_item(&mut self, item_index: uint, item_def_id: ast::DefId) {\n \/\/ Check for duplicates.\n match self.items.items.get(item_index) {\n &Some(original_def_id) if original_def_id != item_def_id => {\n self.session.err(format!(\"duplicate entry for `{}`\",\n LanguageItems::item_name(item_index)));\n }\n &Some(_) | &None => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n *self.items.items.get_mut(item_index) = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &ast::Crate) {\n visit::walk_crate(self, krate, ());\n }\n\n pub fn collect_external_language_items(&mut self) {\n let crate_store = &self.session.cstore;\n crate_store.iter_crate_data(|crate_number, _crate_metadata| {\n each_lang_item(crate_store, crate_number, |node_id, item_index| {\n let def_id = ast::DefId 
{ krate: crate_number, node: node_id };\n self.collect_item(item_index, def_id);\n true\n });\n })\n }\n\n pub fn collect(&mut self, krate: &ast::Crate) {\n self.collect_local_language_items(krate);\n self.collect_external_language_items();\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option {\n for attribute in attrs.iter() {\n match attribute.name_str_pair() {\n Some((ref key, ref value)) if key.equiv(&(\"lang\")) => {\n return Some((*value).clone());\n }\n Some(..) | None => {}\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(krate: &ast::Crate,\n session: &Session) -> @LanguageItems {\n let mut collector = LanguageItemCollector::new(session);\n collector.collect(krate);\n let LanguageItemCollector { items, .. } = collector;\n session.abort_if_errors();\n @items\n}\n\n\/\/ End of the macro\n }\n}\n\nlets_do_this! {\n\/\/ Variant name, Name, Method name;\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n ShareTraitLangItem, \"share\", share_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n IndexTraitLangItem, \"index\", index_trait;\n\n UnsafeTypeLangItem, \"unsafe\", unsafe_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n UniqStrEqFnLangItem, \"uniq_str_eq\", uniq_str_eq_fn;\n FailFnLangItem, \"fail_\", fail_fn;\n FailBoundsCheckFnLangItem, \"fail_bounds_check\", fail_bounds_check_fn;\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n ClosureExchangeMallocFnLangItem, \"closure_exchange_malloc\", closure_exchange_malloc_fn;\n ExchangeFreeFnLangItem, \"exchange_free\", exchange_free_fn;\n MallocFnLangItem, \"malloc\", malloc_fn;\n FreeFnLangItem, \"free\", free_fn;\n StrDupUniqFnLangItem, \"strdup_uniq\", strdup_uniq_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n TyDescStructLangItem, \"ty_desc\", ty_desc;\n TyVisitorTraitLangItem, \"ty_visitor\", ty_visitor;\n OpaqueStructLangItem, \"opaque\", opaque;\n\n TypeIdLangItem, \"type_id\", type_id;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality_fn;\n\n ManagedHeapLangItem, \"managed_heap\", managed_heap;\n ExchangeHeapLangItem, \"exchange_heap\", exchange_heap;\n GcLangItem, \"gc\", gc;\n\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NoSendItem, \"no_send_bound\", no_send_bound;\n NoCopyItem, \"no_copy_bound\", no_copy_bound;\n NoShareItem, \"no_share_bound\", no_share_bound;\n ManagedItem, \"managed_bound\", managed_bound;\n}\nmiddle: lang_items: allow dead 
code\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Share\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. \"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\n\nuse driver::session::Session;\nuse metadata::csearch::each_lang_item;\nuse middle::ty;\nuse syntax::ast;\nuse syntax::ast_util::local_def;\nuse syntax::attr::AttrMetaMethods;\nuse syntax::parse::token::InternedString;\nuse syntax::visit::Visitor;\nuse syntax::visit;\n\nuse collections::HashMap;\nuse std::iter::Enumerate;\nuse std::slice;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! lets_do_this {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n#[deriving(FromPrimitive)]\npub enum LangItem {\n $($variant),*\n}\n\npub struct LanguageItems {\n pub items: Vec> ,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option { None }\n\n LanguageItems {\n items: vec!($(foo($variant)),*)\n }\n }\n\n pub fn items<'a>(&'a self) -> Enumerate>> {\n self.items.iter().enumerate()\n }\n\n pub fn item_name(index: uint) -> &'static str {\n let item: Option = FromPrimitive::from_uint(index);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result {\n match self.items.get(it as uint) {\n &Some(id) => Ok(id),\n &None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as uint)))\n }\n }\n }\n\n pub fn to_builtin_kind(&self, id: ast::DefId) -> Option {\n if Some(id) == self.send_trait() {\n Some(ty::BoundSend)\n } else if Some(id) == self.sized_trait() {\n Some(ty::BoundSized)\n } else if Some(id) == self.copy_trait() {\n Some(ty::BoundCopy)\n } else if Some(id) == self.share_trait() {\n Some(ty::BoundShare)\n } else {\n None\n }\n }\n\n $(\n #[allow(dead_code)]\n pub fn $method(&self) -> Option {\n *self.items.get($variant as uint)\n }\n )*\n}\n\nstruct LanguageItemCollector<'a> {\n items: LanguageItems,\n\n session: &'a Session,\n\n item_refs: HashMap<&'static str, uint>,\n}\n\nimpl<'a> Visitor<()> for LanguageItemCollector<'a> {\n fn visit_item(&mut self, item: &ast::Item, _: ()) {\n match extract(item.attrs.as_slice()) {\n Some(value) => {\n let item_index = self.item_refs.find_equiv(&value).map(|x| *x);\n\n match item_index {\n Some(item_index) => {\n self.collect_item(item_index, local_def(item.id))\n }\n None => {}\n }\n }\n None => {}\n }\n\n visit::walk_item(self, item, ());\n }\n}\n\nimpl<'a> LanguageItemCollector<'a> {\n pub fn new(session: &'a Session) -> LanguageItemCollector<'a> {\n let mut item_refs = HashMap::new();\n\n $( item_refs.insert($name, $variant as uint); )*\n\n LanguageItemCollector {\n session: session,\n items: LanguageItems::new(),\n item_refs: item_refs\n }\n }\n\n pub fn collect_item(&mut self, item_index: uint, item_def_id: ast::DefId) {\n \/\/ Check for duplicates.\n match 
self.items.items.get(item_index) {\n &Some(original_def_id) if original_def_id != item_def_id => {\n self.session.err(format!(\"duplicate entry for `{}`\",\n LanguageItems::item_name(item_index)));\n }\n &Some(_) | &None => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n *self.items.items.get_mut(item_index) = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &ast::Crate) {\n visit::walk_crate(self, krate, ());\n }\n\n pub fn collect_external_language_items(&mut self) {\n let crate_store = &self.session.cstore;\n crate_store.iter_crate_data(|crate_number, _crate_metadata| {\n each_lang_item(crate_store, crate_number, |node_id, item_index| {\n let def_id = ast::DefId { krate: crate_number, node: node_id };\n self.collect_item(item_index, def_id);\n true\n });\n })\n }\n\n pub fn collect(&mut self, krate: &ast::Crate) {\n self.collect_local_language_items(krate);\n self.collect_external_language_items();\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option {\n for attribute in attrs.iter() {\n match attribute.name_str_pair() {\n Some((ref key, ref value)) if key.equiv(&(\"lang\")) => {\n return Some((*value).clone());\n }\n Some(..) | None => {}\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(krate: &ast::Crate,\n session: &Session) -> @LanguageItems {\n let mut collector = LanguageItemCollector::new(session);\n collector.collect(krate);\n let LanguageItemCollector { items, .. } = collector;\n session.abort_if_errors();\n @items\n}\n\n\/\/ End of the macro\n }\n}\n\nlets_do_this! {\n\/\/ Variant name, Name, Method name;\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n ShareTraitLangItem, \"share\", share_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n IndexTraitLangItem, \"index\", index_trait;\n\n UnsafeTypeLangItem, \"unsafe\", unsafe_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n UniqStrEqFnLangItem, \"uniq_str_eq\", uniq_str_eq_fn;\n FailFnLangItem, \"fail_\", fail_fn;\n FailBoundsCheckFnLangItem, \"fail_bounds_check\", fail_bounds_check_fn;\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n ClosureExchangeMallocFnLangItem, \"closure_exchange_malloc\", closure_exchange_malloc_fn;\n ExchangeFreeFnLangItem, \"exchange_free\", exchange_free_fn;\n MallocFnLangItem, \"malloc\", malloc_fn;\n FreeFnLangItem, \"free\", free_fn;\n StrDupUniqFnLangItem, \"strdup_uniq\", strdup_uniq_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n TyDescStructLangItem, \"ty_desc\", ty_desc;\n TyVisitorTraitLangItem, \"ty_visitor\", ty_visitor;\n OpaqueStructLangItem, \"opaque\", opaque;\n\n TypeIdLangItem, \"type_id\", type_id;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality_fn;\n\n ManagedHeapLangItem, \"managed_heap\", managed_heap;\n ExchangeHeapLangItem, 
\"exchange_heap\", exchange_heap;\n GcLangItem, \"gc\", gc;\n\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NoSendItem, \"no_send_bound\", no_send_bound;\n NoCopyItem, \"no_copy_bound\", no_copy_bound;\n NoShareItem, \"no_share_bound\", no_share_bound;\n ManagedItem, \"managed_bound\", managed_bound;\n}\n<|endoftext|>"} {"text":"add main4fn main() {\n let value = std::env::args().nth(1);\n let convert_and_double = compose(convert, double);\n\n match convert_and_double(value) {\n Some(n) => println!(\"{}\", n),\n None => println!(\"No value\"),\n }\n\n}\n\nfn convert(n: Option) -> Option {\n n.and_then(|n| n.parse().ok())\n}\n\nfn double(n: Option) -> Option {\n n.map(|n| n * 2.0)\n}\n\nfn compose<'f, T1, T2, T3, F1, F2>(a: F1, b: F2) -> Box T3 + 'f>\n where F1:Fn(T1) -> T2 + 'f,\n F2:Fn(T2) -> T3 + 'f\n{\n Box::new(move |input| b(a(input)))\n}<|endoftext|>"} {"text":"Document {Anime,Manga}Attributes::airing_status<|endoftext|>"} {"text":"s\/should_fail\/should_panic<|endoftext|>"} {"text":"video.putTag method<|endoftext|>"} {"text":"use metadata::{Metadata, Package, PackageId, Resolve, ResolveNode, Target};\nuse racer_interner::InternedString;\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\n\n\/\/\/ Cached dependencies for racer\n#[derive(Clone, Debug)]\npub struct PackageMap {\n manifest_to_id: HashMap,\n id_to_edition: HashMap,\n id_to_deps: HashMap>,\n id_to_lib: HashMap,\n}\n\nimpl PackageMap {\n pub fn from_metadata(meta: Metadata) -> Self {\n let Metadata {\n packages, resolve, ..\n } = meta;\n PackageMap::new(packages, resolve)\n }\n pub fn new(packages: Vec, resolve: Option) -> Self {\n let mut manifest_to_id = HashMap::new();\n let mut id_to_lib = HashMap::new();\n let mut id_to_edition = HashMap::new();\n for package in packages {\n let Package {\n id,\n targets,\n manifest_path,\n edition,\n ..\n } = package;\n manifest_to_id.insert(manifest_path, id);\n id_to_edition.insert(id, edition);\n if let Some(t) = targets.into_iter().find(|t| t.is_lib()) {\n id_to_lib.insert(package.id, t.to_owned());\n }\n }\n let id_to_deps = resolve.map_or_else(\n || HashMap::new(),\n |res| construct_deps(res.nodes, &id_to_lib),\n );\n PackageMap {\n manifest_to_id,\n id_to_edition,\n id_to_deps,\n id_to_lib,\n }\n }\n pub fn get_id(&self, path: &Path) -> Option {\n self.manifest_to_id.get(path).map(|&id| id)\n }\n pub fn get_edition(&self, id: PackageId) -> Option<&str> {\n self.id_to_edition.get(&id).map(|s| s.as_str())\n }\n pub fn get_lib(&self, id: PackageId) -> Option<&Target> {\n self.id_to_lib.get(&id)\n }\n pub fn get_lib_src_path(&self, id: PackageId) -> Option<&Path> {\n self.get_lib(id).map(|t| t.src_path.as_ref())\n }\n pub fn ids(&self) -> impl Iterator {\n self.id_to_edition.keys()\n }\n pub fn get_dependencies(&self, id: PackageId) -> Option<&HashMap> {\n self.id_to_deps.get(&id)\n }\n pub fn get_src_path_from_libname(&self, id: PackageId, s: &str) -> Option<&Path> {\n let deps = self.get_dependencies(id)?;\n let query_str = InternedString::new_if_exists(s)?;\n deps.get(&query_str).map(AsRef::as_ref)\n }\n}\n\nfn construct_deps(\n nodes: Vec,\n targets: &HashMap,\n) -> HashMap> {\n nodes\n .into_iter()\n 
.map(|node| {\n let deps: HashMap<_, _> = node\n .dependencies\n .into_iter()\n .filter_map(|id| targets.get(&id).map(|t| (t.name, t.src_path.clone())))\n .collect();\n (node.id, deps)\n }).collect()\n}\nUse vec for dependencyuse metadata::{Metadata, Package, PackageId, Resolve, ResolveNode, Target};\nuse racer_interner::InternedString;\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\n\n\/\/\/ Cached dependencies for racer\n#[derive(Clone, Debug)]\npub struct PackageMap {\n manifest_to_id: HashMap,\n id_to_edition: HashMap,\n id_to_deps: HashMap>,\n id_to_lib: HashMap,\n}\n\nimpl PackageMap {\n pub fn from_metadata(meta: Metadata) -> Self {\n let Metadata {\n packages, resolve, ..\n } = meta;\n PackageMap::new(packages, resolve)\n }\n pub fn new(packages: Vec, resolve: Option) -> Self {\n let mut manifest_to_id = HashMap::new();\n let mut id_to_lib = HashMap::new();\n let mut id_to_edition = HashMap::new();\n for package in packages {\n let Package {\n id,\n targets,\n manifest_path,\n edition,\n ..\n } = package;\n manifest_to_id.insert(manifest_path, id);\n id_to_edition.insert(id, edition);\n if let Some(t) = targets.into_iter().find(|t| t.is_lib()) {\n id_to_lib.insert(package.id, t.to_owned());\n }\n }\n let id_to_deps = resolve.map_or_else(\n || HashMap::new(),\n |res| construct_deps(res.nodes, &id_to_lib),\n );\n PackageMap {\n manifest_to_id,\n id_to_edition,\n id_to_deps,\n id_to_lib,\n }\n }\n pub fn get_id(&self, path: &Path) -> Option {\n self.manifest_to_id.get(path).map(|&id| id)\n }\n pub fn get_edition(&self, id: PackageId) -> Option<&str> {\n self.id_to_edition.get(&id).map(|s| s.as_str())\n }\n pub fn get_lib(&self, id: PackageId) -> Option<&Target> {\n self.id_to_lib.get(&id)\n }\n pub fn get_lib_src_path(&self, id: PackageId) -> Option<&Path> {\n self.get_lib(id).map(|t| t.src_path.as_ref())\n }\n pub fn ids(&self) -> impl Iterator {\n self.id_to_edition.keys()\n }\n pub fn get_dependencies(&self, id: PackageId) -> Option<&Vec<(InternedString, PathBuf)>> {\n self.id_to_deps.get(&id)\n }\n pub fn get_src_path_from_libname(&self, id: PackageId, s: &str) -> Option<&Path> {\n let deps = self.get_dependencies(id)?;\n let query_str = InternedString::new_if_exists(s)?;\n deps.iter().find(|t| t.0 == query_str).map(|t| t.1.as_ref())\n }\n}\n\nfn construct_deps(\n nodes: Vec,\n targets: &HashMap,\n) -> HashMap> {\n nodes\n .into_iter()\n .map(|node| {\n let deps: Vec<_> = node\n .dependencies\n .into_iter()\n .filter_map(|id| targets.get(&id).map(|t| (t.name, t.src_path.clone())))\n .collect();\n (node.id, deps)\n }).collect()\n}\n<|endoftext|>"} {"text":"load game screen<|endoftext|>"} {"text":"Move update_db stuff into an sql transaction.<|endoftext|>"} {"text":"\/\/ xfail-pretty\n\n\n\/\/ -*- rust -*-\nfn ho(f: fn(int) -> int ) -> int { let n: int = f(3); ret n; }\n\nfn direct(x: int) -> int { ret x + 1; }\n\nfn main() {\n let a: int =\n direct(3); \/\/ direct\n \/\/let int b = ho(direct); \/\/ indirect unbound\n\n\n let c: int =\n ho(bind direct(_)); \/\/ indirect bound\n \/\/assert (a == b);\n \/\/assert (b == c);\n\n\n}Clean up run-pass\/fun-call-variants\/\/ -*- rust -*-\nfn ho(f: fn(int) -> int ) -> int { let n: int = f(3); ret n; }\n\nfn direct(x: int) -> int { ret x + 1; }\n\nfn main() {\n let a: int =\n direct(3); \/\/ direct\n let b: int = ho(direct); \/\/ indirect unbound\n\n let c: int =\n ho(bind direct(_)); \/\/ indirect bound\n assert (a == b);\n assert (b == c);\n}<|endoftext|>"} {"text":"Oops - forgot to git add cargo.rsuse std::fs::File;\nuse 
std::io::Read;\nuse std::env;\nuse std::path::{Path,PathBuf};\nuse std::fs::{PathExt,read_dir};\nuse toml;\n\nmacro_rules! otry {\n ($e:expr) => (match $e { Some(e) => e, None => return None})\n}\n\nmacro_rules! otry2 {\n ($e:expr) => (match $e { Ok(e) => e, Err(_) => return None})\n}\n\n\nfn find_src_via_lockfile(kratename: &str, cargofile: &Path) -> Option {\n let mut file = otry2!(File::open(cargofile));\n let mut string = String::new();\n otry2!(file.read_to_string(&mut string));\n let mut parser = toml::Parser::new(&string);\n let lock_table = parser.parse().unwrap();\n\n debug!(\"PHIL found lock table {:?}\",lock_table);\n\n let t = match lock_table.get(\"package\") {\n Some(&toml::Value::Array(ref t1)) => t1,\n _ => return None\n };\n\n for item in t {\n if let &toml::Value::Table(ref t) = item {\n if Some(&toml::Value::String(kratename.to_string())) == t.get(\"name\") {\n\n let version = otry!(getstr(t, \"version\"));\n let source = otry!(getstr(t, \"source\"));\n\n if Some(\"registry\") == source.split(\"+\").nth(0) {\n\n let mut d = otry!(env::home_dir());\n d.push(\".cargo\"); \n d.push(\"registry\");\n d.push(\"src\");\n d = otry!(find_git_dir(d));\n d.push(kratename.to_string() + \"-\" + &version);\n d.push(\"src\");\n d.push(\"lib.rs\");\n return Some(d)\n }\n }\n }\n }\n return None;\n}\n\nfn find_git_dir(d: PathBuf) -> Option {\n for entry in otry2!(read_dir(d)) {\n let path = otry2!(entry).path();\n \n \/\/ Assume there is only one directory and it's the git one\n if path.is_dir() {\n if let Some(ref fname) = path.file_name().and_then(|s| s.to_str()) {\n println!(\"PHIL fname is {}\",fname);\n if fname.starts_with(\"git-\") { \n return Some(path.clone());\n }\n }\n }\n }\n return None;\n}\n\nfn getstr(t: &toml::Table, k: &str) -> Option {\n match t.get(k) {\n Some(&toml::Value::String(ref s)) => Some(s.clone()),\n _ => None\n }\n}\n\nfn find_cargo_lockfile(currentfile: &Path) -> Option {\n let mut f = currentfile.to_path_buf();\n f.push(\"Cargo.lock\");\n if f.exists() {\n return Some(f);\n } else {\n if f.pop() && f.pop() {\n return find_cargo_lockfile(&f);\n } else {\n None\n }\n }\n}\n\n\npub fn get_crate_file(name: &str, from_path: &Path) -> Option {\n if let Some(lockfile) = find_cargo_lockfile(from_path) {\n return find_src_via_lockfile(name, &lockfile);\n }\n None\n}\n<|endoftext|>"} {"text":" use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => 
times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') | (Normal, ' ') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') | (Normal, 'H') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 
1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') | (Normal, 'L') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\nFix crash use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) 
=> {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') | (Normal, ' ') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') | (Normal, 'H') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while 
(editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') | (Normal, 'L') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset >= 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of the install aspects of the compiler.\n\/\/!\n\/\/! This module is responsible for installing the standard library,\n\/\/! 
compiler, and documentation.\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf, Component};\nuse std::process::Command;\n\nuse dist::{self, pkgname, sanitize_sh, tmpdir};\n\nuse builder::{Builder, RunConfig, ShouldRun, Step};\nuse cache::Interned;\nuse config::Config;\n\npub fn install_docs(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"docs\", \"rust-docs\", stage, Some(host));\n}\n\npub fn install_std(builder: &Builder, stage: u32, target: Interned) {\n install_sh(builder, \"std\", \"rust-std\", stage, Some(target));\n}\n\npub fn install_cargo(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"cargo\", \"cargo\", stage, Some(host));\n}\n\npub fn install_rls(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rls\", \"rls\", stage, Some(host));\n}\n\npub fn install_rustfmt(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rustfmt\", \"rustfmt\", stage, Some(host));\n}\n\npub fn install_analysis(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"analysis\", \"rust-analysis\", stage, Some(host));\n}\n\npub fn install_src(builder: &Builder, stage: u32) {\n install_sh(builder, \"src\", \"rust-src\", stage, None);\n}\npub fn install_rustc(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rustc\", \"rustc\", stage, Some(host));\n}\n\nfn install_sh(\n builder: &Builder,\n package: &str,\n name: &str,\n stage: u32,\n host: Option>\n) {\n let build = builder.build;\n build.info(&format!(\"Install {} stage{} ({:?})\", package, stage, host));\n\n let prefix_default = PathBuf::from(\"\/usr\/local\");\n let sysconfdir_default = PathBuf::from(\"\/etc\");\n let datadir_default = PathBuf::from(\"share\");\n let docdir_default = datadir_default.join(\"doc\/rust\");\n let bindir_default = PathBuf::from(\"bin\");\n let libdir_default = PathBuf::from(\"lib\");\n let mandir_default = datadir_default.join(\"man\");\n let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);\n let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);\n let datadir = build.config.datadir.as_ref().unwrap_or(&datadir_default);\n let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);\n let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);\n let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);\n let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);\n\n let sysconfdir = prefix.join(sysconfdir);\n let datadir = prefix.join(datadir);\n let docdir = prefix.join(docdir);\n let bindir = prefix.join(bindir);\n let libdir = prefix.join(libdir);\n let mandir = prefix.join(mandir);\n\n let destdir = env::var_os(\"DESTDIR\").map(PathBuf::from);\n\n let prefix = add_destdir(&prefix, &destdir);\n let sysconfdir = add_destdir(&sysconfdir, &destdir);\n let datadir = add_destdir(&datadir, &destdir);\n let docdir = add_destdir(&docdir, &destdir);\n let bindir = add_destdir(&bindir, &destdir);\n let libdir = add_destdir(&libdir, &destdir);\n let mandir = add_destdir(&mandir, &destdir);\n\n let empty_dir = build.out.join(\"tmp\/empty_dir\");\n\n t!(fs::create_dir_all(&empty_dir));\n let package_name = if let Some(host) = host {\n format!(\"{}-{}\", pkgname(build, name), host)\n } else {\n pkgname(build, name)\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.current_dir(&empty_dir)\n .arg(sanitize_sh(&tmpdir(build).join(&package_name).join(\"install.sh\")))\n .arg(format!(\"--prefix={}\", 
sanitize_sh(&prefix)))\n .arg(format!(\"--sysconfdir={}\", sanitize_sh(&sysconfdir)))\n .arg(format!(\"--datadir={}\", sanitize_sh(&datadir)))\n .arg(format!(\"--docdir={}\", sanitize_sh(&docdir)))\n .arg(format!(\"--bindir={}\", sanitize_sh(&bindir)))\n .arg(format!(\"--libdir={}\", sanitize_sh(&libdir)))\n .arg(format!(\"--mandir={}\", sanitize_sh(&mandir)))\n .arg(\"--disable-ldconfig\");\n build.run(&mut cmd);\n t!(fs::remove_dir_all(&empty_dir));\n}\n\nfn add_destdir(path: &Path, destdir: &Option) -> PathBuf {\n let mut ret = match *destdir {\n Some(ref dest) => dest.clone(),\n None => return path.to_path_buf(),\n };\n for part in path.components() {\n match part {\n Component::Normal(s) => ret.push(s),\n _ => {}\n }\n }\n ret\n}\n\nmacro_rules! install {\n (($sel:ident, $builder:ident, $_config:ident),\n $($name:ident,\n $path:expr,\n $default_cond:expr,\n only_hosts: $only_hosts:expr,\n $run_item:block $(, $c:ident)*;)+) => {\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub stage: u32,\n pub target: Interned,\n pub host: Interned,\n }\n\n impl $name {\n #[allow(dead_code)]\n fn should_build(config: &Config) -> bool {\n config.extended && config.tools.as_ref()\n .map_or(true, |t| t.contains($path))\n }\n\n #[allow(dead_code)]\n fn should_install(builder: &Builder) -> bool {\n builder.config.tools.as_ref().map_or(false, |t| t.contains($path))\n }\n }\n\n impl Step for $name {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = $only_hosts;\n $(const $c: bool = true;)*\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let $_config = &run.builder.config;\n run.path($path).default_condition($default_cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n stage: run.builder.top_stage,\n target: run.target,\n host: run.builder.build.build,\n });\n }\n\n fn run($sel, $builder: &Builder) {\n $run_item\n }\n })+\n }\n}\n\ninstall!((self, builder, _config),\n Docs, \"src\/doc\", _config.docs, only_hosts: false, {\n builder.ensure(dist::Docs { stage: self.stage, host: self.target });\n install_docs(builder, self.stage, self.target);\n };\n Std, \"src\/libstd\", true, only_hosts: true, {\n for target in &builder.build.targets {\n builder.ensure(dist::Std {\n compiler: builder.compiler(self.stage, self.host),\n target: *target\n });\n install_std(builder, self.stage, *target);\n }\n };\n Cargo, \"cargo\", Self::should_build(_config), only_hosts: true, {\n builder.ensure(dist::Cargo { stage: self.stage, target: self.target });\n install_cargo(builder, self.stage, self.target);\n };\n Rls, \"rls\", Self::should_build(_config), only_hosts: true, {\n if builder.ensure(dist::Rls { stage: self.stage, target: self.target }).is_some() ||\n Self::should_install(builder) {\n install_rls(builder, self.stage, self.target);\n } else {\n builder.info(&format!(\"skipping Install RLS stage{} ({})\", self.stage, self.target));\n }\n };\n Rustfmt, \"rustfmt\", Self::should_build(_config), only_hosts: true, {\n if builder.ensure(dist::Rustfmt { stage: self.stage, target: self.target }).is_some() ||\n Self::should_install(builder) {\n install_rustfmt(builder, self.stage, self.target);\n } else {\n builder.info(\n &format!(\"skipping Install Rustfmt stage{} ({})\", self.stage, self.target));\n }\n };\n Analysis, \"analysis\", Self::should_build(_config), only_hosts: false, {\n builder.ensure(dist::Analysis {\n compiler: builder.compiler(self.stage, self.host),\n target: self.target\n });\n install_analysis(builder, self.stage, 
self.target);\n };\n Rustc, \"src\/librustc\", true, only_hosts: true, {\n builder.ensure(dist::Rustc {\n compiler: builder.compiler(self.stage, self.target),\n });\n install_rustc(builder, self.stage, self.target);\n };\n);\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Src {\n pub stage: u32,\n}\n\nimpl Step for Src {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let config = &run.builder.config;\n let cond = config.extended &&\n config.tools.as_ref().map_or(true, |t| t.contains(\"src\"));\n run.path(\"src\").default_condition(cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Src {\n stage: run.builder.top_stage,\n });\n }\n\n fn run(self, builder: &Builder) {\n builder.ensure(dist::Src);\n install_src(builder, self.stage);\n }\n}\nAuto merge of #49778 - tamird:install-relative-prefix, r=Mark-Simulacrum\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of the install aspects of the compiler.\n\/\/!\n\/\/! This module is responsible for installing the standard library,\n\/\/! compiler, and documentation.\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf, Component};\nuse std::process::Command;\n\nuse dist::{self, pkgname, sanitize_sh, tmpdir};\n\nuse builder::{Builder, RunConfig, ShouldRun, Step};\nuse cache::Interned;\nuse config::Config;\n\npub fn install_docs(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"docs\", \"rust-docs\", stage, Some(host));\n}\n\npub fn install_std(builder: &Builder, stage: u32, target: Interned) {\n install_sh(builder, \"std\", \"rust-std\", stage, Some(target));\n}\n\npub fn install_cargo(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"cargo\", \"cargo\", stage, Some(host));\n}\n\npub fn install_rls(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rls\", \"rls\", stage, Some(host));\n}\n\npub fn install_rustfmt(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rustfmt\", \"rustfmt\", stage, Some(host));\n}\n\npub fn install_analysis(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"analysis\", \"rust-analysis\", stage, Some(host));\n}\n\npub fn install_src(builder: &Builder, stage: u32) {\n install_sh(builder, \"src\", \"rust-src\", stage, None);\n}\npub fn install_rustc(builder: &Builder, stage: u32, host: Interned) {\n install_sh(builder, \"rustc\", \"rustc\", stage, Some(host));\n}\n\nfn install_sh(\n builder: &Builder,\n package: &str,\n name: &str,\n stage: u32,\n host: Option>\n) {\n let build = builder.build;\n build.info(&format!(\"Install {} stage{} ({:?})\", package, stage, host));\n\n let prefix_default = PathBuf::from(\"\/usr\/local\");\n let sysconfdir_default = PathBuf::from(\"\/etc\");\n let datadir_default = PathBuf::from(\"share\");\n let docdir_default = datadir_default.join(\"doc\/rust\");\n let bindir_default = PathBuf::from(\"bin\");\n let libdir_default = PathBuf::from(\"lib\");\n let mandir_default = datadir_default.join(\"man\");\n let prefix = build.config.prefix.as_ref().map_or(prefix_default, |p| {\n 
fs::canonicalize(p).expect(&format!(\"could not canonicalize {}\", p.display()))\n });\n let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);\n let datadir = build.config.datadir.as_ref().unwrap_or(&datadir_default);\n let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);\n let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);\n let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);\n let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);\n\n let sysconfdir = prefix.join(sysconfdir);\n let datadir = prefix.join(datadir);\n let docdir = prefix.join(docdir);\n let bindir = prefix.join(bindir);\n let libdir = prefix.join(libdir);\n let mandir = prefix.join(mandir);\n\n let destdir = env::var_os(\"DESTDIR\").map(PathBuf::from);\n\n let prefix = add_destdir(&prefix, &destdir);\n let sysconfdir = add_destdir(&sysconfdir, &destdir);\n let datadir = add_destdir(&datadir, &destdir);\n let docdir = add_destdir(&docdir, &destdir);\n let bindir = add_destdir(&bindir, &destdir);\n let libdir = add_destdir(&libdir, &destdir);\n let mandir = add_destdir(&mandir, &destdir);\n\n let empty_dir = build.out.join(\"tmp\/empty_dir\");\n\n t!(fs::create_dir_all(&empty_dir));\n let package_name = if let Some(host) = host {\n format!(\"{}-{}\", pkgname(build, name), host)\n } else {\n pkgname(build, name)\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.current_dir(&empty_dir)\n .arg(sanitize_sh(&tmpdir(build).join(&package_name).join(\"install.sh\")))\n .arg(format!(\"--prefix={}\", sanitize_sh(&prefix)))\n .arg(format!(\"--sysconfdir={}\", sanitize_sh(&sysconfdir)))\n .arg(format!(\"--datadir={}\", sanitize_sh(&datadir)))\n .arg(format!(\"--docdir={}\", sanitize_sh(&docdir)))\n .arg(format!(\"--bindir={}\", sanitize_sh(&bindir)))\n .arg(format!(\"--libdir={}\", sanitize_sh(&libdir)))\n .arg(format!(\"--mandir={}\", sanitize_sh(&mandir)))\n .arg(\"--disable-ldconfig\");\n build.run(&mut cmd);\n t!(fs::remove_dir_all(&empty_dir));\n}\n\nfn add_destdir(path: &Path, destdir: &Option) -> PathBuf {\n let mut ret = match *destdir {\n Some(ref dest) => dest.clone(),\n None => return path.to_path_buf(),\n };\n for part in path.components() {\n match part {\n Component::Normal(s) => ret.push(s),\n _ => {}\n }\n }\n ret\n}\n\nmacro_rules! 
install {\n (($sel:ident, $builder:ident, $_config:ident),\n $($name:ident,\n $path:expr,\n $default_cond:expr,\n only_hosts: $only_hosts:expr,\n $run_item:block $(, $c:ident)*;)+) => {\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub stage: u32,\n pub target: Interned,\n pub host: Interned,\n }\n\n impl $name {\n #[allow(dead_code)]\n fn should_build(config: &Config) -> bool {\n config.extended && config.tools.as_ref()\n .map_or(true, |t| t.contains($path))\n }\n\n #[allow(dead_code)]\n fn should_install(builder: &Builder) -> bool {\n builder.config.tools.as_ref().map_or(false, |t| t.contains($path))\n }\n }\n\n impl Step for $name {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = $only_hosts;\n $(const $c: bool = true;)*\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let $_config = &run.builder.config;\n run.path($path).default_condition($default_cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n stage: run.builder.top_stage,\n target: run.target,\n host: run.builder.build.build,\n });\n }\n\n fn run($sel, $builder: &Builder) {\n $run_item\n }\n })+\n }\n}\n\ninstall!((self, builder, _config),\n Docs, \"src\/doc\", _config.docs, only_hosts: false, {\n builder.ensure(dist::Docs { stage: self.stage, host: self.target });\n install_docs(builder, self.stage, self.target);\n };\n Std, \"src\/libstd\", true, only_hosts: true, {\n for target in &builder.build.targets {\n builder.ensure(dist::Std {\n compiler: builder.compiler(self.stage, self.host),\n target: *target\n });\n install_std(builder, self.stage, *target);\n }\n };\n Cargo, \"cargo\", Self::should_build(_config), only_hosts: true, {\n builder.ensure(dist::Cargo { stage: self.stage, target: self.target });\n install_cargo(builder, self.stage, self.target);\n };\n Rls, \"rls\", Self::should_build(_config), only_hosts: true, {\n if builder.ensure(dist::Rls { stage: self.stage, target: self.target }).is_some() ||\n Self::should_install(builder) {\n install_rls(builder, self.stage, self.target);\n } else {\n builder.info(&format!(\"skipping Install RLS stage{} ({})\", self.stage, self.target));\n }\n };\n Rustfmt, \"rustfmt\", Self::should_build(_config), only_hosts: true, {\n if builder.ensure(dist::Rustfmt { stage: self.stage, target: self.target }).is_some() ||\n Self::should_install(builder) {\n install_rustfmt(builder, self.stage, self.target);\n } else {\n builder.info(\n &format!(\"skipping Install Rustfmt stage{} ({})\", self.stage, self.target));\n }\n };\n Analysis, \"analysis\", Self::should_build(_config), only_hosts: false, {\n builder.ensure(dist::Analysis {\n compiler: builder.compiler(self.stage, self.host),\n target: self.target\n });\n install_analysis(builder, self.stage, self.target);\n };\n Rustc, \"src\/librustc\", true, only_hosts: true, {\n builder.ensure(dist::Rustc {\n compiler: builder.compiler(self.stage, self.target),\n });\n install_rustc(builder, self.stage, self.target);\n };\n);\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Src {\n pub stage: u32,\n}\n\nimpl Step for Src {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let config = &run.builder.config;\n let cond = config.extended &&\n config.tools.as_ref().map_or(true, |t| t.contains(\"src\"));\n run.path(\"src\").default_condition(cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Src {\n stage: run.builder.top_stage,\n });\n }\n\n fn run(self, 
builder: &Builder) {\n builder.ensure(dist::Src);\n install_src(builder, self.stage);\n }\n}\n<|endoftext|>"} {"text":"Day7_1<|endoftext|>"} {"text":"\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The first version of the prelude of The Rust Standard Library.\n\/\/!\n\/\/! See the [module-level documentation](..\/index.html) for more.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/ Re-exported core operators\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use marker::{Copy, Send, Sized, Sync};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce};\n\n\/\/ Re-exported functions\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use mem::drop;\n\n\/\/ Re-exported types and traits\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use boxed::Box;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use borrow::ToOwned;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use clone::Clone;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use cmp::{PartialEq, PartialOrd, Eq, Ord};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use convert::{AsRef, AsMut, Into, From};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use default::Default;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use iter::{Iterator, Extend, IntoIterator};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use iter::{DoubleEndedIterator, ExactSizeIterator};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use option::Option::{self, Some, None};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use result::Result::{self, Ok, Err};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use slice::SliceConcatExt;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use string::{String, ToString};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)] pub use vec::Vec;\nReformat std prelude source to show it is the sum of core and alloc preludes\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The first version of the prelude of The Rust Standard Library.\n\/\/!\n\/\/! 
See the [module-level documentation](..\/index.html) for more.\n\n\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/ Re-exported core operators\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use marker::{Copy, Send, Sized, Sync};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use ops::{Drop, Fn, FnMut, FnOnce};\n\n\/\/ Re-exported functions\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use mem::drop;\n\n\/\/ Re-exported types and traits\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use clone::Clone;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use cmp::{PartialEq, PartialOrd, Eq, Ord};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use convert::{AsRef, AsMut, Into, From};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use default::Default;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use iter::{Iterator, Extend, IntoIterator};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use iter::{DoubleEndedIterator, ExactSizeIterator};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use option::Option::{self, Some, None};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use result::Result::{self, Ok, Err};\n\n\n\/\/ The file so far is equivalent to src\/libcore\/prelude\/v1.rs,\n\/\/ and below to src\/liballoc\/prelude.rs.\n\/\/ Those files are duplicated rather than using glob imports\n\/\/ because we want docs to show these re-exports as pointing to within `std`.\n\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use boxed::Box;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use borrow::ToOwned;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use slice::SliceConcatExt;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use string::{String, ToString};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[doc(no_inline)]\npub use vec::Vec;\n<|endoftext|>"} {"text":"Fix missing counting<|endoftext|>"} {"text":"Complete problem 7use math::primes;\n\npub fn demo(n: u64) {\n println!(\"{}\", primes::primes(n).last().unwrap());\n}\n<|endoftext|>"} {"text":"Allow builtins in aliases<|endoftext|>"} {"text":"Start operations testing.<|endoftext|>"} {"text":"Add missing audio.rs<|endoftext|>"} {"text":"forgot to add file#[allow(dead_code)]\npub struct Block {\n pub version : u32,\n pub prev_block : Vec,\n pub merkle_root : Vec,\n pub timestamp : u32,\n pub bits : u32,\n pub nonce : u32,\n pub txn_count : u64,\n pub txns : Vec\n}\n\n#[allow(dead_code)]\npub struct Transaction {\n pub version : u32,\n pub tx_in_count : u64,\n pub tx_in : Vec,\n pub tx_out_count : u64,\n pub tx_out : Vec,\n pub lock_time : u32\n}\n\n#[allow(dead_code)]\npub struct TxIn {\n pub previous_output : OutPoint,\n pub script_length : u64,\n pub signature_script : Vec,\n pub sequence : u32\n}\n\n#[allow(dead_code)]\npub struct TxOut {\n pub value : i64,\n pub pk_script_length : u64,\n pub pk_script : Vec\n}\n\n#[allow(dead_code)]\npub struct OutPoint {\n pub hash : Vec,\n pub index : u32\n}\n<|endoftext|>"} {"text":"Add build.rs#[cfg(target_os=\"linux\")]\nfn main(){\n println!(\"cargo:rustc-link-lib=dl\");\n}\n\n#[cfg(any(target_os=\"freebsd\",\n target_os=\"dragonfly\"))]\nfn main(){\n 
println!(\"cargo:rustc-link-lib=c\");\n}\n\n#[cfg(any(target_os=\"openbsd\",\n target_os=\"bitrig\",\n target_os=\"netbsd\",\n target_os=\"macos\"))]\nfn main(){\n \/\/ netbsd claims dl* will be available to any dynamically linked binary, but I haven’t found\n \/\/ any libraries that have to be linked to on other platforms.\n}\n\n#[cfg(target_os=\"windows\")]\nfn main(){}\n<|endoftext|>"} {"text":"Added missing build fileextern crate gl_generator;\nextern crate khronos_api;\n\nuse std::os;\nuse std::io::File;\n\nfn main() {\n let dest = Path::new(os::getenv(\"OUT_DIR\").unwrap());\n\n let mut file = File::create(&dest.join(\"gl_bindings.rs\")).unwrap();\n\n gl_generator::generate_bindings(gl_generator::GlobalGenerator,\n gl_generator::registry::Ns::Gl,\n khronos_api::GL_XML,\n vec![\"GL_EXT_texture_filter_anisotropic\".to_string()],\n \"4.5\", \"core\",\n &mut file).unwrap();\n}\n<|endoftext|>"} {"text":"remove rand_string , use thread_rng instead<|endoftext|>"} {"text":"The Vulkan renderer now uses multiple vertex arrays of each attribute layout<|endoftext|>"} {"text":"use std::collections::BTreeMap;\n\nuse serde_json::value::Value as Json;\n\nuse helpers::{HelperDef, HelperResult};\nuse registry::Registry;\nuse context::{to_json, JsonTruthy};\nuse render::{Helper, RenderContext, Renderable};\nuse error::RenderError;\nuse output::Output;\n\n#[derive(Clone, Copy)]\npub struct EachHelper;\n\nimpl HelperDef for EachHelper {\n fn call(\n &self,\n h: &Helper,\n r: &Registry,\n rc: &mut RenderContext,\n out: &mut Output,\n ) -> HelperResult {\n let value = try!(\n h.param(0)\n .ok_or_else(|| RenderError::new(\"Param not found for helper \\\"each\\\"\"))\n );\n\n let template = h.template();\n\n match template {\n Some(t) => {\n rc.promote_local_vars();\n let local_path_root = value\n .path_root()\n .map(|p| format!(\"{}\/{}\", rc.get_path(), p));\n\n debug!(\"each value {:?}\", value.value());\n let rendered = match (value.value().is_truthy(), value.value()) {\n (true, &Json::Array(ref list)) => {\n let len = list.len();\n for i in 0..len {\n let mut local_rc = rc.derive();\n if let Some(ref p) = local_path_root {\n local_rc.push_local_path_root(p.clone());\n }\n\n local_rc.set_local_var(\"@first\".to_string(), to_json(&(i == 0usize)));\n local_rc.set_local_var(\"@last\".to_string(), to_json(&(i == len - 1)));\n local_rc.set_local_var(\"@index\".to_string(), to_json(&i));\n\n if let Some(inner_path) = value.path() {\n let new_path =\n format!(\"{}\/{}\/[{}]\", local_rc.get_path(), inner_path, i);\n debug!(\"each path {:?}\", new_path);\n local_rc.set_path(new_path.clone());\n }\n\n if let Some(block_param) = h.block_param() {\n let mut map = BTreeMap::new();\n map.insert(block_param.to_string(), to_json(&list[i]));\n local_rc.push_block_context(&map)?;\n }\n\n try!(t.render(r, &mut local_rc, out));\n\n if h.block_param().is_some() {\n local_rc.pop_block_context();\n }\n\n if local_path_root.is_some() {\n local_rc.pop_local_path_root();\n }\n }\n Ok(())\n }\n (true, &Json::Object(ref obj)) => {\n let mut first: bool = true;\n for k in obj.keys() {\n let mut local_rc = rc.derive();\n if let Some(ref p) = local_path_root {\n local_rc.push_local_path_root(p.clone());\n }\n local_rc.set_local_var(\"@first\".to_string(), to_json(&first));\n if first {\n first = false;\n }\n\n local_rc.set_local_var(\"@key\".to_string(), to_json(k));\n\n if let Some(inner_path) = value.path() {\n let new_path =\n format!(\"{}\/{}\/[{}]\", local_rc.get_path(), inner_path, k);\n local_rc.set_path(new_path);\n }\n\n if 
let Some((bp_key, bp_val)) = h.block_param_pair() {\n let mut map = BTreeMap::new();\n map.insert(bp_key.to_string(), to_json(k));\n map.insert(bp_val.to_string(), to_json(obj.get(k).unwrap()));\n local_rc.push_block_context(&map)?;\n }\n\n try!(t.render(r, &mut local_rc, out));\n\n if h.block_param().is_some() {\n local_rc.pop_block_context();\n }\n\n if local_path_root.is_some() {\n local_rc.pop_local_path_root();\n }\n }\n\n Ok(())\n }\n (false, _) => {\n if let Some(else_template) = h.inverse() {\n try!(else_template.render(r, rc, out));\n }\n Ok(())\n }\n _ => Err(RenderError::new(format!(\n \"Param type is not iterable: {:?}\",\n template\n ))),\n };\n\n rc.demote_local_vars();\n rendered\n }\n None => Ok(()),\n }\n }\n}\n\npub static EACH_HELPER: EachHelper = EachHelper;\n\n#[cfg(test)]\nmod test {\n use registry::Registry;\n use context::to_json;\n\n use std::collections::BTreeMap;\n use std::str::FromStr;\n use serde_json::value::Value as Json;\n\n #[test]\n fn test_each() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each this}}{{@first}}|{{@last}}|{{@index}}:{{this}}|{{\/each}}\"\n )\n .is_ok()\n );\n assert!(\n handlebars\n .register_template_string(\n \"t1\",\n \"{{#each this}}{{@first}}|{{@key}}:{{this}}|{{\/each}}\"\n )\n .is_ok()\n );\n\n let r0 = handlebars.render(\"t0\", &vec![1u16, 2u16, 3u16]);\n assert_eq!(\n r0.ok().unwrap(),\n \"true|false|0:1|false|false|1:2|false|true|2:3|\".to_string()\n );\n\n let mut m: BTreeMap = BTreeMap::new();\n m.insert(\"ftp\".to_string(), 21);\n m.insert(\"http\".to_string(), 80);\n let r1 = handlebars.render(\"t1\", &m);\n assert_eq!(r1.ok().unwrap(), \"true|ftp:21|false|http:80|\".to_string());\n }\n\n #[test]\n fn test_each_with_parent() {\n let json_str = r#\"{\"a\":{\"a\":99,\"c\":[{\"d\":100},{\"d\":200}]}}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n \/\/ println!(\"data: {}\", data);\n let mut handlebars = Registry::new();\n\n \/\/ previously, to access the parent in an each block,\n \/\/ a user would need to specify ..\/..\/b, as the path\n \/\/ that is computed includes the array index: .\/a.c.[0]\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a.c}} d={{d}} b={{..\/a.a}} {{\/each}}\")\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \" d=100 b=99 d=200 b=99 \".to_string());\n }\n\n #[test]\n fn test_nested_each_with_parent() {\n let json_str = r#\"{\"a\": [{\"b\": [{\"d\": 100}], \"c\": 200}]}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each a}}{{#each b}}{{d}}:{{..\/c}}{{\/each}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \"100:200\".to_string());\n }\n\n #[test]\n fn test_nested_each() {\n let json_str = r#\"{\"a\": [{\"b\": true}], \"b\": [[1, 2, 3],[4, 5]]}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each b}}{{#if ..\/a}}{{#each this}}{{this}}{{\/each}}{{\/if}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \"12345\".to_string());\n }\n\n #[test]\n fn test_nested_array() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this.[0]}}{{this}}{{\/each}}\")\n 
.is_ok()\n );\n\n let r0 = handlebars.render(\"t0\", &(vec![vec![1, 2, 3]]));\n\n assert_eq!(r0.ok().unwrap(), \"123\".to_string());\n }\n\n #[test]\n fn test_empty_key() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this}}{{@key}}-{{value}}\\n{{\/each}}\")\n .is_ok()\n );\n\n let r0 = handlebars\n .render(\n \"t0\",\n &({\n let mut rv = BTreeMap::new();\n rv.insert(\"foo\".to_owned(), {\n let mut rv = BTreeMap::new();\n rv.insert(\"value\".to_owned(), \"bar\".to_owned());\n rv\n });\n rv.insert(\"\".to_owned(), {\n let mut rv = BTreeMap::new();\n rv.insert(\"value\".to_owned(), \"baz\".to_owned());\n rv\n });\n rv\n }),\n )\n .unwrap();\n\n let mut r0_sp: Vec<_> = r0.split('\\n').collect();\n r0_sp.sort();\n\n assert_eq!(r0_sp, vec![\"\", \"-baz\", \"foo-bar\"]);\n }\n\n #[test]\n fn test_each_else() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a}}1{{else}}empty{{\/each}}\")\n .is_ok()\n );\n let m1 = btreemap! {\n \"a\".to_string() => Vec::::new(),\n };\n let r0 = handlebars.render(\"t0\", &m1).unwrap();\n assert_eq!(r0, \"empty\");\n\n let m2 = btreemap!{\n \"b\".to_string() => Vec::::new()\n };\n let r1 = handlebars.render(\"t0\", &m2).unwrap();\n assert_eq!(r1, \"empty\");\n }\n\n #[test]\n fn test_block_param() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a as |i|}}{{i}}{{\/each}}\")\n .is_ok()\n );\n let m1 = btreemap! {\n \"a\".to_string() => vec![1,2,3,4,5]\n };\n let r0 = handlebars.render(\"t0\", &m1).unwrap();\n assert_eq!(r0, \"12345\");\n }\n\n #[test]\n fn test_each_object_block_param() {\n let mut handlebars = Registry::new();\n let template = \"{{#each this as |k v|}}\\\n {{#with k as |inner_k|}}{{inner_k}}{{\/with}}:{{v}}|\\\n {{\/each}}\";\n assert!(handlebars.register_template_string(\"t0\", template).is_ok());\n\n let m = btreemap!{\n \"ftp\".to_string() => 21,\n \"http\".to_string() => 80\n };\n let r0 = handlebars.render(\"t0\", &m);\n assert_eq!(r0.ok().unwrap(), \"ftp:21|http:80|\".to_string());\n }\n\n #[test]\n fn test_nested_each_with_path_ups() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each a.b}}{{#each c}}{{..\/..\/d}}{{\/each}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let data = btreemap! {\n \"a\".to_string() => to_json(&btreemap! {\n \"b\".to_string() => vec![btreemap!{\"c\".to_string() => vec![1]}]\n }),\n \"d\".to_string() => to_json(&1)\n };\n\n let r0 = handlebars.render(\"t0\", &data);\n assert_eq!(r0.ok().unwrap(), \"1\".to_string());\n }\n\n #[test]\n fn test_nested_each_with_path_up_this() {\n let mut handlebars = Registry::new();\n let template = \"{{#each variant}}{{#each ..\/typearg}}\\\n {{#if @first}}template<{{\/if}}{{this}}{{#if @last}}>{{else}},{{\/if}}\\\n {{\/each}}{{\/each}}\";\n assert!(handlebars.register_template_string(\"t0\", template).is_ok());\n let data = btreemap! 
{\n \"typearg\".to_string() => vec![\"T\".to_string()],\n \"variant\".to_string() => vec![\"1\".to_string(), \"2\".to_string()]\n };\n let r0 = handlebars.render(\"t0\", &data);\n assert_eq!(r0.ok().unwrap(), \"templatetemplate\".to_string());\n }\n\n #[test]\n fn test_key_iteration_with_unicode() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this}}{{@key}}: {{this}}\\n{{\/each}}\")\n .is_ok()\n );\n let data = json!({\n \"normal\": 1,\n \"你好\": 2,\n \"#special key\": 3,\n \"😂\": 4,\n \"me.dot.key\": 5\n });\n let r0 = handlebars.render(\"t0\", &data).ok().unwrap();\n assert!(r0.contains(\"normal: 1\"));\n assert!(r0.contains(\"你好: 2\"));\n assert!(r0.contains(\"#special key: 3\"));\n assert!(r0.contains(\"😂: 4\"));\n assert!(r0.contains(\"me.dot.key: 5\"));\n }\n}\n(fix) remove some unneeded ops in each helperuse std::collections::BTreeMap;\n\nuse serde_json::value::Value as Json;\n\nuse helpers::{HelperDef, HelperResult};\nuse registry::Registry;\nuse context::{to_json, JsonTruthy};\nuse render::{Helper, RenderContext, Renderable};\nuse error::RenderError;\nuse output::Output;\n\n#[derive(Clone, Copy)]\npub struct EachHelper;\n\nimpl HelperDef for EachHelper {\n fn call(\n &self,\n h: &Helper,\n r: &Registry,\n rc: &mut RenderContext,\n out: &mut Output,\n ) -> HelperResult {\n let value = try!(\n h.param(0)\n .ok_or_else(|| RenderError::new(\"Param not found for helper \\\"each\\\"\"))\n );\n\n let template = h.template();\n\n match template {\n Some(t) => {\n rc.promote_local_vars();\n let local_path_root = value\n .path_root()\n .map(|p| format!(\"{}\/{}\", rc.get_path(), p));\n\n debug!(\"each value {:?}\", value.value());\n let rendered = match (value.value().is_truthy(), value.value()) {\n (true, &Json::Array(ref list)) => {\n let len = list.len();\n for i in 0..len {\n let mut local_rc = rc.derive();\n if let Some(ref p) = local_path_root {\n local_rc.push_local_path_root(p.clone());\n }\n\n local_rc.set_local_var(\"@first\".to_string(), to_json(&(i == 0usize)));\n local_rc.set_local_var(\"@last\".to_string(), to_json(&(i == len - 1)));\n local_rc.set_local_var(\"@index\".to_string(), to_json(&i));\n\n if let Some(inner_path) = value.path() {\n let new_path =\n format!(\"{}\/{}\/[{}]\", local_rc.get_path(), inner_path, i);\n debug!(\"each path {:?}\", new_path);\n local_rc.set_path(new_path);\n }\n\n if let Some(block_param) = h.block_param() {\n let mut map = BTreeMap::new();\n map.insert(block_param.to_string(), to_json(&list[i]));\n local_rc.push_block_context(&map)?;\n }\n\n try!(t.render(r, &mut local_rc, out));\n\n \/\/ local_rc is dropped at the end of each iteration\n \/\/ so we don't need to cleanup\n \/\/ \n \/\/ if h.block_param().is_some() {\n \/\/ local_rc.pop_block_context();\n \/\/ }\n\n \/\/ if local_path_root.is_some() {\n \/\/ local_rc.pop_local_path_root();\n \/\/ }\n }\n Ok(())\n }\n (true, &Json::Object(ref obj)) => {\n let mut first: bool = true;\n for k in obj.keys() {\n let mut local_rc = rc.derive();\n if let Some(ref p) = local_path_root {\n local_rc.push_local_path_root(p.clone());\n }\n local_rc.set_local_var(\"@first\".to_string(), to_json(&first));\n if first {\n first = false;\n }\n\n local_rc.set_local_var(\"@key\".to_string(), to_json(k));\n\n if let Some(inner_path) = value.path() {\n let new_path =\n format!(\"{}\/{}\/[{}]\", local_rc.get_path(), inner_path, k);\n local_rc.set_path(new_path);\n }\n\n if let Some((bp_key, bp_val)) = h.block_param_pair() {\n let mut map = 
BTreeMap::new();\n map.insert(bp_key.to_string(), to_json(k));\n map.insert(bp_val.to_string(), to_json(obj.get(k).unwrap()));\n local_rc.push_block_context(&map)?;\n }\n\n try!(t.render(r, &mut local_rc, out));\n\n \/\/ if h.block_param().is_some() {\n \/\/ local_rc.pop_block_context();\n \/\/ }\n\n \/\/ if local_path_root.is_some() {\n \/\/ local_rc.pop_local_path_root();\n \/\/ }\n }\n\n Ok(())\n }\n (false, _) => {\n if let Some(else_template) = h.inverse() {\n try!(else_template.render(r, rc, out));\n }\n Ok(())\n }\n _ => Err(RenderError::new(format!(\n \"Param type is not iterable: {:?}\",\n value.value()\n ))),\n };\n\n rc.demote_local_vars();\n rendered\n }\n None => Ok(()),\n }\n }\n}\n\npub static EACH_HELPER: EachHelper = EachHelper;\n\n#[cfg(test)]\nmod test {\n use registry::Registry;\n use context::to_json;\n\n use std::collections::BTreeMap;\n use std::str::FromStr;\n use serde_json::value::Value as Json;\n\n #[test]\n fn test_each() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each this}}{{@first}}|{{@last}}|{{@index}}:{{this}}|{{\/each}}\"\n )\n .is_ok()\n );\n assert!(\n handlebars\n .register_template_string(\n \"t1\",\n \"{{#each this}}{{@first}}|{{@key}}:{{this}}|{{\/each}}\"\n )\n .is_ok()\n );\n\n let r0 = handlebars.render(\"t0\", &vec![1u16, 2u16, 3u16]);\n assert_eq!(\n r0.ok().unwrap(),\n \"true|false|0:1|false|false|1:2|false|true|2:3|\".to_string()\n );\n\n let mut m: BTreeMap = BTreeMap::new();\n m.insert(\"ftp\".to_string(), 21);\n m.insert(\"http\".to_string(), 80);\n let r1 = handlebars.render(\"t1\", &m);\n assert_eq!(r1.ok().unwrap(), \"true|ftp:21|false|http:80|\".to_string());\n }\n\n #[test]\n fn test_each_with_parent() {\n let json_str = r#\"{\"a\":{\"a\":99,\"c\":[{\"d\":100},{\"d\":200}]}}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n \/\/ println!(\"data: {}\", data);\n let mut handlebars = Registry::new();\n\n \/\/ previously, to access the parent in an each block,\n \/\/ a user would need to specify ..\/..\/b, as the path\n \/\/ that is computed includes the array index: .\/a.c.[0]\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a.c}} d={{d}} b={{..\/a.a}} {{\/each}}\")\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \" d=100 b=99 d=200 b=99 \".to_string());\n }\n\n #[test]\n fn test_nested_each_with_parent() {\n let json_str = r#\"{\"a\": [{\"b\": [{\"d\": 100}], \"c\": 200}]}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each a}}{{#each b}}{{d}}:{{..\/c}}{{\/each}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \"100:200\".to_string());\n }\n\n #[test]\n fn test_nested_each() {\n let json_str = r#\"{\"a\": [{\"b\": true}], \"b\": [[1, 2, 3],[4, 5]]}\"#;\n\n let data = Json::from_str(json_str).unwrap();\n\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each b}}{{#if ..\/a}}{{#each this}}{{this}}{{\/each}}{{\/if}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let r1 = handlebars.render(\"t0\", &data);\n assert_eq!(r1.ok().unwrap(), \"12345\".to_string());\n }\n\n #[test]\n fn test_nested_array() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this.[0]}}{{this}}{{\/each}}\")\n .is_ok()\n );\n\n let r0 = 
handlebars.render(\"t0\", &(vec![vec![1, 2, 3]]));\n\n assert_eq!(r0.ok().unwrap(), \"123\".to_string());\n }\n\n #[test]\n fn test_empty_key() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this}}{{@key}}-{{value}}\\n{{\/each}}\")\n .is_ok()\n );\n\n let r0 = handlebars\n .render(\n \"t0\",\n &({\n let mut rv = BTreeMap::new();\n rv.insert(\"foo\".to_owned(), {\n let mut rv = BTreeMap::new();\n rv.insert(\"value\".to_owned(), \"bar\".to_owned());\n rv\n });\n rv.insert(\"\".to_owned(), {\n let mut rv = BTreeMap::new();\n rv.insert(\"value\".to_owned(), \"baz\".to_owned());\n rv\n });\n rv\n }),\n )\n .unwrap();\n\n let mut r0_sp: Vec<_> = r0.split('\\n').collect();\n r0_sp.sort();\n\n assert_eq!(r0_sp, vec![\"\", \"-baz\", \"foo-bar\"]);\n }\n\n #[test]\n fn test_each_else() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a}}1{{else}}empty{{\/each}}\")\n .is_ok()\n );\n let m1 = btreemap! {\n \"a\".to_string() => Vec::::new(),\n };\n let r0 = handlebars.render(\"t0\", &m1).unwrap();\n assert_eq!(r0, \"empty\");\n\n let m2 = btreemap!{\n \"b\".to_string() => Vec::::new()\n };\n let r1 = handlebars.render(\"t0\", &m2).unwrap();\n assert_eq!(r1, \"empty\");\n }\n\n #[test]\n fn test_block_param() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each a as |i|}}{{i}}{{\/each}}\")\n .is_ok()\n );\n let m1 = btreemap! {\n \"a\".to_string() => vec![1,2,3,4,5]\n };\n let r0 = handlebars.render(\"t0\", &m1).unwrap();\n assert_eq!(r0, \"12345\");\n }\n\n #[test]\n fn test_each_object_block_param() {\n let mut handlebars = Registry::new();\n let template = \"{{#each this as |k v|}}\\\n {{#with k as |inner_k|}}{{inner_k}}{{\/with}}:{{v}}|\\\n {{\/each}}\";\n assert!(handlebars.register_template_string(\"t0\", template).is_ok());\n\n let m = btreemap!{\n \"ftp\".to_string() => 21,\n \"http\".to_string() => 80\n };\n let r0 = handlebars.render(\"t0\", &m);\n assert_eq!(r0.ok().unwrap(), \"ftp:21|http:80|\".to_string());\n }\n\n #[test]\n fn test_nested_each_with_path_ups() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\n \"t0\",\n \"{{#each a.b}}{{#each c}}{{..\/..\/d}}{{\/each}}{{\/each}}\"\n )\n .is_ok()\n );\n\n let data = btreemap! {\n \"a\".to_string() => to_json(&btreemap! {\n \"b\".to_string() => vec![btreemap!{\"c\".to_string() => vec![1]}]\n }),\n \"d\".to_string() => to_json(&1)\n };\n\n let r0 = handlebars.render(\"t0\", &data);\n assert_eq!(r0.ok().unwrap(), \"1\".to_string());\n }\n\n #[test]\n fn test_nested_each_with_path_up_this() {\n let mut handlebars = Registry::new();\n let template = \"{{#each variant}}{{#each ..\/typearg}}\\\n {{#if @first}}template<{{\/if}}{{this}}{{#if @last}}>{{else}},{{\/if}}\\\n {{\/each}}{{\/each}}\";\n assert!(handlebars.register_template_string(\"t0\", template).is_ok());\n let data = btreemap! 
{\n \"typearg\".to_string() => vec![\"T\".to_string()],\n \"variant\".to_string() => vec![\"1\".to_string(), \"2\".to_string()]\n };\n let r0 = handlebars.render(\"t0\", &data);\n assert_eq!(r0.ok().unwrap(), \"templatetemplate\".to_string());\n }\n\n #[test]\n fn test_key_iteration_with_unicode() {\n let mut handlebars = Registry::new();\n assert!(\n handlebars\n .register_template_string(\"t0\", \"{{#each this}}{{@key}}: {{this}}\\n{{\/each}}\")\n .is_ok()\n );\n let data = json!({\n \"normal\": 1,\n \"你好\": 2,\n \"#special key\": 3,\n \"😂\": 4,\n \"me.dot.key\": 5\n });\n let r0 = handlebars.render(\"t0\", &data).ok().unwrap();\n assert!(r0.contains(\"normal: 1\"));\n assert!(r0.contains(\"你好: 2\"));\n assert!(r0.contains(\"#special key: 3\"));\n assert!(r0.contains(\"😂: 4\"));\n assert!(r0.contains(\"me.dot.key: 5\"));\n }\n}\n<|endoftext|>"} {"text":"For over reversed range<|endoftext|>"} {"text":"Begin structure of game. Add Point2D and Tank structs.<|endoftext|>"} {"text":"Skeleton for problem 1985\/\/ https:\/\/leetcode.com\/problems\/find-the-kth-largest-integer-in-the-array\/\npub fn kth_largest_number(nums: Vec, k: i32) -> String {\n todo!()\n}\n\nfn main() {\n println!(\n \"{}\",\n kth_largest_number(\n vec![\n String::from(\"3\"),\n String::from(\"6\"),\n String::from(\"7\"),\n String::from(\"10\")\n ],\n 4\n )\n ); \/\/ \"3\"\n println!(\n \"{}\",\n kth_largest_number(\n vec![\n String::from(\"2\"),\n String::from(\"21\"),\n String::from(\"12\"),\n String::from(\"1\")\n ],\n 3\n )\n ); \/\/ \"2\"\n println!(\n \"{}\",\n kth_largest_number(vec![String::from(\"0\"), String::from(\"0\")], 2)\n ); \/\/ \"0\"\n}\n<|endoftext|>"} {"text":"Solve problem 2418\/\/ https:\/\/leetcode.com\/problems\/sort-the-people\/\npub fn sort_people(names: Vec, heights: Vec) -> Vec {\n let mut v: Vec<(i32, String)> = heights.into_iter().zip(names.into_iter()).collect();\n v.sort_by(|a, b| b.cmp(a));\n v.into_iter().map(|a| a.1).clone().collect()\n}\n\nfn main() {\n println!(\n \"{:?}\",\n sort_people(\n vec![\"Mary\".to_string(), \"John\".to_string(), \"Emma\".to_string()],\n vec![180, 165, 170]\n )\n ); \/\/ [\"Mary\",\"Emma\",\"John\"]\n println!(\n \"{:?}\",\n sort_people(\n vec![\"Alice\".to_string(), \"Bob\".to_string(), \"Bob\".to_string()],\n vec![155, 185, 150]\n )\n ); \/\/ [\"Bob\",\"Alice\",\"Bob\"]\n}\n<|endoftext|>"} {"text":"type T = uint;\n\n\/**\n * Divide two numbers, return the result, rounded up.\n *\n * # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The smallest integer `q` such that `x\/y <= q`.\n *\/\npure fn div_ceil(x: uint, y: uint) -> uint {\n let div = div(x, y);\n if x % y == 0u { ret div;}\n else { ret div + 1u; }\n}\n\n\/**\n * Divide two numbers, return the result, rounded to the closest integer.\n *\n * # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The integer `q` closest to `x\/y`.\n *\/\npure fn div_round(x: uint, y: uint) -> uint {\n let div = div(x, y);\n if x % y * 2u < y { ret div;}\n else { ret div + 1u; }\n}\n\n\/**\n * Divide two numbers, return the result, rounded down.\n *\n * Note: This is the same function as `div`.\n *\n * # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The smallest integer `q` such that `x\/y <= q`. 
This\n * is either `x\/y` or `x\/y + 1`.\n *\/\npure fn div_floor(x: uint, y: uint) -> uint { ret x \/ y; }\n\n\/\/\/ Produce a uint suitable for use in a hash table\npure fn hash(&&x: uint) -> uint { ret x; }\n\n\/**\n * Iterate over the range [`lo`..`hi`), or stop when requested\n *\n * # Arguments\n *\n * * lo - The integer at which to start the loop (included)\n * * hi - The integer at which to stop the loop (excluded)\n * * it - A block to execute with each consecutive integer of the range.\n * Return `true` to continue, `false` to stop.\n *\n * # Return value\n *\n * `true` If execution proceeded correctly, `false` if it was interrupted,\n * that is if `it` returned `false` at any point.\n *\/\nfn iterate(lo: uint, hi: uint, it: fn(uint) -> bool) -> bool {\n let mut i = lo;\n while i < hi {\n if (!it(i)) { ret false; }\n i += 1u;\n }\n ret true;\n}\n\n\/\/\/ Returns the smallest power of 2 greater than or equal to `n`\n#[inline(always)]\nfn next_power_of_two(n: uint) -> uint {\n let halfbits: uint = sys::size_of::() * 4u;\n let mut tmp: uint = n - 1u;\n let mut shift: uint = 1u;\n while shift <= halfbits { tmp |= tmp >> shift; shift <<= 1u; }\n ret tmp + 1u;\n}\n\n#[test]\nfn test_next_power_of_two() {\n assert (uint::next_power_of_two(0u) == 0u);\n assert (uint::next_power_of_two(1u) == 1u);\n assert (uint::next_power_of_two(2u) == 2u);\n assert (uint::next_power_of_two(3u) == 4u);\n assert (uint::next_power_of_two(4u) == 4u);\n assert (uint::next_power_of_two(5u) == 8u);\n assert (uint::next_power_of_two(6u) == 8u);\n assert (uint::next_power_of_two(7u) == 8u);\n assert (uint::next_power_of_two(8u) == 8u);\n assert (uint::next_power_of_two(9u) == 16u);\n assert (uint::next_power_of_two(10u) == 16u);\n assert (uint::next_power_of_two(11u) == 16u);\n assert (uint::next_power_of_two(12u) == 16u);\n assert (uint::next_power_of_two(13u) == 16u);\n assert (uint::next_power_of_two(14u) == 16u);\n assert (uint::next_power_of_two(15u) == 16u);\n assert (uint::next_power_of_two(16u) == 16u);\n assert (uint::next_power_of_two(17u) == 32u);\n assert (uint::next_power_of_two(18u) == 32u);\n assert (uint::next_power_of_two(19u) == 32u);\n assert (uint::next_power_of_two(20u) == 32u);\n assert (uint::next_power_of_two(21u) == 32u);\n assert (uint::next_power_of_two(22u) == 32u);\n assert (uint::next_power_of_two(23u) == 32u);\n assert (uint::next_power_of_two(24u) == 32u);\n assert (uint::next_power_of_two(25u) == 32u);\n assert (uint::next_power_of_two(26u) == 32u);\n assert (uint::next_power_of_two(27u) == 32u);\n assert (uint::next_power_of_two(28u) == 32u);\n assert (uint::next_power_of_two(29u) == 32u);\n assert (uint::next_power_of_two(30u) == 32u);\n assert (uint::next_power_of_two(31u) == 32u);\n assert (uint::next_power_of_two(32u) == 32u);\n assert (uint::next_power_of_two(33u) == 64u);\n assert (uint::next_power_of_two(34u) == 64u);\n assert (uint::next_power_of_two(35u) == 64u);\n assert (uint::next_power_of_two(36u) == 64u);\n assert (uint::next_power_of_two(37u) == 64u);\n assert (uint::next_power_of_two(38u) == 64u);\n assert (uint::next_power_of_two(39u) == 64u);\n}\n\n#[test]\nfn test_overflows() {\n assert (uint::max_value > 0u);\n assert (uint::min_value <= 0u);\n assert (uint::min_value + uint::max_value + 1u == 0u);\n}\n\n#[test]\nfn test_div() {\n assert(uint::div_floor(3u, 4u) == 0u);\n assert(uint::div_ceil(3u, 4u) == 1u);\n assert(uint::div_round(3u, 4u) == 1u);\n}\nMake uint::iterate puretype T = uint;\n\n\/**\n * Divide two numbers, return the result, rounded up.\n *\n 
* # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The smallest integer `q` such that `x\/y <= q`.\n *\/\npure fn div_ceil(x: uint, y: uint) -> uint {\n let div = div(x, y);\n if x % y == 0u { ret div;}\n else { ret div + 1u; }\n}\n\n\/**\n * Divide two numbers, return the result, rounded to the closest integer.\n *\n * # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The integer `q` closest to `x\/y`.\n *\/\npure fn div_round(x: uint, y: uint) -> uint {\n let div = div(x, y);\n if x % y * 2u < y { ret div;}\n else { ret div + 1u; }\n}\n\n\/**\n * Divide two numbers, return the result, rounded down.\n *\n * Note: This is the same function as `div`.\n *\n * # Arguments\n *\n * * x - an integer\n * * y - an integer distinct from 0u\n *\n * # Return value\n *\n * The smallest integer `q` such that `x\/y <= q`. This\n * is either `x\/y` or `x\/y + 1`.\n *\/\npure fn div_floor(x: uint, y: uint) -> uint { ret x \/ y; }\n\n\/\/\/ Produce a uint suitable for use in a hash table\npure fn hash(&&x: uint) -> uint { ret x; }\n\n\/**\n * Iterate over the range [`lo`..`hi`), or stop when requested\n *\n * # Arguments\n *\n * * lo - The integer at which to start the loop (included)\n * * hi - The integer at which to stop the loop (excluded)\n * * it - A block to execute with each consecutive integer of the range.\n * Return `true` to continue, `false` to stop.\n *\n * # Return value\n *\n * `true` If execution proceeded correctly, `false` if it was interrupted,\n * that is if `it` returned `false` at any point.\n *\/\npure fn iterate(lo: uint, hi: uint, it: fn(uint) -> bool) -> bool {\n let mut i = lo;\n while i < hi {\n if (!it(i)) { ret false; }\n i += 1u;\n }\n ret true;\n}\n\n\/\/\/ Returns the smallest power of 2 greater than or equal to `n`\n#[inline(always)]\nfn next_power_of_two(n: uint) -> uint {\n let halfbits: uint = sys::size_of::() * 4u;\n let mut tmp: uint = n - 1u;\n let mut shift: uint = 1u;\n while shift <= halfbits { tmp |= tmp >> shift; shift <<= 1u; }\n ret tmp + 1u;\n}\n\n#[test]\nfn test_next_power_of_two() {\n assert (uint::next_power_of_two(0u) == 0u);\n assert (uint::next_power_of_two(1u) == 1u);\n assert (uint::next_power_of_two(2u) == 2u);\n assert (uint::next_power_of_two(3u) == 4u);\n assert (uint::next_power_of_two(4u) == 4u);\n assert (uint::next_power_of_two(5u) == 8u);\n assert (uint::next_power_of_two(6u) == 8u);\n assert (uint::next_power_of_two(7u) == 8u);\n assert (uint::next_power_of_two(8u) == 8u);\n assert (uint::next_power_of_two(9u) == 16u);\n assert (uint::next_power_of_two(10u) == 16u);\n assert (uint::next_power_of_two(11u) == 16u);\n assert (uint::next_power_of_two(12u) == 16u);\n assert (uint::next_power_of_two(13u) == 16u);\n assert (uint::next_power_of_two(14u) == 16u);\n assert (uint::next_power_of_two(15u) == 16u);\n assert (uint::next_power_of_two(16u) == 16u);\n assert (uint::next_power_of_two(17u) == 32u);\n assert (uint::next_power_of_two(18u) == 32u);\n assert (uint::next_power_of_two(19u) == 32u);\n assert (uint::next_power_of_two(20u) == 32u);\n assert (uint::next_power_of_two(21u) == 32u);\n assert (uint::next_power_of_two(22u) == 32u);\n assert (uint::next_power_of_two(23u) == 32u);\n assert (uint::next_power_of_two(24u) == 32u);\n assert (uint::next_power_of_two(25u) == 32u);\n assert (uint::next_power_of_two(26u) == 32u);\n assert (uint::next_power_of_two(27u) == 32u);\n assert (uint::next_power_of_two(28u) == 32u);\n assert 
(uint::next_power_of_two(29u) == 32u);\n assert (uint::next_power_of_two(30u) == 32u);\n assert (uint::next_power_of_two(31u) == 32u);\n assert (uint::next_power_of_two(32u) == 32u);\n assert (uint::next_power_of_two(33u) == 64u);\n assert (uint::next_power_of_two(34u) == 64u);\n assert (uint::next_power_of_two(35u) == 64u);\n assert (uint::next_power_of_two(36u) == 64u);\n assert (uint::next_power_of_two(37u) == 64u);\n assert (uint::next_power_of_two(38u) == 64u);\n assert (uint::next_power_of_two(39u) == 64u);\n}\n\n#[test]\nfn test_overflows() {\n assert (uint::max_value > 0u);\n assert (uint::min_value <= 0u);\n assert (uint::min_value + uint::max_value + 1u == 0u);\n}\n\n#[test]\nfn test_div() {\n assert(uint::div_floor(3u, 4u) == 0u);\n assert(uint::div_ceil(3u, 4u) == 1u);\n assert(uint::div_round(3u, 4u) == 1u);\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse syntax::fold::Folder;\nuse syntax::{ast, fold, attr};\nuse syntax::codemap;\n\nstruct Context<'a> {\n in_cfg: 'a |attrs: &[ast::Attribute]| -> bool,\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(crate: ast::Crate) -> ast::Crate {\n let config = crate.config.clone();\n strip_items(crate, |attrs| in_cfg(config, attrs))\n}\n\nimpl<'a> fold::Folder for Context<'a> {\n fn fold_mod(&mut self, module: &ast::Mod) -> ast::Mod {\n fold_mod(self, module)\n }\n fn fold_block(&mut self, block: ast::P) -> ast::P {\n fold_block(self, block)\n }\n fn fold_foreign_mod(&mut self, foreign_mod: &ast::ForeignMod) -> ast::ForeignMod {\n fold_foreign_mod(self, foreign_mod)\n }\n fn fold_item_underscore(&mut self, item: &ast::Item_) -> ast::Item_ {\n fold_item_underscore(self, item)\n }\n}\n\npub fn strip_items(crate: ast::Crate,\n in_cfg: |attrs: &[ast::Attribute]| -> bool)\n -> ast::Crate {\n let mut ctxt = Context {\n in_cfg: in_cfg,\n };\n ctxt.fold_crate(crate)\n}\n\nfn filter_view_item<'r>(cx: &Context, view_item: &'r ast::ViewItem)\n -> Option<&'r ast::ViewItem> {\n if view_item_in_cfg(cx, view_item) {\n Some(view_item)\n } else {\n None\n }\n}\n\nfn fold_mod(cx: &mut Context, m: &ast::Mod) -> ast::Mod {\n let filtered_items = m.items.iter()\n .filter(|&a| item_in_cfg(cx, *a))\n .flat_map(|&x| cx.fold_item(x).move_iter())\n .collect();\n let filtered_view_items = m.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::Mod {\n view_items: filtered_view_items,\n items: filtered_items\n }\n}\n\nfn filter_foreign_item(cx: &Context, item: @ast::ForeignItem)\n -> Option<@ast::ForeignItem> {\n if foreign_item_in_cfg(cx, item) {\n Some(item)\n } else {\n None\n }\n}\n\nfn fold_foreign_mod(cx: &mut Context, nm: &ast::ForeignMod) -> ast::ForeignMod {\n let filtered_items = nm.items\n .iter()\n .filter_map(|a| filter_foreign_item(cx, *a))\n .collect();\n let filtered_view_items = nm.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::ForeignMod {\n abis: nm.abis,\n view_items: filtered_view_items,\n items: filtered_items\n }\n}\n\nfn 
fold_item_underscore(cx: &mut Context, item: &ast::Item_) -> ast::Item_ {\n let item = match *item {\n ast::ItemImpl(ref a, ref b, c, ref methods) => {\n let methods = methods.iter().filter(|m| method_in_cfg(cx, **m))\n .map(|x| *x).collect();\n ast::ItemImpl((*a).clone(), (*b).clone(), c, methods)\n }\n ast::ItemTrait(ref a, ref b, ref methods) => {\n let methods = methods.iter()\n .filter(|m| trait_method_in_cfg(cx, *m) )\n .map(|x| (*x).clone())\n .collect();\n ast::ItemTrait((*a).clone(), (*b).clone(), methods)\n }\n ast::ItemStruct(def, ref generics) => {\n ast::ItemStruct(fold_struct(cx, def), generics.clone())\n }\n ast::ItemEnum(ref def, ref generics) => {\n let mut variants = def.variants.iter().map(|c| c.clone()).filter(|m| {\n (cx.in_cfg)(m.node.attrs)\n }).map(|v| {\n match v.node.kind {\n ast::TupleVariantKind(..) => v,\n ast::StructVariantKind(def) => {\n let def = fold_struct(cx, def);\n @codemap::Spanned {\n node: ast::Variant_ {\n kind: ast::StructVariantKind(def),\n ..v.node.clone()\n },\n ..*v\n }\n }\n }\n });\n ast::ItemEnum(ast::EnumDef {\n variants: variants.collect(),\n }, generics.clone())\n }\n ref item => item.clone(),\n };\n\n fold::noop_fold_item_underscore(&item, cx)\n}\n\nfn fold_struct(cx: &Context, def: &ast::StructDef) -> @ast::StructDef {\n let mut fields = def.fields.iter().map(|c| c.clone()).filter(|m| {\n (cx.in_cfg)(m.node.attrs)\n });\n @ast::StructDef {\n fields: fields.collect(),\n ctor_id: def.ctor_id,\n }\n}\n\nfn retain_stmt(cx: &Context, stmt: @ast::Stmt) -> bool {\n match stmt.node {\n ast::StmtDecl(decl, _) => {\n match decl.node {\n ast::DeclItem(item) => {\n item_in_cfg(cx, item)\n }\n _ => true\n }\n }\n _ => true\n }\n}\n\nfn fold_block(cx: &mut Context, b: ast::P) -> ast::P {\n let resulting_stmts = b.stmts.iter()\n .filter(|&a| retain_stmt(cx, *a))\n .flat_map(|&stmt| cx.fold_stmt(stmt).move_iter())\n .collect();\n let filtered_view_items = b.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::P(ast::Block {\n view_items: filtered_view_items,\n stmts: resulting_stmts,\n expr: b.expr.map(|x| cx.fold_expr(x)),\n id: b.id,\n rules: b.rules,\n span: b.span,\n })\n}\n\nfn item_in_cfg(cx: &Context, item: &ast::Item) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn foreign_item_in_cfg(cx: &Context, item: &ast::ForeignItem) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn view_item_in_cfg(cx: &Context, item: &ast::ViewItem) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn method_in_cfg(cx: &Context, meth: &ast::Method) -> bool {\n return (cx.in_cfg)(meth.attrs);\n}\n\nfn trait_method_in_cfg(cx: &Context, meth: &ast::TraitMethod) -> bool {\n match *meth {\n ast::Required(ref meth) => (cx.in_cfg)(meth.attrs),\n ast::Provided(meth) => (cx.in_cfg)(meth.attrs)\n }\n}\n\n\/\/ Determine if an item should be translated in the current crate\n\/\/ configuration based on the item's attributes\nfn in_cfg(cfg: &[@ast::MetaItem], attrs: &[ast::Attribute]) -> bool {\n attr::test_cfg(cfg, attrs.iter().map(|x| *x))\n}\n\nfront -- collapse iterator actions that require access to the same &mut state\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse syntax::fold::Folder;\nuse syntax::{ast, fold, attr};\nuse syntax::codemap;\n\nstruct Context<'a> {\n in_cfg: 'a |attrs: &[ast::Attribute]| -> bool,\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(crate: ast::Crate) -> ast::Crate {\n let config = crate.config.clone();\n strip_items(crate, |attrs| in_cfg(config, attrs))\n}\n\nimpl<'a> fold::Folder for Context<'a> {\n fn fold_mod(&mut self, module: &ast::Mod) -> ast::Mod {\n fold_mod(self, module)\n }\n fn fold_block(&mut self, block: ast::P) -> ast::P {\n fold_block(self, block)\n }\n fn fold_foreign_mod(&mut self, foreign_mod: &ast::ForeignMod) -> ast::ForeignMod {\n fold_foreign_mod(self, foreign_mod)\n }\n fn fold_item_underscore(&mut self, item: &ast::Item_) -> ast::Item_ {\n fold_item_underscore(self, item)\n }\n}\n\npub fn strip_items(crate: ast::Crate,\n in_cfg: |attrs: &[ast::Attribute]| -> bool)\n -> ast::Crate {\n let mut ctxt = Context {\n in_cfg: in_cfg,\n };\n ctxt.fold_crate(crate)\n}\n\nfn filter_view_item<'r>(cx: &Context, view_item: &'r ast::ViewItem)\n -> Option<&'r ast::ViewItem> {\n if view_item_in_cfg(cx, view_item) {\n Some(view_item)\n } else {\n None\n }\n}\n\nfn fold_mod(cx: &mut Context, m: &ast::Mod) -> ast::Mod {\n let filtered_items: ~[&@ast::Item] = m.items.iter()\n .filter(|&a| item_in_cfg(cx, *a))\n .collect();\n let flattened_items = filtered_items.move_iter()\n .flat_map(|&x| cx.fold_item(x).move_iter())\n .collect();\n let filtered_view_items = m.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::Mod {\n view_items: filtered_view_items,\n items: flattened_items\n }\n}\n\nfn filter_foreign_item(cx: &Context, item: @ast::ForeignItem)\n -> Option<@ast::ForeignItem> {\n if foreign_item_in_cfg(cx, item) {\n Some(item)\n } else {\n None\n }\n}\n\nfn fold_foreign_mod(cx: &mut Context, nm: &ast::ForeignMod) -> ast::ForeignMod {\n let filtered_items = nm.items\n .iter()\n .filter_map(|a| filter_foreign_item(cx, *a))\n .collect();\n let filtered_view_items = nm.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::ForeignMod {\n abis: nm.abis,\n view_items: filtered_view_items,\n items: filtered_items\n }\n}\n\nfn fold_item_underscore(cx: &mut Context, item: &ast::Item_) -> ast::Item_ {\n let item = match *item {\n ast::ItemImpl(ref a, ref b, c, ref methods) => {\n let methods = methods.iter().filter(|m| method_in_cfg(cx, **m))\n .map(|x| *x).collect();\n ast::ItemImpl((*a).clone(), (*b).clone(), c, methods)\n }\n ast::ItemTrait(ref a, ref b, ref methods) => {\n let methods = methods.iter()\n .filter(|m| trait_method_in_cfg(cx, *m) )\n .map(|x| (*x).clone())\n .collect();\n ast::ItemTrait((*a).clone(), (*b).clone(), methods)\n }\n ast::ItemStruct(def, ref generics) => {\n ast::ItemStruct(fold_struct(cx, def), generics.clone())\n }\n ast::ItemEnum(ref def, ref generics) => {\n let mut variants = def.variants.iter().map(|c| c.clone()).\n filter_map(|v| {\n if !(cx.in_cfg)(v.node.attrs) {\n None\n } else {\n Some(match v.node.kind {\n ast::TupleVariantKind(..) 
=> v,\n ast::StructVariantKind(def) => {\n let def = fold_struct(cx, def);\n @codemap::Spanned {\n node: ast::Variant_ {\n kind: ast::StructVariantKind(def),\n ..v.node.clone()\n },\n ..*v\n }\n }\n })\n }\n });\n ast::ItemEnum(ast::EnumDef {\n variants: variants.collect(),\n }, generics.clone())\n }\n ref item => item.clone(),\n };\n\n fold::noop_fold_item_underscore(&item, cx)\n}\n\nfn fold_struct(cx: &Context, def: &ast::StructDef) -> @ast::StructDef {\n let mut fields = def.fields.iter().map(|c| c.clone()).filter(|m| {\n (cx.in_cfg)(m.node.attrs)\n });\n @ast::StructDef {\n fields: fields.collect(),\n ctor_id: def.ctor_id,\n }\n}\n\nfn retain_stmt(cx: &Context, stmt: @ast::Stmt) -> bool {\n match stmt.node {\n ast::StmtDecl(decl, _) => {\n match decl.node {\n ast::DeclItem(item) => {\n item_in_cfg(cx, item)\n }\n _ => true\n }\n }\n _ => true\n }\n}\n\nfn fold_block(cx: &mut Context, b: ast::P) -> ast::P {\n let resulting_stmts: ~[&@ast::Stmt] =\n b.stmts.iter().filter(|&a| retain_stmt(cx, *a)).collect();\n let resulting_stmts = resulting_stmts.move_iter()\n .flat_map(|&stmt| cx.fold_stmt(stmt).move_iter())\n .collect();\n let filtered_view_items = b.view_items.iter().filter_map(|a| {\n filter_view_item(cx, a).map(|x| cx.fold_view_item(x))\n }).collect();\n ast::P(ast::Block {\n view_items: filtered_view_items,\n stmts: resulting_stmts,\n expr: b.expr.map(|x| cx.fold_expr(x)),\n id: b.id,\n rules: b.rules,\n span: b.span,\n })\n}\n\nfn item_in_cfg(cx: &Context, item: &ast::Item) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn foreign_item_in_cfg(cx: &Context, item: &ast::ForeignItem) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn view_item_in_cfg(cx: &Context, item: &ast::ViewItem) -> bool {\n return (cx.in_cfg)(item.attrs);\n}\n\nfn method_in_cfg(cx: &Context, meth: &ast::Method) -> bool {\n return (cx.in_cfg)(meth.attrs);\n}\n\nfn trait_method_in_cfg(cx: &Context, meth: &ast::TraitMethod) -> bool {\n match *meth {\n ast::Required(ref meth) => (cx.in_cfg)(meth.attrs),\n ast::Provided(meth) => (cx.in_cfg)(meth.attrs)\n }\n}\n\n\/\/ Determine if an item should be translated in the current crate\n\/\/ configuration based on the item's attributes\nfn in_cfg(cfg: &[@ast::MetaItem], attrs: &[ast::Attribute]) -> bool {\n attr::test_cfg(cfg, attrs.iter().map(|x| *x))\n}\n\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse io;\nuse libc;\nuse sys::fd::FileDesc;\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Anonymous pipes\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\npub struct AnonPipe(FileDesc);\n\npub fn anon_pipe() -> io::Result<(AnonPipe, AnonPipe)> {\n let mut fds = [0; 2];\n\n libc::pipe2(&mut fds, libc::O_CLOEXEC).map_err(|err| io::Error::from_raw_os_error(err.errno))?;\n\n let fd0 = FileDesc::new(fds[0]);\n let fd1 = FileDesc::new(fds[1]);\n Ok((AnonPipe::from_fd(fd0)?, AnonPipe::from_fd(fd1)?))\n}\n\nimpl AnonPipe {\n pub fn from_fd(fd: FileDesc) -> io::Result {\n fd.set_cloexec()?;\n Ok(AnonPipe(fd))\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result {\n self.0.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec) -> io::Result {\n self.0.read_to_end(buf)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result {\n self.0.write(buf)\n }\n\n pub fn fd(&self) -> &FileDesc { &self.0 }\n pub fn into_fd(self) -> FileDesc { self.0 }\n}\n\npub fn read2(_p1: AnonPipe,\n _v1: &mut Vec,\n _p2: AnonPipe,\n _v2: &mut Vec) -> io::Result<()> {\n ::sys_common::util::dumb_print(format_args!(\"read2\\n\"));\n unimplemented!();\n \/*\n \/\/ Set both pipes into nonblocking mode as we're gonna be reading from both\n \/\/ in the `select` loop below, and we wouldn't want one to block the other!\n let p1 = p1.into_fd();\n let p2 = p2.into_fd();\n p1.set_nonblocking(true)?;\n p2.set_nonblocking(true)?;\n\n let max = cmp::max(p1.raw(), p2.raw());\n loop {\n \/\/ wait for either pipe to become readable using `select`\n cvt_r(|| unsafe {\n let mut read: libc::fd_set = mem::zeroed();\n libc::FD_SET(p1.raw(), &mut read);\n libc::FD_SET(p2.raw(), &mut read);\n libc::select(max + 1, &mut read, ptr::null_mut(), ptr::null_mut(),\n ptr::null_mut())\n })?;\n\n \/\/ Read as much as we can from each pipe, ignoring EWOULDBLOCK or\n \/\/ EAGAIN. If we hit EOF, then this will happen because the underlying\n \/\/ reader will return Ok(0), in which case we'll see `Ok` ourselves. In\n \/\/ this case we flip the other fd back into blocking mode and read\n \/\/ whatever's leftover on that file descriptor.\n let read = |fd: &FileDesc, dst: &mut Vec| {\n match fd.read_to_end(dst) {\n Ok(_) => Ok(true),\n Err(e) => {\n if e.raw_os_error() == Some(libc::EWOULDBLOCK) ||\n e.raw_os_error() == Some(libc::EAGAIN) {\n Ok(false)\n } else {\n Err(e)\n }\n }\n }\n };\n if read(&p1, v1)? {\n p2.set_nonblocking(false)?;\n return p2.read_to_end(v2).map(|_| ());\n }\n if read(&p2, v2)? {\n p1.set_nonblocking(false)?;\n return p1.read_to_end(v1).map(|_| ());\n }\n }\n *\/\n}\nSimple implementation of read2\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse io;\nuse libc;\nuse sys::fd::FileDesc;\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Anonymous pipes\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\npub struct AnonPipe(FileDesc);\n\npub fn anon_pipe() -> io::Result<(AnonPipe, AnonPipe)> {\n let mut fds = [0; 2];\n\n libc::pipe2(&mut fds, libc::O_CLOEXEC).map_err(|err| io::Error::from_raw_os_error(err.errno))?;\n\n Ok((AnonPipe(FileDesc::new(fds[0])), AnonPipe(FileDesc::new(fds[1]))))\n}\n\nimpl AnonPipe {\n pub fn from_fd(fd: FileDesc) -> io::Result {\n fd.set_cloexec()?;\n Ok(AnonPipe(fd))\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result {\n self.0.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec) -> io::Result {\n self.0.read_to_end(buf)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result {\n self.0.write(buf)\n }\n\n pub fn fd(&self) -> &FileDesc { &self.0 }\n pub fn into_fd(self) -> FileDesc { self.0 }\n}\n\npub fn read2(p1: AnonPipe,\n v1: &mut Vec,\n p2: AnonPipe,\n v2: &mut Vec) -> io::Result<()> {\n \/\/TODO: Use event based I\/O multiplexing\n \/\/unimplemented!()\n\n p1.read_to_end(v1)?;\n p2.read_to_end(v2)?;\n\n Ok(())\n\n \/*\n \/\/ Set both pipes into nonblocking mode as we're gonna be reading from both\n \/\/ in the `select` loop below, and we wouldn't want one to block the other!\n let p1 = p1.into_fd();\n let p2 = p2.into_fd();\n p1.set_nonblocking(true)?;\n p2.set_nonblocking(true)?;\n\n loop {\n \/\/ wait for either pipe to become readable using `select`\n cvt_r(|| unsafe {\n let mut read: libc::fd_set = mem::zeroed();\n libc::FD_SET(p1.raw(), &mut read);\n libc::FD_SET(p2.raw(), &mut read);\n libc::select(max + 1, &mut read, ptr::null_mut(), ptr::null_mut(),\n ptr::null_mut())\n })?;\n\n \/\/ Read as much as we can from each pipe, ignoring EWOULDBLOCK or\n \/\/ EAGAIN. If we hit EOF, then this will happen because the underlying\n \/\/ reader will return Ok(0), in which case we'll see `Ok` ourselves. In\n \/\/ this case we flip the other fd back into blocking mode and read\n \/\/ whatever's leftover on that file descriptor.\n let read = |fd: &FileDesc, dst: &mut Vec| {\n match fd.read_to_end(dst) {\n Ok(_) => Ok(true),\n Err(e) => {\n if e.raw_os_error() == Some(libc::EWOULDBLOCK) ||\n e.raw_os_error() == Some(libc::EAGAIN) {\n Ok(false)\n } else {\n Err(e)\n }\n }\n }\n };\n if read(&p1, v1)? {\n p2.set_nonblocking(false)?;\n return p2.read_to_end(v2).map(|_| ());\n }\n if read(&p2, v2)? {\n p1.set_nonblocking(false)?;\n return p1.read_to_end(v1).map(|_| ());\n }\n }\n *\/\n}\n<|endoftext|>"} {"text":"fn main() -> () {\n let @vec[uint] quux = @[];\n}\ntest: xfail vector-no-ann-2 in stage0\/\/ xfail-stage0\n\nfn main() -> () {\n let @vec[uint] quux = @[];\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! 
This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n for path in env::split_paths(&path).map(|p| p.join(cmd)) {\n if fs::metadata(&path).is_ok() ||\n fs::metadata(path.with_extension(\"exe\")).is_ok() {\n return\n }\n }\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if fs::metadata(build.src.join(\".git\")).is_ok() {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM\n for host in build.config.host.iter() {\n if let Some(config) = build.config.target_config.get(host) {\n if config.llvm_config.is_some() {\n continue\n }\n }\n need_cmd(\"cmake\".as_ref());\n if build.config.ninja {\n need_cmd(\"ninja\".as_ref())\n }\n break\n }\n\n need_cmd(\"python\".as_ref());\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() {\n panic!(\"filecheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Either can't build or don't want to run jemalloc on these targets\n if target.contains(\"rumprun\") ||\n target.contains(\"bitrig\") ||\n target.contains(\"openbsd\") ||\n target.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n\n \/\/ Can't compile for iOS unless we're on OSX\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on OSX\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && (target.contains(\"x86_64\") || target.contains(\"i686\")) {\n match build.config.musl_root {\n Some(ref root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL the build.musl-root option \\\n must be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. 
The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n\n if target.contains(\"arm-linux-android\") {\n need_cmd(\"adb\".as_ref());\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.gdb_version = run(Command::new(\"gdb\").arg(\"--version\")).ok();\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n}\nAuto merge of #35117 - aravind-pg:path, r=alexcrichton\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. 
See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) {\n if path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n }\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n for path in env::split_paths(&path).map(|p| p.join(cmd)) {\n if fs::metadata(&path).is_ok() ||\n fs::metadata(path.with_extension(\"exe\")).is_ok() {\n return\n }\n }\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if fs::metadata(build.src.join(\".git\")).is_ok() {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM\n for host in build.config.host.iter() {\n if let Some(config) = build.config.target_config.get(host) {\n if config.llvm_config.is_some() {\n continue\n }\n }\n need_cmd(\"cmake\".as_ref());\n if build.config.ninja {\n need_cmd(\"ninja\".as_ref())\n }\n break\n }\n\n need_cmd(\"python\".as_ref());\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() {\n panic!(\"filecheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Either can't build or don't want to run jemalloc on these targets\n if target.contains(\"rumprun\") ||\n target.contains(\"bitrig\") ||\n target.contains(\"openbsd\") ||\n target.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n\n \/\/ Can't compile for iOS unless we're on OSX\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on OSX\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && (target.contains(\"x86_64\") || target.contains(\"i686\")) {\n match build.config.musl_root {\n Some(ref root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL the build.musl-root option \\\n must be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. 
The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n\n if target.contains(\"arm-linux-android\") {\n need_cmd(\"adb\".as_ref());\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.gdb_version = run(Command::new(\"gdb\").arg(\"--version\")).ok();\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. 
See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) {\n if path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n }\n let have_cmd = |cmd: &OsStr| {\n for path in env::split_paths(&path) {\n let target = path.join(cmd);\n let mut cmd_alt = cmd.to_os_string();\n cmd_alt.push(\".exe\");\n if target.is_file() ||\n target.with_extension(\"exe\").exists() ||\n target.join(cmd_alt).exists() {\n return Some(target);\n }\n }\n return None;\n };\n\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n if have_cmd(cmd).is_none() {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n }\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if build.src_is_git {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM\n for host in build.config.host.iter() {\n if let Some(config) = build.config.target_config.get(host) {\n if config.llvm_config.is_some() {\n continue\n }\n }\n need_cmd(\"cmake\".as_ref());\n if build.config.ninja {\n \/\/ Some Linux distros rename `ninja` to `ninja-build`.\n \/\/ CMake can work with either binary name.\n if have_cmd(\"ninja-build\".as_ref()).is_none() {\n need_cmd(\"ninja\".as_ref());\n }\n }\n break\n }\n\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2.7\".as_ref());\n }\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2\".as_ref());\n }\n if build.config.python.is_none() {\n need_cmd(\"python\".as_ref());\n build.config.python = Some(\"python\".into());\n }\n need_cmd(build.config.python.as_ref().unwrap().as_ref());\n\n\n if let Some(ref s) = build.config.nodejs {\n need_cmd(s.as_ref());\n } else {\n \/\/ Look for the nodejs command, needed for emscripten testing\n if let Some(node) = have_cmd(\"node\".as_ref()) {\n build.config.nodejs = Some(node);\n } else if let Some(node) = have_cmd(\"nodejs\".as_ref()) {\n build.config.nodejs = Some(node);\n }\n }\n\n if let Some(ref gdb) = build.config.gdb {\n need_cmd(gdb.as_ref());\n } else {\n build.config.gdb = have_cmd(\"gdb\".as_ref());\n }\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. 
For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n for host in build.config.host.iter() {\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Can't compile for iOS unless we're on macOS\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on macOS\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n match build.musl_root(target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n need_cmd(s.as_ref());\n }\n}\nrustbuild: Sanity-check cmake for sanitizers too\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) {\n if path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n }\n let have_cmd = |cmd: &OsStr| {\n for path in env::split_paths(&path) {\n let target = path.join(cmd);\n let mut cmd_alt = cmd.to_os_string();\n cmd_alt.push(\".exe\");\n if target.is_file() ||\n target.with_extension(\"exe\").exists() ||\n target.join(cmd_alt).exists() {\n return Some(target);\n }\n }\n return None;\n };\n\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n if have_cmd(cmd).is_none() {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n }\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if build.src_is_git {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM or sanitizers.\n let building_llvm = build.config.host.iter()\n .filter_map(|host| build.config.target_config.get(host))\n .any(|config| config.llvm_config.is_none());\n if building_llvm || build.config.sanitizers {\n need_cmd(\"cmake\".as_ref());\n }\n\n \/\/ Ninja is currently only used for LLVM itself.\n if building_llvm && build.config.ninja {\n \/\/ Some Linux distros rename `ninja` to `ninja-build`.\n \/\/ CMake can work with either binary name.\n if have_cmd(\"ninja-build\".as_ref()).is_none() {\n need_cmd(\"ninja\".as_ref());\n }\n }\n\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2.7\".as_ref());\n }\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2\".as_ref());\n }\n if build.config.python.is_none() {\n need_cmd(\"python\".as_ref());\n build.config.python = Some(\"python\".into());\n }\n need_cmd(build.config.python.as_ref().unwrap().as_ref());\n\n\n if let Some(ref s) = build.config.nodejs {\n need_cmd(s.as_ref());\n } else {\n \/\/ Look for the nodejs command, needed for emscripten testing\n if let Some(node) = have_cmd(\"node\".as_ref()) {\n build.config.nodejs = Some(node);\n } else if let Some(node) = have_cmd(\"nodejs\".as_ref()) {\n 
build.config.nodejs = Some(node);\n }\n }\n\n if let Some(ref gdb) = build.config.gdb {\n need_cmd(gdb.as_ref());\n } else {\n build.config.gdb = have_cmd(\"gdb\".as_ref());\n }\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n for host in build.config.host.iter() {\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Can't compile for iOS unless we're on macOS\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on macOS\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n match build.musl_root(target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. 
The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n need_cmd(s.as_ref());\n }\n}\n<|endoftext|>"} {"text":"Add test case for wasm non-clash.\/\/ check-pass\n#![crate_type = \"lib\"]\n\n#[cfg(target_arch = \"wasm32\")]\nmod wasm_non_clash {\n mod a {\n #[link(wasm_import_module = \"a\")]\n extern \"C\" {\n pub fn foo();\n }\n }\n\n mod b {\n #[link(wasm_import_module = \"b\")]\n extern \"C\" {\n pub fn foo() -> usize;\n \/\/ #79581: These declarations shouldn't clash because foreign fn names are mangled\n \/\/ on wasm32.\n }\n }\n}\n<|endoftext|>"} {"text":"Add missing compositing\/mod.rs\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse compositing::resize_rate_limiter::ResizeRateLimiter;\nuse dom::event::Event;\nuse platform::{Application, Window};\n\nuse azure::azure_hl::{BackendType, B8G8R8A8, DataSourceSurface, DrawTarget, SourceSurfaceMethods};\nuse core::cell::Cell;\nuse core::comm::{Chan, SharedChan, Port};\nuse core::util;\nuse geom::matrix::identity;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::compositor::{Compositor, LayerBuffer, LayerBufferSet};\nuse gfx::opts::Opts;\nuse layers;\nuse servo_util::time;\n\nmod resize_rate_limiter;\n\n\/\/\/ Type of the function that is called when the screen is to be redisplayed.\npub type CompositeCallback = @fn();\n\n\/\/\/ Type of the function that is called when the window is resized.\npub type ResizeCallback = @fn(uint, uint);\n\n\/\/\/ The implementation of the layers-based compositor.\n#[deriving(Clone)]\npub struct CompositorImpl {\n chan: SharedChan\n}\n\nimpl CompositorImpl {\n \/\/\/ Creates a new compositor instance.\n pub fn new(dom_event_chan: SharedChan, opts: Opts) -> CompositorImpl {\n let dom_event_chan = Cell(dom_event_chan);\n let chan: Chan = do on_osmain |port| {\n debug!(\"preparing to enter main loop\");\n mainloop(port, dom_event_chan.take(), &opts);\n };\n\n CompositorImpl {\n chan: SharedChan::new(chan)\n }\n }\n}\n\n\/\/\/ Messages to the compositor.\npub enum Msg {\n BeginDrawing(Chan),\n Draw(Chan, LayerBufferSet),\n AddKeyHandler(Chan<()>),\n Exit\n}\n\n\/\/\/ Azure surface wrapping to work with the layers infrastructure.\nstruct AzureDrawTargetImageData {\n draw_target: DrawTarget,\n data_source_surface: DataSourceSurface,\n size: Size2D\n}\n\nimpl layers::layers::ImageData for AzureDrawTargetImageData {\n fn size(&self) -> Size2D {\n self.size\n }\n fn stride(&self) -> uint {\n self.data_source_surface.stride() as uint\n }\n fn format(&self) -> layers::layers::Format {\n \/\/ FIXME: This is not always correct. We should query the Azure draw target for the format.\n layers::layers::ARGB32Format\n }\n fn with_data(&self, f: layers::layers::WithDataFn) { \n do self.data_source_surface.with_data |data| {\n f(data);\n }\n }\n}\n\nfn mainloop(po: Port, dom_event_chan: SharedChan, opts: &Opts) {\n let key_handlers: @mut ~[Chan<()>] = @mut ~[];\n\n let app = Application::new();\n let window = Window::new(&app);\n\n let surfaces = @mut SurfaceSet(opts.render_backend);\n\n let context = layers::rendergl::init_render_context();\n\n \/\/ Create an initial layer tree.\n \/\/\n \/\/ TODO: There should be no initial layer tree until the renderer creates one from the display\n \/\/ list. 
This is only here because we don't have that logic in the renderer yet.\n let root_layer = @mut layers::layers::ContainerLayer();\n let original_layer_transform;\n {\n let image_data = @layers::layers::BasicImageData::new(Size2D(0u, 0u),\n 0,\n layers::layers::RGB24Format,\n ~[]);\n let image = @mut layers::layers::Image::new(image_data as @layers::layers::ImageData);\n let image_layer = @mut layers::layers::ImageLayer(image);\n original_layer_transform = image_layer.common.transform;\n image_layer.common.set_transform(original_layer_transform.scale(800.0, 600.0, 1.0));\n root_layer.add_child(layers::layers::ImageLayerKind(image_layer));\n }\n\n\n let scene = @mut layers::scene::Scene(layers::layers::ContainerLayerKind(root_layer),\n Size2D(800.0, 600.0),\n identity());\n\n let done = @mut false;\n let resize_rate_limiter = @mut ResizeRateLimiter(dom_event_chan);\n let check_for_messages: @fn() = || {\n \/\/ Periodically check if content responded to our last resize event\n resize_rate_limiter.check_resize_response();\n\n \/\/ Handle messages\n while po.peek() {\n match po.recv() {\n AddKeyHandler(key_ch) => key_handlers.push(key_ch),\n BeginDrawing(sender) => lend_surface(surfaces, sender),\n Draw(sender, draw_target) => {\n debug!(\"osmain: received new frame\");\n return_surface(surfaces, draw_target);\n lend_surface(surfaces, sender);\n\n \/\/ Iterate over the children of the container layer.\n let mut current_layer_child = root_layer.first_child;\n\n \/\/ Replace the image layer data with the buffer data.\n let buffers = util::replace(&mut surfaces.front.layer_buffer_set.buffers, ~[]);\n for buffers.each |buffer| {\n let width = buffer.rect.size.width as uint;\n let height = buffer.rect.size.height as uint;\n\n debug!(\"osmain: compositing buffer rect %?\", &buffer.rect);\n\n let image_data = @AzureDrawTargetImageData {\n draw_target: buffer.draw_target.clone(),\n data_source_surface: buffer.draw_target.snapshot().get_data_surface(),\n size: Size2D(width, height)\n };\n let image = @mut layers::layers::Image::new(image_data as @layers::layers::ImageData);\n\n \/\/ Find or create an image layer.\n let image_layer;\n current_layer_child = match current_layer_child {\n None => {\n debug!(\"osmain: adding new image layer\");\n image_layer = @mut layers::layers::ImageLayer(image);\n root_layer.add_child(layers::layers::ImageLayerKind(image_layer));\n None\n }\n Some(layers::layers::ImageLayerKind(existing_image_layer)) => {\n image_layer = existing_image_layer;\n image_layer.set_image(image);\n\n \/\/ Move on to the next sibling.\n do current_layer_child.get().with_common |common| {\n common.next_sibling\n }\n }\n Some(_) => fail!(~\"found unexpected layer kind\"),\n };\n\n \/\/ Set the layer's transform.\n let x = buffer.rect.origin.x as f32;\n let y = buffer.rect.origin.y as f32;\n image_layer.common.set_transform(\n original_layer_transform.translate(x, y, 0.0)\n .scale(width as f32, height as f32, 1.0));\n }\n surfaces.front.layer_buffer_set.buffers = buffers;\n }\n Exit => {\n *done = true;\n }\n }\n }\n };\n\n do window.set_composite_callback {\n do time::time(~\"compositing\") {\n \/\/ Adjust the layer dimensions as necessary to correspond to the size of the window.\n scene.size = window.size();\n\n \/\/ Render the scene.\n layers::rendergl::render_scene(context, scene);\n }\n\n window.present();\n }\n\n do window.set_resize_callback |width, height| {\n debug!(\"osmain: window resized to %ux%u\", width, height);\n resize_rate_limiter.window_resized(width, height);\n }\n\n \/\/ Enter 
the main event loop.\n while !*done {\n \/\/ Check for new messages coming from the rendering task.\n check_for_messages();\n\n \/\/ Check for messages coming from the windowing system.\n window.check_loop();\n }\n}\n\n\/\/\/ Implementation of the abstract `Compositor` interface.\nimpl Compositor for CompositorImpl {\n fn begin_drawing(&self, next_dt: Chan) {\n self.chan.send(BeginDrawing(next_dt))\n }\n fn draw(&self, next_dt: Chan, draw_me: LayerBufferSet) {\n self.chan.send(Draw(next_dt, draw_me))\n }\n}\n\nstruct SurfaceSet {\n front: Surface,\n back: Surface,\n}\n\nfn lend_surface(surfaces: &mut SurfaceSet, receiver: Chan) {\n \/\/ We are in a position to lend out the surface?\n assert!(surfaces.front.have);\n \/\/ Ok then take it\n let old_layer_buffers = util::replace(&mut surfaces.front.layer_buffer_set.buffers, ~[]);\n let new_layer_buffers = do old_layer_buffers.map |layer_buffer| {\n let draw_target_ref = &layer_buffer.draw_target;\n let layer_buffer = LayerBuffer {\n draw_target: draw_target_ref.clone(),\n rect: copy layer_buffer.rect,\n stride: layer_buffer.stride\n };\n debug!(\"osmain: lending surface %?\", layer_buffer);\n layer_buffer\n };\n surfaces.front.layer_buffer_set.buffers = old_layer_buffers;\n\n let new_layer_buffer_set = LayerBufferSet { buffers: new_layer_buffers };\n receiver.send(new_layer_buffer_set);\n \/\/ Now we don't have it\n surfaces.front.have = false;\n \/\/ But we (hopefully) have another!\n surfaces.front <-> surfaces.back;\n \/\/ Let's look\n assert!(surfaces.front.have);\n}\n\nfn return_surface(surfaces: &mut SurfaceSet, layer_buffer_set: LayerBufferSet) {\n \/\/#debug(\"osmain: returning surface %?\", layer_buffer_set);\n \/\/ We have room for a return\n assert!(surfaces.front.have);\n assert!(!surfaces.back.have);\n\n surfaces.back.layer_buffer_set = layer_buffer_set;\n\n \/\/ Now we have it again\n surfaces.back.have = true;\n}\n\nfn SurfaceSet(backend: BackendType) -> SurfaceSet {\n SurfaceSet { front: Surface(backend), back: Surface(backend) }\n}\n\nstruct Surface {\n layer_buffer_set: LayerBufferSet,\n have: bool,\n}\n\nfn Surface(backend: BackendType) -> Surface {\n let layer_buffer = LayerBuffer {\n draw_target: DrawTarget::new(backend, Size2D(800i32, 600i32), B8G8R8A8),\n rect: Rect(Point2D(0u, 0u), Size2D(800u, 600u)),\n stride: 800 * 4\n };\n let layer_buffer_set = LayerBufferSet {\n buffers: ~[ layer_buffer ]\n };\n Surface {\n layer_buffer_set: layer_buffer_set,\n have: true\n }\n}\n\n\/\/\/ A function for spawning into the platform's main thread.\nfn on_osmain(f: ~fn(po: Port)) -> Chan {\n let (setup_po, setup_ch) = comm::stream();\n do task::task().sched_mode(task::PlatformThread).spawn {\n let (po, ch) = comm::stream();\n setup_ch.send(ch);\n f(po);\n }\n setup_po.recv()\n}\n\n<|endoftext|>"} {"text":"Added missing #[macro_export] directive<|endoftext|>"} {"text":"Implement NormalizedSurfaceMesh<|endoftext|>"} {"text":"\/\/ -*- rust -*-\n\nuse std;\nimport std.map;\nimport std.util;\n\nfn test_simple() {\n log \"*** starting test_simple\";\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ FIXME: can't use std.util.id since we'd be capturing a type param,\n \/\/ and presently we can't close items over type params.\n ret u;\n }\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n check (hm.insert(10u, 12u));\n check (hm.insert(11u, 13u));\n check (hm.insert(12u, 14u));\n\n check (hm.get(11u) == 
13u);\n check (hm.get(12u) == 14u);\n check (hm.get(10u) == 12u);\n\n check (!hm.insert(12u, 14u));\n check (hm.get(12u) == 14u);\n\n check (!hm.insert(12u, 12u));\n check (hm.get(12u) == 12u);\n\n log \"*** finished test_simple\";\n}\n\n\/**\n * Force map growth and rehashing.\n *\/\nfn test_growth() {\n log \"*** starting test_growth\";\n\n let uint num_to_insert = 64u;\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ FIXME: can't use std.util.id since we'd be capturing a type param,\n \/\/ and presently we can't close items over type params.\n ret u;\n }\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n let uint i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 1u;\n }\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n check (hm.insert(num_to_insert, 17u));\n check (hm.get(num_to_insert) == 17u);\n\n log \"-----\";\n\n hm.rehash();\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"*** finished test_growth\";\n}\n\nfn test_removal() {\n log \"*** starting test_removal\";\n\n let uint num_to_insert = 64u;\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ This hash function intentionally causes collisions between\n \/\/ consecutive integer pairs.\n ret (u \/ 2u) * 2u;\n }\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n let uint i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 1u;\n }\n\n check (hm.size() == num_to_insert);\n\n log \"-----\";\n log \"removing evens\";\n\n i = 0u;\n while (i < num_to_insert) {\n \/**\n * FIXME (issue #150): we want to check the removed value as in the\n * following:\n\n let util.option[uint] v = hm.remove(i);\n alt (v) {\n case (util.some[uint](u)) {\n check (u == (i * i));\n }\n case (util.none[uint]()) { fail; }\n }\n\n * but we util.option is a tag type so util.some and util.none are\n * off limits until we parse the dwarf for tag types.\n *\/\n\n hm.remove(i);\n i += 2u;\n }\n\n check (hm.size() == (num_to_insert \/ 2u));\n\n log \"-----\";\n\n i = 1u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 2u;\n }\n\n log \"-----\";\n log \"rehashing\";\n\n hm.rehash();\n\n log \"-----\";\n\n i = 1u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 2u;\n }\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 2u;\n }\n\n check (hm.size() == num_to_insert);\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check 
(hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"-----\";\n log \"rehashing\";\n\n hm.rehash();\n\n log \"-----\";\n\n check (hm.size() == num_to_insert);\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"*** finished test_removal\";\n}\n\nfn main() {\n test_simple();\n test_growth();\n test_removal();\n}\nPinching myself for certainty. ;p\/\/ -*- rust -*-\n\nuse std;\nimport std.map;\nimport std.util;\n\nfn test_simple() {\n log \"*** starting test_simple\";\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ FIXME: can't use std.util.id since we'd be capturing a type param,\n \/\/ and presently we can't close items over type params.\n ret u;\n }\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n check (hm.insert(10u, 12u));\n check (hm.insert(11u, 13u));\n check (hm.insert(12u, 14u));\n\n check (hm.get(11u) == 13u);\n check (hm.get(12u) == 14u);\n check (hm.get(10u) == 12u);\n\n check (!hm.insert(12u, 14u));\n check (hm.get(12u) == 14u);\n\n check (!hm.insert(12u, 12u));\n check (hm.get(12u) == 12u);\n\n log \"*** finished test_simple\";\n}\n\n\/**\n * Force map growth and rehashing.\n *\/\nfn test_growth() {\n log \"*** starting test_growth\";\n\n let uint num_to_insert = 64u;\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ FIXME: can't use std.util.id since we'd be capturing a type param,\n \/\/ and presently we can't close items over type params.\n ret u;\n }\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n let uint i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 1u;\n }\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n check (hm.insert(num_to_insert, 17u));\n check (hm.get(num_to_insert) == 17u);\n\n log \"-----\";\n\n hm.rehash();\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"*** finished test_growth\";\n}\n\nfn test_removal() {\n log \"*** starting test_removal\";\n\n let uint num_to_insert = 64u;\n\n fn eq(&uint x, &uint y) -> bool { ret x == y; }\n fn hash(&uint u) -> uint {\n \/\/ This hash function intentionally causes collisions between\n \/\/ consecutive integer pairs.\n ret (u \/ 2u) * 2u;\n }\n\n check (hash(0u) == hash(1u));\n check (hash(2u) == hash(3u));\n check (hash(0u) != hash(2u));\n\n let map.hashfn[uint] hasher = hash;\n let map.eqfn[uint] eqer = eq;\n let map.hashmap[uint, uint] hm = map.mk_hashmap[uint, uint](hasher, eqer);\n\n let uint i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 1u;\n }\n\n check (hm.size() == num_to_insert);\n\n log \"-----\";\n log \"removing evens\";\n\n i = 0u;\n while (i < num_to_insert) {\n \/**\n * FIXME (issue #150): we want to check the removed value as in the\n * following:\n\n let 
util.option[uint] v = hm.remove(i);\n alt (v) {\n case (util.some[uint](u)) {\n check (u == (i * i));\n }\n case (util.none[uint]()) { fail; }\n }\n\n * but we util.option is a tag type so util.some and util.none are\n * off limits until we parse the dwarf for tag types.\n *\/\n\n hm.remove(i);\n i += 2u;\n }\n\n check (hm.size() == (num_to_insert \/ 2u));\n\n log \"-----\";\n\n i = 1u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 2u;\n }\n\n log \"-----\";\n log \"rehashing\";\n\n hm.rehash();\n\n log \"-----\";\n\n i = 1u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 2u;\n }\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n check (hm.insert(i, i * i));\n log \"inserting \" + std._uint.to_str(i, 10u)\n + \" -> \" + std._uint.to_str(i * i, 10u);\n i += 2u;\n }\n\n check (hm.size() == num_to_insert);\n\n log \"-----\";\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"-----\";\n log \"rehashing\";\n\n hm.rehash();\n\n log \"-----\";\n\n check (hm.size() == num_to_insert);\n\n i = 0u;\n while (i < num_to_insert) {\n log \"get(\" + std._uint.to_str(i, 10u) + \") = \"\n + std._uint.to_str(hm.get(i), 10u);\n check (hm.get(i) == i * i);\n i += 1u;\n }\n\n log \"*** finished test_removal\";\n}\n\nfn main() {\n test_simple();\n test_growth();\n test_removal();\n}\n<|endoftext|>"} {"text":"\/\/ xfail-stage0\n\nuse std;\nimport std::str;\n\nfn test_bytes_len() {\n assert (str::byte_len(\"\") == 0u);\n assert (str::byte_len(\"hello world\") == 11u);\n assert (str::byte_len(\"\\x63\") == 1u);\n assert (str::byte_len(\"\\xa2\") == 2u);\n assert (str::byte_len(\"\\u03c0\") == 2u);\n assert (str::byte_len(\"\\u2620\") == 3u);\n assert (str::byte_len(\"\\U0001d11e\") == 4u);\n}\n\nfn test_index_and_rindex() {\n assert (str::index(\"hello\", 'e' as u8) == 1);\n assert (str::index(\"hello\", 'o' as u8) == 4);\n assert (str::index(\"hello\", 'z' as u8) == -1);\n assert (str::rindex(\"hello\", 'l' as u8) == 3);\n assert (str::rindex(\"hello\", 'h' as u8) == 0);\n assert (str::rindex(\"hello\", 'z' as u8) == -1);\n}\n\nfn test_split() {\n fn t(&str s, char c, int i, &str k) {\n log \"splitting: \" + s;\n log i;\n auto v = str::split(s, c as u8);\n log \"split to: \";\n for (str z in v) {\n log z;\n }\n log \"comparing: \" + v.(i) + \" vs. 
\" + k;\n assert (str::eq(v.(i), k));\n }\n t(\"abc.hello.there\", '.', 0, \"abc\");\n t(\"abc.hello.there\", '.', 1, \"hello\");\n t(\"abc.hello.there\", '.', 2, \"there\");\n t(\".hello.there\", '.', 0, \"\");\n t(\".hello.there\", '.', 1, \"hello\");\n t(\"...hello.there.\", '.', 3, \"hello\");\n t(\"...hello.there.\", '.', 5, \"\");\n}\n\nfn test_find() {\n fn t(&str haystack, &str needle, int i) {\n let int j = str::find(haystack,needle);\n log \"searched for \" + needle;\n log j;\n assert (i == j);\n }\n t(\"this is a simple\", \"is a\", 5);\n t(\"this is a simple\", \"is z\", -1);\n t(\"this is a simple\", \"\", 0);\n t(\"this is a simple\", \"simple\", 10);\n t(\"this\", \"simple\", -1);\n}\n\nfn test_substr() {\n fn t(&str a, &str b, int start) {\n assert (str::eq(str::substr(a, start as uint,\n str::byte_len(b)), b));\n }\n\n t(\"hello\", \"llo\", 2);\n t(\"hello\", \"el\", 1);\n t(\"substr should not be a challenge\", \"not\", 14);\n}\n\nfn test_concat() {\n fn t(&vec[str] v, &str s) {\n assert (str::eq(str::concat(v), s));\n }\n\n t([\"you\", \"know\", \"I'm\", \"no\", \"good\"], \"youknowI'mnogood\");\n let vec[str] v = [];\n t(v, \"\");\n t([\"hi\"], \"hi\");\n}\n\nfn test_connect() {\n fn t(&vec[str] v, &str sep, &str s) {\n assert (str::eq(str::connect(v, sep), s));\n }\n\n t([\"you\", \"know\", \"I'm\", \"no\", \"good\"], \" \", \"you know I'm no good\");\n let vec[str] v = [];\n t(v, \" \", \"\");\n t([\"hi\"], \" \", \"hi\");\n}\n\nfn test_to_upper() {\n \/\/ to_upper doesn't understand unicode yet,\n \/\/ but we need to at least preserve it\n auto unicode = \"\\u65e5\\u672c\";\n auto input = \"abcDEF\" + unicode + \"xyz:.;\";\n auto expected = \"ABCDEF\" + unicode + \"XYZ:.;\";\n auto actual = str::to_upper(input);\n assert (str::eq(expected, actual));\n}\n\nfn test_slice() {\n assert (str::eq(\"ab\", str::slice(\"abc\", 0u, 2u)));\n assert (str::eq(\"bc\", str::slice(\"abc\", 1u, 3u)));\n assert (str::eq(\"\", str::slice(\"abc\", 1u, 1u)));\n\n fn a_million_letter_a() -> str {\n auto i = 0;\n auto res = \"\";\n while (i < 100000) {\n res += \"aaaaaaaaaa\";\n i += 1;\n }\n ret res;\n }\n\n fn half_a_million_letter_a() -> str {\n auto i = 0;\n auto res = \"\";\n while (i < 100000) {\n res += \"aaaaa\";\n i += 1;\n }\n ret res;\n }\n\n assert (str::eq(half_a_million_letter_a(),\n str::slice(a_million_letter_a(),\n 0u,\n 500000u)));\n}\n\nfn main() {\n test_bytes_len();\n test_index_and_rindex();\n test_split();\n test_find();\n test_substr();\n test_concat();\n test_connect();\n test_to_upper();\n test_slice();\n}\nstdlib: Add regression tests for std::str\/\/ xfail-stage0\n\nuse std;\nimport std::str;\n\nfn test_bytes_len() {\n assert (str::byte_len(\"\") == 0u);\n assert (str::byte_len(\"hello world\") == 11u);\n assert (str::byte_len(\"\\x63\") == 1u);\n assert (str::byte_len(\"\\xa2\") == 2u);\n assert (str::byte_len(\"\\u03c0\") == 2u);\n assert (str::byte_len(\"\\u2620\") == 3u);\n assert (str::byte_len(\"\\U0001d11e\") == 4u);\n}\n\nfn test_index_and_rindex() {\n assert (str::index(\"hello\", 'e' as u8) == 1);\n assert (str::index(\"hello\", 'o' as u8) == 4);\n assert (str::index(\"hello\", 'z' as u8) == -1);\n assert (str::rindex(\"hello\", 'l' as u8) == 3);\n assert (str::rindex(\"hello\", 'h' as u8) == 0);\n assert (str::rindex(\"hello\", 'z' as u8) == -1);\n}\n\nfn test_split() {\n fn t(&str s, char c, int i, &str k) {\n log \"splitting: \" + s;\n log i;\n auto v = str::split(s, c as u8);\n log \"split to: \";\n for (str z in v) {\n log z;\n }\n log 
\"comparing: \" + v.(i) + \" vs. \" + k;\n assert (str::eq(v.(i), k));\n }\n t(\"abc.hello.there\", '.', 0, \"abc\");\n t(\"abc.hello.there\", '.', 1, \"hello\");\n t(\"abc.hello.there\", '.', 2, \"there\");\n t(\".hello.there\", '.', 0, \"\");\n t(\".hello.there\", '.', 1, \"hello\");\n t(\"...hello.there.\", '.', 3, \"hello\");\n t(\"...hello.there.\", '.', 5, \"\");\n}\n\nfn test_find() {\n fn t(&str haystack, &str needle, int i) {\n let int j = str::find(haystack,needle);\n log \"searched for \" + needle;\n log j;\n assert (i == j);\n }\n t(\"this is a simple\", \"is a\", 5);\n t(\"this is a simple\", \"is z\", -1);\n t(\"this is a simple\", \"\", 0);\n t(\"this is a simple\", \"simple\", 10);\n t(\"this\", \"simple\", -1);\n}\n\nfn test_substr() {\n fn t(&str a, &str b, int start) {\n assert (str::eq(str::substr(a, start as uint,\n str::byte_len(b)), b));\n }\n\n t(\"hello\", \"llo\", 2);\n t(\"hello\", \"el\", 1);\n t(\"substr should not be a challenge\", \"not\", 14);\n}\n\nfn test_concat() {\n fn t(&vec[str] v, &str s) {\n assert (str::eq(str::concat(v), s));\n }\n\n t([\"you\", \"know\", \"I'm\", \"no\", \"good\"], \"youknowI'mnogood\");\n let vec[str] v = [];\n t(v, \"\");\n t([\"hi\"], \"hi\");\n}\n\nfn test_connect() {\n fn t(&vec[str] v, &str sep, &str s) {\n assert (str::eq(str::connect(v, sep), s));\n }\n\n t([\"you\", \"know\", \"I'm\", \"no\", \"good\"], \" \", \"you know I'm no good\");\n let vec[str] v = [];\n t(v, \" \", \"\");\n t([\"hi\"], \" \", \"hi\");\n}\n\nfn test_to_upper() {\n \/\/ to_upper doesn't understand unicode yet,\n \/\/ but we need to at least preserve it\n auto unicode = \"\\u65e5\\u672c\";\n auto input = \"abcDEF\" + unicode + \"xyz:.;\";\n auto expected = \"ABCDEF\" + unicode + \"XYZ:.;\";\n auto actual = str::to_upper(input);\n assert (str::eq(expected, actual));\n}\n\nfn test_slice() {\n assert (str::eq(\"ab\", str::slice(\"abc\", 0u, 2u)));\n assert (str::eq(\"bc\", str::slice(\"abc\", 1u, 3u)));\n assert (str::eq(\"\", str::slice(\"abc\", 1u, 1u)));\n\n fn a_million_letter_a() -> str {\n auto i = 0;\n auto res = \"\";\n while (i < 100000) {\n res += \"aaaaaaaaaa\";\n i += 1;\n }\n ret res;\n }\n\n fn half_a_million_letter_a() -> str {\n auto i = 0;\n auto res = \"\";\n while (i < 100000) {\n res += \"aaaaa\";\n i += 1;\n }\n ret res;\n }\n\n assert (str::eq(half_a_million_letter_a(),\n str::slice(a_million_letter_a(),\n 0u,\n 500000u)));\n}\n\nfn test_ends_with() {\n assert (str::ends_with(\"\", \"\"));\n assert (str::ends_with(\"abc\", \"\"));\n assert (str::ends_with(\"abc\", \"c\"));\n assert (!str::ends_with(\"a\", \"abc\"));\n assert (!str::ends_with(\"\", \"abc\"));\n}\n\nfn main() {\n test_bytes_len();\n test_index_and_rindex();\n test_split();\n test_find();\n test_substr();\n test_concat();\n test_connect();\n test_to_upper();\n test_slice();\n test_ends_with();\n}\n<|endoftext|>"} {"text":"enums can be used as functions<|endoftext|>"} {"text":"\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nuse core::prelude::*;\n\nuse cmp;\nuse ffi::CString;\nuse io;\nuse libc::consts::os::posix01::PTHREAD_STACK_MIN;\nuse libc;\nuse mem;\nuse ptr;\nuse sys::os;\nuse thunk::Thunk;\nuse time::Duration;\n\nuse sys_common::stack::RED_ZONE;\nuse sys_common::thread::*;\n\npub type rust_thread = libc::pthread_t;\n\n#[cfg(all(not(target_os = \"linux\"),\n not(target_os = \"macos\"),\n not(target_os = \"bitrig\"),\n not(target_os = \"openbsd\")))]\npub mod guard {\n pub unsafe fn current() -> usize { 0 }\n pub unsafe fn main() -> usize { 0 }\n pub unsafe fn init() {}\n}\n\n\n#[cfg(any(target_os = \"linux\",\n target_os = \"macos\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\n#[allow(unused_imports)]\npub mod guard {\n use libc::{self, pthread_t};\n use libc::funcs::posix88::mman::mmap;\n use libc::consts::os::posix88::{PROT_NONE,\n MAP_PRIVATE,\n MAP_ANON,\n MAP_FAILED,\n MAP_FIXED};\n use mem;\n use ptr;\n use super::{pthread_self, pthread_attr_destroy};\n use sys::os;\n\n \/\/ These are initialized in init() and only read from after\n static mut GUARD_PAGE: usize = 0;\n\n #[cfg(any(target_os = \"macos\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\n unsafe fn get_stack_start() -> *mut libc::c_void {\n current() as *mut libc::c_void\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n unsafe fn get_stack_start() -> *mut libc::c_void {\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_getattr_np(pthread_self(), &mut attr), 0);\n let mut stackaddr = ptr::null_mut();\n let mut stacksize = 0;\n assert_eq!(pthread_attr_getstack(&attr, &mut stackaddr, &mut stacksize), 0);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n stackaddr\n }\n\n pub unsafe fn init() {\n let psize = os::page_size();\n let mut stackaddr = get_stack_start();\n\n \/\/ Ensure stackaddr is page aligned! A parent process might\n \/\/ have reset RLIMIT_STACK to be non-page aligned. 
The\n \/\/ pthread_attr_getstack() reports the usable stack area\n \/\/ stackaddr < stackaddr + stacksize, so if stackaddr is not\n \/\/ page-aligned, calculate the fix such that stackaddr <\n \/\/ new_page_aligned_stackaddr < stackaddr + stacksize\n let remainder = (stackaddr as usize) % psize;\n if remainder != 0 {\n stackaddr = ((stackaddr as usize) + psize - remainder)\n as *mut libc::c_void;\n }\n\n \/\/ Rellocate the last page of the stack.\n \/\/ This ensures SIGBUS will be raised on\n \/\/ stack overflow.\n let result = mmap(stackaddr,\n psize as libc::size_t,\n PROT_NONE,\n MAP_PRIVATE | MAP_ANON | MAP_FIXED,\n -1,\n 0);\n\n if result != stackaddr || result == MAP_FAILED {\n panic!(\"failed to allocate a guard page\");\n }\n\n let offset = if cfg!(target_os = \"linux\") {2} else {1};\n\n GUARD_PAGE = stackaddr as usize + offset * psize;\n }\n\n pub unsafe fn main() -> usize {\n GUARD_PAGE\n }\n\n #[cfg(target_os = \"macos\")]\n pub unsafe fn current() -> usize {\n extern {\n fn pthread_get_stackaddr_np(thread: pthread_t) -> *mut libc::c_void;\n fn pthread_get_stacksize_np(thread: pthread_t) -> libc::size_t;\n }\n (pthread_get_stackaddr_np(pthread_self()) as libc::size_t -\n pthread_get_stacksize_np(pthread_self())) as usize\n }\n\n #[cfg(any(target_os = \"openbsd\", target_os = \"bitrig\"))]\n pub unsafe fn current() -> usize {\n #[repr(C)]\n struct stack_t {\n ss_sp: *mut libc::c_void,\n ss_size: libc::size_t,\n ss_flags: libc::c_int,\n }\n extern {\n fn pthread_main_np() -> libc::c_uint;\n fn pthread_stackseg_np(thread: pthread_t,\n sinfo: *mut stack_t) -> libc::c_uint;\n }\n\n let mut current_stack: stack_t = mem::zeroed();\n assert_eq!(pthread_stackseg_np(pthread_self(), &mut current_stack), 0);\n\n let extra = if cfg!(target_os = \"bitrig\") {3} else {1} * os::page_size();\n if pthread_main_np() == 1 {\n \/\/ main thread\n current_stack.ss_sp as usize - current_stack.ss_size as usize + extra\n } else {\n \/\/ new thread\n current_stack.ss_sp as usize - current_stack.ss_size as usize\n }\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n pub unsafe fn current() -> usize {\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_getattr_np(pthread_self(), &mut attr), 0);\n let mut guardsize = 0;\n assert_eq!(pthread_attr_getguardsize(&attr, &mut guardsize), 0);\n if guardsize == 0 {\n panic!(\"there is no guard page\");\n }\n let mut stackaddr = ptr::null_mut();\n let mut size = 0;\n assert_eq!(pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n\n stackaddr as usize + guardsize as usize\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n extern {\n fn pthread_getattr_np(native: libc::pthread_t,\n attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_getguardsize(attr: *const libc::pthread_attr_t,\n guardsize: *mut libc::size_t) -> libc::c_int;\n fn pthread_attr_getstack(attr: *const libc::pthread_attr_t,\n stackaddr: *mut *mut libc::c_void,\n stacksize: *mut libc::size_t) -> libc::c_int;\n }\n}\n\npub unsafe fn create(stack: usize, p: Thunk) -> io::Result {\n let p = box p;\n let mut native: libc::pthread_t = mem::zeroed();\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_attr_init(&mut attr), 0);\n\n \/\/ Reserve room for the red zone, the runtime's stack of last resort.\n let stack_size = cmp::max(stack, RED_ZONE + min_stack_size(&attr) as usize);\n match pthread_attr_setstacksize(&mut attr, stack_size as libc::size_t) {\n 
0 => {}\n n => {\n assert_eq!(n, libc::EINVAL);\n \/\/ EINVAL means |stack_size| is either too small or not a\n \/\/ multiple of the system page size. Because it's definitely\n \/\/ >= PTHREAD_STACK_MIN, it must be an alignment issue.\n \/\/ Round up to the nearest page and try again.\n let page_size = os::page_size();\n let stack_size = (stack_size + page_size - 1) &\n (-(page_size as isize - 1) as usize - 1);\n assert_eq!(pthread_attr_setstacksize(&mut attr,\n stack_size as libc::size_t), 0);\n }\n };\n\n let ret = pthread_create(&mut native, &attr, thread_start,\n &*p as *const _ as *mut _);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n\n return if ret != 0 {\n Err(io::Error::from_os_error(ret))\n } else {\n mem::forget(p); \/\/ ownership passed to pthread_create\n Ok(native)\n };\n\n #[no_stack_check]\n extern fn thread_start(main: *mut libc::c_void) -> *mut libc::c_void {\n start_thread(main);\n 0 as *mut _\n }\n}\n\n#[cfg(any(target_os = \"linux\", target_os = \"android\"))]\npub unsafe fn set_name(name: &str) {\n \/\/ pthread_setname_np() since glibc 2.12\n \/\/ availability autodetected via weak linkage\n type F = unsafe extern fn(libc::pthread_t, *const libc::c_char)\n -> libc::c_int;\n extern {\n #[linkage = \"extern_weak\"]\n static pthread_setname_np: *const ();\n }\n if !pthread_setname_np.is_null() {\n let cname = CString::new(name).unwrap();\n mem::transmute::<*const (), F>(pthread_setname_np)(pthread_self(),\n cname.as_ptr());\n }\n}\n\n#[cfg(any(target_os = \"freebsd\",\n target_os = \"dragonfly\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\npub unsafe fn set_name(name: &str) {\n extern {\n fn pthread_set_name_np(tid: libc::pthread_t, name: *const libc::c_char);\n }\n let cname = CString::new(name).unwrap();\n pthread_set_name_np(pthread_self(), cname.as_ptr());\n}\n\n#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\npub unsafe fn set_name(name: &str) {\n extern {\n fn pthread_setname_np(name: *const libc::c_char) -> libc::c_int;\n }\n let cname = CString::new(name).unwrap();\n pthread_setname_np(cname.as_ptr());\n}\n\npub unsafe fn join(native: rust_thread) {\n assert_eq!(pthread_join(native, ptr::null_mut()), 0);\n}\n\npub unsafe fn detach(native: rust_thread) {\n assert_eq!(pthread_detach(native), 0);\n}\n\npub unsafe fn yield_now() {\n assert_eq!(sched_yield(), 0);\n}\n\npub fn sleep(dur: Duration) {\n unsafe {\n if dur < Duration::zero() {\n return yield_now()\n }\n let seconds = dur.num_seconds();\n let ns = dur - Duration::seconds(seconds);\n let mut ts = libc::timespec {\n tv_sec: seconds as libc::time_t,\n tv_nsec: ns.num_nanoseconds().unwrap() as libc::c_long,\n };\n \/\/ If we're awoken with a signal then the return value will be -1 and\n \/\/ nanosleep will fill in `ts` with the remaining time.\n while dosleep(&mut ts) == -1 {\n assert_eq!(os::errno(), libc::EINTR);\n }\n }\n\n #[cfg(target_os = \"linux\")]\n unsafe fn dosleep(ts: *mut libc::timespec) -> libc::c_int {\n extern {\n fn clock_nanosleep(clock_id: libc::c_int, flags: libc::c_int,\n request: *const libc::timespec,\n remain: *mut libc::timespec) -> libc::c_int;\n }\n clock_nanosleep(libc::CLOCK_MONOTONIC, 0, ts, ts)\n }\n #[cfg(not(target_os = \"linux\"))]\n unsafe fn dosleep(ts: *mut libc::timespec) -> libc::c_int {\n libc::nanosleep(ts, ts)\n }\n}\n\n\/\/ glibc >= 2.15 has a __pthread_get_minstack() function that returns\n\/\/ PTHREAD_STACK_MIN plus however many bytes are needed for thread-local\n\/\/ storage. 
We need that information to avoid blowing up when a small stack\n\/\/ is created in an application with big thread-local storage requirements.\n\/\/ See #6233 for rationale and details.\n\/\/\n\/\/ Link weakly to the symbol for compatibility with older versions of glibc.\n\/\/ Assumes that we've been dynamically linked to libpthread but that is\n\/\/ currently always the case. Note that you need to check that the symbol\n\/\/ is non-null before calling it!\n#[cfg(target_os = \"linux\")]\nfn min_stack_size(attr: *const libc::pthread_attr_t) -> libc::size_t {\n type F = unsafe extern \"C\" fn(*const libc::pthread_attr_t) -> libc::size_t;\n extern {\n #[linkage = \"extern_weak\"]\n static __pthread_get_minstack: *const ();\n }\n if __pthread_get_minstack.is_null() {\n PTHREAD_STACK_MIN\n } else {\n unsafe { mem::transmute::<*const (), F>(__pthread_get_minstack)(attr) }\n }\n}\n\n\/\/ __pthread_get_minstack() is marked as weak but extern_weak linkage is\n\/\/ not supported on OS X, hence this kludge...\n#[cfg(not(target_os = \"linux\"))]\nfn min_stack_size(_: *const libc::pthread_attr_t) -> libc::size_t {\n PTHREAD_STACK_MIN\n}\n\nextern {\n fn pthread_self() -> libc::pthread_t;\n fn pthread_create(native: *mut libc::pthread_t,\n attr: *const libc::pthread_attr_t,\n f: extern fn(*mut libc::c_void) -> *mut libc::c_void,\n value: *mut libc::c_void) -> libc::c_int;\n fn pthread_join(native: libc::pthread_t,\n value: *mut *mut libc::c_void) -> libc::c_int;\n fn pthread_attr_init(attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_destroy(attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_setstacksize(attr: *mut libc::pthread_attr_t,\n stack_size: libc::size_t) -> libc::c_int;\n fn pthread_attr_setdetachstate(attr: *mut libc::pthread_attr_t,\n state: libc::c_int) -> libc::c_int;\n fn pthread_detach(thread: libc::pthread_t) -> libc::c_int;\n fn sched_yield() -> libc::c_int;\n}\nrollup merge of #23631: andersk\/minstack-dlsym\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nuse core::prelude::*;\n\nuse cmp;\nuse dynamic_lib::DynamicLibrary;\nuse ffi::CString;\nuse io;\nuse libc::consts::os::posix01::PTHREAD_STACK_MIN;\nuse libc;\nuse mem;\nuse ptr;\nuse sync::{Once, ONCE_INIT};\nuse sys::os;\nuse thunk::Thunk;\nuse time::Duration;\n\nuse sys_common::stack::RED_ZONE;\nuse sys_common::thread::*;\n\npub type rust_thread = libc::pthread_t;\n\n#[cfg(all(not(target_os = \"linux\"),\n not(target_os = \"macos\"),\n not(target_os = \"bitrig\"),\n not(target_os = \"openbsd\")))]\npub mod guard {\n pub unsafe fn current() -> usize { 0 }\n pub unsafe fn main() -> usize { 0 }\n pub unsafe fn init() {}\n}\n\n\n#[cfg(any(target_os = \"linux\",\n target_os = \"macos\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\n#[allow(unused_imports)]\npub mod guard {\n use libc::{self, pthread_t};\n use libc::funcs::posix88::mman::mmap;\n use libc::consts::os::posix88::{PROT_NONE,\n MAP_PRIVATE,\n MAP_ANON,\n MAP_FAILED,\n MAP_FIXED};\n use mem;\n use ptr;\n use super::{pthread_self, pthread_attr_destroy};\n use sys::os;\n\n \/\/ These are initialized in init() and only read from after\n static mut GUARD_PAGE: usize = 0;\n\n #[cfg(any(target_os = \"macos\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\n unsafe fn get_stack_start() -> *mut libc::c_void {\n current() as *mut libc::c_void\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n unsafe fn get_stack_start() -> *mut libc::c_void {\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_getattr_np(pthread_self(), &mut attr), 0);\n let mut stackaddr = ptr::null_mut();\n let mut stacksize = 0;\n assert_eq!(pthread_attr_getstack(&attr, &mut stackaddr, &mut stacksize), 0);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n stackaddr\n }\n\n pub unsafe fn init() {\n let psize = os::page_size();\n let mut stackaddr = get_stack_start();\n\n \/\/ Ensure stackaddr is page aligned! A parent process might\n \/\/ have reset RLIMIT_STACK to be non-page aligned. 
The\n \/\/ pthread_attr_getstack() reports the usable stack area\n \/\/ stackaddr < stackaddr + stacksize, so if stackaddr is not\n \/\/ page-aligned, calculate the fix such that stackaddr <\n \/\/ new_page_aligned_stackaddr < stackaddr + stacksize\n let remainder = (stackaddr as usize) % psize;\n if remainder != 0 {\n stackaddr = ((stackaddr as usize) + psize - remainder)\n as *mut libc::c_void;\n }\n\n \/\/ Rellocate the last page of the stack.\n \/\/ This ensures SIGBUS will be raised on\n \/\/ stack overflow.\n let result = mmap(stackaddr,\n psize as libc::size_t,\n PROT_NONE,\n MAP_PRIVATE | MAP_ANON | MAP_FIXED,\n -1,\n 0);\n\n if result != stackaddr || result == MAP_FAILED {\n panic!(\"failed to allocate a guard page\");\n }\n\n let offset = if cfg!(target_os = \"linux\") {2} else {1};\n\n GUARD_PAGE = stackaddr as usize + offset * psize;\n }\n\n pub unsafe fn main() -> usize {\n GUARD_PAGE\n }\n\n #[cfg(target_os = \"macos\")]\n pub unsafe fn current() -> usize {\n extern {\n fn pthread_get_stackaddr_np(thread: pthread_t) -> *mut libc::c_void;\n fn pthread_get_stacksize_np(thread: pthread_t) -> libc::size_t;\n }\n (pthread_get_stackaddr_np(pthread_self()) as libc::size_t -\n pthread_get_stacksize_np(pthread_self())) as usize\n }\n\n #[cfg(any(target_os = \"openbsd\", target_os = \"bitrig\"))]\n pub unsafe fn current() -> usize {\n #[repr(C)]\n struct stack_t {\n ss_sp: *mut libc::c_void,\n ss_size: libc::size_t,\n ss_flags: libc::c_int,\n }\n extern {\n fn pthread_main_np() -> libc::c_uint;\n fn pthread_stackseg_np(thread: pthread_t,\n sinfo: *mut stack_t) -> libc::c_uint;\n }\n\n let mut current_stack: stack_t = mem::zeroed();\n assert_eq!(pthread_stackseg_np(pthread_self(), &mut current_stack), 0);\n\n let extra = if cfg!(target_os = \"bitrig\") {3} else {1} * os::page_size();\n if pthread_main_np() == 1 {\n \/\/ main thread\n current_stack.ss_sp as usize - current_stack.ss_size as usize + extra\n } else {\n \/\/ new thread\n current_stack.ss_sp as usize - current_stack.ss_size as usize\n }\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n pub unsafe fn current() -> usize {\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_getattr_np(pthread_self(), &mut attr), 0);\n let mut guardsize = 0;\n assert_eq!(pthread_attr_getguardsize(&attr, &mut guardsize), 0);\n if guardsize == 0 {\n panic!(\"there is no guard page\");\n }\n let mut stackaddr = ptr::null_mut();\n let mut size = 0;\n assert_eq!(pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n\n stackaddr as usize + guardsize as usize\n }\n\n #[cfg(any(target_os = \"linux\", target_os = \"android\"))]\n extern {\n fn pthread_getattr_np(native: libc::pthread_t,\n attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_getguardsize(attr: *const libc::pthread_attr_t,\n guardsize: *mut libc::size_t) -> libc::c_int;\n fn pthread_attr_getstack(attr: *const libc::pthread_attr_t,\n stackaddr: *mut *mut libc::c_void,\n stacksize: *mut libc::size_t) -> libc::c_int;\n }\n}\n\npub unsafe fn create(stack: usize, p: Thunk) -> io::Result {\n let p = box p;\n let mut native: libc::pthread_t = mem::zeroed();\n let mut attr: libc::pthread_attr_t = mem::zeroed();\n assert_eq!(pthread_attr_init(&mut attr), 0);\n\n \/\/ Reserve room for the red zone, the runtime's stack of last resort.\n let stack_size = cmp::max(stack, RED_ZONE + min_stack_size(&attr) as usize);\n match pthread_attr_setstacksize(&mut attr, stack_size as libc::size_t) {\n 
0 => {}\n n => {\n assert_eq!(n, libc::EINVAL);\n \/\/ EINVAL means |stack_size| is either too small or not a\n \/\/ multiple of the system page size. Because it's definitely\n \/\/ >= PTHREAD_STACK_MIN, it must be an alignment issue.\n \/\/ Round up to the nearest page and try again.\n let page_size = os::page_size();\n let stack_size = (stack_size + page_size - 1) &\n (-(page_size as isize - 1) as usize - 1);\n assert_eq!(pthread_attr_setstacksize(&mut attr,\n stack_size as libc::size_t), 0);\n }\n };\n\n let ret = pthread_create(&mut native, &attr, thread_start,\n &*p as *const _ as *mut _);\n assert_eq!(pthread_attr_destroy(&mut attr), 0);\n\n return if ret != 0 {\n Err(io::Error::from_os_error(ret))\n } else {\n mem::forget(p); \/\/ ownership passed to pthread_create\n Ok(native)\n };\n\n #[no_stack_check]\n extern fn thread_start(main: *mut libc::c_void) -> *mut libc::c_void {\n start_thread(main);\n 0 as *mut _\n }\n}\n\n#[cfg(any(target_os = \"linux\", target_os = \"android\"))]\npub unsafe fn set_name(name: &str) {\n \/\/ pthread_setname_np() since glibc 2.12\n \/\/ availability autodetected via weak linkage\n type F = unsafe extern fn(libc::pthread_t, *const libc::c_char)\n -> libc::c_int;\n extern {\n #[linkage = \"extern_weak\"]\n static pthread_setname_np: *const ();\n }\n if !pthread_setname_np.is_null() {\n let cname = CString::new(name).unwrap();\n mem::transmute::<*const (), F>(pthread_setname_np)(pthread_self(),\n cname.as_ptr());\n }\n}\n\n#[cfg(any(target_os = \"freebsd\",\n target_os = \"dragonfly\",\n target_os = \"bitrig\",\n target_os = \"openbsd\"))]\npub unsafe fn set_name(name: &str) {\n extern {\n fn pthread_set_name_np(tid: libc::pthread_t, name: *const libc::c_char);\n }\n let cname = CString::new(name).unwrap();\n pthread_set_name_np(pthread_self(), cname.as_ptr());\n}\n\n#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\npub unsafe fn set_name(name: &str) {\n extern {\n fn pthread_setname_np(name: *const libc::c_char) -> libc::c_int;\n }\n let cname = CString::new(name).unwrap();\n pthread_setname_np(cname.as_ptr());\n}\n\npub unsafe fn join(native: rust_thread) {\n assert_eq!(pthread_join(native, ptr::null_mut()), 0);\n}\n\npub unsafe fn detach(native: rust_thread) {\n assert_eq!(pthread_detach(native), 0);\n}\n\npub unsafe fn yield_now() {\n assert_eq!(sched_yield(), 0);\n}\n\npub fn sleep(dur: Duration) {\n unsafe {\n if dur < Duration::zero() {\n return yield_now()\n }\n let seconds = dur.num_seconds();\n let ns = dur - Duration::seconds(seconds);\n let mut ts = libc::timespec {\n tv_sec: seconds as libc::time_t,\n tv_nsec: ns.num_nanoseconds().unwrap() as libc::c_long,\n };\n \/\/ If we're awoken with a signal then the return value will be -1 and\n \/\/ nanosleep will fill in `ts` with the remaining time.\n while dosleep(&mut ts) == -1 {\n assert_eq!(os::errno(), libc::EINTR);\n }\n }\n\n #[cfg(target_os = \"linux\")]\n unsafe fn dosleep(ts: *mut libc::timespec) -> libc::c_int {\n extern {\n fn clock_nanosleep(clock_id: libc::c_int, flags: libc::c_int,\n request: *const libc::timespec,\n remain: *mut libc::timespec) -> libc::c_int;\n }\n clock_nanosleep(libc::CLOCK_MONOTONIC, 0, ts, ts)\n }\n #[cfg(not(target_os = \"linux\"))]\n unsafe fn dosleep(ts: *mut libc::timespec) -> libc::c_int {\n libc::nanosleep(ts, ts)\n }\n}\n\n\/\/ glibc >= 2.15 has a __pthread_get_minstack() function that returns\n\/\/ PTHREAD_STACK_MIN plus however many bytes are needed for thread-local\n\/\/ storage. 
We need that information to avoid blowing up when a small stack\n\/\/ is created in an application with big thread-local storage requirements.\n\/\/ See #6233 for rationale and details.\n\/\/\n\/\/ Use dlsym to get the symbol value at runtime, both for\n\/\/ compatibility with older versions of glibc, and to avoid creating\n\/\/ dependencies on GLIBC_PRIVATE symbols. Assumes that we've been\n\/\/ dynamically linked to libpthread but that is currently always the\n\/\/ case. We previously used weak linkage (under the same assumption),\n\/\/ but that caused Debian to detect an unnecessarily strict versioned\n\/\/ dependency on libc6 (#23628).\n#[cfg(target_os = \"linux\")]\nfn min_stack_size(attr: *const libc::pthread_attr_t) -> libc::size_t {\n type F = unsafe extern \"C\" fn(*const libc::pthread_attr_t) -> libc::size_t;\n static INIT: Once = ONCE_INIT;\n static mut __pthread_get_minstack: Option = None;\n\n INIT.call_once(|| {\n let lib = DynamicLibrary::open(None).unwrap();\n unsafe {\n if let Ok(f) = lib.symbol(\"__pthread_get_minstack\") {\n __pthread_get_minstack = Some(mem::transmute::<*const (), F>(f));\n }\n }\n });\n\n match unsafe { __pthread_get_minstack } {\n None => PTHREAD_STACK_MIN,\n Some(f) => unsafe { f(attr) },\n }\n}\n\n\/\/ No point in looking up __pthread_get_minstack() on non-glibc\n\/\/ platforms.\n#[cfg(not(target_os = \"linux\"))]\nfn min_stack_size(_: *const libc::pthread_attr_t) -> libc::size_t {\n PTHREAD_STACK_MIN\n}\n\nextern {\n fn pthread_self() -> libc::pthread_t;\n fn pthread_create(native: *mut libc::pthread_t,\n attr: *const libc::pthread_attr_t,\n f: extern fn(*mut libc::c_void) -> *mut libc::c_void,\n value: *mut libc::c_void) -> libc::c_int;\n fn pthread_join(native: libc::pthread_t,\n value: *mut *mut libc::c_void) -> libc::c_int;\n fn pthread_attr_init(attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_destroy(attr: *mut libc::pthread_attr_t) -> libc::c_int;\n fn pthread_attr_setstacksize(attr: *mut libc::pthread_attr_t,\n stack_size: libc::size_t) -> libc::c_int;\n fn pthread_attr_setdetachstate(attr: *mut libc::pthread_attr_t,\n state: libc::c_int) -> libc::c_int;\n fn pthread_detach(thread: libc::pthread_t) -> libc::c_int;\n fn sched_yield() -> libc::c_int;\n}\n<|endoftext|>"} {"text":"[add] rock paper scissors game#![allow(dead_code)]\npub mod pseudo;\n\nuse std::time::SystemTime;\nuse std::cmp::Ordering;\nuse std::str::FromStr;\nuse pseudo::Rng;\nuse std::io;\n\n#[derive(Debug, Eq, Copy, Clone)]\nenum Figure {\n Rock,\n Paper,\n Scissors\n}\n\nimpl Ord for Figure {\n fn cmp(&self, other: &Figure) -> Ordering {\n use Figure::*;\n match (self, other) {\n (&Rock, &Rock) => Ordering::Equal,\n (&Rock, &Paper) => Ordering::Less,\n (&Rock, &Scissors) => Ordering::Greater,\n (&Paper, &Rock) => Ordering::Greater,\n (&Paper, &Paper) => Ordering::Equal,\n (&Paper, &Scissors) => Ordering::Less,\n (&Scissors, &Rock) => Ordering::Less,\n (&Scissors, &Paper) => Ordering::Greater,\n (&Scissors, &Scissors) => Ordering::Equal\n }\n }\n}\n\nimpl PartialEq for Figure {\n fn eq(&self, other: &Figure) -> bool {\n self.cmp(other) == Ordering::Equal\n }\n}\n\nimpl PartialOrd for Figure {\n fn partial_cmp(&self, other: &Figure) -> Option {\n Some(self.cmp(other))\n }\n}\n\nimpl FromStr for Figure {\n type Err = ();\n\n fn from_str(s: &str) -> Result {\n use Figure::*;\n match s.trim() {\n \"r\" | \"rock\" => Ok(Rock),\n \"p\" | \"paper\" => Ok(Paper),\n \"s\" | \"scissors\" => Ok(Scissors),\n _ => Err(())\n }\n }\n}\n\nimpl Figure {\n fn select(n: u8) -> 
Figure {\n match n {\n 0 => Figure::Rock,\n 1 => Figure::Paper,\n 2 => Figure::Scissors,\n _ => panic!(\"unknown figure index\")\n }\n }\n}\n\nfn main() {\n let time = SystemTime::now().elapsed().unwrap();\n let mut rng = Rng::new(time.subsec_nanos());\n let mut buffer = String::new();\n loop {\n let computer_figure = Figure::select((rng.rand() % 3) as u8);\n println!(\"[?] Rock, paper or scissors ?\");\n io::stdin().read_line(&mut buffer)\n .unwrap();\n let user_figure = Figure::from_str(&buffer.to_lowercase()).unwrap();\n let result = if computer_figure > user_figure {\n \"computer wins!\"\n } else if computer_figure == user_figure {\n \"draw\"\n } else {\n \"user wins!\"\n };\n println!(\">>> {:?} vs {:?}: {}\", computer_figure, user_figure, result);\n buffer.clear();\n println!(\"[?] Again (y|yes|n|no)?\");\n io::stdin().read_line(&mut buffer)\n .unwrap();\n match buffer.to_lowercase().trim() {\n \"n\" | \"no\" => break,\n _ => {}\n };\n buffer.clear();\n }\n}\n<|endoftext|>"} {"text":"Add regression test for #20413\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Foo {\n fn answer(self);\n}\n\nstruct NoData;\n\/\/~^ ERROR: parameter `T` is never used\n\nimpl Foo for T where NoData: Foo {\n\/\/~^ ERROR: overflow evaluating the requirement\n fn answer(self) {\n let val: NoData = NoData;\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"Add test (fixes #27340)\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Foo;\n#[derive(Copy, Clone)]\n\/\/~^ ERROR the trait `Copy` may not be implemented for this type; field `0` does not implement\nstruct Bar(Foo);\n\nfn main() {}\n<|endoftext|>"} {"text":"Adds Rust Problem 26\/\/\/ Problem 26\n\/\/\/ A unit fraction contains 1 in the numerator. The decimal representation of \n\/\/\/ the unit fractions with denominators 2 to 10 are given:\n\/\/\/ \n\/\/\/ \t\t1\/2\t= \t0.5\n\/\/\/ \t\t1\/3\t= \t0.(3)\n\/\/\/ \t\t1\/4\t= \t0.25\n\/\/\/ \t\t1\/5\t= \t0.2\n\/\/\/ \t\t1\/6\t= \t0.1(6)\n\/\/\/ \t\t1\/7\t= \t0.(142857)\n\/\/\/ \t\t1\/8\t= \t0.125\n\/\/\/ \t\t1\/9\t= \t0.(1)\n\/\/\/ \t\t1\/10\t= \t0.1\n\/\/\/ Where 0.1(6) means 0.166666..., and has a 1-digit recurring cycle. 
It can \n\/\/\/ be seen that 1\/7 has a 6-digit recurring cycle.\n\/\/\/ \n\/\/\/ Find the value of d < 1000 for which 1\/d contains the longest recurring \n\/\/\/ cycle in its decimal fraction part.\nfn main() {\n\tlet mut longest = 1;\n\tfor d in 2u16..1000u16 {\n\t\tlet mut top: u16 = 10;\n\t\tlet mut cycle: Vec = Vec::new();\n\t\tlet bot: u16 = d;\n\t\t\/\/ let mut div = 0;\n\t\tlet mut rem = 1;\n\t\twhile rem > 0 {\n\t\t\t\/\/ div = top\/bot;\n\t\t\trem = top%bot;\n\t\t\tmatch cycle.iter().position(|&x| x == top) {\n\t\t\t\tSome(_) => break,\n\t\t\t\tNone => cycle.push(top)\n\t\t\t};\n\t\t\ttop = rem*10;\n\t\t}\n\t\tif cycle.len() > longest {\n\t\t\tlongest = cycle.len();\n\t\t\tprintln!(\"1\/{} cycle length = {}\", d, cycle.len());\n\t\t}\n\t}\n\tprintln!(\"Answer: {}\", longest);\n}\n<|endoftext|>"} {"text":"Derive deserialize for key::verification::start::raw::StartEventContent<|endoftext|>"} {"text":"Add struct representing virtual machinetype Cell = u8;\n\npub struct VM {\n cells: [Cell; 30_000],\n data_pointer: usize,\n}\n<|endoftext|>"} {"text":"RANDOM!<|endoftext|>"} {"text":"Remove Store::walk()<|endoftext|>"} {"text":"libimagutil: Move from error-chain to failure<|endoftext|>"} {"text":"#[allow(dead_code)]\npub struct ImmutableFieldsWithReferences<'a> {\n \/\/ Read PrivateFields and ImmutableFields first.\n c: String,\n\n \/\/ Now we have a reference to a String that was allocated somewhere else.\n \/\/ We need a lifetime to indicate that e must live for at least as long\n \/\/ as the struct.\n e: &'a String\n}\n\nimpl<'a> ImmutableFieldsWithReferences<'a> {\n pub fn new(s: &'a String) -> ImmutableFieldsWithReferences<'a> {\n ImmutableFieldsWithReferences {\n c: \"an owned string\".to_owned(),\n e: s\n }\n }\n\n \/* ====================== BEST PRACTICE BELOW ====================== *\/\n pub fn get_c(&self) -> &String {\n \/\/ This is typically what we want, similar to returning a string in C#.\n \/\/ The caller can get a reference to c with 'let x = i.get_c()' but\n \/\/ 'let mut x = i.get_c()' will generate a compilation error:\n \/\/ \"cannot borrow immutable borrowed content *x as mutable\".\n &self.c\n }\n\n pub fn get_c_mutable(&mut self) -> &mut String {\n \/\/ This function allows callers to mutate c.\n \/\/ Note that self is now a mutable reference.\n\n \/\/ There is no real equivalent in C#, where strings are immutable. However,\n \/\/ it is similar to having a 'public string Foo { get; set; }' property.\n &mut self.c\n }\n\n pub fn get_e(&self) -> &String {\n \/\/ This is in some respects the same as get_c(). 
The return type of the function\n \/\/ is an immutable borrow, so the caller will not be able to change it.\n \/\/ They will get the same compilation error.\n\n \/\/ The main difference is that in the body of this function we can just\n \/\/ use 'self.e' instead of needing to borrow it.\n\n \/\/ This function can be called multiple times without moving e out of\n \/\/ the struct.\n self.e\n }\n}\nAdd TODO about structs with mutable reference fields.#[allow(dead_code)]\npub struct ImmutableFieldsWithReferences<'a> {\n \/\/ Read PrivateFields and ImmutableFields first.\n c: String,\n\n \/\/ Now we have a reference to a String that was allocated somewhere else.\n \/\/ We need a lifetime to indicate that e must live for at least as long\n \/\/ as the struct.\n e: &'a String\n}\n\nimpl<'a> ImmutableFieldsWithReferences<'a> {\n pub fn new(s: &'a String) -> ImmutableFieldsWithReferences<'a> {\n ImmutableFieldsWithReferences {\n c: \"an owned string\".to_owned(),\n e: s\n }\n }\n\n \/* ====================== BEST PRACTICE BELOW ====================== *\/\n pub fn get_c(&self) -> &String {\n \/\/ This is typically what we want, similar to returning a string in C#.\n \/\/ The caller can get a reference to c with 'let x = i.get_c()' but\n \/\/ 'let mut x = i.get_c()' will generate a compilation error:\n \/\/ \"cannot borrow immutable borrowed content *x as mutable\".\n &self.c\n }\n\n pub fn get_c_mutable(&mut self) -> &mut String {\n \/\/ This function allows callers to mutate c.\n \/\/ Note that self is now a mutable reference.\n\n \/\/ There is no real equivalent in C#, where strings are immutable. However,\n \/\/ it is similar to having a 'public string Foo { get; set; }' property.\n &mut self.c\n }\n\n pub fn get_e(&self) -> &String {\n \/\/ This is in some respects the same as get_c(). The return type of the function\n \/\/ is an immutable borrow, so the caller will not be able to change it.\n \/\/ They will get the same compilation error.\n\n \/\/ The main difference is that in the body of this function we can just\n \/\/ use 'self.e' instead of needing to borrow it.\n\n \/\/ This function can be called multiple times without moving e out of\n \/\/ the struct.\n self.e\n }\n}\n\n\/\/ TODO: We can have a mutable reference in a struct.\nstruct PointRef<'a> {\n x: &'a mut i32,\n y: &'a mut i32,\n}<|endoftext|>"} {"text":"Basic ELF loading into vspace.<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries here, e.g. we just want a few components and a few\n \/\/ tools. 
Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") ||\n target.contains(\"freebsd\") ||\n target.contains(\"netbsd\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let arch = if arch.starts_with(\"arm\") && target.contains(\"eabihf\") {\n \"armhf\"\n } else {\n arch\n };\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n \/\/ inform about c\/c++ compilers, the c++ compiler isn't actually used but\n \/\/ it's needed to get the initial configure to work on all platforms.\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\nAuto merge of #32361 - japaric:no-llvm-assertions-on-arm, r=alexcrichton\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let mut assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ Disable LLVM assertions on ARM compilers until #32360 is fixed\n if target.contains(\"arm\") && target.contains(\"gnu\") {\n assertions = \"OFF\";\n }\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries here, e.g. we just want a few components and a few\n \/\/ tools. 
Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") ||\n target.contains(\"freebsd\") ||\n target.contains(\"netbsd\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let arch = if arch.starts_with(\"arm\") && target.contains(\"eabihf\") {\n \"armhf\"\n } else {\n arch\n };\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n \/\/ inform about c\/c++ compilers, the c++ compiler isn't actually used but\n \/\/ it's needed to get the initial configure to work on all platforms.\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\n<|endoftext|>"} {"text":"Auto merge of #10975 - theCapypara:flock-enosys-android, r=weihanglo<|endoftext|>"} {"text":"Renamed method_variant_to_expr to method_to_path.<|endoftext|>"} {"text":"fix #18: unicode not displayed correctly.<|endoftext|>"} {"text":"chore: catch_unwind<|endoftext|>"} {"text":"use ears::{Sound, AudioController};\nuse std::collections::HashMap;\nuse find_folder;\n\n\npub struct SoundHandler {\n pub map: HashMap<&'static str, Sound>,\n}\n\nimpl SoundHandler {\n pub fn fill() -> Self {\n let mut map: HashMap<&str, Sound> = HashMap::new();\n for path in SOUNDS {\n \/\/ Sprites are located here Create Path\n let sound_path = match find_folder::Search::ParentsThenKids(2, 2)\n 
.for_folder(\"Sounds\") {\n Ok(res) => res.join(path),\n Err(_) => panic!(\"Folder not found!\"),\n };\n \/\/ Create String from path\n let sound_string = match sound_path.to_str() {\n Some(res) => res,\n None => panic!(\"Sound not found\"),\n };\n\n \/\/ Open path an create image\n println!(\"{}\", sound_string);\n let sound = match Sound::new(sound_string) {\n Some(x) => x,\n None => panic!(\"Can't open {} in {}\", path, sound_string),\n };\n map.insert(*path, sound);\n\n }\n SoundHandler { map: map }\n }\n pub fn play(&mut self, sound: &str) {\n let plays = match self.map.get_mut(sound) {\n Some(res) => res,\n None => {\n panic!(\"Can't play {}\", sound);\n }\n };\n plays.play();\n\n }\n}\n\nconst SOUNDS: &'static [&'static str] = &[\"test.ogg\", \"Welcome.ogg\"];\nRepairuse ears::{Sound, AudioController};\nuse std::collections::HashMap;\nuse find_folder;\n\n\npub struct SoundHandler {\n pub map: HashMap<&'static str, Sound>,\n}\n\nimpl SoundHandler {\n pub fn fill() -> Self {\n let mut map: HashMap<&str, Sound> = HashMap::new();\n for path in SOUNDS {\n \/\/ Sprites are located here Create Path\n let sound_path = match find_folder::Search::ParentsThenKids(2, 2)\n .for_folder(\"Sounds\") {\n Ok(res) => res.join(path),\n Err(_) => panic!(\"Folder not found!\"),\n };\n \/\/ Create String from path\n let sound_string = match sound_path.to_str() {\n Some(res) => res,\n None => panic!(\"Sound not found\"),\n };\n\n \/\/ Open path an create image\n println!(\"{}\", sound_string);\n let sound = match Sound::new(sound_string) {\n Some(x) => x,\n None => panic!(\"Can't open {} in {}\", path, sound_string),\n };\n map.insert(*path, sound);\n\n }\n SoundHandler { map: map }\n }\n pub fn play(&mut self, sound: &str) {\n let plays = match self.map.get_mut(sound) {\n Some(res) => res,\n None => {\n panic!(\"Can't play {}\", sound);\n }\n };\n plays.play();\n\n }\n}\n\nconst SOUNDS: &'static [&'static str] = &[\n \"Chicken.ogg\", \n \"Dagger.ogg\",\n \"Dead.ogg\",\n \"Item.ogg\",\n \"Spear.ogg\",\n \"Sword.ogg\",\n \"test.ogg\",\n \"Welcome.ogg\",\n ];\n<|endoftext|>"} {"text":"extern crate libc;\n\nuse libc::{c_int, c_uchar};\n\n#[link(name = \"stb_truetype\")]\nextern { }\n\npub mod ffi {\n use libc::{c_int, c_uchar, c_float, c_void};\n use std::ptr;\n\n pub struct FontInfo {\n userdata: *const c_void,\n\n \/\/ pointer to .ttf file\n data: *const c_uchar,\n\n \/\/ offset of start of font\n fontstart: c_int,\n\n \/\/ number of glyphs, needed for range checking\n numGlyphs: c_int,\n\n \/\/ table locations as offset from start of .ttf\n loca: c_int,\n head: c_int,\n glyf: c_int,\n hhea: c_int,\n hmtx: c_int,\n kern: c_int,\n\n \/\/ a cmap mapping for our chosen character encoding\n index_map: c_int,\n\n \/\/ format needed to map from glyph index to glyph\n indexToLocFormat: c_int,\n }\n\n impl FontInfo {\n pub fn new() -> FontInfo {\n FontInfo {\n userdata: ptr::null(),\n data: ptr::null(),\n fontstart: 0,\n numGlyphs: 0,\n loca: 0,\n head: 0,\n glyf: 0,\n hhea: 0,\n hmtx: 0,\n kern: 0,\n index_map: 0,\n indexToLocFormat: 0,\n }\n }\n }\n\n extern {\n pub fn stbtt_GetFontOffsetForIndex(\n data: *const c_uchar,\n index: c_int,\n ) -> c_int;\n\n\n pub fn stbtt_InitFont(\n info: *mut FontInfo,\n data2: *const c_uchar,\n fontstart: c_int,\n );\n\n pub fn stbtt_GetFontVMetrics(\n info: *const FontInfo,\n ascent: *mut c_int,\n descent: *mut c_int,\n lineGap: *mut c_int,\n );\n\n pub fn stbtt_FindGlyphIndex(\n info: *const FontInfo,\n unicode_codepoint: c_int,\n ) -> c_int;\n\n pub fn 
stbtt_GetGlyphHMetrics(\n info: *const FontInfo,\n glyph_index: c_int,\n advanceWidth: *mut c_int,\n eftSideBearing: *mut c_int,\n );\n\n pub fn stbtt_GetGlyphBitmapBox(\n font: *const FontInfo,\n glyph: c_int,\n scale_x: c_float,\n scale_y: c_float,\n ix0: *mut c_int,\n iy0: *mut c_int,\n ix1: *mut c_int,\n iy1: *mut c_int,\n );\n\n pub fn stbtt_GetGlyphKernAdvance(\n info: *const FontInfo,\n glyph1: c_int,\n glyph2: c_int,\n );\n\n pub fn stbtt_MakeGlyphBitmap(\n info: *const FontInfo,\n output: *const c_uchar,\n out_w: c_int,\n out_h: c_int,\n out_stride: c_int,\n scale_x: c_float,\n scale_y: c_float,\n glyph: c_int,\n );\n\n pub fn stbtt_ScaleForPixelHeight(\n info: *const FontInfo,\n height: c_float,\n ) -> c_float;\n\n pub fn stbtt_GetGlyphBitmap(\n info: *const FontInfo,\n scale_x: c_float,\n scale_y: c_float,\n glyph: c_int,\n width: *mut c_int,\n height: *mut c_int,\n xoff: *mut c_int,\n yoff: *mut c_int,\n ) -> *const c_uchar;\n }\n}\n\npub struct Font {\n font_info: ffi::FontInfo,\n data: Vec,\n height: f32,\n scale: f32,\n ascent: i32,\n descent: i32,\n line_gap: i32,\n}\n\nimpl Font {\n pub fn new(font_path: &Path, height: f32) -> Font {\n let data = {\n use std::io::{BufferedReader, File};\n if !font_path.exists() {\n fail!(\"Wrong font path: {}\", font_path.display());\n }\n let file = File::open(font_path);\n let mut reader = BufferedReader::new(file);\n reader.read_to_end().unwrap()\n };\n let mut font_info = ffi::FontInfo::new();\n unsafe {\n let font_offset = ffi::stbtt_GetFontOffsetForIndex(data.get(0) as *const u8, 0);\n ffi::stbtt_InitFont(&mut font_info, data.get(0) as *const u8, font_offset);\n }\n let scale = unsafe {\n ffi::stbtt_ScaleForPixelHeight(&font_info, height)\n };\n let mut c_ascent: c_int = 0;\n let mut c_descent: c_int = 0;\n let mut c_line_gap: c_int = 0;\n unsafe {\n ffi::stbtt_GetFontVMetrics(&font_info,\n &mut c_ascent, &mut c_descent, &mut c_line_gap);\n }\n let ascent = (c_ascent as f32 * scale) as i32;\n let descent = (c_descent as f32 * scale) as i32;\n let line_gap = (c_line_gap as f32 * scale) as i32;\n Font {\n font_info: font_info,\n data: data,\n height: height,\n scale: scale,\n ascent: ascent,\n descent: descent,\n line_gap: line_gap,\n }\n }\n \n pub fn get_glyph(&self, glyph_index: i32) -> (Vec, i32, i32, i32, i32) {\n use std::vec::raw::from_buf;\n let mut w = 0;\n let mut h = 0;\n let mut xoff = 0;\n let mut yoff = 0;\n let bitmap = unsafe {\n let scale_x = 0.0;\n let scale_y = self.scale;\n let buf = ffi::stbtt_GetGlyphBitmap(\n &self.font_info,\n scale_x,\n scale_y,\n glyph_index as c_int,\n &mut w,\n &mut h,\n &mut xoff,\n &mut yoff,\n );\n from_buf(buf, (w * h) as uint)\n };\n (bitmap, w as i32, h as i32, xoff as i32, yoff as i32)\n }\n\n pub fn find_glyph_index(&self, c: char) -> i32 {\n unsafe {\n ffi::stbtt_FindGlyphIndex(&self.font_info, c as c_int) as i32\n }\n }\n\n pub fn get_glyph_bitmap_box(&self, glyph_index: i32) -> (i32, i32, i32, i32) {\n let scale_x = 0.0;\n let scale_y = self.scale;\n let mut ix0 = 0;\n let mut iy0 = 0;\n let mut ix1 = 0;\n let mut iy1 = 0;\n unsafe {\n ffi::stbtt_GetGlyphBitmapBox(\n &self.font_info,\n glyph_index as c_int,\n scale_x,\n scale_y,\n &mut ix0,\n &mut iy0,\n &mut ix1,\n &mut iy1,\n );\n }\n (ix0 as i32, iy0 as i32, ix1 as i32, iy1 as i32)\n }\n}\n\n\/\/ vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:\nUpdated for Rust f8426e2e2 2014-09-16extern crate libc;\n\nuse libc::{c_int, c_uchar};\nuse std::io::fs::PathExtensions;\n\n#[link(name = \"stb_truetype\")]\nextern { }\n\npub 
mod ffi {\n use libc::{c_int, c_uchar, c_float, c_void};\n use std::ptr;\n\n pub struct FontInfo {\n userdata: *const c_void,\n\n \/\/ pointer to .ttf file\n data: *const c_uchar,\n\n \/\/ offset of start of font\n fontstart: c_int,\n\n \/\/ number of glyphs, needed for range checking\n numGlyphs: c_int,\n\n \/\/ table locations as offset from start of .ttf\n loca: c_int,\n head: c_int,\n glyf: c_int,\n hhea: c_int,\n hmtx: c_int,\n kern: c_int,\n\n \/\/ a cmap mapping for our chosen character encoding\n index_map: c_int,\n\n \/\/ format needed to map from glyph index to glyph\n indexToLocFormat: c_int,\n }\n\n impl FontInfo {\n pub fn new() -> FontInfo {\n FontInfo {\n userdata: ptr::null(),\n data: ptr::null(),\n fontstart: 0,\n numGlyphs: 0,\n loca: 0,\n head: 0,\n glyf: 0,\n hhea: 0,\n hmtx: 0,\n kern: 0,\n index_map: 0,\n indexToLocFormat: 0,\n }\n }\n }\n\n extern {\n pub fn stbtt_GetFontOffsetForIndex(\n data: *const c_uchar,\n index: c_int,\n ) -> c_int;\n\n\n pub fn stbtt_InitFont(\n info: *mut FontInfo,\n data2: *const c_uchar,\n fontstart: c_int,\n );\n\n pub fn stbtt_GetFontVMetrics(\n info: *const FontInfo,\n ascent: *mut c_int,\n descent: *mut c_int,\n lineGap: *mut c_int,\n );\n\n pub fn stbtt_FindGlyphIndex(\n info: *const FontInfo,\n unicode_codepoint: c_int,\n ) -> c_int;\n\n pub fn stbtt_GetGlyphHMetrics(\n info: *const FontInfo,\n glyph_index: c_int,\n advanceWidth: *mut c_int,\n eftSideBearing: *mut c_int,\n );\n\n pub fn stbtt_GetGlyphBitmapBox(\n font: *const FontInfo,\n glyph: c_int,\n scale_x: c_float,\n scale_y: c_float,\n ix0: *mut c_int,\n iy0: *mut c_int,\n ix1: *mut c_int,\n iy1: *mut c_int,\n );\n\n pub fn stbtt_GetGlyphKernAdvance(\n info: *const FontInfo,\n glyph1: c_int,\n glyph2: c_int,\n );\n\n pub fn stbtt_MakeGlyphBitmap(\n info: *const FontInfo,\n output: *const c_uchar,\n out_w: c_int,\n out_h: c_int,\n out_stride: c_int,\n scale_x: c_float,\n scale_y: c_float,\n glyph: c_int,\n );\n\n pub fn stbtt_ScaleForPixelHeight(\n info: *const FontInfo,\n height: c_float,\n ) -> c_float;\n\n pub fn stbtt_GetGlyphBitmap(\n info: *const FontInfo,\n scale_x: c_float,\n scale_y: c_float,\n glyph: c_int,\n width: *mut c_int,\n height: *mut c_int,\n xoff: *mut c_int,\n yoff: *mut c_int,\n ) -> *const c_uchar;\n }\n}\n\npub struct Font {\n font_info: ffi::FontInfo,\n data: Vec,\n height: f32,\n scale: f32,\n ascent: i32,\n descent: i32,\n line_gap: i32,\n}\n\nimpl Font {\n pub fn new(font_path: &Path, height: f32) -> Font {\n let data = {\n use std::io::{BufferedReader, File};\n if !font_path.exists() {\n fail!(\"Wrong font path: {}\", font_path.display());\n }\n let file = File::open(font_path);\n let mut reader = BufferedReader::new(file);\n reader.read_to_end().unwrap()\n };\n let mut font_info = ffi::FontInfo::new();\n unsafe {\n let font_offset = ffi::stbtt_GetFontOffsetForIndex(data.get(0) as *const u8, 0);\n ffi::stbtt_InitFont(&mut font_info, data.get(0) as *const u8, font_offset);\n }\n let scale = unsafe {\n ffi::stbtt_ScaleForPixelHeight(&font_info, height)\n };\n let mut c_ascent: c_int = 0;\n let mut c_descent: c_int = 0;\n let mut c_line_gap: c_int = 0;\n unsafe {\n ffi::stbtt_GetFontVMetrics(&font_info,\n &mut c_ascent, &mut c_descent, &mut c_line_gap);\n }\n let ascent = (c_ascent as f32 * scale) as i32;\n let descent = (c_descent as f32 * scale) as i32;\n let line_gap = (c_line_gap as f32 * scale) as i32;\n Font {\n font_info: font_info,\n data: data,\n height: height,\n scale: scale,\n ascent: ascent,\n descent: descent,\n line_gap: line_gap,\n }\n 
}\n \n pub fn get_glyph(&self, glyph_index: i32) -> (Vec, i32, i32, i32, i32) {\n use std::vec::raw::from_buf;\n let mut w = 0;\n let mut h = 0;\n let mut xoff = 0;\n let mut yoff = 0;\n let bitmap = unsafe {\n let scale_x = 0.0;\n let scale_y = self.scale;\n let buf = ffi::stbtt_GetGlyphBitmap(\n &self.font_info,\n scale_x,\n scale_y,\n glyph_index as c_int,\n &mut w,\n &mut h,\n &mut xoff,\n &mut yoff,\n );\n from_buf(buf, (w * h) as uint)\n };\n (bitmap, w as i32, h as i32, xoff as i32, yoff as i32)\n }\n\n pub fn find_glyph_index(&self, c: char) -> i32 {\n unsafe {\n ffi::stbtt_FindGlyphIndex(&self.font_info, c as c_int) as i32\n }\n }\n\n pub fn get_glyph_bitmap_box(&self, glyph_index: i32) -> (i32, i32, i32, i32) {\n let scale_x = 0.0;\n let scale_y = self.scale;\n let mut ix0 = 0;\n let mut iy0 = 0;\n let mut ix1 = 0;\n let mut iy1 = 0;\n unsafe {\n ffi::stbtt_GetGlyphBitmapBox(\n &self.font_info,\n glyph_index as c_int,\n scale_x,\n scale_y,\n &mut ix0,\n &mut iy0,\n &mut ix1,\n &mut iy1,\n );\n }\n (ix0 as i32, iy0 as i32, ix1 as i32, iy1 as i32)\n }\n}\n\n\/\/ vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:\n<|endoftext|>"} {"text":"Auto merge of #34350 - Stebalien:test-30276, r=eddyb\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Test([i32]);\nfn main() {\n let _x: fn(_) -> Test = Test;\n}\n<|endoftext|>"} {"text":"Add an XFAILed test for treating alts as expressions\/\/ xfail-boot\n\/\/ xfail-stage0\n\/\/ -*- rust -*-\n\n\/\/ Tests for using alt as an expression\n\nfn test() {\n let bool res = alt (true) {\n case (true) {\n true\n }\n case (false) {\n false\n }\n };\n check (res);\n\n res = alt(false) {\n case (true) {\n false\n }\n case (false) {\n true\n }\n };\n check (res);\n}\n\nfn main() {\n test();\n}<|endoftext|>"} {"text":"Fix: use mod ui and remove unused imports<|endoftext|>"} {"text":"add missing test file#![feature(trace_macros)]\n#[macro_use]\nextern crate nom;\n\nuse nom::{IResult,Needed};\n\n#[allow(dead_code)]\nstruct Range {\n start: char,\n end: char\n}\n\npub fn take_char(input: &[u8]) -> IResult<&[u8], char> {\n if input.len() > 0 {\n IResult::Done(&input[1..], input[0] as char)\n } else {\n IResult::Incomplete(Needed::Size(1))\n }\n}\n\n\/\/trace_macros!(true);\n\n#[allow(dead_code)]\nnamed!(range<&[u8], Range>,\n alt!(\n chain!(\n start: take_char ~\n tag!(\"-\") ~\n end: take_char,\n || {\n Range {\n start: start,\n end: end,\n }\n }\n ) |\n map!(\n take_char,\n |c| {\n Range {\n start: c,\n end: c,\n }\n }\n )\n )\n);\n\n\n#[allow(dead_code)]\nnamed!(literal<&[u8], Vec >,\n map!(\n many1!(take_char),\n |cs| {\n cs\n }\n )\n);\n\n#[test]\nfn issue_58() {\n range(&b\"abcd\"[..]);\n literal(&b\"abcd\"[..]);\n}\n\n\/\/trace_macros!(false);\n<|endoftext|>"} {"text":"Add 2015 day 20 solutionfn get_present_counts(max: u32, presents: u32, limit: Option) -> Vec {\n let len = max \/ presents;\n let mut houses = Vec::with_capacity(len as usize);\n\n for _ in 0..len {\n houses.push(0);\n }\n\n for i in 1..len {\n let mut count = 0;\n let mut j = i;\n\n while j < len {\n houses[j as usize] += i * presents;\n \n count += 1;\n j += match limit {\n Some(x) if count >= x => break,\n _ => i\n };\n }\n }\n\n houses\n}\n\nfn main() 
{\n let input = 34000000;\n let result = get_present_counts(input, 10, None)\n .into_iter()\n .position(|x| x >= input).unwrap();\n\n println!(\"Part 1: {}\", result);\n\n let result = get_present_counts(input, 11, Some(50))\n .into_iter()\n .position(|x| x >= input).unwrap();\n\n println!(\"Part 2: {}\", result);\n}\n<|endoftext|>"} {"text":"Added Envelope and Point structs for amplitude and frequency interpolation.\n\/\/\/ Point for use in the envelope struct.\n#[deriving(Clone, Show)]\npub struct Point {\n \/\/\/ `time` represents the x domain.\n pub time: f32,\n \/\/\/ `value` represents the y domain.\n pub value: f32,\n \/\/\/ `curve` represents the bezier curve depth.\n pub curve: f32\n}\n\nimpl Point {\n \/\/\/ Constructor method for Point.\n pub fn new(time: f32, value: f32, curve: f32) -> Point {\n Point { time: time, value: value, curve: curve }\n }\n}\n\n\/\/\/ Envelope struct, primarily used for\n\/\/\/ frequency and amplitude interpolation.\n#[deriving(Clone, Show)]\npub struct Envelope {\n \/\/\/ Envelope represented by a vector\n \/\/\/ of points (sorted by `time`).\n pub points: Vec\n}\n\nimpl Envelope {\n\n \/\/\/ Default, empty constructor.\n fn new() -> Envelope {\n Envelope {\n points: vec![]\n }\n }\n\n \/\/\/ Add a new point to the Envelope.\n fn add_point(&mut self, point: Point) {\n self.points.push(point);\n self.points.sort_by(|a, b| if a.time < b.time { Less }\n else if a.time > b.time { Greater }\n else { Equal });\n }\n\n \/\/\/ Return `value` for the given `time`.\n fn get_value(&self, time: f32) -> f32 {\n \/\/ If there is less than two points interpolation\n \/\/ is not meaningful, thus we should just return 0.\n if self.points.len() <= 1 { return 0f32 }\n \/\/ Iterate through points.\n for i in range(0, self.points.len()) {\n \/\/ Find the start point to interpolate.\n if time >= self.points.get(i).time {\n \/\/ Interpolate both points and add the value\n \/\/ of the first to find our result.\n return self.interpolate(time,\n *self.points.get(i-1),\n *self.points.get(i))\n + self.points.get(i-1).value;\n }\n }\n 0f32\n }\n\n \/\/\/ Interpolate between points.\n fn interpolate(&self, time: f32, start: Point, end: Point) -> f32 {\n \/\/ Find time passed from start of interpolation.\n let time_pos = time - start.time;\n \/\/ Find duration of interpolation.\n let duration = end.time - start.time;\n \/\/ Set gradient for interpolation.\n let gradient_value = end.value - start.value;\n if gradient_value == 0f32 { return 0f32 }\n let gradient = duration \/ gradient_value;\n let half_gradient_value = gradient_value * 0.5f32;\n \/\/ Consider bezier curve.\n let y2 = half_gradient_value + start.curve * half_gradient_value;\n let perc_time = time_pos \/ duration;\n \/\/ Re-adjust linear trajectory.\n let ya = Envelope::get_bezier_pt(0f32, y2, perc_time);\n let yb = Envelope::get_bezier_pt(y2, gradient_value, perc_time);\n Envelope::get_bezier_pt(ya, yb, perc_time)\n }\n\n \/\/\/ Get bezier point for bezier curve.\n fn get_bezier_pt(n1: f32, n2: f32, perc: f32) -> f32 {\n (n2 - n1) * perc + n1\n }\n\n}\n\n<|endoftext|>"} {"text":"use rgtk::*;\nuse std::fs::PathExt;\nuse std::num::FromPrimitive;\nuse std::path::Path;\nuse std::process::Command;\n\nfn remove_expansions_for_path(state: &mut ::utils::State, path_str: &String) {\n for expansion_str in state.expansions.clone().iter() {\n if !Path::new(expansion_str).exists() ||\n path_str == expansion_str ||\n ::utils::is_parent_path(path_str, expansion_str)\n {\n state.expansions.remove(expansion_str);\n }\n }\n}\n\nfn 
save_project(state: &mut ::utils::State, tree: &mut gtk::TreeView, path_str: &String) {\n state.projects.insert(path_str.clone());\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n}\n\npub fn new_project(state: &mut ::utils::State, tree: &mut gtk::TreeView) {\n if let Some(dialog) = gtk::FileChooserDialog::new(\n \"New Project\",\n None,\n gtk::FileChooserAction::Save\n ) {\n if let Some(gtk::ResponseType::Accept) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n let path = Path::new(path_str.as_slice());\n if let Some(name_os_str) = path.file_name() {\n if let Some(name_str) = name_os_str.to_str() {\n if let Some(parent_path) = path.parent() {\n match Command::new(\"cargo\").arg(\"new\").arg(name_str).arg(\"--bin\")\n .current_dir(parent_path).status()\n {\n Ok(_) => save_project(state, tree, &path_str),\n Err(e) => println!(\"Error creating {}: {}\", name_str, e)\n }\n }\n }\n }\n }\n }\n dialog.destroy();\n }\n}\n\npub fn import_project(state: &mut ::utils::State, tree: &mut gtk::TreeView) {\n if let Some(dialog) = gtk::FileChooserDialog::new(\n \"Import\",\n None,\n gtk::FileChooserAction::SelectFolder\n ) {\n if let Some(gtk::ResponseType::Accept) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n save_project(state, tree, &path_str);\n }\n }\n dialog.destroy();\n }\n}\n\npub fn rename_file(state: &mut ::utils::State, fd: i32) {\n if let Some(_) = ::utils::get_selected_path(state) {\n if let Some(dialog) = gtk::FileChooserDialog::new(\n \"Rename\",\n None,\n gtk::FileChooserAction::Save\n ) {\n if let Some(gtk::ResponseType::Accept) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(&state);\n ::ffi::send_message(fd, format!(\"Move {}\", path_str).as_slice());\n }\n }\n dialog.destroy();\n }\n }\n}\n\npub fn remove_item(state: &mut ::utils::State, tree: &mut gtk::TreeView, fd: i32) {\n if let Some(path_str) = ::utils::get_selected_path(state) {\n if let Some(dialog) = gtk::MessageDialog::new_with_markup(\n Some(state.window.clone()),\n gtk::DialogFlags::Modal,\n gtk::MessageType::Question,\n gtk::ButtonsType::OkCancel,\n if state.projects.contains(&path_str) {\n \"Remove this project? It WILL NOT be deleted from the disk.\"\n } else {\n \"Remove this file? 
It WILL be deleted from the disk.\"\n }\n ) {\n if let Some(gtk::ResponseType::Ok) = FromPrimitive::from_i32(dialog.run()) {\n if state.projects.contains(&path_str) {\n state.projects.remove(&path_str);\n remove_expansions_for_path(state, &path_str);\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n } else {\n ::ffi::send_message(fd, \"call delete(expand('%')) | bdelete!\".as_slice());\n }\n }\n dialog.destroy();\n }\n }\n}\n\npub fn set_selection(state: &mut ::utils::State, tree: &mut gtk::TreeView, fd: i32) {\n if !state.is_refreshing_tree {\n if let Some(path_str) = ::utils::get_selected_path(state) {\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n ::ffi::send_message(fd, format!(\"e {}\", path_str).as_slice());\n }\n }\n}\n\npub fn remove_expansion(state: &mut ::utils::State, iter: >k::TreeIter) {\n if let Some(path_str) = state.tree_model.get_value(iter, 1).get_string() {\n remove_expansions_for_path(state, &path_str);\n ::utils::write_prefs(state);\n }\n}\n\npub fn add_expansion(state: &mut ::utils::State, iter: >k::TreeIter) {\n if let Some(path_str) = state.tree_model.get_value(iter, 1).get_string() {\n state.expansions.insert(path_str);\n ::utils::write_prefs(state);\n }\n}\nUpdate for latest changes in rgtkuse rgtk::*;\nuse std::fs::PathExt;\nuse std::num::FromPrimitive;\nuse std::path::Path;\nuse std::process::Command;\n\nfn remove_expansions_for_path(state: &mut ::utils::State, path_str: &String) {\n for expansion_str in state.expansions.clone().iter() {\n if !Path::new(expansion_str).exists() ||\n path_str == expansion_str ||\n ::utils::is_parent_path(path_str, expansion_str)\n {\n state.expansions.remove(expansion_str);\n }\n }\n}\n\nfn save_project(state: &mut ::utils::State, tree: &mut gtk::TreeView, path_str: &String) {\n state.projects.insert(path_str.clone());\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n}\n\npub fn new_project(state: &mut ::utils::State, tree: &mut gtk::TreeView) {\n let dialog = gtk::FileChooserDialog::new(\n \"New Project\",\n None,\n gtk::FileChooserAction::Save,\n [(\"Save\", gtk::ResponseType::Ok), (\"Cancel\", gtk::ResponseType::Cancel)]\n );\n if let Some(gtk::ResponseType::Ok) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n let path = Path::new(path_str.as_slice());\n if let Some(name_os_str) = path.file_name() {\n if let Some(name_str) = name_os_str.to_str() {\n if let Some(parent_path) = path.parent() {\n match Command::new(\"cargo\").arg(\"new\").arg(name_str).arg(\"--bin\")\n .current_dir(parent_path).status()\n {\n Ok(_) => save_project(state, tree, &path_str),\n Err(e) => println!(\"Error creating {}: {}\", name_str, e)\n }\n }\n }\n }\n }\n }\n dialog.destroy();\n}\n\npub fn import_project(state: &mut ::utils::State, tree: &mut gtk::TreeView) {\n let dialog = gtk::FileChooserDialog::new(\n \"Import\",\n None,\n gtk::FileChooserAction::SelectFolder,\n [(\"Open\", gtk::ResponseType::Ok), (\"Cancel\", gtk::ResponseType::Cancel)]\n );\n if let Some(gtk::ResponseType::Ok) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n save_project(state, tree, &path_str);\n }\n }\n dialog.destroy();\n}\n\npub fn rename_file(state: &mut ::utils::State, fd: i32) {\n if let Some(_) = ::utils::get_selected_path(state) {\n let dialog = gtk::FileChooserDialog::new(\n \"Rename\",\n None,\n 
gtk::FileChooserAction::Save,\n [(\"Save\", gtk::ResponseType::Ok), (\"Cancel\", gtk::ResponseType::Cancel)]\n );\n if let Some(gtk::ResponseType::Ok) = FromPrimitive::from_i32(dialog.run()) {\n if let Some(path_str) = dialog.get_filename() {\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(&state);\n ::ffi::send_message(fd, format!(\"Move {}\", path_str).as_slice());\n }\n }\n dialog.destroy();\n }\n}\n\npub fn remove_item(state: &mut ::utils::State, tree: &mut gtk::TreeView, fd: i32) {\n if let Some(path_str) = ::utils::get_selected_path(state) {\n if let Some(dialog) = gtk::MessageDialog::new_with_markup(\n Some(state.window.clone()),\n gtk::DialogFlags::Modal,\n gtk::MessageType::Question,\n gtk::ButtonsType::OkCancel,\n if state.projects.contains(&path_str) {\n \"Remove this project? It WILL NOT be deleted from the disk.\"\n } else {\n \"Remove this file? It WILL be deleted from the disk.\"\n }\n ) {\n if let Some(gtk::ResponseType::Ok) = FromPrimitive::from_i32(dialog.run()) {\n if state.projects.contains(&path_str) {\n state.projects.remove(&path_str);\n remove_expansions_for_path(state, &path_str);\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n } else {\n ::ffi::send_message(fd, \"call delete(expand('%')) | bdelete!\".as_slice());\n }\n }\n dialog.destroy();\n }\n }\n}\n\npub fn set_selection(state: &mut ::utils::State, tree: &mut gtk::TreeView, fd: i32) {\n if !state.is_refreshing_tree {\n if let Some(path_str) = ::utils::get_selected_path(state) {\n state.selection = Some(path_str.clone());\n ::utils::write_prefs(state);\n ::ui::update_project_tree(state, tree);\n ::ffi::send_message(fd, format!(\"e {}\", path_str).as_slice());\n }\n }\n}\n\npub fn remove_expansion(state: &mut ::utils::State, iter: >k::TreeIter) {\n if let Some(path_str) = state.tree_model.get_value(iter, 1).get_string() {\n remove_expansions_for_path(state, &path_str);\n ::utils::write_prefs(state);\n }\n}\n\npub fn add_expansion(state: &mut ::utils::State, iter: >k::TreeIter) {\n if let Some(path_str) = state.tree_model.get_value(iter, 1).get_string() {\n state.expansions.insert(path_str);\n ::utils::write_prefs(state);\n }\n}\n<|endoftext|>"} {"text":"use std::borrow::Cow;\nuse std::sync::RwLock;\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\nuse std::path::Path;\nuse serialize::Encodable;\nuse hyper::status::StatusCode::{self, InternalServerError};\nuse hyper::server::Response as HyperResponse;\nuse hyper::header::{Date, HttpDate, Server, ContentType, ContentLength, Header, HeaderFormat};\nuse hyper::net::{Fresh, Streaming};\nuse time;\nuse mimes::{get_media_type, MediaType};\nuse mustache;\nuse mustache::Template;\nuse std::io;\nuse std::io::{Read, Write, copy};\nuse std::fs::File;\nuse {NickelError, Halt, MiddlewareResult, AsBytes};\n\npub type TemplateCache = RwLock>;\n\n\/\/\/A container for the response\npub struct Response<'a, T=Fresh> {\n \/\/\/the original `hyper::server::Response`\n pub origin: HyperResponse<'a, T>,\n templates: &'a TemplateCache\n}\n\nimpl<'a> Response<'a, Fresh> {\n pub fn from_internal<'c, 'd>(response: HyperResponse<'c, Fresh>,\n templates: &'c TemplateCache)\n -> Response<'c, Fresh> {\n Response {\n origin: response,\n templates: templates\n }\n }\n\n \/\/\/ Sets the content type by it's short form.\n \/\/\/ Returns the response for chaining.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Continue};\n \/\/\/ use 
nickel::mimes::MediaType;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.content_type(MediaType::Html);\n \/\/\/ Ok(Continue(res))\n \/\/\/ }\n \/\/\/ ```\n pub fn content_type(&mut self, mt: MediaType) -> &mut Response<'a> {\n self.origin.headers_mut().set(ContentType(get_media_type(mt)));\n self\n }\n\n \/\/\/ Sets the status code and returns the response for chaining\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Continue};\n \/\/\/ use nickel::status::StatusCode;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.set_status(StatusCode::NotFound);\n \/\/\/ Ok(Continue(res))\n \/\/\/ }\n \/\/\/ ```\n pub fn set_status(&mut self, status: StatusCode) -> &mut Response<'a> {\n *self.origin.status_mut() = status;\n self\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.send(\"hello world\")\n \/\/\/ }\n \/\/\/ ```\n pub fn send(self, text: T) -> MiddlewareResult<'a> {\n let mut stream = try!(self.start());\n match stream.write_all(text.as_bytes()) {\n Ok(()) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Failed to send: {}\", e))\n }\n }\n\n \/\/\/ Writes a file to the output.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/ use nickel::status::StatusCode;\n \/\/\/ use std::path::Path;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ let favicon = Path::new(\"\/assets\/favicon.ico\");\n \/\/\/ res.send_file(favicon)\n \/\/\/ }\n \/\/\/ ```\n pub fn send_file(mut self, path: &Path) -> MiddlewareResult<'a> {\n \/\/ Chunk the response\n self.origin.headers_mut().remove::();\n \/\/ Determine content type by file extension or default to binary\n self.content_type(path.extension()\n .and_then(|os| os.to_str())\n .and_then(|s| s.parse().ok())\n .unwrap_or(MediaType::Bin));\n\n match File::open(path) {\n Ok(mut file) => {\n let mut stream = try!(self.start());\n match copy(&mut file, &mut stream) {\n Ok(_) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Failed to send file: {}\", e))\n }\n }\n Err(e) => {\n self.error(InternalServerError,\n format!(\"Failed to send file '{:?}': {}\", path, e))\n }\n }\n }\n\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n \/\/\n \/\/ Also, it should only set them if not already set.\n fn set_fallback_headers(&mut self) {\n self.set_header_fallback(|| Date(HttpDate(time::now_utc())));\n self.set_header_fallback(|| Server(\"Nickel\".to_string()));\n self.set_header_fallback(|| ContentType(get_media_type(MediaType::Html)));\n }\n\n \/\/\/ Return an error with the appropriate status code for error handlers to\n \/\/\/ provide output for.\n pub fn error(self, status: StatusCode, message: T) -> MiddlewareResult<'a>\n where T: Into> {\n Err(NickelError::new(self, message, status))\n }\n\n \/\/\/ Sets the header if not already set.\n \/\/\/\n \/\/\/ If the header is not set then `f` will be called.\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate hyper;\n \/\/\/\n 
\/\/\/ # fn main() {\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt, MediaType, get_media_type};\n \/\/\/ use hyper::header::ContentType;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.content_type(MediaType::Html);\n \/\/\/ res.set_header_fallback(|| {\n \/\/\/ panic!(\"Should not get called\");\n \/\/\/ ContentType(get_media_type(MediaType::Txt))\n \/\/\/ });\n \/\/\/ res.send(\"
<h1>
Hello World<\/h1>\")\n \/\/\/ }\n \/\/\/ # }\n \/\/\/ ```\n pub fn set_header_fallback(&mut self, f: F)\n where H: Header + HeaderFormat, F: FnOnce() -> H {\n let headers = self.origin.headers_mut();\n if !headers.has::() { headers.set(f()) }\n }\n\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use std::collections::HashMap;\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ let mut data = HashMap::new();\n \/\/\/ data.insert(\"name\", \"user\");\n \/\/\/ res.render(\"examples\/assets\/template.tpl\", &data)\n \/\/\/ }\n \/\/\/ ```\n pub fn render(self, path: &'static str, data: &T) -> MiddlewareResult<'a>\n where T: Encodable {\n fn render<'a, T>(res: Response<'a>, template: &Template, data: &T)\n -> MiddlewareResult<'a> where T: Encodable {\n let mut stream = try!(res.start());\n match template.render(&mut stream, data) {\n Ok(()) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Problem rendering template: {:?}\", e))\n }\n }\n\n \/\/ Fast path doesn't need writer lock\n if let Some(t) = self.templates.read().unwrap().get(&path) {\n return render(self, t, data);\n }\n\n \/\/ We didn't find the template, get writers lock\n let mut templates = self.templates.write().unwrap();\n \/\/ Search again incase there was a race to compile the template\n let template = match templates.entry(path) {\n Vacant(entry) => {\n let mut file = File::open(&Path::new(path))\n .ok().expect(&*format!(\"Couldn't open the template file: {}\", path));\n let mut raw_template = String::new();\n\n file.read_to_string(&mut raw_template)\n .ok().expect(&*format!(\"Couldn't open the template file: {}\", path));\n\n entry.insert(mustache::compile_str(&*raw_template))\n },\n Occupied(entry) => entry.into_mut()\n };\n\n render(self, template, data)\n }\n\n pub fn start(mut self) -> Result, NickelError<'a>> {\n self.set_fallback_headers();\n\n let Response { origin, templates } = self;\n match origin.start() {\n Ok(origin) => Ok(Response { origin: origin, templates: templates }),\n Err(e) =>\n unsafe {\n Err(NickelError::without_response(format!(\"Failed to start response: {}\", e)))\n }\n }\n }\n}\n\nimpl<'a, 'b> Write for Response<'a, Streaming> {\n #[inline(always)]\n fn write(&mut self, buf: &[u8]) -> io::Result {\n self.origin.write(buf)\n }\n #[inline(always)]\n fn flush(&mut self) -> io::Result<()> {\n self.origin.flush()\n }\n}\n\nimpl<'a, 'b> Response<'a, Streaming> {\n \/\/\/ In the case of an unrecoverable error while a stream is already in\n \/\/\/ progress, there is no standard way to signal to the client that an\n \/\/\/ error has occurred. 
`bail` will drop the connection and log an error\n \/\/\/ message.\n pub fn bail(self, message: T) -> MiddlewareResult<'a>\n where T: Into> {\n let _ = self.end();\n unsafe { Err(NickelError::without_response(message)) }\n }\n\n \/\/\/ Flushes all writing of a response to the client.\n pub fn end(self) -> io::Result<()> {\n self.origin.end()\n }\n}\n\nimpl <'a, T> Response<'a, T> {\n \/\/\/ Gets the current status code for this response\n pub fn status(&self) -> StatusCode {\n self.origin.status()\n }\n}\n\n#[test]\nfn matches_content_type () {\n use hyper::mime::{Mime, TopLevel, SubLevel};\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension()\n .and_then(|os| os.to_str())\n .and_then(|s| s.parse().ok());\n\n assert_eq!(content_type, Some(MediaType::Txt));\n let content_type = content_type.map(get_media_type).unwrap();\n\n match content_type {\n Mime(TopLevel::Text, SubLevel::Plain, _) => {}, \/\/ OK\n wrong => panic!(\"Wrong mime: {}\", wrong)\n }\n}\nfix(response): loosen bounds for render pathuse std::borrow::Cow;\nuse std::sync::RwLock;\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\nuse std::path::Path;\nuse serialize::Encodable;\nuse hyper::status::StatusCode::{self, InternalServerError};\nuse hyper::server::Response as HyperResponse;\nuse hyper::header::{Date, HttpDate, Server, ContentType, ContentLength, Header, HeaderFormat};\nuse hyper::net::{Fresh, Streaming};\nuse time;\nuse mimes::{get_media_type, MediaType};\nuse mustache;\nuse mustache::Template;\nuse std::io;\nuse std::io::{Read, Write, copy};\nuse std::fs::File;\nuse {NickelError, Halt, MiddlewareResult, AsBytes};\n\npub type TemplateCache = RwLock>;\n\n\/\/\/A container for the response\npub struct Response<'a, T=Fresh> {\n \/\/\/the original `hyper::server::Response`\n pub origin: HyperResponse<'a, T>,\n templates: &'a TemplateCache\n}\n\nimpl<'a> Response<'a, Fresh> {\n pub fn from_internal<'c, 'd>(response: HyperResponse<'c, Fresh>,\n templates: &'c TemplateCache)\n -> Response<'c, Fresh> {\n Response {\n origin: response,\n templates: templates\n }\n }\n\n \/\/\/ Sets the content type by it's short form.\n \/\/\/ Returns the response for chaining.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Continue};\n \/\/\/ use nickel::mimes::MediaType;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.content_type(MediaType::Html);\n \/\/\/ Ok(Continue(res))\n \/\/\/ }\n \/\/\/ ```\n pub fn content_type(&mut self, mt: MediaType) -> &mut Response<'a> {\n self.origin.headers_mut().set(ContentType(get_media_type(mt)));\n self\n }\n\n \/\/\/ Sets the status code and returns the response for chaining\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Continue};\n \/\/\/ use nickel::status::StatusCode;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.set_status(StatusCode::NotFound);\n \/\/\/ Ok(Continue(res))\n \/\/\/ }\n \/\/\/ ```\n pub fn set_status(&mut self, status: StatusCode) -> &mut Response<'a> {\n *self.origin.status_mut() = status;\n self\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.send(\"hello world\")\n \/\/\/ 
}\n \/\/\/ ```\n pub fn send(self, text: T) -> MiddlewareResult<'a> {\n let mut stream = try!(self.start());\n match stream.write_all(text.as_bytes()) {\n Ok(()) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Failed to send: {}\", e))\n }\n }\n\n \/\/\/ Writes a file to the output.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/ use nickel::status::StatusCode;\n \/\/\/ use std::path::Path;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ let favicon = Path::new(\"\/assets\/favicon.ico\");\n \/\/\/ res.send_file(favicon)\n \/\/\/ }\n \/\/\/ ```\n pub fn send_file(mut self, path: &Path) -> MiddlewareResult<'a> {\n \/\/ Chunk the response\n self.origin.headers_mut().remove::();\n \/\/ Determine content type by file extension or default to binary\n self.content_type(path.extension()\n .and_then(|os| os.to_str())\n .and_then(|s| s.parse().ok())\n .unwrap_or(MediaType::Bin));\n\n match File::open(path) {\n Ok(mut file) => {\n let mut stream = try!(self.start());\n match copy(&mut file, &mut stream) {\n Ok(_) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Failed to send file: {}\", e))\n }\n }\n Err(e) => {\n self.error(InternalServerError,\n format!(\"Failed to send file '{:?}': {}\", path, e))\n }\n }\n }\n\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n \/\/\n \/\/ Also, it should only set them if not already set.\n fn set_fallback_headers(&mut self) {\n self.set_header_fallback(|| Date(HttpDate(time::now_utc())));\n self.set_header_fallback(|| Server(\"Nickel\".to_string()));\n self.set_header_fallback(|| ContentType(get_media_type(MediaType::Html)));\n }\n\n \/\/\/ Return an error with the appropriate status code for error handlers to\n \/\/\/ provide output for.\n pub fn error(self, status: StatusCode, message: T) -> MiddlewareResult<'a>\n where T: Into> {\n Err(NickelError::new(self, message, status))\n }\n\n \/\/\/ Sets the header if not already set.\n \/\/\/\n \/\/\/ If the header is not set then `f` will be called.\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate hyper;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt, MediaType, get_media_type};\n \/\/\/ use hyper::header::ContentType;\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ res.content_type(MediaType::Html);\n \/\/\/ res.set_header_fallback(|| {\n \/\/\/ panic!(\"Should not get called\");\n \/\/\/ ContentType(get_media_type(MediaType::Txt))\n \/\/\/ });\n \/\/\/ res.send(\"
<h1>
Hello World<\/h1>\")\n \/\/\/ }\n \/\/\/ # }\n \/\/\/ ```\n pub fn set_header_fallback(&mut self, f: F)\n where H: Header + HeaderFormat, F: FnOnce() -> H {\n let headers = self.origin.headers_mut();\n if !headers.has::() { headers.set(f()) }\n }\n\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```{rust}\n \/\/\/ use std::collections::HashMap;\n \/\/\/ use nickel::{Request, Response, MiddlewareResult, Halt};\n \/\/\/\n \/\/\/ fn handler<'a>(_: &mut Request, mut res: Response<'a>) -> MiddlewareResult<'a> {\n \/\/\/ let mut data = HashMap::new();\n \/\/\/ data.insert(\"name\", \"user\");\n \/\/\/ res.render(\"examples\/assets\/template.tpl\", &data)\n \/\/\/ }\n \/\/\/ ```\n pub fn render(self, path: P, data: &T) -> MiddlewareResult<'a>\n where T: Encodable, P: AsRef + Into {\n fn render<'a, T>(res: Response<'a>, template: &Template, data: &T)\n -> MiddlewareResult<'a> where T: Encodable {\n let mut stream = try!(res.start());\n match template.render(&mut stream, data) {\n Ok(()) => Ok(Halt(stream)),\n Err(e) => stream.bail(format!(\"Problem rendering template: {:?}\", e))\n }\n }\n\n \/\/ Fast path doesn't need writer lock\n if let Some(t) = self.templates.read().unwrap().get(path.as_ref()) {\n return render(self, t, data);\n }\n\n \/\/ We didn't find the template, get writers lock\n let mut templates = self.templates.write().unwrap();\n\n \/\/ Additional clone required for now as the entry api doesn't give us a key ref\n let path = path.into();\n\n \/\/ Search again incase there was a race to compile the template\n let template = match templates.entry(path.clone()) {\n Vacant(entry) => {\n let mut file = File::open(&Path::new(&path))\n .ok().expect(&*format!(\"Couldn't open the template file: {}\", &path));\n let mut raw_template = String::new();\n\n file.read_to_string(&mut raw_template)\n .ok().expect(&*format!(\"Couldn't open the template file: {}\", &path));\n\n entry.insert(mustache::compile_str(&*raw_template))\n },\n Occupied(entry) => entry.into_mut()\n };\n\n render(self, template, data)\n }\n\n pub fn start(mut self) -> Result, NickelError<'a>> {\n self.set_fallback_headers();\n\n let Response { origin, templates } = self;\n match origin.start() {\n Ok(origin) => Ok(Response { origin: origin, templates: templates }),\n Err(e) =>\n unsafe {\n Err(NickelError::without_response(format!(\"Failed to start response: {}\", e)))\n }\n }\n }\n}\n\nimpl<'a, 'b> Write for Response<'a, Streaming> {\n #[inline(always)]\n fn write(&mut self, buf: &[u8]) -> io::Result {\n self.origin.write(buf)\n }\n #[inline(always)]\n fn flush(&mut self) -> io::Result<()> {\n self.origin.flush()\n }\n}\n\nimpl<'a, 'b> Response<'a, Streaming> {\n \/\/\/ In the case of an unrecoverable error while a stream is already in\n \/\/\/ progress, there is no standard way to signal to the client that an\n \/\/\/ error has occurred. 
`bail` will drop the connection and log an error\n \/\/\/ message.\n pub fn bail(self, message: T) -> MiddlewareResult<'a>\n where T: Into> {\n let _ = self.end();\n unsafe { Err(NickelError::without_response(message)) }\n }\n\n \/\/\/ Flushes all writing of a response to the client.\n pub fn end(self) -> io::Result<()> {\n self.origin.end()\n }\n}\n\nimpl <'a, T> Response<'a, T> {\n \/\/\/ Gets the current status code for this response\n pub fn status(&self) -> StatusCode {\n self.origin.status()\n }\n}\n\n#[test]\nfn matches_content_type () {\n use hyper::mime::{Mime, TopLevel, SubLevel};\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension()\n .and_then(|os| os.to_str())\n .and_then(|s| s.parse().ok());\n\n assert_eq!(content_type, Some(MediaType::Txt));\n let content_type = content_type.map(get_media_type).unwrap();\n\n match content_type {\n Mime(TopLevel::Text, SubLevel::Plain, _) => {}, \/\/ OK\n wrong => panic!(\"Wrong mime: {}\", wrong)\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A structure for holding a set of enum variants.\n\/\/!\n\/\/! This module defines a container which uses an efficient bit mask\n\/\/! representation to hold C-like enum variants.\n\nuse core::prelude::*;\nuse core::fmt;\n\n\/\/ FIXME(conventions): implement BitXor\n\/\/ FIXME(contentions): implement union family of methods? (general design may be wrong here)\n\/\/ FIXME(conventions): implement len\n\n#[deriving(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\/\/\/ A specialized `Set` implementation to use enum types.\npub struct EnumSet {\n \/\/ We must maintain the invariant that no bits are set\n \/\/ for which no variant exists\n bits: uint\n}\n\nimpl fmt::Show for EnumSet {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"{{\"));\n let mut first = true;\n for e in self.iter() {\n if !first {\n try!(write!(fmt, \", \"));\n }\n try!(write!(fmt, \"{}\", e));\n first = false;\n }\n write!(fmt, \"}}\")\n }\n}\n\n\/**\nAn interface for casting C-like enum to uint and back.\nA typically implementation is as below.\n\n```{rust,ignore}\n#[repr(uint)]\nenum Foo {\n A, B, C\n}\n\nimpl CLike for Foo {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Foo {\n unsafe { mem::transmute(v) }\n }\n}\n```\n*\/\npub trait CLike {\n \/\/\/ Converts a C-like enum to a `uint`.\n fn to_uint(&self) -> uint;\n \/\/\/ Converts a `uint` to a C-like enum.\n fn from_uint(uint) -> Self;\n}\n\nfn bit(e: &E) -> uint {\n use core::uint;\n let value = e.to_uint();\n assert!(value < uint::BITS,\n \"EnumSet only supports up to {} variants.\", uint::BITS - 1);\n 1 << value\n}\n\nimpl EnumSet {\n \/\/\/ Deprecated: Renamed to `new`.\n #[deprecated = \"Renamed to `new`\"]\n pub fn empty() -> EnumSet {\n EnumSet::new()\n }\n\n \/\/\/ Returns an empty `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn new() -> EnumSet {\n EnumSet {bits: 0}\n }\n\n \/\/\/ Returns true if the `EnumSet` is empty.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_empty(&self) -> bool {\n self.bits == 0\n }\n\n 
pub fn clear(&mut self) {\n self.bits = 0;\n }\n\n \/\/\/ Returns `true` if the `EnumSet` contains any enum of the given `EnumSet`.\n \/\/\/ Deprecated: Use `is_disjoint`.\n #[deprecated = \"Use `is_disjoint`\"]\n pub fn intersects(&self, e: EnumSet) -> bool {\n !self.is_disjoint(&e)\n }\n\n \/\/\/ Returns `false` if the `EnumSet` contains any enum of the given `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_disjoint(&self, other: &EnumSet) -> bool {\n (self.bits & other.bits) == 0\n }\n\n \/\/\/ Returns `true` if a given `EnumSet` is included in this `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_superset(&self, other: &EnumSet) -> bool {\n (self.bits & other.bits) == other.bits\n }\n\n \/\/\/ Returns `true` if this `EnumSet` is included in the given `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_subset(&self, other: &EnumSet) -> bool {\n other.is_superset(self)\n }\n\n \/\/\/ Returns the union of both `EnumSets`.\n pub fn union(&self, e: EnumSet) -> EnumSet {\n EnumSet {bits: self.bits | e.bits}\n }\n\n \/\/\/ Returns the intersection of both `EnumSets`.\n pub fn intersection(&self, e: EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & e.bits}\n }\n\n \/\/\/ Deprecated: Use `insert`.\n #[deprecated = \"Use `insert`\"]\n pub fn add(&mut self, e: E) {\n self.insert(e);\n }\n\n \/\/\/ Adds an enum to the `EnumSet`, and returns `true` if it wasn't there before\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn insert(&mut self, e: E) -> bool {\n let result = !self.contains(&e);\n self.bits |= bit(&e);\n result\n }\n\n \/\/\/ Removes an enum from the EnumSet\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn remove(&mut self, e: &E) -> bool {\n let result = self.contains(e);\n self.bits &= !bit(e);\n result\n }\n\n \/\/\/ Deprecated: use `contains`.\n #[deprecated = \"use `contains\"]\n pub fn contains_elem(&self, e: E) -> bool {\n self.contains(&e)\n }\n\n \/\/\/ Returns `true` if an `EnumSet` contains a given enum.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn contains(&self, e: &E) -> bool {\n (self.bits & bit(e)) != 0\n }\n\n \/\/\/ Returns an iterator over an `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn iter(&self) -> Items {\n Items::new(self.bits)\n }\n}\n\nimpl Sub, EnumSet> for EnumSet {\n fn sub(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & !e.bits}\n }\n}\n\nimpl BitOr, EnumSet> for EnumSet {\n fn bitor(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits | e.bits}\n }\n}\n\nimpl BitAnd, EnumSet> for EnumSet {\n fn bitand(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & e.bits}\n }\n}\n\n\/\/\/ An iterator over an EnumSet\npub struct Items {\n index: uint,\n bits: uint,\n}\n\nimpl Items {\n fn new(bits: uint) -> Items {\n Items { index: 0, bits: bits }\n }\n}\n\nimpl Iterator for Items {\n fn next(&mut self) -> Option {\n if self.bits == 0 {\n return None;\n }\n\n while (self.bits & 1) == 0 {\n self.index += 1;\n self.bits >>= 1;\n }\n let elem = CLike::from_uint(self.index);\n self.index += 1;\n self.bits >>= 1;\n Some(elem)\n }\n\n fn size_hint(&self) -> (uint, Option) {\n let exact = self.bits.count_ones();\n (exact, Some(exact))\n 
}\n}\n\n#[cfg(test)]\nmod test {\n use std::prelude::*;\n use std::mem;\n\n use super::{EnumSet, CLike};\n\n #[deriving(PartialEq, Show)]\n #[repr(uint)]\n enum Foo {\n A, B, C\n }\n\n impl CLike for Foo {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Foo {\n unsafe { mem::transmute(v) }\n }\n }\n\n #[test]\n fn test_new() {\n let e: EnumSet = EnumSet::new();\n assert!(e.is_empty());\n }\n\n #[test]\n fn test_show() {\n let mut e = EnumSet::new();\n assert_eq!(\"{}\", e.to_string().as_slice());\n e.insert(A);\n assert_eq!(\"{A}\", e.to_string().as_slice());\n e.insert(C);\n assert_eq!(\"{A, C}\", e.to_string().as_slice());\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ intersect\n\n #[test]\n fn test_two_empties_do_not_intersect() {\n let e1: EnumSet = EnumSet::new();\n let e2: EnumSet = EnumSet::new();\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_empty_does_not_intersect_with_full() {\n let e1: EnumSet = EnumSet::new();\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n e2.insert(C);\n\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_disjoint_intersects() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(B);\n\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_overlapping_intersects() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n\n assert!(!e1.is_disjoint(&e2));\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ contains and contains_elem\n\n #[test]\n fn test_superset() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n\n let mut e3: EnumSet = EnumSet::new();\n e3.insert(C);\n\n assert!(e1.is_subset(&e2));\n assert!(e2.is_superset(&e1));\n assert!(!e3.is_superset(&e2))\n assert!(!e2.is_superset(&e3))\n }\n\n #[test]\n fn test_contains() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n assert!(e1.contains(&A));\n assert!(!e1.contains(&B));\n assert!(!e1.contains(&C));\n\n e1.insert(A);\n e1.insert(B);\n assert!(e1.contains(&A));\n assert!(e1.contains(&B));\n assert!(!e1.contains(&C));\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ iter\n\n #[test]\n fn test_iterator() {\n let mut e1: EnumSet = EnumSet::new();\n\n let elems: Vec = e1.iter().collect();\n assert!(elems.is_empty())\n\n e1.insert(A);\n let elems = e1.iter().collect();\n assert_eq!(vec![A], elems)\n\n e1.insert(C);\n let elems = e1.iter().collect();\n assert_eq!(vec![A,C], elems)\n\n e1.insert(C);\n let elems = e1.iter().collect();\n assert_eq!(vec![A,C], elems)\n\n e1.insert(B);\n let elems = e1.iter().collect();\n assert_eq!(vec![A,B,C], elems)\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ operators\n\n #[test]\n fn test_operators() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n e1.insert(C);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(B);\n e2.insert(C);\n\n let e_union = e1 | e2;\n let elems = 
e_union.iter().collect();\n assert_eq!(vec![A,B,C], elems)\n\n let e_intersection = e1 & e2;\n let elems = e_intersection.iter().collect();\n assert_eq!(vec![C], elems)\n\n let e_subtract = e1 - e2;\n let elems = e_subtract.iter().collect();\n assert_eq!(vec![A], elems)\n }\n\n #[test]\n #[should_fail]\n fn test_overflow() {\n #[allow(dead_code)]\n #[repr(uint)]\n enum Bar {\n V00, V01, V02, V03, V04, V05, V06, V07, V08, V09,\n V10, V11, V12, V13, V14, V15, V16, V17, V18, V19,\n V20, V21, V22, V23, V24, V25, V26, V27, V28, V29,\n V30, V31, V32, V33, V34, V35, V36, V37, V38, V39,\n V40, V41, V42, V43, V44, V45, V46, V47, V48, V49,\n V50, V51, V52, V53, V54, V55, V56, V57, V58, V59,\n V60, V61, V62, V63, V64, V65, V66, V67, V68, V69,\n }\n impl CLike for Bar {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Bar {\n unsafe { mem::transmute(v) }\n }\n }\n let mut set = EnumSet::empty();\n set.add(V64);\n }\n}\nauto merge of #18740 : jbcrail\/rust\/implement-enum-set-len, r=alexcrichton\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A structure for holding a set of enum variants.\n\/\/!\n\/\/! This module defines a container which uses an efficient bit mask\n\/\/! representation to hold C-like enum variants.\n\nuse core::prelude::*;\nuse core::fmt;\n\n\/\/ FIXME(conventions): implement BitXor\n\/\/ FIXME(contentions): implement union family of methods? (general design may be wrong here)\n\n#[deriving(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\/\/\/ A specialized `Set` implementation to use enum types.\npub struct EnumSet {\n \/\/ We must maintain the invariant that no bits are set\n \/\/ for which no variant exists\n bits: uint\n}\n\nimpl fmt::Show for EnumSet {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"{{\"));\n let mut first = true;\n for e in self.iter() {\n if !first {\n try!(write!(fmt, \", \"));\n }\n try!(write!(fmt, \"{}\", e));\n first = false;\n }\n write!(fmt, \"}}\")\n }\n}\n\n\/**\nAn interface for casting C-like enum to uint and back.\nA typically implementation is as below.\n\n```{rust,ignore}\n#[repr(uint)]\nenum Foo {\n A, B, C\n}\n\nimpl CLike for Foo {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Foo {\n unsafe { mem::transmute(v) }\n }\n}\n```\n*\/\npub trait CLike {\n \/\/\/ Converts a C-like enum to a `uint`.\n fn to_uint(&self) -> uint;\n \/\/\/ Converts a `uint` to a C-like enum.\n fn from_uint(uint) -> Self;\n}\n\nfn bit(e: &E) -> uint {\n use core::uint;\n let value = e.to_uint();\n assert!(value < uint::BITS,\n \"EnumSet only supports up to {} variants.\", uint::BITS - 1);\n 1 << value\n}\n\nimpl EnumSet {\n \/\/\/ Deprecated: Renamed to `new`.\n #[deprecated = \"Renamed to `new`\"]\n pub fn empty() -> EnumSet {\n EnumSet::new()\n }\n\n \/\/\/ Returns an empty `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn new() -> EnumSet {\n EnumSet {bits: 0}\n }\n\n \/\/\/ Returns the number of elements in the given `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn len(&self) -> uint {\n self.bits.count_ones()\n }\n\n \/\/\/ 
Returns true if the `EnumSet` is empty.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_empty(&self) -> bool {\n self.bits == 0\n }\n\n pub fn clear(&mut self) {\n self.bits = 0;\n }\n\n \/\/\/ Returns `true` if the `EnumSet` contains any enum of the given `EnumSet`.\n \/\/\/ Deprecated: Use `is_disjoint`.\n #[deprecated = \"Use `is_disjoint`\"]\n pub fn intersects(&self, e: EnumSet) -> bool {\n !self.is_disjoint(&e)\n }\n\n \/\/\/ Returns `false` if the `EnumSet` contains any enum of the given `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_disjoint(&self, other: &EnumSet) -> bool {\n (self.bits & other.bits) == 0\n }\n\n \/\/\/ Returns `true` if a given `EnumSet` is included in this `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_superset(&self, other: &EnumSet) -> bool {\n (self.bits & other.bits) == other.bits\n }\n\n \/\/\/ Returns `true` if this `EnumSet` is included in the given `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn is_subset(&self, other: &EnumSet) -> bool {\n other.is_superset(self)\n }\n\n \/\/\/ Returns the union of both `EnumSets`.\n pub fn union(&self, e: EnumSet) -> EnumSet {\n EnumSet {bits: self.bits | e.bits}\n }\n\n \/\/\/ Returns the intersection of both `EnumSets`.\n pub fn intersection(&self, e: EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & e.bits}\n }\n\n \/\/\/ Deprecated: Use `insert`.\n #[deprecated = \"Use `insert`\"]\n pub fn add(&mut self, e: E) {\n self.insert(e);\n }\n\n \/\/\/ Adds an enum to the `EnumSet`, and returns `true` if it wasn't there before\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn insert(&mut self, e: E) -> bool {\n let result = !self.contains(&e);\n self.bits |= bit(&e);\n result\n }\n\n \/\/\/ Removes an enum from the EnumSet\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn remove(&mut self, e: &E) -> bool {\n let result = self.contains(e);\n self.bits &= !bit(e);\n result\n }\n\n \/\/\/ Deprecated: use `contains`.\n #[deprecated = \"use `contains\"]\n pub fn contains_elem(&self, e: E) -> bool {\n self.contains(&e)\n }\n\n \/\/\/ Returns `true` if an `EnumSet` contains a given enum.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn contains(&self, e: &E) -> bool {\n (self.bits & bit(e)) != 0\n }\n\n \/\/\/ Returns an iterator over an `EnumSet`.\n #[unstable = \"matches collection reform specification, waiting for dust to settle\"]\n pub fn iter(&self) -> Items {\n Items::new(self.bits)\n }\n}\n\nimpl Sub, EnumSet> for EnumSet {\n fn sub(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & !e.bits}\n }\n}\n\nimpl BitOr, EnumSet> for EnumSet {\n fn bitor(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits | e.bits}\n }\n}\n\nimpl BitAnd, EnumSet> for EnumSet {\n fn bitand(&self, e: &EnumSet) -> EnumSet {\n EnumSet {bits: self.bits & e.bits}\n }\n}\n\n\/\/\/ An iterator over an EnumSet\npub struct Items {\n index: uint,\n bits: uint,\n}\n\nimpl Items {\n fn new(bits: uint) -> Items {\n Items { index: 0, bits: bits }\n }\n}\n\nimpl Iterator for Items {\n fn next(&mut self) -> Option {\n if self.bits == 0 {\n return None;\n }\n\n while (self.bits & 1) == 0 {\n self.index += 1;\n self.bits >>= 1;\n }\n let elem = 
CLike::from_uint(self.index);\n self.index += 1;\n self.bits >>= 1;\n Some(elem)\n }\n\n fn size_hint(&self) -> (uint, Option) {\n let exact = self.bits.count_ones();\n (exact, Some(exact))\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::prelude::*;\n use std::mem;\n\n use super::{EnumSet, CLike};\n\n #[deriving(PartialEq, Show)]\n #[repr(uint)]\n enum Foo {\n A, B, C\n }\n\n impl CLike for Foo {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Foo {\n unsafe { mem::transmute(v) }\n }\n }\n\n #[test]\n fn test_new() {\n let e: EnumSet = EnumSet::new();\n assert!(e.is_empty());\n }\n\n #[test]\n fn test_show() {\n let mut e = EnumSet::new();\n assert_eq!(\"{}\", e.to_string().as_slice());\n e.insert(A);\n assert_eq!(\"{A}\", e.to_string().as_slice());\n e.insert(C);\n assert_eq!(\"{A, C}\", e.to_string().as_slice());\n }\n\n #[test]\n fn test_len() {\n let mut e = EnumSet::new();\n assert_eq!(e.len(), 0);\n e.insert(A);\n e.insert(B);\n e.insert(C);\n assert_eq!(e.len(), 3);\n e.remove(&A);\n assert_eq!(e.len(), 2);\n e.clear();\n assert_eq!(e.len(), 0);\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ intersect\n\n #[test]\n fn test_two_empties_do_not_intersect() {\n let e1: EnumSet = EnumSet::new();\n let e2: EnumSet = EnumSet::new();\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_empty_does_not_intersect_with_full() {\n let e1: EnumSet = EnumSet::new();\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n e2.insert(C);\n\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_disjoint_intersects() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(B);\n\n assert!(e1.is_disjoint(&e2));\n }\n\n #[test]\n fn test_overlapping_intersects() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n\n assert!(!e1.is_disjoint(&e2));\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ contains and contains_elem\n\n #[test]\n fn test_superset() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(A);\n e2.insert(B);\n\n let mut e3: EnumSet = EnumSet::new();\n e3.insert(C);\n\n assert!(e1.is_subset(&e2));\n assert!(e2.is_superset(&e1));\n assert!(!e3.is_superset(&e2))\n assert!(!e2.is_superset(&e3))\n }\n\n #[test]\n fn test_contains() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n assert!(e1.contains(&A));\n assert!(!e1.contains(&B));\n assert!(!e1.contains(&C));\n\n e1.insert(A);\n e1.insert(B);\n assert!(e1.contains(&A));\n assert!(e1.contains(&B));\n assert!(!e1.contains(&C));\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ iter\n\n #[test]\n fn test_iterator() {\n let mut e1: EnumSet = EnumSet::new();\n\n let elems: Vec = e1.iter().collect();\n assert!(elems.is_empty())\n\n e1.insert(A);\n let elems = e1.iter().collect();\n assert_eq!(vec![A], elems)\n\n e1.insert(C);\n let elems = e1.iter().collect();\n assert_eq!(vec![A,C], elems)\n\n e1.insert(C);\n let elems = e1.iter().collect();\n assert_eq!(vec![A,C], elems)\n\n e1.insert(B);\n let elems = e1.iter().collect();\n 
assert_eq!(vec![A,B,C], elems)\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ operators\n\n #[test]\n fn test_operators() {\n let mut e1: EnumSet = EnumSet::new();\n e1.insert(A);\n e1.insert(C);\n\n let mut e2: EnumSet = EnumSet::new();\n e2.insert(B);\n e2.insert(C);\n\n let e_union = e1 | e2;\n let elems = e_union.iter().collect();\n assert_eq!(vec![A,B,C], elems)\n\n let e_intersection = e1 & e2;\n let elems = e_intersection.iter().collect();\n assert_eq!(vec![C], elems)\n\n let e_subtract = e1 - e2;\n let elems = e_subtract.iter().collect();\n assert_eq!(vec![A], elems)\n }\n\n #[test]\n #[should_fail]\n fn test_overflow() {\n #[allow(dead_code)]\n #[repr(uint)]\n enum Bar {\n V00, V01, V02, V03, V04, V05, V06, V07, V08, V09,\n V10, V11, V12, V13, V14, V15, V16, V17, V18, V19,\n V20, V21, V22, V23, V24, V25, V26, V27, V28, V29,\n V30, V31, V32, V33, V34, V35, V36, V37, V38, V39,\n V40, V41, V42, V43, V44, V45, V46, V47, V48, V49,\n V50, V51, V52, V53, V54, V55, V56, V57, V58, V59,\n V60, V61, V62, V63, V64, V65, V66, V67, V68, V69,\n }\n impl CLike for Bar {\n fn to_uint(&self) -> uint {\n *self as uint\n }\n\n fn from_uint(v: uint) -> Bar {\n unsafe { mem::transmute(v) }\n }\n }\n let mut set = EnumSet::empty();\n set.add(V64);\n }\n}\n<|endoftext|>"} {"text":"We don't need an immutable view into the buffer.<|endoftext|>"} {"text":"Hacked in support for mouse 'hover' events. (i.e. XTerm 1003)<|endoftext|>"} {"text":"Added documentation comments to the top-level items in the geometry module<|endoftext|>"} {"text":"more compact code and additional test<|endoftext|>"} {"text":"Make structs and enum private<|endoftext|>"} {"text":"use std::path::{Path, Component};\nuse std::error::Error;\nuse std::io::{self, Read};\nuse std::fs::{self, metadata, File};\n\n\/\/\/ Takes a path to a file and try to read the file into a String\n\npub fn file_to_string(path: &Path) -> Result> {\n let mut file = match File::open(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"[*]: Failed to open {:?}\", path);\n return Err(Box::new(e));\n },\n };\n\n let mut content = String::new();\n\n if let Err(e) = file.read_to_string(&mut content) {\n debug!(\"[*]: Failed to read {:?}\", path);\n return Err(Box::new(e));\n }\n\n Ok(content)\n}\n\n\/\/\/ Takes a path and returns a path containing just enough `..\/` to point to the root of the given path.\n\/\/\/\n\/\/\/ This is mostly interesting for a relative path to point back to the directory from where the\n\/\/\/ path starts.\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ let mut path = Path::new(\"some\/relative\/path\");\n\/\/\/\n\/\/\/ println!(\"{}\", path_to_root(&path));\n\/\/\/ ```\n\/\/\/\n\/\/\/ **Outputs**\n\/\/\/\n\/\/\/ ```text\n\/\/\/ \"..\/..\/\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ **note:** it's not very fool-proof, if you find a situation where it doesn't return the correct\n\/\/\/ path. Consider [submitting a new issue](https:\/\/github.com\/azerupi\/mdBook\/issues) or a\n\/\/\/ [pull-request](https:\/\/github.com\/azerupi\/mdBook\/pulls) to improve it.\n\npub fn path_to_root(path: &Path) -> String {\n debug!(\"[fn]: path_to_root\");\n \/\/ Remove filename and add \"..\/\" for every directory\n\n path.to_path_buf()\n .parent()\n .expect(\"\")\n .components()\n .fold(String::new(), |mut s, c| {\n match c {\n Component::Normal(_) => s.push_str(\"..\/\"),\n _ => {\n debug!(\"[*]: Other path component... 
{:?}\", c);\n },\n }\n s\n })\n}\n\n\n\n\/\/\/ This function creates a file and returns it. But before creating the file it checks every\n\/\/\/ directory in the path to see if it exists, and if it does not it will be created.\n\npub fn create_file(path: &Path) -> Result> {\n debug!(\"[fn]: create_file\");\n\n \/\/ Construct path\n if let Some(p) = path.parent() {\n debug!(\"Parent directory is: {:?}\", p);\n\n try!(fs::create_dir_all(p));\n }\n\n debug!(\"[*]: Create file: {:?}\", path);\n let f = match File::create(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"File::create: {}\", e);\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, format!(\"{}\", e))));\n },\n };\n\n Ok(f)\n}\n\n\/\/\/ Removes all the content of a directory but not the directory itself\n\npub fn remove_dir_content(dir: &Path) -> Result<(), Box> {\n for item in try!(fs::read_dir(dir)) {\n if let Ok(item) = item {\n let item = item.path();\n if item.is_dir() {\n try!(fs::remove_dir_all(item));\n } else {\n try!(fs::remove_file(item));\n }\n }\n }\n Ok(())\n}\n\n\/\/\/\n\/\/\/\n\/\/\/ Copies all files of a directory to another one except the files with the extensions given in the\n\/\/\/ `ext_blacklist` array\n\npub fn copy_files_except_ext(from: &Path, to: &Path, recursive: bool, ext_blacklist: &[&str]) -> Result<(), Box> {\n debug!(\"[fn] copy_files_except_ext\");\n \/\/ Check that from and to are different\n if from == to {\n return Ok(());\n }\n debug!(\"[*] Loop\");\n for entry in try!(fs::read_dir(from)) {\n let entry = try!(entry);\n debug!(\"[*] {:?}\", entry.path());\n let metadata = try!(entry.metadata());\n\n \/\/ If the entry is a dir and the recursive option is enabled, call itself\n if metadata.is_dir() && recursive {\n if entry.path() == to.to_path_buf() {\n continue;\n }\n debug!(\"[*] is dir\");\n\n \/\/ check if output dir already exists\n if !to.join(entry.file_name()).exists() {\n try!(fs::create_dir(&to.join(entry.file_name())));\n }\n\n try!(copy_files_except_ext(&from.join(entry.file_name()),\n &to.join(entry.file_name()),\n true,\n ext_blacklist));\n } else if metadata.is_file() {\n\n \/\/ Check if it is in the blacklist\n if let Some(ext) = entry.path().extension() {\n if ext_blacklist.contains(&ext.to_str().unwrap()) {\n continue;\n }\n debug!(\"[*] creating path for file: {:?}\",\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n\n info!(\"[*] Copying file: {:?}\\n to {:?}\",\n entry.path(),\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n try!(fs::copy(entry.path(),\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\"))));\n }\n }\n }\n Ok(())\n}\n\n\n\/\/ ------------------------------------------------------------------------------------------------\n\/\/ ------------------------------------------------------------------------------------------------\n\n\/\/ tests\n\n#[cfg(test)]\nmod tests {\n extern crate tempdir;\n\n use super::copy_files_except_ext;\n use std::fs;\n\n #[test]\n fn copy_files_except_ext_test() {\n let tmp = match tempdir::TempDir::new(\"\") {\n Ok(t) => t,\n Err(_) => panic!(\"Could not create a temp dir\"),\n };\n\n \/\/ Create a couple of files\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.txt\")) {\n panic!(\"Could not create file.txt\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.md\")) {\n panic!(\"Could not create file.md\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.png\")) {\n panic!(\"Could not create 
file.png\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir\")) {\n panic!(\"Could not create sub_dir\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir\/file.png\")) {\n panic!(\"Could not create sub_dir\/file.png\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir_exists\")) {\n panic!(\"Could not create sub_dir_exists\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir_exists\/file.txt\")) {\n panic!(\"Could not create sub_dir_exists\/file.txt\")\n }\n\n \/\/ Create output dir\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\")) {\n panic!(\"Could not create output\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\/sub_dir_exists\")) {\n panic!(\"Could not create output\/sub_dir_exists\")\n }\n\n match copy_files_except_ext(&tmp.path(), &tmp.path().join(\"output\"), true, &[\"md\"]) {\n Err(e) => panic!(\"Error while executing the function:\\n{:?}\", e),\n Ok(_) => {},\n }\n\n \/\/ Check if the correct files where created\n if !(&tmp.path().join(\"output\/file.txt\")).exists() {\n panic!(\"output\/file.txt should exist\")\n }\n if (&tmp.path().join(\"output\/file.md\")).exists() {\n panic!(\"output\/file.md should not exist\")\n }\n if !(&tmp.path().join(\"output\/file.png\")).exists() {\n panic!(\"output\/file.png should exist\")\n }\n if !(&tmp.path().join(\"output\/sub_dir\/file.png\")).exists() {\n panic!(\"output\/sub_dir\/file.png should exist\")\n }\n if !(&tmp.path().join(\"output\/sub_dir_exists\/file.txt\")).exists() {\n panic!(\"output\/sub_dir\/file.png should exist\")\n }\n\n }\n}\nCopy files with no extension too.use std::path::{Path, Component};\nuse std::error::Error;\nuse std::io::{self, Read};\nuse std::fs::{self, metadata, File};\n\n\/\/\/ Takes a path to a file and try to read the file into a String\n\npub fn file_to_string(path: &Path) -> Result> {\n let mut file = match File::open(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"[*]: Failed to open {:?}\", path);\n return Err(Box::new(e));\n },\n };\n\n let mut content = String::new();\n\n if let Err(e) = file.read_to_string(&mut content) {\n debug!(\"[*]: Failed to read {:?}\", path);\n return Err(Box::new(e));\n }\n\n Ok(content)\n}\n\n\/\/\/ Takes a path and returns a path containing just enough `..\/` to point to the root of the given path.\n\/\/\/\n\/\/\/ This is mostly interesting for a relative path to point back to the directory from where the\n\/\/\/ path starts.\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ let mut path = Path::new(\"some\/relative\/path\");\n\/\/\/\n\/\/\/ println!(\"{}\", path_to_root(&path));\n\/\/\/ ```\n\/\/\/\n\/\/\/ **Outputs**\n\/\/\/\n\/\/\/ ```text\n\/\/\/ \"..\/..\/\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ **note:** it's not very fool-proof, if you find a situation where it doesn't return the correct\n\/\/\/ path. Consider [submitting a new issue](https:\/\/github.com\/azerupi\/mdBook\/issues) or a\n\/\/\/ [pull-request](https:\/\/github.com\/azerupi\/mdBook\/pulls) to improve it.\n\npub fn path_to_root(path: &Path) -> String {\n debug!(\"[fn]: path_to_root\");\n \/\/ Remove filename and add \"..\/\" for every directory\n\n path.to_path_buf()\n .parent()\n .expect(\"\")\n .components()\n .fold(String::new(), |mut s, c| {\n match c {\n Component::Normal(_) => s.push_str(\"..\/\"),\n _ => {\n debug!(\"[*]: Other path component... {:?}\", c);\n },\n }\n s\n })\n}\n\n\n\n\/\/\/ This function creates a file and returns it. 
But before creating the file it checks every\n\/\/\/ directory in the path to see if it exists, and if it does not it will be created.\n\npub fn create_file(path: &Path) -> Result> {\n debug!(\"[fn]: create_file\");\n\n \/\/ Construct path\n if let Some(p) = path.parent() {\n debug!(\"Parent directory is: {:?}\", p);\n\n try!(fs::create_dir_all(p));\n }\n\n debug!(\"[*]: Create file: {:?}\", path);\n let f = match File::create(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"File::create: {}\", e);\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, format!(\"{}\", e))));\n },\n };\n\n Ok(f)\n}\n\n\/\/\/ Removes all the content of a directory but not the directory itself\n\npub fn remove_dir_content(dir: &Path) -> Result<(), Box> {\n for item in try!(fs::read_dir(dir)) {\n if let Ok(item) = item {\n let item = item.path();\n if item.is_dir() {\n try!(fs::remove_dir_all(item));\n } else {\n try!(fs::remove_file(item));\n }\n }\n }\n Ok(())\n}\n\n\/\/\/\n\/\/\/\n\/\/\/ Copies all files of a directory to another one except the files with the extensions given in the\n\/\/\/ `ext_blacklist` array\n\npub fn copy_files_except_ext(from: &Path, to: &Path, recursive: bool, ext_blacklist: &[&str]) -> Result<(), Box> {\n debug!(\"[fn] copy_files_except_ext\");\n \/\/ Check that from and to are different\n if from == to {\n return Ok(());\n }\n debug!(\"[*] Loop\");\n for entry in try!(fs::read_dir(from)) {\n let entry = try!(entry);\n debug!(\"[*] {:?}\", entry.path());\n let metadata = try!(entry.metadata());\n\n \/\/ If the entry is a dir and the recursive option is enabled, call itself\n if metadata.is_dir() && recursive {\n if entry.path() == to.to_path_buf() {\n continue;\n }\n debug!(\"[*] is dir\");\n\n \/\/ check if output dir already exists\n if !to.join(entry.file_name()).exists() {\n try!(fs::create_dir(&to.join(entry.file_name())));\n }\n\n try!(copy_files_except_ext(&from.join(entry.file_name()),\n &to.join(entry.file_name()),\n true,\n ext_blacklist));\n } else if metadata.is_file() {\n\n \/\/ Check if it is in the blacklist\n if let Some(ext) = entry.path().extension() {\n if ext_blacklist.contains(&ext.to_str().unwrap()) {\n continue;\n }\n }\n debug!(\"[*] creating path for file: {:?}\",\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n\n info!(\"[*] Copying file: {:?}\\n to {:?}\",\n entry.path(),\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n try!(fs::copy(entry.path(),\n &to.join(entry.path().file_name().expect(\"a file should have a file name...\"))));\n }\n }\n Ok(())\n}\n\n\n\/\/ ------------------------------------------------------------------------------------------------\n\/\/ ------------------------------------------------------------------------------------------------\n\n\/\/ tests\n\n#[cfg(test)]\nmod tests {\n extern crate tempdir;\n\n use super::copy_files_except_ext;\n use std::fs;\n\n #[test]\n fn copy_files_except_ext_test() {\n let tmp = match tempdir::TempDir::new(\"\") {\n Ok(t) => t,\n Err(_) => panic!(\"Could not create a temp dir\"),\n };\n\n \/\/ Create a couple of files\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.txt\")) {\n panic!(\"Could not create file.txt\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.md\")) {\n panic!(\"Could not create file.md\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.png\")) {\n panic!(\"Could not create file.png\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir\")) {\n 
panic!(\"Could not create sub_dir\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir\/file.png\")) {\n panic!(\"Could not create sub_dir\/file.png\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir_exists\")) {\n panic!(\"Could not create sub_dir_exists\")\n }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir_exists\/file.txt\")) {\n panic!(\"Could not create sub_dir_exists\/file.txt\")\n }\n\n \/\/ Create output dir\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\")) {\n panic!(\"Could not create output\")\n }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\/sub_dir_exists\")) {\n panic!(\"Could not create output\/sub_dir_exists\")\n }\n\n match copy_files_except_ext(&tmp.path(), &tmp.path().join(\"output\"), true, &[\"md\"]) {\n Err(e) => panic!(\"Error while executing the function:\\n{:?}\", e),\n Ok(_) => {},\n }\n\n \/\/ Check if the correct files where created\n if !(&tmp.path().join(\"output\/file.txt\")).exists() {\n panic!(\"output\/file.txt should exist\")\n }\n if (&tmp.path().join(\"output\/file.md\")).exists() {\n panic!(\"output\/file.md should not exist\")\n }\n if !(&tmp.path().join(\"output\/file.png\")).exists() {\n panic!(\"output\/file.png should exist\")\n }\n if !(&tmp.path().join(\"output\/sub_dir\/file.png\")).exists() {\n panic!(\"output\/sub_dir\/file.png should exist\")\n }\n if !(&tmp.path().join(\"output\/sub_dir_exists\/file.txt\")).exists() {\n panic!(\"output\/sub_dir\/file.png should exist\")\n }\n\n }\n}\n<|endoftext|>"} {"text":"gash file added\/\/\n\/\/ gash.rs\n\/\/\n\/\/ Reference solution for PS2\n\/\/ Running on Rust 0.8\n\/\/\n\/\/ Special thanks to Kiet Tran for porting code from Rust 0.7 to Rust 0.8.\n\/\/\n\/\/ University of Virginia - cs4414 Fall 2013\n\/\/ Weilin Xu, Purnam Jantrania, David Evans\n\/\/ Version 0.2\n\/\/\n\nuse std::{io, run, os, path, libc};\nuse std::task;\n\nfn get_fd(fpath: &str, mode: &str) -> libc::c_int {\n #[fixed_stack_segment]; #[inline(never)];\n\n unsafe {\n let fpathbuf = fpath.to_c_str().unwrap();\n let modebuf = mode.to_c_str().unwrap();\n return libc::fileno(libc::fopen(fpathbuf, modebuf));\n }\n}\n\nfn exit(status: libc::c_int) {\n #[fixed_stack_segment]; #[inline(never)];\n unsafe { libc::exit(status); }\n}\n\nfn handle_cmd(cmd_line: &str, pipe_in: libc::c_int, pipe_out: libc::c_int, pipe_err: libc::c_int) {\n let mut out_fd = pipe_out;\n let mut in_fd = pipe_in;\n let err_fd = pipe_err;\n \n let mut argv: ~[~str] =\n cmd_line.split_iter(' ').filter_map(|x| if x != \"\" { Some(x.to_owned()) } else { None }).to_owned_vec();\n let mut i = 0;\n \/\/ found problem on redirection\n \/\/ `ping google.com | grep 1 > ping.txt &` didn't work\n \/\/ because grep won't flush the buffer until terminated (only) by SIGINT.\n while (i < argv.len()) {\n if (argv[i] == ~\">\") {\n argv.remove(i);\n out_fd = get_fd(argv.remove(i), \"w\");\n } else if (argv[i] == ~\"<\") {\n argv.remove(i);\n in_fd = get_fd(argv.remove(i), \"r\");\n }\n i += 1;\n }\n \n if argv.len() > 0 {\n let program = argv.remove(0);\n match program {\n ~\"help\" => {println(\"This is a new shell implemented in Rust!\")}\n ~\"cd\" => {if argv.len()>0 {os::change_dir(&path::PosixPath(argv[0]));}}\n \/\/global variable?\n \/\/~\"history\" => {for i in range(0, history.len()) {println(fmt!(\"%5u %s\", i+1, history[i]));}}\n ~\"exit\" => {exit(0);}\n _ => {let mut prog = run::Process::new(program, argv, run::ProcessOptions {\n env: None,\n dir: None,\n in_fd: Some(in_fd),\n out_fd: 
Some(out_fd),\n err_fd: Some(err_fd)\n });\n prog.finish();\n \/\/ close the pipes after process terminates.\n if in_fd != 0 {os::close(in_fd);}\n if out_fd != 1 {os::close(out_fd);}\n if err_fd != 2 {os::close(err_fd);}\n }\n }\/\/match\n }\/\/if\n}\n\nfn handle_cmdline(cmd_line:&str, bg_flag:bool)\n{\n \/\/ handle pipes\n let progs: ~[~str] =\n cmd_line.split_iter('|').filter_map(|x| if x != \"\" { Some(x.to_owned()) } else { None }).to_owned_vec();\n \n let mut pipes: ~[os::Pipe] = ~[];\n \n \/\/ create pipes\n if (progs.len() > 1) {\n for _ in range(0, progs.len()-1) {\n pipes.push(os::pipe());\n }\n }\n \n if progs.len() == 1 {\n if bg_flag == false { handle_cmd(progs[0], 0, 1, 2); }\n else {task::spawn_sched(task::SingleThreaded, ||{handle_cmd(progs[0], 0, 1, 2)});}\n } else {\n for i in range(0, progs.len()) {\n let prog = progs[i].to_owned();\n \n if i == 0 {\n let pipe_i = pipes[i];\n task::spawn_sched(task::SingleThreaded, ||{handle_cmd(prog, 0, pipe_i.out, 2)});\n } else if i == progs.len() - 1 {\n let pipe_i_1 = pipes[i-1];\n if bg_flag == true {\n task::spawn_sched(task::SingleThreaded, ||{handle_cmd(prog, pipe_i_1.input, 1, 2)});\n } else {\n handle_cmd(prog, pipe_i_1.input, 1, 2);\n }\n } else {\n let pipe_i = pipes[i];\n let pipe_i_1 = pipes[i-1];\n task::spawn_sched(task::SingleThreaded, ||{handle_cmd(prog, pipe_i_1.input, pipe_i.out, 2)});\n }\n }\n }\n}\n\nfn main() {\n static CMD_PROMPT: &'static str = \"gash > \";\n let mut history: ~[~str] = ~[];\n \n loop {\n print(CMD_PROMPT);\n \n let mut cmd_line = io::stdin().read_line();\n cmd_line = cmd_line.trim().to_owned();\n if cmd_line.len() > 0 {\n history.push(cmd_line.to_owned());\n }\n let mut bg_flag = false;\n if cmd_line.ends_with(\"&\") {\n cmd_line = cmd_line.trim_right_chars(&'&').to_owned();\n bg_flag = true;\n }\n \n if cmd_line == ~\"exit\" {\n break;\n } else if cmd_line == ~\"history\" {\n for i in range(0, history.len()) {\n println(fmt!(\"%5u %s\", i+1, history[i]));\n }\n } else {\n handle_cmdline(cmd_line, bg_flag);\n }\n }\n}<|endoftext|>"} {"text":"minor change so interrupt doesn't quit gash on the second input.<|endoftext|>"} {"text":"Add rust code for crypto-square project#![feature(ascii_ctype)]\nuse std::ascii::AsciiExt;\n\nfn matrix(length: usize) -> (usize, usize) {\n let s = (length as f64).sqrt().floor() as usize;\n\n if s * s == length {\n return (s, s);\n }\n\n if (s + 1) * s >= length {\n return (s + 1, s);\n }\n\n (s + 1, s + 1)\n}\n\npub fn encrypt(text: &str) -> String {\n let v = text.chars()\n .filter_map(|x| match x {\n _ if x.is_ascii_alphabetic() => Some(x.to_ascii_lowercase()),\n _ => None,\n })\n .collect::>();\n\n let (rows, columns) = matrix(v.len());\n\n let mut result = String::new();\n\n for i in 0..rows {\n for j in 0..columns {\n let p = j * rows + i;\n\n if p < v.len() {\n result.push(v[p]);\n }\n }\n\n if i != rows - 1 {\n result.push(' ');\n }\n }\n\n result\n}\n<|endoftext|>"} {"text":"#1 Added match expressions examplefn main() {\n let age = 49u32;\n match age {\n 1 => println!(\"got 1\"),\n 2 | 3 | 4 | 5 => println!(\"got something in between 2 and 5\"),\n 25 ... 50 => println!(\"got something in range of 25 and 50 (inclusive)\"),\n _ => println!(\"no one matches\")\n }\n}<|endoftext|>"} {"text":"Remove stale comments.<|endoftext|>"} {"text":" add a mio test with udp\/*\n This Source Code Form is subject to the terms of the Mozilla Public\n License, v. 2.0. 
If a copy of the MPL was not distributed with this\n file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n*\/\n\n\/\/#![feature(convert)]\n#![allow(dead_code)]\n\nextern crate mio;\n\nuse mio::*;\nuse mio::udp::UdpSocket;\n\n\nconst SERVER: mio::Token = mio::Token(0);\n\nstruct Test {socket: UdpSocket}\nimpl mio::Handler for Test {\n type Timeout = ();\n type Message = u64;\n\n fn ready(&mut self, _eloop: &mut mio::EventLoop, _token: mio::Token, event: mio::EventSet) {\n \/*\n println!(\"READY. Token is {:?}\", token);\n println!(\"EVENT: is_readable: {:?}, is_writable: {:?}, is_error: {:?}\",\n event.is_readable(), event.is_writable(), event.is_error()\n );\n *\/\n print!(\".\");\n\n if !event.is_readable() {\n return;\n } else { print!(\"\\n\") }\n\n loop {\n let mut buffer = [0u8; 65535];\n match self.socket.recv_from(buffer.as_mut()) {\n Ok(Some((len, source))) => {\n println!(\"** got {:?} bytes from {:?}\", len, source);\n println!(\"GOT: {}\", String::from_utf8_lossy(buffer.as_ref()))\n }\n Ok(None) => { println!(\"no data to read\"); return },\n Err(e) => {println!(\"error: {:?}\", e); return }\n }\n\n }\n\n }\n}\n\npub fn main() {\n\n let serv_addr : std::net::SocketAddr = \"127.0.0.1:4242\".parse().unwrap();\n\n let server = UdpSocket::bound(&serv_addr).unwrap();\n let client = UdpSocket::v4().unwrap();\n\n let mut eloop = mio::EventLoop::new().unwrap();\n eloop.register(&server, SERVER, EventSet::all(), PollOpt::level()).expect(\"error in the event loop setup\");\n\n\n std::thread::spawn(move || {\n std::thread::sleep(std::time::Duration::from_millis(5));\n println!(\"sending data to udp socket\");\n\n client.send_to(\"THIS IS DOG\".as_bytes(), &serv_addr).expect(\"send data\");\n let v = [\"hello\", \" \", \"world\", \" !\", \"how \", \"are \", \"you \", \"doing \", \"?\",\n \"loooooooooooooooooong message omgomgomgomogmomgomgomgomogm this is sooooooooooooooooooooo\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong 
loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n loooooooooooooooooong loooooooooooooooooong llooooooooooooooooooong llooooooooooooooooooong\n \"\n ];\n for string in v.iter() {\n client.send_to(string.as_bytes(), &serv_addr).expect(\"can't send data to socket\");\n }\n });\n\n\n println!(\"running server\");\n eloop.run(&mut Test{socket: server}).expect(\"cant' run the event loop\");\n}\n<|endoftext|>"} {"text":"\/**\n * The concurrency primitives you know and love.\n *\n * Maybe once we have a \"core exports x only to std\" mechanism, these can be\n * in std.\n *\/\n\nexport semaphore, new_semaphore;\n\n\/\/ FIXME (#3119) This shouldn't be a thing exported from core.\nimport arc::exclusive;\n\n\/\/ Each waiting task receives on one of these. FIXME #3125 make these oneshot.\ntype wait_end = pipes::port<()>;\ntype signal_end = pipes::chan<()>;\n\/\/ A doubly-ended queue of waiting tasks.\ntype waitqueue = { head: pipes::port,\n tail: pipes::chan };\n\nfn new_waiter() -> (signal_end, wait_end) { pipes::stream() }\n\n\/\/\/ A counting semaphore.\nenum semaphore = exclusive<{\n mut count: int,\n waiters: waitqueue,\n}>;\n\n\/\/\/ Create a new semaphore with the specified count.\nfn new_semaphore(count: int) -> semaphore {\n let (tail, head) = pipes::stream();\n semaphore(exclusive({ mut count: count,\n waiters: { head: head, tail: tail } }))\n}\n\nimpl semaphore for &semaphore {\n \/\/\/ Creates a new handle to the semaphore.\n fn clone() -> semaphore {\n semaphore((**self).clone())\n }\n\n \/**\n * Acquires a resource represented by the semaphore. Blocks if necessary\n * until resource(s) become available.\n *\/\n fn wait() {\n let mut waiter_nobe = none;\n unsafe {\n do (**self).with |state| {\n state.count -= 1;\n if state.count < 0 {\n let (signal_end,wait_end) = new_waiter();\n waiter_nobe = some(wait_end);\n \/\/ Enqueue ourself.\n state.waiters.tail.send(signal_end);\n }\n }\n }\n for 1000.times { task::yield(); }\n \/\/ Need to wait outside the exclusive.\n if waiter_nobe.is_some() {\n let _ = option::unwrap(waiter_nobe).recv();\n }\n }\n\n \/**\n * Release a held resource represented by the semaphore. Wakes a blocked\n * contending task, if any exist.\n *\/\n fn signal() {\n unsafe {\n do (**self).with |state| {\n state.count += 1;\n \/\/ The peek is mandatory to make sure recv doesn't block.\n if state.count >= 0 && state.waiters.head.peek() {\n \/\/ Pop off the waitqueue and send a wakeup signal. 
If the\n \/\/ waiter was killed, its port will have closed, and send\n \/\/ will fail. Keep trying until we get a live task.\n state.waiters.head.recv().send(());\n \/\/ to-do: use this version when it's ready, kill-friendly.\n \/\/ while !state.waiters.head.recv().try_send(()) { }\n }\n }\n }\n }\n\n \/\/\/ Runs a function with ownership of one of the semaphore's resources.\n fn access(blk: fn() -> U) -> U {\n self.wait();\n let _x = sem_release(self);\n blk()\n }\n}\n\n\/\/ FIXME(#3136) should go inside of access()\nstruct sem_release {\n sem: &semaphore;\n new(sem: &semaphore) { self.sem = sem; }\n drop { self.sem.signal(); }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn test_sem_as_mutex() {\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n do task::spawn {\n do s2.access {\n for 10.times { task::yield(); }\n }\n }\n do s.access {\n for 10.times { task::yield(); }\n }\n }\n #[test]\n fn test_sem_as_cvar() {\n \/* Child waits and parent signals *\/\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(0);\n let s2 = ~s.clone();\n do task::spawn {\n s2.wait();\n c.send(());\n }\n for 10.times { task::yield(); }\n s.signal();\n let _ = p.recv();\n \n \/* Parent waits and child signals *\/\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(0);\n let s2 = ~s.clone();\n do task::spawn {\n for 10.times { task::yield(); }\n s2.signal();\n let _ = p.recv();\n }\n s.wait();\n c.send(());\n }\n #[test]\n fn test_sem_mutual_exclusion() {\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n let sharedstate = ~0;\n let ptr = ptr::addr_of(*sharedstate);\n do task::spawn {\n let sharedstate = unsafe { unsafe::reinterpret_cast(ptr) };\n access_shared(sharedstate, s2, 10);\n c.send(());\n }\n access_shared(sharedstate, s, 10);\n let _ = p.recv();\n\n assert *sharedstate == 20;\n\n fn access_shared(sharedstate: &mut int, sem: &semaphore, n: uint) {\n for n.times {\n do sem.access {\n let oldval = *sharedstate;\n task::yield();\n *sharedstate = oldval + 1;\n }\n }\n }\n }\n #[test]\n fn test_sem_runtime_friendly_blocking() {\n do task::spawn_sched(task::manual_threads(1)) {\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n let (c,p) = pipes::stream();\n let child_data = ~mut some((s2,c));\n do s.access {\n let (s2,c) = option::swap_unwrap(child_data);\n do task::spawn {\n c.send(());\n do s2.access { }\n c.send(());\n }\n let _ = p.recv(); \/\/ wait for child to come alive\n for 5.times { task::yield(); } \/\/ let the child contend\n }\n let _ = p.recv(); \/\/ wait for child to be done\n }\n }\n}\nfix trailing whitespace\/**\n * The concurrency primitives you know and love.\n *\n * Maybe once we have a \"core exports x only to std\" mechanism, these can be\n * in std.\n *\/\n\nexport semaphore, new_semaphore;\n\n\/\/ FIXME (#3119) This shouldn't be a thing exported from core.\nimport arc::exclusive;\n\n\/\/ Each waiting task receives on one of these. 
FIXME #3125 make these oneshot.\ntype wait_end = pipes::port<()>;\ntype signal_end = pipes::chan<()>;\n\/\/ A doubly-ended queue of waiting tasks.\ntype waitqueue = { head: pipes::port,\n tail: pipes::chan };\n\nfn new_waiter() -> (signal_end, wait_end) { pipes::stream() }\n\n\/\/\/ A counting semaphore.\nenum semaphore = exclusive<{\n mut count: int,\n waiters: waitqueue,\n}>;\n\n\/\/\/ Create a new semaphore with the specified count.\nfn new_semaphore(count: int) -> semaphore {\n let (tail, head) = pipes::stream();\n semaphore(exclusive({ mut count: count,\n waiters: { head: head, tail: tail } }))\n}\n\nimpl semaphore for &semaphore {\n \/\/\/ Creates a new handle to the semaphore.\n fn clone() -> semaphore {\n semaphore((**self).clone())\n }\n\n \/**\n * Acquires a resource represented by the semaphore. Blocks if necessary\n * until resource(s) become available.\n *\/\n fn wait() {\n let mut waiter_nobe = none;\n unsafe {\n do (**self).with |state| {\n state.count -= 1;\n if state.count < 0 {\n let (signal_end,wait_end) = new_waiter();\n waiter_nobe = some(wait_end);\n \/\/ Enqueue ourself.\n state.waiters.tail.send(signal_end);\n }\n }\n }\n for 1000.times { task::yield(); }\n \/\/ Need to wait outside the exclusive.\n if waiter_nobe.is_some() {\n let _ = option::unwrap(waiter_nobe).recv();\n }\n }\n\n \/**\n * Release a held resource represented by the semaphore. Wakes a blocked\n * contending task, if any exist.\n *\/\n fn signal() {\n unsafe {\n do (**self).with |state| {\n state.count += 1;\n \/\/ The peek is mandatory to make sure recv doesn't block.\n if state.count >= 0 && state.waiters.head.peek() {\n \/\/ Pop off the waitqueue and send a wakeup signal. If the\n \/\/ waiter was killed, its port will have closed, and send\n \/\/ will fail. 
Keep trying until we get a live task.\n state.waiters.head.recv().send(());\n \/\/ to-do: use this version when it's ready, kill-friendly.\n \/\/ while !state.waiters.head.recv().try_send(()) { }\n }\n }\n }\n }\n\n \/\/\/ Runs a function with ownership of one of the semaphore's resources.\n fn access(blk: fn() -> U) -> U {\n self.wait();\n let _x = sem_release(self);\n blk()\n }\n}\n\n\/\/ FIXME(#3136) should go inside of access()\nstruct sem_release {\n sem: &semaphore;\n new(sem: &semaphore) { self.sem = sem; }\n drop { self.sem.signal(); }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn test_sem_as_mutex() {\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n do task::spawn {\n do s2.access {\n for 10.times { task::yield(); }\n }\n }\n do s.access {\n for 10.times { task::yield(); }\n }\n }\n #[test]\n fn test_sem_as_cvar() {\n \/* Child waits and parent signals *\/\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(0);\n let s2 = ~s.clone();\n do task::spawn {\n s2.wait();\n c.send(());\n }\n for 10.times { task::yield(); }\n s.signal();\n let _ = p.recv();\n\n \/* Parent waits and child signals *\/\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(0);\n let s2 = ~s.clone();\n do task::spawn {\n for 10.times { task::yield(); }\n s2.signal();\n let _ = p.recv();\n }\n s.wait();\n c.send(());\n }\n #[test]\n fn test_sem_mutual_exclusion() {\n let (c,p) = pipes::stream();\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n let sharedstate = ~0;\n let ptr = ptr::addr_of(*sharedstate);\n do task::spawn {\n let sharedstate = unsafe { unsafe::reinterpret_cast(ptr) };\n access_shared(sharedstate, s2, 10);\n c.send(());\n }\n access_shared(sharedstate, s, 10);\n let _ = p.recv();\n\n assert *sharedstate == 20;\n\n fn access_shared(sharedstate: &mut int, sem: &semaphore, n: uint) {\n for n.times {\n do sem.access {\n let oldval = *sharedstate;\n task::yield();\n *sharedstate = oldval + 1;\n }\n }\n }\n }\n #[test]\n fn test_sem_runtime_friendly_blocking() {\n do task::spawn_sched(task::manual_threads(1)) {\n let s = ~new_semaphore(1);\n let s2 = ~s.clone();\n let (c,p) = pipes::stream();\n let child_data = ~mut some((s2,c));\n do s.access {\n let (s2,c) = option::swap_unwrap(child_data);\n do task::spawn {\n c.send(());\n do s2.access { }\n c.send(());\n }\n let _ = p.recv(); \/\/ wait for child to come alive\n for 5.times { task::yield(); } \/\/ let the child contend\n }\n let _ = p.recv(); \/\/ wait for child to be done\n }\n }\n}\n<|endoftext|>"} {"text":"use std::fs::{self, File};\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\nuse std::default::Default;\nuse std::io::Write;\nuse error::Result;\nuse chrono::{DateTime, FixedOffset};\nuse rss;\n\nuse liquid::{Renderable, LiquidOptions, Context, Value};\n\nuse pulldown_cmark as cmark;\nuse liquid;\n\n#[derive(Debug)]\npub struct Document {\n pub name: String,\n pub path: String,\n pub attributes: HashMap,\n pub content: String,\n pub is_post: bool,\n pub date: Option>,\n markdown: bool,\n}\n\nimpl Document {\n pub fn new(name: String,\n path: String,\n attributes: HashMap,\n content: String,\n is_post: bool,\n date: Option>,\n markdown: bool)\n -> Document {\n Document {\n name: name,\n path: path,\n attributes: attributes,\n content: content,\n is_post: is_post,\n date: date,\n markdown: markdown,\n }\n }\n\n \/\/\/ Metadata for generating RSS feeds\n pub fn to_rss(&self, root_url: &str) -> rss::Item {\n rss::Item {\n title: self.attributes.get(\"title\").map(|s| s.to_owned()),\n link: Some(root_url.to_owned() 
+ &self.path),\n pub_date: self.date.map(|date| date.to_rfc2822()),\n description: self.attributes.get(\"description\").map(|s| s.to_owned()),\n ..Default::default()\n }\n }\n\n \/\/\/ Attributes that are injected into the template when rendering\n pub fn get_attributes(&self) -> HashMap {\n let mut data = HashMap::new();\n data.insert(\"name\".to_owned(), Value::Str(self.name.clone()));\n data.insert(\"path\".to_owned(), Value::Str(self.path.clone()));\n for key in self.attributes.keys() {\n if let Some(val) = self.attributes.get(key) {\n data.insert(key.to_owned(), Value::Str(val.clone()));\n }\n }\n data\n }\n\n pub fn as_html(&self, post_data: &Vec) -> Result {\n let options: LiquidOptions = Default::default();\n let template = try!(liquid::parse(&self.content, options));\n\n \/\/ TODO: pass in documents as template data if as_html is called on Index\n \/\/ Document..\n let mut data = Context::with_values(self.get_attributes());\n data.set_val(\"posts\", Value::Array(post_data.clone()));\n\n Ok(try!(template.render(&mut data)).unwrap_or(String::new()))\n }\n\n pub fn create_file(&self,\n dest: &Path,\n layouts: &HashMap,\n post_data: &Vec)\n -> Result<()> {\n \/\/ construct target path\n let mut file_path_buf = PathBuf::new();\n file_path_buf.push(dest);\n file_path_buf.push(&self.path);\n file_path_buf.set_extension(\"html\");\n\n let file_path = file_path_buf.as_path();\n\n let layout_path = try!(self.attributes\n .get(&\"extends\".to_owned())\n .ok_or(format!(\"No extends property in {}\", self.name)));\n\n let layout = try!(layouts.get(layout_path)\n .ok_or(format!(\"Layout {} can not be found (defined in {})\",\n layout_path,\n self.name)));\n\n \/\/ create target directories if any exist\n file_path.parent().map(|p| fs::create_dir_all(p));\n\n let mut file = try!(File::create(&file_path));\n\n \/\/ Insert the attributes into the layout template\n \/\/ TODO we're currently calling get_attributes twice on each document render, can we get it\n \/\/ to a single call?\n let mut data = Context::with_values(self.get_attributes());\n\n \/\/ compile with liquid\n let mut html = try!(self.as_html(post_data));\n\n if self.markdown {\n html = {\n let mut buf = String::new();\n let parser = cmark::Parser::new(&html);\n cmark::html::push_html(&mut buf, parser);\n buf\n };\n }\n\n data.set_val(\"content\", Value::Str(html));\n\n let options: LiquidOptions = Default::default();\n\n let template = try!(liquid::parse(&layout, options));\n\n let res = try!(template.render(&mut data)).unwrap_or(String::new());\n\n try!(file.write_all(&res.into_bytes()));\n info!(\"Created {}\", file_path.display());\n Ok(())\n }\n}\nadded is_post gobal to allow users to detect if they are a post or notuse std::fs::{self, File};\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\nuse std::default::Default;\nuse std::io::Write;\nuse error::Result;\nuse chrono::{DateTime, FixedOffset};\nuse rss;\n\nuse liquid::{Renderable, LiquidOptions, Context, Value};\n\nuse pulldown_cmark as cmark;\nuse liquid;\n\n#[derive(Debug)]\npub struct Document {\n pub name: String,\n pub path: String,\n pub attributes: HashMap,\n pub content: String,\n pub is_post: bool,\n pub date: Option>,\n markdown: bool,\n}\n\nimpl Document {\n pub fn new(name: String,\n path: String,\n attributes: HashMap,\n content: String,\n is_post: bool,\n date: Option>,\n markdown: bool)\n -> Document {\n Document {\n name: name,\n path: path,\n attributes: attributes,\n content: content,\n is_post: is_post,\n date: date,\n markdown: markdown,\n }\n 
}\n\n \/\/\/ Metadata for generating RSS feeds\n pub fn to_rss(&self, root_url: &str) -> rss::Item {\n rss::Item {\n title: self.attributes.get(\"title\").map(|s| s.to_owned()),\n link: Some(root_url.to_owned() + &self.path),\n pub_date: self.date.map(|date| date.to_rfc2822()),\n description: self.attributes.get(\"description\").map(|s| s.to_owned()),\n ..Default::default()\n }\n }\n\n \/\/\/ Attributes that are injected into the template when rendering\n pub fn get_attributes(&self) -> HashMap {\n let mut data = HashMap::new();\n data.insert(\"name\".to_owned(), Value::Str(self.name.clone()));\n data.insert(\"path\".to_owned(), Value::Str(self.path.clone()));\n for key in self.attributes.keys() {\n if let Some(val) = self.attributes.get(key) {\n data.insert(key.to_owned(), Value::Str(val.clone()));\n }\n }\n data\n }\n\n pub fn as_html(&self, post_data: &Vec) -> Result {\n let options: LiquidOptions = Default::default();\n let template = try!(liquid::parse(&self.content, options));\n\n \/\/ TODO: pass in documents as template data if as_html is called on Index\n \/\/ Document..\n let mut data = Context::with_values(self.get_attributes());\n data.set_val(\"posts\", Value::Array(post_data.clone()));\n\n Ok(try!(template.render(&mut data)).unwrap_or(String::new()))\n }\n\n pub fn create_file(&self,\n dest: &Path,\n layouts: &HashMap,\n post_data: &Vec)\n -> Result<()> {\n \/\/ construct target path\n let mut file_path_buf = PathBuf::new();\n file_path_buf.push(dest);\n file_path_buf.push(&self.path);\n file_path_buf.set_extension(\"html\");\n\n let file_path = file_path_buf.as_path();\n\n let layout_path = try!(self.attributes\n .get(&\"extends\".to_owned())\n .ok_or(format!(\"No extends property in {}\", self.name)));\n\n let layout = try!(layouts.get(layout_path)\n .ok_or(format!(\"Layout {} can not be found (defined in {})\",\n layout_path,\n self.name)));\n\n \/\/ create target directories if any exist\n file_path.parent().map(|p| fs::create_dir_all(p));\n\n let mut file = try!(File::create(&file_path));\n\n \/\/ Insert the attributes into the layout template\n \/\/ TODO we're currently calling get_attributes twice on each document render, can we get it\n \/\/ to a single call?\n let mut data = Context::with_values(self.get_attributes());\n\n \/\/ compile with liquid\n let mut html = try!(self.as_html(post_data));\n\n if self.markdown {\n html = {\n let mut buf = String::new();\n let parser = cmark::Parser::new(&html);\n cmark::html::push_html(&mut buf, parser);\n buf\n };\n }\n\n data.set_val(\"content\", Value::Str(html));\n if self.is_post {\n data.set_val(\"is_post\", Value::Bool(true));\n }\n\n let options: LiquidOptions = Default::default();\n\n let template = try!(liquid::parse(&layout, options));\n\n let res = try!(template.render(&mut data)).unwrap_or(String::new());\n\n try!(file.write_all(&res.into_bytes()));\n info!(\"Created {}\", file_path.display());\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"Make sure numbers are always little-endian when encoded<|endoftext|>"} {"text":"(doc) Documented iron\/mod.<|endoftext|>"} {"text":"Initialise find.rs<|endoftext|>"} {"text":"completed communicator snippet with test example and with example of use keyword usage<|endoftext|>"} {"text":"\/\/ Support code for rustc's built in test runner generator. Currently,\n\/\/ none of this is meant for users. 
It is intended to support the\n\/\/ simplest interface possible for representing and running tests\n\/\/ while providing a base that other test frameworks may build off of.\n\nimport generic_os::getenv;\nimport task::task;\n\nexport test_name;\nexport test_fn;\nexport default_test_fn;\nexport test_desc;\nexport test_main;\nexport test_result;\nexport test_opts;\nexport tr_ok;\nexport tr_failed;\nexport tr_ignored;\nexport run_tests_console;\nexport run_tests_console_;\nexport run_test;\nexport filter_tests;\nexport parse_opts;\nexport test_to_task;\nexport default_test_to_task;\nexport configure_test_task;\nexport joinable;\n\nnative \"rust\" mod rustrt {\n fn sched_threads() -> uint;\n}\n\n\n\/\/ The name of a test. By convention this follows the rules for rust\n\/\/ paths; i.e. it should be a series of identifiers seperated by double\n\/\/ colons. This way if some test runner wants to arrange the tests\n\/\/ hierarchically it may.\ntype test_name = str;\n\n\/\/ A function that runs a test. If the function returns successfully,\n\/\/ the test succeeds; if the function fails then the test fails. We\n\/\/ may need to come up with a more clever definition of test in order\n\/\/ to support isolation of tests into tasks.\ntype test_fn<@T> = T;\n\ntype default_test_fn = test_fn;\n\n\/\/ The definition of a single test. A test runner will run a list of\n\/\/ these.\ntype test_desc<@T> = {\n name: test_name,\n fn: test_fn,\n ignore: bool\n};\n\n\/\/ The default console test runner. It accepts the command line\n\/\/ arguments and a vector of test_descs (generated at compile time).\nfn test_main(args: [str], tests: [test_desc]) {\n check (vec::is_not_empty(args));\n let opts =\n alt parse_opts(args) {\n either::left(o) { o }\n either::right(m) { fail m }\n };\n if !run_tests_console(opts, tests) { fail \"Some tests failed\"; }\n}\n\ntype test_opts = {filter: option::t, run_ignored: bool};\n\ntype opt_res = either::t;\n\n\/\/ Parses command line arguments into test options\nfn parse_opts(args: [str]) : vec::is_not_empty(args) -> opt_res {\n\n let args_ = vec::tail(args);\n let opts = [getopts::optflag(\"ignored\")];\n let match =\n alt getopts::getopts(args_, opts) {\n getopts::success(m) { m }\n getopts::failure(f) { ret either::right(getopts::fail_str(f)) }\n };\n\n let filter =\n if vec::len(match.free) > 0u {\n option::some(match.free[0])\n } else { option::none };\n\n let run_ignored = getopts::opt_present(match, \"ignored\");\n\n let test_opts = {filter: filter, run_ignored: run_ignored};\n\n ret either::left(test_opts);\n}\n\ntag test_result { tr_ok; tr_failed; tr_ignored; }\n\ntype joinable = (task, comm::port);\n\n\/\/ To get isolation and concurrency tests have to be run in their own tasks.\n\/\/ In cases where test functions are closures it is not ok to just dump them\n\/\/ into a task and run them, so this transformation gives the caller a chance\n\/\/ to create the test task.\ntype test_to_task<@T> = fn@(test_fn) -> joinable;\n\n\/\/ A simple console test runner\nfn run_tests_console(opts: test_opts,\n tests: [test_desc]) -> bool {\n run_tests_console_(opts, tests, default_test_to_task)\n}\n\nfn run_tests_console_<@T>(opts: test_opts, tests: [test_desc],\n to_task: test_to_task) -> bool {\n\n type test_state =\n @{out: io::writer,\n use_color: bool,\n mutable total: uint,\n mutable passed: uint,\n mutable failed: uint,\n mutable ignored: uint,\n mutable failures: [test_desc]};\n\n fn callback<@T>(event: testevent, st: test_state) {\n alt event {\n te_filtered(filtered_tests) {\n 
st.total = vec::len(filtered_tests);\n st.out.write_line(#fmt[\"\\nrunning %u tests\", st.total]);\n }\n te_wait(test) { st.out.write_str(#fmt[\"test %s ... \", test.name]); }\n te_result(test, result) {\n alt result {\n tr_ok. {\n st.passed += 1u;\n write_ok(st.out, st.use_color);\n st.out.write_line(\"\");\n }\n tr_failed. {\n st.failed += 1u;\n write_failed(st.out, st.use_color);\n st.out.write_line(\"\");\n st.failures += [test];\n }\n tr_ignored. {\n st.ignored += 1u;\n write_ignored(st.out, st.use_color);\n st.out.write_line(\"\");\n }\n }\n }\n }\n }\n\n let st =\n @{out: io::stdout(),\n use_color: use_color(),\n mutable total: 0u,\n mutable passed: 0u,\n mutable failed: 0u,\n mutable ignored: 0u,\n mutable failures: []};\n\n run_tests(opts, tests, to_task, bind callback(_, st));\n\n assert (st.passed + st.failed + st.ignored == st.total);\n let success = st.failed == 0u;\n\n if !success {\n st.out.write_line(\"\\nfailures:\");\n for test: test_desc in st.failures {\n let testname = test.name; \/\/ Satisfy alias analysis\n st.out.write_line(#fmt[\" %s\", testname]);\n }\n }\n\n st.out.write_str(#fmt[\"\\nresult: \"]);\n if success {\n \/\/ There's no parallelism at this point so it's safe to use color\n write_ok(st.out, true);\n } else { write_failed(st.out, true); }\n st.out.write_str(#fmt[\". %u passed; %u failed; %u ignored\\n\\n\", st.passed,\n st.failed, st.ignored]);\n\n ret success;\n\n fn write_ok(out: io::writer, use_color: bool) {\n write_pretty(out, \"ok\", term::color_green, use_color);\n }\n\n fn write_failed(out: io::writer, use_color: bool) {\n write_pretty(out, \"FAILED\", term::color_red, use_color);\n }\n\n fn write_ignored(out: io::writer, use_color: bool) {\n write_pretty(out, \"ignored\", term::color_yellow, use_color);\n }\n\n fn write_pretty(out: io::writer, word: str, color: u8, use_color: bool) {\n if use_color && term::color_supported() {\n term::fg(out.get_buf_writer(), color);\n }\n out.write_str(word);\n if use_color && term::color_supported() {\n term::reset(out.get_buf_writer());\n }\n }\n}\n\nfn use_color() -> bool { ret get_concurrency() == 1u; }\n\ntag testevent<@T> {\n te_filtered([test_desc]);\n te_wait(test_desc);\n te_result(test_desc, test_result);\n}\n\nfn run_tests<@T>(opts: test_opts, tests: [test_desc],\n to_task: test_to_task,\n callback: fn@(testevent)) {\n\n let filtered_tests = filter_tests(opts, tests);\n\n callback(te_filtered(filtered_tests));\n\n \/\/ It's tempting to just spawn all the tests at once but that doesn't\n \/\/ provide a great user experience because you might sit waiting for the\n \/\/ result of a particular test for an unusually long amount of time.\n let concurrency = get_concurrency();\n log #fmt[\"using %u test tasks\", concurrency];\n let total = vec::len(filtered_tests);\n let run_idx = 0u;\n let wait_idx = 0u;\n let futures = [];\n\n while wait_idx < total {\n while vec::len(futures) < concurrency && run_idx < total {\n futures += [run_test(filtered_tests[run_idx], to_task)];\n run_idx += 1u;\n }\n\n let future = futures[0];\n callback(te_wait(future.test));\n let result = future.wait();\n callback(te_result(future.test, result));\n futures = vec::slice(futures, 1u, vec::len(futures));\n wait_idx += 1u;\n }\n}\n\nfn get_concurrency() -> uint { rustrt::sched_threads() }\n\nfn filter_tests<@T>(opts: test_opts,\n tests: [test_desc]) -> [test_desc] {\n let filtered = tests;\n\n \/\/ Remove tests that don't match the test filter\n filtered = if option::is_none(opts.filter) {\n filtered\n } else {\n let filter_str =\n 
alt opts.filter {\n option::some(f) { f }\n option::none. { \"\" }\n };\n\n fn filter_fn<@T>(test: test_desc, filter_str: str) ->\n option::t> {\n if str::find(test.name, filter_str) >= 0 {\n ret option::some(test);\n } else { ret option::none; }\n }\n\n let filter = bind filter_fn(_, filter_str);\n\n vec::filter_map(filter, filtered)\n };\n\n \/\/ Maybe pull out the ignored test and unignore them\n filtered = if !opts.run_ignored {\n filtered\n } else {\n fn filter<@T>(test: test_desc) -> option::t> {\n if test.ignore {\n ret option::some({name: test.name,\n fn: test.fn,\n ignore: false});\n } else { ret option::none; }\n };\n\n vec::filter_map(bind filter(_), filtered)\n };\n\n \/\/ Sort the tests alphabetically\n filtered =\n {\n fn lteq<@T>(t1: test_desc, t2: test_desc) -> bool {\n str::lteq(t1.name, t2.name)\n }\n sort::merge_sort(bind lteq(_, _), filtered)\n };\n\n ret filtered;\n}\n\ntype test_future<@T> = {test: test_desc, wait: fn@() -> test_result};\n\nfn run_test<@T>(test: test_desc,\n to_task: test_to_task) -> test_future {\n if !test.ignore {\n let test_task = to_task(test.fn);\n ret {test: test,\n wait:\n bind fn (test_task: joinable) -> test_result {\n alt task::join(test_task) {\n task::tr_success. { tr_ok }\n task::tr_failure. { tr_failed }\n }\n }(test_task)};\n } else { ret {test: test, wait: fn () -> test_result { tr_ignored }}; }\n}\n\n\/\/ We need to run our tests in another task in order to trap test failures.\n\/\/ This function only works with functions that don't contain closures.\nfn default_test_to_task(&&f: default_test_fn) -> joinable {\n fn run_task(f: default_test_fn) {\n configure_test_task();\n f();\n }\n ret task::spawn_joinable(f, run_task);\n}\n\n\/\/ Call from within a test task to make sure it's set up correctly\nfn configure_test_task() {\n \/\/ If this task fails we don't want that failure to propagate to the\n \/\/ test runner or else we couldn't keep running tests\n task::unsupervise();\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C .. 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\nmigrate sched_threads call\/\/ Support code for rustc's built in test runner generator. Currently,\n\/\/ none of this is meant for users. It is intended to support the\n\/\/ simplest interface possible for representing and running tests\n\/\/ while providing a base that other test frameworks may build off of.\n\nimport generic_os::getenv;\nimport task::task;\n\nexport test_name;\nexport test_fn;\nexport default_test_fn;\nexport test_desc;\nexport test_main;\nexport test_result;\nexport test_opts;\nexport tr_ok;\nexport tr_failed;\nexport tr_ignored;\nexport run_tests_console;\nexport run_tests_console_;\nexport run_test;\nexport filter_tests;\nexport parse_opts;\nexport test_to_task;\nexport default_test_to_task;\nexport configure_test_task;\nexport joinable;\n\nnative \"c-stack-cdecl\" mod rustrt {\n fn sched_threads() -> uint;\n}\n\n\n\/\/ The name of a test. By convention this follows the rules for rust\n\/\/ paths; i.e. it should be a series of identifiers seperated by double\n\/\/ colons. This way if some test runner wants to arrange the tests\n\/\/ hierarchically it may.\ntype test_name = str;\n\n\/\/ A function that runs a test. If the function returns successfully,\n\/\/ the test succeeds; if the function fails then the test fails. 
We\n\/\/ may need to come up with a more clever definition of test in order\n\/\/ to support isolation of tests into tasks.\ntype test_fn<@T> = T;\n\ntype default_test_fn = test_fn;\n\n\/\/ The definition of a single test. A test runner will run a list of\n\/\/ these.\ntype test_desc<@T> = {\n name: test_name,\n fn: test_fn,\n ignore: bool\n};\n\n\/\/ The default console test runner. It accepts the command line\n\/\/ arguments and a vector of test_descs (generated at compile time).\nfn test_main(args: [str], tests: [test_desc]) {\n check (vec::is_not_empty(args));\n let opts =\n alt parse_opts(args) {\n either::left(o) { o }\n either::right(m) { fail m }\n };\n if !run_tests_console(opts, tests) { fail \"Some tests failed\"; }\n}\n\ntype test_opts = {filter: option::t, run_ignored: bool};\n\ntype opt_res = either::t;\n\n\/\/ Parses command line arguments into test options\nfn parse_opts(args: [str]) : vec::is_not_empty(args) -> opt_res {\n\n let args_ = vec::tail(args);\n let opts = [getopts::optflag(\"ignored\")];\n let match =\n alt getopts::getopts(args_, opts) {\n getopts::success(m) { m }\n getopts::failure(f) { ret either::right(getopts::fail_str(f)) }\n };\n\n let filter =\n if vec::len(match.free) > 0u {\n option::some(match.free[0])\n } else { option::none };\n\n let run_ignored = getopts::opt_present(match, \"ignored\");\n\n let test_opts = {filter: filter, run_ignored: run_ignored};\n\n ret either::left(test_opts);\n}\n\ntag test_result { tr_ok; tr_failed; tr_ignored; }\n\ntype joinable = (task, comm::port);\n\n\/\/ To get isolation and concurrency tests have to be run in their own tasks.\n\/\/ In cases where test functions are closures it is not ok to just dump them\n\/\/ into a task and run them, so this transformation gives the caller a chance\n\/\/ to create the test task.\ntype test_to_task<@T> = fn@(test_fn) -> joinable;\n\n\/\/ A simple console test runner\nfn run_tests_console(opts: test_opts,\n tests: [test_desc]) -> bool {\n run_tests_console_(opts, tests, default_test_to_task)\n}\n\nfn run_tests_console_<@T>(opts: test_opts, tests: [test_desc],\n to_task: test_to_task) -> bool {\n\n type test_state =\n @{out: io::writer,\n use_color: bool,\n mutable total: uint,\n mutable passed: uint,\n mutable failed: uint,\n mutable ignored: uint,\n mutable failures: [test_desc]};\n\n fn callback<@T>(event: testevent, st: test_state) {\n alt event {\n te_filtered(filtered_tests) {\n st.total = vec::len(filtered_tests);\n st.out.write_line(#fmt[\"\\nrunning %u tests\", st.total]);\n }\n te_wait(test) { st.out.write_str(#fmt[\"test %s ... \", test.name]); }\n te_result(test, result) {\n alt result {\n tr_ok. {\n st.passed += 1u;\n write_ok(st.out, st.use_color);\n st.out.write_line(\"\");\n }\n tr_failed. {\n st.failed += 1u;\n write_failed(st.out, st.use_color);\n st.out.write_line(\"\");\n st.failures += [test];\n }\n tr_ignored. 
{\n st.ignored += 1u;\n write_ignored(st.out, st.use_color);\n st.out.write_line(\"\");\n }\n }\n }\n }\n }\n\n let st =\n @{out: io::stdout(),\n use_color: use_color(),\n mutable total: 0u,\n mutable passed: 0u,\n mutable failed: 0u,\n mutable ignored: 0u,\n mutable failures: []};\n\n run_tests(opts, tests, to_task, bind callback(_, st));\n\n assert (st.passed + st.failed + st.ignored == st.total);\n let success = st.failed == 0u;\n\n if !success {\n st.out.write_line(\"\\nfailures:\");\n for test: test_desc in st.failures {\n let testname = test.name; \/\/ Satisfy alias analysis\n st.out.write_line(#fmt[\" %s\", testname]);\n }\n }\n\n st.out.write_str(#fmt[\"\\nresult: \"]);\n if success {\n \/\/ There's no parallelism at this point so it's safe to use color\n write_ok(st.out, true);\n } else { write_failed(st.out, true); }\n st.out.write_str(#fmt[\". %u passed; %u failed; %u ignored\\n\\n\", st.passed,\n st.failed, st.ignored]);\n\n ret success;\n\n fn write_ok(out: io::writer, use_color: bool) {\n write_pretty(out, \"ok\", term::color_green, use_color);\n }\n\n fn write_failed(out: io::writer, use_color: bool) {\n write_pretty(out, \"FAILED\", term::color_red, use_color);\n }\n\n fn write_ignored(out: io::writer, use_color: bool) {\n write_pretty(out, \"ignored\", term::color_yellow, use_color);\n }\n\n fn write_pretty(out: io::writer, word: str, color: u8, use_color: bool) {\n if use_color && term::color_supported() {\n term::fg(out.get_buf_writer(), color);\n }\n out.write_str(word);\n if use_color && term::color_supported() {\n term::reset(out.get_buf_writer());\n }\n }\n}\n\nfn use_color() -> bool { ret get_concurrency() == 1u; }\n\ntag testevent<@T> {\n te_filtered([test_desc]);\n te_wait(test_desc);\n te_result(test_desc, test_result);\n}\n\nfn run_tests<@T>(opts: test_opts, tests: [test_desc],\n to_task: test_to_task,\n callback: fn@(testevent)) {\n\n let filtered_tests = filter_tests(opts, tests);\n\n callback(te_filtered(filtered_tests));\n\n \/\/ It's tempting to just spawn all the tests at once but that doesn't\n \/\/ provide a great user experience because you might sit waiting for the\n \/\/ result of a particular test for an unusually long amount of time.\n let concurrency = get_concurrency();\n log #fmt[\"using %u test tasks\", concurrency];\n let total = vec::len(filtered_tests);\n let run_idx = 0u;\n let wait_idx = 0u;\n let futures = [];\n\n while wait_idx < total {\n while vec::len(futures) < concurrency && run_idx < total {\n futures += [run_test(filtered_tests[run_idx], to_task)];\n run_idx += 1u;\n }\n\n let future = futures[0];\n callback(te_wait(future.test));\n let result = future.wait();\n callback(te_result(future.test, result));\n futures = vec::slice(futures, 1u, vec::len(futures));\n wait_idx += 1u;\n }\n}\n\nfn get_concurrency() -> uint { rustrt::sched_threads() }\n\nfn filter_tests<@T>(opts: test_opts,\n tests: [test_desc]) -> [test_desc] {\n let filtered = tests;\n\n \/\/ Remove tests that don't match the test filter\n filtered = if option::is_none(opts.filter) {\n filtered\n } else {\n let filter_str =\n alt opts.filter {\n option::some(f) { f }\n option::none. 
{ \"\" }\n };\n\n fn filter_fn<@T>(test: test_desc, filter_str: str) ->\n option::t> {\n if str::find(test.name, filter_str) >= 0 {\n ret option::some(test);\n } else { ret option::none; }\n }\n\n let filter = bind filter_fn(_, filter_str);\n\n vec::filter_map(filter, filtered)\n };\n\n \/\/ Maybe pull out the ignored test and unignore them\n filtered = if !opts.run_ignored {\n filtered\n } else {\n fn filter<@T>(test: test_desc) -> option::t> {\n if test.ignore {\n ret option::some({name: test.name,\n fn: test.fn,\n ignore: false});\n } else { ret option::none; }\n };\n\n vec::filter_map(bind filter(_), filtered)\n };\n\n \/\/ Sort the tests alphabetically\n filtered =\n {\n fn lteq<@T>(t1: test_desc, t2: test_desc) -> bool {\n str::lteq(t1.name, t2.name)\n }\n sort::merge_sort(bind lteq(_, _), filtered)\n };\n\n ret filtered;\n}\n\ntype test_future<@T> = {test: test_desc, wait: fn@() -> test_result};\n\nfn run_test<@T>(test: test_desc,\n to_task: test_to_task) -> test_future {\n if !test.ignore {\n let test_task = to_task(test.fn);\n ret {test: test,\n wait:\n bind fn (test_task: joinable) -> test_result {\n alt task::join(test_task) {\n task::tr_success. { tr_ok }\n task::tr_failure. { tr_failed }\n }\n }(test_task)};\n } else { ret {test: test, wait: fn () -> test_result { tr_ignored }}; }\n}\n\n\/\/ We need to run our tests in another task in order to trap test failures.\n\/\/ This function only works with functions that don't contain closures.\nfn default_test_to_task(&&f: default_test_fn) -> joinable {\n fn run_task(f: default_test_fn) {\n configure_test_task();\n f();\n }\n ret task::spawn_joinable(f, run_task);\n}\n\n\/\/ Call from within a test task to make sure it's set up correctly\nfn configure_test_task() {\n \/\/ If this task fails we don't want that failure to propagate to the\n \/\/ test runner or else we couldn't keep running tests\n task::unsupervise();\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C .. 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\n<|endoftext|>"} {"text":"\/\/! Data producers\n\/\/!\n\/\/! The goal of data producers is to parse data as soon as it is generated.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```ignore\n\/\/! use std::str;\n\/\/! fn local_print<'a,T: Debug>(input: T) -> IResult {\n\/\/! println!(\"{:?}\", input);\n\/\/! Done(input, ())\n\/\/! }\n\/\/! \/\/ create a data producer from a file\n\/\/! FileProducer::new(\"links.txt\", 20).map(|producer: FileProducer| {\n\/\/! let mut p = producer;\n\/\/!\n\/\/! \/\/ create the parsing function\n\/\/! fn parser(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n\/\/! par.map_res(str::from_utf8).flat_map(local_print);\n\/\/! Done(\"\".as_bytes(), ())\n\/\/! }\n\/\/!\n\/\/! \/\/ adapt the parsing function to the producer\n\/\/! pusher!(push, parser);\n\/\/! \/\/ get started\n\/\/! push(&mut p);\n\/\/! });\n\/\/! ```\n\nuse internal::*;\nuse self::ProducerState::*;\n\nuse std::io::fs::File;\nuse std::io::{IoResult, IoErrorKind};\n\n\n\/\/\/ Holds the data producer's current state\n\/\/\/\n\/\/\/ * Eof indicates all data has been parsed, and contains the parser's result\n\/\/\/\n\/\/\/ * Continue indicates that more data is needed and should be available,\n\/\/\/ but not right now. 
Parsing should resume at some point.\n\/\/\/\n\/\/\/ * Data contains already parsed data\n\/\/\/\n\/\/\/ * ProducerError indicates something went wrong\n#[derive(Debug,PartialEq,Eq)]\npub enum ProducerState {\n Eof(O),\n Continue,\n Data(O),\n ProducerError(Err),\n}\n\n\/\/\/ A producer implements the produce method, currently working with u8 arrays\npub trait Producer {\n fn produce(&mut self) -> ProducerState<&[u8]>;\n}\n\n\/\/\/ Can produce data from a file\n\/\/\/\n\/\/\/ the size field is the size of v, the internal buffer\npub struct FileProducer {\n size: usize,\n file: File,\n v: Vec\n}\n\nimpl FileProducer {\n pub fn new(filename: &str, buffer_size: usize) -> IoResult {\n File::open(&Path::new(filename)).map(|f| {\n FileProducer {size: buffer_size, file: f, v: Vec::with_capacity(buffer_size)}\n })\n }\n}\n\nimpl Producer for FileProducer {\n fn produce(&mut self) -> ProducerState<&[u8]> {\n \/\/let mut v = Vec::with_capacity(self.size);\n self.v.clear();\n match self.file.push(self.size, &mut self.v) {\n Err(e) => {\n match e.kind {\n IoErrorKind::NoProgress => Continue,\n IoErrorKind::EndOfFile => Eof(self.v.as_slice()),\n _ => ProducerError(0)\n }\n },\n Ok(i) => {\n println!(\"read {:?} bytes: {:?}\", i, self.v);\n Data(self.v.as_slice())\n }\n }\n }\n}\n\n\/\/\/ Can parse data from an already in memory byte array\n\/\/\/\n\/\/\/ * buffer holds the reference to the data that must be parsed\n\/\/\/\n\/\/\/ * length is the length of that buffer\n\/\/\/\n\/\/\/ * index is the position in the buffer\n\/\/\/\n\/\/\/ * chunk_size is the quantity of data sent at once\npub struct MemProducer<'x> {\n buffer: &'x [u8],\n chunk_size: usize,\n length: usize,\n index: usize\n}\n\nimpl<'x> MemProducer<'x> {\n pub fn new(buffer: &'x[u8], chunk_size: usize) -> MemProducer {\n MemProducer {\n buffer: buffer,\n chunk_size: chunk_size,\n length: buffer.len(),\n index: 0\n }\n }\n}\n\nimpl<'x> Producer for MemProducer<'x> {\n fn produce(&mut self) -> ProducerState<&[u8]> {\n if self.index + self.chunk_size < self.length {\n println!(\"self.index + {} < self.length\", self.chunk_size);\n let new_index = self.index+self.chunk_size;\n let res = Data(self.buffer.slice(self.index, new_index));\n self.index = new_index;\n res\n } else if self.index < self.length {\n println!(\"self.index < self.length - 1\");\n let res = Eof(self.buffer.slice(self.index, self.length));\n self.index = self.length;\n res\n } else {\n ProducerError(0)\n }\n }\n}\n\n\/\/\/ Prepares a parser function for a push pipeline\n\/\/\/\n\/\/\/ It creates a function that accepts a producer and immediately starts parsing the data sent\n\/\/\/\n\/\/\/ # Example\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n\/\/\/ par.flat_map(local_print)\n\/\/\/ }\n\/\/\/ let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 8);\n\/\/\/\n\/\/\/ pusher!(ps, pr);\n\/\/\/ ps(&mut p);\n\/\/\/ ```\n#[macro_export]\nmacro_rules! 
pusher (\n ($name:ident, $f:expr) => (\n fn $name(producer: &mut Producer) {\n let mut acc: Vec = Vec::new();\n loop {\n let state = producer.produce();\n match state {\n ProducerState::Data(v) => {\n println!(\"got data\");\n acc.push_all(v)\n },\n ProducerState::Eof([]) => {\n println!(\"eof empty\");\n break;\n }\n ProducerState::Eof(v) => {\n println!(\"eof with {} bytes\", v.len());\n acc.push_all(v)\n }\n _ => {break;}\n }\n let mut v2: Vec = Vec::new();\n v2.push_all(acc.as_slice());\n let p = IResult::Done((), v2.as_slice());\n match $f(p) {\n IResult::Error(e) => {\n println!(\"error, stopping: {}\", e);\n break;\n },\n IResult::Incomplete(_) => {\n println!(\"incomplete\");\n },\n IResult::Done(i, _) => {\n println!(\"data, done\");\n acc.clear();\n acc.push_all(i);\n }\n }\n }\n }\n );\n);\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use internal::IResult;\n use internal::IResult::*;\n use std::fmt::Debug;\n use std::str;\n use map::*;\n\n fn local_print<'a,T: Debug>(input: T) -> IResult {\n println!(\"{:?}\", input);\n Done(input, ())\n }\n #[test]\n fn mem_producer() {\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n assert_eq!(p.produce(), ProducerState::Data(\"abcd\".as_bytes()));\n }\n\n #[test]\n fn mem_producer_2() {\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 8);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n par.flat_map(local_print)\n }\n pusher!(ps, pr);\n ps(&mut p);\n \/\/let mut iterations: uint = 0;\n \/\/let mut p = MemProducer::new(\"abcdefghi\".as_bytes(), 4);\n \/\/p.push(|par| {iterations = iterations + 1; par.flat_map(print)});\n \/\/assert_eq!(iterations, 3);\n }\n\n #[test]\n #[allow(unused_must_use)]\n fn file() {\n FileProducer::new(\"links.txt\", 20).map(|producer: FileProducer| {\n let mut p = producer;\n \/\/p.push(|par| {println!(\"parsed file: {}\", par); par});\n \/\/p.push(|par| par.flat_map(print));\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n par.map_res(str::from_utf8).flat_map(local_print);\n Done(\"\".as_bytes(), ())\n }\n pusher!(ps, pr);\n ps(&mut p);\n \/\/assert!(false);\n });\n }\n\n #[test]\n fn accu() {\n fn f(input:&[u8]) -> IResult<&[u8],&[u8]> {\n if input.len() <= 4 {\n Incomplete(0)\n } else {\n Done(\"\".as_bytes(), input)\n }\n }\n\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],&[u8]> {\n let r = par.flat_map(f);\n println!(\"f: {:?}\", r);\n r\n }\n pusher!(ps, pr );\n ps(&mut p);\n \/\/assert!(false);\n }\n\n #[test]\n fn accu_2() {\n fn f(input:&[u8]) -> IResult<&[u8],&[u8]> {\n if input.len() <= 4 || input.slice(0,5) != \"abcde\".as_bytes() {\n Incomplete(0)\n } else {\n Done(input.slice_from(5), input.slice(0,5))\n }\n }\n\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],&[u8]> {\n let r = par.flat_map(f);\n println!(\"f: {:?}\", r);\n r\n }\n pusher!(ps, pr );\n ps(&mut p);\n \/\/assert!(false);\n }\n}\nrename io to old_io\/\/! Data producers\n\/\/!\n\/\/! The goal of data producers is to parse data as soon as it is generated.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```ignore\n\/\/! use std::str;\n\/\/! fn local_print<'a,T: Debug>(input: T) -> IResult {\n\/\/! println!(\"{:?}\", input);\n\/\/! Done(input, ())\n\/\/! }\n\/\/! \/\/ create a data producer from a file\n\/\/! FileProducer::new(\"links.txt\", 20).map(|producer: FileProducer| {\n\/\/! let mut p = producer;\n\/\/!\n\/\/! \/\/ create the parsing function\n\/\/! 
fn parser(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n\/\/! par.map_res(str::from_utf8).flat_map(local_print);\n\/\/! Done(\"\".as_bytes(), ())\n\/\/! }\n\/\/!\n\/\/! \/\/ adapt the parsing function to the producer\n\/\/! pusher!(push, parser);\n\/\/! \/\/ get started\n\/\/! push(&mut p);\n\/\/! });\n\/\/! ```\n\nuse internal::*;\nuse self::ProducerState::*;\n\nuse std::old_io::fs::File;\nuse std::old_io::{IoResult, IoErrorKind};\n\n\n\/\/\/ Holds the data producer's current state\n\/\/\/\n\/\/\/ * Eof indicates all data has been parsed, and contains the parser's result\n\/\/\/\n\/\/\/ * Continue indicates that more data is needed and should be available,\n\/\/\/ but not right now. Parsing should resume at some point.\n\/\/\/\n\/\/\/ * Data contains already parsed data\n\/\/\/\n\/\/\/ * ProducerError indicates something went wrong\n#[derive(Debug,PartialEq,Eq)]\npub enum ProducerState {\n Eof(O),\n Continue,\n Data(O),\n ProducerError(Err),\n}\n\n\/\/\/ A producer implements the produce method, currently working with u8 arrays\npub trait Producer {\n fn produce(&mut self) -> ProducerState<&[u8]>;\n}\n\n\/\/\/ Can produce data from a file\n\/\/\/\n\/\/\/ the size field is the size of v, the internal buffer\npub struct FileProducer {\n size: usize,\n file: File,\n v: Vec\n}\n\nimpl FileProducer {\n pub fn new(filename: &str, buffer_size: usize) -> IoResult {\n File::open(&Path::new(filename)).map(|f| {\n FileProducer {size: buffer_size, file: f, v: Vec::with_capacity(buffer_size)}\n })\n }\n}\n\nimpl Producer for FileProducer {\n fn produce(&mut self) -> ProducerState<&[u8]> {\n \/\/let mut v = Vec::with_capacity(self.size);\n self.v.clear();\n match self.file.push(self.size, &mut self.v) {\n Err(e) => {\n match e.kind {\n IoErrorKind::NoProgress => Continue,\n IoErrorKind::EndOfFile => Eof(self.v.as_slice()),\n _ => ProducerError(0)\n }\n },\n Ok(i) => {\n println!(\"read {:?} bytes: {:?}\", i, self.v);\n Data(self.v.as_slice())\n }\n }\n }\n}\n\n\/\/\/ Can parse data from an already in memory byte array\n\/\/\/\n\/\/\/ * buffer holds the reference to the data that must be parsed\n\/\/\/\n\/\/\/ * length is the length of that buffer\n\/\/\/\n\/\/\/ * index is the position in the buffer\n\/\/\/\n\/\/\/ * chunk_size is the quantity of data sent at once\npub struct MemProducer<'x> {\n buffer: &'x [u8],\n chunk_size: usize,\n length: usize,\n index: usize\n}\n\nimpl<'x> MemProducer<'x> {\n pub fn new(buffer: &'x[u8], chunk_size: usize) -> MemProducer {\n MemProducer {\n buffer: buffer,\n chunk_size: chunk_size,\n length: buffer.len(),\n index: 0\n }\n }\n}\n\nimpl<'x> Producer for MemProducer<'x> {\n fn produce(&mut self) -> ProducerState<&[u8]> {\n if self.index + self.chunk_size < self.length {\n println!(\"self.index + {} < self.length\", self.chunk_size);\n let new_index = self.index+self.chunk_size;\n let res = Data(self.buffer.slice(self.index, new_index));\n self.index = new_index;\n res\n } else if self.index < self.length {\n println!(\"self.index < self.length - 1\");\n let res = Eof(self.buffer.slice(self.index, self.length));\n self.index = self.length;\n res\n } else {\n ProducerError(0)\n }\n }\n}\n\n\/\/\/ Prepares a parser function for a push pipeline\n\/\/\/\n\/\/\/ It creates a function that accepts a producer and immediately starts parsing the data sent\n\/\/\/\n\/\/\/ # Example\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n\/\/\/ par.flat_map(local_print)\n\/\/\/ }\n\/\/\/ let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 
8);\n\/\/\/\n\/\/\/ pusher!(ps, pr);\n\/\/\/ ps(&mut p);\n\/\/\/ ```\n#[macro_export]\nmacro_rules! pusher (\n ($name:ident, $f:expr) => (\n fn $name(producer: &mut Producer) {\n let mut acc: Vec = Vec::new();\n loop {\n let state = producer.produce();\n match state {\n ProducerState::Data(v) => {\n println!(\"got data\");\n acc.push_all(v)\n },\n ProducerState::Eof([]) => {\n println!(\"eof empty\");\n break;\n }\n ProducerState::Eof(v) => {\n println!(\"eof with {} bytes\", v.len());\n acc.push_all(v)\n }\n _ => {break;}\n }\n let mut v2: Vec = Vec::new();\n v2.push_all(acc.as_slice());\n let p = IResult::Done((), v2.as_slice());\n match $f(p) {\n IResult::Error(e) => {\n println!(\"error, stopping: {}\", e);\n break;\n },\n IResult::Incomplete(_) => {\n println!(\"incomplete\");\n },\n IResult::Done(i, _) => {\n println!(\"data, done\");\n acc.clear();\n acc.push_all(i);\n }\n }\n }\n }\n );\n);\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use internal::IResult;\n use internal::IResult::*;\n use std::fmt::Debug;\n use std::str;\n use map::*;\n\n fn local_print<'a,T: Debug>(input: T) -> IResult {\n println!(\"{:?}\", input);\n Done(input, ())\n }\n #[test]\n fn mem_producer() {\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n assert_eq!(p.produce(), ProducerState::Data(\"abcd\".as_bytes()));\n }\n\n #[test]\n fn mem_producer_2() {\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 8);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n par.flat_map(local_print)\n }\n pusher!(ps, pr);\n ps(&mut p);\n \/\/let mut iterations: uint = 0;\n \/\/let mut p = MemProducer::new(\"abcdefghi\".as_bytes(), 4);\n \/\/p.push(|par| {iterations = iterations + 1; par.flat_map(print)});\n \/\/assert_eq!(iterations, 3);\n }\n\n #[test]\n #[allow(unused_must_use)]\n fn file() {\n FileProducer::new(\"links.txt\", 20).map(|producer: FileProducer| {\n let mut p = producer;\n \/\/p.push(|par| {println!(\"parsed file: {}\", par); par});\n \/\/p.push(|par| par.flat_map(print));\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],()> {\n par.map_res(str::from_utf8).flat_map(local_print);\n Done(\"\".as_bytes(), ())\n }\n pusher!(ps, pr);\n ps(&mut p);\n \/\/assert!(false);\n });\n }\n\n #[test]\n fn accu() {\n fn f(input:&[u8]) -> IResult<&[u8],&[u8]> {\n if input.len() <= 4 {\n Incomplete(0)\n } else {\n Done(\"\".as_bytes(), input)\n }\n }\n\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],&[u8]> {\n let r = par.flat_map(f);\n println!(\"f: {:?}\", r);\n r\n }\n pusher!(ps, pr );\n ps(&mut p);\n \/\/assert!(false);\n }\n\n #[test]\n fn accu_2() {\n fn f(input:&[u8]) -> IResult<&[u8],&[u8]> {\n if input.len() <= 4 || input.slice(0,5) != \"abcde\".as_bytes() {\n Incomplete(0)\n } else {\n Done(input.slice_from(5), input.slice(0,5))\n }\n }\n\n let mut p = MemProducer::new(\"abcdefgh\".as_bytes(), 4);\n fn pr(par: IResult<(),&[u8]>) -> IResult<&[u8],&[u8]> {\n let r = par.flat_map(f);\n println!(\"f: {:?}\", r);\n r\n }\n pusher!(ps, pr );\n ps(&mut p);\n \/\/assert!(false);\n }\n}\n<|endoftext|>"} {"text":"Update tests to use example.com instead of domain.com<|endoftext|>"} {"text":"Syncfile: fix issue with truncated uuids<|endoftext|>"} {"text":"fiber can determine its status<|endoftext|>"} {"text":"2019: Day 3, part 2.use std::collections::HashMap;\nuse std::collections::HashSet;\nuse std::io;\nuse std::io::Read;\n\nuse nom::{\n character::complete as character, combinator::map, combinator::map_opt, combinator::map_res,\n 
combinator::opt, multi::many1, sequence::pair, sequence::terminated, sequence::tuple, IResult,\n};\n\ntype WirePath = Vec;\n\n#[derive(Clone, Copy, Debug)]\nstruct Movement {\n direction: Direction,\n amount: Amount,\n}\n\n#[derive(Clone, Copy, Debug)]\nenum Direction {\n Down,\n Left,\n Right,\n Up,\n}\n\nimpl Direction {\n fn inc(&self, position: Coordinates) -> Coordinates {\n match self {\n Direction::Down => Coordinates {\n y: position.y - 1,\n ..position\n },\n Direction::Left => Coordinates {\n x: position.x - 1,\n ..position\n },\n Direction::Right => Coordinates {\n x: position.x + 1,\n ..position\n },\n Direction::Up => Coordinates {\n y: position.y + 1,\n ..position\n },\n }\n }\n}\n\ntype Amount = u32;\n\ntype Distance = u32;\n\n#[derive(Clone, Copy, Debug, Hash)]\nstruct Coordinates {\n x: i32,\n y: i32,\n}\n\nimpl Coordinates {\n const ZERO: Coordinates = Coordinates { x: 0, y: 0 };\n}\n\nimpl PartialEq for Coordinates {\n fn eq(&self, other: &Self) -> bool {\n self.x == other.x && self.y == other.y\n }\n}\nimpl Eq for Coordinates {}\n\nfn main() -> io::Result<()> {\n let mut input: String = String::new();\n io::stdin().read_to_string(&mut input)?;\n let (_, (wire_a, wire_b)) = parse_wire_paths(&input)\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n let wire_a_locations_and_distances = all_locations_and_distances(&wire_a);\n let wire_b_locations_and_distances = all_locations_and_distances(&wire_b);\n let wire_a_locations = wire_a_locations_and_distances\n .keys()\n .collect::>();\n let wire_b_locations = wire_b_locations_and_distances\n .keys()\n .collect::>();\n let overlapping_locations = wire_a_locations.intersection(&wire_b_locations);\n let best = overlapping_locations\n .map(|c| wire_a_locations_and_distances[c] + wire_b_locations_and_distances[c])\n .min()\n .ok_or(io::ErrorKind::NotFound)?;\n println!(\"{}\", best);\n\n Ok(())\n}\n\nfn all_locations_and_distances(wire_path: &WirePath) -> HashMap {\n let mut locations = HashMap::new();\n let mut position = Coordinates::ZERO;\n let mut steps = 0;\n for movement in wire_path {\n for _ in 0..movement.amount {\n steps += 1;\n position = movement.direction.inc(position);\n if !locations.contains_key(&position) {\n locations.insert(position, steps);\n }\n }\n }\n locations\n}\n\nfn parse_wire_paths(input: &str) -> IResult<&str, (WirePath, WirePath)> {\n tuple((parse_wire_path, parse_wire_path))(input)\n}\n\nfn parse_wire_path(input: &str) -> IResult<&str, WirePath> {\n terminated(\n many1(terminated(parse_movement, opt(character::char(',')))),\n opt(character::line_ending),\n )(input)\n}\n\nfn parse_movement(input: &str) -> IResult<&str, Movement> {\n map(\n pair(parse_direction, parse_amount),\n |(direction, amount)| Movement { direction, amount },\n )(input)\n}\n\nfn parse_direction(input: &str) -> IResult<&str, Direction> {\n map_opt(character::anychar, |c| match c {\n 'D' => Some(Direction::Down),\n 'L' => Some(Direction::Left),\n 'R' => Some(Direction::Right),\n 'U' => Some(Direction::Up),\n _ => None,\n })(input)\n}\n\nfn parse_amount(input: &str) -> IResult<&str, Amount> {\n map_res(character::digit1, |digits: &str| digits.parse::())(input)\n}\n<|endoftext|>"} {"text":"Fix missing #[ruma_api(body)]<|endoftext|>"} {"text":"Added if else examplefn main() {\n let n = 5;\n\n if n < 0 {\n print!(\"{} is negative\", n);\n } else if n > 0 {\n print!(\"{} is positive\", n);\n } else {\n print!(\"{} is zero\", n);\n }\n\n let big_n =\n if n < 10 && n > -10 {\n println!(\", and is a small 
number, increase ten-fold\");\n\n \/\/ This expression returns an `i32`.\n 10 * n\n } else {\n println!(\", and is a big number, reduce by two\");\n\n \/\/ This expression must return an `i32` as well.\n n \/ 2\n \/\/ TODO ^ Try suppressing this expression with a semicolon.\n };\n \/\/ ^ Don't forget to put a semicolon here! All `let` bindings need it.\n\n println!(\"{} -> {}\", n, big_n);\n}\n<|endoftext|>"} {"text":"Rename `BlockLevelKind` to `NonReplacedContents`<|endoftext|>"} {"text":"add dummy.rs\/\/ Usually 'cargo test' tries to build everything in the examples directory.\n\/\/ That doesn't work for our examples because they have their own Cargo.toml and build.rs files.\n\/\/ We can override the default behavior of 'cargo test' by supplying a dummy file, like this,\n\/\/ and pointing to it in our Cargo.toml's [[example]] section.\n\nfn main() {}\n<|endoftext|>"} {"text":"EULER: Multiples of 3 and 5\/\/ EULER: Multiples of 3 and 5\n\/\/ https:\/\/projecteuler.net\/problem=1\nfn main() {\n let sum = range(1i, 1000i).filter(|&x| {x % 3 == 0 || x % 5 == 0}).fold(0i, |a, b| a + b);\n println!(\"{}\", sum);\n}\n<|endoftext|>"} {"text":"Add a test for assignment of unique boxes of generics\/\/ Issue #976\n\nfn f<@T>(x: ~T) {\n let _x2 = x;\n}\nfn main() { }\n<|endoftext|>"} {"text":"enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String, \/\/ TODO: What to name this string type?\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n Hrtime,\n Nvlist, \/\/ TODO: What to name this ?\n NvlistArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array\n}\n\nstruct NvPair {\n nvp_size: i32, \/\/ size of this nvpair\n nvp_name_sz: i16, \/\/ length of name string\n nvp_reserve: i16, \/\/ not used\n nvp_value_elem: i32, \/\/ number of elements for array types\n nvp_type: DataType, \/\/ type of value\n \/\/ name string\n \/\/ aligned ptr array for string arrays\n \/\/ aligned array of data for value\n}\n\n\/\/ nvlist header\nstruct NvList {\n nvl_version: i32\n nvl_nvflag: u32 \/\/ persistent flags\n nvl_priv: u64 \/\/ ptr to private data if not packed\n nvl_flag: u32\n nvl_pad: i32 \/\/ currently not used, for alignment\n}\n\n\/\/ nvp implementation version\nconst NV_VERSION: i32 = 0;\n\n\/\/ nvlist pack encoding\nconst NV_ENCODE_NATIVE: u8 = 0;\nconst NV_ENCODE_XDR: u8 = 1;\n\n\/\/ nvlist persistent unique name flags, stored in nvl_nvflags\nconst NV_UNIQUE_NAME: u32 = 0x1;\nconst NV_UNIQUE_NAME_TYPE: u32 = 0x2;\n\n\/\/ nvlist lookup pairs related flags\nconst NV_FLAG_NOENTOK: isize = 0x1;\n\n\/* What to do about these macros?\n\/\/ convenience macros\n#define NV_ALIGN(x) (((ulong_t)(x) + 7ul) & ~7ul)\n#define NV_ALIGN4(x) (((x) + 3) & ~3)\n\n#define NVP_SIZE(nvp) ((nvp)->nvp_size)\n#define NVP_NAME(nvp) ((char *)(nvp) + sizeof (nvpair_t))\n#define NVP_TYPE(nvp) ((nvp)->nvp_type)\n#define NVP_NELEM(nvp) ((nvp)->nvp_value_elem)\n#define NVP_VALUE(nvp) ((char *)(nvp) + NV_ALIGN(sizeof (nvpair_t) \\\n + (nvp)->nvp_name_sz))\n\n#define NVL_VERSION(nvl) ((nvl)->nvl_version)\n#define NVL_SIZE(nvl) ((nvl)->nvl_size)\n#define NVL_FLAG(nvl) ((nvl)->nvl_flag)\n*\/\n\n\/\/ NV allocator framework\nstruct NvAllocOps;\n\nstruct NvAlloc<> {\n nva_ops: &'static NvAllocOps,\n nva_arg: Any, \/\/ This was a void pointer type.\n \/\/ Not sure if Any is the correct type.\n}\n\nstruct NvAllocOps {\n int (*nv_ao_init)(nv_alloc_t *, __va_list);\n void (*nv_ao_fini)(nv_alloc_t 
*);\n void *(*nv_ao_alloc)(nv_alloc_t *, size_t);\n void (*nv_ao_free)(nv_alloc_t *, void *, size_t);\n void (*nv_ao_reset)(nv_alloc_t *);\n}\nForgot to comment out unrustified codeenum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String, \/\/ TODO: What to name this string type?\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n Hrtime,\n Nvlist, \/\/ TODO: What to name this ?\n NvlistArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array\n}\n\nstruct NvPair {\n nvp_size: i32, \/\/ size of this nvpair\n nvp_name_sz: i16, \/\/ length of name string\n nvp_reserve: i16, \/\/ not used\n nvp_value_elem: i32, \/\/ number of elements for array types\n nvp_type: DataType, \/\/ type of value\n \/\/ name string\n \/\/ aligned ptr array for string arrays\n \/\/ aligned array of data for value\n}\n\n\/\/ nvlist header\nstruct NvList {\n nvl_version: i32\n nvl_nvflag: u32 \/\/ persistent flags\n nvl_priv: u64 \/\/ ptr to private data if not packed\n nvl_flag: u32\n nvl_pad: i32 \/\/ currently not used, for alignment\n}\n\n\/\/ nvp implementation version\nconst NV_VERSION: i32 = 0;\n\n\/\/ nvlist pack encoding\nconst NV_ENCODE_NATIVE: u8 = 0;\nconst NV_ENCODE_XDR: u8 = 1;\n\n\/\/ nvlist persistent unique name flags, stored in nvl_nvflags\nconst NV_UNIQUE_NAME: u32 = 0x1;\nconst NV_UNIQUE_NAME_TYPE: u32 = 0x2;\n\n\/\/ nvlist lookup pairs related flags\nconst NV_FLAG_NOENTOK: isize = 0x1;\n\n\/* What to do about these macros?\n\/\/ convenience macros\n#define NV_ALIGN(x) (((ulong_t)(x) + 7ul) & ~7ul)\n#define NV_ALIGN4(x) (((x) + 3) & ~3)\n\n#define NVP_SIZE(nvp) ((nvp)->nvp_size)\n#define NVP_NAME(nvp) ((char *)(nvp) + sizeof (nvpair_t))\n#define NVP_TYPE(nvp) ((nvp)->nvp_type)\n#define NVP_NELEM(nvp) ((nvp)->nvp_value_elem)\n#define NVP_VALUE(nvp) ((char *)(nvp) + NV_ALIGN(sizeof (nvpair_t) \\\n + (nvp)->nvp_name_sz))\n\n#define NVL_VERSION(nvl) ((nvl)->nvl_version)\n#define NVL_SIZE(nvl) ((nvl)->nvl_size)\n#define NVL_FLAG(nvl) ((nvl)->nvl_flag)\n*\/\n\n\/\/ NV allocator framework\nstruct NvAllocOps;\n\nstruct NvAlloc<> {\n nva_ops: &'static NvAllocOps,\n nva_arg: Any, \/\/ This was a void pointer type.\n \/\/ Not sure if Any is the correct type.\n}\n\n\/*\nstruct NvAllocOps {\n int (*nv_ao_init)(nv_alloc_t *, __va_list);\n void (*nv_ao_fini)(nv_alloc_t *);\n void *(*nv_ao_alloc)(nv_alloc_t *, size_t);\n void (*nv_ao_free)(nv_alloc_t *, void *, size_t);\n void (*nv_ao_reset)(nv_alloc_t *);\n}\n*\/\n<|endoftext|>"} {"text":"add write_read testsextern crate quick_protobuf;\n\nuse quick_protobuf::{Reader, MessageRead, Writer, MessageWrite};\n\nmacro_rules! 
write_read_primitive {\n ($name:ident, $read:ident, $write:ident) => (write_read_primitive!($name, $read, $write, 145););\n ($name:ident, $read:ident, $write:ident, $def:expr) => (\n#[test]\nfn $name(){\n let v = $def;\n let mut buf = Vec::new();\n {\n let mut w = Writer::new(&mut buf);\n w.$write(v).unwrap();\n }\n let len = buf.len();\n let mut buf = &*buf;\n let mut r = Reader::from_reader(&mut buf, len);\n assert_eq!(v, r.$read().unwrap());\n}\n );\n}\n\nwrite_read_primitive!(wr_int32, read_int32, write_int32);\nwrite_read_primitive!(wr_int64, read_int64, write_int64);\nwrite_read_primitive!(wr_uint32, read_uint32, write_uint32);\nwrite_read_primitive!(wr_uint64, read_uint64, write_uint64);\nwrite_read_primitive!(wr_sint32, read_sint32, write_sint32);\nwrite_read_primitive!(wr_sint64, read_sint64, write_sint64);\nwrite_read_primitive!(wr_bool, read_bool, write_bool, true);\nwrite_read_primitive!(wr_fixed32, read_fixed32, write_fixed32);\nwrite_read_primitive!(wr_fixed64, read_fixed64, write_fixed64);\nwrite_read_primitive!(wr_sfixed32, read_sfixed32, write_sfixed32);\nwrite_read_primitive!(wr_sfixed64, read_sfixed64, write_sfixed64);\nwrite_read_primitive!(wr_float, read_float, write_float, 5.8);\nwrite_read_primitive!(wr_double, read_double, write_double, 5.8);\n\n#[test]\nfn wr_bytes(){\n let v = b\"test_write_read\";\n let mut buf = Vec::new();\n {\n let mut w = Writer::new(&mut buf);\n w.write_bytes(v).unwrap();\n }\n let len = buf.len();\n let mut buf = &*buf;\n let mut r = Reader::from_reader(&mut buf, len);\n assert_eq!(v, &*r.read_bytes().unwrap());\n}\n\n#[test]\nfn wr_string(){\n let v = \"test_write_read\";\n let mut buf = Vec::new();\n {\n let mut w = Writer::new(&mut buf);\n w.write_string(v).unwrap();\n }\n let len = buf.len();\n let mut buf = &*buf;\n let mut r = Reader::from_reader(&mut buf, len);\n assert_eq!(v, &*r.read_string().unwrap());\n}\n\n#[derive(PartialEq, Eq, Debug, Clone, Copy)]\nenum TestEnum {\n A = 0,\n B = 1,\n C = 2,\n}\n\nimpl From for TestEnum {\n fn from(v: i32) -> TestEnum {\n match v {\n 0 => TestEnum::A,\n 1 => TestEnum::B,\n 2 => TestEnum::C,\n _ => unreachable!(),\n }\n }\n}\n\n#[test]\nfn wr_enum(){\n\n let v = TestEnum::C;\n let mut buf = Vec::new();\n {\n let mut w = Writer::new(&mut buf);\n w.write_enum(v as i32).unwrap();\n }\n let len = buf.len();\n let mut buf = &*buf;\n let mut r = Reader::from_reader(&mut buf, len);\n assert_eq!(v, r.read_enum().unwrap());\n}\n\n<|endoftext|>"} {"text":"\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse core::prelude::*;\n\nuse ast;\nuse ast::{meta_item, item, expr, ident};\nuse codemap::span;\nuse ext::base::ExtCtxt;\nuse ext::build::{AstBuilder, Duplicate};\nuse ext::deriving::generic::*;\n\nuse core::vec;\n\npub fn expand_deriving_rand(cx: @ExtCtxt,\n span: span,\n mitem: @meta_item,\n in_items: ~[@item])\n -> ~[@item] {\n let trait_def = TraitDef {\n path: Path::new(~[\"std\", \"rand\", \"Rand\"]),\n additional_bounds: ~[],\n generics: LifetimeBounds::empty(),\n methods: ~[\n MethodDef {\n name: \"rand\",\n generics: LifetimeBounds {\n lifetimes: ~[],\n bounds: ~[(\"R\",\n ~[ Path::new(~[\"std\", \"rand\", \"Rng\"]) ])]\n },\n explicit_self: None,\n args: ~[\n Ptr(~Literal(Path::new_local(\"R\")),\n Borrowed(None, ast::m_mutbl))\n ],\n ret_ty: Self,\n const_nonmatching: false,\n combine_substructure: rand_substructure\n }\n ]\n };\n trait_def.expand(cx, span, mitem, in_items)\n}\n\nfn rand_substructure(cx: @ExtCtxt, span: span, substr: &Substructure) -> @expr {\n let rng = match substr.nonself_args {\n [rng] => ~[ rng ],\n _ => cx.bug(\"Incorrect number of arguments to `rand` in `deriving(Rand)`\")\n };\n let rand_ident = ~[\n cx.ident_of(\"std\"),\n cx.ident_of(\"rand\"),\n cx.ident_of(\"Rand\"),\n cx.ident_of(\"rand\")\n ];\n let rand_call = || {\n cx.expr_call_global(span,\n copy rand_ident,\n ~[ rng[0].duplicate(cx) ])\n };\n\n return match *substr.fields {\n StaticStruct(_, ref summary) => {\n rand_thing(cx, span, substr.type_ident, summary, rand_call)\n }\n StaticEnum(_, ref variants) => {\n if variants.is_empty() {\n cx.span_fatal(span, \"`Rand` cannot be derived for enums with no variants\");\n }\n\n let variant_count = cx.expr_uint(span, variants.len());\n\n \/\/ need to specify the u32-ness of the random number\n let u32_ty = cx.ty_ident(span, cx.ident_of(\"u32\"));\n let r_ty = cx.ty_ident(span, cx.ident_of(\"R\"));\n let rand_name = cx.path_all(span, true, copy rand_ident, None, ~[ u32_ty, r_ty ]);\n let rand_name = cx.expr_path(rand_name);\n\n \/\/ ::std::rand::Rand::rand::(rng)\n let rv_call = cx.expr_call(span,\n rand_name,\n ~[ rng[0].duplicate(cx) ]);\n\n \/\/ rand() % variants.len()\n let rand_variant = cx.expr_binary(span, ast::rem,\n rv_call, variant_count);\n\n let mut arms = do variants.mapi |i, id_sum| {\n let i_expr = cx.expr_uint(span, i);\n let pat = cx.pat_lit(span, i_expr);\n\n match *id_sum {\n (ident, ref summary) => {\n cx.arm(span,\n ~[ pat ],\n rand_thing(cx, span, ident, summary, rand_call))\n }\n }\n };\n\n \/\/ _ => {} at the end. Should never occur\n arms.push(cx.arm_unreachable(span));\n\n cx.expr_match(span, rand_variant, arms)\n }\n _ => cx.bug(\"Non-static method in `deriving(Rand)`\")\n };\n\n fn rand_thing(cx: @ExtCtxt, span: span,\n ctor_ident: ident,\n summary: &Either,\n rand_call: &fn() -> @expr) -> @expr {\n match *summary {\n Left(count) => {\n if count == 0 {\n cx.expr_ident(span, ctor_ident)\n } else {\n let exprs = vec::from_fn(count, |_| rand_call());\n cx.expr_call_ident(span, ctor_ident, exprs)\n }\n }\n Right(ref fields) => {\n let rand_fields = do fields.map |ident| {\n cx.field_imm(span, *ident, rand_call())\n };\n cx.expr_struct_ident(span, ctor_ident, rand_fields)\n }\n }\n }\n}\nsyntax: revert the uint -> u32 \"fix\"; make the names\/comment match.\/\/ Copyright 2012-2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse core::prelude::*;\n\nuse ast;\nuse ast::{meta_item, item, expr, ident};\nuse codemap::span;\nuse ext::base::ExtCtxt;\nuse ext::build::{AstBuilder, Duplicate};\nuse ext::deriving::generic::*;\n\nuse core::vec;\n\npub fn expand_deriving_rand(cx: @ExtCtxt,\n span: span,\n mitem: @meta_item,\n in_items: ~[@item])\n -> ~[@item] {\n let trait_def = TraitDef {\n path: Path::new(~[\"std\", \"rand\", \"Rand\"]),\n additional_bounds: ~[],\n generics: LifetimeBounds::empty(),\n methods: ~[\n MethodDef {\n name: \"rand\",\n generics: LifetimeBounds {\n lifetimes: ~[],\n bounds: ~[(\"R\",\n ~[ Path::new(~[\"std\", \"rand\", \"Rng\"]) ])]\n },\n explicit_self: None,\n args: ~[\n Ptr(~Literal(Path::new_local(\"R\")),\n Borrowed(None, ast::m_mutbl))\n ],\n ret_ty: Self,\n const_nonmatching: false,\n combine_substructure: rand_substructure\n }\n ]\n };\n trait_def.expand(cx, span, mitem, in_items)\n}\n\nfn rand_substructure(cx: @ExtCtxt, span: span, substr: &Substructure) -> @expr {\n let rng = match substr.nonself_args {\n [rng] => ~[ rng ],\n _ => cx.bug(\"Incorrect number of arguments to `rand` in `deriving(Rand)`\")\n };\n let rand_ident = ~[\n cx.ident_of(\"std\"),\n cx.ident_of(\"rand\"),\n cx.ident_of(\"Rand\"),\n cx.ident_of(\"rand\")\n ];\n let rand_call = || {\n cx.expr_call_global(span,\n copy rand_ident,\n ~[ rng[0].duplicate(cx) ])\n };\n\n return match *substr.fields {\n StaticStruct(_, ref summary) => {\n rand_thing(cx, span, substr.type_ident, summary, rand_call)\n }\n StaticEnum(_, ref variants) => {\n if variants.is_empty() {\n cx.span_fatal(span, \"`Rand` cannot be derived for enums with no variants\");\n }\n\n let variant_count = cx.expr_uint(span, variants.len());\n\n \/\/ need to specify the uint-ness of the random number\n let uint_ty = cx.ty_ident(span, cx.ident_of(\"uint\"));\n let r_ty = cx.ty_ident(span, cx.ident_of(\"R\"));\n let rand_name = cx.path_all(span, true, copy rand_ident, None, ~[ uint_ty, r_ty ]);\n let rand_name = cx.expr_path(rand_name);\n\n \/\/ ::std::rand::Rand::rand::(rng)\n let rv_call = cx.expr_call(span,\n rand_name,\n ~[ rng[0].duplicate(cx) ]);\n\n \/\/ rand() % variants.len()\n let rand_variant = cx.expr_binary(span, ast::rem,\n rv_call, variant_count);\n\n let mut arms = do variants.mapi |i, id_sum| {\n let i_expr = cx.expr_uint(span, i);\n let pat = cx.pat_lit(span, i_expr);\n\n match *id_sum {\n (ident, ref summary) => {\n cx.arm(span,\n ~[ pat ],\n rand_thing(cx, span, ident, summary, rand_call))\n }\n }\n };\n\n \/\/ _ => {} at the end. 
Should never occur\n arms.push(cx.arm_unreachable(span));\n\n cx.expr_match(span, rand_variant, arms)\n }\n _ => cx.bug(\"Non-static method in `deriving(Rand)`\")\n };\n\n fn rand_thing(cx: @ExtCtxt, span: span,\n ctor_ident: ident,\n summary: &Either,\n rand_call: &fn() -> @expr) -> @expr {\n match *summary {\n Left(count) => {\n if count == 0 {\n cx.expr_ident(span, ctor_ident)\n } else {\n let exprs = vec::from_fn(count, |_| rand_call());\n cx.expr_call_ident(span, ctor_ident, exprs)\n }\n }\n Right(ref fields) => {\n let rand_fields = do fields.map |ident| {\n cx.field_imm(span, *ident, rand_call())\n };\n cx.expr_struct_ident(span, ctor_ident, rand_fields)\n }\n }\n }\n}\n<|endoftext|>"} {"text":"Add test for generic iface methodsiface to_str {\n fn to_str() -> str;\n}\nimpl of to_str for int {\n fn to_str() -> str { int::str(self) }\n}\nimpl of to_str for str {\n fn to_str() -> str { self }\n}\n\niface map {\n fn map(f: block(T) -> U) -> [U];\n}\nimpl of map for [T] {\n fn map(f: block(T) -> U) -> [U] {\n let r = [];\n for x in self { r += [f(x)]; }\n r\n }\n}\n\nfn foo>(x: T) -> [str] {\n x.map({|_e| \"hi\" })\n}\nfn bar>(x: T) -> [str] {\n x.map({|_e| _e.to_str() })\n}\n\nfn main() {\n assert foo([1]) == [\"hi\"];\n assert bar::([4, 5]) == [\"4\", \"5\"];\n assert bar::([\"x\", \"y\"]) == [\"x\", \"y\"];\n}\n<|endoftext|>"} {"text":"fn on_osmain(f: fn~()) {\n let po = comm::Port();\n let ch = comm::Chan(po);\n do task::spawn_sched(task::PlatformThread) |copy f| {\n f();\n comm::send(ch, ());\n }\n comm::recv(po);\n}\n\n\/\/ FIXME: Needs additional cocoa setup on OS X. rust-cocoa should probably just\n\/\/ be a dependency\n#[test]\n#[ignore(cfg(target_os = \"macos\"))]\nfn test_everything() {\n\n on_osmain(|| {\n init(~[init_video, init_timer]);\n run_tests(~[\n general::test_was_init,\n general::test_set_error,\n general::test_error,\n general::test_clear_error,\n video::test_set_video_mode,\n \/\/ FIXME: Doesn't work when called from a directory that\n \/\/ doesn't contain the test image file\n \/\/video::test_blit,\n test_event::test_poll_event_none\n \/\/ FIXME: This test is interactive\n \/\/test_event::test_keyboard\n ]);\n quit();\n })\n}\n\nfn run_tests(tests: ~[fn()]) {\n vec::iter(tests, |test| test());\n}\n\nmod general {\n fn test_was_init() {\n assert init_timer == init_timer;\n \/\/assert vec::contains(was_init(~[init_timer]), init_timer);\n }\n\n fn test_set_error() {\n set_error(~\"test\");\n assert get_error() == ~\"test\";\n }\n\n fn test_error() {\n clear_error();\n assert str::is_empty(get_error());\n error(enomem);\n assert str::is_not_empty(get_error());\n }\n\n fn test_clear_error() {\n set_error(~\"test\");\n clear_error();\n assert str::is_empty(get_error());\n }\n}\n\nmod test_event {\n fn test_poll_event_none() {\n ::event::poll_event(|event| assert event == ::event::no_event);\n }\n\n fn test_keyboard() {\n io::println(~\"press a key in the window\");\n let surface = ::video::set_video_mode(320, 200, 32,\n ~[::video::swsurface], ~[::video::doublebuf, ::video::resizable]);\n let mut keydown = false;\n let mut keyup = false;\n while !keydown || !keyup {\n ::event::poll_event(|event| {\n match event {\n event::keyup_event(_) => keyup = true,\n event::keydown_event(_) => keydown = true,\n _ => { }\n }\n })\n }\n ::video::free_surface(surface);\n }\n}\n\nmod video {\n\n fn test_set_video_mode() {\n let surface = ::video::set_video_mode(320, 200, 32,\n ~[::video::hwsurface], ~[::video::doublebuf]);\n assert surface != ptr::null();\n 
::video::free_surface(surface);\n }\n\n fn test_blit() {\n let screen = ::video::set_video_mode(320, 200, 32,\n ~[::video::swsurface], ~[::video::doublebuf]);\n assert screen != ptr::null();\n\n let image = {\n \/\/ FIXME: We need to load this from the crate instead of\n \/\/ off the filesystem\n let tmp = ::video::load_bmp(~\"rust-logo-128x128-blk.bmp\");\n assert tmp != ptr::null();\n let image = ::video::display_format(tmp);\n assert image != ptr::null();\n ::video::free_surface(tmp);\n image\n };\n\n for iter::repeat(1u) || {\n ::video::blit_surface(image, ptr::null(),\n screen, ptr::null());\n ::video::flip(screen);\n ::event::poll_event(|_event| {})\n };\n\n ::video::free_surface(image);\n ::video::free_surface(screen);\n }\n}\nSilly me, I left the test code commented out! Uncommented, so all tests validly passing now.fn on_osmain(f: fn~()) {\n let po = comm::Port();\n let ch = comm::Chan(po);\n do task::spawn_sched(task::PlatformThread) |copy f| {\n f();\n comm::send(ch, ());\n }\n comm::recv(po);\n}\n\n\/\/ FIXME: Needs additional cocoa setup on OS X. rust-cocoa should probably just\n\/\/ be a dependency\n#[test]\n#[ignore(cfg(target_os = \"macos\"))]\nfn test_everything() {\n\n on_osmain(|| {\n init(~[init_video, init_timer]);\n run_tests(~[\n general::test_was_init,\n general::test_set_error,\n general::test_error,\n general::test_clear_error,\n video::test_set_video_mode,\n \/\/ FIXME: Doesn't work when called from a directory that\n \/\/ doesn't contain the test image file\n \/\/video::test_blit,\n test_event::test_poll_event_none\n \/\/ FIXME: This test is interactive\n \/\/test_event::test_keyboard\n ]);\n quit();\n })\n}\n\nfn run_tests(tests: ~[fn()]) {\n vec::iter(tests, |test| test());\n}\n\nmod general {\n fn test_was_init() {\n assert vec::contains(was_init(~[init_timer]), init_timer);\n }\n\n fn test_set_error() {\n set_error(~\"test\");\n assert get_error() == ~\"test\";\n }\n\n fn test_error() {\n clear_error();\n assert str::is_empty(get_error());\n error(enomem);\n assert str::is_not_empty(get_error());\n }\n\n fn test_clear_error() {\n set_error(~\"test\");\n clear_error();\n assert str::is_empty(get_error());\n }\n}\n\nmod test_event {\n fn test_poll_event_none() {\n ::event::poll_event(|event| assert event == ::event::no_event);\n }\n\n fn test_keyboard() {\n io::println(~\"press a key in the window\");\n let surface = ::video::set_video_mode(320, 200, 32,\n ~[::video::swsurface], ~[::video::doublebuf, ::video::resizable]);\n let mut keydown = false;\n let mut keyup = false;\n while !keydown || !keyup {\n ::event::poll_event(|event| {\n match event {\n event::keyup_event(_) => keyup = true,\n event::keydown_event(_) => keydown = true,\n _ => { }\n }\n })\n }\n ::video::free_surface(surface);\n }\n}\n\nmod video {\n\n fn test_set_video_mode() {\n let surface = ::video::set_video_mode(320, 200, 32,\n ~[::video::hwsurface], ~[::video::doublebuf]);\n assert surface != ptr::null();\n ::video::free_surface(surface);\n }\n\n fn test_blit() {\n let screen = ::video::set_video_mode(320, 200, 32,\n ~[::video::swsurface], ~[::video::doublebuf]);\n assert screen != ptr::null();\n\n let image = {\n \/\/ FIXME: We need to load this from the crate instead of\n \/\/ off the filesystem\n let tmp = ::video::load_bmp(~\"rust-logo-128x128-blk.bmp\");\n assert tmp != ptr::null();\n let image = ::video::display_format(tmp);\n assert image != ptr::null();\n ::video::free_surface(tmp);\n image\n };\n\n for iter::repeat(1u) || {\n ::video::blit_surface(image, ptr::null(),\n screen, 
ptr::null());\n ::video::flip(screen);\n ::event::poll_event(|_event| {})\n };\n\n ::video::free_surface(image);\n ::video::free_surface(screen);\n }\n}\n<|endoftext|>"} {"text":"\n\n\/\/ This is a test for issue #109.\nuse std;\n\nfn slice[T](e: vec[T]) {\n let result: vec[T] = std::vec::alloc[T](1 as uint);\n log \"alloced\";\n result += e;\n log \"appended\";\n}\n\nfn main() { slice[str]([\"a\"]); }Remove run-pass\/vec-alloc-append.rs<|endoftext|>"} {"text":"Fix #207 with unit testuse ron::{de::from_str, ser::to_string};\nuse serde::{Deserialize, Serialize};\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n#[serde(tag = \"type\", content = \"data\")]\nenum TestEnum {\n Name(String),\n Index(u32),\n}\n\n#[test]\nfn test_adjacently_tagged() {\n let source = TestEnum::Index(1);\n\n let ron_string = to_string(&source).unwrap();\n\n assert_eq!(ron_string, \"(type:\\\"Index\\\",data:1)\");\n\n let deserialized = from_str::(&ron_string).unwrap();\n\n assert_eq!(deserialized, source);\n}\n<|endoftext|>"} {"text":"Write buffer to stream<|endoftext|>"} {"text":"Add menu example\/*\n * Copyright (c) 2019 Boucher, Antoni \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy of\n * this software and associated documentation files (the \"Software\"), to deal in\n * the Software without restriction, including without limitation the rights to\n * use, copy, modify, merge, publish, distribute, sublicense, and\/or sell copies of\n * the Software, and to permit persons to whom the Software is furnished to do so,\n * subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n *\/\n\nuse gtk::{\n GtkMenuItemExt,\n Inhibit,\n MenuShellExt,\n OrientableExt,\n WidgetExt,\n};\nuse gtk::Orientation::Vertical;\nuse relm::{Relm, Widget, connect};\nuse relm_derive::{Msg, widget};\n\nuse self::Msg::*;\n\npub struct Model {\n relm: Relm,\n}\n\n#[derive(Msg)]\npub enum Msg {\n Quit,\n}\n\n#[widget]\nimpl Widget for Win {\n fn init_view(&mut self) {\n let file_menu = gtk::Menu::new();\n let file_item = gtk::MenuItem::new_with_label(\"File\");\n file_item.set_submenu(Some(&file_menu));\n let quit_item = gtk::MenuItem::new_with_label(\"Quit\");\n self.menubar.append(&file_item);\n file_menu.append(&quit_item);\n self.menubar.show_all();\n\n connect!(quit_item, connect_activate(_), self.model.relm, Quit);\n }\n\n fn model(relm: &Relm, _: ()) -> Model {\n Model {\n relm: relm.clone(),\n }\n }\n\n fn update(&mut self, event: Msg) {\n match event {\n Quit => gtk::main_quit(),\n }\n }\n\n view! 
{\n gtk::Window {\n gtk::Box {\n orientation: Vertical,\n #[name=\"menubar\"]\n gtk::MenuBar {\n },\n },\n delete_event(_, _) => (Quit, Inhibit(false)),\n }\n }\n}\n\nfn main() {\n Win::run(()).expect(\"Win::run failed\");\n}\n<|endoftext|>"} {"text":"Define a wordpub mod components {\n\n pub struct word {\n val: [u8; 3],\n }\n \n impl word {\n pub fn new() -> Self {\n word { val: [0; 3] }\n }\n }\n \n} \/* components *\/\n<|endoftext|>"} {"text":"build scriptuse std::env;\nuse std::fs::File;\nuse std::io::Write;\nuse std::path::Path;\n\nfn main() {\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n let dest_path = Path::new(&out_dir).join(\"hello.rs\");\n let mut f = File::create(&dest_path).unwrap();\n\n f.write_all(b\"\n pub fn message() -> &'static str {\n \\\"Hello, World!\\\"\n }\n \").unwrap();\n}\n<|endoftext|>"} {"text":"CloneSlicePrelude is now in the prelude, import unnecessary ...<|endoftext|>"} {"text":"test: enforce newlines in array formatuse ron::ser::{to_string_pretty, PrettyConfig};\n\n#[test]\nfn small_array() {\n let arr = &[(), (), ()][..];\n assert_eq!(\n to_string_pretty(&arr, PrettyConfig::new().with_new_line(\"\\n\".to_string())).unwrap(),\n \"[\n (),\n (),\n (),\n]\"\n );\n}\n<|endoftext|>"} {"text":"rust katause std::mem;\n\nfn main() {\n let a: u8 = 123;\n println!(\"a = {}\", a);\n \n let mut b: i8 = 0; \/\/mutable\n println!(\"b = {}\", b);\n b = 42;\n println!(\"b = {}\", b);\n\n let mut c = 123456789;\n println!(\"c = {}, size = {}\", c, mem::size_of_val(&c));\n c = -1; \n println!(\"c = {}, size = {}\", c, mem::size_of_val(&c));\n\n let z: isize = 123; \/\/ isize\/ usize\n let size_of_z = mem::size_of_val(&z);\n println!(\"z = {}, takes up {} bytes, {}-bit OS\",\n z, size_of_z, size_of_z * 8);\n\n let d: char = 'x';\n println!(\"d = {}, size = {} bytes\", d, mem::size_of_val(&d));\n\n let e: f64 = 2.5;\n println!(\"e = {}, size = {} bytes\", e, mem::size_of_val(&e));\n\n let g = false;\n println!(\"g = {}, size = {} bytes\", g, mem::size_of_val(&g));\n\n let f: bool = 4 > 0;\n println!(\"f = {}, size = {} bytes\", f, mem::size_of_val(&f));\n\n println!(\"{}\", i8::min_value());\n}\n<|endoftext|>"} {"text":"Add iterator extension for reporting from iterator<|endoftext|>"} {"text":"Skeleton for problem 2342\/\/ https:\/\/leetcode.com\/problems\/max-sum-of-a-pair-with-equal-sum-of-digits\/\npub fn maximum_sum(nums: Vec) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", maximum_sum(vec![18, 43, 36, 13, 7])); \/\/ 54\n println!(\"{}\", maximum_sum(vec![10, 12, 19, 14])); \/\/ -1\n}\n<|endoftext|>"} {"text":"Add regression test for #19380\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Qiz {\n fn qiz();\n}\n\nstruct Foo;\nimpl Qiz for Foo {\n fn qiz() {}\n}\n\nstruct Bar {\n foos: &'static [&'static (Qiz + 'static)]\n}\n\nconst FOO : Foo = Foo;\nconst BAR : Bar = Bar { foos: &[&FOO]};\n\/\/~^ ERROR: cannot convert to a trait object because trait `Qiz` is not object-safe [E0038]\n\nfn main() { }\n<|endoftext|>"} {"text":"Rollup merge of #42160 - venkatagiri:issue_38821, r=Mark-Simulacrum\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub struct Nullable(T);\n\npub trait NotNull {}\n\npub trait IntoNullable {\n type Nullable;\n}\n\nimpl IntoNullable for T {\n type Nullable = Nullable;\n}\n\nimpl IntoNullable for Nullable {\n type Nullable = Nullable;\n}\n\npub trait Expression {\n type SqlType;\n}\n\npub trait Column: Expression {}\n\n#[derive(Debug, Copy, Clone)]\n\/\/~^ ERROR the trait bound `::SqlType: NotNull` is not satisfied\npub enum ColumnInsertValue where\n Col: Column,\n Expr: Expression::Nullable>,\n{\n Expression(Col, Expr),\n Default(Col),\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"\/\/! Basic DOM data structures.\n\nuse std::collections::hashmap::{HashMap, HashSet};\n\npub type AttrMap = HashMap;\n\n#[deriving(Show)]\npub struct Node {\n \/\/ data common to all nodes:\n pub children: Vec,\n\n \/\/ data specific to each node type:\n pub node_type: NodeType,\n}\n\n#[deriving(Show)]\npub enum NodeType {\n Element(ElementData),\n Text(String),\n}\n\n#[deriving(Show)]\npub struct ElementData {\n pub tag_name: String,\n pub attributes: AttrMap,\n}\n\n\/\/ Constructor functions for convenience:\n\npub fn text(data: String) -> Node {\n Node::new(vec!(), Text(data))\n}\n\npub fn elem(name: String, attrs: AttrMap, children: Vec) -> Node {\n Node::new(children, Element(ElementData {\n tag_name: name,\n attributes: attrs,\n }))\n}\n\n\/\/ Node methods\n\nimpl Node {\n fn new(children: Vec, node_type: NodeType) -> Node {\n Node {\n children: children,\n node_type: node_type\n }\n }\n}\n\n\/\/ Element methods\n\nimpl ElementData {\n pub fn get_attribute<'a>(&'a self, key: &str) -> Option<&'a String> {\n self.attributes.find_equiv(&key)\n }\n\n pub fn id<'a>(&'a self) -> Option<&'a String> {\n self.get_attribute(\"id\")\n }\n\n pub fn classes<'a>(&'a self) -> HashSet<&'a str> {\n self.get_attribute(\"class\").iter().flat_map(|classlist| {\n classlist.as_slice().split(' ')\n }).collect()\n }\n}\nSimplify DOM constructors\/\/! Basic DOM data structures.\n\nuse std::collections::hashmap::{HashMap, HashSet};\n\npub type AttrMap = HashMap;\n\n#[deriving(Show)]\npub struct Node {\n \/\/ data common to all nodes:\n pub children: Vec,\n\n \/\/ data specific to each node type:\n pub node_type: NodeType,\n}\n\n#[deriving(Show)]\npub enum NodeType {\n Element(ElementData),\n Text(String),\n}\n\n#[deriving(Show)]\npub struct ElementData {\n pub tag_name: String,\n pub attributes: AttrMap,\n}\n\n\/\/ Constructor functions for convenience:\n\npub fn text(data: String) -> Node {\n Node { children: vec![], node_type: Text(data) }\n}\n\npub fn elem(name: String, attrs: AttrMap, children: Vec) -> Node {\n Node {\n children: children,\n node_type: Element(ElementData {\n tag_name: name,\n attributes: attrs,\n })\n }\n}\n\n\/\/ Element methods\n\nimpl ElementData {\n pub fn get_attribute<'a>(&'a self, key: &str) -> Option<&'a String> {\n self.attributes.find_equiv(&key)\n }\n\n pub fn id<'a>(&'a self) -> Option<&'a String> {\n self.get_attribute(\"id\")\n }\n\n pub fn classes<'a>(&'a self) -> HashSet<&'a str> {\n self.get_attribute(\"class\").iter().flat_map(|classlist| {\n classlist.as_slice().split(' ')\n }).collect()\n }\n}\n<|endoftext|>"} {"text":"\/\/! 
Basic DOM data structures.\n\nuse std::collections::{HashMap,HashSet};\n\npub type AttrMap = HashMap;\n\n#[deriving(Show)]\npub struct Node {\n \/\/ data common to all nodes:\n pub children: Vec,\n\n \/\/ data specific to each node type:\n pub node_type: NodeType,\n}\n\n#[deriving(Show)]\npub enum NodeType {\n Element(ElementData),\n Text(String),\n}\n\n#[deriving(Show)]\npub struct ElementData {\n pub tag_name: String,\n pub attributes: AttrMap,\n}\n\n\/\/ Constructor functions for convenience:\n\npub fn text(data: String) -> Node {\n Node { children: vec![], node_type: NodeType::Text(data) }\n}\n\npub fn elem(name: String, attrs: AttrMap, children: Vec) -> Node {\n Node {\n children: children,\n node_type: NodeType::Element(ElementData {\n tag_name: name,\n attributes: attrs,\n })\n }\n}\n\n\/\/ Element methods\n\nimpl ElementData {\n pub fn get_attribute(&self, key: &str) -> Option<&String> {\n self.attributes.get(key)\n }\n\n pub fn id(&self) -> Option<&String> {\n self.get_attribute(\"id\")\n }\n\n pub fn classes(&self) -> HashSet<&str> {\n match self.get_attribute(\"class\") {\n Some(classlist) => classlist.as_slice().split(' ').collect(),\n None => HashSet::new()\n }\n }\n}\nSimplify attribute lookup methods\/\/! Basic DOM data structures.\n\nuse std::collections::{HashMap,HashSet};\n\npub type AttrMap = HashMap;\n\n#[deriving(Show)]\npub struct Node {\n \/\/ data common to all nodes:\n pub children: Vec,\n\n \/\/ data specific to each node type:\n pub node_type: NodeType,\n}\n\n#[deriving(Show)]\npub enum NodeType {\n Element(ElementData),\n Text(String),\n}\n\n#[deriving(Show)]\npub struct ElementData {\n pub tag_name: String,\n pub attributes: AttrMap,\n}\n\n\/\/ Constructor functions for convenience:\n\npub fn text(data: String) -> Node {\n Node { children: vec![], node_type: NodeType::Text(data) }\n}\n\npub fn elem(name: String, attrs: AttrMap, children: Vec) -> Node {\n Node {\n children: children,\n node_type: NodeType::Element(ElementData {\n tag_name: name,\n attributes: attrs,\n })\n }\n}\n\n\/\/ Element methods\n\nimpl ElementData {\n pub fn id(&self) -> Option<&String> {\n self.attributes.get(\"id\")\n }\n\n pub fn classes(&self) -> HashSet<&str> {\n match self.attributes.get(\"class\") {\n Some(classlist) => classlist.as_slice().split(' ').collect(),\n None => HashSet::new()\n }\n }\n}\n<|endoftext|>"} {"text":"Add as_bytes method to MaybeUtf8<|endoftext|>"} {"text":"Refactor domain_match to be easier to read.<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(duration)]\n\nextern crate libc;\n\nuse std::io;\nuse std::ops::Neg;\nuse std::net::{ToSocketAddrs, SocketAddr};\n\nuse utils::{One, NetInt};\n\nmod tcp;\nmod udp;\nmod socket;\nmod ext;\nmod utils;\n\n#[cfg(unix)] #[path = \"unix\/mod.rs\"] mod sys;\n\npub use tcp::TcpBuilder;\npub use udp::UdpBuilder;\npub use ext::{TcpStreamExt, TcpListenerExt, UdpSocketExt};\n\nfn one_addr(tsa: T) -> io::Result {\n let mut addrs = try!(tsa.to_socket_addrs());\n let addr = match addrs.next() {\n Some(addr) => addr,\n None => return Err(io::Error::new(io::ErrorKind::Other,\n \"no socket addresses could be resolved\"))\n };\n if addrs.next().is_none() {\n Ok(addr)\n } else {\n Err(io::Error::new(io::ErrorKind::Other,\n \"more than one address resolved\"))\n }\n}\n\nfn cvt>(t: T) -> io::Result {\n let one: T = T::one();\n if t == -one {\n Err(io::Error::last_os_error())\n } else {\n Ok(t)\n }\n}\n\nfn hton(i: I) -> I { i.to_be() }\n\ntrait AsInner {\n type Inner;\n fn as_inner(&self) -> &Self::Inner;\n}\n\ntrait FromInner {\n type Inner;\n fn from_inner(inner: Self::Inner) -> Self;\n}\n\ntrait IntoInner {\n type Inner;\n fn into_inner(self) -> Self::Inner;\n}\nReexport *BuilderExt\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(duration)]\n\nextern crate libc;\n\nuse std::io;\nuse std::ops::Neg;\nuse std::net::{ToSocketAddrs, SocketAddr};\n\nuse utils::{One, NetInt};\n\nmod tcp;\nmod udp;\nmod socket;\nmod ext;\nmod utils;\n\n#[cfg(unix)] #[path = \"unix\/mod.rs\"] mod sys;\n\npub use tcp::TcpBuilder;\npub use udp::UdpBuilder;\npub use ext::{TcpStreamExt, TcpListenerExt, UdpSocketExt};\npub use ext::{TcpBuilderExt, UdpBuilderExt};\n\nfn one_addr(tsa: T) -> io::Result {\n let mut addrs = try!(tsa.to_socket_addrs());\n let addr = match addrs.next() {\n Some(addr) => addr,\n None => return Err(io::Error::new(io::ErrorKind::Other,\n \"no socket addresses could be resolved\"))\n };\n if addrs.next().is_none() {\n Ok(addr)\n } else {\n Err(io::Error::new(io::ErrorKind::Other,\n \"more than one address resolved\"))\n }\n}\n\nfn cvt>(t: T) -> io::Result {\n let one: T = T::one();\n if t == -one {\n Err(io::Error::last_os_error())\n } else {\n Ok(t)\n }\n}\n\nfn hton(i: I) -> I { i.to_be() }\n\ntrait AsInner {\n type Inner;\n fn as_inner(&self) -> &Self::Inner;\n}\n\ntrait FromInner {\n type Inner;\n fn from_inner(inner: Self::Inner) -> Self;\n}\n\ntrait IntoInner {\n type Inner;\n fn into_inner(self) -> Self::Inner;\n}\n<|endoftext|>"} {"text":"Demonstrated rotation works for line collison, I'm just stupid<|endoftext|>"} {"text":"Remove unecessary feature gate 'io'<|endoftext|>"} {"text":":hammer: Fix stub<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::infer::canonical::{Canonical, QueryResult};\nuse rustc::traits::{self, FulfillmentContext, ObligationCause, SelectionContext};\nuse rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};\nuse rustc::ty::{ParamEnvAnd, TyCtxt};\nuse rustc_data_structures::sync::Lrc;\nuse syntax::ast::DUMMY_NODE_ID;\nuse syntax_pos::DUMMY_SP;\nuse std::sync::atomic::Ordering;\n\ncrate fn normalize_projection_ty<'tcx>(\n tcx: TyCtxt<'_, 'tcx, 'tcx>,\n goal: CanonicalProjectionGoal<'tcx>,\n) -> Result>>>, NoSolution> {\n debug!(\"normalize_provider(goal={:#?})\", goal);\n\n tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);\n tcx.infer_ctxt().enter(|ref infcx| {\n let (\n ParamEnvAnd {\n param_env,\n value: goal,\n },\n canonical_inference_vars,\n ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);\n let fulfill_cx = &mut FulfillmentContext::new();\n let selcx = &mut SelectionContext::new(infcx);\n let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);\n let mut obligations = vec![];\n let answer =\n traits::normalize_projection_type(selcx, param_env, goal, cause, 0, &mut obligations);\n fulfill_cx.register_predicate_obligations(infcx, obligations);\n\n \/\/ Now that we have fulfilled as much as we can, create a solution\n \/\/ from what we've learned.\n infcx.make_canonicalized_query_result(\n canonical_inference_vars,\n NormalizationResult { normalized_ty: answer },\n fulfill_cx,\n )\n })\n}\nuse query boiler plate for `normalize_projection_ty` too\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::infer::canonical::{Canonical, QueryResult};\nuse rustc::infer::InferOk;\nuse rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};\nuse rustc::traits::{self, ObligationCause, SelectionContext};\nuse rustc::ty::{ParamEnvAnd, TyCtxt};\nuse rustc_data_structures::sync::Lrc;\nuse std::sync::atomic::Ordering;\nuse syntax::ast::DUMMY_NODE_ID;\nuse syntax_pos::DUMMY_SP;\n\ncrate fn normalize_projection_ty<'tcx>(\n tcx: TyCtxt<'_, 'tcx, 'tcx>,\n goal: CanonicalProjectionGoal<'tcx>,\n) -> Result>>>, NoSolution> {\n debug!(\"normalize_provider(goal={:#?})\", goal);\n\n tcx.sess\n .perf_stats\n .normalize_projection_ty\n .fetch_add(1, Ordering::Relaxed);\n tcx.infer_ctxt().enter_canonical_trait_query(\n &goal,\n |infcx,\n ParamEnvAnd {\n param_env,\n value: goal,\n }| {\n let selcx = &mut SelectionContext::new(infcx);\n let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);\n let mut obligations = vec![];\n let answer = traits::normalize_projection_type(\n selcx,\n param_env,\n goal,\n cause,\n 0,\n &mut obligations,\n );\n Ok(InferOk {\n value: NormalizationResult {\n normalized_ty: answer,\n },\n obligations,\n })\n },\n )\n}\n<|endoftext|>"} {"text":"Remove unused file<|endoftext|>"} {"text":"Unused constant<|endoftext|>"} {"text":"object safety implemented<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that we give the generic E0495 when one of the free regions is\n\/\/ bound in a closure (rather than suggesting a change to the signature\n\/\/ of the closure, which is not specified in `foo` but rather in `invoke`).\n\n\/\/ FIXME - This might be better as a UI test, but the finer details\n\/\/ of the error seem to vary on different machines.\nfn invoke<'a, F>(x: &'a i32, f: F) -> &'a i32\nwhere F: FnOnce(&'a i32, &i32) -> &'a i32\n{\n let y = 22;\n f(x, &y)\n}\n\nfn foo<'a>(x: &'a i32) { \/\/~ ERROR E0495\n invoke(&x, |a, b| if a > b { a } else { b });\n}\n\nfn main() {}\nmove ERROR line\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that we give the generic E0495 when one of the free regions is\n\/\/ bound in a closure (rather than suggesting a change to the signature\n\/\/ of the closure, which is not specified in `foo` but rather in `invoke`).\n\n\/\/ FIXME - This might be better as a UI test, but the finer details\n\/\/ of the error seem to vary on different machines.\nfn invoke<'a, F>(x: &'a i32, f: F) -> &'a i32\nwhere F: FnOnce(&'a i32, &i32) -> &'a i32\n{\n let y = 22;\n f(x, &y)\n}\n\nfn foo<'a>(x: &'a i32) {\n invoke(&x, |a, b| if a > b { a } else { b }); \/\/~ ERROR E0495\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"Added a test case for #506\/*\n A reduced test case for Issue #506, provided by Rob Arnold.\n*\/\n\nnative \"rust\" mod rustrt {\n fn task_yield();\n}\n\nfn main() {\n spawn rustrt::task_yield();\n}\n\n<|endoftext|>"} {"text":"Complete the parser. Remove tests for parsing attributes.<|endoftext|>"} {"text":"Revert \"Deny non-absolut import pathes\"<|endoftext|>"} {"text":"Add RethinkDB errors to module `errors`<|endoftext|>"} {"text":"Use the connection module in RethinkDb<|endoftext|>"} {"text":"Add code for low-power-embedded-gamepub fn divmod(dividend: i16, divisor: i16) -> (i16, i16) {\n (dividend \/ divisor, dividend % divisor)\n}\n\npub fn evens(iter: impl Iterator) -> impl Iterator {\n iter.step_by(2)\n}\n\npub struct Position(pub i16, pub i16);\nimpl Position {\n pub fn manhattan(&self) -> i16 {\n self.0.abs() + self.1.abs()\n }\n}\n<|endoftext|>"} {"text":"difference between functions and closures<|endoftext|>"} {"text":"Add a test case for conditional drop\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(generators, generator_trait)]\n\nuse std::ops::Generator;\nuse std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};\n\nstatic A: AtomicUsize = ATOMIC_USIZE_INIT;\n\nstruct B;\n\nimpl Drop for B {\n fn drop(&mut self) {\n A.fetch_add(1, Ordering::SeqCst);\n }\n}\n\n\nfn test() -> bool { true }\nfn test2() -> bool { false }\n\nfn main() {\n t1();\n t2();\n}\n\nfn t1() {\n let mut a = || {\n let b = B;\n if test() {\n drop(b);\n }\n yield;\n };\n\n let n = A.load(Ordering::SeqCst);\n a.resume();\n assert_eq!(A.load(Ordering::SeqCst), n + 1);\n a.resume();\n assert_eq!(A.load(Ordering::SeqCst), n + 1);\n}\n\nfn t2() {\n let mut a = || {\n let b = B;\n if test2() {\n drop(b);\n }\n yield;\n };\n\n let n = A.load(Ordering::SeqCst);\n a.resume();\n assert_eq!(A.load(Ordering::SeqCst), n);\n a.resume();\n assert_eq!(A.load(Ordering::SeqCst), n + 1);\n}\n<|endoftext|>"} {"text":":art: Refactor the Slack Client<|endoftext|>"} {"text":"Add test: check with imag-ids whether no entries exist after init<|endoftext|>"} {"text":"add `volatile_load` and `volatile_store`<|endoftext|>"} {"text":"\"Morden\" as chain name produces incorrect rlp of transaction Added proper handling for 'morden' inside utils<|endoftext|>"} {"text":"Moved test utils into `util` module<|endoftext|>"} {"text":"Add tests for `minimum_packet_size`\/\/ Copyright (c) 2015 Robert Clipsham \n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(core, collections, custom_attribute, plugin)]\n#![plugin(pnet_macros)]\n\nextern crate pnet;\nextern crate pnet_macros;\n\nuse pnet_macros::types::*;\n\n#[packet]\npub struct ByteAligned {\n banana: u8,\n #[payload]\n payload: Vec\n}\n\n\n#[packet]\npub struct ByteAlignedWithVariableLength {\n banana: u16,\n #[length_fn = \"length_fn1\"]\n #[payload]\n payload: Vec\n}\n\nfn length_fn1(_: &ByteAlignedWithVariableLengthPacket) -> usize {\n unimplemented!()\n}\n\n\n#[packet]\npub struct ByteAlignedWithVariableLengthAndPayload {\n banana: u32,\n #[length_fn = \"length_fn2\"]\n var_length: Vec,\n #[payload]\n payload: Vec\n}\n\nfn length_fn2(_: &ByteAlignedWithVariableLengthAndPayloadPacket) -> usize {\n unimplemented!()\n}\n\n\n#[packet]\npub struct NonByteAligned {\n banana: u3,\n tomato: u5,\n #[payload]\n payload: Vec\n}\n\n\n#[packet]\npub struct NonByteAlignedWithVariableLength {\n banana: u11be,\n tomato: u21be,\n #[length_fn = \"length_fn3\"]\n #[payload]\n payload: Vec\n}\n\nfn length_fn3(_: &NonByteAlignedWithVariableLengthPacket) -> usize {\n unimplemented!()\n}\n\n\n#[packet]\npub struct NonByteAlignedWithVariableLengthAndPayload {\n banana: u7,\n tomato: u9be,\n #[length_fn = \"length_fn4\"]\n var_length: Vec,\n #[payload]\n payload: Vec\n}\n\nfn length_fn4(_: &NonByteAlignedWithVariableLengthAndPayloadPacket) -> usize {\n unimplemented!()\n}\n\n\nfn main() {\n assert_eq!(ByteAlignedPacket::minimum_packet_size(), 1);\n assert_eq!(ByteAlignedWithVariableLengthPacket::minimum_packet_size(), 2);\n assert_eq!(ByteAlignedWithVariableLengthAndPayloadPacket::minimum_packet_size(), 4);\n assert_eq!(NonByteAlignedPacket::minimum_packet_size(), 1);\n assert_eq!(NonByteAlignedWithVariableLengthPacket::minimum_packet_size(), 4);\n assert_eq!(NonByteAlignedWithVariableLengthAndPayloadPacket::minimum_packet_size(), 
2);\n}\n<|endoftext|>"} {"text":"Update create_media_content to r0.4.0<|endoftext|>"} {"text":"Add option to override ref data in contact entries<|endoftext|>"} {"text":"Add example of decode for single filesextern crate docopt;\nextern crate flac;\nextern crate hound;\nextern crate nom;\nextern crate rustc_serialize;\n\nuse docopt::Docopt;\nuse flac::Stream;\nuse std::env;\nuse std::error::Error;\n\nconst USAGE: &'static str = \"\nUsage: decode \n decode ... \n decode --help\n\nOptions:\n -h, --help Show this message.\n\";\n\n#[derive(RustcDecodable)]\nstruct Arguments {\n arg_input: Vec,\n arg_output: Option,\n arg_dir: Option,\n}\n\nfn decode_file(input_file: &str, output_file: &str)\n -> Result<(), hound::Error> {\n let mut stream = try! {\n Stream::from_file(input_file).map_err(hound::Error::IoError)\n };\n let info = stream.info();\n let frames_len = stream.frames.len();\n\n let spec = hound::WavSpec {\n channels: info.channels as u16,\n sample_rate: info.sample_rate,\n bits_per_sample: info.bits_per_sample as u16,\n };\n\n let mut output = try!(hound::WavWriter::create(output_file, spec));\n\n for _ in 0..frames_len {\n if let Some(buffer) = stream.next_frame() {\n let buffer_size = buffer.len();\n let block_size = buffer_size \/ 2;\n let left = &buffer[0..block_size];\n let right = &buffer[block_size..buffer_size];\n\n for i in 0..block_size {\n try!(output.write_sample(left[i]));\n try!(output.write_sample(right[i]));\n }\n } else {\n break;\n }\n }\n\n output.finalize()\n}\n\nfn main() {\n let args: Arguments = Docopt::new(USAGE)\n .and_then(|d| d.argv(env::args()).decode())\n .unwrap_or_else(|e| e.exit());\n\n if let Some(ref output_file) = args.arg_output {\n let input_file = &args.arg_input[0];\n\n if let Err(e) = decode_file(input_file, output_file) {\n println!(\"{:?}\", e);\n } else {\n println!(\"decoded: {} -> {}\", input_file, output_file);\n }\n }\n}\n<|endoftext|>"} {"text":"Add hello_triangle Exampleextern crate bootstrap_rs as bootstrap;\nextern crate gl_util as gl;\n\nuse bootstrap::window::*;\nuse gl::*;\n\nstatic VERTEX_POSITIONS: [f32; 9] = [\n -1.0, -1.0, 0.0,\n 1.0, -1.0, 0.0,\n 0.0, 1.0, 0.0,\n];\n\nfn main() {\n let mut window = Window::new(\"Hello, Triangle!\");\n\n gl::init();\n let mut vertex_buffer = VertexBuffer::new();\n vertex_buffer.set_data_f32(&VERTEX_POSITIONS[..]);\n vertex_buffer.set_attrib_f32(AttributeLocation::from_index(0), 3, 0, 0);\n\n 'outer: loop {\n while let Some(message) = window.next_message() {\n match message {\n Message::Close => break 'outer,\n _ => {},\n }\n\n gl::clear();\n vertex_buffer.draw(DrawMode::Triangles, 0, 3);\n gl::swap_buffers();\n }\n }\n}\n<|endoftext|>"} {"text":"Remove functionality to delete whole wiki<|endoftext|>"} {"text":"use std::collections::HashMap;\nuse std::fs;\nuse std::io::prelude::*;\nuse std::path::PathBuf;\nuse std::str;\nuse std::sync::{Mutex, Arc};\n\nuse core::{PackageId, PackageSet};\nuse util::{CargoResult, human, Human};\nuse util::{internal, ChainError, profile, paths};\nuse util::Freshness;\n\nuse super::job::Work;\nuse super::{fingerprint, process, Kind, Context, Unit};\nuse super::CommandType;\n\n\/\/\/ Contains the parsed output of a custom build script.\n#[derive(Clone, Debug)]\npub struct BuildOutput {\n \/\/\/ Paths to pass to rustc with the `-L` flag\n pub library_paths: Vec,\n \/\/\/ Names and link kinds of libraries, suitable for the `-l` flag\n pub library_links: Vec,\n \/\/\/ Various `--cfg` flags to pass to the compiler\n pub cfgs: Vec,\n \/\/\/ Metadata to pass to the immediate 
dependencies\n pub metadata: Vec<(String, String)>,\n}\n\npub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;\n\npub struct BuildState {\n pub outputs: Mutex,\n}\n\n#[derive(Default)]\npub struct BuildScripts {\n pub to_link: Vec<(PackageId, Kind)>,\n pub plugins: Vec,\n}\n\n\/\/\/ Prepares a `Work` that executes the target as a custom build script.\n\/\/\/\n\/\/\/ The `req` given is the requirement which this run of the build script will\n\/\/\/ prepare work for. If the requirement is specified as both the target and the\n\/\/\/ host platforms it is assumed that the two are equal and the build script is\n\/\/\/ only run once (not twice).\npub fn prepare(cx: &mut Context, unit: &Unit)\n -> CargoResult<(Work, Work, Freshness)> {\n let _p = profile::start(format!(\"build script prepare: {}\/{}\",\n unit.pkg, unit.target.name()));\n let (script_output, build_output) = {\n (cx.layout(unit.pkg, Kind::Host).build(unit.pkg),\n cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))\n };\n\n \/\/ Building the command to execute\n let to_exec = script_output.join(unit.target.name());\n\n \/\/ Start preparing the process to execute, starting out with some\n \/\/ environment variables. Note that the profile-related environment\n \/\/ variables are not set with this the build script's profile but rather the\n \/\/ package's library profile.\n let profile = cx.lib_profile(unit.pkg.package_id());\n let to_exec = to_exec.into_os_string();\n let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));\n p.env(\"OUT_DIR\", &build_output)\n .env(\"CARGO_MANIFEST_DIR\", unit.pkg.root())\n .env(\"NUM_JOBS\", &cx.jobs().to_string())\n .env(\"TARGET\", &match unit.kind {\n Kind::Host => &cx.config.rustc_info().host[..],\n Kind::Target => cx.target_triple(),\n })\n .env(\"DEBUG\", &profile.debuginfo.to_string())\n .env(\"OPT_LEVEL\", &profile.opt_level.to_string())\n .env(\"PROFILE\", if cx.build_config.release {\"release\"} else {\"debug\"})\n .env(\"HOST\", &cx.config.rustc_info().host);\n\n \/\/ Be sure to pass along all enabled features for this package, this is the\n \/\/ last piece of statically known information that we have.\n if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {\n for feat in features.iter() {\n p.env(&format!(\"CARGO_FEATURE_{}\", super::envify(feat)), \"1\");\n }\n }\n\n \/\/ Gather the set of native dependencies that this package has along with\n \/\/ some other variables to close over.\n \/\/\n \/\/ This information will be used at build-time later on to figure out which\n \/\/ sorts of variables need to be discovered at that time.\n let lib_deps = {\n cx.dep_run_custom_build(unit, true).iter().filter_map(|unit| {\n if unit.profile.run_custom_build {\n Some((unit.pkg.manifest().links().unwrap().to_string(),\n unit.pkg.package_id().clone()))\n } else {\n None\n }\n }).collect::>()\n };\n let pkg_name = unit.pkg.to_string();\n let build_state = cx.build_state.clone();\n let id = unit.pkg.package_id().clone();\n let all = (id.clone(), pkg_name.clone(), build_state.clone(),\n build_output.clone());\n let build_scripts = super::load_build_deps(cx, unit);\n let kind = unit.kind;\n\n try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));\n try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));\n\n let exec_engine = cx.exec_engine.clone();\n\n \/\/ Prepare the unit of \"dirty work\" which will actually run the custom build\n \/\/ command.\n \/\/\n \/\/ Note that this has to do some extra work just before running the 
command\n \/\/ to determine extra environment variables and such.\n let work = Work::new(move |desc_tx| {\n \/\/ Make sure that OUT_DIR exists.\n \/\/\n \/\/ If we have an old build directory, then just move it into place,\n \/\/ otherwise create it!\n if fs::metadata(&build_output).is_err() {\n try!(fs::create_dir(&build_output).chain_error(|| {\n internal(\"failed to create script output directory for \\\n build command\")\n }));\n }\n\n \/\/ For all our native lib dependencies, pick up their metadata to pass\n \/\/ along to this custom build command. We're also careful to augment our\n \/\/ dynamic library search path in case the build script depended on any\n \/\/ native dynamic libraries.\n {\n let build_state = build_state.outputs.lock().unwrap();\n for (name, id) in lib_deps {\n let key = (id.clone(), kind);\n let state = try!(build_state.get(&key).chain_error(|| {\n internal(format!(\"failed to locate build state for env \\\n vars: {}\/{:?}\", id, kind))\n }));\n let data = &state.metadata;\n for &(ref key, ref value) in data.iter() {\n p.env(&format!(\"DEP_{}_{}\", super::envify(&name),\n super::envify(key)), value);\n }\n }\n if let Some(build_scripts) = build_scripts {\n try!(super::add_plugin_deps(&mut p, &build_state,\n &build_scripts));\n }\n }\n\n \/\/ And now finally, run the build command itself!\n desc_tx.send(p.to_string()).ok();\n let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {\n e.desc = format!(\"failed to run custom build command for `{}`\\n{}\",\n pkg_name, e.desc);\n Human(e)\n }));\n\n \/\/ After the build command has finished running, we need to be sure to\n \/\/ remember all of its output so we can later discover precisely what it\n \/\/ was, even if we don't run the build command again (due to freshness).\n \/\/\n \/\/ This is also the location where we provide feedback into the build\n \/\/ state informing what variables were discovered via our script as\n \/\/ well.\n let output = try!(str::from_utf8(&output.stdout).map_err(|_| {\n human(\"build script output was not valid utf-8\")\n }));\n let parsed_output = try!(BuildOutput::parse(output, &pkg_name));\n build_state.insert(id, kind, parsed_output);\n\n try!(paths::write(&build_output.parent().unwrap().join(\"output\"),\n output.as_bytes()));\n Ok(())\n });\n\n \/\/ Now that we've prepared our work-to-do, we need to prepare the fresh work\n \/\/ itself to run when we actually end up just discarding what we calculated\n \/\/ above.\n \/\/\n \/\/ Note that the freshness calculation here is the build_cmd freshness, not\n \/\/ target specific freshness. 
This is because we don't actually know what\n \/\/ the inputs are to this command!\n \/\/\n \/\/ Also note that a fresh build command needs to\n let (freshness, dirty, fresh) =\n try!(fingerprint::prepare_build_cmd(cx, unit));\n let dirty = work.then(dirty);\n let fresh = Work::new(move |_tx| {\n let (id, pkg_name, build_state, build_output) = all;\n let contents = try!(paths::read(&build_output.parent().unwrap()\n .join(\"output\")));\n let output = try!(BuildOutput::parse(&contents, &pkg_name));\n build_state.insert(id, kind, output);\n Ok(())\n }).then(fresh);\n\n Ok((dirty, fresh, freshness))\n}\n\nimpl BuildState {\n pub fn new(config: &super::BuildConfig,\n packages: &PackageSet) -> BuildState {\n let mut sources = HashMap::new();\n for package in packages.iter() {\n match package.manifest().links() {\n Some(links) => {\n sources.insert(links.to_string(),\n package.package_id().clone());\n }\n None => {}\n }\n }\n let mut outputs = HashMap::new();\n let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));\n let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));\n for ((name, output), kind) in i1.chain(i2) {\n \/\/ If no package is using the library named `name`, then this is\n \/\/ just an override that we ignore.\n if let Some(id) = sources.get(name) {\n outputs.insert((id.clone(), kind), output.clone());\n }\n }\n BuildState { outputs: Mutex::new(outputs) }\n }\n\n fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {\n self.outputs.lock().unwrap().insert((id, kind), output);\n }\n}\n\nimpl BuildOutput {\n \/\/ Parses the output of a script.\n \/\/ The `pkg_name` is used for error messages.\n pub fn parse(input: &str, pkg_name: &str) -> CargoResult {\n let mut library_paths = Vec::new();\n let mut library_links = Vec::new();\n let mut cfgs = Vec::new();\n let mut metadata = Vec::new();\n let whence = format!(\"build script of `{}`\", pkg_name);\n\n for line in input.lines() {\n let mut iter = line.splitn(2, ':');\n if iter.next() != Some(\"cargo\") {\n \/\/ skip this line since it doesn't start with \"cargo:\"\n continue;\n }\n let data = match iter.next() {\n Some(val) => val,\n None => continue\n };\n\n \/\/ getting the `key=value` part of the line\n let mut iter = data.splitn(2, '=');\n let key = iter.next();\n let value = iter.next();\n let (key, value) = match (key, value) {\n (Some(a), Some(b)) => (a, b.trim_right()),\n \/\/ line started with `cargo:` but didn't match `key=value`\n _ => return Err(human(format!(\"Wrong output in {}: `{}`\",\n whence, line)))\n };\n\n match key {\n \"rustc-flags\" => {\n let (libs, links) = try!(\n BuildOutput::parse_rustc_flags(value, &whence)\n );\n library_links.extend(links.into_iter());\n library_paths.extend(libs.into_iter());\n }\n \"rustc-link-lib\" => library_links.push(value.to_string()),\n \"rustc-link-search\" => library_paths.push(PathBuf::from(value)),\n \"rustc-cfg\" => cfgs.push(value.to_string()),\n _ => metadata.push((key.to_string(), value.to_string())),\n }\n }\n\n Ok(BuildOutput {\n library_paths: library_paths,\n library_links: library_links,\n cfgs: cfgs,\n metadata: metadata,\n })\n }\n\n pub fn parse_rustc_flags(value: &str, whence: &str)\n -> CargoResult<(Vec, Vec)> {\n let value = value.trim();\n let mut flags_iter = value.split(|c: char| c.is_whitespace())\n .filter(|w| w.chars().any(|c| !c.is_whitespace()));\n let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());\n loop {\n let flag = match flags_iter.next() {\n Some(f) => f,\n None => break\n };\n if flag != \"-l\" 
&& flag != \"-L\" {\n return Err(human(format!(\"Only `-l` and `-L` flags are allowed \\\n in {}: `{}`\",\n whence, value)))\n }\n let value = match flags_iter.next() {\n Some(v) => v,\n None => return Err(human(format!(\"Flag in rustc-flags has no \\\n value in {}: `{}`\",\n whence, value)))\n };\n match flag {\n \"-l\" => library_links.push(value.to_string()),\n \"-L\" => library_paths.push(PathBuf::from(value)),\n\n \/\/ was already checked above\n _ => return Err(human(\"only -l and -L flags are allowed\"))\n };\n }\n Ok((library_paths, library_links))\n }\n}\n\n\/\/\/ Compute the `build_scripts` map in the `Context` which tracks what build\n\/\/\/ scripts each package depends on.\n\/\/\/\n\/\/\/ The global `build_scripts` map lists for all (package, kind) tuples what set\n\/\/\/ of packages' build script outputs must be considered. For example this lists\n\/\/\/ all dependencies' `-L` flags which need to be propagated transitively.\n\/\/\/\n\/\/\/ The given set of targets to this function is the initial set of\n\/\/\/ targets\/profiles which are being built.\npub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,\n units: &[Unit<'b>]) {\n let mut ret = HashMap::new();\n for unit in units {\n build(&mut ret, cx, unit);\n }\n cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {\n (k, Arc::new(v))\n }));\n\n \/\/ Recursive function to build up the map we're constructing. This function\n \/\/ memoizes all of its return values as it goes along.\n fn build<'a, 'b, 'cfg>(out: &'a mut HashMap, BuildScripts>,\n cx: &Context<'b, 'cfg>,\n unit: &Unit<'b>)\n -> &'a BuildScripts {\n \/\/ Do a quick pre-flight check to see if we've already calculated the\n \/\/ set of dependencies.\n if out.contains_key(unit) {\n return &out[unit]\n }\n\n let mut to_link = Vec::new();\n let mut plugins = Vec::new();\n\n if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {\n to_link.push((unit.pkg.package_id().clone(), unit.kind));\n }\n for unit in cx.dep_targets(unit).iter() {\n let dep_scripts = build(out, cx, unit);\n\n if unit.target.for_host() {\n plugins.extend(dep_scripts.to_link.iter()\n .map(|p| &p.0).cloned());\n } else if unit.target.linkable() {\n to_link.extend(dep_scripts.to_link.iter().cloned());\n }\n }\n\n let prev = out.entry(*unit).or_insert(BuildScripts::default());\n prev.to_link.extend(to_link);\n prev.plugins.extend(plugins);\n return prev\n }\n}\nWrite the build script output ASAPuse std::collections::HashMap;\nuse std::fs;\nuse std::io::prelude::*;\nuse std::path::PathBuf;\nuse std::str;\nuse std::sync::{Mutex, Arc};\n\nuse core::{PackageId, PackageSet};\nuse util::{CargoResult, human, Human};\nuse util::{internal, ChainError, profile, paths};\nuse util::Freshness;\n\nuse super::job::Work;\nuse super::{fingerprint, process, Kind, Context, Unit};\nuse super::CommandType;\n\n\/\/\/ Contains the parsed output of a custom build script.\n#[derive(Clone, Debug)]\npub struct BuildOutput {\n \/\/\/ Paths to pass to rustc with the `-L` flag\n pub library_paths: Vec,\n \/\/\/ Names and link kinds of libraries, suitable for the `-l` flag\n pub library_links: Vec,\n \/\/\/ Various `--cfg` flags to pass to the compiler\n pub cfgs: Vec,\n \/\/\/ Metadata to pass to the immediate dependencies\n pub metadata: Vec<(String, String)>,\n}\n\npub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;\n\npub struct BuildState {\n pub outputs: Mutex,\n}\n\n#[derive(Default)]\npub struct BuildScripts {\n pub to_link: Vec<(PackageId, Kind)>,\n pub plugins: Vec,\n}\n\n\/\/\/ Prepares a `Work` that 
executes the target as a custom build script.\n\/\/\/\n\/\/\/ The `req` given is the requirement which this run of the build script will\n\/\/\/ prepare work for. If the requirement is specified as both the target and the\n\/\/\/ host platforms it is assumed that the two are equal and the build script is\n\/\/\/ only run once (not twice).\npub fn prepare(cx: &mut Context, unit: &Unit)\n -> CargoResult<(Work, Work, Freshness)> {\n let _p = profile::start(format!(\"build script prepare: {}\/{}\",\n unit.pkg, unit.target.name()));\n let (script_output, build_output) = {\n (cx.layout(unit.pkg, Kind::Host).build(unit.pkg),\n cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))\n };\n\n \/\/ Building the command to execute\n let to_exec = script_output.join(unit.target.name());\n\n \/\/ Start preparing the process to execute, starting out with some\n \/\/ environment variables. Note that the profile-related environment\n \/\/ variables are not set with this the build script's profile but rather the\n \/\/ package's library profile.\n let profile = cx.lib_profile(unit.pkg.package_id());\n let to_exec = to_exec.into_os_string();\n let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));\n p.env(\"OUT_DIR\", &build_output)\n .env(\"CARGO_MANIFEST_DIR\", unit.pkg.root())\n .env(\"NUM_JOBS\", &cx.jobs().to_string())\n .env(\"TARGET\", &match unit.kind {\n Kind::Host => &cx.config.rustc_info().host[..],\n Kind::Target => cx.target_triple(),\n })\n .env(\"DEBUG\", &profile.debuginfo.to_string())\n .env(\"OPT_LEVEL\", &profile.opt_level.to_string())\n .env(\"PROFILE\", if cx.build_config.release {\"release\"} else {\"debug\"})\n .env(\"HOST\", &cx.config.rustc_info().host);\n\n \/\/ Be sure to pass along all enabled features for this package, this is the\n \/\/ last piece of statically known information that we have.\n if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {\n for feat in features.iter() {\n p.env(&format!(\"CARGO_FEATURE_{}\", super::envify(feat)), \"1\");\n }\n }\n\n \/\/ Gather the set of native dependencies that this package has along with\n \/\/ some other variables to close over.\n \/\/\n \/\/ This information will be used at build-time later on to figure out which\n \/\/ sorts of variables need to be discovered at that time.\n let lib_deps = {\n cx.dep_run_custom_build(unit, true).iter().filter_map(|unit| {\n if unit.profile.run_custom_build {\n Some((unit.pkg.manifest().links().unwrap().to_string(),\n unit.pkg.package_id().clone()))\n } else {\n None\n }\n }).collect::>()\n };\n let pkg_name = unit.pkg.to_string();\n let build_state = cx.build_state.clone();\n let id = unit.pkg.package_id().clone();\n let all = (id.clone(), pkg_name.clone(), build_state.clone(),\n build_output.clone());\n let build_scripts = super::load_build_deps(cx, unit);\n let kind = unit.kind;\n\n try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));\n try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));\n\n let exec_engine = cx.exec_engine.clone();\n\n \/\/ Prepare the unit of \"dirty work\" which will actually run the custom build\n \/\/ command.\n \/\/\n \/\/ Note that this has to do some extra work just before running the command\n \/\/ to determine extra environment variables and such.\n let work = Work::new(move |desc_tx| {\n \/\/ Make sure that OUT_DIR exists.\n \/\/\n \/\/ If we have an old build directory, then just move it into place,\n \/\/ otherwise create it!\n if fs::metadata(&build_output).is_err() {\n 
try!(fs::create_dir(&build_output).chain_error(|| {\n internal(\"failed to create script output directory for \\\n build command\")\n }));\n }\n\n \/\/ For all our native lib dependencies, pick up their metadata to pass\n \/\/ along to this custom build command. We're also careful to augment our\n \/\/ dynamic library search path in case the build script depended on any\n \/\/ native dynamic libraries.\n {\n let build_state = build_state.outputs.lock().unwrap();\n for (name, id) in lib_deps {\n let key = (id.clone(), kind);\n let state = try!(build_state.get(&key).chain_error(|| {\n internal(format!(\"failed to locate build state for env \\\n vars: {}\/{:?}\", id, kind))\n }));\n let data = &state.metadata;\n for &(ref key, ref value) in data.iter() {\n p.env(&format!(\"DEP_{}_{}\", super::envify(&name),\n super::envify(key)), value);\n }\n }\n if let Some(build_scripts) = build_scripts {\n try!(super::add_plugin_deps(&mut p, &build_state,\n &build_scripts));\n }\n }\n\n \/\/ And now finally, run the build command itself!\n desc_tx.send(p.to_string()).ok();\n let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| {\n e.desc = format!(\"failed to run custom build command for `{}`\\n{}\",\n pkg_name, e.desc);\n Human(e)\n }));\n try!(paths::write(&build_output.parent().unwrap().join(\"output\"),\n &output.stdout));\n\n \/\/ After the build command has finished running, we need to be sure to\n \/\/ remember all of its output so we can later discover precisely what it\n \/\/ was, even if we don't run the build command again (due to freshness).\n \/\/\n \/\/ This is also the location where we provide feedback into the build\n \/\/ state informing what variables were discovered via our script as\n \/\/ well.\n let output = try!(str::from_utf8(&output.stdout).map_err(|_| {\n human(\"build script output was not valid utf-8\")\n }));\n let parsed_output = try!(BuildOutput::parse(output, &pkg_name));\n build_state.insert(id, kind, parsed_output);\n Ok(())\n });\n\n \/\/ Now that we've prepared our work-to-do, we need to prepare the fresh work\n \/\/ itself to run when we actually end up just discarding what we calculated\n \/\/ above.\n \/\/\n \/\/ Note that the freshness calculation here is the build_cmd freshness, not\n \/\/ target specific freshness. 
This is because we don't actually know what\n \/\/ the inputs are to this command!\n \/\/\n \/\/ Also note that a fresh build command needs to\n let (freshness, dirty, fresh) =\n try!(fingerprint::prepare_build_cmd(cx, unit));\n let dirty = work.then(dirty);\n let fresh = Work::new(move |_tx| {\n let (id, pkg_name, build_state, build_output) = all;\n let contents = try!(paths::read(&build_output.parent().unwrap()\n .join(\"output\")));\n let output = try!(BuildOutput::parse(&contents, &pkg_name));\n build_state.insert(id, kind, output);\n Ok(())\n }).then(fresh);\n\n Ok((dirty, fresh, freshness))\n}\n\nimpl BuildState {\n pub fn new(config: &super::BuildConfig,\n packages: &PackageSet) -> BuildState {\n let mut sources = HashMap::new();\n for package in packages.iter() {\n match package.manifest().links() {\n Some(links) => {\n sources.insert(links.to_string(),\n package.package_id().clone());\n }\n None => {}\n }\n }\n let mut outputs = HashMap::new();\n let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));\n let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));\n for ((name, output), kind) in i1.chain(i2) {\n \/\/ If no package is using the library named `name`, then this is\n \/\/ just an override that we ignore.\n if let Some(id) = sources.get(name) {\n outputs.insert((id.clone(), kind), output.clone());\n }\n }\n BuildState { outputs: Mutex::new(outputs) }\n }\n\n fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {\n self.outputs.lock().unwrap().insert((id, kind), output);\n }\n}\n\nimpl BuildOutput {\n \/\/ Parses the output of a script.\n \/\/ The `pkg_name` is used for error messages.\n pub fn parse(input: &str, pkg_name: &str) -> CargoResult {\n let mut library_paths = Vec::new();\n let mut library_links = Vec::new();\n let mut cfgs = Vec::new();\n let mut metadata = Vec::new();\n let whence = format!(\"build script of `{}`\", pkg_name);\n\n for line in input.lines() {\n let mut iter = line.splitn(2, ':');\n if iter.next() != Some(\"cargo\") {\n \/\/ skip this line since it doesn't start with \"cargo:\"\n continue;\n }\n let data = match iter.next() {\n Some(val) => val,\n None => continue\n };\n\n \/\/ getting the `key=value` part of the line\n let mut iter = data.splitn(2, '=');\n let key = iter.next();\n let value = iter.next();\n let (key, value) = match (key, value) {\n (Some(a), Some(b)) => (a, b.trim_right()),\n \/\/ line started with `cargo:` but didn't match `key=value`\n _ => return Err(human(format!(\"Wrong output in {}: `{}`\",\n whence, line)))\n };\n\n match key {\n \"rustc-flags\" => {\n let (libs, links) = try!(\n BuildOutput::parse_rustc_flags(value, &whence)\n );\n library_links.extend(links.into_iter());\n library_paths.extend(libs.into_iter());\n }\n \"rustc-link-lib\" => library_links.push(value.to_string()),\n \"rustc-link-search\" => library_paths.push(PathBuf::from(value)),\n \"rustc-cfg\" => cfgs.push(value.to_string()),\n _ => metadata.push((key.to_string(), value.to_string())),\n }\n }\n\n Ok(BuildOutput {\n library_paths: library_paths,\n library_links: library_links,\n cfgs: cfgs,\n metadata: metadata,\n })\n }\n\n pub fn parse_rustc_flags(value: &str, whence: &str)\n -> CargoResult<(Vec, Vec)> {\n let value = value.trim();\n let mut flags_iter = value.split(|c: char| c.is_whitespace())\n .filter(|w| w.chars().any(|c| !c.is_whitespace()));\n let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());\n loop {\n let flag = match flags_iter.next() {\n Some(f) => f,\n None => break\n };\n if flag != \"-l\" 
&& flag != \"-L\" {\n return Err(human(format!(\"Only `-l` and `-L` flags are allowed \\\n in {}: `{}`\",\n whence, value)))\n }\n let value = match flags_iter.next() {\n Some(v) => v,\n None => return Err(human(format!(\"Flag in rustc-flags has no \\\n value in {}: `{}`\",\n whence, value)))\n };\n match flag {\n \"-l\" => library_links.push(value.to_string()),\n \"-L\" => library_paths.push(PathBuf::from(value)),\n\n \/\/ was already checked above\n _ => return Err(human(\"only -l and -L flags are allowed\"))\n };\n }\n Ok((library_paths, library_links))\n }\n}\n\n\/\/\/ Compute the `build_scripts` map in the `Context` which tracks what build\n\/\/\/ scripts each package depends on.\n\/\/\/\n\/\/\/ The global `build_scripts` map lists for all (package, kind) tuples what set\n\/\/\/ of packages' build script outputs must be considered. For example this lists\n\/\/\/ all dependencies' `-L` flags which need to be propagated transitively.\n\/\/\/\n\/\/\/ The given set of targets to this function is the initial set of\n\/\/\/ targets\/profiles which are being built.\npub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,\n units: &[Unit<'b>]) {\n let mut ret = HashMap::new();\n for unit in units {\n build(&mut ret, cx, unit);\n }\n cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {\n (k, Arc::new(v))\n }));\n\n \/\/ Recursive function to build up the map we're constructing. This function\n \/\/ memoizes all of its return values as it goes along.\n fn build<'a, 'b, 'cfg>(out: &'a mut HashMap, BuildScripts>,\n cx: &Context<'b, 'cfg>,\n unit: &Unit<'b>)\n -> &'a BuildScripts {\n \/\/ Do a quick pre-flight check to see if we've already calculated the\n \/\/ set of dependencies.\n if out.contains_key(unit) {\n return &out[unit]\n }\n\n let mut to_link = Vec::new();\n let mut plugins = Vec::new();\n\n if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {\n to_link.push((unit.pkg.package_id().clone(), unit.kind));\n }\n for unit in cx.dep_targets(unit).iter() {\n let dep_scripts = build(out, cx, unit);\n\n if unit.target.for_host() {\n plugins.extend(dep_scripts.to_link.iter()\n .map(|p| &p.0).cloned());\n } else if unit.target.linkable() {\n to_link.extend(dep_scripts.to_link.iter().cloned());\n }\n }\n\n let prev = out.entry(*unit).or_insert(BuildScripts::default());\n prev.to_link.extend(to_link);\n prev.plugins.extend(plugins);\n return prev\n }\n}\n<|endoftext|>"} {"text":"\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::attr::{Attr, TokenListAttrValue};\nuse dom::bindings::codegen::Bindings::DOMTokenListBinding;\nuse dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods;\nuse dom::bindings::error::{Fallible, InvalidCharacter, Syntax};\nuse dom::bindings::global::Window;\nuse dom::bindings::js::{JS, JSRef, Temporary, OptionalRootable};\nuse dom::bindings::utils::{Reflector, Reflectable, reflect_dom_object};\nuse dom::element::{Element, AttributeHandlers};\nuse dom::node::window_from_node;\n\nuse servo_util::atom::Atom;\nuse servo_util::namespace::Null;\nuse servo_util::str::{DOMString, HTML_SPACE_CHARACTERS};\n\n#[deriving(Encodable)]\npub struct DOMTokenList {\n reflector_: Reflector,\n element: JS,\n local_name: &'static str,\n}\n\nimpl DOMTokenList {\n pub fn new_inherited(element: &JSRef,\n local_name: &'static str) -> DOMTokenList {\n DOMTokenList {\n reflector_: Reflector::new(),\n element: JS::from_rooted(element),\n local_name: local_name,\n }\n }\n\n pub fn new(element: &JSRef,\n local_name: &'static str) -> Temporary {\n let window = window_from_node(element).root();\n reflect_dom_object(box DOMTokenList::new_inherited(element, local_name),\n &Window(*window), DOMTokenListBinding::Wrap)\n }\n}\n\nimpl Reflectable for DOMTokenList {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n}\n\ntrait PrivateDOMTokenListHelpers {\n fn attribute(&self) -> Option>;\n fn check_token_exceptions<'a>(&self, token: &'a str) -> Fallible<&'a str>;\n}\n\nimpl<'a> PrivateDOMTokenListHelpers for JSRef<'a, DOMTokenList> {\n fn attribute(&self) -> Option> {\n let element = self.element.root();\n element.deref().get_attribute(Null, self.local_name)\n }\n\n fn check_token_exceptions<'a>(&self, token: &'a str) -> Fallible<&'a str> {\n match token {\n \"\" => Err(Syntax),\n token if token.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter),\n token => Ok(token)\n }\n }\n}\n\n\/\/ http:\/\/dom.spec.whatwg.org\/#domtokenlist\nimpl<'a> DOMTokenListMethods for JSRef<'a, DOMTokenList> {\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-length\n fn Length(&self) -> u32 {\n let attribute = self.attribute().root();\n match attribute {\n Some(attribute) => {\n match *attribute.deref().value() {\n TokenListAttrValue(_, ref indexes) => indexes.len() as u32,\n _ => fail!(\"Expected a TokenListAttrValue\"),\n }\n }\n None => 0,\n }\n }\n\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-item\n fn Item(&self, index: u32) -> Option {\n let attribute = self.attribute().root();\n attribute.and_then(|attribute| {\n match *attribute.deref().value() {\n TokenListAttrValue(ref value, ref indexes) => {\n indexes.as_slice().get(index as uint).map(|&(start, end)| {\n value.as_slice().slice(start, end).to_string()\n })\n },\n _ => fail!(\"Expected a TokenListAttrValue\"),\n }\n })\n }\n\n fn IndexedGetter(&self, index: u32, found: &mut bool) -> Option {\n let item = self.Item(index);\n *found = item.is_some();\n item\n }\n\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-contains\n fn Contains(&self, token: DOMString) -> Fallible {\n self.check_token_exceptions(token.as_slice()).map(|slice| {\n self.attribute().root().and_then(|attr| attr.value().tokens().map(|mut tokens| {\n let atom = Atom::from_slice(slice);\n tokens.any(|token| *token == atom)\n })).unwrap_or(false)\n })\n }\n}\nCleaned DOMTokenList code duplication\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::attr::Attr;\nuse dom::bindings::codegen::Bindings::DOMTokenListBinding;\nuse dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods;\nuse dom::bindings::error::{Fallible, InvalidCharacter, Syntax};\nuse dom::bindings::global::Window;\nuse dom::bindings::js::{JS, JSRef, Temporary, OptionalRootable};\nuse dom::bindings::utils::{Reflector, Reflectable, reflect_dom_object};\nuse dom::element::{Element, AttributeHandlers};\nuse dom::node::window_from_node;\n\nuse servo_util::atom::Atom;\nuse servo_util::namespace::Null;\nuse servo_util::str::{DOMString, HTML_SPACE_CHARACTERS};\n\n#[deriving(Encodable)]\npub struct DOMTokenList {\n reflector_: Reflector,\n element: JS,\n local_name: &'static str,\n}\n\nimpl DOMTokenList {\n pub fn new_inherited(element: &JSRef,\n local_name: &'static str) -> DOMTokenList {\n DOMTokenList {\n reflector_: Reflector::new(),\n element: JS::from_rooted(element),\n local_name: local_name,\n }\n }\n\n pub fn new(element: &JSRef,\n local_name: &'static str) -> Temporary {\n let window = window_from_node(element).root();\n reflect_dom_object(box DOMTokenList::new_inherited(element, local_name),\n &Window(*window), DOMTokenListBinding::Wrap)\n }\n}\n\nimpl Reflectable for DOMTokenList {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n}\n\ntrait PrivateDOMTokenListHelpers {\n fn attribute(&self) -> Option>;\n fn check_token_exceptions<'a>(&self, token: &'a str) -> Fallible<&'a str>;\n}\n\nimpl<'a> PrivateDOMTokenListHelpers for JSRef<'a, DOMTokenList> {\n fn attribute(&self) -> Option> {\n let element = self.element.root();\n element.deref().get_attribute(Null, self.local_name)\n }\n\n fn check_token_exceptions<'a>(&self, token: &'a str) -> Fallible<&'a str> {\n match token {\n \"\" => Err(Syntax),\n token if token.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter),\n token => Ok(token)\n }\n }\n}\n\n\/\/ http:\/\/dom.spec.whatwg.org\/#domtokenlist\nimpl<'a> DOMTokenListMethods for JSRef<'a, DOMTokenList> {\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-length\n fn Length(&self) -> u32 {\n self.attribute().root().map(|attr| {\n attr.value().tokens().map(|tokens| tokens.len()).unwrap_or(0)\n }).unwrap_or(0) as u32\n }\n\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-item\n fn Item(&self, index: u32) -> Option {\n self.attribute().root().and_then(|attr| attr.value().tokens().and_then(|mut tokens| {\n tokens.idx(index as uint).map(|token| token.as_slice().to_string())\n }))\n }\n\n fn IndexedGetter(&self, index: u32, found: &mut bool) -> Option {\n let item = self.Item(index);\n *found = item.is_some();\n item\n }\n\n \/\/ http:\/\/dom.spec.whatwg.org\/#dom-domtokenlist-contains\n fn Contains(&self, token: DOMString) -> Fallible {\n self.check_token_exceptions(token.as_slice()).map(|slice| {\n self.attribute().root().and_then(|attr| attr.value().tokens().map(|mut tokens| {\n let atom = Atom::from_slice(slice);\n tokens.any(|token| *token == atom)\n })).unwrap_or(false)\n })\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Unix-specific extensions to primitives in the `std::fs` module.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs::{self, Permissions, OpenOptions};\nuse io;\nuse path::Path;\nuse sys;\nuse sys_common::{FromInner, AsInner, AsInnerMut};\n\n\/\/\/ Unix-specific extensions to `Permissions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait PermissionsExt {\n \/\/\/ Returns the underlying raw `mode_t` bits that are the standard Unix\n \/\/\/ permissions for this file.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::File;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ let f = File::create(\"foo.txt\")?;\n \/\/\/ let metadata = f.metadata()?;\n \/\/\/ let permissions = metadata.permissions();\n \/\/\/\n \/\/\/ println!(\"permissions: {}\", permissions.mode());\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n\n \/\/\/ Sets the underlying raw bits for this set of permissions.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::File;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ let f = File::create(\"foo.txt\")?;\n \/\/\/ let metadata = f.metadata()?;\n \/\/\/ let mut permissions = metadata.permissions();\n \/\/\/\n \/\/\/ permissions.set_mode(0o644); \/\/ Read\/write for owner and read for others.\n \/\/\/ assert_eq!(permissions.mode(), 0o644);\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn set_mode(&mut self, mode: u32);\n\n \/\/\/ Creates a new instance of `Permissions` from the given set of Unix\n \/\/\/ permission bits.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::Permissions;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ \/\/ Read\/write for owner and read for others.\n \/\/\/ let permissions = Permissions::from_mode(0o644);\n \/\/\/ assert_eq!(permissions.mode(), 0o644);\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn from_mode(mode: u32) -> Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl PermissionsExt for Permissions {\n fn mode(&self) -> u32 {\n self.as_inner().mode()\n }\n\n fn set_mode(&mut self, mode: u32) {\n *self = Permissions::from_inner(FromInner::from_inner(mode));\n }\n\n fn from_mode(mode: u32) -> Permissions {\n Permissions::from_inner(FromInner::from_inner(mode))\n }\n}\n\n\/\/\/ Unix-specific extensions to `OpenOptions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait OpenOptionsExt {\n \/\/\/ Sets the mode bits that a new file will be created with.\n \/\/\/\n \/\/\/ If a new file is created as part of a `File::open_opts` call then this\n \/\/\/ specified `mode` will be used as the permission bits for the new file.\n \/\/\/ If no `mode` is set, the default of `0o666` will be used.\n \/\/\/ The operating system masks out bits with the systems `umask`, to produce\n \/\/\/ the final permissions.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.mode(0o644); \/\/ Give read\/write for owner and read for others.\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n\n 
\/\/\/ Pass custom flags to the `flags` agument of `open`.\n \/\/\/\n \/\/\/ The bits that define the access mode are masked out with `O_ACCMODE`, to\n \/\/\/ ensure they do not interfere with the access mode set by Rusts options.\n \/\/\/\n \/\/\/ Custom flags can only set flags, not remove flags set by Rusts options.\n \/\/\/ This options overwrites any previously set custom flags.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.write(true);\n \/\/\/ if cfg!(unix) {\n \/\/\/ options.custom_flags(libc::O_NOFOLLOW);\n \/\/\/ }\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"open_options_ext\", since = \"1.10.0\")]\n fn custom_flags(&mut self, flags: i32) -> &mut Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl OpenOptionsExt for OpenOptions {\n fn mode(&mut self, mode: u32) -> &mut OpenOptions {\n self.as_inner_mut().mode(mode); self\n }\n\n fn custom_flags(&mut self, flags: i32) -> &mut OpenOptions {\n self.as_inner_mut().custom_flags(flags); self\n }\n}\n\n\/\/ Hm, why are there casts here to the returned type, shouldn't the types always\n\/\/ be the same? Right you are! Turns out, however, on android at least the types\n\/\/ in the raw `stat` structure are not the same as the types being returned. Who\n\/\/ knew!\n\/\/\n\/\/ As a result to make sure this compiles for all platforms we do the manual\n\/\/ casts and rely on manual lowering to `stat` if the raw type is desired.\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\npub trait MetadataExt {\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn uid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn gid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn size(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime_nsec(&self) -> i64;\n}\n\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\nimpl MetadataExt for fs::Metadata {\n fn mode(&self) -> u32 {\n self.as_inner().as_inner().st_mode as u32\n }\n fn uid(&self) -> u32 {\n self.as_inner().as_inner().st_uid as u32\n }\n fn gid(&self) -> u32 {\n self.as_inner().as_inner().st_gid as u32\n }\n fn size(&self) -> u64 {\n self.as_inner().as_inner().st_size as u64\n }\n fn atime(&self) -> i64 {\n self.as_inner().as_inner().st_atime as i64\n }\n fn atime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_atime_nsec as i64\n }\n fn mtime(&self) -> i64 {\n self.as_inner().as_inner().st_mtime as i64\n }\n fn mtime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_mtime_nsec as i64\n }\n fn ctime(&self) -> i64 {\n self.as_inner().as_inner().st_ctime as i64\n }\n fn ctime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_ctime_nsec as i64\n }\n}\n\n\/\/\/ Add special unix types (block\/char device, fifo and 
socket)\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\npub trait FileTypeExt {\n \/\/\/ Returns whether this file type is a block device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_block_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a char device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_char_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a fifo.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_fifo(&self) -> bool;\n \/\/\/ Returns whether this file type is a socket.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_socket(&self) -> bool;\n}\n\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\nimpl FileTypeExt for fs::FileType {\n fn is_block_device(&self) -> bool { false \/*FIXME: Implement block device mode*\/ }\n fn is_char_device(&self) -> bool { false \/*FIXME: Implement char device mode*\/ }\n fn is_fifo(&self) -> bool { false \/*FIXME: Implement fifo mode*\/ }\n fn is_socket(&self) -> bool { false \/*FIXME: Implement socket mode*\/ }\n}\n\n\/\/\/ Creates a new symbolic link on the filesystem.\n\/\/\/\n\/\/\/ The `dst` path will be a symbolic link pointing to the `src` path.\n\/\/\/\n\/\/\/ # Note\n\/\/\/\n\/\/\/ On Windows, you must specify whether a symbolic link points to a file\n\/\/\/ or directory. Use `os::windows::fs::symlink_file` to create a\n\/\/\/ symbolic link to a file, or `os::windows::fs::symlink_dir` to create a\n\/\/\/ symbolic link to a directory. Additionally, the process must have\n\/\/\/ `SeCreateSymbolicLinkPrivilege` in order to be able to create a\n\/\/\/ symbolic link.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::os::unix::fs;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ fs::symlink(\"a.txt\", \"b.txt\")?;\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"symlink\", since = \"1.1.0\")]\npub fn symlink, Q: AsRef>(src: P, dst: Q) -> io::Result<()>\n{\n sys::fs::symlink(src.as_ref(), dst.as_ref())\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n\/\/\/ An extension trait for `fs::DirBuilder` for unix-specific options.\npub trait DirBuilderExt {\n \/\/\/ Sets the mode to create new directories with. This option defaults to\n \/\/\/ 0o777.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ use std::fs::DirBuilder;\n \/\/\/ use std::os::unix::fs::DirBuilderExt;\n \/\/\/\n \/\/\/ let mut builder = DirBuilder::new();\n \/\/\/ builder.mode(0o755);\n \/\/\/ ```\n #[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\nimpl DirBuilderExt for fs::DirBuilder {\n fn mode(&mut self, mode: u32) -> &mut fs::DirBuilder {\n self.as_inner_mut().set_mode(mode);\n self\n }\n}\nAdd dev and ino to MetadataExt\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Unix-specific extensions to primitives in the `std::fs` module.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs::{self, Permissions, OpenOptions};\nuse io;\nuse path::Path;\nuse sys;\nuse sys_common::{FromInner, AsInner, AsInnerMut};\n\n\/\/\/ Unix-specific extensions to `Permissions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait PermissionsExt {\n \/\/\/ Returns the underlying raw `mode_t` bits that are the standard Unix\n \/\/\/ permissions for this file.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::File;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ let f = File::create(\"foo.txt\")?;\n \/\/\/ let metadata = f.metadata()?;\n \/\/\/ let permissions = metadata.permissions();\n \/\/\/\n \/\/\/ println!(\"permissions: {}\", permissions.mode());\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n\n \/\/\/ Sets the underlying raw bits for this set of permissions.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::File;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ let f = File::create(\"foo.txt\")?;\n \/\/\/ let metadata = f.metadata()?;\n \/\/\/ let mut permissions = metadata.permissions();\n \/\/\/\n \/\/\/ permissions.set_mode(0o644); \/\/ Read\/write for owner and read for others.\n \/\/\/ assert_eq!(permissions.mode(), 0o644);\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn set_mode(&mut self, mode: u32);\n\n \/\/\/ Creates a new instance of `Permissions` from the given set of Unix\n \/\/\/ permission bits.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use std::fs::Permissions;\n \/\/\/ use std::os::unix::fs::PermissionsExt;\n \/\/\/\n \/\/\/ \/\/ Read\/write for owner and read for others.\n \/\/\/ let permissions = Permissions::from_mode(0o644);\n \/\/\/ assert_eq!(permissions.mode(), 0o644);\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn from_mode(mode: u32) -> Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl PermissionsExt for Permissions {\n fn mode(&self) -> u32 {\n self.as_inner().mode()\n }\n\n fn set_mode(&mut self, mode: u32) {\n *self = Permissions::from_inner(FromInner::from_inner(mode));\n }\n\n fn from_mode(mode: u32) -> Permissions {\n Permissions::from_inner(FromInner::from_inner(mode))\n }\n}\n\n\/\/\/ Unix-specific extensions to `OpenOptions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait OpenOptionsExt {\n \/\/\/ Sets the mode bits that a new file will be created with.\n \/\/\/\n \/\/\/ If a new file is created as part of a `File::open_opts` call then this\n \/\/\/ specified `mode` will be used as the permission bits for the new file.\n \/\/\/ If no `mode` is set, the default of `0o666` will be used.\n \/\/\/ The operating system masks out bits with the systems `umask`, to produce\n \/\/\/ the final permissions.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.mode(0o644); \/\/ Give read\/write for owner and read for others.\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n\n \/\/\/ Pass custom flags to the `flags` agument of `open`.\n \/\/\/\n \/\/\/ The bits that define the 
access mode are masked out with `O_ACCMODE`, to\n \/\/\/ ensure they do not interfere with the access mode set by Rusts options.\n \/\/\/\n \/\/\/ Custom flags can only set flags, not remove flags set by Rusts options.\n \/\/\/ This options overwrites any previously set custom flags.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.write(true);\n \/\/\/ if cfg!(unix) {\n \/\/\/ options.custom_flags(libc::O_NOFOLLOW);\n \/\/\/ }\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"open_options_ext\", since = \"1.10.0\")]\n fn custom_flags(&mut self, flags: i32) -> &mut Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl OpenOptionsExt for OpenOptions {\n fn mode(&mut self, mode: u32) -> &mut OpenOptions {\n self.as_inner_mut().mode(mode); self\n }\n\n fn custom_flags(&mut self, flags: i32) -> &mut OpenOptions {\n self.as_inner_mut().custom_flags(flags); self\n }\n}\n\n\/\/ Hm, why are there casts here to the returned type, shouldn't the types always\n\/\/ be the same? Right you are! Turns out, however, on android at least the types\n\/\/ in the raw `stat` structure are not the same as the types being returned. Who\n\/\/ knew!\n\/\/\n\/\/ As a result to make sure this compiles for all platforms we do the manual\n\/\/ casts and rely on manual lowering to `stat` if the raw type is desired.\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\npub trait MetadataExt {\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn dev(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ino(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn uid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn gid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn size(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime_nsec(&self) -> i64;\n}\n\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\nimpl MetadataExt for fs::Metadata {\n fn dev(&self) -> u64 {\n self.as_inner().as_inner().st_dev as u64\n }\n fn ino(&self) -> u64 {\n self.as_inner().as_inner().st_ino as u64\n }\n fn mode(&self) -> u32 {\n self.as_inner().as_inner().st_mode as u32\n }\n fn uid(&self) -> u32 {\n self.as_inner().as_inner().st_uid as u32\n }\n fn gid(&self) -> u32 {\n self.as_inner().as_inner().st_gid as u32\n }\n fn size(&self) -> u64 {\n self.as_inner().as_inner().st_size as u64\n }\n fn atime(&self) -> i64 {\n self.as_inner().as_inner().st_atime as i64\n }\n fn atime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_atime_nsec as i64\n }\n fn mtime(&self) -> i64 {\n self.as_inner().as_inner().st_mtime as i64\n }\n fn mtime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_mtime_nsec as i64\n }\n fn ctime(&self) -> i64 {\n 
self.as_inner().as_inner().st_ctime as i64\n }\n fn ctime_nsec(&self) -> i64 {\n self.as_inner().as_inner().st_ctime_nsec as i64\n }\n}\n\n\/\/\/ Add special unix types (block\/char device, fifo and socket)\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\npub trait FileTypeExt {\n \/\/\/ Returns whether this file type is a block device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_block_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a char device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_char_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a fifo.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_fifo(&self) -> bool;\n \/\/\/ Returns whether this file type is a socket.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_socket(&self) -> bool;\n}\n\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\nimpl FileTypeExt for fs::FileType {\n fn is_block_device(&self) -> bool { false \/*FIXME: Implement block device mode*\/ }\n fn is_char_device(&self) -> bool { false \/*FIXME: Implement char device mode*\/ }\n fn is_fifo(&self) -> bool { false \/*FIXME: Implement fifo mode*\/ }\n fn is_socket(&self) -> bool { false \/*FIXME: Implement socket mode*\/ }\n}\n\n\/\/\/ Creates a new symbolic link on the filesystem.\n\/\/\/\n\/\/\/ The `dst` path will be a symbolic link pointing to the `src` path.\n\/\/\/\n\/\/\/ # Note\n\/\/\/\n\/\/\/ On Windows, you must specify whether a symbolic link points to a file\n\/\/\/ or directory. Use `os::windows::fs::symlink_file` to create a\n\/\/\/ symbolic link to a file, or `os::windows::fs::symlink_dir` to create a\n\/\/\/ symbolic link to a directory. Additionally, the process must have\n\/\/\/ `SeCreateSymbolicLinkPrivilege` in order to be able to create a\n\/\/\/ symbolic link.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::os::unix::fs;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ fs::symlink(\"a.txt\", \"b.txt\")?;\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"symlink\", since = \"1.1.0\")]\npub fn symlink, Q: AsRef>(src: P, dst: Q) -> io::Result<()>\n{\n sys::fs::symlink(src.as_ref(), dst.as_ref())\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n\/\/\/ An extension trait for `fs::DirBuilder` for unix-specific options.\npub trait DirBuilderExt {\n \/\/\/ Sets the mode to create new directories with. 
This option defaults to\n \/\/\/ 0o777.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ use std::fs::DirBuilder;\n \/\/\/ use std::os::unix::fs::DirBuilderExt;\n \/\/\/\n \/\/\/ let mut builder = DirBuilder::new();\n \/\/\/ builder.mode(0o755);\n \/\/\/ ```\n #[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\nimpl DirBuilderExt for fs::DirBuilder {\n fn mode(&mut self, mode: u32) -> &mut fs::DirBuilder {\n self.as_inner_mut().set_mode(mode);\n self\n }\n}\n<|endoftext|>"} {"text":"Fix: use Path::file_stem() instead of Path::file_name()<|endoftext|>"} {"text":"Re-enable moving cursor in VgaConsole<|endoftext|>"} {"text":"random say<|endoftext|>"} {"text":"fix default words<|endoftext|>"} {"text":"add example codeextern crate rust_fluent;\nuse rust_fluent::tcp;\n\nuse std::collections::HashMap;\n\n\nfn main() {\n let mut obj: HashMap = HashMap::new();\n obj.insert(\"key\".to_string(), \"value\".to_string());\n\n let fluentd = tcp::Fluentd {\n address: \"0.0.0.0\".to_string(),\n port: 24224,\n tag: \"foo\".to_string(),\n };\n\n fluentd.write(obj);\n}\n<|endoftext|>"} {"text":"start new<|endoftext|>"} {"text":"start GBA file.\/\/! Targets the Nintendo Game Boy Advance (GBA).\n\/\/!\n\/\/! The GBA is a handheld game device from 2001. Inside, the CPU is an ARM7TDMI.\n\/\/! That's in the ARMv4T architecture family.\n\/\/!\n\/\/! Technically the device has no OS, however we're going to copy the\n\/\/! `mipsel_sony_psp` target setup and set the OS string to be \"GBA\". Other than\n\/\/! the setting of the `target_os` and `target_vendor` values, this target is a\n\/\/! fairly standard configuration for `thumbv4t`\n\nuse crate::spec::{LinkerFlavor, LldFlavor, Target, TargetOptions, TargetResult};\n\npub fn target() -> TargetResult {\n Ok(Target {\n llvm_target: \"thumbv4t-none-eabi\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_c_int_width: \"32\".to_string(),\n target_os: \"gba\".to_string(),\n target_env: String::new(),\n target_vendor: \"nintendo\".to_string(),\n arch: \"arm\".to_string(),\n data_layout: \"TODO\".to_string(),\n linker_flavor: LinkerFlavor::Ld,\n options: TargetOptions {\n \/\/ TODO\n ..TargetOptions::default()\n },\n })\n}\n<|endoftext|>"} {"text":"test for invalid wide raw ptrfn main() {\n trait T { }\n #[derive(Debug)]\n struct S {\n x: * mut dyn T\n }\n dbg!(S { x: unsafe { std::mem::transmute((0usize, 0usize)) } }); \/\/~ ERROR: encountered dangling or unaligned vtable pointer\n}\n<|endoftext|>"} {"text":"Added basic exampleextern crate futures;\nextern crate tokio_serial;\nextern crate tokio_core;\n\nuse std::{io, str};\nuse tokio_core::io::{Io, Codec, EasyBuf};\nuse tokio_core::reactor::Core;\nuse futures::{future, Future, Stream, Sink};\n\nstruct LineCodec;\n\nimpl Codec for LineCodec {\n type In = String;\n type Out = String;\n\n fn decode(&mut self, buf: &mut EasyBuf) -> io::Result> {\n let newline = buf.as_ref().iter().position(|b| *b == b'\\n');\n if let Some(n) = newline {\n let line = buf.drain_to(n+1);\n return match str::from_utf8(&line.as_ref()) {\n Ok(s) => Ok(Some(s.to_string())),\n Err(_) => Err(io::Error::new(io::ErrorKind::Other, \"Invalid String\")),\n }\n }\n Ok(None)\n }\n\n \/\/ Don't actually encode anything.\n fn encode(&mut self, msg: Self::Out, buf: &mut Vec) -> io::Result<()> {\n Ok(())\n }\n}\n\nstruct Printer {\n serial: tokio_serial::Serial,\n buf: 
Vec,\n}\n\n\nfn main() {\n\n let mut core = Core::new().unwrap();\n let handle = core.handle();\n\n let settings = tokio_serial::SerialPortSettings::default();\n let port = tokio_serial::Serial::from_path(\"\/dev\/ttyUSB0\", &settings, &handle).unwrap();\n\n let (_, reader) = port.framed(LineCodec).split();\n\n let printer = reader.for_each(|s| {\n println!(\"{:?}\", s);\n Ok(())\n });\n\n \/\/ Run the printer future to completion (it was previously `core.run(gga)`,\n \/\/ which referenced an undefined variable).\n core.run(printer).unwrap();\n\n}\n<|endoftext|>"} {"text":"use std::process::exit;\n\nuse libimagdiary::diary::Diary;\nuse libimagdiary::error::DiaryError as DE;\nuse libimagdiary::error::DiaryErrorKind as DEK;\nuse libimagentrylist::listers::core::CoreLister;\nuse libimagentrylist::lister::Lister;\nuse libimagrt::runtime::Runtime;\nuse libimagstore::store::Entry;\nuse libimagerror::trace::trace_error;\n\nuse util::get_diary_name;\n\npub fn list(rt: &Runtime) {\n let diaryname = get_diary_name(rt);\n if diaryname.is_none() {\n warn!(\"No diary selected. Use either the configuration file or the commandline option\");\n exit(1);\n }\n let diaryname = diaryname.unwrap();\n\n fn entry_to_location_listing_string(e: &Entry) -> String {\n e.get_location().clone()\n .without_base()\n .to_str()\n .map_err(|e| trace_error(&e))\n .unwrap_or(String::from(\"<>\"))\n }\n\n let diary = Diary::open(rt.store(), &diaryname[..]);\n debug!(\"Diary opened: {:?}\", diary);\n diary.entries()\n .and_then(|es| {\n debug!(\"Iterator for listing: {:?}\", es);\n\n let es = es.filter_map(|a| {\n debug!(\"Filtering: {:?}\", a);\n a.ok()\n }).map(|e| e.into());\n\n CoreLister::new(&entry_to_location_listing_string)\n .list(es) \/\/ TODO: Do not ignore non-ok()s\n .map_err(|e| DE::new(DEK::IOError, Some(Box::new(e))))\n })\n .map(|_| debug!(\"Ok\"))\n .map_err(|e| trace_error(&e))\n .ok();\n}\n\nSimplify error handlinguse std::process::exit;\n\nuse libimagdiary::diary::Diary;\nuse libimagdiary::error::DiaryErrorKind as DEK;\nuse libimagdiary::error::MapErrInto;\nuse libimagentrylist::listers::core::CoreLister;\nuse libimagentrylist::lister::Lister;\nuse libimagrt::runtime::Runtime;\nuse libimagstore::store::Entry;\nuse libimagerror::trace::trace_error;\n\nuse util::get_diary_name;\n\npub fn list(rt: &Runtime) {\n let diaryname = get_diary_name(rt);\n if diaryname.is_none() {\n warn!(\"No diary selected. 
Use either the configuration file or the commandline option\");\n exit(1);\n }\n let diaryname = diaryname.unwrap();\n\n fn entry_to_location_listing_string(e: &Entry) -> String {\n e.get_location().clone()\n .without_base()\n .to_str()\n .map_err(|e| trace_error(&e))\n .unwrap_or(String::from(\"<>\"))\n }\n\n let diary = Diary::open(rt.store(), &diaryname[..]);\n debug!(\"Diary opened: {:?}\", diary);\n diary.entries()\n .and_then(|es| {\n debug!(\"Iterator for listing: {:?}\", es);\n\n let es = es.filter_map(|a| {\n debug!(\"Filtering: {:?}\", a);\n a.ok()\n }).map(|e| e.into());\n\n CoreLister::new(&entry_to_location_listing_string)\n .list(es) \/\/ TODO: Do not ignore non-ok()s\n .map_err_into(DEK::IOError)\n })\n .map(|_| debug!(\"Ok\"))\n .map_err(|e| trace_error(&e))\n .ok();\n}\n\n<|endoftext|>"} {"text":"Call clone() as late as possible<|endoftext|>"} {"text":"Do not call clone() on a Copy type<|endoftext|>"} {"text":"Add test case for #2828struct NoCopy {\n n: int\n}\nfn NoCopy() -> NoCopy {\n NoCopy { n: 0 }\n}\n\nimpl NoCopy: Drop {\n fn finalize(&self) {\n log(error, \"running destructor\");\n }\n}\n\nfn main() {\n let x = NoCopy();\n\n let f = fn~() { assert x.n == 0; }; \/\/~ ERROR copying a noncopyable value\n let g = copy f;\n\n f(); g();\n}<|endoftext|>"} {"text":"add `Send` bound to the `spawn` type parameter<|endoftext|>"} {"text":"Flush back to the database every 1s.<|endoftext|>"} {"text":"finalize -> drop<|endoftext|>"} {"text":"Implement get_dbrecord<|endoftext|>"} {"text":"#![crate_name = \"input\"]\n#![deny(missing_doc)]\n#![feature(globs)]\n#![feature(struct_variant)]\n\n\/\/! A flexible structure for user interactions\n\/\/! to be used in window frameworks and widgets libraries.\n\npub mod keyboard;\npub mod mouse;\n\n\/\/\/ Models different kinds of buttons.\n#[deriving(Clone, PartialEq, Eq, Show)]\npub enum Button {\n \/\/\/ A keyboard button.\n Keyboard(keyboard::Key),\n \/\/\/ A mouse button.\n Mouse(mouse::Button),\n}\n\n\/\/\/ Models different kinds of motion.\n#[deriving(Clone, Show)]\npub enum Motion {\n \/\/\/ x and y in window coordinates.\n MouseCursor(f64, f64),\n \/\/\/ x and y in relative coordinates.\n MouseRelative(f64, f64),\n \/\/\/ x and y in scroll ticks.\n MouseScroll(f64, f64),\n}\n\n\/\/\/ Models input events.\n#[deriving(Clone, Show)]\npub enum InputEvent {\n \/\/\/ Pressed a button.\n Press(Button),\n \/\/\/ Released a button.\n Release(Button),\n \/\/\/ Moved mouse cursor.\n Move(Motion),\n \/\/\/ Text (usually from keyboard).\n Text(String),\n \/\/\/ Window got resized.\n Resize(u32, u32),\n}\n\nAdded Focus event#![crate_name = \"input\"]\n#![deny(missing_doc)]\n#![feature(globs)]\n#![feature(struct_variant)]\n\n\/\/! A flexible structure for user interactions\n\/\/! 
to be used in window frameworks and widgets libraries.\n\npub mod keyboard;\npub mod mouse;\n\n\/\/\/ Models different kinds of buttons.\n#[deriving(Clone, PartialEq, Eq, Show)]\npub enum Button {\n \/\/\/ A keyboard button.\n Keyboard(keyboard::Key),\n \/\/\/ A mouse button.\n Mouse(mouse::Button),\n}\n\n\/\/\/ Models different kinds of motion.\n#[deriving(Clone, Show)]\npub enum Motion {\n \/\/\/ x and y in window coordinates.\n MouseCursor(f64, f64),\n \/\/\/ x and y in relative coordinates.\n MouseRelative(f64, f64),\n \/\/\/ x and y in scroll ticks.\n MouseScroll(f64, f64),\n}\n\n\/\/\/ Models input events.\n#[deriving(Clone, Show)]\npub enum InputEvent {\n \/\/\/ Pressed a button.\n Press(Button),\n \/\/\/ Released a button.\n Release(Button),\n \/\/\/ Moved mouse cursor.\n Move(Motion),\n \/\/\/ Text (usually from keyboard).\n Text(String),\n \/\/\/ Window got resized.\n Resize(u32, u32),\n \/\/\/ Window gained or lost focus.\n Focus(bool),\n}\n\n<|endoftext|>"} {"text":"extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::{\n Mac,\n MacResult,\n};\nuse rustc_serialize::{\n json,\n Decodable,\n Encodable,\n};\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n FromBase64,\n Newline,\n ToBase64,\n};\npub use error::Error;\npub use header::Header;\npub use claims::Claims;\npub use claims::Registered;\n\npub mod error;\npub mod header;\npub mod claims;\n\n#[derive(Debug, Default)]\npub struct Token\n where H: Component {\n raw: Option,\n pub header: H,\n pub claims: Claims,\n}\n\npub trait Component {\n fn parse(raw: &str) -> Result;\n fn encode(&self) -> Result;\n}\n\nimpl Component for T\n where T: Encodable + Decodable + Sized {\n\n \/\/\/ Parse from a string.\n fn parse(raw: &str) -> Result {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let header = try!(json::decode(&*s));\n\n Ok(header)\n }\n\n \/\/\/ Encode to a string.\n fn encode(&self) -> Result {\n let s = try!(json::encode(&self));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\nimpl Token\n where H: Component {\n pub fn new(header: H, claims: Claims) -> Token {\n Token {\n raw: None,\n header: header,\n claims: claims,\n }\n }\n\n \/\/\/ Parse a token from a string.\n pub fn parse(raw: &str) -> Result, Error> {\n let pieces: Vec<_> = raw.split('.').collect();\n\n Ok(Token {\n raw: Some(raw.into()),\n header: try!(Component::parse(pieces[0])),\n claims: try!(Claims::parse(pieces[1])),\n })\n }\n\n \/\/\/ Verify a parsed token with a key and a given hashing algorithm.\n \/\/\/ Make sure to check the token's algorithm before applying.\n pub fn verify(&self, key: &[u8], digest: D) -> bool {\n let raw = match self.raw {\n Some(ref s) => s,\n None => return false,\n };\n\n let pieces: Vec<_> = raw.rsplitn(2, '.').collect();\n let sig = pieces[0];\n let data = pieces[1];\n\n verify(sig, data, key, digest)\n }\n\n \/\/\/ Generate the signed token from a key and a given hashing algorithm.\n pub fn signed(&self, key: &[u8], digest: D) -> Result {\n let header = try!(Component::encode(&self.header));\n let claims = try!(self.claims.encode());\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, key, digest);\n Ok(format!(\"{}.{}\", data, sig))\n }\n}\n\nimpl PartialEq for Token\n where H: Component + PartialEq {\n fn eq(&self, other: &Token) -> bool {\n self.header == other.header &&\n self.claims == other.claims\n }\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config 
{\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign(data: &str, key: &[u8], digest: D) -> String {\n let mut hmac = Hmac::new(digest, key);\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\nfn verify(target: &str, data: &str, key: &[u8], digest: D) -> bool {\n let target_bytes = match target.from_base64() {\n Ok(x) => x,\n Err(_) => return false,\n };\n let target_mac = MacResult::new_from_owned(target_bytes);\n\n let mut hmac = Hmac::new(digest, key);\n hmac.input(data.as_bytes());\n\n hmac.result() == target_mac\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use verify;\n use Token;\n use header::Algorithm::HS256;\n use header::Header;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\".as_bytes(), Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n\n #[test]\n pub fn verify_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let target = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n assert!(verify(target, &*data, \"secret\".as_bytes(), Sha256::new()));\n }\n\n #[test]\n pub fn raw_data() {\n let raw = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let token = Token::
::parse(raw).unwrap();\n\n {\n assert_eq!(token.header.alg, Some(HS256));\n }\n assert!(token.verify(\"secret\".as_bytes(), Sha256::new()));\n }\n\n #[test]\n pub fn roundtrip() {\n let token: Token
= Default::default();\n let key = \"secret\".as_bytes();\n let raw = token.signed(key, Sha256::new()).unwrap();\n let same = Token::parse(&*raw).unwrap();\n\n assert_eq!(token, same);\n assert!(same.verify(key, Sha256::new()));\n }\n}\nClean Component implextern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::{\n Mac,\n MacResult,\n};\nuse rustc_serialize::{\n json,\n Decodable,\n Encodable,\n};\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n FromBase64,\n Newline,\n ToBase64,\n};\npub use error::Error;\npub use header::Header;\npub use claims::Claims;\npub use claims::Registered;\n\npub mod error;\npub mod header;\npub mod claims;\n\n#[derive(Debug, Default)]\npub struct Token\n where H: Component {\n raw: Option,\n pub header: H,\n pub claims: Claims,\n}\n\npub trait Component {\n fn parse(raw: &str) -> Result;\n fn encode(&self) -> Result;\n}\n\nimpl Component for T\n where T: Encodable + Decodable + Sized {\n\n \/\/\/ Parse from a string.\n fn parse(raw: &str) -> Result {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n Ok(try!(json::decode(&*s)))\n }\n\n \/\/\/ Encode to a string.\n fn encode(&self) -> Result {\n let s = try!(json::encode(&self));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\nimpl Token\n where H: Component {\n pub fn new(header: H, claims: Claims) -> Token {\n Token {\n raw: None,\n header: header,\n claims: claims,\n }\n }\n\n \/\/\/ Parse a token from a string.\n pub fn parse(raw: &str) -> Result, Error> {\n let pieces: Vec<_> = raw.split('.').collect();\n\n Ok(Token {\n raw: Some(raw.into()),\n header: try!(Component::parse(pieces[0])),\n claims: try!(Claims::parse(pieces[1])),\n })\n }\n\n \/\/\/ Verify a parsed token with a key and a given hashing algorithm.\n \/\/\/ Make sure to check the token's algorithm before applying.\n pub fn verify(&self, key: &[u8], digest: D) -> bool {\n let raw = match self.raw {\n Some(ref s) => s,\n None => return false,\n };\n\n let pieces: Vec<_> = raw.rsplitn(2, '.').collect();\n let sig = pieces[0];\n let data = pieces[1];\n\n verify(sig, data, key, digest)\n }\n\n \/\/\/ Generate the signed token from a key and a given hashing algorithm.\n pub fn signed(&self, key: &[u8], digest: D) -> Result {\n let header = try!(Component::encode(&self.header));\n let claims = try!(self.claims.encode());\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, key, digest);\n Ok(format!(\"{}.{}\", data, sig))\n }\n}\n\nimpl PartialEq for Token\n where H: Component + PartialEq {\n fn eq(&self, other: &Token) -> bool {\n self.header == other.header &&\n self.claims == other.claims\n }\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign(data: &str, key: &[u8], digest: D) -> String {\n let mut hmac = Hmac::new(digest, key);\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\nfn verify(target: &str, data: &str, key: &[u8], digest: D) -> bool {\n let target_bytes = match target.from_base64() {\n Ok(x) => x,\n Err(_) => return false,\n };\n let target_mac = MacResult::new_from_owned(target_bytes);\n\n let mut hmac = Hmac::new(digest, key);\n hmac.input(data.as_bytes());\n\n hmac.result() == target_mac\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use verify;\n use Token;\n use header::Algorithm::HS256;\n use 
header::Header;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\".as_bytes(), Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n\n #[test]\n pub fn verify_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let target = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n assert!(verify(target, &*data, \"secret\".as_bytes(), Sha256::new()));\n }\n\n #[test]\n pub fn raw_data() {\n let raw = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let token = Token::
::parse(raw).unwrap();\n\n {\n assert_eq!(token.header.alg, Some(HS256));\n }\n assert!(token.verify(\"secret\".as_bytes(), Sha256::new()));\n }\n\n #[test]\n pub fn roundtrip() {\n let token: Token
= Default::default();\n let key = \"secret\".as_bytes();\n let raw = token.signed(key, Sha256::new()).unwrap();\n let same = Token::parse(&*raw).unwrap();\n\n assert_eq!(token, same);\n assert!(same.verify(key, Sha256::new()));\n }\n}\n<|endoftext|>"} {"text":"Auto merge of #245 - frewsxcv:docs, r=mbrubeck<|endoftext|>"} {"text":"Remove unnecessary numeric suffix<|endoftext|>"} {"text":"\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License .\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n#![feature(core, std_misc)]\n\nextern crate libc;\n\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult = Result;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Debug, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData => Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. 
This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Debug)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::CStr::from_ptr((*device_info).name).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n\/\/\/\n\/\/\/ TODO: should we use u8?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiMessage {\n pub status: i8,\n pub data1: i8,\n pub data2: i8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as i8,\n data1 : (((cmessage) >> 8) & 0xFF) as i8,\n data2 : (((cmessage) >> 16) & 0xFF) as i8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. 
See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. 
open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port, this may\n \/\/\/ result in transmission of a partial midi message. Note, not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. 
Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::CStr::from_ptr(error_text).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\nswitch MidiMessage from i8 to u8\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License .\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n#![feature(core, std_misc)]\n\nextern crate libc;\n\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult = Result;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Debug, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData => Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. 
This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Debug)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::CStr::from_ptr((*device_info).name).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiMessage {\n pub status: u8,\n pub data1: u8,\n pub data2: u8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as u8,\n data1 : (((cmessage) >> 8) & 0xFF) as u8,\n data2 : (((cmessage) >> 16) & 0xFF) as u8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. 
See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. 
open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port, this may\n \/\/\/ result in transmission of a partial midi message. Note, not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. 
Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::CStr::from_ptr(error_text).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\n<|endoftext|>"} {"text":"Optimized implementation for fields of width<=4<|endoftext|>"} {"text":"add channel registration command<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse {LinkerFlavor, PanicStrategy};\nuse target::{LinkArgs, TargetOptions};\nuse std::default::Default;\n\npub fn opts() -> TargetOptions {\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Gcc, vec![\n \/\/ We want to be able to strip as much executable code as possible\n \/\/ from the linker command line, and this flag indicates to the\n \/\/ linker that it can avoid linking in dynamic libraries that don't\n \/\/ actually satisfy any symbols up to that point (as with many other\n \/\/ resolutions the linker does). This option only applies to all\n \/\/ following libraries so we're sure to pass it as one of the first\n \/\/ arguments.\n \"-Wl,--as-needed\".to_string(),\n\n \/\/ Always enable NX protection when it is available\n \"-Wl,-z,noexecstack\".to_string()\n ]);\n\n TargetOptions {\n pre_link_args: args,\n executables: true,\n relocation_model: \"static\".to_string(),\n disable_redzone: true,\n eliminate_frame_pointer: false,\n target_family: None,\n linker_is_gnu: true,\n has_elf_tls: true,\n panic_strategy: PanicStrategy::Abort,\n .. Default::default()\n }\n}\nEnable unwinding panics on Redox\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{LinkArgs, TargetOptions};\nuse std::default::Default;\n\npub fn opts() -> TargetOptions {\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Gcc, vec![\n \/\/ We want to be able to strip as much executable code as possible\n \/\/ from the linker command line, and this flag indicates to the\n \/\/ linker that it can avoid linking in dynamic libraries that don't\n \/\/ actually satisfy any symbols up to that point (as with many other\n \/\/ resolutions the linker does). This option only applies to all\n \/\/ following libraries so we're sure to pass it as one of the first\n \/\/ arguments.\n \"-Wl,--as-needed\".to_string(),\n\n \/\/ Always enable NX protection when it is available\n \"-Wl,-z,noexecstack\".to_string()\n ]);\n\n TargetOptions {\n pre_link_args: args,\n executables: true,\n relocation_model: \"static\".to_string(),\n disable_redzone: true,\n eliminate_frame_pointer: false,\n target_family: None,\n linker_is_gnu: true,\n has_elf_tls: true,\n .. Default::default()\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse ast::{Ident, matcher_, matcher, match_tok, match_nonterminal, match_seq};\nuse ast::{tt_delim};\nuse ast;\nuse codemap::{Span, Spanned, dummy_sp};\nuse ext::base::{AnyMacro, ExtCtxt, MacResult, MRAny, MRDef, MacroDef};\nuse ext::base::{NormalTT, SyntaxExpanderTTTrait};\nuse ext::base;\nuse ext::tt::macro_parser::{error};\nuse ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};\nuse ext::tt::macro_parser::{parse, parse_or_else, success, failure};\nuse parse::lexer::{new_tt_reader, reader};\nuse parse::parser::Parser;\nuse parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str};\nuse parse::token::{FAT_ARROW, SEMI, nt_matchers, nt_tt};\nuse print;\n\nstruct ParserAnyMacro {\n parser: @Parser,\n}\n\nimpl AnyMacro for ParserAnyMacro {\n fn make_expr(&self) -> @ast::Expr {\n self.parser.parse_expr()\n }\n fn make_item(&self) -> Option<@ast::item> {\n self.parser.parse_item(~[]) \/\/ no attrs\n }\n fn make_stmt(&self) -> @ast::Stmt {\n self.parser.parse_stmt(~[]) \/\/ no attrs\n }\n}\n\nstruct MacroRulesSyntaxExpanderTTFun {\n name: Ident,\n lhses: @~[@named_match],\n rhses: @~[@named_match],\n}\n\nimpl SyntaxExpanderTTTrait for MacroRulesSyntaxExpanderTTFun {\n fn expand(&self,\n cx: @ExtCtxt,\n sp: Span,\n arg: &[ast::token_tree],\n _: ast::SyntaxContext)\n -> MacResult {\n generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses)\n }\n}\n\n\/\/ Given `lhses` and `rhses`, this is the new macro we create\nfn generic_extension(cx: @ExtCtxt,\n sp: Span,\n name: Ident,\n arg: &[ast::token_tree],\n lhses: &[@named_match],\n rhses: &[@named_match])\n -> MacResult {\n if cx.trace_macros() {\n println!(\"{}! \\\\{ {} \\\\}\",\n cx.str_of(name),\n print::pprust::tt_to_str(\n &ast::tt_delim(@mut arg.to_owned()),\n get_ident_interner()));\n }\n\n \/\/ Which arm's failure should we report? 
(the one furthest along)\n let mut best_fail_spot = dummy_sp();\n let mut best_fail_msg = ~\"internal error: ran no matchers\";\n\n let s_d = cx.parse_sess().span_diagnostic;\n\n for (i, lhs) in lhses.iter().enumerate() { \/\/ try each arm's matchers\n match *lhs {\n @matched_nonterminal(nt_matchers(ref mtcs)) => {\n \/\/ `none` is because we're not interpolating\n let arg_rdr = new_tt_reader(\n s_d,\n None,\n arg.to_owned()\n ) as @mut reader;\n match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {\n success(named_matches) => {\n let rhs = match rhses[i] {\n \/\/ okay, what's your transcriber?\n @matched_nonterminal(nt_tt(@ref tt)) => {\n match (*tt) {\n \/\/ cut off delimiters; don't parse 'em\n tt_delim(ref tts) => {\n (*tts).slice(1u,(*tts).len()-1u).to_owned()\n }\n _ => cx.span_fatal(\n sp, \"macro rhs must be delimited\")\n }\n },\n _ => cx.span_bug(sp, \"bad thing in rhs\")\n };\n \/\/ rhs has holes ( `$id` and `$(...)` that need filled)\n let trncbr = new_tt_reader(s_d, Some(named_matches),\n rhs);\n let p = @Parser(cx.parse_sess(),\n cx.cfg(),\n trncbr as @mut reader);\n\n \/\/ Let the context choose how to interpret the result.\n \/\/ Weird, but useful for X-macros.\n return MRAny(@ParserAnyMacro {\n parser: p,\n } as @AnyMacro)\n }\n failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {\n best_fail_spot = sp;\n best_fail_msg = (*msg).clone();\n },\n error(sp, ref msg) => cx.span_fatal(sp, (*msg))\n }\n }\n _ => cx.bug(\"non-matcher found in parsed lhses\")\n }\n }\n cx.span_fatal(best_fail_spot, best_fail_msg);\n}\n\n\/\/ this procedure performs the expansion of the\n\/\/ macro_rules! macro. It parses the RHS and adds\n\/\/ an extension to the current context.\npub fn add_new_extension(cx: @ExtCtxt,\n sp: Span,\n name: Ident,\n arg: ~[ast::token_tree],\n _: ast::SyntaxContext)\n -> base::MacResult {\n \/\/ these spans won't matter, anyways\n fn ms(m: matcher_) -> matcher {\n Spanned {\n node: m.clone(),\n span: dummy_sp()\n }\n }\n\n let lhs_nm = gensym_ident(\"lhs\");\n let rhs_nm = gensym_ident(\"rhs\");\n\n \/\/ The pattern that macro_rules matches.\n \/\/ The grammar for macro_rules! is:\n \/\/ $( $lhs:mtcs => $rhs:tt );+\n \/\/ ...quasiquoting this would be nice.\n let argument_gram = ~[\n ms(match_seq(~[\n ms(match_nonterminal(lhs_nm, special_idents::matchers, 0u)),\n ms(match_tok(FAT_ARROW)),\n ms(match_nonterminal(rhs_nm, special_idents::tt, 1u)),\n ], Some(SEMI), false, 0u, 2u)),\n \/\/to phase into semicolon-termination instead of\n \/\/semicolon-separation\n ms(match_seq(~[ms(match_tok(SEMI))], None, true, 2u, 2u))];\n\n\n \/\/ Parse the macro_rules! invocation (`none` is for no interpolations):\n let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,\n None,\n arg.clone());\n let argument_map = parse_or_else(cx.parse_sess(),\n cx.cfg(),\n arg_reader as @mut reader,\n argument_gram);\n\n \/\/ Extract the arguments:\n let lhses = match *argument_map.get(&lhs_nm) {\n @matched_seq(ref s, _) => \/* FIXME (#2543) *\/ @(*s).clone(),\n _ => cx.span_bug(sp, \"wrong-structured lhs\")\n };\n\n let rhses = match *argument_map.get(&rhs_nm) {\n @matched_seq(ref s, _) => \/* FIXME (#2543) *\/ @(*s).clone(),\n _ => cx.span_bug(sp, \"wrong-structured rhs\")\n };\n\n \/\/ Given `lhses` and `rhses`, this is the new macro we create\n fn generic_extension(cx: @ExtCtxt,\n sp: Span,\n name: Ident,\n arg: &[ast::token_tree],\n lhses: &[@named_match],\n rhses: &[@named_match])\n -> MacResult {\n if cx.trace_macros() {\n println!(\"{}! 
\\\\{ {} \\\\}\",\n cx.str_of(name),\n print::pprust::tt_to_str(\n &ast::tt_delim(@mut arg.to_owned()),\n get_ident_interner()));\n }\n\n \/\/ Which arm's failure should we report? (the one furthest along)\n let mut best_fail_spot = dummy_sp();\n let mut best_fail_msg = ~\"internal error: ran no matchers\";\n\n let s_d = cx.parse_sess().span_diagnostic;\n\n for (i, lhs) in lhses.iter().enumerate() { \/\/ try each arm's matchers\n match *lhs {\n @matched_nonterminal(nt_matchers(ref mtcs)) => {\n \/\/ `none` is because we're not interpolating\n let arg_rdr = new_tt_reader(\n s_d,\n None,\n arg.to_owned()\n ) as @mut reader;\n match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {\n success(named_matches) => {\n let rhs = match rhses[i] {\n \/\/ okay, what's your transcriber?\n @matched_nonterminal(nt_tt(@ref tt)) => {\n match (*tt) {\n \/\/ cut off delimiters; don't parse 'em\n tt_delim(ref tts) => {\n (*tts).slice(1u,(*tts).len()-1u).to_owned()\n }\n _ => cx.span_fatal(\n sp, \"macro rhs must be delimited\")\n }\n },\n _ => cx.span_bug(sp, \"bad thing in rhs\")\n };\n \/\/ rhs has holes ( `$id` and `$(...)` that need filled)\n let trncbr = new_tt_reader(s_d, Some(named_matches),\n rhs);\n let p = @Parser(cx.parse_sess(),\n cx.cfg(),\n trncbr as @mut reader);\n\n \/\/ Let the context choose how to interpret the result.\n \/\/ Weird, but useful for X-macros.\n return MRAny(@ParserAnyMacro {\n parser: p\n } as @AnyMacro);\n }\n failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {\n best_fail_spot = sp;\n best_fail_msg = (*msg).clone();\n },\n error(sp, ref msg) => cx.span_fatal(sp, (*msg))\n }\n }\n _ => cx.bug(\"non-matcher found in parsed lhses\")\n }\n }\n cx.span_fatal(best_fail_spot, best_fail_msg);\n }\n\n let exp = @MacroRulesSyntaxExpanderTTFun {\n name: name,\n lhses: lhses,\n rhses: rhses,\n } as @SyntaxExpanderTTTrait;\n\n return MRDef(MacroDef {\n name: ident_to_str(&name),\n ext: NormalTT(exp, Some(sp))\n });\n}\nsyntax: remove some dead code.\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse ast::{Ident, matcher_, matcher, match_tok, match_nonterminal, match_seq};\nuse ast::{tt_delim};\nuse ast;\nuse codemap::{Span, Spanned, dummy_sp};\nuse ext::base::{AnyMacro, ExtCtxt, MacResult, MRAny, MRDef, MacroDef};\nuse ext::base::{NormalTT, SyntaxExpanderTTTrait};\nuse ext::base;\nuse ext::tt::macro_parser::{error};\nuse ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};\nuse ext::tt::macro_parser::{parse, parse_or_else, success, failure};\nuse parse::lexer::{new_tt_reader, reader};\nuse parse::parser::Parser;\nuse parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str};\nuse parse::token::{FAT_ARROW, SEMI, nt_matchers, nt_tt};\nuse print;\n\nstruct ParserAnyMacro {\n parser: @Parser,\n}\n\nimpl AnyMacro for ParserAnyMacro {\n fn make_expr(&self) -> @ast::Expr {\n self.parser.parse_expr()\n }\n fn make_item(&self) -> Option<@ast::item> {\n self.parser.parse_item(~[]) \/\/ no attrs\n }\n fn make_stmt(&self) -> @ast::Stmt {\n self.parser.parse_stmt(~[]) \/\/ no attrs\n }\n}\n\nstruct MacroRulesSyntaxExpanderTTFun {\n name: Ident,\n lhses: @~[@named_match],\n rhses: @~[@named_match],\n}\n\nimpl SyntaxExpanderTTTrait for MacroRulesSyntaxExpanderTTFun {\n fn expand(&self,\n cx: @ExtCtxt,\n sp: Span,\n arg: &[ast::token_tree],\n _: ast::SyntaxContext)\n -> MacResult {\n generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses)\n }\n}\n\n\/\/ Given `lhses` and `rhses`, this is the new macro we create\nfn generic_extension(cx: @ExtCtxt,\n sp: Span,\n name: Ident,\n arg: &[ast::token_tree],\n lhses: &[@named_match],\n rhses: &[@named_match])\n -> MacResult {\n if cx.trace_macros() {\n println!(\"{}! \\\\{ {} \\\\}\",\n cx.str_of(name),\n print::pprust::tt_to_str(\n &ast::tt_delim(@mut arg.to_owned()),\n get_ident_interner()));\n }\n\n \/\/ Which arm's failure should we report? 
(the one furthest along)\n let mut best_fail_spot = dummy_sp();\n let mut best_fail_msg = ~\"internal error: ran no matchers\";\n\n let s_d = cx.parse_sess().span_diagnostic;\n\n for (i, lhs) in lhses.iter().enumerate() { \/\/ try each arm's matchers\n match *lhs {\n @matched_nonterminal(nt_matchers(ref mtcs)) => {\n \/\/ `none` is because we're not interpolating\n let arg_rdr = new_tt_reader(\n s_d,\n None,\n arg.to_owned()\n ) as @mut reader;\n match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {\n success(named_matches) => {\n let rhs = match rhses[i] {\n \/\/ okay, what's your transcriber?\n @matched_nonterminal(nt_tt(@ref tt)) => {\n match (*tt) {\n \/\/ cut off delimiters; don't parse 'em\n tt_delim(ref tts) => {\n (*tts).slice(1u,(*tts).len()-1u).to_owned()\n }\n _ => cx.span_fatal(\n sp, \"macro rhs must be delimited\")\n }\n },\n _ => cx.span_bug(sp, \"bad thing in rhs\")\n };\n \/\/ rhs has holes ( `$id` and `$(...)` that need filled)\n let trncbr = new_tt_reader(s_d, Some(named_matches),\n rhs);\n let p = @Parser(cx.parse_sess(),\n cx.cfg(),\n trncbr as @mut reader);\n\n \/\/ Let the context choose how to interpret the result.\n \/\/ Weird, but useful for X-macros.\n return MRAny(@ParserAnyMacro {\n parser: p,\n } as @AnyMacro)\n }\n failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {\n best_fail_spot = sp;\n best_fail_msg = (*msg).clone();\n },\n error(sp, ref msg) => cx.span_fatal(sp, (*msg))\n }\n }\n _ => cx.bug(\"non-matcher found in parsed lhses\")\n }\n }\n cx.span_fatal(best_fail_spot, best_fail_msg);\n}\n\n\/\/ this procedure performs the expansion of the\n\/\/ macro_rules! macro. It parses the RHS and adds\n\/\/ an extension to the current context.\npub fn add_new_extension(cx: @ExtCtxt,\n sp: Span,\n name: Ident,\n arg: ~[ast::token_tree],\n _: ast::SyntaxContext)\n -> base::MacResult {\n \/\/ these spans won't matter, anyways\n fn ms(m: matcher_) -> matcher {\n Spanned {\n node: m.clone(),\n span: dummy_sp()\n }\n }\n\n let lhs_nm = gensym_ident(\"lhs\");\n let rhs_nm = gensym_ident(\"rhs\");\n\n \/\/ The pattern that macro_rules matches.\n \/\/ The grammar for macro_rules! is:\n \/\/ $( $lhs:mtcs => $rhs:tt );+\n \/\/ ...quasiquoting this would be nice.\n let argument_gram = ~[\n ms(match_seq(~[\n ms(match_nonterminal(lhs_nm, special_idents::matchers, 0u)),\n ms(match_tok(FAT_ARROW)),\n ms(match_nonterminal(rhs_nm, special_idents::tt, 1u)),\n ], Some(SEMI), false, 0u, 2u)),\n \/\/to phase into semicolon-termination instead of\n \/\/semicolon-separation\n ms(match_seq(~[ms(match_tok(SEMI))], None, true, 2u, 2u))];\n\n\n \/\/ Parse the macro_rules! 
invocation (`none` is for no interpolations):\n let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,\n None,\n arg.clone());\n let argument_map = parse_or_else(cx.parse_sess(),\n cx.cfg(),\n arg_reader as @mut reader,\n argument_gram);\n\n \/\/ Extract the arguments:\n let lhses = match *argument_map.get(&lhs_nm) {\n @matched_seq(ref s, _) => \/* FIXME (#2543) *\/ @(*s).clone(),\n _ => cx.span_bug(sp, \"wrong-structured lhs\")\n };\n\n let rhses = match *argument_map.get(&rhs_nm) {\n @matched_seq(ref s, _) => \/* FIXME (#2543) *\/ @(*s).clone(),\n _ => cx.span_bug(sp, \"wrong-structured rhs\")\n };\n\n let exp = @MacroRulesSyntaxExpanderTTFun {\n name: name,\n lhses: lhses,\n rhses: rhses,\n } as @SyntaxExpanderTTTrait;\n\n return MRDef(MacroDef {\n name: ident_to_str(&name),\n ext: NormalTT(exp, Some(sp))\n });\n}\n<|endoftext|>"} {"text":"05 - alias\/\/ `NanoSecond` is a new name for `u64`\ntype NanoSecond = u64;\ntype Inch = u64;\n\n\/\/ Types must have CamelCase names, or the compiler will raise a warning.\n\/\/ The exception to this rule are the primitive types: uint, f32, etc.\n\/\/ Use an attribute to silence warning.\n#[allow(non_camel_case_types)]\ntype uint64_t = u64;\n\nfn main() {\n \/\/ `NanoSecond` = `Inch` = `uint64_t` = `u64`\n let nanoseconds: NanoSecond = 5 as uint64_t;\n let inches: Inch = 2 as uint64_t;\n\n \/\/ Note that type aliases *don't* provide any extra type safety, because\n \/\/ aliases are *not* new types\n println!(\"{} nanoseconds + {} inches = {} unit?\",\n nanoseconds,\n inches,\n nanoseconds + inches);\n}\n<|endoftext|>"} {"text":"test: Make issue-2590 conform to the spirit of the test better<|endoftext|>"} {"text":"\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse x = m::f; \/\/~ ERROR failed to resolve imports\n\nmod m {\n}\n\nfn main() {\n}\nactually fix failing test\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse x = m::f; \/\/~ ERROR failed to resolve import\n\nmod m {\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::traits::{Normalized, ObligationCause};\nuse rustc::traits::query::NoSolution;\nuse rustc::ty::{ParamEnvAnd, Ty, TyCtxt};\nuse rustc::util::common::CellUsizeExt;\n\ncrate fn normalize_ty_after_erasing_regions<'tcx>(\n tcx: TyCtxt<'_, 'tcx, 'tcx>,\n goal: ParamEnvAnd<'tcx, Ty<'tcx>>,\n) -> Ty<'tcx> {\n let ParamEnvAnd { param_env, value } = goal;\n tcx.sess.perf_stats.normalize_ty_after_erasing_regions.increment();\n tcx.infer_ctxt().enter(|infcx| {\n let cause = ObligationCause::dummy();\n match infcx.at(&cause, param_env).normalize(&value) {\n Ok(Normalized { value: normalized_value, obligations: _ }) => {\n \/\/ ^^^^^^^^^^^\n \/\/ We don't care about the `obligations`,\n \/\/ they are always only region relations,\n \/\/ and we are about to erase those anyway.\n let normalized_value = infcx.resolve_type_vars_if_possible(&normalized_value);\n let normalized_value = infcx.tcx.erase_regions(&normalized_value);\n tcx.lift_to_global(&normalized_value).unwrap()\n }\n Err(NoSolution) => bug!(\"could not fully normalize `{:?}`\", value),\n }\n })\n}\nadd a debug assertion that only outlives-oblig. result from norm.\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::traits::{Normalized, ObligationCause};\nuse rustc::traits::query::NoSolution;\nuse rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};\nuse rustc::util::common::CellUsizeExt;\n\ncrate fn normalize_ty_after_erasing_regions<'tcx>(\n tcx: TyCtxt<'_, 'tcx, 'tcx>,\n goal: ParamEnvAnd<'tcx, Ty<'tcx>>,\n) -> Ty<'tcx> {\n let ParamEnvAnd { param_env, value } = goal;\n tcx.sess\n .perf_stats\n .normalize_ty_after_erasing_regions\n .increment();\n tcx.infer_ctxt().enter(|infcx| {\n let cause = ObligationCause::dummy();\n match infcx.at(&cause, param_env).normalize(&value) {\n Ok(Normalized {\n value: normalized_value,\n obligations: normalized_obligations,\n }) => {\n \/\/ We don't care about the `obligations`; they are\n \/\/ always only region relations, and we are about to\n \/\/ erase those anyway:\n debug_assert_eq!(\n normalized_obligations\n .iter()\n .find(|p| not_outlives_predicate(&p.predicate)),\n None,\n );\n\n let normalized_value = infcx.resolve_type_vars_if_possible(&normalized_value);\n let normalized_value = infcx.tcx.erase_regions(&normalized_value);\n tcx.lift_to_global(&normalized_value).unwrap()\n }\n Err(NoSolution) => bug!(\"could not fully normalize `{:?}`\", value),\n }\n })\n}\n\nfn not_outlives_predicate(p: &ty::Predicate<'_>) -> bool {\n match p {\n ty::Predicate::RegionOutlives(..) | ty::Predicate::TypeOutlives(..) => false,\n ty::Predicate::Trait(..)\n | ty::Predicate::Projection(..)\n | ty::Predicate::WellFormed(..)\n | ty::Predicate::ObjectSafe(..)\n | ty::Predicate::ClosureKind(..)\n | ty::Predicate::Subtype(..)\n | ty::Predicate::ConstEvaluatable(..) 
=> true,\n }\n}\n<|endoftext|>"} {"text":"rust version<|endoftext|>"} {"text":"use mio::*;\nuse mio::tcp::*;\nuse mio::buf::{ByteBuf, MutByteBuf, SliceBuf};\nuse mio::util::Slab;\nuse std::io;\nuse super::localhost;\n\nconst SERVER: Token = Token(0);\nconst CLIENT: Token = Token(1);\n\nstruct EchoConn {\n sock: TcpStream,\n buf: Option,\n mut_buf: Option,\n token: Option,\n interest: EventSet\n}\n\nimpl EchoConn {\n fn new(sock: TcpStream) -> EchoConn {\n EchoConn {\n sock: sock,\n buf: None,\n mut_buf: Some(ByteBuf::mut_with_capacity(2048)),\n token: None,\n interest: EventSet::hup()\n }\n }\n\n fn writable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n let mut buf = self.buf.take().unwrap();\n\n match self.sock.try_write_buf(&mut buf) {\n Ok(None) => {\n debug!(\"client flushing buf; WOULDBLOCK\");\n\n self.buf = Some(buf);\n self.interest.insert(EventSet::writable());\n }\n Ok(Some(r)) => {\n debug!(\"CONN : we wrote {} bytes!\", r);\n\n self.mut_buf = Some(buf.flip());\n\n self.interest.insert(EventSet::readable());\n self.interest.remove(EventSet::writable());\n }\n Err(e) => debug!(\"not implemented; client err={:?}\", e),\n }\n\n event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn readable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n let mut buf = self.mut_buf.take().unwrap();\n\n match self.sock.try_read_buf(&mut buf) {\n Ok(None) => {\n panic!(\"We just got readable, but were unable to read from the socket?\");\n }\n Ok(Some(r)) => {\n debug!(\"CONN : we read {} bytes!\", r);\n self.interest.remove(EventSet::readable());\n self.interest.insert(EventSet::writable());\n }\n Err(e) => {\n debug!(\"not implemented; client err={:?}\", e);\n self.interest.remove(EventSet::readable());\n }\n\n };\n\n \/\/ prepare to provide this to writable\n self.buf = Some(buf.flip());\n event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge())\n }\n}\n\nstruct EchoServer {\n sock: TcpListener,\n conns: Slab\n}\n\nimpl EchoServer {\n fn accept(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"server accepting socket\");\n\n let sock = self.sock.accept().unwrap().unwrap();\n let conn = EchoConn::new(sock,);\n let tok = self.conns.insert(conn)\n .ok().expect(\"could not add connectiont o slab\");\n\n \/\/ Register the connection\n self.conns[tok].token = Some(tok);\n event_loop.register_opt(&self.conns[tok].sock, tok, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot())\n .ok().expect(\"could not register socket with event loop\");\n\n Ok(())\n }\n\n fn conn_readable(&mut self, event_loop: &mut EventLoop, tok: Token) -> io::Result<()> {\n debug!(\"server conn readable; tok={:?}\", tok);\n self.conn(tok).readable(event_loop)\n }\n\n fn conn_writable(&mut self, event_loop: &mut EventLoop, tok: Token) -> io::Result<()> {\n debug!(\"server conn writable; tok={:?}\", tok);\n self.conn(tok).writable(event_loop)\n }\n\n fn conn<'a>(&'a mut self, tok: Token) -> &'a mut EchoConn {\n &mut self.conns[tok]\n }\n}\n\nstruct EchoClient {\n sock: TcpStream,\n msgs: Vec<&'static str>,\n tx: SliceBuf<'static>,\n rx: SliceBuf<'static>,\n mut_buf: Option,\n token: Token,\n interest: EventSet\n}\n\n\n\/\/ Sends a message and expects to receive the same exact message, one at a time\nimpl EchoClient {\n fn new(sock: TcpStream, tok: Token, mut msgs: Vec<&'static str>) -> EchoClient {\n let curr = msgs.remove(0);\n\n EchoClient {\n sock: sock,\n msgs: msgs,\n tx: 
SliceBuf::wrap(curr.as_bytes()),\n rx: SliceBuf::wrap(curr.as_bytes()),\n mut_buf: Some(ByteBuf::mut_with_capacity(2048)),\n token: tok,\n interest: EventSet::none()\n }\n }\n\n fn readable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"client socket readable\");\n\n let mut buf = self.mut_buf.take().unwrap();\n\n match self.sock.try_read_buf(&mut buf) {\n Ok(None) => {\n panic!(\"We just got readable, but were unable to read from the socket?\");\n }\n Ok(Some(r)) => {\n debug!(\"CLIENT : We read {} bytes!\", r);\n }\n Err(e) => {\n panic!(\"not implemented; client err={:?}\", e);\n }\n };\n\n \/\/ prepare for reading\n let mut buf = buf.flip();\n\n while buf.has_remaining() {\n let actual = buf.read_byte().unwrap();\n let expect = self.rx.read_byte().unwrap();\n\n assert!(actual == expect, \"actual={}; expect={}\", actual, expect);\n }\n\n self.mut_buf = Some(buf.flip());\n\n self.interest.remove(EventSet::readable());\n\n if !self.rx.has_remaining() {\n self.next_msg(event_loop).unwrap();\n }\n\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn writable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"client socket writable\");\n\n match self.sock.try_write_buf(&mut self.tx) {\n Ok(None) => {\n debug!(\"client flushing buf; WOULDBLOCK\");\n self.interest.insert(EventSet::writable());\n }\n Ok(Some(r)) => {\n debug!(\"CLIENT : we wrote {} bytes!\", r);\n self.interest.insert(EventSet::readable());\n self.interest.remove(EventSet::writable());\n }\n Err(e) => debug!(\"not implemented; client err={:?}\", e)\n }\n\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn next_msg(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n if self.msgs.is_empty() {\n event_loop.shutdown();\n return Ok(());\n }\n\n let curr = self.msgs.remove(0);\n\n debug!(\"client prepping next message\");\n self.tx = SliceBuf::wrap(curr.as_bytes());\n self.rx = SliceBuf::wrap(curr.as_bytes());\n\n self.interest.insert(EventSet::writable());\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n}\n\nstruct Echo {\n server: EchoServer,\n client: EchoClient,\n}\n\nimpl Echo {\n fn new(srv: TcpListener, client: TcpStream, msgs: Vec<&'static str>) -> Echo {\n Echo {\n server: EchoServer {\n sock: srv,\n conns: Slab::new_starting_at(Token(2), 128)\n },\n client: EchoClient::new(client, CLIENT, msgs)\n }\n }\n}\n\nimpl Handler for Echo {\n type Timeout = usize;\n type Message = ();\n\n fn ready(&mut self, event_loop: &mut EventLoop, token: Token, events: EventSet) {\n if events.is_readable() {\n match token {\n SERVER => self.server.accept(event_loop).unwrap(),\n CLIENT => self.client.readable(event_loop).unwrap(),\n i => self.server.conn_readable(event_loop, i).unwrap()\n }\n }\n\n if events.is_writable() {\n match token {\n SERVER => panic!(\"received writable for token 0\"),\n CLIENT => self.client.writable(event_loop).unwrap(),\n _ => self.server.conn_writable(event_loop, token).unwrap()\n };\n }\n }\n}\n\n#[test]\npub fn test_echo_server() {\n debug!(\"Starting TEST_ECHO_SERVER\");\n let mut event_loop = EventLoop::new().unwrap();\n\n let addr = localhost();\n let srv = TcpListener::bind(&addr).unwrap();\n\n info!(\"listen for connections\");\n event_loop.register_opt(&srv, SERVER, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();\n\n let (sock, _) = TcpSocket::v4().unwrap()\n 
.connect(&addr).unwrap();\n\n \/\/ Connect to the server\n event_loop.register_opt(&sock, CLIENT, EventSet::writable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();\n\n \/\/ Start the event loop\n event_loop.run(&mut Echo::new(srv, sock, vec![\"foo\", \"bar\"])).unwrap();\n}\nFix typouse mio::*;\nuse mio::tcp::*;\nuse mio::buf::{ByteBuf, MutByteBuf, SliceBuf};\nuse mio::util::Slab;\nuse std::io;\nuse super::localhost;\n\nconst SERVER: Token = Token(0);\nconst CLIENT: Token = Token(1);\n\nstruct EchoConn {\n sock: TcpStream,\n buf: Option,\n mut_buf: Option,\n token: Option,\n interest: EventSet\n}\n\nimpl EchoConn {\n fn new(sock: TcpStream) -> EchoConn {\n EchoConn {\n sock: sock,\n buf: None,\n mut_buf: Some(ByteBuf::mut_with_capacity(2048)),\n token: None,\n interest: EventSet::hup()\n }\n }\n\n fn writable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n let mut buf = self.buf.take().unwrap();\n\n match self.sock.try_write_buf(&mut buf) {\n Ok(None) => {\n debug!(\"client flushing buf; WOULDBLOCK\");\n\n self.buf = Some(buf);\n self.interest.insert(EventSet::writable());\n }\n Ok(Some(r)) => {\n debug!(\"CONN : we wrote {} bytes!\", r);\n\n self.mut_buf = Some(buf.flip());\n\n self.interest.insert(EventSet::readable());\n self.interest.remove(EventSet::writable());\n }\n Err(e) => debug!(\"not implemented; client err={:?}\", e),\n }\n\n event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn readable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n let mut buf = self.mut_buf.take().unwrap();\n\n match self.sock.try_read_buf(&mut buf) {\n Ok(None) => {\n panic!(\"We just got readable, but were unable to read from the socket?\");\n }\n Ok(Some(r)) => {\n debug!(\"CONN : we read {} bytes!\", r);\n self.interest.remove(EventSet::readable());\n self.interest.insert(EventSet::writable());\n }\n Err(e) => {\n debug!(\"not implemented; client err={:?}\", e);\n self.interest.remove(EventSet::readable());\n }\n\n };\n\n \/\/ prepare to provide this to writable\n self.buf = Some(buf.flip());\n event_loop.reregister(&self.sock, self.token.unwrap(), self.interest, PollOpt::edge())\n }\n}\n\nstruct EchoServer {\n sock: TcpListener,\n conns: Slab\n}\n\nimpl EchoServer {\n fn accept(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"server accepting socket\");\n\n let sock = self.sock.accept().unwrap().unwrap();\n let conn = EchoConn::new(sock,);\n let tok = self.conns.insert(conn)\n .ok().expect(\"could not add connection to slab\");\n\n \/\/ Register the connection\n self.conns[tok].token = Some(tok);\n event_loop.register_opt(&self.conns[tok].sock, tok, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot())\n .ok().expect(\"could not register socket with event loop\");\n\n Ok(())\n }\n\n fn conn_readable(&mut self, event_loop: &mut EventLoop, tok: Token) -> io::Result<()> {\n debug!(\"server conn readable; tok={:?}\", tok);\n self.conn(tok).readable(event_loop)\n }\n\n fn conn_writable(&mut self, event_loop: &mut EventLoop, tok: Token) -> io::Result<()> {\n debug!(\"server conn writable; tok={:?}\", tok);\n self.conn(tok).writable(event_loop)\n }\n\n fn conn<'a>(&'a mut self, tok: Token) -> &'a mut EchoConn {\n &mut self.conns[tok]\n }\n}\n\nstruct EchoClient {\n sock: TcpStream,\n msgs: Vec<&'static str>,\n tx: SliceBuf<'static>,\n rx: SliceBuf<'static>,\n mut_buf: Option,\n token: Token,\n interest: EventSet\n}\n\n\n\/\/ Sends a message and expects to receive the same exact message, 
one at a time\nimpl EchoClient {\n fn new(sock: TcpStream, tok: Token, mut msgs: Vec<&'static str>) -> EchoClient {\n let curr = msgs.remove(0);\n\n EchoClient {\n sock: sock,\n msgs: msgs,\n tx: SliceBuf::wrap(curr.as_bytes()),\n rx: SliceBuf::wrap(curr.as_bytes()),\n mut_buf: Some(ByteBuf::mut_with_capacity(2048)),\n token: tok,\n interest: EventSet::none()\n }\n }\n\n fn readable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"client socket readable\");\n\n let mut buf = self.mut_buf.take().unwrap();\n\n match self.sock.try_read_buf(&mut buf) {\n Ok(None) => {\n panic!(\"We just got readable, but were unable to read from the socket?\");\n }\n Ok(Some(r)) => {\n debug!(\"CLIENT : We read {} bytes!\", r);\n }\n Err(e) => {\n panic!(\"not implemented; client err={:?}\", e);\n }\n };\n\n \/\/ prepare for reading\n let mut buf = buf.flip();\n\n while buf.has_remaining() {\n let actual = buf.read_byte().unwrap();\n let expect = self.rx.read_byte().unwrap();\n\n assert!(actual == expect, \"actual={}; expect={}\", actual, expect);\n }\n\n self.mut_buf = Some(buf.flip());\n\n self.interest.remove(EventSet::readable());\n\n if !self.rx.has_remaining() {\n self.next_msg(event_loop).unwrap();\n }\n\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn writable(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n debug!(\"client socket writable\");\n\n match self.sock.try_write_buf(&mut self.tx) {\n Ok(None) => {\n debug!(\"client flushing buf; WOULDBLOCK\");\n self.interest.insert(EventSet::writable());\n }\n Ok(Some(r)) => {\n debug!(\"CLIENT : we wrote {} bytes!\", r);\n self.interest.insert(EventSet::readable());\n self.interest.remove(EventSet::writable());\n }\n Err(e) => debug!(\"not implemented; client err={:?}\", e)\n }\n\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n\n fn next_msg(&mut self, event_loop: &mut EventLoop) -> io::Result<()> {\n if self.msgs.is_empty() {\n event_loop.shutdown();\n return Ok(());\n }\n\n let curr = self.msgs.remove(0);\n\n debug!(\"client prepping next message\");\n self.tx = SliceBuf::wrap(curr.as_bytes());\n self.rx = SliceBuf::wrap(curr.as_bytes());\n\n self.interest.insert(EventSet::writable());\n event_loop.reregister(&self.sock, self.token, self.interest, PollOpt::edge() | PollOpt::oneshot())\n }\n}\n\nstruct Echo {\n server: EchoServer,\n client: EchoClient,\n}\n\nimpl Echo {\n fn new(srv: TcpListener, client: TcpStream, msgs: Vec<&'static str>) -> Echo {\n Echo {\n server: EchoServer {\n sock: srv,\n conns: Slab::new_starting_at(Token(2), 128)\n },\n client: EchoClient::new(client, CLIENT, msgs)\n }\n }\n}\n\nimpl Handler for Echo {\n type Timeout = usize;\n type Message = ();\n\n fn ready(&mut self, event_loop: &mut EventLoop, token: Token, events: EventSet) {\n if events.is_readable() {\n match token {\n SERVER => self.server.accept(event_loop).unwrap(),\n CLIENT => self.client.readable(event_loop).unwrap(),\n i => self.server.conn_readable(event_loop, i).unwrap()\n }\n }\n\n if events.is_writable() {\n match token {\n SERVER => panic!(\"received writable for token 0\"),\n CLIENT => self.client.writable(event_loop).unwrap(),\n _ => self.server.conn_writable(event_loop, token).unwrap()\n };\n }\n }\n}\n\n#[test]\npub fn test_echo_server() {\n debug!(\"Starting TEST_ECHO_SERVER\");\n let mut event_loop = EventLoop::new().unwrap();\n\n let addr = localhost();\n let srv = TcpListener::bind(&addr).unwrap();\n\n 
info!(\"listen for connections\");\n event_loop.register_opt(&srv, SERVER, EventSet::readable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();\n\n let (sock, _) = TcpSocket::v4().unwrap()\n .connect(&addr).unwrap();\n\n \/\/ Connect to the server\n event_loop.register_opt(&sock, CLIENT, EventSet::writable(), PollOpt::edge() | PollOpt::oneshot()).unwrap();\n\n \/\/ Start the event loop\n event_loop.run(&mut Echo::new(srv, sock, vec![\"foo\", \"bar\"])).unwrap();\n}\n<|endoftext|>"} {"text":"Check-in new threadpool\/\/! A thread pool for running blocking functions asynchronously.\n\nuse std::sync::atomic::{AtomicU64, Ordering};\nuse std::thread;\nuse std::time::Duration;\n\nuse crossbeam::channel::{bounded, Receiver, Sender};\nuse lazy_static::lazy_static;\n\nuse super::Promise;\n\nconst MAX_THREADS: u64 = 10_000;\n\nstatic DYNAMIC_THREAD_COUNT: AtomicU64 = AtomicU64::new(0);\n\nstruct Pool {\n sender: Sender>,\n receiver: Receiver>,\n}\n\nlazy_static! {\n static ref POOL: Pool = {\n for _ in 0..2 {\n thread::Builder::new()\n .name(\"async-blocking-driver\".to_string())\n .spawn(|| {\n for task in &POOL.receiver {\n (task)()\n }\n })\n .expect(\"cannot start a thread driving blocking tasks\");\n }\n\n \/\/ We want to use an unbuffered channel here to help\n \/\/ us drive our dynamic control. In effect, the\n \/\/ kernel's scheduler becomes the queue, reducing\n \/\/ the number of buffers that work must flow through\n \/\/ before being acted on by a core. This helps keep\n \/\/ latency snappy in the overall async system by\n \/\/ reducing bufferbloat.\n let (sender, receiver) = bounded(0);\n Pool { sender, receiver }\n };\n}\n\n\/\/ Create up to MAX_THREADS dynamic blocking task worker threads.\n\/\/ Dynamic threads will terminate themselves if they don't\n\/\/ receive any work after one second.\nfn maybe_create_another_blocking_thread() {\n \/\/ We use a `Relaxed` atomic operation because\n \/\/ it's just a heuristic, and would not lose correctness\n \/\/ even if it's random.\n let workers = DYNAMIC_THREAD_COUNT.load(Ordering::Relaxed);\n if workers >= MAX_THREADS {\n return;\n }\n\n thread::Builder::new()\n .name(\"async-blocking-driver-dynamic\".to_string())\n .spawn(|| {\n let wait_limit = Duration::from_secs(1);\n\n DYNAMIC_THREAD_COUNT.fetch_add(1, Ordering::Relaxed);\n while let Ok(task) = POOL.receiver.recv_timeout(wait_limit) {\n (task)();\n }\n DYNAMIC_THREAD_COUNT.fetch_sub(1, Ordering::Relaxed);\n })\n .expect(\"cannot start a dynamic thread driving blocking tasks\");\n}\n\n\/\/\/ Spawns a blocking task.\n\/\/\/\n\/\/\/ The task will be spawned onto a thread pool specifically dedicated to blocking tasks.\npub fn spawn(work: F) -> Promise\nwhere\n F: FnOnce() -> R + Send + 'static,\n R: Send + 'static,\n{\n let (promise_filler, promise) = Promise::pair();\n let task = move || {\n let result = (work)();\n promise_filler.fill(result);\n };\n let first_try_result = POOL.sender.try_send(Box::new(task));\n match first_try_result {\n Ok(()) => {\n \/\/ NICEEEE\n }\n Err(crossbeam::channel::TrySendError::Full(task)) => {\n \/\/ We were not able to send to the channel without\n \/\/ blocking. 
Try to spin up another thread and then\n \/\/ retry sending while blocking.\n maybe_create_another_blocking_thread();\n POOL.sender.send(task).unwrap()\n }\n Err(crossbeam::channel::TrySendError::Disconnected(_)) => {\n panic!(\n \"unable to send to blocking threadpool \\\n due to receiver disconnection\"\n );\n }\n }\n promise\n}\n<|endoftext|>"} {"text":"Impl PartialEq for Intention<|endoftext|>"} {"text":"Add test to exercise InvalidUndefBytes\/\/ error-pattern: reading uninitialized memory\n\nuse std::alloc::{alloc, dealloc, Layout};\nuse std::slice::from_raw_parts;\n\nfn main() {\n let layout = Layout::from_size_align(32, 8).unwrap();\n unsafe {\n let ptr = alloc(layout);\n *ptr = 0x41;\n *ptr.add(1) = 0x42;\n *ptr.add(2) = 0x43;\n *ptr.add(3) = 0x44;\n *ptr.add(16) = 0x00;\n let slice1 = from_raw_parts(ptr, 16);\n let slice2 = from_raw_parts(ptr.add(16), 16);\n drop(slice1.cmp(slice2));\n dealloc(ptr, layout);\n }\n}\n<|endoftext|>"} {"text":"\/\/! Tests for the -Zrustdoc-map feature.\n\nuse cargo_test_support::registry::Package;\nuse cargo_test_support::{is_nightly, paths, project, Project};\n\nfn basic_project() -> Project {\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build()\n}\n\n#[cargo_test]\nfn ignores_on_stable() {\n \/\/ Requires -Zrustdoc-map to use.\n let p = basic_project();\n p.cargo(\"doc -v --no-deps\")\n .with_stderr_does_not_contain(\"[..]--extern-html-root-url[..]\")\n .run();\n}\n\n#[cargo_test]\nfn simple() {\n \/\/ Basic test that it works with crates.io.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n let p = basic_project();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn std_docs() {\n \/\/ Mapping std docs somewhere else.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n \/\/ For local developers, skip this test if docs aren't installed.\n let docs = std::path::Path::new(&paths::sysroot()).join(\"share\/doc\/rust\/html\");\n if !docs.exists() {\n if cargo::util::is_ci() {\n panic!(\"std docs are not installed, check that the rust-docs component is installed\");\n } else {\n eprintln!(\n \"documentation not found at {}, \\\n skipping test (run `rustdoc component add rust-docs` to install\",\n docs.display()\n );\n return;\n }\n }\n let p = basic_project();\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map]\n std = \"local\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\"[RUNNING] `rustdoc [..]--crate-name foo [..]std=file:\/\/[..]\")\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"share\/doc\/rust\/html\/core\/option\/enum.Option.html\"\"#));\n\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map]\n std = \"https:\/\/example.com\/rust\/\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n 
.masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]std=https:\/\/example.com\/rust\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/example.com\/rust\/core\/option\/enum.Option.html\"\"#));\n}\n\n#[cargo_test]\nfn renamed_dep() {\n \/\/ Handles renamed dependencies.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n groovy = { version = \"1.0\", package = \"bar\" }\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn lib_name() {\n \/\/ Handles lib name != package name.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"bar\"\n version = \"1.0.0\"\n\n [lib]\n name = \"rumpelstiltskin\"\n \"#,\n )\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n\n [dependencies]\n bar = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]rumpelstiltskin=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/rumpelstiltskin\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn alt_registry() {\n \/\/ Supports other registry names.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .alternative(true)\n .file(\n \"src\/lib.rs\",\n r#\"\n extern crate baz;\n pub struct Queen;\n pub use baz::King;\n \"#,\n )\n .registry_dep(\"baz\", \"1.0\")\n .publish();\n Package::new(\"baz\", \"1.0.0\")\n .alternative(true)\n .file(\"src\/lib.rs\", \"pub struct King;\")\n .publish();\n Package::new(\"grimm\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Gold;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = { version = \"1.0\", registry=\"alternative\" }\n grimm = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn queen() -> bar::Queen { bar::Queen }\n pub fn king() -> bar::King { bar::King }\n pub fn gold() -> grimm::Gold { grimm::Gold }\n \"#,\n )\n .file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map.registries]\n alternative = \"https:\/\/example.com\/{pkg_name}\/{version}\/\"\n crates-io = \"https:\/\/docs.rs\/\"\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n 
.masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo \\\n [..]bar=https:\/\/example.com\/bar\/1.0.0\/[..]grimm=https:\/\/docs.rs\/grimm\/1.0.0\/[..]\",\n )\n .run();\n let queen = p.read_file(\"target\/doc\/foo\/fn.queen.html\");\n assert!(queen.contains(r#\"href=\"https:\/\/example.com\/bar\/1.0.0\/bar\/struct.Queen.html\"\"#));\n \/\/ The king example fails to link. Rustdoc seems to want the origin crate\n \/\/ name (baz) for re-exports. There are many issues in the issue tracker\n \/\/ for rustdoc re-exports, so I'm not sure, but I think this is maybe a\n \/\/ rustdoc issue. Alternatively, Cargo could provide mappings for all\n \/\/ transitive dependencies to fix this.\n let king = p.read_file(\"target\/doc\/foo\/fn.king.html\");\n assert!(king.contains(r#\"-> King\"#));\n\n let gold = p.read_file(\"target\/doc\/foo\/fn.gold.html\");\n assert!(gold.contains(r#\"href=\"https:\/\/docs.rs\/grimm\/1.0.0\/grimm\/struct.Gold.html\"\"#));\n}\n\n#[cargo_test]\nfn multiple_versions() {\n \/\/ What happens when there are multiple versions.\n \/\/ NOTE: This is currently broken behavior. Rustdoc does not provide a way\n \/\/ to match renamed dependencies.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Spin;\")\n .publish();\n Package::new(\"bar\", \"2.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = \"1.0\"\n bar2 = {version=\"2.0\", package=\"bar\"}\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n \"\n pub fn fn1() -> bar::Spin {bar::Spin}\n pub fn fn2() -> bar2::Straw {bar2::Straw}\n \",\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo \\\n [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]bar=https:\/\/docs.rs\/bar\/2.0.0\/[..]\",\n )\n .run();\n let fn1 = p.read_file(\"target\/doc\/foo\/fn.fn1.html\");\n \/\/ This should be 1.0.0, rustdoc seems to use the last entry when there\n \/\/ are duplicates.\n assert!(fn1.contains(r#\"href=\"https:\/\/docs.rs\/bar\/2.0.0\/bar\/struct.Spin.html\"\"#));\n let fn2 = p.read_file(\"target\/doc\/foo\/fn.fn2.html\");\n assert!(fn2.contains(r#\"href=\"https:\/\/docs.rs\/bar\/2.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn rebuilds_when_changing() {\n \/\/ Make sure it rebuilds if the map changes.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n let p = basic_project();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\"[..]--extern-html-root-url[..]\")\n .run();\n\n \/\/ This also tests that the map for docs.rs can be overridden.\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map.registries]\n crates-io = \"https:\/\/example.com\/\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--extern-html-root-url 'bar=https:\/\/example.com\/bar\/1.0.0\/[..]' [..]\",\n )\n .run();\n}\nFix inconsistency with quoting\/\/! 
Tests for the -Zrustdoc-map feature.\n\nuse cargo_test_support::registry::Package;\nuse cargo_test_support::{is_nightly, paths, project, Project};\n\nfn basic_project() -> Project {\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build()\n}\n\n#[cargo_test]\nfn ignores_on_stable() {\n \/\/ Requires -Zrustdoc-map to use.\n let p = basic_project();\n p.cargo(\"doc -v --no-deps\")\n .with_stderr_does_not_contain(\"[..]--extern-html-root-url[..]\")\n .run();\n}\n\n#[cargo_test]\nfn simple() {\n \/\/ Basic test that it works with crates.io.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n let p = basic_project();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn std_docs() {\n \/\/ Mapping std docs somewhere else.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n \/\/ For local developers, skip this test if docs aren't installed.\n let docs = std::path::Path::new(&paths::sysroot()).join(\"share\/doc\/rust\/html\");\n if !docs.exists() {\n if cargo::util::is_ci() {\n panic!(\"std docs are not installed, check that the rust-docs component is installed\");\n } else {\n eprintln!(\n \"documentation not found at {}, \\\n skipping test (run `rustdoc component add rust-docs` to install\",\n docs.display()\n );\n return;\n }\n }\n let p = basic_project();\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map]\n std = \"local\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\"[RUNNING] `rustdoc [..]--crate-name foo [..]std=file:\/\/[..]\")\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"share\/doc\/rust\/html\/core\/option\/enum.Option.html\"\"#));\n\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map]\n std = \"https:\/\/example.com\/rust\/\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]std=https:\/\/example.com\/rust\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/example.com\/rust\/core\/option\/enum.Option.html\"\"#));\n}\n\n#[cargo_test]\nfn renamed_dep() {\n \/\/ Handles renamed dependencies.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n groovy = { version = \"1.0\", package = \"bar\" }\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n 
.with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn lib_name() {\n \/\/ Handles lib name != package name.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"bar\"\n version = \"1.0.0\"\n\n [lib]\n name = \"rumpelstiltskin\"\n \"#,\n )\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n\n [dependencies]\n bar = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn myfun() -> Option {\n None\n }\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo [..]rumpelstiltskin=https:\/\/docs.rs\/bar\/1.0.0\/[..]\",\n )\n .run();\n let myfun = p.read_file(\"target\/doc\/foo\/fn.myfun.html\");\n assert!(myfun.contains(r#\"href=\"https:\/\/docs.rs\/bar\/1.0.0\/rumpelstiltskin\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn alt_registry() {\n \/\/ Supports other registry names.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .alternative(true)\n .file(\n \"src\/lib.rs\",\n r#\"\n extern crate baz;\n pub struct Queen;\n pub use baz::King;\n \"#,\n )\n .registry_dep(\"baz\", \"1.0\")\n .publish();\n Package::new(\"baz\", \"1.0.0\")\n .alternative(true)\n .file(\"src\/lib.rs\", \"pub struct King;\")\n .publish();\n Package::new(\"grimm\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Gold;\")\n .publish();\n\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = { version = \"1.0\", registry=\"alternative\" }\n grimm = \"1.0\"\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n r#\"\n pub fn queen() -> bar::Queen { bar::Queen }\n pub fn king() -> bar::King { bar::King }\n pub fn gold() -> grimm::Gold { grimm::Gold }\n \"#,\n )\n .file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map.registries]\n alternative = \"https:\/\/example.com\/{pkg_name}\/{version}\/\"\n crates-io = \"https:\/\/docs.rs\/\"\n \"#,\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo \\\n [..]bar=https:\/\/example.com\/bar\/1.0.0\/[..]grimm=https:\/\/docs.rs\/grimm\/1.0.0\/[..]\",\n )\n .run();\n let queen = p.read_file(\"target\/doc\/foo\/fn.queen.html\");\n assert!(queen.contains(r#\"href=\"https:\/\/example.com\/bar\/1.0.0\/bar\/struct.Queen.html\"\"#));\n \/\/ The king example fails to link. Rustdoc seems to want the origin crate\n \/\/ name (baz) for re-exports. There are many issues in the issue tracker\n \/\/ for rustdoc re-exports, so I'm not sure, but I think this is maybe a\n \/\/ rustdoc issue. 
Alternatively, Cargo could provide mappings for all\n \/\/ transitive dependencies to fix this.\n let king = p.read_file(\"target\/doc\/foo\/fn.king.html\");\n assert!(king.contains(r#\"-> King\"#));\n\n let gold = p.read_file(\"target\/doc\/foo\/fn.gold.html\");\n assert!(gold.contains(r#\"href=\"https:\/\/docs.rs\/grimm\/1.0.0\/grimm\/struct.Gold.html\"\"#));\n}\n\n#[cargo_test]\nfn multiple_versions() {\n \/\/ What happens when there are multiple versions.\n \/\/ NOTE: This is currently broken behavior. Rustdoc does not provide a way\n \/\/ to match renamed dependencies.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n Package::new(\"bar\", \"1.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Spin;\")\n .publish();\n Package::new(\"bar\", \"2.0.0\")\n .file(\"src\/lib.rs\", \"pub struct Straw;\")\n .publish();\n let p = project()\n .file(\n \"Cargo.toml\",\n r#\"\n [package]\n name = \"foo\"\n version = \"0.1.0\"\n edition = \"2018\"\n\n [dependencies]\n bar = \"1.0\"\n bar2 = {version=\"2.0\", package=\"bar\"}\n \"#,\n )\n .file(\n \"src\/lib.rs\",\n \"\n pub fn fn1() -> bar::Spin {bar::Spin}\n pub fn fn2() -> bar2::Straw {bar2::Straw}\n \",\n )\n .build();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--crate-name foo \\\n [..]bar=https:\/\/docs.rs\/bar\/1.0.0\/[..]bar=https:\/\/docs.rs\/bar\/2.0.0\/[..]\",\n )\n .run();\n let fn1 = p.read_file(\"target\/doc\/foo\/fn.fn1.html\");\n \/\/ This should be 1.0.0, rustdoc seems to use the last entry when there\n \/\/ are duplicates.\n assert!(fn1.contains(r#\"href=\"https:\/\/docs.rs\/bar\/2.0.0\/bar\/struct.Spin.html\"\"#));\n let fn2 = p.read_file(\"target\/doc\/foo\/fn.fn2.html\");\n assert!(fn2.contains(r#\"href=\"https:\/\/docs.rs\/bar\/2.0.0\/bar\/struct.Straw.html\"\"#));\n}\n\n#[cargo_test]\nfn rebuilds_when_changing() {\n \/\/ Make sure it rebuilds if the map changes.\n if !is_nightly() {\n \/\/ --extern-html-root-url is unstable\n return;\n }\n let p = basic_project();\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\"[..]--extern-html-root-url[..]\")\n .run();\n\n \/\/ This also tests that the map for docs.rs can be overridden.\n p.change_file(\n \".cargo\/config\",\n r#\"\n [doc.extern-map.registries]\n crates-io = \"https:\/\/example.com\/\"\n \"#,\n );\n p.cargo(\"doc -v --no-deps -Zrustdoc-map\")\n .masquerade_as_nightly_cargo()\n .with_stderr_contains(\n \"[RUNNING] `rustdoc [..]--extern-html-root-url [..]bar=https:\/\/example.com\/bar\/1.0.0\/[..]\",\n )\n .run();\n}\n<|endoftext|>"} {"text":"libproc_macro - Quick tool to dump input\/output tokens.\nstruct Reader {\n inner: R,\n}\nimpl Reader {\n fn getb(&mut self) -> Option {\n let mut b = [0];\n match self.inner.read(&mut b)\n {\n Ok(1) => Some(b[0]),\n Ok(0) => None,\n Ok(_) => panic!(\"Bad byte count\"),\n Err(e) => panic!(\"Error reading from stream - {}\", e),\n }\n }\n fn get_u128v(&mut self) -> u128 {\n let mut ofs = 0;\n let mut raw_rv = 0u128;\n loop\n {\n let b = self.getb().unwrap();\n raw_rv |= ((b & 0x7F) as u128) << ofs;\n if b < 128 {\n break;\n }\n assert!(ofs < 18*7); \/\/ at most 18 bytes needed for a i128\n ofs += 7;\n }\n raw_rv\n }\n fn get_i128v(&mut self) -> i128 {\n let raw_rv = self.get_u128v();\n \/\/ Zig-zag encoding (0 = 0, 1 = -1, 2 = 1, ...)\n if raw_rv & 1 != 0 {\n -( (raw_rv >> 1) as i128 + 1 )\n }\n else {\n (raw_rv >> 1) as i128\n }\n }\n fn get_byte_vec(&mut self) -> Vec {\n 
let size = self.get_u128v();\n assert!(size < (1<<30));\n let size = size as usize;\n let mut buf = vec![0u8; size];\n match self.inner.read_exact(&mut buf)\n {\n Ok(_) => {},\n Err(e) => panic!(\"Error reading from stdin get_byte_vec({}) - {}\", size, e),\n }\n\n buf\n }\n fn get_string(&mut self) -> String {\n let raw = self.get_byte_vec();\n String::from_utf8(raw).expect(\"Invalid UTF-8 passed from compiler\")\n }\n fn get_f64(&mut self) -> f64 {\n let mut buf = [0u8; 8];\n match self.inner.read_exact(&mut buf)\n {\n Ok(_) => {},\n Err(e) => panic!(\"Error reading from stdin - {}\", e),\n }\n unsafe {\n ::std::mem::transmute(buf)\n }\n }\n}\n\n\/\/\/ Receive a token stream from the compiler\npub fn dump_token_stream(reader: R)\n{\n let mut s = Reader { inner: reader };\n loop\n {\n let hdr_b = match s.getb()\n {\n Some(b) => b,\n None => break,\n };\n \/\/ TODO: leading span\n match hdr_b\n {\n 0 => {\n let sym = s.get_string();\n if sym == \"\" {\n println!(\"EOF\")\n }\n else {\n println!(\"SYM {}\", sym);\n }\n },\n 1 => println!(\"IDENT {}\", s.get_string()),\n 2 => println!(\"LIFETIME {}\", s.get_string()),\n 3 => println!(\"STRING {:?}\", s.get_string()),\n 4 => println!(\"BYTESTRING {:x?}\", s.get_byte_vec()),\n 5 => println!(\"CHAR {:?}\", ::std::char::from_u32(s.get_i128v() as u32).expect(\"char lit\")),\n 6 => println!(\"UNSIGNED {val:?} ty={ty}\", ty=s.getb().expect(\"getb int ty\"), val=s.get_u128v()),\n 7 => println!(\"SIGNED {val:?} ty={ty}\", ty=s.getb().expect(\"getb int ty\"), val=s.get_i128v()),\n 8 => println!(\"FLOAT {val:?} ty={ty}\", ty=s.getb().expect(\"getb float ty\"), val=s.get_f64()),\n _ => panic!(\"Unknown tag byte: {:#x}\", hdr_b),\n }\n }\n}\n\nfn main()\n{\n {\n assert_eq!( Reader { inner: ::std::io::stdin().lock() }.getb(), Some(0) );\n }\n dump_token_stream(::std::io::stdin().lock());\n}\n<|endoftext|>"} {"text":"Day 6 part 1 and 2\/\/ advent6.rs\n\/\/ repetition codes\n\nuse std::io;\nuse std::io::BufRead;\nuse std::collections::HashMap;\n\nfn main() {\n let stdin = io::stdin();\n\n let inputs: Vec =\n stdin.lock().lines().map(|l| l.expect(\"Failed to read line\")).collect();\n let messages: Vec<&str> = inputs.iter().map(|x| x.as_str()).collect();\n\n let corrected = correct_errors(&messages, false);\n println!(\"Part 1 corrected message: {}\", corrected);\n\n let corrected2 = correct_errors(&messages, true);\n println!(\"Part 2 corrected message: {}\", corrected2);\n}\n\nfn correct_errors(messages: &[&str], reverse_sort: bool) -> String {\n if messages.len() == 0 {\n return \"\".to_string();\n }\n\n let message_len = messages[0].len();\n let mut histograms = Vec::with_capacity(message_len);\n for _ in 0..message_len {\n histograms.push(HashMap::new());\n }\n\n for message in messages {\n for (c, mut histogram) in message.chars().zip(histograms.iter_mut()) {\n *histogram.entry(c).or_insert(0) += 1;\n }\n }\n\n let mut corrected = String::new();\n for histogram in histograms {\n let mut char_counts: Vec<(char, usize)> = histogram.iter().map(|(k, v)| (*k, *v)).collect();\n if reverse_sort {\n \/\/ part 2\n char_counts.sort_by(|a, b| a.1.cmp(&b.1));\n } else {\n \/\/ part 1\n char_counts.sort_by(|a, b| b.1.cmp(&a.1));\n }\n\n corrected.push(char_counts[0].0);\n }\n\n corrected\n}\n\n\/\/ \/\/\/\/\/\/\/\n\/\/ Tests\n\n#[test]\nfn test_correct_errors() {\n let v = vec![\"eedadn\", \"drvtee\", \"eandsr\", \"raavrd\", \"atevrs\", \"tsrnev\", \"sdttsa\", \"rasrtv\",\n \"nssdts\", \"ntnada\", \"svetve\", \"tesnvt\", \"vntsnd\", \"vrdear\", \"dvrsen\", 
\"enarar\"];\n\n assert_eq!(\"easter\", correct_errors(&v, false));\n assert_eq!(\"advent\", correct_errors(&v, true));\n}\n<|endoftext|>"} {"text":"Attempt at BLKDISCARD ioctl.<|endoftext|>"} {"text":"Heap implementation in rust. works but not optimisedtrait Heap {\n fn build_max_heap(&mut self);\n fn add_element(&mut self, int);\n fn max_heapify(&mut self, index: uint);\n fn heapsort(&mut self) -> ~[int];\n}\n\nimpl Heap for ~[int] {\n fn build_max_heap(&mut self) {\n let mut index = (self.len() \/ 2) + 1;\n while index > 0 {\n index = index - 1;\n self.max_heapify(index);\n }\n }\n fn add_element(&mut self, elem: int) {\n self.push(elem);\n }\n fn max_heapify(&mut self, index: uint) {\n let left = (index * 2) + 1;\n let right = (index * 2) + 2;\n let mut largest = index;\n if (left < self.len() && (self[left] > self[largest])) {\n largest = left;\n }\n if (right < self.len() && (self[right] > self[largest])) {\n largest = right;\n }\n if (largest != index) {\n self.swap(index, largest);\n self.max_heapify(largest);\n }\n }\n fn heapsort(&mut self) -> ~[int] {\n self.build_max_heap();\n let mut result = ~[];\n while self.len() > 0 {\n result.push(self.remove(0));\n self.build_max_heap();\n }\n result\n }\n}\n\nfn main() {\n let mut a = ~[5,2,6,3,7];\n println(a.to_str());\n let b = a.heapsort();\n println(b.to_str());\n}\n<|endoftext|>"} {"text":"starting on guibuilderuse controls::{Button, Control, Label, Root, Sizer, Slider};\nuse failure::err_msg;\nuse failure::Error as FError;\nuse serde_json::Value;\n\npub struct gui<'a> {\n pub title: String,\n pub root_control: Option>,\n sizerstack: Vec<&'a Sizer>,\n}\n\n\nimpl gui<'_> {\n fn add_button(&mut self, name: String, label: Option) -> Result<&gui, FError> {\nlet wat = [1,2,3];\n let newbutton = Box::new(Button {\n control_id: Vec::new(),\n name: String::from(name),\n label: label,\n pressed: false,\n });\n match self.root_control {\n None => {\n self.root_control = Some(newbutton);\n Ok(self)\n }\n\n Some(_) => match self.sizerstack.last_mut() {\n None => Err(err_msg(\"no active sizer, can't add Button element!\")),\n Some(mut s) => {\n \/\/ add new elt to sizer.\n \/\/ *s.control_id = wat[..];\n \/\/ s.controls.push(newbutton);\n Ok(self)\n }\n },\n }\n }\n}\n\n\/*\nfn deserialize_control(id: Vec, data: &Value) -> Result, FError> {\n \/\/ what's the type?\n let obj = data\n .as_object()\n .ok_or(err_msg(\"control is not a valid json object\"))?;\n let objtype = get_string(obj, \"type\")?;\n\n match objtype {\n \"button\" => {\n let name = get_string(obj, \"name\")?;\n let label = match obj.get(\"label\") {\n Some(x) => {\n let s = x.as_string().ok_or(err_msg(\"'label' is not a string!\"))?;\n Some(String::from(s))\n }\n None => None,\n };\n Ok(Box::new(Button {\n control_id: id.clone(),\n name: String::from(name),\n label: label,\n pressed: false,\n }))\n }\n \"slider\" => {\n let name = get_string(obj, \"name\")?;\n let label = match obj.get(\"label\") {\n Some(x) => {\n let s = x.as_string().ok_or(err_msg(\"'label' is not a string!\"))?;\n Some(String::from(s))\n }\n None => None,\n };\n Ok(Box::new(Slider {\n control_id: id.clone(),\n name: String::from(name),\n label: label,\n pressed: false,\n location: 0.5,\n }))\n }\n \"label\" => {\n let name = get_string(obj, \"name\")?;\n let label = get_string(obj, \"label\")?;\n Ok(Box::new(Label {\n control_id: id.clone(),\n name: String::from(name),\n label: label.to_string(),\n }))\n }\n \"sizer\" => {\n let controls = obj\n .get(\"controls\")\n .ok_or(err_msg(\"'controls' not 
found\"))?\n .as_array()\n .ok_or(err_msg(\"'controls' is not an array\"))?;\n\n let mut controlv = Vec::new();\n\n \/\/ loop through array, makin controls.\n for (i, v) in controls.into_iter().enumerate() {\n let mut id = id.clone();\n id.push(i as i32);\n let c = try!(deserialize_control(id, v));\n controlv.push(c);\n }\n\n Ok(Box::new(Sizer {\n control_id: id.clone(),\n controls: controlv,\n }))\n }\n _ => Err(err_msg(format!(\"objtype '{}' not supported!\", objtype))),\n }\n}*\/\n<|endoftext|>"} {"text":"first rust program#[derive(Copy,Clone)]\nstruct VertexID {\n\tid: usize,\n\tis_nil: bool,\n}\n\nimpl std::fmt::Debug for VertexID {\n\tfn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\t\tif self.is_nil {\n\t\t\twrite!(f, \"nil\")\n\t\t} else {\n\t\t\twrite!(f, \"{}\", self.id)\n\t\t}\n\t}\n}\n\nimpl VertexID {\n\tfn new(id: usize) -> VertexID {\n\t\tVertexID{id: id, is_nil: false}\n\t}\n\n\tfn nil() -> VertexID {\n\t\tVertexID{id: 0, is_nil: true}\n\t}\n}\n\ntype EdgeID = (VertexID, usize);\n\nstruct Graph {\n\tpub vertices: Vec,\n\tpub edges: Vec>,\n}\n\nimpl Graph {\n\tfn new() -> Graph {\n\t\tGraph{\n\t\t\tvertices: Vec::new(),\n\t\t\tedges: Vec::new(),\n\t\t}\n\t}\n\n\tfn add_vertex(&mut self, v: T) -> VertexID {\n\t\tlet vid = self.vertices.len();\n\t\tself.vertices.push(v);\n\t\tself.edges.push(Vec::new());\n\t\tVertexID::new(vid)\n\t}\n\n\tfn add_edge(&mut self, vid1: VertexID, vid2: VertexID, weight: f64) -> EdgeID {\n\t\tlet eid = (vid1, self.edges.len());\n\t\tself.edges[vid1.id].push((vid2, weight));\n\t\teid\n\t}\n}\n\nfn dijkstra(graph: &Graph, source: VertexID) -> Vec<(f64, VertexID)> {\n\tlet mut dist_prev = Vec::with_capacity(graph.vertices.len());\n\tlet mut queue = Vec::with_capacity(graph.vertices.len());\n\tfor i in 0..graph.vertices.len() {\n\t\tlet mut p = (::std::f64::INFINITY, VertexID::nil());\n\t\tif i == source.id {\n\t\t\tp = (0.0, VertexID::nil());\n\t\t}\n\t\tdist_prev.push(p);\n\t\tqueue.push(VertexID::new(i));\n\t}\n\n\twhile !queue.is_empty() {\n\t\tlet mut minidx = 0;\n\t\tlet mut mindist = dist_prev[queue[0].id].0;\n\t\tfor i in 1..queue.len() {\n\t\t\tlet vid = queue[i];\n\t\t\tif dist_prev[vid.id].0 < mindist {\n\t\t\t\tminidx = i;\n\t\t\t\tmindist = dist_prev[vid.id].0;\n\t\t\t}\n\t\t}\n\t\tlet u = queue.swap_remove(minidx);\n\t\tfor e in graph.edges[u.id].iter() {\n\t\t\tlet alt = dist_prev[u.id].0 + e.1;\n\t\t\tif alt < dist_prev[e.0.id].0 {\n\t\t\t\tdist_prev[e.0.id] = (alt, u);\n\t\t\t}\n\t\t}\n\t}\n\tdist_prev\n}\n\nfn main() {\n\tlet mut graph = Graph::new();\n\tlet vid1 = graph.add_vertex(1);\n\tlet vid2 = graph.add_vertex(1010);\n\tgraph.add_edge(vid2, vid1, 1.4);\n\tlet dist_prev = dijkstra(&graph, vid1);\n\tprintln!(\"{:?}\", dist_prev);\n\tlet dist_prev2 = dijkstra(&graph, vid2);\n\tprintln!(\"{:?}\", dist_prev2);\n}<|endoftext|>"} {"text":"format change<|endoftext|>"} {"text":"Actually allow boxed `Changeset`<|endoftext|>"} {"text":"08 - nesting and labelsfn main() {\n 'outer: loop {\n println!(\"Entered the outer loop\");\n\n 'inner: loop {\n println!(\"Entered the inner loop\");\n\n \/\/ This would break only the inner loop\n \/\/break;\n\n \/\/ This breaks the outer loop\n break 'outer;\n }\n\n println!(\"This point will never be reached\");\n }\n\n println!(\"Exited the outer loop\");\n}\n<|endoftext|>"} {"text":"Fix repeated warning with two calls to to_package_id_specs<|endoftext|>"} {"text":"Ensure key ID version contains only valid characters.<|endoftext|>"} {"text":"tests that #39963 is fixed on MIR 
borrowck\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test case from #39963.\n\n#![feature(nll)]\n\n#[derive(Clone)]\nstruct Foo(Option>, Option>);\n\nfn test(f: &mut Foo) {\n match *f {\n Foo(Some(ref mut left), Some(ref mut right)) => match **left {\n Foo(Some(ref mut left), Some(ref mut right)) => panic!(),\n _ => panic!(),\n },\n _ => panic!(),\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"#![allow(missing_docs)]\nuse device as d;\nuse device::{Device, Resources, Capabilities, SubmitInfo};\nuse device::draw::{CommandBuffer, Access, Gamma, Target};\nuse device::shade;\nuse super::{tex};\nuse draw_state::target::{Rect, Mirror, Mask, ClearData, Layer, Level};\n\npub struct DummyDevice {\n capabilities: Capabilities\n}\npub struct DummyCommandBuffer {\n buf: Vec\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub enum DummyResources{}\n\npub type Buffer = u32;\npub type ArrayBuffer = u32;\npub type Shader = u32;\npub type Program = u32;\npub type FrameBuffer = u32;\npub type Surface = u32;\npub type Sampler = u32;\npub type Texture = u32;\n\nimpl Resources for DummyResources {\n type Buffer = Buffer;\n type ArrayBuffer = ArrayBuffer;\n type Shader = Shader;\n type Program = Program;\n type FrameBuffer = FrameBuffer;\n type Surface = Surface;\n type Texture = Texture;\n type Sampler = Sampler;\n}\n\nimpl CommandBuffer for DummyCommandBuffer {\n fn new() -> DummyCommandBuffer {\n DummyCommandBuffer {\n buf: Vec::new(),\n }\n }\n\n fn clear(&mut self) {\n self.buf.clear();\n }\n\n fn bind_program(&mut self, prog: Program) {\n self.buf.push(\"bind_program\".to_string());\n }\n\n fn bind_array_buffer(&mut self, vao: ArrayBuffer) {\n self.buf.push(\"bind_array_buffer\".to_string());\n }\n\n fn bind_attribute(&mut self, slot: d::AttributeSlot, buf: Buffer,\n format: d::attrib::Format) {\n self.buf.push(\"bind_attribute\".to_string());\n }\n\n fn bind_index(&mut self, buf: Buffer) {\n self.buf.push(\"bind_index\".to_string());\n }\n\n fn bind_frame_buffer(&mut self, access: Access, fbo: FrameBuffer,\n gamma: Gamma) {\n self.buf.push(\"bind_frame_buffer\".to_string());\n }\n\n fn unbind_target(&mut self, access: Access, tar: Target) {\n self.buf.push(\"unbind_target\".to_string());\n }\n\n fn bind_target_surface(&mut self, access: Access, tar: Target,\n suf: Surface) {\n self.buf.push(\"bind_target_surface\".to_string());\n }\n\n fn bind_target_texture(&mut self, access: Access, tar: Target,\n tex: Texture, level: Level, layer: Option) {\n self.buf.push(\"bind_target_texture\".to_string());\n }\n\n fn bind_uniform_block(&mut self, prog: Program, slot: d::UniformBufferSlot,\n index: d::UniformBlockIndex, buf: Buffer) {\n self.buf.push(\"bind_uniform_block\".to_string());\n }\n\n fn bind_uniform(&mut self, loc: d::shade::Location,\n value: d::shade::UniformValue) {\n self.buf.push(\"bind_uniform\".to_string());\n }\n fn bind_texture(&mut self, slot: d::TextureSlot, kind: d::tex::Kind,\n tex: Texture,\n sampler: Option<(Sampler, d::tex::SamplerInfo)>) {\n self.buf.push(\"set_draw_color_buffers\".to_string());\n }\n\n fn set_draw_color_buffers(&mut self, num: usize) {\n self.buf.push(\"set_draw_color_buffers\".to_string());\n }\n\n fn set_primitive(&mut 
self, prim: d::state::Primitive) {\n self.buf.push(\"set_primitive\".to_string());\n }\n\n fn set_viewport(&mut self, view: Rect) {\n self.buf.push(\"set_viewport\".to_string());\n }\n\n fn set_multi_sample(&mut self, ms: Option) {\n self.buf.push(\"set_multi_sample\".to_string());\n }\n\n fn set_scissor(&mut self, rect: Option) {\n self.buf.push(\"set_scissor\".to_string());\n }\n\n fn set_depth_stencil(&mut self, depth: Option,\n stencil: Option,\n cull: d::state::CullFace) {\n self.buf.push(\"set_depth_stencil\".to_string());\n }\n\n fn set_blend(&mut self, blend: Option) {\n self.buf.push(\"set_blend\".to_string());\n }\n\n fn set_color_mask(&mut self, mask: d::state::ColorMask) {\n self.buf.push(\"set_color_mask\".to_string());\n }\n\n fn update_buffer(&mut self, buf: Buffer, data: d::draw::DataPointer,\n offset_bytes: usize) {\n self.buf.push(\"update_buffer\".to_string());\n }\n\n fn update_texture(&mut self, kind: d::tex::Kind, tex: Texture,\n info: d::tex::ImageInfo, data: d::draw::DataPointer) {\n self.buf.push(\"update_texture\".to_string());\n }\n\n fn call_clear(&mut self, data: ClearData, mask: Mask) {\n self.buf.push(\"call_clear\".to_string());\n }\n\n fn call_draw(&mut self, ptype: d::PrimitiveType, start: d::VertexCount,\n count: d::VertexCount, instances: d::draw::InstanceOption) {\n self.buf.push(\"call_draw\".to_string());\n }\n\n fn call_draw_indexed(&mut self, ptype: d::PrimitiveType,\n itype: d::IndexType, start: d::VertexCount,\n count: d::VertexCount, base: d::VertexCount,\n instances: d::draw::InstanceOption) {\n self.buf.push(\"call_draw_indexed\".to_string());\n }\n\n fn call_blit(&mut self, s_rect: Rect, d_rect: Rect, mirror: Mirror,\n mask: Mask) {\n self.buf.push(\"call_blit\".to_string());\n }\n}\n\nimpl DummyDevice {\n fn new() -> DummyDevice {\n DummyDevice {\n capabilities: Capabilities {\n shader_model: shade::ShaderModel::Unsupported,\n\n max_vertex_count: 0,\n max_index_count: 0,\n max_draw_buffers: 0,\n max_texture_size: 0,\n max_vertex_attributes: 0,\n\n buffer_role_change_allowed: false,\n\n array_buffer_supported: false,\n fragment_output_supported: false,\n immutable_storage_supported: false,\n instance_base_supported: false,\n instance_call_supported: false,\n instance_rate_supported: false,\n render_targets_supported: false,\n sampler_objects_supported: false,\n srgb_color_supported: false,\n uniform_block_supported: false,\n vertex_base_supported: false,\n }\n }\n }\n}\n\nimpl Device for DummyDevice {\n type Resources = DummyResources;\n type CommandBuffer = DummyCommandBuffer;\n\n fn get_capabilities<'a>(&'a self) -> &'a Capabilities {\n &self.capabilities\n }\n fn reset_state(&mut self) {}\n fn submit(&mut self, (cb, db, handles): SubmitInfo) {}\n fn cleanup(&mut self) {}\n}\nAdded DummyDevice::new(Capabilities)#![allow(missing_docs)]\nuse device as d;\nuse device::{Device, Resources, Capabilities, SubmitInfo};\nuse device::draw::{CommandBuffer, Access, Gamma, Target};\nuse device::shade;\nuse super::{tex};\nuse draw_state::target::{Rect, Mirror, Mask, ClearData, Layer, Level};\n\npub struct DummyDevice {\n capabilities: Capabilities\n}\npub struct DummyCommandBuffer {\n buf: Vec\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub enum DummyResources{}\n\npub type Buffer = u32;\npub type ArrayBuffer = u32;\npub type Shader = u32;\npub type Program = u32;\npub type FrameBuffer = u32;\npub type Surface = u32;\npub type Sampler = u32;\npub type Texture = u32;\n\nimpl Resources for DummyResources {\n type Buffer = Buffer;\n type 
ArrayBuffer = ArrayBuffer;\n type Shader = Shader;\n type Program = Program;\n type FrameBuffer = FrameBuffer;\n type Surface = Surface;\n type Texture = Texture;\n type Sampler = Sampler;\n}\n\nimpl CommandBuffer for DummyCommandBuffer {\n fn new() -> DummyCommandBuffer {\n DummyCommandBuffer {\n buf: Vec::new(),\n }\n }\n\n fn clear(&mut self) {\n self.buf.clear();\n }\n\n fn bind_program(&mut self, prog: Program) {\n self.buf.push(\"bind_program\".to_string());\n }\n\n fn bind_array_buffer(&mut self, vao: ArrayBuffer) {\n self.buf.push(\"bind_array_buffer\".to_string());\n }\n\n fn bind_attribute(&mut self, slot: d::AttributeSlot, buf: Buffer,\n format: d::attrib::Format) {\n self.buf.push(\"bind_attribute\".to_string());\n }\n\n fn bind_index(&mut self, buf: Buffer) {\n self.buf.push(\"bind_index\".to_string());\n }\n\n fn bind_frame_buffer(&mut self, access: Access, fbo: FrameBuffer,\n gamma: Gamma) {\n self.buf.push(\"bind_frame_buffer\".to_string());\n }\n\n fn unbind_target(&mut self, access: Access, tar: Target) {\n self.buf.push(\"unbind_target\".to_string());\n }\n\n fn bind_target_surface(&mut self, access: Access, tar: Target,\n suf: Surface) {\n self.buf.push(\"bind_target_surface\".to_string());\n }\n\n fn bind_target_texture(&mut self, access: Access, tar: Target,\n tex: Texture, level: Level, layer: Option) {\n self.buf.push(\"bind_target_texture\".to_string());\n }\n\n fn bind_uniform_block(&mut self, prog: Program, slot: d::UniformBufferSlot,\n index: d::UniformBlockIndex, buf: Buffer) {\n self.buf.push(\"bind_uniform_block\".to_string());\n }\n\n fn bind_uniform(&mut self, loc: d::shade::Location,\n value: d::shade::UniformValue) {\n self.buf.push(\"bind_uniform\".to_string());\n }\n fn bind_texture(&mut self, slot: d::TextureSlot, kind: d::tex::Kind,\n tex: Texture,\n sampler: Option<(Sampler, d::tex::SamplerInfo)>) {\n self.buf.push(\"set_draw_color_buffers\".to_string());\n }\n\n fn set_draw_color_buffers(&mut self, num: usize) {\n self.buf.push(\"set_draw_color_buffers\".to_string());\n }\n\n fn set_primitive(&mut self, prim: d::state::Primitive) {\n self.buf.push(\"set_primitive\".to_string());\n }\n\n fn set_viewport(&mut self, view: Rect) {\n self.buf.push(\"set_viewport\".to_string());\n }\n\n fn set_multi_sample(&mut self, ms: Option) {\n self.buf.push(\"set_multi_sample\".to_string());\n }\n\n fn set_scissor(&mut self, rect: Option) {\n self.buf.push(\"set_scissor\".to_string());\n }\n\n fn set_depth_stencil(&mut self, depth: Option,\n stencil: Option,\n cull: d::state::CullFace) {\n self.buf.push(\"set_depth_stencil\".to_string());\n }\n\n fn set_blend(&mut self, blend: Option) {\n self.buf.push(\"set_blend\".to_string());\n }\n\n fn set_color_mask(&mut self, mask: d::state::ColorMask) {\n self.buf.push(\"set_color_mask\".to_string());\n }\n\n fn update_buffer(&mut self, buf: Buffer, data: d::draw::DataPointer,\n offset_bytes: usize) {\n self.buf.push(\"update_buffer\".to_string());\n }\n\n fn update_texture(&mut self, kind: d::tex::Kind, tex: Texture,\n info: d::tex::ImageInfo, data: d::draw::DataPointer) {\n self.buf.push(\"update_texture\".to_string());\n }\n\n fn call_clear(&mut self, data: ClearData, mask: Mask) {\n self.buf.push(\"call_clear\".to_string());\n }\n\n fn call_draw(&mut self, ptype: d::PrimitiveType, start: d::VertexCount,\n count: d::VertexCount, instances: d::draw::InstanceOption) {\n self.buf.push(\"call_draw\".to_string());\n }\n\n fn call_draw_indexed(&mut self, ptype: d::PrimitiveType,\n itype: d::IndexType, start: d::VertexCount,\n 
count: d::VertexCount, base: d::VertexCount,\n instances: d::draw::InstanceOption) {\n self.buf.push(\"call_draw_indexed\".to_string());\n }\n\n fn call_blit(&mut self, s_rect: Rect, d_rect: Rect, mirror: Mirror,\n mask: Mask) {\n self.buf.push(\"call_blit\".to_string());\n }\n}\n\nimpl DummyDevice {\n fn new(capabilities: Capabilities) -> DummyDevice {\n DummyDevice {\n capabilities: capabilities\n }\n }\n}\n\nimpl Device for DummyDevice {\n type Resources = DummyResources;\n type CommandBuffer = DummyCommandBuffer;\n\n fn get_capabilities<'a>(&'a self) -> &'a Capabilities {\n &self.capabilities\n }\n fn reset_state(&mut self) {}\n fn submit(&mut self, (cb, db, handles): SubmitInfo) {}\n fn cleanup(&mut self) {}\n}\n<|endoftext|>"} {"text":"Added an example `test_event_app`#![feature(globs)]\n\nextern crate piston;\nextern crate event;\n\nuse piston::*;\nuse event::{\n AddKeyboard,\n AddLasting,\n AddPress,\n AddPressing,\n\n Event,\n\n Map,\n\n BackEnd,\n Observer,\n};\n\npub struct App<'a> {\n e: Event<'a>,\n back_end: TestBackEnd<'a>,\n}\n\nimpl<'a> App<'a> {\n pub fn new() -> App {\n App {\n e: Event::new(),\n back_end: TestBackEnd::new(),\n }\n }\n}\n\nimpl<'a> Game for App<'a> {\n fn load(&mut self, _asset_store: &mut AssetStore) {\n self.e.keyboard().press(keyboard::Up).map(&mut self.back_end, || {\n println!(\"Oops! You pressed keyboard::Up\");\n });\n\n let e = self.e.keyboard().pressing(keyboard::Up);\n e.map(&mut self.back_end, || {\n println!(\"Wow! You are pressing keyboard::Up\");\n });\n\n e.lasting(1.0).map(&mut self.back_end, || {\n println!(\"Wooooooow! You are pressing keybaord::Up at least 1.0 second!!\");\n });\n }\n\n fn update(&mut self, dt: f64, _asset_store: &mut AssetStore) {\n self.back_end.update(dt);\n }\n\n\n \/\/ re-wrap those events to Event, a good way to do this is in the GameWindow\n \/\/ implementions.\n fn key_press(\n &mut self,\n key: keyboard::Key,\n _asset_store: &mut AssetStore\n ) {\n self.back_end.on_event(event::KeyPressed(key));\n }\n\n fn key_release(\n &mut self,\n key: keyboard::Key,\n _asset_store: &mut AssetStore\n ) {\n self.back_end.on_event(event::KeyReleased(key));\n }\n\n fn mouse_press(\n &mut self,\n button: mouse::Button,\n _asset_store: &mut AssetStore\n ) {\n self.back_end.on_event(event::MouseButtonPressed(button));\n }\n\n fn mouse_release(\n &mut self,\n button: mouse::Button,\n _asset_store: &mut AssetStore\n ) {\n self.back_end.on_event(event::MouseButtonReleased(button));\n }\n}\n\nstruct TestBackEnd<'a> {\n observers: Vec>,\n}\n\nimpl<'a> TestBackEnd<'a> {\n pub fn new() -> TestBackEnd {\n TestBackEnd {\n observers: Vec::>::new(),\n }\n }\n}\n\nimpl<'a> BackEnd for TestBackEnd<'a> {\n fn add_observer(&mut self, ob: Box) -> uint {\n self.observers.push(ob);\n self.observers.len() - 1\n }\n\n fn update(&mut self, dt: f64) {\n for i in range(0, self.observers.len()) {\n let ob = self.observers.get_mut(i);\n\n ob.update(dt);\n\n if ob.can_trigger() {\n ob.trigger();\n }\n }\n }\n\n fn on_event(&mut self, e: event::Event) {\n for ob in self.observers.mut_iter() {\n ob.on_event(e);\n }\n }\n}\n\ntype GameWindowBackEnd = GameWindowSDL2;\n\nfn main() {\n let mut game_window: GameWindowBackEnd = GameWindow::new(\n GameWindowSettings::new (\n \"Piston-Lab\".to_owned(),\n [300, 300],\n false,\n true,\n [1.0, 1.0, 1.0, 1.0]\n )\n );\n\n let mut asset_store = AssetStore::from_folder(\"assets\");\n let mut app = App::new();\n\n app.run(&mut game_window, &mut asset_store);\n}\n\n\n<|endoftext|>"} {"text":"Add solution to problem 
46#[macro_use] extern crate libeuler;\nextern crate num;\n\nuse libeuler::SieveOfAtkin;\nuse num::integer::Integer;\n\n\/\/\/ It was proposed by Christian Goldbach that every odd composite number can be written as the sum\n\/\/\/ of a prime and twice a square.\n\/\/\/\n\/\/\/ 9 = 7 + 2×1^2\n\/\/\/ 15 = 7 + 2×2^2\n\/\/\/ 21 = 3 + 2×3^2\n\/\/\/ 25 = 7 + 2×3^2\n\/\/\/ 27 = 19 + 2×2^2\n\/\/\/ 33 = 31 + 2×1^2\n\/\/\/\n\/\/\/ It turns out that the conjecture was false.\n\/\/\/\n\/\/\/ What is the smallest odd composite that cannot be written as the sum of a prime and twice a\n\/\/\/ square?\nfn main() {\n solutions! {\n sol naive {\n let sieve = SieveOfAtkin::new(10_000);\n let odd_composites = (2..10_000)\n .filter(|a| a.is_odd())\n .filter(|&a| !sieve.is_prime(a));\n\n for c in odd_composites {\n let twice_squares: Vec = (0..1000)\n .map(|v| 2*v*v)\n .take_while(|&v| v < c)\n .collect();\n\n let no_primes = twice_squares.iter().all(|sq| {\n !sieve.is_prime(c - sq)\n });\n\n if no_primes {\n return Some(c);\n }\n }\n\n return None;\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-emscripten\n\/\/ min-llvm-version 7.0\n\/\/ error-pattern: panicked\n\n\/\/ Test that the simd_f{min,max} intrinsics produce the correct results.\n\n#![feature(repr_simd, platform_intrinsics)]\n#![allow(non_camel_case_types)]\n\n#[repr(simd)]\n#[derive(Copy, Clone, PartialEq, Debug)]\nstruct f32x4(pub f32, pub f32, pub f32, pub f32);\n\nextern \"platform-intrinsic\" {\n fn simd_fmin(x: T, y: T) -> T;\n fn simd_fmax(x: T, y: T) -> T;\n}\n\nfn main() {\n let x = f32x4(1.0, 2.0, 3.0, 4.0);\n let y = f32x4(2.0, 1.0, 4.0, 3.0);\n let nan = ::std::f32::NAN;\n let n = f32x4(nan, nan, nan, nan);\n\n unsafe {\n let min0 = simd_fmin(x, y);\n let min1 = simd_fmin(y, x);\n assert_eq!(min0, min1);\n let e = f32x4(1.0, 1.0, 3.0, 3.0);\n assert_eq!(min0, e);\n let minn = simd_fmin(x, n);\n assert_eq!(minn, x);\n let minn = simd_fmin(y, n);\n assert_eq!(minn, y);\n\n let max0 = simd_fmax(x, y);\n let max1 = simd_fmax(y, x);\n assert_eq!(max0, max1);\n let e = f32x4(2.0, 2.0, 4.0, 4.0);\n assert_eq!(max0, e);\n let maxn = simd_fmax(x, n);\n assert_eq!(maxn, x);\n let maxn = simd_fmax(y, n);\n assert_eq!(maxn, y);\n }\n}\nrun-pass\/simd-intrinsic-float-minmax: Force use of qNaN on Mips\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-emscripten\n\/\/ min-llvm-version 7.0\n\/\/ error-pattern: panicked\n\n\/\/ Test that the simd_f{min,max} intrinsics produce the correct results.\n\n#![feature(repr_simd, platform_intrinsics)]\n#![allow(non_camel_case_types)]\n\n#[repr(simd)]\n#[derive(Copy, Clone, PartialEq, Debug)]\nstruct f32x4(pub f32, pub f32, pub f32, pub f32);\n\nextern \"platform-intrinsic\" {\n fn simd_fmin(x: T, y: T) -> T;\n fn simd_fmax(x: T, y: T) -> T;\n}\n\nfn main() {\n let x = f32x4(1.0, 2.0, 3.0, 4.0);\n let y = f32x4(2.0, 1.0, 4.0, 3.0);\n\n #[cfg(not(any(target_arch = \"mips\", target_arch = \"mips64\")))]\n let nan = ::std::f32::NAN;\n \/\/ MIPS hardware treats f32::NAN as SNAN. Clear the signaling bit.\n \/\/ See https:\/\/github.com\/rust-lang\/rust\/issues\/52746.\n #[cfg(any(target_arch = \"mips\", target_arch = \"mips64\"))]\n let nan = f32::from_bits(::std::f32::NAN.to_bits() - 1);\n\n let n = f32x4(nan, nan, nan, nan);\n\n unsafe {\n let min0 = simd_fmin(x, y);\n let min1 = simd_fmin(y, x);\n assert_eq!(min0, min1);\n let e = f32x4(1.0, 1.0, 3.0, 3.0);\n assert_eq!(min0, e);\n let minn = simd_fmin(x, n);\n assert_eq!(minn, x);\n let minn = simd_fmin(y, n);\n assert_eq!(minn, y);\n\n let max0 = simd_fmax(x, y);\n let max1 = simd_fmax(y, x);\n assert_eq!(max0, max1);\n let e = f32x4(2.0, 2.0, 4.0, 4.0);\n assert_eq!(max0, e);\n let maxn = simd_fmax(x, n);\n assert_eq!(maxn, x);\n let maxn = simd_fmax(y, n);\n assert_eq!(maxn, y);\n }\n}\n<|endoftext|>"} {"text":"Move actual events and repsoses out of the game timer block results in much smoother input.<|endoftext|>"} {"text":"Created encode and decode functions for going from Brainfuck programs to binary strings and back<|endoftext|>"} {"text":"Move argument parsing into function<|endoftext|>"} {"text":"Refactor to determine potential issues with Arc and Mutex with FFI.<|endoftext|>"} {"text":"extern crate piston;\nextern crate graphics;\nextern crate piston_window;\nextern crate time;\nextern crate rand;\nextern crate ai_behavior;\nextern crate cgmath;\nextern crate opengl_graphics;\n\nmod app;\nmod entity;\nmod player;\nmod config;\nmod person;\n\nuse player::Player;\nuse entity::Entity;\n\nuse piston_window::{ PistonWindow, WindowSettings };\nuse piston::input::*;\nuse piston::event_loop::*;\nuse opengl_graphics::*;\nuse graphics::{ Image, clear, default_draw_state };\nuse graphics::rectangle::square;\nuse std::path::Path;\nuse rand::{Rng, SeedableRng, XorShiftRng};\n\nuse cgmath::rad;\nuse cgmath::{ Vector2, Vector4 };\nuse cgmath::{ Rotation2, Basis2 };\n\nfn transform_camera_coords(player : &Player, x : u32, y: u32, width : u32, height : u32) -> (f64, f64) {\n return (\n x as f64 - player.get_position().x as f64 + (width as f64 \/ 2f64) as f64 ,\n y as f64 - player.get_position().y as f64 + (height as f64 \/ 2f64) as f64\n );\n}\n\nfn draw_background(x: u32, y: u32, context: graphics::context::Context, gl_graphics: &mut GlGraphics, textures: &Vec, seed: [u32;4], player : &mut Player) {\n let mut rng1: XorShiftRng = SeedableRng::from_seed(seed);\n let txt: &Texture = textures.get(0).unwrap();\n let (width, height) = txt.get_size();\n for i in 0..(x\/width) + 1 {\n for j in 0..(y\/height) + 1 {\n let (k, l) = transform_camera_coords(player, i, j, x, y);\n let rand = rng1.gen::() % textures.len() as u32;\n let txt: &Texture = textures.get(rand as usize).unwrap();\n let (width, height) = txt.get_size();\n let image = 
Image::new().rect(square((i as f64 * width as f64) as f64 + k, (j as f64 * height as f64) as f64 + l, width as f64));\n println!(\"({}, {}) = ({}, {})\", (k * width as f64) as f64, (l * height as f64) as f64, k, l);\n image.draw(txt, default_draw_state(), context.transform, gl_graphics);\n }\n }\n}\n\nfn main() {\n let mut rng = rand::thread_rng();\n let seed: [u32;4] = [rng.gen::(), rng.gen::(), rng.gen::(), rng.gen::()];\n let opengl = OpenGL::V3_2;\n let mut window: PistonWindow = WindowSettings::new(\"GGJ2016\", [800, 600])\n .exit_on_esc(true)\n .opengl(opengl)\n .build()\n .unwrap_or_else(|e| { panic!(\"Failed to build PistonWindow: {}\", e) });\n window.set_ups(60);\n let mut gl = GlGraphics::new(opengl);\n\n let emoji_player = Texture::from_path(Path::new(\"assets\/img\/emoji\/78.png\")).unwrap();\n let mut app = app::App::new(emoji_player);\n\n let emoji = Texture::from_path(Path::new(\"assets\/img\/emoji\/77.png\")).unwrap();\n app.add_entity(Box::new(person::Person::new(emoji, Vector2::new(50.0, 50.0))));\n\n \/\/ Add player to entities (player instanciated in app)\n \/\/app.add_entity(Box::new(player::Player::new()));\n\n let mut textures :Vec= Vec::new();\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_01.jpg\")).unwrap());\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_02.jpg\")).unwrap());\n\n for e in window {\n if let Some(args) = e.press_args() {\n app.key_press(args);\n println!(\"asda\");\n }\n\n if let Some(args) = e.update_args() {\n app.update(args);\n }\n\n if let Some(args) = e.render_args() {\n gl.draw(args.viewport(), |c, gl| {\n clear([0.5, 0.2, 0.9, 1.0], gl);\n draw_background(args.width, args.height, c, gl, &textures, seed, app.get_player());\n\n app.render(c, gl, args);\n });\n }\n }\n}\nRemoved verbosity.extern crate piston;\nextern crate graphics;\nextern crate piston_window;\nextern crate time;\nextern crate rand;\nextern crate ai_behavior;\nextern crate cgmath;\nextern crate opengl_graphics;\n\nmod app;\nmod entity;\nmod player;\nmod config;\nmod person;\n\nuse player::Player;\nuse entity::Entity;\n\nuse piston_window::{ PistonWindow, WindowSettings };\nuse piston::input::*;\nuse piston::event_loop::*;\nuse opengl_graphics::*;\nuse graphics::{ Image, clear, default_draw_state };\nuse graphics::rectangle::square;\nuse std::path::Path;\nuse rand::{Rng, SeedableRng, XorShiftRng};\n\nuse cgmath::rad;\nuse cgmath::{ Vector2, Vector4 };\nuse cgmath::{ Rotation2, Basis2 };\n\nfn transform_camera_coords(player : &Player, x : u32, y: u32, width : u32, height : u32) -> (f64, f64) {\n return (\n x as f64 - player.get_position().x as f64 + (width as f64 \/ 2f64) as f64 ,\n y as f64 - player.get_position().y as f64 + (height as f64 \/ 2f64) as f64\n );\n}\n\nfn draw_background(x: u32, y: u32, context: graphics::context::Context, gl_graphics: &mut GlGraphics, textures: &Vec, seed: [u32;4], player : &mut Player) {\n let mut rng1: XorShiftRng = SeedableRng::from_seed(seed);\n let txt: &Texture = textures.get(0).unwrap();\n let (width, height) = txt.get_size();\n for i in 0..(x\/width) + 1 {\n for j in 0..(y\/height) + 1 {\n let (k, l) = transform_camera_coords(player, i, j, x, y);\n let rand = rng1.gen::() % textures.len() as u32;\n let txt: &Texture = textures.get(rand as usize).unwrap();\n let (width, height) = txt.get_size();\n let image = Image::new().rect(square((i as f64 * width as f64) as f64 + k, (j as f64 * height as f64) as f64 + l, width as f64));\n image.draw(txt, default_draw_state(), 
context.transform, gl_graphics);\n }\n }\n}\n\nfn main() {\n let mut rng = rand::thread_rng();\n let seed: [u32;4] = [rng.gen::(), rng.gen::(), rng.gen::(), rng.gen::()];\n let opengl = OpenGL::V3_2;\n let mut window: PistonWindow = WindowSettings::new(\"GGJ2016\", [800, 600])\n .exit_on_esc(true)\n .opengl(opengl)\n .build()\n .unwrap_or_else(|e| { panic!(\"Failed to build PistonWindow: {}\", e) });\n window.set_ups(60);\n let mut gl = GlGraphics::new(opengl);\n\n let emoji_player = Texture::from_path(Path::new(\"assets\/img\/emoji\/78.png\")).unwrap();\n let mut app = app::App::new(emoji_player);\n\n let emoji = Texture::from_path(Path::new(\"assets\/img\/emoji\/77.png\")).unwrap();\n app.add_entity(Box::new(person::Person::new(emoji, Vector2::new(50.0, 50.0))));\n\n \/\/ Add player to entities (player instanciated in app)\n \/\/app.add_entity(Box::new(player::Player::new()));\n\n let mut textures :Vec= Vec::new();\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_01.jpg\")).unwrap());\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_02.jpg\")).unwrap());\n\n for e in window {\n if let Some(args) = e.press_args() {\n app.key_press(args);\n println!(\"asda\");\n }\n\n if let Some(args) = e.update_args() {\n app.update(args);\n }\n\n if let Some(args) = e.render_args() {\n gl.draw(args.viewport(), |c, gl| {\n clear([0.5, 0.2, 0.9, 1.0], gl);\n draw_background(args.width, args.height, c, gl, &textures, seed, app.get_player());\n\n app.render(c, gl, args);\n });\n }\n }\n}\n<|endoftext|>"} {"text":"adding make command<|endoftext|>"} {"text":"#![feature(deque_extras)]\n#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nextern crate glob;\n\nuse std::collections::HashMap;\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\n\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::{parse, Job};\nuse self::variables::Variables;\nuse self::history::History;\nuse self::flow_control::{FlowControl, is_flow_control_command, Statement};\nuse self::status::{SUCCESS, NO_SUCH_COMMAND, TERMINATED};\n\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod variables;\npub mod history;\npub mod flow_control;\npub mod status;\n\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n variables: Variables,\n flow_control: FlowControl,\n directory_stack: DirectoryStack,\n history: History,\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction fails\n pub fn new() -> Self {\n Shell {\n variables: Variables::new(),\n flow_control: FlowControl::new(),\n directory_stack: DirectoryStack::new().expect(\"\"),\n history: History::new(),\n }\n }\n\n pub fn print_prompt(&self) {\n self.print_prompt_prefix();\n match self.flow_control.current_statement {\n Statement::For(_, _) => self.print_for_prompt(),\n Statement::Default => self.print_default_prompt(),\n }\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n\n }\n\n \/\/ TODO eventually this thing should be gone\n fn print_prompt_prefix(&self) {\n let prompt_prefix = self.flow_control.modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", prompt_prefix);\n }\n\n fn print_for_prompt(&self) {\n print!(\"for> \");\n }\n\n fn print_default_prompt(&self) {\n 
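\/\/ Print the ion prompt with the current working directory, falling back to ? when it cannot be determined.\n        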
let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n print!(\"ion:{}# \", cwd);\n }\n\n fn on_command(&mut self, command_string: &str, commands: &HashMap<&str, Command>) {\n self.history.add(command_string.to_string());\n\n let mut jobs = parse(command_string);\n\n \/\/ Execute commands\n for job in jobs.drain(..) {\n if self.flow_control.collecting_block {\n \/\/ TODO move this logic into \"end\" command\n if job.command == \"end\" {\n self.flow_control.collecting_block = false;\n let block_jobs: Vec = self.flow_control\n .current_block\n .jobs\n .drain(..)\n .collect();\n let mut variable = String::new();\n let mut values: Vec = vec![];\n if let Statement::For(ref var, ref vals) = self.flow_control.current_statement {\n variable = var.clone();\n values = vals.clone();\n }\n for value in values {\n self.variables.set_var(&variable, &value);\n for job in block_jobs.iter() {\n self.run_job(job, commands);\n }\n }\n self.flow_control.current_statement = Statement::Default;\n } else {\n self.flow_control.current_block.jobs.push(job);\n }\n } else {\n if self.flow_control.skipping() && !is_flow_control_command(&job.command) {\n continue;\n }\n self.run_job(&job, commands);\n }\n }\n }\n\n fn run_job(&mut self, job: &Job, commands: &HashMap<&str, Command>) {\n let mut job = self.variables.expand_job(job);\n job.expand_globs();\n let exit_status = if let Some(command) = commands.get(job.command.as_str()) {\n Some((*command.main)(job.args.as_slice(), self))\n } else {\n self.run_external_commmand(&job.args)\n };\n if let Some(code) = exit_status {\n self.variables.set_var(\"?\", &code.to_string());\n }\n }\n\n \/\/\/ Returns an exit code if a command was run\n fn run_external_commmand(&mut self, args: &Vec) -> Option {\n if let Some(path) = args.get(0) {\n let mut command = process::Command::new(path);\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n command.arg(arg);\n }\n }\n match command.spawn() {\n Ok(mut child) => {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n Some(code)\n } else {\n println!(\"{}: child ended by signal\", path);\n Some(TERMINATED)\n }\n }\n Err(err) => {\n println!(\"{}: Failed to wait: {}\", path, err);\n Some(100) \/\/ TODO what should we return here?\n }\n }\n }\n Err(err) => {\n println!(\"{}: Failed to execute: {}\", path, err);\n Some(NO_SUCH_COMMAND)\n }\n }\n } else {\n None\n }\n }\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| -> i32 {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box i32>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"Change the current directory\\n cd \",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.cd(args)\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Display the current directory stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.dirs(args)\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |args: &[String], _: &mut Shell| -> i32 {\n if let Some(status) = args.get(1) {\n if let Ok(status) = status.parse::() {\n process::exit(status);\n }\n }\n \/\/ TODO should use exit status of previously run command, not 0\n process::exit(0);\n },\n });\n\n commands.insert(\"let\",\n Command {\n name: \"let\",\n help: \"View, set or unset variables\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.let_(args)\n },\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"Read some variables\\n read \",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.read(args)\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Push a directory to the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.pushd(args)\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Pop a directory from the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.popd(args)\n },\n });\n\n commands.insert(\"history\",\n Command {\n name: \"history\",\n help: \"Display a log of all commands previously executed\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.history.history(args)\n },\n });\n\n commands.insert(\"if\",\n Command {\n name: \"if\",\n help: \"Conditionally execute code\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.if_(args)\n },\n });\n\n commands.insert(\"else\",\n Command {\n name: \"else\",\n help: \"Execute code if a previous condition was false\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.else_(args)\n },\n });\n\n commands.insert(\"end\",\n Command {\n name: \"end\",\n help: \"End a code block\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.end(args)\n },\n });\n\n commands.insert(\"for\",\n Command {\n name: \"for\",\n help: \"Iterate through a list\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.for_(args)\n },\n });\n\n let command_helper: HashMap<&'static str, &'static str> = commands.iter()\n .map(|(k, v)| {\n (*k, v.help)\n })\n .collect();\n\n commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display helpful information about a given command, or list \\\n commands if none specified\\n help \",\n main: box move |args: &[String], _: &mut Shell| -> i32 {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command.as_str()) {\n match command_helper.get(command.as_str()) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 
'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n SUCCESS\n },\n });\n\n commands\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n\n for arg in env::args().skip(1) {\n let mut command_list = String::new();\n if let Ok(mut file) = File::open(&arg) {\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, arg);\n }\n }\n shell.on_command(&command_list, &commands);\n return;\n }\n\n loop {\n shell.print_prompt();\n\n if let Some(command) = readln() {\n let command = command.trim();\n if !command.is_empty() {\n shell.on_command(command, &commands);\n }\n } else {\n break;\n }\n }\n}\nWIP job control#![feature(deque_extras)]\n#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nextern crate glob;\n\nuse std::collections::HashMap;\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\n\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::{parse, Job};\nuse self::variables::Variables;\nuse self::history::History;\nuse self::flow_control::{FlowControl, is_flow_control_command, Statement};\nuse self::status::{SUCCESS, NO_SUCH_COMMAND, TERMINATED};\n\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod variables;\npub mod history;\npub mod flow_control;\npub mod status;\n\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n variables: Variables,\n flow_control: FlowControl,\n directory_stack: DirectoryStack,\n history: History,\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction fails\n pub fn new() -> Self {\n Shell {\n variables: Variables::new(),\n flow_control: FlowControl::new(),\n directory_stack: DirectoryStack::new().expect(\"\"),\n history: History::new(),\n }\n }\n\n pub fn print_prompt(&self) {\n self.print_prompt_prefix();\n match self.flow_control.current_statement {\n Statement::For(_, _) => self.print_for_prompt(),\n Statement::Default => self.print_default_prompt(),\n }\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n\n }\n\n \/\/ TODO eventually this thing should be gone\n fn print_prompt_prefix(&self) {\n let prompt_prefix = self.flow_control.modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", prompt_prefix);\n }\n\n fn print_for_prompt(&self) {\n print!(\"for> \");\n }\n\n fn print_default_prompt(&self) {\n let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n print!(\"ion:{}# \", cwd);\n }\n\n fn on_command(&mut self, command_string: &str, commands: &HashMap<&str, Command>) {\n self.history.add(command_string.to_string());\n\n let mut jobs = parse(command_string);\n\n \/\/ Execute commands\n for job in jobs.drain(..) 
{\n if self.flow_control.collecting_block {\n \/\/ TODO move this logic into \"end\" command\n if job.command == \"end\" {\n self.flow_control.collecting_block = false;\n let block_jobs: Vec = self.flow_control\n .current_block\n .jobs\n .drain(..)\n .collect();\n let mut variable = String::new();\n let mut values: Vec = vec![];\n if let Statement::For(ref var, ref vals) = self.flow_control.current_statement {\n variable = var.clone();\n values = vals.clone();\n }\n for value in values {\n self.variables.set_var(&variable, &value);\n for job in block_jobs.iter() {\n self.run_job(job, commands);\n }\n }\n self.flow_control.current_statement = Statement::Default;\n } else {\n self.flow_control.current_block.jobs.push(job);\n }\n } else {\n if self.flow_control.skipping() && !is_flow_control_command(&job.command) {\n continue;\n }\n self.run_job(&job, commands);\n }\n }\n }\n\n fn run_job(&mut self, job: &Job, commands: &HashMap<&str, Command>) {\n let mut job = self.variables.expand_job(job);\n job.expand_globs();\n let exit_status = if let Some(command) = commands.get(job.command.as_str()) {\n Some((*command.main)(job.args.as_slice(), self))\n } else {\n self.run_external_commmand(&job)\n };\n if let Some(code) = exit_status {\n self.variables.set_var(\"?\", &code.to_string());\n }\n }\n\n \/\/\/ Returns an exit code if a command was run\n fn run_external_commmand(&mut self, job: &Job) -> Option {\n let mut command = process::Command::new(&job.command);\n for i in 1..job.args.len() {\n if let Some(arg) = job.args.get(i) {\n command.arg(arg);\n }\n }\n if job.background {\n command.stdin(process::Stdio::null());\n }\n match command.spawn() {\n Ok(mut child) => {\n if job.background {\n None\n } else {\n Some(Shell::wait_and_get_status(&mut child, &job.command))\n }\n\n }\n Err(err) => {\n println!(\"{}: Failed to execute: {}\", job.command, err);\n Some(NO_SUCH_COMMAND)\n }\n }\n }\n\n \/\/ TODO don't pass in command and do printing outside this function\n fn wait_and_get_status(child: &mut process::Child, command: &str) -> i32 {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n code\n } else {\n println!(\"{}: child ended by signal\", command);\n TERMINATED\n }\n }\n Err(err) => {\n println!(\"{}: Failed to wait: {}\", command, err);\n 100 \/\/ TODO what should we return here?\n }\n }\n }\n\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| -> i32 {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box i32>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"Change the current directory\\n cd \",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.cd(args)\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Display the current directory stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.dirs(args)\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |args: &[String], _: &mut Shell| -> i32 {\n if let Some(status) = args.get(1) {\n if let Ok(status) = status.parse::() {\n process::exit(status);\n }\n }\n \/\/ TODO should use exit status of previously run command, not 0\n process::exit(0);\n },\n });\n\n commands.insert(\"let\",\n Command {\n name: \"let\",\n help: \"View, set or unset variables\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.let_(args)\n },\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"Read some variables\\n read \",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.read(args)\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Push a directory to the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.pushd(args)\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Pop a directory from the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.popd(args)\n },\n });\n\n commands.insert(\"history\",\n Command {\n name: \"history\",\n help: \"Display a log of all commands previously executed\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.history.history(args)\n },\n });\n\n commands.insert(\"if\",\n Command {\n name: \"if\",\n help: \"Conditionally execute code\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.if_(args)\n },\n });\n\n commands.insert(\"else\",\n Command {\n name: \"else\",\n help: \"Execute code if a previous condition was false\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.else_(args)\n },\n });\n\n commands.insert(\"end\",\n Command {\n name: \"end\",\n help: \"End a code block\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.end(args)\n },\n });\n\n commands.insert(\"for\",\n Command {\n name: \"for\",\n help: \"Iterate through a list\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.for_(args)\n },\n });\n\n let command_helper: HashMap<&'static str, &'static str> = commands.iter()\n .map(|(k, v)| {\n (*k, v.help)\n })\n .collect();\n\n commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display helpful information about a given command, or list \\\n commands if none specified\\n help \",\n main: box move |args: &[String], _: &mut Shell| -> i32 {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command.as_str()) {\n match command_helper.get(command.as_str()) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 
'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n SUCCESS\n },\n });\n\n commands\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n\n for arg in env::args().skip(1) {\n let mut command_list = String::new();\n if let Ok(mut file) = File::open(&arg) {\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, arg);\n }\n }\n shell.on_command(&command_list, &commands);\n return;\n }\n\n loop {\n shell.print_prompt();\n\n if let Some(command) = readln() {\n let command = command.trim();\n if !command.is_empty() {\n shell.on_command(command, &commands);\n }\n } else {\n break;\n }\n }\n}\n<|endoftext|>"} {"text":"dynamic can be relative to many units<|endoftext|>"} {"text":"tabbed navigation working<|endoftext|>"} {"text":"Key in calculate_response did not need to be mutable<|endoftext|>"} {"text":"Added a cool looking colored_ridge exampleextern crate turtle;\n\nuse std::f64::consts::E;\n\nuse turtle::{Turtle, Color, random};\n\nfn main() {\n let mut turtle = Turtle::new();\n\n let amplitude = 100.0;\n let width = 800.0;\n let step = 2.0;\n let height_factor = 2.0;\n\n turtle.set_speed(\"fastest\");\n turtle.pen_up();\n turtle.right(90.0);\n turtle.backward(width \/ 2.0);\n turtle.pen_down();\n\n turtle.set_background_color(\"grey\");\n\n turtle.set_speed(\"normal\");\n for i in 0..(width \/ step) as i32 {\n let x = i as f64 * step;\n \/\/ y = e^(-x^2) translated and scaled by the width and amplitude\n \/\/ 200e^(-(1\/200(x - 400))^2)\n let y = amplitude * E.powf(-(1.0\/(width \/ 4.0) * (x - width\/2.0)).powi(2));\n\n turtle.set_pen_color(random::().visible());\n turtle.set_pen_size(y * height_factor);\n turtle.forward(step);\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate winit;\nextern crate xcb;\nextern crate vk_sys as vk;\nextern crate gfx_core;\nextern crate gfx_device_vulkan;\n\nuse std::{mem, ptr};\nuse gfx_core::format;\n\n\npub fn init_winit(builder: winit::WindowBuilder) -> (winit::Window, gfx_device_vulkan::GraphicsQueue, gfx_device_vulkan::Factory) {\n let (device, factory, _backend) = gfx_device_vulkan::create(&builder.window.title, 1, &[],\n &[\"VK_KHR_surface\", \"VK_KHR_xcb_surface\"], &[\"VK_KHR_swapchain\"]);\n let win = builder.build().unwrap();\n (win, device, factory)\n}\n\npub type TargetHandle = gfx_core::handle::RenderTargetView;\n\npub struct SwapTarget {\n _image: vk::Image,\n target: TargetHandle,\n _fence: vk::Fence,\n}\n\npub struct Window {\n connection: xcb::Connection,\n _foreground: u32,\n window: u32,\n swapchain: vk::SwapchainKHR,\n targets: Vec,\n queue: gfx_device_vulkan::GraphicsQueue,\n}\n\npub struct Frame<'a> {\n window: &'a mut Window,\n target_id: u32,\n}\n\nimpl<'a> Frame<'a> {\n pub fn 
get_target(&self) -> TargetHandle {\n self.window.targets[self.target_id as usize].target.clone()\n }\n pub fn get_queue(&mut self) -> &mut gfx_device_vulkan::GraphicsQueue {\n &mut self.window.queue\n }\n}\n\nimpl<'a> Drop for Frame<'a> {\n fn drop(&mut self) {\n let mut result = vk::SUCCESS;\n let info = vk::PresentInfoKHR {\n sType: vk::STRUCTURE_TYPE_PRESENT_INFO_KHR,\n pNext: ptr::null(),\n waitSemaphoreCount: 0,\n pWaitSemaphores: ptr::null(),\n swapchainCount: 1,\n pSwapchains: &self.window.swapchain,\n pImageIndices: &self.target_id,\n pResults: &mut result,\n };\n let (_dev, vk) = self.window.queue.get_share().get_device();\n unsafe {\n vk.QueuePresentKHR(self.window.queue.get_queue(), &info);\n }\n assert_eq!(vk::SUCCESS, result);\n }\n}\n\nimpl Window {\n pub fn wait_draw(&mut self) -> Result, ()> {\n let ev = match self.connection.wait_for_event() {\n Some(ev) => ev,\n None => return Err(()),\n };\n \/\/self.connection.flush();\n match ev.response_type() & 0x7F {\n xcb::EXPOSE => Ok(Some(self.start_frame())),\n xcb::KEY_PRESS => Err(()),\n _ => Ok(None)\n }\n }\n\n pub fn start_frame(&mut self) -> Frame {\n \/\/TODO: handle window resize\n let index = unsafe {\n let (dev, vk) = self.queue.get_share().get_device();\n let mut i = 0;\n assert_eq!(vk::SUCCESS, vk.AcquireNextImageKHR(dev, self.swapchain, 60, 0, 0, &mut i));\n i\n };\n Frame {\n window: self,\n target_id: index,\n }\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n xcb::unmap_window(&self.connection, self.window);\n xcb::destroy_window(&self.connection, self.window);\n self.connection.flush();\n }\n}\n\npub fn init_xcb(title: &str, width: u32, height: u32) -> (Window, gfx_device_vulkan::Factory) {\n let (mut device, mut factory, backend) = gfx_device_vulkan::create(title, 1, &[],\n &[\"VK_KHR_surface\", \"VK_KHR_xcb_surface\"], &[\"VK_KHR_swapchain\"]);\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n let (window, foreground) = {\n let setup = conn.get_setup();\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let foreground = conn.generate_id();\n xcb::create_gc(&conn, foreground, screen.root(), &[\n (xcb::GC_FOREGROUND, screen.black_pixel()),\n (xcb::GC_GRAPHICS_EXPOSURES, 0),\n ]);\n\n let win = conn.generate_id();\n xcb::create_window(&conn,\n xcb::COPY_FROM_PARENT as u8,\n win,\n screen.root(),\n 0, 0,\n width as u16, height as u16,\n 10,\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n screen.root_visual(), &[\n (xcb::CW_BACK_PIXEL, screen.black_pixel()),\n (xcb::CW_EVENT_MASK, xcb::EVENT_MASK_KEY_PRESS | xcb::EVENT_MASK_EXPOSURE),\n ]\n );\n (win, foreground)\n };\n\n xcb::map_window(&conn, window);\n xcb::change_property(&conn, xcb::PROP_MODE_REPLACE as u8, window,\n xcb::ATOM_WM_NAME, xcb::ATOM_STRING, 8, title.as_bytes());\n conn.flush();\n\n let surface = {\n let (inst, vk) = backend.get_instance();\n let info = vk::XcbSurfaceCreateInfoKHR {\n sType: vk::STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,\n pNext: ptr::null(),\n flags: 0,\n connection: conn.get_raw_conn() as *const _,\n window: window as *const _, \/\/HACK! 
TODO: fix the bindings\n };\n\n unsafe {\n let mut out = mem::zeroed();\n assert_eq!(vk::SUCCESS, vk.CreateXcbSurfaceKHR(inst, &info, ptr::null(), &mut out));\n out\n }\n };\n\n let (dev, vk) = backend.get_device();\n let mut images: [vk::Image; 2] = [0; 2];\n let mut num = images.len() as u32;\n\n let swapchain_info = vk::SwapchainCreateInfoKHR {\n sType: vk::STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,\n pNext: ptr::null(),\n flags: 0,\n surface: surface,\n minImageCount: num,\n imageFormat: vk::FORMAT_R8G8B8A8_UNORM,\n imageColorSpace: vk::COLORSPACE_SRGB_NONLINEAR_KHR,\n imageExtent: vk::Extent2D { width: width, height: height },\n imageArrayLayers: 1,\n imageUsage: vk::IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,\n imageSharingMode: vk::SHARING_MODE_EXCLUSIVE,\n queueFamilyIndexCount: 1,\n pQueueFamilyIndices: &0,\n preTransform: vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR,\n compositeAlpha: vk::COMPOSITE_ALPHA_OPAQUE_BIT_KHR,\n presentMode: vk::PRESENT_MODE_FIFO_RELAXED_KHR,\n clipped: vk::TRUE,\n oldSwapchain: 0,\n };\n\n let swapchain = unsafe {\n let mut out = mem::zeroed();\n assert_eq!(vk::SUCCESS, vk.CreateSwapchainKHR(dev, &swapchain_info, ptr::null(), &mut out));\n out\n };\n\n assert_eq!(vk::SUCCESS, unsafe {\n vk.GetSwapchainImagesKHR(dev, swapchain, &mut num, images.as_mut_ptr())\n });\n\n let mut cbuf = factory.create_command_buffer();\n\n let format = format::Format(format::SurfaceType::R8_G8_B8_A8, format::ChannelType::Unorm);\n let targets = images[.. num as usize].iter().map(|image| {\n use gfx_core::factory::Typed;\n cbuf.image_barrier(*image, vk::IMAGE_ASPECT_COLOR_BIT, vk::IMAGE_LAYOUT_UNDEFINED, vk::IMAGE_LAYOUT_PRESENT_SRC_KHR);\n let raw_view = factory.view_swapchain_image(*image, format, (width, height)).unwrap();\n SwapTarget {\n _image: *image,\n target: Typed::new(raw_view),\n _fence: factory.create_fence(true),\n }\n }).collect();\n\n {\n use gfx_core::Device;\n device.submit(&mut cbuf);\n }\n\n let win = Window {\n connection: conn,\n _foreground: foreground,\n window: window,\n swapchain: swapchain,\n targets: targets,\n queue: device,\n };\n (win, factory)\n}\n[vk] validation layers support\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate winit;\nextern crate xcb;\nextern crate vk_sys as vk;\nextern crate gfx_core;\nextern crate gfx_device_vulkan;\n\nuse std::ffi::CStr;\nuse std::{mem, ptr};\nuse std::os::raw;\nuse gfx_core::format;\n\n\npub fn init_winit(builder: winit::WindowBuilder) -> (winit::Window, gfx_device_vulkan::GraphicsQueue, gfx_device_vulkan::Factory) {\n let (device, factory, _backend) = gfx_device_vulkan::create(&builder.window.title, 1, &[],\n &[\"VK_KHR_surface\", \"VK_KHR_xcb_surface\"], &[\"VK_KHR_swapchain\"]);\n let win = builder.build().unwrap();\n (win, device, factory)\n}\n\npub type TargetHandle = gfx_core::handle::RenderTargetView;\n\npub struct SwapTarget {\n _image: vk::Image,\n target: TargetHandle,\n 
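\/\/ Per-image fence created along with the swapchain view; the leading underscore marks it as unused for now.\n    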
_fence: vk::Fence,\n}\n\npub struct Window {\n connection: xcb::Connection,\n _foreground: u32,\n window: u32,\n _debug_callback: Option,\n swapchain: vk::SwapchainKHR,\n targets: Vec,\n queue: gfx_device_vulkan::GraphicsQueue,\n}\n\npub struct Frame<'a> {\n window: &'a mut Window,\n target_id: u32,\n}\n\nimpl<'a> Frame<'a> {\n pub fn get_target(&self) -> TargetHandle {\n self.window.targets[self.target_id as usize].target.clone()\n }\n pub fn get_queue(&mut self) -> &mut gfx_device_vulkan::GraphicsQueue {\n &mut self.window.queue\n }\n}\n\nimpl<'a> Drop for Frame<'a> {\n fn drop(&mut self) {\n let mut result = vk::SUCCESS;\n let info = vk::PresentInfoKHR {\n sType: vk::STRUCTURE_TYPE_PRESENT_INFO_KHR,\n pNext: ptr::null(),\n waitSemaphoreCount: 0,\n pWaitSemaphores: ptr::null(),\n swapchainCount: 1,\n pSwapchains: &self.window.swapchain,\n pImageIndices: &self.target_id,\n pResults: &mut result,\n };\n let (_dev, vk) = self.window.queue.get_share().get_device();\n unsafe {\n vk.QueuePresentKHR(self.window.queue.get_queue(), &info);\n }\n assert_eq!(vk::SUCCESS, result);\n }\n}\n\nimpl Window {\n pub fn wait_draw(&mut self) -> Result, ()> {\n let ev = match self.connection.wait_for_event() {\n Some(ev) => ev,\n None => return Err(()),\n };\n \/\/self.connection.flush();\n match ev.response_type() & 0x7F {\n xcb::EXPOSE => Ok(Some(self.start_frame())),\n xcb::KEY_PRESS => Err(()),\n _ => Ok(None)\n }\n }\n\n pub fn start_frame(&mut self) -> Frame {\n \/\/TODO: handle window resize\n let index = unsafe {\n let (dev, vk) = self.queue.get_share().get_device();\n let mut i = 0;\n assert_eq!(vk::SUCCESS, vk.AcquireNextImageKHR(dev, self.swapchain, 60, 0, 0, &mut i));\n i\n };\n Frame {\n window: self,\n target_id: index,\n }\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n xcb::unmap_window(&self.connection, self.window);\n xcb::destroy_window(&self.connection, self.window);\n self.connection.flush();\n }\n}\n\nconst LAYERS: &'static [&'static str] = &[\n];\nconst LAYERS_DEBUG: &'static [&'static str] = &[\n \"VK_LAYER_LUNARG_standard_validation\",\n];\nconst EXTENSIONS: &'static [&'static str] = &[\n \"VK_KHR_surface\",\n \"VK_KHR_xcb_surface\",\n];\nconst EXTENSIONS_DEBUG: &'static [&'static str] = &[\n \"VK_KHR_surface\",\n \"VK_KHR_xcb_surface\",\n \"VK_EXT_debug_report\",\n];\nconst DEV_EXTENSIONS: &'static [&'static str] = &[\n \"VK_KHR_swapchain\",\n];\n\nextern \"system\" fn callback(flags: vk::DebugReportFlagsEXT,\n _ob_type: vk::DebugReportObjectTypeEXT, _object: u64, _location: usize,\n _msg_code: i32, layer_prefix_c: *const raw::c_char,\n description_c: *const raw::c_char, _user_data: *mut raw::c_void) -> u32\n{\n let layer_prefix = unsafe { CStr::from_ptr(layer_prefix_c) }.to_str().unwrap();\n let description = unsafe { CStr::from_ptr(description_c) }.to_str().unwrap();\n println!(\"Vk flags {:x} in layer {}: {}\", flags, layer_prefix, description);\n vk::FALSE\n}\n\npub fn init_xcb(title: &str, width: u32, height: u32) -> (Window, gfx_device_vulkan::Factory) {\n let debug = true;\n let (mut device, mut factory, backend) = gfx_device_vulkan::create(title, 1,\n if debug {LAYERS_DEBUG} else {LAYERS},\n if debug {EXTENSIONS_DEBUG} else {EXTENSIONS},\n DEV_EXTENSIONS);\n\n let debug_callback = if debug {\n let info = vk::DebugReportCallbackCreateInfoEXT {\n sType: vk::STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,\n pNext: ptr::null(),\n flags: vk::DEBUG_REPORT_INFORMATION_BIT_EXT | vk::DEBUG_REPORT_WARNING_BIT_EXT |\n vk::DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | 
vk::DEBUG_REPORT_ERROR_BIT_EXT |\n vk::DEBUG_REPORT_DEBUG_BIT_EXT,\n pfnCallback: callback,\n pUserData: ptr::null_mut(),\n };\n let (inst, vk) = backend.get_instance();\n Some(unsafe {\n let mut out = mem::zeroed();\n assert_eq!(vk::SUCCESS, vk.CreateDebugReportCallbackEXT(inst, &info, ptr::null(), &mut out));\n out\n })\n }else {\n None\n };\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n let (window, foreground) = {\n let setup = conn.get_setup();\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let foreground = conn.generate_id();\n xcb::create_gc(&conn, foreground, screen.root(), &[\n (xcb::GC_FOREGROUND, screen.black_pixel()),\n (xcb::GC_GRAPHICS_EXPOSURES, 0),\n ]);\n\n let win = conn.generate_id();\n xcb::create_window(&conn,\n xcb::COPY_FROM_PARENT as u8,\n win,\n screen.root(),\n 0, 0,\n width as u16, height as u16,\n 10,\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n screen.root_visual(), &[\n (xcb::CW_BACK_PIXEL, screen.black_pixel()),\n (xcb::CW_EVENT_MASK, xcb::EVENT_MASK_KEY_PRESS | xcb::EVENT_MASK_EXPOSURE),\n ]\n );\n (win, foreground)\n };\n\n xcb::map_window(&conn, window);\n xcb::change_property(&conn, xcb::PROP_MODE_REPLACE as u8, window,\n xcb::ATOM_WM_NAME, xcb::ATOM_STRING, 8, title.as_bytes());\n conn.flush();\n\n let surface = {\n let (inst, vk) = backend.get_instance();\n let info = vk::XcbSurfaceCreateInfoKHR {\n sType: vk::STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,\n pNext: ptr::null(),\n flags: 0,\n connection: conn.get_raw_conn() as *const _,\n window: window as *const _, \/\/HACK! TODO: fix the bindings\n };\n\n unsafe {\n let mut out = mem::zeroed();\n assert_eq!(vk::SUCCESS, vk.CreateXcbSurfaceKHR(inst, &info, ptr::null(), &mut out));\n out\n }\n };\n\n let (dev, vk) = backend.get_device();\n let mut images: [vk::Image; 2] = [0; 2];\n let mut num = images.len() as u32;\n\n let swapchain_info = vk::SwapchainCreateInfoKHR {\n sType: vk::STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,\n pNext: ptr::null(),\n flags: 0,\n surface: surface,\n minImageCount: num,\n imageFormat: vk::FORMAT_R8G8B8A8_UNORM,\n imageColorSpace: vk::COLORSPACE_SRGB_NONLINEAR_KHR,\n imageExtent: vk::Extent2D { width: width, height: height },\n imageArrayLayers: 1,\n imageUsage: vk::IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,\n imageSharingMode: vk::SHARING_MODE_EXCLUSIVE,\n queueFamilyIndexCount: 1,\n pQueueFamilyIndices: &0,\n preTransform: vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR,\n compositeAlpha: vk::COMPOSITE_ALPHA_OPAQUE_BIT_KHR,\n presentMode: vk::PRESENT_MODE_FIFO_RELAXED_KHR,\n clipped: vk::TRUE,\n oldSwapchain: 0,\n };\n\n let swapchain = unsafe {\n let mut out = mem::zeroed();\n assert_eq!(vk::SUCCESS, vk.CreateSwapchainKHR(dev, &swapchain_info, ptr::null(), &mut out));\n out\n };\n\n assert_eq!(vk::SUCCESS, unsafe {\n vk.GetSwapchainImagesKHR(dev, swapchain, &mut num, images.as_mut_ptr())\n });\n\n let mut cbuf = factory.create_command_buffer();\n\n let format = format::Format(format::SurfaceType::R8_G8_B8_A8, format::ChannelType::Unorm);\n let targets = images[.. 
num as usize].iter().map(|image| {\n use gfx_core::factory::Typed;\n cbuf.image_barrier(*image, vk::IMAGE_ASPECT_COLOR_BIT, vk::IMAGE_LAYOUT_UNDEFINED, vk::IMAGE_LAYOUT_PRESENT_SRC_KHR);\n let raw_view = factory.view_swapchain_image(*image, format, (width, height)).unwrap();\n SwapTarget {\n _image: *image,\n target: Typed::new(raw_view),\n _fence: factory.create_fence(true),\n }\n }).collect();\n\n {\n use gfx_core::Device;\n device.submit(&mut cbuf);\n }\n\n let win = Window {\n connection: conn,\n _foreground: foreground,\n window: window,\n _debug_callback: debug_callback,\n swapchain: swapchain,\n targets: targets,\n queue: device,\n };\n (win, factory)\n}\n<|endoftext|>"} {"text":"Fix: Also print if output is a data pipe<|endoftext|>"} {"text":"Rollup merge of #48988 - bobtwinkles:add_48070_test, r=nikomatsakis\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ run-pass\n\/\/ revisions: lxl nll\n\n#![cfg_attr(nll, feature(nll))]\n\nstruct Foo {\n x: u32\n}\n\nimpl Foo {\n fn twiddle(&mut self) -> &mut Self { self }\n fn twaddle(&mut self) -> &mut Self { self }\n fn emit(&mut self) {\n self.x += 1;\n }\n}\n\nfn main() {\n let mut foo = Foo { x: 0 };\n match 22 {\n 22 => &mut foo,\n 44 => foo.twiddle(),\n _ => foo.twaddle(),\n }.emit();\n}\n<|endoftext|>"} {"text":"Id::transmute<|endoftext|>"} {"text":"(test) Added test for failing tests.#![feature(phase)]\n#[phase(plugin, link)]\nextern crate stainless;\n\ndescribe! failing {\n failing \"should fail\" {\n fail!(\"should still pass\");\n }\n}\n\n<|endoftext|>"} {"text":"chore: enum<|endoftext|>"} {"text":"ask and you shall receive<|endoftext|>"} {"text":"initial code\/\/! A simple and easy wrapper around `Vec` to implement a FIFO queue. This is\n\/\/! no fancy, advanced data type but something simple you can use easily until\n\/\/! or unless you need something different.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use queue::Queue;\n\/\/!\n\/\/! let mut q = Queue::new();\n\/\/!\n\/\/! q.queue(\"hello\").unwrap();\n\/\/! q.queue(\"out\").unwrap();\n\/\/! q.queue(\"there!\").unwrap();\n\/\/!\n\/\/! while let Some(item) = q.dequeue() {\n\/\/! println!(\"{}\", item);\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! Outputs:\n\/\/!\n\/\/! ```text\n\/\/! hello\n\/\/! out\n\/\/! there!\n\/\/! ```\n\n#[cfg(test)]\nmod tests;\n\n\/\/\/ A first in, first out queue built around `Vec`. An optional capacity can be\n\/\/\/ set (or changed) to ensure the `Queue` never grows past a certain size. If\n\/\/\/ the capacity is not specified (ie set to `None`) then the `Queue` will grow\n\/\/\/ as needed. If you're worried about memory allocation, set a capacity and\n\/\/\/ the necessary memory will be allocated at that time. Otherwise memory could\n\/\/\/ be allocated, deallocated and reallocated as the queue changes size. 
The\n\/\/\/ only requirement of the type used is that it implements the `Clone` trait.\n#[derive(Clone, Debug)]\npub struct Queue {\n\tvec: Vec,\n\tcap: Option,\n}\n\nimpl Queue {\n\t\/\/\/ Constructs a new `Queue`.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q: Queue = Queue::new();\n\t\/\/\/ ```\n\tpub fn new() -> Queue {\n\t\tQueue {\n\t\t\tvec: Vec::new(),\n\t\t\tcap: None,\n\t\t}\n\t}\n\n\t\/\/\/ Constructs a new `Queue` with a specified capacity.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q: Queue = Queue::with_capacity(20);\n\t\/\/\/ ```\n\tpub fn with_capacity(cap: usize) -> Queue {\n\t\tQueue {\n\t\t\tvec: Vec::with_capacity(cap),\n\t\t\tcap: Some(cap),\n\t\t}\n\t}\n\n\t\/\/\/ Add an item to the end of the `Queue`. Returns `Ok(usize)` with the new\n\t\/\/\/ length of the `Queue`, or `Err(())` if there is no more room.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q = Queue::new();\n\t\/\/\/ q.queue(\"hello\").unwrap();\n\t\/\/\/ assert_eq!(q.peek(), Some(\"hello\"));\n\t\/\/\/ ```\n\tpub fn queue(&mut self, item: T) -> Result {\n\t\tif let Some(cap) = self.cap {\n\t\t\tif self.vec.len() >= cap {\n\t\t\t\tErr(())\n\t\t\t} else {\n\t\t\t\tself.vec.push(item);\n\t\t\t\tOk(self.vec.len())\n\t\t\t}\n\t\t} else {\n\t\t\tself.vec.push(item);\n\t\t\tOk(self.vec.len())\n\t\t}\n\t}\n\n\t\/\/\/ Remove the next item from the `Queue`. Returns `Option` so it will\n\t\/\/\/ return either `Some(T)` or `None` depending on if there's anything in\n\t\/\/\/ the `Queue` to get.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q = Queue::new();\n\t\/\/\/ q.queue(\"hello\").unwrap();\n\t\/\/\/ q.queue(\"world\").unwrap();\n\t\/\/\/ assert_eq!(q.dequeue(), Some(\"hello\"));\n\t\/\/\/ ```\n\tpub fn dequeue(&mut self) -> Option {\n\t\tif self.vec.len() > 0 {\n\t\t\tSome(self.vec.remove(0))\n\t\t} else {\n\t\t\tNone\n\t\t}\n\t}\n\n\t\/\/\/ Peek at the next item in the `Queue`, if there's something there.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q = Queue::new();\n\t\/\/\/ q.queue(12).unwrap();\n\t\/\/\/ assert_eq!(q.peek(), Some(12));\n\t\/\/\/ ```\n\tpub fn peek(&self) -> Option {\n\t\tif self.vec.len() > 0 {\n\t\t\tSome(self.vec[0].clone())\n\t\t} else {\n\t\t\tNone\n\t\t}\n\t}\n\n\t\/\/\/ The number of items currently in the `Queue`.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q = Queue::with_capacity(8);\n\t\/\/\/ q.queue(1).unwrap();\n\t\/\/\/ q.queue(2).unwrap();\n\t\/\/\/ assert_eq!(q.len(), 2);\n\t\/\/\/ ```\n\tpub fn len(&self) -> usize {\n\t\tself.vec.len()\n\t}\n\n\t\/\/\/ Query the capacity for a `Queue`. If there is no capacity set (the\n\t\/\/\/ `Queue` can grow as needed) then `None` will be returned, otherwise\n\t\/\/\/ it will be `Some(usize)`.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let q: Queue = Queue::with_capacity(12);\n\t\/\/\/ assert_eq!(q.capacity(), Some(12));\n\t\/\/\/ ```\n\tpub fn capacity(&self) -> Option {\n\t\tself.cap\n\t}\n\n\t\/\/\/ Modify the capacity of a `Queue`. If set to `None`, the `Queue` will\n\t\/\/\/ grow automatically, as needed. Otherwise, it will be limited to the\n\t\/\/\/ specified number of items. 
If there are more items in the `Queue` than\n\t\/\/\/ the requested capacity, `Err(())` will be returned, otherwise the\n\t\/\/\/ operation will succeed and `Ok(())` will be returned. If the capacity\n\t\/\/\/ is shrunk, the underlying `Vec` will be shrunk also, which would free\n\t\/\/\/ up whatever extra memory was allocated for the `Queue`.\n\t\/\/\/\n\t\/\/\/ # Example\n\t\/\/\/\n\t\/\/\/ ```\n\t\/\/\/ # use queue::Queue;\n\t\/\/\/ let mut q: Queue = Queue::new();\n\t\/\/\/ q.set_capacity(12).unwrap();\n\t\/\/\/ q.set_capacity(None).unwrap();\n\t\/\/\/ ```\n\tpub fn set_capacity>>(&mut self, cap: C) -> Result<(), ()> {\n\t\tlet cap = cap.into();\n\n\t\tif cap == None {\n\t\t\tself.cap = None;\n\t\t\treturn Ok(());\n\t\t}\n\n\t\tif cap == self.cap {\n\t\t\treturn Ok(());\n\t\t}\n\n\t\tlet cap = cap.unwrap();\n\n\t\tif cap < self.vec.len() {\n\t\t\treturn Err(());\n\t\t}\n\n\t\tif let Some(scap) = self.cap {\n\t\t\tif cap < scap {\n\t\t\t\tself.vec.shrink_to_fit();\n\t\t\t}\n\t\t}\n\n\t\tlet r = cap - self.vec.len();\n\t\tself.vec.reserve_exact(r);\n\t\tself.cap = Some(cap);\n\n\t\tOk(())\n\t}\n}\n<|endoftext|>"} {"text":"Added tests to ensure that rectangle side methods return valid lines<|endoftext|>"} {"text":"Simplifying the bitmap header decoding.<|endoftext|>"} {"text":"(doc) Deny missing documentation.<|endoftext|>"} {"text":"Update function write<|endoftext|>"} {"text":"Zero extend rhs of sar imm, not sign extend<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![feature(plugin, custom_attribute)]\n#![plugin(gfx_macros)]\n\nextern crate cgmath;\nextern crate glfw;\nextern crate gfx;\nextern crate gfx_device_gl;\n\nuse cgmath::FixedArray;\nuse cgmath::{Matrix, Point3, Vector3};\nuse cgmath::{Transform, AffineMatrix3};\nuse gfx::traits::*;\nuse glfw::Context;\n\n#[vertex_format]\n#[derive(Clone, Copy)]\nstruct Vertex {\n #[as_float]\n #[name = \"a_Pos\"]\n pos: [i8; 3],\n\n #[as_float]\n #[name = \"a_TexCoord\"]\n tex_coord: [u8; 2],\n}\n\n\/\/ The shader_param attribute makes sure the following struct can be used to\n\/\/ pass parameters to a shader.\n#[shader_param]\nstruct Params {\n #[name = \"u_Transform\"]\n transform: [[f32; 4]; 4],\n\n #[name = \"t_Color\"]\n color: gfx::shade::TextureParam,\n}\n\n\n\/\/----------------------------------------\n\npub fn main() {\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n let (mut window, events) = glfw\n .create_window(640, 480, \"Cube example\", glfw::WindowMode::Windowed)\n .expect(\"Failed to create GLFW window.\");\n\n window.make_current();\n glfw.set_error_callback(glfw::FAIL_ON_ERRORS);\n window.set_key_polling(true);\n\n let (w, h) = window.get_framebuffer_size();\n let frame = gfx::Frame::new(w as u16, h as u16);\n\n let (mut device, mut factory) = gfx_device_gl::create(|s| window.get_proc_address(s));\n let mut renderer = factory.create_renderer();\n\n let vertex_data = [\n \/\/ top (0, 0, 1)\n Vertex { 
pos: [-1, -1, 1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 1] },\n \/\/ bottom (0, 0, -1)\n Vertex { pos: [-1, 1, -1], tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, -1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 1] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n \/\/ right (1, 0, 0)\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, 1, -1], tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n Vertex { pos: [ 1, -1, 1], tex_coord: [0, 1] },\n \/\/ left (-1, 0, 0)\n Vertex { pos: [-1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 0] },\n Vertex { pos: [-1, 1, -1], tex_coord: [0, 1] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n \/\/ front (0, 1, 0)\n Vertex { pos: [ 1, 1, -1], tex_coord: [1, 0] },\n Vertex { pos: [-1, 1, -1], tex_coord: [0, 0] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 1] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n \/\/ back (0, -1, 0)\n Vertex { pos: [ 1, -1, 1], tex_coord: [0, 0] },\n Vertex { pos: [-1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 1] },\n ];\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let index_data: &[u8] = &[\n 0, 1, 2, 2, 3, 0, \/\/ top\n 4, 5, 6, 6, 7, 4, \/\/ bottom\n 8, 9, 10, 10, 11, 8, \/\/ right\n 12, 13, 14, 14, 15, 12, \/\/ left\n 16, 17, 18, 18, 19, 16, \/\/ front\n 20, 21, 22, 22, 23, 20, \/\/ back\n ];\n\n let texture_info = gfx::tex::TextureInfo {\n width: 1,\n height: 1,\n depth: 1,\n levels: 1,\n kind: gfx::tex::TextureKind::Texture2D,\n format: gfx::tex::RGBA8,\n };\n let image_info = texture_info.to_image_info();\n let texture = factory.create_texture(texture_info).unwrap();\n factory.update_texture(&texture, &image_info,\n &[0x20u8, 0xA0u8, 0xC0u8, 0x00u8],\n None).unwrap();\n\n let sampler = factory.create_sampler(\n gfx::tex::SamplerInfo::new(gfx::tex::FilterMethod::Bilinear,\n gfx::tex::WrapMode::Clamp)\n );\n\n let program = {\n let vs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"cube_120.glslv\")),\n glsl_150: Some(include_bytes!(\"cube_150.glslv\")),\n .. gfx::ShaderSource::empty()\n };\n let fs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"cube_120.glslf\")),\n glsl_150: Some(include_bytes!(\"cube_150.glslf\")),\n .. 
gfx::ShaderSource::empty()\n };\n factory.link_program_source(vs, fs, &device.get_capabilities())\n .unwrap()\n };\n\n let view: AffineMatrix3 = Transform::look_at(\n &Point3::new(1.5f32, -5.0, 3.0),\n &Point3::new(0f32, 0.0, 0.0),\n &Vector3::unit_z(),\n );\n let aspect = w as f32 \/ h as f32;\n let proj = cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0);\n\n let data = Params {\n transform: proj.mul_m(&view.mat).into_fixed(),\n color: (texture, Some(sampler)),\n };\n\n let mut batch = gfx::batch::OwnedBatch::new(mesh, program, data).unwrap();\n batch.slice = factory.create_buffer_index::(index_data)\n .to_slice(gfx::PrimitiveType::TriangleList);\n batch.state.depth(gfx::state::Comparison::LessEqual, true);\n\n let clear_data = gfx::ClearData {\n color: [0.3, 0.3, 0.3, 1.0],\n depth: 1.0,\n stencil: 0,\n };\n\n while !window.should_close() {\n glfw.poll_events();\n for (_, event) in glfw::flush_messages(&events) {\n match event {\n glfw::WindowEvent::Key(glfw::Key::Escape, _, glfw::Action::Press, _) =>\n window.set_should_close(true),\n _ => {},\n }\n }\n\n renderer.clear(clear_data, gfx::COLOR | gfx::DEPTH, &frame);\n renderer.draw(&batch, &frame).unwrap();\n device.submit(renderer.as_buffer());\n renderer.reset();\n\n window.swap_buffers();\n device.after_frame();\n factory.cleanup();\n }\n}\nUpdated the cube example\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![feature(plugin, custom_attribute)]\n#![plugin(gfx_macros)]\n\nextern crate cgmath;\nextern crate glfw;\nextern crate gfx;\nextern crate gfx_window_glutin;\nextern crate glutin;\n\nuse cgmath::FixedArray;\nuse cgmath::{Matrix, Point3, Vector3};\nuse cgmath::{Transform, AffineMatrix3};\nuse gfx::traits::*;\n\n#[vertex_format]\n#[derive(Clone, Copy)]\nstruct Vertex {\n #[as_float]\n #[name = \"a_Pos\"]\n pos: [i8; 3],\n\n #[as_float]\n #[name = \"a_TexCoord\"]\n tex_coord: [u8; 2],\n}\n\n\/\/ The shader_param attribute makes sure the following struct can be used to\n\/\/ pass parameters to a shader.\n#[shader_param]\nstruct Params {\n #[name = \"u_Transform\"]\n transform: [[f32; 4]; 4],\n\n #[name = \"t_Color\"]\n color: gfx::shade::TextureParam,\n}\n\n\n\/\/----------------------------------------\n\npub fn main() {\n let (wrap, mut device, mut factory) = gfx_window_glutin::init_titled(\"Cube example\")\n .unwrap();\n let mut renderer = factory.create_renderer();\n\n let vertex_data = [\n \/\/ top (0, 0, 1)\n Vertex { pos: [-1, -1, 1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 1] },\n \/\/ bottom (0, 0, -1)\n Vertex { pos: [-1, 1, -1], tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, -1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 1] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n \/\/ right (1, 0, 0)\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 0] },\n Vertex { pos: [ 1, 1, -1], 
tex_coord: [1, 0] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n Vertex { pos: [ 1, -1, 1], tex_coord: [0, 1] },\n \/\/ left (-1, 0, 0)\n Vertex { pos: [-1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 0] },\n Vertex { pos: [-1, 1, -1], tex_coord: [0, 1] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n \/\/ front (0, 1, 0)\n Vertex { pos: [ 1, 1, -1], tex_coord: [1, 0] },\n Vertex { pos: [-1, 1, -1], tex_coord: [0, 0] },\n Vertex { pos: [-1, 1, 1], tex_coord: [0, 1] },\n Vertex { pos: [ 1, 1, 1], tex_coord: [1, 1] },\n \/\/ back (0, -1, 0)\n Vertex { pos: [ 1, -1, 1], tex_coord: [0, 0] },\n Vertex { pos: [-1, -1, 1], tex_coord: [1, 0] },\n Vertex { pos: [-1, -1, -1], tex_coord: [1, 1] },\n Vertex { pos: [ 1, -1, -1], tex_coord: [0, 1] },\n ];\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let index_data: &[u8] = &[\n 0, 1, 2, 2, 3, 0, \/\/ top\n 4, 5, 6, 6, 7, 4, \/\/ bottom\n 8, 9, 10, 10, 11, 8, \/\/ right\n 12, 13, 14, 14, 15, 12, \/\/ left\n 16, 17, 18, 18, 19, 16, \/\/ front\n 20, 21, 22, 22, 23, 20, \/\/ back\n ];\n\n let texture_info = gfx::tex::TextureInfo {\n width: 1,\n height: 1,\n depth: 1,\n levels: 1,\n kind: gfx::tex::TextureKind::Texture2D,\n format: gfx::tex::RGBA8,\n };\n let image_info = texture_info.to_image_info();\n let texture = factory.create_texture(texture_info).unwrap();\n factory.update_texture(&texture, &image_info,\n &[0x20u8, 0xA0u8, 0xC0u8, 0x00u8],\n None).unwrap();\n\n let sampler = factory.create_sampler(\n gfx::tex::SamplerInfo::new(gfx::tex::FilterMethod::Bilinear,\n gfx::tex::WrapMode::Clamp)\n );\n\n let program = {\n let vs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"cube_120.glslv\")),\n glsl_150: Some(include_bytes!(\"cube_150.glslv\")),\n .. gfx::ShaderSource::empty()\n };\n let fs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"cube_120.glslf\")),\n glsl_150: Some(include_bytes!(\"cube_150.glslf\")),\n .. 
gfx::ShaderSource::empty()\n };\n factory.link_program_source(vs, fs, &device.get_capabilities())\n .unwrap()\n };\n\n let view: AffineMatrix3 = Transform::look_at(\n &Point3::new(1.5f32, -5.0, 3.0),\n &Point3::new(0f32, 0.0, 0.0),\n &Vector3::unit_z(),\n );\n let aspect = {\n let (w, h) = wrap.get_size();\n w as f32 \/ h as f32\n };\n let proj = cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0);\n\n let data = Params {\n transform: proj.mul_m(&view.mat).into_fixed(),\n color: (texture, Some(sampler)),\n };\n\n let mut batch = gfx::batch::OwnedBatch::new(mesh, program, data).unwrap();\n batch.slice = factory.create_buffer_index::(index_data)\n .to_slice(gfx::PrimitiveType::TriangleList);\n batch.state.depth(gfx::state::Comparison::LessEqual, true);\n\n let clear_data = gfx::ClearData {\n color: [0.3, 0.3, 0.3, 1.0],\n depth: 1.0,\n stencil: 0,\n };\n\n 'main: loop {\n \/\/ quit when Esc is pressed.\n for event in wrap.window.poll_events() {\n match event {\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main,\n glutin::Event::Closed => break 'main,\n _ => {},\n }\n }\n\n renderer.clear(clear_data, gfx::COLOR | gfx::DEPTH, &wrap);\n renderer.draw(&batch, &wrap).unwrap();\n device.submit(renderer.as_buffer());\n renderer.reset();\n\n wrap.window.swap_buffers();\n device.after_frame();\n factory.cleanup();\n }\n}\n<|endoftext|>"} {"text":"Styling, directory viewing and dumb binary detection.pub const CSS: &'static str =\nr\"\n* {\n\tpadding:0;\n\tmargin:0;\n}\n\nbody {\n\tcolor: #333;\n\tfont: 14px Sans-Serif;\n\tpadding: 50px;\n\tbackground: #eee;\n}\n\nh1 {\n\ttext-align: center;\n\tpadding: 20px 0 12px 0;\n\tmargin: 0;\n}\nh2 {\n\tfont-size: 16px;\n\ttext-align: center;\n\tpadding: 0 0 12px 0;\n}\n\n#container {\n\tbox-shadow: 0 5px 10px -5px rgba(0,0,0,0.5);\n\tposition: relative;\n\tbackground: white;\n}\n\ntable {\n\tbackground-color: #F3F3F3;\n\tborder-collapse: collapse;\n\twidth: 100%;\n\tmargin: 15px 0;\n}\n\nth {\n\tbackground-color: #215fa4;\n\tcolor: #FFF;\n\tcursor: pointer;\n\tpadding: 5px 10px;\n}\n\nth small {\n\tfont-size: 9px;\n}\n\ntd, th {\n\ttext-align: left;\n}\n\na {\n\ttext-decoration: none;\n}\n\ntd a {\n\tcolor: #001c3b;\n\tdisplay: block;\n\tpadding: 5px 10px;\n}\nth a {\n\tpadding-left: 0\n}\n\ntr:nth-of-type(odd) {\n\tbackground-color: #E6E6E6;\n}\n\ntr:hover td {\n\tbackground-color:#CACACA;\n}\n\ntr:hover td a {\n\tcolor: #000;\n}\n\";\n<|endoftext|>"} {"text":"add some testextern crate memcache;\nextern crate r2d2;\nextern crate r2d2_memcache;\n\nuse r2d2_memcache::MemcacheConnectionManager;\n\n#[test]\nfn connect() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n}\n\n#[test]\nfn flush() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n conn.flush().unwrap();\n }\n}\n\n#[test]\nfn version() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n conn.version().unwrap();\n }\n}\n\n#[test]\nfn store() {\n let config = r2d2::Config::default();\n let manager = 
MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n let value = &memcache::Raw {\n bytes: b\"bar\",\n flags: 0,\n };\n conn.set(\"foo\", value).unwrap();\n conn.replace(\"foo\", value).unwrap();\n conn.add(\"foo\", value).unwrap();\n conn.append(\"foo\", value).unwrap();\n conn.prepend(\"foo\", value).unwrap();\n }\n}\n\n#[test]\nfn get() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n conn.flush().unwrap();\n let value = &memcache::Raw {\n bytes: b\"bar\",\n flags: 0,\n };\n conn.set(\"foo_get\", value).unwrap();\n let result: (Vec, u16) = conn.get(\"foo_get\").unwrap();\n assert!(result.0 == b\"bar\");\n assert!(result.1 == 0);\n }\n}\n\n#[test]\nfn delete() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n conn.delete(\"foo\").unwrap();\n }\n}\n\n#[test]\nfn incr() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n let value = &memcache::Raw {\n bytes: b\"100\",\n flags: 0\n };\n conn.set(\"foo_incr\", value).unwrap();\n assert!(conn.incr(\"foo_incr\", 1).unwrap() == Some(101));\n }\n}\n\n#[test]\nfn decr() {\n let config = r2d2::Config::default();\n let manager = MemcacheConnectionManager::new(\"localhost:11211\").unwrap();\n\n let pool = r2d2::Pool::new(config, manager).unwrap();\n {\n let pool = pool.clone();\n let mut conn = pool.get().unwrap();\n let value = &memcache::Raw {\n bytes: b\"100\",\n flags: 0\n };\n conn.set(\"foo_decr\", value).unwrap();\n assert!(conn.decr(\"foo_decr\", 1).unwrap() == Some(99));\n }\n}\n<|endoftext|>"} {"text":"use std::boxed::Box;\nuse std::cmp;\nuse std::collections::BTreeSet;\nuse std::ops::Range;\n\npub struct ItemList {\n clipping_range_max_len: usize,\n clipping_range_start: usize,\n highlighted_row: Option,\n line_indices: Box,\n marked_line_indices: BTreeSet,\n}\n\nimpl ItemList {\n pub fn new(clipping_range_max_len: usize) -> Self {\n assert!(clipping_range_max_len > 0);\n ItemList {\n clipping_range_max_len: clipping_range_max_len,\n clipping_range_start: 0,\n highlighted_row: None,\n line_indices: Box::new(0..0),\n marked_line_indices: BTreeSet::new(),\n }\n }\n\n pub fn highlighted_row(&self) -> Option {\n self.highlighted_row\n }\n\n pub fn len(&self) -> usize {\n self.line_indices.count()\n }\n\n pub fn line_indices_in_clipping_range(&self) -> Vec {\n self.line_indices.boxed_iter()\n .skip(self.clipping_range_start)\n .take(self.clipping_range_end())\n .collect()\n }\n\n pub fn selected_line_indices(&self) -> Vec {\n match self.marked_line_indices.len() {\n 0 => {\n match self.highlighted_row {\n None => Vec::new(),\n Some(row) => {\n let i = self.clipping_range_start + row;\n vec![self.line_indices.at(i)]\n }\n }\n }\n _ => self.marked_line_indices.iter().cloned().collect(),\n }\n }\n\n pub fn marked_rows(&self) -> Vec {\n self.line_indices_in_clipping_range().iter()\n .enumerate()\n .filter_map(|(i, idx)| {\n 
if self.marked_line_indices.contains(idx) { Some(i) } else { None }\n })\n .collect()\n }\n\n pub fn toggle_mark(&mut self) {\n if let Some(row) = self.highlighted_row {\n let i = self.clipping_range_start + row;\n let line_index = self.line_indices.at(i);\n if !self.marked_line_indices.remove(&line_index) {\n self.marked_line_indices.insert(line_index);\n }\n }\n }\n\n pub fn set_line_indices(&mut self, line_indices: Vec) {\n self.set_line_indices_with_box::>(Box::new(line_indices));\n }\n\n pub fn set_line_index_range(&mut self, range: Range) {\n self.set_line_indices_with_box::>(Box::new(range));\n }\n\n pub fn move_highlight_backward(&mut self) {\n if let Some(row) = self.highlighted_row {\n if row == 0 {\n self.scroll_backward();\n return;\n }\n self.highlighted_row = Some(row - 1);\n }\n }\n\n pub fn move_highlight_forward(&mut self) {\n if let Some(row) = self.highlighted_row {\n if Some(row) == self.max_row() {\n self.scroll_forward();\n return;\n }\n self.highlighted_row = Some(row + 1);\n }\n }\n\n pub fn scroll_backward(&mut self) {\n if self.clipping_range_start > 0 {\n self.clipping_range_start -= 1;\n }\n }\n\n pub fn scroll_forward(&mut self) {\n if self.clipping_range_end() < self.len() {\n self.clipping_range_start += 1;\n }\n }\n\n fn set_line_indices_with_box(&mut self, line_indices: Box) {\n self.line_indices = line_indices;\n let overrun = self.clipping_range_end().saturating_sub(self.len());\n self.clipping_range_start = self.clipping_range_start.saturating_sub(overrun);\n match (self.highlighted_row, self.max_row()) {\n (Some(row), Some(max_row)) if row > max_row => {\n self.highlighted_row = Some(max_row);\n }\n (None, Some(_)) => {\n self.highlighted_row = Some(0);\n }\n (_, None) => {\n self.highlighted_row = None;\n }\n (_, _) => { }\n }\n debug_assert!(\n (self.highlighted_row.is_some() && self.max_row().is_some()) ||\n (self.highlighted_row.is_none() && self.max_row().is_none()));\n }\n\n fn clipping_range_end(&self) -> usize {\n self.clipping_range_start + self.clipping_range_len()\n }\n\n fn clipping_range_len(&self) -> usize {\n cmp::min(self.len(), self.clipping_range_max_len)\n }\n\n fn max_row(&self) -> Option {\n match self.clipping_range_len() {\n 0 => None,\n i => Some(i - 1),\n }\n }\n}\n\npub trait Indices {\n fn at(&self, i: usize) -> usize;\n fn boxed_iter<'a>(&'a self) -> Box + 'a>;\n fn count(&self) -> usize;\n}\n\nimpl Indices for Range {\n fn at(&self, i: usize) -> usize {\n assert!(i < self.end);\n i\n }\n\n fn boxed_iter<'a>(&'a self) -> Box + 'a> {\n Box::new(self.clone())\n }\n\n fn count(&self) -> usize {\n self.clone().count()\n }\n}\n\nimpl Indices for Vec {\n fn at(&self, i: usize) -> usize {\n unsafe { *self.get_unchecked(i) }\n }\n\n fn boxed_iter<'a>(&'a self) -> Box + 'a> {\n Box::new(self.iter().map(|i| *i))\n }\n\n fn count(&self) -> usize {\n self.len()\n }\n}\nRename Indices::count() to Indices::len()use std::boxed::Box;\nuse std::cmp;\nuse std::collections::BTreeSet;\nuse std::ops::Range;\n\npub struct ItemList {\n clipping_range_max_len: usize,\n clipping_range_start: usize,\n highlighted_row: Option,\n line_indices: Box,\n marked_line_indices: BTreeSet,\n}\n\nimpl ItemList {\n pub fn new(clipping_range_max_len: usize) -> Self {\n assert!(clipping_range_max_len > 0);\n ItemList {\n clipping_range_max_len: clipping_range_max_len,\n clipping_range_start: 0,\n highlighted_row: None,\n line_indices: Box::new(0..0),\n marked_line_indices: BTreeSet::new(),\n }\n }\n\n pub fn highlighted_row(&self) -> Option {\n 
self.highlighted_row\n }\n\n pub fn len(&self) -> usize {\n self.line_indices.len()\n }\n\n pub fn line_indices_in_clipping_range(&self) -> Vec {\n self.line_indices.boxed_iter()\n .skip(self.clipping_range_start)\n .take(self.clipping_range_end())\n .collect()\n }\n\n pub fn selected_line_indices(&self) -> Vec {\n match self.marked_line_indices.len() {\n 0 => {\n match self.highlighted_row {\n None => Vec::new(),\n Some(row) => {\n let i = self.clipping_range_start + row;\n vec![self.line_indices.at(i)]\n }\n }\n }\n _ => self.marked_line_indices.iter().cloned().collect(),\n }\n }\n\n pub fn marked_rows(&self) -> Vec {\n self.line_indices_in_clipping_range().iter()\n .enumerate()\n .filter_map(|(i, idx)| {\n if self.marked_line_indices.contains(idx) { Some(i) } else { None }\n })\n .collect()\n }\n\n pub fn toggle_mark(&mut self) {\n if let Some(row) = self.highlighted_row {\n let i = self.clipping_range_start + row;\n let line_index = self.line_indices.at(i);\n if !self.marked_line_indices.remove(&line_index) {\n self.marked_line_indices.insert(line_index);\n }\n }\n }\n\n pub fn set_line_indices(&mut self, line_indices: Vec) {\n self.set_line_indices_with_box::>(Box::new(line_indices));\n }\n\n pub fn set_line_index_range(&mut self, range: Range) {\n self.set_line_indices_with_box::>(Box::new(range));\n }\n\n pub fn move_highlight_backward(&mut self) {\n if let Some(row) = self.highlighted_row {\n if row == 0 {\n self.scroll_backward();\n return;\n }\n self.highlighted_row = Some(row - 1);\n }\n }\n\n pub fn move_highlight_forward(&mut self) {\n if let Some(row) = self.highlighted_row {\n if Some(row) == self.max_row() {\n self.scroll_forward();\n return;\n }\n self.highlighted_row = Some(row + 1);\n }\n }\n\n pub fn scroll_backward(&mut self) {\n if self.clipping_range_start > 0 {\n self.clipping_range_start -= 1;\n }\n }\n\n pub fn scroll_forward(&mut self) {\n if self.clipping_range_end() < self.len() {\n self.clipping_range_start += 1;\n }\n }\n\n fn set_line_indices_with_box(&mut self, line_indices: Box) {\n self.line_indices = line_indices;\n let overrun = self.clipping_range_end().saturating_sub(self.len());\n self.clipping_range_start = self.clipping_range_start.saturating_sub(overrun);\n match (self.highlighted_row, self.max_row()) {\n (Some(row), Some(max_row)) if row > max_row => {\n self.highlighted_row = Some(max_row);\n }\n (None, Some(_)) => {\n self.highlighted_row = Some(0);\n }\n (_, None) => {\n self.highlighted_row = None;\n }\n (_, _) => { }\n }\n debug_assert!(\n (self.highlighted_row.is_some() && self.max_row().is_some()) ||\n (self.highlighted_row.is_none() && self.max_row().is_none()));\n }\n\n fn clipping_range_end(&self) -> usize {\n self.clipping_range_start + self.clipping_range_len()\n }\n\n fn clipping_range_len(&self) -> usize {\n cmp::min(self.len(), self.clipping_range_max_len)\n }\n\n fn max_row(&self) -> Option {\n match self.clipping_range_len() {\n 0 => None,\n i => Some(i - 1),\n }\n }\n}\n\npub trait Indices {\n fn at(&self, i: usize) -> usize;\n fn boxed_iter<'a>(&'a self) -> Box + 'a>;\n fn len(&self) -> usize;\n}\n\nimpl Indices for Range {\n fn at(&self, i: usize) -> usize {\n assert!(i < self.end);\n i\n }\n\n fn boxed_iter<'a>(&'a self) -> Box + 'a> {\n Box::new(self.clone())\n }\n\n fn len(&self) -> usize {\n ::len(&self)\n }\n}\n\nimpl Indices for Vec {\n fn at(&self, i: usize) -> usize {\n unsafe { *self.get_unchecked(i) }\n }\n\n fn boxed_iter<'a>(&'a self) -> Box + 'a> {\n Box::new(self.iter().map(|i| *i))\n }\n\n fn len(&self) -> usize 
{\n Vec::len(&self)\n }\n}\n<|endoftext|>"} {"text":"Simplify index sub-module.<|endoftext|>"} {"text":"use std::sync::Arc;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse std::cell::UnsafeCell;\nuse std::marker::Sync;\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem {\n item: Arc,\n}\n\nimpl Clone for SharedItem {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl Deref for SharedItem {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError {\n error: Arc,\n}\n\nimpl Clone for SharedError {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl Deref for SharedError {\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n\nimpl SharedItem {\n fn new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl SharedError {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\n\/\/\/ The data that has to be synced to implement `Shared`,\n\/\/\/ in order to satisfy the `Future` trait's constraints.\nstruct SyncedInner\n where F: Future\n{\n original_future: F, \/\/ The original future\n}\n\nstruct Inner\n where F: Future\n{\n synced_inner: Lock>,\n tasks_unpark_started: AtomicBool,\n \/\/\/ When original future is polled and ready, unparks all the tasks in that channel\n tasks_receiver: Lock>,\n \/\/\/ The original future result wrapped with `SharedItem`\/`SharedError`\n result: UnsafeCell, SharedError>>>,\n}\n\nunsafe impl Sync for Inner where F: Future {}\n\n\/\/\/ TODO: doc\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared\n where F: Future\n{\n inner: Arc>,\n tasks_sender: Sender,\n}\n\nimpl Shared\n where F: Future\n{\n fn result_to_polled_result(result: Result, SharedError>)\n -> Result>, SharedError> {\n match result {\n Ok(item) => Ok(Async::Ready(item)),\n Err(error) => Err(error),\n }\n }\n}\n\npub fn new(future: F) -> Shared\n where F: Future\n{\n let (tasks_sender, tasks_receiver) = channel();\n Shared {\n inner: Arc::new(Inner {\n synced_inner: Lock::new(SyncedInner { original_future: future }),\n tasks_unpark_started: AtomicBool::new(false),\n tasks_receiver: Lock::new(tasks_receiver),\n result: UnsafeCell::new(None),\n }),\n tasks_sender: tasks_sender,\n }\n}\n\nimpl Future for Shared\n where F: Future\n{\n type Item = SharedItem;\n type Error = SharedError;\n\n fn poll(&mut self) -> Poll {\n \/\/ The logic is as follows:\n \/\/ 1. Check if the result is ready (with tasks_unpark_started)\n \/\/ - If the result is ready, return it.\n \/\/ - Otherwise:\n \/\/ 2. Try lock the self.inner.synced_inner:\n \/\/ - If successfully locked, poll the original future.\n \/\/ If the future is ready, unpark the tasks from\n \/\/ self.inner.tasks_receiver and return the result.\n \/\/ - If the future is not ready:\n \/\/ 3. Create a task and send it through self.tasks_sender.\n \/\/ 4. Check again if the result is ready (with tasks_unpark_started)\n \/\/ 5. Return the result if it's ready. 
It is necessary because otherwise there could be\n \/\/ a race between the task sending and the thread receiving the tasks.\n\n let mut should_unpark_tasks: bool = false;\n\n \/\/ If the result is ready, just return it\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return Self::result_to_polled_result(result.clone());\n }\n }\n }\n\n \/\/ The result was not ready.\n match self.inner.synced_inner.try_lock() {\n Some(mut inner_guard) => {\n let ref mut inner = *inner_guard;\n unsafe {\n \/\/ Other thread could poll the result, so we check if result has a value\n if (*self.inner.result.get()).is_some() {\n should_unpark_tasks = true;\n } else {\n match inner.original_future.poll() {\n Ok(Async::Ready(item)) => {\n *self.inner.result.get() = Some(Ok(SharedItem::new(item)));\n should_unpark_tasks = true;\n }\n Err(error) => {\n *self.inner.result.get() = Some(Err(SharedError::new(error)));\n should_unpark_tasks = true;\n }\n Ok(Async::NotReady) => {} \/\/ Will be handled later\n }\n }\n }\n }\n None => {} \/\/ Will be handled later\n }\n\n if should_unpark_tasks {\n self.inner.tasks_unpark_started.store(true, Ordering::Relaxed);\n match self.inner.tasks_receiver.try_lock() {\n Some(tasks_receiver_guard) => {\n let ref tasks_receiver = *tasks_receiver_guard;\n loop {\n match tasks_receiver.try_recv() {\n Ok(task) => task.unpark(),\n _ => break,\n }\n }\n }\n None => {} \/\/ Other thread is unparking the tasks\n }\n\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return Self::result_to_polled_result(result.clone());\n } else {\n \/\/ How should I use unwrap here?\n \/\/ The compiler says cannot \"move out of borrowed content\"\n unreachable!();\n }\n }\n }\n\n let t = task::park();\n let _ = self.tasks_sender.send(t);\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n \/\/ If the tasks unpark has started, self.inner.result has a value (not None).\n \/\/ The result must be read here because it is possible that the task,\n \/\/ t (see variable above), had not been unparked.\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return Self::result_to_polled_result(result.clone());\n } else {\n \/\/ How should I use unwrap here?\n \/\/ The compiler says cannot \"move out of borrowed content\"\n unreachable!();\n }\n }\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl Clone for Shared\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared {\n inner: self.inner.clone(),\n tasks_sender: self.tasks_sender.clone(),\n }\n }\n}\nUse RwLock synced methods instead of unsafe blocksuse std::sync::{Arc, RwLock};\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem {\n item: Arc,\n}\n\nimpl Clone for SharedItem {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl Deref for SharedItem {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError {\n error: Arc,\n}\n\nimpl Clone for SharedError {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl Deref for SharedError 
{\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n\nimpl SharedItem {\n fn new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl SharedError {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\nstruct SyncedInner\n where F: Future\n{\n \/\/\/ The original future that is wrapped as `Shared`\n original_future: F,\n \/\/\/ When original future is polled and ready, all the tasks in that channel will be unparked\n tasks_receiver: Receiver,\n}\n\nstruct Inner\n where F: Future\n{\n \/\/\/ The original future and the tasks receiver behind a mutex\n synced_inner: Lock>,\n \/\/\/ Indicates whether the result is ready, and the tasks unparking has been started\n tasks_unpark_started: AtomicBool,\n \/\/\/ The original future result wrapped with `SharedItem`\/`SharedError`\n result: RwLock, SharedError>>>,\n}\n\n\n\/\/\/ TODO: doc\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared\n where F: Future\n{\n inner: Arc>,\n tasks_sender: Sender,\n}\n\nimpl Shared\n where F: Future\n{\n fn result_to_polled_result(result: Result, SharedError>)\n -> Result>, SharedError> {\n match result {\n Ok(item) => Ok(Async::Ready(item)),\n Err(error) => Err(error),\n }\n }\n\n fn unpark_tasks(&self, tasks_receiver: &mut Receiver) {\n self.inner.tasks_unpark_started.store(true, Ordering::Relaxed);\n loop {\n match tasks_receiver.try_recv() {\n Ok(task) => task.unpark(),\n _ => break,\n }\n }\n }\n}\n\npub fn new(future: F) -> Shared\n where F: Future\n{\n let (tasks_sender, tasks_receiver) = channel();\n Shared {\n inner: Arc::new(Inner {\n synced_inner: Lock::new(SyncedInner {\n original_future: future,\n tasks_receiver: tasks_receiver,\n }),\n tasks_unpark_started: AtomicBool::new(false),\n result: RwLock::new(None),\n }),\n tasks_sender: tasks_sender,\n }\n}\n\nimpl Future for Shared\n where F: Future\n{\n type Item = SharedItem;\n type Error = SharedError;\n\n fn poll(&mut self) -> Poll {\n \/\/ The logic is as follows:\n \/\/ 1. Check if the result is ready (with tasks_unpark_started)\n \/\/ - If the result is ready, return it.\n \/\/ - Otherwise:\n \/\/ 2. Try lock the self.inner.original_future:\n \/\/ - If successfully locked, poll the original future.\n \/\/ If the future is ready, unpark the tasks from\n \/\/ self.inner.tasks_receiver and return the result.\n \/\/ - If the future is not ready:\n \/\/ 3. Create a task and send it through self.tasks_sender.\n \/\/ 4. Check again if the result is ready (with tasks_unpark_started)\n \/\/ 5. Return the result if it's ready. 
It is necessary because otherwise there could be\n \/\/ a race between the task sending and the thread receiving the tasks.\n\n \/\/ If the result is ready, just return it\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n return Self::result_to_polled_result(self.inner\n .result\n .read()\n .unwrap()\n .clone()\n .unwrap());\n }\n\n \/\/ The result was not ready.\n \/\/ Try lock the original future.\n match self.inner.synced_inner.try_lock() {\n Some(mut synced_inner) => {\n let ref mut synced_inner = *synced_inner;\n let ref mut original_future = synced_inner.original_future;\n let mut result = self.inner.result.write().unwrap();\n \/\/ Other thread could already poll the result, so we check if result has a value\n if result.is_none() {\n match original_future.poll() {\n Ok(Async::Ready(item)) => {\n *result = Some(Ok(SharedItem::new(item)));\n self.unpark_tasks(&mut synced_inner.tasks_receiver);\n return Self::result_to_polled_result(result.clone().unwrap());\n }\n Err(error) => {\n *result = Some(Err(SharedError::new(error)));\n self.unpark_tasks(&mut synced_inner.tasks_receiver);\n return Self::result_to_polled_result(result.clone().unwrap());\n }\n Ok(Async::NotReady) => {} \/\/ A task will be parked\n }\n }\n }\n None => {} \/\/ A task will be parked\n }\n\n let t = task::park();\n let _ = self.tasks_sender.send(t);\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n \/\/ If the tasks unpark has been started, self.inner.result has a value (not None).\n \/\/ The result must be read now even after sending the parked task\n \/\/ because it is possible that the task, t (see variable above),\n \/\/ had not been unparked.\n \/\/ That's because self.inner.tasks_receiver could has tasks that will never be unparked,\n \/\/ because the tasks receiving loop could have ended before receiving\n \/\/ the new task, t).\n return Self::result_to_polled_result(self.inner\n .result\n .read()\n .unwrap()\n .clone()\n .unwrap());\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl Clone for Shared\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared {\n inner: self.inner.clone(),\n tasks_sender: self.tasks_sender.clone(),\n }\n }\n}\n<|endoftext|>"} {"text":"\/\/ This test attempts to force the dynamic linker to resolve\n\/\/ external symbols as close to the red zone as possible.\n\nuse std;\nimport task;\nimport std::rand;\n\nextern mod rustrt {\n fn debug_get_stk_seg() -> *u8;\n\n fn rust_get_sched_id() -> libc::intptr_t;\n fn last_os_error() -> ~str;\n fn rust_getcwd() -> ~str;\n fn get_task_id() -> libc::intptr_t;\n fn sched_threads();\n fn rust_get_task();\n}\n\nfn calllink01() { rustrt::rust_get_sched_id(); }\nfn calllink02() { rustrt::last_os_error(); }\nfn calllink03() { rustrt::rust_getcwd(); }\nfn calllink08() { rustrt::get_task_id(); }\nfn calllink09() { rustrt::sched_threads(); }\nfn calllink10() { rustrt::rust_get_task(); }\n\nfn runtest(f: fn~(), frame_backoff: u32) {\n runtest2(f, frame_backoff, 0 as *u8);\n}\n\nfn runtest2(f: fn~(), frame_backoff: u32, last_stk: *u8) -> u32 {\n let curr_stk = rustrt::debug_get_stk_seg();\n if (last_stk != curr_stk && last_stk != 0 as *u8) {\n \/\/ We switched stacks, go back and try to hit the dynamic linker\n frame_backoff\n } else {\n let frame_backoff = runtest2(f, frame_backoff, curr_stk);\n if frame_backoff > 1u32 {\n frame_backoff - 1u32\n } else if frame_backoff == 1u32 {\n f();\n 0u32\n } else {\n 0u32\n }\n }\n}\n\nfn main() {\n let fns = ~[\n calllink01,\n calllink02,\n calllink03,\n calllink08,\n calllink09,\n 
calllink10\n ];\n let rng = rand::rng();\n for fns.each |f| {\n let sz = rng.next() % 256u32 + 256u32;\n let frame_backoff = rng.next() % 10u32 + 1u32;\n task::try(|| runtest(f, frame_backoff) );\n }\n}\nRemove nonexistent imports\/\/ This test attempts to force the dynamic linker to resolve\n\/\/ external symbols as close to the red zone as possible.\n\nextern mod rustrt {\n fn debug_get_stk_seg() -> *u8;\n\n fn rust_get_sched_id() -> libc::intptr_t;\n fn last_os_error() -> ~str;\n fn rust_getcwd() -> ~str;\n fn get_task_id() -> libc::intptr_t;\n fn sched_threads();\n fn rust_get_task();\n}\n\nfn calllink01() { rustrt::rust_get_sched_id(); }\nfn calllink02() { rustrt::last_os_error(); }\nfn calllink03() { rustrt::rust_getcwd(); }\nfn calllink08() { rustrt::get_task_id(); }\nfn calllink09() { rustrt::sched_threads(); }\nfn calllink10() { rustrt::rust_get_task(); }\n\nfn runtest(f: fn~(), frame_backoff: u32) {\n runtest2(f, frame_backoff, 0 as *u8);\n}\n\nfn runtest2(f: fn~(), frame_backoff: u32, last_stk: *u8) -> u32 {\n let curr_stk = rustrt::debug_get_stk_seg();\n if (last_stk != curr_stk && last_stk != 0 as *u8) {\n \/\/ We switched stacks, go back and try to hit the dynamic linker\n frame_backoff\n } else {\n let frame_backoff = runtest2(f, frame_backoff, curr_stk);\n if frame_backoff > 1u32 {\n frame_backoff - 1u32\n } else if frame_backoff == 1u32 {\n f();\n 0u32\n } else {\n 0u32\n }\n }\n}\n\nfn main() {\n let fns = ~[\n calllink01,\n calllink02,\n calllink03,\n calllink08,\n calllink09,\n calllink10\n ];\n let rng = rand::rng();\n for fns.each |f| {\n let sz = rng.next() % 256u32 + 256u32;\n let frame_backoff = rng.next() % 10u32 + 1u32;\n task::try(|| runtest(f, frame_backoff) );\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-linux see joyent\/libuv#1189\n\/\/ ignore-fast\n\/\/ ignore-android needs extra network permissions\n\/\/ exec-env:RUST_LOG=debug\n\nuse std::libc;\nuse std::io::net::ip::{Ipv4Addr, SocketAddr};\nuse std::io::net::tcp::{TcpListener, TcpStream};\nuse std::io::{Acceptor, Listener};\n\nfn main() {\n \/\/ This test has a chance to time out, try to not let it time out\n spawn(proc() {\n use std::io::timer;\n timer::sleep(30 * 1000);\n println!(\"timed out!\");\n unsafe { libc::exit(1) }\n });\n\n let addr = SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 0 };\n let (p, c) = Chan::new();\n spawn(proc() {\n let mut listener = TcpListener::bind(addr).unwrap();\n c.send(listener.socket_name().unwrap());\n let mut acceptor = listener.listen();\n loop {\n let mut stream = match acceptor.accept() {\n Ok(stream) => stream,\n Err(error) => {\n debug!(\"accept failed: {:?}\", error);\n continue;\n }\n };\n stream.read_byte();\n stream.write([2]);\n }\n });\n let addr = p.recv();\n\n let (p, c) = Chan::new();\n for _ in range(0, 1000) {\n let c = c.clone();\n spawn(proc() {\n match TcpStream::connect(addr) {\n Ok(stream) => {\n let mut stream = stream;\n stream.write([1]);\n let mut buf = [0];\n stream.read(buf);\n },\n Err(e) => debug!(\"{:?}\", e)\n }\n c.send(());\n });\n }\n\n \/\/ Wait for all clients to exit, but don't wait for the server to exit. 
The\n \/\/ server just runs infinitely.\n drop(c);\n for _ in range(0, 1000) {\n p.recv();\n }\n unsafe { libc::exit(0) }\n}\nauto merge of #12880 : tedhorst\/rust\/master, r=alexcrichton\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-linux see joyent\/libuv#1189\n\/\/ ignore-fast\n\/\/ ignore-android needs extra network permissions\n\/\/ exec-env:RUST_LOG=debug\n\nuse std::libc;\nuse std::io::net::ip::{Ipv4Addr, SocketAddr};\nuse std::io::net::tcp::{TcpListener, TcpStream};\nuse std::io::{Acceptor, Listener};\n\nfn main() {\n \/\/ This test has a chance to time out, try to not let it time out\n spawn(proc() {\n use std::io::timer;\n timer::sleep(30 * 1000);\n println!(\"timed out!\");\n unsafe { libc::exit(1) }\n });\n\n let addr = SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 0 };\n let (tx, rx) = channel();\n spawn(proc() {\n let mut listener = TcpListener::bind(addr).unwrap();\n tx.send(listener.socket_name().unwrap());\n let mut acceptor = listener.listen();\n loop {\n let mut stream = match acceptor.accept() {\n Ok(stream) => stream,\n Err(error) => {\n debug!(\"accept failed: {:?}\", error);\n continue;\n }\n };\n stream.read_byte();\n stream.write([2]);\n }\n });\n let addr = rx.recv();\n\n let (tx, rx) = channel();\n for _ in range(0, 1000) {\n let tx = tx.clone();\n spawn(proc() {\n match TcpStream::connect(addr) {\n Ok(stream) => {\n let mut stream = stream;\n stream.write([1]);\n let mut buf = [0];\n stream.read(buf);\n },\n Err(e) => debug!(\"{:?}\", e)\n }\n tx.send(());\n });\n }\n\n \/\/ Wait for all clients to exit, but don't wait for the server to exit. 
The\n \/\/ server just runs infinitely.\n drop(tx);\n for _ in range(0, 1000) {\n rx.recv();\n }\n unsafe { libc::exit(0) }\n}\n<|endoftext|>"} {"text":"Added a (non-working) load_bin_file.rs<|endoftext|>"} {"text":"Do not ignore errors when collecting links<|endoftext|>"} {"text":"fix unix process mutex<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npub use device::target::Color;\nuse device::target::{Plane, PlaneEmpty};\n\nstatic MAX_COLOR_TARGETS: uint = 4;\n\n\/\/\/ A complete `Frame`, which is the result of rendering.\npub struct Frame {\n \/\/\/ Each color component has its own buffer.\n pub colors: [Plane, ..MAX_COLOR_TARGETS],\n \/\/\/ The depth buffer for this frame.\n pub depth: Plane,\n \/\/\/ The stencil buffer for this frame.\n pub stencil: Plane,\n}\n\nimpl Frame {\n \/\/\/ Create an empty `Frame`, which corresponds to the \"default framebuffer\", which for now\n \/\/\/ renders directly to the window that was created with the OpenGL context.\n pub fn new() -> Frame {\n Frame {\n colors: [PlaneEmpty, ..MAX_COLOR_TARGETS],\n depth: PlaneEmpty,\n stencil: PlaneEmpty,\n }\n }\n\n \/\/\/ An empty frame is considered to match the default framebuffer\n pub fn is_default(&self) -> bool {\n self.colors.iter().all(|&p| p==PlaneEmpty) &&\n self.depth == PlaneEmpty &&\n self.stencil == PlaneEmpty\n }\n}\nFrame clarification\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npub use device::target::Color;\nuse device::target::{Plane, PlaneEmpty};\n\nstatic MAX_COLOR_TARGETS: uint = 4;\n\n\/\/\/ A complete `Frame`, which is the result of rendering.\npub struct Frame {\n \/\/\/ Each color component has its own buffer.\n pub colors: [Plane, ..MAX_COLOR_TARGETS],\n \/\/\/ The depth buffer for this frame.\n pub depth: Plane,\n \/\/\/ The stencil buffer for this frame.\n pub stencil: Plane,\n}\n\nimpl Frame {\n \/\/\/ Create an empty `Frame`, which corresponds to the 'default framebuffer', which for now\n \/\/\/ renders directly to the window that was created with the OpenGL context.\n pub fn new() -> Frame {\n Frame {\n colors: [PlaneEmpty, ..MAX_COLOR_TARGETS],\n depth: PlaneEmpty,\n stencil: PlaneEmpty,\n }\n }\n\n \/\/\/ Returns true if this framebuffer is associated with the main window (matches `Frame::new`\n \/\/\/ exactly).\n pub fn is_default(&self) -> bool {\n self.colors.iter().all(|&p| 
p==PlaneEmpty) &&\n self.depth == PlaneEmpty &&\n self.stencil == PlaneEmpty\n }\n}\n<|endoftext|>"} {"text":"style(tokenizer): rustfmt<|endoftext|>"} {"text":"Use single-line comments in preference to multi-line comments<|endoftext|>"} {"text":"Fix up public key search logic<|endoftext|>"} {"text":"Auto merge of #396 - kbknapp:fix-nightly, r=kbknapp<|endoftext|>"} {"text":"smart move works now<|endoftext|>"} {"text":"adds node devices basics\/*\n * This library is free software; you can redistribute it and\/or\n * modify it under the terms of the GNU Lesser General Public\n * License as published by the Free Software Foundation; either\n * version 2.1 of the License, or (at your option) any later version.\n *\n * This library is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n * Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public\n * License along with this library. If not, see\n * .\n *\n * Sahid Orentino Ferdjaoui \n *\/\n\n#![allow(improper_ctypes)]\n\nextern crate libc;\n\nuse std::ffi::{CString, CStr};\nuse std::str;\n\nuse connect::{Connect, virConnectPtr};\nuse error::Error;\n\n#[allow(non_camel_case_types)]\n#[repr(C)]\npub struct virNodeDevice {\n}\n\n#[allow(non_camel_case_types)]\npub type virNodeDevicePtr = *const virNodeDevice;\n\n#[link(name = \"virt\")]\nextern {\n fn virNodeDeviceLookupByName(c: virConnectPtr,id: *const libc::c_char) -> virNodeDevicePtr;\n fn virNodeDeviceCreateXML(c: virConnectPtr, xml: *const libc::c_char, flags: libc::c_uint) -> virNodeDevicePtr;\n fn virNodeDeviceDestroy(d: virNodeDevicePtr) -> libc::c_int;\n fn virNodeDeviceFree(d: virNodeDevicePtr) -> libc::c_int;\n fn virNodeDeviceGetName(d: virNodeDevicePtr) -> *const libc::c_char;\n fn virNodeDeviceGetXMLDesc(d: virNodeDevicePtr, flags: libc::c_uint) -> *const libc::c_char;\n}\n\npub type NodeDeviceXMLFlags = self::libc::c_uint;\npub const VIR_INTERFACE_XML_INACTIVE:NodeDeviceXMLFlags = 1 << 0;\n\npub struct NodeDevice {\n pub d: virNodeDevicePtr\n}\n\nimpl NodeDevice {\n\n pub fn as_ptr(&self) -> virNodeDevicePtr {\n self.d\n }\n\n pub fn lookup_by_name(conn: &Connect, id: &str) -> Result {\n unsafe {\n let ptr = virNodeDeviceLookupByName(\n conn.as_ptr(), CString::new(id).unwrap().as_ptr());\n if ptr.is_null() {\n return Err(Error::new());\n }\n return Ok(NodeDevice{d: ptr});\n }\n }\n\n pub fn create_xml(conn: &Connect, xml: &str, flags: u32) -> Result {\n unsafe {\n let ptr = virNodeDeviceCreateXML(\n conn.as_ptr(), CString::new(xml).unwrap().as_ptr(),\n flags as libc::c_uint);\n if ptr.is_null() {\n return Err(Error::new());\n }\n return Ok(NodeDevice{d: ptr});\n }\n }\n\n pub fn get_name(&self) -> Result<&str, Error> {\n unsafe {\n let n = virNodeDeviceGetName(self.d);\n if n.is_null() {\n return Err(Error::new())\n }\n return Ok(str::from_utf8(\n CStr::from_ptr(n).to_bytes()).unwrap())\n }\n }\n\n pub fn get_xml_desc(&self, flags: u32) -> Result<&str, Error> {\n unsafe {\n let xml = virNodeDeviceGetXMLDesc(self.d, flags as libc::c_uint);\n if xml.is_null() {\n return Err(Error::new())\n }\n return Ok(str::from_utf8(\n CStr::from_ptr(xml).to_bytes()).unwrap())\n }\n }\n\n pub fn destroy(&self) -> Result<(), Error> {\n unsafe {\n if virNodeDeviceDestroy(self.d) == -1 {\n return Err(Error::new());\n }\n return Ok(());\n }\n }\n\n pub fn free(&self) -> Result<(), Error> {\n unsafe {\n if 
virNodeDeviceFree(self.d) == -1 {\n return Err(Error::new());\n }\n return Ok(());\n }\n }\n}\n<|endoftext|>"} {"text":"use a request object to create new requests; this allows us to implement e.g. pagers or other useful stuff later#![allow(dead_code)]\n#![allow(unused_imports)]\n#![allow(warnings)]\n#![allow(unused)]\n\n\nuse error;\nuse reqwest;\nuse reqwest::IntoUrl;\nuse response::Response;\nuse serde_derive;\nuse serde_json;\nuse serde;\nuse serde::Serializer;\nuse std;\nuse std::collections::HashMap;\nuse std::process::exit;\nuse std::result::Result;\nuse std::thread;\nuse std::time;\nuse types;\n\n\n\n\n#[derive(Debug, Clone)]\npub struct Request<'a> {\n pub url: &'a str,\n parameter: HashMap<&'a str, String>,\n}\n\n\n\nimpl<'a> Request<'a> {\n pub fn new(url: &'a str) -> Self {\n Self {\n parameter: HashMap::new(),\n url: url,\n }\n }\n\n\n pub fn get_parameter(&self) -> HashMap<&'a str, String> {\n self.parameter.to_owned()\n }\n\n\n pub fn get_url(&self) -> &'a str {\n self.url\n }\n\n\n pub fn add_parameter(&mut self, parameter: &'a str, value: String) {\n self.parameter.insert(parameter, value);\n }\n\n\n pub fn remove_parameter(&mut self, parameter: &str) {\n self.parameter.remove(parameter);\n }\n}\n<|endoftext|>"} {"text":"good option<|endoftext|>"} {"text":"some additional changes<|endoftext|>"} {"text":"Attempted to add a shape handle with method forwarding but it doesn't yet compile<|endoftext|>"} {"text":"Add a test\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ @has macros\/macro.my_macro!.html \/\/pre 'macro_rules! my_macro {'\n\/\/ @has - \/\/pre '() => { ... };'\n\/\/ @has - \/\/pre '($a:tt) => { ... };'\n\/\/ @has - \/\/pre '($e:expr) => { ... };'\n#[macro_export]\nmacro_rules! my_macro {\n () => [];\n ($a:tt) => ();\n ($e:expr) => {};\n}\n<|endoftext|>"} {"text":"Add test for #66930\/\/ check-pass\n\/\/ compile-flags: --emit=mir,link\n\/\/ Regression test for #66930, this ICE requires `--emit=mir` flag.\n\nstatic UTF8_CHAR_WIDTH: [u8; 0] = [];\n\npub fn utf8_char_width(b: u8) -> usize {\n UTF8_CHAR_WIDTH[b as usize] as usize\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"Fix Formatting in build.rs<|endoftext|>"} {"text":"Auto merge of #26041 - Nashenas88:sync-send-libcore-char, r=alexcrichton\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ pretty-expanded FIXME #23616\n\n#![feature(collections)]\n\nfn is_sync(_: T) where T: Sync {}\nfn is_send(_: T) where T: Send {}\n\nmacro_rules! 
all_sync_send {\n ($ctor:expr, $($iter:ident),+) => ({\n $(\n let mut x = $ctor;\n is_sync(x.$iter());\n let mut y = $ctor;\n is_send(y.$iter());\n )+\n })\n}\n\nfn main() {\n \/\/ for char.rs\n all_sync_send!(\"Я\", escape_default, escape_unicode);\n\n \/\/ for iter.rs\n \/\/ FIXME\n\n \/\/ for option.rs\n \/\/ FIXME\n\n \/\/ for result.rs\n \/\/ FIXME\n\n \/\/ for slice.rs\n \/\/ FIXME\n\n \/\/ for str\/mod.rs\n \/\/ FIXME\n}\n<|endoftext|>"} {"text":"remove useless code<|endoftext|>"} {"text":"Add first integration testextern crate ldap;\n\nuse std::thread;\nuse std::time::Duration;\n\nuse ldap::LDAP;\nuse ldap::Result;\n\n\n#[test]\nfn test_bind() {\n use ldap::bind::{ldap_bind_s, ldap_unbind};\n\n let mut conn = LDAP::connect(\"localhost:389\").unwrap();\n\n ldap_bind_s(&mut conn, \"cn=root,dc=aicube,dc=renet\".to_string(), \"secret\".to_string());\n\n thread::sleep(Duration::new(1, 0));\n\n conn.read();\n\n ldap_unbind(&mut conn);\n\n thread::sleep(Duration::new(1, 0));\n\n conn.read();\n}\n<|endoftext|>"} {"text":"Add FxHasher, a faster alternative to FnvHasher.\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::{HashMap, HashSet};\nuse std::default::Default;\nuse std::hash::{Hasher, Hash, BuildHasherDefault};\nuse std::ops::BitXor;\n\npub type FxHashMap = HashMap>;\npub type FxHashSet = HashSet>;\n\n#[allow(non_snake_case)]\npub fn FxHashMap() -> FxHashMap {\n HashMap::default()\n}\n\n#[allow(non_snake_case)]\npub fn FxHashSet() -> FxHashSet {\n HashSet::default()\n}\n\n\/\/\/ A speedy hash algorithm for use within rustc. The hashmap in libcollections\n\/\/\/ by default uses SipHash which isn't quite as speedy as we want. In the\n\/\/\/ compiler we're not really worried about DOS attempts, so we use a fast\n\/\/\/ non-cryptographic hash.\n\/\/\/\n\/\/\/ This is the same as the algorithm used by Firefox -- which is a homespun\n\/\/\/ one not based on any widely-known algorithm -- though modified to produce\n\/\/\/ 64-bit hash values instead of 32-bit hash values. 
It consistently\n\/\/\/ out-performs an FNV-based hash within rustc itself -- the collision rate is\n\/\/\/ similar or slightly worse than FNV, but the speed of the hash function\n\/\/\/ itself is much higher because it works on up to 8 bytes at a time.\npub struct FxHasher {\n hash: usize\n}\n\n#[cfg(target_pointer_width = \"32\")]\nconst K: usize = 0x9e3779b9;\n#[cfg(target_pointer_width = \"64\")]\nconst K: usize = 0x517cc1b727220a95;\n\nimpl Default for FxHasher {\n #[inline]\n fn default() -> FxHasher {\n FxHasher { hash: 0 }\n }\n}\n\nimpl FxHasher {\n #[inline]\n fn add_to_hash(&mut self, i: usize) {\n self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K);\n }\n}\n\nimpl Hasher for FxHasher {\n #[inline]\n fn write(&mut self, bytes: &[u8]) {\n for byte in bytes {\n let i = *byte;\n self.add_to_hash(i as usize);\n }\n }\n\n #[inline]\n fn write_u8(&mut self, i: u8) {\n self.add_to_hash(i as usize);\n }\n\n #[inline]\n fn write_u16(&mut self, i: u16) {\n self.add_to_hash(i as usize);\n }\n\n #[inline]\n fn write_u32(&mut self, i: u32) {\n self.add_to_hash(i as usize);\n }\n\n #[cfg(target_pointer_width = \"32\")]\n #[inline]\n fn write_u64(&mut self, i: u64) {\n self.add_to_hash(i as usize);\n self.add_to_hash((i >> 32) as usize);\n }\n\n #[cfg(target_pointer_width = \"64\")]\n #[inline]\n fn write_u64(&mut self, i: u64) {\n self.add_to_hash(i as usize);\n }\n\n #[inline]\n fn write_usize(&mut self, i: usize) {\n self.add_to_hash(i);\n }\n\n #[inline]\n fn finish(&self) -> u64 {\n self.hash as u64\n }\n}\n\npub fn hash(v: &T) -> u64 {\n let mut state = FxHasher::default();\n v.hash(&mut state);\n state.finish()\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Atomic types\n *\/\n\nuse unstable::intrinsics;\nuse cast;\nuse option::{Option,Some,None};\nuse libc::c_void;\nuse ops::Drop;\n\npub struct AtomicFlag {\n priv v: int\n}\n\npub struct AtomicBool {\n priv v: uint\n}\n\npub struct AtomicInt {\n priv v: int\n}\n\npub struct AtomicUint {\n priv v: uint\n}\n\npub struct AtomicPtr {\n priv p: *mut T\n}\n\npub struct AtomicOption {\n priv p: *mut c_void\n}\n\npub enum Ordering {\n Release,\n Acquire,\n SeqCst\n}\n\n\nimpl AtomicFlag {\n\n fn new() -> AtomicFlag {\n AtomicFlag { v: 0 }\n }\n\n \/**\n * Clears the atomic flag\n *\/\n #[inline(always)]\n fn clear(&mut self, order: Ordering) {\n unsafe {atomic_store(&mut self.v, 0, order)}\n }\n\n #[inline(always)]\n \/**\n * Sets the flag if it was previously unset, returns the previous value of the\n * flag.\n *\/\n fn test_and_set(&mut self, order: Ordering) -> bool {\n unsafe {atomic_compare_and_swap(&mut self.v, 0, 1, order) > 0}\n }\n}\n\nimpl AtomicBool {\n fn new(v: bool) -> AtomicBool {\n AtomicBool { v: if v { 1 } else { 0 } }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> bool {\n unsafe { atomic_load(&self.v, order) > 0 }\n }\n\n #[inline(always)]\n fn store(&mut self, val: bool, order: Ordering) {\n let val = if val { 1 } else { 0 };\n\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: bool, order: Ordering) -> bool {\n let val = if val { 1 } else { 0 };\n\n unsafe { atomic_swap(&mut self.v, val, order) > 0}\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: bool, new: bool, order: Ordering) -> bool {\n let old = if old { 1 } else { 0 };\n let new = if new { 1 } else { 0 };\n\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) > 0 }\n }\n}\n\nimpl AtomicInt {\n fn new(v: int) -> AtomicInt {\n AtomicInt { v:v }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> int {\n unsafe { atomic_load(&self.v, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, val: int, order: Ordering) {\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_swap(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: int, new: int, order: Ordering) -> int {\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }\n }\n\n #[inline(always)]\n fn fetch_add(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_add(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn fetch_sub(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_sub(&mut self.v, val, order) }\n }\n}\n\nimpl AtomicUint {\n fn new(v: uint) -> AtomicUint {\n AtomicUint { v:v }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> uint {\n unsafe { atomic_load(&self.v, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, val: uint, order: Ordering) {\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_swap(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: uint, new: uint, order: Ordering) -> uint {\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }\n }\n\n #[inline(always)]\n fn fetch_add(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_add(&mut self.v, val, order) }\n }\n\n 
#[inline(always)]\n fn fetch_sub(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_sub(&mut self.v, val, order) }\n }\n}\n\nimpl AtomicPtr {\n fn new(p: *mut T) -> AtomicPtr {\n AtomicPtr { p:p }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> *mut T {\n unsafe { atomic_load(&self.p, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, ptr: *mut T, order: Ordering) {\n unsafe { atomic_store(&mut self.p, ptr, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, ptr: *mut T, order: Ordering) -> *mut T {\n unsafe { atomic_swap(&mut self.p, ptr, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {\n unsafe { atomic_compare_and_swap(&mut self.p, old, new, order) }\n }\n}\n\nimpl AtomicOption {\n fn new(p: ~T) -> AtomicOption {\n unsafe {\n AtomicOption {\n p: cast::transmute(p)\n }\n }\n }\n\n fn empty() -> AtomicOption {\n unsafe {\n AtomicOption {\n p: cast::transmute(0)\n }\n }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: ~T, order: Ordering) -> Option<~T> {\n unsafe {\n let val = cast::transmute(val);\n\n let p = atomic_swap(&mut self.p, val, order);\n let pv : &uint = cast::transmute(&p);\n\n if *pv == 0 {\n None\n } else {\n Some(cast::transmute(p))\n }\n }\n }\n\n #[inline(always)]\n fn take(&mut self, order: Ordering) -> Option<~T> {\n unsafe {\n self.swap(cast::transmute(0), order)\n }\n }\n}\n\n#[unsafe_destructor]\nimpl Drop for AtomicOption {\n fn finalize(&self) {\n \/\/ This will ensure that the contained data is\n \/\/ destroyed, unless it's null.\n unsafe {\n let this : &mut AtomicOption = cast::transmute(self);\n let _ = this.take(SeqCst);\n }\n }\n}\n\n#[inline(always)]\npub unsafe fn atomic_store(dst: &mut T, val: T, order:Ordering) {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n match order {\n Release => intrinsics::atomic_store_rel(dst, val),\n _ => intrinsics::atomic_store(dst, val)\n }\n}\n\n#[inline(always)]\npub unsafe fn atomic_load(dst: &T, order:Ordering) -> T {\n let dst = cast::transmute(dst);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_load_acq(dst),\n _ => intrinsics::atomic_load(dst)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_swap(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xchg_acq(dst, val),\n Release => intrinsics::atomic_xchg_rel(dst, val),\n _ => intrinsics::atomic_xchg(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_add(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xadd_acq(dst, val),\n Release => intrinsics::atomic_xadd_rel(dst, val),\n _ => intrinsics::atomic_xadd(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_sub(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xsub_acq(dst, val),\n Release => intrinsics::atomic_xsub_rel(dst, val),\n _ => intrinsics::atomic_xsub(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_compare_and_swap(dst:&mut T, old:T, new:T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let old = cast::transmute(old);\n let new = cast::transmute(new);\n\n cast::transmute(match order {\n Acquire => 
intrinsics::atomic_cxchg_acq(dst, old, new),\n Release => intrinsics::atomic_cxchg_rel(dst, old, new),\n _ => intrinsics::atomic_cxchg(dst, old, new),\n })\n}\n\n#[cfg(test)]\nmod test {\n use option::*;\n use super::*;\n\n #[test]\n fn flag() {\n let mut flg = AtomicFlag::new();\n assert!(!flg.test_and_set(SeqCst));\n assert!(flg.test_and_set(SeqCst));\n\n flg.clear(SeqCst);\n assert!(!flg.test_and_set(SeqCst));\n }\n\n #[test]\n fn option_swap() {\n let mut p = AtomicOption::new(~1);\n let a = ~2;\n\n let b = p.swap(a, SeqCst);\n\n assert_eq!(b, Some(~1));\n assert_eq!(p.take(SeqCst), Some(~2));\n }\n\n #[test]\n fn option_take() {\n let mut p = AtomicOption::new(~1);\n\n assert_eq!(p.take(SeqCst), Some(~1));\n assert_eq!(p.take(SeqCst), None);\n\n let p2 = ~2;\n p.swap(p2, SeqCst);\n\n assert_eq!(p.take(SeqCst), Some(~2));\n }\n\n}\nAdd some documentation\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Atomic types\n *\n * Basic atomic types supporting atomic operations. Each method takes an `Ordering` which\n * represents the strength of the memory barrier for that operation. These orderings are the same\n * as C++11 atomic orderings [http:\/\/gcc.gnu.org\/wiki\/Atomic\/GCCMM\/AtomicSync]\n *\n * All atomic types are a single word in size.\n *\/\n\nuse unstable::intrinsics;\nuse cast;\nuse option::{Option,Some,None};\nuse libc::c_void;\nuse ops::Drop;\n\n\/**\n * A simple atomic flag, that can be set and cleared. The most basic atomic type.\n *\/\npub struct AtomicFlag {\n priv v: int\n}\n\n\/**\n * An atomic boolean type.\n *\/\npub struct AtomicBool {\n priv v: uint\n}\n\n\/**\n * A signed atomic integer type, supporting basic atomic arithmetic operations\n *\/\npub struct AtomicInt {\n priv v: int\n}\n\n\/**\n * An unsigned atomic integer type, supporting basic atomic arithmetic operations\n *\/\npub struct AtomicUint {\n priv v: uint\n}\n\n\/**\n * An unsafe atomic pointer. Only supports basic atomic operations\n *\/\npub struct AtomicPtr {\n priv p: *mut T\n}\n\n\/**\n * An owned atomic pointer. 
Ensures that only a single reference to the data is held at any time.\n *\/\npub struct AtomicOption {\n priv p: *mut c_void\n}\n\npub enum Ordering {\n Release,\n Acquire,\n SeqCst\n}\n\n\nimpl AtomicFlag {\n\n fn new() -> AtomicFlag {\n AtomicFlag { v: 0 }\n }\n\n \/**\n * Clears the atomic flag\n *\/\n #[inline(always)]\n fn clear(&mut self, order: Ordering) {\n unsafe {atomic_store(&mut self.v, 0, order)}\n }\n\n \/**\n * Sets the flag if it was previously unset, returns the previous value of the\n * flag.\n *\/\n #[inline(always)]\n fn test_and_set(&mut self, order: Ordering) -> bool {\n unsafe {atomic_compare_and_swap(&mut self.v, 0, 1, order) > 0}\n }\n}\n\nimpl AtomicBool {\n fn new(v: bool) -> AtomicBool {\n AtomicBool { v: if v { 1 } else { 0 } }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> bool {\n unsafe { atomic_load(&self.v, order) > 0 }\n }\n\n #[inline(always)]\n fn store(&mut self, val: bool, order: Ordering) {\n let val = if val { 1 } else { 0 };\n\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: bool, order: Ordering) -> bool {\n let val = if val { 1 } else { 0 };\n\n unsafe { atomic_swap(&mut self.v, val, order) > 0}\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: bool, new: bool, order: Ordering) -> bool {\n let old = if old { 1 } else { 0 };\n let new = if new { 1 } else { 0 };\n\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) > 0 }\n }\n}\n\nimpl AtomicInt {\n fn new(v: int) -> AtomicInt {\n AtomicInt { v:v }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> int {\n unsafe { atomic_load(&self.v, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, val: int, order: Ordering) {\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_swap(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: int, new: int, order: Ordering) -> int {\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }\n }\n\n #[inline(always)]\n fn fetch_add(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_add(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn fetch_sub(&mut self, val: int, order: Ordering) -> int {\n unsafe { atomic_sub(&mut self.v, val, order) }\n }\n}\n\nimpl AtomicUint {\n fn new(v: uint) -> AtomicUint {\n AtomicUint { v:v }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> uint {\n unsafe { atomic_load(&self.v, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, val: uint, order: Ordering) {\n unsafe { atomic_store(&mut self.v, val, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_swap(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: uint, new: uint, order: Ordering) -> uint {\n unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }\n }\n\n #[inline(always)]\n fn fetch_add(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_add(&mut self.v, val, order) }\n }\n\n #[inline(always)]\n fn fetch_sub(&mut self, val: uint, order: Ordering) -> uint {\n unsafe { atomic_sub(&mut self.v, val, order) }\n }\n}\n\nimpl AtomicPtr {\n fn new(p: *mut T) -> AtomicPtr {\n AtomicPtr { p:p }\n }\n\n #[inline(always)]\n fn load(&self, order: Ordering) -> *mut T {\n unsafe { atomic_load(&self.p, order) }\n }\n\n #[inline(always)]\n fn store(&mut self, ptr: *mut T, order: 
Ordering) {\n unsafe { atomic_store(&mut self.p, ptr, order); }\n }\n\n #[inline(always)]\n fn swap(&mut self, ptr: *mut T, order: Ordering) -> *mut T {\n unsafe { atomic_swap(&mut self.p, ptr, order) }\n }\n\n #[inline(always)]\n fn compare_and_swap(&mut self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {\n unsafe { atomic_compare_and_swap(&mut self.p, old, new, order) }\n }\n}\n\nimpl AtomicOption {\n fn new(p: ~T) -> AtomicOption {\n unsafe {\n AtomicOption {\n p: cast::transmute(p)\n }\n }\n }\n\n fn empty() -> AtomicOption {\n unsafe {\n AtomicOption {\n p: cast::transmute(0)\n }\n }\n }\n\n #[inline(always)]\n fn swap(&mut self, val: ~T, order: Ordering) -> Option<~T> {\n unsafe {\n let val = cast::transmute(val);\n\n let p = atomic_swap(&mut self.p, val, order);\n let pv : &uint = cast::transmute(&p);\n\n if *pv == 0 {\n None\n } else {\n Some(cast::transmute(p))\n }\n }\n }\n\n #[inline(always)]\n fn take(&mut self, order: Ordering) -> Option<~T> {\n unsafe {\n self.swap(cast::transmute(0), order)\n }\n }\n}\n\n#[unsafe_destructor]\nimpl Drop for AtomicOption {\n fn finalize(&self) {\n \/\/ This will ensure that the contained data is\n \/\/ destroyed, unless it's null.\n unsafe {\n let this : &mut AtomicOption = cast::transmute(self);\n let _ = this.take(SeqCst);\n }\n }\n}\n\n#[inline(always)]\npub unsafe fn atomic_store(dst: &mut T, val: T, order:Ordering) {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n match order {\n Release => intrinsics::atomic_store_rel(dst, val),\n _ => intrinsics::atomic_store(dst, val)\n }\n}\n\n#[inline(always)]\npub unsafe fn atomic_load(dst: &T, order:Ordering) -> T {\n let dst = cast::transmute(dst);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_load_acq(dst),\n _ => intrinsics::atomic_load(dst)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_swap(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xchg_acq(dst, val),\n Release => intrinsics::atomic_xchg_rel(dst, val),\n _ => intrinsics::atomic_xchg(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_add(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xadd_acq(dst, val),\n Release => intrinsics::atomic_xadd_rel(dst, val),\n _ => intrinsics::atomic_xadd(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_sub(dst: &mut T, val: T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let val = cast::transmute(val);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_xsub_acq(dst, val),\n Release => intrinsics::atomic_xsub_rel(dst, val),\n _ => intrinsics::atomic_xsub(dst, val)\n })\n}\n\n#[inline(always)]\npub unsafe fn atomic_compare_and_swap(dst:&mut T, old:T, new:T, order: Ordering) -> T {\n let dst = cast::transmute(dst);\n let old = cast::transmute(old);\n let new = cast::transmute(new);\n\n cast::transmute(match order {\n Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),\n Release => intrinsics::atomic_cxchg_rel(dst, old, new),\n _ => intrinsics::atomic_cxchg(dst, old, new),\n })\n}\n\n#[cfg(test)]\nmod test {\n use option::*;\n use super::*;\n\n #[test]\n fn flag() {\n let mut flg = AtomicFlag::new();\n assert!(!flg.test_and_set(SeqCst));\n assert!(flg.test_and_set(SeqCst));\n\n flg.clear(SeqCst);\n assert!(!flg.test_and_set(SeqCst));\n }\n\n 
#[test]\n fn option_swap() {\n let mut p = AtomicOption::new(~1);\n let a = ~2;\n\n let b = p.swap(a, SeqCst);\n\n assert_eq!(b, Some(~1));\n assert_eq!(p.take(SeqCst), Some(~2));\n }\n\n #[test]\n fn option_take() {\n let mut p = AtomicOption::new(~1);\n\n assert_eq!(p.take(SeqCst), Some(~1));\n assert_eq!(p.take(SeqCst), None);\n\n let p2 = ~2;\n p.swap(p2, SeqCst);\n\n assert_eq!(p.take(SeqCst), Some(~2));\n }\n\n}\n<|endoftext|>"} {"text":"some initial tests for the router<|endoftext|>"} {"text":"Begin writing guessing gameuse std::rand::random;\n\nfn main() {\n let random_value = random::() % 10u8;\n let mut guess = 0u8;\n\n println!(\"{}\", random_value);\n\n}\n<|endoftext|>"} {"text":"Add a test for extra::rl.\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-fast no compile flags for check-fast\n\n\/\/ we want this to be compiled to avoid bitrot, but the actual test\n\/\/has to be conducted by a human, i.e. someone (you?) compiling this\n\/\/file with a plain rustc invocation and running it and checking it\n\/\/works.\n\n\/\/ compile-flags: --cfg robot_mode\n\nextern mod extra;\nuse extra::rl;\n\nstatic HISTORY_FILE: &'static str = \"rl-human-test-history.txt\";\n\nfn main() {\n \/\/ don't run this in robot mode, but still typecheck it.\n if !cfg!(robot_mode) {\n println(\"~~ Welcome to the rl test \\\"suite\\\". ~~\");\n println!(\"Operations:\n - restrict the history to 2 lines,\n - set the tab-completion to suggest three copies of each of the last 3 letters (or 'empty'),\n - add 'one' and 'two' to the history,\n - save it to `{0}`,\n - add 'three',\n - prompt & save input (check the history & completion work and contains only 'two', 'three'),\n - load from `{0}`\n - prompt & save input (history should be 'one', 'two' again),\n - prompt once more.\n\nThe bool return values of each step are printed.\",\n HISTORY_FILE);\n\n println!(\"restricting history length: {}\", rl::set_history_max_len(3));\n\n do rl::complete |line, suggest| {\n if line.is_empty() {\n suggest(~\"empty\")\n } else {\n for c in line.rev_iter().take(3) {\n suggest(format!(\"{0}{1}{1}{1}\", line, c))\n }\n }\n }\n\n println!(\"adding 'one': {}\", rl::add_history(\"one\"));\n println!(\"adding 'two': {}\", rl::add_history(\"two\"));\n\n println!(\"saving history: {}\", rl::save_history(HISTORY_FILE));\n\n println!(\"adding 'three': {}\", rl::add_history(\"three\"));\n\n match rl::read(\"> \") {\n Some(s) => println!(\"saving input: {}\", rl::add_history(s)),\n None => return\n }\n println!(\"loading history: {}\", rl::load_history(HISTORY_FILE));\n\n match rl::read(\"> \") {\n Some(s) => println!(\"saving input: {}\", rl::add_history(s)),\n None => return\n }\n\n rl::read(\"> \");\n }\n}\n<|endoftext|>"} {"text":"fn main() {\n log 3-2;\n assert 3-2 == 1;\n}Revert \"Test for #954\"<|endoftext|>"} {"text":"Test for associated types ICE\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that a partially specified trait object with unspecified associated\n\/\/ type does not ICE.\n\n#![feature(associated_types)]\n\ntrait Foo {\n type A;\n}\n\nfn bar(x: &Foo) {} \/\/~ERROR missing type for associated type `A`\n\npub fn main() {}\n<|endoftext|>"} {"text":"Rollup merge of #42217 - venkatagiri:issue_39974, r=Mark-Simulacrum\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nconst LENGTH: f64 = 2;\n\nstruct Thing {\n f: [[f64; 2]; LENGTH],\n \/\/~^ ERROR mismatched types\n \/\/~| expected usize, found f64\n}\n\nfn main() {\n let _t = Thing { f: [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]] };\n}\n<|endoftext|>"} {"text":"\/\/ xfail-pretty\n\n#[legacy_modes];\n\nextern mod std;\nextern mod syntax;\n\nuse io::*;\n\nuse syntax::diagnostic;\nuse syntax::ast;\nuse syntax::codemap;\nuse syntax::parse;\nuse syntax::print::*;\n\ntrait fake_ext_ctxt {\n fn cfg() -> ast::crate_cfg;\n fn parse_sess() -> parse::parse_sess;\n}\n\ntype fake_session = parse::parse_sess;\n\nimpl fake_session: fake_ext_ctxt {\n fn cfg() -> ast::crate_cfg { ~[] }\n fn parse_sess() -> parse::parse_sess { self }\n}\n\nfn mk_ctxt() -> fake_ext_ctxt {\n parse::new_parse_sess(None) as fake_ext_ctxt\n}\n\n\nfn main() {\n let ext_cx = mk_ctxt();\n\n let abc = #ast{23};\n check_pp(ext_cx, abc, pprust::print_expr, ~\"23\");\n\n let expr3 = #ast{2 - $(abc) + 7};\n check_pp(ext_cx, expr3, pprust::print_expr, ~\"2 - 23 + 7\");\n\n let expr4 = #ast{2 - $(#ast{3}) + 9};\n check_pp(ext_cx, expr4, pprust::print_expr, ~\"2 - 3 + 9\");\n\n let ty = #ast[ty]{int};\n check_pp(ext_cx, ty, pprust::print_type, ~\"int\");\n\n let ty2 = #ast[ty]{option<$(ty)>};\n check_pp(ext_cx, ty2, pprust::print_type, ~\"option\");\n\n let item = #ast[item]{const x : int = 10;};\n check_pp(ext_cx, item, pprust::print_item, ~\"const x: int = 10;\");\n\n let item2: @ast::item = #ast[item]{const x : int = $(abc);};\n check_pp(ext_cx, item2, pprust::print_item, ~\"const x: int = 23;\");\n\n let stmt = #ast[stmt]{let x = 20;};\n check_pp(ext_cx, *stmt, pprust::print_stmt, ~\"let x = 20;\");\n\n let stmt2 = #ast[stmt]{let x : $(ty) = $(abc);};\n check_pp(ext_cx, *stmt2, pprust::print_stmt, ~\"let x: int = 23;\");\n\n let pat = #ast[pat]{some(_)};\n check_pp(ext_cx, pat, pprust::print_pat, ~\"some(_)\");\n\n \/\/ issue #1785\n let x = #ast{1};\n let test1 = #ast{1+$(x)};\n check_pp(ext_cx, test1, pprust::print_expr, ~\"1 + 1\");\n\n let test2 = #ast{$(x)+1};\n check_pp(ext_cx, test2, pprust::print_expr, ~\"1 + 1\");\n\n let y = #ast{2};\n let test3 = #ast{$(x) + $(y)};\n check_pp(ext_cx, test3, pprust::print_expr, ~\"1 + 2\");\n\n let crate = #ast[crate] { fn a() { } };\n check_pp(ext_cx, crate, pprust::print_crate_, ~\"fn a() { }\\n\");\n\n \/\/ issue #1926\n let s = #ast[expr]{__s};\n let e = #ast[expr]{__e};\n let call = #ast[expr]{$(s).foo(|__e| $(e) )};\n check_pp(ext_cx, call, pprust::print_expr, ~\"__s.foo(|__e| __e)\")\n}\n\nfn check_pp(cx: fake_ext_ctxt,\n expr: T, f: fn(pprust::ps, T), expect: ~str) {\n let s = do io::with_str_writer |wr| {\n let pp = pprust::rust_printer(wr, cx.parse_sess().interner);\n f(pp, expr);\n 
pp::eof(pp.s);\n };\n stdout().write_line(s);\n if expect != ~\"\" {\n error!(\"expect: '%s', got: '%s'\", expect, s);\n assert str == expect;\n }\n}\n\nFix run-pass-fulldeps\/qquote.rs\/\/ xfail-pretty\n\n#[legacy_modes];\n\nextern mod std;\nextern mod syntax;\n\nuse io::*;\n\nuse syntax::diagnostic;\nuse syntax::ast;\nuse syntax::codemap;\nuse syntax::parse;\nuse syntax::print::*;\n\ntrait fake_ext_ctxt {\n fn cfg() -> ast::crate_cfg;\n fn parse_sess() -> parse::parse_sess;\n}\n\ntype fake_session = parse::parse_sess;\n\nimpl fake_session: fake_ext_ctxt {\n fn cfg() -> ast::crate_cfg { ~[] }\n fn parse_sess() -> parse::parse_sess { self }\n}\n\nfn mk_ctxt() -> fake_ext_ctxt {\n parse::new_parse_sess(None) as fake_ext_ctxt\n}\n\n\nfn main() {\n let ext_cx = mk_ctxt();\n\n let abc = #ast{23};\n check_pp(ext_cx, abc, pprust::print_expr, ~\"23\");\n\n let expr3 = #ast{2 - $(abc) + 7};\n check_pp(ext_cx, expr3, pprust::print_expr, ~\"2 - 23 + 7\");\n\n let expr4 = #ast{2 - $(#ast{3}) + 9};\n check_pp(ext_cx, expr4, pprust::print_expr, ~\"2 - 3 + 9\");\n\n let ty = #ast[ty]{int};\n check_pp(ext_cx, ty, pprust::print_type, ~\"int\");\n\n let ty2 = #ast[ty]{option<$(ty)>};\n check_pp(ext_cx, ty2, pprust::print_type, ~\"option\");\n\n let item = #ast[item]{const x : int = 10;};\n check_pp(ext_cx, item, pprust::print_item, ~\"const x: int = 10;\");\n\n let item2: @ast::item = #ast[item]{const x : int = $(abc);};\n check_pp(ext_cx, item2, pprust::print_item, ~\"const x: int = 23;\");\n\n let stmt = #ast[stmt]{let x = 20;};\n check_pp(ext_cx, *stmt, pprust::print_stmt, ~\"let x = 20;\");\n\n let stmt2 = #ast[stmt]{let x : $(ty) = $(abc);};\n check_pp(ext_cx, *stmt2, pprust::print_stmt, ~\"let x: int = 23;\");\n\n let pat = #ast[pat]{some(_)};\n check_pp(ext_cx, pat, pprust::print_pat, ~\"some(_)\");\n\n \/\/ issue #1785\n let x = #ast{1};\n let test1 = #ast{1+$(x)};\n check_pp(ext_cx, test1, pprust::print_expr, ~\"1 + 1\");\n\n let test2 = #ast{$(x)+1};\n check_pp(ext_cx, test2, pprust::print_expr, ~\"1 + 1\");\n\n let y = #ast{2};\n let test3 = #ast{$(x) + $(y)};\n check_pp(ext_cx, test3, pprust::print_expr, ~\"1 + 2\");\n\n let crate = #ast[crate] { fn a() { } };\n check_pp(ext_cx, crate, pprust::print_crate_, ~\"fn a() { }\\n\");\n\n \/\/ issue #1926\n let s = #ast[expr]{__s};\n let e = #ast[expr]{__e};\n let call = #ast[expr]{$(s).foo(|__e| $(e) )};\n check_pp(ext_cx, call, pprust::print_expr, ~\"__s.foo(|__e| __e)\")\n}\n\nfn check_pp(cx: fake_ext_ctxt,\n expr: T, f: fn(pprust::ps, T), expect: ~str) {\n let s = do io::with_str_writer |wr| {\n let pp = pprust::rust_printer(wr, cx.parse_sess().interner);\n f(pp, expr);\n pp::eof(pp.s);\n };\n stdout().write_line(s);\n if expect != ~\"\" {\n error!(\"expect: '%s', got: '%s'\", expect, s);\n assert s == expect;\n }\n}\n\n<|endoftext|>"} {"text":"Add test of unsupported elided lifetime in extern Rust siguse std::marker::PhantomData;\n\n#[cxx::bridge]\nmod ffi {\n extern \"Rust\" {\n type T;\n\n fn f(t: &T) -> &str;\n }\n}\n\npub struct T<'a> {\n _lifetime: PhantomData<&'a ()>,\n}\n\nfn f<'a>(_t: &T<'a>) -> &'a str {\n \"\"\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"added solution for exercise 5.2\/\/\/ Type in the program that copies a file using line-at-a-time I\/O \n\/\/\/ (fgets and fputs) from Figure 5.5, but use a MAXLINE of 4.\n\nextern crate libc;\nextern crate apue;\n\nuse libc::{fopen, fgets, fputs, printf};\nuse apue::*;\n\nconst BUFLEN: usize = 4;\n\nfn main() {\n\tunsafe {\n\t\tlet mut args = std::env::args();\n\t\tif args.len() != 3 
{\n\t\t\tprintln!(\"usage:\\n{} \/path\/to\/input \/path\/to\/output\", args.next().unwrap());\n\t\t\tstd::process::exit(1);\n\t\t}\n\t\targs.next(); \/\/ skip filename\n\t\tlet f_in = args.next().unwrap();\n\t\tlet f_out = args.next().unwrap();\n\t\tlet fd_in = fopen(f_in.to_ptr(), \"r\".to_ptr());\n\t\tlet fd_out = fopen(f_out.to_ptr(), \"w\".to_ptr());\n\n\t\tlet buffer:[u8;BUFLEN] = std::mem::uninitialized();\n\t\twhile !fgets(buffer.as_ptr() as *mut i8, BUFLEN as i32, fd_in).is_null() {\n\t\t\tprintf(\"buffer = %s\\n\".to_ptr(), buffer.as_ptr());\n\t\t\tfputs(buffer.as_ptr() as *mut i8, fd_out);\n\t\t}\n\t}\n}\n\n\/\/ # Solution:\n\n\/\/ What happens if you copy lines that exceed this length? \n\/\/ Explain what is happening.\n\n\/\/ Answer: the line is cut into chunks of 3 (3 bytes + null)\n\/\/ the last chunk of a line can be smaller, so e.g. just 1 byte + null\n\/\/\n\/\/ e.g.\n\/\/ hansaplasti\n\/\/ - han\n\/\/ - sap\n\/\/ - las\n\/\/ - ti<|endoftext|>"} {"text":"Add regression test for #20174\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct GradFn usize>(F);\n\nfn main() {\n let GradFn(x_squared) : GradFn<_> = GradFn(|| -> usize { 2 });\n let _ = x_squared();\n}\n<|endoftext|>"} {"text":"Add another test for blocks as expressions\/\/ xfail-boot\n\/\/ -*- rust -*-\n\n\/\/ Tests for standalone blocks as expressions\n\nfn test_basic() {\n let bool res = { true };\n check (res);\n}\n\nfn test_rec() {\n auto res = { rec(v1 = 10, v2 = 20) };\n check (res.v2 == 20);\n}\n\nfn test_filled_with_stuff() {\n auto res = {\n auto a = 0;\n while (a < 10) {\n a += 1;\n }\n a\n };\n check (res == 10);\n}\n\nfn main() {\n test_basic();\n test_rec();\n test_filled_with_stuff();\n}\n<|endoftext|>"} {"text":"Add test for path with interpolated leading componentuse proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};\nuse std::iter::FromIterator;\nuse syn::{Expr, Type};\n\n#[test]\nfn parse_interpolated_leading_component() {\n \/\/ mimics the token stream corresponding to `$mod::rest`\n let tokens = TokenStream::from_iter(vec![\n TokenTree::Group(Group::new(\n Delimiter::None,\n TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(\n \"first\",\n Span::call_site(),\n ))]),\n )),\n TokenTree::Punct(Punct::new(':', Spacing::Joint)),\n TokenTree::Punct(Punct::new(':', Spacing::Alone)),\n TokenTree::Ident(Ident::new(\"rest\", Span::call_site())),\n ]);\n\n match syn::parse2::(tokens.clone()) {\n Ok(Expr::Path(_)) => {}\n expr => panic!(\"incorrect expr: {:?}\", expr),\n }\n\n match syn::parse2::(tokens) {\n Ok(Type::Path(_)) => {}\n expr => panic!(\"incorrect expr: {:?}\", expr),\n }\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks the signature of the implicitly generated native main()\n\/\/ entry point. 
It must match C's `int main(int, char **)`.\n\n\/\/ This test is for targets with 16bit c_int only.\n\/\/ ignore-aarch64\n\/\/ ignore-arm\n\/\/ ignore-asmjs\n\/\/ ignore-hexagon\n\/\/ ignore-mips\n\/\/ ignore-powerpc\n\/\/ ignore-s390x\n\/\/ ignore-sparc\n\/\/ ignore-wasm32\n\/\/ ignore-x86\n\/\/ ignore-x86_64\n\/\/ ignore-xcore\n\nfn main() {\n}\n\n\/\/ CHECK: define i16 @main(i16, i8**)\ntest: ignore mips64 in abi-main-signature-16bit-c-int.rs\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks the signature of the implicitly generated native main()\n\/\/ entry point. It must match C's `int main(int, char **)`.\n\n\/\/ This test is for targets with 16bit c_int only.\n\/\/ ignore-aarch64\n\/\/ ignore-arm\n\/\/ ignore-asmjs\n\/\/ ignore-hexagon\n\/\/ ignore-mips\n\/\/ ignore-mips64\n\/\/ ignore-powerpc\n\/\/ ignore-s390x\n\/\/ ignore-sparc\n\/\/ ignore-wasm32\n\/\/ ignore-x86\n\/\/ ignore-x86_64\n\/\/ ignore-xcore\n\nfn main() {\n}\n\n\/\/ CHECK: define i16 @main(i16, i8**)\n<|endoftext|>"} {"text":"Attempt to find a maximal acyclic graph, so the toplogical sort results in a better ordering.<|endoftext|>"} {"text":"Implement \"seek_relative\"<|endoftext|>"} {"text":"\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(reason = \"not public\", issue = \"0\", feature = \"fd\")]\n\nuse cmp;\nuse io::{self, Read};\nuse libc::{self, c_int, c_void, ssize_t};\nuse mem;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::cvt;\nuse sys_common::AsInner;\n\n#[derive(Debug)]\npub struct FileDesc {\n fd: c_int,\n}\n\nfn max_len() -> usize {\n \/\/ The maximum read limit on most posix-like systems is `SSIZE_MAX`,\n \/\/ with the man page quoting that if the count of bytes to read is\n \/\/ greater than `SSIZE_MAX` the result is \"unspecified\".\n \/\/\n \/\/ On macOS, however, apparently the 64-bit libc is either buggy or\n \/\/ intentionally showing odd behavior by rejecting any read with a size\n \/\/ larger than or equal to INT_MAX. 
To handle both of these the read\n \/\/ size is capped on both platforms.\n if cfg!(target_os = \"macos\") {\n ::max_value() as usize - 1\n } else {\n ::max_value() as usize\n }\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result {\n let ret = cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec) -> io::Result {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n\n pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pread64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result\n {\n use convert::TryInto;\n use libc::pread64;\n \/\/ pread64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pread64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pread >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result\n {\n #[cfg(target_os = \"linux\")]\n use libc::pread64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pread as pread64;\n cvt(pread64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pread64(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result {\n let ret = cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pwrite64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result\n {\n use convert::TryInto;\n use libc::pwrite64;\n \/\/ pwrite64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pwrite64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pwrite >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result\n {\n #[cfg(target_os = \"linux\")]\n use libc::pwrite64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pwrite as pwrite64;\n cvt(pwrite64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pwrite64(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn get_cloexec(&self) -> io::Result {\n unsafe {\n Ok((cvt(libc::fcntl(self.fd, libc::F_GETFD))? 
& libc::FD_CLOEXEC) != 0)\n }\n }\n\n #[cfg(not(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\")))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n cvt(libc::ioctl(self.fd, libc::FIOCLEX))?;\n Ok(())\n }\n }\n #[cfg(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\"))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?;\n let new = previous | libc::FD_CLOEXEC;\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFD, new))?;\n }\n Ok(())\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let v = nonblocking as c_int;\n cvt(libc::ioctl(self.fd, libc::FIONBIO, &v))?;\n Ok(())\n }\n }\n\n #[cfg(not(target_os = \"linux\"))]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFL))?;\n let new = if nonblocking {\n previous | libc::O_NONBLOCK\n } else {\n previous & !libc::O_NONBLOCK\n };\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFL, new))?;\n }\n Ok(())\n }\n }\n\n pub fn duplicate(&self) -> io::Result {\n \/\/ We want to atomically duplicate this file descriptor and set the\n \/\/ CLOEXEC flag, and currently that's done via F_DUPFD_CLOEXEC. This\n \/\/ flag, however, isn't supported on older Linux kernels (earlier than\n \/\/ 2.6.24).\n \/\/\n \/\/ To detect this and ensure that CLOEXEC is still set, we\n \/\/ follow a strategy similar to musl [1] where if passing\n \/\/ F_DUPFD_CLOEXEC causes `fcntl` to return EINVAL it means it's not\n \/\/ supported (the third parameter, 0, is always valid), so we stop\n \/\/ trying that.\n \/\/\n \/\/ Also note that Android doesn't have F_DUPFD_CLOEXEC, but get it to\n \/\/ resolve so we at least compile this.\n \/\/\n \/\/ [1]: http:\/\/comments.gmane.org\/gmane.linux.lib.musl.general\/2963\n #[cfg(any(target_os = \"android\", target_os = \"haiku\"))]\n use libc::F_DUPFD as F_DUPFD_CLOEXEC;\n #[cfg(not(any(target_os = \"android\", target_os=\"haiku\")))]\n use libc::F_DUPFD_CLOEXEC;\n\n let make_filedesc = |fd| {\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(fd)\n };\n static TRY_CLOEXEC: AtomicBool =\n AtomicBool::new(!cfg!(target_os = \"android\"));\n let fd = self.raw();\n if TRY_CLOEXEC.load(Ordering::Relaxed) {\n match cvt(unsafe { libc::fcntl(fd, F_DUPFD_CLOEXEC, 0) }) {\n \/\/ We *still* call the `set_cloexec` method as apparently some\n \/\/ linux kernel at some point stopped setting CLOEXEC even\n \/\/ though it reported doing so on F_DUPFD_CLOEXEC.\n Ok(fd) => {\n return Ok(if cfg!(target_os = \"linux\") {\n make_filedesc(fd)?\n } else {\n FileDesc::new(fd)\n })\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {\n TRY_CLOEXEC.store(false, Ordering::Relaxed);\n }\n Err(e) => return Err(e),\n }\n }\n cvt(unsafe { libc::fcntl(fd, libc::F_DUPFD, 0) }).and_then(make_filedesc)\n }\n}\n\nimpl<'a> Read for &'a FileDesc {\n fn read(&mut self, buf: &mut [u8]) -> io::Result {\n (**self).read(buf)\n }\n}\n\nimpl AsInner for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. 
The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ (opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\nFix typo in comment\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(reason = \"not public\", issue = \"0\", feature = \"fd\")]\n\nuse cmp;\nuse io::{self, Read};\nuse libc::{self, c_int, c_void, ssize_t};\nuse mem;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::cvt;\nuse sys_common::AsInner;\n\n#[derive(Debug)]\npub struct FileDesc {\n fd: c_int,\n}\n\nfn max_len() -> usize {\n \/\/ The maximum read limit on most posix-like systems is `SSIZE_MAX`,\n \/\/ with the man page quoting that if the count of bytes to read is\n \/\/ greater than `SSIZE_MAX` the result is \"unspecified\".\n \/\/\n \/\/ On macOS, however, apparently the 64-bit libc is either buggy or\n \/\/ intentionally showing odd behavior by rejecting any read with a size\n \/\/ larger than or equal to INT_MAX. To handle both of these the read\n \/\/ size is capped on both platforms.\n if cfg!(target_os = \"macos\") {\n ::max_value() as usize - 1\n } else {\n ::max_value() as usize\n }\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result {\n let ret = cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec) -> io::Result {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n\n pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pread64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result\n {\n use convert::TryInto;\n use libc::pread64;\n \/\/ pread64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pread64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pread >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result\n {\n #[cfg(target_os = \"linux\")]\n use libc::pread64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pread as pread64;\n cvt(pread64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pread64(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result {\n let ret = cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result {\n #[cfg(target_os = \"android\")]\n 
use super::android::cvt_pwrite64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result\n {\n use convert::TryInto;\n use libc::pwrite64;\n \/\/ pwrite64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pwrite64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pwrite >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result\n {\n #[cfg(target_os = \"linux\")]\n use libc::pwrite64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pwrite as pwrite64;\n cvt(pwrite64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pwrite64(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn get_cloexec(&self) -> io::Result {\n unsafe {\n Ok((cvt(libc::fcntl(self.fd, libc::F_GETFD))? & libc::FD_CLOEXEC) != 0)\n }\n }\n\n #[cfg(not(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\")))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n cvt(libc::ioctl(self.fd, libc::FIOCLEX))?;\n Ok(())\n }\n }\n #[cfg(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\"))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?;\n let new = previous | libc::FD_CLOEXEC;\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFD, new))?;\n }\n Ok(())\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let v = nonblocking as c_int;\n cvt(libc::ioctl(self.fd, libc::FIONBIO, &v))?;\n Ok(())\n }\n }\n\n #[cfg(not(target_os = \"linux\"))]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFL))?;\n let new = if nonblocking {\n previous | libc::O_NONBLOCK\n } else {\n previous & !libc::O_NONBLOCK\n };\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFL, new))?;\n }\n Ok(())\n }\n }\n\n pub fn duplicate(&self) -> io::Result {\n \/\/ We want to atomically duplicate this file descriptor and set the\n \/\/ CLOEXEC flag, and currently that's done via F_DUPFD_CLOEXEC. 
This\n \/\/ flag, however, isn't supported on older Linux kernels (earlier than\n \/\/ 2.6.24).\n \/\/\n \/\/ To detect this and ensure that CLOEXEC is still set, we\n \/\/ follow a strategy similar to musl [1] where if passing\n \/\/ F_DUPFD_CLOEXEC causes `fcntl` to return EINVAL it means it's not\n \/\/ supported (the third parameter, 0, is always valid), so we stop\n \/\/ trying that.\n \/\/\n \/\/ Also note that Android doesn't have F_DUPFD_CLOEXEC, but get it to\n \/\/ resolve so we at least compile this.\n \/\/\n \/\/ [1]: http:\/\/comments.gmane.org\/gmane.linux.lib.musl.general\/2963\n #[cfg(any(target_os = \"android\", target_os = \"haiku\"))]\n use libc::F_DUPFD as F_DUPFD_CLOEXEC;\n #[cfg(not(any(target_os = \"android\", target_os=\"haiku\")))]\n use libc::F_DUPFD_CLOEXEC;\n\n let make_filedesc = |fd| {\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(fd)\n };\n static TRY_CLOEXEC: AtomicBool =\n AtomicBool::new(!cfg!(target_os = \"android\"));\n let fd = self.raw();\n if TRY_CLOEXEC.load(Ordering::Relaxed) {\n match cvt(unsafe { libc::fcntl(fd, F_DUPFD_CLOEXEC, 0) }) {\n \/\/ We *still* call the `set_cloexec` method as apparently some\n \/\/ linux kernel at some point stopped setting CLOEXEC even\n \/\/ though it reported doing so on F_DUPFD_CLOEXEC.\n Ok(fd) => {\n return Ok(if cfg!(target_os = \"linux\") {\n make_filedesc(fd)?\n } else {\n FileDesc::new(fd)\n })\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {\n TRY_CLOEXEC.store(false, Ordering::Relaxed);\n }\n Err(e) => return Err(e),\n }\n }\n cvt(unsafe { libc::fcntl(fd, libc::F_DUPFD, 0) }).and_then(make_filedesc)\n }\n}\n\nimpl<'a> Read for &'a FileDesc {\n fn read(&mut self, buf: &mut [u8]) -> io::Result {\n (**self).read(buf)\n }\n}\n\nimpl AsInner for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\n<|endoftext|>"} {"text":"test exposing memory management failure for #1078\/\/ xfail-test\n\n\/\/ A port of task-killjoin to use a resource to manage\n\/\/ the join. \n\nuse std;\nimport task;\n\nfn joinable(f: fn()) -> (task::task, comm::port) {\n resource notify(data: (comm::chan,\n @mutable bool)) {\n let (c, v) = data;\n comm::send(c, *v);\n }\n fn wrapper(pair: (comm::chan, fn())) {\n let (c, f) = pair;\n let b = @mutable false;\n let _r = notify((c, b));\n f();\n *b = true;\n }\n let p = comm::port();\n let c = comm::chan(p);\n let t = task::spawn((c, f), wrapper);\n ret (t, p);\n}\n\nfn join(pair: (task::task, comm::port)) -> bool {\n let (_, port) = pair;\n comm::recv(port)\n}\n\nfn supervised() {\n \/\/ Yield to make sure the supervisor joins before we\n \/\/ fail. 
This is currently not needed because the supervisor\n \/\/ runs first, but I can imagine that changing.\n task::yield();\n fail;\n}\n\nfn supervisor() {\n \/\/ Unsupervise this task so the process doesn't return a failure status as\n \/\/ a result of the main task being killed.\n task::unsupervise();\n let f = supervised;\n join(joinable(supervised));\n}\n\nfn main() {\n join(joinable(supervisor));\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<|endoftext|>"} {"text":"#![cfg(target_os = \"macos\")]\n\nuse CreationError;\nuse CreationError::OsError;\nuse ContextError;\nuse GlAttributes;\nuse GlContext;\nuse PixelFormat;\nuse PixelFormatRequirements;\nuse Robustness;\nuse WindowAttributes;\nuse os::macos::ActivationPolicy;\n\nuse objc::runtime::{BOOL, NO};\n\nuse cgl::{CGLEnable, kCGLCECrashOnRemovedFunctions};\n\nuse cocoa::base::{id, nil};\nuse cocoa::foundation::NSAutoreleasePool;\nuse cocoa::appkit::{self, NSOpenGLContext, NSOpenGLPixelFormat};\n\nuse core_foundation::base::TCFType;\nuse core_foundation::string::CFString;\nuse core_foundation::bundle::{CFBundleGetBundleWithIdentifier, CFBundleGetFunctionPointerForName};\n\nuse std::str::FromStr;\nuse std::ops::Deref;\n\nuse libc;\n\nuse winit;\nuse winit::os::macos::WindowExt;\npub use winit::{MonitorId, NativeMonitorId, get_available_monitors, get_primary_monitor};\npub use winit::{PollEventsIterator, WaitEventsIterator};\npub use self::headless::HeadlessContext;\npub use self::headless::PlatformSpecificHeadlessBuilderAttributes;\n\nmod headless;\nmod helpers;\n\n#[derive(Clone, Default)]\npub struct PlatformSpecificWindowBuilderAttributes {\n pub activation_policy: ActivationPolicy,\n}\n\npub struct Window {\n context: IdRef,\n pixel_format: PixelFormat,\n winit_window: winit::Window,\n}\n\nunsafe impl Send for Window {}\nunsafe impl Sync for Window {}\n\nimpl Window {\n pub fn new(win_attribs: &WindowAttributes,\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>,\n _pl_attribs: &PlatformSpecificWindowBuilderAttributes,\n winit_builder: winit::WindowBuilder)\n -> Result {\n if opengl.sharing.is_some() {\n unimplemented!()\n }\n\n match opengl.robustness {\n Robustness::RobustNoResetNotification |\n Robustness::RobustLoseContextOnReset => {\n return Err(CreationError::RobustnessNotSupported);\n }\n _ => (),\n }\n\n let winit_window = winit_builder.build().unwrap();\n let view = winit_window.get_nsview() as id;\n let (context, pf) = match Window::create_context(view, pf_reqs, opengl) {\n Ok((context, pf)) => (context, pf),\n Err(e) => {\n return Err(OsError(format!(\"Couldn't create OpenGL context: {}\", e)));\n }\n };\n\n let window = Window {\n context: context,\n pixel_format: pf,\n winit_window: winit_window,\n };\n\n Ok(window)\n }\n\n fn create_context(view: id,\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>)\n -> Result<(IdRef, PixelFormat), CreationError> {\n let attributes = try!(helpers::build_nsattributes(pf_reqs, opengl));\n unsafe {\n let pixelformat = IdRef::new(NSOpenGLPixelFormat::alloc(nil)\n .initWithAttributes_(&attributes));\n\n if let Some(pixelformat) = pixelformat.non_nil() {\n\n \/\/ TODO: Add context sharing\n let context = IdRef::new(NSOpenGLContext::alloc(nil)\n .initWithFormat_shareContext_(*pixelformat, nil));\n\n if let Some(cxt) = context.non_nil() {\n let pf = {\n let get_attr = |attrib: appkit::NSOpenGLPixelFormatAttribute| -> i32 {\n 
let mut value = 0;\n\n NSOpenGLPixelFormat::getValues_forAttribute_forVirtualScreen_(\n *pixelformat,\n &mut value,\n attrib,\n NSOpenGLContext::currentVirtualScreen(*cxt));\n\n value\n };\n\n PixelFormat {\n hardware_accelerated: get_attr(appkit::NSOpenGLPFAAccelerated) != 0,\n color_bits: (get_attr(appkit::NSOpenGLPFAColorSize) - get_attr(appkit::NSOpenGLPFAAlphaSize)) as u8,\n alpha_bits: get_attr(appkit::NSOpenGLPFAAlphaSize) as u8,\n depth_bits: get_attr(appkit::NSOpenGLPFADepthSize) as u8,\n stencil_bits: get_attr(appkit::NSOpenGLPFAStencilSize) as u8,\n stereoscopy: get_attr(appkit::NSOpenGLPFAStereo) != 0,\n double_buffer: get_attr(appkit::NSOpenGLPFADoubleBuffer) != 0,\n multisampling: if get_attr(appkit::NSOpenGLPFAMultisample) > 0 {\n Some(get_attr(appkit::NSOpenGLPFASamples) as u16)\n } else {\n None\n },\n srgb: true,\n }\n };\n\n cxt.setView_(view);\n let value = if opengl.vsync { 1 } else { 0 };\n cxt.setValues_forParameter_(&value, appkit::NSOpenGLContextParameter::NSOpenGLCPSwapInterval);\n\n CGLEnable(cxt.CGLContextObj() as *mut _, kCGLCECrashOnRemovedFunctions);\n\n Ok((cxt, pf))\n } else {\n Err(CreationError::NotSupported)\n }\n } else {\n Err(CreationError::NoAvailablePixelFormat)\n }\n }\n }\n\n pub fn set_title(&self, title: &str) {\n self.winit_window.set_title(title)\n }\n\n #[inline]\n pub fn as_winit_window(&self) -> &winit::Window {\n &self.winit_window\n }\n\n #[inline]\n pub fn as_winit_window_mut(&mut self) -> &mut winit::Window {\n &mut self.winit_window\n }\n\n pub fn show(&self) {\n self.winit_window.show()\n }\n\n pub fn hide(&self) {\n self.winit_window.hide()\n }\n\n pub fn get_position(&self) -> Option<(i32, i32)> {\n self.winit_window.get_position()\n }\n\n pub fn set_position(&self, x: i32, y: i32) {\n self.winit_window.set_position(x, y)\n }\n\n pub fn get_inner_size(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size()\n }\n\n pub fn get_inner_size_points(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size()\n }\n\n pub fn get_inner_size_pixels(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size().map(|(x, y)| {\n let hidpi = self.hidpi_factor();\n ((x as f32 * hidpi) as u32, (y as f32 * hidpi) as u32)\n })\n }\n\n pub fn get_outer_size(&self) -> Option<(u32, u32)> {\n self.winit_window.get_outer_size()\n }\n\n pub fn set_inner_size(&self, x: u32, y: u32) {\n self.winit_window.set_inner_size(x, y)\n }\n\n pub fn poll_events(&self) -> winit::PollEventsIterator {\n self.winit_window.poll_events()\n }\n\n pub fn wait_events(&self) -> winit::WaitEventsIterator {\n self.winit_window.wait_events()\n }\n\n pub unsafe fn platform_display(&self) -> *mut libc::c_void {\n self.winit_window.platform_display()\n }\n\n pub unsafe fn platform_window(&self) -> *mut libc::c_void {\n self.winit_window.platform_window()\n }\n\n pub fn create_window_proxy(&self) -> winit::WindowProxy {\n self.winit_window.create_window_proxy()\n }\n\n pub fn set_window_resize_callback(&mut self, callback: Option) {\n self.winit_window.set_window_resize_callback(callback);\n }\n\n pub fn set_cursor(&self, cursor: winit::MouseCursor) {\n self.winit_window.set_cursor(cursor);\n }\n\n pub fn hidpi_factor(&self) -> f32 {\n self.winit_window.hidpi_factor()\n }\n\n pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {\n self.winit_window.set_cursor_position(x, y)\n }\n\n pub fn set_cursor_state(&self, state: winit::CursorState) -> Result<(), String> {\n self.winit_window.set_cursor_state(state)\n }\n}\n\nimpl GlContext for Window {\n 
#[inline]\n unsafe fn make_current(&self) -> Result<(), ContextError> {\n let _: () = msg_send![*self.context, update];\n self.context.makeCurrentContext();\n Ok(())\n }\n\n #[inline]\n fn is_current(&self) -> bool {\n unsafe {\n let current = NSOpenGLContext::currentContext(nil);\n if current != nil {\n let is_equal: BOOL = msg_send![current, isEqual:*self.context];\n is_equal != NO\n } else {\n false\n }\n }\n }\n\n fn get_proc_address(&self, addr: &str) -> *const () {\n let symbol_name: CFString = FromStr::from_str(addr).unwrap();\n let framework_name: CFString = FromStr::from_str(\"com.apple.opengl\").unwrap();\n let framework =\n unsafe { CFBundleGetBundleWithIdentifier(framework_name.as_concrete_TypeRef()) };\n let symbol = unsafe {\n CFBundleGetFunctionPointerForName(framework, symbol_name.as_concrete_TypeRef())\n };\n symbol as *const _\n }\n\n #[inline]\n fn swap_buffers(&self) -> Result<(), ContextError> {\n unsafe {\n let pool = NSAutoreleasePool::new(nil);\n self.context.flushBuffer();\n let _: () = msg_send![pool, release];\n }\n Ok(())\n }\n\n #[inline]\n fn get_api(&self) -> ::Api {\n ::Api::OpenGl\n }\n\n #[inline]\n fn get_pixel_format(&self) -> PixelFormat {\n self.pixel_format.clone()\n }\n}\n\nstruct IdRef(id);\n\nimpl IdRef {\n fn new(i: id) -> IdRef {\n IdRef(i)\n }\n\n #[allow(dead_code)]\n fn retain(i: id) -> IdRef {\n if i != nil {\n let _: id = unsafe { msg_send![i, retain] };\n }\n IdRef(i)\n }\n\n fn non_nil(self) -> Option {\n if self.0 == nil { None } else { Some(self) }\n }\n}\n\nimpl Drop for IdRef {\n fn drop(&mut self) {\n if self.0 != nil {\n let _: () = unsafe { msg_send![self.0, release] };\n }\n }\n}\n\nimpl Deref for IdRef {\n type Target = id;\n fn deref<'a>(&'a self) -> &'a id {\n &self.0\n }\n}\n\nimpl Clone for IdRef {\n fn clone(&self) -> IdRef {\n if self.0 != nil {\n let _: id = unsafe { msg_send![self.0, retain] };\n }\n IdRef(self.0)\n }\n}\nCall [NSOpenGLContext update] on window size changes.#![cfg(target_os = \"macos\")]\n\nuse CreationError;\nuse CreationError::OsError;\nuse ContextError;\nuse GlAttributes;\nuse GlContext;\nuse PixelFormat;\nuse PixelFormatRequirements;\nuse Robustness;\nuse WindowAttributes;\nuse os::macos::ActivationPolicy;\n\nuse objc::runtime::{BOOL, NO};\n\nuse cgl::{CGLEnable, kCGLCECrashOnRemovedFunctions};\n\nuse cocoa::base::{id, nil};\nuse cocoa::foundation::NSAutoreleasePool;\nuse cocoa::appkit::{self, NSOpenGLContext, NSOpenGLPixelFormat};\n\nuse core_foundation::base::TCFType;\nuse core_foundation::string::CFString;\nuse core_foundation::bundle::{CFBundleGetBundleWithIdentifier, CFBundleGetFunctionPointerForName};\n\nuse std::str::FromStr;\nuse std::ops::Deref;\n\nuse libc;\n\nuse winit;\nuse winit::os::macos::WindowExt;\npub use winit::{MonitorId, NativeMonitorId, get_available_monitors, get_primary_monitor};\npub use self::headless::HeadlessContext;\npub use self::headless::PlatformSpecificHeadlessBuilderAttributes;\n\nmod headless;\nmod helpers;\n\n#[derive(Clone, Default)]\npub struct PlatformSpecificWindowBuilderAttributes {\n pub activation_policy: ActivationPolicy,\n}\n\npub struct Window {\n context: IdRef,\n pixel_format: PixelFormat,\n winit_window: winit::Window,\n}\n\npub struct WaitEventsIterator<'a> {\n window: &'a Window,\n winit_iterator: winit::WaitEventsIterator<'a>,\n}\n\nimpl<'a> Iterator for WaitEventsIterator<'a> {\n type Item = winit::Event;\n\n fn next(&mut self) -> Option {\n let event = self.winit_iterator.next();\n match event {\n Some(winit::Event::Resized(_, _)) => 
self.window.update_context(),\n _ => {},\n }\n event\n }\n}\n\npub struct PollEventsIterator<'a> {\n window: &'a Window,\n winit_iterator: winit::PollEventsIterator<'a>,\n}\n\nimpl<'a> Iterator for PollEventsIterator<'a> {\n type Item = winit::Event;\n\n fn next(&mut self) -> Option {\n let event = self.winit_iterator.next();\n match event {\n Some(winit::Event::Resized(_, _)) => self.window.update_context(),\n _ => {},\n }\n event\n }\n}\n\nunsafe impl Send for Window {}\nunsafe impl Sync for Window {}\n\nimpl Window {\n pub fn new(win_attribs: &WindowAttributes,\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>,\n _pl_attribs: &PlatformSpecificWindowBuilderAttributes,\n winit_builder: winit::WindowBuilder)\n -> Result {\n if opengl.sharing.is_some() {\n unimplemented!()\n }\n\n match opengl.robustness {\n Robustness::RobustNoResetNotification |\n Robustness::RobustLoseContextOnReset => {\n return Err(CreationError::RobustnessNotSupported);\n }\n _ => (),\n }\n\n let winit_window = winit_builder.build().unwrap();\n let view = winit_window.get_nsview() as id;\n let (context, pf) = match Window::create_context(view, pf_reqs, opengl) {\n Ok((context, pf)) => (context, pf),\n Err(e) => {\n return Err(OsError(format!(\"Couldn't create OpenGL context: {}\", e)));\n }\n };\n\n let window = Window {\n context: context,\n pixel_format: pf,\n winit_window: winit_window,\n };\n\n Ok(window)\n }\n\n fn create_context(view: id,\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>)\n -> Result<(IdRef, PixelFormat), CreationError> {\n let attributes = try!(helpers::build_nsattributes(pf_reqs, opengl));\n unsafe {\n let pixelformat = IdRef::new(NSOpenGLPixelFormat::alloc(nil)\n .initWithAttributes_(&attributes));\n\n if let Some(pixelformat) = pixelformat.non_nil() {\n\n \/\/ TODO: Add context sharing\n let context = IdRef::new(NSOpenGLContext::alloc(nil)\n .initWithFormat_shareContext_(*pixelformat, nil));\n\n if let Some(cxt) = context.non_nil() {\n let pf = {\n let get_attr = |attrib: appkit::NSOpenGLPixelFormatAttribute| -> i32 {\n let mut value = 0;\n\n NSOpenGLPixelFormat::getValues_forAttribute_forVirtualScreen_(\n *pixelformat,\n &mut value,\n attrib,\n NSOpenGLContext::currentVirtualScreen(*cxt));\n\n value\n };\n\n PixelFormat {\n hardware_accelerated: get_attr(appkit::NSOpenGLPFAAccelerated) != 0,\n color_bits: (get_attr(appkit::NSOpenGLPFAColorSize) - get_attr(appkit::NSOpenGLPFAAlphaSize)) as u8,\n alpha_bits: get_attr(appkit::NSOpenGLPFAAlphaSize) as u8,\n depth_bits: get_attr(appkit::NSOpenGLPFADepthSize) as u8,\n stencil_bits: get_attr(appkit::NSOpenGLPFAStencilSize) as u8,\n stereoscopy: get_attr(appkit::NSOpenGLPFAStereo) != 0,\n double_buffer: get_attr(appkit::NSOpenGLPFADoubleBuffer) != 0,\n multisampling: if get_attr(appkit::NSOpenGLPFAMultisample) > 0 {\n Some(get_attr(appkit::NSOpenGLPFASamples) as u16)\n } else {\n None\n },\n srgb: true,\n }\n };\n\n cxt.setView_(view);\n let value = if opengl.vsync { 1 } else { 0 };\n cxt.setValues_forParameter_(&value, appkit::NSOpenGLContextParameter::NSOpenGLCPSwapInterval);\n\n CGLEnable(cxt.CGLContextObj() as *mut _, kCGLCECrashOnRemovedFunctions);\n\n Ok((cxt, pf))\n } else {\n Err(CreationError::NotSupported)\n }\n } else {\n Err(CreationError::NoAvailablePixelFormat)\n }\n }\n }\n\n fn update_context(&self) {\n unsafe {\n self.context.update()\n }\n }\n\n pub fn set_title(&self, title: &str) {\n self.winit_window.set_title(title)\n }\n\n #[inline]\n pub fn as_winit_window(&self) -> &winit::Window {\n 
&self.winit_window\n }\n\n #[inline]\n pub fn as_winit_window_mut(&mut self) -> &mut winit::Window {\n &mut self.winit_window\n }\n\n pub fn show(&self) {\n self.winit_window.show()\n }\n\n pub fn hide(&self) {\n self.winit_window.hide()\n }\n\n pub fn get_position(&self) -> Option<(i32, i32)> {\n self.winit_window.get_position()\n }\n\n pub fn set_position(&self, x: i32, y: i32) {\n self.winit_window.set_position(x, y)\n }\n\n pub fn get_inner_size(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size()\n }\n\n pub fn get_inner_size_points(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size()\n }\n\n pub fn get_inner_size_pixels(&self) -> Option<(u32, u32)> {\n self.winit_window.get_inner_size().map(|(x, y)| {\n let hidpi = self.hidpi_factor();\n ((x as f32 * hidpi) as u32, (y as f32 * hidpi) as u32)\n })\n }\n\n pub fn get_outer_size(&self) -> Option<(u32, u32)> {\n self.winit_window.get_outer_size()\n }\n\n pub fn set_inner_size(&self, x: u32, y: u32) {\n self.winit_window.set_inner_size(x, y)\n }\n\n pub fn poll_events(&self) -> PollEventsIterator {\n PollEventsIterator {\n window: self,\n winit_iterator: self.winit_window.poll_events()\n }\n }\n\n pub fn wait_events(&self) -> WaitEventsIterator {\n WaitEventsIterator {\n window: self,\n winit_iterator: self.winit_window.wait_events()\n }\n }\n\n pub unsafe fn platform_display(&self) -> *mut libc::c_void {\n self.winit_window.platform_display()\n }\n\n pub unsafe fn platform_window(&self) -> *mut libc::c_void {\n self.winit_window.platform_window()\n }\n\n pub fn create_window_proxy(&self) -> winit::WindowProxy {\n self.winit_window.create_window_proxy()\n }\n\n pub fn set_window_resize_callback(&mut self, callback: Option) {\n self.winit_window.set_window_resize_callback(callback);\n }\n\n pub fn set_cursor(&self, cursor: winit::MouseCursor) {\n self.winit_window.set_cursor(cursor);\n }\n\n pub fn hidpi_factor(&self) -> f32 {\n self.winit_window.hidpi_factor()\n }\n\n pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {\n self.winit_window.set_cursor_position(x, y)\n }\n\n pub fn set_cursor_state(&self, state: winit::CursorState) -> Result<(), String> {\n self.winit_window.set_cursor_state(state)\n }\n}\n\nimpl GlContext for Window {\n #[inline]\n unsafe fn make_current(&self) -> Result<(), ContextError> {\n let _: () = msg_send![*self.context, update];\n self.context.makeCurrentContext();\n Ok(())\n }\n\n #[inline]\n fn is_current(&self) -> bool {\n unsafe {\n let current = NSOpenGLContext::currentContext(nil);\n if current != nil {\n let is_equal: BOOL = msg_send![current, isEqual:*self.context];\n is_equal != NO\n } else {\n false\n }\n }\n }\n\n fn get_proc_address(&self, addr: &str) -> *const () {\n let symbol_name: CFString = FromStr::from_str(addr).unwrap();\n let framework_name: CFString = FromStr::from_str(\"com.apple.opengl\").unwrap();\n let framework =\n unsafe { CFBundleGetBundleWithIdentifier(framework_name.as_concrete_TypeRef()) };\n let symbol = unsafe {\n CFBundleGetFunctionPointerForName(framework, symbol_name.as_concrete_TypeRef())\n };\n symbol as *const _\n }\n\n #[inline]\n fn swap_buffers(&self) -> Result<(), ContextError> {\n unsafe {\n let pool = NSAutoreleasePool::new(nil);\n self.context.flushBuffer();\n let _: () = msg_send![pool, release];\n }\n Ok(())\n }\n\n #[inline]\n fn get_api(&self) -> ::Api {\n ::Api::OpenGl\n }\n\n #[inline]\n fn get_pixel_format(&self) -> PixelFormat {\n self.pixel_format.clone()\n }\n}\n\nstruct IdRef(id);\n\nimpl IdRef {\n fn new(i: 
id) -> IdRef {\n IdRef(i)\n }\n\n #[allow(dead_code)]\n fn retain(i: id) -> IdRef {\n if i != nil {\n let _: id = unsafe { msg_send![i, retain] };\n }\n IdRef(i)\n }\n\n fn non_nil(self) -> Option {\n if self.0 == nil { None } else { Some(self) }\n }\n}\n\nimpl Drop for IdRef {\n fn drop(&mut self) {\n if self.0 != nil {\n let _: () = unsafe { msg_send![self.0, release] };\n }\n }\n}\n\nimpl Deref for IdRef {\n type Target = id;\n fn deref<'a>(&'a self) -> &'a id {\n &self.0\n }\n}\n\nimpl Clone for IdRef {\n fn clone(&self) -> IdRef {\n if self.0 != nil {\n let _: id = unsafe { msg_send![self.0, retain] };\n }\n IdRef(self.0)\n }\n}\n<|endoftext|>"} {"text":"Remove the awkward queue wait now that synchronisation has been improved<|endoftext|>"} {"text":"added a test that `feature=foo` will be quoted\/\/! this file tests that when the commands being run are shown\n\/\/! in the output, their arguments are quoted properly\n\/\/! so that the command can be run in a terminal\n\nuse cargotest::support::{\n execs,\n project,\n};\nuse hamcrest::assert_that\n\n#[test]\nfn features_are_quoted() {\n let p = project(\"foo\")\n .file(\n \"Cargo.toml\",\n r#\"\n [project]\n name = \"foo\"\n version = \"0.1.0\"\n authors = [\"mikeyhew@example.com\"]\n\n [features]\n some_feature = []\n default = [\"some_feature\"]\n \"#,\n )\n .file(\"src\/main.rs\", \"fn main() {error}\")\n .build();\n\n assert_that(\n p.cargo(\"check -v\"),\n execs()\n .with_status(101)\n .with_stderr_contains(\n r#\"\\\n[CHECKING] foo [..]\n[RUNNING] `rustc [..] --cfg 'feature=\"default\"' --cfg 'feature=\"some_feature\"' [..]`\n[ERROR] [..]\nprocess didn't exit successfully: `rustc [..] --cfg 'feature=\"default\"' --cfg 'feature=\"some_feature\"' [..]`\n\"#\n )\n );\n}\n<|endoftext|>"} {"text":"\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 or the MIT license\n\/\/ , at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::default::Default;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::io;\nuse std::path::{PathBuf, Path};\n\nuse getopts;\nuse testing;\nuse rustc::session::search_paths::SearchPaths;\nuse rustc::session::config::Externs;\nuse syntax::codemap::DUMMY_SP;\n\nuse externalfiles::{ExternalHtml, LoadStringError, load_string};\n\nuse html::render::reset_ids;\nuse html::escape::Escape;\nuse html::markdown;\nuse html::markdown::{Markdown, MarkdownWithToc, find_testable_code};\nuse test::{TestOptions, Collector};\n\n\/\/\/ Separate any lines at the start of the file that begin with `%`.\nfn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {\n let mut metadata = Vec::new();\n let mut count = 0;\n for line in s.lines() {\n if line.starts_with(\"%\") {\n \/\/ remove %\n metadata.push(line[1..].trim_left());\n count += line.len() + 1;\n } else {\n return (metadata, &s[count..]);\n }\n }\n \/\/ if we're here, then all lines were metadata % lines.\n (metadata, \"\")\n}\n\n\/\/\/ Render `input` (e.g. \"foo.md\") into an HTML file in `output`\n\/\/\/ (e.g. 
output = \"bar\" => \"bar\/foo.html\").\npub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,\n external_html: &ExternalHtml, include_toc: bool) -> isize {\n let input_p = Path::new(input);\n output.push(input_p.file_stem().unwrap());\n output.set_extension(\"html\");\n\n let mut css = String::new();\n for name in &matches.opt_strs(\"markdown-css\") {\n let s = format!(\"\\n\", name);\n css.push_str(&s)\n }\n\n let input_str = match load_string(input) {\n Ok(s) => s,\n Err(LoadStringError::ReadFail) => return 1,\n Err(LoadStringError::BadUtf8) => return 2,\n };\n if let Some(playground) = matches.opt_str(\"markdown-playground-url\").or(\n matches.opt_str(\"playground-url\")) {\n markdown::PLAYGROUND.with(|s| { *s.borrow_mut() = Some((None, playground)); });\n }\n\n let mut out = match File::create(&output) {\n Err(e) => {\n let _ = writeln!(&mut io::stderr(),\n \"rustdoc: {}: {}\",\n output.display(), e);\n return 4;\n }\n Ok(f) => f\n };\n\n let (metadata, text) = extract_leading_metadata(&input_str);\n if metadata.is_empty() {\n let _ = writeln!(\n &mut io::stderr(),\n \"rustdoc: invalid markdown file: expecting initial line with `% ...TITLE...`\"\n );\n return 5;\n }\n let title = metadata[0];\n\n reset_ids(false);\n\n let rendered = if include_toc {\n format!(\"{}\", MarkdownWithToc(text))\n } else {\n format!(\"{}\", Markdown(text))\n };\n\n let err = write!(\n &mut out,\n r#\"\n\n\n \n \n \n {title}<\/title>\n\n {css}\n {in_header}\n<\/head>\n<body class=\"rustdoc\">\n <!--[if lte IE 8]>\n <div class=\"warning\">\n This old browser is unsupported and will most likely display funky\n things.\n <\/div>\n <![endif]-->\n\n {before_content}\n <h1 class=\"title\">{title}<\/h1>\n {text}\n {after_content}\n<\/body>\n<\/html>\"#,\n title = Escape(title),\n css = css,\n in_header = external_html.in_header,\n before_content = external_html.before_content,\n text = rendered,\n after_content = external_html.after_content,\n );\n\n match err {\n Err(e) => {\n let _ = writeln!(&mut io::stderr(),\n \"rustdoc: cannot write to `{}`: {}\",\n output.display(), e);\n 6\n }\n Ok(_) => 0\n }\n}\n\n\/\/\/ Run any tests\/code examples in the markdown file `input`.\npub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,\n mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>) -> isize {\n let input_str = match load_string(input) {\n Ok(s) => s,\n Err(LoadStringError::ReadFail) => return 1,\n Err(LoadStringError::BadUtf8) => return 2,\n };\n\n let mut opts = TestOptions::default();\n opts.no_crate_inject = true;\n let mut collector = Collector::new(input.to_string(), cfgs, libs, externs,\n true, opts, maybe_sysroot, None,\n Some(input.to_owned()));\n find_testable_code(&input_str, &mut collector, DUMMY_SP);\n test_args.insert(0, \"rustdoctest\".to_string());\n testing::test_main(&test_args, collector.tests);\n 0\n}\n<commit_msg>Rollup merge of #40828 - projektir:markdown_metadata, r=steveklabnik<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::default::Default;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::io;\nuse std::path::{PathBuf, Path};\n\nuse getopts;\nuse testing;\nuse rustc::session::search_paths::SearchPaths;\nuse rustc::session::config::Externs;\nuse syntax::codemap::DUMMY_SP;\n\nuse externalfiles::{ExternalHtml, LoadStringError, load_string};\n\nuse html::render::reset_ids;\nuse html::escape::Escape;\nuse html::markdown;\nuse html::markdown::{Markdown, MarkdownWithToc, find_testable_code};\nuse test::{TestOptions, Collector};\n\n\/\/\/ Separate any lines at the start of the file that begin with `# ` or `%`.\nfn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {\n let mut metadata = Vec::new();\n let mut count = 0;\n\n for line in s.lines() {\n if line.starts_with(\"# \") || line.starts_with(\"%\") {\n \/\/ trim the whitespace after the symbol\n metadata.push(line[1..].trim_left());\n count += line.len() + 1;\n } else {\n return (metadata, &s[count..]);\n }\n }\n\n \/\/ if we're here, then all lines were metadata `# ` or `%` lines.\n (metadata, \"\")\n}\n\n\/\/\/ Render `input` (e.g. \"foo.md\") into an HTML file in `output`\n\/\/\/ (e.g. output = \"bar\" => \"bar\/foo.html\").\npub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,\n external_html: &ExternalHtml, include_toc: bool) -> isize {\n let input_p = Path::new(input);\n output.push(input_p.file_stem().unwrap());\n output.set_extension(\"html\");\n\n let mut css = String::new();\n for name in &matches.opt_strs(\"markdown-css\") {\n let s = format!(\"<link rel=\\\"stylesheet\\\" type=\\\"text\/css\\\" href=\\\"{}\\\">\\n\", name);\n css.push_str(&s)\n }\n\n let input_str = match load_string(input) {\n Ok(s) => s,\n Err(LoadStringError::ReadFail) => return 1,\n Err(LoadStringError::BadUtf8) => return 2,\n };\n if let Some(playground) = matches.opt_str(\"markdown-playground-url\").or(\n matches.opt_str(\"playground-url\")) {\n markdown::PLAYGROUND.with(|s| { *s.borrow_mut() = Some((None, playground)); });\n }\n\n let mut out = match File::create(&output) {\n Err(e) => {\n let _ = writeln!(&mut io::stderr(),\n \"rustdoc: {}: {}\",\n output.display(), e);\n return 4;\n }\n Ok(f) => f\n };\n\n let (metadata, text) = extract_leading_metadata(&input_str);\n if metadata.is_empty() {\n let _ = writeln!(\n &mut io::stderr(),\n \"rustdoc: invalid markdown file: no initial lines starting with `# ` or `%`\"\n );\n return 5;\n }\n let title = metadata[0];\n\n reset_ids(false);\n\n let rendered = if include_toc {\n format!(\"{}\", MarkdownWithToc(text))\n } else {\n format!(\"{}\", Markdown(text))\n };\n\n let err = write!(\n &mut out,\n r#\"<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <meta name=\"generator\" content=\"rustdoc\">\n <title>{title}<\/title>\n\n {css}\n {in_header}\n<\/head>\n<body class=\"rustdoc\">\n <!--[if lte IE 8]>\n <div class=\"warning\">\n This old browser is unsupported and will most likely display funky\n things.\n <\/div>\n <![endif]-->\n\n {before_content}\n <h1 class=\"title\">{title}<\/h1>\n {text}\n {after_content}\n<\/body>\n<\/html>\"#,\n title = Escape(title),\n css = css,\n in_header = external_html.in_header,\n before_content = external_html.before_content,\n text = rendered,\n after_content = external_html.after_content,\n );\n\n match err {\n Err(e) => {\n let _ = writeln!(&mut 
io::stderr(),\n \"rustdoc: cannot write to `{}`: {}\",\n output.display(), e);\n 6\n }\n Ok(_) => 0\n }\n}\n\n\/\/\/ Run any tests\/code examples in the markdown file `input`.\npub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,\n mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>) -> isize {\n let input_str = match load_string(input) {\n Ok(s) => s,\n Err(LoadStringError::ReadFail) => return 1,\n Err(LoadStringError::BadUtf8) => return 2,\n };\n\n let mut opts = TestOptions::default();\n opts.no_crate_inject = true;\n let mut collector = Collector::new(input.to_string(), cfgs, libs, externs,\n true, opts, maybe_sysroot, None,\n Some(input.to_owned()));\n find_testable_code(&input_str, &mut collector, DUMMY_SP);\n test_args.insert(0, \"rustdoctest\".to_string());\n testing::test_main(&test_args, collector.tests);\n 0\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for unix systems\n\n#![allow(unused_imports)] \/\/ lots of cfg code here\n\nuse os::unix::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io::{self, Read, Write};\nuse iter;\nuse marker::PhantomData;\nuse mem;\nuse memchr;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse str;\nuse sys_common::mutex::Mutex;\nuse sys::{cvt, fd, syscall};\nuse vec;\n\nconst TMPBUF_SZ: usize = 128;\nstatic ENV_LOCK: Mutex = Mutex::new();\n\nextern {\n #[link_name = \"__errno_location\"]\n fn errno_location() -> *mut i32;\n}\n\n\/\/\/ Returns the platform-specific value of errno\npub fn errno() -> i32 {\n unsafe {\n (*errno_location())\n }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errno: i32) -> String {\n if let Some(string) = syscall::STR_ERROR.get(errno as usize) {\n string.to_string()\n } else {\n \"unknown error\".to_string()\n }\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n let mut buf = [0; 4096];\n let count = cvt(syscall::getcwd(&mut buf))?;\n Ok(PathBuf::from(OsString::from_vec(buf[.. 
count].to_vec())))\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n cvt(syscall::chdir(p.to_str().unwrap())).and(Ok(()))\n}\n\npub struct SplitPaths<'a> {\n iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,\n fn(&'a [u8]) -> PathBuf>,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n fn bytes_to_path(b: &[u8]) -> PathBuf {\n PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))\n }\n fn is_colon(b: &u8) -> bool { *b == b':' }\n let unparsed = unparsed.as_bytes();\n SplitPaths {\n iter: unparsed.split(is_colon as fn(&u8) -> bool)\n .map(bytes_to_path as fn(&[u8]) -> PathBuf)\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b':';\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref().as_bytes();\n if i > 0 { joined.push(sep) }\n if path.contains(&sep) {\n return Err(JoinPathsError)\n }\n joined.extend_from_slice(path);\n }\n Ok(OsStringExt::from_vec(joined))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains separator `:`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n use fs::File;\n\n let mut file = File::open(\"sys:exe\")?;\n\n let mut path = String::new();\n file.read_to_string(&mut path)?;\n\n if path.ends_with('\\n') {\n path.pop();\n }\n\n Ok(PathBuf::from(path))\n}\n\npub struct Env {\n iter: vec::IntoIter<(OsString, OsString)>,\n _dont_send_or_sync_me: PhantomData<*mut ()>,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n\/\/\/ Returns a vector of (variable, value) byte-vector pairs for all the\n\/\/\/ environment variables of the current process.\npub fn env() -> Env {\n let mut variables: Vec<(OsString, OsString)> = Vec::new();\n if let Ok(mut file) = ::fs::File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n let mut parts = line.splitn(2, '=');\n if let Some(name) = parts.next() {\n let value = parts.next().unwrap_or(\"\");\n variables.push((OsString::from(name.to_string()),\n OsString::from(value.to_string())));\n }\n }\n }\n }\n Env { iter: variables.into_iter(), _dont_send_or_sync_me: PhantomData }\n}\n\npub fn getenv(key: &OsStr) -> io::Result<Option<OsString>> {\n if ! key.is_empty() {\n if let Ok(mut file) = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap())) {\n let mut string = String::new();\n file.read_to_string(&mut string)?;\n Ok(Some(OsString::from(string)))\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n}\n\npub fn setenv(key: &OsStr, value: &OsStr) -> io::Result<()> {\n if ! 
key.is_empty() {\n let mut file = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n file.write_all(value.as_bytes())?;\n file.set_len(value.len() as u64)?;\n }\n Ok(())\n}\n\npub fn unsetenv(key: &OsStr) -> io::Result<()> {\n ::fs::remove_file(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n Ok(())\n}\n\npub fn page_size() -> usize {\n 4096\n}\n\npub fn temp_dir() -> PathBuf {\n ::env::var_os(\"TMPDIR\").map(PathBuf::from).unwrap_or_else(|| {\n PathBuf::from(\"\/tmp\")\n })\n}\n\npub fn home_dir() -> Option<PathBuf> {\n return ::env::var_os(\"HOME\").map(PathBuf::from);\n}\n\npub fn exit(code: i32) -> ! {\n let _ = syscall::exit(code as usize);\n unreachable!();\n}\n<commit_msg>Rollup merge of #42783 - ids1024:redox-env, r=sfackler<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for unix systems\n\n#![allow(unused_imports)] \/\/ lots of cfg code here\n\nuse os::unix::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io::{self, Read, Write};\nuse iter;\nuse marker::PhantomData;\nuse mem;\nuse memchr;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse str;\nuse sys_common::mutex::Mutex;\nuse sys::{cvt, fd, syscall};\nuse vec;\n\nconst TMPBUF_SZ: usize = 128;\nstatic ENV_LOCK: Mutex = Mutex::new();\n\nextern {\n #[link_name = \"__errno_location\"]\n fn errno_location() -> *mut i32;\n}\n\n\/\/\/ Returns the platform-specific value of errno\npub fn errno() -> i32 {\n unsafe {\n (*errno_location())\n }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errno: i32) -> String {\n if let Some(string) = syscall::STR_ERROR.get(errno as usize) {\n string.to_string()\n } else {\n \"unknown error\".to_string()\n }\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n let mut buf = [0; 4096];\n let count = cvt(syscall::getcwd(&mut buf))?;\n Ok(PathBuf::from(OsString::from_vec(buf[.. 
count].to_vec())))\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n cvt(syscall::chdir(p.to_str().unwrap())).and(Ok(()))\n}\n\npub struct SplitPaths<'a> {\n iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,\n fn(&'a [u8]) -> PathBuf>,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n fn bytes_to_path(b: &[u8]) -> PathBuf {\n PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))\n }\n fn is_colon(b: &u8) -> bool { *b == b':' }\n let unparsed = unparsed.as_bytes();\n SplitPaths {\n iter: unparsed.split(is_colon as fn(&u8) -> bool)\n .map(bytes_to_path as fn(&[u8]) -> PathBuf)\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b':';\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref().as_bytes();\n if i > 0 { joined.push(sep) }\n if path.contains(&sep) {\n return Err(JoinPathsError)\n }\n joined.extend_from_slice(path);\n }\n Ok(OsStringExt::from_vec(joined))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains separator `:`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n use fs::File;\n\n let mut file = File::open(\"sys:exe\")?;\n\n let mut path = String::new();\n file.read_to_string(&mut path)?;\n\n if path.ends_with('\\n') {\n path.pop();\n }\n\n Ok(PathBuf::from(path))\n}\n\npub struct Env {\n iter: vec::IntoIter<(OsString, OsString)>,\n _dont_send_or_sync_me: PhantomData<*mut ()>,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n\/\/\/ Returns a vector of (variable, value) byte-vector pairs for all the\n\/\/\/ environment variables of the current process.\npub fn env() -> Env {\n let mut variables: Vec<(OsString, OsString)> = Vec::new();\n if let Ok(mut file) = ::fs::File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n let mut parts = line.splitn(2, '=');\n if let Some(name) = parts.next() {\n let value = parts.next().unwrap_or(\"\");\n variables.push((OsString::from(name.to_string()),\n OsString::from(value.to_string())));\n }\n }\n }\n }\n Env { iter: variables.into_iter(), _dont_send_or_sync_me: PhantomData }\n}\n\npub fn getenv(key: &OsStr) -> io::Result<Option<OsString>> {\n if ! key.is_empty() {\n if let Ok(mut file) = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap())) {\n let mut string = String::new();\n file.read_to_string(&mut string)?;\n Ok(Some(OsString::from(string)))\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n}\n\npub fn setenv(key: &OsStr, value: &OsStr) -> io::Result<()> {\n if ! 
key.is_empty() {\n let mut file = ::fs::File::create(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n file.write_all(value.as_bytes())?;\n file.set_len(value.len() as u64)?;\n }\n Ok(())\n}\n\npub fn unsetenv(key: &OsStr) -> io::Result<()> {\n ::fs::remove_file(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n Ok(())\n}\n\npub fn page_size() -> usize {\n 4096\n}\n\npub fn temp_dir() -> PathBuf {\n ::env::var_os(\"TMPDIR\").map(PathBuf::from).unwrap_or_else(|| {\n PathBuf::from(\"\/tmp\")\n })\n}\n\npub fn home_dir() -> Option<PathBuf> {\n return ::env::var_os(\"HOME\").map(PathBuf::from);\n}\n\npub fn exit(code: i32) -> ! {\n let _ = syscall::exit(code as usize);\n unreachable!();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse driver::session;\nuse lib::llvm::{ContextRef, ModuleRef, ValueRef};\nuse lib::llvm::{llvm, TargetData, TypeNames};\nuse lib::llvm::mk_target_data;\nuse metadata::common::LinkMeta;\nuse middle::astencode;\nuse middle::resolve;\nuse middle::trans::adt;\nuse middle::trans::base;\nuse middle::trans::builder::Builder;\nuse middle::trans::debuginfo;\nuse middle::trans::common::{C_i32, C_null};\nuse middle::ty;\n\nuse middle::trans::type_::Type;\n\nuse util::sha2::Sha256;\n\nuse std::cell::{Cell, RefCell};\nuse std::c_str::ToCStr;\nuse std::hashmap::{HashMap, HashSet};\nuse std::local_data;\nuse std::libc::c_uint;\nuse syntax::ast;\n\nuse middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};\n\nuse middle::trans::base::{decl_crate_map};\n\npub struct CrateContext {\n sess: session::Session,\n llmod: ModuleRef,\n llcx: ContextRef,\n metadata_llmod: ModuleRef,\n td: TargetData,\n tn: TypeNames,\n externs: RefCell<ExternMap>,\n intrinsics: HashMap<&'static str, ValueRef>,\n item_vals: RefCell<HashMap<ast::NodeId, ValueRef>>,\n exp_map2: resolve::ExportMap2,\n reachable: @RefCell<HashSet<ast::NodeId>>,\n item_symbols: RefCell<HashMap<ast::NodeId, ~str>>,\n link_meta: LinkMeta,\n tydescs: RefCell<HashMap<ty::t, @tydesc_info>>,\n \/\/ Set when running emit_tydescs to enforce that no more tydescs are\n \/\/ created.\n finished_tydescs: Cell<bool>,\n \/\/ Track mapping of external ids to local items imported for inlining\n external: RefCell<HashMap<ast::DefId, Option<ast::NodeId>>>,\n \/\/ Backwards version of the `external` map (inlined items to where they\n \/\/ came from)\n external_srcs: RefCell<HashMap<ast::NodeId, ast::DefId>>,\n \/\/ A set of static items which cannot be inlined into other crates. 
This\n \/\/ will pevent in ii_item() structures from being encoded into the metadata\n \/\/ that is generated\n non_inlineable_statics: RefCell<HashSet<ast::NodeId>>,\n \/\/ Cache instances of monomorphized functions\n monomorphized: RefCell<HashMap<mono_id, ValueRef>>,\n monomorphizing: RefCell<HashMap<ast::DefId, uint>>,\n \/\/ Cache generated vtables\n vtables: RefCell<HashMap<(ty::t, mono_id), ValueRef>>,\n \/\/ Cache of constant strings,\n const_cstr_cache: RefCell<HashMap<@str, ValueRef>>,\n\n \/\/ Reverse-direction for const ptrs cast from globals.\n \/\/ Key is an int, cast from a ValueRef holding a *T,\n \/\/ Val is a ValueRef holding a *[T].\n \/\/\n \/\/ Needed because LLVM loses pointer->pointee association\n \/\/ when we ptrcast, and we have to ptrcast during translation\n \/\/ of a [T] const because we form a slice, a [*T,int] pair, not\n \/\/ a pointer to an LLVM array type.\n const_globals: RefCell<HashMap<int, ValueRef>>,\n\n \/\/ Cache of emitted const values\n const_values: RefCell<HashMap<ast::NodeId, ValueRef>>,\n\n \/\/ Cache of external const values\n extern_const_values: RefCell<HashMap<ast::DefId, ValueRef>>,\n\n impl_method_cache: RefCell<HashMap<(ast::DefId, ast::Name), ast::DefId>>,\n\n module_data: RefCell<HashMap<~str, ValueRef>>,\n lltypes: RefCell<HashMap<ty::t, Type>>,\n llsizingtypes: RefCell<HashMap<ty::t, Type>>,\n adt_reprs: RefCell<HashMap<ty::t, @adt::Repr>>,\n symbol_hasher: RefCell<Sha256>,\n type_hashcodes: RefCell<HashMap<ty::t, @str>>,\n all_llvm_symbols: RefCell<HashSet<@str>>,\n tcx: ty::ctxt,\n maps: astencode::Maps,\n stats: @mut Stats,\n tydesc_type: Type,\n int_type: Type,\n opaque_vec_type: Type,\n builder: BuilderRef_res,\n crate_map: ValueRef,\n crate_map_name: ~str,\n \/\/ Set when at least one function uses GC. 
Needed so that\n \/\/ decl_gc_metadata knows whether to link to the module metadata, which\n \/\/ is not emitted by LLVM's GC pass when no functions use GC.\n uses_gc: bool,\n dbg_cx: Option<debuginfo::CrateDebugContext>,\n do_not_commit_warning_issued: Cell<bool>,\n}\n\nimpl CrateContext {\n pub fn new(sess: session::Session,\n name: &str,\n tcx: ty::ctxt,\n emap2: resolve::ExportMap2,\n maps: astencode::Maps,\n symbol_hasher: Sha256,\n link_meta: LinkMeta,\n reachable: @RefCell<HashSet<ast::NodeId>>)\n -> CrateContext {\n unsafe {\n let llcx = llvm::LLVMContextCreate();\n set_task_llcx(llcx);\n let llmod = name.with_c_str(|buf| {\n llvm::LLVMModuleCreateWithNameInContext(buf, llcx)\n });\n let metadata_llmod = format!(\"{}_metadata\", name).with_c_str(|buf| {\n llvm::LLVMModuleCreateWithNameInContext(buf, llcx)\n });\n let data_layout: &str = sess.targ_cfg.target_strs.data_layout;\n let targ_triple: &str = sess.targ_cfg.target_strs.target_triple;\n data_layout.with_c_str(|buf| {\n llvm::LLVMSetDataLayout(llmod, buf);\n llvm::LLVMSetDataLayout(metadata_llmod, buf);\n });\n targ_triple.with_c_str(|buf| {\n llvm::LLVMRustSetNormalizedTarget(llmod, buf);\n llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf);\n });\n let targ_cfg = sess.targ_cfg;\n\n let td = mk_target_data(sess.targ_cfg.target_strs.data_layout);\n let tn = TypeNames::new();\n\n let mut intrinsics = base::declare_intrinsics(llmod);\n if sess.opts.extra_debuginfo {\n base::declare_dbg_intrinsics(llmod, &mut intrinsics);\n }\n let int_type = Type::int(targ_cfg.arch);\n let tydesc_type = Type::tydesc(targ_cfg.arch);\n let opaque_vec_type = Type::opaque_vec(targ_cfg.arch);\n\n let mut str_slice_ty = Type::named_struct(\"str_slice\");\n str_slice_ty.set_struct_body([Type::i8p(), int_type], false);\n\n tn.associate_type(\"tydesc\", &tydesc_type);\n tn.associate_type(\"str_slice\", &str_slice_ty);\n\n let (crate_map_name, crate_map) = decl_crate_map(sess, link_meta.clone(), llmod);\n let dbg_cx = if sess.opts.debuginfo {\n Some(debuginfo::CrateDebugContext::new(llmod, name.to_owned()))\n } else {\n None\n };\n\n if sess.count_llvm_insns() {\n base::init_insn_ctxt()\n }\n\n CrateContext {\n sess: sess,\n llmod: llmod,\n llcx: llcx,\n metadata_llmod: metadata_llmod,\n td: td,\n tn: tn,\n externs: RefCell::new(HashMap::new()),\n intrinsics: intrinsics,\n item_vals: RefCell::new(HashMap::new()),\n exp_map2: emap2,\n reachable: reachable,\n item_symbols: RefCell::new(HashMap::new()),\n link_meta: link_meta,\n tydescs: RefCell::new(HashMap::new()),\n finished_tydescs: Cell::new(false),\n external: RefCell::new(HashMap::new()),\n external_srcs: RefCell::new(HashMap::new()),\n non_inlineable_statics: RefCell::new(HashSet::new()),\n monomorphized: RefCell::new(HashMap::new()),\n monomorphizing: RefCell::new(HashMap::new()),\n vtables: RefCell::new(HashMap::new()),\n const_cstr_cache: RefCell::new(HashMap::new()),\n const_globals: RefCell::new(HashMap::new()),\n const_values: RefCell::new(HashMap::new()),\n extern_const_values: RefCell::new(HashMap::new()),\n impl_method_cache: RefCell::new(HashMap::new()),\n module_data: RefCell::new(HashMap::new()),\n lltypes: RefCell::new(HashMap::new()),\n llsizingtypes: RefCell::new(HashMap::new()),\n adt_reprs: RefCell::new(HashMap::new()),\n symbol_hasher: RefCell::new(symbol_hasher),\n type_hashcodes: RefCell::new(HashMap::new()),\n all_llvm_symbols: RefCell::new(HashSet::new()),\n tcx: tcx,\n maps: maps,\n stats: @mut Stats {\n n_static_tydescs: Cell::new(0u),\n n_glues_created: Cell::new(0u),\n 
n_null_glues: Cell::new(0u),\n n_real_glues: Cell::new(0u),\n n_fns: Cell::new(0u),\n n_monos: Cell::new(0u),\n n_inlines: Cell::new(0u),\n n_closures: Cell::new(0u),\n n_llvm_insns: Cell::new(0u),\n llvm_insns: RefCell::new(HashMap::new()),\n fn_stats: RefCell::new(~[]),\n },\n tydesc_type: tydesc_type,\n int_type: int_type,\n opaque_vec_type: opaque_vec_type,\n builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),\n crate_map: crate_map,\n crate_map_name: crate_map_name,\n uses_gc: false,\n dbg_cx: dbg_cx,\n do_not_commit_warning_issued: Cell::new(false),\n }\n }\n }\n\n pub fn builder(@self) -> Builder {\n Builder::new(self)\n }\n\n pub fn const_inbounds_gepi(&self,\n pointer: ValueRef,\n indices: &[uint]) -> ValueRef {\n debug!(\"const_inbounds_gepi: pointer={} indices={:?}\",\n self.tn.val_to_str(pointer), indices);\n let v: ~[ValueRef] =\n indices.iter().map(|i| C_i32(*i as i32)).collect();\n unsafe {\n llvm::LLVMConstInBoundsGEP(pointer,\n v.as_ptr(),\n indices.len() as c_uint)\n }\n }\n\n pub fn offsetof_gep(&self,\n llptr_ty: Type,\n indices: &[uint]) -> ValueRef {\n \/*!\n * Returns the offset of applying the given GEP indices\n * to an instance of `llptr_ty`. Similar to `offsetof` in C,\n * except that `llptr_ty` must be a pointer type.\n *\/\n\n unsafe {\n let null = C_null(llptr_ty);\n llvm::LLVMConstPtrToInt(self.const_inbounds_gepi(null, indices),\n self.int_type.to_ref())\n }\n }\n}\n\n#[unsafe_destructor]\nimpl Drop for CrateContext {\n fn drop(&mut self) {\n unset_task_llcx();\n }\n}\n\nlocal_data_key!(task_local_llcx_key: @ContextRef)\n\npub fn task_llcx() -> ContextRef {\n let opt = local_data::get(task_local_llcx_key, |k| k.map(|k| *k));\n *opt.expect(\"task-local LLVMContextRef wasn't ever set!\")\n}\n\nfn set_task_llcx(c: ContextRef) {\n local_data::set(task_local_llcx_key, @c);\n}\n\nfn unset_task_llcx() {\n local_data::pop(task_local_llcx_key);\n}\n<commit_msg>librustc: De-`@mut` the translation stats<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse driver::session;\nuse lib::llvm::{ContextRef, ModuleRef, ValueRef};\nuse lib::llvm::{llvm, TargetData, TypeNames};\nuse lib::llvm::mk_target_data;\nuse metadata::common::LinkMeta;\nuse middle::astencode;\nuse middle::resolve;\nuse middle::trans::adt;\nuse middle::trans::base;\nuse middle::trans::builder::Builder;\nuse middle::trans::debuginfo;\nuse middle::trans::common::{C_i32, C_null};\nuse middle::ty;\n\nuse middle::trans::type_::Type;\n\nuse util::sha2::Sha256;\n\nuse std::cell::{Cell, RefCell};\nuse std::c_str::ToCStr;\nuse std::hashmap::{HashMap, HashSet};\nuse std::local_data;\nuse std::libc::c_uint;\nuse syntax::ast;\n\nuse middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};\n\nuse middle::trans::base::{decl_crate_map};\n\npub struct CrateContext {\n sess: session::Session,\n llmod: ModuleRef,\n llcx: ContextRef,\n metadata_llmod: ModuleRef,\n td: TargetData,\n tn: TypeNames,\n externs: RefCell<ExternMap>,\n intrinsics: HashMap<&'static str, ValueRef>,\n item_vals: RefCell<HashMap<ast::NodeId, ValueRef>>,\n exp_map2: resolve::ExportMap2,\n reachable: @RefCell<HashSet<ast::NodeId>>,\n item_symbols: RefCell<HashMap<ast::NodeId, ~str>>,\n link_meta: LinkMeta,\n tydescs: RefCell<HashMap<ty::t, @tydesc_info>>,\n \/\/ Set when running emit_tydescs to enforce that no more tydescs are\n \/\/ created.\n finished_tydescs: Cell<bool>,\n \/\/ Track mapping of external ids to local items imported for inlining\n external: RefCell<HashMap<ast::DefId, Option<ast::NodeId>>>,\n \/\/ Backwards version of the `external` map (inlined items to where they\n \/\/ came from)\n external_srcs: RefCell<HashMap<ast::NodeId, ast::DefId>>,\n \/\/ A set of static items which cannot be inlined into other crates. 
This\n \/\/ will pevent in ii_item() structures from being encoded into the metadata\n \/\/ that is generated\n non_inlineable_statics: RefCell<HashSet<ast::NodeId>>,\n \/\/ Cache instances of monomorphized functions\n monomorphized: RefCell<HashMap<mono_id, ValueRef>>,\n monomorphizing: RefCell<HashMap<ast::DefId, uint>>,\n \/\/ Cache generated vtables\n vtables: RefCell<HashMap<(ty::t, mono_id), ValueRef>>,\n \/\/ Cache of constant strings,\n const_cstr_cache: RefCell<HashMap<@str, ValueRef>>,\n\n \/\/ Reverse-direction for const ptrs cast from globals.\n \/\/ Key is an int, cast from a ValueRef holding a *T,\n \/\/ Val is a ValueRef holding a *[T].\n \/\/\n \/\/ Needed because LLVM loses pointer->pointee association\n \/\/ when we ptrcast, and we have to ptrcast during translation\n \/\/ of a [T] const because we form a slice, a [*T,int] pair, not\n \/\/ a pointer to an LLVM array type.\n const_globals: RefCell<HashMap<int, ValueRef>>,\n\n \/\/ Cache of emitted const values\n const_values: RefCell<HashMap<ast::NodeId, ValueRef>>,\n\n \/\/ Cache of external const values\n extern_const_values: RefCell<HashMap<ast::DefId, ValueRef>>,\n\n impl_method_cache: RefCell<HashMap<(ast::DefId, ast::Name), ast::DefId>>,\n\n module_data: RefCell<HashMap<~str, ValueRef>>,\n lltypes: RefCell<HashMap<ty::t, Type>>,\n llsizingtypes: RefCell<HashMap<ty::t, Type>>,\n adt_reprs: RefCell<HashMap<ty::t, @adt::Repr>>,\n symbol_hasher: RefCell<Sha256>,\n type_hashcodes: RefCell<HashMap<ty::t, @str>>,\n all_llvm_symbols: RefCell<HashSet<@str>>,\n tcx: ty::ctxt,\n maps: astencode::Maps,\n stats: @Stats,\n tydesc_type: Type,\n int_type: Type,\n opaque_vec_type: Type,\n builder: BuilderRef_res,\n crate_map: ValueRef,\n crate_map_name: ~str,\n \/\/ Set when at least one function uses GC. 
Needed so that\n \/\/ decl_gc_metadata knows whether to link to the module metadata, which\n \/\/ is not emitted by LLVM's GC pass when no functions use GC.\n uses_gc: bool,\n dbg_cx: Option<debuginfo::CrateDebugContext>,\n do_not_commit_warning_issued: Cell<bool>,\n}\n\nimpl CrateContext {\n pub fn new(sess: session::Session,\n name: &str,\n tcx: ty::ctxt,\n emap2: resolve::ExportMap2,\n maps: astencode::Maps,\n symbol_hasher: Sha256,\n link_meta: LinkMeta,\n reachable: @RefCell<HashSet<ast::NodeId>>)\n -> CrateContext {\n unsafe {\n let llcx = llvm::LLVMContextCreate();\n set_task_llcx(llcx);\n let llmod = name.with_c_str(|buf| {\n llvm::LLVMModuleCreateWithNameInContext(buf, llcx)\n });\n let metadata_llmod = format!(\"{}_metadata\", name).with_c_str(|buf| {\n llvm::LLVMModuleCreateWithNameInContext(buf, llcx)\n });\n let data_layout: &str = sess.targ_cfg.target_strs.data_layout;\n let targ_triple: &str = sess.targ_cfg.target_strs.target_triple;\n data_layout.with_c_str(|buf| {\n llvm::LLVMSetDataLayout(llmod, buf);\n llvm::LLVMSetDataLayout(metadata_llmod, buf);\n });\n targ_triple.with_c_str(|buf| {\n llvm::LLVMRustSetNormalizedTarget(llmod, buf);\n llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf);\n });\n let targ_cfg = sess.targ_cfg;\n\n let td = mk_target_data(sess.targ_cfg.target_strs.data_layout);\n let tn = TypeNames::new();\n\n let mut intrinsics = base::declare_intrinsics(llmod);\n if sess.opts.extra_debuginfo {\n base::declare_dbg_intrinsics(llmod, &mut intrinsics);\n }\n let int_type = Type::int(targ_cfg.arch);\n let tydesc_type = Type::tydesc(targ_cfg.arch);\n let opaque_vec_type = Type::opaque_vec(targ_cfg.arch);\n\n let mut str_slice_ty = Type::named_struct(\"str_slice\");\n str_slice_ty.set_struct_body([Type::i8p(), int_type], false);\n\n tn.associate_type(\"tydesc\", &tydesc_type);\n tn.associate_type(\"str_slice\", &str_slice_ty);\n\n let (crate_map_name, crate_map) = decl_crate_map(sess, link_meta.clone(), llmod);\n let dbg_cx = if sess.opts.debuginfo {\n Some(debuginfo::CrateDebugContext::new(llmod, name.to_owned()))\n } else {\n None\n };\n\n if sess.count_llvm_insns() {\n base::init_insn_ctxt()\n }\n\n CrateContext {\n sess: sess,\n llmod: llmod,\n llcx: llcx,\n metadata_llmod: metadata_llmod,\n td: td,\n tn: tn,\n externs: RefCell::new(HashMap::new()),\n intrinsics: intrinsics,\n item_vals: RefCell::new(HashMap::new()),\n exp_map2: emap2,\n reachable: reachable,\n item_symbols: RefCell::new(HashMap::new()),\n link_meta: link_meta,\n tydescs: RefCell::new(HashMap::new()),\n finished_tydescs: Cell::new(false),\n external: RefCell::new(HashMap::new()),\n external_srcs: RefCell::new(HashMap::new()),\n non_inlineable_statics: RefCell::new(HashSet::new()),\n monomorphized: RefCell::new(HashMap::new()),\n monomorphizing: RefCell::new(HashMap::new()),\n vtables: RefCell::new(HashMap::new()),\n const_cstr_cache: RefCell::new(HashMap::new()),\n const_globals: RefCell::new(HashMap::new()),\n const_values: RefCell::new(HashMap::new()),\n extern_const_values: RefCell::new(HashMap::new()),\n impl_method_cache: RefCell::new(HashMap::new()),\n module_data: RefCell::new(HashMap::new()),\n lltypes: RefCell::new(HashMap::new()),\n llsizingtypes: RefCell::new(HashMap::new()),\n adt_reprs: RefCell::new(HashMap::new()),\n symbol_hasher: RefCell::new(symbol_hasher),\n type_hashcodes: RefCell::new(HashMap::new()),\n all_llvm_symbols: RefCell::new(HashSet::new()),\n tcx: tcx,\n maps: maps,\n stats: @Stats {\n n_static_tydescs: Cell::new(0u),\n n_glues_created: Cell::new(0u),\n 
n_null_glues: Cell::new(0u),\n n_real_glues: Cell::new(0u),\n n_fns: Cell::new(0u),\n n_monos: Cell::new(0u),\n n_inlines: Cell::new(0u),\n n_closures: Cell::new(0u),\n n_llvm_insns: Cell::new(0u),\n llvm_insns: RefCell::new(HashMap::new()),\n fn_stats: RefCell::new(~[]),\n },\n tydesc_type: tydesc_type,\n int_type: int_type,\n opaque_vec_type: opaque_vec_type,\n builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),\n crate_map: crate_map,\n crate_map_name: crate_map_name,\n uses_gc: false,\n dbg_cx: dbg_cx,\n do_not_commit_warning_issued: Cell::new(false),\n }\n }\n }\n\n pub fn builder(@self) -> Builder {\n Builder::new(self)\n }\n\n pub fn const_inbounds_gepi(&self,\n pointer: ValueRef,\n indices: &[uint]) -> ValueRef {\n debug!(\"const_inbounds_gepi: pointer={} indices={:?}\",\n self.tn.val_to_str(pointer), indices);\n let v: ~[ValueRef] =\n indices.iter().map(|i| C_i32(*i as i32)).collect();\n unsafe {\n llvm::LLVMConstInBoundsGEP(pointer,\n v.as_ptr(),\n indices.len() as c_uint)\n }\n }\n\n pub fn offsetof_gep(&self,\n llptr_ty: Type,\n indices: &[uint]) -> ValueRef {\n \/*!\n * Returns the offset of applying the given GEP indices\n * to an instance of `llptr_ty`. Similar to `offsetof` in C,\n * except that `llptr_ty` must be a pointer type.\n *\/\n\n unsafe {\n let null = C_null(llptr_ty);\n llvm::LLVMConstPtrToInt(self.const_inbounds_gepi(null, indices),\n self.int_type.to_ref())\n }\n }\n}\n\n#[unsafe_destructor]\nimpl Drop for CrateContext {\n fn drop(&mut self) {\n unset_task_llcx();\n }\n}\n\nlocal_data_key!(task_local_llcx_key: @ContextRef)\n\npub fn task_llcx() -> ContextRef {\n let opt = local_data::get(task_local_llcx_key, |k| k.map(|k| *k));\n *opt.expect(\"task-local LLVMContextRef wasn't ever set!\")\n}\n\nfn set_task_llcx(c: ContextRef) {\n local_data::set(task_local_llcx_key, @c);\n}\n\nfn unset_task_llcx() {\n local_data::pop(task_local_llcx_key);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ no-pretty-expanded unnecessary unsafe block generated\n\n#![deny(warnings)]\n#![allow(unused_must_use)]\n#![allow(unused_features)]\n#![feature(box_syntax)]\n#![feature(question_mark)]\n\nuse std::fmt::{self, Write};\nuse std::usize;\n\nstruct A;\nstruct B;\nstruct C;\nstruct D;\n\nimpl fmt::LowerHex for A {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"aloha\")\n }\n}\nimpl fmt::UpperHex for B {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"adios\")\n }\n}\nimpl fmt::Display for C {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad_integral(true, \"☃\", \"123\")\n }\n}\nimpl fmt::Binary for D {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"aa\")?;\n f.write_char('☃')?;\n f.write_str(\"bb\")\n }\n}\n\nmacro_rules! 
t {\n ($a:expr, $b:expr) => { assert_eq!($a, $b) }\n}\n\npub fn main() {\n \/\/ Various edge cases without formats\n t!(format!(\"\"), \"\");\n t!(format!(\"hello\"), \"hello\");\n t!(format!(\"hello {{\"), \"hello {\");\n\n \/\/ default formatters should work\n t!(format!(\"{}\", 1.0f32), \"1\");\n t!(format!(\"{}\", 1.0f64), \"1\");\n t!(format!(\"{}\", \"a\"), \"a\");\n t!(format!(\"{}\", \"a\".to_string()), \"a\");\n t!(format!(\"{}\", false), \"false\");\n t!(format!(\"{}\", 'a'), \"a\");\n\n \/\/ At least exercise all the formats\n t!(format!(\"{}\", true), \"true\");\n t!(format!(\"{}\", '☃'), \"☃\");\n t!(format!(\"{}\", 10), \"10\");\n t!(format!(\"{}\", 10_usize), \"10\");\n t!(format!(\"{:?}\", '☃'), \"'\\\\u{2603}'\");\n t!(format!(\"{:?}\", 10), \"10\");\n t!(format!(\"{:?}\", 10_usize), \"10\");\n t!(format!(\"{:?}\", \"true\"), \"\\\"true\\\"\");\n t!(format!(\"{:?}\", \"foo\\nbar\"), \"\\\"foo\\\\nbar\\\"\");\n t!(format!(\"{:?}\", \"foo\\n\\\"bar\\\"\\r\\n\\'baz\\'\\t\\\\qux\\\\\"),\n r#\"\"foo\\n\\\"bar\\\"\\r\\n\\'baz\\'\\t\\\\qux\\\\\"\"#);\n t!(format!(\"{:?}\", \"foo\\0bar\\x01baz\\u{3b1}q\\u{75}x\"),\n r#\"\"foo\\u{0}bar\\u{1}baz\\u{3b1}qux\"\"#);\n t!(format!(\"{:o}\", 10_usize), \"12\");\n t!(format!(\"{:x}\", 10_usize), \"a\");\n t!(format!(\"{:X}\", 10_usize), \"A\");\n t!(format!(\"{}\", \"foo\"), \"foo\");\n t!(format!(\"{}\", \"foo\".to_string()), \"foo\");\n if cfg!(target_pointer_width = \"32\") {\n t!(format!(\"{:#p}\", 0x1234 as *const isize), \"0x00001234\");\n t!(format!(\"{:#p}\", 0x1234 as *mut isize), \"0x00001234\");\n } else {\n t!(format!(\"{:#p}\", 0x1234 as *const isize), \"0x0000000000001234\");\n t!(format!(\"{:#p}\", 0x1234 as *mut isize), \"0x0000000000001234\");\n }\n t!(format!(\"{:p}\", 0x1234 as *const isize), \"0x1234\");\n t!(format!(\"{:p}\", 0x1234 as *mut isize), \"0x1234\");\n t!(format!(\"{:x}\", A), \"aloha\");\n t!(format!(\"{:X}\", B), \"adios\");\n t!(format!(\"foo {} ☃☃☃☃☃☃\", \"bar\"), \"foo bar ☃☃☃☃☃☃\");\n t!(format!(\"{1} {0}\", 0, 1), \"1 0\");\n t!(format!(\"{foo} {bar}\", foo=0, bar=1), \"0 1\");\n t!(format!(\"{foo} {1} {bar} {0}\", 0, 1, foo=2, bar=3), \"2 1 3 0\");\n t!(format!(\"{} {0}\", \"a\"), \"a a\");\n t!(format!(\"{foo_bar}\", foo_bar=1), \"1\");\n t!(format!(\"{}\", 5 + 5), \"10\");\n t!(format!(\"{:#4}\", C), \"☃123\");\n t!(format!(\"{:b}\", D), \"aa☃bb\");\n\n let a: &fmt::Debug = &1;\n t!(format!(\"{:?}\", a), \"1\");\n\n\n \/\/ Formatting strings and their arguments\n t!(format!(\"{}\", \"a\"), \"a\");\n t!(format!(\"{:4}\", \"a\"), \"a \");\n t!(format!(\"{:4}\", \"☃\"), \"☃ \");\n t!(format!(\"{:>4}\", \"a\"), \" a\");\n t!(format!(\"{:<4}\", \"a\"), \"a \");\n t!(format!(\"{:^5}\", \"a\"), \" a \");\n t!(format!(\"{:^5}\", \"aa\"), \" aa \");\n t!(format!(\"{:^4}\", \"a\"), \" a \");\n t!(format!(\"{:^4}\", \"aa\"), \" aa \");\n t!(format!(\"{:.4}\", \"a\"), \"a\");\n t!(format!(\"{:4.4}\", \"a\"), \"a \");\n t!(format!(\"{:4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:<4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:>4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:^4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:>10.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaa\"), \"aaa\");\n t!(format!(\"{:2.4}\", \"aa\"), \"aa\");\n t!(format!(\"{:2.4}\", \"a\"), \"a \");\n t!(format!(\"{:0>2}\", \"a\"), \"0a\");\n t!(format!(\"{:.*}\", 4, 
\"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:.1$}\", \"aaaaaaaaaaaaaaaaaa\", 4), \"aaaa\");\n t!(format!(\"{:.a$}\", \"aaaaaaaaaaaaaaaaaa\", a=4), \"aaaa\");\n t!(format!(\"{:1$}\", \"a\", 4), \"a \");\n t!(format!(\"{1:0$}\", 4, \"a\"), \"a \");\n t!(format!(\"{:a$}\", \"a\", a=4), \"a \");\n t!(format!(\"{:-#}\", \"a\"), \"a\");\n t!(format!(\"{:+#}\", \"a\"), \"a\");\n\n \/\/ Some float stuff\n t!(format!(\"{:}\", 1.0f32), \"1\");\n t!(format!(\"{:}\", 1.0f64), \"1\");\n t!(format!(\"{:.3}\", 1.0f64), \"1.000\");\n t!(format!(\"{:10.3}\", 1.0f64), \" 1.000\");\n t!(format!(\"{:+10.3}\", 1.0f64), \" +1.000\");\n t!(format!(\"{:+10.3}\", -1.0f64), \" -1.000\");\n\n t!(format!(\"{:e}\", 1.2345e6f32), \"1.2345e6\");\n t!(format!(\"{:e}\", 1.2345e6f64), \"1.2345e6\");\n t!(format!(\"{:E}\", 1.2345e6f64), \"1.2345E6\");\n t!(format!(\"{:.3e}\", 1.2345e6f64), \"1.234e6\");\n t!(format!(\"{:10.3e}\", 1.2345e6f64), \" 1.234e6\");\n t!(format!(\"{:+10.3e}\", 1.2345e6f64), \" +1.234e6\");\n t!(format!(\"{:+10.3e}\", -1.2345e6f64), \" -1.234e6\");\n\n \/\/ Float edge cases\n t!(format!(\"{}\", -0.0), \"0\");\n t!(format!(\"{:?}\", -0.0), \"-0\");\n t!(format!(\"{:?}\", 0.0), \"0\");\n\n\n \/\/ Test that pointers don't get truncated.\n {\n let val = usize::MAX;\n let exp = format!(\"{:#x}\", val);\n t!(format!(\"{:p}\", val as *const isize), exp);\n }\n\n \/\/ Escaping\n t!(format!(\"{{\"), \"{\");\n t!(format!(\"}}\"), \"}\");\n\n test_write();\n test_print();\n test_order();\n\n \/\/ make sure that format! doesn't move out of local variables\n let a: Box<_> = box 3;\n format!(\"{}\", a);\n format!(\"{}\", a);\n\n \/\/ make sure that format! doesn't cause spurious unused-unsafe warnings when\n \/\/ it's inside of an outer unsafe block\n unsafe {\n let a: isize = ::std::mem::transmute(3_usize);\n format!(\"{}\", a);\n }\n\n test_format_args();\n\n \/\/ test that trailing commas are acceptable\n format!(\"{}\", \"test\",);\n format!(\"{foo}\", foo=\"test\",);\n}\n\n\/\/ Basic test to make sure that we can invoke the `write!` macro with an\n\/\/ fmt::Write instance.\nfn test_write() {\n use std::fmt::Write;\n let mut buf = String::new();\n write!(&mut buf, \"{}\", 3);\n {\n let w = &mut buf;\n write!(w, \"{foo}\", foo=4);\n write!(w, \"{}\", \"hello\");\n writeln!(w, \"{}\", \"line\");\n writeln!(w, \"{foo}\", foo=\"bar\");\n w.write_char('☃');\n w.write_str(\"str\");\n }\n\n t!(buf, \"34helloline\\nbar\\n☃str\");\n}\n\n\/\/ Just make sure that the macros are defined, there's not really a lot that we\n\/\/ can do with them just yet (to test the output)\nfn test_print() {\n print!(\"hi\");\n print!(\"{:?}\", vec!(0u8));\n println!(\"hello\");\n println!(\"this is a {}\", \"test\");\n println!(\"{foo}\", foo=\"bar\");\n}\n\n\/\/ Just make sure that the macros are defined, there's not really a lot that we\n\/\/ can do with them just yet (to test the output)\nfn test_format_args() {\n use std::fmt::Write;\n let mut buf = String::new();\n {\n let w = &mut buf;\n write!(w, \"{}\", format_args!(\"{}\", 1));\n write!(w, \"{}\", format_args!(\"test\"));\n write!(w, \"{}\", format_args!(\"{test}\", test=3));\n }\n let s = buf;\n t!(s, \"1test3\");\n\n let s = fmt::format(format_args!(\"hello {}\", \"world\"));\n t!(s, \"hello world\");\n let s = format!(\"{}: {}\", \"args were\", format_args!(\"hello {}\", \"world\"));\n t!(s, \"args were: hello world\");\n}\n\nfn test_order() {\n \/\/ Make sure format!() arguments are always evaluated in a left-to-right\n \/\/ ordering\n fn foo() -> isize {\n static 
mut FOO: isize = 0;\n unsafe {\n FOO += 1;\n FOO\n }\n }\n assert_eq!(format!(\"{} {} {a} {b} {} {c}\",\n foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),\n \"1 2 4 5 3 6\".to_string());\n}\n<commit_msg>format: add tests for ergonomic format_args!<commit_after>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ no-pretty-expanded unnecessary unsafe block generated\n\n#![deny(warnings)]\n#![allow(unused_must_use)]\n#![allow(unused_features)]\n#![feature(box_syntax)]\n#![feature(question_mark)]\n\nuse std::fmt::{self, Write};\nuse std::usize;\n\nstruct A;\nstruct B;\nstruct C;\nstruct D;\n\nimpl fmt::LowerHex for A {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"aloha\")\n }\n}\nimpl fmt::UpperHex for B {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"adios\")\n }\n}\nimpl fmt::Display for C {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad_integral(true, \"☃\", \"123\")\n }\n}\nimpl fmt::Binary for D {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(\"aa\")?;\n f.write_char('☃')?;\n f.write_str(\"bb\")\n }\n}\n\nmacro_rules! t {\n ($a:expr, $b:expr) => { assert_eq!($a, $b) }\n}\n\npub fn main() {\n \/\/ Various edge cases without formats\n t!(format!(\"\"), \"\");\n t!(format!(\"hello\"), \"hello\");\n t!(format!(\"hello {{\"), \"hello {\");\n\n \/\/ default formatters should work\n t!(format!(\"{}\", 1.0f32), \"1\");\n t!(format!(\"{}\", 1.0f64), \"1\");\n t!(format!(\"{}\", \"a\"), \"a\");\n t!(format!(\"{}\", \"a\".to_string()), \"a\");\n t!(format!(\"{}\", false), \"false\");\n t!(format!(\"{}\", 'a'), \"a\");\n\n \/\/ At least exercise all the formats\n t!(format!(\"{}\", true), \"true\");\n t!(format!(\"{}\", '☃'), \"☃\");\n t!(format!(\"{}\", 10), \"10\");\n t!(format!(\"{}\", 10_usize), \"10\");\n t!(format!(\"{:?}\", '☃'), \"'\\\\u{2603}'\");\n t!(format!(\"{:?}\", 10), \"10\");\n t!(format!(\"{:?}\", 10_usize), \"10\");\n t!(format!(\"{:?}\", \"true\"), \"\\\"true\\\"\");\n t!(format!(\"{:?}\", \"foo\\nbar\"), \"\\\"foo\\\\nbar\\\"\");\n t!(format!(\"{:?}\", \"foo\\n\\\"bar\\\"\\r\\n\\'baz\\'\\t\\\\qux\\\\\"),\n r#\"\"foo\\n\\\"bar\\\"\\r\\n\\'baz\\'\\t\\\\qux\\\\\"\"#);\n t!(format!(\"{:?}\", \"foo\\0bar\\x01baz\\u{3b1}q\\u{75}x\"),\n r#\"\"foo\\u{0}bar\\u{1}baz\\u{3b1}qux\"\"#);\n t!(format!(\"{:o}\", 10_usize), \"12\");\n t!(format!(\"{:x}\", 10_usize), \"a\");\n t!(format!(\"{:X}\", 10_usize), \"A\");\n t!(format!(\"{}\", \"foo\"), \"foo\");\n t!(format!(\"{}\", \"foo\".to_string()), \"foo\");\n if cfg!(target_pointer_width = \"32\") {\n t!(format!(\"{:#p}\", 0x1234 as *const isize), \"0x00001234\");\n t!(format!(\"{:#p}\", 0x1234 as *mut isize), \"0x00001234\");\n } else {\n t!(format!(\"{:#p}\", 0x1234 as *const isize), \"0x0000000000001234\");\n t!(format!(\"{:#p}\", 0x1234 as *mut isize), \"0x0000000000001234\");\n }\n t!(format!(\"{:p}\", 0x1234 as *const isize), \"0x1234\");\n t!(format!(\"{:p}\", 0x1234 as *mut isize), \"0x1234\");\n t!(format!(\"{:x}\", A), \"aloha\");\n t!(format!(\"{:X}\", B), \"adios\");\n 
t!(format!(\"foo {} ☃☃☃☃☃☃\", \"bar\"), \"foo bar ☃☃☃☃☃☃\");\n t!(format!(\"{1} {0}\", 0, 1), \"1 0\");\n t!(format!(\"{foo} {bar}\", foo=0, bar=1), \"0 1\");\n t!(format!(\"{foo} {1} {bar} {0}\", 0, 1, foo=2, bar=3), \"2 1 3 0\");\n t!(format!(\"{} {0}\", \"a\"), \"a a\");\n t!(format!(\"{foo_bar}\", foo_bar=1), \"1\");\n t!(format!(\"{}\", 5 + 5), \"10\");\n t!(format!(\"{:#4}\", C), \"☃123\");\n t!(format!(\"{:b}\", D), \"aa☃bb\");\n\n let a: &fmt::Debug = &1;\n t!(format!(\"{:?}\", a), \"1\");\n\n\n \/\/ Formatting strings and their arguments\n t!(format!(\"{}\", \"a\"), \"a\");\n t!(format!(\"{:4}\", \"a\"), \"a \");\n t!(format!(\"{:4}\", \"☃\"), \"☃ \");\n t!(format!(\"{:>4}\", \"a\"), \" a\");\n t!(format!(\"{:<4}\", \"a\"), \"a \");\n t!(format!(\"{:^5}\", \"a\"), \" a \");\n t!(format!(\"{:^5}\", \"aa\"), \" aa \");\n t!(format!(\"{:^4}\", \"a\"), \" a \");\n t!(format!(\"{:^4}\", \"aa\"), \" aa \");\n t!(format!(\"{:.4}\", \"a\"), \"a\");\n t!(format!(\"{:4.4}\", \"a\"), \"a \");\n t!(format!(\"{:4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:<4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:>4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:^4.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:>10.4}\", \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaaa\"), \"aaaa\");\n t!(format!(\"{:2.4}\", \"aaa\"), \"aaa\");\n t!(format!(\"{:2.4}\", \"aa\"), \"aa\");\n t!(format!(\"{:2.4}\", \"a\"), \"a \");\n t!(format!(\"{:0>2}\", \"a\"), \"0a\");\n t!(format!(\"{:.*}\", 4, \"aaaaaaaaaaaaaaaaaa\"), \"aaaa\");\n t!(format!(\"{:.1$}\", \"aaaaaaaaaaaaaaaaaa\", 4), \"aaaa\");\n t!(format!(\"{:.a$}\", \"aaaaaaaaaaaaaaaaaa\", a=4), \"aaaa\");\n t!(format!(\"{:1$}\", \"a\", 4), \"a \");\n t!(format!(\"{1:0$}\", 4, \"a\"), \"a \");\n t!(format!(\"{:a$}\", \"a\", a=4), \"a \");\n t!(format!(\"{:-#}\", \"a\"), \"a\");\n t!(format!(\"{:+#}\", \"a\"), \"a\");\n\n \/\/ Some float stuff\n t!(format!(\"{:}\", 1.0f32), \"1\");\n t!(format!(\"{:}\", 1.0f64), \"1\");\n t!(format!(\"{:.3}\", 1.0f64), \"1.000\");\n t!(format!(\"{:10.3}\", 1.0f64), \" 1.000\");\n t!(format!(\"{:+10.3}\", 1.0f64), \" +1.000\");\n t!(format!(\"{:+10.3}\", -1.0f64), \" -1.000\");\n\n t!(format!(\"{:e}\", 1.2345e6f32), \"1.2345e6\");\n t!(format!(\"{:e}\", 1.2345e6f64), \"1.2345e6\");\n t!(format!(\"{:E}\", 1.2345e6f64), \"1.2345E6\");\n t!(format!(\"{:.3e}\", 1.2345e6f64), \"1.234e6\");\n t!(format!(\"{:10.3e}\", 1.2345e6f64), \" 1.234e6\");\n t!(format!(\"{:+10.3e}\", 1.2345e6f64), \" +1.234e6\");\n t!(format!(\"{:+10.3e}\", -1.2345e6f64), \" -1.234e6\");\n\n \/\/ Float edge cases\n t!(format!(\"{}\", -0.0), \"0\");\n t!(format!(\"{:?}\", -0.0), \"-0\");\n t!(format!(\"{:?}\", 0.0), \"0\");\n\n\n \/\/ Ergonomic format_args!\n t!(format!(\"{0:x} {0:X}\", 15), \"f F\");\n t!(format!(\"{0:x} {0:X} {}\", 15), \"f F 15\");\n \/\/ NOTE: For now the longer test cases must not be followed immediately by\n \/\/ >1 empty lines, or the pretty printer will break. Since no one wants to\n \/\/ touch the current pretty printer (#751), we have no choice but to work\n \/\/ around it. 
Some of the following test cases are also affected.\n t!(format!(\"{:x}{0:X}{a:x}{:X}{1:x}{a:X}\", 13, 14, a=15), \"dDfEeF\");\n t!(format!(\"{a:x} {a:X}\", a=15), \"f F\");\n\n \/\/ And its edge cases\n t!(format!(\"{a:.0$} {b:.0$} {0:.0$}\\n{a:.c$} {b:.c$} {c:.c$}\",\n 4, a=\"abcdefg\", b=\"hijklmn\", c=3),\n \"abcd hijk 4\\nabc hij 3\");\n t!(format!(\"{a:.*} {0} {:.*}\", 4, 3, \"efgh\", a=\"abcdef\"), \"abcd 4 efg\");\n t!(format!(\"{:.a$} {a} {a:#x}\", \"aaaaaa\", a=2), \"aa 2 0x2\");\n\n\n \/\/ Test that pointers don't get truncated.\n {\n let val = usize::MAX;\n let exp = format!(\"{:#x}\", val);\n t!(format!(\"{:p}\", val as *const isize), exp);\n }\n\n \/\/ Escaping\n t!(format!(\"{{\"), \"{\");\n t!(format!(\"}}\"), \"}\");\n\n test_write();\n test_print();\n test_order();\n test_once();\n\n \/\/ make sure that format! doesn't move out of local variables\n let a: Box<_> = box 3;\n format!(\"{}\", a);\n format!(\"{}\", a);\n\n \/\/ make sure that format! doesn't cause spurious unused-unsafe warnings when\n \/\/ it's inside of an outer unsafe block\n unsafe {\n let a: isize = ::std::mem::transmute(3_usize);\n format!(\"{}\", a);\n }\n\n test_format_args();\n\n \/\/ test that trailing commas are acceptable\n format!(\"{}\", \"test\",);\n format!(\"{foo}\", foo=\"test\",);\n}\n\n\/\/ Basic test to make sure that we can invoke the `write!` macro with an\n\/\/ fmt::Write instance.\nfn test_write() {\n use std::fmt::Write;\n let mut buf = String::new();\n write!(&mut buf, \"{}\", 3);\n {\n let w = &mut buf;\n write!(w, \"{foo}\", foo=4);\n write!(w, \"{}\", \"hello\");\n writeln!(w, \"{}\", \"line\");\n writeln!(w, \"{foo}\", foo=\"bar\");\n w.write_char('☃');\n w.write_str(\"str\");\n }\n\n t!(buf, \"34helloline\\nbar\\n☃str\");\n}\n\n\/\/ Just make sure that the macros are defined, there's not really a lot that we\n\/\/ can do with them just yet (to test the output)\nfn test_print() {\n print!(\"hi\");\n print!(\"{:?}\", vec!(0u8));\n println!(\"hello\");\n println!(\"this is a {}\", \"test\");\n println!(\"{foo}\", foo=\"bar\");\n}\n\n\/\/ Just make sure that the macros are defined, there's not really a lot that we\n\/\/ can do with them just yet (to test the output)\nfn test_format_args() {\n use std::fmt::Write;\n let mut buf = String::new();\n {\n let w = &mut buf;\n write!(w, \"{}\", format_args!(\"{}\", 1));\n write!(w, \"{}\", format_args!(\"test\"));\n write!(w, \"{}\", format_args!(\"{test}\", test=3));\n }\n let s = buf;\n t!(s, \"1test3\");\n\n let s = fmt::format(format_args!(\"hello {}\", \"world\"));\n t!(s, \"hello world\");\n let s = format!(\"{}: {}\", \"args were\", format_args!(\"hello {}\", \"world\"));\n t!(s, \"args were: hello world\");\n}\n\nfn test_order() {\n \/\/ Make sure format!() arguments are always evaluated in a left-to-right\n \/\/ ordering\n fn foo() -> isize {\n static mut FOO: isize = 0;\n unsafe {\n FOO += 1;\n FOO\n }\n }\n assert_eq!(format!(\"{} {} {a} {b} {} {c}\",\n foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),\n \"1 2 4 5 3 6\".to_string());\n}\n\nfn test_once() {\n \/\/ Make sure each argument are evaluted only once even though it may be\n \/\/ formatted multiple times\n fn foo() -> isize {\n static mut FOO: isize = 0;\n unsafe {\n FOO += 1;\n FOO\n }\n }\n assert_eq!(format!(\"{0} {0} {0} {a} {a} {a}\", foo(), a=foo()),\n \"1 1 1 2 2 2\".to_string());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Library used by tidy and other tools\n\/\/!\n\/\/! This library contains the tidy lints and exposes it\n\/\/! to be used by tools.\n\n#![deny(warnings)]\n\nuse std::fs;\n\nuse std::path::Path;\n\nmacro_rules! t {\n ($e:expr, $p:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed on {} with {}\", stringify!($e), ($p).display(), e),\n });\n\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nmacro_rules! tidy_error {\n ($bad:expr, $fmt:expr, $($arg:tt)*) => ({\n *$bad = true;\n eprint!(\"tidy error: \");\n eprintln!($fmt, $($arg)*);\n });\n}\n\npub mod bins;\npub mod style;\npub mod errors;\npub mod features;\npub mod cargo;\npub mod pal;\npub mod deps;\npub mod unstable_book;\n\nfn filter_dirs(path: &Path) -> bool {\n let skip = [\n \"src\/binaryen\",\n \"src\/dlmalloc\",\n \"src\/jemalloc\",\n \"src\/llvm\",\n \"src\/libbacktrace\",\n \"src\/libcompiler_builtins\",\n \"src\/librustc_data_structures\/owning_ref\",\n \"src\/compiler-rt\",\n \"src\/liblibc\",\n \"src\/vendor\",\n \"src\/rt\/hoedown\",\n \"src\/tools\/cargo\",\n \"src\/tools\/rls\",\n \"src\/tools\/clippy\",\n \"src\/tools\/rust-installer\",\n \"src\/tools\/rustfmt\",\n \"src\/tools\/miri\",\n \"src\/librustc\/mir\/interpret\",\n \"src\/librustc_mir\/interpret\",\n ];\n skip.iter().any(|p| path.ends_with(p))\n}\n\nfn walk_many(paths: &[&Path], skip: &mut FnMut(&Path) -> bool, f: &mut FnMut(&Path)) {\n for path in paths {\n walk(path, skip, f);\n }\n}\n\nfn walk(path: &Path, skip: &mut FnMut(&Path) -> bool, f: &mut FnMut(&Path)) {\n for entry in t!(fs::read_dir(path), path) {\n let entry = t!(entry);\n let kind = t!(entry.file_type());\n let path = entry.path();\n if kind.is_dir() {\n if !skip(&path) {\n walk(&path, skip, f);\n }\n } else {\n f(&path);\n }\n }\n}\n<commit_msg>add target\/ to ignored tidy dirs<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Library used by tidy and other tools\n\/\/!\n\/\/! This library contains the tidy lints and exposes it\n\/\/! to be used by tools.\n\n#![deny(warnings)]\n\nuse std::fs;\n\nuse std::path::Path;\n\nmacro_rules! t {\n ($e:expr, $p:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed on {} with {}\", stringify!($e), ($p).display(), e),\n });\n\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nmacro_rules! 
tidy_error {\n ($bad:expr, $fmt:expr, $($arg:tt)*) => ({\n *$bad = true;\n eprint!(\"tidy error: \");\n eprintln!($fmt, $($arg)*);\n });\n}\n\npub mod bins;\npub mod style;\npub mod errors;\npub mod features;\npub mod cargo;\npub mod pal;\npub mod deps;\npub mod unstable_book;\n\nfn filter_dirs(path: &Path) -> bool {\n let skip = [\n \"src\/binaryen\",\n \"src\/dlmalloc\",\n \"src\/jemalloc\",\n \"src\/llvm\",\n \"src\/libbacktrace\",\n \"src\/libcompiler_builtins\",\n \"src\/librustc_data_structures\/owning_ref\",\n \"src\/compiler-rt\",\n \"src\/liblibc\",\n \"src\/vendor\",\n \"src\/rt\/hoedown\",\n \"src\/tools\/cargo\",\n \"src\/tools\/rls\",\n \"src\/tools\/clippy\",\n \"src\/tools\/rust-installer\",\n \"src\/tools\/rustfmt\",\n \"src\/tools\/miri\",\n \"src\/librustc\/mir\/interpret\",\n \"src\/librustc_mir\/interpret\",\n \"src\/target\",\n ];\n skip.iter().any(|p| path.ends_with(p))\n}\n\nfn walk_many(paths: &[&Path], skip: &mut FnMut(&Path) -> bool, f: &mut FnMut(&Path)) {\n for path in paths {\n walk(path, skip, f);\n }\n}\n\nfn walk(path: &Path, skip: &mut FnMut(&Path) -> bool, f: &mut FnMut(&Path)) {\n for entry in t!(fs::read_dir(path), path) {\n let entry = t!(entry);\n let kind = t!(entry.file_type());\n let path = entry.path();\n if kind.is_dir() {\n if !skip(&path) {\n walk(&path, skip, f);\n }\n } else {\n f(&path);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>vnc is connecting properly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for SimplifyBranchSame miscompilation<commit_after>\/\/ Regression test for SimplifyBranchSame miscompilation.\n\/\/ run-pass\n\nmacro_rules! m {\n ($a:expr, $b:expr, $c:block) => {\n match $a {\n Lto::Fat | Lto::Thin => { $b; (); $c }\n Lto::No => { $b; () }\n }\n }\n}\n\npub enum Lto { No, Thin, Fat }\n\nfn f(mut cookie: u32, lto: Lto) -> u32 {\n let mut _a = false;\n m!(lto, _a = true, {cookie = 0});\n cookie\n}\n\nfn main() { assert_eq!(f(42, Lto::Thin), 0) }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Finished basic implementation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add library with bisect function<commit_after>use std::ops::Range;\n\npub trait RangeExt {\n fn is_empty(&self) -> bool;\n fn is_singleton(&self) -> bool;\n}\n\nimpl RangeExt for Range<i32> {\n #[inline]\n fn is_empty(&self) -> bool {\n self.start >= self.end\n }\n #[inline]\n fn is_singleton(&self) -> bool {\n self.start + 1 == self.end\n }\n}\n\n\/\/\/ Finds least item in `r` for which the `predicate` holds.\npub fn bisect<P>(mut r: Range<i32>, mut predicate: P) -> Option<i32> where\n P: FnMut(i32) -> bool\n{\n if r.is_empty() {\n return None;\n }\n\n loop {\n if r.is_empty() {\n return match predicate(r.end) {\n true => Some(r.end),\n false => None,\n }\n }\n if r.is_singleton() {\n return if predicate(r.start) {\n Some(r.start)\n } else {\n None\n }\n }\n let mid = r.start + (r.end - r.start) \/ 2;\n if predicate(mid) {\n r = Range { start: r.start, end: mid };\n } else {\n r = Range { start: mid + 1, end: r.end };\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_bisect() {\n assert_eq!(None, bisect(0..0, |x| x >= 0));\n assert_eq!(Some(50), bisect(0..100, |x| x >= 50 ));\n assert_eq!(None, bisect(0..100, |x| x >= 1000));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add `core` feature<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test::to_code_reserved test.<commit_after><|endoftext|>"} 
{"text":"<commit_before>#![feature(macro_rules)]\n\npub struct Slide<T: Iterator<A>, A> {\n iter: T,\n n: uint,\n window: Vec<A>\n}\n\nmacro_rules! return_if(\n ($cond:expr, $value:expr) => (\n if $cond {\n return $value;\n }\n );\n)\n\nimpl<A: Clone, T: Iterator<A>> Slide<T, A> {\n fn push_window(&mut self) -> bool {\n let iter_next = self.iter.next();\n\n if iter_next.is_some() {\n self.window.push(iter_next.unwrap());\n true\n } else {\n false\n }\n }\n\n fn new(iter: T, n: uint) -> Slide<T, A> {\n Slide{\n iter: iter,\n n: n,\n window: Vec::with_capacity(n + 1)\n }\n }\n}\n\nimpl<A: Clone, T: Iterator<A>> Iterator<Vec<A>> for Slide<T, A> {\n fn next(&mut self) -> Option<Vec<A>> {\n return_if!(self.n == 0, None);\n return_if!(!self.push_window(), None);\n\n loop {\n let window_status = self.window.len().cmp(&self.n);\n\n match window_status {\n Greater => { self.window.remove(0); }\n Equal => { return Some(self.window.clone()); }\n Less => { return_if!(!self.push_window(), None); }\n }\n }\n }\n}\n\npub trait SlideIterator<T: Iterator<A>, A> {\n fn slide(self, n: uint) -> Slide<T, A>;\n}\n\nimpl<A: Clone, T: Iterator<A>> SlideIterator<T, A> for T {\n fn slide(self, n: uint) -> Slide<T, A> {\n Slide::new(self, n)\n }\n}\n\n#[test]\nfn test_slide() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(3);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3])\n assert_eq!(slide_iter.next().unwrap(), vec![2, 3, 4])\n assert_eq!(slide_iter.next().unwrap(), vec![3, 4, 5])\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_equal_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(5);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3, 4, 5])\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_zero_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(0);\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_overlong_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(7);\n assert!(slide_iter.next().is_none())\n}\n<commit_msg>Simplify `push_window`<commit_after>#![feature(macro_rules)]\n\npub struct Slide<T: Iterator<A>, A> {\n iter: T,\n n: uint,\n window: Vec<A>\n}\n\nmacro_rules! 
return_if(\n ($cond:expr, $value:expr) => (\n if $cond {\n return $value;\n }\n );\n)\n\nimpl<A: Clone, T: Iterator<A>> Slide<T, A> {\n fn push_window(&mut self) -> bool {\n let iter_next = self.iter.next();\n let is_some = iter_next.is_some();\n\n if is_some {\n self.window.push(iter_next.unwrap());\n }\n\n is_some\n }\n\n fn new(iter: T, n: uint) -> Slide<T, A> {\n Slide{\n iter: iter,\n n: n,\n window: Vec::with_capacity(n + 1)\n }\n }\n}\n\nimpl<A: Clone, T: Iterator<A>> Iterator<Vec<A>> for Slide<T, A> {\n fn next(&mut self) -> Option<Vec<A>> {\n return_if!(self.n == 0, None);\n return_if!(!self.push_window(), None);\n\n loop {\n let window_status = self.window.len().cmp(&self.n);\n\n match window_status {\n Greater => { self.window.remove(0); }\n Equal => { return Some(self.window.clone()); }\n Less => { return_if!(!self.push_window(), None); }\n }\n }\n }\n}\n\npub trait SlideIterator<T: Iterator<A>, A> {\n fn slide(self, n: uint) -> Slide<T, A>;\n}\n\nimpl<A: Clone, T: Iterator<A>> SlideIterator<T, A> for T {\n fn slide(self, n: uint) -> Slide<T, A> {\n Slide::new(self, n)\n }\n}\n\n#[test]\nfn test_slide() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(3);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3])\n assert_eq!(slide_iter.next().unwrap(), vec![2, 3, 4])\n assert_eq!(slide_iter.next().unwrap(), vec![3, 4, 5])\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_equal_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(5);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3, 4, 5])\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_zero_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(0);\n assert!(slide_iter.next().is_none())\n}\n\n#[test]\nfn test_slide_overlong_window() {\n let mut slide_iter = vec![1i, 2, 3, 4, 5].into_iter().slide(7);\n assert!(slide_iter.next().is_none())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove useless comparison<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adding example code.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make cargo-expand able to be vendored by cargo-vendor<commit_after>\/\/ empty\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some examples of reading the content of a Response, and a paragraph about `Read`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue #17780<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(unboxed_closures, overloaded_calls)]\n\nfn set(x: &mut uint) { *x = 5; }\n\nfn main() {\n \/\/ By-ref captures\n {\n let mut x = 0u;\n let _f = |&:| x = 42;\n \/\/~^ ERROR cannot assign to data in a free\n \/\/ variable from an immutable unboxed closure\n\n let mut y = 0u;\n let _g = |&:| set(&mut y);\n \/\/~^ ERROR cannot borrow data mutably in a free\n \/\/ variable from an immutable unboxed closure\n\n let mut z = 0u;\n let _h = |&mut:| { set(&mut z); |&:| z = 42; };\n \/\/~^ ERROR cannot assign to data in a\n \/\/ free variable from an immutable unboxed closure\n }\n \/\/ By-value captures\n {\n let mut x = 0u;\n let _f = move |&:| x = 42;\n \/\/~^ ERROR cannot assign to data in a free\n \/\/ variable from an immutable unboxed closure\n\n let mut y = 0u;\n let _g = move |&:| set(&mut y);\n \/\/~^ ERROR cannot borrow data mutably in a free\n \/\/ variable from an immutable unboxed closure\n\n let mut z = 0u;\n let _h = move |&mut:| { set(&mut z); move |&:| z = 42; };\n \/\/~^ ERROR cannot assign to data in a free\n \/\/ variable from an immutable unboxed closure\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Count missing tests as failed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add compile-fail test for when the inferred and actual result types differ.<commit_after>extern crate uom;\n\nuse uom::si::f32::*;\n\nfn main() {\n let _: Velocity = Length::new(1.0, uom::si::length::meter) *\n Time::new(1.0, uom::si::time::second);\n \/\/~^^ ERROR mismatched types [E0308]\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::hir::def_id::{DefId, LOCAL_CRATE};\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirSuite, MirPassIndex, MirSource};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::item_path;\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::{Idx};\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse std::path::{PathBuf, Path};\n\nconst INDENT: &'static str = \" \";\n\/\/\/ Alignment for lining up comments following MIR statements\nconst ALIGN: usize = 40;\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. 
If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_num: Option<(MirSuite, MirPassIndex)>,\n pass_name: &str,\n disambiguator: &Display,\n source: MirSource,\n mir: &Mir<'tcx>) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_num, pass_name, &node_path,\n disambiguator, source, mir);\n for (index, promoted_mir) in mir.promoted.iter_enumerated() {\n let promoted_source = MirSource::Promoted(source.item_id(), index);\n dump_matched_mir_node(tcx, pass_num, pass_name, &node_path, disambiguator,\n promoted_source, promoted_mir);\n }\n}\n\npub fn dump_enabled<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource)\n -> bool {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return false,\n Some(ref filters) => filters,\n };\n let node_id = source.item_id();\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(node_id))\n });\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n })\n}\n\n\/\/ #41697 -- we use `with_forced_impl_filename_line()` because\n\/\/ `item_path_str()` would otherwise trigger `type_of`, and this can\n\/\/ run while we are already attempting to evaluate `type_of`.\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_num: Option<(MirSuite, MirPassIndex)>,\n pass_name: &str,\n node_path: &str,\n disambiguator: &Display,\n source: MirSource,\n mir: &Mir<'tcx>) {\n let promotion_id = match source {\n MirSource::Promoted(_, id) => format!(\"-{:?}\", id),\n MirSource::GeneratorDrop(_) => format!(\"-drop\"),\n _ => String::new()\n };\n\n let pass_num = if tcx.sess.opts.debugging_opts.dump_mir_exclude_pass_number {\n format!(\"\")\n } else {\n match pass_num {\n None => format!(\".-------\"),\n Some((suite, pass_num)) => format!(\".{:03}-{:03}\", suite.0, pass_num.0),\n }\n };\n\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n let p = Path::new(file_dir);\n file_path.push(p);\n };\n let _ = fs::create_dir_all(&file_path);\n let file_name = format!(\"rustc.node{}{}{}.{}.{}.mir\",\n source.item_id(), promotion_id, pass_num, pass_name, disambiguator);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\/\/ disambiguator = {}\", disambiguator)?;\n if let Some(ref layout) = mir.generator_layout {\n writeln!(file, \"\/\/ generator_layout = {:?}\", layout)?;\n }\n writeln!(file, \"\")?;\n write_mir_fn(tcx, source, mir, &mut file)?;\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n single: Option<DefId>,\n w: &mut Write)\n -> io::Result<()>\n{\n writeln!(w, \"\/\/ WARNING: This output format is intended for human consumers only\")?;\n writeln!(w, \"\/\/ and is subject to change without notice. 
Knock yourself out.\")?;\n\n let mut first = true;\n for def_id in dump_mir_def_ids(tcx, single) {\n let mir = &tcx.optimized_mir(def_id);\n\n if first {\n first = false;\n } else {\n \/\/ Put empty lines between all items\n writeln!(w, \"\")?;\n }\n\n let id = tcx.hir.as_local_node_id(def_id).unwrap();\n let src = MirSource::from_node(tcx, id);\n write_mir_fn(tcx, src, mir, w)?;\n\n for (i, mir) in mir.promoted.iter_enumerated() {\n writeln!(w, \"\")?;\n write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w)?;\n }\n }\n Ok(())\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n write_basic_block(tcx, block, mir, w)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\npub fn write_basic_block(tcx: TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n let data = &mir[block];\n\n \/\/ Basic block label at the top.\n writeln!(w, \"{}{:?}: {{\", INDENT, block)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n let indented_mir = format!(\"{0}{0}{1:?};\", INDENT, statement);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_mir,\n ALIGN,\n comment(tcx, statement.source_info))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n let indented_terminator = format!(\"{0}{0}{1:?};\", INDENT, data.terminator().kind);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_terminator,\n ALIGN,\n comment(tcx, data.terminator().source_info))?;\n\n writeln!(w, \"{}}}\", INDENT)\n}\n\nfn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {\n format!(\"scope {} at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\n\/\/\/ Prints user-defined variables in a scope tree.\n\/\/\/\n\/\/\/ Returns the total number of variables printed.\nfn write_scope_tree(tcx: TyCtxt,\n mir: &Mir,\n scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,\n w: &mut Write,\n parent: VisibilityScope,\n depth: usize)\n -> io::Result<()> {\n let indent = depth * INDENT.len();\n\n let children = match scope_tree.get(&parent) {\n Some(childs) => childs,\n None => return Ok(()),\n };\n\n for &child in children {\n let data = &mir.visibility_scopes[child];\n assert_eq!(data.parent_scope, Some(parent));\n writeln!(w, \"{0:1$}scope {2} {{\", \"\", indent, child.index())?;\n\n \/\/ User variable types (including the user's name in a comment).\n for local in mir.vars_iter() {\n let var = &mir.local_decls[local];\n let (name, source_info) = if var.source_info.scope == child {\n (var.name.unwrap(), var.source_info)\n } else {\n \/\/ Not a variable or not declared in this scope.\n continue;\n };\n\n let mut_str = if var.mutability == Mutability::Mut {\n \"mut \"\n } else {\n \"\"\n };\n\n let indent = indent + INDENT.len();\n let indented_var = format!(\"{0:1$}let {2}{3:?}: {4};\",\n INDENT,\n indent,\n mut_str,\n local,\n var.ty);\n writeln!(w, \"{0:1$} \/\/ \\\"{2}\\\" in {3}\",\n indented_var,\n ALIGN,\n name,\n comment(tcx, source_info))?;\n }\n\n write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;\n\n writeln!(w, \"{0:1$}}}\", \"\", depth * INDENT.len())?;\n }\n\n Ok(())\n}\n\n\/\/\/ Write out a 
human-readable textual representation of the MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\npub fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_sig(tcx, src, mir, w)?;\n writeln!(w, \" {{\")?;\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();\n for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {\n if let Some(parent) = scope_data.parent_scope {\n scope_tree.entry(parent)\n .or_insert(vec![])\n .push(VisibilityScope::new(index));\n } else {\n \/\/ Only the argument scope has no parent, because it's the root.\n assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());\n }\n }\n\n \/\/ Print return pointer\n let indented_retptr = format!(\"{}let mut {:?}: {};\",\n INDENT,\n RETURN_POINTER,\n mir.return_ty);\n writeln!(w, \"{0:1$} \/\/ return pointer\",\n indented_retptr,\n ALIGN)?;\n\n write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;\n\n write_temp_decls(mir, w)?;\n\n \/\/ Add an empty line before the first block is printed.\n writeln!(w, \"\")?;\n\n Ok(())\n}\n\nfn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)\n -> io::Result<()>\n{\n match src {\n MirSource::Fn(_) => write!(w, \"fn\")?,\n MirSource::Const(_) => write!(w, \"const\")?,\n MirSource::Static(_, hir::MutImmutable) => write!(w, \"static\")?,\n MirSource::Static(_, hir::MutMutable) => write!(w, \"static mut\")?,\n MirSource::Promoted(_, i) => write!(w, \"{:?} in\", i)?,\n MirSource::GeneratorDrop(_) => write!(w, \"drop_glue\")?,\n }\n\n item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 elsewhere\n write!(w, \" {}\", tcx.node_path_str(src.item_id()))\n })?;\n\n match src {\n MirSource::Fn(_) | MirSource::GeneratorDrop(_) => {\n write!(w, \"(\")?;\n\n \/\/ fn argument types.\n for (i, arg) in mir.args_iter().enumerate() {\n if i != 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Local(arg), mir.local_decls[arg].ty)?;\n }\n\n write!(w, \") -> {}\", mir.return_ty)\n }\n MirSource::Const(..) |\n MirSource::Static(..) |\n MirSource::Promoted(..) => {\n assert_eq!(mir.arg_count, 0);\n write!(w, \": {} =\", mir.return_ty)\n }\n }\n}\n\nfn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {\n \/\/ Compiler-introduced temporary types.\n for temp in mir.temps_iter() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, temp, mir.local_decls[temp].ty)?;\n }\n\n Ok(())\n}\n\npub fn dump_mir_def_ids(tcx: TyCtxt, single: Option<DefId>) -> Vec<DefId> {\n if let Some(i) = single {\n vec![i]\n } else {\n tcx.mir_keys(LOCAL_CRATE).iter().cloned().collect()\n }\n}\n<commit_msg>Mir pretty print: Add cleanup comment<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::hir::def_id::{DefId, LOCAL_CRATE};\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirSuite, MirPassIndex, MirSource};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::item_path;\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::{Idx};\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse std::path::{PathBuf, Path};\n\nconst INDENT: &'static str = \" \";\n\/\/\/ Alignment for lining up comments following MIR statements\nconst ALIGN: usize = 40;\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_num: Option<(MirSuite, MirPassIndex)>,\n pass_name: &str,\n disambiguator: &Display,\n source: MirSource,\n mir: &Mir<'tcx>) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_num, pass_name, &node_path,\n disambiguator, source, mir);\n for (index, promoted_mir) in mir.promoted.iter_enumerated() {\n let promoted_source = MirSource::Promoted(source.item_id(), index);\n dump_matched_mir_node(tcx, pass_num, pass_name, &node_path, disambiguator,\n promoted_source, promoted_mir);\n }\n}\n\npub fn dump_enabled<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource)\n -> bool {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return false,\n Some(ref filters) => filters,\n };\n let node_id = source.item_id();\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(node_id))\n });\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n })\n}\n\n\/\/ #41697 -- we use `with_forced_impl_filename_line()` because\n\/\/ `item_path_str()` would otherwise trigger `type_of`, and this can\n\/\/ run while we are already attempting to evaluate `type_of`.\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_num: Option<(MirSuite, MirPassIndex)>,\n pass_name: &str,\n node_path: &str,\n disambiguator: &Display,\n source: MirSource,\n mir: &Mir<'tcx>) {\n let promotion_id = match source {\n MirSource::Promoted(_, id) => format!(\"-{:?}\", id),\n MirSource::GeneratorDrop(_) => format!(\"-drop\"),\n _ => String::new()\n };\n\n let pass_num = if tcx.sess.opts.debugging_opts.dump_mir_exclude_pass_number {\n format!(\"\")\n } else {\n match pass_num {\n None => format!(\".-------\"),\n Some((suite, pass_num)) => format!(\".{:03}-{:03}\", suite.0, pass_num.0),\n }\n };\n\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n 
let p = Path::new(file_dir);\n file_path.push(p);\n };\n let _ = fs::create_dir_all(&file_path);\n let file_name = format!(\"rustc.node{}{}{}.{}.{}.mir\",\n source.item_id(), promotion_id, pass_num, pass_name, disambiguator);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\/\/ disambiguator = {}\", disambiguator)?;\n if let Some(ref layout) = mir.generator_layout {\n writeln!(file, \"\/\/ generator_layout = {:?}\", layout)?;\n }\n writeln!(file, \"\")?;\n write_mir_fn(tcx, source, mir, &mut file)?;\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n single: Option<DefId>,\n w: &mut Write)\n -> io::Result<()>\n{\n writeln!(w, \"\/\/ WARNING: This output format is intended for human consumers only\")?;\n writeln!(w, \"\/\/ and is subject to change without notice. Knock yourself out.\")?;\n\n let mut first = true;\n for def_id in dump_mir_def_ids(tcx, single) {\n let mir = &tcx.optimized_mir(def_id);\n\n if first {\n first = false;\n } else {\n \/\/ Put empty lines between all items\n writeln!(w, \"\")?;\n }\n\n let id = tcx.hir.as_local_node_id(def_id).unwrap();\n let src = MirSource::from_node(tcx, id);\n write_mir_fn(tcx, src, mir, w)?;\n\n for (i, mir) in mir.promoted.iter_enumerated() {\n writeln!(w, \"\")?;\n write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w)?;\n }\n }\n Ok(())\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n write_basic_block(tcx, block, mir, w)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\npub fn write_basic_block(tcx: TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n let data = &mir[block];\n\n \/\/ Basic block label at the top.\n let cleanup_text = if data.is_cleanup { \" \/\/ cleanup\" } else { \"\" };\n let lbl = format!(\"{}{:?}: {{\", INDENT, block);\n writeln!(w, \"{0:1$}{2}\", lbl, ALIGN, cleanup_text)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n let indented_mir = format!(\"{0}{0}{1:?};\", INDENT, statement);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_mir,\n ALIGN,\n comment(tcx, statement.source_info))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n let indented_terminator = format!(\"{0}{0}{1:?};\", INDENT, data.terminator().kind);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_terminator,\n ALIGN,\n comment(tcx, data.terminator().source_info))?;\n\n writeln!(w, \"{}}}\", INDENT)\n}\n\nfn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {\n format!(\"scope {} at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\n\/\/\/ Prints user-defined variables in a scope tree.\n\/\/\/\n\/\/\/ Returns the total number of variables printed.\nfn write_scope_tree(tcx: TyCtxt,\n mir: &Mir,\n scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,\n w: &mut Write,\n parent: 
VisibilityScope,\n depth: usize)\n -> io::Result<()> {\n let indent = depth * INDENT.len();\n\n let children = match scope_tree.get(&parent) {\n Some(childs) => childs,\n None => return Ok(()),\n };\n\n for &child in children {\n let data = &mir.visibility_scopes[child];\n assert_eq!(data.parent_scope, Some(parent));\n writeln!(w, \"{0:1$}scope {2} {{\", \"\", indent, child.index())?;\n\n \/\/ User variable types (including the user's name in a comment).\n for local in mir.vars_iter() {\n let var = &mir.local_decls[local];\n let (name, source_info) = if var.source_info.scope == child {\n (var.name.unwrap(), var.source_info)\n } else {\n \/\/ Not a variable or not declared in this scope.\n continue;\n };\n\n let mut_str = if var.mutability == Mutability::Mut {\n \"mut \"\n } else {\n \"\"\n };\n\n let indent = indent + INDENT.len();\n let indented_var = format!(\"{0:1$}let {2}{3:?}: {4};\",\n INDENT,\n indent,\n mut_str,\n local,\n var.ty);\n writeln!(w, \"{0:1$} \/\/ \\\"{2}\\\" in {3}\",\n indented_var,\n ALIGN,\n name,\n comment(tcx, source_info))?;\n }\n\n write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;\n\n writeln!(w, \"{0:1$}}}\", \"\", depth * INDENT.len())?;\n }\n\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation of the MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\npub fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_sig(tcx, src, mir, w)?;\n writeln!(w, \" {{\")?;\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();\n for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {\n if let Some(parent) = scope_data.parent_scope {\n scope_tree.entry(parent)\n .or_insert(vec![])\n .push(VisibilityScope::new(index));\n } else {\n \/\/ Only the argument scope has no parent, because it's the root.\n assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());\n }\n }\n\n \/\/ Print return pointer\n let indented_retptr = format!(\"{}let mut {:?}: {};\",\n INDENT,\n RETURN_POINTER,\n mir.return_ty);\n writeln!(w, \"{0:1$} \/\/ return pointer\",\n indented_retptr,\n ALIGN)?;\n\n write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;\n\n write_temp_decls(mir, w)?;\n\n \/\/ Add an empty line before the first block is printed.\n writeln!(w, \"\")?;\n\n Ok(())\n}\n\nfn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)\n -> io::Result<()>\n{\n match src {\n MirSource::Fn(_) => write!(w, \"fn\")?,\n MirSource::Const(_) => write!(w, \"const\")?,\n MirSource::Static(_, hir::MutImmutable) => write!(w, \"static\")?,\n MirSource::Static(_, hir::MutMutable) => write!(w, \"static mut\")?,\n MirSource::Promoted(_, i) => write!(w, \"{:?} in\", i)?,\n MirSource::GeneratorDrop(_) => write!(w, \"drop_glue\")?,\n }\n\n item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 elsewhere\n write!(w, \" {}\", tcx.node_path_str(src.item_id()))\n })?;\n\n match src {\n MirSource::Fn(_) | MirSource::GeneratorDrop(_) => {\n write!(w, \"(\")?;\n\n \/\/ fn argument types.\n for (i, arg) in mir.args_iter().enumerate() {\n if i != 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Local(arg), mir.local_decls[arg].ty)?;\n }\n\n write!(w, \") -> {}\", mir.return_ty)\n }\n MirSource::Const(..) |\n MirSource::Static(..) |\n MirSource::Promoted(..) 
=> {\n assert_eq!(mir.arg_count, 0);\n write!(w, \": {} =\", mir.return_ty)\n }\n }\n}\n\nfn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {\n \/\/ Compiler-introduced temporary types.\n for temp in mir.temps_iter() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, temp, mir.local_decls[temp].ty)?;\n }\n\n Ok(())\n}\n\npub fn dump_mir_def_ids(tcx: TyCtxt, single: Option<DefId>) -> Vec<DefId> {\n if let Some(i) = single {\n vec![i]\n } else {\n tcx.mir_keys(LOCAL_CRATE).iter().cloned().collect()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added notes on comments<commit_after>\/\/ A compilation of notes and lessons from https:\/\/doc.rust-lang.org\/book\/comments.html\n\n\/\/ Yes, comments requires its own file - it's a bit more complex than just \/\/\n\/\/ Not much more complex but still :)\n\n\/\/ Regular comments are like the ones I've been doing. Duh.\n\n\/\/ Another type of comment is a doc comment, which is differentiated via \/\/\/ instead of \/\/\n\/\/ These types of comments support Markdown within them and are commonly used to provide documentation\n\/\/ and usage instructions for methods\n\n\/\/ Another type of doc comment is \/\/!, which is used commonly in module or crate root files (lib.rs and mod.rs)\n\/\/ Using \/\/! vs \/\/\/ seems to just be a style\/convention thing\n\n\/\/ Interestingly - you can use the rustdoc tool (https:\/\/doc.rust-lang.org\/book\/documentation.html) to create HTML\n\/\/ documents from doc comments and run code examples (from within the comments) as tests\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added some end-user \"sanity\" tests<commit_after>\/\/\/ Tests which exercise the overall application, in particular the `MDBook`\n\/\/\/ initialization and build\/rendering process.\n\/\/\/\n\/\/\/ This will create an entire book in a temporary directory using some\n\/\/\/ dummy content.\n\nextern crate mdbook;\nextern crate tempdir;\nextern crate env_logger;\n\nuse std::path::Path;\nuse std::fs::{self, File};\nuse std::io::{Read, Write};\n\nuse tempdir::TempDir;\nuse mdbook::MDBook;\n\n\nconst SUMMARY_MD: &'static str = \"# Summary\n\n[Introduction](intro.md)\n\n- [First Chapter](.\/first\/index.md)\n - [Nested Chapter](.\/first\/nested.md)\n- [Second Chapter](.\/second.md)\n\n[Conclusion](.\/conclusion.md)\n\";\n\nconst INTRO: &'static str = \"# Introduction\n\nHere's some interesting text...\";\n\nconst FIRST: &'static str = \"# First Chapter\n\nmore text.\";\n\nconst NESTED: &'static str = r#\"# Nested Chapter\n\nThis file has some testable code.\n\n```rust\nassert!($TEST_STATUS);\n```\"#;\n\nconst SECOND: &'static str = \"# Second Chapter\";\n\nconst CONCLUSION: &'static str = \"# Conclusion\";\n\n#[test]\nfn build_the_dummy_book() {\n let temp = create_book(true);\n let mut md = MDBook::new(temp.path());\n\n md.build().unwrap();\n}\n\n#[test]\nfn mdbook_can_correctly_test_a_passing_book() {\n let temp = create_book(true);\n let mut md = MDBook::new(temp.path());\n\n assert!(md.test().is_ok());\n}\n\n#[test]\nfn mdbook_detects_book_with_failing_tests() {\n let temp = create_book(false);\n let mut md = MDBook::new(temp.path());\n\n assert!(md.test().is_err());\n}\n\n#[test]\nfn by_default_mdbook_generates_rendered_content_in_the_book_directory() {\n let temp = create_book(false);\n let mut md = MDBook::new(temp.path());\n\n assert!(!temp.path().join(\"book\").exists());\n md.build().unwrap();\n\n assert!(temp.path().join(\"book\").exists());\n 
assert!(temp.path().join(\"book\").join(\"index.html\").exists());\n}\n\n#[test]\nfn make_sure_bottom_level_files_contain_links_to_chapters() {\n let temp = create_book(false);\n let mut md = MDBook::new(temp.path());\n md.build().unwrap();\n\n let dest = temp.path().join(\"book\");\n let links = vec![\n \"intro.html\",\n \"first\/index.html\",\n \"first\/nested.html\",\n \"second.html\",\n \"conclusion.html\",\n ];\n\n let files_in_bottom_dir = vec![\"index.html\", \"intro.html\", \"second.html\", \"conclusion.html\"];\n\n for filename in files_in_bottom_dir {\n assert_contains_strings(dest.join(filename), &links);\n }\n}\n\n#[test]\nfn check_correct_cross_links_in_nested_dir() {\n let temp = create_book(false);\n let mut md = MDBook::new(temp.path());\n md.build().unwrap();\n\n let first = temp.path().join(\"book\").join(\"first\");\n let links = vec![\n r#\"<base href=\"..\/\">\"#,\n \"intro.html\",\n \"first\/index.html\",\n \"first\/nested.html\",\n \"second.html\",\n \"conclusion.html\",\n ];\n\n let files_in_nested_dir = vec![\"index.html\", \"nested.html\"];\n\n for filename in files_in_nested_dir {\n assert_contains_strings(first.join(filename), &links);\n }\n}\n\n\/\/\/ Create a dummy book in a temporary directory, using the contents of\n\/\/\/ `SUMMARY_MD` as a guide.\n\/\/\/\n\/\/\/ The \"Nested Chapter\" file contains a code block with a single\n\/\/\/ `assert!($TEST_STATUS)`. If you want to check MDBook's testing\n\/\/\/ functionality, `$TEST_STATUS` can be substitute for either `true` or\n\/\/\/ `false`. This is done using the `passing_test` parameter.\nfn create_book(passing_test: bool) -> TempDir {\n let temp = TempDir::new(\"dummy_book\").unwrap();\n\n let src = temp.path().join(\"src\");\n fs::create_dir_all(&src).unwrap();\n\n File::create(src.join(\"SUMMARY.md\"))\n .unwrap()\n .write_all(SUMMARY_MD.as_bytes())\n .unwrap();\n File::create(src.join(\"intro.md\"))\n .unwrap()\n .write_all(INTRO.as_bytes())\n .unwrap();\n\n let first = src.join(\"first\");\n fs::create_dir_all(&first).unwrap();\n File::create(first.join(\"index.md\"))\n .unwrap()\n .write_all(FIRST.as_bytes())\n .unwrap();\n\n let to_substitute = if passing_test { \"true\" } else { \"false\" };\n let nested_text = NESTED.replace(\"$TEST_STATUS\", to_substitute);\n File::create(first.join(\"nested.md\"))\n .unwrap()\n .write_all(nested_text.as_bytes())\n .unwrap();\n\n File::create(src.join(\"second.md\"))\n .unwrap()\n .write_all(SECOND.as_bytes())\n .unwrap();\n File::create(src.join(\"conclusion.md\"))\n .unwrap()\n .write_all(CONCLUSION.as_bytes())\n .unwrap();\n\n temp\n}\n\nfn assert_contains_strings<P: AsRef<Path>>(filename: P, strings: &[&str]) {\n println!(\"Checking {}\", filename.as_ref().display());\n println!();\n\n let mut content = String::new();\n File::open(filename)\n .expect(\"Couldn't open the provided file\")\n .read_to_string(&mut content)\n .unwrap();\n\n println!(\"{}\", content);\n println!();\n println!();\n\n for s in strings {\n println!(\"Checking for {:?}\", s);\n assert!(content.contains(s));\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #37940 - michaelwoerister:ich-struct-constructors, r=nikomatsakis<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for struct constructor expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\n\nstruct RegularStruct {\n x: i32,\n y: i64,\n z: i16,\n}\n\n\/\/ Change field value (regular struct) -----------------------------------------\n#[cfg(cfail1)]\nfn change_field_value_regular_struct() -> RegularStruct {\n RegularStruct {\n x: 0,\n y: 1,\n z: 2,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_field_value_regular_struct() -> RegularStruct {\n RegularStruct {\n x: 0,\n y: 2,\n z: 2,\n }\n}\n\n\n\n\/\/ Change field order (regular struct) -----------------------------------------\n#[cfg(cfail1)]\nfn change_field_order_regular_struct() -> RegularStruct {\n RegularStruct {\n x: 3,\n y: 4,\n z: 5,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_field_order_regular_struct() -> RegularStruct {\n RegularStruct {\n y: 4,\n x: 3,\n z: 5,\n }\n}\n\n\n\n\/\/ Add field (regular struct) --------------------------------------------------\n#[cfg(cfail1)]\nfn add_field_regular_struct() -> RegularStruct {\n let struct1 = RegularStruct {\n x: 3,\n y: 4,\n z: 5,\n };\n\n RegularStruct {\n x: 7,\n .. struct1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn add_field_regular_struct() -> RegularStruct {\n let struct1 = RegularStruct {\n x: 3,\n y: 4,\n z: 5,\n };\n\n RegularStruct {\n x: 7,\n y: 8,\n .. struct1\n }\n}\n\n\n\n\/\/ Change field label (regular struct) -----------------------------------------\n#[cfg(cfail1)]\nfn change_field_label_regular_struct() -> RegularStruct {\n let struct1 = RegularStruct {\n x: 3,\n y: 4,\n z: 5,\n };\n\n RegularStruct {\n x: 7,\n y: 9,\n .. struct1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_field_label_regular_struct() -> RegularStruct {\n let struct1 = RegularStruct {\n x: 3,\n y: 4,\n z: 5,\n };\n\n RegularStruct {\n x: 7,\n z: 9,\n .. 
struct1\n }\n}\n\n\n\nstruct RegularStruct2 {\n x: i8,\n y: i8,\n z: i8,\n}\n\n\/\/ Change constructor path (regular struct) ------------------------------------\n#[cfg(cfail1)]\nfn change_constructor_path_regular_struct() {\n let _ = RegularStruct {\n x: 0,\n y: 1,\n z: 2,\n };\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_constructor_path_regular_struct() {\n let _ = RegularStruct2 {\n x: 0,\n y: 1,\n z: 2,\n };\n}\n\n\n\n\/\/ Change constructor path indirectly (regular struct) -------------------------\nmod change_constructor_path_indirectly_regular_struct {\n #[cfg(cfail1)]\n use super::RegularStruct as Struct;\n #[cfg(not(cfail1))]\n use super::RegularStruct2 as Struct;\n\n fn function() -> Struct {\n Struct {\n x: 0,\n y: 1,\n z: 2,\n }\n }\n}\n\n\n\nstruct TupleStruct(i32, i64, i16);\n\n\/\/ Change field value (tuple struct) -------------------------------------------\n#[cfg(cfail1)]\nfn change_field_value_tuple_struct() -> TupleStruct {\n TupleStruct(0, 1, 2)\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_field_value_tuple_struct() -> TupleStruct {\n TupleStruct(0, 1, 3)\n}\n\n\n\nstruct TupleStruct2(u16, u16, u16);\n\n\/\/ Change constructor path (tuple struct) --------------------------------------\n#[cfg(cfail1)]\nfn change_constructor_path_tuple_struct() {\n let _ = TupleStruct(0, 1, 2);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nfn change_constructor_path_tuple_struct() {\n let _ = TupleStruct2(0, 1, 2);\n}\n\n\n\n\/\/ Change constructor path indirectly (tuple struct) ---------------------------\nmod change_constructor_path_indirectly_tuple_struct {\n #[cfg(cfail1)]\n use super::TupleStruct as Struct;\n #[cfg(not(cfail1))]\n use super::TupleStruct2 as Struct;\n\n fn function() -> Struct {\n Struct(0, 1, 2)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std;\nuse rustc;\n\nimport std::fs;\nimport std::getopts;\nimport std::getopts::optopt;\nimport std::getopts::opt_present;\nimport std::getopts::opt_str;\nimport std::ioivec;\nimport std::ioivec::stdout;\nimport std::vec;\nimport std::ivec;\nimport std::str;\nimport std::uint;\nimport std::option;\n\nimport rustc::syntax::ast;\nimport rustc::syntax::fold;\nimport rustc::syntax::walk;\nimport rustc::syntax::codemap;\nimport rustc::syntax::parse::parser;\nimport rustc::syntax::print::pprust;\n\n\nfn read_whole_file(filename: &str) -> str {\n str::unsafe_from_bytes_ivec(ioivec::file_reader(filename).read_whole_stream())\n}\n\nfn write_file(filename: &str, content: &str) {\n ioivec::file_writer(filename,\n ~[ioivec::create,\n ioivec::truncate]).write_str(content);\n \/\/ Work around https:\/\/github.com\/graydon\/rust\/issues\/726\n std::run::run_program(\"chmod\", [\"644\", filename]);\n}\n\nfn file_contains(filename: &str, needle: &str) -> bool {\n let contents = read_whole_file(filename);\n ret str::find(contents, needle) != -1;\n}\n\nfn contains(haystack: &str, needle: &str) -> bool {\n str::find(haystack, needle) != -1\n}\n\nfn find_rust_files(files: &mutable str[], path: str) {\n if str::ends_with(path, \".rs\") {\n if file_contains(path, 
\"xfail-stage1\") {\n \/\/log_err \"Skipping \" + path + \" because it is marked as xfail-stage1\";\n } else { files += ~[path]; }\n } else if (fs::file_is_dir(path) && str::find(path, \"compile-fail\") == -1)\n {\n for p in fs::list_dir(path) { find_rust_files(files, p); }\n }\n}\n\nfn safe_to_steal(e: ast::expr_) -> bool {\n alt e {\n\n \/\/ pretty-printer precedence issues -- https:\/\/github.com\/graydon\/rust\/issues\/670\n ast::expr_unary(_, _) {\n false\n }\n ast::expr_lit(lit) {\n alt lit.node {\n ast::lit_str(_, _) { true }\n ast::lit_char(_) { true }\n ast::lit_int(_) { false }\n ast::lit_uint(_) { false }\n ast::lit_mach_int(_, _) { false }\n ast::lit_float(_) { false }\n ast::lit_mach_float(_, _) { false }\n ast::lit_nil. { true }\n ast::lit_bool(_) { true }\n }\n }\n ast::expr_cast(_, _) { false }\n ast::expr_send(_, _) { false }\n ast::expr_recv(_, _) { false }\n ast::expr_assert(_) { false }\n ast::expr_binary(_, _, _) { false }\n ast::expr_assign(_, _) { false }\n ast::expr_assign_op(_, _, _) { false }\n ast::expr_fail(option::none.) { false \/* https:\/\/github.com\/graydon\/rust\/issues\/764 *\/ }\n ast::expr_ret(option::none.) { false }\n ast::expr_put(option::none.) { false }\n\n _ {\n true\n }\n }\n}\n\nfn steal_exprs(crate: &ast::crate) -> ast::expr[] {\n let exprs: @mutable ast::expr[] = @mutable ~[];\n \/\/ \"Stash\" is not type-parameterized because of the need for safe_to_steal\n fn stash_expr(es: @mutable ast::expr[], e: &@ast::expr) {\n if safe_to_steal(e.node) {\n *es += ~[*e];\n } else {\/* now my indices are wrong :( *\/ }\n }\n let v =\n {visit_expr_pre: bind stash_expr(exprs, _)\n with walk::default_visitor()};\n walk::walk_crate(v, crate);\n *exprs\n}\n\n\/\/ https:\/\/github.com\/graydon\/rust\/issues\/652\nfn safe_to_replace(e: ast::expr_) -> bool {\n alt e {\n ast::expr_if(_, _, _) { false }\n ast::expr_block(_) { false }\n _ { true }\n }\n}\n\n\/\/ Replace the |i|th expr (in fold order) of |crate| with |newexpr|.\nfn replace_expr_in_crate(crate: &ast::crate, i: uint, newexpr: ast::expr_) ->\n ast::crate {\n let j: @mutable uint = @mutable 0u;\n fn fold_expr_rep(j_: @mutable uint, i_: uint, newexpr_: &ast::expr_,\n original: &ast::expr_, fld: fold::ast_fold) ->\n ast::expr_ {\n *j_ += 1u;\n if i_ + 1u == *j_ && safe_to_replace(original) {\n newexpr_\n } else { fold::noop_fold_expr(original, fld) }\n }\n let afp =\n {fold_expr: bind fold_expr_rep(j, i, newexpr, _, _)\n with *fold::default_ast_fold()};\n let af = fold::make_fold(afp);\n let crate2: @ast::crate = @af.fold_crate(crate);\n fold::dummy_out(af); \/\/ work around a leak (https:\/\/github.com\/graydon\/rust\/issues\/651)\n *crate2\n}\n\niter under(n: uint) -> uint {\n let i: uint = 0u;\n while i < n { put i; i += 1u; }\n}\n\nfn devnull() -> ioivec::writer { std::ioivec::string_writer().get_writer() }\n\nfn as_str(f: fn(ioivec::writer) ) -> str {\n let w = std::ioivec::string_writer();\n f(w.get_writer());\n ret w.get_str();\n}\n\nfn pp_variants(crate: &ast::crate, codemap: &codemap::codemap, filename: &str) {\n let exprs = steal_exprs(crate);\n let exprsL = ivec::len(exprs);\n if (exprsL < 100u) {\n for each i: uint in under(uint::min(exprsL, 20u)) {\n log_err \"Replacing... \" + pprust::expr_to_str(@exprs.(i));\n for each j: uint in under(uint::min(exprsL, 5u)) {\n log_err \"With... 
\" + pprust::expr_to_str(@exprs.(j));\n let crate2 = @replace_expr_in_crate(crate, i, exprs.(j).node);\n \/\/ It would be best to test the *crate* for stability, but testing the\n \/\/ string for stability is easier and ok for now.\n let str3 = as_str(bind pprust::print_crate(codemap, crate2, filename,\n ioivec::string_reader(\"\"), _,\n pprust::no_ann()));\n \/\/ 1u would be sane here, but the pretty-printer currently has lots of whitespace and paren issues,\n \/\/ and https:\/\/github.com\/graydon\/rust\/issues\/766 is hilarious.\n check_roundtrip_convergence(str3, 7u);\n }\n }\n }\n}\n\nfn parse_and_print(code: &str) -> str {\n let filename = \"tmp.rs\";\n let codemap = codemap::new_codemap();\n \/\/write_file(filename, code);\n let crate =\n parser::parse_crate_from_source_str(filename, code, ~[], codemap);\n ret as_str(bind pprust::print_crate(codemap, crate, filename,\n ioivec::string_reader(code), _,\n pprust::no_ann()));\n}\n\nfn content_is_dangerous_to_modify(code: &str) -> bool {\n let dangerous_patterns = [\n \"obj\", \/\/ not safe to steal; https:\/\/github.com\/graydon\/rust\/issues\/761\n \"#macro\", \/\/ not safe to steal things inside of it, because they have a special syntax\n \" be \" \/\/ don't want to replace its child with a non-call: \"Non-call expression in tail call\"\n ];\n\n for p: str in dangerous_patterns { if contains(code, p) { ret true; } }\n ret false;\n}\n\nfn content_is_confusing(code: &str) -> bool {\n let \/\/ https:\/\/github.com\/graydon\/rust\/issues\/671\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ crazy rules enforced by parser rather than typechecker?\n \/\/ more precedence issues\n \/\/ more precedence issues?\n confusing_patterns =\n [\"#macro\", \"][]\", \"][mutable]\", \"][mutable ]\", \"self\", \"spawn\",\n \"bind\",\n \"\\n\\n\\n\\n\\n\", \/\/ https:\/\/github.com\/graydon\/rust\/issues\/759\n \" : \", \/\/ https:\/\/github.com\/graydon\/rust\/issues\/760\n \"if ret\",\n \"alt ret\",\n \"if fail\",\n \"alt fail\"\n ];\n\n for p: str in confusing_patterns { if contains(code, p) { ret true; } }\n ret false;\n}\n\nfn file_is_confusing(filename: &str) -> bool {\n let\n\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/674\n\n \/\/ something to do with () as a lone pattern\n\n \/\/ an issue where -2147483648 gains an\n \/\/ extra negative sign each time through,\n \/\/ which i can't reproduce using \"rustc\n \/\/ --pretty normal\"???\n confusing_files =\n [\"block-expr-precedence.rs\", \"nil-pattern.rs\",\n \"syntax-extension-fmt.rs\",\n \"newtype.rs\" \/\/ modifying it hits something like https:\/\/github.com\/graydon\/rust\/issues\/670\n ];\n\n for f in confusing_files { if contains(filename, f) { ret true; } }\n\n ret false;\n}\n\nfn check_roundtrip_convergence(code: &str, maxIters: uint) {\n\n let i = 0u;\n let new = code;\n let old = code;\n\n while i < maxIters {\n old = new;\n if content_is_confusing(old) { ret; }\n new = parse_and_print(old);\n if old == new { break; }\n i += 1u;\n }\n\n if old == new {\n log_err #fmt(\"Converged after %u iterations\", i);\n } else {\n log_err #fmt(\"Did not converge after %u iterations!\", i);\n write_file(\"round-trip-a.rs\", old);\n write_file(\"round-trip-b.rs\", new);\n std::run::run_program(\"diff\", [\"-w\", \"-u\", \"round-trip-a.rs\", \"round-trip-b.rs\"]);\n fail \"Mismatch\";\n }\n}\n\nfn check_convergence(files: &str[]) {\n log_err #fmt(\"pp convergence tests: %u 
files\", ivec::len(files));\n for file in files {\n if !file_is_confusing(file) {\n let s = read_whole_file(file);\n if !content_is_confusing(s) {\n log_err #fmt(\"pp converge: %s\", file);\n \/\/ Change from 7u to 2u when https:\/\/github.com\/graydon\/rust\/issues\/759 is fixed\n check_roundtrip_convergence(s, 7u);\n }\n }\n }\n}\n\nfn check_convergence_of_variants(files: &str[]) {\n for file in files {\n if !file_is_confusing(file) {\n let s = read_whole_file(file);\n if content_is_dangerous_to_modify(s) || content_is_confusing(s) { cont; }\n log_err \"check_convergence_of_variants: \" + file;\n let codemap = codemap::new_codemap();\n let crate = parser::parse_crate_from_source_str(file, s, ~[], codemap);\n log_err as_str(bind pprust::print_crate(codemap, crate, file,\n ioivec::string_reader(s), _,\n pprust::no_ann()));\n pp_variants(*crate, codemap, file);\n }\n }\n}\n\nfn main(args: vec[str]) {\n if vec::len(args) != 2u {\n log_err #fmt(\"usage: %s <testdir>\", args.(0));\n ret;\n }\n let files = ~[];\n let root = args.(1);\n\n find_rust_files(files, root);\n check_convergence(files);\n check_convergence_of_variants(files);\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C $RBUILD 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\n<commit_msg>Make the fuzzer check for obvious errors in the 'rest of the compiler', not just the parser. (Disabled by default because it's slow and messy.)<commit_after>use std;\nuse rustc;\n\nimport std::fs;\nimport std::getopts;\nimport std::getopts::optopt;\nimport std::getopts::opt_present;\nimport std::getopts::opt_str;\nimport std::ioivec;\nimport std::ioivec::stdout;\nimport std::vec;\nimport std::ivec;\nimport std::str;\nimport std::uint;\nimport std::option;\n\nimport rustc::syntax::ast;\nimport rustc::syntax::fold;\nimport rustc::syntax::walk;\nimport rustc::syntax::codemap;\nimport rustc::syntax::parse::parser;\nimport rustc::syntax::print::pprust;\n\n\nfn read_whole_file(filename: &str) -> str {\n str::unsafe_from_bytes_ivec(ioivec::file_reader(filename).read_whole_stream())\n}\n\nfn write_file(filename: &str, content: &str) {\n ioivec::file_writer(filename,\n ~[ioivec::create,\n ioivec::truncate]).write_str(content);\n \/\/ Work around https:\/\/github.com\/graydon\/rust\/issues\/726\n std::run::run_program(\"chmod\", [\"644\", filename]);\n}\n\nfn file_contains(filename: &str, needle: &str) -> bool {\n let contents = read_whole_file(filename);\n ret str::find(contents, needle) != -1;\n}\n\nfn contains(haystack: &str, needle: &str) -> bool {\n str::find(haystack, needle) != -1\n}\n\nfn find_rust_files(files: &mutable str[], path: str) {\n if str::ends_with(path, \".rs\") {\n if file_contains(path, \"xfail-stage1\") {\n \/\/log_err \"Skipping \" + path + \" because it is marked as xfail-stage1\";\n } else { files += ~[path]; }\n } else if (fs::file_is_dir(path) && str::find(path, \"compile-fail\") == -1)\n {\n for p in fs::list_dir(path) { find_rust_files(files, p); }\n }\n}\n\nfn safe_to_steal(e: ast::expr_) -> bool {\n alt e {\n\n \/\/ pretty-printer precedence issues -- https:\/\/github.com\/graydon\/rust\/issues\/670\n ast::expr_unary(_, _) {\n false\n }\n ast::expr_lit(lit) {\n alt lit.node {\n ast::lit_str(_, _) { true }\n ast::lit_char(_) { true }\n ast::lit_int(_) { false }\n ast::lit_uint(_) { false }\n ast::lit_mach_int(_, _) { false }\n ast::lit_float(_) { false }\n 
ast::lit_mach_float(_, _) { false }\n ast::lit_nil. { true }\n ast::lit_bool(_) { true }\n }\n }\n ast::expr_cast(_, _) { false }\n ast::expr_send(_, _) { false }\n ast::expr_recv(_, _) { false }\n ast::expr_assert(_) { false }\n ast::expr_binary(_, _, _) { false }\n ast::expr_assign(_, _) { false }\n ast::expr_assign_op(_, _, _) { false }\n ast::expr_fail(option::none.) { false \/* https:\/\/github.com\/graydon\/rust\/issues\/764 *\/ }\n ast::expr_ret(option::none.) { false }\n ast::expr_put(option::none.) { false }\n\n ast::expr_ret(_) { false \/* lots of code generation issues, such as https:\/\/github.com\/graydon\/rust\/issues\/770 *\/ }\n ast::expr_fail(_) { false }\n\n _ {\n true\n }\n }\n}\n\nfn steal_exprs(crate: &ast::crate) -> ast::expr[] {\n let exprs: @mutable ast::expr[] = @mutable ~[];\n \/\/ \"Stash\" is not type-parameterized because of the need for safe_to_steal\n fn stash_expr(es: @mutable ast::expr[], e: &@ast::expr) {\n if safe_to_steal(e.node) {\n *es += ~[*e];\n } else {\/* now my indices are wrong :( *\/ }\n }\n let v =\n {visit_expr_pre: bind stash_expr(exprs, _)\n with walk::default_visitor()};\n walk::walk_crate(v, crate);\n *exprs\n}\n\n\/\/ https:\/\/github.com\/graydon\/rust\/issues\/652\nfn safe_to_replace(e: ast::expr_) -> bool {\n alt e {\n ast::expr_if(_, _, _) { false }\n ast::expr_block(_) { false }\n _ { true }\n }\n}\n\n\/\/ Replace the |i|th expr (in fold order) of |crate| with |newexpr|.\nfn replace_expr_in_crate(crate: &ast::crate, i: uint, newexpr: ast::expr_) ->\n ast::crate {\n let j: @mutable uint = @mutable 0u;\n fn fold_expr_rep(j_: @mutable uint, i_: uint, newexpr_: &ast::expr_,\n original: &ast::expr_, fld: fold::ast_fold) ->\n ast::expr_ {\n *j_ += 1u;\n if i_ + 1u == *j_ && safe_to_replace(original) {\n newexpr_\n } else { fold::noop_fold_expr(original, fld) }\n }\n let afp =\n {fold_expr: bind fold_expr_rep(j, i, newexpr, _, _)\n with *fold::default_ast_fold()};\n let af = fold::make_fold(afp);\n let crate2: @ast::crate = @af.fold_crate(crate);\n fold::dummy_out(af); \/\/ work around a leak (https:\/\/github.com\/graydon\/rust\/issues\/651)\n *crate2\n}\n\niter under(n: uint) -> uint {\n let i: uint = 0u;\n while i < n { put i; i += 1u; }\n}\n\nfn devnull() -> ioivec::writer { std::ioivec::string_writer().get_writer() }\n\nfn as_str(f: fn(ioivec::writer) ) -> str {\n let w = std::ioivec::string_writer();\n f(w.get_writer());\n ret w.get_str();\n}\n\nfn check_variants_of_ast(crate: &ast::crate, codemap: &codemap::codemap, filename: &str) {\n let exprs = steal_exprs(crate);\n let exprsL = ivec::len(exprs);\n if (exprsL < 100u) {\n for each i: uint in under(uint::min(exprsL, 20u)) {\n log_err \"Replacing... \" + pprust::expr_to_str(@exprs.(i));\n for each j: uint in under(uint::min(exprsL, 5u)) {\n log_err \"With... 
\" + pprust::expr_to_str(@exprs.(j));\n let crate2 = @replace_expr_in_crate(crate, i, exprs.(j).node);\n \/\/ It would be best to test the *crate* for stability, but testing the\n \/\/ string for stability is easier and ok for now.\n let str3 = as_str(bind pprust::print_crate(codemap, crate2, filename,\n ioivec::string_reader(\"\"), _,\n pprust::no_ann()));\n \/\/ 1u would be sane here, but the pretty-printer currently has lots of whitespace and paren issues,\n \/\/ and https:\/\/github.com\/graydon\/rust\/issues\/766 is hilarious.\n check_roundtrip_convergence(str3, 7u);\n \/\/check_whole_compiler(str3);\n }\n }\n }\n}\n\n\/\/ We'd find more bugs if we could take an AST here, but\n\/\/ - that would find many \"false positives\" or unimportant bugs\n\/\/ - that would be tricky, requiring use of tasks or serialization or randomness.\n\/\/ This seems to find plenty of bugs as it is :)\nfn check_whole_compiler(code: &str) {\n let filename = \"test.rs\";\n write_file(filename, code);\n let p = std::run::program_output(\"\/Users\/jruderman\/code\/rust\/build\/stage1\/rustc\", [\"-c\", filename]);\n \/\/log_err #fmt(\"Status: %d\", p.status);\n \/\/log_err \"Output: \" + p.out;\n if p.err != \"\" {\n if contains(p.err, \"argument of incompatible type\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/769\";\n } else if contains(p.err, \"Cannot create binary operator with two operands of differing type\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/770\";\n } else if contains(p.err, \"May only branch on boolean predicates!\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/770 or https:\/\/github.com\/graydon\/rust\/issues\/776\";\n } else if contains(p.err, \"Invalid constantexpr cast!\") && contains(code, \"!\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/777\";\n } else if contains(p.err, \"Both operands to ICmp instruction are not of the same type!\") && contains(code, \"!\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/777 #issuecomment-1678487\";\n } else if contains(p.err, \"Ptr must be a pointer to Val type!\") && contains(code, \"!\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/779\";\n } else if contains(p.err, \"Calling a function with bad signature!\") && (contains(code, \"iter\") || contains(code, \"range\")) {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/771 - calling an iter fails\";\n } else if contains(p.err, \"Calling a function with a bad signature!\") && contains(code, \"empty\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/775 - possibly a modification of run-pass\/import-glob-crate.rs\";\n } else if contains(p.err, \"Invalid type for pointer element!\") && contains(code, \"put\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/773 - put put ()\";\n } else if contains(p.err, \"pointer being freed was not allocated\") && contains(p.out, \"Out of stack space, sorry\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/768 + https:\/\/github.com\/graydon\/rust\/issues\/778\"\n } else {\n log_err \"Stderr: \" + p.err;\n fail \"Unfamiliar error message\";\n }\n } else if contains(p.out, \"non-exhaustive match failure\") && contains(p.out, \"alias.rs\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/772\";\n } else if contains(p.out, \"non-exhaustive match failure\") && contains(p.out, \"trans.rs\") && contains(code, \"put\") {\n log_err \"https:\/\/github.com\/graydon\/rust\/issues\/774\";\n } else if contains(p.out, \"Out of stack space, 
sorry\") {\n log_err \"Possibly a variant of https:\/\/github.com\/graydon\/rust\/issues\/768\";\n } else if p.status == 256 {\n if !contains(p.out, \"error:\") {\n fail \"Exited with status 256 without a span-error\";\n }\n } else if p.status == 11 {\n log_err \"What is this I don't even\";\n } else if p.status != 0 {\n fail \"Unfamiliar status code\";\n }\n}\n\nfn parse_and_print(code: &str) -> str {\n let filename = \"tmp.rs\";\n let codemap = codemap::new_codemap();\n \/\/write_file(filename, code);\n let crate =\n parser::parse_crate_from_source_str(filename, code, ~[], codemap);\n ret as_str(bind pprust::print_crate(codemap, crate, filename,\n ioivec::string_reader(code), _,\n pprust::no_ann()));\n}\n\nfn content_is_dangerous_to_modify(code: &str) -> bool {\n let dangerous_patterns = [\n \"obj\", \/\/ not safe to steal; https:\/\/github.com\/graydon\/rust\/issues\/761\n \"#macro\", \/\/ not safe to steal things inside of it, because they have a special syntax\n \"#\", \/\/ strange representation of the arguments to #fmt, for example\n \" be \", \/\/ don't want to replace its child with a non-call: \"Non-call expression in tail call\"\n \"@\" \/\/ hangs when compiling: https:\/\/github.com\/graydon\/rust\/issues\/768\n ];\n\n for p: str in dangerous_patterns { if contains(code, p) { ret true; } }\n ret false;\n}\n\nfn content_is_confusing(code: &str) -> bool {\n let \/\/ https:\/\/github.com\/graydon\/rust\/issues\/671\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/669\n \/\/ crazy rules enforced by parser rather than typechecker?\n \/\/ more precedence issues\n \/\/ more precedence issues?\n confusing_patterns =\n [\"#macro\", \"][]\", \"][mutable]\", \"][mutable ]\", \"self\", \"spawn\",\n \"bind\",\n \"\\n\\n\\n\\n\\n\", \/\/ https:\/\/github.com\/graydon\/rust\/issues\/759\n \" : \", \/\/ https:\/\/github.com\/graydon\/rust\/issues\/760\n \"if ret\",\n \"alt ret\",\n \"if fail\",\n \"alt fail\"\n ];\n\n for p: str in confusing_patterns { if contains(code, p) { ret true; } }\n ret false;\n}\n\nfn file_is_confusing(filename: &str) -> bool {\n let\n\n \/\/ https:\/\/github.com\/graydon\/rust\/issues\/674\n\n \/\/ something to do with () as a lone pattern\n\n \/\/ an issue where -2147483648 gains an\n \/\/ extra negative sign each time through,\n \/\/ which i can't reproduce using \"rustc\n \/\/ --pretty normal\"???\n confusing_files =\n [\"block-expr-precedence.rs\", \"nil-pattern.rs\",\n \"syntax-extension-fmt.rs\",\n \"newtype.rs\" \/\/ modifying it hits something like https:\/\/github.com\/graydon\/rust\/issues\/670\n ];\n\n for f in confusing_files { if contains(filename, f) { ret true; } }\n\n ret false;\n}\n\nfn check_roundtrip_convergence(code: &str, maxIters: uint) {\n\n let i = 0u;\n let new = code;\n let old = code;\n\n while i < maxIters {\n old = new;\n if content_is_confusing(old) { ret; }\n new = parse_and_print(old);\n if old == new { break; }\n i += 1u;\n }\n\n if old == new {\n log_err #fmt(\"Converged after %u iterations\", i);\n } else {\n log_err #fmt(\"Did not converge after %u iterations!\", i);\n write_file(\"round-trip-a.rs\", old);\n write_file(\"round-trip-b.rs\", new);\n std::run::run_program(\"diff\", [\"-w\", \"-u\", \"round-trip-a.rs\", \"round-trip-b.rs\"]);\n fail \"Mismatch\";\n }\n}\n\nfn check_convergence(files: &str[]) {\n log_err #fmt(\"pp convergence tests: %u files\", ivec::len(files));\n for file in files {\n if 
!file_is_confusing(file) {\n let s = read_whole_file(file);\n if !content_is_confusing(s) {\n log_err #fmt(\"pp converge: %s\", file);\n \/\/ Change from 7u to 2u when https:\/\/github.com\/graydon\/rust\/issues\/759 is fixed\n check_roundtrip_convergence(s, 7u);\n }\n }\n }\n}\n\nfn check_variants(files: &str[]) {\n for file in files {\n if !file_is_confusing(file) {\n let s = read_whole_file(file);\n if content_is_dangerous_to_modify(s) || content_is_confusing(s) { cont; }\n log_err \"check_variants: \" + file;\n let codemap = codemap::new_codemap();\n let crate = parser::parse_crate_from_source_str(file, s, ~[], codemap);\n log_err as_str(bind pprust::print_crate(codemap, crate, file,\n ioivec::string_reader(s), _,\n pprust::no_ann()));\n check_variants_of_ast(*crate, codemap, file);\n }\n }\n}\n\nfn main(args: vec[str]) {\n if vec::len(args) != 2u {\n log_err #fmt(\"usage: %s <testdir>\", args.(0));\n ret;\n }\n let files = ~[];\n let root = args.(1);\n\n find_rust_files(files, root);\n check_convergence(files);\n check_variants(files);\n log_err \"Fuzzer done\";\n}\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ compile-command: \"make -k -C $RBUILD 2>&1 | sed -e 's\/\\\\\/x\\\\\/\/x:\\\\\/\/g'\";\n\/\/ End:\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a basic base64 module<commit_after>import io::{reader, reader_util};\n\niface to_base64 {\n fn to_base64() -> str;\n}\n\nimpl of to_base64 for ~[u8] {\n fn to_base64() -> str {\n let chars = str::chars(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+\/\"\n );\n\n let len = self.len();\n let mut s = \"\";\n str::reserve(s, ((len + 3u) \/ 4u) * 3u);\n\n let mut i = 0u;\n\n while i < len - (len % 3u) {\n let n = (self[i] as uint) << 16u |\n (self[i + 1u] as uint) << 8u |\n (self[i + 2u] as uint);\n\n \/\/ This 24-bit number gets separated into four 6-bit numbers.\n str::push_char(s, chars[(n >> 18u) & 63u]);\n str::push_char(s, chars[(n >> 12u) & 63u]);\n str::push_char(s, chars[(n >> 6u) & 63u]);\n str::push_char(s, chars[n & 63u]);\n\n i += 3u;\n }\n\n alt check len % 3u {\n 0u { }\n 1u {\n let n = (self[i] as uint) << 16u;\n str::push_char(s, chars[(n >> 18u) & 63u]);\n str::push_char(s, chars[(n >> 12u) & 63u]);\n str::push_char(s, '=');\n str::push_char(s, '=');\n }\n 2u {\n let n = (self[i] as uint) << 16u | (self[i + 1u] as uint) << 8u;\n str::push_char(s, chars[(n >> 18u) & 63u]);\n str::push_char(s, chars[(n >> 12u) & 63u]);\n str::push_char(s, chars[(n >> 6u) & 63u]);\n str::push_char(s, '=');\n }\n }\n\n s\n }\n}\n\nimpl of to_base64 for str {\n fn to_base64() -> str {\n str::bytes(self).to_base64()\n }\n}\n\niface from_base64 {\n fn from_base64() -> ~[u8];\n}\n\nimpl of from_base64 for ~[u8] {\n fn from_base64() -> ~[u8] {\n if self.len() % 4u != 0u { fail \"invalid base64 length\"; }\n\n let len = self.len();\n let mut padding = 0u;\n\n if len != 0u {\n if self[len - 1u] == '=' as u8 { padding += 1u; }\n if self[len - 2u] == '=' as u8 { padding += 1u; }\n }\n\n let mut r = ~[];\n vec::reserve(r, (len \/ 4u) * 3u - padding);\n\n let mut i = 0u;\n while i < len {\n let mut n = 0u;\n\n for iter::repeat(4u) {\n let ch = self[i] as char;\n n <<= 6u;\n\n if ch >= 'A' && ch <= 'Z' {\n n |= (ch as uint) - 0x41u;\n } else if ch >= 'a' && ch <= 'z' {\n n |= (ch as uint) - 0x47u;\n } else if ch >= '0' && ch <= '9' {\n n |= (ch as uint) + 0x04u;\n } else if ch == '+' {\n n |= 0x3Eu;\n } 
else if ch == '\/' {\n n |= 0x3Fu;\n } else if ch == '=' {\n alt len - i {\n 1u {\n vec::push(r, ((n >> 16u) & 0xFFu) as u8);\n vec::push(r, ((n >> 8u ) & 0xFFu) as u8);\n ret copy r;\n }\n 2u {\n vec::push(r, ((n >> 10u) & 0xFFu) as u8);\n ret copy r;\n }\n _ {\n fail \"invalid base64 padding\";\n }\n }\n } else {\n fail \"invalid base64 character\";\n }\n\n i += 1u;\n };\n\n vec::push(r, ((n >> 16u) & 0xFFu) as u8);\n vec::push(r, ((n >> 8u ) & 0xFFu) as u8);\n vec::push(r, ((n ) & 0xFFu) as u8);\n }\n\n r\n }\n}\n\nimpl of from_base64 for str {\n fn from_base64() -> ~[u8] {\n str::bytes(self).from_base64()\n }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn test_to_base64() {\n assert \"\".to_base64() == \"\";\n assert \"f\".to_base64() == \"Zg==\";\n assert \"fo\".to_base64() == \"Zm8=\";\n assert \"foo\".to_base64() == \"Zm9v\";\n assert \"foob\".to_base64() == \"Zm9vYg==\";\n assert \"fooba\".to_base64() == \"Zm9vYmE=\";\n assert \"foobar\".to_base64() == \"Zm9vYmFy\";\n }\n\n #[test]\n fn test_from_base64() {\n assert \"\".from_base64() == str::bytes(\"\");\n assert \"Zg==\".from_base64() == str::bytes(\"f\");\n assert \"Zm8=\".from_base64() == str::bytes(\"fo\");\n assert \"Zm9v\".from_base64() == str::bytes(\"foo\");\n assert \"Zm9vYg==\".from_base64() == str::bytes(\"foob\");\n assert \"Zm9vYmE=\".from_base64() == str::bytes(\"fooba\");\n assert \"Zm9vYmFy\".from_base64() == str::bytes(\"foobar\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Advent<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Do not delete empty strings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Example example :)<commit_after>extern crate rosc;\n\nfn main() {\n println!(\"hello\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: shortcut for propagating errors: ?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #76977 - tmiasko:issue-76740, r=wesleywiser<commit_after>\/\/ Regression test for issue #76740.\n\/\/ run-fail FIXME: change to run-pass once #76899 lands\n\/\/ compile-flags: -Zmir-opt-level=3\n\n#[derive(Copy, Clone)]\npub struct V([usize; 4]);\n\nimpl V {\n fn new() -> Self {\n V([0; 4])\n }\n\n #[inline(never)]\n fn check(mut self) {\n assert_eq!(self.0[0], 0);\n self.0[0] = 1;\n }\n}\n\nfn main() {\n let v = V::new();\n let mut i = 0;\n while i != 10 {\n \/\/ Copy propagation incorrectly assumed that Operand::Move does not\n \/\/ mutate the local, and used the same v for each V::check call,\n \/\/ rather than a copy.\n v.check();\n i += 1;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use collections::string::ToString;\n\nuse common::event::MouseEvent;\nuse common::memory;\nuse common::time::{self, Duration};\n\nuse core::{cmp, mem, ptr, slice, str};\n\nuse graphics::display::VBEMODEINFO;\n\nuse scheduler::Context;\n\nuse super::{Packet, Pipe, Setup};\nuse super::desc::*;\n\npub trait Hci {\n fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize;\n\n fn descriptor(&mut self,\n address: u8,\n descriptor_type: u8,\n descriptor_index: u8,\n descriptor_ptr: usize,\n descriptor_len: usize) {\n self.msg(address, 0, Pipe::Control, &[\n Packet::Setup(&Setup::get_descriptor(descriptor_type, descriptor_index, 0, descriptor_len as u16)),\n Packet::In(&mut unsafe { slice::from_raw_parts_mut(descriptor_ptr as *mut u8, descriptor_len as usize) }),\n Packet::Out(&[])\n ]);\n }\n\n unsafe fn device(&mut self, address: u8) where Self: Sized + 'static {\n self.msg(0, 0, Pipe::Control, 
&[\n Packet::Setup(&Setup::set_address(address)),\n Packet::In(&mut [])\n ]);\n\n let mut desc_dev = box DeviceDescriptor::default();\n self.descriptor(address,\n DESC_DEV,\n 0,\n (&mut *desc_dev as *mut DeviceDescriptor) as usize,\n mem::size_of_val(&*desc_dev));\n debugln!(\"{:#?}\", *desc_dev);\n\n {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.manufacturer_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Manufacturer: {}\", desc_str.str());\n }\n\n {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.product_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Product: {}\", desc_str.str());\n }\n\n {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.serial_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Serial: {}\", desc_str.str());\n }\n\n for configuration in 0..(*desc_dev).configurations {\n let desc_cfg_len = 1023;\n let desc_cfg_buf = memory::alloc(desc_cfg_len) as *mut u8;\n for i in 0..desc_cfg_len as isize {\n ptr::write(desc_cfg_buf.offset(i), 0);\n }\n self.descriptor(address,\n DESC_CFG,\n configuration,\n desc_cfg_buf as usize,\n desc_cfg_len);\n\n let desc_cfg = ptr::read(desc_cfg_buf as *const ConfigDescriptor);\n debugln!(\"{:#?}\", desc_cfg);\n\n let mut hid = false;\n\n let mut i = desc_cfg.length as isize;\n while i < desc_cfg.total_length as isize {\n let length = ptr::read(desc_cfg_buf.offset(i));\n let descriptor_type = ptr::read(desc_cfg_buf.offset(i + 1));\n match descriptor_type {\n DESC_INT => {\n let desc_int = ptr::read(desc_cfg_buf.offset(i) as *const InterfaceDescriptor);\n debugln!(\"{:#?}\", desc_int);\n }\n DESC_END => {\n let desc_end = ptr::read(desc_cfg_buf.offset(i) as *const EndpointDescriptor);\n debugln!(\"{:#?}\", desc_end);\n\n let endpoint = desc_end.address & 0xF;\n let in_len = desc_end.max_packet_size as usize;\n\n if hid {\n let this = self as *mut Hci;\n Context::spawn(\"kuhci_hid\".to_string(), box move || {\n debugln!(\"Starting HID driver\");\n\n let in_ptr = memory::alloc(in_len) as *mut u8;\n\n loop {\n for i in 0..in_len as isize {\n ptr::write(in_ptr.offset(i), 0);\n }\n\n if (*this).msg(address, endpoint, Pipe::Isochronous, &[\n Packet::In(&mut slice::from_raw_parts_mut(in_ptr, in_len))\n ]) > 0 {\n let buttons = ptr::read(in_ptr.offset(0) as *const u8) as usize;\n let x = ptr::read(in_ptr.offset(1) as *const u16) as usize;\n let y = ptr::read(in_ptr.offset(3) as *const u16) as usize;\n\n let mode_info = &*VBEMODEINFO;\n let mouse_x = (x * mode_info.xresolution as usize) \/ 32768;\n let mouse_y = (y * mode_info.yresolution as usize) \/ 32768;\n\n let mouse_event = MouseEvent {\n x: cmp::max(0, cmp::min(mode_info.xresolution as i32 - 1, mouse_x as i32)),\n y: cmp::max(0, cmp::min(mode_info.yresolution as i32 - 1, mouse_y as i32)),\n left_button: buttons & 1 == 1,\n middle_button: buttons & 4 == 4,\n right_button: buttons & 2 == 2,\n };\n ::env().events.lock().push_back(mouse_event.to_event());\n }\n\n Duration::new(0, 10 * time::NANOS_PER_MILLI).sleep();\n }\n });\n }\n }\n DESC_HID => {\n let desc_hid = &*(desc_cfg_buf.offset(i) as *const HIDDescriptor);\n debugln!(\"{:#?}\", desc_hid);\n hid = true;\n }\n _ => {\n debugln!(\"Unknown Descriptor Length {} Type {:X}\", length as usize, 
descriptor_type);\n }\n }\n i += length as isize;\n }\n\n memory::unalloc(desc_cfg_buf as usize);\n }\n }\n}\n<commit_msg>Make sure string descriptors are not null<commit_after>use collections::string::ToString;\n\nuse common::event::MouseEvent;\nuse common::memory;\nuse common::time::{self, Duration};\n\nuse core::{cmp, mem, ptr, slice, str};\n\nuse graphics::display::VBEMODEINFO;\n\nuse scheduler::Context;\n\nuse super::{Packet, Pipe, Setup};\nuse super::desc::*;\n\npub trait Hci {\n fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize;\n\n fn descriptor(&mut self,\n address: u8,\n descriptor_type: u8,\n descriptor_index: u8,\n descriptor_ptr: usize,\n descriptor_len: usize) {\n self.msg(address, 0, Pipe::Control, &[\n Packet::Setup(&Setup::get_descriptor(descriptor_type, descriptor_index, 0, descriptor_len as u16)),\n Packet::In(&mut unsafe { slice::from_raw_parts_mut(descriptor_ptr as *mut u8, descriptor_len as usize) }),\n Packet::Out(&[])\n ]);\n }\n\n unsafe fn device(&mut self, address: u8) where Self: Sized + 'static {\n self.msg(0, 0, Pipe::Control, &[\n Packet::Setup(&Setup::set_address(address)),\n Packet::In(&mut [])\n ]);\n\n let mut desc_dev = box DeviceDescriptor::default();\n self.descriptor(address,\n DESC_DEV,\n 0,\n (&mut *desc_dev as *mut DeviceDescriptor) as usize,\n mem::size_of_val(&*desc_dev));\n debugln!(\"{:#?}\", *desc_dev);\n\n if desc_dev.manufacturer_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.manufacturer_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Manufacturer: {}\", desc_str.str());\n }\n\n if desc_dev.product_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.product_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Product: {}\", desc_str.str());\n }\n\n if desc_dev.serial_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.serial_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Serial: {}\", desc_str.str());\n }\n\n for configuration in 0..(*desc_dev).configurations {\n let desc_cfg_len = 1023;\n let desc_cfg_buf = memory::alloc(desc_cfg_len) as *mut u8;\n for i in 0..desc_cfg_len as isize {\n ptr::write(desc_cfg_buf.offset(i), 0);\n }\n self.descriptor(address,\n DESC_CFG,\n configuration,\n desc_cfg_buf as usize,\n desc_cfg_len);\n\n let desc_cfg = ptr::read(desc_cfg_buf as *const ConfigDescriptor);\n debugln!(\"{:#?}\", desc_cfg);\n\n if desc_cfg.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_cfg.string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Configuration: {}\", desc_str.str());\n }\n\n let mut hid = false;\n\n let mut i = desc_cfg.length as isize;\n while i < desc_cfg.total_length as isize {\n let length = ptr::read(desc_cfg_buf.offset(i));\n let descriptor_type = ptr::read(desc_cfg_buf.offset(i + 1));\n match descriptor_type {\n DESC_INT => {\n let desc_int = ptr::read(desc_cfg_buf.offset(i) as *const InterfaceDescriptor);\n debugln!(\"{:#?}\", desc_int);\n\n if desc_int.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_int.string,\n (&mut *desc_str as 
*mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Interface: {}\", desc_str.str());\n }\n }\n DESC_END => {\n let desc_end = ptr::read(desc_cfg_buf.offset(i) as *const EndpointDescriptor);\n debugln!(\"{:#?}\", desc_end);\n\n let endpoint = desc_end.address & 0xF;\n let in_len = desc_end.max_packet_size as usize;\n\n if hid {\n let this = self as *mut Hci;\n Context::spawn(\"kuhci_hid\".to_string(), box move || {\n debugln!(\"Starting HID driver\");\n\n let in_ptr = memory::alloc(in_len) as *mut u8;\n\n loop {\n for i in 0..in_len as isize {\n ptr::write(in_ptr.offset(i), 0);\n }\n\n if (*this).msg(address, endpoint, Pipe::Isochronous, &[\n Packet::In(&mut slice::from_raw_parts_mut(in_ptr, in_len))\n ]) > 0 {\n let buttons = ptr::read(in_ptr.offset(0) as *const u8) as usize;\n let x = ptr::read(in_ptr.offset(1) as *const u16) as usize;\n let y = ptr::read(in_ptr.offset(3) as *const u16) as usize;\n\n let mode_info = &*VBEMODEINFO;\n let mouse_x = (x * mode_info.xresolution as usize) \/ 32768;\n let mouse_y = (y * mode_info.yresolution as usize) \/ 32768;\n\n let mouse_event = MouseEvent {\n x: cmp::max(0, cmp::min(mode_info.xresolution as i32 - 1, mouse_x as i32)),\n y: cmp::max(0, cmp::min(mode_info.yresolution as i32 - 1, mouse_y as i32)),\n left_button: buttons & 1 == 1,\n middle_button: buttons & 4 == 4,\n right_button: buttons & 2 == 2,\n };\n ::env().events.lock().push_back(mouse_event.to_event());\n }\n\n Duration::new(0, 10 * time::NANOS_PER_MILLI).sleep();\n }\n });\n }\n }\n DESC_HID => {\n let desc_hid = &*(desc_cfg_buf.offset(i) as *const HIDDescriptor);\n debugln!(\"{:#?}\", desc_hid);\n hid = true;\n }\n _ => {\n debugln!(\"Unknown Descriptor Length {} Type {:X}\", length as usize, descriptor_type);\n }\n }\n i += length as isize;\n }\n\n memory::unalloc(desc_cfg_buf as usize);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add native_closures.rs example with GMainLoop and g_idle_add()<commit_after>#![feature(libc)]\nextern crate libc;\n\n#[repr(C)]\nstruct GMainLoop;\n\n#[link(name = \"glib-2.0\")]\nextern {\n fn g_main_loop_new(ctx: *const libc::c_void, is_running: libc::c_int) -> *mut GMainLoop;\n fn g_main_loop_run(l: *mut GMainLoop);\n fn g_main_loop_quit(l: *mut GMainLoop);\n fn g_main_loop_ref(l: *mut GMainLoop) -> *mut GMainLoop;\n fn g_main_loop_unref(l: *mut GMainLoop);\n\n fn g_idle_add_full(prio: libc::c_int, f: extern fn (*mut libc::c_void) -> libc::c_int, user_data: *mut libc::c_void, destroy: extern fn (*mut libc::c_void));\n}\n\nstruct MainLoop {\n raw: *mut GMainLoop\n}\n\nimpl MainLoop {\n fn new() -> MainLoop {\n unsafe {\n let raw = g_main_loop_new(std::ptr::null(), 0);\n return MainLoop {raw: raw};\n };\n }\n\n fn run(&self) {\n unsafe {\n g_main_loop_run(self.raw);\n }\n }\n\n fn quit(&self) {\n unsafe {\n g_main_loop_quit(self.raw);\n }\n }\n}\n\nimpl Drop for MainLoop {\n fn drop(&mut self) {\n unsafe {\n g_main_loop_unref(self.raw);\n }\n }\n}\n\nimpl Clone for MainLoop {\n fn clone(&self) -> Self {\n unsafe {\n return MainLoop{raw: g_main_loop_ref(self.raw)};\n }\n }\n}\n\nenum SourceReturn {\n SourceContinue,\n SourceRemove\n}\n\nfn idle_add<F>(f: F)\n where F: FnMut() -> SourceReturn + 'static {\n let closure = Box::new(f);\n\n unsafe {\n g_idle_add_full(200, dispatch::<F>, std::mem::transmute(closure), destroy::<F>);\n }\n\n extern fn dispatch<F>(user_data: *mut libc::c_void) -> libc::c_int\n where F: FnMut() -> SourceReturn + 'static {\n unsafe {\n let mut closure: Box<F> = 
std::mem::transmute(user_data);\n\n let res = match (*closure)() {\n SourceReturn::SourceRemove => 0,\n _ => 1\n };\n\n std::mem::forget(closure);\n return res;\n }\n }\n\n extern fn destroy<F>(user_data: *mut libc::c_void)\n where F: FnMut() -> SourceReturn + 'static{\n unsafe {\n let _: Box<F> = std::mem::transmute(user_data);\n }\n }\n}\n\nstruct Foo {\n b : u32\n}\n\nimpl Drop for Foo {\n fn drop(&mut self) {\n print!(\"drop!\\n\");\n }\n}\n\nfn foo(l: &mut MainLoop) {\n let l = l.clone();\n let mut x = 0;\n let y = Foo{b: 1};\n\n idle_add(move || {\n x += 1;\n print!(\"bar {} {}\\n\", x, y.b);\n\n if x >= 100 {\n l.quit();\n return SourceReturn::SourceRemove;\n }\n\n return SourceReturn::SourceContinue;\n });\n}\n\nfn main() {\n let mut l = MainLoop::new();\n\n foo(&mut l);\n\n l.run();\n\n print!(\"done\\n\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Added Hello World example.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Enviroment data\n\nuse alloc::boxed::Box;\n\nuse core_collections::borrow::ToOwned;\n\nuse ffi::{OsString, OsStr};\nuse fs::{self, File};\nuse path::{Path, PathBuf};\nuse string::{String, ToString};\nuse sys_common::AsInner;\nuse vec::Vec;\n\nuse system::error::ENOENT;\nuse system::syscall::sys_chdir;\n\nuse io::{Error, Result, Read, Write};\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(location) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n \/\/ Return the current path\n get_path_from(\".\/\")\n}\n\n\/\/\/ Method to return the home directory\npub fn home_dir() -> Option<PathBuf> {\n get_path_from(\"\/home\/\").ok()\n}\n\npub fn temp_dir() -> Option<PathBuf> {\n get_path_from(\"\/tmp\/\").ok()\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let file_result = if path_str.is_empty() || path_str.ends_with('\/') {\n File::open(path_str)\n } else {\n let mut path_string = path_str.to_owned();\n path_string.push_str(\"\/\");\n File::open(path_string)\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let mut path_c = 
path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_chdir(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n } else {\n Err(Error::new_sys(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\npub enum VarError {\n NotPresent,\n NotUnicode(OsString),\n}\n\n\/\/\/ Returns the environment variable `key` from the current process. If `key` is not valid Unicode\n\/\/\/ or if the variable is not present then `Err` is returned\npub fn var<K: AsRef<OsStr>>(key: K) -> ::core::result::Result<String, VarError> {\n if let Some(key_str) = key.as_ref().to_str() {\n let mut file = try!(File::open(&(\"env:\".to_owned() + key_str)).or(Err(VarError::NotPresent)));\n let mut string = String::new();\n try!(file.read_to_string(&mut string).or(Err(VarError::NotPresent)));\n Ok(string)\n } else {\n Err(VarError::NotUnicode(key.as_ref().to_owned()))\n }\n}\n\npub fn var_os<K: AsRef<OsStr>>(key: K) -> Option<OsString> {\n if let Ok(value) = var(key) {\n Some((value.as_ref() as &OsStr).to_owned())\n } else {\n None\n }\n}\n\n\/\/\/ Sets the environment variable `key` to the value `value` for the current process\npub fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) {\n if let (Some(key_str), Some(value_str)) = (key.as_ref().to_str(), value.as_ref().to_str()) {\n if let Ok(mut file) = File::open(&(\"env:\".to_owned() + key_str)) {\n let _ = file.write_all(value_str.as_bytes());\n }\n }\n}\n\n\/\/\/ Removes an environment variable from the environment of the current process\npub fn remove_var<K: AsRef<OsStr>>(key: K) {\n if let Some(key_str) = key.as_ref().to_str() {\n let _ = fs::remove_file(&(\"env:\".to_owned() + key_str));\n }\n}\n\npub struct Vars {\n vars: Vec<(String, String)>,\n pos: usize\n}\n\nimpl Iterator for Vars {\n type Item = (String, String);\n\n fn next(&mut self) -> Option<Self::Item> {\n let variable = self.vars.get(self.pos);\n self.pos += 1;\n variable.cloned()\n }\n}\n\n\/\/\/ Returns an iterator over the environment variables of the current process\npub fn vars() -> Vars {\n let mut variables: Vec<(String, String)> = Vec::new();\n if let Ok(mut file) = File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n if let Some(equal_sign) = line.chars().position(|c| c == '=') {\n let name = line.chars().take(equal_sign).collect::<String>();\n let value = line.chars().skip(equal_sign+1).collect::<String>();\n variables.push((name, value));\n }\n }\n return Vars { vars: variables, pos: 0 };\n }\n }\n Vars { vars: Vec::new(), pos: 0 }\n}\n<commit_msg>Add some documentation to std::env<commit_after>\/\/! 
Enviroment data\n\nuse alloc::boxed::Box;\n\nuse core_collections::borrow::ToOwned;\n\nuse ffi::{OsString, OsStr};\nuse fs::{self, File};\nuse path::{Path, PathBuf};\nuse string::{String, ToString};\nuse sys_common::AsInner;\nuse vec::Vec;\nuse error;\nuse fmt;\n\nuse system::error::ENOENT;\nuse system::syscall::sys_chdir;\n\nuse io::{Error, Result, Read, Write};\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\n\/\/\/ An iterator over the arguments of a process, yielding a `String` value for each argument.\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(location) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n \/\/ Return the current path\n get_path_from(\".\/\")\n}\n\n\/\/\/ Method to return the home directory\npub fn home_dir() -> Option<PathBuf> {\n get_path_from(\"\/home\/\").ok()\n}\n\npub fn temp_dir() -> Option<PathBuf> {\n get_path_from(\"\/tmp\/\").ok()\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let file_result = if path_str.is_empty() || path_str.ends_with('\/') {\n File::open(path_str)\n } else {\n let mut path_string = path_str.to_owned();\n path_string.push_str(\"\/\");\n File::open(path_string)\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_chdir(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n } else {\n Err(Error::new_sys(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Possible errors from the `env::var` method.\n#[derive(Debug)]\npub enum VarError {\n \/\/\/ The specified environment variable was not set.\n NotPresent,\n \/\/\/ The key or the value of the specified environment variable did not contain valid Unicode\n \/\/\/ data.\n NotUnicode(OsString),\n}\n\nimpl fmt::Display for VarError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n VarError::NotPresent => write!(f, \"environment variable not found\"),\n VarError::NotUnicode(ref s) => write!(f, \"environment variable was not valid unicode: {:?}\", s)\n }\n }\n}\n\nimpl 
error::Error for VarError {\n fn description(&self) -> &str {\n match *self {\n VarError::NotPresent => \"environment variable not found\",\n VarError::NotUnicode(_) => \"environment variable was not valid unicode\"\n }\n }\n}\n\n\/\/\/ Returns the environment variable `key` from the current process. If `key` is not valid Unicode\n\/\/\/ or if the variable is not present then `Err` is returned\npub fn var<K: AsRef<OsStr>>(key: K) -> ::core::result::Result<String, VarError> {\n if let Some(key_str) = key.as_ref().to_str() {\n let mut file = try!(File::open(&(\"env:\".to_owned() + key_str)).or(Err(VarError::NotPresent)));\n let mut string = String::new();\n try!(file.read_to_string(&mut string).or(Err(VarError::NotPresent)));\n Ok(string)\n } else {\n Err(VarError::NotUnicode(key.as_ref().to_owned()))\n }\n}\n\n\/\/\/ Fetches the environment variable `key` from the current process, returning `None` if the\n\/\/\/ variable isn't set.\npub fn var_os<K: AsRef<OsStr>>(key: K) -> Option<OsString> {\n if let Ok(value) = var(key) {\n Some((value.as_ref() as &OsStr).to_owned())\n } else {\n None\n }\n}\n\n\/\/\/ Sets the environment variable `key` to the value `value` for the current process\npub fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(key: K, value: V) {\n if let (Some(key_str), Some(value_str)) = (key.as_ref().to_str(), value.as_ref().to_str()) {\n if let Ok(mut file) = File::open(&(\"env:\".to_owned() + key_str)) {\n let _ = file.write_all(value_str.as_bytes());\n }\n }\n}\n\n\/\/\/ Removes an environment variable from the environment of the current process\npub fn remove_var<K: AsRef<OsStr>>(key: K) {\n if let Some(key_str) = key.as_ref().to_str() {\n let _ = fs::remove_file(&(\"env:\".to_owned() + key_str));\n }\n}\n\n\/\/\/ An iterator over the snapshot of the environment variables of this process.\n\/\/\/ This iterator is created through `std::env::vars() and yields (String, String) pairs.`\npub struct Vars {\n vars: Vec<(String, String)>,\n pos: usize\n}\n\nimpl Iterator for Vars {\n type Item = (String, String);\n\n fn next(&mut self) -> Option<Self::Item> {\n let variable = self.vars.get(self.pos);\n self.pos += 1;\n variable.cloned()\n }\n}\n\n\/\/\/ Returns an iterator over the environment variables of the current process\npub fn vars() -> Vars {\n let mut variables: Vec<(String, String)> = Vec::new();\n if let Ok(mut file) = File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n if let Some(equal_sign) = line.chars().position(|c| c == '=') {\n let name = line.chars().take(equal_sign).collect::<String>();\n let value = line.chars().skip(equal_sign+1).collect::<String>();\n variables.push((name, value));\n }\n }\n return Vars { vars: variables, pos: 0 };\n }\n }\n Vars { vars: Vec::new(), pos: 0 }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add line feed character<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n 
window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n if self.cur() == '\\n' || self.cur() == '\\0' {\n self.left();\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset >= 1 {\n self.left();\n }\n } else {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n\n while self.cur() != '\\n' {\n self.left();\n }\n self.right();\n let mut new_offset = 0;\n\n\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n let original_c = self.cur();\n\n while self.offset >= self.string.len() &&\n self.cur() != '\\n' &&\n self.cur() != '\\0' {\n self.right();\n }\n self.right();\n\n if original_c == '\\n' {\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset < self.string.len() {\n self.right();\n }\n } else {\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn get_x(&self) -> usize {\n let mut x = 0;\n for (n, c) in self.string.chars().enumerate() {\n if c == '\\n' {\n x = 0;\n } else {\n x += 1;\n }\n if n >= self.offset {\n break;\n }\n }\n x\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut 
swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Fix bugs after refactoring<commit_after>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n if self.cur() == '\\n' || self.cur() == '\\0' {\n self.left();\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset >= 1 {\n self.left();\n }\n } else {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n\n while self.cur() != '\\n' &&\n self.offset >= 1 {\n self.left();\n }\n self.right();\n let mut new_offset = 0;\n\n\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n let original_c = self.cur();\n\n while self.offset < self.string.len() &&\n self.cur() != '\\n' &&\n self.cur() != '\\0' {\n self.right();\n }\n self.right();\n\n if original_c == '\\n' {\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset < self.string.len() {\n self.right();\n }\n } else {\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn get_x(&self) -> usize {\n let mut x = 0;\n for (n, c) in self.string.chars().enumerate() {\n if c == '\\n' {\n x = 0;\n } else {\n x += 1;\n }\n if n >= self.offset {\n break;\n }\n }\n x\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut 
swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Set user agent...<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create hello-world-cl.rs<commit_after>fn main() {\n \/\/ Print text to the console\n println!(\"Hello World!\");\n println!(\"Hola mundo!\");\n println!(\"Bonjour le monde!\");\n println!(\"Hallo verden!\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ based on:\n\/\/ http:\/\/shootout.alioth.debian.org\/u32\/benchmark.php?test=nbody&lang=java\n\n#[abi = \"cdecl\"]\n#[nolink]\nnative mod llvm {\n fn sqrt(n: float) -> float;\n}\n\nfn main() {\n \/\/\n \/\/ Leave these commented out to\n \/\/ finish in a reasonable time\n \/\/ during 'make check' under valgrind\n \/\/ 5000000\n \/\/ 50000000\n let inputs: [int] = [50000, 500000];\n\n let bodies: [Body::props] = NBodySystem::MakeNBodySystem();\n\n\n for n: int in inputs {\n log(debug, NBodySystem::energy(bodies));\n\n let i: int = 0;\n while i < n { NBodySystem::advance(bodies, 0.01); i += 1; }\n log(debug, NBodySystem::energy(bodies));\n }\n}\n\n\/\/ Body::props is a record of floats, so\n\/\/ vec<Body::props> is a vector of records of floats\n\nmod NBodySystem {\n\n fn MakeNBodySystem() -> [Body::props] {\n \/\/ these each return a Body::props\n let bodies: [Body::props] =\n [Body::sun(), Body::jupiter(), Body::saturn(), Body::uranus(),\n Body::neptune()];\n\n let px: float = 0.0;\n let py: float = 0.0;\n let pz: float = 0.0;\n\n let i: int = 0;\n while i < 5 {\n px += bodies[i].vx * bodies[i].mass;\n py += bodies[i].vy * bodies[i].mass;\n pz += bodies[i].vz * bodies[i].mass;\n\n i += 1;\n }\n\n \/\/ side-effecting\n Body::offsetMomentum(bodies[0], px, py, pz);\n\n ret bodies;\n }\n\n fn advance(bodies: [Body::props], dt: float) {\n\n let i: int = 0;\n while i < 5 {\n let j: int = i + 1;\n while j < 5 { advance_one(bodies[i], bodies[j], dt); j += 1; }\n\n i += 1;\n }\n\n i = 0;\n while i < 5 { move(bodies[i], dt); i += 1; }\n }\n\n fn advance_one(bi: Body::props, bj: Body::props, dt: float) unsafe {\n let dx: float = bi.x - bj.x;\n let dy: float = bi.y - bj.y;\n let dz: float = bi.z - bj.z;\n\n let dSquared: float = dx * dx + dy * dy + dz * dz;\n\n let distance: float = llvm::sqrt(dSquared);\n let mag: float = dt \/ (dSquared * distance);\n\n bi.vx -= dx * bj.mass * mag;\n bi.vy -= dy * bj.mass * mag;\n bi.vz -= dz * bj.mass * mag;\n\n bj.vx += dx * bi.mass * mag;\n bj.vy += dy * bi.mass * mag;\n bj.vz += dz * bi.mass * mag;\n }\n\n fn move(b: Body::props, dt: float) {\n b.x += dt * b.vx;\n b.y += dt * b.vy;\n b.z += dt * b.vz;\n }\n\n fn energy(bodies: [Body::props]) -> float unsafe {\n let dx: float;\n let dy: float;\n let dz: float;\n let distance: float;\n let e: float = 0.0;\n\n let i: int = 0;\n while i < 5 {\n e +=\n 0.5 * bodies[i].mass *\n (bodies[i].vx * bodies[i].vx + bodies[i].vy * bodies[i].vy\n + bodies[i].vz * bodies[i].vz);\n\n let j: int = i + 1;\n while j < 5 {\n dx = bodies[i].x - bodies[j].x;\n dy = bodies[i].y - bodies[j].y;\n dz = bodies[i].z - bodies[j].z;\n\n distance = llvm::sqrt(dx * dx + dy * dy + dz * dz);\n e -= bodies[i].mass * bodies[j].mass \/ distance;\n\n j += 1;\n }\n\n i += 1;\n }\n ret e;\n\n }\n}\n\nmod Body {\n\n const PI: float = 3.141592653589793;\n const SOLAR_MASS: float = 39.478417604357432;\n \/\/ was 4 
* PI * PI originally\n const DAYS_PER_YEAR: float = 365.24;\n\n type props =\n {mutable x: float,\n mutable y: float,\n mutable z: float,\n mutable vx: float,\n mutable vy: float,\n mutable vz: float,\n mass: float};\n\n fn jupiter() -> Body::props {\n ret {mutable x: 4.84143144246472090e+00,\n mutable y: -1.16032004402742839e+00,\n mutable z: -1.03622044471123109e-01,\n mutable vx: 1.66007664274403694e-03 * DAYS_PER_YEAR,\n mutable vy: 7.69901118419740425e-03 * DAYS_PER_YEAR,\n mutable vz: -6.90460016972063023e-05 * DAYS_PER_YEAR,\n mass: 9.54791938424326609e-04 * SOLAR_MASS};\n }\n\n fn saturn() -> Body::props {\n ret {mutable x: 8.34336671824457987e+00,\n mutable y: 4.12479856412430479e+00,\n mutable z: -4.03523417114321381e-01,\n mutable vx: -2.76742510726862411e-03 * DAYS_PER_YEAR,\n mutable vy: 4.99852801234917238e-03 * DAYS_PER_YEAR,\n mutable vz: 2.30417297573763929e-05 * DAYS_PER_YEAR,\n mass: 2.85885980666130812e-04 * SOLAR_MASS};\n }\n\n fn uranus() -> Body::props {\n ret {mutable x: 1.28943695621391310e+01,\n mutable y: -1.51111514016986312e+01,\n mutable z: -2.23307578892655734e-01,\n mutable vx: 2.96460137564761618e-03 * DAYS_PER_YEAR,\n mutable vy: 2.37847173959480950e-03 * DAYS_PER_YEAR,\n mutable vz: -2.96589568540237556e-05 * DAYS_PER_YEAR,\n mass: 4.36624404335156298e-05 * SOLAR_MASS};\n }\n\n fn neptune() -> Body::props {\n ret {mutable x: 1.53796971148509165e+01,\n mutable y: -2.59193146099879641e+01,\n mutable z: 1.79258772950371181e-01,\n mutable vx: 2.68067772490389322e-03 * DAYS_PER_YEAR,\n mutable vy: 1.62824170038242295e-03 * DAYS_PER_YEAR,\n mutable vz: -9.51592254519715870e-05 * DAYS_PER_YEAR,\n mass: 5.15138902046611451e-05 * SOLAR_MASS};\n }\n\n fn sun() -> Body::props {\n ret {mutable x: 0.0,\n mutable y: 0.0,\n mutable z: 0.0,\n mutable vx: 0.0,\n mutable vy: 0.0,\n mutable vz: 0.0,\n mass: SOLAR_MASS};\n }\n\n fn offsetMomentum(props: Body::props, px: float, py: float, pz: float) {\n props.vx = -px \/ SOLAR_MASS;\n props.vy = -py \/ SOLAR_MASS;\n props.vz = -pz \/ SOLAR_MASS;\n }\n\n}\n<commit_msg>bench: Update shootout-nbody for style<commit_after>\/\/ based on:\n\/\/ http:\/\/shootout.alioth.debian.org\/u32\/benchmark.php?test=nbody&lang=java\n\nuse std;\n\n\/\/ Using sqrt from the standard library is way slower than using libc\n\/\/ directly even though std just calls libc, I guess it must be\n\/\/ because the the indirection through another dynamic linker\n\/\/ stub. Kind of shocking. 
Might be able to make it faster still with\n\/\/ an llvm intrinsic.\n#[nolink]\nnative mod libc {\n fn sqrt(n: float) -> float;\n}\n\nfn main(args: [str]) {\n let n = if vec::len(args) == 2u {\n int::from_str(args[1])\n } else {\n 1000000\n };\n let bodies: [Body::props] = NBodySystem::MakeNBodySystem();\n std::io::println(#fmt(\"%f\", NBodySystem::energy(bodies)));\n let i: int = 0;\n while i < n { NBodySystem::advance(bodies, 0.01); i += 1; }\n std::io::println(#fmt(\"%f\", NBodySystem::energy(bodies)));\n}\n\n\/\/ Body::props is a record of floats, so\n\/\/ vec<Body::props> is a vector of records of floats\n\nmod NBodySystem {\n\n fn MakeNBodySystem() -> [Body::props] {\n \/\/ these each return a Body::props\n let bodies: [Body::props] =\n [Body::sun(), Body::jupiter(), Body::saturn(), Body::uranus(),\n Body::neptune()];\n\n let px: float = 0.0;\n let py: float = 0.0;\n let pz: float = 0.0;\n\n let i: int = 0;\n while i < 5 {\n px += bodies[i].vx * bodies[i].mass;\n py += bodies[i].vy * bodies[i].mass;\n pz += bodies[i].vz * bodies[i].mass;\n\n i += 1;\n }\n\n \/\/ side-effecting\n Body::offsetMomentum(bodies[0], px, py, pz);\n\n ret bodies;\n }\n\n fn advance(bodies: [Body::props], dt: float) {\n\n let i: int = 0;\n while i < 5 {\n let j: int = i + 1;\n while j < 5 { advance_one(bodies[i], bodies[j], dt); j += 1; }\n\n i += 1;\n }\n\n i = 0;\n while i < 5 { move(bodies[i], dt); i += 1; }\n }\n\n fn advance_one(bi: Body::props, bj: Body::props, dt: float) unsafe {\n let dx: float = bi.x - bj.x;\n let dy: float = bi.y - bj.y;\n let dz: float = bi.z - bj.z;\n\n let dSquared: float = dx * dx + dy * dy + dz * dz;\n\n let distance: float = libc::sqrt(dSquared);\n let mag: float = dt \/ (dSquared * distance);\n\n bi.vx -= dx * bj.mass * mag;\n bi.vy -= dy * bj.mass * mag;\n bi.vz -= dz * bj.mass * mag;\n\n bj.vx += dx * bi.mass * mag;\n bj.vy += dy * bi.mass * mag;\n bj.vz += dz * bi.mass * mag;\n }\n\n fn move(b: Body::props, dt: float) {\n b.x += dt * b.vx;\n b.y += dt * b.vy;\n b.z += dt * b.vz;\n }\n\n fn energy(bodies: [Body::props]) -> float unsafe {\n let dx: float;\n let dy: float;\n let dz: float;\n let distance: float;\n let e: float = 0.0;\n\n let i: int = 0;\n while i < 5 {\n e +=\n 0.5 * bodies[i].mass *\n (bodies[i].vx * bodies[i].vx + bodies[i].vy * bodies[i].vy\n + bodies[i].vz * bodies[i].vz);\n\n let j: int = i + 1;\n while j < 5 {\n dx = bodies[i].x - bodies[j].x;\n dy = bodies[i].y - bodies[j].y;\n dz = bodies[i].z - bodies[j].z;\n\n distance = libc::sqrt(dx * dx + dy * dy + dz * dz);\n e -= bodies[i].mass * bodies[j].mass \/ distance;\n\n j += 1;\n }\n\n i += 1;\n }\n ret e;\n\n }\n}\n\nmod Body {\n\n const PI: float = 3.141592653589793;\n const SOLAR_MASS: float = 39.478417604357432;\n \/\/ was 4 * PI * PI originally\n const DAYS_PER_YEAR: float = 365.24;\n\n type props =\n {mutable x: float,\n mutable y: float,\n mutable z: float,\n mutable vx: float,\n mutable vy: float,\n mutable vz: float,\n mass: float};\n\n fn jupiter() -> Body::props {\n ret {mutable x: 4.84143144246472090e+00,\n mutable y: -1.16032004402742839e+00,\n mutable z: -1.03622044471123109e-01,\n mutable vx: 1.66007664274403694e-03 * DAYS_PER_YEAR,\n mutable vy: 7.69901118419740425e-03 * DAYS_PER_YEAR,\n mutable vz: -6.90460016972063023e-05 * DAYS_PER_YEAR,\n mass: 9.54791938424326609e-04 * SOLAR_MASS};\n }\n\n fn saturn() -> Body::props {\n ret {mutable x: 8.34336671824457987e+00,\n mutable y: 4.12479856412430479e+00,\n mutable z: -4.03523417114321381e-01,\n mutable vx: -2.76742510726862411e-03 * 
DAYS_PER_YEAR,\n mutable vy: 4.99852801234917238e-03 * DAYS_PER_YEAR,\n mutable vz: 2.30417297573763929e-05 * DAYS_PER_YEAR,\n mass: 2.85885980666130812e-04 * SOLAR_MASS};\n }\n\n fn uranus() -> Body::props {\n ret {mutable x: 1.28943695621391310e+01,\n mutable y: -1.51111514016986312e+01,\n mutable z: -2.23307578892655734e-01,\n mutable vx: 2.96460137564761618e-03 * DAYS_PER_YEAR,\n mutable vy: 2.37847173959480950e-03 * DAYS_PER_YEAR,\n mutable vz: -2.96589568540237556e-05 * DAYS_PER_YEAR,\n mass: 4.36624404335156298e-05 * SOLAR_MASS};\n }\n\n fn neptune() -> Body::props {\n ret {mutable x: 1.53796971148509165e+01,\n mutable y: -2.59193146099879641e+01,\n mutable z: 1.79258772950371181e-01,\n mutable vx: 2.68067772490389322e-03 * DAYS_PER_YEAR,\n mutable vy: 1.62824170038242295e-03 * DAYS_PER_YEAR,\n mutable vz: -9.51592254519715870e-05 * DAYS_PER_YEAR,\n mass: 5.15138902046611451e-05 * SOLAR_MASS};\n }\n\n fn sun() -> Body::props {\n ret {mutable x: 0.0,\n mutable y: 0.0,\n mutable z: 0.0,\n mutable vx: 0.0,\n mutable vy: 0.0,\n mutable vz: 0.0,\n mass: SOLAR_MASS};\n }\n\n fn offsetMomentum(props: Body::props, px: float, py: float, pz: float) {\n props.vx = -px \/ SOLAR_MASS;\n props.vy = -py \/ SOLAR_MASS;\n props.vz = -pz \/ SOLAR_MASS;\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #18652<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Tests multiple free variables being passed by value into an unboxed\n\/\/ once closure as an optimization by trans. This used to hit an\n\/\/ incorrect assert.\n\n#![feature(unboxed_closures, overloaded_calls)]\n\nfn main() {\n let x = 2u8;\n let y = 3u8;\n assert_eq!((move |:| x + y)(), 5);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #37851 - jneem:master, r=sanxiyn<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn gimme_a_raw_pointer<T>(_: *const T) { }\n\nfn test<T>(t: T) { }\n\nfn main() {\n \/\/ Clearly `pointer` must be of type `*const ()`.\n let pointer = &() as *const _;\n gimme_a_raw_pointer(pointer);\n\n let t = test as fn (i32);\n t(0i32);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags:--test\n\/\/ rustc-env:RUSTC_BOOTSTRAP_KEY=\n\/\/ ignore-pretty : (#23623) problems when ending with \/\/ comments\n\n#![cfg(any())] \/\/ This test should be configured away\n#![feature(rustc_attrs)] \/\/ Test that this is allowed on stable\/beta\n#![feature(iter_arith_traits)] \/\/ Test that this is not unused\n#![deny(unused_features)]\n\n#[test]\nfn dummy() {\n let () = \"this should not reach type-checking\";\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ As always - iOS on arm uses SjLj exceptions and\n\/\/\/ _Unwind_Backtrace is even not available there. Still,\n\/\/\/ backtraces could be extracted using a backtrace function,\n\/\/\/ which thanks god is public\n\/\/\/\n\/\/\/ As mentioned in a huge comment block in `super::super`, backtrace\n\/\/\/ doesn't play well with green threads, so while it is extremely nice and\n\/\/\/ simple to use it should be used only on iOS devices as the only viable\n\/\/\/ option.\n\nuse io;\nuse libc;\nuse mem;\nuse sys::backtrace::BacktraceContext;\nuse sys_common::backtrace::Frame;\n\n#[inline(never)] \/\/ if we know this is a function call, we can skip it when\n \/\/ tracing\npub fn unwind_backtrace(frames: &mut [Frame])\n -> io::Result<(usize, BacktraceContext)>\n{\n const FRAME_LEN: usize = 100;\n assert!(FRAME_LEN >= frames.len());\n let mut raw_frames = [::ptr::null_mut(); FRAME_LEN];\n let nb_frames = unsafe {\n backtrace(raw_frames.as_mut_ptr(), raw_frames.len() as libc::c_int)\n } as usize;\n for (from, to) in raw_frames.iter().zip(frames.iter_mut()).take(nb_frames) {\n *to = Frame {\n exact_position: *from,\n symbol_addr: *from,\n };\n }\n Ok((nb_frames as usize, BacktraceContext))\n}\n\nextern {\n fn backtrace(buf: *mut *mut libc::c_void, sz: libc::c_int) -> libc::c_int;\n}\n<commit_msg>Remove unused<commit_after>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ As always - iOS on arm uses SjLj exceptions and\n\/\/\/ _Unwind_Backtrace is even not available there. 
Still,\n\/\/\/ backtraces could be extracted using a backtrace function,\n\/\/\/ which thanks god is public\n\/\/\/\n\/\/\/ As mentioned in a huge comment block in `super::super`, backtrace\n\/\/\/ doesn't play well with green threads, so while it is extremely nice and\n\/\/\/ simple to use it should be used only on iOS devices as the only viable\n\/\/\/ option.\n\nuse io;\nuse libc;\nuse sys::backtrace::BacktraceContext;\nuse sys_common::backtrace::Frame;\n\n#[inline(never)] \/\/ if we know this is a function call, we can skip it when\n \/\/ tracing\npub fn unwind_backtrace(frames: &mut [Frame])\n -> io::Result<(usize, BacktraceContext)>\n{\n const FRAME_LEN: usize = 100;\n assert!(FRAME_LEN >= frames.len());\n let mut raw_frames = [::ptr::null_mut(); FRAME_LEN];\n let nb_frames = unsafe {\n backtrace(raw_frames.as_mut_ptr(), raw_frames.len() as libc::c_int)\n } as usize;\n for (from, to) in raw_frames.iter().zip(frames.iter_mut()).take(nb_frames) {\n *to = Frame {\n exact_position: *from,\n symbol_addr: *from,\n };\n }\n Ok((nb_frames as usize, BacktraceContext))\n}\n\nextern {\n fn backtrace(buf: *mut *mut libc::c_void, sz: libc::c_int) -> libc::c_int;\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(slice_concat_ext, const_fn)]\n\nextern crate compiletest_rs as compiletest;\n\nuse std::slice::SliceConcatExt;\nuse std::path::{PathBuf, Path};\nuse std::io::Write;\n\nmacro_rules! eprintln {\n ($($arg:tt)*) => {\n let stderr = std::io::stderr();\n writeln!(stderr.lock(), $($arg)*).unwrap();\n }\n}\n\nconst fn miri_path() -> &'static str {\n concat!(\"target\/\", env!(\"PROFILE\"), \"\/miri\")\n}\n\nfn compile_fail(sysroot: &Path, path: &str, target: &str, host: &str, fullmir: bool) {\n eprintln!(\"## Running compile-fail tests in {} against miri for target {}\", path, target);\n let mut config = compiletest::default_config();\n config.mode = \"compile-fail\".parse().expect(\"Invalid mode\");\n config.rustc_path = miri_path().into();\n if fullmir {\n if host != target {\n \/\/ skip fullmir on nonhost\n return;\n }\n let sysroot = Path::new(&std::env::var(\"HOME\").unwrap()).join(\".xargo\").join(\"HOST\");\n config.target_rustcflags = Some(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n config.src_base = PathBuf::from(path.to_string());\n } else {\n config.target_rustcflags = Some(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n config.src_base = PathBuf::from(path.to_string());\n }\n config.target = target.to_owned();\n compiletest::run_tests(&config);\n}\n\nfn run_pass(path: &str) {\n eprintln!(\"## Running run-pass tests in {} against rustc\", path);\n let mut config = compiletest::default_config();\n config.mode = \"run-pass\".parse().expect(\"Invalid mode\");\n config.src_base = PathBuf::from(path);\n config.target_rustcflags = Some(\"-Dwarnings\".to_string());\n config.host_rustcflags = Some(\"-Dwarnings\".to_string());\n compiletest::run_tests(&config);\n}\n\nfn miri_pass(path: &str, target: &str, host: &str, fullmir: bool, opt: bool) {\n let opt_str = if opt {\n \" with optimizations\"\n } else {\n \"\"\n };\n eprintln!(\"## Running run-pass tests in {} against miri for target {}{}\", path, target, opt_str);\n let mut config = compiletest::default_config();\n config.mode = \"mir-opt\".parse().expect(\"Invalid mode\");\n config.src_base = PathBuf::from(path);\n config.target = target.to_owned();\n config.host = host.to_owned();\n config.rustc_path = miri_path().into();\n let mut flags = Vec::new();\n if fullmir {\n if host != target {\n \/\/ skip fullmir on 
nonhost\n return;\n }\n let sysroot = Path::new(&std::env::var(\"HOME\").unwrap()).join(\".xargo\").join(\"HOST\");\n flags.push(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n }\n if opt {\n flags.push(\"-Zmir-opt-level=3\".to_owned());\n } else {\n flags.push(\"-Zmir-opt-level=0\".to_owned());\n }\n config.target_rustcflags = Some(flags.join(\" \"));\n \/\/ don't actually execute the final binary, it might be for other targets and we only care\n \/\/ about running miri, not the binary.\n config.runtool = Some(\"echo \\\"\\\" || \".to_owned());\n if target == host {\n std::env::set_var(\"MIRI_HOST_TARGET\", \"yes\");\n }\n compiletest::run_tests(&config);\n std::env::set_var(\"MIRI_HOST_TARGET\", \"\");\n}\n\nfn is_target_dir<P: Into<PathBuf>>(path: P) -> bool {\n let mut path = path.into();\n path.push(\"lib\");\n path.metadata().map(|m| m.is_dir()).unwrap_or(false)\n}\n\nfn for_all_targets<F: FnMut(String)>(sysroot: &Path, mut f: F) {\n let target_dir = sysroot.join(\"lib\").join(\"rustlib\");\n for entry in std::fs::read_dir(target_dir).expect(\"invalid sysroot\") {\n let entry = entry.unwrap();\n if !is_target_dir(entry.path()) { continue; }\n let target = entry.file_name().into_string().unwrap();\n f(target);\n }\n}\n\nfn get_sysroot() -> PathBuf {\n let sysroot = std::env::var(\"MIRI_SYSROOT\").unwrap_or_else(|_| {\n let sysroot = std::process::Command::new(\"rustc\")\n .arg(\"--print\")\n .arg(\"sysroot\")\n .output()\n .expect(\"rustc not found\")\n .stdout;\n String::from_utf8(sysroot).expect(\"sysroot is not utf8\")\n });\n PathBuf::from(sysroot.trim())\n}\n\nfn get_host() -> String {\n let host = std::process::Command::new(\"rustc\")\n .arg(\"-vV\")\n .output()\n .expect(\"rustc not found for -vV\")\n .stdout;\n let host = std::str::from_utf8(&host).expect(\"sysroot is not utf8\");\n let host = host.split(\"\\nhost: \").nth(1).expect(\"no host: part in rustc -vV\");\n let host = host.split('\\n').next().expect(\"no \\n after host\");\n String::from(host)\n}\n\n#[test]\nfn run_pass_miri() {\n let sysroot = get_sysroot();\n let host = get_host();\n\n for &opt in [false, true].iter() {\n for_all_targets(&sysroot, |target| {\n miri_pass(\"tests\/run-pass\", &target, &host, false, opt);\n });\n miri_pass(\"tests\/run-pass-fullmir\", &host, &host, true, opt);\n }\n}\n\n#[test]\nfn run_pass_rustc() {\n run_pass(\"tests\/run-pass\");\n run_pass(\"tests\/run-pass-fullmir\");\n}\n\n#[test]\nfn compile_fail_miri() {\n let sysroot = get_sysroot();\n let host = get_host();\n\n for_all_targets(&sysroot, |target| {\n compile_fail(&sysroot, \"tests\/compile-fail\", &target, &host, false);\n });\n compile_fail(&sysroot, \"tests\/compile-fail-fullmir\", &host, &host, true);\n}\n<commit_msg>actually, we can use a plain constant<commit_after>#![feature(slice_concat_ext)]\n\nextern crate compiletest_rs as compiletest;\n\nuse std::slice::SliceConcatExt;\nuse std::path::{PathBuf, Path};\nuse std::io::Write;\n\nmacro_rules! 
eprintln {\n ($($arg:tt)*) => {\n let stderr = std::io::stderr();\n writeln!(stderr.lock(), $($arg)*).unwrap();\n }\n}\n\nconst MIRI_PATH: &'static str = concat!(\"target\/\", env!(\"PROFILE\"), \"\/miri\");\n\nfn compile_fail(sysroot: &Path, path: &str, target: &str, host: &str, fullmir: bool) {\n eprintln!(\"## Running compile-fail tests in {} against miri for target {}\", path, target);\n let mut config = compiletest::default_config();\n config.mode = \"compile-fail\".parse().expect(\"Invalid mode\");\n config.rustc_path = MIRI_PATH.into();\n if fullmir {\n if host != target {\n \/\/ skip fullmir on nonhost\n return;\n }\n let sysroot = Path::new(&std::env::var(\"HOME\").unwrap()).join(\".xargo\").join(\"HOST\");\n config.target_rustcflags = Some(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n config.src_base = PathBuf::from(path.to_string());\n } else {\n config.target_rustcflags = Some(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n config.src_base = PathBuf::from(path.to_string());\n }\n config.target = target.to_owned();\n compiletest::run_tests(&config);\n}\n\nfn run_pass(path: &str) {\n eprintln!(\"## Running run-pass tests in {} against rustc\", path);\n let mut config = compiletest::default_config();\n config.mode = \"run-pass\".parse().expect(\"Invalid mode\");\n config.src_base = PathBuf::from(path);\n config.target_rustcflags = Some(\"-Dwarnings\".to_string());\n config.host_rustcflags = Some(\"-Dwarnings\".to_string());\n compiletest::run_tests(&config);\n}\n\nfn miri_pass(path: &str, target: &str, host: &str, fullmir: bool, opt: bool) {\n let opt_str = if opt {\n \" with optimizations\"\n } else {\n \"\"\n };\n eprintln!(\"## Running run-pass tests in {} against miri for target {}{}\", path, target, opt_str);\n let mut config = compiletest::default_config();\n config.mode = \"mir-opt\".parse().expect(\"Invalid mode\");\n config.src_base = PathBuf::from(path);\n config.target = target.to_owned();\n config.host = host.to_owned();\n config.rustc_path = MIRI_PATH.into();\n let mut flags = Vec::new();\n if fullmir {\n if host != target {\n \/\/ skip fullmir on nonhost\n return;\n }\n let sysroot = Path::new(&std::env::var(\"HOME\").unwrap()).join(\".xargo\").join(\"HOST\");\n flags.push(format!(\"--sysroot {}\", sysroot.to_str().unwrap()));\n }\n if opt {\n flags.push(\"-Zmir-opt-level=3\".to_owned());\n } else {\n flags.push(\"-Zmir-opt-level=0\".to_owned());\n }\n config.target_rustcflags = Some(flags.join(\" \"));\n \/\/ don't actually execute the final binary, it might be for other targets and we only care\n \/\/ about running miri, not the binary.\n config.runtool = Some(\"echo \\\"\\\" || \".to_owned());\n if target == host {\n std::env::set_var(\"MIRI_HOST_TARGET\", \"yes\");\n }\n compiletest::run_tests(&config);\n std::env::set_var(\"MIRI_HOST_TARGET\", \"\");\n}\n\nfn is_target_dir<P: Into<PathBuf>>(path: P) -> bool {\n let mut path = path.into();\n path.push(\"lib\");\n path.metadata().map(|m| m.is_dir()).unwrap_or(false)\n}\n\nfn for_all_targets<F: FnMut(String)>(sysroot: &Path, mut f: F) {\n let target_dir = sysroot.join(\"lib\").join(\"rustlib\");\n for entry in std::fs::read_dir(target_dir).expect(\"invalid sysroot\") {\n let entry = entry.unwrap();\n if !is_target_dir(entry.path()) { continue; }\n let target = entry.file_name().into_string().unwrap();\n f(target);\n }\n}\n\nfn get_sysroot() -> PathBuf {\n let sysroot = std::env::var(\"MIRI_SYSROOT\").unwrap_or_else(|_| {\n let sysroot = std::process::Command::new(\"rustc\")\n .arg(\"--print\")\n 
.arg(\"sysroot\")\n .output()\n .expect(\"rustc not found\")\n .stdout;\n String::from_utf8(sysroot).expect(\"sysroot is not utf8\")\n });\n PathBuf::from(sysroot.trim())\n}\n\nfn get_host() -> String {\n let host = std::process::Command::new(\"rustc\")\n .arg(\"-vV\")\n .output()\n .expect(\"rustc not found for -vV\")\n .stdout;\n let host = std::str::from_utf8(&host).expect(\"sysroot is not utf8\");\n let host = host.split(\"\\nhost: \").nth(1).expect(\"no host: part in rustc -vV\");\n let host = host.split('\\n').next().expect(\"no \\n after host\");\n String::from(host)\n}\n\n#[test]\nfn run_pass_miri() {\n let sysroot = get_sysroot();\n let host = get_host();\n\n for &opt in [false, true].iter() {\n for_all_targets(&sysroot, |target| {\n miri_pass(\"tests\/run-pass\", &target, &host, false, opt);\n });\n miri_pass(\"tests\/run-pass-fullmir\", &host, &host, true, opt);\n }\n}\n\n#[test]\nfn run_pass_rustc() {\n run_pass(\"tests\/run-pass\");\n run_pass(\"tests\/run-pass-fullmir\");\n}\n\n#[test]\nfn compile_fail_miri() {\n let sysroot = get_sysroot();\n let host = get_host();\n\n for_all_targets(&sysroot, |target| {\n compile_fail(&sysroot, \"tests\/compile-fail\", &target, &host, false);\n });\n compile_fail(&sysroot, \"tests\/compile-fail-fullmir\", &host, &host, true);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add tests<commit_after>\/\/ run-pass\n\n\/\/ Test copy\n\n#![feature(bindings_after_at)]\n\nstruct A { a: i32, b: i32 }\nstruct B { a: i32, b: C }\nstruct D { a: i32, d: C }\n#[derive(Copy,Clone)]\nstruct C { c: i32 }\n\npub fn main() {\n match (A {a: 10, b: 20}) {\n x@A {a, b: 20} => { assert!(x.a == 10); assert!(a == 10); }\n A {b: _b, ..} => { panic!(); }\n }\n\n let mut x@B {b, ..} = B {a: 10, b: C {c: 20}};\n assert_eq!(x.a, 10);\n x.b.c = 30;\n assert_eq!(b.c, 20);\n let mut y@D {d, ..} = D {a: 10, d: C {c: 20}};\n assert_eq!(y.a, 10);\n y.d.c = 30;\n assert_eq!(d.c, 20);\n\n let some_b = Some(B { a: 10, b: C { c: 20 } });\n\n \/\/ in irrefutable pattern\n if let Some(x @ B { b, .. }) = some_b {\n assert_eq!(x.b.c, 20);\n assert_eq!(b.c, 20);\n } else {\n unreachable!();\n }\n\n let some_b = Some(B { a: 10, b: C { c: 20 } });\n\n if let Some(x @ B { b: mut b @ C { c }, .. }) = some_b {\n assert_eq!(x.b.c, 20);\n assert_eq!(b.c, 20);\n b.c = 30;\n assert_eq!(b.c, 30);\n assert_eq!(c, 20);\n } else {\n unreachable!();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n fn baz(_x: fn() -> int) {}\n for baz |_e| { } \/\/~ ERROR should return `bool`\n}\n<commit_msg>Fix busted test case<commit_after>fn main() {\n fn baz(_x: fn(y: int) -> int) {}\n for baz |_e| { } \/\/~ ERROR should return `bool`\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n\nmacro_rules! mac {\n {} => {\n #[cfg(attr)]\n mod m {\n #[lang_item]\n fn f() {}\n }\n }\n}\n\nmac! 
{}\n\n#[rustc_error]\nfn main() {} \/\/~ ERROR compilation successful\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implements operations suitable on primitive integer types.<commit_after>\/\/ Copyright 2015 Pierre Talbot (IRCAM)\n\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse ncollections::ops::*;\nuse num::One;\n\nmacro_rules! integer_basic_ops_impl\n{\n ( $( $source:ty, $size:ty ),* ) =>\n {$(\n impl Cardinality for $source\n {\n type Size = $size;\n fn size(&self) -> $size {\n <$size as One>::one()\n }\n }\n\n impl Singleton<$source> for $source {\n fn singleton(value: $source) -> $source {\n value\n }\n }\n\n impl Bounded for $source {\n type Bound = $source;\n fn lower(&self) -> $source {\n *self\n }\n fn upper(&self) -> $source {\n *self\n }\n }\n\n impl Contains<$source> for $source\n {\n fn contains(&self, value: &$source) -> bool {\n self == value\n }\n }\n\n impl Disjoint for $source\n {\n fn is_disjoint(&self, value: &$source) -> bool {\n self != value\n }\n }\n\n impl Subset for $source\n {\n fn is_subset(&self, value: &$source) -> bool {\n self == value\n }\n }\n\n impl ProperSubset for $source\n {\n fn is_proper_subset(&self, _value: &$source) -> bool {\n false\n }\n }\n\n impl Overlap for $source\n {\n fn overlap(&self, value: &$source) -> bool {\n self == value\n }\n }\n )*}\n}\n\ninteger_basic_ops_impl!(i8,u8,u8,u8,i16,u16,u16,u16,i32,u32,u32,u32,i64,u64,u64,u64,isize,usize,usize,usize);\n\n#[cfg(test)]\nmod tests {\n use ncollections::ops::*;\n\n #[test]\n fn simple_tests() {\n for ref i in -2i32..10 {\n assert_eq!(i.size(), 1u32);\n assert_eq!(i.is_singleton(), true);\n assert_eq!(i.is_empty(), false);\n let res: i32 = Singleton::singleton(*i);\n assert_eq!(res, *i);\n assert_eq!(i.lower(), *i);\n assert_eq!(i.upper(), *i);\n for ref j in -10..10 {\n assert_eq!(i.contains(j), j.contains(i));\n assert_eq!(i.contains(j), i == j);\n assert_eq!(i.is_subset(j), i.contains(j));\n assert_eq!(i.overlap(j), i.contains(j));\n assert_eq!(i.is_subset(j), j.is_subset(i));\n\n assert_eq!(i.is_disjoint(j), j.is_disjoint(i));\n assert_eq!(i.is_disjoint(j), i != j);\n assert_eq!(i.is_proper_subset(j), false);\n assert_eq!(j.is_proper_subset(i), false);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>set output to mpg<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated to check dirty file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rust: initial implementation of Matrix and MatrixStack<commit_after>\/\/ Copyright (C) 2019 Inderjit Gill\n\n\/\/ This program is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\n\/\/ This program is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR 
PURPOSE. See the\n\/\/ GNU General Public License for more details.\n\n\/\/ You should have received a copy of the GNU General Public License\n\/\/ along with this program. If not, see <https:\/\/www.gnu.org\/licenses\/>.\n\nstruct Matrix {\n m: [f32; 16],\n}\n\nstruct MatrixStack {\n stack: Vec<Matrix>,\n}\n\nimpl Matrix {\n pub fn identity() -> Self {\n Matrix {\n m: [\n 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,\n ],\n }\n }\n\n pub fn copy_matrix(a: &Matrix) -> Self {\n Matrix { m: a.m }\n }\n\n pub fn copy_from(&mut self, a: &Matrix) {\n self.m = a.m;\n }\n\n \/\/ self = self * b\n pub fn multiply(&mut self, b: &Matrix) {\n let a00 = self.m[0];\n let a01 = self.m[1];\n let a02 = self.m[2];\n let a03 = self.m[3];\n let a10 = self.m[4];\n let a11 = self.m[5];\n let a12 = self.m[6];\n let a13 = self.m[7];\n let a20 = self.m[8];\n let a21 = self.m[9];\n let a22 = self.m[10];\n let a23 = self.m[11];\n let a30 = self.m[12];\n let a31 = self.m[13];\n let a32 = self.m[14];\n let a33 = self.m[15];\n\n {\n let b0 = b.m[0];\n let b1 = b.m[1];\n let b2 = b.m[2];\n let b3 = b.m[3];\n\n self.m[0] = b0 * a00 + b1 * a10 + b2 * a20 + b3 * a30;\n self.m[1] = b0 * a01 + b1 * a11 + b2 * a21 + b3 * a31;\n self.m[2] = b0 * a02 + b1 * a12 + b2 * a22 + b3 * a32;\n self.m[3] = b0 * a03 + b1 * a13 + b2 * a23 + b3 * a33;\n }\n\n {\n let b0 = b.m[4];\n let b1 = b.m[5];\n let b2 = b.m[6];\n let b3 = b.m[7];\n self.m[4] = b0 * a00 + b1 * a10 + b2 * a20 + b3 * a30;\n self.m[5] = b0 * a01 + b1 * a11 + b2 * a21 + b3 * a31;\n self.m[6] = b0 * a02 + b1 * a12 + b2 * a22 + b3 * a32;\n self.m[7] = b0 * a03 + b1 * a13 + b2 * a23 + b3 * a33;\n }\n {\n let b0 = b.m[8];\n let b1 = b.m[9];\n let b2 = b.m[10];\n let b3 = b.m[11];\n self.m[8] = b0 * a00 + b1 * a10 + b2 * a20 + b3 * a30;\n self.m[9] = b0 * a01 + b1 * a11 + b2 * a21 + b3 * a31;\n self.m[10] = b0 * a02 + b1 * a12 + b2 * a22 + b3 * a32;\n self.m[11] = b0 * a03 + b1 * a13 + b2 * a23 + b3 * a33;\n }\n {\n let b0 = b.m[12];\n let b1 = b.m[13];\n let b2 = b.m[14];\n let b3 = b.m[15];\n self.m[12] = b0 * a00 + b1 * a10 + b2 * a20 + b3 * a30;\n self.m[13] = b0 * a01 + b1 * a11 + b2 * a21 + b3 * a31;\n self.m[14] = b0 * a02 + b1 * a12 + b2 * a22 + b3 * a32;\n self.m[15] = b0 * a03 + b1 * a13 + b2 * a23 + b3 * a33;\n }\n }\n\n pub fn scale(&mut self, x: f32, y: f32, z: f32) {\n self.m[0] *= x;\n self.m[1] *= x;\n self.m[2] *= x;\n self.m[3] *= x;\n\n self.m[4] *= y;\n self.m[5] *= y;\n self.m[6] *= y;\n self.m[7] *= y;\n\n self.m[8] *= z;\n self.m[9] *= z;\n self.m[10] *= z;\n self.m[11] *= z;\n }\n\n pub fn translate(&mut self, x: f32, y: f32, z: f32) {\n self.m[12] = self.m[0] * x + self.m[4] * y + self.m[8] * z + self.m[12];\n self.m[13] = self.m[1] * x + self.m[5] * y + self.m[9] * z + self.m[13];\n self.m[14] = self.m[2] * x + self.m[6] * y + self.m[10] * z + self.m[14];\n self.m[15] = self.m[3] * x + self.m[7] * y + self.m[11] * z + self.m[15];\n }\n\n pub fn rotate_z(&mut self, rad: f32) {\n let s = rad.sin();\n let c = rad.cos();\n let a00 = self.m[0];\n let a01 = self.m[1];\n let a02 = self.m[2];\n let a03 = self.m[3];\n let a10 = self.m[4];\n let a11 = self.m[5];\n let a12 = self.m[6];\n let a13 = self.m[7];\n\n \/\/ Perform axis-specific matrix multiplication\n self.m[0] = a00 * c + a10 * s;\n self.m[1] = a01 * c + a11 * s;\n self.m[2] = a02 * c + a12 * s;\n self.m[3] = a03 * c + a13 * s;\n self.m[4] = a10 * c - a00 * s;\n self.m[5] = a11 * c - a01 * s;\n self.m[6] = a12 * c - a02 * s;\n self.m[7] = a13 * c - a03 * s;\n }\n\n 
pub fn transform_vec2(&self, x: f32, y: f32) -> (f32, f32) {\n let outx = self.m[0] * x + self.m[4] * y + self.m[12];\n let outy = self.m[1] * x + self.m[5] * y + self.m[13];\n (outx, outy)\n }\n\n pub fn transform_vec3(&self, x: f32, y: f32, z: f32) -> (f32, f32, f32) {\n let w1 = self.m[3] * x + self.m[7] * y + self.m[11] * z + self.m[15];\n let w = if w1 == 0.0 { 1.0 } else { w1 };\n\n let outx = (self.m[0] * x + self.m[4] * y + self.m[8] * z + self.m[12]) \/ w;\n let outy = (self.m[1] * x + self.m[5] * y + self.m[9] * z + self.m[13]) \/ w;\n let outz = (self.m[2] * x + self.m[6] * y + self.m[10] * z + self.m[14]) \/ w;\n\n (outx, outy, outz)\n }\n}\n\nimpl MatrixStack {\n pub fn new() -> Self {\n let mut ms = MatrixStack {\n stack: Vec::with_capacity(16),\n };\n ms.reset();\n ms\n }\n\n pub fn reset(&mut self) {\n self.stack.clear();\n \/\/ add an identity matrix onto the stack so that further\n \/\/ scale\/rotate\/translate ops can work\n self.stack.push(Matrix::identity())\n }\n\n pub fn peek(&self) -> Option<&Matrix> {\n self.stack.last()\n }\n pub fn push(&mut self) {\n if let Some(top) = self.peek() {\n self.stack.push(Matrix::copy_matrix(top));\n }\n }\n pub fn pop(&mut self) -> Option<Matrix> {\n self.stack.pop()\n }\n\n pub fn scale(&mut self, sx: f32, sy: f32) {\n let mut m = Matrix::identity();\n m.scale(sx, sy, 1.0);\n\n let len = self.stack.len();\n self.stack[len - 1].multiply(&m);\n }\n\n pub fn translate(&mut self, tx: f32, ty: f32) {\n let mut m = Matrix::identity();\n m.translate(tx, ty, 0.0);\n\n let len = self.stack.len();\n self.stack[len - 1].multiply(&m);\n }\n\n pub fn rotate(&mut self, a: f32) {\n let mut m = Matrix::identity();\n m.rotate_z(a);\n\n let len = self.stack.len();\n self.stack[len - 1].multiply(&m);\n }\n\n \/\/ todo: should this return a Result? (and not the dodgy else clause)\n \/\/ is that too much of a performance hit?\n pub fn transform_vec2(&self, x: f32, y: f32) -> (f32, f32) {\n if let Some(top) = self.peek() {\n top.transform_vec2(x, y)\n } else {\n (x, y)\n }\n }\n\n \/\/ todo: should this return a Result? (and not the dodgy else clause)\n \/\/ is that too much of a performance hit?\n pub fn transform_vec3(&self, x: f32, y: f32, z: f32) -> (f32, f32, f32) {\n if let Some(top) = self.peek() {\n top.transform_vec3(x, y, z)\n } else {\n (x, y, z)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a speed test example to help see the differences between various speeds<commit_after>extern crate turtle;\n\nuse turtle::Turtle;\n\n\/\/\/ This program draws several parallel lines to demonstrate each of the\n\/\/\/ different possible movement speeds\nfn main() {\n let mut turtle = Turtle::new();\n\n turtle.pen_up();\n turtle.set_speed(\"fastest\");\n turtle.left(90.0);\n turtle.forward(300.0);\n turtle.right(90.0);\n turtle.pen_down();\n\n let length = 200.0;\n\n for i in 1..12 {\n turtle.set_speed(i);\n turtle.forward(length);\n\n turtle.pen_up();\n turtle.set_speed(\"fastest\");\n turtle.backward(length);\n turtle.right(90.0);\n turtle.forward(60.0);\n turtle.left(90.0);\n turtle.pen_down();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added display example.<commit_after>use std::fmt; \/\/ Import `fmt`\n\n\/\/ A structure holding two numbers. 
`Debug` will be derived so the results can\n\/\/ be contrasted with `Display`.\n#[derive(Debug)]\nstruct MinMax(i64, i64);\n\n\/\/ Implement `Display` for `MinMax`.\nimpl fmt::Display for MinMax {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Use `self.number` to refer to each positional data point.\n write!(f, \"({}, {})\", self.0, self.1)\n }\n}\n\n\/\/ Define a structure where the fields are nameable for comparison.\n#[derive(Debug)]\nstruct Point2 {\n x: f64,\n y: f64,\n}\n\n\/\/ Similarly, implement for Point2\nimpl fmt::Display for Point2 {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Customize so only `x` and `y` are denoted.\n write!(f, \"x: {}, y: {}\", self.x, self.y)\n }\n}\n\nfn main() {\n let minmax = MinMax(0, 14);\n\n println!(\"Compare structures:\");\n println!(\"Display: {}\", minmax);\n println!(\"Debug: {:?}\", minmax);\n\n let big_range = MinMax(-300, 300);\n let small_range = MinMax(-3, 3);\n\n println!(\"The big range is {big} and the small is {small}\",\n small = small_range,\n big = big_range);\n\n let point = Point2 { x: 3.3, y: 7.2 };\n\n println!(\"Compare points:\");\n println!(\"Display: {}\", point);\n println!(\"Debug: {:?}\", point);\n\n \/\/ Error. Both `Debug` and `Display` were implemented but `{:b}`\n \/\/ requires `fmt::Binary` to be implemented. This will not work.\n \/\/ println!(\"What does Point2D look like in binary: {:b}?\", point);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>error<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ xfail-pretty\n\nfn id(x: bool) -> bool { x }\n\nfn call_id() {\n let c <- fail;\n id(c);\n}\n\nfn call_id_2() { id(true) && id(ret); }\n\nfn call_id_3() { id(ret) && id(ret); }\n\nfn call_id_4() { while id(break) { } }\n\niter put_break() -> int {\n while true { put break; }\n}\n\nfn log_fail() { log_err fail; }\n\nfn ret_ret() -> int { ret (ret 2) + 3; }\n\nfn ret_guard() {\n alt 2 {\n x when (ret) { x; }\n }\n}\n\nfn fail_then_concat() {\n let x = [], y = [3];\n fail;\n x += y;\n \"good\" + \"bye\";\n}\n\nfn main() {}\n<commit_msg>Add more unreachable-code tests. 
Closes #935<commit_after>\/\/ xfail-pretty\n\nfn id(x: bool) -> bool { x }\n\nfn call_id() {\n let c <- fail;\n id(c);\n}\n\nfn call_id_2() { id(true) && id(ret); }\n\nfn call_id_3() { id(ret) && id(ret); }\n\nfn call_id_4() { while id(break) { } }\n\niter put_break() -> int {\n while true { put break; }\n}\n\nfn log_fail() { log_err fail; }\n\nfn log_ret() { log_err ret; }\n\nfn log_break() { while true { log_err break; } }\n\nfn log_cont() { do { log_err cont; } while false }\n\nfn ret_ret() -> int { ret (ret 2) + 3; }\n\nfn ret_guard() {\n alt 2 {\n x when (ret) { x; }\n }\n}\n\nfn fail_then_concat() {\n let x = [], y = [3];\n fail;\n x += y;\n \"good\" + \"bye\";\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use std::cell::RefCell;\nuse RawUniformValue;\n\nuse smallvec::SmallVec;\n\nuse gl;\nuse Handle;\nuse context::CommandContext;\nuse version::Version;\nuse version::Api;\n\npub struct UniformsStorage {\n values: RefCell<SmallVec<[Option<RawUniformValue>; 16]>>,\n uniform_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n shader_storage_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n}\n\nimpl UniformsStorage {\n \/\/\/ Builds a new empty storage.\n #[inline]\n pub fn new() -> UniformsStorage {\n UniformsStorage {\n values: RefCell::new(SmallVec::new()),\n uniform_blocks: RefCell::new(SmallVec::new()),\n shader_storage_blocks: RefCell::new(SmallVec::new()),\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniform`.\n pub fn set_uniform_value(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLint, value: &RawUniformValue)\n {\n let mut values = self.values.borrow_mut();\n\n if values.len() <= location as usize {\n for _ in (values.len() .. location as usize + 1) {\n values.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n macro_rules! uniform(\n ($ctxt:expr, $uniform:ident, $uniform_arb:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.version >= &Version(Api::Gl, 1, 5) ||\n $ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n $ctxt.gl.$uniform($($params),+)\n } else {\n assert!($ctxt.extensions.gl_arb_shader_objects);\n $ctxt.gl.$uniform_arb($($params),+)\n }\n }\n )\n );\n\n macro_rules! 
uniform64(\n ($ctxt:expr, $uniform:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.extensions.gl_arb_gpu_shader_fp64 {\n $ctxt.gl.$uniform($($params),+)\n } else {\n panic!(\"Double precision is not supported on this system.\")\n }\n }\n )\n );\n\n match (value, &mut values[location as usize]) {\n (&RawUniformValue::SignedInt(a), &mut Some(RawUniformValue::SignedInt(b))) if a == b => (),\n (&RawUniformValue::UnsignedInt(a), &mut Some(RawUniformValue::UnsignedInt(b))) if a == b => (),\n (&RawUniformValue::Float(a), &mut Some(RawUniformValue::Float(b))) if a == b => (),\n (&RawUniformValue::Mat2(a), &mut Some(RawUniformValue::Mat2(b))) if a == b => (),\n (&RawUniformValue::Mat3(a), &mut Some(RawUniformValue::Mat3(b))) if a == b => (),\n (&RawUniformValue::Mat4(a), &mut Some(RawUniformValue::Mat4(b))) if a == b => (),\n (&RawUniformValue::Vec2(a), &mut Some(RawUniformValue::Vec2(b))) if a == b => (),\n (&RawUniformValue::Vec3(a), &mut Some(RawUniformValue::Vec3(b))) if a == b => (),\n (&RawUniformValue::Vec4(a), &mut Some(RawUniformValue::Vec4(b))) if a == b => (),\n (&RawUniformValue::IntVec2(a), &mut Some(RawUniformValue::IntVec2(b))) if a == b => (),\n (&RawUniformValue::IntVec3(a), &mut Some(RawUniformValue::IntVec3(b))) if a == b => (),\n (&RawUniformValue::IntVec4(a), &mut Some(RawUniformValue::IntVec4(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec2(a), &mut Some(RawUniformValue::UnsignedIntVec2(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec3(a), &mut Some(RawUniformValue::UnsignedIntVec3(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec4(a), &mut Some(RawUniformValue::UnsignedIntVec4(b))) if a == b => (),\n (&RawUniformValue::Double(a), &mut Some(RawUniformValue::Double(b))) if a == b => (),\n (&RawUniformValue::DoubleMat2(a), &mut Some(RawUniformValue::DoubleMat2(b))) if a == b => (),\n (&RawUniformValue::DoubleMat3(a), &mut Some(RawUniformValue::DoubleMat3(b))) if a == b => (),\n (&RawUniformValue::DoubleMat4(a), &mut Some(RawUniformValue::DoubleMat4(b))) if a == b => (),\n (&RawUniformValue::DoubleVec2(a), &mut Some(RawUniformValue::DoubleVec2(b))) if a == b => (),\n (&RawUniformValue::DoubleVec3(a), &mut Some(RawUniformValue::DoubleVec3(b))) if a == b => (),\n (&RawUniformValue::DoubleVec4(a), &mut Some(RawUniformValue::DoubleVec4(b))) if a == b => (),\n\n (&RawUniformValue::SignedInt(v), target) => {\n *target = Some(RawUniformValue::SignedInt(v));\n uniform!(ctxt, Uniform1i, Uniform1iARB, location, v);\n },\n\n (&RawUniformValue::UnsignedInt(v), target) => {\n *target = Some(RawUniformValue::UnsignedInt(v));\n\n \/\/ Uniform1uiARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform1ui(location, v)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform1iARB(location, v as gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::Float(v), target) => {\n *target = Some(RawUniformValue::Float(v));\n uniform!(ctxt, Uniform1f, Uniform1fARB, location, v);\n },\n\n (&RawUniformValue::Mat2(v), target) => {\n *target = Some(RawUniformValue::Mat2(v));\n uniform!(ctxt, UniformMatrix2fv, UniformMatrix2fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Mat3(v), target) => {\n *target = Some(RawUniformValue::Mat3(v));\n uniform!(ctxt, UniformMatrix3fv, UniformMatrix3fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Mat4(v), target) => {\n *target = 
Some(RawUniformValue::Mat4(v));\n uniform!(ctxt, UniformMatrix4fv, UniformMatrix4fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec2(v), target) => {\n *target = Some(RawUniformValue::Vec2(v));\n uniform!(ctxt, Uniform2fv, Uniform2fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec3(v), target) => {\n *target = Some(RawUniformValue::Vec3(v));\n uniform!(ctxt, Uniform3fv, Uniform3fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec4(v), target) => {\n *target = Some(RawUniformValue::Vec4(v));\n uniform!(ctxt, Uniform4fv, Uniform4fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::IntVec2(v), target) => {\n *target = Some(RawUniformValue::IntVec2(v));\n uniform!(ctxt, Uniform2iv, Uniform2ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::IntVec3(v), target) => {\n *target = Some(RawUniformValue::IntVec3(v));\n uniform!(ctxt, Uniform3iv, Uniform3ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::IntVec4(v), target) => {\n *target = Some(RawUniformValue::IntVec4(v));\n uniform!(ctxt, Uniform4iv, Uniform4ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::UnsignedIntVec2(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec2(v));\n\n \/\/ Uniform2uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform2uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform2ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::UnsignedIntVec3(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec3(v));\n\n \/\/ Uniform3uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform3uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform3ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::UnsignedIntVec4(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec4(v));\n\n \/\/ Uniform4uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform4uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform4ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n (&RawUniformValue::Double(v), target) => {\n *target = Some(RawUniformValue::Double(v));\n uniform64!(ctxt, Uniform1d, location, v);\n },\n\n (&RawUniformValue::DoubleMat2(v), target) => {\n *target = Some(RawUniformValue::DoubleMat2(v));\n uniform64!(ctxt, UniformMatrix2dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleMat3(v), target) => {\n *target = Some(RawUniformValue::DoubleMat3(v));\n uniform64!(ctxt, UniformMatrix3dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleMat4(v), target) => {\n *target = Some(RawUniformValue::DoubleMat4(v));\n uniform64!(ctxt, UniformMatrix4dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec2(v), target) => {\n *target = 
Some(RawUniformValue::DoubleVec2(v));\n uniform64!(ctxt, Uniform2dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec3(v), target) => {\n *target = Some(RawUniformValue::DoubleVec3(v));\n uniform64!(ctxt, Uniform3dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec4(v), target) => {\n *target = Some(RawUniformValue::DoubleVec4(v));\n uniform64!(ctxt, Uniform4dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniformBlockBinding`.\n pub fn set_uniform_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.uniform_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.UniformBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glShaderStorageBlockBinding`.\n pub fn set_shader_storage_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.shader_storage_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.ShaderStorageBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n}\n<commit_msg>Use a hashmap for the uniforms storage<commit_after>use std::cell::RefCell;\nuse std::collections::HashMap;\nuse RawUniformValue;\n\nuse smallvec::SmallVec;\n\nuse gl;\nuse Handle;\nuse context::CommandContext;\nuse version::Version;\nuse version::Api;\n\npub struct UniformsStorage {\n values: RefCell<HashMap<gl::types::GLint, Option<RawUniformValue>>>,\n uniform_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n shader_storage_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n}\n\nimpl UniformsStorage {\n \/\/\/ Builds a new empty storage.\n #[inline]\n pub fn new() -> UniformsStorage {\n UniformsStorage {\n values: RefCell::new(HashMap::new()),\n uniform_blocks: RefCell::new(SmallVec::new()),\n shader_storage_blocks: RefCell::new(SmallVec::new()),\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. 
If the values differ, updates\n \/\/\/ the storage and calls `glUniform`.\n pub fn set_uniform_value(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLint, value: &RawUniformValue)\n {\n let mut values = self.values.borrow_mut();\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n \/\/ TODO: more optimized\n if values.get(&location).is_none() {\n values.insert(location, None);\n }\n\n macro_rules! uniform(\n ($ctxt:expr, $uniform:ident, $uniform_arb:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.version >= &Version(Api::Gl, 1, 5) ||\n $ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n $ctxt.gl.$uniform($($params),+)\n } else {\n assert!($ctxt.extensions.gl_arb_shader_objects);\n $ctxt.gl.$uniform_arb($($params),+)\n }\n }\n )\n );\n\n macro_rules! uniform64(\n ($ctxt:expr, $uniform:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.extensions.gl_arb_gpu_shader_fp64 {\n $ctxt.gl.$uniform($($params),+)\n } else {\n panic!(\"Double precision is not supported on this system.\")\n }\n }\n )\n );\n\n match (value, values.get_mut(&location).unwrap()) {\n (&RawUniformValue::SignedInt(a), &mut Some(RawUniformValue::SignedInt(b))) if a == b => (),\n (&RawUniformValue::UnsignedInt(a), &mut Some(RawUniformValue::UnsignedInt(b))) if a == b => (),\n (&RawUniformValue::Float(a), &mut Some(RawUniformValue::Float(b))) if a == b => (),\n (&RawUniformValue::Mat2(a), &mut Some(RawUniformValue::Mat2(b))) if a == b => (),\n (&RawUniformValue::Mat3(a), &mut Some(RawUniformValue::Mat3(b))) if a == b => (),\n (&RawUniformValue::Mat4(a), &mut Some(RawUniformValue::Mat4(b))) if a == b => (),\n (&RawUniformValue::Vec2(a), &mut Some(RawUniformValue::Vec2(b))) if a == b => (),\n (&RawUniformValue::Vec3(a), &mut Some(RawUniformValue::Vec3(b))) if a == b => (),\n (&RawUniformValue::Vec4(a), &mut Some(RawUniformValue::Vec4(b))) if a == b => (),\n (&RawUniformValue::IntVec2(a), &mut Some(RawUniformValue::IntVec2(b))) if a == b => (),\n (&RawUniformValue::IntVec3(a), &mut Some(RawUniformValue::IntVec3(b))) if a == b => (),\n (&RawUniformValue::IntVec4(a), &mut Some(RawUniformValue::IntVec4(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec2(a), &mut Some(RawUniformValue::UnsignedIntVec2(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec3(a), &mut Some(RawUniformValue::UnsignedIntVec3(b))) if a == b => (),\n (&RawUniformValue::UnsignedIntVec4(a), &mut Some(RawUniformValue::UnsignedIntVec4(b))) if a == b => (),\n (&RawUniformValue::Double(a), &mut Some(RawUniformValue::Double(b))) if a == b => (),\n (&RawUniformValue::DoubleMat2(a), &mut Some(RawUniformValue::DoubleMat2(b))) if a == b => (),\n (&RawUniformValue::DoubleMat3(a), &mut Some(RawUniformValue::DoubleMat3(b))) if a == b => (),\n (&RawUniformValue::DoubleMat4(a), &mut Some(RawUniformValue::DoubleMat4(b))) if a == b => (),\n (&RawUniformValue::DoubleVec2(a), &mut Some(RawUniformValue::DoubleVec2(b))) if a == b => (),\n (&RawUniformValue::DoubleVec3(a), &mut Some(RawUniformValue::DoubleVec3(b))) if a == b => (),\n (&RawUniformValue::DoubleVec4(a), &mut Some(RawUniformValue::DoubleVec4(b))) if a == b => (),\n\n (&RawUniformValue::SignedInt(v), target) => {\n *target = Some(RawUniformValue::SignedInt(v));\n uniform!(ctxt, Uniform1i, Uniform1iARB, location, v);\n },\n\n (&RawUniformValue::UnsignedInt(v), target) => {\n *target = Some(RawUniformValue::UnsignedInt(v));\n\n \/\/ Uniform1uiARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 
5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform1ui(location, v)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform1iARB(location, v as gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::Float(v), target) => {\n *target = Some(RawUniformValue::Float(v));\n uniform!(ctxt, Uniform1f, Uniform1fARB, location, v);\n },\n\n (&RawUniformValue::Mat2(v), target) => {\n *target = Some(RawUniformValue::Mat2(v));\n uniform!(ctxt, UniformMatrix2fv, UniformMatrix2fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Mat3(v), target) => {\n *target = Some(RawUniformValue::Mat3(v));\n uniform!(ctxt, UniformMatrix3fv, UniformMatrix3fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Mat4(v), target) => {\n *target = Some(RawUniformValue::Mat4(v));\n uniform!(ctxt, UniformMatrix4fv, UniformMatrix4fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec2(v), target) => {\n *target = Some(RawUniformValue::Vec2(v));\n uniform!(ctxt, Uniform2fv, Uniform2fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec3(v), target) => {\n *target = Some(RawUniformValue::Vec3(v));\n uniform!(ctxt, Uniform3fv, Uniform3fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::Vec4(v), target) => {\n *target = Some(RawUniformValue::Vec4(v));\n uniform!(ctxt, Uniform4fv, Uniform4fvARB, location, 1, v.as_ptr() as *const f32);\n },\n\n (&RawUniformValue::IntVec2(v), target) => {\n *target = Some(RawUniformValue::IntVec2(v));\n uniform!(ctxt, Uniform2iv, Uniform2ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::IntVec3(v), target) => {\n *target = Some(RawUniformValue::IntVec3(v));\n uniform!(ctxt, Uniform3iv, Uniform3ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::IntVec4(v), target) => {\n *target = Some(RawUniformValue::IntVec4(v));\n uniform!(ctxt, Uniform4iv, Uniform4ivARB, location, 1, v.as_ptr() as *const gl::types::GLint);\n },\n\n (&RawUniformValue::UnsignedIntVec2(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec2(v));\n\n \/\/ Uniform2uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform2uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform2ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::UnsignedIntVec3(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec3(v));\n\n \/\/ Uniform3uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform3uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform3ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n\n (&RawUniformValue::UnsignedIntVec4(v), target) => {\n *target = Some(RawUniformValue::UnsignedIntVec4(v));\n\n \/\/ Uniform4uivARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform4uiv(location, 1, v.as_ptr() as *const gl::types::GLuint)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform4ivARB(location, 1, v.as_ptr() as *const gl::types::GLint)\n }\n }\n },\n 
(&RawUniformValue::Double(v), target) => {\n *target = Some(RawUniformValue::Double(v));\n uniform64!(ctxt, Uniform1d, location, v);\n },\n\n (&RawUniformValue::DoubleMat2(v), target) => {\n *target = Some(RawUniformValue::DoubleMat2(v));\n uniform64!(ctxt, UniformMatrix2dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleMat3(v), target) => {\n *target = Some(RawUniformValue::DoubleMat3(v));\n uniform64!(ctxt, UniformMatrix3dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleMat4(v), target) => {\n *target = Some(RawUniformValue::DoubleMat4(v));\n uniform64!(ctxt, UniformMatrix4dv,\n location, 1, gl::FALSE, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec2(v), target) => {\n *target = Some(RawUniformValue::DoubleVec2(v));\n uniform64!(ctxt, Uniform2dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec3(v), target) => {\n *target = Some(RawUniformValue::DoubleVec3(v));\n uniform64!(ctxt, Uniform3dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n\n (&RawUniformValue::DoubleVec4(v), target) => {\n *target = Some(RawUniformValue::DoubleVec4(v));\n uniform64!(ctxt, Uniform4dv, location, 1, v.as_ptr() as *const gl::types::GLdouble);\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniformBlockBinding`.\n pub fn set_uniform_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.uniform_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.UniformBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glShaderStorageBlockBinding`.\n pub fn set_shader_storage_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.shader_storage_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.ShaderStorageBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate rustfmt;\nextern crate diff;\nextern crate regex;\nextern crate term;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::{self, Read, BufRead, BufReader};\nuse std::path::Path;\n\nuse rustfmt::*;\nuse rustfmt::filemap::write_system_newlines;\nuse rustfmt::config::{Config, ReportTactic, WriteMode};\nuse rustfmt::rustfmt_diff::*;\n\nstatic DIFF_CONTEXT_SIZE: usize = 3;\n\nfn get_path_string(dir_entry: io::Result<fs::DirEntry>) -> String {\n let path = dir_entry.ok().expect(\"Couldn't get DirEntry.\").path();\n\n path.to_str().expect(\"Couldn't stringify path.\").to_owned()\n}\n\n\/\/ Integration tests. The files in the tests\/source are formatted and compared\n\/\/ to their equivalent in tests\/target. The target file and config can be\n\/\/ overriden by annotations in the source file. The input and output must match\n\/\/ exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at\n\/\/ least report.\n#[test]\nfn system_tests() {\n \/\/ Get all files in the tests\/source directory.\n let files = fs::read_dir(\"tests\/source\").ok().expect(\"Couldn't read source dir.\");\n \/\/ Turn a DirEntry into a String that represents the relative path to the\n \/\/ file.\n let files = files.map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Default);\n\n \/\/ Display results.\n println!(\"Ran {} system tests.\", count);\n assert!(fails == 0, \"{} system tests failed\", fails);\n}\n\n\/\/ Do the same for tests\/coverage-source directory\n\/\/ the only difference is the coverage mode\n#[test]\nfn coverage_tests() {\n let files = fs::read_dir(\"tests\/coverage-source\").ok().expect(\"Couldn't read source dir.\");\n let files = files.map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Coverage);\n\n println!(\"Ran {} tests in coverage mode.\", count);\n assert!(fails == 0, \"{} tests failed\", fails);\n}\n\n#[test]\nfn checkstyle_test() {\n let filename = \"tests\/source\/fn-single-line.rs\".to_string();\n let expected = \"tests\/writemode\/checkstyle.xml\";\n\n let output = run_rustfmt(filename.clone(), WriteMode::Checkstyle);\n\n let mut expected_file = fs::File::open(&expected)\n .ok()\n .expect(\"Couldn't open target.\");\n let mut expected_text = String::new();\n expected_file.read_to_string(&mut expected_text)\n .ok()\n .expect(\"Failed reading target.\");\n\n let compare = make_diff(&expected_text, &output, DIFF_CONTEXT_SIZE);\n if compare.len() > 0 {\n let mut failures = HashMap::new();\n failures.insert(filename, compare);\n print_mismatches(failures);\n assert!(false, \"Text does not match expected output\");\n }\n}\n\n\/\/ Idempotence tests. 
Files in tests\/target are checked to be unaltered by\n\/\/ rustfmt.\n#[test]\nfn idempotence_tests() {\n \/\/ Get all files in the tests\/target directory.\n let files = fs::read_dir(\"tests\/target\")\n .ok()\n .expect(\"Couldn't read target dir.\")\n .map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Default);\n\n \/\/ Display results.\n println!(\"Ran {} idempotent tests.\", count);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Run rustfmt on itself. This operation must be idempotent. We also check that\n\/\/ no warnings are emitted.\n#[test]\nfn self_tests() {\n let files = fs::read_dir(\"src\/bin\")\n .ok()\n .expect(\"Couldn't read src dir.\")\n .chain(fs::read_dir(\"tests\").ok().expect(\"Couldn't read tests dir.\"))\n .map(get_path_string);\n \/\/ Hack because there's no `IntoIterator` impl for `[T; N]`.\n let files = files.chain(Some(\"src\/lib.rs\".to_owned()).into_iter());\n\n let (reports, count, fails) = check_files(files, WriteMode::Default);\n let mut warnings = 0;\n\n \/\/ Display results.\n println!(\"Ran {} self tests.\", count);\n assert!(fails == 0, \"{} self tests failed\", fails);\n\n for format_report in reports {\n println!(\"{}\", format_report);\n warnings += format_report.warning_count();\n }\n\n assert!(warnings == 0,\n \"Rustfmt's code generated {} warnings\",\n warnings);\n}\n\n\/\/ For each file, run rustfmt and collect the output.\n\/\/ Returns the number of files checked and the number of failures.\nfn check_files<I>(files: I, write_mode: WriteMode) -> (Vec<FormatReport>, u32, u32)\n where I: Iterator<Item = String>\n{\n let mut count = 0;\n let mut fails = 0;\n let mut reports = vec![];\n\n for file_name in files.filter(|f| f.ends_with(\".rs\")) {\n println!(\"Testing '{}'...\", file_name);\n\n match idempotent_check(file_name, write_mode) {\n Ok(report) => reports.push(report),\n Err(msg) => {\n print_mismatches(msg);\n fails += 1;\n }\n }\n\n count += 1;\n }\n\n (reports, count, fails)\n}\n\nfn print_mismatches(result: HashMap<String, Vec<Mismatch>>) {\n let mut t = term::stdout().unwrap();\n\n for (file_name, diff) in result {\n print_diff(diff,\n |line_num| format!(\"\\nMismatch at {}:{}:\", file_name, line_num));\n }\n\n assert!(t.reset().unwrap());\n}\n\npub fn run_rustfmt(filename: String, write_mode: WriteMode) -> String {\n let sig_comments = read_significant_comments(&filename);\n let mut config = get_config(sig_comments.get(\"config\").map(|x| &(*x)[..]));\n\n for (key, val) in &sig_comments {\n if key != \"target\" && key != \"config\" {\n config.override_value(key, val);\n }\n }\n\n \/\/ Don't generate warnings for to-do items.\n config.report_todo = ReportTactic::Never;\n\n \/\/ Simulate run()\n let mut out = Vec::new();\n let file_map = format(Path::new(&filename), &config, write_mode);\n let _ = filemap::write_all_files(&file_map, &mut out, write_mode, &config);\n String::from_utf8(out).unwrap()\n}\n\npub fn idempotent_check(filename: String,\n write_mode: WriteMode)\n -> Result<FormatReport, HashMap<String, Vec<Mismatch>>> {\n let sig_comments = read_significant_comments(&filename);\n let mut config = get_config(sig_comments.get(\"config\").map(|x| &(*x)[..]));\n\n for (key, val) in &sig_comments {\n if key != \"target\" && key != \"config\" {\n config.override_value(key, val);\n }\n }\n\n \/\/ Don't generate warnings for to-do items.\n config.report_todo = ReportTactic::Never;\n\n let mut file_map = format(Path::new(&filename), &config, write_mode);\n let format_report = 
fmt_lines(&mut file_map, &config);\n\n let mut write_result = HashMap::new();\n for (filename, text) in file_map.iter() {\n let mut v = Vec::new();\n \/\/ Won't panic, as we're not doing any IO.\n write_system_newlines(&mut v, text, &config).unwrap();\n \/\/ Won't panic, we are writing correct utf8.\n let one_result = String::from_utf8(v).unwrap();\n write_result.insert(filename.clone(), one_result);\n }\n\n let target = sig_comments.get(\"target\").map(|x| &(*x)[..]);\n\n handle_result(write_result, target, write_mode).map(|_| format_report)\n}\n\n\/\/ Reads test config file from comments and reads its contents.\nfn get_config(config_file: Option<&str>) -> Config {\n let config_file_name = match config_file {\n None => return Default::default(),\n Some(file_name) => {\n let mut full_path = \"tests\/config\/\".to_owned();\n full_path.push_str(&file_name);\n full_path\n }\n };\n\n let mut def_config_file = fs::File::open(config_file_name)\n .ok()\n .expect(\"Couldn't open config.\");\n let mut def_config = String::new();\n def_config_file.read_to_string(&mut def_config).ok().expect(\"Couldn't read config.\");\n\n Config::from_toml(&def_config)\n}\n\n\/\/ Reads significant comments of the form: \/\/ rustfmt-key: value\n\/\/ into a hash map.\nfn read_significant_comments(file_name: &str) -> HashMap<String, String> {\n let file = fs::File::open(file_name)\n .ok()\n .expect(&format!(\"Couldn't read file {}.\", file_name));\n let reader = BufReader::new(file);\n let pattern = r\"^\\s*\/\/\\s*rustfmt-([^:]+):\\s*(\\S+)\";\n let regex = regex::Regex::new(&pattern).ok().expect(\"Failed creating pattern 1.\");\n\n \/\/ Matches lines containing significant comments or whitespace.\n let line_regex = regex::Regex::new(r\"(^\\s*$)|(^\\s*\/\/\\s*rustfmt-[^:]+:\\s*\\S+)\")\n .ok()\n .expect(\"Failed creating pattern 2.\");\n\n reader.lines()\n .map(|line| line.ok().expect(\"Failed getting line.\"))\n .take_while(|line| line_regex.is_match(&line))\n .filter_map(|line| {\n regex.captures_iter(&line).next().map(|capture| {\n (capture.at(1).expect(\"Couldn't unwrap capture.\").to_owned(),\n capture.at(2).expect(\"Couldn't unwrap capture.\").to_owned())\n })\n })\n .collect()\n}\n\n\/\/ Compare output to input.\n\/\/ TODO: needs a better name, more explanation.\nfn handle_result(result: HashMap<String, String>,\n target: Option<&str>,\n write_mode: WriteMode)\n -> Result<(), HashMap<String, Vec<Mismatch>>> {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n \/\/ If file is in tests\/source, compare to file with same name in tests\/target.\n let target = get_target(&file_name, target, write_mode);\n let mut f = fs::File::open(&target).ok().expect(\"Couldn't open target.\");\n\n let mut text = String::new();\n f.read_to_string(&mut text).ok().expect(\"Failed reading target.\");\n\n if fmt_text != text {\n let diff = make_diff(&text, &fmt_text, DIFF_CONTEXT_SIZE);\n failures.insert(file_name, diff);\n }\n }\n\n if failures.is_empty() {\n Ok(())\n } else {\n Err(failures)\n }\n}\n\n\/\/ Map source file paths to their target paths.\nfn get_target(file_name: &str, target: Option<&str>, write_mode: WriteMode) -> String {\n let file_path = Path::new(file_name);\n let (source_path_prefix, target_path_prefix) = match write_mode {\n WriteMode::Coverage => {\n (Path::new(\"tests\/coverage-source\/\"),\n \"tests\/coverage-target\/\")\n }\n _ => (Path::new(\"tests\/source\/\"), \"tests\/target\/\"),\n };\n\n if file_path.starts_with(source_path_prefix) {\n let mut components = 
file_path.components();\n \/\/ Can't skip(2) as the resulting iterator can't as_path()\n components.next();\n components.next();\n\n let new_target = match components.as_path().to_str() {\n Some(string) => string,\n None => file_name,\n };\n let base = target.unwrap_or(new_target);\n\n format!(\"{}{}\", target_path_prefix, base)\n } else {\n file_name.to_owned()\n }\n}\n\n#[test]\nfn rustfmt_diff_make_diff_tests() {\n let diff = make_diff(\"a\\nb\\nc\\nd\", \"a\\ne\\nc\\nd\", 3);\n assert_eq!(diff,\n vec![Mismatch {\n line_number: 1,\n lines: vec![DiffLine::Context(\"a\".into()),\n DiffLine::Resulting(\"b\".into()),\n DiffLine::Expected(\"e\".into()),\n DiffLine::Context(\"c\".into()),\n DiffLine::Context(\"d\".into())],\n }]);\n}\n<commit_msg>Extract helper functions for testing.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate rustfmt;\nextern crate diff;\nextern crate regex;\nextern crate term;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::{self, Read, BufRead, BufReader};\nuse std::path::Path;\n\nuse rustfmt::*;\nuse rustfmt::filemap::{write_system_newlines, FileMap};\nuse rustfmt::config::{Config, ReportTactic, WriteMode};\nuse rustfmt::rustfmt_diff::*;\n\nstatic DIFF_CONTEXT_SIZE: usize = 3;\n\nfn get_path_string(dir_entry: io::Result<fs::DirEntry>) -> String {\n let path = dir_entry.ok().expect(\"Couldn't get DirEntry.\").path();\n\n path.to_str().expect(\"Couldn't stringify path.\").to_owned()\n}\n\n\/\/ Integration tests. The files in the tests\/source are formatted and compared\n\/\/ to their equivalent in tests\/target. The target file and config can be\n\/\/ overriden by annotations in the source file. 
The input and output must match\n\/\/ exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at\n\/\/ least report.\n#[test]\nfn system_tests() {\n \/\/ Get all files in the tests\/source directory.\n let files = fs::read_dir(\"tests\/source\").ok().expect(\"Couldn't read source dir.\");\n \/\/ Turn a DirEntry into a String that represents the relative path to the\n \/\/ file.\n let files = files.map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Default);\n\n \/\/ Display results.\n println!(\"Ran {} system tests.\", count);\n assert!(fails == 0, \"{} system tests failed\", fails);\n}\n\n\/\/ Do the same for tests\/coverage-source directory\n\/\/ the only difference is the coverage mode\n#[test]\nfn coverage_tests() {\n let files = fs::read_dir(\"tests\/coverage-source\").ok().expect(\"Couldn't read source dir.\");\n let files = files.map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Coverage);\n\n println!(\"Ran {} tests in coverage mode.\", count);\n assert!(fails == 0, \"{} tests failed\", fails);\n}\n\n#[test]\nfn checkstyle_test() {\n let filename = \"tests\/source\/fn-single-line.rs\";\n let expected_filename = \"tests\/writemode\/checkstyle.xml\";\n assert_output(filename, expected_filename, WriteMode::Checkstyle);\n}\n\n\n\/\/ Helper function for comparing the results of rustfmt\n\/\/ to a known output file generated by one of the write modes.\nfn assert_output(source: &str, expected_filename: &str, write_mode: WriteMode) {\n let config = read_config(&source);\n let file_map = run_rustfmt(source.to_string(), write_mode);\n\n \/\/ Populate output by writing to a vec.\n let mut out = vec![];\n let _ = filemap::write_all_files(&file_map, &mut out, write_mode, &config);\n let output = String::from_utf8(out).unwrap();\n\n let mut expected_file = fs::File::open(&expected_filename)\n .ok()\n .expect(\"Couldn't open target.\");\n let mut expected_text = String::new();\n expected_file.read_to_string(&mut expected_text)\n .ok()\n .expect(\"Failed reading target.\");\n\n let compare = make_diff(&expected_text, &output, DIFF_CONTEXT_SIZE);\n if compare.len() > 0 {\n let mut failures = HashMap::new();\n failures.insert(source.to_string(), compare);\n print_mismatches(failures);\n assert!(false, \"Text does not match expected output\");\n }\n}\n\n\/\/ Idempotence tests. Files in tests\/target are checked to be unaltered by\n\/\/ rustfmt.\n#[test]\nfn idempotence_tests() {\n \/\/ Get all files in the tests\/target directory.\n let files = fs::read_dir(\"tests\/target\")\n .ok()\n .expect(\"Couldn't read target dir.\")\n .map(get_path_string);\n let (_reports, count, fails) = check_files(files, WriteMode::Default);\n\n \/\/ Display results.\n println!(\"Ran {} idempotent tests.\", count);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Run rustfmt on itself. This operation must be idempotent. 
We also check that\n\/\/ no warnings are emitted.\n#[test]\nfn self_tests() {\n let files = fs::read_dir(\"src\/bin\")\n .ok()\n .expect(\"Couldn't read src dir.\")\n .chain(fs::read_dir(\"tests\").ok().expect(\"Couldn't read tests dir.\"))\n .map(get_path_string);\n \/\/ Hack because there's no `IntoIterator` impl for `[T; N]`.\n let files = files.chain(Some(\"src\/lib.rs\".to_owned()).into_iter());\n\n let (reports, count, fails) = check_files(files, WriteMode::Default);\n let mut warnings = 0;\n\n \/\/ Display results.\n println!(\"Ran {} self tests.\", count);\n assert!(fails == 0, \"{} self tests failed\", fails);\n\n for format_report in reports {\n println!(\"{}\", format_report);\n warnings += format_report.warning_count();\n }\n\n assert!(warnings == 0,\n \"Rustfmt's code generated {} warnings\",\n warnings);\n}\n\n\/\/ For each file, run rustfmt and collect the output.\n\/\/ Returns the number of files checked and the number of failures.\nfn check_files<I>(files: I, write_mode: WriteMode) -> (Vec<FormatReport>, u32, u32)\n where I: Iterator<Item = String>\n{\n let mut count = 0;\n let mut fails = 0;\n let mut reports = vec![];\n\n for file_name in files.filter(|f| f.ends_with(\".rs\")) {\n println!(\"Testing '{}'...\", file_name);\n\n match idempotent_check(file_name, write_mode) {\n Ok(report) => reports.push(report),\n Err(msg) => {\n print_mismatches(msg);\n fails += 1;\n }\n }\n\n count += 1;\n }\n\n (reports, count, fails)\n}\n\nfn print_mismatches(result: HashMap<String, Vec<Mismatch>>) {\n let mut t = term::stdout().unwrap();\n\n for (file_name, diff) in result {\n print_diff(diff,\n |line_num| format!(\"\\nMismatch at {}:{}:\", file_name, line_num));\n }\n\n assert!(t.reset().unwrap());\n}\n\nfn read_config(filename: &str) -> Config {\n let sig_comments = read_significant_comments(&filename);\n let mut config = get_config(sig_comments.get(\"config\").map(|x| &(*x)[..]));\n\n for (key, val) in &sig_comments {\n if key != \"target\" && key != \"config\" {\n config.override_value(key, val);\n }\n }\n\n \/\/ Don't generate warnings for to-do items.\n config.report_todo = ReportTactic::Never;\n config\n}\n\n\/\/ Simulate run()\nfn run_rustfmt(filename: String, write_mode: WriteMode) -> FileMap {\n let config = read_config(&filename);\n format(Path::new(&filename), &config, write_mode)\n}\n\npub fn idempotent_check(filename: String,\n write_mode: WriteMode)\n -> Result<FormatReport, HashMap<String, Vec<Mismatch>>> {\n let sig_comments = read_significant_comments(&filename);\n let config = read_config(&filename);\n let mut file_map = run_rustfmt(filename, write_mode);\n let format_report = fmt_lines(&mut file_map, &config);\n\n let mut write_result = HashMap::new();\n for (filename, text) in file_map.iter() {\n let mut v = Vec::new();\n \/\/ Won't panic, as we're not doing any IO.\n write_system_newlines(&mut v, text, &config).unwrap();\n \/\/ Won't panic, we are writing correct utf8.\n let one_result = String::from_utf8(v).unwrap();\n write_result.insert(filename.clone(), one_result);\n }\n\n let target = sig_comments.get(\"target\").map(|x| &(*x)[..]);\n\n handle_result(write_result, target, write_mode).map(|_| format_report)\n}\n\n\/\/ Reads test config file from comments and reads its contents.\nfn get_config(config_file: Option<&str>) -> Config {\n let config_file_name = match config_file {\n None => return Default::default(),\n Some(file_name) => {\n let mut full_path = \"tests\/config\/\".to_owned();\n full_path.push_str(&file_name);\n full_path\n }\n };\n\n let mut 
def_config_file = fs::File::open(config_file_name)\n .ok()\n .expect(\"Couldn't open config.\");\n let mut def_config = String::new();\n def_config_file.read_to_string(&mut def_config).ok().expect(\"Couldn't read config.\");\n\n Config::from_toml(&def_config)\n}\n\n\/\/ Reads significant comments of the form: \/\/ rustfmt-key: value\n\/\/ into a hash map.\nfn read_significant_comments(file_name: &str) -> HashMap<String, String> {\n let file = fs::File::open(file_name)\n .ok()\n .expect(&format!(\"Couldn't read file {}.\", file_name));\n let reader = BufReader::new(file);\n let pattern = r\"^\\s*\/\/\\s*rustfmt-([^:]+):\\s*(\\S+)\";\n let regex = regex::Regex::new(&pattern).ok().expect(\"Failed creating pattern 1.\");\n\n \/\/ Matches lines containing significant comments or whitespace.\n let line_regex = regex::Regex::new(r\"(^\\s*$)|(^\\s*\/\/\\s*rustfmt-[^:]+:\\s*\\S+)\")\n .ok()\n .expect(\"Failed creating pattern 2.\");\n\n reader.lines()\n .map(|line| line.ok().expect(\"Failed getting line.\"))\n .take_while(|line| line_regex.is_match(&line))\n .filter_map(|line| {\n regex.captures_iter(&line).next().map(|capture| {\n (capture.at(1).expect(\"Couldn't unwrap capture.\").to_owned(),\n capture.at(2).expect(\"Couldn't unwrap capture.\").to_owned())\n })\n })\n .collect()\n}\n\n\/\/ Compare output to input.\n\/\/ TODO: needs a better name, more explanation.\nfn handle_result(result: HashMap<String, String>,\n target: Option<&str>,\n write_mode: WriteMode)\n -> Result<(), HashMap<String, Vec<Mismatch>>> {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n \/\/ If file is in tests\/source, compare to file with same name in tests\/target.\n let target = get_target(&file_name, target, write_mode);\n let mut f = fs::File::open(&target).ok().expect(\"Couldn't open target.\");\n\n let mut text = String::new();\n f.read_to_string(&mut text).ok().expect(\"Failed reading target.\");\n\n if fmt_text != text {\n let diff = make_diff(&text, &fmt_text, DIFF_CONTEXT_SIZE);\n failures.insert(file_name, diff);\n }\n }\n\n if failures.is_empty() {\n Ok(())\n } else {\n Err(failures)\n }\n}\n\n\/\/ Map source file paths to their target paths.\nfn get_target(file_name: &str, target: Option<&str>, write_mode: WriteMode) -> String {\n let file_path = Path::new(file_name);\n let (source_path_prefix, target_path_prefix) = match write_mode {\n WriteMode::Coverage => {\n (Path::new(\"tests\/coverage-source\/\"),\n \"tests\/coverage-target\/\")\n }\n _ => (Path::new(\"tests\/source\/\"), \"tests\/target\/\"),\n };\n\n if file_path.starts_with(source_path_prefix) {\n let mut components = file_path.components();\n \/\/ Can't skip(2) as the resulting iterator can't as_path()\n components.next();\n components.next();\n\n let new_target = match components.as_path().to_str() {\n Some(string) => string,\n None => file_name,\n };\n let base = target.unwrap_or(new_target);\n\n format!(\"{}{}\", target_path_prefix, base)\n } else {\n file_name.to_owned()\n }\n}\n\n#[test]\nfn rustfmt_diff_make_diff_tests() {\n let diff = make_diff(\"a\\nb\\nc\\nd\", \"a\\ne\\nc\\nd\", 3);\n assert_eq!(diff,\n vec![Mismatch {\n line_number: 1,\n lines: vec![DiffLine::Context(\"a\".into()),\n DiffLine::Resulting(\"b\".into()),\n DiffLine::Expected(\"e\".into()),\n DiffLine::Context(\"c\".into()),\n DiffLine::Context(\"d\".into())],\n }]);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Parsing of GCC-style Language-Specific Data Area (LSDA)\n\/\/! For details see:\n\/\/! http:\/\/refspecs.linuxfoundation.org\/LSB_3.0.0\/LSB-PDA\/LSB-PDA\/ehframechpt.html\n\/\/! http:\/\/mentorembedded.github.io\/cxx-abi\/exceptions.pdf\n\/\/! http:\/\/www.airs.com\/blog\/archives\/460\n\/\/! http:\/\/www.airs.com\/blog\/archives\/464\n\/\/!\n\/\/! A reference implementation may be found in the GCC source tree\n\/\/! (<root>\/libgcc\/unwind-c.c as of this writing)\n\n#![allow(non_upper_case_globals)]\n#![allow(unused)]\n\nuse dwarf::DwarfReader;\nuse core::mem;\n\npub const DW_EH_PE_omit: u8 = 0xFF;\npub const DW_EH_PE_absptr: u8 = 0x00;\n\npub const DW_EH_PE_uleb128: u8 = 0x01;\npub const DW_EH_PE_udata2: u8 = 0x02;\npub const DW_EH_PE_udata4: u8 = 0x03;\npub const DW_EH_PE_udata8: u8 = 0x04;\npub const DW_EH_PE_sleb128: u8 = 0x09;\npub const DW_EH_PE_sdata2: u8 = 0x0A;\npub const DW_EH_PE_sdata4: u8 = 0x0B;\npub const DW_EH_PE_sdata8: u8 = 0x0C;\n\npub const DW_EH_PE_pcrel: u8 = 0x10;\npub const DW_EH_PE_textrel: u8 = 0x20;\npub const DW_EH_PE_datarel: u8 = 0x30;\npub const DW_EH_PE_funcrel: u8 = 0x40;\npub const DW_EH_PE_aligned: u8 = 0x50;\n\npub const DW_EH_PE_indirect: u8 = 0x80;\n\n#[derive(Copy, Clone)]\npub struct EHContext<'a> {\n pub ip: usize, \/\/ Current instruction pointer\n pub func_start: usize, \/\/ Address of the current function\n pub get_text_start: &'a Fn() -> usize, \/\/ Get address of the code section\n pub get_data_start: &'a Fn() -> usize, \/\/ Get address of the data section\n}\n\npub enum EHAction {\n None,\n Cleanup(usize),\n Catch(usize),\n Terminate,\n}\n\npub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = \"ios\", target_arch = \"arm\"));\n\npub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) -> EHAction {\n if lsda.is_null() {\n return EHAction::None;\n }\n\n let func_start = context.func_start;\n let mut reader = DwarfReader::new(lsda);\n\n let start_encoding = reader.read::<u8>();\n \/\/ base address for landing pad offsets\n let lpad_base = if start_encoding != DW_EH_PE_omit {\n read_encoded_pointer(&mut reader, context, start_encoding)\n } else {\n func_start\n };\n\n let ttype_encoding = reader.read::<u8>();\n if ttype_encoding != DW_EH_PE_omit {\n \/\/ Rust doesn't analyze exception types, so we don't care about the type table\n reader.read_uleb128();\n }\n\n let call_site_encoding = reader.read::<u8>();\n let call_site_table_length = reader.read_uleb128();\n let action_table = reader.ptr.offset(call_site_table_length as isize);\n let ip = context.ip;\n\n if !USING_SJLJ_EXCEPTIONS {\n while reader.ptr < action_table {\n let cs_start = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_len = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_lpad = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_action = reader.read_uleb128();\n \/\/ Callsite table is sorted by cs_start, so if we've passed the ip, we\n \/\/ may stop searching.\n if ip < func_start + cs_start {\n break;\n }\n if ip < func_start + cs_start 
+ cs_len {\n if cs_lpad == 0 {\n return EHAction::None;\n } else {\n let lpad = lpad_base + cs_lpad;\n return interpret_cs_action(cs_action, lpad);\n }\n }\n }\n \/\/ If ip is not present in the table, call terminate. This is for\n \/\/ a destructor inside a cleanup, or a library routine the compiler\n \/\/ was not expecting to throw\n EHAction::Terminate\n } else {\n \/\/ SjLj version:\n \/\/ The \"IP\" is an index into the call-site table, with two exceptions:\n \/\/ -1 means 'no-action', and 0 means 'terminate'.\n match ip as isize {\n -1 => return EHAction::None,\n 0 => return EHAction::Terminate,\n _ => (),\n }\n let mut idx = ip;\n loop {\n let cs_lpad = reader.read_uleb128();\n let cs_action = reader.read_uleb128();\n idx -= 1;\n if idx == 0 {\n \/\/ Can never have null landing pad for sjlj -- that would have\n \/\/ been indicated by a -1 call site index.\n let lpad = (cs_lpad + 1) as usize;\n return interpret_cs_action(cs_action, lpad);\n }\n }\n }\n}\n\nfn interpret_cs_action(cs_action: u64, lpad: usize) -> EHAction {\n if cs_action == 0 {\n EHAction::Cleanup(lpad)\n } else {\n EHAction::Catch(lpad)\n }\n}\n\n#[inline]\nfn round_up(unrounded: usize, align: usize) -> usize {\n assert!(align.is_power_of_two());\n (unrounded + align - 1) & !(align - 1)\n}\n\nunsafe fn read_encoded_pointer(reader: &mut DwarfReader,\n context: &EHContext,\n encoding: u8)\n -> usize {\n assert!(encoding != DW_EH_PE_omit);\n\n \/\/ DW_EH_PE_aligned implies it's an absolute pointer value\n if encoding == DW_EH_PE_aligned {\n reader.ptr = round_up(reader.ptr as usize, mem::size_of::<usize>()) as *const u8;\n return reader.read::<usize>();\n }\n\n let mut result = match encoding & 0x0F {\n DW_EH_PE_absptr => reader.read::<usize>(),\n DW_EH_PE_uleb128 => reader.read_uleb128() as usize,\n DW_EH_PE_udata2 => reader.read::<u16>() as usize,\n DW_EH_PE_udata4 => reader.read::<u32>() as usize,\n DW_EH_PE_udata8 => reader.read::<u64>() as usize,\n DW_EH_PE_sleb128 => reader.read_sleb128() as usize,\n DW_EH_PE_sdata2 => reader.read::<i16>() as usize,\n DW_EH_PE_sdata4 => reader.read::<i32>() as usize,\n DW_EH_PE_sdata8 => reader.read::<i64>() as usize,\n _ => panic!(),\n };\n\n result += match encoding & 0x70 {\n DW_EH_PE_absptr => 0,\n \/\/ relative to address of the encoded value, despite the name\n DW_EH_PE_pcrel => reader.ptr as usize,\n DW_EH_PE_funcrel => {\n assert!(context.func_start != 0);\n context.func_start\n }\n DW_EH_PE_textrel => {\n (*context.get_text_start)()\n }\n DW_EH_PE_datarel => {\n (*context.get_data_start)()\n }\n _ => panic!(),\n };\n\n if encoding & DW_EH_PE_indirect != 0 {\n result = *(result as *const usize);\n }\n\n result\n}\n<commit_msg>Looser LSDA parsing<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Parsing of GCC-style Language-Specific Data Area (LSDA)\n\/\/! For details see:\n\/\/! http:\/\/refspecs.linuxfoundation.org\/LSB_3.0.0\/LSB-PDA\/LSB-PDA\/ehframechpt.html\n\/\/! http:\/\/mentorembedded.github.io\/cxx-abi\/exceptions.pdf\n\/\/! http:\/\/www.airs.com\/blog\/archives\/460\n\/\/! 
http:\/\/www.airs.com\/blog\/archives\/464\n\/\/!\n\/\/! A reference implementation may be found in the GCC source tree\n\/\/! (<root>\/libgcc\/unwind-c.c as of this writing)\n\n#![allow(non_upper_case_globals)]\n#![allow(unused)]\n\nuse dwarf::DwarfReader;\nuse core::mem;\n\npub const DW_EH_PE_omit: u8 = 0xFF;\npub const DW_EH_PE_absptr: u8 = 0x00;\n\npub const DW_EH_PE_uleb128: u8 = 0x01;\npub const DW_EH_PE_udata2: u8 = 0x02;\npub const DW_EH_PE_udata4: u8 = 0x03;\npub const DW_EH_PE_udata8: u8 = 0x04;\npub const DW_EH_PE_sleb128: u8 = 0x09;\npub const DW_EH_PE_sdata2: u8 = 0x0A;\npub const DW_EH_PE_sdata4: u8 = 0x0B;\npub const DW_EH_PE_sdata8: u8 = 0x0C;\n\npub const DW_EH_PE_pcrel: u8 = 0x10;\npub const DW_EH_PE_textrel: u8 = 0x20;\npub const DW_EH_PE_datarel: u8 = 0x30;\npub const DW_EH_PE_funcrel: u8 = 0x40;\npub const DW_EH_PE_aligned: u8 = 0x50;\n\npub const DW_EH_PE_indirect: u8 = 0x80;\n\n#[derive(Copy, Clone)]\npub struct EHContext<'a> {\n pub ip: usize, \/\/ Current instruction pointer\n pub func_start: usize, \/\/ Address of the current function\n pub get_text_start: &'a Fn() -> usize, \/\/ Get address of the code section\n pub get_data_start: &'a Fn() -> usize, \/\/ Get address of the data section\n}\n\npub enum EHAction {\n None,\n Cleanup(usize),\n Catch(usize),\n Terminate,\n}\n\npub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = \"ios\", target_arch = \"arm\"));\n\npub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) -> EHAction {\n if lsda.is_null() {\n return EHAction::None;\n }\n\n let func_start = context.func_start;\n let mut reader = DwarfReader::new(lsda);\n\n let start_encoding = reader.read::<u8>();\n \/\/ base address for landing pad offsets\n let lpad_base = if start_encoding != DW_EH_PE_omit {\n read_encoded_pointer(&mut reader, context, start_encoding)\n } else {\n func_start\n };\n\n let ttype_encoding = reader.read::<u8>();\n if ttype_encoding != DW_EH_PE_omit {\n \/\/ Rust doesn't analyze exception types, so we don't care about the type table\n reader.read_uleb128();\n }\n\n let call_site_encoding = reader.read::<u8>();\n let call_site_table_length = reader.read_uleb128();\n let action_table = reader.ptr.offset(call_site_table_length as isize);\n let ip = context.ip;\n\n if !USING_SJLJ_EXCEPTIONS {\n while reader.ptr < action_table {\n let cs_start = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_len = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_lpad = read_encoded_pointer(&mut reader, context, call_site_encoding);\n let cs_action = reader.read_uleb128();\n \/\/ Callsite table is sorted by cs_start, so if we've passed the ip, we\n \/\/ may stop searching.\n if ip < func_start + cs_start {\n break;\n }\n if ip < func_start + cs_start + cs_len {\n if cs_lpad == 0 {\n return EHAction::None;\n } else {\n let lpad = lpad_base + cs_lpad;\n return interpret_cs_action(cs_action, lpad);\n }\n }\n }\n \/\/ Ip is not present in the table. This should not hapen... 
but it does: issie #35011.\n \/\/ So rather than returning EHAction::Terminate, we do this.\n EHAction::None\n } else {\n \/\/ SjLj version:\n \/\/ The \"IP\" is an index into the call-site table, with two exceptions:\n \/\/ -1 means 'no-action', and 0 means 'terminate'.\n match ip as isize {\n -1 => return EHAction::None,\n 0 => return EHAction::Terminate,\n _ => (),\n }\n let mut idx = ip;\n loop {\n let cs_lpad = reader.read_uleb128();\n let cs_action = reader.read_uleb128();\n idx -= 1;\n if idx == 0 {\n \/\/ Can never have null landing pad for sjlj -- that would have\n \/\/ been indicated by a -1 call site index.\n let lpad = (cs_lpad + 1) as usize;\n return interpret_cs_action(cs_action, lpad);\n }\n }\n }\n}\n\nfn interpret_cs_action(cs_action: u64, lpad: usize) -> EHAction {\n if cs_action == 0 {\n EHAction::Cleanup(lpad)\n } else {\n EHAction::Catch(lpad)\n }\n}\n\n#[inline]\nfn round_up(unrounded: usize, align: usize) -> usize {\n assert!(align.is_power_of_two());\n (unrounded + align - 1) & !(align - 1)\n}\n\nunsafe fn read_encoded_pointer(reader: &mut DwarfReader,\n context: &EHContext,\n encoding: u8)\n -> usize {\n assert!(encoding != DW_EH_PE_omit);\n\n \/\/ DW_EH_PE_aligned implies it's an absolute pointer value\n if encoding == DW_EH_PE_aligned {\n reader.ptr = round_up(reader.ptr as usize, mem::size_of::<usize>()) as *const u8;\n return reader.read::<usize>();\n }\n\n let mut result = match encoding & 0x0F {\n DW_EH_PE_absptr => reader.read::<usize>(),\n DW_EH_PE_uleb128 => reader.read_uleb128() as usize,\n DW_EH_PE_udata2 => reader.read::<u16>() as usize,\n DW_EH_PE_udata4 => reader.read::<u32>() as usize,\n DW_EH_PE_udata8 => reader.read::<u64>() as usize,\n DW_EH_PE_sleb128 => reader.read_sleb128() as usize,\n DW_EH_PE_sdata2 => reader.read::<i16>() as usize,\n DW_EH_PE_sdata4 => reader.read::<i32>() as usize,\n DW_EH_PE_sdata8 => reader.read::<i64>() as usize,\n _ => panic!(),\n };\n\n result += match encoding & 0x70 {\n DW_EH_PE_absptr => 0,\n \/\/ relative to address of the encoded value, despite the name\n DW_EH_PE_pcrel => reader.ptr as usize,\n DW_EH_PE_funcrel => {\n assert!(context.func_start != 0);\n context.func_start\n }\n DW_EH_PE_textrel => {\n (*context.get_text_start)()\n }\n DW_EH_PE_datarel => {\n (*context.get_data_start)()\n }\n _ => panic!(),\n };\n\n if encoding & DW_EH_PE_indirect != 0 {\n result = *(result as *const usize);\n }\n\n result\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n layout: usize,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: 0,\n };\n\n unsafe {\n module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) 
== 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: char_for_scancode(scancode & 0x7F, shift, self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return 
Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = layout;\n }\n\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n\n\/\/\/ Function to return the character associated with the scancode, and the layout\nfn char_for_scancode(scancode: u8, shift: bool, layout: usize) -> char {\n if scancode >= 58 {\n '\\x00'\n }\n let character =\n match layout {\n 0 => SCANCODES_EN[scancode as usize],\n 1 => SCANCODES_FR[scancode as usize],\n };\n if shift {\n character = characters[1]\n } else {\n character = characters[scancode as usize][0]\n }\n}\n\nstatic SCANCODES_EN: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '!'],\n ['2', '@'],\n ['3', '#'],\n ['4', '$'],\n ['5', '%'],\n ['6', '^'],\n ['7', '&'],\n ['8', '*'],\n ['9', '('],\n ['0', ')'],\n ['-', '_'],\n ['=', '+'],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['q', 'Q'],\n ['w', 'W'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['[', '{'],\n [']', '}'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['a', 'A'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n [';', ':'],\n ['\\'', '\"'],\n ['`', '~'],\n ['\\0', '\\0'],\n ['\\\\', '|'],\n ['z', 'Z'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n ['m', 'M'],\n [',', '<'],\n ['.', '>'],\n ['\/', '?'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n\n static SCANCODES_FR: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '&'],\n ['2', 'é'],\n ['3', '\"'],\n ['4', '\\''],\n ['5', '('],\n ['6', '-'],\n ['7', 'è'],\n ['8', '_'],\n ['9', 'ç'],\n ['0', 'à'],\n ['-', ')'],\n ['=', '='],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['a', 'A'],\n ['z', 'Z'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['^', '¨'],\n ['$', '£'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['q', 'Q'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n ['m', 'M'],\n ['ù', '%'],\n ['*', 'µ'],\n ['\\0', '\\0'],\n ['<', '>'],\n ['w', 'W'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n [',', '?'],\n [';', '.'],\n [':', '\/'],\n ['!', '§'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n<commit_msg>debugging char_for_scancode<commit_after>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: 
usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n layout: usize,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: 0,\n };\n\n unsafe {\n module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) == 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: char_for_scancode(scancode & 0x7F, shift, self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 
&& self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = layout;\n }\n\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n\n\/\/\/ Function to return the character associated with the scancode, and the layout\nfn char_for_scancode(scancode: u8, shift: bool, layout: usize) -> char {\n if scancode >= 58 {\n '\\x00'\n }\n let characters =\n match layout {\n 0 => SCANCODES_EN[scancode as usize],\n 1 => SCANCODES_FR[scancode as usize],\n };\n if shift {\n character = characters[1]\n } else {\n character = characters[scancode as usize][0]\n }\n}\n\nstatic SCANCODES_EN: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '!'],\n ['2', '@'],\n ['3', '#'],\n ['4', '$'],\n ['5', '%'],\n ['6', '^'],\n ['7', '&'],\n ['8', '*'],\n ['9', '('],\n ['0', ')'],\n ['-', '_'],\n ['=', '+'],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['q', 'Q'],\n ['w', 'W'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['[', '{'],\n [']', '}'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['a', 'A'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n [';', ':'],\n ['\\'', '\"'],\n ['`', '~'],\n ['\\0', '\\0'],\n ['\\\\', '|'],\n ['z', 'Z'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n ['m', 'M'],\n [',', '<'],\n ['.', '>'],\n ['\/', '?'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n\n static SCANCODES_FR: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '&'],\n ['2', 'é'],\n ['3', '\"'],\n ['4', '\\''],\n ['5', '('],\n ['6', '-'],\n ['7', 'è'],\n ['8', '_'],\n ['9', 'ç'],\n ['0', 'à'],\n ['-', ')'],\n ['=', '='],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['a', 'A'],\n ['z', 'Z'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['^', '¨'],\n ['$', '£'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['q', 'Q'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n ['m', 'M'],\n ['ù', '%'],\n ['*', 'µ'],\n ['\\0', '\\0'],\n ['<', '>'],\n ['w', 'W'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n [',', '?'],\n [';', '.'],\n [':', '\/'],\n ['!', '§'],\n ['\\0', '\\0'],\n 
['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n<|endoftext|>"} {"text":"<commit_before>use std::cell::RefCell;\nuse RawUniformValue;\n\nuse gl;\nuse Handle;\nuse context::CommandContext;\nuse version::Version;\nuse version::Api;\n\npub struct UniformsStorage {\n values: RefCell<Vec<Option<RawUniformValue>>>,\n uniform_blocks: RefCell<Vec<Option<gl::types::GLuint>>>,\n shader_storage_blocks: RefCell<Vec<Option<gl::types::GLuint>>>,\n}\n\nimpl UniformsStorage {\n \/\/\/ Builds a new empty storage.\n pub fn new() -> UniformsStorage {\n UniformsStorage {\n values: RefCell::new(Vec::with_capacity(0)),\n uniform_blocks: RefCell::new(Vec::with_capacity(0)),\n shader_storage_blocks: RefCell::new(Vec::with_capacity(0)),\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniform`.\n pub fn set_uniform_value(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLint, value: &RawUniformValue)\n {\n let mut values = self.values.borrow_mut();\n\n if values.len() <= location as usize {\n values.reserve(location as usize + 1);\n for _ in (values.len() .. location as usize + 1) {\n values.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n macro_rules! uniform(\n ($ctxt:expr, $uniform:ident, $uniform_arb:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.version >= &Version(Api::Gl, 1, 5) ||\n $ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n $ctxt.gl.$uniform($($params),+)\n } else {\n assert!($ctxt.extensions.gl_arb_shader_objects);\n $ctxt.gl.$uniform_arb($($params),+)\n }\n }\n )\n );\n\n match (value, &mut values[location as usize]) {\n (&RawUniformValue::SignedInt(a), &mut Some(RawUniformValue::SignedInt(b))) if a == b => (),\n (&RawUniformValue::UnsignedInt(a), &mut Some(RawUniformValue::UnsignedInt(b))) if a == b => (),\n (&RawUniformValue::Float(a), &mut Some(RawUniformValue::Float(b))) if a == b => (),\n (&RawUniformValue::Mat2(a), &mut Some(RawUniformValue::Mat2(b))) if a == b => (),\n (&RawUniformValue::Mat3(a), &mut Some(RawUniformValue::Mat3(b))) if a == b => (),\n (&RawUniformValue::Mat4(a), &mut Some(RawUniformValue::Mat4(b))) if a == b => (),\n (&RawUniformValue::Vec2(a), &mut Some(RawUniformValue::Vec2(b))) if a == b => (),\n (&RawUniformValue::Vec3(a), &mut Some(RawUniformValue::Vec3(b))) if a == b => (),\n (&RawUniformValue::Vec4(a), &mut Some(RawUniformValue::Vec4(b))) if a == b => (),\n\n (&RawUniformValue::SignedInt(v), target) => {\n *target = Some(RawUniformValue::SignedInt(v));\n uniform!(ctxt, Uniform1i, Uniform1iARB, location, v);\n },\n\n (&RawUniformValue::UnsignedInt(v), target) => {\n *target = Some(RawUniformValue::UnsignedInt(v));\n\n \/\/ Uniform1uiARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform1ui(location, v)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform1iARB(location, v as gl::types::GLint)\n }\n }\n },\n \n (&RawUniformValue::Float(v), target) => {\n *target = Some(RawUniformValue::Float(v));\n uniform!(ctxt, Uniform1f, Uniform1fARB, location, v);\n },\n \n (&RawUniformValue::Mat2(v), target) => {\n *target = Some(RawUniformValue::Mat2(v));\n uniform!(ctxt, UniformMatrix2fv, UniformMatrix2fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Mat3(v), target) => {\n *target = 
Some(RawUniformValue::Mat3(v));\n uniform!(ctxt, UniformMatrix3fv, UniformMatrix3fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Mat4(v), target) => {\n *target = Some(RawUniformValue::Mat4(v));\n uniform!(ctxt, UniformMatrix4fv, UniformMatrix4fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec2(v), target) => {\n *target = Some(RawUniformValue::Vec2(v));\n uniform!(ctxt, Uniform2fv, Uniform2fvARB, location, 1, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec3(v), target) => {\n *target = Some(RawUniformValue::Vec3(v));\n uniform!(ctxt, Uniform3fv, Uniform3fvARB, location, 1, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec4(v), target) => {\n *target = Some(RawUniformValue::Vec4(v));\n uniform!(ctxt, Uniform4fv, Uniform4fvARB, location, 1, v.as_ptr() as *const f32);\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniformBlockBinding`.\n pub fn set_uniform_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.uniform_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n blocks.reserve(location as usize + 1);\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.UniformBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glShaderStorageBlockBinding`.\n pub fn set_shader_storage_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.shader_storage_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n blocks.reserve(location as usize + 1);\n for _ in (blocks.len() .. 
location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.ShaderStorageBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n}\n<commit_msg>Use a SmallVec in uniforms storage<commit_after>use std::cell::RefCell;\nuse RawUniformValue;\n\nuse smallvec::SmallVec;\n\nuse gl;\nuse Handle;\nuse context::CommandContext;\nuse version::Version;\nuse version::Api;\n\npub struct UniformsStorage {\n values: RefCell<SmallVec<[Option<RawUniformValue>; 16]>>,\n uniform_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n shader_storage_blocks: RefCell<SmallVec<[Option<gl::types::GLuint>; 4]>>,\n}\n\nimpl UniformsStorage {\n \/\/\/ Builds a new empty storage.\n pub fn new() -> UniformsStorage {\n UniformsStorage {\n values: RefCell::new(SmallVec::new()),\n uniform_blocks: RefCell::new(SmallVec::new()),\n shader_storage_blocks: RefCell::new(SmallVec::new()),\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniform`.\n pub fn set_uniform_value(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLint, value: &RawUniformValue)\n {\n let mut values = self.values.borrow_mut();\n\n if values.len() <= location as usize {\n for _ in (values.len() .. location as usize + 1) {\n values.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n macro_rules! 
uniform(\n ($ctxt:expr, $uniform:ident, $uniform_arb:ident, $($params:expr),+) => (\n unsafe {\n if $ctxt.version >= &Version(Api::Gl, 1, 5) ||\n $ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n $ctxt.gl.$uniform($($params),+)\n } else {\n assert!($ctxt.extensions.gl_arb_shader_objects);\n $ctxt.gl.$uniform_arb($($params),+)\n }\n }\n )\n );\n\n match (value, &mut values[location as usize]) {\n (&RawUniformValue::SignedInt(a), &mut Some(RawUniformValue::SignedInt(b))) if a == b => (),\n (&RawUniformValue::UnsignedInt(a), &mut Some(RawUniformValue::UnsignedInt(b))) if a == b => (),\n (&RawUniformValue::Float(a), &mut Some(RawUniformValue::Float(b))) if a == b => (),\n (&RawUniformValue::Mat2(a), &mut Some(RawUniformValue::Mat2(b))) if a == b => (),\n (&RawUniformValue::Mat3(a), &mut Some(RawUniformValue::Mat3(b))) if a == b => (),\n (&RawUniformValue::Mat4(a), &mut Some(RawUniformValue::Mat4(b))) if a == b => (),\n (&RawUniformValue::Vec2(a), &mut Some(RawUniformValue::Vec2(b))) if a == b => (),\n (&RawUniformValue::Vec3(a), &mut Some(RawUniformValue::Vec3(b))) if a == b => (),\n (&RawUniformValue::Vec4(a), &mut Some(RawUniformValue::Vec4(b))) if a == b => (),\n\n (&RawUniformValue::SignedInt(v), target) => {\n *target = Some(RawUniformValue::SignedInt(v));\n uniform!(ctxt, Uniform1i, Uniform1iARB, location, v);\n },\n\n (&RawUniformValue::UnsignedInt(v), target) => {\n *target = Some(RawUniformValue::UnsignedInt(v));\n\n \/\/ Uniform1uiARB doesn't exist\n unsafe {\n if ctxt.version >= &Version(Api::Gl, 1, 5) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n ctxt.gl.Uniform1ui(location, v)\n } else {\n assert!(ctxt.extensions.gl_arb_shader_objects);\n ctxt.gl.Uniform1iARB(location, v as gl::types::GLint)\n }\n }\n },\n \n (&RawUniformValue::Float(v), target) => {\n *target = Some(RawUniformValue::Float(v));\n uniform!(ctxt, Uniform1f, Uniform1fARB, location, v);\n },\n \n (&RawUniformValue::Mat2(v), target) => {\n *target = Some(RawUniformValue::Mat2(v));\n uniform!(ctxt, UniformMatrix2fv, UniformMatrix2fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Mat3(v), target) => {\n *target = Some(RawUniformValue::Mat3(v));\n uniform!(ctxt, UniformMatrix3fv, UniformMatrix3fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Mat4(v), target) => {\n *target = Some(RawUniformValue::Mat4(v));\n uniform!(ctxt, UniformMatrix4fv, UniformMatrix4fvARB,\n location, 1, gl::FALSE, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec2(v), target) => {\n *target = Some(RawUniformValue::Vec2(v));\n uniform!(ctxt, Uniform2fv, Uniform2fvARB, location, 1, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec3(v), target) => {\n *target = Some(RawUniformValue::Vec3(v));\n uniform!(ctxt, Uniform3fv, Uniform3fvARB, location, 1, v.as_ptr() as *const f32);\n },\n \n (&RawUniformValue::Vec4(v), target) => {\n *target = Some(RawUniformValue::Vec4(v));\n uniform!(ctxt, Uniform4fv, Uniform4fvARB, location, 1, v.as_ptr() as *const f32);\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glUniformBlockBinding`.\n pub fn set_uniform_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.uniform_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. 
location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.UniformBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n\n \/\/\/ Compares `value` with the value stored in this object. If the values differ, updates\n \/\/\/ the storage and calls `glShaderStorageBlockBinding`.\n pub fn set_shader_storage_block_binding(&self, ctxt: &mut CommandContext, program: Handle,\n location: gl::types::GLuint, value: gl::types::GLuint)\n {\n let mut blocks = self.shader_storage_blocks.borrow_mut();\n\n if blocks.len() <= location as usize {\n for _ in (blocks.len() .. location as usize + 1) {\n blocks.push(None);\n }\n }\n\n \/\/ TODO: don't assume that, instead use DSA if the program is not current\n assert!(ctxt.state.program == program);\n\n match (value, &mut blocks[location as usize]) {\n (a, &mut Some(b)) if a == b => (),\n\n (a, target) => {\n *target = Some(a);\n match program {\n Handle::Id(id) => unsafe {\n ctxt.gl.ShaderStorageBlockBinding(id, location, value);\n },\n _ => unreachable!()\n }\n },\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use arch::context::Context;\nuse arch::memory;\n\nuse collections::string::ToString;\n\nuse common::event::MouseEvent;\nuse common::time::{self, Duration};\n\nuse core::{cmp, mem, ptr, slice};\n\nuse graphics::display::VBEMODEINFO;\n\nuse super::{Packet, Pipe, Setup};\nuse super::desc::*;\n\npub trait Hci {\n fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> usize;\n\n fn descriptor(&mut self,\n address: u8,\n descriptor_type: u8,\n descriptor_index: u8,\n descriptor_ptr: usize,\n descriptor_len: usize) {\n self.msg(address, 0, Pipe::Control, &[\n Packet::Setup(&Setup::get_descriptor(descriptor_type, descriptor_index, 0, descriptor_len as u16)),\n Packet::In(&mut unsafe { slice::from_raw_parts_mut(descriptor_ptr as *mut u8, descriptor_len as usize) }),\n Packet::Out(&[])\n ]);\n }\n\n unsafe fn device(&mut self, address: u8) where Self: Sized + 'static {\n self.msg(0, 0, Pipe::Control, &[\n Packet::Setup(&Setup::set_address(address)),\n Packet::In(&mut [])\n ]);\n\n let mut desc_dev = box DeviceDescriptor::default();\n self.descriptor(address,\n DESC_DEV,\n 0,\n (&mut *desc_dev as *mut DeviceDescriptor) as usize,\n mem::size_of_val(&*desc_dev));\n \/\/debugln!(\"{:#?}\", *desc_dev);\n\n if desc_dev.manufacturer_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.manufacturer_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Manufacturer: {}\", desc_str.str());\n }\n\n if desc_dev.product_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.product_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Product: {}\", desc_str.str());\n }\n\n if desc_dev.serial_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.serial_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Serial: {}\", desc_str.str());\n }\n\n for configuration in 
0..(*desc_dev).configurations {\n let desc_cfg_len = 1023;\n let desc_cfg_buf = memory::alloc(desc_cfg_len) as *mut u8;\n for i in 0..desc_cfg_len as isize {\n ptr::write(desc_cfg_buf.offset(i), 0);\n }\n self.descriptor(address,\n DESC_CFG,\n configuration,\n desc_cfg_buf as usize,\n desc_cfg_len);\n\n let desc_cfg = ptr::read(desc_cfg_buf as *const ConfigDescriptor);\n \/\/debugln!(\"{:#?}\", desc_cfg);\n\n if desc_cfg.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_cfg.string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n \/\/debugln!(\"Configuration: {}\", desc_str.str());\n }\n\n let mut hid = false;\n\n let mut i = desc_cfg.length as isize;\n while i < desc_cfg.total_length as isize {\n let length = ptr::read(desc_cfg_buf.offset(i));\n let descriptor_type = ptr::read(desc_cfg_buf.offset(i + 1));\n match descriptor_type {\n DESC_INT => {\n let desc_int = ptr::read(desc_cfg_buf.offset(i) as *const InterfaceDescriptor);\n \/\/debugln!(\"{:#?}\", desc_int);\n\n if desc_int.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_int.string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n \/\/debugln!(\"Interface: {}\", desc_str.str());\n }\n }\n DESC_END => {\n let desc_end = ptr::read(desc_cfg_buf.offset(i) as *const EndpointDescriptor);\n \/\/debugln!(\"{:#?}\", desc_end);\n\n let endpoint = desc_end.address & 0xF;\n let in_len = desc_end.max_packet_size as usize;\n\n if hid {\n let this = self as *mut Hci;\n Context::spawn(\"kuhci_hid\".to_string(), box move || {\n debugln!(\"Starting HID driver\");\n\n let in_ptr = memory::alloc(in_len) as *mut u8;\n\n loop {\n for i in 0..in_len as isize {\n ptr::write(in_ptr.offset(i), 0);\n }\n\n if (*this).msg(address, endpoint, Pipe::Isochronous, &[\n Packet::In(&mut slice::from_raw_parts_mut(in_ptr, in_len))\n ]) > 0 {\n let buttons = ptr::read(in_ptr.offset(0) as *const u8) as usize;\n let x = ptr::read(in_ptr.offset(1) as *const u16) as usize;\n let y = ptr::read(in_ptr.offset(3) as *const u16) as usize;\n\n let mode_info = &*VBEMODEINFO;\n let mouse_x = (x * mode_info.xresolution as usize) \/ 32768;\n let mouse_y = (y * mode_info.yresolution as usize) \/ 32768;\n\n let mouse_event = MouseEvent {\n x: cmp::max(0, cmp::min(mode_info.xresolution as i32 - 1, mouse_x as i32)),\n y: cmp::max(0, cmp::min(mode_info.yresolution as i32 - 1, mouse_y as i32)),\n left_button: buttons & 1 == 1,\n middle_button: buttons & 4 == 4,\n right_button: buttons & 2 == 2,\n };\n ::env().events.send(mouse_event.to_event());\n }\n\n Duration::new(0, 10 * time::NANOS_PER_MILLI).sleep();\n }\n });\n }\n }\n DESC_HID => {\n \/\/let desc_hid = &*(desc_cfg_buf.offset(i) as *const HIDDescriptor);\n \/\/debugln!(\"{:#?}\", desc_hid);\n hid = true;\n }\n _ => {\n debugln!(\"Unknown Descriptor Length {} Type {:X}\", length as usize, descriptor_type);\n }\n }\n i += length as isize;\n }\n\n memory::unalloc(desc_cfg_buf as usize);\n }\n }\n}\n<commit_msg>Jump out of HID driver<commit_after>use arch::context::Context;\nuse arch::memory;\n\nuse collections::string::ToString;\n\nuse common::event::MouseEvent;\nuse common::time::{self, Duration};\n\nuse core::{cmp, mem, ptr, slice};\n\nuse graphics::display::VBEMODEINFO;\n\nuse super::{Packet, Pipe, Setup};\nuse super::desc::*;\n\npub trait Hci {\n fn msg(&mut self, address: u8, endpoint: u8, pipe: Pipe, msgs: &[Packet]) -> 
usize;\n\n fn descriptor(&mut self,\n address: u8,\n descriptor_type: u8,\n descriptor_index: u8,\n descriptor_ptr: usize,\n descriptor_len: usize) {\n self.msg(address, 0, Pipe::Control, &[\n Packet::Setup(&Setup::get_descriptor(descriptor_type, descriptor_index, 0, descriptor_len as u16)),\n Packet::In(&mut unsafe { slice::from_raw_parts_mut(descriptor_ptr as *mut u8, descriptor_len as usize) }),\n Packet::Out(&[])\n ]);\n }\n\n unsafe fn device(&mut self, address: u8) where Self: Sized + 'static {\n self.msg(0, 0, Pipe::Control, &[\n Packet::Setup(&Setup::set_address(address)),\n Packet::In(&mut [])\n ]);\n\n let mut desc_dev = box DeviceDescriptor::default();\n self.descriptor(address,\n DESC_DEV,\n 0,\n (&mut *desc_dev as *mut DeviceDescriptor) as usize,\n mem::size_of_val(&*desc_dev));\n \/\/debugln!(\"{:#?}\", *desc_dev);\n\n if desc_dev.manufacturer_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.manufacturer_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Manufacturer: {}\", desc_str.str());\n }\n\n if desc_dev.product_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.product_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Product: {}\", desc_str.str());\n }\n\n if desc_dev.serial_string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_dev.serial_string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n debugln!(\"Serial: {}\", desc_str.str());\n }\n\n for configuration in 0..(*desc_dev).configurations {\n let desc_cfg_len = 1023;\n let desc_cfg_buf = memory::alloc(desc_cfg_len) as *mut u8;\n for i in 0..desc_cfg_len as isize {\n ptr::write(desc_cfg_buf.offset(i), 0);\n }\n self.descriptor(address,\n DESC_CFG,\n configuration,\n desc_cfg_buf as usize,\n desc_cfg_len);\n\n let desc_cfg = ptr::read(desc_cfg_buf as *const ConfigDescriptor);\n \/\/debugln!(\"{:#?}\", desc_cfg);\n\n if desc_cfg.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_cfg.string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n \/\/debugln!(\"Configuration: {}\", desc_str.str());\n }\n\n let mut hid = false;\n\n let mut i = desc_cfg.length as isize;\n while i < desc_cfg.total_length as isize {\n let length = ptr::read(desc_cfg_buf.offset(i));\n let descriptor_type = ptr::read(desc_cfg_buf.offset(i + 1));\n match descriptor_type {\n DESC_INT => {\n let desc_int = ptr::read(desc_cfg_buf.offset(i) as *const InterfaceDescriptor);\n \/\/debugln!(\"{:#?}\", desc_int);\n\n if desc_int.string > 0 {\n let mut desc_str = box StringDescriptor::default();\n self.descriptor(address,\n DESC_STR,\n desc_int.string,\n (&mut *desc_str as *mut StringDescriptor) as usize,\n mem::size_of_val(&*desc_str));\n \/\/debugln!(\"Interface: {}\", desc_str.str());\n }\n }\n DESC_END => {\n let desc_end = ptr::read(desc_cfg_buf.offset(i) as *const EndpointDescriptor);\n \/\/debugln!(\"{:#?}\", desc_end);\n\n let endpoint = desc_end.address & 0xF;\n let in_len = desc_end.max_packet_size as usize;\n\n if hid {\n let this = self as *mut Hci;\n Context::spawn(\"kuhci_hid\".to_string(), box move || {\n debugln!(\"Starting HID driver\");\n\n let in_ptr = memory::alloc(in_len) as *mut u8;\n\n loop 
{\n for i in 0..in_len as isize {\n ptr::write(in_ptr.offset(i), 0);\n }\n\n if (*this).msg(address, endpoint, Pipe::Isochronous, &[\n Packet::In(&mut slice::from_raw_parts_mut(in_ptr, in_len))\n ]) > 0 {\n let buttons = ptr::read(in_ptr.offset(0) as *const u8) as usize;\n let x = ptr::read(in_ptr.offset(1) as *const u16) as usize;\n let y = ptr::read(in_ptr.offset(3) as *const u16) as usize;\n\n let mode_info = &*VBEMODEINFO;\n let mouse_x = (x * mode_info.xresolution as usize) \/ 32768;\n let mouse_y = (y * mode_info.yresolution as usize) \/ 32768;\n\n let mouse_event = MouseEvent {\n x: cmp::max(0, cmp::min(mode_info.xresolution as i32 - 1, mouse_x as i32)),\n y: cmp::max(0, cmp::min(mode_info.yresolution as i32 - 1, mouse_y as i32)),\n left_button: buttons & 1 == 1,\n middle_button: buttons & 4 == 4,\n right_button: buttons & 2 == 2,\n };\n ::env().events.send(mouse_event.to_event());\n }\n\n Duration::new(0, 10 * time::NANOS_PER_MILLI).sleep();\n\n debugln!(\"HID Support disabled\");\n break;\n }\n });\n }\n }\n DESC_HID => {\n \/\/let desc_hid = &*(desc_cfg_buf.offset(i) as *const HIDDescriptor);\n \/\/debugln!(\"{:#?}\", desc_hid);\n hid = true;\n }\n _ => {\n debugln!(\"Unknown Descriptor Length {} Type {:X}\", length as usize, descriptor_type);\n }\n }\n i += length as isize;\n }\n\n memory::unalloc(desc_cfg_buf as usize);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ aux-build:test_comm.rs\n\nuse test_comm;\n\nfn main() {\n let p = test_comm::port();\n \n alt none::<int> {\n none {}\n some(_) {\n if test_comm::recv(p) == 0 {\n #error(\"floop\");\n }\n else {\n #error(\"bloop\");\n }\n }}\n}<commit_msg>Forgot to xfail-fast this<commit_after>\/\/ xfail-fast\n\/\/ aux-build:test_comm.rs\n\nuse test_comm;\n\nfn main() {\n let p = test_comm::port();\n \n alt none::<int> {\n none {}\n some(_) {\n if test_comm::recv(p) == 0 {\n #error(\"floop\");\n }\n else {\n #error(\"bloop\");\n }\n }}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>start graphics and input implementation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test dangling ZST deref<commit_after>fn main() {\n let p = {\n let b = Box::new(42);\n &*b as *const i32 as *const ()\n };\n let _x = unsafe { *p }; \/\/~ ERROR dangling pointer was dereferenced\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! 
Enviroment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(dir) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n \/\/ Return the current path\n get_dir(\".\/\")\n}\n\n\/\/\/ Method to return the home directory\npub fn home_dir() -> Result<PathBuf> {\n get_dir(\"\/home\/\")\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n File::open(&path.as_ref().inner)\n } else {\n File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let path_c = path_str.to_string() + \"\\0\";\n match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n Ok(_) => Ok(()),\n Err(err) => Err(err),\n }\n } else {\n Err(Error::new(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n Ok(\"This is code filler\".to_string())\n}\n<commit_msg>Fixed a mistake about method name<commit_after>\/\/! 
Enviroment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(dir) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n \/\/ Return the current path\n get_path_from(\".\/\")\n}\n\n\/\/\/ Method to return the home directory\npub fn home_dir() -> Result<PathBuf> {\n get_path_from(\"\/home\/\")\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n File::open(&path.as_ref().inner)\n } else {\n File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let path_c = path_str.to_string() + \"\\0\";\n match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n Ok(_) => Ok(()),\n Err(err) => Err(err),\n }\n } else {\n Err(Error::new(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n Ok(\"This is code filler\".to_string())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! 
Enviroment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(dir) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n File::open(&path.as_ref().inner)\n } else {\n File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let path_c = path_str.to_string() + \"\\0\";\n match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n Ok(_) => Ok(()),\n Err(err) => Err(err),\n }\n } else {\n Err(Error::new(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n Ok(\"This is code filler\".to_string())\n}\n<commit_msg>current_dir refactoring<commit_after>\/\/! 
Enviroment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n i: usize\n}\n\nimpl Iterator for Args {\n \/\/Yes, this is supposed to be String, do not change it!\n \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n type Item = String;\n fn next(&mut self) -> Option<String> {\n if let Some(arg) = unsafe { (*_args).get(self.i) } {\n self.i += 1;\n Some(arg.to_string())\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n let len = if self.i <= unsafe { (*_args).len() } {\n unsafe { (*_args).len() - self.i }\n } else {\n 0\n };\n (len, Some(len))\n }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n Args {\n i: 0\n }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n if _args as usize > 0 {\n drop(Box::from_raw(_args));\n }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, None will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n match File::open(dir) {\n Ok(file) => {\n match file.path() {\n Ok(path) => Ok(path),\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n \/\/ Return the current path\n get_dir(\".\/\")\n}\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n File::open(&path.as_ref().inner)\n } else {\n File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n };\n\n match file_result {\n Ok(file) => {\n match file.path() {\n Ok(path) => {\n if let Some(path_str) = path.to_str() {\n let path_c = path_str.to_string() + \"\\0\";\n match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n Ok(_) => Ok(()),\n Err(err) => Err(err),\n }\n } else {\n Err(Error::new(ENOENT))\n }\n }\n Err(err) => Err(err),\n }\n }\n Err(err) => Err(err),\n }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n Ok(\"This is code filler\".to_string())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Unsafe casting functions\n\nuse sys;\nuse unstable::intrinsics;\n\n\/\/\/ Casts the value at `src` to U. 
The two types must have the same length.\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n let mut dest: U = intrinsics::uninit();\n {\n let dest_ptr: *mut u8 = transmute(&mut dest);\n let src_ptr: *u8 = transmute(src);\n intrinsics::memmove64(dest_ptr,\n src_ptr,\n sys::size_of::<U>() as u64);\n }\n dest\n}\n\n\/**\n * Move a thing into the void\n *\n * The forget function will take ownership of the provided value but neglect\n * to run any required cleanup or memory-management operations on it. This\n * can be used for various acts of magick, particularly when using\n * reinterpret_cast on pointer types.\n *\/\n#[inline(always)]\npub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }\n\n\/**\n * Force-increment the reference count on a shared box. If used\n * carelessly, this can leak the box. Use this in conjunction with transmute\n * and\/or reinterpret_cast when such calls would otherwise scramble a box's\n * reference count\n *\/\npub unsafe fn bump_box_refcount<T>(t: @T) { forget(t); }\n\n\/**\n * Transform a value of one type into a value of another type.\n * Both types must have the same size and alignment.\n *\n * # Example\n *\n * assert!(transmute(\"L\") == ~[76u8, 0u8]);\n *\/\n#[inline(always)]\npub unsafe fn transmute<L, G>(thing: L) -> G {\n intrinsics::transmute(thing)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_mut<'a,T>(ptr: &'a T) -> &'a mut T { transmute(ptr) }\n\n\/\/\/ Coerce a mutable reference to be immutable.\n#[inline(always)]\npub unsafe fn transmute_immut<'a,T>(ptr: &'a mut T) -> &'a T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce a borrowed pointer to have an arbitrary associated region.\n#[inline(always)]\npub unsafe fn transmute_region<'a,'b,T>(ptr: &'a T) -> &'b T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_mut_unsafe<T>(ptr: *const T) -> *mut T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_immut_unsafe<T>(ptr: *const T) -> *T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce a borrowed mutable pointer to have an arbitrary associated region.\n#[inline(always)]\npub unsafe fn transmute_mut_region<'a,'b,T>(ptr: &'a mut T) -> &'b mut T {\n transmute(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_lifetime<'a,S,T>(_ptr: &'a S, ptr: &T) -> &'a T {\n transmute_region(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_mut_lifetime<'a,S,T>(_ptr: &'a mut S, ptr: &mut T) -> &'a mut T {\n transmute_mut_region(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_lifetime_vec<'a,S,T>(_ptr: &'a [S], ptr: &T) -> &'a T {\n transmute_region(ptr)\n}\n\n\n\/****************************************************************************\n * Tests\n ****************************************************************************\/\n\n#[cfg(test)]\nmod tests {\n use cast::{bump_box_refcount, transmute};\n\n #[test]\n fn test_transmute_copy() {\n assert_eq!(1u, unsafe { ::cast::transmute_copy(&1) });\n }\n\n #[test]\n fn test_bump_box_refcount() {\n unsafe {\n let box = @~\"box box box\"; \/\/ refcount 1\n bump_box_refcount(box); \/\/ refcount 2\n let ptr: *int = transmute(box); \/\/ refcount 2\n let _box1: @~str = ::cast::transmute_copy(&ptr);\n let _box2: @~str = 
::cast::transmute_copy(&ptr);\n assert!(*_box1 == ~\"box box box\");\n assert!(*_box2 == ~\"box box box\");\n \/\/ Will destroy _box1 and _box2. Without the bump, this would\n \/\/ use-after-free. With too many bumps, it would leak.\n }\n }\n\n #[test]\n fn test_transmute() {\n use managed::raw::BoxRepr;\n unsafe {\n let x = @100u8;\n let x: *BoxRepr = transmute(x);\n assert!((*x).data == 100);\n let _x: @int = transmute(x);\n }\n }\n\n #[test]\n fn test_transmute2() {\n unsafe {\n assert_eq!(~[76u8, 0u8], transmute(~\"L\"));\n }\n }\n}\n<commit_msg>make transmute_copy use memcpy, and inline it<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Unsafe casting functions\n\nuse sys;\nuse unstable::intrinsics;\n\n\/\/\/ Casts the value at `src` to U. The two types must have the same length.\n#[cfg(stage0)]\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n let mut dest: U = intrinsics::uninit();\n {\n let dest_ptr: *mut u8 = transmute(&mut dest);\n let src_ptr: *u8 = transmute(src);\n intrinsics::memmove64(dest_ptr,\n src_ptr,\n sys::size_of::<U>() as u64);\n }\n dest\n}\n\n#[cfg(target_word_size = \"32\", not(stage0))]\n#[inline(always)]\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n let mut dest: U = intrinsics::uninit();\n let dest_ptr: *mut u8 = transmute(&mut dest);\n let src_ptr: *u8 = transmute(src);\n intrinsics::memcpy32(dest_ptr, src_ptr, sys::size_of::<U>() as u64);\n dest\n}\n\n#[cfg(target_word_size = \"64\", not(stage0))]\n#[inline(always)]\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n let mut dest: U = intrinsics::uninit();\n let dest_ptr: *mut u8 = transmute(&mut dest);\n let src_ptr: *u8 = transmute(src);\n intrinsics::memcpy64(dest_ptr, src_ptr, sys::size_of::<U>() as u64);\n dest\n}\n\n\/**\n * Move a thing into the void\n *\n * The forget function will take ownership of the provided value but neglect\n * to run any required cleanup or memory-management operations on it. This\n * can be used for various acts of magick, particularly when using\n * reinterpret_cast on pointer types.\n *\/\n#[inline(always)]\npub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }\n\n\/**\n * Force-increment the reference count on a shared box. If used\n * carelessly, this can leak the box. 
Use this in conjunction with transmute\n * and\/or reinterpret_cast when such calls would otherwise scramble a box's\n * reference count\n *\/\npub unsafe fn bump_box_refcount<T>(t: @T) { forget(t); }\n\n\/**\n * Transform a value of one type into a value of another type.\n * Both types must have the same size and alignment.\n *\n * # Example\n *\n * assert!(transmute(\"L\") == ~[76u8, 0u8]);\n *\/\n#[inline(always)]\npub unsafe fn transmute<L, G>(thing: L) -> G {\n intrinsics::transmute(thing)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_mut<'a,T>(ptr: &'a T) -> &'a mut T { transmute(ptr) }\n\n\/\/\/ Coerce a mutable reference to be immutable.\n#[inline(always)]\npub unsafe fn transmute_immut<'a,T>(ptr: &'a mut T) -> &'a T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce a borrowed pointer to have an arbitrary associated region.\n#[inline(always)]\npub unsafe fn transmute_region<'a,'b,T>(ptr: &'a T) -> &'b T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_mut_unsafe<T>(ptr: *const T) -> *mut T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce an immutable reference to be mutable.\n#[inline(always)]\npub unsafe fn transmute_immut_unsafe<T>(ptr: *const T) -> *T {\n transmute(ptr)\n}\n\n\/\/\/ Coerce a borrowed mutable pointer to have an arbitrary associated region.\n#[inline(always)]\npub unsafe fn transmute_mut_region<'a,'b,T>(ptr: &'a mut T) -> &'b mut T {\n transmute(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_lifetime<'a,S,T>(_ptr: &'a S, ptr: &T) -> &'a T {\n transmute_region(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_mut_lifetime<'a,S,T>(_ptr: &'a mut S, ptr: &mut T) -> &'a mut T {\n transmute_mut_region(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline(always)]\npub unsafe fn copy_lifetime_vec<'a,S,T>(_ptr: &'a [S], ptr: &T) -> &'a T {\n transmute_region(ptr)\n}\n\n\n\/****************************************************************************\n * Tests\n ****************************************************************************\/\n\n#[cfg(test)]\nmod tests {\n use cast::{bump_box_refcount, transmute};\n\n #[test]\n fn test_transmute_copy() {\n assert_eq!(1u, unsafe { ::cast::transmute_copy(&1) });\n }\n\n #[test]\n fn test_bump_box_refcount() {\n unsafe {\n let box = @~\"box box box\"; \/\/ refcount 1\n bump_box_refcount(box); \/\/ refcount 2\n let ptr: *int = transmute(box); \/\/ refcount 2\n let _box1: @~str = ::cast::transmute_copy(&ptr);\n let _box2: @~str = ::cast::transmute_copy(&ptr);\n assert!(*_box1 == ~\"box box box\");\n assert!(*_box2 == ~\"box box box\");\n \/\/ Will destroy _box1 and _box2. Without the bump, this would\n \/\/ use-after-free. 
With too many bumps, it would leak.\n }\n }\n\n #[test]\n fn test_transmute() {\n use managed::raw::BoxRepr;\n unsafe {\n let x = @100u8;\n let x: *BoxRepr = transmute(x);\n assert!((*x).data == 100);\n let _x: @int = transmute(x);\n }\n }\n\n #[test]\n fn test_transmute2() {\n unsafe {\n assert_eq!(~[76u8, 0u8], transmute(~\"L\"));\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple service, which uses the time oracle<commit_after>\/\/ Copyright 2017 The Exonum Team\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Service, which uses the time oracle.\n\n#![deny(missing_debug_implementations, missing_docs)]\n\n#[macro_use]\nextern crate exonum;\nextern crate exonum_time;\n#[macro_use]\nextern crate exonum_testkit;\nextern crate serde_json;\nextern crate serde;\n\nuse std::time::{UNIX_EPOCH, SystemTime, Duration};\nuse exonum::blockchain::{Service, Transaction};\nuse exonum::crypto::{gen_keypair, Hash, PublicKey};\nuse exonum::encoding;\nuse exonum::helpers::Height;\nuse exonum::messages::{Message, RawTransaction};\nuse exonum::storage::{Fork, ProofMapIndex, Snapshot};\nuse exonum_time::{TimeService, TimeSchema, TimeProvider, Time, MockTimeProvider};\nuse exonum_testkit::TestKitBuilder;\n\n\/\/\/ Marker service id.\nconst SERVICE_ID: u16 = 128;\n\/\/\/ Marker service name.\nconst SERVICE_NAME: &str = \"marker\";\n\/\/\/ `TxMarker` transaction id.\nconst TX_MARKER_ID: u16 = 0;\n\n\/\/\/ Marker service database schema.\n#[derive(Debug)]\npub struct MarkerSchema<T> {\n view: T,\n}\n\nimpl<T: AsRef<Snapshot>> MarkerSchema<T> {\n \/\/\/ Constructs schema for the given `snapshot`.\n pub fn new(view: T) -> Self {\n MarkerSchema { view }\n }\n\n \/\/\/ Returns the table that stores `i32` value for every node.\n pub fn marks(&self) -> ProofMapIndex<&Snapshot, PublicKey, i32> {\n ProofMapIndex::new(format!(\"{}.marks\", SERVICE_NAME), self.view.as_ref())\n }\n\n \/\/\/ Returns hashes for stored table.\n pub fn state_hash(&self) -> Vec<Hash> {\n vec![self.marks().root_hash()]\n }\n}\n\n\nimpl<'a> MarkerSchema<&'a mut Fork> {\n \/\/\/ Mutable reference to the ['marks'][1] index.\n \/\/\/\n \/\/\/ [1]: struct.MarkerSchema.html#method.marks\n pub fn marks_mut(&mut self) -> ProofMapIndex<&mut Fork, PublicKey, i32> {\n ProofMapIndex::new(format!(\"{}.marks\", SERVICE_NAME), self.view)\n }\n}\n\nmessage! 
{\n \/\/\/ Transaction, which must be executed no later than the specified time (field `time`).\n struct TxMarker {\n const TYPE = SERVICE_ID;\n const ID = TX_MARKER_ID;\n \/\/\/ Node's public key.\n from: &PublicKey,\n \/\/\/ Mark value\n mark: i32,\n \/\/\/ Time\n time: SystemTime,\n }\n}\n\nimpl Transaction for TxMarker {\n fn verify(&self) -> bool {\n self.verify_signature(self.from())\n }\n\n fn execute(&self, view: &mut Fork) {\n {\n \/\/ Import schema.\n let time_schema = TimeSchema::new(&view);\n \/\/ The time in the transaction should be less than in the blockchain.\n match time_schema.time().get() {\n Some(ref current_time) if current_time.time() > self.time() => {\n return;\n }\n _ => {}\n }\n }\n \/\/ Mark the node whose public key is specified in the transaction.\n let mut schema = MarkerSchema::new(view);\n schema.marks_mut().put(self.from(), self.mark());\n }\n}\n\nstruct MarkerService;\n\nimpl Service for MarkerService {\n fn service_name(&self) -> &'static str {\n SERVICE_NAME\n }\n\n fn state_hash(&self, snapshot: &Snapshot) -> Vec<Hash> {\n let schema = MarkerSchema::new(snapshot);\n schema.state_hash()\n }\n\n fn service_id(&self) -> u16 {\n SERVICE_ID\n }\n\n fn tx_from_raw(&self, raw: RawTransaction) -> Result<Box<Transaction>, encoding::Error> {\n match raw.message_type() {\n TX_MARKER_ID => Ok(Box::new(TxMarker::from_raw(raw)?)),\n _ => {\n let error =\n encoding::Error::IncorrectMessageType { message_type: raw.message_type() };\n Err(error)\n }\n }\n }\n}\n\nfn main() {\n let mock_provider = MockTimeProvider::new();\n \/\/ Create testkit for network with one validator.\n let mut testkit = TestKitBuilder::validator()\n .with_service(MarkerService)\n .with_service(TimeService::with_provider(\n Box::new(mock_provider.clone()) as Box<TimeProvider>,\n ))\n .create();\n \/\/ Set the time value to `UNIX_EPOCH + Duration::new(10, 0)`.\n mock_provider.set_time(UNIX_EPOCH + Duration::new(10, 0));\n \/\/ Create two blocks to set the time in the blockchain.\n testkit.create_blocks_until(Height(2));\n\n let snapshot = testkit.snapshot();\n let time_schema = TimeSchema::new(&snapshot);\n assert_eq!(\n time_schema.time().get(),\n Some(Time::new(UNIX_EPOCH + Duration::new(10, 0)))\n );\n\n \/\/ Create few transactions.\n let keypair1 = gen_keypair();\n let keypair2 = gen_keypair();\n let keypair3 = gen_keypair();\n let tx1 = TxMarker::new(\n &keypair1.0,\n 1,\n UNIX_EPOCH + Duration::new(10, 0),\n &keypair1.1,\n );\n let tx2 = TxMarker::new(\n &keypair2.0,\n 2,\n UNIX_EPOCH + Duration::new(20, 0),\n &keypair2.1,\n );\n let tx3 = TxMarker::new(\n &keypair3.0,\n 3,\n UNIX_EPOCH + Duration::new(5, 0),\n &keypair3.1,\n );\n testkit.create_block_with_transactions(txvec![tx1, tx2, tx3]);\n\n \/\/ Check results.\n let snapshot = testkit.snapshot();\n let schema = MarkerSchema::new(snapshot);\n assert_eq!(schema.marks().get(&keypair1.0), Some(1));\n assert_eq!(schema.marks().get(&keypair2.0), Some(2));\n assert_eq!(schema.marks().get(&keypair3.0), None);\n\n \/\/ And create one more transaction.\n let tx4 = TxMarker::new(\n &keypair3.0,\n 4,\n UNIX_EPOCH + Duration::new(15, 0),\n &keypair3.1,\n );\n testkit.create_block_with_transactions(txvec![tx4]);\n\n \/\/ And check result.\n let snapshot = testkit.snapshot();\n let schema = MarkerSchema::new(snapshot);\n assert_eq!(schema.marks().get(&keypair3.0), Some(4));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a basic test for line programs<commit_after>use pdb::{PDB, FallibleIterator, Rva};\n\n#[test]\nfn test_module_lines() 
{\n let file = std::fs::File::open(\"fixtures\/self\/foo.pdb\").expect(\"opening file\");\n let mut pdb = PDB::open(file).expect(\"parse pdb\");\n\n let address_map = pdb.address_map().expect(\"address map\");\n let string_table = pdb.string_table().expect(\"string table\");\n\n let dbi = pdb.debug_information().expect(\"dbi\");\n let mut modules = dbi.modules().expect(\"modules\");\n let module = modules.next().expect(\"parse module\").expect(\"no module\");\n let module_info = pdb.module_info(&module).expect(\"module info\");\n\n let line_program = module_info.line_program().expect(\"line program\");\n let mut lines = line_program.lines();\n let line_info = lines.next().expect(\"parse line info\").expect(\"no lines\");\n\n let rva = line_info.offset.to_rva(&address_map).expect(\"line rva\");\n let file_info = line_program.get_file_info(line_info.file_index).expect(\"file info\");\n let file_name = file_info.name.to_string_lossy(&string_table).expect(\"file name\");\n\n assert_eq!(line_info.line_start, 29);\n assert_eq!(line_info.column_start, Some(0)); \/\/ looks like useless column info\n assert_eq!(rva, Rva(0x64f0));\n assert_eq!(file_name, \"c:\\\\users\\\\user\\\\desktop\\\\self\\\\foo.cpp\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test(tokenizer): add edge cases<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Derive Debug for ObjectStream<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>This subcommand should be \"ingest raw timetable\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cli args parsing<commit_after>fn help() {\n println!(\"Usage:\n args <string> - String is the answer\n args <increase\/decrease> <integer>\n \");\n}\n\nfn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n match args.len() {\n\n 1 => {\n println!(\"No params passed!\");\n help();\n },\n\n 2 => {\n match args[1].parse() {\n Ok(42) => println!(\"This is the answer\"),\n _ => println!(\"This is not the answer\")\n }\n },\n\n 3 => {\n let cmd = &args[1];\n let number: i32 = match args[2].parse() {\n Ok(n) => n,\n Err(_) => {\n println!(\"The arg: '{}' does not seem to be a number\", args[2]);\n return;\n }\n };\n\n match &cmd[..] 
{\n \"increase\" => println!(\"number: {}\", number + 1),\n \"decrease\" => println!(\"number: {}\",number - 1),\n _ => {\n println!(\"invalid command\");\n help();\n }\n };\n\n }\n\n _ => {\n help();\n }\n };\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Microbenchmark for the smallintmap library\n\nuse std;\nimport std::smallintmap;\nimport std::smallintmap::smallintmap;\nimport io::WriterUtil;\n\nfn append_sequential(min: uint, max: uint, map: smallintmap<uint>) {\n for uint::range(min, max) |i| {\n map.insert(i, i + 22u);\n }\n}\n\nfn check_sequential(min: uint, max: uint, map: smallintmap<uint>) {\n for uint::range(min, max) |i| {\n assert map.get(i) == i + 22u;\n }\n}\n\nfn main(args: ~[~str]) {\n let args = if os::getenv(~\"RUST_BENCH\").is_some() {\n ~[~\"\", ~\"100000\", ~\"100\"]\n } else if args.len() <= 1u {\n ~[~\"\", ~\"10000\", ~\"50\"]\n } else {\n args\n };\n let max = uint::from_str(args[1]).get();\n let rep = uint::from_str(args[2]).get();\n\n let mut checkf = 0.0;\n let mut appendf = 0.0;\n\n for uint::range(0u, rep) |_r| {\n let map = smallintmap::mk();\n let start = std::time::precise_time_s();\n append_sequential(0u, max, map);\n let mid = std::time::precise_time_s();\n check_sequential(0u, max, map);\n let end = std::time::precise_time_s();\n\n checkf += (end - mid) as float;\n appendf += (mid - start) as float;\n }\n\n let maxf = max as float;\n\n io::stdout().write_str(fmt!(\"insert(): %? seconds\\n\", checkf));\n io::stdout().write_str(fmt!(\" : %f op\/sec\\n\", maxf\/checkf));\n io::stdout().write_str(fmt!(\"get() : %? seconds\\n\", appendf));\n io::stdout().write_str(fmt!(\" : %f op\/sec\\n\", maxf\/appendf));\n}\n<commit_msg>test: Fix smallintmap test<commit_after>\/\/ Microbenchmark for the smallintmap library\n\nuse std;\nimport std::smallintmap;\nimport std::smallintmap::SmallIntMap;\nimport io::WriterUtil;\n\nfn append_sequential(min: uint, max: uint, map: SmallIntMap<uint>) {\n for uint::range(min, max) |i| {\n map.insert(i, i + 22u);\n }\n}\n\nfn check_sequential(min: uint, max: uint, map: SmallIntMap<uint>) {\n for uint::range(min, max) |i| {\n assert map.get(i) == i + 22u;\n }\n}\n\nfn main(args: ~[~str]) {\n let args = if os::getenv(~\"RUST_BENCH\").is_some() {\n ~[~\"\", ~\"100000\", ~\"100\"]\n } else if args.len() <= 1u {\n ~[~\"\", ~\"10000\", ~\"50\"]\n } else {\n args\n };\n let max = uint::from_str(args[1]).get();\n let rep = uint::from_str(args[2]).get();\n\n let mut checkf = 0.0;\n let mut appendf = 0.0;\n\n for uint::range(0u, rep) |_r| {\n let map = smallintmap::mk();\n let start = std::time::precise_time_s();\n append_sequential(0u, max, map);\n let mid = std::time::precise_time_s();\n check_sequential(0u, max, map);\n let end = std::time::precise_time_s();\n\n checkf += (end - mid) as float;\n appendf += (mid - start) as float;\n }\n\n let maxf = max as float;\n\n io::stdout().write_str(fmt!(\"insert(): %? seconds\\n\", checkf));\n io::stdout().write_str(fmt!(\" : %f op\/sec\\n\", maxf\/checkf));\n io::stdout().write_str(fmt!(\"get() : %? seconds\\n\", appendf));\n io::stdout().write_str(fmt!(\" : %f op\/sec\\n\", maxf\/appendf));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-test\n\/\/ xfail-fast\nextern mod std;\nuse std::arc;\nfn dispose(+_x: arc::ARC<bool>) unsafe { }\n\npub fn main() {\n let p = arc::arc(true);\n let x = Some(p);\n match x {\n Some(z) => { dispose(z); },\n None => fail!()\n }\n}\n<commit_msg>Fix xfail'd ARC test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-fast\nextern mod std;\nuse std::arc;\nfn dispose(+_x: arc::ARC<bool>) { unsafe { } }\n\npub fn main() {\n let p = arc::ARC(true);\n let x = Some(p);\n match x {\n Some(z) => { dispose(z); },\n None => fail!()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #86349 - yerke:add-test-for-issue-78632, r=Mark-Simulacrum<commit_after>\/\/ check-pass\n\/\/\n\/\/ Regression test for issue #78632\n\n#![crate_type = \"lib\"]\n\npub trait Corge<T> {\n type Fred;\n}\n\nimpl Corge<u8> for () {\n type Fred = u32;\n}\n\npub trait Waldo {\n type Quax;\n}\n\nimpl Waldo for u32 {\n type Quax = u8;\n}\n\npub trait Grault\nwhere\n (): Corge<Self::Thud>,\n{\n type Thud;\n fn bar(_: <() as Corge<Self::Thud>>::Fred) {}\n}\n\nimpl<T> Grault for T\nwhere\n T: Waldo,\n (): Corge<T::Quax>,\n <() as Corge<T::Quax>>::Fred: Waldo,\n{\n type Thud = u8;\n}\n\npub trait Plugh<I> {\n fn baz();\n}\n\n#[derive(Copy, Clone, Debug)]\npub struct Qiz<T> {\n foo: T,\n}\n\nimpl<T> Plugh<<() as Corge<T::Thud>>::Fred> for Qiz<T>\nwhere\n T: Grault,\n (): Corge<T::Thud>,\n{\n fn baz() {}\n}\n\npub fn test() {\n <Qiz<u32> as Plugh<u32>>::baz();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: add testcase, close #4929 which was already fixed.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn make_adder(x: int) -> @fn(int) -> int { |y| x + y }\npub fn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #36<commit_after>extern mod euler;\nuse euler::calc::{ num_to_digits };\n\nfn is_palindromic(n: uint, radix: uint) -> bool {\n let digits = num_to_digits(n, radix);\n for uint::range(0, digits.len() \/ 2) |i| {\n if digits[i] != digits[digits.len() - 1 - i] { return false;}\n }\n return true;\n}\n\nfn to_palindromic(n: uint, radix: uint, is_odd: bool) -> uint{\n let digits = num_to_digits(n, radix);\n let mut num = 0;\n for digits.each |d| { num = num * radix + *d; }\n let start = if is_odd { 1 } else { 0 };\n for uint::range(start, digits.len()) |i| {\n num = num * radix + digits[digits.len() - 1 - i];\n }\n return num;\n}\n\nfn main() {\n let order_array = &[ 1, 10, 100, 1000, 1000, 10000 ];\n let mut sum = 0;\n for (order_array.len() - 1).timesi |i| {\n for [true, false].each |b| {\n let (start, end) = (order_array[i], order_array[i + 1]);\n for uint::range(start, end) |n| {\n let n = to_palindromic(n, 10, *b);\n if n >= 1000000 { break; }\n if is_palindromic(n, 2) {\n io::println(fmt!(\"%u = %s\", n, uint::to_str(n, 2)));\n sum += n;\n }\n }\n }\n }\n\n io::println(fmt!(\"answer: %u\", sum));\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Consistency<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test that &* of a dangling (and even unaligned) ptr is okay<commit_after>fn main() {\n let x = 2usize as *const u32;\n let _y = unsafe { &*x as *const u32 };\n\n let x = 0usize as *const u32;\n let _y = unsafe { &*x as *const u32 };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add drawing example<commit_after>\/\/! An example using the drawing functions. 
Writes to the user-provided target file.\n\nextern crate image;\nextern crate imageproc;\n\nuse std::env;\nuse std::path::Path;\nuse image::{Rgb, RgbImage};\nuse imageproc::rect::Rect;\nuse imageproc::drawing::{\n draw_cross_mut,\n draw_line_segment_mut,\n draw_hollow_rect_mut,\n draw_filled_rect_mut\n};\n\nfn main() {\n\n let arg = if env::args().count() == 2 {\n env::args().nth(1).unwrap()\n } else {\n panic!(\"Please enter a target file path\")\n };\n\n let path = Path::new(&arg);\n\n let red = Rgb([255u8, 0u8, 0u8]);\n let green = Rgb([0u8, 255u8, 0u8]);\n let blue = Rgb([0u8, 0u8, 255u8]);\n let white = Rgb([255u8, 255u8, 255u8]);\n\n let mut image = RgbImage::new(200, 200);\n\n \/\/ Draw some crosses within bounds\n draw_cross_mut(&mut image, white, 5, 5);\n draw_cross_mut(&mut image, red, 9, 9);\n draw_cross_mut(&mut image, blue, 9, 5);\n draw_cross_mut(&mut image, green, 5, 9);\n \/\/ Draw a cross totally outside image bounds - does not panic but nothing is rendered\n draw_cross_mut(&mut image, white, 250, 0);\n \/\/ Draw a cross partially out of bounds - the part in bounds is rendered\n draw_cross_mut(&mut image, white, 2, 0);\n\n \/\/ Draw a line segment wholly within bounds\n draw_line_segment_mut(&mut image, (20f32, 12f32), (40f32, 60f32), white);\n \/\/ Draw a line segment totally outside image bounds - does not panic but nothing is rendered\n draw_line_segment_mut(&mut image, (0f32, -30f32), (40f32, -20f32), white);\n \/\/ Draw a line segment partially out of bounds - the part in bounds is rendered\n draw_line_segment_mut(&mut image, (20f32, 180f32), (20f32, 220f32), white);\n\n \/\/ Draw a hollow rect within bounds\n draw_hollow_rect_mut(&mut image, Rect::at(60, 10).of_size(20, 20), white);\n \/\/ Outside bounds\n draw_hollow_rect_mut(&mut image, Rect::at(300, 10).of_size(20, 20), white);\n \/\/ Partially outside bounds\n draw_hollow_rect_mut(&mut image, Rect::at(90, -10).of_size(30, 20), white);\n\n \/\/ Draw a filled rect within bounds\n draw_filled_rect_mut(&mut image, Rect::at(130, 10).of_size(20, 20), white);\n \/\/ Outside bounds\n draw_filled_rect_mut(&mut image, Rect::at(300, 10).of_size(20, 20), white);\n \/\/ Partially outside bounds\n draw_filled_rect_mut(&mut image, Rect::at(180, -10).of_size(30, 20), white);\n\n let _ = image.save(path).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add file, although it is generated<commit_after>\/\/ generated\ninclude!(\"..\/common\/test_carllerche_bytes.rs\");\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:green_heart: Add test for create_rescheduled_items_message<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[link(name = \"rustc\",\n vers = \"0.8-pre\",\n uuid = \"0ce89b41-2f92-459e-bbc1-8f5fe32f16cf\",\n url = \"https:\/\/github.com\/mozilla\/rust\/tree\/master\/src\/rustc\")];\n\n#[comment = \"The Rust compiler\"];\n#[license = \"MIT\/ASL2\"];\n#[crate_type = \"lib\"];\n\n\/\/ Rustc tasks always run on a fixed_stack_segment, so code in this\n\/\/ module can call C functions (in particular, LLVM functions) with\n\/\/ impunity.\n#[allow(cstack)];\n\nextern mod extra;\nextern mod syntax;\n\nuse driver::driver::{host_triple, optgroups, early_error};\nuse driver::driver::{str_input, file_input, build_session_options};\nuse driver::driver::{build_session, build_configuration, parse_pretty};\nuse driver::driver::{PpMode, pretty_print_input, list_metadata};\nuse driver::driver::{compile_input};\nuse driver::session;\nuse middle::lint;\n\nuse std::io;\nuse std::num;\nuse std::os;\nuse std::result;\nuse std::str;\nuse std::task;\nuse std::vec;\nuse extra::getopts::{groups, opt_present};\nuse extra::getopts;\nuse syntax::codemap;\nuse syntax::diagnostic;\n\npub mod middle {\n pub mod trans;\n pub mod ty;\n pub mod subst;\n pub mod resolve;\n pub mod typeck;\n pub mod check_loop;\n pub mod check_match;\n pub mod check_const;\n pub mod lint;\n pub mod borrowck;\n pub mod dataflow;\n pub mod mem_categorization;\n pub mod liveness;\n pub mod kind;\n pub mod freevars;\n pub mod pat_util;\n pub mod region;\n pub mod const_eval;\n pub mod astencode;\n pub mod lang_items;\n pub mod privacy;\n pub mod moves;\n pub mod entry;\n pub mod effect;\n pub mod reachable;\n pub mod graph;\n pub mod cfg;\n pub mod stack_check;\n}\n\npub mod front {\n pub mod config;\n pub mod test;\n pub mod std_inject;\n pub mod assign_node_ids;\n}\n\npub mod back {\n pub mod link;\n pub mod abi;\n pub mod upcall;\n pub mod arm;\n pub mod mips;\n pub mod x86;\n pub mod x86_64;\n pub mod rpath;\n pub mod target_strs;\n}\n\npub mod metadata;\n\npub mod driver;\n\npub mod util {\n pub mod common;\n pub mod ppaux;\n}\n\npub mod lib {\n pub mod llvm;\n}\n\n\/\/ A curious inner module that allows ::std::foo to be available in here for\n\/\/ macros.\n\/*\nmod std {\n pub use std::clone;\n pub use std::cmp;\n pub use std::os;\n pub use std::str;\n pub use std::sys;\n pub use std::to_bytes;\n pub use std::unstable;\n pub use extra::serialize;\n}\n*\/\n\npub fn version(argv0: &str) {\n let vers = match option_env!(\"CFG_VERSION\") {\n Some(vers) => vers,\n None => \"unknown version\"\n };\n printfln!(\"%s %s\", argv0, vers);\n printfln!(\"host: %s\", host_triple());\n}\n\npub fn usage(argv0: &str) {\n let message = fmt!(\"Usage: %s [OPTIONS] INPUT\", argv0);\n printfln!(\"%s\\\nAdditional help:\n -W help Print 'lint' options and default settings\n -Z help Print internal options for debugging rustc\\n\",\n groups::usage(message, optgroups()));\n}\n\npub fn describe_warnings() {\n use extra::sort::Sort;\n println(\"\nAvailable lint options:\n -W <foo> Warn about <foo>\n -A <foo> Allow <foo>\n -D <foo> Deny <foo>\n -F <foo> Forbid <foo> (deny, and deny all overrides)\n\");\n\n let lint_dict = lint::get_lint_dict();\n let mut lint_dict = lint_dict.move_iter()\n .map(|(k, v)| (v, k))\n .collect::<~[(lint::LintSpec, &'static str)]>();\n lint_dict.qsort();\n\n let mut max_key = 0;\n for &(_, name) in lint_dict.iter() {\n max_key = num::max(name.len(), max_key);\n }\n fn padded(max: uint, s: &str) -> ~str {\n str::from_utf8(vec::from_elem(max - s.len(), ' ' 
as u8)) + s\n }\n println(\"\\nAvailable lint checks:\\n\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, \"name\"), \"default\", \"meaning\");\n printfln!(\" %s %7.7s %s\\n\",\n padded(max_key, \"----\"), \"-------\", \"-------\");\n for (spec, name) in lint_dict.move_iter() {\n let name = name.replace(\"_\", \"-\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, name),\n lint::level_to_str(spec.default),\n spec.desc);\n }\n io::println(\"\");\n}\n\npub fn describe_debug_flags() {\n println(\"\\nAvailable debug options:\\n\");\n let r = session::debugging_opts_map();\n for tuple in r.iter() {\n match *tuple {\n (ref name, ref desc, _) => {\n printfln!(\" -Z %-20s -- %s\", *name, *desc);\n }\n }\n }\n}\n\npub fn run_compiler(args: &[~str], demitter: diagnostic::Emitter) {\n \/\/ Don't display log spew by default. Can override with RUST_LOG.\n ::std::logging::console_off();\n\n let mut args = args.to_owned();\n let binary = args.shift().to_managed();\n\n if args.is_empty() { usage(binary); return; }\n\n let matches =\n &match getopts::groups::getopts(args, optgroups()) {\n Ok(m) => m,\n Err(f) => {\n early_error(demitter, getopts::fail_str(f));\n }\n };\n\n if opt_present(matches, \"h\") || opt_present(matches, \"help\") {\n usage(binary);\n return;\n }\n\n \/\/ Display the available lint options if \"-W help\" or only \"-W\" is given.\n let lint_flags = vec::append(getopts::opt_strs(matches, \"W\"),\n getopts::opt_strs(matches, \"warn\"));\n\n let show_lint_options = lint_flags.iter().any(|x| x == &~\"help\") ||\n (opt_present(matches, \"W\") && lint_flags.is_empty());\n\n if show_lint_options {\n describe_warnings();\n return;\n }\n\n let r = getopts::opt_strs(matches, \"Z\");\n if r.iter().any(|x| x == &~\"help\") {\n describe_debug_flags();\n return;\n }\n\n if getopts::opt_maybe_str(matches, \"passes\") == Some(~\"list\") {\n unsafe { lib::llvm::llvm::LLVMRustPrintPasses(); }\n return;\n }\n\n if opt_present(matches, \"v\") || opt_present(matches, \"version\") {\n version(binary);\n return;\n }\n let input = match matches.free.len() {\n 0u => early_error(demitter, ~\"no input filename given\"),\n 1u => {\n let ifile = matches.free[0].as_slice();\n if \"-\" == ifile {\n let src = str::from_utf8(io::stdin().read_whole_stream());\n str_input(src.to_managed())\n } else {\n file_input(Path(ifile))\n }\n }\n _ => early_error(demitter, ~\"multiple input filenames provided\")\n };\n\n let sopts = build_session_options(binary, matches, demitter);\n let sess = build_session(sopts, demitter);\n let odir = getopts::opt_maybe_str(matches, \"out-dir\").map_move(|o| Path(o));\n let ofile = getopts::opt_maybe_str(matches, \"o\").map_move(|o| Path(o));\n let cfg = build_configuration(sess);\n let pretty = do getopts::opt_default(matches, \"pretty\", \"normal\").map_move |a| {\n parse_pretty(sess, a)\n };\n match pretty {\n Some::<PpMode>(ppm) => {\n pretty_print_input(sess, cfg, &input, ppm);\n return;\n }\n None::<PpMode> => {\/* continue *\/ }\n }\n let ls = opt_present(matches, \"ls\");\n if ls {\n match input {\n file_input(ref ifile) => {\n list_metadata(sess, &(*ifile), io::stdout());\n }\n str_input(_) => {\n early_error(demitter, ~\"can not list metadata for stdin\");\n }\n }\n return;\n }\n\n compile_input(sess, cfg, &input, &odir, &ofile);\n}\n\n#[deriving(Eq)]\npub enum monitor_msg {\n fatal,\n done,\n}\n\n\/*\nThis is a sanity check that any failure of the compiler is performed\nthrough the diagnostic module and reported properly - we shouldn't be calling\nplain-old-fail on any 
execution path that might be taken. Since we have\nconsole logging off by default, hitting a plain fail statement would make the\ncompiler silently exit, which would be terrible.\n\nThis method wraps the compiler in a subtask and injects a function into the\ndiagnostic emitter which records when we hit a fatal error. If the task\nfails without recording a fatal error then we've encountered a compiler\nbug and need to present an error.\n*\/\npub fn monitor(f: ~fn(diagnostic::Emitter)) {\n use std::comm::*;\n\n \/\/ XXX: This is a hack for newsched since it doesn't support split stacks.\n \/\/ rustc needs a lot of stack!\n static STACK_SIZE: uint = 6000000;\n\n let (p, ch) = stream();\n let ch = SharedChan::new(ch);\n let ch_capture = ch.clone();\n let mut task_builder = task::task();\n task_builder.supervised();\n\n \/\/ XXX: Hacks on hacks. If the env is trying to override the stack size\n \/\/ then *don't* set it explicitly.\n if os::getenv(\"RUST_MIN_STACK\").is_none() {\n task_builder.opts.stack_size = Some(STACK_SIZE);\n }\n\n match do task_builder.try {\n let ch = ch_capture.clone();\n let ch_capture = ch.clone();\n \/\/ The 'diagnostics emitter'. Every error, warning, etc. should\n \/\/ go through this function.\n let demitter: @fn(Option<(@codemap::CodeMap, codemap::Span)>,\n &str,\n diagnostic::level) =\n |cmsp, msg, lvl| {\n if lvl == diagnostic::fatal {\n ch_capture.send(fatal);\n }\n diagnostic::emit(cmsp, msg, lvl);\n };\n\n struct finally {\n ch: SharedChan<monitor_msg>,\n }\n\n impl Drop for finally {\n fn drop(&mut self) { self.ch.send(done); }\n }\n\n let _finally = finally { ch: ch };\n\n f(demitter);\n\n \/\/ Due reasons explain in #7732, if there was a jit execution context it\n \/\/ must be consumed and passed along to our parent task.\n back::link::jit::consume_engine()\n } {\n result::Ok(_) => { \/* fallthrough *\/ }\n result::Err(_) => {\n \/\/ Task failed without emitting a fatal diagnostic\n if p.recv() == done {\n diagnostic::emit(\n None,\n diagnostic::ice_msg(\"unexpected failure\"),\n diagnostic::error);\n\n let xs = [\n ~\"the compiler hit an unexpected failure path. \\\n this is a bug\",\n ~\"try running with RUST_LOG=rustc=1 \\\n to get further details and report the results \\\n to github.com\/mozilla\/rust\/issues\"\n ];\n for note in xs.iter() {\n diagnostic::emit(None, *note, diagnostic::note)\n }\n }\n \/\/ Fail so the process returns a failure code\n fail!();\n }\n }\n}\n\npub fn main() {\n let args = os::args();\n main_args(args);\n}\n\npub fn main_args(args: &[~str]) {\n let owned_args = args.to_owned();\n do monitor |demitter| {\n run_compiler(owned_args, demitter);\n }\n}\n<commit_msg>Add an extra line before the \"Additional Help\" in rustc's --help output<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[link(name = \"rustc\",\n vers = \"0.8-pre\",\n uuid = \"0ce89b41-2f92-459e-bbc1-8f5fe32f16cf\",\n url = \"https:\/\/github.com\/mozilla\/rust\/tree\/master\/src\/rustc\")];\n\n#[comment = \"The Rust compiler\"];\n#[license = \"MIT\/ASL2\"];\n#[crate_type = \"lib\"];\n\n\/\/ Rustc tasks always run on a fixed_stack_segment, so code in this\n\/\/ module can call C functions (in particular, LLVM functions) with\n\/\/ impunity.\n#[allow(cstack)];\n\nextern mod extra;\nextern mod syntax;\n\nuse driver::driver::{host_triple, optgroups, early_error};\nuse driver::driver::{str_input, file_input, build_session_options};\nuse driver::driver::{build_session, build_configuration, parse_pretty};\nuse driver::driver::{PpMode, pretty_print_input, list_metadata};\nuse driver::driver::{compile_input};\nuse driver::session;\nuse middle::lint;\n\nuse std::io;\nuse std::num;\nuse std::os;\nuse std::result;\nuse std::str;\nuse std::task;\nuse std::vec;\nuse extra::getopts::{groups, opt_present};\nuse extra::getopts;\nuse syntax::codemap;\nuse syntax::diagnostic;\n\npub mod middle {\n pub mod trans;\n pub mod ty;\n pub mod subst;\n pub mod resolve;\n pub mod typeck;\n pub mod check_loop;\n pub mod check_match;\n pub mod check_const;\n pub mod lint;\n pub mod borrowck;\n pub mod dataflow;\n pub mod mem_categorization;\n pub mod liveness;\n pub mod kind;\n pub mod freevars;\n pub mod pat_util;\n pub mod region;\n pub mod const_eval;\n pub mod astencode;\n pub mod lang_items;\n pub mod privacy;\n pub mod moves;\n pub mod entry;\n pub mod effect;\n pub mod reachable;\n pub mod graph;\n pub mod cfg;\n pub mod stack_check;\n}\n\npub mod front {\n pub mod config;\n pub mod test;\n pub mod std_inject;\n pub mod assign_node_ids;\n}\n\npub mod back {\n pub mod link;\n pub mod abi;\n pub mod upcall;\n pub mod arm;\n pub mod mips;\n pub mod x86;\n pub mod x86_64;\n pub mod rpath;\n pub mod target_strs;\n}\n\npub mod metadata;\n\npub mod driver;\n\npub mod util {\n pub mod common;\n pub mod ppaux;\n}\n\npub mod lib {\n pub mod llvm;\n}\n\n\/\/ A curious inner module that allows ::std::foo to be available in here for\n\/\/ macros.\n\/*\nmod std {\n pub use std::clone;\n pub use std::cmp;\n pub use std::os;\n pub use std::str;\n pub use std::sys;\n pub use std::to_bytes;\n pub use std::unstable;\n pub use extra::serialize;\n}\n*\/\n\npub fn version(argv0: &str) {\n let vers = match option_env!(\"CFG_VERSION\") {\n Some(vers) => vers,\n None => \"unknown version\"\n };\n printfln!(\"%s %s\", argv0, vers);\n printfln!(\"host: %s\", host_triple());\n}\n\npub fn usage(argv0: &str) {\n let message = fmt!(\"Usage: %s [OPTIONS] INPUT\", argv0);\n printfln!(\"%s\\n\\\nAdditional help:\n -W help Print 'lint' options and default settings\n -Z help Print internal options for debugging rustc\\n\",\n groups::usage(message, optgroups()));\n}\n\npub fn describe_warnings() {\n use extra::sort::Sort;\n println(\"\nAvailable lint options:\n -W <foo> Warn about <foo>\n -A <foo> Allow <foo>\n -D <foo> Deny <foo>\n -F <foo> Forbid <foo> (deny, and deny all overrides)\n\");\n\n let lint_dict = lint::get_lint_dict();\n let mut lint_dict = lint_dict.move_iter()\n .map(|(k, v)| (v, k))\n .collect::<~[(lint::LintSpec, &'static str)]>();\n lint_dict.qsort();\n\n let mut max_key = 0;\n for &(_, name) in lint_dict.iter() {\n max_key = num::max(name.len(), max_key);\n }\n fn padded(max: uint, s: &str) -> ~str {\n str::from_utf8(vec::from_elem(max - s.len(), ' 
' as u8)) + s\n }\n println(\"\\nAvailable lint checks:\\n\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, \"name\"), \"default\", \"meaning\");\n printfln!(\" %s %7.7s %s\\n\",\n padded(max_key, \"----\"), \"-------\", \"-------\");\n for (spec, name) in lint_dict.move_iter() {\n let name = name.replace(\"_\", \"-\");\n printfln!(\" %s %7.7s %s\",\n padded(max_key, name),\n lint::level_to_str(spec.default),\n spec.desc);\n }\n io::println(\"\");\n}\n\npub fn describe_debug_flags() {\n println(\"\\nAvailable debug options:\\n\");\n let r = session::debugging_opts_map();\n for tuple in r.iter() {\n match *tuple {\n (ref name, ref desc, _) => {\n printfln!(\" -Z %-20s -- %s\", *name, *desc);\n }\n }\n }\n}\n\npub fn run_compiler(args: &[~str], demitter: diagnostic::Emitter) {\n \/\/ Don't display log spew by default. Can override with RUST_LOG.\n ::std::logging::console_off();\n\n let mut args = args.to_owned();\n let binary = args.shift().to_managed();\n\n if args.is_empty() { usage(binary); return; }\n\n let matches =\n &match getopts::groups::getopts(args, optgroups()) {\n Ok(m) => m,\n Err(f) => {\n early_error(demitter, getopts::fail_str(f));\n }\n };\n\n if opt_present(matches, \"h\") || opt_present(matches, \"help\") {\n usage(binary);\n return;\n }\n\n \/\/ Display the available lint options if \"-W help\" or only \"-W\" is given.\n let lint_flags = vec::append(getopts::opt_strs(matches, \"W\"),\n getopts::opt_strs(matches, \"warn\"));\n\n let show_lint_options = lint_flags.iter().any(|x| x == &~\"help\") ||\n (opt_present(matches, \"W\") && lint_flags.is_empty());\n\n if show_lint_options {\n describe_warnings();\n return;\n }\n\n let r = getopts::opt_strs(matches, \"Z\");\n if r.iter().any(|x| x == &~\"help\") {\n describe_debug_flags();\n return;\n }\n\n if getopts::opt_maybe_str(matches, \"passes\") == Some(~\"list\") {\n unsafe { lib::llvm::llvm::LLVMRustPrintPasses(); }\n return;\n }\n\n if opt_present(matches, \"v\") || opt_present(matches, \"version\") {\n version(binary);\n return;\n }\n let input = match matches.free.len() {\n 0u => early_error(demitter, ~\"no input filename given\"),\n 1u => {\n let ifile = matches.free[0].as_slice();\n if \"-\" == ifile {\n let src = str::from_utf8(io::stdin().read_whole_stream());\n str_input(src.to_managed())\n } else {\n file_input(Path(ifile))\n }\n }\n _ => early_error(demitter, ~\"multiple input filenames provided\")\n };\n\n let sopts = build_session_options(binary, matches, demitter);\n let sess = build_session(sopts, demitter);\n let odir = getopts::opt_maybe_str(matches, \"out-dir\").map_move(|o| Path(o));\n let ofile = getopts::opt_maybe_str(matches, \"o\").map_move(|o| Path(o));\n let cfg = build_configuration(sess);\n let pretty = do getopts::opt_default(matches, \"pretty\", \"normal\").map_move |a| {\n parse_pretty(sess, a)\n };\n match pretty {\n Some::<PpMode>(ppm) => {\n pretty_print_input(sess, cfg, &input, ppm);\n return;\n }\n None::<PpMode> => {\/* continue *\/ }\n }\n let ls = opt_present(matches, \"ls\");\n if ls {\n match input {\n file_input(ref ifile) => {\n list_metadata(sess, &(*ifile), io::stdout());\n }\n str_input(_) => {\n early_error(demitter, ~\"can not list metadata for stdin\");\n }\n }\n return;\n }\n\n compile_input(sess, cfg, &input, &odir, &ofile);\n}\n\n#[deriving(Eq)]\npub enum monitor_msg {\n fatal,\n done,\n}\n\n\/*\nThis is a sanity check that any failure of the compiler is performed\nthrough the diagnostic module and reported properly - we shouldn't be calling\nplain-old-fail on any 
execution path that might be taken. Since we have\nconsole logging off by default, hitting a plain fail statement would make the\ncompiler silently exit, which would be terrible.\n\nThis method wraps the compiler in a subtask and injects a function into the\ndiagnostic emitter which records when we hit a fatal error. If the task\nfails without recording a fatal error then we've encountered a compiler\nbug and need to present an error.\n*\/\npub fn monitor(f: ~fn(diagnostic::Emitter)) {\n use std::comm::*;\n\n \/\/ XXX: This is a hack for newsched since it doesn't support split stacks.\n \/\/ rustc needs a lot of stack!\n static STACK_SIZE: uint = 6000000;\n\n let (p, ch) = stream();\n let ch = SharedChan::new(ch);\n let ch_capture = ch.clone();\n let mut task_builder = task::task();\n task_builder.supervised();\n\n \/\/ XXX: Hacks on hacks. If the env is trying to override the stack size\n \/\/ then *don't* set it explicitly.\n if os::getenv(\"RUST_MIN_STACK\").is_none() {\n task_builder.opts.stack_size = Some(STACK_SIZE);\n }\n\n match do task_builder.try {\n let ch = ch_capture.clone();\n let ch_capture = ch.clone();\n \/\/ The 'diagnostics emitter'. Every error, warning, etc. should\n \/\/ go through this function.\n let demitter: @fn(Option<(@codemap::CodeMap, codemap::Span)>,\n &str,\n diagnostic::level) =\n |cmsp, msg, lvl| {\n if lvl == diagnostic::fatal {\n ch_capture.send(fatal);\n }\n diagnostic::emit(cmsp, msg, lvl);\n };\n\n struct finally {\n ch: SharedChan<monitor_msg>,\n }\n\n impl Drop for finally {\n fn drop(&mut self) { self.ch.send(done); }\n }\n\n let _finally = finally { ch: ch };\n\n f(demitter);\n\n \/\/ Due reasons explain in #7732, if there was a jit execution context it\n \/\/ must be consumed and passed along to our parent task.\n back::link::jit::consume_engine()\n } {\n result::Ok(_) => { \/* fallthrough *\/ }\n result::Err(_) => {\n \/\/ Task failed without emitting a fatal diagnostic\n if p.recv() == done {\n diagnostic::emit(\n None,\n diagnostic::ice_msg(\"unexpected failure\"),\n diagnostic::error);\n\n let xs = [\n ~\"the compiler hit an unexpected failure path. 
\\\n this is a bug\",\n ~\"try running with RUST_LOG=rustc=1 \\\n to get further details and report the results \\\n to github.com\/mozilla\/rust\/issues\"\n ];\n for note in xs.iter() {\n diagnostic::emit(None, *note, diagnostic::note)\n }\n }\n \/\/ Fail so the process returns a failure code\n fail!();\n }\n }\n}\n\npub fn main() {\n let args = os::args();\n main_args(args);\n}\n\npub fn main_args(args: &[~str]) {\n let owned_args = args.to_owned();\n do monitor |demitter| {\n run_compiler(owned_args, demitter);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Added skeleton of middleware module.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #25025 - Manishearth:originOffset, r=jdm<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for nucleotide-count case<commit_after>use std::collections::HashMap;\n\npub fn nucleotide_counts(serial: &str) -> Result<HashMap<char, usize>, &str> {\n let counter = &mut [0usize; 4];\n\n for s in serial.chars() {\n match s {\n 'A' => counter[0] += 1,\n 'C' => counter[1] += 1,\n 'G' => counter[2] += 1,\n 'T' => counter[3] += 1,\n _ => return Err(\"Invalid serial\"),\n }\n }\n\n Ok(\n [\n ('A', counter[0]),\n ('C', counter[1]),\n ('G', counter[2]),\n ('T', counter[3]),\n ].iter()\n .cloned()\n .collect(),\n )\n}\n\npub fn count(n: char, serial: &str) -> Result<usize, &str> {\n if n != 'A' && n != 'C' && n != 'G' && n != 'T' {\n return Err(\"Invalid nucleotide\");\n }\n\n let mut counter = 0;\n\n for s in serial.chars() {\n if s == n {\n counter += 1;\n\n continue;\n }\n\n if s != 'A' && s != 'C' && s != 'G' && s != 'T' {\n return Err(\"Invalid serial\");\n }\n }\n\n Ok(counter)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented encode for message type 2<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cargo test --bin ejonecho<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example app that uses the animation API<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nextern crate gleam;\nextern crate glutin;\nextern crate webrender;\nextern crate webrender_traits;\n\n#[macro_use]\nextern crate lazy_static;\n\n#[path=\"common\/boilerplate.rs\"]\nmod boilerplate;\n\nuse boilerplate::HandyDandyRectBuilder;\nuse std::sync::Mutex;\nuse webrender_traits::*;\n\n\/\/ This example creates a 100x100 white rect and allows the user to move it\n\/\/ around by using the arrow keys. It does this by using the animation API.\n\nfn body(builder: &mut DisplayListBuilder,\n _pipeline_id: &PipelineId,\n _layout_size: &LayoutSize)\n{\n \/\/ Create a 100x100 stacking context with an animatable transform property.\n \/\/ Note the magic \"42\" we use as the animation key. That is used to update\n \/\/ the transform in the keyboard event handler code.\n let bounds = (0,0).to(100, 100);\n builder.push_stacking_context(webrender_traits::ScrollPolicy::Scrollable,\n bounds,\n Some(PropertyBinding::Binding(PropertyBindingKey::new(42))),\n TransformStyle::Flat,\n None,\n webrender_traits::MixBlendMode::Normal,\n Vec::new());\n\n \/\/ Fill it with a white rect\n let clip = builder.push_clip_region(&bounds, vec![], None);\n builder.push_rect(bounds,\n clip,\n ColorF::new(1.0, 1.0, 1.0, 1.0));\n\n builder.pop_stacking_context();\n}\n\nlazy_static! 
{\n static ref TRANSFORM: Mutex<LayoutTransform> = Mutex::new(LayoutTransform::identity());\n}\n\nfn event_handler(event: &glutin::Event,\n api: &RenderApi)\n{\n match *event {\n glutin::Event::KeyboardInput(glutin::ElementState::Pressed, _, Some(key)) => {\n let offset = match key {\n glutin::VirtualKeyCode::Down => (0.0, 10.0),\n glutin::VirtualKeyCode::Up => (0.0, -10.0),\n glutin::VirtualKeyCode::Right => (10.0, 0.0),\n glutin::VirtualKeyCode::Left => (-10.0, 0.0),\n _ => return,\n };\n \/\/ Update the transform based on the keyboard input and push it to\n \/\/ webrender using the generate_frame API. This will recomposite with\n \/\/ the updated transform.\n let new_transform = TRANSFORM.lock().unwrap().post_translated(offset.0, offset.1, 0.0);\n api.generate_frame(Some(DynamicProperties {\n transforms: vec![\n PropertyValue {\n key: PropertyBindingKey::new(42),\n value: new_transform,\n },\n ],\n floats: vec![],\n }));\n *TRANSFORM.lock().unwrap() = new_transform;\n }\n _ => ()\n }\n}\n\nfn main() {\n boilerplate::main_wrapper(body, event_handler);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test(todo): add note about making them into integration tests :(<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Backtick missing in documentation. (#580)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>create pwm<commit_after>use core::any::{Any, TypeId};\nuse core::marker::Unsize;\n\nuse cast::{u16, u32};\nuse hal;\nuse stm32f411::{GPIOA, RCC, TIM1};\n\nuse timer::{Channel, TIM};\n\n\/\/\/ PWM driver\npub struct Pwm<'a, T>(pub &'a T)\nwhere\n T: 'a;\n\nimpl<'a> Pwm<'a, TIM1> {\n \/\/\/ Initializes the PWM module\n pub fn init<P>(&self, period: P)\n where\n P: Into<::apb2::Ticks>,\n {\n self._init(period.into())\n }\n\n fn _init(&self, period: ::apb2::Ticks) {\n let tim1 = self.0;\n\n \/\/ PWM mode 1\n tim1.ccmr1_output.modify(|_, w| unsafe {{\n w.oc1pe().set_bit()\n .oc1m().bits(0b110)\n .oc2pe().set_bit()\n .oc2m().bits(0b110)\n }});\n\n tim1.ccmr2_output.modify(|_, w| unsafe{{\n w.oc3pe().set_bit()\n .oc3m().bits(0b110)\n .oc4pe().set_bit()\n .oc4m().bits(0b110)\n }});\n\n tim1.ccer.modify(|_, w| {\n w.cc1p().clear_bit()\n .cc2p().clear_bit()\n .cc3p().clear_bit()\n .cc4p().clear_bit()\n });\n\n tim1.bdtr.modify(|_, w| w.moe().set_bit());\n\n self._set_period(period);\n\n tim1.cr1.write(|w| unsafe {\n w.cms().bits(0b00)\n .dir().set_bit()\n .opm().clear_bit()\n .cen().set_bit()\n });\n }\n\n fn _set_period(&self, period: ::apb2::Ticks) {\n let period = period.0;\n\n let psc = u16((period - 1) \/ (1 << 16)).unwrap();\n self.0.psc.write(|w| unsafe{ w.psc().bits(psc) });\n\n let arr = u16(period \/ u32(psc + 1)).unwrap();\n self.0.arr.write(|w| unsafe{ w.arr().bits(arr) });\n }\n}\n\nimpl<'a> hal::Pwm for Pwm<'a, TIM1> {\n type Channel = Channel;\n type Time = ::apb2::Ticks;\n type Duty = u16;\n\n fn disable(&self, channel: Channel) {\n match channel {\n Channel::_1 => self.0.ccer.modify(|_, w| w.cc1e().clear_bit()),\n Channel::_2 => self.0.ccer.modify(|_, w| w.cc2e().clear_bit()),\n Channel::_3 => self.0.ccer.modify(|_, w| w.cc3e().clear_bit()),\n Channel::_4 => self.0.ccer.modify(|_, w| w.cc4e().clear_bit()),\n }\n }\n\n fn enable(&self, channel: Channel) {\n match channel {\n Channel::_1 => self.0.ccer.modify(|_, w| w.cc1e().set_bit()),\n Channel::_2 => self.0.ccer.modify(|_, w| w.cc2e().set_bit()),\n Channel::_3 => self.0.ccer.modify(|_, w| w.cc3e().set_bit()),\n Channel::_4 => self.0.ccer.modify(|_, w| w.cc4e().set_bit()),\n }\n }\n\n fn 
get_duty(&self, channel: Channel) -> u16 {\n match channel {\n Channel::_1 => self.0.ccr1.read().ccr1().bits(),\n Channel::_2 => self.0.ccr2.read().ccr2().bits(),\n Channel::_3 => self.0.ccr3.read().ccr3().bits(),\n Channel::_4 => self.0.ccr4.read().ccr4().bits(),\n }\n }\n\n fn get_max_duty(&self) -> u16 {\n self.0.arr.read().arr().bits()\n }\n\n fn get_period(&self) -> ::apb2::Ticks {\n ::apb2::Ticks(u32(self.0.psc.read().bits() * self.0.arr.read().bits()))\n }\n\n fn set_duty(&self, channel: Channel, duty: u16) {\n unsafe {\n match channel {\n Channel::_1 => self.0.ccr1.write(|w| w.ccr1().bits(duty)),\n Channel::_2 => self.0.ccr2.write(|w| w.ccr2().bits(duty)),\n Channel::_3 => self.0.ccr3.write(|w| w.ccr3().bits(duty)),\n Channel::_4 => self.0.ccr4.write(|w| w.ccr4().bits(duty)),\n }\n }\n }\n\n fn set_period<P>(&self, period: P)\n where\n P: Into<::apb2::Ticks>,\n {\n self._set_period(period.into())\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that the simd_reduce_{op} intrinsics produce the correct results.\n\n#![feature(repr_simd, platform_intrinsics)]\n#[allow(non_camel_case_types)]\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct i32x4(pub i32, pub i32, pub i32, pub i32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct u32x4(pub u32, pub u32, pub u32, pub u32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct f32x4(pub f32, pub f32, pub f32, pub f32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct b8x4(pub i8, pub i8, pub i8, pub i8);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct b8x16(\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8\n);\n\nextern \"platform-intrinsic\" {\n fn simd_reduce_add_unordered<T, U>(x: T) -> U;\n fn simd_reduce_mul_unordered<T, U>(x: T) -> U;\n fn simd_reduce_add_ordered<T, U>(x: T, acc: U) -> U;\n fn simd_reduce_mul_ordered<T, U>(x: T, acc: U) -> U;\n fn simd_reduce_min<T, U>(x: T) -> U;\n fn simd_reduce_max<T, U>(x: T) -> U;\n fn simd_reduce_min_nanless<T, U>(x: T) -> U;\n fn simd_reduce_max_nanless<T, U>(x: T) -> U;\n fn simd_reduce_and<T, U>(x: T) -> U;\n fn simd_reduce_or<T, U>(x: T) -> U;\n fn simd_reduce_xor<T, U>(x: T) -> U;\n fn simd_reduce_all<T>(x: T) -> bool;\n fn simd_reduce_any<T>(x: T) -> bool;\n}\n\nfn main() {\n unsafe {\n let x = i32x4(1, -2, 3, 4);\n let r: i32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 6_i32);\n let r: i32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, -24_i32);\n let r: i32 = simd_reduce_add_ordered(x, -1);\n assert_eq!(r, 5_i32);\n let r: i32 = simd_reduce_mul_ordered(x, -1);\n assert_eq!(r, 24_i32);\n\n let r: i32 = simd_reduce_min(x);\n assert_eq!(r, -2_i32);\n let r: i32 = simd_reduce_max(x);\n assert_eq!(r, 4_i32);\n\n let x = i32x4(-1, -1, -1, -1);\n let r: i32 = simd_reduce_and(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_or(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_xor(x);\n assert_eq!(r, 0_i32);\n\n let x = i32x4(-1, -1, 0, -1);\n let r: i32 = simd_reduce_and(x);\n assert_eq!(r, 
0_i32);\n let r: i32 = simd_reduce_or(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_xor(x);\n assert_eq!(r, -1_i32);\n }\n\n unsafe {\n let x = u32x4(1, 2, 3, 4);\n let r: u32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 10_u32);\n let r: u32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, 24_u32);\n let r: u32 = simd_reduce_add_ordered(x, 1);\n assert_eq!(r, 11_u32);\n let r: u32 = simd_reduce_mul_ordered(x, 2);\n assert_eq!(r, 48_u32);\n\n let r: u32 = simd_reduce_min(x);\n assert_eq!(r, 1_u32);\n let r: u32 = simd_reduce_max(x);\n assert_eq!(r, 4_u32);\n\n let t = u32::max_value();\n let x = u32x4(t, t, t, t);\n let r: u32 = simd_reduce_and(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_or(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_xor(x);\n assert_eq!(r, 0_u32);\n\n let x = u32x4(t, t, 0, t);\n let r: u32 = simd_reduce_and(x);\n assert_eq!(r, 0_u32);\n let r: u32 = simd_reduce_or(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_xor(x);\n assert_eq!(r, t);\n }\n\n unsafe {\n let x = f32x4(1., -2., 3., 4.);\n let r: f32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 6_f32);\n let r: f32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, -24_f32);\n \/\/ FIXME: only works correctly for accumulator, 0:\n \/\/ https:\/\/bugs.llvm.org\/show_bug.cgi?id=36734\n let r: f32 = simd_reduce_add_ordered(x, 0.);\n assert_eq!(r, 6_f32);\n \/\/ FIXME: only works correctly for accumulator, 1:\n \/\/ https:\/\/bugs.llvm.org\/show_bug.cgi?id=36734\n let r: f32 = simd_reduce_mul_ordered(x, 1.);\n assert_eq!(r, -24_f32);\n\n let r: f32 = simd_reduce_min(x);\n assert_eq!(r, -2_f32);\n let r: f32 = simd_reduce_max(x);\n assert_eq!(r, 4_f32);\n let r: f32 = simd_reduce_min_nanless(x);\n assert_eq!(r, -2_f32);\n let r: f32 = simd_reduce_max_nanless(x);\n assert_eq!(r, 4_f32);\n }\n\n unsafe {\n let x = b8x4(!0, !0, !0, !0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, true);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, true);\n\n let x = b8x4(!0, !0, 0, !0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, false);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, true);\n\n let x = b8x4(0, 0, 0, 0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, false);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, false);\n }\n}\n<commit_msg>add min-llvm version to reduction tests<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ min-llvm-version 5.0\n\n\/\/ Test that the simd_reduce_{op} intrinsics produce the correct results.\n\n#![feature(repr_simd, platform_intrinsics)]\n#[allow(non_camel_case_types)]\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct i32x4(pub i32, pub i32, pub i32, pub i32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct u32x4(pub u32, pub u32, pub u32, pub u32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct f32x4(pub f32, pub f32, pub f32, pub f32);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct b8x4(pub i8, pub i8, pub i8, pub i8);\n\n#[repr(simd)]\n#[derive(Copy, Clone)]\nstruct b8x16(\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8,\n pub i8, pub i8, pub i8, pub i8\n);\n\nextern \"platform-intrinsic\" {\n fn simd_reduce_add_unordered<T, U>(x: T) -> U;\n fn simd_reduce_mul_unordered<T, U>(x: T) -> U;\n fn simd_reduce_add_ordered<T, U>(x: T, acc: U) -> U;\n fn simd_reduce_mul_ordered<T, U>(x: T, acc: U) -> U;\n fn simd_reduce_min<T, U>(x: T) -> U;\n fn simd_reduce_max<T, U>(x: T) -> U;\n fn simd_reduce_min_nanless<T, U>(x: T) -> U;\n fn simd_reduce_max_nanless<T, U>(x: T) -> U;\n fn simd_reduce_and<T, U>(x: T) -> U;\n fn simd_reduce_or<T, U>(x: T) -> U;\n fn simd_reduce_xor<T, U>(x: T) -> U;\n fn simd_reduce_all<T>(x: T) -> bool;\n fn simd_reduce_any<T>(x: T) -> bool;\n}\n\nfn main() {\n unsafe {\n let x = i32x4(1, -2, 3, 4);\n let r: i32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 6_i32);\n let r: i32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, -24_i32);\n let r: i32 = simd_reduce_add_ordered(x, -1);\n assert_eq!(r, 5_i32);\n let r: i32 = simd_reduce_mul_ordered(x, -1);\n assert_eq!(r, 24_i32);\n\n let r: i32 = simd_reduce_min(x);\n assert_eq!(r, -2_i32);\n let r: i32 = simd_reduce_max(x);\n assert_eq!(r, 4_i32);\n\n let x = i32x4(-1, -1, -1, -1);\n let r: i32 = simd_reduce_and(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_or(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_xor(x);\n assert_eq!(r, 0_i32);\n\n let x = i32x4(-1, -1, 0, -1);\n let r: i32 = simd_reduce_and(x);\n assert_eq!(r, 0_i32);\n let r: i32 = simd_reduce_or(x);\n assert_eq!(r, -1_i32);\n let r: i32 = simd_reduce_xor(x);\n assert_eq!(r, -1_i32);\n }\n\n unsafe {\n let x = u32x4(1, 2, 3, 4);\n let r: u32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 10_u32);\n let r: u32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, 24_u32);\n let r: u32 = simd_reduce_add_ordered(x, 1);\n assert_eq!(r, 11_u32);\n let r: u32 = simd_reduce_mul_ordered(x, 2);\n assert_eq!(r, 48_u32);\n\n let r: u32 = simd_reduce_min(x);\n assert_eq!(r, 1_u32);\n let r: u32 = simd_reduce_max(x);\n assert_eq!(r, 4_u32);\n\n let t = u32::max_value();\n let x = u32x4(t, t, t, t);\n let r: u32 = simd_reduce_and(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_or(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_xor(x);\n assert_eq!(r, 0_u32);\n\n let x = u32x4(t, t, 0, t);\n let r: u32 = simd_reduce_and(x);\n assert_eq!(r, 0_u32);\n let r: u32 = simd_reduce_or(x);\n assert_eq!(r, t);\n let r: u32 = simd_reduce_xor(x);\n assert_eq!(r, t);\n }\n\n unsafe {\n let x = f32x4(1., -2., 3., 4.);\n let r: f32 = simd_reduce_add_unordered(x);\n assert_eq!(r, 6_f32);\n let r: f32 = simd_reduce_mul_unordered(x);\n assert_eq!(r, -24_f32);\n \/\/ FIXME: only works correctly for accumulator, 0:\n \/\/ https:\/\/bugs.llvm.org\/show_bug.cgi?id=36734\n let r: f32 = simd_reduce_add_ordered(x, 
0.);\n assert_eq!(r, 6_f32);\n \/\/ FIXME: only works correctly for accumulator, 1:\n \/\/ https:\/\/bugs.llvm.org\/show_bug.cgi?id=36734\n let r: f32 = simd_reduce_mul_ordered(x, 1.);\n assert_eq!(r, -24_f32);\n\n let r: f32 = simd_reduce_min(x);\n assert_eq!(r, -2_f32);\n let r: f32 = simd_reduce_max(x);\n assert_eq!(r, 4_f32);\n let r: f32 = simd_reduce_min_nanless(x);\n assert_eq!(r, -2_f32);\n let r: f32 = simd_reduce_max_nanless(x);\n assert_eq!(r, 4_f32);\n }\n\n unsafe {\n let x = b8x4(!0, !0, !0, !0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, true);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, true);\n\n let x = b8x4(!0, !0, 0, !0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, false);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, true);\n\n let x = b8x4(0, 0, 0, 0);\n let r: bool = simd_reduce_all(x);\n assert_eq!(r, false);\n let r: bool = simd_reduce_any(x);\n assert_eq!(r, false);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #27132 - apasel422:issue-23491, r=arielb1<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(box_syntax)]\n\nstruct Node<T: ?Sized>(T);\n\nfn main() {\n let x: Box<Node<[isize]>> = box Node([]);\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. 
self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - 
row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Add todo<commit_after>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => 
Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn reload(&mut self) {\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Some(ref mut file) => {\n file.seek(SeekFrom::Start(0));\n let mut string = String::new();\n file.read_to_string(&mut string);\n self.string = string;\n }\n None => self.string = String::new(),\n }\n }\n\n fn save(&mut self) {\n match self.file {\n Some(ref mut file) => {\n file.seek(SeekFrom::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n None => {\n \/\/TODO: Ask for file to save to\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\")).unwrap();\n\n self.url = url.to_string();\n self.file = File::open(&self.url);\n\n self.reload();\n self.draw_content(&mut window);\n\n while let Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_BKSP => if self.offset > 0 {\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n },\n K_DEL => if self.offset < self.string.len() {\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. 
self.string.len() - 1];\n },\n K_F5 => self.reload(),\n K_F6 => self.save(),\n K_HOME => self.offset = 0,\n K_UP => {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n K_LEFT => if self.offset > 0 {\n self.offset -= 1;\n },\n K_RIGHT => if self.offset < self.string.len() {\n self.offset += 1;\n },\n K_END => self.offset = self.string.len(),\n K_DOWN => {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n _ => match key_event.character {\n '\\0' => (),\n _ => {\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Some(arg) => Editor::new().main(&arg),\n None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Started implementing Save window for editor as a freebie<commit_after>use redox::*;\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn reload(&mut self) {\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Some(ref mut file) => {\n file.seek(SeekFrom::Start(0));\n let mut string = String::new();\n file.read_to_string(&mut string);\n self.string = string;\n }\n None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &Window) {\n match self.file {\n Some(ref mut file) => {\n file.seek(SeekFrom::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n None => {\n let mut save_window = {\n const width: usize = 400;\n const height: usize = 200;\n Window::new((window.x() + (window.width()\/2 - width\/2) as isize),\n (window.y() + (window.height()\/2 - height\/2) as isize),\n width,\n height,\n \"Save As\").unwrap()\n };\n if let Some(event) = save_window.poll() {\n \/\/TODO: Create a Save\/Cancel button for file saving\n \/\/ and prompt the user for asking to save\n }\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && 
col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\")).unwrap();\n\n self.url = url.to_string();\n self.file = File::open(&self.url);\n\n self.reload();\n self.draw_content(&mut window);\n\n while let Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_BKSP => if self.offset > 0 {\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n },\n K_DEL => if self.offset < self.string.len() {\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len() - 1];\n },\n K_F5 => self.reload(),\n K_F6 => self.save(&window),\n K_HOME => self.offset = 0,\n K_UP => {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n K_LEFT => if self.offset > 0 {\n self.offset -= 1;\n },\n K_RIGHT => if self.offset < self.string.len() {\n self.offset += 1;\n },\n K_END => self.offset = self.string.len(),\n K_DOWN => {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n _ => match key_event.character {\n '\\0' => (),\n _ => {\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Some(arg) => Editor::new().main(&arg),\n None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Types in this module are mostly internal and automatically generated. You\n\/\/! shouldn't need to interact with these types during normal usage, other than\n\/\/! the methods on [`Table`](trait.Table.html)\n#[doc(hidden)]\npub mod filter;\n#[doc(hidden)]\npub mod joins;\n\nuse backend::Backend;\nuse expression::{Expression, SelectableExpression, NonAggregate};\nuse query_builder::*;\n#[doc(hidden)]\npub use self::joins::{InnerJoinSource, LeftOuterJoinSource};\nuse types::{FromSqlRow, HasSqlType};\n\npub use self::joins::JoinTo;\n\n\/\/\/ Trait indicating that a record can be queried from the database. This trait\n\/\/\/ can be derived automatically. 
See the [codegen\n\/\/\/ documentation](https:\/\/github.com\/diesel-rs\/diesel\/tree\/master\/diesel_codegen#derivequeryable)\n\/\/\/ for more.\npub trait Queryable<ST, DB> where\n DB: Backend + HasSqlType<ST>,\n{\n type Row: FromSqlRow<ST, DB>;\n\n fn build(row: Self::Row) -> Self;\n}\n\n#[doc(hidden)]\npub trait QuerySource {\n type FromClause;\n fn from_clause(&self) -> Self::FromClause;\n}\n\n\/\/\/ A column on a database table. Types which implement this trait should have\n\/\/\/ been generated by the [`table!` macro](..\/macro.table!.html).\npub trait Column: Expression {\n type Table: Table;\n\n fn name() -> &'static str;\n}\n\n\/\/\/ A SQL database table. Types which implement this trait should have been\n\/\/\/ generated by the [`table!` macro](..\/macro.table!.html).\npub trait Table: QuerySource + AsQuery + Sized {\n type PrimaryKey: Column<Table=Self> + Expression + NonAggregate;\n type AllColumns: SelectableExpression<Self> + NonAggregate;\n\n fn name() -> &'static str;\n fn primary_key(&self) -> Self::PrimaryKey;\n fn all_columns() -> Self::AllColumns;\n\n fn inner_join<T>(self, other: T) -> InnerJoinSource<Self, T> where\n T: Table,\n Self: JoinTo<T, joins::Inner>,\n {\n InnerJoinSource::new(self, other)\n }\n\n fn left_outer_join<T>(self, other: T) -> LeftOuterJoinSource<Self, T> where\n T: Table,\n Self: JoinTo<T, joins::LeftOuter>,\n {\n LeftOuterJoinSource::new(self, other)\n }\n}\n<commit_msg>Remove the `Column` constraint from `PrimaryKey`<commit_after>\/\/! Types in this module are mostly internal and automatically generated. You\n\/\/! shouldn't need to interact with these types during normal usage, other than\n\/\/! the methods on [`Table`](trait.Table.html)\n#[doc(hidden)]\npub mod filter;\n#[doc(hidden)]\npub mod joins;\n\nuse backend::Backend;\nuse expression::{Expression, SelectableExpression, NonAggregate};\nuse query_builder::*;\n#[doc(hidden)]\npub use self::joins::{InnerJoinSource, LeftOuterJoinSource};\nuse types::{FromSqlRow, HasSqlType};\n\npub use self::joins::JoinTo;\n\n\/\/\/ Trait indicating that a record can be queried from the database. This trait\n\/\/\/ can be derived automatically. See the [codegen\n\/\/\/ documentation](https:\/\/github.com\/diesel-rs\/diesel\/tree\/master\/diesel_codegen#derivequeryable)\n\/\/\/ for more.\npub trait Queryable<ST, DB> where\n DB: Backend + HasSqlType<ST>,\n{\n type Row: FromSqlRow<ST, DB>;\n\n fn build(row: Self::Row) -> Self;\n}\n\n#[doc(hidden)]\npub trait QuerySource {\n type FromClause;\n fn from_clause(&self) -> Self::FromClause;\n}\n\n\/\/\/ A column on a database table. Types which implement this trait should have\n\/\/\/ been generated by the [`table!` macro](..\/macro.table!.html).\npub trait Column: Expression {\n type Table: Table;\n\n fn name() -> &'static str;\n}\n\n\/\/\/ A SQL database table. 
Types which implement this trait should have been\n\/\/\/ generated by the [`table!` macro](..\/macro.table!.html).\npub trait Table: QuerySource + AsQuery + Sized {\n type PrimaryKey: SelectableExpression<Self> + NonAggregate;\n type AllColumns: SelectableExpression<Self> + NonAggregate;\n\n fn name() -> &'static str;\n fn primary_key(&self) -> Self::PrimaryKey;\n fn all_columns() -> Self::AllColumns;\n\n fn inner_join<T>(self, other: T) -> InnerJoinSource<Self, T> where\n T: Table,\n Self: JoinTo<T, joins::Inner>,\n {\n InnerJoinSource::new(self, other)\n }\n\n fn left_outer_join<T>(self, other: T) -> LeftOuterJoinSource<Self, T> where\n T: Table,\n Self: JoinTo<T, joins::LeftOuter>,\n {\n LeftOuterJoinSource::new(self, other)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>String to tiploc is better expressed as FromStr, really.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #3053. Fixes #3053.<commit_after>\/\/ exec-env:RUST_POISON_ON_FREE=1\n\n\/\/ Test that we root `x` even though it is found in immutable memory,\n\/\/ because it is moved.\n\n#[feature(managed_boxes)];\n\nfn free<T>(x: @T) {}\n\nstruct Foo {\n f: @Bar\n}\n\nstruct Bar {\n g: int\n}\n\nfn lend(x: @Foo) -> int {\n let y = &x.f.g;\n free(x); \/\/ specifically here, if x is not rooted, it will be freed\n *y\n}\n\npub fn main() {\n assert_eq!(lend(@Foo {f: @Bar {g: 22}}), 22);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add trace output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>docs(ClearTextPassword): fix a typo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #63473 - adrian-budau:master, r=Centril<commit_after>\/\/ build-pass\n\/\/ Regression test for #56870: Internal compiler error (traits & associated consts)\n\nuse std::fmt::Debug;\n\npub trait Foo<T> {\n const FOO: *const u8;\n}\n\nimpl <T: Debug> Foo<T> for dyn Debug {\n const FOO: *const u8 = <T as Debug>::fmt as *const u8;\n}\n\npub trait Bar {\n const BAR: *const u8;\n}\n\npub trait Baz {\n type Data: Debug;\n}\n\npub struct BarStruct<S: Baz>(S);\n\nimpl<S: Baz> Bar for BarStruct<S> {\n const BAR: *const u8 = <dyn Debug as Foo<<S as Baz>::Data>>::FOO;\n}\n\nstruct AnotherStruct;\n#[derive(Debug)]\nstruct SomeStruct;\n\nimpl Baz for AnotherStruct {\n type Data = SomeStruct;\n}\n\nfn main() {\n let _x = <BarStruct<AnotherStruct> as Bar>::BAR;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse build::{Location, ScopeAuxiliaryVec, ScopeId};\nuse rustc::hir;\nuse rustc::mir::repr::*;\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::{self, TyCtxt};\nuse rustc_data_structures::fnv::FnvHashMap;\nuse rustc_data_structures::indexed_vec::{Idx};\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse syntax::ast::NodeId;\n\nconst INDENT: &'static str = \" \";\n\/\/\/ Alignment for lining up comments following MIR statements\nconst ALIGN: usize = 40;\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ rustc.node<node_id>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n disambiguator: &Display,\n src: MirSource,\n mir: &Mir<'tcx>,\n auxiliary: Option<&ScopeAuxiliaryVec>) {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return,\n Some(ref filters) => filters,\n };\n let node_id = src.item_id();\n let node_path = tcx.item_path_str(tcx.map.local_def_id(node_id));\n let is_matched =\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n });\n if !is_matched {\n return;\n }\n\n let promotion_id = match src {\n MirSource::Promoted(_, id) => format!(\"-{:?}\", id),\n _ => String::new()\n };\n\n let file_name = format!(\"rustc.node{}{}.{}.{}.mir\",\n node_id, promotion_id, pass_name, disambiguator);\n let _ = fs::File::create(&file_name).and_then(|mut file| {\n try!(writeln!(file, \"\/\/ MIR for `{}`\", node_path));\n try!(writeln!(file, \"\/\/ node_id = {}\", node_id));\n try!(writeln!(file, \"\/\/ pass_name = {}\", pass_name));\n try!(writeln!(file, \"\/\/ disambiguator = {}\", disambiguator));\n try!(writeln!(file, \"\"));\n try!(write_mir_fn(tcx, src, mir, &mut file, auxiliary));\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,\n iter: I,\n w: &mut Write)\n -> io::Result<()>\n where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a\n{\n let mut first = true;\n for (&id, mir) in iter {\n if first {\n first = false;\n } else {\n \/\/ Put empty lines between all items\n writeln!(w, \"\")?;\n }\n\n let src = MirSource::from_node(tcx, id);\n write_mir_fn(tcx, src, mir, w, None)?;\n\n for (i, mir) in mir.promoted.iter_enumerated() {\n writeln!(w, \"\")?;\n write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w, None)?;\n }\n }\n Ok(())\n}\n\nenum Annotation {\n EnterScope(ScopeId),\n ExitScope(ScopeId),\n}\n\nfn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>)\n -> FnvHashMap<Location, Vec<Annotation>>\n{\n \/\/ compute scope\/entry exit annotations\n let mut annotations = FnvHashMap();\n if let Some(auxiliary) = auxiliary {\n for (scope_id, auxiliary) in auxiliary.iter_enumerated() {\n annotations.entry(auxiliary.dom)\n .or_insert(vec![])\n 
.push(Annotation::EnterScope(scope_id));\n\n for &loc in &auxiliary.postdoms {\n annotations.entry(loc)\n .or_insert(vec![])\n .push(Annotation::ExitScope(scope_id));\n }\n }\n }\n return annotations;\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n auxiliary: Option<&ScopeAuxiliaryVec>)\n -> io::Result<()> {\n let annotations = scope_entry_exit_annotations(auxiliary);\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n write_basic_block(tcx, block, mir, w, &annotations)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\nfn write_basic_block(tcx: TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write,\n annotations: &FnvHashMap<Location, Vec<Annotation>>)\n -> io::Result<()> {\n let data = &mir[block];\n\n \/\/ Basic block label at the top.\n writeln!(w, \"{}{:?}: {{\", INDENT, block)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n if let Some(ref annotations) = annotations.get(¤t_location) {\n for annotation in annotations.iter() {\n match *annotation {\n Annotation::EnterScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Enter Scope({1})\",\n INDENT, id.index())?,\n Annotation::ExitScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Exit Scope({1})\",\n INDENT, id.index())?,\n }\n }\n }\n\n let indented_mir = format!(\"{0}{0}{1:?};\", INDENT, statement);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_mir,\n ALIGN,\n comment(tcx, statement.source_info))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n let indented_terminator = format!(\"{0}{0}{1:?};\", INDENT, data.terminator().kind);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_terminator,\n ALIGN,\n comment(tcx, data.terminator().source_info))?;\n\n writeln!(w, \"{}}}\\n\", INDENT)\n}\n\nfn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {\n format!(\"scope {} at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\nfn write_scope_tree(tcx: TyCtxt,\n mir: &Mir,\n scope_tree: &FnvHashMap<VisibilityScope, Vec<VisibilityScope>>,\n w: &mut Write,\n parent: VisibilityScope,\n depth: usize)\n -> io::Result<()> {\n let indent = depth * INDENT.len();\n\n let children = match scope_tree.get(&parent) {\n Some(childs) => childs,\n None => return Ok(()),\n };\n\n for &child in children {\n let data = &mir.visibility_scopes[child];\n assert_eq!(data.parent_scope, Some(parent));\n writeln!(w, \"{0:1$}scope {2} {{\", \"\", indent, child.index())?;\n\n \/\/ User variable types (including the user's name in a comment).\n for (id, var) in mir.var_decls.iter_enumerated() {\n \/\/ Skip if not declared in this scope.\n if var.source_info.scope != child {\n continue;\n }\n\n let mut_str = if var.mutability == Mutability::Mut {\n \"mut \"\n } else {\n \"\"\n };\n\n let indent = indent + INDENT.len();\n let indented_var = format!(\"{0:1$}let {2}{3:?}: {4};\",\n INDENT,\n indent,\n mut_str,\n id,\n var.ty);\n writeln!(w, \"{0:1$} \/\/ \\\"{2}\\\" in {3}\",\n indented_var,\n ALIGN,\n var.name,\n comment(tcx, var.source_info))?;\n }\n\n write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;\n\n writeln!(w, \"{0:1$}}}\", \"\", depth * INDENT.len())?;\n }\n\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation of 
the MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\nfn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_sig(tcx, src, mir, w)?;\n writeln!(w, \" {{\")?;\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FnvHashMap<VisibilityScope, Vec<VisibilityScope>> = FnvHashMap();\n for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {\n if let Some(parent) = scope_data.parent_scope {\n scope_tree.entry(parent)\n .or_insert(vec![])\n .push(VisibilityScope::new(index));\n } else {\n \/\/ Only the argument scope has no parent, because it's the root.\n assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());\n }\n }\n\n write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;\n\n write_mir_decls(mir, w)\n}\n\nfn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)\n -> io::Result<()>\n{\n match src {\n MirSource::Fn(_) => write!(w, \"fn\")?,\n MirSource::Const(_) => write!(w, \"const\")?,\n MirSource::Static(_, hir::MutImmutable) => write!(w, \"static\")?,\n MirSource::Static(_, hir::MutMutable) => write!(w, \"static mut\")?,\n MirSource::Promoted(_, i) => write!(w, \"{:?} in\", i)?\n }\n\n write!(w, \" {}\", tcx.node_path_str(src.item_id()))?;\n\n if let MirSource::Fn(_) = src {\n write!(w, \"(\")?;\n\n \/\/ fn argument types.\n for (i, arg) in mir.arg_decls.iter_enumerated() {\n if i.index() != 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Arg(i), arg.ty)?;\n }\n\n write!(w, \") -> \")?;\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => write!(w, \"{}\", ty),\n ty::FnOutput::FnDiverging => write!(w, \"!\"),\n }\n } else {\n assert!(mir.arg_decls.is_empty());\n write!(w, \": {} =\", mir.return_ty.unwrap())\n }\n}\n\nfn write_mir_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {\n \/\/ Compiler-introduced temporary types.\n for (id, temp) in mir.temp_decls.iter_enumerated() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, id, temp.ty)?;\n }\n\n \/\/ Wrote any declaration? Add an empty line before the first block is printed.\n if !mir.var_decls.is_empty() || !mir.temp_decls.is_empty() {\n writeln!(w, \"\")?;\n }\n\n Ok(())\n}\n<commit_msg>Remove extra newlines in MIR dump<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse build::{Location, ScopeAuxiliaryVec, ScopeId};\nuse rustc::hir;\nuse rustc::mir::repr::*;\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::{self, TyCtxt};\nuse rustc_data_structures::fnv::FnvHashMap;\nuse rustc_data_structures::indexed_vec::{Idx};\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse syntax::ast::NodeId;\n\nconst INDENT: &'static str = \" \";\n\/\/\/ Alignment for lining up comments following MIR statements\nconst ALIGN: usize = 40;\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ rustc.node<node_id>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n disambiguator: &Display,\n src: MirSource,\n mir: &Mir<'tcx>,\n auxiliary: Option<&ScopeAuxiliaryVec>) {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return,\n Some(ref filters) => filters,\n };\n let node_id = src.item_id();\n let node_path = tcx.item_path_str(tcx.map.local_def_id(node_id));\n let is_matched =\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n });\n if !is_matched {\n return;\n }\n\n let promotion_id = match src {\n MirSource::Promoted(_, id) => format!(\"-{:?}\", id),\n _ => String::new()\n };\n\n let file_name = format!(\"rustc.node{}{}.{}.{}.mir\",\n node_id, promotion_id, pass_name, disambiguator);\n let _ = fs::File::create(&file_name).and_then(|mut file| {\n try!(writeln!(file, \"\/\/ MIR for `{}`\", node_path));\n try!(writeln!(file, \"\/\/ node_id = {}\", node_id));\n try!(writeln!(file, \"\/\/ pass_name = {}\", pass_name));\n try!(writeln!(file, \"\/\/ disambiguator = {}\", disambiguator));\n try!(writeln!(file, \"\"));\n try!(write_mir_fn(tcx, src, mir, &mut file, auxiliary));\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,\n iter: I,\n w: &mut Write)\n -> io::Result<()>\n where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a\n{\n let mut first = true;\n for (&id, mir) in iter {\n if first {\n first = false;\n } else {\n \/\/ Put empty lines between all items\n writeln!(w, \"\")?;\n }\n\n let src = MirSource::from_node(tcx, id);\n write_mir_fn(tcx, src, mir, w, None)?;\n\n for (i, mir) in mir.promoted.iter_enumerated() {\n writeln!(w, \"\")?;\n write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w, None)?;\n }\n }\n Ok(())\n}\n\nenum Annotation {\n EnterScope(ScopeId),\n ExitScope(ScopeId),\n}\n\nfn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>)\n -> FnvHashMap<Location, Vec<Annotation>>\n{\n \/\/ compute scope\/entry exit annotations\n let mut annotations = FnvHashMap();\n if let Some(auxiliary) = auxiliary {\n for (scope_id, auxiliary) in auxiliary.iter_enumerated() {\n annotations.entry(auxiliary.dom)\n .or_insert(vec![])\n 
.push(Annotation::EnterScope(scope_id));\n\n for &loc in &auxiliary.postdoms {\n annotations.entry(loc)\n .or_insert(vec![])\n .push(Annotation::ExitScope(scope_id));\n }\n }\n }\n return annotations;\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n auxiliary: Option<&ScopeAuxiliaryVec>)\n -> io::Result<()> {\n let annotations = scope_entry_exit_annotations(auxiliary);\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n write_basic_block(tcx, block, mir, w, &annotations)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\nfn write_basic_block(tcx: TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write,\n annotations: &FnvHashMap<Location, Vec<Annotation>>)\n -> io::Result<()> {\n let data = &mir[block];\n\n \/\/ Basic block label at the top.\n writeln!(w, \"{}{:?}: {{\", INDENT, block)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n if let Some(ref annotations) = annotations.get(¤t_location) {\n for annotation in annotations.iter() {\n match *annotation {\n Annotation::EnterScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Enter Scope({1})\",\n INDENT, id.index())?,\n Annotation::ExitScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Exit Scope({1})\",\n INDENT, id.index())?,\n }\n }\n }\n\n let indented_mir = format!(\"{0}{0}{1:?};\", INDENT, statement);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_mir,\n ALIGN,\n comment(tcx, statement.source_info))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n let indented_terminator = format!(\"{0}{0}{1:?};\", INDENT, data.terminator().kind);\n writeln!(w, \"{0:1$} \/\/ {2}\",\n indented_terminator,\n ALIGN,\n comment(tcx, data.terminator().source_info))?;\n\n writeln!(w, \"{}}}\", INDENT)\n}\n\nfn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {\n format!(\"scope {} at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\nfn write_scope_tree(tcx: TyCtxt,\n mir: &Mir,\n scope_tree: &FnvHashMap<VisibilityScope, Vec<VisibilityScope>>,\n w: &mut Write,\n parent: VisibilityScope,\n depth: usize)\n -> io::Result<()> {\n let indent = depth * INDENT.len();\n\n let children = match scope_tree.get(&parent) {\n Some(childs) => childs,\n None => return Ok(()),\n };\n\n for &child in children {\n let data = &mir.visibility_scopes[child];\n assert_eq!(data.parent_scope, Some(parent));\n writeln!(w, \"{0:1$}scope {2} {{\", \"\", indent, child.index())?;\n\n \/\/ User variable types (including the user's name in a comment).\n for (id, var) in mir.var_decls.iter_enumerated() {\n \/\/ Skip if not declared in this scope.\n if var.source_info.scope != child {\n continue;\n }\n\n let mut_str = if var.mutability == Mutability::Mut {\n \"mut \"\n } else {\n \"\"\n };\n\n let indent = indent + INDENT.len();\n let indented_var = format!(\"{0:1$}let {2}{3:?}: {4};\",\n INDENT,\n indent,\n mut_str,\n id,\n var.ty);\n writeln!(w, \"{0:1$} \/\/ \\\"{2}\\\" in {3}\",\n indented_var,\n ALIGN,\n var.name,\n comment(tcx, var.source_info))?;\n }\n\n write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;\n\n writeln!(w, \"{0:1$}}}\", \"\", depth * INDENT.len())?;\n }\n\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation of the 
MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\nfn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir,\n w: &mut Write)\n -> io::Result<()> {\n write_mir_sig(tcx, src, mir, w)?;\n writeln!(w, \" {{\")?;\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FnvHashMap<VisibilityScope, Vec<VisibilityScope>> = FnvHashMap();\n for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {\n if let Some(parent) = scope_data.parent_scope {\n scope_tree.entry(parent)\n .or_insert(vec![])\n .push(VisibilityScope::new(index));\n } else {\n \/\/ Only the argument scope has no parent, because it's the root.\n assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());\n }\n }\n\n write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;\n\n write_mir_decls(mir, w)\n}\n\nfn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)\n -> io::Result<()>\n{\n match src {\n MirSource::Fn(_) => write!(w, \"fn\")?,\n MirSource::Const(_) => write!(w, \"const\")?,\n MirSource::Static(_, hir::MutImmutable) => write!(w, \"static\")?,\n MirSource::Static(_, hir::MutMutable) => write!(w, \"static mut\")?,\n MirSource::Promoted(_, i) => write!(w, \"{:?} in\", i)?\n }\n\n write!(w, \" {}\", tcx.node_path_str(src.item_id()))?;\n\n if let MirSource::Fn(_) = src {\n write!(w, \"(\")?;\n\n \/\/ fn argument types.\n for (i, arg) in mir.arg_decls.iter_enumerated() {\n if i.index() != 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Arg(i), arg.ty)?;\n }\n\n write!(w, \") -> \")?;\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => write!(w, \"{}\", ty),\n ty::FnOutput::FnDiverging => write!(w, \"!\"),\n }\n } else {\n assert!(mir.arg_decls.is_empty());\n write!(w, \": {} =\", mir.return_ty.unwrap())\n }\n}\n\nfn write_mir_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {\n \/\/ Compiler-introduced temporary types.\n for (id, temp) in mir.temp_decls.iter_enumerated() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, id, temp.ty)?;\n }\n\n \/\/ Wrote any declaration? Add an empty line before the first block is printed.\n if !mir.var_decls.is_empty() || !mir.temp_decls.is_empty() {\n writeln!(w, \"\")?;\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 1513<commit_after>\/\/ https:\/\/leetcode.com\/problems\/number-of-substrings-with-only-1s\/\npub fn num_sub(s: String) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", num_sub(String::from(\"0110111\"))); \/\/ 9\n println!(\"{}\", num_sub(String::from(\"101\"))); \/\/ 2\n println!(\"{}\", num_sub(String::from(\"111111\"))); \/\/ 21\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2373<commit_after>\/\/ https:\/\/leetcode.com\/problems\/largest-local-values-in-a-matrix\/\npub fn largest_local(grid: Vec<Vec<i32>>) -> Vec<Vec<i32>> {\n todo!()\n}\n\nfn main() {\n println!(\n \"{:?}\",\n largest_local(vec![\n vec![9, 9, 8, 1],\n vec![5, 6, 2, 6],\n vec![8, 2, 6, 4],\n vec![6, 2, 2, 2]\n ])\n ); \/\/ [[9,9],[8,6]]\n println!(\n \"{:?}\",\n largest_local(vec![\n vec![1, 1, 1, 1, 1],\n vec![1, 1, 1, 1, 1],\n vec![1, 1, 2, 1, 1],\n vec![1, 1, 1, 1, 1],\n vec![1, 1, 1, 1, 1]\n ])\n ); \/\/ [[2,2,2],[2,2,2],[2,2,2]]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added pages for api specs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adds shell.rs and UnixShell trait<commit_after>\/\/! 
Paths and Unix shells\n\/\/!\n\/\/! MacOS, Linux, FreeBSD, and many other OS model their design on Unix,\n\/\/! so handling them is relatively consistent. But only relatively.\n\/\/! POSIX postdates Unix by 20 years, and each \"Unix-like\" shell develops\n\/\/! unique quirks over time.\n\n\/\/ TODO: Nushell, PowerShell\n\/\/ Cross-platform non-POSIX shells were not assessed for integration yet.\n\nuse super::*;\nuse std::env;\nuse std::path::PathBuf;\n\npub type Shell = Box<dyn UnixShell>;\n\nmacro_rules! support_shells {\n ( $($shell:ident,)* ) => {\n fn enumerate_shells() -> Vec<Shell> {\n vec![$( Box::new($shell), )*]\n }\n }\n}\n\nsupport_shells! {\n Posix,\n Bash,\n Zsh,\n}\n\npub fn get_available_shells() -> impl Iterator<Item = Shell> {\n enumerate_shells().into_iter().filter(|sh| sh.does_exist())\n}\n\npub trait UnixShell {\n fn does_exist(&self) -> bool;\n\n fn rcfile(&self) -> Option<PathBuf>;\n\n fn export_string(&self) -> Result<String> {\n \/\/ The path is *prepended* in case there are system-installed\n \/\/ rustc's that need to be overridden.\n Ok(format!(\n r#\"export PATH=\"{}\/bin:$PATH\"\"#,\n canonical_cargo_home()?\n ))\n }\n}\n\nstruct Posix;\nimpl UnixShell for Posix {\n fn does_exist(&self) -> bool {\n true\n }\n\n fn rcfile(&self) -> Option<PathBuf> {\n utils::home_dir().map(|dir| dir.join(\".profile\"))\n }\n}\n\nstruct Bash;\nimpl UnixShell for Bash {\n fn does_exist(&self) -> bool {\n self.rcfile().map_or(false, |rc| rc.is_file()) || matches!(utils::find_cmd(&[\"bash\"]), Some(_))\n }\n\n fn rcfile(&self) -> Option<PathBuf> {\n \/\/ .bashrc is normative, in spite of a few weird Mac versions.\n utils::home_dir().map(|dir| dir.join(\".bashrc\"))\n }\n}\n\nstruct Zsh;\nimpl UnixShell for Zsh {\n fn does_exist(&self) -> bool {\n self.rcfile().map_or(false, |rc| rc.is_file()) || matches!(utils::find_cmd(&[\"zsh\"]), Some(_))\n }\n\n fn rcfile(&self) -> Option<PathBuf> {\n let zdotdir = match env::var(\"ZDOTDIR\") {\n Ok(dir) => Some(PathBuf::from(dir)),\n _ => utils::home_dir(),\n };\n\n \/\/ .zshenv is preferred for path mods but not all zshers use it,\n \/\/ zsh always loads .zshrc on interactive, unlike bash's weirdness.\n zdotdir.map(|dir| match dir.join(\".zshenv\") {\n rc if rc.is_file() => rc,\n _ => dir.join(\".zshrc\"),\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Don't attempt to wait for session ending when it has already ended<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ADD: DIY hints content and hints promot behavior<commit_after>use std::collections::HashSet;\n\nuse rustyline::Editor;\nuse rustyline::{hint::Hinter, Context};\nuse rustyline_derive::{Completer, Helper, Highlighter, Validator};\n\n#[derive(Completer, Helper, Validator, Highlighter)]\nstruct DIYHinter {\n \/\/ It's simple example of rustyline, for more effecient, please use ** radix trie ** (thanks @gwenn's idea)\n hints: HashSet<String>,\n}\n\nimpl Hinter for DIYHinter {\n fn hint(&self, line: &str, pos: usize, _ctx: &Context<'_>) -> Option<String> {\n if pos < line.len() {\n return None;\n }\n\n self.hints\n .iter()\n .filter_map(|hint| {\n \/\/ expect hint after word complete, like redis cli, add condition: line.ends_with(\" \")\n if pos > 0 && hint.starts_with(&line[..pos]) {\n Some(hint[pos..].to_owned())\n } else {\n None\n }\n })\n .nth(0)\n }\n}\n\nfn diy_hints() -> HashSet<String> {\n let mut set = HashSet::new();\n set.insert(String::from(\"help\"));\n set.insert(String::from(\"get key\"));\n set.insert(String::from(\"set key value\"));\n 
set.insert(String::from(\"hget key field\"));\n set.insert(String::from(\"hset key field value\"));\n set\n}\n\nfn main() -> rustyline::Result<()> {\n println!(\"This is a DIY hint hack of rustyline\");\n let h = DIYHinter { hints: diy_hints() };\n \n let mut rl: Editor<DIYHinter> = Editor::new();\n rl.set_helper(Some(h));\n\n loop {\n let input = rl.readline(\"> \")?;\n println!(\"input: {}\", input);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::io::{TempDir, Command, fs};\nuse std::os;\n\nfn main() {\n \/\/ If we're the child, make sure we were invoked correctly\n let args = os::args();\n if args.len() > 1 && args[1].as_slice() == \"child\" {\n return assert_eq!(args[0].as_slice(), \"mytest\");\n }\n\n test();\n}\n\nfn test() {\n \/\/ If we're the parent, copy our own binary to a tempr directory, and then\n \/\/ make it executable.\n let dir = TempDir::new(\"mytest\").unwrap();\n let me = os::self_exe_name().unwrap();\n let dest = dir.path().join(format!(\"mytest{}\", os::consts::EXE_SUFFIX));\n fs::copy(&me, &dest).unwrap();\n\n \/\/ Append the temp directory to our own PATH.\n let mut path = os::split_paths(os::getenv(\"PATH\").unwrap_or(String::new()));\n path.push(dir.path().clone());\n let path = os::join_paths(path.as_slice()).unwrap();\n\n Command::new(\"mytest\").env(\"PATH\", path.as_slice())\n .arg(\"child\")\n .spawn().unwrap();\n}\n<commit_msg>Rollup merge of #21498 - quantheory:master, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::io::{Command, fs, USER_RWX};\nuse std::os;\nuse std::path::BytesContainer;\nuse std::rand::random;\n\nfn main() {\n \/\/ If we're the child, make sure we were invoked correctly\n let args = os::args();\n if args.len() > 1 && args[1].as_slice() == \"child\" {\n return assert_eq!(args[0],\n format!(\"mytest{}\", os::consts::EXE_SUFFIX));\n }\n\n test();\n}\n\nfn test() {\n \/\/ If we're the parent, copy our own binary to a new directory.\n let my_path = os::self_exe_name().unwrap();\n let my_dir = my_path.dir_path();\n\n let random_u32: u32 = random();\n let child_dir = Path::new(my_dir.join(format!(\"issue-15149-child-{}\",\n random_u32)));\n fs::mkdir(&child_dir, USER_RWX).unwrap();\n\n let child_path = child_dir.join(format!(\"mytest{}\",\n os::consts::EXE_SUFFIX));\n fs::copy(&my_path, &child_path).unwrap();\n\n \/\/ Append the new directory to our own PATH.\n let mut path = os::split_paths(os::getenv(\"PATH\").unwrap_or(String::new()));\n path.push(child_dir.clone());\n let path = os::join_paths(path.as_slice()).unwrap();\n\n let child_output = Command::new(\"mytest\").env(\"PATH\", path.as_slice())\n .arg(\"child\")\n .output().unwrap();\n\n assert!(child_output.status.success(),\n format!(\"child assertion failed\\n child stdout:\\n {}\\n child stderr:\\n {}\",\n child_output.output.container_as_str().unwrap(),\n child_output.error.container_as_str().unwrap()));\n\n fs::rmdir_recursive(&child_dir).unwrap();\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add run-pass test for issue 33498<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub fn main() {\n let x = (0, 2);\n\n match x {\n (0, ref y) => {}\n (y, 0) => {}\n _ => (),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #22258<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ops::Add;\n\nfn f<T: Add>(a: T, b: T) -> <T as Add>::Output {\n a + b\n}\n\nfn main() {\n println!(\"a + b is {}\", f::<f32>(100f32, 200f32));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add alternate q6<commit_after>extern crate timely;\nextern crate graph_map;\nextern crate alg3_dynamic;\n\nuse std::sync::{Arc, Mutex};\n\nuse alg3_dynamic::*;\n\nuse timely::dataflow::operators::*;\n\nuse graph_map::GraphMMap;\n\n#[allow(non_snake_case)]\nfn main () {\n\n let start = ::std::time::Instant::now();\n\n let send = Arc::new(Mutex::new(0usize));\n let send2 = send.clone();\n\n let inspect = ::std::env::args().find(|x| x == \"inspect\").is_some();\n\n timely::execute_from_args(std::env::args(), move |root| {\n\n let send = send.clone();\n\n \/\/ handles to input and probe, but also both indices so we can compact them.\n let (mut input, mut query, probe, forward) = root.dataflow::<u32,_,_>(|builder| {\n\n \/\/ A stream of changes to the set of *triangles*, where a < b < c.\n let (graph, dT) = builder.new_input::<((u32, u32, u32), i32)>();\n\n \/\/ A stream of changes to the set of *triangles*, where a < b < c.\n let (query, dQ) = builder.new_input::<((u32, u32), ())>();\n\n \/\/ Our query is K4(w,x,y,z) := T(w,x,y), T(w,x,z), T(w,y,z), T(x,y,z)\n \/\/\n \/\/ This query is technically redundant, because the middle two constraints imply the fourth,\n \/\/ so let's slim it down to\n \/\/\n \/\/ K4(w,x,y,z) := T(w,x,y), T(w,x,z), T(w,y,z)\n \/\/\n \/\/ This seems like it could be a bit more complicated than triangles, in determining the rules\n \/\/ for incremental updates. I'm going to write them down first, and we'll see which indices we\n \/\/ actually need. I'll use A, B, and C for the instances of T above.\n \/\/\n \/\/ dK4dA(w,x,y,z) := dA(w,x,y), B(w,x,z), C(w,y,z)\n \/\/ dK4dB(w,x,y,z) := dB(w,x,z), A(w,x,y), C(w,y,z)\n \/\/ dK4dC(w,x,y,z) := dC(w,y,z), A(w,x,y), B(w,x,z)\n \/\/\n \/\/ Looking at this, it seems like we will need\n \/\/\n \/\/ dK4dA : indices on (w,x,_) and (w,_,y)\n \/\/ dK4dB : indices on (w,x,_) and (w,_,z)\n \/\/ dK4dC : indices on (w,_,y) and (w,_,z)\n \/\/\n \/\/ All of this seems to boil down to a \"forward\" and a \"reverse\" index, just as for triangles,\n \/\/ but where `w` is always present as part of the key. 
We just might want the first or second\n \/\/ field that follows it.\n\n let forward = IndexStream::from(\n |(a,b)| (a + b) as u64, \/\/ distribute triangles by a + b.\n &dT.map(|((a,b,c),_)| ((a,b),c)), \/\/ initialize with (a,b) keys and c values.\n &Vec::new().to_stream(builder) \/\/ empty update stream.\n );\n\n \/\/ we bind dQ as (v2, v5, v3), exploiting the fact that we have bound each \n let dK4dA = dQ.extend(vec![Box::new(forward.extend_using(|&(v2,v5)| min_max(v2,v5), <_ as PartialOrd>::le))])\n .flat_map(|((v2,v5),v3s,w)| {\n let v1s = v3s.clone();\n v3s.into_iter().map(move |v3| ((v1s.clone(),v2,v3,v5),w))\n })\n .extend(vec![Box::new(forward.extend_using(|&(ref _v1s,v2,_v3,v5)| min_max(v2,v5), <_ as PartialOrd>::le)),\n Box::new(forward.extend_using(|&(ref _v1s,v2,v3,_v5)| min_max(v2,v3), <_ as PartialOrd>::le))])\n .map(|((v1s,v2,v3,v5), mut v4s, w)| {\n v4s.retain(|&v4| v2 != v4 && v3 < v4);\n ((v1s,v2,v3,v4s,v5),w)\n })\n ;\n \/\/ if the third argument is \"inspect\", report triangle counts.\n if inspect {\n dK4dA.inspect_batch(move |_,x| {\n let mut sum = 0;\n for &((ref v1s, _v2, v3, ref v4s, _v5),_) in x.iter() {\n for &v1 in v1s.iter() {\n if v1 != v3 {\n for &v4 in v4s.iter() {\n if v1 != v4 {\n sum += 1;\n }\n }\n }\n }\n }\n\n if let Ok(mut bound) = send.lock() {\n *bound += sum;\n }\n });\n }\n\n (graph, query, dK4dA.probe(), forward)\n });\n\n \/\/ load fragment of input graph into memory to avoid io while running.\n let filename = std::env::args().nth(1).unwrap();\n let graph = GraphMMap::new(&filename);\n\n let mut triangles = Vec::new();\n\n let mut v1 = root.index();\n while v1 < graph.nodes() {\n let v1f = graph.edges(v1);\n for &v2 in v1f.iter() {\n intersect_and(v1f, graph.edges(v2 as usize), |v3| triangles.push((v1 as u32, v2, v3)));\n }\n v1 += root.peers();\n }\n\n \/\/ drop(graph);\n\n println!(\"{:?}\\tworker {} computed {} triangles\", start.elapsed(), root.index(), triangles.len());\n\n for &(a,b,c) in triangles.iter() {\n \/\/ send each extension\n input.send(((a,b,c), 1));\n input.send(((a,c,b), 1));\n input.send(((b,c,a), 1));\n }\n\n \/\/ synchronize with other workers.\n let prev = query.time().clone();\n input.close();\n query.advance_to(prev.inner + 1);\n while probe.less_than(query.time()) { root.step(); }\n forward.index.borrow_mut().merge_to(&prev);\n\n println!(\"{:?}\\tworker {} loaded index\", start.elapsed(), root.index());\n\n \/\/ number of nodes introduced at a time\n let batch: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let mut node = 0; \n let mut sent = 0;\n\n while sent < graph.nodes() {\n node += batch;\n while sent < graph.nodes() && sent < node {\n for &edge in graph.edges(sent) {\n query.send(((sent as u32, edge), ()));\n }\n \/\/ let (a,b,c) = triangles[sent];\n \/\/ query.send(((a,b,c), ()));\n \/\/ query.send(((a,c,b), ()));\n \/\/ query.send(((b,c,a), ()));\n sent += 1;\n }\n\n \/\/ advance input and synchronize.\n let prev = query.time().clone();\n query.advance_to(prev.inner + 1);\n while probe.less_than(query.time()) { root.step(); }\n\n \/\/ merge all of the indices we maintain.\n forward.index.borrow_mut().merge_to(&prev);\n }\n\n query.close();\n while root.step() { }\n\n if inspect { \n println!(\"{:?}\\tworker {} complete\", start.elapsed(), root.index()); \n }\n\n }).unwrap();\n\n let total = if let Ok(lock) = send2.lock() {\n *lock\n }\n else { 0 };\n\n if inspect { \n println!(\"elapsed: {:?}\\ttotal instances at this process: {:?}\", start.elapsed(), total); \n }\n}\n\nfn min_max<T: Ord>(a: 
T, b: T) -> (T, T) {\n if a < b { (a, b) } else { (b, a) }\n}\n\nfn intersect_and<F: FnMut(u32)>(aaa: &[u32], mut bbb: &[u32], mut func: F) {\n\n if aaa.len() > bbb.len() {\n intersect_and(bbb, aaa, func);\n }\n else {\n if aaa.len() < bbb.len() \/ 16 {\n for &a in aaa.iter() {\n bbb = gallop_ge(bbb, &a);\n if bbb.len() > 0 && bbb[0] == a {\n func(a)\n }\n }\n }\n else {\n for &a in aaa.iter() {\n while bbb.len() > 0 && bbb[0] < a {\n bbb = &bbb[1..];\n }\n if bbb.len() > 0 && a == bbb[0] {\n func(a);\n }\n }\n }\n }\n}\n\n#[inline(always)]\npub fn gallop_ge<'a, T: Ord>(mut slice: &'a [T], value: &T) -> &'a [T] {\n \/\/ if empty slice, or already >= element, return\n if slice.len() > 0 && &slice[0] < value {\n let mut step = 1;\n while step < slice.len() && &slice[step] < value {\n slice = &slice[step..];\n step = step << 1;\n }\n\n step = step >> 1;\n while step > 0 {\n if step < slice.len() && &slice[step] < value {\n slice = &slice[step..];\n }\n step = step >> 1;\n }\n\n slice = &slice[1..]; \/\/ advance one, as we always stayed < value\n }\n\n return slice;\n}\n\n\/\/ #[inline(always)]\n\/\/ pub fn gallop_gt<'a, T: Ord>(mut slice: &'a [T], value: &T) -> &'a [T] {\n\/\/ \/\/ if empty slice, or already > element, return\n\/\/ if slice.len() > 0 && &slice[0] <= value {\n\/\/ let mut step = 1;\n\/\/ while step < slice.len() && &slice[step] <= value {\n\/\/ slice = &slice[step..];\n\/\/ step = step << 1;\n\/\/ }\n\n\/\/ step = step >> 1;\n\/\/ while step > 0 {\n\/\/ if step < slice.len() && &slice[step] <= value {\n\/\/ slice = &slice[step..];\n\/\/ }\n\/\/ step = step >> 1;\n\/\/ }\n\n\/\/ slice = &slice[1..]; \/\/ advance one, as we always stayed <= value\n\/\/ }\n\n\/\/ return slice;\n\/\/ }<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example of the toplevel_ref_arg lint.<commit_after>#![feature(phase)]\n\n#[phase(plugin)]\nextern crate rust_clippy;\n\nfn the_answer(ref mut x: u8) {\n *x = 42;\n}\n\nfn main() {\n let mut x = 0;\n the_answer(x);\n println!(\"The answer is {}.\", x);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Implemented Response for IronResponse.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rename protocol to signature to be in line with microsoft spec and set the flags to 0<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ `NanoSecond` is a new name for `u64`.\ntype NanoSecond = u64;\ntype Inch = u64;\n\n\/\/ Use an attribute to silence warning.\n#[allow(non_camel_case_types)]\ntype u64_t = u64;\n\/\/ TODO ^ Try removing the attribute\n\n\/\/ Use an attribute to silence warnings\n#[allow(trivial_numeric_casts)]\nfn main() {\n \/\/ `NanoSecond` = `Inch` = `u64_t` = `u64`.\n let nanoseconds: NanoSecond = 5 as u64_t;\n let inches: Inch = 2 as u64_t;\n\n \/\/ Note that type aliases *don't* provide any extra type safety, because\n \/\/ aliases are *not* new types\n println!(\"{} nanoseconds + {} inches = {} unit?\",\n nanoseconds,\n inches,\n nanoseconds + inches);\n}\n<commit_msg>5.3: trivial_numeric_casts is now allowed by default<commit_after>\/\/ `NanoSecond` is a new name for `u64`.\ntype NanoSecond = u64;\ntype Inch = u64;\n\n\/\/ Use an attribute to silence warning.\n#[allow(non_camel_case_types)]\ntype u64_t = u64;\n\/\/ TODO ^ Try removing the attribute\n\nfn main() {\n \/\/ `NanoSecond` = `Inch` = `u64_t` = `u64`.\n let nanoseconds: NanoSecond = 5 as u64_t;\n let inches: Inch = 2 as u64_t;\n\n \/\/ Note that type aliases *don't* provide any extra type safety, because\n \/\/ aliases are *not* new types\n 
println!(\"{} nanoseconds + {} inches = {} unit?\",\n nanoseconds,\n inches,\n nanoseconds + inches);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>First major skeleton of game search DAG.<commit_after>use ::board;\nuse ::game;\n\nuse ::actions::Action;\n\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry;\nuse std::clone::Clone;\nuse std::default::Default;\nuse std::ops::RangeFrom;\n\n#[derive(Clone, Debug, Eq, PartialEq)]\nstruct ActionId(usize);\n\nimpl ActionId {\n fn as_usize(self) -> usize {\n let ActionId(value) = self;\n value\n }\n}\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\nstruct StateId(usize);\n\nimpl StateId {\n fn as_usize(self) -> usize {\n let StateId(value) = self;\n value\n }\n}\n\nstruct StateNamespace {\n state_id_generator: RangeFrom<usize>,\n states: HashMap<game::State, StateId>,\n}\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\nenum NamespaceInsertion {\n Present(StateId),\n New(StateId),\n}\n\nimpl NamespaceInsertion {\n fn unwrap(self) -> StateId {\n match self {\n NamespaceInsertion::Present(s) => s,\n NamespaceInsertion::New(s) => s,\n }\n }\n}\n\nimpl StateNamespace {\n fn new() -> Self {\n StateNamespace {\n state_id_generator: 0..,\n states: HashMap::new(),\n }\n }\n\n fn next_id(&mut self) -> StateId {\n match self.state_id_generator.next() {\n Some(id) => StateId(id),\n None => panic!(\"Exhausted state ID namespace\"),\n }\n }\n\n fn get_or_insert(&mut self, state: game::State) -> NamespaceInsertion {\n match self.states.entry(state) {\n Entry::Occupied(e) => NamespaceInsertion::Present(*e.get()),\n Entry::Vacant(e) => NamespaceInsertion::New(*e.insert(self.next_id())),\n }\n }\n\n fn get(&self, state: &game::State) -> Option<StateId> {\n self.states.get(state).map(|x| *x)\n }\n}\n\nstruct ActionNamespace {\n action_id_generator: RangeFrom<usize>,\n}\n\nimpl ActionNamespace {\n fn new() -> Self {\n ActionNamespace {\n action_id_generator: 0..,\n }\n }\n\n fn next_id(&mut self) -> ActionId {\n match self.action_id_generator.next() {\n Some(id) => ActionId(id),\n None => panic!(\"Exhausted action ID namespace\"),\n }\n }\n}\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\nenum ActionTarget {\n Unexpanded,\n Cycle,\n State(StateId),\n}\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\nstruct ActionArc {\n action: Action,\n source: StateId,\n target: ActionTarget,\n}\n\npub struct SearchGraph<S, A> where S: Default, A: Default {\n root_id: StateId,\n state_ids: StateNamespace,\n action_ids: ActionNamespace,\n \/\/ Also consider making edges into an intrusive linked list, with head an\n \/\/ Option<EdgeNode> and EdgeNode<A> { data: A, action: Action, target:\n \/\/ StateId, next: Option<EdgeNode> }.\n state_out_edges: Vec<Vec<ActionId>>, \/\/ Indexed by StateId.\n state_in_edges: Vec<Vec<ActionId>>, \/\/ Indexed by StateId.\n action_arcs: Vec<ActionArc>, \/\/ Indexed by ActionId.\n state_data: Vec<S>, \/\/ Indexed by StateId.\n action_data: Vec<A>, \/\/ Indexed by ActionId.\n}\n\nimpl<S, A> SearchGraph<S, A> where S: Default, A: Default {\n pub fn new(root_state: game::State) -> Self {\n let mut graph = SearchGraph {\n root_id: StateId(0),\n state_ids: StateNamespace::new(),\n action_ids: ActionNamespace::new(),\n state_out_edges: Vec::new(),\n state_in_edges: Vec::new(),\n action_arcs: Vec::new(),\n state_data: Vec::new(),\n action_data: Vec::new(),\n };\n graph.add_state(root_state);\n graph\n }\n\n fn add_state(&mut self, state: game::State) -> NamespaceInsertion {\n match 
self.state_ids.get_or_insert(state) {\n (state, NamespaceInsertion::New(state_id)) => {\n self.state_data.push(Default::default());\n self.state_out_edges.push(Vec::new());\n self.state_in_edges.push(Vec::new());\n for action in state.role_actions(state.active_player().role()) {\n self.add_action_arc(action, state_id, ActionTarget::Unexpanded);\n }\n assert!(self.state_data.len() == state_id.as_usize() + 1);\n assert!(self.state_out_edges.len() == state_id.as_usize() + 1);\n assert!(self.state_in_edges.len() == state_id.as_usize() + 1);\n NamespaceInsertion::New(state_id)\n },\n (_, insertion) => insertion,\n }\n }\n\n fn add_action_arc(&mut self, action: Action, source: StateId, target: ActionTarget) {\n let arc = ActionArc { action: action, source: source, target: target, };\n let action_id = self.action_ids.next_id();\n self.state_out_edges[source.as_usize()].push(action_id);\n self.action_arcs.push(arc);\n self.action_data.push(Default::default());\n assert!(self.action_arcs.len() == action_id.as_usize() + 1);\n assert!(self.action_data.len() == action_id.as_usize() + 1);\n }\n\n fn expand_action_target(&mut self, from_state: &game::State, id: ActionId) {\n let arc: &mut ActionArc = &mut self.action_arcs[id.as_usize()];\n match arc.target {\n ActionTarget::Unexpanded => {\n match self.state_ids.states.get(from_state) {\n Some(source_id) => {\n if *source_id != arc.source {\n panic!(\"Source state ID {:?} does not match action source {:?}\",\n source_id, arc.source);\n }\n let mut target_state = from_state.clone();\n target_state.do_action(&arc.action);\n match self.add_state(target_state) {\n NamespaceInsertion::New(target_id) =>\n self.state_in_edges[target_id.as_usize()].push(id),\n NamespaceInsertion::Present(target_id) => {\n if self.path_exists(target_id, *source_id) {\n arc.target = ActionTarget::Cycle;\n } else {\n arc.target = ActionTarget::State(target_id);\n }\n },\n }\n },\n None =>\n panic!(\"Source state supplied for action {:?} does not match known states\",\n arc),\n }\n },\n _ => panic!(\"Action {:?} already expanded\", arc),\n }\n }\n\n fn path_exists(self, source: StateId, target: StateId) -> bool {\n let mut frontier = vec![target];\n while !frontier.is_empty() {\n let state = frontier.pop().unwrap();\n if source == state {\n return true\n }\n for arc in self.state_out_edges[state.as_usize()].iter().map(|&x| &self.action_arcs[x.as_usize()]) {\n if let ActionTarget::State(target_id) = arc.target {\n frontier.push(target_id);\n }\n }\n }\n false\n }\n\n \/\/ pub fn get_state<'s>(&'s mut self, states: game::State) -> StateNode<'s, S, A> {\n \n \/\/ }\n}\n\npub struct StateNode<'a, S: 'a, A: 'a> where S: Default, A: Default {\n graph: &'a mut SearchGraph<S, A>,\n id: StateId,\n}\n\nimpl<'a, S: 'a, A: 'a> StateNode<'a, S, A> where S: Default, A: Default {\n fn child_actions(&self) -> &[ActionId] {\n &self.graph.state_out_edges[self.id.as_usize()]\n }\n\n fn parent_actions(&self) -> &[ActionId] {\n &self.graph.state_in_edges[self.id.as_usize()]\n }\n\n pub fn data(&self) -> &S {\n &self.graph.state_data[self.id.as_usize()]\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test case for the \"self\" region in enums<commit_after>enum int_wrapper {\n int_wrapper_ctor(&int)\n}\n\nfn main() {\n unsafe {\n let x = 3;\n let y = int_wrapper_ctor(&x);\n let z : ∫\n alt y {\n int_wrapper_ctor(zz) { z = zz; }\n }\n log(debug, *z);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tink-hybrid: add benchmark<commit_after>\/\/ Copyright 2021 The 
Tink-Rust Authors\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\/\/\n\n#![feature(test)]\nextern crate test;\nuse test::Bencher;\n\nconst MSG: &[u8] = b\"this data needs to be encrypted\";\nconst CONTEXT: &[u8] = b\"context\";\n\nfn setup(\n kt: tink_proto::KeyTemplate,\n) -> (\n Box<dyn tink_core::HybridEncrypt>,\n Box<dyn tink_core::HybridDecrypt>,\n Vec<u8>,\n) {\n tink_hybrid::init();\n let kh = tink_core::keyset::Handle::new(&kt).unwrap();\n let d = tink_hybrid::new_decrypt(&kh).unwrap();\n let pubkh = kh.public().unwrap();\n let e = tink_hybrid::new_encrypt(&pubkh).unwrap();\n let ct = e.encrypt(MSG, CONTEXT).unwrap();\n (e, d, ct)\n}\n\n\/\/\/ Size of the prefix information in the ciphertext. If this is corrupted, the tag will be\n\/\/\/ rejected immediately without performing any cryptographic operations.\nconst PREFIX_SIZE: usize = tink_core::cryptofmt::NON_RAW_PREFIX_SIZE;\n\nfn setup_failure(kt: tink_proto::KeyTemplate) -> (Box<dyn tink_core::HybridDecrypt>, Vec<u8>) {\n let (_e, d, ct) = setup(kt);\n (\n d,\n ct.iter()\n .enumerate()\n .map(|(i, b)| if i < PREFIX_SIZE { *b } else { b ^ 0b10101010 })\n .collect(),\n )\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_gcm_encrypt(b: &mut Bencher) {\n let (e, _d, _ct) = setup(tink_hybrid::ecies_hkdf_aes128_gcm_key_template());\n b.iter(|| e.encrypt(MSG, CONTEXT).unwrap())\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_gcm_decrypt(b: &mut Bencher) {\n let (_e, d, ct) = setup(tink_hybrid::ecies_hkdf_aes128_gcm_key_template());\n b.iter(|| d.decrypt(&ct, CONTEXT).unwrap())\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_ctr_hmac_encrypt(b: &mut Bencher) {\n let (e, _d, _ct) = setup(tink_hybrid::ecies_hkdf_aes128_ctr_hmac_sha256_key_template());\n b.iter(|| e.encrypt(MSG, CONTEXT).unwrap())\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_ctr_hmac_decrypt(b: &mut Bencher) {\n let (_e, d, ct) = setup(tink_hybrid::ecies_hkdf_aes128_ctr_hmac_sha256_key_template());\n b.iter(|| d.decrypt(&ct, CONTEXT).unwrap())\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_gcm_decrypt_fail(b: &mut Bencher) {\n let (d, ct) = setup_failure(tink_hybrid::ecies_hkdf_aes128_gcm_key_template());\n b.iter(|| d.decrypt(&ct, CONTEXT).unwrap_err())\n}\n\n#[bench]\nfn bench_ecies_hkdf_aes128_ctr_hmac_decrypt_fail(b: &mut Bencher) {\n let (d, ct) = setup_failure(tink_hybrid::ecies_hkdf_aes128_ctr_hmac_sha256_key_template());\n b.iter(|| d.decrypt(&ct, CONTEXT).unwrap_err())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>style: Check for border-image-* initial specified values when serializing border shorthand.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ xfail-boot\n\/\/ xfail-stage0\n\/\/ -*- rust -*-\n\n\/\/ Tests for if as expressions returning boxed types\n\nfn test_box() {\n auto res = if (true) { @100 } else { @101 };\n check (*res == 100);\n}\n\nfn test_str() {\n auto res = if (true) { \"happy\" } else { \"sad\" };\n check (res == \"happy\");\n}\n\nfn main() {\n test_box();\n test_str();\n}\n<commit_msg>Un-XFAIL 
expr-if-box.rs<commit_after>\/\/ xfail-boot\n\/\/ -*- rust -*-\n\n\/\/ Tests for if as expressions returning boxed types\n\nfn test_box() {\n auto res = if (true) { @100 } else { @101 };\n check (*res == 100);\n}\n\nfn test_str() {\n auto res = if (true) { \"happy\" } else { \"sad\" };\n check (res == \"happy\");\n}\n\nfn main() {\n test_box();\n test_str();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented linear search in rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Buffer array wrong item types<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unimplemented argument<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add regression test<commit_after>\/\/ Regression test for #56128. When this `pub(super) use...` gets\n\/\/ exploded in the HIR, we were not handling ids correctly.\n\nmod bar {\n pub(super) use self::baz::{x, y};\n\n mod baz {\n pub fn x() { }\n pub fn y() { }\n }\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before>iface to_str { fn to_str() -> str; }\n\nimpl of to_str for int {\n fn to_str() -> str { int::str(self) }\n}\nimpl of to_str for i8 {\n fn to_str() -> str { i8::str(self) }\n}\nimpl of to_str for i16 {\n fn to_str() -> str { i16::str(self) }\n}\nimpl of to_str for i32 {\n fn to_str() -> str { i32::str(self) }\n}\nimpl of to_str for i64 {\n fn to_str() -> str { i64::str(self) }\n}\nimpl of to_str for uint {\n fn to_str() -> str { uint::str(self) }\n}\nimpl of to_str for u8 {\n fn to_str() -> str { u8::str(self) }\n}\nimpl of to_str for u16 {\n fn to_str() -> str { u16::str(self) }\n}\nimpl of to_str for u32 {\n fn to_str() -> str { u32::str(self) }\n}\nimpl of to_str for u64 {\n fn to_str() -> str { u64::str(self) }\n}\nimpl of to_str for float {\n fn to_str() -> str { float::to_str(self, 4u) }\n}\nimpl of to_str for bool {\n fn to_str() -> str { bool::to_str(self) }\n}\nimpl of to_str for () {\n fn to_str() -> str { \"()\" }\n}\nimpl of to_str for str {\n fn to_str() -> str { self }\n}\n\nimpl <A: to_str copy, B: to_str copy> of to_str for (A, B) {\n fn to_str() -> str {\n let (a, b) = self;\n \"(\" + a.to_str() + \", \" + b.to_str() + \")\"\n }\n}\nimpl <A: to_str copy, B: to_str copy, C: to_str copy> of to_str for (A, B, C){\n fn to_str() -> str {\n let (a, b, c) = self;\n \"(\" + a.to_str() + \", \" + b.to_str() + \", \" + c.to_str() + \")\"\n }\n}\n\nimpl <A: to_str> of to_str for ~[A] {\n fn to_str() -> str {\n let mut acc = \"[\", first = true;\n for vec::each(self) |elt| {\n if first { first = false; }\n else { str::push_str(acc, \", \"); }\n str::push_str(acc, elt.to_str());\n }\n str::push_char(acc, ']');\n acc\n }\n}\n\nimpl <A: to_str> of to_str for @A {\n fn to_str() -> str { \"@\" + (*self).to_str() }\n}\nimpl <A: to_str> of to_str for ~A {\n fn to_str() -> str { \"~\" + (*self).to_str() }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn test_simple_types() {\n assert 1.to_str() == \"1\";\n assert (-1).to_str() == \"-1\";\n assert 200u.to_str() == \"200\";\n assert 2u8.to_str() == \"2\";\n assert true.to_str() == \"true\";\n assert false.to_str() == \"false\";\n assert ().to_str() == \"()\";\n assert \"hi\".to_str() == \"hi\";\n }\n\n #[test]\n fn test_tuple_types() {\n assert (1, 2).to_str() == \"(1, 2)\";\n assert (\"a\", \"b\", false).to_str() == \"(a, b, false)\";\n assert ((), ((), 100)).to_str() == \"((), ((), 100))\";\n }\n\n #[test]\n fn test_vectors() {\n let x: ~[int] = ~[];\n assert x.to_str() == \"~[]\";\n assert (~[1]).to_str() == \"~[1]\";\n assert (~[1, 
2, 3]).to_str() == \"~[1, 2, 3]\";\n assert (~[~[], ~[1], ~[1, 1]]).to_str() ==\n \"~[~[], ~[1], ~[1, 1]]\";\n }\n\n #[test]\n fn test_pointer_types() {\n assert (@1).to_str() == \"@1\";\n assert (~(true, false)).to_str() == \"~(true, false)\";\n }\n}\n<commit_msg>core: Ignore to_str::test_vectors. It's busted<commit_after>iface to_str { fn to_str() -> str; }\n\nimpl of to_str for int {\n fn to_str() -> str { int::str(self) }\n}\nimpl of to_str for i8 {\n fn to_str() -> str { i8::str(self) }\n}\nimpl of to_str for i16 {\n fn to_str() -> str { i16::str(self) }\n}\nimpl of to_str for i32 {\n fn to_str() -> str { i32::str(self) }\n}\nimpl of to_str for i64 {\n fn to_str() -> str { i64::str(self) }\n}\nimpl of to_str for uint {\n fn to_str() -> str { uint::str(self) }\n}\nimpl of to_str for u8 {\n fn to_str() -> str { u8::str(self) }\n}\nimpl of to_str for u16 {\n fn to_str() -> str { u16::str(self) }\n}\nimpl of to_str for u32 {\n fn to_str() -> str { u32::str(self) }\n}\nimpl of to_str for u64 {\n fn to_str() -> str { u64::str(self) }\n}\nimpl of to_str for float {\n fn to_str() -> str { float::to_str(self, 4u) }\n}\nimpl of to_str for bool {\n fn to_str() -> str { bool::to_str(self) }\n}\nimpl of to_str for () {\n fn to_str() -> str { \"()\" }\n}\nimpl of to_str for str {\n fn to_str() -> str { self }\n}\n\nimpl <A: to_str copy, B: to_str copy> of to_str for (A, B) {\n fn to_str() -> str {\n let (a, b) = self;\n \"(\" + a.to_str() + \", \" + b.to_str() + \")\"\n }\n}\nimpl <A: to_str copy, B: to_str copy, C: to_str copy> of to_str for (A, B, C){\n fn to_str() -> str {\n let (a, b, c) = self;\n \"(\" + a.to_str() + \", \" + b.to_str() + \", \" + c.to_str() + \")\"\n }\n}\n\nimpl <A: to_str> of to_str for ~[A] {\n fn to_str() -> str {\n let mut acc = \"[\", first = true;\n for vec::each(self) |elt| {\n if first { first = false; }\n else { str::push_str(acc, \", \"); }\n str::push_str(acc, elt.to_str());\n }\n str::push_char(acc, ']');\n acc\n }\n}\n\nimpl <A: to_str> of to_str for @A {\n fn to_str() -> str { \"@\" + (*self).to_str() }\n}\nimpl <A: to_str> of to_str for ~A {\n fn to_str() -> str { \"~\" + (*self).to_str() }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn test_simple_types() {\n assert 1.to_str() == \"1\";\n assert (-1).to_str() == \"-1\";\n assert 200u.to_str() == \"200\";\n assert 2u8.to_str() == \"2\";\n assert true.to_str() == \"true\";\n assert false.to_str() == \"false\";\n assert ().to_str() == \"()\";\n assert \"hi\".to_str() == \"hi\";\n }\n\n #[test]\n fn test_tuple_types() {\n assert (1, 2).to_str() == \"(1, 2)\";\n assert (\"a\", \"b\", false).to_str() == \"(a, b, false)\";\n assert ((), ((), 100)).to_str() == \"((), ((), 100))\";\n }\n\n #[test]\n #[ignore]\n fn test_vectors() {\n let x: ~[int] = ~[];\n assert x.to_str() == \"~[]\";\n assert (~[1]).to_str() == \"~[1]\";\n assert (~[1, 2, 3]).to_str() == \"~[1, 2, 3]\";\n assert (~[~[], ~[1], ~[1, 1]]).to_str() ==\n \"~[~[], ~[1], ~[1, 1]]\";\n }\n\n #[test]\n fn test_pointer_types() {\n assert (@1).to_str() == \"@1\";\n assert (~(true, false)).to_str() == \"~(true, false)\";\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*! Utility implementations of Reader and Writer *\/\n\nuse prelude::*;\nuse cmp;\nuse io;\nuse boxed::Box;\nuse slice::bytes::MutableByteVector;\n\n\/\/\/ Wraps a `Reader`, limiting the number of bytes that can be read from it.\npub struct LimitReader<R> {\n limit: uint,\n inner: R\n}\n\nimpl<R: Reader> LimitReader<R> {\n \/\/\/ Creates a new `LimitReader`\n pub fn new(r: R, limit: uint) -> LimitReader<R> {\n LimitReader { limit: limit, inner: r }\n }\n\n \/\/\/ Consumes the `LimitReader`, returning the underlying `Reader`.\n pub fn unwrap(self) -> R { self.inner }\n\n \/\/\/ Returns the number of bytes that can be read before the `LimitReader`\n \/\/\/ will return EOF.\n \/\/\/\n \/\/\/ # Note\n \/\/\/\n \/\/\/ The reader may reach EOF after reading fewer bytes than indicated by\n \/\/\/ this method if the underlying reader reaches EOF.\n pub fn limit(&self) -> uint { self.limit }\n}\n\nimpl<R: Reader> Reader for LimitReader<R> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n if self.limit == 0 {\n return Err(io::standard_error(io::EndOfFile));\n }\n\n let len = cmp::min(self.limit, buf.len());\n self.inner.read(buf.mut_slice_to(len)).map(|len| {\n self.limit -= len;\n len\n })\n }\n}\n\nimpl<R: Buffer> Buffer for LimitReader<R> {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n let amt = try!(self.inner.fill_buf());\n let buf = amt.slice_to(cmp::min(amt.len(), self.limit));\n if buf.len() == 0 {\n Err(io::standard_error(io::EndOfFile))\n } else {\n Ok(buf)\n }\n }\n\n fn consume(&mut self, amt: uint) {\n self.limit -= amt;\n self.inner.consume(amt);\n }\n\n}\n\n\/\/\/ A `Writer` which ignores bytes written to it, like \/dev\/null.\npub struct NullWriter;\n\nimpl Writer for NullWriter {\n #[inline]\n fn write(&mut self, _buf: &[u8]) -> io::IoResult<()> { Ok(()) }\n}\n\n\/\/\/ A `Reader` which returns an infinite stream of 0 bytes, like \/dev\/zero.\npub struct ZeroReader;\n\nimpl Reader for ZeroReader {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n buf.set_memory(0);\n Ok(buf.len())\n }\n}\n\nimpl Buffer for ZeroReader {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n static DATA: [u8, ..64] = [0, ..64];\n Ok(DATA.as_slice())\n }\n fn consume(&mut self, _amt: uint) {}\n}\n\n\/\/\/ A `Reader` which is always at EOF, like \/dev\/null.\npub struct NullReader;\n\nimpl Reader for NullReader {\n #[inline]\n fn read(&mut self, _buf: &mut [u8]) -> io::IoResult<uint> {\n Err(io::standard_error(io::EndOfFile))\n }\n}\n\nimpl Buffer for NullReader {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n Err(io::standard_error(io::EndOfFile))\n }\n fn consume(&mut self, _amt: uint) {}\n}\n\n\/\/\/ A `Writer` which multiplexes writes to a set of `Writers`.\npub struct MultiWriter {\n writers: Vec<Box<Writer>>\n}\n\nimpl MultiWriter {\n \/\/\/ Creates a new `MultiWriter`\n pub fn new(writers: Vec<Box<Writer>>) -> MultiWriter {\n MultiWriter { writers: writers }\n }\n}\n\nimpl Writer for MultiWriter {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::IoResult<()> {\n let 
mut ret = Ok(());\n for writer in self.writers.mut_iter() {\n ret = ret.and(writer.write(buf));\n }\n return ret;\n }\n\n #[inline]\n fn flush(&mut self) -> io::IoResult<()> {\n let mut ret = Ok(());\n for writer in self.writers.mut_iter() {\n ret = ret.and(writer.flush());\n }\n return ret;\n }\n}\n\n\/\/\/ A `Reader` which chains input from multiple `Readers`, reading each to\n\/\/\/ completion before moving onto the next.\npub struct ChainedReader<I, R> {\n readers: I,\n cur_reader: Option<R>,\n}\n\nimpl<R: Reader, I: Iterator<R>> ChainedReader<I, R> {\n \/\/\/ Creates a new `ChainedReader`\n pub fn new(mut readers: I) -> ChainedReader<I, R> {\n let r = readers.next();\n ChainedReader { readers: readers, cur_reader: r }\n }\n}\n\nimpl<R: Reader, I: Iterator<R>> Reader for ChainedReader<I, R> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n loop {\n let err = match self.cur_reader {\n Some(ref mut r) => {\n match r.read(buf) {\n Ok(len) => return Ok(len),\n Err(ref e) if e.kind == io::EndOfFile => None,\n Err(e) => Some(e),\n }\n }\n None => break\n };\n self.cur_reader = self.readers.next();\n match err {\n Some(e) => return Err(e),\n None => {}\n }\n }\n Err(io::standard_error(io::EndOfFile))\n }\n}\n\n\/\/\/ A `Reader` which forwards input from another `Reader`, passing it along to\n\/\/\/ a `Writer` as well. Similar to the `tee(1)` command.\npub struct TeeReader<R, W> {\n reader: R,\n writer: W,\n}\n\nimpl<R: Reader, W: Writer> TeeReader<R, W> {\n \/\/\/ Creates a new `TeeReader`\n pub fn new(r: R, w: W) -> TeeReader<R, W> {\n TeeReader { reader: r, writer: w }\n }\n\n \/\/\/ Consumes the `TeeReader`, returning the underlying `Reader` and\n \/\/\/ `Writer`.\n pub fn unwrap(self) -> (R, W) {\n let TeeReader { reader, writer } = self;\n (reader, writer)\n }\n}\n\nimpl<R: Reader, W: Writer> Reader for TeeReader<R, W> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n self.reader.read(buf).and_then(|len| {\n self.writer.write(buf.slice_to(len)).map(|()| len)\n })\n }\n}\n\n\/\/\/ Copies all data from a `Reader` to a `Writer`.\npub fn copy<R: Reader, W: Writer>(r: &mut R, w: &mut W) -> io::IoResult<()> {\n let mut buf = [0, ..super::DEFAULT_BUF_SIZE];\n loop {\n let len = match r.read(buf) {\n Ok(len) => len,\n Err(ref e) if e.kind == io::EndOfFile => return Ok(()),\n Err(e) => return Err(e),\n };\n try!(w.write(buf.slice_to(len)));\n }\n}\n\n\/\/\/ A `Reader` which converts an `Iterator<u8>` into a `Reader`.\npub struct IterReader<T> {\n iter: T,\n}\n\nimpl<T: Iterator<u8>> IterReader<T> {\n \/\/\/ Create a new `IterReader` which will read from the specified `Iterator`.\n pub fn new(iter: T) -> IterReader<T> {\n IterReader {\n iter: iter,\n }\n }\n}\n\nimpl<T: Iterator<u8>> Reader for IterReader<T> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n let mut len = 0;\n for (slot, elt) in buf.mut_iter().zip(self.iter.by_ref()) {\n *slot = elt;\n len += 1;\n }\n if len == 0 {\n Err(io::standard_error(io::EndOfFile))\n } else {\n Ok(len)\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use io::{MemReader, MemWriter, BufReader};\n use io;\n use boxed::Box;\n use super::*;\n use prelude::*;\n\n #[test]\n fn test_limit_reader_unlimited() {\n let mut r = MemReader::new(vec!(0, 1, 2));\n {\n let mut r = LimitReader::new(r.by_ref(), 4);\n assert_eq!(vec!(0, 1, 2), r.read_to_end().unwrap());\n }\n }\n\n #[test]\n fn test_limit_reader_limited() {\n let mut r = MemReader::new(vec!(0, 1, 2));\n {\n let mut r = LimitReader::new(r.by_ref(), 2);\n 
assert_eq!(vec!(0, 1), r.read_to_end().unwrap());\n }\n assert_eq!(vec!(2), r.read_to_end().unwrap());\n }\n\n #[test]\n fn test_limit_reader_limit() {\n let r = MemReader::new(vec!(0, 1, 2));\n let mut r = LimitReader::new(r, 3);\n assert_eq!(3, r.limit());\n assert_eq!(0, r.read_byte().unwrap());\n assert_eq!(2, r.limit());\n assert_eq!(vec!(1, 2), r.read_to_end().unwrap());\n assert_eq!(0, r.limit());\n }\n\n #[test]\n fn test_null_writer() {\n let mut s = NullWriter;\n let buf = vec![0, 0, 0];\n s.write(buf.as_slice()).unwrap();\n s.flush().unwrap();\n }\n\n #[test]\n fn test_zero_reader() {\n let mut s = ZeroReader;\n let mut buf = vec![1, 2, 3];\n assert_eq!(s.read(buf.as_mut_slice()), Ok(3));\n assert_eq!(vec![0, 0, 0], buf);\n }\n\n #[test]\n fn test_null_reader() {\n let mut r = NullReader;\n let mut buf = vec![0];\n assert!(r.read(buf.as_mut_slice()).is_err());\n }\n\n #[test]\n fn test_multi_writer() {\n static mut writes: uint = 0;\n static mut flushes: uint = 0;\n\n struct TestWriter;\n impl Writer for TestWriter {\n fn write(&mut self, _buf: &[u8]) -> io::IoResult<()> {\n unsafe { writes += 1 }\n Ok(())\n }\n\n fn flush(&mut self) -> io::IoResult<()> {\n unsafe { flushes += 1 }\n Ok(())\n }\n }\n\n let mut multi = MultiWriter::new(vec!(box TestWriter as Box<Writer>,\n box TestWriter as Box<Writer>));\n multi.write([1, 2, 3]).unwrap();\n assert_eq!(2, unsafe { writes });\n assert_eq!(0, unsafe { flushes });\n multi.flush().unwrap();\n assert_eq!(2, unsafe { writes });\n assert_eq!(2, unsafe { flushes });\n }\n\n #[test]\n fn test_chained_reader() {\n let rs = vec!(MemReader::new(vec!(0, 1)), MemReader::new(vec!()),\n MemReader::new(vec!(2, 3)));\n let mut r = ChainedReader::new(rs.move_iter());\n assert_eq!(vec!(0, 1, 2, 3), r.read_to_end().unwrap());\n }\n\n #[test]\n fn test_tee_reader() {\n let mut r = TeeReader::new(MemReader::new(vec!(0, 1, 2)),\n MemWriter::new());\n assert_eq!(vec!(0, 1, 2), r.read_to_end().unwrap());\n let (_, w) = r.unwrap();\n assert_eq!(vec!(0, 1, 2), w.unwrap());\n }\n\n #[test]\n fn test_copy() {\n let mut r = MemReader::new(vec!(0, 1, 2, 3, 4));\n let mut w = MemWriter::new();\n copy(&mut r, &mut w).unwrap();\n assert_eq!(vec!(0, 1, 2, 3, 4), w.unwrap());\n }\n\n #[test]\n fn limit_reader_buffer() {\n let data = \"0123456789\\n0123456789\\n\";\n let mut r = BufReader::new(data.as_bytes());\n {\n let mut r = LimitReader::new(r.by_ref(), 3);\n assert_eq!(r.read_line(), Ok(\"012\".to_string()));\n assert_eq!(r.limit(), 0);\n assert_eq!(r.read_line().err().unwrap().kind, io::EndOfFile);\n }\n {\n let mut r = LimitReader::new(r.by_ref(), 9);\n assert_eq!(r.read_line(), Ok(\"3456789\\n\".to_string()));\n assert_eq!(r.limit(), 1);\n assert_eq!(r.read_line(), Ok(\"0\".to_string()));\n }\n {\n let mut r = LimitReader::new(r.by_ref(), 100);\n assert_eq!(r.read_char(), Ok('1'));\n assert_eq!(r.limit(), 99);\n assert_eq!(r.read_line(), Ok(\"23456789\\n\".to_string()));\n }\n }\n\n #[test]\n fn test_iter_reader() {\n let mut r = IterReader::new(range(0u8, 8));\n let mut buf = [0, 0, 0];\n let len = r.read(buf).unwrap();\n assert_eq!(len, 3);\n assert!(buf == [0, 1, 2]);\n\n let len = r.read(buf).unwrap();\n assert_eq!(len, 3);\n assert!(buf == [3, 4, 5]);\n\n let len = r.read(buf).unwrap();\n assert_eq!(len, 2);\n assert!(buf == [6, 7, 5]);\n\n assert_eq!(r.read(buf).unwrap_err().kind, io::EndOfFile);\n }\n}\n<commit_msg>auto merge of #16513 : sfackler\/rust\/io-util-cleanup, r=alexcrichton<commit_after>\/\/ Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*! Utility implementations of Reader and Writer *\/\n\nuse prelude::*;\nuse cmp;\nuse io;\nuse boxed::Box;\nuse slice::bytes::MutableByteVector;\n\n\/\/\/ Wraps a `Reader`, limiting the number of bytes that can be read from it.\npub struct LimitReader<R> {\n limit: uint,\n inner: R\n}\n\nimpl<R: Reader> LimitReader<R> {\n \/\/\/ Creates a new `LimitReader`\n pub fn new(r: R, limit: uint) -> LimitReader<R> {\n LimitReader { limit: limit, inner: r }\n }\n\n \/\/\/ Consumes the `LimitReader`, returning the underlying `Reader`.\n pub fn unwrap(self) -> R { self.inner }\n\n \/\/\/ Returns the number of bytes that can be read before the `LimitReader`\n \/\/\/ will return EOF.\n \/\/\/\n \/\/\/ # Note\n \/\/\/\n \/\/\/ The reader may reach EOF after reading fewer bytes than indicated by\n \/\/\/ this method if the underlying reader reaches EOF.\n pub fn limit(&self) -> uint { self.limit }\n}\n\nimpl<R: Reader> Reader for LimitReader<R> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n if self.limit == 0 {\n return Err(io::standard_error(io::EndOfFile));\n }\n\n let len = cmp::min(self.limit, buf.len());\n let res = self.inner.read(buf.mut_slice_to(len));\n match res {\n Ok(len) => self.limit -= len,\n _ => {}\n }\n res\n }\n}\n\nimpl<R: Buffer> Buffer for LimitReader<R> {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n let amt = try!(self.inner.fill_buf());\n let buf = amt.slice_to(cmp::min(amt.len(), self.limit));\n if buf.len() == 0 {\n Err(io::standard_error(io::EndOfFile))\n } else {\n Ok(buf)\n }\n }\n\n fn consume(&mut self, amt: uint) {\n \/\/ Don't let callers reset the limit by passing an overlarge value\n let amt = cmp::min(amt, self.limit);\n self.limit -= amt;\n self.inner.consume(amt);\n }\n\n}\n\n\/\/\/ A `Writer` which ignores bytes written to it, like \/dev\/null.\npub struct NullWriter;\n\nimpl Writer for NullWriter {\n #[inline]\n fn write(&mut self, _buf: &[u8]) -> io::IoResult<()> { Ok(()) }\n}\n\n\/\/\/ A `Reader` which returns an infinite stream of 0 bytes, like \/dev\/zero.\npub struct ZeroReader;\n\nimpl Reader for ZeroReader {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n buf.set_memory(0);\n Ok(buf.len())\n }\n}\n\nimpl Buffer for ZeroReader {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n static DATA: [u8, ..64] = [0, ..64];\n Ok(DATA.as_slice())\n }\n\n fn consume(&mut self, _amt: uint) {}\n}\n\n\/\/\/ A `Reader` which is always at EOF, like \/dev\/null.\npub struct NullReader;\n\nimpl Reader for NullReader {\n #[inline]\n fn read(&mut self, _buf: &mut [u8]) -> io::IoResult<uint> {\n Err(io::standard_error(io::EndOfFile))\n }\n}\n\nimpl Buffer for NullReader {\n fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> {\n Err(io::standard_error(io::EndOfFile))\n }\n fn consume(&mut self, _amt: uint) {}\n}\n\n\/\/\/ A `Writer` which multiplexes writes to a set of `Writer`s.\n\/\/\/\n\/\/\/ The `Writer`s are delegated to in order. 
If any `Writer` returns an error,\n\/\/\/ that error is returned immediately and remaining `Writer`s are not called.\npub struct MultiWriter {\n writers: Vec<Box<Writer>>\n}\n\nimpl MultiWriter {\n \/\/\/ Creates a new `MultiWriter`\n pub fn new(writers: Vec<Box<Writer>>) -> MultiWriter {\n MultiWriter { writers: writers }\n }\n}\n\nimpl Writer for MultiWriter {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::IoResult<()> {\n for writer in self.writers.mut_iter() {\n try!(writer.write(buf));\n }\n Ok(())\n }\n\n #[inline]\n fn flush(&mut self) -> io::IoResult<()> {\n for writer in self.writers.mut_iter() {\n try!(writer.flush());\n }\n Ok(())\n }\n}\n\n\/\/\/ A `Reader` which chains input from multiple `Reader`s, reading each to\n\/\/\/ completion before moving onto the next.\npub struct ChainedReader<I, R> {\n readers: I,\n cur_reader: Option<R>,\n}\n\nimpl<R: Reader, I: Iterator<R>> ChainedReader<I, R> {\n \/\/\/ Creates a new `ChainedReader`\n pub fn new(mut readers: I) -> ChainedReader<I, R> {\n let r = readers.next();\n ChainedReader { readers: readers, cur_reader: r }\n }\n}\n\nimpl<R: Reader, I: Iterator<R>> Reader for ChainedReader<I, R> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n loop {\n let err = match self.cur_reader {\n Some(ref mut r) => {\n match r.read(buf) {\n Ok(len) => return Ok(len),\n Err(ref e) if e.kind == io::EndOfFile => None,\n Err(e) => Some(e),\n }\n }\n None => break\n };\n self.cur_reader = self.readers.next();\n match err {\n Some(e) => return Err(e),\n None => {}\n }\n }\n Err(io::standard_error(io::EndOfFile))\n }\n}\n\n\/\/\/ A `Reader` which forwards input from another `Reader`, passing it along to\n\/\/\/ a `Writer` as well. Similar to the `tee(1)` command.\npub struct TeeReader<R, W> {\n reader: R,\n writer: W,\n}\n\nimpl<R: Reader, W: Writer> TeeReader<R, W> {\n \/\/\/ Creates a new `TeeReader`\n pub fn new(r: R, w: W) -> TeeReader<R, W> {\n TeeReader { reader: r, writer: w }\n }\n\n \/\/\/ Consumes the `TeeReader`, returning the underlying `Reader` and\n \/\/\/ `Writer`.\n pub fn unwrap(self) -> (R, W) {\n let TeeReader { reader, writer } = self;\n (reader, writer)\n }\n}\n\nimpl<R: Reader, W: Writer> Reader for TeeReader<R, W> {\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n self.reader.read(buf).and_then(|len| {\n self.writer.write(buf.slice_to(len)).map(|()| len)\n })\n }\n}\n\n\/\/\/ Copies all data from a `Reader` to a `Writer`.\npub fn copy<R: Reader, W: Writer>(r: &mut R, w: &mut W) -> io::IoResult<()> {\n let mut buf = [0, ..super::DEFAULT_BUF_SIZE];\n loop {\n let len = match r.read(buf) {\n Ok(len) => len,\n Err(ref e) if e.kind == io::EndOfFile => return Ok(()),\n Err(e) => return Err(e),\n };\n try!(w.write(buf.slice_to(len)));\n }\n}\n\n\/\/\/ An adaptor converting an `Iterator<u8>` to a `Reader`.\npub struct IterReader<T> {\n iter: T,\n}\n\nimpl<T: Iterator<u8>> IterReader<T> {\n \/\/\/ Creates a new `IterReader` which will read from the specified\n \/\/\/ `Iterator`.\n pub fn new(iter: T) -> IterReader<T> {\n IterReader { iter: iter }\n }\n}\n\nimpl<T: Iterator<u8>> Reader for IterReader<T> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {\n let mut len = 0;\n for (slot, elt) in buf.mut_iter().zip(self.iter.by_ref()) {\n *slot = elt;\n len += 1;\n }\n if len == 0 && buf.len() != 0 {\n Err(io::standard_error(io::EndOfFile))\n } else {\n Ok(len)\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use io::{MemReader, MemWriter, BufReader};\n use io;\n use boxed::Box;\n use super::*;\n 
use prelude::*;\n\n #[test]\n fn test_limit_reader_unlimited() {\n let mut r = MemReader::new(vec!(0, 1, 2));\n {\n let mut r = LimitReader::new(r.by_ref(), 4);\n assert_eq!(vec!(0, 1, 2), r.read_to_end().unwrap());\n }\n }\n\n #[test]\n fn test_limit_reader_limited() {\n let mut r = MemReader::new(vec!(0, 1, 2));\n {\n let mut r = LimitReader::new(r.by_ref(), 2);\n assert_eq!(vec!(0, 1), r.read_to_end().unwrap());\n }\n assert_eq!(vec!(2), r.read_to_end().unwrap());\n }\n\n #[test]\n fn test_limit_reader_limit() {\n let r = MemReader::new(vec!(0, 1, 2));\n let mut r = LimitReader::new(r, 3);\n assert_eq!(3, r.limit());\n assert_eq!(0, r.read_byte().unwrap());\n assert_eq!(2, r.limit());\n assert_eq!(vec!(1, 2), r.read_to_end().unwrap());\n assert_eq!(0, r.limit());\n }\n\n #[test]\n fn test_limit_reader_overlong_consume() {\n let mut r = MemReader::new(vec![0, 1, 2, 3, 4, 5]);\n let mut r = LimitReader::new(r.by_ref(), 1);\n r.consume(2);\n assert_eq!(vec![], r.read_to_end().unwrap());\n }\n\n #[test]\n fn test_null_writer() {\n let mut s = NullWriter;\n let buf = vec![0, 0, 0];\n s.write(buf.as_slice()).unwrap();\n s.flush().unwrap();\n }\n\n #[test]\n fn test_zero_reader() {\n let mut s = ZeroReader;\n let mut buf = vec![1, 2, 3];\n assert_eq!(s.read(buf.as_mut_slice()), Ok(3));\n assert_eq!(vec![0, 0, 0], buf);\n }\n\n #[test]\n fn test_null_reader() {\n let mut r = NullReader;\n let mut buf = vec![0];\n assert!(r.read(buf.as_mut_slice()).is_err());\n }\n\n #[test]\n fn test_multi_writer() {\n static mut writes: uint = 0;\n static mut flushes: uint = 0;\n\n struct TestWriter;\n impl Writer for TestWriter {\n fn write(&mut self, _buf: &[u8]) -> io::IoResult<()> {\n unsafe { writes += 1 }\n Ok(())\n }\n\n fn flush(&mut self) -> io::IoResult<()> {\n unsafe { flushes += 1 }\n Ok(())\n }\n }\n\n let mut multi = MultiWriter::new(vec!(box TestWriter as Box<Writer>,\n box TestWriter as Box<Writer>));\n multi.write([1, 2, 3]).unwrap();\n assert_eq!(2, unsafe { writes });\n assert_eq!(0, unsafe { flushes });\n multi.flush().unwrap();\n assert_eq!(2, unsafe { writes });\n assert_eq!(2, unsafe { flushes });\n }\n\n #[test]\n fn test_chained_reader() {\n let rs = vec!(MemReader::new(vec!(0, 1)), MemReader::new(vec!()),\n MemReader::new(vec!(2, 3)));\n let mut r = ChainedReader::new(rs.move_iter());\n assert_eq!(vec!(0, 1, 2, 3), r.read_to_end().unwrap());\n }\n\n #[test]\n fn test_tee_reader() {\n let mut r = TeeReader::new(MemReader::new(vec!(0, 1, 2)),\n MemWriter::new());\n assert_eq!(vec!(0, 1, 2), r.read_to_end().unwrap());\n let (_, w) = r.unwrap();\n assert_eq!(vec!(0, 1, 2), w.unwrap());\n }\n\n #[test]\n fn test_copy() {\n let mut r = MemReader::new(vec!(0, 1, 2, 3, 4));\n let mut w = MemWriter::new();\n copy(&mut r, &mut w).unwrap();\n assert_eq!(vec!(0, 1, 2, 3, 4), w.unwrap());\n }\n\n #[test]\n fn limit_reader_buffer() {\n let data = \"0123456789\\n0123456789\\n\";\n let mut r = BufReader::new(data.as_bytes());\n {\n let mut r = LimitReader::new(r.by_ref(), 3);\n assert_eq!(r.read_line(), Ok(\"012\".to_string()));\n assert_eq!(r.limit(), 0);\n assert_eq!(r.read_line().err().unwrap().kind, io::EndOfFile);\n }\n {\n let mut r = LimitReader::new(r.by_ref(), 9);\n assert_eq!(r.read_line(), Ok(\"3456789\\n\".to_string()));\n assert_eq!(r.limit(), 1);\n assert_eq!(r.read_line(), Ok(\"0\".to_string()));\n }\n {\n let mut r = LimitReader::new(r.by_ref(), 100);\n assert_eq!(r.read_char(), Ok('1'));\n assert_eq!(r.limit(), 99);\n assert_eq!(r.read_line(), Ok(\"23456789\\n\".to_string()));\n }\n 
}\n\n #[test]\n fn test_iter_reader() {\n let mut r = IterReader::new(range(0u8, 8));\n let mut buf = [0, 0, 0];\n let len = r.read(buf).unwrap();\n assert_eq!(len, 3);\n assert!(buf == [0, 1, 2]);\n\n let len = r.read(buf).unwrap();\n assert_eq!(len, 3);\n assert!(buf == [3, 4, 5]);\n\n let len = r.read(buf).unwrap();\n assert_eq!(len, 2);\n assert!(buf == [6, 7, 5]);\n\n assert_eq!(r.read(buf).unwrap_err().kind, io::EndOfFile);\n }\n\n #[test]\n fn iter_reader_zero_length() {\n let mut r = IterReader::new(range(0u8, 8));\n let mut buf = [];\n assert_eq!(Ok(0), r.read(buf));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Query and calculate time offsets based on a specified date.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename functions to actions.<commit_after><|endoftext|>"} {"text":"<commit_before>use common::string::*;\nuse common::vec::*;\n\nuse syscall::call::*;\n\n\/\/\/ File seek\npub enum Seek {\n \/\/\/ The start point\n Start(usize),\n \/\/\/ The current point\n Current(isize),\n \/\/\/ The end point\n End(isize),\n}\n\n\/\/\/ A Unix-style file\npub struct File {\n \/\/\/ The path to the file\n path: String,\n \/\/\/ The id for the file\n fd: usize,\n}\n\nimpl File {\n \/\/\/ Open a new file using a path\n \/\/ TODO: Why &String and not String\n \/\/ TODO: Return Option<File>\n pub fn open(path: &String) -> File {\n unsafe {\n let c_str: *const u8 = path.to_c_str();\n let ret = File {\n path: path.clone(),\n fd: sys_open(c_str, 0, 0),\n };\n sys_unalloc(c_str as usize);\n ret\n }\n }\n\n \/\/\/ Return the url to the file\n pub fn url(&self) -> String {\n \/\/TODO\n self.path.clone()\n }\n\n\n \/\/\/ Write to the file\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n unsafe {\n let count = sys_write(self.fd, buf.as_ptr(), buf.len());\n if count == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(count)\n }\n }\n }\n\n \/\/\/ Seek a given position\n pub fn seek(&mut self, pos: Seek) -> Option<usize> {\n let (whence, offset) = match pos {\n Seek::Start(offset) => (0, offset as isize),\n Seek::Current(offset) => (1, offset),\n Seek::End(offset) => (2, offset),\n };\n\n let position = unsafe { sys_lseek(self.fd, offset, whence) };\n if position == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(position)\n }\n }\n\n \/\/\/ Flush the io\n pub fn sync(&mut self) -> bool {\n unsafe { sys_fsync(self.fd) == 0 }\n }\n}\n\npub trait Read {\n\n \/\/\/ Read a file to a buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize>;\n\n \/\/\/ Read the file to the end\n fn read_to_end(&mut self, vec: &mut Vec<u8>) -> Option<usize> {\n let mut read = 0;\n loop {\n let mut bytes = [0; 1024];\n match self.read(&mut bytes) {\n Option::Some(0) => return Option::Some(read),\n Option::None => return Option::None,\n Option::Some(count) => {\n for i in 0..count {\n vec.push(bytes[i]);\n }\n read += count;\n }\n }\n }\n }\n \/\/ \/\/\/ Return an iterator of the bytes\n \/\/fn bytes(&'a mut self) -> BytesIter<'a> {\n \/\/ BytesIter {\n \/\/ reader: self,\n \/\/ }\n \/\/}\n}\n\nimpl Read for File {\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n unsafe {\n let count = sys_read(self.fd, buf.as_mut_ptr(), buf.len());\n if count == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(count)\n }\n }\n }\n}\n\nimpl Drop for File {\n fn drop(&mut self) {\n unsafe {\n sys_close(self.fd);\n }\n }\n}\n<commit_msg>Add butes method<commit_after>use common::string::*;\nuse common::vec::*;\n\nuse syscall::call::*;\n\n\/\/\/ File seek\npub enum Seek {\n 
\/\/\/ The start point\n Start(usize),\n \/\/\/ The current point\n Current(isize),\n \/\/\/ The end point\n End(isize),\n}\n\n\/\/\/ A Unix-style file\npub struct File {\n \/\/\/ The path to the file\n path: String,\n \/\/\/ The id for the file\n fd: usize,\n}\n\nimpl File {\n \/\/\/ Open a new file using a path\n \/\/ TODO: Why &String and not String\n \/\/ TODO: Return Option<File>\n pub fn open(path: &String) -> File {\n unsafe {\n let c_str: *const u8 = path.to_c_str();\n let ret = File {\n path: path.clone(),\n fd: sys_open(c_str, 0, 0),\n };\n sys_unalloc(c_str as usize);\n ret\n }\n }\n\n \/\/\/ Return the url to the file\n pub fn url(&self) -> String {\n \/\/TODO\n self.path.clone()\n }\n\n\n \/\/\/ Write to the file\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n unsafe {\n let count = sys_write(self.fd, buf.as_ptr(), buf.len());\n if count == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(count)\n }\n }\n }\n\n \/\/\/ Seek a given position\n pub fn seek(&mut self, pos: Seek) -> Option<usize> {\n let (whence, offset) = match pos {\n Seek::Start(offset) => (0, offset as isize),\n Seek::Current(offset) => (1, offset),\n Seek::End(offset) => (2, offset),\n };\n\n let position = unsafe { sys_lseek(self.fd, offset, whence) };\n if position == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(position)\n }\n }\n\n \/\/\/ Flush the io\n pub fn sync(&mut self) -> bool {\n unsafe { sys_fsync(self.fd) == 0 }\n }\n}\n\npub trait Read {\n\n \/\/\/ Read a file to a buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize>;\n\n \/\/\/ Read the file to the end\n fn read_to_end(&mut self, vec: &mut Vec<u8>) -> Option<usize> {\n let mut read = 0;\n loop {\n let mut bytes = [0; 1024];\n match self.read(&mut bytes) {\n Option::Some(0) => return Option::Some(read),\n Option::None => return Option::None,\n Option::Some(count) => {\n for i in 0..count {\n vec.push(bytes[i]);\n }\n read += count;\n }\n }\n }\n }\n \/\/\/ Return an iterator of the bytes\n fn bytes(&'a mut self) -> VecIterator<'a, u8> {\n \/\/ TODO: This is only a temporary implementation. 
Make this read one byte at a time.\n let buf = Vec::new();\n self.read_to_end(&mut buf);\n\n buf.iter()\n }\n}\n\nimpl Read for File {\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n unsafe {\n let count = sys_read(self.fd, buf.as_mut_ptr(), buf.len());\n if count == 0xFFFFFFFF {\n Option::None\n } else {\n Option::Some(count)\n }\n }\n }\n}\n\nimpl Drop for File {\n fn drop(&mut self) {\n unsafe {\n sys_close(self.fd);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>continued demacrofication<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>examples: Add who_eats_my_hard_drive example<commit_after>extern crate rs_release;\n\nuse std::process::Command;\n\n#[derive(Debug)]\nenum Error {\n UnknownOs,\n ReadError,\n}\n\nfn get_os_id() -> Result<String, Error> {\n match rs_release::get_os_release() {\n Err(_) => Err(Error::ReadError),\n Ok(mut os_release) => os_release.remove(\"ID\").ok_or(Error::UnknownOs)\n }\n}\n\n\/\/ https:\/\/blog.tinned-software.net\/show-installed-yum-packages-by-size\/\nfn show_fedora_packages() {\n let mut command = Command::new(\"rpm\");\n\n command.arg(\"--query\");\n command.arg(\"--all\");\n command.arg(\"--queryformat\");\n command.arg(\"%10{size} - %-25{name} \\t %{version}\\n\");\n\n if let Err(e) = command.spawn() {\n println!(\"ERROR running rpm: {:?}\", e);\n }\n}\n\n\/\/ http:\/\/www.commandlinefu.com\/commands\/view\/3842\/list-your-largest-installed-packages-on-debianubuntu\nfn show_debian_packages() {\n let mut command = Command::new(\"dpkg-query\");\n\n command.arg(\"--show\");\n command.arg(\"--showformat\");\n command.arg(\"${Installed-Size}\\t${Package}\\n\");\n\n if let Err(e) = command.spawn() {\n println!(\"ERROR running dpkg-query: {:?}\", e);\n }\n}\n\nfn main() {\n match get_os_id() {\n Ok(id) => match id.as_str() {\n \"fedora\" => show_fedora_packages(),\n \"debian\" => show_debian_packages(),\n _ => println!(\"ERROR: {:?}\", Error::UnknownOs),\n },\n Err(e) => println!(\"ERROR: {:?}\", e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The compiler code necessary for `#[derive(Decodable)]`. 
See encodable.rs for more.\n\nuse ast;\nuse ast::{MetaItem, Item, Expr, MutMutable};\nuse codemap::Span;\nuse ext::base::ExtCtxt;\nuse ext::build::AstBuilder;\nuse ext::deriving::generic::*;\nuse ext::deriving::generic::ty::*;\nuse parse::token::InternedString;\nuse parse::token;\nuse ptr::P;\n\npub fn expand_deriving_rustc_decodable<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F) where\n F: FnOnce(P<Item>),\n{\n expand_deriving_decodable_imp(cx, span, mitem, item, push, \"rustc_serialize\")\n}\n\npub fn expand_deriving_decodable<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F) where\n F: FnOnce(P<Item>),\n{\n expand_deriving_decodable_imp(cx, span, mitem, item, push, \"serialize\")\n}\n\nfn expand_deriving_decodable_imp<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F,\n krate: &'static str) where\n F: FnOnce(P<Item>),\n{\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: Path::new_(vec!(krate, \"Decodable\"), None, vec!(), true),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n methods: vec!(\n MethodDef {\n name: \"decode\",\n generics: LifetimeBounds {\n lifetimes: Vec::new(),\n bounds: vec!((\"__D\", vec!(Path::new_(\n vec!(krate, \"Decoder\"), None,\n vec!(), true))))\n },\n explicit_self: None,\n args: vec!(Ptr(box Literal(Path::new_local(\"__D\")),\n Borrowed(None, MutMutable))),\n ret_ty: Literal(Path::new_(\n vec!(\"std\", \"result\", \"Result\"),\n None,\n vec!(box Self, box Literal(Path::new_(\n vec![\"__D\", \"Error\"], None, vec![], false\n ))),\n true\n )),\n attributes: Vec::new(),\n combine_substructure: combine_substructure(box |a, b, c| {\n decodable_substructure(a, b, c, krate)\n }),\n })\n };\n\n trait_def.expand(cx, mitem, item, push)\n}\n\nfn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,\n substr: &Substructure,\n krate: &str) -> P<Expr> {\n let decoder = substr.nonself_args[0].clone();\n let recurse = vec!(cx.ident_of(krate),\n cx.ident_of(\"Decodable\"),\n cx.ident_of(\"decode\"));\n \/\/ throw an underscore in front to suppress unused variable warnings\n let blkarg = cx.ident_of(\"_d\");\n let blkdecoder = cx.expr_ident(trait_span, blkarg);\n let calldecode = cx.expr_call_global(trait_span, recurse, vec!(blkdecoder.clone()));\n let lambdadecode = cx.lambda_expr_1(trait_span, calldecode, blkarg);\n\n return match *substr.fields {\n StaticStruct(_, ref summary) => {\n let nfields = match *summary {\n Unnamed(ref fields) => fields.len(),\n Named(ref fields) => fields.len()\n };\n let read_struct_field = cx.ident_of(\"read_struct_field\");\n\n let path = cx.path_ident(trait_span, substr.type_ident);\n let result = decode_static_fields(cx,\n trait_span,\n path,\n summary,\n |cx, span, name, field| {\n cx.expr_try(span,\n cx.expr_method_call(span, blkdecoder.clone(), read_struct_field,\n vec!(cx.expr_str(span, name),\n cx.expr_uint(span, field),\n lambdadecode.clone())))\n });\n let result = cx.expr_ok(trait_span, result);\n cx.expr_method_call(trait_span,\n decoder,\n cx.ident_of(\"read_struct\"),\n vec!(\n cx.expr_str(trait_span, token::get_ident(substr.type_ident)),\n cx.expr_uint(trait_span, nfields),\n cx.lambda_expr_1(trait_span, result, blkarg)\n ))\n }\n StaticEnum(_, ref fields) => {\n let variant = cx.ident_of(\"i\");\n\n let mut arms = Vec::new();\n let mut variants = Vec::new();\n let rvariant_arg = cx.ident_of(\"read_enum_variant_arg\");\n\n for (i, &(name, v_span, ref parts)) in 
fields.iter().enumerate() {\n variants.push(cx.expr_str(v_span, token::get_ident(name)));\n\n let path = cx.path(trait_span, vec![substr.type_ident, name]);\n let decoded = decode_static_fields(cx,\n v_span,\n path,\n parts,\n |cx, span, _, field| {\n let idx = cx.expr_uint(span, field);\n cx.expr_try(span,\n cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg,\n vec!(idx, lambdadecode.clone())))\n });\n\n arms.push(cx.arm(v_span,\n vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))),\n decoded));\n }\n\n arms.push(cx.arm_unreachable(trait_span));\n\n let result = cx.expr_ok(trait_span,\n cx.expr_match(trait_span,\n cx.expr_ident(trait_span, variant), arms));\n let lambda = cx.lambda_expr(trait_span, vec!(blkarg, variant), result);\n let variant_vec = cx.expr_vec(trait_span, variants);\n let variant_vec = cx.expr_addr_of(trait_span, variant_vec);\n let result = cx.expr_method_call(trait_span, blkdecoder,\n cx.ident_of(\"read_enum_variant\"),\n vec!(variant_vec, lambda));\n cx.expr_method_call(trait_span,\n decoder,\n cx.ident_of(\"read_enum\"),\n vec!(\n cx.expr_str(trait_span, token::get_ident(substr.type_ident)),\n cx.lambda_expr_1(trait_span, result, blkarg)\n ))\n }\n _ => cx.bug(\"expected StaticEnum or StaticStruct in deriving(Decodable)\")\n };\n}\n\n\/\/\/ Create a decoder for a single enum variant\/struct:\n\/\/\/ - `outer_pat_path` is the path to this enum variant\/struct\n\/\/\/ - `getarg` should retrieve the `uint`-th field with name `@str`.\nfn decode_static_fields<F>(cx: &mut ExtCtxt,\n trait_span: Span,\n outer_pat_path: ast::Path,\n fields: &StaticFields,\n mut getarg: F)\n -> P<Expr> where\n F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,\n{\n match *fields {\n Unnamed(ref fields) => {\n let path_expr = cx.expr_path(outer_pat_path);\n if fields.is_empty() {\n path_expr\n } else {\n let fields = fields.iter().enumerate().map(|(i, &span)| {\n getarg(cx, span,\n token::intern_and_get_ident(&format!(\"_field{}\", i)[]),\n i)\n }).collect();\n\n cx.expr_call(trait_span, path_expr, fields)\n }\n }\n Named(ref fields) => {\n \/\/ use the field's span to get nicer error messages.\n let fields = fields.iter().enumerate().map(|(i, &(name, span))| {\n let arg = getarg(cx, span, token::get_ident(name), i);\n cx.field_imm(span, name, arg)\n }).collect();\n cx.expr_struct(trait_span, outer_pat_path, fields)\n }\n }\n}\n<commit_msg>Avoid unnecessary closures when deriving RustcDecodable<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The compiler code necessary for `#[derive(Decodable)]`. 
See encodable.rs for more.\n\nuse ast;\nuse ast::{MetaItem, Item, Expr, MutMutable};\nuse codemap::Span;\nuse ext::base::ExtCtxt;\nuse ext::build::AstBuilder;\nuse ext::deriving::generic::*;\nuse ext::deriving::generic::ty::*;\nuse parse::token::InternedString;\nuse parse::token;\nuse ptr::P;\n\npub fn expand_deriving_rustc_decodable<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F) where\n F: FnOnce(P<Item>),\n{\n expand_deriving_decodable_imp(cx, span, mitem, item, push, \"rustc_serialize\")\n}\n\npub fn expand_deriving_decodable<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F) where\n F: FnOnce(P<Item>),\n{\n expand_deriving_decodable_imp(cx, span, mitem, item, push, \"serialize\")\n}\n\nfn expand_deriving_decodable_imp<F>(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Item,\n push: F,\n krate: &'static str) where\n F: FnOnce(P<Item>),\n{\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: Path::new_(vec!(krate, \"Decodable\"), None, vec!(), true),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n methods: vec!(\n MethodDef {\n name: \"decode\",\n generics: LifetimeBounds {\n lifetimes: Vec::new(),\n bounds: vec!((\"__D\", vec!(Path::new_(\n vec!(krate, \"Decoder\"), None,\n vec!(), true))))\n },\n explicit_self: None,\n args: vec!(Ptr(box Literal(Path::new_local(\"__D\")),\n Borrowed(None, MutMutable))),\n ret_ty: Literal(Path::new_(\n vec!(\"std\", \"result\", \"Result\"),\n None,\n vec!(box Self, box Literal(Path::new_(\n vec![\"__D\", \"Error\"], None, vec![], false\n ))),\n true\n )),\n attributes: Vec::new(),\n combine_substructure: combine_substructure(box |a, b, c| {\n decodable_substructure(a, b, c, krate)\n }),\n })\n };\n\n trait_def.expand(cx, mitem, item, push)\n}\n\nfn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span,\n substr: &Substructure,\n krate: &str) -> P<Expr> {\n let decoder = substr.nonself_args[0].clone();\n let recurse = vec!(cx.ident_of(krate),\n cx.ident_of(\"Decodable\"),\n cx.ident_of(\"decode\"));\n let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse));\n \/\/ throw an underscore in front to suppress unused variable warnings\n let blkarg = cx.ident_of(\"_d\");\n let blkdecoder = cx.expr_ident(trait_span, blkarg);\n\n return match *substr.fields {\n StaticStruct(_, ref summary) => {\n let nfields = match *summary {\n Unnamed(ref fields) => fields.len(),\n Named(ref fields) => fields.len()\n };\n let read_struct_field = cx.ident_of(\"read_struct_field\");\n\n let path = cx.path_ident(trait_span, substr.type_ident);\n let result = decode_static_fields(cx,\n trait_span,\n path,\n summary,\n |cx, span, name, field| {\n cx.expr_try(span,\n cx.expr_method_call(span, blkdecoder.clone(), read_struct_field,\n vec!(cx.expr_str(span, name),\n cx.expr_uint(span, field),\n exprdecode.clone())))\n });\n let result = cx.expr_ok(trait_span, result);\n cx.expr_method_call(trait_span,\n decoder,\n cx.ident_of(\"read_struct\"),\n vec!(\n cx.expr_str(trait_span, token::get_ident(substr.type_ident)),\n cx.expr_uint(trait_span, nfields),\n cx.lambda_expr_1(trait_span, result, blkarg)\n ))\n }\n StaticEnum(_, ref fields) => {\n let variant = cx.ident_of(\"i\");\n\n let mut arms = Vec::new();\n let mut variants = Vec::new();\n let rvariant_arg = cx.ident_of(\"read_enum_variant_arg\");\n\n for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() {\n variants.push(cx.expr_str(v_span, token::get_ident(name)));\n\n let path = 
cx.path(trait_span, vec![substr.type_ident, name]);\n let decoded = decode_static_fields(cx,\n v_span,\n path,\n parts,\n |cx, span, _, field| {\n let idx = cx.expr_uint(span, field);\n cx.expr_try(span,\n cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg,\n vec!(idx, exprdecode.clone())))\n });\n\n arms.push(cx.arm(v_span,\n vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))),\n decoded));\n }\n\n arms.push(cx.arm_unreachable(trait_span));\n\n let result = cx.expr_ok(trait_span,\n cx.expr_match(trait_span,\n cx.expr_ident(trait_span, variant), arms));\n let lambda = cx.lambda_expr(trait_span, vec!(blkarg, variant), result);\n let variant_vec = cx.expr_vec(trait_span, variants);\n let variant_vec = cx.expr_addr_of(trait_span, variant_vec);\n let result = cx.expr_method_call(trait_span, blkdecoder,\n cx.ident_of(\"read_enum_variant\"),\n vec!(variant_vec, lambda));\n cx.expr_method_call(trait_span,\n decoder,\n cx.ident_of(\"read_enum\"),\n vec!(\n cx.expr_str(trait_span, token::get_ident(substr.type_ident)),\n cx.lambda_expr_1(trait_span, result, blkarg)\n ))\n }\n _ => cx.bug(\"expected StaticEnum or StaticStruct in deriving(Decodable)\")\n };\n}\n\n\/\/\/ Create a decoder for a single enum variant\/struct:\n\/\/\/ - `outer_pat_path` is the path to this enum variant\/struct\n\/\/\/ - `getarg` should retrieve the `uint`-th field with name `@str`.\nfn decode_static_fields<F>(cx: &mut ExtCtxt,\n trait_span: Span,\n outer_pat_path: ast::Path,\n fields: &StaticFields,\n mut getarg: F)\n -> P<Expr> where\n F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,\n{\n match *fields {\n Unnamed(ref fields) => {\n let path_expr = cx.expr_path(outer_pat_path);\n if fields.is_empty() {\n path_expr\n } else {\n let fields = fields.iter().enumerate().map(|(i, &span)| {\n getarg(cx, span,\n token::intern_and_get_ident(&format!(\"_field{}\", i)[]),\n i)\n }).collect();\n\n cx.expr_call(trait_span, path_expr, fields)\n }\n }\n Named(ref fields) => {\n \/\/ use the field's span to get nicer error messages.\n let fields = fields.iter().enumerate().map(|(i, &(name, span))| {\n let arg = getarg(cx, span, token::get_ident(name), i);\n cx.field_imm(span, name, arg)\n }).collect();\n cx.expr_struct(trait_span, outer_pat_path, fields)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>continued cleaning up iterator_provider<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> #19 MD5 textures are optional<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adds a bench main for instrumentation<commit_after>#![feature(os, collections, rustc_private)]\n\nextern crate getopts;\nextern crate selecta;\n\nuse getopts::{optopt,getopts};\nuse selecta::configuration::Configuration;\nuse selecta::search::Search;\nuse selecta::tty::TTY;\nuse selecta::tty::IO;\nuse selecta::screen::Screen;\n\n#[allow(dead_code)]\nfn main() {\n let initial_query = extract_initial_query();\n let lines = read_lines(100000);\n\n let config = Configuration::from_inputs(lines, initial_query, Some(20));\n let mut search = Search::blank(config);\n\n search = search.append_to_search(\"t\").backspace().append_to_search(\"o\");\n}\n\nfn extract_initial_query() -> Option<String> {\n let args = std::os::args();\n let opts = &[\n optopt(\"s\", \"search\", \"initial search query\", \"\"),\n ];\n let matches = getopts(args.tail(), opts).unwrap();\n\n matches.opt_str(\"s\")\n}\n\nfn one_two_three() -> Vec<String> {\n vec![\"one\".to_string(),\n \"two\".to_string(),\n \"three\".to_string()]\n}\n\nfn 
read_lines(n: usize) -> Vec<String> {\n let mut result: Vec<String> = Vec::new();\n for thing in one_two_three().iter().cycle().take(n) {\n result.push(thing.clone());\n }\n result\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Lengthen comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added publish example code<commit_after>extern crate diesel_demo;\nextern crate diesel;\n\nuse self::diesel::prelude::*;\nuse self::diesel_demo::*;\nuse self::models::Post;\nuse std::env::args;\n\nfn main () {\n use diesel_demo::schema::posts::dsl::{posts, published};\n let id = args().nth(1).expect(\"publish_post requires a post id\").parse::<i32>().expect(\"Invalid ID\");\n let connection = establish_connection();\n\n let post = diesel::update(posts.find(id)).set(published.eq(true)).get_result::<Post>(&connection).expect(&format!(\"Unable to find post {}\", id));\n\n println!(\"Published post {}\", post.title);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add metrics for pg-to-tar.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #70283 - CDirkx:regression-test-70155, r=oli-obk<commit_after>\/\/ check-pass\n#![allow(incomplete_features)]\n#![feature(const_generics)]\n\n\/\/ `Range` should be usable within const generics:\nstruct _Range<const R: std::ops::Range<usize>>;\nconst RANGE : _Range<{ 0 .. 1000 }> = _Range;\n\n\/\/ `RangeFrom` should be usable within const generics:\nstruct _RangeFrom<const R: std::ops::RangeFrom<usize>>;\nconst RANGE_FROM : _RangeFrom<{ 0 .. }> = _RangeFrom;\n\n\/\/ `RangeFull` should be usable within const generics:\nstruct _RangeFull<const R: std::ops::RangeFull>;\nconst RANGE_FULL : _RangeFull<{ .. }> = _RangeFull;\n\n\/\/ Regression test for #70155\n\/\/ `RangeInclusive` should be usable within const generics:\nstruct _RangeInclusive<const R: std::ops::RangeInclusive<usize>>;\nconst RANGE_INCLUSIVE : _RangeInclusive<{ 0 ..= 999 }> = _RangeInclusive;\n\n\/\/ `RangeTo` should be usable within const generics:\nstruct _RangeTo<const R: std::ops::RangeTo<usize>>;\nconst RANGE_TO : _RangeTo<{ .. 
1000 }> = _RangeTo;\n\n\/\/ `RangeToInclusive` should be usable within const generics:\nstruct _RangeToInclusive<const R: std::ops::RangeToInclusive<usize>>;\nconst RANGE_TO_INCLUSIVE : _RangeToInclusive<{ ..= 999 }> = _RangeToInclusive;\n\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for large zip64 files<commit_after>\/\/ The following is a hexdump of a zip64 file containing the following files:\n\/\/ zero4400: 4400 MB of zeroes\n\/\/ zero100: 100 MB of zeroes\n\/\/ zero4400_2: 4400 MB of zeroes\n\/\/\n\/\/ 00000000 50 4b 03 04 2d 00 00 00 00 00 1b 6e 51 4d 66 82 |PK..-......nQMf.|\n\/\/ 00000010 13 da ff ff ff ff ff ff ff ff 08 00 30 00 7a 65 |............0.ze|\n\/\/ 00000020 72 6f 34 34 30 30 55 54 09 00 03 a5 21 c7 5b db |ro4400UT....!.[.|\n\/\/ 00000030 21 c7 5b 75 78 0b 00 01 04 e8 03 00 00 04 e8 03 |!.[ux...........|\n\/\/ 00000040 00 00 01 00 10 00 00 00 00 13 01 00 00 00 00 00 |................|\n\/\/ 00000050 00 13 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|\n\/\/ 00000060 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|\n\/\/ *\n\/\/ 113000050 00 00 00 00 00 00 50 4b 03 04 0a 00 00 00 00 00 |......PK........|\n\/\/ 113000060 2b 6e 51 4d 98 23 28 4b 00 00 40 06 00 00 40 06 |+nQM.#(K..@...@.|\n\/\/ 113000070 07 00 1c 00 7a 65 72 6f 31 30 30 55 54 09 00 03 |....zero100UT...|\n\/\/ 113000080 c2 21 c7 5b c2 21 c7 5b 75 78 0b 00 01 04 e8 03 |.!.[.!.[ux......|\n\/\/ 113000090 00 00 04 e8 03 00 00 00 00 00 00 00 00 00 00 00 |................|\n\/\/ 1130000a0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|\n\/\/ *\n\/\/ 119400090 00 00 00 00 00 00 00 50 4b 03 04 2d 00 00 00 00 |.......PK..-....|\n\/\/ 1194000a0 00 3b 6e 51 4d 66 82 13 da ff ff ff ff ff ff ff |.;nQMf..........|\n\/\/ 1194000b0 ff 0a 00 30 00 7a 65 72 6f 34 34 30 30 5f 32 55 |...0.zero4400_2U|\n\/\/ 1194000c0 54 09 00 03 e2 21 c7 5b db 21 c7 5b 75 78 0b 00 |T....!.[.!.[ux..|\n\/\/ 1194000d0 01 04 e8 03 00 00 04 e8 03 00 00 01 00 10 00 00 |................|\n\/\/ 1194000e0 00 00 13 01 00 00 00 00 00 00 13 01 00 00 00 00 |................|\n\/\/ 1194000f0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|\n\/\/ *\n\/\/ 22c4000e0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 50 |...............P|\n\/\/ 22c4000f0 4b 01 02 1e 03 2d 00 00 00 00 00 1b 6e 51 4d 66 |K....-......nQMf|\n\/\/ 22c400100 82 13 da ff ff ff ff ff ff ff ff 08 00 2c 00 00 |.............,..|\n\/\/ 22c400110 00 00 00 00 00 00 00 a4 81 00 00 00 00 7a 65 72 |.............zer|\n\/\/ 22c400120 6f 34 34 30 30 55 54 05 00 03 a5 21 c7 5b 75 78 |o4400UT....!.[ux|\n\/\/ 22c400130 0b 00 01 04 e8 03 00 00 04 e8 03 00 00 01 00 10 |................|\n\/\/ 22c400140 00 00 00 00 13 01 00 00 00 00 00 00 13 01 00 00 |................|\n\/\/ 22c400150 00 50 4b 01 02 1e 03 0a 00 00 00 00 00 2b 6e 51 |.PK..........+nQ|\n\/\/ 22c400160 4d 98 23 28 4b 00 00 40 06 00 00 40 06 07 00 24 |M.#(K..@...@...$|\n\/\/ 22c400170 00 00 00 00 00 00 00 00 00 a4 81 ff ff ff ff 7a |...............z|\n\/\/ 22c400180 65 72 6f 31 30 30 55 54 05 00 03 c2 21 c7 5b 75 |ero100UT....!.[u|\n\/\/ 22c400190 78 0b 00 01 04 e8 03 00 00 04 e8 03 00 00 01 00 |x...............|\n\/\/ 22c4001a0 08 00 56 00 00 13 01 00 00 00 50 4b 01 02 1e 03 |..V.......PK....|\n\/\/ 22c4001b0 2d 00 00 00 00 00 3b 6e 51 4d 66 82 13 da ff ff |-.....;nQMf.....|\n\/\/ 22c4001c0 ff ff ff ff ff ff 0a 00 34 00 00 00 00 00 00 00 |........4.......|\n\/\/ 22c4001d0 00 00 a4 81 ff ff ff ff 7a 65 72 6f 34 34 30 30 |........zero4400|\n\/\/ 
22c4001e0 5f 32 55 54 05 00 03 e2 21 c7 5b 75 78 0b 00 01 |_2UT....!.[ux...|\n\/\/ 22c4001f0 04 e8 03 00 00 04 e8 03 00 00 01 00 18 00 00 00 |................|\n\/\/ 22c400200 00 13 01 00 00 00 00 00 00 13 01 00 00 00 97 00 |................|\n\/\/ 22c400210 40 19 01 00 00 00 50 4b 06 06 2c 00 00 00 00 00 |@.....PK..,.....|\n\/\/ 22c400220 00 00 1e 03 2d 00 00 00 00 00 00 00 00 00 03 00 |....-...........|\n\/\/ 22c400230 00 00 00 00 00 00 03 00 00 00 00 00 00 00 27 01 |..............'.|\n\/\/ 22c400240 00 00 00 00 00 00 ef 00 40 2c 02 00 00 00 50 4b |........@,....PK|\n\/\/ 22c400250 06 07 00 00 00 00 16 02 40 2c 02 00 00 00 01 00 |........@,......|\n\/\/ 22c400260 00 00 50 4b 05 06 00 00 00 00 03 00 03 00 27 01 |..PK..........'.|\n\/\/ 22c400270 00 00 ff ff ff ff 00 00 |........|\n\/\/ 22c400278\n\nextern crate zip;\n\nuse std::io::{self, Seek, SeekFrom, Read};\n\nconst BLOCK1_LENGTH : u64 = 0x60;\nconst BLOCK1 : [u8; BLOCK1_LENGTH as usize] = [\n 0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66, 0x82,\n 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x30, 0x00, 0x7a, 0x65,\n 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03, 0xa5, 0x21, 0xc7, 0x5b, 0xdb,\n 0x21, 0xc7, 0x5b, 0x75, 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0xe8, 0x03,\n 0x00, 0x00, 0x01, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n];\n\nconst BLOCK2_LENGTH : u64 = 0x50;\nconst BLOCK2 : [u8; BLOCK2_LENGTH as usize] = [\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00,\n 0x2b, 0x6e, 0x51, 0x4d, 0x98, 0x23, 0x28, 0x4b, 0x00, 0x00, 0x40, 0x06, 0x00, 0x00, 0x40, 0x06,\n 0x07, 0x00, 0x1c, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x31, 0x30, 0x30, 0x55, 0x54, 0x09, 0x00, 0x03,\n 0xc2, 0x21, 0xc7, 0x5b, 0xc2, 0x21, 0xc7, 0x5b, 0x75, 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8, 0x03,\n 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n];\n\nconst BLOCK3_LENGTH : u64 = 0x60;\nconst BLOCK3 : [u8; BLOCK3_LENGTH as usize] = [\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x03, 0x04, 0x2d, 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x3b, 0x6e, 0x51, 0x4d, 0x66, 0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,\n 0xff, 0x0a, 0x00, 0x30, 0x00, 0x7a, 0x65, 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30, 0x5f, 0x32, 0x55,\n 0x54, 0x09, 0x00, 0x03, 0xe2, 0x21, 0xc7, 0x5b, 0xdb, 0x21, 0xc7, 0x5b, 0x75, 0x78, 0x0b, 0x00,\n 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x01, 0x00, 0x10, 0x00, 0x00,\n 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00,\n];\n\nconst BLOCK4_LENGTH : u64 = 0x198;\nconst BLOCK4 : [u8; BLOCK4_LENGTH as usize] = [\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50,\n 0x4b, 0x01, 0x02, 0x1e, 0x03, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x6e, 0x51, 0x4d, 0x66,\n 0x82, 0x13, 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x08, 0x00, 0x2c, 0x00, 0x00,\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x81, 0x00, 0x00, 0x00, 0x00, 0x7a, 0x65, 0x72,\n 0x6f, 0x34, 0x34, 0x30, 0x30, 0x55, 0x54, 0x05, 0x00, 0x03, 0xa5, 0x21, 0xc7, 0x5b, 0x75, 0x78,\n 0x0b, 0x00, 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x01, 0x00, 0x10,\n 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x13, 0x01, 0x00, 0x00,\n 0x00, 0x50, 0x4b, 0x01, 0x02, 0x1e, 0x03, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x6e, 0x51,\n 0x4d, 0x98, 0x23, 0x28, 0x4b, 0x00, 0x00, 0x40, 0x06, 0x00, 0x00, 0x40, 0x06, 0x07, 0x00, 0x24,\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa4, 0x81, 0xff, 0xff, 0xff, 0xff, 0x7a,\n 0x65, 0x72, 0x6f, 0x31, 0x30, 0x30, 0x55, 0x54, 0x05, 0x00, 0x03, 0xc2, 0x21, 0xc7, 0x5b, 0x75,\n 0x78, 0x0b, 0x00, 0x01, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x01, 0x00,\n 0x08, 0x00, 0x56, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x01, 0x02, 0x1e, 0x03,\n 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3b, 0x6e, 0x51, 0x4d, 0x66, 0x82, 0x13, 0xda, 0xff, 0xff,\n 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0a, 0x00, 0x34, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0xa4, 0x81, 0xff, 0xff, 0xff, 0xff, 0x7a, 0x65, 0x72, 0x6f, 0x34, 0x34, 0x30, 0x30,\n 0x5f, 0x32, 0x55, 0x54, 0x05, 0x00, 0x03, 0xe2, 0x21, 0xc7, 0x5b, 0x75, 0x78, 0x0b, 0x00, 0x01,\n 0x04, 0xe8, 0x03, 0x00, 0x00, 0x04, 0xe8, 0x03, 0x00, 0x00, 0x01, 0x00, 0x18, 0x00, 0x00, 0x00,\n 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x01, 0x00, 0x00, 0x00, 0x97, 0x00,\n 0x40, 0x19, 0x01, 0x00, 0x00, 0x00, 0x50, 0x4b, 0x06, 0x06, 0x2c, 0x00, 0x00, 0x00, 0x00, 0x00,\n 0x00, 0x00, 0x1e, 0x03, 0x2d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00,\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x27, 0x01,\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0x00, 0x40, 0x2c, 0x02, 0x00, 0x00, 0x00, 0x50, 0x4b,\n 0x06, 0x07, 0x00, 0x00, 0x00, 0x00, 0x16, 0x02, 0x40, 0x2c, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00,\n 0x00, 0x00, 0x50, 0x4b, 0x05, 0x06, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x27, 0x01,\n 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00,\n];\n\nconst BLOCK1_START : u64 = 0x000000000;\nconst BLOCK2_START : u64 = 0x113000050;\nconst BLOCK3_START : u64 = 0x119400090;\nconst BLOCK4_START : u64 = 0x22c4000e0;\n\nconst BLOCK1_END : u64 = BLOCK1_START + BLOCK1_LENGTH - 1;\nconst BLOCK2_END : u64 = BLOCK2_START + BLOCK2_LENGTH - 2;\nconst BLOCK3_END : u64 = BLOCK3_START + BLOCK3_LENGTH - 3;\nconst BLOCK4_END : u64 = BLOCK4_START + BLOCK4_LENGTH - 4;\n\nconst TOTAL_LENGTH : u64 = BLOCK4_START + BLOCK4_LENGTH;\n\nstruct Zip64File {\n pointer: u64,\n}\n\nimpl Zip64File {\n fn new() -> Self {\n Zip64File { pointer: 0 }\n }\n}\n\nimpl Seek for Zip64File {\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {\n match pos {\n SeekFrom::Start(offset) => { self.pointer = offset; },\n SeekFrom::End(offset) => {\n if offset > 0 || offset < -(TOTAL_LENGTH as i64) {\n return Err(io::Error::new(io::ErrorKind::Other, \"Invalid seek offset\"));\n }\n self.pointer = (TOTAL_LENGTH as i64 + offset) as u64;\n },\n SeekFrom::Current(offset) => {\n let seekpos = self.pointer as i64 + offset;\n if seekpos < 0 || seekpos as u64 > TOTAL_LENGTH {\n return Err(io::Error::new(io::ErrorKind::Other, \"Invalid seek offset\"));\n }\n self.pointer = seekpos as u64;\n },\n }\n Ok(self.pointer)\n }\n}\n\nimpl Read for Zip64File {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n if self.pointer >= TOTAL_LENGTH {\n return Ok(0);\n }\n match self.pointer {\n BLOCK1_START ... BLOCK1_END => {\n buf[0] = BLOCK1[(self.pointer - BLOCK1_START) as usize];\n },\n BLOCK2_START ... BLOCK2_END => {\n buf[0] = BLOCK2[(self.pointer - BLOCK2_START) as usize];\n },\n BLOCK3_START ... 
BLOCK3_END => {\n buf[0] = BLOCK3[(self.pointer - BLOCK3_START) as usize];\n },\n BLOCK4_START ... BLOCK4_END => {\n buf[0] = BLOCK4[(self.pointer - BLOCK4_START) as usize];\n },\n _ => {\n buf[0] = 0;\n },\n }\n self.pointer += 1;\n Ok(1)\n }\n}\n\n#[test]\nfn zip64_large() {\n let zipfile = Zip64File::new();\n let mut archive = zip::ZipArchive::new(zipfile).unwrap();\n let mut buf = [0u8; 32];\n\n for i in 0..archive.len() {\n let mut file = archive.by_index(i).unwrap();\n let outpath = file.sanitized_name();\n println!(\"Entry {} has name \\\"{}\\\" ({} bytes)\", i, outpath.as_path().display(), file.size());\n\n match file.read_exact(&mut buf) {\n Ok(()) => println!(\"The first {} bytes are: {:?}\", buf.len(), buf),\n Err(e) => println!(\"Could not read the file: {:?}\", e),\n };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add async filtering example<commit_after>extern crate futures;\nextern crate socketcan;\nextern crate tokio;\nuse futures::stream::Stream;\nuse socketcan::bcm::async::*;\nuse socketcan::FrameFlags;\nuse std::time;\n\nfn main() {\n let socket = CanBCMSocket::open_nb(\"vcan0\").unwrap();\n let ival = time::Duration::from_millis(0);\n let f = socket\n .filter_id_incoming_frames(0x123, ival, ival, FrameFlags::empty())\n .unwrap()\n .map_err(|err| eprintln!(\"IO error {:?}\", err))\n .for_each(|frame| {\n println!(\"Frame {:?}\", frame);\n Ok(())\n });\n tokio::run(f);\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n\/\/ nvp implementation version\npub const NV_VERSION: i32 = 0;\n\n\/\/ nvlist header\n\/\/#[derive(Debug)]\npub struct NvList {\n pub version: i32,\n pub nvflag: u32, \/\/ persistent flags\n pub pairs: Vec<(String, NvValue)>,\n}\n\nimpl NvList {\n pub fn new(nvflag: u32) -> Self {\n NvList {\n version: NV_VERSION,\n nvflag: nvflag,\n pairs: Vec::new(),\n }\n }\n\n pub fn find(&self, name: &str) -> Option<&NvValue> {\n for pair in &self.pairs {\n if pair.0 == name {\n return Some(&pair.1);\n }\n }\n None\n }\n\n pub fn find_mut(&mut self, name: &str) -> Option<&mut NvValue> {\n for pair in &mut self.pairs {\n if pair.0 == name {\n return Some(&mut pair.1);\n }\n }\n None\n }\n\n pub fn get<'a, T: GetNvValue<'a>>(&'a self, name: &str) -> Option<T> {\n self.find(name).and_then(|x| GetNvValue::get(x))\n }\n}\n\nimpl fmt::Debug for NvList {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"NvList {{ version: {:X}, nvflag: {:X}, pairs: [\\n\", self.version, self.nvflag));\n for &(ref name, ref value) in &self.pairs {\n if name.is_empty() { break; }\n try!(write!(f, \"{} : {:?}\\n\", name, value));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n\/\/ TODO Auto implement Debug. format! 
currently crashes with big u32 values\n\/\/#[derive(Debug)]\npub enum NvValue {\n Unknown,\n Boolean,\n Byte(u8),\n Int16(i16),\n Uint16(u16),\n Int32(i32),\n Uint32(u32),\n Int64(i64),\n Uint64(u64),\n String(String),\n ByteArray(Vec<u8>),\n Int16Array(Vec<i16>),\n Uint16Array(Vec<u16>),\n Int32Array(Vec<i32>),\n Uint32Array(Vec<u32>),\n Int64Array(Vec<i64>),\n Uint64Array(Vec<u64>),\n StringArray(Vec<String>),\n HrTime(i64),\n NvList(NvList),\n NvListArray(Vec<NvList>),\n BooleanValue(bool),\n Int8(i8),\n Uint8(u8),\n BooleanArray(Vec<bool>),\n Int8Array(Vec<i8>),\n Uint8Array(Vec<u8>),\n}\n\nimpl NvValue {\n pub fn data_type(&self) -> DataType {\n match *self {\n NvValue::Unknown => DataType::Unknown,\n NvValue::Boolean => DataType::Boolean,\n NvValue::Byte(_) => DataType::Byte,\n NvValue::Int16(_) => DataType::Int16,\n NvValue::Uint16(_) => DataType::Uint16,\n NvValue::Int32(_) => DataType::Int32,\n NvValue::Uint32(_) => DataType::Uint32,\n NvValue::Int64(_) => DataType::Int64,\n NvValue::Uint64(_) => DataType::Uint64,\n NvValue::String(_) => DataType::String,\n NvValue::ByteArray(_) => DataType::ByteArray,\n NvValue::Int16Array(_) => DataType::Int16Array,\n NvValue::Uint16Array(_) => DataType::Uint16Array,\n NvValue::Int32Array(_) => DataType::Int32Array,\n NvValue::Uint32Array(_) => DataType::Uint32Array,\n NvValue::Int64Array(_) => DataType::Int64Array,\n NvValue::Uint64Array(_) => DataType::Uint64Array,\n NvValue::StringArray(_) => DataType::StringArray,\n NvValue::HrTime(_) => DataType::HrTime,\n NvValue::NvList(_) => DataType::NvList,\n NvValue::NvListArray(_) => DataType::NvListArray,\n NvValue::BooleanValue(_) => DataType::BooleanValue,\n NvValue::Int8(_) => DataType::Int8,\n NvValue::Uint8(_) => DataType::Uint8,\n NvValue::BooleanArray(_) => DataType::BooleanArray,\n NvValue::Int8Array(_) => DataType::Int8Array,\n NvValue::Uint8Array(_) => DataType::Uint8Array,\n }\n }\n\n pub fn num_elements(&self) -> usize {\n match *self {\n NvValue::Unknown => 1,\n NvValue::Boolean => 1,\n NvValue::Byte(_) => 1,\n NvValue::Int16(_) => 1,\n NvValue::Uint16(_) => 1,\n NvValue::Int32(_) => 1,\n NvValue::Uint32(_) => 1,\n NvValue::Int64(_) => 1,\n NvValue::Uint64(_) => 1,\n NvValue::String(_) => 1,\n NvValue::ByteArray(ref a) => a.len(),\n NvValue::Int16Array(ref a) => a.len(),\n NvValue::Uint16Array(ref a) => a.len(),\n NvValue::Int32Array(ref a) => a.len(),\n NvValue::Uint32Array(ref a) => a.len(),\n NvValue::Int64Array(ref a) => a.len(),\n NvValue::Uint64Array(ref a) => a.len(),\n NvValue::StringArray(ref a) => a.len(),\n NvValue::HrTime(_) => 1,\n NvValue::NvList(_) => 1,\n NvValue::NvListArray(ref a) => a.len(),\n NvValue::BooleanValue(_) => 1,\n NvValue::Int8(_) => 1,\n NvValue::Uint8(_) => 1,\n NvValue::BooleanArray(ref a) => a.len(),\n NvValue::Int8Array(ref a) => a.len(),\n NvValue::Uint8Array(ref a) => a.len(),\n }\n }\n}\n\nimpl fmt::Debug for NvValue {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n NvValue::Int64(v) => write!(f, \"Int64(0x{:X})\", v),\n NvValue::Uint64(v) => write!(f, \"Uint64(0x{:X})\", v),\n NvValue::NvList(ref v) => write!(f, \"NvList({:?})\", v),\n NvValue::NvListArray(ref v) => {\n try!(write!(f, \"NvListArray([\"));\n for nv_list in v {\n try!(write!(f, \"NvList({:?})\", nv_list));\n }\n write!(f, \"])\")\n },\n NvValue::String(ref v) => { write!(f, \"String({})\", v) },\n _ => write!(f, \"{:?}\", self),\n }\n }\n}\n\n#[derive(Copy, Clone, Debug)]\npub enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n 
Uint32,\n Int64,\n Uint64,\n String,\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n HrTime,\n NvList,\n NvListArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array,\n}\n\nimpl DataType {\n pub fn from_u8(u: u8) -> Option<DataType> {\n match u {\n 0 => Some(DataType::Unknown),\n 1 => Some(DataType::Boolean),\n 2 => Some(DataType::Byte),\n 3 => Some(DataType::Int16),\n 4 => Some(DataType::Uint16),\n 5 => Some(DataType::Int32),\n 6 => Some(DataType::Uint32),\n 7 => Some(DataType::Int64),\n 8 => Some(DataType::Uint64),\n 9 => Some(DataType::String),\n 10 => Some(DataType::ByteArray),\n 11 => Some(DataType::Int16Array),\n 12 => Some(DataType::Uint16Array),\n 13 => Some(DataType::Int32Array),\n 14 => Some(DataType::Uint32Array),\n 15 => Some(DataType::Int64Array),\n 16 => Some(DataType::Uint64Array),\n 17 => Some(DataType::StringArray),\n 18 => Some(DataType::HrTime),\n 19 => Some(DataType::NvList),\n 20 => Some(DataType::NvListArray),\n 21 => Some(DataType::BooleanValue),\n 22 => Some(DataType::Int8),\n 23 => Some(DataType::Uint8),\n 24 => Some(DataType::BooleanArray),\n 25 => Some(DataType::Int8Array),\n 26 => Some(DataType::Uint8Array),\n _ => None,\n }\n }\n\n pub fn to_u8(self) -> u8 {\n match self {\n DataType::Unknown => 0,\n DataType::Boolean => 1,\n DataType::Byte => 2,\n DataType::Int16 => 3,\n DataType::Uint16 => 4,\n DataType::Int32 => 5,\n DataType::Uint32 => 6,\n DataType::Int64 => 7,\n DataType::Uint64 => 8,\n DataType::String => 9,\n DataType::ByteArray => 10,\n DataType::Int16Array => 11,\n DataType::Uint16Array => 12,\n DataType::Int32Array => 13,\n DataType::Uint32Array => 14,\n DataType::Int64Array => 15,\n DataType::Uint64Array => 16,\n DataType::StringArray => 17,\n DataType::HrTime => 18,\n DataType::NvList => 19,\n DataType::NvListArray => 20,\n DataType::BooleanValue => 21,\n DataType::Int8 => 22,\n DataType::Uint8 => 23,\n DataType::BooleanArray => 24,\n DataType::Int8Array => 25,\n DataType::Uint8Array => 26,\n }\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\npub trait GetNvValue<'a>: Sized {\n fn get(value: &'a NvValue) -> Option<Self>;\n}\n\nimpl<'a> GetNvValue<'a> for bool {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::BooleanValue(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u8 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Byte(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u16 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint16(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u32 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint32(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u64 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint64(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i16 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Int16(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i32 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Int32(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i64 {\n fn get(value: &'a NvValue) -> Option<Self> 
{\n match *value {\n NvValue::Int64(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for &'a NvList {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::NvList(ref v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for &'a Vec<NvList> {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::NvListArray(ref v) => Some(v),\n _ => None,\n }\n }\n}\n<commit_msg>impl GetNvValue for &String<commit_after>use redox::*;\n\n\/\/ nvp implementation version\npub const NV_VERSION: i32 = 0;\n\n\/\/ nvlist header\n\/\/#[derive(Debug)]\npub struct NvList {\n pub version: i32,\n pub nvflag: u32, \/\/ persistent flags\n pub pairs: Vec<(String, NvValue)>,\n}\n\nimpl NvList {\n pub fn new(nvflag: u32) -> Self {\n NvList {\n version: NV_VERSION,\n nvflag: nvflag,\n pairs: Vec::new(),\n }\n }\n\n pub fn find(&self, name: &str) -> Option<&NvValue> {\n for pair in &self.pairs {\n if pair.0 == name {\n return Some(&pair.1);\n }\n }\n None\n }\n\n pub fn find_mut(&mut self, name: &str) -> Option<&mut NvValue> {\n for pair in &mut self.pairs {\n if pair.0 == name {\n return Some(&mut pair.1);\n }\n }\n None\n }\n\n pub fn get<'a, T: GetNvValue<'a>>(&'a self, name: &str) -> Option<T> {\n self.find(name).and_then(|x| GetNvValue::get(x))\n }\n}\n\nimpl fmt::Debug for NvList {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"NvList {{ version: {:X}, nvflag: {:X}, pairs: [\\n\", self.version, self.nvflag));\n for &(ref name, ref value) in &self.pairs {\n if name.is_empty() { break; }\n try!(write!(f, \"{} : {:?}\\n\", name, value));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n\/\/ TODO Auto implement Debug. format! currently crashes with big u32 values\n\/\/#[derive(Debug)]\npub enum NvValue {\n Unknown,\n Boolean,\n Byte(u8),\n Int16(i16),\n Uint16(u16),\n Int32(i32),\n Uint32(u32),\n Int64(i64),\n Uint64(u64),\n String(String),\n ByteArray(Vec<u8>),\n Int16Array(Vec<i16>),\n Uint16Array(Vec<u16>),\n Int32Array(Vec<i32>),\n Uint32Array(Vec<u32>),\n Int64Array(Vec<i64>),\n Uint64Array(Vec<u64>),\n StringArray(Vec<String>),\n HrTime(i64),\n NvList(NvList),\n NvListArray(Vec<NvList>),\n BooleanValue(bool),\n Int8(i8),\n Uint8(u8),\n BooleanArray(Vec<bool>),\n Int8Array(Vec<i8>),\n Uint8Array(Vec<u8>),\n}\n\nimpl NvValue {\n pub fn data_type(&self) -> DataType {\n match *self {\n NvValue::Unknown => DataType::Unknown,\n NvValue::Boolean => DataType::Boolean,\n NvValue::Byte(_) => DataType::Byte,\n NvValue::Int16(_) => DataType::Int16,\n NvValue::Uint16(_) => DataType::Uint16,\n NvValue::Int32(_) => DataType::Int32,\n NvValue::Uint32(_) => DataType::Uint32,\n NvValue::Int64(_) => DataType::Int64,\n NvValue::Uint64(_) => DataType::Uint64,\n NvValue::String(_) => DataType::String,\n NvValue::ByteArray(_) => DataType::ByteArray,\n NvValue::Int16Array(_) => DataType::Int16Array,\n NvValue::Uint16Array(_) => DataType::Uint16Array,\n NvValue::Int32Array(_) => DataType::Int32Array,\n NvValue::Uint32Array(_) => DataType::Uint32Array,\n NvValue::Int64Array(_) => DataType::Int64Array,\n NvValue::Uint64Array(_) => DataType::Uint64Array,\n NvValue::StringArray(_) => DataType::StringArray,\n NvValue::HrTime(_) => DataType::HrTime,\n NvValue::NvList(_) => DataType::NvList,\n NvValue::NvListArray(_) => DataType::NvListArray,\n NvValue::BooleanValue(_) => DataType::BooleanValue,\n NvValue::Int8(_) => DataType::Int8,\n NvValue::Uint8(_) => DataType::Uint8,\n NvValue::BooleanArray(_) => DataType::BooleanArray,\n 
NvValue::Int8Array(_) => DataType::Int8Array,\n NvValue::Uint8Array(_) => DataType::Uint8Array,\n }\n }\n\n pub fn num_elements(&self) -> usize {\n match *self {\n NvValue::Unknown => 1,\n NvValue::Boolean => 1,\n NvValue::Byte(_) => 1,\n NvValue::Int16(_) => 1,\n NvValue::Uint16(_) => 1,\n NvValue::Int32(_) => 1,\n NvValue::Uint32(_) => 1,\n NvValue::Int64(_) => 1,\n NvValue::Uint64(_) => 1,\n NvValue::String(_) => 1,\n NvValue::ByteArray(ref a) => a.len(),\n NvValue::Int16Array(ref a) => a.len(),\n NvValue::Uint16Array(ref a) => a.len(),\n NvValue::Int32Array(ref a) => a.len(),\n NvValue::Uint32Array(ref a) => a.len(),\n NvValue::Int64Array(ref a) => a.len(),\n NvValue::Uint64Array(ref a) => a.len(),\n NvValue::StringArray(ref a) => a.len(),\n NvValue::HrTime(_) => 1,\n NvValue::NvList(_) => 1,\n NvValue::NvListArray(ref a) => a.len(),\n NvValue::BooleanValue(_) => 1,\n NvValue::Int8(_) => 1,\n NvValue::Uint8(_) => 1,\n NvValue::BooleanArray(ref a) => a.len(),\n NvValue::Int8Array(ref a) => a.len(),\n NvValue::Uint8Array(ref a) => a.len(),\n }\n }\n}\n\nimpl fmt::Debug for NvValue {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n NvValue::Int64(v) => write!(f, \"Int64(0x{:X})\", v),\n NvValue::Uint64(v) => write!(f, \"Uint64(0x{:X})\", v),\n NvValue::NvList(ref v) => write!(f, \"NvList({:?})\", v),\n NvValue::NvListArray(ref v) => {\n try!(write!(f, \"NvListArray([\"));\n for nv_list in v {\n try!(write!(f, \"NvList({:?})\", nv_list));\n }\n write!(f, \"])\")\n },\n NvValue::String(ref v) => { write!(f, \"String({})\", v) },\n _ => write!(f, \"{:?}\", self),\n }\n }\n}\n\n#[derive(Copy, Clone, Debug)]\npub enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String,\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n HrTime,\n NvList,\n NvListArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array,\n}\n\nimpl DataType {\n pub fn from_u8(u: u8) -> Option<DataType> {\n match u {\n 0 => Some(DataType::Unknown),\n 1 => Some(DataType::Boolean),\n 2 => Some(DataType::Byte),\n 3 => Some(DataType::Int16),\n 4 => Some(DataType::Uint16),\n 5 => Some(DataType::Int32),\n 6 => Some(DataType::Uint32),\n 7 => Some(DataType::Int64),\n 8 => Some(DataType::Uint64),\n 9 => Some(DataType::String),\n 10 => Some(DataType::ByteArray),\n 11 => Some(DataType::Int16Array),\n 12 => Some(DataType::Uint16Array),\n 13 => Some(DataType::Int32Array),\n 14 => Some(DataType::Uint32Array),\n 15 => Some(DataType::Int64Array),\n 16 => Some(DataType::Uint64Array),\n 17 => Some(DataType::StringArray),\n 18 => Some(DataType::HrTime),\n 19 => Some(DataType::NvList),\n 20 => Some(DataType::NvListArray),\n 21 => Some(DataType::BooleanValue),\n 22 => Some(DataType::Int8),\n 23 => Some(DataType::Uint8),\n 24 => Some(DataType::BooleanArray),\n 25 => Some(DataType::Int8Array),\n 26 => Some(DataType::Uint8Array),\n _ => None,\n }\n }\n\n pub fn to_u8(self) -> u8 {\n match self {\n DataType::Unknown => 0,\n DataType::Boolean => 1,\n DataType::Byte => 2,\n DataType::Int16 => 3,\n DataType::Uint16 => 4,\n DataType::Int32 => 5,\n DataType::Uint32 => 6,\n DataType::Int64 => 7,\n DataType::Uint64 => 8,\n DataType::String => 9,\n DataType::ByteArray => 10,\n DataType::Int16Array => 11,\n DataType::Uint16Array => 12,\n DataType::Int32Array => 13,\n DataType::Uint32Array => 14,\n DataType::Int64Array => 15,\n DataType::Uint64Array => 16,\n DataType::StringArray => 17,\n 
DataType::HrTime => 18,\n DataType::NvList => 19,\n DataType::NvListArray => 20,\n DataType::BooleanValue => 21,\n DataType::Int8 => 22,\n DataType::Uint8 => 23,\n DataType::BooleanArray => 24,\n DataType::Int8Array => 25,\n DataType::Uint8Array => 26,\n }\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\npub trait GetNvValue<'a>: Sized {\n fn get(value: &'a NvValue) -> Option<Self>;\n}\n\nimpl<'a> GetNvValue<'a> for bool {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::BooleanValue(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u8 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Byte(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u16 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint16(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u32 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint32(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for u64 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Uint64(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i16 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Int16(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i32 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Int32(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for i64 {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::Int64(v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for &'a String {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::String(ref v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for &'a NvList {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::NvList(ref v) => Some(v),\n _ => None,\n }\n }\n}\n\nimpl<'a> GetNvValue<'a> for &'a Vec<NvList> {\n fn get(value: &'a NvValue) -> Option<Self> {\n match *value {\n NvValue::NvListArray(ref v) => Some(v),\n _ => None,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add text.rs<commit_after>\/\/ The MIT License (MIT)\n\n\/\/ Copyright (c) 2014 Y. T. CHUNG <zonyitoo@gmail.com>\n\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of\n\/\/ this software and associated documentation files (the \"Software\"), to deal in\n\/\/ the Software without restriction, including without limitation the rights to\n\/\/ use, copy, modify, merge, publish, distribute, sublicense, and\/or sell copies of\n\/\/ the Software, and to permit persons to whom the Software is furnished to do so,\n\/\/ subject to the following conditions:\n\n\/\/ The above copyright notice and this permission notice shall be included in all\n\/\/ copies or substantial portions of the Software.\n\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\/\/ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\/\/ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\/\/ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\/\/ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n#![allow(dead_code)]\n\nuse std::io::{BufRead, Read, Write, Cursor, BufReader};\nuse std::str;\nuse std::fmt;\n\nuse proto::{Operation, MemCachedResult};\nuse proto;\n\n\/\/ Storage commands\nconst OP_SET: &'static str = \"set\";\nconst OP_ADD: &'static str = \"add\";\nconst OP_REPLACE: &'static str = \"replace\";\nconst OP_APPEND: &'static str = \"append\";\nconst OP_PREPEND: &'static str = \"prepend\";\nconst OP_CAS: &'static str = \"cas\";\nconst OP_DELETE: &'static str = \"delete\";\nconst OP_INCR: &'static str = \"incr\";\nconst OP_DECR: &'static str = \"decr\";\nconst OP_TOUCH: &'static str = \"touch\";\nconst OP_STATS: &'static str = \"stats\";\nconst OP_VERSION: &'static str = \"version\";\nconst OP_FLUSH_ALL: &'static str = \"flush_all\";\nconst OP_QUIT: &'static str = \"quit\";\n\n\/\/ Retrival commands\nconst OP_GET: &'static str = \"get\";\nconst OP_GETS: &'static str = \"gets\";\n\nconst REPLY_ERROR: &'static str = \"ERROR\";\nconst REPLY_CLIENT_ERROR: &'static str = \"CLIENT_ERROR\";\nconst REPLY_SERVER_ERROR: &'static str = \"SERVER_ERROR\";\n\nconst REPLY_STORED: &'static str = \"STORED\";\nconst REPLY_NOT_STORED: &'static str = \"NOT_STORED\";\nconst REPLY_EXISTS: &'static str = \"EXISTS\";\nconst REPLY_NOT_FOUND: &'static str = \"NOT_FOUND\";\nconst REPLY_END: &'static str = \"END\";\nconst REPLY_VALUE: &'static str = \"VALUE\";\nconst REPLY_DELETED: &'static str = \"DELETED\";\nconst REPLY_TOUCHED: &'static str = \"TOUCHED\";\nconst REPLY_OK: &'static str = \"OK\";\n\n#[derive(Debug, Clone)]\npub enum Reply {\n Error,\n ClientError(String),\n ServerError(String),\n\n NotStored,\n Exists,\n NotFound,\n}\n\nimpl Reply {\n pub fn desc(&self) -> &'static str {\n match self {\n &Reply::Error => \"error\",\n &Reply::ClientError(..) => \"client error\",\n &Reply::ServerError(..) => \"server error\",\n\n &Reply::NotStored => \"not stored\",\n &Reply::Exists => \"exists\",\n &Reply::NotFound => \"not found\",\n }\n }\n}\n\nimpl fmt::Display for Reply {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n &Reply::Error => write!(f, \"error\"),\n &Reply::ClientError(ref e) => write!(f, \"client error: {}\", e),\n &Reply::ServerError(ref e) => write!(f, \"server error: {}\", e),\n\n &Reply::NotStored => write!(f, \"not stored\"),\n &Reply::Exists => write!(f, \"exists\"),\n &Reply::NotFound => write!(f, \"not found\"),\n }\n }\n}\n\n#[derive(Debug)]\npub enum CommandType {\n Set,\n Add,\n Replace,\n Append,\n Prepend,\n Cas,\n Get,\n Gets,\n}\n\npub struct TextProto<S: BufRead + Write + Send> {\n pub stream: S,\n}\n\nimpl<S: BufRead + Write + Send> TextProto<S> {\n pub fn new(stream: S) -> TextProto<S> {\n TextProto {\n stream: stream,\n }\n }\n}\n\nimpl<S: BufRead + Write + Send> Operation for TextProto<S> {\n fn set(&mut self, key: &[u8], value: &[u8], flags: u32, expiration: u32) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {} {} {}\\r\\n\", OP_SET, strkey, flags, expiration, value.len());\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.write(value));\n try!(self.stream.write(b\"\\r\\n\"));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_STORED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn add(&mut self, key: &[u8], value: &[u8], flags: u32, expiration: u32) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) => return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {} {} {}\\r\\n\", OP_ADD, strkey, flags, expiration, value.len());\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.write(value));\n try!(self.stream.write(b\"\\r\\n\"));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_STORED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn delete(&mut self, key: &[u8]) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n\n let cmd = format!(\"{} {}\\r\\n\", OP_DELETE, strkey);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_DELETED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_FOUND), None) => {\n Err(proto::Error::TextProtoError(Reply::NotFound))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn replace(&mut self, key: &[u8], value: &[u8], flags: u32, expiration: u32) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) => return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {} {} {}\\r\\n\", OP_REPLACE, strkey, flags, expiration, value.len());\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.write(value));\n try!(self.stream.write(b\"\\r\\n\"));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_STORED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn get(&mut self, key: &[u8]) -> MemCachedResult<(Vec<u8>, u32)> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError {\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {}\\r\\n\", OP_GET, strkey);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n let (_key, flag, val_len) = {\n try!(self.stream.read_line(&mut resp));\n println!(\"RESP: {:?}\", resp);\n\n let mut splitted = resp.trim_right().split(' ');\n match (splitted.next(), splitted.next(), splitted.next(), splitted.next()) {\n (Some(REPLY_VALUE), Some(key), Some(flag), Some(val_len)) => {\n let flag = match flag.parse::<i32>() {\n Ok(f) => f as u32,\n Err(err) => return Err(proto::Error::OtherError {\n desc: \"Invalid flag\",\n detail: Some(err.to_string()),\n }),\n };\n\n if val_len.len() == 0 {\n return Err(proto::Error::OtherError {\n desc: \"Invalid value length\",\n detail: None,\n });\n }\n\n let val_len = match val_len.parse::<u64>() {\n Ok(vl) => vl,\n Err(err) => return Err(proto::Error::OtherError {\n desc: \"Invalid value length\",\n detail: Some(err.to_string()),\n }),\n };\n\n (key, flag, val_len)\n },\n (Some(REPLY_ERROR), _, _, _) => {\n return Err(proto::Error::TextProtoError(Reply::Error));\n },\n (Some(REPLY_CLIENT_ERROR), Some(error), _, _) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error), _, _) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n return Err(proto::Error::OtherError {\n desc: \"Invalid Response\",\n detail: Some(resp.clone()),\n });\n }\n }\n };\n\n let mut val = Vec::new();\n try!((&mut self.stream).take(val_len).read_to_end(&mut val));\n for _ in (&mut self.stream).take(2).bytes() {} \/\/ consumes \\r\\n\n\n let mut end = String::new();\n try!(self.stream.read_line(&mut end));\n let end = end.trim_right();\n\n if end.len() == 0 {\n return Err(proto::Error::OtherError {\n desc: \"Invalid Response\",\n detail: Some(end.to_owned()),\n });\n }\n\n match end {\n REPLY_END => Ok((val, flag)),\n _ => Err(proto::Error::OtherError {\n desc: \"Invalid Response\",\n detail: Some(end.to_owned()),\n })\n }\n }\n\n fn getk(&mut self, _key: &[u8]) -> MemCachedResult<(Vec<u8>, Vec<u8>, u32)> {\n panic!(\"TextProto does not support GetK command\");\n }\n\n fn increment(&mut self, key: &[u8], amount: u64, _initial: u64, _expiration: u32) -> MemCachedResult<u64> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {}\\r\\n\", OP_INCR, strkey, amount);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n (Some(value), None) => {\n match value.parse::<u64>() {\n Ok(val) => Ok(val),\n Err(err) => return Err(proto::Error::OtherError {\n desc: \"Invalid value\",\n detail: Some(err.to_string()),\n }),\n }\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn decrement(&mut self, key: &[u8], amount: u64, _initial: u64, _expiration: u32) -> MemCachedResult<u64> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) => return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {}\\r\\n\", OP_DECR, strkey, amount);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n (Some(value), None) => {\n match value.parse::<u64>() {\n Ok(val) => Ok(val),\n Err(err) => return Err(proto::Error::OtherError {\n desc: \"Invalid value\",\n detail: Some(err.to_string()),\n }),\n }\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn append(&mut self, key: &[u8], value: &[u8]) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {}\\r\\n\", OP_APPEND, strkey);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.write(value));\n try!(self.stream.write(b\"\\r\\n\"));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_STORED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn prepend(&mut self, key: &[u8], value: &[u8]) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) => return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {}\\r\\n\", OP_PREPEND, strkey);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.write(value));\n try!(self.stream.write(b\"\\r\\n\"));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_STORED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_STORED), None) => {\n Err(proto::Error::TextProtoError(Reply::NotStored))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n\n fn touch(&mut self, key: &[u8], expiration: u32) -> MemCachedResult<()> {\n let strkey = match str::from_utf8(key) {\n Ok(s) => s,\n Err(..) 
=> return Err(proto::Error::OtherError{\n desc: \"Key has to be a valid utf-8 string\",\n detail: None\n }),\n };\n let cmd = format!(\"{} {} {}\\r\\n\", OP_TOUCH, strkey, expiration);\n try!(self.stream.write_all(cmd.as_bytes()));\n try!(self.stream.flush());\n\n let mut resp = String::new();\n try!(self.stream.read_line(&mut resp));\n let resp_str = resp.trim_right();\n\n let mut splitted = resp_str.split(' ');\n match (splitted.next(), splitted.next()) {\n (Some(REPLY_TOUCHED), None) => {\n Ok(())\n },\n (Some(REPLY_NOT_FOUND), None) => {\n Err(proto::Error::TextProtoError(Reply::NotFound))\n },\n (Some(REPLY_ERROR), None) => {\n Err(proto::Error::TextProtoError(Reply::Error))\n },\n (Some(REPLY_CLIENT_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ClientError(error.to_owned())));\n },\n (Some(REPLY_SERVER_ERROR), Some(error)) => {\n return Err(proto::Error::TextProtoError(Reply::ServerError(error.to_owned())));\n },\n _ => {\n Err(proto::Error::OtherError {\n desc: \"Unknown reply\",\n detail: Some(resp_str.to_string())\n })\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::net::TcpStream;\n use std::io::BufStream;\n use proto::text::TextProto;\n use proto::{Operation};\n\n const SERVER_ADDR: &'static str = \"127.0.0.1:11211\";\n\n fn get_client() -> TextProto<BufStream<TcpStream>> {\n let stream = TcpStream::connect(SERVER_ADDR).unwrap();\n TextProto::new(BufStream::new(stream))\n }\n\n #[test]\n fn test_set_get_delete() {\n let key = b\"test:test_text\";\n let val = b\"val\";\n\n let mut client = get_client();\n let set_resp = client.set(key, val, 0xdead, 200);\n assert!(set_resp.is_ok());\n\n let (get_val, flag) = client.get(key).unwrap();\n assert_eq!(flag, 0xdead);\n assert_eq!(&get_val[..], val);\n\n client.delete(key).unwrap();\n }\n\n #[test]\n fn test_add() {\n let key = b\"test:test_add\";\n let val = b\"val\";\n\n let mut client = get_client();\n client.add(key, val, 0xdead, 20).unwrap();\n\n let (get_val, flag) = client.get(key).unwrap();\n assert_eq!(flag, 0xdead);\n assert_eq!(&get_val[..], val);\n\n client.delete(key).unwrap();\n }\n\n #[test]\n fn test_replace() {\n let key = b\"test:test_replace\";\n let val = b\"val\";\n let replaced = b\"replaced\";\n\n let mut client = get_client();\n client.add(key, val, 0xdead, 20).unwrap();\n client.replace(key, replaced, 0xdeadbeef, 20).unwrap();\n\n let (get_val, flag) = client.get(key).unwrap();\n assert_eq!(flag, 0xdeadbeef);\n assert_eq!(&get_val[..], replaced);\n\n client.delete(key).unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! HTTP Server\nuse std::io::{Listener, IoResult, EndOfFile};\nuse std::io::net::ip::{IpAddr, Port, SocketAddr};\n\nuse intertwine::{Intertwine, Intertwined};\nuse macceptor::MoveAcceptor;\n\npub use self::request::Request;\npub use self::response::Response;\n\nuse {HttpResult};\nuse net::{NetworkListener, NetworkAcceptor, NetworkStream,\n HttpAcceptor, HttpListener, HttpStream,\n Fresh};\n\npub mod request;\npub mod response;\n\n\/\/\/ A server can listen on a TCP socket.\n\/\/\/\n\/\/\/ Once listening, it will create a `Request`\/`Response` pair for each\n\/\/\/ incoming connection, and hand them to the provided handler.\npub struct Server<L = HttpListener> {\n pairs: Vec<(IpAddr, Port)>\n}\n\nmacro_rules! 
try_option(\n ($e:expr) => {{\n match $e {\n Some(v) => v,\n None => return None\n }\n }}\n)\n\nimpl Server<HttpListener> {\n \/\/\/ Creates a new server that will handle `HttpStream`s.\n pub fn http(ip: IpAddr, port: Port) -> Server {\n Server { pairs: vec![(ip, port)] }\n }\n\n \/\/\/ Creates a server that can listen to many (ip, port) pairs.\n pub fn many(pairs: Vec<(IpAddr, Port)>) -> Server {\n Server { pairs: pairs }\n }\n}\n\nimpl<L: NetworkListener<S, A>, S: NetworkStream, A: NetworkAcceptor<S>> Server<L> {\n \/\/\/ Binds to a socket, and starts handling connections.\n \/\/\/\n \/\/\/ This method has unbound type parameters, so can be used when you want to use\n \/\/\/ something other than the provided HttpStream, HttpAcceptor, and HttpListener.\n pub fn listen_network<H, S, A, L>(self, handler: H) -> HttpResult<Listening<A>>\n where H: Handler<A, S>,\n S: NetworkStream,\n A: NetworkAcceptor<S>,\n L: NetworkListener<S, A>, {\n let mut acceptors = Vec::new();\n let mut sockets = Vec::new();\n for (ip, port) in self.pairs.move_iter() {\n let mut listener: L = try_io!(NetworkListener::<S, A>::bind(ip.to_string().as_slice(), port));\n\n sockets.push(try_io!(listener.socket_name()));\n\n let acceptor = try_io!(listener.listen());\n acceptors.push(acceptor.clone());\n }\n\n let connections = acceptors.clone()\n .move_iter()\n .map(|acceptor| acceptor.move_incoming())\n .intertwine();\n\n spawn(proc() {\n handler.handle(Incoming { from: connections });\n });\n\n Ok(Listening {\n acceptors: acceptors,\n sockets: sockets,\n })\n }\n\n \/\/\/ Binds to a socket and starts handling connections.\n pub fn listen<H: Handler<HttpAcceptor, HttpStream>>(self, handler: H) -> HttpResult<Listening<HttpAcceptor>> {\n self.listen_network::<H, HttpStream, HttpAcceptor, HttpListener>(handler)\n }\n}\n\n\/\/\/ An iterator over incoming connections, represented as pairs of\n\/\/\/ hyper Requests and Responses.\npub struct Incoming<S: Send = HttpStream> {\n from: Intertwined<IoResult<S>>\n}\n\nimpl<S: NetworkStream + 'static> Iterator<(Request, Response<Fresh>)> for Incoming<S> {\n fn next(&mut self) -> Option<(Request, Response<Fresh>)> {\n for conn in self.from {\n match conn {\n Ok(stream) => {\n debug!(\"Incoming stream\");\n let clone = stream.clone();\n let req = match Request::new(stream) {\n Ok(r) => r,\n Err(err) => {\n error!(\"creating Request: {}\", err);\n continue;\n }\n };\n let mut res = Response::new(clone);\n res.version = req.version;\n return Some((req, res))\n },\n Err(ref e) if e.kind == EndOfFile => return None, \/\/ server closed\n Err(e) => {\n error!(\"Connection failed: {}\", e);\n continue;\n }\n }\n }\n None\n }\n}\n\n\/\/\/ A listening server, which can later be closed.\npub struct Listening<A = HttpAcceptor> {\n acceptors: Vec<A>,\n \/\/\/ The socket addresses that the server is bound to.\n pub sockets: Vec<SocketAddr>,\n}\n\nimpl<A: NetworkAcceptor<S>, S: NetworkStream> Listening<A> {\n \/\/\/ Stop the server from listening to all of its socket addresses.\n \/\/\/\n \/\/\/ If closing any of the servers acceptors fails, this function returns Err\n \/\/\/ and does not close the rest of the acceptors.\n pub fn close(&mut self) -> HttpResult<()> {\n debug!(\"closing server\");\n for acceptor in self.acceptors.mut_iter() {\n try_io!(acceptor.close());\n }\n Ok(())\n }\n}\n\n\/\/\/ A handler that can handle incoming requests for a server.\npub trait Handler<A: NetworkAcceptor<S>, S: NetworkStream>: Send {\n \/\/\/ Receives a `Request`\/`Response` pair, and should perform some action 
on them.\n \/\/\/\n \/\/\/ This could reading from the request, and writing to the response.\n fn handle(self, Incoming<S>);\n}\n\nimpl<A: NetworkAcceptor<S>, S: NetworkStream> Handler<A, S> for fn(Incoming<S>) {\n fn handle(self, incoming: Incoming<S>) {\n (self)(incoming)\n }\n}\n\n<commit_msg>update deprecated syntax<commit_after>\/\/! HTTP Server\nuse std::io::{Listener, IoResult, EndOfFile};\nuse std::io::net::ip::{IpAddr, Port, SocketAddr};\n\nuse intertwine::{Intertwine, Intertwined};\nuse macceptor::MoveAcceptor;\n\npub use self::request::Request;\npub use self::response::Response;\n\nuse {HttpResult};\nuse net::{NetworkListener, NetworkAcceptor, NetworkStream,\n HttpAcceptor, HttpListener, HttpStream,\n Fresh};\n\npub mod request;\npub mod response;\n\n\/\/\/ A server can listen on a TCP socket.\n\/\/\/\n\/\/\/ Once listening, it will create a `Request`\/`Response` pair for each\n\/\/\/ incoming connection, and hand them to the provided handler.\npub struct Server<L = HttpListener> {\n pairs: Vec<(IpAddr, Port)>\n}\n\nmacro_rules! try_option(\n ($e:expr) => {{\n match $e {\n Some(v) => v,\n None => return None\n }\n }}\n)\n\nimpl Server<HttpListener> {\n \/\/\/ Creates a new server that will handle `HttpStream`s.\n pub fn http(ip: IpAddr, port: Port) -> Server {\n Server { pairs: vec![(ip, port)] }\n }\n\n \/\/\/ Creates a server that can listen to many (ip, port) pairs.\n pub fn many(pairs: Vec<(IpAddr, Port)>) -> Server {\n Server { pairs: pairs }\n }\n}\n\nimpl<L: NetworkListener<S, A>, S: NetworkStream, A: NetworkAcceptor<S>> Server<L> {\n \/\/\/ Binds to a socket, and starts handling connections.\n \/\/\/\n \/\/\/ This method has unbound type parameters, so can be used when you want to use\n \/\/\/ something other than the provided HttpStream, HttpAcceptor, and HttpListener.\n pub fn listen_network<H, S, A, L>(self, handler: H) -> HttpResult<Listening<A>>\n where H: Handler<A, S>,\n S: NetworkStream,\n A: NetworkAcceptor<S>,\n L: NetworkListener<S, A>, {\n let mut acceptors = Vec::new();\n let mut sockets = Vec::new();\n for (ip, port) in self.pairs.into_iter() {\n let mut listener: L = try_io!(NetworkListener::<S, A>::bind(ip.to_string().as_slice(), port));\n\n sockets.push(try_io!(listener.socket_name()));\n\n let acceptor = try_io!(listener.listen());\n acceptors.push(acceptor.clone());\n }\n\n let connections = acceptors.clone()\n .into_iter()\n .map(|acceptor| acceptor.move_incoming())\n .intertwine();\n\n spawn(proc() {\n handler.handle(Incoming { from: connections });\n });\n\n Ok(Listening {\n acceptors: acceptors,\n sockets: sockets,\n })\n }\n\n \/\/\/ Binds to a socket and starts handling connections.\n pub fn listen<H: Handler<HttpAcceptor, HttpStream>>(self, handler: H) -> HttpResult<Listening<HttpAcceptor>> {\n self.listen_network::<H, HttpStream, HttpAcceptor, HttpListener>(handler)\n }\n}\n\n\/\/\/ An iterator over incoming connections, represented as pairs of\n\/\/\/ hyper Requests and Responses.\npub struct Incoming<S: Send = HttpStream> {\n from: Intertwined<IoResult<S>>\n}\n\nimpl<S: NetworkStream + 'static> Iterator<(Request, Response<Fresh>)> for Incoming<S> {\n fn next(&mut self) -> Option<(Request, Response<Fresh>)> {\n for conn in self.from {\n match conn {\n Ok(stream) => {\n debug!(\"Incoming stream\");\n let clone = stream.clone();\n let req = match Request::new(stream) {\n Ok(r) => r,\n Err(err) => {\n error!(\"creating Request: {}\", err);\n continue;\n }\n };\n let mut res = Response::new(clone);\n res.version = req.version;\n return Some((req, 
res))\n },\n Err(ref e) if e.kind == EndOfFile => return None, \/\/ server closed\n Err(e) => {\n error!(\"Connection failed: {}\", e);\n continue;\n }\n }\n }\n None\n }\n}\n\n\/\/\/ A listening server, which can later be closed.\npub struct Listening<A = HttpAcceptor> {\n acceptors: Vec<A>,\n \/\/\/ The socket addresses that the server is bound to.\n pub sockets: Vec<SocketAddr>,\n}\n\nimpl<A: NetworkAcceptor<S>, S: NetworkStream> Listening<A> {\n \/\/\/ Stop the server from listening to all of its socket addresses.\n \/\/\/\n \/\/\/ If closing any of the servers acceptors fails, this function returns Err\n \/\/\/ and does not close the rest of the acceptors.\n pub fn close(&mut self) -> HttpResult<()> {\n debug!(\"closing server\");\n for acceptor in self.acceptors.iter_mut() {\n try_io!(acceptor.close());\n }\n Ok(())\n }\n}\n\n\/\/\/ A handler that can handle incoming requests for a server.\npub trait Handler<A: NetworkAcceptor<S>, S: NetworkStream>: Send {\n \/\/\/ Receives a `Request`\/`Response` pair, and should perform some action on them.\n \/\/\/\n \/\/\/ This could reading from the request, and writing to the response.\n fn handle(self, Incoming<S>);\n}\n\nimpl<A: NetworkAcceptor<S>, S: NetworkStream> Handler<A, S> for fn(Incoming<S>) {\n fn handle(self, incoming: Incoming<S>) {\n (self)(incoming)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented BubbleSort in Rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>A join based parallel version<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>debuginfo: Add a test case for issue #12886.<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\n\/\/ compile-flags:-g\n\/\/ debugger:break issue12886.rs:29\n\/\/ debugger:run\n\/\/ debugger:next\n\/\/ check:[...]30[...]s\n\/\/ debugger:continue\n\n\/\/ IF YOU MODIFY THIS FILE, BE CAREFUL TO ADAPT THE LINE NUMBERS IN THE DEBUGGER COMMANDS\n\n\/\/ This test makes sure that gdb does not set unwanted breakpoints in inlined functions. If a\n\/\/ breakpoint existed in unwrap(), then calling `next` would (when stopped at line 27) would stop\n\/\/ in unwrap() instead of stepping over the function invocation. By making sure that `s` is\n\/\/ contained in the output, after calling `next` just once, we can be sure that we did not stop in\n\/\/ unwrap(). (The testing framework doesn't allow for checking that some text is *not* contained in\n\/\/ the output, which is why we have to make the test in this kind of roundabout way)\nfn bar() -> int {\n let s = Some(5).unwrap();\n s\n}\n\nfn main() {\n let _ = bar();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ error-pattern: mismatched types\n\nfn f(int x) {\n}\n\nfn main() {\n let () i;\n i = f();\n}\n<commit_msg>rustc: Fix error pattern in compile-fail\/arg-count-mismatch.rs<commit_after>\/\/ error-pattern: parameters were supplied\n\nfn f(int x) {\n}\n\nfn main() {\n let () i;\n i = f();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-test\n\npub fn main() {\n match 22i {\n 0 .. 3 => {} \/\/~ ERROR expected `=>`, found `..`\n _ => {}\n }\n}\n<commit_msg>Enable a test for .. in range patterns.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub fn main() {\n match 22i {\n 0 .. 3 => {} \/\/~ ERROR expected `=>`, found `..`\n _ => {}\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #20582. Fixes #20582.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #20582. 
This test caused an ICE related to\n\/\/ inconsistent region erasure in trans.\n\nstruct Foo<'a> {\n buf: &'a[u8]\n}\n\nimpl<'a> Iterator for Foo<'a> {\n type Item = &'a[u8];\n\n fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n Some(self.buf)\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Optimize: Do not attempt to print if output is a pipe<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor imag-diary<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>that's not in stable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>build.rs script<commit_after>use std::env;\nuse std::fs::File;\nuse std::io::Write;\nuse std::path::Path;\nuse std::process::Command;\n\nfn tags() -> String {\n let output = Command::new(\"git\").args(&[\"describe\", \"--abbrev=0\"]).output().expect(\"Failed to execute git describe\");\n String::from_utf8_lossy(&output.stdout).trim().to_string()\n}\n\nfn branch() -> String {\n let output = Command::new(\"git\").args(&[\"rev-parse\", \"--abbrev-ref\", \"HEAD\"]).output().expect(\"Failed to execute git rev-parse\");\n String::from_utf8_lossy(&output.stdout).trim().to_string()\n}\n\nfn date() -> String {\n let output = Command::new(\"date\").output().expect(\"Failed to execute date\");\n String::from_utf8_lossy(&output.stdout).trim().to_string()\n}\n\nfn commit_sha() -> String {\n let output = Command::new(\"git\").args(&[\"rev-parse\", \"HEAD\"]).output().expect(\"Failed to execute git rev-parse\");\n String::from_utf8_lossy(&output.stdout).trim().to_string()\n}\n\nfn local_changes() -> bool {\n let output = Command::new(\"git\").args(&[\"diff\", \"--exit-code\"]).output().expect(\"Failed to execute git diff\");\n !output.status.success()\n}\n\nfn kernel_version() -> String {\n let output = Command::new(\"uname\").arg(\"-r\").output().expect(\"Failed to execute uname\");\n String::from_utf8_lossy(&output.stdout).trim().to_string()\n}\n\n\n\nfn main() {\n let version = env!(\"CARGO_PKG_VERSION\");\n let tag = tags();\n let tag_message = if !tag.is_empty() { format!(\"Tag: {}\\n\", tag) } else { String::new() };\n let mut change_message = \"\";\n\n if local_changes() {\n change_message = \" with local changes\";\n }\n\n let version_info = format!(\"RustyVisor Version: {}\\nCommit: {}{}\\n{}Branch: {}\\nBuilt on: {}\\nKernel version: {}\", version, commit_sha(), change_message, tag_message, branch(), date(), kernel_version());\n\n let version_code = format!(\"const VERSION: &'static str = \\\"{}\\\";\", version_info);\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n let dest_path = Path::new(&out_dir).join(\"version.rs\");\n let mut f = File::create(&dest_path).unwrap();\n\n f.write_all(version_code.as_bytes()).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Zip integration tests<commit_after>\/\/ Copyright 2017 The Grin Developers\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate grin_util as util;\n\nuse 
std::fs::{self, File};\nuse std::path::Path;\nuse std::io::{self, Write};\nuse util::zip;\n\n#[test]\nfn zip_unzip() {\n\tlet root = Path::new(\".\/target\");\n\tlet zip_name = \".\/target\/zipped.zip\";\n\n\tfs::create_dir_all(root.join(\".\/to_zip\/sub\")).unwrap();\n\twrite_files(&root).unwrap();\n\n\tlet zip_file = File::create(zip_name).unwrap();\n\tzip::compress(&root.join(\".\/to_zip\"), &zip_file).unwrap();\n\tzip_file.sync_all();\n\t\n\tlet zip_path = Path::new(zip_name);\n\tassert!(zip_path.exists());\n\tassert!(zip_path.is_file());\n\tassert!(zip_path.metadata().unwrap().len() > 300);\n\t\n\tfs::create_dir_all(root.join(\".\/dezipped\")).unwrap();\n\tlet zip_file = File::open(zip_name).unwrap();\n\tzip::decompress(zip_file, &root.join(\".\/dezipped\")).unwrap();\n\n\tassert!(root.join(\"to_zip\/foo.txt\").is_file());\n\tassert!(root.join(\"to_zip\/bar.txt\").is_file());\n\tassert!(root.join(\"to_zip\/sub\").is_dir());\n\tlet lorem = root.join(\"to_zip\/sub\/lorem\");\n\tassert!(lorem.is_file());\n\tassert!(lorem.metadata().unwrap().len() == 55);\n}\n\nfn write_files(root: &Path) -> io::Result<()> {\n\tlet mut file = File::create(root.join(\"to_zip\/foo.txt\"))?;\n\tfile.write_all(b\"Hello, world!\")?;\n\tlet mut file = File::create(root.join(\"to_zip\/bar.txt\"))?;\n\tfile.write_all(b\"Goodbye, world!\")?;\n\tlet mut file = File::create(root.join(\"to_zip\/sub\/lorem\"))?;\n\tfile.write_all(b\"Lorem ipsum dolor sit amet, consectetur adipiscing elit\")?;\n\tOk(())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n}\n\nconst TEST_REPOS: &'static [Test] = &[\n Test {\n name: \"cargo\",\n repo: \"https:\/\/github.com\/rust-lang\/cargo\",\n sha: \"7d79da08238e3d47e0bc4406155bdcc45ccb8c82\",\n lock: None,\n },\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"16c858ec2901e2992fe5e529780f59fa8ed12903\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n },\n];\n\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\")).expect(\"\")\n .write_all(lockfile.as_bytes()).expect(\"\");\n }\n if !run_cargo_test(cargo, &dir) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path) -> bool {\n let status = Command::new(cargo_path)\n .arg(\"test\")\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<commit_msg>run rustfmt on cargotest folder in src\/tools\/cargotest<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n}\n\nconst TEST_REPOS: &'static [Test] = &[Test {\n name: \"cargo\",\n repo: \"https:\/\/github.com\/rust-lang\/cargo\",\n sha: \"7d79da08238e3d47e0bc4406155bdcc45ccb8c82\",\n lock: None,\n },\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"16c858ec2901e2992fe5e529780f59fa8ed12903\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n }];\n\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\"))\n .expect(\"\")\n .write_all(lockfile.as_bytes())\n .expect(\"\");\n }\n if !run_cargo_test(cargo, &dir) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path) -> bool {\n let status = Command::new(cargo_path)\n .arg(\"test\")\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added Venue object<commit_after>use objects::Location;\n\n\/\/\/ Represents a venue\n#[derive(Clone, Serialize, Deserialize, Debug)]\npub struct Venue {\n pub location: Location,\n pub title: String,\n pub address: String,\n pub foursquare_id: Option<String>,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Move stdlib] add basic test to ensure that generated files are up to date<commit_after>\/\/ Copyright (c) The Libra Core Contributors\n\/\/ SPDX-License-Identifier: Apache-2.0\n\nuse std::process::Command;\n\nfn assert_that_version_control_has_no_unstaged_changes() {\n let output = Command::new(\"git\")\n .arg(\"status\")\n .arg(\"--porcelain\")\n .output()\n .unwrap();\n assert!(\n output.stdout.is_empty(),\n \"Git repository should be in a clean 
state\"\n );\n assert!(output.status.success());\n}\n\n#[test]\nfn test_that_generated_file_are_up_to_date_in_git() {\n \/\/ Better not run the `stdlib` tool when the repository is not in a clean state.\n assert_that_version_control_has_no_unstaged_changes();\n\n assert!(Command::new(\"cargo\")\n .arg(\"run\")\n .arg(\"--release\")\n .arg(\"-p\")\n .arg(\"stdlib\")\n .status()\n .unwrap()\n .success());\n\n \/\/ Running the stdlib tool should not create unstaged changes.\n assert_that_version_control_has_no_unstaged_changes();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ -*- rust -*-\n\n#[doc = \"Classic Boolean logic reified as ADT\"];\n\nexport t;\nexport not, and, or, xor, implies;\nexport eq, ne, is_true, is_false;\nexport from_str, to_str, all_values, to_bit;\n\n#[doc = \"The type of boolean logic values\"]\ntype t = bool;\n\n#[doc(\n brief = \"Negation\/Inverse\",\n args(v = \"Value to Negate\/Invert\"),\n return = \"Negated\/Inverted Value\"\n)]\npure fn not(v: t) -> t { !v }\n\n#[doc(\n brief = \"Conjunction\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` AND `b`\"\n)]\npure fn and(a: t, b: t) -> t { a && b }\n\n#[doc(\n brief = \"Disjunction\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` OR `b`\"\n)]\npure fn or(a: t, b: t) -> t { a || b }\n\n#[doc(\n brief = \"Exclusive or, i.e. `or(and(a, not(b)), and(not(a), b))`\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` XOR `b`\"\n)]\npure fn xor(a: t, b: t) -> t { (a && !b) || (!a && b) }\n\n#[doc(\n brief = \"Implication in the logic, i.e. from `a` follows `b`\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` IMPLIES `b`\"\n)]\npure fn implies(a: t, b: t) -> t { !a || b }\n\n#[doc(\n brief = \"true if truth values `a` and `b` are indistinguishable in the logic\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` == `b`\"\n)]\npure fn eq(a: t, b: t) -> bool { a == b }\n\n#[doc(\n brief = \"true if truth values `a` and `b` are distinguishable in the logic\",\n args(a = \"value `a`\",\n b = \"value `b`\"),\n return = \"`a` != `b`\"\n)]\npure fn ne(a: t, b: t) -> bool { a != b }\n\n#[doc(\n brief = \"true if `v` represents truth in the logic\",\n args(v = \"value `v`\"),\n return = \"bool(`v`)\"\n)]\npure fn is_true(v: t) -> bool { v }\n\n#[doc(\n brief = \"true if `v` represents falsehood in the logic\",\n args(v = \"value `v`\"),\n return = \"bool(!`v`)\"\n)]\npure fn is_false(v: t) -> bool { !v }\n\n#[doc(\n brief = \"Parse logic value from `s`\",\n args(v = \"string value `s`\"),\n return = \"true if `s` equals \\\"true\\\", else false\"\n)]\npure fn from_str(s: str) -> t {\n alt s {\n \"true\" { true }\n \"false\" { false }\n }\n}\n\n#[doc(\n brief = \"Convert `v` into a string\",\n args(v = \"truth value `v`\"),\n return = \"\\\"true\\\" if value `v` is true, else \\\"false\\\"\"\n)]\npure fn to_str(v: t) -> str { if v { \"true\" } else { \"false\" } }\n\n#[doc(\n brief = \"Iterates over all truth values by passing them to `blk` in an unspecified order\",\n args(v = \"block value `v`\"),\n return = \"Undefined return value\"\n)]\nfn all_values(blk: block(v: t)) {\n blk(true);\n blk(false);\n}\n\n#[doc(\n brief = \"converts truth value to an 8 bit byte\",\n args(v = \"value `v`\"),\n return = \"returns byte with value 1 if `v` has truth value of true, else 0\"\n)]\npure fn to_bit(v: t) -> u8 { if v { 1u8 } else { 0u8 } }\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 
4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<commit_msg>make rustdocs more terse for bool.rs where it is obvious to programmers as per feedback from graydon<commit_after>\/\/ -*- rust -*-\n\n#[doc = \"Classic Boolean logic reified as ADT\"];\n\nexport t;\nexport not, and, or, xor, implies;\nexport eq, ne, is_true, is_false;\nexport from_str, to_str, all_values, to_bit;\n\n#[doc = \"The type of boolean logic values\"]\ntype t = bool;\n\n#[doc(\n brief = \"Negation\/Inverse\"\n)]\npure fn not(v: t) -> t { !v }\n\n#[doc(\n brief = \"Conjunction\"\n)]\npure fn and(a: t, b: t) -> t { a && b }\n\n#[doc(\n brief = \"Disjunction\"\n)]\npure fn or(a: t, b: t) -> t { a || b }\n\n#[doc(\n brief = \"Exclusive or, i.e. `or(and(a, not(b)), and(not(a), b))`\"\n)]\npure fn xor(a: t, b: t) -> t { (a && !b) || (!a && b) }\n\n#[doc(\n brief = \"Implication in the logic, i.e. from `a` follows `b`\"\n)]\npure fn implies(a: t, b: t) -> t { !a || b }\n\n#[doc(\n brief = \"true if truth values `a` and `b` are indistinguishable in the logic\"\n)]\npure fn eq(a: t, b: t) -> bool { a == b }\n\n#[doc(\n brief = \"true if truth values `a` and `b` are distinguishable in the logic\"\n)]\npure fn ne(a: t, b: t) -> bool { a != b }\n\n#[doc(\n brief = \"true if `v` represents truth in the logic\"\n)]\npure fn is_true(v: t) -> bool { v }\n\n#[doc(\n brief = \"true if `v` represents falsehood in the logic\"\n)]\npure fn is_false(v: t) -> bool { !v }\n\n#[doc(\n brief = \"Parse logic value from `s`\"\n)]\npure fn from_str(s: str) -> t {\n alt s {\n \"true\" { true }\n \"false\" { false }\n }\n}\n\n#[doc(\n brief = \"Convert `v` into a string\"\n)]\npure fn to_str(v: t) -> str { if v { \"true\" } else { \"false\" } }\n\n#[doc(\n brief = \"Iterates over all truth values by passing them to `blk` in an unspecified order\"\n)]\nfn all_values(blk: block(v: t)) {\n blk(true);\n blk(false);\n}\n\n#[doc(\n brief = \"converts truth value to an 8 bit byte\"\n)]\npure fn to_bit(v: t) -> u8 { if v { 1u8 } else { 0u8 } }\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<|endoftext|>"} {"text":"<commit_before>\/\/ Dynamic arenas.\n\n\/\/ Arenas are used to quickly allocate objects that share a\n\/\/ lifetime. The arena uses ~[u8] vectors as a backing store to\n\/\/ allocate objects from. For each allocated object, the arena stores\n\/\/ a pointer to the type descriptor followed by the\n\/\/ object. (Potentially with alignment padding after each of them.)\n\/\/ When the arena is destroyed, it iterates through all of its chunks,\n\/\/ and uses the tydesc information to trace through the objects,\n\/\/ calling the destructors on them.\n\/\/ One subtle point that needs to be addressed is how to handle\n\/\/ failures while running the user provided initializer function. It\n\/\/ is important to not run the destructor on uninitalized objects, but\n\/\/ how to detect them is somewhat subtle. Since alloc() can be invoked\n\/\/ recursively, it is not sufficient to simply exclude the most recent\n\/\/ object. 
To solve this without requiring extra space, we use the low\n\/\/ order bit of the tydesc pointer to encode whether the object it\n\/\/ describes has been fully initialized.\n\n\/\/ A good extension of this scheme would be to segregate data with and\n\/\/ without destructors in order to avoid the overhead in the\n\/\/ plain-old-data case.\n\nexport arena, arena_with_size;\n\nimport list;\nimport list::{list, cons, nil};\nimport unsafe::reinterpret_cast;\nimport sys::TypeDesc;\nimport libc::size_t;\n\n#[abi = \"rust-intrinsic\"]\nextern mod rusti {\n fn move_val_init<T>(&dst: T, -src: T);\n}\nextern mod rustrt {\n #[rust_stack]\n fn rust_call_tydesc_glue(root: *u8, tydesc: *TypeDesc, field: size_t);\n}\n\/\/ This probably belongs somewhere else. Needs to be kept in sync with\n\/\/ changes to glue...\nconst tydesc_drop_glue_index: size_t = 3 as size_t;\n\n\/\/ The way arena uses arrays is really deeply awful. The arrays are\n\/\/ allocated, and have capacities reserved, but the fill for the array\n\/\/ will always stay at 0.\ntype chunk = {data: ~[u8], mut fill: uint};\n\nstruct arena {\n \/\/ The head is seperated out from the list as a unbenchmarked\n \/\/ microoptimization, to avoid needing to case on the list to\n \/\/ access the head.\n priv mut head: @chunk;\n priv mut chunks: @list<@chunk>;\n drop {\n unsafe {\n destroy_chunk(self.head);\n for list::each(self.chunks) |chunk| { destroy_chunk(chunk); }\n }\n }\n}\n\nfn chunk(size: uint) -> @chunk {\n let mut v = ~[];\n vec::reserve(v, size);\n @{ data: v, mut fill: 0u }\n}\n\nfn arena_with_size(initial_size: uint) -> arena {\n return arena {mut head: chunk(initial_size),\n mut chunks: @nil};\n}\n\nfn arena() -> arena {\n arena_with_size(32u)\n}\n\n#[inline(always)]\nfn round_up_to(base: uint, align: uint) -> uint {\n (base + (align - 1)) & !(align - 1)\n}\n\n\/\/ Walk down a chunk, running the destructors for any objects stored\n\/\/ in it.\nunsafe fn destroy_chunk(chunk: @chunk) {\n let mut idx = 0;\n let buf = vec::unsafe::to_ptr(chunk.data);\n let fill = chunk.fill;\n\n while idx < fill {\n let tydesc_data: *uint = reinterpret_cast(ptr::offset(buf, idx));\n let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);\n let size = (*tydesc).size, align = (*tydesc).align;\n\n let after_tydesc = idx + sys::size_of::<*TypeDesc>();\n\n let start = round_up_to(after_tydesc, align);\n\n \/\/debug!(\"freeing object: idx = %u, size = %u, align = %u, done = %b\",\n \/\/ start, size, align, is_done);\n if is_done {\n rustrt::rust_call_tydesc_glue(\n ptr::offset(buf, start), tydesc, tydesc_drop_glue_index);\n }\n\n \/\/ Find where the next tydesc lives\n idx = round_up_to(start + size, sys::pref_align_of::<*TypeDesc>());\n }\n}\n\n\/\/ We encode whether the object a tydesc describes has been\n\/\/ initialized in the arena in the low bit of the tydesc pointer. 
This\n\/\/ is necessary in order to properly do cleanup if a failure occurs\n\/\/ during an initializer.\n#[inline(always)]\nunsafe fn bitpack_tydesc_ptr(p: *TypeDesc, is_done: bool) -> uint {\n let p_bits: uint = reinterpret_cast(p);\n p_bits | (is_done as uint)\n}\n#[inline(always)]\nunsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {\n (reinterpret_cast(p & !1), p & 1 == 1)\n}\n\n\nimpl &arena {\n fn alloc_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {\n \/\/ Allocate a new chunk.\n let chunk_size = vec::capacity(self.head.data);\n let new_min_chunk_size = uint::max(n_bytes, chunk_size);\n self.chunks = @cons(self.head, self.chunks);\n self.head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u));\n\n return self.alloc_inner(n_bytes, align);\n }\n\n #[inline(always)]\n fn alloc_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {\n let head = self.head;\n\n let after_tydesc = head.fill + sys::size_of::<*TypeDesc>();\n\n let start = round_up_to(after_tydesc, align);\n let end = start + n_bytes;\n if end > vec::capacity(head.data) {\n return self.alloc_grow(n_bytes, align);\n }\n\n \/\/debug!(\"idx = %u, size = %u, align = %u, fill = %u\",\n \/\/ start, n_bytes, align, head.fill);\n\n unsafe {\n let buf = vec::unsafe::to_ptr(head.data);\n let tydesc_p = ptr::offset(buf, head.fill);\n let p = ptr::offset(buf, start);\n head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());\n\n return (tydesc_p, p);\n }\n }\n\n #[inline(always)]\n fn alloc<T>(op: fn() -> T) -> &self\/T {\n unsafe {\n let tydesc = sys::get_type_desc::<T>();\n let (ty_ptr, ptr) =\n self.alloc_inner((*tydesc).size, (*tydesc).align);\n let ty_ptr: *mut uint = reinterpret_cast(ty_ptr);\n let ptr: *mut T = reinterpret_cast(ptr);\n \/\/ Write in our tydesc along with a bit indicating that it\n \/\/ has *not* been initialized yet.\n *ty_ptr = reinterpret_cast(tydesc);\n \/\/ Actually initialize it\n rusti::move_val_init(*ptr, op());\n \/\/ Now that we are done, update the tydesc to indicate that\n \/\/ the object is there.\n *ty_ptr = bitpack_tydesc_ptr(tydesc, true);\n\n return reinterpret_cast(ptr);\n }\n }\n}\n\n#[test]\nfn test_arena_destructors() {\n let arena = arena::arena();\n for uint::range(0, 10) |i| {\n \/\/ Arena allocate something with drop glue to make sure it\n \/\/ doesn't leak.\n do arena.alloc { @i };\n \/\/ Allocate something with funny size and alignment, to keep\n \/\/ things interesting.\n do arena.alloc { [0u8, 1u8, 2u8]\/3 };\n }\n}\n\n#[test]\n#[should_fail]\nfn test_arena_destructors_fail() {\n let arena = arena::arena();\n \/\/ Put some stuff in the arena.\n for uint::range(0, 10) |i| {\n \/\/ Arena allocate something with drop glue to make sure it\n \/\/ doesn't leak.\n do arena.alloc { @i };\n \/\/ Allocate something with funny size and alignment, to keep\n \/\/ things interesting.\n do arena.alloc { [0u8, 1u8, 2u8]\/3 };\n }\n \/\/ Now, fail while allocating\n do arena.alloc::<@int> {\n \/\/ First, recursively allocate something else; that needs to\n \/\/ get freed too.\n do arena.alloc { @20 };\n \/\/ Now fail.\n fail;\n };\n}\n<commit_msg>Have std::arena segregate POD data and non-POD data into different chunks.<commit_after>\/\/ Dynamic arenas.\n\n\/\/ Arenas are used to quickly allocate objects that share a\n\/\/ lifetime. The arena uses ~[u8] vectors as a backing store to\n\/\/ allocate objects from. For each allocated object, the arena stores\n\/\/ a pointer to the type descriptor followed by the\n\/\/ object. 
(Potentially with alignment padding after each of them.)\n\/\/ When the arena is destroyed, it iterates through all of its chunks,\n\/\/ and uses the tydesc information to trace through the objects,\n\/\/ calling the destructors on them.\n\/\/ One subtle point that needs to be addressed is how to handle\n\/\/ failures while running the user provided initializer function. It\n\/\/ is important to not run the destructor on uninitalized objects, but\n\/\/ how to detect them is somewhat subtle. Since alloc() can be invoked\n\/\/ recursively, it is not sufficient to simply exclude the most recent\n\/\/ object. To solve this without requiring extra space, we use the low\n\/\/ order bit of the tydesc pointer to encode whether the object it\n\/\/ describes has been fully initialized.\n\n\/\/ As an optimization, objects with destructors are stored in\n\/\/ different chunks than objects without destructors. This reduces\n\/\/ overhead when initializing plain-old-data and means we don't need\n\/\/ to waste time running the destructors of POD.\n\nexport arena, arena_with_size;\n\nimport list;\nimport list::{list, cons, nil};\nimport unsafe::reinterpret_cast;\nimport sys::TypeDesc;\nimport libc::size_t;\n\n#[abi = \"rust-intrinsic\"]\nextern mod rusti {\n fn move_val_init<T>(&dst: T, -src: T);\n fn needs_drop<T>() -> bool;\n}\nextern mod rustrt {\n #[rust_stack]\n fn rust_call_tydesc_glue(root: *u8, tydesc: *TypeDesc, field: size_t);\n}\n\/\/ This probably belongs somewhere else. Needs to be kept in sync with\n\/\/ changes to glue...\nconst tydesc_drop_glue_index: size_t = 3 as size_t;\n\n\/\/ The way arena uses arrays is really deeply awful. The arrays are\n\/\/ allocated, and have capacities reserved, but the fill for the array\n\/\/ will always stay at 0.\ntype chunk = {data: ~[u8], mut fill: uint, is_pod: bool};\n\nstruct arena {\n \/\/ The head is seperated out from the list as a unbenchmarked\n \/\/ microoptimization, to avoid needing to case on the list to\n \/\/ access the head.\n priv mut head: @chunk;\n priv mut pod_head: @chunk;\n priv mut chunks: @list<@chunk>;\n drop {\n unsafe {\n destroy_chunk(self.head);\n for list::each(self.chunks) |chunk| {\n if !chunk.is_pod { destroy_chunk(chunk); }\n }\n }\n }\n}\n\nfn chunk(size: uint, is_pod: bool) -> @chunk {\n let mut v = ~[];\n vec::reserve(v, size);\n @{ data: v, mut fill: 0u, is_pod: is_pod }\n}\n\nfn arena_with_size(initial_size: uint) -> arena {\n return arena {mut head: chunk(initial_size, false),\n mut pod_head: chunk(initial_size, true),\n mut chunks: @nil};\n}\n\nfn arena() -> arena {\n arena_with_size(32u)\n}\n\n#[inline(always)]\nfn round_up_to(base: uint, align: uint) -> uint {\n (base + (align - 1)) & !(align - 1)\n}\n\n\/\/ Walk down a chunk, running the destructors for any objects stored\n\/\/ in it.\nunsafe fn destroy_chunk(chunk: @chunk) {\n let mut idx = 0;\n let buf = vec::unsafe::to_ptr(chunk.data);\n let fill = chunk.fill;\n\n while idx < fill {\n let tydesc_data: *uint = reinterpret_cast(ptr::offset(buf, idx));\n let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);\n let size = (*tydesc).size, align = (*tydesc).align;\n\n let after_tydesc = idx + sys::size_of::<*TypeDesc>();\n\n let start = round_up_to(after_tydesc, align);\n\n \/\/debug!(\"freeing object: idx = %u, size = %u, align = %u, done = %b\",\n \/\/ start, size, align, is_done);\n if is_done {\n rustrt::rust_call_tydesc_glue(\n ptr::offset(buf, start), tydesc, tydesc_drop_glue_index);\n }\n\n \/\/ Find where the next tydesc lives\n idx = round_up_to(start 
+ size, sys::pref_align_of::<*TypeDesc>());\n }\n}\n\n\/\/ We encode whether the object a tydesc describes has been\n\/\/ initialized in the arena in the low bit of the tydesc pointer. This\n\/\/ is necessary in order to properly do cleanup if a failure occurs\n\/\/ during an initializer.\n#[inline(always)]\nunsafe fn bitpack_tydesc_ptr(p: *TypeDesc, is_done: bool) -> uint {\n let p_bits: uint = reinterpret_cast(p);\n p_bits | (is_done as uint)\n}\n#[inline(always)]\nunsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {\n (reinterpret_cast(p & !1), p & 1 == 1)\n}\n\n\/\/ The duplication between the POD and non-POD functions is annoying.\nimpl &arena {\n \/\/ Functions for the POD part of the arena\n fn alloc_pod_grow(n_bytes: uint, align: uint) -> *u8 {\n \/\/ Allocate a new chunk.\n let chunk_size = vec::capacity(self.pod_head.data);\n let new_min_chunk_size = uint::max(n_bytes, chunk_size);\n self.chunks = @cons(self.pod_head, self.chunks);\n self.pod_head =\n chunk(uint::next_power_of_two(new_min_chunk_size + 1u), true);\n\n return self.alloc_pod_inner(n_bytes, align);\n }\n\n #[inline(always)]\n fn alloc_pod_inner(n_bytes: uint, align: uint) -> *u8 {\n let head = self.pod_head;\n\n let start = round_up_to(head.fill, align);\n let end = start + n_bytes;\n if end > vec::capacity(head.data) {\n return self.alloc_pod_grow(n_bytes, align);\n }\n head.fill = end;\n\n \/\/debug!(\"idx = %u, size = %u, align = %u, fill = %u\",\n \/\/ start, n_bytes, align, head.fill);\n\n unsafe {\n ptr::offset(vec::unsafe::to_ptr(head.data), start)\n }\n }\n\n #[inline(always)]\n fn alloc_pod<T>(op: fn() -> T) -> &self\/T {\n unsafe {\n let tydesc = sys::get_type_desc::<T>();\n let ptr = self.alloc_pod_inner((*tydesc).size, (*tydesc).align);\n let ptr: *mut T = reinterpret_cast(ptr);\n rusti::move_val_init(*ptr, op());\n return reinterpret_cast(ptr);\n }\n }\n\n \/\/ Functions for the non-POD part of the arena\n fn alloc_nonpod_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {\n \/\/ Allocate a new chunk.\n let chunk_size = vec::capacity(self.head.data);\n let new_min_chunk_size = uint::max(n_bytes, chunk_size);\n self.chunks = @cons(self.head, self.chunks);\n self.head =\n chunk(uint::next_power_of_two(new_min_chunk_size + 1u), false);\n\n return self.alloc_nonpod_inner(n_bytes, align);\n }\n\n #[inline(always)]\n fn alloc_nonpod_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {\n let head = self.head;\n\n let tydesc_start = head.fill;\n let after_tydesc = head.fill + sys::size_of::<*TypeDesc>();\n let start = round_up_to(after_tydesc, align);\n let end = start + n_bytes;\n if end > vec::capacity(head.data) {\n return self.alloc_nonpod_grow(n_bytes, align);\n }\n head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());\n\n \/\/debug!(\"idx = %u, size = %u, align = %u, fill = %u\",\n \/\/ start, n_bytes, align, head.fill);\n\n unsafe {\n let buf = vec::unsafe::to_ptr(head.data);\n return (ptr::offset(buf, tydesc_start), ptr::offset(buf, start));\n }\n }\n\n #[inline(always)]\n fn alloc_nonpod<T>(op: fn() -> T) -> &self\/T {\n unsafe {\n let tydesc = sys::get_type_desc::<T>();\n let (ty_ptr, ptr) =\n self.alloc_nonpod_inner((*tydesc).size, (*tydesc).align);\n let ty_ptr: *mut uint = reinterpret_cast(ty_ptr);\n let ptr: *mut T = reinterpret_cast(ptr);\n \/\/ Write in our tydesc along with a bit indicating that it\n \/\/ has *not* been initialized yet.\n *ty_ptr = reinterpret_cast(tydesc);\n \/\/ Actually initialize it\n rusti::move_val_init(*ptr, op());\n \/\/ Now that we are done, update the 
tydesc to indicate that\n \/\/ the object is there.\n *ty_ptr = bitpack_tydesc_ptr(tydesc, true);\n\n return reinterpret_cast(ptr);\n }\n }\n\n \/\/ The external interface\n #[inline(always)]\n fn alloc<T>(op: fn() -> T) -> &self\/T {\n if !rusti::needs_drop::<T>() {\n self.alloc_pod(op)\n } else { self.alloc_nonpod(op) }\n }\n}\n\n#[test]\nfn test_arena_destructors() {\n let arena = arena::arena();\n for uint::range(0, 10) |i| {\n \/\/ Arena allocate something with drop glue to make sure it\n \/\/ doesn't leak.\n do arena.alloc { @i };\n \/\/ Allocate something with funny size and alignment, to keep\n \/\/ things interesting.\n do arena.alloc { [0u8, 1u8, 2u8]\/3 };\n }\n}\n\n#[test]\n#[should_fail]\nfn test_arena_destructors_fail() {\n let arena = arena::arena();\n \/\/ Put some stuff in the arena.\n for uint::range(0, 10) |i| {\n \/\/ Arena allocate something with drop glue to make sure it\n \/\/ doesn't leak.\n do arena.alloc { @i };\n \/\/ Allocate something with funny size and alignment, to keep\n \/\/ things interesting.\n do arena.alloc { [0u8, 1u8, 2u8]\/3 };\n }\n \/\/ Now, fail while allocating\n do arena.alloc::<@int> {\n \/\/ First, recursively allocate something else; that needs to\n \/\/ get freed too.\n do arena.alloc { @20 };\n \/\/ Now fail.\n fail;\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>generic load hardening test<commit_after>\/\/ Test LVI load hardening on SGX enclave code\n\n\/\/ assembly-output: emit-asm\n\/\/ compile-flags: --crate-type staticlib\n\/\/ only-x86_64-fortanix-unknown-sgx\n\n#[no_mangle]\npub extern fn plus_one(r: &mut u64) {\n *r = *r + 1;\n}\n\n\/\/ CHECK: plus_one\n\/\/ CHECK: lfence\n\/\/ CHECK-NEXT: addq\n\/\/ CHECK: popq [[REGISTER:%[a-z]+]]\n\/\/ CHECK-NEXT: lfence\n\/\/ CHECK-NEXT: jmpq *[[REGISTER]]\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![feature(plugin, custom_attribute)]\n#![plugin(gfx_macros)]\n\nextern crate cgmath;\nextern crate gfx;\nextern crate gfx_window_glutin;\nextern crate glutin;\nextern crate time;\nextern crate rand;\nextern crate genmesh;\nextern crate noise;\n\nuse std::fmt;\nuse rand::Rng;\nuse cgmath::FixedArray;\nuse cgmath::{Matrix4, Point3, Vector3};\nuse cgmath::{Transform, AffineMatrix3};\nuse gfx::traits::*;\nuse genmesh::{Vertices, Triangulate};\nuse genmesh::generators::{Plane, SharedVertex, IndexedPolygon};\nuse time::precise_time_s;\n\nuse noise::{Seed, perlin2};\n\n#[vertex_format]\n#[derive(Clone, Copy)]\nstruct Vertex {\n #[name = \"a_Pos\"]\n pos: [f32; 3],\n\n #[name = \"a_Color\"]\n color: [f32; 3],\n}\n\nimpl fmt::Debug for Vertex {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"Pos({}, {}, {})\", self.pos[0], self.pos[1], self.pos[2])\n }\n}\n\n\/\/ The shader_param attribute makes sure the following struct can be used to\n\/\/ pass parameters to a shader.\n#[shader_param]\nstruct Params<R: gfx::Resources> {\n #[name = 
\"u_Model\"]\n model: [[f32; 4]; 4],\n\n #[name = \"u_View\"]\n view: [[f32; 4]; 4],\n\n #[name = \"u_Proj\"]\n proj: [[f32; 4]; 4],\n\n _dummy: std::marker::PhantomData<R>,\n}\n\nfn calculate_color(height: f32) -> [f32; 3] {\n if height > 8.0 {\n [0.9, 0.9, 0.9] \/\/ white\n } else if height > 0.0 {\n [0.7, 0.7, 0.7] \/\/ greay\n } else if height > -5.0 {\n [0.2, 0.7, 0.2] \/\/ green\n } else {\n [0.2, 0.2, 0.7] \/\/ blue\n }\n}\n\npub fn main() {\n let mut canvas = gfx_window_glutin::init(glutin::Window::new().unwrap())\n .into_canvas();\n canvas.output.window.set_title(\"Terrain example\");\n\n let rand_seed = rand::thread_rng().gen();\n let seed = Seed::new(rand_seed);\n let plane = Plane::subdivide(256, 256);\n let vertex_data: Vec<Vertex> = plane.shared_vertex_iter()\n .map(|(x, y)| {\n let h = perlin2(&seed, &[x, y]) * 32.0;\n Vertex {\n pos: [25.0 * x, 25.0 * y, h],\n color: calculate_color(h),\n }\n })\n .collect();\n\n let index_data: Vec<u32> = plane.indexed_polygon_iter()\n .triangulate()\n .vertices()\n .map(|i| i as u32)\n .collect();\n\n let slice = canvas.factory\n .create_buffer_index::<u32>(&index_data)\n .to_slice(gfx::PrimitiveType::TriangleList);\n\n let mesh = canvas.factory.create_mesh(&vertex_data);\n let program = {\n let vs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"terrain_120.glslv\")),\n glsl_150: Some(include_bytes!(\"terrain_150.glslv\")),\n .. gfx::ShaderSource::empty()\n };\n let fs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"terrain_120.glslf\")),\n glsl_150: Some(include_bytes!(\"terrain_150.glslf\")),\n .. gfx::ShaderSource::empty()\n };\n canvas.factory.link_program_source(vs, fs, &canvas.device.get_capabilities())\n .unwrap()\n };\n\n let state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true);\n\n let data = Params {\n model: Matrix4::identity().into_fixed(),\n view: Matrix4::identity().into_fixed(),\n proj: cgmath::perspective(cgmath::deg(60.0f32), \n canvas.get_aspect_ratio(),\n 0.1, 1000.0\n ).into_fixed(),\n _dummy: std::marker::PhantomData,\n };\n let mut context = gfx::batch::Context::new();\n let mut batch = context.make_batch(&program, data, &mesh, slice, &state)\n .unwrap();\n\n 'main: loop {\n \/\/ quit when Esc is pressed.\n for event in canvas.output.window.poll_events() {\n match event {\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main,\n glutin::Event::Closed => break 'main,\n _ => {},\n }\n }\n\n let time = precise_time_s() as f32;\n let x = time.sin();\n let y = time.cos();\n let view: AffineMatrix3<f32> = Transform::look_at(\n &Point3::new(x * 32.0, y * 32.0, 16.0),\n &Point3::new(0.0, 0.0, 0.0),\n &Vector3::unit_z(),\n );\n batch.params.view = view.mat.into_fixed();\n\n canvas.clear(gfx::ClearData {\n color: [0.3, 0.3, 0.3, 1.0],\n depth: 1.0,\n stencil: 0,\n });\n canvas.draw(&(&batch, &context)).unwrap();\n canvas.present();\n }\n}\n<commit_msg>Fixed the terrain example<commit_after>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific 
language governing permissions and\n\/\/ limitations under the License.\n\nextern crate cgmath;\n#[macro_use]\nextern crate gfx;\nextern crate gfx_window_glutin;\nextern crate glutin;\nextern crate time;\nextern crate rand;\nextern crate genmesh;\nextern crate noise;\n\nuse rand::Rng;\nuse cgmath::FixedArray;\nuse cgmath::{Matrix4, Point3, Vector3};\nuse cgmath::{Transform, AffineMatrix3};\nuse gfx::traits::*;\nuse genmesh::{Vertices, Triangulate};\nuse genmesh::generators::{Plane, SharedVertex, IndexedPolygon};\nuse time::precise_time_s;\n\nuse noise::{Seed, perlin2};\n\n\ngfx_vertex!( Vertex {\n a_Pos@ pos: [f32; 3],\n a_Color@ color: [f32; 3],\n});\n\ngfx_parameters!( Params\/ParamsLink {\n u_Model@ model: [[f32; 4]; 4],\n u_View@ view: [[f32; 4]; 4],\n u_Proj@ proj: [[f32; 4]; 4],\n});\n\nfn calculate_color(height: f32) -> [f32; 3] {\n if height > 8.0 {\n [0.9, 0.9, 0.9] \/\/ white\n } else if height > 0.0 {\n [0.7, 0.7, 0.7] \/\/ greay\n } else if height > -5.0 {\n [0.2, 0.7, 0.2] \/\/ green\n } else {\n [0.2, 0.2, 0.7] \/\/ blue\n }\n}\n\npub fn main() {\n let mut canvas = gfx_window_glutin::init(glutin::Window::new().unwrap())\n .into_canvas();\n canvas.output.window.set_title(\"Terrain example\");\n\n let rand_seed = rand::thread_rng().gen();\n let seed = Seed::new(rand_seed);\n let plane = Plane::subdivide(256, 256);\n let vertex_data: Vec<Vertex> = plane.shared_vertex_iter()\n .map(|(x, y)| {\n let h = perlin2(&seed, &[x, y]) * 32.0;\n Vertex {\n pos: [25.0 * x, 25.0 * y, h],\n color: calculate_color(h),\n }\n })\n .collect();\n\n let index_data: Vec<u32> = plane.indexed_polygon_iter()\n .triangulate()\n .vertices()\n .map(|i| i as u32)\n .collect();\n\n let slice = canvas.factory\n .create_buffer_index::<u32>(&index_data)\n .to_slice(gfx::PrimitiveType::TriangleList);\n\n let mesh = canvas.factory.create_mesh(&vertex_data);\n let program = {\n let vs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"terrain_120.glslv\")),\n glsl_150: Some(include_bytes!(\"terrain_150.glslv\")),\n .. gfx::ShaderSource::empty()\n };\n let fs = gfx::ShaderSource {\n glsl_120: Some(include_bytes!(\"terrain_120.glslf\")),\n glsl_150: Some(include_bytes!(\"terrain_150.glslf\")),\n .. 
gfx::ShaderSource::empty()\n };\n canvas.factory.link_program_source(vs, fs, &canvas.device.get_capabilities())\n .unwrap()\n };\n\n let state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true);\n\n let data = Params {\n model: Matrix4::identity().into_fixed(),\n view: Matrix4::identity().into_fixed(),\n proj: cgmath::perspective(cgmath::deg(60.0f32), \n canvas.get_aspect_ratio(),\n 0.1, 1000.0\n ).into_fixed(),\n _r: std::marker::PhantomData,\n };\n let mut context = gfx::batch::Context::new();\n let mut batch = context.make_batch(&program, data, &mesh, slice, &state)\n .unwrap();\n\n 'main: loop {\n \/\/ quit when Esc is pressed.\n for event in canvas.output.window.poll_events() {\n match event {\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main,\n glutin::Event::Closed => break 'main,\n _ => {},\n }\n }\n\n let time = precise_time_s() as f32;\n let x = time.sin();\n let y = time.cos();\n let view: AffineMatrix3<f32> = Transform::look_at(\n &Point3::new(x * 32.0, y * 32.0, 16.0),\n &Point3::new(0.0, 0.0, 0.0),\n &Vector3::unit_z(),\n );\n batch.params.view = view.mat.into_fixed();\n\n canvas.clear(gfx::ClearData {\n color: [0.3, 0.3, 0.3, 1.0],\n depth: 1.0,\n stencil: 0,\n });\n canvas.draw(&(&batch, &context)).unwrap();\n canvas.present();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>xenstore structs<commit_after>#[repr(u32)]\nenum xsd_sockmsg_type {\n Debug = 0,\n Directory = 1,\n Read = 2,\n GetPerms = 3,\n Watch = 4,\n Unwatch = 5,\n TransactionStart = 6,\n TransactionEnd = 7,\n Introduce = 8,\n Release = 9,\n GetDomainPath = 10,\n Write = 11,\n Mkdir = 12,\n Rm = 13,\n SetPerms = 14,\n WatchEvent = 15,\n Error = 16,\n IsDomainIntroduced = 17,\n Resume = 18,\n SetTarget = 19,\n Restrict = 20,\n ResetWatches = 21,\n\n Invalid = 0xffff \/* Guaranteed to remain an invalid type *\/\n}\n\nstruct xsd_sockmsg {\n _type: xsd_sockmsg_type,\n req_id: u32,\n tx_id: u32,\n len: u32\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>An attempt at a macro to support HTML literals<commit_after>\/\/ A test of the macro system. Can we do HTML literals?\n\n\/\/ xfail-pretty\n\/\/ xfail-test\n\nmacro_rules! html {\n { $($body:tt)* } => {\n let builder = HTMLBuilder();\n build_html!{builder := $($body)*};\n builder.getDoc()\n }\n}\n\nmacro_rules! build_html {\n { $builder:expr := <\/$tag:ident> $($rest:tt)* } => {\n $builder.endTag(stringify!($tag));\n build_html!{ $builder := $($rest)* };\n };\n\n { $builder:expr := <$tag:ident> $($rest:tt)* } => {\n $builder.beginTag(stringify!($tag));\n build_html!{ $builder := $($rest)* };\n };\n\n { $builder:expr := . $($rest:tt)* } => {\n $builder.addText(~\".\");\n build_html!{ $builder := $($rest)* };\n };\n\n { $builder:expr := $word:ident $($rest:tt)* } => {\n $builder.addText(stringify!($word));\n build_html!{ $builder := $($rest)* };\n };\n\n { $builder:expr := } => { }\n}\n\nfn main() {\n\n let page = html! 
{\n <html>\n <head><title>This is the title.<\/title><\/head>\n <body>\n <p>This is some text<\/p>\n <\/body>\n <\/html>\n };\n\n \/\/ When we can do this, we are successful:\n \/\/\n \/\/let page = tag(~\"html\", ~[tag(~\"head\", ~[...])])\n\n}\n\nenum HTMLFragment { \n}\n\nstruct HTMLBuilder {\n bar: ();\n fn getDoc() -> HTMLFragment { fail }\n fn beginTag(tag: ~str) { }\n fn endTag(tag: ~str) { }\n fn addText(test: ~str) { }\n}\n\nfn HTMLBuilder() -> HTMLBuilder {\n HTMLBuilder { bar: () }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>user input and output functionality added<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse std::{mem, ptr};\nuse vk;\nuse gfx_core::{self as core, draw, pso, shade, target, tex};\nuse gfx_core::state::RefValues;\nuse gfx_core::{IndexType, VertexCount};\nuse native;\nuse {Resources, Share, SharePointer};\n\n\npub struct Buffer {\n inner: vk::CommandBuffer,\n family: u32,\n share: SharePointer,\n}\n\nimpl Buffer {\n #[doc(hidden)]\n pub fn new(b: vk::CommandBuffer, f: u32, s: SharePointer) -> Buffer {\n Buffer {\n inner: b,\n family: f,\n share: s,\n }\n }\n}\n\nimpl Drop for Buffer {\n fn drop(&mut self) {\n \/\/TODO\n }\n}\n\nimpl Buffer {\n pub fn image_barrier(&mut self, image: vk::Image, aspect: vk::ImageAspectFlags,\n old_layout: vk::ImageLayout, new_layout: vk::ImageLayout) {\n let barrier = vk::ImageMemoryBarrier {\n sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,\n pNext: ptr::null(),\n srcAccessMask: if old_layout == vk::IMAGE_LAYOUT_PREINITIALIZED || new_layout == vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL {\n vk::ACCESS_HOST_WRITE_BIT | vk::ACCESS_TRANSFER_WRITE_BIT\n } else {0},\n dstAccessMask: match new_layout {\n vk::IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL | vk::IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL =>\n vk::ACCESS_TRANSFER_READ_BIT | vk::ACCESS_HOST_WRITE_BIT | vk::ACCESS_TRANSFER_WRITE_BIT,\n vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL => vk::ACCESS_SHADER_READ_BIT,\n _ => 0,\n },\n oldLayout: old_layout,\n newLayout: new_layout,\n srcQueueFamilyIndex: self.family,\n dstQueueFamilyIndex: self.family,\n image: image,\n subresourceRange: vk::ImageSubresourceRange {\n aspectMask: aspect,\n baseMipLevel: 0,\n levelCount: 1,\n baseArrayLayer: 0,\n layerCount: 1,\n },\n };\n let (_dev, vk) = self.share.get_device();\n unsafe {\n vk.CmdPipelineBarrier(self.inner,\n vk::PIPELINE_STAGE_TOP_OF_PIPE_BIT, vk::PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,\n 0, ptr::null(), 0, ptr::null(), 1, &barrier);\n }\n }\n}\n\nimpl draw::CommandBuffer<Resources> for Buffer {\n fn reset(&mut self) {}\n fn bind_pipeline_state(&mut self, _: native::Pipeline) {}\n fn bind_vertex_buffers(&mut self, _: pso::VertexBufferSet<Resources>) {}\n fn bind_constant_buffers(&mut self, _: &[pso::ConstantBufferParam<Resources>]) {}\n fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {}\n fn bind_resource_views(&mut self, _: 
&[pso::ResourceViewParam<Resources>]) {}\n fn bind_unordered_views(&mut self, _: &[pso::UnorderedViewParam<Resources>]) {}\n fn bind_samplers(&mut self, _: &[pso::SamplerParam<Resources>]) {}\n fn bind_pixel_targets(&mut self, _: pso::PixelTargetSet<Resources>) {}\n fn bind_index(&mut self, _: native::Buffer, _: IndexType) {}\n fn set_scissor(&mut self, _: target::Rect) {}\n fn set_ref_values(&mut self, _: RefValues) {}\n fn update_buffer(&mut self, _: native::Buffer, _: &[u8], _: usize) {}\n fn update_texture(&mut self, _: native::Texture, _: tex::Kind, _: Option<tex::CubeFace>,\n _: &[u8], _: tex::RawImageInfo) {}\n fn generate_mipmap(&mut self, _: native::TextureView) {}\n\n fn clear_color(&mut self, tv: native::TextureView, color: draw::ClearColor) {\n let (_, vk) = self.share.get_device();\n let value = match color {\n draw::ClearColor::Float(v) => vk::ClearColorValue::float32(v),\n draw::ClearColor::Int(v) => vk::ClearColorValue::int32(v),\n draw::ClearColor::Uint(v) => vk::ClearColorValue::uint32(v),\n };\n unsafe {\n vk.CmdClearColorImage(self.inner, tv.image, tv.layout, &value, 1, &tv.sub_range);\n }\n }\n\n fn clear_depth_stencil(&mut self, _: native::TextureView, _: Option<target::Depth>,\n _: Option<target::Stencil>) {}\n\n fn call_draw(&mut self, _: VertexCount, _: VertexCount, _: draw::InstanceOption) {}\n fn call_draw_indexed(&mut self, _: VertexCount, _: VertexCount,\n _: VertexCount, _: draw::InstanceOption) {}\n}\n\n\npub struct GraphicsQueue {\n share: SharePointer,\n family: u32,\n queue: vk::Queue,\n capabilities: core::Capabilities,\n}\n\nimpl GraphicsQueue {\n #[doc(hidden)]\n pub fn new(share: SharePointer, q: vk::Queue, qf_id: u32) -> GraphicsQueue {\n let caps = core::Capabilities {\n max_vertex_count: 0,\n max_index_count: 0,\n max_texture_size: 0,\n instance_base_supported: false,\n instance_call_supported: false,\n instance_rate_supported: false,\n vertex_base_supported: false,\n srgb_color_supported: false,\n constant_buffer_supported: false,\n unordered_access_view_supported: false,\n separate_blending_slots_supported: false,\n };\n GraphicsQueue {\n share: share,\n family: qf_id,\n queue: q,\n capabilities: caps,\n }\n }\n #[doc(hidden)]\n pub fn get_share(&self) -> &Share {\n &self.share\n }\n #[doc(hidden)]\n pub fn get_queue(&self) -> vk::Queue {\n self.queue\n }\n}\n\nimpl core::Device for GraphicsQueue {\n type Resources = Resources;\n type CommandBuffer = Buffer;\n\n fn get_capabilities(&self) -> &core::Capabilities {\n &self.capabilities\n }\n\n fn pin_submitted_resources(&mut self, _: &core::handle::Manager<Resources>) {}\n\n fn submit(&mut self, com: &mut Buffer) {\n assert_eq!(self.family, com.family);\n let (_, vk) = self.share.get_device();\n assert_eq!(vk::SUCCESS, unsafe {\n vk.EndCommandBuffer(com.inner)\n });\n let submit_info = vk::SubmitInfo {\n sType: vk::STRUCTURE_TYPE_SUBMIT_INFO,\n commandBufferCount: 1,\n pCommandBuffers: &com.inner,\n .. 
unsafe { mem::zeroed() }\n };\n assert_eq!(vk::SUCCESS, unsafe {\n vk.QueueSubmit(self.queue, 1, &submit_info, 0)\n });\n let begin_info = vk::CommandBufferBeginInfo {\n sType: vk::STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,\n pNext: ptr::null(),\n flags: 0,\n pInheritanceInfo: ptr::null(),\n };\n assert_eq!(vk::SUCCESS, unsafe {\n vk.BeginCommandBuffer(com.inner, &begin_info)\n });\n }\n\n \/\/note: this should really live elsewhere (Factory?)\n fn cleanup(&mut self) {\n let (dev, mut functions) = self.share.get_device();\n use gfx_core::handle::Producer;\n \/\/self.frame_handles.clear();\n self.share.handles.borrow_mut().clean_with(&mut functions,\n |vk, b| unsafe { \/\/buffer\n vk.DestroyBuffer(dev, b.buffer, ptr::null());\n vk.FreeMemory(dev, b.memory, ptr::null());\n },\n |vk, s| unsafe { \/\/shader\n vk.DestroyShaderModule(dev, *s, ptr::null());\n },\n |_, _p| (), \/\/program\n |vk, p| unsafe { \/\/PSO\n vk.DestroyPipeline(dev, p.pipeline, ptr::null());\n vk.DestroyPipelineLayout(dev, p.pipe_layout, ptr::null());\n vk.DestroyDescriptorSetLayout(dev, p.desc_layout, ptr::null());\n vk.DestroyDescriptorPool(dev, p.desc_pool, ptr::null());\n },\n |vk, t| if t.memory != 0 {unsafe { \/\/texture\n vk.DestroyImage(dev, t.image, ptr::null());\n vk.FreeMemory(dev, t.memory, ptr::null());\n }},\n |vk, v| unsafe { \/\/SRV\n vk.DestroyImageView(dev, v.view, ptr::null());\n },\n |_, _| (), \/\/UAV\n |vk, v| unsafe { \/\/RTV\n vk.DestroyImageView(dev, v.view, ptr::null());\n },\n |_, _v| (), \/\/DSV\n |_, _v| (), \/\/sampler\n |_, _| (), \/\/fence\n );\n }\n}\n<commit_msg>[vk] depth-stencil clears and view destruction<commit_after>\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse std::{mem, ptr};\nuse vk;\nuse gfx_core::{self as core, draw, pso, shade, target, tex};\nuse gfx_core::state::RefValues;\nuse gfx_core::{IndexType, VertexCount};\nuse native;\nuse {Resources, Share, SharePointer};\n\n\npub struct Buffer {\n inner: vk::CommandBuffer,\n family: u32,\n share: SharePointer,\n}\n\nimpl Buffer {\n #[doc(hidden)]\n pub fn new(b: vk::CommandBuffer, f: u32, s: SharePointer) -> Buffer {\n Buffer {\n inner: b,\n family: f,\n share: s,\n }\n }\n}\n\nimpl Drop for Buffer {\n fn drop(&mut self) {\n \/\/TODO\n }\n}\n\nimpl Buffer {\n pub fn image_barrier(&mut self, image: vk::Image, aspect: vk::ImageAspectFlags,\n old_layout: vk::ImageLayout, new_layout: vk::ImageLayout) {\n let barrier = vk::ImageMemoryBarrier {\n sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,\n pNext: ptr::null(),\n srcAccessMask: if old_layout == vk::IMAGE_LAYOUT_PREINITIALIZED || new_layout == vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL {\n vk::ACCESS_HOST_WRITE_BIT | vk::ACCESS_TRANSFER_WRITE_BIT\n } else {0},\n dstAccessMask: match new_layout {\n vk::IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL | vk::IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL =>\n vk::ACCESS_TRANSFER_READ_BIT | vk::ACCESS_HOST_WRITE_BIT | vk::ACCESS_TRANSFER_WRITE_BIT,\n 
vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL => vk::ACCESS_SHADER_READ_BIT,\n _ => 0,\n },\n oldLayout: old_layout,\n newLayout: new_layout,\n srcQueueFamilyIndex: self.family,\n dstQueueFamilyIndex: self.family,\n image: image,\n subresourceRange: vk::ImageSubresourceRange {\n aspectMask: aspect,\n baseMipLevel: 0,\n levelCount: 1,\n baseArrayLayer: 0,\n layerCount: 1,\n },\n };\n let (_dev, vk) = self.share.get_device();\n unsafe {\n vk.CmdPipelineBarrier(self.inner,\n vk::PIPELINE_STAGE_TOP_OF_PIPE_BIT, vk::PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,\n 0, ptr::null(), 0, ptr::null(), 1, &barrier);\n }\n }\n}\n\nimpl draw::CommandBuffer<Resources> for Buffer {\n fn reset(&mut self) {}\n fn bind_pipeline_state(&mut self, _: native::Pipeline) {}\n fn bind_vertex_buffers(&mut self, _: pso::VertexBufferSet<Resources>) {}\n fn bind_constant_buffers(&mut self, _: &[pso::ConstantBufferParam<Resources>]) {}\n fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {}\n fn bind_resource_views(&mut self, _: &[pso::ResourceViewParam<Resources>]) {}\n fn bind_unordered_views(&mut self, _: &[pso::UnorderedViewParam<Resources>]) {}\n fn bind_samplers(&mut self, _: &[pso::SamplerParam<Resources>]) {}\n fn bind_pixel_targets(&mut self, _: pso::PixelTargetSet<Resources>) {}\n fn bind_index(&mut self, _: native::Buffer, _: IndexType) {}\n fn set_scissor(&mut self, _: target::Rect) {}\n fn set_ref_values(&mut self, _: RefValues) {}\n fn update_buffer(&mut self, _: native::Buffer, _: &[u8], _: usize) {}\n fn update_texture(&mut self, _: native::Texture, _: tex::Kind, _: Option<tex::CubeFace>,\n _: &[u8], _: tex::RawImageInfo) {}\n fn generate_mipmap(&mut self, _: native::TextureView) {}\n\n fn clear_color(&mut self, tv: native::TextureView, color: draw::ClearColor) {\n let (_, vk) = self.share.get_device();\n let value = match color {\n draw::ClearColor::Float(v) => vk::ClearColorValue::float32(v),\n draw::ClearColor::Int(v) => vk::ClearColorValue::int32(v),\n draw::ClearColor::Uint(v) => vk::ClearColorValue::uint32(v),\n };\n unsafe {\n vk.CmdClearColorImage(self.inner, tv.image, tv.layout, &value, 1, &tv.sub_range);\n }\n }\n\n fn clear_depth_stencil(&mut self, tv: native::TextureView, depth: Option<target::Depth>,\n stencil: Option<target::Stencil>) {\n let (_, vk) = self.share.get_device();\n let value = vk::ClearDepthStencilValue {\n depth: depth.unwrap_or(1.0), \/\/TODO\n stencil: stencil.unwrap_or(0) as u32, \/\/TODO\n };\n unsafe {\n vk.CmdClearDepthStencilImage(self.inner, tv.image, tv.layout, &value, 1, &tv.sub_range);\n }\n }\n\n fn call_draw(&mut self, _: VertexCount, _: VertexCount, _: draw::InstanceOption) {}\n fn call_draw_indexed(&mut self, _: VertexCount, _: VertexCount,\n _: VertexCount, _: draw::InstanceOption) {}\n}\n\n\npub struct GraphicsQueue {\n share: SharePointer,\n family: u32,\n queue: vk::Queue,\n capabilities: core::Capabilities,\n}\n\nimpl GraphicsQueue {\n #[doc(hidden)]\n pub fn new(share: SharePointer, q: vk::Queue, qf_id: u32) -> GraphicsQueue {\n let caps = core::Capabilities {\n max_vertex_count: 0,\n max_index_count: 0,\n max_texture_size: 0,\n instance_base_supported: false,\n instance_call_supported: false,\n instance_rate_supported: false,\n vertex_base_supported: false,\n srgb_color_supported: false,\n constant_buffer_supported: false,\n unordered_access_view_supported: false,\n separate_blending_slots_supported: false,\n };\n GraphicsQueue {\n share: share,\n family: qf_id,\n queue: q,\n capabilities: caps,\n }\n }\n #[doc(hidden)]\n pub fn 
get_share(&self) -> &Share {\n &self.share\n }\n #[doc(hidden)]\n pub fn get_queue(&self) -> vk::Queue {\n self.queue\n }\n}\n\nimpl core::Device for GraphicsQueue {\n type Resources = Resources;\n type CommandBuffer = Buffer;\n\n fn get_capabilities(&self) -> &core::Capabilities {\n &self.capabilities\n }\n\n fn pin_submitted_resources(&mut self, _: &core::handle::Manager<Resources>) {}\n\n fn submit(&mut self, com: &mut Buffer) {\n assert_eq!(self.family, com.family);\n let (_, vk) = self.share.get_device();\n assert_eq!(vk::SUCCESS, unsafe {\n vk.EndCommandBuffer(com.inner)\n });\n let submit_info = vk::SubmitInfo {\n sType: vk::STRUCTURE_TYPE_SUBMIT_INFO,\n commandBufferCount: 1,\n pCommandBuffers: &com.inner,\n .. unsafe { mem::zeroed() }\n };\n assert_eq!(vk::SUCCESS, unsafe {\n vk.QueueSubmit(self.queue, 1, &submit_info, 0)\n });\n let begin_info = vk::CommandBufferBeginInfo {\n sType: vk::STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,\n pNext: ptr::null(),\n flags: 0,\n pInheritanceInfo: ptr::null(),\n };\n assert_eq!(vk::SUCCESS, unsafe {\n vk.BeginCommandBuffer(com.inner, &begin_info)\n });\n }\n\n \/\/note: this should really live elsewhere (Factory?)\n fn cleanup(&mut self) {\n let (dev, mut functions) = self.share.get_device();\n use gfx_core::handle::Producer;\n \/\/self.frame_handles.clear();\n self.share.handles.borrow_mut().clean_with(&mut functions,\n |vk, b| unsafe { \/\/buffer\n vk.DestroyBuffer(dev, b.buffer, ptr::null());\n vk.FreeMemory(dev, b.memory, ptr::null());\n },\n |vk, s| unsafe { \/\/shader\n vk.DestroyShaderModule(dev, *s, ptr::null());\n },\n |_, _p| (), \/\/program\n |vk, p| unsafe { \/\/PSO\n vk.DestroyPipeline(dev, p.pipeline, ptr::null());\n vk.DestroyPipelineLayout(dev, p.pipe_layout, ptr::null());\n vk.DestroyDescriptorSetLayout(dev, p.desc_layout, ptr::null());\n vk.DestroyDescriptorPool(dev, p.desc_pool, ptr::null());\n },\n |vk, t| if t.memory != 0 {unsafe { \/\/texture\n vk.DestroyImage(dev, t.image, ptr::null());\n vk.FreeMemory(dev, t.memory, ptr::null());\n }},\n |vk, v| unsafe { \/\/SRV\n vk.DestroyImageView(dev, v.view, ptr::null());\n },\n |_, _| (), \/\/UAV\n |vk, v| unsafe { \/\/RTV\n vk.DestroyImageView(dev, v.view, ptr::null());\n },\n |vk, v| unsafe { \/\/DSV\n vk.DestroyImageView(dev, v.view, ptr::null());\n },\n |_, _v| (), \/\/sampler\n |_, _| (), \/\/fence\n );\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example for (un-)grabbing the mouse cursor<commit_after>#[cfg(target_os = \"android\")]\n#[macro_use]\nextern crate android_glue;\n\nextern crate glutin;\n\nuse glutin::{Event, ElementState};\n\nmod support;\n\n#[cfg(target_os = \"android\")]\nandroid_start!(main);\n\n#[cfg(not(feature = \"window\"))]\nfn main() { println!(\"This example requires glutin to be compiled with the `window` feature\"); }\n\n#[cfg(feature = \"window\")]\nfn main() {\n \n let window = glutin::Window::new().unwrap();\n window.set_title(\"glutin - Cursor grabbing test\");\n unsafe { window.make_current() };\n\n let context = support::load(&window);\n let mut grabbed = false;\n \n while !window.is_closed() {\n context.draw_frame((0.0, 1.0, 0.0, 1.0));\n window.swap_buffers();\n\n for event in window.poll_events() {\n match event {\n Event::KeyboardInput(ElementState::Pressed, _, _) => {\n if grabbed {\n grabbed = false;\n window.ungrab_cursor();\n }\n else {\n grabbed = true;\n window.grab_cursor().ok().expect(\"could not grab mouse cursor\");\n }\n },\n _ => (),\n }\n \n }\n \n }\n}\n\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Implement a generic duplicate module that will be used for checking rule and Rust items duplication. It also generates a map<Ident, Item>.<commit_after>\/\/ Copyright 2014 Pierre Talbot (IRCAM)\n\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\npub use rust::{ExtCtxt,Span};\npub use rust;\npub use identifier::*;\npub use std::collections::HashMap;\npub use monad::partial::Partial;\n\nuse middle::semantics::ast::*;\nuse monad::partial::Partial::*;\n\npub trait ItemIdent {\n fn ident(&self) -> Ident;\n}\n\npub trait ItemSpan {\n fn span(&self) -> Span;\n}\n\nimpl ItemIdent for rust::Item {\n fn ident(&self) -> Ident {\n self.ident.clone()\n }\n}\n\nimpl ItemSpan for rust::Item {\n fn span(&self) -> Span {\n self.span.clone()\n }\n}\n\nimpl<InnerItem: ItemIdent> ItemIdent for rust::P<InnerItem> {\n fn ident(&self) -> Ident {\n self.deref().ident()\n }\n}\n\nimpl<InnerItem: ItemSpan> ItemSpan for rust::P<InnerItem> {\n fn span(&self) -> Span {\n self.deref().span()\n }\n}\n\nimpl ItemIdent for Rule {\n fn ident(&self) -> Ident {\n self.name.node.clone()\n }\n}\n\nimpl ItemSpan for Rule {\n fn span(&self) -> Span {\n self.name.span.clone()\n }\n}\n\npub struct DuplicateItem<'a, Item>\n{\n cx: &'a ExtCtxt<'a>,\n items: HashMap<Ident, Item>,\n has_duplicate: bool,\n what_is_duplicate: String\n}\n\nimpl<'a, Item: ItemIdent + ItemSpan> DuplicateItem<'a, Item>\n{\n pub fn analyse<ItemIter: Iterator<Item>>(cx: &'a ExtCtxt<'a>, iter: ItemIter, item_kind: String) -> Partial<HashMap<Ident, Item>>\n {\n let (min_size, _) = iter.size_hint();\n DuplicateItem {\n cx: cx,\n items: HashMap::with_capacity(min_size),\n has_duplicate: false,\n what_is_duplicate: item_kind\n }.populate(iter)\n .make()\n }\n\n fn populate<ItemIter: Iterator<Item>>(mut self, iter: ItemIter) -> DuplicateItem<'a, Item>\n {\n let mut iter = iter;\n for item in iter {\n let ident = item.ident();\n if self.items.contains_key(&ident) {\n self.duplicate_items(self.items.get(&ident).unwrap(), item);\n self.has_duplicate = true;\n } else {\n self.items.insert(ident, item);\n }\n }\n self\n }\n\n fn duplicate_items(&self, pre: &Item, current: Item)\n {\n self.cx.span_err(current.span(), format!(\n \"duplicate definition of {} `{}`\",\n self.what_is_duplicate, current.ident()).as_slice());\n self.cx.span_note(pre.span(), format!(\n \"previous definition of {} `{}` here\",\n self.what_is_duplicate, pre.ident()).as_slice());\n }\n\n fn make(self) -> Partial<HashMap<Ident, Item>>\n {\n if self.has_duplicate {\n Fake(self.items)\n } else {\n Value(self.items)\n }\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ A raw test of vector appending performance.\n\nuse std;\nimport dvec::{dvec, extensions};\nimport io::writer_util;\n\nfn collect_raw(num: uint) -> [uint] {\n let mut result = [];\n for uint::range(0u, num) { |i|\n \/\/result += [i];\n vec::push(result, i);\n \/\/result = vec::append(result, [i]);\n }\n ret result;\n}\n\nfn collect_dvec(num: uint) -> [mut uint] {\n let result 
= dvec();\n for uint::range(0u, num) { |i|\n result.push(i);\n }\n ret dvec::unwrap(result);\n}\n\nfn main(args: [str]) {\n let args = if os::getenv(\"RUST_BENCH\").is_some() {\n [\"\", \"50000000\"]\n } else if args.len() <= 1u {\n [\"\", \"100000\"]\n } else {\n args\n };\n let max = uint::from_str(args[1]).get();\n let start = std::time::precise_time_s();\n let raw_v = collect_raw(max);\n let mid = std::time::precise_time_s();\n let dvec_v = collect_dvec(max);\n let end = std::time::precise_time_s();\n\n \/\/ check each vector\n assert raw_v.len() == max;\n for raw_v.eachi { |i, v| assert i == v; }\n assert dvec_v.len() == max;\n for dvec_v.eachi { |i, v| assert i == v; }\n\n let raw = mid - start;\n let dvec = end - mid;\n \n let maxf = max as float;\n let rawf = raw as float;\n let dvecf = dvec as float;\n \n io::stdout().write_str(#fmt(\"Raw : %? seconds\\n\", raw));\n io::stdout().write_str(#fmt(\" : %f op\/sec\\n\", maxf\/rawf));\n io::stdout().write_str(#fmt(\"\\n\"));\n io::stdout().write_str(#fmt(\"Dvec : %? seconds\\n\", dvec));\n io::stdout().write_str(#fmt(\" : %f op\/sec\\n\", maxf\/dvecf));\n io::stdout().write_str(#fmt(\"\\n\"));\n \n if dvec < raw {\n io::stdout().write_str(#fmt(\"Dvec is %f%% faster than raw\\n\",\n (rawf - dvecf) \/ rawf * 100.0));\n } else {\n io::stdout().write_str(#fmt(\"Raw is %f%% faster than dvec\\n\",\n (dvecf - rawf) \/ dvecf * 100.0));\n }\n}\n<commit_msg>Remove some commented out code so the pretty printer doesn't get confused.<commit_after>\/\/ A raw test of vector appending performance.\n\nuse std;\nimport dvec::{dvec, extensions};\nimport io::writer_util;\n\nfn collect_raw(num: uint) -> [uint] {\n let mut result = [];\n for uint::range(0u, num) { |i|\n vec::push(result, i);\n }\n ret result;\n}\n\nfn collect_dvec(num: uint) -> [mut uint] {\n let result = dvec();\n for uint::range(0u, num) { |i|\n result.push(i);\n }\n ret dvec::unwrap(result);\n}\n\nfn main(args: [str]) {\n let args = if os::getenv(\"RUST_BENCH\").is_some() {\n [\"\", \"50000000\"]\n } else if args.len() <= 1u {\n [\"\", \"100000\"]\n } else {\n args\n };\n let max = uint::from_str(args[1]).get();\n let start = std::time::precise_time_s();\n let raw_v = collect_raw(max);\n let mid = std::time::precise_time_s();\n let dvec_v = collect_dvec(max);\n let end = std::time::precise_time_s();\n\n \/\/ check each vector\n assert raw_v.len() == max;\n for raw_v.eachi { |i, v| assert i == v; }\n assert dvec_v.len() == max;\n for dvec_v.eachi { |i, v| assert i == v; }\n\n let raw = mid - start;\n let dvec = end - mid;\n \n let maxf = max as float;\n let rawf = raw as float;\n let dvecf = dvec as float;\n \n io::stdout().write_str(#fmt(\"Raw : %? seconds\\n\", raw));\n io::stdout().write_str(#fmt(\" : %f op\/sec\\n\", maxf\/rawf));\n io::stdout().write_str(#fmt(\"\\n\"));\n io::stdout().write_str(#fmt(\"Dvec : %? seconds\\n\", dvec));\n io::stdout().write_str(#fmt(\" : %f op\/sec\\n\", maxf\/dvecf));\n io::stdout().write_str(#fmt(\"\\n\"));\n \n if dvec < raw {\n io::stdout().write_str(#fmt(\"Dvec is %f%% faster than raw\\n\",\n (rawf - dvecf) \/ rawf * 100.0));\n } else {\n io::stdout().write_str(#fmt(\"Raw is %f%% faster than dvec\\n\",\n (dvecf - rawf) \/ dvecf * 100.0));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a benchmark for cross-task kernel memory region synchronization<commit_after>\/\/ Vectors are allocated in the Rust kernel's memory region, use of\n\/\/ which requires some amount of synchronization. 
This test exercises\n\/\/ that synchronization by spawning a number of tasks and then\n\/\/ allocating and freeing vectors.\n\nuse std;\nimport std::vec;\nimport std::uint;\nimport std::istr;\nimport std::task;\n\nfn f(n: uint) {\n for each i in uint::range(0u, n) {\n let v: [u8] = [];\n vec::reserve(v, 1000u);\n }\n}\n\nfn main(args: [istr]) {\n let n = if vec::len(args) < 2u {\n 100u\n } else {\n uint::parse_buf(istr::bytes(args[1]), 10u)\n };\n for each i in uint::range(0u, 100u) {\n task::spawn(bind f(n));\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix typo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Hello World!<commit_after>\nfn main() {\n for i in range(0, 5) {\n do spawn {\n println!(\"Hello {} !\", i * 10);\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #81927 - vandenheuvel:issue_32498, r=Mark-Simulacrum<commit_after>\/\/ run-pass\n#![allow(dead_code)]\n\n\/\/ Making sure that no overflow occurs.\n\nstruct L<T> {\n n: Option<T>,\n}\ntype L8<T> = L<L<L<L<L<L<L<L<T>>>>>>>>;\ntype L64<T> = L8<L8<L8<L8<T>>>>;\n\nfn main() {\n use std::mem::size_of;\n assert_eq!(size_of::<L64<L64<()>>>(), 1);\n assert_eq!(size_of::<L<L64<L64<()>>>>(), 1);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>example: Add custom_expression implemenation guide<commit_after>extern crate ripin;\n\nuse std::env;\nuse std::marker::PhantomData;\nuse ripin::{Stack, pop_two_operands, TryFromRef};\nuse ripin::expression::Expression;\nuse ripin::evaluate::Evaluate;\n\n\/\/ Implementing Expression for a new specific type\n\/\/ is not a difficult thing to do:\n\n\/\/ First you will need something that\n\/\/ can be understand as an Operand type, like float or integer.\n\/\/ The type you choose need to implement TryFromRef.\n#[derive(Debug, Copy, Clone)]\nenum MyOperand {\n Number1,\n Number2,\n}\n\nimpl<'a> TryFromRef<&'a str> for MyOperand {\n type Err = MyOperandErr<&'a str>;\n fn try_from_ref(s: &&'a str) -> Result<Self, Self::Err> {\n match *s {\n \"1\" => Ok(MyOperand::Number1),\n \"2\" => Ok(MyOperand::Number2),\n _ => Err(MyOperandErr::InvalidToken(s))\n }\n }\n}\n\n\/\/ Secondly an Evaluator type to make evaluation\n\/\/ of Operands and generate other ones on the stack.\n\/\/ It needs to implement TryFromRef too.\n#[derive(Debug, Copy, Clone)]\nenum MyEvaluator<T> {\n Add,\n Sub,\n _Phantom(PhantomData<T>) \/\/ make it generic but\n \/\/ don't own the generic T type\n}\n\nimpl<'a, T> TryFromRef<&'a str> for MyEvaluator<T> {\n type Err = MyOperandErr<&'a str>;\n fn try_from_ref(s: &&'a str) -> Result<Self, Self::Err> {\n match *s {\n \"+\" => Ok(MyEvaluator::Add),\n \"-\" => Ok(MyEvaluator::Sub),\n _ => Err(MyOperandErr::InvalidToken(s))\n }\n }\n}\n\n\/\/ A clear error struct\/enum is really important for the parsing part\n#[derive(Debug)]\nenum MyOperandErr<T> {\n InvalidToken(T),\n}\n\n\/\/ Be careful both needs to implement the same TryFromRef Trait signature !\n\/\/ If the Operand type works with TryFromRef<&str>\n\/\/ then the Evaluator needs the same TryFromRef signature.\n\n\/\/ A clear error struct\/enum is really important for the evaluation part\n#[derive(Debug)]\nenum MyEvalErr<T> {\n CannotAddOperands(T, T),\n CannotSubOperands(T, T),\n NotEnoughOperands\n}\n\n\/\/ The last step is to implement the Evaluate trait on your custom Evaluator.\n\/\/ Evaluations are done with this trait.\nimpl Evaluate<MyOperand> for MyEvaluator<MyOperand> {\n type Err = MyEvalErr<MyOperand>;\n\n fn operands_needed(&self) -> usize {\n match *self {\n 
MyEvaluator::Add | MyEvaluator::Sub => 2,\n _ => unreachable!(), \/\/ _Phantom\n }\n }\n fn operands_generated(&self) -> usize {\n match *self {\n MyEvaluator::Add | MyEvaluator::Sub => 1,\n _ => unreachable!(), \/\/ _Phantom\n }\n }\n\n fn evaluate(self, stack: &mut Stack<MyOperand>) -> Result<(), Self::Err> {\n let (a, b) = pop_two_operands(stack).ok_or(MyEvalErr::NotEnoughOperands)?;\n match self {\n MyEvaluator::Add => {\n match (a, b) {\n (MyOperand::Number1, MyOperand::Number1) => {\n Ok(stack.push(MyOperand::Number2))\n },\n _ => Err(MyEvalErr::CannotAddOperands(a, b)),\n }\n },\n MyEvaluator::Sub => {\n match (a, b) {\n (MyOperand::Number2, MyOperand::Number1) => {\n Ok(stack.push(MyOperand::Number1))\n },\n _ => Err(MyEvalErr::CannotSubOperands(a, b)),\n }\n }\n _ => unreachable!() \/\/ _Phantom\n }\n }\n}\n\ntype MyExpression = Expression<MyOperand, MyEvaluator<MyOperand>>;\n\n\/\/ Once you implement the TryFromRef trait on your “custom” types,\n\/\/ make an iterator of it and give it to the Expression struct.\nfn main() {\n let expr_str = env::args().nth(1).unwrap_or_else(|| {\n println!(\"Give me an expression as first argument!\");\n \"1 1 +\".into()\n });\n\n let tokens = expr_str.split_whitespace();\n match MyExpression::from_iter(tokens) {\n Ok(expr) => println!(\"Evaluation of {:?}: {:?}\", expr_str, expr.evaluate()),\n Err(err) => println!(\"Parsing results in {:?}\", err),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add enum implementation<commit_after>use std::env;\n\nenum BinaryTree {\n Child(i32, Box<BinaryTree>, Box<BinaryTree>),\n None(i32),\n}\n\nimpl BinaryTree {\n pub fn new(item: i32, depth: i32) -> BinaryTree {\n if depth == 0 {\n BinaryTree::None(item)\n } else {\n BinaryTree::Child(item, \n Box::new(BinaryTree::new(2 * item - 1, depth - 1)),\n Box::new(BinaryTree::new(2 * item, depth - 1)))\n }\n }\n\n pub fn item_check(&self) -> i32 {\n match self {\n &BinaryTree::Child(i, ref l, ref r) => i + l.item_check() - r.item_check(),\n &BinaryTree::None(i) => i,\n }\n }\n}\n\nconst MIN_DEPTH : i32 = 4;\n\nfn main() {\n let depth = env::args().nth(1).unwrap_or(\"10\".to_string()).parse::<i32>().unwrap();\n println!(\"stretch tree of depth {}\\tcheck {}\", depth + 1,\n BinaryTree::new(0, depth + 1).item_check());\n let long_lived = BinaryTree::new(0, depth);\n for (d, i) in (MIN_DEPTH..depth+1).filter(| x | x % 2 == 0).map(| x | \n (x, (1..(1<<(depth-x+MIN_DEPTH))+1).fold(0,| check, j |\n check + BinaryTree::new(j, x).item_check() + BinaryTree::new(-j, x).item_check()))) {\n println!(\"{}\\ttrees of depth {}\\tcheck {}\", 1<<(depth-d+MIN_DEPTH)+1, d, i);\n }\n println!(\"long lived tree of depth {}\\tcheck {}\", depth, long_lived.item_check());\n}\n<|endoftext|>"} {"text":"<commit_before>use inflector::Inflector;\n\nuse botocore::{Service, Shape, Operation};\n\nuse self::ec2::Ec2Generator;\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod ec2;\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self, service: &Service) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option<String> {\n None\n }\n\n fn generate_error_types(&self, _service: &Service) -> Option<String> {\n None\n }\n\n fn timestamp_type(&self) -> &'static str;\n}\n\npub fn generate_source(service: &Service) -> String {\n 
match &service.metadata.protocol[..] {\n \"json\" => generate(service, JsonGenerator),\n \"ec2\" => generate(service, Ec2Generator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate<P>(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n {error_types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(service),\n types = generate_types(service, &protocol_generator),\n error_types = protocol_generator.generate_error_types(service).unwrap_or(\"\".to_string()),\n )\n}\n\nfn generate_client<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}<P> where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: region::Region,\n }}\n\n impl<P> {type_name}<P> where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: region::Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => {\n match service.metadata.endpoint_prefix {\n ref x if x == \"elastictranscoder\" => \"Amazon Elastic Transcoder\",\n ref x if x == \"cognito-identity\" => \"Amazon Cognito Identity\",\n _ => panic!(\"Unable to determine service abbreviation\"),\n }\n },\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, shape.member())\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n shape.key(),\n shape.value(),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str, for_timestamps: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec<u8>\",\n \"boolean\" => \"bool\",\n \"double\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n \"timestamp\" => for_timestamps,\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n if name == \"String\" {\n return protocol_generator.generate_support_types(name, shape, &service);\n }\n\n if shape.exception() && service.typed_errors() {\n return None;\n }\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] 
{\n \"structure\" => parts.push(generate_struct(service, name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(name, shape)),\n \"list\" => parts.push(generate_list(name, shape)),\n shape_type => parts.push(generate_primitive_type(name, shape_type, protocol_generator.timestamp_type())),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\n\n\nfn generate_struct<P>(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\npub fn generate_field_name(member_name: &str) -> String {\n let name = member_name.to_snake_case();\n if name == \"return\" || name == \"type\" {\n name + \"_\"\n } else {\n name\n }\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = generate_field_name(member_name);\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n default,\n )]\".to_owned()\n );\n }\n }\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, member.shape));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, member.shape));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, member.shape));\n }\n\n lines.join(\"\\n\")\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nimpl Operation {\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.name)\n }\n}\n<commit_msg>fall back to service full name if abbreviation isn't present in botocore definition<commit_after>use inflector::Inflector;\n\nuse botocore::{Service, Shape, Operation};\n\nuse self::ec2::Ec2Generator;\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod ec2;\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self, service: &Service) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option<String> {\n None\n }\n\n fn generate_error_types(&self, _service: &Service) -> Option<String> {\n None\n }\n\n fn timestamp_type(&self) -> &'static str;\n}\n\npub fn generate_source(service: &Service) -> String {\n match &service.metadata.protocol[..] 
{\n \"json\" => generate(service, JsonGenerator),\n \"ec2\" => generate(service, Ec2Generator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate<P>(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n {error_types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(service),\n types = generate_types(service, &protocol_generator),\n error_types = protocol_generator.generate_error_types(service).unwrap_or(\"\".to_string()),\n )\n}\n\nfn generate_client<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}<P> where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: region::Region,\n }}\n\n impl<P> {type_name}<P> where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: region::Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => service.metadata.service_full_name.as_ref()\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, shape.member())\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n shape.key(),\n shape.value(),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str, for_timestamps: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec<u8>\",\n \"boolean\" => \"bool\",\n \"double\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n \"timestamp\" => for_timestamps,\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n if name == \"String\" {\n return protocol_generator.generate_support_types(name, shape, &service);\n }\n\n if shape.exception() && service.typed_errors() {\n return None;\n }\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] 
{\n \"structure\" => parts.push(generate_struct(service, name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(name, shape)),\n \"list\" => parts.push(generate_list(name, shape)),\n shape_type => parts.push(generate_primitive_type(name, shape_type, protocol_generator.timestamp_type())),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\n\n\nfn generate_struct<P>(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\npub fn generate_field_name(member_name: &str) -> String {\n let name = member_name.to_snake_case();\n if name == \"return\" || name == \"type\" {\n name + \"_\"\n } else {\n name\n }\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = generate_field_name(member_name);\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n default,\n )]\".to_owned()\n );\n }\n }\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, member.shape));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, member.shape));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, member.shape));\n }\n\n lines.join(\"\\n\")\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nimpl Operation {\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.name)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>staircase<commit_after>\/\/https:\/\/www.hackerrank.com\/challenges\/staircase\n\nuse std::io;\nuse std::io::prelude::*;\n\nfn main() {\n let stdin = io::stdin();\n\n let count: i32 = stdin.lock().lines() \/\/iterator over lines in stdin\n .next().unwrap().unwrap() \/\/finally it's a string\n .trim().parse().unwrap(); \/\/and then parsing count value...But we don't need it :) lol\n for i in 0..count {\n for _ in 0..count - i - 1 {\n print!(\" \");\n }\n for _ in 0..i+1 {\n print!(\"#\");\n }\n println!(\"\");\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Touch up the iOS deploy script<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>std::borrow::Cow<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> add solution in rust for Easy 0001<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for safe 
intrinsics<commit_after>#![feature(intrinsics)]\n#![feature(no_core)]\n\n#![no_core]\n#![crate_name = \"foo\"]\n\nextern \"rust-intrinsic\" {\n \/\/ @has 'foo\/fn.abort.html'\n \/\/ @has - '\/\/pre[@class=\"rust fn\"]' 'pub extern \"rust-intrinsic\" fn abort() -> !'\n pub fn abort() -> !;\n \/\/ @has 'foo\/fn.unreachable.html'\n \/\/ @has - '\/\/pre[@class=\"rust fn\"]' 'pub unsafe extern \"rust-intrinsic\" fn unreachable() -> !'\n pub fn unreachable() -> !;\n}\n\nextern \"C\" {\n \/\/ @has 'foo\/fn.needs_drop.html'\n \/\/ @has - '\/\/pre[@class=\"rust fn\"]' 'pub unsafe extern \"C\" fn needs_drop() -> !'\n pub fn needs_drop() -> !;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust queue<commit_after>struct Queue<T> {\n data: Vec<T>\n}\n\nimpl<T> Queue<T> {\n fn new() -> Queue<T> {\n Queue{data: vec![]}\n }\n\n fn enqueue(&mut self, e: T) {\n self.data.push(e)\n }\n\n fn dequeue(&mut self) -> Option<T> {\n if self.data.is_empty() {\n return None;\n }\n Some(self.data.remove(0))\n }\n\n fn is_empty(&self) -> bool {\n self.data.is_empty()\n }\n\n fn peek(&self) -> Option<&T> {\n if self.data.is_empty() {\n return None;\n }\n let e = &self.data[0];\n Some(e)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>check-in lazy static replacement<commit_after>\/\/! This module exists because lazy_static causes TSAN to\n\/\/! be very unhappy. We rely heavily on TSAN for finding\n\/\/! races, so we don't use lazy_static.\n\nuse std::sync::atomic::{AtomicBool, AtomicPtr, Ordering::SeqCst};\n\n\/\/\/ A lazily initialized value\npub struct Lazy<T, F> {\n value: AtomicPtr<T>,\n init_mu: AtomicBool,\n init: F,\n}\n\nimpl<T, F> Lazy<T, F> {\n \/\/\/ Create a new Lazy\n pub const fn new(init: F) -> Lazy<T, F>\n where\n F: Sized,\n {\n Lazy {\n value: AtomicPtr::new(std::ptr::null_mut()),\n init_mu: AtomicBool::new(false),\n init,\n }\n }\n}\n\nimpl<T, F> Drop for Lazy<T, F> {\n fn drop(&mut self) {\n let value_ptr = self.value.load(SeqCst);\n if !value_ptr.is_null() {\n unsafe { drop(Box::from_raw(value_ptr)) }\n }\n }\n}\n\nimpl<T, F> std::ops::Deref for Lazy<T, F>\nwhere\n F: Fn() -> T,\n{\n type Target = T;\n\n fn deref(&self) -> &T {\n let value_ptr = self.value.load(SeqCst);\n\n if !value_ptr.is_null() {\n unsafe {\n return &*value_ptr;\n }\n }\n\n while self.init_mu.compare_and_swap(false, true, SeqCst) != false {}\n\n let value_ptr = self.value.load(SeqCst);\n\n \/\/ we need to check this again because\n \/\/ maybe some other thread completed\n \/\/ the initialization already.\n if !value_ptr.is_null() {\n let unlock = self.init_mu.swap(false, SeqCst);\n assert!(unlock);\n unsafe {\n return &*value_ptr;\n }\n }\n\n let value = (self.init)();\n\n let value_ptr = Box::into_raw(Box::new(value));\n\n self.value.store(value_ptr, SeqCst);\n\n let unlock = self.init_mu.swap(false, SeqCst);\n assert!(unlock);\n\n unsafe { &*value_ptr }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add entry getter function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adapt to new libimagstore::iter::Entries API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Added more convenience implementations of Handler.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Error when .env fails loading for non-parse reasons<commit_after><|endoftext|>"} {"text":"<commit_before>rm use std::mem;\nuse std::prelude::v1::*;\n\nuse {Future, IntoFuture, Poll, Async};\n\n\/\/\/ Future for the `select_any` combinator, waiting for one of any of a list 
of\n\/\/\/ futures to succesfully complete. unlike `select_all`, this future ignores all\n\/\/\/ but the last error, if there are any.\n\/\/\/\n\/\/\/ This is created by this `select_any` function.\n#[must_use = \"futures do nothing unless polled\"]\npub struct SelectAny<A> where A: Future {\n inner: Vec<A>,\n}\n\n\/\/\/ Creates a new future which will select the first successful future over a list of futures.\n\/\/\/\n\/\/\/ The returned future will wait for any future within `list` to be ready and Ok. Unlike\n\/\/\/ select_all, this will only return the first successful completion, or the last\n\/\/\/ failure. This is useful in contexts where any success is desired and failures\n\/\/\/ are ignored, unless all the futures fail.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ This function will panic if the iterator specified contains no items.\npub fn select_any<I>(iter: I) -> SelectAny<<I::Item as IntoFuture>::Future>\n where I: IntoIterator,\n I::Item: IntoFuture,\n{\n let ret = SelectAny {\n inner: iter.into_iter()\n .map(|a| a.into_future())\n .collect(),\n };\n assert!(ret.inner.len() > 0);\n ret\n}\n\nimpl<A> Future for SelectAny<A> where A: Future {\n type Item = (A::Item, Vec<A>);\n type Error = A::Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n \/\/ loop until we've either exhausted all errors, a success was hit, or nothing is ready\n loop {\n let item = self.inner.iter_mut().enumerate().filter_map(|(i, f)| {\n match f.poll() {\n Ok(Async::NotReady) => None,\n Ok(Async::Ready(e)) => Some((i, Ok(e))),\n Err(e) => Some((i, Err(e))),\n }\n }).next();\n\n match item {\n Some((idx, res)) => {\n \/\/ always remove Ok or Err, if it's not the last Err continue looping\n drop(self.inner.remove(idx));\n match res {\n Ok(e) => {\n let rest = mem::replace(&mut self.inner, Vec::new());\n return Ok(Async::Ready((e, rest)))\n },\n Err(e) => {\n if self.inner.is_empty() {\n return Err(e)\n }\n },\n }\n }\n None => {\n \/\/ based on the filter above, nothing is ready, return\n return Ok(Async::NotReady)\n },\n }\n }\n }\n}\n<commit_msg>remove rm typo<commit_after>use std::mem;\nuse std::prelude::v1::*;\n\nuse {Future, IntoFuture, Poll, Async};\n\n\/\/\/ Future for the `select_any` combinator, waiting for one of any of a list of\n\/\/\/ futures to succesfully complete. unlike `select_all`, this future ignores all\n\/\/\/ but the last error, if there are any.\n\/\/\/\n\/\/\/ This is created by this `select_any` function.\n#[must_use = \"futures do nothing unless polled\"]\npub struct SelectAny<A> where A: Future {\n inner: Vec<A>,\n}\n\n\/\/\/ Creates a new future which will select the first successful future over a list of futures.\n\/\/\/\n\/\/\/ The returned future will wait for any future within `list` to be ready and Ok. Unlike\n\/\/\/ select_all, this will only return the first successful completion, or the last\n\/\/\/ failure. 
This is useful in contexts where any success is desired and failures\n\/\/\/ are ignored, unless all the futures fail.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ This function will panic if the iterator specified contains no items.\npub fn select_any<I>(iter: I) -> SelectAny<<I::Item as IntoFuture>::Future>\n where I: IntoIterator,\n I::Item: IntoFuture,\n{\n let ret = SelectAny {\n inner: iter.into_iter()\n .map(|a| a.into_future())\n .collect(),\n };\n assert!(ret.inner.len() > 0);\n ret\n}\n\nimpl<A> Future for SelectAny<A> where A: Future {\n type Item = (A::Item, Vec<A>);\n type Error = A::Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n \/\/ loop until we've either exhausted all errors, a success was hit, or nothing is ready\n loop {\n let item = self.inner.iter_mut().enumerate().filter_map(|(i, f)| {\n match f.poll() {\n Ok(Async::NotReady) => None,\n Ok(Async::Ready(e)) => Some((i, Ok(e))),\n Err(e) => Some((i, Err(e))),\n }\n }).next();\n\n match item {\n Some((idx, res)) => {\n \/\/ always remove Ok or Err, if it's not the last Err continue looping\n drop(self.inner.remove(idx));\n match res {\n Ok(e) => {\n let rest = mem::replace(&mut self.inner, Vec::new());\n return Ok(Async::Ready((e, rest)))\n },\n Err(e) => {\n if self.inner.is_empty() {\n return Err(e)\n }\n },\n }\n }\n None => {\n \/\/ based on the filter above, nothing is ready, return\n return Ok(Async::NotReady)\n },\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add permissions to audio requests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>reorder audio module code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add image module.<commit_after>\/\/! Constructs for generating images using the TIS-100.\n\n\/\/\/ The colors that the TIS-100 can generate.\n#[derive(Debug, PartialEq, Eq, Copy, Clone)]\npub enum Color {\n Black,\n DarkGrey,\n BrightGrey,\n White,\n Red,\n}\n\nimpl Color {\n \/\/\/ Get the color for the given integer representation.\n fn from_isize(value: isize) -> Color {\n match value {\n 1 => DarkGrey,\n 2 => BrightGrey,\n 3 => White,\n 4 => Red,\n _ => Black,\n }\n }\n}\n\nuse self::Color::*;\n\n\/\/\/ The operational modes of the image.\n#[derive(Debug, PartialEq, Eq, Copy, Clone)]\nenum ImageMode {\n Move,\n Paint,\n}\n\nuse self::ImageMode::*;\n\n\/\/\/ An image that can receive values from the TIS-100. When in the `Move` mode, the image receives\n\/\/\/ coordinates that tell it where to draw. 
When in the `Paint` mode, the image will draw values.\n\/\/\/ Sending a negative value at any time will reset the image to `Move` mode.\n#[derive(Debug)]\npub struct Image {\n width: usize,\n height: usize,\n data: Vec<Color>,\n mode: ImageMode,\n position: Vec<isize>,\n offset: usize,\n}\n\nimpl Image {\n \/\/\/ Construct a new, empty `Image` with the given width and height.\n pub fn new(width: usize, height: usize) -> Image {\n let mut data = Vec::with_capacity(width * height);\n for _ in 0..width * height {\n data.push(Black);\n }\n\n Image {\n width: width,\n height: height,\n data: data,\n mode: Move,\n position: Vec::new(),\n offset: 0,\n }\n }\n\n \/\/\/ Construct a new `Image` with the given width, height, and initial values.\n pub fn with_data(data: &Vec<isize>, width: usize, height: usize) -> Image {\n assert_eq!(data.len(), width * height);\n\n let data = data.iter().map(|&i| Color::from_isize(i)).collect::<Vec<_>>();\n\n Image {\n width: width,\n height: height,\n data: data,\n mode: Move,\n position: Vec::new(),\n offset: 0,\n }\n }\n\n \/\/\/ Retrieve the image's data.\n pub fn data(&self) -> &Vec<Color> {\n &self.data\n }\n\n \/\/\/ Write a value to the image. If the image is in `Move` mode, then the value will be\n \/\/\/ interpreted as a coordinate. If the image is in `Paint` mode, then the value will be\n \/\/\/ interpreted as a color unless the value is negative.\n pub fn write(&mut self, value: isize) {\n if value < 0 {\n self.position.clear();\n self.mode = Move;\n self.offset = 0;\n return;\n }\n\n if self.mode == Move {\n self.position.push(value);\n\n if self.position.len() == 2 {\n self.mode = Paint;\n }\n\n return;\n }\n\n let row_off = self.position[0] as usize * self.width;\n\n if row_off < self.width * self.height {\n let col = self.position[1] as usize + self.offset;\n\n if col < self.width {\n self.data[row_off + col] = Color::from_isize(value);\n }\n }\n\n self.offset += 1;\n }\n}\n\nimpl PartialEq for Image {\n fn eq(&self, other: &Self) -> bool {\n self.data == other.data\n }\n}\n\nimpl Eq for Image {}\n\n#[test]\nfn test_color_from_isize() {\n assert_eq!(Color::from_isize(0), Black);\n assert_eq!(Color::from_isize(1), DarkGrey);\n assert_eq!(Color::from_isize(2), BrightGrey);\n assert_eq!(Color::from_isize(3), White);\n assert_eq!(Color::from_isize(4), Red);\n assert_eq!(Color::from_isize(5), Black);\n}\n\n#[test]\nfn test_image_with_data() {\n let expected = vec![DarkGrey, BrightGrey, White, Red];\n let data = vec![1, 2, 3, 4];\n let image = Image::with_data(&data, 2, 2);\n\n assert_eq!(expected, image.data().clone());\n}\n\n#[test]\nfn test_image_write() {\n let expected = vec![DarkGrey, BrightGrey, White, Red];\n let mut image = Image::new(2, 2);\n\n image.write(0);\n image.write(0);\n image.write(1);\n image.write(2);\n image.write(-1);\n image.write(1);\n image.write(0);\n image.write(3);\n image.write(4);\n image.write(-1);\n\n assert_eq!(expected, image.data().clone());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_diagnostics! {\n E0154,\n E0157,\n E0153,\n E0251, \/\/ a named type or value has already been imported in this module\n E0252, \/\/ a named type or value has already been imported in this module\n E0253, \/\/ not directly importable\n E0254, \/\/ import conflicts with imported crate in this module\n E0255, \/\/ import conflicts with value in this module\n E0256, \/\/ import conflicts with type in this module\n E0257, \/\/ inherent implementations are only allowed on types defined in the current module\n E0258, \/\/ import conflicts with existing submodule\n E0259, \/\/ an extern crate has already been imported into this module\n E0260, \/\/ name conflicts with an external crate that has been imported into this module\n E0317, \/\/ user-defined types or type parameters cannot shadow the primitive types\n E0364, \/\/ item is private\n E0365 \/\/ item is private\n}\n<commit_msg>Add error explanation for E0317.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0317: r##\"\nUser-defined types or type parameters cannot shadow the primitive types.\nThis error indicates you tried to define a type, struct or enum with the same\nname as an existing primitive type, and is therefore invalid.\n\nSee the Types section of the reference for more information about the primitive\ntypes:\n\nhttp:\/\/doc.rust-lang.org\/nightly\/reference.html#types\n\"##\n\n}\n\nregister_diagnostics! 
{\n E0154,\n E0157,\n E0153,\n E0251, \/\/ a named type or value has already been imported in this module\n E0252, \/\/ a named type or value has already been imported in this module\n E0253, \/\/ not directly importable\n E0254, \/\/ import conflicts with imported crate in this module\n E0255, \/\/ import conflicts with value in this module\n E0256, \/\/ import conflicts with type in this module\n E0257, \/\/ inherent implementations are only allowed on types defined in the current module\n E0258, \/\/ import conflicts with existing submodule\n E0259, \/\/ an extern crate has already been imported into this module\n E0260, \/\/ name conflicts with an external crate that has been imported into this module\n E0364, \/\/ item is private\n E0365 \/\/ item is private\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use 'to_string' in 'raindrops'.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a \"roundtrip\" example which deserializes and serializes a module.<commit_after>extern crate parity_wasm;\n\nuse std::env;\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n if args.len() != 3 {\n println!(\"Usage: {} in.wasm out.wasm\", args[0]);\n return;\n }\n\n let module = parity_wasm::deserialize_file(&args[1]).expect(\"Failed to load module\");\n\n parity_wasm::serialize_to_file(&args[2], module).expect(\"Failed to write module\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>video.addToAlbum method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>While with mut countdown<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add I2C\/VL53L0X example<commit_after>#![no_std]\n\n\nextern crate cortex_m;\nextern crate stm32f0x2;\nextern crate stm32f0_hal;\n\n\nuse stm32f0x2::{\n GPIOB,\n I2C1,\n RCC,\n};\nuse stm32f0_hal::gpio;\n\n\nfn main() {\n \/\/ This example was developed and tested with the STM32F072B Discovery board\n \/\/ and the VL53L0X sattelite board. It assumes that the VL53L0X satellite\n \/\/ board is connected to the discovery board in the following way:\n \/\/ - GND to GND, VDD to 3V.\n \/\/ - XSDN_I to VDD, via a 10kOhm pull-up resistor.\n \/\/ - SCL_I to PB6, SDA_I to PB7.\n\n cortex_m::interrupt::free(|cs| {\n let rcc = RCC.borrow(cs);\n let gpiob = GPIOB.borrow(cs);\n let i2c = I2C1.borrow(cs);\n\n \/\/ Set up LEDs that we can later use to signal error or success\n \/\/\n \/\/ These LEDs are marked as LD3, LD4, and LD5 on the STM32F0 Discovery\n \/\/ board. Please note that the board's documentation that matches those\n \/\/ names to PC6, PC8, and PC9 is not correct. It also mixes up the\n \/\/ colors of the LEDs. 
At least that's the case with the board I have in\n \/\/ front of me.\n let mut led_init = gpio::Output::setup(gpio::Pin::PC8); \/\/ orange\n let mut led_success = gpio::Output::setup(gpio::Pin::PC9); \/\/ green\n let mut led_error = gpio::Output::setup(gpio::Pin::PC6); \/\/ red\n\n led_init.high();\n led_success.low();\n led_error.low();\n\n \/\/ Enable clock for GPIOB\n \/\/\n \/\/ I think this is already included in the GPIO output configuration\n \/\/ above, but it's more robust to do it again here explicitely, in case\n \/\/ that code changes, or this example switches to another method of\n \/\/ signalling success or failure.\n rcc.ahbenr.modify(|_, w| w.iopben().enabled());\n\n \/\/ Enable clock for I2C1\n rcc.apb1enr.modify(|_, w| w.i2c1en().enabled());\n\n \/\/ Reset I2C1 peripheral\n rcc.apb1rstr.modify(|_, w| w.i2c1rst().set_bit());\n rcc.apb1rstr.modify(|_, w| w.i2c1rst().clear_bit());\n\n \/\/ Select alternate I2C functions on pins\n \/\/ See data sheet, page 42 and reference manual, section 8.4.9.\n gpiob.afrl.modify(|_, w|\n w\n .afrl6().af1() \/\/ I2C1_SCL on PB6\n .afrl7().af1() \/\/ I2C1_SDA on PB7\n );\n\n \/\/ Configure the pins\n \/\/\n \/\/ I'm basically winging it here. If the need for setting the\n \/\/ configuration like this is explained somewhere in the reference\n \/\/ manual, I don't know where. The following is based on my own\n \/\/ understanding, and bits and pieces I've gathered from around the\n \/\/ internet.\n \/\/\n \/\/ Fact is, the pins need to be configured (there's no I2C signal\n \/\/ otherwise), and the following configuration works.\n gpiob.moder.modify(|_, w|\n w\n .moder6().alternate()\n .moder7().alternate()\n );\n gpiob.ospeedr.modify(|_, w|\n w\n .ospeedr6().low_speed()\n .ospeedr7().low_speed()\n );\n gpiob.otyper.modify(|_, w|\n w\n .ot6().open_drain()\n .ot7().open_drain()\n );\n\n \/\/ Select HSI as the clock source for I2C1\n \/\/ This is the default, so unless other code has changed this\n \/\/ configuration, the following line is redundant.\n rcc.cfgr3.modify(|_, w| w.i2c1sw().hsi());\n\n \/\/ Make sure the I2C peripheral is disabled\n \/\/ The configuration below is not allowed, while I2C is enabled.\n i2c.cr1.modify(|_, w| w.pe().clear_bit());\n\n \/\/ Noise filter configuration would go here, if required. By default, a\n \/\/ spec-compliant analog filter is enabled, and I don't see a reason to\n \/\/ change that.\n\n \/\/ Configure I2C timings\n \/\/ The following values are taken from table 91 on page 650 (section\n \/\/ 26.4.10) in the reference manual. This is the right table because the\n \/\/ 8 MHz HSI clock has been selected above. 
Let's choose 100 kHz for the\n \/\/ I2C frequency.\n i2c.timingr.modify(|_, w|\n unsafe {\n w\n .presc().bits(1)\n .scll().bits(0x13) \/\/ required because of master mode\n .sclh().bits(0xf) \/\/ required because of master mode\n .sdadel().bits(0x2)\n .scldel().bits(0x4)\n }\n );\n\n \/\/ Enable I2C peripheral\n i2c.cr1.modify(|_, w| w.pe().set_bit());\n\n \/\/ Configure and start I2C write\n i2c.cr2.write(|w|\n unsafe {\n w\n \/\/ 7-bit addressing mode\n .add10().clear_bit()\n \/\/ slave address\n .sadd1().bits(0x52 >> 1)\n .sadd0().bit(false) \/\/ write\n \/\/ write config\n .rd_wrn().clear_bit() \/\/ set transfer direction to write\n .nbytes().bits(1) \/\/ send 1 byte, the register index\n .reload().clear_bit() \/\/ only send <nbytes> bytes\n .autoend().set_bit() \/\/ automatically send STOP signal\n .start().set_bit() \/\/ start signal to start transmission\n }\n );\n\n \/\/ Wait for transmit register to be empty\n while i2c.isr.read().txe().bit_is_clear() {}\n\n \/\/ Send index of reference register\n i2c.txdr.write(|w| w.txdata().bits(0xC0));\n\n \/\/ Wait until transfer is complete\n \/\/ Since we set the AUTOEND flag above, the peripheral should\n \/\/ automatically generate a stop condition.\n while i2c.isr.read().stopf().bit_is_clear() {}\n\n \/\/ Configure and start I2C read\n i2c.cr2.write(|w|\n unsafe {\n w\n \/\/ 7-bit addressing mode\n .add10().clear_bit()\n \/\/ slave address\n .sadd1().bits(0x52 >> 1)\n .sadd0().bit(true) \/\/ read\n \/\/ read config\n .rd_wrn().set_bit() \/\/ set transfer direction to read\n .nbytes().bits(1) \/\/ receive 1 byte\n .reload().clear_bit() \/\/ only receive <nbytes> bytes\n .autoend().set_bit() \/\/ automatically send STOP signal\n .start().set_bit() \/\/ start signal to start transmission\n }\n );\n\n \/\/ Wait until byte has been received\n while i2c.isr.read().rxne().bit_is_clear() {}\n\n \/\/ Read received byte\n let byte = i2c.rxdr.read().rxdata().bits();\n\n \/\/ Check received data\n \/\/ We expect that it's the hardcoded value from the reference register\n \/\/ we were reading.\n if byte == 0xEE {\n led_success.high();\n }\n else {\n led_error.high();\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add Simple Workflow Service integration tests<commit_after>#![cfg(feature = \"swf\")]\n\nextern crate rusoto;\n\nuse rusoto::swf::{SwfClient, ListDomainsInput};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_list_domains() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = SwfClient::new(credentials, Region::UsEast1);\n\n let mut request = ListDomainsInput::default();\n request.maximum_page_size = Some(10);\n request.registration_status = \"REGISTERED\".to_string();\n\n match client.list_domains(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n<|endoftext|>"} {"text":"<commit_before>use super::*;\n\nimpl Editor {\n \/\/\/ Get pos of next char\n pub fn next_pos(&self) -> (usize, usize) {\n \/\/ TODO: Add numerals\n if self.x() == self.text[self.y()].len() {\n if self.y() < self.text.len() - 1 {\n (0, self.y() + 1)\n } else {\n (self.x(), self.y())\n }\n } else {\n (self.x() + 1, self.y())\n }\n }\n\n \/\/\/ Get pos of previous char\n pub fn previous_pos(&self) -> (usize, usize) {\n if self.x() == 0 {\n if self.y() > 0 {\n (self.text[self.y()].len(), self.y() - 1)\n } else {\n (self.x(), self.y())\n }\n } else {\n (self.x() - 1, self.y())\n }\n }\n\n pub fn 
next(&mut self) {\n let (x, y) = self.next_pos();\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n pub fn previous(&mut self) {\n let (x, y) = self.previous_pos();\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n \/\/\/ Get right pos\n pub fn right_pos(&self, n: usize) -> (usize, usize) {\n let x = self.x() + n;\n\n if x > self.text[self.y()].len() {\n (self.text[self.y()].len(), self.y())\n } else {\n (x, self.y())\n }\n }\n pub fn right(&mut self, n: usize) {\n self.cursor_mut().x = self.right_pos(n).0;\n }\n\n \/\/\/ Get left pos\n pub fn left_pos(&self, n: usize) -> (usize, usize) {\n if n <= self.x() {\n (self.x() - n, self.y())\n } else {\n (0, self.y())\n }\n\n }\n pub fn left(&mut self, n: usize) {\n self.cursor_mut().x = self.left_pos(n).0;\n }\n\n \/\/\/ Get up pos\n pub fn up_pos(&self, n: usize) -> (usize, usize) {\n if n <= self.y() {\n (self.cursor().x, self.y() - n)\n } else {\n (self.cursor().x, 0)\n }\n }\n pub fn up(&mut self, n: usize) {\n let (x, y) = self.up_pos(n);\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n \/\/\/ Get down pos\n pub fn down_pos(&self, n: usize) -> (usize, usize) {\n let y = self.y() + n;\n\n if y >= self.text.len() {\n (self.cursor().x, self.text.len() - 1)\n } else {\n (self.cursor().x, y)\n }\n }\n\n pub fn down(&mut self, n: usize) {\n let (x, y) = self.down_pos(n);\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n}\n<commit_msg>Fix bug in previous function<commit_after>use super::*;\n\nimpl Editor {\n \/\/\/ Get pos of next char\n pub fn next_pos(&self) -> (usize, usize) {\n \/\/ TODO: Add numerals\n if self.x() == self.text[self.y()].len() {\n if self.y() < self.text.len() - 1 {\n (0, self.y() + 1)\n } else {\n (self.x(), self.y())\n }\n } else {\n (self.x() + 1, self.y())\n }\n }\n\n \/\/\/ Get pos of previous char\n pub fn previous_pos(&self) -> (usize, usize) {\n if self.x() == 0 {\n if self.y() > 0 {\n (self.text[self.y() - 1].len(), self.y() - 1)\n } else {\n (self.x(), self.y())\n }\n } else {\n (self.x() - 1, self.y())\n }\n }\n\n pub fn next(&mut self) {\n let (x, y) = self.next_pos();\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n pub fn previous(&mut self) {\n let (x, y) = self.previous_pos();\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n \/\/\/ Get right pos\n pub fn right_pos(&self, n: usize) -> (usize, usize) {\n let x = self.x() + n;\n\n if x > self.text[self.y()].len() {\n (self.text[self.y()].len(), self.y())\n } else {\n (x, self.y())\n }\n }\n pub fn right(&mut self, n: usize) {\n self.cursor_mut().x = self.right_pos(n).0;\n }\n\n \/\/\/ Get left pos\n pub fn left_pos(&self, n: usize) -> (usize, usize) {\n if n <= self.x() {\n (self.x() - n, self.y())\n } else {\n (0, self.y())\n }\n\n }\n pub fn left(&mut self, n: usize) {\n self.cursor_mut().x = self.left_pos(n).0;\n }\n\n \/\/\/ Get up pos\n pub fn up_pos(&self, n: usize) -> (usize, usize) {\n if n <= self.y() {\n (self.cursor().x, self.y() - n)\n } else {\n (self.cursor().x, 0)\n }\n }\n pub fn up(&mut self, n: usize) {\n let (x, y) = self.up_pos(n);\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n \/\/\/ Get down pos\n pub fn down_pos(&self, n: usize) -> (usize, usize) {\n let y = self.y() + n;\n\n if y >= self.text.len() {\n (self.cursor().x, self.text.len() - 1)\n } else {\n (self.cursor().x, y)\n }\n }\n\n pub fn down(&mut self, n: usize) {\n let (x, y) = self.down_pos(n);\n self.cursor_mut().x = x;\n self.cursor_mut().y = y;\n }\n\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Add RawLink<commit_after>use core::ptr;\nuse core::fmt;\nuse core::mem;\n\n\/\/\/ A `RawLink` provides an `Option`-like interface to a raw pointer.\n#[allow(raw_pointer_derive)]\n#[derive(Debug, Copy, Clone, PartialEq)]\npub struct RawLink<T>(*mut T);\n\nunsafe impl<T> Send for RawLink<T>\nwhere T: 'static\n , T: Send {}\n\nunsafe impl<T> Sync for RawLink<T>\nwhere T: Send\n , T: Sync {}\n\nimpl<T> Default for RawLink<T> {\n fn default() -> Self { Self::none() }\n}\n\nimpl<T> fmt::Display for RawLink<T>\nwhere T: fmt::Display {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if self.0.is_null() {\n write!(f, \"RawLink::none\")\n } else {\n unsafe { write!(f, \"RawLink::some({})\", *self.0) }\n }\n\n }\n}\n\nimpl<T> RawLink<T> {\n\n \/\/\/ Equivalent of `Option::None` for a `RawLink`\n \/\/\/\n \/\/\/ # Returns\n \/\/\/ - A `RawLink<T>` wrapping a null pointer\n #[inline]\n pub fn none() -> RawLink<T> { RawLink(ptr::null_mut()) }\n\n \/\/\/ Equivalent of `Option::Some` for a `RawLink`\n \/\/\/\n \/\/\/ # Returns\n \/\/\/ - A `RawLink<T>` wrapping a pointer to the specified value\n #[inline]\n pub fn some(thing: &mut T) -> RawLink<T> { RawLink(thing) }\n\n \/\/\/ Resolve the `RawLink` to an `Option`\n \/\/\/\n \/\/\/ # Returns\n \/\/\/ - `Some<&'a T>` if the `RawLink` is not a null pointer\n \/\/\/ - `None` if the `RawLink` is a null pointer\n \/\/\/\n \/\/\/ # Unsafe due to\n \/\/\/ - Returning a reference with an arbitrary lifetime\n \/\/\/ - Dereferencing a raw pointer\n #[inline]\n pub unsafe fn resolve<'a>(&self) -> Option<&'a T> {\n self.0.as_ref()\n }\n\n \/\/\/ Resolve the `RawLink` to an `Option` on a mutable pointer\n \/\/\/\n \/\/\/ # Returns\n \/\/\/ - `Some<&'a mut T>` if the `RawLink` is not a null pointer\n \/\/\/ - `None` if the `RawLink` is a null pointer\n \/\/\/\n \/\/\/ # Unsafe due to\n \/\/\/ - Returning a reference with an arbitrary lifetime\n \/\/\/ - Dereferencing a raw pointer\n #[inline]\n pub unsafe fn resolve_mut<'a>(&self) -> Option<&'a mut T> {\n self.0.as_mut()\n }\n\n \/\/\/ Returns the `RawLink` and replaces it with `RawLink::none()`.\n #[inline]\n pub fn take(&mut self) -> Self { mem::replace(self, Self::none()) }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>changed get_opt to get_checked<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>impl Display for Action<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ xfail-fast - check-fast doesn't understand aux-build\n\/\/ aux-build:cci_intrinsic.rs\n\n\/\/ xfail-check\n\nuse cci_intrinsic;\nimport cci_intrinsic::atomic_xchng;\n\nfn main() {\n let mut x = 1;\n atomic_xchng(x, 5);\n assert x == 5;\n}\n<commit_msg>Fix xfail comment<commit_after>\/\/ xfail-fast - check-fast doesn't understand aux-build\n\/\/ aux-build:cci_intrinsic.rs\n\n\/\/ xfail-test\n\nuse cci_intrinsic;\nimport cci_intrinsic::atomic_xchng;\n\nfn main() {\n let mut x = 1;\n atomic_xchng(x, 5);\n assert x == 5;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add cargo-fmt binary<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Inspired by Paul Woolcock's cargo-fmt (https:\/\/github.com\/pwoolcoc\/cargo-fmt\/)\n\n#![cfg(not(test))]\n#![cfg(feature=\"cargo-fmt\")]\n\nextern crate getopts;\nextern crate walkdir;\nextern crate rustc_serialize;\n\nuse std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::str;\n\nuse getopts::Options;\nuse walkdir::{WalkDir, DirEntry};\nuse rustc_serialize::json::Json;\n\nfn main() {\n let mut opts = getopts::Options::new();\n opts.optflag(\"h\", \"help\", \"show this message\");\n\n let matches = match opts.parse(env::args().skip(1)) {\n Ok(m) => m,\n Err(e) => {\n print_usage(&opts, &e.to_string());\n return;\n }\n };\n\n if matches.opt_present(\"h\") {\n print_usage(&opts, \"\");\n } else {\n format_crate(&opts);\n }\n}\n\nfn print_usage(opts: &Options, reason: &str) {\n let msg = format!(\"{}\\nusage: cargo fmt [options]\", reason);\n println!(\"{}\\nThis utility formats all readable .rs files in the src directory of the \\\n current crate using rustfmt.\",\n opts.usage(&msg));\n}\n\nfn format_crate(opts: &Options) {\n let mut root = match locate_root() {\n Ok(r) => r,\n Err(e) => {\n print_usage(opts, &e.to_string());\n return;\n }\n };\n\n \/\/ Currently only files in [root]\/src can be formatted\n root.push(\"src\");\n \/\/ All unreadable or non .rs files are skipped\n let files: Vec<_> = WalkDir::new(root)\n .into_iter()\n .filter(is_rs_file)\n .filter_map(|f| f.ok())\n .map(|e| e.path().to_owned())\n .collect();\n\n format_files(&files).unwrap_or_else(|e| print_usage(opts, &e.to_string()));\n}\n\nfn locate_root() -> Result<PathBuf, std::io::Error> {\n \/\/ This seems adequate, as cargo-fmt can only be used systems that have Cargo installed\n let output = try!(Command::new(\"cargo\").arg(\"locate-project\").output());\n if output.status.success() {\n \/\/ We assume cargo locate-project is not broken and\n \/\/ it will output a valid json document\n let data = &String::from_utf8(output.stdout).unwrap();\n let json = Json::from_str(data).unwrap();\n let root = PathBuf::from(json.find(\"root\").unwrap().as_string().unwrap());\n\n \/\/ root.parent() should never fail if locate-project's output is correct\n Ok(root.parent().unwrap().to_owned())\n } else {\n \/\/ This happens when cargo-fmt is not used inside a crate\n Err(std::io::Error::new(std::io::ErrorKind::NotFound,\n str::from_utf8(&output.stderr).unwrap()))\n }\n}\n\nfn is_rs_file(entry: &Result<walkdir::DirEntry, walkdir::Error>) -> bool {\n match *entry {\n Ok(ref file) => {\n match file.path().extension() {\n Some(ext) => ext == \"rs\",\n None => false,\n }\n }\n Err(_) => false,\n }\n}\n\nfn format_files(files: &Vec<PathBuf>) -> Result<(), std::io::Error> {\n let mut command = try!(Command::new(\"rustfmt\")\n .arg(\"--write-mode=overwrite\")\n .args(files)\n .spawn());\n try!(command.wait());\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #33696 - nham:add-test-21225, r=luqmana<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test error message when enum variants are used as types\n\n\n\/\/ issue 21225\nenum Ty {\n A,\n B(Ty::A),\n \/\/~^ ERROR: found value `Ty::A` used as a type\n}\n\n\n\/\/ issue 19197\nenum E {\n A\n}\n\nimpl E::A {}\n\/\/~^ ERROR: found value `E::A` used as a type\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use raii::Raii;\nuse {ffi, Handle, Return};\nuse std::ptr::null_mut;\nuse std;\n\n\/\/\/ Implemented for fixed size type those representation is directly compatible with ODBC\npub unsafe trait FixedSizeType: Sized + Default {\n fn c_data_type() -> ffi::SqlCDataType;\n}\n\n\/\/\/ Indicates that a type can be retrieved using `Cursor::get_data`\npub unsafe trait Output: Sized {\n fn get_data(stmt: &mut Raii<ffi::Stmt>,\n col_or_param_num: u16,\n buffer: &mut [u8])\n -> Return<Option<Self>>;\n}\n\nunsafe impl FixedSizeType for i16 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_SSHORT\n }\n}\n\nunsafe impl FixedSizeType for u16 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_USHORT\n }\n}\n\nunsafe impl FixedSizeType for i32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_SLONG\n }\n}\n\nunsafe impl FixedSizeType for u32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_ULONG\n }\n}\n\nunsafe impl FixedSizeType for f32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_FLOAT\n }\n}\n\nunsafe impl FixedSizeType for f64 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_DOUBLE\n }\n}\n\nunsafe impl<T> Output for T\n where T: FixedSizeType\n{\n fn get_data(stmt: &mut Raii<ffi::Stmt>,\n col_or_param_num: u16,\n _: &mut [u8])\n -> Return<Option<Self>> {\n stmt.get_data_fixed_size(col_or_param_num)\n }\n}\n\nunsafe impl Output for String {\n fn get_data(stmt: &mut Raii<ffi::Stmt>,\n col_or_param_num: u16,\n buffer: &mut [u8])\n -> Return<Option<Self>> {\n stmt.get_data_string(col_or_param_num, buffer)\n }\n}\n\nimpl Raii<ffi::Stmt> {\n fn get_data_fixed_size<T>(&mut self, col_or_param_num: u16) -> Return<Option<T>>\n where T: FixedSizeType\n {\n let mut out = T::default();\n let mut indicator: ffi::SQLLEN = 0;\n unsafe {\n \/\/ Get buffer length...\n let result = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n T::c_data_type(),\n &mut out as *mut T as ffi::SQLPOINTER,\n std::mem::size_of::<Self>() as ffi::SQLLEN,\n &mut indicator as *mut ffi::SQLLEN);\n match result {\n ffi::SQL_SUCCESS => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(out))\n }\n }\n ffi::SQL_SUCCESS_WITH_INFO => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(out))\n }\n }\n ffi::SQL_ERROR => Return::Error,\n ffi::SQL_NO_DATA => panic!(\"SQLGetData has already returned the colmun data\"),\n r => panic!(\"unexpected return value from SQLGetData: {:?}\", r),\n }\n }\n }\n\n fn get_data_string(&mut self,\n col_or_param_num: u16,\n buffer: &mut [u8])\n -> Return<Option<String>> {\n if buffer.len() == 0 {\n panic!(\"buffer length may not be null\");\n }\n if buffer.len() > ffi::SQLLEN::max_value() as usize {\n panic!(\"buffer is larger than {} bytes\", ffi::SQLLEN::max_value());\n }\n\n let mut indicator: ffi::SQLLEN = 0;\n unsafe {\n \/\/ Get buffer length...\n let result = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n ffi::SQL_C_CHAR,\n buffer.as_mut_ptr() as ffi::SQLPOINTER,\n buffer.len() as ffi::SQLLEN,\n &mut indicator as *mut ffi::SQLLEN);\n match result 
{\n ffi::SQL_SUCCESS => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(std::str::from_utf8(&buffer[..(indicator as usize)])\n .unwrap()\n .to_owned()))\n }\n }\n ffi::SQL_SUCCESS_WITH_INFO => {\n if indicator == ffi::SQL_NO_TOTAL {\n Return::SuccessWithInfo(None)\n } else {\n \/\/ Check if string has been truncated. String is also truncated if\n \/\/ indicator is equal to BUF_LENGTH because of terminating nul\n if indicator >= buffer.len() as ffi::SQLLEN {\n let extra_space = (indicator as usize + 1) - (buffer.len() - 1);\n let mut heap_buf = Vec::with_capacity((indicator as usize) + 1);\n \/\/ Copy everything but the terminating zero into the new buffer\n heap_buf.extend_from_slice(&buffer[..(buffer.len() - 1)]);\n \/\/ increase length\n heap_buf.extend(std::iter::repeat(0).take(extra_space));\n \/\/ Get remainder of string\n let ret = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n ffi::SQL_C_CHAR,\n heap_buf.as_mut_slice()[buffer.len() - 1..]\n .as_mut_ptr() as\n ffi::SQLPOINTER,\n extra_space as ffi::SQLLEN,\n null_mut());\n heap_buf.pop();\n let value = String::from_utf8(heap_buf).unwrap();\n match ret {\n ffi::SQL_SUCCESS => Return::Success(Some(value)),\n ffi::SQL_SUCCESS_WITH_INFO => Return::SuccessWithInfo(Some(value)),\n ffi::SQL_ERROR => Return::Error,\n r => panic!(\"SQLGetData returned {:?}\", r),\n }\n } else {\n \/\/ No truncation. Warning may be due to some other issue.\n Return::SuccessWithInfo(Some(std::str::from_utf8(&buffer[..(indicator as\n usize)])\n .unwrap()\n .to_owned()))\n }\n }\n }\n ffi::SQL_ERROR => Return::Error,\n ffi::SQL_NO_DATA => panic!(\"SQLGetData has already returned the colmun data\"),\n r => panic!(\"unexpected return value from SQLGetData: {:?}\", r),\n }\n }\n }\n}\n\n<commit_msg>first attempt at get_data for str<commit_after>use raii::Raii;\nuse {ffi, Handle, Return};\nuse std::ptr::null_mut;\nuse std::str::from_utf8;\nuse std;\n\n\/\/\/ Implemented for fixed size type those representation is directly compatible with ODBC\npub unsafe trait FixedSizeType: Sized + Default {\n fn c_data_type() -> ffi::SqlCDataType;\n}\n\n\/\/\/ Indicates that a type can be retrieved using `Cursor::get_data`\npub unsafe trait Output: Sized {\n fn get_data<'a>(stmt: &mut Raii<ffi::Stmt>,\n col_or_param_num: u16,\n buffer: &'a mut [u8])\n -> Return<Option<Self>>;\n}\n\nunsafe impl FixedSizeType for u8 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_UTINYINT\n }\n}\n\nunsafe impl FixedSizeType for i8 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_STINYINT\n }\n}\n\nunsafe impl FixedSizeType for i16 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_SSHORT\n }\n}\n\nunsafe impl FixedSizeType for u16 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_USHORT\n }\n}\n\nunsafe impl FixedSizeType for i32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_SLONG\n }\n}\n\nunsafe impl FixedSizeType for u32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_ULONG\n }\n}\n\nunsafe impl FixedSizeType for f32 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_FLOAT\n }\n}\n\nunsafe impl FixedSizeType for f64 {\n fn c_data_type() -> ffi::SqlCDataType {\n ffi::SQL_C_DOUBLE\n }\n}\n\nunsafe impl<T> Output for T\n where T: FixedSizeType\n{\n fn get_data(stmt: &mut Raii<ffi::Stmt>,\n col_or_param_num: u16,\n _: &mut [u8])\n -> Return<Option<Self>> {\n stmt.get_data_fixed_size(col_or_param_num)\n }\n}\n\nunsafe impl Output for String {\n fn get_data(stmt: &mut Raii<ffi::Stmt>,\n 
col_or_param_num: u16,\n buffer: &mut [u8])\n -> Return<Option<Self>> {\n stmt.get_data_string(col_or_param_num, buffer)\n }\n}\n\nimpl Raii<ffi::Stmt> {\n fn get_data_fixed_size<T>(&mut self, col_or_param_num: u16) -> Return<Option<T>>\n where T: FixedSizeType\n {\n let mut out = T::default();\n let mut indicator: ffi::SQLLEN = 0;\n unsafe {\n \/\/ Get buffer length...\n let result = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n T::c_data_type(),\n &mut out as *mut T as ffi::SQLPOINTER,\n std::mem::size_of::<Self>() as ffi::SQLLEN,\n &mut indicator as *mut ffi::SQLLEN);\n match result {\n ffi::SQL_SUCCESS => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(out))\n }\n }\n ffi::SQL_SUCCESS_WITH_INFO => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(out))\n }\n }\n ffi::SQL_ERROR => Return::Error,\n ffi::SQL_NO_DATA => panic!(\"SQLGetData has already returned the colmun data\"),\n r => panic!(\"unexpected return value from SQLGetData: {:?}\", r),\n }\n }\n }\n\n fn get_data_str<'a>(&mut self,\n col_or_param: u16,\n buffer: &'a mut [u8])\n -> Return<Option<&'a str>> {\n if buffer.len() == 0 {\n panic!(\"buffer length may not be null\");\n }\n if buffer.len() > ffi::SQLLEN::max_value() as usize {\n panic!(\"buffer is larger than {} bytes\", ffi::SQLLEN::max_value());\n }\n let mut indicator: ffi::SQLLEN = 0;\n unsafe {\n \/\/ Get buffer length...\n let result = ffi::SQLGetData(self.handle(),\n col_or_param,\n ffi::SQL_C_CHAR,\n buffer.as_mut_ptr() as ffi::SQLPOINTER,\n buffer.len() as ffi::SQLLEN,\n &mut indicator as *mut ffi::SQLLEN);\n match result {\n ffi::SQL_SUCCESS => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(from_utf8(&buffer[..(indicator as usize)]).unwrap()))\n }\n }\n ffi::SQL_SUCCESS_WITH_INFO => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::SuccessWithInfo(None)\n } else {\n if indicator >= buffer.len() as ffi::SQLLEN {\n panic!(\"Buffer is not large enough to hold string\")\n } else {\n Return::SuccessWithInfo(Some(from_utf8(&buffer[..(indicator as\n usize)])\n .unwrap()))\n }\n }\n }\n ffi::SQL_ERROR => Return::Error,\n ffi::SQL_NO_DATA => panic!(\"SQLGetData has already returned the colmun data\"),\n r => panic!(\"unexpected return value from SQLGetData: {:?}\", r),\n }\n }\n }\n\n fn get_data_string(&mut self,\n col_or_param_num: u16,\n buffer: &mut [u8])\n -> Return<Option<String>> {\n if buffer.len() == 0 {\n panic!(\"buffer length may not be null\");\n }\n if buffer.len() > ffi::SQLLEN::max_value() as usize {\n panic!(\"buffer is larger than {} bytes\", ffi::SQLLEN::max_value());\n }\n\n let mut indicator: ffi::SQLLEN = 0;\n unsafe {\n \/\/ Get buffer length...\n let result = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n ffi::SQL_C_CHAR,\n buffer.as_mut_ptr() as ffi::SQLPOINTER,\n buffer.len() as ffi::SQLLEN,\n &mut indicator as *mut ffi::SQLLEN);\n match result {\n ffi::SQL_SUCCESS => {\n if indicator == ffi::SQL_NULL_DATA {\n Return::Success(None)\n } else {\n Return::Success(Some(from_utf8(&buffer[..(indicator as usize)])\n .unwrap()\n .to_owned()))\n }\n }\n ffi::SQL_SUCCESS_WITH_INFO => {\n if indicator == ffi::SQL_NO_TOTAL {\n Return::SuccessWithInfo(None)\n } else {\n \/\/ Check if string has been truncated. 
String is also truncated if\n \/\/ indicator is equal to BUF_LENGTH because of terminating nul\n if indicator >= buffer.len() as ffi::SQLLEN {\n let extra_space = (indicator as usize + 1) - (buffer.len() - 1);\n let mut heap_buf = Vec::with_capacity((indicator as usize) + 1);\n \/\/ Copy everything but the terminating zero into the new buffer\n heap_buf.extend_from_slice(&buffer[..(buffer.len() - 1)]);\n \/\/ increase length\n heap_buf.extend(std::iter::repeat(0).take(extra_space));\n \/\/ Get remainder of string\n let ret = ffi::SQLGetData(self.handle(),\n col_or_param_num,\n ffi::SQL_C_CHAR,\n heap_buf.as_mut_slice()[buffer.len() - 1..]\n .as_mut_ptr() as\n ffi::SQLPOINTER,\n extra_space as ffi::SQLLEN,\n null_mut());\n heap_buf.pop();\n let value = String::from_utf8(heap_buf).unwrap();\n match ret {\n ffi::SQL_SUCCESS => Return::Success(Some(value)),\n ffi::SQL_SUCCESS_WITH_INFO => Return::SuccessWithInfo(Some(value)),\n ffi::SQL_ERROR => Return::Error,\n r => panic!(\"SQLGetData returned {:?}\", r),\n }\n } else {\n \/\/ No truncation. Warning may be due to some other issue.\n Return::SuccessWithInfo(Some(from_utf8(&buffer[..(indicator as\n usize)])\n .unwrap()\n .to_owned()))\n }\n }\n }\n ffi::SQL_ERROR => Return::Error,\n ffi::SQL_NO_DATA => panic!(\"SQLGetData has already returned the colmun data\"),\n r => panic!(\"unexpected return value from SQLGetData: {:?}\", r),\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactoring loop<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test for better ptr handling in enum niches<commit_after>\/\/ A niche-optimized enum where the discriminant is a pointer value -- relies on ptr-to-int casts in\n\/\/ the niche handling code.\n\/\/ compile-flags: -Zmiri-disable-stacked-borrows -Zmiri-disable-validation\n\nfn main() {\n let x = 42;\n let val: Option<&i32> = unsafe { std::mem::transmute((&x as *const i32).wrapping_offset(2)) };\n assert!(val.is_some());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum E = u32;\nstruct S { a: u8, b: E }\nconst C: S = S { a: 0xA5, b: E(0xDEADBEEF) };\n\nfn main() {\n assert C.b == 0xDEADBEEF;\n}\n<commit_msg>pub fn main<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum E = u32;\nstruct S { a: u8, b: E }\nconst C: S = S { a: 0xA5, b: E(0xDEADBEEF) };\n\npub fn main() {\n assert C.b == 0xDEADBEEF;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added `ascii` module to core<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Operations on ASCII strings and characters.\n\/\/!\n\/\/! Most string operations in Rust act on UTF-8 strings. However, at times it\n\/\/! makes more sense to only consider the ASCII character set for a specific\n\/\/! operation.\n\/\/!\n\/\/! The [`escape_default`] function provides an iterator over the bytes of an\n\/\/! escaped version of the character given.\n\/\/!\n\/\/! [`escape_default`]: fn.escape_default.html\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fmt;\nuse ops::Range;\nuse iter::FusedIterator;\n\n\/\/\/ An iterator over the escaped version of a byte.\n\/\/\/\n\/\/\/ This `struct` is created by the [`escape_default`] function. See its\n\/\/\/ documentation for more.\n\/\/\/\n\/\/\/ [`escape_default`]: fn.escape_default.html\n#[unstable(feature = \"core_ascii\", issue = \"46409\")]\npub struct EscapeDefault {\n range: Range<usize>,\n data: [u8; 4],\n}\n\n\/\/\/ Returns an iterator that produces an escaped version of a `u8`.\n\/\/\/\n\/\/\/ The default is chosen with a bias toward producing literals that are\n\/\/\/ legal in a variety of languages, including C++11 and similar C-family\n\/\/\/ languages. The exact rules are:\n\/\/\/\n\/\/\/ * Tab is escaped as `\\t`.\n\/\/\/ * Carriage return is escaped as `\\r`.\n\/\/\/ * Line feed is escaped as `\\n`.\n\/\/\/ * Single quote is escaped as `\\'`.\n\/\/\/ * Double quote is escaped as `\\\"`.\n\/\/\/ * Backslash is escaped as `\\\\`.\n\/\/\/ * Any character in the 'printable ASCII' range `0x20` .. 
`0x7e`\n\/\/\/ inclusive is not escaped.\n\/\/\/ * Any other chars are given hex escapes of the form '\\xNN'.\n\/\/\/ * Unicode escapes are never generated by this function.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ let escaped = ascii::escape_default(b'0').next().unwrap();\n\/\/\/ assert_eq!(b'0', escaped);\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\t');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b't', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\r');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'r', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\n');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'n', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\'');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'\\'', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\"');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'\"', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\\\');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/\n\/\/\/ let mut escaped = ascii::escape_default(b'\\x9d');\n\/\/\/\n\/\/\/ assert_eq!(b'\\\\', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'x', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'9', escaped.next().unwrap());\n\/\/\/ assert_eq!(b'd', escaped.next().unwrap());\n\/\/\/ ```\n#[unstable(feature = \"core_ascii\", issue = \"46409\")]\npub fn escape_ascii(c: u8) -> EscapeDefault {\n let (data, len) = match c {\n b'\\t' => ([b'\\\\', b't', 0, 0], 2),\n b'\\r' => ([b'\\\\', b'r', 0, 0], 2),\n b'\\n' => ([b'\\\\', b'n', 0, 0], 2),\n b'\\\\' => ([b'\\\\', b'\\\\', 0, 0], 2),\n b'\\'' => ([b'\\\\', b'\\'', 0, 0], 2),\n b'\"' => ([b'\\\\', b'\"', 0, 0], 2),\n b'\\x20' ... b'\\x7e' => ([c, 0, 0, 0], 1),\n _ => ([b'\\\\', b'x', hexify(c >> 4), hexify(c & 0xf)], 4),\n };\n\n return EscapeDefault { range: 0..len, data };\n\n fn hexify(b: u8) -> u8 {\n match b {\n 0 ... 9 => b'0' + b,\n _ => b'a' + b - 10,\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Iterator for EscapeDefault {\n type Item = u8;\n fn next(&mut self) -> Option<u8> { self.range.next().map(|i| self.data[i]) }\n fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl DoubleEndedIterator for EscapeDefault {\n fn next_back(&mut self) -> Option<u8> {\n self.range.next_back().map(|i| self.data[i])\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl ExactSizeIterator for EscapeDefault {}\n#[unstable(feature = \"fused\", issue = \"35602\")]\nimpl FusedIterator for EscapeDefault {}\n\n#[stable(feature = \"std_debug\", since = \"1.16.0\")]\nimpl fmt::Debug for EscapeDefault {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"EscapeDefault { .. 
}\")\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use char::from_u32;\n\n #[test]\n fn test_is_ascii() {\n assert!(b\"\".is_ascii());\n assert!(b\"banana\\0\\x7F\".is_ascii());\n assert!(b\"banana\\0\\x7F\".iter().all(|b| b.is_ascii()));\n assert!(!b\"Vi\\xe1\\xbb\\x87t Nam\".is_ascii());\n assert!(!b\"Vi\\xe1\\xbb\\x87t Nam\".iter().all(|b| b.is_ascii()));\n assert!(!b\"\\xe1\\xbb\\x87\".iter().any(|b| b.is_ascii()));\n\n assert!(\"\".is_ascii());\n assert!(\"banana\\0\\u{7F}\".is_ascii());\n assert!(\"banana\\0\\u{7F}\".chars().all(|c| c.is_ascii()));\n assert!(!\"ประเทศไทย中华Việt Nam\".chars().all(|c| c.is_ascii()));\n assert!(!\"ประเทศไทย中华ệ \".chars().any(|c| c.is_ascii()));\n }\n\n #[test]\n fn test_to_ascii_uppercase() {\n assert_eq!(\"url()URL()uRl()ürl\".to_ascii_uppercase(), \"URL()URL()URL()üRL\");\n assert_eq!(\"hıKß\".to_ascii_uppercase(), \"HıKß\");\n\n for i in 0..501 {\n let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }\n else { i };\n assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_uppercase(),\n (from_u32(upper).unwrap()).to_string());\n }\n }\n\n #[test]\n fn test_to_ascii_lowercase() {\n assert_eq!(\"url()URL()uRl()Ürl\".to_ascii_lowercase(), \"url()url()url()Ürl\");\n \/\/ Dotted capital I, Kelvin sign, Sharp S.\n assert_eq!(\"HİKß\".to_ascii_lowercase(), \"hİKß\");\n\n for i in 0..501 {\n let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }\n else { i };\n assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_lowercase(),\n (from_u32(lower).unwrap()).to_string());\n }\n }\n\n #[test]\n fn test_make_ascii_lower_case() {\n macro_rules! test {\n ($from: expr, $to: expr) => {\n {\n let mut x = $from;\n x.make_ascii_lowercase();\n assert_eq!(x, $to);\n }\n }\n }\n test!(b'A', b'a');\n test!(b'a', b'a');\n test!(b'!', b'!');\n test!('A', 'a');\n test!('À', 'À');\n test!('a', 'a');\n test!('!', '!');\n test!(b\"H\\xc3\\x89\".to_vec(), b\"h\\xc3\\x89\");\n test!(\"HİKß\".to_string(), \"hİKß\");\n }\n\n\n #[test]\n fn test_make_ascii_upper_case() {\n macro_rules! test {\n ($from: expr, $to: expr) => {\n {\n let mut x = $from;\n x.make_ascii_uppercase();\n assert_eq!(x, $to);\n }\n }\n }\n test!(b'a', b'A');\n test!(b'A', b'A');\n test!(b'!', b'!');\n test!('a', 'A');\n test!('à', 'à');\n test!('A', 'A');\n test!('!', '!');\n test!(b\"h\\xc3\\xa9\".to_vec(), b\"H\\xc3\\xa9\");\n test!(\"hıKß\".to_string(), \"HıKß\");\n\n let mut x = \"Hello\".to_string();\n x[..3].make_ascii_uppercase(); \/\/ Test IndexMut on String.\n assert_eq!(x, \"HELlo\")\n }\n\n #[test]\n fn test_eq_ignore_ascii_case() {\n assert!(\"url()URL()uRl()Ürl\".eq_ignore_ascii_case(\"url()url()url()Ürl\"));\n assert!(!\"Ürl\".eq_ignore_ascii_case(\"ürl\"));\n \/\/ Dotted capital I, Kelvin sign, Sharp S.\n assert!(\"HİKß\".eq_ignore_ascii_case(\"hİKß\"));\n assert!(!\"İ\".eq_ignore_ascii_case(\"i\"));\n assert!(!\"K\".eq_ignore_ascii_case(\"k\"));\n assert!(!\"ß\".eq_ignore_ascii_case(\"s\"));\n\n for i in 0..501 {\n let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }\n else { i };\n assert!((from_u32(i).unwrap()).to_string().eq_ignore_ascii_case(\n &from_u32(lower).unwrap().to_string()));\n }\n }\n\n #[test]\n fn inference_works() {\n let x = \"a\".to_string();\n x.eq_ignore_ascii_case(\"A\");\n }\n\n \/\/ Shorthands used by the is_ascii_* tests.\n macro_rules! 
assert_all {\n ($what:ident, $($str:tt),+) => {{\n $(\n for b in $str.chars() {\n if !b.$what() {\n panic!(\"expected {}({}) but it isn't\",\n stringify!($what), b);\n }\n }\n for b in $str.as_bytes().iter() {\n if !b.$what() {\n panic!(\"expected {}(0x{:02x})) but it isn't\",\n stringify!($what), b);\n }\n }\n assert!($str.$what());\n assert!($str.as_bytes().$what());\n )+\n }};\n ($what:ident, $($str:tt),+,) => (assert_all!($what,$($str),+))\n }\n macro_rules! assert_none {\n ($what:ident, $($str:tt),+) => {{\n $(\n for b in $str.chars() {\n if b.$what() {\n panic!(\"expected not-{}({}) but it is\",\n stringify!($what), b);\n }\n }\n for b in $str.as_bytes().iter() {\n if b.$what() {\n panic!(\"expected not-{}(0x{:02x})) but it is\",\n stringify!($what), b);\n }\n }\n )*\n }};\n ($what:ident, $($str:tt),+,) => (assert_none!($what,$($str),+))\n }\n\n #[test]\n fn test_is_ascii_alphabetic() {\n assert_all!(is_ascii_alphabetic,\n \"\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n );\n assert_none!(is_ascii_alphabetic,\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_uppercase() {\n assert_all!(is_ascii_uppercase,\n \"\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n );\n assert_none!(is_ascii_uppercase,\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_lowercase() {\n assert_all!(is_ascii_lowercase,\n \"abcdefghijklmnopqrstuvwxyz\",\n );\n assert_none!(is_ascii_lowercase,\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_alphanumeric() {\n assert_all!(is_ascii_alphanumeric,\n \"\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n );\n assert_none!(is_ascii_alphanumeric,\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_digit() {\n assert_all!(is_ascii_digit,\n \"\",\n \"0123456789\",\n );\n assert_none!(is_ascii_digit,\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_hexdigit() {\n assert_all!(is_ascii_hexdigit,\n \"\",\n \"0123456789\",\n \"abcdefABCDEF\",\n );\n assert_none!(is_ascii_hexdigit,\n \"ghijklmnopqrstuvwxyz\",\n 
\"GHIJKLMNOQPRSTUVWXYZ\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_punctuation() {\n assert_all!(is_ascii_punctuation,\n \"\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n );\n assert_none!(is_ascii_punctuation,\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_graphic() {\n assert_all!(is_ascii_graphic,\n \"\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n );\n assert_none!(is_ascii_graphic,\n \" \\t\\n\\x0c\\r\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_whitespace() {\n assert_all!(is_ascii_whitespace,\n \"\",\n \" \\t\\n\\x0c\\r\",\n );\n assert_none!(is_ascii_whitespace,\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x0b\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n }\n\n #[test]\n fn test_is_ascii_control() {\n assert_all!(is_ascii_control,\n \"\",\n \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\",\n \"\\x08\\x09\\x0a\\x0b\\x0c\\x0d\\x0e\\x0f\",\n \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\",\n \"\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f\",\n \"\\x7f\",\n );\n assert_none!(is_ascii_control,\n \"abcdefghijklmnopqrstuvwxyz\",\n \"ABCDEFGHIJKLMNOQPRSTUVWXYZ\",\n \"0123456789\",\n \"!\\\"#$%&'()*+,-.\/:;<=>?@[\\\\]^_`{|}~\",\n \" \",\n );\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add paramfile module to commit<commit_after>use std::error::Error;\nuse std::fs::File;\nuse std::io::Read;\n\nuse yaml::{Yaml, YamlLoader};\nuse parameter::{ParameterValue, ParameterValues};\n\n#[derive(Debug)]\n\/\/\/ ParameterFile struct\npub struct ParameterFile {\n \/\/\/ filename\n pub filename: String,\n \/\/\/ document string\n pub doc_str: String,\n \/\/\/ parameters allocated from this file\n pub parameters: ParameterValues,\n}\n\nfn parse_document(doc_str: &String, parameter_values: &mut ParameterValues) {\n let docs = YamlLoader::load_from_str(doc_str).unwrap();\n for doc in &docs {\n let primary_key = \"\";\n let param_values = parse_yaml(doc, primary_key);\n parameter_values.extend(param_values);\n }\n}\n\nfn parse_yaml(doc: &Yaml, primary_key: &str) -> ParameterValues {\n let mut param_values = ParameterValues::new();\n match doc {\n &Yaml::Hash(ref h) => {\n for (key,value) in h {\n let combined_key = primary_key.to_string() + key.as_str().unwrap();\n match value {\n &Yaml::String(ref s) => {\n let pv = ParameterValue::Plain(s.to_string());\n param_values.insert(combined_key,pv);\n },\n &Yaml::Integer(ref i) => {\n let pv = ParameterValue::Plain(i.to_string());\n param_values.insert(combined_key,pv);\n 
},\n &Yaml::Real(ref r) => {\n let pv = ParameterValue::Plain(r.to_string());\n param_values.insert(combined_key,pv);\n },\n _ => {\n \/\/ Value type not supported\n \/\/ Array, Alias and None\n }\n }\n }\n },\n &Yaml::String(ref s) => {\n let pv = ParameterValue::Plain(s.to_string());\n param_values.insert(primary_key.to_string(),pv);\n },\n _ => {\n \/\/ Key type not supported\n }\n }\n param_values\n}\n\nimpl ParameterFile {\n \/\/\/ Create a new parameterfile object, composed of a filename\n \/\/\/ and the parsed parameters\n pub fn from_file(filename: &str) -> Result<Self, String> {\n let mut parameter_values = ParameterValues::new();\n let mut fh = File::open(filename).map_err(|err| err.description().to_owned()).unwrap();\n let mut contents = String::new();\n fh.read_to_string(&mut contents).map_err(|err| err.description().to_owned())?;\n parse_document(&contents, &mut parameter_values);\n\n Ok(ParameterFile {\n filename: String::from(filename),\n doc_str: String::from(contents),\n parameters: parameter_values,\n })\n }\n\n \/\/\/ Create a new parameterfile object from a String representing a yaml document\n pub fn from_str(doc_str: String) -> Result<Self, String> {\n let mut parameter_values = ParameterValues::new();\n parse_document(&doc_str, &mut parameter_values);\n\n Ok(ParameterFile {\n filename: String::from(\"\"),\n doc_str: String::from(doc_str),\n parameters: parameter_values,\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Introduce mocks<commit_after>extern crate indy_crypto;\nextern crate rmp_serde;\nextern crate serde;\nextern crate serde_json;\nextern crate time;\n\nuse std::cmp::Eq;\nuse std::collections::{HashMap, HashSet};\nuse std::hash::{Hash, Hasher};\nuse super::zmq;\nuse errors::common::CommonError;\nuse utils::crypto::verkey_builder::build_full_verkey;\n\nuse self::indy_crypto::bls;\n\nuse services::ledger::merkletree::merkletree::MerkleTree;\nuse self::indy_crypto::utils::json::{JsonDecodable, JsonEncodable};\n\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\npub struct NodeData {\n pub alias: String,\n pub client_ip: Option<String>,\n #[serde(deserialize_with = \"string_or_number\")]\n #[serde(default)]\n pub client_port: Option<u64>,\n pub node_ip: Option<String>,\n #[serde(deserialize_with = \"string_or_number\")]\n #[serde(default)]\n pub node_port: Option<u64>,\n pub services: Option<Vec<String>>,\n pub blskey: Option<String>\n}\n\nfn string_or_number<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error>\n where D: serde::Deserializer<'de>\n{\n let deser_res: Result<serde_json::Value, _> = serde::Deserialize::deserialize(deserializer);\n match deser_res {\n Ok(serde_json::Value::String(s)) => match s.parse::<u64>() {\n Ok(num) => Ok(Some(num)),\n Err(err) => Err(serde::de::Error::custom(format!(\"Invalid Node transaction: {:?}\", err)))\n },\n Ok(serde_json::Value::Number(n)) => match n.as_u64() {\n Some(num) => Ok(Some(num)),\n None => Err(serde::de::Error::custom(format!(\"Invalid Node transaction\")))\n },\n Ok(serde_json::Value::Null) => Ok(None),\n _ => Err(serde::de::Error::custom(format!(\"Invalid Node transaction\"))),\n }\n}\n\n#[derive(Serialize, Deserialize)]\n#[serde(untagged)]\npub enum NodeTransaction {\n NodeTransactionV0(NodeTransactionV0),\n NodeTransactionV1(NodeTransactionV1)\n}\n\nimpl JsonEncodable for NodeTransaction {}\n\nimpl<'a> JsonDecodable<'a> for NodeTransaction {}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\npub struct NodeTransactionV0 {\n pub data: NodeData,\n pub dest: 
String,\n pub identifier: String,\n #[serde(rename = \"txnId\")]\n pub txn_id: Option<String>,\n pub verkey: Option<String>,\n #[serde(rename = \"type\")]\n pub txn_type: String\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\n#[serde(rename_all = \"camelCase\")]\npub struct NodeTransactionV1 {\n pub txn: Txn,\n pub txn_metadata: Metadata,\n pub req_signature: ReqSignature,\n pub ver: String\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\npub struct Txn {\n #[serde(rename = \"type\")]\n pub txn_type: String,\n #[serde(rename = \"protocolVersion\")]\n pub protocol_version: Option<i32>,\n pub data: TxnData,\n pub metadata: TxnMetadata\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\n#[serde(rename_all = \"camelCase\")]\npub struct Metadata {\n pub creation_time: Option<u64>,\n pub seq_no: Option<i32>,\n pub txn_id: Option<String>\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\n#[serde(rename_all = \"camelCase\")]\npub struct ReqSignature {\n #[serde(rename = \"type\")]\n pub type_: Option<String>,\n pub values: Option<Vec<ReqSignatureValue>>\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\npub struct ReqSignatureValue {\n pub from: Option<String>,\n pub value: Option<String>\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\npub struct TxnData {\n pub data: NodeData,\n pub dest: String,\n pub verkey: Option<String>\n}\n\n#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\n#[serde(rename_all = \"camelCase\")]\npub struct TxnMetadata {\n pub req_id: Option<i64>,\n pub from: String\n}\n\nimpl From<NodeTransaction> for NodeTransactionV1 {\n fn from(node_txn: NodeTransaction) -> Self {\n match node_txn {\n NodeTransaction::NodeTransactionV1(n_txn) => n_txn,\n NodeTransaction::NodeTransactionV0(n_txn) => {\n let txn = Txn {\n txn_type: n_txn.txn_type,\n protocol_version: None,\n data: TxnData {\n data: n_txn.data,\n dest: n_txn.dest,\n verkey: n_txn.verkey\n },\n metadata: TxnMetadata {\n req_id: None,\n from: n_txn.identifier\n },\n };\n NodeTransactionV1 {\n txn,\n txn_metadata: Metadata {\n seq_no: None,\n txn_id: n_txn.txn_id,\n creation_time: None\n },\n req_signature: ReqSignature {\n type_: None,\n values: None\n },\n ver: \"1\".to_string(),\n }\n }\n }\n }\n}\n\nimpl NodeTransactionV1 {\n pub fn update(&mut self, other: &mut NodeTransactionV1) -> Result<(), CommonError> {\n assert_eq!(self.txn.data.dest, other.txn.data.dest);\n assert_eq!(self.txn.data.data.alias, other.txn.data.data.alias);\n\n if let Some(ref mut client_ip) = other.txn.data.data.client_ip {\n self.txn.data.data.client_ip = Some(client_ip.to_owned());\n }\n if let Some(ref mut client_port) = other.txn.data.data.client_port {\n self.txn.data.data.client_port = Some(client_port.to_owned());\n }\n if let Some(ref mut node_ip) = other.txn.data.data.node_ip {\n self.txn.data.data.node_ip = Some(node_ip.to_owned());\n }\n if let Some(ref mut node_port) = other.txn.data.data.node_port {\n self.txn.data.data.node_port = Some(node_port.to_owned());\n }\n if let Some(ref mut blskey) = other.txn.data.data.blskey {\n self.txn.data.data.blskey = Some(blskey.to_owned());\n }\n if let Some(ref mut services) = other.txn.data.data.services {\n self.txn.data.data.services = Some(services.to_owned());\n }\n if other.txn.data.verkey.is_some() {\n self.txn.data.verkey = Some(build_full_verkey(&self.txn.data.dest, other.txn.data.verkey.as_ref().map(String::as_str))?);\n }\n Ok(())\n }\n}\n\n#[allow(non_snake_case)]\n#[derive(Serialize, Deserialize, 
Debug)]\npub struct LedgerStatus {\n pub txnSeqNo: usize,\n pub merkleRoot: String,\n pub ledgerId: u8,\n pub ppSeqNo: Option<String>,\n pub viewNo: Option<String>,\n}\n\n#[allow(non_snake_case)]\n#[derive(Serialize, Deserialize, Debug)]\npub struct ConsistencyProof {\n \/\/TODO almost all fields Option<> or find better approach\n pub seqNoEnd: usize,\n pub seqNoStart: usize,\n pub ledgerId: usize,\n pub hashes: Vec<String>,\n pub oldMerkleRoot: String,\n pub newMerkleRoot: String,\n}\n\n#[allow(non_snake_case)]\n#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]\npub struct CatchupReq {\n pub ledgerId: usize,\n pub seqNoStart: usize,\n pub seqNoEnd: usize,\n pub catchupTill: usize,\n}\n\nimpl<'a> JsonDecodable<'a> for CatchupReq {}\n\n#[allow(non_snake_case)]\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\npub struct CatchupRep {\n pub ledgerId: usize,\n pub consProof: Vec<String>,\n pub txns: HashMap<String, serde_json::Value>,\n}\n\nimpl CatchupRep {\n pub fn min_tx(&self) -> Result<usize, CommonError> {\n let mut min = None;\n for (k, _) in self.txns.iter() {\n let val = k.parse::<usize>()\n .map_err(|err| CommonError::InvalidStructure(format!(\"{:?}\", err)))?;\n match min {\n None => min = Some(val),\n Some(m) => if val < m { min = Some(val) }\n }\n }\n min.ok_or(CommonError::InvalidStructure(format!(\"Empty Map\")))\n }\n}\n\n#[derive(Serialize, Debug, Deserialize)]\n#[serde(untagged)]\npub enum Reply {\n ReplyV0(ReplyV0),\n ReplyV1(ReplyV1)\n}\n\nimpl Reply {\n pub fn req_id(self) -> u64 {\n match self {\n Reply::ReplyV0(reply) => reply.result.req_id,\n Reply::ReplyV1(reply) => reply.result.txn.metadata.req_id\n }\n }\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct ReplyV0 {\n pub result: ResponseMetadata\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct ReplyV1 {\n pub result: ReplyResultV1\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct ReplyResultV1 {\n pub txn: ReplyTxnV1\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct ReplyTxnV1 {\n pub metadata: ResponseMetadata\n}\n\n#[derive(Serialize, Debug, Deserialize)]\n#[serde(untagged)]\npub enum Response {\n ResponseV0(ResponseV0),\n ResponseV1(ResponseV1)\n}\n\nimpl Response {\n pub fn req_id(&self) -> u64 {\n match self {\n &Response::ResponseV0(ref res) => res.req_id,\n &Response::ResponseV1(ref res) => res.metadata.req_id\n }\n }\n}\n\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(rename_all = \"camelCase\")]\npub struct ResponseV0 {\n pub req_id: u64\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct ResponseV1 {\n pub metadata: ResponseMetadata\n}\n\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(rename_all = \"camelCase\")]\npub struct ResponseMetadata {\n pub req_id: u64\n}\n\n#[derive(Serialize, Debug, Deserialize)]\n#[serde(untagged)]\npub enum PoolLedgerTxn {\n PoolLedgerTxnV0(PoolLedgerTxnV0),\n PoolLedgerTxnV1(PoolLedgerTxnV1)\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct PoolLedgerTxnV0 {\n pub txn: Response,\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct PoolLedgerTxnV1 {\n pub txn: PoolLedgerTxnDataV1,\n}\n\n#[derive(Serialize, Deserialize, Debug)]\npub struct PoolLedgerTxnDataV1 {\n pub txn: Response,\n}\n\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(rename_all = \"camelCase\")]\npub struct SimpleRequest {\n pub req_id: u64,\n}\n\nimpl JsonEncodable for SimpleRequest {}\n\nimpl<'a> JsonDecodable<'a> for SimpleRequest {}\n\n#[serde(tag = \"op\")]\n#[derive(Serialize, Deserialize, Debug)]\npub enum Message {\n 
#[serde(rename = \"CONSISTENCY_PROOF\")]\n ConsistencyProof(ConsistencyProof),\n #[serde(rename = \"LEDGER_STATUS\")]\n LedgerStatus(LedgerStatus),\n #[serde(rename = \"CATCHUP_REQ\")]\n CatchupReq(CatchupReq),\n #[serde(rename = \"CATCHUP_REP\")]\n CatchupRep(CatchupRep),\n #[serde(rename = \"REQACK\")]\n ReqACK(Response),\n #[serde(rename = \"REQNACK\")]\n ReqNACK(Response),\n #[serde(rename = \"REPLY\")]\n Reply(Reply),\n #[serde(rename = \"REJECT\")]\n Reject(Response),\n #[serde(rename = \"POOL_LEDGER_TXNS\")]\n PoolLedgerTxns(PoolLedgerTxn),\n Ping,\n Pong,\n}\n\nimpl Message {\n pub fn from_raw_str(str: &str) -> Result<Message, CommonError> {\n match str {\n \"po\" => Ok(Message::Pong),\n \"pi\" => Ok(Message::Ping),\n _ => Message::from_json(str).map_err(CommonError::from),\n }\n }\n}\n\nimpl JsonEncodable for Message {}\n\nimpl<'a> JsonDecodable<'a> for Message {}\n\n#[derive(Serialize, Deserialize)]\npub struct PoolConfig {\n pub genesis_txn: String\n}\n\nimpl JsonEncodable for PoolConfig {}\n\nimpl<'a> JsonDecodable<'a> for PoolConfig {}\n\nimpl PoolConfig {\n pub fn default_for_name(name: &str) -> PoolConfig {\n let mut txn = name.to_string();\n txn += \".txn\";\n PoolConfig { genesis_txn: txn }\n }\n}\n\npub struct RemoteNode {\n pub name: String,\n pub public_key: Vec<u8>,\n pub zaddr: String,\n pub zsock: Option<zmq::Socket>,\n pub is_blacklisted: bool,\n pub blskey: Option<bls::VerKey>\n}\n\npub struct CatchUpProcess {\n pub merkle_tree: MerkleTree,\n pub pending_reps: Vec<(CatchupRep, usize)>,\n pub resp_not_received_node_idx: HashSet<usize>,\n}\n\npub trait MinValue {\n fn get_min_index(&self) -> Result<usize, CommonError>;\n}\n\nimpl MinValue for Vec<(CatchupRep, usize)> {\n fn get_min_index(&self) -> Result<usize, CommonError> {\n let mut res = None;\n for (index, &(ref catchup_rep, _)) in self.iter().enumerate() {\n match res {\n None => { res = Some((catchup_rep, index)); }\n Some((min_rep, _)) => if catchup_rep.min_tx()? < min_rep.min_tx()? 
{\n res = Some((catchup_rep, index));\n }\n }\n }\n Ok(res.ok_or(CommonError::InvalidStructure(\"Element not Found\".to_string()))?.1)\n }\n}\n\n#[derive(Debug)]\npub struct HashableValue {\n pub inner: serde_json::Value\n}\n\nimpl Eq for HashableValue {}\n\nimpl Hash for HashableValue {\n fn hash<H: Hasher>(&self, state: &mut H) {\n serde_json::to_string(&self.inner).unwrap().hash(state); \/\/TODO\n }\n}\n\nimpl PartialEq for HashableValue {\n fn eq(&self, other: &HashableValue) -> bool {\n self.inner.eq(&other.inner)\n }\n}\n\n\n#[derive(Debug, PartialEq, Eq)]\npub struct ResendableRequest {\n pub request: String,\n pub start_node: usize,\n pub next_node: usize,\n pub next_try_send_time: Option<time::Tm>,\n}\n\n#[derive(Debug, PartialEq, Eq)]\npub struct CommandProcess {\n pub nack_cnt: usize,\n pub replies: HashMap<HashableValue, usize>,\n pub accum_replies : Option<HashableValue>,\n pub parent_cmd_ids: Vec<i32>,\n pub resendable_request: Option<ResendableRequest>,\n pub full_cmd_timeout: Option<time::Tm>,\n}\n\n#[derive(Debug, PartialEq, Eq)]\npub enum ZMQLoopAction {\n RequestToSend(RequestToSend),\n MessageToProcess(MessageToProcess),\n Terminate(i32),\n Refresh(i32),\n Timeout,\n}\n\n#[derive(Debug, PartialEq, Eq)]\npub struct RequestToSend {\n pub request: String,\n pub id: i32,\n}\n\n#[derive(Debug, PartialEq, Eq)]\npub struct MessageToProcess {\n pub message: String,\n pub node_idx: usize,\n}<|endoftext|>"} {"text":"<commit_before>use std::cell::{Cell, RefCell};\nuse std::io::{self, ErrorKind};\nuse std::sync::Arc;\nuse std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};\nuse std::sync::mpsc;\n\nuse mio;\nuse slab::Slab;\nuse futures::{Future, Tokens, Wake};\n\npub type Waiter = Arc<Wake>;\npub type Source = Arc<mio::Evented + Send + Sync>;\n\nstatic NEXT_LOOP_ID: AtomicUsize = ATOMIC_USIZE_INIT;\nscoped_thread_local!(static CURRENT_LOOP: Loop);\n\nconst SLAB_CAPACITY: usize = 1024 * 64;\n\npub struct Loop {\n id: usize,\n active: Cell<bool>,\n io: RefCell<mio::Poll>,\n tx: mio::channel::Sender<Message>,\n rx: mio::channel::Receiver<Message>,\n dispatch: RefCell<Slab<Scheduled, usize>>,\n}\n\n\/\/\/ Handle to an event loop, used to construct I\/O objects, send messages, and\n\/\/\/ otherwise interact indirectly with the event loop itself.\n\/\/\/\n\/\/\/ Handles can be cloned, and when cloned they will still refer to the\n\/\/\/ same underlying event loop.\n#[derive(Clone)]\npub struct LoopHandle {\n id: usize,\n tx: mio::channel::Sender<Message>,\n}\n\n#[derive(Copy, Clone)]\npub enum Direction {\n Read,\n Write,\n}\n\nstruct Scheduled {\n source: Source,\n reader: Option<Waiter>,\n writer: Option<Waiter>,\n}\n\nimpl Scheduled {\n fn waiter_for(&mut self, dir: Direction) -> &mut Option<Waiter> {\n match dir {\n Direction::Read => &mut self.reader,\n Direction::Write => &mut self.writer,\n }\n }\n\n fn event_set(&self) -> mio::EventSet {\n let mut set = mio::EventSet::none();\n if self.reader.is_some() {\n set = set | mio::EventSet::readable()\n }\n if self.writer.is_some() {\n set = set | mio::EventSet::writable()\n }\n set\n }\n}\n\nenum Message {\n AddSource(Source, Arc<AtomicUsize>, Waiter),\n DropSource(usize),\n Schedule(usize, Direction, Waiter),\n Deschedule(usize, Direction),\n Shutdown,\n}\n\nfn register(poll: &mut mio::Poll, token: usize, sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.register(&*sched.source,\n mio::Token(token),\n mio::EventSet::none(),\n mio::PollOpt::level())\n .unwrap();\n}\n\nfn reregister(poll: &mut mio::Poll, token: usize, 
sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.reregister(&*sched.source,\n mio::Token(token),\n sched.event_set(),\n mio::PollOpt::edge() | mio::PollOpt::oneshot())\n .unwrap();\n}\n\nfn deregister(poll: &mut mio::Poll, sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.deregister(&*sched.source).unwrap();\n}\n\nimpl Loop {\n pub fn new() -> io::Result<Loop> {\n let (tx, rx) = mio::channel::from_std_channel(mpsc::channel());\n let io = try!(mio::Poll::new());\n try!(io.register(&rx,\n mio::Token(0),\n mio::EventSet::readable(),\n mio::PollOpt::edge()));\n Ok(Loop {\n id: NEXT_LOOP_ID.fetch_add(1, Ordering::Relaxed),\n active: Cell::new(true),\n io: RefCell::new(io),\n tx: tx,\n rx: rx,\n dispatch: RefCell::new(Slab::new_starting_at(1, SLAB_CAPACITY)),\n })\n }\n\n pub fn handle(&self) -> LoopHandle {\n LoopHandle {\n id: self.id,\n tx: self.tx.clone(),\n }\n }\n\n pub fn run<F: Future>(self, f: F) -> Result<F::Item, F::Error> {\n let (tx_res, rx_res) = mpsc::channel();\n let handle = self.handle();\n f.then(move |res| {\n handle.shutdown();\n tx_res.send(res)\n }).forget();\n\n while self.active.get() {\n let amt;\n \/\/ On Linux, Poll::poll is epoll_wait, which may return EINTR if a\n \/\/ ptracer attaches. This retry loop prevents crashing when\n \/\/ attaching strace, or similar.\n loop {\n match self.io.borrow_mut().poll(None) {\n Ok(a) => {\n amt = a;\n break;\n }\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n err @ Err(_) => {\n err.unwrap();\n }\n }\n }\n\n \/\/ TODO: coalesce token sets for a given Wake?\n for i in 0..amt {\n let event = self.io.borrow_mut().events().get(i).unwrap();\n let token = event.token().as_usize();\n if token == 0 {\n self.consume_queue();\n } else {\n let mut reader = None;\n let mut writer = None;\n\n if let Some(sched) = self.dispatch.borrow_mut().get_mut(token) {\n if event.kind().is_readable() {\n reader = sched.reader.take();\n }\n\n if event.kind().is_writable() {\n writer = sched.writer.take();\n }\n }\n\n CURRENT_LOOP.set(&self, || {\n if let Some(reader_wake) = reader.take() {\n reader_wake.wake(&Tokens::from_usize(token));\n }\n if let Some(writer_wake) = writer.take() {\n writer_wake.wake(&Tokens::from_usize(token));\n }\n });\n\n \/\/ For now, always reregister, to deal with the fact that\n \/\/ combined oneshot + read|write requires rearming even if\n \/\/ only one side fired.\n \/\/\n \/\/ TODO: optimize this\n if let Some(sched) = self.dispatch.borrow().get(token) {\n reregister(&mut self.io.borrow_mut(), token, &sched);\n }\n }\n }\n }\n\n rx_res.recv().unwrap()\n }\n\n fn add_source(&self, source: Source) -> usize {\n let sched = Scheduled {\n source: source,\n reader: None,\n writer: None,\n };\n let mut dispatch = self.dispatch.borrow_mut();\n \/\/ TODO: handle out of space\n let entry = dispatch.vacant_entry().unwrap();\n register(&mut self.io.borrow_mut(), entry.index(), &sched);\n entry.insert(sched).index()\n }\n\n fn drop_source(&self, token: usize) {\n let sched = self.dispatch.borrow_mut().remove(token).unwrap();\n deregister(&mut self.io.borrow_mut(), &sched);\n }\n\n fn schedule(&self, token: usize, dir: Direction, wake: Waiter) {\n let mut dispatch = self.dispatch.borrow_mut();\n let sched = dispatch.get_mut(token).unwrap();\n *sched.waiter_for(dir) = Some(wake);\n reregister(&mut self.io.borrow_mut(), token, sched);\n }\n\n fn deschedule(&self, token: usize, dir: Direction) {\n let mut dispatch = self.dispatch.borrow_mut();\n let sched = dispatch.get_mut(token).unwrap();\n *sched.waiter_for(dir) = 
None;\n reregister(&mut self.io.borrow_mut(), token, sched);\n }\n\n fn consume_queue(&self) {\n while let Ok(msg) = self.rx.try_recv() {\n self.notify(msg);\n }\n }\n\n fn notify(&self, msg: Message) {\n match msg {\n Message::AddSource(source, id, wake) => {\n let tok = self.add_source(source);\n id.store(tok, Ordering::Relaxed);\n wake.wake(&Tokens::from_usize(ADD_SOURCE_TOKEN));\n }\n Message::DropSource(tok) => self.drop_source(tok),\n Message::Schedule(tok, dir, wake) => self.schedule(tok, dir, wake),\n Message::Deschedule(tok, dir) => self.deschedule(tok, dir),\n Message::Shutdown => self.active.set(false),\n }\n }\n}\n\nimpl LoopHandle {\n fn send(&self, msg: Message) {\n let mut msg_dance = Some(msg);\n\n if CURRENT_LOOP.is_set() {\n CURRENT_LOOP.with(|lp| {\n if lp.id == self.id {\n \/\/ Need to execute all existing requests first, to ensure\n \/\/ that our message is processed \"in order\"\n lp.consume_queue();\n lp.notify(msg_dance.take().unwrap());\n }\n })\n }\n\n if let Some(msg) = msg_dance.take() {\n self.tx\n .send(msg)\n .map_err(|_| ())\n .expect(\"failed to send register message\") \/\/ todo: handle failure\n }\n }\n\n \/\/\/ Add a new source to an event loop, returning a future which will resolve\n \/\/\/ to the token that can be used to identify this source.\n \/\/\/\n \/\/\/ When a new I\/O object is created it needs to be communicated to the\n \/\/\/ event loop to ensure that it's registered and ready to receive\n \/\/\/ notifications. The event loop with then respond with a unique token that\n \/\/\/ this handle can be identified with (the resolved value of the returned\n \/\/\/ future).\n \/\/\/\n \/\/\/ This token is then passed in turn to each of the methods below to\n \/\/\/ interact with notifications on the I\/O object itself.\n pub fn add_source(&self, source: Source) -> AddSource {\n AddSource {\n loop_handle: self.clone(),\n source: Some(source),\n id: Arc::new(AtomicUsize::new(0)),\n scheduled: false,\n }\n }\n\n fn add_source_(&self, source: Source, id: Arc<AtomicUsize>, wake: Waiter) {\n self.send(Message::AddSource(source, id, wake));\n }\n\n \/\/\/ Begin listening for events on an event loop.\n \/\/\/\n \/\/\/ Once an I\/O object has been registered with the event loop through the\n \/\/\/ `add_source` method, this method can be used with the assigned token to\n \/\/\/ begin awaiting notifications.\n \/\/\/\n \/\/\/ The `dir` argument indicates how the I\/O object is expected to be\n \/\/\/ awaited on (either readable or writable) and the `wake` callback will be\n \/\/\/ invoked. Note that one the `wake` callback is invoked once it will not\n \/\/\/ be invoked again, it must be re-`schedule`d to continue receiving\n \/\/\/ notifications.\n pub fn schedule(&self, tok: usize, dir: Direction, wake: Arc<Wake>) {\n self.send(Message::Schedule(tok, dir, wake));\n }\n\n \/\/\/ Stop listening for events on an event loop.\n \/\/\/\n \/\/\/ Once a callback has been scheduled with the `schedule` method, it can be\n \/\/\/ unregistered from the event loop with this method. 
This method does not\n \/\/\/ guarantee that the callback will not be invoked if it hasn't already,\n \/\/\/ but a best effort will be made to ensure it is not called.\n pub fn deschedule(&self, tok: usize, dir: Direction) {\n self.send(Message::Deschedule(tok, dir));\n }\n\n \/\/\/ Unregister all information associated with a token on an event loop,\n \/\/\/ deallocating all internal resources assigned to the given token.\n \/\/\/\n \/\/\/ This method should be called whenever a source of events is being\n \/\/\/ destroyed. This will ensure that the event loop can reuse `tok` for\n \/\/\/ another I\/O object if necessary and also remove it from any poll\n \/\/\/ notifications and callbacks.\n \/\/\/\n \/\/\/ Note that wake callbacks may still be invoked after this method is\n \/\/\/ called as it may take some time for the message to drop a source to\n \/\/\/ reach the event loop. Despite this fact, this method will attempt to\n \/\/\/ ensure that the callbacks are **not** invoked, so pending scheduled\n \/\/\/ callbacks cannot be relied upon to get called.\n pub fn drop_source(&self, tok: usize) {\n self.send(Message::DropSource(tok));\n }\n\n pub fn shutdown(&self) {\n self.send(Message::Shutdown);\n }\n}\n\nconst ADD_SOURCE_TOKEN: usize = 0;\n\n\/\/\/ A future which will resolve a unique `tok` token for an I\/O object.\n\/\/\/\n\/\/\/ Created through the `LoopHandle::add_source` method, this future can also\n\/\/\/ resolve to an error if there's an issue communicating with the event loop.\npub struct AddSource {\n loop_handle: LoopHandle,\n source: Option<Source>,\n id: Arc<AtomicUsize>,\n scheduled: bool,\n}\n\nimpl Future for AddSource {\n type Item = usize;\n type Error = io::Error; \/\/ TODO: integrate channel error?\n\n fn poll(&mut self, tokens: &Tokens) -> Option<Result<usize, io::Error>> {\n if self.scheduled {\n if tokens.may_contain(&Tokens::from_usize(ADD_SOURCE_TOKEN)) {\n let id = self.id.load(Ordering::Relaxed);\n if id != 0 {\n return Some(Ok(id))\n }\n }\n } else {\n if CURRENT_LOOP.is_set() {\n let res = CURRENT_LOOP.with(|lp| {\n if lp.id == self.loop_handle.id {\n Some(lp.add_source(self.source.take().unwrap()))\n } else {\n None\n }\n });\n if let Some(id) = res {\n return Some(Ok(id));\n }\n }\n }\n\n None\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if self.scheduled { return; }\n self.scheduled = true;\n self.loop_handle.add_source_(self.source.take().unwrap(), self.id.clone(), wake);\n }\n}\n<commit_msg>s\/Waiter\/Arc<Wake>\/<commit_after>use std::cell::{Cell, RefCell};\nuse std::io::{self, ErrorKind};\nuse std::sync::Arc;\nuse std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};\nuse std::sync::mpsc;\n\nuse mio;\nuse slab::Slab;\nuse futures::{Future, Tokens, Wake};\n\npub type Source = Arc<mio::Evented + Send + Sync>;\n\nstatic NEXT_LOOP_ID: AtomicUsize = ATOMIC_USIZE_INIT;\nscoped_thread_local!(static CURRENT_LOOP: Loop);\n\nconst SLAB_CAPACITY: usize = 1024 * 64;\n\npub struct Loop {\n id: usize,\n active: Cell<bool>,\n io: RefCell<mio::Poll>,\n tx: mio::channel::Sender<Message>,\n rx: mio::channel::Receiver<Message>,\n dispatch: RefCell<Slab<Scheduled, usize>>,\n}\n\n\/\/\/ Handle to an event loop, used to construct I\/O objects, send messages, and\n\/\/\/ otherwise interact indirectly with the event loop itself.\n\/\/\/\n\/\/\/ Handles can be cloned, and when cloned they will still refer to the\n\/\/\/ same underlying event loop.\n#[derive(Clone)]\npub struct LoopHandle {\n id: usize,\n tx: 
mio::channel::Sender<Message>,\n}\n\n#[derive(Copy, Clone)]\npub enum Direction {\n Read,\n Write,\n}\n\nstruct Scheduled {\n source: Source,\n reader: Option<Arc<Wake>>,\n writer: Option<Arc<Wake>>,\n}\n\nimpl Scheduled {\n fn waiter_for(&mut self, dir: Direction) -> &mut Option<Arc<Wake>> {\n match dir {\n Direction::Read => &mut self.reader,\n Direction::Write => &mut self.writer,\n }\n }\n\n fn event_set(&self) -> mio::EventSet {\n let mut set = mio::EventSet::none();\n if self.reader.is_some() {\n set = set | mio::EventSet::readable()\n }\n if self.writer.is_some() {\n set = set | mio::EventSet::writable()\n }\n set\n }\n}\n\nenum Message {\n AddSource(Source, Arc<AtomicUsize>, Arc<Wake>),\n DropSource(usize),\n Schedule(usize, Direction, Arc<Wake>),\n Deschedule(usize, Direction),\n Shutdown,\n}\n\nfn register(poll: &mut mio::Poll, token: usize, sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.register(&*sched.source,\n mio::Token(token),\n mio::EventSet::none(),\n mio::PollOpt::level())\n .unwrap();\n}\n\nfn reregister(poll: &mut mio::Poll, token: usize, sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.reregister(&*sched.source,\n mio::Token(token),\n sched.event_set(),\n mio::PollOpt::edge() | mio::PollOpt::oneshot())\n .unwrap();\n}\n\nfn deregister(poll: &mut mio::Poll, sched: &Scheduled) {\n \/\/ TODO: handle error\n poll.deregister(&*sched.source).unwrap();\n}\n\nimpl Loop {\n pub fn new() -> io::Result<Loop> {\n let (tx, rx) = mio::channel::from_std_channel(mpsc::channel());\n let io = try!(mio::Poll::new());\n try!(io.register(&rx,\n mio::Token(0),\n mio::EventSet::readable(),\n mio::PollOpt::edge()));\n Ok(Loop {\n id: NEXT_LOOP_ID.fetch_add(1, Ordering::Relaxed),\n active: Cell::new(true),\n io: RefCell::new(io),\n tx: tx,\n rx: rx,\n dispatch: RefCell::new(Slab::new_starting_at(1, SLAB_CAPACITY)),\n })\n }\n\n pub fn handle(&self) -> LoopHandle {\n LoopHandle {\n id: self.id,\n tx: self.tx.clone(),\n }\n }\n\n pub fn run<F: Future>(self, f: F) -> Result<F::Item, F::Error> {\n let (tx_res, rx_res) = mpsc::channel();\n let handle = self.handle();\n f.then(move |res| {\n handle.shutdown();\n tx_res.send(res)\n }).forget();\n\n while self.active.get() {\n let amt;\n \/\/ On Linux, Poll::poll is epoll_wait, which may return EINTR if a\n \/\/ ptracer attaches. 
This retry loop prevents crashing when\n \/\/ attaching strace, or similar.\n loop {\n match self.io.borrow_mut().poll(None) {\n Ok(a) => {\n amt = a;\n break;\n }\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n err @ Err(_) => {\n err.unwrap();\n }\n }\n }\n\n \/\/ TODO: coalesce token sets for a given Wake?\n for i in 0..amt {\n let event = self.io.borrow_mut().events().get(i).unwrap();\n let token = event.token().as_usize();\n if token == 0 {\n self.consume_queue();\n } else {\n let mut reader = None;\n let mut writer = None;\n\n if let Some(sched) = self.dispatch.borrow_mut().get_mut(token) {\n if event.kind().is_readable() {\n reader = sched.reader.take();\n }\n\n if event.kind().is_writable() {\n writer = sched.writer.take();\n }\n }\n\n CURRENT_LOOP.set(&self, || {\n if let Some(reader_wake) = reader.take() {\n reader_wake.wake(&Tokens::from_usize(token));\n }\n if let Some(writer_wake) = writer.take() {\n writer_wake.wake(&Tokens::from_usize(token));\n }\n });\n\n \/\/ For now, always reregister, to deal with the fact that\n \/\/ combined oneshot + read|write requires rearming even if\n \/\/ only one side fired.\n \/\/\n \/\/ TODO: optimize this\n if let Some(sched) = self.dispatch.borrow().get(token) {\n reregister(&mut self.io.borrow_mut(), token, &sched);\n }\n }\n }\n }\n\n rx_res.recv().unwrap()\n }\n\n fn add_source(&self, source: Source) -> usize {\n let sched = Scheduled {\n source: source,\n reader: None,\n writer: None,\n };\n let mut dispatch = self.dispatch.borrow_mut();\n \/\/ TODO: handle out of space\n let entry = dispatch.vacant_entry().unwrap();\n register(&mut self.io.borrow_mut(), entry.index(), &sched);\n entry.insert(sched).index()\n }\n\n fn drop_source(&self, token: usize) {\n let sched = self.dispatch.borrow_mut().remove(token).unwrap();\n deregister(&mut self.io.borrow_mut(), &sched);\n }\n\n fn schedule(&self, token: usize, dir: Direction, wake: Arc<Wake>) {\n let mut dispatch = self.dispatch.borrow_mut();\n let sched = dispatch.get_mut(token).unwrap();\n *sched.waiter_for(dir) = Some(wake);\n reregister(&mut self.io.borrow_mut(), token, sched);\n }\n\n fn deschedule(&self, token: usize, dir: Direction) {\n let mut dispatch = self.dispatch.borrow_mut();\n let sched = dispatch.get_mut(token).unwrap();\n *sched.waiter_for(dir) = None;\n reregister(&mut self.io.borrow_mut(), token, sched);\n }\n\n fn consume_queue(&self) {\n while let Ok(msg) = self.rx.try_recv() {\n self.notify(msg);\n }\n }\n\n fn notify(&self, msg: Message) {\n match msg {\n Message::AddSource(source, id, wake) => {\n let tok = self.add_source(source);\n id.store(tok, Ordering::Relaxed);\n wake.wake(&Tokens::from_usize(ADD_SOURCE_TOKEN));\n }\n Message::DropSource(tok) => self.drop_source(tok),\n Message::Schedule(tok, dir, wake) => self.schedule(tok, dir, wake),\n Message::Deschedule(tok, dir) => self.deschedule(tok, dir),\n Message::Shutdown => self.active.set(false),\n }\n }\n}\n\nimpl LoopHandle {\n fn send(&self, msg: Message) {\n let mut msg_dance = Some(msg);\n\n if CURRENT_LOOP.is_set() {\n CURRENT_LOOP.with(|lp| {\n if lp.id == self.id {\n \/\/ Need to execute all existing requests first, to ensure\n \/\/ that our message is processed \"in order\"\n lp.consume_queue();\n lp.notify(msg_dance.take().unwrap());\n }\n })\n }\n\n if let Some(msg) = msg_dance.take() {\n self.tx\n .send(msg)\n .map_err(|_| ())\n .expect(\"failed to send register message\") \/\/ todo: handle failure\n }\n }\n\n \/\/\/ Add a new source to an event loop, returning a future which will resolve\n 
\/\/\/ to the token that can be used to identify this source.\n \/\/\/\n \/\/\/ When a new I\/O object is created it needs to be communicated to the\n \/\/\/ event loop to ensure that it's registered and ready to receive\n \/\/\/ notifications. The event loop with then respond with a unique token that\n \/\/\/ this handle can be identified with (the resolved value of the returned\n \/\/\/ future).\n \/\/\/\n \/\/\/ This token is then passed in turn to each of the methods below to\n \/\/\/ interact with notifications on the I\/O object itself.\n pub fn add_source(&self, source: Source) -> AddSource {\n AddSource {\n loop_handle: self.clone(),\n source: Some(source),\n id: Arc::new(AtomicUsize::new(0)),\n scheduled: false,\n }\n }\n\n fn add_source_(&self, source: Source, id: Arc<AtomicUsize>, wake: Arc<Wake>) {\n self.send(Message::AddSource(source, id, wake));\n }\n\n \/\/\/ Begin listening for events on an event loop.\n \/\/\/\n \/\/\/ Once an I\/O object has been registered with the event loop through the\n \/\/\/ `add_source` method, this method can be used with the assigned token to\n \/\/\/ begin awaiting notifications.\n \/\/\/\n \/\/\/ The `dir` argument indicates how the I\/O object is expected to be\n \/\/\/ awaited on (either readable or writable) and the `wake` callback will be\n \/\/\/ invoked. Note that one the `wake` callback is invoked once it will not\n \/\/\/ be invoked again, it must be re-`schedule`d to continue receiving\n \/\/\/ notifications.\n pub fn schedule(&self, tok: usize, dir: Direction, wake: Arc<Wake>) {\n self.send(Message::Schedule(tok, dir, wake));\n }\n\n \/\/\/ Stop listening for events on an event loop.\n \/\/\/\n \/\/\/ Once a callback has been scheduled with the `schedule` method, it can be\n \/\/\/ unregistered from the event loop with this method. This method does not\n \/\/\/ guarantee that the callback will not be invoked if it hasn't already,\n \/\/\/ but a best effort will be made to ensure it is not called.\n pub fn deschedule(&self, tok: usize, dir: Direction) {\n self.send(Message::Deschedule(tok, dir));\n }\n\n \/\/\/ Unregister all information associated with a token on an event loop,\n \/\/\/ deallocating all internal resources assigned to the given token.\n \/\/\/\n \/\/\/ This method should be called whenever a source of events is being\n \/\/\/ destroyed. This will ensure that the event loop can reuse `tok` for\n \/\/\/ another I\/O object if necessary and also remove it from any poll\n \/\/\/ notifications and callbacks.\n \/\/\/\n \/\/\/ Note that wake callbacks may still be invoked after this method is\n \/\/\/ called as it may take some time for the message to drop a source to\n \/\/\/ reach the event loop. 
Despite this fact, this method will attempt to\n \/\/\/ ensure that the callbacks are **not** invoked, so pending scheduled\n \/\/\/ callbacks cannot be relied upon to get called.\n pub fn drop_source(&self, tok: usize) {\n self.send(Message::DropSource(tok));\n }\n\n pub fn shutdown(&self) {\n self.send(Message::Shutdown);\n }\n}\n\nconst ADD_SOURCE_TOKEN: usize = 0;\n\n\/\/\/ A future which will resolve a unique `tok` token for an I\/O object.\n\/\/\/\n\/\/\/ Created through the `LoopHandle::add_source` method, this future can also\n\/\/\/ resolve to an error if there's an issue communicating with the event loop.\npub struct AddSource {\n loop_handle: LoopHandle,\n source: Option<Source>,\n id: Arc<AtomicUsize>,\n scheduled: bool,\n}\n\nimpl Future for AddSource {\n type Item = usize;\n type Error = io::Error; \/\/ TODO: integrate channel error?\n\n fn poll(&mut self, tokens: &Tokens) -> Option<Result<usize, io::Error>> {\n if self.scheduled {\n if tokens.may_contain(&Tokens::from_usize(ADD_SOURCE_TOKEN)) {\n let id = self.id.load(Ordering::Relaxed);\n if id != 0 {\n return Some(Ok(id))\n }\n }\n } else {\n if CURRENT_LOOP.is_set() {\n let res = CURRENT_LOOP.with(|lp| {\n if lp.id == self.loop_handle.id {\n Some(lp.add_source(self.source.take().unwrap()))\n } else {\n None\n }\n });\n if let Some(id) = res {\n return Some(Ok(id));\n }\n }\n }\n\n None\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if self.scheduled { return; }\n self.scheduled = true;\n self.loop_handle.add_source_(self.source.take().unwrap(), self.id.clone(), wake);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for prime-factors<commit_after>#[inline]\nfn is_prime(n: u64) -> bool {\n if n <= 1 {\n return false;\n }\n\n if n <= 3 {\n return true;\n }\n\n if n % 2 == 0 || n % 3 == 0 {\n return false;\n }\n\n let mut i = 5;\n while i * i <= n {\n if n % i == 0 || n % (i + 2) == 0 {\n return false;\n }\n\n i += 6;\n }\n\n true\n}\n\npub fn factors(number: u64) -> Vec<u64> {\n let mut p = 2;\n let mut v = number;\n let mut result = Vec::new();\n\n while p <= v {\n if !is_prime(p) || v % p != 0 {\n p += 1;\n continue;\n }\n\n v \/= p;\n\n result.push(p);\n }\n\n result\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add rust folder and helloWorld file<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate orbital;\n\nuse orbital::{BmpFile, Color, EventOption, Window};\n\nuse std::fs::File;\nuse std::io::Read;\nuse std::process::Command;\nuse std::thread;\n\nuse package::Package;\n\npub mod package;\n\nfn draw(window: &mut Window, packages: &Vec<Box<Package>>, shutdown: &BmpFile, mouse_x: i32, mouse_y: i32){\n window.set(Color::rgba(0, 0, 0, 128));\n\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n let y = window.height() as isize - package.icon.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x && mouse_x < x + package.icon.width() as i32 {\n window.rect(x as i32, y as i32,\n package.icon.width() as u32, package.icon.height() as u32,\n Color::rgba(128, 128, 128, 128));\n\n \/*\n window.rect(x as i32, y as i32 - 16,\n package.name.len() as u32 * 8, 16,\n Color::rgba(0, 0, 0, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n window.char(c_x as i32, y as i32 - 16,\n c,\n Color::rgb(255, 255, 255),\n self.font.as_ptr() as usize);\n c_x += 8;\n }\n *\/\n }\n\n window.image(x as i32, y as i32,\n package.icon.width() as u32,\n package.icon.height() as u32,\n &package.icon);\n x = x + package.icon.width() as i32;\n }\n 
}\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as isize - shutdown.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x &&\n mouse_x < x + shutdown.width() as i32 {\n window.rect(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n Color::rgba(128, 128, 128, 128));\n }\n\n window.image(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n &shutdown);\n x = x + shutdown.width() as i32;\n }\n\n window.sync();\n}\n\nfn main() {\n let mut packages: Vec<Box<Package>> = Vec::new();\n\n \/\/TODO: Use a directory walk\n match File::open(\"file:\/apps\/\") {\n Ok(mut file) => {\n let mut string = String::new();\n if let Ok(_) = file.read_to_string(&mut string) {\n for folder in string.lines() {\n if folder.ends_with('\/') {\n packages.push(Package::from_path(&(\"file:\/apps\/\".to_string() + &folder)));\n }\n }\n }\n }\n Err(err) => println!(\"Failed to open apps: {}\", err),\n }\n \/*\n for package in packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n *\/\n\n let shutdown = BmpFile::from_path(\"file:\/ui\/actions\/system-shutdown.bmp\");\n if ! shutdown.has_data() {\n println!(\"Failed to read shutdown icon\");\n }\n\n let mut window = Window::new(0, 568, 800, 32, \"Launcher\").unwrap();\n\n draw(&mut window, &packages, &shutdown, -1, -1);\n 'running: loop {\n for event in window.events() {\n println!(\"{:?}\", event.to_option());\n match event.to_option() {\n EventOption::Mouse(mouse_event) => {\n draw(&mut window, &packages, &shutdown, mouse_event.x, mouse_event.y);\n\n if mouse_event.left_button {\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n if mouse_event.x >= x && mouse_event.x < x + package.icon.width() as i32 {\n if let Err(err) = Command::new(&package.binary).spawn() {\n println!(\"{}: Failed to launch: {}\", package.binary, err);\n }\n }\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as i32 - shutdown.height() as i32;\n if mouse_event.y >= y && mouse_event.x >= x &&\n mouse_event.x < x + shutdown.width() as i32 {\n File::create(\"acpi:off\");\n }\n }\n }\n },\n EventOption::Quit(_) => break 'running,\n _ => ()\n }\n }\n\n thread::yield_now();\n }\n}\n<commit_msg>Do not log events in launcher<commit_after>extern crate orbital;\n\nuse orbital::{BmpFile, Color, EventOption, Window};\n\nuse std::fs::File;\nuse std::io::Read;\nuse std::process::Command;\nuse std::thread;\n\nuse package::Package;\n\npub mod package;\n\nfn draw(window: &mut Window, packages: &Vec<Box<Package>>, shutdown: &BmpFile, mouse_x: i32, mouse_y: i32){\n window.set(Color::rgba(0, 0, 0, 128));\n\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n let y = window.height() as isize - package.icon.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x && mouse_x < x + package.icon.width() as i32 {\n window.rect(x as i32, y as i32,\n package.icon.width() as u32, package.icon.height() as u32,\n Color::rgba(128, 
128, 128, 128));\n\n \/*\n window.rect(x as i32, y as i32 - 16,\n package.name.len() as u32 * 8, 16,\n Color::rgba(0, 0, 0, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n window.char(c_x as i32, y as i32 - 16,\n c,\n Color::rgb(255, 255, 255),\n self.font.as_ptr() as usize);\n c_x += 8;\n }\n *\/\n }\n\n window.image(x as i32, y as i32,\n package.icon.width() as u32,\n package.icon.height() as u32,\n &package.icon);\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as isize - shutdown.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x &&\n mouse_x < x + shutdown.width() as i32 {\n window.rect(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n Color::rgba(128, 128, 128, 128));\n }\n\n window.image(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n &shutdown);\n x = x + shutdown.width() as i32;\n }\n\n window.sync();\n}\n\nfn main() {\n let mut packages: Vec<Box<Package>> = Vec::new();\n\n \/\/TODO: Use a directory walk\n match File::open(\"file:\/apps\/\") {\n Ok(mut file) => {\n let mut string = String::new();\n if let Ok(_) = file.read_to_string(&mut string) {\n for folder in string.lines() {\n if folder.ends_with('\/') {\n packages.push(Package::from_path(&(\"file:\/apps\/\".to_string() + &folder)));\n }\n }\n }\n }\n Err(err) => println!(\"Failed to open apps: {}\", err),\n }\n \/*\n for package in packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n *\/\n\n let shutdown = BmpFile::from_path(\"file:\/ui\/actions\/system-shutdown.bmp\");\n if ! 
shutdown.has_data() {\n println!(\"Failed to read shutdown icon\");\n }\n\n let mut window = Window::new(0, 568, 800, 32, \"Launcher\").unwrap();\n\n draw(&mut window, &packages, &shutdown, -1, -1);\n 'running: loop {\n for event in window.events() {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => {\n draw(&mut window, &packages, &shutdown, mouse_event.x, mouse_event.y);\n\n if mouse_event.left_button {\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n if mouse_event.x >= x && mouse_event.x < x + package.icon.width() as i32 {\n if let Err(err) = Command::new(&package.binary).spawn() {\n println!(\"{}: Failed to launch: {}\", package.binary, err);\n }\n }\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as i32 - shutdown.height() as i32;\n if mouse_event.y >= y && mouse_event.x >= x &&\n mouse_event.x < x + shutdown.width() as i32 {\n File::create(\"acpi:off\");\n }\n }\n }\n },\n EventOption::Quit(_) => break 'running,\n _ => ()\n }\n }\n\n thread::yield_now();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate orbital;\n\nuse orbital::{BmpFile, Color, EventOption, Window};\n\nuse std::fs::File;\nuse std::io::Read;\nuse std::process::Command;\nuse std::thread;\n\nuse package::Package;\n\npub mod package;\n\nfn draw(window: &mut Window, packages: &Vec<Box<Package>>, shutdown: &BmpFile, mouse_x: i32, mouse_y: i32){\n window.set(Color::rgba(0, 0, 0, 128));\n\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n let y = window.height() as isize - package.icon.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x && mouse_x < x + package.icon.width() as i32 {\n window.rect(x as i32, y as i32,\n package.icon.width() as u32, package.icon.height() as u32,\n Color::rgba(128, 128, 128, 128));\n\n \/*\n window.rect(x as i32, y as i32 - 16,\n package.name.len() as u32 * 8, 16,\n Color::rgba(0, 0, 0, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n window.char(c_x as i32, y as i32 - 16,\n c,\n Color::rgb(255, 255, 255),\n self.font.as_ptr() as usize);\n c_x += 8;\n }\n *\/\n }\n\n window.image(x as i32, y as i32,\n package.icon.width() as u32,\n package.icon.height() as u32,\n &package.icon);\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as isize - shutdown.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x &&\n mouse_x < x + shutdown.width() as i32 {\n window.rect(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n Color::rgba(128, 128, 128, 128));\n }\n\n window.image(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n &shutdown);\n x = x + shutdown.width() as i32;\n }\n\n window.sync();\n}\n\nfn main() {\n let mut packages: Vec<Box<Package>> = Vec::new();\n\n \/\/TODO: Use a directory walk\n match File::open(\"file:\/apps\/\") {\n Ok(mut file) => {\n let mut string = String::new();\n if let Ok(_) = file.read_to_string(&mut string) {\n for folder in string.lines() {\n if folder.ends_with('\/') {\n packages.push(Package::from_path(&(\"file:\/apps\/\".to_string() + &folder)));\n }\n }\n }\n }\n Err(err) => println!(\"Failed to open apps: {}\", err),\n }\n \/*\n for package in packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n 
path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n *\/\n\n let shutdown = BmpFile::from_path(\"file:\/ui\/actions\/system-shutdown.bmp\");\n if ! shutdown.has_data() {\n println!(\"Failed to read shutdown icon\");\n }\n\n let mut window = Window::new(0, 568, 800, 32, \"\").unwrap();\n\n draw(&mut window, &packages, &shutdown, -1, -1);\n 'running: loop {\n for event in window.events() {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => {\n draw(&mut window, &packages, &shutdown, mouse_event.x, mouse_event.y);\n\n if mouse_event.left_button {\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n if mouse_event.x >= x && mouse_event.x < x + package.icon.width() as i32 {\n if let Err(err) = Command::new(&package.binary).spawn() {\n println!(\"{}: Failed to launch: {}\", package.binary, err);\n }\n }\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as i32 - shutdown.height() as i32;\n if mouse_event.y >= y && mouse_event.x >= x &&\n mouse_event.x < x + shutdown.width() as i32 {\n File::create(\"acpi:off\");\n }\n }\n }\n },\n EventOption::Quit(_) => break 'running,\n _ => ()\n }\n }\n\n thread::yield_now();\n }\n}\n<commit_msg>Add launcher tooltips<commit_after>extern crate orbital;\n\nuse orbital::{BmpFile, Color, EventOption, Window};\n\nuse std::fs::File;\nuse std::io::Read;\nuse std::process::Command;\nuse std::thread;\n\nuse package::Package;\n\npub mod package;\n\nfn draw(window: &mut Window, packages: &Vec<Box<Package>>, shutdown: &BmpFile, mouse_x: i32, mouse_y: i32){\n window.set(Color::rgba(0, 0, 0, 0));\n let w = window.width();\n window.rect(0, 16, w, 32, Color::rgba(0, 0, 0, 128));\n\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n let y = window.height() as isize - package.icon.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x && mouse_x < x + package.icon.width() as i32 {\n window.rect(x as i32, y as i32,\n package.icon.width() as u32, package.icon.height() as u32,\n Color::rgba(128, 128, 128, 128));\n\n window.rect(x as i32, y as i32 - 16,\n package.name.len() as u32 * 8, 16,\n Color::rgba(0, 0, 0, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n window.char(c_x as i32, y as i32 - 16, c, Color::rgb(255, 255, 255));\n c_x += 8;\n }\n }\n\n window.image(x as i32, y as i32,\n package.icon.width() as u32,\n package.icon.height() as u32,\n &package.icon);\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as isize - shutdown.height() as isize;\n\n if mouse_y >= y as i32 && mouse_x >= x &&\n mouse_x < x + shutdown.width() as i32 {\n window.rect(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n Color::rgba(128, 128, 128, 128));\n }\n\n window.image(x as i32, y as i32,\n shutdown.width() as u32, shutdown.height() as u32,\n &shutdown);\n x = x + shutdown.width() as i32;\n }\n\n window.sync();\n}\n\nfn main() {\n let mut packages: Vec<Box<Package>> = Vec::new();\n\n \/\/TODO: Use a directory walk\n match File::open(\"file:\/apps\/\") {\n Ok(mut file) 
=> {\n let mut string = String::new();\n if let Ok(_) = file.read_to_string(&mut string) {\n for folder in string.lines() {\n if folder.ends_with('\/') {\n packages.push(Package::from_path(&(\"file:\/apps\/\".to_string() + &folder)));\n }\n }\n }\n }\n Err(err) => println!(\"Failed to open apps: {}\", err),\n }\n \/*\n for package in packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') &&\n path.ends_with(&accept.get_slice(Some(1), None))) ||\n (accept.ends_with('*') &&\n path.starts_with(&accept.get_slice(None, Some(accept.len() - 1)))) {\n accepted = true;\n break;\n }\n }\n if accepted {\n if Command::new(&package.binary).arg(&path).spawn_scheme().is_none() {\n println!(\"{}: Failed to launch\", package.binary);\n }\n break;\n }\n }\n *\/\n\n let shutdown = BmpFile::from_path(\"file:\/ui\/actions\/system-shutdown.bmp\");\n if ! shutdown.has_data() {\n println!(\"Failed to read shutdown icon\");\n }\n\n let mut window = Window::new(0, 600 - 48, 800, 48, \"\").unwrap();\n\n draw(&mut window, &packages, &shutdown, -1, -1);\n 'running: loop {\n for event in window.events() {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => {\n draw(&mut window, &packages, &shutdown, mouse_event.x, mouse_event.y);\n\n if mouse_event.left_button {\n let mut x = 0;\n for package in packages.iter() {\n if package.icon.has_data() {\n let y = window.height() as i32 - package.icon.height() as i32;\n if mouse_event.y >= y && mouse_event.x >= x &&\n mouse_event.x < x + package.icon.width() as i32 {\n if let Err(err) = Command::new(&package.binary).spawn() {\n println!(\"{}: Failed to launch: {}\", package.binary, err);\n }\n }\n x = x + package.icon.width() as i32;\n }\n }\n\n if shutdown.has_data() {\n x = window.width() as i32 - shutdown.width() as i32;\n let y = window.height() as i32 - shutdown.height() as i32;\n if mouse_event.y >= y && mouse_event.x >= x &&\n mouse_event.x < x + shutdown.width() as i32 {\n File::create(\"acpi:off\");\n }\n }\n }\n },\n EventOption::Quit(_) => break 'running,\n _ => ()\n }\n }\n\n thread::yield_now();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>array<commit_after>fn main() {\n let a = [1, 2, 3];\n println!(\"a has {} elements\", a.len());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add function call test, Update gdb and test docker image install gdb by ppa.<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This test does not passed with gdb < 8.0. 
See #53497.\n\/\/ min-gdb-version 8.0\n\n\/\/ compile-flags:-g\n\n\/\/ === GDB TESTS ===================================================================================\n\n\/\/ gdb-command:run\n\n\/\/ gdb-command:print fun(45, true)\n\/\/ gdb-check:$1 = true\n\/\/ gdb-command:print fun(444, false)\n\/\/ gdb-check:$2 = false\n\n\/\/ gdb-command:print r.get_x()\n\/\/ gdb-check:$3 = 4\n\n#![allow(dead_code, unused_variables)]\n\nstruct RegularStruct {\n x: i32\n}\n\nimpl RegularStruct {\n fn get_x(&self) -> i32 {\n self.x\n }\n}\n\nfn main() {\n let _ = fun(4, true);\n let r = RegularStruct{x: 4};\n let _ = r.get_x();\n\n zzz(); \/\/ #break\n}\n\nfn fun(x: isize, y: bool) -> bool {\n y\n}\n\nfn zzz() { () }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Handle overlapping gradient stops for linear gradients<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Property based testing caught a nasty bug where empty lines were interpreted as records.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed, draft version<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lib: fix deprecation warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial dump of draft graph implementation<commit_after>use std::collections::VecDeque;\nuse std::collections::HashSet;\n\n\/\/\/ A graph is represeted by as a weighted\n\/\/\/ [Adjenceny matrix](http:\/\/en.wikipedia.org\/wiki\/Adjacency_matrix)\nstruct Graph {\n graph: Vec<Vec<Option<i32>>>\n}\nimpl Graph {\n fn new(input: Vec<Vec<Option<i32>>>) -> Graph { Graph { graph: input } }\n\n \/*\n 1 procedure BFS(G,v) is\n 2 let Q be a queue\n 3 Q.push(v)\n 4 label v as discovered\n 5 while Q is not empty\n 6 v ← Q.pop()\n 7 for all edges from v to w in G.adjacentEdges(v) do\n 8 if w is not labeled as discovered\n 9 Q.push(w)\n 10 label w as discovered\n *\/\n fn bfs(&self, start: usize, target: usize) -> Option<VecDeque<usize>> {\n let mut q: VecDeque<usize> = VecDeque::new();\n let mut discovered: HashSet<usize> = HashSet::new();\n let mut prev: Vec<usize> = Vec::with_capacity(self.graph.len());\n let mut pathfound = false;\n for _ in (0..self.graph.len()) { prev.push(0); }\n\n q.push_back(start);\n discovered.insert(start);\n \/\/println!(\"q size: {} and is empty: {}\", q.len(), q.is_empty());\n\n while !q.is_empty() {\n let v = q.pop_front();\n match v {\n None => {}, \/\/ q is empty\n Some(v) => { \/\/ we are working on a new layer, branch the queue?\n \/\/ println!(\"Checking out: {}\", v);\n if !discovered.contains(&v) {\n \/\/println!(\"{:?} does not contain: {}\", discovered, v);\n discovered.insert(v);\n }\n if v == target {\n \/\/println!(\"Target located: q:{:?}, disco: {:?}, prev: {:?}\", q, discovered, prev);\n pathfound = true;\n }\n for i in (0..self.graph[v].len()) {\n match self.graph[v][i] {\n None => {}, \/\/ no vertex between v and i\n Some(_) => {\n \/\/println!(\" A vertex between: {} and {} exists. 
q: {:?}, disco: {:?}\", v, i, q, discovered);\n if !discovered.contains(&i) {\n q.push_back(i);\n prev[i]=v; \/\/track prev (v) on i\n }\n }\n }\n }\n }\n }\n }\n if pathfound {\n let mut path: VecDeque<usize> = VecDeque::new();\n let mut curr = target;\n \/\/ backtrack over the prev array to construc the path\n while curr != start {\n path.push_front(curr);\n curr = prev[curr];\n }\n path.push_front(start);\n return Some(path)\n }\n return None\n }\n}\n\n\n#[test]\nfn bfs_test() {\n let testgraph = vec![vec![Some(0), Some(20), Some(80), Some(50), None, None, None],\n vec![ None, Some(0), None, None, None, None, None],\n vec![ None, None, Some(0), None, None, None, Some(50)],\n vec![ None, None, None, Some(0), Some(50), None, None],\n vec![ None, None, Some(20), None, Some(0), Some(50), Some(40)],\n vec![ None, None, None, None, None, Some(0), None],\n vec![ None, None, None, None, None, None, Some(0)]];\n let start: usize = 0;\n let target: usize = 6;\n let g = Graph::new(testgraph);\n let res = g.bfs(start, target);\n \/\/assert_eq!(res.is_none(), false);\n match res {\n None => {\n println!(\"bfs search returned None\");\n assert!(false);\n }\n Some(result) => {\n println!(\"Bfs returned something: {:?}\", result);\n assert_eq!(result[result.len()-1], target);\n assert_eq!(result[0], start);\n \/\/assert!(false);\n }\n }\n}\n\n#[test]\nfn bfs_test_no_valid_path() {\n let testgraph = vec![vec![Some(0), Some(20), Some(80), Some(50), None, None, None],\n vec![ None, Some(0), None, None, None, None, None],\n vec![ None, None, Some(0), None, None, None, Some(50)],\n vec![ None, None, None, Some(0), Some(50), None, None],\n vec![ None, None, Some(20), None, Some(0), None, Some(40)],\n vec![ None, None, None, None, None, Some(0), None],\n vec![ None, None, None, None, None, None, Some(0)]];\n let start: usize = 0;\n let target: usize = 5; \/\/ There is no valid path between 0 and 5\n let g = Graph::new(testgraph);\n let res = g.bfs(start, target);\n\n \/\/ The expected return value is None\n match res {\n None => {\n println!(\"bfs search returned None\");\n assert!(true);\n }\n Some(result) => {\n println!(\"Bfs returned something: {:?}\", result);\n assert_eq!(result[result.len()-1], target);\n assert_eq!(result[0], start);\n assert!(false);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use absolute pathname of the current working directory<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ TODO:\n\/\/\n\/\/ panic here or panic there?\n\/\/ * catch_unwind is 6x slower than a normal call with panic=abort\n\/\/ * catch_unwind is 7x slower than a normal call with panic=unwind\n\/\/ * much of this is management of PANIC_COUNT\n\/\/ * also attributed to the indirect call\n\/\/ * data point - wangle deals with C++ exceptions\n\/\/\n\/\/ select() and returning a future back\n\/\/ * maybe this is just streams...\n\nmod cell;\nmod slot;\nmod util;\n\nmod error;\npub use error::{PollError, PollResult, FutureError, FutureResult};\n\n\/\/ Primitive futures\nmod collect;\nmod done;\nmod empty;\nmod failed;\nmod finished;\nmod lazy;\nmod promise;\npub use collect::{collect, Collect};\npub use done::{done, Done};\npub use empty::{empty, Empty};\npub use failed::{failed, Failed};\npub use finished::{finished, Finished};\npub use lazy::{lazy, Lazy};\npub use promise::{promise, Promise, Complete};\n\n\/\/ combinators\nmod and_then;\nmod chain;\nmod flatten;\nmod impls;\nmod join;\nmod map;\nmod map_err;\nmod or_else;\nmod select;\nmod then;\npub use and_then::AndThen;\npub use 
flatten::Flatten;\npub use join::Join;\npub use map::Map;\npub use map_err::MapErr;\npub use or_else::OrElse;\npub use select::Select;\npub use then::Then;\n\n\/\/ streams\npub mod stream;\n\n\/\/ TODO: Send + 'static is annoying, but required by cancel and_then, document\n\/\/ TODO: not object safe\n\/\/\n\/\/ FINISH CONDITIONS\n\/\/ - poll() return Some\n\/\/ - await() is called\n\/\/ - schedule() is called\n\/\/ - schedule_boxed() is called\n\/\/\n\/\/ BAD:\n\/\/ - doing any finish condition after an already called finish condition\n\/\/\n\/\/ WHAT HAPPENS\n\/\/ - panic?\npub trait Future: Send + 'static {\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn poll(&mut self) -> Option<PollResult<Self::Item, Self::Error>>;\n\n \/\/ TODO: why is this not drop()\n \/\/ well what if you schedule() then drop, HUH?!\n fn cancel(&mut self);\n\n fn schedule<F>(&mut self, f: F)\n where F: FnOnce(PollResult<Self::Item, Self::Error>) + Send + 'static,\n Self: Sized;\n\n fn schedule_boxed(&mut self, f: Box<Callback<Self::Item, Self::Error>>);\n\n \/\/ TODO: why can't this be in this lib?\n \/\/ fn await(&mut self) -> FutureResult<Self::Item, Self::Error>;\n\n fn boxed(self) -> Box<Future<Item=Self::Item, Error=Self::Error>>\n where Self: Sized\n {\n Box::new(self)\n }\n\n fn map<F, U>(self, f: F) -> Map<Self, F>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n assert_future::<U, Self::Error, _>(map::new(self, f))\n }\n\n fn map2<F, U>(self, f: F) -> Box<Future<Item=U, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n self.then(|r| r.map(f)).boxed()\n }\n\n fn map_err<F, E>(self, f: F) -> MapErr<Self, F>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n assert_future::<Self::Item, E, _>(map_err::new(self, f))\n }\n\n fn map_err2<F, E>(self, f: F) -> Box<Future<Item=Self::Item, Error=E>>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n self.then(|res| res.map_err(f)).boxed()\n }\n\n fn then<F, B>(self, f: F) -> Then<Self, B, F>\n where F: FnOnce(Result<Self::Item, Self::Error>) -> B + Send + 'static,\n B: IntoFuture,\n Self: Sized,\n {\n assert_future::<B::Item, B::Error, _>(then::new(self, f))\n }\n\n fn and_then<F, B>(self, f: F) -> AndThen<Self, B, F>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n assert_future::<B::Item, Self::Error, _>(and_then::new(self, f))\n }\n\n fn and_then2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => f(e).into_future().boxed(),\n Err(e) => failed(e).boxed(),\n }\n }).boxed()\n }\n\n fn or_else<F, B>(self, f: F) -> OrElse<Self, B, F>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n assert_future::<Self::Item, B::Error, _>(or_else::new(self, f))\n }\n\n fn or_else2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=B::Error>>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => finished(e).boxed(),\n Err(e) => f(e).into_future().boxed(),\n }\n }).boxed()\n }\n\n fn select<B>(self, other: B) -> Select<Self, B::Future>\n where B: IntoFuture<Item=Self::Item, 
Error=Self::Error>,\n Self: Sized,\n {\n let f = select::new(self, other.into_future());\n assert_future::<Self::Item, Self::Error, _>(f)\n }\n\n fn join<B>(self, other: B) -> Join<Self, B::Future>\n where B: IntoFuture<Error=Self::Error>,\n Self: Sized,\n {\n let f = join::new(self, other.into_future());\n assert_future::<(Self::Item, B::Item), Self::Error, _>(f)\n }\n\n fn flatten(self) -> Flatten<Self>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n let f = flatten::new(self);\n assert_future::<<<Self as Future>::Item as IntoFuture>::Item,\n <<Self as Future>::Item as IntoFuture>::Error,\n _>(f)\n }\n\n fn flatten2(self) -> Box<Future<Item=<<Self as Future>::Item as IntoFuture>::Item,\n Error=<<Self as Future>::Item as IntoFuture>::Error>>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n self.then(|res| {\n match res {\n Ok(e) => e.into_future().boxed(),\n Err(e) => failed(From::from(e)).boxed(),\n }\n }).boxed()\n }\n}\n\nfn assert_future<A, B, F>(t: F) -> F\n where F: Future<Item=A, Error=B>,\n A: Send + 'static,\n B: Send + 'static,\n{\n t\n}\n\npub trait Callback<T, E>: Send + 'static {\n fn call(self: Box<Self>, result: PollResult<T, E>);\n}\n\nimpl<F, T, E> Callback<T, E> for F\n where F: FnOnce(PollResult<T, E>) + Send + 'static\n{\n fn call(self: Box<F>, result: PollResult<T, E>) {\n (*self)(result)\n }\n}\n\npub trait IntoFuture: Send + 'static {\n type Future: Future<Item=Self::Item, Error=Self::Error>;\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn into_future(self) -> Self::Future;\n}\n\nimpl<F: Future> IntoFuture for F {\n type Future = F;\n type Item = F::Item;\n type Error = F::Error;\n\n fn into_future(self) -> F {\n self\n }\n}\n<commit_msg>more data<commit_after>\/\/ TODO:\n\/\/\n\/\/ panic here or panic there?\n\/\/ * catch_unwind is 6x slower than a normal call with panic=abort\n\/\/ * can be 3x if we deal with PANIC_COUNT\n\/\/ * catch_unwind is 7x slower than a normal call with panic=unwind\n\/\/ * perspective, allocation is 20x slower than a noop call\n\/\/ * also attributed to the indirect call\n\/\/ * data point - wangle deals with C++ exceptions\n\/\/\n\/\/ select() and returning a future back\n\/\/ * maybe this is just streams...\n\nmod cell;\nmod slot;\nmod util;\n\nmod error;\npub use error::{PollError, PollResult, FutureError, FutureResult};\n\n\/\/ Primitive futures\nmod collect;\nmod done;\nmod empty;\nmod failed;\nmod finished;\nmod lazy;\nmod promise;\npub use collect::{collect, Collect};\npub use done::{done, Done};\npub use empty::{empty, Empty};\npub use failed::{failed, Failed};\npub use finished::{finished, Finished};\npub use lazy::{lazy, Lazy};\npub use promise::{promise, Promise, Complete};\n\n\/\/ combinators\nmod and_then;\nmod chain;\nmod flatten;\nmod impls;\nmod join;\nmod map;\nmod map_err;\nmod or_else;\nmod select;\nmod then;\npub use and_then::AndThen;\npub use flatten::Flatten;\npub use join::Join;\npub use map::Map;\npub use map_err::MapErr;\npub use or_else::OrElse;\npub use select::Select;\npub use then::Then;\n\n\/\/ streams\npub mod stream;\n\n\/\/ TODO: Send + 'static is annoying, but required by cancel and_then, document\n\/\/ TODO: not object safe\n\/\/\n\/\/ FINISH CONDITIONS\n\/\/ - poll() return Some\n\/\/ - await() is called\n\/\/ - schedule() is called\n\/\/ - schedule_boxed() is called\n\/\/\n\/\/ BAD:\n\/\/ - doing any finish 
condition after an already called finish condition\n\/\/\n\/\/ WHAT HAPPENS\n\/\/ - panic?\npub trait Future: Send + 'static {\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn poll(&mut self) -> Option<PollResult<Self::Item, Self::Error>>;\n\n \/\/ TODO: why is this not drop()\n \/\/ well what if you schedule() then drop, HUH?!\n fn cancel(&mut self);\n\n fn schedule<F>(&mut self, f: F)\n where F: FnOnce(PollResult<Self::Item, Self::Error>) + Send + 'static,\n Self: Sized;\n\n fn schedule_boxed(&mut self, f: Box<Callback<Self::Item, Self::Error>>);\n\n \/\/ TODO: why can't this be in this lib?\n \/\/ fn await(&mut self) -> FutureResult<Self::Item, Self::Error>;\n\n fn boxed(self) -> Box<Future<Item=Self::Item, Error=Self::Error>>\n where Self: Sized\n {\n Box::new(self)\n }\n\n fn map<F, U>(self, f: F) -> Map<Self, F>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n assert_future::<U, Self::Error, _>(map::new(self, f))\n }\n\n fn map2<F, U>(self, f: F) -> Box<Future<Item=U, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n self.then(|r| r.map(f)).boxed()\n }\n\n fn map_err<F, E>(self, f: F) -> MapErr<Self, F>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n assert_future::<Self::Item, E, _>(map_err::new(self, f))\n }\n\n fn map_err2<F, E>(self, f: F) -> Box<Future<Item=Self::Item, Error=E>>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n self.then(|res| res.map_err(f)).boxed()\n }\n\n fn then<F, B>(self, f: F) -> Then<Self, B, F>\n where F: FnOnce(Result<Self::Item, Self::Error>) -> B + Send + 'static,\n B: IntoFuture,\n Self: Sized,\n {\n assert_future::<B::Item, B::Error, _>(then::new(self, f))\n }\n\n fn and_then<F, B>(self, f: F) -> AndThen<Self, B, F>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n assert_future::<B::Item, Self::Error, _>(and_then::new(self, f))\n }\n\n fn and_then2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => f(e).into_future().boxed(),\n Err(e) => failed(e).boxed(),\n }\n }).boxed()\n }\n\n fn or_else<F, B>(self, f: F) -> OrElse<Self, B, F>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n assert_future::<Self::Item, B::Error, _>(or_else::new(self, f))\n }\n\n fn or_else2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=B::Error>>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => finished(e).boxed(),\n Err(e) => f(e).into_future().boxed(),\n }\n }).boxed()\n }\n\n fn select<B>(self, other: B) -> Select<Self, B::Future>\n where B: IntoFuture<Item=Self::Item, Error=Self::Error>,\n Self: Sized,\n {\n let f = select::new(self, other.into_future());\n assert_future::<Self::Item, Self::Error, _>(f)\n }\n\n fn join<B>(self, other: B) -> Join<Self, B::Future>\n where B: IntoFuture<Error=Self::Error>,\n Self: Sized,\n {\n let f = join::new(self, other.into_future());\n assert_future::<(Self::Item, B::Item), Self::Error, _>(f)\n }\n\n fn flatten(self) -> Flatten<Self>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n 
From<<Self as Future>::Error>,\n Self: Sized\n {\n let f = flatten::new(self);\n assert_future::<<<Self as Future>::Item as IntoFuture>::Item,\n <<Self as Future>::Item as IntoFuture>::Error,\n _>(f)\n }\n\n fn flatten2(self) -> Box<Future<Item=<<Self as Future>::Item as IntoFuture>::Item,\n Error=<<Self as Future>::Item as IntoFuture>::Error>>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n self.then(|res| {\n match res {\n Ok(e) => e.into_future().boxed(),\n Err(e) => failed(From::from(e)).boxed(),\n }\n }).boxed()\n }\n}\n\nfn assert_future<A, B, F>(t: F) -> F\n where F: Future<Item=A, Error=B>,\n A: Send + 'static,\n B: Send + 'static,\n{\n t\n}\n\npub trait Callback<T, E>: Send + 'static {\n fn call(self: Box<Self>, result: PollResult<T, E>);\n}\n\nimpl<F, T, E> Callback<T, E> for F\n where F: FnOnce(PollResult<T, E>) + Send + 'static\n{\n fn call(self: Box<F>, result: PollResult<T, E>) {\n (*self)(result)\n }\n}\n\npub trait IntoFuture: Send + 'static {\n type Future: Future<Item=Self::Item, Error=Self::Error>;\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn into_future(self) -> Self::Future;\n}\n\nimpl<F: Future> IntoFuture for F {\n type Future = F;\n type Item = F::Item;\n type Error = F::Error;\n\n fn into_future(self) -> F {\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More var stuff<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #15235 : Sawyer47\/rust\/issue-12552, r=huonw<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ this code used to cause an ICE\n\nfn main() {\n let t = Err(0);\n match t {\n Some(k) => match k { \/\/~ ERROR mismatched types\n a => println!(\"{}\", a)\n },\n None => () \/\/~ ERROR mismatched types\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-win32\nextern mod extra;\n\nuse std::comm::*;\nuse std::task;\n\nstruct complainer {\n c: SharedChan<bool>,\n}\n\nimpl Drop for complainer {\n fn drop(&self) {\n error!(\"About to send!\");\n self.c.send(true);\n error!(\"Sent!\");\n }\n}\n\nfn complainer(c: SharedChan<bool>) -> complainer {\n error!(\"Hello!\");\n complainer {\n c: c\n }\n}\n\nfn f(c: SharedChan<bool>) {\n let _c = complainer(c);\n fail!();\n}\n\npub fn main() {\n let (p, c) = stream();\n let c = SharedChan::new(c);\n task::spawn_unlinked(|| f(c.clone()) );\n error!(\"hiiiiiiiii\");\n assert!(p.recv());\n}\n<commit_msg>xfailed the myserious failing test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-test\nextern mod extra;\n\nuse std::comm::*;\nuse std::task;\n\nstruct complainer {\n c: SharedChan<bool>,\n}\n\nimpl Drop for complainer {\n fn drop(&self) {\n error!(\"About to send!\");\n self.c.send(true);\n error!(\"Sent!\");\n }\n}\n\nfn complainer(c: SharedChan<bool>) -> complainer {\n error!(\"Hello!\");\n complainer {\n c: c\n }\n}\n\nfn f(c: SharedChan<bool>) {\n let _c = complainer(c);\n fail!();\n}\n\npub fn main() {\n let (p, c) = stream();\n let c = SharedChan::new(c);\n task::spawn_unlinked(|| f(c.clone()) );\n error!(\"hiiiiiiiii\");\n assert!(p.recv());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>doc vec<commit_after><|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse rustc_serialize::Decodable;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse rustc_serialize::json::{\n self,\n Decoder,\n Json,\n};\nuse Component;\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, Default, PartialEq)]\npub struct Claims {\n pub reg: Registered,\n pub private: BTreeMap<String, Json>,\n}\n\n#[derive(Debug, Default, PartialEq, RustcDecodable, RustcEncodable)]\npub struct Registered {\n pub iss: Option<String>,\n pub sub: Option<String>,\n pub aud: Option<String>,\n pub exp: Option<u64>,\n pub nbf: Option<u64>,\n pub iat: Option<u64>,\n pub jti: Option<String>,\n}\n\n\/\/\/ JWT Claims. 
Registered claims are directly accessible via the `Registered`\n\/\/\/ struct embedded, while private fields are a map that contains `Json`\n\/\/\/ values.\nimpl Claims {\n pub fn new(reg: Registered) -> Claims {\n Claims {\n reg: reg,\n private: BTreeMap::new(),\n }\n }\n}\n\nimpl Component for Claims {\n fn from_base64(raw: &str) -> Result<Claims, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n const FIELDS: [&'static str; 7] = [\n \"iss\", \"sub\", \"aud\",\n \"exp\", \"nbf\", \"iat\",\n \"jti\",\n ];\n\n let (reg, pri): (BTreeMap<_, _>, BTreeMap<_, _>) = tree.into_iter()\n .partition(|&(ref key, _)| {\n FIELDS.iter().any(|f| f == key)\n });\n\n let mut decoder = Decoder::new(Json::Object(reg));\n let reg_claims: Registered = try!(Decodable::decode(&mut decoder));\n\n Ok(Claims{\n reg: reg_claims,\n private: pri,\n })\n }\n\n fn to_base64(&self) -> Result<String, Error> {\n \/\/ Extremely inefficient\n let s = try!(json::encode(&self.reg));\n let mut tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n tree.extend(self.private.clone());\n\n let s = try!(json::encode(&tree));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::default::Default;\n use claims::Claims;\n use Component;\n\n #[test]\n fn from_base64() {\n let enc = \"ew0KICAiaXNzIjogIm1pa2t5YW5nLmNvbSIsDQogICJleHAiOiAxMzAyMzE5MTAwLA0KICAibmFtZSI6ICJNaWNoYWVsIFlhbmciLA0KICAiYWRtaW4iOiB0cnVlDQp9\";\n let claims = Claims::from_base64(enc).unwrap();\n\n assert_eq!(claims.reg.iss.unwrap(), \"mikkyang.com\");\n assert_eq!(claims.reg.exp.unwrap(), 1302319100);\n }\n\n #[test]\n fn roundtrip() {\n let mut claims: Claims = Default::default();\n claims.reg.iss = Some(\"mikkyang.com\".into());\n claims.reg.exp = Some(1302319100);\n let enc = claims.to_base64().unwrap();\n assert_eq!(claims, Claims::from_base64(&*enc).unwrap());\n }\n}\n<commit_msg>Test multiple claim types<commit_after>use std::collections::BTreeMap;\nuse rustc_serialize::Decodable;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse rustc_serialize::json::{\n self,\n Decoder,\n Json,\n};\nuse Component;\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, Default, PartialEq)]\npub struct Claims {\n pub reg: Registered,\n pub private: BTreeMap<String, Json>,\n}\n\n#[derive(Debug, Default, PartialEq, RustcDecodable, RustcEncodable)]\npub struct Registered {\n pub iss: Option<String>,\n pub sub: Option<String>,\n pub aud: Option<String>,\n pub exp: Option<u64>,\n pub nbf: Option<u64>,\n pub iat: Option<u64>,\n pub jti: Option<String>,\n}\n\n\/\/\/ JWT Claims. 
Registered claims are directly accessible via the `Registered`\n\/\/\/ struct embedded, while private fields are a map that contains `Json`\n\/\/\/ values.\nimpl Claims {\n pub fn new(reg: Registered) -> Claims {\n Claims {\n reg: reg,\n private: BTreeMap::new(),\n }\n }\n}\n\nimpl Component for Claims {\n fn from_base64(raw: &str) -> Result<Claims, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n const FIELDS: [&'static str; 7] = [\n \"iss\", \"sub\", \"aud\",\n \"exp\", \"nbf\", \"iat\",\n \"jti\",\n ];\n\n let (reg, pri): (BTreeMap<_, _>, BTreeMap<_, _>) = tree.into_iter()\n .partition(|&(ref key, _)| {\n FIELDS.iter().any(|f| f == key)\n });\n\n let mut decoder = Decoder::new(Json::Object(reg));\n let reg_claims: Registered = try!(Decodable::decode(&mut decoder));\n\n Ok(Claims{\n reg: reg_claims,\n private: pri,\n })\n }\n\n fn to_base64(&self) -> Result<String, Error> {\n \/\/ Extremely inefficient\n let s = try!(json::encode(&self.reg));\n let mut tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n tree.extend(self.private.clone());\n\n let s = try!(json::encode(&tree));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::default::Default;\n use claims::{Claims, Registered};\n use Component;\n\n #[test]\n fn from_base64() {\n let enc = \"ew0KICAiaXNzIjogIm1pa2t5YW5nLmNvbSIsDQogICJleHAiOiAxMzAyMzE5MTAwLA0KICAibmFtZSI6ICJNaWNoYWVsIFlhbmciLA0KICAiYWRtaW4iOiB0cnVlDQp9\";\n let claims = Claims::from_base64(enc).unwrap();\n\n assert_eq!(claims.reg.iss.unwrap(), \"mikkyang.com\");\n assert_eq!(claims.reg.exp.unwrap(), 1302319100);\n }\n\n #[test]\n fn multiple_types() {\n let enc = \"ew0KICAiaXNzIjogIm1pa2t5YW5nLmNvbSIsDQogICJleHAiOiAxMzAyMzE5MTAwLA0KICAibmFtZSI6ICJNaWNoYWVsIFlhbmciLA0KICAiYWRtaW4iOiB0cnVlDQp9\";\n let claims = Registered::from_base64(enc).unwrap();\n\n assert_eq!(claims.iss.unwrap(), \"mikkyang.com\");\n assert_eq!(claims.exp.unwrap(), 1302319100);\n }\n\n #[test]\n fn roundtrip() {\n let mut claims: Claims = Default::default();\n claims.reg.iss = Some(\"mikkyang.com\".into());\n claims.reg.exp = Some(1302319100);\n let enc = claims.to_base64().unwrap();\n assert_eq!(claims, Claims::from_base64(&*enc).unwrap());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Remove from a given motion (row based)\n pub fn remove_rb<'a>(&mut self, (x, y): (usize, usize)) {\n if y == self.y() {\n \/\/ Single line mode\n let (a, b) = if self.x() < x {\n (self.x(), x)\n } else {\n (x, self.x())\n };\n for i in a..b {\n self.text[y].remove(i);\n }\n } else {\n \/\/ Full line mode\n let (a, b) = if self.y() < y {\n (self.y(), y)\n } else {\n (y, self.y())\n };\n for ln in a..(b + 1) {\n self.text.remove(ln);\n }\n }\n }\n}\n<commit_msg>Fix out-of-bound bug<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Remove from a given motion (row based)\n pub fn remove_rb<'a>(&mut self, (x, y): (usize, usize)) {\n if y == self.y() {\n \/\/ Single line mode\n let (a, b) = if self.x() < x {\n (self.x(), x)\n } else {\n (x, self.x())\n };\n for _ in a..b {\n self.text[y].remove(a);\n }\n } else {\n \/\/ Full line mode\n let (a, b) = if self.y() < y {\n (self.y(), y)\n } else {\n (y, self.y())\n };\n for _ in a..(b + 1) {\n if self.text.len() > 1 {\n self.text.remove(a);\n 
}\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:hammer: Enable to ignore projects in rescheduled tasks<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove unused enum fro mreward<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Let the user configure the authentication mecanisms<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Outsource helper: getting the link partial from Entry<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nextern crate url;\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse url::{Url, UrlParser};\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut url = Url::from_file_path(&docs).unwrap();\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self,\n file: &Path,\n contents: &str,\n errors: &mut bool)\n{\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\",\n file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache,\n root: &Path,\n dir: &Path,\n url: &mut Url,\n errors: &mut bool)\n{\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());\n if kind.is_dir() {\n walk(cache, root, &path, url, errors);\n } else {\n let pretty_path = check(cache, root, &path, url, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to to reduce memory-usage\n entry.source = String::new();\n }\n }\n url.path_mut().unwrap().pop();\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n base: &Url,\n errors: &mut bool) -> Option<PathBuf>\n{\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") ||\n file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n if file.ends_with(\"std\/sys\/ext\/index.html\") {\n return None;\n }\n\n if let Some(file) = file.to_str() {\n \/\/ FIXME(#31948)\n if file.contains(\"ParseFloatError\") {\n return None;\n }\n \/\/ weird reexports, but this module is on its way out, so chalk it up to\n \/\/ \"rustdoc weirdness\" and move on from there\n if file.contains(\"scoped_tls\") {\n return None;\n }\n }\n\n let mut parser = UrlParser::new();\n parser.base_url(base);\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n 
with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path. If either of these fail then we\n \/\/ just keep going.\n let (parsed_url, path) = match url_to_file_path(&parser, url) {\n Some((url, path)) => (url, PathBuf::from(path)),\n None => return,\n };\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => panic!(format!(\"{}\", err)),\n Err(LoadError::BrokenRedirect(target, _)) => {\n print!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(), i + 1, target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = parsed_url.fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\",\n fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: PathBuf,\n redirect: Redirect) -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n },\n Entry::Vacant(entry) => {\n let mut fp = try!(File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n }));\n try!(fp.read_to_string(&mut contents)\n .map_err(|err| LoadError::IOError(err)));\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n },\n };\n let base = Url::from_file_path(&file).unwrap();\n let mut parser = UrlParser::new();\n parser.base_url(&base);\n\n match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {\n Some((_, redirect_file)) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents))\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn 
url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {\n parser.parse(url).ok().and_then(|parsed_url| {\n parsed_url.to_file_path().ok().map(|f| (parsed_url, f))\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str,\n attr: &str,\n mut f: F)\n{\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len() ..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<commit_msg>Add FIXME for linkchecker whitlists<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nextern crate url;\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse url::{Url, UrlParser};\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut url = Url::from_file_path(&docs).unwrap();\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self,\n file: &Path,\n contents: &str,\n errors: &mut bool)\n{\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\",\n file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache,\n root: &Path,\n dir: &Path,\n url: &mut Url,\n errors: &mut bool)\n{\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());\n if kind.is_dir() {\n walk(cache, root, &path, url, errors);\n } else {\n let pretty_path = check(cache, root, &path, url, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to to reduce memory-usage\n entry.source = String::new();\n }\n }\n url.path_mut().unwrap().pop();\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n base: &Url,\n errors: &mut bool) -> Option<PathBuf>\n{\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n if file.ends_with(\"std\/sys\/ext\/index.html\") {\n return None;\n }\n\n if let Some(file) = file.to_str() {\n \/\/ FIXME(#31948)\n if file.contains(\"ParseFloatError\") {\n return None;\n }\n \/\/ weird reexports, but this module is on its way out, so chalk it up to\n \/\/ \"rustdoc weirdness\" and move on from there\n if file.contains(\"scoped_tls\") {\n return None;\n }\n }\n\n let mut parser = UrlParser::new();\n parser.base_url(base);\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ 
]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path. If either of these fail then we\n \/\/ just keep going.\n let (parsed_url, path) = match url_to_file_path(&parser, url) {\n Some((url, path)) => (url, PathBuf::from(path)),\n None => return,\n };\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => panic!(format!(\"{}\", err)),\n Err(LoadError::BrokenRedirect(target, _)) => {\n print!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(), i + 1, target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = parsed_url.fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\",\n fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: PathBuf,\n redirect: Redirect) -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n },\n Entry::Vacant(entry) => {\n let mut fp = try!(File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n }));\n try!(fp.read_to_string(&mut contents)\n .map_err(|err| LoadError::IOError(err)));\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n },\n };\n let base = Url::from_file_path(&file).unwrap();\n let mut parser = UrlParser::new();\n parser.base_url(&base);\n\n match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {\n Some((_, redirect_file)) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents))\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n 
})\n}\n\nfn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {\n parser.parse(url).ok().and_then(|parsed_url| {\n parsed_url.to_file_path().ok().map(|f| (parsed_url, f))\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str,\n attr: &str,\n mut f: F)\n{\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len() ..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Init commit<commit_after>use std::fs::File;\nuse std::io::{BufRead, BufReader};\nuse std::path::Path;\nuse std::str::FromStr;\nuse std::thread;\n\nfn main() {\n\n let mat_path = Path::new(\"big_matrix_1.txt\");\n\n let mat_file = File::open(&mat_path).unwrap();\n\n let mat_reader = BufReader::new(mat_file);\n\n let mut mat_line_iterator = mat_reader.lines();\n\n let mut mat_content: Vec<Vec<u32>> = Vec::new();\n\n loop {\n match mat_line_iterator.next() {\n Some(result) => {\n let mat_line = result.unwrap();\n let slice_iter = mat_line.as_slice().split(',')\n .map(|x| {\n FromStr::from_str(x).unwrap()\n });\n mat_content.push(slice_iter.collect());\n },\n \n None => break,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing types test<commit_after>use crate::interpreter::types;\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\n#[test]\nfn test_single_type_match() {\n let num1 = types::Type::Number(32f64);\n let mut val = match_type!(&num1,\n types::Type::Number(value) => value,\n _ => &-1f64\n );\n\n assert_eq!(val , &32f64);\n\n let num2 = types::Type::Reference(Rc::new(RefCell::new(types::Type::Number(32f64))));\n val = match_type!(&num2,\n types::Type::Number(value) => value,\n _ => &-1f64\n );\n\n assert_eq!(val , &32f64);\n}\n\n#[test]\nfn test_multiple_type_match() {\n let num1 = types::Type::Number(32f64);\n let num11 = types::Type::Number(18f64);\n let mut val = match_type!((&num1, &num11),\n (types::Type::Number(value1), types::Type::Number(value2)) => value1 + value2,\n _ => -1f64\n );\n\n assert_eq!(val , 50f64);\n\n let num2 = types::Type::Reference(Rc::new(RefCell::new(types::Type::Number(32f64))));\n let num21 = types::Type::Reference(Rc::new(RefCell::new(types::Type::Number(18f64))));\n val = match_type!((&num1, &num21),\n (types::Type::Number(value1), types::Type::Number(value2)) => value1 + value2,\n _ => -1f64\n );\n\n assert_eq!(val , 50f64);\n\n val = match_type!((&num11, &num2),\n (types::Type::Number(value1), types::Type::Number(value2)) => value1 + value2,\n _ => -1f64\n );\n\n assert_eq!(val , 50f64);\n\n val = match_type!((&num2, &num21),\n (types::Type::Number(value1), types::Type::Number(value2)) => value1 + value2,\n _ => -1f64\n );\n\n assert_eq!(val , 50f64);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor imag-link to fit new store iterator interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Context module<commit_after><|endoftext|>"} 
{"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\npub const DBUS_TIMEOUT: i32 = 20000; \/\/ millieconds\n\npub const STRATIS_VERSION: &'static str = \"1\";\npub const MANAGER_NAME: &'static str = \"\/Manager\";\npub const STRATIS_BASE_PATH: &'static str = \"\/org\/storage\/stratis1\";\npub const STRATIS_BASE_SERVICE: &'static str = \"org.storage.stratis1\";\npub const STRATIS_BASE_MANAGER: &'static str = \"\/org\/storage\/stratis1\/Manager\";\npub const STRATIS_MANAGER_INTERFACE: &'static str = \"org.storage.stratis1.Manager\";\npub const STRATIS_POOL_BASE_INTERFACE: &'static str = \"org.storage.stratis1.pool\";\npub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = \"org.storage.stratis1.volume\";\npub const STRATIS_DEV_BASE_INTERFACE: &'static str = \"org.storage.stratis1.dev\";\npub const STRATIS_CACHE_BASE_INTERFACE: &'static str = \"org.storage.stratis1.cache\";\npub const STRATIS_POOL_BASE_PATH: &'static str = \"\/org\/storage\/stratis\/pool\";\n\n\npub const LIST_POOLS: &'static str = \"ListPools\";\npub const CREATE_POOL: &'static str = \"CreatePool\";\npub const DESTROY_POOL: &'static str = \"DestroyPool\";\npub const GET_POOL_OBJECT_PATH: &'static str = \"GetPoolObjectPath\";\npub const GET_VOLUME_OBJECT_PATH: &'static str = \"GetVolumeObjectPath\";\npub const GET_DEV_OBJECT_PATH: &'static str = \"GetDevObjectPath\";\npub const GET_CACHE_OBJECT_PATH: &'static str = \"GetCacheObjectPath\";\npub const GET_ERROR_CODES: &'static str = \"GetErrorCodes\";\npub const GET_RAID_LEVELS: &'static str = \"GetRaidLevels\";\npub const GET_DEV_TYPES: &'static str = \"GetDevTypes\";\n\npub trait HasCodes {\n \/\/\/ Indicates that this enum can be converted to an int or described\n \/\/\/ with a string.\n fn get_error_int(&self) -> u16;\n fn get_error_string(&self) -> &str;\n}\n\ncustom_derive! {\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusErrorVariants),\n IterVariantNames(StratisDBusErrorVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisErrorEnum {\n STRATIS_OK,\n STRATIS_ERROR,\n STRATIS_NULL,\n STRATIS_NOTFOUND,\n STRATIS_POOL_NOTFOUND,\n STRATIS_VOLUME_NOTFOUND,\n STRATIS_DEV_NOTFOUND,\n STRATIS_CACHE_NOTFOUND,\n STRATIS_BAD_PARAM,\n STRATIS_ALREADY_EXISTS,\n STRATIS_NULL_NAME,\n STRATIS_NO_POOLS,\n STRATIS_LIST_FAILURE,\n }\n}\n\nimpl HasCodes for StratisErrorEnum {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n \/\/ TODO deal with internationalization\/do this better\n StratisErrorEnum::STRATIS_OK => \"Ok\",\n StratisErrorEnum::STRATIS_ERROR => \"A general error happened\",\n StratisErrorEnum::STRATIS_NULL => \"Null parameter was supplied\",\n StratisErrorEnum::STRATIS_NOTFOUND => \"Not found\",\n StratisErrorEnum::STRATIS_POOL_NOTFOUND => \"Pool not found\",\n StratisErrorEnum::STRATIS_VOLUME_NOTFOUND => \"Volume not found\",\n StratisErrorEnum::STRATIS_CACHE_NOTFOUND => \"Cache not found\",\n StratisErrorEnum::STRATIS_BAD_PARAM => \"Bad parameter\",\n StratisErrorEnum::STRATIS_DEV_NOTFOUND => \"Dev not found\",\n StratisErrorEnum::STRATIS_ALREADY_EXISTS => \"Already exists\",\n StratisErrorEnum::STRATIS_NULL_NAME => \"Null name supplied\",\n StratisErrorEnum::STRATIS_NO_POOLS => \"No pools\",\n StratisErrorEnum::STRATIS_LIST_FAILURE => \"List operation failure.\",\n }\n }\n}\n\ncustom_derive! 
{\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusRaidTypeVariants),\n IterVariantNames(StratisDBusRaidTypeVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisRaidType {\n STRATIS_RAID_TYPE_UNKNOWN,\n STRATIS_RAID_TYPE_SINGLE,\n STRATIS_RAID_TYPE_RAID1,\n STRATIS_RAID_TYPE_RAID5,\n STRATIS_RAID_TYPE_RAID6,\n }\n}\n\nimpl HasCodes for StratisRaidType {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n StratisRaidType::STRATIS_RAID_TYPE_UNKNOWN => \"Ok\",\n StratisRaidType::STRATIS_RAID_TYPE_SINGLE => \"Single\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID1 => \"Mirrored\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID5 => {\n \"Block-level striping with distributed parity\"\n }\n StratisRaidType::STRATIS_RAID_TYPE_RAID6 => {\n \"Block-level striping with two distributed parities\"\n }\n }\n }\n}\n<commit_msg>Make string for unknown raid type be \"unknown\"<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\npub const DBUS_TIMEOUT: i32 = 20000; \/\/ millieconds\n\npub const STRATIS_VERSION: &'static str = \"1\";\npub const MANAGER_NAME: &'static str = \"\/Manager\";\npub const STRATIS_BASE_PATH: &'static str = \"\/org\/storage\/stratis1\";\npub const STRATIS_BASE_SERVICE: &'static str = \"org.storage.stratis1\";\npub const STRATIS_BASE_MANAGER: &'static str = \"\/org\/storage\/stratis1\/Manager\";\npub const STRATIS_MANAGER_INTERFACE: &'static str = \"org.storage.stratis1.Manager\";\npub const STRATIS_POOL_BASE_INTERFACE: &'static str = \"org.storage.stratis1.pool\";\npub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = \"org.storage.stratis1.volume\";\npub const STRATIS_DEV_BASE_INTERFACE: &'static str = \"org.storage.stratis1.dev\";\npub const STRATIS_CACHE_BASE_INTERFACE: &'static str = \"org.storage.stratis1.cache\";\npub const STRATIS_POOL_BASE_PATH: &'static str = \"\/org\/storage\/stratis\/pool\";\n\n\npub const LIST_POOLS: &'static str = \"ListPools\";\npub const CREATE_POOL: &'static str = \"CreatePool\";\npub const DESTROY_POOL: &'static str = \"DestroyPool\";\npub const GET_POOL_OBJECT_PATH: &'static str = \"GetPoolObjectPath\";\npub const GET_VOLUME_OBJECT_PATH: &'static str = \"GetVolumeObjectPath\";\npub const GET_DEV_OBJECT_PATH: &'static str = \"GetDevObjectPath\";\npub const GET_CACHE_OBJECT_PATH: &'static str = \"GetCacheObjectPath\";\npub const GET_ERROR_CODES: &'static str = \"GetErrorCodes\";\npub const GET_RAID_LEVELS: &'static str = \"GetRaidLevels\";\npub const GET_DEV_TYPES: &'static str = \"GetDevTypes\";\n\npub trait HasCodes {\n \/\/\/ Indicates that this enum can be converted to an int or described\n \/\/\/ with a string.\n fn get_error_int(&self) -> u16;\n fn get_error_string(&self) -> &str;\n}\n\ncustom_derive! 
{\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusErrorVariants),\n IterVariantNames(StratisDBusErrorVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisErrorEnum {\n STRATIS_OK,\n STRATIS_ERROR,\n STRATIS_NULL,\n STRATIS_NOTFOUND,\n STRATIS_POOL_NOTFOUND,\n STRATIS_VOLUME_NOTFOUND,\n STRATIS_DEV_NOTFOUND,\n STRATIS_CACHE_NOTFOUND,\n STRATIS_BAD_PARAM,\n STRATIS_ALREADY_EXISTS,\n STRATIS_NULL_NAME,\n STRATIS_NO_POOLS,\n STRATIS_LIST_FAILURE,\n }\n}\n\nimpl HasCodes for StratisErrorEnum {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n \/\/ TODO deal with internationalization\/do this better\n StratisErrorEnum::STRATIS_OK => \"Ok\",\n StratisErrorEnum::STRATIS_ERROR => \"A general error happened\",\n StratisErrorEnum::STRATIS_NULL => \"Null parameter was supplied\",\n StratisErrorEnum::STRATIS_NOTFOUND => \"Not found\",\n StratisErrorEnum::STRATIS_POOL_NOTFOUND => \"Pool not found\",\n StratisErrorEnum::STRATIS_VOLUME_NOTFOUND => \"Volume not found\",\n StratisErrorEnum::STRATIS_CACHE_NOTFOUND => \"Cache not found\",\n StratisErrorEnum::STRATIS_BAD_PARAM => \"Bad parameter\",\n StratisErrorEnum::STRATIS_DEV_NOTFOUND => \"Dev not found\",\n StratisErrorEnum::STRATIS_ALREADY_EXISTS => \"Already exists\",\n StratisErrorEnum::STRATIS_NULL_NAME => \"Null name supplied\",\n StratisErrorEnum::STRATIS_NO_POOLS => \"No pools\",\n StratisErrorEnum::STRATIS_LIST_FAILURE => \"List operation failure.\",\n }\n }\n}\n\ncustom_derive! {\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusRaidTypeVariants),\n IterVariantNames(StratisDBusRaidTypeVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisRaidType {\n STRATIS_RAID_TYPE_UNKNOWN,\n STRATIS_RAID_TYPE_SINGLE,\n STRATIS_RAID_TYPE_RAID1,\n STRATIS_RAID_TYPE_RAID5,\n STRATIS_RAID_TYPE_RAID6,\n }\n}\n\nimpl HasCodes for StratisRaidType {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n StratisRaidType::STRATIS_RAID_TYPE_UNKNOWN => \"Unknown\",\n StratisRaidType::STRATIS_RAID_TYPE_SINGLE => \"Single\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID1 => \"Mirrored\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID5 => {\n \"Block-level striping with distributed parity\"\n }\n StratisRaidType::STRATIS_RAID_TYPE_RAID6 => {\n \"Block-level striping with two distributed parities\"\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free[^free] foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! [^free]: Strictly speaking, there are some symbols which are needed but\n\/\/! they aren't always necessary.\n\/\/!\n\/\/! 
The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/! Please note that all of these details are currently not considered stable.\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_panic` - This function takes four arguments, a\n\/\/! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments\n\/\/! dictate the panic message, the file at which panic was invoked, and the\n\/\/! line and column inside the file. It is up to consumers of this core\n\/\/! library to define this panic function; it is only required to never\n\/\/! return. This requires a `lang` attribute named `panic_impl`.\n\/\/!\n\/\/! * `rust_eh_personality` - is used by the failure mechanisms of the\n\/\/! compiler. This is often mapped to GCC's personality function, but crates\n\/\/! which do not trigger a panic can be assured that this function is never\n\/\/! called. The `lang` attribute is called `eh_personality`.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\/\/\n\/\/ Here we explicitly #[cfg]-out this whole crate when testing. If we don't do\n\/\/ this, both the generated test artifact and the linked libtest (which\n\/\/ transitively includes libcore) will both define the same set of lang items,\n\/\/ and this will cause the E0152 \"duplicate lang item found\" error. 
See\n\/\/ discussion in #50466 for details.\n\/\/\n\/\/ This cfg won't affect doc tests.\n#![cfg(not(test))]\n\n#![stable(feature = \"core\", since = \"1.6.0\")]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\",\n test(no_crate_inject, attr(deny(warnings))),\n test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]\n\n#![no_core]\n#![deny(missing_docs)]\n#![deny(missing_debug_implementations)]\n\n#![feature(allow_internal_unstable)]\n#![feature(arbitrary_self_types)]\n#![feature(asm)]\n#![feature(associated_type_defaults)]\n#![feature(attr_literals)]\n#![feature(cfg_target_has_atomic)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(const_int_ops)]\n#![feature(core_float)]\n#![feature(custom_attribute)]\n#![feature(doc_cfg)]\n#![feature(doc_spotlight)]\n#![feature(extern_types)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(lang_items)]\n#![feature(link_llvm_intrinsics)]\n#![feature(never_type)]\n#![feature(exhaustive_patterns)]\n#![feature(macro_at_most_once_rep)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(prelude_import)]\n#![feature(repr_simd, platform_intrinsics)]\n#![feature(rustc_attrs)]\n#![feature(rustc_const_unstable)]\n#![feature(simd_ffi)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(specialization)]\n#![feature(staged_api)]\n#![feature(stmt_expr_attributes)]\n#![feature(unboxed_closures)]\n#![feature(untagged_unions)]\n#![feature(unwind_attributes)]\n#![feature(doc_alias)]\n#![feature(inclusive_range_methods)]\n#![feature(repr_transparent)]\n#![feature(mmx_target_feature)]\n#![feature(tbm_target_feature)]\n#![feature(sse4a_target_feature)]\n#![feature(arm_target_feature)]\n#![feature(powerpc_target_feature)]\n#![feature(mips_target_feature)]\n#![feature(aarch64_target_feature)]\n#![feature(const_slice_len)]\n#![feature(const_str_as_bytes)]\n#![feature(const_str_len)]\n\n#[prelude_import]\n#[allow(unused)]\nuse prelude::v1::*;\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod internal_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n#[path = \"num\/i128.rs\"] pub mod i128;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n#[path = \"num\/u128.rs\"] pub mod u128;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod mem;\npub mod ptr;\npub mod hint;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod any;\npub mod 
array;\npub mod ascii;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panic;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\npub mod time;\n\npub mod unicode;\n\n\/* Async *\/\npub mod future;\npub mod task;\n\n\/* Heap memory allocator trait *\/\n#[allow(missing_docs)]\npub mod alloc;\n\n\/\/ note: does not need to be public\nmod iter_private;\nmod nonzero;\nmod tuple;\nmod unit;\n\n\/\/ Pull in the the `coresimd` crate directly into libcore. This is where all the\n\/\/ architecture-specific (and vendor-specific) intrinsics are defined. AKA\n\/\/ things like SIMD and such. Note that the actual source for all this lies in a\n\/\/ different repository, rust-lang-nursery\/stdsimd. That's why the setup here is\n\/\/ a bit wonky.\n#[allow(unused_macros)]\nmacro_rules! test_v16 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v32 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v64 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v128 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v256 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v512 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }\n#[path = \"..\/stdsimd\/coresimd\/mod.rs\"]\n#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))] \/\/ allow changes to how stdsimd works in stage0\nmod coresimd;\n\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))]\npub use coresimd::simd;\n#[stable(feature = \"simd_arch\", since = \"1.27.0\")]\n#[cfg(not(stage0))]\npub use coresimd::arch;\n<commit_msg>remove useless feature(repr_transparent)<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free[^free] foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! [^free]: Strictly speaking, there are some symbols which are needed but\n\/\/! they aren't always necessary.\n\/\/!\n\/\/! The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/! Please note that all of these details are currently not considered stable.\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! 
often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_panic` - This function takes four arguments, a\n\/\/! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments\n\/\/! dictate the panic message, the file at which panic was invoked, and the\n\/\/! line and column inside the file. It is up to consumers of this core\n\/\/! library to define this panic function; it is only required to never\n\/\/! return. This requires a `lang` attribute named `panic_impl`.\n\/\/!\n\/\/! * `rust_eh_personality` - is used by the failure mechanisms of the\n\/\/! compiler. This is often mapped to GCC's personality function, but crates\n\/\/! which do not trigger a panic can be assured that this function is never\n\/\/! called. The `lang` attribute is called `eh_personality`.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\/\/\n\/\/ Here we explicitly #[cfg]-out this whole crate when testing. If we don't do\n\/\/ this, both the generated test artifact and the linked libtest (which\n\/\/ transitively includes libcore) will both define the same set of lang items,\n\/\/ and this will cause the E0152 \"duplicate lang item found\" error. See\n\/\/ discussion in #50466 for details.\n\/\/\n\/\/ This cfg won't affect doc tests.\n#![cfg(not(test))]\n\n#![stable(feature = \"core\", since = \"1.6.0\")]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\",\n test(no_crate_inject, attr(deny(warnings))),\n test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]\n\n#![no_core]\n#![deny(missing_docs)]\n#![deny(missing_debug_implementations)]\n\n#![feature(allow_internal_unstable)]\n#![feature(arbitrary_self_types)]\n#![feature(asm)]\n#![feature(associated_type_defaults)]\n#![feature(attr_literals)]\n#![feature(cfg_target_has_atomic)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(const_int_ops)]\n#![feature(core_float)]\n#![feature(custom_attribute)]\n#![feature(doc_cfg)]\n#![feature(doc_spotlight)]\n#![feature(extern_types)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(lang_items)]\n#![feature(link_llvm_intrinsics)]\n#![feature(never_type)]\n#![feature(exhaustive_patterns)]\n#![feature(macro_at_most_once_rep)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(prelude_import)]\n#![feature(repr_simd, 
platform_intrinsics)]\n#![feature(rustc_attrs)]\n#![feature(rustc_const_unstable)]\n#![feature(simd_ffi)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(specialization)]\n#![feature(staged_api)]\n#![feature(stmt_expr_attributes)]\n#![feature(unboxed_closures)]\n#![feature(untagged_unions)]\n#![feature(unwind_attributes)]\n#![feature(doc_alias)]\n#![feature(inclusive_range_methods)]\n#![feature(mmx_target_feature)]\n#![feature(tbm_target_feature)]\n#![feature(sse4a_target_feature)]\n#![feature(arm_target_feature)]\n#![feature(powerpc_target_feature)]\n#![feature(mips_target_feature)]\n#![feature(aarch64_target_feature)]\n#![feature(const_slice_len)]\n#![feature(const_str_as_bytes)]\n#![feature(const_str_len)]\n\n#[prelude_import]\n#[allow(unused)]\nuse prelude::v1::*;\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod internal_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n#[path = \"num\/i128.rs\"] pub mod i128;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n#[path = \"num\/u128.rs\"] pub mod u128;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod mem;\npub mod ptr;\npub mod hint;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod any;\npub mod array;\npub mod ascii;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panic;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\npub mod time;\n\npub mod unicode;\n\n\/* Async *\/\npub mod future;\npub mod task;\n\n\/* Heap memory allocator trait *\/\n#[allow(missing_docs)]\npub mod alloc;\n\n\/\/ note: does not need to be public\nmod iter_private;\nmod nonzero;\nmod tuple;\nmod unit;\n\n\/\/ Pull in the the `coresimd` crate directly into libcore. This is where all the\n\/\/ architecture-specific (and vendor-specific) intrinsics are defined. AKA\n\/\/ things like SIMD and such. Note that the actual source for all this lies in a\n\/\/ different repository, rust-lang-nursery\/stdsimd. That's why the setup here is\n\/\/ a bit wonky.\n#[allow(unused_macros)]\nmacro_rules! test_v16 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v32 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v64 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v128 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v256 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v512 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! 
vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }\n#[path = \"..\/stdsimd\/coresimd\/mod.rs\"]\n#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))] \/\/ allow changes to how stdsimd works in stage0\nmod coresimd;\n\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))]\npub use coresimd::simd;\n#[stable(feature = \"simd_arch\", since = \"1.27.0\")]\n#[cfg(not(stage0))]\npub use coresimd::arch;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for double_parens lint.<commit_after>#![feature(plugin)]\n#![plugin(clippy)]\n\n#![deny(double_parens)]\n#![allow(dead_code)]\n\nfn dummy_fn<T>(_: T) {}\n\nstruct DummyStruct;\n\nimpl DummyStruct {\n fn dummy_method<T>(self, _: T) {}\n}\n\nfn simple_double_parens() -> i32 {\n ((0)) \/\/~ERROR Consider removing unnecessary double parentheses\n}\n\nfn fn_double_parens() {\n dummy_fn((0)); \/\/~ERROR Consider removing unnecessary double parentheses\n}\n\nfn method_double_parens(x: DummyStruct) {\n x.dummy_method((0)); \/\/~ERROR Consider removing unnecessary double parentheses\n}\n\nfn tuple_double_parens() -> (i32, i32) {\n ((1, 2)) \/\/~ERROR Consider removing unnecessary double parentheses\n}\n\nfn unit_double_parens() {\n (()) \/\/~ERROR Consider removing unnecessary double parentheses\n}\n\nfn fn_tuple_ok() {\n dummy_fn((1, 2));\n}\n\nfn method_tuple_ok(x: DummyStruct) {\n x.dummy_method((1, 2));\n}\n\nfn fn_unit_ok() {\n dummy_fn(());\n}\n\nfn method_unit_ok(x: DummyStruct) {\n x.dummy_method(());\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>KMP string matching<commit_after>\/\/ Palindrome substrings in O(n), Manacher's algorithm\n\/\/ length of odd palin centred at s[i] is len[2*i]\n\/\/ even palin btwn s[i],s[i+1]: len[2*i+1]\n\/\/ TODO: check for underflows\n\/\/ Alternative version:\n\/\/ for c in (..)\n\/\/ while bla\n\/\/ len[c] += 2;\n\/\/ if len[c]-r+i == len[c-r+i] { len.push(len[c]-r+i); r += 1; }\nfn find_pals(text: &[u8]) -> Vec<usize> {\n let mut len = Vec::with_capacity(2*text.len() - 1); \n len.push(1); len.push(0);\n let mut i = 1;\n while i < 2*text.len() - 2 {\n let max_len = ::std::cmp::min(i+1, 2*text.len()-1-i);\n while len[i] < max_len && text[(i-len[i]-1)\/2] == text[(i+len[i]+1)\/2] {\n len[i] += 2;\n }\n let mut d = 1;\n while len[i-d] < len[i]-d { len[i+d] = len[i-d]; d += 1; }\n len[i+d] = len[i]-d;\n i += d;\n }\n len\n}\n\n\/\/ fail[i] = len of longest proper prefix-suffix of pat[0...i]\nfn kmp_init(pat: &[u8]) -> Vec<usize> {\n let mut fail = Vec::with_capacity(pat.len());\n fail.push(0);\n let mut j = 0;\n for ch in &pat[1..] 
{\n while j > 0 && pat[j] != *ch { j = fail[j-1]; }\n if pat[j] == *ch { j += 1; }\n fail.push(j);\n }\n fail\n}\n\n\/\/ matches[i] = len of longest prefix of pat matching with suffix of text[0...i]\nfn kmp_match(text: &[u8], pat: &[u8]) -> Vec<usize> {\n let fail = kmp_init(pat);\n let mut matches = Vec::with_capacity(text.len());\n let mut j = 0;\n for ch in text {\n if j == pat.len() { j = fail[j-1]; }\n while j > 0 && pat[j] != *ch { j = fail[j-1]; }\n if pat[j] == *ch { j += 1; }\n matches.push(j);\n }\n matches\n}\n\n#[cfg(test)]\nmod test {\n use super::*;\n \n #[test]\n fn test_string() {\n let text = \"abcbc\".as_bytes();\n let pat = \"bc\".as_bytes();\n let matches = kmp_match(text, pat);\n let pal_len = find_pals(text);\n assert_eq!(matches, vec![0, 1, 2, 1, 2]);\n assert_eq!(pal_len, vec![1, 0, 1, 0, 3, 0, 3, 0, 1]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Details of the `#[ruma_api(...)]` attributes.\n\nuse syn::{\n parse::{Parse, ParseStream},\n Ident, Token,\n};\n\n\/\/\/ Like syn::MetaNameValue, but expects an identifier as the value. Also, we don't care about the\n\/\/\/ the span of the equals sign, so we don't have the `eq_token` field from syn::MetaNameValue.\npub struct MetaNameValue {\n \/\/\/ The part left of the equals sign\n pub name: Ident,\n \/\/\/ The part right of the equals sign\n pub value: Ident,\n}\n\n\/\/\/ Like syn::Meta, but only parses ruma_api attributes\npub enum Meta {\n \/\/\/ A single word, like `query` in `#[ruma_api(query)]`\n Word(Ident),\n \/\/\/ A name-value pair, like `header = CONTENT_TYPE` in `#[ruma_api(header = CONTENT_TYPE)]`\n NameValue(MetaNameValue),\n}\n\nimpl Meta {\n \/\/\/ Check if the given attribute is a ruma_api attribute. If it is, parse it.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if the given attribute is a ruma_api attribute, but fails to parse.\n pub fn from_attribute(attr: &syn::Attribute) -> Option<Self> {\n match &attr.path {\n syn::Path {\n leading_colon: None,\n segments,\n } => {\n if segments.len() == 1 && segments[0].ident == \"ruma_api\" {\n Some(\n attr.parse_args()\n .expect(\"ruma_api! could not parse request field attributes\"),\n )\n } else {\n None\n }\n }\n _ => None,\n }\n }\n}\n\nimpl Parse for Meta {\n fn parse(input: ParseStream) -> syn::Result<Self> {\n let ident = input.parse()?;\n\n if input.peek(Token![=]) {\n let _ = input.parse::<Token![=]>();\n Ok(Meta::NameValue(MetaNameValue {\n name: ident,\n value: input.parse()?,\n }))\n } else {\n Ok(Meta::Word(ident))\n }\n }\n}\n<commit_msg>ruma-api-macros: Slight simplification of api\/attribute.rs<commit_after>\/\/! Details of the `#[ruma_api(...)]` attributes.\n\nuse syn::{\n parse::{Parse, ParseStream},\n Ident, Token,\n};\n\n\/\/\/ Like syn::MetaNameValue, but expects an identifier as the value. Also, we don't care about the\n\/\/\/ the span of the equals sign, so we don't have the `eq_token` field from syn::MetaNameValue.\npub struct MetaNameValue {\n \/\/\/ The part left of the equals sign\n pub name: Ident,\n \/\/\/ The part right of the equals sign\n pub value: Ident,\n}\n\n\/\/\/ Like syn::Meta, but only parses ruma_api attributes\npub enum Meta {\n \/\/\/ A single word, like `query` in `#[ruma_api(query)]`\n Word(Ident),\n \/\/\/ A name-value pair, like `header = CONTENT_TYPE` in `#[ruma_api(header = CONTENT_TYPE)]`\n NameValue(MetaNameValue),\n}\n\nimpl Meta {\n \/\/\/ Check if the given attribute is a ruma_api attribute. 
If it is, parse it.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if the given attribute is a ruma_api attribute, but fails to parse.\n pub fn from_attribute(attr: &syn::Attribute) -> Option<Self> {\n match &attr.path {\n syn::Path {\n leading_colon: None,\n segments,\n } if segments.len() == 1 && segments[0].ident == \"ruma_api\" => Some(\n attr.parse_args()\n .expect(\"ruma_api! could not parse request field attributes\"),\n ),\n _ => None,\n }\n }\n}\n\nimpl Parse for Meta {\n fn parse(input: ParseStream) -> syn::Result<Self> {\n let ident = input.parse()?;\n\n if input.peek(Token![=]) {\n let _ = input.parse::<Token![=]>();\n Ok(Meta::NameValue(MetaNameValue {\n name: ident,\n value: input.parse()?,\n }))\n } else {\n Ok(Meta::Word(ident))\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>program 0003: simple multiplication<commit_after>\/\/ product of two numbers\nfn main() {\n let a = 5.5;\n let b = 7.5;\n let c;\n\n c = a * b; \n println!(\"Value of {} * {} is {} \", a, b, c);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #26718 - nham:test-8640, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[allow(unused_imports)]\n\nmod foo {\n use baz::bar;\n \/\/~^ ERROR import `bar` conflicts with existing submodule\n mod bar {}\n}\nmod baz { pub mod bar {} }\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add layout for 'inline-block' and other atomic inline-level boxes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adding tests directory<commit_after>extern crate bittorrent;\n\nuse bittorrent::default_handler::*;\nuse bittorrent::chunk::*;\nuse bittorrent::peer::*;\n\n#[test]\nfn test_nand_slice() {\n let a = vec![0, 0];\n let b = vec![0, 1];\n let c = nand_slice_vbr_len(&a, &b);\n\n assert_eq!(c, vec![255, 254]);\n}\n\n\/*#[test]\nfn test_unclaimed_fields() {\n let mut handler = DefaultHandler;\n\n handler.owned = vec![0, 0, 0];\n handler.request_map = vec![1, 0];\n\n let c = handler.unclaimed_fields();\n assert_eq!(c, vec![254, 255, 255]);\n}*\/\n\n#[test]\nfn test_set_have_singleton_bitfield() {\n let mut state = State::new();\n\n state.set_bitfield(vec![0]);\n state.set_have(2);\n\n assert_eq!(state.bitfield[0], 32);\n}\n\n#[test]\nfn test_set_have_longer_bitfield() {\n let mut state = State::new();\n\n state.set_bitfield(vec![0, 0, 0, 0]);\n state.set_have(23);\n\n assert_eq!(state.bitfield[0], 0);\n assert_eq!(state.bitfield[1], 0);\n assert_eq!(state.bitfield[2], 1);\n assert_eq!(state.bitfield[3], 0);\n}\n\n#[test]\nfn test_set_have_out_of_bounds() {\n let mut state = State::new();\n\n state.set_bitfield(vec![0, 1]);\n state.set_have(31);\n\n assert_eq!(state.bitfield[0], 0);\n assert_eq!(state.bitfield[1], 1);\n assert_eq!(state.bitfield[2], 0);\n assert_eq!(state.bitfield[3], 1);\n}\n\n#[test]\nfn test_get_block_boundaries_1() {\n let piece_length = 300;\n let index = 0;\n let offset = 50;\n let bytes = 1000;\n let block = Piece::from(piece_length, index, offset, bytes);\n\n assert_eq!(block, Piece::new(Position::new(0, 
50), Position::new(3, 150)));\n}\n\n\n#[test]\nfn test_get_block_boundaries_2() {\n let piece_length = 5;\n let index = 0;\n let offset = 0;\n let bytes = 5;\n let block = Piece::from(piece_length, index, offset, bytes);\n\n assert_eq!(block, Piece::new(Position::new(0, 0), Position::new(1, 0)));\n}\n\n#[test]\nfn test_get_block_boundaries_3() {\n let piece_length = 5;\n let index = 0;\n let offset = 0;\n let bytes = 6;\n let block = Piece::from(piece_length, index, offset, bytes);\n\n assert_eq!(block, Piece::new(Position::new(0, 0), Position::new(1, 1)));\n}\n\n#[test]\nfn test_request_block () {\n let mut vec = vec![];\n let piece_length = 5;\n\n let a = { \/\/empty case\n let index = 0;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index+1, 0));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block);\n assert_eq!(vec[0], expect.clone());\n\n expect\n };\n\n let b = {\n let index = 5;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index+1, 0));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block);\n assert_eq!(vec, vec![a.clone(), expect.clone()]);\n\n expect\n };\n\n let c = {\n let index = 3;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index+1, 0));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block);\n assert_eq!(vec, vec![a.clone(), expect.clone(), b.clone()]);\n\n expect\n };\n\n let d = {\n let index = 4;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index+1, 0));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block);\n assert_eq!(vec, vec![a.clone(), c.clone(), expect.clone(), b.clone()]);\n\n expect\n };\n\n}\n\n#[test]\nfn test_request_block_with_compaction () {\n let mut vec = vec![];\n let piece_length = 5;\n\n let _ = {\n let index = 0;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index, piece_length - 1));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block)\n };\n\n let index = {\n let index = 1;\n let offset = 0;\n let bytes = 5;\n\n let expect = Piece::new(Position::new(index, offset), Position::new(index, piece_length - 1));\n let block = Piece::from(piece_length, index, offset, bytes);\n Piece::add_to_boundary_vec(&mut vec, block)\n };\n\n Piece::compact_if_possible(&mut vec, index);\n\n assert_eq!(vec, vec![Piece::new(Position::new(0, 0), Position::new(2, 0))]);\n}\n\n#[test]\npub fn test_convert_bitfield_to_piece_vec() {\n let p = Piece::convert_bitfield_to_piece_vec(&vec![1, 1]);\n assert_eq!(p, vec![Piece::new(Position::new(7, 0), Position::new(8, 0)),\n Piece::new(Position::new(15, 0), Position::new(16, 0))]);\n\n let a = Piece::convert_bitfield_to_piece_vec(&vec![128]);\n assert_eq!(a, vec![Piece::new(Position::new(0, 0), Position::new(1, 0))]);\n}\n\n#[test]\npub fn test_trivial_complement() {\n let a = vec![\n Piece::new(Position::new(0, 0), Position::new(3, 0))\n ];\n\n let b = vec![\n Piece::new(Position::new(0, 0), Position::new(1, 0))\n ];\n\n let a = Piece::complement(&a, &b);\n\n assert_eq!(a, vec![Piece::new(Position::new(1, 0), Position::new(3, 0))]);\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Rollup merge of #41673 - bitshifter:repr-align-codegen-test, r=arielb1<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -C no-prepopulate-passes\n#![crate_type = \"lib\"]\n\n#![feature(attr_literals)]\n#![feature(repr_align)]\n\n#[repr(align(64))]\npub struct Align64(i32);\n\npub struct Nested64 {\n a: Align64,\n b: i32,\n c: i32,\n d: i8,\n}\n\npub enum Enum64 {\n A(Align64),\n B(i32),\n}\n\n\/\/ CHECK-LABEL: @align64\n#[no_mangle]\npub fn align64(i : i32) -> Align64 {\n\/\/ CHECK: %a64 = alloca %Align64, align 64\n\/\/ CHECK: call void @llvm.memcpy.{{.*}}(i8* %{{.*}}, i8* %{{.*}}, i{{[0-9]+}} 64, i32 64, i1 false)\n let a64 = Align64(i);\n a64\n}\n\n\/\/ CHECK-LABEL: @nested64\n#[no_mangle]\npub fn nested64(a: Align64, b: i32, c: i32, d: i8) -> Nested64 {\n\/\/ CHECK: %n64 = alloca %Nested64, align 64\n\/\/ CHECK: %a = alloca %Align64, align 64\n let n64 = Nested64 { a, b, c, d };\n n64\n}\n\n\/\/ CHECK-LABEL: @enum64\n#[no_mangle]\npub fn enum64(a: Align64) -> Enum64 {\n\/\/ CHECK: %e64 = alloca %Enum64, align 64\n\/\/ CHECK: %a = alloca %Align64, align 64\n let e64 = Enum64::A(a);\n e64\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue #16739<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(unboxed_closures)]\n\n\/\/ Test that unboxing shim for calling rust-call ABI methods through a\n\/\/ trait box works and does not cause an ICE\n\nstruct Foo { foo: uint }\n\nimpl FnOnce<(), uint> for Foo {\n #[rust_call_abi_hack]\n fn call_once(self, _: ()) -> uint { self.foo }\n}\n\nimpl FnOnce<(uint,), uint> for Foo {\n #[rust_call_abi_hack]\n fn call_once(self, (x,): (uint,)) -> uint { self.foo + x }\n}\n\nimpl FnOnce<(uint, uint), uint> for Foo {\n #[rust_call_abi_hack]\n fn call_once(self, (x, y): (uint, uint)) -> uint { self.foo + x + y }\n}\n\nfn main() {\n let f = box Foo { foo: 42 } as Box<FnOnce<(), uint>>;\n assert_eq!(f.call_once(()), 42);\n\n let f = box Foo { foo: 40 } as Box<FnOnce<(uint,), uint>>;\n assert_eq!(f.call_once((2,)), 42);\n\n let f = box Foo { foo: 40 } as Box<FnOnce<(uint, uint), uint>>;\n assert_eq!(f.call_once((1, 1)), 42);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #21622<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nstruct Index;\n\nimpl Index {\n fn new() -> Self { Index }\n}\n\nfn user() {\n let new = Index::new;\n\n fn inner() {\n let index = Index::new();\n }\n\n let index2 = new();\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #22620 - edwardw:issue-22356, r=brson<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::marker::{PhantomData, PhantomFn};\n\npub struct Handle<T, I>(T, I);\n\nimpl<T, I> Handle<T, I> {\n pub fn get_info(&self) -> &I {\n let Handle(_, ref info) = *self;\n info\n }\n}\n\npub struct BufferHandle<D: Device, T> {\n raw: RawBufferHandle<D>,\n _marker: PhantomData<T>,\n}\n\nimpl<D: Device, T> BufferHandle<D, T> {\n pub fn get_info(&self) -> &String {\n self.raw.get_info()\n }\n}\n\npub type RawBufferHandle<D: Device> = Handle<<D as Device>::Buffer, String>;\n\npub trait Device: PhantomFn<Self> {\n type Buffer;\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue 23699.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn gimme_a_raw_pointer<T>(_: *const T) { }\n\nfn test<T>(t: T) { }\n\nfn main() {\n \/\/ Clearly `pointer` must be of type `*const ()`.\n let pointer = &() as *const _;\n gimme_a_raw_pointer(pointer);\n\n let t = test as fn (i32);\n t(0i32);\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse util::nodemap::{FxHashMap, FxHashSet};\nuse ty::context::TyCtxt;\nuse ty::{AdtDef, VariantDef, FieldDef, Ty, TyS};\nuse ty::{DefId, Substs};\nuse ty::{AdtKind, Visibility};\nuse ty::TypeVariants::*;\n\npub use self::def_id_forest::DefIdForest;\n\nmod def_id_forest;\n\n\/\/ The methods in this module calculate DefIdForests of modules in which a\n\/\/ AdtDef\/VariantDef\/FieldDef is visibly uninhabited.\n\/\/\n\/\/ # Example\n\/\/ ```rust\n\/\/ enum Void {}\n\/\/ mod a {\n\/\/ pub mod b {\n\/\/ pub struct SecretlyUninhabited {\n\/\/ _priv: !,\n\/\/ }\n\/\/ }\n\/\/ }\n\/\/\n\/\/ mod c {\n\/\/ pub struct AlsoSecretlyUninhabited {\n\/\/ _priv: Void,\n\/\/ }\n\/\/ mod d {\n\/\/ }\n\/\/ }\n\/\/\n\/\/ struct Foo {\n\/\/ x: a::b::SecretlyUninhabited,\n\/\/ y: c::AlsoSecretlyUninhabited,\n\/\/ }\n\/\/ ```\n\/\/ In this code, the type Foo will only be visibly uninhabited inside the\n\/\/ modules b, c and d. Calling uninhabited_from on Foo or its AdtDef will\n\/\/ return the forest of modules {b, c->d} (represented in a DefIdForest by the\n\/\/ set {b, c})\n\/\/\n\/\/ We need this information for pattern-matching on Foo or types that contain\n\/\/ Foo.\n\/\/\n\/\/ # Example\n\/\/ ```rust\n\/\/ let foo_result: Result<T, Foo> = ... ;\n\/\/ let Ok(t) = foo_result;\n\/\/ ```\n\/\/ This code should only compile in modules where the uninhabitedness of Foo is\n\/\/ visible.\n\nimpl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {\n \/\/\/ Checks whether a type is visibly uninhabited from a particular module.\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ enum Void {}\n \/\/\/ mod a {\n \/\/\/ pub mod b {\n \/\/\/ pub struct SecretlyUninhabited {\n \/\/\/ _priv: !,\n \/\/\/ }\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ mod c {\n \/\/\/ pub struct AlsoSecretlyUninhabited {\n \/\/\/ _priv: Void,\n \/\/\/ }\n \/\/\/ mod d {\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ struct Foo {\n \/\/\/ x: a::b::SecretlyUninhabited,\n \/\/\/ y: c::AlsoSecretlyUninhabited,\n \/\/\/ }\n \/\/\/ ```\n \/\/\/ In this code, the type `Foo` will only be visibly uninhabited inside the\n \/\/\/ modules b, c and d. This effects pattern-matching on `Foo` or types that\n \/\/\/ contain `Foo`.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let foo_result: Result<T, Foo> = ... 
;\n \/\/\/ let Ok(t) = foo_result;\n \/\/\/ ```\n \/\/\/ This code should only compile in modules where the uninhabitedness of Foo is\n \/\/\/ visible.\n pub fn is_ty_uninhabited_from(self, module: DefId, ty: Ty<'tcx>) -> bool {\n \/\/ To check whether this type is uninhabited at all (not just from the\n \/\/ given node) you could check whether the forest is empty.\n \/\/ ```\n \/\/ forest.is_empty()\n \/\/ ```\n self.ty_inhabitedness_forest(ty).contains(self, module)\n }\n\n fn ty_inhabitedness_forest(self, ty: Ty<'tcx>) -> DefIdForest {\n ty.uninhabited_from(&mut FxHashMap(), self)\n }\n\n pub fn is_enum_variant_uninhabited_from(self,\n module: DefId,\n variant: &'tcx VariantDef,\n substs: &'tcx Substs<'tcx>)\n -> bool\n {\n self.variant_inhabitedness_forest(variant, substs).contains(self, module)\n }\n\n pub fn is_variant_uninhabited_from_all_modules(self,\n variant: &'tcx VariantDef,\n substs: &'tcx Substs<'tcx>)\n -> bool\n {\n !self.variant_inhabitedness_forest(variant, substs).is_empty()\n }\n\n fn variant_inhabitedness_forest(self, variant: &'tcx VariantDef, substs: &'tcx Substs<'tcx>)\n -> DefIdForest {\n \/\/ Determine the ADT kind:\n let adt_def_id = self.adt_def_id_of_variant(variant);\n let adt_kind = self.adt_def(adt_def_id).adt_kind();\n\n \/\/ Compute inhabitedness forest:\n variant.uninhabited_from(&mut FxHashMap(), self, substs, adt_kind)\n }\n}\n\nimpl<'a, 'gcx, 'tcx> AdtDef {\n \/\/\/ Calculate the forest of DefIds from which this adt is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: &'tcx Substs<'tcx>) -> DefIdForest\n {\n DefIdForest::intersection(tcx, self.variants.iter().map(|v| {\n v.uninhabited_from(visited, tcx, substs, self.adt_kind())\n }))\n }\n}\n\nimpl<'a, 'gcx, 'tcx> VariantDef {\n \/\/\/ Calculate the forest of DefIds from which this variant is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: &'tcx Substs<'tcx>,\n adt_kind: AdtKind) -> DefIdForest\n {\n match adt_kind {\n AdtKind::Union => {\n DefIdForest::intersection(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, false)\n }))\n },\n AdtKind::Struct => {\n DefIdForest::union(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, false)\n }))\n },\n AdtKind::Enum => {\n DefIdForest::union(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, true)\n }))\n },\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> FieldDef {\n \/\/\/ Calculate the forest of DefIds from which this field is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: &'tcx Substs<'tcx>,\n is_enum: bool) -> DefIdForest\n {\n let mut data_uninhabitedness = move || {\n self.ty(tcx, substs).uninhabited_from(visited, tcx)\n };\n \/\/ FIXME(canndrew): Currently enum fields are (incorrectly) stored with\n \/\/ Visibility::Invisible so we need to override self.vis if we're\n \/\/ dealing with an enum.\n if is_enum {\n data_uninhabitedness()\n } else {\n match self.vis {\n Visibility::Invisible => DefIdForest::empty(),\n Visibility::Restricted(from) => {\n let forest = DefIdForest::from_id(from);\n let iter = Some(forest).into_iter().chain(Some(data_uninhabitedness()));\n DefIdForest::intersection(tcx, iter)\n },\n Visibility::Public => data_uninhabitedness(),\n }\n 
}\n }\n}\n\nimpl<'a, 'gcx, 'tcx> TyS<'tcx> {\n \/\/\/ Calculate the forest of DefIds from which this type is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest\n {\n match self.sty {\n TyAdt(def, substs) => {\n {\n let substs_set = visited.entry(def.did).or_insert(FxHashSet::default());\n if !substs_set.insert(substs) {\n \/\/ We are already calculating the inhabitedness of this type.\n \/\/ The type must contain a reference to itself. Break the\n \/\/ infinite loop.\n return DefIdForest::empty();\n }\n if substs_set.len() >= tcx.sess.recursion_limit.get() \/ 4 {\n \/\/ We have gone very deep, reinstantiating this ADT inside\n \/\/ itself with different type arguments. We are probably\n \/\/ hitting an infinite loop. For example, it's possible to write:\n \/\/ a type Foo<T>\n \/\/ which contains a Foo<(T, T)>\n \/\/ which contains a Foo<((T, T), (T, T))>\n \/\/ which contains a Foo<(((T, T), (T, T)), ((T, T), (T, T)))>\n \/\/ etc.\n let error = format!(\"reached recursion limit while checking \\\n inhabitedness of `{}`\", self);\n tcx.sess.fatal(&error);\n }\n }\n let ret = def.uninhabited_from(visited, tcx, substs);\n let substs_set = visited.get_mut(&def.did).unwrap();\n substs_set.remove(substs);\n ret\n },\n\n TyNever => DefIdForest::full(tcx),\n TyTuple(ref tys, _) => {\n DefIdForest::union(tcx, tys.iter().map(|ty| {\n ty.uninhabited_from(visited, tcx)\n }))\n },\n TyArray(ty, len) => {\n if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) {\n DefIdForest::empty()\n } else {\n ty.uninhabited_from(visited, tcx)\n }\n }\n TyRef(_, ref tm) => {\n tm.ty.uninhabited_from(visited, tcx)\n }\n\n _ => DefIdForest::empty(),\n }\n }\n}\n\n<commit_msg>add a function for testing if a ty is uninhabited from all modules<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse util::nodemap::{FxHashMap, FxHashSet};\nuse ty::context::TyCtxt;\nuse ty::{AdtDef, VariantDef, FieldDef, Ty, TyS};\nuse ty::{DefId, Substs};\nuse ty::{AdtKind, Visibility};\nuse ty::TypeVariants::*;\n\npub use self::def_id_forest::DefIdForest;\n\nmod def_id_forest;\n\n\/\/ The methods in this module calculate DefIdForests of modules in which a\n\/\/ AdtDef\/VariantDef\/FieldDef is visibly uninhabited.\n\/\/\n\/\/ # Example\n\/\/ ```rust\n\/\/ enum Void {}\n\/\/ mod a {\n\/\/ pub mod b {\n\/\/ pub struct SecretlyUninhabited {\n\/\/ _priv: !,\n\/\/ }\n\/\/ }\n\/\/ }\n\/\/\n\/\/ mod c {\n\/\/ pub struct AlsoSecretlyUninhabited {\n\/\/ _priv: Void,\n\/\/ }\n\/\/ mod d {\n\/\/ }\n\/\/ }\n\/\/\n\/\/ struct Foo {\n\/\/ x: a::b::SecretlyUninhabited,\n\/\/ y: c::AlsoSecretlyUninhabited,\n\/\/ }\n\/\/ ```\n\/\/ In this code, the type Foo will only be visibly uninhabited inside the\n\/\/ modules b, c and d. 
Calling uninhabited_from on Foo or its AdtDef will\n\/\/ return the forest of modules {b, c->d} (represented in a DefIdForest by the\n\/\/ set {b, c})\n\/\/\n\/\/ We need this information for pattern-matching on Foo or types that contain\n\/\/ Foo.\n\/\/\n\/\/ # Example\n\/\/ ```rust\n\/\/ let foo_result: Result<T, Foo> = ... ;\n\/\/ let Ok(t) = foo_result;\n\/\/ ```\n\/\/ This code should only compile in modules where the uninhabitedness of Foo is\n\/\/ visible.\n\nimpl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {\n \/\/\/ Checks whether a type is visibly uninhabited from a particular module.\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ enum Void {}\n \/\/\/ mod a {\n \/\/\/ pub mod b {\n \/\/\/ pub struct SecretlyUninhabited {\n \/\/\/ _priv: !,\n \/\/\/ }\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ mod c {\n \/\/\/ pub struct AlsoSecretlyUninhabited {\n \/\/\/ _priv: Void,\n \/\/\/ }\n \/\/\/ mod d {\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ struct Foo {\n \/\/\/ x: a::b::SecretlyUninhabited,\n \/\/\/ y: c::AlsoSecretlyUninhabited,\n \/\/\/ }\n \/\/\/ ```\n \/\/\/ In this code, the type `Foo` will only be visibly uninhabited inside the\n \/\/\/ modules b, c and d. This effects pattern-matching on `Foo` or types that\n \/\/\/ contain `Foo`.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let foo_result: Result<T, Foo> = ... ;\n \/\/\/ let Ok(t) = foo_result;\n \/\/\/ ```\n \/\/\/ This code should only compile in modules where the uninhabitedness of Foo is\n \/\/\/ visible.\n pub fn is_ty_uninhabited_from(self, module: DefId, ty: Ty<'tcx>) -> bool {\n \/\/ To check whether this type is uninhabited at all (not just from the\n \/\/ given node) you could check whether the forest is empty.\n \/\/ ```\n \/\/ forest.is_empty()\n \/\/ ```\n self.ty_inhabitedness_forest(ty).contains(self, module)\n }\n\n pub fn is_ty_uninhabited_from_all_modules(self, ty: Ty<'tcx>) -> bool {\n !self.ty_inhabitedness_forest(ty).is_empty()\n }\n\n fn ty_inhabitedness_forest(self, ty: Ty<'tcx>) -> DefIdForest {\n ty.uninhabited_from(&mut FxHashMap(), self)\n }\n\n pub fn is_enum_variant_uninhabited_from(self,\n module: DefId,\n variant: &'tcx VariantDef,\n substs: &'tcx Substs<'tcx>)\n -> bool\n {\n self.variant_inhabitedness_forest(variant, substs).contains(self, module)\n }\n\n pub fn is_variant_uninhabited_from_all_modules(self,\n variant: &'tcx VariantDef,\n substs: &'tcx Substs<'tcx>)\n -> bool\n {\n !self.variant_inhabitedness_forest(variant, substs).is_empty()\n }\n\n fn variant_inhabitedness_forest(self, variant: &'tcx VariantDef, substs: &'tcx Substs<'tcx>)\n -> DefIdForest {\n \/\/ Determine the ADT kind:\n let adt_def_id = self.adt_def_id_of_variant(variant);\n let adt_kind = self.adt_def(adt_def_id).adt_kind();\n\n \/\/ Compute inhabitedness forest:\n variant.uninhabited_from(&mut FxHashMap(), self, substs, adt_kind)\n }\n}\n\nimpl<'a, 'gcx, 'tcx> AdtDef {\n \/\/\/ Calculate the forest of DefIds from which this adt is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: &'tcx Substs<'tcx>) -> DefIdForest\n {\n DefIdForest::intersection(tcx, self.variants.iter().map(|v| {\n v.uninhabited_from(visited, tcx, substs, self.adt_kind())\n }))\n }\n}\n\nimpl<'a, 'gcx, 'tcx> VariantDef {\n \/\/\/ Calculate the forest of DefIds from which this variant is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: 
&'tcx Substs<'tcx>,\n adt_kind: AdtKind) -> DefIdForest\n {\n match adt_kind {\n AdtKind::Union => {\n DefIdForest::intersection(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, false)\n }))\n },\n AdtKind::Struct => {\n DefIdForest::union(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, false)\n }))\n },\n AdtKind::Enum => {\n DefIdForest::union(tcx, self.fields.iter().map(|f| {\n f.uninhabited_from(visited, tcx, substs, true)\n }))\n },\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> FieldDef {\n \/\/\/ Calculate the forest of DefIds from which this field is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n substs: &'tcx Substs<'tcx>,\n is_enum: bool) -> DefIdForest\n {\n let mut data_uninhabitedness = move || {\n self.ty(tcx, substs).uninhabited_from(visited, tcx)\n };\n \/\/ FIXME(canndrew): Currently enum fields are (incorrectly) stored with\n \/\/ Visibility::Invisible so we need to override self.vis if we're\n \/\/ dealing with an enum.\n if is_enum {\n data_uninhabitedness()\n } else {\n match self.vis {\n Visibility::Invisible => DefIdForest::empty(),\n Visibility::Restricted(from) => {\n let forest = DefIdForest::from_id(from);\n let iter = Some(forest).into_iter().chain(Some(data_uninhabitedness()));\n DefIdForest::intersection(tcx, iter)\n },\n Visibility::Public => data_uninhabitedness(),\n }\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> TyS<'tcx> {\n \/\/\/ Calculate the forest of DefIds from which this type is visibly uninhabited.\n fn uninhabited_from(\n &self,\n visited: &mut FxHashMap<DefId, FxHashSet<&'tcx Substs<'tcx>>>,\n tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest\n {\n match self.sty {\n TyAdt(def, substs) => {\n {\n let substs_set = visited.entry(def.did).or_insert(FxHashSet::default());\n if !substs_set.insert(substs) {\n \/\/ We are already calculating the inhabitedness of this type.\n \/\/ The type must contain a reference to itself. Break the\n \/\/ infinite loop.\n return DefIdForest::empty();\n }\n if substs_set.len() >= tcx.sess.recursion_limit.get() \/ 4 {\n \/\/ We have gone very deep, reinstantiating this ADT inside\n \/\/ itself with different type arguments. We are probably\n \/\/ hitting an infinite loop. 
For example, it's possible to write:\n \/\/ a type Foo<T>\n \/\/ which contains a Foo<(T, T)>\n \/\/ which contains a Foo<((T, T), (T, T))>\n \/\/ which contains a Foo<(((T, T), (T, T)), ((T, T), (T, T)))>\n \/\/ etc.\n let error = format!(\"reached recursion limit while checking \\\n inhabitedness of `{}`\", self);\n tcx.sess.fatal(&error);\n }\n }\n let ret = def.uninhabited_from(visited, tcx, substs);\n let substs_set = visited.get_mut(&def.did).unwrap();\n substs_set.remove(substs);\n ret\n },\n\n TyNever => DefIdForest::full(tcx),\n TyTuple(ref tys, _) => {\n DefIdForest::union(tcx, tys.iter().map(|ty| {\n ty.uninhabited_from(visited, tcx)\n }))\n },\n TyArray(ty, len) => {\n if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) {\n DefIdForest::empty()\n } else {\n ty.uninhabited_from(visited, tcx)\n }\n }\n TyRef(_, ref tm) => {\n tm.ty.uninhabited_from(visited, tcx)\n }\n\n _ => DefIdForest::empty(),\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 1599<commit_after>\/\/ https:\/\/leetcode.com\/problems\/maximum-profit-of-operating-a-centennial-wheel\/\npub fn min_operations_max_profit(\n customers: Vec<i32>,\n boarding_cost: i32,\n running_cost: i32,\n) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", min_operations_max_profit(vec![8, 3], 5, 6)); \/\/ 3\n println!(\"{}\", min_operations_max_profit(vec![10, 9, 6], 6, 4)); \/\/ 7\n println!(\"{}\", min_operations_max_profit(vec![3, 4, 0, 5, 1], 1, 92)); \/\/ -1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>imag-bookmark: Replace read with typed read<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>unit test for checked overflow during signed negation.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern:thread '<main>' panicked at 'attempted to negate with overflow'\n\/\/ compile-flags: -C debug-assertions\n\n\/\/ (Work around constant-evaluation)\nfn value() -> i8 { std::i8::MIN }\n\nfn main() {\n let _x = -value();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adding 1D plotting example using curves from wikipedia<commit_after>extern crate bspline;\n\nuse std::iter;\n\n\/\/\/ Plot a simple 1D quadratic B-spline\nfn plot_quadratic() {\n let points = vec![0.0, 0.0, 1.0, 0.0, 0.0];\n let knots = vec![0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 3.0, 3.0];\n let t_start = knots[0];\n let t_end = knots[knots.len() - 1];\n\n let plot_w = 80;\n let plot_h = 30;\n let x_scale = plot_w as f32 \/ knots[knots.len() - 1];\n let y_scale = plot_h as f32 \/ 1.0;\n\n let mut plot: Vec<_> = iter::repeat(' ').take(plot_w * plot_h).collect();\n\n println!(\"Plotting Quadratic B-spline with:\\n\\tpoints = {:?}\\n\\tknots = {:?}\",\n points, knots);\n println!(\"\\tStarting at {}, ending at {}\", t_start, t_end);\n let spline = bspline::BSpline::new(2, points, knots);\n\n let step_size = 0.001;\n let steps = ((t_end - t_start) \/ step_size) as usize;\n for s in 0..steps {\n let t = step_size * s as f32 + t_start;\n let y = spline.point(t);\n let iy = (y * y_scale) as isize;\n let ix = (t * x_scale) as isize;\n if iy >= 0 && iy < plot_h as isize {\n plot[(plot_h - 1 - iy as usize) * plot_w + ix as usize] = 'O';\n }\n }\n for y in 0..plot_h {\n for x in 0..plot_w {\n print!(\"{}\", plot[y * plot_w + x]);\n }\n println!(\"\");\n }\n}\n\/\/\/ Plot a simple 1D cubic B-spline\nfn plot_cubic() {\n let points = vec![0.0, 0.0, 0.0, 6.0, 0.0, 0.0, 0.0];\n let knots = vec![-2.0, -2.0, -2.0, -2.0, -1.0, 0.0, 1.0, 2.0, 2.0, 2.0, 2.0];\n let t_start = knots[0];\n let t_end = knots[knots.len() - 1];\n\n let plot_w = 80;\n let plot_h = 30;\n let x_scale = plot_w as f32 \/ 4.0;\n let x_offset = 2.0;\n let y_scale = plot_h as f32 \/ 6.0;\n\n let mut plot: Vec<_> = iter::repeat(' ').take(plot_w * plot_h).collect();\n\n println!(\"Plotting Cubic B-spline with:\\n\\tpoints = {:?}\\n\\tknots = {:?}\",\n points, knots);\n println!(\"\\tStarting at {}, ending at {}\", t_start, t_end);\n let spline = bspline::BSpline::new(3, points, knots);\n\n let step_size = 0.001;\n let steps = ((t_end - t_start) \/ step_size) as usize;\n for s in 0..steps {\n let t = step_size * s as f32 + t_start;\n let y = spline.point(t);\n let iy = (y * y_scale) as isize;\n let ix = ((t + x_offset) * x_scale) as isize;\n if iy >= 0 && iy < plot_h as isize {\n plot[(plot_h - 1 - iy as usize) * plot_w + ix as usize] = 'O';\n }\n }\n for y in 0..plot_h {\n for x in 0..plot_w {\n print!(\"{}\", plot[y * plot_w + x]);\n }\n println!(\"\");\n }\n}\n\/\/\/ Plot a simple 1D quartic B-spline\nfn plot_quartic() {\n let points = vec![0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0];\n let knots = vec![0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 5.0, 5.0, 5.0, 5.0];\n let t_start = knots[0];\n let t_end = knots[knots.len() - 1];\n\n let plot_w = 80;\n let plot_h = 30;\n let x_scale = plot_w as f32 \/ 5.0;\n let y_scale = plot_h as f32 \/ 1.0;\n\n let mut plot: Vec<_> = iter::repeat(' ').take(plot_w * plot_h).collect();\n\n println!(\"Plotting Quartic B-spline with:\\n\\tpoints = {:?}\\n\\tknots = {:?}\",\n points, knots);\n println!(\"\\tStarting at {}, ending at {}\", t_start, t_end);\n let spline = bspline::BSpline::new(4, points, knots);\n\n let step_size 
= 0.001;\n let steps = ((t_end - t_start) \/ step_size) as usize;\n for s in 0..steps {\n let t = step_size * s as f32 + t_start;\n let y = spline.point(t);\n let iy = (y * y_scale) as isize;\n let ix = (t * x_scale) as isize;\n if iy >= 0 && iy < plot_h as isize {\n plot[(plot_h - 1 - iy as usize) * plot_w + ix as usize] = 'O';\n }\n }\n for y in 0..plot_h {\n for x in 0..plot_w {\n print!(\"{}\", plot[y * plot_w + x]);\n }\n println!(\"\");\n }\n}\n\nfn main() {\n let divider: String = iter::repeat('-').take(80).collect();\n plot_quadratic();\n println!(\"{}\\n\\n{}\", divider, divider);\n plot_cubic();\n println!(\"{}\\n\\n{}\", divider, divider);\n plot_quartic();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>am 38f79d01: Merge \"Make ImageProcessing work.\" into honeycomb<commit_after><|endoftext|>"} {"text":"<commit_before>use audio::wav::*;\n\nuse programs::common::*;\n\nuse graphics::bmp::*;\n\npub struct Sprite {\n point: Point,\n image: BMP\n}\n\nimpl Sprite {\n pub fn draw(&self, content: &mut Display){\n content.image_alpha(self.point, self.image.data, self.image.size);\n }\n}\n\npub struct Application;\n\nimpl SessionItem for Application {\n fn main(&mut self, url: URL){\n let mut window = Window::new(Point::new((rand() % 400 + 50) as isize, (rand() % 300 + 50) as isize), Size::new(640, 480), \"Example Game (Loading)\".to_string());\n\n let mut player;\n {\n let mut resource = URL::from_string(&\"file:\/\/\/game\/ninjaroofront.bmp\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n resource.read_to_end(&mut bytes);\n player = Sprite {\n point: Point::new(200, 200),\n image: unsafe{ BMP::from_data(bytes.as_ptr() as usize) }\n };\n }\n\n window.title = \"Example Game\".to_string();\n\n let sound;\n {\n let mut resource = URL::from_string(&\"file:\/\/\/game\/wilhelm.wav\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n resource.read_to_end(&mut bytes);\n\n sound = WAV::from_data(&bytes);\n }\n\n let mut keys: Vec<u8> = Vec::new();\n let mut redraw = true;\n let mut running = true;\n while running {\n loop {\n match window.poll() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => {\n running = false;\n break;\n },\n K_CTRL => {\n let mut resource = URL::from_string(&\"audio:\/\/\".to_string()).open();\n resource.write(sound.data.as_slice());\n },\n _ => ()\n }\n\n let mut found = false;\n for key in keys.iter() {\n if *key == key_event.scancode {\n found = true;\n break;\n }\n }\n if ! 
found {\n keys.push(key_event.scancode);\n }\n }else{\n let mut i = 0;\n while i < keys.len() {\n let mut remove = false;\n if let Option::Some(key) = keys.get(i) {\n if *key == key_event.scancode {\n remove = true;\n }\n }\n if remove {\n keys.remove(i);\n }else{\n i += 1;\n }\n }\n }\n },\n EventOption::None => break,\n _ => ()\n }\n }\n\n for key in keys.iter() {\n match *key {\n K_LEFT => {\n player.point.x = max(0, player.point.x - 1);\n redraw = true;\n },\n K_RIGHT => {\n player.point.x = min(window.content.width as isize - 1, player.point.x + 1);\n redraw = true;\n },\n K_UP => {\n player.point.y = max(0, player.point.y - 1);\n redraw = true;\n },\n K_DOWN => {\n player.point.y = min(window.content.height as isize - 1, player.point.y + 1);\n redraw = true;\n },\n _ => ()\n }\n }\n\n if redraw {\n redraw = false;\n\n let content = &mut window.content;\n content.set(Color::new(128, 128, 255));\n\n player.draw(content);\n\n content.flip();\n\n RedrawEvent {\n redraw: REDRAW_ALL\n }.trigger();\n }\n\n Duration::new(0, 1000000000\/120).sleep();\n }\n }\n}\n\nimpl Application {\n pub fn new() -> Application {\n Application\n }\n}\n<commit_msg>Wait for other audio processes<commit_after>use audio::wav::*;\n\nuse programs::common::*;\n\nuse graphics::bmp::*;\n\npub struct Sprite {\n point: Point,\n image: BMP\n}\n\nimpl Sprite {\n pub fn draw(&self, content: &mut Display){\n content.image_alpha(self.point, self.image.data, self.image.size);\n }\n}\n\npub struct Application;\n\nimpl SessionItem for Application {\n fn main(&mut self, url: URL){\n let mut window = Window::new(Point::new((rand() % 400 + 50) as isize, (rand() % 300 + 50) as isize), Size::new(640, 480), \"Example Game (Loading)\".to_string());\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n\n let mut audio = URL::from_string(&\"audio:\/\/\".to_string()).open();\n\n let mut player;\n {\n let mut resource = URL::from_string(&\"file:\/\/\/game\/ninjaroofront.bmp\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n resource.read_to_end(&mut bytes);\n player = Sprite {\n point: Point::new(200, 200),\n image: unsafe{ BMP::from_data(bytes.as_ptr() as usize) }\n };\n }\n\n let sound;\n {\n let mut resource = URL::from_string(&\"file:\/\/\/game\/wilhelm.wav\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n resource.read_to_end(&mut bytes);\n\n sound = WAV::from_data(&bytes);\n }\n\n window.title = \"Example Game\".to_string();\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n\n let mut keys: Vec<u8> = Vec::new();\n let mut redraw = true;\n let mut running = true;\n while running {\n loop {\n match window.poll() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => {\n running = false;\n break;\n },\n K_DEL => {\n window.title = \"Example Game (Screaming)\".to_string();\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n audio.write(sound.data.as_slice());\n window.title = \"Example Game\".to_string();\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n },\n _ => ()\n }\n\n let mut found = false;\n for key in keys.iter() {\n if *key == key_event.scancode {\n found = true;\n break;\n }\n }\n if ! 
found {\n keys.push(key_event.scancode);\n }\n }else{\n let mut i = 0;\n while i < keys.len() {\n let mut remove = false;\n if let Option::Some(key) = keys.get(i) {\n if *key == key_event.scancode {\n remove = true;\n }\n }\n if remove {\n keys.remove(i);\n }else{\n i += 1;\n }\n }\n }\n },\n EventOption::None => break,\n _ => ()\n }\n }\n\n for key in keys.iter() {\n match *key {\n K_LEFT => {\n player.point.x = max(0, player.point.x - 1);\n redraw = true;\n },\n K_RIGHT => {\n player.point.x = min(window.content.width as isize - 1, player.point.x + 1);\n redraw = true;\n },\n K_UP => {\n player.point.y = max(0, player.point.y - 1);\n redraw = true;\n },\n K_DOWN => {\n player.point.y = min(window.content.height as isize - 1, player.point.y + 1);\n redraw = true;\n },\n _ => ()\n }\n }\n\n if redraw {\n redraw = false;\n\n let content = &mut window.content;\n content.set(Color::new(128, 128, 255));\n\n player.draw(content);\n\n content.flip();\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n }\n\n Duration::new(0, 1000000000\/120).sleep();\n }\n\n window.title = \"Example Game (Closing)\".to_string();\n RedrawEvent { redraw: REDRAW_ALL }.trigger();\n\n {\n let mut resource = URL::from_string(&\"file:\/\/\/game\/game_over.wav\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n resource.read_to_end(&mut bytes);\n\n let game_over = WAV::from_data(&bytes);\n audio.write(game_over.data.as_slice());\n }\n }\n}\n\nimpl Application {\n pub fn new() -> Application {\n Application\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\nOperations on the ubiquitous `Option` type.\n\nType `Option` represents an optional value.\n\nEvery `Option<T>` value can either be `Some(T)` or `None`. 
Where in other\nlanguages you might use a nullable type, in Rust you would use an option\ntype.\n\nOptions are most commonly used with pattern matching to query the presence\nof a value and take action, always accounting for the `None` case.\n\n# Example\n\n~~~\nlet msg = Some(~\"howdy\");\n\n\/\/ Take a reference to the contained string\nmatch msg {\n Some(ref m) => io::println(m),\n None => ()\n}\n\n\/\/ Remove the contained string, destroying the Option\nlet unwrapped_msg = match msg {\n Some(m) => m,\n None => ~\"default message\"\n};\n~~~\n\n*\/\n\nuse cmp::{Eq,Ord};\nuse ops::Add;\nuse kinds::Copy;\nuse util;\nuse num::Zero;\nuse old_iter::{BaseIter, MutableIter, ExtendedIter};\nuse old_iter;\nuse str::StrSlice;\nuse clone::DeepClone;\n\n#[cfg(test)] use str;\n\n\/\/\/ The option type\n#[deriving(Clone, DeepClone, Eq)]\npub enum Option<T> {\n None,\n Some(T),\n}\n\nimpl<T:Ord> Ord for Option<T> {\n fn lt(&self, other: &Option<T>) -> bool {\n match (self, other) {\n (&None, &None) => false,\n (&None, &Some(_)) => true,\n (&Some(_), &None) => false,\n (&Some(ref a), &Some(ref b)) => *a < *b\n }\n }\n\n fn le(&self, other: &Option<T>) -> bool {\n match (self, other) {\n (&None, &None) => true,\n (&None, &Some(_)) => true,\n (&Some(_), &None) => false,\n (&Some(ref a), &Some(ref b)) => *a <= *b\n }\n }\n\n fn ge(&self, other: &Option<T>) -> bool {\n !(self < other)\n }\n\n fn gt(&self, other: &Option<T>) -> bool {\n !(self <= other)\n }\n}\n\nimpl<T: Copy + Add<T,T>> Add<Option<T>, Option<T>> for Option<T> {\n #[inline(always)]\n fn add(&self, other: &Option<T>) -> Option<T> {\n match (*self, *other) {\n (None, None) => None,\n (_, None) => *self,\n (None, _) => *other,\n (Some(ref lhs), Some(ref rhs)) => Some(*lhs + *rhs)\n }\n }\n}\n\nimpl<T> BaseIter<T> for Option<T> {\n \/\/\/ Performs an operation on the contained value by reference\n #[inline(always)]\n fn each<'a>(&'a self, f: &fn(x: &'a T) -> bool) -> bool {\n match *self { None => true, Some(ref t) => { f(t) } }\n }\n\n #[inline(always)]\n fn size_hint(&self) -> Option<uint> {\n if self.is_some() { Some(1) } else { Some(0) }\n }\n}\n\nimpl<T> MutableIter<T> for Option<T> {\n #[inline(always)]\n fn each_mut<'a>(&'a mut self, f: &fn(&'a mut T) -> bool) -> bool {\n match *self { None => true, Some(ref mut t) => { f(t) } }\n }\n}\n\nimpl<A> ExtendedIter<A> for Option<A> {\n pub fn eachi(&self, blk: &fn(uint, v: &A) -> bool) -> bool {\n old_iter::eachi(self, blk)\n }\n pub fn all(&self, blk: &fn(&A) -> bool) -> bool {\n old_iter::all(self, blk)\n }\n pub fn any(&self, blk: &fn(&A) -> bool) -> bool {\n old_iter::any(self, blk)\n }\n pub fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {\n old_iter::foldl(self, b0, blk)\n }\n pub fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {\n old_iter::position(self, f)\n }\n fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {\n old_iter::map_to_vec(self, op)\n }\n fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)\n -> ~[B] {\n old_iter::flat_map_to_vec(self, op)\n }\n}\n\nimpl<T> Option<T> {\n \/\/\/ Returns true if the option equals `none`\n pub fn is_none(&const self) -> bool {\n match *self { None => true, Some(_) => false }\n }\n\n \/\/\/ Returns true if the option contains some value\n #[inline(always)]\n pub fn is_some(&const self) -> bool { !self.is_none() }\n\n \/\/\/ Update an optional value by optionally running its content through a\n \/\/\/ function that returns an option.\n #[inline(always)]\n pub fn chain<U>(self, f: &fn(t: T) -> Option<U>) -> Option<U> {\n 
match self {\n Some(t) => f(t),\n None => None\n }\n }\n\n \/\/\/ Returns the leftmost Some() value, or None if both are None.\n #[inline(always)]\n pub fn or(self, optb: Option<T>) -> Option<T> {\n match self {\n Some(opta) => Some(opta),\n _ => optb\n }\n }\n\n \/\/\/ Update an optional value by optionally running its content by reference\n \/\/\/ through a function that returns an option.\n #[inline(always)]\n pub fn chain_ref<'a, U>(&'a self, f: &fn(x: &'a T) -> Option<U>)\n -> Option<U> {\n match *self {\n Some(ref x) => f(x),\n None => None\n }\n }\n\n \/\/\/ Maps a `some` value from one type to another by reference\n #[inline(always)]\n pub fn map<'a, U>(&self, f: &fn(&'a T) -> U) -> Option<U> {\n match *self { Some(ref x) => Some(f(x)), None => None }\n }\n\n \/\/\/ As `map`, but consumes the option and gives `f` ownership to avoid\n \/\/\/ copying.\n #[inline(always)]\n pub fn map_consume<U>(self, f: &fn(v: T) -> U) -> Option<U> {\n match self { None => None, Some(v) => Some(f(v)) }\n }\n\n \/\/\/ Applies a function to the contained value or returns a default\n #[inline(always)]\n pub fn map_default<'a, U>(&'a self, def: U, f: &fn(&'a T) -> U) -> U {\n match *self { None => def, Some(ref t) => f(t) }\n }\n\n \/\/\/ As `map_default`, but consumes the option and gives `f`\n \/\/\/ ownership to avoid copying.\n #[inline(always)]\n pub fn map_consume_default<U>(self, def: U, f: &fn(v: T) -> U) -> U {\n match self { None => def, Some(v) => f(v) }\n }\n\n \/\/\/ Apply a function to the contained value or do nothing\n pub fn mutate(&mut self, f: &fn(T) -> T) {\n if self.is_some() {\n *self = Some(f(self.swap_unwrap()));\n }\n }\n\n \/\/\/ Apply a function to the contained value or set it to a default\n pub fn mutate_default(&mut self, def: T, f: &fn(T) -> T) {\n if self.is_some() {\n *self = Some(f(self.swap_unwrap()));\n } else {\n *self = Some(def);\n }\n }\n\n \/**\n Gets an immutable reference to the value inside an option.\n\n # Failure\n\n Fails if the value equals `None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get_ref<'a>(&'a self) -> &'a T {\n match *self {\n Some(ref x) => x,\n None => fail!(\"option::get_ref none\")\n }\n }\n\n \/**\n Gets a mutable reference to the value inside an option.\n\n # Failure\n\n Fails if the value equals `None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get_mut_ref<'a>(&'a mut self) -> &'a mut T {\n match *self {\n Some(ref mut x) => x,\n None => fail!(\"option::get_mut_ref none\")\n }\n }\n\n #[inline(always)]\n pub fn unwrap(self) -> T {\n \/*!\n Moves a value out of an option type and returns it.\n\n Useful primarily for getting strings, vectors and unique pointers out\n of option types without copying them.\n\n # Failure\n\n Fails if the value equals `None`.\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged.\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n match self {\n Some(x) => x,\n None => fail!(\"option::unwrap none\")\n }\n }\n\n \/**\n * The option dance. 
Moves a value out of an option type and returns it,\n * replacing the original with `None`.\n *\n * # Failure\n *\n * Fails if the value equals `None`.\n *\/\n #[inline(always)]\n pub fn swap_unwrap(&mut self) -> T {\n if self.is_none() { fail!(\"option::swap_unwrap none\") }\n util::replace(self, None).unwrap()\n }\n\n \/**\n * Gets the value out of an option, printing a specified message on\n * failure\n *\n * # Failure\n *\n * Fails if the value equals `none`\n *\/\n #[inline(always)]\n pub fn expect(self, reason: &str) -> T {\n match self {\n Some(val) => val,\n None => fail!(reason.to_owned()),\n }\n }\n}\n\nimpl<T:Copy> Option<T> {\n \/**\n Gets the value out of an option\n\n # Failure\n\n Fails if the value equals `None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get(self) -> T {\n match self {\n Some(x) => return x,\n None => fail!(\"option::get none\")\n }\n }\n\n \/\/\/ Returns the contained value or a default\n #[inline(always)]\n pub fn get_or_default(self, def: T) -> T {\n match self { Some(x) => x, None => def }\n }\n\n \/\/\/ Applies a function zero or more times until the result is none.\n #[inline(always)]\n pub fn while_some(self, blk: &fn(v: T) -> Option<T>) {\n let mut opt = self;\n while opt.is_some() {\n opt = blk(opt.unwrap());\n }\n }\n}\n\nimpl<T:Copy + Zero> Option<T> {\n \/\/\/ Returns the contained value or zero (for this type)\n #[inline(always)]\n pub fn get_or_zero(self) -> T {\n match self {\n Some(x) => x,\n None => Zero::zero()\n }\n }\n}\n\n#[test]\nfn test_unwrap_ptr() {\n unsafe {\n let x = ~0;\n let addr_x: *int = ::cast::transmute(&*x);\n let opt = Some(x);\n let y = opt.unwrap();\n let addr_y: *int = ::cast::transmute(&*y);\n assert_eq!(addr_x, addr_y);\n }\n}\n\n#[test]\nfn test_unwrap_str() {\n let x = ~\"test\";\n let addr_x = str::as_buf(x, |buf, _len| buf);\n let opt = Some(x);\n let y = opt.unwrap();\n let addr_y = str::as_buf(y, |buf, _len| buf);\n assert_eq!(addr_x, addr_y);\n}\n\n#[test]\nfn test_unwrap_resource() {\n struct R {\n i: @mut int,\n }\n\n #[unsafe_destructor]\n impl ::ops::Drop for R {\n fn finalize(&self) { *(self.i) += 1; }\n }\n\n fn R(i: @mut int) -> R {\n R {\n i: i\n }\n }\n\n let i = @mut 0;\n {\n let x = R(i);\n let opt = Some(x);\n let _y = opt.unwrap();\n }\n assert_eq!(*i, 1);\n}\n\n#[test]\nfn test_option_dance() {\n let x = Some(());\n let mut y = Some(5);\n let mut y2 = 0;\n for x.each |_x| {\n y2 = y.swap_unwrap();\n }\n assert_eq!(y2, 5);\n assert!(y.is_none());\n}\n#[test] #[should_fail] #[ignore(cfg(windows))]\nfn test_option_too_much_dance() {\n let mut y = Some(util::NonCopyable::new());\n let _y2 = y.swap_unwrap();\n let _y3 = y.swap_unwrap();\n}\n\n#[test]\nfn test_option_while_some() {\n let mut i = 0;\n do Some(10).while_some |j| {\n i += 1;\n if (j > 0) {\n Some(j-1)\n } else {\n None\n }\n }\n assert_eq!(i, 11);\n}\n\n#[test]\nfn test_get_or_zero() {\n let some_stuff = Some(42);\n assert_eq!(some_stuff.get_or_zero(), 42);\n let no_stuff: Option<int> = None;\n assert_eq!(no_stuff.get_or_zero(), 0);\n}\n<commit_msg>add Iterator implementations for Option<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\nOperations on the ubiquitous `Option` type.\n\nType `Option` represents an optional value.\n\nEvery `Option<T>` value can either be `Some(T)` or `None`. Where in other\nlanguages you might use a nullable type, in Rust you would use an option\ntype.\n\nOptions are most commonly used with pattern matching to query the presence\nof a value and take action, always accounting for the `None` case.\n\n# Example\n\n~~~\nlet msg = Some(~\"howdy\");\n\n\/\/ Take a reference to the contained string\nmatch msg {\n Some(ref m) => io::println(m),\n None => ()\n}\n\n\/\/ Remove the contained string, destroying the Option\nlet unwrapped_msg = match msg {\n Some(m) => m,\n None => ~\"default message\"\n};\n~~~\n\n*\/\n\nuse cmp::{Eq,Ord};\nuse ops::Add;\nuse kinds::Copy;\nuse util;\nuse num::Zero;\nuse old_iter::{BaseIter, MutableIter, ExtendedIter};\nuse old_iter;\nuse iterator::Iterator;\nuse str::StrSlice;\nuse clone::DeepClone;\n\n#[cfg(test)] use str;\n\n\/\/\/ The option type\n#[deriving(Clone, DeepClone, Eq)]\npub enum Option<T> {\n None,\n Some(T),\n}\n\nimpl<T:Ord> Ord for Option<T> {\n fn lt(&self, other: &Option<T>) -> bool {\n match (self, other) {\n (&None, &None) => false,\n (&None, &Some(_)) => true,\n (&Some(_), &None) => false,\n (&Some(ref a), &Some(ref b)) => *a < *b\n }\n }\n\n fn le(&self, other: &Option<T>) -> bool {\n match (self, other) {\n (&None, &None) => true,\n (&None, &Some(_)) => true,\n (&Some(_), &None) => false,\n (&Some(ref a), &Some(ref b)) => *a <= *b\n }\n }\n\n fn ge(&self, other: &Option<T>) -> bool {\n !(self < other)\n }\n\n fn gt(&self, other: &Option<T>) -> bool {\n !(self <= other)\n }\n}\n\nimpl<T: Copy + Add<T,T>> Add<Option<T>, Option<T>> for Option<T> {\n #[inline(always)]\n fn add(&self, other: &Option<T>) -> Option<T> {\n match (*self, *other) {\n (None, None) => None,\n (_, None) => *self,\n (None, _) => *other,\n (Some(ref lhs), Some(ref rhs)) => Some(*lhs + *rhs)\n }\n }\n}\n\nimpl<T> BaseIter<T> for Option<T> {\n \/\/\/ Performs an operation on the contained value by reference\n #[inline(always)]\n fn each<'a>(&'a self, f: &fn(x: &'a T) -> bool) -> bool {\n match *self { None => true, Some(ref t) => { f(t) } }\n }\n\n #[inline(always)]\n fn size_hint(&self) -> Option<uint> {\n if self.is_some() { Some(1) } else { Some(0) }\n }\n}\n\nimpl<T> MutableIter<T> for Option<T> {\n #[inline(always)]\n fn each_mut<'a>(&'a mut self, f: &fn(&'a mut T) -> bool) -> bool {\n match *self { None => true, Some(ref mut t) => { f(t) } }\n }\n}\n\nimpl<A> ExtendedIter<A> for Option<A> {\n pub fn eachi(&self, blk: &fn(uint, v: &A) -> bool) -> bool {\n old_iter::eachi(self, blk)\n }\n pub fn all(&self, blk: &fn(&A) -> bool) -> bool {\n old_iter::all(self, blk)\n }\n pub fn any(&self, blk: &fn(&A) -> bool) -> bool {\n old_iter::any(self, blk)\n }\n pub fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {\n old_iter::foldl(self, b0, blk)\n }\n pub fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {\n old_iter::position(self, f)\n }\n fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {\n old_iter::map_to_vec(self, 
op)\n }\n fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)\n -> ~[B] {\n old_iter::flat_map_to_vec(self, op)\n }\n}\n\nimpl<T> Option<T> {\n #[inline]\n pub fn iter<'r>(&'r self) -> OptionIterator<'r, T> {\n match *self {\n Some(ref x) => OptionIterator{opt: Some(x)},\n None => OptionIterator{opt: None}\n }\n }\n\n #[inline]\n pub fn mut_iter<'r>(&'r mut self) -> OptionMutIterator<'r, T> {\n match *self {\n Some(ref mut x) => OptionMutIterator{opt: Some(x)},\n None => OptionMutIterator{opt: None}\n }\n }\n\n \/\/\/ Returns true if the option equals `none`\n #[inline]\n pub fn is_none(&const self) -> bool {\n match *self { None => true, Some(_) => false }\n }\n\n \/\/\/ Returns true if the option contains some value\n #[inline(always)]\n pub fn is_some(&const self) -> bool { !self.is_none() }\n\n \/\/\/ Update an optional value by optionally running its content through a\n \/\/\/ function that returns an option.\n #[inline(always)]\n pub fn chain<U>(self, f: &fn(t: T) -> Option<U>) -> Option<U> {\n match self {\n Some(t) => f(t),\n None => None\n }\n }\n\n \/\/\/ Returns the leftmost Some() value, or None if both are None.\n #[inline(always)]\n pub fn or(self, optb: Option<T>) -> Option<T> {\n match self {\n Some(opta) => Some(opta),\n _ => optb\n }\n }\n\n \/\/\/ Update an optional value by optionally running its content by reference\n \/\/\/ through a function that returns an option.\n #[inline(always)]\n pub fn chain_ref<'a, U>(&'a self, f: &fn(x: &'a T) -> Option<U>)\n -> Option<U> {\n match *self {\n Some(ref x) => f(x),\n None => None\n }\n }\n\n \/\/\/ Maps a `some` value from one type to another by reference\n #[inline(always)]\n pub fn map<'a, U>(&self, f: &fn(&'a T) -> U) -> Option<U> {\n match *self { Some(ref x) => Some(f(x)), None => None }\n }\n\n \/\/\/ As `map`, but consumes the option and gives `f` ownership to avoid\n \/\/\/ copying.\n #[inline(always)]\n pub fn map_consume<U>(self, f: &fn(v: T) -> U) -> Option<U> {\n match self { None => None, Some(v) => Some(f(v)) }\n }\n\n \/\/\/ Applies a function to the contained value or returns a default\n #[inline(always)]\n pub fn map_default<'a, U>(&'a self, def: U, f: &fn(&'a T) -> U) -> U {\n match *self { None => def, Some(ref t) => f(t) }\n }\n\n \/\/\/ As `map_default`, but consumes the option and gives `f`\n \/\/\/ ownership to avoid copying.\n #[inline(always)]\n pub fn map_consume_default<U>(self, def: U, f: &fn(v: T) -> U) -> U {\n match self { None => def, Some(v) => f(v) }\n }\n\n \/\/\/ Apply a function to the contained value or do nothing\n pub fn mutate(&mut self, f: &fn(T) -> T) {\n if self.is_some() {\n *self = Some(f(self.swap_unwrap()));\n }\n }\n\n \/\/\/ Apply a function to the contained value or set it to a default\n pub fn mutate_default(&mut self, def: T, f: &fn(T) -> T) {\n if self.is_some() {\n *self = Some(f(self.swap_unwrap()));\n } else {\n *self = Some(def);\n }\n }\n\n \/**\n Gets an immutable reference to the value inside an option.\n\n # Failure\n\n Fails if the value equals `None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get_ref<'a>(&'a self) -> &'a T {\n match *self {\n Some(ref x) => x,\n None => fail!(\"option::get_ref none\")\n }\n }\n\n \/**\n Gets a mutable reference to the value inside an option.\n\n # Failure\n\n Fails if the value equals 
`None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get_mut_ref<'a>(&'a mut self) -> &'a mut T {\n match *self {\n Some(ref mut x) => x,\n None => fail!(\"option::get_mut_ref none\")\n }\n }\n\n #[inline(always)]\n pub fn unwrap(self) -> T {\n \/*!\n Moves a value out of an option type and returns it.\n\n Useful primarily for getting strings, vectors and unique pointers out\n of option types without copying them.\n\n # Failure\n\n Fails if the value equals `None`.\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged.\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n match self {\n Some(x) => x,\n None => fail!(\"option::unwrap none\")\n }\n }\n\n \/**\n * The option dance. Moves a value out of an option type and returns it,\n * replacing the original with `None`.\n *\n * # Failure\n *\n * Fails if the value equals `None`.\n *\/\n #[inline(always)]\n pub fn swap_unwrap(&mut self) -> T {\n if self.is_none() { fail!(\"option::swap_unwrap none\") }\n util::replace(self, None).unwrap()\n }\n\n \/**\n * Gets the value out of an option, printing a specified message on\n * failure\n *\n * # Failure\n *\n * Fails if the value equals `none`\n *\/\n #[inline(always)]\n pub fn expect(self, reason: &str) -> T {\n match self {\n Some(val) => val,\n None => fail!(reason.to_owned()),\n }\n }\n}\n\nimpl<T:Copy> Option<T> {\n \/**\n Gets the value out of an option\n\n # Failure\n\n Fails if the value equals `None`\n\n # Safety note\n\n In general, because this function may fail, its use is discouraged\n (calling `get` on `None` is akin to dereferencing a null pointer).\n Instead, prefer to use pattern matching and handle the `None`\n case explicitly.\n *\/\n #[inline(always)]\n pub fn get(self) -> T {\n match self {\n Some(x) => return x,\n None => fail!(\"option::get none\")\n }\n }\n\n \/\/\/ Returns the contained value or a default\n #[inline(always)]\n pub fn get_or_default(self, def: T) -> T {\n match self { Some(x) => x, None => def }\n }\n\n \/\/\/ Applies a function zero or more times until the result is none.\n #[inline(always)]\n pub fn while_some(self, blk: &fn(v: T) -> Option<T>) {\n let mut opt = self;\n while opt.is_some() {\n opt = blk(opt.unwrap());\n }\n }\n}\n\nimpl<T:Copy + Zero> Option<T> {\n \/\/\/ Returns the contained value or zero (for this type)\n #[inline(always)]\n pub fn get_or_zero(self) -> T {\n match self {\n Some(x) => x,\n None => Zero::zero()\n }\n }\n}\n\npub struct OptionIterator<'self, A> {\n priv opt: Option<&'self A>\n}\n\nimpl<'self, A> Iterator<&'self A> for OptionIterator<'self, A> {\n fn next(&mut self) -> Option<&'self A> {\n util::replace(&mut self.opt, None)\n }\n}\n\npub struct OptionMutIterator<'self, A> {\n priv opt: Option<&'self mut A>\n}\n\nimpl<'self, A> Iterator<&'self mut A> for OptionMutIterator<'self, A> {\n fn next(&mut self) -> Option<&'self mut A> {\n util::replace(&mut self.opt, None)\n }\n}\n\n#[test]\nfn test_unwrap_ptr() {\n unsafe {\n let x = ~0;\n let addr_x: *int = ::cast::transmute(&*x);\n let opt = Some(x);\n let y = opt.unwrap();\n let addr_y: *int = ::cast::transmute(&*y);\n assert_eq!(addr_x, addr_y);\n }\n}\n\n#[test]\nfn test_unwrap_str() {\n let x = ~\"test\";\n let addr_x = str::as_buf(x, |buf, _len| buf);\n let opt = Some(x);\n 
let y = opt.unwrap();\n let addr_y = str::as_buf(y, |buf, _len| buf);\n assert_eq!(addr_x, addr_y);\n}\n\n#[test]\nfn test_unwrap_resource() {\n struct R {\n i: @mut int,\n }\n\n #[unsafe_destructor]\n impl ::ops::Drop for R {\n fn finalize(&self) { *(self.i) += 1; }\n }\n\n fn R(i: @mut int) -> R {\n R {\n i: i\n }\n }\n\n let i = @mut 0;\n {\n let x = R(i);\n let opt = Some(x);\n let _y = opt.unwrap();\n }\n assert_eq!(*i, 1);\n}\n\n#[test]\nfn test_option_dance() {\n let x = Some(());\n let mut y = Some(5);\n let mut y2 = 0;\n for x.each |_x| {\n y2 = y.swap_unwrap();\n }\n assert_eq!(y2, 5);\n assert!(y.is_none());\n}\n#[test] #[should_fail] #[ignore(cfg(windows))]\nfn test_option_too_much_dance() {\n let mut y = Some(util::NonCopyable::new());\n let _y2 = y.swap_unwrap();\n let _y3 = y.swap_unwrap();\n}\n\n#[test]\nfn test_option_while_some() {\n let mut i = 0;\n do Some(10).while_some |j| {\n i += 1;\n if (j > 0) {\n Some(j-1)\n } else {\n None\n }\n }\n assert_eq!(i, 11);\n}\n\n#[test]\nfn test_get_or_zero() {\n let some_stuff = Some(42);\n assert_eq!(some_stuff.get_or_zero(), 42);\n let no_stuff: Option<int> = None;\n assert_eq!(no_stuff.get_or_zero(), 0);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A type representing either success or failure\n\n#[allow(missing_doc)];\n\nuse clone::Clone;\nuse cmp::Eq;\nuse either;\nuse either::Either;\nuse iterator::IteratorUtil;\nuse option::{None, Option, Some};\nuse vec;\nuse vec::{OwnedVector, ImmutableVector};\nuse container::Container;\n\n\/\/\/ The result type\n#[deriving(Clone, Eq)]\npub enum Result<T, U> {\n \/\/\/ Contains the successful result value\n Ok(T),\n \/\/\/ Contains the error value\n Err(U)\n}\n\nimpl<T, E> Result<T, E> {\n \/**\n * Convert to the `either` type\n *\n * `Ok` result variants are converted to `either::Right` variants, `Err`\n * result variants are converted to `either::Left`.\n *\/\n #[inline]\n pub fn to_either(self)-> Either<E, T>{\n match self {\n Ok(t) => either::Right(t),\n Err(e) => either::Left(e),\n }\n }\n\n \/**\n * Get a reference to the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\n #[inline]\n pub fn get_ref<'a>(&'a self) -> &'a T {\n match *self {\n Ok(ref t) => t,\n Err(ref e) => fail!(\"get_ref called on `Err` result: %?\", *e),\n }\n }\n\n \/\/\/ Returns true if the result is `Ok`\n #[inline]\n pub fn is_ok(&self) -> bool {\n match *self {\n Ok(_) => true,\n Err(_) => false\n }\n }\n\n \/\/\/ Returns true if the result is `Err`\n #[inline]\n pub fn is_err(&self) -> bool {\n !self.is_ok()\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Err` then it is\n * immediately returned. 
This function can be used to compose the results\n * of two functions.\n *\n * Example:\n *\n * do read_file(file).iter |buf| {\n * print_buf(buf)\n * }\n *\/\n #[inline]\n pub fn iter(&self, f: &fn(&T)) {\n match *self {\n Ok(ref t) => f(t),\n Err(_) => (),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Ok` then it is\n * immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\n #[inline]\n pub fn iter_err(&self, f: &fn(&E)) {\n match *self {\n Ok(_) => (),\n Err(ref e) => f(e),\n }\n }\n\n \/\/\/ Unwraps a result, assuming it is an `Ok(T)`\n #[inline]\n pub fn unwrap(self) -> T {\n match self {\n Ok(t) => t,\n Err(_) => fail!(\"unwrap called on an `Err` result\"),\n }\n }\n\n \/\/\/ Unwraps a result, assuming it is an `Err(U)`\n #[inline]\n pub fn unwrap_err(self) -> E {\n match self {\n Err(e) => e,\n Ok(_) => fail!(\"unwrap called on an `Ok` result\"),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Err` then it is\n * immediately returned. This function can be used to compose the results\n * of two functions.\n *\n * Example:\n *\n * let res = do read_file(file) |buf| {\n * Ok(parse_bytes(buf))\n * };\n *\/\n #[inline]\n pub fn chain<U>(self, op: &fn(T) -> Result<U, E>) -> Result<U, E> {\n match self {\n Ok(t) => op(t),\n Err(e) => Err(e),\n }\n }\n\n \/**\n * Call a function based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Ok` then it is\n * immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\n #[inline]\n pub fn chain_err<F>(self, op: &fn(E) -> Result<T, F>) -> Result<T, F> {\n match self {\n Ok(t) => Ok(t),\n Err(e) => op(e),\n }\n }\n}\n\nimpl<T: Clone, E> Result<T, E> {\n \/**\n * Get the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\n #[inline]\n pub fn get(&self) -> T {\n match *self {\n Ok(ref t) => t.clone(),\n Err(ref e) => fail!(\"get called on `Err` result: %?\", *e),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is wrapped in an `Err` and returned. if `self` is\n * `Ok` then it is immediately returned. This function can be used to pass\n * through a successful result while handling an error.\n *\/\n #[inline]\n pub fn map_err<F:Clone>(&self, op: &fn(&E) -> F) -> Result<T,F> {\n match *self {\n Ok(ref t) => Ok(t.clone()),\n Err(ref e) => Err(op(e))\n }\n }\n}\n\nimpl<T, E: Clone> Result<T, E> {\n \/**\n * Get the value out of an error result\n *\n * # Failure\n *\n * If the result is not an error\n *\/\n #[inline]\n pub fn get_err(&self) -> E {\n match *self {\n Err(ref e) => e.clone(),\n Ok(_) => fail!(\"get_err called on `Ok` result\")\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is wrapped in `Ok` and returned. if `self` is\n * `Err` then it is immediately returned. 
This function can be used to\n * compose the results of two functions.\n *\n * Example:\n *\n * let res = do read_file(file).map |buf| {\n * parse_bytes(buf)\n * };\n *\/\n #[inline]\n pub fn map<U:Clone>(&self, op: &fn(&T) -> U) -> Result<U,E> {\n match *self {\n Ok(ref t) => Ok(op(t)),\n Err(ref e) => Err(e.clone())\n }\n }\n}\n\n\/**\n * Maps each element in the vector `ts` using the operation `op`. Should an\n * error occur, no further mappings are performed and the error is returned.\n * Should no error occur, a vector containing the result of each map is\n * returned.\n *\n * Here is an example which increments every integer in a vector,\n * checking for overflow:\n *\n * fn inc_conditionally(x: uint) -> result<uint,str> {\n * if x == uint::max_value { return Err(\"overflow\"); }\n * else { return Ok(x+1u); }\n * }\n * map(~[1u, 2u, 3u], inc_conditionally).chain {|incd|\n * assert!(incd == ~[2u, 3u, 4u]);\n * }\n *\/\n#[inline]\npub fn map_vec<T,U,V>(ts: &[T], op: &fn(&T) -> Result<V,U>)\n -> Result<~[V],U> {\n let mut vs: ~[V] = vec::with_capacity(ts.len());\n for ts.iter().advance |t| {\n match op(t) {\n Ok(v) => vs.push(v),\n Err(u) => return Err(u)\n }\n }\n return Ok(vs);\n}\n\n#[inline]\n#[allow(missing_doc)]\npub fn map_opt<T,\n U,\n V>(\n o_t: &Option<T>,\n op: &fn(&T) -> Result<V,U>)\n -> Result<Option<V>,U> {\n match *o_t {\n None => Ok(None),\n Some(ref t) => match op(t) {\n Ok(v) => Ok(Some(v)),\n Err(e) => Err(e)\n }\n }\n}\n\n\/**\n * Same as map, but it operates over two parallel vectors.\n *\n * A precondition is used here to ensure that the vectors are the same\n * length. While we do not often use preconditions in the standard\n * library, a precondition is used here because result::t is generally\n * used in 'careful' code contexts where it is both appropriate and easy\n * to accommodate an error like the vectors being of different lengths.\n *\/\n#[inline]\npub fn map_vec2<S,T,U,V>(ss: &[S], ts: &[T],\n op: &fn(&S,&T) -> Result<V,U>) -> Result<~[V],U> {\n\n assert!(vec::same_length(ss, ts));\n let n = ts.len();\n let mut vs = vec::with_capacity(n);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(v) => vs.push(v),\n Err(u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(vs);\n}\n\n\/**\n * Applies op to the pairwise elements from `ss` and `ts`, aborting on\n * error. 
This could be implemented using `map_zip()` but it is more efficient\n * on its own as no result vector is built.\n *\/\n#[inline]\npub fn iter_vec2<S,T,U>(ss: &[S], ts: &[T],\n op: &fn(&S,&T) -> Result<(),U>) -> Result<(),U> {\n\n assert!(vec::same_length(ss, ts));\n let n = ts.len();\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(()) => (),\n Err(u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(());\n}\n\n#[cfg(test)]\nmod tests {\n use result::{Err, Ok, Result};\n use result;\n use either;\n\n pub fn op1() -> result::Result<int, ~str> { result::Ok(666) }\n\n pub fn op2(i: int) -> result::Result<uint, ~str> {\n result::Ok(i as uint + 1u)\n }\n\n pub fn op3() -> result::Result<int, ~str> { result::Err(~\"sadface\") }\n\n #[test]\n pub fn chain_success() {\n assert_eq!(op1().chain(op2).get(), 667u);\n }\n\n #[test]\n pub fn chain_failure() {\n assert_eq!(op3().chain( op2).get_err(), ~\"sadface\");\n }\n\n #[test]\n pub fn test_impl_iter() {\n let mut valid = false;\n Ok::<~str, ~str>(~\"a\").iter(|_x| valid = true);\n assert!(valid);\n\n Err::<~str, ~str>(~\"b\").iter(|_x| valid = false);\n assert!(valid);\n }\n\n #[test]\n pub fn test_impl_iter_err() {\n let mut valid = true;\n Ok::<~str, ~str>(~\"a\").iter_err(|_x| valid = false);\n assert!(valid);\n\n valid = false;\n Err::<~str, ~str>(~\"b\").iter_err(|_x| valid = true);\n assert!(valid);\n }\n\n #[test]\n pub fn test_impl_map() {\n assert_eq!(Ok::<~str, ~str>(~\"a\").map(|_x| ~\"b\"), Ok(~\"b\"));\n assert_eq!(Err::<~str, ~str>(~\"a\").map(|_x| ~\"b\"), Err(~\"a\"));\n }\n\n #[test]\n pub fn test_impl_map_err() {\n assert_eq!(Ok::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\"), Ok(~\"a\"));\n assert_eq!(Err::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\"), Err(~\"b\"));\n }\n\n #[test]\n pub fn test_get_ref_method() {\n let foo: Result<int, ()> = Ok(100);\n assert_eq!(*foo.get_ref(), 100);\n }\n\n #[test]\n pub fn test_to_either() {\n let r: Result<int, ()> = Ok(100);\n let err: Result<(), int> = Err(404);\n\n assert_eq!(r.to_either(), either::Right(100));\n assert_eq!(err.to_either(), either::Left(404));\n }\n}\n<commit_msg>std: cleanup imports in result::tests<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
A type representing either success or failure\n\n#[allow(missing_doc)];\n\nuse clone::Clone;\nuse cmp::Eq;\nuse either;\nuse iterator::IteratorUtil;\nuse option::{None, Option, Some};\nuse vec;\nuse vec::{OwnedVector, ImmutableVector};\nuse container::Container;\n\n\/\/\/ The result type\n#[deriving(Clone, Eq)]\npub enum Result<T, U> {\n \/\/\/ Contains the successful result value\n Ok(T),\n \/\/\/ Contains the error value\n Err(U)\n}\n\nimpl<T, E> Result<T, E> {\n \/**\n * Convert to the `either` type\n *\n * `Ok` result variants are converted to `either::Right` variants, `Err`\n * result variants are converted to `either::Left`.\n *\/\n #[inline]\n pub fn to_either(self)-> either::Either<E, T>{\n match self {\n Ok(t) => either::Right(t),\n Err(e) => either::Left(e),\n }\n }\n\n \/**\n * Get a reference to the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\n #[inline]\n pub fn get_ref<'a>(&'a self) -> &'a T {\n match *self {\n Ok(ref t) => t,\n Err(ref e) => fail!(\"get_ref called on `Err` result: %?\", *e),\n }\n }\n\n \/\/\/ Returns true if the result is `Ok`\n #[inline]\n pub fn is_ok(&self) -> bool {\n match *self {\n Ok(_) => true,\n Err(_) => false\n }\n }\n\n \/\/\/ Returns true if the result is `Err`\n #[inline]\n pub fn is_err(&self) -> bool {\n !self.is_ok()\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Err` then it is\n * immediately returned. This function can be used to compose the results\n * of two functions.\n *\n * Example:\n *\n * do read_file(file).iter |buf| {\n * print_buf(buf)\n * }\n *\/\n #[inline]\n pub fn iter(&self, f: &fn(&T)) {\n match *self {\n Ok(ref t) => f(t),\n Err(_) => (),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Ok` then it is\n * immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\n #[inline]\n pub fn iter_err(&self, f: &fn(&E)) {\n match *self {\n Ok(_) => (),\n Err(ref e) => f(e),\n }\n }\n\n \/\/\/ Unwraps a result, assuming it is an `Ok(T)`\n #[inline]\n pub fn unwrap(self) -> T {\n match self {\n Ok(t) => t,\n Err(_) => fail!(\"unwrap called on an `Err` result\"),\n }\n }\n\n \/\/\/ Unwraps a result, assuming it is an `Err(U)`\n #[inline]\n pub fn unwrap_err(self) -> E {\n match self {\n Err(e) => e,\n Ok(_) => fail!(\"unwrap called on an `Ok` result\"),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Err` then it is\n * immediately returned. This function can be used to compose the results\n * of two functions.\n *\n * Example:\n *\n * let res = do read_file(file) |buf| {\n * Ok(parse_bytes(buf))\n * };\n *\/\n #[inline]\n pub fn chain<U>(self, op: &fn(T) -> Result<U, E>) -> Result<U, E> {\n match self {\n Ok(t) => op(t),\n Err(e) => Err(e),\n }\n }\n\n \/**\n * Call a function based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `self` is `Ok` then it is\n * immediately returned. 
This function can be used to pass through a\n * successful result while handling an error.\n *\/\n #[inline]\n pub fn chain_err<F>(self, op: &fn(E) -> Result<T, F>) -> Result<T, F> {\n match self {\n Ok(t) => Ok(t),\n Err(e) => op(e),\n }\n }\n}\n\nimpl<T: Clone, E> Result<T, E> {\n \/**\n * Get the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\n #[inline]\n pub fn get(&self) -> T {\n match *self {\n Ok(ref t) => t.clone(),\n Err(ref e) => fail!(\"get called on `Err` result: %?\", *e),\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Err` then the value is extracted and passed to `op`\n * whereupon `op`s result is wrapped in an `Err` and returned. if `self` is\n * `Ok` then it is immediately returned. This function can be used to pass\n * through a successful result while handling an error.\n *\/\n #[inline]\n pub fn map_err<F:Clone>(&self, op: &fn(&E) -> F) -> Result<T,F> {\n match *self {\n Ok(ref t) => Ok(t.clone()),\n Err(ref e) => Err(op(e))\n }\n }\n}\n\nimpl<T, E: Clone> Result<T, E> {\n \/**\n * Get the value out of an error result\n *\n * # Failure\n *\n * If the result is not an error\n *\/\n #[inline]\n pub fn get_err(&self) -> E {\n match *self {\n Err(ref e) => e.clone(),\n Ok(_) => fail!(\"get_err called on `Ok` result\")\n }\n }\n\n \/**\n * Call a method based on a previous result\n *\n * If `self` is `Ok` then the value is extracted and passed to `op`\n * whereupon `op`s result is wrapped in `Ok` and returned. if `self` is\n * `Err` then it is immediately returned. This function can be used to\n * compose the results of two functions.\n *\n * Example:\n *\n * let res = do read_file(file).map |buf| {\n * parse_bytes(buf)\n * };\n *\/\n #[inline]\n pub fn map<U:Clone>(&self, op: &fn(&T) -> U) -> Result<U,E> {\n match *self {\n Ok(ref t) => Ok(op(t)),\n Err(ref e) => Err(e.clone())\n }\n }\n}\n\n\/**\n * Maps each element in the vector `ts` using the operation `op`. Should an\n * error occur, no further mappings are performed and the error is returned.\n * Should no error occur, a vector containing the result of each map is\n * returned.\n *\n * Here is an example which increments every integer in a vector,\n * checking for overflow:\n *\n * fn inc_conditionally(x: uint) -> result<uint,str> {\n * if x == uint::max_value { return Err(\"overflow\"); }\n * else { return Ok(x+1u); }\n * }\n * map(~[1u, 2u, 3u], inc_conditionally).chain {|incd|\n * assert!(incd == ~[2u, 3u, 4u]);\n * }\n *\/\n#[inline]\npub fn map_vec<T,U,V>(ts: &[T], op: &fn(&T) -> Result<V,U>)\n -> Result<~[V],U> {\n let mut vs: ~[V] = vec::with_capacity(ts.len());\n for ts.iter().advance |t| {\n match op(t) {\n Ok(v) => vs.push(v),\n Err(u) => return Err(u)\n }\n }\n return Ok(vs);\n}\n\n#[inline]\n#[allow(missing_doc)]\npub fn map_opt<T,\n U,\n V>(\n o_t: &Option<T>,\n op: &fn(&T) -> Result<V,U>)\n -> Result<Option<V>,U> {\n match *o_t {\n None => Ok(None),\n Some(ref t) => match op(t) {\n Ok(v) => Ok(Some(v)),\n Err(e) => Err(e)\n }\n }\n}\n\n\/**\n * Same as map, but it operates over two parallel vectors.\n *\n * A precondition is used here to ensure that the vectors are the same\n * length. 
While we do not often use preconditions in the standard\n * library, a precondition is used here because result::t is generally\n * used in 'careful' code contexts where it is both appropriate and easy\n * to accommodate an error like the vectors being of different lengths.\n *\/\n#[inline]\npub fn map_vec2<S,T,U,V>(ss: &[S], ts: &[T],\n op: &fn(&S,&T) -> Result<V,U>) -> Result<~[V],U> {\n\n assert!(vec::same_length(ss, ts));\n let n = ts.len();\n let mut vs = vec::with_capacity(n);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(v) => vs.push(v),\n Err(u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(vs);\n}\n\n\/**\n * Applies op to the pairwise elements from `ss` and `ts`, aborting on\n * error. This could be implemented using `map_zip()` but it is more efficient\n * on its own as no result vector is built.\n *\/\n#[inline]\npub fn iter_vec2<S,T,U>(ss: &[S], ts: &[T],\n op: &fn(&S,&T) -> Result<(),U>) -> Result<(),U> {\n\n assert!(vec::same_length(ss, ts));\n let n = ts.len();\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(()) => (),\n Err(u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(());\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use either;\n\n pub fn op1() -> Result<int, ~str> { Ok(666) }\n\n pub fn op2(i: int) -> Result<uint, ~str> {\n Ok(i as uint + 1u)\n }\n\n pub fn op3() -> Result<int, ~str> { Err(~\"sadface\") }\n\n #[test]\n pub fn chain_success() {\n assert_eq!(op1().chain(op2).get(), 667u);\n }\n\n #[test]\n pub fn chain_failure() {\n assert_eq!(op3().chain( op2).get_err(), ~\"sadface\");\n }\n\n #[test]\n pub fn test_impl_iter() {\n let mut valid = false;\n Ok::<~str, ~str>(~\"a\").iter(|_x| valid = true);\n assert!(valid);\n\n Err::<~str, ~str>(~\"b\").iter(|_x| valid = false);\n assert!(valid);\n }\n\n #[test]\n pub fn test_impl_iter_err() {\n let mut valid = true;\n Ok::<~str, ~str>(~\"a\").iter_err(|_x| valid = false);\n assert!(valid);\n\n valid = false;\n Err::<~str, ~str>(~\"b\").iter_err(|_x| valid = true);\n assert!(valid);\n }\n\n #[test]\n pub fn test_impl_map() {\n assert_eq!(Ok::<~str, ~str>(~\"a\").map(|_x| ~\"b\"), Ok(~\"b\"));\n assert_eq!(Err::<~str, ~str>(~\"a\").map(|_x| ~\"b\"), Err(~\"a\"));\n }\n\n #[test]\n pub fn test_impl_map_err() {\n assert_eq!(Ok::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\"), Ok(~\"a\"));\n assert_eq!(Err::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\"), Err(~\"b\"));\n }\n\n #[test]\n pub fn test_get_ref_method() {\n let foo: Result<int, ()> = Ok(100);\n assert_eq!(*foo.get_ref(), 100);\n }\n\n #[test]\n pub fn test_to_either() {\n let r: Result<int, ()> = Ok(100);\n let err: Result<(), int> = Err(404);\n\n assert_eq!(r.to_either(), either::Right(100));\n assert_eq!(err.to_either(), either::Left(404));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*! 
The Rust Runtime, including the task scheduler and I\/O\n\nThe `rt` module provides the private runtime infrastructure necessary\nto support core language features like the exchange and local heap,\nthe garbage collector, logging, local data and unwinding. It also\nimplements the default task scheduler and task model. Initialization\nroutines are provided for setting up runtime resources in common\nconfigurations, including that used by `rustc` when generating\nexecutables.\n\nIt is intended that the features provided by `rt` can be factored in a\nway such that the core library can be built with different 'profiles'\nfor different use cases, e.g. excluding the task scheduler. A number\nof runtime features though are critical to the functioning of the\nlanguage and an implementation must be provided regardless of the\nexecution environment.\n\nOf foremost importance is the global exchange heap, in the module\n`global_heap`. Very little practical Rust code can be written without\naccess to the global heap. Unlike most of `rt` the global heap is\ntruly a global resource and generally operates independently of the\nrest of the runtime.\n\nAll other runtime features are task-local, including the local heap,\nthe garbage collector, local storage, logging and the stack unwinder.\n\nThe relationship between `rt` and the rest of the core library is\nnot entirely clear yet and some modules will be moving into or\nout of `rt` as development proceeds.\n\nSeveral modules in `core` are clients of `rt`:\n\n* `std::task` - The user-facing interface to the Rust task model.\n* `std::task::local_data` - The interface to local data.\n* `std::gc` - The garbage collector.\n* `std::unstable::lang` - Miscellaneous lang items, some of which rely on `std::rt`.\n* `std::condition` - Uses local data.\n* `std::cleanup` - Local heap destruction.\n* `std::io` - In the future `std::io` will use an `rt` implementation.\n* `std::logging`\n* `std::pipes`\n* `std::comm`\n* `std::stackwalk`\n\n*\/\n\n\/\/ XXX: this should not be here.\n#[allow(missing_doc)];\n\nuse cell::Cell;\nuse clone::Clone;\nuse container::Container;\nuse iter::Iterator;\nuse option::{Option, None, Some};\nuse ptr::RawPtr;\nuse rt::local::Local;\nuse rt::sched::{Scheduler, Shutdown};\nuse rt::sleeper_list::SleeperList;\nuse rt::task::{Task, SchedTask, GreenTask, Sched};\nuse rt::uv::uvio::UvEventLoop;\nuse unstable::atomics::{AtomicInt, AtomicBool, SeqCst};\nuse unstable::sync::UnsafeArc;\nuse vec;\nuse vec::{OwnedVector, MutableVector, ImmutableVector};\n\nuse self::thread::Thread;\nuse self::work_queue::WorkQueue;\n\n\/\/ the os module needs to reach into this helper, so allow general access\n\/\/ through this reexport.\npub use self::util::set_exit_status;\n\n\/\/ this is somewhat useful when a program wants to spawn a \"reasonable\" number\n\/\/ of workers based on the constraints of the system that it's running on.\n\/\/ Perhaps this shouldn't be a `pub use` though and there should be another\n\/\/ method...\npub use self::util::default_sched_threads;\n\n\/\/ XXX: these probably shouldn't be public...\n#[doc(hidden)]\npub mod shouldnt_be_public {\n pub use super::sched::Scheduler;\n pub use super::kill::KillHandle;\n pub use super::thread::Thread;\n pub use super::work_queue::WorkQueue;\n pub use super::select::SelectInner;\n pub use super::rtio::EventLoop;\n pub use super::select::{SelectInner, SelectPortInner};\n pub use super::local_ptr::maybe_tls_key;\n}\n\n\/\/ Internal macros used by the runtime.\nmod macros;\n\n\/\/\/ The global (exchange) 
heap.\npub mod global_heap;\n\n\/\/\/ Implementations of language-critical runtime features like @.\npub mod task;\n\n\/\/\/ Facilities related to task failure, killing, and death.\nmod kill;\n\n\/\/\/ The coroutine task scheduler, built on the `io` event loop.\nmod sched;\n\n\/\/\/ Synchronous I\/O.\npub mod io;\n\n\/\/\/ The EventLoop and internal synchronous I\/O interface.\nmod rtio;\n\n\/\/\/ libuv and default rtio implementation.\npub mod uv;\n\n\/\/\/ The Local trait for types that are accessible via thread-local\n\/\/\/ or task-local storage.\npub mod local;\n\n\/\/\/ A parallel work-stealing deque.\nmod work_queue;\n\n\/\/\/ A parallel queue.\nmod message_queue;\n\n\/\/\/ A parallel data structure for tracking sleeping schedulers.\nmod sleeper_list;\n\n\/\/\/ Stack segments and caching.\npub mod stack;\n\n\/\/\/ CPU context swapping.\nmod context;\n\n\/\/\/ Bindings to system threading libraries.\nmod thread;\n\n\/\/\/ The runtime configuration, read from environment variables.\npub mod env;\n\n\/\/\/ The local, managed heap\npub mod local_heap;\n\n\/\/\/ The Logger trait and implementations\npub mod logging;\n\n\/\/\/ Crate map\npub mod crate_map;\n\n\/\/\/ Tools for testing the runtime\npub mod test;\n\n\/\/\/ Reference counting\npub mod rc;\n\n\/\/\/ A simple single-threaded channel type for passing buffered data between\n\/\/\/ scheduler and task context\npub mod tube;\n\n\/\/\/ Simple reimplementation of std::comm\npub mod comm;\n\nmod select;\n\n\/\/\/ The runtime needs to be able to put a pointer into thread-local storage.\nmod local_ptr;\n\n\/\/\/ Bindings to pthread\/windows thread-local storage.\nmod thread_local_storage;\n\n\/\/\/ Just stuff\nmod util;\n\n\/\/ Global command line argument storage\npub mod args;\n\n\/\/ Support for dynamic borrowck\npub mod borrowck;\n\n\/\/\/ Set up a default runtime configuration, given compiler-supplied arguments.\n\/\/\/\n\/\/\/ This is invoked by the `start` _language item_ (unstable::lang) to\n\/\/\/ run a Rust executable.\n\/\/\/\n\/\/\/ # Arguments\n\/\/\/\n\/\/\/ * `argc` & `argv` - The argument vector. On Unix this information is used\n\/\/\/ by os::args.\n\/\/\/\n\/\/\/ # Return value\n\/\/\/\n\/\/\/ The return value is used as the process return code. 
0 on success, 101 on error.\npub fn start(argc: int, argv: **u8, main: ~fn()) -> int {\n\n init(argc, argv);\n let exit_code = run(main);\n cleanup();\n\n return exit_code;\n}\n\n\/\/\/ Like `start` but creates an additional scheduler on the current thread,\n\/\/\/ which in most cases will be the 'main' thread, and pins the main task to it.\n\/\/\/\n\/\/\/ This is appropriate for running code that must execute on the main thread,\n\/\/\/ such as the platform event loop and GUI.\npub fn start_on_main_thread(argc: int, argv: **u8, main: ~fn()) -> int {\n init(argc, argv);\n let exit_code = run_on_main_thread(main);\n cleanup();\n\n return exit_code;\n}\n\n\/\/\/ One-time runtime initialization.\n\/\/\/\n\/\/\/ Initializes global state, including frobbing\n\/\/\/ the crate's logging flags, registering GC\n\/\/\/ metadata, and storing the process arguments.\npub fn init(argc: int, argv: **u8) {\n \/\/ XXX: Derefing these pointers is not safe.\n \/\/ Need to propagate the unsafety to `start`.\n unsafe {\n args::init(argc, argv);\n env::init();\n logging::init();\n }\n}\n\n\/\/\/ One-time runtime cleanup.\npub fn cleanup() {\n args::cleanup();\n}\n\n\/\/\/ Execute the main function in a scheduler.\n\/\/\/\n\/\/\/ Configures the runtime according to the environment, by default\n\/\/\/ using a task scheduler with the same number of threads as cores.\n\/\/\/ Returns a process exit code.\npub fn run(main: ~fn()) -> int {\n run_(main, false)\n}\n\npub fn run_on_main_thread(main: ~fn()) -> int {\n run_(main, true)\n}\n\nfn run_(main: ~fn(), use_main_sched: bool) -> int {\n static DEFAULT_ERROR_CODE: int = 101;\n\n let nscheds = util::default_sched_threads();\n\n let main = Cell::new(main);\n\n \/\/ The shared list of sleeping schedulers.\n let sleepers = SleeperList::new();\n\n \/\/ Create a work queue for each scheduler, ntimes. Create an extra\n \/\/ for the main thread if that flag is set. We won't steal from it.\n let work_queues: ~[WorkQueue<~Task>] = vec::from_fn(nscheds, |_| WorkQueue::new());\n\n \/\/ The schedulers.\n let mut scheds = ~[];\n \/\/ Handles to the schedulers. 
When the main task ends these will be\n \/\/ sent the Shutdown message to terminate the schedulers.\n let mut handles = ~[];\n\n for work_queue in work_queues.iter() {\n rtdebug!(\"inserting a regular scheduler\");\n\n \/\/ Every scheduler is driven by an I\/O event loop.\n let loop_ = ~UvEventLoop::new();\n let mut sched = ~Scheduler::new(loop_,\n work_queue.clone(),\n work_queues.clone(),\n sleepers.clone());\n let handle = sched.make_handle();\n\n scheds.push(sched);\n handles.push(handle);\n }\n\n \/\/ If we need a main-thread task then create a main thread scheduler\n \/\/ that will reject any task that isn't pinned to it\n let main_sched = if use_main_sched {\n\n \/\/ Create a friend handle.\n let mut friend_sched = scheds.pop();\n let friend_handle = friend_sched.make_handle();\n scheds.push(friend_sched);\n\n \/\/ This scheduler needs a queue that isn't part of the stealee\n \/\/ set.\n let work_queue = WorkQueue::new();\n\n let main_loop = ~UvEventLoop::new();\n let mut main_sched = ~Scheduler::new_special(main_loop,\n work_queue,\n work_queues.clone(),\n sleepers.clone(),\n false,\n Some(friend_handle));\n let main_handle = main_sched.make_handle();\n handles.push(main_handle);\n Some(main_sched)\n } else {\n None\n };\n\n \/\/ Create a shared cell for transmitting the process exit\n \/\/ code from the main task to this function.\n let exit_code = UnsafeArc::new(AtomicInt::new(0));\n let exit_code_clone = exit_code.clone();\n\n \/\/ Used to sanity check that the runtime only exits once\n let exited_already = UnsafeArc::new(AtomicBool::new(false));\n\n \/\/ When the main task exits, after all the tasks in the main\n \/\/ task tree, shut down the schedulers and set the exit code.\n let handles = Cell::new(handles);\n let on_exit: ~fn(bool) = |exit_success| {\n unsafe {\n assert!(!(*exited_already.get()).swap(true, SeqCst),\n \"the runtime already exited\");\n }\n\n let mut handles = handles.take();\n for handle in handles.mut_iter() {\n handle.send(Shutdown);\n }\n\n unsafe {\n let exit_code = if exit_success {\n use rt::util;\n\n \/\/ If we're exiting successfully, then return the global\n \/\/ exit status, which can be set programmatically.\n util::get_exit_status()\n } else {\n DEFAULT_ERROR_CODE\n };\n (*exit_code_clone.get()).store(exit_code, SeqCst);\n }\n };\n\n let mut threads = ~[];\n\n let on_exit = Cell::new(on_exit);\n\n if !use_main_sched {\n\n \/\/ In the case where we do not use a main_thread scheduler we\n \/\/ run the main task in one of our threads.\n\n let mut main_task = ~Task::new_root(&mut scheds[0].stack_pool, None, main.take());\n main_task.death.on_exit = Some(on_exit.take());\n let main_task_cell = Cell::new(main_task);\n\n let sched = scheds.pop();\n let sched_cell = Cell::new(sched);\n let thread = do Thread::start {\n let sched = sched_cell.take();\n sched.bootstrap(main_task_cell.take());\n };\n threads.push(thread);\n }\n\n \/\/ Run each remaining scheduler in a thread.\n for sched in scheds.move_rev_iter() {\n rtdebug!(\"creating regular schedulers\");\n let sched_cell = Cell::new(sched);\n let thread = do Thread::start {\n let mut sched = sched_cell.take();\n let bootstrap_task = ~do Task::new_root(&mut sched.stack_pool, None) || {\n rtdebug!(\"boostraping a non-primary scheduler\");\n };\n sched.bootstrap(bootstrap_task);\n };\n threads.push(thread);\n }\n\n \/\/ If we do have a main thread scheduler, run it now.\n\n if use_main_sched {\n\n rtdebug!(\"about to create the main scheduler task\");\n\n let mut main_sched = main_sched.unwrap();\n\n let 
home = Sched(main_sched.make_handle());\n let mut main_task = ~Task::new_root_homed(&mut main_sched.stack_pool, None,\n home, main.take());\n main_task.death.on_exit = Some(on_exit.take());\n rtdebug!(\"bootstrapping main_task\");\n\n main_sched.bootstrap(main_task);\n }\n\n rtdebug!(\"waiting for threads\");\n\n \/\/ Wait for schedulers\n for thread in threads.move_iter() {\n thread.join();\n }\n\n \/\/ Return the exit code\n unsafe {\n (*exit_code.get()).load(SeqCst)\n }\n}\n\npub fn in_sched_context() -> bool {\n unsafe {\n let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();\n match task_ptr {\n Some(task) => {\n match (*task).task_type {\n SchedTask => true,\n _ => false\n }\n }\n None => false\n }\n }\n}\n\npub fn in_green_task_context() -> bool {\n unsafe {\n let task: Option<*mut Task> = Local::try_unsafe_borrow();\n match task {\n Some(task) => {\n match (*task).task_type {\n GreenTask(_) => true,\n _ => false\n }\n }\n None => false\n }\n }\n}\n<commit_msg>std::rt: Fix the set up of the main thread so that it doesn't try to steal work<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*! The Rust Runtime, including the task scheduler and I\/O\n\nThe `rt` module provides the private runtime infrastructure necessary\nto support core language features like the exchange and local heap,\nthe garbage collector, logging, local data and unwinding. It also\nimplements the default task scheduler and task model. Initialization\nroutines are provided for setting up runtime resources in common\nconfigurations, including that used by `rustc` when generating\nexecutables.\n\nIt is intended that the features provided by `rt` can be factored in a\nway such that the core library can be built with different 'profiles'\nfor different use cases, e.g. excluding the task scheduler. A number\nof runtime features though are critical to the functioning of the\nlanguage and an implementation must be provided regardless of the\nexecution environment.\n\nOf foremost importance is the global exchange heap, in the module\n`global_heap`. Very little practical Rust code can be written without\naccess to the global heap. 
Unlike most of `rt` the global heap is\ntruly a global resource and generally operates independently of the\nrest of the runtime.\n\nAll other runtime features are task-local, including the local heap,\nthe garbage collector, local storage, logging and the stack unwinder.\n\nThe relationship between `rt` and the rest of the core library is\nnot entirely clear yet and some modules will be moving into or\nout of `rt` as development proceeds.\n\nSeveral modules in `core` are clients of `rt`:\n\n* `std::task` - The user-facing interface to the Rust task model.\n* `std::task::local_data` - The interface to local data.\n* `std::gc` - The garbage collector.\n* `std::unstable::lang` - Miscellaneous lang items, some of which rely on `std::rt`.\n* `std::condition` - Uses local data.\n* `std::cleanup` - Local heap destruction.\n* `std::io` - In the future `std::io` will use an `rt` implementation.\n* `std::logging`\n* `std::pipes`\n* `std::comm`\n* `std::stackwalk`\n\n*\/\n\n\/\/ XXX: this should not be here.\n#[allow(missing_doc)];\n\nuse cell::Cell;\nuse clone::Clone;\nuse container::Container;\nuse iter::Iterator;\nuse option::{Option, None, Some};\nuse ptr::RawPtr;\nuse rt::local::Local;\nuse rt::sched::{Scheduler, Shutdown};\nuse rt::sleeper_list::SleeperList;\nuse rt::task::{Task, SchedTask, GreenTask, Sched};\nuse rt::uv::uvio::UvEventLoop;\nuse unstable::atomics::{AtomicInt, AtomicBool, SeqCst};\nuse unstable::sync::UnsafeArc;\nuse vec;\nuse vec::{OwnedVector, MutableVector, ImmutableVector};\n\nuse self::thread::Thread;\nuse self::work_queue::WorkQueue;\n\n\/\/ the os module needs to reach into this helper, so allow general access\n\/\/ through this reexport.\npub use self::util::set_exit_status;\n\n\/\/ this is somewhat useful when a program wants to spawn a \"reasonable\" number\n\/\/ of workers based on the constraints of the system that it's running on.\n\/\/ Perhaps this shouldn't be a `pub use` though and there should be another\n\/\/ method...\npub use self::util::default_sched_threads;\n\n\/\/ XXX: these probably shouldn't be public...\n#[doc(hidden)]\npub mod shouldnt_be_public {\n pub use super::sched::Scheduler;\n pub use super::kill::KillHandle;\n pub use super::thread::Thread;\n pub use super::work_queue::WorkQueue;\n pub use super::select::SelectInner;\n pub use super::rtio::EventLoop;\n pub use super::select::{SelectInner, SelectPortInner};\n pub use super::local_ptr::maybe_tls_key;\n}\n\n\/\/ Internal macros used by the runtime.\nmod macros;\n\n\/\/\/ The global (exchange) heap.\npub mod global_heap;\n\n\/\/\/ Implementations of language-critical runtime features like @.\npub mod task;\n\n\/\/\/ Facilities related to task failure, killing, and death.\nmod kill;\n\n\/\/\/ The coroutine task scheduler, built on the `io` event loop.\nmod sched;\n\n\/\/\/ Synchronous I\/O.\npub mod io;\n\n\/\/\/ The EventLoop and internal synchronous I\/O interface.\nmod rtio;\n\n\/\/\/ libuv and default rtio implementation.\npub mod uv;\n\n\/\/\/ The Local trait for types that are accessible via thread-local\n\/\/\/ or task-local storage.\npub mod local;\n\n\/\/\/ A parallel work-stealing deque.\nmod work_queue;\n\n\/\/\/ A parallel queue.\nmod message_queue;\n\n\/\/\/ A parallel data structure for tracking sleeping schedulers.\nmod sleeper_list;\n\n\/\/\/ Stack segments and caching.\npub mod stack;\n\n\/\/\/ CPU context swapping.\nmod context;\n\n\/\/\/ Bindings to system threading libraries.\nmod thread;\n\n\/\/\/ The runtime configuration, read from environment variables.\npub mod 
env;\n\n\/\/\/ The local, managed heap\npub mod local_heap;\n\n\/\/\/ The Logger trait and implementations\npub mod logging;\n\n\/\/\/ Crate map\npub mod crate_map;\n\n\/\/\/ Tools for testing the runtime\npub mod test;\n\n\/\/\/ Reference counting\npub mod rc;\n\n\/\/\/ A simple single-threaded channel type for passing buffered data between\n\/\/\/ scheduler and task context\npub mod tube;\n\n\/\/\/ Simple reimplementation of std::comm\npub mod comm;\n\nmod select;\n\n\/\/\/ The runtime needs to be able to put a pointer into thread-local storage.\nmod local_ptr;\n\n\/\/\/ Bindings to pthread\/windows thread-local storage.\nmod thread_local_storage;\n\n\/\/\/ Just stuff\nmod util;\n\n\/\/ Global command line argument storage\npub mod args;\n\n\/\/ Support for dynamic borrowck\npub mod borrowck;\n\n\/\/\/ Set up a default runtime configuration, given compiler-supplied arguments.\n\/\/\/\n\/\/\/ This is invoked by the `start` _language item_ (unstable::lang) to\n\/\/\/ run a Rust executable.\n\/\/\/\n\/\/\/ # Arguments\n\/\/\/\n\/\/\/ * `argc` & `argv` - The argument vector. On Unix this information is used\n\/\/\/ by os::args.\n\/\/\/\n\/\/\/ # Return value\n\/\/\/\n\/\/\/ The return value is used as the process return code. 0 on success, 101 on error.\npub fn start(argc: int, argv: **u8, main: ~fn()) -> int {\n\n init(argc, argv);\n let exit_code = run(main);\n cleanup();\n\n return exit_code;\n}\n\n\/\/\/ Like `start` but creates an additional scheduler on the current thread,\n\/\/\/ which in most cases will be the 'main' thread, and pins the main task to it.\n\/\/\/\n\/\/\/ This is appropriate for running code that must execute on the main thread,\n\/\/\/ such as the platform event loop and GUI.\npub fn start_on_main_thread(argc: int, argv: **u8, main: ~fn()) -> int {\n init(argc, argv);\n let exit_code = run_on_main_thread(main);\n cleanup();\n\n return exit_code;\n}\n\n\/\/\/ One-time runtime initialization.\n\/\/\/\n\/\/\/ Initializes global state, including frobbing\n\/\/\/ the crate's logging flags, registering GC\n\/\/\/ metadata, and storing the process arguments.\npub fn init(argc: int, argv: **u8) {\n \/\/ XXX: Derefing these pointers is not safe.\n \/\/ Need to propagate the unsafety to `start`.\n unsafe {\n args::init(argc, argv);\n env::init();\n logging::init();\n }\n}\n\n\/\/\/ One-time runtime cleanup.\npub fn cleanup() {\n args::cleanup();\n}\n\n\/\/\/ Execute the main function in a scheduler.\n\/\/\/\n\/\/\/ Configures the runtime according to the environment, by default\n\/\/\/ using a task scheduler with the same number of threads as cores.\n\/\/\/ Returns a process exit code.\npub fn run(main: ~fn()) -> int {\n run_(main, false)\n}\n\npub fn run_on_main_thread(main: ~fn()) -> int {\n run_(main, true)\n}\n\nfn run_(main: ~fn(), use_main_sched: bool) -> int {\n static DEFAULT_ERROR_CODE: int = 101;\n\n let nscheds = util::default_sched_threads();\n\n let main = Cell::new(main);\n\n \/\/ The shared list of sleeping schedulers.\n let sleepers = SleeperList::new();\n\n \/\/ Create a work queue for each scheduler, ntimes. Create an extra\n \/\/ for the main thread if that flag is set. We won't steal from it.\n let work_queues: ~[WorkQueue<~Task>] = vec::from_fn(nscheds, |_| WorkQueue::new());\n\n \/\/ The schedulers.\n let mut scheds = ~[];\n \/\/ Handles to the schedulers. 
When the main task ends these will be\n \/\/ sent the Shutdown message to terminate the schedulers.\n let mut handles = ~[];\n\n for work_queue in work_queues.iter() {\n rtdebug!(\"inserting a regular scheduler\");\n\n \/\/ Every scheduler is driven by an I\/O event loop.\n let loop_ = ~UvEventLoop::new();\n let mut sched = ~Scheduler::new(loop_,\n work_queue.clone(),\n work_queues.clone(),\n sleepers.clone());\n let handle = sched.make_handle();\n\n scheds.push(sched);\n handles.push(handle);\n }\n\n \/\/ If we need a main-thread task then create a main thread scheduler\n \/\/ that will reject any task that isn't pinned to it\n let main_sched = if use_main_sched {\n\n \/\/ Create a friend handle.\n let mut friend_sched = scheds.pop();\n let friend_handle = friend_sched.make_handle();\n scheds.push(friend_sched);\n\n \/\/ This scheduler needs a queue that isn't part of the stealee\n \/\/ set.\n let work_queue = WorkQueue::new();\n\n let main_loop = ~UvEventLoop::new();\n let mut main_sched = ~Scheduler::new_special(main_loop,\n work_queue,\n work_queues.clone(),\n sleepers.clone(),\n false,\n Some(friend_handle));\n let mut main_handle = main_sched.make_handle();\n \/\/ Allow the scheduler to exit when the main task exits.\n \/\/ Note: sending the shutdown message also prevents the scheduler\n \/\/ from pushing itself to the sleeper list, which is used for\n \/\/ waking up schedulers for work stealing; since this is a\n \/\/ non-work-stealing scheduler it should not be adding itself\n \/\/ to the list.\n main_handle.send_shutdown();\n Some(main_sched)\n } else {\n None\n };\n\n \/\/ Create a shared cell for transmitting the process exit\n \/\/ code from the main task to this function.\n let exit_code = UnsafeArc::new(AtomicInt::new(0));\n let exit_code_clone = exit_code.clone();\n\n \/\/ Used to sanity check that the runtime only exits once\n let exited_already = UnsafeArc::new(AtomicBool::new(false));\n\n \/\/ When the main task exits, after all the tasks in the main\n \/\/ task tree, shut down the schedulers and set the exit code.\n let handles = Cell::new(handles);\n let on_exit: ~fn(bool) = |exit_success| {\n unsafe {\n assert!(!(*exited_already.get()).swap(true, SeqCst),\n \"the runtime already exited\");\n }\n\n let mut handles = handles.take();\n for handle in handles.mut_iter() {\n handle.send(Shutdown);\n }\n\n unsafe {\n let exit_code = if exit_success {\n use rt::util;\n\n \/\/ If we're exiting successfully, then return the global\n \/\/ exit status, which can be set programmatically.\n util::get_exit_status()\n } else {\n DEFAULT_ERROR_CODE\n };\n (*exit_code_clone.get()).store(exit_code, SeqCst);\n }\n };\n\n let mut threads = ~[];\n\n let on_exit = Cell::new(on_exit);\n\n if !use_main_sched {\n\n \/\/ In the case where we do not use a main_thread scheduler we\n \/\/ run the main task in one of our threads.\n\n let mut main_task = ~Task::new_root(&mut scheds[0].stack_pool, None, main.take());\n main_task.death.on_exit = Some(on_exit.take());\n let main_task_cell = Cell::new(main_task);\n\n let sched = scheds.pop();\n let sched_cell = Cell::new(sched);\n let thread = do Thread::start {\n let sched = sched_cell.take();\n sched.bootstrap(main_task_cell.take());\n };\n threads.push(thread);\n }\n\n \/\/ Run each remaining scheduler in a thread.\n for sched in scheds.move_rev_iter() {\n rtdebug!(\"creating regular schedulers\");\n let sched_cell = Cell::new(sched);\n let thread = do Thread::start {\n let mut sched = sched_cell.take();\n let bootstrap_task = ~do Task::new_root(&mut 
sched.stack_pool, None) || {\n rtdebug!(\"boostraping a non-primary scheduler\");\n };\n sched.bootstrap(bootstrap_task);\n };\n threads.push(thread);\n }\n\n \/\/ If we do have a main thread scheduler, run it now.\n\n if use_main_sched {\n\n rtdebug!(\"about to create the main scheduler task\");\n\n let mut main_sched = main_sched.unwrap();\n\n let home = Sched(main_sched.make_handle());\n let mut main_task = ~Task::new_root_homed(&mut main_sched.stack_pool, None,\n home, main.take());\n main_task.death.on_exit = Some(on_exit.take());\n rtdebug!(\"bootstrapping main_task\");\n\n main_sched.bootstrap(main_task);\n }\n\n rtdebug!(\"waiting for threads\");\n\n \/\/ Wait for schedulers\n for thread in threads.move_iter() {\n thread.join();\n }\n\n \/\/ Return the exit code\n unsafe {\n (*exit_code.get()).load(SeqCst)\n }\n}\n\npub fn in_sched_context() -> bool {\n unsafe {\n let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();\n match task_ptr {\n Some(task) => {\n match (*task).task_type {\n SchedTask => true,\n _ => false\n }\n }\n None => false\n }\n }\n}\n\npub fn in_green_task_context() -> bool {\n unsafe {\n let task: Option<*mut Task> = Local::try_unsafe_borrow();\n match task {\n Some(task) => {\n match (*task).task_type {\n GreenTask(_) => true,\n _ => false\n }\n }\n None => false\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Benchmark to ensure our complicated code is actually faster<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue #17252<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstatic FOO: uint = FOO; \/\/~ ERROR recursive constant\n\nfn main() {\n let _x: [u8, ..FOO]; \/\/ caused stack overflow prior to fix\n let _y: uint = 1 + {\n static BAR: uint = BAR; \/\/~ ERROR recursive constant\n let _z: [u8, ..BAR]; \/\/ caused stack overflow prior to fix\n 1\n };\n}\n<|endoftext|>"} {"text":"<commit_before>#![cfg(feature = \"cloudwatch\")]\n\nextern crate rusoto_core;\nextern crate rusoto_cloudwatch;\n\nuse rusoto_cloudwatch::{CloudWatch, CloudWatchClient, PutMetricDataInput, Dimension, MetricDatum};\nuse rusoto_core::{DefaultCredentialsProvider, Region};\nuse rusoto_core::default_tls_client;\n\n#[test]\nfn should_put_metric_data() {\n let client = CloudWatchClient::new(default_tls_client().unwrap(),\n DefaultCredentialsProvider::new().unwrap(),\n Region::UsEast1);\n\n let metric_data = vec![MetricDatum {\n dimensions: Some(vec![Dimension {\n name: \"foo\".to_string(),\n value: \"bar\".to_string(),\n }]),\n metric_name: \"buffers\".to_string(),\n statistic_values: None,\n timestamp: None,\n unit: Some(\"Bytes\".to_string()),\n value: Some(1.0),\n }];\n let request = PutMetricDataInput {\n namespace: \"TestNamespace\".to_string(),\n metric_data: metric_data,\n };\n\n let response = client.put_metric_data(&request).unwrap();\n println!(\"{:#?}\", response);\n}\n<commit_msg>fix cloudwatch integration test<commit_after>#![cfg(feature = \"cloudwatch\")]\n\nextern crate rusoto_core;\nextern crate rusoto_cloudwatch;\n\nuse rusoto_cloudwatch::{CloudWatch, CloudWatchClient, PutMetricDataInput, Dimension, MetricDatum};\nuse rusoto_core::{DefaultCredentialsProvider, Region};\nuse rusoto_core::default_tls_client;\n\n#[test]\nfn should_put_metric_data() {\n let client = CloudWatchClient::new(default_tls_client().unwrap(),\n DefaultCredentialsProvider::new().unwrap(),\n Region::UsEast1);\n\n let metric_data = vec![MetricDatum {\n dimensions: Some(vec![Dimension {\n name: \"foo\".to_string(),\n value: \"bar\".to_string(),\n }]),\n metric_name: \"buffers\".to_string(),\n statistic_values: None,\n timestamp: None,\n unit: Some(\"Bytes\".to_string()),\n value: Some(1.0),\n ..Default::default()\n }];\n let request = PutMetricDataInput {\n namespace: \"TestNamespace\".to_string(),\n metric_data: metric_data,\n };\n\n let response = client.put_metric_data(&request).unwrap();\n println!(\"{:#?}\", response);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>simplify `from_elem` and add `from_fn`<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(resolve_trait_on_defaulted_unit)]\n\ntrait Deserialize {\n fn deserialize() -> Result<Self, String>\n}\n\nfn doit() -> Result<(), String> {\n let _ = Deserialize::deserialize()?;\n \/\/~^ ERROR attempt to resolve a trait\n Ok(())\n}\n\nfn main() {\n doit();\n}\n\n<commit_msg>Fix test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(resolve_trait_on_defaulted_unit)]\n\ntrait Deserialize: Sized {\n fn deserialize() -> Result<Self, String>;\n}\n\nimpl Deserialize for () {\n fn deserialize() -> Result<(), String> {\n Ok(())\n }\n}\n\nfn doit() -> Result<(), String> {\n let _ = Deserialize::deserialize()?;\n \/\/~^ ERROR code relies on type\n \/\/~| WARNING previously accepted\n Ok(())\n}\n\nfn main() {\n let _ = doit();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>clone<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate core;\n\nuse std::io;\nuse std::fs;\nuse std::fs::PathExt;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::collections::HashMap;\nuse self::core::str::StrExt;\nuse std::ffi::OsStr;\nuse liquid::Value;\n\nuse document::Document;\nuse util;\n\npub fn build(source: &Path, dest: &Path, layout_str: &str, posts_str: &str) -> io::Result<()>{\n \/\/ TODO make configurable\n let template_extensions = [OsStr::new(\"tpl\"), OsStr::new(\"md\")];\n\n let layouts_path = source.join(layout_str);\n let posts_path = source.join(posts_str);\n\n let mut layouts : HashMap<String, String> = HashMap::new();\n\n \/\/ go through the layout directory and add\n \/\/ filename -> text content to the layout map\n match fs::walk_dir(&layouts_path) {\n Ok(files) => for layout in files {\n let layout = try!(layout).path();\n if layout.is_file() {\n let mut text = String::new();\n try!(try!(File::open(&layout)).read_to_string(&mut text));\n layouts.insert(layout.as_path().file_name().unwrap().to_str().unwrap().to_string(), text);\n }\n },\n Err(_) => println!(\"Warning: No layout path found ({})\\n\", source.display())\n };\n\n let mut documents = vec![];\n let mut post_data = vec![];\n\n \/\/ walk source directory and find files that are written in\n \/\/ a template file extension\n for p in try!(fs::walk_dir(source)) {\n let p = p.unwrap().path();\n let path = p.as_path();\n \/\/ check for file extensions\n if template_extensions.contains(&path.extension().unwrap_or(OsStr::new(\"\")))\n \/\/ check that file is not in the layouts folder\n && path.parent() != Some(layouts_path.as_path()) {\n let doc = parse_document(&path, source);\n if path.parent() == Some(posts_path.as_path()){\n post_data.push(Value::Object(doc.get_attributes()));\n }\n documents.push(doc);\n }\n }\n\n for doc in documents.iter() {\n try!(doc.create_file(dest, &layouts, &post_data));\n }\n\n \/\/ copy everything\n if source != dest {\n try!(util::copy_recursive_filter(source, dest, &|p| -> bool {\n !p.file_name().unwrap().to_str().unwrap_or(\"\").starts_with(\".\")\n && !template_extensions.contains(&p.extension().unwrap_or(OsStr::new(\"\")))\n && p != dest\n && p != layouts_path.as_path()\n }));\n }\n\n Ok(())\n}\n\nfn parse_document(path: &Path, source: &Path) -> Document {\n let attributes = extract_attributes(path);\n let content = extract_content(path).unwrap();\n let new_path = path.relative_from(source).unwrap();\n \/\/ let markdown = path.extension().unwrap_or(OsStr::new(\"\")) == OsStr::new(\"md\");\n\n Document::new(\n 
new_path.to_str().unwrap().to_string(),\n attributes,\n content,\n \/\/ markdown\n )\n}\n\nfn parse_file(path: &Path) -> io::Result<String> {\n let mut file = try!(File::open(path));\n let mut text = String::new();\n try!(file.read_to_string(&mut text));\n Ok(text)\n}\n\nfn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.file_stem().unwrap().to_str().unwrap().to_string());\n\n let content = parse_file(path).unwrap();\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n let attribute_string = content_splits.nth(0).unwrap();\n\n for attribute_line in attribute_string.split(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let mut attribute_split = attribute_line.split(':');\n\n \/\/ TODO: Refactor, find a better way for doing this\n \/\/ .nth() method is consuming the iterator and therefore the 0th index on the second method\n \/\/ is in real index 1\n let key = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n let value = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n}\n\nfn extract_content(path: &Path) -> io::Result<String> {\n let content = try!(parse_file(path));\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n return Ok(content_splits.nth(1).unwrap().to_string());\n }\n\n return Ok(content);\n}\n<commit_msg>Do not try to parse markdown files (currently commented out)<commit_after>extern crate core;\n\nuse std::io;\nuse std::fs;\nuse std::fs::PathExt;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::collections::HashMap;\nuse self::core::str::StrExt;\nuse std::ffi::OsStr;\nuse liquid::Value;\n\nuse document::Document;\nuse util;\n\npub fn build(source: &Path, dest: &Path, layout_str: &str, posts_str: &str) -> io::Result<()>{\n \/\/ TODO make configurable\n let template_extensions = [OsStr::new(\"tpl\") \/*, OsStr::new(\"md\")*\/];\n\n let layouts_path = source.join(layout_str);\n let posts_path = source.join(posts_str);\n\n let mut layouts : HashMap<String, String> = HashMap::new();\n\n \/\/ go through the layout directory and add\n \/\/ filename -> text content to the layout map\n match fs::walk_dir(&layouts_path) {\n Ok(files) => for layout in files {\n let layout = try!(layout).path();\n if layout.is_file() {\n let mut text = String::new();\n try!(try!(File::open(&layout)).read_to_string(&mut text));\n layouts.insert(layout.as_path().file_name().unwrap().to_str().unwrap().to_string(), text);\n }\n },\n Err(_) => println!(\"Warning: No layout path found ({})\\n\", source.display())\n };\n\n let mut documents = vec![];\n let mut post_data = vec![];\n\n \/\/ walk source directory and find files that are written in\n \/\/ a template file extension\n for p in try!(fs::walk_dir(source)) {\n let p = p.unwrap().path();\n let path = p.as_path();\n \/\/ check for file extensions\n if template_extensions.contains(&path.extension().unwrap_or(OsStr::new(\"\")))\n \/\/ check that file is not in the layouts folder\n && path.parent() != Some(layouts_path.as_path()) {\n let doc = parse_document(&path, source);\n if path.parent() == Some(posts_path.as_path()){\n post_data.push(Value::Object(doc.get_attributes()));\n }\n documents.push(doc);\n }\n }\n\n for doc in documents.iter() {\n try!(doc.create_file(dest, &layouts, &post_data));\n }\n\n \/\/ copy everything\n if 
source != dest {\n try!(util::copy_recursive_filter(source, dest, &|p| -> bool {\n !p.file_name().unwrap().to_str().unwrap_or(\"\").starts_with(\".\")\n && !template_extensions.contains(&p.extension().unwrap_or(OsStr::new(\"\")))\n && p != dest\n && p != layouts_path.as_path()\n }));\n }\n\n Ok(())\n}\n\nfn parse_document(path: &Path, source: &Path) -> Document {\n let attributes = extract_attributes(path);\n let content = extract_content(path).unwrap();\n let new_path = path.relative_from(source).unwrap();\n \/\/ let markdown = path.extension().unwrap_or(OsStr::new(\"\")) == OsStr::new(\"md\");\n\n Document::new(\n new_path.to_str().unwrap().to_string(),\n attributes,\n content,\n \/\/ markdown\n )\n}\n\nfn parse_file(path: &Path) -> io::Result<String> {\n let mut file = try!(File::open(path));\n let mut text = String::new();\n try!(file.read_to_string(&mut text));\n Ok(text)\n}\n\nfn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.file_stem().unwrap().to_str().unwrap().to_string());\n\n let content = parse_file(path).unwrap();\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n let attribute_string = content_splits.nth(0).unwrap();\n\n for attribute_line in attribute_string.split(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let mut attribute_split = attribute_line.split(':');\n\n \/\/ TODO: Refactor, find a better way for doing this\n \/\/ .nth() method is consuming the iterator and therefore the 0th index on the second method\n \/\/ is in real index 1\n let key = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n let value = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n}\n\nfn extract_content(path: &Path) -> io::Result<String> {\n let content = try!(parse_file(path));\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n return Ok(content_splits.nth(1).unwrap().to_string());\n }\n\n return Ok(content);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added config.rs to git repository<commit_after>pub struct Config;\n\nimpl Config {\n\tpub fn new() -> Config {\n\t\treturn Config;\n\t}\n\t\n\tpub fn width(&self) -> f64 {\n\t\treturn 200.0;\n\t}\n\t\n\tpub fn height(&self) -> f64 {\n\t\treturn 200.0;\n\t}\n\t\n\tpub fn astroid_gap_distance(&self) -> f64 {\n\t\treturn 25.0;\n\t}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Mark stub in Cursor with unimplemented!()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Move the max sines definition in planet.rs so that it is more prominent<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Clean up nat tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Re-enable Min for Nat<commit_after><|endoftext|>"} {"text":"<commit_before>class cat {\n priv {\n let mutable meows : uint;\n }\n\n let how_hungry : int;\n\n new(in_x : uint, in_y : int) { meows = in_x; how_hungry = in_y; }\n}\n\nfn main() {\n let nyan : cat = cat(52u, 99);\n let kitty = cat(1000u, 2);\n log(debug, nyan.how_hungry);\n log(debug, kitty.how_hungry);\n}<commit_msg>Re-XFAIL test that I accidentally<commit_after>\/\/ xfail-test\nclass cat {\n priv {\n let mutable meows : uint;\n }\n\n let how_hungry : int;\n\n new(in_x : uint, in_y : int) { meows = in_x; how_hungry = in_y; }\n}\n\nfn main() {\n let nyan : cat = 
cat(52u, 99);\n let kitty = cat(1000u, 2);\n log(debug, nyan.how_hungry);\n log(debug, kitty.how_hungry);\n}<|endoftext|>"} {"text":"<commit_before>pub type clock_t = i64;\npub type suseconds_t = i64;\npub type dev_t = i32;\npub type sigset_t = ::c_uint;\npub type blksize_t = ::int32_t;\npub type fsblkcnt_t = ::uint64_t;\npub type fsfilcnt_t = ::uint64_t;\npub type pthread_attr_t = *mut ::c_void;\npub type pthread_mutex_t = *mut ::c_void;\npub type pthread_mutexattr_t = *mut ::c_void;\npub type pthread_cond_t = *mut ::c_void;\npub type pthread_rwlock_t = *mut ::c_void;\n\ns! {\n pub struct dirent {\n pub d_fileno: ::ino_t,\n pub d_off: ::off_t,\n pub d_reclen: u16,\n pub d_type: u8,\n pub d_namlen: u8,\n __d_padding: [u8; 4],\n pub d_name: [::c_char; 256],\n }\n\n pub struct glob_t {\n pub gl_pathc: ::c_int,\n __unused1: ::c_int,\n pub gl_offs: ::c_int,\n __unused2: ::c_int,\n pub gl_pathv: *mut *mut ::c_char,\n\n __unused3: *mut ::c_void,\n\n __unused4: *mut ::c_void,\n __unused5: *mut ::c_void,\n __unused6: *mut ::c_void,\n __unused7: *mut ::c_void,\n __unused8: *mut ::c_void,\n __unused9: *mut ::c_void,\n }\n\n pub struct stat {\n pub st_mode: ::mode_t,\n pub st_dev: ::dev_t,\n pub st_ino: ::ino_t,\n pub st_nlink: ::nlink_t,\n pub st_uid: ::uid_t,\n pub st_gid: ::gid_t,\n pub st_rdev: ::dev_t,\n pub st_atime: ::time_t,\n pub st_atime_nsec: ::c_long,\n pub st_mtime: ::time_t,\n pub st_mtime_nsec: ::c_long,\n pub st_ctime: ::time_t,\n pub st_ctime_nsec: ::c_long,\n pub st_size: ::off_t,\n pub st_blocks: ::blkcnt_t,\n pub st_blksize: ::blksize_t,\n pub st_flags: ::uint32_t,\n pub st_gen: ::uint32_t,\n pub st_birthtime: ::time_t,\n pub st_birthtime_nsec: ::c_long,\n }\n\n pub struct statvfs {\n pub f_bsize: ::c_ulong,\n pub f_frsize: ::c_ulong,\n pub f_blocks: ::fsblkcnt_t,\n pub f_bfree: ::fsblkcnt_t,\n pub f_bavail: ::fsblkcnt_t,\n pub f_files: ::fsfilcnt_t,\n pub f_ffree: ::fsfilcnt_t,\n pub f_favail: ::fsfilcnt_t,\n pub f_fsid: ::c_ulong,\n pub f_flag: ::c_ulong,\n pub f_namemax: ::c_ulong,\n }\n\n pub struct addrinfo {\n pub ai_flags: ::c_int,\n pub ai_family: ::c_int,\n pub ai_socktype: ::c_int,\n pub ai_protocol: ::c_int,\n pub ai_addrlen: ::socklen_t,\n pub ai_addr: *mut ::sockaddr,\n pub ai_canonname: *mut ::c_char,\n pub ai_next: *mut ::addrinfo,\n }\n\n pub struct sockaddr_storage {\n pub ss_len: u8,\n pub ss_family: ::sa_family_t,\n __ss_pad1: [u8; 6],\n __ss_pad2: i64,\n __ss_pad3: [u8; 240],\n }\n\n pub struct siginfo_t {\n pub si_signo: ::c_int,\n pub si_code: ::c_int,\n pub si_errno: ::c_int,\n pub si_addr: *mut ::c_char,\n __pad: [u8; 108],\n }\n\n pub struct Dl_info {\n pub dli_fname: *const ::c_char,\n pub dli_fbase: *mut ::c_void,\n pub dli_sname: *const ::c_char,\n pub dli_saddr: *mut ::c_void,\n }\n}\n\npub const O_CLOEXEC: ::c_int = 0x10000;\n\npub const MS_SYNC : ::c_int = 0x0002;\npub const MS_INVALIDATE : ::c_int = 0x0004;\n\npub const PTHREAD_STACK_MIN : ::size_t = 2048;\n\npub const ENOATTR : ::c_int = 83;\npub const EILSEQ : ::c_int = 84;\npub const EOVERFLOW : ::c_int = 87;\npub const ECANCELED : ::c_int = 88;\npub const EIDRM : ::c_int = 89;\npub const ENOMSG : ::c_int = 90;\npub const ENOTSUP : ::c_int = 91;\npub const ELAST : ::c_int = 91;\n\npub const F_DUPFD_CLOEXEC : ::c_int = 10;\n\npub const RLIM_NLIMITS: ::c_int = 9;\n\npub const SO_SNDTIMEO: ::c_int = 0x1005;\npub const SO_RCVTIMEO: ::c_int = 0x1006;\n\npub const KERN_PROC : ::c_int = 66;\npub const O_DSYNC : ::c_int = 128;\n\npub const MAP_RENAME : ::c_int = 0x0000;\npub const MAP_NORESERVE : ::c_int 
= 0x0000;\npub const MAP_HASSEMAPHORE : ::c_int = 0x0000;\n\npub const EIPSEC : ::c_int = 82;\npub const ENOMEDIUM : ::c_int = 85;\npub const EMEDIUMTYPE : ::c_int = 86;\n\npub const RUSAGE_THREAD: ::c_int = 1;\n\npub const MAP_COPY : ::c_int = 0x0002;\npub const MAP_NOEXTEND : ::c_int = 0x0000;\n\npub const _SC_CLK_TCK : ::c_int = 3;\npub const _SC_IOV_MAX : ::c_int = 51;\npub const _SC_GETGR_R_SIZE_MAX : ::c_int = 100;\npub const _SC_GETPW_R_SIZE_MAX : ::c_int = 101;\npub const _SC_LOGIN_NAME_MAX : ::c_int = 102;\npub const _SC_MQ_PRIO_MAX : ::c_int = 59;\npub const _SC_THREADS : ::c_int = 91;\npub const _SC_THREAD_ATTR_STACKADDR : ::c_int = 77;\npub const _SC_THREAD_ATTR_STACKSIZE : ::c_int = 78;\npub const _SC_THREAD_DESTRUCTOR_ITERATIONS : ::c_int = 80;\npub const _SC_THREAD_KEYS_MAX : ::c_int = 81;\npub const _SC_THREAD_PRIO_INHERIT : ::c_int = 82;\npub const _SC_THREAD_PRIO_PROTECT : ::c_int = 83;\npub const _SC_THREAD_PRIORITY_SCHEDULING : ::c_int = 84;\npub const _SC_THREAD_PROCESS_SHARED : ::c_int = 85;\npub const _SC_THREAD_SAFE_FUNCTIONS : ::c_int = 103;\npub const _SC_THREAD_STACK_MIN : ::c_int = 89;\npub const _SC_THREAD_THREADS_MAX : ::c_int = 90;\npub const _SC_TTY_NAME_MAX : ::c_int = 107;\npub const _SC_ATEXIT_MAX : ::c_int = 46;\npub const _SC_AIO_LISTIO_MAX : ::c_int = 42;\npub const _SC_AIO_MAX : ::c_int = 43;\npub const _SC_ASYNCHRONOUS_IO : ::c_int = 45;\npub const _SC_MAPPED_FILES : ::c_int = 53;\npub const _SC_MEMLOCK : ::c_int = 54;\npub const _SC_MEMLOCK_RANGE : ::c_int = 55;\npub const _SC_MEMORY_PROTECTION : ::c_int = 56;\npub const _SC_MESSAGE_PASSING : ::c_int = 57;\npub const _SC_MQ_OPEN_MAX : ::c_int = 58;\npub const _SC_PRIORITY_SCHEDULING : ::c_int = 61;\npub const _SC_SEMAPHORES : ::c_int = 67;\npub const _SC_SHARED_MEMORY_OBJECTS : ::c_int = 68;\npub const _SC_SYNCHRONIZED_IO : ::c_int = 75;\npub const _SC_TIMERS : ::c_int = 94;\npub const _SC_XOPEN_CRYPT : ::c_int = 117;\npub const _SC_XOPEN_ENH_I18N : ::c_int = 118;\npub const _SC_XOPEN_LEGACY : ::c_int = 119;\npub const _SC_XOPEN_REALTIME : ::c_int = 120;\npub const _SC_XOPEN_REALTIME_THREADS : ::c_int = 121;\npub const _SC_XOPEN_UNIX : ::c_int = 123;\npub const _SC_XOPEN_VERSION : ::c_int = 125;\npub const _SC_SEM_NSEMS_MAX : ::c_int = 31;\npub const _SC_SEM_VALUE_MAX : ::c_int = 32;\npub const _SC_AIO_PRIO_DELTA_MAX : ::c_int = 44;\npub const _SC_DELAYTIMER_MAX : ::c_int = 50;\npub const _SC_PRIORITIZED_IO : ::c_int = 60;\npub const _SC_REALTIME_SIGNALS : ::c_int = 64;\npub const _SC_RTSIG_MAX : ::c_int = 66;\npub const _SC_SIGQUEUE_MAX : ::c_int = 70;\npub const _SC_TIMER_MAX : ::c_int = 93;\n\npub const SIGSTKSZ: ::size_t = 40960;\n\npub const FD_SETSIZE: usize = 1024;\n\npub const ST_NOSUID: ::c_ulong = 2;\n\npub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _;\npub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _;\npub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _;\npub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2;\n\npub const KERN_PROC_ARGS: ::c_int = 55;\n\npub const TMP_MAX : ::c_uint = 0x7fffffff;\n\npub const NI_MAXHOST: ::size_t = 256;\n\nextern {\n pub fn getnameinfo(sa: *const ::sockaddr,\n salen: ::socklen_t,\n host: *mut ::c_char,\n hostlen: ::size_t,\n serv: *mut ::c_char,\n servlen: ::size_t,\n flags: ::c_int) -> ::c_int;\n pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int)\n -> ::c_int;\n pub fn sysctl(name: *const ::c_int,\n namelen: ::c_uint,\n oldp: *mut ::c_void,\n oldlenp: *mut ::size_t,\n newp: *mut ::c_void,\n 
newlen: ::size_t)\n -> ::c_int;\n pub fn getentropy(buf: *mut ::c_void, buflen: ::size_t) -> ::c_int;\n}\n<commit_msg>Add IP v6 group membership options to OpenBSD build<commit_after>pub type clock_t = i64;\npub type suseconds_t = i64;\npub type dev_t = i32;\npub type sigset_t = ::c_uint;\npub type blksize_t = ::int32_t;\npub type fsblkcnt_t = ::uint64_t;\npub type fsfilcnt_t = ::uint64_t;\npub type pthread_attr_t = *mut ::c_void;\npub type pthread_mutex_t = *mut ::c_void;\npub type pthread_mutexattr_t = *mut ::c_void;\npub type pthread_cond_t = *mut ::c_void;\npub type pthread_rwlock_t = *mut ::c_void;\n\ns! {\n pub struct dirent {\n pub d_fileno: ::ino_t,\n pub d_off: ::off_t,\n pub d_reclen: u16,\n pub d_type: u8,\n pub d_namlen: u8,\n __d_padding: [u8; 4],\n pub d_name: [::c_char; 256],\n }\n\n pub struct glob_t {\n pub gl_pathc: ::c_int,\n __unused1: ::c_int,\n pub gl_offs: ::c_int,\n __unused2: ::c_int,\n pub gl_pathv: *mut *mut ::c_char,\n\n __unused3: *mut ::c_void,\n\n __unused4: *mut ::c_void,\n __unused5: *mut ::c_void,\n __unused6: *mut ::c_void,\n __unused7: *mut ::c_void,\n __unused8: *mut ::c_void,\n __unused9: *mut ::c_void,\n }\n\n pub struct stat {\n pub st_mode: ::mode_t,\n pub st_dev: ::dev_t,\n pub st_ino: ::ino_t,\n pub st_nlink: ::nlink_t,\n pub st_uid: ::uid_t,\n pub st_gid: ::gid_t,\n pub st_rdev: ::dev_t,\n pub st_atime: ::time_t,\n pub st_atime_nsec: ::c_long,\n pub st_mtime: ::time_t,\n pub st_mtime_nsec: ::c_long,\n pub st_ctime: ::time_t,\n pub st_ctime_nsec: ::c_long,\n pub st_size: ::off_t,\n pub st_blocks: ::blkcnt_t,\n pub st_blksize: ::blksize_t,\n pub st_flags: ::uint32_t,\n pub st_gen: ::uint32_t,\n pub st_birthtime: ::time_t,\n pub st_birthtime_nsec: ::c_long,\n }\n\n pub struct statvfs {\n pub f_bsize: ::c_ulong,\n pub f_frsize: ::c_ulong,\n pub f_blocks: ::fsblkcnt_t,\n pub f_bfree: ::fsblkcnt_t,\n pub f_bavail: ::fsblkcnt_t,\n pub f_files: ::fsfilcnt_t,\n pub f_ffree: ::fsfilcnt_t,\n pub f_favail: ::fsfilcnt_t,\n pub f_fsid: ::c_ulong,\n pub f_flag: ::c_ulong,\n pub f_namemax: ::c_ulong,\n }\n\n pub struct addrinfo {\n pub ai_flags: ::c_int,\n pub ai_family: ::c_int,\n pub ai_socktype: ::c_int,\n pub ai_protocol: ::c_int,\n pub ai_addrlen: ::socklen_t,\n pub ai_addr: *mut ::sockaddr,\n pub ai_canonname: *mut ::c_char,\n pub ai_next: *mut ::addrinfo,\n }\n\n pub struct sockaddr_storage {\n pub ss_len: u8,\n pub ss_family: ::sa_family_t,\n __ss_pad1: [u8; 6],\n __ss_pad2: i64,\n __ss_pad3: [u8; 240],\n }\n\n pub struct siginfo_t {\n pub si_signo: ::c_int,\n pub si_code: ::c_int,\n pub si_errno: ::c_int,\n pub si_addr: *mut ::c_char,\n __pad: [u8; 108],\n }\n\n pub struct Dl_info {\n pub dli_fname: *const ::c_char,\n pub dli_fbase: *mut ::c_void,\n pub dli_sname: *const ::c_char,\n pub dli_saddr: *mut ::c_void,\n }\n}\n\npub const O_CLOEXEC: ::c_int = 0x10000;\n\npub const MS_SYNC : ::c_int = 0x0002;\npub const MS_INVALIDATE : ::c_int = 0x0004;\n\npub const PTHREAD_STACK_MIN : ::size_t = 2048;\n\npub const ENOATTR : ::c_int = 83;\npub const EILSEQ : ::c_int = 84;\npub const EOVERFLOW : ::c_int = 87;\npub const ECANCELED : ::c_int = 88;\npub const EIDRM : ::c_int = 89;\npub const ENOMSG : ::c_int = 90;\npub const ENOTSUP : ::c_int = 91;\npub const ELAST : ::c_int = 91;\n\npub const F_DUPFD_CLOEXEC : ::c_int = 10;\n\npub const RLIM_NLIMITS: ::c_int = 9;\n\npub const SO_SNDTIMEO: ::c_int = 0x1005;\npub const SO_RCVTIMEO: ::c_int = 0x1006;\n\npub const IPV6_JOIN_GROUP: ::c_int = 12;\npub const IPV6_LEAVE_GROUP: ::c_int = 13;\n\npub const KERN_PROC : 
::c_int = 66;\npub const O_DSYNC : ::c_int = 128;\n\npub const MAP_RENAME : ::c_int = 0x0000;\npub const MAP_NORESERVE : ::c_int = 0x0000;\npub const MAP_HASSEMAPHORE : ::c_int = 0x0000;\n\npub const EIPSEC : ::c_int = 82;\npub const ENOMEDIUM : ::c_int = 85;\npub const EMEDIUMTYPE : ::c_int = 86;\n\npub const RUSAGE_THREAD: ::c_int = 1;\n\npub const MAP_COPY : ::c_int = 0x0002;\npub const MAP_NOEXTEND : ::c_int = 0x0000;\n\npub const _SC_CLK_TCK : ::c_int = 3;\npub const _SC_IOV_MAX : ::c_int = 51;\npub const _SC_GETGR_R_SIZE_MAX : ::c_int = 100;\npub const _SC_GETPW_R_SIZE_MAX : ::c_int = 101;\npub const _SC_LOGIN_NAME_MAX : ::c_int = 102;\npub const _SC_MQ_PRIO_MAX : ::c_int = 59;\npub const _SC_THREADS : ::c_int = 91;\npub const _SC_THREAD_ATTR_STACKADDR : ::c_int = 77;\npub const _SC_THREAD_ATTR_STACKSIZE : ::c_int = 78;\npub const _SC_THREAD_DESTRUCTOR_ITERATIONS : ::c_int = 80;\npub const _SC_THREAD_KEYS_MAX : ::c_int = 81;\npub const _SC_THREAD_PRIO_INHERIT : ::c_int = 82;\npub const _SC_THREAD_PRIO_PROTECT : ::c_int = 83;\npub const _SC_THREAD_PRIORITY_SCHEDULING : ::c_int = 84;\npub const _SC_THREAD_PROCESS_SHARED : ::c_int = 85;\npub const _SC_THREAD_SAFE_FUNCTIONS : ::c_int = 103;\npub const _SC_THREAD_STACK_MIN : ::c_int = 89;\npub const _SC_THREAD_THREADS_MAX : ::c_int = 90;\npub const _SC_TTY_NAME_MAX : ::c_int = 107;\npub const _SC_ATEXIT_MAX : ::c_int = 46;\npub const _SC_AIO_LISTIO_MAX : ::c_int = 42;\npub const _SC_AIO_MAX : ::c_int = 43;\npub const _SC_ASYNCHRONOUS_IO : ::c_int = 45;\npub const _SC_MAPPED_FILES : ::c_int = 53;\npub const _SC_MEMLOCK : ::c_int = 54;\npub const _SC_MEMLOCK_RANGE : ::c_int = 55;\npub const _SC_MEMORY_PROTECTION : ::c_int = 56;\npub const _SC_MESSAGE_PASSING : ::c_int = 57;\npub const _SC_MQ_OPEN_MAX : ::c_int = 58;\npub const _SC_PRIORITY_SCHEDULING : ::c_int = 61;\npub const _SC_SEMAPHORES : ::c_int = 67;\npub const _SC_SHARED_MEMORY_OBJECTS : ::c_int = 68;\npub const _SC_SYNCHRONIZED_IO : ::c_int = 75;\npub const _SC_TIMERS : ::c_int = 94;\npub const _SC_XOPEN_CRYPT : ::c_int = 117;\npub const _SC_XOPEN_ENH_I18N : ::c_int = 118;\npub const _SC_XOPEN_LEGACY : ::c_int = 119;\npub const _SC_XOPEN_REALTIME : ::c_int = 120;\npub const _SC_XOPEN_REALTIME_THREADS : ::c_int = 121;\npub const _SC_XOPEN_UNIX : ::c_int = 123;\npub const _SC_XOPEN_VERSION : ::c_int = 125;\npub const _SC_SEM_NSEMS_MAX : ::c_int = 31;\npub const _SC_SEM_VALUE_MAX : ::c_int = 32;\npub const _SC_AIO_PRIO_DELTA_MAX : ::c_int = 44;\npub const _SC_DELAYTIMER_MAX : ::c_int = 50;\npub const _SC_PRIORITIZED_IO : ::c_int = 60;\npub const _SC_REALTIME_SIGNALS : ::c_int = 64;\npub const _SC_RTSIG_MAX : ::c_int = 66;\npub const _SC_SIGQUEUE_MAX : ::c_int = 70;\npub const _SC_TIMER_MAX : ::c_int = 93;\n\npub const SIGSTKSZ: ::size_t = 40960;\n\npub const FD_SETSIZE: usize = 1024;\n\npub const ST_NOSUID: ::c_ulong = 2;\n\npub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _;\npub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _;\npub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _;\npub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2;\n\npub const KERN_PROC_ARGS: ::c_int = 55;\n\npub const TMP_MAX : ::c_uint = 0x7fffffff;\n\npub const NI_MAXHOST: ::size_t = 256;\n\nextern {\n pub fn getnameinfo(sa: *const ::sockaddr,\n salen: ::socklen_t,\n host: *mut ::c_char,\n hostlen: ::size_t,\n serv: *mut ::c_char,\n servlen: ::size_t,\n flags: ::c_int) -> ::c_int;\n pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int)\n -> ::c_int;\n pub fn 
sysctl(name: *const ::c_int,\n namelen: ::c_uint,\n oldp: *mut ::c_void,\n oldlenp: *mut ::size_t,\n newp: *mut ::c_void,\n newlen: ::size_t)\n -> ::c_int;\n pub fn getentropy(buf: *mut ::c_void, buflen: ::size_t) -> ::c_int;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #27133 - apasel422:issue-15919, r=rollup<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern: too big for the current architecture\n\n#[cfg(target_pointer_width = \"32\")]\nfn main() {\n let x = [0usize; 0xffff_ffff];\n}\n\n#[cfg(target_pointer_width = \"64\")]\nfn main() {\n let x = [0usize; 0xffff_ffff_ffff_ffff];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Comparing the guess and the secret number<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix: filter empty split results<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added template file for Rust problems.<commit_after>\/\/ USAGE EXAMPLES:\n\/\/\n\/\/ cargo run template\n\/\/ cargo run template -i apple\n\/\/ cargo run template --input バナナ\n\/\/ INPUT=\"☺️\" cargo run template\n\/\/\n\/\/ PROBLEM:\n\/\/ New problem template file. (Print number of characters in string.)\n\/\/\n\/\/ EXAMPLES:\n\/\/ \"apple\" => 5\n\/\/ \"バナナ\" => 3\n\/\/ \"☺️\" => 2\n\/\/\n\/\/ REFERENCE:\n\/\/ URL\n\npub fn run(string: String) {\n println!(\"{}\", solve(&string));\n}\n\nfn solve(string: &str) -> i64 {\n return string.chars().count() as i64;\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn run_ok() {\n run(\"\".to_string());\n }\n\n #[test]\n fn returns_expected() {\n assert_eq!(5, solve(\"apple\"));\n assert_eq!(3, solve(\"バナナ\"));\n assert_eq!(2, solve(\"☺️\"));\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor the surface format so that it is encapsulated in the surface type<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix the layout transitions in read_pixels(), and tidy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add benchmark, I don't think travis will build this so it should be ok<commit_after>#![feature(test)]\n\nextern crate image;\nextern crate bspline;\nextern crate test;\n\nuse test::Bencher;\n\nuse std::ops::{Mul, Add, Index, IndexMut};\nuse std::iter;\n\n#[derive(Copy, Clone, Debug)]\nstruct Point {\n x: f32,\n y: f32,\n}\nimpl Point {\n fn new(x: f32, y: f32) -> Point {\n Point { x: x, y: y }\n }\n}\nimpl Mul<f32> for Point {\n type Output = Point;\n fn mul(self, rhs: f32) -> Point {\n Point { x: self.x * rhs, y: self.y * rhs }\n }\n}\nimpl Add for Point {\n type Output = Point;\n fn add(self, rhs: Point) -> Point {\n Point { x: self.x + rhs.x, y: self.y + rhs.y }\n }\n}\n\n\/\/\/ Clamp `x` to be between `min` and `max`\npub fn clamp<T: PartialOrd>(x: T, min: T, max: T) -> T {\n if x < min { min } else if x > max { max } else { x }\n}\n\n#[derive(Debug, Copy, Clone)]\npub struct Colorf {\n pub r: f32,\n pub g: f32,\n pub b: f32,\n}\n\nimpl Colorf {\n \/\/\/ Create an RGB color\n pub fn new(r: f32, g: f32, b: f32) -> Colorf {\n Colorf { r: r, g: g, 
b: b }\n }\n pub fn broadcast(x: f32) -> Colorf {\n Colorf { r: x, g: x, b: x }\n }\n \/\/\/ Clamp the color values between [0, 1]\n pub fn clamp(&self) -> Colorf {\n Colorf { r: clamp(self.r, 0.0, 1.0),\n g: clamp(self.g, 0.0, 1.0),\n b: clamp(self.b, 0.0, 1.0) }\n }\n \/\/\/ Convert the linear RGB color to sRGB\n pub fn to_srgb(&self) -> Colorf {\n let a = 0.055f32;\n let b = 1f32 \/ 2.4;\n let mut srgb = Colorf::broadcast(0.0);\n for i in 0..3 {\n if self[i] <= 0.0031308 {\n srgb[i] = 12.92 * self[i];\n } else {\n srgb[i] = (1.0 + a) * f32::powf(self[i], b) - a;\n }\n }\n srgb\n }\n}\nimpl Add for Colorf {\n type Output = Colorf;\n \/\/\/ Add two colors together\n fn add(self, rhs: Colorf) -> Colorf {\n Colorf { r: self.r + rhs.r, g: self.g + rhs.g, b: self.b + rhs.b }\n }\n}\nimpl Mul<f32> for Colorf {\n type Output = Colorf;\n \/\/\/ Scale the color by the float\n fn mul(self, rhs: f32) -> Colorf {\n Colorf { r: self.r * rhs, g: self.g * rhs, b: self.b * rhs }\n }\n}\nimpl Index<usize> for Colorf {\n type Output = f32;\n \/\/\/ Access the channels by index\n \/\/\/ \n \/\/\/ - 0 = r\n \/\/\/ - 1 = g\n \/\/\/ - 2 = b\n fn index(&self, i: usize) -> &f32 {\n match i {\n 0 => &self.r,\n 1 => &self.g,\n 2 => &self.b,\n _ => panic!(\"Invalid index into color\"),\n }\n }\n}\nimpl IndexMut<usize> for Colorf {\n \/\/\/ Access the channels by index\n \/\/\/ \n \/\/\/ - 0 = r\n \/\/\/ - 1 = g\n \/\/\/ - 2 = b\n fn index_mut(&mut self, i: usize) -> &mut f32 {\n match i {\n 0 => &mut self.r,\n 1 => &mut self.g,\n 2 => &mut self.b,\n _ => panic!(\"Invalid index into color\"),\n }\n }\n}\n\n\/\/\/ Evaluate the B-spline and plot it to the image buffer passed. The colors and points splines\n\/\/\/ should have the same t range.\nfn plot_2d(spline: &bspline::BSpline<Point>, colors: &bspline::BSpline<Colorf>, plot: &mut [u8],\n plot_dim: (usize, usize), scale: (f32, f32), offset: (f32, f32), show_control_pts: bool) {\n let step_size = 0.001;\n let t_range = spline.knot_domain();\n let steps = ((t_range.1 - t_range.0) \/ step_size) as usize;\n for s in 0..steps {\n let t = step_size * s as f32 + t_range.0;\n let pt = spline.point(t);\n let color = colors.point(t).to_srgb();\n let ix = ((pt.x + offset.0) * scale.0) as isize;\n let iy = ((pt.y + offset.1) * scale.1) as isize;\n for y in iy - 4..iy + 4 {\n for x in ix - 4..ix + 4 {\n if y >= 0 && y < plot_dim.1 as isize && x >= 0 && x < plot_dim.0 as isize {\n let px = (plot_dim.1 - 1 - y as usize) * plot_dim.0 * 3 + x as usize * 3;\n for i in 0..3 {\n plot[px + i] = (color[i] * 255.0) as u8;\n }\n }\n }\n }\n }\n \/\/ Draw the control points\n if show_control_pts {\n for pt in spline.control_points() {\n let ix = ((pt.x + offset.0) * scale.0) as isize;\n let iy = ((pt.y + offset.1) * scale.1) as isize;\n \/\/ Plot a black marker for each control point\n for y in iy - 3..iy + 3 {\n for x in ix - 3..ix + 3 {\n if y >= 0 && y < plot_dim.1 as isize && x >= 0 && x < plot_dim.0 as isize {\n let px = (plot_dim.1 - 1 - y as usize) * plot_dim.0 * 3 + x as usize * 3;\n plot[px] = 0;\n plot[px + 1] = 0;\n plot[px + 2] = 0;\n }\n }\n }\n }\n }\n}\n\n\/\/\/ Plot the text 'bspline' to create the logo for the library\n#[bench]\nfn bench_logo(bencher: &mut Bencher) {\n let points = vec![\/\/ Draw the b\n Point::new(-4.0, 4.0), Point::new(-4.0, -1.0), Point::new(-4.0, -1.0),\n Point::new(-2.0, 0.0), Point::new(-4.0, 1.35), Point::new(-4.0, 1.35),\n \/\/ Draw the s\n Point::new(-1.0, 1.5), Point::new(-1.0, 1.5), Point::new(-2.8, 1.0),\n Point::new(-0.5, 0.0), 
Point::new(-2.5, -0.5), Point::new(-2.5, -0.5),\n \/\/ Draw the p\n Point::new(0.0, -1.0), Point::new(0.0, 1.8), Point::new(0.0, 1.8),\n Point::new(0.0, 1.8), Point::new(-0.2, -1.8), Point::new(-0.2, -1.8),\n Point::new(-0.2, -1.8), Point::new(0.25, 2.5), Point::new(1.5, 1.2),\n Point::new(0.0, -0.5), Point::new(0.0, -0.5), Point::new(1.0, -0.3),\n Point::new(1.0, -0.3), Point::new(2.5, 1.5),\n \/\/ Draw the l\n Point::new(2.0, 3.0), Point::new(2.0, 3.0), Point::new(1.5, 1.5),\n Point::new(2.5, -0.5), Point::new(2.5, -0.5),\n \/\/ Draw the i\n Point::new(2.8, 1.3), Point::new(2.8, 1.3), Point::new(2.8, 1.3),\n \/\/ Draw the n\n Point::new(3.1, -0.5), Point::new(3.1, -0.5), Point::new(3.4, 1.2),\n Point::new(3.4, 1.2), Point::new(3.4, 1.2), Point::new(3.4, -0.5),\n Point::new(3.4, -0.5), Point::new(3.4, -0.5), Point::new(4.0, 2.3),\n Point::new(4.2, -0.5), Point::new(4.2, -0.5),\n \/\/ Draw the e\n Point::new(5.5, 0.8), Point::new(5.5, 0.8), Point::new(4.5, 1.3), Point::new(4.5, -0.3),\n Point::new(6.5, -0.3)];\n let knots = vec![0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 4.0, 5.0, 6.0, 7.0, 7.0,\n 8.0, 9.0, 9.0, 9.0, 10.0, 10.0, 10.0, 11.0, 12.0, 13.0, 13.0,\n 14.0, 14.0, 15.0, 16.0, 16.0, 17.0, 18.0, 18.0, 19.0, 19.0, 19.0,\n 20.0, 20.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 23.0, 24.0, 24.0,\n 25.0, 25.0, 26.0, 27.0, 28.0, 28.0, 28.0, 28.0];\n\n let colors = vec![Colorf::new(1.0, 0.0, 0.0), Colorf::new(0.0, 0.0, 1.0), Colorf::new(0.0, 1.0, 0.0)];\n let color_knots = vec![0.0, 0.0, 0.0, 28.0, 28.0, 28.0];\n\n let plot_dim = (720, 540);\n let scale = (plot_dim.0 as f32 \/ 14.0, plot_dim.1 as f32 \/ 10.0);\n let offset = (6.0, 4.5);\n\n let mut plot: Vec<_> = iter::repeat(255u8).take(plot_dim.0 * plot_dim.1 * 3).collect();\n\n let spline = bspline::BSpline::new(3, points, knots);\n let color_spline = bspline::BSpline::new(2, colors, color_knots);\n\n bencher.iter(|| {\n plot_2d(&spline, &color_spline, &mut plot[..], plot_dim, scale, offset, false);\n });\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>use rusqlite::Connection;\nuse rustc_serialize;\nuse elephant;\nuse std::collections::HashSet;\nuse std::vec::Vec;\n\n\npub struct dependencyPair {\n key: String,\n value: String,\n}\n\n\npub fn json_loader_elephant(conn: &Connection, pk_file: &i32, json: &rustc_serialize::json::Json) {\n let mut job_name = String::new();\n let mut job_provides: HashSet<String> = HashSet::new();\n let mut job_depends: HashSet<String> = HashSet::new();\n let mut job_vaiable_depends: HashSet<String> = HashSet::new();\n let mut job_vaiable_provides: HashSet<String> = HashSet::new();\n let mut job_requires_vaiable_pair: Vec<dependencyPair> = Vec::new();\n\n\n if json.is_object() {\n let sssbill = json.as_object();\n for &movie in &sssbill {\n\n if movie.contains_key(\"name\") {\n let resulkt = movie.get(\"name\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_string() {\n let str_item = itemfdsd.as_string();\n let foo = str_item.unwrap();\n job_name = String::from(foo);\n }\n } else {\n continue;\n }\n if movie.contains_key(\"provides\") {\n let resulkt = movie.get(\"provides\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let providers = String::from(foo);\n job_provides.insert(providers);\n }\n }\n }\n if itemfdsd.is_string() {\n let sss = 
itemfdsd.as_string();\n let foo = sss.unwrap();\n let providers = String::from(foo);\n job_provides.insert(providers);\n }\n }\n if movie.contains_key(\"depends\") {\n \/\/debug!(\"depends\");\n let resulkt = movie.get(\"depends\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let depends = String::from(foo);\n job_depends.insert(depends);\n }\n }\n }\n }\n if movie.contains_key(\"variables\") {\n let resulkt = movie.get(\"variables\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_object() {\n let objVariables = itemfdsd.as_object();\n for &iVariables in &objVariables {\n if iVariables.contains_key(\"require_keys\") {\n let resulkdt = movie.get(\"require_keys\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n job_vaiable_depends.insert(name);\n }\n }\n }\n }\n if iVariables.contains_key(\"provides_keys\") {\n let resulkt = iVariables.get(\"provides_keys\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n job_vaiable_provides.insert(name);\n \/\/ pk_variable_name = elephant::elephant_variable_pk(conn, &name);\n \/\/ elephant::elephant_job_provide_variables(conn, &pk_job, &pk_variable_name);\n }\n }\n }\n }\n if iVariables.contains_key(\"require_values\") {\n let resulkdtvv = iVariables.get(\"require_values\");\n let sdfc = resulkdtvv.unwrap();\n let itemfdsdff = sdfc.clone();\n if itemfdsdff.is_object() {\n let sssbill = itemfdsdff.as_object();\n for &dict_key in &sssbill {\n \/\/let value_deep = sssbill.get(&dict_key);\n for variable_name in dict_key.keys() {\n let variable_name_clone = variable_name.clone();\n\n let value = dict_key.get(variable_name);\n let unwrapped = value.unwrap();\n if unwrapped.is_string() {\n let sss = unwrapped.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n\n let dp = dependencyPair {\n key: variable_name_clone,\n value: name,\n };\n \/\/job_requires_vaiable_pair\n job_requires_vaiable_pair.push(dp);\n\n }\n }\n }\n }\n }\n }\n }\n }\n }\n let mut pk_job: i32 = 0;\n let mut pk_provider: i32 = 0;\n let mut pk_job_depend: i32 = 0;\n let mut pk_variable_name: i32 = 0;\n debug!(\"job_name:{}\", job_name);\n pk_job = elephant::elephant_job_pk(conn, &pk_file, &job_name);\n debug!(\"job_name::pk_job:{}\", pk_job);\n for item in job_vaiable_provides {\n debug!(\"job_vaiable_provides:{}\", item);\n let variable_name_result = elephant::elephant_variable_pk(conn, &item);\n match variable_name_result {\n Ok(pk_variable_name) => {\n let job_vaiable_provides_pk =\n elephant::elephant_job_provide_variables(conn, &pk_job, &pk_variable_name);\n \/\/debug!(\"job_vaiable_provides::job_vaiable_provides_pk={}\", pk_provider);\n }\n Err(_) => {}\n\n }\n\n\n }\n for item in job_vaiable_depends {\n debug!(\"job_vaiable_depends:{}\", item);\n\n let result_variable_pair = 
elephant::elephant_variable_pk(&conn, &item);\n match result_variable_pair {\n\n Ok(variable_pair_pk) => {\n debug!(\"job_vaiable_depends::variable_pair_pk={}\", variable_pair_pk);\n let job_depend_pair_pk =\n elephant::elephant_job_depend_pair_pk(&conn, &pk_job, &variable_pair_pk);\n debug!(\n \"job_vaiable_depends::job_depend_pair_pk={}\",\n job_depend_pair_pk\n );\n }\n Err(_) => {}\n }\n }\n for item in job_provides {\n debug!(\"job_provides:{}\", item);\n pk_provider = elephant::elephant_provider_pk(conn, &item);\n \/\/ println!(\"elephant_provider_pk={}\", foo);\n \/\/ let sq_order = 1;\n \/\/ pk_provider = elephant::elephant_job_depend_pk(conn, &pk_job, &pk_provider, &sq_order);\n debug!(\"job_provides::pk_provider={}\", pk_provider);\n\n elephant::elephant_job_require_variables(&conn, &pk_job, &pk_provider);\n }\n let mut order_job_depend: i32 = 0;\n for item in job_depends {\n debug!(\"job_depends:{}\", item);\n let item_pk = elephant::elephant_provider_pk(conn, &item);\n let pk_job_depend =\n elephant::elephant_job_depend_pk(conn, &pk_job, &item_pk, &order_job_depend);\n debug!(\"job_depends::pk_job_depend:{}\", pk_job_depend);\n order_job_depend += 10;\n }\n for item in job_requires_vaiable_pair {\n let variable_name_result = elephant::elephant_variable_pk(conn, &item.key);\n match variable_name_result {\n Ok(pk_variable_name) => {\n\n let variable_name_result =\n elephant::elephant_variable_pair_pk(conn, &pk_variable_name, &item.value);\n match variable_name_result {\n Ok(pk_variable_pair) => {\n let pk_variable_pair_dep = elephant::elephant_job_depend_pair_pk(\n conn,\n &pk_job,\n &pk_variable_pair,\n );\n }\n Err(_) => {}\n }\n \/\/println!(\"job_requires_vaiable_pair.key:{}\", item.key);\n \/\/println!(\"job_requires_vaiable_pair.value:{}\", item.value);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_name:{}\",pk_variable_name);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_pair:{}\",pk_variable_pair);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_pair_dep:{}\",pk_variable_pair_dep);\n }\n Err(_) => {}\n }\n\n }\n }\n}\n<commit_msg>Remove warnings<commit_after>use rusqlite::Connection;\nuse rustc_serialize;\nuse elephant;\nuse std::collections::HashSet;\nuse std::vec::Vec;\n\n\npub struct dependencyPair {\n key: String,\n value: String,\n}\n\n\npub fn json_loader_elephant(conn: &Connection, pk_file: &i32, json: &rustc_serialize::json::Json) {\n let mut job_name = String::new();\n let mut job_provides: HashSet<String> = HashSet::new();\n let mut job_depends: HashSet<String> = HashSet::new();\n let mut job_vaiable_depends: HashSet<String> = HashSet::new();\n let mut job_vaiable_provides: HashSet<String> = HashSet::new();\n let mut job_requires_vaiable_pair: Vec<dependencyPair> = Vec::new();\n\n\n if json.is_object() {\n let sssbill = json.as_object();\n for &movie in &sssbill {\n\n if movie.contains_key(\"name\") {\n let resulkt = movie.get(\"name\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_string() {\n let str_item = itemfdsd.as_string();\n let foo = str_item.unwrap();\n job_name = String::from(foo);\n }\n } else {\n continue;\n }\n if movie.contains_key(\"provides\") {\n let resulkt = movie.get(\"provides\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let providers = 
String::from(foo);\n job_provides.insert(providers);\n }\n }\n }\n if itemfdsd.is_string() {\n let sss = itemfdsd.as_string();\n let foo = sss.unwrap();\n let providers = String::from(foo);\n job_provides.insert(providers);\n }\n }\n if movie.contains_key(\"depends\") {\n \/\/debug!(\"depends\");\n let resulkt = movie.get(\"depends\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let depends = String::from(foo);\n job_depends.insert(depends);\n }\n }\n }\n }\n if movie.contains_key(\"variables\") {\n let resulkt = movie.get(\"variables\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_object() {\n let objVariables = itemfdsd.as_object();\n for &iVariables in &objVariables {\n if iVariables.contains_key(\"require_keys\") {\n let resulkdt = movie.get(\"require_keys\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n job_vaiable_depends.insert(name);\n }\n }\n }\n }\n if iVariables.contains_key(\"provides_keys\") {\n let resulkt = iVariables.get(\"provides_keys\");\n let sdf = resulkt.unwrap();\n let itemfdsd = sdf.clone();\n if itemfdsd.is_array() {\n let ssd = itemfdsd.as_array();\n let sdf = ssd.unwrap();\n let itemfdsd = sdf.iter();\n for elem in itemfdsd {\n if elem.is_string() {\n let sss = elem.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n job_vaiable_provides.insert(name);\n \/\/ pk_variable_name = elephant::elephant_variable_pk(conn, &name);\n \/\/ elephant::elephant_job_provide_variables(conn, &pk_job, &pk_variable_name);\n }\n }\n }\n }\n if iVariables.contains_key(\"require_values\") {\n let resulkdtvv = iVariables.get(\"require_values\");\n let sdfc = resulkdtvv.unwrap();\n let itemfdsdff = sdfc.clone();\n if itemfdsdff.is_object() {\n let sssbill = itemfdsdff.as_object();\n for &dict_key in &sssbill {\n \/\/let value_deep = sssbill.get(&dict_key);\n for variable_name in dict_key.keys() {\n let variable_name_clone = variable_name.clone();\n\n let value = dict_key.get(variable_name);\n let unwrapped = value.unwrap();\n if unwrapped.is_string() {\n let sss = unwrapped.as_string();\n let foo = sss.unwrap();\n let name = String::from(foo);\n\n let dp = dependencyPair {\n key: variable_name_clone,\n value: name,\n };\n \/\/job_requires_vaiable_pair\n job_requires_vaiable_pair.push(dp);\n\n }\n }\n }\n }\n }\n }\n }\n }\n }\n let pk_job: i32;\n let mut pk_provider: i32;\n debug!(\"job_name:{}\", job_name);\n pk_job = elephant::elephant_job_pk(conn, &pk_file, &job_name);\n debug!(\"job_name::pk_job:{}\", pk_job);\n for item in job_vaiable_provides {\n debug!(\"job_vaiable_provides:{}\", item);\n let variable_name_result = elephant::elephant_variable_pk(conn, &item);\n match variable_name_result {\n Ok(pk_variable_name) => {\n let job_vaiable_provides_pk =\n elephant::elephant_job_provide_variables(conn, &pk_job, &pk_variable_name);\n \/\/debug!(\"job_vaiable_provides::job_vaiable_provides_pk={}\", pk_provider);\n }\n Err(_) => {}\n\n }\n\n\n }\n for item in job_vaiable_depends {\n debug!(\"job_vaiable_depends:{}\", item);\n\n let 
result_variable_pair = elephant::elephant_variable_pk(&conn, &item);\n match result_variable_pair {\n\n Ok(variable_pair_pk) => {\n debug!(\"job_vaiable_depends::variable_pair_pk={}\", variable_pair_pk);\n let job_depend_pair_pk =\n elephant::elephant_job_depend_pair_pk(&conn, &pk_job, &variable_pair_pk);\n debug!(\n \"job_vaiable_depends::job_depend_pair_pk={}\",\n job_depend_pair_pk\n );\n }\n Err(_) => {}\n }\n }\n for item in job_provides {\n debug!(\"job_provides:{}\", item);\n pk_provider = elephant::elephant_provider_pk(conn, &item);\n \/\/ println!(\"elephant_provider_pk={}\", foo);\n \/\/ let sq_order = 1;\n \/\/ pk_provider = elephant::elephant_job_depend_pk(conn, &pk_job, &pk_provider, &sq_order);\n debug!(\"job_provides::pk_provider={}\", pk_provider);\n\n elephant::elephant_job_require_variables(&conn, &pk_job, &pk_provider);\n }\n let mut order_job_depend: i32 = 0;\n for item in job_depends {\n debug!(\"job_depends:{}\", item);\n let item_pk = elephant::elephant_provider_pk(conn, &item);\n let pk_job_depend =\n elephant::elephant_job_depend_pk(conn, &pk_job, &item_pk, &order_job_depend);\n debug!(\"job_depends::pk_job_depend:{}\", pk_job_depend);\n order_job_depend += 10;\n }\n for item in job_requires_vaiable_pair {\n let variable_name_result = elephant::elephant_variable_pk(conn, &item.key);\n match variable_name_result {\n Ok(pk_variable_name) => {\n\n let variable_name_result =\n elephant::elephant_variable_pair_pk(conn, &pk_variable_name, &item.value);\n match variable_name_result {\n Ok(pk_variable_pair) => {\n let pk_variable_pair_dep = elephant::elephant_job_depend_pair_pk(\n conn,\n &pk_job,\n &pk_variable_pair,\n );\n }\n Err(_) => {}\n }\n \/\/println!(\"job_requires_vaiable_pair.key:{}\", item.key);\n \/\/println!(\"job_requires_vaiable_pair.value:{}\", item.value);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_name:{}\",pk_variable_name);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_pair:{}\",pk_variable_pair);\n \/\/println!(\"job_requires_vaiable_pair.pk_variable_pair_dep:{}\",pk_variable_pair_dep);\n }\n Err(_) => {}\n }\n\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for bytes!<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n let vec = bytes!(\"abc\");\n assert_eq!(vec[0], 97);\n assert_eq!(vec[1], 98);\n assert_eq!(vec[2], 99);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n\nmacro_rules! foo { () => {\n let x = 1;\n macro_rules! 
bar { () => {x} }\n let _ = bar!();\n}}\n\n#[rustc_error]\nfn main() { foo! {}; } \/\/~ ERROR compilation successful\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More robust testing<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(conservative_impl_trait)]\n\ntrait Foo {\n fn foo(fn(u8) -> ()); \/\/~ NOTE type in trait\n fn bar(Option<u8>); \/\/~ NOTE type in trait\n fn baz((u8, u16)); \/\/~ NOTE type in trait\n fn qux() -> u8; \/\/~ NOTE type in trait\n}\n\nstruct Bar;\n\nimpl Foo for Bar {\n fn foo(_: fn(u16) -> ()) {}\n \/\/~^ ERROR method `foo` has an incompatible type for trait\n \/\/~| NOTE expected u8\n fn bar(_: Option<u16>) {}\n \/\/~^ ERROR method `bar` has an incompatible type for trait\n \/\/~| NOTE expected u8\n fn baz(_: (u16, u16)) {}\n \/\/~^ ERROR method `baz` has an incompatible type for trait\n \/\/~| NOTE expected u8\n fn qux() -> u16 { 5u16 }\n \/\/~^ ERROR method `qux` has an incompatible type for trait\n \/\/~| NOTE expected u8\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"foo\"]\n\n#![doc(html_playground_url = \"\")]\n\n\/\/! module docs\n\/\/!\n\/\/! ```\n\/\/! println!(\"Hello, world!\");\n\/\/! ```\n\n\/\/ @!has foo\/index.html '\/\/a[@class=\"test-arrow\"]' \"Run\"\n<commit_msg>Add compile flags to playground-empty test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"foo\"]\n\n#![doc(html_playground_url = \"\")]\n\n\/\/ compile-flags:-Z unstable-options --playground-url https:\/\/play.rust-lang.org\/\n\n\/\/! module docs\n\/\/!\n\/\/! ```\n\/\/! println!(\"Hello, world!\");\n\/\/! 
```\n\n\/\/ @!has foo\/index.html '\/\/a[@class=\"test-arrow\"]' \"Run\"\n<|endoftext|>"} {"text":"<commit_before><commit_msg>blg2bib: Add binary blg2bib<commit_after>#[macro_use(crate_version, crate_authors)]\nextern crate clap;\nextern crate inspirer;\n\n#[macro_use]\nextern crate slog;\nextern crate slog_term;\nuse slog::DrainExt;\n\nuse clap::{App, Arg};\n\nuse std::fs::File;\nuse std::io::{Read, BufReader};\nuse std::io::{Write, BufWriter};\n\nfn main() {\n\n \/\/ Initialize logging\n let drain = slog_term::streamer()\n .stderr()\n .build()\n .fuse();\n let root_logger = slog::Logger::root(drain, o!(\"version\" => crate_version!()));\n info!(root_logger, \"Application started\");\n\n \/\/ Initialize instance of InspirerLib\n let lib = inspirer::Inspirer::init(Some(root_logger.new(o!())));\n\n \/\/ Define CLI\n let matches = App::new(\"blg2bib\")\n .version(crate_version!())\n .author(crate_authors!(\",\\n\"))\n .about(\"gets BibTeX keys from Inspire\")\n .arg(Arg::with_name(\"INPUT\")\n .help(\"Sets the file from which to extract BibTeX keys\")\n .index(1))\n .arg(Arg::with_name(\"OUTPUT\")\n .help(\"Sets the file to which results should be appended\")\n .index(2))\n .get_matches();\n\n \/\/ Get input from specified file or stdin\n let mut input_data = String::new();\n\n let mut input_file: File;\n let mut stdin = std::io::stdin();\n\n let reader: &mut Read = match matches.value_of(\"INPUT\") {\n Some(file_name) => {\n info!(root_logger, \"Reading from file\";\n \"file_name\" => file_name);\n input_file = File::open(file_name).expect(\"File not found\");\n &mut input_file\n }\n None => {\n info!(root_logger, \"Reading from stdin\");\n &mut stdin\n }\n };\n let mut reader = BufReader::new(reader);\n reader.read_to_string(&mut input_data).unwrap();\n\n \/\/ Extract BibTeX tags from document\n let keys = lib.blg2key(input_data);\n info!(root_logger, \"Extracted BibTeX keys\";\n \"number_of_keys\" => keys.len());\n\n \/\/ Retrieve BibTeX entries from inspire.net\n info!(root_logger, \"Retrieving entries\");\n let mut bibtex_entries: Vec<String> = Vec::new();\n for key in keys {\n debug!(root_logger, \"Retrieving record from inspire\";\n \"bibtex_key\" => key);\n if let Some(bibtex_entry) = lib.fetch_bibtex_with_key(key) {\n bibtex_entries.push(bibtex_entry);\n }\n }\n\n \/\/ Write BibTeX entries to file or stdout\n let mut stdout = std::io::stdout();\n let mut output_file: std::fs::File;\n\n let writer: &mut Write = match matches.value_of(\"OUTPUT\") {\n Some(file_name) => {\n info!(root_logger, \"Writing to file\";\n \"file_name\" => file_name);\n output_file = std::fs::OpenOptions::new()\n .append(true)\n .create(true)\n .open(file_name)\n .unwrap();\n &mut output_file\n }\n None => {\n info!(root_logger, \"Writing to stdout\");\n \/\/ stdout.lock();\n &mut stdout\n }\n };\n\n let mut writer = BufWriter::new(writer);\n\n for bibtex_entry in bibtex_entries {\n writer.write_all(&bibtex_entry.as_bytes()).unwrap();\n }\n\n writer.flush().unwrap();\n\n info!(root_logger, \"Done\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Basic functions for dealing with memory\n\/\/!\n\/\/! This module contains functions for querying the size and alignment of\n\/\/! types, initializing and manipulating memory.\n\nuse intrinsics;\nuse ptr;\n\npub use intrinsics::transmute;\n\n\/\/\/ Moves a thing into the void.\n\/\/\/\n\/\/\/ The forget function will take ownership of the provided value but neglect\n\/\/\/ to run any required cleanup or memory management operations on it.\n\/\/\/\n\/\/\/ This function is the unsafe version of the `drop` function because it does\n\/\/\/ not run any destructors.\n#[stable]\npub use intrinsics::forget;\n\n\/\/\/ Returns the size of a type in bytes.\n#[inline]\n#[stable]\npub fn size_of<T>() -> uint {\n unsafe { intrinsics::size_of::<T>() }\n}\n\n\/\/\/ Returns the size of the type that `_val` points to in bytes.\n#[inline]\n#[stable]\npub fn size_of_val<T>(_val: &T) -> uint {\n size_of::<T>()\n}\n\n\/\/\/ Returns the ABI-required minimum alignment of a type\n\/\/\/\n\/\/\/ This is the alignment used for struct fields. It may be smaller\n\/\/\/ than the preferred alignment.\n#[inline]\n#[stable]\npub fn min_align_of<T>() -> uint {\n unsafe { intrinsics::min_align_of::<T>() }\n}\n\n\/\/\/ Returns the ABI-required minimum alignment of the type of the value that\n\/\/\/ `_val` points to\n#[inline]\n#[stable]\npub fn min_align_of_val<T>(_val: &T) -> uint {\n min_align_of::<T>()\n}\n\n\/\/\/ Returns the alignment in memory for a type.\n\/\/\/\n\/\/\/ This function will return the alignment, in bytes, of a type in memory. If\n\/\/\/ the alignment returned is adhered to, then the type is guaranteed to\n\/\/\/ function properly.\n#[inline]\n#[stable]\npub fn align_of<T>() -> uint {\n \/\/ We use the preferred alignment as the default alignment for a type. 
This\n \/\/ appears to be what clang migrated towards as well:\n \/\/\n \/\/ http:\/\/lists.cs.uiuc.edu\/pipermail\/cfe-commits\/Week-of-Mon-20110725\/044411.html\n unsafe { intrinsics::pref_align_of::<T>() }\n}\n\n\/\/\/ Returns the alignment of the type of the value that `_val` points to.\n\/\/\/\n\/\/\/ This is similar to `align_of`, but function will properly handle types such\n\/\/\/ as trait objects (in the future), returning the alignment for an arbitrary\n\/\/\/ value at runtime.\n#[inline]\n#[stable]\npub fn align_of_val<T>(_val: &T) -> uint {\n align_of::<T>()\n}\n\n\/\/\/ Create a value initialized to zero.\n\/\/\/\n\/\/\/ This function is similar to allocating space for a local variable and\n\/\/\/ zeroing it out (an unsafe operation).\n\/\/\/\n\/\/\/ Care must be taken when using this function, if the type `T` has a\n\/\/\/ destructor and the value falls out of scope (due to unwinding or returning)\n\/\/\/ before being initialized, then the destructor will run on zeroed\n\/\/\/ data, likely leading to crashes.\n\/\/\/\n\/\/\/ This is useful for FFI functions sometimes, but should generally be avoided.\n#[inline]\n#[stable]\npub unsafe fn zeroed<T>() -> T {\n intrinsics::init()\n}\n\n\/\/\/ Create an uninitialized value.\n\/\/\/\n\/\/\/ Care must be taken when using this function, if the type `T` has a\n\/\/\/ destructor and the value falls out of scope (due to unwinding or returning)\n\/\/\/ before being initialized, then the destructor will run on uninitialized\n\/\/\/ data, likely leading to crashes.\n\/\/\/\n\/\/\/ This is useful for FFI functions sometimes, but should generally be avoided.\n#[inline]\n#[stable]\npub unsafe fn uninitialized<T>() -> T {\n intrinsics::uninit()\n}\n\n\/\/\/ Swap the values at two mutable locations of the same type, without\n\/\/\/ deinitialising or copying either one.\n#[inline]\n#[stable]\npub fn swap<T>(x: &mut T, y: &mut T) {\n unsafe {\n \/\/ Give ourselves some scratch space to work with\n let mut t: T = uninitialized();\n\n \/\/ Perform the swap, `&mut` pointers never alias\n ptr::copy_nonoverlapping_memory(&mut t, &*x, 1);\n ptr::copy_nonoverlapping_memory(x, &*y, 1);\n ptr::copy_nonoverlapping_memory(y, &t, 1);\n\n \/\/ y and t now point to the same thing, but we need to completely forget `t`\n \/\/ because it's no longer relevant.\n forget(t);\n }\n}\n\n\/\/\/ Replace the value at a mutable location with a new one, returning the old\n\/\/\/ value, without deinitialising or copying either one.\n\/\/\/\n\/\/\/ This is primarily used for transferring and swapping ownership of a value\n\/\/\/ in a mutable location. For example, this function allows consumption of\n\/\/\/ one field of a struct by replacing it with another value. The normal approach\n\/\/\/ doesn't always work:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ struct Buffer<T> { buf: Vec<T> }\n\/\/\/\n\/\/\/ impl<T> Buffer<T> {\n\/\/\/ fn get_and_reset(&mut self) -> Vec<T> {\n\/\/\/ \/\/ error: cannot move out of dereference of `&mut`-pointer\n\/\/\/ let buf = self.buf;\n\/\/\/ self.buf = Vec::new();\n\/\/\/ buf\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Note that `T` does not necessarily implement `Clone`, so it can't even\n\/\/\/ clone and reset `self.buf`. 
But `replace` can be used to disassociate\n\/\/\/ the original value of `self.buf` from `self`, allowing it to be returned:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # struct Buffer<T> { buf: Vec<T> }\n\/\/\/ impl<T> Buffer<T> {\n\/\/\/ fn get_and_reset(&mut self) -> Vec<T> {\n\/\/\/ use std::mem::replace;\n\/\/\/ replace(&mut self.buf, Vec::new())\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn replace<T>(dest: &mut T, mut src: T) -> T {\n swap(dest, &mut src);\n src\n}\n\n\/\/\/ Disposes of a value.\n\/\/\/\n\/\/\/ This function can be used to destroy any value by allowing `drop` to take\n\/\/\/ ownership of its argument.\n\/\/\/\n\/\/\/ # Example\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::cell::RefCell;\n\/\/\/\n\/\/\/ let x = RefCell::new(1i);\n\/\/\/\n\/\/\/ let mut mutable_borrow = x.borrow_mut();\n\/\/\/ *mutable_borrow = 1;\n\/\/\/ drop(mutable_borrow); \/\/ relinquish the mutable borrow on this slot\n\/\/\/\n\/\/\/ let borrow = x.borrow();\n\/\/\/ println!(\"{}\", *borrow);\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn drop<T>(_x: T) { }\n\n\/\/\/ Interprets `src` as `&U`, and then reads `src` without moving the contained\n\/\/\/ value.\n\/\/\/\n\/\/\/ This function will unsafely assume the pointer `src` is valid for\n\/\/\/ `sizeof(U)` bytes by transmuting `&T` to `&U` and then reading the `&U`. It\n\/\/\/ will also unsafely create a copy of the contained value instead of moving\n\/\/\/ out of `src`.\n\/\/\/\n\/\/\/ It is not a compile-time error if `T` and `U` have different sizes, but it\n\/\/\/ is highly encouraged to only invoke this function where `T` and `U` have the\n\/\/\/ same size. This function triggers undefined behavior if `U` is larger than\n\/\/\/ `T`.\n#[inline]\n#[stable]\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n ptr::read(src as *const T as *const U)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline]\n#[unstable = \"this function may be removed in the future due to its \\\n questionable utility\"]\npub unsafe fn copy_lifetime<'a, S, T:'a>(_ptr: &'a S, ptr: &T) -> &'a T {\n transmute(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second mutable pointer to match the first.\n#[inline]\n#[unstable = \"this function may be removed in the future due to its \\\n questionable utility\"]\npub unsafe fn copy_mut_lifetime<'a, S, T:'a>(_ptr: &'a mut S,\n ptr: &mut T) -> &'a mut T {\n transmute(ptr)\n}\n<commit_msg>Document std::mem<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Basic functions for dealing with memory\n\/\/!\n\/\/! This module contains functions for querying the size and alignment of\n\/\/! 
types, initializing and manipulating memory.\n\nuse intrinsics;\nuse ptr;\n\npub use intrinsics::transmute;\n\n\/\/\/ Moves a thing into the void.\n\/\/\/\n\/\/\/ The forget function will take ownership of the provided value but neglect\n\/\/\/ to run any required cleanup or memory management operations on it.\n\/\/\/\n\/\/\/ This function is the unsafe version of the `drop` function because it does\n\/\/\/ not run any destructors.\n#[stable]\npub use intrinsics::forget;\n\n\/\/\/ Returns the size of a type in bytes.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::size_of::<i32>());\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn size_of<T>() -> uint {\n unsafe { intrinsics::size_of::<T>() }\n}\n\n\/\/\/ Returns the size of the type that `_val` points to in bytes.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::size_of_val(&5i32));\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn size_of_val<T>(_val: &T) -> uint {\n size_of::<T>()\n}\n\n\/\/\/ Returns the ABI-required minimum alignment of a type\n\/\/\/\n\/\/\/ This is the alignment used for struct fields. It may be smaller than the preferred alignment.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::min_align_of::<i32>());\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn min_align_of<T>() -> uint {\n unsafe { intrinsics::min_align_of::<T>() }\n}\n\n\/\/\/ Returns the ABI-required minimum alignment of the type of the value that `_val` points to\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::min_align_of_val(&5i32));\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn min_align_of_val<T>(_val: &T) -> uint {\n min_align_of::<T>()\n}\n\n\/\/\/ Returns the alignment in memory for a type.\n\/\/\/\n\/\/\/ This function will return the alignment, in bytes, of a type in memory. If the alignment\n\/\/\/ returned is adhered to, then the type is guaranteed to function properly.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::align_of::<i32>());\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn align_of<T>() -> uint {\n \/\/ We use the preferred alignment as the default alignment for a type. 
This\n \/\/ appears to be what clang migrated towards as well:\n \/\/\n \/\/ http:\/\/lists.cs.uiuc.edu\/pipermail\/cfe-commits\/Week-of-Mon-20110725\/044411.html\n unsafe { intrinsics::pref_align_of::<T>() }\n}\n\n\/\/\/ Returns the alignment of the type of the value that `_val` points to.\n\/\/\/\n\/\/\/ This is similar to `align_of`, but function will properly handle types such as trait objects\n\/\/\/ (in the future), returning the alignment for an arbitrary value at runtime.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ assert_eq!(4, mem::align_of_val(&5i32));\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn align_of_val<T>(_val: &T) -> uint {\n align_of::<T>()\n}\n\n\/\/\/ Create a value initialized to zero.\n\/\/\/\n\/\/\/ This function is similar to allocating space for a local variable and zeroing it out (an unsafe\n\/\/\/ operation).\n\/\/\/\n\/\/\/ Care must be taken when using this function, if the type `T` has a destructor and the value\n\/\/\/ falls out of scope (due to unwinding or returning) before being initialized, then the\n\/\/\/ destructor will run on zeroed data, likely leading to crashes.\n\/\/\/\n\/\/\/ This is useful for FFI functions sometimes, but should generally be avoided.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ let x: int = unsafe { mem::zeroed() };\n\/\/\/ ```\n#[inline]\n#[stable]\npub unsafe fn zeroed<T>() -> T {\n intrinsics::init()\n}\n\n\/\/\/ Create an uninitialized value.\n\/\/\/\n\/\/\/ Care must be taken when using this function, if the type `T` has a destructor and the value\n\/\/\/ falls out of scope (due to unwinding or returning) before being initialized, then the\n\/\/\/ destructor will run on uninitialized data, likely leading to crashes.\n\/\/\/\n\/\/\/ This is useful for FFI functions sometimes, but should generally be avoided.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ let x: int = unsafe { mem::uninitialized() };\n\/\/\/ ```\n#[inline]\n#[stable]\npub unsafe fn uninitialized<T>() -> T {\n intrinsics::uninit()\n}\n\n\/\/\/ Swap the values at two mutable locations of the same type, without deinitialising or copying\n\/\/\/ either one.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ let x = &mut 5i;\n\/\/\/ let y = &mut 42i;\n\/\/\/\n\/\/\/ mem::swap(x, y);\n\/\/\/\n\/\/\/ assert_eq!(42i, *x);\n\/\/\/ assert_eq!(5i, *y);\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn swap<T>(x: &mut T, y: &mut T) {\n unsafe {\n \/\/ Give ourselves some scratch space to work with\n let mut t: T = uninitialized();\n\n \/\/ Perform the swap, `&mut` pointers never alias\n ptr::copy_nonoverlapping_memory(&mut t, &*x, 1);\n ptr::copy_nonoverlapping_memory(x, &*y, 1);\n ptr::copy_nonoverlapping_memory(y, &t, 1);\n\n \/\/ y and t now point to the same thing, but we need to completely forget `t`\n \/\/ because it's no longer relevant.\n forget(t);\n }\n}\n\n\/\/\/ Replace the value at a mutable location with a new one, returning the old value, without\n\/\/\/ deinitialising or copying either one.\n\/\/\/\n\/\/\/ This is primarily used for transferring and swapping ownership of a value in a mutable\n\/\/\/ location.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A simple example:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ let mut v: Vec<i32> = Vec::new();\n\/\/\/\n\/\/\/ mem::replace(&mut v, Vec::new());\n\/\/\/ ```\n\/\/\/\n\/\/\/ This function allows consumption of one field of a struct by replacing it with 
another value.\n\/\/\/ The normal approach doesn't always work:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ struct Buffer<T> { buf: Vec<T> }\n\/\/\/\n\/\/\/ impl<T> Buffer<T> {\n\/\/\/ fn get_and_reset(&mut self) -> Vec<T> {\n\/\/\/ \/\/ error: cannot move out of dereference of `&mut`-pointer\n\/\/\/ let buf = self.buf;\n\/\/\/ self.buf = Vec::new();\n\/\/\/ buf\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Note that `T` does not necessarily implement `Clone`, so it can't even clone and reset\n\/\/\/ `self.buf`. But `replace` can be used to disassociate the original value of `self.buf` from\n\/\/\/ `self`, allowing it to be returned:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use std::mem;\n\/\/\/ # struct Buffer<T> { buf: Vec<T> }\n\/\/\/ impl<T> Buffer<T> {\n\/\/\/ fn get_and_reset(&mut self) -> Vec<T> {\n\/\/\/ mem::replace(&mut self.buf, Vec::new())\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn replace<T>(dest: &mut T, mut src: T) -> T {\n swap(dest, &mut src);\n src\n}\n\n\/\/\/ Disposes of a value.\n\/\/\/\n\/\/\/ This function can be used to destroy any value by allowing `drop` to take ownership of its\n\/\/\/ argument.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::cell::RefCell;\n\/\/\/\n\/\/\/ let x = RefCell::new(1i);\n\/\/\/\n\/\/\/ let mut mutable_borrow = x.borrow_mut();\n\/\/\/ *mutable_borrow = 1;\n\/\/\/\n\/\/\/ drop(mutable_borrow); \/\/ relinquish the mutable borrow on this slot\n\/\/\/\n\/\/\/ let borrow = x.borrow();\n\/\/\/ println!(\"{}\", *borrow);\n\/\/\/ ```\n#[inline]\n#[stable]\npub fn drop<T>(_x: T) { }\n\n\/\/\/ Interprets `src` as `&U`, and then reads `src` without moving the contained value.\n\/\/\/\n\/\/\/ This function will unsafely assume the pointer `src` is valid for `sizeof(U)` bytes by\n\/\/\/ transmuting `&T` to `&U` and then reading the `&U`. It will also unsafely create a copy of the\n\/\/\/ contained value instead of moving out of `src`.\n\/\/\/\n\/\/\/ It is not a compile-time error if `T` and `U` have different sizes, but it is highly encouraged\n\/\/\/ to only invoke this function where `T` and `U` have the same size. This function triggers\n\/\/\/ undefined behavior if `U` is larger than `T`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::mem;\n\/\/\/\n\/\/\/ let one = unsafe { mem::transmute_copy(&1i) };\n\/\/\/\n\/\/\/ assert_eq!(1u, one);\n\/\/\/ ```\n#[inline]\n#[stable]\npub unsafe fn transmute_copy<T, U>(src: &T) -> U {\n ptr::read(src as *const T as *const U)\n}\n\n\/\/\/ Transforms lifetime of the second pointer to match the first.\n#[inline]\n#[unstable = \"this function may be removed in the future due to its \\\n questionable utility\"]\npub unsafe fn copy_lifetime<'a, S, T:'a>(_ptr: &'a S, ptr: &T) -> &'a T {\n transmute(ptr)\n}\n\n\/\/\/ Transforms lifetime of the second mutable pointer to match the first.\n#[inline]\n#[unstable = \"this function may be removed in the future due to its \\\n questionable utility\"]\npub unsafe fn copy_mut_lifetime<'a, S, T:'a>(_ptr: &'a mut S,\n ptr: &mut T) -> &'a mut T {\n transmute(ptr)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate build_helper;\nextern crate cc;\n\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\nuse build_helper::{run, native_lib_boilerplate};\n\nfn main() {\n \/\/ FIXME: This is a hack to support building targets that don't\n \/\/ support jemalloc alongside hosts that do. The jemalloc build is\n \/\/ controlled by a feature of the std crate, and if that feature\n \/\/ changes between targets, it invalidates the fingerprint of\n \/\/ std's build script (this is a cargo bug); so we must ensure\n \/\/ that the feature set used by std is the same across all\n \/\/ targets, which means we have to build the alloc_jemalloc crate\n \/\/ for targets like emscripten, even if we don't use it.\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n if target.contains(\"bitrig\") || target.contains(\"cloudabi\") || target.contains(\"emscripten\") ||\n target.contains(\"fuchsia\") || target.contains(\"msvc\") || target.contains(\"openbsd\") ||\n target.contains(\"redox\") || target.contains(\"rumprun\") || target.contains(\"wasm32\") {\n println!(\"cargo:rustc-cfg=dummy_jemalloc\");\n return;\n }\n\n if target.contains(\"android\") {\n println!(\"cargo:rustc-link-lib=gcc\");\n } else if !target.contains(\"windows\") && !target.contains(\"musl\") {\n println!(\"cargo:rustc-link-lib=pthread\");\n }\n\n if let Some(jemalloc) = env::var_os(\"JEMALLOC_OVERRIDE\") {\n let jemalloc = PathBuf::from(jemalloc);\n println!(\"cargo:rustc-link-search=native={}\",\n jemalloc.parent().unwrap().display());\n let stem = jemalloc.file_stem().unwrap().to_str().unwrap();\n let name = jemalloc.file_name().unwrap().to_str().unwrap();\n let kind = if name.ends_with(\".a\") {\n \"static\"\n } else {\n \"dylib\"\n };\n println!(\"cargo:rustc-link-lib={}={}\", kind, &stem[3..]);\n return;\n }\n\n let link_name = if target.contains(\"windows\") { \"jemalloc\" } else { \"jemalloc_pic\" };\n let native = match native_lib_boilerplate(\"jemalloc\", \"jemalloc\", link_name, \"lib\") {\n Ok(native) => native,\n _ => return,\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.arg(native.src_dir.join(\"configure\")\n .to_str()\n .unwrap()\n .replace(\"C:\\\\\", \"\/c\/\")\n .replace(\"\\\\\", \"\/\"))\n .current_dir(&native.out_dir)\n \/\/ jemalloc generates Makefile deps using GCC's \"-MM\" flag. This means\n \/\/ that GCC will run the preprocessor, and only the preprocessor, over\n \/\/ jemalloc's source files. If we don't specify CPPFLAGS, then at least\n \/\/ on ARM that step fails with a \"Missing implementation for 32-bit\n \/\/ atomic operations\" error. This is because no \"-march\" flag will be\n \/\/ passed to GCC, and then GCC won't define the\n \/\/ \"__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4\" macro that jemalloc needs to\n \/\/ select an atomic operation implementation.\n .env(\"CPPFLAGS\", env::var_os(\"CFLAGS\").unwrap_or_default());\n\n if target.contains(\"ios\") {\n cmd.arg(\"--disable-tls\");\n } else if target.contains(\"android\") {\n \/\/ We force android to have prefixed symbols because apparently\n \/\/ replacement of the libc allocator doesn't quite work. 
When this was\n \/\/ tested (unprefixed symbols), it was found that the `realpath`\n \/\/ function in libc would allocate with libc malloc (not jemalloc\n \/\/ malloc), and then the standard library would free with jemalloc free,\n \/\/ causing a segfault.\n \/\/\n \/\/ If the test suite passes, however, without symbol prefixes then we\n \/\/ should be good to go!\n cmd.arg(\"--with-jemalloc-prefix=je_\");\n cmd.arg(\"--disable-tls\");\n } else if target.contains(\"dragonfly\") || target.contains(\"musl\") {\n cmd.arg(\"--with-jemalloc-prefix=je_\");\n }\n\n \/\/ FIXME: building with jemalloc assertions is currently broken.\n \/\/ See <https:\/\/github.com\/rust-lang\/rust\/issues\/44152>.\n \/\/if cfg!(feature = \"debug\") {\n \/\/ cmd.arg(\"--enable-debug\");\n \/\/}\n\n cmd.arg(format!(\"--host={}\", build_helper::gnu_target(&target)));\n cmd.arg(format!(\"--build={}\", build_helper::gnu_target(&host)));\n\n \/\/ for some reason, jemalloc configure doesn't detect this value\n \/\/ automatically for this target\n if target == \"sparc64-unknown-linux-gnu\" {\n cmd.arg(\"--with-lg-quantum=4\");\n }\n\n run(&mut cmd);\n\n let mut make = Command::new(build_helper::make(&host));\n make.current_dir(&native.out_dir)\n .arg(\"build_lib_static\");\n\n \/\/ These are intended for mingw32-make which we don't use\n if cfg!(windows) {\n make.env_remove(\"MAKEFLAGS\").env_remove(\"MFLAGS\");\n }\n\n \/\/ mingw make seems... buggy? unclear...\n if !host.contains(\"windows\") {\n make.arg(\"-j\")\n .arg(env::var(\"NUM_JOBS\").expect(\"NUM_JOBS was not set\"));\n }\n\n run(&mut make);\n\n \/\/ The pthread_atfork symbols is used by jemalloc on android but the really\n \/\/ old android we're building on doesn't have them defined, so just make\n \/\/ sure the symbols are available.\n if target.contains(\"androideabi\") {\n println!(\"cargo:rerun-if-changed=pthread_atfork_dummy.c\");\n cc::Build::new()\n .flag(\"-fvisibility=hidden\")\n .file(\"pthread_atfork_dummy.c\")\n .compile(\"pthread_atfork_dummy\");\n }\n}\n<commit_msg>Make liballoc_jemalloc work on CloudABI.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate build_helper;\nextern crate cc;\n\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\nuse build_helper::{run, native_lib_boilerplate};\n\nfn main() {\n \/\/ FIXME: This is a hack to support building targets that don't\n \/\/ support jemalloc alongside hosts that do. 
The jemalloc build is\n \/\/ controlled by a feature of the std crate, and if that feature\n \/\/ changes between targets, it invalidates the fingerprint of\n \/\/ std's build script (this is a cargo bug); so we must ensure\n \/\/ that the feature set used by std is the same across all\n \/\/ targets, which means we have to build the alloc_jemalloc crate\n \/\/ for targets like emscripten, even if we don't use it.\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n if target.contains(\"bitrig\") || target.contains(\"emscripten\") || target.contains(\"fuchsia\") ||\n target.contains(\"msvc\") || target.contains(\"openbsd\") || target.contains(\"redox\") ||\n target.contains(\"rumprun\") || target.contains(\"wasm32\") {\n println!(\"cargo:rustc-cfg=dummy_jemalloc\");\n return;\n }\n\n \/\/ CloudABI ships with a copy of jemalloc that has been patched to\n \/\/ work well with sandboxing. Don't attempt to build our own copy,\n \/\/ as it won't build.\n if target.contains(\"cloudabi\") {\n return;\n }\n\n if target.contains(\"android\") {\n println!(\"cargo:rustc-link-lib=gcc\");\n } else if !target.contains(\"windows\") && !target.contains(\"musl\") {\n println!(\"cargo:rustc-link-lib=pthread\");\n }\n\n if let Some(jemalloc) = env::var_os(\"JEMALLOC_OVERRIDE\") {\n let jemalloc = PathBuf::from(jemalloc);\n println!(\"cargo:rustc-link-search=native={}\",\n jemalloc.parent().unwrap().display());\n let stem = jemalloc.file_stem().unwrap().to_str().unwrap();\n let name = jemalloc.file_name().unwrap().to_str().unwrap();\n let kind = if name.ends_with(\".a\") {\n \"static\"\n } else {\n \"dylib\"\n };\n println!(\"cargo:rustc-link-lib={}={}\", kind, &stem[3..]);\n return;\n }\n\n let link_name = if target.contains(\"windows\") { \"jemalloc\" } else { \"jemalloc_pic\" };\n let native = match native_lib_boilerplate(\"jemalloc\", \"jemalloc\", link_name, \"lib\") {\n Ok(native) => native,\n _ => return,\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.arg(native.src_dir.join(\"configure\")\n .to_str()\n .unwrap()\n .replace(\"C:\\\\\", \"\/c\/\")\n .replace(\"\\\\\", \"\/\"))\n .current_dir(&native.out_dir)\n \/\/ jemalloc generates Makefile deps using GCC's \"-MM\" flag. This means\n \/\/ that GCC will run the preprocessor, and only the preprocessor, over\n \/\/ jemalloc's source files. If we don't specify CPPFLAGS, then at least\n \/\/ on ARM that step fails with a \"Missing implementation for 32-bit\n \/\/ atomic operations\" error. This is because no \"-march\" flag will be\n \/\/ passed to GCC, and then GCC won't define the\n \/\/ \"__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4\" macro that jemalloc needs to\n \/\/ select an atomic operation implementation.\n .env(\"CPPFLAGS\", env::var_os(\"CFLAGS\").unwrap_or_default());\n\n if target.contains(\"ios\") {\n cmd.arg(\"--disable-tls\");\n } else if target.contains(\"android\") {\n \/\/ We force android to have prefixed symbols because apparently\n \/\/ replacement of the libc allocator doesn't quite work. 
When this was\n \/\/ tested (unprefixed symbols), it was found that the `realpath`\n \/\/ function in libc would allocate with libc malloc (not jemalloc\n \/\/ malloc), and then the standard library would free with jemalloc free,\n \/\/ causing a segfault.\n \/\/\n \/\/ If the test suite passes, however, without symbol prefixes then we\n \/\/ should be good to go!\n cmd.arg(\"--with-jemalloc-prefix=je_\");\n cmd.arg(\"--disable-tls\");\n } else if target.contains(\"dragonfly\") || target.contains(\"musl\") {\n cmd.arg(\"--with-jemalloc-prefix=je_\");\n }\n\n \/\/ FIXME: building with jemalloc assertions is currently broken.\n \/\/ See <https:\/\/github.com\/rust-lang\/rust\/issues\/44152>.\n \/\/if cfg!(feature = \"debug\") {\n \/\/ cmd.arg(\"--enable-debug\");\n \/\/}\n\n cmd.arg(format!(\"--host={}\", build_helper::gnu_target(&target)));\n cmd.arg(format!(\"--build={}\", build_helper::gnu_target(&host)));\n\n \/\/ for some reason, jemalloc configure doesn't detect this value\n \/\/ automatically for this target\n if target == \"sparc64-unknown-linux-gnu\" {\n cmd.arg(\"--with-lg-quantum=4\");\n }\n\n run(&mut cmd);\n\n let mut make = Command::new(build_helper::make(&host));\n make.current_dir(&native.out_dir)\n .arg(\"build_lib_static\");\n\n \/\/ These are intended for mingw32-make which we don't use\n if cfg!(windows) {\n make.env_remove(\"MAKEFLAGS\").env_remove(\"MFLAGS\");\n }\n\n \/\/ mingw make seems... buggy? unclear...\n if !host.contains(\"windows\") {\n make.arg(\"-j\")\n .arg(env::var(\"NUM_JOBS\").expect(\"NUM_JOBS was not set\"));\n }\n\n run(&mut make);\n\n \/\/ The pthread_atfork symbols is used by jemalloc on android but the really\n \/\/ old android we're building on doesn't have them defined, so just make\n \/\/ sure the symbols are available.\n if target.contains(\"androideabi\") {\n println!(\"cargo:rerun-if-changed=pthread_atfork_dummy.c\");\n cc::Build::new()\n .flag(\"-fvisibility=hidden\")\n .file(\"pthread_atfork_dummy.c\")\n .compile(\"pthread_atfork_dummy\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added traits for objects that are option-like<commit_after>\/\/! This module provides implementations of Option's methods for objects that are sufficiently\n\/\/! like Options. This modules is a candidate to be put in its own crate.\n\/\/!\n\/\/! Most of the examples for these methods were adapted from the `std::option` examples.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use checked::AsRefOption;\n\/\/!\n\/\/! struct OptionWrapper<T>(Option<T>);\n\/\/!\n\/\/! impl<T> AsRef<Option<T>> for OptionWrapper<T> {\n\/\/! fn as_ref(&self) -> &Option<T>{\n\/\/! &self.0\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! impl<T> AsRefOption<T> for OptionWrapper<T> {}\n\/\/!\n\/\/! let x = OptionWrapper(Some(5_i32)); \/\/ x is an OptionWrapper<i32>\n\/\/!\n\/\/! \/\/ We can now use some of Option's methods on x directly.\n\/\/! assert!(x.is_some());\n\/\/! 
```\n\nuse std::option::{Iter, IterMut};\n\n\n\/\/ These impls don't seem to work as intended.\n\/\/ impl<T, U> IntoOption<T> for U\n\/\/ where\n\/\/ U: Into<Option<T>>,\n\/\/ {\n\/\/ }\n\/\/\n\/\/ impl<T, U> AsRefOption<T> for U\n\/\/ where\n\/\/ U: AsRef<Option<T>>,\n\/\/ {\n\/\/ }\n\/\/\n\/\/ impl<T, U> AsMutOption<T> for U\n\/\/ where\n\/\/ U: AsMut<Option<T>>,\n\/\/ {\n\/\/ }\n\n\/\/\/ The `IntoOption` trait implements all the methods of `Option` that consume self.\npub trait IntoOption<T>: Into<Option<T>> {\n \/\/\/ Unwraps an option-like, yielding the content of a `Some`.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if the value is a `None` with a custom panic message provided by\n \/\/\/ `msg`.\n \/\/\/\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use checked::{Checked, IntoOption};\n \/\/\/\n \/\/\/ let x = Checked::from(5_i32);\n \/\/\/ assert_eq!(x.expect(\"the world is ending\"), 5);\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```{.should_panic}\n \/\/\/ use checked::{Checked, IntoOption};\n \/\/\/\n \/\/\/ let x: Checked<&str> = Checked(None);\n \/\/\/ x.expect(\"the world is ending\"); \/\/ panics with `the world is ending`\n \/\/\/ ```\n fn expect(self, msg: &str) -> T {\n <Self as Into<Option<T>>>::into(self).expect(msg)\n }\n\n \/\/\/ Moves the value `v` out of the `Option<T>` if it is `Some(v)`.\n \/\/\/\n \/\/\/ In general, because this function may panic, its use is discouraged.\n \/\/\/ Instead, prefer to use pattern matching and handle the `None`\n \/\/\/ case explicitly.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if the self value equals `None`.\n \/\/\/\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use checked::{Checked, IntoOption};\n \/\/\/\n \/\/\/ let x = Checked(Some(\"air\"));\n \/\/\/ assert_eq!(x.unwrap(), \"air\");\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```{.should_panic}\n \/\/\/ use checked::{Checked, IntoOption};\n \/\/\/\n \/\/\/ let x: Checked<&str> = Checked(None);\n \/\/\/ assert_eq!(x.unwrap(), \"air\"); \/\/ fails\n \/\/\/ ```\n fn unwrap(self) -> T {\n <Self as Into<Option<T>>>::into(self).unwrap()\n }\n\n \/\/\/ Returns the contained value or a default.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use checked::{Checked, IntoOption};\n \/\/\/\n \/\/\/ assert_eq!(Checked(Some(\"car\")).unwrap_or(\"bike\"), \"car\");\n \/\/\/ assert_eq!(Checked(None).unwrap_or(\"bike\"), \"bike\");\n \/\/\/ ```\n fn unwrap_or(self, def: T) -> T {\n <Self as Into<Option<T>>>::into(self).unwrap_or(def)\n }\n\n fn unwrap_or_else<F>(self, f: F) -> T\n where\n F: FnOnce() -> T,\n {\n <Self as Into<Option<T>>>::into(self).unwrap_or_else(f)\n }\n\n fn map<U, F>(self, f: F) -> Option<U>\n where\n F: FnOnce(T) -> U,\n {\n <Self as Into<Option<T>>>::into(self).map(f)\n }\n\n fn map_or<U, F>(self, default: U, f: F) -> U\n where\n F: FnOnce(T) -> U,\n {\n <Self as Into<Option<T>>>::into(self).map_or(default, f)\n }\n\n fn map_or_else<U, D, F>(self, default: D, f: F) -> U\n where\n D: FnOnce() -> U,\n F: FnOnce(T) -> U,\n {\n <Self as Into<Option<T>>>::into(self).map_or_else(default, f)\n }\n\n fn ok_or<E>(self, err: E) -> Result<T, E> {\n <Self as Into<Option<T>>>::into(self).ok_or(err)\n }\n\n fn ok_or_else<E, F>(self, err: F) -> Result<T, E>\n where\n F: FnOnce() -> E,\n {\n <Self as Into<Option<T>>>::into(self).ok_or_else(err)\n }\n\n fn and<U>(self, optb: Option<U>) -> Option<U> {\n <Self as Into<Option<T>>>::into(self).and(optb)\n }\n\n fn and_then<U, F>(self, f: F) -> Option<U>\n where\n F: FnOnce(T) -> Option<U>,\n {\n <Self as 
Into<Option<T>>>::into(self).and_then(f)\n }\n\n fn or(self, optb: Option<T>) -> Option<T> {\n <Self as Into<Option<T>>>::into(self).or(optb)\n }\n\n fn or_else<F>(self, f: F) -> Option<T>\n where\n F: FnOnce() -> Option<T>,\n {\n <Self as Into<Option<T>>>::into(self).or_else(f)\n }\n}\n\n\/\/\/ The `AsRefOption` trait implements all the methods of `Option` that take an immutable reference\n\/\/\/ to self.\npub trait AsRefOption<T>: AsRef<Option<T>> {\n fn is_some(&self) -> bool {\n <Self as AsRef<Option<T>>>::as_ref(self).is_some()\n }\n\n fn is_none(&self) -> bool {\n <Self as AsRef<Option<T>>>::as_ref(self).is_none()\n }\n\n fn as_ref(&self) -> Option<&T> {\n <Self as AsRef<Option<T>>>::as_ref(self).as_ref()\n }\n\n fn iter(&self) -> Iter<T> {\n <Self as AsRef<Option<T>>>::as_ref(self).iter()\n }\n}\n\n\/\/\/ The `AsMutOption` trait implements all the methods of `Option` that take a mutable reference to\n\/\/\/ self.\npub trait AsMutOption<T>: AsMut<Option<T>> {\n fn as_mut(&mut self) -> Option<&mut T> {\n <Self as AsMut<Option<T>>>::as_mut(self).as_mut()\n }\n\n fn iter_mut(&mut self) -> IterMut<T> {\n <Self as AsMut<Option<T>>>::as_mut(self).iter_mut()\n }\n\n #[cfg(feature = \"option_entry\")]\n fn get_or_insert(&mut self, v: T) -> &mut T {\n <Self as AsMut<Option<T>>>::as_mut(self).get_or_insert(v)\n }\n\n #[cfg(feature = \"option_entry\")]\n fn get_or_insert_with<F>(&mut self, f: F) -> &mut T\n where\n F: FnOnce() -> T,\n {\n <Self as AsMut<Option<T>>>::as_mut(self).get_or_insert_with(f)\n }\n\n fn take(&mut self) -> Option<T> {\n <Self as AsMut<Option<T>>>::as_mut(self).take()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate minifb;\nextern crate bgfx_rs;\nextern crate viewdock;\n\nuse bgfx_rs::Bgfx;\nuse libc::{c_void, c_int};\nuse minifb::{Scale, WindowOptions, MouseMode, MouseButton, Key, KeyRepeat};\nuse core::view_plugins::{ViewHandle, ViewPlugins, ViewInstance};\nuse core::session::{Sessions, Session, SessionHandle};\nuse self::viewdock::{Workspace, Rect, Direction, DockHandle};\nuse imgui_sys::Imgui;\nuse prodbg_api::ui_ffi::{PDVec2};\nuse prodbg_api::view::CViewCallbacks;\n\nconst WIDTH: usize = 1280;\nconst HEIGHT: usize = 800;\n\npub struct Window {\n \/\/\/ minifb window\n pub win: minifb::Window,\n\n \/\/\/ Views in this window\n pub views: Vec<ViewHandle>,\n\n \/\/\/\n pub ws: Workspace,\n}\n\nstruct WindowState {\n pub showed_popup: u32,\n pub should_close: bool,\n}\n\n\/\/\/! Windows keeps track of all different windows that are present with in the application\n\/\/\/! There are several ways windows can be created:\n\/\/\/!\n\/\/\/! 1. User opens a new window using a shortcut or menu selection.\n\/\/\/! 2. User \"undocks\" a view from an existing window giving it it's own floating window.\n\/\/\/! 3. 
etc\n\npub struct Windows {\n \/\/\/ All the windows being tracked\n windows: Vec<Window>,\n current: usize,\n}\n\nimpl Windows {\n pub fn new() -> Windows {\n Windows {\n windows: Vec::new(),\n current: 0,\n }\n }\n\n \/\/\/ Create a default window which will only be created if there are no other\n pub fn create_default(&mut self) {\n if self.windows.len() > 0 {\n return;\n }\n\n let window = Self::create_window(WIDTH, HEIGHT).expect(\"Unable to create window\");\n\n self.windows.push(window)\n }\n\n pub fn create_window(width: usize, height: usize) -> minifb::Result<Window> {\n let res = minifb::Window::new(\"ProDBG\",\n width,\n height,\n WindowOptions {\n resize: true,\n scale: Scale::X1,\n ..WindowOptions::default()\n });\n match res {\n Ok(win) => {\n Bgfx::create_window(win.get_window_handle() as *const c_void,\n width as c_int,\n height as c_int);\n Ok(Window {\n win: win,\n views: Vec::new(),\n ws: Workspace::new(Rect::new(0.0, 0.0, width as f32, (height - 20) as f32)).unwrap(),\n })\n }\n Err(err) => Err(err),\n }\n }\n\n pub fn create_window_with_menus(&mut self) -> minifb::Result<Window> {\n \/\/const WIDTH: usize = 1280;\n \/\/const HEIGHT: usize = 1024;\n\n let window = try!(Self::create_window(WIDTH, HEIGHT));\n\n \/\/ for menu in &menus {\n \/\/ window.add_menu(m.name, m.menu);\n \/\/ }\n \/\/\n\n Ok(window)\n }\n\n pub fn update(&mut self, sessions: &mut Sessions, view_plugins: &mut ViewPlugins) {\n for i in (0..self.windows.len()).rev() {\n self.windows[i].update(sessions, view_plugins);\n\n if !self.windows[i].win.is_open() {\n self.windows.swap_remove(i);\n }\n }\n }\n\n pub fn get_current(&mut self) -> &mut Window {\n let current = self.current;\n &mut self.windows[current]\n }\n\n \/\/\/ Checks if application should exit (all window instances closed)\n pub fn should_exit(&self) -> bool {\n self.windows.len() == 0\n }\n\n \/\/\/ Save the state of the windows (usually done when exiting the application)\n pub fn save(_filename: &str) {}\n\n \/\/\/ Load the state of all the views from a previous run\n pub fn load(_filename: &str) {}\n}\n\nimpl Window {\n fn is_inside(v: (f32, f32), pos: PDVec2, size: PDVec2) -> bool {\n let x0 = pos.x;\n let y0 = pos.y;\n let x1 = pos.x + size.x;\n let y1 = pos.y + size.y;\n\n if (v.0 >= x0 && v.0 < x1) && (v.1 >= y0 && v.1 < y1) {\n true\n } else {\n false\n }\n }\n\n fn update_view(&self, instance: &mut ViewInstance, session: &mut Session, show_context_menu: bool, mouse: (f32, f32)) -> WindowState {\n let ui = instance.ui;\n\n if let Some(rect) = self.ws.get_rect_by_handle(DockHandle(instance.handle.0)) {\n Imgui::set_window_pos(rect.x, rect.y);\n Imgui::set_window_size(rect.width, rect.height);\n }\n\n let open = Imgui::begin_window(&instance.name, true);\n Imgui::init_state(ui.api);\n\n let pos = ui.get_window_pos();\n let size = ui.get_window_size();\n\n if Self::is_inside(mouse, pos, size) && show_context_menu {\n Imgui::mark_show_popup(ui.api, true);\n } else {\n Imgui::mark_show_popup(ui.api, false);\n }\n\n unsafe {\n let plugin_funcs = instance.plugin_type.plugin_funcs as *mut CViewCallbacks;\n ((*plugin_funcs).update.unwrap())(instance.plugin_data,\n ui.api as *mut c_void,\n session.reader.api as *mut c_void,\n session.get_current_writer().api as *mut c_void);\n }\n\n let has_shown_menu = Imgui::has_showed_popup(ui.api);\n\n Imgui::end_window();\n\n WindowState {\n showed_popup: has_shown_menu,\n should_close: !open,\n }\n }\n\n pub fn remove_views(&mut self, view_plugins: &mut ViewPlugins, views: &Vec<ViewHandle>) {\n for view 
in views {\n view_plugins.destroy_instance(*view);\n \/\/println!(\"------------------ before delete ----------------------------\");\n \/\/self.ws.dump_tree();\n self.ws.delete_by_handle(DockHandle(view.0));\n \/\/println!(\"------------------ after delete ----------------------------\");\n \/\/self.ws.dump_tree();\n }\n }\n\n pub fn update(&mut self, sessions: &mut Sessions, view_plugins: &mut ViewPlugins) {\n let mut views_to_delete = Vec::new();\n let mut has_shown_menu = 0u32;\n\n self.win.update();\n self.ws.update();\n\n let mouse = self.win.get_mouse_pos(MouseMode::Clamp).unwrap_or((0.0, 0.0));\n\n Bgfx::set_mouse_pos(mouse);\n Bgfx::set_mouse_state(0, self.win.get_mouse_down(MouseButton::Left));\n\n let show_context_menu = self.win.get_mouse_down(MouseButton::Right);\n\n for view in &self.views {\n if let Some(ref mut v) = view_plugins.get_view(*view) {\n if let Some(ref mut s) = sessions.get_session(v.session_handle) {\n let state = Self::update_view(self, v, s, show_context_menu, mouse);\n\n if state.should_close {\n views_to_delete.push(*view);\n }\n\n has_shown_menu |= state.showed_popup;\n }\n }\n }\n\n if self.win.is_key_pressed(Key::Down, KeyRepeat::No) {\n self.ws.dump_tree();\n }\n\n if self.win.is_key_pressed(Key::Up, KeyRepeat::No) {\n let _ = self.ws.save(\"\/Users\/danielcollin\/code\/temp\/test.xml\");\n }\n\n if self.win.is_key_pressed(Key::Right, KeyRepeat::No) {\n let ws = Workspace::load(\"\/Users\/danielcollin\/code\/temp\/test.xml\").unwrap();\n let docks = ws.get_docks();\n self.views.clear();\n\n for dock in &docks {\n \/\/println!(\"create stuff... {} - {}\", dock.name, dock.handle.0);\n let ui = Imgui::create_ui_instance();\n let handle = ViewHandle(dock.handle.0);\n if dock.name == \"\" {\n view_plugins.create_instance_with_handle(ui, &\"Bitmap View\".to_owned(), SessionHandle(0), ViewHandle(dock.handle.0));\n } else {\n view_plugins.create_instance_with_handle(ui, &dock.name, SessionHandle(0), ViewHandle(dock.handle.0));\n }\n self.views.push(handle);\n }\n\n self.ws = ws;\n }\n\n \/\/ if now plugin has showed a menu we do it here\n \/\/ TODO: Handle diffrent cases when attach menu on to plugin menu or not\n\n if has_shown_menu == 0 && show_context_menu {\n Self::show_popup(self, true, mouse, view_plugins);\n } else {\n Self::show_popup(self, false, mouse, view_plugins);\n }\n\n Self::remove_views(self, view_plugins, &views_to_delete);\n }\n\n fn split_view(&mut self, name: &String, view_plugins: &mut ViewPlugins, pos: (f32, f32), direction: Direction) {\n let ui = Imgui::create_ui_instance();\n if let Some(handle) = view_plugins.create_instance(ui, name, SessionHandle(0)) {\n if let Some(dock_handle) = self.ws.get_hover_dock(pos) {\n let new_handle = DockHandle(handle.0);\n self.ws.split_by_dock_handle(direction, dock_handle, new_handle);\n self.ws.set_name_to_handle(name, new_handle);\n \/\/self.ws.dump_tree_linear();\n } else {\n self.ws.new_split(DockHandle(handle.0), direction);\n \/\/println!(\"no split\");\n }\n\n self.views.push(handle);\n }\n }\n\n fn show_popup(&mut self, show: bool, mouse_pos: (f32, f32), view_plugins: &mut ViewPlugins) {\n let ui = Imgui::get_ui();\n\n if show {\n ui.open_popup(\"plugins\");\n }\n\n if ui.begin_popup(\"plugins\") {\n let plugin_names = view_plugins.get_plugin_names();\n\n if ui.begin_menu(\"Split Horizontally\", true) {\n for name in &plugin_names {\n if ui.menu_item(name, false, true) {\n Self::split_view(self, &name, view_plugins, mouse_pos, Direction::Horizontal);\n }\n }\n ui.end_menu();\n }\n\n if 
ui.begin_menu(\"Split Vertically\", true) {\n for name in &plugin_names {\n if ui.menu_item(name, false, true) {\n Self::split_view(self, &name, view_plugins, mouse_pos, Direction::Vertical);\n }\n }\n ui.end_menu();\n }\n\n ui.end_popup();\n }\n }\n}\n<commit_msg>Added basic draging of the sliders<commit_after>extern crate minifb;\nextern crate bgfx_rs;\nextern crate viewdock;\n\nuse bgfx_rs::Bgfx;\nuse libc::{c_void, c_int};\nuse minifb::{Scale, WindowOptions, MouseMode, MouseButton, Key, KeyRepeat};\nuse core::view_plugins::{ViewHandle, ViewPlugins, ViewInstance};\nuse core::session::{Sessions, Session, SessionHandle};\nuse self::viewdock::{Workspace, Rect, Direction, DockHandle, SplitHandle};\nuse imgui_sys::Imgui;\nuse prodbg_api::ui_ffi::{PDVec2};\nuse prodbg_api::view::CViewCallbacks;\n\nconst WIDTH: usize = 1280;\nconst HEIGHT: usize = 800;\n\nenum State {\n Default,\n DraggingSlider,\n}\n\npub struct MouseState {\n handle: Option<SplitHandle>,\n state: State,\n prev_mouse: (f32, f32)\n}\n\nimpl MouseState {\n pub fn new() -> MouseState {\n MouseState {\n handle: None,\n state: State::Default,\n prev_mouse: (0.0, 0.0),\n }\n }\n}\n\npub struct Window {\n \/\/\/ minifb window\n pub win: minifb::Window,\n\n \/\/\/ Views in this window\n pub views: Vec<ViewHandle>,\n\n \/\/\/\n pub ws: Workspace,\n\n pub mouse_state: MouseState,\n}\n\nstruct WindowState {\n pub showed_popup: u32,\n pub should_close: bool,\n}\n\n\/\/\/! Windows keeps track of all different windows that are present with in the application\n\/\/\/! There are several ways windows can be created:\n\/\/\/!\n\/\/\/! 1. User opens a new window using a shortcut or menu selection.\n\/\/\/! 2. User \"undocks\" a view from an existing window giving it it's own floating window.\n\/\/\/! 3. 
etc\n\npub struct Windows {\n \/\/\/ All the windows being tracked\n windows: Vec<Window>,\n current: usize,\n}\n\nimpl Windows {\n pub fn new() -> Windows {\n Windows {\n windows: Vec::new(),\n current: 0,\n }\n }\n\n \/\/\/ Create a default window which will only be created if there are no other\n pub fn create_default(&mut self) {\n if self.windows.len() > 0 {\n return;\n }\n\n let window = Self::create_window(WIDTH, HEIGHT).expect(\"Unable to create window\");\n\n self.windows.push(window)\n }\n\n pub fn create_window(width: usize, height: usize) -> minifb::Result<Window> {\n let res = minifb::Window::new(\"ProDBG\",\n width,\n height,\n WindowOptions {\n resize: true,\n scale: Scale::X1,\n ..WindowOptions::default()\n });\n match res {\n Ok(win) => {\n Bgfx::create_window(win.get_window_handle() as *const c_void,\n width as c_int,\n height as c_int);\n Ok(Window {\n win: win,\n views: Vec::new(),\n mouse_state: MouseState::new(),\n ws: Workspace::new(Rect::new(0.0, 0.0, width as f32, (height - 20) as f32)).unwrap(),\n })\n }\n Err(err) => Err(err),\n }\n }\n\n pub fn create_window_with_menus(&mut self) -> minifb::Result<Window> {\n \/\/const WIDTH: usize = 1280;\n \/\/const HEIGHT: usize = 1024;\n\n let window = try!(Self::create_window(WIDTH, HEIGHT));\n\n \/\/ for menu in &menus {\n \/\/ window.add_menu(m.name, m.menu);\n \/\/ }\n \/\/\n\n Ok(window)\n }\n\n pub fn update(&mut self, sessions: &mut Sessions, view_plugins: &mut ViewPlugins) {\n for i in (0..self.windows.len()).rev() {\n self.windows[i].update(sessions, view_plugins);\n\n if !self.windows[i].win.is_open() {\n self.windows.swap_remove(i);\n }\n }\n }\n\n pub fn get_current(&mut self) -> &mut Window {\n let current = self.current;\n &mut self.windows[current]\n }\n\n \/\/\/ Checks if application should exit (all window instances closed)\n pub fn should_exit(&self) -> bool {\n self.windows.len() == 0\n }\n\n \/\/\/ Save the state of the windows (usually done when exiting the application)\n pub fn save(_filename: &str) {}\n\n \/\/\/ Load the state of all the views from a previous run\n pub fn load(_filename: &str) {}\n}\n\nimpl Window {\n fn is_inside(v: (f32, f32), pos: PDVec2, size: PDVec2) -> bool {\n let x0 = pos.x;\n let y0 = pos.y;\n let x1 = pos.x + size.x;\n let y1 = pos.y + size.y;\n\n if (v.0 >= x0 && v.0 < x1) && (v.1 >= y0 && v.1 < y1) {\n true\n } else {\n false\n }\n }\n\n fn update_view(&self, instance: &mut ViewInstance, session: &mut Session, show_context_menu: bool, mouse: (f32, f32)) -> WindowState {\n let ui = instance.ui;\n\n if let Some(rect) = self.ws.get_rect_by_handle(DockHandle(instance.handle.0)) {\n Imgui::set_window_pos(rect.x, rect.y);\n Imgui::set_window_size(rect.width, rect.height);\n }\n\n let open = Imgui::begin_window(&instance.name, true);\n Imgui::init_state(ui.api);\n\n let pos = ui.get_window_pos();\n let size = ui.get_window_size();\n\n if Self::is_inside(mouse, pos, size) && show_context_menu {\n Imgui::mark_show_popup(ui.api, true);\n } else {\n Imgui::mark_show_popup(ui.api, false);\n }\n\n unsafe {\n let plugin_funcs = instance.plugin_type.plugin_funcs as *mut CViewCallbacks;\n ((*plugin_funcs).update.unwrap())(instance.plugin_data,\n ui.api as *mut c_void,\n session.reader.api as *mut c_void,\n session.get_current_writer().api as *mut c_void);\n }\n\n let has_shown_menu = Imgui::has_showed_popup(ui.api);\n\n Imgui::end_window();\n\n WindowState {\n showed_popup: has_shown_menu,\n should_close: !open,\n }\n }\n\n pub fn remove_views(&mut self, view_plugins: &mut ViewPlugins, 
views: &Vec<ViewHandle>) {\n for view in views {\n view_plugins.destroy_instance(*view);\n \/\/println!(\"------------------ before delete ----------------------------\");\n \/\/self.ws.dump_tree();\n self.ws.delete_by_handle(DockHandle(view.0));\n \/\/println!(\"------------------ after delete ----------------------------\");\n \/\/self.ws.dump_tree();\n }\n }\n\n fn update_mouse_state(&mut self, mouse_pos: (f32, f32)) {\n match self.mouse_state.state {\n State::Default => {\n if let Some(h) = self.ws.is_hovering_sizer(mouse_pos) {\n if self.win.get_mouse_down(MouseButton::Left) {\n self.mouse_state.handle = Some(h);\n self.mouse_state.state = State::DraggingSlider;\n }\n }\n },\n\n State::DraggingSlider => {\n let pm = self.mouse_state.prev_mouse;\n let delta = (pm.0 - mouse_pos.0, pm.1 - mouse_pos.1);\n\n if self.win.get_mouse_down(MouseButton::Left) {\n self.ws.drag_sizer(self.mouse_state.handle.unwrap(), delta);\n } else {\n self.mouse_state.handle = None;\n self.mouse_state.state = State::Default;\n }\n }\n }\n\n \/*\n if let Some(handle) = ws.is_hovering_sizer(mouse_pos) {\n let delta = (prev_mouse.0 - mouse_pos.0, prev_mouse.1 - mouse_pos.1);\n\n if window.get_mouse_down(MouseButton::Left) {\n println!(\"drangging sizer\");\n ws.drag_sizer(handle, delta);\n }\n }\n *\/\n\n self.mouse_state.prev_mouse = mouse_pos;\n }\n\n pub fn update(&mut self, sessions: &mut Sessions, view_plugins: &mut ViewPlugins) {\n let mut views_to_delete = Vec::new();\n let mut has_shown_menu = 0u32;\n\n self.win.update();\n self.ws.update();\n\n let mouse = self.win.get_mouse_pos(MouseMode::Clamp).unwrap_or((0.0, 0.0));\n\n self.update_mouse_state(mouse);\n\n Bgfx::set_mouse_pos(mouse);\n Bgfx::set_mouse_state(0, self.win.get_mouse_down(MouseButton::Left));\n\n let show_context_menu = self.win.get_mouse_down(MouseButton::Right);\n\n for view in &self.views {\n if let Some(ref mut v) = view_plugins.get_view(*view) {\n if let Some(ref mut s) = sessions.get_session(v.session_handle) {\n let state = Self::update_view(self, v, s, show_context_menu, mouse);\n\n if state.should_close {\n views_to_delete.push(*view);\n }\n\n has_shown_menu |= state.showed_popup;\n }\n }\n }\n\n if self.win.is_key_pressed(Key::Down, KeyRepeat::No) {\n self.ws.dump_tree();\n }\n\n if self.win.is_key_pressed(Key::Up, KeyRepeat::No) {\n let _ = self.ws.save(\"\/Users\/danielcollin\/code\/temp\/test.xml\");\n }\n\n if self.win.is_key_pressed(Key::Right, KeyRepeat::No) {\n let ws = Workspace::load(\"\/Users\/danielcollin\/code\/temp\/test.xml\").unwrap();\n let docks = ws.get_docks();\n self.views.clear();\n\n for dock in &docks {\n \/\/println!(\"create stuff... 
{} - {}\", dock.name, dock.handle.0);\n let ui = Imgui::create_ui_instance();\n let handle = ViewHandle(dock.handle.0);\n if dock.name == \"\" {\n view_plugins.create_instance_with_handle(ui, &\"Bitmap View\".to_owned(), SessionHandle(0), ViewHandle(dock.handle.0));\n } else {\n view_plugins.create_instance_with_handle(ui, &dock.name, SessionHandle(0), ViewHandle(dock.handle.0));\n }\n self.views.push(handle);\n }\n\n self.ws = ws;\n }\n\n \/\/ if now plugin has showed a menu we do it here\n \/\/ TODO: Handle diffrent cases when attach menu on to plugin menu or not\n\n if has_shown_menu == 0 && show_context_menu {\n Self::show_popup(self, true, mouse, view_plugins);\n } else {\n Self::show_popup(self, false, mouse, view_plugins);\n }\n\n Self::remove_views(self, view_plugins, &views_to_delete);\n }\n\n fn split_view(&mut self, name: &String, view_plugins: &mut ViewPlugins, pos: (f32, f32), direction: Direction) {\n let ui = Imgui::create_ui_instance();\n if let Some(handle) = view_plugins.create_instance(ui, name, SessionHandle(0)) {\n if let Some(dock_handle) = self.ws.get_hover_dock(pos) {\n let new_handle = DockHandle(handle.0);\n self.ws.split_by_dock_handle(direction, dock_handle, new_handle);\n self.ws.set_name_to_handle(name, new_handle);\n \/\/self.ws.dump_tree_linear();\n } else {\n self.ws.new_split(DockHandle(handle.0), direction);\n \/\/println!(\"no split\");\n }\n\n self.views.push(handle);\n }\n }\n\n fn show_popup(&mut self, show: bool, mouse_pos: (f32, f32), view_plugins: &mut ViewPlugins) {\n let ui = Imgui::get_ui();\n\n if show {\n ui.open_popup(\"plugins\");\n }\n\n if ui.begin_popup(\"plugins\") {\n let plugin_names = view_plugins.get_plugin_names();\n\n if ui.begin_menu(\"Split Horizontally\", true) {\n for name in &plugin_names {\n if ui.menu_item(name, false, true) {\n Self::split_view(self, &name, view_plugins, mouse_pos, Direction::Horizontal);\n }\n }\n ui.end_menu();\n }\n\n if ui.begin_menu(\"Split Vertically\", true) {\n for name in &plugin_names {\n if ui.menu_item(name, false, true) {\n Self::split_view(self, &name, view_plugins, mouse_pos, Direction::Vertical);\n }\n }\n ui.end_menu();\n }\n\n ui.end_popup();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n let number = 23;\n\n match number {\n \/\/ match single value\n 1 => println!(\"One!\"),\n \/\/ match several values\n 2 | 3 | 5 | 7 | 11 => println!(\"This is a prime\"),\n \/\/ match a range\n 13..19 => println!(\"A teen\"),\n \/\/ bind the rest of values to x\n \/\/ and use a `guard` to pick odd numbers\n x if x % 2 == 1 => println!(\"An odd one\"),\n \/\/ the rest of cases\n x => println!(\"{} ain't special\", x),\n }\n\n let pair = (2, 3);\n \/\/ match can be used to destructure a tuple\n match pair {\n (x, y) if x == y => println!(\"These are twins\"),\n (x, y) if x + y == 0 => println!(\"Antimatter, kaboom\"),\n \/\/ _ means don't bind the value to a variable\n (x, _) if x % 2 == 1 => println!(\"The first is odd\"),\n \/\/ _ can be used to match the rest of cases\n _ => println!(\"No correlation...\"),\n }\n\n \/\/ match is an expression\n let big_number = match number {\n 0 => 9000,\n \/\/ blocks are also valid branches\n x if x < 10 => {\n let y = x * x;\n let z = x * x * x;\n x + y + z\n } x => x,\n };\n}\n<commit_msg>match.rs: Formatting: Each branch on separate line<commit_after>fn main() {\n let number = 23;\n\n match number {\n \/\/ match single value\n 1 => println!(\"One!\"),\n \/\/ match several values\n 2 | 3 | 5 | 7 | 11 => println!(\"This is a prime\"),\n \/\/ match 
a range\n 13..19 => println!(\"A teen\"),\n \/\/ bind the rest of values to x\n \/\/ and use a `guard` to pick odd numbers\n x if x % 2 == 1 => println!(\"An odd one\"),\n \/\/ the rest of cases\n x => println!(\"{} ain't special\", x),\n }\n\n let pair = (2, 3);\n \/\/ match can be used to destructure a tuple\n match pair {\n (x, y) if x == y => println!(\"These are twins\"),\n (x, y) if x + y == 0 => println!(\"Antimatter, kaboom\"),\n \/\/ _ means don't bind the value to a variable\n (x, _) if x % 2 == 1 => println!(\"The first is odd\"),\n \/\/ _ can be used to match the rest of cases\n _ => println!(\"No correlation...\"),\n }\n\n \/\/ match is an expression\n let big_number = match number {\n 0 => 9000,\n \/\/ blocks are also valid branches\n x if x < 10 => {\n let y = x * x;\n let z = x * x * x;\n x + y + z\n },\n x => x,\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add reimport to lib.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for #313<commit_after>#[derive(PartialEq, Eq, PartialOrd, Ord)]\npub enum Foo {\n A(&'static str),\n _B,\n _C,\n}\n\npub fn main() {\n let mut b = std::collections::BTreeSet::new();\n b.insert(Foo::A(\"\\'\"));\n b.insert(Foo::A(\"\/=\"));\n b.insert(Foo::A(\"#\"));\n b.insert(Foo::A(\"0o\"));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:sparkles: Add the function of getting Todoist project name<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Missed error handling for KeyFile creation Fixed, added error message to output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use .as_bytes()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting and stray reference<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix SliceIter overflow bug in corner cases (#146)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added `dependencies` module 2<commit_after>\/\/! 
Extract dependency information from extract info.\n\nuse piston_meta::MetaData;\nuse dependencies::Package;\nuse range::Range;\nuse std::rc::Rc;\n\n\/\/\/ Stores extract information.\npub struct Extract {\n \/\/\/ The package name.\n pub package: Rc<String>,\n \/\/\/ The url of the Cargo.toml.\n pub url: Rc<String>,\n \/\/\/ Whether to override the library version to simulate breaking change.\n pub override_version: Option<Rc<String>>,\n}\n\nimpl Extract {\n \/\/\/ Converts from meta data.\n pub fn from_meta_data(\n mut data: &[(Range, MetaData)],\n mut offset: usize,\n ignored: &mut Vec<Range>\n ) -> Result<(Range, Extract), ()> {\n use piston_meta::bootstrap::*;\n\n let start_offset = offset;\n let node = \"library\";\n let start_range = try!(start_node(node, data, offset));\n update(start_range, &mut data, &mut offset);\n\n let mut package: Option<Rc<String>> = None;\n let mut url: Option<Rc<String>> = None;\n let mut override_version: Option<Rc<String>> = None;\n loop {\n if let Ok(range) = end_node(node, data, offset) {\n update(range, &mut data, &mut offset);\n break;\n } else if let Ok((range, val)) = meta_string(\"package\", data, offset) {\n update(range, &mut data, &mut offset);\n package = Some(val);\n } else if let Ok((range, val)) = meta_string(\"url\", data, offset) {\n update(range, &mut data, &mut offset);\n url = Some(val);\n } else if let Ok((range, val)) = meta_string(\"override_version\", data, offset) {\n update(range, &mut data, &mut offset);\n override_version = Some(val);\n } else {\n let range = ignore(data, offset);\n update(range, &mut data, &mut offset);\n ignored.push(range);\n }\n }\n\n let package = try!(package.ok_or(()));\n let url = try!(url.ok_or(()));\n Ok((Range::new(start_offset, offset - start_offset), Extract {\n package: package,\n url: url,\n override_version: override_version,\n }))\n }\n}\n\n\/\/\/ Loads a text file from url.\npub fn load_text_file_from_url(url: &str) -> Result<String, String> {\n use hyper::client::Client;\n use hyper::{Url};\n use hyper::status::StatusCode;\n use std::io::Read;\n\n let url_address = try!(Url::parse(url)\n .map_err(|e| format!(\"Error parsing url: {}\", e)));\n let client = Client::new();\n let request = client.get(url_address);\n let mut response = try!(request.send()\n .map_err(|e| format!(\"Error fetching file over http {}: {}\",\n url, e.to_string())));\n if response.status == StatusCode::Ok {\n let mut data = String::new();\n try!(response.read_to_string(&mut data)\n .map_err(|e| format!(\"Error fetching file over http {}: {}\",\n url, e.to_string())));\n Ok(data)\n } else {\n Err(format!(\"Error fetching file over http {}: {}\",\n url, response.status))\n }\n}\n\n\/\/\/ Converts meta data into extract info.\npub fn convert_extract_info(\n mut data: &[(Range, MetaData)],\n ignored: &mut Vec<Range>\n) -> Result<Vec<Extract>, ()> {\n use piston_meta::bootstrap::*;\n\n let mut list = vec![];\n let mut offset = 0;\n loop {\n if let Ok((range, extract)) = Extract::from_meta_data(data, offset, ignored) {\n update(range, &mut data, &mut offset);\n list.push(extract);\n } else if offset < data.len() {\n return Err(());\n } else {\n break;\n }\n }\n Ok(list)\n}\n\n\/\/\/ Converts meta data into Cargo.toml information.\npub fn convert_cargo_toml(\n data: &[(Range, MetaData)],\n ignored: &mut Vec<Range>\n) -> Result<Package, ()> {\n let offset = 0;\n let (_, package) = try!(Package::from_meta_data(data, offset, ignored));\n Ok((package))\n}\n\n\/\/\/ Extracts dependency info.\npub fn 
extract_dependency_info_from(extract_info: &str) -> Result<String, String> {\n use piston_meta::*;\n use std::io::Write;\n\n let extract_meta_syntax = include_str!(\"..\/assets\/extract\/syntax.txt\");\n let extract_meta_rules = stderr_unwrap(extract_meta_syntax,\n syntax(extract_meta_syntax));\n let extract_data = stderr_unwrap(extract_info,\n parse(&extract_meta_rules, extract_info));\n\n let mut ignored = vec![];\n let list = try!(convert_extract_info(&extract_data, &mut ignored)\n .map_err(|_| String::from(\"Could not convert extract data\")));\n\n \/\/ Stores package and dependency information extracted from Cargo.toml.\n let mut package_data = vec![];\n\n \/\/ Extract information.\n let cargo_toml_syntax = include_str!(\"..\/assets\/cargo-toml\/syntax.txt\");\n let cargo_toml_rules = stderr_unwrap(cargo_toml_syntax,\n syntax(cargo_toml_syntax));\n for extract in &list {\n let config = try!(load_text_file_from_url(&extract.url));\n let cargo_toml_data = stderr_unwrap(&config,\n parse(&cargo_toml_rules, &config));\n\n let mut ignored = vec![];\n let package = try!(convert_cargo_toml(\n &cargo_toml_data, &mut ignored)\n .map_err(|_| format!(\"Could not convert Cargo.toml data for url `{}`\", &extract.url)));\n package_data.push(package);\n }\n\n let mut res: Vec<u8> = vec![];\n writeln!(res, \"{{\").unwrap();\n let n0 = package_data.len();\n for (i0, package) in package_data.iter().enumerate() {\n \/\/ Package name.\n write!(res, \" \").unwrap();\n json::write_string(&mut res, &package.name).unwrap();\n writeln!(res, \": {{\").unwrap();\n\n \/\/ Version.\n write!(res, \" \\\"version\\\": \").unwrap();\n json::write_string(&mut res, &package.version).unwrap();\n writeln!(res, \",\").unwrap();\n\n \/\/ Dependencies.\n writeln!(res, \" \\\"dependencies\\\": {{\").unwrap();\n let n1 = package.dependencies.len();\n for (i1, dependency) in package.dependencies.iter().enumerate() {\n write!(res, \" \").unwrap();\n json::write_string(&mut res, &dependency.name).unwrap();\n writeln!(res, \": {{\").unwrap();\n \/\/ Version.\n write!(res, \" \\\"version\\\": \").unwrap();\n json::write_string(&mut res, &dependency.version).unwrap();\n writeln!(res, \"\").unwrap();\n write!(res, \" }}\").unwrap();\n if i1 + 1 != n1 {\n writeln!(res, \",\").unwrap();\n } else {\n writeln!(res, \"\").unwrap();\n }\n }\n writeln!(res, \" }}\").unwrap();\n\n \/\/ End package.\n write!(res, \" }}\").unwrap();\n if i0 + 1 != n0 {\n writeln!(res, \",\").unwrap();\n } else {\n writeln!(res, \"\").unwrap();\n }\n }\n writeln!(res, \"}}\").unwrap();\n\n let res = try!(String::from_utf8(res)\n .map_err(|e| format!(\"UTF8 error: {}\", e)));\n\n Ok(res)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add Amazon Inspector integration tests<commit_after>#![cfg(feature = \"inspector\")]\n\nextern crate rusoto;\n\nuse rusoto::inspector::{InspectorClient, ListAssessmentRunsRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_list_assessment_runs() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = InspectorClient::new(credentials, Region::UsEast1);\n\n let request = ListAssessmentRunsRequest::default();\n\n match client.list_assessment_runs(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true)\n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test that Hotwatch is send+sync<commit_after>fn require_send<T: Send>() {}\nfn require_sync<T: Sync>() {}\n\n#[test]\nfn 
hotwatch_send() {\n require_send::<hotwatch::Hotwatch>();\n}\n\n#[test]\nfn hotwatch_sync() {\n require_sync::<hotwatch::Hotwatch>();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for enum namespacing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix button not rendering its label<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n let path = arg.clone();\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = 
Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + &c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Remove clone in run command<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No 
request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + &c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Invalid source code formating Fixed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Russian peasant multiplication in Rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2375<commit_after>\/\/ https:\/\/leetcode.com\/problems\/construct-smallest-number-from-di-string\/\npub fn smallest_number(pattern: String) -> String {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", smallest_number(\"IIIDIDDD\".to_string())); \/\/ \"123549876\"\n println!(\"{}\", smallest_number(\"DDD\".to_string())); \/\/ \"4321\"\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Generating a random number for guessing game (rust)<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ The Computer Language Benchmarks Game\n\/\/ http:\/\/benchmarksgame.alioth.debian.org\/\n\/\/\n\/\/ contributed by the Rust Project Developers\n\n\/\/ Copyright (c) 2012-2014 The Rust Project Developers\n\/\/\n\/\/ All rights reserved.\n\/\/\n\/\/ Redistribution and use in source and binary forms, with or without\n\/\/ modification, are permitted provided that the following conditions\n\/\/ are met:\n\/\/\n\/\/ - Redistributions of source code must retain the above copyright\n\/\/ notice, this list of conditions and the following disclaimer.\n\/\/\n\/\/ - Redistributions in binary form must reproduce the above copyright\n\/\/ notice, this list of conditions and the following disclaimer in\n\/\/ the documentation and\/or other materials provided with the\n\/\/ distribution.\n\/\/\n\/\/ - Neither the name of \"The Computer Language Benchmarks Game\" nor\n\/\/ the name of \"The Computer Language Shootout Benchmarks\" nor the\n\/\/ names of its contributors may be used to endorse or promote\n\/\/ products derived from this software without specific prior\n\/\/ written permission.\n\/\/\n\/\/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\/\/ \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\/\/ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 
FITNESS\n\/\/ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE\n\/\/ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n\/\/ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n\/\/ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\/\/ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n\/\/ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n\/\/ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n\/\/ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED\n\/\/ OF THE POSSIBILITY OF SUCH DAMAGE.\n\nuse std::sync::mpsc::{channel, Sender, Receiver};\nuse std::thread::Thread;\n\nfn start(n_tasks: int, token: int) {\n let (tx, mut rx) = channel();\n tx.send(token).unwrap();\n for i in range(2, n_tasks + 1) {\n let (tx, next_rx) = channel();\n Thread::spawn(move|| roundtrip(i, tx, rx));\n rx = next_rx;\n }\n Thread::spawn(move|| roundtrip(1, tx, rx));\n}\n\nfn roundtrip(id: int, tx: Sender<int>, rx: Receiver<int>) {\n for token in rx.iter() {\n if token == 1 {\n println!(\"{}\", id);\n break;\n }\n tx.send(token - 1).unwrap();\n }\n}\n\nfn main() {\n let args = std::os::args();\n let args = args.as_slice();\n let token = if std::os::getenv(\"RUST_BENCH\").is_some() {\n 2000000\n } else {\n args.get(1).and_then(|arg| arg.parse()).unwrap_or(1000)\n };\n let n_tasks = args.get(2)\n .and_then(|arg| arg.parse())\n .unwrap_or(503);\n\n start(n_tasks, token);\n}\n<commit_msg>fix shootout-threadring.rs<commit_after>\/\/ The Computer Language Benchmarks Game\n\/\/ http:\/\/benchmarksgame.alioth.debian.org\/\n\/\/\n\/\/ contributed by the Rust Project Developers\n\n\/\/ Copyright (c) 2012-2014 The Rust Project Developers\n\/\/\n\/\/ All rights reserved.\n\/\/\n\/\/ Redistribution and use in source and binary forms, with or without\n\/\/ modification, are permitted provided that the following conditions\n\/\/ are met:\n\/\/\n\/\/ - Redistributions of source code must retain the above copyright\n\/\/ notice, this list of conditions and the following disclaimer.\n\/\/\n\/\/ - Redistributions in binary form must reproduce the above copyright\n\/\/ notice, this list of conditions and the following disclaimer in\n\/\/ the documentation and\/or other materials provided with the\n\/\/ distribution.\n\/\/\n\/\/ - Neither the name of \"The Computer Language Benchmarks Game\" nor\n\/\/ the name of \"The Computer Language Shootout Benchmarks\" nor the\n\/\/ names of its contributors may be used to endorse or promote\n\/\/ products derived from this software without specific prior\n\/\/ written permission.\n\/\/\n\/\/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\/\/ \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\/\/ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n\/\/ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE\n\/\/ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n\/\/ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n\/\/ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\/\/ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n\/\/ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n\/\/ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n\/\/ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED\n\/\/ OF THE POSSIBILITY OF SUCH DAMAGE.\n\nuse std::sync::mpsc::{channel, Sender, Receiver};\nuse std::thread::Thread;\n\nfn start(n_tasks: i32, token: i32) {\n let (tx, mut rx) = channel();\n tx.send(token).unwrap();\n let mut guards = Vec::with_capacity(n_tasks as usize);\n for i in 2 .. n_tasks + 1 {\n let (tx, next_rx) = channel();\n let cur_rx = std::mem::replace(&mut rx, next_rx);\n guards.push(Thread::scoped(move|| roundtrip(i, tx, cur_rx)));\n }\n let guard = Thread::scoped(move|| roundtrip(1, tx, rx));\n}\n\nfn roundtrip(id: i32, tx: Sender<i32>, rx: Receiver<i32>) {\n for token in rx.iter() {\n if token == 1 {\n println!(\"{}\", id);\n break;\n }\n tx.send(token - 1).unwrap();\n }\n}\n\nfn main() {\n let args = std::os::args();\n let token = if std::os::getenv(\"RUST_BENCH\").is_some() {\n 2000000\n } else {\n args.get(1).and_then(|arg| arg.parse()).unwrap_or(1000)\n };\n let n_tasks = args.get(2)\n .and_then(|arg| arg.parse())\n .unwrap_or(503);\n\n start(n_tasks, token);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ -*- rust -*-\n\nuse std;\nimport std._str;\n\ntype t = tag(make_t(str), clam());\n\nfn main() {\n let str s = \"hi\"; \/\/ ref up\n let t x = make_t(s); \/\/ ref up\n\n alt (x) {\n case (make_t(y)) { log y; } \/\/ ref up and ref down\n case (_) { log \"?\"; fail; }\n }\n\n log _str.refcount(s);\n check (_str.refcount(s) == 2u);\n}\n<commit_msg>Modify alt-pattern-drop.rs to also insure the slot bound in the pattern doesn't also get dropped (again) at the end of the block containing the alt.<commit_after>\/\/ -*- rust -*-\n\nuse std;\nimport std._str;\n\ntype t = tag(make_t(str), clam());\n\nfn foo(str s) {\n let t x = make_t(s); \/\/ ref up\n\n alt (x) {\n case (make_t(y)) { log y; } \/\/ ref up then down\n case (_) { log \"?\"; fail; }\n }\n\n log _str.refcount(s);\n check (_str.refcount(s) == 3u);\n}\n\nfn main() {\n let str s = \"hi\"; \/\/ ref up\n foo(s); \/\/ ref up then down\n log _str.refcount(s);\n check (_str.refcount(s) == 1u);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue-27697<commit_after>\/\/ run-pass\n\nuse std::ops::Deref;\n\ntrait MyTrait {\n fn do_something(&self);\n fn as_str(&self) -> &str;\n}\n\nimpl Deref for dyn MyTrait {\n type Target = str;\n fn deref(&self) -> &Self::Target {\n self.as_str()\n }\n}\n\nfn trait_object_does_something(t: &dyn MyTrait) {\n t.do_something()\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This test is for Windows only.\n\/\/ ignore-android\n\/\/ ignore-bitrig\n\/\/ ignore-macos\n\/\/ ignore-dragonfly\n\/\/ ignore-freebsd\n\/\/ ignore-haiku\n\/\/ ignore-ios\n\/\/ ignore-linux\n\/\/ ignore-netbsd\n\/\/ ignore-openbsd\n\/\/ ignore-solaris\n\/\/ ignore-emscripten\n\n\/\/ aux-build:dummy.rs\n\/\/ aux-build:wrapper.rs\n\nextern crate wrapper;\n\n\/\/ Check that external symbols coming from foreign dylibs are adorned with 'dllimport',\n\/\/ whereas symbols coming from foreign staticlibs are not. (RFC-1717)\n\n\/\/ CHECK: @dylib_global1 = external dllimport local_unnamed_addr global i32\n\/\/ CHECK: @dylib_global2 = external dllimport local_unnamed_addr global i32\n\/\/ CHECK: @static_global1 = external local_unnamed_addr global i32\n\/\/ CHECK: @static_global2 = external local_unnamed_addr global i32\n\n\/\/ CHECK: declare dllimport i32 @dylib_func1(i32)\n\/\/ CHECK: declare dllimport i32 @dylib_func2(i32)\n\/\/ CHECK: declare i32 @static_func1(i32)\n\/\/ CHECK: declare i32 @static_func2(i32)\n\n#[link(name = \"dummy\", kind=\"dylib\")]\nextern \"C\" {\n pub fn dylib_func1(x: i32) -> i32;\n pub static dylib_global1: i32;\n}\n\n#[link(name = \"dummy\", kind=\"static\")]\nextern \"C\" {\n pub fn static_func1(x: i32) -> i32;\n pub static static_global1: i32;\n}\n\nfn main() {\n unsafe {\n dylib_func1(dylib_global1);\n wrapper::dylib_func2(wrapper::dylib_global2);\n\n static_func1(static_global1);\n wrapper::static_func2(wrapper::static_global2);\n }\n}\n<commit_msg>Ignore test on -windows-gnu.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This test is for *-windows-msvc only.\n\/\/ ignore-gnu\n\/\/ ignore-android\n\/\/ ignore-bitrig\n\/\/ ignore-macos\n\/\/ ignore-dragonfly\n\/\/ ignore-freebsd\n\/\/ ignore-haiku\n\/\/ ignore-ios\n\/\/ ignore-linux\n\/\/ ignore-netbsd\n\/\/ ignore-openbsd\n\/\/ ignore-solaris\n\/\/ ignore-emscripten\n\n\/\/ aux-build:dummy.rs\n\/\/ aux-build:wrapper.rs\n\nextern crate wrapper;\n\n\/\/ Check that external symbols coming from foreign dylibs are adorned with 'dllimport',\n\/\/ whereas symbols coming from foreign staticlibs are not. 
(RFC-1717)\n\n\/\/ CHECK: @dylib_global1 = external dllimport local_unnamed_addr global i32\n\/\/ CHECK: @dylib_global2 = external dllimport local_unnamed_addr global i32\n\/\/ CHECK: @static_global1 = external local_unnamed_addr global i32\n\/\/ CHECK: @static_global2 = external local_unnamed_addr global i32\n\n\/\/ CHECK: declare dllimport i32 @dylib_func1(i32)\n\/\/ CHECK: declare dllimport i32 @dylib_func2(i32)\n\/\/ CHECK: declare i32 @static_func1(i32)\n\/\/ CHECK: declare i32 @static_func2(i32)\n\n#[link(name = \"dummy\", kind=\"dylib\")]\nextern \"C\" {\n pub fn dylib_func1(x: i32) -> i32;\n pub static dylib_global1: i32;\n}\n\n#[link(name = \"dummy\", kind=\"static\")]\nextern \"C\" {\n pub fn static_func1(x: i32) -> i32;\n pub static static_global1: i32;\n}\n\nfn main() {\n unsafe {\n dylib_func1(dylib_global1);\n wrapper::dylib_func2(wrapper::dylib_global2);\n\n static_func1(static_global1);\n wrapper::static_func2(wrapper::static_global2);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue #16939<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(overloaded_calls)]\n\n\/\/ Make sure we don't ICE when making an overloaded call with the\n\/\/ wrong arity.\n\nfn _foo<F: Fn()> (f: F) {\n |t| f(t); \/\/~ ERROR E0058\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #20105<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn f<F, R>(_: F) where F: Fn() -> R {\n}\n\nfn main() {\n f(|| -> ! { () });\n\/\/~^ ERROR: computation may converge in a function marked as diverging [E0270]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>A simple helloworld-like example available by default<commit_after>\/\/ Эта строка является комментарием и она будет проигнорирована компилятором\\n\n\/\/ Протестировать код можно нажав на кнопку \"Run\" вот тут ->\\n\n\/\/ так же можно использовать клавиатуру, нажав сочетание клавиш \"Ctrl + Enter\"\\n\n\\n\n\/\/ Этот код можно редактировать не стесняясь, дерзайте!\\n\n\/\/ Всегда можно вернуть оригинальный код, нажав на кнопку \"Reset\" вот тут ->\\n\n\\n\n\/\/ Это главная функция. С неё начинается исполнение любой программы\\n\nfn main() {\\n\n \/\/ Следующий код будет исполнен в момент, когда будет запущен исполняемый файл\\n\n \/\/ Отображаем текст в консоли\\n\n println!(\"Привет, мир!\");\\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue-54067<commit_after>\/\/ check-pass\n\/\/ ignore-emscripten no llvm_asm! 
support\n\n#![feature(llvm_asm)]\n\npub fn boot(addr: Option<u32>) {\n unsafe {\n llvm_asm!(\"mov sp, $0\"::\"r\" (addr));\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>finish new<commit_after>pub fn is_anagram(s: String, t: String) -> bool {\n if s.len() != t.len() {\n return false;\n };\n\n let length = s.len();\n let mut s_bytes = s.into_bytes();\n let mut t_bytes = t.into_bytes();\n\n s_bytes.sort();\n t_bytes.sort();\n println!(\"{:?},{:?}\", s_bytes, t_bytes);\n\n for i in 0..length {\n if s_bytes[i] != t_bytes[i] {\n return false;\n }\n }\n\n true\n}\n\nfn main() {\n println!(\n \"{:?}\",\n is_anagram(String::from(\"anagram\"), String::from(\"nagaram\"))\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add game backend engine<commit_after>pub type Row = [Player; 3];\n\npub type Field = [Row; 3];\n\npub type Coords = (usize, usize);\n\n\/\/\/ Must return `Coords` or `None` if a player chooses to surrender.\npub type PlayerController = fn(game: &Game) -> Option<Coords>;\n\n#[derive(Copy, Clone, PartialEq)]\npub enum Player {\n Nobody,\n X,\n O,\n}\n\npub struct Game {\n current_player: Player,\n field: Field,\n player1: PlayerController,\n player2: PlayerController,\n}\n\nimpl Game {\n pub fn new(player1: PlayerController, player2: PlayerController) -> Game {\n Game {\n field: [[Player::Nobody; 3]; 3],\n current_player: Player::X,\n player1: player1,\n player2: player2,\n }\n }\n\n pub fn field(&self) -> &Field {\n &self.field\n }\n\n pub fn current_player(&self) -> Player {\n self.current_player\n }\n\n pub fn play(&mut self) -> Player {\n loop {\n match self.current_player_coords() {\n Some((row, col)) => {\n if !self.cell_is_empty((row, col)) {\n panic!(\"player tries to set occupied cell - cheating!\")\n }\n self.field[row][col] = self.current_player;\n },\n _ => { self.switch_player(); return self.current_player },\n };\n\n match self.winner() {\n Some(winner) => return winner,\n _ => self.switch_player(),\n }\n }\n }\n\n pub fn cell_is_empty(&self, coords: Coords) -> bool {\n let (row, col) = coords;\n self.field[row][col] == Player::Nobody\n }\n\n fn current_player_coords(&self) -> Option<Coords> {\n match self.current_player {\n Player::X => (self.player1)(&self),\n Player::O => (self.player2)(&self),\n _ => panic!(\"current player can't be Nobody\"),\n }\n }\n\n fn switch_player(&mut self) {\n if self.current_player == Player::X {\n self.current_player = Player::O\n } else if self.current_player == Player::O {\n self.current_player = Player::X\n }\n }\n\n \/\/\/ Returns `Player` if it is already defined, otherwise `None`.\n fn winner(&self) -> Option<Player> {\n self.row_winner(self.row(0))\n .or( self.row_winner(self.row(1)) )\n .or( self.row_winner(self.row(2)) )\n .or( self.row_winner(self.col(0)) )\n .or( self.row_winner(self.col(1)) )\n .or( self.row_winner(self.col(2)) )\n .or( self.row_winner(self.diagonal()) )\n .or( self.row_winner(self.reverse_diagonal()) )\n .or( if self.has_empty_cell() { None } else { Some(Player::Nobody) } )\n }\n\n fn has_empty_cell(&self) -> bool {\n for row in &self.field {\n for col in row {\n if *col == Player::Nobody {\n return true;\n }\n }\n }\n false\n }\n\n \/\/\/ Returns `Player` if it is already defined, otherwise `None`.\n fn row_winner(&self, row: Vec<Player>) -> Option<Player> {\n if row[0] != Player::Nobody && row[0] == row[1] && row[1] == row[2] {\n Some(row[0])\n } else {\n None\n }\n }\n\n fn row(&self, number: usize) -> Vec<Player> {\n self.field[number].to_vec()\n }\n\n fn col(&self, 
number: usize) -> Vec<Player> {\n self.field.iter().map(|row| row[number]).collect()\n }\n\n fn diagonal(&self) -> Vec<Player> {\n self.field.iter().enumerate().map(|(i, row)| row[0 + i]).collect()\n }\n\n fn reverse_diagonal(&self) -> Vec<Player> {\n self.field.iter().enumerate().map(|(i, row)| row[2 - i]).collect()\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>re-added the meshbatch example<commit_after>\/\/! An example of how to use an `InstanceArray` to draw custom `Mesh`es with instanced draws.\n\nuse ggez::event;\nuse ggez::graphics::{self, Color, DrawParam};\nuse ggez::{Context, GameResult};\nuse glam::*;\nuse oorandom::Rand32;\nuse std::env;\nuse std::f32::consts::PI;\nuse std::path;\n\nconst TWO_PI: f32 = 2.0 * PI;\n\nstruct MainState {\n mesh_batch: graphics::InstanceArray,\n mesh: graphics::Mesh,\n}\n\nimpl MainState {\n fn new(ctx: &mut Context) -> GameResult<MainState> {\n let mut rng = Rand32::new(12345);\n let mesh = graphics::Mesh::from_data(\n ctx,\n graphics::MeshBuilder::new()\n .circle(\n graphics::DrawMode::stroke(4.0),\n Vec2::new(0.0, 0.0),\n 8.0,\n 1.0,\n (0, 0, 255).into(),\n )?\n .line(\n &[Vec2::new(0.0, 0.0), Vec2::new(8.0, 0.0)],\n 2.0,\n (255, 255, 0).into(),\n )?\n .build(),\n );\n\n \/\/ Generate enough instances to fill the entire screen\n let size = ctx.gfx.drawable_size();\n let items_x = (size.0 \/ 16.0) as u32;\n let items_y = (size.1 \/ 16.0) as u32;\n let mut mesh_batch = graphics::InstanceArray::new(ctx, None, items_x * items_y);\n\n mesh_batch.set((0..items_x).flat_map(|x| {\n (0..items_y).map(move |y| {\n let x = x as f32;\n let y = y as f32;\n\n DrawParam::new()\n .dest(Vec2::new(x * 16.0, y * 16.0))\n .rotation(rng.rand_float() * TWO_PI)\n })\n }));\n\n let s = MainState { mesh_batch, mesh };\n Ok(s)\n }\n}\n\nimpl event::EventHandler<ggez::GameError> for MainState {\n #[allow(clippy::needless_range_loop)]\n fn update(&mut self, ctx: &mut Context) -> GameResult {\n if ctx.time.ticks() % 100 == 0 {\n println!(\"Delta frame time: {:?} \", ctx.time.delta());\n println!(\"Average FPS: {}\", ctx.time.fps());\n }\n\n \/\/ Update first 50 instances in the mesh batch\n let delta_time = ctx.time.delta().as_secs_f32() * 1000.0;\n let instances = self.mesh_batch.instances();\n\n let mut updated_params = Vec::new();\n for i in 0..50 {\n let mut p = instances[i as usize];\n if let graphics::Transform::Values {\n ref mut rotation, ..\n } = p.transform\n {\n if (i % 2) == 0 {\n *rotation += 0.001 * TWO_PI * delta_time;\n if *rotation > TWO_PI {\n *rotation -= TWO_PI;\n }\n } else {\n *rotation -= 0.001 * TWO_PI * delta_time;\n if *rotation < 0.0 {\n *rotation += TWO_PI;\n }\n }\n }\n updated_params.push(p);\n }\n for i in 0..50 {\n \/\/ TODO: this is pretty inefficient and also a bit ridiculous\n \/\/ a way to update parts of an InstanceArray would be good to have\n self.mesh_batch.update(i, updated_params[i as usize]);\n }\n\n Ok(())\n }\n\n fn draw(&mut self, ctx: &mut Context) -> GameResult {\n let mut canvas = graphics::Canvas::from_frame(ctx, Color::BLACK);\n\n \/\/ Draw the batch\n canvas.draw_instanced_mesh(\n self.mesh.clone(),\n &self.mesh_batch,\n DrawParam::new().dest(glam::Vec2::new(5.0, 8.0)),\n );\n\n canvas.finish(ctx)?;\n Ok(())\n }\n}\n\npub fn main() -> GameResult {\n let resource_dir = if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n let mut path = path::PathBuf::from(manifest_dir);\n path.push(\"resources\");\n path\n } else {\n path::PathBuf::from(\".\/resources\")\n };\n\n let cb = 
ggez::ContextBuilder::new(\"meshbatch\", \"ggez\").add_resource_path(resource_dir);\n let (mut ctx, event_loop) = cb.build()?;\n\n let state = MainState::new(&mut ctx)?;\n event::run(ctx, event_loop, state)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: Add test case. Closes #3250<commit_after>#[auto_serialize]\n\ntype t = (uint, uint);\n\n \n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Construct new struct CLI for structopt<commit_after>#[derive(StructOpt, PartialEq, Debug)]\npub struct App {\n #[structopt(short=\"c\", long = \"config\", help = \"Sets a custom config file\")]\n pub config: String,\n #[structopt(subcommand)]\n pub cmd: SubCmd,\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum SubCmd {\n #[structopt(name = \"shutdown\", about = \"shuts down the proxy without waiting for connections to finish\")]\n Shutdown {\n #[structopt(short = \"h\", long = \"hard\")]\n hard: Option<bool>\n },\n #[structopt(name = \"upgrade\", about = \"upgrades the proxy\")]\n Upgrades,\n #[structopt(name = \"status\", about = \"gets information on the running workers\")]\n Status,\n #[structopt(name = \"metrics\", about = \"gets statistics on the master and its workers\")]\n Metrics,\n #[structopt(name = \"logging\", about = \"change logging level\")]\n Logging {\n #[structopt(short = \"l\", long = \"level\", help = \"change logging level\")]\n level: String\n },\n #[structopt(name = \"state\", about = \"state management\")]\n State {\n #[structopt(subcommand)]\n cmd: StateCmd,\n },\n #[structopt(name = \"application\", about = \"application management\")]\n Application {\n #[structopt(subcommand)]\n cmd: ApplicationCmd,\n },\n #[structopt(name = \"backend\", about = \"backend management\")]\n Backend {\n #[structopt(subcommand)]\n cmd: BackendCmd,\n },\n #[structopt(name = \"frontend\", about = \"frontend management\")]\n Frontend {\n #[structopt(subcommand)]\n cmd: FrontendCmd,\n },\n #[structopt(name = \"certificate\", about = \"certificate management\")]\n Certificate {\n #[structopt(subcommand)]\n cmd: CertificateCmd,\n },\n #[structopt(name = \"query\", about = \"configuration state verification\")]\n Query {\n #[structopt(subcommand)]\n cmd: QueryCmd,\n }\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum StateCmd {\n #[structopt(name = \"save\", about = \"Save state to that file\")]\n Save {\n #[structopt(short = \"h\", long = \"hard\")]\n file: String,\n },\n #[structopt(name = \"load\", about = \"Load state from that file\")]\n Load {\n #[structopt(short = \"h\", long = \"hard\")]\n file: String,\n },\n #[structopt(name = \"dump\")]\n Dump,\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum ApplicationCmd {\n #[structopt(name = \"remove\")]\n Remove {\n #[structopt(short = \"i\", long = \"id\")]\n id: String,\n },\n #[structopt(name = \"add\")]\n Add{\n #[structopt(short = \"h\", long = \"hard\")]\n id: String,\n #[structopt(short = \"s\", long = \"sticky session\")]\n sticky_session: Option<bool>,\n },\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum BackendCmd {\n #[structopt(name = \"remove\")]\n Remove {\n #[structopt(short = \"i\", long = \"id\")]\n id: String,\n #[structopt(long = \"instance-id\")]\n instance_id: String,\n #[structopt(long = \"ip\")]\n ip: String,\n #[structopt(short = \"p\", long = \"prot\")]\n port: u16,\n },\n #[structopt(name = \"add\")]\n Add {\n #[structopt(short = \"i\", long = \"id\")]\n id: String,\n #[structopt(long = \"instance-id\")]\n instance_id: String,\n #[structopt(long = \"ip\")]\n ip: 
String,\n #[structopt(short = \"p\", long = \"prot\")]\n port: u16,\n },\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum FrontendCmd {\n #[structopt(name = \"add\")]\n Add {\n #[structopt(short = \"i\", long = \"id\", help = \"app id of the frontend\")]\n id: String,\n #[structopt(short = \"host\", long = \"hostname\")]\n hostname: String,\n #[structopt(short = \"p\", long = \"path\", help=\"URL prefix of the frontend\")]\n path_begin: Option<String>,\n #[structopt(long = \"certificate\", help=\"path to a certificate file\")]\n path_to_certificate: Option<String>,\n },\n #[structopt(name = \"remove\")]\n Remove {\n #[structopt(short = \"i\", long = \"id\", help = \"app id of the frontend\")]\n id: String,\n #[structopt(short = \"host\", long = \"hostname\")]\n hostname: String,\n #[structopt(short = \"p\", long = \"path\", help=\"URL prefix of the frontend\")]\n path_begin: Option<String>,\n #[structopt(long = \"certificate\", help=\"path to a certificate file\")]\n path_to_certificate: Option<String>,\n },\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum CertificateCmd {\n #[structopt(name = \"add\")]\n Add {\n #[structopt(long = \"certificate\", help = \"path to the certificate\")]\n certificate: String,\n #[structopt(long = \"certificate-chain\", help = \"path to the certificate chain\")]\n chain: String,\n #[structopt(long = \"key\", help = \"path to the key\")]\n key: Option<String>,\n },\n #[structopt(name = \"remove\")]\n Remove {\n #[structopt(short = \"cert\", long = \"certificate\", help = \"path to the certificate\")]\n certificate: String,\n },\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\npub enum QueryCmd {\n #[structopt(name = \"applications\")]\n Applications {\n #[structopt(short = \"i\", long=\"id\", help=\"application identifier\")]\n id: Option<String>\n }\n}<|endoftext|>"} {"text":"<commit_before>#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(core_simd)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(no_std)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![no_std]\n\nextern crate redox_alloc as alloc;\n\nuse core::clone::Clone;\nuse core::mem::size_of;\nuse core::result::Result;\n\nuse common::memory::*;\nuse common::string::*;\nuse common::vector::*;\nuse common::url::*;\n\nuse drivers::keyboard::*;\nuse drivers::mouse::*;\n\nuse graphics::color::*;\nuse graphics::point::*;\nuse graphics::size::*;\nuse graphics::window::*;\n\nuse programs::session::*;\n\n\/* TEST { *\/\nuse core::any::Any;\nuse core::ops::Fn;\nuse core::option::Option;\n\nuse alloc::boxed::*;\n\nuse common::debug::*;\n\/* } TEST *\/\n\n#[path=\"..\/src\/common\"]\nmod common {\n pub mod debug;\n pub mod memory;\n pub mod pci;\n pub mod pio;\n pub mod string;\n pub mod vector;\n pub mod url;\n}\n\n#[path=\"..\/src\/drivers\"]\nmod drivers {\n pub mod disk;\n pub mod keyboard;\n pub mod mouse;\n}\n\n#[path=\"..\/src\/filesystems\"]\nmod filesystems {\n pub mod unfs;\n}\n\n#[path=\"..\/src\/graphics\"]\nmod graphics {\n pub mod bmp;\n pub mod color;\n pub mod display;\n pub mod point;\n pub mod size;\n pub mod window;\n}\n\n#[path=\"..\/src\/programs\"]\nmod programs {\n pub mod session;\n}\n\n\/* TEST { *\/\nstruct EventA {\n x: isize,\n y: isize\n}\n\nstruct EventB {\n txt: String\n}\n\nstruct EventListener {\n fn_ptr: Box<Fn(&Box<Any>)>\n}\n\nimpl EventListener {\n pub fn call(&self, event: &Box<Any>){\n (*self.fn_ptr)(event);\n }\n}\n\nfn test(){\n let mut events: Vector<Box<Any>> = 
Vector::new();\n\n events.push(box EventB {\n txt: \"first test\".to_string()\n });\n events.push(box EventA {\n x: 2,\n y: 3\n });\n events.push(box EventB {\n txt: \"second test\".to_string()\n });\n\n let mut listeners: Vector<Box<EventListener>> = Vector::new();\n\n listeners.push(box EventListener {\n fn_ptr: box |event: &Box<Any>| {\n match event.downcast_ref::<EventA>() {\n Option::Some(a) => {\n d(\"Event A \");\n dd(a.x as usize);\n d(\", \");\n dd(a.y as usize);\n dl();\n },\n Option::None => ()\n }\n }\n });\n\n listeners.push(box EventListener {\n fn_ptr: box |event: &Box<Any>| {\n match event.downcast_ref::<EventB>() {\n Option::Some(b) => {\n d(\"Event B \");\n b.txt.d();\n dl();\n },\n Option::None => ()\n }\n }\n });\n\n for event in events.iter() {\n for listener in listeners.iter() {\n listener.call(event);\n }\n }\n}\n\/* } TEST *\/\n\npub struct Application {\n window: Window,\n output: String,\n command: String,\n offset: usize,\n scroll: Point,\n wrap: bool\n}\n\nimpl Application {\n fn append(&mut self, line: String) {\n self.output = self.output.clone() + line + \"\\n\";\n }\n\n #[allow(unused_variables)]\n fn on_command(&mut self, session: &Session){\n let mut args: Vector<String> = Vector::<String>::new();\n for arg in self.command.split(\" \".to_string()) {\n if arg.len() > 0 {\n args.push(arg);\n }\n }\n match args.get(0) {\n Result::Ok(cmd) => {\n if *cmd == \"echo\".to_string() {\n let mut echo = String::new();\n for i in 1..args.len() {\n match args.get(i) {\n Result::Ok(arg) => {\n if echo.len() == 0 {\n echo = arg.clone();\n }else{\n echo = echo + \" \" + arg.clone();\n }\n },\n Result::Err(_) => ()\n }\n }\n self.append(echo);\n }else if *cmd == \"exit\".to_string() {\n self.window.closed = true;\n }else if *cmd == \"test\".to_string() {\n test();\n }else if *cmd == \"url\".to_string() {\n match args.get(1) {\n Result::Ok(url_string) => {\n let url = URL::from_string(url_string.clone());\n self.append(url.to_string());\n\n unsafe{\n let self_ptr: *mut Application = self; \/\/ BIG NO NO\n session.on_url(&url, box move |response|{\n (*self_ptr).append(response);\n })\n }\n },\n Result::Err(_) => {\n for module in session.modules.iter() {\n let scheme = module.scheme();\n if scheme.len() > 0 {\n self.append(scheme);\n }\n }\n }\n }\n }else{\n self.append(\"Commands: echo exit url\".to_string());\n }\n },\n Result::Err(_) => ()\n }\n }\n}\n\nimpl SessionItem for Application {\n #[allow(unused_variables)]\n fn new() -> Application {\n Application {\n window: Window{\n point: Point::new(220, 100),\n size: Size::new(576, 400),\n title: String::from_str(\"Terminal\"),\n title_color: Color::new(0, 0, 0),\n border_color: Color::new(192, 192, 255),\n content_color: Color::alpha(128, 128, 160, 192),\n shaded: false,\n closed: false,\n dragging: false,\n last_mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n valid: false\n }\n },\n output: String::new(),\n command: String::new(),\n offset: 0,\n scroll: Point::new(0, 0),\n wrap: true\n }\n }\n\n fn draw(&mut self, session: &Session, updates: &mut SessionUpdates) -> bool{\n let display = &session.display;\n if self.window.draw(display) {\n let scroll = self.scroll;\n\n let mut col = -scroll.x;\n let cols = self.window.size.width as isize \/ 8;\n let mut row = -scroll.y;\n let rows = self.window.size.height as isize \/ 16;\n\n for c in self.output.chars(){\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n 
}\n\n if c == '\\n' {\n col = -scroll.x;\n row += 1;\n }else if c == '\\t' {\n col += 8 - col % 8;\n }else{\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, c, Color::new(224, 224, 224));\n }\n col += 1;\n }\n }\n\n if col > -scroll.x {\n col = -scroll.x;\n row += 1;\n }\n\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '#', Color::new(255, 255, 255));\n col += 2;\n }\n\n let mut i = 0;\n for c in self.command.chars(){\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n }\n\n if self.offset == i && col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '_', Color::new(255, 255, 255));\n }\n\n if c == '\\n' {\n col = -scroll.x;\n row += 1;\n }else if c == '\\t' {\n col += 8 - col % 8;\n }else{\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, c, Color::new(255, 255, 255));\n }\n col += 1;\n }\n\n i += 1;\n }\n\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n }\n\n if row >= rows {\n self.scroll.y += row - rows + 1;\n updates.redraw = REDRAW_ALL;\n }\n\n if self.offset == i && col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '_', Color::new(255, 255, 255));\n }\n\n return true;\n }else{\n return false;\n }\n }\n\n #[allow(unused_variables)]\n fn on_key(&mut self, session: &Session, updates: &mut SessionUpdates, key_event: KeyEvent){\n if key_event.pressed {\n match key_event.scancode {\n 0x01 => self.window.closed = true,\n 0x47 => self.offset = 0,\n 0x4B => if self.offset > 0 {\n self.offset -= 1;\n },\n 0x4D => if self.offset < self.command.len() {\n self.offset += 1;\n },\n 0x4F => self.offset = self.command.len(),\n _ => ()\n }\n\n match key_event.character {\n '\\x00' => (),\n '\\x08' => {\n if self.offset > 0 {\n self.command = self.command.substr(0, self.offset - 1) + self.command.substr(self.offset, self.command.len() - self.offset);\n self.offset -= 1;\n }\n }\n '\\x1B' => self.command = String::new(),\n '\\n' => {\n if self.command.len() > 0 {\n self.output = self.output.clone() + \"# \".to_string() + self.command.clone() + \"\\n\";\n self.on_command(session);\n self.command = String::new();\n self.offset = 0;\n }\n },\n _ => {\n self.command = self.command.substr(0, self.offset) + key_event.character + self.command.substr(self.offset, self.command.len() - self.offset);\n self.offset += 1;\n }\n }\n }\n }\n\n #[allow(unused_variables)]\n fn on_mouse(&mut self, session: &Session, updates: &mut SessionUpdates, mouse_event: MouseEvent, allow_catch: bool) -> bool{\n return self.window.on_mouse(session.mouse_point, mouse_event, allow_catch);\n }\n}\n\n\/\/Class wrappers\n\nstatic mut application: *mut Application = 0 as *mut Application;\n\n#[no_mangle]\npub unsafe fn entry(){\n application = alloc(size_of::<Application>()) as *mut Application;\n *application = Application::new();\n}\n\n#[no_mangle]\npub unsafe fn draw(session: &Session, updates: &mut SessionUpdates) -> bool{\n if application as usize > 0 {\n return (*application).draw(session, updates);\n }else{\n return false;\n 
}\n}\n\n#[no_mangle]\npub unsafe fn on_key(session: &Session, updates: &mut SessionUpdates, key_event: KeyEvent){\n if application as usize > 0{\n (*application).on_key(session, updates, key_event);\n }\n}\n\n#[no_mangle]\npub unsafe fn on_mouse(session: &Session, updates: &mut SessionUpdates, mouse_event: MouseEvent, allow_catch: bool) -> bool{\n if application as usize > 0 {\n return (*application).on_mouse(session, updates, mouse_event, allow_catch);\n }else{\n return false;\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn memmove(dst: *mut u8, src: *const u8, len: isize){\n unsafe {\n if src < dst {\n let mut i = len;\n while i > 0 {\n i -= 1;\n *dst.offset(i) = *src.offset(i);\n }\n }else{\n let mut i = 0;\n while i < len {\n *dst.offset(i) = *src.offset(i);\n i += 1;\n }\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn memcpy(dst: *mut u8, src: *const u8, len: isize){\n unsafe {\n let mut i = 0;\n while i < len {\n *dst.offset(i) = *src.offset(i);\n i += 1;\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn memset(src: *mut u8, c: i32, len: isize) {\n unsafe {\n let mut i = 0;\n while i < len {\n *src.offset(i) = c as u8;\n i += 1;\n }\n }\n}\n<commit_msg>Do not do page-breaking thing in example.rs<commit_after>#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(core_simd)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(no_std)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![no_std]\n\nextern crate redox_alloc as alloc;\n\nuse core::clone::Clone;\nuse core::mem::size_of;\nuse core::result::Result;\n\nuse common::memory::*;\nuse common::string::*;\nuse common::vector::*;\nuse common::url::*;\n\nuse drivers::keyboard::*;\nuse drivers::mouse::*;\n\nuse graphics::color::*;\nuse graphics::point::*;\nuse graphics::size::*;\nuse graphics::window::*;\n\nuse programs::session::*;\n\n\/* TEST { *\/\nuse core::any::Any;\nuse core::ops::Fn;\nuse core::option::Option;\n\nuse alloc::boxed::*;\n\nuse common::debug::*;\n\/* } TEST *\/\n\n#[path=\"..\/src\/common\"]\nmod common {\n pub mod debug;\n pub mod memory;\n pub mod pci;\n pub mod pio;\n pub mod string;\n pub mod vector;\n pub mod url;\n}\n\n#[path=\"..\/src\/drivers\"]\nmod drivers {\n pub mod disk;\n pub mod keyboard;\n pub mod mouse;\n}\n\n#[path=\"..\/src\/filesystems\"]\nmod filesystems {\n pub mod unfs;\n}\n\n#[path=\"..\/src\/graphics\"]\nmod graphics {\n pub mod bmp;\n pub mod color;\n pub mod display;\n pub mod point;\n pub mod size;\n pub mod window;\n}\n\n#[path=\"..\/src\/programs\"]\nmod programs {\n pub mod session;\n}\n\n\/* TEST { *\/\nstruct EventA {\n x: isize,\n y: isize\n}\n\nstruct EventB {\n txt: String\n}\n\nstruct EventListener {\n fn_ptr: Box<Fn(&Box<Any>)>\n}\n\nimpl EventListener {\n pub fn call(&self, event: &Box<Any>){\n (*self.fn_ptr)(event);\n }\n}\n\nfn test(){\n let mut events: Vector<Box<Any>> = Vector::new();\n\n events.push(box EventB {\n txt: \"first test\".to_string()\n });\n events.push(box EventA {\n x: 2,\n y: 3\n });\n events.push(box EventB {\n txt: \"second test\".to_string()\n });\n\n let mut listeners: Vector<Box<EventListener>> = Vector::new();\n\n listeners.push(box EventListener {\n fn_ptr: box |event: &Box<Any>| {\n match event.downcast_ref::<EventA>() {\n Option::Some(a) => {\n d(\"Event A \");\n dd(a.x as usize);\n d(\", \");\n dd(a.y as usize);\n dl();\n },\n Option::None => ()\n }\n }\n });\n\n listeners.push(box EventListener {\n fn_ptr: box |event: &Box<Any>| {\n match event.downcast_ref::<EventB>() 
{\n Option::Some(b) => {\n d(\"Event B \");\n b.txt.d();\n dl();\n },\n Option::None => ()\n }\n }\n });\n\n for event in events.iter() {\n for listener in listeners.iter() {\n listener.call(event);\n }\n }\n}\n\/* } TEST *\/\n\npub struct Application {\n window: Window,\n output: String,\n command: String,\n offset: usize,\n scroll: Point,\n wrap: bool\n}\n\nimpl Application {\n fn append(&mut self, line: String) {\n self.output = self.output.clone() + line + \"\\n\";\n }\n\n #[allow(unused_variables)]\n fn on_command(&mut self, session: &Session){\n let mut args: Vector<String> = Vector::<String>::new();\n for arg in self.command.split(\" \".to_string()) {\n if arg.len() > 0 {\n args.push(arg);\n }\n }\n match args.get(0) {\n Result::Ok(cmd) => {\n if *cmd == \"echo\".to_string() {\n let mut echo = String::new();\n for i in 1..args.len() {\n match args.get(i) {\n Result::Ok(arg) => {\n if echo.len() == 0 {\n echo = arg.clone();\n }else{\n echo = echo + \" \" + arg.clone();\n }\n },\n Result::Err(_) => ()\n }\n }\n self.append(echo);\n }else if *cmd == \"exit\".to_string() {\n self.window.closed = true;\n }else if *cmd == \"test\".to_string() {\n test();\n }else if *cmd == \"url\".to_string() {\n match args.get(1) {\n Result::Ok(url_string) => {\n let url = URL::from_string(url_string.clone());\n self.append(url.to_string());\n },\n Result::Err(_) => {\n for module in session.modules.iter() {\n let scheme = module.scheme();\n if scheme.len() > 0 {\n self.append(scheme);\n }\n }\n }\n }\n }else{\n self.append(\"Commands: echo exit url\".to_string());\n }\n },\n Result::Err(_) => ()\n }\n }\n}\n\nimpl SessionItem for Application {\n #[allow(unused_variables)]\n fn new() -> Application {\n Application {\n window: Window{\n point: Point::new(220, 100),\n size: Size::new(576, 400),\n title: String::from_str(\"Terminal\"),\n title_color: Color::new(0, 0, 0),\n border_color: Color::new(192, 192, 255),\n content_color: Color::alpha(128, 128, 160, 192),\n shaded: false,\n closed: false,\n dragging: false,\n last_mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n valid: false\n }\n },\n output: String::new(),\n command: String::new(),\n offset: 0,\n scroll: Point::new(0, 0),\n wrap: true\n }\n }\n\n fn draw(&mut self, session: &Session, updates: &mut SessionUpdates) -> bool{\n let display = &session.display;\n if self.window.draw(display) {\n let scroll = self.scroll;\n\n let mut col = -scroll.x;\n let cols = self.window.size.width as isize \/ 8;\n let mut row = -scroll.y;\n let rows = self.window.size.height as isize \/ 16;\n\n for c in self.output.chars(){\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n }\n\n if c == '\\n' {\n col = -scroll.x;\n row += 1;\n }else if c == '\\t' {\n col += 8 - col % 8;\n }else{\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, c, Color::new(224, 224, 224));\n }\n col += 1;\n }\n }\n\n if col > -scroll.x {\n col = -scroll.x;\n row += 1;\n }\n\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '#', Color::new(255, 255, 255));\n col += 2;\n }\n\n let mut i = 0;\n for c in self.command.chars(){\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n }\n\n if self.offset == i && col >= 0 && col < cols && row >= 0 && row 
< rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '_', Color::new(255, 255, 255));\n }\n\n if c == '\\n' {\n col = -scroll.x;\n row += 1;\n }else if c == '\\t' {\n col += 8 - col % 8;\n }else{\n if col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, c, Color::new(255, 255, 255));\n }\n col += 1;\n }\n\n i += 1;\n }\n\n if self.wrap && col >= cols {\n col = -scroll.x;\n row += 1;\n }\n\n if row >= rows {\n self.scroll.y += row - rows + 1;\n updates.redraw = REDRAW_ALL;\n }\n\n if self.offset == i && col >= 0 && col < cols && row >= 0 && row < rows{\n let point = Point::new(self.window.point.x + 8 * col, self.window.point.y + 16 * row);\n display.char(point, '_', Color::new(255, 255, 255));\n }\n\n return true;\n }else{\n return false;\n }\n }\n\n #[allow(unused_variables)]\n fn on_key(&mut self, session: &Session, updates: &mut SessionUpdates, key_event: KeyEvent){\n if key_event.pressed {\n match key_event.scancode {\n 0x01 => self.window.closed = true,\n 0x47 => self.offset = 0,\n 0x4B => if self.offset > 0 {\n self.offset -= 1;\n },\n 0x4D => if self.offset < self.command.len() {\n self.offset += 1;\n },\n 0x4F => self.offset = self.command.len(),\n _ => ()\n }\n\n match key_event.character {\n '\\x00' => (),\n '\\x08' => {\n if self.offset > 0 {\n self.command = self.command.substr(0, self.offset - 1) + self.command.substr(self.offset, self.command.len() - self.offset);\n self.offset -= 1;\n }\n }\n '\\x1B' => self.command = String::new(),\n '\\n' => {\n if self.command.len() > 0 {\n self.output = self.output.clone() + \"# \".to_string() + self.command.clone() + \"\\n\";\n self.on_command(session);\n self.command = String::new();\n self.offset = 0;\n }\n },\n _ => {\n self.command = self.command.substr(0, self.offset) + key_event.character + self.command.substr(self.offset, self.command.len() - self.offset);\n self.offset += 1;\n }\n }\n }\n }\n\n #[allow(unused_variables)]\n fn on_mouse(&mut self, session: &Session, updates: &mut SessionUpdates, mouse_event: MouseEvent, allow_catch: bool) -> bool{\n return self.window.on_mouse(session.mouse_point, mouse_event, allow_catch);\n }\n}\n\n\/\/Class wrappers\n\nstatic mut application: *mut Application = 0 as *mut Application;\n\n#[no_mangle]\npub unsafe fn entry(){\n application = alloc(size_of::<Application>()) as *mut Application;\n *application = Application::new();\n}\n\n#[no_mangle]\npub unsafe fn draw(session: &Session, updates: &mut SessionUpdates) -> bool{\n if application as usize > 0 {\n return (*application).draw(session, updates);\n }else{\n return false;\n }\n}\n\n#[no_mangle]\npub unsafe fn on_key(session: &Session, updates: &mut SessionUpdates, key_event: KeyEvent){\n if application as usize > 0{\n (*application).on_key(session, updates, key_event);\n }\n}\n\n#[no_mangle]\npub unsafe fn on_mouse(session: &Session, updates: &mut SessionUpdates, mouse_event: MouseEvent, allow_catch: bool) -> bool{\n if application as usize > 0 {\n return (*application).on_mouse(session, updates, mouse_event, allow_catch);\n }else{\n return false;\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn memmove(dst: *mut u8, src: *const u8, len: isize){\n unsafe {\n if src < dst {\n let mut i = len;\n while i > 0 {\n i -= 1;\n *dst.offset(i) = *src.offset(i);\n }\n }else{\n let mut i = 0;\n while i < len {\n *dst.offset(i) = *src.offset(i);\n i += 1;\n }\n }\n 
}\n}\n\n#[no_mangle]\npub extern \"C\" fn memcpy(dst: *mut u8, src: *const u8, len: isize){\n unsafe {\n let mut i = 0;\n while i < len {\n *dst.offset(i) = *src.offset(i);\n i += 1;\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn memset(src: *mut u8, c: i32, len: isize) {\n unsafe {\n let mut i = 0;\n while i < len {\n *src.offset(i) = c as u8;\n i += 1;\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Only view entry if output is not a pipe<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Structured representation of a plugin's features and capabilities.\n\nuse std::io::{BufReader, Write};\nuse std::env;\nuse std::path::PathBuf;\nuse std::sync::{Arc, Mutex};\nuse std::thread;\nuse std::process::{Command, Stdio};\n\nuse serde_json::Value;\n\nuse xi_rpc::RpcLoop;\n\nuse tabs::ViewIdentifier;\nuse syntax::SyntaxDefinition;\nuse super::PluginManagerRef;\nuse super::{Plugin, PluginRef};\n\n\/\/ optional environment variable for debug plugin executables\nstatic PLUGIN_DIR: &'static str = \"XI_PLUGIN_DIR\";\n\n\/\/ example plugins. Eventually these should be loaded from disk.\npub fn debug_plugins() -> Vec<PluginDescription> {\n use self::PluginActivation::*;\n use self::SyntaxDefinition::*;\n let plugin_dir = match env::var(PLUGIN_DIR).map(PathBuf::from) {\n Ok(p) => p,\n Err(_) => env::current_exe().unwrap().parent().unwrap().to_owned(),\n };\n print_err!(\"looking for debug plugins in {:?}\", plugin_dir);\n\n let make_path = |p: &str| -> PathBuf {\n let mut pb = plugin_dir.clone();\n pb.push(p);\n pb\n };\n\n vec![\n PluginDescription::new(\"syntect\", \"0.0\", make_path(\"xi-syntect-plugin\"),\n vec![Autorun]),\n PluginDescription::new(\"braces\", \"0.0\", make_path(\"bracket_example.py\"),\n Vec::new()),\n PluginDescription::new(\"spellcheck\", \"0.0\", make_path(\"spellcheck.py\"),\n vec![OnSyntax(Markdown), OnSyntax(Plaintext)]),\n PluginDescription::new(\"shouty\", \"0.0\", make_path(\"shouty.py\"),\n Vec::new()),\n ].iter()\n .filter(|desc|{ \n if !desc.exec_path.exists() {\n print_err!(\"missing plugin {} at {:?}\", desc.name, desc.exec_path);\n false\n } else {\n true\n }\n })\n .map(|desc| desc.to_owned())\n .collect::<Vec<_>>()\n}\n\n\/\/\/ Describes attributes and capabilities of a plugin.\n\/\/\/\n\/\/\/ Note: - these will eventually be loaded from manifest files.\n#[derive(Debug, Clone)]\npub struct PluginDescription {\n pub name: String,\n pub version: String,\n \/\/scope: PluginScope,\n \/\/ more metadata ...\n \/\/\/ path to plugin executable\n pub exec_path: PathBuf,\n \/\/\/ Events that cause this plugin to run\n pub activations: Vec<PluginActivation>,\n}\n\n\/\/\/ `PluginActivation`s represent events that trigger running a plugin.\n#[derive(Debug, Clone)]\npub enum PluginActivation {\n \/\/\/ Always run this plugin, when available.\n Autorun,\n \/\/\/ Run this plugin if the provided SyntaxDefinition is 
active.\n OnSyntax(SyntaxDefinition),\n \/\/\/ Run this plugin in response to a given command.\n #[allow(dead_code)]\n OnCommand,\n}\n\nimpl PluginDescription {\n fn new<S, P>(name: S, version: S, exec_path: P,\n activations: Vec<PluginActivation>) -> Self\n where S: Into<String>, P: Into<PathBuf>\n {\n PluginDescription {\n name: name.into(),\n version: version.into(),\n exec_path: exec_path.into(),\n activations: activations,\n }\n }\n\n \/\/\/ Starts the executable described in this `PluginDescription`.\n \/\/TODO: make this a free function, & move out of manifest\n pub fn launch<W, C>(&self, manager_ref: &PluginManagerRef<W>,\n view_id: &ViewIdentifier, completion: C)\n where W: Write + Send + 'static,\n C: FnOnce(Result<PluginRef<W>, &'static str>) + Send + 'static\n \/\/ TODO: a real result type\n {\n let path = self.exec_path.clone();\n let view_id = view_id.to_owned();\n let manager_ref = manager_ref.to_weak();\n let description = self.clone();\n\n thread::spawn(move || {\n print_err!(\"starting plugin at path {:?}\", path);\n let mut child = Command::new(&path)\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"plugin failed to start\");\n let child_stdin = child.stdin.take().unwrap();\n let child_stdout = child.stdout.take().unwrap();\n let mut looper = RpcLoop::new(child_stdin);\n let peer = looper.get_peer();\n peer.send_rpc_notification(\"ping\", &Value::Array(Vec::new()));\n let plugin = Plugin {\n peer: peer,\n process: child,\n manager: manager_ref,\n description: description,\n view_id: view_id,\n };\n let mut plugin_ref = PluginRef(Arc::new(Mutex::new(plugin)));\n completion(Ok(plugin_ref.clone()));\n looper.mainloop(|| BufReader::new(child_stdout), &mut plugin_ref);\n });\n }\n}\n\n<commit_msg>Disable autoload py3 plugins<commit_after>\/\/ Copyright 2017 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Structured representation of a plugin's features and capabilities.\n\nuse std::io::{BufReader, Write};\nuse std::env;\nuse std::path::PathBuf;\nuse std::sync::{Arc, Mutex};\nuse std::thread;\nuse std::process::{Command, Stdio};\n\nuse serde_json::Value;\n\nuse xi_rpc::RpcLoop;\n\nuse tabs::ViewIdentifier;\nuse syntax::SyntaxDefinition;\nuse super::PluginManagerRef;\nuse super::{Plugin, PluginRef};\n\n\/\/ optional environment variable for debug plugin executables\nstatic PLUGIN_DIR: &'static str = \"XI_PLUGIN_DIR\";\n\n\/\/ example plugins. 
Eventually these should be loaded from disk.\npub fn debug_plugins() -> Vec<PluginDescription> {\n use self::PluginActivation::*;\n use self::SyntaxDefinition::*;\n let plugin_dir = match env::var(PLUGIN_DIR).map(PathBuf::from) {\n Ok(p) => p,\n Err(_) => env::current_exe().unwrap().parent().unwrap().to_owned(),\n };\n print_err!(\"looking for debug plugins in {:?}\", plugin_dir);\n\n let make_path = |p: &str| -> PathBuf {\n let mut pb = plugin_dir.clone();\n pb.push(p);\n pb\n };\n\n vec![\n PluginDescription::new(\"syntect\", \"0.0\", make_path(\"xi-syntect-plugin\"),\n vec![Autorun]),\n PluginDescription::new(\"braces\", \"0.0\", make_path(\"bracket_example.py\"),\n Vec::new()),\n PluginDescription::new(\"spellcheck\", \"0.0\", make_path(\"spellcheck.py\"),\n Vec::new()),\n PluginDescription::new(\"shouty\", \"0.0\", make_path(\"shouty.py\"),\n Vec::new()),\n ].iter()\n .filter(|desc|{ \n if !desc.exec_path.exists() {\n print_err!(\"missing plugin {} at {:?}\", desc.name, desc.exec_path);\n false\n } else {\n true\n }\n })\n .map(|desc| desc.to_owned())\n .collect::<Vec<_>>()\n}\n\n\/\/\/ Describes attributes and capabilities of a plugin.\n\/\/\/\n\/\/\/ Note: - these will eventually be loaded from manifest files.\n#[derive(Debug, Clone)]\npub struct PluginDescription {\n pub name: String,\n pub version: String,\n \/\/scope: PluginScope,\n \/\/ more metadata ...\n \/\/\/ path to plugin executable\n pub exec_path: PathBuf,\n \/\/\/ Events that cause this plugin to run\n pub activations: Vec<PluginActivation>,\n}\n\n\/\/\/ `PluginActivation`s represent events that trigger running a plugin.\n#[derive(Debug, Clone)]\npub enum PluginActivation {\n \/\/\/ Always run this plugin, when available.\n Autorun,\n \/\/\/ Run this plugin if the provided SyntaxDefinition is active.\n OnSyntax(SyntaxDefinition),\n \/\/\/ Run this plugin in response to a given command.\n #[allow(dead_code)]\n OnCommand,\n}\n\nimpl PluginDescription {\n fn new<S, P>(name: S, version: S, exec_path: P,\n activations: Vec<PluginActivation>) -> Self\n where S: Into<String>, P: Into<PathBuf>\n {\n PluginDescription {\n name: name.into(),\n version: version.into(),\n exec_path: exec_path.into(),\n activations: activations,\n }\n }\n\n \/\/\/ Starts the executable described in this `PluginDescription`.\n \/\/TODO: make this a free function, & move out of manifest\n pub fn launch<W, C>(&self, manager_ref: &PluginManagerRef<W>,\n view_id: &ViewIdentifier, completion: C)\n where W: Write + Send + 'static,\n C: FnOnce(Result<PluginRef<W>, &'static str>) + Send + 'static\n \/\/ TODO: a real result type\n {\n let path = self.exec_path.clone();\n let view_id = view_id.to_owned();\n let manager_ref = manager_ref.to_weak();\n let description = self.clone();\n\n thread::spawn(move || {\n print_err!(\"starting plugin at path {:?}\", path);\n let mut child = Command::new(&path)\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"plugin failed to start\");\n let child_stdin = child.stdin.take().unwrap();\n let child_stdout = child.stdout.take().unwrap();\n let mut looper = RpcLoop::new(child_stdin);\n let peer = looper.get_peer();\n peer.send_rpc_notification(\"ping\", &Value::Array(Vec::new()));\n let plugin = Plugin {\n peer: peer,\n process: child,\n manager: manager_ref,\n description: description,\n view_id: view_id,\n };\n let mut plugin_ref = PluginRef(Arc::new(Mutex::new(plugin)));\n completion(Ok(plugin_ref.clone()));\n looper.mainloop(|| BufReader::new(child_stdout), &mut plugin_ref);\n });\n 
}\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\n\/\/! Report important messages to the user\n\nuse libc::c_int;\nuse glib::translate::ToGlibPtr;\n\nuse gtk::MessageType;\nuse gtk::cast::GTK_INFOBAR;\nuse gtk::{self, ffi};\nuse glib::to_gboolean;\n\n\/\/\/ InfoBar — Report important messages to the user\nstruct_Widget!(InfoBar);\n\nimpl InfoBar {\n pub fn new() -> Option<InfoBar> {\n let tmp_pointer = unsafe { ffi::gtk_info_bar_new() };\n check_pointer!(tmp_pointer, InfoBar)\n }\n\n pub fn add_action_widget<T: gtk::WidgetTrait>(&mut self, child: &T, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_add_action_widget(GTK_INFOBAR(self.pointer), child.unwrap_widget(), response_id as c_int)\n }\n }\n\n pub fn add_button(&mut self, button_text: &str, response_id: i32) -> gtk::Button {\n let button = unsafe {\n ffi::gtk_info_bar_add_button(GTK_INFOBAR(self.pointer), button_text.borrow_to_glib().0, response_id as c_int)\n };\n gtk::FFIWidget::wrap_widget(button)\n }\n\n pub fn set_response_sensitive(&mut self, response_id: i32, setting: bool) -> () {\n unsafe { ffi::gtk_info_bar_set_response_sensitive(GTK_INFOBAR(self.pointer), response_id as c_int, to_gboolean(setting)); }\n }\n\n pub fn set_default_response(&mut self, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_set_default_response(GTK_INFOBAR(self.pointer), response_id as c_int)\n }\n }\n\n pub fn response(&mut self, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_response(GTK_INFOBAR(self.pointer), response_id as c_int)\n }\n }\n\n pub fn set_message_type(&mut self, message_type: MessageType) -> () {\n unsafe {\n ffi::gtk_info_bar_set_message_type(GTK_INFOBAR(self.pointer), message_type);\n }\n }\n\n pub fn get_message_type(&mut self) -> MessageType {\n unsafe {\n ffi::gtk_info_bar_get_message_type(GTK_INFOBAR(self.pointer))\n }\n }\n\n #[cfg(any(feature = \"GTK_3_10\",feature = \"GTK_3_12\", feature = \"GTK_3_14\"))]\n pub fn show_close_button(&mut self, show: bool) -> () {\n unsafe { ffi::gtk_info_bar_set_show_close_button(GTK_INFOBAR(self.pointer), to_gboolean(show)); }\n }\n\n #[cfg(any(feature = \"GTK_3_10\",feature = \"GTK_3_12\", feature = \"GTK_3_14\"))]\n pub fn get_show_close_button(&self) -> bool {\n unsafe { to_bool(ffi::gtk_info_bar_get_show_close_button(GTK_INFOBAR(self.pointer))) }\n }\n}\n\nimpl_drop!(InfoBar);\nimpl_TraitWidget!(InfoBar);\n\nimpl gtk::ContainerTrait for InfoBar {}\nimpl gtk::BoxTrait for InfoBar {}\nimpl gtk::OrientableTrait for InfoBar {}\n\nimpl_widget_events!(InfoBar);\n<commit_msg>Add to_bool back (removed in #239)<commit_after>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of 
the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\n\/\/! Report important messages to the user\n\nuse libc::c_int;\nuse glib::translate::ToGlibPtr;\n\nuse gtk::MessageType;\nuse gtk::cast::GTK_INFOBAR;\nuse gtk::{self, ffi};\nuse glib::{to_bool, to_gboolean};\n\n\/\/\/ InfoBar — Report important messages to the user\nstruct_Widget!(InfoBar);\n\nimpl InfoBar {\n pub fn new() -> Option<InfoBar> {\n let tmp_pointer = unsafe { ffi::gtk_info_bar_new() };\n check_pointer!(tmp_pointer, InfoBar)\n }\n\n pub fn add_action_widget<T: gtk::WidgetTrait>(&mut self, child: &T, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_add_action_widget(GTK_INFOBAR(self.pointer), child.unwrap_widget(), response_id as c_int)\n }\n }\n\n pub fn add_button(&mut self, button_text: &str, response_id: i32) -> gtk::Button {\n let button = unsafe {\n ffi::gtk_info_bar_add_button(GTK_INFOBAR(self.pointer), button_text.borrow_to_glib().0, response_id as c_int)\n };\n gtk::FFIWidget::wrap_widget(button)\n }\n\n pub fn set_response_sensitive(&mut self, response_id: i32, setting: bool) -> () {\n unsafe { ffi::gtk_info_bar_set_response_sensitive(GTK_INFOBAR(self.pointer), response_id as c_int, to_gboolean(setting)); }\n }\n\n pub fn set_default_response(&mut self, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_set_default_response(GTK_INFOBAR(self.pointer), response_id as c_int)\n }\n }\n\n pub fn response(&mut self, response_id: i32) -> () {\n unsafe {\n ffi::gtk_info_bar_response(GTK_INFOBAR(self.pointer), response_id as c_int)\n }\n }\n\n pub fn set_message_type(&mut self, message_type: MessageType) -> () {\n unsafe {\n ffi::gtk_info_bar_set_message_type(GTK_INFOBAR(self.pointer), message_type);\n }\n }\n\n pub fn get_message_type(&mut self) -> MessageType {\n unsafe {\n ffi::gtk_info_bar_get_message_type(GTK_INFOBAR(self.pointer))\n }\n }\n\n #[cfg(any(feature = \"GTK_3_10\",feature = \"GTK_3_12\", feature = \"GTK_3_14\"))]\n pub fn show_close_button(&mut self, show: bool) -> () {\n unsafe { ffi::gtk_info_bar_set_show_close_button(GTK_INFOBAR(self.pointer), to_gboolean(show)); }\n }\n\n #[cfg(any(feature = \"GTK_3_10\",feature = \"GTK_3_12\", feature = \"GTK_3_14\"))]\n pub fn get_show_close_button(&self) -> bool {\n unsafe { to_bool(ffi::gtk_info_bar_get_show_close_button(GTK_INFOBAR(self.pointer))) }\n }\n}\n\nimpl_drop!(InfoBar);\nimpl_TraitWidget!(InfoBar);\n\nimpl gtk::ContainerTrait for InfoBar {}\nimpl gtk::BoxTrait for InfoBar {}\nimpl gtk::OrientableTrait for InfoBar {}\n\nimpl_widget_events!(InfoBar);\n<|endoftext|>"} {"text":"<commit_before>use redox::{Box, String, Url};\nuse redox::{cmp, mem, ptr};\nuse redox::fs::File;\nuse redox::get_slice::GetSlice;\nuse redox::io::*;\nuse redox::ops::DerefMut;\nuse redox::to_num::ToNum;\nuse redox::syscall::sys_clone;\n\nuse orbital::event::Event;\nuse orbital::Point;\nuse orbital::Size;\n\nuse self::display::Display;\nuse self::session::Session;\nuse self::window::Window;\n\npub mod display;\npub mod package;\npub mod scheduler;\npub mod session;\npub mod window;\n\npub static mut session_ptr: *mut Session = 0 as *mut Session;\n\n\/\/\/ 
A window resource\npub struct Resource {\n \/\/\/ The window\n pub window: Box<Window>,\n \/\/\/ Seek point\n pub seek: usize,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Resource>> {\n Some(box Resource {\n window: Window::new(self.window.point, self.window.size, self.window.title.clone()),\n seek: self.seek,\n })\n }\n\n \/\/\/ Return the url of this resource\n pub fn path(&self) -> Option<String> {\n Some(format!(\"orbital:\/\/\/{}\/{}\/{}\/{}\/{}\",\n self.window.point.x,\n self.window.point.y,\n self.window.size.width,\n self.window.size.height,\n self.window.title))\n }\n\n \/\/\/ Read data to buffer\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/\/Read events from window\n let mut i = 0;\n while buf.len() - i >= mem::size_of::<Event>() {\n match self.window.poll() {\n Some(event) => {\n unsafe { ptr::write(buf.as_ptr().offset(i as isize) as *mut Event, event) };\n i += mem::size_of::<Event>();\n }\n None => break,\n }\n }\n\n Some(i)\n }\n\n \/\/\/ Write to resource\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let content = &mut self.window.content;\n\n let size = cmp::min(content.size - self.seek, buf.len());\n unsafe {\n Display::copy_run(buf.as_ptr() as usize,\n content.offscreen + self.seek,\n size);\n }\n self.seek += size;\n\n return Some(size);\n }\n\n \/\/\/ Seek\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n let end = self.window.content.size;\n\n self.seek = match pos {\n SeekFrom::Start(offset) => cmp::min(end, cmp::max(0, offset)),\n SeekFrom::Current(offset) => cmp::min(end, cmp::max(0, self.seek as isize + offset) as usize),\n SeekFrom::End(offset) => cmp::min(end, cmp::max(0, end as isize + offset) as usize),\n };\n\n return Some(self.seek);\n }\n\n \/\/\/ Sync the resource, should flip\n pub fn sync(&mut self) -> bool {\n self.window.redraw();\n true\n }\n}\n\n\/\/\/ A window scheme\npub struct Scheme {\n pub session: Box<Session>,\n pub next_x: isize,\n pub next_y: isize,\n}\n\nimpl Scheme {\n pub fn new() -> Box<Scheme> {\n debugln!(\"- Starting Orbital\");\n debugln!(\" Console: Press F1\");\n debugln!(\" Desktop: Press F2\");\n let mut ret = box Scheme {\n session: Session::new(),\n next_x: 0,\n next_y: 0,\n };\n unsafe { session_ptr = ret.session.deref_mut() };\n ret\n }\n\n pub fn open(&mut self, url_str: &str, _: usize) -> Option<Box<Resource>> {\n \/\/window:\/\/host\/path\/path\/path is the path type we're working with.\n let url = Url::from_str(url_str);\n\n let host = url.host();\n if host.is_empty() {\n let path = url.path_parts();\n let mut pointx = match path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n let mut pointy = match path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n let size_width = match path.get(2) {\n Some(w) => w.to_num(),\n None => 100,\n };\n let size_height = match path.get(3) {\n Some(h) => h.to_num(),\n None => 100,\n };\n\n let mut title = match path.get(4) {\n Some(t) => t.clone(),\n None => String::new(),\n };\n for i in 5..path.len() {\n if let Some(t) = path.get(i) {\n title = title + \"\/\" + t;\n }\n }\n\n if pointx <= 0 || pointy <= 0 {\n if self.next_x > self.session.display.width as isize - size_width as isize {\n self.next_x = 0;\n }\n self.next_x += 32;\n pointx = self.next_x;\n\n if self.next_y > self.session.display.height as isize - size_height as isize {\n self.next_y = 0;\n }\n self.next_y += 32;\n pointy = self.next_y;\n }\n\n Some(box Resource {\n window: Window::new(Point::new(pointx, pointy), Size::new(size_width, size_height), 
title),\n seek: 0,\n })\n } else if host == \"launch\" {\n let path = url.path();\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n\n for package in self.session.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') && path.ends_with(&accept.get_slice(Some(1), None)))\n || (accept.ends_with('*') && path.starts_with(&accept.get_slice(None, Some(accept.len() - 1))))\n {\n accepted = true;\n break;\n }\n }\n if accepted {\n let binary = package.binary.clone();\n println!(\"Exec {} {}\", binary, path);\n \/\/File::exec(&binary, &[&path]);\n break;\n }\n }\n\n scheduler::end_no_ints(reenable);\n }\n\n None\n } else {\n None\n }\n }\n\n pub fn event(&mut self, event: &Event) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n\n self.session.event(event);\n\n scheduler::end_no_ints(reenable);\n\n self.session.redraw();\n }\n }\n}\n\n\/\/TODO: This is a hack and it will go away\n#[cold]\n#[inline(never)]\n#[no_mangle]\npub unsafe extern \"C\" fn _event(scheme: *mut Scheme, event: *const Event) {\n (*scheme).event(&*event);\n}\n<commit_msg>Do not clone inside of orbital<commit_after>use redox::{Box, String, Url};\nuse redox::{cmp, mem, ptr};\nuse redox::fs::File;\nuse redox::get_slice::GetSlice;\nuse redox::io::*;\nuse redox::ops::DerefMut;\nuse redox::to_num::ToNum;\n\nuse orbital::event::Event;\nuse orbital::Point;\nuse orbital::Size;\n\nuse self::display::Display;\nuse self::session::Session;\nuse self::window::Window;\n\npub mod display;\npub mod package;\npub mod scheduler;\npub mod session;\npub mod window;\n\npub static mut session_ptr: *mut Session = 0 as *mut Session;\n\n\/\/\/ A window resource\npub struct Resource {\n \/\/\/ The window\n pub window: Box<Window>,\n \/\/\/ Seek point\n pub seek: usize,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Resource>> {\n Some(box Resource {\n window: Window::new(self.window.point, self.window.size, self.window.title.clone()),\n seek: self.seek,\n })\n }\n\n \/\/\/ Return the url of this resource\n pub fn path(&self) -> Option<String> {\n Some(format!(\"orbital:\/\/\/{}\/{}\/{}\/{}\/{}\",\n self.window.point.x,\n self.window.point.y,\n self.window.size.width,\n self.window.size.height,\n self.window.title))\n }\n\n \/\/\/ Read data to buffer\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/\/Read events from window\n let mut i = 0;\n while buf.len() - i >= mem::size_of::<Event>() {\n match self.window.poll() {\n Some(event) => {\n unsafe { ptr::write(buf.as_ptr().offset(i as isize) as *mut Event, event) };\n i += mem::size_of::<Event>();\n }\n None => break,\n }\n }\n\n Some(i)\n }\n\n \/\/\/ Write to resource\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let content = &mut self.window.content;\n\n let size = cmp::min(content.size - self.seek, buf.len());\n unsafe {\n Display::copy_run(buf.as_ptr() as usize,\n content.offscreen + self.seek,\n size);\n }\n self.seek += size;\n\n return Some(size);\n }\n\n \/\/\/ Seek\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n let end = self.window.content.size;\n\n self.seek = match pos {\n SeekFrom::Start(offset) => cmp::min(end, cmp::max(0, offset)),\n SeekFrom::Current(offset) => cmp::min(end, cmp::max(0, self.seek as isize + offset) as usize),\n SeekFrom::End(offset) => cmp::min(end, cmp::max(0, end as isize + offset) as usize),\n };\n\n return Some(self.seek);\n }\n\n \/\/\/ Sync the resource, should flip\n pub fn sync(&mut self) -> bool {\n self.window.redraw();\n true\n 
}\n}\n\n\/\/\/ A window scheme\npub struct Scheme {\n pub session: Box<Session>,\n pub next_x: isize,\n pub next_y: isize,\n}\n\nimpl Scheme {\n pub fn new() -> Box<Scheme> {\n debugln!(\"- Starting Orbital\");\n debugln!(\" Console: Press F1\");\n debugln!(\" Desktop: Press F2\");\n let mut ret = box Scheme {\n session: Session::new(),\n next_x: 0,\n next_y: 0,\n };\n unsafe { session_ptr = ret.session.deref_mut() };\n ret\n }\n\n pub fn open(&mut self, url_str: &str, _: usize) -> Option<Box<Resource>> {\n \/\/window:\/\/host\/path\/path\/path is the path type we're working with.\n let url = Url::from_str(url_str);\n\n let host = url.host();\n if host.is_empty() {\n let path = url.path_parts();\n let mut pointx = match path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n let mut pointy = match path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n let size_width = match path.get(2) {\n Some(w) => w.to_num(),\n None => 100,\n };\n let size_height = match path.get(3) {\n Some(h) => h.to_num(),\n None => 100,\n };\n\n let mut title = match path.get(4) {\n Some(t) => t.clone(),\n None => String::new(),\n };\n for i in 5..path.len() {\n if let Some(t) = path.get(i) {\n title = title + \"\/\" + t;\n }\n }\n\n if pointx <= 0 || pointy <= 0 {\n if self.next_x > self.session.display.width as isize - size_width as isize {\n self.next_x = 0;\n }\n self.next_x += 32;\n pointx = self.next_x;\n\n if self.next_y > self.session.display.height as isize - size_height as isize {\n self.next_y = 0;\n }\n self.next_y += 32;\n pointy = self.next_y;\n }\n\n Some(box Resource {\n window: Window::new(Point::new(pointx, pointy), Size::new(size_width, size_height), title),\n seek: 0,\n })\n } else if host == \"launch\" {\n let path = url.path();\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n\n for package in self.session.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if (accept.starts_with('*') && path.ends_with(&accept.get_slice(Some(1), None)))\n || (accept.ends_with('*') && path.starts_with(&accept.get_slice(None, Some(accept.len() - 1))))\n {\n accepted = true;\n break;\n }\n }\n if accepted {\n let binary = package.binary.clone();\n println!(\"Exec {} {}\", binary, path);\n \/\/File::exec(&binary, &[&path]);\n break;\n }\n }\n\n scheduler::end_no_ints(reenable);\n }\n\n None\n } else {\n None\n }\n }\n\n pub fn event(&mut self, event: &Event) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n\n self.session.event(event);\n\n scheduler::end_no_ints(reenable);\n\n self.session.redraw();\n }\n }\n}\n\n\/\/TODO: This is a hack and it will go away\n#[cold]\n#[inline(never)]\n#[no_mangle]\npub unsafe extern \"C\" fn _event(scheme: *mut Scheme, event: *const Event) {\n (*scheme).event(&*event);\n}\n<|endoftext|>"} {"text":"<commit_before>\nuse super::*;\nmod basic_test;\n<commit_msg>Fix tests of module `wplot`, test under features<commit_after>\n#[ allow( unused_imports ) ]\nuse super::*;\n#[ cfg( feature = \"use_std\" ) ]\nmod basic_test;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example for discard server<commit_after>#[macro_use]\nextern crate log;\nextern crate env_logger;\n\nextern crate num_cpus;\nextern crate futures;\nextern crate ruyi;\n\nuse std::thread;\n\nuse futures::{Future, Stream};\n\nuse ruyi::reactor::{IntoStream, IntoTask, Task};\nuse ruyi::service::tcp::{self, Handler, Session};\n\n#[derive(Clone)]\nstruct Discard;\n\nimpl Handler for Discard {\n fn handle(&mut self, session: Session) -> Task 
{\n session\n .into_stream()\n .for_each(|data| {\n debug!(\"Discard:\\n{}\", data.as_hex_dump());\n Ok(())\n })\n .map_err(|e| error!(\"{}\", e))\n .into_task()\n }\n}\n\nfn main() {\n \/\/ Initialize logger\n env_logger::init().unwrap();\n\n let n = num_cpus::get();\n match tcp::Server::with_handler(Discard)\n .port(10009)\n .num_of_workers(n)\n .start() {\n Ok(()) => thread::park(),\n Err(e) => error!(\"{}\", e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add srandmember documentation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Solution for cubic ufo in rust<commit_after>use std::io;\nuse std::f64;\nuse std::str::FromStr;\n\nfn main() {\n println!(\"cubicUFO\");\n\n println!(\"Input test cases.\");\n\n let mut case = String::new();\n\n io::stdin().read_line(&mut case)\n .expect(\"Failed to read line\"); \/\/read input for cases\n\n case = case.to_string();\n let cases: Vec<&str> = case.split(\"\\n\").collect();\n let case_test = u32::from_str(cases[0]).unwrap(); \/\/convert to integer\n\n for n in 1..=case_test {\n println!(\"\\nCase #{}\", n);\n\n println!(\"Input area for case #{}\", n);\n let mut area_in = String::new();\n io::stdin().read_line(&mut area_in)\n .expect(\"Failed to read line\"); \/\/read input for desiered area\n\n area_in = area_in.to_string();\n let mut new_area: Vec<&str> = area_in.split(\"\\n\").collect();\n let mut area = f64::from_str(new_area[0]).unwrap(); \/\/convert to f64\n\n \/\/rotation matrix\n if area <= f64::consts::SQRT_2 {\n let mut ar = area.powf(2.0) - 1.0;\n let mut theta = ar.asin() \/ 2.0;\n println!(\"[ {}, {}, {} ]\", 0.5, 0, 0);\n println!(\"[ {}, {}, {} ]\", 0.0, 0.5*theta.cos(), 0.5*theta.sin());\n println!(\"[ {}, {}, {} ]\", 0.0, -0.5*theta.sin(), 0.5*theta.cos());\n } else {\n let three_f64 = 3.0_f64;\n let two_f64 = 2.0_f64;\n let eight_f64 = 8.0_f64;\n let div = two_f64.sqrt() \/ three_f64.sqrt();\n let thi1 = area \/ three_f64.sqrt();\n let mut thi = thi1.asin() - div.asin();\n println!(\"[ {}, {}, {} ]\", thi.cos()\/2.0, thi.sin()\/2.0, 0);\n println!(\"[ {}, {}, {} ]\", -thi.sin() \/ eight_f64.sqrt(), thi.cos() \/ eight_f64.sqrt(), 1.0\/eight_f64.sqrt());\n println!(\"[ {}, {}, {} ]\", thi.sin() \/ eight_f64.sqrt(), -thi.cos() \/ eight_f64.sqrt(), 1.0\/eight_f64.sqrt());\n }\n\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>day 8 solved.<commit_after>use std::io::prelude::*;\nuse std::fs::File;\nuse std::path::Path;\n\nenum State {\n Normal,\n Backslash,\n ParseHex(bool)\n}\n\nfn main() {\n let mut f = File::open(Path::new(\"\/Users\/PetarV\/rust-proj\/advent-of-rust\/target\/input.txt\"))\n \t.ok()\n \t.expect(\"Failed to open the input file!\");\n\n\tlet mut input = String::new();\n\tf.read_to_string(&mut input)\n\t\t.ok()\n\t\t.expect(\"Failed to read from the input file!\");\n\n let mut chars_in_code = 0;\n let mut chars_in_memo = 0;\n\n for line in input.lines() {\n chars_in_code += line.len();\n chars_in_memo += line.chars()\n .fold((0, State::Normal), |(acc, st), ch| {\n match st {\n State::Normal => match ch {\n '\"' => (acc, State::Normal),\n '\\\\' => (acc + 1, State::Backslash),\n _ => (acc + 1, State::Normal)\n },\n State::Backslash => match ch {\n 'x' => (acc, State::ParseHex(true)),\n _ => (acc, State::Normal)\n },\n State::ParseHex(b) => if b {\n (acc, State::ParseHex(false))\n } else {\n (acc, State::Normal)\n }\n }\n }).0;\n }\n\n let ret = chars_in_code - chars_in_memo;\n\n println!(\"The difference in characters in code and memory is {}.\", ret);\n\n let mut 
chars_encoded = 0;\n\n for line in input.lines() {\n chars_encoded += line.chars().fold(2, |acc, ch| { \/\/ start at 2 to entail endquotes\n match ch {\n '\"' => acc + 2,\n '\\\\' => acc + 2,\n _ => acc + 1\n }\n });\n }\n\n let ret = chars_encoded - chars_in_code;\n\n println!(\"The difference in code characters in the encoded and original version is {}.\", ret);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>implementation<commit_after>extern crate bit_vec;\nextern crate farmhash;\n\nuse std::default::Default;\nuse std::f64::consts;\nuse bit_vec::BitVec;\nuse farmhash::hash64;\n\n\npub struct BloomFilter {\n bv: BitVec,\n hashes: usize\n}\n\n\nimpl Default for BloomFilter {\n #[inline]\n fn default() -> BloomFilter {\n \/\/ http:\/\/hur.st\/bloomfilter?n=10000&p=0.001\n BloomFilter::new(10_000, 0.001)\n }\n}\n\n\nimpl BloomFilter {\n pub fn new(capacity: usize, error_rate: f64) -> BloomFilter {\n assert!((error_rate > 0.0 && error_rate < 1.0) && capacity > 0);\n\n \/\/ https:\/\/en.wikipedia.org\/wiki\/Bloom_filter#Probability_of_false_positives\n let m = BloomFilter::num_of_bits_in_vec(capacity, error_rate);\n\n \/\/ https:\/\/en.wikipedia.org\/wiki\/Bloom_filter#Optimal_number_of_hash_functions\n let k = BloomFilter::num_of_hash_funcs(m, capacity);\n\n BloomFilter {\n bv: BitVec::from_elem(capacity, false),\n hashes: k\n }\n }\n\n fn num_of_bits_in_vec(capacity: usize, error_rate: f64) -> usize {\n (-1.0 * (((capacity as f64) * error_rate.ln()) \/ (1.0 \/ consts::LN_2.powf(2.0)).ln())).ceil() as usize\n }\n\n fn num_of_hash_funcs(m: usize, capacity: usize) -> usize {\n (consts::LN_2 * ((m as f64) \/ (capacity as f64))).round().abs() as usize\n }\n\n pub fn insert(&mut self, value: &str) {\n for i in 0..self.hashes {\n let index = self.nth_hash(&value, i);\n self.bv.set(index, true);\n }\n }\n\n fn nth_hash(&self, x: &str, m: usize) -> usize {\n let hashval = x.to_string() + &m.to_string(); \/\/ meh\n (hash64(&hashval.as_bytes()) % (self.bv.capacity() as u64)) as usize\n }\n\n pub fn has(&self, value: &str) -> bool {\n for i in 0..self.hashes {\n let index = self.nth_hash(&value, i);\n if !self.bv[index] {\n return false;\n }\n }\n true\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![deny(unused)]\n#![cfg_attr(test, deny(warnings))]\n#![recursion_limit=\"128\"]\n\n#[macro_use] extern crate failure;\n#[macro_use] extern crate log;\n#[macro_use] extern crate scoped_tls;\n#[macro_use] extern crate serde_derive;\n#[macro_use] extern crate serde_json;\nextern crate atty;\nextern crate crates_io as registry;\nextern crate crossbeam;\nextern crate curl;\nextern crate docopt;\nextern crate filetime;\nextern crate flate2;\nextern crate fs2;\nextern crate git2;\nextern crate glob;\nextern crate hex;\nextern crate home;\nextern crate ignore;\nextern crate jobserver;\nextern crate lazycell;\nextern crate libc;\nextern crate libgit2_sys;\nextern crate num_cpus;\nextern crate same_file;\nextern crate semver;\nextern crate serde;\nextern crate serde_ignored;\nextern crate shell_escape;\nextern crate tar;\nextern crate tempdir;\nextern crate termcolor;\nextern crate toml;\nextern crate url;\n#[cfg(target_os = \"macos\")]\nextern crate core_foundation;\n\nuse std::fmt;\n\nuse serde::Deserialize;\nuse serde::ser;\nuse docopt::Docopt;\nuse failure::Error;\n\nuse core::Shell;\nuse core::shell::Verbosity::Verbose;\n\npub use util::{CargoError, CargoResult, CliError, CliResult, Config};\npub use util::errors::Internal;\n\npub const CARGO_ENV: &'static str = \"CARGO\";\n\npub mod core;\npub mod 
ops;\npub mod sources;\npub mod util;\n\npub struct CommitInfo {\n pub short_commit_hash: String,\n pub commit_hash: String,\n pub commit_date: String,\n}\n\npub struct CfgInfo {\n \/\/ Information about the git repository we may have been built from.\n pub commit_info: Option<CommitInfo>,\n \/\/ The release channel we were built for.\n pub release_channel: String,\n}\n\npub struct VersionInfo {\n pub major: String,\n pub minor: String,\n pub patch: String,\n pub pre_release: Option<String>,\n \/\/ Information that's only available when we were built with\n \/\/ configure\/make, rather than cargo itself.\n pub cfg_info: Option<CfgInfo>,\n}\n\nimpl fmt::Display for VersionInfo {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"cargo {}.{}.{}\",\n self.major, self.minor, self.patch)?;\n if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {\n if channel != \"stable\" {\n write!(f, \"-{}\", channel)?;\n let empty = String::from(\"\");\n write!(f, \"{}\", self.pre_release.as_ref().unwrap_or(&empty))?;\n }\n };\n\n if let Some(ref cfg) = self.cfg_info {\n if let Some(ref ci) = cfg.commit_info {\n write!(f, \" ({} {})\",\n ci.short_commit_hash, ci.commit_date)?;\n }\n };\n Ok(())\n }\n}\n\npub fn call_main_without_stdin<'de, Flags: Deserialize<'de>>(\n exec: fn(Flags, &mut Config) -> CliResult,\n config: &mut Config,\n usage: &str,\n args: &[String],\n options_first: bool) -> CliResult\n{\n let docopt = Docopt::new(usage).unwrap()\n .options_first(options_first)\n .argv(args.iter().map(|s| &s[..]))\n .help(true);\n\n let flags = docopt.deserialize().map_err(|e| {\n let code = if e.fatal() {1} else {0};\n CliError::new(e.into(), code)\n })?;\n\n exec(flags, config)\n}\n\npub fn print_json<T: ser::Serialize>(obj: &T) {\n let encoded = serde_json::to_string(&obj).unwrap();\n println!(\"{}\", encoded);\n}\n\npub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {\n debug!(\"exit_with_error; err={:?}\", err);\n\n let CliError { error, exit_code, unknown } = err;\n \/\/ exit_code == 0 is non-fatal error, e.g. 
docopt version info\n let fatal = exit_code != 0;\n\n let hide = unknown && shell.verbosity() != Verbose;\n\n if let Some(error) = error {\n if hide {\n drop(shell.error(\"An unknown error occurred\"))\n } else if fatal {\n drop(shell.error(&error))\n } else {\n println!(\"{}\", error);\n }\n\n if !handle_cause(&error, shell) || hide {\n drop(writeln!(shell.err(), \"\\nTo learn more, run the command again \\\n with --verbose.\"));\n }\n }\n\n std::process::exit(exit_code)\n}\n\npub fn handle_error(err: CargoError, shell: &mut Shell) {\n debug!(\"handle_error; err={:?}\", &err);\n\n let _ignored_result = shell.error(&err);\n handle_cause(&err, shell);\n}\n\nfn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {\n fn print(error: String, shell: &mut Shell) {\n drop(writeln!(shell.err(), \"\\nCaused by:\"));\n drop(writeln!(shell.err(), \" {}\", error));\n }\n\n let verbose = shell.verbosity();\n\n if verbose == Verbose {\n \/\/ The first error has already been printed to the shell\n \/\/ Print all remaining errors\n for err in cargo_err.causes().skip(1) {\n print(err.to_string(), shell);\n }\n } else {\n \/\/ The first error has already been printed to the shell\n \/\/ Print remaining errors until one marked as Internal appears\n for err in cargo_err.causes().skip(1) {\n if err.downcast_ref::<Internal>().is_some() {\n return false;\n }\n\n print(err.to_string(), shell);\n }\n }\n\n true\n}\n\npub fn version() -> VersionInfo {\n macro_rules! env_str {\n ($name:expr) => { env!($name).to_string() }\n }\n macro_rules! option_env_str {\n ($name:expr) => { option_env!($name).map(|s| s.to_string()) }\n }\n match option_env!(\"CFG_RELEASE_CHANNEL\") {\n \/\/ We have environment variables set up from configure\/make.\n Some(_) => {\n let commit_info =\n option_env!(\"CFG_COMMIT_HASH\").map(|s| {\n CommitInfo {\n commit_hash: s.to_string(),\n short_commit_hash: option_env_str!(\"CFG_SHORT_COMMIT_HASH\").unwrap(),\n commit_date: option_env_str!(\"CFG_COMMIT_DATE\").unwrap(),\n }\n });\n VersionInfo {\n major: env_str!(\"CARGO_PKG_VERSION_MAJOR\"),\n minor: env_str!(\"CARGO_PKG_VERSION_MINOR\"),\n patch: env_str!(\"CARGO_PKG_VERSION_PATCH\"),\n pre_release: option_env_str!(\"CARGO_PKG_VERSION_PRE\"),\n cfg_info: Some(CfgInfo {\n release_channel: option_env_str!(\"CFG_RELEASE_CHANNEL\").unwrap(),\n commit_info: commit_info,\n }),\n }\n },\n \/\/ We are being compiled by Cargo itself.\n None => {\n VersionInfo {\n major: env_str!(\"CARGO_PKG_VERSION_MAJOR\"),\n minor: env_str!(\"CARGO_PKG_VERSION_MINOR\"),\n patch: env_str!(\"CARGO_PKG_VERSION_PATCH\"),\n pre_release: option_env_str!(\"CARGO_PKG_VERSION_PRE\"),\n cfg_info: None,\n }\n }\n }\n}\n<commit_msg>Fix DocOpt deserialization type bounds<commit_after>#![deny(unused)]\n#![cfg_attr(test, deny(warnings))]\n#![recursion_limit=\"128\"]\n\n#[macro_use] extern crate failure;\n#[macro_use] extern crate log;\n#[macro_use] extern crate scoped_tls;\n#[macro_use] extern crate serde_derive;\n#[macro_use] extern crate serde_json;\nextern crate atty;\nextern crate crates_io as registry;\nextern crate crossbeam;\nextern crate curl;\nextern crate docopt;\nextern crate filetime;\nextern crate flate2;\nextern crate fs2;\nextern crate git2;\nextern crate glob;\nextern crate hex;\nextern crate home;\nextern crate ignore;\nextern crate jobserver;\nextern crate lazycell;\nextern crate libc;\nextern crate libgit2_sys;\nextern crate num_cpus;\nextern crate same_file;\nextern crate semver;\nextern crate serde;\nextern crate serde_ignored;\nextern crate 
shell_escape;\nextern crate tar;\nextern crate tempdir;\nextern crate termcolor;\nextern crate toml;\nextern crate url;\n#[cfg(target_os = \"macos\")]\nextern crate core_foundation;\n\nuse std::fmt;\n\nuse serde::de::DeserializeOwned;\nuse serde::ser;\nuse docopt::Docopt;\nuse failure::Error;\n\nuse core::Shell;\nuse core::shell::Verbosity::Verbose;\n\npub use util::{CargoError, CargoResult, CliError, CliResult, Config};\npub use util::errors::Internal;\n\npub const CARGO_ENV: &'static str = \"CARGO\";\n\npub mod core;\npub mod ops;\npub mod sources;\npub mod util;\n\npub struct CommitInfo {\n pub short_commit_hash: String,\n pub commit_hash: String,\n pub commit_date: String,\n}\n\npub struct CfgInfo {\n \/\/ Information about the git repository we may have been built from.\n pub commit_info: Option<CommitInfo>,\n \/\/ The release channel we were built for.\n pub release_channel: String,\n}\n\npub struct VersionInfo {\n pub major: String,\n pub minor: String,\n pub patch: String,\n pub pre_release: Option<String>,\n \/\/ Information that's only available when we were built with\n \/\/ configure\/make, rather than cargo itself.\n pub cfg_info: Option<CfgInfo>,\n}\n\nimpl fmt::Display for VersionInfo {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"cargo {}.{}.{}\",\n self.major, self.minor, self.patch)?;\n if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {\n if channel != \"stable\" {\n write!(f, \"-{}\", channel)?;\n let empty = String::from(\"\");\n write!(f, \"{}\", self.pre_release.as_ref().unwrap_or(&empty))?;\n }\n };\n\n if let Some(ref cfg) = self.cfg_info {\n if let Some(ref ci) = cfg.commit_info {\n write!(f, \" ({} {})\",\n ci.short_commit_hash, ci.commit_date)?;\n }\n };\n Ok(())\n }\n}\n\npub fn call_main_without_stdin<Flags: DeserializeOwned>(\n exec: fn(Flags, &mut Config) -> CliResult,\n config: &mut Config,\n usage: &str,\n args: &[String],\n options_first: bool) -> CliResult\n{\n let docopt = Docopt::new(usage).unwrap()\n .options_first(options_first)\n .argv(args.iter().map(|s| &s[..]))\n .help(true);\n\n let flags = docopt.deserialize().map_err(|e| {\n let code = if e.fatal() {1} else {0};\n CliError::new(e.into(), code)\n })?;\n\n exec(flags, config)\n}\n\npub fn print_json<T: ser::Serialize>(obj: &T) {\n let encoded = serde_json::to_string(&obj).unwrap();\n println!(\"{}\", encoded);\n}\n\npub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {\n debug!(\"exit_with_error; err={:?}\", err);\n\n let CliError { error, exit_code, unknown } = err;\n \/\/ exit_code == 0 is non-fatal error, e.g. 
docopt version info\n let fatal = exit_code != 0;\n\n let hide = unknown && shell.verbosity() != Verbose;\n\n if let Some(error) = error {\n if hide {\n drop(shell.error(\"An unknown error occurred\"))\n } else if fatal {\n drop(shell.error(&error))\n } else {\n println!(\"{}\", error);\n }\n\n if !handle_cause(&error, shell) || hide {\n drop(writeln!(shell.err(), \"\\nTo learn more, run the command again \\\n with --verbose.\"));\n }\n }\n\n std::process::exit(exit_code)\n}\n\npub fn handle_error(err: CargoError, shell: &mut Shell) {\n debug!(\"handle_error; err={:?}\", &err);\n\n let _ignored_result = shell.error(&err);\n handle_cause(&err, shell);\n}\n\nfn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {\n fn print(error: String, shell: &mut Shell) {\n drop(writeln!(shell.err(), \"\\nCaused by:\"));\n drop(writeln!(shell.err(), \" {}\", error));\n }\n\n let verbose = shell.verbosity();\n\n if verbose == Verbose {\n \/\/ The first error has already been printed to the shell\n \/\/ Print all remaining errors\n for err in cargo_err.causes().skip(1) {\n print(err.to_string(), shell);\n }\n } else {\n \/\/ The first error has already been printed to the shell\n \/\/ Print remaining errors until one marked as Internal appears\n for err in cargo_err.causes().skip(1) {\n if err.downcast_ref::<Internal>().is_some() {\n return false;\n }\n\n print(err.to_string(), shell);\n }\n }\n\n true\n}\n\npub fn version() -> VersionInfo {\n macro_rules! env_str {\n ($name:expr) => { env!($name).to_string() }\n }\n macro_rules! option_env_str {\n ($name:expr) => { option_env!($name).map(|s| s.to_string()) }\n }\n match option_env!(\"CFG_RELEASE_CHANNEL\") {\n \/\/ We have environment variables set up from configure\/make.\n Some(_) => {\n let commit_info =\n option_env!(\"CFG_COMMIT_HASH\").map(|s| {\n CommitInfo {\n commit_hash: s.to_string(),\n short_commit_hash: option_env_str!(\"CFG_SHORT_COMMIT_HASH\").unwrap(),\n commit_date: option_env_str!(\"CFG_COMMIT_DATE\").unwrap(),\n }\n });\n VersionInfo {\n major: env_str!(\"CARGO_PKG_VERSION_MAJOR\"),\n minor: env_str!(\"CARGO_PKG_VERSION_MINOR\"),\n patch: env_str!(\"CARGO_PKG_VERSION_PATCH\"),\n pre_release: option_env_str!(\"CARGO_PKG_VERSION_PRE\"),\n cfg_info: Some(CfgInfo {\n release_channel: option_env_str!(\"CFG_RELEASE_CHANNEL\").unwrap(),\n commit_info: commit_info,\n }),\n }\n },\n \/\/ We are being compiled by Cargo itself.\n None => {\n VersionInfo {\n major: env_str!(\"CARGO_PKG_VERSION_MAJOR\"),\n minor: env_str!(\"CARGO_PKG_VERSION_MINOR\"),\n patch: env_str!(\"CARGO_PKG_VERSION_PATCH\"),\n pre_release: option_env_str!(\"CARGO_PKG_VERSION_PRE\"),\n cfg_info: None,\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a multiserver example supporting an arbitrary number of servers.<commit_after>extern crate futures;\nextern crate irc;\nextern crate tokio_core;\n\nuse std::default::Default;\nuse futures::future;\nuse irc::error;\nuse irc::client::prelude::*;\nuse tokio_core::reactor::Core;\n\nfn main() {\n let cfg1 = Config {\n nickname: Some(\"pickles\".to_owned()),\n server: Some(\"irc.fyrechat.net\".to_owned()),\n channels: Some(vec![\"#irc-crate\".to_owned()]),\n ..Default::default()\n };\n let cfg2 = Config {\n nickname: Some(\"pickles\".to_owned()),\n server: Some(\"irc.pdgn.co\".to_owned()),\n channels: Some(vec![\"#irc-crate\".to_owned()]),\n use_ssl: Some(true),\n ..Default::default()\n };\n\n let configs = vec![cfg1, cfg2];\n\n \/\/ Create an event loop to run the multiple connections on.\n let mut 
reactor = Core::new().unwrap();\n let handle = reactor.handle();\n\n for config in configs {\n let server = IrcServer::from_config(config).unwrap();\n server.identify().unwrap();\n\n handle.spawn(server.stream().for_each(move |message| {\n process_msg(&server, message)\n }).map_err(|e| Err(e).unwrap()))\n }\n\n \/\/ You might instead want to join all the futures and run them directly.\n reactor.run(future::empty::<(), ()>()).unwrap();\n}\n\nfn process_msg(server: &IrcServer, message: Message) -> error::Result<()> {\n print!(\"{}\", message);\n match message.command {\n Command::PRIVMSG(ref target, ref msg) => {\n if msg.contains(\"pickles\") {\n server.send_privmsg(target, \"Hi!\")?;\n }\n }\n _ => (),\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust: more Iterator<Rect> examples.<commit_after>use std::slice::Iter;\nuse std::iter::Filter;\n\nfn make_vec() -> Vec<i32> {\n vec![4, 5, 6, -1, -100, 2, 400, 400, 500, 600, 11, 13, 15, 17, 19, 21, 23, 25]\n}\n\npub fn run() {\n println!(\"********* Iterator - missing Linq examples *********\");\n \/\/ demo_union();\n \/\/ demo_single();\n \/\/ demo_single_or_default();\n \/\/ demo_intersect();\n \/\/ demo_join();\n \/\/ demo_first();\n \/\/ demo_distinct();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Initial commit<commit_after>extern crate diesel_demo;\nextern crate diesel;\n\nuse self::diesel_demo::*;\nuse self::diesel_demo::models::*;\nuse self::diesel::prelude::*;\n\nfn main()\n{\n use diesel_demo::schema::posts::dsl::*;\n\n let connection = establish_connection();\n let results = posts.filter(published.eq(true))\n .limit(5)\n .load::<Post>(&connection)\n .expect(\"Error loading posts\");\n\n println!(\"Displaying {} posts\", results.len());\n for post in results\n {\n println!(\"{}\", post.title);\n println!(\"--------\\n\");\n println!(\"{}\", post.body);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Document processes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add sensor read methods<commit_after>use std;\nuse std::fs::File;\nuse std::io::prelude::*;\n\nfn get<T>(filename : & str) -> T where T : std::str::FromStr, <T>::Err: std::fmt::Display {\n let mut file = match File::open(filename) {\n Err(why) => panic!(\"couldn't open {}: {}\", filename, why),\n Ok(file) => file\n };\n\n let mut s = String::new();\n match file.read_to_string(&mut s) {\n Err(why) => panic!(\"couldn't read {}: {}\", filename, why),\n Ok(_) => {\n let mut lines = s.lines();\n match lines.next() {\n None => panic!(\"couldn't extract line {}\", s),\n Some(l) => match l.parse::<T>() {\n Err(why) => panic!(\"couldn't parse {}: {}\", l, why),\n Ok(val) => val\n }\n }\n }\n }\n}\n\npub fn get_pressure() -> f32 {\n\tget::<f32>(\"\/sys\/bus\/i2c\/devices\/i2c-1\/1-0076\/iio:device1\/in_pressure_input\")\n}\n\npub fn get_bmp280_temperature() -> i32 {\n get::<i32>(\"\/sys\/bus\/i2c\/devices\/i2c-1\/1-0076\/iio:device1\/in_temp_input\")\n}\n\npub fn get_htu21_temperature() -> i32{\n get::<i32>(\"\/sys\/bus\/i2c\/devices\/i2c-1\/1-0040\/iio:device0\/in_temp_input\")\n}\n\npub fn get_htu21_humidity() -> i32{\n get::<i32>(\"\/sys\/bus\/i2c\/devices\/i2c-1\/1-0040\/iio:device0\/in_humidityrelative_input\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>forgot to add the file itself<commit_after>\/\/! 
pseudo authenticator for use with plain access tokens\nuse crate::error::Error;\nuse crate::types::TokenInfo;\nuse hyper::client::connect::Connection;\n\/\/ use hyper::header;\nuse http::Uri;\n\/\/ use serde::{Deserialize, Serialize};\nuse std::error::Error as StdError;\nuse tokio::io::{AsyncRead, AsyncWrite};\nuse tower_service::Service;\n\/\/ use url::form_urlencoded;\n\n\/\/\/ the flow for the access token authenticator\npub struct AccessTokenFlow {\n pub(crate) access_token: String,\n}\n\nimpl AccessTokenFlow {\n \/\/\/ just return the access token\n pub(crate) async fn token<S, T>(\n &self,\n _hyper_client: &hyper::Client<S>,\n _scopes: &[T],\n ) -> Result<TokenInfo, Error>\n where\n T: AsRef<str>,\n S: Service<Uri> + Clone + Send + Sync + 'static,\n S::Response: Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static,\n S::Future: Send + Unpin + 'static,\n S::Error: Into<Box<dyn StdError + Send + Sync>>,\n {\n Ok(TokenInfo {\n access_token: self.access_token.clone(),\n refresh_token: None,\n expires_at: None,\n id_token: None,\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use quote::{ToTokens, Tokens};\nuse syn::{Field, Meta, NestedMeta};\n\nuse api::strip_serde_attrs;\n\n#[derive(Debug)]\npub struct Response {\n fields: Vec<ResponseField>,\n}\n\nimpl Response {\n pub fn has_body_fields(&self) -> bool {\n self.fields.iter().any(|field| field.is_body())\n }\n\n pub fn has_fields(&self) -> bool {\n self.fields.len() != 0\n }\n\n pub fn has_header_fields(&self) -> bool {\n self.fields.iter().any(|field| field.is_header())\n }\n\n pub fn init_fields(&self) -> Tokens {\n let mut tokens = Tokens::new();\n\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::Body(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n\n tokens.append(quote! {\n #field_name: response_body.#field_name,\n });\n }\n ResponseField::Header(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n let field_type = &field.ty;\n\n tokens.append(quote! {\n #field_name: headers.remove::<#field_type>()\n .expect(\"missing expected request header\"),\n });\n }\n ResponseField::NewtypeBody(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n\n tokens.append(quote! 
{\n #field_name: response_body,\n });\n }\n }\n }\n\n tokens\n }\n\n pub fn newtype_body_field(&self) -> Option<&Field> {\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::NewtypeBody(ref field) => {\n\n return Some(field);\n }\n _ => continue,\n }\n }\n\n None\n }\n\n}\n\nimpl From<Vec<Field>> for Response {\n fn from(fields: Vec<Field>) -> Self {\n let mut has_newtype_body = false;\n\n let response_fields = fields.into_iter().map(|mut field| {\n let mut response_field_kind = ResponseFieldKind::Body;\n\n field.attrs = field.attrs.into_iter().filter(|attr| {\n let (attr_ident, nested_meta_items) = match attr.value {\n Meta::List(ref attr_ident, ref nested_meta_items) => {\n (attr_ident, nested_meta_items)\n }\n _ => return true,\n };\n\n if attr_ident != \"ruma_api\" {\n return true;\n }\n\n for nested_meta_item in nested_meta_items {\n match *nested_meta_item {\n NestedMeta::Meta(ref meta_item) => {\n match *meta_item {\n Meta::Word(ref ident) => {\n if ident == \"body\" {\n has_newtype_body = true;\n response_field_kind = ResponseFieldKind::NewtypeBody;\n } else if ident == \"header\" {\n response_field_kind = ResponseFieldKind::Header;\n } else {\n panic!(\n \"ruma_api! attribute meta item on responses must be: header\"\n );\n }\n }\n _ => panic!(\n \"ruma_api! attribute meta item on requests cannot be a list or name\/value pair\"\n ),\n }\n }\n NestedMeta::Literal(_) => panic!(\n \"ruma_api! attribute meta item on responses must be: header\"\n ),\n }\n }\n\n false\n }).collect();\n\n match response_field_kind {\n ResponseFieldKind::Body => {\n if has_newtype_body {\n panic!(\"ruma_api! responses cannot have both normal body fields and a newtype body field\");\n } else {\n return ResponseField::Body(field);\n }\n }\n ResponseFieldKind::Header => ResponseField::Header(field),\n ResponseFieldKind::NewtypeBody => ResponseField::NewtypeBody(field),\n }\n }).collect();\n\n Response {\n fields: response_fields,\n }\n }\n}\n\nimpl ToTokens for Response {\n fn to_tokens(&self, mut tokens: &mut Tokens) {\n tokens.append(quote! {\n \/\/\/ Data in the response from this API endpoint.\n #[derive(Debug)]\n pub struct Response\n });\n\n if self.fields.len() == 0 {\n tokens.append(\";\");\n } else {\n tokens.append(\"{\");\n\n for response_field in self.fields.iter() {\n strip_serde_attrs(response_field.field()).to_tokens(&mut tokens);\n\n tokens.append(\",\");\n }\n\n tokens.append(\"}\");\n }\n\n if let Some(newtype_body_field) = self.newtype_body_field() {\n let mut field = newtype_body_field.clone();\n\n field.ident = None;\n\n tokens.append(quote! {\n \/\/\/ Data in the response body.\n #[derive(Debug, Deserialize)]\n struct ResponseBody\n });\n\n tokens.append(\"(\");\n\n field.to_tokens(&mut tokens);\n\n tokens.append(\");\");\n } else if self.has_body_fields() {\n tokens.append(quote! 
{\n \/\/\/ Data in the response body.\n #[derive(Debug, Deserialize)]\n struct ResponseBody\n });\n\n tokens.append(\"{\");\n\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::Body(ref field) => {\n field.to_tokens(&mut tokens);\n\n tokens.append(\",\");\n }\n _ => {}\n }\n }\n\n tokens.append(\"}\");\n }\n }\n}\n\n#[derive(Debug)]\npub enum ResponseField {\n Body(Field),\n Header(Field),\n NewtypeBody(Field),\n}\n\nimpl ResponseField {\n fn field(&self) -> &Field {\n match *self {\n ResponseField::Body(ref field) => field,\n ResponseField::Header(ref field) => field,\n ResponseField::NewtypeBody(ref field) => field,\n }\n }\n\n fn is_body(&self) -> bool {\n match *self {\n ResponseField::Body(_) => true,\n _ => false,\n }\n }\n\n fn is_header(&self) -> bool {\n match *self {\n ResponseField::Header(_) => true,\n _ => false,\n }\n }\n}\n\nenum ResponseFieldKind {\n Body,\n Header,\n NewtypeBody,\n}\n<commit_msg>ExprStruct --> Response<commit_after>use quote::{ToTokens, Tokens};\nuse syn::{ExprStruct, Field, FieldValue, FieldsNamed, Meta, NestedMeta};\n\nuse api::strip_serde_attrs;\n\npub struct Response {\n fields: Vec<ResponseField>,\n}\n\nimpl Response {\n pub fn has_body_fields(&self) -> bool {\n self.fields.iter().any(|field| field.is_body())\n }\n\n pub fn has_fields(&self) -> bool {\n self.fields.len() != 0\n }\n\n pub fn has_header_fields(&self) -> bool {\n self.fields.iter().any(|field| field.is_header())\n }\n\n pub fn init_fields(&self) -> Tokens {\n let mut tokens = Tokens::new();\n\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::Body(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n\n tokens.append(quote! {\n #field_name: response_body.#field_name,\n });\n }\n ResponseField::Header(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n let field_type = &field.ty;\n\n tokens.append(quote! {\n #field_name: headers.remove::<#field_type>()\n .expect(\"missing expected request header\"),\n });\n }\n ResponseField::NewtypeBody(ref field) => {\n let field_name = field.ident.as_ref()\n .expect(\"expected body field to have a name\");\n\n tokens.append(quote! {\n #field_name: response_body,\n });\n }\n }\n }\n\n tokens\n }\n\n pub fn newtype_body_field(&self) -> Option<&Field> {\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::NewtypeBody(ref field) => {\n\n return Some(field);\n }\n _ => continue,\n }\n }\n\n None\n }\n\n}\n\nimpl From<ExprStruct> for Response {\n fn from(expr: ExprStruct) -> Self {\n let mut has_newtype_body = false;\n\n let fields = expr.fields.into_iter().map(|mut field_value| {\n let mut field_kind = ResponseFieldKind::Body;\n\n field_value.attrs = field_value.attrs.into_iter().filter(|attr| {\n let meta = attr.interpret_meta()\n .expect(\"ruma_api! could not parse response field attributes\");\n\n let Meta::List(meta_list) = meta;\n\n if meta_list.ident.as_ref() != \"ruma_api\" {\n return true;\n }\n\n for nested_meta_item in meta_list.nested {\n match nested_meta_item {\n NestedMeta::Meta(meta_item) => {\n match meta_item {\n Meta::Word(ident) => {\n match ident.as_ref() {\n \"body\" => {\n has_newtype_body = true;\n field_kind = ResponseFieldKind::NewtypeBody;\n }\n \"header\" => field_kind = ResponseFieldKind::Header,\n _ => panic!(\n \"ruma_api! 
attribute meta item on responses must be: header\"\n ),\n }\n }\n _ => panic!(\n \"ruma_api! attribute meta item on responses cannot be a list or name\/value pair\"\n ),\n }\n }\n NestedMeta::Literal(_) => panic!(\n \"ruma_api! attribute meta item on responses must be: header\"\n ),\n }\n }\n\n false\n }).collect();\n\n match field_kind {\n ResponseFieldKind::Body => {\n if has_newtype_body {\n panic!(\"ruma_api! responses cannot have both normal body fields and a newtype body field\");\n } else {\n return ResponseField::Body(field_value);\n }\n }\n ResponseFieldKind::Header => ResponseField::Header(field_value),\n ResponseFieldKind::NewtypeBody => ResponseField::NewtypeBody(field_value),\n }\n }).collect();\n\n Response {\n fields,\n }\n }\n}\n\nimpl ToTokens for Response {\n fn to_tokens(&self, mut tokens: &mut Tokens) {\n tokens.append(quote! {\n \/\/\/ Data in the response from this API endpoint.\n #[derive(Debug)]\n pub struct Response\n });\n\n if self.fields.len() == 0 {\n tokens.append(\";\");\n } else {\n tokens.append(\"{\");\n\n for response_field in self.fields.iter() {\n strip_serde_attrs(response_field.field()).to_tokens(&mut tokens);\n\n tokens.append(\",\");\n }\n\n tokens.append(\"}\");\n }\n\n if let Some(newtype_body_field) = self.newtype_body_field() {\n let mut field = newtype_body_field.clone();\n\n field.ident = None;\n\n tokens.append(quote! {\n \/\/\/ Data in the response body.\n #[derive(Debug, Deserialize)]\n struct ResponseBody\n });\n\n tokens.append(\"(\");\n\n field.to_tokens(&mut tokens);\n\n tokens.append(\");\");\n } else if self.has_body_fields() {\n tokens.append(quote! {\n \/\/\/ Data in the response body.\n #[derive(Debug, Deserialize)]\n struct ResponseBody\n });\n\n tokens.append(\"{\");\n\n for response_field in self.fields.iter() {\n match *response_field {\n ResponseField::Body(ref field) => {\n field.to_tokens(&mut tokens);\n\n tokens.append(\",\");\n }\n _ => {}\n }\n }\n\n tokens.append(\"}\");\n }\n }\n}\n\npub enum ResponseField {\n Body(FieldValue),\n Header(FieldValue),\n NewtypeBody(FieldValue),\n}\n\nimpl ResponseField {\n fn field(&self) -> &FieldValue {\n match *self {\n ResponseField::Body(ref field) => field,\n ResponseField::Header(ref field) => field,\n ResponseField::NewtypeBody(ref field) => field,\n }\n }\n\n fn is_body(&self) -> bool {\n match *self {\n ResponseField::Body(_) => true,\n _ => false,\n }\n }\n\n fn is_header(&self) -> bool {\n match *self {\n ResponseField::Header(_) => true,\n _ => false,\n }\n }\n}\n\nenum ResponseFieldKind {\n Body,\n Header,\n NewtypeBody,\n}\n<|endoftext|>"} {"text":"<commit_before>#![cfg(feature = \"cognito-sync\")]\n\nextern crate rusoto_core;\nextern crate rusoto_cognito_sync;\n\nuse rusoto_cognito_sync::{CognitoSync, CognitoSyncClient, ListIdentityPoolUsageRequest};\nuse rusoto_core::{DefaultCredentialsProvider, Region, default_tls_client};\n\n#[test]\nfn should_list_identity_pool_usage() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = CognitoSyncClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);\n let request = ListIdentityPoolUsageRequest::default();\n\n let result = client.list_identity_pool_usage(&request).unwrap();\n\tprintln!(\"{:#?}\", result);\n}<commit_msg>Fix whitespace issue in test.<commit_after>#![cfg(feature = \"cognito-sync\")]\n\nextern crate rusoto_core;\nextern crate rusoto_cognito_sync;\n\nuse rusoto_cognito_sync::{CognitoSync, CognitoSyncClient, ListIdentityPoolUsageRequest};\nuse 
rusoto_core::{DefaultCredentialsProvider, Region, default_tls_client};\n\n#[test]\nfn should_list_identity_pool_usage() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = CognitoSyncClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);\n let request = ListIdentityPoolUsageRequest::default();\n\n let result = client.list_identity_pool_usage(&request).unwrap();\n println!(\"{:#?}\", result);\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Coherence phase\n\/\/\n\/\/ The job of the coherence phase of typechecking is to ensure that each trait\n\/\/ has at most one implementation for each type. Then we build a mapping from\n\/\/ each trait in the system to its implementations.\n\nimport middle::ty::{get, t, ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum};\nimport middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint};\nimport middle::ty::{ty_float, ty_str, ty_estr, ty_vec, ty_evec, ty_rec};\nimport middle::ty::{ty_fn, ty_trait, ty_tup, ty_var, ty_var_integral};\nimport middle::ty::{ty_param, ty_self, ty_constr, ty_type, ty_opaque_box};\nimport middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, new_ty_hash};\nimport middle::ty::{subst};\nimport middle::typeck::infer::{infer_ctxt, mk_eqty, new_infer_ctxt};\nimport syntax::ast::{crate, def_id, item, item_impl, method, region_param};\nimport syntax::ast::{trait_ref};\nimport syntax::ast_util::{def_id_of_def, new_def_hash};\nimport syntax::visit::{default_simple_visitor, mk_simple_visitor};\nimport syntax::visit::{visit_crate};\n\nimport dvec::{dvec, extensions};\nimport result::{extensions};\nimport std::map::hashmap;\nimport uint::range;\n\nclass CoherenceInfo {\n \/\/ Contains implementations of methods that are inherent to a type.\n \/\/ Methods in these implementations don't need to be exported.\n let inherent_methods: hashmap<t,@dvec<@item>>;\n\n \/\/ Contains implementations of methods associated with a trait. For these,\n \/\/ the associated trait must be imported at the call site.\n let extension_methods: hashmap<def_id,@dvec<@item>>;\n\n new() {\n self.inherent_methods = new_ty_hash();\n self.extension_methods = new_def_hash();\n }\n}\n\nclass CoherenceChecker {\n let crate_context: @crate_ctxt;\n let inference_context: infer_ctxt;\n let info: @CoherenceInfo;\n\n new(crate_context: @crate_ctxt) {\n self.crate_context = crate_context;\n self.inference_context = new_infer_ctxt(crate_context.tcx);\n self.info = @CoherenceInfo();\n }\n\n fn check_coherence(crate: @crate) {\n \/\/ Check implementations. 
This populates the tables containing the\n \/\/ inherent methods and extension methods.\n\n visit_crate(*crate, (), mk_simple_visitor(@{\n visit_item: |item| {\n alt item.node {\n item_impl(_, _, associated_trait, self_type, _) {\n self.check_implementation(item, associated_trait);\n }\n _ {\n \/\/ Nothing to do.\n }\n };\n }\n with *default_simple_visitor()\n }));\n\n \/\/ Check trait coherence.\n for self.info.extension_methods.each |def_id, items| {\n self.check_implementation_coherence(def_id, items);\n }\n }\n\n fn check_implementation(item: @item,\n optional_associated_trait: option<@trait_ref>) {\n\n let self_type = self.crate_context.tcx.tcache.get(local_def(item.id));\n alt optional_associated_trait {\n none {\n alt self.get_base_type(self_type.ty) {\n none {\n let session = self.crate_context.tcx.sess;\n session.span_warn(item.span,\n \"no base type found for inherent \\\n implementation; implement a trait \\\n instead\");\n }\n some(base_type) {\n let implementation_list;\n alt self.info.inherent_methods.find(base_type) {\n none {\n implementation_list = @dvec();\n }\n some(existing_implementation_list) {\n implementation_list =\n existing_implementation_list;\n }\n }\n\n implementation_list.push(item);\n }\n }\n }\n some(associated_trait) {\n let def =\n self.crate_context.tcx.def_map.get(associated_trait.id);\n let def_id = def_id_of_def(def);\n\n let implementation_list;\n alt self.info.extension_methods.find(def_id) {\n none {\n implementation_list = @dvec();\n }\n some(existing_implementation_list) {\n implementation_list = existing_implementation_list;\n }\n }\n\n implementation_list.push(item);\n }\n }\n }\n\n fn get_base_type(original_type: t) -> option<t> {\n alt get(original_type).struct {\n ty_box(base_mutability_and_type) |\n ty_uniq(base_mutability_and_type) |\n ty_ptr(base_mutability_and_type) |\n ty_rptr(_, base_mutability_and_type) {\n self.get_base_type(base_mutability_and_type.ty)\n }\n\n ty_enum(*) | ty_class(*) {\n some(original_type)\n }\n\n ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |\n ty_str | ty_estr(*) | ty_vec(*) | ty_evec(*) | ty_rec(*) |\n ty_fn(*) | ty_trait(*) | ty_tup(*) | ty_var(*) |\n ty_var_integral(*) | ty_param(*) | ty_self | ty_constr(*) |\n ty_type | ty_opaque_box | ty_opaque_closure_ptr(*) |\n ty_unboxed_vec(*) {\n none\n }\n }\n }\n\n fn check_implementation_coherence(_trait_def_id: def_id,\n implementations: @dvec<@item>) {\n\n \/\/ Unify pairs of polytypes.\n for implementations.eachi |i, implementation_a| {\n let polytype_a =\n self.get_self_type_for_implementation(implementation_a);\n for range(i + 1, implementations.len()) |j| {\n let implementation_b = implementations.get_elt(j);\n let polytype_b =\n self.get_self_type_for_implementation(implementation_b);\n\n if self.polytypes_unify(polytype_a, polytype_b) {\n let session = self.crate_context.tcx.sess;\n session.span_err(implementation_b.span,\n \"conflicting implementations for a \\\n trait\");\n session.span_note(implementation_a.span,\n \"note conflicting implementation here\");\n }\n }\n }\n }\n\n fn polytypes_unify(polytype_a: ty_param_bounds_and_ty,\n polytype_b: ty_param_bounds_and_ty)\n -> bool {\n\n let monotype_a = self.universally_quantify_polytype(polytype_a);\n let monotype_b = self.universally_quantify_polytype(polytype_b);\n ret mk_eqty(self.inference_context, monotype_a, monotype_b).is_ok();\n }\n\n \/\/ Converts a polytype to a monotype by replacing all parameters with\n \/\/ type variables.\n fn universally_quantify_polytype(polytype: 
ty_param_bounds_and_ty) -> t {\n let self_region;\n alt polytype.rp {\n ast::rp_none {\n self_region = none;\n }\n ast::rp_self {\n self_region = some(self.inference_context.next_region_var())\n }\n };\n\n let bounds_count = polytype.bounds.len();\n let type_parameters =\n self.inference_context.next_ty_vars(bounds_count);\n\n let substitutions = {\n self_r: self_region,\n self_ty: none,\n tps: type_parameters\n };\n\n ret subst(self.crate_context.tcx, substitutions, polytype.ty);\n }\n\n fn get_self_type_for_implementation(implementation: @item)\n -> ty_param_bounds_and_ty {\n\n alt implementation.node {\n item_impl(*) {\n let def = local_def(implementation.id);\n ret self.crate_context.tcx.tcache.get(def);\n }\n _ {\n self.crate_context.tcx.sess.span_bug(implementation.span,\n \"not an implementation\");\n }\n }\n }\n}\n\nfn check_coherence(crate_context: @crate_ctxt, crate: @crate) {\n CoherenceChecker(crate_context).check_coherence(crate);\n}\n\n<commit_msg>rustc: Add some more checks to coherence<commit_after>\/\/ Coherence phase\n\/\/\n\/\/ The job of the coherence phase of typechecking is to ensure that each trait\n\/\/ has at most one implementation for each type. Then we build a mapping from\n\/\/ each trait in the system to its implementations.\n\nimport middle::ty::{get, t, ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum};\nimport middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint};\nimport middle::ty::{ty_float, ty_str, ty_estr, ty_vec, ty_evec, ty_rec};\nimport middle::ty::{ty_fn, ty_trait, ty_tup, ty_var, ty_var_integral};\nimport middle::ty::{ty_param, ty_self, ty_constr, ty_type, ty_opaque_box};\nimport middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, new_ty_hash};\nimport middle::ty::{subst};\nimport middle::typeck::infer::{infer_ctxt, mk_subty, new_infer_ctxt};\nimport syntax::ast::{crate, def_id, item, item_class, item_const, item_enum};\nimport syntax::ast::{item_fn, item_foreign_mod, item_impl, item_mac};\nimport syntax::ast::{item_mod, item_trait, item_ty, local_crate, method};\nimport syntax::ast::{node_id, region_param, rp_none, rp_self, trait_ref};\nimport syntax::ast_util::{def_id_of_def, new_def_hash};\nimport syntax::visit::{default_simple_visitor, default_visitor};\nimport syntax::visit::{mk_simple_visitor, mk_vt, visit_crate, visit_item};\nimport syntax::visit::{visit_mod};\nimport util::ppaux::ty_to_str;\n\nimport dvec::{dvec, extensions};\nimport result::{extensions};\nimport std::map::{hashmap, int_hash};\nimport uint::range;\n\nclass CoherenceInfo {\n \/\/ Contains implementations of methods that are inherent to a type.\n \/\/ Methods in these implementations don't need to be exported.\n let inherent_methods: hashmap<t,@dvec<@item>>;\n\n \/\/ Contains implementations of methods associated with a trait. For these,\n \/\/ the associated trait must be imported at the call site.\n let extension_methods: hashmap<def_id,@dvec<@item>>;\n\n new() {\n self.inherent_methods = new_ty_hash();\n self.extension_methods = new_def_hash();\n }\n}\n\nclass CoherenceChecker {\n let crate_context: @crate_ctxt;\n let inference_context: infer_ctxt;\n let info: @CoherenceInfo;\n\n \/\/ A mapping from implementations to the corresponding base type\n \/\/ definition ID.\n let base_type_def_ids: hashmap<node_id,def_id>;\n\n \/\/ A set of implementations in privileged scopes; i.e. 
those\n \/\/ implementations that are defined in the same scope as their base types.\n let privileged_implementations: hashmap<node_id,()>;\n\n \/\/ The set of types that we are currently in the privileged scope of. This\n \/\/ is used while we traverse the AST while checking privileged scopes.\n let privileged_types: hashmap<def_id,()>;\n\n new(crate_context: @crate_ctxt) {\n self.crate_context = crate_context;\n self.inference_context = new_infer_ctxt(crate_context.tcx);\n self.info = @CoherenceInfo();\n\n self.base_type_def_ids = int_hash();\n self.privileged_implementations = int_hash();\n self.privileged_types = new_def_hash();\n }\n\n fn check_coherence(crate: @crate) {\n \/\/ Check implementations. This populates the tables containing the\n \/\/ inherent methods and extension methods.\n\n visit_crate(*crate, (), mk_simple_visitor(@{\n visit_item: |item| {\n alt item.node {\n item_impl(_, _, associated_trait, self_type, _) {\n self.check_implementation(item, associated_trait);\n }\n _ {\n \/\/ Nothing to do.\n }\n };\n }\n with *default_simple_visitor()\n }));\n\n \/\/ Check trait coherence.\n for self.info.extension_methods.each |def_id, items| {\n self.check_implementation_coherence(def_id, items);\n }\n\n \/\/ Check whether traits with base types are in privileged scopes.\n self.check_privileged_scopes(crate);\n }\n\n fn check_implementation(item: @item,\n optional_associated_trait: option<@trait_ref>) {\n\n let self_type = self.crate_context.tcx.tcache.get(local_def(item.id));\n alt optional_associated_trait {\n none {\n alt self.get_base_type(self_type.ty) {\n none {\n let session = self.crate_context.tcx.sess;\n session.span_warn(item.span,\n \"no base type found for inherent \\\n implementation; implement a trait \\\n instead\");\n }\n some(base_type) {\n let implementation_list;\n alt self.info.inherent_methods.find(base_type) {\n none {\n implementation_list = @dvec();\n }\n some(existing_implementation_list) {\n implementation_list =\n existing_implementation_list;\n }\n }\n\n implementation_list.push(item);\n }\n }\n }\n some(associated_trait) {\n let def =\n self.crate_context.tcx.def_map.get(associated_trait.id);\n let def_id = def_id_of_def(def);\n\n let implementation_list;\n alt self.info.extension_methods.find(def_id) {\n none {\n implementation_list = @dvec();\n }\n some(existing_implementation_list) {\n implementation_list = existing_implementation_list;\n }\n }\n\n implementation_list.push(item);\n }\n }\n\n \/\/ Add the implementation to the mapping from implementation to base\n \/\/ type def ID, if there is a base type for this implementation.\n alt self.get_base_type_def_id(self_type.ty) {\n none {\n \/\/ Nothing to do.\n }\n some(base_type_def_id) {\n self.base_type_def_ids.insert(item.id, base_type_def_id);\n }\n }\n }\n\n fn get_base_type(original_type: t) -> option<t> {\n alt get(original_type).struct {\n ty_box(base_mutability_and_type) |\n ty_uniq(base_mutability_and_type) |\n ty_ptr(base_mutability_and_type) |\n ty_rptr(_, base_mutability_and_type) {\n self.get_base_type(base_mutability_and_type.ty)\n }\n\n ty_enum(*) | ty_class(*) {\n some(original_type)\n }\n\n ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |\n ty_str | ty_estr(*) | ty_vec(*) | ty_evec(*) | ty_rec(*) |\n ty_fn(*) | ty_trait(*) | ty_tup(*) | ty_var(*) |\n ty_var_integral(*) | ty_param(*) | ty_self | ty_constr(*) |\n ty_type | ty_opaque_box | ty_opaque_closure_ptr(*) |\n ty_unboxed_vec(*) {\n none\n }\n }\n }\n\n \/\/ Returns the def ID of the base type.\n fn 
get_base_type_def_id(original_type: t) -> option<def_id> {\n alt self.get_base_type(original_type) {\n none {\n ret none;\n }\n some(base_type) {\n alt get(base_type).struct {\n ty_enum(def_id, _) | ty_class(def_id, _) {\n ret some(def_id);\n }\n _ {\n fail \"get_base_type() returned a type that wasn't an \\\n enum or class\";\n }\n }\n }\n }\n }\n\n fn check_implementation_coherence(_trait_def_id: def_id,\n implementations: @dvec<@item>) {\n\n \/\/ Unify pairs of polytypes.\n for implementations.eachi |i, implementation_a| {\n let polytype_a =\n self.get_self_type_for_implementation(implementation_a);\n for range(i + 1, implementations.len()) |j| {\n let implementation_b = implementations.get_elt(j);\n let polytype_b =\n self.get_self_type_for_implementation(implementation_b);\n\n if self.polytypes_unify(polytype_a, polytype_b) {\n let session = self.crate_context.tcx.sess;\n session.span_err(implementation_b.span,\n \"conflicting implementations for a \\\n trait\");\n session.span_note(implementation_a.span,\n \"note conflicting implementation here\");\n }\n }\n }\n }\n\n fn polytypes_unify(polytype_a: ty_param_bounds_and_ty,\n polytype_b: ty_param_bounds_and_ty)\n -> bool {\n\n let monotype_a = self.universally_quantify_polytype(polytype_a);\n let monotype_b = self.universally_quantify_polytype(polytype_b);\n ret mk_subty(self.inference_context, monotype_a, monotype_b).is_ok()\n || mk_subty(self.inference_context, monotype_b, monotype_a).is_ok();\n }\n\n \/\/ Converts a polytype to a monotype by replacing all parameters with\n \/\/ type variables.\n fn universally_quantify_polytype(polytype: ty_param_bounds_and_ty) -> t {\n let self_region;\n alt polytype.rp {\n rp_none {\n self_region = none;\n }\n rp_self {\n self_region = some(self.inference_context.next_region_var())\n }\n };\n\n let bounds_count = polytype.bounds.len();\n let type_parameters =\n self.inference_context.next_ty_vars(bounds_count);\n\n let substitutions = {\n self_r: self_region,\n self_ty: none,\n tps: type_parameters\n };\n\n ret subst(self.crate_context.tcx, substitutions, polytype.ty);\n }\n\n fn get_self_type_for_implementation(implementation: @item)\n -> ty_param_bounds_and_ty {\n\n alt implementation.node {\n item_impl(*) {\n let def = local_def(implementation.id);\n ret self.crate_context.tcx.tcache.get(def);\n }\n _ {\n self.crate_context.tcx.sess.span_bug(implementation.span,\n \"not an implementation\");\n }\n }\n }\n\n \/\/ Privileged scope checking\n\n fn check_privileged_scopes(crate: @crate) {\n visit_crate(*crate, (), mk_vt(@{\n visit_item: |item, _context, visitor| {\n alt item.node {\n item_mod(module) {\n \/\/ First, gather up all privileged types.\n let privileged_types =\n self.gather_privileged_types(module.items);\n for privileged_types.each |privileged_type| {\n #debug(\"(checking privileged scopes) entering \\\n privileged scope of %d:%d\",\n privileged_type.crate,\n privileged_type.node);\n\n self.privileged_types.insert(privileged_type, ());\n }\n\n \/\/ Then visit the module items.\n visit_mod(module, item.span, item.id, (), visitor);\n\n \/\/ Finally, remove privileged types from the map.\n for privileged_types.each |privileged_type| {\n self.privileged_types.remove(privileged_type);\n }\n }\n item_impl(_, _, optional_trait_ref, _, _) {\n alt self.base_type_def_ids.find(item.id) {\n none {\n \/\/ Nothing to do.\n }\n some(base_type_def_id) {\n \/\/ Check to see whether the implementation is\n \/\/ in the scope of its base type.\n\n let privileged_types = &self.privileged_types;\n if 
privileged_types.\n contains_key(base_type_def_id) {\n\n \/\/ Record that this implementation is OK.\n self.privileged_implementations.insert\n (item.id, ());\n } else {\n \/\/ This implementation is not in scope of\n \/\/ its base type. This still might be OK\n \/\/ if the trait is defined in the same\n \/\/ crate.\n\n alt optional_trait_ref {\n none {\n \/\/ There is no trait to implement,\n \/\/ so this is an error.\n\n let session =\n self.crate_context.tcx.sess;\n session.span_warn(item.span,\n \"cannot \\\n implement \\\n inherent \\\n methods for a \\\n type outside \\\n the scope the \\\n type was \\\n defined in; \\\n define and \\\n implement a \\\n trait \\\n instead\");\n }\n some(trait_ref) {\n \/\/ This is OK if and only if the\n \/\/ trait was defined in this\n \/\/ crate.\n\n let def_map = self.crate_context\n .tcx.def_map;\n let trait_def =\n def_map.get(trait_ref.id);\n let trait_id =\n def_id_of_def(trait_def);\n if trait_id.crate != local_crate {\n let session = self\n .crate_context.tcx.sess;\n session.span_warn(item.span,\n \"cannot \\\n provide \\\n an \\\n extension \\\n implement\\\n ation \\\n for a \\\n trait not \\\n defined \\\n in this \\\n crate\");\n }\n }\n }\n }\n }\n }\n\n visit_item(item, (), visitor);\n }\n _ {\n visit_item(item, (), visitor);\n }\n }\n }\n with *default_visitor()\n }));\n }\n\n fn gather_privileged_types(items: ~[@item]) -> @dvec<def_id> {\n let results = @dvec();\n for items.each |item| {\n alt item.node {\n item_class(*) | item_enum(*) {\n results.push(local_def(item.id));\n }\n\n item_const(*) | item_fn(*) | item_mod(*) |\n item_foreign_mod(*) | item_ty(*) | item_trait(*) |\n item_impl(*) | item_mac(*) {\n \/\/ Nothing to do.\n }\n }\n }\n\n ret results;\n }\n}\n\nfn check_coherence(crate_context: @crate_ctxt, crate: @crate) {\n CoherenceChecker(crate_context).check_coherence(crate);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix button click target<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #13407<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nmod A {\n struct C;\n}\n\nfn main() {\n A::C = 1;\n \/\/~^ ERROR: illegal left-hand side expression\n \/\/~| ERROR: mismatched types\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add regression test for #70217<commit_after>\/\/ check-pass\n#![feature(const_generics)]\n#![allow(incomplete_features)]\n\nstruct Struct<const N: usize>;\n\nimpl<const N: usize> Struct<N> {\n fn method<const M: usize>(&self) {}\n}\n\nfn test<const N: usize, const M: usize>(x: Struct<N>) {\n Struct::<N>::method::<M>(&x);\n x.method::<N>();\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Document zrange<commit_after><|endoftext|>"} {"text":"<commit_before>extern mod glfw;\n\nfn main() {\n \/\/ Run this task on the main thread. 
Unlike C or C++, a Rust program\n \/\/ automatically starts a new thread, so this line is _essential_ to ensure\n \/\/ that the OS is able to update the window and recieve events from the user.\n do task::task().sched_mode(task::PlatformThread).spawn {\n \n glfw::set_error_callback(error_callback);\n \n if !glfw::init() {\n glfw::terminate();\n die!(~\"Failed to initialize GLFW\\n\");\n }\n \n let window =\n match glfw::Window::create(300, 300, \"Hello this is window\", glfw::Windowed) {\n Some(w) => { w }\n None => {\n glfw::terminate();\n die!(~\"Failed to open GLFW window\");\n }\n };\n \n window.make_context_current();\n \n let mut done = false;\n \n while !done {\n glfw::poll_events();\n if (window.get_key(glfw::KEY_ESC) == glfw::PRESS || window.get_param(glfw::SHOULD_CLOSE) != 0) {\n done = true;\n }\n }\n \n window.destroy();\n glfw::terminate();\n }\n}\n\nfn error_callback(_error: libc::c_int, description: ~str) {\n io::println(fmt!(\"GLFW Error: %s\", description));\n}<commit_msg>Improve manual-init example<commit_after>extern mod glfw;\n\nfn main() {\n \/\/ Run this task on the main thread. Unlike C or C++, a Rust program\n \/\/ automatically starts a new thread, so this line is _essential_ to ensure\n \/\/ that the OS is able to update the window and recieve events from the user.\n do task::task().sched_mode(task::PlatformThread).spawn {\n use core::private::finally::Finally;\n \n do (|| {\n \n glfw::set_error_callback(error_callback);\n \n if !glfw::init() {\n die!(~\"Failed to initialize GLFW\\n\");\n }\n \n let window =\n match glfw::Window::create(300, 300, \"Hello this is window\", glfw::Windowed) {\n Some(w) => w,\n None => die!(~\"Failed to open GLFW window\")\n };\n \n window.make_context_current();\n \n let mut done = false;\n \n while !done {\n glfw::poll_events();\n if (window.get_key(glfw::KEY_ESC) == glfw::PRESS || window.get_param(glfw::SHOULD_CLOSE) != 0) {\n done = true;\n }\n }\n \n window.destroy();\n \n }).finally {\n glfw::terminate(); \/\/ terminate glfw on completion\n }\n }\n}\n\nfn error_callback(_error: libc::c_int, description: ~str) {\n io::println(fmt!(\"GLFW Error: %s\", description));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>alloc memory from top to down<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\/ A module for permission units\npub mod permission_unit;\n\/\/\/ A module for checking string (wildcard) matches\npub mod str_match;\n\/\/\/ A module for permissions\npub mod permission;\n\n#[test]\nfn test() {\n use str_match::*;\n use permission_unit::*;\n use permission::*;\n \/\/ Test string matches (wildcard chars)\n assert!(str_match(\"hey*hey\", \"heyabchey\"));\n assert!(str_match(\"hey\\\\*hey*\", \"hey*heycatsarefunny\"));\n \/\/ Test permission units\n assert!(PermissionUnit::from_str(\"rw=hey\").read());\n assert!(!PermissionUnit::from_str(\"r=hey\").write());\n assert!(PermissionUnit::from_str(\"r=hey\").read_foc());\n assert!(PermissionUnit::from_str(\"r=file:home\/*\").applies(&PermissionUnit::from_str(\"R=file:home\/lal\")));\n assert!(PermissionUnit::from_str(\"R=file:home\/lal\").read_foc);\n assert!(PermissionUnit::from_str(\"r=file:home\/lal\").read_foc());\n assert!(!PermissionUnit::from_str(\"RW=http:*\").read());\n \/\/ Test permissions\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/lal\")));\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/veryimportant\")));\n 
assert!(Permission::from_str(\"rw=i\\\\+can\\\\+do\\\\+like\\\\+this\").test(PermissionUnit::from_str(\"rw=file:i+can+do+like+this\")));\n \/\/ assert!(!Permission::from_str(\"RW=http:*+\").test(PermissionUnit::from_str(\"rw=http:\/\/google.com\")));\n \/\/ TODO: Failes when using uppercase RW\n}\n<commit_msg>It works! TODO: Focus<commit_after>\/\/\/ A module for permission units\npub mod permission_unit;\n\/\/\/ A module for checking string (wildcard) matches\npub mod str_match;\n\/\/\/ A module for permissions\npub mod permission;\n\n#[test]\nfn test() {\n use str_match::*;\n use permission_unit::*;\n use permission::*;\n \/\/ Test string matches (wildcard chars)\n assert!(str_match(\"hey*hey\", \"heyabchey\"));\n assert!(str_match(\"hey\\\\*hey*\", \"hey*heycatsarefunny\"));\n \/\/ Test permission units\n assert!(PermissionUnit::from_str(\"rw=hey\").read());\n assert!(!PermissionUnit::from_str(\"r=hey\").write());\n assert!(PermissionUnit::from_str(\"r=hey\").read_foc());\n assert!(PermissionUnit::from_str(\"r=file:home\/*\").applies(&PermissionUnit::from_str(\"R=file:home\/lal\")));\n assert!(PermissionUnit::from_str(\"R=file:home\/lal\").read_foc);\n assert!(PermissionUnit::from_str(\"r=file:home\/lal\").read_foc());\n assert!(!PermissionUnit::from_str(\"RW=http:*\").read());\n \/\/ Test permissions\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/lal\")));\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/veryimportant\")));\n assert!(Permission::from_str(\"rw=i\\\\+can\\\\+do\\\\+like\\\\+this\").test(PermissionUnit::from_str(\"rw=file:i+can+do+like+this\")));\n\n \/\/ assert!(!Permission::from_str(\"RW=http:*\").test(PermissionUnit::from_str(\"rw=http:\/\/google.com\")));\n \/\/ TODO: Failes when using uppercase RW\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] lib\/domain\/contact: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Calculate matches of qgram.<commit_after>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse std::num::{Int, UnsignedInt, NumCast, cast};\nuse std::slice;\n\nuse alphabets::{Alphabet, RankTransform};\n\n\nfn qgram_push<Q: UnsignedInt + NumCast>(qgram: &mut Q, a: u8) {\n *qgram = *qgram << 2;\n *qgram = *qgram | cast(a).unwrap();\n}\n\n\nstruct QGrams<'a, Q: UnsignedInt + NumCast> {\n text: slice::Iter<'a, u8>,\n qgram: Q,\n q: usize,\n ranks: RankTransform,\n}\n\n\nimpl<'a, Q: UnsignedInt + NumCast> QGrams<'a, Q> {\n pub fn new(q: usize, text: &'a [u8], alphabet: &Alphabet) -> Self {\n let ranks = RankTransform::new(alphabet);\n let mut qgram: Q = cast(0).unwrap();\n let mut qgrams = QGrams { text: text.iter(), qgram: qgram, q: q, ranks: ranks };\n for _ in 0..q-1 {\n qgrams.next();\n }\n\n qgrams\n }\n}\n\n\nimpl<'a, Q: UnsignedInt + NumCast> Iterator for QGrams<'a, Q> {\n type Item = Q;\n\n fn next(&mut self) -> Option<Q> {\n match self.text.next() {\n Some(a) => {\n qgram_push(&mut self.qgram, self.ranks.get(*a));\n Some(self.qgram)\n },\n None => None\n }\n }\n}\n\n\npub struct QGramIndex<'a> {\n q: usize,\n alphabet: &'a Alphabet,\n address: Vec<usize>,\n pos: Vec<usize>,\n}\n\n\nimpl<'a> QGramIndex<'a> {\n pub fn new(q: usize, text: &[u8], alphabet: &'a Alphabet) -> Self {\n let qgram_count = 
alphabet.len().pow(q as u32);\n let mut address = vec![0; qgram_count];\n let mut pos = vec![0; text.len()];\n\n for qgram in QGrams::<u32>::new(q, text, alphabet) {\n address[qgram as usize] += 1;\n }\n\n for i in 1..address.len() {\n address[i] += address[i - 1];\n }\n\n {\n let mut offset = vec![0; qgram_count];\n for (i, qgram) in QGrams::<u32>::new(q, text, alphabet).enumerate() {\n pos[address[qgram as usize] + offset[qgram as usize]] = i;\n offset[qgram as usize] += 1;\n }\n }\n\n QGramIndex { q: q, alphabet: alphabet, address: address, pos: pos }\n }\n\n pub fn matches(&self, qgram: u32) -> &[usize] {\n &self.pos[self.address[qgram as usize]..self.address[qgram as usize + 1]]\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update torrent logic<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a check-procs script<commit_after>\/\/! Check running processes\n\n#![allow(unused_variables, dead_code)]\n\nextern crate docopt;\nextern crate nix;\nextern crate regex;\nextern crate rustc_serialize;\n\nextern crate turbine_plugins;\n\nuse docopt::Docopt;\nuse nix::unistd::{getpid, getppid};\nuse regex::Regex;\n\nuse turbine_plugins::Status;\nuse turbine_plugins::procfs::RunningProcs;\nuse turbine_plugins::procfs::pid::Process;\n\n\nstatic USAGE: &'static str = \"\nUsage:\n check-procs <pattern> (--crit-under <N> | --crit-over <N>)\n check-procs -h | --help\n\nCheck that an expected number of processes are running.\n\nRequired Arguments:\n\n pattern Regex that command and its arguments must match\n\nOptions:\n -h, --help Show this message and exit\n --crit-under=<N> Error if there are fewer than this pattern procs\n --crit-over=<N> Error if there are more than this pattern procs\n\";\n\n#[derive(Debug, RustcDecodable)]\nstruct Args {\n arg_pattern: String,\n flag_crit_under: Option<usize>,\n flag_crit_over: Option<usize>\n}\n\nimpl Args {\n fn parse() -> Args {\n Docopt::new(USAGE)\n .and_then(|d| d.decode())\n .unwrap_or_else(|e| e.exit())\n }\n}\n\nfn main() {\n let args = Args::parse();\n let procs = RunningProcs::currently_running().unwrap();\n let re = Regex::new(&args.arg_pattern).unwrap_or_else(|e| {\n println!(\"{}\", e);\n Status::Critical.exit()\n });\n\n let me = getpid();\n let parent = getppid();\n\n let matches = procs.0.into_iter()\n .filter_map(\n |(pid, process): (i32, Process)|\n if re.is_match(&process.useful_cmdline()) &&\n !(pid == me || pid == parent) {\n Some((pid, process))\n } else {\n None\n })\n .collect::<Vec<(i32, Process)>>();\n\n let mut status = Status::Ok;\n if let Some(crit_over) = args.flag_crit_over {\n if matches.len() > crit_over {\n status = Status::Critical;\n println!(\"CRITICAL: there are {} process that match {:?} (greater than {})\",\n matches.len(), args.arg_pattern, crit_over);\n }\n };\n if let Some(crit_under) = args.flag_crit_under {\n if matches.len() < crit_under {\n status = Status::Critical;\n println!(\"CRITICAL: there are {} process that match {:?} (less than {})\",\n matches.len(), args.arg_pattern, crit_under);\n }\n }\n\n if status == Status::Ok {\n match (args.flag_crit_over, args.flag_crit_under) {\n (Some(o), Some(u)) => println!(\n \"OKAY: There are {} matching procs (between {} and {})\",\n matches.len(), o, u),\n (Some(o), None) => println!(\n \"OKAY: There are {} matching procs (less than {})\",\n matches.len(), o),\n (None, Some(u)) => println!(\n \"OKAY: There are {} matching procs (greater than {})\",\n matches.len(), u),\n (None, None) => unreachable!(),\n }\n }\n if matches.len() > 0 {\n 
println!(\"INFO: Matching processes:\");\n for process in matches.iter().take(20) {\n println!(\"[{:>5}] {}\", process.0, process.1.useful_cmdline());\n }\n if matches.len() > 20 {\n println!(\"And {} more...\", matches.len() - 20)\n }\n }\n status.exit();\n}\n\n#[cfg(test)]\nmod unit {\n use super::{Args, USAGE};\n\n use docopt::Docopt;\n\n #[test]\n fn validate_docstring() {\n let args: Args = Docopt::new(USAGE)\n .and_then(|d| d\n .argv(vec![\"c-p\", \"some.*proc\", \"--crit-under=1\"].into_iter())\n .decode())\n .unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added basic Item to put on map.<commit_after>use tcod::colors::{Color};\n\nuse traits::{Renderable, Movable, Position};\nuse point::Point;\n\n#[derive(Debug)]\npub struct Item {\n name: String,\n glyph: char,\n color: Color,\n position: Point<i16>,\n}\n\nimpl Item {\n pub fn new(name: &str, glyph: char, colour: Color, pos: Point<i16>) -> Item {\n Item {\n name: name,\n glyph: glyph,\n color: color,\n position: pos,\n }\n }\n}\n\nimpl Position for Unit {\n fn get_x(&self) -> i16 {\n self.position.x\n }\n\n fn get_y(&self) -> i16 {\n self.position.y\n }\n}\n\nimpl Renderable for Unit {\n fn get_color(&self) -> Color {\n self.color\n }\n\n fn get_glyph(&self) -> char {\n self.glyph\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a example<commit_after>#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate s_structured_log;\nextern crate serde_json;\n\nuse s_structured_log::{JsonLogger, LoggerOutput, q};\n\nfn main() {\n JsonLogger::init(LoggerOutput::Stderr, log::LogLevelFilter::Info);\n\n \/\/ use json_object!\n s_info!(json_object! {\n \"Fruits\" => json_object! {\n \"on_the_table\" => json_object! {\n \"Apple\" => 1,\n \"Orange\" => \"two\",\n \"Grape\" => 1.0\n },\n \"in_the_basket\" => [\"Banana\", \"Strawberry\"]\n },\n \"Pets\" => [\n json_object! {\n \"name\" => \"Tama\",\n \"kind\" => \"cat\",\n \"age\" => 3\n },\n json_object! {\n \"name\" => \"Pochi\",\n \"kind\" => \"dog\",\n \"age\" => 5\n }\n ]\n });\n\n \/\/ use json_format! and target with `json:` prefix.\n info!(target: &format!(\"json:{}\", module_path!()),\n \"{}\",\n json_format! {\n \"Fruits\" => json_format! {\n \"on_the_table\" => json_format! {\n \"Apple\" => 1,\n \"Orange\" => q(\"two\"),\n \"Grape\" => 1.0\n },\n \"in_the_basket\" => json_format![q(\"Banana\"), q(\"Strawberry\")]\n },\n \"Pets\" => json_format![\n json_format! {\n \"name\" => q(\"Tama\"),\n \"kind\" => q(\"cat\"),\n \"age\" => 3\n },\n json_format! {\n \"name\" => q(\"Pochi\"),\n \"kind\" => q(\"dog\"),\n \"age\" => 5\n }\n ]\n });\n\n \/\/ use json_format! and default target.\n info!(\"{}\",\n json_format! {\n \"Fruits\" => json_format! {\n \"on_the_table\" => json_format! {\n \"Apple\" => 1,\n \"Orange\" => 2,\n \"Grape\" => 1\n },\n \"in_the_basket\" => json_format![q(\"Banana\"), q(\"Strawberry\")]\n },\n \"Pets\" => json_format![\n json_format! {\n \"name\" => q(\"Tama\"),\n \"kind\" => q(\"cat\")\n },\n json_format! 
{\n \"name\" => q(\"Pochi\"),\n \"kind\" => q(\"dog\")\n }\n ]\n });\n}\n<|endoftext|>"} {"text":"<commit_before>#[doc = \"Utilities for manipulating the char type\"];\n\n\/*\n Lu Uppercase_Letter an uppercase letter\n Ll Lowercase_Letter a lowercase letter\n Lt Titlecase_Letter a digraphic character, with first part uppercase\n Lm Modifier_Letter a modifier letter\n Lo Other_Letter other letters, including syllables and ideographs\n Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)\n Mc Spacing_Mark a spacing combining mark (positive advance width)\n Me Enclosing_Mark an enclosing combining mark\n Nd Decimal_Number a decimal digit\n Nl Letter_Number a letterlike numeric character\n No Other_Number a numeric character of other type\n Pc Connector_Punctuation a connecting punctuation mark, like a tie\n Pd Dash_Punctuation a dash or hyphen punctuation mark\n Ps Open_Punctuation an opening punctuation mark (of a pair)\n Pe Close_Punctuation a closing punctuation mark (of a pair)\n Pi Initial_Punctuation an initial quotation mark\n Pf Final_Punctuation a final quotation mark\n Po Other_Punctuation a punctuation mark of other type\n Sm Math_Symbol a symbol of primarily mathematical use\n Sc Currency_Symbol a currency sign\n Sk Modifier_Symbol a non-letterlike modifier symbol\n So Other_Symbol a symbol of other type\n Zs Space_Separator a space character (of various non-zero widths)\n Zl Line_Separator U+2028 LINE SEPARATOR only\n Zp Paragraph_Separator U+2029 PARAGRAPH SEPARATOR only\n Cc Control a C0 or C1 control code\n Cf Format a format control character\n Cs Surrogate a surrogate code point\n Co Private_Use a private-use character\n Cn Unassigned a reserved unassigned code point or a noncharacter\n*\/\n\nexport is_alphabetic,\n is_XID_start, is_XID_continue,\n is_lowercase, is_uppercase,\n is_whitespace, is_alphanumeric,\n is_ascii,\n to_digit, to_lower, to_upper, maybe_digit, cmp;\n\nimport is_alphabetic = unicode::derived_property::Alphabetic;\nimport is_XID_start = unicode::derived_property::XID_Start;\nimport is_XID_continue = unicode::derived_property::XID_Continue;\n\n\n#[doc(\n brief = \"Indicates whether a character is in lower case, defined \\\n in terms of the Unicode General Category 'Ll'.\"\n)]\npure fn is_lowercase(c: char) -> bool {\n ret unicode::general_category::Ll(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is in upper case, defined \\\n in terms of the Unicode General Category 'Lu'.\"\n)]\npure fn is_uppercase(c: char) -> bool {\n ret unicode::general_category::Lu(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is whitespace, defined in \\\n terms of the Unicode General Categories 'Zs', 'Zl', 'Zp' \\\n additional 'Cc'-category control codes in the range [0x09, 0x0d]\"\n)]\npure fn is_whitespace(c: char) -> bool {\n ret ('\\x09' <= c && c <= '\\x0d')\n || unicode::general_category::Zs(c)\n || unicode::general_category::Zl(c)\n || unicode::general_category::Zp(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is alphanumeric, defined \\\n in terms of the Unicode General Categories 'Nd', \\\n 'Nl', 'No' and the Derived Core Property 'Alphabetic'.\"\n)]\npure fn is_alphanumeric(c: char) -> bool {\n ret unicode::derived_property::Alphabetic(c) ||\n unicode::general_category::Nd(c) ||\n unicode::general_category::Nl(c) ||\n unicode::general_category::No(c);\n}\n\n#[doc( brief = \"Indicates whether the character is an ASCII character\" )]\npure fn is_ascii(c: char) -> bool {\n c - ('\\x7F' & c) == '\\x00'\n}\n\n#[doc( brief = 
\"Indicates whether the character is numeric (Nd, Nl, or No)\" )]\npure fn is_digit(c: char) -> bool {\n ret unicode::general_category::Nd(c) ||\n unicode::general_category::Nl(c) ||\n unicode::general_category::No(c);\n}\n\n#[doc(\n brief = \"Convert a char to the corresponding digit. \\\n Safety note: This function fails if `c` is not a valid char\",\n return = \"If `c` is between '0' and '9', the corresponding value \\\n between 0 and 9. If `c` is 'a' or 'A', 10. If `c` is \\\n 'b' or 'B', 11, etc.\"\n)]\npure fn to_digit(c: char) -> u8 unsafe {\n alt maybe_digit(c) {\n option::some(x) { x }\n option::none { fail; }\n }\n}\n\n#[doc(\n brief = \"Convert a char to the corresponding digit. Returns none when \\\n character is not a valid hexadecimal digit.\"\n)]\npure fn maybe_digit(c: char) -> option<u8> {\n alt c {\n '0' to '9' { option::some(c as u8 - ('0' as u8)) }\n 'a' to 'z' { option::some(c as u8 + 10u8 - ('a' as u8)) }\n 'A' to 'Z' { option::some(c as u8 + 10u8 - ('A' as u8)) }\n _ { option::none }\n }\n}\n\n\/*\n FIXME: works only on ASCII\n*\/\n#[doc(\n brief = \"Convert a char to the corresponding lower case.\"\n)]\npure fn to_lower(c: char) -> char {\n alt c {\n 'A' to 'Z' { ((c as u8) + 32u8) as char }\n _ { c }\n }\n}\n\n\/*\n FIXME: works only on ASCII\n*\/\n#[doc(\n brief = \"Convert a char to the corresponding upper case.\"\n)]\npure fn to_upper(c: char) -> char {\n alt c {\n 'a' to 'z' { ((c as u8) - 32u8) as char }\n _ { c }\n }\n}\n\n#[doc(\n brief = \"Compare two chars.\",\n return = \"-1 if a<b, 0 if a==b, +1 if a>b\"\n)]\npure fn cmp(a: char, b: char) -> int {\n ret if b > a { -1 }\n else if b < a { 1 }\n else { 0 }\n}\n\n#[test]\nfn test_is_lowercase() {\n assert is_lowercase('a');\n assert is_lowercase('ö');\n assert is_lowercase('ß');\n assert !is_lowercase('Ü');\n assert !is_lowercase('P');\n}\n\n#[test]\nfn test_is_uppercase() {\n assert !is_uppercase('h');\n assert !is_uppercase('ä');\n assert !is_uppercase('ß');\n assert is_uppercase('Ö');\n assert is_uppercase('T');\n}\n\n#[test]\nfn test_is_whitespace() {\n assert is_whitespace(' ');\n assert is_whitespace('\\u2007');\n assert is_whitespace('\\t');\n assert is_whitespace('\\n');\n\n assert !is_whitespace('a');\n assert !is_whitespace('_');\n assert !is_whitespace('\\u0000');\n}\n\n#[test]\nfn test_to_digit() {\n assert (to_digit('0') == 0u8);\n assert (to_digit('1') == 1u8);\n assert (to_digit('2') == 2u8);\n assert (to_digit('9') == 9u8);\n assert (to_digit('a') == 10u8);\n assert (to_digit('A') == 10u8);\n assert (to_digit('b') == 11u8);\n assert (to_digit('B') == 11u8);\n assert (to_digit('z') == 35u8);\n assert (to_digit('Z') == 35u8);\n}\n\n#[test]\n#[should_fail]\n#[ignore(cfg(target_os = \"win32\"))]\nfn test_to_digit_fail_1() {\n to_digit(' ');\n}\n\n#[test]\n#[should_fail]\n#[ignore(cfg(target_os = \"win32\"))]\nfn test_to_digit_fail_2() {\n to_digit('$');\n}\n\n#[test]\nfn test_to_lower() {\n assert (to_lower('H') == 'h');\n assert (to_lower('e') == 'e');\n \/\/assert (to_lower('Ö') == 'ö');\n assert (to_lower('ß') == 'ß');\n}\n\n#[test]\nfn test_to_upper() {\n assert (to_upper('l') == 'L');\n assert (to_upper('Q') == 'Q');\n \/\/assert (to_upper('ü') == 'Ü');\n assert (to_upper('ß') == 'ß');\n}\n\n#[test]\nfn test_is_ascii() unsafe {\n assert str::all(\"banana\", char::is_ascii);\n assert ! str::all(\"ประเทศไทย中华Việt Nam\", char::is_ascii);\n}\n\n#[test]\nfn test_is_digit() {\n assert is_digit('2');\n assert is_digit('7');\n assert ! is_digit('c');\n assert ! is_digit('i');\n assert ! 
is_digit('z');\n assert ! is_digit('Q');\n}\n\n<commit_msg>(core::char) export is_digit<commit_after>#[doc = \"Utilities for manipulating the char type\"];\n\n\/*\n Lu Uppercase_Letter an uppercase letter\n Ll Lowercase_Letter a lowercase letter\n Lt Titlecase_Letter a digraphic character, with first part uppercase\n Lm Modifier_Letter a modifier letter\n Lo Other_Letter other letters, including syllables and ideographs\n Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)\n Mc Spacing_Mark a spacing combining mark (positive advance width)\n Me Enclosing_Mark an enclosing combining mark\n Nd Decimal_Number a decimal digit\n Nl Letter_Number a letterlike numeric character\n No Other_Number a numeric character of other type\n Pc Connector_Punctuation a connecting punctuation mark, like a tie\n Pd Dash_Punctuation a dash or hyphen punctuation mark\n Ps Open_Punctuation an opening punctuation mark (of a pair)\n Pe Close_Punctuation a closing punctuation mark (of a pair)\n Pi Initial_Punctuation an initial quotation mark\n Pf Final_Punctuation a final quotation mark\n Po Other_Punctuation a punctuation mark of other type\n Sm Math_Symbol a symbol of primarily mathematical use\n Sc Currency_Symbol a currency sign\n Sk Modifier_Symbol a non-letterlike modifier symbol\n So Other_Symbol a symbol of other type\n Zs Space_Separator a space character (of various non-zero widths)\n Zl Line_Separator U+2028 LINE SEPARATOR only\n Zp Paragraph_Separator U+2029 PARAGRAPH SEPARATOR only\n Cc Control a C0 or C1 control code\n Cf Format a format control character\n Cs Surrogate a surrogate code point\n Co Private_Use a private-use character\n Cn Unassigned a reserved unassigned code point or a noncharacter\n*\/\n\nexport is_alphabetic,\n is_XID_start, is_XID_continue,\n is_lowercase, is_uppercase,\n is_whitespace, is_alphanumeric,\n is_ascii, is_digit,\n to_digit, to_lower, to_upper, maybe_digit, cmp;\n\nimport is_alphabetic = unicode::derived_property::Alphabetic;\nimport is_XID_start = unicode::derived_property::XID_Start;\nimport is_XID_continue = unicode::derived_property::XID_Continue;\n\n\n#[doc(\n brief = \"Indicates whether a character is in lower case, defined \\\n in terms of the Unicode General Category 'Ll'.\"\n)]\npure fn is_lowercase(c: char) -> bool {\n ret unicode::general_category::Ll(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is in upper case, defined \\\n in terms of the Unicode General Category 'Lu'.\"\n)]\npure fn is_uppercase(c: char) -> bool {\n ret unicode::general_category::Lu(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is whitespace, defined in \\\n terms of the Unicode General Categories 'Zs', 'Zl', 'Zp' \\\n additional 'Cc'-category control codes in the range [0x09, 0x0d]\"\n)]\npure fn is_whitespace(c: char) -> bool {\n ret ('\\x09' <= c && c <= '\\x0d')\n || unicode::general_category::Zs(c)\n || unicode::general_category::Zl(c)\n || unicode::general_category::Zp(c);\n}\n\n#[doc(\n brief = \"Indicates whether a character is alphanumeric, defined \\\n in terms of the Unicode General Categories 'Nd', \\\n 'Nl', 'No' and the Derived Core Property 'Alphabetic'.\"\n)]\npure fn is_alphanumeric(c: char) -> bool {\n ret unicode::derived_property::Alphabetic(c) ||\n unicode::general_category::Nd(c) ||\n unicode::general_category::Nl(c) ||\n unicode::general_category::No(c);\n}\n\n#[doc( brief = \"Indicates whether the character is an ASCII character\" )]\npure fn is_ascii(c: char) -> bool {\n c - ('\\x7F' & c) == '\\x00'\n}\n\n#[doc( brief = 
\"Indicates whether the character is numeric (Nd, Nl, or No)\" )]\npure fn is_digit(c: char) -> bool {\n ret unicode::general_category::Nd(c) ||\n unicode::general_category::Nl(c) ||\n unicode::general_category::No(c);\n}\n\n#[doc(\n brief = \"Convert a char to the corresponding digit. \\\n Safety note: This function fails if `c` is not a valid char\",\n return = \"If `c` is between '0' and '9', the corresponding value \\\n between 0 and 9. If `c` is 'a' or 'A', 10. If `c` is \\\n 'b' or 'B', 11, etc.\"\n)]\npure fn to_digit(c: char) -> u8 unsafe {\n alt maybe_digit(c) {\n option::some(x) { x }\n option::none { fail; }\n }\n}\n\n#[doc(\n brief = \"Convert a char to the corresponding digit. Returns none when \\\n character is not a valid hexadecimal digit.\"\n)]\npure fn maybe_digit(c: char) -> option<u8> {\n alt c {\n '0' to '9' { option::some(c as u8 - ('0' as u8)) }\n 'a' to 'z' { option::some(c as u8 + 10u8 - ('a' as u8)) }\n 'A' to 'Z' { option::some(c as u8 + 10u8 - ('A' as u8)) }\n _ { option::none }\n }\n}\n\n\/*\n FIXME: works only on ASCII\n*\/\n#[doc(\n brief = \"Convert a char to the corresponding lower case.\"\n)]\npure fn to_lower(c: char) -> char {\n alt c {\n 'A' to 'Z' { ((c as u8) + 32u8) as char }\n _ { c }\n }\n}\n\n\/*\n FIXME: works only on ASCII\n*\/\n#[doc(\n brief = \"Convert a char to the corresponding upper case.\"\n)]\npure fn to_upper(c: char) -> char {\n alt c {\n 'a' to 'z' { ((c as u8) - 32u8) as char }\n _ { c }\n }\n}\n\n#[doc(\n brief = \"Compare two chars.\",\n return = \"-1 if a<b, 0 if a==b, +1 if a>b\"\n)]\npure fn cmp(a: char, b: char) -> int {\n ret if b > a { -1 }\n else if b < a { 1 }\n else { 0 }\n}\n\n#[test]\nfn test_is_lowercase() {\n assert is_lowercase('a');\n assert is_lowercase('ö');\n assert is_lowercase('ß');\n assert !is_lowercase('Ü');\n assert !is_lowercase('P');\n}\n\n#[test]\nfn test_is_uppercase() {\n assert !is_uppercase('h');\n assert !is_uppercase('ä');\n assert !is_uppercase('ß');\n assert is_uppercase('Ö');\n assert is_uppercase('T');\n}\n\n#[test]\nfn test_is_whitespace() {\n assert is_whitespace(' ');\n assert is_whitespace('\\u2007');\n assert is_whitespace('\\t');\n assert is_whitespace('\\n');\n\n assert !is_whitespace('a');\n assert !is_whitespace('_');\n assert !is_whitespace('\\u0000');\n}\n\n#[test]\nfn test_to_digit() {\n assert (to_digit('0') == 0u8);\n assert (to_digit('1') == 1u8);\n assert (to_digit('2') == 2u8);\n assert (to_digit('9') == 9u8);\n assert (to_digit('a') == 10u8);\n assert (to_digit('A') == 10u8);\n assert (to_digit('b') == 11u8);\n assert (to_digit('B') == 11u8);\n assert (to_digit('z') == 35u8);\n assert (to_digit('Z') == 35u8);\n}\n\n#[test]\n#[should_fail]\n#[ignore(cfg(target_os = \"win32\"))]\nfn test_to_digit_fail_1() {\n to_digit(' ');\n}\n\n#[test]\n#[should_fail]\n#[ignore(cfg(target_os = \"win32\"))]\nfn test_to_digit_fail_2() {\n to_digit('$');\n}\n\n#[test]\nfn test_to_lower() {\n assert (to_lower('H') == 'h');\n assert (to_lower('e') == 'e');\n \/\/assert (to_lower('Ö') == 'ö');\n assert (to_lower('ß') == 'ß');\n}\n\n#[test]\nfn test_to_upper() {\n assert (to_upper('l') == 'L');\n assert (to_upper('Q') == 'Q');\n \/\/assert (to_upper('ü') == 'Ü');\n assert (to_upper('ß') == 'ß');\n}\n\n#[test]\nfn test_is_ascii() unsafe {\n assert str::all(\"banana\", char::is_ascii);\n assert ! str::all(\"ประเทศไทย中华Việt Nam\", char::is_ascii);\n}\n\n#[test]\nfn test_is_digit() {\n assert is_digit('2');\n assert is_digit('7');\n assert ! is_digit('c');\n assert ! is_digit('i');\n assert ! 
is_digit('z');\n assert ! is_digit('Q');\n}\n\n<|endoftext|>"} {"text":"<commit_before>#[doc = \"Random number generation\"];\n\nexport rng, seed, seeded_rng, weighted, extensions;\n\nenum rctx {}\n\n#[abi = \"cdecl\"]\nnative mod rustrt {\n fn rand_seed() -> [u8];\n fn rand_new() -> *rctx;\n fn rand_new_seeded(seed: [u8]) -> *rctx;\n fn rand_next(c: *rctx) -> u32;\n fn rand_free(c: *rctx);\n}\n\n#[doc = \"A random number generator\"]\niface rng {\n #[doc = \"Return the next random integer\"]\n fn next() -> u32;\n}\n\n#[doc = \"A value with a particular weight compared to other values\"]\ntype weighted<T> = { weight: uint, item: T };\n\n#[doc = \"Extension methods for random number generators\"]\nimpl extensions for rng {\n\n #[doc = \"Return a random int\"]\n fn gen_int() -> int {\n self.gen_i64() as int\n }\n\n #[doc = \"Return an int randomly chosen from the range [start, end), \\\n failing if start >= end\"]\n fn gen_int_range(start: int, end: int) -> int {\n assert start < end;\n start + int::abs(self.gen_int() % (end - start))\n }\n\n #[doc = \"Return a random i8\"]\n fn gen_i8() -> i8 {\n self.next() as i8\n }\n\n #[doc = \"Return a random i16\"]\n fn gen_i16() -> i16 {\n self.next() as i16\n }\n\n #[doc = \"Return a random i32\"]\n fn gen_i32() -> i32 {\n self.next() as i32\n }\n\n #[doc = \"Return a random i64\"]\n fn gen_i64() -> i64 {\n (self.next() as i64 << 32) | self.next() as i64\n }\n\n #[doc = \"Return a random uint\"]\n fn gen_uint() -> uint {\n self.gen_u64() as uint\n }\n\n #[doc = \"Return a uint randomly chosen from the range [start, end), \\\n failing if start >= end\"]\n fn gen_uint_range(start: uint, end: uint) -> uint {\n assert start < end;\n start + (self.gen_uint() % (end - start))\n }\n\n #[doc = \"Return a random u8\"]\n fn gen_u8() -> u8 {\n self.next() as u8\n }\n\n #[doc = \"Return a random u16\"]\n fn gen_u16() -> u16 {\n self.next() as u16\n }\n\n #[doc = \"Return a random u32\"]\n fn gen_u32() -> u32 {\n self.next()\n }\n\n #[doc = \"Return a random u64\"]\n fn gen_u64() -> u64 {\n (self.next() as u64 << 32) | self.next() as u64\n }\n\n #[doc = \"Return a random float\"]\n fn gen_float() -> float {\n self.gen_f64() as float\n }\n\n #[doc = \"Return a random f32\"]\n fn gen_f32() -> f32 {\n self.gen_f64() as f32\n }\n\n #[doc = \"Return a random f64\"]\n fn gen_f64() -> f64 {\n let u1 = self.next() as f64;\n let u2 = self.next() as f64;\n let u3 = self.next() as f64;\n let scale = u32::max_value as f64;\n ret ((u1 \/ scale + u2) \/ scale + u3) \/ scale;\n }\n\n #[doc = \"Return a random char\"]\n fn gen_char() -> char {\n self.next() as char\n }\n\n #[doc = \"Return a char randomly chosen from chars, failing if chars is \\\n empty\"]\n fn gen_char_from(chars: str) -> char {\n assert !chars.is_empty();\n self.choose(str::chars(chars))\n }\n\n #[doc = \"Return a random bool\"]\n fn gen_bool() -> bool {\n self.next() & 1u32 == 1u32\n }\n\n #[doc = \"Return a bool with a 1 in n chance of true\"]\n fn gen_weighted_bool(n: uint) -> bool {\n if n == 0u {\n true\n } else {\n self.gen_uint_range(1u, n + 1u) == 1u\n }\n }\n\n #[doc = \"Return a random string of the specified length composed of A-Z, \\\n a-z, 0-9\"]\n fn gen_str(len: uint) -> str {\n let charset = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\" +\n \"abcdefghijklmnopqrstuvwxyz\" +\n \"0123456789\";\n let mut s = \"\";\n let mut i = 0u;\n while (i < len) {\n s = s + str::from_char(self.gen_char_from(charset));\n i += 1u;\n }\n s\n }\n\n #[doc = \"Return a random byte string of the specified length\"]\n fn 
gen_bytes(len: uint) -> [u8] {\n vec::from_fn(len) {|_i|\n self.gen_u8()\n }\n }\n\n #[doc = \"Choose an item randomly, failing if values is empty\"]\n fn choose<T:copy>(values: [T]) -> T {\n self.choose_option(values).get()\n }\n\n #[doc = \"Choose some(item) randomly, returning none if values is empty\"]\n fn choose_option<T:copy>(values: [T]) -> option<T> {\n if values.is_empty() {\n none\n } else {\n some(values[self.gen_uint_range(0u, values.len())])\n }\n }\n\n #[doc = \"Choose an item respecting the relative weights, failing if \\\n the sum of the weights is 0\"]\n fn choose_weighted<T: copy>(v : [weighted<T>]) -> T {\n self.choose_weighted_option(v).get()\n }\n\n #[doc = \"Choose some(item) respecting the relative weights, returning \\\n none if the sum of the weights is 0\"]\n fn choose_weighted_option<T:copy>(v: [weighted<T>]) -> option<T> {\n let mut total = 0u;\n for v.each {|item|\n total += item.weight;\n }\n if total == 0u {\n ret none;\n }\n let chosen = self.gen_uint_range(0u, total);\n let mut so_far = 0u;\n for v.each {|item|\n so_far += item.weight;\n if so_far > chosen {\n ret some(item.item);\n }\n }\n unreachable();\n }\n\n #[doc = \"Return a vec containing copies of the items, in order, where \\\n the weight of the item determines how many copies there are\"]\n fn weighted_vec<T:copy>(v: [weighted<T>]) -> [T] {\n let mut r = [];\n for v.each {|item|\n uint::range(0u, item.weight) {|_i|\n r += [item.item];\n }\n }\n r\n }\n\n #[doc = \"Shuffle a vec\"]\n fn shuffle<T:copy>(values: [T]) -> [T] {\n let mut m = vec::to_mut(values);\n self.shuffle_mut(m);\n ret vec::from_mut(m);\n }\n\n #[doc = \"Shuffle a mutable vec in place\"]\n fn shuffle_mut<T>(&values: [mut T]) {\n let mut i = values.len();\n while i >= 2u {\n \/\/ invariant: elements with index >= i have been locked in place.\n i -= 1u;\n \/\/ lock element i in place.\n vec::swap(values, i, self.gen_uint_range(0u, i + 1u));\n }\n }\n\n}\n\nresource rand_res(c: *rctx) { rustrt::rand_free(c); }\n\nimpl of rng for @rand_res {\n fn next() -> u32 { ret rustrt::rand_next(**self); }\n}\n\n#[doc = \"Create a new random seed for seeded_rng\"]\nfn seed() -> [u8] {\n rustrt::rand_seed()\n}\n\n#[doc = \"Create a random number generator with a system specified seed\"]\nfn rng() -> rng {\n @rand_res(rustrt::rand_new()) as rng\n}\n\n#[doc = \"Create a random number generator using the specified seed. A \\\n generator constructed with a given seed will generate the same \\\n sequence of values as all other generators constructed with the \\\n same seed. 
The seed may be any length.\"]\nfn seeded_rng(seed: [u8]) -> rng {\n @rand_res(rustrt::rand_new_seeded(seed)) as rng\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn rng_seeded() {\n let seed = rand::seed();\n let ra = rand::seeded_rng(seed);\n let rb = rand::seeded_rng(seed);\n assert ra.gen_str(100u) == rb.gen_str(100u);\n }\n\n #[test]\n fn rng_seeded_custom_seed() {\n \/\/ much shorter than generated seeds which are 1024 bytes\n let seed = [2u8, 32u8, 4u8, 32u8, 51u8];\n let ra = rand::seeded_rng(seed);\n let rb = rand::seeded_rng(seed);\n assert ra.gen_str(100u) == rb.gen_str(100u);\n }\n\n #[test]\n fn gen_int_range() {\n let r = rand::rng();\n let a = r.gen_int_range(-3, 42);\n assert a >= -3 && a < 42;\n assert r.gen_int_range(0, 1) == 0;\n assert r.gen_int_range(-12, -11) == -12;\n }\n\n #[test]\n #[should_fail]\n #[ignore(cfg(target_os = \"win3\"))]\n fn gen_int_from_fail() {\n rand::rng().gen_int_range(5, -2);\n }\n\n #[test]\n fn gen_uint_range() {\n let r = rand::rng();\n let a = r.gen_uint_range(3u, 42u);\n assert a >= 3u && a < 42u;\n assert r.gen_uint_range(0u, 1u) == 0u;\n assert r.gen_uint_range(12u, 13u) == 12u;\n }\n\n #[test]\n #[should_fail]\n #[ignore(cfg(target_os = \"win3\"))]\n fn gen_uint_range_fail() {\n rand::rng().gen_uint_range(5u, 2u);\n }\n\n #[test]\n fn gen_float() {\n let r = rand::rng();\n let a = r.gen_float();\n let b = r.gen_float();\n log(debug, (a, b));\n }\n\n #[test]\n fn gen_weighted_bool() {\n let r = rand::rng();\n assert r.gen_weighted_bool(0u) == true;\n assert r.gen_weighted_bool(1u) == true;\n }\n\n #[test]\n fn gen_str() {\n let r = rand::rng();\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n assert r.gen_str(0u).len() == 0u;\n assert r.gen_str(10u).len() == 10u;\n assert r.gen_str(16u).len() == 16u;\n }\n\n #[test]\n fn gen_bytes() {\n let r = rand::rng();\n assert r.gen_bytes(0u).len() == 0u;\n assert r.gen_bytes(10u).len() == 10u;\n assert r.gen_bytes(16u).len() == 16u;\n }\n\n #[test]\n fn choose() {\n let r = rand::rng();\n assert r.choose([1, 1, 1]) == 1;\n }\n\n #[test]\n fn choose_option() {\n let r = rand::rng();\n assert r.choose_option([]) == none::<int>;\n assert r.choose_option([1, 1, 1]) == some(1);\n }\n\n #[test]\n fn choose_weighted() {\n let r = rand::rng();\n assert r.choose_weighted([{weight: 1u, item: 42}]) == 42;\n assert r.choose_weighted([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == 43;\n }\n\n #[test]\n fn choose_weighted_option() {\n let r = rand::rng();\n assert r.choose_weighted_option([{weight: 1u, item: 42}]) == some(42);\n assert r.choose_weighted_option([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == some(43);\n assert r.choose_weighted_option([]) == none::<int>;\n }\n\n #[test]\n fn weighted_vec() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.weighted_vec([]) == empty;\n assert r.weighted_vec([\n {weight: 0u, item: 3u},\n {weight: 1u, item: 2u},\n {weight: 2u, item: 1u}\n ]) == [2u, 1u, 1u];\n }\n\n #[test]\n fn shuffle() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.shuffle([]) == empty;\n assert r.shuffle([1, 1, 1]) == [1, 1, 1];\n }\n}\n\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<commit_msg>core: Fix typos in ignore attrs<commit_after>#[doc = \"Random number generation\"];\n\nexport rng, seed, seeded_rng, weighted, extensions;\n\nenum rctx {}\n\n#[abi = \"cdecl\"]\nnative 
mod rustrt {\n fn rand_seed() -> [u8];\n fn rand_new() -> *rctx;\n fn rand_new_seeded(seed: [u8]) -> *rctx;\n fn rand_next(c: *rctx) -> u32;\n fn rand_free(c: *rctx);\n}\n\n#[doc = \"A random number generator\"]\niface rng {\n #[doc = \"Return the next random integer\"]\n fn next() -> u32;\n}\n\n#[doc = \"A value with a particular weight compared to other values\"]\ntype weighted<T> = { weight: uint, item: T };\n\n#[doc = \"Extension methods for random number generators\"]\nimpl extensions for rng {\n\n #[doc = \"Return a random int\"]\n fn gen_int() -> int {\n self.gen_i64() as int\n }\n\n #[doc = \"Return an int randomly chosen from the range [start, end), \\\n failing if start >= end\"]\n fn gen_int_range(start: int, end: int) -> int {\n assert start < end;\n start + int::abs(self.gen_int() % (end - start))\n }\n\n #[doc = \"Return a random i8\"]\n fn gen_i8() -> i8 {\n self.next() as i8\n }\n\n #[doc = \"Return a random i16\"]\n fn gen_i16() -> i16 {\n self.next() as i16\n }\n\n #[doc = \"Return a random i32\"]\n fn gen_i32() -> i32 {\n self.next() as i32\n }\n\n #[doc = \"Return a random i64\"]\n fn gen_i64() -> i64 {\n (self.next() as i64 << 32) | self.next() as i64\n }\n\n #[doc = \"Return a random uint\"]\n fn gen_uint() -> uint {\n self.gen_u64() as uint\n }\n\n #[doc = \"Return a uint randomly chosen from the range [start, end), \\\n failing if start >= end\"]\n fn gen_uint_range(start: uint, end: uint) -> uint {\n assert start < end;\n start + (self.gen_uint() % (end - start))\n }\n\n #[doc = \"Return a random u8\"]\n fn gen_u8() -> u8 {\n self.next() as u8\n }\n\n #[doc = \"Return a random u16\"]\n fn gen_u16() -> u16 {\n self.next() as u16\n }\n\n #[doc = \"Return a random u32\"]\n fn gen_u32() -> u32 {\n self.next()\n }\n\n #[doc = \"Return a random u64\"]\n fn gen_u64() -> u64 {\n (self.next() as u64 << 32) | self.next() as u64\n }\n\n #[doc = \"Return a random float\"]\n fn gen_float() -> float {\n self.gen_f64() as float\n }\n\n #[doc = \"Return a random f32\"]\n fn gen_f32() -> f32 {\n self.gen_f64() as f32\n }\n\n #[doc = \"Return a random f64\"]\n fn gen_f64() -> f64 {\n let u1 = self.next() as f64;\n let u2 = self.next() as f64;\n let u3 = self.next() as f64;\n let scale = u32::max_value as f64;\n ret ((u1 \/ scale + u2) \/ scale + u3) \/ scale;\n }\n\n #[doc = \"Return a random char\"]\n fn gen_char() -> char {\n self.next() as char\n }\n\n #[doc = \"Return a char randomly chosen from chars, failing if chars is \\\n empty\"]\n fn gen_char_from(chars: str) -> char {\n assert !chars.is_empty();\n self.choose(str::chars(chars))\n }\n\n #[doc = \"Return a random bool\"]\n fn gen_bool() -> bool {\n self.next() & 1u32 == 1u32\n }\n\n #[doc = \"Return a bool with a 1 in n chance of true\"]\n fn gen_weighted_bool(n: uint) -> bool {\n if n == 0u {\n true\n } else {\n self.gen_uint_range(1u, n + 1u) == 1u\n }\n }\n\n #[doc = \"Return a random string of the specified length composed of A-Z, \\\n a-z, 0-9\"]\n fn gen_str(len: uint) -> str {\n let charset = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\" +\n \"abcdefghijklmnopqrstuvwxyz\" +\n \"0123456789\";\n let mut s = \"\";\n let mut i = 0u;\n while (i < len) {\n s = s + str::from_char(self.gen_char_from(charset));\n i += 1u;\n }\n s\n }\n\n #[doc = \"Return a random byte string of the specified length\"]\n fn gen_bytes(len: uint) -> [u8] {\n vec::from_fn(len) {|_i|\n self.gen_u8()\n }\n }\n\n #[doc = \"Choose an item randomly, failing if values is empty\"]\n fn choose<T:copy>(values: [T]) -> T {\n self.choose_option(values).get()\n }\n\n 
#[doc = \"Choose some(item) randomly, returning none if values is empty\"]\n fn choose_option<T:copy>(values: [T]) -> option<T> {\n if values.is_empty() {\n none\n } else {\n some(values[self.gen_uint_range(0u, values.len())])\n }\n }\n\n #[doc = \"Choose an item respecting the relative weights, failing if \\\n the sum of the weights is 0\"]\n fn choose_weighted<T: copy>(v : [weighted<T>]) -> T {\n self.choose_weighted_option(v).get()\n }\n\n #[doc = \"Choose some(item) respecting the relative weights, returning \\\n none if the sum of the weights is 0\"]\n fn choose_weighted_option<T:copy>(v: [weighted<T>]) -> option<T> {\n let mut total = 0u;\n for v.each {|item|\n total += item.weight;\n }\n if total == 0u {\n ret none;\n }\n let chosen = self.gen_uint_range(0u, total);\n let mut so_far = 0u;\n for v.each {|item|\n so_far += item.weight;\n if so_far > chosen {\n ret some(item.item);\n }\n }\n unreachable();\n }\n\n #[doc = \"Return a vec containing copies of the items, in order, where \\\n the weight of the item determines how many copies there are\"]\n fn weighted_vec<T:copy>(v: [weighted<T>]) -> [T] {\n let mut r = [];\n for v.each {|item|\n uint::range(0u, item.weight) {|_i|\n r += [item.item];\n }\n }\n r\n }\n\n #[doc = \"Shuffle a vec\"]\n fn shuffle<T:copy>(values: [T]) -> [T] {\n let mut m = vec::to_mut(values);\n self.shuffle_mut(m);\n ret vec::from_mut(m);\n }\n\n #[doc = \"Shuffle a mutable vec in place\"]\n fn shuffle_mut<T>(&values: [mut T]) {\n let mut i = values.len();\n while i >= 2u {\n \/\/ invariant: elements with index >= i have been locked in place.\n i -= 1u;\n \/\/ lock element i in place.\n vec::swap(values, i, self.gen_uint_range(0u, i + 1u));\n }\n }\n\n}\n\nresource rand_res(c: *rctx) { rustrt::rand_free(c); }\n\nimpl of rng for @rand_res {\n fn next() -> u32 { ret rustrt::rand_next(**self); }\n}\n\n#[doc = \"Create a new random seed for seeded_rng\"]\nfn seed() -> [u8] {\n rustrt::rand_seed()\n}\n\n#[doc = \"Create a random number generator with a system specified seed\"]\nfn rng() -> rng {\n @rand_res(rustrt::rand_new()) as rng\n}\n\n#[doc = \"Create a random number generator using the specified seed. A \\\n generator constructed with a given seed will generate the same \\\n sequence of values as all other generators constructed with the \\\n same seed. 
The seed may be any length.\"]\nfn seeded_rng(seed: [u8]) -> rng {\n @rand_res(rustrt::rand_new_seeded(seed)) as rng\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn rng_seeded() {\n let seed = rand::seed();\n let ra = rand::seeded_rng(seed);\n let rb = rand::seeded_rng(seed);\n assert ra.gen_str(100u) == rb.gen_str(100u);\n }\n\n #[test]\n fn rng_seeded_custom_seed() {\n \/\/ much shorter than generated seeds which are 1024 bytes\n let seed = [2u8, 32u8, 4u8, 32u8, 51u8];\n let ra = rand::seeded_rng(seed);\n let rb = rand::seeded_rng(seed);\n assert ra.gen_str(100u) == rb.gen_str(100u);\n }\n\n #[test]\n fn gen_int_range() {\n let r = rand::rng();\n let a = r.gen_int_range(-3, 42);\n assert a >= -3 && a < 42;\n assert r.gen_int_range(0, 1) == 0;\n assert r.gen_int_range(-12, -11) == -12;\n }\n\n #[test]\n #[should_fail]\n #[ignore(cfg(target_os = \"win32\"))]\n fn gen_int_from_fail() {\n rand::rng().gen_int_range(5, -2);\n }\n\n #[test]\n fn gen_uint_range() {\n let r = rand::rng();\n let a = r.gen_uint_range(3u, 42u);\n assert a >= 3u && a < 42u;\n assert r.gen_uint_range(0u, 1u) == 0u;\n assert r.gen_uint_range(12u, 13u) == 12u;\n }\n\n #[test]\n #[should_fail]\n #[ignore(cfg(target_os = \"win32\"))]\n fn gen_uint_range_fail() {\n rand::rng().gen_uint_range(5u, 2u);\n }\n\n #[test]\n fn gen_float() {\n let r = rand::rng();\n let a = r.gen_float();\n let b = r.gen_float();\n log(debug, (a, b));\n }\n\n #[test]\n fn gen_weighted_bool() {\n let r = rand::rng();\n assert r.gen_weighted_bool(0u) == true;\n assert r.gen_weighted_bool(1u) == true;\n }\n\n #[test]\n fn gen_str() {\n let r = rand::rng();\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n assert r.gen_str(0u).len() == 0u;\n assert r.gen_str(10u).len() == 10u;\n assert r.gen_str(16u).len() == 16u;\n }\n\n #[test]\n fn gen_bytes() {\n let r = rand::rng();\n assert r.gen_bytes(0u).len() == 0u;\n assert r.gen_bytes(10u).len() == 10u;\n assert r.gen_bytes(16u).len() == 16u;\n }\n\n #[test]\n fn choose() {\n let r = rand::rng();\n assert r.choose([1, 1, 1]) == 1;\n }\n\n #[test]\n fn choose_option() {\n let r = rand::rng();\n assert r.choose_option([]) == none::<int>;\n assert r.choose_option([1, 1, 1]) == some(1);\n }\n\n #[test]\n fn choose_weighted() {\n let r = rand::rng();\n assert r.choose_weighted([{weight: 1u, item: 42}]) == 42;\n assert r.choose_weighted([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == 43;\n }\n\n #[test]\n fn choose_weighted_option() {\n let r = rand::rng();\n assert r.choose_weighted_option([{weight: 1u, item: 42}]) == some(42);\n assert r.choose_weighted_option([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == some(43);\n assert r.choose_weighted_option([]) == none::<int>;\n }\n\n #[test]\n fn weighted_vec() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.weighted_vec([]) == empty;\n assert r.weighted_vec([\n {weight: 0u, item: 3u},\n {weight: 1u, item: 2u},\n {weight: 2u, item: 1u}\n ]) == [2u, 1u, 1u];\n }\n\n #[test]\n fn shuffle() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.shuffle([]) == empty;\n assert r.shuffle([1, 1, 1]) == [1, 1, 1];\n }\n}\n\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Sort alphabets before comparison<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix my dumb 
mistake<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add `abort` example<commit_after>#![feature(core_intrinsics)]\n#![no_main]\n#![no_std]\n\nextern crate f3;\n\nuse core::intrinsics;\n\n#[export_name = \"main\"]\npub extern \"C\" fn main() -> ! {\n unsafe { intrinsics::abort() }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::{\n Mac,\n MacResult,\n};\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n FromBase64,\n Newline,\n ToBase64,\n};\nuse error::Error;\nuse header::Header;\nuse claims::Claims;\n\npub mod error;\npub mod header;\npub mod claims;\n\npub struct Token {\n raw: Option<String>,\n header: Header,\n claims: Claims,\n}\n\nimpl Token {\n pub fn parse(raw: &str) -> Result<Token, Error> {\n let pieces: Vec<_> = raw.split('.').collect();\n\n Ok(Token {\n raw: Some(raw.into()),\n header: try!(Header::parse(pieces[0])),\n claims: try!(Claims::parse(pieces[1])),\n })\n }\n\n pub fn verify<D: Digest>(&self, key: &str, digest: D) -> bool {\n let raw = match self.raw {\n Some(ref s) => s,\n None => return false,\n };\n\n let pieces: Vec<_> = raw.rsplitn(2, '.').collect();\n let sig = pieces[0];\n let data = pieces[1];\n\n verify(sig, data, key, digest)\n }\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\nfn verify<D: Digest>(target: &str, data: &str, key: &str, digest: D) -> bool {\n let target_bytes = match target.from_base64() {\n Ok(x) => x,\n Err(_) => return false,\n };\n let target_mac = MacResult::new_from_owned(target_bytes);\n\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n hmac.result() == target_mac\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use verify;\n use Token;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n\n #[test]\n pub fn verify_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let target = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n assert!(verify(target, &*data, \"secret\", Sha256::new()));\n }\n\n #[test]\n pub fn raw_data() {\n let raw = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let token = Token::parse(raw).unwrap();\n\n {\n assert_eq!(token.header.alg, Some(\"HS256\".into()));\n }\n assert!(token.verify(\"secret\", Sha256::new()));\n }\n}\n<commit_msg>Add signed method<commit_after>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::{\n Mac,\n MacResult,\n};\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n 
FromBase64,\n Newline,\n ToBase64,\n};\nuse error::Error;\nuse header::Header;\nuse claims::Claims;\n\npub mod error;\npub mod header;\npub mod claims;\n\npub struct Token {\n raw: Option<String>,\n header: Header,\n claims: Claims,\n}\n\nimpl Token {\n pub fn parse(raw: &str) -> Result<Token, Error> {\n let pieces: Vec<_> = raw.split('.').collect();\n\n Ok(Token {\n raw: Some(raw.into()),\n header: try!(Header::parse(pieces[0])),\n claims: try!(Claims::parse(pieces[1])),\n })\n }\n\n pub fn verify<D: Digest>(&self, key: &str, digest: D) -> bool {\n let raw = match self.raw {\n Some(ref s) => s,\n None => return false,\n };\n\n let pieces: Vec<_> = raw.rsplitn(2, '.').collect();\n let sig = pieces[0];\n let data = pieces[1];\n\n verify(sig, data, key, digest)\n }\n\n pub fn signed<D: Digest>(&self, key: &str, digest: D) -> Result<String, Error> {\n let header = try!(self.header.encode());\n let claims = try!(self.claims.encode());\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, key, digest);\n Ok(format!(\"{}.{}\", data, sig))\n }\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\nfn verify<D: Digest>(target: &str, data: &str, key: &str, digest: D) -> bool {\n let target_bytes = match target.from_base64() {\n Ok(x) => x,\n Err(_) => return false,\n };\n let target_mac = MacResult::new_from_owned(target_bytes);\n\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n hmac.result() == target_mac\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use verify;\n use Token;\n use header::Header;\n use claims::Claims;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n\n #[test]\n pub fn verify_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let target = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n assert!(verify(target, &*data, \"secret\", Sha256::new()));\n }\n\n #[test]\n pub fn raw_data() {\n let raw = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let token = Token::parse(raw).unwrap();\n\n {\n assert_eq!(token.header.alg, Some(\"HS256\".into()));\n }\n assert!(token.verify(\"secret\", Sha256::new()));\n }\n\n #[test]\n pub fn roundtrip() {\n let token = Token {\n raw: None,\n header: Default::default(),\n claims: Claims::new(Default::default()),\n };\n let key = \"secret\";\n let raw = token.signed(key, Sha256::new()).unwrap();\n let same = Token::parse(&*raw).unwrap();\n\n assert_eq!(same.header.typ, \"JWT\");\n assert_eq!(same.header.alg, Some(\"HS256\".into()));\n assert!(same.verify(key, Sha256::new()));\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>continued ffi<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update docs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove redundancy by merging most from_filename logic into try_parents.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removed return statement from get_client()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Created 'game engine' library<commit_after>#![crate_id = \"gameengine\"]\n#![deny(missing_doc)]\n\n\/\/! Documentation goes here.\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add x86-in[bwl]<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fn sam_read initiated<commit_after>pub fn read_sam(sam_path: String, mode: Strig, min_quality: i32) {\n\t let mut view_options = '';\n\t let mut flag_on = 0x0;\n\t let mut flag_off = 0x900; \/\/Ignore secondary and supplementary alignments\n\n match flag_on {\n A => 0x1,\n C => 0x3,\n u => 0x4,\n 1 => 0x40,\n 2 => 0x80,\n - => 0x10,\n _ => println!(\"No matching flag!\");\n }\n\n match flag_off {\n a => 0x4,\n + => 0x10,\n D => 0x400,\n _ => println!(\"No matching flag!\");\n }\n\n\tview_options += '-f 0x%x -F 0x%x ' % (flag_on, flag_off)\n\n\tif min_quality > 0: view_options += '-q%d ' % min_quality\n\n\tout = shell_stdout('samtools view %s %s' % (view_options, sam_path))\n\tfor line in out:\n\t\tyield line.split('\\t')\n }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>textures<commit_after>use cgmath;\nuse cgmath::obb;\nuse cgmath::vector::*;\nuse gl;\nuse gl::types::*;\nuse sdl2::surface::ll::SDL_Surface;\n\n#[deriving(Clone)]\npub struct TextureVertex {\n pub position: Vector2<GLfloat>,\n pub texture_position: Vector2<GLfloat>,\n}\n\nimpl TextureVertex {\n #[inline]\n pub fn new(x: GLfloat, y: GLfloat, tx: GLfloat, ty: GLfloat) -> TextureVertex {\n TextureVertex {\n position: Vector2::new(x, y),\n texture_position: Vector2::new(tx, ty),\n }\n }\n}\n\npub struct Texture {\n handle: GLuint,\n size: Vector2<uint>,\n\n \/\/ If we've already calculated a preferred size, this keeps us from having\n \/\/ to constantly recompute the screen space size every frame, even when the\n \/\/ screen size hasn't actually changed.\n cached_preferred_size: Option<(Vector2<uint>, Vector2<GLfloat>)>,\n}\n\nfn floatify(x: Vector2<uint>) -> Vector2<GLfloat> {\n Vector2::new(x.x as GLfloat, x.y as GLfloat)\n}\n\nimpl Texture {\n \/\/\/ Create a texture with an explicit size, in pixels. 
Note that this still\n \/\/\/ needs to be translated into screen space: [-1.0, 1.0].\n fn new(size: Vector2<uint>) -> Texture {\n let mut texture = -1;\n gl::GenTexture(&mut texture);\n assert!(texture != -1);\n gl::BindTexture(gl::TEXTURE_2D, texture);\n\n Texture { handle: texture, size: size }\n }\n\n pub fn of_surface(image: *const SDL_Surface) -> Texture {\n let ret = Texture::new(Vector2::new(image.w, image.h));\n\n gl::TexImage2D(gl::TEXTURE_2D, 0, gl::RGBA as i32, image.w, image.h, 0, gl::BGRA, gl::UNSIGNED_INT_8_8_8_8_REV, image.pixels);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);\n\n ret\n }\n\n \/\/\/ Get the texture's preferred size, scaled to screen space.\n \/\/\/ Feel free to translate this as necessary when rendering.\n pub fn preferred_size(&mut self, window_size: Vector2<uint>) -> Vector2<GLfloat> {\n match self.cached_preferred_size {\n None => {\n let ret = floatify(self.size).div_v(&floatify(window_size));\n self.cached_preferred_size = Some(window_size, ret);\n ret\n },\n Some((orig_window, scaled_resolution)) => {\n if orig_window == window_size { return scaled_resolution; }\n\n let ret = flatify(self.size).div_v(&floatify(window_size));\n self.cached_preferred_size = Some(window_size, ret);\n ret\n }\n }\n }\n}\n\nimpl Drop for Texture {\n fn drop(&mut self) {\n gl::DeleteTexture(handle);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary clone() call<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![deny(missing_docs)]\n\n\/\/! Low-level graphics abstraction for Rust. Mostly operates on data, not types.\n\/\/! 
Designed for use by libraries and higher-level abstractions only.\n\n#[macro_use]\nextern crate bitflags;\n#[macro_use]\nextern crate log;\nextern crate draw_state;\n\/\/extern crate num;\n\nuse std::fmt::Debug;\nuse std::hash::Hash;\nuse std::any::Any;\nuse std::slice::Iter;\n\npub use draw_state::{state, target};\npub use self::factory::Factory;\n\npub mod buffer;\npub mod command;\npub mod dummy;\npub mod factory;\npub mod format;\npub mod handle;\npub mod mapping;\npub mod memory;\npub mod pso;\npub mod shade;\npub mod texture;\n\n\/\/\/ Compile-time maximum number of vertex attributes.\npub const MAX_VERTEX_ATTRIBUTES: usize = 16;\n\/\/\/ Compile-time maximum number of color targets.\npub const MAX_COLOR_TARGETS: usize = 4;\n\/\/\/ Compile-time maximum number of constant buffers.\npub const MAX_CONSTANT_BUFFERS: usize = 14;\n\/\/\/ Compile-time maximum number of shader resource views (SRV).\npub const MAX_RESOURCE_VIEWS: usize = 32;\n\/\/\/ Compile-time maximum number of unordered access views (UAV).\npub const MAX_UNORDERED_VIEWS: usize = 4;\n\/\/\/ Compile-time maximum number of samplers.\npub const MAX_SAMPLERS: usize = 16;\n\n\/\/\/ Draw vertex count.\npub type VertexCount = u32;\n\/\/\/ Draw number of instances\npub type InstanceCount = u32;\n\/\/\/ Number of vertices in a patch\npub type PatchSize = u8;\n\n\/\/\/ Slot for an attribute.\npub type AttributeSlot = u8;\n\/\/\/ Slot for a constant buffer object.\npub type ConstantBufferSlot = u8;\n\/\/\/ Slot for a shader resource view.\npub type ResourceViewSlot = u8;\n\/\/\/ Slot for an unordered access object.\npub type UnorderedViewSlot = u8;\n\/\/\/ Slot for an active color buffer.\npub type ColorSlot = u8;\n\/\/\/ Slot for a sampler.\npub type SamplerSlot = u8;\n\nmacro_rules! define_shaders {\n ($($name:ident),+) => {$(\n #[allow(missing_docs)]\n #[derive(Clone, Debug, Eq, Hash, PartialEq)]\n pub struct $name<R: Resources>(handle::Shader<R>);\n impl<R: Resources> $name<R> {\n #[allow(missing_docs)]\n pub fn reference(&self, man: &mut handle::Manager<R>) -> &R::Shader {\n man.ref_shader(&self.0)\n }\n }\n )+}\n}\n\ndefine_shaders!(VertexShader, HullShader, DomainShader, GeometryShader, PixelShader);\n\n\/\/\/ A complete set of shaders to link a program.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub enum ShaderSet<R: Resources> {\n \/\/\/ Simple program: Vs-Ps\n Simple(VertexShader<R>, PixelShader<R>),\n \/\/\/ Geometry shader programs: Vs-Gs-Ps\n Geometry(VertexShader<R>, GeometryShader<R>, PixelShader<R>),\n \/\/\/ Tessellated TODO: Tessellated, TessellatedGeometry, TransformFeedback\n Tessellated(VertexShader<R>, HullShader<R>, DomainShader<R>, PixelShader<R>),\n\n}\n\nimpl<R: Resources> ShaderSet<R> {\n \/\/\/ Return the aggregated stage usage for the set.\n pub fn get_usage(&self) -> shade::Usage {\n match self {\n &ShaderSet::Simple(..) => shade::VERTEX | shade::PIXEL,\n &ShaderSet::Geometry(..) => shade::VERTEX | shade::GEOMETRY | shade::PIXEL,\n &ShaderSet::Tessellated(..) 
=> shade::VERTEX | shade::HULL | shade::DOMAIN | shade::PIXEL,\n }\n }\n}\n\n\/\/TODO: use the appropriate units for max vertex count, etc\n\/\/\/ Features that the device supports.\n#[derive(Copy, Clone, Debug)]\n#[allow(missing_docs)] \/\/ pretty self-explanatory fields!\npub struct Capabilities {\n pub max_vertex_count: usize,\n pub max_index_count: usize,\n pub max_texture_size: usize,\n pub max_patch_size: usize,\n\n pub instance_base_supported: bool,\n pub instance_call_supported: bool,\n pub instance_rate_supported: bool,\n pub vertex_base_supported: bool,\n pub srgb_color_supported: bool,\n pub constant_buffer_supported: bool,\n pub unordered_access_view_supported: bool,\n pub separate_blending_slots_supported: bool,\n}\n\n\/\/\/ Describes what geometric primitives are created from vertex data.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\n#[repr(u8)]\npub enum Primitive {\n \/\/\/ Each vertex represents a single point.\n PointList,\n \/\/\/ Each pair of vertices represent a single line segment. For example, with `[a, b, c, d,\n \/\/\/ e]`, `a` and `b` form a line, `c` and `d` form a line, and `e` is discarded.\n LineList,\n \/\/\/ Every two consecutive vertices represent a single line segment. Visually forms a \"path\" of\n \/\/\/ lines, as they are all connected. For example, with `[a, b, c]`, `a` and `b` form a line\n \/\/\/ line, and `b` and `c` form a line.\n LineStrip,\n \/\/\/ Each triplet of vertices represent a single triangle. For example, with `[a, b, c, d, e]`,\n \/\/\/ `a`, `b`, and `c` form a triangle, `d` and `e` are discarded.\n TriangleList,\n \/\/\/ Every three consecutive vertices represent a single triangle. For example, with `[a, b, c,\n \/\/\/ d]`, `a`, `b`, and `c` form a triangle, and `b`, `c`, and `d` form a triangle.\n TriangleStrip,\n \/\/\/ Each quadtruplet of vertices represent a single line segment with adjacency information.\n \/\/\/ For example, with `[a, b, c, d]`, `b` and `c` form a line, and `a` and `d` are the adjacent\n \/\/\/ vertices.\n LineListAdjacency,\n \/\/\/ Every four consecutive vertices represent a single line segment with adjacency information.\n \/\/\/ For example, with `[a, b, c, d, e]`, `[a, b, c, d]` form a line segment with adjacency, and\n \/\/\/ `[b, c, d, e]` form a line segment with adjacency.\n LineStripAdjacency,\n \/\/\/ Each sextuplet of vertices represent a single traingle with adjacency information. For\n \/\/\/ example, with `[a, b, c, d, e, f]`, `a`, `c`, and `e` form a traingle, and `b`, `d`, and\n \/\/\/ `f` are the adjacent vertices, where `b` is adjacent to the edge formed by `a` and `c`, `d`\n \/\/\/ is adjacent to the edge `c` and `e`, and `f` is adjacent to the edge `e` and `a`.\n TriangleListAdjacency,\n \/\/\/ Every even-numbered vertex (every other starting from the first) represents an additional\n \/\/\/ vertex for the triangle strip, while odd-numbered vertices (every other starting from the\n \/\/\/ second) represent adjacent vertices. 
For example, with `[a, b, c, d, e, f, g, h]`, `[a, c,\n \/\/\/ e, g]` form a triangle strip, and `[b, d, f, h]` are the adjacent vertices, where `b`, `d`,\n \/\/\/ and `f` are adjacent to the first triangle in the strip, and `d`, `f`, and `h` are adjacent\n \/\/\/ to the second.\n TriangleStripAdjacency,\n \/\/\/ Patch list,\n \/\/\/ used with shaders capable of producing primitives on their own (tessellation)\n PatchList(PatchSize),\n}\n\n\/\/\/ A type of each index value in the slice's index buffer\n#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Copy, Clone, Debug)]\n#[allow(missing_docs)]\n#[repr(u8)]\npub enum IndexType {\n U16,\n U32,\n}\n\n\/\/\/ Different types of a specific API. \n#[allow(missing_docs)]\npub trait Resources: Clone + Hash + Debug + Eq + PartialEq + Any {\n type Buffer: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type Shader: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Program: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type PipelineStateObject: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Texture: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type ShaderResourceView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type UnorderedAccessView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type RenderTargetView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type DepthStencilView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Sampler: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type Fence: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Mapping: Hash + Debug + Eq + PartialEq + Any + Send + Sync + mapping::Gate<Self>;\n}\n\n\/\/\/ A `Device` is responsible for submitting `CommandBuffer`s to the GPU. \npub trait Device: Sized {\n \/\/\/ Associated `Resources` type.\n type Resources: Resources;\n \/\/\/ Associated `CommandBuffer` type. Every `Device` type can only work with one `CommandBuffer`\n \/\/\/ type.\n type CommandBuffer: command::Buffer<Self::Resources>;\n\n \/\/\/ Returns the capabilities of this `Ðevice`.\n fn get_capabilities(&self) -> &Capabilities;\n\n \/\/\/ Pin everything from this handle manager to live for a frame.\n fn pin_submitted_resources(&mut self, &handle::Manager<Self::Resources>);\n\n \/\/\/ Submits a `CommandBuffer` to the GPU for execution.\n\n fn submit(&mut self, &mut Self::CommandBuffer,\n access: &command::AccessInfo<Self::Resources>);\n\n \/\/\/ Submits a `CommandBuffer` to the GPU for execution.\n \/\/\/ returns a fence that is signaled after the GPU has executed all commands\n fn fenced_submit(&mut self,\n &mut Self::CommandBuffer,\n access: &command::AccessInfo<Self::Resources>,\n after: Option<handle::Fence<Self::Resources>>)\n -> handle::Fence<Self::Resources>;\n\n \/\/\/ Stalls the current thread until the fence is satisfied\n fn wait_fence(&mut self, &handle::Fence<Self::Resources>);\n\n \/\/\/ Cleanup unused resources. This should be called between frames. 
\n fn cleanup(&mut self);\n}\n\n\n\/\/\/ An `Instance` holds per-application state for a specific backend\npub trait Instance {\n \/\/\/ Associated `Adapter` type.\n type Adapter: Adapter;\n \/\/\/ Associated `Surface` type.\n type Surface: Surface;\n\n \/\/\/ Associated native `Window` type.\n type Window;\n\n \/\/\/ Instantiate a new `Instance`, this is our entry point for applications\n fn create() -> Self;\n\n \/\/\/ Enumerate all available adapters supporting this backend \n fn enumerate_adapters(&self) -> Vec<Self::Adapter>;\n\n \/\/\/ Create a new surface from a native window.\n fn create_surface(&self, window: &Self::Window) -> Self::Surface;\n}\n\n\/\/\/ Represents a physical or virtual device, which is capable of running the backend.\npub trait Adapter {\n \/\/\/ Associated `CommandQueue` type.\n type CommandQueue: CommandQueue;\n \/\/\/ Associated `Device` type.\n type Device: Device;\n \/\/\/ Associated `QueueFamily` type.\n type QueueFamily: QueueFamily;\n\n \/\/\/ Create a new device and command queues.\n fn open<'a, I>(&self, queue_descs: I) -> (Self::Device, Vec<Self::CommandQueue>)\n where I: Iterator<Item=(&'a Self::QueueFamily, u32)>;\n\n \/\/\/ Get the `AdapterInfo` for this adapater.\n fn get_info(&self) -> &AdapterInfo;\n\n \/\/\/ Return the supported queue families for this adapter.\n fn get_queue_families(&self) -> Iter<Self::QueueFamily>;\n}\n\n\/\/\/ Information about a backend adapater.\n#[derive(Clone, Debug)]\npub struct AdapterInfo {\n \/\/\/ Adapter name\n pub name: String,\n \/\/\/ Vendor PCI id of the adapter\n pub vendor: usize,\n \/\/\/ PCI id of the adapter\n pub device: usize,\n \/\/\/ The device is based on a software rasterizer\n pub software_rendering: bool,\n}\n\n\/\/\/ `QueueFamily` denotes a group of command queues provided by the backend\n\/\/\/ with the same properties\/type.\npub trait QueueFamily: 'static {\n \/\/\/ Associated `Surface` type.\n type Surface: Surface;\n\n \/\/\/ Check if the queue family supports presentation to a surface\n fn supports_present(&self, surface: &Self::Surface) -> bool;\n\n \/\/\/ Return the number of available queues of this family\n \/\/ TODO: some backends like d3d12 support infinite software queues (verify)\n fn num_queues(&self) -> u32;\n}\n\n\/\/\/ Dummy trait for command queues.\n\/\/\/ CommandBuffers will be later submitted to command queues instead of the device.\npub trait CommandQueue { }\n\n\/\/\/ A `Surface` abstracts the surface of a native window, which will be presented\npub trait Surface {\n \/\/\/ Associated `CommandQueue` type.\n type CommandQueue: CommandQueue;\n \/\/\/ Associated `SwapChain` type.\n type SwapChain: SwapChain;\n\n \/\/\/ Create a new swapchain from the current surface with an associated present queue.\n fn build_swapchain<T: format::RenderFormat>(&self, present_queue: &Self::CommandQueue)\n -> Self::SwapChain;\n}\n\n\/\/\/ Handle to a backbuffer of the swapchain.\npub struct Frame(usize);\n\nimpl Frame {\n #[doc(hidden)]\n pub fn new(id: usize) -> Self {\n Frame(id)\n }\n}\n\n\/\/\/ The `SwapChain` is the backend representation of the surface.\n\/\/\/ It consists of multiple buffers, which will be presented on the surface.\npub trait SwapChain {\n \/\/\/ Acquire a new frame for rendering. 
This needs to be called before presenting.\n fn acquire_frame(&mut self) -> Frame;\n\n \/\/\/ Present one acquired frame in FIFO order.\n fn present(&mut self);\n}\n<commit_msg>Refactor away the Instance trait and replace iterator with slice<commit_after>\/\/ Copyright 2015 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![deny(missing_docs)]\n\n\/\/! Low-level graphics abstraction for Rust. Mostly operates on data, not types.\n\/\/! Designed for use by libraries and higher-level abstractions only.\n\n#[macro_use]\nextern crate bitflags;\n#[macro_use]\nextern crate log;\nextern crate draw_state;\n\/\/extern crate num;\n\nuse std::fmt::Debug;\nuse std::hash::Hash;\nuse std::any::Any;\n\npub use draw_state::{state, target};\npub use self::factory::Factory;\n\npub mod buffer;\npub mod command;\npub mod dummy;\npub mod factory;\npub mod format;\npub mod handle;\npub mod mapping;\npub mod memory;\npub mod pso;\npub mod shade;\npub mod texture;\n\n\/\/\/ Compile-time maximum number of vertex attributes.\npub const MAX_VERTEX_ATTRIBUTES: usize = 16;\n\/\/\/ Compile-time maximum number of color targets.\npub const MAX_COLOR_TARGETS: usize = 4;\n\/\/\/ Compile-time maximum number of constant buffers.\npub const MAX_CONSTANT_BUFFERS: usize = 14;\n\/\/\/ Compile-time maximum number of shader resource views (SRV).\npub const MAX_RESOURCE_VIEWS: usize = 32;\n\/\/\/ Compile-time maximum number of unordered access views (UAV).\npub const MAX_UNORDERED_VIEWS: usize = 4;\n\/\/\/ Compile-time maximum number of samplers.\npub const MAX_SAMPLERS: usize = 16;\n\n\/\/\/ Draw vertex count.\npub type VertexCount = u32;\n\/\/\/ Draw number of instances\npub type InstanceCount = u32;\n\/\/\/ Number of vertices in a patch\npub type PatchSize = u8;\n\n\/\/\/ Slot for an attribute.\npub type AttributeSlot = u8;\n\/\/\/ Slot for a constant buffer object.\npub type ConstantBufferSlot = u8;\n\/\/\/ Slot for a shader resource view.\npub type ResourceViewSlot = u8;\n\/\/\/ Slot for an unordered access object.\npub type UnorderedViewSlot = u8;\n\/\/\/ Slot for an active color buffer.\npub type ColorSlot = u8;\n\/\/\/ Slot for a sampler.\npub type SamplerSlot = u8;\n\nmacro_rules! 
define_shaders {\n ($($name:ident),+) => {$(\n #[allow(missing_docs)]\n #[derive(Clone, Debug, Eq, Hash, PartialEq)]\n pub struct $name<R: Resources>(handle::Shader<R>);\n impl<R: Resources> $name<R> {\n #[allow(missing_docs)]\n pub fn reference(&self, man: &mut handle::Manager<R>) -> &R::Shader {\n man.ref_shader(&self.0)\n }\n }\n )+}\n}\n\ndefine_shaders!(VertexShader, HullShader, DomainShader, GeometryShader, PixelShader);\n\n\/\/\/ A complete set of shaders to link a program.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub enum ShaderSet<R: Resources> {\n \/\/\/ Simple program: Vs-Ps\n Simple(VertexShader<R>, PixelShader<R>),\n \/\/\/ Geometry shader programs: Vs-Gs-Ps\n Geometry(VertexShader<R>, GeometryShader<R>, PixelShader<R>),\n \/\/\/ Tessellated TODO: Tessellated, TessellatedGeometry, TransformFeedback\n Tessellated(VertexShader<R>, HullShader<R>, DomainShader<R>, PixelShader<R>),\n\n}\n\nimpl<R: Resources> ShaderSet<R> {\n \/\/\/ Return the aggregated stage usage for the set.\n pub fn get_usage(&self) -> shade::Usage {\n match self {\n &ShaderSet::Simple(..) => shade::VERTEX | shade::PIXEL,\n &ShaderSet::Geometry(..) => shade::VERTEX | shade::GEOMETRY | shade::PIXEL,\n &ShaderSet::Tessellated(..) => shade::VERTEX | shade::HULL | shade::DOMAIN | shade::PIXEL,\n }\n }\n}\n\n\/\/TODO: use the appropriate units for max vertex count, etc\n\/\/\/ Features that the device supports.\n#[derive(Copy, Clone, Debug)]\n#[allow(missing_docs)] \/\/ pretty self-explanatory fields!\npub struct Capabilities {\n pub max_vertex_count: usize,\n pub max_index_count: usize,\n pub max_texture_size: usize,\n pub max_patch_size: usize,\n\n pub instance_base_supported: bool,\n pub instance_call_supported: bool,\n pub instance_rate_supported: bool,\n pub vertex_base_supported: bool,\n pub srgb_color_supported: bool,\n pub constant_buffer_supported: bool,\n pub unordered_access_view_supported: bool,\n pub separate_blending_slots_supported: bool,\n}\n\n\/\/\/ Describes what geometric primitives are created from vertex data.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\n#[repr(u8)]\npub enum Primitive {\n \/\/\/ Each vertex represents a single point.\n PointList,\n \/\/\/ Each pair of vertices represent a single line segment. For example, with `[a, b, c, d,\n \/\/\/ e]`, `a` and `b` form a line, `c` and `d` form a line, and `e` is discarded.\n LineList,\n \/\/\/ Every two consecutive vertices represent a single line segment. Visually forms a \"path\" of\n \/\/\/ lines, as they are all connected. For example, with `[a, b, c]`, `a` and `b` form a line\n \/\/\/ line, and `b` and `c` form a line.\n LineStrip,\n \/\/\/ Each triplet of vertices represent a single triangle. For example, with `[a, b, c, d, e]`,\n \/\/\/ `a`, `b`, and `c` form a triangle, `d` and `e` are discarded.\n TriangleList,\n \/\/\/ Every three consecutive vertices represent a single triangle. 
For example, with `[a, b, c,\n \/\/\/ d]`, `a`, `b`, and `c` form a triangle, and `b`, `c`, and `d` form a triangle.\n TriangleStrip,\n \/\/\/ Each quadtruplet of vertices represent a single line segment with adjacency information.\n \/\/\/ For example, with `[a, b, c, d]`, `b` and `c` form a line, and `a` and `d` are the adjacent\n \/\/\/ vertices.\n LineListAdjacency,\n \/\/\/ Every four consecutive vertices represent a single line segment with adjacency information.\n \/\/\/ For example, with `[a, b, c, d, e]`, `[a, b, c, d]` form a line segment with adjacency, and\n \/\/\/ `[b, c, d, e]` form a line segment with adjacency.\n LineStripAdjacency,\n \/\/\/ Each sextuplet of vertices represent a single traingle with adjacency information. For\n \/\/\/ example, with `[a, b, c, d, e, f]`, `a`, `c`, and `e` form a traingle, and `b`, `d`, and\n \/\/\/ `f` are the adjacent vertices, where `b` is adjacent to the edge formed by `a` and `c`, `d`\n \/\/\/ is adjacent to the edge `c` and `e`, and `f` is adjacent to the edge `e` and `a`.\n TriangleListAdjacency,\n \/\/\/ Every even-numbered vertex (every other starting from the first) represents an additional\n \/\/\/ vertex for the triangle strip, while odd-numbered vertices (every other starting from the\n \/\/\/ second) represent adjacent vertices. For example, with `[a, b, c, d, e, f, g, h]`, `[a, c,\n \/\/\/ e, g]` form a triangle strip, and `[b, d, f, h]` are the adjacent vertices, where `b`, `d`,\n \/\/\/ and `f` are adjacent to the first triangle in the strip, and `d`, `f`, and `h` are adjacent\n \/\/\/ to the second.\n TriangleStripAdjacency,\n \/\/\/ Patch list,\n \/\/\/ used with shaders capable of producing primitives on their own (tessellation)\n PatchList(PatchSize),\n}\n\n\/\/\/ A type of each index value in the slice's index buffer\n#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Copy, Clone, Debug)]\n#[allow(missing_docs)]\n#[repr(u8)]\npub enum IndexType {\n U16,\n U32,\n}\n\n\/\/\/ Different types of a specific API. \n#[allow(missing_docs)]\npub trait Resources: Clone + Hash + Debug + Eq + PartialEq + Any {\n type Buffer: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type Shader: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Program: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type PipelineStateObject: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Texture: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type ShaderResourceView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type UnorderedAccessView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type RenderTargetView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type DepthStencilView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Sampler: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n type Fence: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n type Mapping: Hash + Debug + Eq + PartialEq + Any + Send + Sync + mapping::Gate<Self>;\n}\n\n\/\/\/ A `Device` is responsible for submitting `CommandBuffer`s to the GPU. \npub trait Device: Sized {\n \/\/\/ Associated `Resources` type.\n type Resources: Resources;\n \/\/\/ Associated `CommandBuffer` type. 
Every `Device` type can only work with one `CommandBuffer`\n \/\/\/ type.\n type CommandBuffer: command::Buffer<Self::Resources>;\n\n \/\/\/ Returns the capabilities of this `Ðevice`.\n fn get_capabilities(&self) -> &Capabilities;\n\n \/\/\/ Pin everything from this handle manager to live for a frame.\n fn pin_submitted_resources(&mut self, &handle::Manager<Self::Resources>);\n\n \/\/\/ Submits a `CommandBuffer` to the GPU for execution.\n\n fn submit(&mut self, &mut Self::CommandBuffer,\n access: &command::AccessInfo<Self::Resources>);\n\n \/\/\/ Submits a `CommandBuffer` to the GPU for execution.\n \/\/\/ returns a fence that is signaled after the GPU has executed all commands\n fn fenced_submit(&mut self,\n &mut Self::CommandBuffer,\n access: &command::AccessInfo<Self::Resources>,\n after: Option<handle::Fence<Self::Resources>>)\n -> handle::Fence<Self::Resources>;\n\n \/\/\/ Stalls the current thread until the fence is satisfied\n fn wait_fence(&mut self, &handle::Fence<Self::Resources>);\n\n \/\/\/ Cleanup unused resources. This should be called between frames. \n fn cleanup(&mut self);\n}\n\n\/\/\/ Represents a physical or virtual device, which is capable of running the backend.\npub trait Adapter: Sized {\n \/\/\/ Associated `CommandQueue` type.\n type CommandQueue: CommandQueue;\n \/\/\/ Associated `Device` type.\n type Device: Device;\n \/\/\/ Associated `QueueFamily` type.\n type QueueFamily: QueueFamily;\n\n \/\/\/ Enumerate all available adapters supporting this backend \n fn enumerate_adapters() -> Vec<Self>;\n\n \/\/\/ Create a new device and command queues.\n fn open<'a, I>(&self, queue_descs: I) -> (Self::Device, Vec<Self::CommandQueue>)\n where I: Iterator<Item=(&'a Self::QueueFamily, u32)>;\n\n \/\/\/ Get the `AdapterInfo` for this adapater.\n fn get_info(&self) -> &AdapterInfo;\n\n \/\/\/ Return the supported queue families for this adapter.\n fn get_queue_families(&self) -> &[Self::QueueFamily];\n}\n\n\/\/\/ Information about a backend adapater.\n#[derive(Clone, Debug)]\npub struct AdapterInfo {\n \/\/\/ Adapter name\n pub name: String,\n \/\/\/ Vendor PCI id of the adapter\n pub vendor: usize,\n \/\/\/ PCI id of the adapter\n pub device: usize,\n \/\/\/ The device is based on a software rasterizer\n pub software_rendering: bool,\n}\n\n\/\/\/ `QueueFamily` denotes a group of command queues provided by the backend\n\/\/\/ with the same properties\/type.\npub trait QueueFamily: 'static {\n \/\/\/ Associated `Surface` type.\n type Surface: Surface;\n\n \/\/\/ Check if the queue family supports presentation to a surface\n fn supports_present(&self, surface: &Self::Surface) -> bool;\n\n \/\/\/ Return the number of available queues of this family\n \/\/ TODO: some backends like d3d12 support infinite software queues (verify)\n fn num_queues(&self) -> u32;\n}\n\n\/\/\/ Dummy trait for command queues.\n\/\/\/ CommandBuffers will be later submitted to command queues instead of the device.\npub trait CommandQueue { }\n\n\/\/\/ A `Surface` abstracts the surface of a native window, which will be presented\npub trait Surface {\n \/\/\/ Associated `CommandQueue` type.\n type CommandQueue: CommandQueue;\n \/\/\/ Associated `SwapChain` type.\n type SwapChain: SwapChain;\n \/\/\/ Associated native `Window` type.\n type Window;\n\n \/\/\/ Create a new surface from a native window.\n fn from_window(window: &Self::Window) -> Self;\n\n \/\/\/ Create a new swapchain from the current surface with an associated present queue.\n fn build_swapchain<T: format::RenderFormat>(&self, present_queue: 
&Self::CommandQueue)\n -> Self::SwapChain;\n}\n\n\/\/\/ Handle to a backbuffer of the swapchain.\npub struct Frame(usize);\n\nimpl Frame {\n #[doc(hidden)]\n pub fn new(id: usize) -> Self {\n Frame(id)\n }\n}\n\n\/\/\/ The `SwapChain` is the backend representation of the surface.\n\/\/\/ It consists of multiple buffers, which will be presented on the surface.\npub trait SwapChain {\n \/\/\/ Acquire a new frame for rendering. This needs to be called before presenting.\n fn acquire_frame(&mut self) -> Frame;\n\n \/\/\/ Present one acquired frame in FIFO order.\n fn present(&mut self);\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n#[derive(Copy, Clone)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else 
{\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n use self::Mode::*;\n match (mode, key_event.scancode) {\n (Insert, K_ESC) => mode = Normal,\n (Insert, K_BKSP) => if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n },\n (Insert, K_DEL) => if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len() - 1];\n },\n (_, K_F5) => self.reload(&mut window),\n (_, K_F6) => self.save(&mut window),\n (_, K_HOME) => self.offset = 0,\n (_, K_UP) => self.up(),\n (_, K_LEFT) => self.left(),\n (_, K_RIGHT) => self.right(),\n (_, K_END) => self.offset = self.string.len(),\n (_, K_DOWN) => self.down(),\n (m, _) => match (m, key_event.character) {\n (Normal, 'i') => mode = Insert,\n (Normal, 'h') => self.left(),\n (Normal, 'l') => self.right(),\n (Normal, 'k') => self.up(),\n (Normal, 'j') => self.down(),\n (Normal, 'G') => self.offset = self.string.len(),\n (Normal, 'a') => {\n self.right();\n mode = Insert;\n },\n (Insert, '\\0') => (),\n (Insert, _) => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n },\n _ => {},\n }\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Add x and X commands to the text editor<commit_after>use redox::*;\n\n#[derive(Copy, Clone)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset + 1].to_string() +\n &self.string[self.offset + 1 .. self.string.len() - 1];\n }\n }\n\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the 
right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n use self::Mode::*;\n match (mode, key_event.scancode) {\n (Insert, K_ESC) => mode = Normal,\n (Insert, K_BKSP) => self.backspace(&mut window),\n (Insert, K_DEL) => self.delete(&mut window),\n (_, K_F5) => self.reload(&mut window),\n (_, K_F6) => self.save(&mut window),\n (_, K_HOME) => self.offset = 0,\n (_, K_UP) => self.up(),\n (_, K_LEFT) => self.left(),\n (_, K_RIGHT) => self.right(),\n (_, K_END) => self.offset = self.string.len(),\n (_, K_DOWN) => self.down(),\n (m, _) => match (m, key_event.character) {\n (Normal, 'i') => mode = Insert,\n (Normal, 'h') => self.left(),\n (Normal, 'l') => self.right(),\n (Normal, 'k') => self.up(),\n (Normal, 'j') => self.down(),\n (Normal, 'G') => self.offset = self.string.len(),\n (Normal, 'a') => {\n self.right();\n mode = Insert;\n },\n (Normal, 'x') => self.delete(&mut window),\n (Normal, 'X') => self.backspace(&mut window),\n (Insert, '\\0') => (),\n (Insert, _) => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n },\n _ => {},\n }\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixed up exit to work as the other builtins<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`which` builtin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>RendererVk::flush contained the main loop from the vulkano triangle example, which obviously needed to disappear<commit_after><|endoftext|>"} {"text":"<commit_before>#[doc = \"Random number generation\"];\n\nexport rng, extensions;\n\nenum rctx {}\n\n#[abi = \"cdecl\"]\nnative mod rustrt {\n fn rand_new() -> *rctx;\n fn rand_next(c: *rctx) -> u32;\n fn rand_free(c: *rctx);\n}\n\n#[doc = \"A random number generator\"]\niface rng {\n #[doc = \"Return the next random integer\"]\n fn next() -> u32;\n}\n\n#[doc = \"Extension methods for random number generators\"]\nimpl extensions for rng {\n\n #[doc = \"Return a random float\"]\n fn gen_float() -> float {\n let u1 = self.next() as float;\n let u2 = self.next() as float;\n let u3 = self.next() as float;\n let scale = u32::max_value as float;\n ret ((u1 \/ scale + u2) \/ scale + u3) \/ scale;\n }\n\n #[doc = \"Return a random string composed of A-Z, a-z, 0-9.\"]\n fn gen_str(len: uint) -> str {\n let charset = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\" +\n \"abcdefghijklmnopqrstuvwxyz\" +\n \"0123456789\";\n let mut s = \"\";\n let mut i = 0u;\n while (i < len) {\n let n = self.next() as uint % charset.len();\n s = s + str::from_char(str::char_at(charset, n));\n i += 1u;\n }\n s\n }\n\n #[doc = \"Return a random byte string.\"]\n fn gen_bytes(len: uint) -> [u8] {\n let mut v = [];\n let mut i = 0u;\n while i < len {\n let n = self.next() as uint;\n v += [(n % (u8::max_value as uint)) as u8];\n i += 1u;\n }\n v\n }\n}\n\n#[doc = \"Create a random number generator\"]\nfn rng() -> rng {\n resource rand_res(c: *rctx) { rustrt::rand_free(c); }\n\n impl of rng for @rand_res {\n fn next() -> u32 { ret rustrt::rand_next(**self); }\n }\n\n @rand_res(rustrt::rand_new()) as rng\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn test() {\n let r1 = rand::rng();\n log(debug, r1.next());\n log(debug, r1.next());\n {\n let r2 = rand::rng();\n log(debug, r1.next());\n log(debug, r2.next());\n log(debug, r1.next());\n log(debug, r1.next());\n log(debug, r2.next());\n log(debug, r2.next());\n log(debug, r1.next());\n log(debug, r1.next());\n log(debug, r1.next());\n log(debug, r2.next());\n log(debug, r2.next());\n log(debug, r2.next());\n }\n log(debug, r1.next());\n log(debug, r1.next());\n }\n\n #[test]\n fn gen_float() {\n let r = rand::rng();\n let a = r.gen_float();\n let b = r.gen_float();\n log(debug, (a, b));\n }\n\n #[test]\n fn gen_str() {\n let r = rand::rng();\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n assert r.gen_str(0u).len() == 0u;\n assert r.gen_str(10u).len() == 10u;\n assert r.gen_str(16u).len() == 16u;\n }\n\n #[test]\n fn gen_bytes() {\n let r = rand::rng();\n assert r.gen_bytes(0u).len() == 0u;\n assert r.gen_bytes(10u).len() == 10u;\n assert r.gen_bytes(16u).len() == 16u;\n }\n}\n\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<commit_msg>add a bunch more 
rand::rng extension methods - one for each primitive type and also some choose\/shuffle functions (for https:\/\/github.com\/mozilla\/rust\/issues\/2379)<commit_after>#[doc = \"Random number generation\"];\n\nexport rng, weighted, extensions;\n\nenum rctx {}\n\n#[abi = \"cdecl\"]\nnative mod rustrt {\n fn rand_new() -> *rctx;\n fn rand_next(c: *rctx) -> u32;\n fn rand_free(c: *rctx);\n}\n\n#[doc = \"A random number generator\"]\niface rng {\n #[doc = \"Return the next random integer\"]\n fn next() -> u32;\n}\n\n#[doc = \"A value with a particular weight compared to other values\"]\ntype weighted<T> = { weight: uint, item: T };\n\n#[doc = \"Extension methods for random number generators\"]\nimpl extensions for rng {\n\n #[doc = \"Return a random int\"]\n fn gen_int() -> int {\n self.gen_i64() as int\n }\n\n #[doc = \"Return an int randomly chosen from the range [start, end], \\\n failing if start > end\"]\n fn gen_int_from(start: int, end: int) -> int {\n assert start <= end;\n start + int::abs(self.gen_int() % (end - start + 1))\n }\n\n #[doc = \"Return a random i8\"]\n fn gen_i8() -> i8 {\n self.next() as i8\n }\n\n #[doc = \"Return a random i16\"]\n fn gen_i16() -> i16 {\n self.next() as i16\n }\n\n #[doc = \"Return a random i32\"]\n fn gen_i32() -> i32 {\n self.next() as i32\n }\n\n #[doc = \"Return a random i64\"]\n fn gen_i64() -> i64 {\n (self.next() as i64 << 32) | self.next() as i64\n }\n\n #[doc = \"Return a random uint\"]\n fn gen_uint() -> uint {\n self.gen_u64() as u64\n }\n\n #[doc = \"Return a uint randomly chosen from the range [start, end], \\\n failing if start > end\"]\n fn gen_uint_from(start: uint, end: uint) -> uint {\n assert start <= end;\n start + (self.gen_uint() % (end - start + 1u))\n }\n\n #[doc = \"Return a random u8\"]\n fn gen_u8() -> u8 {\n self.next() as u8\n }\n\n #[doc = \"Return a random u16\"]\n fn gen_u16() -> u16 {\n self.next() as u16\n }\n\n #[doc = \"Return a random u32\"]\n fn gen_u32() -> u32 {\n self.next()\n }\n\n #[doc = \"Return a random u64\"]\n fn gen_u64() -> u64 {\n (self.next() as u64 << 32) | self.next() as u64\n }\n\n #[doc = \"Return a random float\"]\n fn gen_float() -> float {\n self.gen_f64() as float\n }\n\n #[doc = \"Return a random f32\"]\n fn gen_f32() -> f32 {\n self.gen_f64() as f32\n }\n\n #[doc = \"Return a random f64\"]\n fn gen_f64() -> f64 {\n let u1 = self.next() as f64;\n let u2 = self.next() as f64;\n let u3 = self.next() as f64;\n let scale = u32::max_value as f64;\n ret ((u1 \/ scale + u2) \/ scale + u3) \/ scale;\n }\n\n #[doc = \"Return a random char\"]\n fn gen_char() -> char {\n self.next() as char\n }\n\n #[doc = \"Return a char randomly chosen from chars, failing if chars is \\\n empty\"]\n fn gen_char_from(chars: str) -> char {\n assert !chars.is_empty();\n self.choose(str::chars(chars))\n }\n\n #[doc = \"Return a random bool\"]\n fn gen_bool() -> bool {\n self.next() & 1u32 == 1u32\n }\n\n #[doc = \"Return a bool with a 1 in n chance of true\"]\n fn gen_weighted_bool(n: uint) -> bool {\n if n == 0u {\n true\n } else {\n self.gen_uint_from(1u, n) == 1u\n }\n }\n\n #[doc = \"Return a random string of the specified length composed of A-Z, \\\n a-z, 0-9\"]\n fn gen_str(len: uint) -> str {\n let charset = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\" +\n \"abcdefghijklmnopqrstuvwxyz\" +\n \"0123456789\";\n let mut s = \"\";\n let mut i = 0u;\n while (i < len) {\n s = s + str::from_char(self.gen_char_from(charset));\n i += 1u;\n }\n s\n }\n\n #[doc = \"Return a random byte string of the specified length\"]\n fn gen_bytes(len: 
uint) -> [u8] {\n vec::from_fn(len) {|_i|\n self.gen_u8()\n }\n }\n\n #[doc = \"Choose an item randomly, failing if values is empty\"]\n fn choose<T:copy>(values: [T]) -> T {\n self.choose_option(values).get()\n }\n\n #[doc = \"Choose some(item) randomly, returning none if values is empty\"]\n fn choose_option<T:copy>(values: [T]) -> option<T> {\n if values.is_empty() {\n none\n } else {\n some(values[self.gen_uint_from(0u, values.len() - 1u)])\n }\n }\n\n #[doc = \"Choose an item respecting the relative weights, failing if \\\n the sum of the weights is 0\"]\n fn choose_weighted<T: copy>(v : [weighted<T>]) -> T {\n self.choose_weighted_option(v).get()\n }\n\n #[doc = \"Choose some(item) respecting the relative weights, returning \\\n none if the sum of the weights is 0\"]\n fn choose_weighted_option<T:copy>(v: [weighted<T>]) -> option<T> {\n let mut total = 0u;\n for v.each {|item|\n total += item.weight;\n }\n if total == 0u {\n ret none;\n }\n let chosen = self.gen_uint_from(0u, total - 1u);\n let mut so_far = 0u;\n for v.each {|item|\n so_far += item.weight;\n if so_far > chosen {\n ret some(item.item);\n }\n }\n unreachable();\n }\n\n #[doc = \"Return a vec containing copies of the items, in order, where \\\n the weight of the item determines how many copies there are\"]\n fn weighted_vec<T:copy>(v: [weighted<T>]) -> [T] {\n let mut r = [];\n for v.each {|item|\n uint::range(0u, item.weight) {|_i|\n r += [item.item];\n }\n }\n r\n }\n\n #[doc = \"Shuffle a vec\"]\n fn shuffle<T:copy>(values: [T]) -> [T] {\n let mut m = vec::to_mut(values);\n self.shuffle_mut(m);\n ret vec::from_mut(m);\n }\n\n #[doc = \"Shuffle a mutable vec in place\"]\n fn shuffle_mut<T>(&values: [mut T]) {\n let mut i = values.len();\n while i >= 2u {\n \/\/ invariant: elements with index >= i have been locked in place.\n i -= 1u;\n \/\/ lock element i in place.\n vec::swap(values, i, self.gen_uint_from(0u, i));\n }\n }\n\n}\n\n#[doc = \"Create a random number generator\"]\nfn rng() -> rng {\n resource rand_res(c: *rctx) { rustrt::rand_free(c); }\n\n impl of rng for @rand_res {\n fn next() -> u32 { ret rustrt::rand_next(**self); }\n }\n\n @rand_res(rustrt::rand_new()) as rng\n}\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn gen_int_from() {\n let r = rand::rng();\n let a = r.gen_int_from(-3, 42);\n assert a >= -3 && a <= 42;\n assert r.gen_int_from(0, 0) == 0;\n assert r.gen_int_from(-12, -12) == -12;\n }\n\n #[test]\n #[should_fail]\n fn gen_int_from_fail() {\n rand::rng().gen_int_from(5, -2);\n }\n\n #[test]\n fn gen_uint_from() {\n let r = rand::rng();\n let a = r.gen_uint_from(3u, 42u);\n assert a >= 3u && a <= 42u;\n assert r.gen_uint_from(0u, 0u) == 0u;\n assert r.gen_uint_from(12u, 12u) == 12u;\n }\n\n #[test]\n #[should_fail]\n fn gen_uint_from_fail() {\n rand::rng().gen_uint_from(5u, 2u);\n }\n\n #[test]\n fn gen_float() {\n let r = rand::rng();\n let a = r.gen_float();\n let b = r.gen_float();\n log(debug, (a, b));\n }\n\n #[test]\n fn gen_weighted_bool() {\n let r = rand::rng();\n assert r.gen_weighted_bool(0u) == true;\n assert r.gen_weighted_bool(1u) == true;\n }\n\n #[test]\n fn gen_str() {\n let r = rand::rng();\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n log(debug, r.gen_str(10u));\n assert r.gen_str(0u).len() == 0u;\n assert r.gen_str(10u).len() == 10u;\n assert r.gen_str(16u).len() == 16u;\n }\n\n #[test]\n fn gen_bytes() {\n let r = rand::rng();\n assert r.gen_bytes(0u).len() == 0u;\n assert r.gen_bytes(10u).len() == 10u;\n assert r.gen_bytes(16u).len() == 16u;\n }\n\n #[test]\n 
fn choose() {\n let r = rand::rng();\n assert r.choose([1, 1, 1]) == 1;\n }\n\n #[test]\n fn choose_option() {\n let r = rand::rng();\n assert r.choose_option([]) == none::<int>;\n assert r.choose_option([1, 1, 1]) == some(1);\n }\n\n #[test]\n fn choose_weighted() {\n let r = rand::rng();\n assert r.choose_weighted([{weight: 1u, item: 42}]) == 42;\n assert r.choose_weighted([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == 43;\n }\n\n #[test]\n fn choose_weighted_option() {\n let r = rand::rng();\n assert r.choose_weighted_option([{weight: 1u, item: 42}]) == some(42);\n assert r.choose_weighted_option([\n {weight: 0u, item: 42},\n {weight: 1u, item: 43}\n ]) == some(43);\n assert r.choose_weighted_option([]) == none::<int>;\n }\n\n #[test]\n fn weighted_vec() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.weighted_vec([]) == empty;\n assert r.weighted_vec([\n {weight: 0u, item: 3u},\n {weight: 1u, item: 2u},\n {weight: 2u, item: 1u}\n ]) == [2u, 1u, 1u];\n }\n\n #[test]\n fn shuffle() {\n let r = rand::rng();\n let empty: [int] = [];\n assert r.shuffle([]) == empty;\n assert r.shuffle([1, 1, 1]) == [1, 1, 1];\n }\n}\n\n\n\/\/ Local Variables:\n\/\/ mode: rust;\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n 'outer: loop {\n println!(\"Entered the outer loop\");\n\n 'inner: loop {\n println!(\"Entered the inner loop\");\n\n \/\/ this would break the inner loop\n \/\/break;\n\n \/\/ this breaks the outer loop\n break 'outer;\n }\n\n println!(\"This point will never be reach\");\n }\n\n println!(\"Exited the outer loop\");\n}\n<commit_msg>loops: fixed typo<commit_after>fn main() {\n 'outer: loop {\n println!(\"Entered the outer loop\");\n\n 'inner: loop {\n println!(\"Entered the inner loop\");\n\n \/\/ this would break the inner loop\n \/\/break;\n\n \/\/ this breaks the outer loop\n break 'outer;\n }\n\n println!(\"This point will never be reached\");\n }\n\n println!(\"Exited the outer loop\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor 'beer-song': use functional style in 'sing'<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example<commit_after>extern crate turtle;\n\nuse std::fs::File;\nuse turtle::{TurtleRecorder, Turtle};\n\nfn main() {\n let mut t = TurtleRecorder::new();\n t.pendown();\n t.forward(100.0);\n t.right(90.0);\n t.forward(100.0);\n t.penup();\n t.forward(10.0);\n t.pendown();\n t.right(90.0);\n t.forward(100.0);\n t.right(90.0);\n t.forward(100.0);\n t.save_as_svg(&mut File::create(\"test.svg\").unwrap()).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add spi usage example<commit_after>#![no_std]\n\nextern crate cortex_m;\nextern crate stm32f30x_hal as hal;\nextern crate embedded_hal;\n#[macro_use(block)]\nextern crate nb;\n\n\/\/ TODO Remove this dependancy\nuse embedded_hal::spi::{Mode, Phase, Polarity};\n\nuse hal::prelude::*;\nuse hal::spi::Spi;\nuse hal::stm32f30x;\n\nfn main() {\n let p = stm32f30x::Peripherals::take().unwrap();\n\n let mut flash = p.FLASH.constrain();\n let mut rcc = p.RCC.constrain();\n let mut gpioa = p.GPIOA.split(&mut rcc.ahb);\n\n \/\/ clock configuration using the default settings (all clocks run at 8 MHz)\n let clocks = rcc.cfgr.freeze(&mut flash.acr);\n\n \/\/ Set up SPI\n let pa5 = gpioa\n .pa5\n .into_af5(&mut gpioa.moder, &mut gpioa.afrl); \/\/ SCK\n\n let pa6 = gpioa\n .pa6\n .into_af5(&mut gpioa.moder, &mut 
gpioa.afrl); \/\/ MISO\n\n let pa7 = gpioa\n .pa7\n .into_af5(&mut gpioa.moder, &mut gpioa.afrl); \/\/ MOSI\n\n let mode = Mode { polarity: Polarity::IdleLow, phase: Phase::CaptureOnFirstTransition };\n let mut spi = Spi::spi1(p.SPI1, (pa5, pa6, pa7), mode, 100.khz(), clocks, &mut rcc.apb2);\n\n \/\/ Send data\n block!(spi.send(b'$')).ok();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>start of smoke tests, check channel history works<commit_after>use slack_api as slack;\n\nuse std::env;\n\nfn get_setup(\n) -> Result<(String, impl slack::requests::SlackWebRequestSender), Box<dyn std::error::Error>> {\n \/\/ You can generate a legacy token to quickly test these apis\n \/\/ https:\/\/api.slack.com\/custom-integrations\/legacy-tokens\n let token = env::var(\"SLACK_API_TOKEN\").map_err(|_| \"SLACK_API_TOKEN env var must be set\")?;\n let client = slack::default_client().map_err(|_| \"Could not get default_client\")?;\n Ok((token, client))\n}\n\n#[test]\nfn smoke_test() -> Result<(), Box<dyn std::error::Error>> {\n let (_, client) = get_setup()?;\n\n let resp = slack::api::test(\n &client,\n &slack::api::TestRequest {\n error: Some(\"my_error\"),\n foo: Some(\"it's me, foo\"),\n },\n )\n .err()\n .ok_or(\"Expected error\")?;\n\n assert_eq!(format!(\"{:?}\", resp), \"Unknown(\\\"my_error\\\")\");\n Ok(())\n}\n\n#[test]\nfn smoke_channels() -> Result<(), Box<dyn std::error::Error>> {\n let (token, client) = get_setup()?;\n\n let all_channels = slack::channels::list(\n &client,\n &token,\n &slack::channels::ListRequest {\n ..slack::channels::ListRequest::default()\n },\n )?\n .channels\n .ok_or(\"Expected some channels\")?;\n\n assert!(all_channels.len() > 0);\n\n for channel in &all_channels[..10] {\n let channel_id = channel.id.as_ref().ok_or(\"expected channel id\")?;\n\n let _channel_info = slack::channels::info(\n &client,\n &token,\n &slack::channels::InfoRequest {\n channel: &channel_id,\n ..Default::default()\n },\n )?\n .channel\n .ok_or(\"Expected some channel\")?;\n\n let _channel_history = slack::channels::history(\n &client,\n &token,\n &slack::channels::HistoryRequest {\n channel: &channel_id,\n oldest: Some(1234567890.1234.into()),\n count: Some(1),\n ..Default::default()\n },\n )?\n .messages\n .ok_or(\"Expected some messages\")?;\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Return false from CanvasState::IsPointInPath for NaN\/infinite values<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make sure numbers are always little-endian when encoded<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Creating a gradient using color mixing<commit_after>\/\/! 
This example shows how you can use color mixing and pen size to produce a smooth gradient\n\nextern crate turtle;\n\nuse turtle::{Color, Turtle};\n\nfn main() {\n let mut turtle = Turtle::new();\n turtle.drawing_mut().set_title(\"Gradients!\");\n turtle.set_pen_size(200.0);\n\n turtle.pen_up();\n turtle.backward(250.0);\n turtle.pen_down();\n\n let red: Color = \"red\".into();\n let white: Color = \"white\".into();\n\n for i in 0..100+1 {\n turtle.set_pen_color(red.mix(white, i));\n turtle.forward(5.0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add validate example<commit_after>extern crate varlink;\n\nuse std::io;\nuse std::io::prelude::*;\nuse varlink::parser::Varlink;\nuse std::process::exit;\n\nfn main() {\n let mut buffer = String::new();\n if let Err(e) = io::stdin().read_to_string(&mut buffer) {\n println!(\"Error {:?}\", e);\n exit(1);\n }\n\n match Varlink::from_string(&buffer) {\n Ok(v) => {\n println!(\"Syntax check passed!\");\n println!(\"Interfaces:\");\n for k in v.interfaces.keys() {\n println!(\"\\t{}\", k);\n }\n exit(0);\n }\n Err(e) => {\n println!(\"{}\", e);\n exit(1);\n }\n };\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed OSX compilation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #26536 - frewsxcv:regression-test-22375, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait A<T: A<T>> {}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #28791 - apasel422:issue-25439, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Helper<'a, F: 'a>(&'a F);\n\nfn fix<F>(f: F) -> i32 where F: Fn(Helper<F>, i32) -> i32 {\n f(Helper(&f), 8)\n}\n\nfn main() {\n fix(|_, x| x);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ICH: Add test case for type alias definitions<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for `type` definitions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ We also test the ICH for `type` definitions exported in metadata. Same as\n\/\/ above, we want to make sure that the change between rev1 and rev2 also\n\/\/ results in a change of the ICH for the enum's metadata, and that it stays\n\/\/ the same between rev2 and rev3.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\n\n\/\/ Change type (primitive) -----------------------------------------------------\n#[cfg(cfail1)]\ntype ChangePrimitiveType = i32;\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangePrimitiveType = i64;\n\n\n\n\/\/ Change mutability -----------------------------------------------------------\n#[cfg(cfail1)]\ntype ChangeMutability = &'static i32;\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeMutability = &'static mut i32;\n\n\n\n\/\/ Change mutability -----------------------------------------------------------\n#[cfg(cfail1)]\ntype ChangeLifetime<'a> = (&'static i32, &'a i32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeLifetime<'a> = (&'a i32, &'a i32);\n\n\n\n\/\/ Change type (struct) -----------------------------------------------------------\nstruct Struct1;\nstruct Struct2;\n\n#[cfg(cfail1)]\ntype ChangeTypeStruct = Struct1;\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeTypeStruct = Struct2;\n\n\n\n\/\/ Change type (tuple) ---------------------------------------------------------\n#[cfg(cfail1)]\ntype ChangeTypeTuple = (u32, u64);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeTypeTuple = (u32, i64);\n\n\n\n\/\/ Change type (enum) ----------------------------------------------------------\nenum Enum1 {\n Var1,\n Var2,\n}\nenum Enum2 {\n Var1,\n Var2,\n}\n\n#[cfg(cfail1)]\ntype ChangeTypeEnum = Enum1;\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeTypeEnum = Enum2;\n\n\n\n\/\/ Add tuple field -------------------------------------------------------------\n#[cfg(cfail1)]\ntype AddTupleField = (i32, i64);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddTupleField = (i32, i64, i16);\n\n\n\n\/\/ Change nested tuple field ---------------------------------------------------\n#[cfg(cfail1)]\ntype ChangeNestedTupleField = (i32, (i64, 
i16));\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype ChangeNestedTupleField = (i32, (i64, i8));\n\n\n\n\/\/ Add type param --------------------------------------------------------------\n#[cfg(cfail1)]\ntype AddTypeParam<T1> = (T1, T1);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddTypeParam<T1, T2> = (T1, T2);\n\n\n\n\/\/ Add type param bound --------------------------------------------------------\n#[cfg(cfail1)]\ntype AddTypeParamBound<T1> = (T1, u32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddTypeParamBound<T1: Clone> = (T1, u32);\n\n\n\n\/\/ Add type param bound in where clause ----------------------------------------\n#[cfg(cfail1)]\ntype AddTypeParamBoundWhereClause<T1> where T1: Clone = (T1, u32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddTypeParamBoundWhereClause<T1> where T1: Clone+Copy = (T1, u32);\n\n\n\n\/\/ Add lifetime param ----------------------------------------------------------\n#[cfg(cfail1)]\ntype AddLifetimeParam<'a> = (&'a u32, &'a u32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddLifetimeParam<'a, 'b> = (&'a u32, &'b u32);\n\n\n\n\/\/ Add lifetime param bound ----------------------------------------------------\n#[cfg(cfail1)]\ntype AddLifetimeParamBound<'a, 'b> = (&'a u32, &'b u32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddLifetimeParamBound<'a, 'b: 'a> = (&'a u32, &'b u32);\n\n\n\n\/\/ Add lifetime param bound in where clause ------------------------------------\n#[cfg(cfail1)]\ntype AddLifetimeParamBoundWhereClause<'a, 'b, 'c>\nwhere 'b: 'a\n = (&'a u32, &'b u32, &'c u32);\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\ntype AddLifetimeParamBoundWhereClause<'a, 'b, 'c>\nwhere 'b: 'a,\n 'c: 'a\n = (&'a u32, &'b u32, &'c u32);\n\n\n\n\/\/ Change Trait Bound Indirectly -----------------------------------------------\ntrait ReferencedTrait1 {}\ntrait ReferencedTrait2 {}\n\nmod change_trait_bound_indirectly {\n #[cfg(cfail1)]\n use super::ReferencedTrait1 as Trait;\n #[cfg(not(cfail1))]\n use super::ReferencedTrait2 as Trait;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n type ChangeTraitBoundIndirectly<T: Trait> = (T, u32);\n}\n\n\n\n\/\/ Change Trait Bound Indirectly In Where Clause -------------------------------\nmod change_trait_bound_indirectly_in_where_clause {\n #[cfg(cfail1)]\n use super::ReferencedTrait1 as Trait;\n #[cfg(not(cfail1))]\n use super::ReferencedTrait2 as Trait;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n type ChangeTraitBoundIndirectly<T> 
where T : Trait = (T, u32);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add executable<commit_after>\nfn main() {\n println!(\"hello, world!\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Don't print debugger cursor if mode changed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #119<commit_after>#[crate_type = \"rlib\"];\n\nextern mod extra;\nextern mod math;\n\nuse std::iter::{AdditiveIterator, Filter, SkipWhile};\nuse extra::priority_queue::PriorityQueue;\nuse math::{arith, numconv};\n\npub static EXPECTED_ANSWER: &'static str = \"248155780267521\";\n\nstruct Power(uint, uint, uint);\n\nimpl Ord for Power {\n #[inline]\n fn lt(&self, other: &Power) -> bool {\n let Power(sn, sb, _) = *self;\n let Power(on, ob, _) = *other;\n sn > on || (sn == on && sb > ob)\n }\n}\n\nstruct Powers {\n queue: PriorityQueue<Power>\n}\n\nimpl Powers {\n #[inline]\n fn new() -> Powers {\n let mut queue = PriorityQueue::new();\n queue.push(Power(4, 2, 2));\n Powers { queue: queue }\n }\n}\n\nimpl Iterator<(uint, uint, uint)> for Powers {\n #[inline]\n fn next(&mut self) -> Option<(uint, uint, uint)> {\n let Power(n, b, e) = self.queue.pop();\n if b == 2 { self.queue.push(Power(n * b, b, e + 1)); }\n self.queue.push(Power(arith::pow(b + 1, e), b + 1, e));\n Some((n, b, e))\n }\n}\n\n#[inline]\nfn a<'a>() -> Filter<'a, (uint, uint, uint), SkipWhile<'a, (uint, uint, uint), Powers>> {\n Powers::new()\n .skip_while(|&(n, _b, _e)| n < 10)\n .filter(|&(n, b, _e)| numconv::to_digits(n, 10).sum() == b)\n}\n\npub fn solve() -> ~str {\n let (n, _b, _e) = a().nth(29).unwrap();\n n.to_str()\n}\n\n#[cfg(test)]\nmod test {\n use super::Powers;\n\n #[test]\n fn powers() {\n let mut it = Powers::new();\n assert_eq!(Some((4, 2, 2)), it.next());\n assert_eq!(Some((8, 2, 3)), it.next());\n assert_eq!(Some((9, 3, 2)), it.next());\n assert_eq!(Some((16, 2, 4)), it.next());\n assert_eq!(Some((16, 4, 2)), it.next());\n assert_eq!(Some((25, 5, 2)), it.next());\n assert_eq!(Some((27, 3, 3)), it.next());\n assert_eq!(Some((32, 2, 5)), it.next());\n assert_eq!(Some((36, 6, 2)), it.next());\n assert_eq!(Some((49, 7, 2)), it.next());\n assert_eq!(Some((64, 2, 6)), it.next());\n assert_eq!(Some((64, 4, 3)), it.next());\n assert_eq!(Some((64, 8, 2)), it.next());\n assert_eq!(Some((81, 3, 4)), it.next());\n assert_eq!(Some((81, 9, 2)), it.next());\n assert_eq!(Some((100, 10, 2)), it.next());\n }\n\n #[test]\n fn a() {\n let mut it = super::a();\n assert_eq!(Some((512, 8, 3)), it.nth(1));\n assert_eq!(Some((614656, 28, 4)), it.nth(8 - 1));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![deny(warnings)]\nextern crate hyper;\nextern crate futures;\nextern crate tokio_core;\nextern crate pretty_env_logger;\n\nuse std::io::{self, Read, Write};\nuse std::net::TcpListener;\nuse std::thread;\nuse std::time::Duration;\n\nuse hyper::client::{Client, Request, HttpConnector};\nuse hyper::{Method, StatusCode};\n\nuse futures::{Future, Stream};\nuse futures::sync::oneshot;\n\nuse tokio_core::reactor::{Core, Handle, Timeout};\n\nfn client(handle: &Handle) -> Client<HttpConnector> {\n Client::new(handle)\n}\n\nfn s(buf: &[u8]) -> &str {\n ::std::str::from_utf8(buf).unwrap()\n}\n\nmacro_rules! 
test {\n (\n name: $name:ident,\n server:\n expected: $server_expected:expr,\n reply: $server_reply:expr,\n client:\n request:\n method: $client_method:ident,\n url: $client_url:expr,\n headers: [ $($request_headers:expr,)* ],\n body: $request_body:expr,\n\n response:\n status: $client_status:ident,\n headers: [ $($response_headers:expr,)* ],\n body: $response_body:expr,\n ) => (\n #[test]\n fn $name() {\n #![allow(unused)]\n use hyper::header::*;\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n let mut req = Request::new(Method::$client_method, format!($client_url, addr=addr).parse().unwrap());\n $(\n req.headers_mut().set($request_headers);\n )*\n\n if let Some(body) = $request_body {\n let body: &'static str = body;\n req.set_body(body);\n }\n let res = client.request(req);\n\n let (tx, rx) = oneshot::channel();\n\n thread::spawn(move || {\n let mut inc = server.accept().unwrap().0;\n inc.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n inc.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let expected = format!($server_expected, addr=addr);\n let mut buf = [0; 4096];\n let mut n = 0;\n while n < buf.len() && n < expected.len() {\n n += inc.read(&mut buf[n..]).unwrap();\n }\n assert_eq!(s(&buf[..n]), expected);\n\n inc.write_all($server_reply.as_ref()).unwrap();\n tx.complete(());\n });\n\n let rx = rx.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n\n let work = res.join(rx).map(|r| r.0);\n\n let res = core.run(work).unwrap();\n assert_eq!(res.status(), &StatusCode::$client_status);\n $(\n assert_eq!(res.headers().get(), Some(&$response_headers));\n )*\n }\n );\n}\n\nstatic REPLY_OK: &'static str = \"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\";\n\ntest! {\n name: client_get,\n\n server:\n expected: \"GET \/ HTTP\/1.1\\r\\nHost: {addr}\\r\\n\\r\\n\",\n reply: REPLY_OK,\n\n client:\n request:\n method: Get,\n url: \"http:\/\/{addr}\/\",\n headers: [],\n body: None,\n response:\n status: Ok,\n headers: [\n ContentLength(0),\n ],\n body: None,\n}\n\ntest! {\n name: client_get_query,\n\n server:\n expected: \"GET \/foo?key=val HTTP\/1.1\\r\\nHost: {addr}\\r\\n\\r\\n\",\n reply: REPLY_OK,\n\n client:\n request:\n method: Get,\n url: \"http:\/\/{addr}\/foo?key=val#dont_send_me\",\n headers: [],\n body: None,\n response:\n status: Ok,\n headers: [\n ContentLength(0),\n ],\n body: None,\n}\n\ntest! {\n name: client_post_sized,\n\n server:\n expected: \"\\\n POST \/length HTTP\/1.1\\r\\n\\\n Host: {addr}\\r\\n\\\n Content-Length: 7\\r\\n\\\n \\r\\n\\\n foo bar\\\n \",\n reply: REPLY_OK,\n\n client:\n request:\n method: Post,\n url: \"http:\/\/{addr}\/length\",\n headers: [\n ContentLength(7),\n ],\n body: Some(\"foo bar\"),\n response:\n status: Ok,\n headers: [],\n body: None,\n}\n\ntest! 
{\n name: client_post_chunked,\n\n server:\n expected: \"\\\n POST \/chunks HTTP\/1.1\\r\\n\\\n Host: {addr}\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n \\r\\n\\\n B\\r\\n\\\n foo bar baz\\r\\n\\\n 0\\r\\n\\r\\n\\\n \",\n reply: REPLY_OK,\n\n client:\n request:\n method: Post,\n url: \"http:\/\/{addr}\/chunks\",\n headers: [\n TransferEncoding::chunked(),\n ],\n body: Some(\"foo bar baz\"),\n response:\n status: Ok,\n headers: [],\n body: None,\n}\n\n#[test]\nfn client_keep_alive() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n\n\n let (tx1, rx1) = oneshot::channel();\n let (tx2, rx2) = oneshot::channel();\n thread::spawn(move || {\n let mut sock = server.accept().unwrap().0;\n sock.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n sock.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let mut buf = [0; 4096];\n sock.read(&mut buf).expect(\"read 1\");\n sock.write_all(b\"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\").expect(\"write 1\");\n tx1.complete(());\n\n sock.read(&mut buf).expect(\"read 2\");\n let second_get = b\"GET \/b HTTP\/1.1\\r\\n\";\n assert_eq!(&buf[..second_get.len()], second_get);\n sock.write_all(b\"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\").expect(\"write 2\");\n tx2.complete(());\n });\n\n\n\n let rx = rx1.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/a\", addr).parse().unwrap());\n core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n let rx = rx2.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/b\", addr).parse().unwrap());\n core.run(res.join(rx).map(|r| r.0)).unwrap();\n}\n\n\n#[test]\nfn client_pooled_socket_disconnected() {\n let _ = pretty_env_logger::init();\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n\n\n let (tx1, rx1) = oneshot::channel();\n let (tx2, rx2) = oneshot::channel();\n thread::spawn(move || {\n let mut sock = server.accept().unwrap().0;\n sock.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n sock.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let mut buf = [0; 4096];\n sock.read(&mut buf).expect(\"read 1\");\n let remote_addr = sock.peer_addr().unwrap().to_string();\n let out = format!(\"HTTP\/1.1 200 OK\\r\\nContent-Length: {}\\r\\n\\r\\n{}\", remote_addr.len(), remote_addr);\n sock.write_all(out.as_bytes()).expect(\"write 1\");\n drop(sock);\n tx1.complete(());\n\n let mut sock = server.accept().unwrap().0;\n sock.read(&mut buf).expect(\"read 2\");\n let second_get = b\"GET \/b HTTP\/1.1\\r\\n\";\n assert_eq!(&buf[..second_get.len()], second_get);\n let remote_addr = sock.peer_addr().unwrap().to_string();\n let out = format!(\"HTTP\/1.1 200 OK\\r\\nContent-Length: {}\\r\\n\\r\\n{}\", remote_addr.len(), remote_addr);\n sock.write_all(out.as_bytes()).expect(\"write 2\");\n tx2.complete(());\n });\n\n \/\/ spin shortly so we receive the hangup on the client socket\n let sleep = Timeout::new(Duration::from_millis(500), &core.handle()).unwrap();\n core.run(sleep).unwrap();\n\n let rx = rx1.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/a\", 
addr).parse().unwrap())\n .and_then(|res| {\n res.body()\n .map(|chunk| chunk.to_vec())\n .collect()\n .map(|vec| vec.concat())\n .map(|vec| String::from_utf8(vec).unwrap())\n });\n let addr1 = core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n let rx = rx2.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/b\", addr).parse().unwrap())\n .and_then(|res| {\n res.body()\n .map(|chunk| chunk.to_vec())\n .collect()\n .map(|vec| vec.concat())\n .map(|vec| String::from_utf8(vec).unwrap())\n });\n let addr2 = core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n assert_ne!(addr1, addr2);\n}\n<commit_msg>test(client): disable flaky disconnect test<commit_after>#![deny(warnings)]\nextern crate hyper;\nextern crate futures;\nextern crate tokio_core;\nextern crate pretty_env_logger;\n\nuse std::io::{self, Read, Write};\nuse std::net::TcpListener;\nuse std::thread;\nuse std::time::Duration;\n\nuse hyper::client::{Client, Request, HttpConnector};\nuse hyper::{Method, StatusCode};\n\nuse futures::Future;\nuse futures::sync::oneshot;\n\nuse tokio_core::reactor::{Core, Handle};\n\nfn client(handle: &Handle) -> Client<HttpConnector> {\n Client::new(handle)\n}\n\nfn s(buf: &[u8]) -> &str {\n ::std::str::from_utf8(buf).unwrap()\n}\n\nmacro_rules! test {\n (\n name: $name:ident,\n server:\n expected: $server_expected:expr,\n reply: $server_reply:expr,\n client:\n request:\n method: $client_method:ident,\n url: $client_url:expr,\n headers: [ $($request_headers:expr,)* ],\n body: $request_body:expr,\n\n response:\n status: $client_status:ident,\n headers: [ $($response_headers:expr,)* ],\n body: $response_body:expr,\n ) => (\n #[test]\n fn $name() {\n #![allow(unused)]\n use hyper::header::*;\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n let mut req = Request::new(Method::$client_method, format!($client_url, addr=addr).parse().unwrap());\n $(\n req.headers_mut().set($request_headers);\n )*\n\n if let Some(body) = $request_body {\n let body: &'static str = body;\n req.set_body(body);\n }\n let res = client.request(req);\n\n let (tx, rx) = oneshot::channel();\n\n thread::spawn(move || {\n let mut inc = server.accept().unwrap().0;\n inc.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n inc.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let expected = format!($server_expected, addr=addr);\n let mut buf = [0; 4096];\n let mut n = 0;\n while n < buf.len() && n < expected.len() {\n n += inc.read(&mut buf[n..]).unwrap();\n }\n assert_eq!(s(&buf[..n]), expected);\n\n inc.write_all($server_reply.as_ref()).unwrap();\n tx.complete(());\n });\n\n let rx = rx.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n\n let work = res.join(rx).map(|r| r.0);\n\n let res = core.run(work).unwrap();\n assert_eq!(res.status(), &StatusCode::$client_status);\n $(\n assert_eq!(res.headers().get(), Some(&$response_headers));\n )*\n }\n );\n}\n\nstatic REPLY_OK: &'static str = \"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\";\n\ntest! {\n name: client_get,\n\n server:\n expected: \"GET \/ HTTP\/1.1\\r\\nHost: {addr}\\r\\n\\r\\n\",\n reply: REPLY_OK,\n\n client:\n request:\n method: Get,\n url: \"http:\/\/{addr}\/\",\n headers: [],\n body: None,\n response:\n status: Ok,\n headers: [\n ContentLength(0),\n ],\n body: None,\n}\n\ntest! 
{\n name: client_get_query,\n\n server:\n expected: \"GET \/foo?key=val HTTP\/1.1\\r\\nHost: {addr}\\r\\n\\r\\n\",\n reply: REPLY_OK,\n\n client:\n request:\n method: Get,\n url: \"http:\/\/{addr}\/foo?key=val#dont_send_me\",\n headers: [],\n body: None,\n response:\n status: Ok,\n headers: [\n ContentLength(0),\n ],\n body: None,\n}\n\ntest! {\n name: client_post_sized,\n\n server:\n expected: \"\\\n POST \/length HTTP\/1.1\\r\\n\\\n Host: {addr}\\r\\n\\\n Content-Length: 7\\r\\n\\\n \\r\\n\\\n foo bar\\\n \",\n reply: REPLY_OK,\n\n client:\n request:\n method: Post,\n url: \"http:\/\/{addr}\/length\",\n headers: [\n ContentLength(7),\n ],\n body: Some(\"foo bar\"),\n response:\n status: Ok,\n headers: [],\n body: None,\n}\n\ntest! {\n name: client_post_chunked,\n\n server:\n expected: \"\\\n POST \/chunks HTTP\/1.1\\r\\n\\\n Host: {addr}\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n \\r\\n\\\n B\\r\\n\\\n foo bar baz\\r\\n\\\n 0\\r\\n\\r\\n\\\n \",\n reply: REPLY_OK,\n\n client:\n request:\n method: Post,\n url: \"http:\/\/{addr}\/chunks\",\n headers: [\n TransferEncoding::chunked(),\n ],\n body: Some(\"foo bar baz\"),\n response:\n status: Ok,\n headers: [],\n body: None,\n}\n\n#[test]\nfn client_keep_alive() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n\n\n let (tx1, rx1) = oneshot::channel();\n let (tx2, rx2) = oneshot::channel();\n thread::spawn(move || {\n let mut sock = server.accept().unwrap().0;\n sock.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n sock.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let mut buf = [0; 4096];\n sock.read(&mut buf).expect(\"read 1\");\n sock.write_all(b\"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\").expect(\"write 1\");\n tx1.complete(());\n\n sock.read(&mut buf).expect(\"read 2\");\n let second_get = b\"GET \/b HTTP\/1.1\\r\\n\";\n assert_eq!(&buf[..second_get.len()], second_get);\n sock.write_all(b\"HTTP\/1.1 200 OK\\r\\nContent-Length: 0\\r\\n\\r\\n\").expect(\"write 2\");\n tx2.complete(());\n });\n\n\n\n let rx = rx1.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/a\", addr).parse().unwrap());\n core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n let rx = rx2.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/b\", addr).parse().unwrap());\n core.run(res.join(rx).map(|r| r.0)).unwrap();\n}\n\n\n\/* TODO: re-enable once rety works, its currently a flaky test\n#[test]\nfn client_pooled_socket_disconnected() {\n let _ = pretty_env_logger::init();\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let mut core = Core::new().unwrap();\n let client = client(&core.handle());\n\n\n let (tx1, rx1) = oneshot::channel();\n let (tx2, rx2) = oneshot::channel();\n thread::spawn(move || {\n let mut sock = server.accept().unwrap().0;\n sock.set_read_timeout(Some(Duration::from_secs(5))).unwrap();\n sock.set_write_timeout(Some(Duration::from_secs(5))).unwrap();\n let mut buf = [0; 4096];\n sock.read(&mut buf).expect(\"read 1\");\n let remote_addr = sock.peer_addr().unwrap().to_string();\n let out = format!(\"HTTP\/1.1 200 OK\\r\\nContent-Length: {}\\r\\n\\r\\n{}\", remote_addr.len(), remote_addr);\n sock.write_all(out.as_bytes()).expect(\"write 1\");\n drop(sock);\n 
tx1.complete(());\n\n let mut sock = server.accept().unwrap().0;\n sock.read(&mut buf).expect(\"read 2\");\n let second_get = b\"GET \/b HTTP\/1.1\\r\\n\";\n assert_eq!(&buf[..second_get.len()], second_get);\n let remote_addr = sock.peer_addr().unwrap().to_string();\n let out = format!(\"HTTP\/1.1 200 OK\\r\\nContent-Length: {}\\r\\n\\r\\n{}\", remote_addr.len(), remote_addr);\n sock.write_all(out.as_bytes()).expect(\"write 2\");\n tx2.complete(());\n });\n\n \/\/ spin shortly so we receive the hangup on the client socket\n let sleep = Timeout::new(Duration::from_millis(500), &core.handle()).unwrap();\n core.run(sleep).unwrap();\n\n let rx = rx1.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/a\", addr).parse().unwrap())\n .and_then(|res| {\n res.body()\n .map(|chunk| chunk.to_vec())\n .collect()\n .map(|vec| vec.concat())\n .map(|vec| String::from_utf8(vec).unwrap())\n });\n let addr1 = core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n let rx = rx2.map_err(|_| hyper::Error::Io(io::Error::new(io::ErrorKind::Other, \"thread panicked\")));\n let res = client.get(format!(\"http:\/\/{}\/b\", addr).parse().unwrap())\n .and_then(|res| {\n res.body()\n .map(|chunk| chunk.to_vec())\n .collect()\n .map(|vec| vec.concat())\n .map(|vec| String::from_utf8(vec).unwrap())\n });\n let addr2 = core.run(res.join(rx).map(|r| r.0)).unwrap();\n\n assert_ne!(addr1, addr2);\n}\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>field level mutability in structs used<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example parsing ISO 8601 duration<commit_after>extern crate pom;\nuse pom::parser::*;\nuse pom::Parser;\n\nuse std::str::{self, FromStr};\n\n#[derive(Debug, PartialEq)]\nstruct Duration {\n\tyears: Option<f32>,\n\tmonths: Option<f32>,\n\tweeks: Option<f32>,\n\tdays: Option<f32>,\n\thours: Option<f32>,\n\tminutes: Option<f32>,\n\tseconds: Option<f32>,\n}\n\nfn number_separator() -> Parser<u8, ()> {\n\t\/\/ either '.' 
or ',' can be used as a separator between the whole and decimal part of a number\n\tone_of(b\".,\").discard()\n}\n\nfn number() -> Parser<u8, f32> {\n\tlet integer = one_of(b\"0123456789\").repeat(0..);\n\tlet frac = number_separator() + one_of(b\"0123456789\").repeat(1..);\n\tlet number = integer + frac.opt();\n\tnumber.collect().convert(str::from_utf8).convert(f32::from_str)\n}\n\nfn date_part() -> Parser<u8, (Option<f32>, Option<f32>, Option<f32>, Option<f32>)> {\n\t(\n\t\t(number() - sym(b'Y')).opt() +\n\t\t(number() - sym(b'M')).opt() +\n\t\t(number() - sym(b'W')).opt() +\n\t\t(number() - sym(b'D')).opt()\n\t)\n\t\t.map(|(((years,months), weeks), days)| {\n\t\t\t(years, months, weeks, days)\n\t\t})\n}\n\nfn time_part() -> Parser<u8, Option<(Option<f32>, Option<f32>, Option<f32>)>> {\n\t(\n\t\tsym(b'T') *\n\t\t\t(\n\t\t\t\t(number() - sym(b'H')).opt() +\n\t\t\t\t\t(number() - sym(b'M')).opt() +\n\t\t\t\t\t(number() - sym(b'S')).opt()\n\t\t\t)\n\t\t\t\t.map(|((hours,minutes), seconds)| {\n\t\t\t\t\t(hours, minutes, seconds)\n\t\t\t\t})\n\t).opt()\n}\n\nfn parser() -> Parser<u8, Duration> {\n\t(\n\t\tsym(b'P') *\n\t\tdate_part() +\n\t\ttime_part()\n\t)\n\t\t.map(|(date_elements, time_elements)|{\n\t\t\tlet (years, months, weeks, days) = date_elements;\n\t\t\tlet (hours, minutes, seconds) = match time_elements {\n\t\t\t\tNone => (None, None, None),\n\t\t\t\tSome(elements) => elements,\n\t\t\t};\n\n\t\t\tDuration {\n\t\t\t\tyears,\n\t\t\t\tmonths,\n\t\t\t\tweeks,\n\t\t\t\tdays,\n\t\t\t\thours,\n\t\t\t\tminutes,\n\t\t\t\tseconds,\n\t\t\t}\n\t\t})\n}\n\n\/\/\/ Parses the ISO 8601 Duration standard\n\/\/\/ https:\/\/en.wikipedia.org\/wiki\/ISO_8601#Durations\nfn main() {\n\tlet input = \"P3Y6M4DT12H30M5S\";\n\tlet result = parser().parse(input.as_bytes());\n\n\tassert_eq!(\n\t\tDuration {\n\t\t\tyears: Some(3f32),\n\t\t\tmonths: Some(6f32),\n\t\t\tweeks: None,\n\t\t\tdays: Some(4f32),\n\t\t\thours: Some(12f32),\n\t\t\tminutes: Some(30f32),\n\t\t\tseconds: Some(5f32)\n\t\t},\n\t\tresult.unwrap()\n\t);\n\n}\n<|endoftext|>"} {"text":"<commit_before>use libc::*;\nuse std::ffi::CString;\nuse std::mem::transmute;\n\n#[repr(C)]\npub struct CPDReaderAPI {\n private_data: *mut c_void,\n read_u8: extern \"C\" fn(data: *mut c_void),\n\n pub data: *mut c_void,\n pub read_get_event: extern fn(reader: *mut c_void) -> uint32_t,\n pub read_iterator_next_event: extern fn(reader: *mut c_void,\n it: *mut uint64_t) -> uint32_t,\n pub read_iterator_begin: extern fn(reader: *mut c_void, it: *mut uint64_t,\n keyName: *mut *const c_char, parentIt: uint64_t)\n -> uint32_t,\n pub read_iterator_next: extern fn(reader: *mut c_void, keyName: *mut *const c_char,\n it: *mut uint64_t) -> uint32_t,\n pub read_next_entry: extern fn(reader: *mut c_void, arrayIt: *mut uint64_t)\n -> int32_t,\n pub read_find_s8: extern fn(reader: *mut c_void, res: *mut int8_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u8: extern fn(reader: *mut c_void, res: *mut uint8_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s16: extern fn(reader: *mut c_void, res: *mut int16_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u16: extern fn(reader: *mut c_void, res: *mut uint16_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s32: extern fn(reader: *mut c_void, res: *mut int32_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u32: extern fn(reader: *mut c_void, res: *mut uint32_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s64: extern 
fn(reader: *mut c_void, res: *mut int64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u64: extern fn(reader: *mut c_void, res: *mut uint64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_float: extern fn(reader: *mut c_void, res: *mut c_float, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_double: extern fn(reader: *mut c_void, res: *mut c_double, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_string: extern fn(reader: *mut c_void, res: *mut *const c_char, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_data: extern fn(reader: *mut c_void, data: *mut *mut c_void, size: *mut uint64_t,\n id: *const c_char, it: uint64_t) -> uint32_t,\n pub read_find_array: extern fn(reader: *mut c_void, arrayIt: *mut uint64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_dump_data: extern fn(reader: *mut c_void),\n}\n\n#[repr(C)]\npub enum WriteStatus {\n Ok,\n Fail,\n}\n\n#[repr(C)]\npub struct CPDWriterAPI {\n private_data: *mut c_void,\n pub write_event_begin: extern \"C\" fn(writer: *mut c_void, event: uint16_t) -> WriteStatus,\n pub write_event_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_header_array_begin: extern fn(writer: *mut c_void, ids: *mut *const c_char)\n -> WriteStatus,\n pub write_header_array_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_begin: extern fn(writer: *mut c_void, name: *const c_char) -> WriteStatus,\n pub write_array_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_entry_begin: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_entry_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_s8: extern fn(writer: *mut c_void, id: *const c_char, v: int8_t) -> WriteStatus,\n pub write_u8: extern fn(writer: *mut c_void, id: *const c_char, v: uint8_t) -> WriteStatus,\n pub write_s16: extern fn(writer: *mut c_void, id: *const c_char, v: int16_t) -> WriteStatus,\n pub write_u16: extern fn(writer: *mut c_void, id: *const c_char, v: uint16_t) -> WriteStatus,\n pub write_s32: extern fn(writer: *mut c_void, id: *const c_char, v: int32_t) -> WriteStatus,\n pub write_u32: extern fn(writer: *mut c_void, id: *const c_char, v: uint32_t) -> WriteStatus,\n pub write_s64: extern fn(writer: *mut c_void, id: *const c_char, v: int64_t) -> WriteStatus,\n pub write_u64: extern fn(writer: *mut c_void, id: *const c_char, v: uint64_t) -> WriteStatus,\n pub write_float: extern fn(writer: *mut c_void, id: *const c_char, v: c_float) -> WriteStatus,\n pub write_double: extern fn(writer: *mut c_void, id: *const c_char, v: c_double) -> WriteStatus,\n pub write_string: extern fn(writer: *mut c_void, id: *const c_char, v: *const c_char)\n -> WriteStatus,\n pub write_data: extern fn(w: *mut c_void, id: *const c_char, d: *const uint8_t, l: c_uint)\n -> WriteStatus,\n}\n\npub struct Reader {\n pub api: *mut CPDReaderAPI,\n pub it: u64,\n}\n\npub struct Writer {\n pub api: *mut CPDWriterAPI,\n}\n\n#[repr(C)]\npub enum ReadType {\n None,\n S8,\n U8,\n S16,\n U16,\n S32,\n U32,\n S64,\n U64,\n Float,\n Double,\n EndNumericTypes,\n Str,\n Data,\n Event,\n Array,\n ArrayEntry,\n Count,\n}\n\n#[repr(C)]\npub enum ReadStatus {\n Ok = 1 << 8,\n Converted = 2 << 8,\n IllegalType = 3 << 8,\n NotFound = 4 << 8,\n Fail = 5 << 8,\n TypeMask = 0xff,\n}\n\npub struct ReaderIter {\n reader: Reader,\n curr_iter: u64,\n}\n\nimpl Clone for Reader {\n fn clone(&self) -> Self {\n return Reader {\n api: self.api,\n it: self.it,\n };\n }\n}\n\nfn 
status_res<T>(res: T, s: u32) -> Result<T, ReadStatus> {\n match (s >> 8) & 0xff {\n 1...2 => Ok(res),\n 3 => Err(ReadStatus::IllegalType),\n _ => Err(ReadStatus::NotFound),\n }\n}\n\nimpl Reader {\n pub fn new(in_api: *mut CPDReaderAPI, iter: u64) -> Self {\n return Reader {\n api: in_api,\n it: iter,\n };\n }\n\n pub fn find_u8(&self, id: &str) -> Result<u8, ReadStatus> {\n let s = CString::new(id).unwrap();\n let mut res = 0u8;\n let ret;\n\n unsafe {\n ret = ((*self.api).read_find_u8)(transmute(self.api),\n &mut res,\n s.as_ptr(),\n self.it);\n }\n\n return status_res(res, ret);\n }\n\n pub fn find_array(&self, id: &str) -> ReaderIter {\n let s = CString::new(id).unwrap();\n let mut t = 0u64;\n\n unsafe {\n ((*self.api).read_find_array)(transmute(self.api), &mut t, s.as_ptr(), 0);\n }\n\n ReaderIter {\n reader: self.clone(),\n curr_iter: t,\n }\n }\n}\n\nimpl Iterator for ReaderIter {\n type Item = Reader;\n fn next(&mut self) -> Option<Reader> {\n let ret;\n unsafe {\n ret = ((*self.reader.api).read_next_entry)((*self.reader.api).private_data,\n &mut self.curr_iter);\n }\n\n match ret {\n 0 => None,\n _ => Some(Reader::new(self.reader.api, self.curr_iter)),\n }\n }\n}\n\nimpl Writer {\n pub fn event_begin(&mut self, event: u16) {\n unsafe {\n ((*self.api).write_event_begin)(transmute(self.api), event);\n }\n }\n\n pub fn event_end(&mut self) {\n unsafe {\n ((*self.api).write_event_end)(transmute(self.api));\n }\n }\n\n pub fn write_array_begin(&mut self, name: &str) {\n let s = CString::new(name).unwrap();\n unsafe {\n ((*self.api).write_array_begin)(transmute(self.api), s.as_ptr());\n }\n }\n\n pub fn write_array_end(&mut self) {\n unsafe {\n ((*self.api).write_array_end)(transmute(self.api));\n }\n }\n\n pub fn write_array_entry_begin(&mut self) {\n unsafe {\n ((*self.api).write_array_entry_begin)(transmute(self.api));\n }\n }\n\n pub fn write_array_entry_end(&mut self) {\n unsafe {\n ((*self.api).write_array_entry_end)(transmute(self.api));\n }\n }\n\n pub fn write_s8(&mut self, id: &str, v: i8) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_s8)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_u8(&mut self, id: &str, v: u8) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_u8)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_s16(&mut self, id: &str, v: i16) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_s16)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_u16(&mut self, id: &str, v: u16) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_u16)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_s32(&mut self, id: &str, v: i32) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_s32)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_u32(&mut self, id: &str, v: u32) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_u32)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_s64(&mut self, id: &str, v: i64) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_s64)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_u64(&mut self, id: &str, v: u64) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_u64)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_float(&mut self, id: &str, v: f32) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_float)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn 
write_double(&mut self, id: &str, v: f64) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_double)(transmute(self.api), s.as_ptr(), v);\n }\n }\n\n pub fn write_string(&mut self, id: &str, v: &str) {\n let id_s = CString::new(id).unwrap();\n let v_s = CString::new(v).unwrap();\n unsafe {\n ((*self.api).write_string)(transmute(self.api), id_s.as_ptr(), v_s.as_ptr());\n }\n }\n\n pub fn write_data(&mut self, id: &str, data: &[u8]) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_data)(transmute(self.api),\n s.as_ptr(),\n data.as_ptr(),\n data.len() as u32);\n }\n }\n}\n<commit_msg>Use macros to reduce code size<commit_after>use libc::*;\nuse std::ffi::CString;\nuse std::mem::transmute;\n\n#[repr(C)]\npub struct CPDReaderAPI {\n pub data: *mut c_void,\n pub read_get_event: extern fn(reader: *mut c_void) -> uint32_t,\n pub read_iterator_next_event: extern fn(reader: *mut c_void,\n it: *mut uint64_t) -> uint32_t,\n pub read_iterator_begin: extern fn(reader: *mut c_void, it: *mut uint64_t,\n keyName: *mut *const c_char, parentIt: uint64_t)\n -> uint32_t,\n pub read_iterator_next: extern fn(reader: *mut c_void, keyName: *mut *const c_char,\n it: *mut uint64_t) -> uint32_t,\n pub read_next_entry: extern fn(reader: *mut c_void, arrayIt: *mut uint64_t)\n -> int32_t,\n pub read_find_s8: extern fn(reader: *mut c_void, res: *mut int8_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u8: extern fn(reader: *mut c_void, res: *mut uint8_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s16: extern fn(reader: *mut c_void, res: *mut int16_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u16: extern fn(reader: *mut c_void, res: *mut uint16_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s32: extern fn(reader: *mut c_void, res: *mut int32_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u32: extern fn(reader: *mut c_void, res: *mut uint32_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_s64: extern fn(reader: *mut c_void, res: *mut int64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_u64: extern fn(reader: *mut c_void, res: *mut uint64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_float: extern fn(reader: *mut c_void, res: *mut c_float, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_double: extern fn(reader: *mut c_void, res: *mut c_double, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_string: extern fn(reader: *mut c_void, res: *mut *const c_char, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_find_data: extern fn(reader: *mut c_void, data: *mut *mut c_void, size: *mut uint64_t,\n id: *const c_char, it: uint64_t) -> uint32_t,\n pub read_find_array: extern fn(reader: *mut c_void, arrayIt: *mut uint64_t, id: *const c_char,\n it: uint64_t) -> uint32_t,\n pub read_dump_data: extern fn(reader: *mut c_void),\n}\n\n#[repr(C)]\npub enum WriteStatus {\n Ok,\n Fail,\n}\n\n#[repr(C)]\npub struct CPDWriterAPI {\n private_data: *mut c_void,\n pub write_event_begin: extern \"C\" fn(writer: *mut c_void, event: uint16_t) -> WriteStatus,\n pub write_event_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_header_array_begin: extern fn(writer: *mut c_void, ids: *mut *const c_char)\n -> WriteStatus,\n pub write_header_array_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_begin: extern fn(writer: *mut c_void, name: *const c_char) -> WriteStatus,\n pub 
write_array_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_entry_begin: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_array_entry_end: extern fn(writer: *mut c_void) -> WriteStatus,\n pub write_s8: extern fn(writer: *mut c_void, id: *const c_char, v: int8_t) -> WriteStatus,\n pub write_u8: extern fn(writer: *mut c_void, id: *const c_char, v: uint8_t) -> WriteStatus,\n pub write_s16: extern fn(writer: *mut c_void, id: *const c_char, v: int16_t) -> WriteStatus,\n pub write_u16: extern fn(writer: *mut c_void, id: *const c_char, v: uint16_t) -> WriteStatus,\n pub write_s32: extern fn(writer: *mut c_void, id: *const c_char, v: int32_t) -> WriteStatus,\n pub write_u32: extern fn(writer: *mut c_void, id: *const c_char, v: uint32_t) -> WriteStatus,\n pub write_s64: extern fn(writer: *mut c_void, id: *const c_char, v: int64_t) -> WriteStatus,\n pub write_u64: extern fn(writer: *mut c_void, id: *const c_char, v: uint64_t) -> WriteStatus,\n pub write_float: extern fn(writer: *mut c_void, id: *const c_char, v: c_float) -> WriteStatus,\n pub write_double: extern fn(writer: *mut c_void, id: *const c_char, v: c_double) -> WriteStatus,\n pub write_string: extern fn(writer: *mut c_void, id: *const c_char, v: *const c_char)\n -> WriteStatus,\n pub write_data: extern fn(w: *mut c_void, id: *const c_char, d: *const uint8_t, l: c_uint)\n -> WriteStatus,\n}\n\npub struct Reader {\n pub api: *mut CPDReaderAPI,\n pub it: u64,\n}\n\npub struct Writer {\n pub api: *mut CPDWriterAPI,\n}\n\n#[repr(C)]\npub enum ReadType {\n None,\n S8,\n U8,\n S16,\n U16,\n S32,\n U32,\n S64,\n U64,\n Float,\n Double,\n EndNumericTypes,\n Str,\n Data,\n Event,\n Array,\n ArrayEntry,\n Count,\n}\n\n#[repr(C)]\npub enum ReadStatus {\n Ok = 1 << 8,\n Converted = 2 << 8,\n IllegalType = 3 << 8,\n NotFound = 4 << 8,\n Fail = 5 << 8,\n TypeMask = 0xff,\n}\n\npub struct ReaderIter {\n reader: Reader,\n curr_iter: u64,\n}\n\nimpl Clone for Reader {\n fn clone(&self) -> Self {\n return Reader {\n api: self.api,\n it: self.it,\n };\n }\n}\n\nfn status_res<T>(res: T, s: u32) -> Result<T, ReadStatus> {\n match (s >> 8) & 0xff {\n 1...2 => Ok(res),\n 3 => Err(ReadStatus::IllegalType),\n _ => Err(ReadStatus::NotFound),\n }\n}\n\nmacro_rules! 
find_fun {\n ($c_name:ident, $name:ident, $data_type:ident) => {\n pub fn $name(&self, id: &str) -> Result<$data_type, ReadStatus> {\n let s = CString::new(id).unwrap();\n let mut res = 0 as $data_type;\n let ret;\n\n unsafe {\n ret = ((*self.api).$c_name)(transmute(self.api), &mut res, s.as_ptr(), self.it);\n }\n\n return status_res(res, ret);\n }\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nimpl Reader {\n pub fn new(in_api: *mut CPDReaderAPI, iter: u64) -> Self {\n return Reader {\n api: in_api,\n it: iter,\n };\n }\n\n find_fun!(read_find_s8, find_s8, i8);\n find_fun!(read_find_u8, find_u8, u8);\n find_fun!(read_find_s16, find_s16, i16);\n find_fun!(read_find_u16, find_u16, u16);\n find_fun!(read_find_s32, find_s32, i32);\n find_fun!(read_find_u32, find_u32, u32);\n find_fun!(read_find_s64, find_s64, i64);\n find_fun!(read_find_u64, find_u64, u64);\n find_fun!(read_find_float, find_float, f32);\n find_fun!(read_find_double, find_double, f64);\n\n pub fn find_array(&self, id: &str) -> ReaderIter {\n let s = CString::new(id).unwrap();\n let mut t = 0u64;\n\n unsafe {\n ((*self.api).read_find_array)(transmute(self.api), &mut t, s.as_ptr(), 0);\n }\n\n ReaderIter {\n reader: self.clone(),\n curr_iter: t,\n }\n }\n}\n\nimpl Iterator for ReaderIter {\n type Item = Reader;\n fn next(&mut self) -> Option<Reader> {\n let ret;\n unsafe {\n ret = ((*self.reader.api).read_next_entry)(transmute(self.reader.api),\n &mut self.curr_iter);\n }\n\n match ret {\n 0 => None,\n _ => Some(Reader::new(self.reader.api, self.curr_iter)),\n }\n }\n}\n\nmacro_rules! write_fun {\n ($name:ident, $data_type:ident) => {\n pub fn $name(&mut self, id: &str, v: $data_type) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).$name)(transmute(self.api), s.as_ptr(), v);\n }\n }\n }\n}\n\nimpl Writer {\n pub fn event_begin(&mut self, event: u16) {\n unsafe {\n ((*self.api).write_event_begin)(transmute(self.api), event);\n }\n }\n\n pub fn event_end(&mut self) {\n unsafe {\n ((*self.api).write_event_end)(transmute(self.api));\n }\n }\n\n pub fn array_begin(&mut self, name: &str) {\n let s = CString::new(name).unwrap();\n unsafe {\n ((*self.api).write_array_begin)(transmute(self.api), s.as_ptr());\n }\n }\n\n pub fn array_end(&mut self) {\n unsafe {\n ((*self.api).write_array_end)(transmute(self.api));\n }\n }\n\n pub fn array_entry_begin(&mut self) {\n unsafe {\n ((*self.api).write_array_entry_begin)(transmute(self.api));\n }\n }\n\n pub fn array_entry_end(&mut self) {\n unsafe {\n ((*self.api).write_array_entry_end)(transmute(self.api));\n }\n }\n\n write_fun!(write_s8, i8);\n write_fun!(write_u8, u8);\n write_fun!(write_s16, i16);\n write_fun!(write_u16, u16);\n write_fun!(write_s32, i32);\n write_fun!(write_u32, u32);\n write_fun!(write_s64, i64);\n write_fun!(write_u64, u64);\n write_fun!(write_float, f32);\n write_fun!(write_double, f64);\n\n pub fn write_string(&mut self, id: &str, v: &str) {\n let id_s = CString::new(id).unwrap();\n let v_s = CString::new(v).unwrap();\n unsafe {\n ((*self.api).write_string)(transmute(self.api), id_s.as_ptr(), v_s.as_ptr());\n }\n }\n\n pub fn write_data(&mut self, id: &str, data: &[u8]) {\n let s = CString::new(id).unwrap();\n unsafe {\n ((*self.api).write_data)(transmute(self.api),\n s.as_ptr(),\n data.as_ptr(),\n data.len() as u32);\n }\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::sys;\n\n#[packed]\nstruct S4 {\n a: u8,\n b: [u8, .. 3],\n}\n\n#[packed]\nstruct S5 {\n a: u8,\n b: u32\n}\n\n#[packed]\nstruct S13_str {\n a: i64,\n b: f32,\n c: u8,\n d: ~str\n}\n\nenum Foo {\n Bar = 1,\n Baz = 2\n}\n\n#[packed]\nstruct S3_Foo {\n a: u8,\n b: u16,\n c: Foo\n}\n\n#[packed]\nstruct S7_Option {\n a: f32,\n b: u8,\n c: u16,\n d: Option<@mut f64>\n}\n\n\npub fn main() {\n assert_eq!(sys::size_of::<S4>(), 4);\n assert_eq!(sys::size_of::<S5>(), 5);\n assert_eq!(sys::size_of::<S13_str>(), 13 + sys::size_of::<~str>());\n assert_eq!(sys::size_of::<S3_Foo>(), 3 + sys::size_of::<Foo>());\n assert_eq!(sys::size_of::<S7_Option>(), 7 + sys::size_of::<Option<@mut f64>>());\n}\n<commit_msg>Update test for packed structs to also test being placed in statics.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::sys;\n\n#[packed]\nstruct S4 {\n a: u8,\n b: [u8, .. 3],\n}\n\n#[packed]\nstruct S5 {\n a: u8,\n b: u32\n}\n\n#[packed]\nstruct S13_str {\n a: i64,\n b: f32,\n c: u8,\n d: ~str\n}\n\nenum Foo {\n Bar = 1,\n Baz = 2\n}\n\n#[packed]\nstruct S3_Foo {\n a: u8,\n b: u16,\n c: Foo\n}\n\n#[packed]\nstruct S7_Option {\n a: f32,\n b: u8,\n c: u16,\n d: Option<@mut f64>\n}\n\n\/\/ Placing packed structs in statics should work\nstatic TEST_S4: S4 = S4 { a: 1, b: [2, 3, 4] };\nstatic TEST_S5: S5 = S5 { a: 3, b: 67 };\nstatic TEST_S3_Foo: S3_Foo = S3_Foo { a: 1, b: 2, c: Baz };\n\n\npub fn main() {\n assert_eq!(sys::size_of::<S4>(), 4);\n assert_eq!(sys::size_of::<S5>(), 5);\n assert_eq!(sys::size_of::<S13_str>(), 13 + sys::size_of::<~str>());\n assert_eq!(sys::size_of::<S3_Foo>(), 3 + sys::size_of::<Foo>());\n assert_eq!(sys::size_of::<S7_Option>(), 7 + sys::size_of::<Option<@mut f64>>());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Commit the example of the doc in an examples<commit_after>extern crate cabot;\n\nuse cabot::{RequestBuilder, Client};\n\nfn main() {\n let request = RequestBuilder::new(\"https:\/\/www.rust-lang.org\/\")\n .build()\n .unwrap();\n let client = Client::new();\n let response = client.execute(&request).unwrap();\n print!(\"{}\", response.body_as_string().unwrap());\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::mem::size_of;\n\npub fn main() {\n let x: [int, ..4] = [1, 2, 3, 4];\n assert_eq!(x[0], 1);\n assert_eq!(x[1], 2);\n assert_eq!(x[2], 3);\n assert_eq!(x[3], 4);\n\n assert_eq!(size_of::<[u8, ..4]>(), 4u);\n\n \/\/ FIXME #10183\n if cfg!(target_word_size = \"64\") {\n assert_eq!(size_of::<[u8, ..(1 << 32)]>(), (1u << 32));\n }\n}\n<commit_msg>fix a failing test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::mem::size_of;\n\npub fn main() {\n let x: [int, ..4] = [1, 2, 3, 4];\n assert_eq!(x[0], 1);\n assert_eq!(x[1], 2);\n assert_eq!(x[2], 3);\n assert_eq!(x[3], 4);\n\n assert_eq!(size_of::<[u8, ..4]>(), 4u);\n\n \/\/ FIXME #10183\n \/\/ FIXME #18069\n \/\/if cfg!(target_word_size = \"64\") {\n \/\/ assert_eq!(size_of::<[u8, ..(1 << 32)]>(), (1u << 32));\n \/\/}\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse browser_host::{ServoCefBrowserHost, ServoCefBrowserHostExtensions};\nuse eutil::Downcast;\nuse frame::{ServoCefFrame, ServoCefFrameExtensions};\nuse interfaces::{CefBrowser, CefBrowserHost, CefClient, CefFrame, CefRequestContext};\nuse interfaces::{cef_browser_t, cef_browser_host_t, cef_client_t, cef_frame_t};\nuse interfaces::{cef_request_context_t};\nuse servo::Browser;\nuse types::{cef_browser_settings_t, cef_string_t, cef_window_info_t, cef_window_handle_t};\nuse window;\n\nuse compositing::windowing::{WindowNavigateMsg, WindowEvent};\nuse glutin_app;\nuse libc::c_int;\nuse std::cell::{Cell, RefCell, BorrowState};\nuse std::rc::Rc;\nuse std::sync::atomic::{AtomicIsize, Ordering};\n\nthread_local!(pub static ID_COUNTER: AtomicIsize = AtomicIsize::new(0));\nthread_local!(pub static BROWSERS: RefCell<Vec<CefBrowser>> = RefCell::new(vec!()));\n\npub enum ServoBrowser {\n Invalid,\n OnScreen(Browser),\n OffScreen(Browser),\n}\n\nimpl ServoBrowser {\n fn handle_event(&mut self, event: WindowEvent) {\n match *self {\n ServoBrowser::OnScreen(ref mut browser) => { browser.handle_events(vec![event]); }\n ServoBrowser::OffScreen(ref mut browser) => { browser.handle_events(vec![event]); }\n ServoBrowser::Invalid => {}\n }\n }\n\n pub fn get_title_for_main_frame(&self) {\n match *self {\n ServoBrowser::OnScreen(ref browser) => browser.get_title_for_main_frame(),\n ServoBrowser::OffScreen(ref browser) => browser.get_title_for_main_frame(),\n ServoBrowser::Invalid => {}\n }\n }\n\n pub fn pinch_zoom_level(&self) -> f32 {\n match *self {\n ServoBrowser::OnScreen(ref browser) => browser.pinch_zoom_level(),\n ServoBrowser::OffScreen(ref browser) => browser.pinch_zoom_level(),\n ServoBrowser::Invalid => 1.0,\n }\n }\n}\n\ncef_class_impl! {\n ServoCefBrowser : CefBrowser, cef_browser_t {\n fn get_host(&this,) -> *mut cef_browser_host_t {{\n this.downcast().host.clone()\n }}\n\n fn can_go_back(&this,) -> c_int {{\n this.downcast().back.get() as c_int\n }}\n\n fn can_go_forward(&this,) -> c_int {{\n this.downcast().forward.get() as c_int\n }}\n\n fn is_loading(&this,) -> c_int {{\n this.downcast().loading.get() as c_int\n }}\n\n fn go_back(&this,) -> () {{\n this.send_window_event(WindowEvent::Navigation(WindowNavigateMsg::Back));\n }}\n\n fn go_forward(&this,) -> () {{\n this.send_window_event(WindowEvent::Navigation(WindowNavigateMsg::Forward));\n }}\n\n \/\/ Returns the main (top-level) frame for the browser window.\n fn get_main_frame(&this,) -> *mut cef_frame_t {{\n this.downcast().frame.clone()\n }}\n\n fn get_identifier(&this,) -> c_int {{\n this.downcast().id as c_int\n }}\n }\n}\n\npub struct ServoCefBrowser {\n \/\/\/ A reference to the browser's primary frame.\n pub frame: CefFrame,\n \/\/\/ A reference to the browser's host.\n pub host: CefBrowserHost,\n \/\/\/ A reference to the browser client.\n pub client: CefClient,\n \/\/\/ the glutin window when using windowed rendering\n pub window: Option<Rc<glutin_app::window::Window>>,\n \/\/\/ Whether the on-created callback has fired yet.\n pub callback_executed: Cell<bool>,\n \/\/\/ whether the browser can navigate back\n pub back: Cell<bool>,\n \/\/\/ whether the browser can navigate forward\n pub forward: Cell<bool>,\n \/\/\/ whether the browser is loading\n pub loading: Cell<bool>,\n \/\/\/ the display system window handle: only to be used with host.get_window_handle()\n window_handle: cef_window_handle_t,\n\n id: isize,\n servo_browser: RefCell<ServoBrowser>,\n message_queue: RefCell<Vec<WindowEvent>>,\n}\n\nimpl ServoCefBrowser {\n 
pub fn new(window_info: &cef_window_info_t, client: CefClient) -> ServoCefBrowser {\n let frame = ServoCefFrame::new().as_cef_interface();\n let host = ServoCefBrowserHost::new(client.clone()).as_cef_interface();\n let mut window_handle: cef_window_handle_t = get_null_window_handle();\n let mut glutin_window: Option<Rc<glutin_app::window::Window>> = None;\n\n let servo_browser = if window_info.windowless_rendering_enabled == 0 {\n glutin_window = Some(glutin_app::create_window(window_info.parent_window as glutin_app::WindowID));\n let servo_browser = Browser::new(glutin_window.clone());\n window_handle = match glutin_window {\n Some(ref win) => win.platform_window() as cef_window_handle_t,\n None => get_null_window_handle()\n };\n ServoBrowser::OnScreen(servo_browser)\n } else {\n ServoBrowser::Invalid\n };\n\n let id = ID_COUNTER.with(|counter| {\n counter.fetch_add(1, Ordering::SeqCst)\n });\n\n ServoCefBrowser {\n frame: frame,\n host: host,\n client: client,\n window: glutin_window,\n callback_executed: Cell::new(false),\n servo_browser: RefCell::new(servo_browser),\n message_queue: RefCell::new(vec!()),\n id: id,\n back: Cell::new(false),\n forward: Cell::new(false),\n loading: Cell::new(false),\n window_handle: window_handle,\n }\n }\n}\n\npub trait ServoCefBrowserExtensions {\n fn init(&self, window_info: &cef_window_info_t);\n fn send_window_event(&self, event: WindowEvent);\n fn get_title_for_main_frame(&self);\n fn pinch_zoom_level(&self) -> f32;\n}\n\nimpl ServoCefBrowserExtensions for CefBrowser {\n fn init(&self, window_info: &cef_window_info_t) {\n if window_info.windowless_rendering_enabled != 0 {\n let window = window::Window::new(window_info.width, window_info.height);\n window.set_browser(self.clone());\n let servo_browser = Browser::new(Some(window.clone()));\n *self.downcast().servo_browser.borrow_mut() = ServoBrowser::OffScreen(servo_browser);\n }\n\n self.downcast().host.set_browser((*self).clone());\n self.downcast().frame.set_browser((*self).clone());\n if window_info.windowless_rendering_enabled == 0 {\n self.downcast().host.initialize_compositing();\n }\n }\n\n fn send_window_event(&self, event: WindowEvent) {\n self.downcast().message_queue.borrow_mut().push(event);\n\n loop {\n match self.downcast().servo_browser.borrow_state() {\n BorrowState::Unused => {\n let event = match self.downcast().message_queue.borrow_mut().pop() {\n None => return,\n Some(event) => event,\n };\n self.downcast().servo_browser.borrow_mut().handle_event(event);\n }\n _ => {\n \/\/ We're trying to send an event while processing another one. 
This will\n \/\/ cause general badness, so queue up that event instead of immediately\n \/\/ processing it.\n break\n }\n }\n }\n }\n\n fn get_title_for_main_frame(&self) {\n self.downcast().servo_browser.borrow().get_title_for_main_frame()\n }\n\n fn pinch_zoom_level(&self) -> f32 {\n self.downcast().servo_browser.borrow().pinch_zoom_level()\n }\n}\n\n#[cfg(target_os=\"macos\")]\npub fn get_null_window_handle() -> cef_window_handle_t {\n ptr::null_mut()\n}\n#[cfg(target_os=\"linux\")]\npub fn get_null_window_handle() -> cef_window_handle_t {\n 0\n}\n\npub fn update() {\n BROWSERS.with(|browsers| {\n for browser in browsers.borrow().iter() {\n if browser.downcast().callback_executed.get() == false {\n browser_callback_after_created(browser.clone());\n }\n let mut events = match browser.downcast().window {\n Some(ref win) => win.wait_events(),\n None => vec![WindowEvent::Idle]\n };\n loop {\n match events.pop() {\n Some(event) => browser.send_window_event(event),\n None => break\n }\n }\n }\n });\n}\n\npub fn close(browser: CefBrowser) {\n BROWSERS.with(|browsers| {\n let mut browsers = browsers.borrow_mut();\n browsers.iter()\n .position(|&ref n| n.downcast().id == browser.downcast().id)\n .map(|e| browsers.remove(e));\n });\n}\n\npub fn get_window(browser: &CefBrowser) -> cef_window_handle_t {\n browser.downcast().window_handle\n}\n\npub fn browser_callback_after_created(browser: CefBrowser) {\n if browser.downcast().client.is_null_cef_object() {\n return\n }\n let client = browser.downcast().client.clone();\n let life_span_handler = client.get_life_span_handler();\n if life_span_handler.is_not_null_cef_object() {\n life_span_handler.on_after_created(browser.clone());\n }\n browser.downcast().callback_executed.set(true);\n}\n\nfn browser_host_create(window_info: &cef_window_info_t,\n client: CefClient,\n url: *const cef_string_t,\n callback_executed: bool)\n -> CefBrowser {\n let browser = ServoCefBrowser::new(window_info, client).as_cef_interface();\n browser.init(window_info);\n if callback_executed {\n browser_callback_after_created(browser.clone());\n }\n \/\/if url != ptr::null() {\n \/\/unsafe { browser.downcast().frame.load_url(CefWrap::to_rust(url)); }\n \/\/}\n BROWSERS.with(|browsers| {\n browsers.borrow_mut().push(browser.clone());\n });\n browser\n}\n\ncef_static_method_impls! {\n fn cef_browser_host_create_browser(window_info: *const cef_window_info_t,\n client: *mut cef_client_t,\n url: *const cef_string_t,\n _browser_settings: *const cef_browser_settings_t,\n _request_context: *mut cef_request_context_t)\n -> c_int {{\n let client: CefClient = client;\n let _browser_settings: &cef_browser_settings_t = _browser_settings;\n let _request_context: CefRequestContext = _request_context;\n browser_host_create(window_info, client, url, false);\n 1i32\n }}\n fn cef_browser_host_create_browser_sync(window_info: *const cef_window_info_t,\n client: *mut cef_client_t,\n url: *const cef_string_t,\n _browser_settings: *const cef_browser_settings_t,\n _request_context: *mut cef_request_context_t)\n -> *mut cef_browser_t {{\n let client: CefClient = client;\n let _browser_settings: &cef_browser_settings_t = _browser_settings;\n let _request_context: CefRequestContext = _request_context;\n browser_host_create(window_info, client, url, true)\n }}\n}\n<commit_msg>force resize after sending cef_browser::on_after_created() callback<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse browser_host::{ServoCefBrowserHost, ServoCefBrowserHostExtensions};\nuse eutil::Downcast;\nuse frame::{ServoCefFrame, ServoCefFrameExtensions};\nuse interfaces::{CefBrowser, CefBrowserHost, CefClient, CefFrame, CefRequestContext};\nuse interfaces::{cef_browser_t, cef_browser_host_t, cef_client_t, cef_frame_t};\nuse interfaces::{cef_request_context_t};\nuse servo::Browser;\nuse types::{cef_browser_settings_t, cef_string_t, cef_window_info_t, cef_window_handle_t};\nuse window;\n\nuse compositing::windowing::{WindowNavigateMsg, WindowEvent};\nuse glutin_app;\nuse libc::c_int;\nuse std::cell::{Cell, RefCell, BorrowState};\nuse std::rc::Rc;\nuse std::sync::atomic::{AtomicIsize, Ordering};\n\nthread_local!(pub static ID_COUNTER: AtomicIsize = AtomicIsize::new(0));\nthread_local!(pub static BROWSERS: RefCell<Vec<CefBrowser>> = RefCell::new(vec!()));\n\npub enum ServoBrowser {\n Invalid,\n OnScreen(Browser),\n OffScreen(Browser),\n}\n\nimpl ServoBrowser {\n fn handle_event(&mut self, event: WindowEvent) {\n match *self {\n ServoBrowser::OnScreen(ref mut browser) => { browser.handle_events(vec![event]); }\n ServoBrowser::OffScreen(ref mut browser) => { browser.handle_events(vec![event]); }\n ServoBrowser::Invalid => {}\n }\n }\n\n pub fn get_title_for_main_frame(&self) {\n match *self {\n ServoBrowser::OnScreen(ref browser) => browser.get_title_for_main_frame(),\n ServoBrowser::OffScreen(ref browser) => browser.get_title_for_main_frame(),\n ServoBrowser::Invalid => {}\n }\n }\n\n pub fn pinch_zoom_level(&self) -> f32 {\n match *self {\n ServoBrowser::OnScreen(ref browser) => browser.pinch_zoom_level(),\n ServoBrowser::OffScreen(ref browser) => browser.pinch_zoom_level(),\n ServoBrowser::Invalid => 1.0,\n }\n }\n}\n\ncef_class_impl! 
{\n ServoCefBrowser : CefBrowser, cef_browser_t {\n fn get_host(&this,) -> *mut cef_browser_host_t {{\n this.downcast().host.clone()\n }}\n\n fn can_go_back(&this,) -> c_int {{\n this.downcast().back.get() as c_int\n }}\n\n fn can_go_forward(&this,) -> c_int {{\n this.downcast().forward.get() as c_int\n }}\n\n fn is_loading(&this,) -> c_int {{\n this.downcast().loading.get() as c_int\n }}\n\n fn go_back(&this,) -> () {{\n this.send_window_event(WindowEvent::Navigation(WindowNavigateMsg::Back));\n }}\n\n fn go_forward(&this,) -> () {{\n this.send_window_event(WindowEvent::Navigation(WindowNavigateMsg::Forward));\n }}\n\n \/\/ Returns the main (top-level) frame for the browser window.\n fn get_main_frame(&this,) -> *mut cef_frame_t {{\n this.downcast().frame.clone()\n }}\n\n fn get_identifier(&this,) -> c_int {{\n this.downcast().id as c_int\n }}\n }\n}\n\npub struct ServoCefBrowser {\n \/\/\/ A reference to the browser's primary frame.\n pub frame: CefFrame,\n \/\/\/ A reference to the browser's host.\n pub host: CefBrowserHost,\n \/\/\/ A reference to the browser client.\n pub client: CefClient,\n \/\/\/ the glutin window when using windowed rendering\n pub window: Option<Rc<glutin_app::window::Window>>,\n \/\/\/ Whether the on-created callback has fired yet.\n pub callback_executed: Cell<bool>,\n \/\/\/ whether the browser can navigate back\n pub back: Cell<bool>,\n \/\/\/ whether the browser can navigate forward\n pub forward: Cell<bool>,\n \/\/\/ whether the browser is loading\n pub loading: Cell<bool>,\n \/\/\/ the display system window handle: only to be used with host.get_window_handle()\n window_handle: cef_window_handle_t,\n\n id: isize,\n servo_browser: RefCell<ServoBrowser>,\n message_queue: RefCell<Vec<WindowEvent>>,\n}\n\nimpl ServoCefBrowser {\n pub fn new(window_info: &cef_window_info_t, client: CefClient) -> ServoCefBrowser {\n let frame = ServoCefFrame::new().as_cef_interface();\n let host = ServoCefBrowserHost::new(client.clone()).as_cef_interface();\n let mut window_handle: cef_window_handle_t = get_null_window_handle();\n let mut glutin_window: Option<Rc<glutin_app::window::Window>> = None;\n\n let servo_browser = if window_info.windowless_rendering_enabled == 0 {\n glutin_window = Some(glutin_app::create_window(window_info.parent_window as glutin_app::WindowID));\n let servo_browser = Browser::new(glutin_window.clone());\n window_handle = match glutin_window {\n Some(ref win) => win.platform_window() as cef_window_handle_t,\n None => get_null_window_handle()\n };\n ServoBrowser::OnScreen(servo_browser)\n } else {\n ServoBrowser::Invalid\n };\n\n let id = ID_COUNTER.with(|counter| {\n counter.fetch_add(1, Ordering::SeqCst)\n });\n\n ServoCefBrowser {\n frame: frame,\n host: host,\n client: client,\n window: glutin_window,\n callback_executed: Cell::new(false),\n servo_browser: RefCell::new(servo_browser),\n message_queue: RefCell::new(vec!()),\n id: id,\n back: Cell::new(false),\n forward: Cell::new(false),\n loading: Cell::new(false),\n window_handle: window_handle,\n }\n }\n}\n\npub trait ServoCefBrowserExtensions {\n fn init(&self, window_info: &cef_window_info_t);\n fn send_window_event(&self, event: WindowEvent);\n fn get_title_for_main_frame(&self);\n fn pinch_zoom_level(&self) -> f32;\n}\n\nimpl ServoCefBrowserExtensions for CefBrowser {\n fn init(&self, window_info: &cef_window_info_t) {\n if window_info.windowless_rendering_enabled != 0 {\n let window = window::Window::new(window_info.width, window_info.height);\n window.set_browser(self.clone());\n let 
servo_browser = Browser::new(Some(window.clone()));\n *self.downcast().servo_browser.borrow_mut() = ServoBrowser::OffScreen(servo_browser);\n }\n\n self.downcast().host.set_browser((*self).clone());\n self.downcast().frame.set_browser((*self).clone());\n if window_info.windowless_rendering_enabled == 0 {\n self.downcast().host.initialize_compositing();\n }\n }\n\n fn send_window_event(&self, event: WindowEvent) {\n self.downcast().message_queue.borrow_mut().push(event);\n\n loop {\n match self.downcast().servo_browser.borrow_state() {\n BorrowState::Unused => {\n let event = match self.downcast().message_queue.borrow_mut().pop() {\n None => return,\n Some(event) => event,\n };\n self.downcast().servo_browser.borrow_mut().handle_event(event);\n }\n _ => {\n \/\/ We're trying to send an event while processing another one. This will\n \/\/ cause general badness, so queue up that event instead of immediately\n \/\/ processing it.\n break\n }\n }\n }\n }\n\n fn get_title_for_main_frame(&self) {\n self.downcast().servo_browser.borrow().get_title_for_main_frame()\n }\n\n fn pinch_zoom_level(&self) -> f32 {\n self.downcast().servo_browser.borrow().pinch_zoom_level()\n }\n}\n\n#[cfg(target_os=\"macos\")]\npub fn get_null_window_handle() -> cef_window_handle_t {\n ptr::null_mut()\n}\n#[cfg(target_os=\"linux\")]\npub fn get_null_window_handle() -> cef_window_handle_t {\n 0\n}\n\npub fn update() {\n BROWSERS.with(|browsers| {\n for browser in browsers.borrow().iter() {\n if browser.downcast().callback_executed.get() == false {\n browser_callback_after_created(browser.clone());\n }\n let mut events = match browser.downcast().window {\n Some(ref win) => win.wait_events(),\n None => vec![WindowEvent::Idle]\n };\n loop {\n match events.pop() {\n Some(event) => browser.send_window_event(event),\n None => break\n }\n }\n }\n });\n}\n\npub fn close(browser: CefBrowser) {\n BROWSERS.with(|browsers| {\n let mut browsers = browsers.borrow_mut();\n browsers.iter()\n .position(|&ref n| n.downcast().id == browser.downcast().id)\n .map(|e| browsers.remove(e));\n });\n}\n\npub fn get_window(browser: &CefBrowser) -> cef_window_handle_t {\n browser.downcast().window_handle\n}\n\npub fn browser_callback_after_created(browser: CefBrowser) {\n if browser.downcast().client.is_null_cef_object() {\n return\n }\n let client = browser.downcast().client.clone();\n let life_span_handler = client.get_life_span_handler();\n if life_span_handler.is_not_null_cef_object() {\n life_span_handler.on_after_created(browser.clone());\n }\n browser.downcast().callback_executed.set(true);\n browser.downcast().host.was_resized();\n}\n\nfn browser_host_create(window_info: &cef_window_info_t,\n client: CefClient,\n url: *const cef_string_t,\n callback_executed: bool)\n -> CefBrowser {\n let browser = ServoCefBrowser::new(window_info, client).as_cef_interface();\n browser.init(window_info);\n if callback_executed {\n browser_callback_after_created(browser.clone());\n }\n \/\/if url != ptr::null() {\n \/\/unsafe { browser.downcast().frame.load_url(CefWrap::to_rust(url)); }\n \/\/}\n BROWSERS.with(|browsers| {\n browsers.borrow_mut().push(browser.clone());\n });\n browser\n}\n\ncef_static_method_impls! 
{\n fn cef_browser_host_create_browser(window_info: *const cef_window_info_t,\n client: *mut cef_client_t,\n url: *const cef_string_t,\n _browser_settings: *const cef_browser_settings_t,\n _request_context: *mut cef_request_context_t)\n -> c_int {{\n let client: CefClient = client;\n let _browser_settings: &cef_browser_settings_t = _browser_settings;\n let _request_context: CefRequestContext = _request_context;\n browser_host_create(window_info, client, url, false);\n 1i32\n }}\n fn cef_browser_host_create_browser_sync(window_info: *const cef_window_info_t,\n client: *mut cef_client_t,\n url: *const cef_string_t,\n _browser_settings: *const cef_browser_settings_t,\n _request_context: *mut cef_request_context_t)\n -> *mut cef_browser_t {{\n let client: CefClient = client;\n let _browser_settings: &cef_browser_settings_t = _browser_settings;\n let _request_context: CefRequestContext = _request_context;\n browser_host_create(window_info, client, url, true)\n }}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Only show calling points for now.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Log latencies at debug.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>for demo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Closes #7364 Test case<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[feature(managed_boxes)];\n\nuse std::cell::RefCell;\n\n\/\/ Regresion test for issue 7364\nstatic managed: @RefCell<int> = @RefCell::new(0);\n\/\/~^ ERROR static items are not allowed to have managed pointers\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for sensible #[start] error message.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[start]\nfn start(argc: int, argv: **u8, crate_map: *u8) -> int {\n \/\/~^ ERROR start function expects type: `fn(int, **u8) -> int`\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for #80772<commit_after>\/\/ check-pass\n\ntrait SomeTrait {}\n\npub struct Exhibit {\n constant: usize,\n factory: fn(&usize) -> Box<dyn SomeTrait>,\n}\n\npub const A_CONSTANT: &[Exhibit] = &[\n Exhibit {\n constant: 1,\n factory: |_| unimplemented!(),\n },\n Exhibit {\n constant: \"Hello world\".len(),\n factory: |_| unimplemented!(),\n },\n];\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update (7 kyu) Complementary DNA.rs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate sync;\n\nuse std::task;\nuse sync::MutexArc;\n\nfn test_mutex_arc_nested() {\n let arc = ~MutexArc::new(1);\n let arc2 = ~MutexArc::new(*arc);\n\n task::spawn(proc() {\n (*arc2).access(|mutex| { \/\/~ ERROR instantiating a type parameter with an incompatible type\n })\n });\n}\n\nfn main() {}\n<commit_msg>Remove the compile-fail test that's now obsolete<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ ignore-fast\n\n\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[feature(managed_boxes)];\n\nextern crate collections;\n\nuse collections::list::{List, Cons, Nil};\n\nfn pure_length_go<T>(ls: &List<T>, acc: uint) -> uint {\n match *ls { Nil => { acc } Cons(_, ref tl) => { pure_length_go(&**tl, acc + 1u) } }\n}\n\nfn pure_length<T>(ls: &List<T>) -> uint { pure_length_go(ls, 0u) }\n\nfn nonempty_list<T>(ls: &List<T>) -> bool { pure_length(ls) > 0u }\n\nfn safe_head<T:Clone>(ls: &List<T>) -> T {\n assert!(!ls.is_empty());\n return ls.head().unwrap().clone();\n}\n\npub fn main() {\n let mylist = Cons(1u, ~Nil);\n assert!((nonempty_list(&mylist)));\n assert_eq!(safe_head(&mylist), 1u);\n}\n<commit_msg>test: remove pure test, which is now redundant with inline tests<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`][result] type for I\/O operations.\n\/\/\/\n\/\/\/ [result]: ..\/result\/enum.Result.html\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. `Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ 
use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[unstable(feature = \"read_exact\", reason = \"recently added\", issue = \"27585\")]\n UnexpectedEOF,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => \"os error\",\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use error::Error as error_Error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<commit_msg>fix link on std::result::Result<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`](..\/result\/enum.Result.html) type for I\/O \n\/\/\/ operations.\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. `Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An 
entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[unstable(feature = \"read_exact\", reason = \"recently added\", issue = \"27585\")]\n UnexpectedEOF,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => \"os error\",\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use error::Error as error_Error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Documentation generation for rustbuild.\n\/\/!\n\/\/! This module implements generation for all bits and pieces of documentation\n\/\/! for the Rust project. This notably includes suites like the rust book, the\n\/\/! nomicon, standalone documentation, etc.\n\/\/!\n\/\/! 
Everything here is basically just a shim around calling either `rustbook` or\n\/\/! `rustdoc`.\n\nuse std::fs::{self, File};\nuse std::io::prelude::*;\nuse std::path::Path;\nuse std::process::Command;\n\nuse {Build, Compiler, Mode};\nuse util::{up_to_date, cp_r};\n\n\/\/\/ Invoke `rustbook` as compiled in `stage` for `target` for the doc book\n\/\/\/ `name` into the `out` path.\n\/\/\/\n\/\/\/ This will not actually generate any documentation if the documentation has\n\/\/\/ already been generated.\npub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {\n t!(fs::create_dir_all(out));\n\n let out = out.join(name);\n let compiler = Compiler::new(stage, &build.config.build);\n let src = build.src.join(\"src\/doc\").join(name);\n let index = out.join(\"index.html\");\n let rustbook = build.tool(&compiler, \"rustbook\");\n if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {\n return\n }\n println!(\"Rustbook stage{} ({}) - {}\", stage, target, name);\n let _ = fs::remove_dir_all(&out);\n build.run(build.tool_cmd(&compiler, \"rustbook\")\n .arg(\"build\")\n .arg(&src)\n .arg(out));\n}\n\n\/\/\/ Generates all standalone documentation as compiled by the rustdoc in `stage`\n\/\/\/ for the `target` into `out`.\n\/\/\/\n\/\/\/ This will list all of `src\/doc` looking for markdown files and appropriately\n\/\/\/ perform transformations like substituting `VERSION`, `SHORT_HASH`, and\n\/\/\/ `STAMP` alongw ith providing the various header\/footer HTML we've cutomized.\n\/\/\/\n\/\/\/ In the end, this is just a glorified wrapper around rustdoc!\npub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} standalone ({})\", stage, target);\n t!(fs::create_dir_all(out));\n\n let compiler = Compiler::new(stage, &build.config.build);\n\n let favicon = build.src.join(\"src\/doc\/favicon.inc\");\n let footer = build.src.join(\"src\/doc\/footer.inc\");\n let full_toc = build.src.join(\"src\/doc\/full-toc.inc\");\n t!(fs::copy(build.src.join(\"src\/doc\/rust.css\"), out.join(\"rust.css\")));\n\n let version_input = build.src.join(\"src\/doc\/version_info.html.template\");\n let version_info = out.join(\"version_info.html\");\n\n if !up_to_date(&version_input, &version_info) {\n let mut info = String::new();\n t!(t!(File::open(&version_input)).read_to_string(&mut info));\n let blank = String::new();\n let short = build.short_ver_hash.as_ref().unwrap_or(&blank);\n let hash = build.ver_hash.as_ref().unwrap_or(&blank);\n let info = info.replace(\"VERSION\", &build.release)\n .replace(\"SHORT_HASH\", short)\n .replace(\"STAMP\", hash);\n t!(t!(File::create(&version_info)).write_all(info.as_bytes()));\n }\n\n for file in t!(fs::read_dir(build.src.join(\"src\/doc\"))) {\n let file = t!(file);\n let path = file.path();\n let filename = path.file_name().unwrap().to_str().unwrap();\n if !filename.ends_with(\".md\") || filename == \"README.md\" {\n continue\n }\n\n let html = out.join(filename).with_extension(\"html\");\n let rustdoc = build.rustdoc(&compiler);\n if up_to_date(&path, &html) &&\n up_to_date(&footer, &html) &&\n up_to_date(&favicon, &html) &&\n up_to_date(&full_toc, &html) &&\n up_to_date(&version_info, &html) &&\n up_to_date(&rustdoc, &html) {\n continue\n }\n\n let mut cmd = Command::new(&rustdoc);\n build.add_rustc_lib_path(&compiler, &mut cmd);\n cmd.arg(\"--html-after-content\").arg(&footer)\n .arg(\"--html-before-content\").arg(&version_info)\n .arg(\"--html-in-header\").arg(&favicon)\n 
.arg(\"--markdown-playground-url\")\n .arg(\"https:\/\/play.rust-lang.org\/\")\n .arg(\"-o\").arg(out)\n .arg(&path);\n\n if filename == \"reference.md\" {\n cmd.arg(\"--html-in-header\").arg(&full_toc);\n }\n\n if filename == \"not_found.md\" {\n cmd.arg(\"--markdown-no-toc\")\n .arg(\"--markdown-css\")\n .arg(\"https:\/\/doc.rust-lang.org\/rust.css\");\n } else {\n cmd.arg(\"--markdown-css\").arg(\"rust.css\");\n }\n build.run(&mut cmd);\n }\n}\n\n\/\/\/ Compile all standard library documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for the standard library and its\n\/\/\/ dependencies. This is largely just a wrapper around `cargo doc`.\npub fn std(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} std ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Libstd)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n\n build.clear_if_dirty(&out_dir, &rustdoc);\n\n let mut cargo = build.cargo(&compiler, Mode::Libstd, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/std_shim\/Cargo.toml\"))\n .arg(\"--features\").arg(build.std_features());\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Compile all libtest documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for libtest and its dependencies. This\n\/\/\/ is largely just a wrapper around `cargo doc`.\npub fn test(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} test ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Libtest)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n\n build.clear_if_dirty(&out_dir, &rustdoc);\n\n let mut cargo = build.cargo(&compiler, Mode::Libtest, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/test_shim\/Cargo.toml\"));\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Generate all compiler documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for the compiler libraries and their\n\/\/\/ dependencies. 
This is largely just a wrapper around `cargo doc`.\npub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} compiler ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Librustc)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n if !up_to_date(&rustdoc, &out_dir.join(\"rustc\/index.html\")) {\n t!(fs::remove_dir_all(&out_dir));\n }\n let mut cargo = build.cargo(&compiler, Mode::Librustc, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/Cargo.toml\"))\n .arg(\"--features\").arg(build.rustc_features());\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Generates the HTML rendered error-index by running the\n\/\/\/ `error_index_generator` tool.\npub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} error index ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let mut index = build.tool_cmd(&compiler, \"error_index_generator\");\n index.arg(\"html\");\n index.arg(out.join(\"error-index.html\"));\n\n \/\/ FIXME: shouldn't have to pass this env var\n index.env(\"CFG_BUILD\", &build.config.build);\n\n build.run(&mut index);\n}\n<commit_msg>only remove directory if it exists<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Documentation generation for rustbuild.\n\/\/!\n\/\/! This module implements generation for all bits and pieces of documentation\n\/\/! for the Rust project. This notably includes suites like the rust book, the\n\/\/! nomicon, standalone documentation, etc.\n\/\/!\n\/\/! Everything here is basically just a shim around calling either `rustbook` or\n\/\/! 
`rustdoc`.\n\nuse std::fs::{self, File};\nuse std::io::prelude::*;\nuse std::path::Path;\nuse std::process::Command;\n\nuse {Build, Compiler, Mode};\nuse util::{up_to_date, cp_r};\n\n\/\/\/ Invoke `rustbook` as compiled in `stage` for `target` for the doc book\n\/\/\/ `name` into the `out` path.\n\/\/\/\n\/\/\/ This will not actually generate any documentation if the documentation has\n\/\/\/ already been generated.\npub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {\n t!(fs::create_dir_all(out));\n\n let out = out.join(name);\n let compiler = Compiler::new(stage, &build.config.build);\n let src = build.src.join(\"src\/doc\").join(name);\n let index = out.join(\"index.html\");\n let rustbook = build.tool(&compiler, \"rustbook\");\n if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {\n return\n }\n println!(\"Rustbook stage{} ({}) - {}\", stage, target, name);\n let _ = fs::remove_dir_all(&out);\n build.run(build.tool_cmd(&compiler, \"rustbook\")\n .arg(\"build\")\n .arg(&src)\n .arg(out));\n}\n\n\/\/\/ Generates all standalone documentation as compiled by the rustdoc in `stage`\n\/\/\/ for the `target` into `out`.\n\/\/\/\n\/\/\/ This will list all of `src\/doc` looking for markdown files and appropriately\n\/\/\/ perform transformations like substituting `VERSION`, `SHORT_HASH`, and\n\/\/\/ `STAMP` alongw ith providing the various header\/footer HTML we've cutomized.\n\/\/\/\n\/\/\/ In the end, this is just a glorified wrapper around rustdoc!\npub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} standalone ({})\", stage, target);\n t!(fs::create_dir_all(out));\n\n let compiler = Compiler::new(stage, &build.config.build);\n\n let favicon = build.src.join(\"src\/doc\/favicon.inc\");\n let footer = build.src.join(\"src\/doc\/footer.inc\");\n let full_toc = build.src.join(\"src\/doc\/full-toc.inc\");\n t!(fs::copy(build.src.join(\"src\/doc\/rust.css\"), out.join(\"rust.css\")));\n\n let version_input = build.src.join(\"src\/doc\/version_info.html.template\");\n let version_info = out.join(\"version_info.html\");\n\n if !up_to_date(&version_input, &version_info) {\n let mut info = String::new();\n t!(t!(File::open(&version_input)).read_to_string(&mut info));\n let blank = String::new();\n let short = build.short_ver_hash.as_ref().unwrap_or(&blank);\n let hash = build.ver_hash.as_ref().unwrap_or(&blank);\n let info = info.replace(\"VERSION\", &build.release)\n .replace(\"SHORT_HASH\", short)\n .replace(\"STAMP\", hash);\n t!(t!(File::create(&version_info)).write_all(info.as_bytes()));\n }\n\n for file in t!(fs::read_dir(build.src.join(\"src\/doc\"))) {\n let file = t!(file);\n let path = file.path();\n let filename = path.file_name().unwrap().to_str().unwrap();\n if !filename.ends_with(\".md\") || filename == \"README.md\" {\n continue\n }\n\n let html = out.join(filename).with_extension(\"html\");\n let rustdoc = build.rustdoc(&compiler);\n if up_to_date(&path, &html) &&\n up_to_date(&footer, &html) &&\n up_to_date(&favicon, &html) &&\n up_to_date(&full_toc, &html) &&\n up_to_date(&version_info, &html) &&\n up_to_date(&rustdoc, &html) {\n continue\n }\n\n let mut cmd = Command::new(&rustdoc);\n build.add_rustc_lib_path(&compiler, &mut cmd);\n cmd.arg(\"--html-after-content\").arg(&footer)\n .arg(\"--html-before-content\").arg(&version_info)\n .arg(\"--html-in-header\").arg(&favicon)\n .arg(\"--markdown-playground-url\")\n .arg(\"https:\/\/play.rust-lang.org\/\")\n .arg(\"-o\").arg(out)\n 
.arg(&path);\n\n if filename == \"reference.md\" {\n cmd.arg(\"--html-in-header\").arg(&full_toc);\n }\n\n if filename == \"not_found.md\" {\n cmd.arg(\"--markdown-no-toc\")\n .arg(\"--markdown-css\")\n .arg(\"https:\/\/doc.rust-lang.org\/rust.css\");\n } else {\n cmd.arg(\"--markdown-css\").arg(\"rust.css\");\n }\n build.run(&mut cmd);\n }\n}\n\n\/\/\/ Compile all standard library documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for the standard library and its\n\/\/\/ dependencies. This is largely just a wrapper around `cargo doc`.\npub fn std(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} std ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Libstd)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n\n build.clear_if_dirty(&out_dir, &rustdoc);\n\n let mut cargo = build.cargo(&compiler, Mode::Libstd, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/std_shim\/Cargo.toml\"))\n .arg(\"--features\").arg(build.std_features());\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Compile all libtest documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for libtest and its dependencies. This\n\/\/\/ is largely just a wrapper around `cargo doc`.\npub fn test(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} test ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Libtest)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n\n build.clear_if_dirty(&out_dir, &rustdoc);\n\n let mut cargo = build.cargo(&compiler, Mode::Libtest, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/test_shim\/Cargo.toml\"));\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Generate all compiler documentation.\n\/\/\/\n\/\/\/ This will generate all documentation for the compiler libraries and their\n\/\/\/ dependencies. 
This is largely just a wrapper around `cargo doc`.\npub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} compiler ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let out_dir = build.stage_out(&compiler, Mode::Librustc)\n .join(target).join(\"doc\");\n let rustdoc = build.rustdoc(&compiler);\n if !up_to_date(&rustdoc, &out_dir.join(\"rustc\/index.html\")) && out_dir.exists() {\n t!(fs::remove_dir_all(&out_dir));\n }\n let mut cargo = build.cargo(&compiler, Mode::Librustc, target, \"doc\");\n cargo.arg(\"--manifest-path\")\n .arg(build.src.join(\"src\/rustc\/Cargo.toml\"))\n .arg(\"--features\").arg(build.rustc_features());\n build.run(&mut cargo);\n cp_r(&out_dir, out)\n}\n\n\/\/\/ Generates the HTML rendered error-index by running the\n\/\/\/ `error_index_generator` tool.\npub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {\n println!(\"Documenting stage{} error index ({})\", stage, target);\n t!(fs::create_dir_all(out));\n let compiler = Compiler::new(stage, &build.config.build);\n let mut index = build.tool_cmd(&compiler, \"error_index_generator\");\n index.arg(\"html\");\n index.arg(out.join(\"error-index.html\"));\n\n \/\/ FIXME: shouldn't have to pass this env var\n index.env(\"CFG_BUILD\", &build.config.build);\n\n build.run(&mut index);\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::arc::{Arc, Weak};\nuse alloc::boxed::Box;\n\nuse collections::{BTreeMap, String};\nuse collections::string::ToString;\n\nuse core::cell::Cell;\n\nuse scheduler::context::context_switch;\n\nuse schemes::{Result, Resource, ResourceSeek, KScheme, Url};\n\nuse sync::Intex;\n\nuse system::error::{Error, EBADF, EINVAL, ENOENT, ESPIPE};\n\nstruct SchemeInner {\n name: String,\n next_id: Cell<usize>,\n todo: Intex<BTreeMap<usize, (usize, usize, usize, usize)>>,\n done: Intex<BTreeMap<usize, (usize, usize, usize, usize)>>,\n}\n\nimpl SchemeInner {\n fn new(name: String) -> SchemeInner {\n SchemeInner {\n name: name,\n next_id: Cell::new(1),\n todo: Intex::new(BTreeMap::new()),\n done: Intex::new(BTreeMap::new()),\n }\n }\n\n fn call(&self, regs: &mut (usize, usize, usize, usize)) {\n let id = self.next_id.get();\n\n \/\/TODO: What should be done about collisions in self.todo or self.done?\n {\n let mut next_id = id + 1;\n if next_id <= 0 {\n next_id = 1;\n }\n self.next_id.set(next_id);\n }\n\n self.todo.lock().insert(id, *regs);\n\n loop {\n if let Some(new_regs) = self.done.lock().remove(&id) {\n *regs = new_regs;\n return\n }\n\n unsafe { context_switch(false) } ;\n }\n }\n}\n\npub struct SchemeResource {\n inner: Weak<SchemeInner>,\n file_id: usize,\n}\n\nimpl Resource for SchemeResource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Result<Box<Resource>> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Return the url of this resource\n fn url(&self) -> Url {\n Url::new()\n }\n\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Sync the resource\n fn sync(&mut self) -> Result<()> {\n Err(Error::new(EBADF))\n }\n\n fn truncate(&mut self, len: usize) -> Result<()> {\n Err(Error::new(EBADF))\n }\n}\n\npub struct SchemeServerResource {\n 
inner: Weak<SchemeInner>,\n}\n\nimpl Resource for SchemeServerResource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Result<Box<Resource>> {\n Ok(box SchemeServerResource {\n inner: self.inner.clone()\n })\n }\n\n \/\/\/ Return the url of this resource\n fn url(&self) -> Url {\n if let Some(scheme) = self.inner.upgrade() {\n Url::from_string(\":\".to_string() + &scheme.name)\n } else {\n Url::new()\n }\n }\n\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n Ok(0)\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n Ok(0)\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(ESPIPE))\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Sync the resource\n fn sync(&mut self) -> Result<()> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(EINVAL))\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n fn truncate(&mut self, len: usize) -> Result<()> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(EINVAL))\n }else {\n Err(Error::new(EBADF))\n }\n }\n}\n\n\/\/\/ Scheme has to be wrapped\npub struct Scheme {\n inner: Arc<SchemeInner>\n}\n\nimpl Scheme {\n pub fn new(name: String) -> Box<Scheme> {\n box Scheme {\n inner: Arc::new(SchemeInner::new(name))\n }\n }\n\n pub fn server(&self) -> Box<Resource> {\n box SchemeServerResource {\n inner: Arc::downgrade(&self.inner)\n }\n }\n}\n\nimpl KScheme for Scheme {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> &str {\n &self.inner.name\n }\n\n fn open(&mut self, url: &Url, flags: usize) -> Result<Box<Resource>> {\n Err(Error::new(ENOENT))\n }\n\n fn unlink(&mut self, url: &Url) -> Result<()> {\n Err(Error::new(ENOENT))\n }\n}\n<commit_msg>Implement sender\/reciever on scheme bus, WIP syscall handlers<commit_after>use alloc::arc::{Arc, Weak};\nuse alloc::boxed::Box;\n\nuse collections::{BTreeMap, String};\nuse collections::string::ToString;\n\nuse core::cell::Cell;\nuse core::mem::size_of;\n\nuse scheduler::context::context_switch;\n\nuse schemes::{Result, Resource, ResourceSeek, KScheme, Url};\n\nuse sync::Intex;\n\nuse system::error::{Error, EBADF, EINVAL, ENOENT, ESPIPE};\nuse system::scheme::Packet;\nuse system::syscall::SYS_OPEN;\n\nstruct SchemeInner {\n name: String,\n next_id: Cell<usize>,\n todo: Intex<BTreeMap<usize, (usize, usize, usize, usize)>>,\n done: Intex<BTreeMap<usize, (usize, usize, usize, usize)>>,\n}\n\nimpl SchemeInner {\n fn new(name: String) -> SchemeInner {\n SchemeInner {\n name: name,\n next_id: Cell::new(1),\n todo: Intex::new(BTreeMap::new()),\n done: Intex::new(BTreeMap::new()),\n }\n }\n\n fn recv(&self, packet: &mut Packet) {\n loop {\n {\n let mut todo = self.todo.lock();\n\n packet.id = if let Some(id) = todo.keys().next() {\n *id\n } else {\n 0\n };\n\n if packet.id > 0 {\n if let Some(regs) = todo.remove(&packet.id) {\n packet.a = regs.0;\n packet.b = regs.1;\n packet.c = regs.2;\n packet.d = regs.3;\n return\n }\n }\n }\n\n unsafe { context_switch(false) } ;\n }\n }\n\n fn send(&self, packet: &Packet) {\n self.done.lock().insert(packet.id, (packet.a, packet.b, packet.c, packet.d));\n }\n\n fn call(&self, a: usize, b: usize, c: usize, d: usize) -> usize {\n let 
id = self.next_id.get();\n\n \/\/TODO: What should be done about collisions in self.todo or self.done?\n {\n let mut next_id = id + 1;\n if next_id <= 0 {\n next_id = 1;\n }\n self.next_id.set(next_id);\n }\n\n self.todo.lock().insert(id, (a, b, c, d));\n\n loop {\n if let Some(regs) = self.done.lock().remove(&id) {\n return regs.0;\n }\n\n unsafe { context_switch(false) } ;\n }\n }\n}\n\npub struct SchemeResource {\n inner: Weak<SchemeInner>,\n file_id: usize,\n}\n\nimpl Resource for SchemeResource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Result<Box<Resource>> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Return the url of this resource\n fn url(&self) -> Url {\n Url::new()\n }\n\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {\n Err(Error::new(EBADF))\n }\n\n \/\/\/ Sync the resource\n fn sync(&mut self) -> Result<()> {\n Err(Error::new(EBADF))\n }\n\n fn truncate(&mut self, len: usize) -> Result<()> {\n Err(Error::new(EBADF))\n }\n}\n\npub struct SchemeServerResource {\n inner: Weak<SchemeInner>,\n}\n\nimpl Resource for SchemeServerResource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Result<Box<Resource>> {\n Ok(box SchemeServerResource {\n inner: self.inner.clone()\n })\n }\n\n \/\/\/ Return the url of this resource\n fn url(&self) -> Url {\n if let Some(scheme) = self.inner.upgrade() {\n Url::from_string(\":\".to_string() + &scheme.name)\n } else {\n Url::new()\n }\n }\n\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n if buf.len() == size_of::<Packet>() {\n let packet_ptr: *mut Packet = buf.as_mut_ptr() as *mut Packet;\n scheme.recv(unsafe { &mut *packet_ptr });\n\n Ok(size_of::<Packet>())\n } else {\n Err(Error::new(EINVAL))\n }\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n if buf.len() == size_of::<Packet>() {\n let packet_ptr: *const Packet = buf.as_ptr() as *const Packet;\n scheme.send(unsafe { &*packet_ptr });\n\n Ok(size_of::<Packet>())\n } else {\n Err(Error::new(EINVAL))\n }\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Result<usize> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(ESPIPE))\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n \/\/\/ Sync the resource\n fn sync(&mut self) -> Result<()> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(EINVAL))\n }else {\n Err(Error::new(EBADF))\n }\n }\n\n fn truncate(&mut self, len: usize) -> Result<()> {\n if let Some(scheme) = self.inner.upgrade() {\n Err(Error::new(EINVAL))\n }else {\n Err(Error::new(EBADF))\n }\n }\n}\n\n\/\/\/ Scheme has to be wrapped\npub struct Scheme {\n inner: Arc<SchemeInner>\n}\n\nimpl Scheme {\n pub fn new(name: String) -> Box<Scheme> {\n box Scheme {\n inner: Arc::new(SchemeInner::new(name))\n }\n }\n\n pub fn server(&self) -> Box<Resource> {\n box SchemeServerResource {\n inner: Arc::downgrade(&self.inner)\n }\n }\n}\n\nimpl KScheme for Scheme {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> &str {\n 
&self.inner.name\n }\n\n fn open(&mut self, url: &Url, flags: usize) -> Result<Box<Resource>> {\n let c_str = url.string.clone() + \"\\0\";\n debugln!(\"{} open: {}\", self.inner.name, self.inner.call(SYS_OPEN, c_str.as_ptr() as usize, 0, 0));\n\n Err(Error::new(ENOENT))\n }\n\n fn unlink(&mut self, url: &Url) -> Result<()> {\n Err(Error::new(ENOENT))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #28730 - apasel422:issue-21922, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ops::Add;\nfn show(z: i32) {\n println!(\"{}\", z)\n}\nfn main() {\n let x = 23;\n let y = 42;\n show(Add::add( x, y));\n show(Add::add( x, &y));\n show(Add::add(&x, y));\n show(Add::add(&x, &y));\n show( x + y);\n show( x + &y);\n show(&x + y);\n show(&x + &y);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fmt;\nuse std::fmt::Show;\nuse std::hash::Hash;\nuse serialize::{Encodable, Decodable, Encoder, Decoder};\n\n\/\/\/ An owned smart pointer.\npub struct P<T> {\n ptr: Box<T>\n}\n\n#[allow(non_snake_case)]\n\/\/\/ Construct a P<T> from a T value.\npub fn P<T: 'static>(value: T) -> P<T> {\n P {\n ptr: box value\n }\n}\n\nimpl<T: 'static> P<T> {\n pub fn and_then<U>(self, f: |T| -> U) -> U {\n f(*self.ptr)\n }\n\n pub fn map(mut self, f: |T| -> T) -> P<T> {\n use std::{mem, ptr};\n unsafe {\n let p = &mut *self.ptr;\n \/\/ FIXME(#5016) this shouldn't need to zero to be safe.\n mem::move_val_init(p, f(ptr::read_and_zero(p)));\n }\n self\n }\n}\n\nimpl<T> Deref<T> for P<T> {\n fn deref<'a>(&'a self) -> &'a T {\n &*self.ptr\n }\n}\n\nimpl<T: 'static + Clone> Clone for P<T> {\n fn clone(&self) -> P<T> {\n P((**self).clone())\n }\n}\n\nimpl<T: PartialEq> PartialEq for P<T> {\n fn eq(&self, other: &P<T>) -> bool {\n **self == **other\n }\n}\n\nimpl<T: Eq> Eq for P<T> {}\n\nimpl<T: Show> Show for P<T> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n (**self).fmt(f)\n }\n}\n\nimpl<S, T: Hash<S>> Hash<S> for P<T> {\n fn hash(&self, state: &mut S) {\n (**self).hash(state);\n }\n}\n\nimpl<E, D: Decoder<E>, T: 'static + Decodable<D, E>> Decodable<D, E> for P<T> {\n fn decode(d: &mut D) -> Result<P<T>, E> {\n Decodable::decode(d).map(P)\n }\n}\n\nimpl<E, S: Encoder<E>, T: Encodable<S, E>> Encodable<S, E> for P<T> {\n fn encode(&self, s: &mut S) -> Result<(), E> {\n (**self).encode(s)\n }\n}\n<commit_msg>syntax: document the ptr module.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The AST pointer\n\/\/!\n\/\/! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in the AST.\n\/\/!\n\/\/! # Motivations and benefits\n\/\/!\n\/\/! * **Identity**: sharing AST nodes is problematic for the various analysis passes\n\/\/! (e.g. one may be able to bypass the borrow checker with a shared `ExprAddrOf`\n\/\/! node taking a mutable borrow). The only reason `@T` in the AST hasn't caused\n\/\/! issues is because of inefficient folding passes which would always deduplicate\n\/\/! any such shared nodes. Even if the AST were to switch to an arena, this would\n\/\/! still hold, i.e. it couldn't use `&'a T`, but rather a wrapper like `P<'a, T>`.\n\/\/!\n\/\/! * **Immutability**: `P<T>` disallows mutating its inner `T`, unlike `Box<T>`\n\/\/! (unless it contains an `Unsafe` interior, but that may be denied later).\n\/\/! This mainly prevents mistakes, but can also enforces a kind of \"purity\".\n\/\/!\n\/\/! * **Efficiency**: folding can reuse allocation space for `P<T>` and `Vec<T>`,\n\/\/! the latter even when the input and output types differ (as it would be the\n\/\/! case with arenas or a GADT AST using type parameters to toggle features).\n\/\/!\n\/\/! * **Maintainability**: `P<T>` provides a fixed interface - `Deref`,\n\/\/! `and_then` and `map` - which can remain fully functional even if the\n\/\/! implementation changes (using a special thread-local heap, for example).\n\/\/! Moreover, a switch to, e.g. 
`P<'a, T>` would be easy and mostly automated.\n\nuse std::fmt;\nuse std::fmt::Show;\nuse std::hash::Hash;\nuse serialize::{Encodable, Decodable, Encoder, Decoder};\n\n\/\/\/ An owned smart pointer.\npub struct P<T> {\n ptr: Box<T>\n}\n\n#[allow(non_snake_case)]\n\/\/\/ Construct a `P<T>` from a `T` value.\npub fn P<T: 'static>(value: T) -> P<T> {\n P {\n ptr: box value\n }\n}\n\nimpl<T: 'static> P<T> {\n \/\/\/ Move out of the pointer.\n \/\/\/ Intended for chaining transformations not covered by `map`.\n pub fn and_then<U>(self, f: |T| -> U) -> U {\n f(*self.ptr)\n }\n\n \/\/\/ Transform the inner value, consuming `self` and producing a new `P<T>`.\n pub fn map(mut self, f: |T| -> T) -> P<T> {\n use std::{mem, ptr};\n unsafe {\n let p = &mut *self.ptr;\n \/\/ FIXME(#5016) this shouldn't need to zero to be safe.\n mem::move_val_init(p, f(ptr::read_and_zero(p)));\n }\n self\n }\n}\n\nimpl<T> Deref<T> for P<T> {\n fn deref<'a>(&'a self) -> &'a T {\n &*self.ptr\n }\n}\n\nimpl<T: 'static + Clone> Clone for P<T> {\n fn clone(&self) -> P<T> {\n P((**self).clone())\n }\n}\n\nimpl<T: PartialEq> PartialEq for P<T> {\n fn eq(&self, other: &P<T>) -> bool {\n **self == **other\n }\n}\n\nimpl<T: Eq> Eq for P<T> {}\n\nimpl<T: Show> Show for P<T> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n (**self).fmt(f)\n }\n}\n\nimpl<S, T: Hash<S>> Hash<S> for P<T> {\n fn hash(&self, state: &mut S) {\n (**self).hash(state);\n }\n}\n\nimpl<E, D: Decoder<E>, T: 'static + Decodable<D, E>> Decodable<D, E> for P<T> {\n fn decode(d: &mut D) -> Result<P<T>, E> {\n Decodable::decode(d).map(P)\n }\n}\n\nimpl<E, S: Encoder<E>, T: Encodable<S, E>> Encodable<S, E> for P<T> {\n fn encode(&self, s: &mut S) -> Result<(), E> {\n (**self).encode(s)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Get rid of fd hack<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>bugfix: echo was treating anything preceded by - as an option flag<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>started work on a read_line impl<commit_after>use std::io;\nuse std::io::{Write, Read};\n\nuse radix_trie::Trie;\n\nuse rsh::State;\n\npub struct Input {\n completions: Trie<String, String>,\n}\n\nimpl Input {\n pub fn from(s: &State) -> Input {\n Input { completions: Trie::new() }\n }\n\n pub fn prompt(&self, prompt: String) -> String {\n print!(\"{}\", prompt);\n\n \/\/ this forces the prompt to print\n io::stdout().flush().expect(\"unable to flush stdout\");\n\n \/\/ read the user input\n let mut input = String::new();\n\n for byt in io::stdin().bytes() {\n match byt {\n Ok(b) => {\n \/\/ TODO this is naive it ignores multi-byte utf-8 characters\n let c = b as char;\n\n input.push(c);\n\n if c == '\\n' {\n break;\n }\n }\n Err(e) => println!(\"Error reading from stdin: {}\", e),\n };\n }\n\n input\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::sync::atomic::Ordering::{Acquire, Release, Relaxed};\nuse std::sync::atomic::AtomicBool;\nuse std::{ptr, mem};\nuse std::thread::{self, Thread};\n\nuse mem::epoch::{self, Atomic, Owned, Shared};\nuse mem::CachePadded;\n\n\/\/\/ A Michael-Scott lock-free queue, with support for blocking `pop`s.\n\/\/\/\n\/\/\/ Usable with any number of producers and consumers.\n\/\/ The representation here is a singly-linked list, with a sentinel\n\/\/ node at the front. In general the `tail` pointer may lag behind the\n\/\/ actual tail. 
Non-sentinal nodes are either all `Data` or all\n\/\/ `Blocked` (requests for data from blocked threads).\npub struct MsQueue<T> {\n head: CachePadded<Atomic<Node<T>>>,\n tail: CachePadded<Atomic<Node<T>>>,\n}\n\nstruct Node<T> {\n payload: Payload<T>,\n next: Atomic<Node<T>>,\n}\n\nenum Payload<T> {\n \/\/\/ A node with actual data that can be popped.\n Data(T),\n \/\/\/ A node representing a blocked request for data.\n Blocked(*mut Signal<T>),\n}\n\n\/\/\/ A blocked request for data, which includes a slot to write the data.\nstruct Signal<T> {\n \/\/\/ Thread to unpark when data is ready.\n thread: Thread,\n \/\/\/ The actual data, when available.\n data: Option<T>,\n \/\/\/ Is the data ready? Needed to cope with spurious wakeups.\n ready: AtomicBool,\n}\n\nimpl<T> Node<T> {\n fn is_data(&self) -> bool {\n if let Payload::Data(_) = self.payload { true } else { false }\n }\n}\n\n\/\/ Any particular `T` should never accessed concurrently, so no need\n\/\/ for Sync.\nunsafe impl<T: Send> Sync for MsQueue<T> {}\nunsafe impl<T: Send> Send for MsQueue<T> {}\n\nimpl<T> MsQueue<T> {\n \/\/\/ Create a new, empty queue.\n pub fn new() -> MsQueue<T> {\n let q = MsQueue {\n head: CachePadded::new(Atomic::null()),\n tail: CachePadded::new(Atomic::null()),\n };\n let sentinel = Owned::new(Node {\n payload: unsafe { mem::uninitialized() },\n next: Atomic::null(),\n });\n let guard = epoch::pin();\n let sentinel = q.head.store_and_ref(sentinel, Relaxed, &guard);\n q.tail.store_shared(Some(sentinel), Relaxed);\n q\n }\n\n #[inline(always)]\n \/\/\/ Attempt to atomically place `n` into the `next` pointer of `onto`.\n \/\/\/\n \/\/\/ If unsuccessful, returns ownership of `n`, possibly updating\n \/\/\/ the queue's `tail` pointer.\n fn push_internal(&self,\n guard: &epoch::Guard,\n onto: Shared<Node<T>>,\n n: Owned<Node<T>>)\n -> Result<(), Owned<Node<T>>>\n {\n \/\/ is `onto` the actual tail?\n if let Some(next) = onto.next.load(Acquire, guard) {\n \/\/ if not, try to \"help\" by moving the tail pointer forward\n self.tail.cas_shared(Some(onto), Some(next), Release);\n Err(n)\n } else {\n \/\/ looks like the actual tail; attempt to link in `n`\n onto.next.cas_and_ref(None, n, Release, guard).map(|shared| {\n \/\/ try to move the tail pointer forward\n self.tail.cas_shared(Some(onto), Some(shared), Release);\n })\n }\n }\n\n \/\/\/ Add `t` to the back of the queue, possibly waking up threads\n \/\/\/ blocked on `pop`.\n pub fn push(&self, t: T) {\n \/\/\/ We may or may not need to allocate a node; once we do,\n \/\/\/ we cache that allocation.\n enum Cache<T> {\n Data(T),\n Node(Owned<Node<T>>),\n }\n\n impl<T> Cache<T> {\n \/\/\/ Extract the node if cached, or allocate if not.\n fn into_node(self) -> Owned<Node<T>> {\n match self {\n Cache::Data(t) => {\n Owned::new(Node {\n payload: Payload::Data(t),\n next: Atomic::null()\n })\n }\n Cache::Node(n) => n\n }\n }\n\n \/\/\/ Extract the data from the cache, deallocating any cached node.\n fn into_data(self) -> T {\n match self {\n Cache::Data(t) => t,\n Cache::Node(node) => {\n match node.into_inner().payload {\n Payload::Data(t) => t,\n _ => unreachable!(),\n }\n }\n }\n }\n }\n\n let mut cache = Cache::Data(t); \/\/ don't allocate up front\n let guard = epoch::pin();\n\n loop {\n \/\/ We push onto the tail, so we'll start optimistically by looking\n \/\/ there first.\n let tail = self.tail.load(Acquire, &guard).unwrap();\n\n \/\/ Is the queue in Data mode (empty queues can be viewed as either mode)?\n if tail.is_data() ||\n self.head.load(Relaxed, 
&guard).unwrap().as_raw() == tail.as_raw()\n {\n \/\/ Attempt to push onto the `tail` snapshot; fails if\n \/\/ `tail.next` has changed, which will always be the case if the\n \/\/ queue has transitioned to blocking mode.\n match self.push_internal(&guard, tail, cache.into_node()) {\n Ok(_) => return,\n Err(n) => {\n \/\/ replace the cache, retry whole thing\n cache = Cache::Node(n)\n }\n }\n } else {\n \/\/ Queue is in blocking mode. Attempt to unblock a thread.\n let head = self.head.load(Acquire, &guard).unwrap();\n \/\/ Get a handle on the first blocked node. Racy, so queue might\n \/\/ be empty or in data mode by the time we see it.\n let request = head.next.load(Acquire, &guard).and_then(|next| {\n match next.payload {\n Payload::Blocked(signal) => Some((next, signal)),\n Payload::Data(_) => None,\n }\n });\n if let Some((blocked_node, signal)) = request {\n \/\/ race to dequeue the node\n if self.head.cas_shared(Some(head), Some(blocked_node), Release) {\n unsafe {\n \/\/ signal the thread\n (*signal).data = Some(cache.into_data());\n (*signal).ready.store(true, Relaxed);\n (*signal).thread.unpark();\n guard.unlinked(head);\n return;\n }\n }\n }\n }\n }\n }\n\n #[inline(always)]\n \/\/ Attempt to pop a data node. `Ok(None)` if queue is empty or in blocking\n \/\/ mode; `Err(())` if lost race to pop.\n fn pop_internal(&self, guard: &epoch::Guard) -> Result<Option<T>, ()> {\n let head = self.head.load(Acquire, guard).unwrap();\n if let Some(next) = head.next.load(Acquire, guard) {\n if let Payload::Data(ref t) = next.payload {\n unsafe {\n if self.head.cas_shared(Some(head), Some(next), Release) {\n guard.unlinked(head);\n Ok(Some(ptr::read(t)))\n } else {\n Err(())\n }\n }\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n }\n\n \/\/\/ Check if this queue is empty.\n pub fn is_empty(&self) -> bool {\n let guard = epoch::pin();\n let head = self.head.load(Acquire, &guard).unwrap();\n\n if let Some(next) = head.next.load(Acquire, &guard) {\n if let Payload::Data(_) = next.payload {\n false\n } else {\n true\n }\n } else {\n true\n }\n }\n\n \/\/\/ Attempt to dequeue from the front.\n \/\/\/\n \/\/\/ Returns `None` if the queue is observed to be empty.\n pub fn try_pop(&self) -> Option<T> {\n let guard = epoch::pin();\n loop {\n if let Ok(r) = self.pop_internal(&guard) {\n return r;\n }\n }\n }\n\n \/\/\/ Dequeue an element from the front of the queue, blocking if the queue is\n \/\/\/ empty.\n pub fn pop(&self) -> T {\n let guard = epoch::pin();\n\n \/\/ Fast path: keep retrying until we observe that the queue has no data,\n \/\/ avoiding the allocation of a blocked node.\n loop {\n match self.pop_internal(&guard) {\n Ok(Some(r)) => {\n return r;\n }\n Ok(None) => {\n break;\n }\n Err(()) => {}\n }\n }\n\n \/\/ The signal gets to live on the stack, since this stack frame will be\n \/\/ blocked until receiving the signal.\n let mut signal = Signal {\n thread: thread::current(),\n data: None,\n ready: AtomicBool::new(false),\n };\n\n \/\/ Go ahead and allocate the blocked node; chances are, we'll need it.\n let mut node = Owned::new(Node {\n payload: Payload::Blocked(&mut signal),\n next: Atomic::null(),\n });\n\n loop {\n \/\/ try a normal pop\n if let Ok(Some(r)) = self.pop_internal(&guard) {\n return r;\n }\n\n \/\/ At this point, we believe the queue is empty\/blocked.\n \/\/ Snapshot the tail, onto which we want to push a blocked node.\n let tail = self.tail.load(Relaxed, &guard).unwrap();\n\n \/\/ Double-check that we're in blocking mode\n if tail.is_data() {\n \/\/ The current 
tail is in data mode, so we probably need to abort.\n \/\/ BUT, it might be the sentinel, so check for that first.\n let head = self.head.load(Relaxed, &guard).unwrap();\n if tail.is_data() && tail.as_raw() != head.as_raw() { continue; }\n }\n\n \/\/ At this point, the tail snapshot is either a blocked node deep in\n \/\/ the queue, the sentinel, or no longer accessible from the queue.\n \/\/ In *ALL* of these cases, if we succeed in pushing onto the\n \/\/ snapshot, we know we are maintaining the core invariant: all\n \/\/ reachable, non-sentinel nodes have the same payload mode, in this\n \/\/ case, blocked.\n match self.push_internal(&guard, tail, node) {\n Ok(()) => {\n while !signal.ready.load(Relaxed) {\n thread::park();\n }\n return signal.data.unwrap();\n }\n Err(n) => {\n node = n;\n }\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n const CONC_COUNT: i64 = 1000000;\n\n use scope;\n use super::*;\n\n #[test]\n fn push_try_pop_1() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n assert!(!q.is_empty());\n assert_eq!(q.try_pop(), Some(37));\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_2() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n q.push(48);\n assert_eq!(q.try_pop(), Some(37));\n assert!(!q.is_empty());\n assert_eq!(q.try_pop(), Some(48));\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_many_seq() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n for i in 0..200 {\n q.push(i)\n }\n assert!(!q.is_empty());\n for i in 0..200 {\n assert_eq!(q.try_pop(), Some(i));\n }\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_pop_1() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n assert!(!q.is_empty());\n assert_eq!(q.pop(), 37);\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_pop_2() {\n let q: MsQueue<i64> = MsQueue::new();\n q.push(37);\n q.push(48);\n assert_eq!(q.pop(), 37);\n assert_eq!(q.pop(), 48);\n }\n\n #[test]\n fn push_pop_many_seq() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n for i in 0..200 {\n q.push(i)\n }\n assert!(!q.is_empty());\n for i in 0..200 {\n assert_eq!(q.pop(), i);\n }\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_many_spsc() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n\n scope(|scope| {\n scope.spawn(|| {\n let mut next = 0;\n\n while next < CONC_COUNT {\n if let Some(elem) = q.try_pop() {\n assert_eq!(elem, next);\n next += 1;\n }\n }\n });\n\n for i in 0..CONC_COUNT {\n q.push(i)\n }\n assert!(!q.is_empty());\n });\n }\n\n #[test]\n fn push_try_pop_many_spmc() {\n fn recv(_t: i32, q: &MsQueue<i64>) {\n let mut cur = -1;\n for _i in 0..CONC_COUNT {\n if let Some(elem) = q.try_pop() {\n assert!(elem > cur);\n cur = elem;\n\n if cur == CONC_COUNT - 1 { break }\n }\n }\n }\n\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n let qr = &q;\n scope(|scope| {\n for i in 0..3 {\n scope.spawn(move || recv(i, qr));\n }\n\n scope.spawn(|| {\n for i in 0..CONC_COUNT {\n q.push(i);\n }\n assert!(!q.is_empty());\n })\n });\n }\n\n #[test]\n fn push_try_pop_many_mpmc() {\n enum LR { Left(i64), Right(i64) }\n\n let q: MsQueue<LR> = MsQueue::new();\n assert!(q.is_empty());\n\n scope(|scope| {\n for _t in 0..2 {\n scope.spawn(|| {\n for i in CONC_COUNT-1..CONC_COUNT {\n q.push(LR::Left(i))\n }\n assert!(!q.is_empty());\n });\n scope.spawn(|| {\n for i in CONC_COUNT-1..CONC_COUNT {\n q.push(LR::Right(i))\n }\n assert!(!q.is_empty());\n });\n scope.spawn(|| 
{\n let mut vl = vec![];\n let mut vr = vec![];\n for _i in 0..CONC_COUNT {\n match q.try_pop() {\n Some(LR::Left(x)) => vl.push(x),\n Some(LR::Right(x)) => vr.push(x),\n _ => {}\n }\n }\n\n let mut vl2 = vl.clone();\n let mut vr2 = vr.clone();\n vl2.sort();\n vr2.sort();\n\n assert_eq!(vl, vl2);\n assert_eq!(vr, vr2);\n });\n\n assert!(q.is_empty());\n }\n });\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_pop_many_spsc() {\n let q: MsQueue<i64> = MsQueue::new();\n\n scope(|scope| {\n scope.spawn(|| {\n let mut next = 0;\n while next < CONC_COUNT {\n assert_eq!(q.pop(), next);\n next += 1;\n }\n });\n\n for i in 0..CONC_COUNT {\n q.push(i)\n }\n });\n assert!(q.is_empty());\n }\n\n #[test]\n fn is_empty_dont_pop() {\n let q: MsQueue<i64> = MsQueue::new();\n q.push(20);\n q.push(20);\n assert!(!q.is_empty());\n assert!(!q.is_empty());\n assert!(q.try_pop().is_some());\n }\n}\n<commit_msg>Fix test bustage<commit_after>use std::sync::atomic::Ordering::{Acquire, Release, Relaxed};\nuse std::sync::atomic::AtomicBool;\nuse std::{ptr, mem};\nuse std::thread::{self, Thread};\n\nuse mem::epoch::{self, Atomic, Owned, Shared};\nuse mem::CachePadded;\n\n\/\/\/ A Michael-Scott lock-free queue, with support for blocking `pop`s.\n\/\/\/\n\/\/\/ Usable with any number of producers and consumers.\n\/\/ The representation here is a singly-linked list, with a sentinel\n\/\/ node at the front. In general the `tail` pointer may lag behind the\n\/\/ actual tail. Non-sentinal nodes are either all `Data` or all\n\/\/ `Blocked` (requests for data from blocked threads).\npub struct MsQueue<T> {\n head: CachePadded<Atomic<Node<T>>>,\n tail: CachePadded<Atomic<Node<T>>>,\n}\n\nstruct Node<T> {\n payload: Payload<T>,\n next: Atomic<Node<T>>,\n}\n\nenum Payload<T> {\n \/\/\/ A node with actual data that can be popped.\n Data(T),\n \/\/\/ A node representing a blocked request for data.\n Blocked(*mut Signal<T>),\n}\n\n\/\/\/ A blocked request for data, which includes a slot to write the data.\nstruct Signal<T> {\n \/\/\/ Thread to unpark when data is ready.\n thread: Thread,\n \/\/\/ The actual data, when available.\n data: Option<T>,\n \/\/\/ Is the data ready? 
Needed to cope with spurious wakeups.\n ready: AtomicBool,\n}\n\nimpl<T> Node<T> {\n fn is_data(&self) -> bool {\n if let Payload::Data(_) = self.payload { true } else { false }\n }\n}\n\n\/\/ Any particular `T` should never accessed concurrently, so no need\n\/\/ for Sync.\nunsafe impl<T: Send> Sync for MsQueue<T> {}\nunsafe impl<T: Send> Send for MsQueue<T> {}\n\nimpl<T> MsQueue<T> {\n \/\/\/ Create a new, empty queue.\n pub fn new() -> MsQueue<T> {\n let q = MsQueue {\n head: CachePadded::new(Atomic::null()),\n tail: CachePadded::new(Atomic::null()),\n };\n let sentinel = Owned::new(Node {\n payload: unsafe { mem::uninitialized() },\n next: Atomic::null(),\n });\n let guard = epoch::pin();\n let sentinel = q.head.store_and_ref(sentinel, Relaxed, &guard);\n q.tail.store_shared(Some(sentinel), Relaxed);\n q\n }\n\n #[inline(always)]\n \/\/\/ Attempt to atomically place `n` into the `next` pointer of `onto`.\n \/\/\/\n \/\/\/ If unsuccessful, returns ownership of `n`, possibly updating\n \/\/\/ the queue's `tail` pointer.\n fn push_internal(&self,\n guard: &epoch::Guard,\n onto: Shared<Node<T>>,\n n: Owned<Node<T>>)\n -> Result<(), Owned<Node<T>>>\n {\n \/\/ is `onto` the actual tail?\n if let Some(next) = onto.next.load(Acquire, guard) {\n \/\/ if not, try to \"help\" by moving the tail pointer forward\n self.tail.cas_shared(Some(onto), Some(next), Release);\n Err(n)\n } else {\n \/\/ looks like the actual tail; attempt to link in `n`\n onto.next.cas_and_ref(None, n, Release, guard).map(|shared| {\n \/\/ try to move the tail pointer forward\n self.tail.cas_shared(Some(onto), Some(shared), Release);\n })\n }\n }\n\n \/\/\/ Add `t` to the back of the queue, possibly waking up threads\n \/\/\/ blocked on `pop`.\n pub fn push(&self, t: T) {\n \/\/\/ We may or may not need to allocate a node; once we do,\n \/\/\/ we cache that allocation.\n enum Cache<T> {\n Data(T),\n Node(Owned<Node<T>>),\n }\n\n impl<T> Cache<T> {\n \/\/\/ Extract the node if cached, or allocate if not.\n fn into_node(self) -> Owned<Node<T>> {\n match self {\n Cache::Data(t) => {\n Owned::new(Node {\n payload: Payload::Data(t),\n next: Atomic::null()\n })\n }\n Cache::Node(n) => n\n }\n }\n\n \/\/\/ Extract the data from the cache, deallocating any cached node.\n fn into_data(self) -> T {\n match self {\n Cache::Data(t) => t,\n Cache::Node(node) => {\n match node.into_inner().payload {\n Payload::Data(t) => t,\n _ => unreachable!(),\n }\n }\n }\n }\n }\n\n let mut cache = Cache::Data(t); \/\/ don't allocate up front\n let guard = epoch::pin();\n\n loop {\n \/\/ We push onto the tail, so we'll start optimistically by looking\n \/\/ there first.\n let tail = self.tail.load(Acquire, &guard).unwrap();\n\n \/\/ Is the queue in Data mode (empty queues can be viewed as either mode)?\n if tail.is_data() ||\n self.head.load(Relaxed, &guard).unwrap().as_raw() == tail.as_raw()\n {\n \/\/ Attempt to push onto the `tail` snapshot; fails if\n \/\/ `tail.next` has changed, which will always be the case if the\n \/\/ queue has transitioned to blocking mode.\n match self.push_internal(&guard, tail, cache.into_node()) {\n Ok(_) => return,\n Err(n) => {\n \/\/ replace the cache, retry whole thing\n cache = Cache::Node(n)\n }\n }\n } else {\n \/\/ Queue is in blocking mode. Attempt to unblock a thread.\n let head = self.head.load(Acquire, &guard).unwrap();\n \/\/ Get a handle on the first blocked node. 
Racy, so queue might\n \/\/ be empty or in data mode by the time we see it.\n let request = head.next.load(Acquire, &guard).and_then(|next| {\n match next.payload {\n Payload::Blocked(signal) => Some((next, signal)),\n Payload::Data(_) => None,\n }\n });\n if let Some((blocked_node, signal)) = request {\n \/\/ race to dequeue the node\n if self.head.cas_shared(Some(head), Some(blocked_node), Release) {\n unsafe {\n \/\/ signal the thread\n (*signal).data = Some(cache.into_data());\n (*signal).ready.store(true, Relaxed);\n (*signal).thread.unpark();\n guard.unlinked(head);\n return;\n }\n }\n }\n }\n }\n }\n\n #[inline(always)]\n \/\/ Attempt to pop a data node. `Ok(None)` if queue is empty or in blocking\n \/\/ mode; `Err(())` if lost race to pop.\n fn pop_internal(&self, guard: &epoch::Guard) -> Result<Option<T>, ()> {\n let head = self.head.load(Acquire, guard).unwrap();\n if let Some(next) = head.next.load(Acquire, guard) {\n if let Payload::Data(ref t) = next.payload {\n unsafe {\n if self.head.cas_shared(Some(head), Some(next), Release) {\n guard.unlinked(head);\n Ok(Some(ptr::read(t)))\n } else {\n Err(())\n }\n }\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n }\n\n \/\/\/ Check if this queue is empty.\n pub fn is_empty(&self) -> bool {\n let guard = epoch::pin();\n let head = self.head.load(Acquire, &guard).unwrap();\n\n if let Some(next) = head.next.load(Acquire, &guard) {\n if let Payload::Data(_) = next.payload {\n false\n } else {\n true\n }\n } else {\n true\n }\n }\n\n \/\/\/ Attempt to dequeue from the front.\n \/\/\/\n \/\/\/ Returns `None` if the queue is observed to be empty.\n pub fn try_pop(&self) -> Option<T> {\n let guard = epoch::pin();\n loop {\n if let Ok(r) = self.pop_internal(&guard) {\n return r;\n }\n }\n }\n\n \/\/\/ Dequeue an element from the front of the queue, blocking if the queue is\n \/\/\/ empty.\n pub fn pop(&self) -> T {\n let guard = epoch::pin();\n\n \/\/ Fast path: keep retrying until we observe that the queue has no data,\n \/\/ avoiding the allocation of a blocked node.\n loop {\n match self.pop_internal(&guard) {\n Ok(Some(r)) => {\n return r;\n }\n Ok(None) => {\n break;\n }\n Err(()) => {}\n }\n }\n\n \/\/ The signal gets to live on the stack, since this stack frame will be\n \/\/ blocked until receiving the signal.\n let mut signal = Signal {\n thread: thread::current(),\n data: None,\n ready: AtomicBool::new(false),\n };\n\n \/\/ Go ahead and allocate the blocked node; chances are, we'll need it.\n let mut node = Owned::new(Node {\n payload: Payload::Blocked(&mut signal),\n next: Atomic::null(),\n });\n\n loop {\n \/\/ try a normal pop\n if let Ok(Some(r)) = self.pop_internal(&guard) {\n return r;\n }\n\n \/\/ At this point, we believe the queue is empty\/blocked.\n \/\/ Snapshot the tail, onto which we want to push a blocked node.\n let tail = self.tail.load(Relaxed, &guard).unwrap();\n\n \/\/ Double-check that we're in blocking mode\n if tail.is_data() {\n \/\/ The current tail is in data mode, so we probably need to abort.\n \/\/ BUT, it might be the sentinel, so check for that first.\n let head = self.head.load(Relaxed, &guard).unwrap();\n if tail.is_data() && tail.as_raw() != head.as_raw() { continue; }\n }\n\n \/\/ At this point, the tail snapshot is either a blocked node deep in\n \/\/ the queue, the sentinel, or no longer accessible from the queue.\n \/\/ In *ALL* of these cases, if we succeed in pushing onto the\n \/\/ snapshot, we know we are maintaining the core invariant: all\n \/\/ reachable, non-sentinel nodes have the 
same payload mode, in this\n \/\/ case, blocked.\n match self.push_internal(&guard, tail, node) {\n Ok(()) => {\n while !signal.ready.load(Relaxed) {\n thread::park();\n }\n return signal.data.unwrap();\n }\n Err(n) => {\n node = n;\n }\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n const CONC_COUNT: i64 = 1000000;\n\n use scope;\n use super::*;\n\n #[test]\n fn push_try_pop_1() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n assert!(!q.is_empty());\n assert_eq!(q.try_pop(), Some(37));\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_2() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n q.push(48);\n assert_eq!(q.try_pop(), Some(37));\n assert!(!q.is_empty());\n assert_eq!(q.try_pop(), Some(48));\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_many_seq() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n for i in 0..200 {\n q.push(i)\n }\n assert!(!q.is_empty());\n for i in 0..200 {\n assert_eq!(q.try_pop(), Some(i));\n }\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_pop_1() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n q.push(37);\n assert!(!q.is_empty());\n assert_eq!(q.pop(), 37);\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_pop_2() {\n let q: MsQueue<i64> = MsQueue::new();\n q.push(37);\n q.push(48);\n assert_eq!(q.pop(), 37);\n assert_eq!(q.pop(), 48);\n }\n\n #[test]\n fn push_pop_many_seq() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n for i in 0..200 {\n q.push(i)\n }\n assert!(!q.is_empty());\n for i in 0..200 {\n assert_eq!(q.pop(), i);\n }\n assert!(q.is_empty());\n }\n\n #[test]\n fn push_try_pop_many_spsc() {\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n\n scope(|scope| {\n scope.spawn(|| {\n let mut next = 0;\n\n while next < CONC_COUNT {\n if let Some(elem) = q.try_pop() {\n assert_eq!(elem, next);\n next += 1;\n }\n }\n });\n\n for i in 0..CONC_COUNT {\n q.push(i)\n }\n });\n }\n\n #[test]\n fn push_try_pop_many_spmc() {\n fn recv(_t: i32, q: &MsQueue<i64>) {\n let mut cur = -1;\n for _i in 0..CONC_COUNT {\n if let Some(elem) = q.try_pop() {\n assert!(elem > cur);\n cur = elem;\n\n if cur == CONC_COUNT - 1 { break }\n }\n }\n }\n\n let q: MsQueue<i64> = MsQueue::new();\n assert!(q.is_empty());\n let qr = &q;\n scope(|scope| {\n for i in 0..3 {\n scope.spawn(move || recv(i, qr));\n }\n\n scope.spawn(|| {\n for i in 0..CONC_COUNT {\n q.push(i);\n }\n })\n });\n }\n\n #[test]\n fn push_try_pop_many_mpmc() {\n enum LR { Left(i64), Right(i64) }\n\n let q: MsQueue<LR> = MsQueue::new();\n assert!(q.is_empty());\n\n scope(|scope| {\n for _t in 0..2 {\n scope.spawn(|| {\n for i in CONC_COUNT-1..CONC_COUNT {\n q.push(LR::Left(i))\n }\n });\n scope.spawn(|| {\n for i in CONC_COUNT-1..CONC_COUNT {\n q.push(LR::Right(i))\n }\n });\n scope.spawn(|| {\n let mut vl = vec![];\n let mut vr = vec![];\n for _i in 0..CONC_COUNT {\n match q.try_pop() {\n Some(LR::Left(x)) => vl.push(x),\n Some(LR::Right(x)) => vr.push(x),\n _ => {}\n }\n }\n\n let mut vl2 = vl.clone();\n let mut vr2 = vr.clone();\n vl2.sort();\n vr2.sort();\n\n assert_eq!(vl, vl2);\n assert_eq!(vr, vr2);\n });\n }\n });\n }\n\n #[test]\n fn push_pop_many_spsc() {\n let q: MsQueue<i64> = MsQueue::new();\n\n scope(|scope| {\n scope.spawn(|| {\n let mut next = 0;\n while next < CONC_COUNT {\n assert_eq!(q.pop(), next);\n next += 1;\n }\n });\n\n for i in 0..CONC_COUNT {\n q.push(i)\n }\n });\n assert!(q.is_empty());\n }\n\n #[test]\n fn 
is_empty_dont_pop() {\n let q: MsQueue<i64> = MsQueue::new();\n q.push(20);\n q.push(20);\n assert!(!q.is_empty());\n assert!(!q.is_empty());\n assert!(q.try_pop().is_some());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test(Examples): add a simple login process example<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for issue 776<commit_after>use std::io::{self, Read};\nuse std::time::Duration;\nuse std::{net, thread};\n\nuse mio::net::TcpStream;\nuse mio::{Events, Interests, Poll, Token};\n\nmod util;\n\nuse util::init;\n\n#[test]\nfn issue_776() {\n init();\n\n let l = net::TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = l.local_addr().unwrap();\n\n let t = thread::spawn(move || {\n let mut s = l.accept().expect(\"accept\").0;\n s.set_read_timeout(Some(Duration::from_secs(5)))\n .expect(\"set_read_timeout\");\n let _ = s.read(&mut [0; 16]).expect(\"read\");\n });\n\n let mut poll = Poll::new().unwrap();\n let mut s = TcpStream::connect(addr).unwrap();\n\n poll.registry()\n .register(&s, Token(1), Interests::READABLE | Interests::WRITABLE)\n .unwrap();\n let mut events = Events::with_capacity(16);\n 'outer: loop {\n poll.poll(&mut events, None).unwrap();\n for event in &events {\n if event.token() == Token(1) {\n \/\/ connected\n break 'outer;\n }\n }\n }\n\n let mut b = [0; 1024];\n match s.read(&mut b) {\n Ok(_) => panic!(\"unexpected ok\"),\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => (),\n Err(e) => panic!(\"unexpected error: {:?}\", e),\n }\n\n drop(s);\n t.join().unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse target::TargetOptions;\nuse std::default::Default;\n\npub fn opts() -> TargetOptions {\n TargetOptions {\n linker: \"cc\".to_string(),\n dynamic_linking: true,\n executables: true,\n morestack: false,\n linker_is_gnu: true,\n has_rpath: true,\n position_independent_executables: true,\n\n .. Default::default()\n }\n}\n<commit_msg>switching to gnu archive format per review request<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse target::TargetOptions;\nuse std::default::Default;\n\npub fn opts() -> TargetOptions {\n TargetOptions {\n linker: \"cc\".to_string(),\n dynamic_linking: true,\n executables: true,\n morestack: false,\n linker_is_gnu: true,\n has_rpath: true,\n position_independent_executables: true,\n archive_format: \"gnu\".to_string(),\n\n .. 
Default::default()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* Code to handle method lookups (which can be quite complex) *\/\n\nimport syntax::ast_map;\nimport middle::typeck::infer::methods; \/\/ next_ty_vars\nimport dvec::{dvec, extensions};\n\ntype candidate = {\n self_ty: ty::t, \/\/ type of a in a.b()\n self_substs: ty::substs, \/\/ values for any tvars def'd on the class\n rcvr_ty: ty::t, \/\/ type of receiver in the method def\n n_tps_m: uint, \/\/ number of tvars defined on the method\n fty: ty::t, \/\/ type of the method\n entry: method_map_entry\n};\n\nclass lookup {\n let fcx: @fn_ctxt;\n let expr: @ast::expr;\n let self_expr: @ast::expr;\n let borrow_scope: ast::node_id;\n let node_id: ast::node_id;\n let m_name: ast::ident;\n let mut self_ty: ty::t;\n let mut derefs: uint;\n let candidates: dvec<candidate>;\n let supplied_tps: [ty::t];\n let include_private: bool;\n\n new(fcx: @fn_ctxt,\n expr: @ast::expr, \/\/expr for a.b in a.b()\n self_expr: @ast::expr, \/\/a in a.b(...)\n borrow_scope: ast::node_id, \/\/scope to borrow the expr for\n node_id: ast::node_id, \/\/node id where to store type of fn\n m_name: ast::ident, \/\/b in a.b(...)\n self_ty: ty::t, \/\/type of a in a.b(...)\n supplied_tps: [ty::t], \/\/Xs in a.b::<Xs>(...)\n include_private: bool) {\n\n self.fcx = fcx;\n self.expr = expr;\n self.self_expr = self_expr;\n self.borrow_scope = borrow_scope;\n self.node_id = node_id;\n self.m_name = m_name;\n self.self_ty = self_ty;\n self.derefs = 0u;\n self.candidates = dvec();\n self.supplied_tps = supplied_tps;\n self.include_private = include_private;\n }\n\n \/\/ Entrypoint:\n fn method() -> option<method_map_entry> {\n #debug[\"method lookup(m_name=%s, self_ty=%s)\",\n *self.m_name, self.fcx.infcx.ty_to_str(self.self_ty)];\n\n loop {\n \/\/ First, see whether this is an interface-bounded parameter\n alt ty::get(self.self_ty).struct {\n ty::ty_param(n, did) {\n self.add_candidates_from_param(n, did);\n }\n ty::ty_iface(did, substs) {\n self.add_candidates_from_iface(did, substs);\n }\n ty::ty_class(did, substs) {\n self.add_candidates_from_class(did, substs);\n }\n _ { }\n }\n\n \/\/ if we found anything, stop now. otherwise continue to\n \/\/ loop for impls in scope. 
Note: I don't love these\n \/\/ semantics, but that's what we had so I am preserving\n \/\/ it.\n if self.candidates.len() > 0u {\n break;\n }\n\n self.add_candidates_from_scope();\n\n \/\/ if we found anything, stop before attempting auto-deref.\n if self.candidates.len() > 0u {\n break;\n }\n\n \/\/ check whether we can autoderef and if so loop around again.\n alt ty::deref(self.tcx(), self.self_ty, false) {\n none { break; }\n some(mt) {\n self.self_ty = mt.ty;\n self.derefs += 1u;\n }\n }\n }\n\n if self.candidates.len() == 0u { ret none; }\n\n if self.candidates.len() > 1u {\n self.tcx().sess.span_err(\n self.expr.span,\n \"multiple applicable methods in scope\");\n\n for self.candidates.eachi { |i, candidate|\n alt candidate.entry.origin {\n method_static(did) {\n self.report_static_candidate(i, did);\n }\n method_param(p) {\n self.report_param_candidate(i, p.iface_id);\n }\n method_iface(did, _) {\n self.report_iface_candidate(i, did);\n }\n }\n }\n }\n\n some(self.write_mty_from_candidate(self.candidates[0u]))\n }\n\n fn tcx() -> ty::ctxt { self.fcx.ccx.tcx }\n\n fn report_static_candidate(idx: uint, did: ast::def_id) {\n let span = if did.crate == ast::local_crate {\n alt check self.tcx().items.get(did.node) {\n ast_map::node_method(m, _, _) { m.span }\n }\n } else {\n self.expr.span\n };\n self.tcx().sess.span_note(\n span,\n #fmt[\"candidate #%u is `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn report_param_candidate(idx: uint, did: ast::def_id) {\n self.tcx().sess.span_note(\n self.expr.span,\n #fmt[\"candidate #%u derives from the bound `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn report_iface_candidate(idx: uint, did: ast::def_id) {\n self.tcx().sess.span_note(\n self.expr.span,\n #fmt[\"candidate #%u derives from the type of the receiver, \\\n which is the iface `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn add_candidates_from_param(n: uint, did: ast::def_id) {\n let tcx = self.tcx();\n let mut iface_bnd_idx = 0u; \/\/ count only iface bounds\n let bounds = tcx.ty_param_bounds.get(did.node);\n for vec::each(*bounds) {|bound|\n let (iid, bound_substs) = alt bound {\n ty::bound_copy | ty::bound_send | ty::bound_const {\n cont; \/* ok *\/\n }\n ty::bound_iface(bound_t) {\n alt check ty::get(bound_t).struct {\n ty::ty_iface(i, substs) { (i, substs) }\n }\n }\n };\n\n let ifce_methods = ty::iface_methods(tcx, iid);\n alt vec::position(*ifce_methods, {|m| m.ident == self.m_name}) {\n none {\n \/* check next bound *\/\n iface_bnd_idx += 1u;\n }\n\n some(pos) {\n \/\/ Replace any appearance of `self` with the type of the\n \/\/ generic parameter itself. 
Note that this is the only case\n \/\/ where this replacement is necessary: in all other cases, we\n \/\/ are either invoking a method directly from an impl or class\n \/\/ (where the self type is not permitted), or from a iface\n \/\/ type (in which case methods that refer to self are not\n \/\/ permitted).\n let substs = {self_ty: some(self.self_ty)\n with bound_substs};\n\n self.add_candidates_from_m(\n substs, ifce_methods[pos],\n method_param({iface_id:iid,\n method_num:pos,\n param_num:n,\n bound_num:iface_bnd_idx}));\n }\n }\n }\n\n }\n\n fn add_candidates_from_iface(did: ast::def_id, iface_substs: ty::substs) {\n\n #debug[\"method_from_iface\"];\n\n let ms = *ty::iface_methods(self.tcx(), did);\n for ms.eachi {|i, m|\n if m.ident != self.m_name { cont; }\n\n let m_fty = ty::mk_fn(self.tcx(), m.fty);\n\n if ty::type_has_self(m_fty) {\n self.tcx().sess.span_err(\n self.expr.span,\n \"can not call a method that contains a \\\n self type through a boxed iface\");\n }\n\n if (*m.tps).len() > 0u {\n self.tcx().sess.span_err(\n self.expr.span,\n \"can not call a generic method through a \\\n boxed iface\");\n }\n\n \/\/ Note: although it is illegal to invoke a method that uses self\n \/\/ through a iface instance, we use a dummy subst here so that we\n \/\/ can soldier on with the compilation.\n let substs = {self_ty: some(self.self_ty)\n with iface_substs};\n\n self.add_candidates_from_m(\n substs, m, method_iface(did, i));\n }\n }\n\n fn add_candidates_from_class(did: ast::def_id, class_substs: ty::substs) {\n\n #debug[\"method_from_class\"];\n\n let ms = *ty::iface_methods(self.tcx(), did);\n\n for ms.each {|m|\n if m.ident != self.m_name { cont; }\n\n if m.vis == ast::private && !self.include_private {\n self.tcx().sess.span_fatal(\n self.expr.span,\n \"Call to private method not allowed outside \\\n its defining class\");\n }\n\n \/\/ look up method named <name>.\n let m_declared = ty::lookup_class_method_by_name(\n self.tcx(), did, self.m_name, self.expr.span);\n\n self.add_candidates_from_m(\n class_substs, m, method_static(m_declared));\n }\n }\n\n fn ty_from_did(did: ast::def_id) -> ty::t {\n alt check ty::get(ty::lookup_item_type(self.tcx(), did).ty).struct {\n ty::ty_fn(fty) {\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with fty})\n }\n }\n \/*\n if did.crate == ast::local_crate {\n alt check self.tcx().items.get(did.node) {\n ast_map::node_method(m, _, _) {\n \/\/ NDM iface\/impl regions\n let mt = ty_of_method(self.fcx.ccx, m, ast::rp_none);\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with mt.fty})\n }\n }\n } else {\n alt check ty::get(csearch::get_type(self.tcx(), did).ty).struct {\n ty::ty_fn(fty) {\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with fty})\n }\n }\n }\n *\/\n }\n\n fn add_candidates_from_scope() {\n let impls_vecs = self.fcx.ccx.impl_map.get(self.expr.id);\n let mut added_any = false;\n\n #debug[\"method_from_scope\"];\n\n for list::each(impls_vecs) {|impls|\n for vec::each(*impls) {|im|\n \/\/ Check whether this impl has a method with the right name.\n for im.methods.find({|m| m.ident == self.m_name}).each {|m|\n\n \/\/ determine the `self` of the impl with fresh\n \/\/ variables for each parameter:\n let {substs: impl_substs, ty: impl_ty} =\n impl_self_ty(self.fcx, im.did);\n\n \/\/ if we can assign the caller to the callee, that's a\n \/\/ potential match. 
Collect those in the vector.\n let can_assign = self.fcx.can_mk_assignty(\n self.self_expr, self.borrow_scope,\n self.self_ty, impl_ty);\n #debug[\"can_assign = %?\", can_assign];\n alt can_assign {\n result::err(_) { \/* keep looking *\/ }\n result::ok(_) {\n let fty = self.ty_from_did(m.did);\n self.candidates.push(\n {self_ty: self.self_ty,\n self_substs: impl_substs,\n rcvr_ty: impl_ty,\n n_tps_m: m.n_tps,\n fty: fty,\n entry: {derefs: self.derefs,\n origin: method_static(m.did)}});\n added_any = true;\n }\n }\n }\n }\n\n \/\/ we want to find the innermost scope that has any\n \/\/ matches and then ignore outer scopes\n if added_any {ret;}\n }\n }\n\n fn add_candidates_from_m(self_substs: ty::substs,\n m: ty::method,\n origin: method_origin) {\n let tcx = self.fcx.ccx.tcx;\n\n \/\/ a bit hokey, but the method unbound has a bare protocol, whereas\n \/\/ a.b has a protocol like fn@() (perhaps eventually fn&()):\n let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty});\n\n self.candidates.push(\n {self_ty: self.self_ty,\n self_substs: self_substs,\n rcvr_ty: self.self_ty,\n n_tps_m: (*m.tps).len(),\n fty: fty,\n entry: {derefs: self.derefs, origin: origin}});\n }\n\n fn write_mty_from_candidate(cand: candidate) -> method_map_entry {\n let tcx = self.fcx.ccx.tcx;\n\n #debug[\"write_mty_from_candidate(n_tps_m=%u, fty=%s, entry=%?)\",\n cand.n_tps_m,\n self.fcx.infcx.ty_to_str(cand.fty),\n cand.entry];\n\n \/\/ Make the actual receiver type (cand.self_ty) assignable to the\n \/\/ required receiver type (cand.rcvr_ty). If this method is not\n \/\/ from an impl, this'll basically be a no-nop.\n alt self.fcx.mk_assignty(self.self_expr, self.borrow_scope,\n cand.self_ty, cand.rcvr_ty) {\n result::ok(_) {}\n result::err(_) {\n self.tcx().sess.span_bug(\n self.expr.span,\n #fmt[\"%s was assignable to %s but now is not?\",\n self.fcx.infcx.ty_to_str(cand.self_ty),\n self.fcx.infcx.ty_to_str(cand.rcvr_ty)]);\n }\n }\n\n \/\/ Construct the full set of type parameters for the method,\n \/\/ which is equal to the class tps + the method tps.\n let n_tps_supplied = self.supplied_tps.len();\n let n_tps_m = cand.n_tps_m;\n let m_substs = {\n if n_tps_supplied == 0u {\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else if n_tps_m == 0u {\n tcx.sess.span_err(\n self.expr.span,\n \"this method does not take type parameters\");\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else if n_tps_supplied != n_tps_m {\n tcx.sess.span_err(\n self.expr.span,\n \"incorrect number of type \\\n parameters given for this method\");\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else {\n self.supplied_tps\n }\n };\n\n let all_substs = {tps: cand.self_substs.tps + m_substs\n with cand.self_substs};\n\n self.fcx.write_ty_substs(self.node_id, cand.fty, all_substs);\n\n ret cand.entry;\n }\n}\n\n<commit_msg>Move vector addition out of trans and into libcore.<commit_after>\/* Code to handle method lookups (which can be quite complex) *\/\n\nimport syntax::ast_map;\nimport middle::typeck::infer::methods; \/\/ next_ty_vars\nimport dvec::{dvec, extensions};\n\ntype candidate = {\n self_ty: ty::t, \/\/ type of a in a.b()\n self_substs: ty::substs, \/\/ values for any tvars def'd on the class\n rcvr_ty: ty::t, \/\/ type of receiver in the method def\n n_tps_m: uint, \/\/ number of tvars defined on the method\n fty: ty::t, \/\/ type of the method\n entry: method_map_entry\n};\n\nclass lookup {\n let fcx: @fn_ctxt;\n let expr: @ast::expr;\n let self_expr: @ast::expr;\n let borrow_scope: ast::node_id;\n let node_id: ast::node_id;\n let m_name: 
ast::ident;\n let mut self_ty: ty::t;\n let mut derefs: uint;\n let candidates: dvec<candidate>;\n let supplied_tps: [ty::t];\n let include_private: bool;\n\n new(fcx: @fn_ctxt,\n expr: @ast::expr, \/\/expr for a.b in a.b()\n self_expr: @ast::expr, \/\/a in a.b(...)\n borrow_scope: ast::node_id, \/\/scope to borrow the expr for\n node_id: ast::node_id, \/\/node id where to store type of fn\n m_name: ast::ident, \/\/b in a.b(...)\n self_ty: ty::t, \/\/type of a in a.b(...)\n supplied_tps: [ty::t], \/\/Xs in a.b::<Xs>(...)\n include_private: bool) {\n\n self.fcx = fcx;\n self.expr = expr;\n self.self_expr = self_expr;\n self.borrow_scope = borrow_scope;\n self.node_id = node_id;\n self.m_name = m_name;\n self.self_ty = self_ty;\n self.derefs = 0u;\n self.candidates = dvec();\n self.supplied_tps = supplied_tps;\n self.include_private = include_private;\n }\n\n \/\/ Entrypoint:\n fn method() -> option<method_map_entry> {\n #debug[\"method lookup(m_name=%s, self_ty=%s)\",\n *self.m_name, self.fcx.infcx.ty_to_str(self.self_ty)];\n\n loop {\n \/\/ First, see whether this is an interface-bounded parameter\n alt ty::get(self.self_ty).struct {\n ty::ty_param(n, did) {\n self.add_candidates_from_param(n, did);\n }\n ty::ty_iface(did, substs) {\n self.add_candidates_from_iface(did, substs);\n }\n ty::ty_class(did, substs) {\n self.add_candidates_from_class(did, substs);\n }\n _ { }\n }\n\n \/\/ if we found anything, stop now. otherwise continue to\n \/\/ loop for impls in scope. Note: I don't love these\n \/\/ semantics, but that's what we had so I am preserving\n \/\/ it.\n if self.candidates.len() > 0u {\n break;\n }\n\n self.add_candidates_from_scope();\n\n \/\/ if we found anything, stop before attempting auto-deref.\n if self.candidates.len() > 0u {\n break;\n }\n\n \/\/ check whether we can autoderef and if so loop around again.\n alt ty::deref(self.tcx(), self.self_ty, false) {\n none { break; }\n some(mt) {\n self.self_ty = mt.ty;\n self.derefs += 1u;\n }\n }\n }\n\n if self.candidates.len() == 0u { ret none; }\n\n if self.candidates.len() > 1u {\n self.tcx().sess.span_err(\n self.expr.span,\n \"multiple applicable methods in scope\");\n\n for self.candidates.eachi { |i, candidate|\n alt candidate.entry.origin {\n method_static(did) {\n self.report_static_candidate(i, did);\n }\n method_param(p) {\n self.report_param_candidate(i, p.iface_id);\n }\n method_iface(did, _) {\n self.report_iface_candidate(i, did);\n }\n }\n }\n }\n\n some(self.write_mty_from_candidate(self.candidates[0u]))\n }\n\n fn tcx() -> ty::ctxt { self.fcx.ccx.tcx }\n\n fn report_static_candidate(idx: uint, did: ast::def_id) {\n let span = if did.crate == ast::local_crate {\n alt check self.tcx().items.get(did.node) {\n ast_map::node_method(m, _, _) { m.span }\n }\n } else {\n self.expr.span\n };\n self.tcx().sess.span_note(\n span,\n #fmt[\"candidate #%u is `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn report_param_candidate(idx: uint, did: ast::def_id) {\n self.tcx().sess.span_note(\n self.expr.span,\n #fmt[\"candidate #%u derives from the bound `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn report_iface_candidate(idx: uint, did: ast::def_id) {\n self.tcx().sess.span_note(\n self.expr.span,\n #fmt[\"candidate #%u derives from the type of the receiver, \\\n which is the iface `%s`\",\n (idx+1u),\n ty::item_path_str(self.tcx(), did)]);\n }\n\n fn add_candidates_from_param(n: uint, did: ast::def_id) {\n #debug[\"candidates_from_param\"];\n\n let tcx = self.tcx();\n let mut 
iface_bnd_idx = 0u; \/\/ count only iface bounds\n let bounds = tcx.ty_param_bounds.get(did.node);\n for vec::each(*bounds) {|bound|\n let (iid, bound_substs) = alt bound {\n ty::bound_copy | ty::bound_send | ty::bound_const {\n cont; \/* ok *\/\n }\n ty::bound_iface(bound_t) {\n alt check ty::get(bound_t).struct {\n ty::ty_iface(i, substs) { (i, substs) }\n }\n }\n };\n\n let ifce_methods = ty::iface_methods(tcx, iid);\n alt vec::position(*ifce_methods, {|m| m.ident == self.m_name}) {\n none {\n \/* check next bound *\/\n iface_bnd_idx += 1u;\n }\n\n some(pos) {\n \/\/ Replace any appearance of `self` with the type of the\n \/\/ generic parameter itself. Note that this is the only case\n \/\/ where this replacement is necessary: in all other cases, we\n \/\/ are either invoking a method directly from an impl or class\n \/\/ (where the self type is not permitted), or from a iface\n \/\/ type (in which case methods that refer to self are not\n \/\/ permitted).\n let substs = {self_ty: some(self.self_ty)\n with bound_substs};\n\n self.add_candidates_from_m(\n substs, ifce_methods[pos],\n method_param({iface_id:iid,\n method_num:pos,\n param_num:n,\n bound_num:iface_bnd_idx}));\n }\n }\n }\n\n }\n\n fn add_candidates_from_iface(did: ast::def_id, iface_substs: ty::substs) {\n\n #debug[\"method_from_iface\"];\n\n let ms = *ty::iface_methods(self.tcx(), did);\n for ms.eachi {|i, m|\n if m.ident != self.m_name { cont; }\n\n let m_fty = ty::mk_fn(self.tcx(), m.fty);\n\n if ty::type_has_self(m_fty) {\n self.tcx().sess.span_err(\n self.expr.span,\n \"can not call a method that contains a \\\n self type through a boxed iface\");\n }\n\n if (*m.tps).len() > 0u {\n self.tcx().sess.span_err(\n self.expr.span,\n \"can not call a generic method through a \\\n boxed iface\");\n }\n\n \/\/ Note: although it is illegal to invoke a method that uses self\n \/\/ through a iface instance, we use a dummy subst here so that we\n \/\/ can soldier on with the compilation.\n let substs = {self_ty: some(self.self_ty)\n with iface_substs};\n\n self.add_candidates_from_m(\n substs, m, method_iface(did, i));\n }\n }\n\n fn add_candidates_from_class(did: ast::def_id, class_substs: ty::substs) {\n\n #debug[\"method_from_class\"];\n\n let ms = *ty::iface_methods(self.tcx(), did);\n\n for ms.each {|m|\n if m.ident != self.m_name { cont; }\n\n if m.vis == ast::private && !self.include_private {\n self.tcx().sess.span_fatal(\n self.expr.span,\n \"Call to private method not allowed outside \\\n its defining class\");\n }\n\n \/\/ look up method named <name>.\n let m_declared = ty::lookup_class_method_by_name(\n self.tcx(), did, self.m_name, self.expr.span);\n\n self.add_candidates_from_m(\n class_substs, m, method_static(m_declared));\n }\n }\n\n fn ty_from_did(did: ast::def_id) -> ty::t {\n alt check ty::get(ty::lookup_item_type(self.tcx(), did).ty).struct {\n ty::ty_fn(fty) {\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with fty})\n }\n }\n \/*\n if did.crate == ast::local_crate {\n alt check self.tcx().items.get(did.node) {\n ast_map::node_method(m, _, _) {\n \/\/ NDM iface\/impl regions\n let mt = ty_of_method(self.fcx.ccx, m, ast::rp_none);\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with mt.fty})\n }\n }\n } else {\n alt check ty::get(csearch::get_type(self.tcx(), did).ty).struct {\n ty::ty_fn(fty) {\n ty::mk_fn(self.tcx(), {proto: ast::proto_box with fty})\n }\n }\n }\n *\/\n }\n\n fn add_candidates_from_scope() {\n let impls_vecs = self.fcx.ccx.impl_map.get(self.expr.id);\n let mut added_any = false;\n\n 
#debug[\"method_from_scope\"];\n\n for list::each(impls_vecs) {|impls|\n for vec::each(*impls) {|im|\n \/\/ Check whether this impl has a method with the right name.\n for im.methods.find({|m| m.ident == self.m_name}).each {|m|\n\n \/\/ determine the `self` of the impl with fresh\n \/\/ variables for each parameter:\n let {substs: impl_substs, ty: impl_ty} =\n impl_self_ty(self.fcx, im.did);\n\n \/\/ if we can assign the caller to the callee, that's a\n \/\/ potential match. Collect those in the vector.\n let can_assign = self.fcx.can_mk_assignty(\n self.self_expr, self.borrow_scope,\n self.self_ty, impl_ty);\n #debug[\"can_assign = %?\", can_assign];\n alt can_assign {\n result::err(_) { \/* keep looking *\/ }\n result::ok(_) {\n let fty = self.ty_from_did(m.did);\n self.candidates.push(\n {self_ty: self.self_ty,\n self_substs: impl_substs,\n rcvr_ty: impl_ty,\n n_tps_m: m.n_tps,\n fty: fty,\n entry: {derefs: self.derefs,\n origin: method_static(m.did)}});\n added_any = true;\n }\n }\n }\n }\n\n \/\/ we want to find the innermost scope that has any\n \/\/ matches and then ignore outer scopes\n if added_any {ret;}\n }\n }\n\n fn add_candidates_from_m(self_substs: ty::substs,\n m: ty::method,\n origin: method_origin) {\n let tcx = self.fcx.ccx.tcx;\n\n \/\/ a bit hokey, but the method unbound has a bare protocol, whereas\n \/\/ a.b has a protocol like fn@() (perhaps eventually fn&()):\n let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty});\n\n self.candidates.push(\n {self_ty: self.self_ty,\n self_substs: self_substs,\n rcvr_ty: self.self_ty,\n n_tps_m: (*m.tps).len(),\n fty: fty,\n entry: {derefs: self.derefs, origin: origin}});\n }\n\n fn write_mty_from_candidate(cand: candidate) -> method_map_entry {\n let tcx = self.fcx.ccx.tcx;\n\n #debug[\"write_mty_from_candidate(n_tps_m=%u, fty=%s, entry=%?)\",\n cand.n_tps_m,\n self.fcx.infcx.ty_to_str(cand.fty),\n cand.entry];\n\n \/\/ Make the actual receiver type (cand.self_ty) assignable to the\n \/\/ required receiver type (cand.rcvr_ty). 
If this method is not\n \/\/ from an impl, this'll basically be a no-nop.\n alt self.fcx.mk_assignty(self.self_expr, self.borrow_scope,\n cand.self_ty, cand.rcvr_ty) {\n result::ok(_) {}\n result::err(_) {\n self.tcx().sess.span_bug(\n self.expr.span,\n #fmt[\"%s was assignable to %s but now is not?\",\n self.fcx.infcx.ty_to_str(cand.self_ty),\n self.fcx.infcx.ty_to_str(cand.rcvr_ty)]);\n }\n }\n\n \/\/ Construct the full set of type parameters for the method,\n \/\/ which is equal to the class tps + the method tps.\n let n_tps_supplied = self.supplied_tps.len();\n let n_tps_m = cand.n_tps_m;\n let m_substs = {\n if n_tps_supplied == 0u {\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else if n_tps_m == 0u {\n tcx.sess.span_err(\n self.expr.span,\n \"this method does not take type parameters\");\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else if n_tps_supplied != n_tps_m {\n tcx.sess.span_err(\n self.expr.span,\n \"incorrect number of type \\\n parameters given for this method\");\n self.fcx.infcx.next_ty_vars(n_tps_m)\n } else {\n self.supplied_tps\n }\n };\n\n let all_substs = {tps: cand.self_substs.tps + m_substs\n with cand.self_substs};\n\n self.fcx.write_ty_substs(self.node_id, cand.fty, all_substs);\n\n ret cand.entry;\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix libimagcontact for new StoreId interface with Entries iterator<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #37124 - nikomatsakis:incr-comp-benchmark, r=michaelwoerister<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test where we change the body of a private method in an impl.\n\/\/ We then test what sort of functions must be rebuilt as a result.\n\n\/\/ revisions:rpass1 rpass2\n\/\/ compile-flags: -Z query-dep-graph\n\n#![feature(rustc_attrs)]\n#![feature(stmt_expr_attributes)]\n#![allow(dead_code)]\n\n#![rustc_partition_translated(module=\"struct_point-point\", cfg=\"rpass2\")]\n\n\/\/ FIXME(#37121) -- the following two modules *should* be reused but are not\n#![rustc_partition_translated(module=\"struct_point-fn_calls_methods_in_same_impl\", cfg=\"rpass2\")]\n#![rustc_partition_translated(module=\"struct_point-fn_calls_methods_in_another_impl\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_make_struct\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_read_field\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_write_field\", cfg=\"rpass2\")]\n\nmod point {\n pub struct Point {\n pub x: f32,\n pub y: f32,\n }\n\n impl Point {\n fn distance_squared(&self) -> f32 {\n #[cfg(rpass1)]\n return self.x + self.y;\n\n #[cfg(rpass2)]\n return self.x * self.x + self.y * self.y;\n }\n\n pub fn distance_from_origin(&self) -> f32 {\n self.distance_squared().sqrt()\n }\n }\n\n impl Point {\n pub fn translate(&mut self, x: f32, y: f32) {\n self.x += x;\n self.y += y;\n }\n }\n\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from the same impl which changed\nmod fn_calls_methods_in_same_impl {\n use point::Point;\n\n \/\/ FIXME(#37121) -- we should not need to typeck this again\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let x = Point { x: 2.0, y: 2.0 };\n x.distance_from_origin();\n }\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from another impl\nmod fn_calls_methods_in_another_impl {\n use point::Point;\n\n \/\/ FIXME(#37121) -- we should not need to typeck this again\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let mut x = Point { x: 2.0, y: 2.0 };\n x.translate(3.0, 3.0);\n }\n}\n\n\/\/\/ A fn item that makes an instance of `Point` but does not invoke methods\nmod fn_make_struct {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn make_origin() -> Point {\n Point { x: 2.0, y: 2.0 }\n }\n}\n\n\/\/\/ A fn item that reads fields from `Point` but does not invoke methods\nmod fn_read_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn get_x(p: Point) -> f32 {\n p.x\n }\n}\n\n\/\/\/ A fn item that writes to a field of `Point` but does not invoke methods\nmod fn_write_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn inc_x(p: &mut Point) {\n p.x += 1.0;\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Comments!<commit_after><|endoftext|>"} {"text":"<commit_before>pub use std::mem::uninitialized;\npub use std::collections::TreeMap;\npub use http::server::response::ResponseWriter;\npub use http::headers::response::HeaderCollection;\npub use iron::*;\npub use super::*;\npub use super::headers::*;\npub use super::super::cookie::*;\nuse serialize::json::Json;\n\npub fn get_cookie<'a>(headers: HeaderCollection, secret: Option<String>, key: &str, value: &str) -> String {\n let mut res = unsafe{ ResponseWriter::new(uninitialized()) };\n let signer = Cookie::new(secret);\n let cookie = 
(key.to_string(), value.to_string());\n res.set_cookie(&signer, cookie, headers);\n res.headers.extensions.find(&\"Set-Cookie\".to_string()).unwrap().clone()\n}\n\npub fn get_json_cookie<'a>(headers: HeaderCollection, secret: Option<String>, key: &str, value: &Json) -> String {\n let mut res = unsafe{ ResponseWriter::new(uninitialized()) };\n let signer = Cookie::new(secret);\n let cookie = (key.to_string(), value.clone());\n res.set_json_cookie(&signer, cookie, headers);\n res.headers.extensions.find(&\"Set-Cookie\".to_string()).unwrap().clone()\n}\n\n#[test]\nfn check_cookie() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_cookie(headers, None, \"thing\", \"thing\"), \"thing=thing\".to_string());\n}\n\n#[test]\nfn check_escaping() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_cookie(headers, None, \"~`!@#$%^&*()_+-={}|[]\\\\:\\\";'<>?,.\/'\", \"~`!@#$%^&*()_+-={}|[]\\\\:\\\";'<>?,.\/'\"),\n \"~%60%21%40%23%24%25%5E%26%2A%28%29_%2B-%3D%7B%7D%7C%5B%5D%5C%3A%22%3B%27%3C%3E%3F%2C.%2F%27=\\\n ~%60%21%40%23%24%25%5E%26%2A%28%29_%2B-%3D%7B%7D%7C%5B%5D%5C%3A%22%3B%27%3C%3E%3F%2C.%2F%27\".to_string());\n}\n\n#[test]\nfn check_headers() {\n let mut headers = HeaderCollection {\n expires: None,\n max_age: Some(42),\n domain: Some(\"example.com\".to_string()),\n path: Some(\"\/a\/path\".to_string()),\n secure: true,\n http_only: true,\n extensions: Some(TreeMap::<String, Option<String>>::new())\n };\n headers.extensions.as_mut().unwrap().insert(\"foo\".to_string(), Some(\"bar\".to_string()));\n headers.extensions.as_mut().unwrap().insert(\"@zzmp\".to_string(), None);\n assert_eq!(get_cookie(headers, None, \"thing\", \"thing\"),\n \"thing=thing; Max-Age=42; Domain=example.com; Path=\/a\/path; Secure; Http-Only; @zzmp; foo=bar\".to_string());\n}\n\n#[test]\nfn check_signature() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_cookie(headers, Some(\"@zzmp\".to_string()), \"thing\", \"thung\"),\n \/\/ Hash of @zzmpthung\n \"thing=s:thung.2bc9a8b82a4a393ab67b2b8aaff0e3ab33cb4aca05ef4a0ba201141fbb029f42\".to_string());\n}\n\n\/\/TO DO\n#[test]\nfn check_json() {\n\n}\n\n#[test]\nfn check_signed_json() {\n\n}\n<commit_msg>(test) Json.<commit_after>pub use std::mem::uninitialized;\npub use std::collections::TreeMap;\npub use http::server::response::ResponseWriter;\npub use http::headers::response::HeaderCollection;\npub use iron::*;\npub use super::*;\npub use super::headers::*;\npub use super::super::cookie::*;\nuse serialize::json::{Json, ToJson};\n\npub fn get_cookie<'a>(headers: HeaderCollection, secret: Option<String>, key: &str, value: &str) -> String {\n let mut res = unsafe{ ResponseWriter::new(uninitialized()) };\n let signer = Cookie::new(secret);\n let cookie = (key.to_string(), value.to_string());\n res.set_cookie(&signer, cookie, headers);\n res.headers.extensions.find(&\"Set-Cookie\".to_string()).unwrap().clone()\n}\n\npub fn get_json_cookie<'a>(headers: HeaderCollection, secret: Option<String>, key: &str, value: Json) -> String {\n let mut res = unsafe{ ResponseWriter::new(uninitialized()) };\n let signer = Cookie::new(secret);\n let cookie = (key.to_string(), value);\n res.set_json_cookie(&signer, cookie, headers);\n res.headers.extensions.find(&\"Set-Cookie\".to_string()).unwrap().clone()\n}\n\n#[test]\nfn check_cookie() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_cookie(headers, None, \"thing\", \"thing\"), \"thing=thing\".to_string());\n}\n\n#[test]\nfn check_escaping() {\n let headers = HeaderCollection::empty();\n 
assert_eq!(get_cookie(headers, None, \"~`!@#$%^&*()_+-={}|[]\\\\:\\\";'<>?,.\/'\", \"~`!@#$%^&*()_+-={}|[]\\\\:\\\";'<>?,.\/'\"),\n \/\/ Url component encoding\n \"~%60%21%40%23%24%25%5E%26%2A%28%29_%2B-%3D%7B%7D%7C%5B%5D%5C%3A%22%3B%27%3C%3E%3F%2C.%2F%27=\\\n ~%60%21%40%23%24%25%5E%26%2A%28%29_%2B-%3D%7B%7D%7C%5B%5D%5C%3A%22%3B%27%3C%3E%3F%2C.%2F%27\".to_string());\n}\n\n#[test]\nfn check_headers() {\n let mut headers = HeaderCollection {\n expires: None,\n max_age: Some(42),\n domain: Some(\"example.com\".to_string()),\n path: Some(\"\/a\/path\".to_string()),\n secure: true,\n http_only: true,\n extensions: Some(TreeMap::<String, Option<String>>::new())\n };\n headers.extensions.as_mut().unwrap().insert(\"foo\".to_string(), Some(\"bar\".to_string()));\n headers.extensions.as_mut().unwrap().insert(\"@zzmp\".to_string(), None);\n assert_eq!(get_cookie(headers, None, \"thing\", \"thing\"),\n \"thing=thing; Max-Age=42; Domain=example.com; Path=\/a\/path; Secure; Http-Only; @zzmp; foo=bar\".to_string());\n}\n\n#[test]\nfn check_signature() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_cookie(headers, Some(\"@zzmp\".to_string()), \"thing\", \"thung\"),\n \/\/ Hash of @zzmpthung\n \"thing=s:thung.2bc9a8b82a4a393ab67b2b8aaff0e3ab33cb4aca05ef4a0ba201141fbb029f42\".to_string());\n}\n\n\/\/TO DO\n#[test]\nfn check_json() {\n let headers = HeaderCollection::empty();\n assert_eq!(get_json_cookie(headers, None, \"thing\", \"{\\\"foo\\\":\\\"bar\\\"}\".to_string().to_json()),\n \/\/ Url component encoded\n \"thing=j%3A%22%7B%22foo%22%3A%22bar%22%7D%22\".to_string());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add \"retrieve\" variants for retrieving instances<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A nice wrapper to consume dataflow results at several CFG\n\/\/! 
locations.\n\nuse rustc::mir::{BasicBlock, Location};\nuse rustc_data_structures::indexed_set::{IdxSetBuf, Iter};\nuse rustc_data_structures::indexed_vec::Idx;\n\nuse dataflow::{BitDenotation, BlockSets, DataflowResults};\nuse dataflow::move_paths::{HasMoveData, MovePathIndex};\n\nuse std::iter;\n\n\/\/\/ A trait for \"cartesian products\" of multiple FlowAtLocation.\n\/\/\/\n\/\/\/ There's probably a way to auto-impl this, but I think\n\/\/\/ it is cleaner to have manual visitor impls.\npub trait FlowsAtLocation {\n \/\/\/ Reset the state bitvector to represent the entry to block `bb`.\n fn reset_to_entry_of(&mut self, bb: BasicBlock);\n\n \/\/\/ Build gen + kill sets for statement at `loc`.\n \/\/\/\n \/\/\/ Note that invoking this method alone does not change the\n \/\/\/ `curr_state` -- you must invoke `apply_local_effect`\n \/\/\/ afterwards.\n fn reconstruct_statement_effect(&mut self, loc: Location);\n\n \/\/\/ Build gen + kill sets for terminator for `loc`.\n \/\/\/\n \/\/\/ Note that invoking this method alone does not change the\n \/\/\/ `curr_state` -- you must invoke `apply_local_effect`\n \/\/\/ afterwards.\n fn reconstruct_terminator_effect(&mut self, loc: Location);\n\n \/\/\/ Apply current gen + kill sets to `flow_state`.\n \/\/\/\n \/\/\/ (`loc` parameters can be ignored if desired by\n \/\/\/ client. For the terminator, the `stmt_idx` will be the number\n \/\/\/ of statements in the block.)\n fn apply_local_effect(&mut self, loc: Location);\n}\n\n\/\/\/ Represents the state of dataflow at a particular\n\/\/\/ CFG location, both before and after it is\n\/\/\/ executed.\n\/\/\/\n\/\/\/ Data flow results are typically computed only as basic block\n\/\/\/ boundaries. A `FlowInProgress` allows you to reconstruct the\n\/\/\/ effects at any point in the control-flow graph by starting with\n\/\/\/ the state at the start of the basic block (`reset_to_entry_of`)\n\/\/\/ and then replaying the effects of statements and terminators\n\/\/\/ (e.g. 
via `reconstruct_statement_effect` and\n\/\/\/ `reconstruct_terminator_effect`; don't forget to call\n\/\/\/ `apply_local_effect`).\npub struct FlowAtLocation<BD>\nwhere\n BD: BitDenotation,\n{\n base_results: DataflowResults<BD>,\n curr_state: IdxSetBuf<BD::Idx>,\n stmt_gen: IdxSetBuf<BD::Idx>,\n stmt_kill: IdxSetBuf<BD::Idx>,\n}\n\nimpl<BD> FlowAtLocation<BD>\nwhere\n BD: BitDenotation,\n{\n \/\/\/ Iterate over each bit set in the current state.\n pub fn each_state_bit<F>(&self, f: F)\n where\n F: FnMut(BD::Idx),\n {\n self.curr_state.iter().for_each(f)\n }\n\n \/\/\/ Iterate over each `gen` bit in the current effect (invoke\n \/\/\/ `reconstruct_statement_effect` or\n \/\/\/ `reconstruct_terminator_effect` first).\n pub fn each_gen_bit<F>(&self, f: F)\n where\n F: FnMut(BD::Idx),\n {\n self.stmt_gen.iter().for_each(f)\n }\n\n pub fn new(results: DataflowResults<BD>) -> Self {\n let bits_per_block = results.sets().bits_per_block();\n let curr_state = IdxSetBuf::new_empty(bits_per_block);\n let stmt_gen = IdxSetBuf::new_empty(bits_per_block);\n let stmt_kill = IdxSetBuf::new_empty(bits_per_block);\n FlowAtLocation {\n base_results: results,\n curr_state: curr_state,\n stmt_gen: stmt_gen,\n stmt_kill: stmt_kill,\n }\n }\n\n \/\/\/ Access the underlying operator.\n pub fn operator(&self) -> &BD {\n self.base_results.operator()\n }\n\n pub fn contains(&self, x: &BD::Idx) -> bool {\n self.curr_state.contains(x)\n }\n\n \/\/\/ Returns an iterator over the elements present in the current state.\n pub fn iter_incoming(&self) -> iter::Peekable<Iter<BD::Idx>> {\n self.curr_state.iter().peekable()\n }\n\n \/\/\/ Creates a clone of the current state and applies the local\n \/\/\/ effects to the clone (leaving the state of self intact).\n \/\/\/ Invokes `f` with an iterator over the resulting state.\n pub fn with_iter_outgoing<F>(&self, f: F)\n where\n F: FnOnce(Iter<BD::Idx>),\n {\n let mut curr_state = self.curr_state.clone();\n curr_state.union(&self.stmt_gen);\n curr_state.subtract(&self.stmt_kill);\n f(curr_state.iter());\n }\n}\n\nimpl<BD> FlowsAtLocation for FlowAtLocation<BD>\n where BD: BitDenotation\n{\n fn reset_to_entry_of(&mut self, bb: BasicBlock) {\n (*self.curr_state).clone_from(self.base_results.sets().on_entry_set_for(bb.index()));\n }\n\n fn reconstruct_statement_effect(&mut self, loc: Location) {\n self.stmt_gen.clear();\n self.stmt_kill.clear();\n {\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .before_statement_effect(&mut sets, loc);\n }\n self.apply_local_effect(loc);\n\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .statement_effect(&mut sets, loc);\n }\n\n fn reconstruct_terminator_effect(&mut self, loc: Location) {\n self.stmt_gen.clear();\n self.stmt_kill.clear();\n {\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .before_terminator_effect(&mut sets, loc);\n }\n self.apply_local_effect(loc);\n\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .terminator_effect(&mut sets, loc);\n }\n\n fn apply_local_effect(&mut self, _loc: Location) {\n self.curr_state.union(&self.stmt_gen);\n 
self.curr_state.subtract(&self.stmt_kill);\n }\n}\n\n\nimpl<'tcx, T> FlowAtLocation<T>\nwhere\n T: HasMoveData<'tcx> + BitDenotation<Idx = MovePathIndex>,\n{\n pub fn has_any_child_of(&self, mpi: T::Idx) -> Option<T::Idx> {\n let move_data = self.operator().move_data();\n\n let mut todo = vec![mpi];\n let mut push_siblings = false; \/\/ don't look at siblings of original `mpi`.\n while let Some(mpi) = todo.pop() {\n if self.contains(&mpi) {\n return Some(mpi);\n }\n let move_path = &move_data.move_paths[mpi];\n if let Some(child) = move_path.first_child {\n todo.push(child);\n }\n if push_siblings {\n if let Some(sibling) = move_path.next_sibling {\n todo.push(sibling);\n }\n } else {\n \/\/ after we've processed the original `mpi`, we should\n \/\/ always traverse the siblings of any of its\n \/\/ children.\n push_siblings = true;\n }\n }\n return None;\n }\n}\n<commit_msg>Avoid allocations in `has_any_child_of`.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A nice wrapper to consume dataflow results at several CFG\n\/\/! locations.\n\nuse rustc::mir::{BasicBlock, Location};\nuse rustc_data_structures::indexed_set::{IdxSetBuf, Iter};\nuse rustc_data_structures::indexed_vec::Idx;\n\nuse dataflow::{BitDenotation, BlockSets, DataflowResults};\nuse dataflow::move_paths::{HasMoveData, MovePathIndex};\n\nuse std::iter;\n\n\/\/\/ A trait for \"cartesian products\" of multiple FlowAtLocation.\n\/\/\/\n\/\/\/ There's probably a way to auto-impl this, but I think\n\/\/\/ it is cleaner to have manual visitor impls.\npub trait FlowsAtLocation {\n \/\/\/ Reset the state bitvector to represent the entry to block `bb`.\n fn reset_to_entry_of(&mut self, bb: BasicBlock);\n\n \/\/\/ Build gen + kill sets for statement at `loc`.\n \/\/\/\n \/\/\/ Note that invoking this method alone does not change the\n \/\/\/ `curr_state` -- you must invoke `apply_local_effect`\n \/\/\/ afterwards.\n fn reconstruct_statement_effect(&mut self, loc: Location);\n\n \/\/\/ Build gen + kill sets for terminator for `loc`.\n \/\/\/\n \/\/\/ Note that invoking this method alone does not change the\n \/\/\/ `curr_state` -- you must invoke `apply_local_effect`\n \/\/\/ afterwards.\n fn reconstruct_terminator_effect(&mut self, loc: Location);\n\n \/\/\/ Apply current gen + kill sets to `flow_state`.\n \/\/\/\n \/\/\/ (`loc` parameters can be ignored if desired by\n \/\/\/ client. For the terminator, the `stmt_idx` will be the number\n \/\/\/ of statements in the block.)\n fn apply_local_effect(&mut self, loc: Location);\n}\n\n\/\/\/ Represents the state of dataflow at a particular\n\/\/\/ CFG location, both before and after it is\n\/\/\/ executed.\n\/\/\/\n\/\/\/ Data flow results are typically computed only as basic block\n\/\/\/ boundaries. A `FlowInProgress` allows you to reconstruct the\n\/\/\/ effects at any point in the control-flow graph by starting with\n\/\/\/ the state at the start of the basic block (`reset_to_entry_of`)\n\/\/\/ and then replaying the effects of statements and terminators\n\/\/\/ (e.g. 
via `reconstruct_statement_effect` and\n\/\/\/ `reconstruct_terminator_effect`; don't forget to call\n\/\/\/ `apply_local_effect`).\npub struct FlowAtLocation<BD>\nwhere\n BD: BitDenotation,\n{\n base_results: DataflowResults<BD>,\n curr_state: IdxSetBuf<BD::Idx>,\n stmt_gen: IdxSetBuf<BD::Idx>,\n stmt_kill: IdxSetBuf<BD::Idx>,\n}\n\nimpl<BD> FlowAtLocation<BD>\nwhere\n BD: BitDenotation,\n{\n \/\/\/ Iterate over each bit set in the current state.\n pub fn each_state_bit<F>(&self, f: F)\n where\n F: FnMut(BD::Idx),\n {\n self.curr_state.iter().for_each(f)\n }\n\n \/\/\/ Iterate over each `gen` bit in the current effect (invoke\n \/\/\/ `reconstruct_statement_effect` or\n \/\/\/ `reconstruct_terminator_effect` first).\n pub fn each_gen_bit<F>(&self, f: F)\n where\n F: FnMut(BD::Idx),\n {\n self.stmt_gen.iter().for_each(f)\n }\n\n pub fn new(results: DataflowResults<BD>) -> Self {\n let bits_per_block = results.sets().bits_per_block();\n let curr_state = IdxSetBuf::new_empty(bits_per_block);\n let stmt_gen = IdxSetBuf::new_empty(bits_per_block);\n let stmt_kill = IdxSetBuf::new_empty(bits_per_block);\n FlowAtLocation {\n base_results: results,\n curr_state: curr_state,\n stmt_gen: stmt_gen,\n stmt_kill: stmt_kill,\n }\n }\n\n \/\/\/ Access the underlying operator.\n pub fn operator(&self) -> &BD {\n self.base_results.operator()\n }\n\n pub fn contains(&self, x: &BD::Idx) -> bool {\n self.curr_state.contains(x)\n }\n\n \/\/\/ Returns an iterator over the elements present in the current state.\n pub fn iter_incoming(&self) -> iter::Peekable<Iter<BD::Idx>> {\n self.curr_state.iter().peekable()\n }\n\n \/\/\/ Creates a clone of the current state and applies the local\n \/\/\/ effects to the clone (leaving the state of self intact).\n \/\/\/ Invokes `f` with an iterator over the resulting state.\n pub fn with_iter_outgoing<F>(&self, f: F)\n where\n F: FnOnce(Iter<BD::Idx>),\n {\n let mut curr_state = self.curr_state.clone();\n curr_state.union(&self.stmt_gen);\n curr_state.subtract(&self.stmt_kill);\n f(curr_state.iter());\n }\n}\n\nimpl<BD> FlowsAtLocation for FlowAtLocation<BD>\n where BD: BitDenotation\n{\n fn reset_to_entry_of(&mut self, bb: BasicBlock) {\n (*self.curr_state).clone_from(self.base_results.sets().on_entry_set_for(bb.index()));\n }\n\n fn reconstruct_statement_effect(&mut self, loc: Location) {\n self.stmt_gen.clear();\n self.stmt_kill.clear();\n {\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .before_statement_effect(&mut sets, loc);\n }\n self.apply_local_effect(loc);\n\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .statement_effect(&mut sets, loc);\n }\n\n fn reconstruct_terminator_effect(&mut self, loc: Location) {\n self.stmt_gen.clear();\n self.stmt_kill.clear();\n {\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .before_terminator_effect(&mut sets, loc);\n }\n self.apply_local_effect(loc);\n\n let mut sets = BlockSets {\n on_entry: &mut self.curr_state,\n gen_set: &mut self.stmt_gen,\n kill_set: &mut self.stmt_kill,\n };\n self.base_results\n .operator()\n .terminator_effect(&mut sets, loc);\n }\n\n fn apply_local_effect(&mut self, _loc: Location) {\n self.curr_state.union(&self.stmt_gen);\n 
self.curr_state.subtract(&self.stmt_kill);\n }\n}\n\n\nimpl<'tcx, T> FlowAtLocation<T>\nwhere\n T: HasMoveData<'tcx> + BitDenotation<Idx = MovePathIndex>,\n{\n pub fn has_any_child_of(&self, mpi: T::Idx) -> Option<T::Idx> {\n \/\/ We process `mpi` before the loop below, for two reasons:\n \/\/ - it's a little different from the loop case (we don't traverse its\n \/\/ siblings);\n \/\/ - ~99% of the time the loop isn't reached, and this code is hot, so\n \/\/ we don't want to allocate `todo` unnecessarily.\n if self.contains(&mpi) {\n return Some(mpi);\n }\n let move_data = self.operator().move_data();\n let move_path = &move_data.move_paths[mpi];\n let mut todo = if let Some(child) = move_path.first_child {\n vec![child]\n } else {\n return None;\n };\n\n while let Some(mpi) = todo.pop() {\n if self.contains(&mpi) {\n return Some(mpi);\n }\n let move_path = &move_data.move_paths[mpi];\n if let Some(child) = move_path.first_child {\n todo.push(child);\n }\n \/\/ After we've processed the original `mpi`, we should always\n \/\/ traverse the siblings of any of its children.\n if let Some(sibling) = move_path.next_sibling {\n todo.push(sibling);\n }\n }\n return None;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add temporary file to investigate callbacks<commit_after>use russell_lab::Vector;\n\nstruct Processor<'a> {\n callback: Box<dyn FnMut(&mut Vector, &[f64]) + 'a>,\n}\n\nimpl<'a> Processor<'a> {\n fn set_callback(&mut self, c: impl FnMut(&mut Vector, &[f64]) + 'a) {\n self.callback = Box::new(c);\n }\n\n fn process_events(&mut self) {\n let ksi = vec![0.0; 2];\n let mut x = Vector::new(2);\n (self.callback)(&mut x, &ksi);\n println!(\"got x =\\n{}\", x);\n }\n}\n\nfn simple_callback(x: &mut Vector, ksi: &[f64]) {\n println!(\"ksi = {:?}\", ksi);\n println!(\"{}\", x);\n}\n\nfn main() {\n let mut p = Processor {\n callback: Box::new(simple_callback),\n };\n p.process_events();\n let v = Vector::from(&[1.0, 2.0]);\n let x0 = 1.0;\n let x1 = 2.0;\n let my_fun = move |x: &mut Vector, ksi: &[f64]| {\n println!(\"using ksi = {:?}\", ksi);\n x[0] = v[0];\n x[1] = v[1];\n };\n println!(\"x0 = {}\", x0);\n println!(\"x1 = {}\", x1);\n \/\/ println!(\"v = {:?}\", v);\n p.set_callback(my_fun);\n p.process_events();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Began writing a wrapper that transforms a flat TimeSteward to a full one<commit_after>\/\/! A wrapper that turns a flat TimeSteward into an inefficient fall TimeSteward.\n\/\/!\n\/\/! 
This is done by storing snapshots and reverting to the previous snapshot whenever you try to query the past.\n\/\/!\n\n\nuse {DeterministicRandomId, ColumnId, FieldId, PredictorId, Column, ExtendedTime, StewardRc,\n Basics, FieldRc, TimeSteward, FiatEventOperationError, ValidSince, PredictorFn, TimeStewardSettings,ColumnList, ColumnListUser};\nuse std::collections::{HashMap, BTreeMap};\nuse std::cmp::max;\nuse std::marker::PhantomData;\nuse Snapshot as SuperSnapshot;\n\npub struct Steward<B: Basics, Steward0: TimeSteward<B> > {\n steward: Steward0,\n invalid_before: ValidSince <B::Time>,\n constants: B::Constants,\n settings: Steward0::Settings,\n fiat_events: BTreeMap<B::Time, i32>,\n snapshots: Vec<Steward0::Snapshot>,\n}\n\n\nimpl<B: Basics, Steward0: TimeSteward<B> > TimeSteward<B> for Steward<B, Steward0> {\n type Snapshot = Steward0::Snapshot;\n type Settings = Steward0::Settings;\n\n fn valid_since(&self) -> ValidSince<B::Time> {\n self.invalid_before.clone()\n }\n fn new_empty(constants: B::Constants, settings: Self::Settings) -> Self {\n Steward::<B, Steward0> {\n steward: TimeSteward::new_empty (constants.clone(), settings.clone()),\n invalid_before: ValidSince::TheBeginning,\n constants: constants,\n settings: settings,\n fiat_events: BTreeMap::new(),\n snapshots: Vec::new(),\n }\n }\n\n fn from_snapshot<'a, S: ::Snapshot<B>>(snapshot: & 'a S,\n settings: Self::Settings)\n -> Self\n where & 'a S: IntoIterator <Item = ::SnapshotEntry <'a, B>> {\n let steward = TimeSteward::from_snapshot::<'a, S> (snapshot, settings.clone());\n Steward::<B, Steward0> {\n steward: steward,\n invalid_before: ValidSince::Before (snapshot.now().clone()),\n constants: snapshot.constants().clone(),\n settings: settings,\n fiat_events: BTreeMap::new(),\n snapshots: vec![steward.snapshot_before (snapshot.now())],\n }\n }\n fn insert_fiat_event <E: ::EventFn <B>> (&mut self,\n time: B::Time,\n id: DeterministicRandomId,\n event: E)\n -> Result<(), FiatEventOperationError> {\n if self.valid_since() > time {\n return Err(FiatEventOperationError::InvalidTime);\n }\n \n }\n fn erase_fiat_event(&mut self,\n time: &B::Time,\n id: DeterministicRandomId)\n -> Result<(), FiatEventOperationError> {\n if self.valid_since() > *time {\n return Err(FiatEventOperationError::InvalidTime);\n }\n \n }\n\n fn snapshot_before<'b>(&'b mut self, time: &'b B::Time) -> Option<Self::Snapshot> {\n if self.valid_since() > *time {\n return None;\n }\n if self.steward.valid_since() > *time {\n while self.snapshots.last().map_or (false, | snapshot | snapshot.now() > time) {\n self.snapshots.pop();\n }\n self.steward = match self.snapshots.last() {\n None => TimeSteward::new_empty (self.constants.clone(), self.settings.clone()),\n Some (snapshot) => TimeSteward::from_snapshot::<Self::Snapshot> (snapshot, self.settings.clone()),\n }\n }\n self.snapshots.push (self.steward.snapshot_before (time));\n self.steward.snapshot_before (time)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\n\/\/ compile-flags:--debuginfo=1\n\n\/\/ Make sure functions have proper names\n\/\/ debugger:info functions\n\/\/ check:static void [...]main();\n\/\/ check:static void [...]some_function();\n\/\/ check:static void [...]some_other_function();\n\/\/ check:static void [...]zzz();\n\n\/\/ debugger:rbreak zzz\n\/\/ debugger:run\n\n\/\/ Make sure there is no information about locals\n\/\/ debugger:finish\n\/\/ debugger:info locals\n\/\/ check:No locals.\n\/\/ debugger:continue\n\n\n#[allow(unused_variable)];\n\nstruct Struct {\n a: i64,\n b: i32\n}\n\nfn main() {\n some_function(101, 202);\n}\n\n\nfn zzz() {()}\n\nfn some_function(a: int, b: int) {\n let some_variable = Struct { a: 11, b: 22 };\n let some_other_variable = 23;\n zzz();\n}\n\nfn some_other_function(a: int, b: int) -> bool { true }\n<commit_msg>debuginfo: Make limited-debuginfo test case more robust against GDB output variations.<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\n\/\/ compile-flags:--debuginfo=1\n\n\/\/ Make sure functions have proper names\n\/\/ debugger:info functions\n\/\/ check:[...]void[...]main([...]);\n\/\/ check:[...]void[...]some_function([...]);\n\/\/ check:[...]void[...]some_other_function([...]);\n\/\/ check:[...]void[...]zzz([...]);\n\n\/\/ debugger:rbreak zzz\n\/\/ debugger:run\n\n\/\/ Make sure there is no information about locals\n\/\/ debugger:finish\n\/\/ debugger:info locals\n\/\/ check:No locals.\n\/\/ debugger:continue\n\n\n#[allow(unused_variable)];\n\nstruct Struct {\n a: i64,\n b: i32\n}\n\nfn main() {\n some_function(101, 202);\n}\n\n\nfn zzz() {()}\n\nfn some_function(a: int, b: int) {\n let some_variable = Struct { a: 11, b: 22 };\n let some_other_variable = 23;\n zzz();\n}\n\nfn some_other_function(a: int, b: int) -> bool { true }\n<|endoftext|>"} {"text":"<commit_before>#[cfg(bogus)]\nconst b: bool = false;\n\nconst b: bool = true;\n\n#[cfg(bogus)]\n#[abi = \"cdecl\"]\nextern mod rustrt {\n \/\/ This symbol doesn't exist and would be a link error if this\n \/\/ module was translated\n fn bogus();\n}\n\n#[abi = \"cdecl\"]\nextern mod rustrt { }\n\n#[cfg(bogus)]\ntype t = int;\n\ntype t = bool;\n\n#[cfg(bogus)]\nenum tg { foo, }\n\nenum tg { bar, }\n\n#[cfg(bogus)]\nstruct r {\n let i: int;\n new(i:int) { self.i = i; }\n}\n\nstruct r {\n let i: int;\n new(i:int) { self.i = i; }\n}\n\n#[cfg(bogus)]\nmod m {\n \/\/ This needs to parse but would fail in typeck. 
Since it's not in\n \/\/ the current config it should not be typechecked.\n fn bogus() { return 0; }\n}\n\nmod m {\n\n \/\/ Submodules have slightly different code paths than the top-level\n \/\/ module, so let's make sure this jazz works here as well\n #[cfg(bogus)]\n fn f() { }\n\n fn f() { }\n}\n\n\/\/ Since the bogus configuration isn't defined main will just be\n\/\/ parsed, but nothing further will be done with it\n#[cfg(bogus)]\nfn main() { fail }\n\nfn main() {\n \/\/ Exercise some of the configured items in ways that wouldn't be possible\n \/\/ if they had the bogus definition\n assert (b);\n let x: t = true;\n let y: tg = bar;\n\n test_in_fn_ctxt();\n}\n\nfn test_in_fn_ctxt() {\n #[cfg(bogus)]\n fn f() { fail }\n fn f() { }\n f();\n\n #[cfg(bogus)]\n const i: int = 0;\n const i: int = 1;\n assert (i == 1);\n}\n\nmod test_foreign_items {\n #[abi = \"cdecl\"]\n extern mod rustrt {\n #[cfg(bogus)]\n fn vec_from_buf_shared();\n fn vec_from_buf_shared();\n }\n}\n\nmod test_use_statements {\n #[cfg(bogus)]\n use flippity_foo;\n\n extern mod rustrt {\n #[cfg(bogus)]\n use flippity_foo;\n }\n}<commit_msg>Fix test case<commit_after>#[cfg(bogus)]\nconst b: bool = false;\n\nconst b: bool = true;\n\n#[cfg(bogus)]\n#[abi = \"cdecl\"]\nextern mod rustrt {\n \/\/ This symbol doesn't exist and would be a link error if this\n \/\/ module was translated\n fn bogus();\n}\n\n#[abi = \"cdecl\"]\nextern mod rustrt { }\n\n#[cfg(bogus)]\ntype t = int;\n\ntype t = bool;\n\n#[cfg(bogus)]\nenum tg { foo, }\n\nenum tg { bar, }\n\n#[cfg(bogus)]\nstruct r {\n let i: int;\n new(i:int) { self.i = i; }\n}\n\nstruct r {\n let i: int;\n new(i:int) { self.i = i; }\n}\n\n#[cfg(bogus)]\nmod m {\n \/\/ This needs to parse but would fail in typeck. Since it's not in\n \/\/ the current config it should not be typechecked.\n fn bogus() { return 0; }\n}\n\nmod m {\n\n \/\/ Submodules have slightly different code paths than the top-level\n \/\/ module, so let's make sure this jazz works here as well\n #[cfg(bogus)]\n fn f() { }\n\n fn f() { }\n}\n\n\/\/ Since the bogus configuration isn't defined main will just be\n\/\/ parsed, but nothing further will be done with it\n#[cfg(bogus)]\nfn main() { fail }\n\nfn main() {\n \/\/ Exercise some of the configured items in ways that wouldn't be possible\n \/\/ if they had the bogus definition\n assert (b);\n let x: t = true;\n let y: tg = bar;\n\n test_in_fn_ctxt();\n}\n\nfn test_in_fn_ctxt() {\n #[cfg(bogus)]\n fn f() { fail }\n fn f() { }\n f();\n\n #[cfg(bogus)]\n const i: int = 0;\n const i: int = 1;\n assert (i == 1);\n}\n\nmod test_foreign_items {\n #[abi = \"cdecl\"]\n extern mod rustrt {\n #[cfg(bogus)]\n fn rust_getcwd() -> *();\n fn rust_getcwd() -> *();\n }\n}\n\nmod test_use_statements {\n #[cfg(bogus)]\n use flippity_foo;\n\n extern mod rustrt {\n #[cfg(bogus)]\n use flippity_foo;\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Do not pass --rtp in testing environment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a run-pass test for existential traits in ARCs.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Tests that a heterogeneous list of existential types can be put inside an ARC\n\/\/ and shared between tasks as long as all types fulfill Const+Owned.\n\n\/\/ xfail-fast\n\nextern mod extra;\nuse extra::arc;\nuse std::comm;\nuse std::task;\nuse std::cell;\n\ntrait Pet {\n fn name(&self, blk: &fn(&str));\n fn num_legs(&self) -> uint;\n fn of_good_pedigree(&self) -> bool;\n}\n\nstruct Catte {\n num_whiskers: uint,\n name: ~str,\n}\n\nstruct Dogge {\n bark_decibels: uint,\n tricks_known: uint,\n name: ~str,\n}\n\nstruct Goldfyshe {\n swim_speed: uint,\n name: ~str,\n}\n\nimpl Pet for Catte {\n fn name(&self, blk: &fn(&str)) { blk(self.name) }\n fn num_legs(&self) -> uint { 4 }\n fn of_good_pedigree(&self) -> bool { self.num_whiskers >= 4 }\n}\nimpl Pet for Dogge {\n fn name(&self, blk: &fn(&str)) { blk(self.name) }\n fn num_legs(&self) -> uint { 4 }\n fn of_good_pedigree(&self) -> bool {\n self.bark_decibels < 70 || self.tricks_known > 20\n }\n}\nimpl Pet for Goldfyshe {\n fn name(&self, blk: &fn(&str)) { blk(self.name) }\n fn num_legs(&self) -> uint { 0 }\n fn of_good_pedigree(&self) -> bool { self.swim_speed >= 500 }\n}\n\nfn main() {\n let catte = Catte { num_whiskers: 7, name: ~\"alonzo_church\" };\n let dogge1 = Dogge { bark_decibels: 100, tricks_known: 42, name: ~\"alan_turing\" };\n let dogge2 = Dogge { bark_decibels: 55, tricks_known: 11, name: ~\"albert_einstein\" };\n let fishe = Goldfyshe { swim_speed: 998, name: ~\"alec_guinness\" };\n let arc = arc::ARC(~[~catte as ~Pet:Const+Owned,\n ~dogge1 as ~Pet:Const+Owned,\n ~fishe as ~Pet:Const+Owned,\n ~dogge2 as ~Pet:Const+Owned]);\n let (p1,c1) = comm::stream();\n let arc1 = cell::Cell::new(arc.clone());\n do task::spawn { check_legs(arc1.take()); c1.send(()); }\n let (p2,c2) = comm::stream();\n let arc2 = cell::Cell::new(arc.clone());\n do task::spawn { check_names(arc2.take()); c2.send(()); }\n let (p3,c3) = comm::stream();\n let arc3 = cell::Cell::new(arc.clone());\n do task::spawn { check_pedigree(arc3.take()); c3.send(()); }\n p1.recv();\n p2.recv();\n p3.recv();\n}\n\nfn check_legs(arc: arc::ARC<~[~Pet:Const+Owned]>) {\n let mut legs = 0;\n for arc.get().iter().advance |pet| {\n legs += pet.num_legs();\n }\n assert!(legs == 12);\n}\nfn check_names(arc: arc::ARC<~[~Pet:Const+Owned]>) {\n for arc.get().iter().advance |pet| {\n do pet.name |name| {\n assert!(name[0] == 'a' as u8 && name[1] == 'l' as u8);\n }\n }\n}\nfn check_pedigree(arc: arc::ARC<~[~Pet:Const+Owned]>) {\n for arc.get().iter().advance |pet| {\n assert!(pet.of_good_pedigree());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(geektime_algo): add 35 trie<commit_after>\/\/ [leetcode 208](https:\/\/leetcode.com\/problems\/implement-trie-prefix-tree\/)\n\n#[derive(Default, Debug)]\nstruct Trie {\n is_ending: bool,\n nodes: [Option<Box<Trie>>; 26],\n}\n\nimpl Trie {\n fn new() -> Self {\n Default::default()\n }\n\n fn insert(&mut self, word: &str) {\n let mut curr = self;\n for i in word.chars().map(|c| (c as usize - 'a' as usize) as usize) {\n curr = curr.nodes[i].get_or_insert_with(Box::new(Trie::new()));\n }\n curr.is_ending = true;\n }\n\n fn find(&self, word: &str) -> bool {\n let mut curr = self;\n for i in word.chars().map(|c| (c as usize - 'a' as usize) as usize) {\n match curr.nodes[i].as_ref() {\n Some(node) => { curr = node; },\n None => { return false; },\n }\n }\n curr.is_ending\n }\n}\n\nfn main() {\n let mut m = Trie::new();\n 
m.insert(\"hello\");\n m.insert(\"she\");\n println!(\"{:?}\", m);\n let r = m.search(\"hello\");\n println!(\"{}\", r); \/\/ true\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move log level to config struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rustc suggestion.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 1906<commit_after>\/\/ https:\/\/leetcode.com\/problems\/minimum-absolute-difference-queries\/\npub fn min_difference(nums: Vec<i32>, queries: Vec<Vec<i32>>) -> Vec<i32> {\n todo!()\n}\n\nfn main() {\n println!(\n \"{:?}\",\n min_difference(\n vec![1, 3, 4, 8],\n vec![vec![0, 1], vec![1, 2], vec![2, 3], vec![0, 3]]\n )\n ); \/\/ [2,1,4,1]\n println!(\n \"{:?}\",\n min_difference(\n vec![4, 5, 2, 2, 7, 10],\n vec![vec![2, 3], vec![0, 2], vec![0, 5], vec![3, 5]]\n )\n ); \/\/ [-1,1,1,3]\n}\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n let x = @{mut a: @10, b: @20};\n match x {\n @{a, b} => { assert *a == 10; (*x).a = @30; assert *a == 30; }\n }\n}\n<commit_msg>test: Remove alt-implicit-copy.rs test. Incompatible with current borrow check semantics. rs=bustage<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #27<commit_after>\/\/ p(n) = n^2 + an + b is prime for n = 0 .. N\n\/\/ p(0) = b => b must be prime\n\/\/ p(1) = 1 + a + b => a > -(1+b)\n\/\/ p(2) = 4 + 2a + b\n\nextern mod euler;\nuse euler::prime::{ Prime };\n\nfn get_len(a: int, b: int, ps: &Prime) -> uint {\n let mut nu = 0;\n loop {\n let n = nu as int;\n let mut val = n * n + a * n + b;\n for ps.each |p| {\n if (p as int) == val {\n nu += 1;\n break;\n }\n if (p as int) > val {\n return nu;\n }\n }\n }\n}\n\nfn main() {\n let ps = Prime();\n let mut ans = { len: 0, a: 0, b: 0 };\n for ps.each |bu| {\n if bu >= 1000 { break; }\n let b = bu as int;\n for int::range(-b, 1000) |a| {\n let len = get_len(a, b, &ps);\n if len > ans.len {\n ans.len = len;\n ans.a = a;\n ans.b = b;\n }\n }\n }\n io::println(fmt!(\"n^2 + %dn + %d => %u len\", ans.a, ans.b, ans.len));\n io::println(fmt!(\"a * b = %d\", ans.a * ans.b));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that changing a tracked commandline argument invalidates\n\/\/ the cache while changing an untracked one doesn't.\n\n\/\/ revisions:rpass1 rpass2 rpass3\n\n#![feature(rustc_attrs)]\n\n#![rustc_partition_translated(module=\"commandline_args\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"commandline_args\", cfg=\"rpass3\")]\n\n\/\/ Between revisions 1 and 2, we are changing the debuginfo-level, which should\n\/\/ invalidate the cache. Between revisions 2 and 3, we are adding `--verbose`\n\/\/ which should have no effect on the cache:\n\/\/[rpass1] compile-flags: -C debuginfo=0\n\/\/[rpass2] compile-flags: -C debuginfo=2\n\/\/[rpass3] compile-flags: -C debuginfo=2 --verbose\n\npub fn main() {\n println!(\"hello world\");\n}\n<commit_msg>Fix incremental\/commandline-args test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that changing a tracked commandline argument invalidates\n\/\/ the cache while changing an untracked one doesn't.\n\n\/\/ revisions:rpass1 rpass2 rpass3\n\n#![feature(rustc_attrs)]\n\n#![rustc_partition_translated(module=\"commandline_args\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"commandline_args\", cfg=\"rpass3\")]\n\n\/\/ Between revisions 1 and 2, we are changing the debuginfo-level, which should\n\/\/ invalidate the cache. Between revisions 2 and 3, we are adding `--verbose`\n\/\/ which should have no effect on the cache:\n\/\/[rpass1] compile-flags: -C debuginfo=0\n\/\/[rpass2] compile-flags: -C debuginfo=2\n\/\/[rpass3] compile-flags: -C debuginfo=2 --verbose\n\npub fn main() {\n \/\/ empty\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Support query syntax.<commit_after>use ::{Points, Value};\n\n\/\/\/ Resolve the points to line protocol format\npub fn line_serialization(points: Points) -> String {\n let mut line = Vec::new();\n for point in points.point {\n line.push(point.measurement);\n\n for (tag, value) in point.tags.iter() {\n line.push(\",\".to_string());\n line.push(tag.to_string());\n line.push(\"=\".to_string());\n\n match value {\n &Value::String(ref s) => line.push(s.to_string()),\n &Value::Float(ref f) => line.push(f.to_string()),\n &Value::Integer(ref i) => line.push(i.to_string()),\n &Value::Boolean(b) => line.push({ if b { \"true\".to_string() } else { \"false\".to_string() } })\n }\n }\n\n let mut was_first = true;\n\n for (field, value) in point.fields.iter() {\n line.push({\n if was_first {\n was_first = false;\n \" \"\n } else { \",\" }\n }.to_string());\n line.push(field.to_string());\n line.push(\"=\".to_string());\n\n match value {\n &Value::String(ref s) => line.push(s.to_string()),\n &Value::Float(ref f) => line.push(f.to_string()),\n &Value::Integer(ref i) => line.push(i.to_string()),\n &Value::Boolean(b) => line.push({ if b { \"true\".to_string() } else { \"false\".to_string() } })\n }\n }\n\n match point.timestamp {\n Some(t) => {\n line.push(\" \".to_string());\n line.push(t.to_string());\n }\n _ => {}\n }\n\n line.push(\"\\n\".to_string())\n }\n\n line.join(\"\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add const_fn in generics test<commit_after>\/\/ run-pass\n\n#![feature(min_const_generics)]\n\nconst fn identity<const T: u32>() -> u32 { T }\n\n#[derive(Eq, PartialEq, Debug)]\npub struct ConstU32<const U: u32>;\n\npub fn new() -> ConstU32<{ identity::<3>() }> {\n ConstU32::<{ identity::<3>() }>\n}\n\nfn main() {\n let v = new();\n assert_eq!(v, ConstU32::<3>);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern: requires at least a format string argument\n\/\/ error-pattern: bad-format-args.rs:19:5: 19:15: note: in this expansion\n\n\/\/ error-pattern: expected token: `,`\n\/\/ error-pattern: bad-format-args.rs:20:5: 20:19: note: in this expansion\n\/\/ error-pattern: bad-format-args.rs:21:5: 21:22: note: in this expansion\n\nfn main() {\n format!();\n format!(\"\" 1);\n format!(\"\", 1 1);\n}\n<commit_msg>Fix touchy test to work with old and new error format<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern: requires at least a format string argument\n\/\/ error-pattern: in this expansion\n\n\/\/ error-pattern: expected token: `,`\n\/\/ error-pattern: in this expansion\n\/\/ error-pattern: in this expansion\n\nfn main() {\n format!();\n format!(\"\" 1);\n format!(\"\", 1 1);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! 
* All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} 
{:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n };\n if lang_features.contains_key(name) && feature_name != \"proc_macro\" {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(name) {\n if s.level != f.level {\n err(\"different stability level than before\");\n }\n if s.since != f.since {\n err(\"different `since` than before\");\n }\n if s.tracking_issue != f.tracking_issue {\n err(\"different `tracking_issue` than before\");\n }\n }\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == 
\"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<commit_msg>Fix a semantic merge conflict<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! 
* All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} 
{:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n };\n if lang_features.contains_key(name) && name != \"proc_macro\" {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(name) {\n if s.level != f.level {\n err(\"different stability level than before\");\n }\n if s.since != f.since {\n err(\"different `since` than before\");\n }\n if s.tracking_issue != f.tracking_issue {\n err(\"different `tracking_issue` than before\");\n }\n }\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == 
\"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that there are no stray `.stderr` files in UI test directories.\n\nuse std::path::Path;\n\npub fn check(path: &Path, bad: &mut bool) {\n super::walk_many(&[&path.join(\"test\/ui\"), &path.join(\"test\/ui-fulldeps\")],\n &mut |_| false,\n &mut |file_path| {\n if let Some(ext) = file_path.extension() {\n if (ext == \"stderr\" || ext == \"stdout\") && !file_path.with_extension(\"rs\").exists() {\n println!(\"Stray file with UI testing output: {:?}\", file_path);\n *bad = true;\n }\n }\n });\n}\n<commit_msg>Make tidy treat \"test\/ui\/foo.nll.stderr\" just like \"foo.stderr\".<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that there are no stray `.stderr` files in UI test directories.\n\nuse std::path::Path;\n\n\/\/ See rust-lang\/rust#48879: In addition to the mapping from `foo.rs`\n\/\/ to `foo.stderr`\/`foo.stdout`, we also can optionally have\n\/\/ `foo.$mode.stderr`, where $mode is one of the strings on this list,\n\/\/ as an alternative to use when running under that mode.\nstatic COMPARE_MODE_NAMES: [&'static str; 1] = [\"nll\"];\n\npub fn check(path: &Path, bad: &mut bool) {\n super::walk_many(&[&path.join(\"test\/ui\"), &path.join(\"test\/ui-fulldeps\")],\n &mut |_| false,\n &mut |file_path| {\n if let Some(ext) = file_path.extension() {\n if (ext == \"stderr\" || ext == \"stdout\") && !file_path.with_extension(\"rs\").exists() {\n\n \/\/ rust-lang\/rust#48879: this fn used to be beautful\n \/\/ because Path API special-cases replacing\n \/\/ extensions. That works great for \".stderr\" but not\n \/\/ so well for \".nll.stderr\". To support the latter,\n \/\/ we explicitly search backwards for mode's starting\n \/\/ point and build corresponding source name.\n let filename = file_path.file_name().expect(\"need filename\")\n .to_str().expect(\"need UTF-8 filename\");\n let found_matching_prefix = COMPARE_MODE_NAMES.iter().any(|mode| {\n if let Some(r_idx) = filename.rfind(&format!(\".{}\", mode)) {\n let source_name = format!(\"{}.rs\", &filename[0..r_idx]);\n let source_path = file_path.with_file_name(source_name);\n source_path.exists()\n } else {\n false\n }\n });\n\n if !found_matching_prefix {\n println!(\"Stray file with UI testing output: {:?}\", file_path);\n *bad = true;\n }\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>and_then<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>End to End test for the TestFilter (#64)<commit_after>\/*\n * Copyright 2020 Google LLC All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\/\n\nextern crate quilkin;\n\n#[cfg(test)]\nmod tests {\n use std::net::{IpAddr, Ipv4Addr, SocketAddr};\n\n use slog::info;\n\n use quilkin::config::{Config, ConnectionConfig, EndPoint, Filter, Local};\n use quilkin::extensions::default_filters;\n use quilkin::test_utils::{echo_server, logger, recv_multiple_packets, run_proxy, TestFilter};\n\n #[tokio::test]\n async fn test_filter() {\n let base_logger = logger();\n\n \/\/ create two echo servers as endpoints\n let echo = echo_server().await;\n\n \/\/ create server configuration\n let server_port = 12346;\n let server_config = Config {\n local: Local { port: server_port },\n 
filters: vec![Filter {\n name: \"TestFilter\".to_string(),\n config: serde_yaml::Value::Null,\n }],\n connections: ConnectionConfig::Server {\n endpoints: vec![EndPoint {\n name: \"server\".to_string(),\n address: echo,\n connection_ids: vec![],\n }],\n },\n };\n\n let mut registry = default_filters(&base_logger);\n registry.insert(\"TestFilter\".to_string(), TestFilter {});\n let close_server = run_proxy(&base_logger, registry, server_config);\n\n \/\/ create a local client\n let client_port = 12347;\n let client_config = Config {\n local: Local { port: client_port },\n filters: vec![Filter {\n name: \"TestFilter\".to_string(),\n config: serde_yaml::Value::Null,\n }],\n connections: ConnectionConfig::Client {\n addresses: vec![SocketAddr::new(\n IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),\n server_port,\n )],\n connection_id: String::from(\"\"),\n lb_policy: None,\n },\n };\n let mut registry = default_filters(&base_logger);\n registry.insert(\"TestFilter\".to_string(), TestFilter {});\n let close_client = run_proxy(&base_logger, registry, client_config);\n\n \/\/ let's send the packet\n let (mut recv_chan, mut send) = recv_multiple_packets(&base_logger).await;\n\n \/\/ game_client\n let local_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), client_port);\n info!(base_logger, \"Sending hello\"; \"addr\" => local_addr);\n send.send_to(\"hello\".as_bytes(), &local_addr).await.unwrap();\n\n let result = recv_chan.recv().await.unwrap();\n \/\/ since we don't know the ephemeral ip addresses in use, we'll search for\n \/\/ substrings for the results we expect that the TestFilter will inject in\n \/\/ the round-tripped packets.\n assert_eq!(\n 2,\n result.matches(\"lrf\").count(),\n \"Should be 2 local_receive_filter calls in {}\",\n result\n );\n assert_eq!(\n 2,\n result.matches(\"lsf\").count(),\n \"Should be 2 local_send_filter calls in {}\",\n result\n );\n assert_eq!(\n 2,\n result.matches(\"esf\").count(),\n \"Should be 2 endpoint_send_filter calls in {}\",\n result\n );\n assert_eq!(\n 2,\n result.matches(\"erf\").count(),\n \"Should be 2 endpoint_receive_filter calls in {}\",\n result\n );\n\n close_server();\n close_client();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add benchmark<commit_after>#![feature(test)]\n\nextern crate test;\nuse test::Bencher;\n\nextern crate polylabel;\nuse polylabel::polylabel;\n\nextern crate geo;\nuse self::geo::{Point, LineString, Polygon};\n\n#[bench]\nfn bench_threads(b: &mut Bencher) {\n let coords = vec![\n (-75.57274028771249, 110.01960141091608),\n (-47.01425001453319, 224.2535625036333),\n (-44.1986052400443, 233.56520178444188),\n (-40.491516848197264, 242.55919851821028),\n (-35.928066677809895, 251.1504384831045),\n (-30.55144070299677, 259.2576189250935),\n (-24.412520344941953, 266.8040179623472),\n (-17.56940095820731, 273.7182206387879),\n (-10.086842048356445, 279.93479475582495),\n (-2.0356544237308825, 285.394910086574),\n (6.507969918963688, 290.0468951126591),\n (15.463178833668529, 293.8467260149487),\n (24.745225165064543, 296.758443290685),\n (34.26626874888323, 298.7544920543751),\n (43.93620768274765, 299.8159828020204),\n (53.66353100085455, 299.9328701709592),\n (63.35618468325523, 299.10404800363494),\n (72.92244280430123, 297.33735981566144),\n (82.27177557618647, 294.64952456911897),\n (91.31570607290114, 291.0659784535237),\n (99.96864752703414, 286.6206341717666),\n (108.14871327570971, 281.355560009008),\n (115.77849169077639, 275.3205817216414),\n (122.78577875973701, 268.57281101383126),\n 
(129.10426138467784, 261.17610506386103),\n (134.67414493283295, 253.20046221503722),\n (139.44271909999156, 244.72135954999587),\n (189.4427190999916, 144.72135954999578),\n (193.40316487178438, 135.7190256296874),\n (196.46014848027284, 126.37119176608674),\n (198.5841005952538, 116.76827688896145),\n (199.75447683394128, 107.00316725394137),\n (199.959956480919, 97.1703179802708),\n (199.19855199019082, 87.36483941339068),\n (197.47762821014587, 77.68157714970485),\n (194.8138311454814, 68.21419462218802),\n (191.23292694514913, 59.05426712072333),\n (186.76955267374814, 50.29039601045239),\n (181.46688127708177, 42.007351716050565),\n (175.37620398257155, 34.28525376159651),\n (168.5564341738489, 27.198795797276006),\n (161.07353753840516, 20.81652310901589),\n (152.99989400031805, 15.200169599491232),\n (98.33653286253586, -18.964431111622638),\n (97.01425001453319, -24.253562503633297),\n (94.16983504461093, -33.64583432864707),\n (90.41851308474087, -42.71407837639184),\n (85.79641141607766, -51.37096249948156),\n (80.34804340438832, -59.53311617147662),\n (74.12587981200636, -67.12193339062866),\n (67.189843475707, -74.06432969864774),\n (59.606732217031976, -80.2934460239878),\n (51.44957554275259, -85.74929257125446),\n (42.79693133079759, -90.37932655572841),\n (33.73212927494458, -94.13895821910516),\n (24.342468374316272, -96.99198025324264),\n (14.718376196296493, -98.91091649633165),\n (4.952538009623515, -99.87728654335396),\n (-4.860995825414805, -99.88178372248515),\n (-14.627715613363762, -98.92436472343178),\n (-17.953756809330994, -98.26435835897965),\n (-53.64820903700594, -226.76438637860946),\n (-56.7355378616229, -236.07963555856995),\n (-60.72105444017349, -245.0474181249662),\n (-65.5663760693013, -253.58136942939535),\n (-71.22483965299563, -261.59930285566344),\n (-77.64195109371464, -269.02400132182726),\n (-84.75591010033425, -275.7839609229046),\n (-92.49820535873518, -281.81407955256725),\n (-100.79427433320987, -287.05628387201347),\n (-109.56422134444159, -291.46008858796654),\n (-118.72358700857137, -294.98308265364733),\n (-128.18416162723517, -297.59133771033885),\n (-137.85483469517902, -299.2597348360279),\n (-147.64247234423098, -299.9722064543555),\n (-157.4528142733637, -299.72189107416057),\n (-167.19138152692128, -298.5111993693906),\n (-176.76438637860946, -296.3517909629941),\n (-186.0796355585698, -293.26446213837716),\n (-195.04741812496607, -289.2789455598266),\n (-203.58136942939524, -284.4336239306988),\n (-211.59930285566332, -278.7751603470045),\n (-219.02400132182714, -272.3580489062855),\n (-225.78396092290453, -265.2440898996658),\n (-231.8140795525672, -257.50179464126495),\n (-237.0562838720134, -249.20572566679022),\n (-241.46008858796648, -240.4357786555585),\n (-244.98308265364727, -231.27641299142869),\n (-247.59133771033882, -221.8158383727649),\n (-249.25973483602792, -212.145165304821),\n (-249.97220645435553, -202.35752765576902),\n (-249.72189107416057, -192.54718572663626),\n (-248.51119936939062, -182.8086184730787),\n (-246.35179096299407, -173.23561362139054),\n (-196.35179096299407, 6.7643863786094585),\n (-193.32576660256726, 15.920764023655508),\n (-189.43184924301974, 24.74309266215056),\n (-184.7062507874361, 33.14932810051302),\n (-179.19291744665992, 41.0612956486063),\n (-172.94312105678188, 48.40541711367358),\n (-166.01498227118805, 55.11339504865113),\n (-158.47293006129595, 61.12284789161923),\n (-150.3871025524086, 66.37789008984335),\n (-75.57274028771249, 110.01960141091608)];\n let ls = 
LineString(coords.iter().map(|e| Point::new(e.0, e.1)).collect());\n let poly = Polygon(ls, vec![]);\n b.iter(||{\n polylabel(\n &poly,\n &10.0\n );\n });\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ xfail-boot\n\/\/ xfail-stage0\nuse std;\nimport std._str;\n\nfn test(str actual, str expected) {\n log actual;\n log expected;\n check (_str.eq(actual, expected));\n}\n\nfn main() {\n test(#fmt(\"hello %d friends and %s things\", 10, \"formatted\"),\n \"hello 10 friends and formatted things\");\n\n \/\/ Simple tests for types\n test(#fmt(\"%d\", 1), \"1\");\n test(#fmt(\"%i\", 2), \"2\");\n test(#fmt(\"%i\", -1), \"-1\");\n test(#fmt(\"%u\", 10u), \"10\");\n test(#fmt(\"%s\", \"test\"), \"test\");\n test(#fmt(\"%b\", true), \"true\");\n test(#fmt(\"%b\", false), \"false\");\n test(#fmt(\"%c\", 'A'), \"A\");\n test(#fmt(\"%x\", 0xff_u), \"ff\");\n test(#fmt(\"%X\", 0x12ab_u), \"12AB\");\n test(#fmt(\"%t\", 0b11010101_u), \"11010101\");\n\n \/\/ 32-bit limits\n test(#fmt(\"%i\", -2147483648), \"-2147483648\");\n test(#fmt(\"%i\", 2147483647), \"2147483647\");\n test(#fmt(\"%u\", 4294967295u), \"4294967295\");\n test(#fmt(\"%x\", 0xffffffff_u), \"ffffffff\");\n test(#fmt(\"%t\", 0xffffffff_u), \"11111111111111111111111111111111\");\n\n \/\/ Widths\n test(#fmt(\"%10d\", 500), \" 500\");\n test(#fmt(\"%10d\", -500), \" -500\");\n test(#fmt(\"%10u\", 500u), \" 500\");\n test(#fmt(\"%10s\", \"test\"), \" test\");\n test(#fmt(\"%10b\", true), \" true\");\n test(#fmt(\"%10x\", 0xff_u), \" ff\");\n test(#fmt(\"%10X\", 0xff_u), \" FF\");\n test(#fmt(\"%10t\", 0xff_u), \" 11111111\");\n}\n<commit_msg>Add another test for #fmt conversion widths<commit_after>\/\/ xfail-boot\n\/\/ xfail-stage0\nuse std;\nimport std._str;\n\nfn test(str actual, str expected) {\n log actual;\n log expected;\n check (_str.eq(actual, expected));\n}\n\nfn main() {\n test(#fmt(\"hello %d friends and %s things\", 10, \"formatted\"),\n \"hello 10 friends and formatted things\");\n\n \/\/ Simple tests for types\n test(#fmt(\"%d\", 1), \"1\");\n test(#fmt(\"%i\", 2), \"2\");\n test(#fmt(\"%i\", -1), \"-1\");\n test(#fmt(\"%u\", 10u), \"10\");\n test(#fmt(\"%s\", \"test\"), \"test\");\n test(#fmt(\"%b\", true), \"true\");\n test(#fmt(\"%b\", false), \"false\");\n test(#fmt(\"%c\", 'A'), \"A\");\n test(#fmt(\"%x\", 0xff_u), \"ff\");\n test(#fmt(\"%X\", 0x12ab_u), \"12AB\");\n test(#fmt(\"%t\", 0b11010101_u), \"11010101\");\n\n \/\/ 32-bit limits\n test(#fmt(\"%i\", -2147483648), \"-2147483648\");\n test(#fmt(\"%i\", 2147483647), \"2147483647\");\n test(#fmt(\"%u\", 4294967295u), \"4294967295\");\n test(#fmt(\"%x\", 0xffffffff_u), \"ffffffff\");\n test(#fmt(\"%t\", 0xffffffff_u), \"11111111111111111111111111111111\");\n\n \/\/ Widths\n test(#fmt(\"%1d\", 500), \"500\");\n test(#fmt(\"%10d\", 500), \" 500\");\n test(#fmt(\"%10d\", -500), \" -500\");\n test(#fmt(\"%10u\", 500u), \" 500\");\n test(#fmt(\"%10s\", \"test\"), \" test\");\n test(#fmt(\"%10b\", true), \" true\");\n test(#fmt(\"%10x\", 0xff_u), \" ff\");\n test(#fmt(\"%10X\", 0xff_u), \" FF\");\n test(#fmt(\"%10t\", 0xff_u), \" 11111111\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More verbose erroring in test<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Sync\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. \"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\npub use self::LangItem::*;\n\nuse dep_graph::DepNode;\nuse hir::map as hir_map;\nuse session::Session;\nuse hir::def_id::DefId;\nuse ty;\nuse middle::weak_lang_items;\nuse util::nodemap::FnvHashMap;\n\nuse syntax::ast;\nuse syntax::attr::AttrMetaMethods;\nuse syntax::parse::token::InternedString;\nuse hir::intravisit::Visitor;\nuse hir;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! lets_do_this {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n\nenum_from_u32! {\n #[derive(Copy, Clone, PartialEq, Eq, Hash)]\n pub enum LangItem {\n $($variant,)*\n }\n}\n\npub struct LanguageItems {\n pub items: Vec<Option<DefId>>,\n pub missing: Vec<LangItem>,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option<DefId> { None }\n\n LanguageItems {\n items: vec!($(foo($variant)),*),\n missing: Vec::new(),\n }\n }\n\n pub fn items(&self) -> &[Option<DefId>] {\n &*self.items\n }\n\n pub fn item_name(index: usize) -> &'static str {\n let item: Option<LangItem> = LangItem::from_u32(index as u32);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result<DefId, String> {\n match self.items[it as usize] {\n Some(id) => Ok(id),\n None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as usize)))\n }\n }\n }\n\n pub fn require_owned_box(&self) -> Result<DefId, String> {\n self.require(OwnedBoxLangItem)\n }\n\n pub fn from_builtin_kind(&self, bound: ty::BuiltinBound)\n -> Result<DefId, String>\n {\n match bound {\n ty::BoundSend => self.require(SendTraitLangItem),\n ty::BoundSized => self.require(SizedTraitLangItem),\n ty::BoundCopy => self.require(CopyTraitLangItem),\n ty::BoundSync => self.require(SyncTraitLangItem),\n }\n }\n\n pub fn to_builtin_kind(&self, id: DefId) -> Option<ty::BuiltinBound> {\n if Some(id) == self.send_trait() {\n Some(ty::BoundSend)\n } else if Some(id) == self.sized_trait() {\n Some(ty::BoundSized)\n } else if Some(id) == self.copy_trait() {\n Some(ty::BoundCopy)\n } else if Some(id) == self.sync_trait() {\n Some(ty::BoundSync)\n } else {\n None\n }\n }\n\n pub fn fn_trait_kind(&self, id: DefId) -> Option<ty::ClosureKind> {\n let def_id_kinds = [\n (self.fn_trait(), ty::ClosureKind::Fn),\n (self.fn_mut_trait(), ty::ClosureKind::FnMut),\n (self.fn_once_trait(), ty::ClosureKind::FnOnce),\n ];\n\n for &(opt_def_id, kind) in &def_id_kinds {\n if Some(id) == opt_def_id {\n return Some(kind);\n }\n }\n\n None\n }\n\n $(\n #[allow(dead_code)]\n pub fn $method(&self) -> Option<DefId> {\n self.items[$variant as usize]\n }\n )*\n}\n\nstruct 
LanguageItemCollector<'a, 'tcx: 'a> {\n items: LanguageItems,\n\n ast_map: &'a hir_map::Map<'tcx>,\n\n session: &'a Session,\n\n item_refs: FnvHashMap<&'static str, usize>,\n}\n\nimpl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> {\n fn visit_item(&mut self, item: &hir::Item) {\n if let Some(value) = extract(&item.attrs) {\n let item_index = self.item_refs.get(&value[..]).cloned();\n\n if let Some(item_index) = item_index {\n self.collect_item(item_index, self.ast_map.local_def_id(item.id))\n } else {\n let span = self.ast_map.span(item.id);\n span_err!(self.session, span, E0522,\n \"definition of an unknown language item: `{}`.\",\n &value[..]);\n }\n }\n }\n}\n\nimpl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {\n pub fn new(session: &'a Session, ast_map: &'a hir_map::Map<'tcx>)\n -> LanguageItemCollector<'a, 'tcx> {\n let mut item_refs = FnvHashMap();\n\n $( item_refs.insert($name, $variant as usize); )*\n\n LanguageItemCollector {\n session: session,\n ast_map: ast_map,\n items: LanguageItems::new(),\n item_refs: item_refs,\n }\n }\n\n pub fn collect_item(&mut self, item_index: usize,\n item_def_id: DefId) {\n \/\/ Check for duplicates.\n match self.items.items[item_index] {\n Some(original_def_id) if original_def_id != item_def_id => {\n let cstore = &self.session.cstore;\n let name = LanguageItems::item_name(item_index);\n let mut err = match self.ast_map.span_if_local(item_def_id) {\n Some(span) => struct_span_err!(\n self.session,\n span,\n E0152,\n \"duplicate lang item found: `{}`.\",\n name),\n None => self.session.struct_err(&format!(\n \"duplicate lang item in crate `{}`: `{}`.\",\n cstore.crate_name(item_def_id.krate),\n name)),\n };\n if let Some(span) = self.ast_map.span_if_local(original_def_id) {\n span_note!(&mut err, span,\n \"first defined here.\");\n } else {\n err.note(&format!(\"first defined in crate `{}`.\",\n cstore.crate_name(original_def_id.krate)));\n }\n err.emit();\n }\n _ => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n self.items.items[item_index] = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &hir::Crate) {\n krate.visit_all_items(self);\n }\n\n pub fn collect_external_language_items(&mut self) {\n let cstore = &self.session.cstore;\n\n for cnum in cstore.crates() {\n for (index, item_index) in cstore.lang_items(cnum) {\n let def_id = DefId { krate: cnum, index: index };\n self.collect_item(item_index, def_id);\n }\n }\n }\n\n pub fn collect(&mut self, krate: &hir::Crate) {\n self.collect_external_language_items();\n self.collect_local_language_items(krate);\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {\n for attribute in attrs {\n match attribute.value_str() {\n Some(ref value) if attribute.check_name(\"lang\") => {\n return Some(value.clone());\n }\n _ => {}\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(session: &Session,\n map: &hir_map::Map)\n -> LanguageItems {\n let _task = map.dep_graph.in_task(DepNode::CollectLanguageItems);\n let krate: &hir::Crate = map.krate();\n let mut collector = LanguageItemCollector::new(session, map);\n collector.collect(krate);\n let LanguageItemCollector { mut items, .. } = collector;\n weak_lang_items::check_crate(krate, session, &mut items);\n items\n}\n\n\/\/ End of the macro\n }\n}\n\nlets_do_this! 
{\n\/\/ Variant name, Name, Method name;\n CharImplItem, \"char\", char_impl;\n StrImplItem, \"str\", str_impl;\n SliceImplItem, \"slice\", slice_impl;\n ConstPtrImplItem, \"const_ptr\", const_ptr_impl;\n MutPtrImplItem, \"mut_ptr\", mut_ptr_impl;\n I8ImplItem, \"i8\", i8_impl;\n I16ImplItem, \"i16\", i16_impl;\n I32ImplItem, \"i32\", i32_impl;\n I64ImplItem, \"i64\", i64_impl;\n IsizeImplItem, \"isize\", isize_impl;\n U8ImplItem, \"u8\", u8_impl;\n U16ImplItem, \"u16\", u16_impl;\n U32ImplItem, \"u32\", u32_impl;\n U64ImplItem, \"u64\", u64_impl;\n UsizeImplItem, \"usize\", usize_impl;\n F32ImplItem, \"f32\", f32_impl;\n F64ImplItem, \"f64\", f64_impl;\n\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n UnsizeTraitLangItem, \"unsize\", unsize_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n SyncTraitLangItem, \"sync\", sync_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n CoerceUnsizedTraitLangItem, \"coerce_unsized\", coerce_unsized_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n AddAssignTraitLangItem, \"add_assign\", add_assign_trait;\n SubAssignTraitLangItem, \"sub_assign\", sub_assign_trait;\n MulAssignTraitLangItem, \"mul_assign\", mul_assign_trait;\n DivAssignTraitLangItem, \"div_assign\", div_assign_trait;\n RemAssignTraitLangItem, \"rem_assign\", rem_assign_trait;\n BitXorAssignTraitLangItem, \"bitxor_assign\", bitxor_assign_trait;\n BitAndAssignTraitLangItem, \"bitand_assign\", bitand_assign_trait;\n BitOrAssignTraitLangItem, \"bitor_assign\", bitor_assign_trait;\n ShlAssignTraitLangItem, \"shl_assign\", shl_assign_trait;\n ShrAssignTraitLangItem, \"shr_assign\", shr_assign_trait;\n IndexTraitLangItem, \"index\", index_trait;\n IndexMutTraitLangItem, \"index_mut\", index_mut_trait;\n\n UnsafeCellTypeLangItem, \"unsafe_cell\", unsafe_cell_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n FnTraitLangItem, \"fn\", fn_trait;\n FnMutTraitLangItem, \"fn_mut\", fn_mut_trait;\n FnOnceTraitLangItem, \"fn_once\", fn_once_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n\n \/\/ A number of panic-related lang items. The `panic` item corresponds to\n \/\/ divide-by-zero and various panic cases with `match`. The\n \/\/ `panic_bounds_check` item is for indexing arrays.\n \/\/\n \/\/ The `begin_unwind` lang item has a predefined symbol name and is sort of\n \/\/ a \"weak lang item\" in the sense that a crate is not required to have it\n \/\/ defined to use it, but a final product is required to define it\n \/\/ somewhere. 
Additionally, there are restrictions on crates that use a weak\n \/\/ lang item, but do not have it defined.\n PanicFnLangItem, \"panic\", panic_fn;\n PanicBoundsCheckFnLangItem, \"panic_bounds_check\", panic_bounds_check_fn;\n PanicFmtLangItem, \"panic_fmt\", panic_fmt;\n\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n ExchangeFreeFnLangItem, \"exchange_free\", exchange_free_fn;\n BoxFreeFnLangItem, \"box_free\", box_free_fn;\n StrDupUniqFnLangItem, \"strdup_uniq\", strdup_uniq_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality;\n EhPersonalityCatchLangItem, \"eh_personality_catch\", eh_personality_catch;\n EhUnwindResumeLangItem, \"eh_unwind_resume\", eh_unwind_resume;\n MSVCTryFilterLangItem, \"msvc_try_filter\", msvc_try_filter;\n\n OwnedBoxLangItem, \"owned_box\", owned_box;\n\n PhantomDataItem, \"phantom_data\", phantom_data;\n\n \/\/ Deprecated:\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NoCopyItem, \"no_copy_bound\", no_copy_bound;\n\n NonZeroItem, \"non_zero\", non_zero;\n\n DebugTraitLangItem, \"debug_trait\", debug_trait;\n}\n<commit_msg>Renames \"lets_do_this\" macro more appropriately.<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Sync\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. \"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\npub use self::LangItem::*;\n\nuse dep_graph::DepNode;\nuse hir::map as hir_map;\nuse session::Session;\nuse hir::def_id::DefId;\nuse ty;\nuse middle::weak_lang_items;\nuse util::nodemap::FnvHashMap;\n\nuse syntax::ast;\nuse syntax::attr::AttrMetaMethods;\nuse syntax::parse::token::InternedString;\nuse hir::intravisit::Visitor;\nuse hir;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! language_item_table {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n\nenum_from_u32! 
{\n #[derive(Copy, Clone, PartialEq, Eq, Hash)]\n pub enum LangItem {\n $($variant,)*\n }\n}\n\npub struct LanguageItems {\n pub items: Vec<Option<DefId>>,\n pub missing: Vec<LangItem>,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option<DefId> { None }\n\n LanguageItems {\n items: vec!($(foo($variant)),*),\n missing: Vec::new(),\n }\n }\n\n pub fn items(&self) -> &[Option<DefId>] {\n &*self.items\n }\n\n pub fn item_name(index: usize) -> &'static str {\n let item: Option<LangItem> = LangItem::from_u32(index as u32);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result<DefId, String> {\n match self.items[it as usize] {\n Some(id) => Ok(id),\n None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as usize)))\n }\n }\n }\n\n pub fn require_owned_box(&self) -> Result<DefId, String> {\n self.require(OwnedBoxLangItem)\n }\n\n pub fn from_builtin_kind(&self, bound: ty::BuiltinBound)\n -> Result<DefId, String>\n {\n match bound {\n ty::BoundSend => self.require(SendTraitLangItem),\n ty::BoundSized => self.require(SizedTraitLangItem),\n ty::BoundCopy => self.require(CopyTraitLangItem),\n ty::BoundSync => self.require(SyncTraitLangItem),\n }\n }\n\n pub fn to_builtin_kind(&self, id: DefId) -> Option<ty::BuiltinBound> {\n if Some(id) == self.send_trait() {\n Some(ty::BoundSend)\n } else if Some(id) == self.sized_trait() {\n Some(ty::BoundSized)\n } else if Some(id) == self.copy_trait() {\n Some(ty::BoundCopy)\n } else if Some(id) == self.sync_trait() {\n Some(ty::BoundSync)\n } else {\n None\n }\n }\n\n pub fn fn_trait_kind(&self, id: DefId) -> Option<ty::ClosureKind> {\n let def_id_kinds = [\n (self.fn_trait(), ty::ClosureKind::Fn),\n (self.fn_mut_trait(), ty::ClosureKind::FnMut),\n (self.fn_once_trait(), ty::ClosureKind::FnOnce),\n ];\n\n for &(opt_def_id, kind) in &def_id_kinds {\n if Some(id) == opt_def_id {\n return Some(kind);\n }\n }\n\n None\n }\n\n $(\n #[allow(dead_code)]\n pub fn $method(&self) -> Option<DefId> {\n self.items[$variant as usize]\n }\n )*\n}\n\nstruct LanguageItemCollector<'a, 'tcx: 'a> {\n items: LanguageItems,\n\n ast_map: &'a hir_map::Map<'tcx>,\n\n session: &'a Session,\n\n item_refs: FnvHashMap<&'static str, usize>,\n}\n\nimpl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> {\n fn visit_item(&mut self, item: &hir::Item) {\n if let Some(value) = extract(&item.attrs) {\n let item_index = self.item_refs.get(&value[..]).cloned();\n\n if let Some(item_index) = item_index {\n self.collect_item(item_index, self.ast_map.local_def_id(item.id))\n } else {\n let span = self.ast_map.span(item.id);\n span_err!(self.session, span, E0522,\n \"definition of an unknown language item: `{}`.\",\n &value[..]);\n }\n }\n }\n}\n\nimpl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {\n pub fn new(session: &'a Session, ast_map: &'a hir_map::Map<'tcx>)\n -> LanguageItemCollector<'a, 'tcx> {\n let mut item_refs = FnvHashMap();\n\n $( item_refs.insert($name, $variant as usize); )*\n\n LanguageItemCollector {\n session: session,\n ast_map: ast_map,\n items: LanguageItems::new(),\n item_refs: item_refs,\n }\n }\n\n pub fn collect_item(&mut self, item_index: usize,\n item_def_id: DefId) {\n \/\/ Check for duplicates.\n match self.items.items[item_index] {\n Some(original_def_id) if original_def_id != item_def_id => {\n let cstore = &self.session.cstore;\n let name = LanguageItems::item_name(item_index);\n let mut err = match 
self.ast_map.span_if_local(item_def_id) {\n Some(span) => struct_span_err!(\n self.session,\n span,\n E0152,\n \"duplicate lang item found: `{}`.\",\n name),\n None => self.session.struct_err(&format!(\n \"duplicate lang item in crate `{}`: `{}`.\",\n cstore.crate_name(item_def_id.krate),\n name)),\n };\n if let Some(span) = self.ast_map.span_if_local(original_def_id) {\n span_note!(&mut err, span,\n \"first defined here.\");\n } else {\n err.note(&format!(\"first defined in crate `{}`.\",\n cstore.crate_name(original_def_id.krate)));\n }\n err.emit();\n }\n _ => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n self.items.items[item_index] = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &hir::Crate) {\n krate.visit_all_items(self);\n }\n\n pub fn collect_external_language_items(&mut self) {\n let cstore = &self.session.cstore;\n\n for cnum in cstore.crates() {\n for (index, item_index) in cstore.lang_items(cnum) {\n let def_id = DefId { krate: cnum, index: index };\n self.collect_item(item_index, def_id);\n }\n }\n }\n\n pub fn collect(&mut self, krate: &hir::Crate) {\n self.collect_external_language_items();\n self.collect_local_language_items(krate);\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {\n for attribute in attrs {\n match attribute.value_str() {\n Some(ref value) if attribute.check_name(\"lang\") => {\n return Some(value.clone());\n }\n _ => {}\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(session: &Session,\n map: &hir_map::Map)\n -> LanguageItems {\n let _task = map.dep_graph.in_task(DepNode::CollectLanguageItems);\n let krate: &hir::Crate = map.krate();\n let mut collector = LanguageItemCollector::new(session, map);\n collector.collect(krate);\n let LanguageItemCollector { mut items, .. } = collector;\n weak_lang_items::check_crate(krate, session, &mut items);\n items\n}\n\n\/\/ End of the macro\n }\n}\n\nlanguage_item_table! 
{\n\/\/ Variant name, Name, Method name;\n CharImplItem, \"char\", char_impl;\n StrImplItem, \"str\", str_impl;\n SliceImplItem, \"slice\", slice_impl;\n ConstPtrImplItem, \"const_ptr\", const_ptr_impl;\n MutPtrImplItem, \"mut_ptr\", mut_ptr_impl;\n I8ImplItem, \"i8\", i8_impl;\n I16ImplItem, \"i16\", i16_impl;\n I32ImplItem, \"i32\", i32_impl;\n I64ImplItem, \"i64\", i64_impl;\n IsizeImplItem, \"isize\", isize_impl;\n U8ImplItem, \"u8\", u8_impl;\n U16ImplItem, \"u16\", u16_impl;\n U32ImplItem, \"u32\", u32_impl;\n U64ImplItem, \"u64\", u64_impl;\n UsizeImplItem, \"usize\", usize_impl;\n F32ImplItem, \"f32\", f32_impl;\n F64ImplItem, \"f64\", f64_impl;\n\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n UnsizeTraitLangItem, \"unsize\", unsize_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n SyncTraitLangItem, \"sync\", sync_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n CoerceUnsizedTraitLangItem, \"coerce_unsized\", coerce_unsized_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n AddAssignTraitLangItem, \"add_assign\", add_assign_trait;\n SubAssignTraitLangItem, \"sub_assign\", sub_assign_trait;\n MulAssignTraitLangItem, \"mul_assign\", mul_assign_trait;\n DivAssignTraitLangItem, \"div_assign\", div_assign_trait;\n RemAssignTraitLangItem, \"rem_assign\", rem_assign_trait;\n BitXorAssignTraitLangItem, \"bitxor_assign\", bitxor_assign_trait;\n BitAndAssignTraitLangItem, \"bitand_assign\", bitand_assign_trait;\n BitOrAssignTraitLangItem, \"bitor_assign\", bitor_assign_trait;\n ShlAssignTraitLangItem, \"shl_assign\", shl_assign_trait;\n ShrAssignTraitLangItem, \"shr_assign\", shr_assign_trait;\n IndexTraitLangItem, \"index\", index_trait;\n IndexMutTraitLangItem, \"index_mut\", index_mut_trait;\n\n UnsafeCellTypeLangItem, \"unsafe_cell\", unsafe_cell_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n FnTraitLangItem, \"fn\", fn_trait;\n FnMutTraitLangItem, \"fn_mut\", fn_mut_trait;\n FnOnceTraitLangItem, \"fn_once\", fn_once_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n\n \/\/ A number of panic-related lang items. The `panic` item corresponds to\n \/\/ divide-by-zero and various panic cases with `match`. The\n \/\/ `panic_bounds_check` item is for indexing arrays.\n \/\/\n \/\/ The `begin_unwind` lang item has a predefined symbol name and is sort of\n \/\/ a \"weak lang item\" in the sense that a crate is not required to have it\n \/\/ defined to use it, but a final product is required to define it\n \/\/ somewhere. 
Additionally, there are restrictions on crates that use a weak\n \/\/ lang item, but do not have it defined.\n PanicFnLangItem, \"panic\", panic_fn;\n PanicBoundsCheckFnLangItem, \"panic_bounds_check\", panic_bounds_check_fn;\n PanicFmtLangItem, \"panic_fmt\", panic_fmt;\n\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n ExchangeFreeFnLangItem, \"exchange_free\", exchange_free_fn;\n BoxFreeFnLangItem, \"box_free\", box_free_fn;\n StrDupUniqFnLangItem, \"strdup_uniq\", strdup_uniq_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality;\n EhPersonalityCatchLangItem, \"eh_personality_catch\", eh_personality_catch;\n EhUnwindResumeLangItem, \"eh_unwind_resume\", eh_unwind_resume;\n MSVCTryFilterLangItem, \"msvc_try_filter\", msvc_try_filter;\n\n OwnedBoxLangItem, \"owned_box\", owned_box;\n\n PhantomDataItem, \"phantom_data\", phantom_data;\n\n \/\/ Deprecated:\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NoCopyItem, \"no_copy_bound\", no_copy_bound;\n\n NonZeroItem, \"non_zero\", non_zero;\n\n DebugTraitLangItem, \"debug_trait\", debug_trait;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add configuration common module<commit_after>\/\/ src\/common\/config.rs\n\n\/\/\/ API Configuration\n\n\/\/Import Crates\nextern crate toml;\n\n\/\/Import Modules\nuse std::io;\nuse std::io::prelude::*;\nuse std::fs::File;\n\npub struct Config {\n pub database: Database\n}\n\n#[derive(Debug)]\npub struct Database {\n pub host: Option<String>,\n pub port: Option<i64>,\n pub name: Option<String>,\n pub username: Option<String>,\n pub password: Option<String>,\n}\n\nimpl Config{\n\n \/\/\/ Create A Config struct\n \/\/\/\n \/\/\/ # Returns\n \/\/\/ `Config` - Config struct\n pub fn new() -> Config{\n\n let mut database = None;\n\n match read_config_from_file(\"config\/config.toml\"){\n Ok(configString) => {\n \/\/debug!(\"Config String: {}\", configString);\n match toml::Parser::new(&configString).parse(){\n Some(configTable) => {\n match configTable.get(\"database\"){\n Some(dc) => {\n match dc.as_table(){\n Some(databaseConfig) => {\n \/\/std::collections::BTreeMap\n let host = match databaseConfig.get(\"host\"){\n Some(v) => {\n \/\/v: toml::Value\n match v.as_str(){\n Some(vs) => Some(vs.to_string()),\n None => {\n warn!(\"Cannot read database host as string\");\n None\n }\n }\n },\n None => {\n warn!(\"Database host not found in configuration\");\n None\n }\n };\n let port = match databaseConfig.get(\"port\"){\n Some(v) => v.as_integer(),\/\/v: toml::Value\n None => {\n warn!(\"Database port not found in configuration\");\n None\n }\n };\n let name = match databaseConfig.get(\"name\"){\n Some(v) => {\n \/\/v: toml::Value\n match v.as_str(){\n Some(vs) => Some(vs.to_string()),\n None => {\n warn!(\"Cannot read database name as string\");\n None\n }\n }\n },\n None => {\n warn!(\"Database name not found in configuration\");\n None\n }\n };\n let username = match databaseConfig.get(\"username\"){\n Some(v) => {\n \/\/v: toml::Value\n match v.as_str(){\n Some(vs) => Some(vs.to_string()),\n None => {\n warn!(\"Cannot read database username as string\");\n None\n }\n }\n 
},\n None => {\n warn!(\"Database username not found in configuration\");\n None\n }\n };\n let password = match databaseConfig.get(\"password\"){\n Some(v) => {\n \/\/v: toml::Value\n match v.as_str(){\n Some(vs) => Some(vs.to_string()),\n None => {\n warn!(\"Cannot read database password as string\");\n None\n }\n }\n },\n None => {\n warn!(\"Database password not found in configuration\");\n None\n }\n };\n\n database = Some(Database{\n host: host,\n port: port,\n name: name,\n username: username,\n password: password\n });\n },\n None => {}\n }\n },\n None => {\n warn!(\"No `database` configuration found.\");\n }\n }\n }\n None => {}\n }\n },\n Err(e) => {\n warn!(\"Error reading configuration file\");\n }\n }\n\n Config {\n database: match database{\n None => Database{\n host: None,\n port: None,\n name: None,\n username: None,\n password: None\n },\n Some(d) => d\n }\n }\n }\n}\n\n\/\/\/ Read Configuration From File\n\/\/\/\n\/\/\/ # Arguments\n\/\/\/ * `file` - &str The path to configuration file\n\/\/\/\n\/\/\/ # Returns\n\/\/\/ `Result<String>` - File Contents\nfn read_config_from_file(file: &'static str) -> Result<String, io::Error>{\n \/\/open the file\n let mut f = try!(File::open(file));\n\n \/\/ read file contents into String\n let mut buffer = String::new();\n try!(f.read_to_string(&mut buffer));\n\n Ok(buffer)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Orphan checker: every impl either implements a trait defined in this\n\/\/! crate or pertains to a type defined in this crate.\n\nuse middle::traits;\nuse middle::ty;\nuse syntax::ast::{Item, ItemImpl};\nuse syntax::ast;\nuse syntax::ast_util;\nuse syntax::codemap::Span;\nuse syntax::visit;\nuse util::ppaux::{Repr, UserString};\n\npub fn check(tcx: &ty::ctxt) {\n let mut orphan = OrphanChecker { tcx: tcx };\n visit::walk_crate(&mut orphan, tcx.map.krate());\n}\n\nstruct OrphanChecker<'cx, 'tcx:'cx> {\n tcx: &'cx ty::ctxt<'tcx>\n}\n\nimpl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> {\n fn check_def_id(&self, span: Span, def_id: ast::DefId) {\n if def_id.krate != ast::LOCAL_CRATE {\n span_err!(self.tcx.sess, span, E0116,\n \"cannot associate methods with a type outside the \\\n crate the type is defined in; define and implement \\\n a trait or new type instead\");\n }\n }\n}\n\nimpl<'cx, 'tcx,'v> visit::Visitor<'v> for OrphanChecker<'cx, 'tcx> {\n fn visit_item(&mut self, item: &'v ast::Item) {\n let def_id = ast_util::local_def(item.id);\n match item.node {\n ast::ItemImpl(_, _, _, None, _, _) => {\n \/\/ For inherent impls, self type must be a nominal type\n \/\/ defined in this crate.\n debug!(\"coherence2::orphan check: inherent impl {}\", item.repr(self.tcx));\n let self_ty = ty::lookup_item_type(self.tcx, def_id).ty;\n match self_ty.sty {\n ty::ty_enum(def_id, _) |\n ty::ty_struct(def_id, _) => {\n self.check_def_id(item.span, def_id);\n }\n ty::ty_trait(ref data) => {\n self.check_def_id(item.span, data.principal_def_id());\n }\n ty::ty_uniq(..) 
=> {\n self.check_def_id(item.span,\n self.tcx.lang_items.owned_box()\n .unwrap());\n }\n _ => {\n span_err!(self.tcx.sess, item.span, E0118,\n \"no base type found for inherent implementation; \\\n implement a trait or new type instead\");\n }\n }\n }\n ast::ItemImpl(_, _, _, Some(_), _, _) => {\n \/\/ \"Trait\" impl\n debug!(\"coherence2::orphan check: trait impl {}\", item.repr(self.tcx));\n let trait_def_id = ty::impl_trait_ref(self.tcx, def_id).unwrap().def_id;\n match traits::orphan_check(self.tcx, def_id) {\n Ok(()) => { }\n Err(traits::OrphanCheckErr::NoLocalInputType) => {\n if !ty::has_attr(self.tcx, trait_def_id, \"old_orphan_check\") {\n let self_ty = ty::lookup_item_type(self.tcx, def_id).ty;\n span_err!(\n self.tcx.sess, item.span, E0117,\n \"the type `{}` does not reference any \\\n types defined in this crate; \\\n only traits defined in the current crate can be \\\n implemented for arbitrary types\",\n self_ty.user_string(self.tcx));\n }\n }\n Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => {\n if !ty::has_attr(self.tcx, trait_def_id, \"old_orphan_check\") {\n span_err!(self.tcx.sess, item.span, E0210,\n \"type parameter `{}` is not constrained by any local type; \\\n only traits defined in the current crate can be implemented \\\n for a type parameter\",\n param_ty.user_string(self.tcx));\n self.tcx.sess.span_note(\n item.span,\n format!(\"for a limited time, you can add \\\n `#![feature(old_orphan_check)]` to your crate \\\n to disable this rule\").as_slice());\n }\n }\n }\n }\n _ => {\n \/\/ Not an impl\n }\n }\n\n visit::walk_item(self, item);\n }\n}\n<commit_msg>Adjust error message not to mention the self type<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Orphan checker: every impl either implements a trait defined in this\n\/\/! 
crate or pertains to a type defined in this crate.\n\nuse middle::traits;\nuse middle::ty;\nuse syntax::ast::{Item, ItemImpl};\nuse syntax::ast;\nuse syntax::ast_util;\nuse syntax::codemap::Span;\nuse syntax::visit;\nuse util::ppaux::{Repr, UserString};\n\npub fn check(tcx: &ty::ctxt) {\n let mut orphan = OrphanChecker { tcx: tcx };\n visit::walk_crate(&mut orphan, tcx.map.krate());\n}\n\nstruct OrphanChecker<'cx, 'tcx:'cx> {\n tcx: &'cx ty::ctxt<'tcx>\n}\n\nimpl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> {\n fn check_def_id(&self, span: Span, def_id: ast::DefId) {\n if def_id.krate != ast::LOCAL_CRATE {\n span_err!(self.tcx.sess, span, E0116,\n \"cannot associate methods with a type outside the \\\n crate the type is defined in; define and implement \\\n a trait or new type instead\");\n }\n }\n}\n\nimpl<'cx, 'tcx,'v> visit::Visitor<'v> for OrphanChecker<'cx, 'tcx> {\n fn visit_item(&mut self, item: &'v ast::Item) {\n let def_id = ast_util::local_def(item.id);\n match item.node {\n ast::ItemImpl(_, _, _, None, _, _) => {\n \/\/ For inherent impls, self type must be a nominal type\n \/\/ defined in this crate.\n debug!(\"coherence2::orphan check: inherent impl {}\", item.repr(self.tcx));\n let self_ty = ty::lookup_item_type(self.tcx, def_id).ty;\n match self_ty.sty {\n ty::ty_enum(def_id, _) |\n ty::ty_struct(def_id, _) => {\n self.check_def_id(item.span, def_id);\n }\n ty::ty_trait(ref data) => {\n self.check_def_id(item.span, data.principal_def_id());\n }\n ty::ty_uniq(..) => {\n self.check_def_id(item.span,\n self.tcx.lang_items.owned_box()\n .unwrap());\n }\n _ => {\n span_err!(self.tcx.sess, item.span, E0118,\n \"no base type found for inherent implementation; \\\n implement a trait or new type instead\");\n }\n }\n }\n ast::ItemImpl(_, _, _, Some(_), _, _) => {\n \/\/ \"Trait\" impl\n debug!(\"coherence2::orphan check: trait impl {}\", item.repr(self.tcx));\n let trait_def_id = ty::impl_trait_ref(self.tcx, def_id).unwrap().def_id;\n match traits::orphan_check(self.tcx, def_id) {\n Ok(()) => { }\n Err(traits::OrphanCheckErr::NoLocalInputType) => {\n if !ty::has_attr(self.tcx, trait_def_id, \"old_orphan_check\") {\n span_err!(\n self.tcx.sess, item.span, E0117,\n \"the impl does not reference any \\\n types defined in this crate; \\\n only traits defined in the current crate can be \\\n implemented for arbitrary types\");\n }\n }\n Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => {\n if !ty::has_attr(self.tcx, trait_def_id, \"old_orphan_check\") {\n span_err!(self.tcx.sess, item.span, E0210,\n \"type parameter `{}` is not constrained by any local type; \\\n only traits defined in the current crate can be implemented \\\n for a type parameter\",\n param_ty.user_string(self.tcx));\n self.tcx.sess.span_note(\n item.span,\n format!(\"for a limited time, you can add \\\n `#![feature(old_orphan_check)]` to your crate \\\n to disable this rule\").as_slice());\n }\n }\n }\n }\n _ => {\n \/\/ Not an impl\n }\n }\n\n visit::walk_item(self, item);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add adapter example w\/sync API<commit_after>\/\/ Demonstrates synchronous Search with an adapter chain, and\n\/\/ that the same adapters are used as in the async case.\n\/\/\n\/\/ If you comment out the first element of the adapters vector,\n\/\/ the program will crash when it hits a referral.\n\nuse ldap3::adapters::{Adapter, EntriesOnly, PagedResults};\nuse ldap3::result::Result;\nuse ldap3::{LdapConn, Scope, SearchEntry};\n\nfn main() -> Result<()> {\n let mut ldap = 
LdapConn::new(\"ldap:\/\/localhost:2389\")?;\n let adapters: Vec<Box<dyn Adapter<_>>> = vec![\n Box::new(EntriesOnly::new()),\n Box::new(PagedResults::new(400)),\n ];\n let mut search = ldap.streaming_search_with(\n adapters,\n \"dc=example,dc=org\",\n Scope::Subtree,\n \"(objectClass=*)\",\n vec![\"dn\"],\n )?;\n while let Some(entry) = search.next()? {\n let entry = SearchEntry::construct(entry);\n println!(\"{:?}\", entry);\n }\n let _res = search.result().success()?;\n Ok(ldap.unbind()?)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ xfail-stage0\n\/\/ xfail-stage1\n\/\/ xfail-stage2\n\n\/\/ Regression test for issue #374\n\nuse std;\nimport std::option;\nimport std::option::none;\n\ntag sty {\n ty_nil;\n}\n\ntype raw_t = rec(sty struct,\n option::t[str] cname,\n uint hash);\n\nfn mk_raw_ty(sty st, &option::t[str] cname) -> raw_t {\n ret rec(struct=st,\n cname=cname,\n hash=0u);\n}\n\nfn main() {\n mk_raw_ty(ty_nil, none[str]);\n}\n<commit_msg>test: Un-xfail alias-uninit-value.rs. Closes #374.<commit_after>\/\/ Regression test for issue #374\n\nuse std;\nimport std::option;\nimport std::option::none;\n\ntag sty {\n ty_nil;\n}\n\ntype raw_t = rec(sty struct,\n option::t[str] cname,\n uint hash);\n\nfn mk_raw_ty(sty st, &option::t[str] cname) -> raw_t {\n ret rec(struct=st,\n cname=cname,\n hash=0u);\n}\n\nfn main() {\n mk_raw_ty(ty_nil, none[str]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove InternalLinker::set_internal_links()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>abort parser, if any, when pipeline exits<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #2995<commit_after>fn bad (p: *int) {\n let _q: &int = p as ∫ \/\/~ ERROR non-scalar cast\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Replaces 128-bit operators with lang item calls\n\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::lang_items::LangItem;\nuse rustc::mir::*;\nuse rustc::ty::{Slice, Ty, TyCtxt, TypeVariants};\nuse rustc_data_structures::indexed_vec::{Idx};\nuse transform::{MirPass, MirSource};\nuse syntax;\n\npub struct Lower128Bit;\n\nimpl MirPass for Lower128Bit {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _src: MirSource,\n mir: &mut Mir<'tcx>) {\n let debugging_override = tcx.sess.opts.debugging_opts.lower_128bit_ops;\n let target_default = tcx.sess.host.options.i128_lowering;\n if !debugging_override.unwrap_or(target_default) {\n return\n }\n\n self.lower_128bit_ops(tcx, mir);\n }\n}\n\nimpl Lower128Bit {\n fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<'tcx>) {\n let mut new_blocks = Vec::new();\n let cur_len = mir.basic_blocks().len();\n\n let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();\n for block in basic_blocks.iter_mut() {\n for i in (0..block.statements.len()).rev() {\n let (lang_item, rhs_kind) =\n if let Some((lang_item, rhs_kind)) =\n lower_to(&block.statements[i], local_decls, tcx)\n {\n (lang_item, rhs_kind)\n } else {\n continue;\n };\n\n let rhs_override_ty = rhs_kind.ty(tcx);\n let cast_local =\n match rhs_override_ty {\n None => None,\n Some(ty) => {\n let local_decl = LocalDecl::new_internal(\n ty, block.statements[i].source_info.span);\n Some(local_decls.push(local_decl))\n },\n };\n\n let storage_dead = cast_local.map(|local| {\n Statement {\n source_info: block.statements[i].source_info,\n kind: StatementKind::StorageDead(local),\n }\n });\n let after_call = BasicBlockData {\n statements: storage_dead.into_iter()\n .chain(block.statements.drain((i+1)..)).collect(),\n is_cleanup: block.is_cleanup,\n terminator: block.terminator.take(),\n };\n\n let bin_statement = block.statements.pop().unwrap();\n let (source_info, place, lhs, mut rhs) = match bin_statement {\n Statement {\n source_info,\n kind: StatementKind::Assign(\n place,\n Rvalue::BinaryOp(_, lhs, rhs))\n } => (source_info, place, lhs, rhs),\n Statement {\n source_info,\n kind: StatementKind::Assign(\n place,\n Rvalue::CheckedBinaryOp(_, lhs, rhs))\n } => (source_info, place, lhs, rhs),\n _ => bug!(\"Statement doesn't match pattern any more?\"),\n };\n\n if let Some(local) = cast_local {\n block.statements.push(Statement {\n source_info: source_info,\n kind: StatementKind::StorageLive(local),\n });\n block.statements.push(Statement {\n source_info: source_info,\n kind: StatementKind::Assign(\n Place::Local(local),\n Rvalue::Cast(\n CastKind::Misc,\n rhs,\n rhs_override_ty.unwrap())),\n });\n rhs = Operand::Move(Place::Local(local));\n }\n\n let call_did = check_lang_item_type(\n lang_item, &place, &lhs, &rhs, local_decls, tcx);\n\n let bb = BasicBlock::new(cur_len + new_blocks.len());\n new_blocks.push(after_call);\n\n block.terminator =\n Some(Terminator {\n source_info,\n kind: TerminatorKind::Call {\n func: Operand::function_handle(tcx, call_did,\n Slice::empty(), source_info.span),\n args: vec![lhs, rhs],\n destination: Some((place, bb)),\n cleanup: None,\n },\n });\n }\n }\n\n basic_blocks.extend(new_blocks);\n }\n}\n\nfn check_lang_item_type<'a, 'tcx, D>(\n lang_item: LangItem,\n place: &Place<'tcx>,\n lhs: &Operand<'tcx>,\n rhs: &Operand<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>)\n-> DefId\n where D: HasLocalDecls<'tcx>\n{\n let did = tcx.require_lang_item(lang_item);\n let poly_sig = tcx.fn_sig(did);\n let sig = 
poly_sig.no_late_bound_regions().unwrap();\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n let place_ty = place.ty(local_decls, tcx).to_ty(tcx);\n let expected = [lhs_ty, rhs_ty, place_ty];\n assert_eq!(sig.inputs_and_output[..], expected,\n \"lang item {}\", tcx.def_symbol_name(did));\n did\n}\n\nfn lower_to<'a, 'tcx, D>(statement: &Statement<'tcx>, local_decls: &D, tcx: TyCtxt<'a, 'tcx, 'tcx>)\n -> Option<(LangItem, RhsKind)>\n where D: HasLocalDecls<'tcx>\n{\n match statement.kind {\n StatementKind::Assign(_, Rvalue::BinaryOp(bin_op, ref lhs, _)) => {\n let ty = lhs.ty(local_decls, tcx);\n if let Some(is_signed) = sign_of_128bit(ty) {\n return item_for_op(bin_op, is_signed);\n }\n },\n StatementKind::Assign(_, Rvalue::CheckedBinaryOp(bin_op, ref lhs, _)) => {\n let ty = lhs.ty(local_decls, tcx);\n if let Some(is_signed) = sign_of_128bit(ty) {\n return item_for_checked_op(bin_op, is_signed);\n }\n },\n _ => {},\n }\n None\n}\n\n#[derive(Copy, Clone)]\nenum RhsKind {\n Unchanged,\n ForceU128,\n ForceU32,\n}\n\nimpl RhsKind {\n fn ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Ty<'tcx>> {\n match *self {\n RhsKind::Unchanged => None,\n RhsKind::ForceU128 => Some(tcx.types.u128),\n RhsKind::ForceU32 => Some(tcx.types.u32),\n }\n }\n}\n\nfn sign_of_128bit(ty: Ty) -> Option<bool> {\n match ty.sty {\n TypeVariants::TyInt(syntax::ast::IntTy::I128) => Some(true),\n TypeVariants::TyUint(syntax::ast::UintTy::U128) => Some(false),\n _ => None,\n }\n}\n\nfn item_for_op(bin_op: BinOp, is_signed: bool) -> Option<(LangItem, RhsKind)> {\n let i = match (bin_op, is_signed) {\n (BinOp::Add, true) => (LangItem::I128AddFnLangItem, RhsKind::Unchanged),\n (BinOp::Add, false) => (LangItem::U128AddFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, true) => (LangItem::I128SubFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, false) => (LangItem::U128SubFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, true) => (LangItem::I128MulFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, false) => (LangItem::U128MulFnLangItem, RhsKind::Unchanged),\n (BinOp::Div, true) => (LangItem::I128DivFnLangItem, RhsKind::Unchanged),\n (BinOp::Div, false) => (LangItem::U128DivFnLangItem, RhsKind::Unchanged),\n (BinOp::Rem, true) => (LangItem::I128RemFnLangItem, RhsKind::Unchanged),\n (BinOp::Rem, false) => (LangItem::U128RemFnLangItem, RhsKind::Unchanged),\n (BinOp::Shl, true) => (LangItem::I128ShlFnLangItem, RhsKind::ForceU32),\n (BinOp::Shl, false) => (LangItem::U128ShlFnLangItem, RhsKind::ForceU32),\n (BinOp::Shr, true) => (LangItem::I128ShrFnLangItem, RhsKind::ForceU32),\n (BinOp::Shr, false) => (LangItem::U128ShrFnLangItem, RhsKind::ForceU32),\n _ => return None,\n };\n Some(i)\n}\n\nfn item_for_checked_op(bin_op: BinOp, is_signed: bool) -> Option<(LangItem, RhsKind)> {\n let i = match (bin_op, is_signed) {\n (BinOp::Add, true) => (LangItem::I128AddoFnLangItem, RhsKind::Unchanged),\n (BinOp::Add, false) => (LangItem::U128AddoFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, true) => (LangItem::I128SuboFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, false) => (LangItem::U128SuboFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, true) => (LangItem::I128MuloFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, false) => (LangItem::U128MuloFnLangItem, RhsKind::Unchanged),\n (BinOp::Shl, true) => (LangItem::I128ShloFnLangItem, RhsKind::ForceU128),\n (BinOp::Shl, false) => (LangItem::U128ShloFnLangItem, RhsKind::ForceU128),\n (BinOp::Shr, true) => (LangItem::I128ShroFnLangItem, RhsKind::ForceU128),\n 
(BinOp::Shr, false) => (LangItem::U128ShroFnLangItem, RhsKind::ForceU128),\n _ => bug!(\"That should be all the checked ones?\"),\n };\n Some(i)\n}\n<commit_msg>Rollup merge of #48727 - leodasvacas:refactor-contrived-match, r=rkruppe<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Replaces 128-bit operators with lang item calls\n\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::lang_items::LangItem;\nuse rustc::mir::*;\nuse rustc::ty::{Slice, Ty, TyCtxt, TypeVariants};\nuse rustc_data_structures::indexed_vec::{Idx};\nuse transform::{MirPass, MirSource};\nuse syntax;\n\npub struct Lower128Bit;\n\nimpl MirPass for Lower128Bit {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _src: MirSource,\n mir: &mut Mir<'tcx>) {\n let debugging_override = tcx.sess.opts.debugging_opts.lower_128bit_ops;\n let target_default = tcx.sess.host.options.i128_lowering;\n if !debugging_override.unwrap_or(target_default) {\n return\n }\n\n self.lower_128bit_ops(tcx, mir);\n }\n}\n\nimpl Lower128Bit {\n fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<'tcx>) {\n let mut new_blocks = Vec::new();\n let cur_len = mir.basic_blocks().len();\n\n let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();\n for block in basic_blocks.iter_mut() {\n for i in (0..block.statements.len()).rev() {\n let (lang_item, rhs_kind) =\n if let Some((lang_item, rhs_kind)) =\n lower_to(&block.statements[i], local_decls, tcx)\n {\n (lang_item, rhs_kind)\n } else {\n continue;\n };\n\n let rhs_override_ty = rhs_kind.ty(tcx);\n let cast_local =\n match rhs_override_ty {\n None => None,\n Some(ty) => {\n let local_decl = LocalDecl::new_internal(\n ty, block.statements[i].source_info.span);\n Some(local_decls.push(local_decl))\n },\n };\n\n let storage_dead = cast_local.map(|local| {\n Statement {\n source_info: block.statements[i].source_info,\n kind: StatementKind::StorageDead(local),\n }\n });\n let after_call = BasicBlockData {\n statements: storage_dead.into_iter()\n .chain(block.statements.drain((i+1)..)).collect(),\n is_cleanup: block.is_cleanup,\n terminator: block.terminator.take(),\n };\n\n let bin_statement = block.statements.pop().unwrap();\n let source_info = bin_statement.source_info;\n let (place, lhs, mut rhs) = match bin_statement.kind {\n StatementKind::Assign(place, Rvalue::BinaryOp(_, lhs, rhs))\n | StatementKind::Assign(place, Rvalue::CheckedBinaryOp(_, lhs, rhs)) => {\n (place, lhs, rhs)\n }\n _ => bug!(\"Statement doesn't match pattern any more?\"),\n };\n\n if let Some(local) = cast_local {\n block.statements.push(Statement {\n source_info: source_info,\n kind: StatementKind::StorageLive(local),\n });\n block.statements.push(Statement {\n source_info: source_info,\n kind: StatementKind::Assign(\n Place::Local(local),\n Rvalue::Cast(\n CastKind::Misc,\n rhs,\n rhs_override_ty.unwrap())),\n });\n rhs = Operand::Move(Place::Local(local));\n }\n\n let call_did = check_lang_item_type(\n lang_item, &place, &lhs, &rhs, local_decls, tcx);\n\n let bb = BasicBlock::new(cur_len + new_blocks.len());\n 
new_blocks.push(after_call);\n\n block.terminator =\n Some(Terminator {\n source_info,\n kind: TerminatorKind::Call {\n func: Operand::function_handle(tcx, call_did,\n Slice::empty(), source_info.span),\n args: vec![lhs, rhs],\n destination: Some((place, bb)),\n cleanup: None,\n },\n });\n }\n }\n\n basic_blocks.extend(new_blocks);\n }\n}\n\nfn check_lang_item_type<'a, 'tcx, D>(\n lang_item: LangItem,\n place: &Place<'tcx>,\n lhs: &Operand<'tcx>,\n rhs: &Operand<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>)\n-> DefId\n where D: HasLocalDecls<'tcx>\n{\n let did = tcx.require_lang_item(lang_item);\n let poly_sig = tcx.fn_sig(did);\n let sig = poly_sig.no_late_bound_regions().unwrap();\n let lhs_ty = lhs.ty(local_decls, tcx);\n let rhs_ty = rhs.ty(local_decls, tcx);\n let place_ty = place.ty(local_decls, tcx).to_ty(tcx);\n let expected = [lhs_ty, rhs_ty, place_ty];\n assert_eq!(sig.inputs_and_output[..], expected,\n \"lang item {}\", tcx.def_symbol_name(did));\n did\n}\n\nfn lower_to<'a, 'tcx, D>(statement: &Statement<'tcx>, local_decls: &D, tcx: TyCtxt<'a, 'tcx, 'tcx>)\n -> Option<(LangItem, RhsKind)>\n where D: HasLocalDecls<'tcx>\n{\n match statement.kind {\n StatementKind::Assign(_, Rvalue::BinaryOp(bin_op, ref lhs, _)) => {\n let ty = lhs.ty(local_decls, tcx);\n if let Some(is_signed) = sign_of_128bit(ty) {\n return item_for_op(bin_op, is_signed);\n }\n },\n StatementKind::Assign(_, Rvalue::CheckedBinaryOp(bin_op, ref lhs, _)) => {\n let ty = lhs.ty(local_decls, tcx);\n if let Some(is_signed) = sign_of_128bit(ty) {\n return item_for_checked_op(bin_op, is_signed);\n }\n },\n _ => {},\n }\n None\n}\n\n#[derive(Copy, Clone)]\nenum RhsKind {\n Unchanged,\n ForceU128,\n ForceU32,\n}\n\nimpl RhsKind {\n fn ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Ty<'tcx>> {\n match *self {\n RhsKind::Unchanged => None,\n RhsKind::ForceU128 => Some(tcx.types.u128),\n RhsKind::ForceU32 => Some(tcx.types.u32),\n }\n }\n}\n\nfn sign_of_128bit(ty: Ty) -> Option<bool> {\n match ty.sty {\n TypeVariants::TyInt(syntax::ast::IntTy::I128) => Some(true),\n TypeVariants::TyUint(syntax::ast::UintTy::U128) => Some(false),\n _ => None,\n }\n}\n\nfn item_for_op(bin_op: BinOp, is_signed: bool) -> Option<(LangItem, RhsKind)> {\n let i = match (bin_op, is_signed) {\n (BinOp::Add, true) => (LangItem::I128AddFnLangItem, RhsKind::Unchanged),\n (BinOp::Add, false) => (LangItem::U128AddFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, true) => (LangItem::I128SubFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, false) => (LangItem::U128SubFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, true) => (LangItem::I128MulFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, false) => (LangItem::U128MulFnLangItem, RhsKind::Unchanged),\n (BinOp::Div, true) => (LangItem::I128DivFnLangItem, RhsKind::Unchanged),\n (BinOp::Div, false) => (LangItem::U128DivFnLangItem, RhsKind::Unchanged),\n (BinOp::Rem, true) => (LangItem::I128RemFnLangItem, RhsKind::Unchanged),\n (BinOp::Rem, false) => (LangItem::U128RemFnLangItem, RhsKind::Unchanged),\n (BinOp::Shl, true) => (LangItem::I128ShlFnLangItem, RhsKind::ForceU32),\n (BinOp::Shl, false) => (LangItem::U128ShlFnLangItem, RhsKind::ForceU32),\n (BinOp::Shr, true) => (LangItem::I128ShrFnLangItem, RhsKind::ForceU32),\n (BinOp::Shr, false) => (LangItem::U128ShrFnLangItem, RhsKind::ForceU32),\n _ => return None,\n };\n Some(i)\n}\n\nfn item_for_checked_op(bin_op: BinOp, is_signed: bool) -> Option<(LangItem, RhsKind)> {\n let i = match (bin_op, is_signed) {\n (BinOp::Add, true) => 
(LangItem::I128AddoFnLangItem, RhsKind::Unchanged),\n (BinOp::Add, false) => (LangItem::U128AddoFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, true) => (LangItem::I128SuboFnLangItem, RhsKind::Unchanged),\n (BinOp::Sub, false) => (LangItem::U128SuboFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, true) => (LangItem::I128MuloFnLangItem, RhsKind::Unchanged),\n (BinOp::Mul, false) => (LangItem::U128MuloFnLangItem, RhsKind::Unchanged),\n (BinOp::Shl, true) => (LangItem::I128ShloFnLangItem, RhsKind::ForceU128),\n (BinOp::Shl, false) => (LangItem::U128ShloFnLangItem, RhsKind::ForceU128),\n (BinOp::Shr, true) => (LangItem::I128ShroFnLangItem, RhsKind::ForceU128),\n (BinOp::Shr, false) => (LangItem::U128ShroFnLangItem, RhsKind::ForceU128),\n _ => bug!(\"That should be all the checked ones?\"),\n };\n Some(i)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Tests for build.rs rerun-if-env-changed and rustc-env\n\nuse cargo_test_support::basic_manifest;\nuse cargo_test_support::project;\nuse cargo_test_support::sleep_ms;\n\n#[cargo_test]\nfn rerun_if_env_changes() {\n let p = project()\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \"build.rs\",\n r#\"\n fn main() {\n println!(\"cargo:rerun-if-env-changed=FOO\");\n }\n \"#,\n )\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"baz\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"baz\")\n .with_stderr(\"[FINISHED] [..]\")\n .run();\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn rerun_if_env_or_file_changes() {\n let p = project()\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \"build.rs\",\n r#\"\n fn main() {\n println!(\"cargo:rerun-if-env-changed=FOO\");\n println!(\"cargo:rerun-if-changed=foo\");\n }\n \"#,\n )\n .file(\"foo\", \"\")\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\"[FINISHED] [..]\")\n .run();\n sleep_ms(1000);\n p.change_file(\"foo\", \"\");\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn rustc_bootstrap() {\n let build_rs = r#\"\n fn main() {\n println!(\"cargo:rustc-env=RUSTC_BOOTSTRAP=1\");\n }\n \"#;\n let p = project()\n .file(\"Cargo.toml\", &basic_manifest(\"has-dashes\", \"0.0.1\"))\n .file(\"src\/lib.rs\", \"#![feature(rustc_attrs)]\")\n .file(\"build.rs\", build_rs)\n .build();\n \/\/ RUSTC_BOOTSTRAP unset on stable should error\n p.cargo(\"build\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\n \"help: [..] 
set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]\",\n )\n .with_status(101)\n .run();\n \/\/ nightly should warn whether or not RUSTC_BOOTSTRAP is set\n p.cargo(\"build\")\n .masquerade_as_nightly_cargo(&[\"RUSTC_BOOTSTRAP\"])\n \/\/ NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc\n \/\/ (this matters when tests are being run with a beta or stable cargo)\n .env(\"RUSTC_BOOTSTRAP\", \"1\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP set to the name of the library should warn\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"has_dashes\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP set to some random value should error\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"bar\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\n \"help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]\",\n )\n .with_status(101)\n .run();\n\n \/\/ Tests for binaries instead of libraries\n let p = project()\n .file(\"Cargo.toml\", &basic_manifest(\"foo\", \"0.0.1\"))\n .file(\"src\/main.rs\", \"#![feature(rustc_attrs)] fn main() {}\")\n .file(\"build.rs\", build_rs)\n .build();\n \/\/ nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set\n p.cargo(\"build\")\n .masquerade_as_nightly_cargo(&[\"RUSTC_BOOTSTRAP\"])\n \/\/ NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc\n \/\/ (this matters when tests are being run with a beta or stable cargo)\n .env(\"RUSTC_BOOTSTRAP\", \"1\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP conditionally set when there's no library should error (regardless of the value)\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"foo\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\"help: [..] set the environment variable `RUSTC_BOOTSTRAP=1` [..]\")\n .with_status(101)\n .run();\n}\n<commit_msg>add failing test for CARGO_CFG_TARGET_FEATURE<commit_after>\/\/! 
Tests for build.rs rerun-if-env-changed and rustc-env\n\nuse cargo_test_support::basic_manifest;\nuse cargo_test_support::project;\nuse cargo_test_support::sleep_ms;\n\n#[cargo_test]\nfn rerun_if_env_changes() {\n let p = project()\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \"build.rs\",\n r#\"\n fn main() {\n println!(\"cargo:rerun-if-env-changed=FOO\");\n }\n \"#,\n )\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"baz\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"baz\")\n .with_stderr(\"[FINISHED] [..]\")\n .run();\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn rerun_if_env_or_file_changes() {\n let p = project()\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \"build.rs\",\n r#\"\n fn main() {\n println!(\"cargo:rerun-if-env-changed=FOO\");\n println!(\"cargo:rerun-if-changed=foo\");\n }\n \"#,\n )\n .file(\"foo\", \"\")\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\"[FINISHED] [..]\")\n .run();\n sleep_ms(1000);\n p.change_file(\"foo\", \"\");\n p.cargo(\"build\")\n .env(\"FOO\", \"bar\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.0.1 ([..])\n[FINISHED] [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn rustc_bootstrap() {\n let build_rs = r#\"\n fn main() {\n println!(\"cargo:rustc-env=RUSTC_BOOTSTRAP=1\");\n }\n \"#;\n let p = project()\n .file(\"Cargo.toml\", &basic_manifest(\"has-dashes\", \"0.0.1\"))\n .file(\"src\/lib.rs\", \"#![feature(rustc_attrs)]\")\n .file(\"build.rs\", build_rs)\n .build();\n \/\/ RUSTC_BOOTSTRAP unset on stable should error\n p.cargo(\"build\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\n \"help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]\",\n )\n .with_status(101)\n .run();\n \/\/ nightly should warn whether or not RUSTC_BOOTSTRAP is set\n p.cargo(\"build\")\n .masquerade_as_nightly_cargo(&[\"RUSTC_BOOTSTRAP\"])\n \/\/ NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc\n \/\/ (this matters when tests are being run with a beta or stable cargo)\n .env(\"RUSTC_BOOTSTRAP\", \"1\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP set to the name of the library should warn\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"has_dashes\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP set to some random value should error\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"bar\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\n \"help: [..] 
set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]\",\n )\n .with_status(101)\n .run();\n\n \/\/ Tests for binaries instead of libraries\n let p = project()\n .file(\"Cargo.toml\", &basic_manifest(\"foo\", \"0.0.1\"))\n .file(\"src\/main.rs\", \"#![feature(rustc_attrs)] fn main() {}\")\n .file(\"build.rs\", build_rs)\n .build();\n \/\/ nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set\n p.cargo(\"build\")\n .masquerade_as_nightly_cargo(&[\"RUSTC_BOOTSTRAP\"])\n \/\/ NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc\n \/\/ (this matters when tests are being run with a beta or stable cargo)\n .env(\"RUSTC_BOOTSTRAP\", \"1\")\n .with_stderr_contains(\"warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .run();\n \/\/ RUSTC_BOOTSTRAP conditionally set when there's no library should error (regardless of the value)\n p.cargo(\"build\")\n .env(\"RUSTC_BOOTSTRAP\", \"foo\")\n .with_stderr_contains(\"error: Cannot set `RUSTC_BOOTSTRAP=1` [..]\")\n .with_stderr_contains(\"help: [..] set the environment variable `RUSTC_BOOTSTRAP=1` [..]\")\n .with_status(101)\n .run();\n}\n\n#[cargo_test]\n#[cfg(target_arch = \"x86_64\")]\nfn build_script_sees_cfg_target_feature() {\n let build_rs = r#\"\n fn main() {\n let cfg = std::env::var(\"CARGO_CFG_TARGET_FEATURE\").unwrap();\n eprintln!(\"CARGO_CFG_TARGET_FEATURE={cfg}\");\n }\n \"#;\n\n let configs = [\n r#\"\n [build]\n rustflags = [\"-Ctarget-feature=+sse4.1,+sse4.2\"]\n \"#,\n r#\"\n [target.'cfg(target_arch = \"x86_64\")']\n rustflags = [\"-Ctarget-feature=+sse4.1,+sse4.2\"]\n \"#,\n ];\n\n for config in configs {\n let p = project()\n .file(\".cargo\/config.toml\", config)\n .file(\"src\/lib.rs\", r#\"\"#)\n .file(\"build.rs\", build_rs)\n .build();\n\n p.cargo(\"build -vv\")\n .with_stderr_contains(\"[foo 0.0.1] CARGO_CFG_TARGET_FEATURE=[..]sse4.2[..]\")\n .with_stderr_contains(\"[..]-Ctarget-feature=[..]+sse4.2[..]\")\n .run();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use 'format' in 'raw_upload' example instead of checking Content-Type directly.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #32<commit_after>extern mod std;\nuse std::sort::{ merge_sort };\nuse std::map::{ HashMap, set_add };\n\n\/\/ possible num of digits combinations\n\/\/ 1 x 1 = 7 : NG 10 * 10\n\/\/ 1 x 2 = 6 : NG 10 * 100\n\/\/ 1 x 3 = 5 : NG 10 * 1000 = 10000\n\/\/ 1 x 4 = 4 : OK\n\/\/ 2 x 2 = 5 : NG 100 * 100 = 10000\n\/\/ 2 x 3 = 4 : OK\n\/\/ 3 x 3 = 3 : NG\n\npure fn num_to_digits(n: uint) -> ~[uint] {\n let buf = [mut\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0\n ]\/64;\n let mut filled_idx = buf.len();\n let mut itr = n;\n while itr != 0 {\n buf[filled_idx - 1] = itr % 10;\n filled_idx -= 1;\n itr \/= 10;\n }\n return vec::from_slice(vec::view(buf, filled_idx, buf.len()));\n}\n\npure fn digits_to_num(v: &[uint]) -> uint {\n let mut num = 0;\n for v.each |n| {\n num *= 10;\n num += *n;\n }\n return num;\n}\n\npure fn fill_zero(v: &[uint], n: uint) -> ~[uint] {\n assert n >= v.len();\n vec::from_elem(n - v.len(), 0) + v\n}\n\npure fn permutate_num(digits: &[uint], len: uint, min: uint, max: uint,\n f: fn(uint, &[uint])->bool) {\n let min_vec = fill_zero(num_to_digits(min), len);\n let max_vec = fill_zero(num_to_digits(max), len);\n perm_sub(digits, len, to_some(min_vec), to_some(max_vec), f);\n\n pure fn 
to_some(v: &a\/[uint]) -> Option<&a\/[uint]> { Some(v) }\n \n pure fn perm_sub(digits: &[uint], len: uint, min: Option<&[uint]>, max: Option<&[uint]>,\n f: fn(uint, &[uint])->bool) {\n if len == 0 {\n f(0, digits);\n return;\n }\n\n let unit = {\n let mut tmp = 1;\n for (len-1).times { tmp *= 10 }\n tmp\n };\n\n let buf = vec::to_mut(vec::from_elem(digits.len() - 1, 0));\n \n for digits.eachi |i, np| {\n let n = *np;\n\n let min_vec = match min {\n Some(v) if n < v[0] => loop,\n Some(v) if n == v[0] => Some(vec::view(v, 1, v.len())),\n _ => None\n };\n let max_vec = match max {\n Some(v) if n > v[0] => loop,\n Some(v) if n == v[0] => Some(vec::view(v, 1, v.len())),\n _ => None\n };\n\n for uint::range(0, i) |j| { buf[j] = digits[j]; }\n for uint::range(i, buf.len()) |j| { buf[j] = digits[j + 1]; }\n for perm_sub(buf, len - 1, min_vec, max_vec) |num, ds| {\n if !f(num + n * unit, ds) { return; }\n }\n }\n }\n}\n\nfn main() {\n let digits = &[1, 2, 3, 4, 5, 6, 7, 8, 9];\n let mut answer = HashMap::<uint, ()>();\n\n \/\/ 1 x 4 = 4\n \/\/ a b = c\n \/\/ 1 < a < 10\n \/\/ 1000 < b < 10000\n \/\/ 1000 < c = ab < 10000 => 1000 \/ a < b < 10000 \/ a\n \/\/ => 1000 < b < 10000 \/ a\n for permutate_num(digits, 1, 0, 9) |a, ds| {\n for permutate_num(ds, 4, 1000, 9999 \/ a) |b, ds| {\n let c = a * b;\n let c_digits = merge_sort(|a, b| a <= b, num_to_digits(c));\n if vec::eq(c_digits, ds) { set_add(answer, c); }\n }\n }\n\n \/\/ 2 x 3 = 4\n \/\/ a b = c\n \/\/ 10 < a < 100\n \/\/ 100 < b < 1000\n \/\/ 1000 < c = ab < 10000 => 1000 \/ a < b < 10000 \/ a\n \/\/ => 100 < b < 10000 \/ a\n for permutate_num(digits, 2, 10, 99) |a, ds| {\n for permutate_num(ds, 3, 100, 9999 \/ a) |b, ds| {\n let c = a * b;\n let c_digits = merge_sort(|a, b| a <= b, num_to_digits(c));\n if vec::eq(c_digits, ds) { set_add(answer, c); }\n }\n }\n\n let mut sum = 0;\n for answer.each_key |c| {\n sum += c;\n }\n io::println(fmt!(\"%u\", sum));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse std::io;\nuse std::process::Command;\nuse target::{LinkArgs, TargetOptions};\n\nuse self::Arch::*;\n\n#[allow(non_camel_case_types)]\n#[derive(Copy, Clone)]\npub enum Arch {\n Armv7,\n Armv7s,\n Arm64,\n I386,\n X86_64\n}\n\nimpl Arch {\n pub fn to_string(&self) -> &'static str {\n match self {\n &Armv7 => \"armv7\",\n &Armv7s => \"armv7s\",\n &Arm64 => \"arm64\",\n &I386 => \"i386\",\n &X86_64 => \"x86_64\"\n }\n }\n}\n\npub fn get_sdk_root(sdk_name: &str) -> Result<String, String> {\n let res = Command::new(\"xcrun\")\n .arg(\"--show-sdk-path\")\n .arg(\"-sdk\")\n .arg(sdk_name)\n .output()\n .and_then(|output| {\n if output.status.success() {\n Ok(String::from_utf8(output.stdout).unwrap())\n } else {\n let error = String::from_utf8(output.stderr);\n let error = format!(\"process exit with error: {}\",\n error.unwrap());\n Err(io::Error::new(io::ErrorKind::Other,\n &error[..]))\n }\n });\n\n match res {\n Ok(output) => Ok(output.trim().to_string()),\n Err(e) => Err(format!(\"failed to get {} SDK path: {}\", sdk_name, e))\n }\n}\n\nfn build_pre_link_args(arch: Arch) -> Result<LinkArgs, String> {\n let sdk_name = match arch {\n Armv7 | Armv7s | Arm64 => \"iphoneos\",\n I386 | X86_64 => \"iphonesimulator\"\n };\n\n let arch_name = arch.to_string();\n\n let sdk_root = get_sdk_root(sdk_name)?;\n\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Gcc,\n vec![\"-arch\".to_string(),\n arch_name.to_string(),\n \"-Wl,-syslibroot\".to_string(),\n sdk_root]);\n\n Ok(args)\n}\n\nfn target_cpu(arch: Arch) -> String {\n match arch {\n Armv7 => \"cortex-a8\", \/\/ iOS7 is supported on iPhone 4 and higher\n Armv7s => \"cortex-a9\",\n Arm64 => \"cyclone\",\n I386 => \"yonah\",\n X86_64 => \"core2\",\n }.to_string()\n}\n\npub fn opts(arch: Arch) -> Result<TargetOptions, String> {\n let pre_link_args = build_pre_link_args(arch)?;\n Ok(TargetOptions {\n cpu: target_cpu(arch),\n dynamic_linking: false,\n executables: true,\n pre_link_args,\n has_elf_tls: false,\n .. super::apple_base::opts()\n })\n}\n<commit_msg>disable jemalloc on exectuable for ios targets<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse std::io;\nuse std::process::Command;\nuse target::{LinkArgs, TargetOptions};\n\nuse self::Arch::*;\n\n#[allow(non_camel_case_types)]\n#[derive(Copy, Clone)]\npub enum Arch {\n Armv7,\n Armv7s,\n Arm64,\n I386,\n X86_64\n}\n\nimpl Arch {\n pub fn to_string(&self) -> &'static str {\n match self {\n &Armv7 => \"armv7\",\n &Armv7s => \"armv7s\",\n &Arm64 => \"arm64\",\n &I386 => \"i386\",\n &X86_64 => \"x86_64\"\n }\n }\n}\n\npub fn get_sdk_root(sdk_name: &str) -> Result<String, String> {\n let res = Command::new(\"xcrun\")\n .arg(\"--show-sdk-path\")\n .arg(\"-sdk\")\n .arg(sdk_name)\n .output()\n .and_then(|output| {\n if output.status.success() {\n Ok(String::from_utf8(output.stdout).unwrap())\n } else {\n let error = String::from_utf8(output.stderr);\n let error = format!(\"process exit with error: {}\",\n error.unwrap());\n Err(io::Error::new(io::ErrorKind::Other,\n &error[..]))\n }\n });\n\n match res {\n Ok(output) => Ok(output.trim().to_string()),\n Err(e) => Err(format!(\"failed to get {} SDK path: {}\", sdk_name, e))\n }\n}\n\nfn build_pre_link_args(arch: Arch) -> Result<LinkArgs, String> {\n let sdk_name = match arch {\n Armv7 | Armv7s | Arm64 => \"iphoneos\",\n I386 | X86_64 => \"iphonesimulator\"\n };\n\n let arch_name = arch.to_string();\n\n let sdk_root = get_sdk_root(sdk_name)?;\n\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Gcc,\n vec![\"-arch\".to_string(),\n arch_name.to_string(),\n \"-Wl,-syslibroot\".to_string(),\n sdk_root]);\n\n Ok(args)\n}\n\nfn target_cpu(arch: Arch) -> String {\n match arch {\n Armv7 => \"cortex-a8\", \/\/ iOS7 is supported on iPhone 4 and higher\n Armv7s => \"cortex-a9\",\n Arm64 => \"cyclone\",\n I386 => \"yonah\",\n X86_64 => \"core2\",\n }.to_string()\n}\n\npub fn opts(arch: Arch) -> Result<TargetOptions, String> {\n let pre_link_args = build_pre_link_args(arch)?;\n Ok(TargetOptions {\n cpu: target_cpu(arch),\n dynamic_linking: false,\n executables: true,\n pre_link_args,\n has_elf_tls: false,\n exe_allocation_crate: None,\n .. super::apple_base::opts()\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use or_else() instead of or()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>fn make_squares_list(n: i32) -> Vec<i32> {\n let mut re: Vec<i32> = vec![];\n for i in 0..n {\n let temp = i * i;\n if temp > n {\n break;\n } else {\n re.push(temp);\n }\n }\n\n re\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>lexer: fix case when an identifier is terminated by a comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adjust time source start time when coming out of the debugger<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
A type representing either success or failure\n\n\/\/ NB: transitionary, de-mode-ing.\n\n#[forbid(deprecated_mode)];\n#[forbid(deprecated_pattern)];\n\nuse cmp::Eq;\nuse either::Either;\n\n\/\/\/ The result type\n#[deriving_eq]\npub enum Result<T, U> {\n \/\/\/ Contains the successful result value\n Ok(T),\n \/\/\/ Contains the error value\n Err(U)\n}\n\n\/**\n * Get the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\npub pure fn get<T: Copy, U>(res: &Result<T, U>) -> T {\n match *res {\n Ok(copy t) => t,\n Err(ref the_err) => unsafe {\n fail fmt!(\"get called on error result: %?\", *the_err)\n }\n }\n}\n\n\/**\n * Get a reference to the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\npub pure fn get_ref<T, U>(res: &a\/Result<T, U>) -> &a\/T {\n match *res {\n Ok(ref t) => t,\n Err(ref the_err) => unsafe {\n fail fmt!(\"get_ref called on error result: %?\", *the_err)\n }\n }\n}\n\n\/**\n * Get the value out of an error result\n *\n * # Failure\n *\n * If the result is not an error\n *\/\npub pure fn get_err<T, U: Copy>(res: &Result<T, U>) -> U {\n match *res {\n Err(copy u) => u,\n Ok(_) => fail ~\"get_err called on ok result\"\n }\n}\n\n\/\/\/ Returns true if the result is `ok`\npub pure fn is_ok<T, U>(res: &Result<T, U>) -> bool {\n match *res {\n Ok(_) => true,\n Err(_) => false\n }\n}\n\n\/\/\/ Returns true if the result is `err`\npub pure fn is_err<T, U>(res: &Result<T, U>) -> bool {\n !is_ok(res)\n}\n\n\/**\n * Convert to the `either` type\n *\n * `ok` result variants are converted to `either::right` variants, `err`\n * result variants are converted to `either::left`.\n *\/\npub pure fn to_either<T: Copy, U: Copy>(res: &Result<U, T>)\n -> Either<T, U> {\n match *res {\n Ok(copy res) => either::Right(res),\n Err(copy fail_) => either::Left(fail_)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `err` then it is immediately\n * returned. This function can be used to compose the results of two\n * functions.\n *\n * Example:\n *\n * let res = chain(read_file(file)) { |buf|\n * ok(parse_bytes(buf))\n * }\n *\/\npub pure fn chain<T, U, V>(res: Result<T, V>, op: fn(T)\n -> Result<U, V>) -> Result<U, V> {\n match move res {\n Ok(move t) => op(move t),\n Err(move e) => Err(move e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `res` is `ok` then it is\n * immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\npub pure fn chain_err<T, U, V>(\n res: Result<T, V>,\n op: fn(t: V) -> Result<T, U>)\n -> Result<T, U> {\n match move res {\n Ok(move t) => Ok(move t),\n Err(move v) => op(move v)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `err` then it is immediately\n * returned. 
This function can be used to compose the results of two\n * functions.\n *\n * Example:\n *\n * iter(read_file(file)) { |buf|\n * print_buf(buf)\n * }\n *\/\npub pure fn iter<T, E>(res: &Result<T, E>, f: fn(&T)) {\n match *res {\n Ok(ref t) => f(t),\n Err(_) => ()\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `ok` then it is immediately returned.\n * This function can be used to pass through a successful result while\n * handling an error.\n *\/\npub pure fn iter_err<T, E>(res: &Result<T, E>, f: fn(&E)) {\n match *res {\n Ok(_) => (),\n Err(ref e) => f(e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is wrapped in `ok` and returned. if `res` is `err` then it is\n * immediately returned. This function can be used to compose the results of\n * two functions.\n *\n * Example:\n *\n * let res = map(read_file(file)) { |buf|\n * parse_bytes(buf)\n * }\n *\/\npub pure fn map<T, E: Copy, U: Copy>(res: &Result<T, E>, op: fn(&T) -> U)\n -> Result<U, E> {\n match *res {\n Ok(ref t) => Ok(op(t)),\n Err(copy e) => Err(e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op` whereupon\n * `op`s result is wrapped in an `err` and returned. if `res` is `ok` then it\n * is immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\npub pure fn map_err<T: Copy, E, F: Copy>(res: &Result<T, E>, op: fn(&E) -> F)\n -> Result<T, F> {\n match *res {\n Ok(copy t) => Ok(t),\n Err(ref e) => Err(op(e))\n }\n}\n\nimpl<T, E> Result<T, E> {\n #[inline(always)]\n pure fn get_ref(&self) -> &self\/T { get_ref(self) }\n\n #[inline(always)]\n pure fn is_ok(&self) -> bool { is_ok(self) }\n\n #[inline(always)]\n pure fn is_err(&self) -> bool { is_err(self) }\n\n #[inline(always)]\n pure fn iter(&self, f: fn(&T)) { iter(self, f) }\n\n #[inline(always)]\n pure fn iter_err(&self, f: fn(&E)) { iter_err(self, f) }\n\n #[inline(always)]\n pure fn unwrap(self) -> T { unwrap(self) }\n\n #[inline(always)]\n pure fn unwrap_err(self) -> T { unwrap(self) }\n\n #[inline(always)]\n pure fn chain<U>(self, op: fn(T) -> Result<U,E>) -> Result<U,E> {\n chain(self, op)\n }\n\n #[inline(always)]\n pure fn chain_err<F>(self, op: fn(E) -> Result<T,F>) -> Result<T,F> {\n chain_err(self, op)\n }\n}\n\nimpl<T: Copy, E> Result<T, E> {\n #[inline(always)]\n pure fn get(&self) -> T { get(self) }\n\n #[inline(always)]\n pure fn map_err<F:Copy>(&self, op: fn(&E) -> F) -> Result<T,F> {\n map_err(self, op)\n }\n}\n\nimpl<T, E: Copy> Result<T, E> {\n #[inline(always)]\n pure fn get_err(&self) -> E { get_err(self) }\n\n #[inline(always)]\n pure fn map<U:Copy>(&self, op: fn(&T) -> U) -> Result<U,E> {\n map(self, op)\n }\n}\n\n\/**\n * Maps each element in the vector `ts` using the operation `op`. 
Should an\n * error occur, no further mappings are performed and the error is returned.\n * Should no error occur, a vector containing the result of each map is\n * returned.\n *\n * Here is an example which increments every integer in a vector,\n * checking for overflow:\n *\n * fn inc_conditionally(x: uint) -> result<uint,str> {\n * if x == uint::max_value { return err(\"overflow\"); }\n * else { return ok(x+1u); }\n * }\n * map(~[1u, 2u, 3u], inc_conditionally).chain {|incd|\n * assert incd == ~[2u, 3u, 4u];\n * }\n *\/\npub fn map_vec<T,U:Copy,V:Copy>(\n ts: &[T], op: fn(&T) -> Result<V,U>) -> Result<~[V],U> {\n\n let mut vs: ~[V] = vec::with_capacity(vec::len(ts));\n for vec::each(ts) |t| {\n match op(t) {\n Ok(copy v) => vs.push(v),\n Err(copy u) => return Err(u)\n }\n }\n return Ok(move vs);\n}\n\npub fn map_opt<T,U:Copy,V:Copy>(\n o_t: &Option<T>, op: fn(&T) -> Result<V,U>) -> Result<Option<V>,U> {\n\n match *o_t {\n None => Ok(None),\n Some(ref t) => match op(t) {\n Ok(copy v) => Ok(Some(v)),\n Err(copy e) => Err(e)\n }\n }\n}\n\n\/**\n * Same as map, but it operates over two parallel vectors.\n *\n * A precondition is used here to ensure that the vectors are the same\n * length. While we do not often use preconditions in the standard\n * library, a precondition is used here because result::t is generally\n * used in 'careful' code contexts where it is both appropriate and easy\n * to accommodate an error like the vectors being of different lengths.\n *\/\npub fn map_vec2<S,T,U:Copy,V:Copy>(ss: &[S], ts: &[T],\n op: fn(&S,&T) -> Result<V,U>) -> Result<~[V],U> {\n\n assert vec::same_length(ss, ts);\n let n = vec::len(ts);\n let mut vs = vec::with_capacity(n);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(copy v) => vs.push(v),\n Err(copy u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(move vs);\n}\n\n\/**\n * Applies op to the pairwise elements from `ss` and `ts`, aborting on\n * error. 
This could be implemented using `map2()` but it is more efficient\n * on its own as no result vector is built.\n *\/\npub fn iter_vec2<S,T,U:Copy>(ss: &[S], ts: &[T],\n op: fn(&S,&T) -> Result<(),U>) -> Result<(),U> {\n\n assert vec::same_length(ss, ts);\n let n = vec::len(ts);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(()) => (),\n Err(copy u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(());\n}\n\n\/\/\/ Unwraps a result, assuming it is an `ok(T)`\n#[inline(always)]\npub pure fn unwrap<T, U>(res: Result<T, U>) -> T {\n match move res {\n Ok(move t) => move t,\n Err(_) => fail ~\"unwrap called on an err result\"\n }\n}\n\n\/\/\/ Unwraps a result, assuming it is an `err(U)`\n#[inline(always)]\npub pure fn unwrap_err<T, U>(res: Result<T, U>) -> U {\n match move res {\n Err(move u) => move u,\n Ok(_) => fail ~\"unwrap called on an ok result\"\n }\n}\n\n#[cfg(test)]\n#[allow(non_implicitly_copyable_typarams)]\nmod tests {\n #[legacy_exports];\n fn op1() -> result::Result<int, ~str> { result::Ok(666) }\n\n fn op2(i: int) -> result::Result<uint, ~str> {\n result::Ok(i as uint + 1u)\n }\n\n fn op3() -> result::Result<int, ~str> { result::Err(~\"sadface\") }\n\n #[test]\n fn chain_success() {\n assert get(&chain(op1(), op2)) == 667u;\n }\n\n #[test]\n fn chain_failure() {\n assert get_err(&chain(op3(), op2)) == ~\"sadface\";\n }\n\n #[test]\n fn test_impl_iter() {\n let mut valid = false;\n Ok::<~str, ~str>(~\"a\").iter(|_x| valid = true);\n assert valid;\n\n Err::<~str, ~str>(~\"b\").iter(|_x| valid = false);\n assert valid;\n }\n\n #[test]\n fn test_impl_iter_err() {\n let mut valid = true;\n Ok::<~str, ~str>(~\"a\").iter_err(|_x| valid = false);\n assert valid;\n\n valid = false;\n Err::<~str, ~str>(~\"b\").iter_err(|_x| valid = true);\n assert valid;\n }\n\n #[test]\n fn test_impl_map() {\n assert Ok::<~str, ~str>(~\"a\").map(|_x| ~\"b\") == Ok(~\"b\");\n assert Err::<~str, ~str>(~\"a\").map(|_x| ~\"b\") == Err(~\"a\");\n }\n\n #[test]\n fn test_impl_map_err() {\n assert Ok::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\") == Ok(~\"a\");\n assert Err::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\") == Err(~\"b\");\n }\n\n #[test]\n fn test_get_ref_method() {\n let foo: Result<int, ()> = Ok(100);\n assert *foo.get_ref() == 100;\n }\n}\n<commit_msg>Fix Option::unwrap_err.<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
A type representing either success or failure\n\n\/\/ NB: transitionary, de-mode-ing.\n\n#[forbid(deprecated_mode)];\n#[forbid(deprecated_pattern)];\n\nuse cmp::Eq;\nuse either::Either;\n\n\/\/\/ The result type\n#[deriving_eq]\npub enum Result<T, U> {\n \/\/\/ Contains the successful result value\n Ok(T),\n \/\/\/ Contains the error value\n Err(U)\n}\n\n\/**\n * Get the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\npub pure fn get<T: Copy, U>(res: &Result<T, U>) -> T {\n match *res {\n Ok(copy t) => t,\n Err(ref the_err) => unsafe {\n fail fmt!(\"get called on error result: %?\", *the_err)\n }\n }\n}\n\n\/**\n * Get a reference to the value out of a successful result\n *\n * # Failure\n *\n * If the result is an error\n *\/\npub pure fn get_ref<T, U>(res: &a\/Result<T, U>) -> &a\/T {\n match *res {\n Ok(ref t) => t,\n Err(ref the_err) => unsafe {\n fail fmt!(\"get_ref called on error result: %?\", *the_err)\n }\n }\n}\n\n\/**\n * Get the value out of an error result\n *\n * # Failure\n *\n * If the result is not an error\n *\/\npub pure fn get_err<T, U: Copy>(res: &Result<T, U>) -> U {\n match *res {\n Err(copy u) => u,\n Ok(_) => fail ~\"get_err called on ok result\"\n }\n}\n\n\/\/\/ Returns true if the result is `ok`\npub pure fn is_ok<T, U>(res: &Result<T, U>) -> bool {\n match *res {\n Ok(_) => true,\n Err(_) => false\n }\n}\n\n\/\/\/ Returns true if the result is `err`\npub pure fn is_err<T, U>(res: &Result<T, U>) -> bool {\n !is_ok(res)\n}\n\n\/**\n * Convert to the `either` type\n *\n * `ok` result variants are converted to `either::right` variants, `err`\n * result variants are converted to `either::left`.\n *\/\npub pure fn to_either<T: Copy, U: Copy>(res: &Result<U, T>)\n -> Either<T, U> {\n match *res {\n Ok(copy res) => either::Right(res),\n Err(copy fail_) => either::Left(fail_)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `err` then it is immediately\n * returned. This function can be used to compose the results of two\n * functions.\n *\n * Example:\n *\n * let res = chain(read_file(file)) { |buf|\n * ok(parse_bytes(buf))\n * }\n *\/\npub pure fn chain<T, U, V>(res: Result<T, V>, op: fn(T)\n -> Result<U, V>) -> Result<U, V> {\n match move res {\n Ok(move t) => op(move t),\n Err(move e) => Err(move e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op`\n * whereupon `op`s result is returned. if `res` is `ok` then it is\n * immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\npub pure fn chain_err<T, U, V>(\n res: Result<T, V>,\n op: fn(t: V) -> Result<T, U>)\n -> Result<T, U> {\n match move res {\n Ok(move t) => Ok(move t),\n Err(move v) => op(move v)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `err` then it is immediately\n * returned. 
This function can be used to compose the results of two\n * functions.\n *\n * Example:\n *\n * iter(read_file(file)) { |buf|\n * print_buf(buf)\n * }\n *\/\npub pure fn iter<T, E>(res: &Result<T, E>, f: fn(&T)) {\n match *res {\n Ok(ref t) => f(t),\n Err(_) => ()\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op` whereupon\n * `op`s result is returned. if `res` is `ok` then it is immediately returned.\n * This function can be used to pass through a successful result while\n * handling an error.\n *\/\npub pure fn iter_err<T, E>(res: &Result<T, E>, f: fn(&E)) {\n match *res {\n Ok(_) => (),\n Err(ref e) => f(e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `ok` then the value is extracted and passed to `op` whereupon\n * `op`s result is wrapped in `ok` and returned. if `res` is `err` then it is\n * immediately returned. This function can be used to compose the results of\n * two functions.\n *\n * Example:\n *\n * let res = map(read_file(file)) { |buf|\n * parse_bytes(buf)\n * }\n *\/\npub pure fn map<T, E: Copy, U: Copy>(res: &Result<T, E>, op: fn(&T) -> U)\n -> Result<U, E> {\n match *res {\n Ok(ref t) => Ok(op(t)),\n Err(copy e) => Err(e)\n }\n}\n\n\/**\n * Call a function based on a previous result\n *\n * If `res` is `err` then the value is extracted and passed to `op` whereupon\n * `op`s result is wrapped in an `err` and returned. if `res` is `ok` then it\n * is immediately returned. This function can be used to pass through a\n * successful result while handling an error.\n *\/\npub pure fn map_err<T: Copy, E, F: Copy>(res: &Result<T, E>, op: fn(&E) -> F)\n -> Result<T, F> {\n match *res {\n Ok(copy t) => Ok(t),\n Err(ref e) => Err(op(e))\n }\n}\n\nimpl<T, E> Result<T, E> {\n #[inline(always)]\n pure fn get_ref(&self) -> &self\/T { get_ref(self) }\n\n #[inline(always)]\n pure fn is_ok(&self) -> bool { is_ok(self) }\n\n #[inline(always)]\n pure fn is_err(&self) -> bool { is_err(self) }\n\n #[inline(always)]\n pure fn iter(&self, f: fn(&T)) { iter(self, f) }\n\n #[inline(always)]\n pure fn iter_err(&self, f: fn(&E)) { iter_err(self, f) }\n\n #[inline(always)]\n pure fn unwrap(self) -> T { unwrap(self) }\n\n #[inline(always)]\n pure fn unwrap_err(self) -> E { unwrap_err(self) }\n\n #[inline(always)]\n pure fn chain<U>(self, op: fn(T) -> Result<U,E>) -> Result<U,E> {\n chain(self, op)\n }\n\n #[inline(always)]\n pure fn chain_err<F>(self, op: fn(E) -> Result<T,F>) -> Result<T,F> {\n chain_err(self, op)\n }\n}\n\nimpl<T: Copy, E> Result<T, E> {\n #[inline(always)]\n pure fn get(&self) -> T { get(self) }\n\n #[inline(always)]\n pure fn map_err<F:Copy>(&self, op: fn(&E) -> F) -> Result<T,F> {\n map_err(self, op)\n }\n}\n\nimpl<T, E: Copy> Result<T, E> {\n #[inline(always)]\n pure fn get_err(&self) -> E { get_err(self) }\n\n #[inline(always)]\n pure fn map<U:Copy>(&self, op: fn(&T) -> U) -> Result<U,E> {\n map(self, op)\n }\n}\n\n\/**\n * Maps each element in the vector `ts` using the operation `op`. 
Should an\n * error occur, no further mappings are performed and the error is returned.\n * Should no error occur, a vector containing the result of each map is\n * returned.\n *\n * Here is an example which increments every integer in a vector,\n * checking for overflow:\n *\n * fn inc_conditionally(x: uint) -> result<uint,str> {\n * if x == uint::max_value { return err(\"overflow\"); }\n * else { return ok(x+1u); }\n * }\n * map(~[1u, 2u, 3u], inc_conditionally).chain {|incd|\n * assert incd == ~[2u, 3u, 4u];\n * }\n *\/\npub fn map_vec<T,U:Copy,V:Copy>(\n ts: &[T], op: fn(&T) -> Result<V,U>) -> Result<~[V],U> {\n\n let mut vs: ~[V] = vec::with_capacity(vec::len(ts));\n for vec::each(ts) |t| {\n match op(t) {\n Ok(copy v) => vs.push(v),\n Err(copy u) => return Err(u)\n }\n }\n return Ok(move vs);\n}\n\npub fn map_opt<T,U:Copy,V:Copy>(\n o_t: &Option<T>, op: fn(&T) -> Result<V,U>) -> Result<Option<V>,U> {\n\n match *o_t {\n None => Ok(None),\n Some(ref t) => match op(t) {\n Ok(copy v) => Ok(Some(v)),\n Err(copy e) => Err(e)\n }\n }\n}\n\n\/**\n * Same as map, but it operates over two parallel vectors.\n *\n * A precondition is used here to ensure that the vectors are the same\n * length. While we do not often use preconditions in the standard\n * library, a precondition is used here because result::t is generally\n * used in 'careful' code contexts where it is both appropriate and easy\n * to accommodate an error like the vectors being of different lengths.\n *\/\npub fn map_vec2<S,T,U:Copy,V:Copy>(ss: &[S], ts: &[T],\n op: fn(&S,&T) -> Result<V,U>) -> Result<~[V],U> {\n\n assert vec::same_length(ss, ts);\n let n = vec::len(ts);\n let mut vs = vec::with_capacity(n);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(copy v) => vs.push(v),\n Err(copy u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(move vs);\n}\n\n\/**\n * Applies op to the pairwise elements from `ss` and `ts`, aborting on\n * error. 
This could be implemented using `map2()` but it is more efficient\n * on its own as no result vector is built.\n *\/\npub fn iter_vec2<S,T,U:Copy>(ss: &[S], ts: &[T],\n op: fn(&S,&T) -> Result<(),U>) -> Result<(),U> {\n\n assert vec::same_length(ss, ts);\n let n = vec::len(ts);\n let mut i = 0u;\n while i < n {\n match op(&ss[i],&ts[i]) {\n Ok(()) => (),\n Err(copy u) => return Err(u)\n }\n i += 1u;\n }\n return Ok(());\n}\n\n\/\/\/ Unwraps a result, assuming it is an `ok(T)`\n#[inline(always)]\npub pure fn unwrap<T, U>(res: Result<T, U>) -> T {\n match move res {\n Ok(move t) => move t,\n Err(_) => fail ~\"unwrap called on an err result\"\n }\n}\n\n\/\/\/ Unwraps a result, assuming it is an `err(U)`\n#[inline(always)]\npub pure fn unwrap_err<T, U>(res: Result<T, U>) -> U {\n match move res {\n Err(move u) => move u,\n Ok(_) => fail ~\"unwrap called on an ok result\"\n }\n}\n\n#[cfg(test)]\n#[allow(non_implicitly_copyable_typarams)]\nmod tests {\n #[legacy_exports];\n fn op1() -> result::Result<int, ~str> { result::Ok(666) }\n\n fn op2(i: int) -> result::Result<uint, ~str> {\n result::Ok(i as uint + 1u)\n }\n\n fn op3() -> result::Result<int, ~str> { result::Err(~\"sadface\") }\n\n #[test]\n fn chain_success() {\n assert get(&chain(op1(), op2)) == 667u;\n }\n\n #[test]\n fn chain_failure() {\n assert get_err(&chain(op3(), op2)) == ~\"sadface\";\n }\n\n #[test]\n fn test_impl_iter() {\n let mut valid = false;\n Ok::<~str, ~str>(~\"a\").iter(|_x| valid = true);\n assert valid;\n\n Err::<~str, ~str>(~\"b\").iter(|_x| valid = false);\n assert valid;\n }\n\n #[test]\n fn test_impl_iter_err() {\n let mut valid = true;\n Ok::<~str, ~str>(~\"a\").iter_err(|_x| valid = false);\n assert valid;\n\n valid = false;\n Err::<~str, ~str>(~\"b\").iter_err(|_x| valid = true);\n assert valid;\n }\n\n #[test]\n fn test_impl_map() {\n assert Ok::<~str, ~str>(~\"a\").map(|_x| ~\"b\") == Ok(~\"b\");\n assert Err::<~str, ~str>(~\"a\").map(|_x| ~\"b\") == Err(~\"a\");\n }\n\n #[test]\n fn test_impl_map_err() {\n assert Ok::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\") == Ok(~\"a\");\n assert Err::<~str, ~str>(~\"a\").map_err(|_x| ~\"b\") == Err(~\"b\");\n }\n\n #[test]\n fn test_get_ref_method() {\n let foo: Result<int, ()> = Ok(100);\n assert *foo.get_ref() == 100;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse programs::common::*;\nuse graphics::point::*;\nuse graphics::size::*;\nuse graphics::window::*;\nuse common::string::*;\nuse common::resource::*;\n\nuse core::ops::DerefMut;\n\npub struct WindowScheme {\n pub current_window: *mut Window,\n}\n\npub struct WindowResource {\n pub active_window: Box<Window>,\n}\n\nimpl Resource for WindowResource {\n \/\/Required functions\n \/\/\/ Return the url of this resource\n fn url(&self) -> URL {\n return URL::from_string(&(\"window:\/\/test\".to_string()));\n } \n \/\/\/ Return the type of this resource\n fn stat(&self) -> ResourceType {\n return ResourceType::Window;\n }\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/\/TODO implement\n return Option::None;\n }\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n \/\/TODO implement\n return Option::None;\n }\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n return Option::None; \/\/TODO implement\n }\n \/\/\/ Sync the resource\n fn sync(&mut self) -> bool {\n return true;\n }\n}\n \n\n\n\n\nimpl SessionItem for WindowScheme {\n fn scheme(&self) -> String {\n return \"window\".to_string();\n }\n\n 
fn open(&mut self, url: &URL) -> Box<Resource> {\n let scheme :String;\n let mut pointx :isize;\n let mut pointy :isize;\n let mut size_width :usize;\n let mut size_height :usize;\n let mut title :String;\n\n \/\/window:\/\/host\/path\/path\/path is the path type we're working with.\n let mut url_path = url.path_parts();\n pointx = match url_path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n pointy = match url_path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n size_width = match url_path.get(2) {\n Some(w) => w.to_num(),\n None => 10,\n };\n size_height = match url_path.get(3) {\n Some(h) => h.to_num(),\n None => 10,\n };\n title = match url_path.get(4) {\n Some(t) => t.clone(),\n None => \"Fail\".to_string(),\n };\n let mut p: Point = Point::new(pointx, pointy);\n let mut s: Size = Size::new(size_width, size_height);\n \n let mut newWin = Window::new(p, s, title);\n unsafe {\n newWin.ptr = newWin.deref_mut();\n self.current_window = newWin.ptr;\n \/\/self.raw_current = Box::into_raw(newWin);\n if newWin.ptr as usize > 0 {\n (*::session_ptr).add_window(self.current_window); \n }\n }\n \n return box WindowResource {\n active_window : newWin,\n };\n \/\/return box VecResource::new(URL::from_str(\"window:\/\/\"),\n \/\/ ResourceType::File,\n \/\/ newWin);\n }\n}\n\nimpl Drop for WindowScheme {\n fn drop(&mut self) {\n unsafe {\n (*::session_ptr).remove_window(self.current_window);\n }\n }\n}\n<commit_msg>rebasing<commit_after>use alloc::boxed::Box;\n\nuse programs::common::*;\nuse graphics::point::*;\nuse graphics::size::*;\nuse graphics::window::*;\nuse common::string::*;\nuse common::resource::*;\n\nuse core::ops::DerefMut;\n\npub struct WindowScheme {\n pub current_window: *mut Window,\n}\n\npub struct WindowResource {\n pub active_window: Box<Window>,\n}\n\nimpl Resource for WindowResource {\n \/\/Required functions\n \/\/\/ Return the url of this resource\n fn url(&self) -> URL {\n return URL::from_string(&(\"window:\/\/test\".to_string()));\n } \n \/\/\/ Return the type of this resource\n fn stat(&self) -> ResourceType {\n return ResourceType::Window;\n }\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/\/TODO implement\n return Option::None;\n }\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n \/\/TODO implement\n return Option::None;\n }\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n return Option::None; \/\/TODO implement\n }\n \/\/\/ Sync the resource\n fn sync(&mut self) -> bool {\n return true;\n }\n}\n \n\n\n\n\nimpl SessionItem for WindowScheme {\n fn scheme(&self) -> String {\n return \"window\".to_string();\n }\n\n fn open(&mut self, url: &URL) -> Box<Resource> {\n let scheme :String;\n let mut pointx :isize;\n let mut pointy :isize;\n let mut size_width :usize;\n let mut size_height :usize;\n let mut title :String;\n\n \/\/window:\/\/host\/path\/path\/path is the path type we're working with.\n let mut url_path = url.path_parts();\n pointx = match url_path.get(0) {\n Some(x) => x.to_num_signed(),\n None => 0,\n };\n pointy = match url_path.get(1) {\n Some(y) => y.to_num_signed(),\n None => 0,\n };\n size_width = match url_path.get(2) {\n Some(w) => w.to_num(),\n None => 10,\n };\n size_height = match url_path.get(3) {\n Some(h) => h.to_num(),\n None => 10,\n };\n title = match url_path.get(4) {\n Some(t) => t.clone(),\n None => \"Fail\".to_string(),\n };\n let mut p: Point = Point::new(pointx, pointy);\n let mut s: Size = Size::new(size_width, 
size_height);\n let mut newWin = Window::new(p, s, title);\n unsafe {\n newWin.ptr = newWin.deref_mut();\n self.current_window = newWin.ptr;\n \/\/self.raw_current = Box::into_raw(newWin);\n if newWin.ptr as usize > 0 {\n (*::session_ptr).add_window(self.current_window); \n }\n }\n \n return box WindowResource {\n active_window : newWin,\n };\n \/\/return box VecResource::new(URL::from_str(\"window:\/\/\"),\n \/\/ ResourceType::File,\n \/\/ newWin);\n }\n}\n\nimpl Drop for WindowScheme {\n fn drop(&mut self) {\n unsafe {\n (*::session_ptr).remove_window(self.current_window);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ The Computer Language Benchmarks Game\n\/\/ http:\/\/benchmarksgame.alioth.debian.org\/\n\/\/\n\/\/ contributed by the Rust Project Developers\n\n\/\/ Copyright (c) 2013-2014 The Rust Project Developers\n\/\/\n\/\/ All rights reserved.\n\/\/\n\/\/ Redistribution and use in source and binary forms, with or without\n\/\/ modification, are permitted provided that the following conditions\n\/\/ are met:\n\/\/\n\/\/ - Redistributions of source code must retain the above copyright\n\/\/ notice, this list of conditions and the following disclaimer.\n\/\/\n\/\/ - Redistributions in binary form must reproduce the above copyright\n\/\/ notice, this list of conditions and the following disclaimer in\n\/\/ the documentation and\/or other materials provided with the\n\/\/ distribution.\n\/\/\n\/\/ - Neither the name of \"The Computer Language Benchmarks Game\" nor\n\/\/ the name of \"The Computer Language Shootout Benchmarks\" nor the\n\/\/ names of its contributors may be used to endorse or promote\n\/\/ products derived from this software without specific prior\n\/\/ written permission.\n\/\/\n\/\/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\/\/ \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\/\/ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n\/\/ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE\n\/\/ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n\/\/ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n\/\/ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\/\/ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n\/\/ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n\/\/ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n\/\/ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED\n\/\/ OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\/\/ ignore-pretty very bad with line comments\n\/\/ ignore-android doesn't terminate?\n\n#![feature(slicing_syntax)]\n\nuse std::iter::range_step;\nuse std::io::{stdin, stdout, File};\n\nstatic LINE_LEN: uint = 60;\n\nfn make_complements() -> [u8, ..256] {\n let transforms = [\n ('A', 'T'), ('C', 'G'), ('G', 'C'), ('T', 'A'),\n ('U', 'A'), ('M', 'K'), ('R', 'Y'), ('W', 'W'),\n ('S', 'S'), ('Y', 'R'), ('K', 'M'), ('V', 'B'),\n ('H', 'D'), ('D', 'H'), ('B', 'V'), ('N', 'N'),\n ('\\n', '\\n')];\n let mut complements: [u8, ..256] = [0, ..256];\n for (i, c) in complements.iter_mut().enumerate() {\n *c = i as u8;\n }\n let lower = 'A' as u8 - 'a' as u8;\n for &(from, to) in transforms.iter() {\n complements[from as uint] = to as u8;\n complements[(from as u8 - lower) as uint] = to as u8;\n }\n complements\n}\n\nfn main() {\n let complements = make_complements();\n let data = if std::os::getenv(\"RUST_BENCH\").is_some() {\n File::open(&Path::new(\"shootout-k-nucleotide.data\")).read_to_end()\n } else {\n stdin().read_to_end()\n };\n let mut data = data.unwrap();\n\n for seq in data.as_mut_slice().split_mut(|c| *c == '>' as u8) {\n \/\/ skip header and last \\n\n let begin = match seq.iter().position(|c| *c == '\\n' as u8) {\n None => continue,\n Some(c) => c\n };\n let len = seq.len();\n let seq = seq[mut begin+1..len-1];\n\n \/\/ arrange line breaks\n let len = seq.len();\n let off = LINE_LEN - len % (LINE_LEN + 1);\n for i in range_step(LINE_LEN, len, LINE_LEN + 1) {\n for j in std::iter::count(i, -1).take(off) {\n seq[j] = seq[j - 1];\n }\n seq[i - off] = '\\n' as u8;\n }\n\n \/\/ reverse complement, as\n \/\/ seq.reverse(); for c in seq.iter_mut() {*c = complements[*c]}\n \/\/ but faster:\n let mut it = seq.iter_mut();\n loop {\n match (it.next(), it.next_back()) {\n (Some(front), Some(back)) => {\n let tmp = complements[*front as uint];\n *front = complements[*back as uint];\n *back = tmp;\n }\n (Some(last), None) => *last = complements[*last as uint], \/\/ last element\n _ => break \/\/ vector exhausted.\n }\n }\n }\n\n stdout().write(data.as_slice()).unwrap();\n}\n<commit_msg>auto merge of #18056 : TeXitoi\/rust\/shootout-reverse-complement-improvement, r=alexcrichton<commit_after>\/\/ The Computer Language Benchmarks Game\n\/\/ http:\/\/benchmarksgame.alioth.debian.org\/\n\/\/\n\/\/ contributed by the Rust Project Developers\n\n\/\/ Copyright (c) 2013-2014 The Rust Project Developers\n\/\/\n\/\/ All rights reserved.\n\/\/\n\/\/ Redistribution and use in source and binary forms, with or without\n\/\/ modification, are permitted provided that the following conditions\n\/\/ are met:\n\/\/\n\/\/ - Redistributions of source code must retain the above copyright\n\/\/ notice, this list of conditions and the following disclaimer.\n\/\/\n\/\/ - Redistributions in binary form must reproduce the above copyright\n\/\/ notice, this list of conditions and the following disclaimer in\n\/\/ the documentation and\/or other materials provided with 
the\n\/\/ distribution.\n\/\/\n\/\/ - Neither the name of \"The Computer Language Benchmarks Game\" nor\n\/\/ the name of \"The Computer Language Shootout Benchmarks\" nor the\n\/\/ names of its contributors may be used to endorse or promote\n\/\/ products derived from this software without specific prior\n\/\/ written permission.\n\/\/\n\/\/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\/\/ \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\/\/ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n\/\/ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE\n\/\/ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n\/\/ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n\/\/ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\/\/ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)\n\/\/ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n\/\/ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n\/\/ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED\n\/\/ OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\/\/ ignore-pretty very bad with line comments\n\/\/ ignore-android doesn't terminate?\n\n#![feature(slicing_syntax)]\n\nuse std::iter::range_step;\nuse std::io::{stdin, stdout, File};\n\nstatic LINE_LEN: uint = 60;\n\nfn make_complements() -> [u8, ..256] {\n let transforms = [\n ('A', 'T'), ('C', 'G'), ('G', 'C'), ('T', 'A'),\n ('U', 'A'), ('M', 'K'), ('R', 'Y'), ('W', 'W'),\n ('S', 'S'), ('Y', 'R'), ('K', 'M'), ('V', 'B'),\n ('H', 'D'), ('D', 'H'), ('B', 'V'), ('N', 'N'),\n ('\\n', '\\n')];\n let mut complements: [u8, ..256] = [0, ..256];\n for (i, c) in complements.iter_mut().enumerate() {\n *c = i as u8;\n }\n let lower = 'A' as u8 - 'a' as u8;\n for &(from, to) in transforms.iter() {\n complements[from as uint] = to as u8;\n complements[(from as u8 - lower) as uint] = to as u8;\n }\n complements\n}\n\nfn main() {\n let complements = make_complements();\n let data = if std::os::getenv(\"RUST_BENCH\").is_some() {\n File::open(&Path::new(\"shootout-k-nucleotide.data\")).read_to_end()\n } else {\n stdin().read_to_end()\n };\n let mut data = data.unwrap();\n\n for seq in data.as_mut_slice().split_mut(|c| *c == '>' as u8) {\n \/\/ skip header and last \\n\n let begin = match seq.iter().position(|c| *c == '\\n' as u8) {\n None => continue,\n Some(c) => c\n };\n let len = seq.len();\n let seq = seq[mut begin+1..len-1];\n\n \/\/ arrange line breaks\n let len = seq.len();\n let off = LINE_LEN - len % (LINE_LEN + 1);\n for i in range_step(LINE_LEN, len, LINE_LEN + 1) {\n for j in std::iter::count(i, -1).take(off) {\n seq[j] = seq[j - 1];\n }\n seq[i - off] = '\\n' as u8;\n }\n\n \/\/ reverse complement, as\n \/\/ seq.reverse(); for c in seq.iter_mut() { *c = complements[*c] }\n \/\/ but faster:\n for (front, back) in two_side_iter(seq) {\n let tmp = complements[*front as uint];\n *front = complements[*back as uint];\n *back = tmp;\n }\n if seq.len() % 2 == 1 {\n let middle = &mut seq[seq.len() \/ 2];\n *middle = complements[*middle as uint];\n }\n }\n\n stdout().write(data.as_slice()).unwrap();\n}\n\npub struct TwoSideIter<'a, T: 'a> {\n first: *mut T,\n last: *mut T,\n marker: std::kinds::marker::ContravariantLifetime<'a>,\n marker2: std::kinds::marker::NoCopy\n}\n\npub fn two_side_iter<'a, T>(slice: &'a mut [T]) -> TwoSideIter<'a, T> {\n let len = slice.len();\n let first = slice.as_mut_ptr();\n let last = if len == 0 {\n first\n } else {\n 
unsafe { first.offset(len as int - 1) }\n };\n\n TwoSideIter {\n first: first,\n last: last,\n marker: std::kinds::marker::ContravariantLifetime,\n marker2: std::kinds::marker::NoCopy\n }\n}\n\nimpl<'a, T> Iterator<(&'a mut T, &'a mut T)> for TwoSideIter<'a, T> {\n fn next(&mut self) -> Option<(&'a mut T, &'a mut T)> {\n if self.first < self.last {\n let result = unsafe { (&mut *self.first, &mut *self.last) };\n self.first = unsafe { self.first.offset(1) };\n self.last = unsafe { self.last.offset(-1) };\n Some(result)\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #1506 - RalfJung:test-isolation, r=RalfJung<commit_after>\/\/ ignore-windows: File handling is not implemented yet\n\/\/ error-pattern: `open` not available when isolation is enabled\n\nfn main() {\n let _file = std::fs::File::open(\"file.txt\").unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #44317 - Dushistov:master, r=arielb1<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn is_copy<T: ::std::marker<i32>::Copy>() {}\n\/\/~^ ERROR type parameters are not allowed on this type [E0109]\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test for #52213<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T {\n match (&t,) { \/\/~ ERROR cannot infer an appropriate lifetime\n ((u,),) => u,\n }\n}\n\nfn main() {\n let x = {\n let y = Box::new((42,));\n transmute_lifetime(&y)\n };\n\n println!(\"{}\", x);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add an idle_loop example<commit_after>extern crate rustbox;\nextern crate tiny;\n\nuse std::borrow::Borrow;\n\nuse tiny::tui::{TUI, TUIRet};\n\nfn loop_() -> Option<String> {\n let mut tui = TUI::new();\n\n loop {\n match tui.idle_loop() {\n TUIRet::SendMsg(cmd) => {\n tui.show_outgoing_msg(cmd.into_iter().collect::<String>().borrow());\n },\n TUIRet::Abort => {\n return None;\n },\n _ => {}\n }\n }\n}\n\nfn main() {\n loop_().map(|err| println!(\"{}\", err));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>First part of new unified service.<commit_after>\/\/! 
Service for DNS.\n\nuse std::io;\nuse std::mem;\nuse std::time::Duration;\nuse futures::{Async, Future, Poll};\nuse futures::stream::{Peekable, Stream};\nuse tokio_core::reactor;\nuse tokio_core::channel::Receiver;\nuse super::conf::{ServerConf, TransportMode};\nuse super::request::ServiceRequest;\n\n\n\/\/------------ Transport -----------------------------------------------------\n\n\/\/\/ Something that can send and receive DNS messages.\npub trait Transport: io::Read + io::Write { }\n\n\n\/\/------------ TransportFactory ----------------------------------------------\n\n\/\/\/ Something that can make a new `Transport`.\npub trait TransportFactory {\n \/\/\/ The type of transport created by this factory.\n type Transport: Transport;\n\n \/\/\/ The type of future resolved while making a new transport.\n type Future: Future<Item=Self::Transport, Error=io::Error>;\n\n \/\/\/ Starts creating a new transport atop a given reactor.\n fn create(&self, reactor: &reactor::Handle) -> io::Result<Self::Future>;\n}\n\n\n\/\/------------ ServiceMode ---------------------------------------------------\n\n\/\/\/ The mode a service will run in.\n\/\/\/\n\/\/\/ This is essentially `conf::TransportMode` stripped of the variants that a\n\/\/\/ real service can’t have.\npub enum ServiceMode {\n SingleRequest,\n Sequential,\n Multiplex,\n}\n\nimpl ServiceMode {\n pub fn from_transport_mode(t: TransportMode, default: ServiceMode)\n -> Option<Self> {\n match t {\n TransportMode::None => None,\n TransportMode::Default => Some(default),\n TransportMode::SingleRequest => Some(ServiceMode::SingleRequest),\n TransportMode::Sequential => Some(ServiceMode::Sequential),\n TransportMode::Multiplex => Some(ServiceMode::Multiplex)\n }\n }\n}\n\n\n\/\/------------ Service -------------------------------------------------------\n\n\/\/\/ A service processes DNS requests.\npub struct Service<T: TransportFactory>(TrueService<T>);\n\nenum TrueService<T: TransportFactory> {\n Single(SingleService<T>),\n Sequential(Expiring<T, SequentialService<T>>),\n Multiplex(Expiring<T, MultiplexService<T>>)\n}\n\n\nimpl<T: TransportFactory> Service<T> {\n \/\/\/ Creates a new service.\n fn new(receiver: Receiver<ServiceRequest>, factory: T,\n reactor: reactor::Handle, mode: ServiceMode, conf: &ServerConf)\n -> Self {\n match mode {\n ServiceMode::SingleRequest => {\n Service(\n TrueService::Single(\n SingleService::new(receiver, factory, reactor, conf)\n )\n )\n }\n ServiceMode::Sequential => {\n Service(\n TrueService::Sequential(\n Expiring::new(receiver, factory, reactor, conf)\n )\n )\n }\n ServiceMode::Multiplex => {\n Service(\n TrueService::Multiplex(\n Expiring::new(receiver, factory, reactor, conf)\n )\n )\n }\n }\n }\n}\n\n\n\/\/--- Future\n\nimpl<T: TransportFactory> Future for Service<T> {\n type Item = ();\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<(), io::Error> {\n match self.0 {\n TrueService::Single(ref mut s) => s.poll(),\n TrueService::Sequential(ref mut s) => s.poll(),\n TrueService::Multiplex(ref mut s) => s.poll()\n }\n }\n}\n\n\n\/\/------------ SingleService -------------------------------------------------\n\n\/\/\/ A service in single request mode.\nstruct SingleService<T: TransportFactory> {\n receiver: Receiver<ServiceRequest>,\n factory: T,\n reactor: reactor::Handle,\n}\n\n\nimpl<T: TransportFactory> SingleService<T> {\n fn new(receiver: Receiver<ServiceRequest>, factory: T,\n reactor: reactor::Handle, conf: &ServerConf) -> Self {\n unimplemented!()\n }\n}\n\n\n\/\/--- Future\n\nimpl<T: 
TransportFactory> Future for SingleService<T> {\n type Item = ();\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<(), io::Error> {\n unimplemented!()\n }\n}\n\n\n\/\/------------ ExpiringService -----------------------------------------------\n\n\/\/\/ The service underneath `Expiring`.\n\/\/\/\n\/\/\/ Implementors need to be a stream of `()`, returning an item every time\n\/\/\/ the timeout needs to be refreshed. When they are done streaming, they\n\/\/\/ will be destroyed via their `finalize()` method, thereby retrieving the\n\/\/\/ receiver if it still exists.\ntrait ExpiringService<T: TransportFactory>: Stream<Item=(), Error=io::Error> {\n fn create(sock: T::Future, receiver: Peekable<Receiver<ServiceRequest>>,\n request_timeout: Duration) -> Self;\n fn finalize(self) -> Option<Peekable<Receiver<ServiceRequest>>>;\n}\n\n\n\/\/------------ Expiring ------------------------------------------------------\n\n\/\/\/ A wrapper for a service that will expire if nothing happens for too long.\nstruct Expiring<T: TransportFactory, S: ExpiringService<T>> {\n state: State<S>,\n factory: T,\n reactor: reactor::Handle,\n request_timeout: Duration,\n keep_alive: Duration,\n}\n\nenum State<S> {\n Idle(Peekable<Receiver<ServiceRequest>>),\n Active {\n service: S,\n timeout: Option<reactor::Timeout>\n },\n Dead\n}\n\n\nimpl<T: TransportFactory, S: ExpiringService<T>> Expiring<T, S> {\n fn new(receiver: Receiver<ServiceRequest>, factory: T,\n reactor: reactor::Handle, conf: &ServerConf) -> Self {\n Expiring {\n state: State::Idle(receiver.peekable()),\n factory: factory,\n reactor: reactor,\n request_timeout: conf.request_timeout,\n keep_alive: conf.keep_alive\n }\n }\n}\n\n\n\/\/--- Future\n\nimpl<T: TransportFactory, S: ExpiringService<T>> Future for Expiring<T, S> {\n type Item = ();\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<(), io::Error> {\n match self.state {\n State::Idle(ref mut receiver) => {\n try_ready!(receiver.peek());\n }\n State::Active{ref mut service, ref mut timeout} => {\n if let Some(ref mut timeout) = *timeout {\n match timeout.poll() {\n Ok(Async::Ready(())) => { }\n other => return other\n }\n }\n if let Some(()) = try_ready!(service.poll()) {\n *timeout = reactor::Timeout::new(self.keep_alive,\n &self.reactor).ok();\n if let Some(ref mut timeout) = *timeout {\n try_ready!(timeout.poll());\n \/\/ If we come here, the timeout triggered\n \/\/ already. 
Let’s panic because this can only lead\n \/\/ to trouble later ...\n panic!(\"Timeout triggered right away.\");\n }\n return Ok(Async::NotReady)\n }\n }\n State::Dead => panic!(\"poll on a dead service\")\n }\n\n self.state = match mem::replace(&mut self.state, State::Dead) {\n State::Idle(receiver) => {\n let sock = try!(self.factory.create(&self.reactor));\n let service = S::create(sock, receiver, self.request_timeout);\n let timeout = reactor::Timeout::new(self.keep_alive,\n &self.reactor).ok();\n State::Active{service: service, timeout: timeout}\n }\n State::Active{service, ..} => {\n match service.finalize() {\n Some(receiver) => State::Idle(receiver),\n None => return Ok(Async::Ready(()))\n }\n }\n State::Dead => panic!()\n };\n Ok(Async::NotReady)\n }\n}\n\n\n\/\/------------ SequentialService ---------------------------------------------\n\n\/\/\/ A service in sequential request mode.\nstruct SequentialService<T: TransportFactory> {\n phantom: ::std::marker::PhantomData<T>\n}\n\n\n\/\/--- ExpiringService\n\nimpl<T: TransportFactory> ExpiringService<T> for SequentialService<T> {\n fn create(sock: T::Future, recv: Peekable<Receiver<ServiceRequest>>,\n request_timeout: Duration) -> Self {\n unimplemented!()\n }\n\n fn finalize(self) -> Option<Peekable<Receiver<ServiceRequest>>> {\n unimplemented!()\n }\n}\n\n\n\/\/--- Stream\n\nimpl<T: TransportFactory> Stream for SequentialService<T> {\n type Item = ();\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<Option<()>, io::Error> {\n unimplemented!()\n }\n}\n\n\n\/\/------------ MultiplexService ----------------------------------------------\n\n\/\/\/ A service in multiplex mode.\nstruct MultiplexService<T: TransportFactory> {\n phantom: ::std::marker::PhantomData<T>\n}\n\n\n\/\/--- ExpiringService\n\nimpl<T: TransportFactory> ExpiringService<T> for MultiplexService<T> {\n fn create(sock: T::Future, recv: Peekable<Receiver<ServiceRequest>>,\n request_timeout: Duration) -> Self {\n unimplemented!()\n }\n\n fn finalize(self) -> Option<Peekable<Receiver<ServiceRequest>>> {\n unimplemented!()\n }\n}\n\n\n\/\/--- Stream\n\nimpl<T: TransportFactory> Stream for MultiplexService<T> {\n type Item = ();\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<Option<()>, io::Error> {\n unimplemented!()\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary trait restriction<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start aggregating metrics<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ NB: transitionary, de-mode-ing.\n#[forbid(deprecated_mode)];\n#[forbid(deprecated_pattern)];\n\n\/\/! A type that represents one of two alternatives\n\nimport result::result;\n\n\/\/\/ The either type\nenum Either<T, U> {\n Left(T),\n Right(U)\n}\n\nfn either<T, U, V>(f_left: fn((&T)) -> V,\n f_right: fn((&U)) -> V, value: &Either<T, U>) -> V {\n \/*!\n * Applies a function based on the given either value\n *\n * If `value` is left(T) then `f_left` is applied to its contents, if\n * `value` is right(U) then `f_right` is applied to its contents, and the\n * result is returned.\n *\/\n\n match *value {\n Left(ref l) => f_left(l),\n Right(ref r) => f_right(r)\n }\n}\n\nfn lefts<T: copy, U>(eithers: &[Either<T, U>]) -> ~[T] {\n \/\/! 
Extracts from a vector of either all the left values\n\n let mut result: ~[T] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Left(l) => vec::push(result, l),\n _ => { \/* fallthrough *\/ }\n }\n }\n return result;\n}\n\nfn rights<T, U: copy>(eithers: &[Either<T, U>]) -> ~[U] {\n \/\/! Extracts from a vector of either all the right values\n\n let mut result: ~[U] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Right(r) => vec::push(result, r),\n _ => { \/* fallthrough *\/ }\n }\n }\n return result;\n}\n\nfn partition<T: copy, U: copy>(eithers: &[Either<T, U>])\n -> {lefts: ~[T], rights: ~[U]} {\n \/*!\n * Extracts from a vector of either all the left values and right values\n *\n * Returns a structure containing a vector of left values and a vector of\n * right values.\n *\/\n\n let mut lefts: ~[T] = ~[];\n let mut rights: ~[U] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Left(l) => vec::push(lefts, l),\n Right(r) => vec::push(rights, r)\n }\n }\n return {lefts: lefts, rights: rights};\n}\n\npure fn flip<T: copy, U: copy>(eith: &Either<T, U>) -> Either<U, T> {\n \/\/! Flips between left and right of a given either\n\n match *eith {\n Right(r) => Left(r),\n Left(l) => Right(l)\n }\n}\n\npure fn to_result<T: copy, U: copy>(eith: &Either<T, U>) -> result<U, T> {\n \/*!\n * Converts either::t to a result::t\n *\n * Converts an `either` type to a `result` type, making the \"right\" choice\n * an ok result, and the \"left\" choice a fail\n *\/\n\n match *eith {\n Right(r) => result::ok(r),\n Left(l) => result::err(l)\n }\n}\n\npure fn is_left<T, U>(eith: &Either<T, U>) -> bool {\n \/\/! Checks whether the given value is a left\n\n match *eith { Left(_) => true, _ => false }\n}\n\npure fn is_right<T, U>(eith: &Either<T, U>) -> bool {\n \/\/! 
Checks whether the given value is a right\n\n match *eith { Right(_) => true, _ => false }\n}\n\n#[test]\nfn test_either_left() {\n let val = Left(10);\n fn f_left(x: &int) -> bool { *x == 10 }\n fn f_right(_x: &uint) -> bool { false }\n assert (either(f_left, f_right, &val));\n}\n\n#[test]\nfn test_either_right() {\n let val = Right(10u);\n fn f_left(_x: &int) -> bool { false }\n fn f_right(x: &uint) -> bool { *x == 10u }\n assert (either(f_left, f_right, &val));\n}\n\n#[test]\nfn test_lefts() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = lefts(input);\n assert (result == ~[10, 12, 14]);\n}\n\n#[test]\nfn test_lefts_none() {\n let input: ~[Either<int, int>] = ~[Right(10), Right(10)];\n let result = lefts(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_lefts_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = lefts(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_rights() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = rights(input);\n assert (result == ~[11, 13]);\n}\n\n#[test]\nfn test_rights_none() {\n let input: ~[Either<int, int>] = ~[Left(10), Left(10)];\n let result = rights(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_rights_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = rights(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_partition() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = partition(input);\n assert (result.lefts[0] == 10);\n assert (result.lefts[1] == 12);\n assert (result.lefts[2] == 14);\n assert (result.rights[0] == 11);\n assert (result.rights[1] == 13);\n}\n\n#[test]\nfn test_partition_no_lefts() {\n let input: ~[Either<int, int>] = ~[Right(10), Right(11)];\n let result = partition(input);\n assert (vec::len(result.lefts) == 0u);\n assert (vec::len(result.rights) == 2u);\n}\n\n#[test]\nfn test_partition_no_rights() {\n let input: ~[Either<int, int>] = ~[Left(10), Left(11)];\n let result = partition(input);\n assert (vec::len(result.lefts) == 2u);\n assert (vec::len(result.rights) == 0u);\n}\n\n#[test]\nfn test_partition_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = partition(input);\n assert (vec::len(result.lefts) == 0u);\n assert (vec::len(result.rights) == 0u);\n}\n\n\/\/\n\/\/ Local Variables:\n\/\/ mode: rust\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n\/\/\n<commit_msg>Add either::unwrap_{left,right}<commit_after>\/\/ NB: transitionary, de-mode-ing.\n#[forbid(deprecated_mode)];\n#[forbid(deprecated_pattern)];\n\n\/\/! A type that represents one of two alternatives\n\nimport result::result;\n\n\/\/\/ The either type\nenum Either<T, U> {\n Left(T),\n Right(U)\n}\n\nfn either<T, U, V>(f_left: fn((&T)) -> V,\n f_right: fn((&U)) -> V, value: &Either<T, U>) -> V {\n \/*!\n * Applies a function based on the given either value\n *\n * If `value` is left(T) then `f_left` is applied to its contents, if\n * `value` is right(U) then `f_right` is applied to its contents, and the\n * result is returned.\n *\/\n\n match *value {\n Left(ref l) => f_left(l),\n Right(ref r) => f_right(r)\n }\n}\n\nfn lefts<T: copy, U>(eithers: &[Either<T, U>]) -> ~[T] {\n \/\/! 
Extracts from a vector of either all the left values\n\n let mut result: ~[T] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Left(l) => vec::push(result, l),\n _ => { \/* fallthrough *\/ }\n }\n }\n return result;\n}\n\nfn rights<T, U: copy>(eithers: &[Either<T, U>]) -> ~[U] {\n \/\/! Extracts from a vector of either all the right values\n\n let mut result: ~[U] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Right(r) => vec::push(result, r),\n _ => { \/* fallthrough *\/ }\n }\n }\n return result;\n}\n\nfn partition<T: copy, U: copy>(eithers: &[Either<T, U>])\n -> {lefts: ~[T], rights: ~[U]} {\n \/*!\n * Extracts from a vector of either all the left values and right values\n *\n * Returns a structure containing a vector of left values and a vector of\n * right values.\n *\/\n\n let mut lefts: ~[T] = ~[];\n let mut rights: ~[U] = ~[];\n for vec::each(eithers) |elt| {\n match elt {\n Left(l) => vec::push(lefts, l),\n Right(r) => vec::push(rights, r)\n }\n }\n return {lefts: lefts, rights: rights};\n}\n\npure fn flip<T: copy, U: copy>(eith: &Either<T, U>) -> Either<U, T> {\n \/\/! Flips between left and right of a given either\n\n match *eith {\n Right(r) => Left(r),\n Left(l) => Right(l)\n }\n}\n\npure fn to_result<T: copy, U: copy>(eith: &Either<T, U>) -> result<U, T> {\n \/*!\n * Converts either::t to a result::t\n *\n * Converts an `either` type to a `result` type, making the \"right\" choice\n * an ok result, and the \"left\" choice a fail\n *\/\n\n match *eith {\n Right(r) => result::ok(r),\n Left(l) => result::err(l)\n }\n}\n\npure fn is_left<T, U>(eith: &Either<T, U>) -> bool {\n \/\/! Checks whether the given value is a left\n\n match *eith { Left(_) => true, _ => false }\n}\n\npure fn is_right<T, U>(eith: &Either<T, U>) -> bool {\n \/\/! Checks whether the given value is a right\n\n match *eith { Right(_) => true, _ => false }\n}\n\npure fn unwrap_left<T,U>(+eith: Either<T,U>) -> T {\n \/\/! Retrieves the value in the left branch. Fails if the either is Right.\n\n match move eith {\n Left(move x) => x, Right(_) => fail ~\"either::unwrap_left Right\"\n }\n}\n\npure fn unwrap_right<T,U>(+eith: Either<T,U>) -> U {\n \/\/! Retrieves the value in the right branch. 
Fails if the either is Left.\n\n match move eith {\n Right(move x) => x, Left(_) => fail ~\"either::unwrap_right Left\"\n }\n}\n\n#[test]\nfn test_either_left() {\n let val = Left(10);\n fn f_left(x: &int) -> bool { *x == 10 }\n fn f_right(_x: &uint) -> bool { false }\n assert (either(f_left, f_right, &val));\n}\n\n#[test]\nfn test_either_right() {\n let val = Right(10u);\n fn f_left(_x: &int) -> bool { false }\n fn f_right(x: &uint) -> bool { *x == 10u }\n assert (either(f_left, f_right, &val));\n}\n\n#[test]\nfn test_lefts() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = lefts(input);\n assert (result == ~[10, 12, 14]);\n}\n\n#[test]\nfn test_lefts_none() {\n let input: ~[Either<int, int>] = ~[Right(10), Right(10)];\n let result = lefts(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_lefts_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = lefts(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_rights() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = rights(input);\n assert (result == ~[11, 13]);\n}\n\n#[test]\nfn test_rights_none() {\n let input: ~[Either<int, int>] = ~[Left(10), Left(10)];\n let result = rights(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_rights_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = rights(input);\n assert (vec::len(result) == 0u);\n}\n\n#[test]\nfn test_partition() {\n let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];\n let result = partition(input);\n assert (result.lefts[0] == 10);\n assert (result.lefts[1] == 12);\n assert (result.lefts[2] == 14);\n assert (result.rights[0] == 11);\n assert (result.rights[1] == 13);\n}\n\n#[test]\nfn test_partition_no_lefts() {\n let input: ~[Either<int, int>] = ~[Right(10), Right(11)];\n let result = partition(input);\n assert (vec::len(result.lefts) == 0u);\n assert (vec::len(result.rights) == 2u);\n}\n\n#[test]\nfn test_partition_no_rights() {\n let input: ~[Either<int, int>] = ~[Left(10), Left(11)];\n let result = partition(input);\n assert (vec::len(result.lefts) == 2u);\n assert (vec::len(result.rights) == 0u);\n}\n\n#[test]\nfn test_partition_empty() {\n let input: ~[Either<int, int>] = ~[];\n let result = partition(input);\n assert (vec::len(result.lefts) == 0u);\n assert (vec::len(result.rights) == 0u);\n}\n\n\/\/\n\/\/ Local Variables:\n\/\/ mode: rust\n\/\/ fill-column: 78;\n\/\/ indent-tabs-mode: nil\n\/\/ c-basic-offset: 4\n\/\/ buffer-file-coding-system: utf-8-unix\n\/\/ End:\n\/\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix TODOs. Modify some AST types.<commit_after><|endoftext|>"} {"text":"<commit_before>use column::{Alignment, Column, Cell};\nuse xattr::Attribute;\nuse dir::Dir;\nuse file::{File, GREY};\nuse options::{Columns, FileFilter, RecurseOptions};\nuse users::OSUsers;\n\nuse locale;\nuse ansi_term::Style::Plain;\n\n#[derive(PartialEq, Debug, Copy)]\npub struct Details {\n pub columns: Columns,\n pub header: bool,\n pub recurse: Option<(RecurseOptions, FileFilter)>,\n pub xattr: bool,\n}\n\nimpl Details {\n pub fn view(&self, dir: Option<&Dir>, files: &[File]) {\n \/\/ The output gets formatted into columns, which looks nicer. 
To\n \/\/ do this, we have to write the results into a table, instead of\n \/\/ displaying each file immediately, then calculating the maximum\n \/\/ width of each column based on the length of the results and\n \/\/ padding the fields during output.\n\n \/\/ Almost all the heavy lifting is done in a Table object, which\n \/\/ automatically calculates the width of each column and the\n \/\/ appropriate padding.\n let mut table = Table::with_columns(self.columns.for_dir(dir));\n if self.header { table.add_header() }\n\n self.add_files_to_table(&mut table, files, 0);\n table.print_table(self.xattr, self.recurse.is_some());\n }\n\n \/\/\/ Adds files to the table - recursively, if the `recurse` option\n \/\/\/ is present.\n fn add_files_to_table(&self, table: &mut Table, src: &[File], depth: usize) {\n for (index, file) in src.iter().enumerate() {\n table.add_row(file, depth, index == src.len() - 1);\n\n if let Some((r, filter)) = self.recurse {\n if r.tree == false || r.is_too_deep(depth) {\n continue;\n }\n\n if let Some(ref dir) = file.this {\n let mut files = dir.files(true);\n filter.transform_files(&mut files);\n self.add_files_to_table(table, &files, depth + 1);\n }\n }\n }\n }\n}\n\nstruct Row {\n depth: usize,\n cells: Vec<Cell>,\n name: String,\n last: bool,\n attrs: Vec<Attribute>,\n children: bool,\n}\n\ntype ColumnInfo = (usize, Alignment);\n\nstruct Table {\n columns: Vec<Column>,\n users: OSUsers,\n locale: UserLocale,\n rows: Vec<Row>,\n}\n\nimpl Table {\n fn with_columns(columns: Vec<Column>) -> Table {\n Table {\n columns: columns,\n users: OSUsers::empty_cache(),\n locale: UserLocale::new(),\n rows: Vec::new(),\n }\n }\n\n fn add_header(&mut self) {\n let row = Row {\n depth: 0,\n cells: self.columns.iter().map(|c| Cell::paint(Plain.underline(), c.header())).collect(),\n name: Plain.underline().paint(\"Name\").to_string(),\n last: false,\n attrs: Vec::new(),\n children: false,\n };\n\n self.rows.push(row);\n }\n\n fn cells_for_file(&mut self, file: &File) -> Vec<Cell> {\n self.columns.clone().iter()\n .map(|c| file.display(c, &mut self.users, &self.locale))\n .collect()\n }\n\n fn add_row(&mut self, file: &File, depth: usize, last: bool) {\n let row = Row {\n depth: depth,\n cells: self.cells_for_file(file),\n name: file.file_name_view(),\n last: last,\n attrs: file.xattrs.clone(),\n children: file.this.is_some(),\n };\n\n self.rows.push(row)\n }\n\n fn print_table(self, xattr: bool, show_children: bool) {\n let mut stack = Vec::new();\n\n let column_widths: Vec<usize> = range(0, self.columns.len())\n .map(|n| self.rows.iter().map(|row| row.cells[n].length).max().unwrap_or(0))\n .collect();\n\n for row in self.rows.iter() {\n for (n, width) in column_widths.iter().enumerate() {\n let padding = width - row.cells[n].length;\n print!(\"{} \", self.columns[n].alignment().pad_string(&row.cells[n].text, padding));\n }\n\n if show_children {\n stack.resize(row.depth + 1, TreePart::Edge);\n stack[row.depth] = if row.last { TreePart::Corner } else { TreePart::Edge };\n\n for i in 1 .. 
row.depth + 1 {\n print!(\"{}\", GREY.paint(stack[i].ascii_art()));\n }\n\n if row.children {\n stack[row.depth] = if row.last { TreePart::Blank } else { TreePart::Line };\n }\n\n if row.depth != 0 {\n print!(\" \");\n }\n }\n\n print!(\"{}\\n\", row.name);\n\n if xattr {\n let width = row.attrs.iter().map(|a| a.name().len()).max().unwrap_or(0);\n for attr in row.attrs.iter() {\n let name = attr.name();\n println!(\"{}\\t{}\",\n Alignment::Left.pad_string(name, width - name.len()),\n attr.size()\n )\n }\n }\n }\n }\n}\n\n#[derive(PartialEq, Debug, Clone)]\nenum TreePart {\n Edge,\n Corner,\n Blank,\n Line,\n}\n\nimpl TreePart {\n fn ascii_art(&self) -> &'static str {\n match *self {\n TreePart::Edge => \"├──\",\n TreePart::Line => \"│ \",\n TreePart::Corner => \"└──\",\n TreePart::Blank => \" \",\n }\n }\n}\n\npub struct UserLocale {\n pub time: locale::Time,\n pub numeric: locale::Numeric,\n}\n\nimpl UserLocale {\n pub fn new() -> UserLocale {\n UserLocale {\n time: locale::Time::load_user_locale().unwrap_or_else(|_| locale::Time::english()),\n numeric: locale::Numeric::load_user_locale().unwrap_or_else(|_| locale::Numeric::english()),\n }\n }\n\n pub fn default() -> UserLocale {\n UserLocale {\n time: locale::Time::english(),\n numeric: locale::Numeric::english(),\n }\n }\n}\n<commit_msg>Spew comments everywhere<commit_after>use column::{Alignment, Column, Cell};\nuse xattr::Attribute;\nuse dir::Dir;\nuse file::{File, GREY};\nuse options::{Columns, FileFilter, RecurseOptions};\nuse users::OSUsers;\n\nuse locale;\nuse ansi_term::Style::Plain;\n\n\/\/\/ With the **Details** view, the output gets formatted into columns, with\n\/\/\/ each `Column` object showing some piece of information about the file,\n\/\/\/ such as its size, or its permissions.\n\/\/\/\n\/\/\/ To do this, the results have to be written to a table, instead of\n\/\/\/ displaying each file immediately. Then, the width of each column can be\n\/\/\/ calculated based on the individual results, and the fields are padded\n\/\/\/ during output.\n\/\/\/\n\/\/\/ Almost all the heavy lifting is done in a Table object, which handles the\n\/\/\/ columns for each row.\n#[derive(PartialEq, Debug, Copy)]\npub struct Details {\n\n \/\/\/ A Columns object that says which columns should be included in the\n \/\/\/ output in the general case. Directories themselves can pick which\n \/\/\/ columns are *added* to this list, such as the Git column.\n pub columns: Columns,\n\n \/\/\/ Whether to recurse through directories with a tree view, and if so,\n \/\/\/ which options to use. 
This field is only relevant here if the `tree`\n \/\/\/ field of the RecurseOptions is `true`.\n pub recurse: Option<(RecurseOptions, FileFilter)>,\n\n \/\/\/ Whether to show a header line or not.\n pub header: bool,\n\n \/\/\/ Whether to show each file's extended attributes.\n pub xattr: bool,\n}\n\nimpl Details {\n pub fn view(&self, dir: Option<&Dir>, files: &[File]) {\n \/\/ First, transform the Columns object into a vector of columns for\n \/\/ the current directory.\n let mut table = Table::with_columns(self.columns.for_dir(dir));\n if self.header { table.add_header() }\n\n \/\/ Then add files to the table and print it out.\n self.add_files_to_table(&mut table, files, 0);\n table.print_table(self.xattr, self.recurse.is_some());\n }\n\n \/\/\/ Adds files to the table - recursively, if the `recurse` option\n \/\/\/ is present.\n fn add_files_to_table(&self, table: &mut Table, src: &[File], depth: usize) {\n for (index, file) in src.iter().enumerate() {\n table.add_file(file, depth, index == src.len() - 1);\n\n \/\/ There are two types of recursion that exa supports: a tree\n \/\/ view, which is dealt with here, and multiple listings, which is\n \/\/ dealt with in the main module. So only actually recurse if we\n \/\/ are in tree mode - the other case will be dealt with elsewhere.\n if let Some((r, filter)) = self.recurse {\n if r.tree == false || r.is_too_deep(depth) {\n continue;\n }\n\n \/\/ Use the filter to remove unwanted files *before* expanding\n \/\/ them, so we don't examine any directories that wouldn't\n \/\/ have their contents listed anyway.\n if let Some(ref dir) = file.this {\n let mut files = dir.files(true);\n filter.transform_files(&mut files);\n self.add_files_to_table(table, &files, depth + 1);\n }\n }\n }\n }\n}\n\nstruct Row {\n\n \/\/\/ Vector of cells to display.\n cells: Vec<Cell>,\n\n \/\/\/ This file's name, in coloured output. The name is treated separately\n \/\/\/ from the other cells, as it never requires padding.\n name: String,\n\n \/\/\/ How many directories deep into the tree structure this is. Directories\n \/\/\/ on top have depth 0.\n depth: usize,\n\n \/\/\/ Vector of this file's extended attributes, if that feature is active.\n attrs: Vec<Attribute>,\n\n \/\/\/ Whether this is the last entry in the directory. This flag is used\n \/\/\/ when calculating the tree view.\n last: bool,\n\n \/\/\/ Whether this file is a directory and has any children. Also used when\n \/\/\/ calculating the tree view.\n children: bool,\n}\n\n\/\/\/ A **Table** object gets built up by the view as it lists files and\n\/\/\/ directories.\nstruct Table {\n columns: Vec<Column>,\n users: OSUsers,\n locale: UserLocale,\n rows: Vec<Row>,\n}\n\nimpl Table {\n \/\/\/ Create a new, empty Table object, setting the caching fields to their\n \/\/\/ empty states.\n fn with_columns(columns: Vec<Column>) -> Table {\n Table {\n columns: columns,\n users: OSUsers::empty_cache(),\n locale: UserLocale::new(),\n rows: Vec::new(),\n }\n }\n\n \/\/\/ Add a dummy \"header\" row to the table, which contains the names of all\n \/\/\/ the columns, underlined. 
This has dummy data for the cases that aren't\n \/\/\/ actually used, such as the depth or list of attributes.\n fn add_header(&mut self) {\n let row = Row {\n depth: 0,\n cells: self.columns.iter().map(|c| Cell::paint(Plain.underline(), c.header())).collect(),\n name: Plain.underline().paint(\"Name\").to_string(),\n last: false,\n attrs: Vec::new(),\n children: false,\n };\n\n self.rows.push(row);\n }\n\n \/\/\/ Use the list of columns to find which cells should be produced for\n \/\/\/ this file, per-column.\n fn cells_for_file(&mut self, file: &File) -> Vec<Cell> {\n self.columns.clone().iter()\n .map(|c| file.display(c, &mut self.users, &self.locale))\n .collect()\n }\n\n \/\/\/ Get the cells for the given file, and add the result to the table.\n fn add_file(&mut self, file: &File, depth: usize, last: bool) {\n let row = Row {\n depth: depth,\n cells: self.cells_for_file(file),\n name: file.file_name_view(),\n last: last,\n attrs: file.xattrs.clone(),\n children: file.this.is_some(),\n };\n\n self.rows.push(row)\n }\n\n \/\/\/ Print the table to standard output, consuming it in the process.\n fn print_table(self, xattr: bool, show_children: bool) {\n let mut stack = Vec::new();\n\n \/\/ Work out the list of column widths by finding the longest cell for\n \/\/ each column, then formatting each cell in that column to be the\n \/\/ width of that one.\n let column_widths: Vec<usize> = range(0, self.columns.len())\n .map(|n| self.rows.iter().map(|row| row.cells[n].length).max().unwrap_or(0))\n .collect();\n\n for row in self.rows.into_iter() {\n for (n, width) in column_widths.iter().enumerate() {\n let padding = width - row.cells[n].length;\n print!(\"{} \", self.columns[n].alignment().pad_string(&row.cells[n].text, padding));\n }\n\n \/\/ A stack tracks which tree characters should be printed. It's\n \/\/ necessary to maintain information about the previously-printed\n \/\/ lines, as the output will change based on whether the\n \/\/ *previous* entry was the last in its directory.\n if show_children {\n stack.resize(row.depth + 1, TreePart::Edge);\n stack[row.depth] = if row.last { TreePart::Corner } else { TreePart::Edge };\n\n for i in 1 .. 
row.depth + 1 {\n print!(\"{}\", GREY.paint(stack[i].ascii_art()));\n }\n\n if row.children {\n stack[row.depth] = if row.last { TreePart::Blank } else { TreePart::Line };\n }\n\n \/\/ If any tree characters have been printed, then add an extra\n \/\/ space, which makes the output look much better.\n if row.depth != 0 {\n print!(\" \");\n }\n }\n\n \/\/ Print the name without worrying about padding.\n print!(\"{}\\n\", row.name);\n\n if xattr {\n let width = row.attrs.iter().map(|a| a.name().len()).max().unwrap_or(0);\n for attr in row.attrs.iter() {\n let name = attr.name();\n println!(\"{}\\t{}\",\n Alignment::Left.pad_string(name, width - name.len()),\n attr.size()\n )\n }\n }\n }\n }\n}\n\n#[derive(PartialEq, Debug, Clone)]\nenum TreePart {\n\n \/\/\/ Rightmost column, *not* the last in the directory.\n Edge,\n\n \/\/\/ Not the rightmost column, and the directory has not finished yet.\n Line,\n\n \/\/\/ Rightmost column, and the last in the directory.\n Corner,\n\n \/\/\/ Not the rightmost column, and the directory *has* finished.\n Blank,\n}\n\nimpl TreePart {\n fn ascii_art(&self) -> &'static str {\n match *self {\n TreePart::Edge => \"├──\",\n TreePart::Line => \"│ \",\n TreePart::Corner => \"└──\",\n TreePart::Blank => \" \",\n }\n }\n}\n\npub struct UserLocale {\n pub time: locale::Time,\n pub numeric: locale::Numeric,\n}\n\nimpl UserLocale {\n pub fn new() -> UserLocale {\n UserLocale {\n time: locale::Time::load_user_locale().unwrap_or_else(|_| locale::Time::english()),\n numeric: locale::Numeric::load_user_locale().unwrap_or_else(|_| locale::Numeric::english()),\n }\n }\n\n pub fn default() -> UserLocale {\n UserLocale {\n time: locale::Time::english(),\n numeric: locale::Numeric::english(),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n \/\/\/ Currently : 0 = EN \/ 1 = FR\n layout: usize,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: 0,\n };\n\n unsafe {\n module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) == 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n 
self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: char_for_scancode(scancode & 0x7F, shift, self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = layout;\n }\n\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if 
status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n\n\/\/\/ Function to return the character associated with the scancode, and the layout\nfn char_for_scancode(scancode: u8, shift: bool, layout: usize) -> char {\n let mut character = '\\x00';\n if scancode < 58 {\n let characters: [char; 2] =\n match layout {\n 0 => SCANCODES_EN[scancode as usize],\n 1 => SCANCODES_FR[scancode as usize],\n _ => SCANCODES_EN[scancode as usize],\n };\n if shift {\n character = characters[1]\n }\n \/\/Else...\n character = characters[0]\n }\n character\n}\n\nstatic SCANCODES_EN: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '!'],\n ['2', '@'],\n ['3', '#'],\n ['4', '$'],\n ['5', '%'],\n ['6', '^'],\n ['7', '&'],\n ['8', '*'],\n ['9', '('],\n ['0', ')'],\n ['-', '_'],\n ['=', '+'],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['q', 'Q'],\n ['w', 'W'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['[', '{'],\n [']', '}'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['a', 'A'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n [';', ':'],\n ['\\'', '\"'],\n ['`', '~'],\n ['\\0', '\\0'],\n ['\\\\', '|'],\n ['z', 'Z'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n ['m', 'M'],\n [',', '<'],\n ['.', '>'],\n ['\/', '?'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n\n static SCANCODES_FR: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '&'],\n ['2', 'é'],\n ['3', '\"'],\n ['4', '\\''],\n ['5', '('],\n ['6', '-'],\n ['7', 'è'],\n ['8', '_'],\n ['9', 'ç'],\n ['0', 'à'],\n ['-', ')'],\n ['=', '='],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['a', 'A'],\n ['z', 'Z'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['^', '¨'],\n ['$', '£'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['q', 'Q'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n ['m', 'M'],\n ['ù', '%'],\n ['*', 'µ'],\n ['\\0', '\\0'],\n ['<', '>'],\n ['w', 'W'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n [',', '?'],\n [';', '.'],\n [':', '\/'],\n ['!', '§'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n<commit_msg>layout module added as field of ps2<commit_after>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\nuse drivers::kb_layouts::layouts;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n \/\/\/ Currently : 0 = EN \/ 1 = FR\n layout: usize,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: 0,\n };\n\n unsafe {\n 
module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) == 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: char_for_scancode(scancode & 0x7F, shift, self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n 
self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = layout;\n }\n\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n\n\/\/\/ Function to return the character associated with the scancode, and the layout\nfn char_for_scancode(scancode: u8, shift: bool, layout: usize) -> char {\n let mut character = '\\x00';\n if scancode < 58 {\n let characters: [char; 2] =\n match layout {\n 0 => SCANCODES_EN[scancode as usize],\n 1 => SCANCODES_FR[scancode as usize],\n _ => SCANCODES_EN[scancode as usize],\n };\n if shift {\n character = characters[1]\n }\n \/\/Else...\n character = characters[0]\n }\n character\n}\n\nstatic SCANCODES_EN: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '!'],\n ['2', '@'],\n ['3', '#'],\n ['4', '$'],\n ['5', '%'],\n ['6', '^'],\n ['7', '&'],\n ['8', '*'],\n ['9', '('],\n ['0', ')'],\n ['-', '_'],\n ['=', '+'],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['q', 'Q'],\n ['w', 'W'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['[', '{'],\n [']', '}'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['a', 'A'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n [';', ':'],\n ['\\'', '\"'],\n ['`', '~'],\n ['\\0', '\\0'],\n ['\\\\', '|'],\n ['z', 'Z'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n ['m', 'M'],\n [',', '<'],\n ['.', '>'],\n ['\/', '?'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n\n static SCANCODES_FR: [[char; 2]; 58] = [['\\0', '\\0'],\n ['\\x1B', '\\x1B'],\n ['1', '&'],\n ['2', 'é'],\n ['3', '\"'],\n ['4', '\\''],\n ['5', '('],\n ['6', '-'],\n ['7', 'è'],\n ['8', '_'],\n ['9', 'ç'],\n ['0', 'à'],\n ['-', ')'],\n ['=', '='],\n ['\\0', '\\0'],\n ['\\t', '\\t'],\n ['a', 'A'],\n ['z', 'Z'],\n ['e', 'E'],\n ['r', 'R'],\n ['t', 'T'],\n ['y', 'Y'],\n ['u', 'U'],\n ['i', 'I'],\n ['o', 'O'],\n ['p', 'P'],\n ['^', '¨'],\n ['$', '£'],\n ['\\n', '\\n'],\n ['\\0', '\\0'],\n ['q', 'Q'],\n ['s', 'S'],\n ['d', 'D'],\n ['f', 'F'],\n ['g', 'G'],\n ['h', 'H'],\n ['j', 'J'],\n ['k', 'K'],\n ['l', 'L'],\n ['m', 'M'],\n ['ù', '%'],\n ['*', 'µ'],\n ['\\0', '\\0'],\n ['<', '>'],\n ['w', 'W'],\n ['x', 'X'],\n ['c', 'C'],\n ['v', 'V'],\n ['b', 'B'],\n ['n', 'N'],\n [',', '?'],\n [';', '.'],\n [':', '\/'],\n ['!', '§'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n ['\\0', '\\0'],\n [' ', ' ']];\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Do String borrow better<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>even better with the code<commit_after>\/\/\/ MPI-based method for summing many arrays\n\n\/\/ Argument parsing\n#[macro_use] extern crate clap;\n\/\/ logging\n#[macro_use] extern crate log;\nextern 
crate ndarray;\nextern crate env_logger;\n\/\/ lastly, this library\nextern crate cabarrus;\nuse cabarrus::errors::*;\nuse std::cmp::min;\nuse ndarray as nd;\nuse ndarray::prelude::*;\n\npub fn main() {\n \/\/ Main can't return a Result, and the ? operator needs the enclosing function to return Result\n inner_main().expect(\"Could not recover. Exiting.\");\n}\npub fn inner_main() -> Result<()> {\n env_logger::init().unwrap();\n let args = app_from_crate!()\n .arg_from_usage(\"<size> 'how many workers there are' \")\n .arg_from_usage(\"<rank> 'which worker am I, starting from 0' \")\n .arg_from_usage(\"<addends>.. 'files contains matrices to add'\")\n .arg_from_usage(\"<output> 'file in which to store the resulting matrix'\")\n .get_matches();\n \n let mats = args.values_of(\"addends\").unwrap();\n let size = value_t!(args, \"size\", usize).unwrap_or_else(|e| e.exit());\n let rank = value_t!(args, \"rank\", usize).unwrap_or_else(|e| e.exit());\n let chunksize = (mats.len() + size - 1) \/ size;\n \n let mut accum = None;\n for matname in mats.skip(rank*chunksize).take(chunksize) {\n let ref matfile = cabarrus::numpy::open_matrix_mmap(matname)?;\n let ref mat = cabarrus::numpy::read_matrix_mmap(matfile)?;\n accum = Some(match accum {\n Some(mut acc) => {acc += mat; acc}\n None => mat * 1.0\n });\n }\n if let Some(ref acc) = accum {\n cabarrus::numpy::write_matrix(args.value_of(\"output\").unwrap(), acc)?;\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for broken pipe panics<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style: Remove SpecialColorKeyword enum and merge into SystemColor.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added chap6 exercise<commit_after>#[derive(Clone)]\n#[derive(Debug)]\nstruct MagicNumber {\n\tnum:f64,\n}\n\n#[derive(Debug)]\nstruct Alien {\n\tname:String,\n\ttentacles:u32,\n}\n\n\/\/ exercise chap6\nimpl Alien {\n\tfn grow_tentacles(&mut self) {\n\t\tself.tentacles+=2;\n\t}\n}\n\n#[derive(Debug)]\nstruct Magician {\n name:&'static str,\n power:u32,\n}\n\n#[allow(unused_variables)]\nfn main() {\n\n\tlet n = 42;\n\tmatch n {\n\t\tref n => println!(\"Got a reference {:p}\",n),\n\t}\n\n\tlet mut m = 89;\n\tmatch m {\n\t\tref mut mr => {println!(\"\\nGot a mutable reference {:?}\",mr);*mr= 231;},\n\t}\n\tprintln!(\"\\nm is now changed to {:?}\",m);\n\t\n\t\/\/getting reference via destructuring using ref keyword on LHS\n\tlet mag = Magician {name:\"Turok\",power:298};\n\tlet name_of_magician = {let Magician {name:ref ref_to_name,power:_}=mag;*ref_to_name};\n\tprintln!(\"{:?}\",name_of_magician );\n\n\t\/\/ Ownership in action\n\t\/\/ al has the ownership of the Alien struct instance\n\tlet al = Alien {name:\"Predator\".to_string(),tentacles:4};\n\t\/\/ ownership transferred to big_al\n\tlet mut big_al = al;\n\n\t\/\/ #[1]error: al2 cannot get a immutable reference to al because the resource has been moved\n\t\/\/ where al currently was pointing to, the new owner is big_al\n\t\/\/let al2 = &al;\n\t\/\/ same error as #[1]\n\t\/\/al.name = \"Omnivore\".to_string();\n\tbig_al.name = \"Krieger\".to_string();\n\n\t\/\/ exercise part of chapter 6\n\tprintln!(\"{:?}\",big_al);\n\tbig_al.grow_tentacles();\n\tprintln!(\"The tentacles upgraded to {:?}\",big_al.tentacles);\n\n}<|endoftext|>"} {"text":"<commit_before>use std::fs::File;\nuse std::path::Path;\nuse std::io::prelude::*;\nuse std::vec::Vec;\nuse std::io;\nuse std::env;\nuse std::collections::HashMap;\n\nstruct Tape {\n pos: usize,\n tape: 
Vec<isize>\n}\n\nimpl Tape {\n fn new() -> Tape { Tape { pos: 0, tape: vec![0] } }\n fn get(&self) -> isize { self.tape[self.pos] }\n fn getc(&self) -> char { self.get() as u8 as char }\n fn inc(&mut self) { self.tape[self.pos] += 1; }\n fn dec(&mut self) { self.tape[self.pos] -= 1; }\n fn advance(&mut self) { self.pos += 1; if self.tape.len() <= self.pos { self.tape.push(0) } }\n fn devance(&mut self) { if self.pos > 0 { self.pos -= 1; } }\n}\n\nstruct Program {\n code: Vec<char>,\n bracket_map: HashMap<usize, usize>\n}\n\nimpl Program {\n fn new(content: String) -> Program {\n let mut code: Vec<char> = Vec::new();\n let mut bracket_map = HashMap::new();\n let mut leftstack = Vec::new();\n let mut pc = 0;\n\n for c in content.chars() {\n match c {\n '+' | '-' | '.' | ',' | '<' | '>' => (),\n '[' => { leftstack.push(pc); },\n ']' => match leftstack.pop() {\n Some(left) => { bracket_map.insert(left, pc); bracket_map.insert(pc, left); }\n None => ()\n },\n _ => { continue; }\n }\n code.push(c);\n pc += 1;\n }\n Program{ code: code, bracket_map: bracket_map }\n }\n\n fn run(&self) {\n let mut pc: usize = 0;\n let len = self.code.len();\n let mut tape = Tape::new();\n\n while pc < len {\n match self.code[pc] {\n '+' => tape.inc(),\n '-' => tape.dec(),\n '>' => tape.advance(),\n '<' => tape.devance(),\n '[' => { if tape.get() == 0 { pc = self.bracket_map[&pc]; } },\n ']' => { if tape.get() != 0 { pc = self.bracket_map[&pc]; } },\n '.' => { print!(\"{}\", tape.getc()); io::stdout().flush().unwrap() },\n _ => ()\n }\n pc += 1;\n }\n }\n}\n\nfn main() {\n let arg1 = env::args().nth(1).unwrap();\n let path = Path::new(&arg1);\n let mut s = String::new();\n let mut file = File::open(&path).unwrap();\n file.read_to_string(&mut s).unwrap();\n Program::new(s).run()\n}\n<commit_msg>Use BTreeMap instead of HashMap in brainfuck.rs<commit_after>use std::fs::File;\nuse std::path::Path;\nuse std::io::prelude::*;\nuse std::vec::Vec;\nuse std::io;\nuse std::env;\nuse std::collections::BTreeMap;\n\nstruct Tape {\n pos: usize,\n tape: Vec<isize>\n}\n\nimpl Tape {\n fn new() -> Tape { Tape { pos: 0, tape: vec![0] } }\n fn get(&self) -> isize { self.tape[self.pos] }\n fn getc(&self) -> char { self.get() as u8 as char }\n fn inc(&mut self) { self.tape[self.pos] += 1; }\n fn dec(&mut self) { self.tape[self.pos] -= 1; }\n fn advance(&mut self) { self.pos += 1; if self.tape.len() <= self.pos { self.tape.push(0) } }\n fn devance(&mut self) { if self.pos > 0 { self.pos -= 1; } }\n}\n\nstruct Program {\n code: Vec<char>,\n bracket_map: BTreeMap<usize, usize>\n}\n\nimpl Program {\n fn new(content: String) -> Program {\n let mut code: Vec<char> = Vec::new();\n let mut bracket_map = BTreeMap::new();\n let mut leftstack = Vec::new();\n let mut pc = 0;\n\n for c in content.chars() {\n match c {\n '+' | '-' | '.' | ',' | '<' | '>' => (),\n '[' => { leftstack.push(pc); },\n ']' => match leftstack.pop() {\n Some(left) => { bracket_map.insert(left, pc); bracket_map.insert(pc, left); }\n None => ()\n },\n _ => { continue; }\n }\n code.push(c);\n pc += 1;\n }\n Program{ code: code, bracket_map: bracket_map }\n }\n\n fn run(&self) {\n let mut pc: usize = 0;\n let len = self.code.len();\n let mut tape = Tape::new();\n\n while pc < len {\n match self.code[pc] {\n '+' => tape.inc(),\n '-' => tape.dec(),\n '>' => tape.advance(),\n '<' => tape.devance(),\n '[' => { if tape.get() == 0 { pc = self.bracket_map[&pc]; } },\n ']' => { if tape.get() != 0 { pc = self.bracket_map[&pc]; } },\n '.' 
=> { print!(\"{}\", tape.getc()); io::stdout().flush().unwrap() },\n _ => ()\n }\n pc += 1;\n }\n }\n}\n\nfn main() {\n let arg1 = env::args().nth(1).unwrap();\n let path = Path::new(&arg1);\n let mut s = String::new();\n let mut file = File::open(&path).unwrap();\n file.read_to_string(&mut s).unwrap();\n Program::new(s).run()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Moved NumericLiteral structure into separate file.<commit_after>use std::fmt::{self, Debug, Display};\r\nuse std::ops::{Add, Mul};\r\n\r\n#[derive(Debug, PartialEq)]\r\npub enum NumericLiteral {\r\n Integer(i32),\r\n Float(f64)\r\n}\r\n\r\nimpl fmt::Display for NumericLiteral {\r\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\r\n match *self {\r\n NumericLiteral::Integer(i) => write!(f, \"{}\", i),\r\n NumericLiteral::Float(i) => write!(f, \"{}\", i),\r\n }\r\n }\r\n}\r\n\r\nimpl Add for NumericLiteral {\r\n type Output = NumericLiteral;\r\n\r\n fn add(self, other: NumericLiteral) -> NumericLiteral {\r\n match self {\r\n NumericLiteral::Integer(i) => {\r\n match other {\r\n NumericLiteral::Integer(j) => NumericLiteral::Integer(i + j),\r\n NumericLiteral::Float(j) => NumericLiteral::Float(i as f64 + j),\r\n }\r\n },\r\n NumericLiteral::Float(i) => {\r\n match other {\r\n NumericLiteral::Integer(j) => NumericLiteral::Float(i + j as f64),\r\n NumericLiteral::Float(j) => NumericLiteral::Float(i + j),\r\n }\r\n }\r\n }\r\n }\r\n}\r\n\r\nimpl Mul for NumericLiteral {\r\n type Output = NumericLiteral;\r\n\r\n fn mul(self, other: NumericLiteral) -> NumericLiteral {\r\n match self {\r\n NumericLiteral::Integer(i) => {\r\n match other {\r\n NumericLiteral::Integer(j) => NumericLiteral::Integer(i * j),\r\n NumericLiteral::Float(j) => NumericLiteral::Float(i as f64 * j),\r\n }\r\n },\r\n NumericLiteral::Float(i) => {\r\n match other {\r\n NumericLiteral::Integer(j) => NumericLiteral::Float(i * j as f64),\r\n NumericLiteral::Float(j) => NumericLiteral::Float(i * j),\r\n }\r\n }\r\n }\r\n }\r\n}\r\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for Asset struct with interpolated path<commit_after>#[macro_use]\nextern crate rust_embed;\n\n\/\/\/ Test doc comment\n#[derive(RustEmbed)]\n#[folder = \"$CARGO_MANFEST_ROOT\/examples\/public\/\"]\nstruct Asset;\n\n#[test]\nfn get_works() {\n match Asset::get(\"index.html\") {\n None => assert!(false, \"index.html should exist\"),\n _ => assert!(true),\n }\n match Asset::get(\"gg.html\") {\n Some(_) => assert!(false, \"gg.html should not exist\"),\n _ => assert!(true),\n }\n match Asset::get(\"images\/llama.png\") {\n None => assert!(false, \"llama.png should exist\"),\n _ => assert!(true),\n }\n}\n\n#[test]\nfn iter_works() {\n let mut num_files = 0;\n for file in Asset::iter() {\n assert!(Asset::get(file.as_ref()).is_some());\n num_files += 1;\n }\n assert_eq!(num_files, 6);\n}\n\n#[test]\nfn trait_works_generic() {\n trait_works_generic_helper::<Asset>();\n}\nfn trait_works_generic_helper<E: rust_embed::RustEmbed>() {\n let mut num_files = 0;\n for file in E::iter() {\n assert!(E::get(file.as_ref()).is_some());\n num_files += 1;\n }\n assert_eq!(num_files, 6);\n assert!(E::get(\"gg.html\").is_none(), \"gg.html should not exist\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #288 - christophebiocca:patch-1, r=Manishearth<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>macro repeat<commit_after>macro_rules! 
find_min {\n ($x:expr) => {$x};\n ($x:expr, $($y:expr),+ ) => {\n std::cmp::min($x, find_min!($($y),+))\n }\n}\n\nfn main() {\n println!(\"Min of 1: {}\", find_min!(1u32));\n println!(\"Min of 1, 25: {}\", find_min!(1u32, 25u32));\n println!(\"Min of 1, 3,5,0,2,4: {}\", find_min!(1, 3,5,0,2,4));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement plugin read and write attribute commands.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for block expressions of generic types. XFAILed<commit_after>\/\/ xfail-boot\n\/\/ xfail-stage0\n\/\/ -*- rust -*-\n\n\/\/ Tests for standalone blocks as expressions with dynamic type sizes\n\ntype compare[T] = fn(&T t1, &T t2) -> bool;\n\nfn test_generic[T](&T expected, &compare[T] eq) {\n let T actual = { expected };\n check (eq(expected, actual));\n}\n\nfn test_bool() {\n fn compare_bool(&bool b1, &bool b2) -> bool {\n ret b1 == b2;\n }\n auto eq = bind compare_bool(_, _);\n test_generic[bool](true, eq);\n}\n\n\nfn test_tup() {\n type t = tup(int, int);\n fn compare_tup(&t t1, &t t2) -> bool {\n ret t1 == t2;\n }\n auto eq = bind compare_tup(_, _);\n test_generic[t](tup(1, 2), eq);\n}\n\nfn test_vec() {\n fn compare_vec(&vec[int] v1, &vec[int] v2) -> bool {\n ret v1 == v2;\n }\n auto eq = bind compare_vec(_, _);\n test_generic[vec[int]](vec(1, 2), eq);\n}\n\nfn test_box() {\n fn compare_box(&@bool b1, &@bool b2) -> bool {\n ret *b1 == *b2;\n }\n auto eq = bind compare_box(_, _);\n test_generic[@bool](@true, eq);\n}\n\nfn main() {\n test_bool();\n test_tup();\n \/\/ FIXME: These two don't pass yet\n test_vec();\n test_box();\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! 
normal MIR semantics.\n\nuse rustc::ty::{TyCtxt, RegionKind};\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\n\npub struct AddValidation;\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ TODO: Consider using just the span covering the function argument declaration\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, local_decl)| (local_decl.ty, Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor because\n \/\/ we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new(); \/\/ Here we collect the destinations.\n let local_decls = mir.local_decls.clone(); \/\/ TODO: Find a way to get rid of this clone.\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. }, source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) => {\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n Some((ty, lval.clone()))\n },\n &Operand::Constant(..) => { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. }, source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. }, source_info }) => {\n \/\/ Before the call: Release all arguments\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release, vec![(ty, lval.clone())])\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let ty = dest_lval.ty(&local_decls, tcx).to_ty(tcx);\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, vec![(ty, dest_lval)])\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref. Again an iterator does not seem very suited as\n \/\/ we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. 
To this end, we iterate backwards\n \/\/ using indices.\n for i in (0..block_data.statements.len()).rev() {\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval, Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => continue,\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n let dest_ty = dest_lval.ty(&local_decls, tcx).to_ty(tcx);\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, vec![(dest_ty, dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let src_ty = src_lval.ty(&local_decls, tcx).to_ty(tcx);\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased => bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![(src_ty, src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n }\n }\n}\n<commit_msg>only emit Suspend validation for mutable paths<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{TyCtxt, RegionKind};\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\n\npub struct AddValidation;\n\n\nfn is_lvalue_shared<'a, 'tcx, D>(lval: &Lvalue<'tcx>, local_decls: &D, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Lvalue::*;\n\n match *lval {\n Local { .. } => false,\n Static(_) => true,\n Projection(ref proj) => {\n \/\/ If the base is shared, things stay shared\n if is_lvalue_shared(&proj.base, local_decls, tcx) {\n return true;\n }\n \/\/ A Deref projection may make things shared\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic. 
We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n !ty.is_mutable_pointer()\n }\n _ => false,\n }\n }\n }\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ TODO: Consider using just the span covering the function argument declaration\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, local_decl)| (local_decl.ty, Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor because\n \/\/ we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new(); \/\/ Here we collect the destinations.\n let local_decls = mir.local_decls.clone(); \/\/ TODO: Find a way to get rid of this clone.\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. }, source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) => {\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n Some((ty, lval.clone()))\n },\n &Operand::Constant(..) => { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. }, source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. }, source_info }) => {\n \/\/ Before the call: Release all arguments\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release, vec![(ty, lval.clone())])\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let ty = dest_lval.ty(&local_decls, tcx).to_ty(tcx);\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, vec![(ty, dest_lval)])\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref. Again an iterator does not seem very suited as\n \/\/ we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. 
To this end, we iterate backwards\n \/\/ using indices.\n for i in (0..block_data.statements.len()).rev() {\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval, Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => continue,\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n let dest_ty = dest_lval.ty(&local_decls, tcx).to_ty(tcx);\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, vec![(dest_ty, dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends -- but not if it is shared.\n if !is_lvalue_shared(&src_lval, &local_decls, tcx) {\n let src_ty = src_lval.ty(&local_decls, tcx).to_ty(tcx);\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased => bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![(src_ty, src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse deriving::generic::*;\nuse deriving::generic::ty::*;\n\nuse syntax::ast::{MetaItem, Expr, BinOpKind};\nuse syntax::codemap::Span;\nuse syntax::ext::base::{ExtCtxt, Annotatable};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token::InternedString;\nuse syntax::ptr::P;\n\npub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable))\n{\n \/\/ structures are equal if all fields are equal, and non equal, if\n \/\/ any fields are not equal or if the enum variants are different\n fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Eq, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::And, subexpr, eq)\n },\n cx.expr_bool(span, true),\n Box::new(|cx, span, _, _| cx.expr_bool(span, false)),\n cx, span, substr)\n }\n fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Ne, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::Or, subexpr, eq)\n },\n cx.expr_bool(span, false),\n Box::new(|cx, span, _, _| cx.expr_bool(span, true)),\n cx, span, substr)\n }\n\n 
macro_rules! md {\n ($name:expr, $f:ident) => { {\n let inline = cx.meta_word(span, InternedString::new(\"inline\"));\n let attrs = vec!(cx.attribute(span, inline));\n MethodDef {\n name: $name,\n generics: LifetimeBounds::empty(),\n explicit_self: borrowed_explicit_self(),\n args: vec!(borrowed_self()),\n ret_ty: Literal(path_local!(bool)),\n attributes: attrs,\n is_unsafe: false,\n combine_substructure: combine_substructure(Box::new(|a, b, c| {\n $f(a, b, c)\n }))\n }\n } }\n }\n\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: path_std!(cx, core::cmp::PartialEq),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n is_unsafe: false,\n methods: vec!(\n md!(\"eq\", cs_eq),\n md!(\"ne\", cs_ne)\n ),\n associated_types: Vec::new(),\n };\n trait_def.expand(cx, mitem, item, push)\n}\n<commit_msg>derive: Avoid emitting PartialEq::ne for c-like enums<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse deriving::generic::*;\nuse deriving::generic::ty::*;\n\nuse syntax::ast::{MetaItem, Expr, BinOpKind, ItemKind, VariantData};\nuse syntax::codemap::Span;\nuse syntax::ext::base::{ExtCtxt, Annotatable};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token::InternedString;\nuse syntax::ptr::P;\n\nfn is_clike_enum(item: &Annotatable) -> bool {\n match *item {\n Annotatable::Item(ref item) => {\n match item.node {\n ItemKind::Enum(ref enum_def, _) => {\n enum_def.variants.iter().all(|v|\n if let VariantData::Unit(..) = v.node.data {\n true\n } else {\n false\n }\n )\n }\n _ => false,\n }\n }\n _ => false,\n }\n}\n\npub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable))\n{\n \/\/ structures are equal if all fields are equal, and non equal, if\n \/\/ any fields are not equal or if the enum variants are different\n fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Eq, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::And, subexpr, eq)\n },\n cx.expr_bool(span, true),\n Box::new(|cx, span, _, _| cx.expr_bool(span, false)),\n cx, span, substr)\n }\n fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Ne, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::Or, subexpr, eq)\n },\n cx.expr_bool(span, false),\n Box::new(|cx, span, _, _| cx.expr_bool(span, true)),\n cx, span, substr)\n }\n\n macro_rules! 
md {\n ($name:expr, $f:ident) => { {\n let inline = cx.meta_word(span, InternedString::new(\"inline\"));\n let attrs = vec!(cx.attribute(span, inline));\n MethodDef {\n name: $name,\n generics: LifetimeBounds::empty(),\n explicit_self: borrowed_explicit_self(),\n args: vec!(borrowed_self()),\n ret_ty: Literal(path_local!(bool)),\n attributes: attrs,\n is_unsafe: false,\n combine_substructure: combine_substructure(Box::new(|a, b, c| {\n $f(a, b, c)\n }))\n }\n } }\n }\n\n \/\/ avoid defining `ne` if we can\n let mut methods = vec![md!(\"eq\", cs_eq)];\n if !is_clike_enum(item) {\n methods.push(md!(\"ne\", cs_ne));\n }\n\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: path_std!(cx, core::cmp::PartialEq),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n is_unsafe: false,\n methods: methods,\n associated_types: Vec::new(),\n };\n trait_def.expand(cx, mitem, item, push)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This code produces a CFG with critical edges that, if we don't\n\/\/ handle properly, will cause invalid codegen.\n\n#![feature(rustc_attrs)]\n\nenum State {\n Both,\n Front,\n Back\n}\n\npub struct Foo<A: Iterator, B: Iterator> {\n state: State,\n a: A,\n b: B\n}\n\nimpl<A, B> Foo<A, B>\nwhere A: Iterator, B: Iterator<Item=A::Item>\n{\n \/\/ This is the function we care about\n #[rustc_mir]\n fn next(&mut self) -> Option<A::Item> {\n match self.state {\n State::Both => match self.a.next() {\n elt @ Some(..) => elt,\n None => {\n self.state = State::Back;\n self.b.next()\n }\n },\n State::Front => self.a.next(),\n State::Back => self.b.next(),\n }\n }\n}\n\n\/\/ Make sure we actually translate a version of the function\npub fn do_stuff(mut f: Foo<Box<Iterator<Item=u32>>, Box<Iterator<Item=u32>>>) {\n let _x = f.next();\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added long_chain example<commit_after>extern crate fibe;\n\nfn test(wait: fibe::Wait) {\n let mut front = fibe::Frontend::new();\n let mut last = front.add(move || {}, vec![]);\n for i in 1..300000 {\n \tlast = front.add(move || {}, vec![last]);\n\t}\n front.die(wait);\n}\n\nfn main() {\n test(fibe::Wait::Pending);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>create an example for pull request files<commit_after>extern crate env_logger;\nextern crate futures;\nextern crate hubcaps;\nextern crate tokio_core;\n\nuse std::env;\n\nuse futures::Stream;\nuse tokio_core::reactor::Core;\n\nuse hubcaps::{Credentials, Github, Result};\n\nfn main() -> Result<()> {\n drop(env_logger::init());\n match env::var(\"GITHUB_TOKEN\").ok() {\n Some(token) => {\n let mut core = Core::new()?;\n let github = Github::new(\n concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\")),\n Credentials::Token(token),\n &core.handle(),\n );\n for diff in core.run(github.repo(\"rust-lang\", \"rust\").pulls().get(49536).files())? 
{\n println!(\"{:#?}\", diff);\n }\n Ok(())\n }\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix build<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Part 2: define a type to encapsulate query inputs.<commit_after>\/\/ Copyright 2016 Mozilla\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use\n\/\/ this file except in compliance with the License. You may obtain a copy of the\n\/\/ License at http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/ Unless required by applicable law or agreed to in writing, software distributed\n\/\/ under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR\n\/\/ CONDITIONS OF ANY KIND, either express or implied. See the License for the\n\/\/ specific language governing permissions and limitations under the License.\n\nuse std::collections::BTreeMap;\n\nuse mentat_core::{\n TypedValue,\n ValueType,\n};\n\nuse mentat_query::{\n Variable,\n};\n\npub use errors::{\n Error,\n ErrorKind,\n Result,\n};\n\n\/\/\/ Define the inputs to a query. This is in two parts: a set of values known now, and a set of\n\/\/\/ types known now.\n\/\/\/ The separate map of types is to allow queries to be algebrized without full knowledge of\n\/\/\/ the bindings that will be used at execution time.\n\/\/\/ When built correctly, `types` is guaranteed to contain the types of `values` -- use\n\/\/\/ `QueryInputs::new` or `QueryInputs::with_values` to construct an instance.\npub struct QueryInputs {\n pub types: BTreeMap<Variable, ValueType>,\n pub values: BTreeMap<Variable, TypedValue>,\n}\n\nimpl Default for QueryInputs {\n fn default() -> Self {\n QueryInputs { types: BTreeMap::default(), values: BTreeMap::default() }\n }\n}\n\nimpl QueryInputs {\n pub fn with_value_sequence(vals: Vec<(Variable, TypedValue)>) -> QueryInputs {\n let values: BTreeMap<Variable, TypedValue> = vals.into_iter().collect();\n QueryInputs::with_values(values)\n }\n\n pub fn with_values(values: BTreeMap<Variable, TypedValue>) -> QueryInputs {\n QueryInputs {\n types: values.iter().map(|(var, val)| (var.clone(), val.value_type())).collect(),\n values: values,\n }\n }\n\n pub fn new(mut types: BTreeMap<Variable, ValueType>,\n values: BTreeMap<Variable, TypedValue>) -> Result<QueryInputs> {\n \/\/ Make sure that the types of the values agree with those in types, and collect.\n for (var, v) in values.iter() {\n let t = v.value_type();\n let old = types.insert(var.clone(), t);\n if let Some(old) = old {\n if old != t {\n bail!(ErrorKind::InputTypeDisagreement(var.name(), old, t));\n }\n }\n }\n Ok(QueryInputs { types: types, values: values })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>refactor(backend\/xcb): refactor window_name method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for multiple main functions<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Base64 binary-to-text encoding\nuse std::str;\n\n\/\/\/ Available encoding character sets\npub enum CharacterSet {\n \/\/\/ The standard character set (uses `+` and `\/`)\n Standard,\n \/\/\/ The URL safe character set (uses `-` and `_`)\n UrlSafe\n}\n\n\/\/\/ Contains configuration parameters for `to_base64`.\npub struct Config {\n \/\/\/ Character set to use\n char_set: CharacterSet,\n \/\/\/ True to pad output with `=` characters\n pad: bool,\n \/\/\/ `Some(len)` to wrap lines at `len`, `None` to disable line wrapping\n line_length: Option<uint>\n}\n\n\/\/\/ Configuration for RFC 4648 standard base64 encoding\npub static STANDARD: Config =\n Config {char_set: Standard, pad: true, line_length: None};\n\n\/\/\/ Configuration for RFC 4648 base64url encoding\npub static URL_SAFE: Config =\n Config {char_set: UrlSafe, pad: false, line_length: None};\n\n\/\/\/ Configuration for RFC 2045 MIME base64 encoding\npub static MIME: Config =\n Config {char_set: Standard, pad: true, line_length: Some(76)};\n\nstatic STANDARD_CHARS: &'static[u8] = bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789+\/\");\n\nstatic URLSAFE_CHARS: &'static[u8] = bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789-_\");\n\n\/\/\/ A trait for converting a value to base64 encoding.\npub trait ToBase64 {\n \/\/\/ Converts the value of `self` to a base64 value following the specified\n \/\/\/ format configuration, returning the owned string.\n fn to_base64(&self, config: Config) -> ~str;\n}\n\nimpl<'a> ToBase64 for &'a [u8] {\n \/**\n * Turn a vector of `u8` bytes into a base64 string.\n *\n * # Example\n *\n * ```rust\n * extern mod extra;\n * use extra::base64::{ToBase64, STANDARD};\n *\n * fn main () {\n * let str = [52,32].to_base64(STANDARD);\n * println!(\"base 64 output: {}\", str);\n * }\n * ```\n *\/\n fn to_base64(&self, config: Config) -> ~str {\n let bytes = match config.char_set {\n Standard => STANDARD_CHARS,\n UrlSafe => URLSAFE_CHARS\n };\n\n let mut v: ~[u8] = ~[];\n let mut i = 0;\n let mut cur_length = 0;\n let len = self.len();\n while i < len - (len % 3) {\n match config.line_length {\n Some(line_length) =>\n if cur_length >= line_length {\n v.push('\\r' as u8);\n v.push('\\n' as u8);\n cur_length = 0;\n },\n None => ()\n }\n\n let n = (self[i] as u32) << 16 |\n (self[i + 1] as u32) << 8 |\n (self[i + 2] as u32);\n\n \/\/ This 24-bit number gets separated into four 6-bit numbers.\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n v.push(bytes[(n >> 6 ) & 63]);\n v.push(bytes[n & 63]);\n\n cur_length += 4;\n i += 3;\n }\n\n if len % 3 != 0 {\n match config.line_length {\n Some(line_length) =>\n if cur_length >= line_length {\n v.push('\\r' as u8);\n v.push('\\n' as u8);\n },\n None => ()\n }\n }\n\n \/\/ Heh, would be cool if we knew this was exhaustive\n \/\/ (the dream of bounded integer types)\n match len % 3 {\n 0 => (),\n 1 => {\n let n = (self[i] as u32) << 16;\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n if config.pad {\n v.push('=' 
as u8);\n v.push('=' as u8);\n }\n }\n 2 => {\n let n = (self[i] as u32) << 16 |\n (self[i + 1u] as u32) << 8;\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n v.push(bytes[(n >> 6 ) & 63]);\n if config.pad {\n v.push('=' as u8);\n }\n }\n _ => fail!(\"Algebra is broken, please alert the math police\")\n }\n\n unsafe {\n str::raw::from_utf8_owned(v)\n }\n }\n}\n\n\/\/\/ A trait for converting from base64 encoded values.\npub trait FromBase64 {\n \/\/\/ Converts the value of `self`, interpreted as base64 encoded data, into\n \/\/\/ an owned vector of bytes, returning the vector.\n fn from_base64(&self) -> Result<~[u8], FromBase64Error>;\n}\n\n\/\/\/ Errors that can occur when decoding a base64 encoded string\npub enum FromBase64Error {\n \/\/\/ The input contained a character not part of the base64 format\n InvalidBase64Character(char, uint),\n \/\/\/ The input had an invalid length\n InvalidBase64Length,\n}\n\nimpl ToStr for FromBase64Error {\n fn to_str(&self) -> ~str {\n match *self {\n InvalidBase64Character(ch, idx) =>\n format!(\"Invalid character '{}' at position {}\", ch, idx),\n InvalidBase64Length => ~\"Invalid length\",\n }\n }\n}\n\nimpl<'a> FromBase64 for &'a str {\n \/**\n * Convert any base64 encoded string (literal, `@`, `&`, or `~`)\n * to the byte values it encodes.\n *\n * You can use the `from_utf8_owned` function in `std::str`\n * to turn a `[u8]` into a string with characters corresponding to those\n * values.\n *\n * # Example\n *\n * This converts a string literal to base64 and back.\n *\n * ```rust\n * extern mod extra;\n * use extra::base64::{ToBase64, FromBase64, STANDARD};\n * use std::str;\n *\n * fn main () {\n * let hello_str = bytes!(\"Hello, World\").to_base64(STANDARD);\n * println!(\"base64 output: {}\", hello_str);\n * let res = hello_str.from_base64();\n * if res.is_ok() {\n * let optBytes = str::from_utf8_owned_opt(res.unwrap());\n * if optBytes.is_some() {\n * println!(\"decoded from base64: {}\", optBytes.unwrap());\n * }\n * }\n * }\n * ```\n *\/\n fn from_base64(&self) -> Result<~[u8], FromBase64Error> {\n let mut r = ~[];\n let mut buf: u32 = 0;\n let mut modulus = 0;\n\n let mut it = self.bytes().enumerate();\n for (idx, byte) in it {\n let val = byte as u32;\n\n match byte as char {\n 'A'..'Z' => buf |= val - 0x41,\n 'a'..'z' => buf |= val - 0x47,\n '0'..'9' => buf |= val + 0x04,\n '+'|'-' => buf |= 0x3E,\n '\/'|'_' => buf |= 0x3F,\n '\\r'|'\\n' => continue,\n '=' => break,\n _ => return Err(InvalidBase64Character(self.char_at(idx), idx)),\n }\n\n buf <<= 6;\n modulus += 1;\n if modulus == 4 {\n modulus = 0;\n r.push((buf >> 22) as u8);\n r.push((buf >> 14) as u8);\n r.push((buf >> 6 ) as u8);\n }\n }\n\n for (idx, byte) in it {\n if (byte as char) != '=' {\n return Err(InvalidBase64Character(self.char_at(idx), idx));\n }\n }\n\n match modulus {\n 2 => {\n r.push((buf >> 10) as u8);\n }\n 3 => {\n r.push((buf >> 16) as u8);\n r.push((buf >> 8 ) as u8);\n }\n 0 => (),\n _ => return Err(InvalidBase64Length),\n }\n\n Ok(r)\n }\n}\n\n#[cfg(test)]\nmod test {\n use test::BenchHarness;\n use base64::*;\n\n #[test]\n fn test_to_base64_basic() {\n assert_eq!(\"\".as_bytes().to_base64(STANDARD), ~\"\");\n assert_eq!(\"f\".as_bytes().to_base64(STANDARD), ~\"Zg==\");\n assert_eq!(\"fo\".as_bytes().to_base64(STANDARD), ~\"Zm8=\");\n assert_eq!(\"foo\".as_bytes().to_base64(STANDARD), ~\"Zm9v\");\n assert_eq!(\"foob\".as_bytes().to_base64(STANDARD), ~\"Zm9vYg==\");\n assert_eq!(\"fooba\".as_bytes().to_base64(STANDARD), ~\"Zm9vYmE=\");\n 
assert_eq!(\"foobar\".as_bytes().to_base64(STANDARD), ~\"Zm9vYmFy\");\n }\n\n #[test]\n fn test_to_base64_line_break() {\n assert!(![0u8, ..1000].to_base64(Config {line_length: None, ..STANDARD})\n .contains(\"\\r\\n\"));\n assert_eq!(\"foobar\".as_bytes().to_base64(Config {line_length: Some(4),\n ..STANDARD}),\n ~\"Zm9v\\r\\nYmFy\");\n }\n\n #[test]\n fn test_to_base64_padding() {\n assert_eq!(\"f\".as_bytes().to_base64(Config {pad: false, ..STANDARD}), ~\"Zg\");\n assert_eq!(\"fo\".as_bytes().to_base64(Config {pad: false, ..STANDARD}), ~\"Zm8\");\n }\n\n #[test]\n fn test_to_base64_url_safe() {\n assert_eq!([251, 255].to_base64(URL_SAFE), ~\"-_8\");\n assert_eq!([251, 255].to_base64(STANDARD), ~\"+\/8=\");\n }\n\n #[test]\n fn test_from_base64_basic() {\n assert_eq!(\"\".from_base64().unwrap(), \"\".as_bytes().to_owned());\n assert_eq!(\"Zg==\".from_base64().unwrap(), \"f\".as_bytes().to_owned());\n assert_eq!(\"Zm8=\".from_base64().unwrap(), \"fo\".as_bytes().to_owned());\n assert_eq!(\"Zm9v\".from_base64().unwrap(), \"foo\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYg==\".from_base64().unwrap(), \"foob\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYmE=\".from_base64().unwrap(), \"fooba\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYmFy\".from_base64().unwrap(), \"foobar\".as_bytes().to_owned());\n }\n\n #[test]\n fn test_from_base64_newlines() {\n assert_eq!(\"Zm9v\\r\\nYmFy\".from_base64().unwrap(),\n \"foobar\".as_bytes().to_owned());\n }\n\n #[test]\n fn test_from_base64_urlsafe() {\n assert_eq!(\"-_8\".from_base64().unwrap(), \"+\/8=\".from_base64().unwrap());\n }\n\n #[test]\n fn test_from_base64_invalid_char() {\n assert!(\"Zm$=\".from_base64().is_err())\n assert!(\"Zg==$\".from_base64().is_err());\n }\n\n #[test]\n fn test_from_base64_invalid_padding() {\n assert!(\"Z===\".from_base64().is_err());\n }\n\n #[test]\n fn test_base64_random() {\n use std::rand::{task_rng, random, Rng};\n use std::vec;\n\n 1000.times(|| {\n let times = task_rng().gen_range(1u, 100);\n let v = vec::from_fn(times, |_| random::<u8>());\n assert_eq!(v.to_base64(STANDARD).from_base64().unwrap(), v);\n })\n }\n\n #[bench]\n pub fn bench_to_base64(bh: & mut BenchHarness) {\n let s = \"イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \\\n ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン\";\n bh.iter(|| {\n s.as_bytes().to_base64(STANDARD);\n });\n bh.bytes = s.len() as u64;\n }\n\n #[bench]\n pub fn bench_from_base64(bh: & mut BenchHarness) {\n let s = \"イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \\\n ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン\";\n let b = s.as_bytes().to_base64(STANDARD);\n bh.iter(|| {\n b.from_base64();\n });\n bh.bytes = b.len() as u64;\n }\n\n}\n<commit_msg>auto merge of #11652 : hdima\/rust\/base64-padding-newlines, r=alexcrichton<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Base64 binary-to-text encoding\nuse std::str;\n\n\/\/\/ Available encoding character sets\npub enum CharacterSet {\n \/\/\/ The standard character set (uses `+` and `\/`)\n Standard,\n \/\/\/ The URL safe character set (uses `-` and `_`)\n UrlSafe\n}\n\n\/\/\/ Contains configuration parameters for `to_base64`.\npub struct Config {\n \/\/\/ Character set to use\n char_set: CharacterSet,\n \/\/\/ True to pad output with `=` characters\n pad: bool,\n \/\/\/ `Some(len)` to wrap lines at `len`, `None` to disable line wrapping\n line_length: Option<uint>\n}\n\n\/\/\/ Configuration for RFC 4648 standard base64 encoding\npub static STANDARD: Config =\n Config {char_set: Standard, pad: true, line_length: None};\n\n\/\/\/ Configuration for RFC 4648 base64url encoding\npub static URL_SAFE: Config =\n Config {char_set: UrlSafe, pad: false, line_length: None};\n\n\/\/\/ Configuration for RFC 2045 MIME base64 encoding\npub static MIME: Config =\n Config {char_set: Standard, pad: true, line_length: Some(76)};\n\nstatic STANDARD_CHARS: &'static[u8] = bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789+\/\");\n\nstatic URLSAFE_CHARS: &'static[u8] = bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789-_\");\n\n\/\/\/ A trait for converting a value to base64 encoding.\npub trait ToBase64 {\n \/\/\/ Converts the value of `self` to a base64 value following the specified\n \/\/\/ format configuration, returning the owned string.\n fn to_base64(&self, config: Config) -> ~str;\n}\n\nimpl<'a> ToBase64 for &'a [u8] {\n \/**\n * Turn a vector of `u8` bytes into a base64 string.\n *\n * # Example\n *\n * ```rust\n * extern mod extra;\n * use extra::base64::{ToBase64, STANDARD};\n *\n * fn main () {\n * let str = [52,32].to_base64(STANDARD);\n * println!(\"base 64 output: {}\", str);\n * }\n * ```\n *\/\n fn to_base64(&self, config: Config) -> ~str {\n let bytes = match config.char_set {\n Standard => STANDARD_CHARS,\n UrlSafe => URLSAFE_CHARS\n };\n\n let mut v: ~[u8] = ~[];\n let mut i = 0;\n let mut cur_length = 0;\n let len = self.len();\n while i < len - (len % 3) {\n match config.line_length {\n Some(line_length) =>\n if cur_length >= line_length {\n v.push('\\r' as u8);\n v.push('\\n' as u8);\n cur_length = 0;\n },\n None => ()\n }\n\n let n = (self[i] as u32) << 16 |\n (self[i + 1] as u32) << 8 |\n (self[i + 2] as u32);\n\n \/\/ This 24-bit number gets separated into four 6-bit numbers.\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n v.push(bytes[(n >> 6 ) & 63]);\n v.push(bytes[n & 63]);\n\n cur_length += 4;\n i += 3;\n }\n\n if len % 3 != 0 {\n match config.line_length {\n Some(line_length) =>\n if cur_length >= line_length {\n v.push('\\r' as u8);\n v.push('\\n' as u8);\n },\n None => ()\n }\n }\n\n \/\/ Heh, would be cool if we knew this was exhaustive\n \/\/ (the dream of bounded integer types)\n match len % 3 {\n 0 => (),\n 1 => {\n let n = (self[i] as u32) << 16;\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n if config.pad {\n v.push('=' as u8);\n v.push('=' as u8);\n }\n }\n 2 => {\n let n = (self[i] as u32) << 16 |\n (self[i + 1u] as u32) << 8;\n v.push(bytes[(n >> 18) & 63]);\n v.push(bytes[(n >> 12) & 63]);\n v.push(bytes[(n >> 6 ) & 63]);\n if config.pad {\n v.push('=' as u8);\n }\n }\n _ => fail!(\"Algebra is broken, please alert the math police\")\n }\n\n unsafe {\n str::raw::from_utf8_owned(v)\n }\n }\n}\n\n\/\/\/ A trait for converting from base64 encoded values.\npub trait FromBase64 
{\n \/\/\/ Converts the value of `self`, interpreted as base64 encoded data, into\n \/\/\/ an owned vector of bytes, returning the vector.\n fn from_base64(&self) -> Result<~[u8], FromBase64Error>;\n}\n\n\/\/\/ Errors that can occur when decoding a base64 encoded string\npub enum FromBase64Error {\n \/\/\/ The input contained a character not part of the base64 format\n InvalidBase64Character(char, uint),\n \/\/\/ The input had an invalid length\n InvalidBase64Length,\n}\n\nimpl ToStr for FromBase64Error {\n fn to_str(&self) -> ~str {\n match *self {\n InvalidBase64Character(ch, idx) =>\n format!(\"Invalid character '{}' at position {}\", ch, idx),\n InvalidBase64Length => ~\"Invalid length\",\n }\n }\n}\n\nimpl<'a> FromBase64 for &'a str {\n \/**\n * Convert any base64 encoded string (literal, `@`, `&`, or `~`)\n * to the byte values it encodes.\n *\n * You can use the `from_utf8_owned` function in `std::str`\n * to turn a `[u8]` into a string with characters corresponding to those\n * values.\n *\n * # Example\n *\n * This converts a string literal to base64 and back.\n *\n * ```rust\n * extern mod extra;\n * use extra::base64::{ToBase64, FromBase64, STANDARD};\n * use std::str;\n *\n * fn main () {\n * let hello_str = bytes!(\"Hello, World\").to_base64(STANDARD);\n * println!(\"base64 output: {}\", hello_str);\n * let res = hello_str.from_base64();\n * if res.is_ok() {\n * let optBytes = str::from_utf8_owned_opt(res.unwrap());\n * if optBytes.is_some() {\n * println!(\"decoded from base64: {}\", optBytes.unwrap());\n * }\n * }\n * }\n * ```\n *\/\n fn from_base64(&self) -> Result<~[u8], FromBase64Error> {\n let mut r = ~[];\n let mut buf: u32 = 0;\n let mut modulus = 0;\n\n let mut it = self.bytes().enumerate();\n for (idx, byte) in it {\n let val = byte as u32;\n\n match byte as char {\n 'A'..'Z' => buf |= val - 0x41,\n 'a'..'z' => buf |= val - 0x47,\n '0'..'9' => buf |= val + 0x04,\n '+'|'-' => buf |= 0x3E,\n '\/'|'_' => buf |= 0x3F,\n '\\r'|'\\n' => continue,\n '=' => break,\n _ => return Err(InvalidBase64Character(self.char_at(idx), idx)),\n }\n\n buf <<= 6;\n modulus += 1;\n if modulus == 4 {\n modulus = 0;\n r.push((buf >> 22) as u8);\n r.push((buf >> 14) as u8);\n r.push((buf >> 6 ) as u8);\n }\n }\n\n for (idx, byte) in it {\n match byte as char {\n '='|'\\r'|'\\n' => continue,\n _ => return Err(InvalidBase64Character(self.char_at(idx), idx)),\n }\n }\n\n match modulus {\n 2 => {\n r.push((buf >> 10) as u8);\n }\n 3 => {\n r.push((buf >> 16) as u8);\n r.push((buf >> 8 ) as u8);\n }\n 0 => (),\n _ => return Err(InvalidBase64Length),\n }\n\n Ok(r)\n }\n}\n\n#[cfg(test)]\nmod test {\n use test::BenchHarness;\n use base64::*;\n\n #[test]\n fn test_to_base64_basic() {\n assert_eq!(\"\".as_bytes().to_base64(STANDARD), ~\"\");\n assert_eq!(\"f\".as_bytes().to_base64(STANDARD), ~\"Zg==\");\n assert_eq!(\"fo\".as_bytes().to_base64(STANDARD), ~\"Zm8=\");\n assert_eq!(\"foo\".as_bytes().to_base64(STANDARD), ~\"Zm9v\");\n assert_eq!(\"foob\".as_bytes().to_base64(STANDARD), ~\"Zm9vYg==\");\n assert_eq!(\"fooba\".as_bytes().to_base64(STANDARD), ~\"Zm9vYmE=\");\n assert_eq!(\"foobar\".as_bytes().to_base64(STANDARD), ~\"Zm9vYmFy\");\n }\n\n #[test]\n fn test_to_base64_line_break() {\n assert!(![0u8, ..1000].to_base64(Config {line_length: None, ..STANDARD})\n .contains(\"\\r\\n\"));\n assert_eq!(\"foobar\".as_bytes().to_base64(Config {line_length: Some(4),\n ..STANDARD}),\n ~\"Zm9v\\r\\nYmFy\");\n }\n\n #[test]\n fn test_to_base64_padding() {\n assert_eq!(\"f\".as_bytes().to_base64(Config {pad: 
false, ..STANDARD}), ~\"Zg\");\n assert_eq!(\"fo\".as_bytes().to_base64(Config {pad: false, ..STANDARD}), ~\"Zm8\");\n }\n\n #[test]\n fn test_to_base64_url_safe() {\n assert_eq!([251, 255].to_base64(URL_SAFE), ~\"-_8\");\n assert_eq!([251, 255].to_base64(STANDARD), ~\"+\/8=\");\n }\n\n #[test]\n fn test_from_base64_basic() {\n assert_eq!(\"\".from_base64().unwrap(), \"\".as_bytes().to_owned());\n assert_eq!(\"Zg==\".from_base64().unwrap(), \"f\".as_bytes().to_owned());\n assert_eq!(\"Zm8=\".from_base64().unwrap(), \"fo\".as_bytes().to_owned());\n assert_eq!(\"Zm9v\".from_base64().unwrap(), \"foo\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYg==\".from_base64().unwrap(), \"foob\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYmE=\".from_base64().unwrap(), \"fooba\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYmFy\".from_base64().unwrap(), \"foobar\".as_bytes().to_owned());\n }\n\n #[test]\n fn test_from_base64_newlines() {\n assert_eq!(\"Zm9v\\r\\nYmFy\".from_base64().unwrap(),\n \"foobar\".as_bytes().to_owned());\n assert_eq!(\"Zm9vYg==\\r\\n\".from_base64().unwrap(),\n \"foob\".as_bytes().to_owned());\n }\n\n #[test]\n fn test_from_base64_urlsafe() {\n assert_eq!(\"-_8\".from_base64().unwrap(), \"+\/8=\".from_base64().unwrap());\n }\n\n #[test]\n fn test_from_base64_invalid_char() {\n assert!(\"Zm$=\".from_base64().is_err())\n assert!(\"Zg==$\".from_base64().is_err());\n }\n\n #[test]\n fn test_from_base64_invalid_padding() {\n assert!(\"Z===\".from_base64().is_err());\n }\n\n #[test]\n fn test_base64_random() {\n use std::rand::{task_rng, random, Rng};\n use std::vec;\n\n 1000.times(|| {\n let times = task_rng().gen_range(1u, 100);\n let v = vec::from_fn(times, |_| random::<u8>());\n assert_eq!(v.to_base64(STANDARD).from_base64().unwrap(), v);\n })\n }\n\n #[bench]\n pub fn bench_to_base64(bh: & mut BenchHarness) {\n let s = \"イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \\\n ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン\";\n bh.iter(|| {\n s.as_bytes().to_base64(STANDARD);\n });\n bh.bytes = s.len() as u64;\n }\n\n #[bench]\n pub fn bench_from_base64(bh: & mut BenchHarness) {\n let s = \"イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \\\n ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン\";\n let b = s.as_bytes().to_base64(STANDARD);\n bh.iter(|| {\n b.from_base64();\n });\n bh.bytes = b.len() as u64;\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added negative test for macro expansion in attributes<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[doc = $not_there] \/\/~ error: unexpected token: `$`\nfn main() { }\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ #30527 - We were not generating arms with guards in certain cases.\n\n#![feature(rustc_attrs)]\n\n#[rustc_mir]\nfn match_with_guard(x: Option<i8>) -> i8 {\n match x {\n Some(xyz) if xyz > 100 => 0,\n Some(_) => -1,\n None => -2\n }\n}\n\nfn main() {\n assert_eq!(match_with_guard(Some(111)), 0);\n assert_eq!(match_with_guard(Some(2)), -1);\n assert_eq!(match_with_guard(None), -2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add has_command_any function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a couple of issues in from_utf8 docs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock.\n\/\/\/\n\/\/\/ Instants are always guaranteed to be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words, each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. There is\n\/\/\/ no method to get \"the number of seconds\" from an instant. Instead, it only\n\/\/\/ allows measuring the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps such as those\n\/\/\/ on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. 
In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. By calculating the duration from this\n\/\/\/ fixed point in time, a `SystemTime` can be converted to a human-readable time,\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this\n \/\/\/ instant, which is something that can happen if an `Instant` is\n \/\/\/ produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or 
backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second duration, `earlier`, actually represents a\n \/\/\/ point later in time than the `self` of the method call. 
This function\n \/\/\/ will extract and return the amount of time later `earlier` actually is.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 1) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<commit_msg>doc: fix type name<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock.\n\/\/\/\n\/\/\/ Instants are always guaranteed to be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words, each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. There is\n\/\/\/ no method to get \"the number of seconds\" from an instant. Instead, it only\n\/\/\/ allows measuring the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps such as those\n\/\/\/ on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. 
By calculating the duration from this\n\/\/\/ fixed point in time, a `SystemTime` can be converted to a human-readable time,\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this\n \/\/\/ instant, which is something that can happen if an `Instant` is\n \/\/\/ produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(Duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may 
fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second duration, `earlier`, actually represents a\n \/\/\/ point later in time than the `self` of the method call. This function\n \/\/\/ will extract and return the amount of time later `earlier` actually is.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! 
assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 1) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\n\nuse glob::glob;\n\nuse super::file::FileID;\nuse super::file::File;\nuse module::Module;\n\ntype BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend<'a> {\n basepath: String,\n module: &'a Module,\n}\n\nimpl<'a> StorageBackend<'a> {\n\n fn new(basepath: String, module: &'a Module) -> StorageBackend<'a> {\n StorageBackend {\n basepath: basepath,\n module: module,\n }\n }\n\n fn getFileList(&self) -> Option<Vec<(Path, FileID)>> {\n let files: Vec<&Path> = glob(&self.basepath[..]);\n files.map(|path| (path, file_id_from_path(path))).unwrap_or(None)\n }\n\n fn createEmpty() -> FileID {\n }\n\n fn createFile() -> File {\n }\n\n fn writeFile(f: File) -> BackendOperationResult {\n }\n\n fn createFileWithContent(content: String) -> BackendOperationResult {\n }\n\n fn readFile(id: FileID) -> String {\n }\n\n \/\/ TODO: Meta files are not covered yet\n\n}\n\nfn file_id_from_path(p: &Path) -> String {\n 
String::from(\"\")\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: &'static str,\n desc : &'static str,\n explan: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n explanation: String::from(explan),\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<commit_msg>Implement getFileList()<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::{File, FileID};\nuse module::Module;\n\ntype BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend<'a> {\n basepath: String,\n module: &'a Module,\n}\n\nimpl<'a> StorageBackend<'a> {\n\n fn new(basepath: String, module: &'a Module) -> StorageBackend<'a> {\n StorageBackend {\n basepath: basepath,\n module: module,\n }\n }\n\n fn getFileList(&self) -> Option<Vec<FileID>> {\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n v.push(file_id_from_path(path.as_path()));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n fn createEmpty() -> FileID {\n }\n\n fn createFile() -> File {\n }\n\n fn writeFile(f: File) -> BackendOperationResult {\n }\n\n fn createFileWithContent(content: String) -> BackendOperationResult {\n }\n\n fn readFile(id: FileID) -> String {\n }\n\n \/\/ TODO: Meta files are not covered yet\n\n}\n\nfn file_id_from_path(p: &Path) -> String {\n String::from(\"\")\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: &'static str,\n desc : &'static str,\n explan: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n explanation: String::from(explan),\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\nuse std::result::Result;\nuse std::path::{Path, PathBuf};\nuse std::convert::From;\nuse std::convert::Into;\n\n#[derive(Debug)]\n#[derive(Clone)]\n\/\/ #[derive(Display)]\npub enum 
FileIDType {\n UUID,\n}\n\n#[derive(Clone)]\npub struct FileID {\n id: Option<String>,\n id_type: FileIDType,\n}\n\nimpl FileID {\n\n pub fn new(id_type: FileIDType, id: String) -> FileID {\n FileID {\n id: Some(id),\n id_type: id_type,\n }\n }\n\n pub fn is_valid(&self) -> bool {\n self.id.is_some()\n }\n\n}\n\nimpl Debug for FileID {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileID[{:?}]: {:?}\",\n self.id_type,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Display for FileID {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileID[{:?}]: {:?}\",\n self.id_type,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Into<String> for FileID {\n\n fn into(self) -> String {\n if let Some(id) = self.id {\n id.clone()\n } else {\n String::from(\"INVALID\")\n }\n }\n\n}\n\nimpl From<String> for FileID {\n\n fn from(s: String) -> FileID {\n unimplemented!()\n }\n\n}\n\nimpl<'a> From<&'a String> for FileID {\n\n fn from(s: &'a String) -> FileID {\n unimplemented!()\n }\n\n}\n\nimpl From<PathBuf> for FileID {\n\n fn from(s: PathBuf) -> FileID {\n unimplemented!()\n }\n\n}\n\nimpl<'a> From<&'a PathBuf> for FileID {\n\n fn from(s: &'a PathBuf) -> FileID {\n unimplemented!()\n }\n\n}\n\npub struct FileIDError {\n summary: String,\n descrip: String,\n}\n\nimpl FileIDError {\n\n pub fn new(s: String, d: String) -> FileIDError {\n FileIDError {\n summary: s,\n descrip: d,\n }\n }\n\n}\n\nimpl<'a> Error for FileIDError {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl<'a> Debug for FileIDError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileIDError: '{}'\\n{}\", self.summary, self.descrip);\n Ok(())\n }\n\n}\n\nimpl<'a> Display for FileIDError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileIDError: '{}'\", self.summary);\n Ok(())\n }\n\n}\n\npub type FileIDResult = Result<FileID, FileIDError>;\n\n#[cfg(test)]\nmod test {\n\n use super::{FileID, FileIDType};\n\n #[test]\n fn file_id_from_string() {\n setup_logger();\n\n let s1 = String::from(\"\/home\/user\/testmodule-UUID-some-id.imag\");\n let s2 = String::from(\"\/home\/user\/testmodule-UUID-some-id.extension.imag\");\n let s3 = String::from(\"\/home\/user\/testmodule-NOHASH-some-id.imag\");\n\n let id1 = FileID::from(s1);\n let id2 = FileID::from(s2);\n let id3 = FileID::from(s3);\n\n println!(\"Id 1 : {:?}\", id1);\n println!(\"Id 2 : {:?}\", id2);\n println!(\"Id 3 : {:?}\", id3);\n\n assert_eq!(FileIDType::UUID, id1.get_type());\n assert_eq!(FileIDType::UUID, id2.get_type());\n assert_eq!(FileIDType::NONE, id3.get_type());\n\n let f1 : String = id1.into();\n let f2 : String = id2.into();\n let f3 : String = id3.into();\n\n assert_eq!(String::from(\"some-id\"), f1);\n assert_eq!(String::from(\"some-id\"), f2);\n assert_eq!(String::from(\"INVALID\"), f3);\n }\n\n fn setup_logger() {\n extern crate log;\n use log::{LogLevelFilter, set_logger};\n use runtime::ImagLogger;\n\n log::set_logger(|max_log_lvl| {\n let lvl = LogLevelFilter::Debug;\n max_log_lvl.set(lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))\n });\n debug!(\"Init logger for test\");\n }\n\n}\n\n<commit_msg>Implement: FileID::from(&String), FileID::from(String)<commit_after>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\nuse std::result::Result;\nuse std::path::{Path, PathBuf};\nuse std::convert::From;\nuse std::convert::Into;\n\nuse 
regex::Regex;\n\n#[derive(Debug)]\n#[derive(Clone)]\n#[derive(PartialEq)]\n#[derive(Eq)]\n\/\/ #[derive(Display)]\npub enum FileIDType {\n NONE,\n UUID,\n}\n\n#[derive(Clone)]\npub struct FileID {\n id: Option<String>,\n id_type: FileIDType,\n}\n\nimpl FileID {\n\n pub fn new(id_type: FileIDType, id: String) -> FileID {\n FileID {\n id: Some(id),\n id_type: id_type,\n }\n }\n\n pub fn is_valid(&self) -> bool {\n self.id.is_some()\n }\n\n pub fn get_type(&self) -> FileIDType {\n self.id_type.clone()\n }\n\n}\n\nimpl Debug for FileID {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileID[{:?}]: {:?}\",\n self.id_type,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Display for FileID {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileID[{:?}]: {:?}\",\n self.id_type,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Into<String> for FileID {\n\n fn into(self) -> String {\n if let Some(id) = self.id {\n id.clone()\n } else {\n String::from(\"INVALID\")\n }\n }\n\n}\n\nimpl From<String> for FileID {\n\n fn from(s: String) -> FileID {\n FileID::from(&s)\n }\n\n}\n\nimpl<'a> From<&'a String> for FileID {\n\n fn from(string: &'a String) -> FileID {\n\n let regex = Regex::new(r\"([:alnum:]*)-([:upper:]*)-([A-Za-z0-9-_]*)\\.(.*)\").unwrap();\n let s = string.split(\"\/\").last().unwrap_or(\"\");\n\n debug!(\"Regex build: {:?}\", regex);\n debug!(\"Matching string: '{}'\", s);\n regex.captures(s).and_then(|capts| {\n \/\/ first one is the whole string, index 1-N are the matches.\n if capts.len() != 5 {\n debug!(\"Matches, but not expected number of groups\");\n return None;\n }\n debug!(\"Matches: {}\", capts.len());\n\n let modname = capts.at(1).unwrap();\n let hashname = capts.at(2).unwrap();\n let mut hash = capts.at(3).unwrap();\n\n debug!(\"Destructure FilePath to ID:\");\n debug!(\" FilePath: {:?}\", s);\n debug!(\" Module Name: {:?}\", modname);\n debug!(\" Hash Name: {:?}\", hashname);\n debug!(\" Hash: {:?}\", hash);\n\n let idtype = select_id_type_from_str(hashname);\n match idtype {\n FileIDType::NONE => hash = \"INVALID\",\n _ => {},\n }\n\n Some(FileID::new(idtype, String::from(hash)))\n }).unwrap_or({\n debug!(\"Did not match\");\n FileID {\n id_type: FileIDType::NONE,\n id: None,\n }\n })\n }\n\n}\n\nimpl From<PathBuf> for FileID {\n\n fn from(s: PathBuf) -> FileID {\n unimplemented!()\n }\n\n}\n\nimpl<'a> From<&'a PathBuf> for FileID {\n\n fn from(s: &'a PathBuf) -> FileID {\n unimplemented!()\n }\n\n}\n\npub struct FileIDError {\n summary: String,\n descrip: String,\n}\n\nimpl FileIDError {\n\n pub fn new(s: String, d: String) -> FileIDError {\n FileIDError {\n summary: s,\n descrip: d,\n }\n }\n\n}\n\nimpl<'a> Error for FileIDError {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl<'a> Debug for FileIDError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileIDError: '{}'\\n{}\", self.summary, self.descrip);\n Ok(())\n }\n\n}\n\nimpl<'a> Display for FileIDError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"FileIDError: '{}'\", self.summary);\n Ok(())\n }\n\n}\n\nfn select_id_type_from_str(s: &str) -> FileIDType {\n match s {\n \"UUID\" => FileIDType::UUID,\n _ => FileIDType::NONE,\n }\n}\n\npub type FileIDResult = Result<FileID, FileIDError>;\n\n#[cfg(test)]\nmod test {\n\n use super::{FileID, FileIDType};\n\n #[test]\n fn file_id_from_string() {\n setup_logger();\n\n let s1 = 
String::from(\"\/home\/user\/testmodule-UUID-some-id.imag\");\n let s2 = String::from(\"\/home\/user\/testmodule-UUID-some-id.extension.imag\");\n let s3 = String::from(\"\/home\/user\/testmodule-NOHASH-some-id.imag\");\n\n let id1 = FileID::from(s1);\n let id2 = FileID::from(s2);\n let id3 = FileID::from(s3);\n\n println!(\"Id 1 : {:?}\", id1);\n println!(\"Id 2 : {:?}\", id2);\n println!(\"Id 3 : {:?}\", id3);\n\n assert_eq!(FileIDType::UUID, id1.get_type());\n assert_eq!(FileIDType::UUID, id2.get_type());\n assert_eq!(FileIDType::NONE, id3.get_type());\n\n let f1 : String = id1.into();\n let f2 : String = id2.into();\n let f3 : String = id3.into();\n\n assert_eq!(String::from(\"some-id\"), f1);\n assert_eq!(String::from(\"some-id\"), f2);\n assert_eq!(String::from(\"INVALID\"), f3);\n }\n\n fn setup_logger() {\n extern crate log;\n use log::{LogLevelFilter, set_logger};\n use runtime::ImagLogger;\n\n log::set_logger(|max_log_lvl| {\n let lvl = LogLevelFilter::Debug;\n max_log_lvl.set(lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))\n });\n debug!(\"Init logger for test\");\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>EHCI works with touchscreen<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Get to the magical device descriptor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test for missing_doc in the external_doc test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add codegen test that makes sure PGO instrumentation is emitted as expected.<commit_after>\/\/ Test that `-Zpgo-gen` creates expected instrumentation artifacts in LLVM IR.\n\n\/\/ needs-profiler-support\n\/\/ compile-flags: -Z pgo-gen -Ccodegen-units=1\n\n\/\/ CHECK: @__llvm_profile_raw_version =\n\/\/ CHECK: @__profc_{{.*}}pgo_instrumentation{{.*}}some_function{{.*}} = private global\n\/\/ CHECK: @__profd_{{.*}}pgo_instrumentation{{.*}}some_function{{.*}} = private global\n\/\/ CHECK: @__profc_{{.*}}pgo_instrumentation{{.*}}main{{.*}} = private global\n\/\/ CHECK: @__profd_{{.*}}pgo_instrumentation{{.*}}main{{.*}} = private global\n\/\/ CHECK: @__llvm_profile_filename = {{.*}}\"default_%m.profraw\\00\"{{.*}}\n\n#[inline(never)]\nfn some_function() {\n\n}\n\nfn main() {\n some_function();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #10618 and #16382<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ issues #10618 and #16382\nstatic SIZE: int = 25;\n\nfn main() {\n let _a: [bool, ..1 as uint];\n let _b: [int, ..SIZE as uint] = [1, ..SIZE as uint];\n let _c: [bool, ..'\\n' as uint] = [true, ..'\\n' as uint];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add details of renderer type and antialiasing on the OSD<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix item write pointer.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix strings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added string example<commit_after>fn main() {\n let mystr1: &'static str = \"This is a readonly string\";\n \n for c in mystr1.chars().rev() {\n print!(\"{}\", c);\n } \n \n println!(\"\");\n \n let mut mystr2 = String::new();\n for c in mystr1.chars() {\n mystr2.push(c);\n }\n \n println!(\"{}\", mystr2);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Introduce BlockLevelJob<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>embedding: address various review nitpicks for string_map<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adapt to new libimagstore::iter::Entries API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a frame buffer for async reading.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don't panic on read halfword from timer control reg<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test for check<commit_after>#[cfg(test)]\nmod tests {\n use std::process::Command;\n\n fn build_check() {\n Command::new(\"cargo\").arg(\"build\").status().unwrap_or_else(|e| {\n panic!(\"failed to execute process: {}\", e)\n });\n }\n\n #[test]\n fn it_outputs_empty_array() {\n build_check();\n\n let output = Command::new(\".\/target\/debug\/check\").output().unwrap_or_else(|e| {\n panic!(\"failed to execute process: {}\", e)\n });\n\n assert_eq!(String::from_utf8_lossy(&output.stdout), \"[]\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Script for printing expected attack damage table<commit_after>extern crate rand;\nextern crate calx_alg;\nextern crate world;\n\nuse std::io::Write;\nuse world::{roll, attack_damage};\n\nfn ev<F>(n: usize, f: F) -> f32\nwhere\n F: Fn(&mut rand::ThreadRng) -> f32,\n{\n let mut acc = 0.0;\n let mut rng = rand::thread_rng();\n for _ in 0..n {\n acc += f(&mut rng);\n }\n\n acc \/ n as f32\n}\n\nfn expected_dmg(advantage: i32) -> f32 {\n const REPEAT_ROLLS: usize = 1000000;\n\n ev(REPEAT_ROLLS, |rng| {\n let roll = roll(rng);\n let dmg = attack_damage(roll, advantage, 100);\n dmg as f32 \/ 100.0\n })\n}\n\nfn main() {\n print!(\" \");\n for one in 0..10 {\n print!(\" 0{}\", one);\n }\n println!(\"\");\n\n for tens in -3..10 {\n print!(\"{:>3}0 \", tens);\n for ones in 0..10 {\n let n = tens * 10 + ones;\n print!(\"{:.3} \", expected_dmg(n));\n ::std::io::stdout().flush();\n }\n println!(\"\");\n }\n let e = ev(1000000, |rng| {\n let roll = roll(rng);\n let dmg = attack_damage(roll, 0, 100);\n dmg as f32 \/ 100.0\n });\n println!(\"Hello, world!\");\n println!(\"Expected dmg: {}\", e);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::result::Result as RResult;\nuse std::string::String;\n\npub use entry::Entry;\npub use error::StoreError;\npub use single_use_lock::SingleUseLock;\n\npub type Result<T> = RResult<T, StoreError>;\npub type LockedEntry = SingleUseLock<Entry>;\n\npub trait Store {\n fn 
create(&self, entry : Entry) -> Result<()>;\n fn retrieve(&self, id : String) -> Result<LockedEntry>;\n fn retrieve_copy(&self, id : String) -> Result<Entry>;\n fn update(&self, LockedEntry) -> Result<()>;\n fn delete(&self, id : String) -> Result<()>;\n}\n\n<commit_msg>Allow to update an item multiple times<commit_after>use std::result::Result as RResult;\nuse std::string::String;\n\npub use entry::Entry;\npub use error::StoreError;\npub use single_use_lock::SingleUseLock;\n\npub type Result<T> = RResult<T, StoreError>;\npub type LockedEntry = SingleUseLock<Entry>;\n\npub trait Store {\n fn create(&self, entry : Entry) -> Result<()>;\n fn retrieve(&self, id : String) -> Result<LockedEntry>;\n fn retrieve_copy(&self, id : String) -> Result<Entry>;\n fn update(&self, &LockedEntry) -> Result<()>;\n fn delete(&self, id : String) -> Result<()>;\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Cleaned up dice module<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The GLFW-RS Developers. For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[cfg(feature = \"glfw-sys\")]\n#[link(name = \"glfw3\", kind = \"static\")]\nextern {}\n\n#[cfg(not(feature = \"glfw-sys\"))]\n\/\/ leaving off `kind = static` allows for the specification of a dynamic library if desired\n#[link(name = \"glfw3\")]\nextern {}\n\n#[cfg(target_os=\"windows\")]\n#[link(name = \"opengl32\")]\n#[link(name = \"gdi32\")]\nextern {}\n\n#[cfg(target_os=\"linux\")]\n#[link(name = \"X11\")]\n#[link(name = \"GL\")]\n#[link(name = \"Xxf86vm\")]\n#[link(name = \"Xrandr\")]\n#[link(name = \"Xi\")]\n#[link(name = \"Xcursor\")]\n#[link(name = \"Xinerama\")]\nextern {}\n\n#[cfg(target_os=\"macos\")]\n#[link(name = \"Cocoa\", kind = \"framework\")]\n#[link(name = \"OpenGL\", kind = \"framework\")]\n#[link(name = \"IOKit\", kind = \"framework\")]\n#[link(name = \"CoreFoundation\", kind = \"framework\")]\n#[link(name = \"QuartzCore\", kind = \"framework\")]\nextern {}\n<commit_msg>Also link to libraries on other systems<commit_after>\/\/ Copyright 2014 The GLFW-RS Developers. 
For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[cfg(feature = \"glfw-sys\")]\n#[link(name = \"glfw3\", kind = \"static\")]\nextern {}\n\n#[cfg(not(feature = \"glfw-sys\"))]\n\/\/ leaving off `kind = static` allows for the specification of a dynamic library if desired\n#[link(name = \"glfw3\")]\nextern {}\n\n#[cfg(target_os=\"windows\")]\n#[link(name = \"opengl32\")]\n#[link(name = \"gdi32\")]\nextern {}\n\n#[cfg(any(target_os=\"linux\", target_os=\"freebsd\", target_os=\"dragonfly\"))]\n#[link(name = \"X11\")]\n#[link(name = \"GL\")]\n#[link(name = \"Xxf86vm\")]\n#[link(name = \"Xrandr\")]\n#[link(name = \"Xi\")]\n#[link(name = \"Xcursor\")]\n#[link(name = \"Xinerama\")]\nextern {}\n\n#[cfg(target_os=\"macos\")]\n#[link(name = \"Cocoa\", kind = \"framework\")]\n#[link(name = \"OpenGL\", kind = \"framework\")]\n#[link(name = \"IOKit\", kind = \"framework\")]\n#[link(name = \"CoreFoundation\", kind = \"framework\")]\n#[link(name = \"QuartzCore\", kind = \"framework\")]\nextern {}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n#![feature(alloc)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core)]\n#![feature(exit_status)]\n#![feature(hash)]\n#![feature(int_uint)]\n#![feature(io)]\n#![feature(old_io)]\n#![feature(optin_builtin_traits)]\n#![feature(page_size)]\n#![feature(path)]\n#![feature(path_ext)]\n#![feature(plugin)]\n#![feature(rustc_private)]\n#![feature(std_misc)]\n#![feature(str_words)]\n#![feature(unicode)]\n#![feature(unsafe_destructor)]\n\n#![plugin(string_cache_plugin)]\n\n#[macro_use] extern crate log;\n\nextern crate alloc;\n#[macro_use] extern crate bitflags;\nextern crate collections;\nextern crate cssparser;\nextern crate geom;\nextern crate getopts;\nextern crate layers;\nextern crate libc;\n#[no_link] #[macro_use] extern crate cssparser;\nextern crate rand;\n#[cfg(target_os=\"linux\")]\nextern crate regex;\nextern crate \"rustc-serialize\" as rustc_serialize;\n#[cfg(target_os=\"macos\")]\nextern crate task_info;\nextern crate \"time\" as std_time;\nextern crate text_writer;\nextern crate selectors;\nextern crate string_cache;\nextern crate unicode;\nextern crate url;\n\nextern crate lazy_static;\n\npub use selectors::smallvec;\n\nuse std::sync::Arc;\n\npub mod cache;\npub mod cursor;\npub mod debug_utils;\npub mod deque;\npub mod linked_list;\npub mod fnv;\npub mod geometry;\npub mod logical_geometry;\npub mod memory;\npub mod namespace;\npub mod opts;\npub mod persistent_list;\npub mod range;\npub mod resource_files;\npub mod str;\npub mod task;\npub mod tid;\npub mod time;\npub mod taskpool;\npub mod task_state;\npub mod vec;\npub mod workqueue;\n\npub fn breakpoint() {\n unsafe { ::std::intrinsics::breakpoint() };\n}\n\n\/\/ Workaround for lack of `ptr_eq` on Arcs...\n#[inline]\npub fn arc_ptr_eq<T: 'static + Send + Sync>(a: &Arc<T>, b: &Arc<T>) -> bool {\n let a: &T = &**a;\n let b: &T = &**b;\n (a as *const T) == (b as *const T)\n}\n<commit_msg>auto merge of #5326 : Adenilson\/servo\/squashingWarnings03, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n#![feature(alloc)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core)]\n#![feature(exit_status)]\n#![feature(hash)]\n#![feature(int_uint)]\n#![feature(io)]\n#![feature(old_io)]\n#![feature(optin_builtin_traits)]\n#![cfg_attr(target_os = \"linux\", feature(page_size, str_words))]\n#![feature(path)]\n#![feature(path_ext)]\n#![feature(plugin)]\n#![feature(rustc_private)]\n#![feature(std_misc)]\n#![feature(unicode)]\n#![feature(unsafe_destructor)]\n\n#![plugin(string_cache_plugin)]\n\n#[macro_use] extern crate log;\n\nextern crate alloc;\n#[macro_use] extern crate bitflags;\nextern crate collections;\nextern crate cssparser;\nextern crate geom;\nextern crate getopts;\nextern crate layers;\nextern crate libc;\n#[no_link] #[macro_use] extern crate cssparser;\nextern crate rand;\n#[cfg(target_os=\"linux\")]\nextern crate regex;\nextern crate \"rustc-serialize\" as rustc_serialize;\n#[cfg(target_os=\"macos\")]\nextern crate task_info;\nextern crate \"time\" as std_time;\nextern crate text_writer;\nextern crate selectors;\nextern crate string_cache;\nextern crate unicode;\nextern crate url;\n\nextern crate lazy_static;\n\npub use selectors::smallvec;\n\nuse std::sync::Arc;\n\npub mod cache;\npub mod cursor;\npub mod debug_utils;\npub mod deque;\npub mod linked_list;\npub mod fnv;\npub mod geometry;\npub mod logical_geometry;\npub mod memory;\npub mod namespace;\npub mod opts;\npub mod persistent_list;\npub mod range;\npub mod resource_files;\npub mod str;\npub mod task;\npub mod tid;\npub mod time;\npub mod taskpool;\npub mod task_state;\npub mod vec;\npub mod workqueue;\n\npub fn breakpoint() {\n unsafe { ::std::intrinsics::breakpoint() };\n}\n\n\/\/ Workaround for lack of `ptr_eq` on Arcs...\n#[inline]\npub fn arc_ptr_eq<T: 'static + Send + Sync>(a: &Arc<T>, b: &Arc<T>) -> bool {\n let a: &T = &**a;\n let b: &T = &**b;\n (a as *const T) == (b as *const T)\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Data structure measurement.\n\nuse libc::{c_void, size_t};\nuse std::cell::RefCell;\nuse std::collections::LinkedList;\nuse std::mem::transmute;\nuse std::sync::Arc;\n\n\nuse azure::azure_hl::Color;\nuse cursor::Cursor;\nuse euclid::{Point2D, Rect, SideOffsets2D, Size2D, Matrix2D, Matrix4};\nuse geometry::Au;\nuse range::Range;\n\nextern {\n \/\/ Get the size of a heap block.\n \/\/\n \/\/ Ideally Rust would expose a function like this in std::rt::heap, which would avoid the\n \/\/ jemalloc dependence.\n \/\/\n \/\/ The C prototype is `je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)`. On some\n \/\/ platforms `JEMALLOC_USABLE_SIZE_CONST` is `const` and on some it is empty. But in practice\n \/\/ this function doesn't modify the contents of the block that `ptr` points to, so we use\n \/\/ `*const c_void` here.\n fn je_malloc_usable_size(ptr: *const c_void) -> size_t;\n}\n\n\/\/ A wrapper for je_malloc_usable_size that handles `EMPTY` and returns `usize`.\npub fn heap_size_of(ptr: *const c_void) -> usize {\n if ptr == ::std::rt::heap::EMPTY as *const c_void {\n 0\n } else {\n unsafe { je_malloc_usable_size(ptr) as usize }\n }\n}\n\n\/\/ The simplest trait for measuring the size of heap data structures. More complex traits that\n\/\/ return multiple measurements -- e.g. 
measure text separately from images -- are also possible,\n\/\/ and should be used when appropriate.\n\/\/\npub trait HeapSizeOf {\n \/\/\/ Measure the size of any heap-allocated structures that hang off this value, but not the\n \/\/\/ space taken up by the value itself (i.e. what size_of::<T> measures, more or less); that\n \/\/\/ space is handled by the implementation of HeapSizeOf for Box<T> below.\n fn heap_size_of_children(&self) -> usize;\n}\n\n\/\/ There are two possible ways to measure the size of `self` when it's on the heap: compute it\n\/\/ (with `::std::rt::heap::usable_size(::std::mem::size_of::<T>(), 0)`) or measure it directly\n\/\/ using the heap allocator (with `heap_size_of`). We do the latter, for the following reasons.\n\/\/\n\/\/ * The heap allocator is the true authority for the sizes of heap blocks; its measurement is\n\/\/ guaranteed to be correct. In comparison, size computations are error-prone. (For example, the\n\/\/ `rt::heap::usable_size` function used in some of Rust's non-default allocator implementations\n\/\/ underestimate the true usable size of heap blocks, which is safe in general but would cause\n\/\/ under-measurement here.)\n\/\/\n\/\/ * If we measure something that isn't a heap block, we'll get a crash. This keeps us honest,\n\/\/ which is important because unsafe code is involved and this can be gotten wrong.\n\/\/\n\/\/ However, in the best case, the two approaches should give the same results.\n\/\/\nimpl<T: HeapSizeOf> HeapSizeOf for Box<T> {\n fn heap_size_of_children(&self) -> usize {\n \/\/ Measure size of `self`.\n heap_size_of(&**self as *const T as *const c_void) + (**self).heap_size_of_children()\n }\n}\n\nimpl HeapSizeOf for String {\n fn heap_size_of_children(&self) -> usize {\n heap_size_of(self.as_ptr() as *const c_void)\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Option<T> {\n fn heap_size_of_children(&self) -> usize {\n match *self {\n None => 0,\n Some(ref x) => x.heap_size_of_children()\n }\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Arc<T> {\n fn heap_size_of_children(&self) -> usize {\n (**self).heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for RefCell<T> {\n fn heap_size_of_children(&self) -> usize {\n self.borrow().heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Vec<T> {\n fn heap_size_of_children(&self) -> usize {\n heap_size_of(self.as_ptr() as *const c_void) +\n self.iter().fold(0, |n, elem| n + elem.heap_size_of_children())\n }\n}\n\n\/\/ FIXME(njn): We can't implement HeapSizeOf accurately for LinkedList because it requires access\n\/\/ to the private Node type. Eventually we'll want to add HeapSizeOf (or equivalent) to Rust\n\/\/ itself. 
In the meantime, we use the dirty hack of transmuting LinkedList into an identical type\n\/\/ (LinkedList2) and measuring that.\nimpl<T: HeapSizeOf> HeapSizeOf for LinkedList<T> {\n fn heap_size_of_children(&self) -> usize {\n let list2: &LinkedList2<T> = unsafe { transmute(self) };\n list2.heap_size_of_children()\n }\n}\n\nstruct LinkedList2<T> {\n _length: usize,\n list_head: Link<T>,\n _list_tail: Rawlink<Node<T>>,\n}\n\ntype Link<T> = Option<Box<Node<T>>>;\n\nstruct Rawlink<T> {\n _p: *mut T,\n}\n\nstruct Node<T> {\n next: Link<T>,\n _prev: Rawlink<Node<T>>,\n value: T,\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Node<T> {\n \/\/ Unlike most heap_size_of_children() functions, this one does *not* measure descendents.\n \/\/ Instead, LinkedList2<T>::heap_size_of_children() handles that, so that it can use iteration\n \/\/ instead of recursion, which avoids potentially blowing the stack.\n fn heap_size_of_children(&self) -> usize {\n self.value.heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for LinkedList2<T> {\n fn heap_size_of_children(&self) -> usize {\n let mut size = 0;\n let mut curr: &Link<T> = &self.list_head;\n while curr.is_some() {\n size += (*curr).heap_size_of_children();\n curr = &curr.as_ref().unwrap().next;\n }\n size\n }\n}\n\n\/\/ This is a basic sanity check. If the representation of LinkedList changes such that it becomes a\n\/\/ different size to LinkedList2, this will fail at compile-time.\n#[allow(dead_code)]\nunsafe fn linked_list2_check() {\n transmute::<LinkedList<i32>, LinkedList2<i32>>(panic!());\n}\n\n\/\/ Currently, types that implement the Drop type are larger than those that don't. Because\n\/\/ LinkedList implements Drop, LinkedList2 must also so that linked_list2_check() doesn't fail.\nimpl<T> Drop for LinkedList2<T> {\n fn drop(&mut self) {}\n}\n\n\/\/\/ For use on types defined in external crates\n\/\/\/ with known heap sizes.\n#[macro_export]\nmacro_rules! known_heap_size(\n ($size:expr, $($ty:ident),+) => (\n $(\n impl $crate::mem::HeapSizeOf for $ty {\n #[inline(always)]\n fn heap_size_of_children(&self) -> usize {\n $size\n }\n }\n )+\n );\n ($size: expr, $($ty:ident<$($gen:ident),+>),+) => (\n $(\n impl<$($gen: $crate::mem::HeapSizeOf),+> $crate::mem::HeapSizeOf for $ty<$($gen),+> {\n #[inline(always)]\n fn heap_size_of_children(&self) -> usize {\n $size\n }\n }\n )+\n );\n);\n\n\nknown_heap_size!(0, u8, u16, u32, u64, usize);\nknown_heap_size!(0, i8, i16, i32, i64, isize);\nknown_heap_size!(0, bool, f32, f64);\n\nknown_heap_size!(0, Rect<T>, Point2D<T>, Size2D<T>, Matrix2D<T>, SideOffsets2D<T>);\n\nknown_heap_size!(0, Au, Color, Cursor, Matrix4);\nknown_heap_size!(0, Range<T>);\n<commit_msg>Auto merge of #6932 - boghison:memrs, r=SimonSapin<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
Data structure measurement.\n\nuse libc::{c_void, size_t};\nuse std::cell::RefCell;\nuse std::collections::LinkedList;\nuse std::mem::transmute;\nuse std::sync::Arc;\n\n\nuse azure::azure_hl::Color;\nuse cursor::Cursor;\nuse euclid::{Point2D, Rect, SideOffsets2D, Size2D, Matrix2D, Matrix4};\nuse geometry::Au;\nuse range::Range;\nuse url;\n\nextern {\n \/\/ Get the size of a heap block.\n \/\/\n \/\/ Ideally Rust would expose a function like this in std::rt::heap, which would avoid the\n \/\/ jemalloc dependence.\n \/\/\n \/\/ The C prototype is `je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)`. On some\n \/\/ platforms `JEMALLOC_USABLE_SIZE_CONST` is `const` and on some it is empty. But in practice\n \/\/ this function doesn't modify the contents of the block that `ptr` points to, so we use\n \/\/ `*const c_void` here.\n fn je_malloc_usable_size(ptr: *const c_void) -> size_t;\n}\n\n\/\/ A wrapper for je_malloc_usable_size that handles `EMPTY` and returns `usize`.\npub fn heap_size_of(ptr: *const c_void) -> usize {\n if ptr == ::std::rt::heap::EMPTY as *const c_void {\n 0\n } else {\n unsafe { je_malloc_usable_size(ptr) as usize }\n }\n}\n\n\/\/ The simplest trait for measuring the size of heap data structures. More complex traits that\n\/\/ return multiple measurements -- e.g. measure text separately from images -- are also possible,\n\/\/ and should be used when appropriate.\n\/\/\npub trait HeapSizeOf {\n \/\/\/ Measure the size of any heap-allocated structures that hang off this value, but not the\n \/\/\/ space taken up by the value itself (i.e. what size_of::<T> measures, more or less); that\n \/\/\/ space is handled by the implementation of HeapSizeOf for Box<T> below.\n fn heap_size_of_children(&self) -> usize;\n}\n\n\/\/ There are two possible ways to measure the size of `self` when it's on the heap: compute it\n\/\/ (with `::std::rt::heap::usable_size(::std::mem::size_of::<T>(), 0)`) or measure it directly\n\/\/ using the heap allocator (with `heap_size_of`). We do the latter, for the following reasons.\n\/\/\n\/\/ * The heap allocator is the true authority for the sizes of heap blocks; its measurement is\n\/\/ guaranteed to be correct. In comparison, size computations are error-prone. (For example, the\n\/\/ `rt::heap::usable_size` function used in some of Rust's non-default allocator implementations\n\/\/ underestimate the true usable size of heap blocks, which is safe in general but would cause\n\/\/ under-measurement here.)\n\/\/\n\/\/ * If we measure something that isn't a heap block, we'll get a crash. 
This keeps us honest,\n\/\/ which is important because unsafe code is involved and this can be gotten wrong.\n\/\/\n\/\/ However, in the best case, the two approaches should give the same results.\n\/\/\nimpl<T: HeapSizeOf> HeapSizeOf for Box<T> {\n fn heap_size_of_children(&self) -> usize {\n \/\/ Measure size of `self`.\n heap_size_of(&**self as *const T as *const c_void) + (**self).heap_size_of_children()\n }\n}\n\nimpl HeapSizeOf for String {\n fn heap_size_of_children(&self) -> usize {\n heap_size_of(self.as_ptr() as *const c_void)\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Option<T> {\n fn heap_size_of_children(&self) -> usize {\n match *self {\n None => 0,\n Some(ref x) => x.heap_size_of_children()\n }\n }\n}\n\nimpl HeapSizeOf for url::Url {\n fn heap_size_of_children(&self) -> usize {\n let &url::Url { ref scheme, ref scheme_data, ref query, ref fragment } = self;\n scheme.heap_size_of_children() +\n scheme_data.heap_size_of_children() +\n query.heap_size_of_children() +\n fragment.heap_size_of_children()\n }\n}\n\nimpl HeapSizeOf for url::SchemeData {\n fn heap_size_of_children(&self) -> usize {\n match self {\n &url::SchemeData::Relative(ref data) => data.heap_size_of_children(),\n &url::SchemeData::NonRelative(ref str) => str.heap_size_of_children()\n }\n }\n}\n\nimpl HeapSizeOf for url::RelativeSchemeData {\n fn heap_size_of_children(&self) -> usize {\n let &url::RelativeSchemeData { ref username, ref password, ref host,\n ref port, ref default_port, ref path } = self;\n username.heap_size_of_children() +\n password.heap_size_of_children() +\n host.heap_size_of_children() +\n port.heap_size_of_children() +\n default_port.heap_size_of_children() +\n path.heap_size_of_children()\n }\n}\n\nimpl HeapSizeOf for url::Host {\n fn heap_size_of_children(&self) -> usize {\n match self {\n &url::Host::Domain(ref str) => str.heap_size_of_children(),\n &url::Host::Ipv6(_) => 0\n }\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Arc<T> {\n fn heap_size_of_children(&self) -> usize {\n (**self).heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for RefCell<T> {\n fn heap_size_of_children(&self) -> usize {\n self.borrow().heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Vec<T> {\n fn heap_size_of_children(&self) -> usize {\n heap_size_of(self.as_ptr() as *const c_void) +\n self.iter().fold(0, |n, elem| n + elem.heap_size_of_children())\n }\n}\n\n\/\/ FIXME(njn): We can't implement HeapSizeOf accurately for LinkedList because it requires access\n\/\/ to the private Node type. Eventually we'll want to add HeapSizeOf (or equivalent) to Rust\n\/\/ itself. 
In the meantime, we use the dirty hack of transmuting LinkedList into an identical type\n\/\/ (LinkedList2) and measuring that.\nimpl<T: HeapSizeOf> HeapSizeOf for LinkedList<T> {\n fn heap_size_of_children(&self) -> usize {\n let list2: &LinkedList2<T> = unsafe { transmute(self) };\n list2.heap_size_of_children()\n }\n}\n\nstruct LinkedList2<T> {\n _length: usize,\n list_head: Link<T>,\n _list_tail: Rawlink<Node<T>>,\n}\n\ntype Link<T> = Option<Box<Node<T>>>;\n\nstruct Rawlink<T> {\n _p: *mut T,\n}\n\nstruct Node<T> {\n next: Link<T>,\n _prev: Rawlink<Node<T>>,\n value: T,\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for Node<T> {\n \/\/ Unlike most heap_size_of_children() functions, this one does *not* measure descendents.\n \/\/ Instead, LinkedList2<T>::heap_size_of_children() handles that, so that it can use iteration\n \/\/ instead of recursion, which avoids potentially blowing the stack.\n fn heap_size_of_children(&self) -> usize {\n self.value.heap_size_of_children()\n }\n}\n\nimpl<T: HeapSizeOf> HeapSizeOf for LinkedList2<T> {\n fn heap_size_of_children(&self) -> usize {\n let mut size = 0;\n let mut curr: &Link<T> = &self.list_head;\n while curr.is_some() {\n size += (*curr).heap_size_of_children();\n curr = &curr.as_ref().unwrap().next;\n }\n size\n }\n}\n\n\/\/ This is a basic sanity check. If the representation of LinkedList changes such that it becomes a\n\/\/ different size to LinkedList2, this will fail at compile-time.\n#[allow(dead_code)]\nunsafe fn linked_list2_check() {\n transmute::<LinkedList<i32>, LinkedList2<i32>>(panic!());\n}\n\n\/\/ Currently, types that implement the Drop type are larger than those that don't. Because\n\/\/ LinkedList implements Drop, LinkedList2 must also so that linked_list2_check() doesn't fail.\nimpl<T> Drop for LinkedList2<T> {\n fn drop(&mut self) {}\n}\n\n\/\/\/ For use on types defined in external crates\n\/\/\/ with known heap sizes.\n#[macro_export]\nmacro_rules! 
known_heap_size(\n ($size:expr, $($ty:ident),+) => (\n $(\n impl $crate::mem::HeapSizeOf for $ty {\n #[inline(always)]\n fn heap_size_of_children(&self) -> usize {\n $size\n }\n }\n )+\n );\n ($size: expr, $($ty:ident<$($gen:ident),+>),+) => (\n $(\n impl<$($gen: $crate::mem::HeapSizeOf),+> $crate::mem::HeapSizeOf for $ty<$($gen),+> {\n #[inline(always)]\n fn heap_size_of_children(&self) -> usize {\n $size\n }\n }\n )+\n );\n);\n\n\nknown_heap_size!(0, u8, u16, u32, u64, usize);\nknown_heap_size!(0, i8, i16, i32, i64, isize);\nknown_heap_size!(0, bool, f32, f64);\n\nknown_heap_size!(0, Rect<T>, Point2D<T>, Size2D<T>, Matrix2D<T>, SideOffsets2D<T>);\n\nknown_heap_size!(0, Au, Color, Cursor, Matrix4);\nknown_heap_size!(0, Range<T>);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Expose HSalsa20 as a lightweight oneshot hash function.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Invert pixel draw loop<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>languages\/rust\/rust-book\/3-chapter\/25-code\/multiple_phrases\/src\/english\/greetings.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>happy rs<commit_after>#![feature(core)]\n\nfn sumsqd(mut n: i32) -> i32 {\n let mut sq = 0;\n while n > 0 {\n let d = n % 10;\n sq += d*d;\n n \/= 10\n }\n sq\n}\n\nuse std::num::Int;\nfn cycle<T: Int>(a: T, f: fn(T) -> T) -> T {\n let mut t = a;\n let mut h = f(a);\n\n while t != h {\n t = f(t);\n h = f(f(h))\n }\n t\n}\n\nfn ishappy(n: i32) -> bool {\n cycle(n, sumsqd) == 1\n}\n\nfn main() {\n let happy = std::iter::count(1, 1)\n .filter(|&n| ishappy(n))\n .take(8)\n .collect::<Vec<i32>>();\n\n println!(\"{:?}\", happy)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Compute timer stats<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::{self, Read, Write};\n\nuse {Poll, Task};\nuse stream::Stream;\nuse io::Ready;\n\npub struct ReadyTracker<S> {\n inner: S,\n read_ready: bool,\n write_ready: bool,\n}\n\nimpl<S> ReadyTracker<S>\n where S: Stream<Item=Ready, Error=io::Error>,\n{\n pub fn new(s: S) -> ReadyTracker<S> {\n ReadyTracker {\n inner: s,\n read_ready: false,\n write_ready: false,\n }\n }\n}\n\nimpl<S> ReadyTracker<S> {\n pub fn maybe_read_ready(&self) -> bool {\n self.read_ready\n }\n\n pub fn maybe_write_ready(&self) -> bool {\n self.read_ready\n }\n}\n\nimpl<S> Stream for ReadyTracker<S>\n where S: Stream<Item=Ready, Error=io::Error>,\n{\n type Item = Ready;\n type Error = io::Error;\n\n fn poll(&mut self, task: &mut Task) -> Poll<Option<Ready>, io::Error> {\n match self.inner.poll(task) {\n Poll::Ok(Some(ready)) => {\n self.read_ready = self.read_ready || ready.is_read();\n self.write_ready = self.write_ready || ready.is_write();\n Poll::Ok(Some(ready))\n }\n other => other,\n }\n }\n\n fn schedule(&mut self, task: &mut Task) {\n self.inner.schedule(task)\n }\n}\n\nfn is_wouldblock<T>(res: &io::Result<T>) -> bool {\n match *res {\n Ok(_) => false,\n Err(ref e) => e.kind() == io::ErrorKind::WouldBlock,\n }\n}\n\nimpl<S: Read> Read for ReadyTracker<S> {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let res = self.inner.read(buf);\n if is_wouldblock(&res) {\n debug!(\"read no longer ready\");\n self.read_ready = false;\n }\n return res\n }\n}\n\nimpl<S: Write> Write for ReadyTracker<S> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n let res = self.inner.write(buf);\n if is_wouldblock(&res) {\n debug!(\"write no longer ready\");\n self.write_ready = false;\n }\n return res\n }\n\n fn 
flush(&mut self) -> io::Result<()> {\n let res = self.inner.flush();\n if is_wouldblock(&res) {\n debug!(\"write no longer ready\");\n self.write_ready = false;\n }\n return res\n }\n}\n\n<commit_msg>Fix a copy\/paste typo<commit_after>use std::io::{self, Read, Write};\n\nuse {Poll, Task};\nuse stream::Stream;\nuse io::Ready;\n\npub struct ReadyTracker<S> {\n inner: S,\n read_ready: bool,\n write_ready: bool,\n}\n\nimpl<S> ReadyTracker<S>\n where S: Stream<Item=Ready, Error=io::Error>,\n{\n pub fn new(s: S) -> ReadyTracker<S> {\n ReadyTracker {\n inner: s,\n read_ready: false,\n write_ready: false,\n }\n }\n}\n\nimpl<S> ReadyTracker<S> {\n pub fn maybe_read_ready(&self) -> bool {\n self.read_ready\n }\n\n pub fn maybe_write_ready(&self) -> bool {\n self.write_ready\n }\n}\n\nimpl<S> Stream for ReadyTracker<S>\n where S: Stream<Item=Ready, Error=io::Error>,\n{\n type Item = Ready;\n type Error = io::Error;\n\n fn poll(&mut self, task: &mut Task) -> Poll<Option<Ready>, io::Error> {\n match self.inner.poll(task) {\n Poll::Ok(Some(ready)) => {\n self.read_ready = self.read_ready || ready.is_read();\n self.write_ready = self.write_ready || ready.is_write();\n Poll::Ok(Some(ready))\n }\n other => other,\n }\n }\n\n fn schedule(&mut self, task: &mut Task) {\n self.inner.schedule(task)\n }\n}\n\nfn is_wouldblock<T>(res: &io::Result<T>) -> bool {\n match *res {\n Ok(_) => false,\n Err(ref e) => e.kind() == io::ErrorKind::WouldBlock,\n }\n}\n\nimpl<S: Read> Read for ReadyTracker<S> {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let res = self.inner.read(buf);\n if is_wouldblock(&res) {\n debug!(\"read no longer ready\");\n self.read_ready = false;\n }\n return res\n }\n}\n\nimpl<S: Write> Write for ReadyTracker<S> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n let res = self.inner.write(buf);\n if is_wouldblock(&res) {\n debug!(\"write no longer ready\");\n self.write_ready = false;\n }\n return res\n }\n\n fn flush(&mut self) -> io::Result<()> {\n let res = self.inner.flush();\n if is_wouldblock(&res) {\n debug!(\"write no longer ready\");\n self.write_ready = false;\n }\n return res\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ @has manual_impl\/trait.T.html\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\n\/\/\/ Docs associated with the trait definition.\npub trait T {\n \/\/\/ Docs associated with the trait a_method definition.\n fn a_method(&self) -> usize;\n\n \/\/\/ Docs associated with the trait b_method definition.\n fn b_method(&self) -> usize {\n self.a_method()\n }\n\n \/\/\/ Docs associated with the trait c_method definition.\n \/\/\/\n \/\/\/ There is another line\n fn c_method(&self) -> usize {\n self.a_method()\n }\n}\n\n\/\/ @has manual_impl\/struct.S1.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S1 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S1 trait a_method implementation.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait c_method definition.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'There is another line'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Read more'\npub struct S1(usize);\n\n\/\/\/ Docs associated with the S1 trait implementation.\nimpl T for S1 {\n \/\/\/ Docs associated with the S1 trait a_method implementation.\n fn a_method(&self) -> usize {\n self.0\n }\n}\n\n\/\/ @has manual_impl\/struct.S2.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait a_method implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait c_method implementation.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait c_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\npub struct S2(usize);\n\n\/\/\/ Docs associated with the S2 trait implementation.\nimpl T for S2 {\n \/\/\/ Docs associated with the S2 trait a_method implementation.\n fn a_method(&self) -> usize {\n self.0\n }\n\n \/\/\/ Docs associated with the S2 trait c_method implementation.\n fn c_method(&self) -> usize {\n 5\n }\n}\n\n\/\/ @has manual_impl\/struct.S3.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S3 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S3 trait b_method implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\npub struct S3(usize);\n\n\/\/\/ Docs associated with the S3 trait implementation.\nimpl T for S3 {\n fn a_method(&self) -> usize {\n self.0\n }\n\n \/\/\/ Docs associated with the S3 trait b_method implementation.\n fn b_method(&self) -> usize {\n 5\n }\n}\n<commit_msg>Remove duplicate test line<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ @has manual_impl\/trait.T.html\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\n\/\/\/ Docs associated with the trait definition.\npub trait T {\n \/\/\/ Docs associated with the trait a_method definition.\n fn a_method(&self) -> usize;\n\n \/\/\/ Docs associated with the trait b_method definition.\n fn b_method(&self) -> usize {\n self.a_method()\n }\n\n \/\/\/ Docs associated with the trait c_method definition.\n \/\/\/\n \/\/\/ There is another line\n fn c_method(&self) -> usize {\n self.a_method()\n }\n}\n\n\/\/ @has manual_impl\/struct.S1.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S1 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S1 trait a_method implementation.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait c_method definition.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'There is another line'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Read more'\npub struct S1(usize);\n\n\/\/\/ Docs associated with the S1 trait implementation.\nimpl T for S1 {\n \/\/\/ Docs associated with the S1 trait a_method implementation.\n fn a_method(&self) -> usize {\n self.0\n }\n}\n\n\/\/ @has manual_impl\/struct.S2.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait a_method implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S2 trait c_method implementation.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\n\/\/ @!has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait c_method definition.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait b_method definition.'\npub struct S2(usize);\n\n\/\/\/ Docs associated with the S2 trait implementation.\nimpl T for S2 {\n \/\/\/ Docs associated with the S2 trait a_method implementation.\n fn a_method(&self) -> usize {\n self.0\n }\n\n \/\/\/ Docs associated with the S2 trait c_method implementation.\n fn c_method(&self) -> usize {\n 5\n }\n}\n\n\/\/ @has manual_impl\/struct.S3.html '\/\/*[@class=\"trait\"]' 'T'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S3 trait implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the S3 trait b_method implementation.'\n\/\/ @has - '\/\/*[@class=\"docblock\"]' 'Docs associated with the trait a_method definition.'\npub struct S3(usize);\n\n\/\/\/ Docs associated with the S3 trait implementation.\nimpl T for S3 {\n 
fn a_method(&self) -> usize {\n self.0\n }\n\n \/\/\/ Docs associated with the S3 trait b_method implementation.\n fn b_method(&self) -> usize {\n 5\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Networking primitives for TCP\/UDP communication.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse prelude::v1::*;\n\nuse io::{self, Error, ErrorKind};\nuse sys_common::net as net_imp;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::tcp::{TcpStream, TcpListener, Incoming};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::udp::UdpSocket;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::parser::AddrParseError;\n\nmod ip;\nmod addr;\nmod tcp;\nmod udp;\nmod parser;\n#[cfg(test)] mod test;\n\n\/\/\/ Possible values which can be passed to the `shutdown` method of `TcpStream`.\n#[derive(Copy, Clone, PartialEq, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Shutdown {\n \/\/\/ Indicates that the reading portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future reads will return `Ok(0)`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Read,\n \/\/\/ Indicates that the writing portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future writes will return an error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Write,\n \/\/\/ Shut down both the reading and writing portions of this stream.\n \/\/\/\n \/\/\/ See `Shutdown::Read` and `Shutdown::Write` for more information.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Both,\n}\n\n#[doc(hidden)]\ntrait NetInt {\n fn from_be(i: Self) -> Self;\n fn to_be(&self) -> Self;\n}\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl NetInt for $t {\n fn from_be(i: Self) -> Self { <$t>::from_be(i) }\n fn to_be(&self) -> Self { <$t>::to_be(*self) }\n })*)\n}\ndoit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }\n\nfn hton<I: NetInt>(i: I) -> I { i.to_be() }\nfn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }\n\nfn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>\n where F: FnMut(&SocketAddr) -> io::Result<T>\n{\n let mut last_err = None;\n for addr in addr.to_socket_addrs()? 
{\n match f(&addr) {\n Ok(l) => return Ok(l),\n Err(e) => last_err = Some(e),\n }\n }\n Err(last_err.unwrap_or_else(|| {\n Error::new(ErrorKind::InvalidInput,\n \"could not resolve to any addresses\")\n }))\n}\n\n\/\/\/ An iterator over `SocketAddr` values returned from a host lookup operation.\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\npub struct LookupHost(net_imp::LookupHost);\n\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\nimpl Iterator for LookupHost {\n type Item = SocketAddr;\n fn next(&mut self) -> Option<SocketAddr> { self.0.next() }\n}\n\n\/\/\/ Resolve the host specified by `host` as a number of `SocketAddr` instances.\n\/\/\/\n\/\/\/ This method may perform a DNS query to resolve `host` and may also inspect\n\/\/\/ system configuration to resolve the specified hostname.\n\/\/\/\n\/\/\/ The returned iterator will skip over any unknown addresses returned by the\n\/\/\/ operating system.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ #![feature(lookup_host)]\n\/\/\/\n\/\/\/ use std::net;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ for host in try!(net::lookup_host(\"rust-lang.org\")) {\n\/\/\/ println!(\"found address: {}\", host);\n\/\/\/ }\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\npub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n net_imp::lookup_host(host).map(LookupHost)\n}\n<commit_msg>Link to relevant method\/struct for `std::net::Shutdown` docs.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Networking primitives for TCP\/UDP communication.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse prelude::v1::*;\n\nuse io::{self, Error, ErrorKind};\nuse sys_common::net as net_imp;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::tcp::{TcpStream, TcpListener, Incoming};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::udp::UdpSocket;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::parser::AddrParseError;\n\nmod ip;\nmod addr;\nmod tcp;\nmod udp;\nmod parser;\n#[cfg(test)] mod test;\n\n\/\/\/ Possible values which can be passed to the [`shutdown`] method of\n\/\/\/ [`TcpStream`].\n\/\/\/\n\/\/\/ [`shutdown`]: struct.TcpStream.html#method.shutdown\n\/\/\/ [`TcpStream`]: struct.TcpStream.html\n#[derive(Copy, Clone, PartialEq, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Shutdown {\n \/\/\/ Indicates that the reading portion of this stream\/socket should be shut\n \/\/\/ down. 
All currently blocked and future reads will return `Ok(0)`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Read,\n \/\/\/ Indicates that the writing portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future writes will return an error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Write,\n \/\/\/ Shut down both the reading and writing portions of this stream.\n \/\/\/\n \/\/\/ See `Shutdown::Read` and `Shutdown::Write` for more information.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Both,\n}\n\n#[doc(hidden)]\ntrait NetInt {\n fn from_be(i: Self) -> Self;\n fn to_be(&self) -> Self;\n}\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl NetInt for $t {\n fn from_be(i: Self) -> Self { <$t>::from_be(i) }\n fn to_be(&self) -> Self { <$t>::to_be(*self) }\n })*)\n}\ndoit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }\n\nfn hton<I: NetInt>(i: I) -> I { i.to_be() }\nfn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }\n\nfn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>\n where F: FnMut(&SocketAddr) -> io::Result<T>\n{\n let mut last_err = None;\n for addr in addr.to_socket_addrs()? {\n match f(&addr) {\n Ok(l) => return Ok(l),\n Err(e) => last_err = Some(e),\n }\n }\n Err(last_err.unwrap_or_else(|| {\n Error::new(ErrorKind::InvalidInput,\n \"could not resolve to any addresses\")\n }))\n}\n\n\/\/\/ An iterator over `SocketAddr` values returned from a host lookup operation.\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\npub struct LookupHost(net_imp::LookupHost);\n\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\nimpl Iterator for LookupHost {\n type Item = SocketAddr;\n fn next(&mut self) -> Option<SocketAddr> { self.0.next() }\n}\n\n\/\/\/ Resolve the host specified by `host` as a number of `SocketAddr` instances.\n\/\/\/\n\/\/\/ This method may perform a DNS query to resolve `host` and may also inspect\n\/\/\/ system configuration to resolve the specified hostname.\n\/\/\/\n\/\/\/ The returned iterator will skip over any unknown addresses returned by the\n\/\/\/ operating system.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ #![feature(lookup_host)]\n\/\/\/\n\/\/\/ use std::net;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ for host in try!(net::lookup_host(\"rust-lang.org\")) {\n\/\/\/ println!(\"found address: {}\", host);\n\/\/\/ }\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\",\n issue = \"27705\")]\npub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n net_imp::lookup_host(host).map(LookupHost)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added timeout tcp client<commit_after>use std::net::TcpStream;\nuse std::str;\nuse std::io::{self, BufRead, BufReader, Write};\nuse std::time::Duration;\nuse std::net::SocketAddr;\n\nfn main () {\n let remote: SocketAddr = \"127.0.0.1:8888\".parse().unwrap();\n let mut stream = TcpStream::connect_timeout(&remote, Duration::from_secs(1))\n .expect(\"Could not connect to server\");\n stream.set_read_timeout(Some(Duration::from_secs(3))).expect(\"Could not set a read timeout\");\n\n loop {\n let mut input = String::new();\n let mut buffer: Vec<u8> = Vec::new();\n io::stdin().read_line(&mut input).expect(\"Failed 
to read from stdin\");\n stream.write(input.as_bytes()).expect(\"Failed to write to server\");\n\n let mut reader = BufReader::new(&stream);\n reader.read_until(b'\\n', &mut buffer).expect(\"Could not read into buffer\");\n print!(\"{}\", str::from_utf8(&buffer).expect(\"Could not write buffer as string\"));\n\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) 2015 Daniel Grunwald\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of this\n\/\/ software and associated documentation files (the \"Software\"), to deal in the Software\n\/\/ without restriction, including without limitation the rights to use, copy, modify, merge,\n\/\/ publish, distribute, sublicense, and\/or sell copies of the Software, and to permit persons\n\/\/ to whom the Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in all copies or\n\/\/ substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n\/\/ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n\/\/ PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE\n\/\/ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n\/\/ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nuse std::mem::{size_of, transmute, POST_DROP_USIZE};\nuse libc;\nuse ffi;\nuse python::{Python, PythonObject, PythonObjectWithCheckedDowncast, PythonObjectWithTypeObject, PythonObjectDowncastError, ToPythonPointer};\nuse objects::PyType;\nuse err::{PyErr, PyResult};\n\n#[unsafe_no_drop_flag]\n#[repr(C)]\npub struct PyObject<'p> {\n \/\/ PyObject<'p> owns one reference to the *PyObject\n \/\/ ptr is not null (except possibly due to #[unsafe_no_drop_flag])\n ptr: *mut ffi::PyObject,\n py : Python<'p>\n}\n\n\/\/\/ Dropping a `PyObject` decrements the reference count on the object by 1.\nimpl <'p> Drop for PyObject<'p> {\n #[inline]\n fn drop(&mut self) {\n \/\/ TODO: remove if and change Py_XDECREF to Py_DECREF when #[unsafe_no_drop_flag] disappears\n if self.ptr as usize != POST_DROP_USIZE {\n unsafe { ffi::Py_XDECREF(self.ptr); }\n }\n }\n}\n\n\/\/\/ Cloning a `PyObject` increments the reference count on the object by 1.\nimpl <'p> Clone for PyObject<'p> {\n #[inline]\n fn clone(&self) -> PyObject<'p> {\n unsafe { ffi::Py_INCREF(self.ptr) };\n PyObject { ptr: self.ptr, py: self.py }\n }\n}\n\nimpl <'p> PythonObject<'p> for PyObject<'p> {\n #[inline]\n fn as_object<'a>(&'a self) -> &'a PyObject<'p> {\n self\n }\n \n #[inline]\n fn into_object(self) -> PyObject<'p> {\n self\n }\n \n #[inline]\n unsafe fn unchecked_downcast_from(o: PyObject<'p>) -> PyObject<'p> {\n o\n }\n \n #[inline]\n unsafe fn unchecked_downcast_borrow_from<'a>(o: &'a PyObject<'p>) -> &'a PyObject<'p> {\n o\n }\n \n #[inline]\n fn python(&self) -> Python<'p> {\n self.py\n }\n}\n\nimpl <'p> PythonObjectWithCheckedDowncast<'p> for PyObject<'p> {\n #[inline]\n fn downcast_from(obj: PyObject<'p>) -> Result<PyObject<'p>, PythonObjectDowncastError<'p>> {\n Ok(obj)\n }\n \n #[inline]\n fn downcast_borrow_from<'a>(obj: &'a PyObject<'p>) -> Result<&'a PyObject<'p>, PythonObjectDowncastError<'p>> {\n Ok(obj)\n }\n}\n\nimpl <'p> PythonObjectWithTypeObject<'p> for PyObject<'p> {\n #[inline]\n fn type_object(py: Python<'p>) -> PyType<'p> 
{\n unsafe { PyType::from_type_ptr(py, &mut ffi::PyBaseObject_Type) }\n }\n}\n\nimpl <'p> ToPythonPointer for PyObject<'p> {\n #[inline]\n fn as_ptr(&self) -> *mut ffi::PyObject {\n self.ptr\n }\n \n #[inline]\n fn steal_ptr(self) -> *mut ffi::PyObject {\n let ptr = self.ptr;\n unsafe { ::std::mem::forget(self); }\n ptr\n }\n}\n\n\nimpl <'p> PyObject<'p> {\n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ This moves ownership over the pointer into the PyObject.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn from_owned_ptr(py : Python<'p>, ptr : *mut ffi::PyObject) -> PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(ptr) > 0);\n PyObject { py: py, ptr: ptr }\n }\n \n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ Calls Py_INCREF() on the ptr.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn from_borrowed_ptr(py : Python<'p>, ptr : *mut ffi::PyObject) -> PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(ptr) > 0);\n ffi::Py_INCREF(ptr);\n PyObject { py: py, ptr: ptr }\n }\n\n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ This moves ownership over the pointer into the PyObject.\n \/\/\/ Returns None for null pointers; undefined behavior if the pointer is invalid.\n #[inline]\n pub unsafe fn from_owned_ptr_opt(py: Python<'p>, ptr: *mut ffi::PyObject) -> Option<PyObject<'p>> {\n if ptr.is_null() {\n None\n } else {\n Some(PyObject::from_owned_ptr(py, ptr))\n }\n }\n \n \/\/\/ Returns None for null pointers; undefined behavior if the pointer is invalid.\n #[inline]\n pub unsafe fn from_borrowed_ptr_opt(py: Python<'p>, ptr: *mut ffi::PyObject) -> Option<PyObject<'p>> {\n if ptr.is_null() {\n None\n } else {\n Some(PyObject::from_borrowed_ptr(py, ptr))\n }\n }\n \n \/\/\/ Transmutes an owned FFI pointer to `&PyObject`.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn borrow_from_owned_ptr<'a>(py : Python<'p>, ptr : &'a *mut ffi::PyObject) -> &'a PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(*ptr) > 0);\n transmute(ptr)\n }\n \n \/\/\/ Transmutes a slice of owned FFI pointers to `&[PyObject]`.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn borrow_from_owned_ptr_slice<'a>(py : Python<'p>, ptr : &'a [*mut ffi::PyObject]) -> &'a [PyObject<'p>] {\n transmute(ptr)\n }\n \n \/\/\/ Gets the reference count of this python object.\n #[inline]\n pub fn get_refcnt(&self) -> usize {\n unsafe { ffi::Py_REFCNT(self.as_ptr()) as usize }\n }\n\n \/\/\/ Gets the python type object for this object's type.\n #[inline]\n pub fn get_type(&self) -> &PyType<'p> {\n unsafe {\n let t : &*mut ffi::PyTypeObject = &(*self.as_ptr()).ob_type;\n transmute(t)\n }\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Causes undefined behavior if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObject::unchecked_downcast_from()`.\n #[inline]\n pub unsafe fn unchecked_cast_into<T>(self) -> T where T: PythonObject<'p> {\n PythonObject::unchecked_downcast_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Returns a python `TypeError` if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObjectWithCheckedDowncast::downcast_from()`.\n #[inline]\n pub fn cast_into<T>(self) -> Result<T, 
PythonObjectDowncastError<'p>> where T: PythonObjectWithCheckedDowncast<'p> {\n PythonObjectWithCheckedDowncast::downcast_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Causes undefined behavior if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObject::unchecked_downcast_borrow_from()`.\n #[inline]\n pub unsafe fn unchecked_cast_as<'s, T>(&'s self) -> &'s T where T: PythonObject<'p> {\n PythonObject::unchecked_downcast_borrow_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Returns a python `TypeError` if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObjectWithCheckedDowncast::downcast_borrow_from()`.\n #[inline]\n pub fn cast_as<'s, T>(&'s self) -> Result<&'s T, PythonObjectDowncastError<'p>> where T: PythonObjectWithCheckedDowncast<'p> {\n PythonObjectWithCheckedDowncast::downcast_borrow_from(self)\n }\n \n \/\/\/ Extracts some type from the python object.\n \/\/\/ This is a wrapper function around `FromPyObject::from_py_object()`.\n #[inline]\n pub fn extract<'s, T>(&'s self) -> Result<T, PyErr<'p>> where T: ::conversion::FromPyObject<'p, 's> {\n ::conversion::FromPyObject::from_py_object(self)\n }\n}\n\n\/\/\/ PyObject implements the `==` operator using reference equality:\n\/\/\/ `obj1 == obj2` in rust is equivalent to `obj1 is obj2` in python.\nimpl <'p> PartialEq for PyObject<'p> {\n #[inline]\n fn eq(&self, o : &PyObject<'p>) -> bool {\n self.ptr == o.ptr\n }\n}\n\n\/\/\/ PyObject implements the `==` operator using reference equality:\n\/\/\/ `obj1 == obj2` in rust is equivalent to `obj1 is obj2` in python.\nimpl <'p> Eq for PyObject<'p> { }\n\n\n#[test]\nfn test_sizeof() {\n \/\/ should be a static_assert, but size_of is not a compile-time const\n \/\/ these are necessary for the transmutes in this module\n assert_eq!(size_of::<PyObject>(), size_of::<*mut ffi::PyObject>());\n assert_eq!(size_of::<PyType>(), size_of::<*mut ffi::PyTypeObject>());\n}\n\n<commit_msg>Fix warning: unnecessary 'unsafe' block<commit_after>\/\/ Copyright (c) 2015 Daniel Grunwald\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of this\n\/\/ software and associated documentation files (the \"Software\"), to deal in the Software\n\/\/ without restriction, including without limitation the rights to use, copy, modify, merge,\n\/\/ publish, distribute, sublicense, and\/or sell copies of the Software, and to permit persons\n\/\/ to whom the Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in all copies or\n\/\/ substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n\/\/ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n\/\/ PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE\n\/\/ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n\/\/ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nuse std::mem;\nuse libc;\nuse ffi;\nuse python::{Python, PythonObject, PythonObjectWithCheckedDowncast, PythonObjectWithTypeObject, PythonObjectDowncastError, ToPythonPointer};\nuse objects::PyType;\nuse err::{PyErr, PyResult};\n\n#[unsafe_no_drop_flag]\n#[repr(C)]\npub struct PyObject<'p> {\n \/\/ PyObject<'p> owns one reference to the *PyObject\n \/\/ ptr is not null (except possibly due to #[unsafe_no_drop_flag])\n ptr: *mut ffi::PyObject,\n py : Python<'p>\n}\n\n\/\/\/ Dropping a `PyObject` decrements the reference count on the object by 1.\nimpl <'p> Drop for PyObject<'p> {\n #[inline]\n fn drop(&mut self) {\n \/\/ TODO: remove if and change Py_XDECREF to Py_DECREF when #[unsafe_no_drop_flag] disappears\n if self.ptr as usize != mem::POST_DROP_USIZE {\n unsafe { ffi::Py_XDECREF(self.ptr); }\n }\n }\n}\n\n\/\/\/ Cloning a `PyObject` increments the reference count on the object by 1.\nimpl <'p> Clone for PyObject<'p> {\n #[inline]\n fn clone(&self) -> PyObject<'p> {\n unsafe { ffi::Py_INCREF(self.ptr) };\n PyObject { ptr: self.ptr, py: self.py }\n }\n}\n\nimpl <'p> PythonObject<'p> for PyObject<'p> {\n #[inline]\n fn as_object<'a>(&'a self) -> &'a PyObject<'p> {\n self\n }\n \n #[inline]\n fn into_object(self) -> PyObject<'p> {\n self\n }\n \n #[inline]\n unsafe fn unchecked_downcast_from(o: PyObject<'p>) -> PyObject<'p> {\n o\n }\n \n #[inline]\n unsafe fn unchecked_downcast_borrow_from<'a>(o: &'a PyObject<'p>) -> &'a PyObject<'p> {\n o\n }\n \n #[inline]\n fn python(&self) -> Python<'p> {\n self.py\n }\n}\n\nimpl <'p> PythonObjectWithCheckedDowncast<'p> for PyObject<'p> {\n #[inline]\n fn downcast_from(obj: PyObject<'p>) -> Result<PyObject<'p>, PythonObjectDowncastError<'p>> {\n Ok(obj)\n }\n \n #[inline]\n fn downcast_borrow_from<'a>(obj: &'a PyObject<'p>) -> Result<&'a PyObject<'p>, PythonObjectDowncastError<'p>> {\n Ok(obj)\n }\n}\n\nimpl <'p> PythonObjectWithTypeObject<'p> for PyObject<'p> {\n #[inline]\n fn type_object(py: Python<'p>) -> PyType<'p> {\n unsafe { PyType::from_type_ptr(py, &mut ffi::PyBaseObject_Type) }\n }\n}\n\nimpl <'p> ToPythonPointer for PyObject<'p> {\n #[inline]\n fn as_ptr(&self) -> *mut ffi::PyObject {\n self.ptr\n }\n \n #[inline]\n fn steal_ptr(self) -> *mut ffi::PyObject {\n let ptr = self.ptr;\n mem::forget(self);\n ptr\n }\n}\n\n\nimpl <'p> PyObject<'p> {\n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ This moves ownership over the pointer into the PyObject.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn from_owned_ptr(py : Python<'p>, ptr : *mut ffi::PyObject) -> PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(ptr) > 0);\n PyObject { py: py, ptr: ptr }\n }\n \n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ Calls Py_INCREF() on the ptr.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn from_borrowed_ptr(py : Python<'p>, ptr : *mut ffi::PyObject) -> PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(ptr) > 0);\n ffi::Py_INCREF(ptr);\n PyObject { py: py, ptr: ptr }\n }\n\n \/\/\/ Creates a PyObject instance for the given FFI pointer.\n \/\/\/ This moves ownership over the pointer into the PyObject.\n \/\/\/ Returns 
None for null pointers; undefined behavior if the pointer is invalid.\n #[inline]\n pub unsafe fn from_owned_ptr_opt(py: Python<'p>, ptr: *mut ffi::PyObject) -> Option<PyObject<'p>> {\n if ptr.is_null() {\n None\n } else {\n Some(PyObject::from_owned_ptr(py, ptr))\n }\n }\n \n \/\/\/ Returns None for null pointers; undefined behavior if the pointer is invalid.\n #[inline]\n pub unsafe fn from_borrowed_ptr_opt(py: Python<'p>, ptr: *mut ffi::PyObject) -> Option<PyObject<'p>> {\n if ptr.is_null() {\n None\n } else {\n Some(PyObject::from_borrowed_ptr(py, ptr))\n }\n }\n \n \/\/\/ Transmutes an owned FFI pointer to `&PyObject`.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn borrow_from_owned_ptr<'a>(py : Python<'p>, ptr : &'a *mut ffi::PyObject) -> &'a PyObject<'p> {\n debug_assert!(!ptr.is_null() && ffi::Py_REFCNT(*ptr) > 0);\n mem::transmute(ptr)\n }\n \n \/\/\/ Transmutes a slice of owned FFI pointers to `&[PyObject]`.\n \/\/\/ Undefined behavior if the pointer is NULL or invalid.\n #[inline]\n pub unsafe fn borrow_from_owned_ptr_slice<'a>(py : Python<'p>, ptr : &'a [*mut ffi::PyObject]) -> &'a [PyObject<'p>] {\n mem::transmute(ptr)\n }\n \n \/\/\/ Gets the reference count of this python object.\n #[inline]\n pub fn get_refcnt(&self) -> usize {\n unsafe { ffi::Py_REFCNT(self.as_ptr()) as usize }\n }\n\n \/\/\/ Gets the python type object for this object's type.\n #[inline]\n pub fn get_type(&self) -> &PyType<'p> {\n unsafe {\n let t : &*mut ffi::PyTypeObject = &(*self.as_ptr()).ob_type;\n mem::transmute(t)\n }\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Causes undefined behavior if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObject::unchecked_downcast_from()`.\n #[inline]\n pub unsafe fn unchecked_cast_into<T>(self) -> T where T: PythonObject<'p> {\n PythonObject::unchecked_downcast_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Returns a python `TypeError` if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObjectWithCheckedDowncast::downcast_from()`.\n #[inline]\n pub fn cast_into<T>(self) -> Result<T, PythonObjectDowncastError<'p>> where T: PythonObjectWithCheckedDowncast<'p> {\n PythonObjectWithCheckedDowncast::downcast_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Causes undefined behavior if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObject::unchecked_downcast_borrow_from()`.\n #[inline]\n pub unsafe fn unchecked_cast_as<'s, T>(&'s self) -> &'s T where T: PythonObject<'p> {\n PythonObject::unchecked_downcast_borrow_from(self)\n }\n \n \/\/\/ Casts the PyObject to a concrete python object type.\n \/\/\/ Returns a python `TypeError` if the object is not of the expected type.\n \/\/\/ This is a wrapper function around `PythonObjectWithCheckedDowncast::downcast_borrow_from()`.\n #[inline]\n pub fn cast_as<'s, T>(&'s self) -> Result<&'s T, PythonObjectDowncastError<'p>> where T: PythonObjectWithCheckedDowncast<'p> {\n PythonObjectWithCheckedDowncast::downcast_borrow_from(self)\n }\n \n \/\/\/ Extracts some type from the python object.\n \/\/\/ This is a wrapper function around `FromPyObject::from_py_object()`.\n #[inline]\n pub fn extract<'s, T>(&'s self) -> Result<T, PyErr<'p>> where T: ::conversion::FromPyObject<'p, 's> {\n ::conversion::FromPyObject::from_py_object(self)\n }\n}\n\n\/\/\/ 
PyObject implements the `==` operator using reference equality:\n\/\/\/ `obj1 == obj2` in rust is equivalent to `obj1 is obj2` in python.\nimpl <'p> PartialEq for PyObject<'p> {\n #[inline]\n fn eq(&self, o : &PyObject<'p>) -> bool {\n self.ptr == o.ptr\n }\n}\n\n\/\/\/ PyObject implements the `==` operator using reference equality:\n\/\/\/ `obj1 == obj2` in rust is equivalent to `obj1 is obj2` in python.\nimpl <'p> Eq for PyObject<'p> { }\n\n\n#[test]\nfn test_sizeof() {\n \/\/ should be a static_assert, but size_of is not a compile-time const\n \/\/ these are necessary for the transmutes in this module\n assert_eq!(mem::size_of::<PyObject>(), mem::size_of::<*mut ffi::PyObject>());\n assert_eq!(mem::size_of::<PyType>(), mem::size_of::<*mut ffi::PyTypeObject>());\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Trivial copy propagation pass.\n\/\/!\n\/\/! This uses def-use analysis to remove values that have exactly one def and one use, which must\n\/\/! be an assignment.\n\/\/!\n\/\/! To give an example, we look for patterns that look like:\n\/\/!\n\/\/! DEST = SRC\n\/\/! ...\n\/\/! USE(DEST)\n\/\/!\n\/\/! where `DEST` and `SRC` are both locals of some form. We replace that with:\n\/\/!\n\/\/! NOP\n\/\/! ...\n\/\/! USE(SRC)\n\/\/!\n\/\/! The assignment `DEST = SRC` must be (a) the only mutation of `DEST` and (b) the only\n\/\/! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the\n\/\/! 
future.\n\nuse rustc::hir;\nuse rustc::mir::{Constant, Local, LocalKind, Location, Place, Mir, Operand, Rvalue, StatementKind};\nuse rustc::mir::visit::MutVisitor;\nuse rustc::ty::TyCtxt;\nuse transform::{MirPass, MirSource};\nuse util::def_use::DefUseAnalysis;\n\npub struct CopyPropagation;\n\nimpl MirPass for CopyPropagation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n source: MirSource,\n mir: &mut Mir<'tcx>) {\n \/\/ Don't run on constant MIR, because trans might not be able to\n \/\/ evaluate the modified MIR.\n \/\/ FIXME(eddyb) Remove check after miri is merged.\n let id = tcx.hir.as_local_node_id(source.def_id).unwrap();\n match (tcx.hir.body_owner_kind(id), source.promoted) {\n (_, Some(_)) |\n (hir::BodyOwnerKind::Const, _) |\n (hir::BodyOwnerKind::Static(_), _) => return,\n\n (hir::BodyOwnerKind::Fn, _) => {\n if tcx.is_const_fn(source.def_id) {\n \/\/ Don't run on const functions, as, again, trans might not be able to evaluate\n \/\/ the optimized IR.\n return\n }\n }\n }\n\n \/\/ We only run when the MIR optimization level is > 1.\n \/\/ This avoids a slow pass, and messing up debug info.\n if tcx.sess.opts.debugging_opts.mir_opt_level <= 1 {\n return;\n }\n\n let mut def_use_analysis = DefUseAnalysis::new(mir);\n loop {\n def_use_analysis.analyze(mir);\n\n if eliminate_self_assignments(mir, &def_use_analysis) {\n def_use_analysis.analyze(mir);\n }\n\n let mut changed = false;\n for dest_local in mir.local_decls.indices() {\n debug!(\"Considering destination local: {:?}\", dest_local);\n\n let action;\n let location;\n {\n \/\/ The destination must have exactly one def.\n let dest_use_info = def_use_analysis.local_info(dest_local);\n let dest_def_count = dest_use_info.def_count_not_including_drop();\n if dest_def_count == 0 {\n debug!(\" Can't copy-propagate local: dest {:?} undefined\",\n dest_local);\n continue\n }\n if dest_def_count > 1 {\n debug!(\" Can't copy-propagate local: dest {:?} defined {} times\",\n dest_local,\n dest_use_info.def_count());\n continue\n }\n if dest_use_info.use_count() == 0 {\n debug!(\" Can't copy-propagate local: dest {:?} unused\",\n dest_local);\n continue\n }\n \/\/ Conservatively gives up if the dest is an argument,\n \/\/ because there may be uses of the original argument value.\n if mir.local_kind(dest_local) == LocalKind::Arg {\n debug!(\" Can't copy-propagate local: dest {:?} (argument)\",\n dest_local);\n continue;\n }\n let dest_place_def = dest_use_info.defs_not_including_drop().next().unwrap();\n location = dest_place_def.location;\n\n let basic_block = &mir[location.block];\n let statement_index = location.statement_index;\n let statement = match basic_block.statements.get(statement_index) {\n Some(statement) => statement,\n None => {\n debug!(\" Can't copy-propagate local: used in terminator\");\n continue\n }\n };\n\n \/\/ That use of the source must be an assignment.\n match statement.kind {\n StatementKind::Assign(Place::Local(local), Rvalue::Use(ref operand)) if\n local == dest_local => {\n let maybe_action = match *operand {\n Operand::Copy(ref src_place) |\n Operand::Move(ref src_place) => {\n Action::local_copy(&mir, &def_use_analysis, src_place)\n }\n Operand::Constant(ref src_constant) => {\n Action::constant(src_constant)\n }\n };\n match maybe_action {\n Some(this_action) => action = this_action,\n None => continue,\n }\n }\n _ => {\n debug!(\" Can't copy-propagate local: source use is not an \\\n assignment\");\n continue\n }\n }\n }\n\n changed = action.perform(mir, &def_use_analysis, dest_local, 
location) || changed;\n \/\/ FIXME(pcwalton): Update the use-def chains to delete the instructions instead of\n \/\/ regenerating the chains.\n break\n }\n if !changed {\n break\n }\n }\n }\n}\n\nfn eliminate_self_assignments<'tcx>(\n mir: &mut Mir<'tcx>,\n def_use_analysis: &DefUseAnalysis<'tcx>,\n) -> bool {\n let mut changed = false;\n\n for dest_local in mir.local_decls.indices() {\n let dest_use_info = def_use_analysis.local_info(dest_local);\n\n for def in dest_use_info.defs_not_including_drop() {\n let location = def.location;\n if let Some(stmt) = mir[location.block].statements.get(location.statement_index) {\n match stmt.kind {\n StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Copy(Place::Local(src_local))),\n ) |\n StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Move(Place::Local(src_local))),\n ) if local == dest_local && dest_local == src_local => {}\n _ => {\n continue;\n }\n }\n } else {\n continue;\n }\n debug!(\"Deleting a self-assignment for {:?}\", dest_local);\n mir.make_statement_nop(location);\n changed = true;\n }\n }\n\n changed\n}\n\nenum Action<'tcx> {\n PropagateLocalCopy(Local),\n PropagateConstant(Constant<'tcx>),\n}\n\nimpl<'tcx> Action<'tcx> {\n fn local_copy(mir: &Mir<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>)\n -> Option<Action<'tcx>> {\n \/\/ The source must be a local.\n let src_local = if let Place::Local(local) = *src_place {\n local\n } else {\n debug!(\" Can't copy-propagate local: source is not a local\");\n return None;\n };\n\n \/\/ We're trying to copy propagate a local.\n \/\/ There must be exactly one use of the source used in a statement (not in a terminator).\n let src_use_info = def_use_analysis.local_info(src_local);\n let src_use_count = src_use_info.use_count();\n if src_use_count == 0 {\n debug!(\" Can't copy-propagate local: no uses\");\n return None\n }\n if src_use_count != 1 {\n debug!(\" Can't copy-propagate local: {} uses\", src_use_info.use_count());\n return None\n }\n\n \/\/ Verify that the source doesn't change in between. This is done conservatively for now,\n \/\/ by ensuring that the source has exactly one mutation. The goal is to prevent things\n \/\/ like:\n \/\/\n \/\/ DEST = SRC;\n \/\/ SRC = X;\n \/\/ USE(DEST);\n \/\/\n \/\/ From being misoptimized into:\n \/\/\n \/\/ SRC = X;\n \/\/ USE(SRC);\n let src_def_count = src_use_info.def_count_not_including_drop();\n \/\/ allow function arguments to be propagated\n let is_arg = mir.local_kind(src_local) == LocalKind::Arg;\n if (is_arg && src_def_count != 0) || (!is_arg && src_def_count != 1) {\n debug!(\n \" Can't copy-propagate local: {} defs of src{}\",\n src_def_count,\n if is_arg { \" (argument)\" } else { \"\" },\n );\n return None\n }\n\n Some(Action::PropagateLocalCopy(src_local))\n }\n\n fn constant(src_constant: &Constant<'tcx>) -> Option<Action<'tcx>> {\n Some(Action::PropagateConstant((*src_constant).clone()))\n }\n\n fn perform(self,\n mir: &mut Mir<'tcx>,\n def_use_analysis: &DefUseAnalysis<'tcx>,\n dest_local: Local,\n location: Location)\n -> bool {\n match self {\n Action::PropagateLocalCopy(src_local) => {\n \/\/ Eliminate the destination and the assignment.\n \/\/\n \/\/ First, remove all markers.\n \/\/\n \/\/ FIXME(pcwalton): Don't do this. 
Merge live ranges instead.\n debug!(\" Replacing all uses of {:?} with {:?} (local)\",\n dest_local,\n src_local);\n for place_use in &def_use_analysis.local_info(dest_local).defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n for place_use in &def_use_analysis.local_info(src_local).defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n\n \/\/ Replace all uses of the destination local with the source local.\n def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_local);\n\n \/\/ Finally, zap the now-useless assignment instruction.\n debug!(\" Deleting assignment\");\n mir.make_statement_nop(location);\n\n true\n }\n Action::PropagateConstant(src_constant) => {\n \/\/ First, remove all markers.\n \/\/\n \/\/ FIXME(pcwalton): Don't do this. Merge live ranges instead.\n debug!(\" Replacing all uses of {:?} with {:?} (constant)\",\n dest_local,\n src_constant);\n let dest_local_info = def_use_analysis.local_info(dest_local);\n for place_use in &dest_local_info.defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n\n \/\/ Replace all uses of the destination local with the constant.\n let mut visitor = ConstantPropagationVisitor::new(dest_local,\n src_constant);\n for dest_place_use in &dest_local_info.defs_and_uses {\n visitor.visit_location(mir, dest_place_use.location)\n }\n\n \/\/ Zap the assignment instruction if we eliminated all the uses. We won't have been\n \/\/ able to do that if the destination was used in a projection, because projections\n \/\/ must have places on their LHS.\n let use_count = dest_local_info.use_count();\n if visitor.uses_replaced == use_count {\n debug!(\" {} of {} use(s) replaced; deleting assignment\",\n visitor.uses_replaced,\n use_count);\n mir.make_statement_nop(location);\n true\n } else if visitor.uses_replaced == 0 {\n debug!(\" No uses replaced; not deleting assignment\");\n false\n } else {\n debug!(\" {} of {} use(s) replaced; not deleting assignment\",\n visitor.uses_replaced,\n use_count);\n true\n }\n }\n }\n }\n}\n\nstruct ConstantPropagationVisitor<'tcx> {\n dest_local: Local,\n constant: Constant<'tcx>,\n uses_replaced: usize,\n}\n\nimpl<'tcx> ConstantPropagationVisitor<'tcx> {\n fn new(dest_local: Local, constant: Constant<'tcx>)\n -> ConstantPropagationVisitor<'tcx> {\n ConstantPropagationVisitor {\n dest_local,\n constant,\n uses_replaced: 0,\n }\n }\n}\n\nimpl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> {\n fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {\n self.super_operand(operand, location);\n\n match *operand {\n Operand::Copy(Place::Local(local)) |\n Operand::Move(Place::Local(local)) if local == self.dest_local => {}\n _ => return,\n }\n\n *operand = Operand::Constant(box self.constant.clone());\n self.uses_replaced += 1\n }\n}\n<commit_msg>mir: Run copy propagation on constant mir<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Trivial copy propagation pass.\n\/\/!\n\/\/! This uses def-use analysis to remove values that have exactly one def and one use, which must\n\/\/! be an assignment.\n\/\/!\n\/\/! To give an example, we look for patterns that look like:\n\/\/!\n\/\/! DEST = SRC\n\/\/! ...\n\/\/! USE(DEST)\n\/\/!\n\/\/! where `DEST` and `SRC` are both locals of some form. We replace that with:\n\/\/!\n\/\/! NOP\n\/\/! ...\n\/\/! USE(SRC)\n\/\/!\n\/\/! The assignment `DEST = SRC` must be (a) the only mutation of `DEST` and (b) the only\n\/\/! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the\n\/\/! future.\n\nuse rustc::mir::{Constant, Local, LocalKind, Location, Place, Mir, Operand, Rvalue, StatementKind};\nuse rustc::mir::visit::MutVisitor;\nuse rustc::ty::TyCtxt;\nuse transform::{MirPass, MirSource};\nuse util::def_use::DefUseAnalysis;\n\npub struct CopyPropagation;\n\nimpl MirPass for CopyPropagation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _source: MirSource,\n mir: &mut Mir<'tcx>) {\n \/\/ We only run when the MIR optimization level is > 1.\n \/\/ This avoids a slow pass, and messing up debug info.\n if tcx.sess.opts.debugging_opts.mir_opt_level <= 1 {\n return;\n }\n\n let mut def_use_analysis = DefUseAnalysis::new(mir);\n loop {\n def_use_analysis.analyze(mir);\n\n if eliminate_self_assignments(mir, &def_use_analysis) {\n def_use_analysis.analyze(mir);\n }\n\n let mut changed = false;\n for dest_local in mir.local_decls.indices() {\n debug!(\"Considering destination local: {:?}\", dest_local);\n\n let action;\n let location;\n {\n \/\/ The destination must have exactly one def.\n let dest_use_info = def_use_analysis.local_info(dest_local);\n let dest_def_count = dest_use_info.def_count_not_including_drop();\n if dest_def_count == 0 {\n debug!(\" Can't copy-propagate local: dest {:?} undefined\",\n dest_local);\n continue\n }\n if dest_def_count > 1 {\n debug!(\" Can't copy-propagate local: dest {:?} defined {} times\",\n dest_local,\n dest_use_info.def_count());\n continue\n }\n if dest_use_info.use_count() == 0 {\n debug!(\" Can't copy-propagate local: dest {:?} unused\",\n dest_local);\n continue\n }\n \/\/ Conservatively gives up if the dest is an argument,\n \/\/ because there may be uses of the original argument value.\n if mir.local_kind(dest_local) == LocalKind::Arg {\n debug!(\" Can't copy-propagate local: dest {:?} (argument)\",\n dest_local);\n continue;\n }\n let dest_place_def = dest_use_info.defs_not_including_drop().next().unwrap();\n location = dest_place_def.location;\n\n let basic_block = &mir[location.block];\n let statement_index = location.statement_index;\n let statement = match basic_block.statements.get(statement_index) {\n Some(statement) => statement,\n None => {\n debug!(\" Can't copy-propagate local: used in terminator\");\n continue\n }\n };\n\n \/\/ That use of the source must be an assignment.\n match statement.kind {\n StatementKind::Assign(Place::Local(local), Rvalue::Use(ref operand)) if\n local == dest_local => {\n let maybe_action = match *operand {\n Operand::Copy(ref src_place) |\n Operand::Move(ref src_place) => {\n Action::local_copy(&mir, &def_use_analysis, src_place)\n }\n Operand::Constant(ref src_constant) => {\n Action::constant(src_constant)\n }\n };\n match maybe_action {\n Some(this_action) => action = this_action,\n None => continue,\n }\n }\n _ => {\n debug!(\" Can't copy-propagate local: 
source use is not an \\\n assignment\");\n continue\n }\n }\n }\n\n changed = action.perform(mir, &def_use_analysis, dest_local, location) || changed;\n \/\/ FIXME(pcwalton): Update the use-def chains to delete the instructions instead of\n \/\/ regenerating the chains.\n break\n }\n if !changed {\n break\n }\n }\n }\n}\n\nfn eliminate_self_assignments<'tcx>(\n mir: &mut Mir<'tcx>,\n def_use_analysis: &DefUseAnalysis<'tcx>,\n) -> bool {\n let mut changed = false;\n\n for dest_local in mir.local_decls.indices() {\n let dest_use_info = def_use_analysis.local_info(dest_local);\n\n for def in dest_use_info.defs_not_including_drop() {\n let location = def.location;\n if let Some(stmt) = mir[location.block].statements.get(location.statement_index) {\n match stmt.kind {\n StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Copy(Place::Local(src_local))),\n ) |\n StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Move(Place::Local(src_local))),\n ) if local == dest_local && dest_local == src_local => {}\n _ => {\n continue;\n }\n }\n } else {\n continue;\n }\n debug!(\"Deleting a self-assignment for {:?}\", dest_local);\n mir.make_statement_nop(location);\n changed = true;\n }\n }\n\n changed\n}\n\nenum Action<'tcx> {\n PropagateLocalCopy(Local),\n PropagateConstant(Constant<'tcx>),\n}\n\nimpl<'tcx> Action<'tcx> {\n fn local_copy(mir: &Mir<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>)\n -> Option<Action<'tcx>> {\n \/\/ The source must be a local.\n let src_local = if let Place::Local(local) = *src_place {\n local\n } else {\n debug!(\" Can't copy-propagate local: source is not a local\");\n return None;\n };\n\n \/\/ We're trying to copy propagate a local.\n \/\/ There must be exactly one use of the source used in a statement (not in a terminator).\n let src_use_info = def_use_analysis.local_info(src_local);\n let src_use_count = src_use_info.use_count();\n if src_use_count == 0 {\n debug!(\" Can't copy-propagate local: no uses\");\n return None\n }\n if src_use_count != 1 {\n debug!(\" Can't copy-propagate local: {} uses\", src_use_info.use_count());\n return None\n }\n\n \/\/ Verify that the source doesn't change in between. This is done conservatively for now,\n \/\/ by ensuring that the source has exactly one mutation. The goal is to prevent things\n \/\/ like:\n \/\/\n \/\/ DEST = SRC;\n \/\/ SRC = X;\n \/\/ USE(DEST);\n \/\/\n \/\/ From being misoptimized into:\n \/\/\n \/\/ SRC = X;\n \/\/ USE(SRC);\n let src_def_count = src_use_info.def_count_not_including_drop();\n \/\/ allow function arguments to be propagated\n let is_arg = mir.local_kind(src_local) == LocalKind::Arg;\n if (is_arg && src_def_count != 0) || (!is_arg && src_def_count != 1) {\n debug!(\n \" Can't copy-propagate local: {} defs of src{}\",\n src_def_count,\n if is_arg { \" (argument)\" } else { \"\" },\n );\n return None\n }\n\n Some(Action::PropagateLocalCopy(src_local))\n }\n\n fn constant(src_constant: &Constant<'tcx>) -> Option<Action<'tcx>> {\n Some(Action::PropagateConstant((*src_constant).clone()))\n }\n\n fn perform(self,\n mir: &mut Mir<'tcx>,\n def_use_analysis: &DefUseAnalysis<'tcx>,\n dest_local: Local,\n location: Location)\n -> bool {\n match self {\n Action::PropagateLocalCopy(src_local) => {\n \/\/ Eliminate the destination and the assignment.\n \/\/\n \/\/ First, remove all markers.\n \/\/\n \/\/ FIXME(pcwalton): Don't do this. 
Merge live ranges instead.\n debug!(\" Replacing all uses of {:?} with {:?} (local)\",\n dest_local,\n src_local);\n for place_use in &def_use_analysis.local_info(dest_local).defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n for place_use in &def_use_analysis.local_info(src_local).defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n\n \/\/ Replace all uses of the destination local with the source local.\n def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_local);\n\n \/\/ Finally, zap the now-useless assignment instruction.\n debug!(\" Deleting assignment\");\n mir.make_statement_nop(location);\n\n true\n }\n Action::PropagateConstant(src_constant) => {\n \/\/ First, remove all markers.\n \/\/\n \/\/ FIXME(pcwalton): Don't do this. Merge live ranges instead.\n debug!(\" Replacing all uses of {:?} with {:?} (constant)\",\n dest_local,\n src_constant);\n let dest_local_info = def_use_analysis.local_info(dest_local);\n for place_use in &dest_local_info.defs_and_uses {\n if place_use.context.is_storage_marker() {\n mir.make_statement_nop(place_use.location)\n }\n }\n\n \/\/ Replace all uses of the destination local with the constant.\n let mut visitor = ConstantPropagationVisitor::new(dest_local,\n src_constant);\n for dest_place_use in &dest_local_info.defs_and_uses {\n visitor.visit_location(mir, dest_place_use.location)\n }\n\n \/\/ Zap the assignment instruction if we eliminated all the uses. We won't have been\n \/\/ able to do that if the destination was used in a projection, because projections\n \/\/ must have places on their LHS.\n let use_count = dest_local_info.use_count();\n if visitor.uses_replaced == use_count {\n debug!(\" {} of {} use(s) replaced; deleting assignment\",\n visitor.uses_replaced,\n use_count);\n mir.make_statement_nop(location);\n true\n } else if visitor.uses_replaced == 0 {\n debug!(\" No uses replaced; not deleting assignment\");\n false\n } else {\n debug!(\" {} of {} use(s) replaced; not deleting assignment\",\n visitor.uses_replaced,\n use_count);\n true\n }\n }\n }\n }\n}\n\nstruct ConstantPropagationVisitor<'tcx> {\n dest_local: Local,\n constant: Constant<'tcx>,\n uses_replaced: usize,\n}\n\nimpl<'tcx> ConstantPropagationVisitor<'tcx> {\n fn new(dest_local: Local, constant: Constant<'tcx>)\n -> ConstantPropagationVisitor<'tcx> {\n ConstantPropagationVisitor {\n dest_local,\n constant,\n uses_replaced: 0,\n }\n }\n}\n\nimpl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> {\n fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {\n self.super_operand(operand, location);\n\n match *operand {\n Operand::Copy(Place::Local(local)) |\n Operand::Move(Place::Local(local)) if local == self.dest_local => {}\n _ => return,\n }\n\n *operand = Operand::Constant(box self.constant.clone());\n self.uses_replaced += 1\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n\nextern crate syntax;\nextern crate rustdoc;\nextern crate serialize as rustc_serialize;\n\nuse std::collections::BTreeMap;\nuse std::env;\nuse std::error::Error;\nuse std::fs::{read_dir, File};\nuse std::io::{Read, Write};\nuse std::path::Path;\nuse std::path::PathBuf;\n\nuse syntax::diagnostics::metadata::{get_metadata_dir, ErrorMetadataMap, ErrorMetadata};\n\nuse rustdoc::html::markdown::{Markdown, PLAYGROUND};\nuse rustc_serialize::json;\n\nenum OutputFormat {\n HTML(HTMLFormatter),\n Markdown(MarkdownFormatter),\n Unknown(String),\n}\n\nimpl OutputFormat {\n fn from(format: &str) -> OutputFormat {\n match &*format.to_lowercase() {\n \"html\" => OutputFormat::HTML(HTMLFormatter),\n \"markdown\" => OutputFormat::Markdown(MarkdownFormatter),\n s => OutputFormat::Unknown(s.to_owned()),\n }\n }\n}\n\ntrait Formatter {\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>>;\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>>;\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>>;\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>>;\n}\n\nstruct HTMLFormatter;\nstruct MarkdownFormatter;\n\nimpl Formatter for HTMLFormatter {\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, r##\"<!DOCTYPE html>\n<html>\n<head>\n<title>Rust Compiler Error Index<\/title>\n<meta charset=\"utf-8\">\n<!-- Include rust.css after main.css so its rules take priority. -->\n<link rel=\"stylesheet\" type=\"text\/css\" href=\"main.css\"\/>\n<link rel=\"stylesheet\" type=\"text\/css\" href=\"rust.css\"\/>\n<style>\n.error-undescribed {{\n display: none;\n}}\n<\/style>\n<\/head>\n<body>\n\"##)?;\n Ok(())\n }\n\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"<h1>Rust Compiler Error Index<\/h1>\\n\")?;\n Ok(())\n }\n\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>> {\n \/\/ Enclose each error in a div so they can be shown\/hidden en masse.\n let desc_desc = match info.description {\n Some(_) => \"error-described\",\n None => \"error-undescribed\",\n };\n let use_desc = match info.use_site {\n Some(_) => \"error-used\",\n None => \"error-unused\",\n };\n write!(output, \"<div class=\\\"{} {}\\\">\", desc_desc, use_desc)?;\n\n \/\/ Error title (with self-link).\n write!(output,\n \"<h2 id=\\\"{0}\\\" class=\\\"section-header\\\"><a href=\\\"#{0}\\\">{0}<\/a><\/h2>\\n\",\n err_code)?;\n\n \/\/ Description rendered as markdown.\n match info.description {\n Some(ref desc) => write!(output, \"{}\", Markdown(desc))?,\n None => write!(output, \"<p>No description.<\/p>\\n\")?,\n }\n\n write!(output, \"<\/div>\\n\")?;\n Ok(())\n }\n\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"<\/body>\\n<\/html>\")?;\n Ok(())\n }\n}\n\nimpl Formatter for MarkdownFormatter {\n #[allow(unused_variables)]\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>> {\n Ok(())\n }\n\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"# Rust Compiler Error Index\\n\")?;\n Ok(())\n }\n\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>> {\n Ok(match info.description {\n Some(ref desc) => write!(output, \"## {}\\n{}\\n\", err_code, desc)?,\n None => (),\n })\n }\n\n 
#[allow(unused_variables)]\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>> {\n Ok(())\n }\n}\n\n\/\/\/ Load all the metadata files from `metadata_dir` into an in-memory map.\nfn load_all_errors(metadata_dir: &Path) -> Result<ErrorMetadataMap, Box<Error>> {\n let mut all_errors = BTreeMap::new();\n\n for entry in read_dir(metadata_dir)? {\n let path = entry?.path();\n\n let mut metadata_str = String::new();\n File::open(&path).and_then(|mut f| f.read_to_string(&mut metadata_str))?;\n\n let some_errors: ErrorMetadataMap = json::decode(&metadata_str)?;\n\n for (err_code, info) in some_errors {\n all_errors.insert(err_code, info);\n }\n }\n\n Ok(all_errors)\n}\n\n\/\/\/ Output an HTML page for the errors in `err_map` to `output_path`.\nfn render_error_page<T: Formatter>(err_map: &ErrorMetadataMap, output_path: &Path,\n formatter: T) -> Result<(), Box<Error>> {\n let mut output_file = File::create(output_path)?;\n\n formatter.header(&mut output_file)?;\n formatter.title(&mut output_file)?;\n\n for (err_code, info) in err_map {\n formatter.error_code_block(&mut output_file, info, err_code)?;\n }\n\n formatter.footer(&mut output_file)\n}\n\nfn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box<Error>> {\n let build_arch = env::var(\"CFG_BUILD\")?;\n let metadata_dir = get_metadata_dir(&build_arch);\n let err_map = load_all_errors(&metadata_dir)?;\n match format {\n OutputFormat::Unknown(s) => panic!(\"Unknown output format: {}\", s),\n OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?,\n OutputFormat::Markdown(m) => render_error_page(&err_map, dst, m)?,\n }\n Ok(())\n}\n\nfn parse_args() -> (OutputFormat, PathBuf) {\n let mut args = env::args().skip(1);\n let format = args.next().map(|a| OutputFormat::from(&a))\n .unwrap_or(OutputFormat::from(\"html\"));\n let dst = args.next().map(PathBuf::from).unwrap_or_else(|| {\n match format {\n OutputFormat::HTML(..) => PathBuf::from(\"doc\/error-index.html\"),\n OutputFormat::Markdown(..) => PathBuf::from(\"doc\/error-index.md\"),\n OutputFormat::Unknown(..) => PathBuf::from(\"<nul>\"),\n }\n });\n (format, dst)\n}\n\nfn main() {\n PLAYGROUND.with(|slot| {\n *slot.borrow_mut() = Some((None, String::from(\"https:\/\/play.rust-lang.org\/\")));\n });\n let (format, dst) = parse_args();\n if let Err(e) = main_with_result(format, &dst) {\n panic!(\"{}\", e.description());\n }\n}\n<commit_msg>Set hoedown to generate error index<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n\nextern crate syntax;\nextern crate rustdoc;\nextern crate serialize as rustc_serialize;\n\nuse std::collections::BTreeMap;\nuse std::env;\nuse std::error::Error;\nuse std::fs::{read_dir, File};\nuse std::io::{Read, Write};\nuse std::path::Path;\nuse std::path::PathBuf;\n\nuse syntax::diagnostics::metadata::{get_metadata_dir, ErrorMetadataMap, ErrorMetadata};\n\nuse rustdoc::html::markdown::{Markdown, PLAYGROUND, RenderType};\nuse rustc_serialize::json;\n\nenum OutputFormat {\n HTML(HTMLFormatter),\n Markdown(MarkdownFormatter),\n Unknown(String),\n}\n\nimpl OutputFormat {\n fn from(format: &str) -> OutputFormat {\n match &*format.to_lowercase() {\n \"html\" => OutputFormat::HTML(HTMLFormatter),\n \"markdown\" => OutputFormat::Markdown(MarkdownFormatter),\n s => OutputFormat::Unknown(s.to_owned()),\n }\n }\n}\n\ntrait Formatter {\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>>;\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>>;\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>>;\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>>;\n}\n\nstruct HTMLFormatter;\nstruct MarkdownFormatter;\n\nimpl Formatter for HTMLFormatter {\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, r##\"<!DOCTYPE html>\n<html>\n<head>\n<title>Rust Compiler Error Index<\/title>\n<meta charset=\"utf-8\">\n<!-- Include rust.css after main.css so its rules take priority. -->\n<link rel=\"stylesheet\" type=\"text\/css\" href=\"main.css\"\/>\n<link rel=\"stylesheet\" type=\"text\/css\" href=\"rust.css\"\/>\n<style>\n.error-undescribed {{\n display: none;\n}}\n<\/style>\n<\/head>\n<body>\n\"##)?;\n Ok(())\n }\n\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"<h1>Rust Compiler Error Index<\/h1>\\n\")?;\n Ok(())\n }\n\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>> {\n \/\/ Enclose each error in a div so they can be shown\/hidden en masse.\n let desc_desc = match info.description {\n Some(_) => \"error-described\",\n None => \"error-undescribed\",\n };\n let use_desc = match info.use_site {\n Some(_) => \"error-used\",\n None => \"error-unused\",\n };\n write!(output, \"<div class=\\\"{} {}\\\">\", desc_desc, use_desc)?;\n\n \/\/ Error title (with self-link).\n write!(output,\n \"<h2 id=\\\"{0}\\\" class=\\\"section-header\\\"><a href=\\\"#{0}\\\">{0}<\/a><\/h2>\\n\",\n err_code)?;\n\n \/\/ Description rendered as markdown.\n match info.description {\n Some(ref desc) => write!(output, \"{}\", Markdown(desc, RenderType::Hoedown))?,\n None => write!(output, \"<p>No description.<\/p>\\n\")?,\n }\n\n write!(output, \"<\/div>\\n\")?;\n Ok(())\n }\n\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"<\/body>\\n<\/html>\")?;\n Ok(())\n }\n}\n\nimpl Formatter for MarkdownFormatter {\n #[allow(unused_variables)]\n fn header(&self, output: &mut Write) -> Result<(), Box<Error>> {\n Ok(())\n }\n\n fn title(&self, output: &mut Write) -> Result<(), Box<Error>> {\n write!(output, \"# Rust Compiler Error Index\\n\")?;\n Ok(())\n }\n\n fn error_code_block(&self, output: &mut Write, info: &ErrorMetadata,\n err_code: &str) -> Result<(), Box<Error>> {\n Ok(match info.description {\n Some(ref desc) => write!(output, \"## {}\\n{}\\n\", err_code, 
desc)?,\n None => (),\n })\n }\n\n #[allow(unused_variables)]\n fn footer(&self, output: &mut Write) -> Result<(), Box<Error>> {\n Ok(())\n }\n}\n\n\/\/\/ Load all the metadata files from `metadata_dir` into an in-memory map.\nfn load_all_errors(metadata_dir: &Path) -> Result<ErrorMetadataMap, Box<Error>> {\n let mut all_errors = BTreeMap::new();\n\n for entry in read_dir(metadata_dir)? {\n let path = entry?.path();\n\n let mut metadata_str = String::new();\n File::open(&path).and_then(|mut f| f.read_to_string(&mut metadata_str))?;\n\n let some_errors: ErrorMetadataMap = json::decode(&metadata_str)?;\n\n for (err_code, info) in some_errors {\n all_errors.insert(err_code, info);\n }\n }\n\n Ok(all_errors)\n}\n\n\/\/\/ Output an HTML page for the errors in `err_map` to `output_path`.\nfn render_error_page<T: Formatter>(err_map: &ErrorMetadataMap, output_path: &Path,\n formatter: T) -> Result<(), Box<Error>> {\n let mut output_file = File::create(output_path)?;\n\n formatter.header(&mut output_file)?;\n formatter.title(&mut output_file)?;\n\n for (err_code, info) in err_map {\n formatter.error_code_block(&mut output_file, info, err_code)?;\n }\n\n formatter.footer(&mut output_file)\n}\n\nfn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box<Error>> {\n let build_arch = env::var(\"CFG_BUILD\")?;\n let metadata_dir = get_metadata_dir(&build_arch);\n let err_map = load_all_errors(&metadata_dir)?;\n match format {\n OutputFormat::Unknown(s) => panic!(\"Unknown output format: {}\", s),\n OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?,\n OutputFormat::Markdown(m) => render_error_page(&err_map, dst, m)?,\n }\n Ok(())\n}\n\nfn parse_args() -> (OutputFormat, PathBuf) {\n let mut args = env::args().skip(1);\n let format = args.next().map(|a| OutputFormat::from(&a))\n .unwrap_or(OutputFormat::from(\"html\"));\n let dst = args.next().map(PathBuf::from).unwrap_or_else(|| {\n match format {\n OutputFormat::HTML(..) => PathBuf::from(\"doc\/error-index.html\"),\n OutputFormat::Markdown(..) => PathBuf::from(\"doc\/error-index.md\"),\n OutputFormat::Unknown(..) => PathBuf::from(\"<nul>\"),\n }\n });\n (format, dst)\n}\n\nfn main() {\n PLAYGROUND.with(|slot| {\n *slot.borrow_mut() = Some((None, String::from(\"https:\/\/play.rust-lang.org\/\")));\n });\n let (format, dst) = parse_args();\n if let Err(e) = main_with_result(format, &dst) {\n panic!(\"{}\", e.description());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #29030<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[derive(Debug)]\nstruct Message<'a, P: 'a = &'a [u8]> {\n header: &'a [u8],\n payload: P,\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More solutions<commit_after>\/\/ https:\/\/leetcode.com\/problems\/evaluate-reverse-polish-notation\/\n\npub struct Solution;\n\nimpl Solution {\n pub fn eval_rpn(tokens: Vec<String>) -> i32 {\n let mut operands: Vec<i32> = Vec::new();\n for token in tokens {\n match token.as_str() {\n \"+\" => {\n let right = operands.pop().unwrap();\n let left = operands.pop().unwrap();\n operands.push(left + right);\n }\n \"-\" => {\n let right = operands.pop().unwrap();\n let left = operands.pop().unwrap();\n operands.push(left - right);\n }\n \"*\" => {\n let right = operands.pop().unwrap();\n let left = operands.pop().unwrap();\n operands.push(left * right);\n }\n \"\/\" => {\n let right = operands.pop().unwrap();\n let left = operands.pop().unwrap();\n operands.push(left \/ right);\n }\n num => {\n operands.push(num.parse().unwrap());\n }\n }\n }\n\n return *operands.last().unwrap();\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_1() {\n let rpn = [\"2\", \"1\", \"+\", \"3\", \"*\"]\n .iter()\n .map(|s| s.to_string())\n .collect();\n assert_eq!(Solution::eval_rpn(rpn), 9);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Stub out cargo-read-manifest<commit_after>#[crate_id=\"cargo-read-manifest\"];\n\nfn main() {\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add Kinesis Firehose integration tests<commit_after>#![cfg(feature = \"firehose\")]\n\nextern crate rusoto;\n\nuse rusoto::firehose::{KinesisFirehoseClient, ListDeliveryStreamsInput};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_list_delivery_streams() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = KinesisFirehoseClient::new(credentials, Region::UsEast1);\n\n let request = ListDeliveryStreamsInput::default();\n\n match client.list_delivery_streams(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true)\n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Regression test for https:\/\/github.com\/servo\/html5ever\/issues\/393\n\/\/\n\/\/ Create a dynamic atom − causing initialization of the golbal hash map −\n\/\/ in a thread that has a small stack.\n\/\/\n\/\/ This is a separate test program rather than a `#[test] fn` among others\n\/\/ to make sure that nothing else has already initialized the map in this process.\nfn main() {\n std::thread::Builder::new()\n .stack_size(50_000)\n .spawn(|| {\n string_cache::DefaultAtom::from(\"12345678\");\n })\n .unwrap()\n .join()\n .unwrap()\n}\n<commit_msg>Auto merge of #251 - notriddle:notriddle\/test-small-stack-warning, r=jdm<commit_after>\/\/ Regression test for https:\/\/github.com\/servo\/html5ever\/issues\/393\n\/\/\n\/\/ Create a dynamic atom − causing initialization of the global hash map −\n\/\/ in a thread that has a small stack.\n\/\/\n\/\/ This is a separate test program rather than a `#[test] fn` among others\n\/\/ to make sure that nothing else has already initialized the map in this process.\nfn main() {\n std::thread::Builder::new()\n .stack_size(50_000)\n .spawn(|| {\n let _atom = string_cache::DefaultAtom::from(\"12345678\");\n })\n .unwrap()\n .join()\n .unwrap()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added unit 
tests for SearchOptionsBuilder<commit_after>extern crate goji;\nextern crate serde_json;\nextern crate url;\n\nuse goji::*;\nuse std::collections::HashMap;\nuse url::form_urlencoded;\n\nmacro_rules! builder_pattern {\n ($($name:ident: ($param:ident, $value:expr, $query_param:expr, $query_value:expr),)*) => {\n $(\n #[test]\n fn $name() {\n let options = SearchOptionsBuilder::new()\n .$param($value)\n .build();\n\n let options_str = options.serialize().unwrap();\n\n let mut expected: HashMap<&str, &str> = HashMap::new();\n expected.insert($query_param, $query_value);\n\n let expected_str = form_urlencoded::Serializer::new(String::new())\n .extend_pairs(&expected)\n .finish();\n\n assert_eq!(options_str, expected_str);\n }\n )*\n }\n}\n\nbuilder_pattern! {\n build_pattern_validate: (validate, true, \"validateQuery\", \"true\"),\n build_pattern_fields: (fields, vec![\"field1\", \"field2\"], \"fields\", \"field1,field2\"),\n build_pattern_max_results: (max_results, 50, \"maxResults\", \"50\"),\n build_pattern_start_at: (start_at, 10, \"startAt\", \"10\"),\n build_pattern_type_name: (type_name, \"my_type\", \"type\", \"my_type\"),\n build_pattern_name: (name, \"my_name\", \"name\", \"my_name\"),\n build_pattern_project_key_or_id: (project_key_or_id, \"1234\", \"projectKeyOrId\", \"1234\"),\n build_pattern_expand: (expand, vec![\"expand1\", \"expand2\"], \"expand\", \"expand1,expand2\"),\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Finish arguments<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{Box, String};\nuse std::collections::VecDeque;\nuse std::ops::DerefMut;\n\nuse orbital::{Color, Point, Size, Event, KeyEvent, MouseEvent, QuitEvent};\n\nuse super::display::Display;\nuse super::scheduler;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Box<Display>,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: VecDeque<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = box Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::rgb(255, 255, 255),\n border_color: Color::rgba(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: VecDeque::new(),\n ptr: 0 as *mut Window,\n };\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*super::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option = self.events.pop_front();\n scheduler::end_no_ints(reenable);\n }\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.content.flip();\n (*super::session_ptr).redraw = true;\n (*super::session_ptr).redraw();\n 
scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display, font: usize) {\n if self.focused {\n self.border_color = Color::rgba(128, 128, 128, 192);\n } else {\n self.border_color = Color::rgba(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::rgb(0, 0, 0);\n } else {\n self.title_color = Color::rgb(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color, font);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2, self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const Color,\n Size::new(self.content.width, self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n fn on_window_decoration(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= -2 && x < self.size.width as isize + 4 && y >= -18 && y < 0\n }\n\n fn on_window_body(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= 0 && x < self.size.width as isize && y >= 0 &&\n y < self.size.height as isize\n }\n\n fn mouse_button_pressed(mouse_event: &MouseEvent) -> bool {\n mouse_event.left_button || mouse_event.middle_button || mouse_event.right_button \n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool, active_window: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch && (active_window || Window::mouse_button_pressed(&mouse_event)) {\n if self.on_window_body(mouse_event.x, mouse_event.y) {\n caught = true;\n } else if self.on_window_decoration(mouse_event.x, mouse_event.y) {\n caught = true;\n\n if mouse_event.left_button {\n if !self.last_mouse_event.left_button {\n self.dragging = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.last_mouse_event.right_button {\n self.minimized = !self.minimized;\n }\n }\n\n if mouse_event.middle_button {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(QuitEvent.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n }\n \n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n 
}\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*super::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<commit_msg>Fix for case when mouse button held down<commit_after>use std::{Box, String};\nuse std::collections::VecDeque;\nuse std::ops::DerefMut;\n\nuse orbital::{Color, Point, Size, Event, KeyEvent, MouseEvent, QuitEvent};\n\nuse super::display::Display;\nuse super::scheduler;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Box<Display>,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: VecDeque<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = box Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::rgb(255, 255, 255),\n border_color: Color::rgba(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: VecDeque::new(),\n ptr: 0 as *mut Window,\n };\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*super::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option = self.events.pop_front();\n scheduler::end_no_ints(reenable);\n }\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.content.flip();\n (*super::session_ptr).redraw = true;\n (*super::session_ptr).redraw();\n scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display, font: usize) {\n if self.focused {\n self.border_color = Color::rgba(128, 128, 128, 192);\n } else {\n self.border_color = Color::rgba(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::rgb(0, 0, 0);\n } else {\n self.title_color = Color::rgb(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color, font);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2, self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const Color,\n Size::new(self.content.width, 
self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n fn on_window_decoration(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= -2 && x < self.size.width as isize + 4 && y >= -18 && y < 0\n }\n\n fn on_window_body(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= 0 && x < self.size.width as isize && y >= 0 &&\n y < self.size.height as isize\n }\n\n fn mouse_button_pressed(mouse_event: &MouseEvent) -> bool {\n mouse_event.left_button || mouse_event.middle_button || mouse_event.right_button \n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool, active_window: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch && (active_window || (Window::mouse_button_pressed(&mouse_event) && !Window::mouse_button_pressed(&self.last_mouse_event))) {\n if self.on_window_body(mouse_event.x, mouse_event.y) {\n caught = true;\n } else if self.on_window_decoration(mouse_event.x, mouse_event.y) {\n caught = true;\n\n if mouse_event.left_button {\n if !self.last_mouse_event.left_button {\n self.dragging = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.last_mouse_event.right_button {\n self.minimized = !self.minimized;\n }\n }\n\n if mouse_event.middle_button {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(QuitEvent.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n }\n \n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*super::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue-38591<commit_after>\/\/ run-pass\n\nstruct S<T> {\n t : T,\n s : Box<S<fn(u : T)>>\n}\n\nfn f(x : S<u32>) {}\n\nfn main () {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some unit tests for bert2<commit_after>extern crate ppbert;\nextern crate num;\n\nuse ppbert::parser;\nuse ppbert::bertterm::BertTerm;\nuse ppbert::error::Result;\n\nfn p(bytes: &[u8]) -> Result<Vec<BertTerm>> {\n let mut parser = parser::Parser::new(bytes.to_vec());\n parser.parse_bert2()\n}\n\n#[test]\nfn zero_terms() {\n assert!(p(&[]).is_ok());\n}\n\n#[test]\nfn one_term() {\n \/\/ ppbert ignores the length.\n assert!(p(&[0, 131, 97, 0]).is_ok());\n assert!(p(&[0, 130, 97, 0]).is_err());\n}\n\n#[test]\nfn two_terms() {\n \/\/ ppbert ignores the length.\n assert!(p(&[0, 131, 97, 0,\n 1, 131, 97, 0]).is_ok());\n assert!(p(&[0, 130, 97, 0,\n 1, 131, 97, 0]).is_err());\n assert!(p(&[0, 131, 97, 0,\n 1, 130, 97, 0]).is_err());\n assert!(p(&[0, 130, 97, 0,\n 1, 130, 97, 0]).is_err());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for texture 
drawing<commit_after>#![feature(plugin)]\n#![feature(unboxed_closures)]\n\n#[plugin]\nextern crate glium_macros;\n\nextern crate glutin;\n\n#[macro_use]\nextern crate glium;\n\nuse std::default::Default;\nuse glium::Surface;\n\nmod support;\n\n#[test]\nfn texture_2d_draw() { \n let display = support::build_display();\n let (vb, ib) = support::build_rectangle_vb_ib(&display);\n\n let texture = glium::texture::Texture2d::new(&display, vec![\n vec![(255, 0, 0, 255), (255, 0, 0, 255)],\n vec![(255, 0, 0, 255), (255, 0, 0, 255u8)],\n ]);\n\n let program = glium::Program::from_source(&display,\n \"\n #version 110\n\n attribute vec2 position;\n\n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n \",\n \"\n #version 110\n\n uniform sampler2D texture;\n\n void main() {\n gl_FragColor = texture2D(texture, vec2(0.5, 0.5));\n }\n \",\n None).unwrap();\n\n let output = support::build_renderable_texture(&display);\n output.as_surface().clear_color(0.0, 0.0, 0.0, 0.0);\n output.as_surface().draw(&vb, &ib, &program, &uniform!{ texture: &texture },\n &Default::default()).unwrap();\n\n let data: Vec<Vec<(f32, f32, f32, f32)>> = output.read();\n for row in data.iter() {\n for pixel in row.iter() {\n assert_eq!(pixel, &(1.0, 0.0, 0.0, 1.0));\n }\n }\n\n display.assert_no_error();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>recfactored tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Hash vs PartialEq bug found by Clippy<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\npub static DEFAULT_ENTRY: &'static str = \"\\\n---\\\n[imag]\\\nlinks = []\\\nversion = \\\"0.4.0\\\"\\\n---\";\n\n\/\/\/ Generator helper macro for mock app (for testing only)\n\/\/\/\n\/\/\/ Requires the following crates in scope:\n\/\/\/\n\/\/\/ * std\n\/\/\/ * libimagrt\n\/\/\/ * clap\n\/\/\/\n#[macro_export]\nmacro_rules! make_mock_app {\n {\n app $appname:expr;\n module $module:ident;\n version $version:expr;\n } => {\n make_mock_app! 
{\n app $appname;\n module $module;\n version $version;\n with help \"This is a mocking app\";\n }\n };\n\n {\n app $appname:expr;\n modulename $module:ident;\n version $version:expr;\n with help $help:expr;\n }=> {\n mod $module {\n use clap::{App, ArgMatches};\n use libimagrt::spec::CliSpec;\n use libimagrt::runtime::Runtime;\n use libimagrt::error::RuntimeError;\n use libimagrt::configuration::{Configuration, InternalConfiguration};\n\n #[derive(Clone)]\n struct MockLinkApp<'a> {\n args: Vec<&'static str>,\n inner: App<'a, 'a>,\n }\n\n impl<'a> MockLinkApp<'a> {\n fn new(args: Vec<&'static str>) -> Self {\n MockLinkApp {\n args: args,\n inner: ::build_ui(Runtime::get_default_cli_builder($appname, $version, $help)),\n }\n }\n }\n\n impl<'a> CliSpec<'a> for MockLinkApp<'a> {\n fn name(&self) -> &str {\n self.inner.get_name()\n }\n\n fn matches(self) -> ArgMatches<'a> {\n self.inner.get_matches_from(self.args)\n }\n }\n\n impl<'a> InternalConfiguration for MockLinkApp<'a> {\n fn enable_logging(&self) -> bool {\n false\n }\n\n fn use_inmemory_fs(&self) -> bool {\n true\n }\n }\n\n pub fn generate_minimal_test_config() -> Option<Configuration> { ::toml::de::from_str(\"[store]\\nimplicit-create=true\")\n .map(Configuration::with_value)\n .ok()\n }\n\n pub fn generate_test_runtime<'a>(mut args: Vec<&'static str>) -> Result<Runtime<'a>, RuntimeError> {\n let mut cli_args = vec![\"imag-link\", \"--rtp\", \"\/tmp\"];\n\n cli_args.append(&mut args);\n\n let cli_app = MockLinkApp::new(cli_args);\n Runtime::with_configuration(cli_app, generate_minimal_test_config())\n }\n }\n };\n\n}\n\n<commit_msg>Bugfix: Fix macro to use the appname instead of hardcoded values<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\npub static DEFAULT_ENTRY: &'static str = \"\\\n---\\\n[imag]\\\nlinks = []\\\nversion = \\\"0.4.0\\\"\\\n---\";\n\n\/\/\/ Generator helper macro for mock app (for testing only)\n\/\/\/\n\/\/\/ Requires the following crates in scope:\n\/\/\/\n\/\/\/ * std\n\/\/\/ * libimagrt\n\/\/\/ * clap\n\/\/\/\n#[macro_export]\nmacro_rules! make_mock_app {\n {\n app $appname:expr;\n module $module:ident;\n version $version:expr;\n } => {\n make_mock_app! 
{\n app $appname;\n module $module;\n version $version;\n with help \"This is a mocking app\";\n }\n };\n\n {\n app $appname:expr;\n modulename $module:ident;\n version $version:expr;\n with help $help:expr;\n }=> {\n mod $module {\n use clap::{App, ArgMatches};\n use libimagrt::spec::CliSpec;\n use libimagrt::runtime::Runtime;\n use libimagrt::error::RuntimeError;\n use libimagrt::configuration::{Configuration, InternalConfiguration};\n\n #[derive(Clone)]\n struct MockLinkApp<'a> {\n args: Vec<&'static str>,\n inner: App<'a, 'a>,\n }\n\n impl<'a> MockLinkApp<'a> {\n fn new(args: Vec<&'static str>) -> Self {\n MockLinkApp {\n args: args,\n inner: ::build_ui(Runtime::get_default_cli_builder($appname, $version, $help)),\n }\n }\n }\n\n impl<'a> CliSpec<'a> for MockLinkApp<'a> {\n fn name(&self) -> &str {\n self.inner.get_name()\n }\n\n fn matches(self) -> ArgMatches<'a> {\n self.inner.get_matches_from(self.args)\n }\n }\n\n impl<'a> InternalConfiguration for MockLinkApp<'a> {\n fn enable_logging(&self) -> bool {\n false\n }\n\n fn use_inmemory_fs(&self) -> bool {\n true\n }\n }\n\n pub fn generate_minimal_test_config() -> Option<Configuration> { ::toml::de::from_str(\"[store]\\nimplicit-create=true\")\n .map(Configuration::with_value)\n .ok()\n }\n\n pub fn generate_test_runtime<'a>(mut args: Vec<&'static str>) -> Result<Runtime<'a>, RuntimeError> {\n let mut cli_args = vec![$appname, \"--rtp\", \"\/tmp\"];\n\n cli_args.append(&mut args);\n\n let cli_app = MockLinkApp::new(cli_args);\n Runtime::with_configuration(cli_app, generate_minimal_test_config())\n }\n }\n };\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>impl Deserialize for m.room.canonical_alias<commit_after><|endoftext|>"} {"text":"<commit_before>use regex::Regex;\n\nuse std::convert::Into;\nuse std::convert::From;\n\nuse std::option::Option;\n\n#[derive(Clone, Copy, Debug, PartialEq)]\npub struct KeyValue<K, V> {\n k: K,\n v: V,\n}\n\nimpl<K, V> KeyValue<K, V> {\n\n pub fn new(k: K, v: V) -> KeyValue<K, V> {\n KeyValue { k: k, v: v }\n }\n\n pub fn key(&self) -> &K {\n &self.k\n }\n\n pub fn value(&self) -> &V {\n &self.v\n }\n\n}\n\nimpl<K, V> Into<(K, V)> for KeyValue<K, V> {\n\n fn into(self) -> (K, V) {\n (self.k, self.v)\n }\n\n}\n\npub trait IntoKeyValue<K, V> {\n fn into_kv(self) -> Option<KeyValue<K, V>>;\n}\n\nimpl IntoKeyValue<String, String> for String {\n\n fn into_kv(self) -> Option<KeyValue<String, String>> {\n let key = {\n let r = \"^(?P<KEY>([^=]*))=(.*)$\";\n let r = Regex::new(r).unwrap();\n r.captures(&self[..])\n .and_then(|caps| caps.name(\"KEY\"))\n };\n\n let value = {\n let r = \"(.*)=(\\\"(?P<QVALUE>([^\\\"]*))\\\"|(?P<VALUE>(.*)))$\";\n let r = Regex::new(r).unwrap();\n r.captures(&self[..])\n .map(|caps| {\n caps.name(\"VALUE\")\n .or(caps.name(\"QVALUE\"))\n .unwrap_or(\"\")\n })\n };\n\n key.and_then(|k| {\n value.and_then(|v| {\n Some(KeyValue::new(String::from(k), String::from(v)))\n })\n })\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use super::{KeyValue, IntoKeyValue};\n\n #[test]\n fn test_single_quoted() {\n assert!(String::from(\"foo='bar'\").into_kv().is_none());\n }\n\n #[test]\n fn test_double_quoted() {\n let s = String::from(\"foo=\\\"bar\\\"\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"bar\")), s);\n }\n\n #[test]\n fn test_double_and_single_quoted() {\n assert!(String::from(\"foo=\\\"bar\\'\").into_kv().is_none());\n }\n\n #[test]\n fn test_single_and_double_quoted() {\n assert!(String::from(\"foo=\\'bar\\\"\").into_kv().is_none());\n 
}\n\n #[test]\n fn test_not_quoted() {\n let s = String::from(\"foo=bar\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"bar\")), s);\n }\n\n}\n\n\n<commit_msg>tests: Allow quotes in keys<commit_after>use regex::Regex;\n\nuse std::convert::Into;\nuse std::convert::From;\n\nuse std::option::Option;\n\n#[derive(Clone, Copy, Debug, PartialEq)]\npub struct KeyValue<K, V> {\n k: K,\n v: V,\n}\n\nimpl<K, V> KeyValue<K, V> {\n\n pub fn new(k: K, v: V) -> KeyValue<K, V> {\n KeyValue { k: k, v: v }\n }\n\n pub fn key(&self) -> &K {\n &self.k\n }\n\n pub fn value(&self) -> &V {\n &self.v\n }\n\n}\n\nimpl<K, V> Into<(K, V)> for KeyValue<K, V> {\n\n fn into(self) -> (K, V) {\n (self.k, self.v)\n }\n\n}\n\npub trait IntoKeyValue<K, V> {\n fn into_kv(self) -> Option<KeyValue<K, V>>;\n}\n\nimpl IntoKeyValue<String, String> for String {\n\n fn into_kv(self) -> Option<KeyValue<String, String>> {\n let key = {\n let r = \"^(?P<KEY>([^=]*))=(.*)$\";\n let r = Regex::new(r).unwrap();\n r.captures(&self[..])\n .and_then(|caps| caps.name(\"KEY\"))\n };\n\n let value = {\n let r = \"(.*)=(\\\"(?P<QVALUE>([^\\\"]*))\\\"|(?P<VALUE>(.*)))$\";\n let r = Regex::new(r).unwrap();\n r.captures(&self[..])\n .map(|caps| {\n caps.name(\"VALUE\")\n .or(caps.name(\"QVALUE\"))\n .unwrap_or(\"\")\n })\n };\n\n key.and_then(|k| {\n value.and_then(|v| {\n Some(KeyValue::new(String::from(k), String::from(v)))\n })\n })\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use super::{KeyValue, IntoKeyValue};\n\n #[test]\n fn test_single_quoted() {\n let s = String::from(\"foo='bar'\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"\\'bar\\'\")), s);\n }\n\n #[test]\n fn test_double_quoted() {\n let s = String::from(\"foo=\\\"bar\\\"\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"bar\")), s);\n }\n\n #[test]\n fn test_double_and_single_quoted() {\n let s = String::from(\"foo=\\\"bar\\'\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"\\\"bar\\'\")), s);\n }\n\n #[test]\n fn test_single_and_double_quoted() {\n let s = String::from(\"foo=\\'bar\\\"\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"\\'bar\\\"\")), s);\n }\n\n #[test]\n fn test_not_quoted() {\n let s = String::from(\"foo=bar\").into_kv().unwrap();\n assert_eq!(KeyValue::new(String::from(\"foo\"), String::from(\"bar\")), s);\n }\n\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added Caesar Shift<commit_after>fn caesar_shift(m:&[u8], d:int)->Vec<u8>{\n m.iter()\n .map(|x| x.to_int().unwrap())\n .map(|x| ((x + d) % 256) as u8)\n .collect::<Vec<u8>>()\n}\n\nfn main(){\n\n \/\/ Same as caesar_shift, but closure\n let cz = | m:&[u8], d:int | \n m.iter()\n .map(|x| x.to_int().unwrap())\n .map(|x| ((x + d) % 256) as u8)\n .collect::<Vec<u8>>();\n \n let input = \"abcdef\".as_bytes();\n let key =5;\n let enc_str = cz(input, key);\n let dec_str = cz(enc_str.as_slice(), -key);\n println!(\"{} -> {} -> {}\", input , enc_str, dec_str);\n \n println!(\"-------------------------\");\n println!(\"Char by char test\");\n \n for i in range(0i,256) {\n let ip = i as u8;\n let enc = cz([ip], key);\n let dec = cz(enc.as_slice(), -key);\n println!(\"{} -> {} -> {}\", ip , enc, dec);\n } \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a reduction example.<commit_after>extern crate mpi;\n\nuse mpi::traits::*;\nuse mpi::topology::Rank;\nuse mpi::collective::{self, SystemOperation};\n\nfn 
main() {\n let universe = mpi::initialize().unwrap();\n let world = universe.world();\n let rank = world.rank();\n let size = world.size();\n let root_rank = 0;\n\n let mut sum: Rank = 0;\n\n world.process_at_rank(root_rank).reduce_into(&rank, Some(&mut sum), SystemOperation::sum());\n\n if rank == root_rank {\n assert_eq!(sum, size * (size - 1) \/ 2);\n }\n\n let mut max: Rank = -1;\n\n world.all_reduce_into(&rank, &mut max, SystemOperation::max());\n assert_eq!(max, size - 1);\n\n let a: u64 = 0b0000111111110000;\n let b: u64 = 0b0011110000111100;\n\n let mut c = b;\n collective::reduce_local_into(&a, &mut c, SystemOperation::bitwise_and());\n assert_eq!(c, 0b0000110000110000);\n\n let mut d = b;\n collective::reduce_local_into(&a, &mut d, SystemOperation::bitwise_or());\n assert_eq!(d, 0b0011111111111100);\n\n let mut e = b;\n collective::reduce_local_into(&a, &mut e, SystemOperation::bitwise_xor());\n assert_eq!(e, 0b0011001111001100);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove needless references<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse eutil::fptr_is_null;\nuse libc::{c_int};\nuse std::mem;\nuse string::{cef_string_userfree_utf8_alloc,cef_string_userfree_utf8_free,cef_string_utf8_set};\nuse types::{cef_string_list_t,cef_string_t};\n\n\/\/cef_string_list\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_alloc() -> *mut cef_string_list_t {\n unsafe {\n let lt: Box<Vec<*mut cef_string_t>> = box vec!();\n mem::transmute(lt)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return 0; }\n let v: *mut Vec<*mut cef_string_t> = mem::transmute(lt);\n (*v).len() as c_int\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_append(lt: *mut cef_string_list_t, value: *const cef_string_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v: *mut Vec<*mut cef_string_t> = mem::transmute(lt);\n let cs = cef_string_userfree_utf8_alloc();\n cef_string_utf8_set(mem::transmute((*value).str), (*value).length, cs, 1);\n (*v).push(cs);\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || fptr_is_null(mem::transmute(lt)) { return 0; }\n let v: *mut Vec<*mut cef_string_t> = mem::transmute(lt);\n if index as uint > (*v).len() - 1 { return 0; }\n let cs = (*v).get(index as uint);\n cef_string_utf8_set(mem::transmute((**cs).str), (**cs).length, value, 1)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_clear(lt: *mut cef_string_list_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v: *mut Vec<*mut cef_string_t> = mem::transmute(lt);\n if (*v).len() == 0 { return; }\n let mut cs;\n while (*v).len() != 0 {\n cs = (*v).pop();\n cef_string_userfree_utf8_free(cs.unwrap());\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_free(lt: *mut cef_string_list_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);\n cef_string_list_clear(lt);\n drop(v);\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {\n unsafe {\n if 
fptr_is_null(mem::transmute(lt)) { return 0 as *mut cef_string_list_t; }\n let v: *mut Vec<*mut cef_string_t> = mem::transmute(lt);\n let lt2 = cef_string_list_alloc();\n for cs in (*v).iter() {\n cef_string_list_append(lt2, mem::transmute((*cs)));\n }\n lt2\n }\n}\n<commit_msg>embedding: add string_map_to_vec() to reduce transmute calls<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse eutil::fptr_is_null;\nuse libc::{c_int};\nuse std::mem;\nuse string::{cef_string_userfree_utf8_alloc,cef_string_userfree_utf8_free,cef_string_utf8_set};\nuse types::{cef_string_list_t,cef_string_t};\n\n\nfn string_map_to_vec(lt: *mut cef_string_list_t) -> *mut Vec<*mut cef_string_t> {\n lt as *mut Vec<*mut cef_string_t>\n}\n\n\/\/cef_string_list\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_alloc() -> *mut cef_string_list_t {\n unsafe {\n let lt: Box<Vec<*mut cef_string_t>> = box vec!();\n mem::transmute(lt)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_size(lt: *mut cef_string_list_t) -> c_int {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return 0; }\n let v = string_map_to_vec(lt);\n (*v).len() as c_int\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_append(lt: *mut cef_string_list_t, value: *const cef_string_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v = string_map_to_vec(lt);\n let cs = cef_string_userfree_utf8_alloc();\n cef_string_utf8_set(mem::transmute((*value).str), (*value).length, cs, 1);\n (*v).push(cs);\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_value(lt: *mut cef_string_list_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || fptr_is_null(mem::transmute(lt)) { return 0; }\n let v = string_map_to_vec(lt);\n if index as uint > (*v).len() - 1 { return 0; }\n let cs = (*v).get(index as uint);\n cef_string_utf8_set(mem::transmute((**cs).str), (**cs).length, value, 1)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_clear(lt: *mut cef_string_list_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v = string_map_to_vec(lt);\n if (*v).len() == 0 { return; }\n let mut cs;\n while (*v).len() != 0 {\n cs = (*v).pop();\n cef_string_userfree_utf8_free(cs.unwrap());\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_free(lt: *mut cef_string_list_t) {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return; }\n let v: Box<Vec<*mut cef_string_t>> = mem::transmute(lt);\n cef_string_list_clear(lt);\n drop(v);\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_list_copy(lt: *mut cef_string_list_t) -> *mut cef_string_list_t {\n unsafe {\n if fptr_is_null(mem::transmute(lt)) { return 0 as *mut cef_string_list_t; }\n let v = string_map_to_vec(lt);\n let lt2 = cef_string_list_alloc();\n for cs in (*v).iter() {\n cef_string_list_append(lt2, mem::transmute((*cs)));\n }\n lt2\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add failing ui test showing unsafety of fn ptr not enforced<commit_after>#[cxx::bridge]\nmod ffi {\n unsafe extern \"C++\" {\n fn f(callback: fn(p: *const u8));\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate devicemapper;\nextern crate env_logger;\nextern crate libstratis;\nextern crate log;\nextern crate loopdev;\nextern crate tempdir;\n\nmod util;\n\nuse std::fs::OpenOptions;\nuse std::io::{Seek, Write, SeekFrom};\nuse std::path::{Path, PathBuf};\n\nuse devicemapper::{Bytes, Sectors};\nuse loopdev::{LoopControl, LoopDevice};\nuse tempdir::TempDir;\n\nuse libstratis::engine::IEC;\nuse libstratis::engine::strat_engine::device::wipe_sectors;\n\nuse util::logger::init_logger;\nuse util::simple_tests::test_basic_metadata;\nuse util::simple_tests::test_empty_pool;\nuse util::simple_tests::test_force_flag_dirty;\nuse util::simple_tests::test_force_flag_stratis;\nuse util::simple_tests::test_linear_device;\nuse util::simple_tests::test_pool_blockdevs;\nuse util::simple_tests::test_setup;\nuse util::simple_tests::test_teardown;\nuse util::simple_tests::test_thinpool_device;\n\n\n\/\/\/ Setup count loop backed devices in dir.\n\/\/\/ Make sure each loop device is backed by a 1 GiB file.\n\/\/\/ Wipe the first 1 MiB of the file.\nfn get_devices(count: u8, dir: &TempDir) -> Vec<LoopDevice> {\n let lc = LoopControl::open().unwrap();\n let mut loop_devices = Vec::new();\n\n let length = Bytes(IEC::Gi);\n let wipe_length = Bytes(IEC::Mi).sectors();\n for index in 0..count {\n let subdir = TempDir::new_in(dir, &index.to_string()).unwrap();\n let path = subdir.path().join(\"store\");\n let mut f = OpenOptions::new()\n .read(true)\n .write(true)\n .create(true)\n .open(&path)\n .unwrap();\n\n \/\/ the proper way to do this is fallocate, but nix doesn't implement yet.\n \/\/ TODO: see https:\/\/github.com\/nix-rust\/nix\/issues\/596\n f.seek(SeekFrom::Start(*length)).unwrap();\n f.write(&[0]).unwrap();\n f.flush().unwrap();\n\n \/\/ Wipe 1 MiB at the beginning, as data sits around on the files.\n OpenOptions::new()\n .read(true)\n .write(true)\n .create(true)\n .open(&path)\n .unwrap();\n wipe_sectors(&path, Sectors(0), wipe_length).unwrap();\n\n\n let ld = lc.next_free().unwrap();\n ld.attach(path, 0).unwrap();\n loop_devices.push(ld);\n }\n loop_devices\n}\n\n\n\/\/\/ Set up count loopbacked devices.\n\/\/\/ Then, run the designated test.\n\/\/\/ Then, take down the loop devices.\nfn test_with_spec<F>(count: u8, test: F) -> ()\n where F: Fn(&[&Path]) -> ()\n{\n init_logger();\n let tmpdir = TempDir::new(\"stratis\").unwrap();\n let loop_devices: Vec<LoopDevice> = get_devices(count, &tmpdir);\n let device_paths: Vec<PathBuf> = loop_devices\n .iter()\n .map(|x| x.get_path().unwrap())\n .collect();\n let device_paths: Vec<&Path> = device_paths.iter().map(|x| x.as_path()).collect();\n\n test(&device_paths);\n\n for dev in loop_devices {\n dev.detach().unwrap();\n }\n\n \/\/ Explicitly delete the temporary directory with all its contents.\n \/\/ Allowing it to be deleted on drop will cause any errors to be suppressed.\n \/\/ This may alert us to some problem with the underlying infrastructure\n \/\/ of the tests.\n tmpdir.close().unwrap();\n}\n\n\n#[test]\npub fn loop_test_force_flag_stratis() {\n test_with_spec(1, test_force_flag_stratis);\n test_with_spec(2, test_force_flag_stratis);\n test_with_spec(3, test_force_flag_stratis);\n}\n\n\n#[test]\npub fn loop_test_linear_device() {\n test_with_spec(2, test_linear_device);\n test_with_spec(3, test_linear_device);\n}\n\n\n#[test]\npub fn loop_test_thinpool_device() {\n test_with_spec(3, test_thinpool_device);\n}\n\n\n#[test]\npub fn 
loop_test_pool_blockdevs() {\n test_with_spec(3, test_pool_blockdevs);\n}\n\n#[test]\npub fn loop_test_force_flag_dirty() {\n test_with_spec(3, test_force_flag_dirty);\n}\n\n#[test]\npub fn loop_test_teardown() {\n test_with_spec(2, test_teardown);\n}\n\n#[test]\npub fn loop_test_setup() {\n test_with_spec(4, test_setup);\n}\n\n#[test]\npub fn loop_test_empty_pool() {\n test_with_spec(0, test_empty_pool);\n}\n\n#[test]\npub fn loop_test_basic_metadata() {\n test_with_spec(4, test_basic_metadata);\n}\n<commit_msg>wrap LoopDevice in a struct that implements drop<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate devicemapper;\nextern crate env_logger;\nextern crate libstratis;\nextern crate log;\nextern crate loopdev;\nextern crate tempdir;\n\nmod util;\n\nuse std::fs::OpenOptions;\nuse std::io::{Seek, SeekFrom, Write};\nuse std::path::{Path, PathBuf};\n\nuse devicemapper::{Bytes, Sectors};\nuse loopdev::{LoopControl, LoopDevice};\nuse tempdir::TempDir;\n\nuse libstratis::engine::IEC;\nuse libstratis::engine::strat_engine::device::wipe_sectors;\n\nuse util::logger::init_logger;\nuse util::simple_tests::test_basic_metadata;\nuse util::simple_tests::test_empty_pool;\nuse util::simple_tests::test_force_flag_dirty;\nuse util::simple_tests::test_force_flag_stratis;\nuse util::simple_tests::test_linear_device;\nuse util::simple_tests::test_pool_blockdevs;\nuse util::simple_tests::test_setup;\nuse util::simple_tests::test_teardown;\nuse util::simple_tests::test_thinpool_device;\n\npub struct LoopTestDev {\n ld: LoopDevice,\n}\n\nimpl LoopTestDev {\n pub fn new(lc: &LoopControl, path: &Path) -> LoopTestDev {\n OpenOptions::new()\n .read(true)\n .write(true)\n .open(path)\n .unwrap();\n\n let ld = lc.next_free().unwrap();\n ld.attach(path, 0).unwrap();\n \/\/ Wipe 1 MiB at the beginning, as data sits around on the files.\n wipe_sectors(&ld.get_path().unwrap(),\n Sectors(0),\n Bytes(IEC::Mi).sectors())\n .unwrap();\n\n LoopTestDev { ld: ld }\n }\n\n fn get_path(&self) -> PathBuf {\n self.ld.get_path().unwrap()\n }\n\n pub fn detach(&self) {\n self.ld.detach().unwrap()\n }\n}\n\nimpl Drop for LoopTestDev {\n fn drop(&mut self) {\n self.detach()\n }\n}\n\n\/\/\/ Setup count loop backed devices in dir.\n\/\/\/ Make sure each loop device is backed by a 1 GiB file.\n\/\/\/ Wipe the first 1 MiB of the file.\nfn get_devices(count: u8, dir: &TempDir) -> Vec<LoopTestDev> {\n let lc = LoopControl::open().unwrap();\n let mut loop_devices = Vec::new();\n\n for index in 0..count {\n let path = dir.path().join(format!(\"store{}\", &index));\n let mut f = OpenOptions::new()\n .read(true)\n .write(true)\n .create(true)\n .open(&path)\n .unwrap();\n\n \/\/ the proper way to do this is fallocate, but nix doesn't implement yet.\n \/\/ TODO: see https:\/\/github.com\/nix-rust\/nix\/issues\/596\n f.seek(SeekFrom::Start(IEC::Gi)).unwrap();\n f.write(&[0]).unwrap();\n f.flush().unwrap();\n\n let ltd = LoopTestDev::new(&lc, &path);\n\n loop_devices.push(ltd);\n }\n loop_devices\n}\n\n\n\/\/\/ Set up count loopbacked devices.\n\/\/\/ Then, run the designated test.\n\/\/\/ Then, take down the loop devices.\nfn test_with_spec<F>(count: u8, test: F) -> ()\n where F: Fn(&[&Path]) -> ()\n{\n init_logger();\n let tmpdir = TempDir::new(\"stratis\").unwrap();\n let loop_devices: Vec<LoopTestDev> = get_devices(count, &tmpdir);\n let 
device_paths: Vec<PathBuf> = loop_devices.iter().map(|x| x.get_path()).collect();\n let device_paths: Vec<&Path> = device_paths.iter().map(|x| x.as_path()).collect();\n\n test(&device_paths);\n\n}\n\n\n#[test]\npub fn loop_test_force_flag_stratis() {\n test_with_spec(1, test_force_flag_stratis);\n test_with_spec(2, test_force_flag_stratis);\n test_with_spec(3, test_force_flag_stratis);\n}\n\n\n#[test]\npub fn loop_test_linear_device() {\n test_with_spec(2, test_linear_device);\n test_with_spec(3, test_linear_device);\n}\n\n\n#[test]\npub fn loop_test_thinpool_device() {\n test_with_spec(3, test_thinpool_device);\n}\n\n\n#[test]\npub fn loop_test_pool_blockdevs() {\n test_with_spec(3, test_pool_blockdevs);\n}\n\n#[test]\npub fn loop_test_force_flag_dirty() {\n test_with_spec(3, test_force_flag_dirty);\n}\n\n#[test]\npub fn loop_test_teardown() {\n test_with_spec(2, test_teardown);\n}\n\n#[test]\npub fn loop_test_setup() {\n test_with_spec(4, test_setup);\n}\n\n#[test]\npub fn loop_test_empty_pool() {\n test_with_spec(0, test_empty_pool);\n}\n\n#[test]\npub fn loop_test_basic_metadata() {\n test_with_spec(4, test_basic_metadata);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Operations on manifest files.\n\nuse std::error::Error;\nuse std::fmt::{self, Display, Formatter};\nuse std::io::{self, BufRead};\nuse std::num::ParseIntError;\nuse std::path::Path;\nuse std::str::{self, FromStr};\nuse std::string::FromUtf8Error;\nuse std::usize;\n\nuse rawpath::RawPath;\n\n\n\/\/\/ Manifest file info.\npub struct Manifest {\n hostname: String,\n local_dir: RawPath,\n volumes: Vec<Option<Volume>>,\n}\n\n\/\/\/ Volume info.\npub struct Volume {\n start_path: PathBlock,\n end_path: PathBlock,\n hash_type: String,\n hash: Vec<u8>,\n}\n\n\/\/\/ wip\n#[derive(Debug)]\npub enum ParseError {\n \/\/\/ wip\n Io(io::Error),\n \/\/\/ wip\n MissingKeyword(String),\n \/\/\/ wip\n ParseInt(ParseIntError),\n \/\/\/ wip\n Utf8(FromUtf8Error),\n}\n\n\nstruct PathBlock {\n path: RawPath,\n block: Option<usize>,\n}\n\nstruct ManifestParser<R> {\n input: R,\n buf: Vec<u8>,\n}\n\n\nimpl Manifest {\n \/\/\/ Parses a stream to get a manifest.\n pub fn parse<R: BufRead>(m: &mut R) -> Result<Self, ParseError> {\n let parser = ManifestParser::new(m);\n parser.parse()\n }\n\n \/\/\/ wip\n pub fn hostname(&self) -> Option<&str> {\n Some(&self.hostname)\n }\n\n \/\/\/ wip\n pub fn local_dir(&self) -> Option<&Path> {\n self.local_dir.as_path()\n }\n\n \/\/\/ wip\n pub fn max_vol_num(&self) -> usize {\n self.volumes.len()\n }\n\n \/\/\/ wip\n pub fn volume(&self, num: usize) -> Option<&Volume> {\n self.volumes.get(num).and_then(|v| v.as_ref())\n }\n}\n\n\nimpl Volume {\n \/\/\/ wip\n pub fn start_path(&self) -> Option<&Path> {\n self.start_path.path.as_path()\n }\n\n \/\/\/ wip\n pub fn end_path(&self) -> Option<&Path> {\n self.end_path.path.as_path()\n }\n\n \/\/\/ wip\n pub fn start_path_bytes(&self) -> &[u8] {\n self.start_path.path.as_bytes()\n }\n\n \/\/\/ wip\n pub fn end_path_bytes(&self) -> &[u8] {\n self.end_path.path.as_bytes()\n }\n\n \/\/\/ wip\n pub fn start_block(&self) -> Option<usize> {\n self.start_path.block\n }\n\n \/\/\/ wip\n pub fn end_block(&self) -> Option<usize> {\n self.end_path.block\n }\n\n \/\/\/ wip\n pub fn hash_type(&self) -> &str {\n &self.hash_type\n }\n\n \/\/\/ wip\n pub fn hash(&self) -> &[u8] {\n &self.hash\n }\n}\n\n\nimpl Error for ParseError {\n fn description(&self) -> &str {\n match *self {\n ParseError::Io(ref err) => err.description(),\n ParseError::MissingKeyword(_) => \"missing 
keyword in manifest\",\n ParseError::ParseInt(ref err) => err.description(),\n ParseError::Utf8(ref err) => err.description(),\n }\n }\n}\n\nimpl Display for ParseError {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n match *self {\n ParseError::Io(ref e) => write!(fmt, \"{}\", e),\n ParseError::MissingKeyword(ref e) => write!(fmt, \"missing keyword '{}' in manifest\", e),\n ParseError::ParseInt(ref e) => write!(fmt, \"{}\", e),\n ParseError::Utf8(ref e) => write!(fmt, \"{}\", e),\n }\n }\n}\n\nimpl From<io::Error> for ParseError {\n fn from(err: io::Error) -> ParseError {\n ParseError::Io(err)\n }\n}\n\nimpl From<ParseIntError> for ParseError {\n fn from(err: ParseIntError) -> ParseError {\n ParseError::ParseInt(err)\n }\n}\n\nimpl From<FromUtf8Error> for ParseError {\n fn from(err: FromUtf8Error) -> ParseError {\n ParseError::Utf8(err)\n }\n}\n\n\nimpl<R: BufRead> ManifestParser<R> {\n pub fn new(input: R) -> Self {\n ManifestParser {\n input: input,\n buf: vec![],\n }\n }\n\n pub fn parse(mut self) -> Result<Manifest, ParseError> {\n let hostname = try!(self.read_param_str(\"Hostname\"));\n let local_dir = RawPath::with_bytes(try!(self.read_param_bytes(\"Localdir\")));\n\n let mut volumes = Vec::new();\n while let Some((vol, i)) = try!(self.read_volume()) {\n \/\/ resize volumes if necessary\n if i >= volumes.len() {\n volumes.reserve(i + 1);\n for _ in volumes.len()..i + 1 {\n volumes.push(None);\n }\n }\n volumes[i] = Some(vol);\n }\n\n Ok(Manifest {\n hostname: hostname,\n local_dir: local_dir,\n volumes: volumes,\n })\n }\n\n fn read_volume(&mut self) -> Result<Option<(Volume, usize)>, ParseError> {\n \/\/ volume number\n let mut param = try!(self.read_param_str(\"Volume\"));\n if param.ends_with(\":\") {\n param.pop();\n }\n let num = try!(usize::from_str(¶m));\n let start_path = try!(self.read_path_block(\"StartingPath\"));\n let end_path = try!(self.read_path_block(\"EndingPath\"));\n let (htype, h) = try!(self.read_hash_param());\n\n let vol = Volume {\n start_path: start_path,\n end_path: end_path,\n hash_type: htype,\n hash: h,\n };\n Ok(Some((vol, num)))\n }\n\n fn read_path_block(&mut self, key: &str) -> Result<PathBlock, ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(key)) {\n return Err(ParseError::MissingKeyword(key.to_owned()));\n }\n try!(self.consume_whitespace());\n let path = try!(self.read_param_value());\n try!(self.consume_whitespace());\n let block = if !try!(self.consume_byte(b'\\n')) {\n let bytes = try!(self.read_param_value());\n let s = try!(String::from_utf8(bytes));\n let num = try!(usize::from_str(&s));\n Some(num)\n } else {\n None\n };\n Ok(PathBlock {\n path: RawPath::with_bytes(path),\n block: block,\n })\n }\n\n fn read_hash_param(&mut self) -> Result<(String, Vec<u8>), ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(\"Hash\")) {\n return Err(ParseError::MissingKeyword(\"Hash\".to_owned()));\n }\n try!(self.consume_whitespace());\n let htype = try!(self.read_param_value_str());\n try!(self.consume_whitespace());\n let mut hash = try!(self.read_param_value());\n for b in &mut hash {\n *b -= b'0'\n }\n try!(self.consume_newline());\n\n Ok((htype, hash))\n }\n\n fn read_param_bytes(&mut self, key: &str) -> Result<Vec<u8>, ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(key)) {\n return Err(ParseError::MissingKeyword(key.to_owned()));\n }\n try!(self.consume_whitespace());\n match self.read_param_value() {\n Ok(res) => {\n 
try!(self.consume_newline());\n Ok(res)\n }\n Err(e) => Err(From::from(e)),\n }\n }\n\n fn read_param_str(&mut self, key: &str) -> Result<String, ParseError> {\n let bytes = try!(self.read_param_bytes(key));\n String::from_utf8(bytes).map_err(From::from)\n }\n\n fn consume_keyword(&mut self, key: &str) -> io::Result<bool> {\n let mut size = try!(self.input.read_until(b' ', &mut self.buf));\n if size > 0 && self.buf[size - 1] == b' ' {\n size -= 1\n }\n Ok(match_keyword(&self.buf[..size], key))\n }\n\n fn consume_newline(&mut self) -> io::Result<()> {\n self.consume_until(|b| {\n match b {\n b' ' | b'\\t' | b'\\r' | b'\\n' => false,\n _ => true,\n }\n })\n }\n\n fn consume_whitespace(&mut self) -> io::Result<()> {\n self.consume_until(|b| {\n match b {\n b' ' | b'\\t' => false,\n _ => true,\n }\n })\n }\n\n fn consume_until<F>(&mut self, mut f: F) -> io::Result<()>\n where F: FnMut(u8) -> bool\n {\n loop {\n let (pos, end) = {\n let buf = try!(self.input.fill_buf());\n match buf.iter().cloned().position(&mut f) {\n Some(p) => (p, true),\n None => (buf.len(), buf.is_empty()),\n }\n };\n self.input.consume(pos);\n if end {\n return Ok(());\n }\n }\n }\n\n fn read_param_value_str(&mut self) -> Result<String, ParseError> {\n let bytes = try!(self.read_param_value());\n String::from_utf8(bytes).map_err(From::from)\n }\n\n fn read_param_value(&mut self) -> io::Result<Vec<u8>> {\n if try!(self.consume_byte(b'\"')) {\n try!(self.input.read_until(b'\"', &mut self.buf));\n } else {\n try!(self.input.read_until(b'\\n', &mut self.buf));\n }\n let mut result = Vec::with_capacity(self.buf.len());\n \/\/ unescape\n for (i, b) in self.buf.iter().cloned().enumerate() {\n if b != b'\\\\' {\n result.push(b);\n } else {\n \/\/ expects a \\xNN where NN is a number string representing the escaped char in hex\n \/\/ e.g. \\x20 is the space ' '\n if self.buf.len() - i >= 4 && self.buf[i + 1] == b'x' {\n let num = ((self.buf[i + 2] - b'0') << 4) + self.buf[i + 3] - b'0';\n result.push(num);\n }\n }\n }\n Ok(result)\n }\n\n fn consume_byte(&mut self, expected: u8) -> io::Result<bool> {\n let found = {\n let buf = try!(self.input.fill_buf());\n buf.first().map_or(false, |b| *b == expected)\n };\n if found {\n self.input.consume(1);\n }\n Ok(found)\n }\n}\n\n\n#[inline]\nfn match_keyword(buf: &[u8], key: &str) -> bool {\n str::from_utf8(&buf).ok().map_or(false, |s| s == key)\n}\n\n\n#[cfg(test)]\nmod test {\n use super::*;\n use std::fs::File;\n use std::io::BufReader;\n\n #[test]\n fn parse_no_err() {\n let file = File::open(\"tests\/manifest\/full1.manifest\").unwrap();\n let mut bfile = BufReader::new(file);\n Manifest::parse(&mut bfile).unwrap();\n }\n}\n<commit_msg>Fixed read_param_value<commit_after>\/\/! 
Operations on manifest files.\n\nuse std::error::Error;\nuse std::fmt::{self, Display, Formatter};\nuse std::io::{self, BufRead};\nuse std::num::ParseIntError;\nuse std::path::Path;\nuse std::str::{self, FromStr};\nuse std::string::FromUtf8Error;\nuse std::usize;\n\nuse rawpath::RawPath;\n\n\n\/\/\/ Manifest file info.\npub struct Manifest {\n hostname: String,\n local_dir: RawPath,\n volumes: Vec<Option<Volume>>,\n}\n\n\/\/\/ Volume info.\npub struct Volume {\n start_path: PathBlock,\n end_path: PathBlock,\n hash_type: String,\n hash: Vec<u8>,\n}\n\n\/\/\/ wip\n#[derive(Debug)]\npub enum ParseError {\n \/\/\/ wip\n Io(io::Error),\n \/\/\/ wip\n MissingKeyword(String),\n \/\/\/ wip\n ParseInt(ParseIntError),\n \/\/\/ wip\n Utf8(FromUtf8Error),\n}\n\n\nstruct PathBlock {\n path: RawPath,\n block: Option<usize>,\n}\n\nstruct ManifestParser<R> {\n input: R,\n buf: Vec<u8>,\n}\n\n\nimpl Manifest {\n \/\/\/ Parses a stream to get a manifest.\n pub fn parse<R: BufRead>(m: &mut R) -> Result<Self, ParseError> {\n let parser = ManifestParser::new(m);\n parser.parse()\n }\n\n \/\/\/ wip\n pub fn hostname(&self) -> Option<&str> {\n Some(&self.hostname)\n }\n\n \/\/\/ wip\n pub fn local_dir(&self) -> Option<&Path> {\n self.local_dir.as_path()\n }\n\n \/\/\/ wip\n pub fn max_vol_num(&self) -> usize {\n self.volumes.len()\n }\n\n \/\/\/ wip\n pub fn volume(&self, num: usize) -> Option<&Volume> {\n self.volumes.get(num).and_then(|v| v.as_ref())\n }\n}\n\n\nimpl Volume {\n \/\/\/ wip\n pub fn start_path(&self) -> Option<&Path> {\n self.start_path.path.as_path()\n }\n\n \/\/\/ wip\n pub fn end_path(&self) -> Option<&Path> {\n self.end_path.path.as_path()\n }\n\n \/\/\/ wip\n pub fn start_path_bytes(&self) -> &[u8] {\n self.start_path.path.as_bytes()\n }\n\n \/\/\/ wip\n pub fn end_path_bytes(&self) -> &[u8] {\n self.end_path.path.as_bytes()\n }\n\n \/\/\/ wip\n pub fn start_block(&self) -> Option<usize> {\n self.start_path.block\n }\n\n \/\/\/ wip\n pub fn end_block(&self) -> Option<usize> {\n self.end_path.block\n }\n\n \/\/\/ wip\n pub fn hash_type(&self) -> &str {\n &self.hash_type\n }\n\n \/\/\/ wip\n pub fn hash(&self) -> &[u8] {\n &self.hash\n }\n}\n\n\nimpl Error for ParseError {\n fn description(&self) -> &str {\n match *self {\n ParseError::Io(ref err) => err.description(),\n ParseError::MissingKeyword(_) => \"missing keyword in manifest\",\n ParseError::ParseInt(ref err) => err.description(),\n ParseError::Utf8(ref err) => err.description(),\n }\n }\n}\n\nimpl Display for ParseError {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n match *self {\n ParseError::Io(ref e) => write!(fmt, \"{}\", e),\n ParseError::MissingKeyword(ref e) => write!(fmt, \"missing keyword '{}' in manifest\", e),\n ParseError::ParseInt(ref e) => write!(fmt, \"{}\", e),\n ParseError::Utf8(ref e) => write!(fmt, \"{}\", e),\n }\n }\n}\n\nimpl From<io::Error> for ParseError {\n fn from(err: io::Error) -> ParseError {\n ParseError::Io(err)\n }\n}\n\nimpl From<ParseIntError> for ParseError {\n fn from(err: ParseIntError) -> ParseError {\n ParseError::ParseInt(err)\n }\n}\n\nimpl From<FromUtf8Error> for ParseError {\n fn from(err: FromUtf8Error) -> ParseError {\n ParseError::Utf8(err)\n }\n}\n\n\nimpl<R: BufRead> ManifestParser<R> {\n pub fn new(input: R) -> Self {\n ManifestParser {\n input: input,\n buf: vec![],\n }\n }\n\n pub fn parse(mut self) -> Result<Manifest, ParseError> {\n let hostname = try!(self.read_param_str(\"Hostname\"));\n let local_dir = 
RawPath::with_bytes(try!(self.read_param_bytes(\"Localdir\")));\n\n let mut volumes = Vec::new();\n while let Some((vol, i)) = try!(self.read_volume()) {\n \/\/ resize volumes if necessary\n if i >= volumes.len() {\n volumes.reserve(i + 1);\n for _ in volumes.len()..i + 1 {\n volumes.push(None);\n }\n }\n volumes[i] = Some(vol);\n }\n\n Ok(Manifest {\n hostname: hostname,\n local_dir: local_dir,\n volumes: volumes,\n })\n }\n\n fn read_volume(&mut self) -> Result<Option<(Volume, usize)>, ParseError> {\n \/\/ volume number\n let mut param = try!(self.read_param_str(\"Volume\"));\n if param.ends_with(\":\") {\n param.pop();\n }\n let num = try!(usize::from_str(¶m));\n let start_path = try!(self.read_path_block(\"StartingPath\"));\n let end_path = try!(self.read_path_block(\"EndingPath\"));\n let (htype, h) = try!(self.read_hash_param());\n\n let vol = Volume {\n start_path: start_path,\n end_path: end_path,\n hash_type: htype,\n hash: h,\n };\n Ok(Some((vol, num)))\n }\n\n fn read_path_block(&mut self, key: &str) -> Result<PathBlock, ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(key)) {\n return Err(ParseError::MissingKeyword(key.to_owned()));\n }\n try!(self.consume_whitespace());\n let path = try!(self.read_param_value());\n try!(self.consume_whitespace());\n let block = if !try!(self.consume_byte(b'\\n')) {\n let bytes = try!(self.read_param_value());\n let s = try!(String::from_utf8(bytes));\n let num = try!(usize::from_str(&s));\n Some(num)\n } else {\n None\n };\n Ok(PathBlock {\n path: RawPath::with_bytes(path),\n block: block,\n })\n }\n\n fn read_hash_param(&mut self) -> Result<(String, Vec<u8>), ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(\"Hash\")) {\n return Err(ParseError::MissingKeyword(\"Hash\".to_owned()));\n }\n try!(self.consume_whitespace());\n let htype = try!(self.read_param_value_str());\n try!(self.consume_whitespace());\n let mut hash = try!(self.read_param_value());\n for b in &mut hash {\n *b -= b'0'\n }\n try!(self.consume_newline());\n\n Ok((htype, hash))\n }\n\n fn read_param_bytes(&mut self, key: &str) -> Result<Vec<u8>, ParseError> {\n try!(self.consume_whitespace());\n if !try!(self.consume_keyword(key)) {\n return Err(ParseError::MissingKeyword(key.to_owned()));\n }\n try!(self.consume_whitespace());\n match self.read_param_value() {\n Ok(res) => {\n try!(self.consume_newline());\n Ok(res)\n }\n Err(e) => Err(From::from(e)),\n }\n }\n\n fn read_param_str(&mut self, key: &str) -> Result<String, ParseError> {\n let bytes = try!(self.read_param_bytes(key));\n String::from_utf8(bytes).map_err(From::from)\n }\n\n fn consume_keyword(&mut self, key: &str) -> io::Result<bool> {\n let mut size = try!(self.input.read_until(b' ', &mut self.buf));\n if size > 0 && self.buf[size - 1] == b' ' {\n size -= 1\n }\n Ok(match_keyword(&self.buf[..size], key))\n }\n\n fn consume_newline(&mut self) -> io::Result<()> {\n self.consume_until(|b| {\n match b {\n b' ' | b'\\t' | b'\\r' | b'\\n' => false,\n _ => true,\n }\n })\n }\n\n fn consume_whitespace(&mut self) -> io::Result<()> {\n self.consume_until(|b| {\n match b {\n b' ' | b'\\t' => false,\n _ => true,\n }\n })\n }\n\n fn consume_until<F>(&mut self, mut f: F) -> io::Result<()>\n where F: FnMut(u8) -> bool\n {\n loop {\n let (pos, end) = {\n let buf = try!(self.input.fill_buf());\n match buf.iter().cloned().position(&mut f) {\n Some(p) => (p, true),\n None => (buf.len(), buf.is_empty()),\n }\n };\n self.input.consume(pos);\n if end {\n return Ok(());\n }\n 
}\n }\n\n fn read_param_value_str(&mut self) -> Result<String, ParseError> {\n let bytes = try!(self.read_param_value());\n String::from_utf8(bytes).map_err(From::from)\n }\n\n fn read_param_value(&mut self) -> io::Result<Vec<u8>> {\n let (mut size, term) = {\n if try!(self.consume_byte(b'\"')) {\n let s = try!(self.input.read_until(b'\"', &mut self.buf));\n (s, b'\"')\n } else {\n let s = try!(self.input.read_until(b'\\n', &mut self.buf));\n (s, b'\\n')\n }\n };\n if size > 0 && self.buf[size - 1] == term {\n size -= 1;\n }\n let buf = &self.buf[..size];\n let mut result = Vec::with_capacity(size);\n \/\/ unescape\n for (i, b) in buf.iter().cloned().enumerate() {\n if b != b'\\\\' {\n result.push(b);\n } else {\n \/\/ expects a \\xNN where NN is a number string representing the escaped char in hex\n \/\/ e.g. \\x20 is the space ' '\n if buf.len() - i >= 4 && self.buf[i + 1] == b'x' {\n let num = ((buf[i + 2] - b'0') << 4) + buf[i + 3] - b'0';\n result.push(num);\n }\n }\n }\n Ok(result)\n }\n\n fn consume_byte(&mut self, expected: u8) -> io::Result<bool> {\n let found = {\n let buf = try!(self.input.fill_buf());\n buf.first().map_or(false, |b| *b == expected)\n };\n if found {\n self.input.consume(1);\n }\n Ok(found)\n }\n}\n\n\n#[inline]\nfn match_keyword(buf: &[u8], key: &str) -> bool {\n str::from_utf8(&buf).ok().map_or(false, |s| s == key)\n}\n\n\n#[cfg(test)]\nmod test {\n use super::*;\n use std::fs::File;\n use std::io::BufReader;\n\n #[test]\n fn parse_no_err() {\n let file = File::open(\"tests\/manifest\/full1.manifest\").unwrap();\n let mut bfile = BufReader::new(file);\n Manifest::parse(&mut bfile).unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added opponent.rs; completely untested!<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{fmt};\nuse os::token::Token;\n\n#[deriving(Copy, PartialEq, Eq, Clone, PartialOrd, Ord)]\npub struct PollOpt(uint);\n\npub const EDGE: PollOpt = PollOpt(0x020);\npub const LEVEL: PollOpt = PollOpt(0x040);\npub const ONESHOT: PollOpt = PollOpt(0x080);\n\nimpl PollOpt {\n #[inline]\n pub fn edge() -> PollOpt {\n EDGE | ONESHOT\n }\n\n #[inline]\n pub fn empty() -> PollOpt {\n PollOpt(0)\n }\n\n #[inline]\n pub fn all() -> PollOpt {\n EDGE | LEVEL | ONESHOT\n }\n\n #[inline]\n pub fn bits(&self) -> uint {\n let PollOpt(bits) = *self;\n bits\n }\n\n #[inline]\n pub fn contains(&self, other: PollOpt) -> bool {\n (*self & other) == other\n }\n}\n\nimpl BitOr<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() | other.bits())\n }\n}\n\nimpl BitXor<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitxor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() ^ other.bits())\n }\n}\n\nimpl BitAnd<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitand(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & other.bits())\n }\n}\n\nimpl Sub<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn sub(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & !other.bits())\n }\n}\n\nimpl Not<PollOpt> for PollOpt {\n #[inline]\n fn not(&self) -> PollOpt {\n PollOpt(!self.bits() & PollOpt::all().bits())\n }\n}\n\nimpl fmt::Show for PollOpt {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (EDGE, \"Edge-Triggered\"),\n (LEVEL, \"Level-Triggered\"),\n (ONESHOT, \"OneShot\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = 
true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags Interest: uint {\n const READABLE = 0x001,\n const WRITABLE = 0x002,\n const ERROR = 0x004,\n const HUP = 0x008,\n const HINTED = 0x010,\n const ALL = 0x001 | 0x002 | 0x008 \/\/epoll checks for ERROR no matter what\n }\n)\n\n\nimpl fmt::Show for Interest {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (READABLE, \"Readable\"),\n (WRITABLE, \"Writable\"),\n (ERROR, \"Error\"),\n (HUP, \"HupHint\"),\n (HINTED, \"Hinted\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags ReadHint: uint {\n const DATAHINT = 0x001,\n const HUPHINT = 0x002,\n const ERRORHINT = 0x004\n }\n)\n\nimpl fmt::Show for ReadHint {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (DATAHINT, \"DataHint\"),\n (HUPHINT, \"HupHint\"),\n (ERRORHINT, \"ErrorHint\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\n\n#[deriving(Copy, Show)]\npub struct IoEvent {\n kind: Interest,\n token: Token\n}\n\n\/\/\/ IoEvent represents the raw event that the OS-specific selector\n\/\/\/ returned. An event can represent more than one kind (such as\n\/\/\/ readable or writable) at a time.\n\/\/\/\n\/\/\/ These IoEvent objects are created by the OS-specific concrete\n\/\/\/ Selector when they have events to report.\nimpl IoEvent {\n \/\/\/ Create a new IoEvent.\n pub fn new(kind: Interest, token: uint) -> IoEvent {\n IoEvent {\n kind: kind,\n token: Token(token)\n }\n }\n\n pub fn token(&self) -> Token {\n self.token\n }\n\n \/\/\/ Return an optional hint for a readable handle. Currently,\n \/\/\/ this method supports the HupHint, which indicates that the\n \/\/\/ kernel reported that the remote side hung up. This allows a\n \/\/\/ consumer to avoid reading in order to discover the hangup.\n pub fn read_hint(&self) -> ReadHint {\n let mut hint = ReadHint::empty();\n\n \/\/ The backend doesn't support hinting\n if !self.kind.contains(HINTED) {\n return hint;\n }\n\n if self.kind.contains(HUP) {\n hint = hint | HUPHINT\n }\n\n if self.kind.contains(READABLE) {\n hint = hint | DATAHINT\n }\n\n if self.kind.contains(ERROR) {\n hint = hint | ERRORHINT\n }\n\n hint\n }\n\n \/\/\/ This event indicated that the handle is now readable\n pub fn is_readable(&self) -> bool {\n self.kind.contains(READABLE) || self.kind.contains(HUP)\n }\n\n \/\/\/ This event indicated that the handle is now writable\n pub fn is_writable(&self) -> bool {\n self.kind.contains(WRITABLE)\n }\n\n \/\/\/ This event indicated that the handle had an error\n pub fn is_error(&self) -> bool {\n self.kind.contains(ERROR)\n }\n}\n<commit_msg>event: bitflags! 
now derives copy on its own.<commit_after>use std::{fmt};\nuse os::token::Token;\n\n#[deriving(Copy, PartialEq, Eq, Clone, PartialOrd, Ord)]\npub struct PollOpt(uint);\n\npub const EDGE: PollOpt = PollOpt(0x020);\npub const LEVEL: PollOpt = PollOpt(0x040);\npub const ONESHOT: PollOpt = PollOpt(0x080);\n\nimpl PollOpt {\n #[inline]\n pub fn edge() -> PollOpt {\n EDGE | ONESHOT\n }\n\n #[inline]\n pub fn empty() -> PollOpt {\n PollOpt(0)\n }\n\n #[inline]\n pub fn all() -> PollOpt {\n EDGE | LEVEL | ONESHOT\n }\n\n #[inline]\n pub fn bits(&self) -> uint {\n let PollOpt(bits) = *self;\n bits\n }\n\n #[inline]\n pub fn contains(&self, other: PollOpt) -> bool {\n (*self & other) == other\n }\n}\n\nimpl BitOr<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() | other.bits())\n }\n}\n\nimpl BitXor<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitxor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() ^ other.bits())\n }\n}\n\nimpl BitAnd<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitand(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & other.bits())\n }\n}\n\nimpl Sub<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn sub(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & !other.bits())\n }\n}\n\nimpl Not<PollOpt> for PollOpt {\n #[inline]\n fn not(&self) -> PollOpt {\n PollOpt(!self.bits() & PollOpt::all().bits())\n }\n}\n\nimpl fmt::Show for PollOpt {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (EDGE, \"Edge-Triggered\"),\n (LEVEL, \"Level-Triggered\"),\n (ONESHOT, \"OneShot\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n flags Interest: uint {\n const READABLE = 0x001,\n const WRITABLE = 0x002,\n const ERROR = 0x004,\n const HUP = 0x008,\n const HINTED = 0x010,\n const ALL = 0x001 | 0x002 | 0x008 \/\/epoll checks for ERROR no matter what\n }\n)\n\n\nimpl fmt::Show for Interest {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (READABLE, \"Readable\"),\n (WRITABLE, \"Writable\"),\n (ERROR, \"Error\"),\n (HUP, \"HupHint\"),\n (HINTED, \"Hinted\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n flags ReadHint: uint {\n const DATAHINT = 0x001,\n const HUPHINT = 0x002,\n const ERRORHINT = 0x004\n }\n)\n\nimpl fmt::Show for ReadHint {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (DATAHINT, \"DataHint\"),\n (HUPHINT, \"HupHint\"),\n (ERRORHINT, \"ErrorHint\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\n\n#[deriving(Copy, Show)]\npub struct IoEvent {\n kind: Interest,\n token: Token\n}\n\n\/\/\/ IoEvent represents the raw event that the OS-specific selector\n\/\/\/ returned. 
An event can represent more than one kind (such as\n\/\/\/ readable or writable) at a time.\n\/\/\/\n\/\/\/ These IoEvent objects are created by the OS-specific concrete\n\/\/\/ Selector when they have events to report.\nimpl IoEvent {\n \/\/\/ Create a new IoEvent.\n pub fn new(kind: Interest, token: uint) -> IoEvent {\n IoEvent {\n kind: kind,\n token: Token(token)\n }\n }\n\n pub fn token(&self) -> Token {\n self.token\n }\n\n \/\/\/ Return an optional hint for a readable handle. Currently,\n \/\/\/ this method supports the HupHint, which indicates that the\n \/\/\/ kernel reported that the remote side hung up. This allows a\n \/\/\/ consumer to avoid reading in order to discover the hangup.\n pub fn read_hint(&self) -> ReadHint {\n let mut hint = ReadHint::empty();\n\n \/\/ The backend doesn't support hinting\n if !self.kind.contains(HINTED) {\n return hint;\n }\n\n if self.kind.contains(HUP) {\n hint = hint | HUPHINT\n }\n\n if self.kind.contains(READABLE) {\n hint = hint | DATAHINT\n }\n\n if self.kind.contains(ERROR) {\n hint = hint | ERRORHINT\n }\n\n hint\n }\n\n \/\/\/ This event indicated that the handle is now readable\n pub fn is_readable(&self) -> bool {\n self.kind.contains(READABLE) || self.kind.contains(HUP)\n }\n\n \/\/\/ This event indicated that the handle is now writable\n pub fn is_writable(&self) -> bool {\n self.kind.contains(WRITABLE)\n }\n\n \/\/\/ This event indicated that the handle had an error\n pub fn is_error(&self) -> bool {\n self.kind.contains(ERROR)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Iron's HTTP Response representation and associated methods.\n\nuse std::io::{self, Write};\nuse std::fmt::{self, Debug};\nuse std::fs::File;\n\nuse typemap::TypeMap;\nuse plugin::Extensible;\nuse modifier::{Set, Modifier};\nuse hyper::header::Headers;\n\nuse status::{self, Status};\nuse {Plugin, headers};\n\npub use hyper::server::response::Response as HttpResponse;\nuse hyper::net::Fresh;\n\n\/\/\/ A `Write`r of HTTP response bodies.\npub struct ResponseBody<'a>(Box<Write + 'a>);\n\nimpl<'a> ResponseBody<'a> {\n \/\/\/ Create a new ResponseBody, mostly for use in mocking.\n pub fn new<W: Write + 'a>(writer: W) -> ResponseBody<'a> {\n ResponseBody(Box::new(writer))\n }\n}\n\nimpl<'a> Write for ResponseBody<'a> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.0.write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.0.flush()\n }\n}\n\n\/\/\/ Wrapper type to set `Read`ers as response bodies\npub struct BodyReader<R: Send>(pub R);\n\n\/\/\/ A trait which writes the body of an HTTP response.\npub trait WriteBody: Send {\n \/\/\/ Writes the body to the provided `ResponseBody`.\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()>;\n}\n\nimpl WriteBody for String {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n self.as_bytes().write_body(res)\n }\n}\n\nimpl<'a> WriteBody for &'a str {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n self.as_bytes().write_body(res)\n }\n}\n\nimpl WriteBody for Vec<u8> {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n res.write_all(self)\n }\n}\n\nimpl<'a> WriteBody for &'a [u8] {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n res.write_all(self)\n }\n}\n\nimpl WriteBody for File {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(self, res).map(|_| ())\n }\n}\n\nimpl WriteBody for Box<io::Read + Send> {\n fn write_body(&mut self, res: &mut ResponseBody) -> 
io::Result<()> {\n io::copy(self, res).map(|_| ())\n }\n}\n\nimpl <R: io::Read + Send> WriteBody for BodyReader<R> {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(&mut self.0, res).map(|_| ())\n }\n}\n\n\/* Needs specialization :(\nimpl<R: Read + Send> WriteBody for R {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(self, res)\n }\n}\n*\/\n\n\/\/\/ The response representation given to `Middleware`\npub struct Response {\n \/\/\/ The response status-code.\n pub status: Option<Status>,\n\n \/\/\/ The headers of the response.\n pub headers: Headers,\n\n \/\/\/ A TypeMap to be used as an extensible storage for data\n \/\/\/ associated with this Response.\n pub extensions: TypeMap,\n\n \/\/\/ The body of the response.\n pub body: Option<Box<WriteBody>>\n}\n\nimpl Response {\n \/\/\/ Construct a blank Response\n pub fn new() -> Response {\n Response {\n status: None, \/\/ Start with no response code.\n body: None, \/\/ Start with no body.\n headers: Headers::new(),\n extensions: TypeMap::new()\n }\n }\n\n \/\/\/ Construct a Response with the specified modifier pre-applied.\n pub fn with<M: Modifier<Response>>(m: M) -> Response {\n Response::new().set(m)\n }\n\n \/\/ `write_back` is used to put all the data added to `self`\n \/\/ back onto an `HttpResponse` so that it is sent back to the\n \/\/ client.\n \/\/\n \/\/ `write_back` consumes the `Response`.\n #[doc(hidden)]\n pub fn write_back(self, mut http_res: HttpResponse<Fresh>) {\n *http_res.headers_mut() = self.headers;\n\n \/\/ Default to a 404 if no response code was set\n *http_res.status_mut() = self.status.clone().unwrap_or(status::NotFound);\n\n let out = match self.body {\n Some(body) => write_with_body(http_res, body),\n None => {\n http_res.headers_mut().set(headers::ContentLength(0));\n http_res.start().and_then(|res| res.end())\n }\n };\n\n if let Err(e) = out {\n error!(\"Error writing response: {}\", e);\n }\n }\n}\n\nfn write_with_body(mut res: HttpResponse<Fresh>, mut body: Box<WriteBody>)\n -> io::Result<()> {\n let content_type = res.headers().get::<headers::ContentType>()\n .map_or_else(|| headers::ContentType(\"text\/plain\".parse().unwrap()),\n |cx| cx.clone());\n res.headers_mut().set(content_type);\n\n let mut raw_res = try!(res.start());\n try!(body.write_body(&mut ResponseBody::new(&mut raw_res)));\n raw_res.end()\n}\n\nimpl Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n writeln!(f, \"HTTP\/1.1 {} {}\\n{}\",\n self.status.unwrap_or(status::NotFound),\n self.status.unwrap_or(status::NotFound).canonical_reason().unwrap(),\n self.headers\n )\n }\n}\n\nimpl fmt::Display for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Debug::fmt(self, f)\n }\n}\n\n\/\/ Allow plugins to attach to responses.\nimpl Extensible for Response {\n fn extensions(&self) -> &TypeMap {\n &self.extensions\n }\n\n fn extensions_mut(&mut self) -> &mut TypeMap {\n &mut self.extensions\n }\n}\n\nimpl Plugin for Response {}\nimpl Set for Response {}\n<commit_msg>(fix) update Response status code display<commit_after>\/\/! 
Iron's HTTP Response representation and associated methods.\n\nuse std::io::{self, Write};\nuse std::fmt::{self, Debug};\nuse std::fs::File;\n\nuse typemap::TypeMap;\nuse plugin::Extensible;\nuse modifier::{Set, Modifier};\nuse hyper::header::Headers;\n\nuse status::{self, Status};\nuse {Plugin, headers};\n\npub use hyper::server::response::Response as HttpResponse;\nuse hyper::net::Fresh;\n\n\/\/\/ A `Write`r of HTTP response bodies.\npub struct ResponseBody<'a>(Box<Write + 'a>);\n\nimpl<'a> ResponseBody<'a> {\n \/\/\/ Create a new ResponseBody, mostly for use in mocking.\n pub fn new<W: Write + 'a>(writer: W) -> ResponseBody<'a> {\n ResponseBody(Box::new(writer))\n }\n}\n\nimpl<'a> Write for ResponseBody<'a> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.0.write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.0.flush()\n }\n}\n\n\/\/\/ Wrapper type to set `Read`ers as response bodies\npub struct BodyReader<R: Send>(pub R);\n\n\/\/\/ A trait which writes the body of an HTTP response.\npub trait WriteBody: Send {\n \/\/\/ Writes the body to the provided `ResponseBody`.\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()>;\n}\n\nimpl WriteBody for String {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n self.as_bytes().write_body(res)\n }\n}\n\nimpl<'a> WriteBody for &'a str {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n self.as_bytes().write_body(res)\n }\n}\n\nimpl WriteBody for Vec<u8> {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n res.write_all(self)\n }\n}\n\nimpl<'a> WriteBody for &'a [u8] {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n res.write_all(self)\n }\n}\n\nimpl WriteBody for File {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(self, res).map(|_| ())\n }\n}\n\nimpl WriteBody for Box<io::Read + Send> {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(self, res).map(|_| ())\n }\n}\n\nimpl <R: io::Read + Send> WriteBody for BodyReader<R> {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(&mut self.0, res).map(|_| ())\n }\n}\n\n\/* Needs specialization :(\nimpl<R: Read + Send> WriteBody for R {\n fn write_body(&mut self, res: &mut ResponseBody) -> io::Result<()> {\n io::copy(self, res)\n }\n}\n*\/\n\n\/\/\/ The response representation given to `Middleware`\npub struct Response {\n \/\/\/ The response status-code.\n pub status: Option<Status>,\n\n \/\/\/ The headers of the response.\n pub headers: Headers,\n\n \/\/\/ A TypeMap to be used as an extensible storage for data\n \/\/\/ associated with this Response.\n pub extensions: TypeMap,\n\n \/\/\/ The body of the response.\n pub body: Option<Box<WriteBody>>\n}\n\nimpl Response {\n \/\/\/ Construct a blank Response\n pub fn new() -> Response {\n Response {\n status: None, \/\/ Start with no response code.\n body: None, \/\/ Start with no body.\n headers: Headers::new(),\n extensions: TypeMap::new()\n }\n }\n\n \/\/\/ Construct a Response with the specified modifier pre-applied.\n pub fn with<M: Modifier<Response>>(m: M) -> Response {\n Response::new().set(m)\n }\n\n \/\/ `write_back` is used to put all the data added to `self`\n \/\/ back onto an `HttpResponse` so that it is sent back to the\n \/\/ client.\n \/\/\n \/\/ `write_back` consumes the `Response`.\n #[doc(hidden)]\n pub fn write_back(self, mut http_res: HttpResponse<Fresh>) {\n *http_res.headers_mut() = 
self.headers;\n\n \/\/ Default to a 404 if no response code was set\n *http_res.status_mut() = self.status.clone().unwrap_or(status::NotFound);\n\n let out = match self.body {\n Some(body) => write_with_body(http_res, body),\n None => {\n http_res.headers_mut().set(headers::ContentLength(0));\n http_res.start().and_then(|res| res.end())\n }\n };\n\n if let Err(e) = out {\n error!(\"Error writing response: {}\", e);\n }\n }\n}\n\nfn write_with_body(mut res: HttpResponse<Fresh>, mut body: Box<WriteBody>)\n -> io::Result<()> {\n let content_type = res.headers().get::<headers::ContentType>()\n .map_or_else(|| headers::ContentType(\"text\/plain\".parse().unwrap()),\n |cx| cx.clone());\n res.headers_mut().set(content_type);\n\n let mut raw_res = try!(res.start());\n try!(body.write_body(&mut ResponseBody::new(&mut raw_res)));\n raw_res.end()\n}\n\nimpl Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n writeln!(f, \"HTTP\/1.1 {}\\n{}\",\n self.status.unwrap_or(status::NotFound),\n self.headers\n )\n }\n}\n\nimpl fmt::Display for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Debug::fmt(self, f)\n }\n}\n\n\/\/ Allow plugins to attach to responses.\nimpl Extensible for Response {\n fn extensions(&self) -> &TypeMap {\n &self.extensions\n }\n\n fn extensions_mut(&mut self) -> &mut TypeMap {\n &mut self.extensions\n }\n}\n\nimpl Plugin for Response {}\nimpl Set for Response {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>submit commit<commit_after>#![feature(proc_macro, plugin, custom_attribute, custom_derive, plugin)]\n#![plugin(serde_derive)]\n\nextern crate serde_json as json;\nextern crate tin_can_telephone as tct;\n\nuse json::ser::{to_vec};\nuse json::de::{from_slice};\n\nuse tct::stanza::Stanza;\n\n#[test]\nfn stanza_message_format() {\n\n let message : Stanza = from_slice(\n \"{\\\"Message\\\":{\\\"to\\\":\\\"colin\\\",\\\"from\\\":\\\"joe\\\",\\\"msg\\\":\\\"body example\\\"}}\"\n .as_bytes()).unwrap();\n\n let matching_to = \"colin\".to_string();\n let matching_from = \"joe\".to_string();\n let matching_msg = \"body example\".to_string();\n\n if let Stanza::Message{ to, from, msg } = message {\n assert_eq!(to, matching_to);\n assert_eq!(from, matching_from);\n assert_eq!(msg, matching_msg);\n }\n\n let group_message : Stanza = from_slice(\n \"{\\\"GroupMessage\\\":{\\\"to\\\":\\\"colin\\\",\\\"from\\\":\\\"joesteam\\\",\\\"msg\\\":\\\"body example\\\"}}\"\n .as_bytes()).unwrap();\n\n let matching_to = \"colin\".to_string();\n let matching_from = \"joesteam\".to_string();\n let matching_msg = \"body example\".to_string();\n\n if let Stanza::GroupMessage{ to, from, msg, .. 
} = group_message {\n assert_eq!(to, matching_to);\n assert_eq!(from, matching_from);\n assert_eq!(msg, matching_msg);\n }\n\n}\n\n#[test]\n\/\/\/ This will test the format we use for requesting data from the server.\nfn stanza_req_res_format() {\n\n use tct::stanza::{RequestKind, ResponseKind};\n use std::collections::{HashMap};\n\n let req : Stanza = from_slice(\n \"{\\\"Request\\\":{\\\"to\\\":\\\"him\\\",\\\n \\\"from\\\":\\\"her\\\",\\\"kind\\\":\\\"UserInfo\\\"}}\"\n .as_bytes()).unwrap();\n\n let matching_to = \"him\".to_string();\n let matching_from = \"her\".to_string();\n let matching_kind = RequestKind::UserInfo; \/\/ Where UserInfo is a variant of an enum\n\n if let Stanza::Request{ to, from, kind } = req {\n assert_eq!(to, matching_to);\n assert_eq!(from, matching_from);\n assert_eq!(kind, matching_kind);\n }\n\n let res : Stanza = from_slice(\n \"{\\\"Response\\\":{\\\"to\\\":\\\"guy\\\",\\\n \\\"from\\\":\\\"girl\\\",\\\"kind\\\":{\\\"\\\n UserInfo\\\":{\\\"one\\\":\\\"one_back\\\", \\\"two\\\":\\\"two_back\\\"}}}}\"\n .as_bytes()).unwrap();\n\n let matching_to = \"guy\".to_string();\n let matching_from = \"girl\".to_string();\n let mut map : HashMap<String, String> = HashMap::new();\n map.insert(\"one\".to_string(), \"one_back\".to_string());\n map.insert(\"two\".to_string(), \"two_back\".to_string());\n\n if let Stanza::Response{ to, from, kind } = res {\n assert_eq!(to, matching_to);\n assert_eq!(from, matching_from);\n if let ResponseKind::UserInfo(res_map) = kind {\n assert_eq!(map.get(\"one\"), res_map.get(\"one\"));\n assert_eq!(map.get(\"two\"), res_map.get(\"two\"));\n }\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::io;\nuse std::process::Command;\nuse target::TargetOptions;\n\nuse self::Arch::*;\n\n#[allow(non_camel_case_types)]\n#[derive(Copy, Clone)]\npub enum Arch {\n Armv7,\n Armv7s,\n Arm64,\n I386,\n X86_64\n}\n\nimpl Arch {\n pub fn to_string(&self) -> &'static str {\n match self {\n &Armv7 => \"armv7\",\n &Armv7s => \"armv7s\",\n &Arm64 => \"arm64\",\n &I386 => \"i386\",\n &X86_64 => \"x86_64\"\n }\n }\n}\n\npub fn get_sdk_root(sdk_name: &str) -> String {\n let res = Command::new(\"xcrun\")\n .arg(\"--show-sdk-path\")\n .arg(\"-sdk\")\n .arg(sdk_name)\n .output()\n .and_then(|output| {\n if output.status.success() {\n Ok(String::from_utf8(output.stdout).unwrap())\n } else {\n let error = String::from_utf8(output.stderr);\n let error = format!(\"process exit with error: {}\",\n error.unwrap());\n Err(io::Error::new(io::ErrorKind::Other,\n &error[..]))\n }\n });\n\n match res {\n Ok(output) => output.trim().to_string(),\n Err(e) => panic!(\"failed to get {} SDK path: {}\", sdk_name, e)\n }\n}\n\nfn pre_link_args(arch: Arch) -> Vec<String> {\n let sdk_name = match arch {\n Armv7 | Armv7s | Arm64 => \"iphoneos\",\n I386 | X86_64 => \"iphonesimulator\"\n };\n\n let arch_name = arch.to_string();\n\n vec![\"-arch\".to_string(), arch_name.to_string(),\n \"-Wl,-syslibroot\".to_string(), get_sdk_root(sdk_name)]\n}\n\nfn target_cpu(arch: Arch) -> String {\n match arch {\n Armv7 => \"cortex-a8\", \/\/ iOS7 is supported on iPhone 4 and higher\n Armv7s => \"cortex-a9\",\n Arm64 => \"cyclone\",\n I386 => \"generic\",\n X86_64 => \"x86-64\",\n }.to_string()\n}\n\npub fn opts(arch: Arch) -> TargetOptions {\n TargetOptions {\n cpu: target_cpu(arch),\n dynamic_linking: false,\n executables: true,\n pre_link_args: pre_link_args(arch),\n .. super::apple_base::opts()\n }\n}\n<commit_msg>Use correct target CPU for iOS simulator.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::io;\nuse std::process::Command;\nuse target::TargetOptions;\n\nuse self::Arch::*;\n\n#[allow(non_camel_case_types)]\n#[derive(Copy, Clone)]\npub enum Arch {\n Armv7,\n Armv7s,\n Arm64,\n I386,\n X86_64\n}\n\nimpl Arch {\n pub fn to_string(&self) -> &'static str {\n match self {\n &Armv7 => \"armv7\",\n &Armv7s => \"armv7s\",\n &Arm64 => \"arm64\",\n &I386 => \"i386\",\n &X86_64 => \"x86_64\"\n }\n }\n}\n\npub fn get_sdk_root(sdk_name: &str) -> String {\n let res = Command::new(\"xcrun\")\n .arg(\"--show-sdk-path\")\n .arg(\"-sdk\")\n .arg(sdk_name)\n .output()\n .and_then(|output| {\n if output.status.success() {\n Ok(String::from_utf8(output.stdout).unwrap())\n } else {\n let error = String::from_utf8(output.stderr);\n let error = format!(\"process exit with error: {}\",\n error.unwrap());\n Err(io::Error::new(io::ErrorKind::Other,\n &error[..]))\n }\n });\n\n match res {\n Ok(output) => output.trim().to_string(),\n Err(e) => panic!(\"failed to get {} SDK path: {}\", sdk_name, e)\n }\n}\n\nfn pre_link_args(arch: Arch) -> Vec<String> {\n let sdk_name = match arch {\n Armv7 | Armv7s | Arm64 => \"iphoneos\",\n I386 | X86_64 => \"iphonesimulator\"\n };\n\n let arch_name = arch.to_string();\n\n vec![\"-arch\".to_string(), arch_name.to_string(),\n \"-Wl,-syslibroot\".to_string(), get_sdk_root(sdk_name)]\n}\n\nfn target_cpu(arch: Arch) -> String {\n match arch {\n Armv7 => \"cortex-a8\", \/\/ iOS7 is supported on iPhone 4 and higher\n Armv7s => \"cortex-a9\",\n Arm64 => \"cyclone\",\n I386 => \"yonah\",\n X86_64 => \"core2\",\n }.to_string()\n}\n\npub fn opts(arch: Arch) -> TargetOptions {\n TargetOptions {\n cpu: target_cpu(arch),\n dynamic_linking: false,\n executables: true,\n pre_link_args: pre_link_args(arch),\n .. super::apple_base::opts()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. 
If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\nuse gtk::{self, ffi};\nuse glib::{to_bool, to_gboolean};\nuse gtk::FFIWidget;\nuse gtk::cast::GTK_ABOUT_DIALOG;\nuse std::ffi::CString;\n\nstruct_Widget!(AboutDialog);\n\nimpl AboutDialog {\n pub fn new() -> Option<AboutDialog> {\n let tmp_pointer = unsafe { ffi::gtk_about_dialog_new() };\n\n if tmp_pointer.is_null() {\n None\n } else {\n Some(gtk::FFIWidget::wrap_widget(tmp_pointer))\n }\n }\n\n pub fn get_program_name(&self) -> Option<String> {\n unsafe {\n let name = ffi::gtk_about_dialog_get_program_name(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if name.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&name)).to_string())\n }\n }\n }\n\n pub fn set_program_name(&self, name: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(name.as_bytes());\n\n ffi::gtk_about_dialog_set_program_name(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_version(&self) -> Option<String> {\n unsafe {\n let version = ffi::gtk_about_dialog_get_version(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if version.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&version)).to_string())\n }\n }\n }\n\n pub fn set_version(&self, version: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(version.as_bytes());\n\n ffi::gtk_about_dialog_set_version(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_copyright(&self) -> Option<String> {\n unsafe {\n let copyright = ffi::gtk_about_dialog_get_copyright(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if copyright.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(©right)).to_string())\n }\n }\n }\n\n pub fn set_copyright(&self, copyright: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(copyright.as_bytes());\n\n ffi::gtk_about_dialog_set_copyright(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_comments(&self) -> Option<String> {\n unsafe {\n let comments = ffi::gtk_about_dialog_get_comments(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if comments.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&comments)).to_string())\n }\n }\n }\n\n pub fn set_comments(&self, comments: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(comments.as_bytes());\n\n ffi::gtk_about_dialog_set_comments(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_license(&self) -> Option<String> {\n unsafe {\n let license = ffi::gtk_about_dialog_get_license(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if license.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&license)).to_string())\n }\n }\n }\n\n pub fn set_license(&self, license: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(license.as_bytes());\n\n ffi::gtk_about_dialog_set_license(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_wrap_license(&self) -> bool {\n unsafe { to_bool(ffi::gtk_about_dialog_get_wrap_license(GTK_ABOUT_DIALOG(self.unwrap_widget()))) }\n }\n\n pub fn set_wrap_license(&self, wrap_license: bool) -> () {\n unsafe { ffi::gtk_about_dialog_set_wrap_license(GTK_ABOUT_DIALOG(self.unwrap_widget()), to_gboolean(wrap_license)) }\n }\n\n pub fn get_license_type(&self) -> gtk::License {\n unsafe { ffi::gtk_about_dialog_get_license_type(GTK_ABOUT_DIALOG(self.unwrap_widget())) }\n }\n\n pub fn 
set_license_type(&self, license_type: gtk::License) -> () {\n unsafe { ffi::gtk_about_dialog_set_license_type(GTK_ABOUT_DIALOG(self.unwrap_widget()), license_type) }\n }\n\n pub fn get_website(&self) -> Option<String> {\n unsafe {\n let website = ffi::gtk_about_dialog_get_website(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if website.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&website)).to_string())\n }\n }\n }\n\n pub fn set_website(&self, website: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(website.as_bytes());\n\n ffi::gtk_about_dialog_set_website(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_website_label(&self) -> Option<String> {\n unsafe {\n let website_label = ffi::gtk_about_dialog_get_website_label(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if website_label.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&website_label)).to_string())\n }\n }\n }\n\n pub fn set_website_label(&self, website_label: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(website_label.as_bytes());\n\n ffi::gtk_about_dialog_set_website_label(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_authors(&self) -> Vec<String> {\n let authors = unsafe { ffi::gtk_about_dialog_get_authors(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !authors.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = authors.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_authors(&self, authors: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in authors.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n unsafe { ffi::gtk_about_dialog_set_authors(GTK_ABOUT_DIALOG(self.unwrap_widget()), tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_artists(&self) -> Vec<String> {\n let artists = unsafe { ffi::gtk_about_dialog_get_artists(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !artists.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = artists.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_artists(&self, artists: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in artists.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n unsafe { ffi::gtk_about_dialog_set_artists(GTK_ABOUT_DIALOG(self.unwrap_widget()), tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_documenters(&self) -> Vec<String> {\n let documenters = unsafe { ffi::gtk_about_dialog_get_documenters(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !documenters.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = documenters.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_documenters(&self, documenters: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in documenters.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n \n tmp_vec.push(c_str.as_ptr());\n }\n unsafe { ffi::gtk_about_dialog_set_documenters(GTK_ABOUT_DIALOG(self.unwrap_widget()), 
tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_translator_credits(&self) -> Option<String> {\n unsafe {\n let translator_credits = ffi::gtk_about_dialog_get_translator_credits(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if translator_credits.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&translator_credits)).to_string())\n }\n }\n }\n\n pub fn set_translator_credits(&self, translator_credits: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(translator_credits.as_bytes());\n\n ffi::gtk_about_dialog_set_translator_credits(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n \/*pub fn get_logo(&self) -> Option<String> {\n let logo = unsafe { ffi::gtk_about_dialog_set_logo(self.pointer)) };\n\n if logo.is_null() {\n None\n } else {\n Some(unsafe { gtk::FFIWidget::wrap_widget(logo) })\n }\n }\n\n pub fn set_logo(&self, logo: Pixbuf) -> () {\n unsafe { ffi::gtk_about_dialog_set_logo(GTK_ABOUT_DIALOG(self.unwrap_widget()), GDK_PIXBUF(logo.unwrap_widget())) }\n }*\/\n\n pub fn get_logo_icon_name(&self) -> Option<String> {\n unsafe {\n let logo_icon_name = ffi::gtk_about_dialog_get_logo_icon_name(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if logo_icon_name.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&logo_icon_name)).to_string())\n }\n }\n }\n\n pub fn set_logo_icon_name(&self, logo_icon_name: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(logo_icon_name.as_bytes());\n\n ffi::gtk_about_dialog_set_logo_icon_name(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn add_credit_section(&self, section_name: &str, people: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in people.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n unsafe {\n let c_str = CString::from_slice(section_name.as_bytes());\n\n ffi::gtk_about_dialog_add_credit_section(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr(), tmp_vec.as_slice().as_ptr())\n }\n }\n\n \/*pub fn show(parent: Window, properties: Vec<String>) -> () {\n unsafe { ffi::gtk_show_about_dialog(GTK_WINDOW(parent), first_property_name, ...) }\n }*\/\n}\n\nimpl_drop!(AboutDialog);\nimpl_TraitWidget!(AboutDialog);\n\nimpl gtk::ContainerTrait for AboutDialog {}\nimpl gtk::BinTrait for AboutDialog {}\nimpl gtk::WindowTrait for AboutDialog {}\nimpl gtk::DialogTrait for AboutDialog {}\n\nimpl_widget_events!(AboutDialog);\n<commit_msg>Fix invalid c arrays<commit_after>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. 
If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\nuse gtk::{self, ffi};\nuse glib::{to_bool, to_gboolean};\nuse gtk::FFIWidget;\nuse gtk::cast::GTK_ABOUT_DIALOG;\nuse std::ffi::CString;\n\nstruct_Widget!(AboutDialog);\n\nimpl AboutDialog {\n pub fn new() -> Option<AboutDialog> {\n let tmp_pointer = unsafe { ffi::gtk_about_dialog_new() };\n\n if tmp_pointer.is_null() {\n None\n } else {\n Some(gtk::FFIWidget::wrap_widget(tmp_pointer))\n }\n }\n\n pub fn get_program_name(&self) -> Option<String> {\n unsafe {\n let name = ffi::gtk_about_dialog_get_program_name(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if name.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&name)).to_string())\n }\n }\n }\n\n pub fn set_program_name(&self, name: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(name.as_bytes());\n\n ffi::gtk_about_dialog_set_program_name(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_version(&self) -> Option<String> {\n unsafe {\n let version = ffi::gtk_about_dialog_get_version(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if version.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&version)).to_string())\n }\n }\n }\n\n pub fn set_version(&self, version: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(version.as_bytes());\n\n ffi::gtk_about_dialog_set_version(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_copyright(&self) -> Option<String> {\n unsafe {\n let copyright = ffi::gtk_about_dialog_get_copyright(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if copyright.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(©right)).to_string())\n }\n }\n }\n\n pub fn set_copyright(&self, copyright: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(copyright.as_bytes());\n\n ffi::gtk_about_dialog_set_copyright(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_comments(&self) -> Option<String> {\n unsafe {\n let comments = ffi::gtk_about_dialog_get_comments(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if comments.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&comments)).to_string())\n }\n }\n }\n\n pub fn set_comments(&self, comments: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(comments.as_bytes());\n\n ffi::gtk_about_dialog_set_comments(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_license(&self) -> Option<String> {\n unsafe {\n let license = ffi::gtk_about_dialog_get_license(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if license.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&license)).to_string())\n }\n }\n }\n\n pub fn set_license(&self, license: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(license.as_bytes());\n\n ffi::gtk_about_dialog_set_license(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_wrap_license(&self) -> bool {\n unsafe { to_bool(ffi::gtk_about_dialog_get_wrap_license(GTK_ABOUT_DIALOG(self.unwrap_widget()))) }\n }\n\n pub fn set_wrap_license(&self, wrap_license: bool) -> () {\n unsafe { ffi::gtk_about_dialog_set_wrap_license(GTK_ABOUT_DIALOG(self.unwrap_widget()), to_gboolean(wrap_license)) }\n }\n\n pub fn get_license_type(&self) -> gtk::License {\n unsafe { ffi::gtk_about_dialog_get_license_type(GTK_ABOUT_DIALOG(self.unwrap_widget())) }\n }\n\n pub fn 
set_license_type(&self, license_type: gtk::License) -> () {\n unsafe { ffi::gtk_about_dialog_set_license_type(GTK_ABOUT_DIALOG(self.unwrap_widget()), license_type) }\n }\n\n pub fn get_website(&self) -> Option<String> {\n unsafe {\n let website = ffi::gtk_about_dialog_get_website(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if website.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&website)).to_string())\n }\n }\n }\n\n pub fn set_website(&self, website: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(website.as_bytes());\n\n ffi::gtk_about_dialog_set_website(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_website_label(&self) -> Option<String> {\n unsafe {\n let website_label = ffi::gtk_about_dialog_get_website_label(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if website_label.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&website_label)).to_string())\n }\n }\n }\n\n pub fn set_website_label(&self, website_label: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(website_label.as_bytes());\n\n ffi::gtk_about_dialog_set_website_label(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn get_authors(&self) -> Vec<String> {\n let authors = unsafe { ffi::gtk_about_dialog_get_authors(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !authors.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = authors.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_authors(&self, authors: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in authors.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n tmp_vec.push(::std::ptr::null_mut());\n unsafe { ffi::gtk_about_dialog_set_authors(GTK_ABOUT_DIALOG(self.unwrap_widget()), tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_artists(&self) -> Vec<String> {\n let artists = unsafe { ffi::gtk_about_dialog_get_artists(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !artists.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = artists.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_artists(&self, artists: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in artists.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n tmp_vec.push(::std::ptr::null_mut());\n unsafe { ffi::gtk_about_dialog_set_artists(GTK_ABOUT_DIALOG(self.unwrap_widget()), tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_documenters(&self) -> Vec<String> {\n let documenters = unsafe { ffi::gtk_about_dialog_get_documenters(GTK_ABOUT_DIALOG(self.unwrap_widget())) };\n let mut ret = Vec::new();\n\n if !documenters.is_null() {\n let mut it = 0;\n\n unsafe {\n loop {\n let tmp = documenters.offset(it);\n\n if tmp.is_null() {\n break;\n }\n ret.push(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&*tmp)).to_string());\n it += 1;\n }\n }\n }\n ret\n }\n\n pub fn set_documenters(&self, documenters: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in documenters.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n \n tmp_vec.push(c_str.as_ptr());\n }\n 
tmp_vec.push(::std::ptr::null_mut());\n unsafe { ffi::gtk_about_dialog_set_documenters(GTK_ABOUT_DIALOG(self.unwrap_widget()), tmp_vec.as_slice().as_ptr()) }\n }\n\n pub fn get_translator_credits(&self) -> Option<String> {\n unsafe {\n let translator_credits = ffi::gtk_about_dialog_get_translator_credits(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if translator_credits.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&translator_credits)).to_string())\n }\n }\n }\n\n pub fn set_translator_credits(&self, translator_credits: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(translator_credits.as_bytes());\n\n ffi::gtk_about_dialog_set_translator_credits(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n \/*pub fn get_logo(&self) -> Option<String> {\n let logo = unsafe { ffi::gtk_about_dialog_set_logo(self.pointer)) };\n\n if logo.is_null() {\n None\n } else {\n Some(unsafe { gtk::FFIWidget::wrap_widget(logo) })\n }\n }\n\n pub fn set_logo(&self, logo: Pixbuf) -> () {\n unsafe { ffi::gtk_about_dialog_set_logo(GTK_ABOUT_DIALOG(self.unwrap_widget()), GDK_PIXBUF(logo.unwrap_widget())) }\n }*\/\n\n pub fn get_logo_icon_name(&self) -> Option<String> {\n unsafe {\n let logo_icon_name = ffi::gtk_about_dialog_get_logo_icon_name(GTK_ABOUT_DIALOG(self.unwrap_widget()));\n\n if logo_icon_name.is_null() {\n None\n } else {\n Some(String::from_utf8_lossy(::std::ffi::c_str_to_bytes(&logo_icon_name)).to_string())\n }\n }\n }\n\n pub fn set_logo_icon_name(&self, logo_icon_name: &str) -> () {\n unsafe {\n let c_str = CString::from_slice(logo_icon_name.as_bytes());\n\n ffi::gtk_about_dialog_set_logo_icon_name(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr())\n };\n }\n\n pub fn add_credit_section(&self, section_name: &str, people: &Vec<String>) -> () {\n let mut tmp_vec = Vec::new();\n\n for tmp in people.iter() {\n let c_str = CString::from_slice(tmp.as_bytes());\n\n tmp_vec.push(c_str.as_ptr());\n }\n tmp_vec.push(::std::ptr::null_mut());\n unsafe {\n let c_str = CString::from_slice(section_name.as_bytes());\n\n ffi::gtk_about_dialog_add_credit_section(GTK_ABOUT_DIALOG(self.unwrap_widget()), c_str.as_ptr(), tmp_vec.as_slice().as_ptr())\n }\n }\n\n \/*pub fn show(parent: Window, properties: Vec<String>) -> () {\n unsafe { ffi::gtk_show_about_dialog(GTK_WINDOW(parent), first_property_name, ...) 
}\n }*\/\n}\n\nimpl_drop!(AboutDialog);\nimpl_TraitWidget!(AboutDialog);\n\nimpl gtk::ContainerTrait for AboutDialog {}\nimpl gtk::BinTrait for AboutDialog {}\nimpl gtk::WindowTrait for AboutDialog {}\nimpl gtk::DialogTrait for AboutDialog {}\n\nimpl_widget_events!(AboutDialog);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add testcase.<commit_after>\/\/ xfail-pretty\n\nuse std;\nuse rustc;\n\nimport rustc::*;\nimport std::io::*;\n\nimport rustc::driver::diagnostic;\nimport rustc::syntax::ast;\nimport rustc::syntax::codemap;\nimport rustc::syntax::parse::parser;\nimport rustc::syntax::print::*;\n\nfn new_parse_sess() -> parser::parse_sess {\n let cm = codemap::new_codemap();\n let handler = diagnostic::mk_handler(option::none);\n let sess = @{\n cm: cm,\n mutable next_id: 1,\n span_diagnostic: diagnostic::mk_span_handler(handler, cm),\n mutable chpos: 0u,\n mutable byte_pos: 0u\n };\n ret sess;\n}\n\niface fake_ext_ctxt {\n fn session() -> fake_session;\n}\n\ntype fake_options = {cfg: ast::crate_cfg};\n\ntype fake_session = {opts: @fake_options,\n parse_sess: parser::parse_sess};\n\nimpl of fake_ext_ctxt for fake_session {\n fn session() -> fake_session {self}\n}\n\nfn mk_ctxt() -> fake_ext_ctxt {\n let opts : fake_options = {cfg: []};\n {opts: @opts, parse_sess: new_parse_sess()} as fake_ext_ctxt\n}\n\n\nfn main() {\n let ext_cx = mk_ctxt();\n\n let abc = #ast{23};\n check_pp(abc, pprust::print_expr, \"23\");\n\n let expr = #ast{1 - $0 + 8};\n check_pp(expr, pprust::print_expr, \"1 - $0 + 8\");\n\n let expr2 = rustc::syntax::ext::qquote::replace(expr, [abc]);\n check_pp(expr2, pprust::print_expr, \"1 - 23 + 8\");\n\n let expr3 = #ast{2 - $(abc) + 7};\n check_pp(expr3, pprust::print_expr, \"2 - 23 + 7\");\n\n let expr4 = #ast{2 - $(#(3)) + 9};\n check_pp(expr4, pprust::print_expr, \"2 - 3 + 9\");\n\n let ty = #ast(ty){option<int>};\n check_pp(ty, pprust::print_type, \"option<int>\");\n\n let item = #ast(item){const x : int = 10;};\n check_pp(item, pprust::print_item, \"const x: int = 10;\");\n\n let stmt = #ast(stmt){let x = 20;};\n check_pp(*stmt, pprust::print_stmt, \"let x = 20;\");\n\n let pat = #ast(pat){some(_)};\n check_pp(pat, pprust::print_pat, \"some(_)\");\n}\n\nfn check_pp<T>(expr: T, f: fn(pprust::ps, T), expect: str) {\n let buf = mk_mem_buffer();\n let pp = pprust::rust_printer(buf as std::io::writer);\n f(pp, expr);\n pp::eof(pp.s);\n let str = mem_buffer_str(buf);\n stdout().write_line(str);\n if expect != \"\" {assert str == expect;}\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Implements parallel traversals over the DOM and flow trees.\n\/\/!\n\/\/! This code is highly unsafe. 
Keep this file small and easy to audit.\n\nuse css::matching::{ApplicableDeclarations, CannotShare, MatchMethods, StyleWasShared};\nuse layout::construct::FlowConstructor;\nuse layout::context::LayoutContext;\nuse layout::extra::LayoutAuxMethods;\nuse layout::flow::{Flow, PreorderFlowTraversal, PostorderFlowTraversal};\nuse layout::flow;\nuse layout::layout_task::{AssignHeightsAndStoreOverflowTraversal, AssignWidthsTraversal};\nuse layout::layout_task::{BubbleWidthsTraversal};\nuse layout::util::{LayoutDataAccess, OpaqueNode};\nuse layout::wrapper::{layout_node_to_unsafe_layout_node, LayoutNode, PostorderNodeMutTraversal};\nuse layout::wrapper::{ThreadSafeLayoutNode, UnsafeLayoutNode};\n\nuse servo_util::time::{ProfilerChan, profile};\nuse servo_util::time;\nuse servo_util::workqueue::{WorkQueue, WorkUnit, WorkerProxy};\nuse std::cast;\nuse std::ptr;\nuse std::sync::atomics::{AtomicInt, Relaxed, SeqCst};\nuse style::{Stylist, TNode};\n\n#[allow(dead_code)]\nfn static_assertion(node: UnsafeLayoutNode) {\n unsafe {\n let _: PaddedUnsafeFlow = ::std::intrinsics::transmute(node);\n }\n}\n\n\/\/\/ Memory representation that is at least as large as UnsafeLayoutNode, as it must be\n\/\/\/ safely transmutable to and from that type to accommodate the type-unsafe parallel work\n\/\/\/ queue usage that stores both flows and nodes.\npub type PaddedUnsafeFlow = (uint, uint, uint);\n\ntrait UnsafeFlowConversions {\n fn to_flow(&self) -> UnsafeFlow;\n fn from_flow(flow: &UnsafeFlow) -> Self;\n}\n\nimpl UnsafeFlowConversions for PaddedUnsafeFlow {\n fn to_flow(&self) -> UnsafeFlow {\n let (vtable, ptr, _padding) = *self;\n (vtable, ptr)\n }\n\n fn from_flow(flow: &UnsafeFlow) -> PaddedUnsafeFlow {\n let &(vtable, ptr) = flow;\n (vtable, ptr, 0)\n }\n}\n\n\/\/\/ Vtable + pointer representation of a Flow trait object.\npub type UnsafeFlow = (uint, uint);\n\nfn null_unsafe_flow() -> UnsafeFlow {\n (0, 0)\n}\n\npub fn owned_flow_to_unsafe_flow(flow: *~Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&*flow)\n }\n}\n\npub fn mut_owned_flow_to_unsafe_flow(flow: *mut ~Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&*flow)\n }\n}\n\npub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&flow)\n }\n}\n\npub fn mut_borrowed_flow_to_unsafe_flow(flow: &mut Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&flow)\n }\n}\n\n\/\/\/ Information that we need stored in each DOM node.\npub struct DomParallelInfo {\n \/\/\/ The number of children that still need work done.\n children_count: AtomicInt,\n}\n\nimpl DomParallelInfo {\n pub fn new() -> DomParallelInfo {\n DomParallelInfo {\n children_count: AtomicInt::new(0),\n }\n }\n}\n\n\/\/\/ Information that we need stored in each flow.\npub struct FlowParallelInfo {\n \/\/\/ The number of children that still need work done.\n children_count: AtomicInt,\n \/\/\/ The address of the parent flow.\n parent: UnsafeFlow,\n}\n\nimpl FlowParallelInfo {\n pub fn new() -> FlowParallelInfo {\n FlowParallelInfo {\n children_count: AtomicInt::new(0),\n parent: null_unsafe_flow(),\n }\n }\n}\n\n\/\/\/ A parallel bottom-up flow traversal.\ntrait ParallelPostorderFlowTraversal : PostorderFlowTraversal {\n \/\/\/ Process current flow and potentially traverse its ancestors.\n \/\/\/\n \/\/\/ If we are the last child that finished processing, recursively process\n \/\/\/ our parent. 
Else, stop.\n \/\/\/ Also, stop at the root (obviously :P).\n \/\/\/\n \/\/\/ Thus, if we start with all the leaves of a tree, we end up traversing\n \/\/\/ the whole tree bottom-up because each parent will be processed exactly\n \/\/\/ once (by the last child that finishes processing).\n \/\/\/\n \/\/\/ The only communication between siblings is that they both\n \/\/\/ fetch-and-subtract the parent's children count.\n fn run_parallel(&mut self,\n mut unsafe_flow: UnsafeFlow,\n _: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n loop {\n unsafe {\n \/\/ Get a real flow.\n let flow: &mut ~Flow = cast::transmute(&unsafe_flow);\n\n \/\/ Perform the appropriate traversal.\n if self.should_process(*flow) {\n self.process(*flow);\n }\n\n let base = flow::mut_base(*flow);\n\n \/\/ Reset the count of children for the next layout traversal.\n base.parallel.children_count.store(base.children.len() as int, Relaxed);\n\n \/\/ Possibly enqueue the parent.\n let unsafe_parent = base.parallel.parent;\n if unsafe_parent == null_unsafe_flow() {\n \/\/ We're done!\n break\n }\n\n \/\/ No, we're not at the root yet. Then are we the last child\n \/\/ of our parent to finish processing? If so, we can continue\n \/\/ on with our parent; otherwise, we've gotta wait.\n let parent: &mut ~Flow = cast::transmute(&unsafe_parent);\n let parent_base = flow::mut_base(*parent);\n if parent_base.parallel.children_count.fetch_sub(1, SeqCst) == 1 {\n \/\/ We were the last child of our parent. Reflow our parent.\n unsafe_flow = unsafe_parent\n } else {\n \/\/ Stop.\n break\n }\n }\n }\n }\n}\n\n\/\/\/ A parallel top-down flow traversal.\ntrait ParallelPreorderFlowTraversal : PreorderFlowTraversal {\n fn run_parallel(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>);\n\n fn run_parallel_helper(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>,\n top_down_func: extern \"Rust\" fn(PaddedUnsafeFlow,\n &mut WorkerProxy<*mut LayoutContext,\n PaddedUnsafeFlow>),\n bottom_up_func: extern \"Rust\" fn(PaddedUnsafeFlow,\n &mut WorkerProxy<*mut LayoutContext,\n PaddedUnsafeFlow>)) {\n let mut had_children = false;\n unsafe {\n \/\/ Get a real flow.\n let flow: &mut ~Flow = cast::transmute(&unsafe_flow);\n\n \/\/ Perform the appropriate traversal.\n self.process(*flow);\n\n \/\/ Possibly enqueue the children.\n for kid in flow::child_iter(*flow) {\n had_children = true;\n proxy.push(WorkUnit {\n fun: top_down_func,\n data: UnsafeFlowConversions::from_flow(&borrowed_flow_to_unsafe_flow(kid)),\n });\n }\n\n }\n\n \/\/ If there were no more children, start assigning heights.\n if !had_children {\n bottom_up_func(UnsafeFlowConversions::from_flow(&unsafe_flow), proxy)\n }\n }\n}\n\nimpl<'a> ParallelPostorderFlowTraversal for BubbleWidthsTraversal<'a> {}\n\nimpl<'a> ParallelPreorderFlowTraversal for AssignWidthsTraversal<'a> {\n fn run_parallel(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n self.run_parallel_helper(unsafe_flow,\n proxy,\n assign_widths,\n assign_heights_and_store_overflow)\n }\n}\n\nimpl<'a> ParallelPostorderFlowTraversal for AssignHeightsAndStoreOverflowTraversal<'a> {}\n\nfn recalc_style_for_node(unsafe_layout_node: UnsafeLayoutNode,\n proxy: &mut WorkerProxy<*mut LayoutContext,UnsafeLayoutNode>) {\n unsafe {\n let layout_context: &mut LayoutContext = cast::transmute(*proxy.user_data());\n\n \/\/ Get a real layout node.\n let node: LayoutNode = 
::std::intrinsics::transmute(unsafe_layout_node);\n\n \/\/ Initialize layout data.\n \/\/\n \/\/ FIXME(pcwalton): Stop allocating here. Ideally this should just be done by the HTML\n \/\/ parser.\n node.initialize_layout_data(layout_context.layout_chan.clone());\n\n \/\/ Get the parent node.\n let parent_opt = if OpaqueNode::from_layout_node(&node) == layout_context.reflow_root {\n None\n } else {\n node.parent_node()\n };\n\n \/\/ First, check to see whether we can share a style with someone.\n let style_sharing_candidate_cache = layout_context.style_sharing_candidate_cache();\n let sharing_result = node.share_style_if_possible(style_sharing_candidate_cache,\n parent_opt.clone());\n\n \/\/ Otherwise, match and cascade selectors.\n match sharing_result {\n CannotShare(mut shareable) => {\n let mut applicable_declarations = ApplicableDeclarations::new();\n\n if node.is_element() {\n \/\/ Perform the CSS selector matching.\n let stylist: &Stylist = cast::transmute(layout_context.stylist);\n node.match_node(stylist, &mut applicable_declarations, &mut shareable);\n }\n\n \/\/ Perform the CSS cascade.\n node.cascade_node(parent_opt,\n layout_context.initial_css_values.get(),\n &applicable_declarations,\n layout_context.applicable_declarations_cache());\n\n \/\/ Add ourselves to the LRU cache.\n if shareable {\n style_sharing_candidate_cache.insert_if_possible(&node);\n }\n }\n StyleWasShared(index) => style_sharing_candidate_cache.touch(index),\n }\n\n \/\/ Prepare for flow construction by counting the node's children and storing that count.\n let mut child_count = 0;\n for _ in node.children() {\n child_count += 1;\n }\n if child_count != 0 {\n let mut layout_data_ref = node.mutate_layout_data();\n match *layout_data_ref.get() {\n Some(ref mut layout_data) => {\n layout_data.data.parallel.children_count.store(child_count as int, Relaxed)\n }\n None => fail!(\"no layout data\"),\n }\n\n \/\/ Enqueue kids.\n for kid in node.children() {\n proxy.push(WorkUnit {\n fun: recalc_style_for_node,\n data: layout_node_to_unsafe_layout_node(&kid),\n });\n }\n return\n }\n\n \/\/ If we got here, we're a leaf. Start construction of flows for this node.\n construct_flows(unsafe_layout_node, proxy)\n }\n}\n\nfn construct_flows(mut unsafe_layout_node: UnsafeLayoutNode,\n proxy: &mut WorkerProxy<*mut LayoutContext,UnsafeLayoutNode>) {\n loop {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n\n \/\/ Get a real layout node.\n let node: LayoutNode = unsafe {\n cast::transmute(unsafe_layout_node)\n };\n\n \/\/ Construct flows for this node.\n {\n let mut flow_constructor = FlowConstructor::new(layout_context, None);\n flow_constructor.process(&ThreadSafeLayoutNode::new(&node));\n }\n\n \/\/ Reset the count of children for the next traversal.\n \/\/\n \/\/ FIXME(pcwalton): Use children().len() when the implementation of that is efficient.\n let mut child_count = 0;\n for _ in node.children() {\n child_count += 1\n }\n {\n let mut layout_data_ref = node.mutate_layout_data();\n match *layout_data_ref.get() {\n Some(ref mut layout_data) => {\n layout_data.data.parallel.children_count.store(child_count as int, Relaxed)\n }\n None => fail!(\"no layout data\"),\n }\n }\n\n \/\/ If this is the reflow root, we're done.\n if layout_context.reflow_root == OpaqueNode::from_layout_node(&node) {\n break\n }\n\n \/\/ Otherwise, enqueue the parent.\n match node.parent_node() {\n Some(parent) => {\n\n \/\/ No, we're not at the root yet. 
Then are we the last sibling of our parent?\n \/\/ If so, we can continue on with our parent; otherwise, we've gotta wait.\n unsafe {\n match *parent.borrow_layout_data_unchecked() {\n Some(ref parent_layout_data) => {\n let parent_layout_data = cast::transmute_mut(parent_layout_data);\n if parent_layout_data.data\n .parallel\n .children_count\n .fetch_sub(1, SeqCst) == 1 {\n \/\/ We were the last child of our parent. Construct flows for our\n \/\/ parent.\n unsafe_layout_node = layout_node_to_unsafe_layout_node(&parent)\n } else {\n \/\/ Get out of here and find another node to work on.\n break\n }\n }\n None => fail!(\"no layout data for parent?!\"),\n }\n }\n }\n None => fail!(\"no parent and weren't at reflow root?!\"),\n }\n }\n}\n\nfn assign_widths(unsafe_flow: PaddedUnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n let mut assign_widths_traversal = AssignWidthsTraversal {\n layout_context: layout_context,\n };\n assign_widths_traversal.run_parallel(unsafe_flow.to_flow(), proxy)\n}\n\nfn assign_heights_and_store_overflow(unsafe_flow: PaddedUnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n let mut assign_heights_traversal = AssignHeightsAndStoreOverflowTraversal {\n layout_context: layout_context,\n };\n assign_heights_traversal.run_parallel(unsafe_flow.to_flow(), proxy)\n}\n\npub fn recalc_style_for_subtree(root_node: &LayoutNode,\n layout_context: &mut LayoutContext,\n queue: &mut WorkQueue<*mut LayoutContext,UnsafeLayoutNode>) {\n unsafe {\n queue.data = cast::transmute(layout_context)\n }\n\n \/\/ Enqueue the root node.\n queue.push(WorkUnit {\n fun: recalc_style_for_node,\n data: layout_node_to_unsafe_layout_node(root_node),\n });\n\n queue.run();\n\n queue.data = ptr::mut_null()\n}\n\npub fn traverse_flow_tree_preorder(root: &mut ~Flow,\n profiler_chan: ProfilerChan,\n layout_context: &mut LayoutContext,\n queue: &mut WorkQueue<*mut LayoutContext,PaddedUnsafeFlow>) {\n unsafe {\n queue.data = cast::transmute(layout_context)\n }\n\n profile(time::LayoutParallelWarmupCategory, profiler_chan, || {\n queue.push(WorkUnit {\n fun: assign_widths,\n data: UnsafeFlowConversions::from_flow(&mut_owned_flow_to_unsafe_flow(root)),\n })\n });\n\n queue.run();\n\n queue.data = ptr::mut_null()\n}\n<commit_msg>layout: Update `parallel.rs` to use `OpaqueNodeMethods`<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Implements parallel traversals over the DOM and flow trees.\n\/\/!\n\/\/! This code is highly unsafe. 
Keep this file small and easy to audit.\n\nuse css::matching::{ApplicableDeclarations, CannotShare, MatchMethods, StyleWasShared};\nuse layout::construct::FlowConstructor;\nuse layout::context::LayoutContext;\nuse layout::extra::LayoutAuxMethods;\nuse layout::flow::{Flow, PreorderFlowTraversal, PostorderFlowTraversal};\nuse layout::flow;\nuse layout::layout_task::{AssignHeightsAndStoreOverflowTraversal, AssignWidthsTraversal};\nuse layout::layout_task::{BubbleWidthsTraversal};\nuse layout::util::{LayoutDataAccess, OpaqueNodeMethods};\nuse layout::wrapper::{layout_node_to_unsafe_layout_node, LayoutNode, PostorderNodeMutTraversal};\nuse layout::wrapper::{ThreadSafeLayoutNode, UnsafeLayoutNode};\n\nuse gfx::display_list::OpaqueNode;\nuse servo_util::time::{ProfilerChan, profile};\nuse servo_util::time;\nuse servo_util::workqueue::{WorkQueue, WorkUnit, WorkerProxy};\nuse std::cast;\nuse std::ptr;\nuse std::sync::atomics::{AtomicInt, Relaxed, SeqCst};\nuse style::{Stylist, TNode};\n\n#[allow(dead_code)]\nfn static_assertion(node: UnsafeLayoutNode) {\n unsafe {\n let _: PaddedUnsafeFlow = ::std::intrinsics::transmute(node);\n }\n}\n\n\/\/\/ Memory representation that is at least as large as UnsafeLayoutNode, as it must be\n\/\/\/ safely transmutable to and from that type to accommodate the type-unsafe parallel work\n\/\/\/ queue usage that stores both flows and nodes.\npub type PaddedUnsafeFlow = (uint, uint, uint);\n\ntrait UnsafeFlowConversions {\n fn to_flow(&self) -> UnsafeFlow;\n fn from_flow(flow: &UnsafeFlow) -> Self;\n}\n\nimpl UnsafeFlowConversions for PaddedUnsafeFlow {\n fn to_flow(&self) -> UnsafeFlow {\n let (vtable, ptr, _padding) = *self;\n (vtable, ptr)\n }\n\n fn from_flow(flow: &UnsafeFlow) -> PaddedUnsafeFlow {\n let &(vtable, ptr) = flow;\n (vtable, ptr, 0)\n }\n}\n\n\/\/\/ Vtable + pointer representation of a Flow trait object.\npub type UnsafeFlow = (uint, uint);\n\nfn null_unsafe_flow() -> UnsafeFlow {\n (0, 0)\n}\n\npub fn owned_flow_to_unsafe_flow(flow: *~Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&*flow)\n }\n}\n\npub fn mut_owned_flow_to_unsafe_flow(flow: *mut ~Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&*flow)\n }\n}\n\npub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&flow)\n }\n}\n\npub fn mut_borrowed_flow_to_unsafe_flow(flow: &mut Flow) -> UnsafeFlow {\n unsafe {\n cast::transmute_copy(&flow)\n }\n}\n\n\/\/\/ Information that we need stored in each DOM node.\npub struct DomParallelInfo {\n \/\/\/ The number of children that still need work done.\n children_count: AtomicInt,\n}\n\nimpl DomParallelInfo {\n pub fn new() -> DomParallelInfo {\n DomParallelInfo {\n children_count: AtomicInt::new(0),\n }\n }\n}\n\n\/\/\/ Information that we need stored in each flow.\npub struct FlowParallelInfo {\n \/\/\/ The number of children that still need work done.\n children_count: AtomicInt,\n \/\/\/ The address of the parent flow.\n parent: UnsafeFlow,\n}\n\nimpl FlowParallelInfo {\n pub fn new() -> FlowParallelInfo {\n FlowParallelInfo {\n children_count: AtomicInt::new(0),\n parent: null_unsafe_flow(),\n }\n }\n}\n\n\/\/\/ A parallel bottom-up flow traversal.\ntrait ParallelPostorderFlowTraversal : PostorderFlowTraversal {\n \/\/\/ Process current flow and potentially traverse its ancestors.\n \/\/\/\n \/\/\/ If we are the last child that finished processing, recursively process\n \/\/\/ our parent. 
Else, stop.\n \/\/\/ Also, stop at the root (obviously :P).\n \/\/\/\n \/\/\/ Thus, if we start with all the leaves of a tree, we end up traversing\n \/\/\/ the whole tree bottom-up because each parent will be processed exactly\n \/\/\/ once (by the last child that finishes processing).\n \/\/\/\n \/\/\/ The only communication between siblings is that they both\n \/\/\/ fetch-and-subtract the parent's children count.\n fn run_parallel(&mut self,\n mut unsafe_flow: UnsafeFlow,\n _: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n loop {\n unsafe {\n \/\/ Get a real flow.\n let flow: &mut ~Flow = cast::transmute(&unsafe_flow);\n\n \/\/ Perform the appropriate traversal.\n if self.should_process(*flow) {\n self.process(*flow);\n }\n\n let base = flow::mut_base(*flow);\n\n \/\/ Reset the count of children for the next layout traversal.\n base.parallel.children_count.store(base.children.len() as int, Relaxed);\n\n \/\/ Possibly enqueue the parent.\n let unsafe_parent = base.parallel.parent;\n if unsafe_parent == null_unsafe_flow() {\n \/\/ We're done!\n break\n }\n\n \/\/ No, we're not at the root yet. Then are we the last child\n \/\/ of our parent to finish processing? If so, we can continue\n \/\/ on with our parent; otherwise, we've gotta wait.\n let parent: &mut ~Flow = cast::transmute(&unsafe_parent);\n let parent_base = flow::mut_base(*parent);\n if parent_base.parallel.children_count.fetch_sub(1, SeqCst) == 1 {\n \/\/ We were the last child of our parent. Reflow our parent.\n unsafe_flow = unsafe_parent\n } else {\n \/\/ Stop.\n break\n }\n }\n }\n }\n}\n\n\/\/\/ A parallel top-down flow traversal.\ntrait ParallelPreorderFlowTraversal : PreorderFlowTraversal {\n fn run_parallel(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>);\n\n fn run_parallel_helper(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>,\n top_down_func: extern \"Rust\" fn(PaddedUnsafeFlow,\n &mut WorkerProxy<*mut LayoutContext,\n PaddedUnsafeFlow>),\n bottom_up_func: extern \"Rust\" fn(PaddedUnsafeFlow,\n &mut WorkerProxy<*mut LayoutContext,\n PaddedUnsafeFlow>)) {\n let mut had_children = false;\n unsafe {\n \/\/ Get a real flow.\n let flow: &mut ~Flow = cast::transmute(&unsafe_flow);\n\n \/\/ Perform the appropriate traversal.\n self.process(*flow);\n\n \/\/ Possibly enqueue the children.\n for kid in flow::child_iter(*flow) {\n had_children = true;\n proxy.push(WorkUnit {\n fun: top_down_func,\n data: UnsafeFlowConversions::from_flow(&borrowed_flow_to_unsafe_flow(kid)),\n });\n }\n\n }\n\n \/\/ If there were no more children, start assigning heights.\n if !had_children {\n bottom_up_func(UnsafeFlowConversions::from_flow(&unsafe_flow), proxy)\n }\n }\n}\n\nimpl<'a> ParallelPostorderFlowTraversal for BubbleWidthsTraversal<'a> {}\n\nimpl<'a> ParallelPreorderFlowTraversal for AssignWidthsTraversal<'a> {\n fn run_parallel(&mut self,\n unsafe_flow: UnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n self.run_parallel_helper(unsafe_flow,\n proxy,\n assign_widths,\n assign_heights_and_store_overflow)\n }\n}\n\nimpl<'a> ParallelPostorderFlowTraversal for AssignHeightsAndStoreOverflowTraversal<'a> {}\n\nfn recalc_style_for_node(unsafe_layout_node: UnsafeLayoutNode,\n proxy: &mut WorkerProxy<*mut LayoutContext,UnsafeLayoutNode>) {\n unsafe {\n let layout_context: &mut LayoutContext = cast::transmute(*proxy.user_data());\n\n \/\/ Get a real layout node.\n let node: LayoutNode = 
::std::intrinsics::transmute(unsafe_layout_node);\n\n \/\/ Initialize layout data.\n \/\/\n \/\/ FIXME(pcwalton): Stop allocating here. Ideally this should just be done by the HTML\n \/\/ parser.\n node.initialize_layout_data(layout_context.layout_chan.clone());\n\n \/\/ Get the parent node.\n let opaque_node: OpaqueNode = OpaqueNodeMethods::from_layout_node(&node);\n let parent_opt = if opaque_node == layout_context.reflow_root {\n None\n } else {\n node.parent_node()\n };\n\n \/\/ First, check to see whether we can share a style with someone.\n let style_sharing_candidate_cache = layout_context.style_sharing_candidate_cache();\n let sharing_result = node.share_style_if_possible(style_sharing_candidate_cache,\n parent_opt.clone());\n\n \/\/ Otherwise, match and cascade selectors.\n match sharing_result {\n CannotShare(mut shareable) => {\n let mut applicable_declarations = ApplicableDeclarations::new();\n\n if node.is_element() {\n \/\/ Perform the CSS selector matching.\n let stylist: &Stylist = cast::transmute(layout_context.stylist);\n node.match_node(stylist, &mut applicable_declarations, &mut shareable);\n }\n\n \/\/ Perform the CSS cascade.\n node.cascade_node(parent_opt,\n layout_context.initial_css_values.get(),\n &applicable_declarations,\n layout_context.applicable_declarations_cache());\n\n \/\/ Add ourselves to the LRU cache.\n if shareable {\n style_sharing_candidate_cache.insert_if_possible(&node);\n }\n }\n StyleWasShared(index) => style_sharing_candidate_cache.touch(index),\n }\n\n \/\/ Prepare for flow construction by counting the node's children and storing that count.\n let mut child_count = 0;\n for _ in node.children() {\n child_count += 1;\n }\n if child_count != 0 {\n let mut layout_data_ref = node.mutate_layout_data();\n match *layout_data_ref.get() {\n Some(ref mut layout_data) => {\n layout_data.data.parallel.children_count.store(child_count as int, Relaxed)\n }\n None => fail!(\"no layout data\"),\n }\n\n \/\/ Enqueue kids.\n for kid in node.children() {\n proxy.push(WorkUnit {\n fun: recalc_style_for_node,\n data: layout_node_to_unsafe_layout_node(&kid),\n });\n }\n return\n }\n\n \/\/ If we got here, we're a leaf. 
Start construction of flows for this node.\n construct_flows(unsafe_layout_node, proxy)\n }\n}\n\nfn construct_flows(mut unsafe_layout_node: UnsafeLayoutNode,\n proxy: &mut WorkerProxy<*mut LayoutContext,UnsafeLayoutNode>) {\n loop {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n\n \/\/ Get a real layout node.\n let node: LayoutNode = unsafe {\n cast::transmute(unsafe_layout_node)\n };\n\n \/\/ Construct flows for this node.\n {\n let mut flow_constructor = FlowConstructor::new(layout_context, None);\n flow_constructor.process(&ThreadSafeLayoutNode::new(&node));\n }\n\n \/\/ Reset the count of children for the next traversal.\n \/\/\n \/\/ FIXME(pcwalton): Use children().len() when the implementation of that is efficient.\n let mut child_count = 0;\n for _ in node.children() {\n child_count += 1\n }\n {\n let mut layout_data_ref = node.mutate_layout_data();\n match *layout_data_ref.get() {\n Some(ref mut layout_data) => {\n layout_data.data.parallel.children_count.store(child_count as int, Relaxed)\n }\n None => fail!(\"no layout data\"),\n }\n }\n\n \/\/ If this is the reflow root, we're done.\n let opaque_node: OpaqueNode = OpaqueNodeMethods::from_layout_node(&node);\n if layout_context.reflow_root == opaque_node {\n break\n }\n\n \/\/ Otherwise, enqueue the parent.\n match node.parent_node() {\n Some(parent) => {\n\n \/\/ No, we're not at the root yet. Then are we the last sibling of our parent?\n \/\/ If so, we can continue on with our parent; otherwise, we've gotta wait.\n unsafe {\n match *parent.borrow_layout_data_unchecked() {\n Some(ref parent_layout_data) => {\n let parent_layout_data = cast::transmute_mut(parent_layout_data);\n if parent_layout_data.data\n .parallel\n .children_count\n .fetch_sub(1, SeqCst) == 1 {\n \/\/ We were the last child of our parent. 
Construct flows for our\n \/\/ parent.\n unsafe_layout_node = layout_node_to_unsafe_layout_node(&parent)\n } else {\n \/\/ Get out of here and find another node to work on.\n break\n }\n }\n None => fail!(\"no layout data for parent?!\"),\n }\n }\n }\n None => fail!(\"no parent and weren't at reflow root?!\"),\n }\n }\n}\n\nfn assign_widths(unsafe_flow: PaddedUnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n let mut assign_widths_traversal = AssignWidthsTraversal {\n layout_context: layout_context,\n };\n assign_widths_traversal.run_parallel(unsafe_flow.to_flow(), proxy)\n}\n\nfn assign_heights_and_store_overflow(unsafe_flow: PaddedUnsafeFlow,\n proxy: &mut WorkerProxy<*mut LayoutContext,PaddedUnsafeFlow>) {\n let layout_context: &mut LayoutContext = unsafe {\n cast::transmute(*proxy.user_data())\n };\n let mut assign_heights_traversal = AssignHeightsAndStoreOverflowTraversal {\n layout_context: layout_context,\n };\n assign_heights_traversal.run_parallel(unsafe_flow.to_flow(), proxy)\n}\n\npub fn recalc_style_for_subtree(root_node: &LayoutNode,\n layout_context: &mut LayoutContext,\n queue: &mut WorkQueue<*mut LayoutContext,UnsafeLayoutNode>) {\n unsafe {\n queue.data = cast::transmute(layout_context)\n }\n\n \/\/ Enqueue the root node.\n queue.push(WorkUnit {\n fun: recalc_style_for_node,\n data: layout_node_to_unsafe_layout_node(root_node),\n });\n\n queue.run();\n\n queue.data = ptr::mut_null()\n}\n\npub fn traverse_flow_tree_preorder(root: &mut ~Flow,\n profiler_chan: ProfilerChan,\n layout_context: &mut LayoutContext,\n queue: &mut WorkQueue<*mut LayoutContext,PaddedUnsafeFlow>) {\n unsafe {\n queue.data = cast::transmute(layout_context)\n }\n\n profile(time::LayoutParallelWarmupCategory, profiler_chan, || {\n queue.push(WorkUnit {\n fun: assign_widths,\n data: UnsafeFlowConversions::from_flow(&mut_owned_flow_to_unsafe_flow(root)),\n })\n });\n\n queue.run();\n\n queue.data = ptr::mut_null()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>unsafe mutability of static variables<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implements operations suitable on Option type. 
It can represents emptiness.<commit_after>\/\/ Copyright 2015 Pierre Talbot (IRCAM)\n\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse ncollections::ops::*;\n\nimpl<T> Cardinality for Option<T>\n{\n type Size = usize;\n fn size(&self) -> usize {\n self.is_some() as usize\n }\n}\n\nimpl<T> Singleton<T> for Option<T> {\n fn singleton(value: T) -> Option<T> {\n Some(value)\n }\n}\n\nimpl<T> Empty for Option<T> {\n fn empty() -> Option<T> {\n None\n }\n}\n\nimpl<T: Bounded> Bounded for Option<T> {\n type Bound = T::Bound;\n fn lower(&self) -> T::Bound {\n debug_assert!(self.is_some(), \"Cannot access lower bound on empty `Option` type.\");\n self.as_ref().unwrap().lower()\n }\n fn upper(&self) -> T::Bound {\n debug_assert!(self.is_some(), \"Cannot access upper bound on empty `Option` type.\");\n self.as_ref().unwrap().upper()\n }\n}\n\nimpl<T: PartialEq+Clone> Intersection for Option<T> {\n type Output = Option<T>;\n fn intersection(&self, other: &Option<T>) -> Option<T> {\n if self.is_empty() || other.is_empty() || self != other {\n None\n }\n else {\n self.clone()\n }\n }\n}\n\nimpl<T: PartialEq+Clone> Difference for Option<T> {\n type Output = Option<T>;\n fn difference(&self, other: &Option<T>) -> Option<T> {\n if self.is_empty() || self == other {\n None\n }\n else {\n self.clone()\n }\n }\n}\n\nimpl<T, U> Disjoint<Option<U>> for Option<T> where\n T: Disjoint<U>\n{\n fn is_disjoint(&self, other: &Option<U>) -> bool {\n self.is_empty() || other.is_empty() ||\n self.as_ref().unwrap().is_disjoint(other.as_ref().unwrap())\n }\n}\n\nimpl<U, T> Contains<U> for Option<T> where\n T: Contains<U>\n{\n fn contains(&self, value: &U) -> bool {\n self.as_ref().map_or(false, |x| x.contains(value))\n }\n}\n\nimpl<T, U> Subset<Option<U>> for Option<T> where\n T: Subset<U>\n{\n fn is_subset(&self, other: &Option<U>) -> bool {\n if self.is_empty() { true }\n else if other.is_empty() { false }\n else {\n self.as_ref().unwrap().is_subset(other.as_ref().unwrap())\n }\n }\n}\n\nimpl<T> ProperSubset for Option<T> where\n T: Subset + PartialEq\n{\n fn is_proper_subset(&self, other: &Option<T>) -> bool {\n self.is_subset(other) && self != other\n }\n}\n\nimpl<T> Overlap for Option<T> where\n T: Overlap\n{\n fn overlap(&self, other: &Option<T>) -> bool {\n if self.is_empty() || other.is_empty() { false }\n else {\n self.as_ref().unwrap().overlap(other.as_ref().unwrap())\n }\n }\n}\n\nfn shrink_if<T, F>(value: &Option<T>, bound: T, cond: F) -> Option<T> where\n T: Ord+Clone,\n F: FnOnce(&T, &T) -> bool\n{\n match value {\n &Some(ref x) if cond(x, &bound) => Some(x.clone()),\n _ => None\n }\n}\n\nimpl<T> ShrinkLeft<T> for Option<T> where\n T: Ord+Clone\n{\n fn shrink_left(&self, lb: T) -> Self {\n shrink_if(self, lb, |x, lb| x >= lb)\n }\n}\n\nimpl<T> ShrinkRight<T> for Option<T> where\n T: Ord+Clone\n{\n fn shrink_right(&self, ub: T) -> Self {\n shrink_if(self, ub, |x, ub| x <= ub)\n }\n}\n\nimpl<T> StrictShrinkLeft<T> for Option<T> where\n T: Ord+Clone\n{\n fn 
strict_shrink_left(&self, lb: T) -> Self {\n shrink_if(self, lb, |x, lb| x > lb)\n }\n}\n\nimpl<T> StrictShrinkRight<T> for Option<T> where\n T: Ord+Clone\n{\n fn strict_shrink_right(&self, ub: T) -> Self {\n shrink_if(self, ub, |x, ub| x < ub)\n }\n}\n\n#[allow(non_upper_case_globals)]\n#[cfg(test)]\nmod tests {\n use ncollections::ops::*;\n\n const empty: Option<i32> = None;\n const zero: Option<i32> = Some(0);\n const ten: Option<i32> = Some(10);\n\n #[test]\n fn cardinality_test() {\n assert_eq!(empty.size(), 0);\n assert_eq!(zero.size(), 1);\n assert_eq!(ten.size(), 1);\n assert!(empty.is_empty());\n assert!(!empty.is_singleton());\n assert!(!zero.is_empty());\n assert!(zero.is_singleton());\n }\n\n #[test]\n fn constructors_test() {\n assert_eq!(empty, Empty::empty());\n assert_eq!(zero, Singleton::singleton(0));\n }\n\n #[test]\n fn bound_test() {\n assert_eq!(zero.lower(), 0);\n assert_eq!(zero.upper(), 0);\n assert_eq!(ten.lower(), 10);\n assert_eq!(ten.upper(), 10);\n }\n\n #[test]\n #[should_panic]\n fn bound_upper_panic_test() {\n empty.upper();\n }\n\n #[test]\n #[should_panic]\n fn bound_lower_panic_test() {\n empty.lower();\n }\n\n #[test]\n fn intersection_test() {\n let sym_cases = vec![\n (empty, empty, empty),\n (empty, zero, empty),\n (zero, zero, zero),\n (zero, ten, empty),\n (ten, ten, ten)\n ];\n\n for (x,y,r) in sym_cases.into_iter() {\n assert!(x.intersection(&y) == r, \"{:?} intersection {:?} is not equal to {:?}\", x, y, r);\n assert!(y.intersection(&x) == r, \"{:?} intersection {:?} is not equal to {:?}\", y, x, r);\n }\n }\n\n #[test]\n fn difference_test() {\n let cases = vec![\n (empty, empty, empty, empty),\n (empty, zero, empty, zero),\n (zero, zero, empty, empty),\n (zero, ten, zero, ten),\n (ten, ten, empty, empty)\n ];\n\n for (x,y,r1,r2) in cases.into_iter() {\n assert!(x.difference(&y) == r1, \"{:?} difference {:?} is not equal to {:?}\", x, y, r1);\n assert!(y.difference(&x) == r2, \"{:?} difference {:?} is not equal to {:?}\", y, x, r2);\n }\n }\n\n #[test]\n fn is_disjoint_test() {\n let sym_cases = vec![\n (empty, empty, true),\n (empty, zero, true),\n (zero, zero, false),\n (zero, ten, true),\n (ten, ten, false)\n ];\n\n for (x,y,r) in sym_cases.into_iter() {\n assert!(x.is_disjoint(&y) == r, \"{:?} disjoint {:?} is not equal to {:?}\", x, y, r);\n assert!(y.is_disjoint(&x) == r, \"{:?} disjoint {:?} is not equal to {:?}\", y, x, r);\n }\n }\n\n #[test]\n fn contains_test() {\n let cases = vec![\n (empty, 0, false),\n (empty, 1, false),\n (zero, 0, true),\n (zero, 1, false),\n (ten, 9, false),\n (ten, 10, true)\n ];\n\n for (x,y,r) in cases.into_iter() {\n assert!(x.contains(&y) == r, \"{:?} contains {:?} is not equal to {:?}\", x, y, r);\n }\n }\n\n #[test]\n fn subset_test() {\n let cases = vec![\n (empty, empty, true, true),\n (empty, zero, true, false),\n (zero, zero, true, true),\n (zero, ten, false, false),\n (ten, ten, true, true)\n ];\n\n for (x,y,r1,r2) in cases.into_iter() {\n assert!(x.is_subset(&y) == r1, \"{:?} subset {:?} is not equal to {:?}\", x, y, r1);\n assert!(y.is_subset(&x) == r2, \"{:?} subset {:?} is not equal to {:?}\", y, x, r2);\n }\n }\n\n #[test]\n fn proper_subset_test() {\n let cases = vec![\n (empty, empty, false, false),\n (empty, zero, true, false),\n (zero, zero, false, false),\n (zero, ten, false, false),\n (ten, ten, false, false)\n ];\n\n for (x,y,r1,r2) in cases.into_iter() {\n assert!(x.is_proper_subset(&y) == r1, \"{:?} proper_subset {:?} is not equal to {:?}\", x, y, r1);\n assert!(y.is_proper_subset(&x) 
== r2, \"{:?} proper_subset {:?} is not equal to {:?}\", y, x, r2);\n }\n }\n\n #[test]\n fn overlap_test() {\n let sym_cases = vec![\n (empty, empty, false),\n (empty, zero, false),\n (zero, zero, true),\n (zero, ten, false),\n (ten, ten, true)\n ];\n\n for (x,y,r) in sym_cases.into_iter() {\n assert!(x.overlap(&y) == r, \"{:?} overlap {:?} is not equal to {:?}\", x, y, r);\n assert!(y.overlap(&x) == r, \"{:?} overlap {:?} is not equal to {:?}\", y, x, r);\n }\n }\n\n #[test]\n fn shrink_tests() {\n \/\/ First two elements are data. The next are resp. for shrink_left, shrink_right,\n \/\/ strict_shrink_left and strict_shrink_right.\n let cases = vec![\n (empty, 0, empty, empty, empty, empty),\n (empty, 1, empty, empty, empty, empty),\n (zero, 0, zero, zero, empty, empty),\n (zero, 1, empty, zero, empty, zero),\n (ten, 9, ten, empty, ten, empty),\n (ten, 10, ten, ten, empty, empty),\n (ten, 11, empty, ten, empty, ten),\n ];\n\n for (x,y,r1,r2,r3,r4) in cases.into_iter() {\n assert!(x.shrink_left(y) == r1, \"{:?} shrink_left {:?} is not equal to {:?}\", x, y, r1);\n assert!(x.shrink_right(y) == r2, \"{:?} shrink_right {:?} is not equal to {:?}\", x, y, r2);\n assert!(x.strict_shrink_left(y) == r3, \"{:?} strict_shrink_left {:?} is not equal to {:?}\", x, y, r3);\n assert!(x.strict_shrink_right(y) == r4, \"{:?} strict_shrink_right {:?} is not equal to {:?}\", x, y, r4);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for try operator on Result<commit_after>\/\/ run-pass\n\n#![feature(try_trait_v2)]\n#![feature(const_trait_impl)]\n#![feature(const_try)]\n#![feature(const_convert)]\n\nfn main() {\n const fn foo() -> Result<bool, ()> {\n Err(())?;\n Ok(true)\n }\n\n const FOO: Result<bool, ()> = foo();\n assert_eq!(Err(()), FOO);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rustdoc: Add a test for fixed issue #16019<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nmacro_rules! 
define_struct {\n ($rounds:expr) => (\n struct Struct {\n sk: [u32; $rounds + 1]\n }\n )\n}\n\ndefine_struct!(2);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reduce database lookup upon insertion<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Whoops (add java.rs)<commit_after>use std::io::{Write, Result};\nuse {Function, Type, TypeKind};\n\nfn to_camel_case(snake_case: &str) -> String {\n let mut camel_case = String::new();\n\n for (u, split) in snake_case.split('_').enumerate() {\n for (i, c) in split.char_indices() {\n if u != 0 && i == 0 {\n camel_case.extend(c.to_uppercase());\n } else {\n camel_case.push(c);\n }\n }\n }\n\n camel_case\n}\n\nfn get_type(ty: &Type) -> &str {\n match (ty.kind, ty.name.as_str()) {\n (TypeKind::Ref, \"c_char\") => \"String\",\n (TypeKind::Ref, _) |\n (TypeKind::RefMut, _) => \"Pointer\",\n (_, t) if !ty.is_custom => {\n match t {\n \"i8\" => \"byte\",\n \"i16\" => \"short\",\n \"i32\" => \"int\",\n \"i64\" => \"long\",\n \"u8\" => \"byte\",\n \"u16\" => \"short\",\n \"u32\" => \"int\",\n \"u64\" => \"long\",\n \"usize\" => \"NativeLong\", \/\/ Not really correct\n \"f32\" => \"float\",\n \"f64\" => \"double\",\n \"bool\" => \"byte\",\n \"()\" => \"void\",\n \"c_char\" => \"byte\",\n x => x,\n }\n }\n _ => \"Pointer\",\n }\n}\n\npub fn write<W: Write>(mut writer: W, functions: &[Function]) -> Result<()> {\n let mut prefix = String::from(\"\");\n\n write!(writer,\n \"{}\",\n r#\"package livesplitcore;\n\nimport com.sun.jna.*;\n\npublic interface LiveSplitCore extends Library {\n LiveSplitCore INSTANCE = (LiveSplitCore) Native.loadLibrary(\"livesplit-core\", LiveSplitCore.class);\n\"#)?;\n\n for function in functions {\n let name = function.name.to_string();\n let mut splits = name.splitn(2, '_');\n let new_prefix = splits.next().unwrap();\n if !prefix.is_empty() && new_prefix != prefix {\n writeln!(writer, \"\")?;\n }\n prefix = new_prefix.to_string();\n\n write!(writer,\n r#\"\n {} {}(\"#,\n get_type(&function.output),\n &function.name)?;\n\n for (i, &(ref name, ref typ)) in function.inputs.iter().enumerate() {\n if i != 0 {\n write!(writer, \", \")?;\n }\n write!(writer,\n \"{} {}\",\n get_type(typ),\n if name == \"this\" {\n String::from(\"self\")\n } else {\n to_camel_case(name)\n })?;\n }\n\n write!(writer, \");\")?;\n }\n\n writeln!(writer,\n \"{}\",\n r#\"\n}\"#)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>send reset command to start the game environment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>refactor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style refactors from code review<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Started part B for day 9 [WIP]<commit_after>use regex::Regex;\n\npub fn solve(data: &str) {\n\n\n println!(\"{}\", \"------------- [ PART B ] -------------\");\n println!(\"NOOT NOOT! 
its da sound of da pingu 🐧\");\n println!(\"{}\", \"--------------------------------------\");\n}\n\npub struct Node {\n content_length: usize,\n multiplier: usize,\n nodes: Vec<Node>\n}\n\nimpl Node {\n pub fn len(&self) -> usize {\n self.multiplier * (self.content_length + self.nodes.iter().fold(0, {|v, n| v + n.len() }))\n }\n}\n\n#[cfg(test)]\nmod node_tests {\n use part_b::Node;\n\n #[test]\n fn test_solve_without_subnodes() {\n let subject = Node { content_length: 10, multiplier: 3, nodes: Vec::new() };\n assert_eq!(subject.len(), 30);\n }\n\n #[test]\n fn test_solve_with_one_level_subnodes() {\n \/\/ example: (22x11)(3x5)ICQ(9x5)IYUPTHPKX\n\n let subnodes = vec![\n Node { content_length: 3, multiplier: 5, nodes: Vec::new() },\n Node { content_length: 9, multiplier: 5, nodes: Vec::new() },\n ];\n\n let subject = Node {\n content_length: 0,\n multiplier: 22,\n nodes: subnodes\n };\n\n assert_eq!(subject.len(), 1320);\n }\n\n fn test_solve_with_multi_level_subnodes() {\n \/\/ example: (answer should be 5220)\n \/\/ (127x2) => (1098 + 140 + 1372) * 2 = 5220\n \/\/ (41x6) => (144 + 39) * 6 = 1098\n \/\/ (16x9)SIUZCKMFZFXKUYTQ => 144\n \/\/ (13x3)YBCVHJPPFAONV => 39\n \/\/ (10x14)BTRWBQRUHA => 10 * 14 = 140\n \/\/ (57x4) => (180 + 55 + 72 + 36) * 4 = 1372\n \/\/ (12x15)ZUMPYOEOOBFW => 180\n \/\/ (5x11)YNLIJ => 55\n \/\/ (8x9)GBQFPTOH => 72\n \/\/ (9x3)GPFCSAPZD => 36\n\n let subject = Node {\n content_length: 0,\n multiplier: 2,\n nodes: vec![\n Node {\n content_length: 0,\n multiplier: 6,\n nodes: vec![\n Node { content_length: 16, multiplier: 9, nodes: Vec::new() },\n Node { content_length: 13, multiplier: 3, nodes: Vec::new() },\n ]\n },\n\n Node {\n content_length: 10,\n multiplier: 14,\n nodes: Vec::new()\n },\n\n Node {\n content_length: 0,\n multiplier: 4,\n nodes: vec![\n Node { content_length: 12, multiplier: 15, nodes: Vec::new() },\n Node { content_length: 5, multiplier: 11, nodes: Vec::new() },\n Node { content_length: 8, multiplier: 9, nodes: Vec::new() },\n Node { content_length: 9, multiplier: 3, nodes: Vec::new() },\n ]\n },\n ]\n };\n\n assert_eq!(subject.len(), 5220);\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor(scanner): use shorthand for property assign<commit_after><|endoftext|>"} {"text":"<commit_before>use std::fmt;\nuse std::str::FromStr;\nuse header::{Header, HeaderFormat};\nuse super::util::{from_one_comma_delimited, fmt_comma_delimited};\n\n\/\/\/ The Cache-Control header.\n#[deriving(PartialEq, Clone, Show)]\npub struct CacheControl(pub Vec<CacheDirective>);\n\nderef!(CacheControl -> Vec<CacheDirective>);\n\nimpl Header for CacheControl {\n fn header_name(_: Option<CacheControl>) -> &'static str {\n \"Cache-Control\"\n }\n\n fn parse_header(raw: &[Vec<u8>]) -> Option<CacheControl> {\n let directives = raw.iter()\n .filter_map(|line| from_one_comma_delimited(line[]))\n .collect::<Vec<Vec<CacheDirective>>>()\n .concat_vec();\n if directives.len() > 0 {\n Some(CacheControl(directives))\n } else {\n None\n }\n }\n}\n\nimpl HeaderFormat for CacheControl {\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt_comma_delimited(fmt, self[])\n }\n}\n\n\/\/\/ CacheControl contains a list of these directives.\n#[deriving(PartialEq, Clone)]\npub enum CacheDirective {\n \/\/\/ \"no-cache\"\n NoCache,\n \/\/\/ \"no-store\"\n NoStore,\n \/\/\/ \"no-transform\"\n NoTransform,\n \/\/\/ \"only-if-cached\"\n OnlyIfCached,\n\n \/\/ request directives\n \/\/\/ \"max-age=delta\"\n MaxAge(uint),\n \/\/\/ \"max-stale=delta\"\n 
MaxStale(uint),\n \/\/\/ \"min-fresh=delta\"\n MinFresh(uint),\n\n \/\/ response directives\n \/\/\/ \"must-revalidate\"\n MustRevalidate,\n \/\/\/ \"public\"\n Public,\n \/\/\/ \"private\"\n Private,\n \/\/\/ \"proxy-revalidate\"\n ProxyRevalidate,\n \/\/\/ \"s-maxage=delta\"\n SMaxAge(uint),\n\n \/\/\/ Extension directives. Optionally include an argument.\n Extension(String, Option<String>)\n}\n\nimpl fmt::Show for CacheDirective {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n use self::CacheDirective::*;\n match *self {\n NoCache => \"no-cache\",\n NoStore => \"no-store\",\n NoTransform => \"no-transform\",\n OnlyIfCached => \"only-if-cached\",\n\n MaxAge(secs) => return write!(f, \"max-age={}\", secs),\n MaxStale(secs) => return write!(f, \"max-stale={}\", secs),\n MinFresh(secs) => return write!(f, \"min-fresh={}\", secs),\n\n MustRevalidate => \"must-revalidate\",\n Public => \"public\",\n Private => \"private\",\n ProxyRevalidate => \"proxy-revalidate\",\n SMaxAge(secs) => return write!(f, \"s-maxage={}\", secs),\n\n Extension(ref name, None) => name[],\n Extension(ref name, Some(ref arg)) => return write!(f, \"{}={}\", name, arg),\n\n }.fmt(f)\n }\n}\n\nimpl FromStr for CacheDirective {\n fn from_str(s: &str) -> Option<CacheDirective> {\n use self::CacheDirective::*;\n match s {\n \"no-cache\" => Some(NoCache),\n \"no-store\" => Some(NoStore),\n \"no-transform\" => Some(NoTransform),\n \"only-if-cached\" => Some(OnlyIfCached),\n \"must-revalidate\" => Some(MustRevalidate),\n \"public\" => Some(Public),\n \"private\" => Some(Private),\n \"proxy-revalidate\" => Some(ProxyRevalidate),\n \"\" => None,\n _ => match s.find('=') {\n Some(idx) if idx+1 < s.len() => match (s[..idx], s[idx+1..].trim_chars('\"')) {\n (\"max-age\" , secs) => secs.parse().map(MaxAge),\n (\"max-stale\", secs) => secs.parse().map(MaxStale),\n (\"min-fresh\", secs) => secs.parse().map(MinFresh),\n (\"s-maxage\", secs) => secs.parse().map(SMaxAge),\n (left, right) => Some(Extension(left.to_string(), Some(right.to_string())))\n },\n Some(_) => None,\n None => Some(Extension(s.to_string(), None))\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::Header;\n use super::*;\n\n #[test]\n fn test_parse_multiple_headers() {\n let cache = Header::parse_header(&[b\"no-cache\".to_vec(), b\"private\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::NoCache,\n CacheDirective::Private])))\n }\n\n #[test]\n fn test_parse_argument() {\n let cache = Header::parse_header(&[b\"max-age=100, private\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::MaxAge(100),\n CacheDirective::Private])))\n }\n\n #[test]\n fn test_parse_quote_form() {\n let cache = Header::parse_header(&[b\"max-age=\\\"200\\\"\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::MaxAge(200)])))\n }\n\n #[test]\n fn test_parse_extension() {\n let cache = Header::parse_header(&[b\"foo, bar=baz\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::Extension(\"foo\".to_string(), None),\n CacheDirective::Extension(\"bar\".to_string(), Some(\"baz\".to_string()))])))\n }\n\n #[test]\n fn test_parse_bad_syntax() {\n let cache: Option<CacheControl> = Header::parse_header(&[b\"foo=\".to_vec()]);\n assert_eq!(cache, None)\n }\n}\n\nbench_header!(normal, CacheControl, { vec![b\"no-cache, private\".to_vec(), b\"max-age=100\".to_vec()] });\n\n<commit_msg>Replaced deprecated reference to trim_chars with trim_matches<commit_after>use std::fmt;\nuse std::str::FromStr;\nuse 
header::{Header, HeaderFormat};\nuse super::util::{from_one_comma_delimited, fmt_comma_delimited};\n\n\/\/\/ The Cache-Control header.\n#[deriving(PartialEq, Clone, Show)]\npub struct CacheControl(pub Vec<CacheDirective>);\n\nderef!(CacheControl -> Vec<CacheDirective>);\n\nimpl Header for CacheControl {\n fn header_name(_: Option<CacheControl>) -> &'static str {\n \"Cache-Control\"\n }\n\n fn parse_header(raw: &[Vec<u8>]) -> Option<CacheControl> {\n let directives = raw.iter()\n .filter_map(|line| from_one_comma_delimited(line[]))\n .collect::<Vec<Vec<CacheDirective>>>()\n .concat_vec();\n if directives.len() > 0 {\n Some(CacheControl(directives))\n } else {\n None\n }\n }\n}\n\nimpl HeaderFormat for CacheControl {\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt_comma_delimited(fmt, self[])\n }\n}\n\n\/\/\/ CacheControl contains a list of these directives.\n#[deriving(PartialEq, Clone)]\npub enum CacheDirective {\n \/\/\/ \"no-cache\"\n NoCache,\n \/\/\/ \"no-store\"\n NoStore,\n \/\/\/ \"no-transform\"\n NoTransform,\n \/\/\/ \"only-if-cached\"\n OnlyIfCached,\n\n \/\/ request directives\n \/\/\/ \"max-age=delta\"\n MaxAge(uint),\n \/\/\/ \"max-stale=delta\"\n MaxStale(uint),\n \/\/\/ \"min-fresh=delta\"\n MinFresh(uint),\n\n \/\/ response directives\n \/\/\/ \"must-revalidate\"\n MustRevalidate,\n \/\/\/ \"public\"\n Public,\n \/\/\/ \"private\"\n Private,\n \/\/\/ \"proxy-revalidate\"\n ProxyRevalidate,\n \/\/\/ \"s-maxage=delta\"\n SMaxAge(uint),\n\n \/\/\/ Extension directives. Optionally include an argument.\n Extension(String, Option<String>)\n}\n\nimpl fmt::Show for CacheDirective {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n use self::CacheDirective::*;\n match *self {\n NoCache => \"no-cache\",\n NoStore => \"no-store\",\n NoTransform => \"no-transform\",\n OnlyIfCached => \"only-if-cached\",\n\n MaxAge(secs) => return write!(f, \"max-age={}\", secs),\n MaxStale(secs) => return write!(f, \"max-stale={}\", secs),\n MinFresh(secs) => return write!(f, \"min-fresh={}\", secs),\n\n MustRevalidate => \"must-revalidate\",\n Public => \"public\",\n Private => \"private\",\n ProxyRevalidate => \"proxy-revalidate\",\n SMaxAge(secs) => return write!(f, \"s-maxage={}\", secs),\n\n Extension(ref name, None) => name[],\n Extension(ref name, Some(ref arg)) => return write!(f, \"{}={}\", name, arg),\n\n }.fmt(f)\n }\n}\n\nimpl FromStr for CacheDirective {\n fn from_str(s: &str) -> Option<CacheDirective> {\n use self::CacheDirective::*;\n match s {\n \"no-cache\" => Some(NoCache),\n \"no-store\" => Some(NoStore),\n \"no-transform\" => Some(NoTransform),\n \"only-if-cached\" => Some(OnlyIfCached),\n \"must-revalidate\" => Some(MustRevalidate),\n \"public\" => Some(Public),\n \"private\" => Some(Private),\n \"proxy-revalidate\" => Some(ProxyRevalidate),\n \"\" => None,\n _ => match s.find('=') {\n Some(idx) if idx+1 < s.len() => match (s[..idx], s[idx+1..].trim_matches('\"')) {\n (\"max-age\" , secs) => secs.parse().map(MaxAge),\n (\"max-stale\", secs) => secs.parse().map(MaxStale),\n (\"min-fresh\", secs) => secs.parse().map(MinFresh),\n (\"s-maxage\", secs) => secs.parse().map(SMaxAge),\n (left, right) => Some(Extension(left.to_string(), Some(right.to_string())))\n },\n Some(_) => None,\n None => Some(Extension(s.to_string(), None))\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::Header;\n use super::*;\n\n #[test]\n fn test_parse_multiple_headers() {\n let cache = Header::parse_header(&[b\"no-cache\".to_vec(), b\"private\".to_vec()]);\n 
assert_eq!(cache, Some(CacheControl(vec![CacheDirective::NoCache,\n CacheDirective::Private])))\n }\n\n #[test]\n fn test_parse_argument() {\n let cache = Header::parse_header(&[b\"max-age=100, private\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::MaxAge(100),\n CacheDirective::Private])))\n }\n\n #[test]\n fn test_parse_quote_form() {\n let cache = Header::parse_header(&[b\"max-age=\\\"200\\\"\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::MaxAge(200)])))\n }\n\n #[test]\n fn test_parse_extension() {\n let cache = Header::parse_header(&[b\"foo, bar=baz\".to_vec()]);\n assert_eq!(cache, Some(CacheControl(vec![CacheDirective::Extension(\"foo\".to_string(), None),\n CacheDirective::Extension(\"bar\".to_string(), Some(\"baz\".to_string()))])))\n }\n\n #[test]\n fn test_parse_bad_syntax() {\n let cache: Option<CacheControl> = Header::parse_header(&[b\"foo=\".to_vec()]);\n assert_eq!(cache, None)\n }\n}\n\nbench_header!(normal, CacheControl, { vec![b\"no-cache, private\".to_vec(), b\"max-age=100\".to_vec()] });\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a non-regression test for issue #8372<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ The `for` loop use to keep a mutable borrow when executing its body,\n\/\/ making it impossible to re-use the iterator as follows.\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/8372\n\/\/\n\/\/ This was fixed in https:\/\/github.com\/rust-lang\/rust\/pull\/15809\n\npub fn main() {\n let mut for_loop_values = Vec::new();\n let mut explicit_next_call_values = Vec::new();\n\n let mut iter = range(1i, 10);\n for i in iter {\n for_loop_values.push(i);\n explicit_next_call_values.push(iter.next());\n }\n\n assert_eq!(for_loop_values, vec![1, 3, 5, 7, 9]);\n assert_eq!(explicit_next_call_values, vec![Some(2), Some(4), Some(6), Some(8), None]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #1994 - RalfJung:unaligned-ptr-test, r=RalfJung<commit_after>\/\/ This should fail even without validation or Stacked Borrows.\n\/\/ compile-flags: -Zmiri-disable-validation -Zmiri-disable-stacked-borrows\n\nfn main() {\n \/\/ Make sure we notice when a u16 is loaded at offset 1 into a u8 allocation.\n \/\/ (This would be missed if u8 allocations are *always* at odd addresses.)\n for _ in 0..10 { \/\/ Try many times as this might work by chance.\n let x = [0u8; 4];\n let ptr = x.as_ptr().wrapping_offset(1).cast::<u16>();\n let _val = unsafe { *ptr }; \/\/~ERROR but alignment\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fmt;\nuse std::str::FromStr;\n\n\/\/\/ The edition of the compiler (RFC 2052)\n#[derive(Clone, Copy, Hash, PartialOrd, Ord, Eq, PartialEq, Debug)]\n#[non_exhaustive]\npub enum Edition {\n \/\/ editions must be kept in order, newest to oldest\n\n \/\/\/ The 2015 edition\n Edition2015,\n \/\/\/ The 2018 edition\n Edition2018,\n\n \/\/ when adding new editions, be sure to update:\n \/\/\n \/\/ - Update the `ALL_EDITIONS` const\n \/\/ - Update the EDITION_NAME_LIST const\n \/\/ - add a `rust_####()` function to the session\n \/\/ - update the enum in Cargo's sources as well\n}\n\n\/\/ must be in order from oldest to newest\npub const ALL_EDITIONS: &[Edition] = &[Edition::Edition2015, Edition::Edition2018];\n\npub const EDITION_NAME_LIST: &'static str = \"2015|2018\";\n\npub const DEFAULT_EDITION: Edition = Edition::Edition2015;\n\nimpl fmt::Display for Edition {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let s = match *self {\n Edition::Edition2015 => \"2015\",\n Edition::Edition2018 => \"2018\",\n };\n write!(f, \"{}\", s)\n }\n}\n\nimpl Edition {\n pub fn lint_name(&self) -> &'static str {\n match *self {\n Edition::Edition2015 => \"edition_2015\",\n Edition::Edition2018 => \"edition_2018\",\n }\n }\n\n pub fn feature_name(&self) -> &'static str {\n match *self {\n Edition::Edition2015 => \"rust_2015_preview\",\n Edition::Edition2018 => \"rust_2018_preview\",\n }\n }\n\n pub fn is_stable(&self) -> bool {\n match *self {\n Edition::Edition2015 => true,\n Edition::Edition2018 => false,\n }\n }\n}\n\nimpl FromStr for Edition {\n type Err = ();\n fn from_str(s: &str) -> Result<Self, ()> {\n match s {\n \"2015\" => Ok(Edition::Edition2015),\n \"2018\" => Ok(Edition::Edition2018),\n _ => Err(())\n }\n }\n}\n<commit_msg>Rename breakage lints<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fmt;\nuse std::str::FromStr;\n\n\/\/\/ The edition of the compiler (RFC 2052)\n#[derive(Clone, Copy, Hash, PartialOrd, Ord, Eq, PartialEq, Debug)]\n#[non_exhaustive]\npub enum Edition {\n \/\/ editions must be kept in order, newest to oldest\n\n \/\/\/ The 2015 edition\n Edition2015,\n \/\/\/ The 2018 edition\n Edition2018,\n\n \/\/ when adding new editions, be sure to update:\n \/\/\n \/\/ - Update the `ALL_EDITIONS` const\n \/\/ - Update the EDITION_NAME_LIST const\n \/\/ - add a `rust_####()` function to the session\n \/\/ - update the enum in Cargo's sources as well\n}\n\n\/\/ must be in order from oldest to newest\npub const ALL_EDITIONS: &[Edition] = &[Edition::Edition2015, Edition::Edition2018];\n\npub const EDITION_NAME_LIST: &'static str = \"2015|2018\";\n\npub const DEFAULT_EDITION: Edition = Edition::Edition2015;\n\nimpl fmt::Display for Edition {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let s = match *self {\n Edition::Edition2015 => \"2015\",\n Edition::Edition2018 => \"2018\",\n };\n write!(f, \"{}\", s)\n }\n}\n\nimpl Edition {\n pub fn lint_name(&self) -> &'static str {\n match *self {\n Edition::Edition2015 => \"rust_2015_breakage\",\n Edition::Edition2018 => \"rust_2018_breakage\",\n }\n }\n\n pub fn feature_name(&self) -> &'static str {\n match *self {\n Edition::Edition2015 => \"rust_2015_preview\",\n Edition::Edition2018 => \"rust_2018_preview\",\n }\n }\n\n pub fn is_stable(&self) -> bool {\n match *self {\n Edition::Edition2015 => true,\n Edition::Edition2018 => false,\n }\n }\n}\n\nimpl FromStr for Edition {\n type Err = ();\n fn from_str(s: &str) -> Result<Self, ()> {\n match s {\n \"2015\" => Ok(Edition::Edition2015),\n \"2018\" => Ok(Edition::Edition2018),\n _ => Err(())\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate futures;\nextern crate env_logger;\nextern crate futures_mio;\nextern crate futures_tls;\n\n#[macro_use]\nextern crate cfg_if;\n\nuse std::io::Error;\nuse std::net::ToSocketAddrs;\n\nuse futures::Future;\nuse futures_tls::ClientContext;\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\ncfg_if! 
{\n if #[cfg(any(feature = \"force-openssl\",\n all(not(target_os = \"macos\"),\n not(target_os = \"windows\"))))] {\n extern crate openssl;\n\n fn get(err: &Error) -> &openssl::error::ErrorStack {\n let err = err.get_ref().unwrap();\n match *err.downcast_ref::<ossl::error::Error>().unwrap() {\n ossl::Error::Ssl(ref v) => v,\n ref e => panic!(\"not an ssl eror: {:?}\", e),\n }\n }\n\n fn verify_failed(err: &Error) {\n assert!(get(err).errors().iter().any(|e| {\n e.reason() == \"certificate verify failed\"\n }), \"bad errors: {:?}\", err);\n }\n\n use verify_failed as assert_expired_error;\n use verify_failed as assert_wrong_host;\n use verify_failed as assert_self_signed;\n use verify_failed as assert_untrusted_root;\n } else if #[cfg(target_os = \"macos\")] {\n extern crate security_framework;\n\n use security_framework::base::Error as SfError;\n\n fn assert_invalid_cert_chain(err: &Error) {\n let err = err.get_ref().unwrap();\n let err = err.downcast_ref::<SfError>().unwrap();\n assert_eq!(err.message().unwrap(), \"invalid certificate chain\");\n }\n\n use assert_invalid_cert_chain as assert_expired_error;\n use assert_invalid_cert_chain as assert_wrong_host;\n use assert_invalid_cert_chain as assert_self_signed;\n use assert_invalid_cert_chain as assert_untrusted_root;\n } else {\n extern crate winapi;\n\n fn assert_expired_error(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_EXPIRED as usize);\n }\n\n fn assert_wrong_host(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_CN_NO_MATCH as usize);\n }\n\n fn assert_self_signed(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_UNTRUSTEDROOT as usize);\n }\n\n fn assert_untrusted_root(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_UNTRUSTEDROOT as usize);\n }\n }\n}\n\nfn get_host(host: &'static str) -> Error {\n drop(env_logger::init());\n\n let addr = format!(\"{}:443\", host);\n let addr = t!(addr.to_socket_addrs()).next().unwrap();\n\n let mut l = t!(futures_mio::Loop::new());\n let client = l.handle().tcp_connect(&addr);\n let data = client.and_then(move |socket| {\n t!(ClientContext::new()).handshake(host, socket)\n });\n\n let res = l.run(data);\n assert!(res.is_err());\n res.err().unwrap()\n}\n\n#[test]\nfn expired() {\n assert_expired_error(&get_host(\"expired.badssl.com\"))\n}\n\n\/\/ TODO: the OSX builders on Travis apparently fail this tests spuriously?\n\/\/ passes locally though? Seems... bad!\n#[test]\n#[cfg_attr(all(target_os = \"macos\", feature = \"force-openssl\"), ignore)]\nfn wrong_host() {\n assert_wrong_host(&get_host(\"wrong.host.badssl.com\"))\n}\n\n#[test]\nfn self_signed() {\n assert_self_signed(&get_host(\"self-signed.badssl.com\"))\n}\n\n#[test]\nfn untrusted_root() {\n assert_untrusted_root(&get_host(\"untrusted-root.badssl.com\"))\n}\n<commit_msg>Fix compile against openssl<commit_after>extern crate futures;\nextern crate env_logger;\nextern crate futures_mio;\nextern crate futures_tls;\n\n#[macro_use]\nextern crate cfg_if;\n\nuse std::io::Error;\nuse std::net::ToSocketAddrs;\n\nuse futures::Future;\nuse futures_tls::ClientContext;\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\ncfg_if! 
{\n if #[cfg(any(feature = \"force-openssl\",\n all(not(target_os = \"macos\"),\n not(target_os = \"windows\"))))] {\n extern crate openssl;\n\n use openssl::ssl;\n\n fn get(err: &Error) -> &openssl::error::ErrorStack {\n let err = err.get_ref().unwrap();\n match *err.downcast_ref::<ssl::error::Error>().unwrap() {\n ssl::Error::Ssl(ref v) => v,\n ref e => panic!(\"not an ssl eror: {:?}\", e),\n }\n }\n\n fn verify_failed(err: &Error) {\n assert!(get(err).errors().iter().any(|e| {\n e.reason() == \"certificate verify failed\"\n }), \"bad errors: {:?}\", err);\n }\n\n use verify_failed as assert_expired_error;\n use verify_failed as assert_wrong_host;\n use verify_failed as assert_self_signed;\n use verify_failed as assert_untrusted_root;\n } else if #[cfg(target_os = \"macos\")] {\n extern crate security_framework;\n\n use security_framework::base::Error as SfError;\n\n fn assert_invalid_cert_chain(err: &Error) {\n let err = err.get_ref().unwrap();\n let err = err.downcast_ref::<SfError>().unwrap();\n assert_eq!(err.message().unwrap(), \"invalid certificate chain\");\n }\n\n use assert_invalid_cert_chain as assert_expired_error;\n use assert_invalid_cert_chain as assert_wrong_host;\n use assert_invalid_cert_chain as assert_self_signed;\n use assert_invalid_cert_chain as assert_untrusted_root;\n } else {\n extern crate winapi;\n\n fn assert_expired_error(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_EXPIRED as usize);\n }\n\n fn assert_wrong_host(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_CN_NO_MATCH as usize);\n }\n\n fn assert_self_signed(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_UNTRUSTEDROOT as usize);\n }\n\n fn assert_untrusted_root(err: &Error) {\n let code = err.raw_os_error().unwrap();\n assert_eq!(code as usize, winapi::CERT_E_UNTRUSTEDROOT as usize);\n }\n }\n}\n\nfn get_host(host: &'static str) -> Error {\n drop(env_logger::init());\n\n let addr = format!(\"{}:443\", host);\n let addr = t!(addr.to_socket_addrs()).next().unwrap();\n\n let mut l = t!(futures_mio::Loop::new());\n let client = l.handle().tcp_connect(&addr);\n let data = client.and_then(move |socket| {\n t!(ClientContext::new()).handshake(host, socket)\n });\n\n let res = l.run(data);\n assert!(res.is_err());\n res.err().unwrap()\n}\n\n#[test]\nfn expired() {\n assert_expired_error(&get_host(\"expired.badssl.com\"))\n}\n\n\/\/ TODO: the OSX builders on Travis apparently fail this tests spuriously?\n\/\/ passes locally though? Seems... 
bad!\n#[test]\n#[cfg_attr(all(target_os = \"macos\", feature = \"force-openssl\"), ignore)]\nfn wrong_host() {\n assert_wrong_host(&get_host(\"wrong.host.badssl.com\"))\n}\n\n#[test]\nfn self_signed() {\n assert_self_signed(&get_host(\"self-signed.badssl.com\"))\n}\n\n#[test]\nfn untrusted_root() {\n assert_untrusted_root(&get_host(\"untrusted-root.badssl.com\"))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adjust HLL precision<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Sketch of App container<commit_after>use callback::AppId;\nuse core::intrinsics::{volatile_load, volatile_store};\nuse core::marker::PhantomData;\nuse core::mem::size_of;\nuse core::raw::Repr;\nuse mem::{AppPtr, Private};\nuse process;\n\npub struct Container<T: Default> {\n container_num: usize,\n ptr: PhantomData<T>\n}\n\npub enum Error {\n NoSuchApp,\n OutOfMemory\n}\n\nstatic mut CONTAINER_COUNTER : usize = 0;\n\npub struct Allocator<'a> {\n app: &'a mut process::Process<'a>,\n app_id: AppId\n}\n\nimpl<'a> Allocator<'a> {\n pub fn alloc<T>(&mut self, data: T) -> Result<AppPtr<Private, T>, Error> {\n unsafe {\n let appid = self.app_id;\n self.app.alloc(size_of::<T>()).map_or(Err(Error::OutOfMemory),\n |arr| {\n Ok(AppPtr::new(arr.repr().data as *mut T, appid))\n })\n }\n }\n}\n\nimpl<T: Default> Container<T> {\n pub unsafe fn create() -> Container<T> {\n let ctr = volatile_load(&CONTAINER_COUNTER);\n volatile_store(&mut CONTAINER_COUNTER, ctr + 1);\n Container {\n container_num: ctr,\n ptr: PhantomData\n }\n }\n\n pub fn enter<F, R>(&self, appid: AppId, fun: F) -> Result<R, Error>\n where F: Fn(&mut AppPtr<Private, T>, &mut Allocator) -> R, R: Copy {\n unsafe {\n match process::PROCS[appid.idx()] {\n Some(ref mut app) => {\n app.container_for(self.container_num).or_else(|| {\n app.alloc(size_of::<T>()).map(|root_arr| {\n root_arr.repr().data as *mut _\n })\n }).map_or(Err(Error::OutOfMemory), move |root_ptr| {\n let mut root = AppPtr::new(root_ptr as *mut _, appid);\n let mut allocator = Allocator { app: app, app_id: appid };\n let res = fun(&mut root, &mut allocator);\n Ok(res)\n })\n },\n None => Err(Error::NoSuchApp)\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs: add refcell_view example<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse core::ops::{Deref, DerefMut};\nuse core::{mem, slice};\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(packed)]\npub struct Stat {\n pub st_dev: u64,\n pub st_ino: u64,\n pub st_mode: u16,\n pub st_nlink: u32,\n pub st_uid: u32,\n pub st_gid: u32,\n pub st_size: u64,\n pub st_blksize: u32,\n pub st_blocks: u64,\n pub st_mtime: u64,\n pub st_mtime_nsec: u32,\n pub st_atime: u64,\n pub st_atime_nsec: u32,\n pub st_ctime: u64,\n pub st_ctime_nsec: u32,\n}\n\nimpl Deref for Stat {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const Stat as *const u8,\n mem::size_of::<Stat>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for Stat {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut Stat as *mut u8,\n mem::size_of::<Stat>()) as &mut [u8]\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(packed)]\npub struct StatVfs {\n pub f_bsize: u32,\n pub f_blocks: u64,\n pub f_bfree: u64,\n pub f_bavail: u64,\n}\n\nimpl Deref for StatVfs {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const StatVfs as *const u8,\n mem::size_of::<StatVfs>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for StatVfs {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut StatVfs as *mut u8,\n mem::size_of::<StatVfs>()) as &mut [u8]\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(packed)]\npub struct TimeSpec {\n pub tv_sec: i64,\n pub tv_nsec: i32,\n}\n\nimpl Deref for TimeSpec {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const TimeSpec as *const u8,\n mem::size_of::<TimeSpec>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for TimeSpec {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut TimeSpec as *mut u8,\n mem::size_of::<TimeSpec>()) as &mut [u8]\n }\n }\n}\n<commit_msg>Switch to repr(C) for syscall structs<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse core::ops::{Deref, DerefMut};\nuse core::{mem, slice};\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(C)]\npub struct Stat {\n pub st_dev: u64,\n pub st_ino: u64,\n pub st_mode: u16,\n pub st_nlink: u32,\n pub st_uid: u32,\n pub st_gid: u32,\n pub st_size: u64,\n pub st_blksize: u32,\n pub st_blocks: u64,\n pub st_mtime: u64,\n pub st_mtime_nsec: u32,\n pub st_atime: u64,\n pub st_atime_nsec: u32,\n pub st_ctime: u64,\n pub st_ctime_nsec: u32,\n}\n\nimpl Deref for Stat {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const Stat as *const u8,\n mem::size_of::<Stat>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for Stat {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut Stat as *mut u8,\n mem::size_of::<Stat>()) as &mut [u8]\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(C)]\npub struct StatVfs {\n pub f_bsize: u32,\n pub f_blocks: u64,\n pub f_bfree: u64,\n pub f_bavail: u64,\n}\n\nimpl Deref for StatVfs {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const StatVfs as *const u8,\n mem::size_of::<StatVfs>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for StatVfs {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut StatVfs as *mut u8,\n mem::size_of::<StatVfs>()) as &mut [u8]\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, Default)]\n#[repr(C)]\npub struct TimeSpec {\n pub tv_sec: i64,\n pub tv_nsec: i32,\n}\n\nimpl Deref for TimeSpec {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe {\n slice::from_raw_parts(self as *const TimeSpec as *const u8,\n mem::size_of::<TimeSpec>()) as &[u8]\n }\n }\n}\n\nimpl DerefMut for TimeSpec {\n fn deref_mut(&mut self) -> &mut [u8] {\n unsafe {\n slice::from_raw_parts_mut(self as *mut TimeSpec as *mut u8,\n mem::size_of::<TimeSpec>()) as &mut [u8]\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2135<commit_after>\/\/ https:\/\/leetcode.com\/problems\/count-words-obtained-after-adding-a-letter\/\npub fn word_count(start_words: Vec<String>, target_words: Vec<String>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\n \"{}\",\n word_count(\n vec![\"ant\".to_string(), \"act\".to_string(), \"tack\".to_string()],\n vec![\"tack\".to_string(), \"act\".to_string(), \"acti\".to_string()]\n )\n ); \/\/ 2\n println!(\n \"{}\",\n word_count(\n vec![\"ab\".to_string(), \"a\".to_string()],\n vec![\"abc\".to_string(), \"abcd\".to_string()]\n )\n ); \/\/ 1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Small guide and FAQ for people coming from a bitcoin background<commit_after># Grin\/MimbleWimble for Bitcoiners\n\n## Privacy and Fungibility\n\nThere are 3 main properties of Grin transactions that make them private:\n\n1. There are no addresses.\n2. There are no amounts.\n3. 2 transactions, one spending the other, can be merged in a block to form only one, removing all intermediary information.\n\nThe 2 first properties mean that all transactions look the same. Unless you directly participated in the transaction, all inputs and outputs look like random pieces of data (in lingo, they all look like random curve points).\n\n## Scalability\n\nAs explained in the previous section, thanks to the MimbleWimble transaction and block format we can merge transactions when an output is directly spent by the input of another. 
It's as if when Alice gives money to Bob, and then Bob gives it all to Carol, Bob was never involved and his transaction is actually never even seen on the blockchain.\n\nPushing that further, between blocks, most outputs end up being spent sooner or later by another input. So *all spent outputs can be safely removed*. And the whole blockchain can be stored, downloaded and fully verified in just a few gigabytes or less (assuming a number of transactions similar to bitcoin).\n\nWhat this all means is the Grin blockchain scales with the number of users (unspent outputs), not the number of transactions.\n\n## Scripting\n\nMaybe you've heard that MimbleWimble doesn't support scripts. And in some way, that's true. But thanks to cryptographic trickery, many contracts that in bitcoin would require a script can be achieved with Grin using properties of Elliptic Curve Cryptography. So far, we know how to do:\n\n* Multi-signature transactions.\n* Atomic swaps.\n* Time-locked transaction and outputs.\n* Lightning Network\n\n## FAQ\n\n### Wait, what!? No address?\n\nNope, no address. All outputs in Grin are unique and have no common data with any previous output. Instead of relying on a known address to send money, transactions have to be built interactively, with 2 (or more) wallets exchanging data with one another. Practically, this isn't so much of a problem as there are multiple ways for 2 programs to interact privately and securely.\n\n### If transactions information get removed, can't I just cheat and create money?\n\nNo, and this is where MimbleWimble and Grin shine. Confidential transactions are a form of [homomorphic encryption](https:\/\/en.wikipedia.org\/wiki\/Homomorphic_encryption). Without revealing any amount, Grin can verify that the sum of all transaction inputs equal the sum of transaction outputs, plus the fee. Going even further, comparing the sum of all money created by mining with the total sum of money that's being held, Grin nodes can check the correctness of the total money supply.\n\n### If I listen to transaction relay, can I just figure out who they belong to before being cut-through?\n\nYou can figure out which outputs are being spent by which transaction. But the trail of data stops here. All inputs and outputs look like random pieces of data, so you can't tell if the money was transferred, still belongs to the same person, which output is the actual transfer and which is the change, etc. Grin transactions are built with *no identifiable piece of information*.\n\n### What about the quantum computaggedon\n\nIn every Grin output, we also include a bit of hashed data, which is quantum safe. 
If quantum computing was to become a reality, we can safely introduce additional verification that would protect existing coins from being hacked.\n\n### How does all this magic work?\n\nSee our [technical introduction](intro.md) to get started.\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added Aviator demo (WIP)<commit_after>\/*\n Example code is copied verbatim from:\n https:\/\/tympanus.net\/codrops\/2016\/04\/26\/the-aviator-animating-basic-3d-scene-threejs\/\n*\/\n\nextern crate cgmath;\nextern crate rand;\nextern crate three;\n\nuse std::f32::consts::PI;\nuse cgmath::prelude::*;\n\nstruct Cloud {\n group: three::Group,\n meshes: Vec<three::Mesh>,\n}\n\nimpl Cloud {\n fn new<R: rand::Rng>(rng: &mut R,\n factory: &mut three::Factory,\n scene: &mut three::Scene,\n parent: &three::Group,\n ) -> Self {\n let mut cloud = Cloud {\n group: factory.group(),\n meshes: Vec::new()\n };\n cloud.group.attach(scene, Some(parent));\n let geo = three::Geometry::new_box(20.0, 20.0, 20.0);\n let material = three::Material::MeshBasic{ color: 0xFFFFFF };\n for i in 0 .. rng.gen_range(3, 6) {\n let mut m = factory.mesh(geo.clone(), material.clone());\n let rot: three::Orientation = rng.gen();\n *m.transform_mut() = three::Transform {\n scale: rng.gen_range(0.1, 1.0),\n rot: rot.normalize(),\n disp: cgmath::vec3(i as f32 * 15.0, rng.next_f32() * 10.0, rng.next_f32() * 10.0),\n };\n m.attach(scene, Some(&cloud.group));\n cloud.meshes.push(m);\n }\n cloud\n }\n}\n\nstruct Sky {\n group: three::Group,\n clouds: Vec<Cloud>,\n}\n\nimpl Sky {\n fn new<R: rand::Rng>(rng: &mut R,\n factory: &mut three::Factory,\n scene: &mut three::Scene,\n ) -> Self {\n let mut sky = Sky {\n group: factory.group(),\n clouds: Vec::new(),\n };\n sky.group.attach(scene, None);\n let num = 20i32;\n let step_angle = PI * 2.0 \/ num as f32;\n for i in 0 .. 
num {\n let mut c = Cloud::new(rng, factory, scene, &sky.group);\n let angle = cgmath::Rad(i as f32 * step_angle);\n let dist = rng.gen_range(750.0, 950.0);\n *c.group.transform_mut() = three::Transform {\n scale: rng.gen_range(1.0, 3.0),\n rot: three::Orientation::from_angle_z(angle + cgmath::Rad::turn_div_4()),\n disp: cgmath::vec3(angle.cos() * dist,\n angle.sin() * dist,\n rng.gen_range(-800.0, -400.0)),\n };\n sky.clouds.push(c);\n }\n sky\n }\n}\n\nstruct AirPlane {\n group: three::Group,\n _cockpit: three::Mesh,\n _engine: three::Mesh,\n _tail: three::Mesh,\n _wing: three::Mesh,\n propeller_group: three::Group,\n _propeller: three::Mesh,\n _blade: three::Mesh,\n}\n\nimpl AirPlane {\n fn new(\n factory: &mut three::Factory,\n scene: &mut three::Scene,\n ) -> Self {\n let mut group = factory.group();\n group.attach(scene, None);\n\n let mut cockpit = factory.mesh(\n three::Geometry::new_box(60.0, 50.0, 50.0),\n three::Material::MeshBasic{ color: 0xFF0000 }\n );\n let mut engine = factory.mesh(\n three::Geometry::new_box(20.0, 50.0, 50.0),\n three::Material::MeshBasic{ color: 0xFFFFFF }\n );\n engine.transform_mut().disp.x = 40.0;\n let mut tail = factory.mesh(\n three::Geometry::new_box(15.0, 20.0, 5.0),\n three::Material::MeshBasic{ color: 0xFF0000 }\n );\n tail.transform_mut().disp = cgmath::vec3(-35.0, 25.0, 0.0);\n let mut wing = factory.mesh(\n three::Geometry::new_box(40.0, 8.0, 150.0),\n three::Material::MeshBasic{ color: 0xFF0000 }\n );\n\n let mut propeller_group = factory.group();\n propeller_group.transform_mut().disp = cgmath::vec3(50.0, 0.0, 0.0);\n propeller_group.attach(scene, Some(&group));\n let mut propeller = factory.mesh(\n three::Geometry::new_box(20.0, 10.0, 10.0),\n three::Material::MeshBasic{ color: 0xa52a2a }\n );\n propeller.attach(scene, Some(&propeller_group));\n let mut blade = factory.mesh(\n three::Geometry::new_box(1.0, 100.0, 20.0),\n three::Material::MeshBasic{ color: 0x23190f }\n );\n blade.transform_mut().disp = cgmath::vec3(8.0, 0.0, 0.0);\n blade.attach(scene, Some(&propeller_group));\n\n for mesh in [&mut cockpit, &mut engine, &mut tail, &mut wing].iter_mut() {\n mesh.attach(scene, Some(&group));\n }\n\n AirPlane {\n group,\n _cockpit: cockpit,\n _engine: engine,\n _tail: tail,\n _wing: wing,\n propeller_group,\n _propeller: propeller,\n _blade: blade,\n }\n }\n\n fn update(&mut self, dt: f32, target: (f32, f32)) {\n let mut pt = self.propeller_group.transform_mut();\n pt.rot = pt.rot * three::Orientation::from_angle_x(cgmath::Rad(0.3 * dt));\n self.group.transform_mut().disp =\n cgmath::vec3(0.0 + target.0 * 100.0, 100.0 + target.1 * 75.0, 0.0);\n }\n}\n\n\nfn main() {\n let mut rng = rand::thread_rng();\n let mut cam = three::PerspectiveCamera::new(60.0, 0.0, 1.0, 1000.0);\n cam.position = three::Position::new(0.0, 100.0, 200.0);\n let mut win = three::Window::new(\"Three-rs box mesh drawing example\", cam);\n\n \/\/TODO: win.scene.fog = Some(three::Fog::new(...));\n \/\/TODO: create lights\n \/\/TODO: Phong materials\n \/\/TODO: cast\/receive shadows\n\n let mut sea = {\n let geo = three::Geometry::new_cylinder(600.0, 600.0, 800.0, 40);\n let material = three::Material::MeshBasic{ color: 0x0000FF };\n win.factory.mesh(geo, material)\n };\n *sea.transform_mut() = three::Transform {\n scale: 1.0,\n rot: three::Orientation::from_angle_x(-cgmath::Rad::turn_div_4()),\n disp: cgmath::vec3(0.0, -600.0, 0.0),\n };\n sea.attach(&mut win.scene, None);\n\n let mut sky = Sky::new(&mut rng, &mut win.factory, &mut win.scene);\n 
sky.group.transform_mut().disp.y = -600.0;\n\n let mut airplane = AirPlane::new(&mut win.factory, &mut win.scene);\n *airplane.group.transform_mut() = three::Transform {\n scale: 0.25,\n rot: three::Orientation::one(),\n disp: cgmath::vec3(0.0, 100.0, 0.0),\n };\n\n while let Some(events) = win.update() {\n \/\/ assume the original velocities are given for 60fps\n let dt = events.time_delta * 60.0;\n\n airplane.update(dt, events.mouse_pos);\n\n if let (mut t, 0) = (sea.transform_mut(), 0) {\n t.rot = t.rot * three::Orientation::from_angle_y(cgmath::Rad(0.005 * dt));\n }\n if let (mut t, 0) = (sky.group.transform_mut(), 0) {\n t.rot = t.rot * three::Orientation::from_angle_z(cgmath::Rad(0.01 * dt));\n }\n\n win.render();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Translate the entire comments to pt-BR<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(const_fn)]\n#![feature(cfg_target_thread_local, thread_local_internals)]\n\ntype Foo = std::cell::RefCell<String>;\n\n#[cfg(target_thread_local)]\nstatic __KEY: std::thread::__FastLocalKeyInner<Foo> =\n std::thread::__FastLocalKeyInner::new();\n\/\/~^^ ERROR Sync` is not satisfied\n\/\/~^^^ ERROR Sync` is not satisfied\n\n#[cfg(not(target_thread_local))]\nstatic __KEY: std::thread::__OsLocalKeyInner<Foo> =\n std::thread::__OsLocalKeyInner::new();\n\nfn __getit() -> std::option::Option<\n &'static std::cell::UnsafeCell<\n std::option::Option<Foo>>>\n{\n __KEY.get() \/\/~ ERROR call to unsafe function requires unsafe\n}\n\nstatic FOO: std::thread::LocalKey<Foo> =\n std::thread::LocalKey::new(__getit, Default::default);\n\/\/~^ ERROR call to unsafe function requires unsafe\n\nfn main() {\n FOO.with(|foo| println!(\"{}\", foo.borrow()));\n std::thread::spawn(|| {\n FOO.with(|foo| *foo.borrow_mut() += \"foo\");\n }).join().unwrap();\n FOO.with(|foo| println!(\"{}\", foo.borrow()));\n}\n<commit_msg>fix test<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(const_fn)]\n#![feature(thread_local)]\n#![feature(cfg_target_thread_local, thread_local_internals)]\n\ntype Foo = std::cell::RefCell<String>;\n\n#[cfg(target_thread_local)]\n#[thread_local]\nstatic __KEY: std::thread::__FastLocalKeyInner<Foo> =\n std::thread::__FastLocalKeyInner::new();\n\n#[cfg(not(target_thread_local))]\nstatic __KEY: std::thread::__OsLocalKeyInner<Foo> =\n std::thread::__OsLocalKeyInner::new();\n\nfn __getit() -> std::option::Option<\n &'static std::cell::UnsafeCell<\n std::option::Option<Foo>>>\n{\n __KEY.get() \/\/~ ERROR call to unsafe function requires unsafe\n}\n\nstatic FOO: std::thread::LocalKey<Foo> =\n std::thread::LocalKey::new(__getit, Default::default);\n\/\/~^ ERROR call to unsafe function requires unsafe\n\nfn main() {\n FOO.with(|foo| println!(\"{}\", foo.borrow()));\n std::thread::spawn(|| {\n FOO.with(|foo| *foo.borrow_mut() += \"foo\");\n }).join().unwrap();\n FOO.with(|foo| println!(\"{}\", foo.borrow()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Started working on VM engine<commit_after>enum Exp {\n Number(int)\n}\n\nstruct Engine {\n stack: Vec<Exp>\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add a test for const fn methods, as suggested by @pnkfelix<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(const_fn)]\n\nstruct Foo { val: u32 }\n\nimpl Foo {\n const fn new() -> Foo {\n Foo { value: 22 }\n }\n}\n\nconst FOO: Foo = Foo::new();\n\npub fn main() {\n assert_eq!(FOO.value, 22);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add index over dates.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Box<> handling in EventHandler<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added \"indent\" example<commit_after>extern crate piston_meta;\n\nuse piston_meta::*;\n\nfn main() {\n let text = r#\"\n1\n 2\n 3\n 4\n 5\n 6\n \"#;\n let rules = r#\"\n 2 node = [.$:\"num\" .l+(node:\"node\")]\n 1 document = [.w? node:\"node\" .w?]\n \"#;\n \/\/ Parse rules with meta language and convert to rules for parsing text.\n let rules = match syntax_errstr(rules) {\n Err(err) => {\n println!(\"{}\", err);\n return;\n }\n Ok(rules) => rules\n };\n let mut data = vec![];\n match parse_errstr(&rules, text, &mut data) {\n Err(err) => {\n println!(\"{}\", err);\n return;\n }\n Ok(()) => {}\n };\n json::print(&data);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #47425 - EdSchouten:immutable-tls, r=nikomatsakis<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for issue #47053\n\n#![feature(nll)]\n#![feature(thread_local)]\n\n#[thread_local]\nstatic FOO: isize = 5;\n\nfn main() {\n FOO = 6; \/\/~ ERROR cannot assign to immutable item `FOO` [E0594]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>adept cat to new ArgParser version<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #20225<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(unboxed_closures)]\n\nstruct Foo;\n\nimpl<'a, T> Fn<(&'a T,)> for Foo {\n type Output = ();\n\n extern \"rust-call\" fn call(&self, (_,): (T,)) {}\n \/\/~^ ERROR: has an incompatible type for trait: expected &-ptr\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #32222<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n#![allow(warnings)]\n\nmod foo {\n pub fn bar() {}\n}\n\npub use foo::*;\nuse b::bar;\n\nmod foobar {\n use super::*;\n}\n\nmod a {\n pub mod bar {}\n}\n\nmod b {\n pub use a::bar;\n}\n\n#[rustc_error]\nfn main() {} \/\/~ ERROR compilation successful\n<|endoftext|>"} {"text":"<commit_before><commit_msg>oops, forgot new file<commit_after>\/\/! A lock-free stack which supports concurrent pushes and a concurrent call to\n\/\/! 
drain the entire stack all at once.\n\n\/\/ Borrowed directly from futures-rs.\n\nuse std::prelude::v1::*;\n\nuse std::sync::atomic::AtomicUsize;\nuse std::mem;\nuse std::marker;\nuse std::sync::atomic::Ordering::SeqCst;\n\npub struct Stack<T> {\n head: AtomicUsize,\n _marker: marker::PhantomData<T>,\n}\n\nstruct Node<T> {\n data: T,\n next: *mut Node<T>,\n}\n\npub struct Drain<T> {\n head: *mut Node<T>,\n}\n\nimpl<T> Stack<T> {\n pub fn new() -> Stack<T> {\n Stack {\n head: AtomicUsize::new(0),\n _marker: marker::PhantomData,\n }\n }\n\n pub fn push(&self, data: T) {\n let mut node = Box::new(Node { data: data, next: 0 as *mut _ });\n let mut head = self.head.load(SeqCst);\n loop {\n node.next = head as *mut _;\n let ptr = &*node as *const Node<T> as usize;\n match self.head.compare_exchange(head, ptr, SeqCst, SeqCst) {\n Ok(_) => {\n mem::forget(node);\n return\n }\n Err(cur) => head = cur,\n }\n }\n }\n\n pub fn drain(&self) -> Drain<T> {\n Drain {\n head: self.head.swap(0, SeqCst) as *mut _,\n }\n }\n}\n\nimpl<T> Drop for Stack<T> {\n fn drop(&mut self) {\n self.drain();\n }\n}\n\nimpl<T> Iterator for Drain<T> {\n type Item = T;\n\n fn next(&mut self) -> Option<T> {\n if self.head.is_null() {\n return None\n }\n unsafe {\n let node = Box::from_raw(self.head);\n self.head = node.next;\n return Some(node.data)\n }\n }\n}\n\nimpl<T> Drop for Drain<T> {\n fn drop(&mut self) {\n for item in self.by_ref() {\n drop(item);\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::prelude::v1::*;\n use std::rc::Rc;\n use std::cell::Cell;\n\n use super::Stack;\n\n struct Set(Rc<Cell<usize>>, usize);\n\n impl Drop for Set {\n fn drop(&mut self) {\n self.0.set(self.1);\n }\n }\n\n #[test]\n fn simple() {\n let s = Stack::new();\n s.push(1);\n s.push(2);\n s.push(4);\n assert_eq!(s.drain().collect::<Vec<_>>(), vec![4, 2, 1]);\n s.push(5);\n assert_eq!(s.drain().collect::<Vec<_>>(), vec![5]);\n assert_eq!(s.drain().collect::<Vec<_>>(), vec![]);\n }\n\n #[test]\n fn drain_drops() {\n let data = Rc::new(Cell::new(0));\n let s = Stack::new();\n s.push(Set(data.clone(), 1));\n drop(s.drain());\n assert_eq!(data.get(), 1);\n }\n\n #[test]\n fn drop_drops() {\n let data = Rc::new(Cell::new(0));\n let s = Stack::new();\n s.push(Set(data.clone(), 1));\n drop(s);\n assert_eq!(data.get(), 1);\n }\n}\n\nimpl ::futures::task::EventSet for Stack<usize> {\n fn insert(&self, id: usize) {\n self.push(id);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add the literals section<commit_after>fn main() {\n \/\/ Addition with unsigned integer\n println!(\"1 + 3 = {}\", 1u32 + 3);\n\n \/\/ Subtraction with signed integer\n \/\/ Rust refuses to compile if there is an overflow\n println!(\"1 - 3 = {}\", 1i32 - 3);\n\n \/\/ Boolean logic\n println!(\"true AND false is {}\", true && false);\n println!(\"true OR false is {}\", true || false);\n println!(\"NOT false is {}\", !true);\n\n \/\/ Bitwise operations\n println!(\"0110 AND 0011 is {:04b}\", 0b0110u32 & 0b0011);\n println!(\"0110 OR 0011 is {:04b}\", 0b0110u32 | 0b0011);\n println!(\"0110 XOR 0011 is {:04b}\", 0b0110u32 ^ 0b0011);\n println!(\"1 << 5 is {}\", 1u32 << 5);\n println!(\"0x80 >> 2 is 0x{:x}\", 0x80u32 >> 2);\n\n \/\/ Use underscores to improve readibility!\n println!(\"One million is written as {}\", 1_000_000u32);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Comment that right eye could be rendered using the command buffers recorded for the left eye with just the model view changed<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>add test for `fn main() -> !`<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(termination_trait)]\n\n\/\/ error-pattern:oh, dear\n\nfn main() -> ! {\n panic!(\"oh, dear\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Log errors in the binary instead of printing them<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Sum effect, which adds two input streams into one output<commit_after>use super::effect::Effect;\nuse partial::Partial;\n\npub struct Sum;\n\npub struct SumProcessIter {\n partial : Partial,\n iter_complete : bool,\n}\n\nimpl Iterator for SumProcessIter {\n type Item=Partial;\n\n fn next(&mut self) -> Option<Partial> {\n match self.iter_complete {\n false => { self.iter_complete=true; Some(self.partial) }\n true => None\n }\n }\n}\n\nimpl Effect for Sum {\n fn process(&mut self, partial : &Partial, slot_no : u32) -> Box<Iterator<Item=Partial>> {\n Box::new(SumProcessIter {partial:*partial, iter_complete:false})\n }\n fn get_input_slot(&self, index : u32) -> Option<&str> {\n match index {\n 0 => Some(\"source0\"),\n 1 => Some(\"source1\"),\n _ => None\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix transparency bug<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>bigpack: create target buf with data_start capacity<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make RequestToken::new private - should not have been public.<commit_after><|endoftext|>"} {"text":"<commit_before>#![cfg(all(unix, feature = \"os-poll\", feature = \"os-ext\"))]\n\nuse std::io::{self, Read, Write};\nuse std::process::{Command, Stdio};\nuse std::sync::{Arc, Barrier};\nuse std::thread;\nuse std::time::Duration;\n\nuse mio::event::Event;\nuse mio::unix::pipe::{self, Receiver, Sender};\nuse mio::{Events, Interest, Poll, Token};\n\nconst RECEIVER: Token = Token(0);\nconst SENDER: Token = Token(1);\n\nconst DATA1: &[u8; 11] = b\"Hello world\";\n\n#[test]\nfn smoke() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, mut receiver) = pipe::new().unwrap();\n\n let mut buf = [0; 20];\n assert_would_block(receiver.read(&mut buf));\n\n poll.registry()\n .register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n let n = receiver.read(&mut buf).unwrap();\n assert_eq!(n, DATA1.len());\n assert_eq!(&buf[..n], &*DATA1);\n}\n\n#[test]\nfn event_when_sender_is_dropped() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, mut receiver) = pipe::new().unwrap();\n poll.registry()\n 
.register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n\n let barrier = Arc::new(Barrier::new(2));\n let thread_barrier = barrier.clone();\n\n let handle = thread::spawn(move || {\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n thread_barrier.wait();\n\n thread_barrier.wait();\n drop(sender);\n thread_barrier.wait();\n });\n\n barrier.wait(); \/\/ Wait for the write to complete.\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n\n barrier.wait(); \/\/ Unblock the thread.\n barrier.wait(); \/\/ Wait until the sending end is dropped.\n\n expect_one_closed_event(&mut poll, &mut events, RECEIVER, true);\n\n handle.join().unwrap();\n}\n\n#[test]\nfn event_when_receiver_is_dropped() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, receiver) = pipe::new().unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n let barrier = Arc::new(Barrier::new(2));\n let thread_barrier = barrier.clone();\n\n let handle = thread::spawn(move || {\n thread_barrier.wait();\n drop(receiver);\n thread_barrier.wait();\n });\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n\n barrier.wait(); \/\/ Unblock the thread.\n barrier.wait(); \/\/ Wait until the receiving end is dropped.\n\n expect_one_closed_event(&mut poll, &mut events, SENDER, false);\n\n handle.join().unwrap();\n}\n\n#[test]\nfn from_child_process_io() {\n \/\/ `cat` simply echo everything that we write via standard in.\n let mut child = Command::new(\"cat\")\n .env_clear()\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"failed to start `cat` command\");\n\n let mut sender = Sender::from(child.stdin.take().unwrap());\n let mut receiver = Receiver::from(child.stdout.take().unwrap());\n\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n poll.registry()\n .register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n let mut buf = [0; 20];\n let n = receiver.read(&mut buf).unwrap();\n assert_eq!(n, DATA1.len());\n assert_eq!(&buf[..n], &*DATA1);\n\n drop(sender);\n\n expect_one_closed_event(&mut poll, &mut events, RECEIVER, true);\n\n child.wait().unwrap();\n}\n\n#[test]\nfn nonblocking_child_process_io() {\n \/\/ `cat` simply echo everything that we write via standard in.\n let mut child = Command::new(\"cat\")\n .env_clear()\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"failed to start `cat` command\");\n\n let sender = Sender::from(child.stdin.take().unwrap());\n let mut receiver = Receiver::from(child.stdout.take().unwrap());\n\n receiver.set_nonblocking(true).unwrap();\n\n let mut buf = [0; 20];\n assert_would_block(receiver.read(&mut buf));\n\n drop(sender);\n child.wait().unwrap();\n}\n\n\/\/\/ An event that is expected to show up when `Poll` is polled, see\n\/\/\/ `expect_events`.\n#[derive(Debug)]\npub struct ExpectEvent {\n token: Token,\n interests: Interest,\n}\n\nimpl ExpectEvent {\n pub const fn new(token: Token, interests: Interest) -> 
ExpectEvent {\n ExpectEvent { token, interests }\n }\n\n fn matches(&self, event: &Event) -> bool {\n event.token() == self.token &&\n \/\/ If we expect a readiness then also match on the event.\n \/\/ In maths terms that is p -> q, which is the same as !p || q.\n (!self.interests.is_readable() || event.is_readable()) &&\n (!self.interests.is_writable() || event.is_writable()) &&\n (!self.interests.is_aio() || event.is_aio()) &&\n (!self.interests.is_lio() || event.is_lio())\n }\n}\n\npub fn expect_events(poll: &mut Poll, events: &mut Events, mut expected: Vec<ExpectEvent>) {\n \/\/ In a lot of calls we expect more then one event, but it could be that\n \/\/ poll returns the first event only in a single call. To be a bit more\n \/\/ lenient we'll poll a couple of times.\n for _ in 0..3 {\n poll.poll(events, Some(Duration::from_millis(500)))\n .expect(\"unable to poll\");\n\n for event in events.iter() {\n let index = expected.iter().position(|expected| expected.matches(event));\n\n if let Some(index) = index {\n expected.swap_remove(index);\n } else {\n \/\/ Must accept sporadic events.\n println!(\"got unexpected event: {:?}\", event);\n }\n }\n\n if expected.is_empty() {\n return;\n }\n }\n\n assert!(\n expected.is_empty(),\n \"the following expected events were not found: {:?}\",\n expected\n );\n}\n\n\/\/\/ Assert that the provided result is an `io::Error` with kind `WouldBlock`.\npub fn assert_would_block<T>(result: io::Result<T>) {\n match result {\n Ok(_) => panic!(\"unexpected OK result, expected a `WouldBlock` error\"),\n Err(ref err) if err.kind() == io::ErrorKind::WouldBlock => {}\n Err(err) => panic!(\"unexpected error result: {}\", err),\n }\n}\n\n\/\/\/ Expected a closed event. If `read` is true is checks for `is_read_closed`,\n\/\/\/ otherwise for `is_write_closed`.\npub fn expect_one_closed_event(poll: &mut Poll, events: &mut Events, token: Token, read: bool) {\n poll.poll(events, Some(Duration::from_secs(1))).unwrap();\n let mut iter = events.iter();\n let event = iter.next().unwrap();\n assert_eq!(event.token(), token, \"invalid token, event: {:#?}\", event);\n if read {\n assert!(\n event.is_read_closed(),\n \"expected closed or error, event: {:#?}\",\n event\n );\n } else {\n assert!(\n event.is_write_closed(),\n \"expected closed or error, event: {:#?}\",\n event\n );\n }\n assert!(iter.next().is_none());\n}\n<commit_msg>Use util functions in unix_pipe tests<commit_after>#![cfg(all(unix, feature = \"os-poll\", feature = \"os-ext\"))]\n\nuse std::io::{Read, Write};\nuse std::process::{Command, Stdio};\nuse std::sync::{Arc, Barrier};\nuse std::thread;\nuse std::time::Duration;\n\nuse mio::unix::pipe::{self, Receiver, Sender};\nuse mio::{Events, Interest, Poll, Token};\n\nmod util;\nuse util::{assert_would_block, expect_events, ExpectEvent};\n\nconst RECEIVER: Token = Token(0);\nconst SENDER: Token = Token(1);\n\nconst DATA1: &[u8; 11] = b\"Hello world\";\n\n#[test]\nfn smoke() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, mut receiver) = pipe::new().unwrap();\n\n let mut buf = [0; 20];\n assert_would_block(receiver.read(&mut buf));\n\n poll.registry()\n .register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n\n expect_events(\n &mut poll,\n 
&mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n let n = receiver.read(&mut buf).unwrap();\n assert_eq!(n, DATA1.len());\n assert_eq!(&buf[..n], &*DATA1);\n}\n\n#[test]\nfn event_when_sender_is_dropped() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, mut receiver) = pipe::new().unwrap();\n poll.registry()\n .register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n\n let barrier = Arc::new(Barrier::new(2));\n let thread_barrier = barrier.clone();\n\n let handle = thread::spawn(move || {\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n thread_barrier.wait();\n\n thread_barrier.wait();\n drop(sender);\n thread_barrier.wait();\n });\n\n barrier.wait(); \/\/ Wait for the write to complete.\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n\n barrier.wait(); \/\/ Unblock the thread.\n barrier.wait(); \/\/ Wait until the sending end is dropped.\n\n expect_one_closed_event(&mut poll, &mut events, RECEIVER, true);\n\n handle.join().unwrap();\n}\n\n#[test]\nfn event_when_receiver_is_dropped() {\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n let (mut sender, receiver) = pipe::new().unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n let barrier = Arc::new(Barrier::new(2));\n let thread_barrier = barrier.clone();\n\n let handle = thread::spawn(move || {\n thread_barrier.wait();\n drop(receiver);\n thread_barrier.wait();\n });\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n\n barrier.wait(); \/\/ Unblock the thread.\n barrier.wait(); \/\/ Wait until the receiving end is dropped.\n\n expect_one_closed_event(&mut poll, &mut events, SENDER, false);\n\n handle.join().unwrap();\n}\n\n#[test]\nfn from_child_process_io() {\n \/\/ `cat` simply echo everything that we write via standard in.\n let mut child = Command::new(\"cat\")\n .env_clear()\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"failed to start `cat` command\");\n\n let mut sender = Sender::from(child.stdin.take().unwrap());\n let mut receiver = Receiver::from(child.stdout.take().unwrap());\n\n let mut poll = Poll::new().unwrap();\n let mut events = Events::with_capacity(8);\n\n poll.registry()\n .register(&mut receiver, RECEIVER, Interest::READABLE)\n .unwrap();\n poll.registry()\n .register(&mut sender, SENDER, Interest::WRITABLE)\n .unwrap();\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(SENDER, Interest::WRITABLE)],\n );\n let n = sender.write(DATA1).unwrap();\n assert_eq!(n, DATA1.len());\n\n expect_events(\n &mut poll,\n &mut events,\n vec![ExpectEvent::new(RECEIVER, Interest::READABLE)],\n );\n let mut buf = [0; 20];\n let n = receiver.read(&mut buf).unwrap();\n assert_eq!(n, DATA1.len());\n assert_eq!(&buf[..n], &*DATA1);\n\n drop(sender);\n\n expect_one_closed_event(&mut poll, &mut events, RECEIVER, true);\n\n child.wait().unwrap();\n}\n\n#[test]\nfn nonblocking_child_process_io() {\n \/\/ `cat` simply echo everything that we write via standard in.\n let mut child = Command::new(\"cat\")\n .env_clear()\n .stdin(Stdio::piped())\n .stdout(Stdio::piped())\n .spawn()\n .expect(\"failed to start `cat` command\");\n\n let sender = Sender::from(child.stdin.take().unwrap());\n let mut receiver = Receiver::from(child.stdout.take().unwrap());\n\n 
receiver.set_nonblocking(true).unwrap();\n\n let mut buf = [0; 20];\n assert_would_block(receiver.read(&mut buf));\n\n drop(sender);\n child.wait().unwrap();\n}\n\n\/\/\/ Expected a closed event. If `read` is true is checks for `is_read_closed`,\n\/\/\/ otherwise for `is_write_closed`.\npub fn expect_one_closed_event(poll: &mut Poll, events: &mut Events, token: Token, read: bool) {\n poll.poll(events, Some(Duration::from_secs(1))).unwrap();\n let mut iter = events.iter();\n let event = iter.next().unwrap();\n assert_eq!(event.token(), token, \"invalid token, event: {:#?}\", event);\n if read {\n assert!(\n event.is_read_closed(),\n \"expected closed or error, event: {:#?}\",\n event\n );\n } else {\n assert!(\n event.is_write_closed(),\n \"expected closed or error, event: {:#?}\",\n event\n );\n }\n assert!(iter.next().is_none());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add more tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Small additions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove the experimental function call<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Restrict marks to be from a to z (é is invalid)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing plugin file<commit_after>use std::sync::Mutex;\nuse std::collections::HashMap;\nuse circular_queue::CircularQueue;\nuse regex::{Regex, RegexBuilder};\n\nuse irc::client::prelude::*;\n\nuse plugin::*;\n\nuse failure::Fail;\nuse failure::ResultExt;\nuse error::ErrorKind as FrippyErrorKind;\nuse error::FrippyError;\nuse self::error::*;\n\nlazy_static! {\n static ref RE: Regex = Regex::new(r\"^s\/((?:\\\\\/|[^\/])+)\/((?:\\\\\/|[^\/])*)\/(?:(\\w+))?\\s*$\").unwrap();\n}\n\n#[derive(PluginName, Debug)]\npub struct Sed {\n per_channel: usize,\n channel_messages: Mutex<HashMap<String, CircularQueue<String>>>,\n}\n\nmacro_rules! 
try_lock {\n ( $m:expr ) => {\n match $m.lock() {\n Ok(guard) => guard,\n Err(poisoned) => poisoned.into_inner(),\n }\n }\n}\n\nimpl Sed {\n pub fn new(per_channel: usize) -> Sed {\n Sed {\n per_channel: per_channel,\n channel_messages: Mutex::new(HashMap::new()),\n }\n }\n\n fn add_message(&self, channel: String, message: String) {\n let mut channel_messages = try_lock!(self.channel_messages);\n let messages = channel_messages\n .entry(channel)\n .or_insert(CircularQueue::with_capacity(self.per_channel));\n messages.push(message);\n }\n\n fn run_regex(&self, channel: &str, message: &str) -> Result<String, SedError> {\n let mut global_match = false;\n let mut case_insens = false;\n let mut ign_whitespace = false;\n let mut swap_greed = false;\n let mut disable_unicode = false;\n\n let captures = RE.captures(message).unwrap();\n debug!(\"{:?}\", captures);\n\n let first = captures.get(1).unwrap().as_str();\n let second = captures.get(2).unwrap().as_str();\n\n if let Some(flags) = captures.get(3) {\n let flags = flags.as_str();\n\n global_match = flags.contains('g');\n case_insens = flags.contains('i');\n ign_whitespace = flags.contains('x');\n disable_unicode = !flags.contains('u');\n swap_greed = flags.contains('U');\n }\n\n let user_re = RegexBuilder::new(&first.replace(r\"\\\/\", \"\/\"))\n .case_insensitive(case_insens)\n .ignore_whitespace(ign_whitespace)\n .unicode(disable_unicode)\n .swap_greed(swap_greed)\n .build().context(ErrorKind::InvalidRegex)?;\n\n let channel_messages = try_lock!(self.channel_messages);\n let messages = channel_messages.get(channel).ok_or(ErrorKind::NoMessages)?;\n\n for message in messages.iter() {\n if user_re.is_match(message) {\n let response = if global_match {\n user_re.replace_all(message, second)\n } else {\n user_re.replace(message, second)\n };\n\n return Ok(response.to_string());\n }\n }\n\n Err(ErrorKind::NoMatch)?\n }\n}\n\nimpl Plugin for Sed {\n fn execute(&self, client: &IrcClient, message: &Message) -> ExecutionStatus {\n match message.command {\n Command::PRIVMSG(_, ref content) => {\n let channel = message.response_target().unwrap();\n if channel == message.source_nickname().unwrap() {\n return ExecutionStatus::Done;\n }\n\n if RE.is_match(content) {\n let result = match self.run_regex(channel, content) {\n Ok(msg) => client.send_privmsg(channel, &msg),\n Err(e) => client.send_notice(channel, &e.to_string()),\n };\n\n match result {\n Err(e) => ExecutionStatus::Err(e.context(FrippyErrorKind::Connection).into()),\n Ok(_) => ExecutionStatus::Done,\n }\n } else {\n self.add_message(channel.to_string(), content.to_string());\n\n ExecutionStatus::Done\n }\n }\n _ => ExecutionStatus::Done,\n }\n }\n\n fn execute_threaded(&self, _: &IrcClient, _: &Message) -> Result<(), FrippyError> {\n panic!(\"Sed should not use threading\")\n }\n\n fn command(&self, client: &IrcClient, command: PluginCommand) -> Result<(), FrippyError> {\n Ok(client\n .send_notice(\n &command.source,\n \"Currently this Plugin does not implement any commands.\",\n )\n .context(FrippyErrorKind::Connection)?)\n }\n\n fn evaluate(&self, _: &IrcClient, _: PluginCommand) -> Result<String, String> {\n Err(String::from(\n \"Evaluation of commands is not implemented for sed at this time\",\n ))\n }\n}\n\npub mod error {\n #[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]\n #[error = \"SedError\"]\n pub enum ErrorKind {\n \/\/\/ Invalid regex error\n #[fail(display = \"Invalid regex\")]\n InvalidRegex,\n\n \/\/\/ No messages found error\n #[fail(display = \"No messages were found 
for this channel\")]\n NoMessages,\n\n \/\/\/ No match found error\n #[fail(display = \"No recent messages match this regex\")]\n NoMatch,\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add --rcfile option for specifying which file to execute on startup.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Try again at implementing decoder<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Block resizing of the window<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a unique name generator<commit_after>extern crate regex;\n\nuse regex::Regex;\nuse std::fmt;\n\n\/\/\/ An identifier name generator.\npub struct Name {\n base: String,\n next: usize,\n}\n\nimpl Name {\n \/\/\/ Initialize a name generator with a base name. A unique identifier may\n \/\/\/ then be generated with the `next` and `to_string` functions.\n pub fn new(base: &str) -> Self {\n Name {\n base: String::from(base),\n next: 0,\n }\n }\n\n \/\/\/ Advances the generator to the next unique identifier. When passing\n \/\/\/ a `Name` through recursive function calls, this can be called before\n \/\/\/ the next recursion to increment the depth of the generated\n \/\/\/ identifiers.\n pub fn next(&mut self) -> &mut Self {\n self.next = self.next + 1;\n self\n }\n}\n\nimpl Name {\n \/\/\/ Creates a valid identifier from the template's short name to be used\n \/\/\/ in function or variable names generated from this template file:\n \/\/\/ `include\/header -> include_header`.\n pub fn id(&self) -> String {\n let re = Regex::new(r\"[^\\w]\").unwrap();\n re.replace_all(&self.base, \"_\")\n }\n}\n\nimpl fmt::Display for Name {\n \/\/\/ Creates a unique identifier to be used as a variable or function name.\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}{}\", self.id(), self.next)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Name;\n\n #[test]\n fn id() {\n let name = Name::new(\"include\/header\");\n assert_eq!(\"include_header\", name.id());\n }\n\n #[test]\n fn next() {\n let mut name = Name::new(\"include\/header\");\n assert_eq!(\"include_header0\", name.to_string());\n\n name.next();\n assert_eq!(\"include_header1\", name.to_string());\n\n name.next();\n assert_eq!(\"include_header2\", name.to_string());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use failure::{Backtrace, Context, Fail};\nuse lapin_async;\nuse std::fmt;\nuse std::io;\nuse tokio_timer;\n\nuse transport::CodecError;\n\n\/\/\/ The type of error that can be returned in this crate.\n\/\/\/\n\/\/\/ Instead of implementing the `Error` trait provided by the standard library,\n\/\/\/ it implemented the `Fail` trait provided by the `failure` crate. 
Doing so\n\/\/\/ means that this type guaranteed to be both sendable and usable across\n\/\/\/ threads, and that you'll be able to use the downcasting feature of the\n\/\/\/ `failure::Error` type.\n#[derive(Debug)]\npub struct Error {\n inner: Context<ErrorKind>,\n}\n\n\/\/\/ The different kinds of errors that can be reported.\n\/\/\/\n\/\/\/ This enumeration is deliberately not exported to the end-users of this\n\/\/\/ crate because it is not yet possible to prevent developers of matching\n\/\/\/ exhaustively against its variants.\n#[derive(Debug, Fail)]\npub(crate) enum ErrorKind {\n #[fail(display = \"The maximum number of channels for this connection has been reached\")]\n ChannelLimitReached,\n #[fail(display = \"Failed to open channel\")]\n ChannelOpenFailed,\n #[fail(display = \"Couldn't decode incoming frame: {}\", _0)]\n Decode(CodecError),\n #[fail(display = \"The connection was closed by the remote peer\")]\n ConnectionClosed,\n #[fail(display = \"Failed to connect: {}\", _0)]\n ConnectionFailed(#[fail(cause)] io::Error),\n #[fail(display = \"Basic get returned empty\")]\n EmptyBasicGet,\n #[fail(display = \"Couldn't encode outcoming frame: {}\", _0)]\n Encode(CodecError),\n #[fail(display = \"The timer of the heartbeat encountered an error: {}\", _0)]\n HeartbeatTimer(#[fail(cause)] tokio_timer::Error),\n #[fail(display = \"Failed to handle incoming frame: {:?}\", _0)]\n \/\/ FIXME: mark lapin_async's Error as cause once it implements Fail\n InvalidFrame(lapin_async::error::Error),\n #[fail(display = \"Couldn't parse URI: {}\", _0)]\n InvalidUri(String),\n #[fail(display = \"Transport mutex is poisoned\")]\n PoisonedMutex,\n #[fail(display = \"{}: {:?}\", _0, _1)]\n \/\/ FIXME: mark lapin_async's Error as cause once it implements Fail\n ProtocolError(String, lapin_async::error::Error)\n}\n\nimpl Error {\n \/\/\/ Returns true if the error is caused by the limit of channels being reached.\n pub fn is_channel_limit_reached(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::ChannelLimitReached => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a channel that couldn't be opened.\n pub fn is_channel_open_failed(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::ChannelOpenFailed => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a connection closed unexpectedly.\n pub fn is_connection_closed(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::ConnectionClosed => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a connection that couldn't be established.\n pub fn is_connection_failed(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::ConnectionFailed(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a frame that couldn't be decoded.\n pub fn is_decode(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::Decode(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by the `basic.get` response being empty.\n pub fn is_empty_basic_get(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::EmptyBasicGet => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a frame that couldn't be encoded.\n pub fn is_encode(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::Encode(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by the timer used in the heartbeat task.\n pub fn is_heartbeat_timer(&self) 
-> bool {\n match *self.inner.get_context() {\n ErrorKind::HeartbeatTimer(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a malformed AMQP URI.\n pub fn is_invalid_uri(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::InvalidUri(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a mutex that got poisoned.\n pub fn is_poisoned_mutex(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::PoisonedMutex => true,\n _ => false,\n }\n }\n\n \/\/\/ Returns true if the error is caused by a protocol error.\n pub fn is_protocol_error(&self) -> bool {\n match *self.inner.get_context() {\n ErrorKind::ProtocolError(_, _) => true,\n _ => false,\n }\n }\n}\n\nimpl Fail for Error {\n fn cause(&self) -> Option<&Fail> {\n self.inner.cause()\n }\n\n fn backtrace(&self) -> Option<&Backtrace> {\n self.inner.backtrace()\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.inner, f)\n }\n}\n\nimpl From<ErrorKind> for Error {\n fn from(kind: ErrorKind) -> Error {\n Error { inner: Context::new(kind) }\n }\n}\n\nimpl From<Context<ErrorKind>> for Error {\n fn from(inner: Context<ErrorKind>) -> Error {\n Error { inner: inner }\n }\n}\n<commit_msg>Expose ErrorKind and delete a bunch of is_* methods<commit_after>use failure::{Backtrace, Context, Fail};\nuse lapin_async;\nuse std::fmt;\nuse std::io;\nuse tokio_timer;\n\nuse transport::CodecError;\n\n\/\/\/ The type of error that can be returned in this crate.\n\/\/\/\n\/\/\/ Instead of implementing the `Error` trait provided by the standard library,\n\/\/\/ it implemented the `Fail` trait provided by the `failure` crate. Doing so\n\/\/\/ means that this type guaranteed to be both sendable and usable across\n\/\/\/ threads, and that you'll be able to use the downcasting feature of the\n\/\/\/ `failure::Error` type.\n#[derive(Debug)]\npub struct Error {\n inner: Context<ErrorKind>,\n}\n\n\/\/\/ The different kinds of errors that can be reported.\n\/\/\/\n\/\/\/ Event though we expose the complete enumeration of possible error variants, it is not\n\/\/\/ considered stable to exhaustively match on this enumeration: do it at your own risk.\n#[derive(Debug, Fail)]\npub enum ErrorKind {\n #[fail(display = \"The maximum number of channels for this connection has been reached\")]\n ChannelLimitReached,\n #[fail(display = \"Failed to open channel\")]\n ChannelOpenFailed,\n #[fail(display = \"Couldn't decode incoming frame: {}\", _0)]\n Decode(CodecError),\n #[fail(display = \"The connection was closed by the remote peer\")]\n ConnectionClosed,\n #[fail(display = \"Failed to connect: {}\", _0)]\n ConnectionFailed(#[fail(cause)] io::Error),\n #[fail(display = \"Basic get returned empty\")]\n EmptyBasicGet,\n #[fail(display = \"Couldn't encode outcoming frame: {}\", _0)]\n Encode(CodecError),\n #[fail(display = \"The timer of the heartbeat encountered an error: {}\", _0)]\n HeartbeatTimer(#[fail(cause)] tokio_timer::Error),\n #[fail(display = \"Failed to handle incoming frame: {:?}\", _0)]\n \/\/ FIXME: mark lapin_async's Error as cause once it implements Fail\n InvalidFrame(lapin_async::error::Error),\n #[fail(display = \"Couldn't parse URI: {}\", _0)]\n InvalidUri(String),\n #[fail(display = \"Transport mutex is poisoned\")]\n PoisonedMutex,\n #[fail(display = \"{}: {:?}\", _0, _1)]\n \/\/ FIXME: mark lapin_async's Error as cause once it implements Fail\n ProtocolError(String, lapin_async::error::Error),\n \/\/\/ A 
hack to prevent developers from exhaustively match on the enum's variants\n \/\/\/\n \/\/\/ The purpose of this variant is to let the `ErrorKind` enumeration grow more variants\n \/\/\/ without it being a breaking change for users. It is planned for the language to provide\n \/\/\/ this functionnality out of the box, though it has not been [stabilized] yet.\n \/\/\/\n \/\/\/ [stabilized]: https:\/\/github.com\/rust-lang\/rust\/issues\/44109\n #[doc(hidden)]\n #[fail(display = \"lapin_futures::error::ErrorKind::__Nonexhaustive: this should not be printed\")]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Return the underlying `ErrorKind`\n pub fn kind(&self) -> &ErrorKind {\n self.inner.get_context()\n }\n}\n\nimpl Fail for Error {\n fn cause(&self) -> Option<&Fail> {\n self.inner.cause()\n }\n\n fn backtrace(&self) -> Option<&Backtrace> {\n self.inner.backtrace()\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.inner, f)\n }\n}\n\nimpl From<ErrorKind> for Error {\n fn from(kind: ErrorKind) -> Error {\n Error { inner: Context::new(kind) }\n }\n}\n\nimpl From<Context<ErrorKind>> for Error {\n fn from(inner: Context<ErrorKind>) -> Error {\n Error { inner: inner }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Clean up code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Using Rayon to parallelize encoding.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Revert \"Work on ReadItem.\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>:bug: Fix the type of due_dateutc<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2016 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Handling log-probabilities.\n\npub mod cdf;\n\nuse std::mem;\nuse std::f64;\nuse std::iter;\nuse std::ops::{Add, Sub, Mul, Div};\n\nuse itertools::linspace;\nuse itertools::Itertools;\nuse itertools::misc::ToFloat;\n\n\n\/\/\/ A factor to convert log-probabilities to PHRED-scale (phred = p * `LOG_TO_PHRED_FACTOR`).\nconst LOG_TO_PHRED_FACTOR: f64 = -4.3429448190325175; \/\/ -10 * 1 \/ ln(10)\n\n\n\/\/\/ A factor to convert PHRED-scale to log-probabilities (p = phred * `PHRED_TO_LOG_FACTOR`).\nconst PHRED_TO_LOG_FACTOR: f64 = -0.23025850929940456; \/\/ 1 \/ (-10 * log10(e))\n\n\n\/\/\/ Calculate log(1 - p) with p given in log space without loss of precision as described in\n\/\/\/ http:\/\/cran.r-project.org\/web\/packages\/Rmpfr\/vignettes\/log1mexp-note.pdf.\nfn ln_1m_exp(p: f64) -> f64 {\n assert!(p <= 0.0);\n if p < -0.693 {\n (-p.exp()).ln_1p()\n } else {\n (-p.exp_m1()).ln()\n }\n}\n\ncustom_derive! {\n \/\/\/ A newtype for probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::Prob;\n \/\/\/\n \/\/\/ let p = Prob(0.5);\n \/\/\/ let q = Prob(0.2);\n \/\/\/\n \/\/\/ assert_relative_eq!(*(p + q), *Prob(0.7));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n NewtypeMul(*),\n NewtypeDiv(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n Default,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct Prob(pub f64);\n}\n\n\ncustom_derive! 
{\n \/\/\/ A newtype for log-scale probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::{LogProb, Prob};\n \/\/\/\n \/\/\/ \/\/ convert from probability\n \/\/\/ let p = LogProb::from(Prob(0.5));\n \/\/\/ \/\/ convert manually\n \/\/\/ let q = LogProb(0.2f64.ln());\n \/\/\/ \/\/ obtain zero probability in log-space\n \/\/\/ let o = LogProb::ln_one();\n \/\/\/\n \/\/\/ assert_relative_eq!(*Prob::from(p.ln_add_exp(q) + o), *Prob(0.7));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct LogProb(pub f64);\n}\n\n\ncustom_derive! {\n \/\/\/ A newtype for PHRED-scale probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::{PHREDProb, Prob};\n \/\/\/\n \/\/\/ let p = PHREDProb::from(Prob(0.5));\n \/\/\/\n \/\/\/ assert_relative_eq!(*Prob::from(p), *Prob(0.5));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct PHREDProb(pub f64);\n}\n\n\n\/\/\/ Iterator returned by scans over logprobs.\npub type ScanIter<I> = iter::Scan<<I as IntoIterator>::IntoIter, LogProb, fn(&mut LogProb, LogProb) -> Option<LogProb>>;\n\n\nstatic LOGPROB_LN_ZERO: LogProb = LogProb(f64::NEG_INFINITY);\nstatic LOGPROB_LN_ONE: LogProb = LogProb(0.0);\n\n\nimpl LogProb {\n \/\/\/ Log-space representation of Pr=0\n pub fn ln_zero() -> LogProb {\n LOGPROB_LN_ZERO\n }\n\n \/\/\/ Log-space representation of Pr=1\n pub fn ln_one() -> LogProb {\n LOGPROB_LN_ONE\n }\n\n \/\/\/ Numerically stable calculation of 1 - p in log-space.\n pub fn ln_one_minus_exp(&self) -> LogProb {\n LogProb(ln_1m_exp(**self))\n }\n\n \/\/\/ Numerically stable sum of probabilities in log-space.\n pub fn ln_sum_exp(probs: &[LogProb]) -> LogProb {\n if probs.is_empty() {\n Self::ln_zero()\n } else {\n let mut pmax = probs[0];\n let mut imax = 0;\n for (i, &p) in probs.iter().enumerate().skip(1) {\n if p > pmax {\n pmax = p;\n imax = i;\n }\n }\n if pmax == Self::ln_zero() {\n Self::ln_zero()\n } else if *pmax == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n \/\/ TODO use sum() once it has been stabilized: .sum::<usize>()\n pmax + LogProb(\n (probs.iter()\n .enumerate()\n .filter_map(|(i, p)| {\n if i == imax {\n None\n } else {\n Some((p - pmax).exp())\n }\n })\n .fold(0.0, |s, e| s + e)\n ).ln_1p()\n )\n }\n }\n }\n\n \/\/\/ Numerically stable addition probabilities in log-space.\n pub fn ln_add_exp(self, other: LogProb) -> LogProb {\n let (mut p0, mut p1) = (self, other);\n if p1 > p0 {\n mem::swap(&mut p0, &mut p1);\n }\n if p0 == Self::ln_zero() {\n Self::ln_zero()\n } else if *p0 == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n p0 + LogProb((p1 - p0).exp().ln_1p())\n }\n }\n\n \/\/\/ Numerically stable subtraction of probabilities in log-space.\n pub fn ln_sub_exp(self, other: LogProb) -> LogProb {\n let (p0, p1) = (self, other);\n assert!(p0 >= p1,\n \"Subtraction would lead to negative probability, which is 
undefined in log space.\");\n if relative_eq!(*p0, *p1) || p0 == Self::ln_zero() {\n \/\/ the first case leads to zero,\n \/\/ in the second case p0 and p1 are -inf, which is fine\n Self::ln_zero()\n } else if *p0 == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n p0 + (p1 - p0).ln_one_minus_exp()\n }\n }\n\n \/\/\/ Calculate the cumulative sum of the given probabilities in a numerically stable way (Durbin 1998).\n pub fn ln_cumsum_exp<I: IntoIterator<Item = LogProb>>(probs: I) -> ScanIter<I> {\n probs.into_iter().scan(Self::ln_zero(), Self::scan_ln_add_exp)\n }\n\n \/\/\/ Integrate numerically stable over given log-space density in the interval [a, b]. Uses the trapezoidal rule with n grid points.\n pub fn ln_trapezoidal_integrate_exp<T, D>(density: &D, a: T, b: T, n: usize) -> LogProb where\n T: Copy + Add<Output=T> + Sub<Output=T> + Div<Output=T> + Mul<Output=T>,\n D: Fn(T) -> LogProb,\n usize: ToFloat<T>,\n f64: From<T>\n {\n let mut probs = linspace(a, b, n).dropping(1).dropping_back(1).map(|v| LogProb(*density(v) + 2.0f64.ln())).collect_vec();\n probs.push(density(a));\n probs.push(density(b));\n let width = f64::from(b - a);\n\n LogProb(*Self::ln_sum_exp(&probs) + width.ln() - (2.0 * (n - 1) as f64).ln())\n }\n\n \/\/\/ Integrate numerically stable over given log-space density in the interval [a, b]. Uses Simpson's rule with n (odd) grid points.\n pub fn ln_simpsons_integrate_exp<T, D>(density: &D, a: T, b: T, n: usize) -> LogProb where\n T: Copy + Add<Output=T> + Sub<Output=T> + Div<Output=T> + Mul<Output=T>,\n D: Fn(T) -> LogProb,\n usize: ToFloat<T>,\n f64: From<T>\n {\n assert!(n % 2 == 1, \"n must be odd\");\n let mut probs = linspace(a, b, n).enumerate().dropping(1).dropping_back(1).map(|(i, v)| {\n let weight = (2 + (i % 2) * 2) as f64;\n println!(\"weight {}\", weight);\n LogProb(*density(v) + weight.ln()) \/\/ factors alter between 2 and 4\n }).collect_vec();\n probs.push(density(a));\n probs.push(density(b));\n println!(\"probs {:?}\", probs);\n let width = f64::from(b - a);\n\n LogProb(*Self::ln_sum_exp(&probs) + width.ln() - ((n - 1) as f64).ln() - 3.0f64.ln())\n }\n\n fn scan_ln_add_exp(s: &mut LogProb, p: LogProb) -> Option<LogProb> {\n *s = s.ln_add_exp(p);\n Some(*s)\n }\n}\n\n\nimpl From<LogProb> for Prob {\n fn from(p: LogProb) -> Prob {\n Prob(p.exp())\n }\n}\n\n\nimpl From<PHREDProb> for Prob {\n fn from(p: PHREDProb) -> Prob {\n Prob(10.0f64.powf(-*p \/ 10.0))\n }\n}\n\n\nimpl From<Prob> for LogProb {\n fn from(p: Prob) -> LogProb {\n LogProb(p.ln())\n }\n}\n\n\nimpl From<PHREDProb> for LogProb {\n fn from(p: PHREDProb) -> LogProb {\n LogProb(*p * PHRED_TO_LOG_FACTOR)\n }\n}\n\n\nimpl From<Prob> for PHREDProb {\n fn from(p: Prob) -> PHREDProb {\n PHREDProb(-10.0 * p.log10())\n }\n}\n\n\nimpl From<LogProb> for PHREDProb {\n fn from(p: LogProb) -> PHREDProb {\n PHREDProb(*p * LOG_TO_PHRED_FACTOR)\n }\n}\n\n\nimpl Default for LogProb {\n fn default() -> LogProb {\n LogProb::ln_zero()\n }\n}\n\n\nimpl Default for PHREDProb {\n fn default() -> PHREDProb {\n PHREDProb::from(Prob(0.0))\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use itertools::Itertools;\n\n #[test]\n fn test_sum() {\n let probs = [LogProb::ln_zero(), LogProb::ln_one(), LogProb::ln_zero()];\n assert_eq!(LogProb::ln_sum_exp(&probs), LogProb::ln_one());\n }\n\n #[test]\n fn test_empty_sum() {\n assert_eq!(LogProb::ln_sum_exp(&[]), LogProb::ln_zero());\n }\n\n #[test]\n fn test_cumsum() {\n let probs = vec![LogProb::ln_zero(), LogProb(0.01f64.ln()), LogProb(0.001f64.ln())];\n 
assert_eq!(LogProb::ln_cumsum_exp(probs).collect_vec(),\n [LogProb::ln_zero(), LogProb(0.01f64.ln()), LogProb(0.011f64.ln())]);\n }\n\n #[test]\n fn test_sub() {\n assert_eq!(LogProb::ln_one().ln_sub_exp(LogProb::ln_one()), LogProb::ln_zero());\n assert_relative_eq!(*LogProb::ln_one().ln_sub_exp(LogProb(0.5f64.ln())), *LogProb(0.5f64.ln()));\n }\n\n #[test]\n fn test_one_minus() {\n assert_eq!(LogProb::ln_zero().ln_one_minus_exp(), LogProb::ln_one());\n assert_eq!(LogProb::ln_one().ln_one_minus_exp(), LogProb::ln_zero());\n }\n\n #[test]\n fn test_trapezoidal_integrate() {\n let density = |_| LogProb(0.1f64.ln());\n let prob = LogProb::ln_trapezoidal_integrate_exp(&density, 0.0, 10.0, 5);\n assert_relative_eq!(*prob, *LogProb::ln_one(), epsilon=0.0000001);\n }\n\n #[test]\n fn test_simpsons_integrate() {\n let density = |_| LogProb(0.1f64.ln());\n let prob = LogProb::ln_simpsons_integrate_exp(&density, 0.0, 10.0, 5);\n assert_relative_eq!(*prob, *LogProb::ln_one(), epsilon=0.0000001);\n }\n}\n<commit_msg>Remove debugging output.<commit_after>\/\/ Copyright 2014-2016 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Handling log-probabilities.\n\npub mod cdf;\n\nuse std::mem;\nuse std::f64;\nuse std::iter;\nuse std::ops::{Add, Sub, Mul, Div};\n\nuse itertools::linspace;\nuse itertools::Itertools;\nuse itertools::misc::ToFloat;\n\n\n\/\/\/ A factor to convert log-probabilities to PHRED-scale (phred = p * `LOG_TO_PHRED_FACTOR`).\nconst LOG_TO_PHRED_FACTOR: f64 = -4.3429448190325175; \/\/ -10 * 1 \/ ln(10)\n\n\n\/\/\/ A factor to convert PHRED-scale to log-probabilities (p = phred * `PHRED_TO_LOG_FACTOR`).\nconst PHRED_TO_LOG_FACTOR: f64 = -0.23025850929940456; \/\/ 1 \/ (-10 * log10(e))\n\n\n\/\/\/ Calculate log(1 - p) with p given in log space without loss of precision as described in\n\/\/\/ http:\/\/cran.r-project.org\/web\/packages\/Rmpfr\/vignettes\/log1mexp-note.pdf.\nfn ln_1m_exp(p: f64) -> f64 {\n assert!(p <= 0.0);\n if p < -0.693 {\n (-p.exp()).ln_1p()\n } else {\n (-p.exp_m1()).ln()\n }\n}\n\ncustom_derive! {\n \/\/\/ A newtype for probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::Prob;\n \/\/\/\n \/\/\/ let p = Prob(0.5);\n \/\/\/ let q = Prob(0.2);\n \/\/\/\n \/\/\/ assert_relative_eq!(*(p + q), *Prob(0.7));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n NewtypeMul(*),\n NewtypeDiv(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n Default,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct Prob(pub f64);\n}\n\n\ncustom_derive! 
{\n \/\/\/ A newtype for log-scale probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::{LogProb, Prob};\n \/\/\/\n \/\/\/ \/\/ convert from probability\n \/\/\/ let p = LogProb::from(Prob(0.5));\n \/\/\/ \/\/ convert manually\n \/\/\/ let q = LogProb(0.2f64.ln());\n \/\/\/ \/\/ obtain zero probability in log-space\n \/\/\/ let o = LogProb::ln_one();\n \/\/\/\n \/\/\/ assert_relative_eq!(*Prob::from(p.ln_add_exp(q) + o), *Prob(0.7));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct LogProb(pub f64);\n}\n\n\ncustom_derive! {\n \/\/\/ A newtype for PHRED-scale probabilities.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ #[macro_use]\n \/\/\/ extern crate approx;\n \/\/\/ # extern crate bio;\n \/\/\/ # fn main() {\n \/\/\/ use bio::stats::{PHREDProb, Prob};\n \/\/\/\n \/\/\/ let p = PHREDProb::from(Prob(0.5));\n \/\/\/\n \/\/\/ assert_relative_eq!(*Prob::from(p), *Prob(0.5));\n \/\/\/ # }\n \/\/\/ ```\n #[derive(\n NewtypeFrom,\n NewtypeDeref,\n NewtypeAdd(*),\n NewtypeSub(*),\n PartialEq,\n PartialOrd,\n Copy,\n Clone,\n Debug,\n RustcDecodable,\n RustcEncodable\n )]\n #[cfg_attr(feature = \"serde_macros\", derive(Serialize, Deserialize))]\n pub struct PHREDProb(pub f64);\n}\n\n\n\/\/\/ Iterator returned by scans over logprobs.\npub type ScanIter<I> = iter::Scan<<I as IntoIterator>::IntoIter, LogProb, fn(&mut LogProb, LogProb) -> Option<LogProb>>;\n\n\nstatic LOGPROB_LN_ZERO: LogProb = LogProb(f64::NEG_INFINITY);\nstatic LOGPROB_LN_ONE: LogProb = LogProb(0.0);\n\n\nimpl LogProb {\n \/\/\/ Log-space representation of Pr=0\n pub fn ln_zero() -> LogProb {\n LOGPROB_LN_ZERO\n }\n\n \/\/\/ Log-space representation of Pr=1\n pub fn ln_one() -> LogProb {\n LOGPROB_LN_ONE\n }\n\n \/\/\/ Numerically stable calculation of 1 - p in log-space.\n pub fn ln_one_minus_exp(&self) -> LogProb {\n LogProb(ln_1m_exp(**self))\n }\n\n \/\/\/ Numerically stable sum of probabilities in log-space.\n pub fn ln_sum_exp(probs: &[LogProb]) -> LogProb {\n if probs.is_empty() {\n Self::ln_zero()\n } else {\n let mut pmax = probs[0];\n let mut imax = 0;\n for (i, &p) in probs.iter().enumerate().skip(1) {\n if p > pmax {\n pmax = p;\n imax = i;\n }\n }\n if pmax == Self::ln_zero() {\n Self::ln_zero()\n } else if *pmax == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n \/\/ TODO use sum() once it has been stabilized: .sum::<usize>()\n pmax + LogProb(\n (probs.iter()\n .enumerate()\n .filter_map(|(i, p)| {\n if i == imax {\n None\n } else {\n Some((p - pmax).exp())\n }\n })\n .fold(0.0, |s, e| s + e)\n ).ln_1p()\n )\n }\n }\n }\n\n \/\/\/ Numerically stable addition probabilities in log-space.\n pub fn ln_add_exp(self, other: LogProb) -> LogProb {\n let (mut p0, mut p1) = (self, other);\n if p1 > p0 {\n mem::swap(&mut p0, &mut p1);\n }\n if p0 == Self::ln_zero() {\n Self::ln_zero()\n } else if *p0 == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n p0 + LogProb((p1 - p0).exp().ln_1p())\n }\n }\n\n \/\/\/ Numerically stable subtraction of probabilities in log-space.\n pub fn ln_sub_exp(self, other: LogProb) -> LogProb {\n let (p0, p1) = (self, other);\n assert!(p0 >= p1,\n \"Subtraction would lead to negative probability, which is 
undefined in log space.\");\n if relative_eq!(*p0, *p1) || p0 == Self::ln_zero() {\n \/\/ the first case leads to zero,\n \/\/ in the second case p0 and p1 are -inf, which is fine\n Self::ln_zero()\n } else if *p0 == f64::INFINITY {\n LogProb(f64::INFINITY)\n } else {\n p0 + (p1 - p0).ln_one_minus_exp()\n }\n }\n\n \/\/\/ Calculate the cumulative sum of the given probabilities in a numerically stable way (Durbin 1998).\n pub fn ln_cumsum_exp<I: IntoIterator<Item = LogProb>>(probs: I) -> ScanIter<I> {\n probs.into_iter().scan(Self::ln_zero(), Self::scan_ln_add_exp)\n }\n\n \/\/\/ Integrate numerically stable over given log-space density in the interval [a, b]. Uses the trapezoidal rule with n grid points.\n pub fn ln_trapezoidal_integrate_exp<T, D>(density: &D, a: T, b: T, n: usize) -> LogProb where\n T: Copy + Add<Output=T> + Sub<Output=T> + Div<Output=T> + Mul<Output=T>,\n D: Fn(T) -> LogProb,\n usize: ToFloat<T>,\n f64: From<T>\n {\n let mut probs = linspace(a, b, n).dropping(1).dropping_back(1).map(|v| LogProb(*density(v) + 2.0f64.ln())).collect_vec();\n probs.push(density(a));\n probs.push(density(b));\n let width = f64::from(b - a);\n\n LogProb(*Self::ln_sum_exp(&probs) + width.ln() - (2.0 * (n - 1) as f64).ln())\n }\n\n \/\/\/ Integrate numerically stable over given log-space density in the interval [a, b]. Uses Simpson's rule with n (odd) grid points.\n pub fn ln_simpsons_integrate_exp<T, D>(density: &D, a: T, b: T, n: usize) -> LogProb where\n T: Copy + Add<Output=T> + Sub<Output=T> + Div<Output=T> + Mul<Output=T>,\n D: Fn(T) -> LogProb,\n usize: ToFloat<T>,\n f64: From<T>\n {\n assert!(n % 2 == 1, \"n must be odd\");\n let mut probs = linspace(a, b, n).enumerate().dropping(1).dropping_back(1).map(|(i, v)| {\n let weight = (2 + (i % 2) * 2) as f64;\n LogProb(*density(v) + weight.ln()) \/\/ factors alter between 2 and 4\n }).collect_vec();\n probs.push(density(a));\n probs.push(density(b));\n let width = f64::from(b - a);\n\n LogProb(*Self::ln_sum_exp(&probs) + width.ln() - ((n - 1) as f64).ln() - 3.0f64.ln())\n }\n\n fn scan_ln_add_exp(s: &mut LogProb, p: LogProb) -> Option<LogProb> {\n *s = s.ln_add_exp(p);\n Some(*s)\n }\n}\n\n\nimpl From<LogProb> for Prob {\n fn from(p: LogProb) -> Prob {\n Prob(p.exp())\n }\n}\n\n\nimpl From<PHREDProb> for Prob {\n fn from(p: PHREDProb) -> Prob {\n Prob(10.0f64.powf(-*p \/ 10.0))\n }\n}\n\n\nimpl From<Prob> for LogProb {\n fn from(p: Prob) -> LogProb {\n LogProb(p.ln())\n }\n}\n\n\nimpl From<PHREDProb> for LogProb {\n fn from(p: PHREDProb) -> LogProb {\n LogProb(*p * PHRED_TO_LOG_FACTOR)\n }\n}\n\n\nimpl From<Prob> for PHREDProb {\n fn from(p: Prob) -> PHREDProb {\n PHREDProb(-10.0 * p.log10())\n }\n}\n\n\nimpl From<LogProb> for PHREDProb {\n fn from(p: LogProb) -> PHREDProb {\n PHREDProb(*p * LOG_TO_PHRED_FACTOR)\n }\n}\n\n\nimpl Default for LogProb {\n fn default() -> LogProb {\n LogProb::ln_zero()\n }\n}\n\n\nimpl Default for PHREDProb {\n fn default() -> PHREDProb {\n PHREDProb::from(Prob(0.0))\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use itertools::Itertools;\n\n #[test]\n fn test_sum() {\n let probs = [LogProb::ln_zero(), LogProb::ln_one(), LogProb::ln_zero()];\n assert_eq!(LogProb::ln_sum_exp(&probs), LogProb::ln_one());\n }\n\n #[test]\n fn test_empty_sum() {\n assert_eq!(LogProb::ln_sum_exp(&[]), LogProb::ln_zero());\n }\n\n #[test]\n fn test_cumsum() {\n let probs = vec![LogProb::ln_zero(), LogProb(0.01f64.ln()), LogProb(0.001f64.ln())];\n assert_eq!(LogProb::ln_cumsum_exp(probs).collect_vec(),\n [LogProb::ln_zero(), 
LogProb(0.01f64.ln()), LogProb(0.011f64.ln())]);\n }\n\n #[test]\n fn test_sub() {\n assert_eq!(LogProb::ln_one().ln_sub_exp(LogProb::ln_one()), LogProb::ln_zero());\n assert_relative_eq!(*LogProb::ln_one().ln_sub_exp(LogProb(0.5f64.ln())), *LogProb(0.5f64.ln()));\n }\n\n #[test]\n fn test_one_minus() {\n assert_eq!(LogProb::ln_zero().ln_one_minus_exp(), LogProb::ln_one());\n assert_eq!(LogProb::ln_one().ln_one_minus_exp(), LogProb::ln_zero());\n }\n\n #[test]\n fn test_trapezoidal_integrate() {\n let density = |_| LogProb(0.1f64.ln());\n let prob = LogProb::ln_trapezoidal_integrate_exp(&density, 0.0, 10.0, 5);\n assert_relative_eq!(*prob, *LogProb::ln_one(), epsilon=0.0000001);\n }\n\n #[test]\n fn test_simpsons_integrate() {\n let density = |_| LogProb(0.1f64.ln());\n let prob = LogProb::ln_simpsons_integrate_exp(&density, 0.0, 10.0, 5);\n assert_relative_eq!(*prob, *LogProb::ln_one(), epsilon=0.0000001);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! The Ref object is a helper over the link functionality, so one is able to create references to\n\/\/! files outside of the imag store.\n\nuse std::path::PathBuf;\nuse std::ops::Deref;\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::Store;\nuse libimagerror::into::IntoError;\n\nuse toml::Value;\n\nuse error::RefErrorKind as REK;\nuse flags::RefFlags;\nuse result::Result;\nuse module_path::ModuleEntryPath;\n\npub struct Ref<'a>(FileLockEntry<'a>);\n\nimpl<'a> Ref<'a> {\n\n \/\/\/ Try to get `si` as Ref object from the store\n pub fn get(store: &'a Store, si: StoreId) -> Result<Ref<'a>> {\n match store.get(si) {\n Err(e) => return Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n Ok(None) => return Err(REK::RefNotInStore.into_error()),\n Ok(Some(fle)) => Ref::read_reference(&fle).map(|_| Ref(fle)),\n }\n }\n\n fn read_reference(fle: &FileLockEntry<'a>) -> Result<PathBuf> {\n match fle.get_header().read(\"ref.reference\") {\n Ok(Some(Value::String(s))) => Ok(PathBuf::from(s)),\n Ok(Some(_)) => Err(REK::HeaderTypeError.into_error()),\n Ok(None) => Err(REK::HeaderFieldMissingError.into_error()),\n Err(e) => Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n }\n }\n\n \/\/\/ Create a Ref object which refers to `pb`\n pub fn create(store: &'a Store, pb: PathBuf, flags: RefFlags) -> Result<Ref<'a>> {\n use std::fs::File;\n use std::io::Read;\n use crypto::sha1::Sha1;\n use crypto::digest::Digest;\n\n if !pb.exists() {\n return Err(REK::RefTargetDoesNotExist.into_error());\n }\n if flags.get_content_hashing() && pb.is_dir() {\n return Err(REK::RefTargetCannotBeHashed.into_error());\n }\n\n let (mut fle, content_hash, permissions, canonical_path) = { \/\/ scope to be able to fold\n try!(File::open(pb.clone())\n .map_err(Box::new)\n .map_err(|e| REK::RefTargetFileCannotBeOpened.into_error_with_cause(e))\n\n \/\/ If we were able to open this file,\n \/\/ we hash the contents of the file and return (file, hash)\n .and_then(|mut file| {\n let opt_contenthash = if flags.get_content_hashing() {\n let mut hasher = Sha1::new();\n let mut s = String::new();\n file.read_to_string(&mut s);\n hasher.input_str(&s[..]);\n Some(hasher.result_str())\n } else {\n None\n };\n\n Ok((file, opt_contenthash))\n })\n\n \/\/ and then we get the permissions if we have to\n \/\/ and return (file, content hash, permissions)\n .and_then(|(file, opt_contenthash)| {\n let opt_permissions = if 
flags.get_permission_tracking() {\n Some(try!(file\n .metadata()\n .map(|md| md.permissions())\n .map_err(Box::new)\n .map_err(|e| REK::RefTargetCannotReadPermissions.into_error_with_cause(e))\n ))\n } else {\n None\n };\n\n Ok((file, opt_contenthash, opt_permissions))\n })\n\n \/\/ and then we try to canonicalize the PathBuf, because we want to store a\n \/\/ canonicalized path\n \/\/ and return (file, content hash, permissions, canonicalized path)\n .and_then(|(file, opt_contenthash, opt_permissions)| {\n pb.canonicalize()\n .map(|can| (file, opt_contenthash, opt_permissions, can))\n \/\/ if PathBuf::canonicalize() failed, build an error from the return value\n .map_err(|e| REK::PathCanonicalizationError.into_error_with_cause(Box::new(e)))\n })\n\n \/\/ and then we hash the canonicalized path\n \/\/ and return (file, content hash, permissions, canonicalized path, path hash)\n .and_then(|(file, opt_contenthash, opt_permissions, can)| {\n let path_hash = try!(Ref::hash_path(&can)\n .map_err(Box::new)\n .map_err(|e| REK::PathHashingError.into_error_with_cause(e))\n );\n\n Ok((file, opt_contenthash, opt_permissions, can, path_hash))\n })\n\n \/\/ and then we convert the PathBuf of the canonicalized path to a String to be able\n \/\/ to save it in the Ref FileLockEntry obj\n \/\/ and return\n \/\/ (file, content hash, permissions, canonicalized path as String, path hash)\n .and_then(|(file, opt_conhash, opt_perm, can, path_hash)| {\n match can.to_str().map(String::from) {\n \/\/ UTF convert error in PathBuf::to_str(),\n None => Err(REK::PathUTF8Error.into_error()),\n Some(can) => Ok((file, opt_conhash, opt_perm, can, path_hash))\n }\n })\n\n \/\/ and then we create the FileLockEntry in the Store\n \/\/ and return (filelockentry, content hash, permissions, canonicalized path)\n .and_then(|(file, opt_conhash, opt_perm, can, path_hash)| {\n let fle = try!(store\n .create(ModuleEntryPath::new(path_hash))\n .map_err(Box::new)\n .map_err(|e| REK::StoreWriteError.into_error_with_cause(e))\n );\n\n Ok((fle, opt_conhash, opt_perm, can))\n })\n )\n };\n\n for tpl in [\n Some((\"ref\", Value::Table(BTreeMap::new()))),\n Some((\"ref.permissions\", Value::Table(BTreeMap::new()))),\n\n Some((\"ref.path\", Value::String(canonical_path))),\n\n content_hash.map(|h| (\"ref.content_hash\", Value::String(h))),\n permissions.map(|p| (\"ref.permissions.ro\", Value::Boolean(p.readonly()))),\n ].into_iter()\n {\n match tpl {\n &Some((ref s, ref v)) => {\n match fle.get_header_mut().insert(s, v.clone()) {\n Ok(false) => {\n let e = REK::HeaderFieldAlreadyExistsError.into_error();\n let e = Box::new(e);\n let e = REK::HeaderFieldWriteError.into_error_with_cause(e);\n return Err(e);\n },\n Err(e) => {\n let e = Box::new(e);\n let e = REK::HeaderFieldWriteError.into_error_with_cause(e);\n return Err(e);\n },\n _ => (),\n }\n }\n &None => {\n debug!(\"Not going to insert.\");\n }\n }\n }\n\n Ok(Ref(fle))\n }\n\n \/\/\/ Creates a Hash from a PathBuf by making the PathBuf absolute and then running a hash\n \/\/\/ algorithm on it\n fn hash_path(pb: &PathBuf) -> Result<String> {\n use std::io::Read;\n use crypto::sha1::Sha1;\n use crypto::digest::Digest;\n\n match pb.to_str() {\n Some(s) => {\n let mut hasher = Sha1::new();\n hasher.input_str(s);\n Ok(hasher.result_str())\n },\n None => return Err(REK::PathUTF8Error.into_error()),\n }\n }\n\n \/\/\/ check whether the pointer the Ref represents still points to a file which exists\n pub fn fs_link_exists(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| 
pathbuf.exists())\n }\n\n \/\/\/ Alias for `r.fs_link_exists() && r.deref().is_file()`\n pub fn is_ref_to_file(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| pathbuf.is_file())\n }\n\n \/\/\/ Alias for `r.fs_link_exists() && r.deref().is_dir()`\n pub fn is_ref_to_dir(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| pathbuf.is_dir())\n }\n\n \/\/\/ Alias for `!Ref::fs_link_exists()`\n pub fn is_dangling(&self) -> Result<bool> {\n self.fs_link_exists().map(|b| !b)\n }\n\n \/\/\/ check whether the pointer the Ref represents is valid\n \/\/\/ This includes:\n \/\/\/ - Hashsum of the file is still the same as stored in the Ref\n \/\/\/ - file permissions are still valid\n pub fn fs_link_valid(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Check whether the file permissions of the referenced file are equal to the stored\n \/\/\/ permissions\n pub fn fs_link_valid_permissions(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Check whether the Hashsum of the referenced file is equal to the stored hashsum\n pub fn fs_link_valid_hash(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Update the Ref by re-checking the file from FS\n \/\/\/ This errors if the file is not present or cannot be read()\n pub fn update_ref(&mut self) -> Result<()> {\n unimplemented!()\n }\n\n \/\/\/ Get the path of the file which is reffered to by this Ref\n pub fn fs_file(&self) -> Result<PathBuf> {\n match self.0.get_header().read(\"ref.path\") {\n Ok(Some(Value::String(ref s))) => Ok(PathBuf::from(s)),\n Ok(Some(_)) => Err(REK::HeaderTypeError.into_error()),\n Ok(None) => Err(REK::HeaderFieldMissingError.into_error()),\n Err(e) => Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n }\n }\n\n \/\/\/ Check whether there is a reference to the file at `pb`\n pub fn exists(store: &Store, pb: PathBuf) -> Result<bool> {\n unimplemented!()\n }\n\n \/\/\/ Re-find a referenced file\n \/\/\/\n \/\/\/ This function tries to re-find a ref by searching all directories in `search_roots` recursively\n \/\/\/ for a file which matches the hash of the Ref `ref`.\n \/\/\/\n \/\/\/ If `search_roots` is `None`, it starts at the filesystem root `\/`.\n \/\/\/\n \/\/\/ # Warning\n \/\/\/\n \/\/\/ This option causes heavy I\/O as it recursively searches the Filesystem.\n pub fn refind(&self, search_roots: Option<Vec<PathBuf>>) -> Option<PathBuf> {\n unimplemented!()\n }\n\n}\n\nimpl<'a> Deref for Ref<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Ref<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n<commit_msg>Extract file-content-hashing functionality to new private function<commit_after>\/\/! The Ref object is a helper over the link functionality, so one is able to create references to\n\/\/! 
files outside of the imag store.\n\nuse std::path::PathBuf;\nuse std::ops::Deref;\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\nuse std::fs::File;\nuse std::io::Read;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::Store;\nuse libimagerror::into::IntoError;\n\nuse toml::Value;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\nuse error::RefErrorKind as REK;\nuse flags::RefFlags;\nuse result::Result;\nuse module_path::ModuleEntryPath;\n\npub struct Ref<'a>(FileLockEntry<'a>);\n\nimpl<'a> Ref<'a> {\n\n \/\/\/ Try to get `si` as Ref object from the store\n pub fn get(store: &'a Store, si: StoreId) -> Result<Ref<'a>> {\n match store.get(si) {\n Err(e) => return Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n Ok(None) => return Err(REK::RefNotInStore.into_error()),\n Ok(Some(fle)) => Ref::read_reference(&fle).map(|_| Ref(fle)),\n }\n }\n\n fn read_reference(fle: &FileLockEntry<'a>) -> Result<PathBuf> {\n match fle.get_header().read(\"ref.reference\") {\n Ok(Some(Value::String(s))) => Ok(PathBuf::from(s)),\n Ok(Some(_)) => Err(REK::HeaderTypeError.into_error()),\n Ok(None) => Err(REK::HeaderFieldMissingError.into_error()),\n Err(e) => Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n }\n }\n\n \/\/\/ Create a Ref object which refers to `pb`\n pub fn create(store: &'a Store, pb: PathBuf, flags: RefFlags) -> Result<Ref<'a>> {\n if !pb.exists() {\n return Err(REK::RefTargetDoesNotExist.into_error());\n }\n if flags.get_content_hashing() && pb.is_dir() {\n return Err(REK::RefTargetCannotBeHashed.into_error());\n }\n\n let (mut fle, content_hash, permissions, canonical_path) = { \/\/ scope to be able to fold\n try!(File::open(pb.clone())\n .map_err(Box::new)\n .map_err(|e| REK::RefTargetFileCannotBeOpened.into_error_with_cause(e))\n\n \/\/ If we were able to open this file,\n \/\/ we hash the contents of the file and return (file, hash)\n .and_then(|mut file| {\n let opt_contenthash = if flags.get_content_hashing() {\n Some(hash_file_contents(&mut file))\n } else {\n None\n };\n\n Ok((file, opt_contenthash))\n })\n\n \/\/ and then we get the permissions if we have to\n \/\/ and return (file, content hash, permissions)\n .and_then(|(file, opt_contenthash)| {\n let opt_permissions = if flags.get_permission_tracking() {\n Some(try!(file\n .metadata()\n .map(|md| md.permissions())\n .map_err(Box::new)\n .map_err(|e| REK::RefTargetCannotReadPermissions.into_error_with_cause(e))\n ))\n } else {\n None\n };\n\n Ok((file, opt_contenthash, opt_permissions))\n })\n\n \/\/ and then we try to canonicalize the PathBuf, because we want to store a\n \/\/ canonicalized path\n \/\/ and return (file, content hash, permissions, canonicalized path)\n .and_then(|(file, opt_contenthash, opt_permissions)| {\n pb.canonicalize()\n .map(|can| (file, opt_contenthash, opt_permissions, can))\n \/\/ if PathBuf::canonicalize() failed, build an error from the return value\n .map_err(|e| REK::PathCanonicalizationError.into_error_with_cause(Box::new(e)))\n })\n\n \/\/ and then we hash the canonicalized path\n \/\/ and return (file, content hash, permissions, canonicalized path, path hash)\n .and_then(|(file, opt_contenthash, opt_permissions, can)| {\n let path_hash = try!(Ref::hash_path(&can)\n .map_err(Box::new)\n .map_err(|e| REK::PathHashingError.into_error_with_cause(e))\n );\n\n Ok((file, opt_contenthash, opt_permissions, can, path_hash))\n })\n\n \/\/ and then we convert the PathBuf 
of the canonicalized path to a String to be able\n \/\/ to save it in the Ref FileLockEntry obj\n \/\/ and return\n \/\/ (file, content hash, permissions, canonicalized path as String, path hash)\n .and_then(|(file, opt_conhash, opt_perm, can, path_hash)| {\n match can.to_str().map(String::from) {\n \/\/ UTF convert error in PathBuf::to_str(),\n None => Err(REK::PathUTF8Error.into_error()),\n Some(can) => Ok((file, opt_conhash, opt_perm, can, path_hash))\n }\n })\n\n \/\/ and then we create the FileLockEntry in the Store\n \/\/ and return (filelockentry, content hash, permissions, canonicalized path)\n .and_then(|(file, opt_conhash, opt_perm, can, path_hash)| {\n let fle = try!(store\n .create(ModuleEntryPath::new(path_hash))\n .map_err(Box::new)\n .map_err(|e| REK::StoreWriteError.into_error_with_cause(e))\n );\n\n Ok((fle, opt_conhash, opt_perm, can))\n })\n )\n };\n\n for tpl in [\n Some((\"ref\", Value::Table(BTreeMap::new()))),\n Some((\"ref.permissions\", Value::Table(BTreeMap::new()))),\n\n Some((\"ref.path\", Value::String(canonical_path))),\n\n content_hash.map(|h| (\"ref.content_hash\", Value::String(h))),\n permissions.map(|p| (\"ref.permissions.ro\", Value::Boolean(p.readonly()))),\n ].into_iter()\n {\n match tpl {\n &Some((ref s, ref v)) => {\n match fle.get_header_mut().insert(s, v.clone()) {\n Ok(false) => {\n let e = REK::HeaderFieldAlreadyExistsError.into_error();\n let e = Box::new(e);\n let e = REK::HeaderFieldWriteError.into_error_with_cause(e);\n return Err(e);\n },\n Err(e) => {\n let e = Box::new(e);\n let e = REK::HeaderFieldWriteError.into_error_with_cause(e);\n return Err(e);\n },\n _ => (),\n }\n }\n &None => {\n debug!(\"Not going to insert.\");\n }\n }\n }\n\n Ok(Ref(fle))\n }\n\n \/\/\/ Creates a Hash from a PathBuf by making the PathBuf absolute and then running a hash\n \/\/\/ algorithm on it\n fn hash_path(pb: &PathBuf) -> Result<String> {\n use std::io::Read;\n use crypto::sha1::Sha1;\n use crypto::digest::Digest;\n\n match pb.to_str() {\n Some(s) => {\n let mut hasher = Sha1::new();\n hasher.input_str(s);\n Ok(hasher.result_str())\n },\n None => return Err(REK::PathUTF8Error.into_error()),\n }\n }\n\n \/\/\/ check whether the pointer the Ref represents still points to a file which exists\n pub fn fs_link_exists(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| pathbuf.exists())\n }\n\n \/\/\/ Alias for `r.fs_link_exists() && r.deref().is_file()`\n pub fn is_ref_to_file(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| pathbuf.is_file())\n }\n\n \/\/\/ Alias for `r.fs_link_exists() && r.deref().is_dir()`\n pub fn is_ref_to_dir(&self) -> Result<bool> {\n self.fs_file().map(|pathbuf| pathbuf.is_dir())\n }\n\n \/\/\/ Alias for `!Ref::fs_link_exists()`\n pub fn is_dangling(&self) -> Result<bool> {\n self.fs_link_exists().map(|b| !b)\n }\n\n \/\/\/ check whether the pointer the Ref represents is valid\n \/\/\/ This includes:\n \/\/\/ - Hashsum of the file is still the same as stored in the Ref\n \/\/\/ - file permissions are still valid\n pub fn fs_link_valid(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Check whether the file permissions of the referenced file are equal to the stored\n \/\/\/ permissions\n pub fn fs_link_valid_permissions(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Check whether the Hashsum of the referenced file is equal to the stored hashsum\n pub fn fs_link_valid_hash(&self) -> bool {\n unimplemented!()\n }\n\n \/\/\/ Update the Ref by re-checking the file from FS\n \/\/\/ This errors if the file is not present or 
cannot be read()\n pub fn update_ref(&mut self) -> Result<()> {\n unimplemented!()\n }\n\n \/\/\/ Get the path of the file which is reffered to by this Ref\n pub fn fs_file(&self) -> Result<PathBuf> {\n match self.0.get_header().read(\"ref.path\") {\n Ok(Some(Value::String(ref s))) => Ok(PathBuf::from(s)),\n Ok(Some(_)) => Err(REK::HeaderTypeError.into_error()),\n Ok(None) => Err(REK::HeaderFieldMissingError.into_error()),\n Err(e) => Err(REK::StoreReadError.into_error_with_cause(Box::new(e))),\n }\n }\n\n \/\/\/ Check whether there is a reference to the file at `pb`\n pub fn exists(store: &Store, pb: PathBuf) -> Result<bool> {\n unimplemented!()\n }\n\n \/\/\/ Re-find a referenced file\n \/\/\/\n \/\/\/ This function tries to re-find a ref by searching all directories in `search_roots` recursively\n \/\/\/ for a file which matches the hash of the Ref `ref`.\n \/\/\/\n \/\/\/ If `search_roots` is `None`, it starts at the filesystem root `\/`.\n \/\/\/\n \/\/\/ # Warning\n \/\/\/\n \/\/\/ This option causes heavy I\/O as it recursively searches the Filesystem.\n pub fn refind(&self, search_roots: Option<Vec<PathBuf>>) -> Option<PathBuf> {\n unimplemented!()\n }\n\n}\n\nimpl<'a> Deref for Ref<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Ref<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\nfn hash_file_contents(f: &mut File) -> String {\n let mut hasher = Sha1::new();\n let mut s = String::new();\n f.read_to_string(&mut s);\n hasher.input_str(&s[..]);\n hasher.result_str()\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse libimagerror::into::IntoError;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n base: Option<PathBuf>,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn new(base: Option<PathBuf>, id: PathBuf) -> StoreId {\n StoreId { base: base, id: id }\n }\n\n pub fn storified(self, store: &Store) -> StoreId {\n StoreId {\n base: Some(store.path().clone()),\n id: self.id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n pub fn to_str(&self) -> Result<String> {\n if self.base.is_some() {\n let mut base = self.base.as_ref().cloned().unwrap();\n base.push(self.id.clone());\n base\n } else {\n self.id.clone()\n }\n .to_str()\n .map(String::from)\n .ok_or(SEK::StoreIdHandlingError.into_error())\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.base.unwrap_or(PathBuf::from(\"\/\"));\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) 
-> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::new(None, self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<commit_msg>Add check in StoreId::new() whether the local part is absolute<commit_after>use std::path::PathBuf;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse libimagerror::into::IntoError;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n base: Option<PathBuf>,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn new(base: Option<PathBuf>, id: PathBuf) -> Result<StoreId> {\n if id.is_absolute() {\n Err(SEK::StoreIdLocalPartAbsoluteError.into_error())\n } else {\n Ok(StoreId {\n base: base,\n id: id\n })\n }\n }\n\n pub fn storified(self, store: &Store) -> StoreId {\n StoreId {\n base: 
Some(store.path().clone()),\n id: self.id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n pub fn to_str(&self) -> Result<String> {\n if self.base.is_some() {\n let mut base = self.base.as_ref().cloned().unwrap();\n base.push(self.id.clone());\n base\n } else {\n self.id.clone()\n }\n .to_str()\n .map(String::from)\n .ok_or(SEK::StoreIdHandlingError.into_error())\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.base.unwrap_or(PathBuf::from(\"\/\"));\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::new(None, self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Removed extraneous extern function \\(al_remove_config_section\\) not available before Allegro v5.1.5<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Check that the hash of `foo` doesn't change just because we ordered\n\/\/ the nested items (or even added new ones).\n\n\/\/ revisions: rpass1 rpass2\n\n#![feature(rustc_attrs)]\n\n#[cfg(rpass1)]\nfn foo() {\n fn bar() { }\n fn baz() { }\n}\n\n#[cfg(rpass2)]\n#[rustc_clean(label=\"Hir\", cfg=\"rpass2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"rpass2\")]\nfn foo() {\n #[rustc_clean(label=\"Hir\", cfg=\"rpass2\")]\n #[rustc_clean(label=\"HirBody\", cfg=\"rpass2\")]\n fn baz() { } \/\/ order is different...\n\n #[rustc_clean(label=\"Hir\", cfg=\"rpass2\")]\n #[rustc_clean(label=\"HirBody\", cfg=\"rpass2\")]\n fn bar() { } \/\/ but that doesn't matter.\n\n fn bap() { } \/\/ neither does adding a new item\n}\n\nfn main() { }\n<commit_msg>incr.comp.: Adapt nested_items test to new HIR hashing rules.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Check that the hash of `foo` doesn't change just because we ordered\n\/\/ the nested items (or even added new ones).\n\n\/\/ revisions: cfail1 cfail2\n\/\/ must-compile-successfully\n\n#![crate_type = \"rlib\"]\n#![feature(rustc_attrs)]\n\n#[cfg(cfail1)]\npub fn foo() {\n pub fn bar() { }\n pub fn baz() { }\n}\n\n#[cfg(cfail2)]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\npub fn foo() {\n #[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"HirBody\", cfg=\"cfail2\")]\n pub fn baz() { } \/\/ order is different...\n\n #[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"HirBody\", cfg=\"cfail2\")]\n pub fn bar() { } \/\/ but that doesn't matter.\n\n pub fn bap() { } \/\/ neither does adding a new item\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add delete().<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::Bindings::EventBinding::EventMethods;\nuse dom::bindings::codegen::Bindings::ErrorEventBinding;\nuse dom::bindings::codegen::Bindings::ErrorEventBinding::ErrorEventMethods;\nuse dom::bindings::codegen::InheritTypes::{EventCast, ErrorEventDerived};\nuse dom::bindings::error::Fallible;\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse js::jsapi::JSContext;\nuse dom::bindings::trace::JSTraceable;\n\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::event::{Event, EventTypeId, ErrorEventTypeId};\nuse servo_util::str::DOMString;\n\nuse dom::bindings::cell::DOMRefCell;\nuse std::cell::{Cell};\nuse js::jsval::{JSVal, NullValue};\n\n#[dom_struct]\npub struct ErrorEvent {\n event: Event,\n message: DOMRefCell<DOMString>,\n filename: DOMRefCell<DOMString>,\n lineno: Cell<u32>,\n colno: Cell<u32>,\n error: Cell<JSVal>\n}\n\nimpl ErrorEventDerived for Event {\n fn is_errorevent(&self) -> bool {\n *self.type_id() == ErrorEventTypeId\n }\n}\n\nimpl ErrorEvent {\n fn new_inherited(type_id: EventTypeId) -> ErrorEvent {\n ErrorEvent {\n event: Event::new_inherited(type_id),\n message: DOMRefCell::new(\"\".to_string()),\n filename: DOMRefCell::new(\"\".to_string()),\n lineno: Cell::new(0),\n colno: Cell::new(0),\n error: Cell::new(NullValue())\n }\n }\n\n pub fn new_uninitialized(global: &GlobalRef) -> Temporary<ErrorEvent> {\n reflect_dom_object(box ErrorEvent::new_inherited(ErrorEventTypeId),\n *global,\n ErrorEventBinding::Wrap)\n }\n\n pub fn new(global: &GlobalRef,\n type_: DOMString,\n can_bubble: bool,\n cancelable: bool,\n message: DOMString,\n filename: DOMString,\n lineno: u32,\n colno: u32,\n error: JSVal) -> Temporary<ErrorEvent> {\n let ev = ErrorEvent::new_uninitialized(global).root();\n let event: JSRef<Event> = EventCast::from_ref(*ev);\n event.InitEvent(type_, can_bubble, cancelable);\n *ev.message.borrow_mut() = message;\n *ev.filename.borrow_mut() = filename;\n ev.lineno.set(lineno);\n ev.colno.set(colno);\n ev.error.set(error);\n Temporary::from_rooted(*ev)\n }\n\n pub fn Constructor(global: &GlobalRef,\n type_: DOMString,\n init: &ErrorEventBinding::ErrorEventInit) -> Fallible<Temporary<ErrorEvent>>{\n let msg = match init.message.as_ref() {\n Some(message) => message.clone(),\n None => \"\".to_string(),\n };\n\n let file_name = match init.filename.as_ref() {\n None => \"\".into_string(),\n Some(filename) => filename.clone(),\n };\n\n let line_num = init.lineno.unwrap_or(0);\n\n let col_num = init.colno.unwrap_or(0);\n\n let event = ErrorEvent::new(global, type_,\n init.parent.bubbles, init.parent.cancelable,\n msg, file_name,\n line_num, col_num, init.error);\n Ok(event)\n }\n\n}\n\nimpl<'a> ErrorEventMethods for JSRef<'a, ErrorEvent> {\n fn Lineno(self) -> u32 {\n self.lineno.get()\n }\n\n fn Colno(self) -> u32 {\n self.colno.get()\n }\n\n fn Message(self) -> DOMString {\n self.message.borrow().clone()\n }\n\n fn Filename(self) -> DOMString {\n self.filename.borrow().clone()\n }\n\n fn Error(self, _cx: *mut JSContext) -> JSVal {\n self.error.get()\n }\n\n}\n\nimpl Reflectable for ErrorEvent {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.event.reflector()\n }\n}\n<commit_msg>auto merge of #4372 : thiagopnts\/servo\/errorevent, r=Ms2ger<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::Bindings::EventBinding::EventMethods;\nuse dom::bindings::codegen::Bindings::ErrorEventBinding;\nuse dom::bindings::codegen::Bindings::ErrorEventBinding::ErrorEventMethods;\nuse dom::bindings::codegen::InheritTypes::{EventCast, ErrorEventDerived};\nuse dom::bindings::error::Fallible;\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse js::jsapi::JSContext;\nuse dom::bindings::trace::JSTraceable;\n\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::event::{Event, EventTypeId, ErrorEventTypeId, EventBubbles, Bubbles, DoesNotBubble, EventCancelable, Cancelable, NotCancelable};\nuse servo_util::str::DOMString;\n\nuse dom::bindings::cell::DOMRefCell;\nuse std::cell::{Cell};\nuse js::jsval::{JSVal, NullValue};\n\n#[dom_struct]\npub struct ErrorEvent {\n event: Event,\n message: DOMRefCell<DOMString>,\n filename: DOMRefCell<DOMString>,\n lineno: Cell<u32>,\n colno: Cell<u32>,\n error: Cell<JSVal>\n}\n\nimpl ErrorEventDerived for Event {\n fn is_errorevent(&self) -> bool {\n *self.type_id() == ErrorEventTypeId\n }\n}\n\nimpl ErrorEvent {\n fn new_inherited(type_id: EventTypeId) -> ErrorEvent {\n ErrorEvent {\n event: Event::new_inherited(type_id),\n message: DOMRefCell::new(\"\".to_string()),\n filename: DOMRefCell::new(\"\".to_string()),\n lineno: Cell::new(0),\n colno: Cell::new(0),\n error: Cell::new(NullValue())\n }\n }\n\n pub fn new_uninitialized(global: &GlobalRef) -> Temporary<ErrorEvent> {\n reflect_dom_object(box ErrorEvent::new_inherited(ErrorEventTypeId),\n *global,\n ErrorEventBinding::Wrap)\n }\n\n pub fn new(global: &GlobalRef,\n type_: DOMString,\n bubbles: EventBubbles,\n cancelable: EventCancelable,\n message: DOMString,\n filename: DOMString,\n lineno: u32,\n colno: u32,\n error: JSVal) -> Temporary<ErrorEvent> {\n let ev = ErrorEvent::new_uninitialized(global).root();\n let event: JSRef<Event> = EventCast::from_ref(*ev);\n event.InitEvent(type_, bubbles == Bubbles, cancelable == Cancelable);\n *ev.message.borrow_mut() = message;\n *ev.filename.borrow_mut() = filename;\n ev.lineno.set(lineno);\n ev.colno.set(colno);\n ev.error.set(error);\n Temporary::from_rooted(*ev)\n }\n\n pub fn Constructor(global: &GlobalRef,\n type_: DOMString,\n init: &ErrorEventBinding::ErrorEventInit) -> Fallible<Temporary<ErrorEvent>>{\n let msg = match init.message.as_ref() {\n Some(message) => message.clone(),\n None => \"\".to_string(),\n };\n\n let file_name = match init.filename.as_ref() {\n None => \"\".into_string(),\n Some(filename) => filename.clone(),\n };\n\n let line_num = init.lineno.unwrap_or(0);\n\n let col_num = init.colno.unwrap_or(0);\n\n let bubbles = if init.parent.bubbles { Bubbles } else { DoesNotBubble };\n\n let cancelable = if init.parent.cancelable { Cancelable } else { NotCancelable };\n\n let event = ErrorEvent::new(global, type_,\n bubbles, cancelable,\n msg, file_name,\n line_num, col_num, init.error);\n Ok(event)\n }\n\n}\n\nimpl<'a> ErrorEventMethods for JSRef<'a, ErrorEvent> {\n fn Lineno(self) -> u32 {\n self.lineno.get()\n }\n\n fn Colno(self) -> u32 {\n self.colno.get()\n }\n\n fn Message(self) -> DOMString {\n self.message.borrow().clone()\n }\n\n fn Filename(self) -> DOMString {\n self.filename.borrow().clone()\n }\n\n fn Error(self, _cx: *mut JSContext) -> JSVal {\n self.error.get()\n }\n\n}\n\nimpl Reflectable for ErrorEvent {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.event.reflector()\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Congratulations! I have officially written a Rust program. That makes me a Rust programmer<commit_after>fn main(){\n \/\/ println! is a macro\n println!(\"Hello, world!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added metric tests<commit_after>extern crate rustyprefetch;\nuse rustyprefetch::librp::metrics;\nuse rustyprefetch::librp::utils;\n\n#[test]\nfn metric_v17_test() {\n let v17_metric_buffer: &[u8] = &[\n 0x00,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x00,0x00,\n 0x02,0x00,0x00,0x00\n ];\n\n let v17_metric_entry = match metrics::MetricEntryV17::new(v17_metric_buffer){\n Ok(v17_metric_entry) => v17_metric_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v17_metric_entry.tracechain_index, 0);\n assert_eq!(v17_metric_entry.tracechain_count, 57);\n assert_eq!(v17_metric_entry.filename_offset, 0);\n assert_eq!(v17_metric_entry.filename_length, 50);\n assert_eq!(v17_metric_entry.unknown3, 2);\n}\n\n#[test]\nfn metric_v26_test() {\n let v26_metric_buffer: &[u8] = &[\n 0x00,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n 0x51,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xEA,0xD0,0x02,0x00,0x00,0x00,0x09,0x00\n ];\n\n let v26_metric_entry = match metrics::MetricEntryV26::new(v26_metric_buffer){\n Ok(v26_metric_entry) => v26_metric_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v26_metric_entry.tracechain_index, 0);\n assert_eq!(v26_metric_entry.tracechain_count, 297);\n assert_eq!(v26_metric_entry.unknown4, 33);\n assert_eq!(v26_metric_entry.filename_offset, 0);\n assert_eq!(v26_metric_entry.filename_length, 81);\n assert_eq!(v26_metric_entry.unknown3, 512);\n assert_eq!(v26_metric_entry.file_reference, 2533274790580458);\n}\n\n#[test]\nfn metric_v30_test() {\n let v30_metric_buffer: &[u8] = &[\n 0x00,0x00,0x00,0x00,0x2C,0x00,0x00,0x00,0x2A,0x00,0x00,0x00,0x00,0x00,0x00,0x00,\n 0x52,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x5B,0x3C,0x00,0x00,0x00,0x00,0x04,0x01\n ];\n\n let v30_metric_entry = match metrics::MetricEntryV30::new(v30_metric_buffer){\n Ok(v30_metric_entry) => v30_metric_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v30_metric_entry.tracechain_index, 0);\n assert_eq!(v30_metric_entry.tracechain_count, 44);\n assert_eq!(v30_metric_entry.unknown4, 42);\n assert_eq!(v30_metric_entry.filename_offset, 0);\n assert_eq!(v30_metric_entry.filename_length, 82);\n assert_eq!(v30_metric_entry.unknown3, 258);\n assert_eq!(v30_metric_entry.file_reference, 73183493944786011);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: Send More Money.<commit_after>\/\/! Send More Money.\n\/\/!\n\/\/! 
https:\/\/en.wikipedia.org\/wiki\/Verbal_arithmetic\n\nextern crate puzzle_solver;\n\nuse puzzle_solver::{Puzzle,Solution,VarToken};\n\nfn make_send_more_money() -> (Puzzle, Vec<VarToken>) {\n let mut sys = Puzzle::new();\n let vars = sys.new_vars_with_candidates_1d(8, &[0,1,2,3,4,5,6,7,8,9]);\n let (s, e, n, d) = (vars[0], vars[1], vars[2], vars[3]);\n let (m, o, r, y) = (vars[4], vars[5], vars[6], vars[7]);\n\n sys.remove_candidates(s, &[0]);\n sys.remove_candidates(m, &[0]);\n\n sys.all_different(&vars);\n\n let send = 1000 * s + 100 * e + 10 * n + d;\n let more = 1000 * m + 100 * o + 10 * r + e;\n let money = 10000 * m + 1000 * o + 100 * n + 10 * e + y;\n sys.equals(send + more, money);\n\n (sys, vars)\n}\n\nfn print_send_more_money(dict: &Solution, vars: &Vec<VarToken>) {\n let (s, e, n, d) = (vars[0], vars[1], vars[2], vars[3]);\n let (m, o, r, y) = (vars[4], vars[5], vars[6], vars[7]);\n\n println!(\" {} {} {} {}\", dict[s], dict[e], dict[n], dict[d]);\n println!(\" + {} {} {} {}\", dict[m], dict[o], dict[r], dict[e]);\n println!(\"----------\");\n println!(\" {} {} {} {} {}\", dict[m], dict[o], dict[n], dict[e], dict[y]);\n}\n\nfn verify_send_more_money(dict: &Solution, vars: &Vec<VarToken>) {\n let (s, e, n, d) = (vars[0], vars[1], vars[2], vars[3]);\n let (m, o, r, y) = (vars[4], vars[5], vars[6], vars[7]);\n\n assert_eq!(dict[o], 0);\n assert_eq!(dict[m], 1);\n assert_eq!(dict[y], 2);\n assert_eq!(dict[e], 5);\n assert_eq!(dict[n], 6);\n assert_eq!(dict[d], 7);\n assert_eq!(dict[r], 8);\n assert_eq!(dict[s], 9);\n}\n\n#[test]\nfn sendmoremoney_carry() {\n let carry = [0,1];\n\n let (mut sys, vars) = make_send_more_money();\n let (s, e, n, d) = (vars[0], vars[1], vars[2], vars[3]);\n let (m, o, r, y) = (vars[4], vars[5], vars[6], vars[7]);\n let c1 = sys.new_var_with_candidates(&carry);\n let c2 = sys.new_var_with_candidates(&carry);\n let c3 = sys.new_var_with_candidates(&carry);\n sys.intersect_candidates(m, &carry); \/\/ c4 == m.\n\n sys.equals( d + e, 10 * c1 + y);\n sys.equals(c1 + n + r, 10 * c2 + e);\n sys.equals(c2 + e + o, 10 * c3 + n);\n sys.equals(c3 + s + m, 10 * m + o);\n\n let dict = sys.solve_unique().expect(\"solution\");\n println!(\"sendmoremoney_carry: {} guesses\", sys.num_guesses());\n print_send_more_money(&dict, &vars);\n verify_send_more_money(&dict, &vars);\n}\n\n\/*\n#[test]\nfn sendmoremoney_naive() {\n let (mut sys, vars) = make_send_more_money();\n let dict = sys.solve_unique().expect(\"solution\");\n println!(\"sendmoremoney_naive: {} guesses\", sys.num_guesses());\n print_send_more_money(&dict, &vars);\n verify_send_more_money(&dict, &vars);\n}\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>WIP: OHCI<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for rust-lang\/rust#56327.<commit_after>\/\/ compile-pass\n\n\/\/ rust-lang\/rust#56327: Some occurrences of `dyn` within a macro are\n\/\/ not instances of identifiers, and thus should *not* be caught by the\n\/\/ keyword_ident lint.\n\/\/\n\/\/ Otherwise, rustfix replaces the type `Box<dyn Drop>` with\n\/\/ `Box<r#dyn Drop>`, which is injecting a bug rather than fixing\n\/\/ anything.\n\n#![deny(rust_2018_compatibility)]\n\nmacro_rules! foo {\n () => {\n fn generated_foo() {\n let _x: Box<dyn Drop>;\n }\n }\n}\n\nfoo!();\n\nfn main() {\n generated_foo();\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::audionode::MAX_CHANNEL_COUNT;\nuse dom::bindings::cell::DomRefCell;\nuse dom::bindings::codegen::Bindings::AudioBufferBinding::{self, AudioBufferMethods, AudioBufferOptions};\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::num::Finite;\nuse dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};\nuse dom::bindings::root::DomRoot;\nuse dom::window::Window;\nuse dom_struct::dom_struct;\nuse js::jsapi::{DetachDataDisposition, Heap, JSContext, JSObject, JS_DetachArrayBuffer};\nuse js::rust::CustomAutoRooterGuard;\nuse js::typedarray::{CreateWith, Float32Array};\nuse servo_media::audio::buffer_source_node::AudioBuffer as ServoMediaAudioBuffer;\nuse std::cmp::min;\nuse std::ptr::{self, NonNull};\nuse std::mem;\n\ntype JSAudioChannel = Heap<*mut JSObject>;\n\n#[dom_struct]\npub struct AudioBuffer {\n reflector_: Reflector,\n js_channels: DomRefCell<Vec<JSAudioChannel>>,\n #[ignore_malloc_size_of = \"servo_media\"]\n shared_channels: DomRefCell<ServoMediaAudioBuffer>,\n sample_rate: f32,\n length: u32,\n duration: f64,\n number_of_channels: u32,\n}\n\nimpl AudioBuffer {\n #[allow(unrooted_must_root)]\n #[allow(unsafe_code)]\n pub fn new_inherited(\n global: &Window,\n number_of_channels: u32,\n length: u32,\n sample_rate: f32,\n initial_data: Option<&[f32]>,\n ) -> AudioBuffer {\n let cx = global.get_cx();\n rooted_vec!(let mut js_channels_);\n for channel in 0..number_of_channels {\n rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());\n let offset = (channel * length) as usize;\n match initial_data {\n Some(data) => {\n let _ = unsafe {\n Float32Array::create(\n cx,\n CreateWith::Slice(&data[offset..offset + (length as usize) - 1]),\n array.handle_mut(),\n )\n };\n },\n None => {\n let _ = unsafe {\n Float32Array::create(\n cx,\n CreateWith::Slice(&vec![0.; length as usize]),\n array.handle_mut(),\n )\n };\n }\n }\n let js_channel = Heap::default();\n js_channel.set(array.get());\n js_channels_.push(js_channel);\n }\n let js_channels = DomRefCell::new(Vec::new());\n mem::swap(&mut *js_channels.borrow_mut(), &mut *js_channels_);\n AudioBuffer {\n reflector_: Reflector::new(),\n js_channels,\n shared_channels: DomRefCell::new(ServoMediaAudioBuffer::new(\n number_of_channels as u8,\n length as usize,\n )),\n sample_rate,\n length,\n duration: length as f64 \/ sample_rate as f64,\n number_of_channels,\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(\n global: &Window,\n number_of_channels: u32,\n length: u32,\n sample_rate: f32,\n initial_data: Option<&[f32]>,\n ) -> DomRoot<AudioBuffer> {\n let buffer = AudioBuffer::new_inherited(\n global,\n number_of_channels,\n length,\n sample_rate,\n initial_data,\n );\n reflect_dom_object(Box::new(buffer), global, AudioBufferBinding::Wrap)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-audiobuffer\n pub fn Constructor(\n window: &Window,\n options: &AudioBufferOptions,\n ) -> Fallible<DomRoot<AudioBuffer>> {\n if options.numberOfChannels > MAX_CHANNEL_COUNT {\n return Err(Error::NotSupported);\n }\n Ok(AudioBuffer::new(\n window,\n options.numberOfChannels,\n options.length,\n *options.sampleRate,\n None,\n ))\n }\n\n #[allow(unsafe_code)]\n unsafe fn restore_js_channel_data(&self, cx: *mut JSContext) -> bool {\n for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {\n if !channel.get().is_null() {\n \/\/ Already have data in JS array.\n 
continue;\n }\n\n \/\/ Move the channel data from shared_channels to js_channels.\n rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());\n let shared_channel = (*self.shared_channels.borrow_mut()).buffers.remove(i);\n if Float32Array::create(cx, CreateWith::Slice(&shared_channel), array.handle_mut())\n .is_err()\n {\n return false;\n }\n channel.set(array.get());\n }\n\n true\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#acquire-the-content\n #[allow(unsafe_code)]\n pub fn acquire_contents(&self) -> Option<ServoMediaAudioBuffer> {\n let cx = self.global().get_cx();\n for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {\n \/\/ Step 1.\n if channel.get().is_null() {\n return None;\n }\n\n \/\/ Step 2.\n let channel_data = unsafe {\n typedarray!(in(cx) let array: Float32Array = channel.get());\n if let Ok(array) = array {\n let data = array.to_vec();\n let _ = JS_DetachArrayBuffer(cx, channel.handle(), DetachDataDisposition::KeepData);\n data\n } else {\n return None;\n }\n };\n\n channel.set(ptr::null_mut());\n\n \/\/ Step 3.\n (*self.shared_channels.borrow_mut()).buffers[i] = channel_data;\n\n \/\/ Step 4 will complete turning shared_channels\n \/\/ data into js_channels ArrayBuffers in restore_js_channel_data.\n }\n\n self.js_channels.borrow_mut().clear();\n\n Some((*self.shared_channels.borrow()).clone())\n }\n}\n\nimpl AudioBufferMethods for AudioBuffer {\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-samplerate\n fn SampleRate(&self) -> Finite<f32> {\n Finite::wrap(self.sample_rate)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-length\n fn Length(&self) -> u32 {\n self.length\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-duration\n fn Duration(&self) -> Finite<f64> {\n Finite::wrap(self.duration)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-numberofchannels\n fn NumberOfChannels(&self) -> u32 {\n self.number_of_channels\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-getchanneldata\n #[allow(unsafe_code)]\n unsafe fn GetChannelData(\n &self,\n cx: *mut JSContext,\n channel: u32,\n ) -> Fallible<NonNull<JSObject>> {\n if channel >= self.number_of_channels {\n return Err(Error::IndexSize);\n }\n\n if !self.restore_js_channel_data(cx) {\n return Err(Error::JSFailed);\n }\n\n Ok(NonNull::new_unchecked(\n self.js_channels.borrow()[channel as usize].get(),\n ))\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-copyfromchannel\n #[allow(unsafe_code)]\n fn CopyFromChannel(\n &self,\n mut destination: CustomAutoRooterGuard<Float32Array>,\n channel_number: u32,\n start_in_channel: u32,\n ) -> Fallible<()> {\n if channel_number >= self.number_of_channels || start_in_channel > self.length {\n return Err(Error::IndexSize);\n }\n\n let bytes_to_copy = min(self.length - start_in_channel, destination.len() as u32) as usize;\n let cx = self.global().get_cx();\n let channel_number = channel_number as usize;\n let offset = start_in_channel as usize;\n let mut dest = Vec::with_capacity(destination.len());\n\n \/\/ We either copy form js_channels or shared_channels.\n\n let js_channel = self.js_channels.borrow()[channel_number].get();\n if !js_channel.is_null() {\n typedarray!(in(cx) let array: Float32Array = js_channel);\n if let Ok(array) = array {\n let data = unsafe { array.as_slice() };\n dest.extend_from_slice(&data[offset..offset + bytes_to_copy]);\n return Ok(());\n }\n }\n\n if let 
Some(shared_channel) = self.shared_channels.borrow().buffers.get(channel_number) {\n dest.extend_from_slice(&shared_channel.as_slice()[offset..offset + bytes_to_copy]);\n }\n\n unsafe {\n destination.update(&dest);\n }\n\n Ok(())\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-copytochannel\n #[allow(unsafe_code)]\n fn CopyToChannel(\n &self,\n source: CustomAutoRooterGuard<Float32Array>,\n channel_number: u32,\n start_in_channel: u32,\n ) -> Fallible<()> {\n if channel_number >= self.number_of_channels || start_in_channel > (source.len() as u32) {\n return Err(Error::IndexSize);\n }\n\n let cx = self.global().get_cx();\n if unsafe { !self.restore_js_channel_data(cx) } {\n return Err(Error::JSFailed);\n }\n\n let js_channel = self.js_channels.borrow()[channel_number as usize].get();\n if js_channel.is_null() {\n \/\/ The array buffer was detached.\n return Err(Error::IndexSize);\n }\n\n typedarray!(in(cx) let array: Float32Array = js_channel);\n if let Ok(mut array) = array {\n let bytes_to_copy = min(self.length - start_in_channel, source.len() as u32) as usize;\n let offset = start_in_channel as usize;\n unsafe {\n array.update(&source.as_slice()[offset..offset + bytes_to_copy]);\n }\n } else {\n return Err(Error::IndexSize);\n }\n\n Ok(())\n }\n}\n<commit_msg>Enter compartment during AudioBuffer creation<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::audionode::MAX_CHANNEL_COUNT;\nuse dom::bindings::cell::DomRefCell;\nuse dom::bindings::codegen::Bindings::AudioBufferBinding::{self, AudioBufferMethods, AudioBufferOptions};\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::num::Finite;\nuse dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};\nuse dom::bindings::root::DomRoot;\nuse dom::window::Window;\nuse dom_struct::dom_struct;\nuse js::jsapi::{DetachDataDisposition, Heap, JSAutoCompartment, JSContext, JSObject, JS_DetachArrayBuffer};\nuse js::rust::CustomAutoRooterGuard;\nuse js::typedarray::{CreateWith, Float32Array};\nuse servo_media::audio::buffer_source_node::AudioBuffer as ServoMediaAudioBuffer;\nuse std::cmp::min;\nuse std::ptr::{self, NonNull};\nuse std::mem;\n\ntype JSAudioChannel = Heap<*mut JSObject>;\n\n#[dom_struct]\npub struct AudioBuffer {\n reflector_: Reflector,\n js_channels: DomRefCell<Vec<JSAudioChannel>>,\n #[ignore_malloc_size_of = \"servo_media\"]\n shared_channels: DomRefCell<ServoMediaAudioBuffer>,\n sample_rate: f32,\n length: u32,\n duration: f64,\n number_of_channels: u32,\n}\n\nimpl AudioBuffer {\n #[allow(unrooted_must_root)]\n #[allow(unsafe_code)]\n pub fn new_inherited(\n global: &Window,\n number_of_channels: u32,\n length: u32,\n sample_rate: f32,\n initial_data: Option<&[f32]>,\n ) -> AudioBuffer {\n let cx = global.get_cx();\n let _ac = JSAutoCompartment::new(cx, global.reflector().get_jsobject().get());\n rooted_vec!(let mut js_channels_);\n for channel in 0..number_of_channels {\n rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());\n let offset = (channel * length) as usize;\n match initial_data {\n Some(data) => {\n let _ = unsafe {\n Float32Array::create(\n cx,\n CreateWith::Slice(&data[offset..offset + (length as usize) - 1]),\n array.handle_mut(),\n )\n };\n },\n None => {\n let _ = unsafe {\n Float32Array::create(\n cx,\n CreateWith::Slice(&vec![0.; length as usize]),\n 
array.handle_mut(),\n )\n };\n }\n }\n let js_channel = Heap::default();\n js_channel.set(array.get());\n js_channels_.push(js_channel);\n }\n let js_channels = DomRefCell::new(Vec::new());\n mem::swap(&mut *js_channels.borrow_mut(), &mut *js_channels_);\n AudioBuffer {\n reflector_: Reflector::new(),\n js_channels,\n shared_channels: DomRefCell::new(ServoMediaAudioBuffer::new(\n number_of_channels as u8,\n length as usize,\n )),\n sample_rate,\n length,\n duration: length as f64 \/ sample_rate as f64,\n number_of_channels,\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(\n global: &Window,\n number_of_channels: u32,\n length: u32,\n sample_rate: f32,\n initial_data: Option<&[f32]>,\n ) -> DomRoot<AudioBuffer> {\n let buffer = AudioBuffer::new_inherited(\n global,\n number_of_channels,\n length,\n sample_rate,\n initial_data,\n );\n reflect_dom_object(Box::new(buffer), global, AudioBufferBinding::Wrap)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-audiobuffer\n pub fn Constructor(\n window: &Window,\n options: &AudioBufferOptions,\n ) -> Fallible<DomRoot<AudioBuffer>> {\n if options.numberOfChannels > MAX_CHANNEL_COUNT {\n return Err(Error::NotSupported);\n }\n Ok(AudioBuffer::new(\n window,\n options.numberOfChannels,\n options.length,\n *options.sampleRate,\n None,\n ))\n }\n\n #[allow(unsafe_code)]\n unsafe fn restore_js_channel_data(&self, cx: *mut JSContext) -> bool {\n for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {\n if !channel.get().is_null() {\n \/\/ Already have data in JS array.\n continue;\n }\n\n \/\/ Move the channel data from shared_channels to js_channels.\n rooted!(in (cx) let mut array = ptr::null_mut::<JSObject>());\n let shared_channel = (*self.shared_channels.borrow_mut()).buffers.remove(i);\n if Float32Array::create(cx, CreateWith::Slice(&shared_channel), array.handle_mut())\n .is_err()\n {\n return false;\n }\n channel.set(array.get());\n }\n\n true\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#acquire-the-content\n #[allow(unsafe_code)]\n pub fn acquire_contents(&self) -> Option<ServoMediaAudioBuffer> {\n let cx = self.global().get_cx();\n for (i, channel) in self.js_channels.borrow_mut().iter().enumerate() {\n \/\/ Step 1.\n if channel.get().is_null() {\n return None;\n }\n\n \/\/ Step 2.\n let channel_data = unsafe {\n typedarray!(in(cx) let array: Float32Array = channel.get());\n if let Ok(array) = array {\n let data = array.to_vec();\n let _ = JS_DetachArrayBuffer(cx, channel.handle(), DetachDataDisposition::KeepData);\n data\n } else {\n return None;\n }\n };\n\n channel.set(ptr::null_mut());\n\n \/\/ Step 3.\n (*self.shared_channels.borrow_mut()).buffers[i] = channel_data;\n\n \/\/ Step 4 will complete turning shared_channels\n \/\/ data into js_channels ArrayBuffers in restore_js_channel_data.\n }\n\n self.js_channels.borrow_mut().clear();\n\n Some((*self.shared_channels.borrow()).clone())\n }\n}\n\nimpl AudioBufferMethods for AudioBuffer {\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-samplerate\n fn SampleRate(&self) -> Finite<f32> {\n Finite::wrap(self.sample_rate)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-length\n fn Length(&self) -> u32 {\n self.length\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-duration\n fn Duration(&self) -> Finite<f64> {\n Finite::wrap(self.duration)\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-numberofchannels\n fn NumberOfChannels(&self) -> u32 
{\n self.number_of_channels\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-getchanneldata\n #[allow(unsafe_code)]\n unsafe fn GetChannelData(\n &self,\n cx: *mut JSContext,\n channel: u32,\n ) -> Fallible<NonNull<JSObject>> {\n if channel >= self.number_of_channels {\n return Err(Error::IndexSize);\n }\n\n if !self.restore_js_channel_data(cx) {\n return Err(Error::JSFailed);\n }\n\n Ok(NonNull::new_unchecked(\n self.js_channels.borrow()[channel as usize].get(),\n ))\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-copyfromchannel\n #[allow(unsafe_code)]\n fn CopyFromChannel(\n &self,\n mut destination: CustomAutoRooterGuard<Float32Array>,\n channel_number: u32,\n start_in_channel: u32,\n ) -> Fallible<()> {\n if channel_number >= self.number_of_channels || start_in_channel > self.length {\n return Err(Error::IndexSize);\n }\n\n let bytes_to_copy = min(self.length - start_in_channel, destination.len() as u32) as usize;\n let cx = self.global().get_cx();\n let channel_number = channel_number as usize;\n let offset = start_in_channel as usize;\n let mut dest = Vec::with_capacity(destination.len());\n\n \/\/ We either copy form js_channels or shared_channels.\n\n let js_channel = self.js_channels.borrow()[channel_number].get();\n if !js_channel.is_null() {\n typedarray!(in(cx) let array: Float32Array = js_channel);\n if let Ok(array) = array {\n let data = unsafe { array.as_slice() };\n dest.extend_from_slice(&data[offset..offset + bytes_to_copy]);\n return Ok(());\n }\n }\n\n if let Some(shared_channel) = self.shared_channels.borrow().buffers.get(channel_number) {\n dest.extend_from_slice(&shared_channel.as_slice()[offset..offset + bytes_to_copy]);\n }\n\n unsafe {\n destination.update(&dest);\n }\n\n Ok(())\n }\n\n \/\/ https:\/\/webaudio.github.io\/web-audio-api\/#dom-audiobuffer-copytochannel\n #[allow(unsafe_code)]\n fn CopyToChannel(\n &self,\n source: CustomAutoRooterGuard<Float32Array>,\n channel_number: u32,\n start_in_channel: u32,\n ) -> Fallible<()> {\n if channel_number >= self.number_of_channels || start_in_channel > (source.len() as u32) {\n return Err(Error::IndexSize);\n }\n\n let cx = self.global().get_cx();\n if unsafe { !self.restore_js_channel_data(cx) } {\n return Err(Error::JSFailed);\n }\n\n let js_channel = self.js_channels.borrow()[channel_number as usize].get();\n if js_channel.is_null() {\n \/\/ The array buffer was detached.\n return Err(Error::IndexSize);\n }\n\n typedarray!(in(cx) let array: Float32Array = js_channel);\n if let Ok(mut array) = array {\n let bytes_to_copy = min(self.length - start_in_channel, source.len() as u32) as usize;\n let offset = start_in_channel as usize;\n unsafe {\n array.update(&source.as_slice()[offset..offset + bytes_to_copy]);\n }\n } else {\n return Err(Error::IndexSize);\n }\n\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>day 11 solved.<commit_after>use std::collections::HashMap;\n\nfn increment(vec: &mut Vec<u8>) {\n let mut stop = false;\n vec.reverse();\n for x in vec.iter_mut() {\n if *x == 122 { \/\/ 'z'\n *x = 97; \/\/ 'a'\n } else {\n *x += 1;\n stop = true;\n break;\n }\n }\n if !stop {\n vec.push(97); \/\/ append 'a' to front\n }\n vec.reverse();\n}\n\nfn main() {\n let mut password = \"cqjxjnds\".to_string();\n\n let mut vec = password.clone().into_bytes();\n\n loop {\n increment(&mut vec);\n\n password = String::from_utf8(vec.clone()).unwrap();\n\n let mut hmap = HashMap::new();\n\n let cond1 = vec.windows(3)\n .fold(false, |acc, win| {\n acc 
|| win[0] + 1 == win[1] && win[1] + 1 == win[2]\n });\n\n let cond2 = password.chars()\n .fold(true, |acc, ch| {\n acc && ch != 'i' && ch != 'o' && ch != 'l'\n });\n\n let cond3 = vec.windows(2).enumerate()\n .fold(0, |acc, (i, win)| {\n acc + if win[0] == win[1] {\n if hmap.contains_key(win) {\n if hmap[win] != i - 1 {\n 1\n } else {\n 0\n }\n } else {\n hmap.insert(win.to_vec(), i);\n 1\n }\n } else {\n 0\n }\n }) >= 2;\n\n if cond1 && cond2 && cond3 {\n break;\n }\n }\n\n println!(\"The next password is {}.\", password);\n\n loop {\n increment(&mut vec);\n\n password = String::from_utf8(vec.clone()).unwrap();\n\n let mut hmap = HashMap::new();\n\n let cond1 = vec.windows(3)\n .fold(false, |acc, win| {\n acc || win[0] + 1 == win[1] && win[1] + 1 == win[2]\n });\n\n let cond2 = password.chars()\n .fold(true, |acc, ch| {\n acc && ch != 'i' && ch != 'o' && ch != 'l'\n });\n\n let cond3 = vec.windows(2).enumerate()\n .fold(0, |acc, (i, win)| {\n acc + if win[0] == win[1] {\n if hmap.contains_key(win) {\n if hmap[win] != i - 1 {\n 1\n } else {\n 0\n }\n } else {\n hmap.insert(win.to_vec(), i);\n 1\n }\n } else {\n 0\n }\n }) >= 2;\n\n if cond1 && cond2 && cond3 {\n break;\n }\n }\n \n println!(\"The next password after that is {}.\", password);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add outw\/outl to x86_64 forth natives<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add with_override convenience method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rustdoc: Add a test for should_fail in doctests<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags:--test\n\n\/\/\/ Example of rustdoc incorrectly parsing <code>```rust,should_panic<\/code>.\n\/\/\/\n\/\/\/ ```should_panic\n\/\/\/ fn main() { panic!(\"fee\"); }\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust,should_panic\n\/\/\/ fn main() { panic!(\"fum\"); }\n\/\/\/ ```\npub fn foo() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix build<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. 
self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - 
row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Fix indentation<commit_after>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => 
Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Day 20 part 1 and 2<commit_after>\/\/ advent20.rs\n\/\/ find ip not in range\n\nuse std::io;\nuse std::io::BufRead;\n\nfn main() {\n let stdin = io::stdin();\n let mut blacklist = vec![];\n\n for line in stdin.lock().lines().map(|l| l.expect(\"Failed to read line\")) {\n let ip_range: Vec<u32> =\n line.trim().split('-').map(|x| x.parse().expect(\"non-numeric IP\")).collect();\n if 2 != ip_range.len() {\n println!(\"unexpected input {}\", line);\n continue;\n }\n blacklist.push((ip_range[0], ip_range[1]));\n }\n blacklist.sort_by_key(|x| x.0);\n\n if let Some(ip) = find_first_unblocked_ip(&blacklist) {\n println!(\"Part 1 first available IP: {}\", ip);\n } else {\n println!(\"Part 1 no IPs available\");\n }\n\n println!(\"Part 2 total unblocked IPs: {}\",\n find_total_unblocked_ips(&blacklist));\n}\n\n\/\/ \/\/\/\/\/\/\/\n\/\/ Part 1\n\n#[cfg(test)]\nconst MAX_IP: u32 = 10;\n\n#[cfg(not(test))]\nconst MAX_IP: u32 = std::u32::MAX;\n\nfn find_first_unblocked_ip(blacklist: &[(u32, u32)]) -> Option<u32> {\n if let Some(&(lowest, _)) = blacklist.first() {\n if lowest > 0 {\n return Some(0);\n }\n }\n\n let mut highest = 0;\n for &(low, high) in blacklist {\n if low > highest + 1 {\n return Some(highest + 1);\n }\n highest = std::cmp::max(highest, high);\n if highest == MAX_IP {\n break;\n }\n }\n\n if highest < MAX_IP {\n Some(highest + 1)\n } else {\n None\n }\n}\n\n\/\/ \/\/\/\/\/\/\/\n\/\/ Part 2\n\nfn find_total_unblocked_ips(blacklist: &[(u32, u32)]) -> usize {\n let mut total_available = 0;\n\n if let Some(&(lowest, _)) = blacklist.first() {\n if lowest > 0 {\n total_available = 1;\n }\n }\n\n let mut highest = 0;\n for &(low, high) in blacklist {\n if highest < MAX_IP {\n total_available += (highest + 1..low).count();\n }\n highest = std::cmp::max(highest, high);\n if highest == MAX_IP {\n break;\n }\n }\n\n if highest < MAX_IP {\n total_available + (highest + 1..MAX_IP).count()\n } else {\n total_available\n }\n}\n\n\n\n\n\/\/ \/\/\/\/\/\/\n\/\/ Tests\n#[test]\nfn test_find_first_unblocked_ip() {\n let blacklist = vec![(0, 2), (4, 7), (5, 8)];\n assert_eq!(Some(3), find_first_unblocked_ip(&blacklist));\n}\n\n#[test]\nfn test_find_total_unblocked_ips() {\n let blacklist = vec![(0, 2), (4, 7), (5, 8)];\n assert_eq!(2, find_total_unblocked_ips(&blacklist));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change check_connect to setup_connection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor it even more<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added tests dir<commit_after>extern crate proton_cli;\n<|endoftext|>"} {"text":"<commit_before>\/\/ extern crate bindgen;\n#[cfg(target_env = \"msvc\")]\nextern crate cc;\nextern crate pkg_config;\nextern crate num_cpus;\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio};\n\n\/\/ Automatically write bindings to libsass\n\/\/#[allow(dead_code)]\n\/\/fn write_bindings() {\n\/\/ let bindings = bindgen::Builder::default()\n\/\/ .header(\"libsass\/include\/sass.h\")\n\/\/ .clang_arg(\"-Ilibsass\/include\")\n\/\/ \/\/ To avoid a test failing\n\/\/ .blacklist_type(\"max_align_t\")\n\/\/ \/\/ we do static linking so it should be fine\n\/\/ \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/36927\n\/\/ .rustified_enum(\".*\")\n\/\/ .generate()\n\/\/ .expect(\"Unable to generate bindings\");\n\/\/\n\/\/ \/\/ Write the 
bindings to the $OUT_DIR\/bindings.rs file.\n\/\/ let out_path = PathBuf::from(\"src\");\n\/\/ bindings\n\/\/ .write_to_file(out_path.join(\"bindings.rs\"))\n\/\/ .expect(\"Couldn't write bindings!\");\n\/\/}\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(n) => n,\n Err(e) => panic!(\"\\n{} failed with {}\\n\", stringify!($e), e),\n })\n}\n\nfn cp_r(dir: &Path, dest: &Path) {\n for entry in t!(fs::read_dir(dir)) {\n let entry = t!(entry);\n let path = entry.path();\n let dst = dest.join(path.file_name().unwrap());\n if t!(fs::metadata(&path)).is_file() {\n t!(fs::copy(path, dst));\n } else {\n t!(fs::create_dir_all(&dst));\n cp_r(&path, &dst);\n }\n }\n}\n\nfn get_libsass_folder() -> PathBuf {\n env::current_dir().unwrap().join(\"libsass\")\n}\n\n\/\/ linux\/unix\n#[cfg(not(target_env = \"msvc\"))]\nfn compile() {\n let target = env::var(\"TARGET\").expect(\"TARGET not found\");\n let src = get_libsass_folder();\n let dest = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n let build = dest.join(\"build\");\n t!(fs::create_dir_all(&build));\n cp_r(&src, &build);\n let is_bsd = target.contains(\"dragonfly\")\n || target.contains(\"freebsd\")\n || target.contains(\"netbsd\")\n || target.contains(\"openbsd\");\n let libprobe = | lib: &str | -> bool {\n Command::new(\"cc\")\n .arg(\"-xc++\")\n .arg(\"-o\/dev\/null\")\n .arg(format!(\"-l{}\",lib))\n .arg(\"-shared\")\n .stderr(Stdio::null())\n .status()\n .expect(\"\")\n .success()\n };\n\n let jobs = env::var(\"MAKE_LIBSASS_JOBS\").unwrap_or(num_cpus::get().to_string());\n let r = Command::new(if is_bsd { \"gmake\" } else { \"make\" })\n .current_dir(&build)\n .args(&[\"--jobs\", &jobs])\n .output()\n .expect(\"error running make\");\n\n if !r.status.success() {\n let err = String::from_utf8_lossy(&r.stderr);\n let out = String::from_utf8_lossy(&r.stdout);\n panic!(\"Build error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n\n println!(\n \"cargo:rustc-link-search=native={}\",\n build.join(\"lib\").display()\n );\n println!(\"cargo:rustc-link-lib=static=sass\");\n\n if libprobe(\"c++_shared\") {\n println!(\"cargo:rustc-link-lib=dylib=c++_shared\");\n }\n else if libprobe(\"c++\") {\n println!(\"cargo:rustc-link-lib=dylib=c++\");\n }\n else if libprobe(\"stdc++\") {\n println!(\"cargo:rustc-link-lib=dylib=stdc++\");\n }\n else {\n panic!(\"no c++ library found\");\n }\n\n}\n\n\/\/ windows\n#[cfg(target_env = \"msvc\")]\nfn compile() {\n let src = get_libsass_folder();\n let target = env::var(\"TARGET\").expect(\"TARGET not found in environment\");\n let msvc_platform = if target.contains(\"x86_64\") {\n \"Win64\"\n } else {\n \"Win32\"\n };\n let dest = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n let build = dest.join(\"build\");\n t!(fs::create_dir_all(&build));\n cp_r(&src, &build);\n\n \/\/ Find an instance of devenv.exe from Visual Studio IDE in order to upgrade\n \/\/ libsass.sln to the current available IDE. 
Do nothing if no devenv.exe found\n let d = cc::windows_registry::find(target.as_str(), \"devenv.exe\");\n if let Some(mut d) = d {\n let d = d\n .args(&[\"\/upgrade\", \"win\\\\libsass.sln\"])\n .current_dir(&build)\n .output()\n .expect(\"error running devenv\");\n if !d.status.success() {\n let err = String::from_utf8_lossy(&d.stderr);\n let out = String::from_utf8_lossy(&d.stdout);\n println!(\"Upgrade error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n }\n\n let search = Command::new(\"which\")\n .args(&[\"msbuild.exe\"])\n .output()\n .expect(\"Could not search for msbuild.exe on path\");\n let mut msbuild = if search.status.success() {\n Command::new(\"msbuild.exe\")\n } else {\n cc::windows_registry::find(target.as_str(), \"msbuild.exe\")\n .expect(\"Could not find msbuild.exe on the registry\")\n };\n\n let r = msbuild\n .args(&[\n \"win\\\\libsass.sln\",\n \"\/p:LIBSASS_STATIC_LIB=1\",\n \"\/p:Configuration=Release\",\n \"\/p:WholeProgramOptimization=false\",\n format!(\"\/p:Platform={}\", msvc_platform).as_str(),\n ])\n .current_dir(&build)\n .output()\n .expect(\"error running msbuild\");\n\n if !r.status.success() {\n let err = String::from_utf8_lossy(&r.stderr);\n let out = String::from_utf8_lossy(&r.stdout);\n panic!(\"Build error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n\n println!(\n \"cargo:rustc-link-search=native={}\",\n build.join(\"win\").join(\"bin\").display()\n );\n println!(\"cargo:rustc-link-lib=static=libsass\");\n}\n\nfn main() {\n \/\/ Uncomment the line below to generate bindings. Doesn't work on CI as it\n \/\/ requires additional tooling\n \/\/ write_bindings();\n\n \/\/ Is it already built?\n if let Ok(_) = pkg_config::find_library(\"libsass\") {\n return;\n }\n\n compile();\n}\n<commit_msg>Try parallelizing msbuild<commit_after>\/\/ extern crate bindgen;\n#[cfg(target_env = \"msvc\")]\nextern crate cc;\nextern crate pkg_config;\nextern crate num_cpus;\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio};\n\n\/\/ Automatically write bindings to libsass\n\/\/#[allow(dead_code)]\n\/\/fn write_bindings() {\n\/\/ let bindings = bindgen::Builder::default()\n\/\/ .header(\"libsass\/include\/sass.h\")\n\/\/ .clang_arg(\"-Ilibsass\/include\")\n\/\/ \/\/ To avoid a test failing\n\/\/ .blacklist_type(\"max_align_t\")\n\/\/ \/\/ we do static linking so it should be fine\n\/\/ \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/36927\n\/\/ .rustified_enum(\".*\")\n\/\/ .generate()\n\/\/ .expect(\"Unable to generate bindings\");\n\/\/\n\/\/ \/\/ Write the bindings to the $OUT_DIR\/bindings.rs file.\n\/\/ let out_path = PathBuf::from(\"src\");\n\/\/ bindings\n\/\/ .write_to_file(out_path.join(\"bindings.rs\"))\n\/\/ .expect(\"Couldn't write bindings!\");\n\/\/}\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(n) => n,\n Err(e) => panic!(\"\\n{} failed with {}\\n\", stringify!($e), e),\n })\n}\n\nfn cp_r(dir: &Path, dest: &Path) {\n for entry in t!(fs::read_dir(dir)) {\n let entry = t!(entry);\n let path = entry.path();\n let dst = dest.join(path.file_name().unwrap());\n if t!(fs::metadata(&path)).is_file() {\n t!(fs::copy(path, dst));\n } else {\n t!(fs::create_dir_all(&dst));\n cp_r(&path, &dst);\n }\n }\n}\n\nfn get_libsass_folder() -> PathBuf {\n env::current_dir().unwrap().join(\"libsass\")\n}\n\n\/\/ linux\/unix\n#[cfg(not(target_env = \"msvc\"))]\nfn compile() {\n let target = env::var(\"TARGET\").expect(\"TARGET not found\");\n let src = get_libsass_folder();\n let dest = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n let build = dest.join(\"build\");\n t!(fs::create_dir_all(&build));\n cp_r(&src, &build);\n let is_bsd = target.contains(\"dragonfly\")\n || target.contains(\"freebsd\")\n || target.contains(\"netbsd\")\n || target.contains(\"openbsd\");\n let libprobe = | lib: &str | -> bool {\n Command::new(\"cc\")\n .arg(\"-xc++\")\n .arg(\"-o\/dev\/null\")\n .arg(format!(\"-l{}\",lib))\n .arg(\"-shared\")\n .stderr(Stdio::null())\n .status()\n .expect(\"\")\n .success()\n };\n\n let jobs = env::var(\"MAKE_LIBSASS_JOBS\").unwrap_or(num_cpus::get().to_string());\n let r = Command::new(if is_bsd { \"gmake\" } else { \"make\" })\n .current_dir(&build)\n .args(&[\"--jobs\", &jobs])\n .output()\n .expect(\"error running make\");\n\n if !r.status.success() {\n let err = String::from_utf8_lossy(&r.stderr);\n let out = String::from_utf8_lossy(&r.stdout);\n panic!(\"Build error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n\n println!(\n \"cargo:rustc-link-search=native={}\",\n build.join(\"lib\").display()\n );\n println!(\"cargo:rustc-link-lib=static=sass\");\n\n if libprobe(\"c++_shared\") {\n println!(\"cargo:rustc-link-lib=dylib=c++_shared\");\n }\n else if libprobe(\"c++\") {\n println!(\"cargo:rustc-link-lib=dylib=c++\");\n }\n else if libprobe(\"stdc++\") {\n println!(\"cargo:rustc-link-lib=dylib=stdc++\");\n }\n else {\n panic!(\"no c++ library found\");\n }\n\n}\n\n\/\/ windows\n#[cfg(target_env = \"msvc\")]\nfn compile() {\n let src = get_libsass_folder();\n let target = env::var(\"TARGET\").expect(\"TARGET not found in environment\");\n let msvc_platform = if target.contains(\"x86_64\") {\n \"Win64\"\n } else {\n \"Win32\"\n };\n let dest = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n let build = dest.join(\"build\");\n t!(fs::create_dir_all(&build));\n cp_r(&src, &build);\n\n \/\/ Find an instance of devenv.exe from Visual Studio IDE in order to upgrade\n \/\/ libsass.sln to the current available IDE. 
Do nothing if no devenv.exe found\n let d = cc::windows_registry::find(target.as_str(), \"devenv.exe\");\n if let Some(mut d) = d {\n let d = d\n .args(&[\"\/upgrade\", \"win\\\\libsass.sln\"])\n .current_dir(&build)\n .output()\n .expect(\"error running devenv\");\n if !d.status.success() {\n let err = String::from_utf8_lossy(&d.stderr);\n let out = String::from_utf8_lossy(&d.stdout);\n println!(\"Upgrade error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n }\n\n let search = Command::new(\"which\")\n .args(&[\"msbuild.exe\"])\n .output()\n .expect(\"Could not search for msbuild.exe on path\");\n let mut msbuild = if search.status.success() {\n Command::new(\"msbuild.exe\")\n } else {\n cc::windows_registry::find(target.as_str(), \"msbuild.exe\")\n .expect(\"Could not find msbuild.exe on the registry\")\n };\n\n let jobs = env::var(\"MAKE_LIBSASS_JOBS\").unwrap_or(num_cpus::get().to_string());\n\n let r = msbuild\n .args(&[\n \"win\\\\libsass.sln\",\n \"\/p:LIBSASS_STATIC_LIB=1\",\n \"\/p:Configuration=Release\",\n \"\/p:WholeProgramOptimization=false\",\n format!(\"\/m:{}\", jobs).as_str(),\n format!(\"\/p:Platform={}\", msvc_platform).as_str(),\n ])\n .current_dir(&build)\n .output()\n .expect(\"error running msbuild\");\n\n if !r.status.success() {\n let err = String::from_utf8_lossy(&r.stderr);\n let out = String::from_utf8_lossy(&r.stdout);\n panic!(\"Build error:\\nSTDERR:{}\\nSTDOUT:{}\", err, out);\n }\n\n println!(\n \"cargo:rustc-link-search=native={}\",\n build.join(\"win\").join(\"bin\").display()\n );\n println!(\"cargo:rustc-link-lib=static=libsass\");\n}\n\nfn main() {\n \/\/ Uncomment the line below to generate bindings. Doesn't work on CI as it\n \/\/ requires additional tooling\n \/\/ write_bindings();\n\n \/\/ Is it already built?\n if let Ok(_) = pkg_config::find_library(\"libsass\") {\n return;\n }\n\n compile();\n}\n<|endoftext|>"} {"text":"<commit_before>pub use self::dependency::Dependency;\npub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};\npub use self::manifest::{EitherManifest, VirtualManifest};\npub use self::package::{Package, PackageSet};\npub use self::package_id::PackageId;\npub use self::package_id_spec::PackageIdSpec;\npub use self::registry::Registry;\npub use self::resolver::Resolve;\npub use self::shell::{Shell, Verbosity};\npub use self::source::{Source, SourceId, SourceMap, GitReference};\npub use self::summary::Summary;\npub use self::workspace::{Workspace, WorkspaceConfig};\n\npub mod source;\npub mod package;\npub mod package_id;\npub mod dependency;\npub mod manifest;\npub mod resolver;\npub mod summary;\npub mod shell;\npub mod registry;\nmod package_id_spec;\nmod workspace;\n<commit_msg>workspace: export Members as part of the public interface<commit_after>pub use self::dependency::Dependency;\npub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};\npub use self::manifest::{EitherManifest, VirtualManifest};\npub use self::package::{Package, PackageSet};\npub use self::package_id::PackageId;\npub use self::package_id_spec::PackageIdSpec;\npub use self::registry::Registry;\npub use self::resolver::Resolve;\npub use self::shell::{Shell, Verbosity};\npub use self::source::{Source, SourceId, SourceMap, GitReference};\npub use self::summary::Summary;\npub use self::workspace::{Members, Workspace, WorkspaceConfig};\n\npub mod source;\npub mod package;\npub mod package_id;\npub mod dependency;\npub mod manifest;\npub mod resolver;\npub mod summary;\npub mod shell;\npub mod registry;\nmod 
package_id_spec;\nmod workspace;\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Standard Library\n\/\/!\n\/\/! The Rust Standard Library provides the essential runtime\n\/\/! functionality for building portable Rust software.\n\/\/! It is linked to all Rust crates by default.\n\/\/!\n\/\/! ## Intrinsic types and operations\n\/\/!\n\/\/! The [`ptr`](ptr\/index.html) and [`mem`](mem\/index.html)\n\/\/! modules deal with unsafe pointers and memory manipulation.\n\/\/! [`marker`](marker\/index.html) defines the special built-in traits,\n\/\/! and [`raw`](raw\/index.html) the runtime representation of Rust types.\n\/\/! These are some of the lowest-level building blocks in Rust.\n\/\/!\n\/\/! ## Math on primitive types and math traits\n\/\/!\n\/\/! Although basic operations on primitive types are implemented\n\/\/! directly by the compiler, the standard library additionally\n\/\/! defines many common operations through traits defined in\n\/\/! mod [`num`](num\/index.html).\n\/\/!\n\/\/! ## Pervasive types\n\/\/!\n\/\/! The [`option`](option\/index.html) and [`result`](result\/index.html)\n\/\/! modules define optional and error-handling types, `Option` and `Result`.\n\/\/! [`iter`](iter\/index.html) defines Rust's iterator protocol\n\/\/! along with a wide variety of iterators.\n\/\/! [`Cell` and `RefCell`](cell\/index.html) are for creating types that\n\/\/! manage their own mutability.\n\/\/!\n\/\/! ## Vectors, slices and strings\n\/\/!\n\/\/! The common container type, `Vec`, a growable vector backed by an\n\/\/! array, lives in the [`vec`](vec\/index.html) module. References to\n\/\/! arrays, `&[T]`, more commonly called \"slices\", are built-in types\n\/\/! for which the [`slice`](slice\/index.html) module defines many\n\/\/! methods.\n\/\/!\n\/\/! `&str`, a UTF-8 string, is a built-in type, and the standard library\n\/\/! defines methods for it on a variety of traits in the\n\/\/! [`str`](str\/index.html) module. Rust strings are immutable;\n\/\/! use the `String` type defined in [`string`](string\/index.html)\n\/\/! for a mutable string builder.\n\/\/!\n\/\/! For converting to strings use the [`format!`](fmt\/index.html)\n\/\/! macro, and for converting from strings use the\n\/\/! [`FromStr`](str\/trait.FromStr.html) trait.\n\/\/!\n\/\/! ## Platform abstractions\n\/\/!\n\/\/! Besides basic data types, the standard library is largely concerned\n\/\/! with abstracting over differences in common platforms, most notably\n\/\/! Windows and Unix derivatives. The [`os`](os\/index.html) module\n\/\/! provides a number of basic functions for interacting with the\n\/\/! operating environment, including program arguments, environment\n\/\/! variables, and directory navigation. The [`path`](path\/index.html)\n\/\/! module encapsulates the platform-specific rules for dealing\n\/\/! with file paths.\n\/\/!\n\/\/! `std` also includes modules for interoperating with the\n\/\/! C language: [`c_str`](c_str\/index.html) and\n\/\/! [`c_vec`](c_vec\/index.html).\n\/\/!\n\/\/! 
## Concurrency, I\/O, and the runtime\n\/\/!\n\/\/! The [`thread`](thread\/index.html) module contains Rust's threading abstractions,\n\/\/! while [`comm`](comm\/index.html) contains the channel types for message\n\/\/! passing. [`sync`](sync\/index.html) contains further, primitive, shared\n\/\/! memory types, including [`atomic`](sync\/atomic\/index.html).\n\/\/!\n\/\/! Common types of I\/O, including files, TCP, UDP, pipes, Unix domain sockets,\n\/\/! timers, and process spawning, are defined in the\n\/\/! [`old_io`](old_io\/index.html) module.\n\/\/!\n\/\/! Rust's I\/O and concurrency depends on a small runtime interface\n\/\/! that lives, along with its support code, in mod [`rt`](rt\/index.html).\n\/\/! While a notable part of the standard library's architecture, this\n\/\/! module is not intended for public use.\n\/\/!\n\/\/! ## The Rust prelude and macros\n\/\/!\n\/\/! Finally, the [`prelude`](prelude\/index.html) defines a\n\/\/! common set of traits, types, and functions that are made available\n\/\/! to all code by default. [`macros`](macros\/index.html) contains\n\/\/! all the standard macros, such as `assert!`, `panic!`, `println!`,\n\/\/! and `format!`, also available to all Rust code.\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"std\"]\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"http:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"http:\/\/www.rust-lang.org\/favicon.ico\",\n html_root_url = \"http:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"http:\/\/play.rust-lang.org\/\")]\n\n#![feature(alloc)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core)]\n#![feature(hash)]\n#![feature(lang_items)]\n#![feature(libc)]\n#![feature(linkage, thread_local, asm)]\n#![feature(old_impl_check)]\n#![feature(optin_builtin_traits)]\n#![feature(rand)]\n#![feature(staged_api)]\n#![feature(unboxed_closures)]\n#![feature(unicode)]\n#![feature(unsafe_destructor)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(macro_reexport)]\n#![feature(hash)]\n#![feature(int_uint)]\n#![feature(unique)]\n#![feature(allow_internal_unstable)]\n#![feature(str_char)]\n#![cfg_attr(test, feature(test, rustc_private))]\n\n\/\/ Don't link to std. We are std.\n#![feature(no_std)]\n#![no_std]\n\n#![deny(missing_docs)]\n\n#[cfg(test)] extern crate test;\n#[cfg(test)] #[macro_use] extern crate log;\n\n#[macro_use]\n#[macro_reexport(assert, assert_eq, debug_assert, debug_assert_eq,\n unreachable, unimplemented, write, writeln)]\nextern crate core;\n\n#[macro_use]\n#[macro_reexport(vec, format)]\nextern crate \"collections\" as core_collections;\n\n#[allow(deprecated)] extern crate \"rand\" as core_rand;\nextern crate alloc;\nextern crate unicode;\nextern crate libc;\n\n#[macro_use] #[no_link] extern crate rustc_bitflags;\n\n\/\/ Make std testable by not duplicating lang items. 
See #2912\n#[cfg(test)] extern crate \"std\" as realstd;\n#[cfg(test)] pub use realstd::marker;\n#[cfg(test)] pub use realstd::ops;\n#[cfg(test)] pub use realstd::cmp;\n#[cfg(test)] pub use realstd::boxed;\n\n\n\/\/ NB: These reexports are in the order they should be listed in rustdoc\n\npub use core::any;\npub use core::cell;\npub use core::clone;\n#[cfg(not(test))] pub use core::cmp;\npub use core::default;\n#[allow(deprecated)]\npub use core::finally;\npub use core::hash;\npub use core::intrinsics;\npub use core::iter;\n#[cfg(not(test))] pub use core::marker;\npub use core::mem;\n#[cfg(not(test))] pub use core::ops;\npub use core::ptr;\npub use core::raw;\npub use core::simd;\npub use core::result;\npub use core::option;\npub use core::error;\n\n#[cfg(not(test))] pub use alloc::boxed;\npub use alloc::rc;\n\npub use core_collections::borrow;\npub use core_collections::fmt;\npub use core_collections::slice;\npub use core_collections::str;\npub use core_collections::string;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core_collections::vec;\n\npub use unicode::char;\n\n\/* Exported macros *\/\n\n#[macro_use]\nmod macros;\n\nmod rtdeps;\n\n\/* The Prelude. *\/\n\npub mod prelude;\n\n\n\/* Primitive types *\/\n\n#[path = \"num\/float_macros.rs\"]\n#[macro_use]\nmod float_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\npub mod ascii;\npub mod thunk;\n\n\/* Common traits *\/\n\npub mod num;\n\n\/* Runtime and platform support *\/\n\n#[macro_use]\npub mod thread_local;\n\npub mod dynamic_lib;\npub mod ffi;\npub mod old_io;\npub mod io;\npub mod fs;\npub mod net;\npub mod os;\npub mod env;\npub mod path;\npub mod old_path;\npub mod process;\npub mod rand;\npub mod time;\n\n\/* Common data structures *\/\n\npub mod collections;\n\n\/* Threads and communication *\/\n\npub mod thread;\npub mod sync;\n\n#[macro_use]\n#[path = \"sys\/common\/mod.rs\"] mod sys_common;\n\n#[cfg(unix)]\n#[path = \"sys\/unix\/mod.rs\"] mod sys;\n#[cfg(windows)]\n#[path = \"sys\/windows\/mod.rs\"] mod sys;\n\npub mod rt;\nmod panicking;\n\n\/\/ Documentation for primitive types\n\nmod bool;\nmod unit;\nmod tuple;\n\n\/\/ A curious inner-module that's not exported that contains the binding\n\/\/ 'std' so that macro-expanded references to std::error and such\n\/\/ can be resolved within libstd.\n#[doc(hidden)]\nmod std {\n pub use sync; \/\/ used for select!()\n pub use error; \/\/ used for try!()\n pub use fmt; \/\/ used for any formatting strings\n #[allow(deprecated)]\n pub use old_io; \/\/ used for println!()\n pub use option; \/\/ used for bitflags!{}\n pub use rt; \/\/ used for panic!()\n pub use vec; \/\/ used for vec![]\n pub use cell; \/\/ used for tls!\n pub use thread_local; \/\/ used for thread_local!\n pub use marker; \/\/ used for tls!\n pub use ops; \/\/ used for bitflags!\n\n \/\/ The test runner calls ::std::env::args() but really wants realstd\n #[cfg(test)] pub use realstd::env as env;\n \/\/ The test runner 
requires std::slice::Vector, so re-export std::slice just for it.\n \/\/\n \/\/ It is also used in vec![]\n pub use slice;\n\n pub use boxed; \/\/ used for vec![]\n}\n<commit_msg>Rollup merge of #23392 - WiSaGaN:bugfix\/fix_deprecate_link, r=Manishearth<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Standard Library\n\/\/!\n\/\/! The Rust Standard Library provides the essential runtime\n\/\/! functionality for building portable Rust software.\n\/\/! It is linked to all Rust crates by default.\n\/\/!\n\/\/! ## Intrinsic types and operations\n\/\/!\n\/\/! The [`ptr`](ptr\/index.html) and [`mem`](mem\/index.html)\n\/\/! modules deal with unsafe pointers and memory manipulation.\n\/\/! [`marker`](marker\/index.html) defines the special built-in traits,\n\/\/! and [`raw`](raw\/index.html) the runtime representation of Rust types.\n\/\/! These are some of the lowest-level building blocks in Rust.\n\/\/!\n\/\/! ## Math on primitive types and math traits\n\/\/!\n\/\/! Although basic operations on primitive types are implemented\n\/\/! directly by the compiler, the standard library additionally\n\/\/! defines many common operations through traits defined in\n\/\/! mod [`num`](num\/index.html).\n\/\/!\n\/\/! ## Pervasive types\n\/\/!\n\/\/! The [`option`](option\/index.html) and [`result`](result\/index.html)\n\/\/! modules define optional and error-handling types, `Option` and `Result`.\n\/\/! [`iter`](iter\/index.html) defines Rust's iterator protocol\n\/\/! along with a wide variety of iterators.\n\/\/! [`Cell` and `RefCell`](cell\/index.html) are for creating types that\n\/\/! manage their own mutability.\n\/\/!\n\/\/! ## Vectors, slices and strings\n\/\/!\n\/\/! The common container type, `Vec`, a growable vector backed by an\n\/\/! array, lives in the [`vec`](vec\/index.html) module. References to\n\/\/! arrays, `&[T]`, more commonly called \"slices\", are built-in types\n\/\/! for which the [`slice`](slice\/index.html) module defines many\n\/\/! methods.\n\/\/!\n\/\/! `&str`, a UTF-8 string, is a built-in type, and the standard library\n\/\/! defines methods for it on a variety of traits in the\n\/\/! [`str`](str\/index.html) module. Rust strings are immutable;\n\/\/! use the `String` type defined in [`string`](string\/index.html)\n\/\/! for a mutable string builder.\n\/\/!\n\/\/! For converting to strings use the [`format!`](fmt\/index.html)\n\/\/! macro, and for converting from strings use the\n\/\/! [`FromStr`](str\/trait.FromStr.html) trait.\n\/\/!\n\/\/! ## Platform abstractions\n\/\/!\n\/\/! Besides basic data types, the standard library is largely concerned\n\/\/! with abstracting over differences in common platforms, most notably\n\/\/! Windows and Unix derivatives. The [`os`](os\/index.html) module\n\/\/! provides a number of basic functions for interacting with the\n\/\/! operating environment, including program arguments, environment\n\/\/! variables, and directory navigation. The [`path`](path\/index.html)\n\/\/! module encapsulates the platform-specific rules for dealing\n\/\/! 
with file paths.\n\/\/!\n\/\/! `std` also includes modules for interoperating with the\n\/\/! C language: [`c_str`](c_str\/index.html) and\n\/\/! [`c_vec`](c_vec\/index.html).\n\/\/!\n\/\/! ## Concurrency, I\/O, and the runtime\n\/\/!\n\/\/! The [`thread`](thread\/index.html) module contains Rust's threading abstractions.\n\/\/! [`sync`](sync\/index.html) contains further, primitive, shared memory types,\n\/\/! including [`atomic`](sync\/atomic\/index.html), and [`mpsc`](sync\/mpmc\/index.html),\n\/\/! which contains the channel types for message passing.\n\/\/!\n\/\/! Common types of I\/O, including files, TCP, UDP, pipes, Unix domain sockets,\n\/\/! timers, and process spawning, are defined in the\n\/\/! [`old_io`](old_io\/index.html) module.\n\/\/!\n\/\/! Rust's I\/O and concurrency depends on a small runtime interface\n\/\/! that lives, along with its support code, in mod [`rt`](rt\/index.html).\n\/\/! While a notable part of the standard library's architecture, this\n\/\/! module is not intended for public use.\n\/\/!\n\/\/! ## The Rust prelude and macros\n\/\/!\n\/\/! Finally, the [`prelude`](prelude\/index.html) defines a\n\/\/! common set of traits, types, and functions that are made available\n\/\/! to all code by default. [`macros`](macros\/index.html) contains\n\/\/! all the standard macros, such as `assert!`, `panic!`, `println!`,\n\/\/! and `format!`, also available to all Rust code.\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"std\"]\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"http:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"http:\/\/www.rust-lang.org\/favicon.ico\",\n html_root_url = \"http:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"http:\/\/play.rust-lang.org\/\")]\n\n#![feature(alloc)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core)]\n#![feature(hash)]\n#![feature(lang_items)]\n#![feature(libc)]\n#![feature(linkage, thread_local, asm)]\n#![feature(old_impl_check)]\n#![feature(optin_builtin_traits)]\n#![feature(rand)]\n#![feature(staged_api)]\n#![feature(unboxed_closures)]\n#![feature(unicode)]\n#![feature(unsafe_destructor)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(macro_reexport)]\n#![feature(hash)]\n#![feature(int_uint)]\n#![feature(unique)]\n#![feature(allow_internal_unstable)]\n#![feature(str_char)]\n#![cfg_attr(test, feature(test, rustc_private))]\n\n\/\/ Don't link to std. We are std.\n#![feature(no_std)]\n#![no_std]\n\n#![deny(missing_docs)]\n\n#[cfg(test)] extern crate test;\n#[cfg(test)] #[macro_use] extern crate log;\n\n#[macro_use]\n#[macro_reexport(assert, assert_eq, debug_assert, debug_assert_eq,\n unreachable, unimplemented, write, writeln)]\nextern crate core;\n\n#[macro_use]\n#[macro_reexport(vec, format)]\nextern crate \"collections\" as core_collections;\n\n#[allow(deprecated)] extern crate \"rand\" as core_rand;\nextern crate alloc;\nextern crate unicode;\nextern crate libc;\n\n#[macro_use] #[no_link] extern crate rustc_bitflags;\n\n\/\/ Make std testable by not duplicating lang items. 
See #2912\n#[cfg(test)] extern crate \"std\" as realstd;\n#[cfg(test)] pub use realstd::marker;\n#[cfg(test)] pub use realstd::ops;\n#[cfg(test)] pub use realstd::cmp;\n#[cfg(test)] pub use realstd::boxed;\n\n\n\/\/ NB: These reexports are in the order they should be listed in rustdoc\n\npub use core::any;\npub use core::cell;\npub use core::clone;\n#[cfg(not(test))] pub use core::cmp;\npub use core::default;\n#[allow(deprecated)]\npub use core::finally;\npub use core::hash;\npub use core::intrinsics;\npub use core::iter;\n#[cfg(not(test))] pub use core::marker;\npub use core::mem;\n#[cfg(not(test))] pub use core::ops;\npub use core::ptr;\npub use core::raw;\npub use core::simd;\npub use core::result;\npub use core::option;\npub use core::error;\n\n#[cfg(not(test))] pub use alloc::boxed;\npub use alloc::rc;\n\npub use core_collections::borrow;\npub use core_collections::fmt;\npub use core_collections::slice;\npub use core_collections::str;\npub use core_collections::string;\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core_collections::vec;\n\npub use unicode::char;\n\n\/* Exported macros *\/\n\n#[macro_use]\nmod macros;\n\nmod rtdeps;\n\n\/* The Prelude. *\/\n\npub mod prelude;\n\n\n\/* Primitive types *\/\n\n#[path = \"num\/float_macros.rs\"]\n#[macro_use]\nmod float_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\npub mod ascii;\npub mod thunk;\n\n\/* Common traits *\/\n\npub mod num;\n\n\/* Runtime and platform support *\/\n\n#[macro_use]\npub mod thread_local;\n\npub mod dynamic_lib;\npub mod ffi;\npub mod old_io;\npub mod io;\npub mod fs;\npub mod net;\npub mod os;\npub mod env;\npub mod path;\npub mod old_path;\npub mod process;\npub mod rand;\npub mod time;\n\n\/* Common data structures *\/\n\npub mod collections;\n\n\/* Threads and communication *\/\n\npub mod thread;\npub mod sync;\n\n#[macro_use]\n#[path = \"sys\/common\/mod.rs\"] mod sys_common;\n\n#[cfg(unix)]\n#[path = \"sys\/unix\/mod.rs\"] mod sys;\n#[cfg(windows)]\n#[path = \"sys\/windows\/mod.rs\"] mod sys;\n\npub mod rt;\nmod panicking;\n\n\/\/ Documentation for primitive types\n\nmod bool;\nmod unit;\nmod tuple;\n\n\/\/ A curious inner-module that's not exported that contains the binding\n\/\/ 'std' so that macro-expanded references to std::error and such\n\/\/ can be resolved within libstd.\n#[doc(hidden)]\nmod std {\n pub use sync; \/\/ used for select!()\n pub use error; \/\/ used for try!()\n pub use fmt; \/\/ used for any formatting strings\n #[allow(deprecated)]\n pub use old_io; \/\/ used for println!()\n pub use option; \/\/ used for bitflags!{}\n pub use rt; \/\/ used for panic!()\n pub use vec; \/\/ used for vec![]\n pub use cell; \/\/ used for tls!\n pub use thread_local; \/\/ used for thread_local!\n pub use marker; \/\/ used for tls!\n pub use ops; \/\/ used for bitflags!\n\n \/\/ The test runner calls ::std::env::args() but really wants realstd\n #[cfg(test)] pub use realstd::env as env;\n \/\/ The test runner 
requires std::slice::Vector, so re-export std::slice just for it.\n \/\/\n \/\/ It is also used in vec![]\n pub use slice;\n\n pub use boxed; \/\/ used for vec![]\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Panic support in the standard library\n\n#![unstable(feature = \"std_panic\", reason = \"awaiting feedback\",\n issue = \"27719\")]\n\nuse any::Any;\nuse boxed::Box;\nuse cell::UnsafeCell;\nuse ops::{Deref, DerefMut};\nuse ptr::{Unique, Shared};\nuse rc::Rc;\nuse sync::{Arc, Mutex, RwLock};\nuse sys_common::unwind;\nuse thread::Result;\n\npub use panicking::{take_handler, set_handler, PanicInfo, Location};\n\n\/\/\/ A marker trait which represents \"panic safe\" types in Rust.\n\/\/\/\n\/\/\/ This trait is implemented by default for many types and behaves similarly in\n\/\/\/ terms of inference of implementation to the `Send` and `Sync` traits. The\n\/\/\/ purpose of this trait is to encode what types are safe to cross a `recover`\n\/\/\/ boundary with no fear of panic safety.\n\/\/\/\n\/\/\/ ## What is panic safety?\n\/\/\/\n\/\/\/ In Rust a function can \"return\" early if it either panics or calls a\n\/\/\/ function which transitively panics. This sort of control flow is not always\n\/\/\/ anticipated, and has the possibility of causing subtle bugs through a\n\/\/\/ combination of two cricial components:\n\/\/\/\n\/\/\/ 1. A data structure is in a temporarily invalid state when the thread\n\/\/\/ panics.\n\/\/\/ 2. This broken invariant is then later observed.\n\/\/\/\n\/\/\/ Typically in Rust, it is difficult to perform step (2) because catching a\n\/\/\/ panic involves either spawning a thread (which in turns makes it difficult\n\/\/\/ to later witness broken invariants) or using the `recover` function in this\n\/\/\/ module. Additionally, even if an invariant is witnessed, it typically isn't a\n\/\/\/ problem in Rust because there's no uninitialized values (like in C or C++).\n\/\/\/\n\/\/\/ It is possible, however, for **logical** invariants to be broken in Rust,\n\/\/\/ which can end up causing behavioral bugs. Another key aspect of panic safety\n\/\/\/ in Rust is that, in the absence of `unsafe` code, a panic cannot lead to\n\/\/\/ memory unsafety.\n\/\/\/\n\/\/\/ That was a bit of a whirlwind tour of panic safety, but for more information\n\/\/\/ about panic safety and how it applies to Rust, see an [associated RFC][rfc].\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ ## What is `RecoverSafe`?\n\/\/\/\n\/\/\/ Now that we've got an idea of what panic safety is in Rust, it's also\n\/\/\/ important to understand what this trait represents. 
As mentioned above, one\n\/\/\/ way to witness broken invariants is through the `recover` function in this\n\/\/\/ module as it allows catching a panic and then re-using the environment of\n\/\/\/ the closure.\n\/\/\/\n\/\/\/ Simply put, a type `T` implements `RecoverSafe` if it cannot easily allow\n\/\/\/ witnessing a broken invariant through the use of `recover` (catching a\n\/\/\/ panic). This trait is a marker trait, so it is automatically implemented for\n\/\/\/ many types, and it is also structurally composed (e.g. a struct is recover\n\/\/\/ safe if all of its components are recover safe).\n\/\/\/\n\/\/\/ Note, however, that this is not an unsafe trait, so there is not a succinct\n\/\/\/ contract that this trait is providing. Instead it is intended as more of a\n\/\/\/ \"speed bump\" to alert users of `recover` that broken invariants may be\n\/\/\/ witnessed and may need to be accounted for.\n\/\/\/\n\/\/\/ ## Who implements `RecoverSafe`?\n\/\/\/\n\/\/\/ Types such as `&mut T` and `&RefCell<T>` are examples which are **not**\n\/\/\/ recover safe. The general idea is that any mutable state which can be shared\n\/\/\/ across `recover` is not recover safe by default. This is because it is very\n\/\/\/ easy to witness a broken invariant outside of `recover` as the data is\n\/\/\/ simply accessed as usual.\n\/\/\/\n\/\/\/ Types like `&Mutex<T>`, however, are recover safe because they implement\n\/\/\/ poisoning by default. They still allow witnessing a broken invariant, but\n\/\/\/ they already provide their own \"speed bumps\" to do so.\n\/\/\/\n\/\/\/ ## When should `RecoverSafe` be used?\n\/\/\/\n\/\/\/ Is not intended that most types or functions need to worry about this trait.\n\/\/\/ It is only used as a bound on the `recover` function and as mentioned above,\n\/\/\/ the lack of `unsafe` means it is mostly an advisory. The `AssertRecoverSafe`\n\/\/\/ wrapper struct in this module can be used to force this trait to be\n\/\/\/ implemented for any closed over variables passed to the `recover` function\n\/\/\/ (more on this below).\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} may not be safely transferred \\\n across a recover boundary\"]\npub trait RecoverSafe {}\n\n\/\/\/ A marker trait representing types where a shared reference is considered\n\/\/\/ recover safe.\n\/\/\/\n\/\/\/ This trait is namely not implemented by `UnsafeCell`, the root of all\n\/\/\/ interior mutability.\n\/\/\/\n\/\/\/ This is a \"helper marker trait\" used to provide impl blocks for the\n\/\/\/ `RecoverSafe` trait, for more information see that documentation.\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} contains interior mutability \\\n and a reference may not be safely transferrable \\\n across a recover boundary\"]\npub trait RefRecoverSafe {}\n\n\/\/\/ A simple wrapper around a type to assert that it is panic safe.\n\/\/\/\n\/\/\/ When using `recover` it may be the case that some of the closed over\n\/\/\/ variables are not panic safe. For example if `&mut T` is captured the\n\/\/\/ compiler will generate a warning indicating that it is not panic safe. It\n\/\/\/ may not be the case, however, that this is actually a problem due to the\n\/\/\/ specific usage of `recover` if panic safety is specifically taken into\n\/\/\/ account. 
This wrapper struct is useful for a quick and lightweight\n\/\/\/ annotation that a variable is indeed panic safe.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ One way to use `AssertRecoverSafe` is to assert that the entire closure\n\/\/\/ itself is recover safe, bypassing all checks for all variables:\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/\n\/\/\/ \/\/ This code will not compile because the closure captures `&mut variable`\n\/\/\/ \/\/ which is not considered panic safe by default.\n\/\/\/\n\/\/\/ \/\/ panic::recover(|| {\n\/\/\/ \/\/ variable += 3;\n\/\/\/ \/\/ });\n\/\/\/\n\/\/\/ \/\/ This, however, will compile due to the `AssertRecoverSafe` wrapper\n\/\/\/ let result = panic::recover(AssertRecoverSafe(|| {\n\/\/\/ variable += 3;\n\/\/\/ }));\n\/\/\/ \/\/ ...\n\/\/\/ ```\n\/\/\/\n\/\/\/ Wrapping the entire closure amounts to a blanket assertion that all captured\n\/\/\/ variables are recover safe. This has the downside that if new captures are\n\/\/\/ added in the future, they will also be considered recover safe. Therefore,\n\/\/\/ you may prefer to just wrap individual captures, as shown below. This is\n\/\/\/ more annotation, but it ensures that if a new capture is added which is not\n\/\/\/ recover safe, you will get a compilation error at that time, which will\n\/\/\/ allow you to consider whether that new capture in fact represent a bug or\n\/\/\/ not.\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/ let other_capture = 3;\n\/\/\/\n\/\/\/ let result = {\n\/\/\/ let mut wrapper = AssertRecoverSafe(&mut variable);\n\/\/\/ panic::recover(move || {\n\/\/\/ **wrapper += other_capture;\n\/\/\/ })\n\/\/\/ };\n\/\/\/ \/\/ ...\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub struct AssertRecoverSafe<T>(pub T);\n\n\/\/ Implementations of the `RecoverSafe` trait:\n\/\/\n\/\/ * By default everything is recover safe\n\/\/ * pointers T contains mutability of some form are not recover safe\n\/\/ * Unique, an owning pointer, lifts an implementation\n\/\/ * Types like Mutex\/RwLock which are explicilty poisoned are recover safe\n\/\/ * Our custom AssertRecoverSafe wrapper is indeed recover safe\nimpl RecoverSafe for .. {}\nimpl<'a, T: ?Sized> !RecoverSafe for &'a mut T {}\nimpl<'a, T: RefRecoverSafe + ?Sized> RecoverSafe for &'a T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *const T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *mut T {}\nimpl<T: RecoverSafe> RecoverSafe for Unique<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Shared<T> {}\nimpl<T: ?Sized> RecoverSafe for Mutex<T> {}\nimpl<T: ?Sized> RecoverSafe for RwLock<T> {}\nimpl<T> RecoverSafe for AssertRecoverSafe<T> {}\n\n\/\/ not covered via the Shared impl above b\/c the inner contents use\n\/\/ Cell\/AtomicUsize, but the usage here is recover safe so we can lift the\n\/\/ impl up one level to Arc\/Rc itself\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Rc<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Arc<T> {}\n\n\/\/ Pretty simple implementations for the `RefRecoverSafe` marker trait,\n\/\/ basically just saying that this is a marker trait and `UnsafeCell` is the\n\/\/ only thing which doesn't implement it (which then transitively applies to\n\/\/ everything else).\nimpl RefRecoverSafe for .. 
{}\nimpl<T: ?Sized> !RefRecoverSafe for UnsafeCell<T> {}\nimpl<T> RefRecoverSafe for AssertRecoverSafe<T> {}\n\nimpl<T> AssertRecoverSafe<T> {\n \/\/\/ Creates a new `AssertRecoverSafe` wrapper around the provided type.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n #[rustc_deprecated(reason = \"the type's field is now public, construct it directly\")]\n pub fn new(t: T) -> AssertRecoverSafe<T> {\n AssertRecoverSafe(t)\n }\n\n \/\/\/ Consumes the `AssertRecoverSafe`, returning the wrapped value.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n #[rustc_deprecated(reason = \"the type's field is now public, access it directly\")]\n pub fn into_inner(self) -> T {\n self.0\n }\n}\n\nimpl<T> Deref for AssertRecoverSafe<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.0\n }\n}\n\nimpl<T> DerefMut for AssertRecoverSafe<T> {\n fn deref_mut(&mut self) -> &mut T {\n &mut self.0\n }\n}\n\nimpl<R, F: FnOnce() -> R> FnOnce<()> for AssertRecoverSafe<F> {\n type Output = R;\n\n extern \"rust-call\" fn call_once(self, _args: ()) -> R {\n (self.0)()\n }\n}\n\n\/\/\/ Invokes a closure, capturing the cause of panic if one occurs.\n\/\/\/\n\/\/\/ This function will return `Ok` with the closure's result if the closure\n\/\/\/ does not panic, and will return `Err(cause)` if the closure panics. The\n\/\/\/ `cause` returned is the object with which panic was originally invoked.\n\/\/\/\n\/\/\/ It is currently undefined behavior to unwind from Rust code into foreign\n\/\/\/ code, so this function is particularly useful when Rust is called from\n\/\/\/ another language (normally C). This can run arbitrary Rust code, capturing a\n\/\/\/ panic and allowing a graceful handling of the error.\n\/\/\/\n\/\/\/ It is **not** recommended to use this function for a general try\/catch\n\/\/\/ mechanism. The `Result` type is more appropriate to use for functions that\n\/\/\/ can fail on a regular basis.\n\/\/\/\n\/\/\/ The closure provided is required to adhere to the `RecoverSafe` to ensure\n\/\/\/ that all captured variables are safe to cross this recover boundary. The\n\/\/\/ purpose of this bound is to encode the concept of [exception safety][rfc] in\n\/\/\/ the type system. Most usage of this function should not need to worry about\n\/\/\/ this bound as programs are naturally panic safe without `unsafe` code. 
If it\n\/\/\/ becomes a problem the associated `AssertRecoverSafe` wrapper type in this\n\/\/\/ module can be used to quickly assert that the usage here is indeed exception\n\/\/\/ safe.\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ println!(\"hello!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_ok());\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_err());\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub fn recover<F: FnOnce() -> R + RecoverSafe, R>(f: F) -> Result<R> {\n let mut result = None;\n unsafe {\n let result = &mut result;\n try!(unwind::try(move || *result = Some(f())))\n }\n Ok(result.unwrap())\n}\n\n\/\/\/ Triggers a panic without invoking the panic handler.\n\/\/\/\n\/\/\/ This is designed to be used in conjunction with `recover` to, for example,\n\/\/\/ carry a panic across a layer of C code.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```should_panic\n\/\/\/ #![feature(std_panic, recover, panic_propagate)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/\n\/\/\/ if let Err(err) = result {\n\/\/\/ panic::propagate(err);\n\/\/\/ }\n\/\/\/ ```\n#[unstable(feature = \"panic_propagate\", reason = \"awaiting feedback\", issue = \"30752\")]\npub fn propagate(payload: Box<Any + Send>) -> ! {\n unwind::rust_panic(payload)\n}\n<commit_msg>Add a since to deprecations<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Panic support in the standard library\n\n#![unstable(feature = \"std_panic\", reason = \"awaiting feedback\",\n issue = \"27719\")]\n\nuse any::Any;\nuse boxed::Box;\nuse cell::UnsafeCell;\nuse ops::{Deref, DerefMut};\nuse ptr::{Unique, Shared};\nuse rc::Rc;\nuse sync::{Arc, Mutex, RwLock};\nuse sys_common::unwind;\nuse thread::Result;\n\npub use panicking::{take_handler, set_handler, PanicInfo, Location};\n\n\/\/\/ A marker trait which represents \"panic safe\" types in Rust.\n\/\/\/\n\/\/\/ This trait is implemented by default for many types and behaves similarly in\n\/\/\/ terms of inference of implementation to the `Send` and `Sync` traits. The\n\/\/\/ purpose of this trait is to encode what types are safe to cross a `recover`\n\/\/\/ boundary with no fear of panic safety.\n\/\/\/\n\/\/\/ ## What is panic safety?\n\/\/\/\n\/\/\/ In Rust a function can \"return\" early if it either panics or calls a\n\/\/\/ function which transitively panics. This sort of control flow is not always\n\/\/\/ anticipated, and has the possibility of causing subtle bugs through a\n\/\/\/ combination of two cricial components:\n\/\/\/\n\/\/\/ 1. A data structure is in a temporarily invalid state when the thread\n\/\/\/ panics.\n\/\/\/ 2. 
This broken invariant is then later observed.\n\/\/\/\n\/\/\/ Typically in Rust, it is difficult to perform step (2) because catching a\n\/\/\/ panic involves either spawning a thread (which in turn makes it difficult\n\/\/\/ to later witness broken invariants) or using the `recover` function in this\n\/\/\/ module. Additionally, even if an invariant is witnessed, it typically isn't a\n\/\/\/ problem in Rust because there are no uninitialized values (like in C or C++).\n\/\/\/\n\/\/\/ It is possible, however, for **logical** invariants to be broken in Rust,\n\/\/\/ which can end up causing behavioral bugs. Another key aspect of panic safety\n\/\/\/ in Rust is that, in the absence of `unsafe` code, a panic cannot lead to\n\/\/\/ memory unsafety.\n\/\/\/\n\/\/\/ That was a bit of a whirlwind tour of panic safety, but for more information\n\/\/\/ about panic safety and how it applies to Rust, see an [associated RFC][rfc].\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ ## What is `RecoverSafe`?\n\/\/\/\n\/\/\/ Now that we've got an idea of what panic safety is in Rust, it's also\n\/\/\/ important to understand what this trait represents. As mentioned above, one\n\/\/\/ way to witness broken invariants is through the `recover` function in this\n\/\/\/ module as it allows catching a panic and then re-using the environment of\n\/\/\/ the closure.\n\/\/\/\n\/\/\/ Simply put, a type `T` implements `RecoverSafe` if it cannot easily allow\n\/\/\/ witnessing a broken invariant through the use of `recover` (catching a\n\/\/\/ panic). This trait is a marker trait, so it is automatically implemented for\n\/\/\/ many types, and it is also structurally composed (e.g. a struct is recover\n\/\/\/ safe if all of its components are recover safe).\n\/\/\/\n\/\/\/ Note, however, that this is not an unsafe trait, so there is not a succinct\n\/\/\/ contract that this trait is providing. Instead it is intended as more of a\n\/\/\/ \"speed bump\" to alert users of `recover` that broken invariants may be\n\/\/\/ witnessed and may need to be accounted for.\n\/\/\/\n\/\/\/ ## Who implements `RecoverSafe`?\n\/\/\/\n\/\/\/ Types such as `&mut T` and `&RefCell<T>` are examples which are **not**\n\/\/\/ recover safe. The general idea is that any mutable state which can be shared\n\/\/\/ across `recover` is not recover safe by default. This is because it is very\n\/\/\/ easy to witness a broken invariant outside of `recover` as the data is\n\/\/\/ simply accessed as usual.\n\/\/\/\n\/\/\/ Types like `&Mutex<T>`, however, are recover safe because they implement\n\/\/\/ poisoning by default. They still allow witnessing a broken invariant, but\n\/\/\/ they already provide their own \"speed bumps\" to do so.\n\/\/\/\n\/\/\/ ## When should `RecoverSafe` be used?\n\/\/\/\n\/\/\/ It is not intended that most types or functions need to worry about this trait.\n\/\/\/ It is only used as a bound on the `recover` function and as mentioned above,\n\/\/\/ the lack of `unsafe` means it is mostly an advisory. 
The `AssertRecoverSafe`\n\/\/\/ wrapper struct in this module can be used to force this trait to be\n\/\/\/ implemented for any closed over variables passed to the `recover` function\n\/\/\/ (more on this below).\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} may not be safely transferred \\\n across a recover boundary\"]\npub trait RecoverSafe {}\n\n\/\/\/ A marker trait representing types where a shared reference is considered\n\/\/\/ recover safe.\n\/\/\/\n\/\/\/ This trait is namely not implemented by `UnsafeCell`, the root of all\n\/\/\/ interior mutability.\n\/\/\/\n\/\/\/ This is a \"helper marker trait\" used to provide impl blocks for the\n\/\/\/ `RecoverSafe` trait, for more information see that documentation.\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} contains interior mutability \\\n and a reference may not be safely transferrable \\\n across a recover boundary\"]\npub trait RefRecoverSafe {}\n\n\/\/\/ A simple wrapper around a type to assert that it is panic safe.\n\/\/\/\n\/\/\/ When using `recover` it may be the case that some of the closed over\n\/\/\/ variables are not panic safe. For example if `&mut T` is captured the\n\/\/\/ compiler will generate a warning indicating that it is not panic safe. It\n\/\/\/ may not be the case, however, that this is actually a problem due to the\n\/\/\/ specific usage of `recover` if panic safety is specifically taken into\n\/\/\/ account. This wrapper struct is useful for a quick and lightweight\n\/\/\/ annotation that a variable is indeed panic safe.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ One way to use `AssertRecoverSafe` is to assert that the entire closure\n\/\/\/ itself is recover safe, bypassing all checks for all variables:\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/\n\/\/\/ \/\/ This code will not compile because the closure captures `&mut variable`\n\/\/\/ \/\/ which is not considered panic safe by default.\n\/\/\/\n\/\/\/ \/\/ panic::recover(|| {\n\/\/\/ \/\/ variable += 3;\n\/\/\/ \/\/ });\n\/\/\/\n\/\/\/ \/\/ This, however, will compile due to the `AssertRecoverSafe` wrapper\n\/\/\/ let result = panic::recover(AssertRecoverSafe(|| {\n\/\/\/ variable += 3;\n\/\/\/ }));\n\/\/\/ \/\/ ...\n\/\/\/ ```\n\/\/\/\n\/\/\/ Wrapping the entire closure amounts to a blanket assertion that all captured\n\/\/\/ variables are recover safe. This has the downside that if new captures are\n\/\/\/ added in the future, they will also be considered recover safe. Therefore,\n\/\/\/ you may prefer to just wrap individual captures, as shown below. 
This is\n\/\/\/ more annotation, but it ensures that if a new capture is added which is not\n\/\/\/ recover safe, you will get a compilation error at that time, which will\n\/\/\/ allow you to consider whether that new capture in fact represents a bug or\n\/\/\/ not.\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/ let other_capture = 3;\n\/\/\/\n\/\/\/ let result = {\n\/\/\/ let mut wrapper = AssertRecoverSafe(&mut variable);\n\/\/\/ panic::recover(move || {\n\/\/\/ **wrapper += other_capture;\n\/\/\/ })\n\/\/\/ };\n\/\/\/ \/\/ ...\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub struct AssertRecoverSafe<T>(pub T);\n\n\/\/ Implementations of the `RecoverSafe` trait:\n\/\/\n\/\/ * By default everything is recover safe\n\/\/ * pointers to T that contain mutability of some form are not recover safe\n\/\/ * Unique, an owning pointer, lifts an implementation\n\/\/ * Types like Mutex\/RwLock which are explicitly poisoned are recover safe\n\/\/ * Our custom AssertRecoverSafe wrapper is indeed recover safe\nimpl RecoverSafe for .. {}\nimpl<'a, T: ?Sized> !RecoverSafe for &'a mut T {}\nimpl<'a, T: RefRecoverSafe + ?Sized> RecoverSafe for &'a T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *const T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *mut T {}\nimpl<T: RecoverSafe> RecoverSafe for Unique<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Shared<T> {}\nimpl<T: ?Sized> RecoverSafe for Mutex<T> {}\nimpl<T: ?Sized> RecoverSafe for RwLock<T> {}\nimpl<T> RecoverSafe for AssertRecoverSafe<T> {}\n\n\/\/ not covered via the Shared impl above b\/c the inner contents use\n\/\/ Cell\/AtomicUsize, but the usage here is recover safe so we can lift the\n\/\/ impl up one level to Arc\/Rc itself\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Rc<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Arc<T> {}\n\n\/\/ Pretty simple implementations for the `RefRecoverSafe` marker trait,\n\/\/ basically just saying that this is a marker trait and `UnsafeCell` is the\n\/\/ only thing which doesn't implement it (which then transitively applies to\n\/\/ everything else).\nimpl RefRecoverSafe for .. 
{}\nimpl<T: ?Sized> !RefRecoverSafe for UnsafeCell<T> {}\nimpl<T> RefRecoverSafe for AssertRecoverSafe<T> {}\n\nimpl<T> AssertRecoverSafe<T> {\n \/\/\/ Creates a new `AssertRecoverSafe` wrapper around the provided type.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n #[rustc_deprecated(reason = \"the type's field is now public, construct it directly\", since = \"1.9.0\")]\n pub fn new(t: T) -> AssertRecoverSafe<T> {\n AssertRecoverSafe(t)\n }\n\n \/\/\/ Consumes the `AssertRecoverSafe`, returning the wrapped value.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n #[rustc_deprecated(reason = \"the type's field is now public, access it directly\", since = \"1.9.0\")]\n pub fn into_inner(self) -> T {\n self.0\n }\n}\n\nimpl<T> Deref for AssertRecoverSafe<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.0\n }\n}\n\nimpl<T> DerefMut for AssertRecoverSafe<T> {\n fn deref_mut(&mut self) -> &mut T {\n &mut self.0\n }\n}\n\nimpl<R, F: FnOnce() -> R> FnOnce<()> for AssertRecoverSafe<F> {\n type Output = R;\n\n extern \"rust-call\" fn call_once(self, _args: ()) -> R {\n (self.0)()\n }\n}\n\n\/\/\/ Invokes a closure, capturing the cause of panic if one occurs.\n\/\/\/\n\/\/\/ This function will return `Ok` with the closure's result if the closure\n\/\/\/ does not panic, and will return `Err(cause)` if the closure panics. The\n\/\/\/ `cause` returned is the object with which panic was originally invoked.\n\/\/\/\n\/\/\/ It is currently undefined behavior to unwind from Rust code into foreign\n\/\/\/ code, so this function is particularly useful when Rust is called from\n\/\/\/ another language (normally C). This can run arbitrary Rust code, capturing a\n\/\/\/ panic and allowing a graceful handling of the error.\n\/\/\/\n\/\/\/ It is **not** recommended to use this function for a general try\/catch\n\/\/\/ mechanism. The `Result` type is more appropriate to use for functions that\n\/\/\/ can fail on a regular basis.\n\/\/\/\n\/\/\/ The closure provided is required to adhere to the `RecoverSafe` bound to ensure\n\/\/\/ that all captured variables are safe to cross this recover boundary. The\n\/\/\/ purpose of this bound is to encode the concept of [exception safety][rfc] in\n\/\/\/ the type system. Most usage of this function should not need to worry about\n\/\/\/ this bound as programs are naturally panic safe without `unsafe` code. 
If it\n\/\/\/ becomes a problem the associated `AssertRecoverSafe` wrapper type in this\n\/\/\/ module can be used to quickly assert that the usage here is indeed exception\n\/\/\/ safe.\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ println!(\"hello!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_ok());\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_err());\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub fn recover<F: FnOnce() -> R + RecoverSafe, R>(f: F) -> Result<R> {\n let mut result = None;\n unsafe {\n let result = &mut result;\n try!(unwind::try(move || *result = Some(f())))\n }\n Ok(result.unwrap())\n}\n\n\/\/\/ Triggers a panic without invoking the panic handler.\n\/\/\/\n\/\/\/ This is designed to be used in conjunction with `recover` to, for example,\n\/\/\/ carry a panic across a layer of C code.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```should_panic\n\/\/\/ #![feature(std_panic, recover, panic_propagate)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/\n\/\/\/ if let Err(err) = result {\n\/\/\/ panic::propagate(err);\n\/\/\/ }\n\/\/\/ ```\n#[unstable(feature = \"panic_propagate\", reason = \"awaiting feedback\", issue = \"30752\")]\npub fn propagate(payload: Box<Any + Send>) -> ! {\n unwind::rust_panic(payload)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Extend libsimpleservo API with a method to request a media session action<commit_after><|endoftext|>"} {"text":"<commit_before>use toml::Table;\n\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\nimpl EntryHeader {\n\n pub fn new(toml: Table) -> EntryHeader {\n EntryHeader {\n toml: toml,\n }\n }\n\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n}\n<commit_msg>header.rs: Add documentation<commit_after>use toml::Table;\n\n\/**\n * EntryHeader\n *\n * This is basically a wrapper around toml::Table which provides convenience to the user of the\n * librray.\n *\/\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\nimpl EntryHeader {\n\n \/**\n * Get a new header object with a already-filled toml table\n *\/\n pub fn new(toml: Table) -> EntryHeader {\n EntryHeader {\n toml: toml,\n }\n }\n\n \/**\n * Get the table which lives in the background\n *\/\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate emu;\nextern crate spc;\nextern crate snes_apu;\n\nuse std::iter;\nuse std::env;\nuse std::io::{Result, Error, ErrorKind, Write, stdout, stdin};\nuse std::thread;\nuse std::sync::{Arc, Mutex};\n\nuse emu::audio_driver::AudioDriver;\nuse emu::audio_driver_factory;\n\nuse spc::spc::{Spc, Emulator};\nuse snes_apu::apu::Apu;\nuse snes_apu::dsp::dsp::{SAMPLE_RATE, BUFFER_LEN};\n\nfn main() {\n if let Err(e) = play_spc_files() {\n println!(\"ERROR: {}\", e);\n std::process::exit(1);\n }\n}\n\nfn play_spc_files() -> Result<()> {\n for file_name in try!(get_file_names()) {\n try!(play_spc_file(&file_name));\n }\n Ok(())\n}\n\nfn get_file_names() -> Result<iter::Skip<env::Args>> {\n let args = env::args();\n match 
args.len() {\n 1 => Err(Error::new(ErrorKind::Other, \"No file(s) specified\")),\n _ => Ok(args.skip(1))\n }\n}\n\nfn play_spc_file(file_name: &String) -> Result<()> {\n let spc = try!(Spc::load(file_name));\n\n println!(\"SPC: {}\", file_name);\n println!(\" Version Minor: {}\", spc.version_minor);\n println!(\" PC: {}\", spc.pc);\n println!(\" A: {}\", spc.a);\n println!(\" X: {}\", spc.x);\n println!(\" Y: {}\", spc.y);\n println!(\" PSW: {}\", spc.psw);\n println!(\" SP: {}\", spc.sp);\n\n if let Some(ref id666_tag) = spc.id666_tag {\n println!(\" ID666 tag present:\");\n println!(\" Song title: {}\", id666_tag.song_title);\n println!(\" Game title: {}\", id666_tag.game_title);\n println!(\" Dumper name: {}\", id666_tag.dumper_name);\n println!(\" Comments: {}\", id666_tag.comments);\n println!(\" Date dumped (MM\/DD\/YYYY): {}\", id666_tag.date_dumped);\n println!(\" Seconds to play before fading out: {}\", id666_tag.seconds_to_play_before_fading_out);\n println!(\" Fade out length: {}ms\", id666_tag.fade_out_length);\n println!(\" Artist name: {}\", id666_tag.artist_name);\n println!(\" Default channel disables: {}\", id666_tag.default_channel_disables);\n println!(\" Dumping emulator: {}\", match id666_tag.dumping_emulator {\n Emulator::Unknown => \"Unknown\",\n Emulator::ZSnes => \"ZSnes\",\n Emulator::Snes9x => \"Snes9x\"\n });\n } else {\n println!(\" No ID666 tag present.\");\n }\n\n let mut apu = Apu::new();\n apu.set_state(&spc);\n \/\/ Most SPC's have crap in the echo buffer on startup, so while it's not technically correct, we'll clear that.\n \/\/ The example for blargg's APU emulator (which is known to be the most accurate there is) also does this, so I\n \/\/ think we're OK to do it too :)\n apu.clear_echo_buffer();\n\n let mut driver = audio_driver_factory::create_default();\n driver.set_sample_rate(SAMPLE_RATE as i32);\n let mut left = [0; BUFFER_LEN];\n let mut right = [0; BUFFER_LEN];\n driver.set_render_callback(Some(Box::new(move |buffer, num_frames| {\n apu.render(&mut left, &mut right, num_frames as i32);\n for i in 0..num_frames {\n let j = i * 2;\n buffer[j + 0] = left[i] as f32 \/ 32768.0;\n buffer[j + 1] = right[i] as f32 \/ 32768.0;\n }\n })));\n\n println!(\"Return quits.\");\n try!(wait_for_key_press_with_busy_icon());\n\n Ok(())\n}\n\n\/\/ TODO: This function is super thread-safe but can panic XD\nfn wait_for_key_press_with_busy_icon() -> Result<()> {\n let is_done = Arc::new(Mutex::new(false));\n\n let thread_is_done = is_done.clone();\n let handle = thread::spawn(move || {\n let chars = ['-', '\/', '|', '\\\\'];\n let mut char_index = 0;\n while !*(thread_is_done.lock().unwrap()) {\n print!(\"\\r[{}]\", chars[char_index]);\n stdout().flush().unwrap();\n char_index = (char_index + 1) % chars.len();\n\n thread::sleep_ms(5);\n }\n print!(\"\\r \\r\");\n });\n\n let mut s = String::new();\n try!(stdin().read_line(&mut s));\n *is_done.lock().unwrap() = true;\n handle.join().unwrap();\n\n Ok(())\n}\n<commit_msg>Fade out length and delay work<commit_after>extern crate emu;\nextern crate spc;\nextern crate snes_apu;\n\nuse std::iter;\nuse std::env;\nuse std::io::{Result, Error, ErrorKind, Write, stdout, stdin};\nuse std::thread;\nuse std::sync::{Arc, Mutex};\n\nuse emu::audio_driver::AudioDriver;\nuse emu::audio_driver_factory;\n\nuse spc::spc::{Spc, Emulator};\nuse snes_apu::apu::Apu;\nuse snes_apu::dsp::dsp::{SAMPLE_RATE, BUFFER_LEN};\n\nfn main() {\n if let Err(e) = play_spc_files() {\n println!(\"ERROR: {}\", e);\n std::process::exit(1);\n }\n}\n\nfn 
play_spc_files() -> Result<()> {\n for file_name in try!(get_file_names()) {\n try!(play_spc_file(&file_name));\n }\n Ok(())\n}\n\nfn get_file_names() -> Result<iter::Skip<env::Args>> {\n let args = env::args();\n match args.len() {\n 1 => Err(Error::new(ErrorKind::Other, \"No file(s) specified\")),\n _ => Ok(args.skip(1))\n }\n}\n\nstruct SpcEndState {\n sample_pos: i32,\n fade_out_sample: i32,\n end_sample: i32\n}\n\nfn play_spc_file(file_name: &String) -> Result<()> {\n let spc = try!(Spc::load(file_name));\n\n println!(\"SPC: {}\", file_name);\n println!(\" Version Minor: {}\", spc.version_minor);\n println!(\" PC: {}\", spc.pc);\n println!(\" A: {}\", spc.a);\n println!(\" X: {}\", spc.x);\n println!(\" Y: {}\", spc.y);\n println!(\" PSW: {}\", spc.psw);\n println!(\" SP: {}\", spc.sp);\n\n if let Some(ref id666_tag) = spc.id666_tag {\n println!(\" ID666 tag present:\");\n println!(\" Song title: {}\", id666_tag.song_title);\n println!(\" Game title: {}\", id666_tag.game_title);\n println!(\" Dumper name: {}\", id666_tag.dumper_name);\n println!(\" Comments: {}\", id666_tag.comments);\n println!(\" Date dumped (MM\/DD\/YYYY): {}\", id666_tag.date_dumped);\n println!(\" Seconds to play before fading out: {}\", id666_tag.seconds_to_play_before_fading_out);\n println!(\" Fade out length: {}ms\", id666_tag.fade_out_length);\n println!(\" Artist name: {}\", id666_tag.artist_name);\n println!(\" Default channel disables: {}\", id666_tag.default_channel_disables);\n println!(\" Dumping emulator: {}\", match id666_tag.dumping_emulator {\n Emulator::Unknown => \"Unknown\",\n Emulator::ZSnes => \"ZSnes\",\n Emulator::Snes9x => \"Snes9x\"\n });\n } else {\n println!(\" No ID666 tag present.\");\n };\n\n let mut apu = Apu::new();\n apu.set_state(&spc);\n \/\/ Most SPC's have crap in the echo buffer on startup, so while it's not technically correct, we'll clear that.\n \/\/ The example for blargg's APU emulator (which is known to be the most accurate there is) also does this, so I\n \/\/ think we're OK to do it too :)\n apu.clear_echo_buffer();\n\n let mut driver = audio_driver_factory::create_default();\n driver.set_sample_rate(SAMPLE_RATE as i32);\n let mut left = Box::new([0; BUFFER_LEN]);\n let mut right = Box::new([0; BUFFER_LEN]);\n let end_state = if let Some(ref id666_tag) = spc.id666_tag {\n let fade_out_sample = id666_tag.seconds_to_play_before_fading_out * (SAMPLE_RATE as i32);\n let end_sample = fade_out_sample + id666_tag.fade_out_length * (SAMPLE_RATE as i32) \/ 1000;\n Some(Arc::new(Mutex::new(SpcEndState {\n sample_pos: 0,\n fade_out_sample: fade_out_sample,\n end_sample: end_sample\n })))\n } else {\n None\n };\n let driver_end_state = end_state.clone();\n driver.set_render_callback(Some(Box::new(move |buffer, num_frames| {\n apu.render(&mut *left, &mut *right, num_frames as i32);\n match driver_end_state {\n Some(ref state_mutex) => {\n let state = &mut *state_mutex.lock().unwrap();\n for i in 0..num_frames {\n let j = i * 2;\n let sample_index = state.sample_pos + (i as i32);\n let f = if sample_index >= state.end_sample {\n 0.0\n } else if sample_index >= state.fade_out_sample {\n 1.0 - ((sample_index - state.fade_out_sample) as f32) \/ ((state.end_sample - state.fade_out_sample) as f32)\n } else {\n 1.0\n };\n buffer[j + 0] = left[i] as f32 * f \/ 32768.0;\n buffer[j + 1] = right[i] as f32 * f \/ 32768.0;\n }\n state.sample_pos += num_frames as i32;\n },\n _ => {\n for i in 0..num_frames {\n let j = i * 2;\n buffer[j + 0] = left[i] as f32 \/ 32768.0;\n buffer[j + 1] = right[i] 
as f32 \/ 32768.0;\n }\n }\n }\n })));\n\n match end_state {\n Some(ref state_mutex) => {\n loop {\n {\n let state = &*state_mutex.lock().unwrap();\n if state.sample_pos >= state.end_sample {\n break;\n }\n }\n\n thread::sleep_ms(5);\n }\n },\n _ => {\n println!(\"Return stops song.\");\n try!(wait_for_key_press_with_busy_icon());\n }\n }\n\n Ok(())\n}\n\n\/\/ TODO: This function is super thread-safe but can panic XD\nfn wait_for_key_press_with_busy_icon() -> Result<()> {\n let is_done = Arc::new(Mutex::new(false));\n\n let thread_is_done = is_done.clone();\n let handle = thread::spawn(move || {\n let chars = ['-', '\/', '|', '\\\\'];\n let mut char_index = 0;\n while !*thread_is_done.lock().unwrap() {\n print!(\"\\r[{}]\", chars[char_index]);\n stdout().flush().unwrap();\n char_index = (char_index + 1) % chars.len();\n\n thread::sleep_ms(5);\n }\n print!(\"\\r \\r\");\n });\n\n let mut s = String::new();\n try!(stdin().read_line(&mut s));\n *is_done.lock().unwrap() = true;\n handle.join().unwrap();\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Some source has been updated.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add strings util for finding printable strings in binary files (similar to GNU strings)<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ issue #21405\n\/\/ ignore-tidy-linelength\n\nstruct Foo;\n\nfn foo<F>(f: F) where F: FnMut(Foo) {}\n\nfn main() {\n foo(|s| s.is_empty());\n \/\/~^ ERROR no method named `is_empty` found\n \/\/~^^ HELP #1: `core::slice::SliceExt`\n \/\/~^^^ HELP #2: `core::str::StrExt`\n \/\/~^^^^ HELP items from traits can only be used if the trait is implemented and in scope; the following traits define an item `is_empty`, perhaps you need to implement one of them:\n}\n<commit_msg>Fix compile-fail test for `ExactSizeIterator::is_empty`<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ issue #21405\n\/\/ ignore-tidy-linelength\n\nstruct Foo;\n\nfn foo<F>(f: F) where F: FnMut(Foo) {}\n\nfn main() {\n foo(|s| s.is_empty());\n \/\/~^ ERROR no method named `is_empty` found\n \/\/~^^ HELP #1: `std::iter::ExactSizeIterator`\n \/\/~^^^ HELP #2: `core::slice::SliceExt`\n \/\/~^^^^ HELP #3: `core::str::StrExt`\n \/\/~^^^^^ HELP items from traits can only be used if the trait is implemented and in scope; the following traits define an item `is_empty`, perhaps you need to implement one of them:\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Recompile SPIR-V if the reflection output is missing or out of date<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make progress implementing spec properly<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Original implementation taken from rust-memchr\n\/\/ Copyright 2015 Andrew Gallant, bluss and Nicolas Koch\n\nuse cmp;\nuse mem;\n\nconst LO_U64: u64 = 0x0101010101010101;\nconst HI_U64: u64 = 0x8080808080808080;\n\n\/\/ use truncation\nconst LO_USIZE: usize = LO_U64 as usize;\nconst HI_USIZE: usize = HI_U64 as usize;\n\n\/\/\/ Return `true` if `x` contains any zero byte.\n\/\/\/\n\/\/\/ From *Matters Computational*, J. 
Arndt\n\/\/\/\n\/\/\/ \"The idea is to subtract one from each of the bytes and then look for\n\/\/\/ bytes where the borrow propagated all the way to the most significant\n\/\/\/ bit.\"\n#[inline]\nfn contains_zero_byte(x: usize) -> bool {\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n}\n\n#[cfg(target_pointer_width = \"16\")]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) << 8 | b as usize\n}\n\n#[cfg(not(target_pointer_width = \"16\"))]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) * (::usize::MAX \/ 255)\n}\n\n\/\/\/ Return the first index matching the byte `x` in `text`.\npub fn memchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned initial part, before the first word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the last remaining part, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ search up to an aligned boundary\n let mut offset = ptr.align_offset(usize_bytes);\n if offset > 0 {\n offset = cmp::min(offset, len);\n if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {\n return Some(index);\n }\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n if len >= 2 * usize_bytes {\n while offset <= len - 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize) as *const usize);\n let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset += usize_bytes * 2;\n }\n }\n\n \/\/ find the byte after the point the body loop stopped\n text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)\n}\n\n\/\/\/ Return the last index matching the byte `x` in `text`.\npub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned tail, after the last word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the first remaining bytes, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n let mut offset = {\n \/\/ We call this just to obtain the length of the suffix\n let (_, _, suffix) = unsafe { text.align_to::<usize>() };\n len - suffix.len()\n };\n if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) {\n return Some(offset + index);\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n while offset >= 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize - 2 * usize_bytes as isize) as *const usize);\n let v = *(ptr.offset(offset as isize - usize_bytes as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset -= 2 * usize_bytes;\n }\n\n \/\/ find the byte before the point the body loop stopped\n text[..offset].iter().rposition(|elt| *elt == x)\n}\n<commit_msg>fix memrchr in miri<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Original implementation taken from rust-memchr\n\/\/ Copyright 2015 Andrew Gallant, bluss and Nicolas Koch\n\nuse cmp;\nuse mem;\n\nconst LO_U64: u64 = 0x0101010101010101;\nconst HI_U64: u64 = 0x8080808080808080;\n\n\/\/ use truncation\nconst LO_USIZE: usize = LO_U64 as usize;\nconst HI_USIZE: usize = HI_U64 as usize;\n\n\/\/\/ Return `true` if `x` contains any zero byte.\n\/\/\/\n\/\/\/ From *Matters Computational*, J. Arndt\n\/\/\/\n\/\/\/ \"The idea is to subtract one from each of the bytes and then look for\n\/\/\/ bytes where the borrow propagated all the way to the most significant\n\/\/\/ bit.\"\n#[inline]\nfn contains_zero_byte(x: usize) -> bool {\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n}\n\n#[cfg(target_pointer_width = \"16\")]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) << 8 | b as usize\n}\n\n#[cfg(not(target_pointer_width = \"16\"))]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) * (::usize::MAX \/ 255)\n}\n\n\/\/\/ Return the first index matching the byte `x` in `text`.\npub fn memchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned initial part, before the first word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the last remaining part, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ search up to an aligned boundary\n let mut offset = ptr.align_offset(usize_bytes);\n if offset > 0 {\n offset = cmp::min(offset, len);\n if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {\n return Some(index);\n }\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n if len >= 2 * usize_bytes {\n while offset <= len - 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize) as *const usize);\n let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset += usize_bytes * 2;\n }\n }\n\n \/\/ find the byte after the point the body loop stopped\n text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)\n}\n\n\/\/\/ Return the last index matching the byte `x` in `text`.\npub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned tail, after the last word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the first remaining bytes, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n type Chunk = usize;\n\n let (min_aligned_offset, max_aligned_offset) = {\n \/\/ We call this just to obtain the length of the prefix and suffix.\n \/\/ In the middle we always process two chunks at once.\n let (prefix, _, suffix) = unsafe { text.align_to::<(Chunk, Chunk)>() };\n 
(prefix.len(), len - suffix.len())\n };\n\n let mut offset = max_aligned_offset;\n if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) {\n return Some(offset + index);\n }\n\n \/\/ search the body of the text, make sure we don't cross min_aligned_offset.\n \/\/ offset is always aligned, so just testing `>` is sufficient and avoids possible\n \/\/ overflow.\n let repeated_x = repeat_byte(x);\n let chunk_bytes = mem::size_of::<Chunk>();\n\n while offset > min_aligned_offset {\n unsafe {\n let u = *(ptr.offset(offset as isize - 2 * chunk_bytes as isize) as *const Chunk);\n let v = *(ptr.offset(offset as isize - chunk_bytes as isize) as *const Chunk);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset -= 2 * chunk_bytes;\n }\n\n \/\/ find the byte before the point the body loop stopped\n text[..offset].iter().rposition(|elt| *elt == x)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>New Rust file for inspecting types<commit_after>#![feature(core)]\n\nfn assert_type<T>(_: &T, s:&'static str) {\n let type_name = unsafe { std::intrinsics::type_name::<T>() };\n assert_eq!(type_name, s);\n}\n\nfn main() {\n assert_type(& 1.0, \"f64\");\n assert_type(& 1.0f32, \"f32\");\n assert_type(& vec!(17, 100, 4), \"collections::vec::Vec<i32>\");\n assert_type(& \"Hello\", \"&'static str\");\n assert_type(& \"Hello\".to_string(), \"collections::string::String\");\n assert_type(& [1, 2, 3], \"[i32; 3]\");\n for i in 1..5 {\n assert_type(&i, \"i32\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use super::package::*;\nuse super::executor::*;\n\nuse alloc::boxed::Box;\n\nuse core::{cmp, ptr, mem};\n\nuse common::event::{self, Event, EventOption, KeyEvent, MouseEvent};\nuse common::resource::{NoneResource, Resource, ResourceType, URL, VecResource};\nuse common::scheduler::*;\nuse common::string::{String, ToString};\nuse common::vec::Vec;\n\nuse graphics::bmp::*;\nuse graphics::color::Color;\nuse graphics::display::Display;\nuse graphics::point::Point;\nuse graphics::size::Size;\nuse graphics::window::Window;\n\nuse programs::common::SessionItem;\n\npub struct Session {\n pub display: Display,\n pub background: BMPFile,\n pub cursor: BMPFile,\n pub mouse_point: Point,\n last_mouse_event: MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub packages: Vec<Box<Package>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize,\n}\n\nimpl Session {\n pub fn new() -> Self {\n unsafe {\n Session {\n display: Display::root(),\n background: BMPFile::new(),\n cursor: BMPFile::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n items: Vec::new(),\n packages: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: event::REDRAW_ALL,\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window) {\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window) {\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n 
self.windows.remove(i);\n }\n }\n\n i = 0;\n while i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows_ordered.remove(i);\n }\n }\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource> {\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n } else {\n list = scheme;\n }\n }\n }\n\n box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8())\n } else {\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n box NoneResource\n }\n }\n\n fn on_key(&mut self, key_event: KeyEvent) {\n if self.windows.len() > 0 {\n match self.windows.get(self.windows.len() - 1) {\n Option::Some(window_ptr) => {\n unsafe {\n (**window_ptr).on_key(key_event);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n }\n Option::None => (),\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent) {\n let mut catcher = -1;\n\n if mouse_event.y >= self.display.height as isize - 32 {\n if mouse_event.left_button && !self.last_mouse_event.left_button {\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n if mouse_event.x >= x &&\n mouse_event.x < x + package.icon.size.width as isize {\n execute(&package.binary, &package.url, &Vec::new());\n }\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n for j in 0..self.windows.len() {\n match self.windows.get(j) {\n Option::Some(catcher_window_ptr) =>\n if catcher_window_ptr == window_ptr {\n unsafe {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n } else {\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n }\n break;\n },\n Option::None => break,\n }\n }\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n break;\n }\n x += w as isize;\n }\n }\n } else {\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i) {\n Option::Some(window_ptr) => unsafe {\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n },\n Option::None => (),\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize) {\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => (),\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self) {\n if self.redraw > event::REDRAW_NONE {\n \/\/if self.redraw >= REDRAW_ALL {\n 
self.display.set(Color::new(64, 64, 64));\n if self.background.data.len() > 0 {\n self.background.draw(&self.display,\n Point::new((self.display.width as isize -\n self.background.size.width as isize) \/\n 2,\n (self.display.height as isize -\n self.background.size.height as isize) \/\n 2));\n }\n\n for i in 0..self.windows.len() {\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n }\n Option::None => (),\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32),\n Size::new(self.display.width, 32),\n Color::alpha(0, 0, 0, 128));\n\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n let y = self.display.height as isize - package.icon.size.height as isize;\n if self.mouse_point.y >= y && self.mouse_point.x >= x &&\n self.mouse_point.x < x + package.icon.size.width as isize {\n self.display.rect(Point::new(x, y),\n package.icon.size,\n Color::alpha(128, 128, 128, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n self.display\n .char(Point::new(c_x, y - 16), c, Color::new(255, 255, 255));\n c_x += 8;\n }\n }\n package.icon.draw(&self.display, Point::new(x, y));\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32),\n Size::new(w, 32),\n (**window_ptr).border_color);\n x += 4;\n\n for i in 0..chars {\n let c = (**window_ptr).title[i];\n if c != '\\0' {\n self.display.char(Point::new(x, self.display.height as isize - 24),\n c,\n (**window_ptr).title_color);\n }\n x += 8;\n }\n x += 8;\n }\n\n if self.cursor.data.len() > 0 {\n self.display.image_alpha(self.mouse_point,\n self.cursor.data.as_ptr(),\n self.cursor.size);\n } else {\n self.display.char(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9),\n 'X',\n Color::new(255, 255, 255));\n }\n \/\/}\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n \/*\n if self.cursor.data.len() > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data.as_ptr(), self.cursor.size);\n } else {\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n *\/\n\n self.redraw = event::REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event) {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) =>\n self.redraw = cmp::max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n\n if url_string.ends_with(\".bin\".to_string()) {\n execute(&URL::from_string(&url_string),\n &URL::new(),\n &Vec::new());\n } else {\n for package in self.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if url_string.ends_with(accept.substr(1, accept.len() - 1)) {\n accepted = true;\n break;\n }\n }\n if accepted {\n let mut args: Vec<String> = Vec::new();\n args.push(url_string.clone());\n execute(&package.binary, &package.url, &args);\n break;\n }\n }\n }\n }\n _ => (),\n }\n }\n}\n<commit_msg>Use default color to make loading time less 
noticeable<commit_after>use super::package::*;\nuse super::executor::*;\n\nuse alloc::boxed::Box;\n\nuse core::{cmp, ptr, mem};\n\nuse common::event::{self, Event, EventOption, KeyEvent, MouseEvent};\nuse common::resource::{NoneResource, Resource, ResourceType, URL, VecResource};\nuse common::scheduler::*;\nuse common::string::{String, ToString};\nuse common::vec::Vec;\n\nuse graphics::bmp::*;\nuse graphics::color::Color;\nuse graphics::display::Display;\nuse graphics::point::Point;\nuse graphics::size::Size;\nuse graphics::window::Window;\n\nuse programs::common::SessionItem;\n\npub struct Session {\n pub display: Display,\n pub background: BMPFile,\n pub cursor: BMPFile,\n pub mouse_point: Point,\n last_mouse_event: MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub packages: Vec<Box<Package>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize,\n}\n\nimpl Session {\n pub fn new() -> Self {\n unsafe {\n Session {\n display: Display::root(),\n background: BMPFile::new(),\n cursor: BMPFile::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n items: Vec::new(),\n packages: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: event::REDRAW_ALL,\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window) {\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window) {\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows.remove(i);\n }\n }\n\n i = 0;\n while i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows_ordered.remove(i);\n }\n }\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource> {\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n } else {\n list = scheme;\n }\n }\n }\n\n box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8())\n } else {\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n box NoneResource\n }\n }\n\n fn on_key(&mut self, key_event: KeyEvent) {\n if self.windows.len() > 0 {\n match self.windows.get(self.windows.len() - 1) {\n Option::Some(window_ptr) => {\n unsafe {\n (**window_ptr).on_key(key_event);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n }\n Option::None => (),\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent) {\n let mut catcher = 
-1;\n\n if mouse_event.y >= self.display.height as isize - 32 {\n if mouse_event.left_button && !self.last_mouse_event.left_button {\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n if mouse_event.x >= x &&\n mouse_event.x < x + package.icon.size.width as isize {\n execute(&package.binary, &package.url, &Vec::new());\n }\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n for j in 0..self.windows.len() {\n match self.windows.get(j) {\n Option::Some(catcher_window_ptr) =>\n if catcher_window_ptr == window_ptr {\n unsafe {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n } else {\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n }\n break;\n },\n Option::None => break,\n }\n }\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n break;\n }\n x += w as isize;\n }\n }\n } else {\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i) {\n Option::Some(window_ptr) => unsafe {\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n },\n Option::None => (),\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize) {\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => (),\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self) {\n if self.redraw > event::REDRAW_NONE {\n \/\/if self.redraw >= REDRAW_ALL {\n self.display.set(Color::new(75, 163, 253));\n if self.background.data.len() > 0 {\n self.background.draw(&self.display,\n Point::new((self.display.width as isize -\n self.background.size.width as isize) \/\n 2,\n (self.display.height as isize -\n self.background.size.height as isize) \/\n 2));\n }\n\n for i in 0..self.windows.len() {\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n }\n Option::None => (),\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32),\n Size::new(self.display.width, 32),\n Color::alpha(0, 0, 0, 128));\n\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n let y = self.display.height as isize - package.icon.size.height as isize;\n if self.mouse_point.y >= y && self.mouse_point.x >= x &&\n self.mouse_point.x < x + package.icon.size.width as isize {\n self.display.rect(Point::new(x, y),\n package.icon.size,\n Color::alpha(128, 128, 128, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n self.display\n .char(Point::new(c_x, y - 16), c, Color::new(255, 255, 255));\n c_x += 8;\n }\n }\n package.icon.draw(&self.display, Point::new(x, y));\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32),\n 
Size::new(w, 32),\n (**window_ptr).border_color);\n x += 4;\n\n for i in 0..chars {\n let c = (**window_ptr).title[i];\n if c != '\\0' {\n self.display.char(Point::new(x, self.display.height as isize - 24),\n c,\n (**window_ptr).title_color);\n }\n x += 8;\n }\n x += 8;\n }\n\n if self.cursor.data.len() > 0 {\n self.display.image_alpha(self.mouse_point,\n self.cursor.data.as_ptr(),\n self.cursor.size);\n } else {\n self.display.char(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9),\n 'X',\n Color::new(255, 255, 255));\n }\n \/\/}\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n \/*\n if self.cursor.data.len() > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data.as_ptr(), self.cursor.size);\n } else {\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n *\/\n\n self.redraw = event::REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event) {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) =>\n self.redraw = cmp::max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n\n if url_string.ends_with(\".bin\".to_string()) {\n execute(&URL::from_string(&url_string),\n &URL::new(),\n &Vec::new());\n } else {\n for package in self.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if url_string.ends_with(accept.substr(1, accept.len() - 1)) {\n accepted = true;\n break;\n }\n }\n if accepted {\n let mut args: Vec<String> = Vec::new();\n args.push(url_string.clone());\n execute(&package.binary, &package.url, &args);\n break;\n }\n }\n }\n }\n _ => (),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add cfail test for the new feature gate<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that the MSP430 interrupt ABI cannot be used when msp430_interrupt\n\/\/ feature gate is not used.\n\nextern \"msp430-interrupt\" fn foo() {}\n\/\/~^ ERROR msp430-interrupt ABI is experimental and subject to change\n\nfn main() {\n    foo();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add privately uninhabited dead code test<commit_after>\/\/ compile-pass\n\n#![deny(unreachable_code)]\n\nmod foo {\n    enum Bar {}\n\n    #[allow(dead_code)]\n    pub struct Foo {\n        value: Bar, \/\/ \"privately\" uninhabited\n    }\n\n    pub fn give_foo() -> Foo { panic!() }\n}\n\nfn main() {\n    foo::give_foo();\n    println!(\"Hello, world!\"); \/\/ ok: we can't tell that this code is dead\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ui simple helpers<commit_after>pub fn time_ui_fmtstr() -> &'static str {\n    \"YYYY-MM-DD[THH[:mm[:ss]]]\"\n}\n\npub fn time_ui_fmtstr_expl() -> &'static str {\n    r#\"In the UI, the format for Time is always YEAR-MONTH-DAY.\n    Optionally, Time can be specified by separating it from the date with 'T'.\n    Minutes and Seconds are optional.\n    \"#\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add Device Farm tests<commit_after>#![cfg(feature = \"devicefarm\")]\n\nextern crate rusoto;\n\nuse rusoto::devicefarm::{DeviceFarmClient, ListDevicesRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\npub fn should_list_devices() {\n    let credentials = DefaultCredentialsProvider::new().unwrap();\n    let client = DeviceFarmClient::new(credentials, Region::UsWest2);\n\n    let request = ListDevicesRequest::default();\n\n    match client.list_devices(&request) {\n        Ok(response) => {\n            assert!(!response.devices.unwrap().is_empty())\n        },\n        Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n    };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting mistake in the help function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test #[from]<commit_after>use std::io;\nuse thiserror::Error;\n\n#[derive(Error, Debug)]\n#[error(\"...\")]\npub struct ErrorStruct {\n    #[from]\n    source: io::Error,\n}\n\n#[derive(Error, Debug)]\n#[error(\"...\")]\npub struct ErrorTuple(#[from] io::Error);\n\n#[derive(Error, Debug)]\n#[error(\"...\")]\npub enum ErrorEnum {\n    Test {\n        #[from]\n        source: io::Error,\n    },\n}\n\n#[derive(Error, Debug)]\n#[error(\"...\")]\npub enum Many {\n    Any(#[from] anyhow::Error),\n    Io(#[from] io::Error),\n}\n\nfn assert_impl<T: From<io::Error>>() {}\n\n#[test]\nfn test_from() {\n    assert_impl::<ErrorStruct>();\n    assert_impl::<ErrorTuple>();\n    assert_impl::<ErrorEnum>();\n    assert_impl::<Many>();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate png;\nextern crate std;\nextern crate test;\n\nuse std::io;\nuse std::io::{File, Reader, Process};\nuse std::io::process::ExitStatus;\nuse std::os;\nuse std::str;\nuse test::{DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};\nuse test::run_tests_console;\n\nfn main() {\n let args = os::args();\n let mut parts = args.tail().split(|e| \"--\" == e.as_slice());\n\n let files = parts.next().unwrap(); \/\/ .split() is never empty\n let servo_args = parts.next().unwrap_or(&[]);\n\n if files.len() == 0 {\n fail!(\"error: at least one reftest list must be given\");\n }\n\n let tests = parse_lists(files, servo_args);\n let test_opts = TestOpts {\n filter: None,\n run_ignored: false,\n logfile: None,\n run_tests: true,\n run_benchmarks: false,\n ratchet_noise_percent: None,\n ratchet_metrics: None,\n save_metrics: None,\n test_shard: None,\n };\n\n match run_tests_console(&test_opts, tests) {\n Ok(false) => os::set_exit_status(1), \/\/ tests failed\n Err(_) => os::set_exit_status(2), \/\/ I\/O-related failure\n _ => (),\n }\n}\n\n#[deriving(Eq)]\nenum ReftestKind {\n Same,\n Different,\n}\n\nstruct Reftest {\n name: ~str,\n kind: ReftestKind,\n files: [~str, ..2],\n id: uint,\n servo_args: ~[~str],\n}\n\nfn parse_lists(filenames: &[~str], servo_args: &[~str]) -> Vec<TestDescAndFn> {\n let mut tests = Vec::new();\n let mut next_id = 0;\n for file in filenames.iter() {\n let file_path = Path::new(file.clone());\n let contents = match File::open_mode(&file_path, io::Open, io::Read)\n .and_then(|mut f| {\n f.read_to_str()\n }) {\n Ok(s) => s,\n _ => fail!(\"Could not read file\"),\n };\n\n for line in contents.lines() {\n \/\/ ignore comments\n if line.starts_with(\"#\") {\n continue;\n }\n\n let parts: ~[&str] = line.split(' ').filter(|p| !p.is_empty()).collect();\n\n if parts.len() != 3 {\n fail!(\"reftest line: '{:s}' doesn't match 'KIND LEFT RIGHT'\", line);\n }\n\n let kind = match parts[0] {\n \"==\" => Same,\n \"!=\" => Different,\n _ => fail!(\"reftest line: '{:s}' has invalid kind '{:s}'\",\n line, parts[0])\n };\n let src_path = file_path.dir_path();\n let src_dir = src_path.display().to_str();\n let file_left = src_dir + \"\/\" + parts[1];\n let file_right = src_dir + \"\/\" + parts[2];\n\n let reftest = Reftest {\n name: parts[1] + \" \/ \" + parts[2],\n kind: kind,\n files: [file_left, file_right],\n id: next_id,\n servo_args: servo_args.to_owned(),\n };\n\n next_id += 1;\n\n tests.push(make_test(reftest));\n }\n }\n tests\n}\n\nfn make_test(reftest: Reftest) -> TestDescAndFn {\n let name = reftest.name.clone();\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(name),\n ignore: false,\n should_fail: false,\n },\n testfn: DynTestFn(proc() {\n check_reftest(reftest);\n }),\n }\n}\n\nfn capture(reftest: &Reftest, side: uint) -> png::Image {\n let filename = format!(\"\/tmp\/servo-reftest-{:06u}-{:u}.png\", reftest.id, side);\n let mut args = reftest.servo_args.clone();\n args.push_all_move(~[\"-f\".to_owned(), \"-o\".to_owned(), filename.clone(), reftest.files[side].clone()]);\n\n let retval = match Process::status(\".\/servo\", args) {\n Ok(status) => status,\n Err(e) => fail!(\"failed to execute process: {}\", e),\n };\n assert!(retval == ExitStatus(0));\n\n png::load_png(&from_str::<Path>(filename).unwrap()).unwrap()\n}\n\nfn check_reftest(reftest: Reftest) {\n let left = capture(&reftest, 0);\n let right = capture(&reftest, 1);\n\n let pixels: ~[u8] = 
left.pixels.iter().zip(right.pixels.iter()).map(|(&a, &b)| {\n if a as i8 - b as i8 == 0 {\n \/\/ White for correct\n 0xFF\n } else {\n \/\/ \"1100\" in the RGBA channel with an error for an incorrect value\n \/\/ This results in some number of C0 and FFs, which is much more\n \/\/ readable (and distinguishable) than the previous difference-wise\n \/\/ scaling but does not require reconstructing the actual RGBA pixel.\n 0xC0\n }\n }).collect();\n\n if pixels.iter().any(|&a| a < 255) {\n let output_str = format!(\"\/tmp\/servo-reftest-{:06u}-diff.png\", reftest.id);\n let output = from_str::<Path>(output_str).unwrap();\n\n let img = png::Image {\n width: left.width,\n height: left.height,\n color_type: png::RGBA8,\n pixels: pixels,\n };\n let res = png::store_png(&img, &output);\n assert!(res.is_ok());\n\n assert!(reftest.kind == Different, \"rendering difference: {}\", output_str);\n } else {\n assert!(reftest.kind == Same);\n }\n}\n<commit_msg>auto merge of #2350 : zwarich\/servo\/unused-str-import, r=metajack<commit_after>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate png;\nextern crate std;\nextern crate test;\n\nuse std::io;\nuse std::io::{File, Reader, Process};\nuse std::io::process::ExitStatus;\nuse std::os;\nuse test::{DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn};\nuse test::run_tests_console;\n\nfn main() {\n let args = os::args();\n let mut parts = args.tail().split(|e| \"--\" == e.as_slice());\n\n let files = parts.next().unwrap(); \/\/ .split() is never empty\n let servo_args = parts.next().unwrap_or(&[]);\n\n if files.len() == 0 {\n fail!(\"error: at least one reftest list must be given\");\n }\n\n let tests = parse_lists(files, servo_args);\n let test_opts = TestOpts {\n filter: None,\n run_ignored: false,\n logfile: None,\n run_tests: true,\n run_benchmarks: false,\n ratchet_noise_percent: None,\n ratchet_metrics: None,\n save_metrics: None,\n test_shard: None,\n };\n\n match run_tests_console(&test_opts, tests) {\n Ok(false) => os::set_exit_status(1), \/\/ tests failed\n Err(_) => os::set_exit_status(2), \/\/ I\/O-related failure\n _ => (),\n }\n}\n\n#[deriving(Eq)]\nenum ReftestKind {\n Same,\n Different,\n}\n\nstruct Reftest {\n name: ~str,\n kind: ReftestKind,\n files: [~str, ..2],\n id: uint,\n servo_args: ~[~str],\n}\n\nfn parse_lists(filenames: &[~str], servo_args: &[~str]) -> Vec<TestDescAndFn> {\n let mut tests = Vec::new();\n let mut next_id = 0;\n for file in filenames.iter() {\n let file_path = Path::new(file.clone());\n let contents = match File::open_mode(&file_path, io::Open, io::Read)\n .and_then(|mut f| {\n f.read_to_str()\n }) {\n Ok(s) => s,\n _ => fail!(\"Could not read file\"),\n };\n\n for line in contents.lines() {\n \/\/ ignore comments\n if line.starts_with(\"#\") {\n continue;\n }\n\n let parts: ~[&str] = line.split(' ').filter(|p| !p.is_empty()).collect();\n\n if parts.len() != 3 {\n fail!(\"reftest line: '{:s}' doesn't match 'KIND LEFT RIGHT'\", line);\n }\n\n let kind = match parts[0] {\n \"==\" => Same,\n \"!=\" => Different,\n _ => fail!(\"reftest line: '{:s}' has invalid kind '{:s}'\",\n line, parts[0])\n };\n 
let src_path = file_path.dir_path();\n let src_dir = src_path.display().to_str();\n let file_left = src_dir + \"\/\" + parts[1];\n let file_right = src_dir + \"\/\" + parts[2];\n\n let reftest = Reftest {\n name: parts[1] + \" \/ \" + parts[2],\n kind: kind,\n files: [file_left, file_right],\n id: next_id,\n servo_args: servo_args.to_owned(),\n };\n\n next_id += 1;\n\n tests.push(make_test(reftest));\n }\n }\n tests\n}\n\nfn make_test(reftest: Reftest) -> TestDescAndFn {\n let name = reftest.name.clone();\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(name),\n ignore: false,\n should_fail: false,\n },\n testfn: DynTestFn(proc() {\n check_reftest(reftest);\n }),\n }\n}\n\nfn capture(reftest: &Reftest, side: uint) -> png::Image {\n let filename = format!(\"\/tmp\/servo-reftest-{:06u}-{:u}.png\", reftest.id, side);\n let mut args = reftest.servo_args.clone();\n args.push_all_move(~[\"-f\".to_owned(), \"-o\".to_owned(), filename.clone(), reftest.files[side].clone()]);\n\n let retval = match Process::status(\".\/servo\", args) {\n Ok(status) => status,\n Err(e) => fail!(\"failed to execute process: {}\", e),\n };\n assert!(retval == ExitStatus(0));\n\n png::load_png(&from_str::<Path>(filename).unwrap()).unwrap()\n}\n\nfn check_reftest(reftest: Reftest) {\n let left = capture(&reftest, 0);\n let right = capture(&reftest, 1);\n\n let pixels: ~[u8] = left.pixels.iter().zip(right.pixels.iter()).map(|(&a, &b)| {\n if a as i8 - b as i8 == 0 {\n \/\/ White for correct\n 0xFF\n } else {\n \/\/ \"1100\" in the RGBA channel with an error for an incorrect value\n \/\/ This results in some number of C0 and FFs, which is much more\n \/\/ readable (and distinguishable) than the previous difference-wise\n \/\/ scaling but does not require reconstructing the actual RGBA pixel.\n 0xC0\n }\n }).collect();\n\n if pixels.iter().any(|&a| a < 255) {\n let output_str = format!(\"\/tmp\/servo-reftest-{:06u}-diff.png\", reftest.id);\n let output = from_str::<Path>(output_str).unwrap();\n\n let img = png::Image {\n width: left.width,\n height: left.height,\n color_type: png::RGBA8,\n pixels: pixels,\n };\n let res = png::store_png(&img, &output);\n assert!(res.is_ok());\n\n assert!(reftest.kind == Different, \"rendering difference: {}\", output_str);\n } else {\n assert!(reftest.kind == Same);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test that binop subtyping in rustc_typeck fixes #27949<commit_after>\/\/ run-pass\n\/\/\n\/\/ At one time, the `==` operator (and other binary operators) did not\n\/\/ support subtyping during type checking, and would therefore require\n\/\/ LHS and RHS to be exactly identical--i.e. 
to have the same lifetimes.\n\/\/\n\/\/ This was fixed in 1a7fb7dc78439a704f024609ce3dc0beb1386552.\n\n#[derive(Copy, Clone)]\nstruct Input<'a> {\n foo: &'a u32\n}\n\nimpl <'a> std::cmp::PartialEq<Input<'a>> for Input<'a> {\n fn eq(&self, other: &Input<'a>) -> bool {\n self.foo == other.foo\n }\n\n fn ne(&self, other: &Input<'a>) -> bool {\n self.foo != other.foo\n }\n}\n\n\nfn check_equal<'a, 'b>(x: Input<'a>, y: Input<'b>) -> bool {\n \/\/ Type checking error due to 'a != 'b prior to 1a7fb7dc78\n x == y\n}\n\nfn main() {\n let i = 1u32;\n let j = 1u32;\n let k = 2u32;\n\n let input_i = Input { foo: &i };\n let input_j = Input { foo: &j };\n let input_k = Input { foo: &k };\n assert!(check_equal(input_i, input_i));\n assert!(check_equal(input_i, input_j));\n assert!(!check_equal(input_i, input_k));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Match more status codes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Quick test for calling points.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Code related to match expresions. These are sufficiently complex\n\/\/! to warrant their own module and submodules. :) This main module\n\/\/! includes the high-level algorithm, the submodules contain the\n\/\/! details.\n\nuse build::{BlockAnd, Builder};\nuse repr::*;\nuse hair::*;\n\n\/\/ helper functions, broken out by category:\nmod simplify;\nmod test;\nmod util;\n\nimpl<H:Hair> Builder<H> {\n pub fn match_expr(&mut self,\n destination: &Lvalue<H>,\n span: H::Span,\n mut block: BasicBlock,\n discriminant: ExprRef<H>,\n arms: Vec<Arm<H>>)\n -> BlockAnd<()>\n {\n let discriminant_lvalue =\n unpack!(block = self.as_lvalue(block, discriminant));\n\n let arm_blocks: Vec<BasicBlock> =\n arms.iter()\n .map(|_| self.cfg.start_new_block())\n .collect();\n\n let arm_bodies: Vec<ExprRef<H>> =\n arms.iter()\n .map(|arm| arm.body.clone())\n .collect();\n\n \/\/ assemble a list of candidates: there is one candidate per\n \/\/ pattern, which means there may be more than one candidate\n \/\/ *per arm*. These candidates are kept sorted such that the\n \/\/ highest priority candidate comes last in the list. 
This the\n \/\/ reverse of the order in which candidates are written in the\n \/\/ source.\n let candidates: Vec<Candidate<H>> =\n arms.into_iter()\n .zip(arm_blocks.iter())\n .rev() \/\/ highest priority comes last\n .flat_map(|(arm, &arm_block)| {\n let guard = arm.guard;\n arm.patterns.into_iter()\n .rev()\n .map(move |pat| (arm_block, pat, guard.clone()))\n })\n .map(|(arm_block, pattern, guard)| {\n Candidate {\n match_pairs: vec![self.match_pair(discriminant_lvalue.clone(), pattern)],\n bindings: vec![],\n guard: guard,\n arm_block: arm_block,\n }\n })\n .collect();\n\n \/\/ this will generate code to test discriminant_lvalue and\n \/\/ branch to the appropriate arm block\n let var_extent = self.extent_of_innermost_scope().unwrap();\n self.match_candidates(span, var_extent, candidates, block);\n\n \/\/ all the arm blocks will rejoin here\n let end_block = self.cfg.start_new_block();\n\n for (arm_body, &arm_block) in arm_bodies.into_iter().zip(arm_blocks.iter()) {\n let mut arm_block = arm_block;\n unpack!(arm_block = self.into(destination, arm_block, arm_body));\n self.cfg.terminate(arm_block, Terminator::Goto { target: end_block });\n }\n\n end_block.unit()\n }\n\n pub fn expr_into_pattern(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent, \/\/ lifetime of vars\n irrefutable_pat: PatternRef<H>,\n initializer: ExprRef<H>)\n -> BlockAnd<()>\n {\n \/\/ optimize the case of `let x = ...`\n let irrefutable_pat = self.hir.mirror(irrefutable_pat);\n match irrefutable_pat.kind {\n PatternKind::Binding { mutability,\n name,\n mode: BindingMode::ByValue,\n var,\n ty,\n subpattern: None } => {\n let index = self.declare_binding(var_extent, mutability, name,\n var, ty, irrefutable_pat.span);\n let lvalue = Lvalue::Var(index);\n return self.into(&lvalue, block, initializer);\n }\n _ => { }\n }\n let lvalue = unpack!(block = self.as_lvalue(block, initializer));\n self.lvalue_into_pattern(block, var_extent,\n PatternRef::Mirror(Box::new(irrefutable_pat)), &lvalue)\n }\n\n pub fn lvalue_into_pattern(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent,\n irrefutable_pat: PatternRef<H>,\n initializer: &Lvalue<H>)\n -> BlockAnd<()>\n {\n \/\/ create a dummy candidate\n let mut candidate = Candidate::<H> {\n match_pairs: vec![self.match_pair(initializer.clone(), irrefutable_pat)],\n bindings: vec![],\n guard: None,\n arm_block: block\n };\n\n \/\/ Simplify the candidate. 
Since the pattern is irrefutable, this should\n \/\/ always convert all match-pairs into bindings.\n unpack!(block = self.simplify_candidate(block, &mut candidate));\n\n if !candidate.match_pairs.is_empty() {\n self.hir.span_bug(\n candidate.match_pairs[0].pattern.span,\n &format!(\"match pairs {:?} remaining after simplifying irrefutable pattern\",\n candidate.match_pairs));\n }\n\n \/\/ now apply the bindings, which will also declare the variables\n self.bind_matched_candidate(block, var_extent, candidate.bindings);\n\n block.unit()\n }\n\n pub fn declare_uninitialized_variables(&mut self,\n var_extent: H::CodeExtent,\n pattern: PatternRef<H>)\n {\n let pattern = self.hir.mirror(pattern);\n match pattern.kind {\n PatternKind::Binding { mutability, name, mode: _, var, ty, subpattern } => {\n self.declare_binding(var_extent, mutability, name, var, ty, pattern.span);\n if let Some(subpattern) = subpattern {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n }\n PatternKind::Array { prefix, slice, suffix } |\n PatternKind::Slice { prefix, slice, suffix } => {\n for subpattern in prefix.into_iter().chain(slice).chain(suffix) {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n }\n PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {\n }\n PatternKind::Deref { subpattern } => {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n PatternKind::Leaf { subpatterns } |\n PatternKind::Variant { subpatterns, .. } => {\n for subpattern in subpatterns {\n self.declare_uninitialized_variables(var_extent, subpattern.pattern);\n }\n }\n }\n }\n}\n\n#[derive(Clone, Debug)]\nstruct Candidate<H:Hair> {\n \/\/ all of these must be satisfied...\n match_pairs: Vec<MatchPair<H>>,\n\n \/\/ ...these bindings established...\n bindings: Vec<Binding<H>>,\n\n \/\/ ...and the guard must be evaluated...\n guard: Option<ExprRef<H>>,\n\n \/\/ ...and then we branch here.\n arm_block: BasicBlock,\n}\n\n#[derive(Clone, Debug)]\nstruct Binding<H:Hair> {\n span: H::Span,\n source: Lvalue<H>,\n name: H::Name,\n var_id: H::VarId,\n var_ty: H::Ty,\n mutability: Mutability,\n binding_mode: BindingMode<H>,\n}\n\n#[derive(Clone, Debug)]\nstruct MatchPair<H:Hair> {\n \/\/ this lvalue...\n lvalue: Lvalue<H>,\n\n \/\/ ... must match this pattern.\n pattern: Pattern<H>,\n}\n\n#[derive(Clone, Debug, PartialEq)]\nenum TestKind<H:Hair> {\n \/\/ test the branches of enum\n Switch { adt_def: H::AdtDef },\n\n \/\/ test for equality\n Eq { value: Literal<H>, ty: H::Ty },\n\n \/\/ test whether the value falls within an inclusive range\n Range { lo: Literal<H>, hi: Literal<H>, ty: H::Ty },\n\n \/\/ test length of the slice is equal to len\n Len { len: usize, op: BinOp },\n}\n\n#[derive(Debug)]\nstruct Test<H:Hair> {\n span: H::Span,\n kind: TestKind<H>,\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Main matching algorithm\n\nimpl<H:Hair> Builder<H> {\n fn match_candidates(&mut self,\n span: H::Span,\n var_extent: H::CodeExtent,\n mut candidates: Vec<Candidate<H>>,\n mut block: BasicBlock)\n {\n debug!(\"matched_candidate(span={:?}, var_extent={:?}, block={:?}, candidates={:?})\",\n span, var_extent, block, candidates);\n\n \/\/ Start by simplifying candidates. 
Once this process is\n \/\/ complete, all the match pairs which remain require some\n \/\/ form of test, whether it be a switch or pattern comparison.\n for candidate in &mut candidates {\n unpack!(block = self.simplify_candidate(block, candidate));\n }\n\n \/\/ The candidates are inversely sorted by priority. Check to\n \/\/ see whether the candidates in the front of the queue (and\n \/\/ hence back of the vec) have satisfied all their match\n \/\/ pairs.\n let fully_matched =\n candidates.iter().rev().take_while(|c| c.match_pairs.is_empty()).count();\n debug!(\"match_candidates: {:?} candidates fully matched\", fully_matched);\n for _ in 0..fully_matched {\n \/\/ If so, apply any bindings, test the guard (if any), and\n \/\/ branch to the arm.\n let candidate = candidates.pop().unwrap();\n if let Some(b) = self.bind_and_guard_matched_candidate(block, var_extent, candidate) {\n block = b;\n } else {\n \/\/ if None is returned, then any remaining candidates\n \/\/ are unreachable (at least not through this path).\n return;\n }\n }\n\n \/\/ If there are no candidates that still need testing, we're done.\n \/\/ Since all matches are exhaustive, execution should never reach this point.\n if candidates.is_empty() {\n return self.panic(block);\n }\n\n \/\/ otherwise, extract the next match pair and construct tests\n let match_pair = &candidates.last().unwrap().match_pairs[0];\n let test = self.test(match_pair);\n debug!(\"match_candidates: test={:?} match_pair={:?}\", test, match_pair);\n let target_blocks = self.perform_test(block, &match_pair.lvalue, &test);\n\n for (outcome, mut target_block) in target_blocks.into_iter().enumerate() {\n let applicable_candidates: Vec<Candidate<H>> =\n candidates.iter()\n .filter_map(|candidate| {\n unpack!(target_block =\n self.candidate_under_assumption(target_block,\n &match_pair.lvalue,\n &test.kind,\n outcome,\n candidate))\n })\n .collect();\n self.match_candidates(span, var_extent, applicable_candidates, target_block);\n }\n }\n\n \/\/\/ Initializes each of the bindings from the candidate by\n \/\/\/ moving\/copying\/ref'ing the source as appropriate. Tests the\n \/\/\/ guard, if any, and then branches to the arm. Returns the block\n \/\/\/ for the case where the guard fails.\n \/\/\/\n \/\/\/ Note: we check earlier that if there is a guard, there cannot\n \/\/\/ be move bindings. 
This isn't really important for the\n \/\/\/ self-consistency of this fn, but the reason for it should be\n \/\/\/ clear: after we've done the assignments, if there were move\n \/\/\/ bindings, further tests would be a use-after-move (which would\n \/\/\/ in turn be detected by the borrowck code that runs on the\n \/\/\/ MIR).\n fn bind_and_guard_matched_candidate(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent,\n candidate: Candidate<H>)\n -> Option<BasicBlock> {\n debug!(\"bind_and_guard_matched_candidate(block={:?}, var_extent={:?}, candidate={:?})\",\n block, var_extent, candidate);\n\n debug_assert!(candidate.match_pairs.is_empty());\n\n self.bind_matched_candidate(block, var_extent, candidate.bindings);\n\n if let Some(guard) = candidate.guard {\n \/\/ the block to branch to if the guard fails; if there is no\n \/\/ guard, this block is simply unreachable\n let cond = unpack!(block = self.as_operand(block, guard));\n let otherwise = self.cfg.start_new_block();\n self.cfg.terminate(block, Terminator::If { cond: cond,\n targets: [candidate.arm_block, otherwise]});\n Some(otherwise)\n } else {\n self.cfg.terminate(block, Terminator::Goto { target: candidate.arm_block });\n None\n }\n }\n\n fn bind_matched_candidate(&mut self,\n block: BasicBlock,\n var_extent: H::CodeExtent,\n bindings: Vec<Binding<H>>) {\n debug!(\"bind_matched_candidate(block={:?}, var_extent={:?}, bindings={:?})\",\n block, var_extent, bindings);\n\n \/\/ Assign each of the bindings. This may trigger moves out of the candidate.\n for binding in bindings {\n \/\/ Create a variable for the `var_id` being bound. In the\n \/\/ case where there are multiple patterns for a single\n \/\/ arm, it may already exist.\n let var_index = if !self.var_indices.contains_key(&binding.var_id) {\n self.declare_binding(var_extent,\n binding.mutability,\n binding.name,\n binding.var_id,\n binding.var_ty,\n binding.span)\n } else {\n self.var_indices[&binding.var_id]\n };\n\n let rvalue = match binding.binding_mode {\n BindingMode::ByValue =>\n Rvalue::Use(Operand::Consume(binding.source)),\n BindingMode::ByRef(region, borrow_kind) =>\n Rvalue::Ref(region, borrow_kind, binding.source),\n };\n\n self.cfg.push_assign(block, binding.span, &Lvalue::Var(var_index), rvalue);\n }\n }\n\n fn declare_binding(&mut self,\n var_extent: H::CodeExtent,\n mutability: Mutability,\n name: H::Name,\n var_id: H::VarId,\n var_ty: H::Ty,\n span: H::Span)\n -> u32\n {\n debug!(\"declare_binding(var_id={:?}, name={:?}, var_ty={:?}, var_extent={:?}, span={:?})\",\n var_id, name, var_ty, var_extent, span);\n\n let index = self.var_decls.len();\n self.var_decls.push(VarDecl::<H> {\n mutability: mutability,\n name: name,\n ty: var_ty.clone(),\n });\n let index = index as u32;\n self.schedule_drop(span, var_extent, DropKind::Deep, &Lvalue::Var(index), var_ty);\n self.var_indices.insert(var_id, index);\n\n debug!(\"declare_binding: index={:?}\", index);\n\n index\n }\n}\n\n<commit_msg>rather than just passing the block around, track the candidate's index, and track which arms are reached (though in fact we don't make use of this right now -- we might later if we absorb the checking of patterns into MIR, as I would like)<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Code related to match expresions. These are sufficiently complex\n\/\/! to warrant their own module and submodules. :) This main module\n\/\/! includes the high-level algorithm, the submodules contain the\n\/\/! details.\n\nuse build::{BlockAnd, Builder};\nuse repr::*;\nuse hair::*;\n\n\/\/ helper functions, broken out by category:\nmod simplify;\nmod test;\nmod util;\n\nimpl<H:Hair> Builder<H> {\n pub fn match_expr(&mut self,\n destination: &Lvalue<H>,\n span: H::Span,\n mut block: BasicBlock,\n discriminant: ExprRef<H>,\n arms: Vec<Arm<H>>)\n -> BlockAnd<()>\n {\n let discriminant_lvalue =\n unpack!(block = self.as_lvalue(block, discriminant));\n\n let mut arm_blocks = ArmBlocks {\n blocks: arms.iter()\n .map(|_| self.cfg.start_new_block())\n .collect(),\n };\n\n let arm_bodies: Vec<ExprRef<H>> =\n arms.iter()\n .map(|arm| arm.body.clone())\n .collect();\n\n \/\/ assemble a list of candidates: there is one candidate per\n \/\/ pattern, which means there may be more than one candidate\n \/\/ *per arm*. These candidates are kept sorted such that the\n \/\/ highest priority candidate comes last in the list. This the\n \/\/ reverse of the order in which candidates are written in the\n \/\/ source.\n let candidates: Vec<Candidate<H>> =\n arms.into_iter()\n .enumerate()\n .rev() \/\/ highest priority comes last\n .flat_map(|(arm_index, arm)| {\n let guard = arm.guard;\n arm.patterns.into_iter()\n .rev()\n .map(move |pat| (arm_index, pat, guard.clone()))\n })\n .map(|(arm_index, pattern, guard)| {\n Candidate {\n match_pairs: vec![self.match_pair(discriminant_lvalue.clone(), pattern)],\n bindings: vec![],\n guard: guard,\n arm_index: arm_index,\n }\n })\n .collect();\n\n \/\/ this will generate code to test discriminant_lvalue and\n \/\/ branch to the appropriate arm block\n let var_extent = self.extent_of_innermost_scope().unwrap();\n self.match_candidates(span, var_extent, &mut arm_blocks, candidates, block);\n\n \/\/ all the arm blocks will rejoin here\n let end_block = self.cfg.start_new_block();\n\n for (arm_index, arm_body) in arm_bodies.into_iter().enumerate() {\n let mut arm_block = arm_blocks.blocks[arm_index];\n unpack!(arm_block = self.into(destination, arm_block, arm_body));\n self.cfg.terminate(arm_block, Terminator::Goto { target: end_block });\n }\n\n end_block.unit()\n }\n\n pub fn expr_into_pattern(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent, \/\/ lifetime of vars\n irrefutable_pat: PatternRef<H>,\n initializer: ExprRef<H>)\n -> BlockAnd<()>\n {\n \/\/ optimize the case of `let x = ...`\n let irrefutable_pat = self.hir.mirror(irrefutable_pat);\n match irrefutable_pat.kind {\n PatternKind::Binding { mutability,\n name,\n mode: BindingMode::ByValue,\n var,\n ty,\n subpattern: None } => {\n let index = self.declare_binding(var_extent, mutability, name,\n var, ty, irrefutable_pat.span);\n let lvalue = Lvalue::Var(index);\n return self.into(&lvalue, block, initializer);\n }\n _ => { }\n }\n let lvalue = unpack!(block = self.as_lvalue(block, initializer));\n self.lvalue_into_pattern(block, var_extent,\n 
PatternRef::Mirror(Box::new(irrefutable_pat)), &lvalue)\n }\n\n pub fn lvalue_into_pattern(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent,\n irrefutable_pat: PatternRef<H>,\n initializer: &Lvalue<H>)\n -> BlockAnd<()>\n {\n \/\/ create a dummy candidate\n let mut candidate = Candidate::<H> {\n match_pairs: vec![self.match_pair(initializer.clone(), irrefutable_pat)],\n bindings: vec![],\n guard: None,\n arm_index: 0, \/\/ since we don't call `match_candidates`, this field is unused\n };\n\n \/\/ Simplify the candidate. Since the pattern is irrefutable, this should\n \/\/ always convert all match-pairs into bindings.\n unpack!(block = self.simplify_candidate(block, &mut candidate));\n\n if !candidate.match_pairs.is_empty() {\n self.hir.span_bug(\n candidate.match_pairs[0].pattern.span,\n &format!(\"match pairs {:?} remaining after simplifying irrefutable pattern\",\n candidate.match_pairs));\n }\n\n \/\/ now apply the bindings, which will also declare the variables\n self.bind_matched_candidate(block, var_extent, candidate.bindings);\n\n block.unit()\n }\n\n pub fn declare_uninitialized_variables(&mut self,\n var_extent: H::CodeExtent,\n pattern: PatternRef<H>)\n {\n let pattern = self.hir.mirror(pattern);\n match pattern.kind {\n PatternKind::Binding { mutability, name, mode: _, var, ty, subpattern } => {\n self.declare_binding(var_extent, mutability, name, var, ty, pattern.span);\n if let Some(subpattern) = subpattern {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n }\n PatternKind::Array { prefix, slice, suffix } |\n PatternKind::Slice { prefix, slice, suffix } => {\n for subpattern in prefix.into_iter().chain(slice).chain(suffix) {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n }\n PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {\n }\n PatternKind::Deref { subpattern } => {\n self.declare_uninitialized_variables(var_extent, subpattern);\n }\n PatternKind::Leaf { subpatterns } |\n PatternKind::Variant { subpatterns, .. } => {\n for subpattern in subpatterns {\n self.declare_uninitialized_variables(var_extent, subpattern.pattern);\n }\n }\n }\n }\n}\n\n\/\/\/ List of blocks for each arm (and potentially other metadata in the\n\/\/\/ future).\nstruct ArmBlocks {\n blocks: Vec<BasicBlock>,\n}\n\n#[derive(Clone, Debug)]\nstruct Candidate<H:Hair> {\n \/\/ all of these must be satisfied...\n match_pairs: Vec<MatchPair<H>>,\n\n \/\/ ...these bindings established...\n bindings: Vec<Binding<H>>,\n\n \/\/ ...and the guard must be evaluated...\n guard: Option<ExprRef<H>>,\n\n \/\/ ...and then we branch to arm with this index.\n arm_index: usize,\n}\n\n#[derive(Clone, Debug)]\nstruct Binding<H:Hair> {\n span: H::Span,\n source: Lvalue<H>,\n name: H::Name,\n var_id: H::VarId,\n var_ty: H::Ty,\n mutability: Mutability,\n binding_mode: BindingMode<H>,\n}\n\n#[derive(Clone, Debug)]\nstruct MatchPair<H:Hair> {\n \/\/ this lvalue...\n lvalue: Lvalue<H>,\n\n \/\/ ... 
must match this pattern.\n pattern: Pattern<H>,\n}\n\n#[derive(Clone, Debug, PartialEq)]\nenum TestKind<H:Hair> {\n \/\/ test the branches of enum\n Switch { adt_def: H::AdtDef },\n\n \/\/ test for equality\n Eq { value: Literal<H>, ty: H::Ty },\n\n \/\/ test whether the value falls within an inclusive range\n Range { lo: Literal<H>, hi: Literal<H>, ty: H::Ty },\n\n \/\/ test length of the slice is equal to len\n Len { len: usize, op: BinOp },\n}\n\n#[derive(Debug)]\nstruct Test<H:Hair> {\n span: H::Span,\n kind: TestKind<H>,\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Main matching algorithm\n\nimpl<H:Hair> Builder<H> {\n fn match_candidates(&mut self,\n span: H::Span,\n var_extent: H::CodeExtent,\n arm_blocks: &mut ArmBlocks,\n mut candidates: Vec<Candidate<H>>,\n mut block: BasicBlock)\n {\n debug!(\"matched_candidate(span={:?}, var_extent={:?}, block={:?}, candidates={:?})\",\n span, var_extent, block, candidates);\n\n \/\/ Start by simplifying candidates. Once this process is\n \/\/ complete, all the match pairs which remain require some\n \/\/ form of test, whether it be a switch or pattern comparison.\n for candidate in &mut candidates {\n unpack!(block = self.simplify_candidate(block, candidate));\n }\n\n \/\/ The candidates are inversely sorted by priority. Check to\n \/\/ see whether the candidates in the front of the queue (and\n \/\/ hence back of the vec) have satisfied all their match\n \/\/ pairs.\n let fully_matched =\n candidates.iter().rev().take_while(|c| c.match_pairs.is_empty()).count();\n debug!(\"match_candidates: {:?} candidates fully matched\", fully_matched);\n for _ in 0..fully_matched {\n \/\/ If so, apply any bindings, test the guard (if any), and\n \/\/ branch to the arm.\n let candidate = candidates.pop().unwrap();\n if let Some(b) = self.bind_and_guard_matched_candidate(block, var_extent,\n arm_blocks, candidate) {\n block = b;\n } else {\n \/\/ if None is returned, then any remaining candidates\n \/\/ are unreachable (at least not through this path).\n return;\n }\n }\n\n \/\/ If there are no candidates that still need testing, we're done.\n \/\/ Since all matches are exhaustive, execution should never reach this point.\n if candidates.is_empty() {\n return self.panic(block);\n }\n\n \/\/ otherwise, extract the next match pair and construct tests\n let match_pair = &candidates.last().unwrap().match_pairs[0];\n let test = self.test(match_pair);\n debug!(\"match_candidates: test={:?} match_pair={:?}\", test, match_pair);\n let target_blocks = self.perform_test(block, &match_pair.lvalue, &test);\n\n for (outcome, mut target_block) in target_blocks.into_iter().enumerate() {\n let applicable_candidates: Vec<Candidate<H>> =\n candidates.iter()\n .filter_map(|candidate| {\n unpack!(target_block =\n self.candidate_under_assumption(target_block,\n &match_pair.lvalue,\n &test.kind,\n outcome,\n candidate))\n })\n .collect();\n self.match_candidates(span, var_extent, arm_blocks, applicable_candidates, target_block);\n }\n }\n\n \/\/\/ Initializes each of the bindings from the candidate by\n \/\/\/ moving\/copying\/ref'ing the source as appropriate. Tests the\n \/\/\/ guard, if any, and then branches to the arm. Returns the block\n \/\/\/ for the case where the guard fails.\n \/\/\/\n \/\/\/ Note: we check earlier that if there is a guard, there cannot\n \/\/\/ be move bindings. 
This isn't really important for the\n \/\/\/ self-consistency of this fn, but the reason for it should be\n \/\/\/ clear: after we've done the assignments, if there were move\n \/\/\/ bindings, further tests would be a use-after-move (which would\n \/\/\/ in turn be detected by the borrowck code that runs on the\n \/\/\/ MIR).\n fn bind_and_guard_matched_candidate(&mut self,\n mut block: BasicBlock,\n var_extent: H::CodeExtent,\n arm_blocks: &mut ArmBlocks,\n candidate: Candidate<H>)\n -> Option<BasicBlock> {\n debug!(\"bind_and_guard_matched_candidate(block={:?}, var_extent={:?}, candidate={:?})\",\n block, var_extent, candidate);\n\n debug_assert!(candidate.match_pairs.is_empty());\n\n self.bind_matched_candidate(block, var_extent, candidate.bindings);\n\n let arm_block = arm_blocks.blocks[candidate.arm_index];\n\n if let Some(guard) = candidate.guard {\n \/\/ the block to branch to if the guard fails; if there is no\n \/\/ guard, this block is simply unreachable\n let cond = unpack!(block = self.as_operand(block, guard));\n let otherwise = self.cfg.start_new_block();\n self.cfg.terminate(block, Terminator::If { cond: cond,\n targets: [arm_block, otherwise]});\n Some(otherwise)\n } else {\n self.cfg.terminate(block, Terminator::Goto { target: arm_block });\n None\n }\n }\n\n fn bind_matched_candidate(&mut self,\n block: BasicBlock,\n var_extent: H::CodeExtent,\n bindings: Vec<Binding<H>>) {\n debug!(\"bind_matched_candidate(block={:?}, var_extent={:?}, bindings={:?})\",\n block, var_extent, bindings);\n\n \/\/ Assign each of the bindings. This may trigger moves out of the candidate.\n for binding in bindings {\n \/\/ Create a variable for the `var_id` being bound. In the\n \/\/ case where there are multiple patterns for a single\n \/\/ arm, it may already exist.\n let var_index = if !self.var_indices.contains_key(&binding.var_id) {\n self.declare_binding(var_extent,\n binding.mutability,\n binding.name,\n binding.var_id,\n binding.var_ty,\n binding.span)\n } else {\n self.var_indices[&binding.var_id]\n };\n\n let rvalue = match binding.binding_mode {\n BindingMode::ByValue =>\n Rvalue::Use(Operand::Consume(binding.source)),\n BindingMode::ByRef(region, borrow_kind) =>\n Rvalue::Ref(region, borrow_kind, binding.source),\n };\n\n self.cfg.push_assign(block, binding.span, &Lvalue::Var(var_index), rvalue);\n }\n }\n\n fn declare_binding(&mut self,\n var_extent: H::CodeExtent,\n mutability: Mutability,\n name: H::Name,\n var_id: H::VarId,\n var_ty: H::Ty,\n span: H::Span)\n -> u32\n {\n debug!(\"declare_binding(var_id={:?}, name={:?}, var_ty={:?}, var_extent={:?}, span={:?})\",\n var_id, name, var_ty, var_extent, span);\n\n let index = self.var_decls.len();\n self.var_decls.push(VarDecl::<H> {\n mutability: mutability,\n name: name,\n ty: var_ty.clone(),\n });\n let index = index as u32;\n self.schedule_drop(span, var_extent, DropKind::Deep, &Lvalue::Var(index), var_ty);\n self.var_indices.insert(var_id, index);\n\n debug!(\"declare_binding: index={:?}\", index);\n\n index\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use {Future, Poll, Async};\n\n\/\/\/ A future which \"fuse\"s a future once it's been resolved.\n\/\/\/\n\/\/\/ Normally futures can behave unpredictable once they're used after a future\n\/\/\/ has been resolved, but `Fuse` is always defined to return `Async::NotReady`\n\/\/\/ from `poll` after it has succeeded, and after it has succeeded all future\n\/\/\/ calls to `schedule` will be ignored.\n#[must_use = \"futures do nothing unless polled\"]\npub struct Fuse<A: Future> 
{\n future: Option<A>,\n}\n\npub fn new<A: Future>(f: A) -> Fuse<A> {\n Fuse {\n future: Some(f),\n }\n}\n\nimpl<A: Future> Future for Fuse<A> {\n type Item = A::Item;\n type Error = A::Error;\n\n fn poll(&mut self) -> Poll<A::Item, A::Error> {\n let res = self.future.as_mut().map(|f| f.poll());\n match res.unwrap_or(Ok(Async::NotReady)) {\n res @ Ok(Async::Ready(_)) |\n res @ Err(_) => {\n self.future = None;\n res\n }\n Ok(Async::NotReady) => Ok(Async::NotReady)\n }\n }\n}\n<commit_msg>Tweak `Fuse` docs: remove reference to `schedule`<commit_after>use {Future, Poll, Async};\n\n\/\/\/ A future which \"fuses\" a future once it's been resolved.\n\/\/\/\n\/\/\/ Normally futures can behave unpredictable once they're used after a future\n\/\/\/ has been resolved, but `Fuse` is always defined to return `Async::NotReady`\n\/\/\/ from `poll` after it has resolved successfully or returned an error.\n\/\/\/\n\/\/\/ This is created by the `Future::fuse` method.\n#[must_use = \"futures do nothing unless polled\"]\npub struct Fuse<A: Future> {\n future: Option<A>,\n}\n\npub fn new<A: Future>(f: A) -> Fuse<A> {\n Fuse {\n future: Some(f),\n }\n}\n\nimpl<A: Future> Future for Fuse<A> {\n type Item = A::Item;\n type Error = A::Error;\n\n fn poll(&mut self) -> Poll<A::Item, A::Error> {\n let res = self.future.as_mut().map(|f| f.poll());\n match res.unwrap_or(Ok(Async::NotReady)) {\n res @ Ok(Async::Ready(_)) |\n res @ Err(_) => {\n self.future = None;\n res\n }\n Ok(Async::NotReady) => Ok(Async::NotReady)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>draft alpha-conversion<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More VIP mapping<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #20797. Fixes #20797.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #20797.\n\nuse std::default::Default;\nuse std::io::IoResult;\nuse std::io::fs;\nuse std::io::fs::PathExtensions;\n\n\/\/\/ A strategy for acquiring more subpaths to walk.\npub trait Strategy {\n type P: PathExtensions;\n \/\/\/ Get additional subpaths from a given path.\n fn get_more(&self, item: &Self::P) -> IoResult<Vec<Self::P>>;\n \/\/\/ Determine whether a path should be walked further.\n \/\/\/ This is run against each item from `get_more()`.\n fn prune(&self, p: &Self::P) -> bool;\n}\n\n\/\/\/ The basic fully-recursive strategy. 
Nothing is pruned.\n#[derive(Copy, Default)]\npub struct Recursive;\n\nimpl Strategy for Recursive {\n type P = Path;\n fn get_more(&self, p: &Path) -> IoResult<Vec<Path>> { fs::readdir(p) }\n\n fn prune(&self, _: &Path) -> bool { false }\n}\n\n\/\/\/ A directory walker of `P` using strategy `S`.\npub struct Subpaths<S: Strategy> {\n stack: Vec<S::P>,\n strategy: S,\n}\n\nimpl<S: Strategy> Subpaths<S> {\n \/\/\/ Create a directory walker with a root path and strategy.\n pub fn new(p: &S::P, strategy: S) -> IoResult<Subpaths<S>> {\n let stack = try!(strategy.get_more(p));\n Ok(Subpaths { stack: stack, strategy: strategy })\n }\n}\n\nimpl<S: Default + Strategy> Subpaths<S> {\n \/\/\/ Create a directory walker with a root path and a default strategy.\n pub fn walk(p: &S::P) -> IoResult<Subpaths<S>> {\n Subpaths::new(p, Default::default())\n }\n}\n\nimpl<S: Default + Strategy> Default for Subpaths<S> {\n fn default() -> Subpaths<S> {\n Subpaths { stack: Vec::new(), strategy: Default::default() }\n }\n}\n\nimpl<S: Strategy> Iterator for Subpaths<S> {\n type Item = S::P;\n fn next (&mut self) -> Option<S::P> {\n let mut opt_path = self.stack.pop();\n while opt_path.is_some() && self.strategy.prune(opt_path.as_ref().unwrap()) {\n opt_path = self.stack.pop();\n }\n match opt_path {\n Some(path) => {\n if PathExtensions::is_dir(&path) {\n let result = self.strategy.get_more(&path);\n match result {\n Ok(dirs) => { self.stack.extend(dirs.into_iter()); },\n Err(..) => { }\n }\n }\n Some(path)\n }\n None => None,\n }\n }\n}\n\nfn main() {\n let mut walker: Subpaths<Recursive> = Subpaths::walk(&Path::new(\"\/home\")).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add run-pass test<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ffi::OsString;\n\nfn main() {\n let os_str = OsString::from(\"Hello Rust!\");\n\n assert_eq!(os_str, \"Hello Rust!\");\n assert_eq!(\"Hello Rust!\", os_str);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #8956 : thestinger\/rust\/test, r=huonw<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-fast: check-fast screws up repr paths\n\nuse std::unstable::intrinsics::get_tydesc;\n\nstruct Foo<T> {\n x: T\n}\n\nfn main() {\n unsafe {\n assert_eq!((*get_tydesc::<int>()).name, \"int\");\n assert_eq!((*get_tydesc::<~[int]>()).name, \"~[int]\");\n assert_eq!((*get_tydesc::<Foo<uint>>()).name, \"Foo<uint>\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Integer and floating-point number formatting\n\n#![allow(deprecated)]\n\n\nuse fmt;\nuse ops::{Div, Rem, Sub};\nuse str;\nuse slice;\nuse ptr;\nuse mem;\n\n#[doc(hidden)]\ntrait Int: PartialEq + PartialOrd + Div<Output=Self> + Rem<Output=Self> +\n Sub<Output=Self> + Copy {\n fn zero() -> Self;\n fn from_u8(u: u8) -> Self;\n fn to_u8(&self) -> u8;\n fn to_u16(&self) -> u16;\n fn to_u32(&self) -> u32;\n fn to_u64(&self) -> u64;\n fn to_u128(&self) -> u128;\n}\n\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl Int for $t {\n fn zero() -> $t { 0 }\n fn from_u8(u: u8) -> $t { u as $t }\n fn to_u8(&self) -> u8 { *self as u8 }\n fn to_u16(&self) -> u16 { *self as u16 }\n fn to_u32(&self) -> u32 { *self as u32 }\n fn to_u64(&self) -> u64 { *self as u64 }\n fn to_u128(&self) -> u128 { *self as u128 }\n })*)\n}\ndoit! { i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize }\n\n\/\/\/ A type that represents a specific radix\n#[doc(hidden)]\ntrait GenericRadix {\n \/\/\/ The number of digits.\n fn base(&self) -> u8;\n\n \/\/\/ A radix-specific prefix string.\n fn prefix(&self) -> &'static str {\n \"\"\n }\n\n \/\/\/ Converts an integer to corresponding radix digit.\n fn digit(&self, x: u8) -> u8;\n\n \/\/\/ Format an integer using the radix using a formatter.\n fn fmt_int<T: Int>(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ The radix can be as low as 2, so we need a buffer of at least 128\n \/\/ characters for a base 2 number.\n let zero = T::zero();\n let is_nonnegative = x >= zero;\n let mut buf = [0; 128];\n let mut curr = buf.len();\n let base = T::from_u8(self.base());\n if is_nonnegative {\n \/\/ Accumulate each digit of the number from the least significant\n \/\/ to the most significant figure.\n for byte in buf.iter_mut().rev() {\n let n = x % base; \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n } else {\n \/\/ Do the same as above, but accounting for two's complement.\n for byte in buf.iter_mut().rev() {\n let n = zero - (x % base); \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n }\n let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };\n f.pad_integral(is_nonnegative, self.prefix(), buf)\n }\n}\n\n\/\/\/ A binary (base 2) radix\n#[derive(Clone, PartialEq)]\nstruct Binary;\n\n\/\/\/ An octal (base 8) radix\n#[derive(Clone, PartialEq)]\nstruct Octal;\n\n\/\/\/ A decimal (base 10) radix\n#[derive(Clone, PartialEq)]\nstruct Decimal;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with lower-case characters\n#[derive(Clone, PartialEq)]\nstruct LowerHex;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with upper-case characters\n#[derive(Clone, PartialEq)]\nstruct UpperHex;\n\nmacro_rules! 
radix {\n ($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => {\n impl GenericRadix for $T {\n fn base(&self) -> u8 { $base }\n fn prefix(&self) -> &'static str { $prefix }\n fn digit(&self, x: u8) -> u8 {\n match x {\n $($x => $conv,)+\n x => panic!(\"number not in the range 0..{}: {}\", self.base() - 1, x),\n }\n }\n }\n }\n}\n\nradix! { Binary, 2, \"0b\", x @ 0 ... 1 => b'0' + x }\nradix! { Octal, 8, \"0o\", x @ 0 ... 7 => b'0' + x }\nradix! { Decimal, 10, \"\", x @ 0 ... 9 => b'0' + x }\nradix! { LowerHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'a' + (x - 10) }\nradix! { UpperHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'A' + (x - 10) }\n\nmacro_rules! int_base {\n ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::$Trait for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n $Radix.fmt_int(*self as $U, f)\n }\n }\n }\n}\n\nmacro_rules! debug {\n ($T:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Debug for $T {\n #[inline]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if f.debug_lower_hex() {\n fmt::LowerHex::fmt(self, f)\n } else if f.debug_upper_hex() {\n fmt::UpperHex::fmt(self, f)\n } else {\n fmt::Display::fmt(self, f)\n }\n }\n }\n }\n}\n\nmacro_rules! integer {\n ($Int:ident, $Uint:ident) => {\n int_base! { Binary for $Int as $Uint -> Binary }\n int_base! { Octal for $Int as $Uint -> Octal }\n int_base! { LowerHex for $Int as $Uint -> LowerHex }\n int_base! { UpperHex for $Int as $Uint -> UpperHex }\n debug! { $Int }\n\n int_base! { Binary for $Uint as $Uint -> Binary }\n int_base! { Octal for $Uint as $Uint -> Octal }\n int_base! { LowerHex for $Uint as $Uint -> LowerHex }\n int_base! { UpperHex for $Uint as $Uint -> UpperHex }\n debug! { $Uint }\n }\n}\ninteger! { isize, usize }\ninteger! { i8, u8 }\ninteger! { i16, u16 }\ninteger! { i32, u32 }\ninteger! { i64, u64 }\ninteger! { i128, u128 }\n\nconst DEC_DIGITS_LUT: &'static[u8] =\n b\"0001020304050607080910111213141516171819\\\n 2021222324252627282930313233343536373839\\\n 4041424344454647484950515253545556575859\\\n 6061626364656667686970717273747576777879\\\n 8081828384858687888990919293949596979899\";\n\nmacro_rules! 
impl_Display {\n ($($t:ident),*: $conv_fn:ident) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Display for $t {\n #[allow(unused_comparisons)]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let is_nonnegative = *self >= 0;\n let mut n = if is_nonnegative {\n self.$conv_fn()\n } else {\n \/\/ convert the negative num to positive by summing 1 to it's 2 complement\n (!self.$conv_fn()).wrapping_add(1)\n };\n let mut buf: [u8; 39] = unsafe { mem::uninitialized() };\n let mut curr = buf.len() as isize;\n let buf_ptr = buf.as_mut_ptr();\n let lut_ptr = DEC_DIGITS_LUT.as_ptr();\n\n unsafe {\n \/\/ need at least 16 bits for the 4-characters-at-a-time to work.\n if ::mem::size_of::<$t>() >= 2 {\n \/\/ eagerly decode 4 characters at a time\n while n >= 10000 {\n let rem = (n % 10000) as isize;\n n \/= 10000;\n\n let d1 = (rem \/ 100) << 1;\n let d2 = (rem % 100) << 1;\n curr -= 4;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);\n }\n }\n\n \/\/ if we reach here numbers are <= 9999, so at most 4 chars long\n let mut n = n as isize; \/\/ possibly reduce 64bit math\n\n \/\/ decode 2 more chars, if > 2 chars\n if n >= 100 {\n let d1 = (n % 100) << 1;\n n \/= 100;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n\n \/\/ decode last 1 or 2 chars\n if n < 10 {\n curr -= 1;\n *buf_ptr.offset(curr) = (n as u8) + b'0';\n } else {\n let d1 = n << 1;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n }\n\n let buf_slice = unsafe {\n str::from_utf8_unchecked(\n slice::from_raw_parts(buf_ptr.offset(curr), buf.len() - curr as usize))\n };\n f.pad_integral(is_nonnegative, \"\", buf_slice)\n }\n })*);\n}\n\nimpl_Display!(i8, u8, i16, u16, i32, u32: to_u32);\nimpl_Display!(i64, u64: to_u64);\nimpl_Display!(i128, u128: to_u128);\n#[cfg(target_pointer_width = \"16\")]\nimpl_Display!(isize, usize: to_u16);\n#[cfg(target_pointer_width = \"32\")]\nimpl_Display!(isize, usize: to_u32);\n#[cfg(target_pointer_width = \"64\")]\nimpl_Display!(isize, usize: to_u64);\n<commit_msg>Rollup merge of #49099 - glandium:master, r=sfackler<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Integer and floating-point number formatting\n\n#![allow(deprecated)]\n\n\nuse fmt;\nuse ops::{Div, Rem, Sub};\nuse str;\nuse slice;\nuse ptr;\nuse mem;\n\n#[doc(hidden)]\ntrait Int: PartialEq + PartialOrd + Div<Output=Self> + Rem<Output=Self> +\n Sub<Output=Self> + Copy {\n fn zero() -> Self;\n fn from_u8(u: u8) -> Self;\n fn to_u8(&self) -> u8;\n fn to_u16(&self) -> u16;\n fn to_u32(&self) -> u32;\n fn to_u64(&self) -> u64;\n fn to_u128(&self) -> u128;\n}\n\nmacro_rules! 
doit {\n ($($t:ident)*) => ($(impl Int for $t {\n fn zero() -> $t { 0 }\n fn from_u8(u: u8) -> $t { u as $t }\n fn to_u8(&self) -> u8 { *self as u8 }\n fn to_u16(&self) -> u16 { *self as u16 }\n fn to_u32(&self) -> u32 { *self as u32 }\n fn to_u64(&self) -> u64 { *self as u64 }\n fn to_u128(&self) -> u128 { *self as u128 }\n })*)\n}\ndoit! { i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize }\n\n\/\/\/ A type that represents a specific radix\n#[doc(hidden)]\ntrait GenericRadix {\n \/\/\/ The number of digits.\n const BASE: u8;\n\n \/\/\/ A radix-specific prefix string.\n const PREFIX: &'static str;\n\n \/\/\/ Converts an integer to corresponding radix digit.\n fn digit(x: u8) -> u8;\n\n \/\/\/ Format an integer using the radix using a formatter.\n fn fmt_int<T: Int>(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ The radix can be as low as 2, so we need a buffer of at least 128\n \/\/ characters for a base 2 number.\n let zero = T::zero();\n let is_nonnegative = x >= zero;\n let mut buf = [0; 128];\n let mut curr = buf.len();\n let base = T::from_u8(Self::BASE);\n if is_nonnegative {\n \/\/ Accumulate each digit of the number from the least significant\n \/\/ to the most significant figure.\n for byte in buf.iter_mut().rev() {\n let n = x % base; \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = Self::digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n } else {\n \/\/ Do the same as above, but accounting for two's complement.\n for byte in buf.iter_mut().rev() {\n let n = zero - (x % base); \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = Self::digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n }\n let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };\n f.pad_integral(is_nonnegative, Self::PREFIX, buf)\n }\n}\n\n\/\/\/ A binary (base 2) radix\n#[derive(Clone, PartialEq)]\nstruct Binary;\n\n\/\/\/ An octal (base 8) radix\n#[derive(Clone, PartialEq)]\nstruct Octal;\n\n\/\/\/ A decimal (base 10) radix\n#[derive(Clone, PartialEq)]\nstruct Decimal;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with lower-case characters\n#[derive(Clone, PartialEq)]\nstruct LowerHex;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with upper-case characters\n#[derive(Clone, PartialEq)]\nstruct UpperHex;\n\nmacro_rules! radix {\n ($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => {\n impl GenericRadix for $T {\n const BASE: u8 = $base;\n const PREFIX: &'static str = $prefix;\n fn digit(x: u8) -> u8 {\n match x {\n $($x => $conv,)+\n x => panic!(\"number not in the range 0..{}: {}\", Self::BASE - 1, x),\n }\n }\n }\n }\n}\n\nradix! { Binary, 2, \"0b\", x @ 0 ... 1 => b'0' + x }\nradix! { Octal, 8, \"0o\", x @ 0 ... 7 => b'0' + x }\nradix! { Decimal, 10, \"\", x @ 0 ... 9 => b'0' + x }\nradix! { LowerHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'a' + (x - 10) }\nradix! { UpperHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'A' + (x - 10) }\n\nmacro_rules! int_base {\n ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::$Trait for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n $Radix.fmt_int(*self as $U, f)\n }\n }\n }\n}\n\nmacro_rules! 
debug {\n ($T:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Debug for $T {\n #[inline]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if f.debug_lower_hex() {\n fmt::LowerHex::fmt(self, f)\n } else if f.debug_upper_hex() {\n fmt::UpperHex::fmt(self, f)\n } else {\n fmt::Display::fmt(self, f)\n }\n }\n }\n }\n}\n\nmacro_rules! integer {\n ($Int:ident, $Uint:ident) => {\n int_base! { Binary for $Int as $Uint -> Binary }\n int_base! { Octal for $Int as $Uint -> Octal }\n int_base! { LowerHex for $Int as $Uint -> LowerHex }\n int_base! { UpperHex for $Int as $Uint -> UpperHex }\n debug! { $Int }\n\n int_base! { Binary for $Uint as $Uint -> Binary }\n int_base! { Octal for $Uint as $Uint -> Octal }\n int_base! { LowerHex for $Uint as $Uint -> LowerHex }\n int_base! { UpperHex for $Uint as $Uint -> UpperHex }\n debug! { $Uint }\n }\n}\ninteger! { isize, usize }\ninteger! { i8, u8 }\ninteger! { i16, u16 }\ninteger! { i32, u32 }\ninteger! { i64, u64 }\ninteger! { i128, u128 }\n\nconst DEC_DIGITS_LUT: &'static[u8] =\n b\"0001020304050607080910111213141516171819\\\n 2021222324252627282930313233343536373839\\\n 4041424344454647484950515253545556575859\\\n 6061626364656667686970717273747576777879\\\n 8081828384858687888990919293949596979899\";\n\nmacro_rules! impl_Display {\n ($($t:ident),*: $conv_fn:ident) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Display for $t {\n #[allow(unused_comparisons)]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let is_nonnegative = *self >= 0;\n let mut n = if is_nonnegative {\n self.$conv_fn()\n } else {\n \/\/ convert the negative num to positive by summing 1 to it's 2 complement\n (!self.$conv_fn()).wrapping_add(1)\n };\n let mut buf: [u8; 39] = unsafe { mem::uninitialized() };\n let mut curr = buf.len() as isize;\n let buf_ptr = buf.as_mut_ptr();\n let lut_ptr = DEC_DIGITS_LUT.as_ptr();\n\n unsafe {\n \/\/ need at least 16 bits for the 4-characters-at-a-time to work.\n if ::mem::size_of::<$t>() >= 2 {\n \/\/ eagerly decode 4 characters at a time\n while n >= 10000 {\n let rem = (n % 10000) as isize;\n n \/= 10000;\n\n let d1 = (rem \/ 100) << 1;\n let d2 = (rem % 100) << 1;\n curr -= 4;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);\n }\n }\n\n \/\/ if we reach here numbers are <= 9999, so at most 4 chars long\n let mut n = n as isize; \/\/ possibly reduce 64bit math\n\n \/\/ decode 2 more chars, if > 2 chars\n if n >= 100 {\n let d1 = (n % 100) << 1;\n n \/= 100;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n\n \/\/ decode last 1 or 2 chars\n if n < 10 {\n curr -= 1;\n *buf_ptr.offset(curr) = (n as u8) + b'0';\n } else {\n let d1 = n << 1;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n }\n\n let buf_slice = unsafe {\n str::from_utf8_unchecked(\n slice::from_raw_parts(buf_ptr.offset(curr), buf.len() - curr as usize))\n };\n f.pad_integral(is_nonnegative, \"\", buf_slice)\n }\n })*);\n}\n\nimpl_Display!(i8, u8, i16, u16, i32, u32: to_u32);\nimpl_Display!(i64, u64: to_u64);\nimpl_Display!(i128, u128: to_u128);\n#[cfg(target_pointer_width = \"16\")]\nimpl_Display!(isize, usize: to_u16);\n#[cfg(target_pointer_width = \"32\")]\nimpl_Display!(isize, usize: to_u32);\n#[cfg(target_pointer_width = \"64\")]\nimpl_Display!(isize, usize: to_u64);\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 
The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse io::{self, SeekFrom, Read, Write, Seek, BufRead, Error, ErrorKind};\nuse fmt;\nuse mem;\n\n\/\/ =============================================================================\n\/\/ Forwarding implementations\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, R: Read + ?Sized> Read for &'a mut R {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, W: Write + ?Sized> Write for &'a mut W {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, S: Seek + ?Sized> Seek for &'a mut S {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: BufRead + ?Sized> BufRead for &'a mut B {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<R: Read + ?Sized> Read for Box<R> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<W: Write + ?Sized> Write for Box<W> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<S: Seek + ?Sized> Seek for Box<S> {\n 
#[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<B: BufRead + ?Sized> BufRead for Box<B> {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n\/\/ =============================================================================\n\/\/ In-memory buffer implementations\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Read for &'a [u8] {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let amt = cmp::min(buf.len(), self.len());\n let (a, b) = self.split_at(amt);\n buf[..amt].copy_from_slice(a);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n if buf.len() > self.len() {\n return Err(Error::new(ErrorKind::UnexpectedEof,\n \"failed to fill whole buffer\"));\n }\n let (a, b) = self.split_at(buf.len());\n buf.copy_from_slice(a);\n *self = b;\n Ok(())\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> BufRead for &'a [u8] {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(*self) }\n\n #[inline]\n fn consume(&mut self, amt: usize) { *self = &self[amt..]; }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Write for &'a mut [u8] {\n #[inline]\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n let amt = cmp::min(data.len(), self.len());\n let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);\n a.copy_from_slice(&data[..amt]);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn write_all(&mut self, data: &[u8]) -> io::Result<()> {\n if self.write(data)? 
== data.len() {\n Ok(())\n } else {\n Err(Error::new(ErrorKind::WriteZero, \"failed to write whole buffer\"))\n }\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Vec<u8> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.extend_from_slice(buf);\n Ok(buf.len())\n }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n self.extend_from_slice(buf);\n Ok(())\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use io::prelude::*;\n use test;\n\n #[bench]\n fn bench_read_slice(b: &mut test::Bencher) {\n let buf = [5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_slice(b: &mut test::Bencher) {\n let mut buf = [0; 1024];\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n\n #[bench]\n fn bench_read_vec(b: &mut test::Bencher) {\n let buf = vec![5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_vec(b: &mut test::Bencher) {\n let mut buf = Vec::with_capacity(1024);\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n}\n<commit_msg>Add documentation for Read, Write impls for slices and Vec<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse io::{self, SeekFrom, Read, Write, Seek, BufRead, Error, ErrorKind};\nuse fmt;\nuse mem;\n\n\/\/ =============================================================================\n\/\/ Forwarding implementations\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, R: Read + ?Sized> Read for &'a mut R {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, W: Write + ?Sized> Write for &'a mut W {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, S: Seek + ?Sized> Seek for &'a mut S {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: BufRead + ?Sized> BufRead for &'a mut B {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<R: Read + ?Sized> Read for Box<R> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<W: Write + ?Sized> Write for Box<W> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<S: Seek + ?Sized> Seek for Box<S> {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<B: BufRead + ?Sized> BufRead for Box<B> {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, 
byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n\/\/ =============================================================================\n\/\/ In-memory buffer implementations\n\n\/\/\/ Read is implemented for `&[u8]` by copying from the slice.\n\/\/\/\n\/\/\/ Note that reading updates the slice to point to the yet unread part.\n\/\/\/ The slice will be empty when EOF is reached.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Read for &'a [u8] {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let amt = cmp::min(buf.len(), self.len());\n let (a, b) = self.split_at(amt);\n buf[..amt].copy_from_slice(a);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n if buf.len() > self.len() {\n return Err(Error::new(ErrorKind::UnexpectedEof,\n \"failed to fill whole buffer\"));\n }\n let (a, b) = self.split_at(buf.len());\n buf.copy_from_slice(a);\n *self = b;\n Ok(())\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> BufRead for &'a [u8] {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(*self) }\n\n #[inline]\n fn consume(&mut self, amt: usize) { *self = &self[amt..]; }\n}\n\n\/\/\/ Write is implemented for `&mut [u8]` by copying into the slice, overwriting\n\/\/\/ its data.\n\/\/\/\n\/\/\/ Note that writing updates the slice to point to the yet unwritten part.\n\/\/\/ The slice will be empty when it has been completely overwritten.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Write for &'a mut [u8] {\n #[inline]\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n let amt = cmp::min(data.len(), self.len());\n let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);\n a.copy_from_slice(&data[..amt]);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn write_all(&mut self, data: &[u8]) -> io::Result<()> {\n if self.write(data)? 
== data.len() {\n Ok(())\n } else {\n Err(Error::new(ErrorKind::WriteZero, \"failed to write whole buffer\"))\n }\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n\/\/\/ Write is implemented for `Vec<u8>` by appending to the vector.\n\/\/\/ The vector will grow as needed.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Vec<u8> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.extend_from_slice(buf);\n Ok(buf.len())\n }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n self.extend_from_slice(buf);\n Ok(())\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use io::prelude::*;\n use test;\n\n #[bench]\n fn bench_read_slice(b: &mut test::Bencher) {\n let buf = [5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_slice(b: &mut test::Bencher) {\n let mut buf = [0; 1024];\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n\n #[bench]\n fn bench_read_vec(b: &mut test::Bencher) {\n let buf = vec![5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_vec(b: &mut test::Bencher) {\n let mut buf = Vec::with_capacity(1024);\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::error::Error;\nuse std::ffi::OsStr;\nuse std::path::{Path, PathBuf};\nuse std::process::Command;\n\nuse devicemapper::DM;\nuse devicemapper::{Bytes, Sectors};\nuse devicemapper::{ThinDev, ThinDevId, ThinStatus};\nuse devicemapper::ThinPoolDev;\n\nuse super::super::consts::IEC;\nuse super::super::engine::{Filesystem, HasName, HasUuid};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::types::{FilesystemUuid, PoolUuid};\n\nuse super::dmdevice::{ThinRole, format_thin_name};\nuse super::serde_structs::{FilesystemSave, Recordable};\n\n#[derive(Debug)]\npub struct StratFilesystem {\n fs_id: FilesystemUuid,\n name: String,\n thin_dev: ThinDev,\n}\n\npub enum FilesystemStatus {\n Good,\n Failed,\n}\n\nimpl StratFilesystem {\n pub fn initialize(pool_uuid: &PoolUuid,\n fs_id: FilesystemUuid,\n thindev_id: ThinDevId,\n name: &str,\n dm: &DM,\n thin_pool: &ThinPoolDev)\n -> EngineResult<StratFilesystem> {\n let device_name = format_thin_name(pool_uuid, ThinRole::Filesystem(fs_id));\n let thin_dev = try!(ThinDev::new(&device_name,\n dm,\n thin_pool,\n thindev_id,\n Bytes(IEC::Ti).sectors()));\n let fs = StratFilesystem {\n fs_id: fs_id,\n name: name.to_owned(),\n thin_dev: thin_dev,\n };\n try!(create_fs(try!(fs.devnode()).as_path()));\n Ok(fs)\n }\n\n \/\/\/ Setup a filesystem, setting up the thin device as necessary.\n \/\/ FIXME: Check for still existing device mapper devices.\n pub fn setup(pool_uuid: &PoolUuid,\n fs_id: FilesystemUuid,\n thindev_id: ThinDevId,\n name: &str,\n size: Sectors,\n dm: &DM,\n thin_pool: &ThinPoolDev)\n -> EngineResult<StratFilesystem> {\n let device_name = format_thin_name(pool_uuid, ThinRole::Filesystem(fs_id));\n let thin_dev = try!(ThinDev::setup(&device_name, dm, thin_pool, thindev_id, size));\n\n Ok(StratFilesystem {\n fs_id: fs_id,\n name: name.to_owned(),\n thin_dev: thin_dev,\n })\n }\n\n pub fn check(&self, dm: &DM) -> EngineResult<FilesystemStatus> {\n match try!(self.thin_dev.status(dm)) {\n ThinStatus::Good((_mapped, _highest)) => {\n \/\/ TODO: check if filesystem is getting full and might need to\n \/\/ be extended (hint: use statfs(2))\n \/\/ TODO: periodically kick off fstrim?\n }\n ThinStatus::Fail => return Ok(FilesystemStatus::Failed),\n }\n Ok(FilesystemStatus::Good)\n }\n}\n\nimpl HasName for StratFilesystem {\n fn name(&self) -> &str {\n &self.name\n }\n}\n\nimpl HasUuid for StratFilesystem {\n fn uuid(&self) -> &FilesystemUuid {\n &self.fs_id\n }\n}\n\nimpl Filesystem for StratFilesystem {\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn destroy(self) -> EngineResult<()> {\n let dm = try!(DM::new());\n match self.thin_dev.teardown(&dm) {\n Ok(_) => Ok(()),\n Err(e) => Err(EngineError::Engine(ErrorEnum::Error, e.description().into())),\n }\n }\n\n fn devnode(&self) -> EngineResult<PathBuf> {\n Ok(try!(self.thin_dev.devnode()))\n }\n}\n\nimpl Recordable<FilesystemSave> for StratFilesystem {\n fn record(&self) -> EngineResult<FilesystemSave> {\n Ok(FilesystemSave {\n name: self.name.clone(),\n uuid: self.fs_id,\n thin_id: self.thin_dev.id(),\n size: self.thin_dev.size(),\n })\n }\n}\n\npub fn create_fs(dev_path: &Path) -> EngineResult<()> {\n\n debug!(\"Create filesystem for : {:?}\", dev_path);\n let output = try!(Command::new(\"mkfs.xfs\")\n .arg(\"-f\")\n .arg(&dev_path)\n .output());\n\n if output.status.success() {\n debug!(\"Created xfs filesystem on 
{:?}\", dev_path)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n\npub fn mount_fs(dev_path: &Path, mount_point: &Path) -> EngineResult<()> {\n\n debug!(\"Mount filesystem {:?} on : {:?}\", dev_path, mount_point);\n let output = try!(Command::new(\"mount\")\n .arg(&dev_path)\n .arg(mount_point)\n .output());\n\n if output.status.success() {\n debug!(\"Mounted filesystem on {:?}\", mount_point)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n\npub fn unmount_fs<I, S>(mount_point: &Path, flags: I) -> EngineResult<()>\n where I: IntoIterator<Item = S>,\n S: AsRef<OsStr>\n{\n debug!(\"Unmount filesystem {:?}\", mount_point);\n\n let mut command = Command::new(\"umount\");\n let output = try!(command.arg(mount_point).args(flags).output());\n\n if output.status.success() {\n debug!(\"Unmounted filesystem {:?}\", mount_point)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n<commit_msg>Add a method to get the thin_id for a filesystem<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::error::Error;\nuse std::ffi::OsStr;\nuse std::path::{Path, PathBuf};\nuse std::process::Command;\n\nuse devicemapper::DM;\nuse devicemapper::{Bytes, Sectors};\nuse devicemapper::{ThinDev, ThinDevId, ThinStatus};\nuse devicemapper::ThinPoolDev;\n\nuse super::super::consts::IEC;\nuse super::super::engine::{Filesystem, HasName, HasUuid};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::types::{FilesystemUuid, PoolUuid};\n\nuse super::dmdevice::{ThinRole, format_thin_name};\nuse super::serde_structs::{FilesystemSave, Recordable};\n\n#[derive(Debug)]\npub struct StratFilesystem {\n fs_id: FilesystemUuid,\n name: String,\n thin_dev: ThinDev,\n}\n\npub enum FilesystemStatus {\n Good,\n Failed,\n}\n\nimpl StratFilesystem {\n pub fn initialize(pool_uuid: &PoolUuid,\n fs_id: FilesystemUuid,\n thindev_id: ThinDevId,\n name: &str,\n dm: &DM,\n thin_pool: &ThinPoolDev)\n -> EngineResult<StratFilesystem> {\n let device_name = format_thin_name(pool_uuid, ThinRole::Filesystem(fs_id));\n let thin_dev = try!(ThinDev::new(&device_name,\n dm,\n thin_pool,\n thindev_id,\n Bytes(IEC::Ti).sectors()));\n let fs = StratFilesystem {\n fs_id: fs_id,\n name: name.to_owned(),\n thin_dev: thin_dev,\n };\n try!(create_fs(try!(fs.devnode()).as_path()));\n Ok(fs)\n }\n\n \/\/\/ Setup a filesystem, setting up the thin device as necessary.\n \/\/ FIXME: Check for still existing device mapper devices.\n pub fn setup(pool_uuid: &PoolUuid,\n fs_id: FilesystemUuid,\n thindev_id: ThinDevId,\n name: &str,\n size: Sectors,\n dm: &DM,\n thin_pool: &ThinPoolDev)\n -> EngineResult<StratFilesystem> {\n let device_name = format_thin_name(pool_uuid, ThinRole::Filesystem(fs_id));\n let thin_dev = try!(ThinDev::setup(&device_name, dm, thin_pool, thindev_id, size));\n\n Ok(StratFilesystem {\n fs_id: fs_id,\n name: name.to_owned(),\n thin_dev: thin_dev,\n })\n }\n\n pub fn check(&self, dm: &DM) -> EngineResult<FilesystemStatus> {\n 
match try!(self.thin_dev.status(dm)) {\n ThinStatus::Good((_mapped, _highest)) => {\n \/\/ TODO: check if filesystem is getting full and might need to\n \/\/ be extended (hint: use statfs(2))\n \/\/ TODO: periodically kick off fstrim?\n }\n ThinStatus::Fail => return Ok(FilesystemStatus::Failed),\n }\n Ok(FilesystemStatus::Good)\n }\n\n \/\/\/ The thin id for the thin device that backs this filesystem.\n pub fn thin_id(&self) -> ThinDevId {\n self.thin_dev.id()\n }\n}\n\nimpl HasName for StratFilesystem {\n fn name(&self) -> &str {\n &self.name\n }\n}\n\nimpl HasUuid for StratFilesystem {\n fn uuid(&self) -> &FilesystemUuid {\n &self.fs_id\n }\n}\n\nimpl Filesystem for StratFilesystem {\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn destroy(self) -> EngineResult<()> {\n let dm = try!(DM::new());\n match self.thin_dev.teardown(&dm) {\n Ok(_) => Ok(()),\n Err(e) => Err(EngineError::Engine(ErrorEnum::Error, e.description().into())),\n }\n }\n\n fn devnode(&self) -> EngineResult<PathBuf> {\n Ok(try!(self.thin_dev.devnode()))\n }\n}\n\nimpl Recordable<FilesystemSave> for StratFilesystem {\n fn record(&self) -> EngineResult<FilesystemSave> {\n Ok(FilesystemSave {\n name: self.name.clone(),\n uuid: self.fs_id,\n thin_id: self.thin_dev.id(),\n size: self.thin_dev.size(),\n })\n }\n}\n\npub fn create_fs(dev_path: &Path) -> EngineResult<()> {\n\n debug!(\"Create filesystem for : {:?}\", dev_path);\n let output = try!(Command::new(\"mkfs.xfs\")\n .arg(\"-f\")\n .arg(&dev_path)\n .output());\n\n if output.status.success() {\n debug!(\"Created xfs filesystem on {:?}\", dev_path)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n\npub fn mount_fs(dev_path: &Path, mount_point: &Path) -> EngineResult<()> {\n\n debug!(\"Mount filesystem {:?} on : {:?}\", dev_path, mount_point);\n let output = try!(Command::new(\"mount\")\n .arg(&dev_path)\n .arg(mount_point)\n .output());\n\n if output.status.success() {\n debug!(\"Mounted filesystem on {:?}\", mount_point)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n\npub fn unmount_fs<I, S>(mount_point: &Path, flags: I) -> EngineResult<()>\n where I: IntoIterator<Item = S>,\n S: AsRef<OsStr>\n{\n debug!(\"Unmount filesystem {:?}\", mount_point);\n\n let mut command = Command::new(\"umount\");\n let output = try!(command.arg(mount_point).args(flags).output());\n\n if output.status.success() {\n debug!(\"Unmounted filesystem {:?}\", mount_point)\n } else {\n let message = String::from_utf8_lossy(&output.stderr);\n debug!(\"stderr: {}\", message);\n return Err(EngineError::Engine(ErrorEnum::Error, message.into()));\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added semicolons to make it compilable with latest nightly build<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Created update-licenses to update previous versions of crates to have their corresponding licenses.<commit_after>\/\/! Updates all of the licenses from the existing crates into each of their\n\/\/! 
already existing versions.\n\n\/\/\n\/\/ Usage:\n\/\/ cargo run --bin update-licenses\n\nextern crate cargo_registry;\nextern crate postgres;\n\nuse std::io::prelude::*;\n\nfn main() {\n let conn = cargo_registry::db::connect_now();\n {\n let tx = conn.transaction().unwrap();\n transfer(&tx);\n tx.set_commit();\n tx.finish().unwrap();\n }\n}\n\nfn transfer(tx: &postgres::transaction::Transaction) {\n let stmt = tx.prepare(\"SELECT id, name, license FROM crates\").unwrap();\n let rows = stmt.query(&[]).unwrap();\n\n for row in rows.iter() {\n let id: i32 = row.get(\"id\");\n let name: String = row.get(\"name\");\n let license: String = row.get(\"license\");\n\n println!(\"Setting the license for all versions of {} to {}.\", name, license);\n\n let num_updated = tx.execute(\"UPDATE versions SET license = $1 WHERE crate_id = $2\", &[&license, &id]).unwrap();\n assert!(num_updated > 0);\n }\n\n get_confirm(\"Finish committing?\");\n}\n\nfn get_confirm(msg: &str) {\n print!(\"{} [y\/N]: \", msg);\n std::io::stdout().flush().unwrap();\n \n let mut line = String::new();\n std::io::stdin().read_line(&mut line).unwrap();\n\n if !line.starts_with(\"y\") {\n std::process::exit(0);\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/! Shim which is passed to Cargo as \"rustc\" when running the bootstrap.\n\/\/!\n\/\/! This shim will take care of some various tasks that our build process\n\/\/! requires that Cargo can't quite do through normal configuration:\n\/\/!\n\/\/! 1. When compiling build scripts and build dependencies, we need a guaranteed\n\/\/! full standard library available. The only compiler which actually has\n\/\/! this is the snapshot, so we detect this situation and always compile with\n\/\/! the snapshot compiler.\n\/\/! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling\n\/\/! (and this slightly differs based on a whether we're using a snapshot or\n\/\/! not), so we do that all here.\n\/\/!\n\/\/! This may one day be replaced by RUSTFLAGS, but the dynamic nature of\n\/\/! switching compilers for the bootstrap and for build scripts will probably\n\/\/! never get replaced.\n\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\nuse std::str::FromStr;\nuse std::time::Instant;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n\n \/\/ Detect whether or not we're a build script depending on whether --target\n \/\/ is passed (a bit janky...)\n let target = args.windows(2).find(|w| &*w[0] == \"--target\").and_then(|w| w[1].to_str());\n let version = args.iter().find(|w| &**w == \"-vV\");\n\n let verbose = match env::var(\"RUSTC_VERBOSE\") {\n Ok(s) => usize::from_str(&s).expect(\"RUSTC_VERBOSE should be an integer\"),\n Err(_) => 0,\n };\n\n \/\/ Use a different compiler for build scripts, since there may not yet be a\n \/\/ libstd for the real compiler to use. However, if Cargo is attempting to\n \/\/ determine the version of the compiler, the real compiler needs to be\n \/\/ used. 
Currently, these two states are differentiated based on whether\n \/\/ --target and -vV is\/isn't passed.\n let (rustc, libdir) = if target.is_none() && version.is_none() {\n (\"RUSTC_SNAPSHOT\", \"RUSTC_SNAPSHOT_LIBDIR\")\n } else {\n (\"RUSTC_REAL\", \"RUSTC_LIBDIR\")\n };\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n let on_fail = env::var_os(\"RUSTC_ON_FAIL\").map(Command::new);\n\n let rustc = env::var_os(rustc).unwrap_or_else(|| panic!(\"{:?} was not set\", rustc));\n let libdir = env::var_os(libdir).unwrap_or_else(|| panic!(\"{:?} was not set\", libdir));\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(&libdir));\n\n let mut cmd = Command::new(rustc);\n cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());\n\n \/\/ Get the name of the crate we're compiling, if any.\n let crate_name =\n args.windows(2).find(|args| args[0] == \"--crate-name\").and_then(|args| args[1].to_str());\n\n if let Some(crate_name) = crate_name {\n if let Some(target) = env::var_os(\"RUSTC_TIME\") {\n if target == \"all\"\n || target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)\n {\n cmd.arg(\"-Ztime\");\n }\n }\n }\n\n \/\/ Print backtrace in case of ICE\n if env::var(\"RUSTC_BACKTRACE_ON_ICE\").is_ok() && env::var(\"RUST_BACKTRACE\").is_err() {\n cmd.env(\"RUST_BACKTRACE\", \"1\");\n }\n\n if let Ok(lint_flags) = env::var(\"RUSTC_LINT_FLAGS\") {\n cmd.args(lint_flags.split_whitespace());\n }\n\n if target.is_some() {\n \/\/ The stage0 compiler has a special sysroot distinct from what we\n \/\/ actually downloaded, so we just always pass the `--sysroot` option,\n \/\/ unless one is already set.\n if !args.iter().any(|arg| arg == \"--sysroot\") {\n cmd.arg(\"--sysroot\").arg(&sysroot);\n }\n\n \/\/ If we're compiling specifically the `panic_abort` crate then we pass\n \/\/ the `-C panic=abort` option. Note that we do not do this for any\n \/\/ other crate intentionally as this is the only crate for now that we\n \/\/ ship with panic=abort.\n \/\/\n \/\/ This... is a bit of a hack how we detect this. Ideally this\n \/\/ information should be encoded in the crate I guess? 
Would likely\n \/\/ require an RFC amendment to RFC 1513, however.\n \/\/\n \/\/ `compiler_builtins` are unconditionally compiled with panic=abort to\n \/\/ workaround undefined references to `rust_eh_unwind_resume` generated\n \/\/ otherwise, see issue https:\/\/github.com\/rust-lang\/rust\/issues\/43095.\n if crate_name == Some(\"panic_abort\")\n || crate_name == Some(\"compiler_builtins\") && stage != \"0\"\n {\n cmd.arg(\"-C\").arg(\"panic=abort\");\n }\n } else {\n \/\/ FIXME(rust-lang\/cargo#5754) we shouldn't be using special env vars\n \/\/ here, but rather Cargo should know what flags to pass rustc itself.\n\n \/\/ Override linker if necessary.\n if let Ok(host_linker) = env::var(\"RUSTC_HOST_LINKER\") {\n cmd.arg(format!(\"-Clinker={}\", host_linker));\n }\n\n if let Ok(s) = env::var(\"RUSTC_HOST_CRT_STATIC\") {\n if s == \"true\" {\n cmd.arg(\"-C\").arg(\"target-feature=+crt-static\");\n }\n if s == \"false\" {\n cmd.arg(\"-C\").arg(\"target-feature=-crt-static\");\n }\n }\n }\n\n if let Ok(map) = env::var(\"RUSTC_DEBUGINFO_MAP\") {\n cmd.arg(\"--remap-path-prefix\").arg(&map);\n }\n\n \/\/ Force all crates compiled by this compiler to (a) be unstable and (b)\n \/\/ allow the `rustc_private` feature to link to other unstable crates\n \/\/ also in the sysroot. We also do this for host crates, since those\n \/\/ may be proc macros, in which case we might ship them.\n if env::var_os(\"RUSTC_FORCE_UNSTABLE\").is_some() && (stage != \"0\" || target.is_some()) {\n cmd.arg(\"-Z\").arg(\"force-unstable-if-unmarked\");\n }\n\n if verbose > 1 {\n eprintln!(\n \"rustc command: {:?}={:?} {:?}\",\n bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap(),\n cmd,\n );\n eprintln!(\"sysroot: {:?}\", sysroot);\n eprintln!(\"libdir: {:?}\", libdir);\n }\n\n let start = Instant::now();\n let status = {\n let errmsg = format!(\"\\nFailed to run:\\n{:?}\\n-------------\", cmd);\n cmd.status().expect(&errmsg)\n };\n\n if env::var_os(\"RUSTC_PRINT_STEP_TIMINGS\").is_some() {\n if let Some(crate_name) = crate_name {\n let dur = start.elapsed();\n let is_test = args.iter().any(|a| a == \"--test\");\n eprintln!(\n \"[RUSTC-TIMING] {} test:{} {}.{:03}\",\n crate_name,\n is_test,\n dur.as_secs(),\n dur.subsec_millis()\n );\n }\n }\n\n if status.success() {\n std::process::exit(0);\n \/\/ note: everything below here is unreachable. do not put code that\n \/\/ should run on success, after this block.\n }\n println!(\"\\nDid not run successfully: {}\\n{:?}\\n-------------\", status, cmd);\n\n if let Some(mut on_fail) = on_fail {\n on_fail.status().expect(\"Could not run the on_fail command\");\n }\n\n \/\/ Preserve the exit code. In case of signal, exit with 0xfe since it's\n \/\/ awkward to preserve this status in a cross-platform way.\n match status.code() {\n Some(i) => std::process::exit(i),\n None => {\n eprintln!(\"rustc exited with {}\", status);\n std::process::exit(0xfe);\n }\n }\n}\n<commit_msg>Avoid dumping rustc invocations to stdout<commit_after>\/\/! Shim which is passed to Cargo as \"rustc\" when running the bootstrap.\n\/\/!\n\/\/! This shim will take care of some various tasks that our build process\n\/\/! requires that Cargo can't quite do through normal configuration:\n\/\/!\n\/\/! 1. When compiling build scripts and build dependencies, we need a guaranteed\n\/\/! full standard library available. The only compiler which actually has\n\/\/! this is the snapshot, so we detect this situation and always compile with\n\/\/! the snapshot compiler.\n\/\/! 2. 
We pass a bunch of `--cfg` and other flags based on what we're compiling\n\/\/! (and this slightly differs based on a whether we're using a snapshot or\n\/\/! not), so we do that all here.\n\/\/!\n\/\/! This may one day be replaced by RUSTFLAGS, but the dynamic nature of\n\/\/! switching compilers for the bootstrap and for build scripts will probably\n\/\/! never get replaced.\n\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\nuse std::str::FromStr;\nuse std::time::Instant;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n\n \/\/ Detect whether or not we're a build script depending on whether --target\n \/\/ is passed (a bit janky...)\n let target = args.windows(2).find(|w| &*w[0] == \"--target\").and_then(|w| w[1].to_str());\n let version = args.iter().find(|w| &**w == \"-vV\");\n\n let verbose = match env::var(\"RUSTC_VERBOSE\") {\n Ok(s) => usize::from_str(&s).expect(\"RUSTC_VERBOSE should be an integer\"),\n Err(_) => 0,\n };\n\n \/\/ Use a different compiler for build scripts, since there may not yet be a\n \/\/ libstd for the real compiler to use. However, if Cargo is attempting to\n \/\/ determine the version of the compiler, the real compiler needs to be\n \/\/ used. Currently, these two states are differentiated based on whether\n \/\/ --target and -vV is\/isn't passed.\n let (rustc, libdir) = if target.is_none() && version.is_none() {\n (\"RUSTC_SNAPSHOT\", \"RUSTC_SNAPSHOT_LIBDIR\")\n } else {\n (\"RUSTC_REAL\", \"RUSTC_LIBDIR\")\n };\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n let on_fail = env::var_os(\"RUSTC_ON_FAIL\").map(Command::new);\n\n let rustc = env::var_os(rustc).unwrap_or_else(|| panic!(\"{:?} was not set\", rustc));\n let libdir = env::var_os(libdir).unwrap_or_else(|| panic!(\"{:?} was not set\", libdir));\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(&libdir));\n\n let mut cmd = Command::new(rustc);\n cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());\n\n \/\/ Get the name of the crate we're compiling, if any.\n let crate_name =\n args.windows(2).find(|args| args[0] == \"--crate-name\").and_then(|args| args[1].to_str());\n\n if let Some(crate_name) = crate_name {\n if let Some(target) = env::var_os(\"RUSTC_TIME\") {\n if target == \"all\"\n || target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)\n {\n cmd.arg(\"-Ztime\");\n }\n }\n }\n\n \/\/ Print backtrace in case of ICE\n if env::var(\"RUSTC_BACKTRACE_ON_ICE\").is_ok() && env::var(\"RUST_BACKTRACE\").is_err() {\n cmd.env(\"RUST_BACKTRACE\", \"1\");\n }\n\n if let Ok(lint_flags) = env::var(\"RUSTC_LINT_FLAGS\") {\n cmd.args(lint_flags.split_whitespace());\n }\n\n if target.is_some() {\n \/\/ The stage0 compiler has a special sysroot distinct from what we\n \/\/ actually downloaded, so we just always pass the `--sysroot` option,\n \/\/ unless one is already set.\n if !args.iter().any(|arg| arg == \"--sysroot\") {\n cmd.arg(\"--sysroot\").arg(&sysroot);\n }\n\n \/\/ If we're compiling specifically the `panic_abort` crate then we pass\n \/\/ the `-C panic=abort` option. Note that we do not do this for any\n \/\/ other crate intentionally as this is the only crate for now that we\n \/\/ ship with panic=abort.\n \/\/\n \/\/ This... is a bit of a hack how we detect this. Ideally this\n \/\/ information should be encoded in the crate I guess? 
Would likely\n \/\/ require an RFC amendment to RFC 1513, however.\n \/\/\n \/\/ `compiler_builtins` are unconditionally compiled with panic=abort to\n \/\/ workaround undefined references to `rust_eh_unwind_resume` generated\n \/\/ otherwise, see issue https:\/\/github.com\/rust-lang\/rust\/issues\/43095.\n if crate_name == Some(\"panic_abort\")\n || crate_name == Some(\"compiler_builtins\") && stage != \"0\"\n {\n cmd.arg(\"-C\").arg(\"panic=abort\");\n }\n } else {\n \/\/ FIXME(rust-lang\/cargo#5754) we shouldn't be using special env vars\n \/\/ here, but rather Cargo should know what flags to pass rustc itself.\n\n \/\/ Override linker if necessary.\n if let Ok(host_linker) = env::var(\"RUSTC_HOST_LINKER\") {\n cmd.arg(format!(\"-Clinker={}\", host_linker));\n }\n\n if let Ok(s) = env::var(\"RUSTC_HOST_CRT_STATIC\") {\n if s == \"true\" {\n cmd.arg(\"-C\").arg(\"target-feature=+crt-static\");\n }\n if s == \"false\" {\n cmd.arg(\"-C\").arg(\"target-feature=-crt-static\");\n }\n }\n }\n\n if let Ok(map) = env::var(\"RUSTC_DEBUGINFO_MAP\") {\n cmd.arg(\"--remap-path-prefix\").arg(&map);\n }\n\n \/\/ Force all crates compiled by this compiler to (a) be unstable and (b)\n \/\/ allow the `rustc_private` feature to link to other unstable crates\n \/\/ also in the sysroot. We also do this for host crates, since those\n \/\/ may be proc macros, in which case we might ship them.\n if env::var_os(\"RUSTC_FORCE_UNSTABLE\").is_some() && (stage != \"0\" || target.is_some()) {\n cmd.arg(\"-Z\").arg(\"force-unstable-if-unmarked\");\n }\n\n if verbose > 1 {\n eprintln!(\n \"rustc command: {:?}={:?} {:?}\",\n bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap(),\n cmd,\n );\n eprintln!(\"sysroot: {:?}\", sysroot);\n eprintln!(\"libdir: {:?}\", libdir);\n }\n\n let start = Instant::now();\n let status = {\n let errmsg = format!(\"\\nFailed to run:\\n{:?}\\n-------------\", cmd);\n cmd.status().expect(&errmsg)\n };\n\n if env::var_os(\"RUSTC_PRINT_STEP_TIMINGS\").is_some() {\n if let Some(crate_name) = crate_name {\n let dur = start.elapsed();\n let is_test = args.iter().any(|a| a == \"--test\");\n eprintln!(\n \"[RUSTC-TIMING] {} test:{} {}.{:03}\",\n crate_name,\n is_test,\n dur.as_secs(),\n dur.subsec_millis()\n );\n }\n }\n\n if status.success() {\n std::process::exit(0);\n \/\/ note: everything below here is unreachable. do not put code that\n \/\/ should run on success, after this block.\n }\n if verbose > 0 {\n println!(\"\\nDid not run successfully: {}\\n{:?}\\n-------------\", status, cmd);\n }\n\n if let Some(mut on_fail) = on_fail {\n on_fail.status().expect(\"Could not run the on_fail command\");\n }\n\n \/\/ Preserve the exit code. In case of signal, exit with 0xfe since it's\n \/\/ awkward to preserve this status in a cross-platform way.\n match status.code() {\n Some(i) => std::process::exit(i),\n None => {\n eprintln!(\"rustc exited with {}\", status);\n std::process::exit(0xfe);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::HashMap;\n\nuse core::{\n Dependency,\n PackageId,\n Summary,\n Registry\n};\n\nuse util::{CargoResult, human, internal};\n\n\/* TODO:\n * - The correct input here is not a registry. Resolves should be performable\n * on package summaries vs. 
the packages themselves.\n *\/\npub fn resolve<R: Registry>(deps: &[Dependency],\n registry: &mut R) -> CargoResult<Vec<PackageId>> {\n log!(5, \"resolve; deps={}\", deps);\n\n let mut remaining = Vec::from_slice(deps);\n let mut resolve = HashMap::<String, Summary>::new();\n\n loop {\n let curr = match remaining.pop() {\n Some(curr) => curr,\n None => {\n let ret = resolve.values().map(|summary| {\n summary.get_package_id().clone()\n }).collect();\n log!(5, \"resolve complete; ret={}\", ret);\n return Ok(ret);\n }\n };\n\n let opts = try!(registry.query(&curr));\n\n if opts.len() == 0 {\n return Err(human(format!(\"No package named {} found\", curr.get_name())));\n }\n\n if opts.len() > 1 {\n return Err(internal(format!(\"At the moment, Cargo only supports a\\\n single source for a particular package name ({}).\", curr.get_name())));\n }\n\n let pkg = opts.get(0).clone();\n resolve.insert(pkg.get_name().to_str(), pkg.clone());\n\n for dep in pkg.get_dependencies().iter() {\n if !dep.is_transitive() { continue; }\n\n if !resolve.contains_key_equiv(&dep.get_name()) {\n remaining.push(dep.clone());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use url;\n use hamcrest::{assert_that, equal_to, contains};\n\n use core::source::{SourceId, RegistryKind, Location, Remote};\n use core::{Dependency, PackageId, Summary};\n use super::resolve;\n\n trait ToDep {\n fn to_dep(self) -> Dependency;\n }\n\n impl ToDep for &'static str {\n fn to_dep(self) -> Dependency {\n let url = url::from_str(\"http:\/\/example.com\").unwrap();\n let source_id = SourceId::new(RegistryKind, Remote(url));\n Dependency::parse(self, Some(\"1.0.0\"), &source_id).unwrap()\n }\n }\n\n impl ToDep for Dependency {\n fn to_dep(self) -> Dependency {\n self\n }\n }\n\n macro_rules! pkg(\n ($name:expr => $($deps:expr),+) => ({\n let d: Vec<Dependency> = vec!($($deps.to_dep()),+);\n\n Summary::new(&PackageId::new($name, \"1.0.0\", ®istry_loc()).unwrap(),\n d.as_slice())\n });\n\n ($name:expr) => (\n Summary::new(&PackageId::new($name, \"1.0.0\", ®istry_loc()).unwrap(),\n [])\n )\n )\n\n fn registry_loc() -> Location {\n Location::parse(\"http:\/\/www.example.com\/\").unwrap()\n }\n\n fn pkg(name: &str) -> Summary {\n Summary::new(&PackageId::new(name, \"1.0.0\", ®istry_loc()).unwrap(),\n &[])\n }\n\n fn dep(name: &str) -> Dependency {\n let url = url::from_str(\"http:\/\/example.com\").unwrap();\n let source_id = SourceId::new(RegistryKind, Remote(url));\n Dependency::parse(name, Some(\"1.0.0\"), &source_id).unwrap()\n }\n\n fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {\n pkgs\n }\n\n fn names(names: &[&'static str]) -> Vec<PackageId> {\n names.iter()\n .map(|name| PackageId::new(*name, \"1.0.0\", ®istry_loc()).unwrap())\n .collect()\n }\n\n #[test]\n pub fn test_resolving_empty_dependency_list() {\n let res = resolve([], &mut registry(vec!())).unwrap();\n\n assert_that(&res, equal_to(&names([])));\n }\n\n #[test]\n pub fn test_resolving_only_package() {\n let mut reg = registry(vec!(pkg(\"foo\")));\n let res = resolve([dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), equal_to(&names([\"foo\"])));\n }\n\n #[test]\n pub fn test_resolving_one_dep() {\n let mut reg = registry(vec!(pkg(\"foo\"), pkg(\"bar\")));\n let res = resolve([dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), equal_to(&names([\"foo\"])));\n }\n\n #[test]\n pub fn test_resolving_multiple_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\"), pkg!(\"bar\"), pkg!(\"baz\")));\n let res = resolve([dep(\"foo\"), dep(\"baz\")], &mut reg).unwrap();\n\n 
assert_that(&res, contains(names([\"foo\", \"baz\"])).exactly());\n }\n\n #[test]\n pub fn test_resolving_transitive_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\"), pkg!(\"bar\" => \"foo\")));\n let res = resolve([dep(\"bar\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\"])));\n }\n\n #[test]\n pub fn test_resolving_common_transitive_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\" => \"bar\"), pkg!(\"bar\")));\n let res = resolve([dep(\"foo\"), dep(\"bar\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\"])));\n }\n\n #[test]\n pub fn test_resolving_with_dev_deps() {\n let mut reg = registry(vec!(\n pkg!(\"foo\" => \"bar\", dep(\"baz\").as_dev()),\n pkg!(\"baz\" => \"bat\", dep(\"bam\").as_dev()),\n pkg!(\"bar\"),\n pkg!(\"bat\")\n ));\n\n let res = resolve([dep(\"foo\"), dep(\"baz\").as_dev()], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\", \"baz\"])));\n }\n}\n<commit_msg>auto merge of #116 : Indiv0\/cargo\/typofix, r=alexcrichton<commit_after>use std::collections::HashMap;\n\nuse core::{\n Dependency,\n PackageId,\n Summary,\n Registry\n};\n\nuse util::{CargoResult, human, internal};\n\n\/* TODO:\n * - The correct input here is not a registry. Resolves should be performable\n * on package summaries vs. the packages themselves.\n *\/\npub fn resolve<R: Registry>(deps: &[Dependency],\n registry: &mut R) -> CargoResult<Vec<PackageId>> {\n log!(5, \"resolve; deps={}\", deps);\n\n let mut remaining = Vec::from_slice(deps);\n let mut resolve = HashMap::<String, Summary>::new();\n\n loop {\n let curr = match remaining.pop() {\n Some(curr) => curr,\n None => {\n let ret = resolve.values().map(|summary| {\n summary.get_package_id().clone()\n }).collect();\n log!(5, \"resolve complete; ret={}\", ret);\n return Ok(ret);\n }\n };\n\n let opts = try!(registry.query(&curr));\n\n if opts.len() == 0 {\n return Err(human(format!(\"No package named {} found\", curr.get_name())));\n }\n\n if opts.len() > 1 {\n return Err(internal(format!(\"At the moment, Cargo only supports a \\\n single source for a particular package name ({}).\", curr.get_name())));\n }\n\n let pkg = opts.get(0).clone();\n resolve.insert(pkg.get_name().to_str(), pkg.clone());\n\n for dep in pkg.get_dependencies().iter() {\n if !dep.is_transitive() { continue; }\n\n if !resolve.contains_key_equiv(&dep.get_name()) {\n remaining.push(dep.clone());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use url;\n use hamcrest::{assert_that, equal_to, contains};\n\n use core::source::{SourceId, RegistryKind, Location, Remote};\n use core::{Dependency, PackageId, Summary};\n use super::resolve;\n\n trait ToDep {\n fn to_dep(self) -> Dependency;\n }\n\n impl ToDep for &'static str {\n fn to_dep(self) -> Dependency {\n let url = url::from_str(\"http:\/\/example.com\").unwrap();\n let source_id = SourceId::new(RegistryKind, Remote(url));\n Dependency::parse(self, Some(\"1.0.0\"), &source_id).unwrap()\n }\n }\n\n impl ToDep for Dependency {\n fn to_dep(self) -> Dependency {\n self\n }\n }\n\n macro_rules! 
pkg(\n ($name:expr => $($deps:expr),+) => ({\n let d: Vec<Dependency> = vec!($($deps.to_dep()),+);\n\n Summary::new(&PackageId::new($name, \"1.0.0\", ®istry_loc()).unwrap(),\n d.as_slice())\n });\n\n ($name:expr) => (\n Summary::new(&PackageId::new($name, \"1.0.0\", ®istry_loc()).unwrap(),\n [])\n )\n )\n\n fn registry_loc() -> Location {\n Location::parse(\"http:\/\/www.example.com\/\").unwrap()\n }\n\n fn pkg(name: &str) -> Summary {\n Summary::new(&PackageId::new(name, \"1.0.0\", ®istry_loc()).unwrap(),\n &[])\n }\n\n fn dep(name: &str) -> Dependency {\n let url = url::from_str(\"http:\/\/example.com\").unwrap();\n let source_id = SourceId::new(RegistryKind, Remote(url));\n Dependency::parse(name, Some(\"1.0.0\"), &source_id).unwrap()\n }\n\n fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {\n pkgs\n }\n\n fn names(names: &[&'static str]) -> Vec<PackageId> {\n names.iter()\n .map(|name| PackageId::new(*name, \"1.0.0\", ®istry_loc()).unwrap())\n .collect()\n }\n\n #[test]\n pub fn test_resolving_empty_dependency_list() {\n let res = resolve([], &mut registry(vec!())).unwrap();\n\n assert_that(&res, equal_to(&names([])));\n }\n\n #[test]\n pub fn test_resolving_only_package() {\n let mut reg = registry(vec!(pkg(\"foo\")));\n let res = resolve([dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), equal_to(&names([\"foo\"])));\n }\n\n #[test]\n pub fn test_resolving_one_dep() {\n let mut reg = registry(vec!(pkg(\"foo\"), pkg(\"bar\")));\n let res = resolve([dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), equal_to(&names([\"foo\"])));\n }\n\n #[test]\n pub fn test_resolving_multiple_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\"), pkg!(\"bar\"), pkg!(\"baz\")));\n let res = resolve([dep(\"foo\"), dep(\"baz\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"baz\"])).exactly());\n }\n\n #[test]\n pub fn test_resolving_transitive_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\"), pkg!(\"bar\" => \"foo\")));\n let res = resolve([dep(\"bar\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\"])));\n }\n\n #[test]\n pub fn test_resolving_common_transitive_deps() {\n let mut reg = registry(vec!(pkg!(\"foo\" => \"bar\"), pkg!(\"bar\")));\n let res = resolve([dep(\"foo\"), dep(\"bar\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\"])));\n }\n\n #[test]\n pub fn test_resolving_with_dev_deps() {\n let mut reg = registry(vec!(\n pkg!(\"foo\" => \"bar\", dep(\"baz\").as_dev()),\n pkg!(\"baz\" => \"bat\", dep(\"bam\").as_dev()),\n pkg!(\"bar\"),\n pkg!(\"bat\")\n ));\n\n let res = resolve([dep(\"foo\"), dep(\"baz\").as_dev()], &mut reg).unwrap();\n\n assert_that(&res, contains(names([\"foo\", \"bar\", \"baz\"])));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reimplement behaviour in imag-header<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adds examples of opening a serial port<commit_after>extern crate serial;\n\n#[cfg(unix)]\nfn main() {\n use std::path::Path;\n\n serial::open(\"\/dev\/ttyUSB0\").unwrap();\n serial::open(Path::new(\"\/dev\/ttyUSB0\")).unwrap();\n serial::posix::TTYPort::open(Path::new(\"\/dev\/ttyUSB0\")).unwrap();\n}\n\n#[cfg(windows)]\nfn main() {\n serial::open(\"COM1\").unwrap();\n serial::windows::COMPort::open(\"COM1\").unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a 3D fluid demo.<commit_after>extern crate nalgebra as na;\n\nuse na::{Isometry3, Point3, Vector3};\nuse ncollide3d::shape::{Ball, Cuboid, ShapeHandle};\nuse 
nphysics3d::force_generator::DefaultForceGeneratorSet;\nuse nphysics3d::joint::{DefaultJointConstraintSet, FreeJoint, RevoluteJoint};\nuse nphysics3d::object::{\n BodyPartHandle, ColliderDesc, DefaultBodySet, DefaultColliderSet, Ground, MultibodyDesc,\n RigidBodyDesc,\n};\nuse nphysics3d::world::{DefaultGeometricalWorld, DefaultMechanicalWorld};\nuse nphysics_testbed3d::Testbed;\nuse salva3d::boundary::Boundary;\nuse salva3d::coupling::{ColliderCouplingManager, CouplingMethod};\nuse salva3d::fluid::Fluid;\nuse salva3d::LiquidWorld;\nuse std::f32;\n\npub fn init_world(testbed: &mut Testbed) {\n \/*\n * World\n *\/\n let mechanical_world = DefaultMechanicalWorld::new(Vector3::new(0.0, -9.81, 0.0));\n let geometrical_world = DefaultGeometricalWorld::new();\n let mut bodies = DefaultBodySet::new();\n let mut colliders = DefaultColliderSet::new();\n let joint_constraints = DefaultJointConstraintSet::new();\n let force_generators = DefaultForceGeneratorSet::new();\n\n \/*\n * Liquid world.\n *\/\n let particle_rad = 0.1;\n let mut liquid_world = LiquidWorld::new(particle_rad, 1.5);\n let mut coupling_manager = ColliderCouplingManager::new();\n\n \/\/ Liquid.\n let mut points1 = Vec::new();\n let mut points2 = Vec::new();\n let ni = 8;\n let nj = 8;\n let nk = 8;\n\n let shift2 = (nj as f32) * particle_rad * 2.0 + particle_rad;\n\n for i in 0..ni {\n for j in 0..nj {\n for k in 0..nk {\n let x = (i as f32) * particle_rad * 2.0 - ni as f32 * particle_rad;\n let y = (j as f32 + 1.0) * particle_rad * 2.0;\n let z = (k as f32) * particle_rad * 2.0 - nk as f32 * particle_rad;\n points1.push(Point3::new(x, y, z));\n points2.push(Point3::new(x, y + shift2, z));\n }\n }\n }\n\n let fluid = Fluid::new(points1, particle_rad, 1.2, 0.001);\n let fluid_handle = liquid_world.add_fluid(fluid);\n testbed.set_fluid_color(fluid_handle, Point3::new(0.8, 0.7, 1.0));\n\n let fluid = Fluid::new(points2, particle_rad, 1.0, 0.001);\n let fluid_handle = liquid_world.add_fluid(fluid);\n testbed.set_fluid_color(fluid_handle, Point3::new(0.6, 0.8, 0.5));\n\n \/*\n * Ground.\n *\/\n let ground_thickness = 0.1;\n let ground_half_width = 1.15;\n let ground_shape = ShapeHandle::new(Cuboid::new(Vector3::new(\n ground_half_width,\n ground_thickness,\n ground_half_width,\n )));\n\n let ground_handle = bodies.insert(Ground::new());\n\n let wall_poses = [\n Isometry3::translation(0.0, -ground_thickness, 0.0),\n Isometry3::new(\n Vector3::new(ground_half_width, ground_half_width, 0.0),\n Vector3::z() * (f32::consts::PI \/ 2.0),\n ),\n Isometry3::new(\n Vector3::new(-ground_half_width, ground_half_width, 0.0),\n Vector3::z() * (f32::consts::PI \/ 2.0),\n ),\n Isometry3::new(\n Vector3::new(0.0, ground_half_width, ground_half_width),\n Vector3::x() * (f32::consts::PI \/ 2.0),\n ),\n Isometry3::new(\n Vector3::new(0.0, ground_half_width, -ground_half_width),\n Vector3::x() * (f32::consts::PI \/ 2.0),\n ),\n ];\n\n for pose in wall_poses.into_iter() {\n let co = ColliderDesc::new(ground_shape.clone())\n .position(*pose)\n .build(BodyPartHandle(ground_handle, 0));\n let co_handle = colliders.insert(co);\n let bo_handle = liquid_world.add_boundary(Boundary::new(Vec::new()));\n coupling_manager.register_coupling(\n bo_handle,\n co_handle,\n CouplingMethod::DynamicContactSampling,\n );\n }\n\n \/*\n * Create a cuboid.\n *\/\n let rad = 0.2;\n let cuboid = ShapeHandle::new(Cuboid::new(Vector3::repeat(rad)));\n\n \/\/ Build the rigid body.\n let rb = RigidBodyDesc::new().translation(Vector3::y() * 7.0).build();\n let rb_handle = 
bodies.insert(rb);\n\n \/\/ Build the collider.\n let co = ColliderDesc::new(cuboid.clone())\n .density(1.0)\n .build(BodyPartHandle(rb_handle, 0));\n let co_handle = colliders.insert(co);\n let bo_handle = liquid_world.add_boundary(Boundary::new(Vec::new()));\n coupling_manager.register_coupling(\n bo_handle,\n co_handle,\n CouplingMethod::DynamicContactSampling,\n );\n\n \/*\n \/*\n * Create the deformable body and a collider for its boundary.\n *\/\n let mut deformable = FEMSurfaceDesc::quad(10, 1)\n .scale(Vector2::new(5.0, 0.5))\n .translation(Vector2::y() * 15.0)\n .young_modulus(500.0)\n .mass_damping(0.2)\n .density(2.0)\n .build();\n let collider_desc = deformable.boundary_collider_desc();\n let deformable_handle = bodies.insert(deformable);\n\n let co = collider_desc.build(deformable_handle);\n let co_handle = colliders.insert(co);\n let bo_handle = liquid_world.add_boundary(Boundary::new(Vec::new()));\n coupling_manager.register_coupling(\n bo_handle,\n co_handle,\n CouplingMethod::DynamicContactSampling,\n );\n\n \/*\n * Create a multibody.\n *\/\n let rad = 0.2;\n let num = 4;\n let body_shift = Vector2::x() * (rad * 2.5);\n let free = FreeJoint::new(Isometry2::translation(3.0, 20.0));\n\n let mut multibody_desc = MultibodyDesc::new(free);\n let mut curr = &mut multibody_desc;\n\n \/\/ Rotate the first joint so that the chain is vertical.\n let revo = RevoluteJoint::new(f32::consts::FRAC_PI_2);\n curr = curr.add_child(revo).set_body_shift(body_shift);\n\n for _ in 1usize..num {\n let revo = RevoluteJoint::new(0.0);\n curr = curr.add_child(revo).set_body_shift(body_shift);\n }\n\n let multibody = multibody_desc.build();\n let multibody_handle = bodies.insert(multibody);\n testbed.set_body_color(multibody_handle, Point3::new(0.7, 0.4, 0.5));\n\n \/\/ Create one collider for each link.\n let ball = ShapeHandle::new(Ball::new(rad));\n let collider_desc = ColliderDesc::new(ball.clone()).density(1.0);\n\n for i in 0..num + 1 {\n let co = collider_desc.build(BodyPartHandle(multibody_handle, i));\n let co_handle = colliders.insert(co);\n let bo_handle = liquid_world.add_boundary(Boundary::new(Vec::new()));\n coupling_manager.register_coupling(\n bo_handle,\n co_handle,\n CouplingMethod::DynamicContactSampling,\n );\n }\n *\/\n \/*\n * Set up the testbed.\n *\/\n testbed.set_body_wireframe(ground_handle, true);\n testbed.set_ground_handle(Some(ground_handle));\n testbed.set_world(\n mechanical_world,\n geometrical_world,\n bodies,\n colliders,\n joint_constraints,\n force_generators,\n );\n testbed.set_liquid_world(liquid_world, coupling_manager);\n}\n\nfn main() {\n let testbed = Testbed::from_builders(0, vec![(\"Boxes\", init_world)]);\n testbed.run()\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Implementation of `std::os` functionality for unix systems\n\n#![allow(unused_imports)] \/\/ lots of cfg code here\n\nuse os::unix::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io::{self, Read, Write};\nuse iter;\nuse marker::PhantomData;\nuse mem;\nuse memchr;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse str;\nuse sys_common::mutex::Mutex;\nuse sys::{cvt, fd, syscall};\nuse vec;\n\nconst TMPBUF_SZ: usize = 128;\nstatic ENV_LOCK: Mutex = Mutex::new();\n\n\/\/\/ Returns the platform-specific value of errno\npub fn errno() -> i32 {\n 0\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errno: i32) -> String {\n if let Some(string) = syscall::STR_ERROR.get(errno as usize) {\n string.to_string()\n } else {\n \"unknown error\".to_string()\n }\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n let mut buf = [0; 4096];\n let count = cvt(syscall::getcwd(&mut buf))?;\n Ok(PathBuf::from(OsString::from_vec(buf[.. count].to_vec())))\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n cvt(syscall::chdir(p.to_str().unwrap())).and(Ok(()))\n}\n\npub struct SplitPaths<'a> {\n iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,\n fn(&'a [u8]) -> PathBuf>,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n fn bytes_to_path(b: &[u8]) -> PathBuf {\n PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))\n }\n fn is_colon(b: &u8) -> bool { *b == b':' }\n let unparsed = unparsed.as_bytes();\n SplitPaths {\n iter: unparsed.split(is_colon as fn(&u8) -> bool)\n .map(bytes_to_path as fn(&[u8]) -> PathBuf)\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b':';\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref().as_bytes();\n if i > 0 { joined.push(sep) }\n if path.contains(&sep) {\n return Err(JoinPathsError)\n }\n joined.extend_from_slice(path);\n }\n Ok(OsStringExt::from_vec(joined))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains separator `:`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n use fs::File;\n\n let mut file = File::open(\"sys:exe\")?;\n\n let mut path = String::new();\n file.read_to_string(&mut path)?;\n\n if path.ends_with('\\n') {\n path.pop();\n }\n\n Ok(PathBuf::from(path))\n}\n\npub struct Env {\n iter: vec::IntoIter<(OsString, OsString)>,\n _dont_send_or_sync_me: PhantomData<*mut ()>,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n\/\/\/ Returns a vector of (variable, value) byte-vector pairs for all the\n\/\/\/ environment variables of the current process.\npub fn env() -> Env {\n let mut variables: Vec<(OsString, OsString)> = Vec::new();\n if let Ok(mut file) = ::fs::File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n let mut parts = line.splitn(2, 
'=');\n if let Some(name) = parts.next() {\n let value = parts.next().unwrap_or(\"\");\n variables.push((OsString::from(name.to_string()),\n OsString::from(value.to_string())));\n }\n }\n }\n }\n Env { iter: variables.into_iter(), _dont_send_or_sync_me: PhantomData }\n}\n\npub fn getenv(key: &OsStr) -> io::Result<Option<OsString>> {\n if ! key.is_empty() {\n if let Ok(mut file) = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap())) {\n let mut string = String::new();\n file.read_to_string(&mut string)?;\n Ok(Some(OsString::from(string)))\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n}\n\npub fn setenv(key: &OsStr, value: &OsStr) -> io::Result<()> {\n if ! key.is_empty() {\n let mut file = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n file.write_all(value.as_bytes())?;\n file.set_len(value.len() as u64)?;\n }\n Ok(())\n}\n\npub fn unsetenv(key: &OsStr) -> io::Result<()> {\n ::fs::remove_file(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n Ok(())\n}\n\npub fn page_size() -> usize {\n 4096\n}\n\npub fn temp_dir() -> PathBuf {\n ::env::var_os(\"TMPDIR\").map(PathBuf::from).unwrap_or_else(|| {\n PathBuf::from(\"\/tmp\")\n })\n}\n\npub fn home_dir() -> Option<PathBuf> {\n return ::env::var_os(\"HOME\").map(PathBuf::from);\n}\n\npub fn exit(code: i32) -> ! {\n let _ = syscall::exit(code as usize);\n unreachable!();\n}\n<commit_msg>Use libc errno<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for unix systems\n\n#![allow(unused_imports)] \/\/ lots of cfg code here\n\nuse os::unix::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io::{self, Read, Write};\nuse iter;\nuse marker::PhantomData;\nuse mem;\nuse memchr;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse str;\nuse sys_common::mutex::Mutex;\nuse sys::{cvt, fd, syscall};\nuse vec;\n\nconst TMPBUF_SZ: usize = 128;\nstatic ENV_LOCK: Mutex = Mutex::new();\n\nextern {\n #[link_name = \"__errno_location\"]\n fn errno_location() -> *mut i32;\n}\n\n\/\/\/ Returns the platform-specific value of errno\npub fn errno() -> i32 {\n unsafe {\n (*errno_location())\n }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errno: i32) -> String {\n if let Some(string) = syscall::STR_ERROR.get(errno as usize) {\n string.to_string()\n } else {\n \"unknown error\".to_string()\n }\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n let mut buf = [0; 4096];\n let count = cvt(syscall::getcwd(&mut buf))?;\n Ok(PathBuf::from(OsString::from_vec(buf[.. 
count].to_vec())))\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n cvt(syscall::chdir(p.to_str().unwrap())).and(Ok(()))\n}\n\npub struct SplitPaths<'a> {\n iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,\n fn(&'a [u8]) -> PathBuf>,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n fn bytes_to_path(b: &[u8]) -> PathBuf {\n PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))\n }\n fn is_colon(b: &u8) -> bool { *b == b':' }\n let unparsed = unparsed.as_bytes();\n SplitPaths {\n iter: unparsed.split(is_colon as fn(&u8) -> bool)\n .map(bytes_to_path as fn(&[u8]) -> PathBuf)\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b':';\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref().as_bytes();\n if i > 0 { joined.push(sep) }\n if path.contains(&sep) {\n return Err(JoinPathsError)\n }\n joined.extend_from_slice(path);\n }\n Ok(OsStringExt::from_vec(joined))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains separator `:`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n use fs::File;\n\n let mut file = File::open(\"sys:exe\")?;\n\n let mut path = String::new();\n file.read_to_string(&mut path)?;\n\n if path.ends_with('\\n') {\n path.pop();\n }\n\n Ok(PathBuf::from(path))\n}\n\npub struct Env {\n iter: vec::IntoIter<(OsString, OsString)>,\n _dont_send_or_sync_me: PhantomData<*mut ()>,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() }\n fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }\n}\n\n\/\/\/ Returns a vector of (variable, value) byte-vector pairs for all the\n\/\/\/ environment variables of the current process.\npub fn env() -> Env {\n let mut variables: Vec<(OsString, OsString)> = Vec::new();\n if let Ok(mut file) = ::fs::File::open(\"env:\") {\n let mut string = String::new();\n if file.read_to_string(&mut string).is_ok() {\n for line in string.lines() {\n let mut parts = line.splitn(2, '=');\n if let Some(name) = parts.next() {\n let value = parts.next().unwrap_or(\"\");\n variables.push((OsString::from(name.to_string()),\n OsString::from(value.to_string())));\n }\n }\n }\n }\n Env { iter: variables.into_iter(), _dont_send_or_sync_me: PhantomData }\n}\n\npub fn getenv(key: &OsStr) -> io::Result<Option<OsString>> {\n if ! key.is_empty() {\n if let Ok(mut file) = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap())) {\n let mut string = String::new();\n file.read_to_string(&mut string)?;\n Ok(Some(OsString::from(string)))\n } else {\n Ok(None)\n }\n } else {\n Ok(None)\n }\n}\n\npub fn setenv(key: &OsStr, value: &OsStr) -> io::Result<()> {\n if ! 
key.is_empty() {\n let mut file = ::fs::File::open(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n file.write_all(value.as_bytes())?;\n file.set_len(value.len() as u64)?;\n }\n Ok(())\n}\n\npub fn unsetenv(key: &OsStr) -> io::Result<()> {\n ::fs::remove_file(&(\"env:\".to_owned() + key.to_str().unwrap()))?;\n Ok(())\n}\n\npub fn page_size() -> usize {\n 4096\n}\n\npub fn temp_dir() -> PathBuf {\n ::env::var_os(\"TMPDIR\").map(PathBuf::from).unwrap_or_else(|| {\n PathBuf::from(\"\/tmp\")\n })\n}\n\npub fn home_dir() -> Option<PathBuf> {\n return ::env::var_os(\"HOME\").map(PathBuf::from);\n}\n\npub fn exit(code: i32) -> ! {\n let _ = syscall::exit(code as usize);\n unreachable!();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>walk example<commit_after>extern crate ext4;\n\nuse std::env;\nuse std::fs;\n\nfn main() {\n let r = fs::File::open(env::args().nth(1).expect(\"one argument\")).expect(\"openable file\");\n let mut options = ext4::Options::default();\n options.checksums = ext4::Checksums::Enabled;\n let mut vol = ext4::SuperBlock::new_with_options(r, &options).expect(\"ext4 volume\");\n let root = vol.root().expect(\"root\");\n vol.walk(&root, \"\/\".to_string(), &mut |_, path, _, _| {\n println!(\"{}\", path);\n Ok(true)\n }).expect(\"walk\");\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet, HashMap};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n \"Unlicense OR MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. 
It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n \"ordslice\", \/\/ Apache-2.0, rls\n \"cloudabi\", \/\/ BSD-2-Clause, (rls -> crossbeam-channel 0.2 -> rand 0.5)\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"chalk-engine\"),\n Crate(\"chalk-macros\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"datafrog\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"polonius-engine\"),\n Crate(\"pkg-config\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-hash\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempfile\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, 
PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(' ');\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n\n check_crate_duplicate(&resolve, bad);\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run 
`cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n\nfn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {\n const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[\n \/\/ These two crates take quite a long time to build, let's not let two\n \/\/ versions of them accidentally sneak into our dependency graph to\n \/\/ ensure we keep our CI times under control\n \/\/ \"cargo\", \/\/ FIXME(#53005)\n \/\/ \"rustc-ap-syntax\", \/\/ FIXME(#53006)\n ];\n let mut name_to_id = HashMap::new();\n for node in resolve.nodes.iter() {\n name_to_id.entry(node.id.split_whitespace().next().unwrap())\n .or_insert(Vec::new())\n .push(&node.id);\n }\n\n for name in FORBIDDEN_TO_HAVE_DUPLICATES {\n if name_to_id[name].len() <= 1 {\n continue\n }\n println!(\"crate `{}` is duplicated in `Cargo.lock`\", name);\n for id in name_to_id[name].iter() {\n println!(\" * {}\", id);\n }\n *bad = true;\n }\n}\n<commit_msg>Deny future duplication of rustc-ap-syntax<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet, HashMap};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n \"Unlicense OR MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n \"ordslice\", \/\/ Apache-2.0, rls\n \"cloudabi\", \/\/ BSD-2-Clause, (rls -> crossbeam-channel 0.2 -> rand 0.5)\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. 
Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"chalk-engine\"),\n Crate(\"chalk-macros\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"datafrog\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"polonius-engine\"),\n Crate(\"pkg-config\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-hash\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempfile\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(' ');\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. 
Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n\n check_crate_duplicate(&resolve, bad);\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n\nfn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {\n const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[\n \/\/ These two crates take quite a long time to build, let's not let two\n \/\/ versions of them accidentally sneak into our dependency graph to\n \/\/ ensure we keep our CI times under control\n \/\/ \"cargo\", \/\/ FIXME(#53005)\n \"rustc-ap-syntax\",\n ];\n let mut name_to_id = HashMap::new();\n for node in resolve.nodes.iter() {\n name_to_id.entry(node.id.split_whitespace().next().unwrap())\n .or_insert(Vec::new())\n .push(&node.id);\n }\n\n for name in FORBIDDEN_TO_HAVE_DUPLICATES {\n if name_to_id[name].len() <= 1 {\n continue\n }\n println!(\"crate `{}` is duplicated in `Cargo.lock`\", name);\n for id in name_to_id[name].iter() {\n println!(\" * {}\", id);\n }\n *bad = true;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\n#![deny(warnings)]\n\nuse std::collections::hash_map::Entry;\nuse std::collections::{HashMap, HashSet};\nuse std::env;\nuse std::fs;\nuse std::path::{Component, Path, PathBuf};\nuse std::rc::Rc;\n\nuse crate::Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => {\n match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n }\n };\n}\n\nfn main() {\n let docs = env::args_os().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: Rc<String>,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nfn small_url_encode(s: &str) -> String {\n s.replace(\"<\", \"%3C\")\n .replace(\">\", \"%3E\")\n .replace(\" \", \"%20\")\n .replace(\"?\", \"%3F\")\n .replace(\"'\", \"%27\")\n .replace(\"&\", \"%26\")\n .replace(\",\", \"%2C\")\n .replace(\":\", \"%3A\")\n .replace(\";\", \"%3B\")\n .replace(\"[\", \"%5B\")\n .replace(\"]\", \"%5D\")\n .replace(\"\\\"\", \"%22\")\n}\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i, _| {\n let frag = fragment.trim_start_matches(\"#\").to_owned();\n let encoded = small_url_encode(&frag);\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n \/\/ Just in case, we also add the encoded id.\n self.ids.insert(encoded);\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = Rc::new(String::new());\n }\n }\n }\n}\n\nfn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option<PathBuf> {\n \/\/ Ignore none HTML files.\n if file.extension().and_then(|s| s.to_str()) != Some(\"html\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\")\n || file.ends_with(\"interpret\/struct.ImmTy.html\")\n || file.ends_with(\"ast\/struct.ThinVec.html\")\n || file.ends_with(\"util\/struct.ThinVec.html\")\n || file.ends_with(\"layout\/struct.TyLayout.html\")\n || file.ends_with(\"humantime\/struct.Timestamp.html\")\n || file.ends_with(\"log\/index.html\")\n || file.ends_with(\"ty\/struct.Slice.html\")\n || file.ends_with(\"ty\/enum.Attributes.html\")\n || file.ends_with(\"ty\/struct.SymbolName.html\")\n || file.ends_with(\"io\/struct.IoSlice.html\")\n || file.ends_with(\"io\/struct.IoSliceMut.html\")\n {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\")\n || file.ends_with(\"struct.BTreeSet.html\")\n || file.ends_with(\"btree_map\/struct.BTreeMap.html\")\n || file.ends_with(\"hash_map\/struct.HashMap.html\")\n || file.ends_with(\"hash_set\/struct.HashSet.html\")\n || file.ends_with(\"sync\/struct.Lrc.html\")\n || 
file.ends_with(\"sync\/struct.RwLock.html\")\n {\n return None;\n }\n\n let res = load_file(cache, root, file, SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap().parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i, base| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\")\n || url.starts_with(\"https:\")\n || url.starts_with(\"javascript:\")\n || url.starts_with(\"ftp:\")\n || url.starts_with(\"irc:\")\n || url.starts_with(\"data:\")\n {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !base.is_empty() || !url.is_empty() {\n path.pop();\n for part in Path::new(base).join(url).components() {\n match part {\n Component::Prefix(_) | Component::RootDir => {\n \/\/ Avoid absolute paths as they make the docs not\n \/\/ relocatable by making assumptions on where the docs\n \/\/ are hosted relative to the site root.\n *errors = true;\n println!(\n \"{}:{}: absolute path - {}\",\n pretty_file.display(),\n i + 1,\n Path::new(base).join(url).display()\n );\n return;\n }\n Component::CurDir => {}\n Component::ParentDir => {\n path.pop();\n }\n Component::Normal(s) => {\n path.push(s);\n }\n }\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\n \"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display()\n );\n return;\n }\n if let Some(extension) = path.extension() {\n \/\/ Ignore none HTML files.\n if extension != \"html\" {\n return;\n }\n }\n let res = load_file(cache, root, &path, FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => {\n panic!(\"error loading {}: {}\", path.display(), err);\n }\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\n \"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display()\n );\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-').all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n \/\/ These appear to be broken in mdbook right now?\n if fragment.starts_with(\"-\") {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \", pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = 
path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(\n cache: &mut Cache,\n root: &Path,\n file: &Path,\n redirect: Redirect,\n) -> Result<(PathBuf, Rc<String>), LoadError> {\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let (maybe_redirect, contents) = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => (None, entry.get().source.clone()),\n Entry::Vacant(entry) => {\n let contents = match fs::read_to_string(file) {\n Ok(s) => Rc::new(s),\n Err(err) => {\n return Err(if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.to_path_buf(), err)\n } else {\n LoadError::IOError(err)\n });\n }\n };\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry { source: contents.clone(), ids: HashSet::new() });\n }\n (maybe, contents)\n }\n };\n match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {\n Some(redirect_file) => load_file(cache, root, &redirect_file, FromRedirect(true)),\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = lines.nth(6)?;\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {\n let mut base = \"\";\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n \/\/ The base tag should always be the first link in the document so\n \/\/ we can get away with using one pass.\n let is_base = line[..j].ends_with(\"<base\");\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_start_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_start_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n if is_base {\n base = url;\n continue;\n }\n f(url, i, base)\n }\n }\n}\n<commit_msg>Cleanup linkchecker whitelist<commit_after>\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! 
but this should catch the majority of \"broken link\" cases.\n\n#![deny(warnings)]\n\nuse std::collections::hash_map::Entry;\nuse std::collections::{HashMap, HashSet};\nuse std::env;\nuse std::fs;\nuse std::path::{Component, Path, PathBuf};\nuse std::rc::Rc;\n\nuse crate::Redirect::*;\n\nmacro_rules! t {\n ($e:expr) => {\n match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n }\n };\n}\n\nfn main() {\n let docs = env::args_os().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: Rc<String>,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nfn small_url_encode(s: &str) -> String {\n s.replace(\"<\", \"%3C\")\n .replace(\">\", \"%3E\")\n .replace(\" \", \"%20\")\n .replace(\"?\", \"%3F\")\n .replace(\"'\", \"%27\")\n .replace(\"&\", \"%26\")\n .replace(\",\", \"%2C\")\n .replace(\":\", \"%3A\")\n .replace(\";\", \"%3B\")\n .replace(\"[\", \"%5B\")\n .replace(\"]\", \"%5D\")\n .replace(\"\\\"\", \"%22\")\n}\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i, _| {\n let frag = fragment.trim_start_matches(\"#\").to_owned();\n let encoded = small_url_encode(&frag);\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n \/\/ Just in case, we also add the encoded id.\n self.ids.insert(encoded);\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = Rc::new(String::new());\n }\n }\n }\n}\n\nfn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option<PathBuf> {\n \/\/ Ignore none HTML files.\n if file.extension().and_then(|s| s.to_str()) != Some(\"html\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/io\/struct.IoSlice.html\")\n || file.ends_with(\"std\/string\/struct.String.html\")\n {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"alloc\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"alloc\/collections\/btree_map\/struct.BTreeMap.html\")\n || file.ends_with(\"alloc\/collections\/btree_set\/struct.BTreeSet.html\")\n || file.ends_with(\"std\/collections\/btree_map\/struct.BTreeMap.html\")\n || file.ends_with(\"std\/collections\/btree_set\/struct.BTreeSet.html\")\n || file.ends_with(\"std\/collections\/hash_map\/struct.HashMap.html\")\n || file.ends_with(\"std\/collections\/hash_set\/struct.HashSet.html\")\n {\n return None;\n }\n\n let res = load_file(cache, root, file, 
SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap().parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i, base| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\")\n || url.starts_with(\"https:\")\n || url.starts_with(\"javascript:\")\n || url.starts_with(\"ftp:\")\n || url.starts_with(\"irc:\")\n || url.starts_with(\"data:\")\n {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !base.is_empty() || !url.is_empty() {\n path.pop();\n for part in Path::new(base).join(url).components() {\n match part {\n Component::Prefix(_) | Component::RootDir => {\n \/\/ Avoid absolute paths as they make the docs not\n \/\/ relocatable by making assumptions on where the docs\n \/\/ are hosted relative to the site root.\n *errors = true;\n println!(\n \"{}:{}: absolute path - {}\",\n pretty_file.display(),\n i + 1,\n Path::new(base).join(url).display()\n );\n return;\n }\n Component::CurDir => {}\n Component::ParentDir => {\n path.pop();\n }\n Component::Normal(s) => {\n path.push(s);\n }\n }\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\n \"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display()\n );\n return;\n }\n if let Some(extension) = path.extension() {\n \/\/ Ignore none HTML files.\n if extension != \"html\" {\n return;\n }\n }\n let res = load_file(cache, root, &path, FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => {\n panic!(\"error loading {}: {}\", path.display(), err);\n }\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\n \"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display()\n );\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-').all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n \/\/ These appear to be broken in mdbook right now?\n if fragment.starts_with(\"-\") {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \", pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(\n cache: 
&mut Cache,\n root: &Path,\n file: &Path,\n redirect: Redirect,\n) -> Result<(PathBuf, Rc<String>), LoadError> {\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let (maybe_redirect, contents) = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => (None, entry.get().source.clone()),\n Entry::Vacant(entry) => {\n let contents = match fs::read_to_string(file) {\n Ok(s) => Rc::new(s),\n Err(err) => {\n return Err(if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.to_path_buf(), err)\n } else {\n LoadError::IOError(err)\n });\n }\n };\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry { source: contents.clone(), ids: HashSet::new() });\n }\n (maybe, contents)\n }\n };\n match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {\n Some(redirect_file) => load_file(cache, root, &redirect_file, FromRedirect(true)),\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = lines.nth(6)?;\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {\n let mut base = \"\";\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n \/\/ The base tag should always be the first link in the document so\n \/\/ we can get away with using one pass.\n let is_base = line[..j].ends_with(\"<base\");\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_start_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_start_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n if is_base {\n base = url;\n continue;\n }\n f(url, i, base)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add first hack<commit_after>use std::io::buffered::BufferedStream;\nuse std::io::net::ip::SocketAddr;\nuse std::io::net::tcp::TcpStream;\n\nstatic PROTO_VERSION: i32 = 196608; \/\/ 3 << 16\n\n#[deriving(ToStr)]\nenum AuthType {\n AuthUnknown,\n AuthOk,\n AuthKerberosV4,\n AuthKerberosV5,\n AuthClearTextPassword,\n AuthCryptPassword(u16),\n AuthMD5Password(u32),\n AuthSCMCredential \n}\n\nfn write_cstring(io: &mut BufferedStream<TcpStream>, str: &str) {\n io.write_str(str);\n io.write_u8(0);\n}\n\nfn write_startup_message(io: &mut BufferedStream<TcpStream>, params: &[(&str, &str)]) {\n let mut sz: uint = 8 + 1;\n\n for &(k, v) in params.iter() {\n sz += k.len() + 1 + v.len() + 1;\n } \n\n io.write_be_i32(sz as i32);\n io.write_be_i32(PROTO_VERSION);\n\n for &(k, v) in params.iter() {\n write_cstring(io, k);\n write_cstring(io, v);\n }\n io.write_u8(0);\n io.flush();\n}\n\n\/\/ body_size excludes the typ byte and the size i32\nfn write_message_header(io: &mut 
BufferedStream<TcpStream>, typ: u8, payload: uint) {\n io.write_u8(typ);\n io.write_be_i32((payload + 4) as i32);\n}\n\nfn write_query_message(io: &mut BufferedStream<TcpStream>, query: &str) {\n write_message_header(io, 'Q' as u8, query.len() + 1); \n write_cstring(io, query);\n io.flush();\n}\n\nfn parse_auth_message(io: &mut BufferedStream<TcpStream>, rem_len: i32) -> AuthType {\n assert!(rem_len >= 4);\n let authtype = io.read_be_i32();\n match authtype {\n 0 => AuthOk, \n 1 => AuthKerberosV4,\n 2 => AuthKerberosV5,\n 3 => AuthClearTextPassword,\n 4 => {\n assert!(rem_len >= 4 + 2);\n let salt = io.read_be_u16();\n AuthCryptPassword(salt)\n }\n 5 => {\n assert!(rem_len >= 4 + 4);\n let salt = io.read_be_u32();\n AuthMD5Password(salt)\n }\n 6 => AuthSCMCredential,\n _ => AuthUnknown\n }\n}\n\nfn parse_message_header(io: &mut BufferedStream<TcpStream>) -> (u8, i32) {\n let typ = io.read_u8();\n let len = io.read_be_i32();\n return (typ, len);\n}\n\nfn main() {\n let addr = from_str::<SocketAddr>(\"127.0.0.1:5432\").unwrap();\n let tcp_stream = TcpStream::connect(addr).unwrap();\n let mut io = BufferedStream::new(tcp_stream);\n write_startup_message(&mut io, [(\"user\", \"mneumann\"), (\"database\", \"test\")]);\n\n let (typ, len) = parse_message_header(&mut io);\n println!(\"Typ: {:u} \/ len: {:d}\", typ, len);\n\n if typ == 82 \/* 'R' *\/ {\n let authmsg = parse_auth_message(&mut io, len-4);\n println!(\"{:?}\", authmsg);\n }\n else {\n fail!();\n }\n\n write_query_message(&mut io, \"select * from articles;\");\n \n let (typ, len) = parse_message_header(&mut io);\n println!(\"Typ: {:u} \/ len: {:d}\", typ, len);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>aggregation added<commit_after>\/\/ #![feature(scoped)]\n\/\/ #![feature(collections)]\n\nextern crate mmap;\nextern crate time;\nextern crate timely;\nextern crate columnar;\nextern crate dataflow_join;\n\nextern crate docopt;\nuse docopt::Docopt;\n\nuse std::thread;\n\nuse dataflow_join::graph::{GraphTrait, GraphMMap};\n\nuse timely::progress::timestamp::RootTimestamp;\nuse timely::progress::scope::Scope;\nuse timely::progress::nested::Summary::Local;\nuse timely::example_static::*;\nuse timely::communication::*;\nuse timely::communication::pact::Exchange;\n\nuse timely::networking::initialize_networking;\nuse timely::networking::initialize_networking_from_file;\n\nuse timely::drain::DrainExt;\n\nstatic USAGE: &'static str = \"\nUsage: pagerank <source> [options] [<arguments>...]\n\nOptions:\n -w <arg>, --workers <arg> number of workers per process [default: 1]\n -p <arg>, --processid <arg> identity of this process [default: 0]\n -n <arg>, --processes <arg> number of processes involved [default: 1]\n -h <arg>, --hosts <arg> list of host:port for workers\n\";\n\n\nfn main () {\n let args = Docopt::new(USAGE).and_then(|dopt| dopt.parse()).unwrap_or_else(|e| e.exit());\n\n \/\/ let workers = if let Ok(threads) = args.get_str(\"<workers>\").parse() { threads }\n \/\/ else { panic!(\"invalid setting for workers: {}\", args.get_str(\"<workers>\")) };\n \/\/ println!(\"starting pagerank dataflow with {:?} worker{}\", workers, if workers == 1 { \"\" } else { \"s\" });\n let source = args.get_str(\"<source>\").to_owned();\n\n let workers: u64 = if let Ok(threads) = args.get_str(\"-w\").parse() { threads }\n else { panic!(\"invalid setting for --workers: {}\", args.get_str(\"-t\")) };\n let process_id: u64 = if let Ok(proc_id) = args.get_str(\"-p\").parse() { proc_id }\n else { panic!(\"invalid setting for --processid: {}\", 
args.get_str(\"-p\")) };\n let processes: u64 = if let Ok(processes) = args.get_str(\"-n\").parse() { processes }\n else { panic!(\"invalid setting for --processes: {}\", args.get_str(\"-n\")) };\n\n println!(\"Starting pagerank dataflow with\");\n println!(\"\\tworkers:\\t{}\", workers);\n println!(\"\\tprocesses:\\t{}\", processes);\n println!(\"\\tprocessid:\\t{}\", process_id);\n\n \/\/ vector holding communicators to use; one per local worker.\n if processes > 1 {\n println!(\"Initializing BinaryCommunicator\");\n\n let hosts = args.get_str(\"-h\");\n let communicators = if hosts != \"\" {\n initialize_networking_from_file(hosts, process_id, workers).ok().expect(\"error initializing networking\")\n }\n else {\n let addresses = (0..processes).map(|index| format!(\"localhost:{}\", 2101 + index).to_string()).collect();\n initialize_networking(addresses, process_id, workers).ok().expect(\"error initializing networking\")\n };\n\n pagerank_multi(communicators, source);\n }\n else if workers > 1 {\n println!(\"Initializing ProcessCommunicator\");\n pagerank_multi(ProcessCommunicator::new_vector(workers), source);\n }\n else {\n println!(\"Initializing ThreadCommunicator\");\n pagerank_multi(vec![ThreadCommunicator], source);\n };\n}\n\nfn pagerank_multi<C>(communicators: Vec<C>, filename: String)\nwhere C: Communicator+Send {\n let mut guards = Vec::new();\n let workers = communicators.len();\n for communicator in communicators.into_iter() {\n let filename = filename.clone();\n guards.push(thread::Builder::new().name(format!(\"timely worker {}\", communicator.index()))\n .spawn(move || pagerank(communicator, filename, workers))\n .unwrap());\n }\n\n for guard in guards { guard.join().unwrap(); }\n}\n\nfn pagerank<C>(communicator: C, filename: String, workers: usize)\nwhere C: Communicator {\n let index = communicator.index() as usize;\n let peers = communicator.peers() as usize;\n\n let mut root = GraphRoot::new(communicator);\n let mut start = time::precise_time_s();\n let mut going = start;\n\n { \/\/ new scope avoids long borrow on root\n let mut builder = root.new_subgraph();\n\n \/\/ establish the beginnings of a loop,\n \/\/ 20 iterations, each time around += 1.\n let (helper, stream) = builder.loop_variable::<(u32, f32)>(RootTimestamp::new(20), Local(1));\n\n let graph = GraphMMap::<u32>::new(&filename);\n\n let nodes = graph.nodes();\n\n let mut src = vec![1.0; 1 + (nodes \/ peers as usize)]; \/\/ local rank accumulation\n let mut dst = vec![0.0; nodes]; \/\/ local rank accumulation\n\n\n \/\/ from feedback, place an operator that\n \/\/ aggregates and broadcasts ranks along edges.\n let ranks = stream.enable(builder).unary_notify(\n\n Exchange::new(|x: &(u32, f32)| x.0 as u64), \/\/ 1. how data should be exchanged\n format!(\"PageRank\"), \/\/ 2. a tasteful, descriptive name\n vec![RootTimestamp::new(0)], \/\/ 3. indicate an initial capability\n move |input, output, iterator| { \/\/ 4. provide the operator logic\n\n while let Some((iter, _)) = iterator.next() {\n\n if iter.inner == 10 {\n going = time::precise_time_s();\n }\n\n if iter.inner == 20 {\n if index == 0 {\n println!(\"average over 10 iters: {}\", (time::precise_time_s() - going) \/ 10.0);\n }\n }\n\n \/\/ \/---- should look familiar! 
----\\\n for node in 0..src.len() {\n src[node] = 0.15 + 0.85 * src[node];\n }\n\n for node in 0..src.len() {\n let edges = graph.edges(index + peers * node);\n let value = src[node] \/ edges.len() as f32;\n for &b in edges {\n dst[b as usize] += value;\n }\n }\n \/\/ \\------ end familiar part ------\/\n\n output.give_at(&iter, dst.drain_temp()\n .enumerate()\n .filter(|&(_,f)| f != 0.0)\n .map(|(u,f)| (u as u32, f)));\n\n for _ in 0..(graph.nodes() + 1) { dst.push(0.0); }\n\n }\n\n while let Some((iter, data)) = input.pull() {\n iterator.notify_at(&iter);\n for (node, rank) in data.drain_temp() {\n src[node as usize \/ peers] += rank;\n }\n }\n }\n );\n\n let local_index = index as usize % workers;\n\n let mut acc = vec![0.0; 1 + (nodes \/ workers)];\n\n ranks\n .unary_notify(\n Exchange::new(move |x: &(u32, f32)| (workers * (index \/ workers)) as u64 + (x.0 as u64 % workers as u64)),\n format!(\"Aggregation\"),\n vec![],\n move |input, output, iterator| {\n while let Some((iter, data)) = input.pull() {\n iterator.notify_at(&iter);\n for (node, rank) in data.drain_temp() {\n acc[node as usize \/ workers] += rank;\n }\n }\n\n while let Some((item, _)) = iterator.next() {\n\n output.give_at(&item, acc.drain_temp().enumerate().filter(|x| x.1 != 0.0)\n .map(|(u,f)| (((u * workers + local_index) as u32), f)));\n\n for _ in 0..(1 + (nodes\/workers)) { acc.push(0.0); }\n assert!(acc.len() == (1 + (nodes\/workers)));\n }\n }\n )\n .connect_loop(helper);\n }\n\n while root.step() { }\n\n if index == 0 {\n println!(\"elapsed: {}\", time::precise_time_s() - start);\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add iron-intercept example<commit_after>extern crate iron;\nextern crate multipart;\n\nuse iron::prelude::*;\n\nuse multipart::server::Entries;\nuse multipart::server::iron::Intercept;\n\nfn main() {\n \/\/ We start with a basic request handler chain.\n let mut chain = Chain::new(|req: &mut Request|\n if let Some(entries) = req.extensions.get::<Entries>() {\n Ok(Response::with(format!(\"{:?}\", entries)))\n } else {\n Ok(Response::with(\"Not a multipart request\"))\n }\n );\n\n \/\/ `Intercept` will read out the entries and place them as an extension in the request.\n \/\/ It has various builder-style methods for changing how it will behave, but has sane settings\n \/\/ by default.\n chain.link_before(Intercept::default());\n\n Iron::new(chain).http(\"localhost:80\").unwrap();\n}<|endoftext|>"} {"text":"<commit_before>use std::ops::DerefMut;\n\nuse runtime::Runtime;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Entry;\n\npub type EditResult<T> = Result<T, RuntimeError>;\n\npub trait Edit {\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()>;\n}\n\nimpl Edit for Entry {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n edit_in_tmpfile(rt, self.get_content_mut())\n .map(|_| ())\n }\n\n}\n\nimpl<'a> Edit for FileLockEntry<'a> {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n self.deref_mut().edit_content(rt)\n }\n\n}\n\npub fn edit_in_tmpfile(rt: &Runtime, s: &mut String) -> EditResult<()> {\n use tempfile::NamedTempFile;\n use std::io::Seek;\n use std::io::Read;\n use std::io::SeekFrom;\n use std::io::Write;\n\n let file = try!(NamedTempFile::new());\n let file_path = file.path();\n let mut file = try!(file.reopen());\n\n try!(file.write_all(&s.clone().into_bytes()[..]));\n try!(file.sync_data());\n\n if let Some(mut editor) = rt.editor() {\n let 
exit_status = editor.arg(file_path).status();\n\n match exit_status.map(|s| s.success()) {\n Ok(true) => {\n file.sync_data()\n .and_then(|_| file.seek(SeekFrom::Start(0)))\n .and_then(|_| file.read_to_string(s))\n .map(|_| ())\n .map_err(|e| RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e))))\n },\n Ok(false) => Err(RuntimeError::new(RuntimeErrorKind::ProcessExitFailure, None)),\n Err(e) => Err(RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e)))),\n }\n } else {\n Err(RuntimeError::new(RuntimeErrorKind::Instantiate, None))\n }\n}\n<commit_msg>Implement Edit for String<commit_after>use std::ops::DerefMut;\n\nuse runtime::Runtime;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Entry;\n\npub type EditResult<T> = Result<T, RuntimeError>;\n\npub trait Edit {\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()>;\n}\n\nimpl Edit for String {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n edit_in_tmpfile(rt, self).map(|_| ())\n }\n\n}\n\nimpl Edit for Entry {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n edit_in_tmpfile(rt, self.get_content_mut())\n .map(|_| ())\n }\n\n}\n\nimpl<'a> Edit for FileLockEntry<'a> {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n self.deref_mut().edit_content(rt)\n }\n\n}\n\npub fn edit_in_tmpfile(rt: &Runtime, s: &mut String) -> EditResult<()> {\n use tempfile::NamedTempFile;\n use std::io::Seek;\n use std::io::Read;\n use std::io::SeekFrom;\n use std::io::Write;\n\n let file = try!(NamedTempFile::new());\n let file_path = file.path();\n let mut file = try!(file.reopen());\n\n try!(file.write_all(&s.clone().into_bytes()[..]));\n try!(file.sync_data());\n\n if let Some(mut editor) = rt.editor() {\n let exit_status = editor.arg(file_path).status();\n\n match exit_status.map(|s| s.success()) {\n Ok(true) => {\n file.sync_data()\n .and_then(|_| file.seek(SeekFrom::Start(0)))\n .and_then(|_| file.read_to_string(s))\n .map(|_| ())\n .map_err(|e| RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e))))\n },\n Ok(false) => Err(RuntimeError::new(RuntimeErrorKind::ProcessExitFailure, None)),\n Err(e) => Err(RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e)))),\n }\n } else {\n Err(RuntimeError::new(RuntimeErrorKind::Instantiate, None))\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(deque_extras)]\n#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nextern crate glob;\n\nuse std::collections::HashMap;\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\nuse std::thread;\n\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::{parse, Job};\nuse self::variables::Variables;\nuse self::history::History;\nuse self::flow_control::{FlowControl, is_flow_control_command, Statement};\nuse self::status::{SUCCESS, NO_SUCH_COMMAND, TERMINATED};\nuse self::function::Function;\n\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod variables;\npub mod history;\npub mod flow_control;\npub mod status;\npub mod function;\n\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n variables: Variables,\n flow_control: FlowControl,\n directory_stack: DirectoryStack,\n history: History,\n functions: HashMap<String, Function>\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction 
fails\n pub fn new() -> Self {\n Shell {\n variables: Variables::new(),\n flow_control: FlowControl::new(),\n directory_stack: DirectoryStack::new().expect(\"\"),\n history: History::new(),\n functions: HashMap::new()\n }\n }\n\n pub fn print_prompt(&self) {\n self.print_prompt_prefix();\n match self.flow_control.current_statement {\n Statement::For(_, _) => self.print_for_prompt(),\n Statement::Function => self.print_function_prompt(),\n _ => self.print_default_prompt(),\n }\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n\n }\n\n \/\/ TODO eventually this thing should be gone\n fn print_prompt_prefix(&self) {\n let prompt_prefix = self.flow_control.modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", prompt_prefix);\n }\n\n fn print_for_prompt(&self) {\n print!(\"for> \");\n }\n\n fn print_function_prompt(&self) {\n print!(\"fn> \");\n }\n\n fn print_default_prompt(&self) {\n let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n print!(\"ion:{}# \", cwd);\n }\n\n fn on_command(&mut self, command_string: &str, commands: &HashMap<&str, Command>) {\n self.history.add(command_string.to_string());\n\n let mut jobs = parse(command_string);\n\n \/\/ Execute commands\n for job in jobs.drain(..) {\n if self.flow_control.collecting_block {\n \/\/ TODO move this logic into \"end\" command\n if job.command == \"end\" {\n self.flow_control.collecting_block = false;\n let block_jobs: Vec<Job> = self.flow_control\n .current_block\n .jobs\n .drain(..)\n .collect();\n match self.flow_control.current_statement.clone() {\n Statement::For(ref var, ref vals) => {\n let variable = var.clone();\n let values = vals.clone();\n for value in values {\n self.variables.set_var(&variable, &value);\n for job in block_jobs.iter() {\n self.run_job(job, commands);\n }\n }\n },\n Statement::Function(ref name) => {\n self.functions.insert(name.clone(), Function { name: name.clone(), jobs: block_jobs.clone() });\n },\n _ => {}\n }\n self.flow_control.current_statement = Statement::Default;\n } else {\n self.flow_control.current_block.jobs.push(job);\n }\n } else {\n if self.flow_control.skipping() && !is_flow_control_command(&job.command) {\n continue;\n }\n self.run_job(&job, commands);\n }\n }\n }\n\n fn run_job(&mut self, job: &Job, commands: &HashMap<&str, Command>) -> Option<i32> {\n let mut job = self.variables.expand_job(job);\n job.expand_globs();\n let exit_status = if let Some(command) = commands.get(job.command.as_str()) {\n Some((*command.main)(job.args.as_slice(), self))\n } else if self.functions.get(job.command.as_str()).is_some() { \/\/ Not really idiomatic but I don't know how to clone the value without borrowing self\n let function = self.functions.get(job.command.as_str()).unwrap().clone();\n let mut return_value = None;\n for function_job in function.jobs.iter() {\n return_value = self.run_job(&function_job, commands)\n }\n return_value\n } else {\n self.run_external_commmand(job)\n };\n if let Some(code) = exit_status {\n self.variables.set_var(\"?\", &code.to_string());\n self.history.previous_status = code;\n }\n exit_status\n }\n\n \/\/\/ Returns an exit code if a command was run\n fn run_external_commmand(&mut self, job: Job) -> Option<i32> {\n if job.background {\n thread::spawn(move || {\n let mut command = Shell::build_command(&job);\n command.stdin(process::Stdio::null());\n if let Ok(mut 
child) = command.spawn() {\n Shell::wait_and_get_status(&mut child, &job.command);\n }\n });\n None\n } else {\n if let Ok(mut child) = Shell::build_command(&job).spawn() {\n Some(Shell::wait_and_get_status(&mut child, &job.command))\n } else {\n println!(\"ion: command not found: {}\", job.command);\n Some(NO_SUCH_COMMAND)\n }\n }\n }\n\n fn build_command(job: &Job) -> process::Command {\n let mut command = process::Command::new(&job.command);\n for i in 1..job.args.len() {\n if let Some(arg) = job.args.get(i) {\n command.arg(arg);\n }\n }\n command\n }\n\n \/\/ TODO don't pass in command and do printing outside this function\n fn wait_and_get_status(child: &mut process::Child, command: &str) -> i32 {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n code\n } else {\n println!(\"{}: child ended by signal\", command);\n TERMINATED\n }\n }\n Err(err) => {\n println!(\"{}: Failed to wait: {}\", command, err);\n 100 \/\/ TODO what should we return here?\n }\n }\n }\n\n \/\/\/ Evaluates the source init file in the user's home directory. If the file does not exist,\n \/\/\/ the file will be created.\n fn evaluate_init_file(&mut self, commands: &HashMap<&'static str, Command>) {\n let mut source_file = std::env::home_dir().unwrap(); \/\/ Obtain home directory\n source_file.push(\".ionrc\"); \/\/ Location of ion init file\n\n if let Ok(mut file) = File::open(source_file.clone()) {\n let mut command_list = String::new();\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {:?}\", message, source_file.clone());\n } else {\n self.on_command(&command_list, commands);\n }\n } else {\n if let Err(message) = File::create(source_file) {\n println!(\"{}\", message);\n }\n }\n }\n\n \/\/\/ Evaluates the given file and returns 'SUCCESS' if it succeeds.\n fn source_command(&mut self, arguments: &[String]) -> i32 {\n let commands = Command::map();\n match arguments.iter().skip(1).next() {\n Some(argument) => {\n if let Ok(mut file) = File::open(&argument) {\n let mut command_list = String::new();\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, argument);\n return status::FAILURE;\n } else {\n self.on_command(&command_list, &commands);\n return status::SUCCESS;\n }\n } else {\n println!(\"Failed to open {}\", argument);\n return status::FAILURE;\n }\n },\n None => {\n self.evaluate_init_file(&commands);\n return status::SUCCESS;\n },\n }\n }\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| -> i32 {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box<Fn(&[String], &mut Shell) -> i32>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"Change the current directory\\n cd <path>\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.cd(args)\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Display the current directory stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.dirs(args)\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n if let Some(status) = args.get(1) {\n if let Ok(status) = status.parse::<i32>() {\n process::exit(status);\n }\n }\n process::exit(shell.history.previous_status);\n },\n });\n\n commands.insert(\"let\",\n Command {\n name: \"let\",\n help: \"View, set or unset variables\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.let_(args)\n },\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"Read some variables\\n read <variable>\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.read(args)\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Push a directory to the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.pushd(args)\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Pop a directory from the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.popd(args)\n },\n });\n\n commands.insert(\"history\",\n Command {\n name: \"history\",\n help: \"Display a log of all commands previously executed\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.history.history(args)\n },\n });\n\n commands.insert(\"if\",\n Command {\n name: \"if\",\n help: \"Conditionally execute code\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.if_(args)\n },\n });\n\n commands.insert(\"else\",\n Command {\n name: \"else\",\n help: \"Execute code if a previous condition was false\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.else_(args)\n },\n });\n\n commands.insert(\"end\",\n Command {\n name: \"end\",\n help: \"End a code block\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.end(args)\n },\n });\n\n commands.insert(\"for\",\n Command {\n name: \"for\",\n help: \"Iterate through a list\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.for_(args)\n },\n });\n\n commands.insert(\"source\",\n Command {\n name: \"source\",\n help: \"Evaluate the file following the command or re-initialize the init file\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.source_command(args)\n\n },\n });\n\n commands.insert(\"fn\",\n Command {\n name: \"fn\",\n help: \"Create a function\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.fn_(args)\n },\n });\n\n let command_helper: HashMap<&'static str, &'static str> = commands.iter()\n .map(|(k, v)| {\n (*k, v.help)\n })\n .collect();\n\n 
commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display helpful information about a given command, or list \\\n commands if none specified\\n help <command>\",\n main: box move |args: &[String], _: &mut Shell| -> i32 {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command.as_str()) {\n match command_helper.get(command.as_str()) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n SUCCESS\n },\n });\n\n commands\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n shell.evaluate_init_file(&commands);\n\n for arg in env::args().skip(1) {\n let mut command_list = String::new();\n if let Ok(mut file) = File::open(&arg) {\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, arg);\n }\n }\n shell.on_command(&command_list, &commands);\n \n \/\/ Exit with the previous command's exit status.\n process::exit(shell.history.previous_status);\n }\n\n shell.print_prompt();\n while let Some(command) = readln() {\n let command = command.trim();\n if !command.is_empty() {\n shell.on_command(command, &commands);\n }\n shell.print_prompt()\n }\n\n \/\/ Exit with the previous command's exit status.\n process::exit(shell.history.previous_status);\n}\n<commit_msg>Add dash c<commit_after>#![feature(deque_extras)]\n#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nextern crate glob;\n\nuse std::collections::HashMap;\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\nuse std::thread;\n\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::{parse, Job};\nuse self::variables::Variables;\nuse self::history::History;\nuse self::flow_control::{FlowControl, is_flow_control_command, Statement};\nuse self::status::{SUCCESS, NO_SUCH_COMMAND, TERMINATED};\nuse self::function::Function;\n\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod variables;\npub mod history;\npub mod flow_control;\npub mod status;\npub mod function;\n\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n variables: Variables,\n flow_control: FlowControl,\n directory_stack: DirectoryStack,\n history: History,\n functions: HashMap<String, Function>\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction fails\n pub fn new() -> Self {\n Shell {\n variables: Variables::new(),\n flow_control: FlowControl::new(),\n directory_stack: DirectoryStack::new().expect(\"\"),\n history: History::new(),\n functions: HashMap::new()\n }\n }\n\n pub fn print_prompt(&self) {\n self.print_prompt_prefix();\n match self.flow_control.current_statement {\n Statement::For(_, _) => self.print_for_prompt(),\n Statement::Function => self.print_function_prompt(),\n _ => self.print_default_prompt(),\n }\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n\n }\n\n \/\/ TODO eventually this thing should be gone\n fn print_prompt_prefix(&self) {\n let prompt_prefix = self.flow_control.modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", 
prompt_prefix);\n }\n\n fn print_for_prompt(&self) {\n print!(\"for> \");\n }\n\n fn print_function_prompt(&self) {\n print!(\"fn> \");\n }\n\n fn print_default_prompt(&self) {\n let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n print!(\"ion:{}# \", cwd);\n }\n\n fn on_command(&mut self, command_string: &str, commands: &HashMap<&str, Command>) {\n self.history.add(command_string.to_string());\n\n let mut jobs = parse(command_string);\n\n \/\/ Execute commands\n for job in jobs.drain(..) {\n if self.flow_control.collecting_block {\n \/\/ TODO move this logic into \"end\" command\n if job.command == \"end\" {\n self.flow_control.collecting_block = false;\n let block_jobs: Vec<Job> = self.flow_control\n .current_block\n .jobs\n .drain(..)\n .collect();\n match self.flow_control.current_statement.clone() {\n Statement::For(ref var, ref vals) => {\n let variable = var.clone();\n let values = vals.clone();\n for value in values {\n self.variables.set_var(&variable, &value);\n for job in block_jobs.iter() {\n self.run_job(job, commands);\n }\n }\n },\n Statement::Function(ref name) => {\n self.functions.insert(name.clone(), Function { name: name.clone(), jobs: block_jobs.clone() });\n },\n _ => {}\n }\n self.flow_control.current_statement = Statement::Default;\n } else {\n self.flow_control.current_block.jobs.push(job);\n }\n } else {\n if self.flow_control.skipping() && !is_flow_control_command(&job.command) {\n continue;\n }\n self.run_job(&job, commands);\n }\n }\n }\n\n fn run_job(&mut self, job: &Job, commands: &HashMap<&str, Command>) -> Option<i32> {\n let mut job = self.variables.expand_job(job);\n job.expand_globs();\n let exit_status = if let Some(command) = commands.get(job.command.as_str()) {\n Some((*command.main)(job.args.as_slice(), self))\n } else if self.functions.get(job.command.as_str()).is_some() { \/\/ Not really idiomatic but I don't know how to clone the value without borrowing self\n let function = self.functions.get(job.command.as_str()).unwrap().clone();\n let mut return_value = None;\n for function_job in function.jobs.iter() {\n return_value = self.run_job(&function_job, commands)\n }\n return_value\n } else {\n self.run_external_commmand(job)\n };\n if let Some(code) = exit_status {\n self.variables.set_var(\"?\", &code.to_string());\n self.history.previous_status = code;\n }\n exit_status\n }\n\n \/\/\/ Returns an exit code if a command was run\n fn run_external_commmand(&mut self, job: Job) -> Option<i32> {\n if job.background {\n thread::spawn(move || {\n let mut command = Shell::build_command(&job);\n command.stdin(process::Stdio::null());\n if let Ok(mut child) = command.spawn() {\n Shell::wait_and_get_status(&mut child, &job.command);\n }\n });\n None\n } else {\n if let Ok(mut child) = Shell::build_command(&job).spawn() {\n Some(Shell::wait_and_get_status(&mut child, &job.command))\n } else {\n println!(\"ion: command not found: {}\", job.command);\n Some(NO_SUCH_COMMAND)\n }\n }\n }\n\n fn build_command(job: &Job) -> process::Command {\n let mut command = process::Command::new(&job.command);\n for i in 1..job.args.len() {\n if let Some(arg) = job.args.get(i) {\n command.arg(arg);\n }\n }\n command\n }\n\n \/\/ TODO don't pass in command and do printing outside this function\n fn wait_and_get_status(child: &mut process::Child, command: &str) -> i32 {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n code\n } else {\n println!(\"{}: child ended by signal\", command);\n 
TERMINATED\n }\n }\n Err(err) => {\n println!(\"{}: Failed to wait: {}\", command, err);\n 100 \/\/ TODO what should we return here?\n }\n }\n }\n\n \/\/\/ Evaluates the source init file in the user's home directory. If the file does not exist,\n \/\/\/ the file will be created.\n fn evaluate_init_file(&mut self, commands: &HashMap<&'static str, Command>) {\n let mut source_file = std::env::home_dir().unwrap(); \/\/ Obtain home directory\n source_file.push(\".ionrc\"); \/\/ Location of ion init file\n\n if let Ok(mut file) = File::open(source_file.clone()) {\n let mut command_list = String::new();\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {:?}\", message, source_file.clone());\n } else {\n self.on_command(&command_list, commands);\n }\n } else {\n if let Err(message) = File::create(source_file) {\n println!(\"{}\", message);\n }\n }\n }\n\n \/\/\/ Evaluates the given file and returns 'SUCCESS' if it succeeds.\n fn source_command(&mut self, arguments: &[String]) -> i32 {\n let commands = Command::map();\n match arguments.iter().skip(1).next() {\n Some(argument) => {\n if let Ok(mut file) = File::open(&argument) {\n let mut command_list = String::new();\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, argument);\n return status::FAILURE;\n } else {\n self.on_command(&command_list, &commands);\n return status::SUCCESS;\n }\n } else {\n println!(\"Failed to open {}\", argument);\n return status::FAILURE;\n }\n },\n None => {\n self.evaluate_init_file(&commands);\n return status::SUCCESS;\n },\n }\n }\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| -> i32 {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box<Fn(&[String], &mut Shell) -> i32>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"Change the current directory\\n cd <path>\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.cd(args)\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Display the current directory stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.dirs(args)\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n if let Some(status) = args.get(1) {\n if let Ok(status) = status.parse::<i32>() {\n process::exit(status);\n }\n }\n process::exit(shell.history.previous_status);\n },\n });\n\n commands.insert(\"let\",\n Command {\n name: \"let\",\n help: \"View, set or unset variables\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.let_(args)\n },\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"Read some variables\\n read <variable>\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.variables.read(args)\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Push a directory to the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.pushd(args)\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Pop a directory from the stack\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.directory_stack.popd(args)\n },\n });\n\n commands.insert(\"history\",\n Command {\n name: \"history\",\n help: \"Display a log of all commands previously executed\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.history.history(args)\n },\n });\n\n commands.insert(\"if\",\n Command {\n name: \"if\",\n help: \"Conditionally execute code\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.if_(args)\n },\n });\n\n commands.insert(\"else\",\n Command {\n name: \"else\",\n help: \"Execute code if a previous condition was false\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.else_(args)\n },\n });\n\n commands.insert(\"end\",\n Command {\n name: \"end\",\n help: \"End a code block\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.end(args)\n },\n });\n\n commands.insert(\"for\",\n Command {\n name: \"for\",\n help: \"Iterate through a list\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.for_(args)\n },\n });\n\n commands.insert(\"source\",\n Command {\n name: \"source\",\n help: \"Evaluate the file following the command or re-initialize the init file\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.source_command(args)\n\n },\n });\n\n commands.insert(\"fn\",\n Command {\n name: \"fn\",\n help: \"Create a function\",\n main: box |args: &[String], shell: &mut Shell| -> i32 {\n shell.flow_control.fn_(args)\n },\n });\n\n let command_helper: HashMap<&'static str, &'static str> = commands.iter()\n .map(|(k, v)| {\n (*k, v.help)\n })\n .collect();\n\n 
commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display helpful information about a given command, or list \\\n commands if none specified\\n help <command>\",\n main: box move |args: &[String], _: &mut Shell| -> i32 {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command.as_str()) {\n match command_helper.get(command.as_str()) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n SUCCESS\n },\n });\n\n commands\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n shell.evaluate_init_file(&commands);\n\n let mut dash_c = false;\n for arg in env::args().skip(1) {\n if arg == \"-c\" {\n dash_c = true;\n } else {\n if dash_c {\n shell.on_command(&arg, &commands);\n } else {\n match File::open(&arg) {\n Ok(mut file) => {\n let mut command_list = String::new();\n match file.read_to_string(&mut command_list) {\n Ok(_) => shell.on_command(&command_list, &commands),\n Err(err) => println!(\"ion: failed to read {}: {}\", arg, err)\n }\n },\n Err(err) => println!(\"ion: failed to open {}: {}\", arg, err)\n }\n }\n \n \/\/ Exit with the previous command's exit status.\n process::exit(shell.history.previous_status);\n }\n }\n\n shell.print_prompt();\n while let Some(command) = readln() {\n let command = command.trim();\n if !command.is_empty() {\n shell.on_command(command, &commands);\n }\n shell.print_prompt()\n }\n\n \/\/ Exit with the previous command's exit status.\n process::exit(shell.history.previous_status);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added a glider test<commit_after><|endoftext|>"} {"text":"<commit_before>mod css;\nmod dom;\nmod html;\nmod style;\n\nfn main() {\n let root_node = html::parse(\"<div id='a' class='b'>Hello <em>there<\/em><\/div>\".to_string());\n println!(\"{}\\n\", root_node);\n\n let stylesheet = css::parse(\"div, *, span#foo.bar { display: block; height: 1px; }\".to_string());\n println!(\"{}\\n\", stylesheet);\n\n let style_tree = style::style_tree(&root_node, &stylesheet);\n println!(\"{}\\n\", style_tree);\n}\n<commit_msg>Add command-line args and file input<commit_after>extern crate getopts;\n\nuse getopts::{optopt,getopts};\nuse std::os::args;\nuse std::io::fs::File;\n\nmod css;\nmod dom;\nmod html;\nmod style;\n\nfn main() {\n \/\/ Parse command-line options:\n let opts = [\n optopt(\"h\", \"html\", \"HTML document\", \"FILENAME\"),\n optopt(\"c\", \"css\", \"CSS stylesheet\", \"FILENAME\"),\n ];\n let matches = match getopts(args().tail(), opts) {\n Ok(m) => m,\n Err(f) => fail!(f.to_string())\n };\n\n \/\/ Read input files:\n let read_source = |arg_filename: Option<String>, default_filename: &str| {\n let path = match arg_filename {\n Some(ref filename) => filename.as_slice(),\n None => default_filename,\n };\n File::open(&Path::new(path)).read_to_string().unwrap()\n };\n let html = read_source(matches.opt_str(\"h\"), \"examples\/test.html\");\n let css = read_source(matches.opt_str(\"c\"), \"examples\/test.css\");\n\n \/\/ Parsing and rendering:\n let root_node = html::parse(html);\n let stylesheet = css::parse(css);\n let style_tree = style::style_tree(&root_node, &stylesheet);\n\n \/\/ Debug output:\n println!(\"{}\\n\", style_tree);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::iter::FromIterator;\n\nuse serde::{\n 
Deserialize,\n Serialize,\n};\n\nuse thiserror::Error;\n\nuse rnc_core::{\n urs,\n urs_taxid,\n};\n\nuse crate::normalize::utils;\n\nuse crate::normalize::ds::{\n accession::{\n AccessionVec,\n CrossReference,\n RawAccession,\n ReferenceVec,\n },\n basic::Basic,\n crs::{\n Crs,\n CrsVec,\n },\n feedback::{\n Feedback,\n FeedbackVec,\n },\n go_annotation::GoAnnotation,\n interacting_protein::{\n InteractingProtein,\n InteractingProteinVec,\n },\n interacting_rna::{\n InteractingRna,\n InteractingRnaVec,\n },\n precompute::{\n Precompute,\n PrecomputeSummary,\n },\n qa_status::QaStatus,\n r2dt::R2dt,\n rfam_hit::{\n RfamHit,\n RfamHitVec,\n },\n so_tree,\n};\n\n#[derive(Error, Debug)]\npub enum NormalizationError {\n #[error(\"Could not ungroup {0}\")]\n CountError(#[from] utils::Error),\n\n #[error(\"Could not parse {0}\")]\n UrsParsingError(#[from] urs::Error),\n\n #[error(\"Could not parse {0}\")]\n UrsTaxidParsingError(#[from] urs_taxid::Error),\n}\n\n#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]\npub struct Raw {\n pub id: usize,\n pub base: Basic,\n pub crs: Vec<Crs>,\n pub feedback: Vec<Feedback>,\n pub go_annotations: Vec<GoAnnotation>,\n pub interacting_proteins: Vec<InteractingProtein>,\n pub interacting_rnas: Vec<InteractingRna>,\n pub precompute: Precompute,\n pub qa_status: QaStatus,\n pub r2dt: Option<R2dt>,\n pub rfam_hits: Vec<RfamHit>,\n pub so_tree: so_tree::SoTree,\n}\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\npub struct Normalized {\n urs: String,\n taxid: usize,\n urs_taxid: String,\n short_urs: String,\n deleted: String,\n\n so_rna_type_tree: so_tree::SoTree,\n\n #[serde(flatten)]\n pre_summary: PrecomputeSummary,\n\n #[serde(flatten)]\n basic: Basic,\n\n \/\/ #[serde(flatten)]\n \/\/ dates: Dates,\n qa_status: QaStatus,\n secondary_structure: Option<R2dt>,\n\n accessions: AccessionVec,\n cross_references: Vec<CrossReference>,\n crs: CrsVec,\n feedback: FeedbackVec,\n go_annotations: Vec<GoAnnotation>,\n interacting_proteins: InteractingProteinVec,\n interacting_rnas: InteractingRnaVec,\n references: ReferenceVec,\n rfam_hits: RfamHitVec,\n}\n\nimpl Raw {\n pub fn urs_taxid(&self) -> String {\n return self.base.urs_taxid.to_owned();\n }\n\n pub fn urs(&self) -> Result<String, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n let urs: urs::Urs = ut.into();\n Ok(urs.to_string())\n }\n\n pub fn taxid(&self) -> Result<u64, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n Ok(ut.taxid())\n }\n\n pub fn short_urs(&self) -> Result<String, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n let urs: urs::Urs = ut.into();\n Ok(urs.short_urs())\n }\n\n pub fn short_urs_taxid(&self) -> Result<String, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n Ok(ut.short())\n }\n}\n\nimpl Normalized {\n pub fn new(\n raw: Raw,\n accessions: Vec<RawAccession>,\n ) -> Result<Self, NormalizationError> {\n let pre_summary = PrecomputeSummary::from(raw.precompute);\n let base = raw.base.clone();\n\n let urs_taxid = base.urs_taxid.to_owned();\n assert!(urs_taxid == accessions[0].urs_taxid);\n let parts: Vec<&str> = urs_taxid.split(\"_\").collect();\n let urs = parts[0].to_owned();\n let taxid = parts[1].parse::<usize>().unwrap();\n let parsed: urs_taxid::UrsTaxid = urs_taxid.parse()?;\n let references = ReferenceVec::from_iter(accessions.clone());\n\n Ok(Self {\n urs_taxid,\n urs,\n taxid,\n short_urs: parsed.short(),\n deleted: String::from(\"N\"),\n\n 
so_rna_type_tree: raw.so_tree,\n\n pre_summary,\n basic: base,\n qa_status: raw.qa_status,\n secondary_structure: raw.r2dt.into(),\n\n accessions: AccessionVec::from_iter(accessions.clone()),\n cross_references: accessions.into_iter().map(CrossReference::from).collect(),\n crs: CrsVec::from_iter(raw.crs.clone()),\n feedback: FeedbackVec::from_iter(raw.feedback.clone()),\n go_annotations: raw.go_annotations.clone(),\n interacting_proteins: InteractingProteinVec::from_iter(\n raw.interacting_proteins.clone(),\n ),\n interacting_rnas: InteractingRnaVec::from_iter(raw.interacting_rnas.clone()),\n references,\n rfam_hits: RfamHitVec::from_iter(raw.rfam_hits.clone()),\n })\n }\n}\n<commit_msg>Fix data schema<commit_after>use std::iter::FromIterator;\n\nuse serde::{\n Deserialize,\n Serialize,\n};\n\nuse thiserror::Error;\n\nuse rnc_core::{\n urs,\n urs_taxid,\n};\n\nuse crate::normalize::utils;\n\nuse crate::normalize::ds::{\n accession::{\n AccessionVec,\n CrossReference,\n RawAccession,\n ReferenceVec,\n },\n basic::Basic,\n crs::{\n Crs,\n CrsVec,\n },\n feedback::{\n Feedback,\n FeedbackVec,\n },\n go_annotation::GoAnnotation,\n interacting_protein::InteractingProtein,\n interacting_rna::InteractingRna,\n precompute::{\n Precompute,\n PrecomputeSummary,\n },\n qa_status::QaStatus,\n r2dt::R2dt,\n rfam_hit::{\n RfamHit,\n RfamHitVec,\n },\n so_tree,\n};\n\n#[derive(Error, Debug)]\npub enum NormalizationError {\n #[error(\"Could not ungroup {0}\")]\n CountError(#[from] utils::Error),\n\n #[error(\"Could not parse {0}\")]\n UrsParsingError(#[from] urs::Error),\n\n #[error(\"Could not parse {0}\")]\n UrsTaxidParsingError(#[from] urs_taxid::Error),\n}\n\n#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]\npub struct Raw {\n pub id: usize,\n pub base: Basic,\n pub crs: Vec<Crs>,\n pub feedback: Vec<Feedback>,\n pub go_annotations: Vec<GoAnnotation>,\n pub interacting_proteins: Vec<InteractingProtein>,\n pub interacting_rnas: Vec<InteractingRna>,\n pub precompute: Precompute,\n pub qa_status: QaStatus,\n pub r2dt: Option<R2dt>,\n pub rfam_hits: Vec<RfamHit>,\n pub so_tree: so_tree::SoTree,\n}\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\npub struct Normalized {\n urs: String,\n taxid: usize,\n urs_taxid: String,\n short_urs: String,\n deleted: String,\n\n so_rna_type_tree: so_tree::SoTree,\n\n #[serde(flatten)]\n pre_summary: PrecomputeSummary,\n\n #[serde(flatten)]\n basic: Basic,\n\n \/\/ #[serde(flatten)]\n \/\/ dates: Dates,\n qa_status: QaStatus,\n\n secondary: Option<R2dt>,\n\n #[serde(flatten)]\n accessions: AccessionVec,\n cross_references: Vec<CrossReference>,\n crs: CrsVec,\n overlaps: FeedbackVec,\n go_annotations: Vec<GoAnnotation>,\n\n interacting_proteins: Vec<InteractingProtein>,\n\n interacting_rnas: Vec<InteractingRna>,\n\n #[serde(flatten)]\n references: ReferenceVec,\n\n #[serde(flatten)]\n rfam_hits: RfamHitVec,\n}\n\nimpl Raw {\n pub fn urs_taxid(&self) -> String {\n return self.base.urs_taxid.to_owned();\n }\n\n pub fn urs(&self) -> Result<String, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n let urs: urs::Urs = ut.into();\n Ok(urs.to_string())\n }\n\n pub fn taxid(&self) -> Result<u64, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n Ok(ut.taxid())\n }\n\n pub fn short_urs(&self) -> Result<String, urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n let urs: urs::Urs = ut.into();\n Ok(urs.short_urs())\n }\n\n pub fn short_urs_taxid(&self) -> Result<String, 
urs_taxid::Error> {\n let ut: urs_taxid::UrsTaxid = self.urs_taxid().parse()?;\n Ok(ut.short())\n }\n}\n\nimpl Normalized {\n pub fn new(raw: Raw, accessions: Vec<RawAccession>) -> Result<Self, NormalizationError> {\n let pre_summary = PrecomputeSummary::from(raw.precompute);\n let base = raw.base.clone();\n\n let urs_taxid = base.urs_taxid.to_owned();\n assert!(urs_taxid == accessions[0].urs_taxid);\n let parts: Vec<&str> = urs_taxid.split(\"_\").collect();\n let urs = parts[0].to_owned();\n let taxid = parts[1].parse::<usize>().unwrap();\n let parsed: urs_taxid::UrsTaxid = urs_taxid.parse()?;\n let references = ReferenceVec::from_iter(accessions.clone());\n\n Ok(Self {\n urs_taxid,\n urs,\n taxid,\n short_urs: parsed.short(),\n deleted: String::from(\"N\"),\n\n so_rna_type_tree: raw.so_tree,\n\n pre_summary,\n basic: base,\n qa_status: raw.qa_status,\n secondary: raw.r2dt,\n\n accessions: AccessionVec::from_iter(accessions.clone()),\n cross_references: accessions.into_iter().map(CrossReference::from).collect(),\n crs: CrsVec::from_iter(raw.crs.clone()),\n overlaps: FeedbackVec::from_iter(raw.feedback.clone()),\n go_annotations: raw.go_annotations.clone(),\n interacting_proteins: raw.interacting_proteins,\n interacting_rnas: raw.interacting_rnas,\n references,\n rfam_hits: RfamHitVec::from_iter(raw.rfam_hits.clone()),\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>basic algorithm defined<commit_after>extern crate nalgebra as na;\n#[allow(unused_imports)]\nuse na::{Vector,Dim,Real,Vector4,Vector3,Vector2,U1,Matrix,DVector,Dynamic,VecStorage};\nuse na::storage::{Storage};\nuse std::cmp;\n\n\n\n\/\/ evum CovvolveMode{\n\/\/ Full,\n\/\/ Valid,\n\/\/ Same\n\/\/ }\n\n\n\n#[allow(non_snake_case)]\nfn Convolve1D<R: Real, D: Dim, E: Dim, S: Storage<R, D>, Q: Storage<R, E>>(\n Vector : Vector<R,D,S>,\n Kernel : Vector<R,E,Q>\n ) -> Matrix<R, Dynamic, U1, VecStorage<R, Dynamic, U1>>\n {\n \/\/\n \/\/ Vector is the vector, Kervel is the kervel\n \/\/ C is the returv vector\n \/\/\n if Kernel.len() > Vector.len(){\n return Convolve1D(Kernel, Vector);\n }\n\n let V = Vector.len();\n let K = Kernel.len();\n let L = V + K - 1;\n let v = V as i8;\n let k = K as i8;\n let l = L as i8;\n let mut C = DVector::<R>::zeros(L);\n\n for i in 0..l{\n let u_i = cmp::max(0, i - k);\n let u_f = cmp::min(i, v - 1);\n if u_i == u_f{\n C[i as usize] += Vector[u_i as usize] * Kernel[(i - u_i) as usize];\n }\n else{\n for u in u_i..(u_f+1){\n if i - u < k{\n C[i as usize] += Vector[u as usize] * Kernel[(i - u ) as usize];\n }\n }\n }\n }\n C\n }\n\n\nfn main() {\n let v1 = Vector2::new(3.0,3.0);\n let v2 = Vector4::new(1.0,2.0,5.0,9.0);\n let x = Convolve1D(v1,v2);\n println!(\"{:?}\",x)\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Adapt flag forwarding to new runtime flags<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add ARM decode for coprocessor instructions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated to check\/remove dirty file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update per collections renaming<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Enabling Microsoft Bitmap Version 4 as valid bitmap version.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>num-macros: port from ast::VariantKind to ast::VariantData<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Just check if we don't get an ICE for the _S type.\n\nuse std::cell::Cell;\nuse std::mem;\n\npub struct S {\n s: Cell<usize>\n}\n\npub type _S = [usize; 0 - (mem::size_of::<S>() != 4) as usize];\n<commit_msg>Update rustdoc test<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Just check if we don't get an ICE for the _S type.\n\nuse std::cell::Cell;\nuse std::mem;\n\npub struct S {\n s: Cell<usize>\n}\n\npub const N: usize = 0 - (mem::size_of::<S>() != 4) as usize;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z identify_regions -Z span_free_formats\n\/\/ ignore-tidy-linelength\n\n\/\/ Regression test for #43457: an `EndRegion` was missing from output\n\/\/ because compiler was using a faulty means for region map lookup.\n\nuse std::cell::RefCell;\n\nfn rc_refcell_test(r: RefCell<i32>) {\n r.borrow_mut();\n}\n\nfn main() { }\n\n\/\/ END RUST SOURCE\n\/\/ START rustc.node5.SimplifyCfg-qualify-consts.after.mir\n\/\/\n\/\/ fn rc_refcell_test(_1: std::cell::RefCell<i32>) -> () {\n\/\/ let mut _0: ();\n\/\/ scope 1 {\n\/\/ let _2: std::cell::RefCell<i32>;\n\/\/ }\n\/\/ let mut _3: std::cell::RefMut<'17dce, i32>;\n\/\/ let mut _4: &'17dce std::cell::RefCell<i32>;\n\/\/\n\/\/ bb0: {\n\/\/ StorageLive(_2);\n\/\/ _2 = _1;\n\/\/ StorageLive(_4);\n\/\/ _4 = &'17dce _2;\n\/\/ _3 = const <std::cell::RefCell<T>>::borrow_mut(_4) -> bb1;\n\/\/ }\n\/\/\n\/\/ bb1: {\n\/\/ drop(_3) -> bb2;\n\/\/ }\n\/\/\n\/\/ bb2: {\n\/\/ StorageDead(_4);\n\/\/ EndRegion('17dce);\n\/\/ _0 = ();\n\/\/ StorageDead(_2);\n\/\/ return;\n\/\/ }\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>access: print error on decrypt\/decode failure<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add scroll example<commit_after>extern crate cursive;\n\nuse cursive::traits::Boxable;\nuse cursive::views::{Canvas, Dialog, ScrollView, LinearLayout, Button};\nuse cursive::Printer;\n\nfn main() {\n let mut siv = cursive::Cursive::default();\n\n siv.add_layer(Dialog::around(\n ScrollView::new(\n LinearLayout::vertical()\n .child(Button::new(\"Foo\", |s| s.add_layer(Dialog::info(\"Ah\"))))\n .child(Canvas::new(()).with_draw(draw).fixed_size((120, 40)))\n .child(Button::new(\"Bar\", |s| s.add_layer(Dialog::info(\"Uh\"))))\n ).scroll_x(true),\n ).fixed_size((60, 30)));\n\n siv.add_global_callback('q', |s| s.quit());\n\n siv.run();\n}\n\nfn draw(_: &(), p: &Printer) {\n for x in 0..p.size.x {\n for y in 0..p.size.y {\n let c = (x + 6*y) % 10;\n p.print((x, y), &format!(\"{}\", c));\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate glutin;\n#[macro_use]\nextern crate glium;\n\nuse glium::{Texture, Surface};\n\nmod support;\n\n#[test]\nfn texture_1d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::Texture1d::new(&display, vec![\n (0, 0, 0, 0),\n (0, 0, 0, 0),\n (0, 0, 0, 0u8),\n ]);\n\n assert_eq!(texture.get_width(), 3);\n assert_eq!(texture.get_height(), None);\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn empty_texture1d_u8u8u8u8() {\n let display = support::build_display();\n\n let texture = glium::texture::Texture1d::new_empty(&display,\n glium::texture::UncompressedFloatFormat::\n U8U8U8U8, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn depth_texture_1d_creation() { \n let display = support::build_display();\n\n let texture = match glium::texture::DepthTexture1d::new_if_supported(&display, vec![0.0, 0.0, 0.0, 0.0f32]) {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), 4);\n assert_eq!(texture.get_height(), None);\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn texture_2d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::Texture2d::new(&display, vec![\n vec![(0, 0, 
0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0u8)],\n ]);\n\n assert_eq!(texture.get_width(), 2);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn empty_texture2d_u8u8u8u8() {\n let display = support::build_display();\n\n let texture = glium::texture::Texture2d::new_empty(&display,\n glium::texture::UncompressedFloatFormat::\n U8U8U8U8,\n 128, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn depth_texture_2d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::DepthTexture2d::new_if_supported(&display, vec![\n vec![0.0, 0.0, 0.0, 0.0f32],\n vec![0.0, 0.0, 0.0, 0.0f32],\n vec![0.0, 0.0, 0.0, 0.0f32],\n ]);\n\n let texture = match texture {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), 4);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\n#[ignore] \/\/ `thread 'empty_depth_texture2d_f32' panicked at 'assertion failed: version >= &GlVersion(3, 0)'`\nfn empty_depth_texture2d_f32() {\n let display = support::build_display();\n\n let texture = glium::texture::DepthTexture2d::new_empty(&display,\n glium::texture::DepthFormat::F32,\n 128, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn texture_3d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::Texture3d::new(&display, vec![\n vec![\n vec![(0, 0, 0, 0)],\n vec![(0, 0, 0, 0)],\n ],\n vec![\n vec![(0, 0, 0, 0)],\n vec![(0, 0, 0, 0)],\n ],\n vec![\n vec![(0, 0, 0, 0)],\n vec![(0, 0, 0, 0u8)],\n ],\n ]);\n\n assert_eq!(texture.get_width(), 1);\n assert_eq!(texture.get_height(), Some(2));\n assert_eq!(texture.get_depth(), Some(3));\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn compressed_texture_2d_creation() {\n let display = support::build_display();\n\n let texture = glium::texture::CompressedTexture2d::new(&display, vec![\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0u8)],\n ]);\n\n assert_eq!(texture.get_width(), 2);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\nmacro_rules! 
empty_texture_test {\n ($test_name:ident, $tex_ty:ident, [$($dims:expr),+],\n $w:expr, $h:expr, $d:expr, $s:expr) =>\n (\n #[test]\n fn $test_name() {\n let display = support::build_display();\n\n let texture = glium::texture::$tex_ty::empty(&display, $($dims),+);\n\n assert_eq!(texture.get_width(), $w);\n assert_eq!(texture.get_height(), $h);\n assert_eq!(texture.get_depth(), $d);\n assert_eq!(texture.get_array_size(), $s);\n\n assert_eq!(texture.get_mipmap_levels(), 1);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n }\n );\n\n ($test_name:ident, maybe $tex_ty:ident, [$($dims:expr),+],\n $w:expr, $h:expr, $d:expr, $s:expr) =>\n (\n #[test]\n fn $test_name() {\n let display = support::build_display();\n\n let texture = match glium::texture::$tex_ty::empty_if_supported(&display, $($dims),+) {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), $w);\n assert_eq!(texture.get_height(), $h);\n assert_eq!(texture.get_depth(), $d);\n assert_eq!(texture.get_array_size(), $s);\n\n assert_eq!(texture.get_mipmap_levels(), 1);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n }\n );\n}\n\n\/\/ TODO: compressed textures don't have \"empty\" yet\n\/*empty_texture_test!(empty_compressedtexture1d, CompressedTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_compressedtexture1darray, CompressedTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_compressedtexture2d, CompressedTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_compressedtexture2darray, CompressedTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_compressedtexture3d, CompressedTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);*\/\nempty_texture_test!(empty_depthstenciltexture1d, maybe DepthStencilTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_depthstenciltexture1darray, maybe DepthStencilTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_depthstenciltexture2d, maybe DepthStencilTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_depthstenciltexture2darray, maybe DepthStencilTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\n\/\/ TODO: non-working\n\/\/empty_texture_test!(empty_depthstenciltexture3d, DepthStencilTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_depthtexture1d, maybe DepthTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_depthtexture1darray, maybe DepthTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_depthtexture2d, maybe DepthTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_depthtexture2darray, maybe DepthTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\n\/\/ TODO: non-working\n\/\/empty_texture_test!(empty_depthtexture3d, DepthTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_integraltexture1d, maybe IntegralTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_integraltexture1darray,maybe IntegralTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_integraltexture2d, maybe IntegralTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_integraltexture2darray, maybe IntegralTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_integraltexture3d, maybe IntegralTexture3d, [64, 32, 16], 64, Some(32), Some(16), 
None);\nempty_texture_test!(empty_stenciltexture1d, maybe StencilTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_stenciltexture1darray, maybe StencilTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_stenciltexture2d, maybe StencilTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_stenciltexture2darray, maybe StencilTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_stenciltexture3d, maybe StencilTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_texture1d, Texture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_texture1darray, Texture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_texture2d, Texture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_texture2darray, Texture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_texture3d, Texture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_unsignedtexture1d, maybe UnsignedTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_unsignedtexture1darray, maybe UnsignedTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_unsignedtexture2d, maybe UnsignedTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_unsignedtexture2darray, maybe UnsignedTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_unsignedtexture3d, maybe UnsignedTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\n<commit_msg>Fix texture creation tests<commit_after>extern crate glutin;\n#[macro_use]\nextern crate glium;\n\nuse glium::{Texture, Surface};\n\nmod support;\n\n#[test]\nfn empty_texture1d_u8u8u8u8() {\n let display = support::build_display();\n\n let texture = glium::texture::Texture1d::new_empty(&display,\n glium::texture::UncompressedFloatFormat::\n U8U8U8U8, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn depth_texture_1d_creation() { \n let display = support::build_display();\n\n let texture = match glium::texture::DepthTexture1d::new_if_supported(&display, vec![0.0, 0.0, 0.0, 0.0f32]) {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), 4);\n assert_eq!(texture.get_height(), None);\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn texture_2d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::Texture2d::new(&display, vec![\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0u8)],\n ]);\n\n assert_eq!(texture.get_width(), 2);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\nfn empty_texture2d_u8u8u8u8() {\n let display = support::build_display();\n\n let texture = glium::texture::Texture2d::new_empty(&display,\n glium::texture::UncompressedFloatFormat::\n U8U8U8U8,\n 128, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn depth_texture_2d_creation() { \n let display = support::build_display();\n\n let texture = glium::texture::DepthTexture2d::new_if_supported(&display, vec![\n vec![0.0, 0.0, 0.0, 0.0f32],\n vec![0.0, 0.0, 0.0, 0.0f32],\n vec![0.0, 0.0, 0.0, 0.0f32],\n ]);\n\n let texture = match 
texture {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), 4);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\n#[test]\n#[ignore] \/\/ `thread 'empty_depth_texture2d_f32' panicked at 'assertion failed: version >= &GlVersion(3, 0)'`\nfn empty_depth_texture2d_f32() {\n let display = support::build_display();\n\n let texture = glium::texture::DepthTexture2d::new_empty(&display,\n glium::texture::DepthFormat::F32,\n 128, 128);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n}\n\n#[test]\nfn compressed_texture_2d_creation() {\n let display = support::build_display();\n\n let texture = glium::texture::CompressedTexture2d::new(&display, vec![\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0)],\n vec![(0, 0, 0, 0), (0, 0, 0, 0u8)],\n ]);\n\n assert_eq!(texture.get_width(), 2);\n assert_eq!(texture.get_height(), Some(3));\n assert_eq!(texture.get_depth(), None);\n assert_eq!(texture.get_array_size(), None);\n\n display.assert_no_error();\n}\n\nmacro_rules! empty_texture_test {\n ($test_name:ident, $tex_ty:ident, [$($dims:expr),+],\n $w:expr, $h:expr, $d:expr, $s:expr) =>\n (\n #[test]\n fn $test_name() {\n let display = support::build_display();\n\n let texture = glium::texture::$tex_ty::empty(&display, $($dims),+);\n\n assert_eq!(texture.get_width(), $w);\n assert_eq!(texture.get_height(), $h);\n assert_eq!(texture.get_depth(), $d);\n assert_eq!(texture.get_array_size(), $s);\n\n assert_eq!(texture.get_mipmap_levels(), 1);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n }\n );\n\n ($test_name:ident, maybe $tex_ty:ident, [$($dims:expr),+],\n $w:expr, $h:expr, $d:expr, $s:expr) =>\n (\n #[test]\n fn $test_name() {\n let display = support::build_display();\n\n let texture = match glium::texture::$tex_ty::empty_if_supported(&display, $($dims),+) {\n None => return,\n Some(t) => t\n };\n\n assert_eq!(texture.get_width(), $w);\n assert_eq!(texture.get_height(), $h);\n assert_eq!(texture.get_depth(), $d);\n assert_eq!(texture.get_array_size(), $s);\n\n assert_eq!(texture.get_mipmap_levels(), 1);\n\n display.assert_no_error();\n drop(texture);\n display.assert_no_error();\n }\n );\n}\n\n\/\/ TODO: compressed textures don't have \"empty\" yet\n\/*empty_texture_test!(empty_compressedtexture1d, CompressedTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_compressedtexture1darray, CompressedTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_compressedtexture2d, CompressedTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_compressedtexture2darray, CompressedTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_compressedtexture3d, CompressedTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);*\/\nempty_texture_test!(empty_depthstenciltexture1d, maybe DepthStencilTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_depthstenciltexture1darray, maybe DepthStencilTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_depthstenciltexture2d, maybe DepthStencilTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_depthstenciltexture2darray, maybe DepthStencilTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\n\/\/ TODO: non-working\n\/\/empty_texture_test!(empty_depthstenciltexture3d, DepthStencilTexture3d, [64, 32, 16], 
64, Some(32), Some(16), None);\nempty_texture_test!(empty_depthtexture1d, maybe DepthTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_depthtexture1darray, maybe DepthTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_depthtexture2d, maybe DepthTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_depthtexture2darray, maybe DepthTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\n\/\/ TODO: non-working\n\/\/empty_texture_test!(empty_depthtexture3d, DepthTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_integraltexture1d, maybe IntegralTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_integraltexture1darray, maybe IntegralTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_integraltexture2d, maybe IntegralTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_integraltexture2darray, maybe IntegralTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_integraltexture3d, maybe IntegralTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_stenciltexture1d, maybe StencilTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_stenciltexture1darray, maybe StencilTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_stenciltexture2d, maybe StencilTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_stenciltexture2darray, maybe StencilTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_stenciltexture3d, maybe StencilTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_texture1d, maybe Texture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_texture1darray, maybe Texture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_texture2d, Texture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_texture2darray, maybe Texture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_texture3d, maybe Texture3d, [64, 32, 16], 64, Some(32), Some(16), None);\nempty_texture_test!(empty_unsignedtexture1d, maybe UnsignedTexture1d, [64], 64, None, None, None);\nempty_texture_test!(empty_unsignedtexture1darray, maybe UnsignedTexture1dArray, [64, 32], 64, None, None, Some(32));\nempty_texture_test!(empty_unsignedtexture2d, maybe UnsignedTexture2d, [64, 32], 64, Some(32), None, None);\nempty_texture_test!(empty_unsignedtexture2darray, maybe UnsignedTexture2dArray, [64, 32, 16], 64, Some(32), None, Some(16));\nempty_texture_test!(empty_unsignedtexture3d, maybe UnsignedTexture3d, [64, 32, 16], 64, Some(32), Some(16), None);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More WebIDL implementations.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Can only read directories.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removed cmake cargo build<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add decode logic to sort opcodes into Operations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make launchtestxlw use constant.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change at calls to get on capture groups<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add dumb Env<commit_after>\/\/\n\/\/ Copyright (c) 2016, Boris Popov <popov@whitekefir.ru>\n\/\/\n\/\/ This Source Code Form is subject to 
the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\/\/\n\nuse logger;\n\npub struct Env {\n device: String,\n dir: String,\n script: String\n}\n\nimpl Env {\n\n pub fn new() -> Env {\n return Env{device: \"\".to_string(),\n dir: \"\".to_string(),\n script: \"\".to_string()}\n }\n\n pub fn load(&self, l: &logger::Logger) {\n self.load_device(l);\n self.load_dir(l);\n self.load_script(l);\n }\n\n fn load_device(&self, l: &logger::Logger) {\n \/\/\n \/\/TODO\n \/\/\n }\n\n fn load_dir(&self, l: &logger::Logger) {\n \/\/\n \/\/TODO\n \/\/\n }\n\n fn load_script(&self, l: &logger::Logger) {\n \/\/\n \/\/TODO\n \/\/\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>use real fps to record<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`IXAudio2Voice::SetOutputMatrix()`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix grammar<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added log module<commit_after>extern crate time;\n\nuse std::fmt;\nuse std::io;\nuse std::io::{Error, Write};\nuse std::cmp::{Ord, Ordering};\n\npub enum LogLevel {\n ERROR,\n WARN,\n INFO,\n VERBOSE\n}\n\n\n\/\/ ERROR > WARN > INFO > VERBOSE\nimpl Ord for LogLevel {\n fn cmp(&self, other: &Self) -> Ordering {\n match *self {\n LogLevel::ERROR => match *other {\n LogLevel::ERROR => Ordering::Equal,\n _ => Ordering::Greater,\n },\n LogLevel::WARN => match *other {\n LogLevel::ERROR => Ordering::Less,\n LogLevel::WARN => Ordering::Equal,\n _ => Ordering::Greater,\n },\n LogLevel::INFO => match *other {\n LogLevel::VERBOSE => Ordering::Greater,\n LogLevel::INFO => Ordering::Equal,\n _ => Ordering::Less,\n },\n LogLevel::VERBOSE => match *other {\n LogLevel::VERBOSE => Ordering::Equal,\n _ => Ordering::Less,\n },\n }\n }\n}\n\nimpl Eq for LogLevel {}\n\nimpl PartialEq for LogLevel {\n\n fn eq(&self, other: &Self) -> bool {\n self.cmp(other) == Ordering::Equal\n }\n\n fn ne(&self, other: &Self) -> bool {\n !self.eq(other)\n }\n\n}\n\nimpl PartialOrd for LogLevel {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n Some(self.cmp(other))\n }\n\n fn lt(&self, other: &Self) -> bool {\n self.cmp(other) == Ordering::Less\n }\n\n fn le(&self, other: &Self) -> bool {\n self.cmp(other) != Ordering::Greater\n }\n\n fn gt(&self, other: &Self) -> bool {\n self.cmp(other) == Ordering::Greater\n }\n\n fn ge(&self, other: &Self) -> bool {\n self.cmp(other) != Ordering::Less\n }\n}\n\n\/\/\/ Set global log level here\nstatic LOG_LEVEL: LogLevel = LogLevel::INFO;\n\n\/\/\/ the log function returning a result\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ log(LogLevel::ERROR, format_args!(\"This is bad!\")).unwrap();\n\/\/\/ '''\npub fn log(level: LogLevel, args: fmt::Arguments) -> Result<(), Error> {\n\n if level < LOG_LEVEL {\n return Ok(());\n }\n\n let stderr = io::stderr();\n let stdout = io::stdout();\n\n let mut stderr_lock = stderr.lock();\n let mut stdout_lock = stdout.lock();\n\n let cur_time = time::now();\n\n match level {\n LogLevel::ERROR => writeln!(stderr_lock, \"[ERROR] {}:{}:{} - {}\", cur_time.tm_hour, cur_time.tm_min, cur_time.tm_sec, args),\n LogLevel::WARN => writeln!(stdout_lock, \"[WARN] {}:{}:{} - {}\", cur_time.tm_hour, cur_time.tm_min, cur_time.tm_sec, args),\n LogLevel::INFO => writeln!(stdout_lock, \"[INFO] {}:{}:{} - {}\", cur_time.tm_hour, cur_time.tm_min, cur_time.tm_sec, args),\n LogLevel::VERBOSE => 
writeln!(stdout_lock, \"[VERBOSE] {}:{}:{} - {}\", cur_time.tm_hour, cur_time.tm_min, cur_time.tm_sec, args),\n }\n\n}\n\n\/\/\/ a log function ignoring any failure\n\/\/\/ \"log something, if possible\"\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ try_log(LogLevel::ERROR, format_args!(\"This is bad!\"));\n\/\/\/ '''\npub fn try_log(level: LogLevel, args: fmt::Arguments) {\n let _ = log(level, args);\n}\n\n\/\/ Helper Macro\n\n\/\/\/ Error Macro\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ error!(\"This is bad!\");\n\/\/\/ '''\nmacro_rules! error {\n ($($arg:tt)*) => (::utils::log::try_log(::utils::log::LogLevel::ERROR, format_args!($($arg)*)));\n}\n\n\/\/\/ Warning Macro\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ error!(\"This is maybe bad?!\");\n\/\/\/ '''\nmacro_rules! warn {\n ($($arg:tt)*) => (::utils::log::try_log(::utils::log::LogLevel::WARN, format_args!($($arg)*)));\n}\n\n\/\/\/ Info Macro\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ error!(\"This is just to let you know!\");\n\/\/\/ '''\nmacro_rules! info {\n ($($arg:tt)*) => (::utils::log::try_log(::utils::log::LogLevel::INFO, format_args!($($arg)*)));\n}\n\n\/\/\/ Verbose Macro\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ '''\n\/\/\/ error!(\"You must really like spam!\");\n\/\/\/ '''\nmacro_rules! verbose {\n ($($arg:tt)*) => (::utils::log::try_log(::utils::log::LogLevel::VERBOSE, format_args!($($arg)*)));\n}\n\n\n#[test]\nfn test() {\n info!(\"Test!\");\n}\n\n\/\/To-Do add a way to check the result automatically\n#[test]\nfn multi_thread_test() {\n let mut joins = Vec::new();\n for _ in 0..10 {\n joins.push(std::thread::spawn(move || {\n info!(\"No corruption should happen in this ultra-extra-super-long string output even if run in multiple threads.\");\n }));\n }\n for handle in joins {\n let _ = handle.join();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>use BitConvertible trait<commit_after><|endoftext|>"} {"text":"<commit_before>\/*!\n\nA render buffer is similar to a texture, but is optimized for usage as a draw target.\n\nContrary to a texture, you can't sample nor modify the content of a render buffer.\nYou should prefer render buffers over textures when you know that you don't need to read or modify\nthe data of the render buffer.\n\n*\/\nuse std::rc::Rc;\nuse std::ops::{Deref, DerefMut};\nuse std::mem;\n\nuse framebuffer::{ColorAttachment, ToColorAttachment};\nuse framebuffer::{DepthAttachment, ToDepthAttachment};\nuse framebuffer::{StencilAttachment, ToStencilAttachment};\nuse framebuffer::{DepthStencilAttachment, ToDepthStencilAttachment};\nuse texture::{UncompressedFloatFormat, DepthFormat, StencilFormat, DepthStencilFormat, TextureKind};\n\nuse image_format;\n\nuse gl;\nuse GlObject;\nuse fbo::FramebuffersContainer;\nuse backend::Facade;\nuse context::Context;\nuse ContextExt;\nuse version::Version;\nuse version::Api;\n\n\/\/\/ Error while creating a render buffer.\n#[derive(Copy, Clone, Debug)]\npub enum CreationError {\n \/\/\/ The requested format is not supported.\n FormatNotSupported,\n}\n\nimpl From<image_format::FormatNotSupportedError> for CreationError {\n fn from(_: image_format::FormatNotSupportedError) -> CreationError {\n CreationError::FormatNotSupported\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `RenderBuffer`.\npub struct RenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl RenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn 
new<F>(facade: &F, format: UncompressedFloatFormat, width: u32, height: u32)\n -> Result<RenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::UncompressedFloat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(RenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Float, width, height, None)\n })\n }\n}\n\nimpl<'a> ToColorAttachment<'a> for &'a RenderBuffer {\n #[inline]\n fn to_color_attachment(self) -> ColorAttachment<'a> {\n ColorAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for RenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for RenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for RenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `DepthRenderBuffer` directly.\npub struct DepthRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl DepthRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: DepthFormat, width: u32, height: u32)\n -> Result<DepthRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::DepthFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(DepthRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Depth, width, height, None)\n })\n }\n}\n\nimpl<'a> ToDepthAttachment<'a> for &'a DepthRenderBuffer {\n fn to_depth_attachment(self) -> DepthAttachment<'a> {\n DepthAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for DepthRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for DepthRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for DepthRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `StencilRenderBuffer` directly.\npub struct StencilRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl StencilRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: StencilFormat, width: u32, height: u32)\n -> Result<StencilRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::StencilFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(StencilRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Stencil, width, height, None)\n })\n }\n}\n\nimpl<'a> ToStencilAttachment<'a> for &'a StencilRenderBuffer {\n #[inline]\n fn to_stencil_attachment(self) -> StencilAttachment<'a> {\n 
StencilAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for StencilRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for StencilRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for StencilRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `DepthStencilRenderBuffer` directly.\npub struct DepthStencilRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl DepthStencilRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: DepthStencilFormat, width: u32, height: u32)\n -> Result<DepthStencilRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::DepthStencilFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(DepthStencilRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::DepthStencil, width, height, None)\n })\n }\n}\n\nimpl<'a> ToDepthStencilAttachment<'a> for &'a DepthStencilRenderBuffer {\n #[inline]\n fn to_depth_stencil_attachment(self) -> DepthStencilAttachment<'a> {\n DepthStencilAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for DepthStencilRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for DepthStencilRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for DepthStencilRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A RenderBuffer of indeterminate type.\npub struct RenderBufferAny {\n context: Rc<Context>,\n id: gl::types::GLuint,\n width: u32,\n height: u32,\n samples: Option<u32>,\n kind: TextureKind,\n}\n\nimpl RenderBufferAny {\n \/\/\/ Builds a new render buffer.\n fn new<F>(facade: &F, format: gl::types::GLenum, kind: TextureKind, width: u32, height: u32,\n samples: Option<u32>) -> RenderBufferAny\n where F: Facade\n {\n unsafe {\n \/\/ TODO: check that dimensions don't exceed GL_MAX_RENDERBUFFER_SIZE\n \/\/ FIXME: gles2 only supports very few formats\n let mut ctxt = facade.get_context().make_current();\n let mut id = mem::uninitialized();\n\n if ctxt.version >= &Version(Api::Gl, 4, 5) ||\n ctxt.extensions.gl_arb_direct_state_access\n {\n ctxt.gl.CreateRenderbuffers(1, &mut id);\n if let Some(samples) = samples {\n ctxt.gl.NamedRenderbufferStorageMultisample(id, samples as gl::types::GLsizei,\n format, width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n } else {\n ctxt.gl.NamedRenderbufferStorage(id, format, width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n }\n\n } else if samples.is_some() && (ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 3, 0) ||\n ctxt.extensions.gl_apple_framebuffer_multisample ||\n ctxt.extensions.gl_angle_framebuffer_multisample ||\n ctxt.extensions.gl_ext_multisampled_render_to_texture ||\n ctxt.extensions.gl_nv_framebuffer_multisample)\n {\n ctxt.gl.GenRenderbuffers(1, &mut id);\n 
ctxt.gl.BindRenderbuffer(gl::RENDERBUFFER, id);\n ctxt.state.renderbuffer = id;\n\n let samples = samples.unwrap();\n\n if ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 3, 0)\n {\n ctxt.gl.RenderbufferStorageMultisample(gl::RENDERBUFFER, \n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_apple_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleAPPLE(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_angle_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleANGLE(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_ext_multisampled_render_to_texture {\n ctxt.gl.RenderbufferStorageMultisampleEXT(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_nv_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleNV(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else {\n unreachable!();\n }\n\n } else if samples.is_none() && (ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0))\n {\n ctxt.gl.GenRenderbuffers(1, &mut id);\n ctxt.gl.BindRenderbuffer(gl::RENDERBUFFER, id);\n ctxt.state.renderbuffer = id;\n ctxt.gl.RenderbufferStorage(gl::RENDERBUFFER, format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if samples.is_some() && ctxt.extensions.gl_ext_framebuffer_object &&\n ctxt.extensions.gl_ext_framebuffer_multisample\n {\n ctxt.gl.GenRenderbuffersEXT(1, &mut id);\n ctxt.gl.BindRenderbufferEXT(gl::RENDERBUFFER_EXT, id);\n ctxt.state.renderbuffer = id;\n\n let samples = samples.unwrap();\n ctxt.gl.RenderbufferStorageMultisampleEXT(gl::RENDERBUFFER_EXT,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if samples.is_none() && ctxt.extensions.gl_ext_framebuffer_object {\n ctxt.gl.GenRenderbuffersEXT(1, &mut id);\n ctxt.gl.BindRenderbufferEXT(gl::RENDERBUFFER_EXT, id);\n ctxt.state.renderbuffer = id;\n ctxt.gl.RenderbufferStorageEXT(gl::RENDERBUFFER_EXT, format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else {\n unreachable!();\n }\n\n RenderBufferAny {\n context: facade.get_context().clone(),\n id: id,\n width: width,\n height: height,\n samples: samples,\n kind: kind,\n }\n }\n }\n\n \/\/\/ Returns the dimensions of the render buffer.\n #[inline]\n pub fn get_dimensions(&self) -> (u32, u32) {\n (self.width, self.height)\n }\n\n \/\/\/ Returns the number of samples of the render buffer, or `None` if multisampling isn't\n \/\/\/ enabled.\n #[inline]\n pub fn get_samples(&self) -> Option<u32> {\n self.samples\n }\n\n \/\/\/ Returns the context used to create this renderbuffer.\n #[inline]\n pub fn get_context(&self) -> &Rc<Context> {\n &self.context\n }\n\n \/\/\/ Returns the kind of renderbuffer.\n #[inline]\n pub fn kind(&self) -> TextureKind {\n self.kind\n }\n}\n\nimpl Drop for RenderBufferAny {\n fn drop(&mut self) {\n unsafe {\n let mut ctxt = self.context.make_current();\n\n \/\/ removing FBOs which contain this buffer\n FramebuffersContainer::purge_renderbuffer(&mut ctxt, self.id);\n\n 
if ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n if ctxt.state.renderbuffer == self.id {\n ctxt.state.renderbuffer = 0;\n }\n\n ctxt.gl.DeleteRenderbuffers(1, [ self.id ].as_ptr());\n\n } else if ctxt.extensions.gl_ext_framebuffer_object {\n if ctxt.state.renderbuffer == self.id {\n ctxt.state.renderbuffer = 0;\n }\n\n ctxt.gl.DeleteRenderbuffersEXT(1, [ self.id ].as_ptr());\n\n } else {\n unreachable!();\n }\n }\n }\n}\n\nimpl GlObject for RenderBufferAny {\n type Id = gl::types::GLuint;\n \n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.id\n }\n}\n<commit_msg>fixed error handling in src\/framebuffer\/render_buffer.rs<commit_after>\/*!\n\nA render buffer is similar to a texture, but is optimized for usage as a draw target.\n\nContrary to a texture, you can't sample nor modify the content of a render buffer.\nYou should prefer render buffers over textures when you know that you don't need to read or modify\nthe data of the render buffer.\n\n*\/\nuse std::rc::Rc;\nuse std::ops::{Deref, DerefMut};\nuse std::{ mem, fmt };\nuse std::error::Error;\n\nuse framebuffer::{ColorAttachment, ToColorAttachment};\nuse framebuffer::{DepthAttachment, ToDepthAttachment};\nuse framebuffer::{StencilAttachment, ToStencilAttachment};\nuse framebuffer::{DepthStencilAttachment, ToDepthStencilAttachment};\nuse texture::{UncompressedFloatFormat, DepthFormat, StencilFormat, DepthStencilFormat, TextureKind};\n\nuse image_format;\n\nuse gl;\nuse GlObject;\nuse fbo::FramebuffersContainer;\nuse backend::Facade;\nuse context::Context;\nuse ContextExt;\nuse version::Version;\nuse version::Api;\n\n\/\/\/ Error while creating a render buffer.\n#[derive(Copy, Clone, Debug)]\npub enum CreationError {\n \/\/\/ The requested format is not supported.\n FormatNotSupported,\n}\n\nimpl fmt::Display for CreationError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.description())\n }\n}\n\nimpl Error for CreationError {\n fn description(&self) -> &str {\n use self::CreationError::*;\n match *self {\n FormatNotSupported => \"The requested format is not supported\",\n }\n }\n}\n\nimpl From<image_format::FormatNotSupportedError> for CreationError {\n fn from(_: image_format::FormatNotSupportedError) -> CreationError {\n CreationError::FormatNotSupported\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `RenderBuffer`.\npub struct RenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl RenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: UncompressedFloatFormat, width: u32, height: u32)\n -> Result<RenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::UncompressedFloat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(RenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Float, width, height, None)\n })\n }\n}\n\nimpl<'a> ToColorAttachment<'a> for &'a RenderBuffer {\n #[inline]\n fn to_color_attachment(self) -> ColorAttachment<'a> {\n ColorAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for RenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for RenderBuffer {\n #[inline]\n fn 
deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for RenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `DepthRenderBuffer` directly.\npub struct DepthRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl DepthRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: DepthFormat, width: u32, height: u32)\n -> Result<DepthRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::DepthFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(DepthRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Depth, width, height, None)\n })\n }\n}\n\nimpl<'a> ToDepthAttachment<'a> for &'a DepthRenderBuffer {\n fn to_depth_attachment(self) -> DepthAttachment<'a> {\n DepthAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for DepthRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for DepthRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for DepthRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `StencilRenderBuffer` directly.\npub struct StencilRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl StencilRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, format: StencilFormat, width: u32, height: u32)\n -> Result<StencilRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::StencilFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(StencilRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::Stencil, width, height, None)\n })\n }\n}\n\nimpl<'a> ToStencilAttachment<'a> for &'a StencilRenderBuffer {\n #[inline]\n fn to_stencil_attachment(self) -> StencilAttachment<'a> {\n StencilAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for StencilRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for StencilRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for StencilRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A render buffer is similar to a texture, but is optimized for usage as a draw target.\n\/\/\/\n\/\/\/ Contrary to a texture, you can't sample or modify the content of the `DepthStencilRenderBuffer` directly.\npub struct DepthStencilRenderBuffer {\n buffer: RenderBufferAny,\n}\n\nimpl DepthStencilRenderBuffer {\n \/\/\/ Builds a new render buffer.\n pub fn new<F>(facade: &F, 
format: DepthStencilFormat, width: u32, height: u32)\n -> Result<DepthStencilRenderBuffer, CreationError> where F: Facade\n {\n let format = image_format::TextureFormatRequest::Specific(image_format::TextureFormat::DepthStencilFormat(format));\n let format = try!(image_format::format_request_to_glenum(&facade.get_context(), format, image_format::RequestType::Renderbuffer));\n\n Ok(DepthStencilRenderBuffer {\n buffer: RenderBufferAny::new(facade, format, TextureKind::DepthStencil, width, height, None)\n })\n }\n}\n\nimpl<'a> ToDepthStencilAttachment<'a> for &'a DepthStencilRenderBuffer {\n #[inline]\n fn to_depth_stencil_attachment(self) -> DepthStencilAttachment<'a> {\n DepthStencilAttachment::RenderBuffer(self)\n }\n}\n\nimpl Deref for DepthStencilRenderBuffer {\n type Target = RenderBufferAny;\n\n #[inline]\n fn deref(&self) -> &RenderBufferAny {\n &self.buffer\n }\n}\n\nimpl DerefMut for DepthStencilRenderBuffer {\n #[inline]\n fn deref_mut(&mut self) -> &mut RenderBufferAny {\n &mut self.buffer\n }\n}\n\nimpl GlObject for DepthStencilRenderBuffer {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\n\/\/\/ A RenderBuffer of indeterminate type.\npub struct RenderBufferAny {\n context: Rc<Context>,\n id: gl::types::GLuint,\n width: u32,\n height: u32,\n samples: Option<u32>,\n kind: TextureKind,\n}\n\nimpl RenderBufferAny {\n \/\/\/ Builds a new render buffer.\n fn new<F>(facade: &F, format: gl::types::GLenum, kind: TextureKind, width: u32, height: u32,\n samples: Option<u32>) -> RenderBufferAny\n where F: Facade\n {\n unsafe {\n \/\/ TODO: check that dimensions don't exceed GL_MAX_RENDERBUFFER_SIZE\n \/\/ FIXME: gles2 only supports very few formats\n let mut ctxt = facade.get_context().make_current();\n let mut id = mem::uninitialized();\n\n if ctxt.version >= &Version(Api::Gl, 4, 5) ||\n ctxt.extensions.gl_arb_direct_state_access\n {\n ctxt.gl.CreateRenderbuffers(1, &mut id);\n if let Some(samples) = samples {\n ctxt.gl.NamedRenderbufferStorageMultisample(id, samples as gl::types::GLsizei,\n format, width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n } else {\n ctxt.gl.NamedRenderbufferStorage(id, format, width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n }\n\n } else if samples.is_some() && (ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 3, 0) ||\n ctxt.extensions.gl_apple_framebuffer_multisample ||\n ctxt.extensions.gl_angle_framebuffer_multisample ||\n ctxt.extensions.gl_ext_multisampled_render_to_texture ||\n ctxt.extensions.gl_nv_framebuffer_multisample)\n {\n ctxt.gl.GenRenderbuffers(1, &mut id);\n ctxt.gl.BindRenderbuffer(gl::RENDERBUFFER, id);\n ctxt.state.renderbuffer = id;\n\n let samples = samples.unwrap();\n\n if ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 3, 0)\n {\n ctxt.gl.RenderbufferStorageMultisample(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_apple_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleAPPLE(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_angle_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleANGLE(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if 
ctxt.extensions.gl_ext_multisampled_render_to_texture {\n ctxt.gl.RenderbufferStorageMultisampleEXT(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if ctxt.extensions.gl_nv_framebuffer_multisample {\n ctxt.gl.RenderbufferStorageMultisampleNV(gl::RENDERBUFFER,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else {\n unreachable!();\n }\n\n } else if samples.is_none() && (ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0))\n {\n ctxt.gl.GenRenderbuffers(1, &mut id);\n ctxt.gl.BindRenderbuffer(gl::RENDERBUFFER, id);\n ctxt.state.renderbuffer = id;\n ctxt.gl.RenderbufferStorage(gl::RENDERBUFFER, format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if samples.is_some() && ctxt.extensions.gl_ext_framebuffer_object &&\n ctxt.extensions.gl_ext_framebuffer_multisample\n {\n ctxt.gl.GenRenderbuffersEXT(1, &mut id);\n ctxt.gl.BindRenderbufferEXT(gl::RENDERBUFFER_EXT, id);\n ctxt.state.renderbuffer = id;\n\n let samples = samples.unwrap();\n ctxt.gl.RenderbufferStorageMultisampleEXT(gl::RENDERBUFFER_EXT,\n samples as gl::types::GLsizei,\n format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else if samples.is_none() && ctxt.extensions.gl_ext_framebuffer_object {\n ctxt.gl.GenRenderbuffersEXT(1, &mut id);\n ctxt.gl.BindRenderbufferEXT(gl::RENDERBUFFER_EXT, id);\n ctxt.state.renderbuffer = id;\n ctxt.gl.RenderbufferStorageEXT(gl::RENDERBUFFER_EXT, format,\n width as gl::types::GLsizei,\n height as gl::types::GLsizei);\n\n } else {\n unreachable!();\n }\n\n RenderBufferAny {\n context: facade.get_context().clone(),\n id: id,\n width: width,\n height: height,\n samples: samples,\n kind: kind,\n }\n }\n }\n\n \/\/\/ Returns the dimensions of the render buffer.\n #[inline]\n pub fn get_dimensions(&self) -> (u32, u32) {\n (self.width, self.height)\n }\n\n \/\/\/ Returns the number of samples of the render buffer, or `None` if multisampling isn't\n \/\/\/ enabled.\n #[inline]\n pub fn get_samples(&self) -> Option<u32> {\n self.samples\n }\n\n \/\/\/ Returns the context used to create this renderbuffer.\n #[inline]\n pub fn get_context(&self) -> &Rc<Context> {\n &self.context\n }\n\n \/\/\/ Returns the kind of renderbuffer.\n #[inline]\n pub fn kind(&self) -> TextureKind {\n self.kind\n }\n}\n\nimpl Drop for RenderBufferAny {\n fn drop(&mut self) {\n unsafe {\n let mut ctxt = self.context.make_current();\n\n \/\/ removing FBOs which contain this buffer\n FramebuffersContainer::purge_renderbuffer(&mut ctxt, self.id);\n\n if ctxt.version >= &Version(Api::Gl, 3, 0) ||\n ctxt.version >= &Version(Api::GlEs, 2, 0)\n {\n if ctxt.state.renderbuffer == self.id {\n ctxt.state.renderbuffer = 0;\n }\n\n ctxt.gl.DeleteRenderbuffers(1, [ self.id ].as_ptr());\n\n } else if ctxt.extensions.gl_ext_framebuffer_object {\n if ctxt.state.renderbuffer == self.id {\n ctxt.state.renderbuffer = 0;\n }\n\n ctxt.gl.DeleteRenderbuffersEXT(1, [ self.id ].as_ptr());\n\n } else {\n unreachable!();\n }\n }\n }\n}\n\nimpl GlObject for RenderBufferAny {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.id\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Store location info in `LocatedTT`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #36086 - apasel422:issue-35423, r=alexcrichton<commit_after>\/\/ Copyright 2016 The 
Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main () {\n let x = 4;\n match x {\n ref r if *r < 0 => println!(\"got negative num {} < 0\", r),\n e @ 1 ... 100 => println!(\"got number within range [1,100] {}\", e),\n _ => println!(\"no\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use codecleaner::{code_chunks, CodeIndicesIter};\n\npub struct StmtIndicesIter<'a> {\n src: &'a str,\n it: CodeIndicesIter<'a>,\n pos: usize,\n end: usize\n}\n\nimpl<'a> Iterator for StmtIndicesIter<'a> {\n type Item = (usize, usize);\n\n #[inline]\n fn next(&mut self) -> Option<(usize, usize)> {\n let src_bytes = self.src.as_bytes();\n let mut enddelim = b';';\n let mut bracelevel = 0u16;\n let mut parenlevel = 0i32;\n let mut start = self.pos;\n\n \/\/ loop on all code_chunks until we find a relevant open\/close pattern\n loop {\n \/\/ do we need the next chunk?\n if self.end == self.pos {\n \/\/ get the next chunk of code\n match self.it.next() {\n Some((ch_start, ch_end)) => {\n self.end = ch_end;\n if start == self.pos { start = ch_start; }\n self.pos = ch_start;\n }\n None => {\n \/\/ no more chunks. finished\n return if start < self.end { Some((start, self.end)) }\n else { None }\n }\n }\n }\n\n if start == self.pos {\n \/\/ if this is a new stmt block, skip the whitespace\n for &b in &src_bytes[self.pos..self.end] {\n match b {\n b' ' | b'\\r' | b'\\n' | b'\\t' => { self.pos += 1; },\n _ => { break; }\n }\n }\n start = self.pos;\n\n \/\/ test attribute #[foo = bar]\n if self.pos<self.end && src_bytes[self.pos] == b'#' {\n enddelim = b']'\n };\n }\n\n \/\/ iterate through the chunk, looking for stmt end\n for &b in &src_bytes[self.pos..self.end] {\n self.pos += 1;\n\n match b {\n b'(' => { parenlevel += 1; },\n b')' => { parenlevel -= 1; },\n b'{' => {\n \/\/ if we are top level and stmt is not a 'use' then\n \/\/ closebrace finishes the stmt\n if bracelevel == 0 && parenlevel == 0\n && !(is_a_use_stmt(self.src, start, self.pos)) {\n enddelim = b'}';\n }\n bracelevel += 1;\n },\n b'}' => {\n \/\/ have we reached the end of the scope?\n if bracelevel == 0 { return None; }\n bracelevel -= 1;\n },\n b'!' 
=> {\n \/\/ macro if followed by at least one space or (\n \/\/ FIXME: test with boolean 'not' expression\n if parenlevel == 0 && bracelevel == 0\n && self.pos < self.end && (self.pos-start) > 1 {\n match src_bytes[self.pos] {\n b' ' | b'\\r' | b'\\n' | b'\\t' | b'(' => {\n enddelim = b')';\n },\n _ => {}\n }\n }\n }\n _ => {}\n }\n\n if enddelim == b && bracelevel == 0 && parenlevel == 0 {\n return Some((start, self.pos));\n }\n }\n }\n }\n}\n\nfn is_a_use_stmt(src: &str, start: usize, pos: usize) -> bool {\n let src_bytes = src.as_bytes();\n let whitespace = \" {\\t\\r\\n\".as_bytes();\n (pos > 3 && &src_bytes[start..start+3] == b\"use\" &&\n whitespace.contains(&src_bytes[start+3])) ||\n (pos > 7 && &src_bytes[start..(start+7)] == b\"pub use\" &&\n whitespace.contains(&src_bytes[start+7]))\n}\n\npub fn iter_stmts(src: &str) -> StmtIndicesIter {\n StmtIndicesIter{ src: src, it: code_chunks(src), pos: 0, end: 0 }\n}\n\n#[cfg(test)]\nmod test {\n\n use testutils::{rejustify, slice};\n use super::*;\n\n #[test]\n fn iterates_single_use_stmts() {\n let src = &rejustify(\"\n use std::Foo; \/\/ a comment\n use std::Bar;\n \");\n\n let mut it = iter_stmts(src);\n assert_eq!(\"use std::Foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"use std::Bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_use_stmt_over_two_lines() {\n let src = &rejustify(\"\n use std::{Foo,\n Bar}; \/\/ a comment\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"use std::{Foo,\n Bar};\",slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_use_stmt_without_the_prefix() {\n let src = &rejustify(\"\n pub use {Foo,\n Bar}; \/\/ this is also legit apparently\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"pub use {Foo,\n Bar};\", slice(src, it.next().unwrap())\n );\n }\n\n #[test]\n fn iterates_while_stmt() {\n let src = &rejustify(\"\n while self.pos < 3 { }\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"while self.pos < 3 { }\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_lambda_arg() {\n let src = &rejustify(\"\n myfn(|n|{});\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"myfn(|n|{});\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_macro() {\n let src = \"\n mod foo;\n macro_rules! otry(\n ($e:expr) => (match $e { Some(e) => e, None => return })\n )\n mod bar;\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"mod foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"macro_rules! 
otry(\n ($e:expr) => (match $e { Some(e) => e, None => return })\n )\", slice(src, it.next().unwrap()));\n assert_eq!(\"mod bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_macro_invocation() {\n let src = \"\n mod foo;\n local_data_key!(local_stdout: Box<Writer + Send>) \/\/ no ';'\n mod bar;\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"mod foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"local_data_key!(local_stdout: Box<Writer + Send>)\", slice(src, it.next().unwrap()));\n assert_eq!(\"mod bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_if_else_stmt() {\n let src = \"\n if self.pos < 3 { } else { }\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"if self.pos < 3 { }\", slice(src, it.next().unwrap()));\n assert_eq!(\"else { }\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_inner_scope() {\n let src = \"\n while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n }\n {\n b = foo; \/\/ but not this\n }\n \";\n\n let scope = &src[29..];\n let mut it = iter_stmts(scope);\n\n assert_eq!(\"let a = 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(\"return a + 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(None, it.next());\n }\n\n #[test]\n fn iterates_module_attribute() {\n let src = &rejustify(\"\n #![license = \\\"BSD\\\"]\n #[test]\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"#![license = \\\"BSD\\\"]\", slice(src, it.next().unwrap()));\n assert_eq!(\"#[test]\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_half_open_subscope_if_is_the_last_thing() {\n let src = \"\n let something = 35;\n while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n \";\n\n let scope = src;\n let mut it = iter_stmts(scope);\n assert_eq!(\"let something = 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(\"while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n \", slice(scope, it.next().unwrap()));\n }\n\n}\n<commit_msg>codeiter: simplify is_a_use_stmt a little<commit_after>use codecleaner::{code_chunks, CodeIndicesIter};\n\npub struct StmtIndicesIter<'a> {\n src: &'a str,\n it: CodeIndicesIter<'a>,\n pos: usize,\n end: usize\n}\n\nimpl<'a> Iterator for StmtIndicesIter<'a> {\n type Item = (usize, usize);\n\n #[inline]\n fn next(&mut self) -> Option<(usize, usize)> {\n let src_bytes = self.src.as_bytes();\n let mut enddelim = b';';\n let mut bracelevel = 0u16;\n let mut parenlevel = 0i32;\n let mut start = self.pos;\n\n \/\/ loop on all code_chunks until we find a relevant open\/close pattern\n loop {\n \/\/ do we need the next chunk?\n if self.end == self.pos {\n \/\/ get the next chunk of code\n match self.it.next() {\n Some((ch_start, ch_end)) => {\n self.end = ch_end;\n if start == self.pos { start = ch_start; }\n self.pos = ch_start;\n }\n None => {\n \/\/ no more chunks. 
finished\n return if start < self.end { Some((start, self.end)) }\n else { None }\n }\n }\n }\n\n if start == self.pos {\n \/\/ if this is a new stmt block, skip the whitespace\n for &b in &src_bytes[self.pos..self.end] {\n match b {\n b' ' | b'\\r' | b'\\n' | b'\\t' => { self.pos += 1; },\n _ => { break; }\n }\n }\n start = self.pos;\n\n \/\/ test attribute #[foo = bar]\n if self.pos<self.end && src_bytes[self.pos] == b'#' {\n enddelim = b']'\n };\n }\n\n \/\/ iterate through the chunk, looking for stmt end\n for &b in &src_bytes[self.pos..self.end] {\n self.pos += 1;\n\n match b {\n b'(' => { parenlevel += 1; },\n b')' => { parenlevel -= 1; },\n b'{' => {\n \/\/ if we are top level and stmt is not a 'use' then\n \/\/ closebrace finishes the stmt\n if bracelevel == 0 && parenlevel == 0\n && !is_a_use_stmt(src_bytes, start, self.pos) {\n enddelim = b'}';\n }\n bracelevel += 1;\n },\n b'}' => {\n \/\/ have we reached the end of the scope?\n if bracelevel == 0 { return None; }\n bracelevel -= 1;\n },\n b'!' => {\n \/\/ macro if followed by at least one space or (\n \/\/ FIXME: test with boolean 'not' expression\n if parenlevel == 0 && bracelevel == 0\n && self.pos < self.end && (self.pos-start) > 1 {\n match src_bytes[self.pos] {\n b' ' | b'\\r' | b'\\n' | b'\\t' | b'(' => {\n enddelim = b')';\n },\n _ => {}\n }\n }\n }\n _ => {}\n }\n\n if enddelim == b && bracelevel == 0 && parenlevel == 0 {\n return Some((start, self.pos));\n }\n }\n }\n }\n}\n\nfn is_a_use_stmt(src_bytes: &[u8], start: usize, pos: usize) -> bool {\n let whitespace = b\" {\\t\\r\\n\";\n (pos > 3 && &src_bytes[start..start+3] == b\"use\" &&\n whitespace.contains(&src_bytes[start+3])) ||\n (pos > 7 && &src_bytes[start..(start+7)] == b\"pub use\" &&\n whitespace.contains(&src_bytes[start+7]))\n}\n\npub fn iter_stmts(src: &str) -> StmtIndicesIter {\n StmtIndicesIter{ src: src, it: code_chunks(src), pos: 0, end: 0 }\n}\n\n#[cfg(test)]\nmod test {\n\n use testutils::{rejustify, slice};\n use super::*;\n\n #[test]\n fn iterates_single_use_stmts() {\n let src = &rejustify(\"\n use std::Foo; \/\/ a comment\n use std::Bar;\n \");\n\n let mut it = iter_stmts(src);\n assert_eq!(\"use std::Foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"use std::Bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_use_stmt_over_two_lines() {\n let src = &rejustify(\"\n use std::{Foo,\n Bar}; \/\/ a comment\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"use std::{Foo,\n Bar};\",slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_use_stmt_without_the_prefix() {\n let src = &rejustify(\"\n pub use {Foo,\n Bar}; \/\/ this is also legit apparently\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"pub use {Foo,\n Bar};\", slice(src, it.next().unwrap())\n );\n }\n\n #[test]\n fn iterates_while_stmt() {\n let src = &rejustify(\"\n while self.pos < 3 { }\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"while self.pos < 3 { }\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_lambda_arg() {\n let src = &rejustify(\"\n myfn(|n|{});\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"myfn(|n|{});\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_macro() {\n let src = \"\n mod foo;\n macro_rules! otry(\n ($e:expr) => (match $e { Some(e) => e, None => return })\n )\n mod bar;\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"mod foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"macro_rules! 
otry(\n ($e:expr) => (match $e { Some(e) => e, None => return })\n )\", slice(src, it.next().unwrap()));\n assert_eq!(\"mod bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_macro_invocation() {\n let src = \"\n mod foo;\n local_data_key!(local_stdout: Box<Writer + Send>) \/\/ no ';'\n mod bar;\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"mod foo;\", slice(src, it.next().unwrap()));\n assert_eq!(\"local_data_key!(local_stdout: Box<Writer + Send>)\", slice(src, it.next().unwrap()));\n assert_eq!(\"mod bar;\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_if_else_stmt() {\n let src = \"\n if self.pos < 3 { } else { }\n \";\n let mut it = iter_stmts(src);\n assert_eq!(\"if self.pos < 3 { }\", slice(src, it.next().unwrap()));\n assert_eq!(\"else { }\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_inner_scope() {\n let src = \"\n while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n }\n {\n b = foo; \/\/ but not this\n }\n \";\n\n let scope = &src[29..];\n let mut it = iter_stmts(scope);\n\n assert_eq!(\"let a = 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(\"return a + 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(None, it.next());\n }\n\n #[test]\n fn iterates_module_attribute() {\n let src = &rejustify(\"\n #![license = \\\"BSD\\\"]\n #[test]\n \");\n let mut it = iter_stmts(src);\n assert_eq!(\"#![license = \\\"BSD\\\"]\", slice(src, it.next().unwrap()));\n assert_eq!(\"#[test]\", slice(src, it.next().unwrap()));\n }\n\n #[test]\n fn iterates_half_open_subscope_if_is_the_last_thing() {\n let src = \"\n let something = 35;\n while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n \";\n\n let scope = src;\n let mut it = iter_stmts(scope);\n assert_eq!(\"let something = 35;\", slice(scope, it.next().unwrap()));\n assert_eq!(\"while self.pos < 3 {\n let a = 35;\n return a + 35; \/\/ should iterate this\n \", slice(scope, it.next().unwrap()));\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before>use std::{run};\nuse std::io::fs::rmdir;\n\nstatic exe: &'static str = \"build\/mkdir\";\nstatic test_dir1: &'static str = \"mkdir_test1\";\nstatic test_dir2: &'static str = \"mkdir_test2\";\nstatic test_dir3: &'static str = \"mkdir_test3\";\nstatic test_dir4: &'static str = \"mkdir_test4\/mkdir_test4_1\";\nstatic test_dir5: &'static str = \"mkdir_test5\/mkdir_test5_1\";\n\nfn cleanup(dir: &'static str) {\n let d = dir.into_owned();\n let p = Path::new(d.into_owned());\n if p.exists() {\n rmdir(&p);\n }\n}\n\n#[test]\nfn test_mkdir_mkdir() {\n cleanup(test_dir1);\n let prog = run::process_status(exe.into_owned(), [test_dir1.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir1);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_dup_dir() {\n cleanup(test_dir2);\n let prog = run::process_status(exe.into_owned(), [test_dir2.into_owned()]);\n let exit_success = prog.unwrap().success();\n if !exit_success {\n cleanup(test_dir2);\n fail!();\n }\n let prog2 = run::process_status(exe.into_owned(), [test_dir2.into_owned()]);\n let exit_success2 = prog2.unwrap().success();\n cleanup(test_dir2);\n assert_eq!(exit_success2, false);\n}\n\n#[test]\nfn test_mkdir_mode() {\n cleanup(test_dir3);\n let prog = run::process_status(exe.into_owned(), [~\"-m\", ~\"755\", test_dir3.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir3);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_parent() {\n cleanup(test_dir4);\n 
let prog = run::process_status(exe.into_owned(), [~\"-p\", test_dir4.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir4);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_no_parent() {\n cleanup(test_dir5);\n let prog = run::process_status(exe.into_owned(), [test_dir5.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir5);\n assert_eq!(exit_success, false);\n}\n<commit_msg>update test dir paths into tmp dir<commit_after>use std::{run};\nuse std::io::fs::rmdir;\n\nstatic exe: &'static str = \"build\/mkdir\";\nstatic test_dir1: &'static str = \"tmp\/mkdir_test1\";\nstatic test_dir2: &'static str = \"tmp\/mkdir_test2\";\nstatic test_dir3: &'static str = \"tmp\/mkdir_test3\";\nstatic test_dir4: &'static str = \"tmp\/mkdir_test4\/mkdir_test4_1\";\nstatic test_dir5: &'static str = \"tmp\/mkdir_test5\/mkdir_test5_1\";\n\nfn cleanup(dir: &'static str) {\n let d = dir.into_owned();\n let p = Path::new(d.into_owned());\n if p.exists() {\n rmdir(&p);\n }\n}\n\n#[test]\nfn test_mkdir_mkdir() {\n cleanup(test_dir1);\n let prog = run::process_status(exe.into_owned(), [test_dir1.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir1);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_dup_dir() {\n cleanup(test_dir2);\n let prog = run::process_status(exe.into_owned(), [test_dir2.into_owned()]);\n let exit_success = prog.unwrap().success();\n if !exit_success {\n cleanup(test_dir2);\n fail!();\n }\n let prog2 = run::process_status(exe.into_owned(), [test_dir2.into_owned()]);\n let exit_success2 = prog2.unwrap().success();\n cleanup(test_dir2);\n assert_eq!(exit_success2, false);\n}\n\n#[test]\nfn test_mkdir_mode() {\n cleanup(test_dir3);\n let prog = run::process_status(exe.into_owned(), [~\"-m\", ~\"755\", test_dir3.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir3);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_parent() {\n cleanup(test_dir4);\n let prog = run::process_status(exe.into_owned(), [~\"-p\", test_dir4.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir4);\n assert_eq!(exit_success, true);\n}\n\n#[test]\nfn test_mkdir_no_parent() {\n cleanup(test_dir5);\n let prog = run::process_status(exe.into_owned(), [test_dir5.into_owned()]);\n let exit_success = prog.unwrap().success();\n cleanup(test_dir5);\n assert_eq!(exit_success, false);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] bin\/core\/git: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added text fields example.<commit_after>\/*\n * Copyright (c) 2017 Boucher, Antoni <bouanto@zoho.com>\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy of\n * this software and associated documentation files (the \"Software\"), to deal in\n * the Software without restriction, including without limitation the rights to\n * use, copy, modify, merge, publish, distribute, sublicense, and\/or sell copies of\n * the Software, and to permit persons to whom the Software is furnished to do so,\n * subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n *\/\n\nextern crate futures;\nextern crate gtk;\n#[macro_use]\nextern crate relm;\n\nuse futures::Future;\nuse futures::future::ok;\nuse gtk::{ContainerExt, EditableSignals, Entry, EntryExt, Label, WidgetExt, Window, WindowType};\nuse gtk::Orientation::Vertical;\nuse relm::{QuitFuture, Relm, UnitFuture, Widget};\n\nuse self::Msg::*;\n\n#[derive(Clone, Debug)]\nstruct Model {\n content: String,\n}\n\n#[derive(Clone)]\nenum Msg {\n Change,\n Quit,\n}\n\nstruct Widgets {\n input: Entry,\n label: Label,\n window: Window,\n}\n\nstruct Win {\n model: Model,\n widgets: Widgets,\n}\n\nimpl Win {\n fn view() -> Widgets {\n let vbox = gtk::Box::new(Vertical, 0);\n\n let input = Entry::new();\n vbox.add(&input);\n\n let label = Label::new(None);\n vbox.add(&label);\n\n let window = Window::new(WindowType::Toplevel);\n\n window.add(&vbox);\n\n window.show_all();\n\n Widgets {\n input: input,\n label: label,\n window: window,\n }\n }\n}\n\nimpl Widget<Msg> for Win {\n fn connect_events(&self, relm: &Relm<Msg>) {\n connect!(relm, self.widgets.input, connect_changed(_), Change);\n connect_no_inhibit!(relm, self.widgets.window, connect_delete_event(_, _), Quit);\n }\n\n fn new() -> Self {\n Win {\n model: Model {\n content: String::new(),\n },\n widgets: Self::view(),\n }\n }\n\n fn update(&mut self, event: Msg) -> UnitFuture {\n match event {\n Change => {\n self.model.content = self.widgets.input.get_text().unwrap().chars().rev().collect();\n self.widgets.label.set_text(&self.model.content);\n },\n Quit => return QuitFuture.boxed(),\n }\n\n ok(()).boxed()\n }\n}\n\nfn main() {\n Relm::run::<Win>().unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Server Requests\n\/\/!\n\/\/! These are requests that a `hyper::Server` receives, and include its method,\n\/\/! 
target URI, headers, and message body.\nuse std::io::IoResult;\nuse std::io::net::ip::SocketAddr;\n\nuse {HttpResult};\nuse version::{HttpVersion};\nuse method::Method::{self, Get, Head};\nuse header::Headers;\nuse header::common::{ContentLength, TransferEncoding};\nuse http::{read_request_line};\nuse http::HttpReader;\nuse http::HttpReader::{SizedReader, ChunkedReader, EmptyReader};\nuse uri::RequestUri;\n\n\/\/\/ A request bundles several parts of an incoming `NetworkStream`, given to a `Handler`.\npub struct Request<'a> {\n \/\/\/ The IP address of the remote connection.\n pub remote_addr: SocketAddr,\n \/\/\/ The `Method`, such as `Get`, `Post`, etc.\n pub method: Method,\n \/\/\/ The headers of the incoming request.\n pub headers: Headers,\n \/\/\/ The target request-uri for this request.\n pub uri: RequestUri,\n \/\/\/ The version of HTTP for this request.\n pub version: HttpVersion,\n body: HttpReader<&'a mut (Reader + 'a)>\n}\n\n\nimpl<'a> Request<'a> {\n\n \/\/\/ Create a new Request, reading the StartLine and Headers so they are\n \/\/\/ immediately useful.\n pub fn new(mut stream: &'a mut (Reader + 'a), addr: SocketAddr) -> HttpResult<Request<'a>> {\n let (method, uri, version) = try!(read_request_line(&mut stream));\n debug!(\"Request Line: {} {} {}\", method, uri, version);\n let headers = try!(Headers::from_raw(&mut stream));\n debug!(\"Headers: [\\n{}]\", headers);\n\n\n let body = if method == Get || method == Head {\n EmptyReader(stream)\n } else if headers.has::<ContentLength>() {\n match headers.get::<ContentLength>() {\n Some(&ContentLength(len)) => SizedReader(stream, len),\n None => unreachable!()\n }\n } else if headers.has::<TransferEncoding>() {\n todo!(\"check for Transfer-Encoding: chunked\");\n ChunkedReader(stream, None)\n } else {\n EmptyReader(stream)\n };\n\n Ok(Request {\n remote_addr: addr,\n method: method,\n uri: uri,\n headers: headers,\n version: version,\n body: body\n })\n }\n}\n\nimpl<'a> Reader for Request<'a> {\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n self.body.read(buf)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use mock::MockStream;\n use super::Request;\n\n use std::io::net::ip::SocketAddr;\n\n fn sock(s: &str) -> SocketAddr {\n s.parse().unwrap()\n }\n\n #[test]\n fn test_get_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n GET \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n\n #[test]\n fn test_head_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n HEAD \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n\n #[test]\n fn test_post_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n}\n<commit_msg>Add regression tests for chunked request parsing<commit_after>\/\/! Server Requests\n\/\/!\n\/\/! These are requests that a `hyper::Server` receives, and include its method,\n\/\/! 
target URI, headers, and message body.\nuse std::io::IoResult;\nuse std::io::net::ip::SocketAddr;\n\nuse {HttpResult};\nuse version::{HttpVersion};\nuse method::Method::{self, Get, Head};\nuse header::Headers;\nuse header::common::{ContentLength, TransferEncoding};\nuse http::{read_request_line};\nuse http::HttpReader;\nuse http::HttpReader::{SizedReader, ChunkedReader, EmptyReader};\nuse uri::RequestUri;\n\n\/\/\/ A request bundles several parts of an incoming `NetworkStream`, given to a `Handler`.\npub struct Request<'a> {\n \/\/\/ The IP address of the remote connection.\n pub remote_addr: SocketAddr,\n \/\/\/ The `Method`, such as `Get`, `Post`, etc.\n pub method: Method,\n \/\/\/ The headers of the incoming request.\n pub headers: Headers,\n \/\/\/ The target request-uri for this request.\n pub uri: RequestUri,\n \/\/\/ The version of HTTP for this request.\n pub version: HttpVersion,\n body: HttpReader<&'a mut (Reader + 'a)>\n}\n\n\nimpl<'a> Request<'a> {\n\n \/\/\/ Create a new Request, reading the StartLine and Headers so they are\n \/\/\/ immediately useful.\n pub fn new(mut stream: &'a mut (Reader + 'a), addr: SocketAddr) -> HttpResult<Request<'a>> {\n let (method, uri, version) = try!(read_request_line(&mut stream));\n debug!(\"Request Line: {} {} {}\", method, uri, version);\n let headers = try!(Headers::from_raw(&mut stream));\n debug!(\"Headers: [\\n{}]\", headers);\n\n\n let body = if method == Get || method == Head {\n EmptyReader(stream)\n } else if headers.has::<ContentLength>() {\n match headers.get::<ContentLength>() {\n Some(&ContentLength(len)) => SizedReader(stream, len),\n None => unreachable!()\n }\n } else if headers.has::<TransferEncoding>() {\n todo!(\"check for Transfer-Encoding: chunked\");\n ChunkedReader(stream, None)\n } else {\n EmptyReader(stream)\n };\n\n Ok(Request {\n remote_addr: addr,\n method: method,\n uri: uri,\n headers: headers,\n version: version,\n body: body\n })\n }\n}\n\nimpl<'a> Reader for Request<'a> {\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n self.body.read(buf)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::common::{Host, TransferEncoding};\n use header::common::transfer_encoding::Encoding;\n use mock::MockStream;\n use super::Request;\n\n use std::io::net::ip::SocketAddr;\n\n fn sock(s: &str) -> SocketAddr {\n s.parse().unwrap()\n }\n\n #[test]\n fn test_get_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n GET \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n\n #[test]\n fn test_head_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n HEAD \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n\n #[test]\n fn test_post_empty_body() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n \\r\\n\\\n I'm a bad request.\\r\\n\\\n \");\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n assert_eq!(req.read_to_string(), Ok(\"\".to_string()));\n }\n\n #[test]\n fn test_parse_chunked_request() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n 
\\r\\n\\\n 1\\r\\n\\\n q\\r\\n\\\n 2\\r\\n\\\n we\\r\\n\\\n 2\\r\\n\\\n rt\\r\\n\\\n 0\\r\\n\\\n \\r\\n\"\n );\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n\n \/\/ The headers are correct?\n match req.headers.get::<Host>() {\n Some(host) => {\n assert_eq!(\"example.domain\", host.hostname);\n },\n None => panic!(\"Host header expected!\"),\n };\n match req.headers.get::<TransferEncoding>() {\n Some(encodings) => {\n assert_eq!(1, encodings.len());\n assert_eq!(Encoding::Chunked, encodings[0]);\n }\n None => panic!(\"Transfer-Encoding: chunked expected!\"),\n };\n \/\/ The content is correctly read?\n let body = req.read_to_string().unwrap();\n assert_eq!(\"qwert\", body);\n }\n\n \/\/\/ Tests that when a chunk size is not a valid radix-16 number, an error\n \/\/\/ is returned.\n #[test]\n fn test_invalid_chunk_size_not_hex_digit() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n \\r\\n\\\n X\\r\\n\\\n 1\\r\\n\\\n 0\\r\\n\\\n \\r\\n\"\n );\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n\n assert!(req.read_to_string().is_err());\n }\n\n \/\/\/ Tests that when a chunk size contains an invalid extension, an error is\n \/\/\/ returned.\n #[test]\n fn test_invalid_chunk_size_extension() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n \\r\\n\\\n 1 this is an invalid extension\\r\\n\\\n 1\\r\\n\\\n 0\\r\\n\\\n \\r\\n\"\n );\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n\n assert!(req.read_to_string().is_err());\n }\n\n \/\/\/ Tests that when a valid extension that contains a digit is appended to\n \/\/\/ the chunk size, the chunk is correctly read.\n #[test]\n fn test_chunk_size_with_extension() {\n let mut stream = MockStream::with_input(b\"\\\n POST \/ HTTP\/1.1\\r\\n\\\n Host: example.domain\\r\\n\\\n Transfer-Encoding: chunked\\r\\n\\\n \\r\\n\\\n 1;this is an extension with a digit 1\\r\\n\\\n 1\\r\\n\\\n 0\\r\\n\\\n \\r\\n\"\n );\n\n let mut req = Request::new(&mut stream, sock(\"127.0.0.1:80\")).unwrap();\n\n assert_eq!(\"1\", req.read_to_string().unwrap())\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove scope<commit_after><|endoftext|>"} {"text":"<commit_before>pub use self::fs::FileAbstraction;\n\n\/\/ TODO:\n\/\/ This whole thing can be written better with a trait based mechanism that is embedded into the\n\/\/ store. However it would mean rewriting most things to be generic which can be a pain in the ass.\n\n#[cfg(test)]\nmod fs {\n use error::StoreError as SE;\n use error::StoreErrorKind as SEK;\n use std::io::Cursor;\n use std::path::PathBuf;\n\n use libimagerror::into::IntoError;\n\n use std::collections::HashMap;\n use std::sync::Mutex;\n\n lazy_static! 
{\n static ref MAP: Mutex<HashMap<PathBuf, Cursor<Vec<u8>>>> = {\n Mutex::new(HashMap::new())\n };\n }\n\n \/\/\/ `FileAbstraction` type, this is the Test version!\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub enum FileAbstraction {\n Absent(PathBuf),\n }\n\n impl FileAbstraction {\n\n \/**\n * Get the mutable file behind a FileAbstraction object\n *\/\n pub fn get_file_content(&mut self) -> Result<Cursor<Vec<u8>>, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n match *self {\n FileAbstraction::Absent(ref f) => {\n let map = MAP.lock().unwrap();\n return map.get(f).cloned().ok_or(SEK::FileNotFound.into_error());\n },\n };\n }\n\n pub fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n match *self {\n FileAbstraction::Absent(ref f) => {\n let mut map = MAP.lock().unwrap();\n if let Some(ref mut cur) = map.get_mut(f) {\n let mut vec = cur.get_mut();\n vec.clear();\n vec.extend_from_slice(buf);\n return Ok(());\n }\n let vec = Vec::from(buf);\n map.insert(f.clone(), Cursor::new(vec));\n return Ok(());\n },\n };\n }\n\n pub fn remove_file(_: &PathBuf) -> Result<(), SE> {\n Ok(())\n }\n\n pub fn copy(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n let mut map = MAP.lock().unwrap();\n let a = map.get(from).unwrap().clone();\n map.insert(to.clone(), a);\n Ok(())\n }\n\n pub fn rename(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n let mut map = MAP.lock().unwrap();\n let a = map.get(from).unwrap().clone();\n map.insert(to.clone(), a);\n Ok(())\n }\n\n pub fn create_dir_all(_: &PathBuf) -> Result<(), SE> {\n Ok(())\n }\n }\n}\n\n#[cfg(not(test))]\nmod fs {\n use error::{MapErrInto, StoreError as SE, StoreErrorKind as SEK};\n use std::io::{Seek, SeekFrom, Read};\n use std::path::{Path, PathBuf};\n use std::fs::{File, OpenOptions, create_dir_all, remove_file, copy, rename};\n\n \/\/\/ `FileAbstraction` type\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub enum FileAbstraction {\n Absent(PathBuf),\n File(File, PathBuf)\n }\n\n fn open_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n OpenOptions::new().write(true).read(true).open(p)\n }\n\n fn create_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n if let Some(parent) = p.as_ref().parent() {\n debug!(\"Implicitely creating directory: {:?}\", parent);\n if let Err(e) = create_dir_all(parent) {\n return Err(e);\n }\n }\n OpenOptions::new().write(true).read(true).create(true).open(p)\n }\n\n impl FileAbstraction {\n\n \/**\n * Get the content behind this file\n *\/\n pub fn get_file_content(&mut self) -> Result<&mut Read, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n let (file, path) = match *self {\n FileAbstraction::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotSeeked));\n Ok(f)\n },\n FileAbstraction::Absent(ref p) => (try!(open_file(p).map_err_into(SEK::FileNotFound)),\n p.clone()),\n };\n *self = FileAbstraction::File(file, path);\n if let FileAbstraction::File(ref mut f, _) = *self {\n return Ok(f);\n }\n unreachable!()\n }\n\n \/**\n * Write the content of this file\n *\/\n pub fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n use std::io::Write;\n let (file, path) = match *self {\n FileAbstraction::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the 
file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotCreated));\n f.write_all(buf).map_err_into(SEK::FileNotWritten)\n },\n FileAbstraction::Absent(ref p) => (try!(create_file(p).map_err_into(SEK::FileNotCreated)),\n p.clone()),\n };\n *self = FileAbstraction::File(file, path);\n if let FileAbstraction::File(ref mut f, _) = *self {\n return f.write_all(buf).map_err_into(SEK::FileNotWritten);\n }\n unreachable!();\n }\n\n pub fn remove_file(path: &PathBuf) -> Result<(), SE> {\n remove_file(path).map_err_into(SEK::FileNotRemoved)\n }\n\n pub fn copy(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n copy(from, to).map_err_into(SEK::FileNotCopied).map(|_| ())\n }\n\n pub fn rename(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n rename(from, to).map_err_into(SEK::FileNotRenamed)\n }\n\n pub fn create_dir_all(path: &PathBuf) -> Result<(), SE> {\n create_dir_all(path).map_err_into(SEK::DirNotCreated)\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::FileAbstraction;\n use std::io::Read;\n use std::path::PathBuf;\n\n #[test]\n fn lazy_file() {\n let mut path = PathBuf::from(\"\/tests\");\n path.set_file_name(\"test1\");\n let mut lf = FileAbstraction::Absent(path);\n lf.write_file_content(b\"Hello World\").unwrap();\n let mut bah = Vec::new();\n lf.get_file_content().unwrap().read_to_end(&mut bah).unwrap();\n assert_eq!(bah, b\"Hello World\");\n }\n\n}\n<commit_msg>Fix: FileAbstraction::remove_file(): Add implementation<commit_after>pub use self::fs::FileAbstraction;\n\n\/\/ TODO:\n\/\/ This whole thing can be written better with a trait based mechanism that is embedded into the\n\/\/ store. However it would mean rewriting most things to be generic which can be a pain in the ass.\n\n#[cfg(test)]\nmod fs {\n use error::StoreError as SE;\n use error::StoreErrorKind as SEK;\n use std::io::Cursor;\n use std::path::PathBuf;\n\n use libimagerror::into::IntoError;\n\n use std::collections::HashMap;\n use std::sync::Mutex;\n\n lazy_static! 
{\n static ref MAP: Mutex<HashMap<PathBuf, Cursor<Vec<u8>>>> = {\n Mutex::new(HashMap::new())\n };\n }\n\n \/\/\/ `FileAbstraction` type, this is the Test version!\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub enum FileAbstraction {\n Absent(PathBuf),\n }\n\n impl FileAbstraction {\n\n \/**\n * Get the mutable file behind a FileAbstraction object\n *\/\n pub fn get_file_content(&mut self) -> Result<Cursor<Vec<u8>>, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n match *self {\n FileAbstraction::Absent(ref f) => {\n let map = MAP.lock().unwrap();\n return map.get(f).cloned().ok_or(SEK::FileNotFound.into_error());\n },\n };\n }\n\n pub fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n match *self {\n FileAbstraction::Absent(ref f) => {\n let mut map = MAP.lock().unwrap();\n if let Some(ref mut cur) = map.get_mut(f) {\n let mut vec = cur.get_mut();\n vec.clear();\n vec.extend_from_slice(buf);\n return Ok(());\n }\n let vec = Vec::from(buf);\n map.insert(f.clone(), Cursor::new(vec));\n return Ok(());\n },\n };\n }\n\n pub fn remove_file(path: &PathBuf) -> Result<(), SE> {\n MAP.lock().unwrap().remove(path);\n Ok(())\n }\n\n pub fn copy(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n let mut map = MAP.lock().unwrap();\n let a = map.get(from).unwrap().clone();\n map.insert(to.clone(), a);\n Ok(())\n }\n\n pub fn rename(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n let mut map = MAP.lock().unwrap();\n let a = map.get(from).unwrap().clone();\n map.insert(to.clone(), a);\n Ok(())\n }\n\n pub fn create_dir_all(_: &PathBuf) -> Result<(), SE> {\n Ok(())\n }\n }\n}\n\n#[cfg(not(test))]\nmod fs {\n use error::{MapErrInto, StoreError as SE, StoreErrorKind as SEK};\n use std::io::{Seek, SeekFrom, Read};\n use std::path::{Path, PathBuf};\n use std::fs::{File, OpenOptions, create_dir_all, remove_file, copy, rename};\n\n \/\/\/ `FileAbstraction` type\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub enum FileAbstraction {\n Absent(PathBuf),\n File(File, PathBuf)\n }\n\n fn open_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n OpenOptions::new().write(true).read(true).open(p)\n }\n\n fn create_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n if let Some(parent) = p.as_ref().parent() {\n debug!(\"Implicitely creating directory: {:?}\", parent);\n if let Err(e) = create_dir_all(parent) {\n return Err(e);\n }\n }\n OpenOptions::new().write(true).read(true).create(true).open(p)\n }\n\n impl FileAbstraction {\n\n \/**\n * Get the content behind this file\n *\/\n pub fn get_file_content(&mut self) -> Result<&mut Read, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n let (file, path) = match *self {\n FileAbstraction::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotSeeked));\n Ok(f)\n },\n FileAbstraction::Absent(ref p) => (try!(open_file(p).map_err_into(SEK::FileNotFound)),\n p.clone()),\n };\n *self = FileAbstraction::File(file, path);\n if let FileAbstraction::File(ref mut f, _) = *self {\n return Ok(f);\n }\n unreachable!()\n }\n\n \/**\n * Write the content of this file\n *\/\n pub fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n use std::io::Write;\n let (file, path) = match *self {\n FileAbstraction::File(ref mut f, _) => return 
{\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotCreated));\n f.write_all(buf).map_err_into(SEK::FileNotWritten)\n },\n FileAbstraction::Absent(ref p) => (try!(create_file(p).map_err_into(SEK::FileNotCreated)),\n p.clone()),\n };\n *self = FileAbstraction::File(file, path);\n if let FileAbstraction::File(ref mut f, _) = *self {\n return f.write_all(buf).map_err_into(SEK::FileNotWritten);\n }\n unreachable!();\n }\n\n pub fn remove_file(path: &PathBuf) -> Result<(), SE> {\n remove_file(path).map_err_into(SEK::FileNotRemoved)\n }\n\n pub fn copy(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n copy(from, to).map_err_into(SEK::FileNotCopied).map(|_| ())\n }\n\n pub fn rename(from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n rename(from, to).map_err_into(SEK::FileNotRenamed)\n }\n\n pub fn create_dir_all(path: &PathBuf) -> Result<(), SE> {\n create_dir_all(path).map_err_into(SEK::DirNotCreated)\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::FileAbstraction;\n use std::io::Read;\n use std::path::PathBuf;\n\n #[test]\n fn lazy_file() {\n let mut path = PathBuf::from(\"\/tests\");\n path.set_file_name(\"test1\");\n let mut lf = FileAbstraction::Absent(path);\n lf.write_file_content(b\"Hello World\").unwrap();\n let mut bah = Vec::new();\n lf.get_file_content().unwrap().read_to_end(&mut bah).unwrap();\n assert_eq!(bah, b\"Hello World\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: pass `--target` to `rustdoc` even if specified with host target<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for Pred<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{fmt};\nuse os::token::Token;\n\n#[deriving(Copy, PartialEq, Eq, Clone, PartialOrd, Ord)]\npub struct PollOpt(uint);\n\npub const EDGE: PollOpt = PollOpt(0x020);\npub const LEVEL: PollOpt = PollOpt(0x040);\npub const ONESHOT: PollOpt = PollOpt(0x080);\n\nimpl PollOpt {\n #[inline]\n pub fn edge() -> PollOpt {\n EDGE | ONESHOT\n }\n\n #[inline]\n pub fn empty() -> PollOpt {\n PollOpt(0)\n }\n\n #[inline]\n pub fn all() -> PollOpt {\n EDGE | LEVEL | ONESHOT\n }\n\n #[inline]\n pub fn bits(&self) -> uint {\n let PollOpt(bits) = *self;\n bits\n }\n\n #[inline]\n pub fn contains(&self, other: PollOpt) -> bool {\n (*self & other) == other\n }\n}\n\nimpl BitOr<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitor(&self, other: &PollOpt) -> PollOpt {\n PollOpt(self.bits() | other.bits())\n }\n}\n\nimpl BitXor<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitxor(&self, other: &PollOpt) -> PollOpt {\n PollOpt(self.bits() ^ other.bits())\n }\n}\n\nimpl BitAnd<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitand(&self, other: &PollOpt) -> PollOpt {\n PollOpt(self.bits() & other.bits())\n }\n}\n\nimpl Sub<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn sub(&self, other: &PollOpt) -> PollOpt {\n PollOpt(self.bits() & !other.bits())\n }\n}\n\nimpl Not<PollOpt> for PollOpt {\n #[inline]\n fn not(&self) -> PollOpt {\n PollOpt(!self.bits() & PollOpt::all().bits())\n }\n}\n\nimpl fmt::Show for PollOpt {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (EDGE, \"Edge-Triggered\"),\n (LEVEL, \"Level-Triggered\"),\n (ONESHOT, \"OneShot\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n 
}\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags Interest: uint {\n const READABLE = 0x001,\n const WRITABLE = 0x002,\n const ERROR = 0x004,\n const HUP = 0x008,\n const HINTED = 0x010,\n const ALL = 0x001 | 0x002 | 0x008 \/\/epoll checks for ERROR no matter what\n }\n)\n\n\nimpl fmt::Show for Interest {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (READABLE, \"Readable\"),\n (WRITABLE, \"Writable\"),\n (ERROR, \"Error\"),\n (HUP, \"HupHint\"),\n (HINTED, \"Hinted\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags ReadHint: uint {\n const DATAHINT = 0x001,\n const HUPHINT = 0x002,\n const ERRORHINT = 0x004\n }\n)\n\nimpl fmt::Show for ReadHint {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (DATAHINT, \"DataHint\"),\n (HUPHINT, \"HupHint\"),\n (ERRORHINT, \"ErrorHint\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\n\n#[deriving(Copy, Show)]\npub struct IoEvent {\n kind: Interest,\n token: Token\n}\n\n\/\/\/ IoEvent represents the raw event that the OS-specific selector\n\/\/\/ returned. An event can represent more than one kind (such as\n\/\/\/ readable or writable) at a time.\n\/\/\/\n\/\/\/ These IoEvent objects are created by the OS-specific concrete\n\/\/\/ Selector when they have events to report.\nimpl IoEvent {\n \/\/\/ Create a new IoEvent.\n pub fn new(kind: Interest, token: uint) -> IoEvent {\n IoEvent {\n kind: kind,\n token: Token(token)\n }\n }\n\n pub fn token(&self) -> Token {\n self.token\n }\n\n \/\/\/ Return an optional hint for a readable handle. Currently,\n \/\/\/ this method supports the HupHint, which indicates that the\n \/\/\/ kernel reported that the remote side hung up. 
This allows a\n \/\/\/ consumer to avoid reading in order to discover the hangup.\n pub fn read_hint(&self) -> ReadHint {\n let mut hint = ReadHint::empty();\n\n \/\/ The backend doesn't support hinting\n if !self.kind.contains(HINTED) {\n return hint;\n }\n\n if self.kind.contains(HUP) {\n hint = hint | HUPHINT\n }\n\n if self.kind.contains(READABLE) {\n hint = hint | DATAHINT\n }\n\n if self.kind.contains(ERROR) {\n hint = hint | ERRORHINT\n }\n\n hint\n }\n\n \/\/\/ This event indicated that the handle is now readable\n pub fn is_readable(&self) -> bool {\n self.kind.contains(READABLE) || self.kind.contains(HUP)\n }\n\n \/\/\/ This event indicated that the handle is now writable\n pub fn is_writable(&self) -> bool {\n self.kind.contains(WRITABLE)\n }\n\n \/\/\/ This event indicated that the handle had an error\n pub fn is_error(&self) -> bool {\n self.kind.contains(ERROR)\n }\n}\n<commit_msg>event: fix for ops reform.<commit_after>use std::{fmt};\nuse os::token::Token;\n\n#[deriving(Copy, PartialEq, Eq, Clone, PartialOrd, Ord)]\npub struct PollOpt(uint);\n\npub const EDGE: PollOpt = PollOpt(0x020);\npub const LEVEL: PollOpt = PollOpt(0x040);\npub const ONESHOT: PollOpt = PollOpt(0x080);\n\nimpl PollOpt {\n #[inline]\n pub fn edge() -> PollOpt {\n EDGE | ONESHOT\n }\n\n #[inline]\n pub fn empty() -> PollOpt {\n PollOpt(0)\n }\n\n #[inline]\n pub fn all() -> PollOpt {\n EDGE | LEVEL | ONESHOT\n }\n\n #[inline]\n pub fn bits(&self) -> uint {\n let PollOpt(bits) = *self;\n bits\n }\n\n #[inline]\n pub fn contains(&self, other: PollOpt) -> bool {\n (*self & other) == other\n }\n}\n\nimpl BitOr<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() | other.bits())\n }\n}\n\nimpl BitXor<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitxor(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() ^ other.bits())\n }\n}\n\nimpl BitAnd<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn bitand(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & other.bits())\n }\n}\n\nimpl Sub<PollOpt, PollOpt> for PollOpt {\n #[inline]\n fn sub(self, other: PollOpt) -> PollOpt {\n PollOpt(self.bits() & !other.bits())\n }\n}\n\nimpl Not<PollOpt> for PollOpt {\n #[inline]\n fn not(&self) -> PollOpt {\n PollOpt(!self.bits() & PollOpt::all().bits())\n }\n}\n\nimpl fmt::Show for PollOpt {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (EDGE, \"Edge-Triggered\"),\n (LEVEL, \"Level-Triggered\"),\n (ONESHOT, \"OneShot\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags Interest: uint {\n const READABLE = 0x001,\n const WRITABLE = 0x002,\n const ERROR = 0x004,\n const HUP = 0x008,\n const HINTED = 0x010,\n const ALL = 0x001 | 0x002 | 0x008 \/\/epoll checks for ERROR no matter what\n }\n)\n\n\nimpl fmt::Show for Interest {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (READABLE, \"Readable\"),\n (WRITABLE, \"Writable\"),\n (ERROR, \"Error\"),\n (HUP, \"HupHint\"),\n (HINTED, \"Hinted\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\nbitflags!(\n #[deriving(Copy)]\n flags ReadHint: uint {\n const DATAHINT = 0x001,\n const HUPHINT = 0x002,\n const 
ERRORHINT = 0x004\n }\n)\n\nimpl fmt::Show for ReadHint {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let mut one = false;\n let flags = [\n (DATAHINT, \"DataHint\"),\n (HUPHINT, \"HupHint\"),\n (ERRORHINT, \"ErrorHint\")];\n\n for &(flag, msg) in flags.iter() {\n if self.contains(flag) {\n if one { try!(write!(fmt, \" | \")) }\n try!(write!(fmt, \"{}\", msg));\n\n one = true\n }\n }\n\n Ok(())\n }\n}\n\n\n#[deriving(Copy, Show)]\npub struct IoEvent {\n kind: Interest,\n token: Token\n}\n\n\/\/\/ IoEvent represents the raw event that the OS-specific selector\n\/\/\/ returned. An event can represent more than one kind (such as\n\/\/\/ readable or writable) at a time.\n\/\/\/\n\/\/\/ These IoEvent objects are created by the OS-specific concrete\n\/\/\/ Selector when they have events to report.\nimpl IoEvent {\n \/\/\/ Create a new IoEvent.\n pub fn new(kind: Interest, token: uint) -> IoEvent {\n IoEvent {\n kind: kind,\n token: Token(token)\n }\n }\n\n pub fn token(&self) -> Token {\n self.token\n }\n\n \/\/\/ Return an optional hint for a readable handle. Currently,\n \/\/\/ this method supports the HupHint, which indicates that the\n \/\/\/ kernel reported that the remote side hung up. This allows a\n \/\/\/ consumer to avoid reading in order to discover the hangup.\n pub fn read_hint(&self) -> ReadHint {\n let mut hint = ReadHint::empty();\n\n \/\/ The backend doesn't support hinting\n if !self.kind.contains(HINTED) {\n return hint;\n }\n\n if self.kind.contains(HUP) {\n hint = hint | HUPHINT\n }\n\n if self.kind.contains(READABLE) {\n hint = hint | DATAHINT\n }\n\n if self.kind.contains(ERROR) {\n hint = hint | ERRORHINT\n }\n\n hint\n }\n\n \/\/\/ This event indicated that the handle is now readable\n pub fn is_readable(&self) -> bool {\n self.kind.contains(READABLE) || self.kind.contains(HUP)\n }\n\n \/\/\/ This event indicated that the handle is now writable\n pub fn is_writable(&self) -> bool {\n self.kind.contains(WRITABLE)\n }\n\n \/\/\/ This event indicated that the handle had an error\n pub fn is_error(&self) -> bool {\n self.kind.contains(ERROR)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse mem;\nuse ptr;\n\n\/\/\/ Rotation is much faster if it has access to a little bit of memory. 
This\n\/\/\/ union provides a RawVec-like interface, but to a fixed-size stack buffer.\n#[allow(unions_with_drop_fields)]\nunion RawArray<T> {\n \/\/\/ Ensure this is appropriately aligned for T, and is big\n \/\/\/ enough for two elements even if T is enormous.\n typed: [T; 2],\n \/\/\/ For normally-sized types, especially things like u8, having more\n \/\/\/ than 2 in the buffer is necessary for usefulness, so pad it out\n \/\/\/ enough to be helpful, but not so big as to risk overflow.\n _extra: [usize; 32],\n}\n\nimpl<T> RawArray<T> {\n fn new() -> Self {\n unsafe { mem::uninitialized() }\n }\n fn ptr(&self) -> *mut T {\n unsafe { &self.typed as *const T as *mut T }\n }\n fn cap() -> usize {\n if mem::size_of::<T>() == 0 {\n usize::max_value()\n } else {\n mem::size_of::<Self>() \/ mem::size_of::<T>()\n }\n }\n}\n\n\/\/\/ Rotates the range `[mid-left, mid+right)` such that the element at `mid`\n\/\/\/ becomes the first element. Equivalently, rotates the range `left`\n\/\/\/ elements to the left or `right` elements to the right.\n\/\/\/\n\/\/\/ # Safety\n\/\/\/\n\/\/\/ The specified range must be valid for reading and writing.\n\/\/\/ The type `T` must have non-zero size.\n\/\/\/\n\/\/\/ # Algorithm\n\/\/\/\n\/\/\/ For longer rotations, swap the left-most `delta = min(left, right)`\n\/\/\/ elements with the right-most `delta` elements. LLVM vectorizes this,\n\/\/\/ which is profitable as we only reach this step for a \"large enough\"\n\/\/\/ rotation. Doing this puts `delta` elements on the larger side into the\n\/\/\/ correct position, leaving a smaller rotate problem. Demonstration:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ [ 6 7 8 9 10 11 12 13 . 1 2 3 4 5 ]\n\/\/\/ 1 2 3 4 5 [ 11 12 13 . 6 7 8 9 10 ]\n\/\/\/ 1 2 3 4 5 [ 8 9 10 . 6 7 ] 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 [ 10 . 8 9 ] 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 [ 9 . 8 ] 10 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 8 [ . 
] 9 10 11 12 13\n\/\/\/ ```\n\/\/\/\n\/\/\/ Once the rotation is small enough, copy some elements into a stack\n\/\/\/ buffer, `memmove` the others, and move the ones back from the buffer.\npub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {\n loop {\n let delta = cmp::min(left, right);\n if delta <= RawArray::<T>::cap() {\n break;\n }\n\n ptr_swap_n(\n mid.offset(-(left as isize)),\n mid.offset((right-delta) as isize),\n delta);\n\n if left <= right {\n right -= delta;\n } else {\n left -= delta;\n }\n }\n\n let rawarray = RawArray::new();\n let buf = rawarray.ptr();\n\n let dim = mid.offset(-(left as isize)).offset(right as isize);\n if left <= right {\n ptr::copy_nonoverlapping(mid.offset(-(left as isize)), buf, left);\n ptr::copy(mid, mid.offset(-(left as isize)), right);\n ptr::copy_nonoverlapping(buf, dim, left);\n }\n else {\n ptr::copy_nonoverlapping(mid, buf, right);\n ptr::copy(mid.offset(-(left as isize)), dim, left);\n ptr::copy_nonoverlapping(buf, mid.offset(-(left as isize)), right);\n }\n}\n\nunsafe fn ptr_swap_u8(a: *mut u8, b: *mut u8, n: usize) {\n for i in 0..n {\n ptr::swap(a.offset(i as isize), b.offset(i as isize));\n }\n}\nunsafe fn ptr_swap_u16(a: *mut u16, b: *mut u16, n: usize) {\n for i in 0..n {\n ptr::swap(a.offset(i as isize), b.offset(i as isize));\n }\n}\nunsafe fn ptr_swap_u32(a: *mut u32, b: *mut u32, n: usize) {\n for i in 0..n {\n ptr::swap(a.offset(i as isize), b.offset(i as isize));\n }\n}\nunsafe fn ptr_swap_u64(a: *mut u64, b: *mut u64, n: usize) {\n for i in 0..n {\n ptr::swap(a.offset(i as isize), b.offset(i as isize));\n }\n}\n\nunsafe fn ptr_swap_n<T>(a: *mut T, b: *mut T, n: usize) {\n \/\/ Doing this as a generic is 16% & 40% slower in two of the `String`\n \/\/ benchmarks, as (based on the block names) LLVM doesn't vectorize it.\n \/\/ Since this is just operating on raw memory, dispatch to a version\n \/\/ with appropriate alignment. Helps with code size as well, by\n \/\/ avoiding monomorphizing different unrolled loops for `i32`,\n \/\/ `u32`, `f32`, `[u32; 1]`, etc.\n let size_of_t = mem::size_of::<T>();\n let align_of_t = mem::align_of::<T>();\n\n let a64 = mem::align_of::<u64>();\n if a64 == 8 && align_of_t % a64 == 0 {\n return ptr_swap_u64(a as *mut u64, b as *mut u64, n * (size_of_t \/ 8));\n }\n\n let a32 = mem::align_of::<u32>();\n if a32 == 4 && align_of_t % a32 == 0 {\n return ptr_swap_u32(a as *mut u32, b as *mut u32, n * (size_of_t \/ 4));\n }\n\n let a16 = mem::align_of::<u16>();\n if a16 == 2 && align_of_t % a16 == 0 {\n return ptr_swap_u16(a as *mut u16, b as *mut u16, n * (size_of_t \/ 2));\n }\n\n ptr_swap_u8(a as *mut u8, b as *mut u8, n * size_of_t);\n}\n<commit_msg>Remove the optimization in ptr_swap_n<commit_after>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse mem;\nuse ptr;\n\n\/\/\/ Rotation is much faster if it has access to a little bit of memory. 
This\n\/\/\/ union provides a RawVec-like interface, but to a fixed-size stack buffer.\n#[allow(unions_with_drop_fields)]\nunion RawArray<T> {\n \/\/\/ Ensure this is appropriately aligned for T, and is big\n \/\/\/ enough for two elements even if T is enormous.\n typed: [T; 2],\n \/\/\/ For normally-sized types, especially things like u8, having more\n \/\/\/ than 2 in the buffer is necessary for usefulness, so pad it out\n \/\/\/ enough to be helpful, but not so big as to risk overflow.\n _extra: [usize; 32],\n}\n\nimpl<T> RawArray<T> {\n fn new() -> Self {\n unsafe { mem::uninitialized() }\n }\n fn ptr(&self) -> *mut T {\n unsafe { &self.typed as *const T as *mut T }\n }\n fn cap() -> usize {\n if mem::size_of::<T>() == 0 {\n usize::max_value()\n } else {\n mem::size_of::<Self>() \/ mem::size_of::<T>()\n }\n }\n}\n\n\/\/\/ Rotates the range `[mid-left, mid+right)` such that the element at `mid`\n\/\/\/ becomes the first element. Equivalently, rotates the range `left`\n\/\/\/ elements to the left or `right` elements to the right.\n\/\/\/\n\/\/\/ # Safety\n\/\/\/\n\/\/\/ The specified range must be valid for reading and writing.\n\/\/\/ The type `T` must have non-zero size.\n\/\/\/\n\/\/\/ # Algorithm\n\/\/\/\n\/\/\/ For longer rotations, swap the left-most `delta = min(left, right)`\n\/\/\/ elements with the right-most `delta` elements. LLVM vectorizes this,\n\/\/\/ which is profitable as we only reach this step for a \"large enough\"\n\/\/\/ rotation. Doing this puts `delta` elements on the larger side into the\n\/\/\/ correct position, leaving a smaller rotate problem. Demonstration:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ [ 6 7 8 9 10 11 12 13 . 1 2 3 4 5 ]\n\/\/\/ 1 2 3 4 5 [ 11 12 13 . 6 7 8 9 10 ]\n\/\/\/ 1 2 3 4 5 [ 8 9 10 . 6 7 ] 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 [ 10 . 8 9 ] 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 [ 9 . 8 ] 10 11 12 13\n\/\/\/ 1 2 3 4 5 6 7 8 [ . 
] 9 10 11 12 13\n\/\/\/ ```\n\/\/\/\n\/\/\/ Once the rotation is small enough, copy some elements into a stack\n\/\/\/ buffer, `memmove` the others, and move the ones back from the buffer.\npub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {\n loop {\n let delta = cmp::min(left, right);\n if delta <= RawArray::<T>::cap() {\n break;\n }\n\n ptr_swap_n(\n mid.offset(-(left as isize)),\n mid.offset((right-delta) as isize),\n delta);\n\n if left <= right {\n right -= delta;\n } else {\n left -= delta;\n }\n }\n\n let rawarray = RawArray::new();\n let buf = rawarray.ptr();\n\n let dim = mid.offset(-(left as isize)).offset(right as isize);\n if left <= right {\n ptr::copy_nonoverlapping(mid.offset(-(left as isize)), buf, left);\n ptr::copy(mid, mid.offset(-(left as isize)), right);\n ptr::copy_nonoverlapping(buf, dim, left);\n }\n else {\n ptr::copy_nonoverlapping(mid, buf, right);\n ptr::copy(mid.offset(-(left as isize)), dim, left);\n ptr::copy_nonoverlapping(buf, mid.offset(-(left as isize)), right);\n }\n}\n\nunsafe fn ptr_swap_n<T>(a: *mut T, b: *mut T, n: usize) {\n for i in 0..n {\n \/\/ These are nonoverlapping, so use mem::swap instead of ptr::swap\n mem::swap(&mut *a.offset(i as isize), &mut *b.offset(i as isize));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix clippy warning: next() instead of nth(0)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adding buikd.rs<commit_after>extern crate gcc;\n\nfn main () {\n println!(\"cargo:rustc-flags=-l dylib=stdc++\");\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>rust 2<commit_after>fn get_next_fibs((n, m): (u32, u32)) -> (u32, u32) {\n (m, n + m)\n}\n\nfn main () {\n let mut current_fibs = (1, 1);\n let mut sum: u32 = 0;\n\n loop {\n current_fibs = get_next_fibs(current_fibs);\n\n let (n, _) = current_fibs;\n\n if n > 4000000 {\n break;\n }\n\n if n % 2 == 0 {\n sum += n;\n }\n }\n\n println!(\"{}\", sum);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt::Display;\nuse thiserror::Error;\n\n#[derive(Error, Debug)]\n#[error(\"braced error: {msg}\")]\nstruct BracedError {\n msg: String,\n}\n\n#[derive(Error, Debug)]\n#[error(\"braced error\")]\nstruct BracedUnused {\n extra: usize,\n}\n\n#[derive(Error, Debug)]\n#[error(\"tuple error: {0}\")]\nstruct TupleError(usize);\n\n#[derive(Error, Debug)]\n#[error(\"unit error\")]\nstruct UnitError;\n\n#[derive(Error, Debug)]\nenum EnumError {\n #[error(\"braced error: {id}\")]\n Braced { id: usize },\n #[error(\"tuple error: {0}\")]\n Tuple(usize),\n #[error(\"unit error\")]\n Unit,\n}\n\n#[derive(Error, Debug)]\n#[error(\"{MSG}: {id:?} (code {CODE:?})\")]\nstruct WithConstant {\n id: &'static str,\n}\n\nconst MSG: &str = \"failed to do\";\nconst CODE: usize = 9;\n\n#[derive(Error, Debug)]\n#[error(\"{0}\")]\nenum Inherit {\n Unit(UnitError),\n #[error(\"other error\")]\n Other(UnitError),\n}\n\n#[derive(Error, Debug)]\n#[error(\"fn main() {{}}\")]\nstruct Braces;\n\n#[derive(Error, Debug)]\n#[error(\"1 + 1 = {}\", 1 + 1)]\nstruct Arithmetic;\n\n#[derive(Error, Debug)]\n#[error(\"!bool = {}\", not(.0))]\nstruct NestedShorthand(bool);\n\n#[derive(Error, Debug)]\n#[error(\"...\")]\npub enum Void {}\n\nfn not(bool: &bool) -> bool {\n !*bool\n}\n\nfn assert<T: Display>(expected: &str, value: T) {\n assert_eq!(expected, value.to_string());\n}\n\n#[test]\nfn test_display() {\n let msg = \"T\".to_owned();\n assert(\"braced error: T\", BracedError { msg });\n assert(\"braced error\", BracedUnused { extra: 0 });\n assert(\"tuple error: 
0\", TupleError(0));\n assert(\"unit error\", UnitError);\n assert(\"braced error: 0\", EnumError::Braced { id: 0 });\n assert(\"tuple error: 0\", EnumError::Tuple(0));\n assert(\"unit error\", EnumError::Unit);\n assert(\"failed to do: \\\"\\\" (code 9)\", WithConstant { id: \"\" });\n assert(\"unit error\", Inherit::Unit(UnitError));\n assert(\"other error\", Inherit::Other(UnitError));\n assert(\"fn main() {}\", Braces);\n assert(\"1 + 1 = 2\", Arithmetic);\n assert(\"!bool = false\", NestedShorthand(true));\n}\n<commit_msg>Break up test_display into individual test cases<commit_after>use std::fmt::Display;\nuse thiserror::Error;\n\nfn assert<T: Display>(expected: &str, value: T) {\n assert_eq!(expected, value.to_string());\n}\n\n#[test]\nfn test_braced() {\n #[derive(Error, Debug)]\n #[error(\"braced error: {msg}\")]\n struct Error {\n msg: String,\n }\n\n let msg = \"T\".to_owned();\n assert(\"braced error: T\", Error { msg });\n}\n\n#[test]\nfn test_braced_unused() {\n #[derive(Error, Debug)]\n #[error(\"braced error\")]\n struct Error {\n extra: usize,\n }\n\n assert(\"braced error\", Error { extra: 0 });\n}\n\n#[test]\nfn test_tuple() {\n #[derive(Error, Debug)]\n #[error(\"tuple error: {0}\")]\n struct Error(usize);\n\n assert(\"tuple error: 0\", Error(0));\n}\n\n#[test]\nfn test_unit() {\n #[derive(Error, Debug)]\n #[error(\"unit error\")]\n struct Error;\n\n assert(\"unit error\", Error);\n}\n\n#[test]\nfn test_enum() {\n #[derive(Error, Debug)]\n enum Error {\n #[error(\"braced error: {id}\")]\n Braced { id: usize },\n #[error(\"tuple error: {0}\")]\n Tuple(usize),\n #[error(\"unit error\")]\n Unit,\n }\n\n assert(\"braced error: 0\", Error::Braced { id: 0 });\n assert(\"tuple error: 0\", Error::Tuple(0));\n assert(\"unit error\", Error::Unit);\n}\n\n#[test]\nfn test_constants() {\n #[derive(Error, Debug)]\n #[error(\"{MSG}: {id:?} (code {CODE:?})\")]\n struct Error {\n id: &'static str,\n }\n\n const MSG: &str = \"failed to do\";\n const CODE: usize = 9;\n\n assert(\"failed to do: \\\"\\\" (code 9)\", Error { id: \"\" });\n}\n\n#[test]\nfn test_inherit() {\n #[derive(Error, Debug)]\n #[error(\"{0}\")]\n enum Error {\n Some(&'static str),\n #[error(\"other error\")]\n Other(&'static str),\n }\n\n assert(\"some error\", Error::Some(\"some error\"));\n assert(\"other error\", Error::Other(\"...\"));\n}\n\n#[test]\nfn test_brace_escape() {\n #[derive(Error, Debug)]\n #[error(\"fn main() {{}}\")]\n struct Error;\n\n assert(\"fn main() {}\", Error);\n}\n\n#[test]\nfn test_expr() {\n #[derive(Error, Debug)]\n #[error(\"1 + 1 = {}\", 1 + 1)]\n struct Error;\n assert(\"1 + 1 = 2\", Error);\n}\n\n#[test]\nfn test_nested() {\n #[derive(Error, Debug)]\n #[error(\"!bool = {}\", not(.0))]\n struct Error(bool);\n\n fn not(bool: &bool) -> bool {\n !*bool\n }\n\n assert(\"!bool = false\", Error(true));\n}\n\n#[test]\nfn test_void() {\n #[derive(Error, Debug)]\n #[error(\"...\")]\n pub enum Error {}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add At as a DepFn and implement for tuples<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
UDP relay proxy server\n\nuse std::{\n io::{self, Cursor},\n net::{IpAddr, Ipv4Addr, SocketAddr},\n sync::Arc,\n time::Duration,\n};\n\nuse bytes::BytesMut;\nuse futures::{future, future::AbortHandle, stream::FuturesUnordered, StreamExt};\nuse log::{debug, error, info, trace, warn};\nuse lru_time_cache::{Entry, LruCache};\nuse tokio::{\n self,\n net::udp::{RecvHalf, SendHalf},\n sync::{mpsc, Mutex},\n};\n\nuse crate::{\n config::ServerConfig,\n context::{Context, SharedContext},\n relay::{\n flow::{SharedMultiServerFlowStatistic, SharedServerFlowStatistic},\n socks5::Address,\n sys::create_udp_socket,\n utils::try_timeout,\n },\n};\n\nuse super::{\n crypto_io::{decrypt_payload, encrypt_payload},\n DEFAULT_TIMEOUT,\n MAXIMUM_UDP_PAYLOAD_SIZE,\n};\n\n\/\/ Represent a UDP association\nstruct UdpAssociation {\n \/\/ local -> remote Queue\n \/\/ Drops tx, will close local -> remote task\n tx: mpsc::Sender<Vec<u8>>,\n\n \/\/ local <- remote task life watcher\n watcher: AbortHandle,\n}\n\nimpl Drop for UdpAssociation {\n fn drop(&mut self) {\n self.watcher.abort();\n }\n}\n\nimpl UdpAssociation {\n \/\/\/ Create an association with addr\n async fn associate(\n context: SharedContext,\n svr_idx: usize,\n src_addr: SocketAddr,\n mut response_tx: mpsc::Sender<(SocketAddr, BytesMut)>,\n ) -> io::Result<UdpAssociation> {\n \/\/ Create a socket for receiving packets\n let local_addr = match context.config().local_addr {\n None => {\n \/\/ Let system allocate an address for us\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0)\n }\n Some(ref addr) => {\n \/\/ Uses configured local address\n addr.bind_addr(&context).await?\n }\n };\n let remote_udp = create_udp_socket(&local_addr).await?;\n\n let local_addr = remote_udp.local_addr().expect(\"could not determine port bound to\");\n debug!(\"created UDP Association for {} from {}\", src_addr, local_addr);\n\n \/\/ Create a channel for sending packets to remote\n \/\/ FIXME: Channel size 1024?\n let (tx, mut rx) = mpsc::channel::<Vec<u8>>(1024);\n\n \/\/ Splits socket into sender and receiver\n let (mut receiver, mut sender) = remote_udp.split();\n\n let timeout = context.config().udp_timeout.unwrap_or(DEFAULT_TIMEOUT);\n\n \/\/ local -> remote\n {\n let context = context.clone();\n tokio::spawn(async move {\n let svr_cfg = context.server_config(svr_idx);\n\n while let Some(pkt) = rx.recv().await {\n \/\/ pkt is already a raw packet, so just send it\n if let Err(err) =\n UdpAssociation::relay_l2r(&context, src_addr, &mut sender, &pkt[..], timeout, svr_cfg).await\n {\n error!(\"failed to relay packet, {} -> ..., error: {}\", src_addr, err);\n\n \/\/ FIXME: Ignore? Or how to deal with it?\n }\n }\n\n debug!(\"UDP ASSOCIATE {} -> .. finished\", src_addr);\n });\n }\n\n let (r2l_task, close_flag) = future::abortable(async move {\n let svr_cfg = context.server_config(svr_idx);\n\n loop {\n \/\/ Read and send back to source\n match UdpAssociation::relay_r2l(&context, src_addr, &mut receiver, &mut response_tx, svr_cfg).await {\n Ok(..) => {}\n Err(err) => {\n error!(\"failed to receive packet, {} <- .., error: {}\", src_addr, err);\n\n \/\/ FIXME: Don't break, or if you can find a way to drop the UdpAssociation\n \/\/ break;\n }\n }\n }\n });\n\n \/\/ local <- remote\n tokio::spawn(async move {\n let _ = r2l_task.await;\n\n debug!(\"UDP ASSOCIATE {} <- .. 
finished\", src_addr);\n });\n\n Ok(UdpAssociation {\n tx,\n watcher: close_flag,\n })\n }\n\n \/\/\/ Relay packets from local to remote\n async fn relay_l2r(\n context: &Context,\n src: SocketAddr,\n remote_udp: &mut SendHalf,\n pkt: &[u8],\n timeout: Duration,\n svr_cfg: &ServerConfig,\n ) -> io::Result<()> {\n \/\/ First of all, decrypt payload CLIENT -> SERVER\n let decrypted_pkt = match decrypt_payload(context, svr_cfg.method(), svr_cfg.key(), pkt) {\n Ok(Some(pkt)) => pkt,\n Ok(None) => {\n error!(\"failed to decrypt pkt in UDP relay, packet too short\");\n let err = io::Error::new(io::ErrorKind::InvalidData, \"packet too short\");\n return Err(err);\n }\n Err(err) => {\n error!(\"failed to decrypt pkt in UDP relay: {}\", err);\n let err = io::Error::new(io::ErrorKind::InvalidData, \"decrypt failed\");\n return Err(err);\n }\n };\n\n \/\/ CLIENT -> SERVER protocol: ADDRESS + PAYLOAD\n let mut cur = Cursor::new(decrypted_pkt);\n\n let addr = Address::read_from(&mut cur).await?;\n\n debug!(\"UDP ASSOCIATE {} <-> {} establishing\", src, addr);\n\n if context.check_outbound_blocked(&addr) {\n warn!(\"outbound {} is blocked by ACL rules\", addr);\n return Ok(());\n }\n\n \/\/ Take out internal buffer for optimizing one byte copy\n let header_len = cur.position() as usize;\n let decrypted_pkt = cur.into_inner();\n let body = &decrypted_pkt[header_len..];\n\n let send_len = match addr {\n Address::SocketAddress(ref remote_addr) => {\n debug!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n try_timeout(remote_udp.send_to(body, remote_addr), Some(timeout)).await?\n }\n Address::DomainNameAddress(ref dname, port) => lookup_outbound_then!(context, dname, port, |remote_addr| {\n match try_timeout(remote_udp.send_to(body, &remote_addr), Some(timeout)).await {\n Ok(l) => {\n debug!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n Ok(l)\n }\n Err(err) => {\n error!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n Err(err)\n }\n }\n })\n .map(|(_, l)| l)?,\n };\n\n assert_eq!(body.len(), send_len);\n\n Ok(())\n }\n\n \/\/\/ Relay packets from remote to local\n async fn relay_r2l(\n context: &Context,\n src_addr: SocketAddr,\n remote_udp: &mut RecvHalf,\n response_tx: &mut mpsc::Sender<(SocketAddr, BytesMut)>,\n svr_cfg: &ServerConfig,\n ) -> io::Result<()> {\n \/\/ Waiting for response from server SERVER -> CLIENT\n \/\/ Packet length is limited by MAXIMUM_UDP_PAYLOAD_SIZE, excess bytes will be discarded.\n let mut remote_buf = vec![0u8; MAXIMUM_UDP_PAYLOAD_SIZE];\n let (remote_recv_len, remote_addr) = remote_udp.recv_from(&mut remote_buf).await?;\n\n debug!(\n \"UDP ASSOCIATE {} <- {}, payload length {} bytes\",\n src_addr, remote_addr, remote_recv_len\n );\n\n \/\/ FIXME: The Address should be the Address that client sent\n let addr = Address::SocketAddress(remote_addr);\n\n \/\/ CLIENT <- SERVER protocol: ADDRESS + PAYLOAD\n let mut send_buf = Vec::new();\n addr.write_to_buf(&mut send_buf);\n send_buf.extend_from_slice(&remote_buf[..remote_recv_len]);\n\n let mut encrypt_buf = BytesMut::new();\n encrypt_payload(context, svr_cfg.method(), svr_cfg.key(), &send_buf, &mut encrypt_buf)?;\n\n \/\/ Send back to src_addr\n if let Err(err) = response_tx.send((src_addr, encrypt_buf)).await {\n error!(\"failed to send packet into response channel, error: {}\", err);\n\n \/\/ FIXME: What to do? 
Ignore?\n }\n\n Ok(())\n }\n\n \/\/ Send packet to remote\n \/\/\n \/\/ Return `Err` if receiver have been closed\n async fn send(&mut self, pkt: Vec<u8>) {\n if let Err(..) = self.tx.send(pkt).await {\n \/\/ SHOULDn't HAPPEN\n unreachable!(\"UDP Association local -> remote Queue closed unexpectly\");\n }\n }\n}\n\nfn serialize(saddr: &SocketAddr) -> [u8; 18] {\n let mut result = [0; 18];\n result[..16].copy_from_slice(&match saddr.ip() {\n IpAddr::V4(ref ip) => ip.to_ipv6_mapped(),\n IpAddr::V6(ref ip) => ip.clone(),\n }.octets());\n result[16..].copy_from_slice(&saddr.port().to_ne_bytes());\n result\n}\n\nasync fn listen(context: SharedContext, flow_stat: SharedServerFlowStatistic, svr_idx: usize) -> io::Result<()> {\n let svr_cfg = context.server_config(svr_idx);\n let listen_addr = svr_cfg.addr().bind_addr(&context).await?;\n\n let listener = create_udp_socket(&listen_addr).await?;\n let local_addr = listener.local_addr().expect(\"determine port bound to\");\n info!(\"shadowsocks UDP listening on {}\", local_addr);\n\n let (mut r, mut w) = listener.split();\n\n \/\/ NOTE: Associations are only eliminated by expire time by default\n \/\/ So it may exhaust all available file descriptors\n let timeout = context.config().udp_timeout.unwrap_or(DEFAULT_TIMEOUT);\n let assoc_map = if let Some(max_assoc) = context.config().udp_max_associations {\n LruCache::with_expiry_duration_and_capacity(timeout, max_assoc)\n } else {\n LruCache::with_expiry_duration(timeout)\n };\n let assoc_map = Arc::new(Mutex::new(assoc_map));\n\n \/\/ FIXME: Channel size 1024?\n let (tx, mut rx) = mpsc::channel::<(SocketAddr, BytesMut)>(1024);\n\n {\n \/\/ Tokio task for sending data back to clients\n\n let assoc_map = assoc_map.clone();\n let flow_stat = flow_stat.clone();\n\n tokio::spawn(async move {\n while let Some((src, pkt)) = rx.recv().await {\n let cache_key = serialize(&src);\n {\n let mut amap = assoc_map.lock().await;\n\n \/\/ Check or update expire time\n if amap.get(&cache_key).is_none() {\n debug!(\n \"UDP association {} <-> ... 
is already expired, throwing away packet {} bytes\",\n src,\n pkt.len()\n );\n continue;\n }\n }\n\n if let Err(err) = w.send_to(&pkt, &src).await {\n error!(\"UDP packet send failed, err: {:?}\", err);\n break;\n }\n\n flow_stat.udp().incr_tx(pkt.len() as u64);\n }\n\n \/\/ FIXME: How to stop the outer listener Future?\n });\n }\n\n let mut pkt_buf = vec![0u8; MAXIMUM_UDP_PAYLOAD_SIZE];\n\n loop {\n let (recv_len, src) = r.recv_from(&mut pkt_buf).await?;\n\n \/\/ Packet length is limited by MAXIMUM_UDP_PAYLOAD_SIZE, excess bytes will be discarded.\n let pkt = &pkt_buf[..recv_len];\n\n trace!(\"received UDP packet from {}, length {} bytes\", src, recv_len);\n flow_stat.udp().incr_rx(pkt.len() as u64);\n\n if recv_len == 0 {\n \/\/ For windows, it will generate a ICMP Port Unreachable Message\n \/\/ https:\/\/docs.microsoft.com\/en-us\/windows\/win32\/api\/winsock2\/nf-winsock2-recvfrom\n \/\/ Which will result in recv_from return 0.\n \/\/\n \/\/ It cannot be solved here, because `WSAGetLastError` is already set.\n \/\/\n \/\/ See `relay::udprelay::utils::create_socket` for more detail.\n continue;\n }\n\n \/\/ Check ACL\n if context.check_client_blocked(&src) {\n warn!(\"client {} is blocked by ACL rules\", src);\n continue;\n }\n\n \/\/ Check or (re)create an association\n {\n \/\/ Locks the whole association map\n let mut assoc_map = assoc_map.lock().await;\n\n \/\/ Get or create an association\n let assoc = match assoc_map.entry(serialize(&src)) {\n Entry::Occupied(oc) => oc.into_mut(),\n Entry::Vacant(vc) => vc.insert(\n UdpAssociation::associate(context.clone(), svr_idx, src, tx.clone())\n .await\n .expect(\"create udp association\"),\n ),\n };\n\n \/\/ FIXME: Lock is still kept for a mutable reference\n \/\/ Send to local -> remote task\n assoc.send(pkt.to_vec()).await;\n }\n }\n}\n\n\/\/\/ Starts a UDP relay server\npub async fn run(context: SharedContext, flow_stat: SharedMultiServerFlowStatistic) -> io::Result<()> {\n let vec_fut = FuturesUnordered::new();\n\n for (svr_idx, svr_cfg) in context.config().server.iter().enumerate() {\n let context = context.clone();\n let flow_stat = flow_stat\n .get(svr_cfg.addr().port())\n .expect(\"port not existed in multi-server flow statistic\")\n .clone();\n\n let svr_fut = listen(context, flow_stat, svr_idx);\n vec_fut.push(svr_fut);\n }\n\n match vec_fut.into_future().await.0 {\n Some(res) => {\n error!(\"one of UDP servers exited unexpectly, result: {:?}\", res);\n let err = io::Error::new(io::ErrorKind::Other, \"server exited unexpectly\");\n Err(err)\n }\n None => unreachable!(),\n }\n}\n<commit_msg>Reduce unnecessary cloning<commit_after>\/\/! 
UDP relay proxy server\n\nuse std::{\n io::{self, Cursor},\n net::{IpAddr, Ipv4Addr, SocketAddr},\n sync::Arc,\n time::Duration,\n};\n\nuse bytes::BytesMut;\nuse futures::{future, future::AbortHandle, stream::FuturesUnordered, StreamExt};\nuse log::{debug, error, info, trace, warn};\nuse lru_time_cache::{Entry, LruCache};\nuse tokio::{\n self,\n net::udp::{RecvHalf, SendHalf},\n sync::{mpsc, Mutex},\n};\n\nuse crate::{\n config::ServerConfig,\n context::{Context, SharedContext},\n relay::{\n flow::{SharedMultiServerFlowStatistic, SharedServerFlowStatistic},\n socks5::Address,\n sys::create_udp_socket,\n utils::try_timeout,\n },\n};\n\nuse super::{\n crypto_io::{decrypt_payload, encrypt_payload},\n DEFAULT_TIMEOUT,\n MAXIMUM_UDP_PAYLOAD_SIZE,\n};\n\n\/\/ Represent a UDP association\nstruct UdpAssociation {\n \/\/ local -> remote Queue\n \/\/ Drops tx, will close local -> remote task\n tx: mpsc::Sender<Vec<u8>>,\n\n \/\/ local <- remote task life watcher\n watcher: AbortHandle,\n}\n\nimpl Drop for UdpAssociation {\n fn drop(&mut self) {\n self.watcher.abort();\n }\n}\n\nimpl UdpAssociation {\n \/\/\/ Create an association with addr\n async fn associate(\n context: SharedContext,\n svr_idx: usize,\n src_addr: SocketAddr,\n mut response_tx: mpsc::Sender<(SocketAddr, BytesMut)>,\n ) -> io::Result<UdpAssociation> {\n \/\/ Create a socket for receiving packets\n let local_addr = match context.config().local_addr {\n None => {\n \/\/ Let system allocate an address for us\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0)\n }\n Some(ref addr) => {\n \/\/ Uses configured local address\n addr.bind_addr(&context).await?\n }\n };\n let remote_udp = create_udp_socket(&local_addr).await?;\n\n let local_addr = remote_udp.local_addr().expect(\"could not determine port bound to\");\n debug!(\"created UDP Association for {} from {}\", src_addr, local_addr);\n\n \/\/ Create a channel for sending packets to remote\n \/\/ FIXME: Channel size 1024?\n let (tx, mut rx) = mpsc::channel::<Vec<u8>>(1024);\n\n \/\/ Splits socket into sender and receiver\n let (mut receiver, mut sender) = remote_udp.split();\n\n let timeout = context.config().udp_timeout.unwrap_or(DEFAULT_TIMEOUT);\n\n \/\/ local -> remote\n {\n let context = context.clone();\n tokio::spawn(async move {\n let svr_cfg = context.server_config(svr_idx);\n\n while let Some(pkt) = rx.recv().await {\n \/\/ pkt is already a raw packet, so just send it\n if let Err(err) =\n UdpAssociation::relay_l2r(&context, src_addr, &mut sender, &pkt[..], timeout, svr_cfg).await\n {\n error!(\"failed to relay packet, {} -> ..., error: {}\", src_addr, err);\n\n \/\/ FIXME: Ignore? Or how to deal with it?\n }\n }\n\n debug!(\"UDP ASSOCIATE {} -> .. finished\", src_addr);\n });\n }\n\n let (r2l_task, close_flag) = future::abortable(async move {\n let svr_cfg = context.server_config(svr_idx);\n\n loop {\n \/\/ Read and send back to source\n match UdpAssociation::relay_r2l(&context, src_addr, &mut receiver, &mut response_tx, svr_cfg).await {\n Ok(..) => {}\n Err(err) => {\n error!(\"failed to receive packet, {} <- .., error: {}\", src_addr, err);\n\n \/\/ FIXME: Don't break, or if you can find a way to drop the UdpAssociation\n \/\/ break;\n }\n }\n }\n });\n\n \/\/ local <- remote\n tokio::spawn(async move {\n let _ = r2l_task.await;\n\n debug!(\"UDP ASSOCIATE {} <- .. 
finished\", src_addr);\n });\n\n Ok(UdpAssociation {\n tx,\n watcher: close_flag,\n })\n }\n\n \/\/\/ Relay packets from local to remote\n async fn relay_l2r(\n context: &Context,\n src: SocketAddr,\n remote_udp: &mut SendHalf,\n pkt: &[u8],\n timeout: Duration,\n svr_cfg: &ServerConfig,\n ) -> io::Result<()> {\n \/\/ First of all, decrypt payload CLIENT -> SERVER\n let decrypted_pkt = match decrypt_payload(context, svr_cfg.method(), svr_cfg.key(), pkt) {\n Ok(Some(pkt)) => pkt,\n Ok(None) => {\n error!(\"failed to decrypt pkt in UDP relay, packet too short\");\n let err = io::Error::new(io::ErrorKind::InvalidData, \"packet too short\");\n return Err(err);\n }\n Err(err) => {\n error!(\"failed to decrypt pkt in UDP relay: {}\", err);\n let err = io::Error::new(io::ErrorKind::InvalidData, \"decrypt failed\");\n return Err(err);\n }\n };\n\n \/\/ CLIENT -> SERVER protocol: ADDRESS + PAYLOAD\n let mut cur = Cursor::new(decrypted_pkt);\n\n let addr = Address::read_from(&mut cur).await?;\n\n debug!(\"UDP ASSOCIATE {} <-> {} establishing\", src, addr);\n\n if context.check_outbound_blocked(&addr) {\n warn!(\"outbound {} is blocked by ACL rules\", addr);\n return Ok(());\n }\n\n \/\/ Take out internal buffer for optimizing one byte copy\n let header_len = cur.position() as usize;\n let decrypted_pkt = cur.into_inner();\n let body = &decrypted_pkt[header_len..];\n\n let send_len = match addr {\n Address::SocketAddress(ref remote_addr) => {\n debug!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n try_timeout(remote_udp.send_to(body, remote_addr), Some(timeout)).await?\n }\n Address::DomainNameAddress(ref dname, port) => lookup_outbound_then!(context, dname, port, |remote_addr| {\n match try_timeout(remote_udp.send_to(body, &remote_addr), Some(timeout)).await {\n Ok(l) => {\n debug!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n Ok(l)\n }\n Err(err) => {\n error!(\n \"UDP ASSOCIATE {} -> {} ({}), payload length {} bytes\",\n src,\n addr,\n remote_addr,\n body.len()\n );\n Err(err)\n }\n }\n })\n .map(|(_, l)| l)?,\n };\n\n assert_eq!(body.len(), send_len);\n\n Ok(())\n }\n\n \/\/\/ Relay packets from remote to local\n async fn relay_r2l(\n context: &Context,\n src_addr: SocketAddr,\n remote_udp: &mut RecvHalf,\n response_tx: &mut mpsc::Sender<(SocketAddr, BytesMut)>,\n svr_cfg: &ServerConfig,\n ) -> io::Result<()> {\n \/\/ Waiting for response from server SERVER -> CLIENT\n \/\/ Packet length is limited by MAXIMUM_UDP_PAYLOAD_SIZE, excess bytes will be discarded.\n let mut remote_buf = vec![0u8; MAXIMUM_UDP_PAYLOAD_SIZE];\n let (remote_recv_len, remote_addr) = remote_udp.recv_from(&mut remote_buf).await?;\n\n debug!(\n \"UDP ASSOCIATE {} <- {}, payload length {} bytes\",\n src_addr, remote_addr, remote_recv_len\n );\n\n \/\/ FIXME: The Address should be the Address that client sent\n let addr = Address::SocketAddress(remote_addr);\n\n \/\/ CLIENT <- SERVER protocol: ADDRESS + PAYLOAD\n let mut send_buf = Vec::new();\n addr.write_to_buf(&mut send_buf);\n send_buf.extend_from_slice(&remote_buf[..remote_recv_len]);\n\n let mut encrypt_buf = BytesMut::new();\n encrypt_payload(context, svr_cfg.method(), svr_cfg.key(), &send_buf, &mut encrypt_buf)?;\n\n \/\/ Send back to src_addr\n if let Err(err) = response_tx.send((src_addr, encrypt_buf)).await {\n error!(\"failed to send packet into response channel, error: {}\", err);\n\n \/\/ FIXME: What to do? 
Ignore?\n        }\n\n        Ok(())\n    }\n\n    \/\/ Send packet to remote\n    \/\/\n    \/\/ Return `Err` if receiver have been closed\n    async fn send(&mut self, pkt: Vec<u8>) {\n        if let Err(..) = self.tx.send(pkt).await {\n            \/\/ SHOULDn't HAPPEN\n            unreachable!(\"UDP Association local -> remote Queue closed unexpectly\");\n        }\n    }\n}\n\nfn serialize(saddr: &SocketAddr) -> [u8; 18] {\n    let mut result = [0; 18];\n    result[..16].copy_from_slice(&match saddr.ip() {\n        IpAddr::V4(ref ip) => ip.to_ipv6_mapped(),\n        IpAddr::V6(ip) => ip,\n    }.octets());\n    result[16..].copy_from_slice(&saddr.port().to_ne_bytes());\n    result\n}\n\nasync fn listen(context: SharedContext, flow_stat: SharedServerFlowStatistic, svr_idx: usize) -> io::Result<()> {\n    let svr_cfg = context.server_config(svr_idx);\n    let listen_addr = svr_cfg.addr().bind_addr(&context).await?;\n\n    let listener = create_udp_socket(&listen_addr).await?;\n    let local_addr = listener.local_addr().expect(\"determine port bound to\");\n    info!(\"shadowsocks UDP listening on {}\", local_addr);\n\n    let (mut r, mut w) = listener.split();\n\n    \/\/ NOTE: Associations are only eliminated by expire time by default\n    \/\/ So it may exhaust all available file descriptors\n    let timeout = context.config().udp_timeout.unwrap_or(DEFAULT_TIMEOUT);\n    let assoc_map = if let Some(max_assoc) = context.config().udp_max_associations {\n        LruCache::with_expiry_duration_and_capacity(timeout, max_assoc)\n    } else {\n        LruCache::with_expiry_duration(timeout)\n    };\n    let assoc_map = Arc::new(Mutex::new(assoc_map));\n\n    \/\/ FIXME: Channel size 1024?\n    let (tx, mut rx) = mpsc::channel::<(SocketAddr, BytesMut)>(1024);\n\n    {\n        \/\/ Tokio task for sending data back to clients\n\n        let assoc_map = assoc_map.clone();\n        let flow_stat = flow_stat.clone();\n\n        tokio::spawn(async move {\n            while let Some((src, pkt)) = rx.recv().await {\n                let cache_key = serialize(&src);\n                {\n                    let mut amap = assoc_map.lock().await;\n\n                    \/\/ Check or update expire time\n                    if amap.get(&cache_key).is_none() {\n                        debug!(\n                            \"UDP association {} <-> ... 
is already expired, throwing away packet {} bytes\",\n src,\n pkt.len()\n );\n continue;\n }\n }\n\n if let Err(err) = w.send_to(&pkt, &src).await {\n error!(\"UDP packet send failed, err: {:?}\", err);\n break;\n }\n\n flow_stat.udp().incr_tx(pkt.len() as u64);\n }\n\n \/\/ FIXME: How to stop the outer listener Future?\n });\n }\n\n let mut pkt_buf = vec![0u8; MAXIMUM_UDP_PAYLOAD_SIZE];\n\n loop {\n let (recv_len, src) = r.recv_from(&mut pkt_buf).await?;\n\n \/\/ Packet length is limited by MAXIMUM_UDP_PAYLOAD_SIZE, excess bytes will be discarded.\n let pkt = &pkt_buf[..recv_len];\n\n trace!(\"received UDP packet from {}, length {} bytes\", src, recv_len);\n flow_stat.udp().incr_rx(pkt.len() as u64);\n\n if recv_len == 0 {\n \/\/ For windows, it will generate a ICMP Port Unreachable Message\n \/\/ https:\/\/docs.microsoft.com\/en-us\/windows\/win32\/api\/winsock2\/nf-winsock2-recvfrom\n \/\/ Which will result in recv_from return 0.\n \/\/\n \/\/ It cannot be solved here, because `WSAGetLastError` is already set.\n \/\/\n \/\/ See `relay::udprelay::utils::create_socket` for more detail.\n continue;\n }\n\n \/\/ Check ACL\n if context.check_client_blocked(&src) {\n warn!(\"client {} is blocked by ACL rules\", src);\n continue;\n }\n\n \/\/ Check or (re)create an association\n {\n \/\/ Locks the whole association map\n let mut assoc_map = assoc_map.lock().await;\n\n \/\/ Get or create an association\n let assoc = match assoc_map.entry(serialize(&src)) {\n Entry::Occupied(oc) => oc.into_mut(),\n Entry::Vacant(vc) => vc.insert(\n UdpAssociation::associate(context.clone(), svr_idx, src, tx.clone())\n .await\n .expect(\"create udp association\"),\n ),\n };\n\n \/\/ FIXME: Lock is still kept for a mutable reference\n \/\/ Send to local -> remote task\n assoc.send(pkt.to_vec()).await;\n }\n }\n}\n\n\/\/\/ Starts a UDP relay server\npub async fn run(context: SharedContext, flow_stat: SharedMultiServerFlowStatistic) -> io::Result<()> {\n let vec_fut = FuturesUnordered::new();\n\n for (svr_idx, svr_cfg) in context.config().server.iter().enumerate() {\n let context = context.clone();\n let flow_stat = flow_stat\n .get(svr_cfg.addr().port())\n .expect(\"port not existed in multi-server flow statistic\")\n .clone();\n\n let svr_fut = listen(context, flow_stat, svr_idx);\n vec_fut.push(svr_fut);\n }\n\n match vec_fut.into_future().await.0 {\n Some(res) => {\n error!(\"one of UDP servers exited unexpectly, result: {:?}\", res);\n let err = io::Error::new(io::ErrorKind::Other, \"server exited unexpectly\");\n Err(err)\n }\n None => unreachable!(),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix some error cases in lexer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary allocations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>watch app nodes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for lucians-luscious-lasagna<commit_after>pub fn expected_minutes_in_oven() -> i32 {\n 40\n}\n\npub fn remaining_minutes_in_oven(actual_minutes_in_oven: i32) -> i32 {\n 40 - actual_minutes_in_oven\n}\n\npub fn preparation_time_in_minutes(number_of_layers: i32) -> i32 {\n number_of_layers * 2\n}\n\npub fn elapsed_time_in_minutes(number_of_layers: i32, actual_minutes_in_oven: i32) -> i32 {\n preparation_time_in_minutes(number_of_layers) + actual_minutes_in_oven\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add help command to print 
usage<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::dep_graph::DepNode;\nuse rustc::hir::def_id::DefId;\nuse rustc::hir::svh::Svh;\nuse rustc::session::Session;\nuse rustc::ty::TyCtxt;\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_serialize::Encodable as RustcEncodable;\nuse rustc_serialize::opaque::Encoder;\nuse std::hash::Hash;\nuse std::io::{self, Cursor, Write};\nuse std::fs::{self, File};\nuse std::path::PathBuf;\n\nuse IncrementalHashesMap;\nuse ich::Fingerprint;\nuse super::data::*;\nuse super::directory::*;\nuse super::hash::*;\nuse super::preds::*;\nuse super::fs::*;\nuse super::dirty_clean;\nuse super::file_format;\nuse calculate_svh::IchHasher;\n\npub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n incremental_hashes_map: &IncrementalHashesMap,\n svh: Svh) {\n debug!(\"save_dep_graph()\");\n let _ignore = tcx.dep_graph.in_ignore();\n let sess = tcx.sess;\n if sess.opts.incremental.is_none() {\n return;\n }\n\n let mut builder = DefIdDirectoryBuilder::new(tcx);\n let query = tcx.dep_graph.query();\n let mut hcx = HashContext::new(tcx, incremental_hashes_map);\n let preds = Predecessors::new(&query, &mut hcx);\n let mut current_metadata_hashes = FxHashMap();\n\n \/\/ IMPORTANT: We are saving the metadata hashes *before* the dep-graph,\n \/\/ since metadata-encoding might add new entries to the\n \/\/ DefIdDirectory (which is saved in the dep-graph file).\n save_in(sess,\n metadata_hash_export_path(sess),\n |e| encode_metadata_hashes(tcx,\n svh,\n &preds,\n &mut builder,\n &mut current_metadata_hashes,\n e));\n save_in(sess,\n dep_graph_path(sess),\n |e| encode_dep_graph(&preds, &mut builder, e));\n\n let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();\n dirty_clean::check_dirty_clean_metadata(tcx,\n &*prev_metadata_hashes,\n ¤t_metadata_hashes);\n}\n\npub fn save_work_products(sess: &Session) {\n if sess.opts.incremental.is_none() {\n return;\n }\n\n debug!(\"save_work_products()\");\n let _ignore = sess.dep_graph.in_ignore();\n let path = work_products_path(sess);\n save_in(sess, path, |e| encode_work_products(sess, e));\n}\n\nfn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)\n where F: FnOnce(&mut Encoder) -> io::Result<()>\n{\n debug!(\"save: storing data in {}\", path_buf.display());\n\n \/\/ delete the old dep-graph, if any\n \/\/ Note: It's important that we actually delete the old file and not just\n \/\/ truncate and overwrite it, since it might be a shared hard-link, the\n \/\/ underlying data of which we don't want to modify\n if path_buf.exists() {\n match fs::remove_file(&path_buf) {\n Ok(()) => {\n debug!(\"save: remove old file\");\n }\n Err(err) => {\n sess.err(&format!(\"unable to delete old dep-graph at `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n }\n\n \/\/ generate the data in a memory buffer\n let mut wr = Cursor::new(Vec::new());\n file_format::write_file_header(&mut wr).unwrap();\n match encode(&mut Encoder::new(&mut wr)) {\n Ok(()) => {}\n Err(err) => {\n 
sess.err(&format!(\"could not encode dep-graph to `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n\n \/\/ write the data out\n let data = wr.into_inner();\n match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {\n Ok(_) => {\n debug!(\"save: data written to disk successfully\");\n }\n Err(err) => {\n sess.err(&format!(\"failed to write dep-graph to `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n}\n\npub fn encode_dep_graph(preds: &Predecessors,\n builder: &mut DefIdDirectoryBuilder,\n encoder: &mut Encoder)\n -> io::Result<()> {\n \/\/ First encode the commandline arguments hash\n let tcx = builder.tcx();\n tcx.sess.opts.dep_tracking_hash().encode(encoder)?;\n\n \/\/ Create a flat list of (Input, WorkProduct) edges for\n \/\/ serialization.\n let mut edges = vec![];\n for (&target, sources) in &preds.inputs {\n match *target {\n DepNode::MetaData(ref def_id) => {\n \/\/ Metadata *targets* are always local metadata nodes. We have\n \/\/ already handled those in `encode_metadata_hashes`.\n assert!(def_id.is_local());\n continue;\n }\n _ => (),\n }\n let target = builder.map(target);\n for &source in sources {\n let source = builder.map(source);\n edges.push((source, target.clone()));\n }\n }\n\n if tcx.sess.opts.debugging_opts.incremental_dump_hash {\n for (dep_node, hash) in &preds.hashes {\n println!(\"HIR hash for {:?} is {}\", dep_node, hash);\n }\n }\n\n \/\/ Create the serialized dep-graph.\n let graph = SerializedDepGraph {\n edges: edges,\n hashes: preds.hashes\n .iter()\n .map(|(&dep_node, &hash)| {\n SerializedHash {\n dep_node: builder.map(dep_node),\n hash: hash,\n }\n })\n .collect(),\n };\n\n debug!(\"graph = {:#?}\", graph);\n\n \/\/ Encode the directory and then the graph data.\n builder.directory().encode(encoder)?;\n graph.encode(encoder)?;\n\n Ok(())\n}\n\npub fn encode_metadata_hashes(tcx: TyCtxt,\n svh: Svh,\n preds: &Predecessors,\n builder: &mut DefIdDirectoryBuilder,\n current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,\n encoder: &mut Encoder)\n -> io::Result<()> {\n \/\/ For each `MetaData(X)` node where `X` is local, accumulate a\n \/\/ hash. These are the metadata items we export. Downstream\n \/\/ crates will want to see a hash that tells them whether we might\n \/\/ have changed the metadata for a given item since they last\n \/\/ compiled.\n \/\/\n \/\/ (I initially wrote this with an iterator, but it seemed harder to read.)\n let mut serialized_hashes = SerializedMetadataHashes {\n hashes: vec![],\n index_map: FxHashMap()\n };\n\n let mut def_id_hashes = FxHashMap();\n\n for (&target, sources) in &preds.inputs {\n let def_id = match *target {\n DepNode::MetaData(def_id) => {\n assert!(def_id.is_local());\n def_id\n }\n _ => continue,\n };\n\n let mut def_id_hash = |def_id: DefId| -> u64 {\n *def_id_hashes.entry(def_id)\n .or_insert_with(|| {\n let index = builder.add(def_id);\n let path = builder.lookup_def_path(index);\n path.deterministic_hash(tcx)\n })\n };\n\n \/\/ To create the hash for each item `X`, we don't hash the raw\n \/\/ bytes of the metadata (though in principle we\n \/\/ could). Instead, we walk the predecessors of `MetaData(X)`\n \/\/ from the dep-graph. This corresponds to all the inputs that\n \/\/ were read to construct the metadata. 
To create the hash for\n \/\/ the metadata, we hash (the hash of) all of those inputs.\n debug!(\"save: computing metadata hash for {:?}\", def_id);\n\n \/\/ Create a vector containing a pair of (source-id, hash).\n \/\/ The source-id is stored as a `DepNode<u64>`, where the u64\n \/\/ is the det. hash of the def-path. This is convenient\n \/\/ because we can sort this to get a stable ordering across\n \/\/ compilations, even if the def-ids themselves have changed.\n let mut hashes: Vec<(DepNode<u64>, Fingerprint)> = sources.iter()\n .map(|dep_node| {\n let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap();\n let hash = preds.hashes[dep_node];\n (hash_dep_node, hash)\n })\n .collect();\n\n hashes.sort();\n let mut state = IchHasher::new();\n hashes.hash(&mut state);\n let hash = state.finish();\n\n debug!(\"save: metadata hash for {:?} is {}\", def_id, hash);\n\n if tcx.sess.opts.debugging_opts.incremental_dump_hash {\n println!(\"metadata hash for {:?} is {}\", def_id, hash);\n for dep_node in sources {\n println!(\"metadata hash for {:?} depends on {:?} with hash {}\",\n def_id, dep_node, preds.hashes[dep_node]);\n }\n }\n\n serialized_hashes.hashes.push(SerializedMetadataHash {\n def_index: def_id.index,\n hash: hash,\n });\n }\n\n if tcx.sess.opts.debugging_opts.query_dep_graph {\n for serialized_hash in &serialized_hashes.hashes {\n let def_id = DefId::local(serialized_hash.def_index);\n\n \/\/ Store entry in the index_map\n let def_path_index = builder.add(def_id);\n serialized_hashes.index_map.insert(def_id.index, def_path_index);\n\n \/\/ Record hash in current_metadata_hashes\n current_metadata_hashes.insert(def_id, serialized_hash.hash);\n }\n\n debug!(\"save: stored index_map (len={}) for serialized hashes\",\n serialized_hashes.index_map.len());\n }\n\n \/\/ Encode everything.\n svh.encode(encoder)?;\n serialized_hashes.encode(encoder)?;\n\n Ok(())\n}\n\npub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> {\n let work_products: Vec<_> = sess.dep_graph\n .work_products()\n .iter()\n .map(|(id, work_product)| {\n SerializedWorkProduct {\n id: id.clone(),\n work_product: work_product.clone(),\n }\n })\n .collect();\n\n work_products.encode(encoder)\n}\n<commit_msg>Add some more info to -Zincremental-info<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::dep_graph::DepNode;\nuse rustc::hir::def_id::DefId;\nuse rustc::hir::svh::Svh;\nuse rustc::session::Session;\nuse rustc::ty::TyCtxt;\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_serialize::Encodable as RustcEncodable;\nuse rustc_serialize::opaque::Encoder;\nuse std::hash::Hash;\nuse std::io::{self, Cursor, Write};\nuse std::fs::{self, File};\nuse std::path::PathBuf;\n\nuse IncrementalHashesMap;\nuse ich::Fingerprint;\nuse super::data::*;\nuse super::directory::*;\nuse super::hash::*;\nuse super::preds::*;\nuse super::fs::*;\nuse super::dirty_clean;\nuse super::file_format;\nuse calculate_svh::IchHasher;\n\npub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n incremental_hashes_map: &IncrementalHashesMap,\n svh: Svh) {\n debug!(\"save_dep_graph()\");\n let _ignore = tcx.dep_graph.in_ignore();\n let sess = tcx.sess;\n if sess.opts.incremental.is_none() {\n return;\n }\n\n let mut builder = DefIdDirectoryBuilder::new(tcx);\n let query = tcx.dep_graph.query();\n\n if tcx.sess.opts.debugging_opts.incremental_info {\n println!(\"incremental: {} nodes in dep-graph\", query.graph.len_nodes());\n println!(\"incremental: {} edges in dep-graph\", query.graph.len_edges());\n }\n\n let mut hcx = HashContext::new(tcx, incremental_hashes_map);\n let preds = Predecessors::new(&query, &mut hcx);\n let mut current_metadata_hashes = FxHashMap();\n\n \/\/ IMPORTANT: We are saving the metadata hashes *before* the dep-graph,\n \/\/ since metadata-encoding might add new entries to the\n \/\/ DefIdDirectory (which is saved in the dep-graph file).\n save_in(sess,\n metadata_hash_export_path(sess),\n |e| encode_metadata_hashes(tcx,\n svh,\n &preds,\n &mut builder,\n &mut current_metadata_hashes,\n e));\n save_in(sess,\n dep_graph_path(sess),\n |e| encode_dep_graph(&preds, &mut builder, e));\n\n let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();\n dirty_clean::check_dirty_clean_metadata(tcx,\n &*prev_metadata_hashes,\n ¤t_metadata_hashes);\n}\n\npub fn save_work_products(sess: &Session) {\n if sess.opts.incremental.is_none() {\n return;\n }\n\n debug!(\"save_work_products()\");\n let _ignore = sess.dep_graph.in_ignore();\n let path = work_products_path(sess);\n save_in(sess, path, |e| encode_work_products(sess, e));\n}\n\nfn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)\n where F: FnOnce(&mut Encoder) -> io::Result<()>\n{\n debug!(\"save: storing data in {}\", path_buf.display());\n\n \/\/ delete the old dep-graph, if any\n \/\/ Note: It's important that we actually delete the old file and not just\n \/\/ truncate and overwrite it, since it might be a shared hard-link, the\n \/\/ underlying data of which we don't want to modify\n if path_buf.exists() {\n match fs::remove_file(&path_buf) {\n Ok(()) => {\n debug!(\"save: remove old file\");\n }\n Err(err) => {\n sess.err(&format!(\"unable to delete old dep-graph at `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n }\n\n \/\/ generate the data in a memory buffer\n let mut wr = Cursor::new(Vec::new());\n file_format::write_file_header(&mut wr).unwrap();\n match encode(&mut Encoder::new(&mut wr)) {\n Ok(()) => {}\n Err(err) => {\n sess.err(&format!(\"could not encode dep-graph to `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n\n \/\/ write the data out\n let data = wr.into_inner();\n match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {\n Ok(_) => {\n 
debug!(\"save: data written to disk successfully\");\n }\n Err(err) => {\n sess.err(&format!(\"failed to write dep-graph to `{}`: {}\",\n path_buf.display(),\n err));\n return;\n }\n }\n}\n\npub fn encode_dep_graph(preds: &Predecessors,\n builder: &mut DefIdDirectoryBuilder,\n encoder: &mut Encoder)\n -> io::Result<()> {\n \/\/ First encode the commandline arguments hash\n let tcx = builder.tcx();\n tcx.sess.opts.dep_tracking_hash().encode(encoder)?;\n\n \/\/ Create a flat list of (Input, WorkProduct) edges for\n \/\/ serialization.\n let mut edges = vec![];\n for (&target, sources) in &preds.inputs {\n match *target {\n DepNode::MetaData(ref def_id) => {\n \/\/ Metadata *targets* are always local metadata nodes. We have\n \/\/ already handled those in `encode_metadata_hashes`.\n assert!(def_id.is_local());\n continue;\n }\n _ => (),\n }\n let target = builder.map(target);\n for &source in sources {\n let source = builder.map(source);\n edges.push((source, target.clone()));\n }\n }\n\n if tcx.sess.opts.debugging_opts.incremental_dump_hash {\n for (dep_node, hash) in &preds.hashes {\n println!(\"HIR hash for {:?} is {}\", dep_node, hash);\n }\n }\n\n \/\/ Create the serialized dep-graph.\n let graph = SerializedDepGraph {\n edges: edges,\n hashes: preds.hashes\n .iter()\n .map(|(&dep_node, &hash)| {\n SerializedHash {\n dep_node: builder.map(dep_node),\n hash: hash,\n }\n })\n .collect(),\n };\n\n if tcx.sess.opts.debugging_opts.incremental_info {\n println!(\"incremental: {} edges in serialized dep-graph\", graph.edges.len());\n println!(\"incremental: {} hashes in serialized dep-graph\", graph.hashes.len());\n }\n\n debug!(\"graph = {:#?}\", graph);\n\n \/\/ Encode the directory and then the graph data.\n builder.directory().encode(encoder)?;\n graph.encode(encoder)?;\n\n Ok(())\n}\n\npub fn encode_metadata_hashes(tcx: TyCtxt,\n svh: Svh,\n preds: &Predecessors,\n builder: &mut DefIdDirectoryBuilder,\n current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,\n encoder: &mut Encoder)\n -> io::Result<()> {\n \/\/ For each `MetaData(X)` node where `X` is local, accumulate a\n \/\/ hash. These are the metadata items we export. Downstream\n \/\/ crates will want to see a hash that tells them whether we might\n \/\/ have changed the metadata for a given item since they last\n \/\/ compiled.\n \/\/\n \/\/ (I initially wrote this with an iterator, but it seemed harder to read.)\n let mut serialized_hashes = SerializedMetadataHashes {\n hashes: vec![],\n index_map: FxHashMap()\n };\n\n let mut def_id_hashes = FxHashMap();\n\n for (&target, sources) in &preds.inputs {\n let def_id = match *target {\n DepNode::MetaData(def_id) => {\n assert!(def_id.is_local());\n def_id\n }\n _ => continue,\n };\n\n let mut def_id_hash = |def_id: DefId| -> u64 {\n *def_id_hashes.entry(def_id)\n .or_insert_with(|| {\n let index = builder.add(def_id);\n let path = builder.lookup_def_path(index);\n path.deterministic_hash(tcx)\n })\n };\n\n \/\/ To create the hash for each item `X`, we don't hash the raw\n \/\/ bytes of the metadata (though in principle we\n \/\/ could). Instead, we walk the predecessors of `MetaData(X)`\n \/\/ from the dep-graph. This corresponds to all the inputs that\n \/\/ were read to construct the metadata. 
To create the hash for\n \/\/ the metadata, we hash (the hash of) all of those inputs.\n debug!(\"save: computing metadata hash for {:?}\", def_id);\n\n \/\/ Create a vector containing a pair of (source-id, hash).\n \/\/ The source-id is stored as a `DepNode<u64>`, where the u64\n \/\/ is the det. hash of the def-path. This is convenient\n \/\/ because we can sort this to get a stable ordering across\n \/\/ compilations, even if the def-ids themselves have changed.\n let mut hashes: Vec<(DepNode<u64>, Fingerprint)> = sources.iter()\n .map(|dep_node| {\n let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap();\n let hash = preds.hashes[dep_node];\n (hash_dep_node, hash)\n })\n .collect();\n\n hashes.sort();\n let mut state = IchHasher::new();\n hashes.hash(&mut state);\n let hash = state.finish();\n\n debug!(\"save: metadata hash for {:?} is {}\", def_id, hash);\n\n if tcx.sess.opts.debugging_opts.incremental_dump_hash {\n println!(\"metadata hash for {:?} is {}\", def_id, hash);\n for dep_node in sources {\n println!(\"metadata hash for {:?} depends on {:?} with hash {}\",\n def_id, dep_node, preds.hashes[dep_node]);\n }\n }\n\n serialized_hashes.hashes.push(SerializedMetadataHash {\n def_index: def_id.index,\n hash: hash,\n });\n }\n\n if tcx.sess.opts.debugging_opts.query_dep_graph {\n for serialized_hash in &serialized_hashes.hashes {\n let def_id = DefId::local(serialized_hash.def_index);\n\n \/\/ Store entry in the index_map\n let def_path_index = builder.add(def_id);\n serialized_hashes.index_map.insert(def_id.index, def_path_index);\n\n \/\/ Record hash in current_metadata_hashes\n current_metadata_hashes.insert(def_id, serialized_hash.hash);\n }\n\n debug!(\"save: stored index_map (len={}) for serialized hashes\",\n serialized_hashes.index_map.len());\n }\n\n \/\/ Encode everything.\n svh.encode(encoder)?;\n serialized_hashes.encode(encoder)?;\n\n Ok(())\n}\n\npub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> {\n let work_products: Vec<_> = sess.dep_graph\n .work_products()\n .iter()\n .map(|(id, work_product)| {\n SerializedWorkProduct {\n id: id.clone(),\n work_product: work_product.clone(),\n }\n })\n .collect();\n\n work_products.encode(encoder)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::BorrowSet;\nuse rustc::hir::def_id::DefId;\nuse rustc::mir::{ClosureRegionRequirements, ClosureOutlivesSubject, Mir};\nuse rustc::infer::InferCtxt;\nuse rustc::ty::{self, RegionKind, RegionVid};\nuse rustc::util::nodemap::FxHashMap;\nuse std::collections::BTreeSet;\nuse std::fmt::Debug;\nuse std::io;\nuse transform::MirSource;\nuse util::liveness::{LivenessResults, LocalSet};\nuse dataflow::FlowAtLocation;\nuse dataflow::MaybeInitializedPlaces;\nuse dataflow::move_paths::MoveData;\n\nuse util as mir_util;\nuse util::pretty::{self, ALIGN};\nuse self::mir_util::PassWhere;\n\nmod constraint_generation;\npub mod explain_borrow;\npub(crate) mod region_infer;\nmod renumber;\nmod subtype_constraint_generation;\npub(crate) mod type_check;\nmod universal_regions;\n\nuse self::region_infer::RegionInferenceContext;\nuse self::universal_regions::UniversalRegions;\n\n\n\/\/\/ Rewrites the regions in the MIR to use NLL variables, also\n\/\/\/ scraping out the set of universal regions (e.g., region parameters)\n\/\/\/ declared on the function. That set will need to be given to\n\/\/\/ `compute_regions`.\npub(in borrow_check) fn replace_regions_in_mir<'cx, 'gcx, 'tcx>(\n infcx: &InferCtxt<'cx, 'gcx, 'tcx>,\n def_id: DefId,\n param_env: ty::ParamEnv<'tcx>,\n mir: &mut Mir<'tcx>,\n) -> UniversalRegions<'tcx> {\n debug!(\"replace_regions_in_mir(def_id={:?})\", def_id);\n\n \/\/ Compute named region information. This also renumbers the inputs\/outputs.\n let universal_regions = UniversalRegions::new(infcx, def_id, param_env);\n\n \/\/ Replace all remaining regions with fresh inference variables.\n renumber::renumber_mir(infcx, mir);\n\n let source = MirSource::item(def_id);\n mir_util::dump_mir(infcx.tcx, None, \"renumber\", &0, source, mir, |_, _| Ok(()));\n\n universal_regions\n}\n\n\/\/\/ Computes the (non-lexical) regions from the input MIR.\n\/\/\/\n\/\/\/ This may result in errors being reported.\npub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(\n infcx: &InferCtxt<'cx, 'gcx, 'tcx>,\n def_id: DefId,\n universal_regions: UniversalRegions<'tcx>,\n mir: &Mir<'tcx>,\n param_env: ty::ParamEnv<'gcx>,\n flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,\n move_data: &MoveData<'tcx>,\n _borrow_set: &BorrowSet<'tcx>,\n) -> (\n RegionInferenceContext<'tcx>,\n Option<ClosureRegionRequirements<'gcx>>,\n) {\n \/\/ Run the MIR type-checker.\n let liveness = &LivenessResults::compute(mir);\n let constraint_sets = &type_check::type_check(\n infcx,\n param_env,\n mir,\n def_id,\n &universal_regions,\n &liveness,\n flow_inits,\n move_data,\n );\n\n \/\/ Create the region inference context, taking ownership of the region inference\n \/\/ data that was contained in `infcx`.\n let var_origins = infcx.take_region_var_origins();\n let mut regioncx = RegionInferenceContext::new(var_origins, universal_regions, mir);\n subtype_constraint_generation::generate(&mut regioncx, mir, constraint_sets);\n\n\n \/\/ Generate non-subtyping constraints.\n constraint_generation::generate_constraints(infcx, &mut regioncx, &mir);\n\n \/\/ Solve the region constraints.\n let closure_region_requirements = regioncx.solve(infcx, &mir, def_id);\n\n \/\/ Dump MIR results into a file, if that is enabled. 
This let us\n \/\/ write unit-tests, as well as helping with debugging.\n dump_mir_results(\n infcx,\n liveness,\n MirSource::item(def_id),\n &mir,\n ®ioncx,\n &closure_region_requirements,\n );\n\n \/\/ We also have a `#[rustc_nll]` annotation that causes us to dump\n \/\/ information\n dump_annotation(infcx, &mir, def_id, ®ioncx, &closure_region_requirements);\n\n (regioncx, closure_region_requirements)\n}\n\nfn dump_mir_results<'a, 'gcx, 'tcx>(\n infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n liveness: &LivenessResults,\n source: MirSource,\n mir: &Mir<'tcx>,\n regioncx: &RegionInferenceContext,\n closure_region_requirements: &Option<ClosureRegionRequirements>,\n) {\n if !mir_util::dump_enabled(infcx.tcx, \"nll\", source) {\n return;\n }\n\n let regular_liveness_per_location: FxHashMap<_, _> = mir.basic_blocks()\n .indices()\n .flat_map(|bb| {\n let mut results = vec![];\n liveness\n .regular\n .simulate_block(&mir, bb, |location, local_set| {\n results.push((location, local_set.clone()));\n });\n results\n })\n .collect();\n\n let drop_liveness_per_location: FxHashMap<_, _> = mir.basic_blocks()\n .indices()\n .flat_map(|bb| {\n let mut results = vec![];\n liveness\n .drop\n .simulate_block(&mir, bb, |location, local_set| {\n results.push((location, local_set.clone()));\n });\n results\n })\n .collect();\n\n mir_util::dump_mir(infcx.tcx, None, \"nll\", &0, source, mir, |pass_where, out| {\n match pass_where {\n \/\/ Before the CFG, dump out the values for each region variable.\n PassWhere::BeforeCFG => {\n regioncx.dump_mir(out)?;\n\n if let Some(closure_region_requirements) = closure_region_requirements {\n writeln!(out, \"|\")?;\n writeln!(out, \"| Free Region Constraints\")?;\n for_each_region_constraint(closure_region_requirements, &mut |msg| {\n writeln!(out, \"| {}\", msg)\n })?;\n }\n }\n\n \/\/ Before each basic block, dump out the values\n \/\/ that are live on entry to the basic block.\n PassWhere::BeforeBlock(bb) => {\n let s = live_variable_set(&liveness.regular.ins[bb], &liveness.drop.ins[bb]);\n writeln!(out, \" | Live variables on entry to {:?}: {}\", bb, s)?;\n }\n\n PassWhere::BeforeLocation(location) => {\n let s = live_variable_set(\n ®ular_liveness_per_location[&location],\n &drop_liveness_per_location[&location],\n );\n writeln!(\n out,\n \"{:ALIGN$} | Live variables on entry to {:?}: {}\",\n \"\",\n location,\n s,\n ALIGN = ALIGN\n )?;\n }\n\n PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {}\n }\n Ok(())\n });\n\n \/\/ Also dump the inference graph constraints as a graphviz file.\n let _: io::Result<()> = do_catch! {{\n let mut file =\n pretty::create_dump_file(infcx.tcx, \"regioncx.dot\", None, \"nll\", &0, source)?;\n regioncx.dump_graphviz(&mut file)?;\n }};\n}\n\nfn dump_annotation<'a, 'gcx, 'tcx>(\n infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n mir: &Mir<'tcx>,\n mir_def_id: DefId,\n regioncx: &RegionInferenceContext,\n closure_region_requirements: &Option<ClosureRegionRequirements>,\n) {\n let tcx = infcx.tcx;\n let base_def_id = tcx.closure_base_def_id(mir_def_id);\n if !tcx.has_attr(base_def_id, \"rustc_regions\") {\n return;\n }\n\n \/\/ When the enclosing function is tagged with `#[rustc_regions]`,\n \/\/ we dump out various bits of state as warnings. This is useful\n \/\/ for verifying that the compiler is behaving as expected. 
These\n \/\/ warnings focus on the closure region requirements -- for\n \/\/ viewing the intraprocedural state, the -Zdump-mir output is\n \/\/ better.\n\n if let Some(closure_region_requirements) = closure_region_requirements {\n let mut err = tcx.sess\n .diagnostic()\n .span_note_diag(mir.span, \"External requirements\");\n\n regioncx.annotate(&mut err);\n\n err.note(&format!(\n \"number of external vids: {}\",\n closure_region_requirements.num_external_vids\n ));\n\n \/\/ Dump the region constraints we are imposing *between* those\n \/\/ newly created variables.\n for_each_region_constraint(closure_region_requirements, &mut |msg| {\n err.note(msg);\n Ok(())\n }).unwrap();\n\n err.emit();\n } else {\n let mut err = tcx.sess\n .diagnostic()\n .span_note_diag(mir.span, \"No external requirements\");\n regioncx.annotate(&mut err);\n err.emit();\n }\n}\n\nfn for_each_region_constraint(\n closure_region_requirements: &ClosureRegionRequirements,\n with_msg: &mut dyn FnMut(&str) -> io::Result<()>,\n) -> io::Result<()> {\n for req in &closure_region_requirements.outlives_requirements {\n let subject: &dyn Debug = match &req.subject {\n ClosureOutlivesSubject::Region(subject) => subject,\n ClosureOutlivesSubject::Ty(ty) => ty,\n };\n with_msg(&format!(\n \"where {:?}: {:?}\",\n subject,\n req.outlived_free_region,\n ))?;\n }\n Ok(())\n}\n\n\/\/\/ Right now, we piggy back on the `ReVar` to store our NLL inference\n\/\/\/ regions. These are indexed with `RegionVid`. This method will\n\/\/\/ assert that the region is a `ReVar` and extract its internal index.\n\/\/\/ This is reasonable because in our MIR we replace all universal regions\n\/\/\/ with inference variables.\npub trait ToRegionVid {\n fn to_region_vid(self) -> RegionVid;\n}\n\nimpl<'tcx> ToRegionVid for &'tcx RegionKind {\n fn to_region_vid(self) -> RegionVid {\n if let ty::ReVar(vid) = self {\n *vid\n } else {\n bug!(\"region is not an ReVar: {:?}\", self)\n }\n }\n}\n\nimpl ToRegionVid for RegionVid {\n fn to_region_vid(self) -> RegionVid {\n self\n }\n}\n\nfn live_variable_set(regular: &LocalSet, drops: &LocalSet) -> String {\n \/\/ sort and deduplicate:\n let all_locals: BTreeSet<_> = regular.iter().chain(drops.iter()).collect();\n\n \/\/ construct a string with each local, including `(drop)` if it is\n \/\/ only dropped, versus a regular use.\n let mut string = String::new();\n for local in all_locals {\n string.push_str(&format!(\"{:?}\", local));\n\n if !regular.contains(&local) {\n assert!(drops.contains(&local));\n string.push_str(\" (drop)\");\n }\n\n string.push_str(\", \");\n }\n\n let len = if string.is_empty() {\n 0\n } else {\n string.len() - 2\n };\n\n format!(\"[{}]\", &string[..len])\n}\n<commit_msg>use `crate` visibility modifier<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::BorrowSet;\nuse rustc::hir::def_id::DefId;\nuse rustc::mir::{ClosureRegionRequirements, ClosureOutlivesSubject, Mir};\nuse rustc::infer::InferCtxt;\nuse rustc::ty::{self, RegionKind, RegionVid};\nuse rustc::util::nodemap::FxHashMap;\nuse std::collections::BTreeSet;\nuse std::fmt::Debug;\nuse std::io;\nuse transform::MirSource;\nuse util::liveness::{LivenessResults, LocalSet};\nuse dataflow::FlowAtLocation;\nuse dataflow::MaybeInitializedPlaces;\nuse dataflow::move_paths::MoveData;\n\nuse util as mir_util;\nuse util::pretty::{self, ALIGN};\nuse self::mir_util::PassWhere;\n\nmod constraint_generation;\npub mod explain_borrow;\ncrate mod region_infer;\nmod renumber;\nmod subtype_constraint_generation;\ncrate mod type_check;\nmod universal_regions;\n\nuse self::region_infer::RegionInferenceContext;\nuse self::universal_regions::UniversalRegions;\n\n\n\/\/\/ Rewrites the regions in the MIR to use NLL variables, also\n\/\/\/ scraping out the set of universal regions (e.g., region parameters)\n\/\/\/ declared on the function. That set will need to be given to\n\/\/\/ `compute_regions`.\npub(in borrow_check) fn replace_regions_in_mir<'cx, 'gcx, 'tcx>(\n infcx: &InferCtxt<'cx, 'gcx, 'tcx>,\n def_id: DefId,\n param_env: ty::ParamEnv<'tcx>,\n mir: &mut Mir<'tcx>,\n) -> UniversalRegions<'tcx> {\n debug!(\"replace_regions_in_mir(def_id={:?})\", def_id);\n\n \/\/ Compute named region information. This also renumbers the inputs\/outputs.\n let universal_regions = UniversalRegions::new(infcx, def_id, param_env);\n\n \/\/ Replace all remaining regions with fresh inference variables.\n renumber::renumber_mir(infcx, mir);\n\n let source = MirSource::item(def_id);\n mir_util::dump_mir(infcx.tcx, None, \"renumber\", &0, source, mir, |_, _| Ok(()));\n\n universal_regions\n}\n\n\/\/\/ Computes the (non-lexical) regions from the input MIR.\n\/\/\/\n\/\/\/ This may result in errors being reported.\npub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(\n infcx: &InferCtxt<'cx, 'gcx, 'tcx>,\n def_id: DefId,\n universal_regions: UniversalRegions<'tcx>,\n mir: &Mir<'tcx>,\n param_env: ty::ParamEnv<'gcx>,\n flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,\n move_data: &MoveData<'tcx>,\n _borrow_set: &BorrowSet<'tcx>,\n) -> (\n RegionInferenceContext<'tcx>,\n Option<ClosureRegionRequirements<'gcx>>,\n) {\n \/\/ Run the MIR type-checker.\n let liveness = &LivenessResults::compute(mir);\n let constraint_sets = &type_check::type_check(\n infcx,\n param_env,\n mir,\n def_id,\n &universal_regions,\n &liveness,\n flow_inits,\n move_data,\n );\n\n \/\/ Create the region inference context, taking ownership of the region inference\n \/\/ data that was contained in `infcx`.\n let var_origins = infcx.take_region_var_origins();\n let mut regioncx = RegionInferenceContext::new(var_origins, universal_regions, mir);\n subtype_constraint_generation::generate(&mut regioncx, mir, constraint_sets);\n\n\n \/\/ Generate non-subtyping constraints.\n constraint_generation::generate_constraints(infcx, &mut regioncx, &mir);\n\n \/\/ Solve the region constraints.\n let closure_region_requirements = regioncx.solve(infcx, &mir, def_id);\n\n \/\/ Dump MIR results into a file, if that is enabled. 
This let us\n \/\/ write unit-tests, as well as helping with debugging.\n dump_mir_results(\n infcx,\n liveness,\n MirSource::item(def_id),\n &mir,\n ®ioncx,\n &closure_region_requirements,\n );\n\n \/\/ We also have a `#[rustc_nll]` annotation that causes us to dump\n \/\/ information\n dump_annotation(infcx, &mir, def_id, ®ioncx, &closure_region_requirements);\n\n (regioncx, closure_region_requirements)\n}\n\nfn dump_mir_results<'a, 'gcx, 'tcx>(\n infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n liveness: &LivenessResults,\n source: MirSource,\n mir: &Mir<'tcx>,\n regioncx: &RegionInferenceContext,\n closure_region_requirements: &Option<ClosureRegionRequirements>,\n) {\n if !mir_util::dump_enabled(infcx.tcx, \"nll\", source) {\n return;\n }\n\n let regular_liveness_per_location: FxHashMap<_, _> = mir.basic_blocks()\n .indices()\n .flat_map(|bb| {\n let mut results = vec![];\n liveness\n .regular\n .simulate_block(&mir, bb, |location, local_set| {\n results.push((location, local_set.clone()));\n });\n results\n })\n .collect();\n\n let drop_liveness_per_location: FxHashMap<_, _> = mir.basic_blocks()\n .indices()\n .flat_map(|bb| {\n let mut results = vec![];\n liveness\n .drop\n .simulate_block(&mir, bb, |location, local_set| {\n results.push((location, local_set.clone()));\n });\n results\n })\n .collect();\n\n mir_util::dump_mir(infcx.tcx, None, \"nll\", &0, source, mir, |pass_where, out| {\n match pass_where {\n \/\/ Before the CFG, dump out the values for each region variable.\n PassWhere::BeforeCFG => {\n regioncx.dump_mir(out)?;\n\n if let Some(closure_region_requirements) = closure_region_requirements {\n writeln!(out, \"|\")?;\n writeln!(out, \"| Free Region Constraints\")?;\n for_each_region_constraint(closure_region_requirements, &mut |msg| {\n writeln!(out, \"| {}\", msg)\n })?;\n }\n }\n\n \/\/ Before each basic block, dump out the values\n \/\/ that are live on entry to the basic block.\n PassWhere::BeforeBlock(bb) => {\n let s = live_variable_set(&liveness.regular.ins[bb], &liveness.drop.ins[bb]);\n writeln!(out, \" | Live variables on entry to {:?}: {}\", bb, s)?;\n }\n\n PassWhere::BeforeLocation(location) => {\n let s = live_variable_set(\n ®ular_liveness_per_location[&location],\n &drop_liveness_per_location[&location],\n );\n writeln!(\n out,\n \"{:ALIGN$} | Live variables on entry to {:?}: {}\",\n \"\",\n location,\n s,\n ALIGN = ALIGN\n )?;\n }\n\n PassWhere::AfterLocation(_) | PassWhere::AfterCFG => {}\n }\n Ok(())\n });\n\n \/\/ Also dump the inference graph constraints as a graphviz file.\n let _: io::Result<()> = do_catch! {{\n let mut file =\n pretty::create_dump_file(infcx.tcx, \"regioncx.dot\", None, \"nll\", &0, source)?;\n regioncx.dump_graphviz(&mut file)?;\n }};\n}\n\nfn dump_annotation<'a, 'gcx, 'tcx>(\n infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n mir: &Mir<'tcx>,\n mir_def_id: DefId,\n regioncx: &RegionInferenceContext,\n closure_region_requirements: &Option<ClosureRegionRequirements>,\n) {\n let tcx = infcx.tcx;\n let base_def_id = tcx.closure_base_def_id(mir_def_id);\n if !tcx.has_attr(base_def_id, \"rustc_regions\") {\n return;\n }\n\n \/\/ When the enclosing function is tagged with `#[rustc_regions]`,\n \/\/ we dump out various bits of state as warnings. This is useful\n \/\/ for verifying that the compiler is behaving as expected. 
These\n \/\/ warnings focus on the closure region requirements -- for\n \/\/ viewing the intraprocedural state, the -Zdump-mir output is\n \/\/ better.\n\n if let Some(closure_region_requirements) = closure_region_requirements {\n let mut err = tcx.sess\n .diagnostic()\n .span_note_diag(mir.span, \"External requirements\");\n\n regioncx.annotate(&mut err);\n\n err.note(&format!(\n \"number of external vids: {}\",\n closure_region_requirements.num_external_vids\n ));\n\n \/\/ Dump the region constraints we are imposing *between* those\n \/\/ newly created variables.\n for_each_region_constraint(closure_region_requirements, &mut |msg| {\n err.note(msg);\n Ok(())\n }).unwrap();\n\n err.emit();\n } else {\n let mut err = tcx.sess\n .diagnostic()\n .span_note_diag(mir.span, \"No external requirements\");\n regioncx.annotate(&mut err);\n err.emit();\n }\n}\n\nfn for_each_region_constraint(\n closure_region_requirements: &ClosureRegionRequirements,\n with_msg: &mut dyn FnMut(&str) -> io::Result<()>,\n) -> io::Result<()> {\n for req in &closure_region_requirements.outlives_requirements {\n let subject: &dyn Debug = match &req.subject {\n ClosureOutlivesSubject::Region(subject) => subject,\n ClosureOutlivesSubject::Ty(ty) => ty,\n };\n with_msg(&format!(\n \"where {:?}: {:?}\",\n subject,\n req.outlived_free_region,\n ))?;\n }\n Ok(())\n}\n\n\/\/\/ Right now, we piggy back on the `ReVar` to store our NLL inference\n\/\/\/ regions. These are indexed with `RegionVid`. This method will\n\/\/\/ assert that the region is a `ReVar` and extract its internal index.\n\/\/\/ This is reasonable because in our MIR we replace all universal regions\n\/\/\/ with inference variables.\npub trait ToRegionVid {\n fn to_region_vid(self) -> RegionVid;\n}\n\nimpl<'tcx> ToRegionVid for &'tcx RegionKind {\n fn to_region_vid(self) -> RegionVid {\n if let ty::ReVar(vid) = self {\n *vid\n } else {\n bug!(\"region is not an ReVar: {:?}\", self)\n }\n }\n}\n\nimpl ToRegionVid for RegionVid {\n fn to_region_vid(self) -> RegionVid {\n self\n }\n}\n\nfn live_variable_set(regular: &LocalSet, drops: &LocalSet) -> String {\n \/\/ sort and deduplicate:\n let all_locals: BTreeSet<_> = regular.iter().chain(drops.iter()).collect();\n\n \/\/ construct a string with each local, including `(drop)` if it is\n \/\/ only dropped, versus a regular use.\n let mut string = String::new();\n for local in all_locals {\n string.push_str(&format!(\"{:?}\", local));\n\n if !regular.contains(&local) {\n assert!(drops.contains(&local));\n string.push_str(\" (drop)\");\n }\n\n string.push_str(\", \");\n }\n\n let len = if string.is_empty() {\n 0\n } else {\n string.len() - 2\n };\n\n format!(\"[{}]\", &string[..len])\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) IxMilia. All Rights Reserved. Licensed under the Apache License, Version 2.0. 
See License.txt in the project root for license information.\n\nextern crate chrono;\nuse self::chrono::*;\n\nextern crate uuid;\nuse self::uuid::Uuid;\n\nuse std::io;\nuse enum_primitive::FromPrimitive;\n\nuse ::Color;\nuse ::enums::*;\nuse ::tables::Layer;\n\npub fn verify_code(expected: i32, actual: i32) -> io::Result<()> {\n if expected == actual {\n Ok(())\n }\n else {\n Err(io::Error::new(io::ErrorKind::InvalidData, format!(\"expected code {} but got {}\", expected, actual)))\n }\n}\n\npub fn as_bool(v: i16) -> bool {\n v == 1\n}\n\nfn f64_to_adjusted_duration(f: f64) -> Duration {\n let days_since_dublin = f - 2415020.0; \/\/ julian dublin offset, e.g., December 31, 1899 12:00AM\n let seconds = days_since_dublin * 24.0 * 60.0 * 60.0;\n \/\/ functions consuming this need to use 1900\/01\/01 instead of 1899\/12\/31 as a base\n \/\/ so we counter the extra day and leap second here\n Duration::seconds(seconds as i64)\n - Duration::days(1)\n + Duration::seconds(1)\n}\n\npub fn as_datetime_local(date: f64) -> DateTime<Local> {\n \/\/ dates are represented as the fractional number of days elapsed since December 31, 1899.\n if date == 0.0 {\n Local.ymd(1900, 1, 1).and_hms(0, 0, 0)\n }\n else {\n Local.ymd(1900, 1, 1).and_hms(0, 0, 0) + f64_to_adjusted_duration(date)\n }\n}\n\npub fn as_datetime_utc(date: f64) -> DateTime<UTC> {\n \/\/ dates are represented as the fractional number of days elapsed since December 31, 1899.\n if date == 0.0 {\n UTC.ymd(1900, 1, 1).and_hms(0, 0, 0)\n }\n else {\n UTC.ymd(1900, 1, 1).and_hms(0, 0, 0) + f64_to_adjusted_duration(date)\n }\n}\n\npub fn as_double_local(date: DateTime<Local>) -> f64 {\n let epoch = Local.ymd(1900, 1, 1).and_hms(0, 0, 0);\n let duration = date - epoch;\n (duration.num_seconds() as f64 \/ 24.0 \/ 60.0 \/ 60.0) + 2415021f64\n}\n\npub fn as_double_utc(date: DateTime<UTC>) -> f64 {\n let epoch = UTC.ymd(1900, 1, 1).and_hms(0, 0, 0);\n let duration = date - epoch;\n (duration.num_seconds() as f64 \/ 24.0 \/ 60.0 \/ 60.0) + 2415021f64\n}\n\npub fn duration_as_double(duration: Duration) -> f64 {\n duration.num_seconds() as f64\n}\n\npub fn as_duration(d: f64) -> Duration {\n Duration::seconds(d as i64)\n}\n\npub fn as_u32(s: String) -> io::Result<u32> {\n let mut result = 0;\n for c in s.chars() {\n match c {\n '0' => result = result * 16,\n '1' => result = result * 16 + 1,\n '2' => result = result * 16 + 2,\n '3' => result = result * 16 + 3,\n '4' => result = result * 16 + 4,\n '5' => result = result * 16 + 5,\n '6' => result = result * 16 + 6,\n '7' => result = result * 16 + 7,\n '8' => result = result * 16 + 8,\n '9' => result = result * 16 + 9,\n 'A' | 'a' => result = result * 16 + 10,\n 'B' | 'b' => result = result * 16 + 11,\n 'C' | 'c' => result = result * 16 + 12,\n 'D' | 'd' => result = result * 16 + 13,\n 'E' | 'e' => result = result * 16 + 14,\n 'F' | 'f' => result = result * 16 + 15,\n _ => return Err(io::Error::new(io::ErrorKind::InvalidData, \"invalid hex character\")),\n }\n }\n\n Ok(result)\n}\n\npub fn as_handle(h: u32) -> String {\n format!(\"{:X}\", h)\n}\n\npub fn as_uuid(s: String) -> io::Result<Uuid> {\n match Uuid::parse_str(s.as_str()) {\n Ok(uuid) => Ok(uuid),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\npub fn as_short(b: bool) -> i16 {\n if b { 1 } else { 0 }\n}\n\npub fn uuid_string(u: &Uuid) -> String {\n format!(\"{}\", u)\n}\n\npub fn default_if_empty(val: &String, default: &str) -> String {\n if val.is_empty() { String::from(default) } else { val.clone() }\n}\n\npub fn 
ensure_positive_or_default(val: f64, default: f64) -> f64 {\n if val <= 0.0 { default } else { val }\n}\n\npub fn ensure_positive_or_default_i32(val: i32, default: i32) -> i32 {\n if val <= 0 { default } else { val }\n}\n\npub fn ensure_positive_or_default_i16(val: i16, default: i16) -> i16 {\n if val <= 0 { default } else { val }\n}\n\npub fn get_writable_linetype_name<'a>(val: &'a str) -> &'a str {\n if val.is_empty() { \"CONTINUOUS\" } else { val }\n}\n\npub fn clipping_from_bool(b: bool) -> Option<XrefClippingBoundaryVisibility> {\n XrefClippingBoundaryVisibility::from_i16(if b { 1 } else { 0 })\n}\n\npub fn bool_from_clipping(c: XrefClippingBoundaryVisibility) -> bool {\n c != XrefClippingBoundaryVisibility::NotDisplayedNotPlotted\n}\n\npub fn version_from_string(v: String) -> io::Result<AcadVersion> {\n match &*v {\n \"MC0.0\" => Ok(AcadVersion::Version_1_0),\n \"AC1.2\" => Ok(AcadVersion::Version_1_2),\n \"AC1.40\" => Ok(AcadVersion::Version_1_40),\n \"AC1.50\" => Ok(AcadVersion::Version_2_05),\n \"AC2.10\" => Ok(AcadVersion::Version_2_10),\n \"AC2.21\" => Ok(AcadVersion::Version_2_21),\n \"AC2.22\" => Ok(AcadVersion::Version_2_22),\n \"AC1001\" => Ok(AcadVersion::Version_2_22),\n \"AC1002\" => Ok(AcadVersion::Version_2_5),\n \"AC1003\" => Ok(AcadVersion::Version_2_6),\n \"AC1004\" => Ok(AcadVersion::R9),\n \"AC1006\" => Ok(AcadVersion::R10),\n \"AC1009\" => Ok(AcadVersion::R12),\n \"AC1011\" => Ok(AcadVersion::R13),\n \"AC1012\" => Ok(AcadVersion::R13),\n \"AC1014\" => Ok(AcadVersion::R14),\n \"14\" => Ok(AcadVersion::R14),\n \"14.01\" => Ok(AcadVersion::R14),\n \"AC1015\" => Ok(AcadVersion::R2000),\n \"15.0\" => Ok(AcadVersion::R2000),\n \"15.05\" => Ok(AcadVersion::R2000),\n \"15.06\" => Ok(AcadVersion::R2000),\n \"AC1018\" => Ok(AcadVersion::R2004),\n \"16.0\" => Ok(AcadVersion::R2004),\n \"16.1\" => Ok(AcadVersion::R2004),\n \"16.2\" => Ok(AcadVersion::R2004),\n \"AC1021\" => Ok(AcadVersion::R2007),\n \"17.0\" => Ok(AcadVersion::R2007),\n \"17.1\" => Ok(AcadVersion::R2007),\n \"17.2\" => Ok(AcadVersion::R2007),\n \"AC1024\" => Ok(AcadVersion::R2010),\n \"18.0\" => Ok(AcadVersion::R2010),\n \"18.1\" => Ok(AcadVersion::R2010),\n \"18.2\" => Ok(AcadVersion::R2010),\n \"AC1027\" => Ok(AcadVersion::R2013),\n \"19.0\" => Ok(AcadVersion::R2013),\n \"19.1\" => Ok(AcadVersion::R2013),\n \"19.2\" => Ok(AcadVersion::R2013),\n \"19.3\" => Ok(AcadVersion::R2013),\n _ => Err(io::Error::new(io::ErrorKind::InvalidData, format!(\"unsupported version {}\", v))),\n }\n}\n\npub fn string_from_version(v: &AcadVersion) -> String {\n String::from(\n match v {\n &AcadVersion::Version_1_0 => \"MC0.0\",\n &AcadVersion::Version_1_2 => \"AC1.2\",\n &AcadVersion::Version_1_40 => \"AC1.40\",\n &AcadVersion::Version_2_05 => \"AC1.50\",\n &AcadVersion::Version_2_10 => \"AC2.10\",\n &AcadVersion::Version_2_21 => \"AC2.21\",\n &AcadVersion::Version_2_22 => \"AC2.22\",\n &AcadVersion::Version_2_5 => \"AC1002\",\n &AcadVersion::Version_2_6 => \"AC1003\",\n &AcadVersion::R9 => \"AC1004\",\n &AcadVersion::R10 => \"AC1006\",\n &AcadVersion::R11 => \"AC1009\",\n &AcadVersion::R12 => \"AC1009\",\n &AcadVersion::R13 => \"AC1012\",\n &AcadVersion::R14 => \"AC1014\",\n &AcadVersion::R2000 => \"AC1015\",\n &AcadVersion::R2004 => \"AC1018\",\n &AcadVersion::R2007 => \"AC1021\",\n &AcadVersion::R2010 => \"AC1024\",\n &AcadVersion::R2013 => \"AC1027\",\n })\n}\n\npub fn parse_bool(s: String) -> io::Result<bool> {\n match parse_short(s) {\n Ok(0) => Ok(false),\n Ok(_) => Ok(true),\n Err(x) => 
Err(io::Error::new(io::ErrorKind::InvalidData, x)),\n }\n}\n\npub fn parse_double(s: String) -> io::Result<f64> {\n match s.parse::<f64>() {\n Ok(d) => Ok(d),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\npub fn parse_int(s: String) -> io::Result<i32> {\n match s.parse::<i32>() {\n Ok(i) => Ok(i),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\npub fn parse_long(s: String) -> io::Result<i64> {\n match s.parse::<i64>() {\n Ok(l) => Ok(l),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\npub fn parse_short(s: String) -> io::Result<i16> {\n match s.parse::<i16>() {\n Ok(s) => Ok(s),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\npub fn trim_trailing_newline(s: &mut String) {\n if s.ends_with('\\n') {\n s.pop();\n if s.ends_with('\\r') {\n s.pop();\n }\n }\n}\n\npub fn read_color_value(layer: &mut Layer, color: i16) -> Color {\n layer.is_layer_on = color >= 0;\n Color::from_raw_value(color.abs())\n}\n<commit_msg>don't create documentation for internal-only helper functions<commit_after>\/\/ Copyright (c) IxMilia. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.\n\nextern crate chrono;\nuse self::chrono::*;\n\nextern crate uuid;\nuse self::uuid::Uuid;\n\nuse std::io;\nuse enum_primitive::FromPrimitive;\n\nuse ::Color;\nuse ::enums::*;\nuse ::tables::Layer;\n\n#[doc(hidden)]\npub fn verify_code(expected: i32, actual: i32) -> io::Result<()> {\n if expected == actual {\n Ok(())\n }\n else {\n Err(io::Error::new(io::ErrorKind::InvalidData, format!(\"expected code {} but got {}\", expected, actual)))\n }\n}\n\n#[doc(hidden)]\npub fn as_bool(v: i16) -> bool {\n v == 1\n}\n\nfn f64_to_adjusted_duration(f: f64) -> Duration {\n let days_since_dublin = f - 2415020.0; \/\/ julian dublin offset, e.g., December 31, 1899 12:00AM\n let seconds = days_since_dublin * 24.0 * 60.0 * 60.0;\n \/\/ functions consuming this need to use 1900\/01\/01 instead of 1899\/12\/31 as a base\n \/\/ so we counter the extra day and leap second here\n Duration::seconds(seconds as i64)\n - Duration::days(1)\n + Duration::seconds(1)\n}\n\n#[doc(hidden)]\npub fn as_datetime_local(date: f64) -> DateTime<Local> {\n \/\/ dates are represented as the fractional number of days elapsed since December 31, 1899.\n if date == 0.0 {\n Local.ymd(1900, 1, 1).and_hms(0, 0, 0)\n }\n else {\n Local.ymd(1900, 1, 1).and_hms(0, 0, 0) + f64_to_adjusted_duration(date)\n }\n}\n\n#[doc(hidden)]\npub fn as_datetime_utc(date: f64) -> DateTime<UTC> {\n \/\/ dates are represented as the fractional number of days elapsed since December 31, 1899.\n if date == 0.0 {\n UTC.ymd(1900, 1, 1).and_hms(0, 0, 0)\n }\n else {\n UTC.ymd(1900, 1, 1).and_hms(0, 0, 0) + f64_to_adjusted_duration(date)\n }\n}\n\n#[doc(hidden)]\npub fn as_double_local(date: DateTime<Local>) -> f64 {\n let epoch = Local.ymd(1900, 1, 1).and_hms(0, 0, 0);\n let duration = date - epoch;\n (duration.num_seconds() as f64 \/ 24.0 \/ 60.0 \/ 60.0) + 2415021f64\n}\n\n#[doc(hidden)]\npub fn as_double_utc(date: DateTime<UTC>) -> f64 {\n let epoch = UTC.ymd(1900, 1, 1).and_hms(0, 0, 0);\n let duration = date - epoch;\n (duration.num_seconds() as f64 \/ 24.0 \/ 60.0 \/ 60.0) + 2415021f64\n}\n\n#[doc(hidden)]\npub fn duration_as_double(duration: Duration) -> f64 {\n duration.num_seconds() as f64\n}\n\n#[doc(hidden)]\npub fn as_duration(d: f64) -> Duration {\n Duration::seconds(d as i64)\n}\n\n#[doc(hidden)]\npub fn 
as_u32(s: String) -> io::Result<u32> {\n let mut result = 0;\n for c in s.chars() {\n match c {\n '0' => result = result * 16,\n '1' => result = result * 16 + 1,\n '2' => result = result * 16 + 2,\n '3' => result = result * 16 + 3,\n '4' => result = result * 16 + 4,\n '5' => result = result * 16 + 5,\n '6' => result = result * 16 + 6,\n '7' => result = result * 16 + 7,\n '8' => result = result * 16 + 8,\n '9' => result = result * 16 + 9,\n 'A' | 'a' => result = result * 16 + 10,\n 'B' | 'b' => result = result * 16 + 11,\n 'C' | 'c' => result = result * 16 + 12,\n 'D' | 'd' => result = result * 16 + 13,\n 'E' | 'e' => result = result * 16 + 14,\n 'F' | 'f' => result = result * 16 + 15,\n _ => return Err(io::Error::new(io::ErrorKind::InvalidData, \"invalid hex character\")),\n }\n }\n\n Ok(result)\n}\n\n#[doc(hidden)]\npub fn as_handle(h: u32) -> String {\n format!(\"{:X}\", h)\n}\n\n#[doc(hidden)]\npub fn as_uuid(s: String) -> io::Result<Uuid> {\n match Uuid::parse_str(s.as_str()) {\n Ok(uuid) => Ok(uuid),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\n#[doc(hidden)]\npub fn as_short(b: bool) -> i16 {\n if b { 1 } else { 0 }\n}\n\n#[doc(hidden)]\npub fn uuid_string(u: &Uuid) -> String {\n format!(\"{}\", u)\n}\n\n#[doc(hidden)]\npub fn default_if_empty(val: &String, default: &str) -> String {\n if val.is_empty() { String::from(default) } else { val.clone() }\n}\n\n#[doc(hidden)]\npub fn ensure_positive_or_default(val: f64, default: f64) -> f64 {\n if val <= 0.0 { default } else { val }\n}\n\n#[doc(hidden)]\npub fn ensure_positive_or_default_i32(val: i32, default: i32) -> i32 {\n if val <= 0 { default } else { val }\n}\n\n#[doc(hidden)]\npub fn ensure_positive_or_default_i16(val: i16, default: i16) -> i16 {\n if val <= 0 { default } else { val }\n}\n\n#[doc(hidden)]\npub fn get_writable_linetype_name<'a>(val: &'a str) -> &'a str {\n if val.is_empty() { \"CONTINUOUS\" } else { val }\n}\n\n#[doc(hidden)]\npub fn clipping_from_bool(b: bool) -> Option<XrefClippingBoundaryVisibility> {\n XrefClippingBoundaryVisibility::from_i16(if b { 1 } else { 0 })\n}\n\n#[doc(hidden)]\npub fn bool_from_clipping(c: XrefClippingBoundaryVisibility) -> bool {\n c != XrefClippingBoundaryVisibility::NotDisplayedNotPlotted\n}\n\n#[doc(hidden)]\npub fn version_from_string(v: String) -> io::Result<AcadVersion> {\n match &*v {\n \"MC0.0\" => Ok(AcadVersion::Version_1_0),\n \"AC1.2\" => Ok(AcadVersion::Version_1_2),\n \"AC1.40\" => Ok(AcadVersion::Version_1_40),\n \"AC1.50\" => Ok(AcadVersion::Version_2_05),\n \"AC2.10\" => Ok(AcadVersion::Version_2_10),\n \"AC2.21\" => Ok(AcadVersion::Version_2_21),\n \"AC2.22\" => Ok(AcadVersion::Version_2_22),\n \"AC1001\" => Ok(AcadVersion::Version_2_22),\n \"AC1002\" => Ok(AcadVersion::Version_2_5),\n \"AC1003\" => Ok(AcadVersion::Version_2_6),\n \"AC1004\" => Ok(AcadVersion::R9),\n \"AC1006\" => Ok(AcadVersion::R10),\n \"AC1009\" => Ok(AcadVersion::R12),\n \"AC1011\" => Ok(AcadVersion::R13),\n \"AC1012\" => Ok(AcadVersion::R13),\n \"AC1014\" => Ok(AcadVersion::R14),\n \"14\" => Ok(AcadVersion::R14),\n \"14.01\" => Ok(AcadVersion::R14),\n \"AC1015\" => Ok(AcadVersion::R2000),\n \"15.0\" => Ok(AcadVersion::R2000),\n \"15.05\" => Ok(AcadVersion::R2000),\n \"15.06\" => Ok(AcadVersion::R2000),\n \"AC1018\" => Ok(AcadVersion::R2004),\n \"16.0\" => Ok(AcadVersion::R2004),\n \"16.1\" => Ok(AcadVersion::R2004),\n \"16.2\" => Ok(AcadVersion::R2004),\n \"AC1021\" => Ok(AcadVersion::R2007),\n \"17.0\" => Ok(AcadVersion::R2007),\n \"17.1\" => Ok(AcadVersion::R2007),\n 
\"17.2\" => Ok(AcadVersion::R2007),\n \"AC1024\" => Ok(AcadVersion::R2010),\n \"18.0\" => Ok(AcadVersion::R2010),\n \"18.1\" => Ok(AcadVersion::R2010),\n \"18.2\" => Ok(AcadVersion::R2010),\n \"AC1027\" => Ok(AcadVersion::R2013),\n \"19.0\" => Ok(AcadVersion::R2013),\n \"19.1\" => Ok(AcadVersion::R2013),\n \"19.2\" => Ok(AcadVersion::R2013),\n \"19.3\" => Ok(AcadVersion::R2013),\n _ => Err(io::Error::new(io::ErrorKind::InvalidData, format!(\"unsupported version {}\", v))),\n }\n}\n\n#[doc(hidden)]\npub fn string_from_version(v: &AcadVersion) -> String {\n String::from(\n match v {\n &AcadVersion::Version_1_0 => \"MC0.0\",\n &AcadVersion::Version_1_2 => \"AC1.2\",\n &AcadVersion::Version_1_40 => \"AC1.40\",\n &AcadVersion::Version_2_05 => \"AC1.50\",\n &AcadVersion::Version_2_10 => \"AC2.10\",\n &AcadVersion::Version_2_21 => \"AC2.21\",\n &AcadVersion::Version_2_22 => \"AC2.22\",\n &AcadVersion::Version_2_5 => \"AC1002\",\n &AcadVersion::Version_2_6 => \"AC1003\",\n &AcadVersion::R9 => \"AC1004\",\n &AcadVersion::R10 => \"AC1006\",\n &AcadVersion::R11 => \"AC1009\",\n &AcadVersion::R12 => \"AC1009\",\n &AcadVersion::R13 => \"AC1012\",\n &AcadVersion::R14 => \"AC1014\",\n &AcadVersion::R2000 => \"AC1015\",\n &AcadVersion::R2004 => \"AC1018\",\n &AcadVersion::R2007 => \"AC1021\",\n &AcadVersion::R2010 => \"AC1024\",\n &AcadVersion::R2013 => \"AC1027\",\n })\n}\n\n#[doc(hidden)]\npub fn parse_bool(s: String) -> io::Result<bool> {\n match parse_short(s) {\n Ok(0) => Ok(false),\n Ok(_) => Ok(true),\n Err(x) => Err(io::Error::new(io::ErrorKind::InvalidData, x)),\n }\n}\n\n#[doc(hidden)]\npub fn parse_double(s: String) -> io::Result<f64> {\n match s.parse::<f64>() {\n Ok(d) => Ok(d),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\n#[doc(hidden)]\npub fn parse_int(s: String) -> io::Result<i32> {\n match s.parse::<i32>() {\n Ok(i) => Ok(i),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\n#[doc(hidden)]\npub fn parse_long(s: String) -> io::Result<i64> {\n match s.parse::<i64>() {\n Ok(l) => Ok(l),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\n#[doc(hidden)]\npub fn parse_short(s: String) -> io::Result<i16> {\n match s.parse::<i16>() {\n Ok(s) => Ok(s),\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),\n }\n}\n\n#[doc(hidden)]\npub fn trim_trailing_newline(s: &mut String) {\n if s.ends_with('\\n') {\n s.pop();\n if s.ends_with('\\r') {\n s.pop();\n }\n }\n}\n\n#[doc(hidden)]\npub fn read_color_value(layer: &mut Layer, color: i16) -> Color {\n layer.is_layer_on = color >= 0;\n Color::from_raw_value(color.abs())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Display trait for Vector<T> (#33)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test the `UndefMask` type<commit_after>\/\/ ignore-cross-compile\n\/\/ ignore-stage1\n\n#![feature(rustc_private)]\n\nextern crate rustc;\n\nuse rustc::mir::interpret::UndefMask;\nuse rustc::ty::layout::Size;\n\nfn main() {\n let mut mask = UndefMask::new(Size::from_bytes(500));\n assert!(!mask.get(Size::from_bytes(499)));\n mask.set(Size::from_bytes(499), true);\n assert!(mask.get(Size::from_bytes(499)));\n mask.set_range_inbounds(Size::from_bytes(100), Size::from_bytes(256), true);\n for i in 0..100 {\n assert!(!mask.get(Size::from_bytes(i)));\n }\n for i in 100..256 {\n assert!(mask.get(Size::from_bytes(i)));\n }\n for i in 256..499 {\n assert!(!mask.get(Size::from_bytes(i)));\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>byteread: Initial trait<commit_after>pub trait ByteRead {\n fn get_byte(&mut self) -> u8;\n\n fn get_u16l(&mut self) -> u16;\n fn get_u16b(&mut self) -> u16;\n\n fn get_u32l(&mut self) -> u32;\n fn get_u32b(&mut self) -> u32;\n\n fn get_u64l(&mut self) -> u64;\n fn get_u64b(&mut self) -> u64;\n\n fn get_f32l(&mut self) -> f32;\n fn get_f32b(&mut self) -> f32;\n\n fn get_f64l(&mut self) -> f64;\n fn get_f64b(&mut self) -> f64;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Enhance arg parsing to support reading multiple single char flags with one dash<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added filter tests<commit_after><|endoftext|>"} {"text":"<commit_before>use std::ffi::CStr;\nuse version::Version;\nuse version::Api;\nuse gl;\n\nmacro_rules! extensions {\n ($($string:expr => $field:ident,)+) => {\n \/\/\/ Contains data about the list of extensions.\n #[derive(Debug, Clone, Copy)]\n pub struct ExtensionsList {\n $(\n pub $field: bool,\n )+\n }\n\n \/\/\/ Returns the list of extensions supported by the backend.\n \/\/\/\n \/\/\/ The version must match the one of the backend.\n \/\/\/\n \/\/\/ *Safety*: the OpenGL context corresponding to `gl` must be current in the thread.\n \/\/\/\n \/\/\/ ## Panic\n \/\/\/\n \/\/\/ Can panic if the version number doesn't match the backend, leading to unloaded functions\n \/\/\/ being called.\n \/\/\/\n pub unsafe fn get_extensions(gl: &gl::Gl, version: &Version) -> ExtensionsList {\n let strings = get_extensions_strings(gl, version);\n\n let mut extensions = ExtensionsList {\n $(\n $field: false,\n )+\n };\n\n for extension in strings.into_iter() {\n match &extension[..] {\n $(\n $string => extensions.$field = true,\n )+\n _ => ()\n }\n }\n\n extensions\n }\n }\n}\n\nextensions! 
{\n \"GL_AMD_depth_clamp_separate\" => gl_amd_depth_clamp_separate,\n \"GL_AMD_query_buffer_object\" => gl_amd_query_buffer_object,\n \"GL_APPLE_sync\" => gl_apple_sync,\n \"GL_APPLE_vertex_array_object\" => gl_apple_vertex_array_object,\n \"GL_ARB_bindless_texture\" => gl_arb_bindless_texture,\n \"GL_ARB_buffer_storage\" => gl_arb_buffer_storage,\n \"GL_ARB_compute_shader\" => gl_arb_compute_shader,\n \"GL_ARB_copy_buffer\" => gl_arb_copy_buffer,\n \"GL_ARB_debug_output\" => gl_arb_debug_output,\n \"GL_ARB_depth_clamp\" => gl_arb_depth_clamp,\n \"GL_ARB_depth_texture\" => gl_arb_depth_texture,\n \"GL_ARB_direct_state_access\" => gl_arb_direct_state_access,\n \"GL_ARB_draw_buffers\" => gl_arb_draw_buffers,\n \"GL_ARB_draw_elements_base_vertex\" => gl_arb_draw_elements_base_vertex,\n \"GL_ARB_compatibility\" => gl_arb_compatibility,\n \"GL_ARB_ES2_compatibility\" => gl_arb_es2_compatibility,\n \"GL_ARB_ES3_compatibility\" => gl_arb_es3_compatibility,\n \"GL_ARB_ES3_1_compatibility\" => gl_arb_es3_1_compatibility,\n \"GL_ARB_ES3_2_compatibility\" => gl_arb_es3_2_compatibility,\n \"GL_ARB_fragment_shader\" => gl_arb_fragment_shader,\n \"GL_ARB_framebuffer_object\" => gl_arb_framebuffer_object,\n \"GL_ARB_framebuffer_sRGB\" => gl_arb_framebuffer_srgb,\n \"GL_ARB_geometry_shader4\" => gl_arb_geometry_shader4,\n \"GL_ARB_get_program_binary\" => gl_arb_get_programy_binary,\n \"GL_ARB_instanced_arrays\" => gl_arb_instanced_arrays,\n \"GL_ARB_invalidate_subdata\" => gl_arb_invalidate_subdata,\n \"GL_ARB_occlusion_query\" => gl_arb_occlusion_query,\n \"GL_ARB_occlusion_query2\" => gl_arb_occlusion_query2,\n \"GL_ARB_pixel_buffer_object\" => gl_arb_pixel_buffer_object,\n \"GL_ARB_program_interface_query\" => gl_arb_program_interface_query,\n \"GL_ARB_query_buffer_object\" => gl_arb_query_buffer_object,\n \"GL_ARB_map_buffer_range\" => gl_arb_map_buffer_range,\n \"GL_ARB_multi_draw_indirect\" => gl_arb_multi_draw_indirect,\n \"GL_ARB_provoking_vertex\" => gl_arb_provoking_vertex,\n \"GL_ARB_robustness\" => gl_arb_robustness,\n \"GL_ARB_robust_buffer_access_behavior\" => gl_arb_robust_buffer_access_behavior,\n \"GL_ARB_sampler_objects\" => gl_arb_sampler_objects,\n \"GL_ARB_shader_image_load_store\" => gl_arb_shader_image_load_store,\n \"GL_ARB_shader_objects\" => gl_arb_shader_objects,\n \"GL_ARB_shader_storage_buffer_object\" => gl_arb_shader_storage_buffer_object,\n \"GL_ARB_sync\" => gl_arb_sync,\n \"GL_ARB_tessellation_shader\" => gl_arb_tessellation_shader,\n \"GL_ARB_texture_buffer_object\" => gl_arb_texture_buffer_object,\n \"GL_ARB_texture_buffer_object_rgb32\" => gl_arb_texture_buffer_object_rgb32,\n \"GL_ARB_texture_compression_bptc\" => gl_arb_texture_compression_bptc,\n \"GL_ARB_texture_float\" => gl_arb_texture_float,\n \"GL_ARB_texture_multisample\" => gl_arb_texture_multisample,\n \"GL_ARB_texture_non_power_of_two\" => gl_arb_texture_non_power_of_two,\n \"GL_ARB_texture_rg\" => gl_arb_texture_rg,\n \"GL_ARB_texture_rgb10_a2ui\" => gl_arb_texture_rgb10_a2ui,\n \"GL_ARB_texture_stencil8\" => gl_arb_texture_stencil8,\n \"GL_ARB_texture_storage\" => gl_arb_texture_storage,\n \"GL_ARB_timer_query\" => gl_arb_timer_query,\n \"GL_ARB_transform_feedback3\" => gl_arb_transform_feedback3,\n \"GL_ARB_uniform_buffer_object\" => gl_arb_uniform_buffer_object,\n \"GL_ARB_vertex_array_object\" => gl_arb_vertex_array_object,\n \"GL_ARB_vertex_buffer_object\" => gl_arb_vertex_buffer_object,\n \"GL_ARB_vertex_half_float\" => gl_arb_vertex_half_float,\n \"GL_ARB_vertex_shader\" => gl_arb_vertex_shader,\n 
\"GL_ARB_vertex_type_10f_11f_11f_rev\" => gl_arb_vertex_type_10f_11f_11f_rev,\n \"GL_ARB_vertex_type_2_10_10_10_rev\" => gl_arb_vertex_type_2_10_10_10_rev,\n \"GL_ARM_rgba8\" => gl_arm_rgba8,\n \"GL_ATI_meminfo\" => gl_ati_meminfo,\n \"GL_ATI_draw_buffers\" => gl_ati_draw_buffers,\n \"GL_ATI_texture_float\" => gl_ati_texture_float,\n \"GL_EXT_buffer_storage\" => gl_ext_buffer_storage,\n \"GL_EXT_debug_marker\" => gl_ext_debug_marker,\n \"GL_EXT_direct_state_access\" => gl_ext_direct_state_access,\n \"GL_EXT_disjoint_timer_query\" => gl_ext_disjoint_timer_query,\n \"GL_EXT_framebuffer_blit\" => gl_ext_framebuffer_blit,\n \"GL_EXT_framebuffer_object\" => gl_ext_framebuffer_object,\n \"GL_EXT_framebuffer_sRGB\" => gl_ext_framebuffer_srgb,\n \"GL_EXT_geometry_shader\" => gl_ext_geometry_shader,\n \"GL_EXT_geometry_shader4\" => gl_ext_geometry_shader4,\n \"GL_EXT_gpu_shader4\" => gl_ext_gpu_shader4,\n \"GL_EXT_multi_draw_indirect\" => gl_ext_multi_draw_indirect,\n \"GL_EXT_occlusion_query_boolean\" => gl_ext_occlusion_query_boolean,\n \"GL_EXT_packed_depth_stencil\" => gl_ext_packed_depth_stencil,\n \"GL_EXT_packed_float\" => gl_ext_packed_float,\n \"GL_EXT_primitive_bounding_box\" => gl_ext_primitive_bounding_box,\n \"GL_EXT_provoking_vertex\" => gl_ext_provoking_vertex,\n \"GL_EXT_robustness\" => gl_ext_robustness,\n \"GL_EXT_sRGB_write_control\" => gl_ext_srgb_write_control,\n \"GL_EXT_texture_array\" => gl_ext_texture_array,\n \"GL_EXT_texture_buffer\" => gl_ext_texture_buffer,\n \"GL_EXT_texture_buffer_object\" => gl_ext_texture_buffer_object,\n \"GL_EXT_texture_compression_s3tc\" => gl_ext_texture_compression_s3tc,\n \"GL_EXT_texture_filter_anisotropic\" => gl_ext_texture_filter_anisotropic,\n \"GL_EXT_texture_integer\" => gl_ext_texture_integer,\n \"GL_EXT_texture_shared_exponent\" => gl_ext_texture_shared_exponent,\n \"GL_EXT_texture_snorm\" => gl_ext_texture_snorm,\n \"GL_EXT_texture_sRGB\" => gl_ext_texture_srgb,\n \"GL_EXT_transform_feedback\" => gl_ext_transform_feedback,\n \"GL_GREMEDY_string_marker\" => gl_gremedy_string_marker,\n \"GL_KHR_debug\" => gl_khr_debug,\n \"GL_KHR_context_flush_control\" => gl_khr_context_flush_control,\n \"GL_KHR_robustness\" => gl_khr_robustness,\n \"GL_KHR_robust_buffer_access_behavior\" => gl_khr_robust_buffer_access_behavior,\n \"GL_NV_conditional_render\" => gl_nv_conditional_render,\n \"GL_NV_copy_buffer\" => gl_nv_copy_buffer,\n \"GL_NV_depth_clamp\" => gl_nv_depth_clamp,\n \"GL_NV_half_float\" => gl_nv_half_float,\n \"GL_NV_pixel_buffer_object\" => gl_nv_pixel_buffer_object,\n \"GL_nV_vertex_attrib_integer_64bit\" => gl_nv_vertex_attrib_integer_64bit,\n \"GL_NVX_gpu_memory_info\" => gl_nvx_gpu_memory_info,\n \"GL_OES_depth_texture\" => gl_oes_depth_texture,\n \"GL_OES_draw_elements_base_vertex\" => gl_oes_draw_elements_base_vertex,\n \"GL_OES_fixed_point\" => gl_oes_fixed_point,\n \"GL_OES_geometry_shader\" => gl_oes_geometry_shader,\n \"GL_OES_packed_depth_stencil\" => gl_oes_packed_depth_stencil,\n \"GL_OES_primitive_bounding_box\" => gl_oes_primitive_bounding_box,\n \"GL_OES_rgb8_rgba8\" => gl_oes_rgb8_rgba8,\n \"GL_OES_stencil1\" => gl_oes_stencil1,\n \"GL_OES_stencil4\" => gl_oes_stencil4,\n \"GL_OES_tessellation_shader\" => gl_oes_tessellation_shader,\n \"GL_OES_texture_buffer\" => gl_oes_texture_buffer,\n \"GL_OES_texture_stencil8\" => gl_oes_texture_stencil8,\n \"GL_OES_vertex_array_object\" => gl_oes_vertex_array_object,\n \"GL_OES_vertex_half_float\" => gl_oes_vertex_half_float,\n \"GL_OES_vertex_type_10_10_10_2\" => 
gl_oes_vertex_type_10_10_10_2,\n}\n\n\/\/\/ Returns the list of all extension names supported by the OpenGL implementation.\n\/\/\/\n\/\/\/ The version must match the one of the backend.\n\/\/\/\n\/\/\/ *Safety*: the OpenGL context corresponding to `gl` must be current in the thread.\n\/\/\/\n\/\/\/ ## Panic\n\/\/\/\n\/\/\/ Can panic if the version number doesn't match the backend, leading to unloaded functions\n\/\/\/ being called.\n\/\/\/\nunsafe fn get_extensions_strings(gl: &gl::Gl, version: &Version) -> Vec<String> {\n if version >= &Version(Api::Gl, 3, 0) || version >= &Version(Api::GlEs, 3, 0) {\n let mut num_extensions = 0;\n gl.GetIntegerv(gl::NUM_EXTENSIONS, &mut num_extensions);\n\n (0 .. num_extensions).map(|num| {\n let ext = gl.GetStringi(gl::EXTENSIONS, num as gl::types::GLuint);\n String::from_utf8(CStr::from_ptr(ext as *const i8).to_bytes().to_vec()).unwrap()\n }).collect()\n\n } else {\n let list = gl.GetString(gl::EXTENSIONS);\n assert!(!list.is_null());\n let list = String::from_utf8(CStr::from_ptr(list as *const i8).to_bytes().to_vec())\n .unwrap();\n list.split(' ').map(|e| e.to_string()).collect()\n }\n}\n<commit_msg>Fix typo in extension name<commit_after>use std::ffi::CStr;\nuse version::Version;\nuse version::Api;\nuse gl;\n\nmacro_rules! extensions {\n ($($string:expr => $field:ident,)+) => {\n \/\/\/ Contains data about the list of extensions.\n #[derive(Debug, Clone, Copy)]\n pub struct ExtensionsList {\n $(\n pub $field: bool,\n )+\n }\n\n \/\/\/ Returns the list of extensions supported by the backend.\n \/\/\/\n \/\/\/ The version must match the one of the backend.\n \/\/\/\n \/\/\/ *Safety*: the OpenGL context corresponding to `gl` must be current in the thread.\n \/\/\/\n \/\/\/ ## Panic\n \/\/\/\n \/\/\/ Can panic if the version number doesn't match the backend, leading to unloaded functions\n \/\/\/ being called.\n \/\/\/\n pub unsafe fn get_extensions(gl: &gl::Gl, version: &Version) -> ExtensionsList {\n let strings = get_extensions_strings(gl, version);\n\n let mut extensions = ExtensionsList {\n $(\n $field: false,\n )+\n };\n\n for extension in strings.into_iter() {\n match &extension[..] {\n $(\n $string => extensions.$field = true,\n )+\n _ => ()\n }\n }\n\n extensions\n }\n }\n}\n\nextensions! 
{\n \"GL_AMD_depth_clamp_separate\" => gl_amd_depth_clamp_separate,\n \"GL_AMD_query_buffer_object\" => gl_amd_query_buffer_object,\n \"GL_APPLE_sync\" => gl_apple_sync,\n \"GL_APPLE_vertex_array_object\" => gl_apple_vertex_array_object,\n \"GL_ARB_bindless_texture\" => gl_arb_bindless_texture,\n \"GL_ARB_buffer_storage\" => gl_arb_buffer_storage,\n \"GL_ARB_compute_shader\" => gl_arb_compute_shader,\n \"GL_ARB_copy_buffer\" => gl_arb_copy_buffer,\n \"GL_ARB_debug_output\" => gl_arb_debug_output,\n \"GL_ARB_depth_clamp\" => gl_arb_depth_clamp,\n \"GL_ARB_depth_texture\" => gl_arb_depth_texture,\n \"GL_ARB_direct_state_access\" => gl_arb_direct_state_access,\n \"GL_ARB_draw_buffers\" => gl_arb_draw_buffers,\n \"GL_ARB_draw_elements_base_vertex\" => gl_arb_draw_elements_base_vertex,\n \"GL_ARB_compatibility\" => gl_arb_compatibility,\n \"GL_ARB_ES2_compatibility\" => gl_arb_es2_compatibility,\n \"GL_ARB_ES3_compatibility\" => gl_arb_es3_compatibility,\n \"GL_ARB_ES3_1_compatibility\" => gl_arb_es3_1_compatibility,\n \"GL_ARB_ES3_2_compatibility\" => gl_arb_es3_2_compatibility,\n \"GL_ARB_fragment_shader\" => gl_arb_fragment_shader,\n \"GL_ARB_framebuffer_object\" => gl_arb_framebuffer_object,\n \"GL_ARB_framebuffer_sRGB\" => gl_arb_framebuffer_srgb,\n \"GL_ARB_geometry_shader4\" => gl_arb_geometry_shader4,\n \"GL_ARB_get_program_binary\" => gl_arb_get_programy_binary,\n \"GL_ARB_instanced_arrays\" => gl_arb_instanced_arrays,\n \"GL_ARB_invalidate_subdata\" => gl_arb_invalidate_subdata,\n \"GL_ARB_occlusion_query\" => gl_arb_occlusion_query,\n \"GL_ARB_occlusion_query2\" => gl_arb_occlusion_query2,\n \"GL_ARB_pixel_buffer_object\" => gl_arb_pixel_buffer_object,\n \"GL_ARB_program_interface_query\" => gl_arb_program_interface_query,\n \"GL_ARB_query_buffer_object\" => gl_arb_query_buffer_object,\n \"GL_ARB_map_buffer_range\" => gl_arb_map_buffer_range,\n \"GL_ARB_multi_draw_indirect\" => gl_arb_multi_draw_indirect,\n \"GL_ARB_provoking_vertex\" => gl_arb_provoking_vertex,\n \"GL_ARB_robustness\" => gl_arb_robustness,\n \"GL_ARB_robust_buffer_access_behavior\" => gl_arb_robust_buffer_access_behavior,\n \"GL_ARB_sampler_objects\" => gl_arb_sampler_objects,\n \"GL_ARB_shader_image_load_store\" => gl_arb_shader_image_load_store,\n \"GL_ARB_shader_objects\" => gl_arb_shader_objects,\n \"GL_ARB_shader_storage_buffer_object\" => gl_arb_shader_storage_buffer_object,\n \"GL_ARB_sync\" => gl_arb_sync,\n \"GL_ARB_tessellation_shader\" => gl_arb_tessellation_shader,\n \"GL_ARB_texture_buffer_object\" => gl_arb_texture_buffer_object,\n \"GL_ARB_texture_buffer_object_rgb32\" => gl_arb_texture_buffer_object_rgb32,\n \"GL_ARB_texture_compression_bptc\" => gl_arb_texture_compression_bptc,\n \"GL_ARB_texture_float\" => gl_arb_texture_float,\n \"GL_ARB_texture_multisample\" => gl_arb_texture_multisample,\n \"GL_ARB_texture_non_power_of_two\" => gl_arb_texture_non_power_of_two,\n \"GL_ARB_texture_rg\" => gl_arb_texture_rg,\n \"GL_ARB_texture_rgb10_a2ui\" => gl_arb_texture_rgb10_a2ui,\n \"GL_ARB_texture_stencil8\" => gl_arb_texture_stencil8,\n \"GL_ARB_texture_storage\" => gl_arb_texture_storage,\n \"GL_ARB_timer_query\" => gl_arb_timer_query,\n \"GL_ARB_transform_feedback3\" => gl_arb_transform_feedback3,\n \"GL_ARB_uniform_buffer_object\" => gl_arb_uniform_buffer_object,\n \"GL_ARB_vertex_array_object\" => gl_arb_vertex_array_object,\n \"GL_ARB_vertex_buffer_object\" => gl_arb_vertex_buffer_object,\n \"GL_ARB_vertex_half_float\" => gl_arb_vertex_half_float,\n \"GL_ARB_vertex_shader\" => gl_arb_vertex_shader,\n 
\"GL_ARB_vertex_type_10f_11f_11f_rev\" => gl_arb_vertex_type_10f_11f_11f_rev,\n \"GL_ARB_vertex_type_2_10_10_10_rev\" => gl_arb_vertex_type_2_10_10_10_rev,\n \"GL_ARM_rgba8\" => gl_arm_rgba8,\n \"GL_ATI_meminfo\" => gl_ati_meminfo,\n \"GL_ATI_draw_buffers\" => gl_ati_draw_buffers,\n \"GL_ATI_texture_float\" => gl_ati_texture_float,\n \"GL_EXT_buffer_storage\" => gl_ext_buffer_storage,\n \"GL_EXT_debug_marker\" => gl_ext_debug_marker,\n \"GL_EXT_direct_state_access\" => gl_ext_direct_state_access,\n \"GL_EXT_disjoint_timer_query\" => gl_ext_disjoint_timer_query,\n \"GL_EXT_framebuffer_blit\" => gl_ext_framebuffer_blit,\n \"GL_EXT_framebuffer_object\" => gl_ext_framebuffer_object,\n \"GL_EXT_framebuffer_sRGB\" => gl_ext_framebuffer_srgb,\n \"GL_EXT_geometry_shader\" => gl_ext_geometry_shader,\n \"GL_EXT_geometry_shader4\" => gl_ext_geometry_shader4,\n \"GL_EXT_gpu_shader4\" => gl_ext_gpu_shader4,\n \"GL_EXT_multi_draw_indirect\" => gl_ext_multi_draw_indirect,\n \"GL_EXT_occlusion_query_boolean\" => gl_ext_occlusion_query_boolean,\n \"GL_EXT_packed_depth_stencil\" => gl_ext_packed_depth_stencil,\n \"GL_EXT_packed_float\" => gl_ext_packed_float,\n \"GL_EXT_primitive_bounding_box\" => gl_ext_primitive_bounding_box,\n \"GL_EXT_provoking_vertex\" => gl_ext_provoking_vertex,\n \"GL_EXT_robustness\" => gl_ext_robustness,\n \"GL_EXT_sRGB_write_control\" => gl_ext_srgb_write_control,\n \"GL_EXT_texture_array\" => gl_ext_texture_array,\n \"GL_EXT_texture_buffer\" => gl_ext_texture_buffer,\n \"GL_EXT_texture_buffer_object\" => gl_ext_texture_buffer_object,\n \"GL_EXT_texture_compression_s3tc\" => gl_ext_texture_compression_s3tc,\n \"GL_EXT_texture_filter_anisotropic\" => gl_ext_texture_filter_anisotropic,\n \"GL_EXT_texture_integer\" => gl_ext_texture_integer,\n \"GL_EXT_texture_shared_exponent\" => gl_ext_texture_shared_exponent,\n \"GL_EXT_texture_snorm\" => gl_ext_texture_snorm,\n \"GL_EXT_texture_sRGB\" => gl_ext_texture_srgb,\n \"GL_EXT_transform_feedback\" => gl_ext_transform_feedback,\n \"GL_GREMEDY_string_marker\" => gl_gremedy_string_marker,\n \"GL_KHR_debug\" => gl_khr_debug,\n \"GL_KHR_context_flush_control\" => gl_khr_context_flush_control,\n \"GL_KHR_robustness\" => gl_khr_robustness,\n \"GL_KHR_robust_buffer_access_behavior\" => gl_khr_robust_buffer_access_behavior,\n \"GL_NV_conditional_render\" => gl_nv_conditional_render,\n \"GL_NV_copy_buffer\" => gl_nv_copy_buffer,\n \"GL_NV_depth_clamp\" => gl_nv_depth_clamp,\n \"GL_NV_half_float\" => gl_nv_half_float,\n \"GL_NV_pixel_buffer_object\" => gl_nv_pixel_buffer_object,\n \"GL_NV_vertex_attrib_integer_64bit\" => gl_nv_vertex_attrib_integer_64bit,\n \"GL_NVX_gpu_memory_info\" => gl_nvx_gpu_memory_info,\n \"GL_OES_depth_texture\" => gl_oes_depth_texture,\n \"GL_OES_draw_elements_base_vertex\" => gl_oes_draw_elements_base_vertex,\n \"GL_OES_fixed_point\" => gl_oes_fixed_point,\n \"GL_OES_geometry_shader\" => gl_oes_geometry_shader,\n \"GL_OES_packed_depth_stencil\" => gl_oes_packed_depth_stencil,\n \"GL_OES_primitive_bounding_box\" => gl_oes_primitive_bounding_box,\n \"GL_OES_rgb8_rgba8\" => gl_oes_rgb8_rgba8,\n \"GL_OES_stencil1\" => gl_oes_stencil1,\n \"GL_OES_stencil4\" => gl_oes_stencil4,\n \"GL_OES_tessellation_shader\" => gl_oes_tessellation_shader,\n \"GL_OES_texture_buffer\" => gl_oes_texture_buffer,\n \"GL_OES_texture_stencil8\" => gl_oes_texture_stencil8,\n \"GL_OES_vertex_array_object\" => gl_oes_vertex_array_object,\n \"GL_OES_vertex_half_float\" => gl_oes_vertex_half_float,\n \"GL_OES_vertex_type_10_10_10_2\" => 
gl_oes_vertex_type_10_10_10_2,\n}\n\n\/\/\/ Returns the list of all extension names supported by the OpenGL implementation.\n\/\/\/\n\/\/\/ The version must match the one of the backend.\n\/\/\/\n\/\/\/ *Safety*: the OpenGL context corresponding to `gl` must be current in the thread.\n\/\/\/\n\/\/\/ ## Panic\n\/\/\/\n\/\/\/ Can panic if the version number doesn't match the backend, leading to unloaded functions\n\/\/\/ being called.\n\/\/\/\nunsafe fn get_extensions_strings(gl: &gl::Gl, version: &Version) -> Vec<String> {\n if version >= &Version(Api::Gl, 3, 0) || version >= &Version(Api::GlEs, 3, 0) {\n let mut num_extensions = 0;\n gl.GetIntegerv(gl::NUM_EXTENSIONS, &mut num_extensions);\n\n (0 .. num_extensions).map(|num| {\n let ext = gl.GetStringi(gl::EXTENSIONS, num as gl::types::GLuint);\n String::from_utf8(CStr::from_ptr(ext as *const i8).to_bytes().to_vec()).unwrap()\n }).collect()\n\n } else {\n let list = gl.GetString(gl::EXTENSIONS);\n assert!(!list.is_null());\n let list = String::from_utf8(CStr::from_ptr(list as *const i8).to_bytes().to_vec())\n .unwrap();\n list.split(' ').map(|e| e.to_string()).collect()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added gcd.rs<commit_after>fn main() {\n\t\/\/ read inuts from command line\n\tlet mut a = std::env::args().nth(1).unwrap().parse::<u64>().unwrap();\n\tlet mut b = std::env::args().nth(2).unwrap().parse::<u64>().unwrap();\n\tlet mut r = a%b;\n\tlet mut temp = a;\n\tloop {\n\t\tmatch r {\n\t\t\t0 => {println!(\"GCD is {}\",b );break;},\n\t\t\t_ => {temp=a;a=b;b=temp%b;r=a%b;},\n\t\t}\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>type_alias_enum_variants: add regression test for #61801.<commit_after>\/\/ In this regression test we check that a path pattern referring to a unit variant\n\/\/ through a type alias is successful in inferring the generic argument.\n\n\/\/ compile-pass\n\n#![feature(type_alias_enum_variants)]\n\nenum Opt<T> {\n N,\n S(T),\n}\n\ntype OptAlias<T> = Opt<T>;\n\nfn f1(x: OptAlias<u8>) {\n match x {\n OptAlias::N \/\/ We previously failed to infer `T` to `u8`.\n => (),\n _ => (),\n }\n\n match x {\n <\n OptAlias<_> \/\/ And we failed to infer this type also.\n >::N => (),\n _ => (),\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rust: Even or Odd (8 kyu)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>use hog method for object grouping<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Automatically derive the combined image sampler uniforms from reflection data<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add concept of RefFlags<commit_after>use std::collections::BTreeMap;\n\nuse toml::Value;\n\nuse error::RefErrorKind as REK;\nuse result::Result;\n\n#[derive(Default)]\npub struct RefFlags {\n content_hashing: bool,\n permission_tracking: bool,\n}\n\nimpl RefFlags {\n\n \/\/\/ Read the RefFlags from a TOML document\n \/\/\/\n \/\/\/ Assumes that the whole TOML tree is passed. 
So this looks up `ref.flags` to get the flags.\n \/\/\/ It assumes that this is a Map with Key = <name of the setting> and Value = boolean.\n pub fn read(v: &Value) -> Result<RefFlags> {\n unimplemented!()\n }\n\n \/\/\/ Build a TOML::Value from this RefFlags object.\n \/\/\/\n \/\/\/ Returns a Map which should be set in `ref.flags` in the header.\n pub fn into_toml(self) -> Result<Value> {\n unimplemented!()\n }\n\n \/\/\/ Alias for `RefFlags::content_hashing()`\n pub fn is_often_moving(mut self, b: bool) -> RefFlags {\n self.with_content_hashing(b)\n }\n\n pub fn with_content_hashing(self, b: bool) -> RefFlags {\n unimplemented!()\n }\n\n pub fn with_permission_tracking(mut self, b: bool) -> RefFlags {\n self.permission_tracking = b;\n self\n }\n\n\n pub fn get_content_hashing(&self) -> bool {\n unimplemented!()\n }\n\n pub fn get_permission_tracking(&self) -> bool {\n unimplemented!()\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n println!(\"Hello World!\");\n println!(\"I'm a Rustacean!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Rust] `as` coerces into type conversion<commit_after>fn main() {\n let a = 0xffff_ffffu32;\n let b = a as i32;\n let c = b as i64;\n println!(\"{} {} {}\", a, b, c); \/\/ 4294967295 -1 -1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Finalize documentation<commit_after><|endoftext|>"} {"text":"<commit_before>use std::borrow::Cow;\nuse std::ops::Not;\n\nuse crate::requests::*;\nuse crate::types::*;\n\n\/\/\/ Use this method to send general files. On success, the sent Message is returned.\n\/\/\/ Bots can currently send files of any type of up to 50 MB in size, this limit may be changed in the future.\n#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]\n#[must_use = \"requests do nothing unless sent\"]\npub struct SendDocument<'s, 'c> {\n chat_id: ChatRef,\n document: Cow<'s, str>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n caption: Option<Cow<'c, str>>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n parse_mode: Option<ParseMode>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n reply_to_message_id: Option<MessageId>,\n #[serde(skip_serializing_if = \"Not::not\")]\n disable_notification: bool,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n reply_markup: Option<ReplyMarkup>,\n}\n\nimpl<'s, 'c> Request for SendDocument<'s, 'c> {\n type Type = JsonRequestType<Self>;\n type Response = JsonTrueToUnitResponse;\n\n fn serialize(&self) -> Result<HttpRequest, Error> {\n Self::Type::serialize(RequestUrl::method(\"sendDocument\"), self)\n }\n}\n\nimpl<'s, 'c> SendDocument<'s, 'c> {\n pub fn with_url<C, T>(chat: C, url: T) -> Self\n where\n C: ToChatRef,\n T: Into<Cow<'s, str>>,\n {\n Self {\n chat_id: chat.to_chat_ref(),\n document: url.into(),\n caption: None,\n parse_mode: None,\n reply_to_message_id: None,\n reply_markup: None,\n disable_notification: false,\n }\n }\n\n pub fn caption<T>(&mut self, caption: T) -> &mut Self\n where\n T: Into<Cow<'c, str>>,\n {\n self.caption = Some(caption.into());\n self\n }\n\n pub fn parse_mode(&mut self, parse_mode: ParseMode) -> &mut Self {\n self.parse_mode = Some(parse_mode);\n self\n }\n\n pub fn reply_to<R>(&mut self, to: R) -> &mut Self\n where\n R: ToMessageId,\n {\n self.reply_to_message_id = Some(to.to_message_id());\n self\n }\n\n pub fn reply_markup<R>(&mut self, reply_markup: R) -> &mut Self\n where\n R: Into<ReplyMarkup>,\n {\n self.reply_markup = Some(reply_markup.into());\n self\n }\n}\n\n\/\/\/ 
Can reply with a document\npub trait CanReplySendDocument {\n fn document_url_reply<'s, 'c, T>(&self, url: T) -> SendDocument<'s, 'c>\n where\n T: Into<Cow<'s, str>>;\n}\n\nimpl<M> CanReplySendDocument for M\nwhere\n M: ToMessageId + ToSourceChat,\n{\n fn document_url_reply<'s, 'c, T>(&self, url: T) -> SendDocument<'s, 'c>\n where\n T: Into<Cow<'s, str>>,\n {\n let mut req = SendDocument::with_url(self.to_source_chat(), url);\n req.reply_to(self.to_message_id());\n req\n }\n}\n\n\/\/\/ Send an audio\npub trait CanSendDocument {\n fn document_url<'s, 'c, T>(&self, url: T) -> SendDocument<'s, 'c>\n where\n T: Into<Cow<'s, str>>;\n}\n\nimpl<M> CanSendDocument for M\nwhere\n M: ToChatRef,\n{\n fn document_url<'s, 'c, T>(&self, url: T) -> SendDocument<'s, 'c>\n where\n T: Into<Cow<'s, str>>,\n {\n SendDocument::with_url(self.to_chat_ref(), url)\n }\n}\n<commit_msg>refactor: update SendDocument<commit_after>use std::borrow::Cow;\n\nuse crate::requests::*;\nuse crate::types::*;\n\n\/\/\/ Use this method to send general files. On success, the sent Message is returned.\n\/\/\/ Bots can currently send files of any type of up to 50 MB in size, this limit may be changed in the future.\n#[derive(Debug, Clone, PartialEq, PartialOrd)]\n#[must_use = \"requests do nothing unless sent\"]\npub struct SendDocument<'c> {\n chat_id: ChatRef,\n document: InputFile,\n thumb: Option<InputFile>,\n caption: Option<Cow<'c, str>>,\n parse_mode: Option<ParseMode>,\n reply_to_message_id: Option<MessageId>,\n disable_notification: bool,\n reply_markup: Option<ReplyMarkup>,\n}\n\nimpl<'c> ToMultipart for SendDocument<'c> {\n fn to_multipart(&self) -> Result<Multipart, Error> {\n multipart_map! {\n self,\n (chat_id (text));\n (document (raw));\n (thumb (raw), optional);\n (caption (text), optional);\n (parse_mode (text), optional);\n (reply_to_message_id (text), optional);\n (disable_notification (text), when_true);\n (reply_markup (json), optional);\n }\n }\n}\n\nimpl<'c> Request for SendDocument<'c> {\n type Type = MultipartRequestType<Self>;\n type Response = JsonTrueToUnitResponse;\n\n fn serialize(&self) -> Result<HttpRequest, Error> {\n Self::Type::serialize(RequestUrl::method(\"sendDocument\"), self)\n }\n}\n\nimpl<'c> SendDocument<'c> {\n pub fn new<C, V>(chat: C, document: V) -> Self\n where\n C: ToChatRef,\n V: Into<InputFile>,\n {\n Self {\n chat_id: chat.to_chat_ref(),\n document: document.into(),\n thumb: None,\n caption: None,\n parse_mode: None,\n reply_to_message_id: None,\n reply_markup: None,\n disable_notification: false,\n }\n }\n\n pub fn thumb<V>(&mut self, thumb: V) -> &mut Self where V: Into<InputFile> {\n self.thumb = Some(thumb.into());\n self\n }\n\n pub fn caption<T>(&mut self, caption: T) -> &mut Self\n where\n T: Into<Cow<'c, str>>,\n {\n self.caption = Some(caption.into());\n self\n }\n\n pub fn parse_mode(&mut self, parse_mode: ParseMode) -> &mut Self {\n self.parse_mode = Some(parse_mode);\n self\n }\n\n pub fn reply_to<R>(&mut self, to: R) -> &mut Self\n where\n R: ToMessageId,\n {\n self.reply_to_message_id = Some(to.to_message_id());\n self\n }\n\n pub fn disable_notification(&mut self) -> &mut Self {\n self.disable_notification = true;\n self\n }\n\n pub fn reply_markup<R>(&mut self, reply_markup: R) -> &mut Self\n where\n R: Into<ReplyMarkup>,\n {\n self.reply_markup = Some(reply_markup.into());\n self\n }\n}\n\n\/\/\/ Can reply with a dcoument\npub trait CanReplySendDocument {\n fn document_reply<'c, T>(&self, document: T) -> SendDocument<'c> where T: Into<InputFile>;\n}\n\nimpl<M> 
CanReplySendDocument for M\nwhere\n M: ToMessageId + ToSourceChat,\n{\n fn document_reply<'c, T>(&self, document: T) -> SendDocument<'c> where T: Into<InputFile>\n {\n let mut req = SendDocument::new(self.to_source_chat(), document);\n req.reply_to(self);\n req\n }\n}\n\n\/\/\/ Send a document\npub trait CanSendDocument {\n fn document<'c, T>(&self, document: T) -> SendDocument<'c> where T: Into<InputFile>;\n}\n\nimpl<M> CanSendDocument for M\nwhere\n M: ToChatRef,\n{\n fn document<'c, T>(&self, document: T) -> SendDocument<'c> where T: Into<InputFile>\n {\n SendDocument::new(self.to_chat_ref(), document)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic semicolon support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change MappedRegion's implementation with the standard lib's MemoryMap<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>v0.3.3.4 - Bug fixes with blacklisting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add methods to get BlackboardEntry<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>First basic implementation<commit_after>\/\/! Write-only Memory for Rust\n\/\/!\n\/\/! This is a wrapper type to disallow reading from the wrapped type\n\nuse std::convert::{AsRef, AsMut}\n\n\/\/\/ Make `T` Write-Only\npub struct<T> Wom<T> {\n #[doc(hidden)]\n _inner: T\n}\n\nimpl<T: ?Sized> AsRef<T> for Wom<T> {\n fn as_ref(&self) -> &T {\n panic!(\"readably referencing write-only memory\");\n }\n}\n\nimpl<T: ?Sized> AsMut<T> for Wom<T> {\n fn as_mut(&mut self) -> &mut T {\n self._inner\n }\n}\n\n\/\/TODO: Index, IndexMut\n<|endoftext|>"} {"text":"<commit_before><commit_msg>poke the dragon<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lib: print bold strings, conditianal opt seperator<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate rustc_serialize;\nextern crate time;\nextern crate crypto;\n\nuse std::time::duration::Duration;\nuse rustc_serialize::base64;\nuse rustc_serialize::base64::{ToBase64, FromBase64};\nuse rustc_serialize::json;\nuse rustc_serialize::json::{ToJson, Json};\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\n\ntype Payload = BTreeMap<String, String>;\n\nstruct Header {\n algorithm: Algorithm,\n ttype: String\n}\n\nimpl Header {\n pub fn new(alg: Algorithm) -> Header {\n Header { algorithm: alg, ttype: Header::std_type() }\n }\n \n pub fn std_type() -> String {\n \"JWT\".to_string()\n }\n}\n\npub enum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\n\n\nimpl ToString for Algorithm {\n fn to_string(&self) -> String {\n unimplemented!() \n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nimpl ToJson for Header {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.ttype.to_json());\n map.insert(\"alg\".to_string(), self.algorithm.to_string().to_json());\n Json::Object(map)\n }\n}\n\n\n\n\n \npub fn encode(header: Header, secret: String, payload: Option<Payload>) -> String {\n let signing_input = get_signing_input(payload, header.algorithm);\n let signature = sign_hmac(signing_input, secret);\n format!(\"{}.{}\", signing_input, signature);\n}\n\npub fn decode(encoded_token: String, secret: String) -> Result<(Header, Payload), Error> {\n\n}\n\npub fn is_valid(encoded_token: String, secret: 
String) -> bool {\n\n}\n\npub fn verify(encoded_token: String, secret: String, algorithm: Algorithm) -> Result<(Header, Payload), Error> {\n match decode_segments(encoded_token, true) {\n Some(header, payload, signature, signing_input) => {\n if !verify_signature(algorithm, signing_input, signature.as_bytes(), secret.to_string()) {\n return Err(Error::SignatureInvalid)\n }\n\n \/\/todo\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n\n \/\/todo\n Ok((header, payload))\n },\n\n None => Err(Error::JWTInvalid)\n }\n}\n\n\n\n\n\nfn segments_count() -> usize {\n 3\n}\n\nfn get_signing_input(payload: Payload, algorithm: Algorithm) -> String {\n let header = Header::new(algorithm);\n let header_json_str = header.to_json();\n let encoded_header = base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n let p = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect();\n let payload_json = Json::Object(p);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\nfn sign_hmac256(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS256)\n}\n\nfn sign_hmac384(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS384)\n}\n\nfn sign_hmac512(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS512)\n}\n\nfn sign_hmac(algorithm: Algorithm, header: Header, signing_input: String, secret: String) -> String {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n \n hmac.input(signing_input.to_string().as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\nfn decode_segments(encoded_token: String, perform_verification: bool) -> Option<(Header, Payload, String, String)> {\n let mut raw_segments = encoded_token.split_str(\".\");\n if raw_segments.count() != segments_count() {\n return Err(Error::JWTInvalid)\n }\n\n let header_segment = raw_segments.next().unwrap();\n let payload_segment = raw_segments.next().unwrap();\n let crypto_segment = raw_segments.next().unwrap();\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n\n \/\/ let signature = crypto_segment.as_bytes().from_base64().unwrap().as_slice();\n let signature = crypto_segment.as_bytes();\n let signature2 = signature.from_base64();\n let signature3 = signature2.unwrap();\n let signature4 = signature3.as_slice();\n match str::from_utf8(signature4) {\n Ok(x) => {\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Some((header, payload, x.to_string(), signing_input))\n },\n Err(_) => None\n }\n}\n\nfn decode_header_and_payload<'a>(header_segment: &str, payload_segment: &str) -> (Header, Payload) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s 
= str::from_utf8(bytes.as_slice()).unwrap();\n Json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n let header_tree = json_to_tree(header_json);\n let alg = header_tree.get(\"alg\").unwrap().as_slice();\n let header = Header::new2(alg);\n let payload_json = base64_to_json(payload_segment);\n let payload = json_to_tree(payload_json);\n (header, payload)\n}\n\nfn verify_signature(algorithm: Algorithm, signing_input: String, signature: &[u8], secret: String) -> bool {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\n\/\/ fn verify_issuer(payload_json: Json, iss: &str) -> bool {\n\/\/ \/\/ take \"iss\" from payload_json\n\/\/ \/\/ take \"iss\" from ...\n\/\/ \/\/ make sure they're equal\n\n\/\/ \/\/ if iss.is_empty() || signing_input.as_slice().is_whitespace() {\n\/\/ \/\/ return Err(Error::IssuerInvalid)\n\/\/ \/\/ }\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_expiration(payload_json: Json) -> bool {\n\/\/ let payload = json_to_tree(payload_json);\n\/\/ if payload.contains_key(\"exp\") {\n\/\/ match payload.get(\"exp\").unwrap().parse::<i64>() {\n\/\/ Ok(exp) => exp > time::get_time().sec,\n\/\/ Err(e) => panic!(e)\n\/\/ }\n\/\/ \/\/ if exp.is_empty() || signing_input.as_slice().is_whitespace() {\n\/\/ \/\/ return false\n\/\/ \/\/ }\n \n \n\/\/ } else {\n\/\/ false\n\/\/ }\n\/\/ }\n\n\/\/ fn verify_audience(payload_json: Json, aud: &str) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_subject(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_notbefore(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_issuedat(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_jwtid(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_generic(payload_json: Json, parameter_name: String) -> bool {\n\/\/ let payload = json_to_tree(payload_json);\n\/\/ if payload.contains_key(¶meter_name) {\n \n\/\/ }\n\n\/\/ unimplemented!()\n\/\/ }\n\nfn create_hmac<'a, D: Digest + 'a>(digest: D, some_str: String) -> Box<Mac + 'a> {\n Box::new(Hmac::new(digest, some_str.as_bytes()))\n}\n\n \n\n\n\n\/\/ #[cfg(test)]\n\/\/ mod tests {\n\/\/ extern crate time;\n\n\/\/ use super::sign;\n\/\/ use super::verify;\n\/\/ use super::secure_compare;\n\/\/ use super::Algorithm;\n\/\/ use std::collections::BTreeMap;\n\/\/ use std::time::duration::Duration;\n\n\/\/ #[test]\n\/\/ fn test_encode_and_decode_jwt() {\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ p1.insert(\"key2\".to_string(), \"val2\".to_string());\n\/\/ p1.insert(\"key3\".to_string(), \"val3\".to_string());\n\n\/\/ let secret = \"secret123\";\n\/\/ let jwt1 = sign(secret, Some(p1.clone()), Some(Algorithm::HS256));\n\/\/ let maybe_res = verify(jwt1.as_slice(), secret, None);\n\n\/\/ assert!(maybe_res.is_ok());\n\/\/ assert_eq!(jwt1, maybe_res.unwrap());\n\/\/ } \n\n\/\/ #[test]\n\/\/ fn test_decode_valid_jwt() {\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"key11\".to_string(), 
\"val1\".to_string());\n\/\/ p1.insert(\"key22\".to_string(), \"val2\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n\/\/ let maybe_res = verify(jwt.as_slice(), secret, None);\n \n\/\/ assert!(maybe_res.is_ok());\n\/\/ assert_eq!(p1, maybe_res.unwrap().payload);\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_expired() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_ok_when_expired_not_verified() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n \n\/\/ #[test]\n\/\/ fn test_secure_compare_same_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(res);\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_secure_compare_different_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same but different\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(!res);\n\n\/\/ let str3 = \"same same\".as_bytes();\n\/\/ let str4 = \"same ssss\".as_bytes();\n\/\/ let res2 = secure_compare(str3, str4);\n\/\/ assert!(!res2);\n\/\/ }\n\/\/ }<commit_msg>refactored3<commit_after>extern crate rustc_serialize;\nextern crate time;\nextern crate crypto;\n\nuse std::time::duration::Duration;\nuse rustc_serialize::base64;\nuse rustc_serialize::base64::{ToBase64, FromBase64};\nuse rustc_serialize::json;\nuse rustc_serialize::json::{ToJson, Json};\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\n\npub type Payload = BTreeMap<String, String>;\n\npub struct Header {\n algorithm: Algorithm,\n ttype: String\n}\n\nimpl Header {\n pub fn new(alg: Algorithm) -> Header {\n Header { algorithm: alg, ttype: Header::std_type() }\n }\n \n pub fn std_type() -> String {\n \"JWT\".to_string()\n }\n\n \n}\n\npub enum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\nimpl ToString for Algorithm {\n fn to_string(&self) -> String {\n unimplemented!() \n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nimpl ToJson for Header {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.ttype.to_json());\n map.insert(\"alg\".to_string(), self.algorithm.to_string().to_json());\n Json::Object(map)\n }\n}\n\n\n\n\n \npub fn encode(header: Header, secret: String, payload: Payload) -> String {\n let signing_input = get_signing_input(payload, header.algorithm);\n let signature = sign_hmac(signing_input, secret, header.algorithm);\n format!(\"{}.{}\", 
signing_input, signature)\n}\n\npub fn decode(encoded_token: String, secret: String) -> Result<(Header, Payload), Error> {\n unimplemented!()\n}\n\npub fn is_valid(encoded_token: String, secret: String) -> bool {\n unimplemented!()\n}\n\npub fn verify(encoded_token: String, secret: String, algorithm: Algorithm) -> Result<(Header, Payload), Error> {\n match decode_segments(encoded_token, true) {\n Some((header, payload, signature, signing_input)) => {\n if !verify_signature(algorithm, signing_input, signature.as_bytes(), secret.to_string()) {\n return Err(Error::SignatureInvalid)\n }\n\n \/\/todo\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n\n \/\/todo\n Ok((header, payload))\n },\n\n None => Err(Error::JWTInvalid)\n }\n}\n\n\n\n\n\nfn segments_count() -> usize {\n 3\n}\n\nfn get_signing_input(payload: Payload, algorithm: Algorithm) -> String {\n let header = Header::new(algorithm);\n let header_json_str = header.to_json();\n let encoded_header = base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n let p = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect();\n let payload_json = Json::Object(p);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\nfn sign_hmac256(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS256)\n}\n\nfn sign_hmac384(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS384)\n}\n\nfn sign_hmac512(signing_input: String, secret: String) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS512)\n}\n\nfn sign_hmac(signing_input: String, secret: String, algorithm: Algorithm) -> String {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n \n hmac.input(signing_input.as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\nfn decode_segments(encoded_token: String, perform_verification: bool) -> Option<(Header, Payload, String, String)> {\n let mut raw_segments = encoded_token.split(\".\");\n if raw_segments.count() != segments_count() {\n return None\n }\n\n let header_segment = raw_segments.next().unwrap();\n let payload_segment = raw_segments.next().unwrap();\n let crypto_segment = raw_segments.next().unwrap();\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n\n \/\/ let signature = crypto_segment.as_bytes().from_base64().unwrap().as_slice();\n let signature = crypto_segment.as_bytes();\n let signature2 = signature.from_base64();\n let signature3 = signature2.unwrap();\n let signature4 = signature3.as_slice();\n match str::from_utf8(signature4) {\n Ok(x) => {\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Some((header, payload, x.to_string(), signing_input))\n },\n Err(_) => None\n }\n}\n\nfn 
decode_header_and_payload<'a>(header_segment: &str, payload_segment: &str) -> (Header, Payload) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s = str::from_utf8(bytes.as_slice()).unwrap();\n Json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n let header_tree = json_to_tree(header_json);\n let alg = header_tree.get(\"alg\").unwrap();\n let header = Header::new(parse_algorithm(alg));\n let payload_json = base64_to_json(payload_segment);\n let payload = json_to_tree(payload_json);\n (header, payload)\n}\n\nfn parse_algorithm(alg: &str) -> Algorithm {\n match alg {\n \"HS256\" => Algorithm::HS256,\n \"HS384\" => Algorithm::HS384,\n \"HS512\" => Algorithm::HS512,\n _ => panic!(\"Unknown algorithm\")\n }\n}\n\nfn verify_signature(algorithm: Algorithm, signing_input: String, signature: &[u8], secret: String) -> bool {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\n\/\/ fn verify_issuer(payload_json: Json, iss: &str) -> bool {\n\/\/ \/\/ take \"iss\" from payload_json\n\/\/ \/\/ take \"iss\" from ...\n\/\/ \/\/ make sure they're equal\n\n\/\/ \/\/ if iss.is_empty() || signing_input.as_slice().is_whitespace() {\n\/\/ \/\/ return Err(Error::IssuerInvalid)\n\/\/ \/\/ }\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_expiration(payload_json: Json) -> bool {\n\/\/ let payload = json_to_tree(payload_json);\n\/\/ if payload.contains_key(\"exp\") {\n\/\/ match payload.get(\"exp\").unwrap().parse::<i64>() {\n\/\/ Ok(exp) => exp > time::get_time().sec,\n\/\/ Err(e) => panic!(e)\n\/\/ }\n\/\/ \/\/ if exp.is_empty() || signing_input.as_slice().is_whitespace() {\n\/\/ \/\/ return false\n\/\/ \/\/ }\n \n \n\/\/ } else {\n\/\/ false\n\/\/ }\n\/\/ }\n\n\/\/ fn verify_audience(payload_json: Json, aud: &str) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_subject(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_notbefore(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_issuedat(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_jwtid(payload_json: Json) -> bool {\n\/\/ unimplemented!()\n\/\/ }\n\n\/\/ fn verify_generic(payload_json: Json, parameter_name: String) -> bool {\n\/\/ let payload = json_to_tree(payload_json);\n\/\/ if payload.contains_key(¶meter_name) {\n \n\/\/ }\n\n\/\/ unimplemented!()\n\/\/ }\n\nfn create_hmac<'a, D: Digest + 'a>(digest: D, some_str: String) -> Box<Mac + 'a> {\n Box::new(Hmac::new(digest, some_str.as_bytes()))\n}\n\n \n\n\n\n\/\/ #[cfg(test)]\n\/\/ mod tests {\n\/\/ extern crate time;\n\n\/\/ use super::sign;\n\/\/ use super::verify;\n\/\/ use super::secure_compare;\n\/\/ use super::Algorithm;\n\/\/ use std::collections::BTreeMap;\n\/\/ use std::time::duration::Duration;\n\n\/\/ #[test]\n\/\/ fn test_encode_and_decode_jwt() {\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ p1.insert(\"key2\".to_string(), \"val2\".to_string());\n\/\/ p1.insert(\"key3\".to_string(), 
\"val3\".to_string());\n\n\/\/ let secret = \"secret123\";\n\/\/ let jwt1 = sign(secret, Some(p1.clone()), Some(Algorithm::HS256));\n\/\/ let maybe_res = verify(jwt1.as_slice(), secret, None);\n\n\/\/ assert!(maybe_res.is_ok());\n\/\/ assert_eq!(jwt1, maybe_res.unwrap());\n\/\/ } \n\n\/\/ #[test]\n\/\/ fn test_decode_valid_jwt() {\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"key11\".to_string(), \"val1\".to_string());\n\/\/ p1.insert(\"key22\".to_string(), \"val2\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n\/\/ let maybe_res = verify(jwt.as_slice(), secret, None);\n \n\/\/ assert!(maybe_res.is_ok());\n\/\/ assert_eq!(p1, maybe_res.unwrap().payload);\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_expired() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_ok_when_expired_not_verified() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n \n\/\/ #[test]\n\/\/ fn test_secure_compare_same_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(res);\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_secure_compare_different_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same but different\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(!res);\n\n\/\/ let str3 = \"same same\".as_bytes();\n\/\/ let str4 = \"same ssss\".as_bytes();\n\/\/ let res2 = secure_compare(str3, str4);\n\/\/ assert!(!res2);\n\/\/ }\n\/\/ }<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! 
A user friendly game engine written in Rust.\n\nextern crate time;\nextern crate sync;\n\n\/\/ Crates used to reexport.\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"noise\" as noise_lib;\nextern crate \"genmesh\" as genmesh_lib;\nextern crate \"sprite\" as sprite_lib;\n\n\/\/ Reexports.\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use cam_lib as cam;\npub use noise_lib as noise;\npub use genmesh_lib as genmesh;\npub use sprite_lib as sprite;\n\npub use asset_store::AssetStore;\npub use event::{\n Render,\n Update,\n Input,\n Event,\n EventIterator,\n EventSettings,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n Window,\n WindowSettings,\n};\n\nmod asset_store;\n<commit_msg>EventIterator has been renamed to Events upstream<commit_after>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! A user friendly game engine written in Rust.\n\nextern crate time;\nextern crate sync;\n\n\/\/ Crates used to reexport.\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"noise\" as noise_lib;\nextern crate \"genmesh\" as genmesh_lib;\nextern crate \"sprite\" as sprite_lib;\n\n\/\/ Reexports.\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use cam_lib as cam;\npub use noise_lib as noise;\npub use genmesh_lib as genmesh;\npub use sprite_lib as sprite;\n\npub use asset_store::AssetStore;\npub use event::{\n Render,\n Update,\n Input,\n Event,\n Events,\n EventSettings,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n Window,\n WindowSettings,\n};\n\nmod asset_store;\n<|endoftext|>"} {"text":"<commit_before>\/\/! A library for window abstraction and event logic\n\/\/!\n\/\/! This library is used as an abstraction layer on top of\n\/\/! different window back-ends, such that libraries\n\/\/! can be written without more dependencies than required.\n\/\/!\n\/\/! An event loop can be created in different ways:\n\/\/!\n\/\/! ```ignore\n\/\/! \/\/ Move window by value (this prevents you from using the window elsewhere).\n\/\/! for e in Events::new(window) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Use shared reference (this allows you to use the window elsewhere).\n\/\/! let window = RefCell::new(window);\n\/\/! for e in Events::new(&window) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Use current window (the window must be set as current object).\n\/\/! for e in Events::new(current::UseCurrent::<Window>) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Specify usage.\n\/\/! let window = RefCell::new(window);\n\/\/! let usage = current::Use(&window);\n\/\/! for e in Events::new(usage) {\n\/\/! ...\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! It is also designed to provide an extensible model for events,\n\/\/! 
such that window back-ends can add new kinds of events.\n\/\/! The new event can be created as trait and implemented for\n\/\/! all types that uses `GenericEvent`.\n\/\/! For examples, see the different events in this library.\n\n#![crate_type = \"lib\"]\n#![crate_name = \"event\"]\n#![deny(missing_docs)]\n#![deny(missing_copy_implementations)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\nextern crate input;\nextern crate serialize;\nextern crate current;\nextern crate event_loop;\nextern crate \"window\" as window_lib;\n\npub use window_lib as window;\npub use event_loop as events;\npub use window::{\n WindowSettings,\n NoWindow\n};\npub use events::{\n Events,\n MaxFps,\n Ups,\n UpdateArgs,\n RenderArgs,\n};\npub use generic_event::{ assert_event_trait, GenericEvent };\npub use update::{ UpdateEvent };\npub use render::{ RenderEvent };\npub use event::Event;\npub use press::PressEvent;\npub use release::ReleaseEvent;\npub use mouse::{ MouseCursorEvent, MouseRelativeEvent, MouseScrollEvent };\npub use text::TextEvent;\npub use resize::ResizeEvent;\npub use focus::FocusEvent;\n\npub mod ptr;\n\nmod generic_event;\nmod update;\nmod render;\nmod event;\nmod press;\nmod release;\nmod mouse;\nmod text;\nmod resize;\nmod focus;\n<commit_msg>Added `events` function<commit_after>\/\/! A library for window abstraction and event logic\n\/\/!\n\/\/! This library is used as an abstraction layer on top of\n\/\/! different window back-ends, such that libraries\n\/\/! can be written without more dependencies than required.\n\/\/!\n\/\/! An event loop can be created in different ways:\n\/\/!\n\/\/! ```ignore\n\/\/! \/\/ Move window by value (this prevents you from using the window elsewhere).\n\/\/! for e in Events::new(window) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Use shared reference (this allows you to use the window elsewhere).\n\/\/! let window = RefCell::new(window);\n\/\/! for e in Events::new(&window) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Use current window (the window must be set as current object).\n\/\/! for e in Events::new(current::UseCurrent::<Window>) {\n\/\/! ...\n\/\/! }\n\/\/!\n\/\/! \/\/ Specify usage.\n\/\/! let window = RefCell::new(window);\n\/\/! let usage = current::Use(&window);\n\/\/! for e in Events::new(usage) {\n\/\/! ...\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! It is also designed to provide an extensible model for events,\n\/\/! such that window back-ends can add new kinds of events.\n\/\/! The new event can be created as trait and implemented for\n\/\/! all types that uses `GenericEvent`.\n\/\/! 
For examples, see the different events in this library.\n\n#![crate_type = \"lib\"]\n#![crate_name = \"event\"]\n#![deny(missing_docs)]\n#![deny(missing_copy_implementations)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\nextern crate input;\nextern crate serialize;\nextern crate current;\nextern crate event_loop;\nextern crate \"window\" as window_lib;\n\npub use window_lib as window;\npub use event_loop as events;\npub use window::{\n WindowSettings,\n NoWindow\n};\npub use events::{\n Events,\n MaxFps,\n Ups,\n UpdateArgs,\n RenderArgs,\n};\npub use generic_event::{ assert_event_trait, GenericEvent };\npub use update::{ UpdateEvent };\npub use render::{ RenderEvent };\npub use event::Event;\npub use press::PressEvent;\npub use release::ReleaseEvent;\npub use mouse::{ MouseCursorEvent, MouseRelativeEvent, MouseScrollEvent };\npub use text::TextEvent;\npub use resize::ResizeEvent;\npub use focus::FocusEvent;\n\npub mod ptr;\n\nmod generic_event;\nmod update;\nmod render;\nmod event;\nmod press;\nmod release;\nmod mouse;\nmod text;\nmod resize;\nmod focus;\n\n\/\/\/ Creates event iterator from window.\npub fn events<W>(window: W) -> event_loop::Events<W, Event> {\n event_loop::Events::new(window)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More logic clarification parens<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add initial hamming encoder\/decoder<commit_after>\/\/! This module provides `encode` and `decode` functions in `standard` and `shortened`\n\/\/! submodules, for encoding and decoding of the standard and shortened Hamming codes\n\/\/! defined by P25.\n\/\/!\n\/\/! These algorithms are sourced from \\[1].\n\/\/!\n\/\/! \\[1]: \"Coding Theory and Cryptography: The Essentials\", 2nd ed, Hankerson, Hoffman, et\n\/\/! 
al, 2000\n\n\/\/\/ Encoding and decoding of the (15, 11, 3) standard HammingDecoder code defined by P25.\npub mod standard {\n \/\/\/ Encode the 11 bits of data to a 15-bit codeword.\n pub fn encode(data: u16) -> u16 {\n assert!(data & 0xF800 == 0);\n matrix_mul_systematic!(data, GEN, u16)\n }\n\n \/\/\/ Try to decode the 15-bit word to the nearest codeword and return `Some((data,\n \/\/\/ err))`, where `data` are the 11 data bits and `err` is the number of errors, on\n \/\/\/ success and `None` on an unrecoverable error.\n pub fn decode(word: u16) -> Option<(u16, usize)> {\n assert!(word & 0x8000 == 0);\n super::decode_hamming::<StandardHamming>(word)\n }\n\n \/\/\/ Generator patterns for 4 parity bits.\n const GEN: [u16; 4] = [\n 0b11111110000,\n 0b11110001110,\n 0b11001101101,\n 0b10101011011,\n ];\n\n \/\/\/ Parity-check patterns for 4 syndromes.\n const PAR: [u16; 4] = [\n 0b111111100001000,\n 0b111100011100100,\n 0b110011011010010,\n 0b101010110110001,\n ];\n\n \/\/\/ Maps 4-bit syndrome values to bit error locations.\n const LOCATIONS: [u16; 16] = [\n 0,\n 0b0000000000000001,\n 0b0000000000000010,\n 0b0000000000010000,\n 0b0000000000000100,\n 0b0000000000100000,\n 0b0000000001000000,\n 0b0000000010000000,\n 0b0000000000001000,\n 0b0000000100000000,\n 0b0000001000000000,\n 0b0000010000000000,\n 0b0000100000000000,\n 0b0001000000000000,\n 0b0010000000000000,\n 0b0100000000000000,\n ];\n\n struct StandardHamming;\n\n impl super::HammingDecoder for StandardHamming {\n type Data = u16;\n\n fn data(word: u16) -> u16 { word >> 4 }\n fn par() -> [u16; 4] { PAR }\n fn locs() -> [u16; 16] { LOCATIONS }\n }\n}\n\n\/\/\/ Encoding and decoding of the (10, 6, 3) shortened HammingDecoder code defined by P25.\npub mod shortened {\n \/\/\/ Encode the 6 data bits to a 10-bit codeword.\n pub fn encode(data: u8) -> u16 {\n assert!(data & 0b11000000 == 0);\n matrix_mul_systematic!(data, GEN, u16)\n }\n\n \/\/\/ Try to decode the 10-bit word to the nearest codeword and return `Some((data,\n \/\/\/ err))`, where `data` are the 6 data bits and `err` is the number of errors, on\n \/\/\/ success and `None` on an unrecoverable error.\n pub fn decode(word: u16) -> Option<(u8, usize)> {\n assert!(word & 0xFC00 == 0);\n super::decode_hamming::<ShortHamming>(word)\n }\n\n const GEN: [u8; 4] = [\n 0b111001,\n 0b110101,\n 0b101110,\n 0b011110,\n ];\n\n const PAR: [u16; 4] = [\n 0b1110011000,\n 0b1101010100,\n 0b1011100010,\n 0b0111100001,\n ];\n\n const LOCATIONS: [u16; 16] = [\n 0,\n 0b0000000000000001,\n 0b0000000000000010,\n 0b0000000000100000,\n 0b0000000000000100,\n 0,\n 0,\n 0b0000000001000000,\n 0b0000000000001000,\n 0,\n 0,\n 0b0000000010000000,\n 0b0000000000010000,\n 0b0000000100000000,\n 0b0000001000000000,\n 0,\n ];\n\n struct ShortHamming;\n\n impl super::HammingDecoder for ShortHamming {\n type Data = u8;\n\n fn data(word: u16) -> u8 { (word >> 4) as u8 }\n fn par() -> [u16; 4] { PAR }\n fn locs() -> [u16; 16] { LOCATIONS }\n }\n}\n\n\/\/\/ Defines code-specific decoding functions.\ntrait HammingDecoder {\n \/\/\/ The type of the data bit output.\n type Data;\n\n \/\/\/ Convert the codeword to data bits.\n fn data(word: u16) -> Self::Data;\n\n \/\/\/ Return the parity-check patterns for 4 syndromes.\n fn par() -> [u16; 4];\n\n \/\/\/ Return the syndrome-error location map.\n fn locs() -> [u16; 16];\n}\n\n\/\/\/ Use the given decoder to decode the given word.\nfn decode_hamming<H: HammingDecoder>(word: u16) -> Option<(H::Data, usize)> {\n \/\/ Compute the 4-bit syndrome.\n let s = matrix_mul!(word, 
H::par(), u8);\n\n \/\/ A zero syndrome means it's a valid codeword (possibly different from the\n \/\/ transmitted codeword.)\n if s == 0 {\n return Some((H::data(word), 0));\n }\n\n match H::locs().get(s as usize) {\n \/\/ More than one error\/unrecoverable error.\n Some(&0) | None => None,\n \/\/ Valid location means the error can be corrected.\n Some(&loc) => Some((H::data(word ^ loc), 1)),\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::*;\n\n #[test]\n fn test_standard() {\n let w = 0b10101010101;\n let e = standard::encode(w);\n assert_eq!(standard::decode(e^0b000000000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000000001).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000000010).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000000100).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000001000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000010000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000000100000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000001000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000010000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000000100000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000001000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000010000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b000100000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b001000000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b010000000000000).unwrap().0, w);\n assert_eq!(standard::decode(e^0b100000000000000).unwrap().0, w);\n }\n\n #[test]\n fn test_shortened() {\n let w = 0b110011;\n let e = shortened::encode(w);\n assert_eq!(shortened::decode(e ^ 0b0000000000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000000001).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000000010).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000000100).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000001000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000010000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0000100000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0001000000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0010000000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b0100000000).unwrap().0, w);\n assert_eq!(shortened::decode(e ^ 0b1000000000).unwrap().0, w);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added int options<commit_after>use std::ops::BitOr;\n\n\/\/\/ Define how a u64 field should be handled by tantivy.\n#[derive(Clone,Debug,PartialEq,Eq, RustcDecodable, RustcEncodable)]\npub struct IntOptions {\n indexed: bool,\n fast: bool,\n stored: bool,\n}\n\nimpl IntOptions {\n \n \/\/\/ Returns true iff the value is stored.\n pub fn is_stored(&self,) -> bool {\n self.stored\n }\n \n \n \/\/\/ Returns true iff the value is indexed.\n pub fn is_indexed(&self,) -> bool {\n self.indexed\n }\n \n \/\/\/ Returns true iff the value is a fast field. 
\n pub fn is_fast(&self,) -> bool {\n self.fast\n }\n \n \/\/\/ Set the u64 options as stored.\n \/\/\/\n \/\/\/ Only the fields that are set as *stored* are\n \/\/\/ persisted into the Tantivy's store.\n pub fn set_stored(mut self,) -> IntOptions {\n self.stored = true;\n self\n }\n \n \/\/\/ Set the u64 options as indexed.\n \/\/\/\n \/\/\/ Setting an integer as indexed will generate\n \/\/\/ a posting list for each value taken by the integer.\n pub fn set_indexed(mut self,) -> IntOptions {\n self.indexed = true;\n self\n }\n \n \/\/\/ Set the u64 options as a fast field.\n \/\/\/\n \/\/\/ Fast fields are designed for random access.\n \/\/\/ Access time are similar to a random lookup in an array. \n \/\/\/ If more than one value is associated to a fast field, only the last one is\n \/\/\/ kept.\n pub fn set_fast(mut self,) -> IntOptions {\n self.fast = true;\n self\n }\n}\n\nimpl Default for IntOptions {\n fn default() -> IntOptions {\n IntOptions {\n fast: false,\n indexed: false,\n stored: false,\n }\n } \n}\n\n\n\/\/\/ Shortcut for a u64 fast field.\n\/\/\/\n\/\/\/ Such a shortcut can be composed as follows `STORED | FAST | U64_INDEXED`\npub const FAST: IntOptions = IntOptions {\n indexed: false,\n stored: false,\n fast: true,\n};\n\n\/\/\/ Shortcut for a u64 indexed field.\n\/\/\/\n\/\/\/ Such a shortcut can be composed as follows `STORED | FAST | U64_INDEXED`\npub const U64_INDEXED: IntOptions = IntOptions {\n indexed: true,\n stored: false,\n fast: false,\n};\n\n\/\/\/ Shortcut for a u64 stored field. \n\/\/\/\n\/\/\/ Such a shortcut can be composed as follows `STORED | FAST | U64_INDEXED`\npub const U64_STORED: IntOptions = IntOptions {\n indexed: false,\n stored: true,\n fast: false,\n};\n\n\nimpl BitOr for IntOptions {\n\n type Output = IntOptions;\n\n fn bitor(self, other: IntOptions) -> IntOptions {\n let mut res = IntOptions::default();\n res.indexed = self.indexed | other.indexed;\n res.stored = self.stored | other.stored;\n res.fast = self.fast | other.fast;\n res\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add bench test<commit_after>#![feature(test)]\n\nextern crate bit_field;\n\nuse bit_field::*;\n\npub trait BitOper {\n const BIT_LEN: usize;\n fn get_b(&self, idx: usize) -> bool;\n fn set_b(&mut self, idx: usize, val: bool);\n fn toggle(&mut self, idx: usize);\n}\n\npub trait BitArrayOper<T: BitOper> {\n fn get_blen(&self)->usize;\n fn get_b(&self, idx: usize) -> bool;\n fn set_b(&mut self, idx: usize, val: bool);\n fn toggle(&mut self, idx: usize);\n}\n\nimpl BitOper for u8 {\n const BIT_LEN: usize = std::mem::size_of::<Self>() as usize * 8;\n\n fn set_b(&mut self, idx: usize, val: bool) {\n assert!(idx < Self::BIT_LEN);\n if val {\n *self |= 1 << idx;\n } else {\n *self &= !(1 << idx);\n }\n }\n\n fn get_b(&self, idx: usize) -> bool {\n assert!(idx < Self::BIT_LEN);\n (self & 1 << idx) != 0\n }\n\n fn toggle(&mut self, idx: usize) {\n assert!(idx < Self::BIT_LEN);\n *self ^= 1 << idx;\n }\n}\n\nimpl BitOper for u32 {\n const BIT_LEN: usize = std::mem::size_of::<Self>() as usize * 8; \n fn set_b(&mut self, idx: usize, val: bool) {\n assert!(idx < Self::BIT_LEN);\n if val {\n *self |= 1 << idx;\n } else {\n *self &= !(1 << idx);\n }\n }\n\n fn get_b(&self, idx: usize) -> bool {\n assert!(idx < Self::BIT_LEN);\n (self & 1 << idx) != 0\n }\n\n fn toggle(&mut self, idx: usize) {\n assert!(idx < Self::BIT_LEN);\n *self ^= 1 << idx;\n }\n}\n\n\nimpl BitOper for u64 {\n const BIT_LEN: usize = std::mem::size_of::<Self>() as usize * 8; \n fn set_b(&mut self, idx: 
usize, val: bool) {\n assert!(idx < Self::BIT_LEN);\n if val {\n *self |= 1 << idx;\n } else {\n *self &= !(1 << idx);\n }\n }\n\n fn get_b(&self, idx: usize) -> bool {\n assert!(idx < Self::BIT_LEN);\n (self & 1 << idx) != 0\n }\n\n fn toggle(&mut self, idx: usize) {\n assert!(idx < Self::BIT_LEN);\n *self ^= 1 << idx;\n }\n}\n\nimpl <T:BitOper> BitArrayOper<T> for [T] {\n fn get_blen(&self) -> usize {\n self.len() * T::BIT_LEN\n }\n\n fn get_b(&self, idx: usize) -> bool {\n self[idx \/ T::BIT_LEN].get_b(idx % T::BIT_LEN)\n }\n\n fn set_b(&mut self, idx: usize, val: bool) {\n self[idx \/ T::BIT_LEN].set_b(idx % T::BIT_LEN, val);\n }\n\n fn toggle(&mut self, idx: usize) {\n self[idx \/ T::BIT_LEN].toggle(idx % T::BIT_LEN);\n }\n}\n\nextern crate test;\n\nuse test::Bencher;\n\nconst len: usize = 256;\n\n\nfn set_bitfield<T:BitField>(v: &mut Vec<T>) {\n for i in 0..v.len() * T::BIT_LENGTH {\n v.as_mut_slice().set_bit(i, true);;\n }\n }\n\n\nfn get_bitfield<T:BitField>(v: &Vec<T>) {\n for i in 0..v.len() * T::BIT_LENGTH {\n let _b = v.as_slice().get_bit(i);\n }\n }\n\n\n fn set_trivial<T:BitOper>(v: &mut Vec<T>) {\n for i in 0..v.len() * T::BIT_LEN {\n v.set_b(i, true);\n }\n}\n\n fn toggle<T:BitOper>(v: &mut Vec<T>) {\n for i in 0..v.len() * T::BIT_LEN {\n v.toggle(i);\n }\n}\n\n fn get_trivial<T:BitOper>(v: &Vec<T>) {\n for i in 0..v.len() * T::BIT_LEN {\n let _b = v.get_b(i);\n }\n}\n\n\n#[bench]\nfn u8_set_bitfield(b: &mut Bencher) {\n let mut v = vec![0u8; len];\n b.iter(|| {\n set_bitfield(&mut v);\n });\n}\n\n#[bench]\nfn u8_set_trivial(b: &mut Bencher) {\n let mut v = vec![0u8; len];\n\n b.iter(|| {\n set_trivial(&mut v);\n });\n}\n\n#[bench]\nfn u8_get_bitfield(b: &mut Bencher) {\n let v = vec![1u8; len];\n b.iter(|| {\n get_bitfield(&v);\n });\n}\n\n#[bench]\nfn u8_get_trivial(b: &mut Bencher) {\n let v = vec![1u8; len];\n b.iter(|| {\n get_trivial(&v);\n });\n}\n\n\n\n#[bench]\nfn u32_set_bitfield(b: &mut Bencher) {\n let mut v = vec![0u32; len];\n b.iter(|| {\n set_bitfield(&mut v);\n });\n}\n\n#[bench]\nfn u32_set_trivial(b: &mut Bencher) {\n let mut v = vec![0u32; len];\n\n b.iter(|| {\n set_trivial(&mut v);\n });\n}\n\n#[bench]\nfn u32_get_bitfield(b: &mut Bencher) {\n let v = vec![1u32; len];\n b.iter(|| {\n get_bitfield(&v);\n });\n}\n\n#[bench]\nfn u32_get_trivial(b: &mut Bencher) {\n let v = vec![1u32; len];\n b.iter(|| {\n get_trivial(&v);\n });\n}\n\n\n\/*\n\n#[bench]\nfn u64_set_bitfield(b: &mut Bencher) {\n let mut v = vec![0u64; len];\n b.iter(|| {\n set_bitfield(&mut v);\n });\n}\n\n#[bench]\nfn u64_set_trivial(b: &mut Bencher) {\n let mut v = vec![0u64; len];\n\n b.iter(|| {\n set_trivial(&mut v);\n });\n}\n\n#[bench]\nfn u64_get_bitfield(b: &mut Bencher) {\n let v = vec![1u64; len];\n b.iter(|| {\n get_bitfield(&v);\n });\n}\n\n#[bench]\nfn u64_get_trivial(b: &mut Bencher) {\n let v = vec![1u64; len];\n b.iter(|| {\n get_trivial(&v);\n });\n}\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Minor cleanup for script analyzer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added threading abstractions<commit_after>use alloc::{Allocator, SliceWrapper};\nuse super::BrotliAlloc;\nuse super::backward_references::BrotliEncoderParams;\npub trait Joinable<T:Send+'static>:Sized {\n fn join(self) -> Result<T, ()>;\n}\n\npub trait Spawnable<T:Send+'static> {\n type JoinHandle: Joinable<T>;\n fn spawn<F: FnOnce() -> T + Send + 'static>(f: F) -> Self::JoinHandle;\n}\n\n\npub struct CompressionThreadResult<Alloc:BrotliAlloc+Send+'static> where <Alloc as 
Allocator<u8>>::AllocatedMemory: Send {\n compressed: <Alloc as Allocator<u8>>::AllocatedMemory,\n compressed_size: usize,\n alloc: Alloc,\n}\nenum InternalSendAlloc<Alloc:BrotliAlloc+Send+'static, Join: Joinable<CompressionThreadResult<Alloc>>>\n where <Alloc as Allocator<u8>>::AllocatedMemory: Send {\n A(Alloc),\n Join(Join),\n Spawning,\n}\npub struct SendAlloc<Alloc:BrotliAlloc +Send+'static,\n Join:Joinable<CompressionThreadResult<Alloc>>>(InternalSendAlloc<Alloc, Join>)\n where <Alloc as Allocator<u8>>::AllocatedMemory: Send;\n\nimpl<Alloc:BrotliAlloc+Send+'static,Join:Joinable<CompressionThreadResult<Alloc>>> SendAlloc<Alloc, Join>\n where <Alloc as Allocator<u8>>::AllocatedMemory: Send {\n pub fn new(alloc: Alloc) -> Self {\n SendAlloc::<Alloc, Join>(InternalSendAlloc::A(alloc))\n }\n pub fn unwrap_or(self, other: Alloc) -> Alloc {\n match self.0 {\n InternalSendAlloc::A(alloc) => {\n alloc\n },\n InternalSendAlloc::Spawning | InternalSendAlloc::Join(_) => {\n other\n },\n }\n }\n pub fn unwrap(self) -> Alloc {\n match self.0 {\n InternalSendAlloc::A(alloc) => {\n alloc\n },\n InternalSendAlloc::Join(_) | InternalSendAlloc::Spawning => panic!(\"Item permanently borrowed\/leaked\"),\n }\n }\n}\n \n\nenum InternalOwned<T> {\n Item(T),\n Borrowed,\n}\n\npub struct Owned<T>(InternalOwned<T>);\nimpl<T> Owned<T> {\n pub fn new(data:T) -> Self {\n Owned::<T>(InternalOwned::Item(data))\n }\n pub fn unwrap_or(self, other: T) -> T {\n if let InternalOwned::Item(x) = self.0 {\n x\n } else {\n other\n }\n }\n pub fn unwrap(self) -> T {\n if let InternalOwned::Item(x) = self.0 {\n x\n } else {\n panic!(\"Item permanently borrowed\")\n }\n }\n}\n\n\n\n\nfn CompressMulti<Alloc:BrotliAlloc+Send+'static, SliceW: SliceWrapper<u8>, Spawner:Spawnable<CompressionThreadResult<Alloc>>> (\n params:&BrotliEncoderParams,\n input: &mut Owned<SliceW>,\n output: &mut [u8],\n alloc_per_thread:&mut [SendAlloc<Alloc, Spawner::JoinHandle>],\n thread_spawner: Spawner,\n) -> Result<usize, ()> where <Alloc as Allocator<u8>>::AllocatedMemory: Send {\n Err(())\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>I think I finished 6 too? Could it be better?<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse std::env;\nuse std::process::{Command, Stdio};\n\nfn main() {\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n let result = Command::new(\"make\")\n .args(&[\"-f\", \"makefile.cargo\"])\n .status()\n .unwrap();\n assert!(result.success());\n println!(\"cargo:rustc-flags=-L native={}\", out_dir);\n}\n<commit_msg>auto merge of #5453 : Ms2ger\/servo\/unused-import, r=glennw<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse std::env;\nuse std::process::Command;\n\nfn main() {\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n let result = Command::new(\"make\")\n .args(&[\"-f\", \"makefile.cargo\"])\n .status()\n .unwrap();\n assert!(result.success());\n println!(\"cargo:rustc-flags=-L native={}\", out_dir);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust: altERnaTIng cAsE <=> ALTerNAtiNG CaSe<commit_after><|endoftext|>"} {"text":"<commit_before>use mime::Mime;\n\nheader! 
{\n \/\/\/ `Content-Type` header, defined in\n \/\/\/ [RFC7231](http:\/\/tools.ietf.org\/html\/rfc7231#section-3.1.1.5)\n \/\/\/\n \/\/\/ The `Content-Type` header field indicates the media type of the\n \/\/\/ associated representation: either the representation enclosed in the\n \/\/\/ message payload or the selected representation, as determined by the\n \/\/\/ message semantics. The indicated media type defines both the data\n \/\/\/ format and how that data is intended to be processed by a recipient,\n \/\/\/ within the scope of the received message semantics, after any content\n \/\/\/ codings indicated by Content-Encoding are decoded.\n \/\/\/\n \/\/\/ Although the `mime` crate allows the mime options to be any slice, this crate\n \/\/\/ forces the use of Vec. This is to make sure the same header can't have more than 1 type. If\n \/\/\/ this is an issue, it's possible to implement `Header` on a custom struct.\n \/\/\/\n \/\/\/ # ABNF\n \/\/\/ ```plain\n \/\/\/ Content-Type = media-type\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Example values\n \/\/\/ * `text\/html; charset=ISO-8859-4`\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```\n \/\/\/ use hyper::header::{Headers, ContentType};\n \/\/\/ use hyper::mime::{Mime, TopLevel, SubLevel};\n \/\/\/\n \/\/\/ let mut headers = Headers::new();\n \/\/\/\n \/\/\/ headers.set(\n \/\/\/ ContentType(Mime(TopLevel::Text, SubLevel::Html, vec![]))\n \/\/\/ );\n \/\/\/ ```\n \/\/\/ ```\n \/\/\/ use hyper::header::{Headers, ContentType};\n \/\/\/ use hyper::mime::{Mime, TopLevel, SubLevel, Attr, Value};\n \/\/\/\n \/\/\/ let mut headers = Headers::new();\n \/\/\/\n \/\/\/ headers.set(\n \/\/\/ ContentType(Mime(TopLevel::Application, SubLevel::Json,\n \/\/\/ vec![(Attr::Charset, Value::Utf8)]))\n \/\/\/ );\n \/\/\/ ```\n (ContentType, \"Content-Type\") => [Mime]\n\n test_content_type {\n test_header!(\n test1,\n \/\/ FIXME: Should be b\"text\/html; charset=ISO-8859-4\" but mime crate lowercases\n \/\/ the whole value so parsing and formatting the value gives a different result\n vec![b\"text\/html; charset=iso-8859-4\"],\n Some(HeaderField(Mime(\n TopLevel::Text,\n SubLevel::Html,\n vec![(Attr::Charset, Value::Ext(\"iso-8859-4\".to_owned()))]))));\n }\n}\n\nimpl ContentType {\n \/\/\/ A constructor to easily create a `Content-Type: application\/json` header.\n #[inline]\n pub fn json() -> ContentType {\n ContentType(mime!(Application\/Json))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: text\/plain; charset=utf-8` header.\n #[inline]\n pub fn plaintext() -> ContentType {\n ContentType(mime!(Text\/Plain; Charset=Utf8))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: text\/html; charset=utf-8` header.\n #[inline]\n pub fn html() -> ContentType {\n ContentType(mime!(Text\/Html; Charset=Utf8))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: application\/www-form-url-encoded` header.\n #[inline]\n pub fn form_url_encoded() -> ContentType {\n ContentType(mime!(Application\/WwwFormUrlEncoded))\n }\n \/\/\/ A constructor to easily create a `Content-Type: image\/jpeg` header.\n #[inline]\n pub fn jpeg() -> ContentType {\n ContentType(mime!(Image\/Jpeg))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: image\/png` header.\n #[inline]\n pub fn png() -> ContentType {\n ContentType(mime!(Image\/Png))\n }\n}\n\nimpl Eq for ContentType {}\n\nbench_header!(bench, ContentType, { vec![b\"application\/json; charset=utf-8\".to_vec()] });\n<commit_msg>docs(header): update ContentType example to use 
ContentType::json()<commit_after>use mime::Mime;\n\nheader! {\n \/\/\/ `Content-Type` header, defined in\n \/\/\/ [RFC7231](http:\/\/tools.ietf.org\/html\/rfc7231#section-3.1.1.5)\n \/\/\/\n \/\/\/ The `Content-Type` header field indicates the media type of the\n \/\/\/ associated representation: either the representation enclosed in the\n \/\/\/ message payload or the selected representation, as determined by the\n \/\/\/ message semantics. The indicated media type defines both the data\n \/\/\/ format and how that data is intended to be processed by a recipient,\n \/\/\/ within the scope of the received message semantics, after any content\n \/\/\/ codings indicated by Content-Encoding are decoded.\n \/\/\/\n \/\/\/ Although the `mime` crate allows the mime options to be any slice, this crate\n \/\/\/ forces the use of Vec. This is to make sure the same header can't have more than 1 type. If\n \/\/\/ this is an issue, it's possible to implement `Header` on a custom struct.\n \/\/\/\n \/\/\/ # ABNF\n \/\/\/ ```plain\n \/\/\/ Content-Type = media-type\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Example values\n \/\/\/ * `text\/html; charset=ISO-8859-4`\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```\n \/\/\/ use hyper::header::{Headers, ContentType};\n \/\/\/\n \/\/\/ let mut headers = Headers::new();\n \/\/\/\n \/\/\/ headers.set(\n \/\/\/ ContentType::json()\n \/\/\/ );\n \/\/\/ ```\n \/\/\/ ```\n \/\/\/ use hyper::header::{Headers, ContentType};\n \/\/\/ use hyper::mime::{Mime, TopLevel, SubLevel, Attr, Value};\n \/\/\/\n \/\/\/ let mut headers = Headers::new();\n \/\/\/\n \/\/\/ headers.set(\n \/\/\/ ContentType(Mime(TopLevel::Text, SubLevel::Html,\n \/\/\/ vec![(Attr::Charset, Value::Utf8)]))\n \/\/\/ );\n \/\/\/ ```\n (ContentType, \"Content-Type\") => [Mime]\n\n test_content_type {\n test_header!(\n test1,\n \/\/ FIXME: Should be b\"text\/html; charset=ISO-8859-4\" but mime crate lowercases\n \/\/ the whole value so parsing and formatting the value gives a different result\n vec![b\"text\/html; charset=iso-8859-4\"],\n Some(HeaderField(Mime(\n TopLevel::Text,\n SubLevel::Html,\n vec![(Attr::Charset, Value::Ext(\"iso-8859-4\".to_owned()))]))));\n }\n}\n\nimpl ContentType {\n \/\/\/ A constructor to easily create a `Content-Type: application\/json` header.\n #[inline]\n pub fn json() -> ContentType {\n ContentType(mime!(Application\/Json))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: text\/plain; charset=utf-8` header.\n #[inline]\n pub fn plaintext() -> ContentType {\n ContentType(mime!(Text\/Plain; Charset=Utf8))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: text\/html; charset=utf-8` header.\n #[inline]\n pub fn html() -> ContentType {\n ContentType(mime!(Text\/Html; Charset=Utf8))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: application\/www-form-url-encoded` header.\n #[inline]\n pub fn form_url_encoded() -> ContentType {\n ContentType(mime!(Application\/WwwFormUrlEncoded))\n }\n \/\/\/ A constructor to easily create a `Content-Type: image\/jpeg` header.\n #[inline]\n pub fn jpeg() -> ContentType {\n ContentType(mime!(Image\/Jpeg))\n }\n\n \/\/\/ A constructor to easily create a `Content-Type: image\/png` header.\n #[inline]\n pub fn png() -> ContentType {\n ContentType(mime!(Image\/Png))\n }\n}\n\nimpl Eq for ContentType {}\n\nbench_header!(bench, ContentType, { vec![b\"application\/json; charset=utf-8\".to_vec()] });\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[example] nbody 
(#117)<commit_after>#![feature(cfg_target_feature)]\n#![feature(target_feature)]\n\nextern crate stdsimd;\n\nuse self::stdsimd::simd;\nuse self::stdsimd::vendor;\n\nuse simd::{f64x2, f32x4};\n\nconst PI: f64 = 3.141592653589793;\nconst SOLAR_MASS: f64 = 4.0 * PI * PI;\nconst DAYS_PER_YEAR: f64 = 365.24;\n\npub trait Frsqrt {\n fn frsqrt(&self) -> Self;\n}\n\nimpl Frsqrt for f64x2 {\n fn frsqrt(&self) -> Self {\n unsafe {\n #[cfg(all(any(target_arch = \"x86\", target_arch = \"x86_64\"),\n target_feature = \"sse\"))]\n {\n let t = self.as_f32x2();\n let u = vendor::_mm_rsqrt_ps(\n f32x4::new(t.extract(0), t.extract(1), 0., 0.)).as_f64x4();\n f64x2::new(u.extract(0), u.extract(1))\n }\n #[cfg(all(any(target_arch = \"arm\", target_arch = \"aarch64\"),\n target_feature = \"neon\"))]\n {\n vendor::vrsqrte_f32(self.as_f32x2()).as_f64x2()\n }\n\n #[cfg(not(any(all(any(target_arch = \"x86\", target_arch = \"x86_64\"),\n target_feature = \"sse\"),\n all(any(target_arch = \"arm\", target_arch = \"aarch64\"),\n target_feature = \"neon\")\n )))]\n {\n self.replace(0, 1. \/ self.extract(0).sqrt());\n self.replace(1, 1. \/ self.extract(1).sqrt());\n *self\n }\n }\n }\n}\n\nstruct Body {\n x: [f64; 3],\n _fill: f64,\n v: [f64; 3],\n mass: f64,\n}\n\nimpl Body {\n fn new(x0: f64, x1: f64, x2: f64,\n v0: f64, v1: f64, v2: f64,\n mass: f64) -> Body {\n Body {\n x: [x0, x1, x2],\n _fill: 0.0,\n v: [v0, v1, v2],\n mass: mass,\n }\n }\n}\n\nconst N_BODIES: usize = 5;\nconst N: usize = N_BODIES * (N_BODIES - 1) \/ 2;\nfn offset_momentum(bodies: &mut [Body; N_BODIES]) {\n let (sun, rest) = bodies.split_at_mut(1);\n let sun = &mut sun[0];\n for body in rest {\n for k in 0..3 {\n sun.v[k] -= body.v[k] * body.mass \/ SOLAR_MASS;\n }\n }\n}\nfn advance(bodies: &mut [Body; N_BODIES], dt: f64) {\n let mut r = [[0.0; 4]; N];\n let mut mag = [0.0; N];\n\n let mut dx = [f64x2::splat(0.0); 3];\n let mut dsquared;\n let mut distance;\n let mut dmag;\n\n let mut i = 0;\n for j in 0..N_BODIES {\n for k in j+1..N_BODIES {\n for m in 0..3 {\n r[i][m] = bodies[j].x[m] - bodies[k].x[m];\n }\n i += 1;\n }\n }\n\n i = 0;\n while i < N {\n for m in 0..3 {\n dx[m] = f64x2::new(r[i][m], r[i+1][m]);\n }\n\n dsquared = dx[0] * dx[0] + dx[1] * dx[1] + dx[2] * dx[2];\n distance = dsquared.frsqrt();\n for _ in 0..2 {\n distance = distance * f64x2::splat(1.5) -\n ((f64x2::splat(0.5) * dsquared) * distance) * (distance * distance)\n }\n dmag = f64x2::splat(dt) \/ dsquared * distance;\n dmag.store(&mut mag, i);\n\n i += 2;\n }\n\n i = 0;\n for j in 0..N_BODIES {\n for k in j+1..N_BODIES {\n for m in 0..3 {\n bodies[j].v[m] -= r[i][m] * bodies[k].mass * mag[i];\n bodies[k].v[m] += r[i][m] * bodies[j].mass * mag[i];\n }\n i += 1\n }\n }\n for body in bodies {\n for m in 0..3 {\n body.x[m] += dt * body.v[m]\n }\n }\n}\n\nfn energy(bodies: &[Body; N_BODIES]) -> f64 {\n let mut e = 0.0;\n for i in 0..N_BODIES {\n let bi = &bodies[i];\n e += bi.mass * (bi.v[0] * bi.v[0] + bi.v[1] * bi.v[1] + bi.v[2] * bi.v[2]) \/ 2.0;\n for j in i+1..N_BODIES {\n let bj = &bodies[j];\n let mut dx = [0.0; 3];\n for k in 0..3 {\n dx[k] = bi.x[k] - bj.x[k];\n }\n let mut distance = 0.0;\n for &d in &dx { distance += d * d }\n e -= bi.mass * bj.mass \/ distance.sqrt()\n }\n }\n e\n}\n\nfn main() {\n let mut bodies: [Body; N_BODIES] = [\n \/* sun *\/\n Body::new(0.0, 0.0, 0.0,\n 0.0, 0.0, 0.0,\n SOLAR_MASS),\n \/* jupiter *\/\n Body::new(4.84143144246472090e+00,\n -1.16032004402742839e+00,\n -1.03622044471123109e-01 ,\n 1.66007664274403694e-03 * DAYS_PER_YEAR,\n 
7.69901118419740425e-03 * DAYS_PER_YEAR,\n -6.90460016972063023e-05 * DAYS_PER_YEAR ,\n 9.54791938424326609e-04 * SOLAR_MASS\n ),\n \/* saturn *\/\n Body::new(8.34336671824457987e+00,\n 4.12479856412430479e+00,\n -4.03523417114321381e-01 ,\n -2.76742510726862411e-03 * DAYS_PER_YEAR,\n 4.99852801234917238e-03 * DAYS_PER_YEAR,\n 2.30417297573763929e-05 * DAYS_PER_YEAR ,\n 2.85885980666130812e-04 * SOLAR_MASS\n ),\n \/* uranus *\/\n Body::new(1.28943695621391310e+01,\n -1.51111514016986312e+01,\n -2.23307578892655734e-01 ,\n 2.96460137564761618e-03 * DAYS_PER_YEAR,\n 2.37847173959480950e-03 * DAYS_PER_YEAR,\n -2.96589568540237556e-05 * DAYS_PER_YEAR ,\n 4.36624404335156298e-05 * SOLAR_MASS\n ),\n \/* neptune *\/\n Body::new(1.53796971148509165e+01,\n -2.59193146099879641e+01,\n 1.79258772950371181e-01 ,\n 2.68067772490389322e-03 * DAYS_PER_YEAR,\n 1.62824170038242295e-03 * DAYS_PER_YEAR,\n -9.51592254519715870e-05 * DAYS_PER_YEAR ,\n 5.15138902046611451e-05 * SOLAR_MASS\n )\n ];\n\n let n: usize = std::env::args().nth(1).expect(\"need one arg\").parse().unwrap();\n\n offset_momentum(&mut bodies);\n println!(\"{:.9}\", energy(&bodies));\n for _ in 0..n {\n advance(&mut bodies, 0.01);\n }\n println!(\"{:.9}\", energy(&bodies));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Solaris-specific raw type definitions\n\n#![stable(feature = \"raw_ext\", since = \"1.1.0\")]\n#![rustc_deprecated(since = \"1.8.0\",\n reason = \"these type aliases are no longer supported by \\\n the standard library, the `libc` crate on \\\n crates.io should be used instead for the correct \\\n definitions\")]\n#![allow(deprecated)]\n\nuse os::raw::c_long;\nuse os::unix::raw::{uid_t, gid_t};\n\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blkcnt_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blksize_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type dev_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type fflags_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type ino_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type mode_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type nlink_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type off_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type time_t = i64;\n\n#[stable(feature = \"pthread_t\", since = \"1.8.0\")]\npub type pthread_t = usize;\n\n#[repr(C)]\n#[derive(Clone)]\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")]\npub struct stat {\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_dev: dev_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ino: ino_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mode: mode_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_nlink: nlink_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_uid: uid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gid: gid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_rdev: dev_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_size: off_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blksize: blksize_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blocks: blkcnt_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub __unused: [u8; 16]\n}\n<commit_msg>Rollup merge of #43597 - dhduvall:master, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Solaris-specific raw type definitions\n\n#![stable(feature = \"raw_ext\", since = \"1.1.0\")]\n#![rustc_deprecated(since = \"1.8.0\",\n reason = \"these type aliases are no longer supported by \\\n the standard library, the `libc` crate on \\\n crates.io should be used instead for the correct \\\n definitions\")]\n#![allow(deprecated)]\n\nuse os::raw::c_long;\nuse os::unix::raw::{uid_t, gid_t};\n\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blkcnt_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blksize_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type dev_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type fflags_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type ino_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type mode_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type nlink_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type off_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type time_t = i64;\n\n#[stable(feature = \"pthread_t\", since = \"1.8.0\")]\npub type pthread_t = u32;\n\n#[repr(C)]\n#[derive(Clone)]\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")]\npub struct stat {\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_dev: dev_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ino: ino_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mode: mode_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_nlink: nlink_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_uid: uid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gid: gid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_rdev: dev_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_size: off_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime: time_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blksize: blksize_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blocks: blkcnt_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub __unused: [u8; 16]\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse devicemapper::{DM, DevId, DeviceInfo, DmFlags};\nuse engine::EngineResult;\n\nuse std::path::Path;\n\nuse super::blockdev::BlockDev;\n\nuse types::DataBlocks;\nuse types::Sectors;\n\npub struct ThinPoolDev {\n pub name: String,\n pub dev_info: Option<DeviceInfo>,\n data_block_size: Option<Sectors>,\n pub low_water_mark: Option<DataBlocks>, \/\/ How close to full before we are worried?\n pub meta_dev: Option<BlockDev>,\n pub data_dev: Option<BlockDev>,\n}\n\n\/\/\/ support use of DM to create pools for thin provisioned devices\nimpl ThinPoolDev {\n pub fn new(name: &str) -> ThinPoolDev {\n ThinPoolDev {\n name: name.to_owned(),\n dev_info: None,\n data_block_size: None,\n low_water_mark: None,\n meta_dev: None,\n data_dev: None,\n }\n }\n \/\/\/ Generate a Vec<> to be passed to DM. The format of the Vec entries is:\n \/\/\/ <start sec> <length> \"thin-pool\" \/dev\/meta \/dev\/data <block size> <low water mark>\n fn dm_table(&self,\n length: Sectors,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n meta_dev: &Path,\n data_dev: &Path)\n -> Vec<(u64, u64, String, String)> {\n let mut table = Vec::new();\n let params = format!(\"{} {} {} {} 1 skip_block_zeroing\",\n meta_dev.to_string_lossy(),\n data_dev.to_string_lossy(),\n *data_block_size,\n *low_water_mark);\n table.push((0u64, length.0, \"thin-pool\".to_owned(), params));\n debug!(\"dmtable line : {:?}\", table);\n table\n }\n\n \/\/\/ Use DM to create a \"thin-pool\". A \"thin-pool\" is shared space for\n \/\/\/ other thin provisioned devices to use.\n \/\/\/\n \/\/\/ See section \"Setting up a fresh pool device\":\n \/\/\/ https:\/\/www.kernel.org\/doc\/Documentation\/device-mapper\/thin-provisioning.txt\n pub fn setup(&mut self,\n dm: &DM,\n length: Sectors,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n meta_dev: &Path,\n data_dev: &Path)\n -> EngineResult<()> {\n\n debug!(\"setup : {}\", self.name);\n try!(dm.device_create(&self.name, None, DmFlags::empty()));\n\n let table = self.dm_table(length, data_block_size, low_water_mark, meta_dev, data_dev);\n self.data_block_size = Some(data_block_size);\n self.low_water_mark = Some(low_water_mark);\n let id = &DevId::Name(&self.name);\n self.dev_info = Some(try!(dm.table_load(id, &table)));\n try!(dm.device_suspend(id, DmFlags::empty()));\n\n Ok(())\n }\n\n pub fn message(&self, dm: &DM, message: &str) -> EngineResult<()> {\n try!(dm.target_msg(&DevId::Name(&self.name), 0, message));\n\n Ok(())\n }\n\n pub fn teardown(&mut self, dm: &DM) -> EngineResult<()> {\n try!(dm.device_remove(&DevId::Name(&self.name), DmFlags::empty()));\n\n Ok(())\n }\n}\n<commit_msg>Remove an unhelpful comment<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse devicemapper::{DM, DevId, DeviceInfo, DmFlags};\nuse engine::EngineResult;\n\nuse std::path::Path;\n\nuse super::blockdev::BlockDev;\n\nuse types::DataBlocks;\nuse types::Sectors;\n\npub struct ThinPoolDev {\n pub name: String,\n pub dev_info: Option<DeviceInfo>,\n data_block_size: Option<Sectors>,\n pub low_water_mark: Option<DataBlocks>,\n pub meta_dev: Option<BlockDev>,\n pub data_dev: Option<BlockDev>,\n}\n\n\/\/\/ support use of DM to create pools for thin provisioned devices\nimpl ThinPoolDev {\n pub fn new(name: &str) -> ThinPoolDev {\n ThinPoolDev {\n name: name.to_owned(),\n dev_info: None,\n data_block_size: None,\n low_water_mark: None,\n meta_dev: None,\n data_dev: None,\n }\n }\n \/\/\/ Generate a Vec<> to be passed to DM. The format of the Vec entries is:\n \/\/\/ <start sec> <length> \"thin-pool\" \/dev\/meta \/dev\/data <block size> <low water mark>\n fn dm_table(&self,\n length: Sectors,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n meta_dev: &Path,\n data_dev: &Path)\n -> Vec<(u64, u64, String, String)> {\n let mut table = Vec::new();\n let params = format!(\"{} {} {} {} 1 skip_block_zeroing\",\n meta_dev.to_string_lossy(),\n data_dev.to_string_lossy(),\n *data_block_size,\n *low_water_mark);\n table.push((0u64, length.0, \"thin-pool\".to_owned(), params));\n debug!(\"dmtable line : {:?}\", table);\n table\n }\n\n \/\/\/ Use DM to create a \"thin-pool\". A \"thin-pool\" is shared space for\n \/\/\/ other thin provisioned devices to use.\n \/\/\/\n \/\/\/ See section \"Setting up a fresh pool device\":\n \/\/\/ https:\/\/www.kernel.org\/doc\/Documentation\/device-mapper\/thin-provisioning.txt\n pub fn setup(&mut self,\n dm: &DM,\n length: Sectors,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n meta_dev: &Path,\n data_dev: &Path)\n -> EngineResult<()> {\n\n debug!(\"setup : {}\", self.name);\n try!(dm.device_create(&self.name, None, DmFlags::empty()));\n\n let table = self.dm_table(length, data_block_size, low_water_mark, meta_dev, data_dev);\n self.data_block_size = Some(data_block_size);\n self.low_water_mark = Some(low_water_mark);\n let id = &DevId::Name(&self.name);\n self.dev_info = Some(try!(dm.table_load(id, &table)));\n try!(dm.device_suspend(id, DmFlags::empty()));\n\n Ok(())\n }\n\n pub fn message(&self, dm: &DM, message: &str) -> EngineResult<()> {\n try!(dm.target_msg(&DevId::Name(&self.name), 0, message));\n\n Ok(())\n }\n\n pub fn teardown(&mut self, dm: &DM) -> EngineResult<()> {\n try!(dm.device_remove(&DevId::Name(&self.name), DmFlags::empty()));\n\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Route::ranked type signature.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>A minor refactor of the Vulkan Descriptor Set Layout code<commit_after><|endoftext|>"} {"text":"<commit_before>use std::hash::{Hash, Hasher};\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\n\nuse super::{Frame, Memory, Machine};\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n#[derive(Eq, PartialEq)]\npub struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {\n machine: M,\n memory: Memory<'a, 'mir, 'tcx, M>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx, M> EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 
'tcx>,\n{\n pub fn new(machine: &M, memory: &Memory<'a, 'mir, 'tcx, M>, stack: &[Frame<'mir, 'tcx>]) -> Self {\n EvalSnapshot {\n machine: machine.clone(),\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n}\n\nimpl<'a, 'mir, 'tcx, M> Hash for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx, M> HashStable<StableHashingContext<'b>> for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'b>, hasher: &mut StableHasher<W>) {\n let EvalSnapshot{ machine, memory, stack } = self;\n (machine, &memory.data, stack).hash_stable(hcx, hasher);\n }\n}\n<commit_msg>Introduce Snapshot and SnapshotContext traits<commit_after>use std::hash::{Hash, Hasher};\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc::mir;\nuse rustc::mir::interpret::{AllocId, Pointer, Scalar, ScalarMaybeUndef, Relocations, Allocation, UndefMask};\nuse rustc::ty;\nuse rustc::ty::layout::Align;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\nuse syntax::ast::Mutability;\nuse syntax::source_map::Span;\n\nuse super::eval_context::{LocalValue, StackPopCleanup};\nuse super::{Frame, Memory, Machine, Operand, MemPlace, Place, PlaceExtra, Value};\n\ntrait SnapshotContext<'a> {\n type To;\n type From;\n fn resolve(&'a self, id: &Self::From) -> Option<&'a Self::To>;\n}\n\ntrait Snapshot<'a, Ctx: SnapshotContext<'a>> {\n type Item;\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for AllocId\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocIdSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))\n }\n}\n\ntype PointerSnapshot<'a> = Pointer<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Pointer\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PointerSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Pointer{ alloc_id, offset } = self;\n\n Pointer {\n alloc_id: alloc_id.snapshot(ctx),\n offset: *offset,\n }\n }\n}\n\ntype ScalarSnapshot<'a> = Scalar<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Scalar\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),\n Scalar::Bits{ size, bits } => Scalar::Bits{\n size: *size,\n bits: *bits,\n },\n }\n }\n}\n\ntype ScalarMaybeUndefSnapshot<'a> = ScalarMaybeUndef<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for ScalarMaybeUndef\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarMaybeUndefSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.snapshot(ctx)),\n ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,\n }\n 
}\n}\n\ntype MemPlaceSnapshot<'a> = MemPlace<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for MemPlace\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = MemPlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let MemPlace{ ptr, extra, align } = self;\n\n MemPlaceSnapshot{\n ptr: ptr.snapshot(ctx),\n extra: extra.snapshot(ctx),\n align: *align,\n }\n }\n}\n\ntype PlaceSnapshot<'a> = Place<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Place\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),\n\n Place::Local{ frame, local } => Place::Local{\n frame: *frame,\n local: *local,\n },\n }\n }\n}\n\ntype PlaceExtraSnapshot<'a> = PlaceExtra<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for PlaceExtra\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceExtraSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n PlaceExtra::Vtable(p) => PlaceExtra::Vtable(p.snapshot(ctx)),\n PlaceExtra::Length(l) => PlaceExtra::Length(*l),\n PlaceExtra::None => PlaceExtra::None,\n }\n }\n}\n\ntype ValueSnapshot<'a> = Value<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Value\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Value::Scalar(s) => Value::Scalar(s.snapshot(ctx)),\n Value::ScalarPair(a, b) => Value::ScalarPair(a.snapshot(ctx), b.snapshot(ctx)),\n }\n }\n}\n\ntype OperandSnapshot<'a> = Operand<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Operand\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = OperandSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Operand::Immediate(v) => Operand::Immediate(v.snapshot(ctx)),\n Operand::Indirect(m) => Operand::Indirect(m.snapshot(ctx)),\n }\n }\n}\n\ntype LocalValueSnapshot<'a> = LocalValue<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for LocalValue\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = LocalValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n LocalValue::Live(v) => LocalValue::Live(v.snapshot(ctx)),\n LocalValue::Dead => LocalValue::Dead,\n }\n }\n}\n\ntype RelocationsSnapshot<'a> = Relocations<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Relocations\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = RelocationsSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n Relocations::from_presorted(self.iter().map(|(size, id)| (*size, id.snapshot(ctx))).collect())\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocationSnapshot<'a> {\n bytes: &'a [u8],\n relocations: RelocationsSnapshot<'a>,\n undef_mask: &'a UndefMask,\n align: &'a Align,\n runtime_mutability: &'a Mutability,\n}\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocationSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Allocation { bytes, relocations, undef_mask, align, runtime_mutability } = self;\n\n AllocationSnapshot {\n bytes,\n undef_mask,\n align,\n runtime_mutability,\n relocations: relocations.snapshot(ctx),\n }\n }\n}\n\n#[derive(Eq, 
PartialEq)]\nstruct FrameSnapshot<'a, 'tcx> {\n instance: &'a ty::Instance<'tcx>,\n span: &'a Span,\n return_to_block: &'a StackPopCleanup,\n return_place: PlaceSnapshot<'a>,\n locals: IndexVec<mir::Local, LocalValueSnapshot<'a>>,\n block: &'a mir::BasicBlock,\n stmt: usize,\n}\n\nimpl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = FrameSnapshot<'a, 'tcx>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Frame {\n mir: _,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n FrameSnapshot {\n instance,\n span,\n return_to_block,\n block,\n stmt: *stmt,\n return_place: return_place.snapshot(ctx),\n locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct MemorySnapshot<'a, 'mir: 'a, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx> + 'a> {\n data: &'a M::MemoryData,\n}\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n#[derive(Eq, PartialEq)]\npub struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {\n machine: M,\n memory: Memory<'a, 'mir, 'tcx, M>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx, M> EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n pub fn new(machine: &M, memory: &Memory<'a, 'mir, 'tcx, M>, stack: &[Frame<'mir, 'tcx>]) -> Self {\n EvalSnapshot {\n machine: machine.clone(),\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n}\n\nimpl<'a, 'mir, 'tcx, M> Hash for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx, M> HashStable<StableHashingContext<'b>> for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'b>, hasher: &mut StableHasher<W>) {\n let EvalSnapshot{ machine, memory, stack } = self;\n (machine, &memory.data, stack).hash_stable(hcx, hasher);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rename cached_it<commit_after><|endoftext|>"} {"text":"<commit_before>use serialize::{Decodable, json};\nuse request::Request;\nuse typemap::Key;\nuse plugin::{Plugin, Pluggable};\nuse std::io;\nuse std::io::{Read, ErrorKind};\nuse std::rc::Rc;\n\n\/\/ Plugin boilerplate\nstruct JsonBodyParser;\nimpl Key for JsonBodyParser { type Value = String; }\nimpl<'a, 'b, 'k> Plugin<Request<'a, 'b, 'k>> for JsonBodyParser {\n \/\/ FIXME: Plugin requires Error to be `Clone`, but we can probably\n \/\/ do something so we don't need to have an `Rc`\n type Error = Rc<io::Error>;\n\n fn eval(req: &mut Request) -> Result<String, Rc<io::Error>> {\n let mut s = String::new();\n try!(req.origin.read_to_string(&mut s)\n .map_err(Rc::new));\n Ok(s)\n }\n}\n\npub trait JsonBody {\n fn json_as<T: Decodable>(&mut self) -> Result<T, Rc<io::Error>>;\n}\n\nimpl<'a, 'b, 'k> JsonBody for Request<'a, 'b, 'k> {\n fn json_as<T: Decodable>(&mut self) -> Result<T, Rc<io::Error>> {\n self.get::<JsonBodyParser>().and_then(|parsed|\n json::decode::<T>(&*parsed).map_err(|_err|\n Rc::new(io::Error::new(ErrorKind::Other,\n format!(\"Failed to parse JSON: {}\", _err)))\n \n )\n 
)\n }\n}\n<commit_msg>refactor(json): remove unnecessary Rc usage<commit_after>use serialize::{Decodable, json};\nuse request::Request;\nuse typemap::Key;\nuse plugin::{Plugin, Pluggable};\nuse std::io;\nuse std::io::{Read, ErrorKind};\n\n\/\/ Plugin boilerplate\nstruct JsonBodyParser;\nimpl Key for JsonBodyParser { type Value = String; }\nimpl<'a, 'b, 'k> Plugin<Request<'a, 'b, 'k>> for JsonBodyParser {\n type Error = io::Error;\n\n fn eval(req: &mut Request) -> Result<String, io::Error> {\n let mut s = String::new();\n try!(req.origin.read_to_string(&mut s));\n Ok(s)\n }\n}\n\npub trait JsonBody {\n fn json_as<T: Decodable>(&mut self) -> Result<T, io::Error>;\n}\n\nimpl<'a, 'b, 'k> JsonBody for Request<'a, 'b, 'k> {\n fn json_as<T: Decodable>(&mut self) -> Result<T, io::Error> {\n self.get_ref::<JsonBodyParser>().and_then(|parsed|\n json::decode::<T>(&*parsed).map_err(|err|\n io::Error::new(ErrorKind::Other, format!(\"Parse error: {}\", err))\n )\n )\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix\/ignore some lint errors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement methods to get information by `NameSpPtr`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add number literal parsing in tokenizer. Derive some traits for enums. Make macro match_head more flexible. Add lots of tests.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove dead code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing Debug impl for *_ENCODE_SET in define_encode_set macro<commit_after><|endoftext|>"} {"text":"<commit_before>\/* Fundamental layout structures and algorithms. *\/\n\nuse arc = std::arc;\nuse arc::ARC;\nuse au = gfx::geometry;\nuse au::au;\nuse core::dvec::DVec;\nuse core::to_str::ToStr;\nuse core::rand;\nuse css::styles::SpecifiedStyle;\nuse css::values::{BoxSizing, Length, Px, CSSDisplay, Specified, BgColor, BgColorTransparent};\nuse dl = gfx::display_list;\nuse dom::element::{ElementKind, HTMLDivElement, HTMLImageElement};\nuse dom::node::{Element, Node, NodeData, NodeKind, NodeTree};\nuse dom::rcu;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse geom::point::Point2D;\nuse image::{Image, ImageHolder};\nuse layout::context::LayoutContext;\nuse layout::debug::BoxedDebugMethods;\nuse layout::flow::FlowContext;\nuse layout::text::TextBoxData;\nuse servo_text::text_run::TextRun;\nuse std::net::url::Url;\nuse task::spawn;\nuse util::color::Color;\nuse util::tree;\nuse vec::{push, push_all};\n\n\/** \nRender boxes (`struct RenderBox`) are the leafs of the layout\ntree. They cannot position themselves. In general, render boxes do not\nhave a simple correspondence with CSS boxes as in the specification:\n\n * Several render boxes may correspond to the same CSS box or DOM\n node. For example, a CSS text box broken across two lines is\n represented by two render boxes.\n\n * Some CSS boxes are not created at all, such as some anonymous block\n boxes induced by inline boxes with block-level sibling boxes. In\n that case, Servo uses an InlineFlow with BlockFlow siblings; the\n InlineFlow is block-level, but not a block container. It is\n positioned as if it were a block box, but its children are\n positioned according to inline flow.\n\nFundamental box types include:\n\n * GenericBox: an empty box that contributes only borders, margins,\npadding, backgrounds. 
It is analogous to a CSS nonreplaced content box.\n\n * ImageBox: a box that represents a (replaced content) image and its\n accompanying borders, shadows, etc.\n\n * TextBox: a box representing a single run of text with a distinct\n style. A TextBox may be split into two or more render boxes across\n line breaks. Several TextBoxes may correspond to a single DOM text\n node. Split text boxes are implemented by referring to subsets of a\n master TextRun object.\n\n*\/\n\n\n\/* A box's kind influences how its styles are interpreted during\n layout. For example, replaced content such as images are resized\n differently than tables, text, or other content.\n\n It also holds data specific to different box types, such as text.\n*\/\n\n\nstruct RenderBoxData {\n \/* references to children, parent inline flow boxes *\/\n tree : tree::Tree<@RenderBox>,\n \/* originating DOM node *\/\n node : Node,\n \/* reference to containing flow context, which this box\n participates in *\/\n ctx : @FlowContext,\n \/* position of this box relative to owning flow *\/\n mut position : Rect<au>,\n font_size : Length,\n \/* TODO (Issue #87): debug only *\/\n mut id: int\n}\n\nenum RenderBoxType {\n RenderBox_Generic,\n RenderBox_Image,\n RenderBox_Text\n}\n\npub enum RenderBox {\n GenericBox(RenderBoxData),\n ImageBox(RenderBoxData, ImageHolder),\n TextBox(RenderBoxData, TextBoxData),\n UnscannedTextBox(RenderBoxData, ~str)\n}\n\nimpl RenderBox {\n pure fn d(&self) -> &self\/RenderBoxData {\n match *self {\n GenericBox(ref d) => d,\n ImageBox(ref d, _) => d,\n TextBox(ref d, _) => d,\n UnscannedTextBox(ref d, _) => d,\n }\n }\n}\n\nfn RenderBoxData(node: Node, ctx: @FlowContext, id: int) -> RenderBoxData {\n RenderBoxData {\n \/* will be set if box is parented *\/\n tree : tree::empty(),\n node : node,\n mut ctx : ctx,\n mut position : au::zero_rect(),\n font_size: Px(0.0),\n id : id\n }\n}\n\nimpl RenderBox {\n pure fn is_replaced() -> bool {\n match self {\n ImageBox(*) => true, \/\/ TODO: form elements, etc\n _ => false\n }\n }\n\n \/** In general, these functions are transitively impure because they\n * may cause glyphs to be allocated. For now, it's impure because of \n * holder.get_image()\n *\/\n fn get_min_width(ctx: &LayoutContext) -> au {\n match self {\n \/\/ TODO: this should account for min\/pref widths of the\n \/\/ box element in isolation. That includes\n \/\/ border\/margin\/padding but not child widths. The block\n \/\/ FlowContext will combine the width of this element and\n \/\/ that of its children to arrive at the context width.\n GenericBox(*) => au(0),\n \/\/ TODO: consult CSS 'width', margin, border.\n \/\/ TODO: If image isn't available, consult 'width'.\n ImageBox(_,i) => au::from_px(i.get_size().get_default(Size2D(0,0)).width),\n TextBox(_,d) => d.run.min_width_for_range(ctx, d.offset, d.length),\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n fn get_pref_width(_ctx: &LayoutContext) -> au {\n match self {\n \/\/ TODO: this should account for min\/pref widths of the\n \/\/ box element in isolation. That includes\n \/\/ border\/margin\/padding but not child widths. 
The block\n \/\/ FlowContext will combine the width of this element and\n \/\/ that of its children to arrive at the context width.\n GenericBox(*) => au(0),\n ImageBox(_,i) => au::from_px(i.get_size().get_default(Size2D(0,0)).width),\n\n \/\/ a text box cannot span lines, so assume that this is an unsplit text box.\n\n \/\/ TODO: If text boxes have been split to wrap lines, then\n \/\/ they could report a smaller pref width during incremental reflow.\n \/\/ maybe text boxes should report nothing, and the parent flow could\n \/\/ factor in min\/pref widths of any text runs that it owns.\n TextBox(_,d) => {\n let mut max_line_width: au = au(0);\n do d.run.iter_natural_lines_for_range(d.offset, d.length) |line_offset, line_len| {\n let mut line_width: au = au(0);\n do d.run.glyphs.iter_glyphs_for_range(line_offset, line_len) |_char_i, glyph| {\n line_width += glyph.advance()\n };\n\n if max_line_width < line_width {\n max_line_width = line_width;\n };\n true\n }\n\n max_line_width\n },\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n \/* Returns the amount of left, right \"fringe\" used by this\n box. This should be based on margin, border, padding, width. *\/\n fn get_used_width() -> (au, au) {\n \/\/ TODO: this should actually do some computation!\n \/\/ See CSS 2.1, Section 10.3, 10.4.\n\n (au(0), au(0))\n }\n \n \/* Returns the amount of left, right \"fringe\" used by this\n box. This should be based on margin, border, padding, width. *\/\n fn get_used_height() -> (au, au) {\n \/\/ TODO: this should actually do some computation!\n \/\/ See CSS 2.1, Section 10.5, 10.6.\n\n (au(0), au(0))\n }\n\n \/* The box formed by the content edge, as defined in CSS 2.1 Section 8.1.\n Coordinates are relative to the owning flow. *\/\n pure fn content_box() -> Rect<au> {\n match self {\n ImageBox(_,i) => {\n let size = i.size();\n Rect {\n origin: copy self.d().position.origin,\n size: Size2D(au::from_px(size.width),\n au::from_px(size.height))\n }\n },\n GenericBox(*) => {\n copy self.d().position\n \/* FIXME: The following hits an ICE for whatever reason\n\n let origin = self.d().position.origin;\n let size = self.d().position.size;\n let (offset_left, offset_right) = self.get_used_width();\n let (offset_top, offset_bottom) = self.get_used_height();\n\n Rect {\n origin: Point2D(origin.x + offset_left, origin.y + offset_top),\n size: Size2D(size.width - (offset_left + offset_right),\n size.height - (offset_top + offset_bottom))\n }*\/\n },\n TextBox(*) => {\n copy self.d().position\n },\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n \/* The box formed by the border edge, as defined in CSS 2.1 Section 8.1.\n Coordinates are relative to the owning flow. *\/\n fn border_box() -> Rect<au> {\n \/\/ TODO: actually compute content_box + padding + border\n self.content_box()\n }\n\n \/\/ TODO: to implement stacking contexts correctly, we need to\n \/\/ create a set of display lists, one per each layer of a stacking\n \/\/ context. (CSS 2.1, Section 9.9.1). Each box is passed the list\n \/\/ set representing the box's stacking context. When asked to\n \/\/ construct its constituent display items, each box puts its\n \/\/ DisplayItems into the correct stack layer (according to CSS 2.1\n \/\/ Appendix E). and then builder flattens the list at the end.\n\n \/* Methods for building a display list. 
This is a good candidate\n for a function pointer as the number of boxes explodes.\n\n # Arguments\n\n * `builder` - the display list builder which manages the coordinate system and options.\n * `dirty` - Dirty rectangle, in the coordinate system of the owning flow (self.ctx)\n * `origin` - Total offset from display list root flow to this box's owning flow\n * `list` - List to which items should be appended\n *\/\n fn build_display_list(_builder: &dl::DisplayListBuilder, dirty: &Rect<au>, \n offset: &Point2D<au>, list: &dl::DisplayList) {\n if !self.d().position.intersects(dirty) {\n return;\n }\n\n let bounds : Rect<au> = Rect(self.d().position.origin.add(offset),\n copy self.d().position.size);\n\n match self {\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\",\n TextBox(_,d) => {\n list.push(~dl::Text(bounds, ~(copy *d.run), d.offset, d.length))\n },\n \/\/ TODO: items for background, border, outline\n GenericBox(*) => { },\n ImageBox(_,i) => {\n match i.get_image() {\n Some(image) => list.push(~dl::Image(bounds, arc::clone(&image))),\n \/* No image data at all? Okay, add some fallback content instead. *\/\n None => {\n \/\/ TODO: shouldn't need to unbox CSSValue by now\n let boxed_color = self.d().node.style().background_color;\n let color = match boxed_color {\n Specified(BgColor(c)) => c,\n Specified(BgColorTransparent) | _ => util::color::rgba(0,0,0,0.0)\n };\n list.push(~dl::SolidColor(bounds, color.red, color.green, color.blue));\n }\n }\n }\n }\n }\n}\n\ntrait ImageBoxMethods {\n \n}\n\n\/**\n * The tree holding render box relations. These are only defined for\n * nested CSS boxes that are nested in an otherwise inline flow\n * context.\n*\/\npub enum RenderBoxTree { RenderBoxTree }\n\nimpl RenderBoxTree : tree::ReadMethods<@RenderBox> {\n fn each_child(node: @RenderBox, f: fn(box: @RenderBox) -> bool) {\n tree::each_child(&self, &node, |box| f(*box) )\n }\n\n fn with_tree_fields<R>(b: &@RenderBox, f: fn(&tree::Tree<@RenderBox>) -> R) -> R {\n f(&b.d().tree)\n }\n}\n\nimpl RenderBoxTree : tree::WriteMethods<@RenderBox> {\n fn add_child(parent: @RenderBox, child: @RenderBox) {\n assert !core::box::ptr_eq(parent, child);\n tree::add_child(&self, parent, child)\n }\n\n fn with_tree_fields<R>(b: &@RenderBox, f: fn(&tree::Tree<@RenderBox>) -> R) -> R {\n f(&b.d().tree)\n }\n}\n\nimpl RenderBox : BoxedDebugMethods {\n fn dump(@self) {\n self.dump_indent(0u);\n }\n\n \/* Dumps the node tree, for debugging, with indentation. 
*\/\n fn dump_indent(@self, indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += self.debug_str();\n debug!(\"%s\", s);\n\n for RenderBoxTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n\n fn debug_str(@self) -> ~str {\n let repr = match self {\n @GenericBox(*) => ~\"GenericBox\",\n @ImageBox(*) => ~\"ImageBox\",\n @TextBox(_,d) => fmt!(\"TextBox(text=%s)\", str::substr(d.run.text, d.offset, d.length)),\n @UnscannedTextBox(_,s) => fmt!(\"UnscannedTextBox(%s)\", s)\n };\n\n fmt!(\"box b%?: %?\", self.d().id, repr)\n }\n}\n\n#[cfg(test)]\nmod test {\n use dom::element::{ElementData, HTMLDivElement, HTMLImageElement};\n use dom::node::{Element, NodeScope, Node, NodeKind};\n use dom::rcu::Scope;\n\n \/*\n use sdl;\n use sdl::video;\n\n fn with_screen(f: fn(*sdl::surface)) {\n let screen = video::set_video_mode(\n 320, 200, 32,\n ~[video::hwsurface], ~[video::doublebuf]);\n assert screen != ptr::null();\n\n f(screen);\n\n video::free_surface(screen);\n }\n *\/\n\n fn flat_bounds(root: @RenderBox) -> ~[Rect<au>] {\n let mut r = ~[];\n for tree::each_child(&RenderBoxTree, &root) |c| {\n push_all(&mut r, flat_bounds(*c));\n }\n\n push(&mut r, copy root.d().position);\n\n return r;\n }\n\n \/\/ TODO: redo tests here, but probably is part of box_builder.rs\n}\n\n<commit_msg>Create a trait for RenderBox.<commit_after>\/* Fundamental layout structures and algorithms. *\/\n\nuse arc = std::arc;\nuse arc::ARC;\nuse au = gfx::geometry;\nuse au::au;\nuse core::dvec::DVec;\nuse core::to_str::ToStr;\nuse core::rand;\nuse css::styles::SpecifiedStyle;\nuse css::values::{BoxSizing, Length, Px, CSSDisplay, Specified, BgColor, BgColorTransparent};\nuse dl = gfx::display_list;\nuse dom::element::{ElementKind, HTMLDivElement, HTMLImageElement};\nuse dom::node::{Element, Node, NodeData, NodeKind, NodeTree};\nuse dom::rcu;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse geom::point::Point2D;\nuse image::{Image, ImageHolder};\nuse layout::context::LayoutContext;\nuse layout::debug::BoxedDebugMethods;\nuse layout::flow::FlowContext;\nuse layout::text::TextBoxData;\nuse servo_text::text_run::TextRun;\nuse std::net::url::Url;\nuse task::spawn;\nuse util::color::Color;\nuse util::tree;\nuse vec::{push, push_all};\n\n\/** \nRender boxes (`struct RenderBox`) are the leafs of the layout\ntree. They cannot position themselves. In general, render boxes do not\nhave a simple correspondence with CSS boxes as in the specification:\n\n * Several render boxes may correspond to the same CSS box or DOM\n node. For example, a CSS text box broken across two lines is\n represented by two render boxes.\n\n * Some CSS boxes are not created at all, such as some anonymous block\n boxes induced by inline boxes with block-level sibling boxes. In\n that case, Servo uses an InlineFlow with BlockFlow siblings; the\n InlineFlow is block-level, but not a block container. It is\n positioned as if it were a block box, but its children are\n positioned according to inline flow.\n\nFundamental box types include:\n\n * GenericBox: an empty box that contributes only borders, margins,\npadding, backgrounds. It is analogous to a CSS nonreplaced content box.\n\n * ImageBox: a box that represents a (replaced content) image and its\n accompanying borders, shadows, etc.\n\n * TextBox: a box representing a single run of text with a distinct\n style. A TextBox may be split into two or more render boxes across\n line breaks. Several TextBoxes may correspond to a single DOM text\n node. 
Split text boxes are implemented by referring to subsets of a\n master TextRun object.\n\n*\/\n\n\n\/* A box's kind influences how its styles are interpreted during\n layout. For example, replaced content such as images are resized\n differently than tables, text, or other content.\n\n It also holds data specific to different box types, such as text.\n*\/\n\n\nstruct RenderBoxData {\n \/* references to children, parent inline flow boxes *\/\n tree : tree::Tree<@RenderBox>,\n \/* originating DOM node *\/\n node : Node,\n \/* reference to containing flow context, which this box\n participates in *\/\n ctx : @FlowContext,\n \/* position of this box relative to owning flow *\/\n mut position : Rect<au>,\n font_size : Length,\n \/* TODO (Issue #87): debug only *\/\n mut id: int\n}\n\nenum RenderBoxType {\n RenderBox_Generic,\n RenderBox_Image,\n RenderBox_Text\n}\n\npub enum RenderBox {\n GenericBox(RenderBoxData),\n ImageBox(RenderBoxData, ImageHolder),\n TextBox(RenderBoxData, TextBoxData),\n UnscannedTextBox(RenderBoxData, ~str)\n}\n\ntrait RenderBoxMethods {\n pure fn d(&self) -> &self\/RenderBoxData;\n\n pure fn is_replaced() -> bool;\n pure fn content_box() -> Rect<au>;\n pure fn border_box() -> Rect<au>;\n\n fn get_min_width(&LayoutContext) -> au;\n fn get_pref_width(&LayoutContext) -> au;\n fn get_used_width() -> (au, au);\n fn get_used_height() -> (au, au);\n fn build_display_list(&dl::DisplayListBuilder, dirty: &Rect<au>, \n offset: &Point2D<au>, &dl::DisplayList);\n}\n\nfn RenderBoxData(node: Node, ctx: @FlowContext, id: int) -> RenderBoxData {\n RenderBoxData {\n \/* will be set if box is parented *\/\n tree : tree::empty(),\n node : node,\n mut ctx : ctx,\n mut position : au::zero_rect(),\n font_size: Px(0.0),\n id : id\n }\n}\n\nimpl RenderBox : RenderBoxMethods {\n pure fn d(&self) -> &self\/RenderBoxData {\n match *self {\n GenericBox(ref d) => d,\n ImageBox(ref d, _) => d,\n TextBox(ref d, _) => d,\n UnscannedTextBox(ref d, _) => d,\n }\n }\n\n pure fn is_replaced() -> bool {\n match self {\n ImageBox(*) => true, \/\/ TODO: form elements, etc\n _ => false\n }\n }\n\n \/** In general, these functions are transitively impure because they\n * may cause glyphs to be allocated. For now, it's impure because of \n * holder.get_image()\n *\/\n fn get_min_width(ctx: &LayoutContext) -> au {\n match self {\n \/\/ TODO: this should account for min\/pref widths of the\n \/\/ box element in isolation. That includes\n \/\/ border\/margin\/padding but not child widths. The block\n \/\/ FlowContext will combine the width of this element and\n \/\/ that of its children to arrive at the context width.\n GenericBox(*) => au(0),\n \/\/ TODO: consult CSS 'width', margin, border.\n \/\/ TODO: If image isn't available, consult 'width'.\n ImageBox(_,i) => au::from_px(i.get_size().get_default(Size2D(0,0)).width),\n TextBox(_,d) => d.run.min_width_for_range(ctx, d.offset, d.length),\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n fn get_pref_width(_ctx: &LayoutContext) -> au {\n match self {\n \/\/ TODO: this should account for min\/pref widths of the\n \/\/ box element in isolation. That includes\n \/\/ border\/margin\/padding but not child widths. 
The block\n \/\/ FlowContext will combine the width of this element and\n \/\/ that of its children to arrive at the context width.\n GenericBox(*) => au(0),\n ImageBox(_,i) => au::from_px(i.get_size().get_default(Size2D(0,0)).width),\n\n \/\/ a text box cannot span lines, so assume that this is an unsplit text box.\n\n \/\/ TODO: If text boxes have been split to wrap lines, then\n \/\/ they could report a smaller pref width during incremental reflow.\n \/\/ maybe text boxes should report nothing, and the parent flow could\n \/\/ factor in min\/pref widths of any text runs that it owns.\n TextBox(_,d) => {\n let mut max_line_width: au = au(0);\n do d.run.iter_natural_lines_for_range(d.offset, d.length) |line_offset, line_len| {\n let mut line_width: au = au(0);\n do d.run.glyphs.iter_glyphs_for_range(line_offset, line_len) |_char_i, glyph| {\n line_width += glyph.advance()\n };\n\n if max_line_width < line_width {\n max_line_width = line_width;\n };\n true\n }\n\n max_line_width\n },\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n \/* Returns the amount of left, right \"fringe\" used by this\n box. This should be based on margin, border, padding, width. *\/\n fn get_used_width() -> (au, au) {\n \/\/ TODO: this should actually do some computation!\n \/\/ See CSS 2.1, Section 10.3, 10.4.\n\n (au(0), au(0))\n }\n \n \/* Returns the amount of left, right \"fringe\" used by this\n box. This should be based on margin, border, padding, width. *\/\n fn get_used_height() -> (au, au) {\n \/\/ TODO: this should actually do some computation!\n \/\/ See CSS 2.1, Section 10.5, 10.6.\n\n (au(0), au(0))\n }\n\n \/* The box formed by the content edge, as defined in CSS 2.1 Section 8.1.\n Coordinates are relative to the owning flow. *\/\n pure fn content_box() -> Rect<au> {\n match self {\n ImageBox(_,i) => {\n let size = i.size();\n Rect {\n origin: copy self.d().position.origin,\n size: Size2D(au::from_px(size.width),\n au::from_px(size.height))\n }\n },\n GenericBox(*) => {\n copy self.d().position\n \/* FIXME: The following hits an ICE for whatever reason\n\n let origin = self.d().position.origin;\n let size = self.d().position.size;\n let (offset_left, offset_right) = self.get_used_width();\n let (offset_top, offset_bottom) = self.get_used_height();\n\n Rect {\n origin: Point2D(origin.x + offset_left, origin.y + offset_top),\n size: Size2D(size.width - (offset_left + offset_right),\n size.height - (offset_top + offset_bottom))\n }*\/\n },\n TextBox(*) => {\n copy self.d().position\n },\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\"\n }\n }\n\n \/* The box formed by the border edge, as defined in CSS 2.1 Section 8.1.\n Coordinates are relative to the owning flow. *\/\n pure fn border_box() -> Rect<au> {\n \/\/ TODO: actually compute content_box + padding + border\n self.content_box()\n }\n\n \/\/ TODO: to implement stacking contexts correctly, we need to\n \/\/ create a set of display lists, one per each layer of a stacking\n \/\/ context. (CSS 2.1, Section 9.9.1). Each box is passed the list\n \/\/ set representing the box's stacking context. When asked to\n \/\/ construct its constituent display items, each box puts its\n \/\/ DisplayItems into the correct stack layer (according to CSS 2.1\n \/\/ Appendix E). and then builder flattens the list at the end.\n\n \/* Methods for building a display list. 
This is a good candidate\n for a function pointer as the number of boxes explodes.\n\n # Arguments\n\n * `builder` - the display list builder which manages the coordinate system and options.\n * `dirty` - Dirty rectangle, in the coordinate system of the owning flow (self.ctx)\n * `origin` - Total offset from display list root flow to this box's owning flow\n * `list` - List to which items should be appended\n *\/\n fn build_display_list(_builder: &dl::DisplayListBuilder, dirty: &Rect<au>, \n offset: &Point2D<au>, list: &dl::DisplayList) {\n if !self.d().position.intersects(dirty) {\n return;\n }\n\n let bounds : Rect<au> = Rect(self.d().position.origin.add(offset),\n copy self.d().position.size);\n\n match self {\n UnscannedTextBox(*) => fail ~\"Shouldn't see unscanned boxes here.\",\n TextBox(_,d) => {\n list.push(~dl::Text(bounds, ~(copy *d.run), d.offset, d.length))\n },\n \/\/ TODO: items for background, border, outline\n GenericBox(*) => { },\n ImageBox(_,i) => {\n match i.get_image() {\n Some(image) => list.push(~dl::Image(bounds, arc::clone(&image))),\n \/* No image data at all? Okay, add some fallback content instead. *\/\n None => {\n \/\/ TODO: shouldn't need to unbox CSSValue by now\n let boxed_color = self.d().node.style().background_color;\n let color = match boxed_color {\n Specified(BgColor(c)) => c,\n Specified(BgColorTransparent) | _ => util::color::rgba(0,0,0,0.0)\n };\n list.push(~dl::SolidColor(bounds, color.red, color.green, color.blue));\n }\n }\n }\n }\n }\n}\n\n\/**\n * The tree holding render box relations. These are only defined for\n * nested CSS boxes that are nested in an otherwise inline flow\n * context.\n*\/\npub enum RenderBoxTree { RenderBoxTree }\n\nimpl RenderBoxTree : tree::ReadMethods<@RenderBox> {\n fn each_child(node: @RenderBox, f: fn(box: @RenderBox) -> bool) {\n tree::each_child(&self, &node, |box| f(*box) )\n }\n\n fn with_tree_fields<R>(b: &@RenderBox, f: fn(&tree::Tree<@RenderBox>) -> R) -> R {\n f(&b.d().tree)\n }\n}\n\nimpl RenderBoxTree : tree::WriteMethods<@RenderBox> {\n fn add_child(parent: @RenderBox, child: @RenderBox) {\n assert !core::box::ptr_eq(parent, child);\n tree::add_child(&self, parent, child)\n }\n\n fn with_tree_fields<R>(b: &@RenderBox, f: fn(&tree::Tree<@RenderBox>) -> R) -> R {\n f(&b.d().tree)\n }\n}\n\nimpl RenderBox : BoxedDebugMethods {\n fn dump(@self) {\n self.dump_indent(0u);\n }\n\n \/* Dumps the node tree, for debugging, with indentation. 
*\/\n fn dump_indent(@self, indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += self.debug_str();\n debug!(\"%s\", s);\n\n for RenderBoxTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n\n fn debug_str(@self) -> ~str {\n let repr = match self {\n @GenericBox(*) => ~\"GenericBox\",\n @ImageBox(*) => ~\"ImageBox\",\n @TextBox(_,d) => fmt!(\"TextBox(text=%s)\", str::substr(d.run.text, d.offset, d.length)),\n @UnscannedTextBox(_,s) => fmt!(\"UnscannedTextBox(%s)\", s)\n };\n\n fmt!(\"box b%?: %?\", self.d().id, repr)\n }\n}\n\n#[cfg(test)]\nmod test {\n use dom::element::{ElementData, HTMLDivElement, HTMLImageElement};\n use dom::node::{Element, NodeScope, Node, NodeKind};\n use dom::rcu::Scope;\n\n \/*\n use sdl;\n use sdl::video;\n\n fn with_screen(f: fn(*sdl::surface)) {\n let screen = video::set_video_mode(\n 320, 200, 32,\n ~[video::hwsurface], ~[video::doublebuf]);\n assert screen != ptr::null();\n\n f(screen);\n\n video::free_surface(screen);\n }\n *\/\n\n fn flat_bounds(root: @RenderBox) -> ~[Rect<au>] {\n let mut r = ~[];\n for tree::each_child(&RenderBoxTree, &root) |c| {\n push_all(&mut r, flat_bounds(*c));\n }\n\n push(&mut r, copy root.d().position);\n\n return r;\n }\n\n \/\/ TODO: redo tests here, but probably is part of box_builder.rs\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #78264 - JohnTitor:macro-test, r=petrochenkov<commit_after>\/\/ check-pass\n\/\/ Regression test of #77475, this used to be ICE.\n\n#![feature(decl_macro)]\n\nuse crate as _;\n\npub macro ice(){}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>clean up<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add codegen test for #45964<commit_after>\/\/ This test case checks that slice::{r}position functions do not\n\/\/ prevent optimizing away bounds checks\n\n\/\/ compile-flags: -O\n\n#![crate_type=\"rlib\"]\n\n\/\/ CHECK-LABEL: @test\n#[no_mangle]\npub fn test(y: &[u32], x: &u32, z: &u32) -> bool {\n let result = match y.iter().position(|a| a == x) {\n Some(p) => Ok(p),\n None => Err(()),\n };\n\n if let Ok(p) = result {\n \/\/ CHECK-NOT: panic\n y[p] == *z\n } else {\n false\n }\n}\n\n\/\/ CHECK-LABEL: @rtest\n#[no_mangle]\npub fn rtest(y: &[u32], x: &u32, z: &u32) -> bool {\n let result = match y.iter().rposition(|a| a == x) {\n Some(p) => Ok(p),\n None => Err(()),\n };\n\n if let Ok(p) = result {\n \/\/ CHECK-NOT: panic\n y[p] == *z\n } else {\n false\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat: add one more example<commit_after>extern crate telegram_bot_tokio;\nextern crate tokio_core;\nextern crate futures;\n\nuse std::env;\n\nuse futures::{Future, Stream};\nuse tokio_core::reactor::{Core, Handle};\nuse telegram_bot_tokio::{Api, Message, ParseMode, MessageKind, UpdateKind};\nuse telegram_bot_tokio::prelude::*;\n\nfn message_test(api: &Api, message: &Message, handle: &Handle) {\n let simple = api.send(&message.text_reply(\"Simple message\"));\n\n let markdown = api.send(&message.text_reply(\"`Markdown message`\")\n .parse_mode(ParseMode::Markdown)\n );\n\n let html = api.send(&message.text_reply(\"<b>Bold HTML message<\/b>\")\n .parse_mode(ParseMode::Html)\n );\n\n handle.spawn({\n let future = simple\n .and_then(|_| markdown)\n .and_then(|_| html);\n\n future.map_err(|_| ()).map(|_| ())\n })\n}\n\nfn preview_test(api: &Api, message: &Message, handle: &Handle) {\n let preview = api.send(&message.text_reply(\"Message with preview 
https:\/\/telegram.org\"));\n\n let no_preview = api.send(&message.text_reply(\"Message without preview https:\/\/telegram.org\")\n .disable_web_page_preview()\n );\n\n handle.spawn({\n let future = preview.and_then(|_| no_preview);\n\n future.map_err(|_| ()).map(|_| ())\n })\n}\n\nfn reply_test(api: &Api, message: &Message, handle: &Handle) {\n let msg = api.send(&message.text_reply(\"Reply to message\"));\n let chat = api.send(&message.chat.text(\"Text to message chat\"));\n\n let private = message.from.as_ref().map(|from| {\n api.send(&from.text(\"Private text\"))\n });\n\n handle.spawn({\n let future = msg.and_then(|_| chat).and_then(|_| private);\n\n future.map_err(|_| ()).map(|_| ())\n })\n}\n\nfn test(api: &Api, message: &Message, handle: &Handle) {\n if let MessageKind::Text {ref data, ..} = message.kind {\n match data.as_str() {\n \"\/message\" => message_test(api, message, handle),\n \"\/preview\" => preview_test(api, message, handle),\n \"\/reply\" => reply_test(api, message, handle),\n _ => (),\n }\n }\n}\n\nfn main() {\n let token = env::var(\"TELEGRAM_BOT_TOKEN\").unwrap();\n\n let mut core = Core::new().unwrap();\n let handle = core.handle();\n\n let api = Api::from_token(&core.handle(), &token).unwrap();\n\n let future = api.stream().for_each(|update| {\n if let UpdateKind::Message(message) = update.kind {\n test(&api, &message, &handle)\n }\n Ok(())\n });\n\n core.run(future).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Attempting more functionality.<commit_after><|endoftext|>"} {"text":"<commit_before>extern mod std;\n\nuse libc::c_char;\nuse libc::c_int;\nuse libc::size_t;\nuse libc::c_ulong;\nuse libc::c_void;\nuse ptr::null;\nuse ptr::addr_of;\nuse str::as_c_str;\nuse str::to_str;\n\nstruct mpz_struct {\n _mp_alloc: c_int,\n _mp_size: c_int,\n _mp_d: *c_void\n}\n\ntype mpz_t = *mpz_struct;\n\nextern mod gmp {\n fn __gmpz_init(x: mpz_t);\n fn __gmpz_clear(x: mpz_t);\n fn __gmpz_set_str(rop: mpz_t, str: *c_char, base: c_int) -> c_int;\n fn __gmpz_get_str(str: *c_char, base: c_int, op: mpz_t) -> *c_char;\n fn __gmpz_sizeinbase(op: mpz_t, base: c_int) -> size_t;\n fn __gmpz_cmp(op: mpz_t, op2: mpz_t) -> c_int;\n fn __gmpz_cmp_ui(op1: mpz_t, op2: c_ulong) -> c_int;\n fn __gmpz_add(rop: mpz_t, op1: mpz_t, op2: mpz_t);\n fn __gmpz_sub(rop: mpz_t, op1: mpz_t, op2: mpz_t);\n fn __gmpz_mul(rop: mpz_t, op1: mpz_t, op2: mpz_t);\n fn __gmpz_neg(rop: mpz_t, op: mpz_t);\n fn __gmpz_tdiv_q(r: mpz_t, n: mpz_t, d: mpz_t);\n fn __gmpz_mod(r: mpz_t, n: mpz_t, d: mpz_t);\n}\n\nuse gmp::*;\n\npub struct Mpz {\n priv mpz: mpz_struct,\n\n drop {\n __gmpz_clear(addr_of(&self.mpz));\n }\n}\n\nimpl Mpz {\n fn set_str(&self, s: &str, base: int) -> bool {\n let mpz = addr_of(&self.mpz);\n let r = as_c_str(s, { |s| __gmpz_set_str(mpz, s, base as c_int) });\n r == 0\n }\n\n fn size_in_base(&self, base: int) -> uint {\n __gmpz_sizeinbase(addr_of(&self.mpz), base as c_int) as uint\n }\n}\n\nimpl Mpz: cmp::Eq {\n pure fn eq(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) == 0\n }\n pure fn ne(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) != 0\n }\n}\n\nimpl Mpz: cmp::Ord {\n pure fn lt(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) < 0\n }\n pure fn le(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) <= 0\n }\n pure fn gt(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) > 0\n }\n pure fn ge(other: &Mpz) 
-> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) >= 0\n }\n}\n\nimpl Mpz: num::Num {\n pure fn add(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_add(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn sub(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_sub(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn mul(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_mul(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn div(other: &Mpz) -> Mpz unsafe {\n if __gmpz_cmp_ui(addr_of(&self.mpz), 0) == 0 {\n fail ~\"divide by zero\";\n }\n\n let res = init();\n __gmpz_tdiv_q(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn modulo(other: &Mpz) -> Mpz unsafe {\n if __gmpz_cmp_ui(addr_of(&self.mpz), 0) == 0 {\n fail ~\"divide by zero\";\n }\n\n let res = init();\n __gmpz_mod(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn neg() -> Mpz unsafe {\n let res = init();\n __gmpz_neg(addr_of(&res.mpz), addr_of(&self.mpz));\n res\n }\n pure fn to_int() -> int {\n fail ~\"not implemented\";\n }\n static pure fn from_int(other: int) -> Mpz unsafe {\n fail ~\"not implemented\";\n }\n}\n\npub fn init() -> Mpz {\n let mpz = mpz_struct { _mp_alloc: 0, _mp_size: 0, _mp_d: null() };\n __gmpz_init(addr_of(&mpz));\n Mpz { mpz: mpz }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn size_in_base() {\n let x = init();\n x.set_str(\"150000\", 10);\n assert(x.size_in_base(10) == 6);\n }\n\n #[test]\n fn eq() {\n let x = init();\n x.set_str(\"4242142195\", 10);\n let y = init();\n y.set_str(\"4242142195\", 10);\n let z = init();\n z.set_str(\"4242142196\", 10);\n\n assert(x == y);\n assert(x != z);\n assert(y != z);\n }\n\n #[test]\n fn ord() {\n let x = init();\n x.set_str(\"40000000000000000000000\", 10);\n let y = init();\n y.set_str(\"45000000000000000000000\", 10);\n let z = init();\n z.set_str(\"50000000000000000000000\", 10);\n\n assert(x < y && x < z && y < z);\n assert(x <= x && x <= y && x <= z && y <= z);\n assert(z > y && z > x && y > x);\n assert(z >= z && z >= y && z >= x && y >= x);\n }\n\n #[test]\n #[should_fail]\n fn div_zero() {\n let x = init();\n x \/ x;\n }\n\n #[test]\n #[should_fail]\n fn modulo_zero() {\n let x = init();\n x % x;\n }\n\n #[test]\n fn test_div_round() {\n let x = init();\n let y = init();\n let mut z: Mpz;\n\n x.set_str(\"2\", 10);\n y.set_str(\"3\", 10);\n z = x \/ y;\n assert(__gmpz_cmp_ui(addr_of(&z.mpz), 2 \/ 3) == 0);\n\n x.set_str(\"2\", 10);\n y.set_str(\"-3\", 10);\n z = x \/ y;\n assert(__gmpz_cmp_ui(addr_of(&z.mpz), 2 \/ -3) == 0);\n }\n}\n<commit_msg>implement to_str trait<commit_after>extern mod std;\n\nuse libc::c_char;\nuse libc::c_int;\nuse libc::size_t;\nuse libc::c_ulong;\nuse libc::c_void;\nuse ptr::null;\nuse ptr::addr_of;\nuse str::as_c_str;\n\nstruct mpz_struct {\n _mp_alloc: c_int,\n _mp_size: c_int,\n _mp_d: *c_void\n}\n\ntype mpz_t = *mpz_struct;\n\nextern mod gmp {\n fn __gmpz_init(x: mpz_t);\n fn __gmpz_clear(x: mpz_t);\n fn __gmpz_set_str(rop: mpz_t, str: *c_char, base: c_int) -> c_int;\n fn __gmpz_get_str(str: *c_char, base: c_int, op: mpz_t) -> *c_char;\n fn __gmpz_sizeinbase(op: mpz_t, base: c_int) -> size_t;\n fn __gmpz_cmp(op: mpz_t, op2: mpz_t) -> c_int;\n fn __gmpz_cmp_ui(op1: mpz_t, op2: c_ulong) -> c_int;\n fn __gmpz_add(rop: mpz_t, op1: mpz_t, op2: mpz_t);\n fn __gmpz_sub(rop: mpz_t, op1: mpz_t, op2: mpz_t);\n fn __gmpz_mul(rop: mpz_t, op1: mpz_t, op2: 
mpz_t);\n fn __gmpz_neg(rop: mpz_t, op: mpz_t);\n fn __gmpz_tdiv_q(r: mpz_t, n: mpz_t, d: mpz_t);\n fn __gmpz_mod(r: mpz_t, n: mpz_t, d: mpz_t);\n}\n\nuse gmp::*;\n\npub struct Mpz {\n priv mpz: mpz_struct,\n\n drop {\n __gmpz_clear(addr_of(&self.mpz));\n }\n}\n\nimpl Mpz {\n fn set_str(&self, s: &str, base: int) -> bool {\n let mpz = addr_of(&self.mpz);\n let r = as_c_str(s, { |s| __gmpz_set_str(mpz, s, base as c_int) });\n r == 0\n }\n\n fn size_in_base(&self, base: int) -> uint {\n __gmpz_sizeinbase(addr_of(&self.mpz), base as c_int) as uint\n }\n}\n\nimpl Mpz: cmp::Eq {\n pure fn eq(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) == 0\n }\n pure fn ne(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) != 0\n }\n}\n\nimpl Mpz: cmp::Ord {\n pure fn lt(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) < 0\n }\n pure fn le(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) <= 0\n }\n pure fn gt(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) > 0\n }\n pure fn ge(other: &Mpz) -> bool unsafe {\n __gmpz_cmp(addr_of(&self.mpz), addr_of(&other.mpz)) >= 0\n }\n}\n\nimpl Mpz: num::Num {\n pure fn add(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_add(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn sub(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_sub(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn mul(other: &Mpz) -> Mpz unsafe {\n let res = init();\n __gmpz_mul(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn div(other: &Mpz) -> Mpz unsafe {\n if __gmpz_cmp_ui(addr_of(&self.mpz), 0) == 0 {\n fail ~\"divide by zero\";\n }\n\n let res = init();\n __gmpz_tdiv_q(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn modulo(other: &Mpz) -> Mpz unsafe {\n if __gmpz_cmp_ui(addr_of(&self.mpz), 0) == 0 {\n fail ~\"divide by zero\";\n }\n\n let res = init();\n __gmpz_mod(addr_of(&res.mpz), addr_of(&self.mpz), addr_of(&other.mpz));\n res\n }\n pure fn neg() -> Mpz unsafe {\n let res = init();\n __gmpz_neg(addr_of(&res.mpz), addr_of(&self.mpz));\n res\n }\n pure fn to_int() -> int {\n fail ~\"not implemented\";\n }\n static pure fn from_int(other: int) -> Mpz unsafe {\n fail ~\"not implemented\";\n }\n}\n\nimpl Mpz : to_str::ToStr {\n pure fn to_str() -> ~str unsafe {\n let length = self.size_in_base(10) + 2;\n let dst = vec::to_mut(vec::from_elem(length, '0'));\n let pdst = vec::raw::to_ptr(dst);\n\n str::raw::from_c_str(__gmpz_get_str(pdst as *c_char, 10, addr_of(&self.mpz)))\n }\n}\n\npub fn init() -> Mpz {\n let mpz = mpz_struct { _mp_alloc: 0, _mp_size: 0, _mp_d: null() };\n __gmpz_init(addr_of(&mpz));\n Mpz { mpz: mpz }\n}\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn size_in_base() {\n let x = init();\n x.set_str(\"150000\", 10);\n assert(x.size_in_base(10) == 6);\n }\n\n #[test]\n fn eq() {\n let x = init();\n x.set_str(\"4242142195\", 10);\n let y = init();\n y.set_str(\"4242142195\", 10);\n let z = init();\n z.set_str(\"4242142196\", 10);\n\n assert(x == y);\n assert(x != z);\n assert(y != z);\n }\n\n #[test]\n fn ord() {\n let x = init();\n x.set_str(\"40000000000000000000000\", 10);\n let y = init();\n y.set_str(\"45000000000000000000000\", 10);\n let z = init();\n z.set_str(\"50000000000000000000000\", 10);\n\n assert(x < y && x < z && y < z);\n assert(x <= x && x <= y && x <= z && y 
<= z);\n assert(z > y && z > x && y > x);\n assert(z >= z && z >= y && z >= x && y >= x);\n }\n\n #[test]\n #[should_fail]\n fn div_zero() {\n let x = init();\n x \/ x;\n }\n\n #[test]\n #[should_fail]\n fn modulo_zero() {\n let x = init();\n x % x;\n }\n\n #[test]\n fn test_div_round() {\n let x = init();\n let y = init();\n let mut z: Mpz;\n\n x.set_str(\"2\", 10);\n y.set_str(\"3\", 10);\n z = x \/ y;\n assert(__gmpz_cmp_ui(addr_of(&z.mpz), 2 \/ 3) == 0);\n\n x.set_str(\"2\", 10);\n y.set_str(\"-3\", 10);\n z = x \/ y;\n assert(__gmpz_cmp_ui(addr_of(&z.mpz), 2 \/ -3) == 0);\n }\n\n #[test]\n fn to_str() {\n let x = init();\n x.set_str(\"1234567890\", 10);\n assert(x.to_str() == ~\"1234567890\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse clap::ArgMatches;\n\nuse runtime::Runtime;\nuse module::Module;\n\nuse storage::file::hash::FileHash;\nuse storage::file::id::FileID;\nuse storage::parser::FileHeaderParser;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\n\nmod header;\n\npub struct BM<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> BM<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> BM<'a> {\n BM {\n rt: rt,\n }\n }\n\n fn runtime(&self) -> &Runtime {\n &self.rt\n }\n\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let url = matches.value_of(\"url\").map(String::from).unwrap(); \/\/ clap ensures this is present\n let tags = matches.value_of(\"tags\").and_then(|s| {\n Some(s.split(\",\").map(String::from).collect())\n }).unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" url = '{:?}'\", url);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(url, tags);\n\n let fileid = self.rt.store().new_file_with_header(self, header);\n self.rt.store().load(&fileid).and_then(|file| {\n info!(\"Created file in memory: {}\", fileid);\n Some(self.rt.store().persist(&parser, file))\n }).unwrap_or(false)\n }\n\n fn command_list(&self, matches: &ArgMatches) -> bool {\n use ui::file::{FilePrinter, TablePrinter};\n use self::header::get_url_from_header;\n use self::header::get_tags_from_header;\n use std::ops::Deref;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let files = self.rt.store().load_for_module(self, &parser);\n let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());\n\n printer.print_files_custom(files.into_iter(),\n &|file| {\n let fl = file.deref().borrow();\n let hdr = fl.header();\n let url = get_url_from_header(hdr).unwrap_or(String::from(\"Parser error\"));\n let tags = get_tags_from_header(hdr);\n\n debug!(\"Custom printer field: url = '{:?}'\", url);\n debug!(\"Custom printer field: tags = '{:?}'\", tags);\n\n vec![url, tags.join(\", \")]\n }\n );\n true\n }\n\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n use std::process::exit;\n\n let result =\n if matches.is_present(\"id\") {\n debug!(\"Removing by ID (Hash)\");\n let hash = FileHash::from(matches.value_of(\"id\").unwrap());\n self.remove_by_hash(hash)\n } else if matches.is_present(\"tags\") {\n debug!(\"Removing by tags\");\n let tags = matches.value_of(\"tags\")\n .unwrap()\n .split(\",\")\n .map(String::from)\n .collect::<Vec<String>>();\n self.remove_by_tags(tags)\n } else if matches.is_present(\"match\") {\n debug!(\"Removing by match\");\n self.remove_by_match(String::from(matches.value_of(\"match\").unwrap()))\n } else {\n error!(\"Unexpected error. 
Exiting\");\n exit(1);\n false\n };\n\n if result {\n info!(\"Removing succeeded\");\n } else {\n info!(\"Removing failed\");\n }\n\n return result;\n }\n\n fn remove_by_hash(&self, hash: FileHash) -> bool {\n use std::ops::Deref;\n\n debug!(\"Removing for hash = '{:?}'\", hash);\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let file = self.rt.store().load_by_hash(self, &parser, hash);\n debug!(\"file = {:?}\", file);\n file.map(|file| {\n debug!(\"File loaded, can remove now: {:?}\", file);\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).unwrap_or(false)\n }\n\n fn remove_by_tags(&self, tags: Vec<String>) -> bool {\n use std::fs::remove_file;\n use std::ops::Deref;\n use self::header::get_tags_from_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| {\n let f = file.deref().borrow();\n get_tags_from_header(f.header()).iter().any(|tag| {\n tags.iter().any(|remtag| remtag == tag)\n })\n }).map(|file| {\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).all(|x| x)\n }\n\n fn remove_by_match(&self, matcher: String) -> bool {\n use self::header::get_url_from_header;\n use std::fs::remove_file;\n use std::ops::Deref;\n use std::process::exit;\n use regex::Regex;\n\n let re = Regex::new(&matcher[..]).unwrap_or_else(|e| {\n error!(\"Cannot build regex out of '{}'\", matcher);\n error!(\"{}\", e);\n exit(1);\n });\n\n debug!(\"Compiled '{}' to regex: '{:?}'\", matcher, re);\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| {\n let f = file.deref().borrow();\n let url = get_url_from_header(f.header());\n debug!(\"url = {:?}\", url);\n url.map(|u| {\n debug!(\"Matching '{}' ~= '{}'\", re.as_str(), u);\n re.is_match(&u[..])\n }).unwrap_or(false)\n }).map(|file| {\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).all(|x| x)\n }\n\n}\n\nimpl<'a> Module<'a> for BM<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"No command given, doing nothing\");\n false\n },\n }\n }\n\n fn name(&self) -> &'static str {\n \"bookmark\"\n }\n}\n\nimpl<'a> Debug for BM<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"BM\");\n Ok(())\n }\n\n}\n\n<commit_msg>BM::command_add(): Add URL verification<commit_after>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse clap::ArgMatches;\n\nuse runtime::Runtime;\nuse module::Module;\n\nuse storage::Store;\nuse storage::file::hash::FileHash;\nuse storage::file::id::FileID;\nuse storage::parser::FileHeaderParser;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\n\nmod header;\n\npub struct BM<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> BM<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> BM<'a> {\n BM {\n rt: rt,\n }\n }\n\n fn runtime(&self) -> &Runtime {\n &self.rt\n }\n\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use std::process::exit;\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let url = 
matches.value_of(\"url\").map(String::from).unwrap(); \/\/ clap ensures this is present\n\n if !self.validate_url(&url, &parser) {\n error!(\"URL validation failed, exiting.\");\n exit(1);\n } else {\n debug!(\"Verification succeeded\");\n }\n\n let tags = matches.value_of(\"tags\").and_then(|s| {\n Some(s.split(\",\").map(String::from).collect())\n }).unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" url = '{:?}'\", url);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(url, tags);\n\n let fileid = self.rt.store().new_file_with_header(self, header);\n self.rt.store().load(&fileid).and_then(|file| {\n info!(\"Created file in memory: {}\", fileid);\n Some(self.rt.store().persist(&parser, file))\n }).unwrap_or(false)\n }\n\n fn validate_url<HP>(&self, url: &String, parser: &Parser<HP>) -> bool\n where HP: FileHeaderParser\n {\n use self::header::get_url_from_header;\n use std::ops::Deref;\n use util::is_url;\n\n if !is_url(url) {\n error!(\"Url '{}' is not a valid URL. Will not store.\", url);\n return false;\n }\n\n let is_in_store = self.rt\n .store()\n .load_for_module(self, parser)\n .iter()\n .any(|file| {\n let f = file.deref().borrow();\n get_url_from_header(f.header()).map(|url_in_store| {\n &url_in_store == url\n }).unwrap_or(false)\n });\n\n if is_in_store {\n error!(\"URL '{}' seems to be in the store already\", url);\n return false;\n }\n\n return true;\n }\n\n fn command_list(&self, matches: &ArgMatches) -> bool {\n use ui::file::{FilePrinter, TablePrinter};\n use self::header::get_url_from_header;\n use self::header::get_tags_from_header;\n use std::ops::Deref;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let files = self.rt.store().load_for_module(self, &parser);\n let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());\n\n printer.print_files_custom(files.into_iter(),\n &|file| {\n let fl = file.deref().borrow();\n let hdr = fl.header();\n let url = get_url_from_header(hdr).unwrap_or(String::from(\"Parser error\"));\n let tags = get_tags_from_header(hdr);\n\n debug!(\"Custom printer field: url = '{:?}'\", url);\n debug!(\"Custom printer field: tags = '{:?}'\", tags);\n\n vec![url, tags.join(\", \")]\n }\n );\n true\n }\n\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n use std::process::exit;\n\n let result =\n if matches.is_present(\"id\") {\n debug!(\"Removing by ID (Hash)\");\n let hash = FileHash::from(matches.value_of(\"id\").unwrap());\n self.remove_by_hash(hash)\n } else if matches.is_present(\"tags\") {\n debug!(\"Removing by tags\");\n let tags = matches.value_of(\"tags\")\n .unwrap()\n .split(\",\")\n .map(String::from)\n .collect::<Vec<String>>();\n self.remove_by_tags(tags)\n } else if matches.is_present(\"match\") {\n debug!(\"Removing by match\");\n self.remove_by_match(String::from(matches.value_of(\"match\").unwrap()))\n } else {\n error!(\"Unexpected error. 
Exiting\");\n exit(1);\n false\n };\n\n if result {\n info!(\"Removing succeeded\");\n } else {\n info!(\"Removing failed\");\n }\n\n return result;\n }\n\n fn remove_by_hash(&self, hash: FileHash) -> bool {\n use std::ops::Deref;\n\n debug!(\"Removing for hash = '{:?}'\", hash);\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let file = self.rt.store().load_by_hash(self, &parser, hash);\n debug!(\"file = {:?}\", file);\n file.map(|file| {\n debug!(\"File loaded, can remove now: {:?}\", file);\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).unwrap_or(false)\n }\n\n fn remove_by_tags(&self, tags: Vec<String>) -> bool {\n use std::fs::remove_file;\n use std::ops::Deref;\n use self::header::get_tags_from_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| {\n let f = file.deref().borrow();\n get_tags_from_header(f.header()).iter().any(|tag| {\n tags.iter().any(|remtag| remtag == tag)\n })\n }).map(|file| {\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).all(|x| x)\n }\n\n fn remove_by_match(&self, matcher: String) -> bool {\n use self::header::get_url_from_header;\n use std::fs::remove_file;\n use std::ops::Deref;\n use std::process::exit;\n use regex::Regex;\n\n let re = Regex::new(&matcher[..]).unwrap_or_else(|e| {\n error!(\"Cannot build regex out of '{}'\", matcher);\n error!(\"{}\", e);\n exit(1);\n });\n\n debug!(\"Compiled '{}' to regex: '{:?}'\", matcher, re);\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| {\n let f = file.deref().borrow();\n let url = get_url_from_header(f.header());\n debug!(\"url = {:?}\", url);\n url.map(|u| {\n debug!(\"Matching '{}' ~= '{}'\", re.as_str(), u);\n re.is_match(&u[..])\n }).unwrap_or(false)\n }).map(|file| {\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n }).all(|x| x)\n }\n\n}\n\nimpl<'a> Module<'a> for BM<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"No command given, doing nothing\");\n false\n },\n }\n }\n\n fn name(&self) -> &'static str {\n \"bookmark\"\n }\n}\n\nimpl<'a> Debug for BM<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"BM\");\n Ok(())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simplified version.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #51358 - barzamin:tests-39963, r=oli-obk<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test case from #39963.\n\n#![feature(nll)]\n\n#[derive(Clone)]\nstruct Foo(Option<Box<Foo>>, Option<Box<Foo>>);\n\nfn test(f: &mut Foo) {\n match *f {\n Foo(Some(ref mut left), Some(ref mut right)) => match **left {\n Foo(Some(ref mut left), Some(ref mut right)) => panic!(),\n _ => panic!(),\n },\n _ => panic!(),\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Non compiling naive algebraic datatype<commit_after>enum Tree {\n Empty,\n Leaf(i32),\n Node(Tree, Tree)\n}\n\nfn main() {\n let tree: Tree = Tree::Empty;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n#![cfg_attr(not(feature = \"clippy\"), allow(unknown_lints))]\n#![cfg_attr(not(feature = \"clippy\"), allow(unit_arg))]\n#![allow(doc_markdown)]\n\nextern crate devicemapper;\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate clap;\n#[cfg(feature = \"dbus_enabled\")]\nextern crate dbus;\nextern crate env_logger;\nextern crate libc;\nextern crate libudev;\nextern crate nix;\n\nuse std::cell::RefCell;\nuse std::env;\nuse std::fs::{File, OpenOptions};\nuse std::io::{ErrorKind, Read, Write};\nuse std::os::unix::io::AsRawFd;\nuse std::path::PathBuf;\nuse std::process::exit;\nuse std::rc::Rc;\n\nuse clap::{App, Arg, ArgMatches};\nuse env_logger::LogBuilder;\nuse libc::pid_t;\nuse log::{LogLevelFilter, SetLoggerError};\nuse nix::fcntl::{flock, FlockArg};\nuse nix::unistd::getpid;\n\n#[cfg(feature = \"dbus_enabled\")]\nuse dbus::WatchEvent;\n\nuse devicemapper::Device;\n\nuse libstratis::engine::{Engine, SimEngine, StratEngine};\nuse libstratis::stratis::{StratisError, StratisResult, VERSION};\n\nconst STRATISD_PID_PATH: &str = \"\/var\/run\/stratisd.pid\";\n\n\/\/\/ If writing a program error to stderr fails, panic.\nfn print_err(err: &StratisError) -> () {\n eprintln!(\"{}\", err);\n}\n\n\/\/\/ Configure and initialize the logger.\n\/\/\/ If debug is true, log at debug level. Otherwise read log configuration\n\/\/\/ parameters from the environment if RUST_LOG is set. Otherwise, just\n\/\/\/ accept the default configuration.\nfn initialize_log(debug: bool) -> Result<(), SetLoggerError> {\n let mut builder = LogBuilder::new();\n if debug {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n };\n\n builder.init()\n}\n\n\/\/\/ Given a udev event check to see if it's an add and if it is return the device node and\n\/\/\/ devicemapper::Device.\nfn handle_udev_add(event: &libudev::Event) -> Option<(Device, PathBuf)> {\n if event.event_type() == libudev::EventType::Add {\n let device = event.device();\n return device.devnode().and_then(|devnode| {\n device\n .devnum()\n .and_then(|devnum| Some((Device::from(devnum), PathBuf::from(devnode))))\n });\n }\n None\n}\n\n\/\/\/ To ensure only one instance of stratisd runs at a time, acquire an\n\/\/\/ exclusive lock. 
Return an error if lock attempt fails.\nfn trylock_pid_file() -> StratisResult<File> {\n let mut f = match OpenOptions::new()\n .read(true)\n .write(true)\n .create(true)\n .open(STRATISD_PID_PATH)\n {\n Ok(f) => f,\n Err(e) => {\n if e.kind() == ErrorKind::PermissionDenied {\n return Err(StratisError::Error(\n \"Must be running as root in order to start daemon.\".to_string(),\n ));\n }\n return Err(e.into());\n }\n };\n match flock(f.as_raw_fd(), FlockArg::LockExclusiveNonblock) {\n Ok(_) => {\n f.write_all(format!(\"{}\\n\", getpid()).as_bytes())?;\n Ok(f)\n }\n Err(_) => {\n let mut buf = String::new();\n f.read_to_string(&mut buf)?;\n \/\/ pidfile is supposed to contain pid of holder. But you never\n \/\/ know so be paranoid.\n let pid_str = buf.split_whitespace()\n .next()\n .and_then(|s| s.parse::<pid_t>().ok())\n .map(|pid| format!(\"{}\", pid))\n .unwrap_or_else(|| \"<unknown>\".into());\n Err(StratisError::Error(format!(\n \"Daemon already running with pid: {}\",\n pid_str\n )))\n }\n }\n}\n\nfn run(matches: &ArgMatches) -> StratisResult<()> {\n \/\/ Setup a udev listener before initializing the engine. A device may\n \/\/ appear after the engine has read the \/dev directory but before it has\n \/\/ completed initialization. Unless the udev event has been recorded, the\n \/\/ engine will miss the device.\n \/\/ This is especially important since stratisd must run during early boot.\n let context = libudev::Context::new()?;\n let mut monitor = libudev::Monitor::new(&context)?;\n monitor.match_subsystem_devtype(\"block\", \"disk\")?;\n let mut udev = monitor.listen()?;\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::default()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::initialize()?))\n }\n };\n\n \/*\n The file descriptor array indexes are laid out in the following:\n\n 0 == Always udev fd index\n 1 == engine index if eventable\n 1\/2 == Start of dbus client file descriptor(s), 1 if engine is not eventable, else 2\n *\/\n const FD_INDEX_UDEV: usize = 0;\n const FD_INDEX_ENGINE: usize = 1;\n\n \/*\n fds is a Vec of libc::pollfd structs. Ideally, it would be possible\n to use the higher level nix crate to handle polling. If this were possible,\n then the Vec would be one of nix::poll::PollFds and this would be more\n rustic. Unfortunately, the rust D-Bus library requires an explicit file\n descriptor to be passed as an argument to Connection::watch_handle(),\n and the explicit file descriptor can not be extracted from the PollFd\n struct. 
So, at this time, sticking with libc is less complex than\n converting to using nix, because if using nix, the file descriptor would\n have to be maintained in the Vec as well as the PollFd struct.\n *\/\n let mut fds = Vec::new();\n\n fds.push(libc::pollfd {\n fd: udev.as_raw_fd(),\n revents: 0,\n events: libc::POLLIN,\n });\n\n let eventable = engine.borrow().get_eventable();\n\n let poll_timeout = match eventable {\n Some(ref evt) => {\n fds.push(libc::pollfd {\n fd: evt.get_pollable_fd(),\n revents: 0,\n events: libc::POLLIN,\n });\n -1\n }\n None => 10000,\n };\n\n #[cfg(feature = \"dbus_enabled\")]\n let dbus_client_index_start = if eventable.is_some() {\n FD_INDEX_ENGINE + 1\n } else {\n FD_INDEX_ENGINE\n };\n\n #[cfg(feature = \"dbus_enabled\")]\n let (dbus_conn, mut tree, base_object_path, dbus_context) =\n libstratis::dbus_api::connect(Rc::clone(&engine))?;\n\n #[cfg(feature = \"dbus_enabled\")]\n for (_, pool_uuid, pool) in engine.borrow().pools() {\n libstratis::dbus_api::register_pool(\n &dbus_conn,\n &dbus_context,\n &mut tree,\n pool_uuid,\n pool,\n &base_object_path,\n )?;\n }\n\n loop {\n \/\/ Process any udev block events\n if fds[FD_INDEX_UDEV].revents != 0 {\n while let Some(event) = udev.receive_event() {\n if let Some((device, devnode)) = handle_udev_add(&event) {\n \/\/ If block evaluate returns an error we are going to ignore it as\n \/\/ there is nothing we can do for a device we are getting errors with.\n #[cfg(not(feature = \"dbus_enabled\"))]\n let _ = engine.borrow_mut().block_evaluate(device, devnode);\n\n #[cfg(feature = \"dbus_enabled\")]\n {\n let pool_uuid = engine\n .borrow_mut()\n .block_evaluate(device, devnode)\n .unwrap_or(None);\n\n if let Some(pool_uuid) = pool_uuid {\n libstratis::dbus_api::register_pool(\n &dbus_conn,\n &dbus_context,\n &mut tree,\n pool_uuid,\n engine\n .borrow()\n .get_pool(pool_uuid)\n .expect(\n \"block_evaluate() returned a pool UUID, pool must be available\",\n )\n .1,\n &base_object_path,\n )?;\n }\n }\n }\n }\n }\n\n \/\/ Handle engine events, if the engine is eventable\n match eventable {\n Some(ref evt) => {\n if fds[FD_INDEX_ENGINE].revents != 0 {\n evt.clear_event()?;\n engine.borrow_mut().evented()?;\n }\n }\n None => {\n \/\/ Unconditionally call evented() if engine has no eventable.\n \/\/ This looks like a bad idea, but the only engine that has\n \/\/ no eventable is the sim engine, and for that engine,\n \/\/ evented() is essentially a no-op.\n engine.borrow_mut().evented()?;\n }\n }\n\n \/\/ Iterate through D-Bus file descriptors (if enabled)\n #[cfg(feature = \"dbus_enabled\")]\n {\n for pfd in fds[dbus_client_index_start..]\n .iter()\n .filter(|pfd| pfd.revents != 0)\n {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) =\n libstratis::dbus_api::handle(&dbus_conn, &item, &mut tree, &dbus_context)\n {\n print_err(&From::from(r));\n }\n }\n }\n\n \/\/ Refresh list of dbus fds to poll for every time. 
This can change as\n \/\/ D-Bus clients come and go.\n fds.truncate(dbus_client_index_start);\n\n fds.extend(dbus_conn.watch_fds().iter().map(|w| w.to_pollfd()));\n }\n\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, poll_timeout) };\n\n \/\/ TODO: refine this behavior.\n \/\/ Different behaviors may be indicated, depending on the value of\n \/\/ errno when return value is -1.\n if r < 0 {\n return Err(StratisError::Error(format!(\n \"poll command failed: number of fds: {}, timeout: {}\",\n fds.len(),\n poll_timeout\n )));\n }\n }\n}\n\nfn main() {\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(\n Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"),\n )\n .arg(\n Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"),\n )\n .get_matches();\n\n let result = trylock_pid_file()\n .and_then(|_pidfile| {\n Ok(initialize_log(matches.is_present(\"debug\"))\n .expect(\"This is the first and only invocation of this method; it must succeed.\"))\n })\n .and_then(|_| run(&matches));\n if let Err(err) = result {\n print_err(&err);\n exit(1);\n } else {\n exit(0);\n }\n}\n<commit_msg>Drop clippy allow<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n#![cfg_attr(not(feature = \"clippy\"), allow(unknown_lints))]\n#![allow(doc_markdown)]\n\nextern crate devicemapper;\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate clap;\n#[cfg(feature = \"dbus_enabled\")]\nextern crate dbus;\nextern crate env_logger;\nextern crate libc;\nextern crate libudev;\nextern crate nix;\n\nuse std::cell::RefCell;\nuse std::env;\nuse std::fs::{File, OpenOptions};\nuse std::io::{ErrorKind, Read, Write};\nuse std::os::unix::io::AsRawFd;\nuse std::path::PathBuf;\nuse std::process::exit;\nuse std::rc::Rc;\n\nuse clap::{App, Arg, ArgMatches};\nuse env_logger::LogBuilder;\nuse libc::pid_t;\nuse log::{LogLevelFilter, SetLoggerError};\nuse nix::fcntl::{flock, FlockArg};\nuse nix::unistd::getpid;\n\n#[cfg(feature = \"dbus_enabled\")]\nuse dbus::WatchEvent;\n\nuse devicemapper::Device;\n\nuse libstratis::engine::{Engine, SimEngine, StratEngine};\nuse libstratis::stratis::{StratisError, StratisResult, VERSION};\n\nconst STRATISD_PID_PATH: &str = \"\/var\/run\/stratisd.pid\";\n\n\/\/\/ If writing a program error to stderr fails, panic.\nfn print_err(err: &StratisError) -> () {\n eprintln!(\"{}\", err);\n}\n\n\/\/\/ Configure and initialize the logger.\n\/\/\/ If debug is true, log at debug level. Otherwise read log configuration\n\/\/\/ parameters from the environment if RUST_LOG is set. 
Otherwise, just\n\/\/\/ accept the default configuration.\nfn initialize_log(debug: bool) -> Result<(), SetLoggerError> {\n let mut builder = LogBuilder::new();\n if debug {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n };\n\n builder.init()\n}\n\n\/\/\/ Given a udev event check to see if it's an add and if it is return the device node and\n\/\/\/ devicemapper::Device.\nfn handle_udev_add(event: &libudev::Event) -> Option<(Device, PathBuf)> {\n if event.event_type() == libudev::EventType::Add {\n let device = event.device();\n return device.devnode().and_then(|devnode| {\n device\n .devnum()\n .and_then(|devnum| Some((Device::from(devnum), PathBuf::from(devnode))))\n });\n }\n None\n}\n\n\/\/\/ To ensure only one instance of stratisd runs at a time, acquire an\n\/\/\/ exclusive lock. Return an error if lock attempt fails.\nfn trylock_pid_file() -> StratisResult<File> {\n let mut f = match OpenOptions::new()\n .read(true)\n .write(true)\n .create(true)\n .open(STRATISD_PID_PATH)\n {\n Ok(f) => f,\n Err(e) => {\n if e.kind() == ErrorKind::PermissionDenied {\n return Err(StratisError::Error(\n \"Must be running as root in order to start daemon.\".to_string(),\n ));\n }\n return Err(e.into());\n }\n };\n match flock(f.as_raw_fd(), FlockArg::LockExclusiveNonblock) {\n Ok(_) => {\n f.write_all(format!(\"{}\\n\", getpid()).as_bytes())?;\n Ok(f)\n }\n Err(_) => {\n let mut buf = String::new();\n f.read_to_string(&mut buf)?;\n \/\/ pidfile is supposed to contain pid of holder. But you never\n \/\/ know so be paranoid.\n let pid_str = buf.split_whitespace()\n .next()\n .and_then(|s| s.parse::<pid_t>().ok())\n .map(|pid| format!(\"{}\", pid))\n .unwrap_or_else(|| \"<unknown>\".into());\n Err(StratisError::Error(format!(\n \"Daemon already running with pid: {}\",\n pid_str\n )))\n }\n }\n}\n\nfn run(matches: &ArgMatches) -> StratisResult<()> {\n \/\/ Setup a udev listener before initializing the engine. A device may\n \/\/ appear after the engine has read the \/dev directory but before it has\n \/\/ completed initialization. Unless the udev event has been recorded, the\n \/\/ engine will miss the device.\n \/\/ This is especially important since stratisd must run during early boot.\n let context = libudev::Context::new()?;\n let mut monitor = libudev::Monitor::new(&context)?;\n monitor.match_subsystem_devtype(\"block\", \"disk\")?;\n let mut udev = monitor.listen()?;\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::default()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::initialize()?))\n }\n };\n\n \/*\n The file descriptor array indexes are laid out in the following:\n\n 0 == Always udev fd index\n 1 == engine index if eventable\n 1\/2 == Start of dbus client file descriptor(s), 1 if engine is not eventable, else 2\n *\/\n const FD_INDEX_UDEV: usize = 0;\n const FD_INDEX_ENGINE: usize = 1;\n\n \/*\n fds is a Vec of libc::pollfd structs. Ideally, it would be possible\n to use the higher level nix crate to handle polling. If this were possible,\n then the Vec would be one of nix::poll::PollFds and this would be more\n rustic. 
Unfortunately, the rust D-Bus library requires an explicit file\n descriptor to be passed as an argument to Connection::watch_handle(),\n and the explicit file descriptor can not be extracted from the PollFd\n struct. So, at this time, sticking with libc is less complex than\n converting to using nix, because if using nix, the file descriptor would\n have to be maintained in the Vec as well as the PollFd struct.\n *\/\n let mut fds = Vec::new();\n\n fds.push(libc::pollfd {\n fd: udev.as_raw_fd(),\n revents: 0,\n events: libc::POLLIN,\n });\n\n let eventable = engine.borrow().get_eventable();\n\n let poll_timeout = match eventable {\n Some(ref evt) => {\n fds.push(libc::pollfd {\n fd: evt.get_pollable_fd(),\n revents: 0,\n events: libc::POLLIN,\n });\n -1\n }\n None => 10000,\n };\n\n #[cfg(feature = \"dbus_enabled\")]\n let dbus_client_index_start = if eventable.is_some() {\n FD_INDEX_ENGINE + 1\n } else {\n FD_INDEX_ENGINE\n };\n\n #[cfg(feature = \"dbus_enabled\")]\n let (dbus_conn, mut tree, base_object_path, dbus_context) =\n libstratis::dbus_api::connect(Rc::clone(&engine))?;\n\n #[cfg(feature = \"dbus_enabled\")]\n for (_, pool_uuid, pool) in engine.borrow().pools() {\n libstratis::dbus_api::register_pool(\n &dbus_conn,\n &dbus_context,\n &mut tree,\n pool_uuid,\n pool,\n &base_object_path,\n )?;\n }\n\n loop {\n \/\/ Process any udev block events\n if fds[FD_INDEX_UDEV].revents != 0 {\n while let Some(event) = udev.receive_event() {\n if let Some((device, devnode)) = handle_udev_add(&event) {\n \/\/ If block evaluate returns an error we are going to ignore it as\n \/\/ there is nothing we can do for a device we are getting errors with.\n #[cfg(not(feature = \"dbus_enabled\"))]\n let _ = engine.borrow_mut().block_evaluate(device, devnode);\n\n #[cfg(feature = \"dbus_enabled\")]\n {\n let pool_uuid = engine\n .borrow_mut()\n .block_evaluate(device, devnode)\n .unwrap_or(None);\n\n if let Some(pool_uuid) = pool_uuid {\n libstratis::dbus_api::register_pool(\n &dbus_conn,\n &dbus_context,\n &mut tree,\n pool_uuid,\n engine\n .borrow()\n .get_pool(pool_uuid)\n .expect(\n \"block_evaluate() returned a pool UUID, pool must be available\",\n )\n .1,\n &base_object_path,\n )?;\n }\n }\n }\n }\n }\n\n \/\/ Handle engine events, if the engine is eventable\n match eventable {\n Some(ref evt) => {\n if fds[FD_INDEX_ENGINE].revents != 0 {\n evt.clear_event()?;\n engine.borrow_mut().evented()?;\n }\n }\n None => {\n \/\/ Unconditionally call evented() if engine has no eventable.\n \/\/ This looks like a bad idea, but the only engine that has\n \/\/ no eventable is the sim engine, and for that engine,\n \/\/ evented() is essentially a no-op.\n engine.borrow_mut().evented()?;\n }\n }\n\n \/\/ Iterate through D-Bus file descriptors (if enabled)\n #[cfg(feature = \"dbus_enabled\")]\n {\n for pfd in fds[dbus_client_index_start..]\n .iter()\n .filter(|pfd| pfd.revents != 0)\n {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) =\n libstratis::dbus_api::handle(&dbus_conn, &item, &mut tree, &dbus_context)\n {\n print_err(&From::from(r));\n }\n }\n }\n\n \/\/ Refresh list of dbus fds to poll for every time. 
This can change as\n \/\/ D-Bus clients come and go.\n fds.truncate(dbus_client_index_start);\n\n fds.extend(dbus_conn.watch_fds().iter().map(|w| w.to_pollfd()));\n }\n\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, poll_timeout) };\n\n \/\/ TODO: refine this behavior.\n \/\/ Different behaviors may be indicated, depending on the value of\n \/\/ errno when return value is -1.\n if r < 0 {\n return Err(StratisError::Error(format!(\n \"poll command failed: number of fds: {}, timeout: {}\",\n fds.len(),\n poll_timeout\n )));\n }\n }\n}\n\nfn main() {\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(\n Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"),\n )\n .arg(\n Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"),\n )\n .get_matches();\n\n let result = trylock_pid_file()\n .and_then(|_pidfile| {\n initialize_log(matches.is_present(\"debug\")).map_err(|_| {\n StratisError::Error(\"The log was not initialized successfully. This is surprising, since this is the first and only initialization of the log, which is guaranteed to succeed\".into())\n })\n })\n .and_then(|_| run(&matches));\n if let Err(err) = result {\n print_err(&err);\n exit(1);\n } else {\n exit(0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>std\/result<commit_after>mod checked {\n\n #[derive(Debug)]\n pub enum MathError {\n DivisionByZero,\n NegetiveLogrithm,\n NegetiveSquareRoot\n }\n\n pub type MathResult<f64> = Result<f64, MathError> ;\n\n pub fn div(d: f64, i: f64) -> MathResult<f64> {\n if i == 0f64 {\n Err(MathError::DivisionByZero)\n } else {\n Ok(d\/i)\n }\n }\n\n pub fn sqrt(i: f64) -> MathResult<f64> {\n if i <= 0f64 {\n Err(MathError::NegetiveSquareRoot)\n } else {\n Ok(i.sqrt())\n }\n }\n\n pub fn ln(i: f64) -> MathResult<f64> {\n if i <= 0f64 {\n Err(MathError::NegetiveLogrithm)\n } else {\n Ok(i.ln())\n }\n }\n\n}\n\nfn op(d: f64, i: f64) -> f64{\n match checked::div(d, i) {\n Err(e) => panic!(\"Error: {:?}\", e),\n Ok(q) => match checked::ln(q) {\n Err(e) => panic!(\"Error: {:?}\", e),\n Ok(s) => match checked::sqrt(s) {\n Err(e) => panic!(\"Error: {:?}\", e),\n Ok(r) => r\n }\n }\n }\n}\n\nfn op_try(d: f64, i: f64) -> checked::MathResult<f64> {\n let ratio = try!(checked::div(d,i));\n let ln = try!(checked::ln(ratio));\n checked::sqrt(ln)\n}\n\nfn op_(d: f64, i: f64) -> f64 {\n match op_try(d, i) {\n Ok(q) => q,\n Err(e) => panic!(e)\n }\n}\n\n\n\nfn main() {\n println!(\"operate: 50.0, 2.0: {}\", op(50.0, 2.0));\n \/\/ println!(\"operate: 50.0, 2.0: {}\", op(2.0, 50.0));\n\n println!(\"operate_try: 50.0, 2.0: {}\", op_(50.0, 2.0));\n println!(\"operate: 50.0, 2.0: {}\", op_(20.0, 50.0));\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust flow control<commit_after>#![allow(dead_code,unused_variables)]\nfn main() {\n \/\/ 7.1\n let n = 5;\n let big_n =\n if n < 10 && n > -10 {\n 10 * n\n } else {\n n \/ 2\n };\n println!(\"{} -> {}\", n, big_n);\n\n \/\/ 7.2\n println!(\"loop{{}}\");\n let mut count = 0u32;\n loop {\n count += 1;\n println!(\"{:?}\", count);\n if count == 5 {\n println!(\"break\");\n break;\n }\n }\n\n \/\/ 7.2.1\n 'outer: loop {\n println!(\"outer loop\");\n 'inner: loop {\n println!(\"inner loop\");\n break 'outer;\n }\n }\n println!(\"exit outer loop\");\n\n \/\/ 7.3\n let mut n = 1;\n while n <= 3 {\n n += 1;\n }\n println!(\"while: {:?}\", n);\n\n \/\/ 7.4\n 
for n in 1..3 {\n println!(\"for: {:?}\", n);\n }\n\n \/\/ 7.5\n let number = 13;\n match number {\n 1 => println!(\"One!\"),\n 2 | 3 | 5 | 7 | 11 => println!(\"This is prime\"),\n 13...19 => println!(\"13...19 range\"),\n _ => println!(\"default\"),\n }\n\n \/\/ 7.5.1.1\n let pair = (0, -2);\n match pair {\n (0, y) => println!(\"(0, {})\", y),\n (x, 0) => println!(\"({}, 0)\", x),\n _ => println!(\"default\"),\n }\n\n \/\/ 7.5.1.2\n enum Color {\n Red, Blue, Green, RGB(u32, u32, u32)\n }\n let color = Color::Blue;\n match color {\n Color::Red => println!(\"Red\"),\n Color::Green => println!(\"Green\"),\n Color::Blue => println!(\"Blue\"),\n Color::RGB(r,g,b) => println!(\"RGB\"),\n }\n\n \/\/ 7.5.1.3\n let reference = &4;\n match reference {\n &val => println!(\"Value via destructing: {:?}\", val),\n }\n match *reference { \/\/ deference before matching\n val => println!(\"Value via dereferencing: {:?}\", val),\n }\n let ref a_reference = 3;\n let value = 5;\n match value {\n ref r => println!(\"Got reference value: {}\", r),\n }\n let mut mut_value = 6;\n match mut_value {\n ref mut m => {\n *m += 10;\n println!(\"mut_value: {}\", m);\n },\n }\n struct Foo { x: (u32, u32), y: u32 }\n let foo = Foo { x: (1,2), y: 3 };\n let Foo { y, .. } = foo; \/\/ some destructed values can be ignored\n println!(\"foo y: {:?}\", y);\n\n \/\/ 7.5.2\n let pair = (2, -2);\n match pair {\n (x, y) if x == y => println!(\"x == y\"),\n (x, _) => println!(\"pair x: {:?}\", x),\n }\n\n \/\/ 7.5.3\n fn age() -> u32 {\n 15\n }\n match age() {\n 0 => println!(\"age zero\"),\n n @ 1...22 => println!(\"age 1...22: {}\", n),\n _ => println!(\"default\"),\n }\n\n \/\/ 7.6\n let mut optional: Option<i32> = Some(7);\n match optional {\n Some(i) => println!(\"i: {:?}\", i),\n _ => {},\n }\n if let Some(i) = optional {\n println!(\"Matched number: {}\", i);\n } else {\n println!(\"No match\");\n }\n while let Some(i) = optional {\n println!(\"while let {{ {} }}\", i);\n if i > 7 {\n break;\n }\n optional = Some(i + 1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate getopts;\nextern crate natord;\n\nuse file::File;\nuse column::{Column, SizeFormat};\nuse column::Column::*;\nuse output::View;\nuse term::dimensions;\n\nuse std::ascii::AsciiExt;\nuse std::slice::Iter;\nuse std::fmt;\n\nuse self::Misfire::*;\n\n\/\/\/ The *Options* struct represents a parsed version of the user's\n\/\/\/ command-line options.\n#[derive(PartialEq, Debug)]\npub struct Options {\n pub list_dirs: bool,\n path_strs: Vec<String>,\n reverse: bool,\n show_invisibles: bool,\n sort_field: SortField,\n view: View,\n}\n\nimpl Options {\n\n \/\/\/ Call getopts on the given slice of command-line strings.\n pub fn getopts(args: &[String]) -> Result<Options, Misfire> {\n let opts = &[\n getopts::optflag(\"1\", \"oneline\", \"display one entry per line\"),\n getopts::optflag(\"a\", \"all\", \"show dot-files\"),\n getopts::optflag(\"b\", \"binary\", \"use binary prefixes in file sizes\"),\n getopts::optflag(\"B\", \"bytes\", \"list file sizes in bytes, without prefixes\"),\n getopts::optflag(\"d\", \"list-dirs\", \"list directories as regular files\"),\n getopts::optflag(\"g\", \"group\", \"show group as well as user\"),\n getopts::optflag(\"h\", \"header\", \"show a header row at the top\"),\n getopts::optflag(\"H\", \"links\", \"show number of hard links\"),\n getopts::optflag(\"l\", \"long\", \"display extended details and attributes\"),\n getopts::optflag(\"i\", \"inode\", \"show each file's inode number\"),\n getopts::optflag(\"r\", 
\"reverse\", \"reverse order of files\"),\n getopts::optopt (\"s\", \"sort\", \"field to sort by\", \"WORD\"),\n getopts::optflag(\"S\", \"blocks\", \"show number of file system blocks\"),\n getopts::optflag(\"x\", \"across\", \"sort multi-column view entries across\"),\n getopts::optflag(\"?\", \"help\", \"show list of command-line options\"),\n ];\n\n let matches = match getopts::getopts(args, opts) {\n Ok(m) => m,\n Err(e) => return Err(Misfire::InvalidOptions(e)),\n };\n\n if matches.opt_present(\"help\") {\n return Err(Misfire::Help(getopts::usage(\"Usage:\\n exa [options] [files...]\", opts)));\n }\n\n let sort_field = match matches.opt_str(\"sort\") {\n Some(word) => try!(SortField::from_word(word)),\n None => SortField::Name,\n };\n\n Ok(Options {\n list_dirs: matches.opt_present(\"list-dirs\"),\n path_strs: if matches.free.is_empty() { vec![ \".\".to_string() ] } else { matches.free.clone() },\n reverse: matches.opt_present(\"reverse\"),\n show_invisibles: matches.opt_present(\"all\"),\n sort_field: sort_field,\n view: try!(view(&matches)),\n })\n }\n\n \/\/\/ Iterate over the non-option arguments left oven from getopts.\n pub fn path_strings(&self) -> Iter<String> {\n self.path_strs.iter()\n }\n\n \/\/\/ Display the files using this Option's View.\n pub fn view(&self, files: Vec<File>) {\n self.view.view(files)\n }\n\n \/\/\/ Transform the files somehow before listing them.\n pub fn transform_files<'a>(&self, mut files: Vec<File<'a>>) -> Vec<File<'a>> {\n\n if !self.show_invisibles {\n files = files.into_iter().filter(|f| !f.is_dotfile()).collect();\n }\n\n match self.sort_field {\n SortField::Unsorted => {},\n SortField::Name => files.sort_by(|a, b| natord::compare(a.name.as_slice(), b.name.as_slice())),\n SortField::Size => files.sort_by(|a, b| a.stat.size.cmp(&b.stat.size)),\n SortField::FileInode => files.sort_by(|a, b| a.stat.unstable.inode.cmp(&b.stat.unstable.inode)),\n SortField::Extension => files.sort_by(|a, b| {\n let exts = a.ext.clone().map(|e| e.to_ascii_lowercase()).cmp(&b.ext.clone().map(|e| e.to_ascii_lowercase()));\n let names = a.name.to_ascii_lowercase().cmp(&b.name.to_ascii_lowercase());\n exts.cmp(&names)\n }),\n }\n\n if self.reverse {\n files.reverse();\n }\n\n files\n }\n}\n\n\/\/\/ User-supplied field to sort by\n#[derive(PartialEq, Debug)]\npub enum SortField {\n Unsorted, Name, Extension, Size, FileInode\n}\n\nimpl Copy for SortField { }\n\nimpl SortField {\n\n \/\/\/ Find which field to use based on a user-supplied word.\n fn from_word(word: String) -> Result<SortField, Misfire> {\n match word.as_slice() {\n \"name\" => Ok(SortField::Name),\n \"size\" => Ok(SortField::Size),\n \"ext\" => Ok(SortField::Extension),\n \"none\" => Ok(SortField::Unsorted),\n \"inode\" => Ok(SortField::FileInode),\n field => Err(SortField::none(field))\n }\n }\n\n \/\/\/ How to display an error when the word didn't match with anything.\n fn none(field: &str) -> Misfire {\n Misfire::InvalidOptions(getopts::Fail::UnrecognizedOption(format!(\"--sort {}\", field)))\n }\n}\n\n\/\/\/ One of these things could happen instead of listing files.\n#[derive(PartialEq, Debug)]\npub enum Misfire {\n\n \/\/\/ The getopts crate didn't like these arguments.\n InvalidOptions(getopts::Fail),\n\n \/\/\/ The user asked for help. 
This isn't strictly an error, which is why\n \/\/\/ this enum isn't named Error!\n Help(String),\n\n \/\/\/ Two options were given that conflict with one another\n Conflict(&'static str, &'static str),\n\n \/\/\/ An option was given that does nothing when another one either is or\n \/\/\/ isn't present.\n Useless(&'static str, bool, &'static str),\n}\n\nimpl Misfire {\n \/\/\/ The OS return code this misfire should signify.\n pub fn error_code(&self) -> isize {\n if let Help(_) = *self { 2 }\n else { 3 }\n }\n}\n\nimpl fmt::Display for Misfire {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n InvalidOptions(ref e) => write!(f, \"{}\", e),\n Help(ref text) => write!(f, \"{}\", text),\n Conflict(a, b) => write!(f, \"Option --{} conflicts with option {}.\", a, b),\n Useless(a, false, b) => write!(f, \"Option --{} is useless without option --{}.\", a, b),\n Useless(a, true, b) => write!(f, \"Option --{} is useless given option --{}.\", a, b),\n }\n }\n}\n\n\/\/\/ Turns the Getopts results object into a View object.\nfn view(matches: &getopts::Matches) -> Result<View, Misfire> {\n if matches.opt_present(\"long\") {\n if matches.opt_present(\"across\") {\n Err(Misfire::Useless(\"across\", true, \"long\"))\n }\n else if matches.opt_present(\"oneline\") {\n Err(Misfire::Useless(\"across\", true, \"long\"))\n }\n else {\n Ok(View::Details(try!(columns(matches)), matches.opt_present(\"header\")))\n }\n }\n else if matches.opt_present(\"binary\") {\n Err(Misfire::Useless(\"binary\", false, \"long\"))\n }\n else if matches.opt_present(\"bytes\") {\n Err(Misfire::Useless(\"bytes\", false, \"long\"))\n }\n else if matches.opt_present(\"oneline\") {\n if matches.opt_present(\"across\") {\n Err(Misfire::Useless(\"across\", true, \"oneline\"))\n }\n else {\n Ok(View::Lines)\n }\n }\n else {\n match dimensions() {\n None => Ok(View::Lines),\n Some((width, _)) => Ok(View::Grid(matches.opt_present(\"across\"), width)),\n }\n }\n}\n\n\/\/\/ Finds out which file size the user has asked for.\nfn file_size(matches: &getopts::Matches) -> Result<SizeFormat, Misfire> {\n let binary = matches.opt_present(\"binary\");\n let bytes = matches.opt_present(\"bytes\");\n\n match (binary, bytes) {\n (true, true ) => Err(Misfire::Conflict(\"binary\", \"bytes\")),\n (true, false) => Ok(SizeFormat::BinaryBytes),\n (false, true ) => Ok(SizeFormat::JustBytes),\n (false, false) => Ok(SizeFormat::DecimalBytes),\n }\n}\n\n\/\/\/ Turns the Getopts results object into a list of columns for the columns\n\/\/\/ view, depending on the passed-in command-line arguments.\nfn columns(matches: &getopts::Matches) -> Result<Vec<Column>, Misfire> {\n let mut columns = vec![];\n\n if matches.opt_present(\"inode\") {\n columns.push(Inode);\n }\n\n columns.push(Permissions);\n\n if matches.opt_present(\"links\") {\n columns.push(HardLinks);\n }\n\n \/\/ Fail early here if two file size flags are given\n columns.push(FileSize(try!(file_size(matches))));\n\n if matches.opt_present(\"blocks\") {\n columns.push(Blocks);\n }\n\n columns.push(User);\n\n if matches.opt_present(\"group\") {\n columns.push(Group);\n }\n\n columns.push(FileName);\n Ok(columns)\n}\n\n#[cfg(test)]\nmod test {\n use super::Options;\n use super::Misfire;\n use super::Misfire::*;\n\n use std::fmt;\n\n fn is_helpful(misfire: Result<Options, Misfire>) -> bool {\n match misfire {\n Err(Help(_)) => true,\n _ => false,\n }\n }\n\n impl fmt::Display for Options {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{:?}\", self)\n }\n }\n\n 
#[test]\n fn help() {\n let opts = Options::getopts(&[ \"--help\".to_string() ]);\n assert!(is_helpful(opts))\n }\n\n #[test]\n fn help_with_file() {\n let opts = Options::getopts(&[ \"--help\".to_string(), \"me\".to_string() ]);\n assert!(is_helpful(opts))\n }\n\n #[test]\n fn files() {\n let opts = Options::getopts(&[ \"this file\".to_string(), \"that file\".to_string() ]).unwrap();\n let args: Vec<&String> = opts.path_strings().collect();\n assert_eq!(args, vec![ &\"this file\".to_string(), &\"that file\".to_string() ])\n }\n\n #[test]\n fn no_args() {\n let opts = Options::getopts(&[]).unwrap();\n let args: Vec<&String> = opts.path_strings().collect();\n assert_eq!(args, vec![ &\".\".to_string() ])\n }\n\n #[test]\n fn file_sizes() {\n let opts = Options::getopts(&[ \"--long\".to_string(), \"--binary\".to_string(), \"--bytes\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Conflict(\"binary\", \"bytes\"))\n }\n\n #[test]\n fn just_binary() {\n let opts = Options::getopts(&[ \"--binary\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"binary\", false, \"long\"))\n }\n\n #[test]\n fn just_bytes() {\n let opts = Options::getopts(&[ \"--bytes\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"bytes\", false, \"long\"))\n }\n\n #[test]\n fn long_across() {\n let opts = Options::getopts(&[ \"--long\".to_string(), \"--across\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"across\", true, \"long\"))\n }\n\n #[test]\n fn oneline_across() {\n let opts = Options::getopts(&[ \"--oneline\".to_string(), \"--across\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"across\", true, \"oneline\"))\n }\n}\n<commit_msg>Forbid a few more option combinations<commit_after>extern crate getopts;\nextern crate natord;\n\nuse file::File;\nuse column::{Column, SizeFormat};\nuse column::Column::*;\nuse output::View;\nuse term::dimensions;\n\nuse std::ascii::AsciiExt;\nuse std::slice::Iter;\nuse std::fmt;\n\nuse self::Misfire::*;\n\n\/\/\/ The *Options* struct represents a parsed version of the user's\n\/\/\/ command-line options.\n#[derive(PartialEq, Debug)]\npub struct Options {\n pub list_dirs: bool,\n path_strs: Vec<String>,\n reverse: bool,\n show_invisibles: bool,\n sort_field: SortField,\n view: View,\n}\n\nimpl Options {\n\n \/\/\/ Call getopts on the given slice of command-line strings.\n pub fn getopts(args: &[String]) -> Result<Options, Misfire> {\n let opts = &[\n getopts::optflag(\"1\", \"oneline\", \"display one entry per line\"),\n getopts::optflag(\"a\", \"all\", \"show dot-files\"),\n getopts::optflag(\"b\", \"binary\", \"use binary prefixes in file sizes\"),\n getopts::optflag(\"B\", \"bytes\", \"list file sizes in bytes, without prefixes\"),\n getopts::optflag(\"d\", \"list-dirs\", \"list directories as regular files\"),\n getopts::optflag(\"g\", \"group\", \"show group as well as user\"),\n getopts::optflag(\"h\", \"header\", \"show a header row at the top\"),\n getopts::optflag(\"H\", \"links\", \"show number of hard links\"),\n getopts::optflag(\"l\", \"long\", \"display extended details and attributes\"),\n getopts::optflag(\"i\", \"inode\", \"show each file's inode number\"),\n getopts::optflag(\"r\", \"reverse\", \"reverse order of files\"),\n getopts::optopt (\"s\", \"sort\", \"field to sort by\", \"WORD\"),\n getopts::optflag(\"S\", \"blocks\", \"show number of file system blocks\"),\n getopts::optflag(\"x\", \"across\", \"sort multi-column view entries across\"),\n getopts::optflag(\"?\", \"help\", 
\"show list of command-line options\"),\n ];\n\n let matches = match getopts::getopts(args, opts) {\n Ok(m) => m,\n Err(e) => return Err(Misfire::InvalidOptions(e)),\n };\n\n if matches.opt_present(\"help\") {\n return Err(Misfire::Help(getopts::usage(\"Usage:\\n exa [options] [files...]\", opts)));\n }\n\n let sort_field = match matches.opt_str(\"sort\") {\n Some(word) => try!(SortField::from_word(word)),\n None => SortField::Name,\n };\n\n Ok(Options {\n list_dirs: matches.opt_present(\"list-dirs\"),\n path_strs: if matches.free.is_empty() { vec![ \".\".to_string() ] } else { matches.free.clone() },\n reverse: matches.opt_present(\"reverse\"),\n show_invisibles: matches.opt_present(\"all\"),\n sort_field: sort_field,\n view: try!(view(&matches)),\n })\n }\n\n \/\/\/ Iterate over the non-option arguments left oven from getopts.\n pub fn path_strings(&self) -> Iter<String> {\n self.path_strs.iter()\n }\n\n \/\/\/ Display the files using this Option's View.\n pub fn view(&self, files: Vec<File>) {\n self.view.view(files)\n }\n\n \/\/\/ Transform the files somehow before listing them.\n pub fn transform_files<'a>(&self, mut files: Vec<File<'a>>) -> Vec<File<'a>> {\n\n if !self.show_invisibles {\n files = files.into_iter().filter(|f| !f.is_dotfile()).collect();\n }\n\n match self.sort_field {\n SortField::Unsorted => {},\n SortField::Name => files.sort_by(|a, b| natord::compare(a.name.as_slice(), b.name.as_slice())),\n SortField::Size => files.sort_by(|a, b| a.stat.size.cmp(&b.stat.size)),\n SortField::FileInode => files.sort_by(|a, b| a.stat.unstable.inode.cmp(&b.stat.unstable.inode)),\n SortField::Extension => files.sort_by(|a, b| {\n let exts = a.ext.clone().map(|e| e.to_ascii_lowercase()).cmp(&b.ext.clone().map(|e| e.to_ascii_lowercase()));\n let names = a.name.to_ascii_lowercase().cmp(&b.name.to_ascii_lowercase());\n exts.cmp(&names)\n }),\n }\n\n if self.reverse {\n files.reverse();\n }\n\n files\n }\n}\n\n\/\/\/ User-supplied field to sort by\n#[derive(PartialEq, Debug)]\npub enum SortField {\n Unsorted, Name, Extension, Size, FileInode\n}\n\nimpl Copy for SortField { }\n\nimpl SortField {\n\n \/\/\/ Find which field to use based on a user-supplied word.\n fn from_word(word: String) -> Result<SortField, Misfire> {\n match word.as_slice() {\n \"name\" => Ok(SortField::Name),\n \"size\" => Ok(SortField::Size),\n \"ext\" => Ok(SortField::Extension),\n \"none\" => Ok(SortField::Unsorted),\n \"inode\" => Ok(SortField::FileInode),\n field => Err(SortField::none(field))\n }\n }\n\n \/\/\/ How to display an error when the word didn't match with anything.\n fn none(field: &str) -> Misfire {\n Misfire::InvalidOptions(getopts::Fail::UnrecognizedOption(format!(\"--sort {}\", field)))\n }\n}\n\n\/\/\/ One of these things could happen instead of listing files.\n#[derive(PartialEq, Debug)]\npub enum Misfire {\n\n \/\/\/ The getopts crate didn't like these arguments.\n InvalidOptions(getopts::Fail),\n\n \/\/\/ The user asked for help. 
This isn't strictly an error, which is why\n \/\/\/ this enum isn't named Error!\n Help(String),\n\n \/\/\/ Two options were given that conflict with one another\n Conflict(&'static str, &'static str),\n\n \/\/\/ An option was given that does nothing when another one either is or\n \/\/\/ isn't present.\n Useless(&'static str, bool, &'static str),\n}\n\nimpl Misfire {\n \/\/\/ The OS return code this misfire should signify.\n pub fn error_code(&self) -> isize {\n if let Help(_) = *self { 2 }\n else { 3 }\n }\n}\n\nimpl fmt::Display for Misfire {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n InvalidOptions(ref e) => write!(f, \"{}\", e),\n Help(ref text) => write!(f, \"{}\", text),\n Conflict(a, b) => write!(f, \"Option --{} conflicts with option {}.\", a, b),\n Useless(a, false, b) => write!(f, \"Option --{} is useless without option --{}.\", a, b),\n Useless(a, true, b) => write!(f, \"Option --{} is useless given option --{}.\", a, b),\n }\n }\n}\n\n\/\/\/ Turns the Getopts results object into a View object.\nfn view(matches: &getopts::Matches) -> Result<View, Misfire> {\n if matches.opt_present(\"long\") {\n if matches.opt_present(\"across\") {\n Err(Misfire::Useless(\"across\", true, \"long\"))\n }\n else if matches.opt_present(\"oneline\") {\n Err(Misfire::Useless(\"across\", true, \"long\"))\n }\n else {\n Ok(View::Details(try!(columns(matches)), matches.opt_present(\"header\")))\n }\n }\n else if matches.opt_present(\"binary\") {\n Err(Misfire::Useless(\"binary\", false, \"long\"))\n }\n else if matches.opt_present(\"bytes\") {\n Err(Misfire::Useless(\"bytes\", false, \"long\"))\n }\n else if matches.opt_present(\"inode\") {\n Err(Misfire::Useless(\"inode\", false, \"long\"))\n }\n else if matches.opt_present(\"links\") {\n Err(Misfire::Useless(\"links\", false, \"long\"))\n }\n else if matches.opt_present(\"header\") {\n Err(Misfire::Useless(\"header\", false, \"long\"))\n }\n else if matches.opt_present(\"blocks\") {\n Err(Misfire::Useless(\"blocks\", false, \"long\"))\n }\n else if matches.opt_present(\"oneline\") {\n if matches.opt_present(\"across\") {\n Err(Misfire::Useless(\"across\", true, \"oneline\"))\n }\n else {\n Ok(View::Lines)\n }\n }\n else {\n match dimensions() {\n None => Ok(View::Lines),\n Some((width, _)) => Ok(View::Grid(matches.opt_present(\"across\"), width)),\n }\n }\n}\n\n\/\/\/ Finds out which file size the user has asked for.\nfn file_size(matches: &getopts::Matches) -> Result<SizeFormat, Misfire> {\n let binary = matches.opt_present(\"binary\");\n let bytes = matches.opt_present(\"bytes\");\n\n match (binary, bytes) {\n (true, true ) => Err(Misfire::Conflict(\"binary\", \"bytes\")),\n (true, false) => Ok(SizeFormat::BinaryBytes),\n (false, true ) => Ok(SizeFormat::JustBytes),\n (false, false) => Ok(SizeFormat::DecimalBytes),\n }\n}\n\n\/\/\/ Turns the Getopts results object into a list of columns for the columns\n\/\/\/ view, depending on the passed-in command-line arguments.\nfn columns(matches: &getopts::Matches) -> Result<Vec<Column>, Misfire> {\n let mut columns = vec![];\n\n if matches.opt_present(\"inode\") {\n columns.push(Inode);\n }\n\n columns.push(Permissions);\n\n if matches.opt_present(\"links\") {\n columns.push(HardLinks);\n }\n\n \/\/ Fail early here if two file size flags are given\n columns.push(FileSize(try!(file_size(matches))));\n\n if matches.opt_present(\"blocks\") {\n columns.push(Blocks);\n }\n\n columns.push(User);\n\n if matches.opt_present(\"group\") {\n columns.push(Group);\n }\n\n 
columns.push(FileName);\n Ok(columns)\n}\n\n#[cfg(test)]\nmod test {\n use super::Options;\n use super::Misfire;\n use super::Misfire::*;\n\n use std::fmt;\n\n fn is_helpful(misfire: Result<Options, Misfire>) -> bool {\n match misfire {\n Err(Help(_)) => true,\n _ => false,\n }\n }\n\n impl fmt::Display for Options {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{:?}\", self)\n }\n }\n\n #[test]\n fn help() {\n let opts = Options::getopts(&[ \"--help\".to_string() ]);\n assert!(is_helpful(opts))\n }\n\n #[test]\n fn help_with_file() {\n let opts = Options::getopts(&[ \"--help\".to_string(), \"me\".to_string() ]);\n assert!(is_helpful(opts))\n }\n\n #[test]\n fn files() {\n let opts = Options::getopts(&[ \"this file\".to_string(), \"that file\".to_string() ]).unwrap();\n let args: Vec<&String> = opts.path_strings().collect();\n assert_eq!(args, vec![ &\"this file\".to_string(), &\"that file\".to_string() ])\n }\n\n #[test]\n fn no_args() {\n let opts = Options::getopts(&[]).unwrap();\n let args: Vec<&String> = opts.path_strings().collect();\n assert_eq!(args, vec![ &\".\".to_string() ])\n }\n\n #[test]\n fn file_sizes() {\n let opts = Options::getopts(&[ \"--long\".to_string(), \"--binary\".to_string(), \"--bytes\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Conflict(\"binary\", \"bytes\"))\n }\n\n #[test]\n fn just_binary() {\n let opts = Options::getopts(&[ \"--binary\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"binary\", false, \"long\"))\n }\n\n #[test]\n fn just_bytes() {\n let opts = Options::getopts(&[ \"--bytes\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"bytes\", false, \"long\"))\n }\n\n #[test]\n fn long_across() {\n let opts = Options::getopts(&[ \"--long\".to_string(), \"--across\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"across\", true, \"long\"))\n }\n\n #[test]\n fn oneline_across() {\n let opts = Options::getopts(&[ \"--oneline\".to_string(), \"--across\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"across\", true, \"oneline\"))\n }\n\n #[test]\n fn just_header() {\n let opts = Options::getopts(&[ \"--header\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"header\", false, \"long\"))\n }\n\n #[test]\n fn just_inode() {\n let opts = Options::getopts(&[ \"--inode\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"inode\", false, \"long\"))\n }\n\n #[test]\n fn just_links() {\n let opts = Options::getopts(&[ \"--links\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"links\", false, \"long\"))\n }\n\n #[test]\n fn just_blocks() {\n let opts = Options::getopts(&[ \"--blocks\".to_string() ]);\n assert_eq!(opts.unwrap_err(), Misfire::Useless(\"blocks\", false, \"long\"))\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>begin mod program<commit_after>\/\/\n\/\/ Copyright (c) 2016, Boris Popov <popov@whitekefir.ru>\n\/\/\n\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\/\/\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\npub static NAME: &'static str = \"MountAndExec\";\npub static VERSION: &'static str = \"0.1.0\";\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/*\npub struct Info {\n\n pub name: &'static str,\n version: &'static str\n}\n\nimpl Info {\n pub fn init(&mut self) {\n self.name = &NAME;\n \/\/self.version = VERSION.to_string();\n \n \/\/self.name = \"MountAndExec\";\n \/\/\n \/\/TODO\n \/\/\n \/\/return;\n }\n*\/\n \/*\n fn area(&self) -> f64 {\n std::f64::consts::PI * (self.radius * self.radius)\n }\n *\/\n\/\/}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(fixup) Renamed the quarterround_X and swizzle_X routines\/macros.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>generate map tiles<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tests: some new tests for positionals<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Format decode() function to pass rustfmt<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add enable_all() method for utils that enable all flags by default e.g. the `wc` command and others<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>wrap tcp in vnc connection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #150 - KiChjang:origin-doc, r=SimonSapin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Another complete refactor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>loading flashgames docker is working now<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate serialize;\nextern crate time;\nextern crate \"crypto\" as crypto;\n\nuse serialize::base64;\nuse serialize::base64::{ToBase64, FromBase64};\nuse serialize::json;\nuse serialize::json::ToJson;\nuse serialize::json::Json;\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\n\nstruct Header<'a> {\n alg: &'a str,\n typ: &'a str\n}\n\nimpl<'a> Header<'a> {\n pub fn new(alg: Algorithm) -> Header<'a> {\n Header{alg: algorithm_to_string(alg), typ: std_type()}\n }\n\n pub fn std_type() -> String {\n \"JWT\"\n }\n}\n\nstruct Token<'a> {\n header: Option<Header<'a>>,\n payload: BTreeMap<String, String>,\n signature: &'a str,\n signing_input: &'a str\n}\n\nimpl<'a> Token<'a> {\n fn new() -> Token<'a> {\n unimplemented!()\n }\n\n fn segments_count() -> usize {\n 3\n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nenum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\nfn algorithm_to_string(alg: Algorithm) -> String {\n match alg {\n Algorithm::HS256 => \"HS256\",\n Algorithm::HS384 => \"HS384\",\n Algorithm::HS512 => \"HS512\"\n }\n}\n\nimpl<'a> ToJson for Header<'a> {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.typ.to_json());\n map.insert(\"alg\".to_string(), self.alg.to_json());\n Json::Object(map)\n }\n}\n\npub fn sign(secret: &str, payload: Option<BTreeMap<String, String>>, algorithm: Option<Algorithm>) -> String {\n let signing_input = 
get_signing_input(payload, algorithm);\n let signature = sign_hmac(signing_input.as_slice(), secret, match algorithm {\n Some(x) => x,\n None => Algorithm::HS256\n });\n format!(\"{}.{}\", signing_input, signature)\n}\n\nfn get_signing_input(payload: Option<BTreeMap<String, String>>, algorithm: Option<Algorithm>) -> String {\n let header = Header::new(algorithm);\n let header_json_str = header.to_json();\n let encoded_header = base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n\n let payload = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect(); \/\/todo - if payload is None\n let payload_json = Json::Object(payload);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\nfn sign_hmac256(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS256)\n}\n\nfn sign_hmac384(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS384)\n}\n\nfn sign_hmac512(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS512)\n}\n\nfn sign_hmac(signing_input: &str, secret: &str, algorithm: Algorithm) -> String {\n let mut hmac = Hmac::new(match algorithm {\n Algorithm::HS256 => Sha256::new(),\n Algorithm::HS384 => Sha384::new(),\n Algorithm::HS512 => Sha512::new()\n }, secret.to_string().as_bytes()\n );\n hmac.input(signing_input.to_string().as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\npub fn verify(token_str: &str, secret: &str, options: Option<BTreeMap<String, String>>) -> Result<Token<'a>, Error> {\n if signing_input.is_empty() || signing_input.is_whitespace() {\n return None\n }\n\n verify_signature(signing_input, secret, signature);\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n}\n\nfn decode_segments(jwt: &str, perform_verification: bool) -> Result<(Json, Json, String, Vec<u8>), Error> {\n let mut raw_segments = jwt.split_str(\".\");\n if raw_segments.count() != Token::segments_count() {\n return Err(Error::JWTInvalid)\n }\n\n let header_segment = raw_segments.next().unwrap();\n let payload_segment = raw_segments.next().unwrap();\n let crypto_segment = raw_segments.next().unwrap();\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n let signature = if perform_verification {\n crypto_segment.as_bytes().from_base64().unwrap()\n } else {\n vec![]\n };\n\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Ok((header, payload, signing_input, signature))\n}\n\nfn decode_header_and_payload(header_segment: &str, payload_segment: &str) -> (Json, Json) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s = str::from_utf8(bytes.as_slice()).unwrap();\n json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n let payload_json = base64_to_json(payload_segment);\n (header_json, payload_json)\n}\n\nfn 
verify_signature(signing_input: &str, secret: &str, signature: &[u8]) -> bool {\n let mut hmac = Hmac::new(match algorithm {\n Algorithm::HS256 => Sha256::new(),\n Algorithm::HS384 => Sha384::new(),\n Algorithm::HS512 => Sha512::new(),\n _ => panic!()\n }, secret.to_string().as_bytes()\n );\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\nfn verify_issuer(payload_json: Json, iss: &str) -> bool {\n \/\/ take \"iss\" from payload_json\n \/\/ take \"iss\" from ...\n \/\/ make sure they're equal\n\n \/\/ if iss.is_empty() || signing_input.as_slice().is_whitespace() {\n \/\/ return Err(Error::IssuerInvalid)\n \/\/ }\n unimplemented!()\n}\n\nfn verify_expiration(payload_json: Json) -> bool {\n let payload = json_to_tree(payload_json);\n if payload.contains_key(\"exp\") {\n let exp: i64 = json::from_str(payload.get(\"exp\").unwrap().as_slice()).unwrap();\n \/\/ if exp.is_empty() || signing_input.as_slice().is_whitespace() {\n \/\/ return false\n \/\/ }\n \n exp > time::get_time().sec\n } else {\n false\n }\n}\n\nfn verify_audience(payload_json: Json, aud: &str) -> bool {\n unimplemented!()\n}\n\nfn verify_subject(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_notbefore(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_issuedat(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_jwtid(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_generic(payload_json: Json, parameter_name: String) -> bool {\n let payload = json_to_tree(payload_json);\n if payload.contains_key(¶meter_name) {\n \n }\n\n unimplemented!()\n}\n\n#[cfg(test)]\nmod tests {\n extern crate time;\n\n use super::sign;\n use super::decode;\n use super::secure_compare;\n use super::Algorithm;\n use std::collections::BTreeMap;\n use std::time::duration::Duration;\n\n #[test]\n fn test_encode_and_decode_jwt() {\n let mut p1 = BTreeMap::new();\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n p1.insert(\"key2\".to_string(), \"val2\".to_string());\n p1.insert(\"key3\".to_string(), \"val3\".to_string());\n\n let secret = \"secret123\";\n let jwt1 = sign(secret, Some(p1.clone()), Some(Algorithm::HS256));\n let maybe_res = verify(jwt.as_slice(), secret, None);\n\n assert!(maybe_res.is_ok());\n assert_eq!(jwt1, maybe_res.unwrap());\n } \n\n #[test]\n fn test_decode_valid_jwt() {\n let mut p1 = BTreeMap::new();\n p1.insert(\"key11\".to_string(), \"val1\".to_string());\n p1.insert(\"key22\".to_string(), \"val2\".to_string());\n let secret = \"secret123\";\n let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n let maybe_res = verify(jwt.as_slice(), secret, None);\n \n assert!(maybe_res.is_ok());\n assert_eq!(p1, maybe_res.unwrap().payload);\n }\n\n #[test]\n fn test_fails_when_expired() {\n let now = time::get_time();\n let past = now + Duration::minutes(-5);\n let mut p1 = BTreeMap::new();\n p1.insert(\"exp\".to_string(), past.sec.to_string());\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n let secret = \"secret123\";\n let jwt = sign(secret, Some(p1.clone()), None);\n let res = verify(jwt.as_slice(), secret, None);\n assert!(res.is_ok());\n }\n\n #[test]\n fn test_ok_when_expired_not_verified() {\n let now = 
time::get_time();\n let past = now + Duration::minutes(-5);\n let mut p1 = BTreeMap::new();\n p1.insert(\"exp\".to_string(), past.sec.to_string());\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n let secret = \"secret123\";\n let jwt = sign(secret, Some(p1.clone()), None);\n let res = verify(jwt.as_slice(), secret, None);\n assert!(res.is_ok());\n }\n \n #[test]\n fn test_secure_compare_same_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(res);\n }\n\n #[test]\n fn test_fails_when_secure_compare_different_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same but different\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(!res);\n\n let str3 = \"same same\".as_bytes();\n let str4 = \"same ssss\".as_bytes();\n let res2 = secure_compare(str3, str4);\n assert!(!res2);\n }\n}<commit_msg>refactored2<commit_after>extern crate serialize;\nextern crate time;\nextern crate \"crypto\" as crypto;\n\nuse serialize::base64;\nuse serialize::base64::{ToBase64, FromBase64};\nuse serialize::json;\nuse serialize::json::ToJson;\nuse serialize::json::Json;\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\n\nstruct Header<'a> {\n alg: BTreeMap<&'a str, Algorithm>,\n typ: &'a str\n}\n\nimpl<'a> Header<'a> {\n pub fn new(alg: Algorithm) -> Header<'a> {\n let mut map = BTreeMap::new();\n map.insert(\"HS256\".to_string(), Algorithm::HS256);\n Header{alg: algorithm_to_string(alg), typ: Header::std_type()}\n }\n \n pub fn new(alg: &str) -> Header<'a> {\n Header::algorithms()\n alg_str match {\n \"HS256\" => Algorithm::HS256,\n \"HS384\" => Algorithm::HS384,\n \"HS512\" => Algorithm::HS512,\n _ => panic!(\"Unknown algorithm: {}\", alg_str)\n }\n Header{alg: algorithm_to_string(alg), typ: Header::std_type()}\n }\n\n pub fn std_type() -> String {\n \"JWT\"\n }\n\n fn algorithms() -> BTreeMap<String, Algorithm> {\n let mut map = BTreeMap::new();\n map.insert(\"HS256\".to_string(), Algorithm::HS256);\n map.insert(\"HS384\".to_string(), Algorithm::HS384);\n map.insert(\"HS512\".to_string(), Algorithm::HS512);\n }\n}\n\nstruct Token<'a> {\n header: Header<'a>,\n payload: Option<BTreeMap<String, String>>,\n signature: &'a str,\n signing_input: &'a str\n}\n\nimpl<'a> Token<'a> {\n fn new() -> Token<'a> {\n unimplemented!()\n }\n\n fn segments_count() -> usize {\n 3\n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nenum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\nimpl<'a> ToJson for Header<'a> {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.typ.to_json());\n map.insert(\"alg\".to_string(), self.alg.to_json());\n Json::Object(map)\n }\n}\n\npub fn sign(secret: &str, payload: Option<BTreeMap<String, String>>, algorithm: Option<Algorithm>) -> String {\n let signing_input = get_signing_input(payload, algorithm);\n let signature = sign_hmac(signing_input.as_slice(), secret, match algorithm {\n Some(x) => x,\n None => Algorithm::HS256\n });\n format!(\"{}.{}\", signing_input, signature)\n}\n\nfn get_signing_input(payload: Option<BTreeMap<String, String>>, algorithm: Option<Algorithm>) -> String {\n let header = Header::new(algorithm);\n let header_json_str = header.to_json();\n let encoded_header = 
base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n\n let payload = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect(); \/\/todo - if payload is None\n let payload_json = Json::Object(payload);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\nfn sign_hmac256(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS256)\n}\n\nfn sign_hmac384(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS384)\n}\n\nfn sign_hmac512(signing_input: &str, secret: &str) -> String {\n sign_hmac(signing_input, secret, Algorithm::HS512)\n}\n\nfn sign_hmac(signing_input: &str, secret: &str, algorithm: Algorithm) -> String {\n let mut hmac = Hmac::new(match algorithm {\n Algorithm::HS256 => Sha256::new(),\n Algorithm::HS384 => Sha384::new(),\n Algorithm::HS512 => Sha512::new()\n }, secret.to_string().as_bytes()\n );\n hmac.input(signing_input.to_string().as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\npub fn verify(jwt_token: &str, secret: &str, options: Option<BTreeMap<String, String>>) -> Result<Token<'a>, Error> {\n \/\/ if signing_input.is_empty() || signing_input.is_whitespace() {\n \/\/ return None\n \/\/ }\n match decode_segments(jwt_token, true) {\n Ok((header, payload, signing_input, signature)) => {\n let algorithm = \n verify_signature(algorithm, signing_input, secret, signature);\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n },\n Err(err) => err\n }\n}\n\nfn decode_segments(jwt_token: &str, perform_verification: bool) -> Result<(Json, Json, String, Vec<u8>), Error> {\n let mut raw_segments = jwt_token.split_str(\".\");\n if raw_segments.count() != Token::segments_count() {\n return Err(Error::JWTInvalid)\n }\n\n let header_segment = raw_segments.next().unwrap();\n let payload_segment = raw_segments.next().unwrap();\n let crypto_segment = raw_segments.next().unwrap();\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n let signature = if perform_verification {\n crypto_segment.as_bytes().from_base64().unwrap()\n } else {\n vec![]\n };\n\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Ok((header, payload, signing_input, signature))\n}\n\nfn decode_header_and_payload(header_segment: &str, payload_segment: &str) -> (Header, BTreeMap<String, String>) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s = str::from_utf8(bytes.as_slice()).unwrap();\n json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n\n\n let header = Header::new()\n let payload_json = base64_to_json(payload_segment);\n (header_json, payload_json)\n}\n\nfn verify_signature(algorithm: Algorithm, signing_input: &str, secret: &str, signature: &[u8]) -> bool {\n let mut hmac = Hmac::new(match algorithm {\n Algorithm::HS256 => Sha256::new(),\n Algorithm::HS384 => Sha384::new(),\n 
Algorithm::HS512 => Sha512::new(),\n _ => panic!()\n }, secret.to_string().as_bytes()\n );\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\nfn verify_issuer(payload_json: Json, iss: &str) -> bool {\n \/\/ take \"iss\" from payload_json\n \/\/ take \"iss\" from ...\n \/\/ make sure they're equal\n\n \/\/ if iss.is_empty() || signing_input.as_slice().is_whitespace() {\n \/\/ return Err(Error::IssuerInvalid)\n \/\/ }\n unimplemented!()\n}\n\nfn verify_expiration(payload_json: Json) -> bool {\n let payload = json_to_tree(payload_json);\n if payload.contains_key(\"exp\") {\n let exp: i64 = json::from_str(payload.get(\"exp\").unwrap().as_slice()).unwrap();\n \/\/ if exp.is_empty() || signing_input.as_slice().is_whitespace() {\n \/\/ return false\n \/\/ }\n \n exp > time::get_time().sec\n } else {\n false\n }\n}\n\nfn verify_audience(payload_json: Json, aud: &str) -> bool {\n unimplemented!()\n}\n\nfn verify_subject(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_notbefore(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_issuedat(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_jwtid(payload_json: Json) -> bool {\n unimplemented!()\n}\n\nfn verify_generic(payload_json: Json, parameter_name: String) -> bool {\n let payload = json_to_tree(payload_json);\n if payload.contains_key(¶meter_name) {\n \n }\n\n unimplemented!()\n}\n\n#[cfg(test)]\nmod tests {\n extern crate time;\n\n use super::sign;\n use super::decode;\n use super::secure_compare;\n use super::Algorithm;\n use std::collections::BTreeMap;\n use std::time::duration::Duration;\n\n #[test]\n fn test_encode_and_decode_jwt() {\n let mut p1 = BTreeMap::new();\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n p1.insert(\"key2\".to_string(), \"val2\".to_string());\n p1.insert(\"key3\".to_string(), \"val3\".to_string());\n\n let secret = \"secret123\";\n let jwt1 = sign(secret, Some(p1.clone()), Some(Algorithm::HS256));\n let maybe_res = verify(jwt.as_slice(), secret, None);\n\n assert!(maybe_res.is_ok());\n assert_eq!(jwt1, maybe_res.unwrap());\n } \n\n #[test]\n fn test_decode_valid_jwt() {\n let mut p1 = BTreeMap::new();\n p1.insert(\"key11\".to_string(), \"val1\".to_string());\n p1.insert(\"key22\".to_string(), \"val2\".to_string());\n let secret = \"secret123\";\n let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n let maybe_res = verify(jwt.as_slice(), secret, None);\n \n assert!(maybe_res.is_ok());\n assert_eq!(p1, maybe_res.unwrap().payload);\n }\n\n #[test]\n fn test_fails_when_expired() {\n let now = time::get_time();\n let past = now + Duration::minutes(-5);\n let mut p1 = BTreeMap::new();\n p1.insert(\"exp\".to_string(), past.sec.to_string());\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n let secret = \"secret123\";\n let jwt = sign(secret, Some(p1.clone()), None);\n let res = verify(jwt.as_slice(), secret, None);\n assert!(res.is_ok());\n }\n\n #[test]\n fn test_ok_when_expired_not_verified() {\n let now = time::get_time();\n let past = now + Duration::minutes(-5);\n let mut p1 = BTreeMap::new();\n p1.insert(\"exp\".to_string(), past.sec.to_string());\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n let secret = 
\"secret123\";\n let jwt = sign(secret, Some(p1.clone()), None);\n let res = verify(jwt.as_slice(), secret, None);\n assert!(res.is_ok());\n }\n \n #[test]\n fn test_secure_compare_same_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(res);\n }\n\n #[test]\n fn test_fails_when_secure_compare_different_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same but different\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(!res);\n\n let str3 = \"same same\".as_bytes();\n let str4 = \"same ssss\".as_bytes();\n let res2 = secure_compare(str3, str4);\n assert!(!res2);\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License <LICENSE or http:\/\/opensource.org\/licenses\/MIT>.\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n\nextern crate libc;\nextern crate core;\nextern crate serialize;\n\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult<T> = Result<T, PortMidiError>;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Show, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData => Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. 
This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Show)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::c_str_to_bytes(&(*device_info).name);\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option<DeviceInfo> {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n\/\/\/\n\/\/\/ TODO: should we use u8?\n#[derive(Clone, Copy, PartialEq, Eq, Show)]\npub struct MidiMessage {\n pub status: i8,\n pub data1: i8,\n pub data2: i8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as i8,\n data1 : (((cmessage) >> 8) & 0xFF) as i8,\n data2 : (((cmessage) >> 16) & 0xFF) as i8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. 
See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Show)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult<Option<MidiEvent>> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option<ffi::PmError> = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult<bool> {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. 
open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port, this may\n \/\/\/ result in transmission of a partial midi message. Note, not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. 
Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::c_str_to_bytes(&error_text);\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\n<commit_msg>remove unused crates<commit_after>\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License <LICENSE or http:\/\/opensource.org\/licenses\/MIT>.\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n\nextern crate libc;\n\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult<T> = Result<T, PortMidiError>;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Show, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData => Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. 
This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Show)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::c_str_to_bytes(&(*device_info).name);\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option<DeviceInfo> {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n\/\/\/\n\/\/\/ TODO: should we use u8?\n#[derive(Clone, Copy, PartialEq, Eq, Show)]\npub struct MidiMessage {\n pub status: i8,\n pub data1: i8,\n pub data2: i8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as i8,\n data1 : (((cmessage) >> 8) & 0xFF) as i8,\n data2 : (((cmessage) >> 16) & 0xFF) as i8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. 
See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Show)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult<Option<MidiEvent>> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option<ffi::PmError> = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult<bool> {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. 
open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port, this may\n \/\/\/ result in transmission of a partial midi message. Note, not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. 
Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::c_str_to_bytes(&error_text);\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor FFI<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change test output to use debug!()<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name=\"kernel\"]\n#![crate_type=\"staticlib\"]\n#![feature(alloc)]\n#![feature(allocator)]\n#![feature(arc_counts)]\n#![feature(augmented_assignments)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(const_fn)]\n#![feature(core_intrinsics)]\n#![feature(core_str_ext)]\n#![feature(core_slice_ext)]\n#![feature(fnbox)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(op_assign_traits)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(unwind_attributes)]\n#![feature(vec_push_all)]\n#![feature(zero_one)]\n#![feature(collections_range)]\n#![no_std]\n\n#![allow(deprecated)]\n#![deny(warnings)]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\nextern crate system;\n\nuse acpi::Acpi;\n\nuse alloc::boxed::Box;\n\nuse arch::context::{context_switch, Context};\nuse arch::memory;\nuse arch::paging::Page;\nuse arch::regs::Regs;\nuse arch::tss::TSS;\n\nuse collections::string::ToString;\n\nuse core::{ptr, mem, usize};\nuse core::slice::SliceExt;\n\nuse common::time::Duration;\n\nuse drivers::pci;\nuse drivers::io::{Io, Pio};\nuse drivers::ps2::*;\nuse drivers::rtc::*;\nuse drivers::serial::*;\n\nuse env::Environment;\n\nuse graphics::display;\n\nuse schemes::context::*;\nuse schemes::debug::*;\nuse schemes::display::*;\nuse schemes::interrupt::*;\nuse schemes::memory::*;\nuse schemes::test::*;\n\nuse syscall::execute::execute;\nuse syscall::{do_sys_chdir, do_sys_exit, do_sys_open, syscall_handle};\n\npub use system::externs::*;\n\n\/\/\/ Common std-like functionality\n#[macro_use]\npub mod common;\n#[macro_use]\npub mod macros;\n\/\/\/ Allocation\npub mod alloc_system;\n\/\/\/ ACPI\npub mod acpi;\n\/\/\/ Architecture dependant\npub mod arch;\n\/\/\/ Audio\npub mod audio;\n\/\/\/ Disk drivers\npub mod disk;\n\/\/\/ Various drivers\npub mod drivers;\n\/\/\/ Environment\npub mod env;\n\/\/\/ Filesystems\npub mod fs;\n\/\/\/ Various graphical methods\npub mod graphics;\n\/\/\/ Networking\npub mod network;\n\/\/\/ Panic\npub mod panic;\n\/\/\/ Schemes\npub mod schemes;\n\/\/\/ Synchronization\npub mod sync;\n\/\/\/ System calls\npub mod syscall;\n\/\/\/ USB input\/output\npub mod usb;\n\npub static mut TSS_PTR: Option<&'static mut TSS> = 
None;\npub static mut ENV_PTR: Option<&'static mut Environment> = None;\n\npub fn env() -> &'static Environment {\n unsafe {\n match ENV_PTR {\n Some(&mut ref p) => p,\n None => unreachable!(),\n }\n }\n}\n\n\/\/\/ Pit duration\nstatic PIT_DURATION: Duration = Duration {\n secs: 0,\n nanos: 4500572,\n};\n\n\/\/\/ Idle loop (active while idle)\nfn idle_loop() {\n loop {\n unsafe { asm!(\"cli\" : : : : \"intel\", \"volatile\"); }\n\n let mut halt = true;\n\n for context in env().contexts.lock().iter().skip(1) {\n if ! context.blocked {\n halt = false;\n break;\n }\n }\n\n if halt {\n unsafe { asm!(\"sti ; hlt\" : : : : \"intel\", \"volatile\"); }\n } else {\n unsafe { asm!(\"sti ; nop\" : : : : \"intel\", \"volatile\"); }\n unsafe { context_switch(); }\n }\n }\n}\n\nextern {\n static mut __text_start: u8;\n static mut __text_end: u8;\n static mut __rodata_start: u8;\n static mut __rodata_end: u8;\n static mut __data_start: u8;\n static mut __data_end: u8;\n static mut __bss_start: u8;\n static mut __bss_end: u8;\n}\n\nstatic BSS_TEST_ZERO: usize = 0;\nstatic BSS_TEST_NONZERO: usize = usize::MAX;\n\n\/\/\/ Initialize kernel\nunsafe fn init(tss_data: usize) {\n\n \/\/ Test\n assume!(true);\n\n \/\/ Zero BSS, this initializes statics that are set to 0\n {\n let start_ptr = &mut __bss_start as *mut u8;\n let end_ptr = & __bss_end as *const u8 as usize;\n\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n memset(start_ptr, 0, size);\n }\n\n assert_eq!(BSS_TEST_ZERO, 0);\n assert_eq!(BSS_TEST_NONZERO, usize::MAX);\n }\n\n \/\/ Setup paging, this allows for memory allocation\n Page::init();\n memory::cluster_init();\n\n \/\/Get the VBE information before unmapping the first megabyte\n display::vbe_init();\n\n \/\/ Unmap first page (TODO: Unmap more)\n {\n let start_ptr = 0;\n let end_ptr = 0x1000;\n\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr as usize + page * 4096).unmap();\n }\n }\n }\n\n \/\/Remap text\n {\n let start_ptr = & __text_start as *const u8 as usize;\n let end_ptr = & __text_end as *const u8 as usize;\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr as usize + page * 4096).\n map_kernel_read(start_ptr as usize + page * 4096);\n }\n }\n }\n\n \/\/Remap rodata\n {\n let start_ptr = & __rodata_start as *const u8 as usize;\n let end_ptr = & __rodata_end as *const u8 as usize;\n if start_ptr <= end_ptr {\n let size = end_ptr - start_ptr;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr + page * 4096).\n map_kernel_read(start_ptr + page * 4096);\n }\n }\n }\n\n TSS_PTR = Some(&mut *(tss_data as *mut TSS));\n ENV_PTR = Some(&mut *Box::into_raw(Environment::new()));\n\n match ENV_PTR {\n Some(ref mut env) => {\n env.contexts.lock().push(Context::root());\n\n env.console.lock().draw = true;\n\n debugln!(\"Redox {} bits\", mem::size_of::<usize>() * 8);\n\n if let Some(acpi) = Acpi::new() {\n env.schemes.lock().push(acpi);\n }\n\n *(env.clock_realtime.lock()) = Rtc::new().time();\n\n env.schemes.lock().push(Ps2::new());\n env.schemes.lock().push(Serial::new(0x3F8, 0x4));\n\n pci::pci_init(env);\n\n env.schemes.lock().push(DebugScheme::new());\n env.schemes.lock().push(box DisplayScheme);\n env.schemes.lock().push(box ContextScheme);\n env.schemes.lock().push(box InterruptScheme);\n env.schemes.lock().push(box MemoryScheme);\n env.schemes.lock().push(box 
TestScheme);\n\n env.contexts.lock().enabled = true;\n\n Context::spawn(\"kinit\".to_string(),\n box move || {\n {\n let wd_c = \"file:\/\\0\";\n do_sys_chdir(wd_c.as_ptr()).unwrap();\n\n let stdio_c = \"debug:\\0\";\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n }\n\n if let Err(err) = execute(vec![\"init\".to_string()]) {\n debugln!(\"INIT: Failed to execute: {}\", err);\n }\n });\n },\n None => unreachable!(),\n }\n}\n\n#[cold]\n#[inline(never)]\n#[no_mangle]\n\/\/\/ Take regs for kernel calls and exceptions\npub extern \"cdecl\" fn kernel(interrupt: usize, mut regs: &mut Regs) {\n macro_rules! exception_inner {\n ($name:expr) => ({\n {\n let contexts = ::env().contexts.lock();\n if let Ok(context) = contexts.current() {\n debugln!(\"PID {}: {}\", context.pid, context.name);\n }\n }\n\n debugln!(\" INT {:X}: {}\", interrupt, $name);\n debugln!(\" CS: {:08X} IP: {:08X} FLG: {:08X}\", regs.cs, regs.ip, regs.flags);\n debugln!(\" SS: {:08X} SP: {:08X} BP: {:08X}\", regs.ss, regs.sp, regs.bp);\n debugln!(\" AX: {:08X} BX: {:08X} CX: {:08X} DX: {:08X}\", regs.ax, regs.bx, regs.cx, regs.dx);\n debugln!(\" DI: {:08X} SI: {:08X}\", regs.di, regs.di);\n\n let cr0: usize;\n let cr2: usize;\n let cr3: usize;\n let cr4: usize;\n unsafe {\n asm!(\"mov $0, cr0\" : \"=r\"(cr0) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr2\" : \"=r\"(cr2) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr3\" : \"=r\"(cr3) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr4\" : \"=r\"(cr4) : : : \"intel\", \"volatile\");\n }\n debugln!(\" CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}\", cr0, cr2, cr3, cr4);\n\n let mut fsw: usize = 0;\n let mut fcw: usize = 0;\n unsafe {\n asm!(\"fnstsw $0\" : \"=*m\"(&mut fsw) : : : \"intel\", \"volatile\");\n asm!(\"fnstcw $0\" : \"=*m\"(&mut fcw) : : : \"intel\", \"volatile\");\n }\n debugln!(\" FSW: {:08X} FCW: {:08X}\", fsw, fcw);\n\n let sp = regs.sp as *const u32;\n for y in -15..16 {\n debug!(\" {:>3}:\", y * 8 * 4);\n for x in 0..8 {\n debug!(\" {:08X}\", unsafe { ptr::read(sp.offset(-(x + y * 8))) });\n }\n debug!(\"\\n\");\n }\n })\n };\n\n macro_rules! exception {\n ($name:expr) => ({\n exception_inner!($name);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n macro_rules! exception_error {\n ($name:expr) => ({\n let error = regs.ip;\n regs.ip = regs.cs;\n regs.cs = regs.flags;\n regs.flags = regs.sp;\n regs.sp = regs.ss;\n regs.ss = 0;\n \/\/regs.ss = regs.error;\n\n exception_inner!($name);\n debugln!(\" ERR: {:08X}\", error);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n \/\/Do not catch init interrupt\n if interrupt < 0xFF {\n env().interrupts.lock()[interrupt as usize] += 1;\n }\n\n match interrupt {\n 0x20 => {\n {\n let mut clock_monotonic = env().clock_monotonic.lock();\n *clock_monotonic = *clock_monotonic + PIT_DURATION;\n }\n {\n let mut clock_realtime = env().clock_realtime.lock();\n *clock_realtime = *clock_realtime + PIT_DURATION;\n }\n\n if let Ok(mut current) = env().contexts.lock().current_mut() {\n current.time += 1;\n }\n\n unsafe { context_switch(); }\n }\n i @ 0x21 ... 
0x2F => {\n env().on_irq(i as u8 - 0x20);\n },\n 0x80 => syscall_handle(regs),\n 0xFF => {\n unsafe {\n init(regs.ax);\n idle_loop();\n }\n },\n 0x0 => exception!(\"Divide by zero exception\"),\n 0x1 => exception!(\"Debug exception\"),\n 0x2 => exception!(\"Non-maskable interrupt\"),\n 0x3 => exception!(\"Breakpoint exception\"),\n 0x4 => exception!(\"Overflow exception\"),\n 0x5 => exception!(\"Bound range exceeded exception\"),\n 0x6 => exception!(\"Invalid opcode exception\"),\n 0x7 => exception!(\"Device not available exception\"),\n 0x8 => exception_error!(\"Double fault\"),\n 0x9 => exception!(\"Coprocessor Segment Overrun\"), \/\/ legacy\n 0xA => exception_error!(\"Invalid TSS exception\"),\n 0xB => exception_error!(\"Segment not present exception\"),\n 0xC => exception_error!(\"Stack-segment fault\"),\n 0xD => exception_error!(\"General protection fault\"),\n 0xE => exception_error!(\"Page fault\"),\n 0x10 => exception!(\"x87 floating-point exception\"),\n 0x11 => exception_error!(\"Alignment check exception\"),\n 0x12 => exception!(\"Machine check exception\"),\n 0x13 => exception!(\"SIMD floating-point exception\"),\n 0x14 => exception!(\"Virtualization exception\"),\n 0x1E => exception_error!(\"Security exception\"),\n _ => exception!(\"Unknown Interrupt\"),\n }\n\n if interrupt >= 0x20 && interrupt < 0x30 {\n if interrupt >= 0x28 {\n Pio::<u8>::new(0xA0).write(0x20);\n }\n\n Pio::<u8>::new(0x20).write(0x20);\n }\n}\n<commit_msg>Remove the augmented_assignments feature flag<commit_after>#![crate_name=\"kernel\"]\n#![crate_type=\"staticlib\"]\n#![feature(alloc)]\n#![feature(allocator)]\n#![feature(arc_counts)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(const_fn)]\n#![feature(core_intrinsics)]\n#![feature(core_str_ext)]\n#![feature(core_slice_ext)]\n#![feature(fnbox)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(op_assign_traits)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(unwind_attributes)]\n#![feature(vec_push_all)]\n#![feature(zero_one)]\n#![feature(collections_range)]\n#![no_std]\n\n#![allow(deprecated)]\n#![deny(warnings)]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\nextern crate system;\n\nuse acpi::Acpi;\n\nuse alloc::boxed::Box;\n\nuse arch::context::{context_switch, Context};\nuse arch::memory;\nuse arch::paging::Page;\nuse arch::regs::Regs;\nuse arch::tss::TSS;\n\nuse collections::string::ToString;\n\nuse core::{ptr, mem, usize};\nuse core::slice::SliceExt;\n\nuse common::time::Duration;\n\nuse drivers::pci;\nuse drivers::io::{Io, Pio};\nuse drivers::ps2::*;\nuse drivers::rtc::*;\nuse drivers::serial::*;\n\nuse env::Environment;\n\nuse graphics::display;\n\nuse schemes::context::*;\nuse schemes::debug::*;\nuse schemes::display::*;\nuse schemes::interrupt::*;\nuse schemes::memory::*;\nuse schemes::test::*;\n\nuse syscall::execute::execute;\nuse syscall::{do_sys_chdir, do_sys_exit, do_sys_open, syscall_handle};\n\npub use system::externs::*;\n\n\/\/\/ Common std-like functionality\n#[macro_use]\npub mod common;\n#[macro_use]\npub mod macros;\n\/\/\/ Allocation\npub mod alloc_system;\n\/\/\/ ACPI\npub mod acpi;\n\/\/\/ Architecture dependant\npub mod arch;\n\/\/\/ Audio\npub mod audio;\n\/\/\/ Disk drivers\npub mod disk;\n\/\/\/ Various drivers\npub mod drivers;\n\/\/\/ Environment\npub mod env;\n\/\/\/ Filesystems\npub mod fs;\n\/\/\/ Various graphical methods\npub mod graphics;\n\/\/\/ Networking\npub mod network;\n\/\/\/ Panic\npub mod 
panic;\n\/\/\/ Schemes\npub mod schemes;\n\/\/\/ Synchronization\npub mod sync;\n\/\/\/ System calls\npub mod syscall;\n\/\/\/ USB input\/output\npub mod usb;\n\npub static mut TSS_PTR: Option<&'static mut TSS> = None;\npub static mut ENV_PTR: Option<&'static mut Environment> = None;\n\npub fn env() -> &'static Environment {\n unsafe {\n match ENV_PTR {\n Some(&mut ref p) => p,\n None => unreachable!(),\n }\n }\n}\n\n\/\/\/ Pit duration\nstatic PIT_DURATION: Duration = Duration {\n secs: 0,\n nanos: 4500572,\n};\n\n\/\/\/ Idle loop (active while idle)\nfn idle_loop() {\n loop {\n unsafe { asm!(\"cli\" : : : : \"intel\", \"volatile\"); }\n\n let mut halt = true;\n\n for context in env().contexts.lock().iter().skip(1) {\n if ! context.blocked {\n halt = false;\n break;\n }\n }\n\n if halt {\n unsafe { asm!(\"sti ; hlt\" : : : : \"intel\", \"volatile\"); }\n } else {\n unsafe { asm!(\"sti ; nop\" : : : : \"intel\", \"volatile\"); }\n unsafe { context_switch(); }\n }\n }\n}\n\nextern {\n static mut __text_start: u8;\n static mut __text_end: u8;\n static mut __rodata_start: u8;\n static mut __rodata_end: u8;\n static mut __data_start: u8;\n static mut __data_end: u8;\n static mut __bss_start: u8;\n static mut __bss_end: u8;\n}\n\nstatic BSS_TEST_ZERO: usize = 0;\nstatic BSS_TEST_NONZERO: usize = usize::MAX;\n\n\/\/\/ Initialize kernel\nunsafe fn init(tss_data: usize) {\n\n \/\/ Test\n assume!(true);\n\n \/\/ Zero BSS, this initializes statics that are set to 0\n {\n let start_ptr = &mut __bss_start as *mut u8;\n let end_ptr = & __bss_end as *const u8 as usize;\n\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n memset(start_ptr, 0, size);\n }\n\n assert_eq!(BSS_TEST_ZERO, 0);\n assert_eq!(BSS_TEST_NONZERO, usize::MAX);\n }\n\n \/\/ Setup paging, this allows for memory allocation\n Page::init();\n memory::cluster_init();\n\n \/\/Get the VBE information before unmapping the first megabyte\n display::vbe_init();\n\n \/\/ Unmap first page (TODO: Unmap more)\n {\n let start_ptr = 0;\n let end_ptr = 0x1000;\n\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr as usize + page * 4096).unmap();\n }\n }\n }\n\n \/\/Remap text\n {\n let start_ptr = & __text_start as *const u8 as usize;\n let end_ptr = & __text_end as *const u8 as usize;\n if start_ptr as usize <= end_ptr {\n let size = end_ptr - start_ptr as usize;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr as usize + page * 4096).\n map_kernel_read(start_ptr as usize + page * 4096);\n }\n }\n }\n\n \/\/Remap rodata\n {\n let start_ptr = & __rodata_start as *const u8 as usize;\n let end_ptr = & __rodata_end as *const u8 as usize;\n if start_ptr <= end_ptr {\n let size = end_ptr - start_ptr;\n for page in 0..(size + 4095)\/4096 {\n Page::new(start_ptr + page * 4096).\n map_kernel_read(start_ptr + page * 4096);\n }\n }\n }\n\n TSS_PTR = Some(&mut *(tss_data as *mut TSS));\n ENV_PTR = Some(&mut *Box::into_raw(Environment::new()));\n\n match ENV_PTR {\n Some(ref mut env) => {\n env.contexts.lock().push(Context::root());\n\n env.console.lock().draw = true;\n\n debugln!(\"Redox {} bits\", mem::size_of::<usize>() * 8);\n\n if let Some(acpi) = Acpi::new() {\n env.schemes.lock().push(acpi);\n }\n\n *(env.clock_realtime.lock()) = Rtc::new().time();\n\n env.schemes.lock().push(Ps2::new());\n env.schemes.lock().push(Serial::new(0x3F8, 0x4));\n\n pci::pci_init(env);\n\n env.schemes.lock().push(DebugScheme::new());\n 
env.schemes.lock().push(box DisplayScheme);\n env.schemes.lock().push(box ContextScheme);\n env.schemes.lock().push(box InterruptScheme);\n env.schemes.lock().push(box MemoryScheme);\n env.schemes.lock().push(box TestScheme);\n\n env.contexts.lock().enabled = true;\n\n Context::spawn(\"kinit\".to_string(),\n box move || {\n {\n let wd_c = \"file:\/\\0\";\n do_sys_chdir(wd_c.as_ptr()).unwrap();\n\n let stdio_c = \"debug:\\0\";\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n do_sys_open(stdio_c.as_ptr(), 0).unwrap();\n }\n\n if let Err(err) = execute(vec![\"init\".to_string()]) {\n debugln!(\"INIT: Failed to execute: {}\", err);\n }\n });\n },\n None => unreachable!(),\n }\n}\n\n#[cold]\n#[inline(never)]\n#[no_mangle]\n\/\/\/ Take regs for kernel calls and exceptions\npub extern \"cdecl\" fn kernel(interrupt: usize, mut regs: &mut Regs) {\n macro_rules! exception_inner {\n ($name:expr) => ({\n {\n let contexts = ::env().contexts.lock();\n if let Ok(context) = contexts.current() {\n debugln!(\"PID {}: {}\", context.pid, context.name);\n }\n }\n\n debugln!(\" INT {:X}: {}\", interrupt, $name);\n debugln!(\" CS: {:08X} IP: {:08X} FLG: {:08X}\", regs.cs, regs.ip, regs.flags);\n debugln!(\" SS: {:08X} SP: {:08X} BP: {:08X}\", regs.ss, regs.sp, regs.bp);\n debugln!(\" AX: {:08X} BX: {:08X} CX: {:08X} DX: {:08X}\", regs.ax, regs.bx, regs.cx, regs.dx);\n debugln!(\" DI: {:08X} SI: {:08X}\", regs.di, regs.di);\n\n let cr0: usize;\n let cr2: usize;\n let cr3: usize;\n let cr4: usize;\n unsafe {\n asm!(\"mov $0, cr0\" : \"=r\"(cr0) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr2\" : \"=r\"(cr2) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr3\" : \"=r\"(cr3) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr4\" : \"=r\"(cr4) : : : \"intel\", \"volatile\");\n }\n debugln!(\" CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}\", cr0, cr2, cr3, cr4);\n\n let mut fsw: usize = 0;\n let mut fcw: usize = 0;\n unsafe {\n asm!(\"fnstsw $0\" : \"=*m\"(&mut fsw) : : : \"intel\", \"volatile\");\n asm!(\"fnstcw $0\" : \"=*m\"(&mut fcw) : : : \"intel\", \"volatile\");\n }\n debugln!(\" FSW: {:08X} FCW: {:08X}\", fsw, fcw);\n\n let sp = regs.sp as *const u32;\n for y in -15..16 {\n debug!(\" {:>3}:\", y * 8 * 4);\n for x in 0..8 {\n debug!(\" {:08X}\", unsafe { ptr::read(sp.offset(-(x + y * 8))) });\n }\n debug!(\"\\n\");\n }\n })\n };\n\n macro_rules! exception {\n ($name:expr) => ({\n exception_inner!($name);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n macro_rules! exception_error {\n ($name:expr) => ({\n let error = regs.ip;\n regs.ip = regs.cs;\n regs.cs = regs.flags;\n regs.flags = regs.sp;\n regs.sp = regs.ss;\n regs.ss = 0;\n \/\/regs.ss = regs.error;\n\n exception_inner!($name);\n debugln!(\" ERR: {:08X}\", error);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n \/\/Do not catch init interrupt\n if interrupt < 0xFF {\n env().interrupts.lock()[interrupt as usize] += 1;\n }\n\n match interrupt {\n 0x20 => {\n {\n let mut clock_monotonic = env().clock_monotonic.lock();\n *clock_monotonic = *clock_monotonic + PIT_DURATION;\n }\n {\n let mut clock_realtime = env().clock_realtime.lock();\n *clock_realtime = *clock_realtime + PIT_DURATION;\n }\n\n if let Ok(mut current) = env().contexts.lock().current_mut() {\n current.time += 1;\n }\n\n unsafe { context_switch(); }\n }\n i @ 0x21 ... 
0x2F => {\n env().on_irq(i as u8 - 0x20);\n },\n 0x80 => syscall_handle(regs),\n 0xFF => {\n unsafe {\n init(regs.ax);\n idle_loop();\n }\n },\n 0x0 => exception!(\"Divide by zero exception\"),\n 0x1 => exception!(\"Debug exception\"),\n 0x2 => exception!(\"Non-maskable interrupt\"),\n 0x3 => exception!(\"Breakpoint exception\"),\n 0x4 => exception!(\"Overflow exception\"),\n 0x5 => exception!(\"Bound range exceeded exception\"),\n 0x6 => exception!(\"Invalid opcode exception\"),\n 0x7 => exception!(\"Device not available exception\"),\n 0x8 => exception_error!(\"Double fault\"),\n 0x9 => exception!(\"Coprocessor Segment Overrun\"), \/\/ legacy\n 0xA => exception_error!(\"Invalid TSS exception\"),\n 0xB => exception_error!(\"Segment not present exception\"),\n 0xC => exception_error!(\"Stack-segment fault\"),\n 0xD => exception_error!(\"General protection fault\"),\n 0xE => exception_error!(\"Page fault\"),\n 0x10 => exception!(\"x87 floating-point exception\"),\n 0x11 => exception_error!(\"Alignment check exception\"),\n 0x12 => exception!(\"Machine check exception\"),\n 0x13 => exception!(\"SIMD floating-point exception\"),\n 0x14 => exception!(\"Virtualization exception\"),\n 0x1E => exception_error!(\"Security exception\"),\n _ => exception!(\"Unknown Interrupt\"),\n }\n\n if interrupt >= 0x20 && interrupt < 0x30 {\n if interrupt >= 0x28 {\n Pio::<u8>::new(0xA0).write(0x20);\n }\n\n Pio::<u8>::new(0x20).write(0x20);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Better names<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial commit<commit_after>extern crate time;\n\nuse std::env;\n\n#[derive(Clone, Debug)]\nenum Token {\n DOTTEDDATE, HYPHENDATE,\n HOURMIN,\n INT1DIGIT, INT2DIGIT, INT4DIGIT, INT58DIGIT,\n INT, FLOAT,\n NOW,\n AM, PM,\n NOON, MIDNIGHT, TEATIME,\n SUN, MON, TUE, WED, THU, FRI, SAT,\n TODAY, TOMORROW,\n NEXT,\n MINUTE, HOUR, DAY, WEEK, MONTH, YEAR,\n JAN, FEB, MAR, APR, MAY, JUN, JUL, AUG, SEP, OCT, NOV, DEC,\n UTC,\n PLUS, MINUS\n}\n\nfn usage(name: String) {\n println!(\"Usage: {} [timespec]\", name)\n}\n\nfn main() {\n let args: Vec<_> = env::args().collect();\n if args.len() > 1 {\n let result = parsetime(args[1].clone());\n println!(\"{}\", result.asctime());\n } else {\n usage(args[0].clone());\n }\n}\n\nfn parse_timespec(tokens: Vec<Token>, current_time: time::Tm) -> time::Tm {\n let mut new_time = time::empty_tm();\n for token in tokens {\n match token {\n Token::NOW => {\n new_time = current_time;\n }\n Token::MIDNIGHT => {\n new_time.tm_hour = 0;\n new_time.tm_min = 0;\n }\n Token::NOON => {\n new_time.tm_hour = 12;\n new_time.tm_min = 0;\n }\n Token::TEATIME => {\n new_time.tm_hour = 16;\n new_time.tm_min = 0;\n }\n Token::SUN => {\n new_time.tm_wday = 0;\n }\n Token::MON => {\n new_time.tm_wday = 1;\n }\n Token::TUE => {\n new_time.tm_wday = 2;\n }\n Token::WED => {\n new_time.tm_wday = 3;\n }\n Token::THU => {\n new_time.tm_wday = 4;\n }\n Token::FRI => {\n new_time.tm_wday = 5;\n }\n Token::SAT => {\n new_time.tm_wday = 6;\n }\n Token::JAN => {\n new_time.tm_mon = 0;\n }\n Token::FEB => {\n new_time.tm_mon = 1;\n }\n Token::MAR => {\n new_time.tm_mon = 2;\n }\n Token::APR => {\n new_time.tm_mon = 3;\n }\n Token::MAY => {\n new_time.tm_mon = 4;\n }\n Token::JUN => {\n new_time.tm_mon = 5;\n }\n Token::JUL => {\n new_time.tm_mon = 6;\n }\n Token::AUG => {\n new_time.tm_mon = 7;\n }\n Token::SEP => {\n new_time.tm_mon = 8;\n }\n Token::OCT => {\n new_time.tm_mon = 9;\n }\n Token::NOV => {\n new_time.tm_mon = 10;\n }\n 
Token::DEC => {\n new_time.tm_mon = 11;\n }\n Token::PLUS => {\n new_time = current_time;\n }\n _ => println!(\"Unknown token: {:?}\", token)\n }\n }\n return new_time;\n}\n\nfn increment_time(tokens: Vec<Token>) -> time::Tm {\n inc_dec_time(decrement_tm, tokens)\n}\n\nfn decrement_time(tokens: Vec<Token>) -> time::Tm {\n inc_dec_time(increment_tm, tokens)\n}\n\nfn inc_dec_time(inc_dec: fn(time::Tm, time::Duration) -> time::Tm, tokens:Vec<Token>) -> time::Tm {\n return time::now();\n}\n\nfn increment_tm(a: time::Tm, b: time::Duration) -> time::Tm {\n return a + b;\n}\n\nfn decrement_tm(a: time::Tm, b: time::Duration) -> time::Tm {\n return a - b;\n}\n\nfn tokenize (timespec: String) -> Vec<Token> {\n let mut tokens: Vec<Token> = vec![];\n let exprs = timespec.split(' ');\n for expr in exprs {\n match expr {\n \"now\" => {\n tokens.push(Token::NOW.clone());\n }\n \"midnight\" => {\n tokens.push(Token::MIDNIGHT.clone());\n }\n \"noon\" => {\n tokens.push(Token::NOON.clone());\n }\n \"teatime\" => {\n tokens.push(Token::TEATIME.clone());\n }\n \"sun\" | \"sunday\" => {\n tokens.push(Token::SUN.clone());\n }\n \"mon\" | \"monday\" => {\n tokens.push(Token::MON.clone());\n }\n \"tue\" | \"tuesday\" => {\n tokens.push(Token::TUE.clone());\n }\n \"wed\" | \"wednesday\" => {\n tokens.push(Token::WED.clone());\n }\n \"thu\" | \"thursday\" => {\n tokens.push(Token::TUE.clone());\n }\n \"fri\" | \"friday\" => {\n tokens.push(Token::FRI.clone());\n }\n \"sat\" | \"saturday\" => {\n tokens.push(Token::SAT.clone());\n }\n \"jan\" | \"january\" => {\n tokens.push(Token::JAN.clone());\n }\n \"feb\" | \"february\" => {\n tokens.push(Token::FEB.clone());\n }\n \"mar\" | \"march\" => {\n tokens.push(Token::MAR.clone());\n }\n \"apr\" | \"april\" => {\n tokens.push(Token::APR.clone());\n }\n \"may\" => {\n tokens.push(Token::MAY.clone());\n }\n \"jun\" | \"june\" => {\n tokens.push(Token::JUN.clone());\n }\n \"jul\" | \"july\" => {\n tokens.push(Token::JUL.clone());\n }\n \"aug\" | \"august\" => {\n tokens.push(Token::AUG.clone());\n }\n \"sep\" | \"september\" => {\n tokens.push(Token::SEP.clone());\n }\n \"oct\" | \"october\" => {\n tokens.push(Token::OCT.clone());\n }\n \"nov\" | \"november\" => {\n tokens.push(Token::NOV.clone());\n }\n \"dec\" | \"december\" => {\n tokens.push(Token::DEC.clone());\n }\n \"+\" => {\n tokens.push(Token::PLUS.clone());\n }\n _ => {\n }\n }\n }\n return tokens;\n}\n\nfn parsetime (timespec: String) -> time::Tm {\n let now = time::now();\n\n let tokens = tokenize(timespec);\n let time = parse_timespec(tokens, now);\n\n return time;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>formatting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added default values for host, port, and password<commit_after><|endoftext|>"} {"text":"<commit_before>#![forbid(unsafe_code)]\n\nextern crate bincode;\nextern crate clap;\nextern crate serde;\n#[macro_use]\nextern crate serde_derive;\nextern crate actix_web;\nextern crate actix_net;\nextern crate binascii;\nextern crate toml;\n#[macro_use]\nextern crate log;\nextern crate bzip2;\nextern crate fern;\nextern crate num_cpus;\nextern crate serde_json;\nextern crate futures;\n#[macro_use]\nextern crate lazy_static;\n\nmod config;\nmod server;\nmod stackvec;\nmod tracker;\nmod webserver;\n\nuse config::Configuration;\nuse std::process::exit;\n\nlazy_static!{\n static ref term_mutex: std::sync::Arc<std::sync::atomic::AtomicBool> = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));\n}\n\nfn setup_logging(cfg: 
&Configuration) {\n let log_level = match cfg.get_log_level() {\n None => log::LevelFilter::Info,\n Some(level) => match level.as_str() {\n \"off\" => log::LevelFilter::Off,\n \"trace\" => log::LevelFilter::Trace,\n \"debug\" => log::LevelFilter::Debug,\n \"info\" => log::LevelFilter::Info,\n \"warn\" => log::LevelFilter::Warn,\n \"error\" => log::LevelFilter::Error,\n _ => {\n eprintln!(\"udpt: unknown log level encountered '{}'\", level.as_str());\n exit(-1);\n }\n },\n };\n\n if let Err(err) = fern::Dispatch::new()\n .format(|out, message, record| {\n out.finish(format_args!(\n \"{}[{}][{}]\\t{}\",\n std::time::SystemTime::now()\n .duration_since(std::time::UNIX_EPOCH)\n .unwrap()\n .as_secs(),\n record.target(),\n record.level(),\n message\n ))\n })\n .level(log_level)\n .chain(std::io::stdout())\n .apply()\n {\n eprintln!(\"udpt: failed to initialize logging. {}\", err);\n std::process::exit(-1);\n }\n info!(\"logging initialized.\");\n}\n\nfn signal_termination() {\n term_mutex.store(true, std::sync::atomic::Ordering::Relaxed);\n}\n\nfn main() {\n let parser = clap::App::new(\"udpt\")\n .about(\"High performance, lightweight, udp based torrent tracker.\")\n .author(\"Naim A. <naim94a@gmail.com>\")\n .arg(\n clap::Arg::with_name(\"config\")\n .takes_value(true)\n .short(\"-c\")\n .help(\"Configuration file to load.\")\n .required(true),\n );\n\n let matches = parser.get_matches();\n let cfg_path = matches.value_of(\"config\").unwrap();\n\n let cfg = match Configuration::load_file(cfg_path) {\n Ok(v) => std::sync::Arc::new(v),\n Err(e) => {\n eprintln!(\"udpt: failed to open configuration: {}\", e);\n return;\n }\n };\n\n setup_logging(&cfg);\n\n let tracker_obj = match cfg.get_db_path() {\n Some(path) => {\n let file_path = std::path::Path::new(path);\n if !file_path.exists() {\n warn!(\"database file \\\"{}\\\" doesn't exist.\", path);\n tracker::TorrentTracker::new(cfg.get_mode().clone())\n }\n else {\n let mut input_file = match std::fs::File::open(file_path) {\n Ok(v) => v,\n Err(err) => {\n error!(\"failed to open \\\"{}\\\". error: {}\", path.as_str(), err);\n panic!(\"error opening file. check logs.\");\n }\n };\n match tracker::TorrentTracker::load_database(cfg.get_mode().clone(), &mut input_file) {\n Ok(v) => v,\n Err(err) => {\n error!(\"failed to load database. error: {}\", err);\n panic!(\"failed to load database. check logs.\");\n }\n }\n }\n }\n None => tracker::TorrentTracker::new(cfg.get_mode().clone()),\n };\n\n let mut threads = Vec::new();\n\n let tracker = std::sync::Arc::new(tracker_obj);\n\n let http_server = if cfg.get_http_config().is_some() {\n let http_tracker_ref = tracker.clone();\n let cfg_ref = cfg.clone();\n\n Some(webserver::WebServer::new(http_tracker_ref, cfg_ref))\n } else {\n None\n };\n\n let udp_server = std::sync::Arc::new(server::UDPTracker::new(cfg.clone(), tracker.clone()).unwrap());\n\n trace!(\"Waiting for UDP packets\");\n let logical_cpus = num_cpus::get();\n for i in 0..logical_cpus {\n debug!(\"starting thread {}\/{}\", i + 1, logical_cpus);\n let server_handle = udp_server.clone();\n let thread_term_ref = term_mutex.clone();\n threads.push(std::thread::spawn(move || loop {\n match server_handle.accept_packet() {\n Err(e) => {\n if thread_term_ref.load(std::sync::atomic::Ordering::Relaxed) == true {\n debug!(\"Thread terminating...\");\n break;\n }\n match e.kind() {\n std::io::ErrorKind::TimedOut => {},\n std::io::ErrorKind::WouldBlock => {},\n _ => {\n error!(\"Failed to process packet. 
{}\", e);\n }\n }\n }\n Ok(_) => {}\n }\n }));\n }\n\n match cfg.get_db_path() {\n Some(db_path) => {\n let db_p = db_path.clone();\n let tracker_clone = tracker.clone();\n let cleanup_interval = match *cfg.get_cleanup_interval() {\n Some(v) => v,\n None => 10 * 60,\n };\n\n let thread_term_mutex = term_mutex.clone();\n threads.push(std::thread::spawn(move || {\n let timeout = std::time::Duration::new(cleanup_interval, 0);\n\n let timeout_start = std::time::Instant::now();\n let mut timeout_remaining = timeout;\n loop {\n std::thread::park_timeout(std::time::Duration::new(cleanup_interval, 0));\n\n if thread_term_mutex.load(std::sync::atomic::Ordering::Relaxed) {\n debug!(\"Maintenance thread terminating.\");\n break;\n }\n\n let elapsed = std::time::Instant::now() - timeout_start;\n if elapsed < timeout_remaining {\n timeout_remaining = timeout - elapsed;\n continue;\n }\n else {\n timeout_remaining = timeout;\n }\n\n debug!(\"periodically saving database.\");\n tracker_clone.periodic_task(db_p.as_str());\n debug!(\"database saved.\");\n }\n }));\n },\n None => {}\n }\n\n loop {\n if term_mutex.load(std::sync::atomic::Ordering::Relaxed) {\n \/\/ termination signaled. start cleanup.\n break;\n }\n std::thread::sleep(std::time::Duration::from_secs(1));\n }\n\n match http_server {\n Some(v) => v.shutdown(),\n None => {},\n };\n\n while !threads.is_empty() {\n if let Some(thread) = threads.pop() {\n thread.thread().unpark();\n let _ = thread.join();\n }\n }\n}\n<commit_msg>write db on termination<commit_after>#![forbid(unsafe_code)]\n\nextern crate bincode;\nextern crate clap;\nextern crate serde;\n#[macro_use]\nextern crate serde_derive;\nextern crate actix_web;\nextern crate actix_net;\nextern crate binascii;\nextern crate toml;\n#[macro_use]\nextern crate log;\nextern crate bzip2;\nextern crate fern;\nextern crate num_cpus;\nextern crate serde_json;\nextern crate futures;\n#[macro_use]\nextern crate lazy_static;\n\nmod config;\nmod server;\nmod stackvec;\nmod tracker;\nmod webserver;\n\nuse config::Configuration;\nuse std::process::exit;\n\nlazy_static!{\n static ref term_mutex: std::sync::Arc<std::sync::atomic::AtomicBool> = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));\n}\n\nfn setup_logging(cfg: &Configuration) {\n let log_level = match cfg.get_log_level() {\n None => log::LevelFilter::Info,\n Some(level) => match level.as_str() {\n \"off\" => log::LevelFilter::Off,\n \"trace\" => log::LevelFilter::Trace,\n \"debug\" => log::LevelFilter::Debug,\n \"info\" => log::LevelFilter::Info,\n \"warn\" => log::LevelFilter::Warn,\n \"error\" => log::LevelFilter::Error,\n _ => {\n eprintln!(\"udpt: unknown log level encountered '{}'\", level.as_str());\n exit(-1);\n }\n },\n };\n\n if let Err(err) = fern::Dispatch::new()\n .format(|out, message, record| {\n out.finish(format_args!(\n \"{}[{}][{}]\\t{}\",\n std::time::SystemTime::now()\n .duration_since(std::time::UNIX_EPOCH)\n .unwrap()\n .as_secs(),\n record.target(),\n record.level(),\n message\n ))\n })\n .level(log_level)\n .chain(std::io::stdout())\n .apply()\n {\n eprintln!(\"udpt: failed to initialize logging. 
{}\", err);\n std::process::exit(-1);\n }\n info!(\"logging initialized.\");\n}\n\nfn signal_termination() {\n term_mutex.store(true, std::sync::atomic::Ordering::Relaxed);\n}\n\nfn main() {\n let parser = clap::App::new(env!(\"CARGO_PKG_NAME\"))\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n .author(env!(\"CARGO_PKG_AUTHORS\"))\n .version(env!(\"CARGO_PKG_VERSION\"))\n .arg(\n clap::Arg::with_name(\"config\")\n .takes_value(true)\n .short(\"-c\")\n .help(\"Configuration file to load.\")\n .required(true),\n );\n\n let matches = parser.get_matches();\n let cfg_path = matches.value_of(\"config\").unwrap();\n\n let cfg = match Configuration::load_file(cfg_path) {\n Ok(v) => std::sync::Arc::new(v),\n Err(e) => {\n eprintln!(\"udpt: failed to open configuration: {}\", e);\n return;\n }\n };\n\n setup_logging(&cfg);\n\n let tracker_obj = match cfg.get_db_path() {\n Some(path) => {\n let file_path = std::path::Path::new(path);\n if !file_path.exists() {\n warn!(\"database file \\\"{}\\\" doesn't exist.\", path);\n tracker::TorrentTracker::new(cfg.get_mode().clone())\n }\n else {\n let mut input_file = match std::fs::File::open(file_path) {\n Ok(v) => v,\n Err(err) => {\n error!(\"failed to open \\\"{}\\\". error: {}\", path.as_str(), err);\n panic!(\"error opening file. check logs.\");\n }\n };\n match tracker::TorrentTracker::load_database(cfg.get_mode().clone(), &mut input_file) {\n Ok(v) => v,\n Err(err) => {\n error!(\"failed to load database. error: {}\", err);\n panic!(\"failed to load database. check logs.\");\n }\n }\n }\n }\n None => tracker::TorrentTracker::new(cfg.get_mode().clone()),\n };\n\n let mut threads = Vec::new();\n\n let tracker = std::sync::Arc::new(tracker_obj);\n\n let http_server = if cfg.get_http_config().is_some() {\n let http_tracker_ref = tracker.clone();\n let cfg_ref = cfg.clone();\n\n Some(webserver::WebServer::new(http_tracker_ref, cfg_ref))\n } else {\n None\n };\n\n let udp_server = std::sync::Arc::new(server::UDPTracker::new(cfg.clone(), tracker.clone()).unwrap());\n\n trace!(\"Waiting for UDP packets\");\n let logical_cpus = num_cpus::get();\n for i in 0..logical_cpus {\n debug!(\"starting thread {}\/{}\", i + 1, logical_cpus);\n let server_handle = udp_server.clone();\n let thread_term_ref = term_mutex.clone();\n threads.push(std::thread::spawn(move || loop {\n match server_handle.accept_packet() {\n Err(e) => {\n if thread_term_ref.load(std::sync::atomic::Ordering::Relaxed) == true {\n debug!(\"Thread terminating...\");\n break;\n }\n match e.kind() {\n std::io::ErrorKind::TimedOut => {},\n std::io::ErrorKind::WouldBlock => {},\n _ => {\n error!(\"Failed to process packet. 
{}\", e);\n }\n }\n }\n Ok(_) => {}\n }\n }));\n }\n\n match cfg.get_db_path() {\n Some(db_path) => {\n let db_p = db_path.clone();\n let tracker_clone = tracker.clone();\n let cleanup_interval = match *cfg.get_cleanup_interval() {\n Some(v) => v,\n None => 10 * 60,\n };\n\n let thread_term_mutex = term_mutex.clone();\n threads.push(std::thread::spawn(move || {\n let timeout = std::time::Duration::new(cleanup_interval, 0);\n\n let timeout_start = std::time::Instant::now();\n let mut timeout_remaining = timeout;\n loop {\n std::thread::park_timeout(std::time::Duration::new(cleanup_interval, 0));\n\n if thread_term_mutex.load(std::sync::atomic::Ordering::Relaxed) {\n debug!(\"Maintenance thread terminating.\");\n break;\n }\n\n let elapsed = std::time::Instant::now() - timeout_start;\n if elapsed < timeout_remaining {\n timeout_remaining = timeout - elapsed;\n continue;\n }\n else {\n timeout_remaining = timeout;\n }\n\n debug!(\"periodically saving database.\");\n tracker_clone.periodic_task(db_p.as_str());\n debug!(\"database saved.\");\n }\n }));\n },\n None => {}\n }\n\n loop {\n if term_mutex.load(std::sync::atomic::Ordering::Relaxed) {\n \/\/ termination signaled. start cleanup.\n break;\n }\n std::thread::sleep(std::time::Duration::from_secs(1));\n }\n\n match http_server {\n Some(v) => v.shutdown(),\n None => {},\n };\n\n while !threads.is_empty() {\n if let Some(thread) = threads.pop() {\n thread.thread().unpark();\n let _ = thread.join();\n }\n }\n\n if let Some(db_path) = cfg.get_db_path() {\n info!(\"running final cleanup & saving database...\");\n tracker.periodic_task(db_path.as_str());\n }\n info!(\"goodbye.\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>learning how to use proc properly<commit_after>\/*\n closu2.rs\n demonstrating closures\n vanilla\n*\/\n\nfn main(){\n fn call_twice(f: ||) { \n f(); \n f(); \n }\n \n let closure = || { \"I'm a closure, and it doesn't matter what type I am\"; };\n\n fn function() { \n \"I'm a normal function\"; \n }\n\n call_twice(|| println!(\"{:?}\",function()));\n\n call_twice(|| println!(\"{:?}\",\"pewpew\"));\n\n call_twice(proc(){println!(\"pew\")});\n\n call_twice(closure);\n call_twice(function);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>complete custom dictionary test<commit_after>#![cfg(test)]\n#![allow(non_upper_case_globals)]\n#![allow(dead_code)]\nextern crate core;\nextern crate brotli_decompressor;\nuse brotli_decompressor::{CustomRead, CustomWrite};\nuse super::brotli::enc::BrotliEncoderParams;\nuse super::brotli::concat::{BroCatli, BroCatliResult};\nuse std::io::{Read, Write};\nuse super::integration_tests::UnlimitedBuffer;\nstatic RANDOM_THEN_UNICODE : &'static [u8] = include_bytes!(\"..\/..\/testdata\/random_then_unicode\");\nstatic ALICE: &'static[u8] = include_bytes!(\"..\/..\/testdata\/alice29.txt\");\nuse super::Rebox;\n\n#[test]\nfn test_custom_dict() {\n let mut raw = UnlimitedBuffer::new(ALICE);\n let mut params = BrotliEncoderParams::default();\n params.quality = 10;\n let mut br = UnlimitedBuffer::new(&[]);\n let mut rt = UnlimitedBuffer::new(&[]);\n let dict = &ALICE[12515..23411];\n super::compress(&mut raw, &mut br, 4096, ¶ms, dict).unwrap();\n raw.reset_read();\n let mut vec = Vec::<u8>::new();\n vec.extend(dict);\n super::decompress(&mut br, &mut rt, 4096, Rebox::from(vec)).unwrap();\n assert_eq!(rt.data(), raw.data());\n assert_eq!(br.data().len(), 43629);\n}\n\n#[test]\nfn test_custom_wrong_dict_fails() {\n let mut raw = UnlimitedBuffer::new(ALICE);\n let mut params = 
BrotliEncoderParams::default();\n params.quality = 10;\n let mut br = UnlimitedBuffer::new(&[]);\n let mut rt = UnlimitedBuffer::new(&[]);\n let dict = &ALICE[12515..19515];\n super::compress(&mut raw, &mut br, 4096, ¶ms, dict).unwrap();\n raw.reset_read();\n let mut vec = Vec::<u8>::new();\n vec.extend(&dict[1..]); \/\/ slightly offset dictionary to be wrong, and ensure the dict was being used above\n match super::decompress(&mut br, &mut rt, 4096, Rebox::from(vec)) {\n Ok(_) => panic!(\"Decompression should have failed\"),\n Err(_) => {},\n }\n if rt.data() == raw.data() {\n panic!(\"they should be unequal\");\n }\n}\n\n#[test]\nfn test_custom_wrong_dict_fails_but_doesnt_disrupt_compression_strategy() {\n let mut raw = UnlimitedBuffer::new(ALICE);\n let mut params = BrotliEncoderParams::default();\n params.quality = 6;\n let mut br = UnlimitedBuffer::new(&[]);\n let mut rt = UnlimitedBuffer::new(&[]);\n let dict = &ALICE[12515..19515];\n super::compress(&mut raw, &mut br, 4096, ¶ms, dict).unwrap();\n raw.reset_read();\n let mut vec = Vec::<u8>::new();\n vec.extend(&dict[1..]); \/\/ slightly offset dictionary to be wrong, and ensure the dict was being used above\n super::decompress(&mut br, &mut rt, 4096, Rebox::from(vec)).unwrap();\n if rt.data() == raw.data() {\n panic!(\"they should be unequal\");\n }\n}\n\n\n#[test]\nfn test_custom_dict_for_multithreading() {\n let mut raws = [UnlimitedBuffer::new(&ALICE[..ALICE.len()\/3]),\n UnlimitedBuffer::new(&ALICE[ALICE.len()\/3..2*ALICE.len()\/3]),\n UnlimitedBuffer::new(&ALICE[2 * ALICE.len()\/3..]),\n ];\n let mut params = BrotliEncoderParams::default();\n params.quality = 10;\n params.appendable = true;\n let mut brs = [\n UnlimitedBuffer::new(&[]),\n UnlimitedBuffer::new(&[]),\n UnlimitedBuffer::new(&[]),\n ];\n let mut rts = [\n UnlimitedBuffer::new(&[]),\n UnlimitedBuffer::new(&[]),\n UnlimitedBuffer::new(&[]),\n ];\n let dicts = [\n &[],\n &ALICE[..ALICE.len()\/3],\n &ALICE[..2*ALICE.len()\/3],\n ];\n for (raw, (br, (rt, dict))) in raws.iter_mut().zip(brs.iter_mut().zip(rts.iter_mut().zip(dicts.iter()))) {\n super::compress(raw, br, 4096, ¶ms, dict).unwrap();\n raw.reset_read();\n let mut vec = Vec::<u8>::new();\n vec.extend(*dict);\n super::decompress(br, rt, 4096, Rebox::from(vec)).unwrap();\n assert_eq!(rt.data(), raw.data());\n params.catable = true;\n }\n let mut bro_cat_li = BroCatli::new();\n let mut output = UnlimitedBuffer::new(&[]);\n let mut ibuffer = vec![0u8; 1];\n let mut obuffer = vec![0u8; 1];\n let mut ooffset = 0usize;\n for brotli in brs.iter_mut() {\n brotli.reset_read();\n bro_cat_li.new_brotli_file();\n let mut input = brotli;\n loop {\n let mut ioffset = 0usize;\n match input.read(&mut ibuffer[..]) {\n Err(e) => panic!(e),\n Ok(cur_read) => {\n if cur_read == 0 {\n break;\n }\n loop {\n match bro_cat_li.stream(&ibuffer[..cur_read], &mut ioffset,\n &mut obuffer[..], &mut ooffset) {\n BroCatliResult::NeedsMoreOutput => {\n match output.write(&obuffer[..ooffset]) {\n Err(why) => panic!(\"couldn't write: {:}\", why),\n Ok(count) => {assert_eq!(count, ooffset);},\n }\n ooffset = 0;\n },\n BroCatliResult::NeedsMoreInput => {\n break;\n },\n BroCatliResult::Success => {\n panic!(\"Unexpected state: Success when streaming before finish\");\n },\n failure => {\n panic!(failure);\n },\n }\n }\n }\n }\n }\n }\n loop {\n match bro_cat_li.finish(&mut obuffer[..], &mut ooffset) {\n BroCatliResult::NeedsMoreOutput => {\n match output.write(&obuffer[..ooffset]) {\n Err(why) => panic!(\"couldn't write\\n{:}\", why),\n Ok(count) => 
{assert_eq!(count, ooffset);},\n }\n ooffset = 0;\n },\n BroCatliResult::NeedsMoreInput => {\n panic!(\"Unexpected EOF\");\n },\n BroCatliResult::Success => {\n if ooffset != 0 {\n match output.write(&obuffer[..ooffset]) {\n Err(why) => panic!(\"couldn't write\\n{:}\", why),\n Ok(count) => {assert_eq!(count, ooffset);},\n }\n }\n break;\n }\n failure => {\n panic!(failure)\n }\n }\n }\n let mut rt = UnlimitedBuffer::new(&[]);\n output.reset_read();\n super::decompress(&mut output, &mut rt, 4096, Rebox::default()).unwrap();\n assert_eq!(rt.data(), ALICE);\n assert_eq!(output.data().len(), 48564); \/\/ as opposed to 46487 with standard settings\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>\/*\n This file is part of trig-rs, a library for doing typesafe trigonometry\n with a variety of angle formats (radians, degrees, grad, turns, and so on).\n*\/\n\n#![crate_name = \"trig\"]\n#![comment = \"Provides trigonometric primitives.\"]\n#![crate_type = \"dylib\"]\n#![crate_type = \"rlib\"]\n\n#![unstable]\n#![feature(macro_rules)]\n#![feature(struct_variant)]\n\nuse std::fmt;\n\n\/*\n Top-level functions.\n*\/\n\n\/\/\/ Calculate the sine.\n#[stable] #[inline] pub fn sin<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.sin() }\n\n\/\/\/ Calculate the cosine.\n#[stable] #[inline] pub fn cos<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.cos() }\n\n\/\/\/ Calculate the tangent.\n#[stable] #[inline] pub fn tan<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.tan() }\n\n\/\/\/ Calculate the arcsine (in radians).\n#[inline] pub fn asin<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.asin()) }\n\n\/\/\/ Calculate the arccosine (in radians).\n#[inline] pub fn acos<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.acos()) }\n\n\/\/\/ Calculate the arctangent (in radians).\n#[inline] pub fn atan<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.atan()) }\n\n\/*\n The Trigonometry trait.\n*\/\n\n\/\/\/ Represents an object for which trigonometric methods are sensible and return\n\/\/\/ values of type `S`.\n#[stable]\npub trait Trigonometry<S> {\n \/\/\/ Compute the sine of the object.\n fn sin(&self) -> S;\n \/\/\/ Compute the cosine of the object.\n fn cos(&self) -> S;\n \/\/\/ Compute the tangent of the object.\n fn tan(&self) -> S;\n \/\/ \/\/\/ Compute the cosecant of the object.\n \/\/ fn csc(&self) -> S;\n \/\/ \/\/\/ Compute the secant of the object.\n \/\/ fn sec(&self) -> S;\n \/\/ \/\/\/ Compute the cotangent of the object.\n \/\/ fn cot(&self) -> S;\n}\n\n\/*\n The Angle enum and its implementations.\n*\/\n\n\/\/\/ Base floating point types\npub trait BaseFloat: Primitive + FromPrimitive + fmt::Show + fmt::Float + Float + FloatMath {}\n\nimpl BaseFloat for f32 {}\nimpl BaseFloat for f64 {}\n\n\/\/\/ Encompasses representations of angles in the Euclidean plane.\n#[deriving(Clone, PartialEq, PartialOrd, Hash)]\npub enum Angle<S> {\n \/\/\/ An angle in radians.\n #[stable] Rad(S),\n \/\/\/ An angle in degrees.\n #[stable] Deg(S),\n \/\/\/ An angle in [gradians](http:\/\/en.wikipedia.org\/wiki\/Grad_(angle)).\n #[stable] Grad(S),\n \/\/\/ An angle in [turns](http:\/\/en.wikipedia.org\/wiki\/Turn_(geometry)).\n #[stable] Turn(S),\n \/\/\/ An angle as it would appear on the face of a clock.\n #[experimental] Clock {\n \/\/\/ The hours portion.\n pub hour: S,\n \/\/\/ The minutes portion.\n pub minute: S,\n \/\/\/ The seconds portion.\n pub second: S\n },\n}\n \nimpl<S: BaseFloat + Mul<S, S> + Div<S, S> + Rem<S, S>> Angle<S> {\n \/\/\/ Returns an angle in radians.\n pub fn radians(s: S) -> 
Angle<S> { Rad(s % Float::two_pi()) }\n \n \/\/\/ Returns an angle in degrees.\n pub fn degrees(s: S) -> Angle<S> { Deg(s % FromPrimitive::from_f64(360.0).unwrap()) }\n\n \/\/\/ Returns an angle in gradians.\n pub fn gradians(s: S) -> Angle<S> { Grad(s % FromPrimitive::from_f64(400.0).unwrap()) }\n\n \/\/\/ Returns an angle in turns.\n pub fn turns(s: S) -> Angle<S> { Turn(s.fract()) }\n\n \/\/\/ Returns an angle as it would appear on a clock.\n pub fn clock_face(hour: S, minute: S, second: S) -> Angle<S> {\n Clock { hour: hour, minute: minute, second: second }\n }\n\n \/\/\/ Converts an angle to radians.\n pub fn to_radians(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::radians(val),\n &Deg(val) => Angle::radians(val.to_radians()),\n &Grad(val) => Angle::radians(val * Float::pi() \/ FromPrimitive::from_f64(200.0).unwrap()),\n &Turn(val) => Angle::radians(val * Float::two_pi()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to degrees.\n pub fn to_degrees(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::degrees(val.to_degrees()),\n &Deg(val) => Angle::degrees(val),\n &Grad(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0 \/ 400.0).unwrap()),\n &Turn(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0).unwrap()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to gradians.\n pub fn to_gradians(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::gradians(val \/ Float::pi() * FromPrimitive::from_f64(200.0).unwrap()),\n &Deg(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0 \/ 360.0).unwrap()),\n &Grad(val) => Angle::gradians(val),\n &Turn(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0).unwrap()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to turns.\n pub fn to_turns(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::turns(val \/ Float::two_pi()),\n &Deg(val) => Angle::turns(val \/ FromPrimitive::from_f64(360.0).unwrap()),\n &Grad(val) => Angle::turns(val \/ FromPrimitive::from_f64(400.0).unwrap()),\n &Turn(val) => Angle::turns(val),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ One half of the domain. In radians, this is `π`.\n pub fn half() -> Angle<S> { Rad(Float::pi()) }\n\n \/\/\/ One quarter of the domain. In radians, this is `π\/2`.\n pub fn quarter() -> Angle<S> { Rad(Float::frac_pi_2()) }\n\n \/\/\/ One sixth of the domain. In radians, this is `π\/3`.\n pub fn sixth() -> Angle<S> { Rad(Float::frac_pi_3()) }\n\n \/\/\/ One eighth of the domain. In radians, this is `π\/4`.\n pub fn eighth() -> Angle<S> { Rad(Float::frac_pi_4()) }\n}\n\nmacro_rules! angle_trigonometry (\n ($($method:ident),+ ) => (\n impl<S: BaseFloat> Trigonometry<S> for Angle<S> {\n $(fn $method(&self) -> S {\n match self {\n &Rad(val) => val.$method(),\n &Deg(val) => val.to_radians().$method(),\n _ => fail!(\"Not yet implemented.\")\n }\n }\n )+\n }\n )\n)\n\nangle_trigonometry!(sin, cos, tan)\n\nmacro_rules! 
angle_ops (\n ($Trait:ident, $method:ident) => (\n impl<S: BaseFloat> $Trait<Angle<S>, Angle<S>> for Angle<S> {\n #[inline]\n fn $method(&self, other: &Angle<S>) -> Angle<S> {\n match (self, other) {\n (&Deg(s1), &Deg(s2)) => Angle::degrees(s1.$method(&s2)),\n (&Deg(s1), &Rad(s2)) => Angle::degrees(s1.$method(&s2.to_degrees())),\n (&Rad(s1), &Rad(s2)) => Angle::radians(s1.$method(&s2)),\n (&Rad(s1), &Deg(s2)) => Angle::radians(s1.$method(&s2.to_radians())),\n _ => fail!(\"Not yet implemented.\")\n }\n }\n }\n )\n)\n\nangle_ops!(Add, add)\nangle_ops!(Sub, sub)\nangle_ops!(Mul, mul)\nangle_ops!(Div, div)\n\nimpl<S: BaseFloat + fmt::Show> fmt::Show for Angle<S> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n &Deg(val) => write!(f, \"{}°\", val),\n &Rad(val) => write!(f, \"{} rad\", val),\n &Grad(val) => write!(f, \"{} gon\", val),\n &Turn(val) => write!(f, \"{} turns\", val),\n _ => fail!(\"Not yet implemented.\")\n }\n }\n}\n\n\/*\n Test suite.\n*\/\n\n#[cfg(test)]\nmod test {\n use super::Angle;\n\n #[test]\n fn test_conversion() {\n let half: Angle<f64> = Angle::half();\n assert_eq!(half.to_degrees().to_gradians().to_turns().to_radians(), half);\n assert_eq!(half.to_turns().to_gradians().to_degrees().to_radians(), half);\n assert_eq!(half.to_degrees().to_turns().to_gradians().to_radians(), half);\n assert_eq!(half.to_gradians().to_radians(), half);\n }\n\n #[test]\n fn test_operators() {\n assert_eq!(Angle::degrees(100.0f64) + Angle::degrees(100.0f64), Angle::degrees(200.0f64));\n assert_eq!(Angle::degrees(100.0f64) - Angle::degrees(100.0f64), Angle::degrees(0.0f64));\n assert_eq!(Angle::degrees(100.0f64) + Angle::radians(0.0f64), Angle::degrees(100.0f64));\n assert_eq!(Angle::radians(1.0f64) - Angle::degrees(0.0f64), Angle::radians(1.0f64));\n assert_eq!(Angle::degrees(2.0f64) * Angle::degrees(100.0f64), Angle::degrees(200.0f64));\n }\n}\n<commit_msg>Implements Add and Sub by hand as concatenate operations.<commit_after>\/*\n This file is part of trig-rs, a library for doing typesafe trigonometry\n with a variety of angle formats (radians, degrees, grad, turns, and so on).\n*\/\n\n#![crate_name = \"trig\"]\n#![comment = \"Provides trigonometric primitives.\"]\n#![crate_type = \"dylib\"]\n#![crate_type = \"rlib\"]\n\n#![unstable]\n#![feature(macro_rules)]\n#![feature(struct_variant)]\n\nuse std::fmt;\n\n\/*\n Top-level functions.\n*\/\n\n\/\/\/ Calculate the sine.\n#[stable] #[inline] pub fn sin<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.sin() }\n\n\/\/\/ Calculate the cosine.\n#[stable] #[inline] pub fn cos<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.cos() }\n\n\/\/\/ Calculate the tangent.\n#[stable] #[inline] pub fn tan<S: BaseFloat, T: Trigonometry<S>>(t: T) -> S { t.tan() }\n\n\/\/\/ Calculate the arcsine (in radians).\n#[inline] pub fn asin<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.asin()) }\n\n\/\/\/ Calculate the arccosine (in radians).\n#[inline] pub fn acos<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.acos()) }\n\n\/\/\/ Calculate the arctangent (in radians).\n#[inline] pub fn atan<S: BaseFloat>(s: S) -> Angle<S> { Angle::radians(s.atan()) }\n\n\/*\n The Trigonometry trait.\n*\/\n\n\/\/\/ Represents an object for which trigonometric methods are sensible and return\n\/\/\/ values of type `S`.\n#[stable]\npub trait Trigonometry<S> {\n \/\/\/ Compute the sine of the object.\n fn sin(&self) -> S;\n \/\/\/ Compute the cosine of the object.\n fn cos(&self) -> S;\n \/\/\/ Compute the tangent of the object.\n fn tan(&self) -> S;\n 
\/\/ \/\/\/ Compute the cosecant of the object.\n \/\/ fn csc(&self) -> S;\n \/\/ \/\/\/ Compute the secant of the object.\n \/\/ fn sec(&self) -> S;\n \/\/ \/\/\/ Compute the cotangent of the object.\n \/\/ fn cot(&self) -> S;\n}\n\n\/*\n The Angle enum and its implementations.\n*\/\n\n\/\/\/ Base floating point types\npub trait BaseFloat: Primitive + FromPrimitive + fmt::Show + fmt::Float + Float + FloatMath {}\n\nimpl BaseFloat for f32 {}\nimpl BaseFloat for f64 {}\n\n\/\/\/ Encompasses representations of angles in the Euclidean plane.\n#[deriving(Clone, PartialEq, PartialOrd, Hash)]\npub enum Angle<S> {\n \/\/\/ An angle in radians.\n #[stable] Rad(S),\n \/\/\/ An angle in degrees.\n #[stable] Deg(S),\n \/\/\/ An angle in [gradians](http:\/\/en.wikipedia.org\/wiki\/Grad_(angle)).\n #[stable] Grad(S),\n \/\/\/ An angle in [turns](http:\/\/en.wikipedia.org\/wiki\/Turn_(geometry)).\n #[stable] Turn(S),\n \/\/\/ An angle as it would appear on the face of a clock.\n #[experimental] Clock {\n \/\/\/ The hours portion.\n pub hour: S,\n \/\/\/ The minutes portion.\n pub minute: S,\n \/\/\/ The seconds portion.\n pub second: S\n },\n}\n \nimpl<S: BaseFloat + Mul<S, S> + Div<S, S> + Rem<S, S>> Angle<S> {\n \/\/\/ Returns an angle in radians.\n pub fn radians(s: S) -> Angle<S> { Rad(s % Float::two_pi()) }\n \n \/\/\/ Returns an angle in degrees.\n pub fn degrees(s: S) -> Angle<S> { Deg(s % FromPrimitive::from_f64(360.0).unwrap()) }\n\n \/\/\/ Returns an angle in gradians.\n pub fn gradians(s: S) -> Angle<S> { Grad(s % FromPrimitive::from_f64(400.0).unwrap()) }\n\n \/\/\/ Returns an angle in turns.\n pub fn turns(s: S) -> Angle<S> { Turn(s.fract()) }\n\n \/\/\/ Returns an angle as it would appear on a clock.\n pub fn clock_face(hour: S, minute: S, second: S) -> Angle<S> {\n Clock { hour: hour, minute: minute, second: second }\n }\n\n \/\/\/ Converts an angle to radians.\n pub fn to_radians(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::radians(val),\n &Deg(val) => Angle::radians(val.to_radians()),\n &Grad(val) => Angle::radians(val * Float::pi() \/ FromPrimitive::from_f64(200.0).unwrap()),\n &Turn(val) => Angle::radians(val * Float::two_pi()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to degrees.\n pub fn to_degrees(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::degrees(val.to_degrees()),\n &Deg(val) => Angle::degrees(val),\n &Grad(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0 \/ 400.0).unwrap()),\n &Turn(val) => Angle::degrees(val * FromPrimitive::from_f64(360.0).unwrap()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to gradians.\n pub fn to_gradians(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::gradians(val \/ Float::pi() * FromPrimitive::from_f64(200.0).unwrap()),\n &Deg(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0 \/ 360.0).unwrap()),\n &Grad(val) => Angle::gradians(val),\n &Turn(val) => Angle::gradians(val * FromPrimitive::from_f64(400.0).unwrap()),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ Converts an angle to turns.\n pub fn to_turns(&self) -> Angle<S> {\n match self {\n &Rad(val) => Angle::turns(val \/ Float::two_pi()),\n &Deg(val) => Angle::turns(val \/ FromPrimitive::from_f64(360.0).unwrap()),\n &Grad(val) => Angle::turns(val \/ FromPrimitive::from_f64(400.0).unwrap()),\n &Turn(val) => Angle::turns(val),\n _ => unimplemented!()\n }\n }\n\n \/\/\/ One half of the domain. In radians, this is `π`.\n pub fn half() -> Angle<S> { Rad(Float::pi()) }\n\n \/\/\/ One quarter of the domain. 
In radians, this is `π\/2`.\n pub fn quarter() -> Angle<S> { Rad(Float::frac_pi_2()) }\n\n \/\/\/ One sixth of the domain. In radians, this is `π\/3`.\n pub fn sixth() -> Angle<S> { Rad(Float::frac_pi_3()) }\n\n \/\/\/ One eighth of the domain. In radians, this is `π\/4`.\n pub fn eighth() -> Angle<S> { Rad(Float::frac_pi_4()) }\n\n \/\/\/ Gets the raw value that is stored in the angle.\n \/\/\/\n \/\/\/ ## Failure\n \/\/\/\n \/\/\/ Clock-valued angles are not encoded as a single value, and so this\n \/\/\/ method will always fail for them.\n pub fn unwrap(&self) -> S {\n match self {\n &Rad(s)|&Deg(s)|&Grad(s)|&Turn(s) => s,\n _ => fail!(\"Clock values cannot be unwrapped.\")\n }\n }\n}\n\nimpl<S: BaseFloat> Add<Angle<S>, Angle<S>> for Angle<S> {\n #[inline]\n fn add(&self, other: &Angle<S>) -> Angle<S> {\n match (self, other) {\n (&Rad(val), othr) => Angle::radians(val + othr.to_radians().unwrap()),\n (&Deg(val), othr) => Angle::degrees(val + othr.to_degrees().unwrap()),\n (&Grad(val), othr) => Angle::gradians(val + othr.to_gradians().unwrap()),\n (&Turn(val), othr) => Angle::turns(val + othr.to_turns().unwrap()),\n _ => unimplemented!()\n }\n }\n}\n\nimpl<S: BaseFloat> Sub<Angle<S>, Angle<S>> for Angle<S> {\n #[inline]\n fn sub(&self, other: &Angle<S>) -> Angle<S> {\n match (self, other) {\n (&Rad(val), othr) => Angle::radians(val - othr.to_radians().unwrap()),\n (&Deg(val), othr) => Angle::degrees(val - othr.to_degrees().unwrap()),\n (&Grad(val), othr) => Angle::gradians(val - othr.to_gradians().unwrap()),\n (&Turn(val), othr) => Angle::turns(val - othr.to_turns().unwrap()),\n _ => unimplemented!()\n }\n }\n}\n\nimpl<S: BaseFloat + fmt::Show> fmt::Show for Angle<S> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n &Deg(val) => write!(f, \"{}°\", val),\n &Rad(val) => write!(f, \"{} rad\", val),\n &Grad(val) => write!(f, \"{} gon\", val),\n &Turn(val) => write!(f, \"{} turns\", val),\n _ => fail!(\"Not yet implemented.\")\n }\n }\n}\n\nmacro_rules! 
angle_trigonometry (\n ($($method:ident),+ ) => (\n impl<S: BaseFloat> Trigonometry<S> for Angle<S> {\n $(fn $method(&self) -> S {\n match self {\n &Rad(val) => val.$method(),\n &Deg(val) => val.to_radians().$method(),\n _ => fail!(\"Not yet implemented.\")\n }\n }\n )+\n }\n )\n)\n\nangle_trigonometry!(sin, cos, tan)\n\n\/*\n Test suite.\n*\/\n\n#[cfg(test)]\nmod test {\n use super::Angle;\n\n #[test]\n fn test_conversion() {\n let half: Angle<f64> = Angle::half();\n assert_eq!(half.to_degrees().to_gradians().to_turns().to_radians(), half);\n assert_eq!(half.to_turns().to_gradians().to_degrees().to_radians(), half);\n assert_eq!(half.to_degrees().to_turns().to_gradians().to_radians(), half);\n assert_eq!(half.to_gradians().to_radians(), half);\n }\n\n #[test]\n fn test_operators() {\n assert_eq!(Angle::degrees(100.0f64) + Angle::degrees(100.0f64), Angle::degrees(200.0f64));\n assert_eq!(Angle::degrees(100.0f64) - Angle::degrees(100.0f64), Angle::degrees(0.0f64));\n assert_eq!(Angle::degrees(100.0f64) + Angle::radians(0.0f64), Angle::degrees(100.0f64));\n assert_eq!(Angle::radians(1.0f64) - Angle::degrees(0.0f64), Angle::radians(1.0f64));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::hash_map::RandomState;\nuse std::hash::{Hash, Hasher, BuildHasher};\n\nuse layout::Rect;\n\npub fn hash<T: Hash>(t: &T, area: &Rect) -> u64 {\n let state = RandomState::new();\n let mut hasher = state.build_hasher();\n t.hash(&mut hasher);\n area.hash(&mut hasher);\n hasher.finish()\n}\n<commit_msg>Fix layout cache on nightly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>drop method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Deal with one of the validation layer warnings regarding multisample resolve<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use error output from runtime to print error<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Implementation of cookie creation and matching as specified by\n\/\/! http:\/\/tools.ietf.org\/html\/rfc6265\n\nuse cookie_storage::CookieSource;\nuse pub_domains::PUB_DOMAINS;\n\nuse cookie_rs;\nuse time::{Tm, now, at, Timespec};\nuse url::Url;\nuse std::borrow::ToOwned;\nuse std::i64;\nuse std::net::{Ipv4Addr, Ipv6Addr};\nuse std::time::Duration;\nuse std::str::FromStr;\n\n\/\/\/ A stored cookie that wraps the definition in cookie-rs. 
This is used to implement\n\/\/\/ various behaviours defined in the spec that rely on an associated request URL,\n\/\/\/ which cookie-rs and hyper's header parsing do not support.\n#[derive(Clone, Debug)]\npub struct Cookie {\n pub cookie: cookie_rs::Cookie,\n pub host_only: bool,\n pub persistent: bool,\n pub creation_time: Tm,\n pub last_access: Tm,\n pub expiry_time: Tm,\n}\n\nimpl Cookie {\n \/\/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.3\n pub fn new_wrapped(mut cookie: cookie_rs::Cookie, request: &Url, source: CookieSource)\n -> Option<Cookie> {\n \/\/ Step 3\n let (persistent, expiry_time) = match (&cookie.max_age, &cookie.expires) {\n (&Some(max_age), _) => (true, at(now().to_timespec() + Duration::seconds(max_age as i64))),\n (_, &Some(expires)) => (true, expires),\n _ => (false, at(Timespec::new(i64::MAX, 0)))\n };\n\n let url_host = request.host().map(|host| host.serialize()).unwrap_or(\"\".to_owned());\n\n \/\/ Step 4\n let mut domain = cookie.domain.clone().unwrap_or(\"\".to_owned());\n\n \/\/ Step 5\n match PUB_DOMAINS.iter().find(|&x| domain == *x) {\n Some(val) if *val == url_host => domain = \"\".to_string(),\n Some(_) => return None,\n None => {}\n }\n\n \/\/ Step 6\n let host_only = if !domain.is_empty() {\n if !Cookie::domain_match(&url_host, &domain) {\n return None;\n } else {\n cookie.domain = Some(domain);\n false\n }\n } else {\n cookie.domain = Some(url_host);\n true\n };\n\n \/\/ Step 7\n let mut path = cookie.path.unwrap_or(\"\".to_owned());\n if path.is_empty() || path.char_at(0) != '\/' {\n let url_path = request.serialize_path();\n let url_path = url_path.as_ref().map(|path| &**path);\n path = Cookie::default_path(url_path.unwrap_or(\"\"));\n }\n cookie.path = Some(path);\n\n\n \/\/ Step 10\n if cookie.httponly && source != CookieSource::HTTP {\n return None;\n }\n\n Some(Cookie {\n cookie: cookie,\n host_only: host_only,\n persistent: persistent,\n creation_time: now(),\n last_access: now(),\n expiry_time: expiry_time,\n })\n }\n\n pub fn touch(&mut self) {\n self.last_access = now();\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.4\n fn default_path(request_path: &str) -> String {\n if request_path == \"\" || request_path.char_at(0) != '\/' ||\n request_path.chars().filter(|&c| c == '\/').count() == 1 {\n \"\/\".to_owned()\n } else if request_path.ends_with(\"\/\") {\n request_path[..request_path.len() - 1].to_owned()\n } else {\n request_path.to_owned()\n }\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.4\n pub fn path_match(request_path: &str, cookie_path: &str) -> bool {\n request_path == cookie_path ||\n ( request_path.starts_with(cookie_path) &&\n ( request_path.ends_with(\"\/\") || request_path.char_at(cookie_path.len() - 1) == '\/' )\n )\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.3\n pub fn domain_match(string: &str, domain_string: &str) -> bool {\n if string == domain_string {\n return true;\n }\n if string.ends_with(domain_string)\n && string.char_at(string.len()-domain_string.len()-1) == '.'\n && Ipv4Addr::from_str(string).is_err()\n && Ipv6Addr::from_str(string).is_err() {\n return true;\n }\n false\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.4 step 1\n pub fn appropriate_for_url(&self, url: &Url, source: CookieSource) -> bool {\n let domain = url.host().map(|host| host.serialize());\n if self.host_only {\n if self.cookie.domain != domain {\n return false;\n }\n } else {\n if let (Some(ref domain), &Some(ref cookie_domain)) = (domain, &self.cookie.domain) {\n if 
!Cookie::domain_match(domain, cookie_domain) {\n return false;\n }\n }\n }\n\n if let (Some(ref path), &Some(ref cookie_path)) = (url.serialize_path(), &self.cookie.path) {\n if !Cookie::path_match(path, cookie_path) {\n return false;\n }\n }\n\n if self.cookie.secure && url.scheme != \"https\".to_string() {\n return false;\n }\n if self.cookie.httponly && source == CookieSource::NonHTTP {\n return false;\n }\n\n return true;\n }\n}\n\n#[test]\nfn test_domain_match() {\n assert!(Cookie::domain_match(\"foo.com\", \"foo.com\"));\n assert!(Cookie::domain_match(\"bar.foo.com\", \"foo.com\"));\n assert!(Cookie::domain_match(\"baz.bar.foo.com\", \"foo.com\"));\n\n assert!(!Cookie::domain_match(\"bar.foo.com\", \"bar.com\"));\n assert!(!Cookie::domain_match(\"bar.com\", \"baz.bar.com\"));\n assert!(!Cookie::domain_match(\"foo.com\", \"bar.com\"));\n\n assert!(!Cookie::domain_match(\"bar.com\", \"bbar.com\"));\n assert!(Cookie::domain_match(\"235.132.2.3\", \"235.132.2.3\"));\n assert!(!Cookie::domain_match(\"235.132.2.3\", \"1.1.1.1\"));\n assert!(!Cookie::domain_match(\"235.132.2.3\", \".2.3\"));\n}\n\n#[test]\nfn test_default_path() {\n assert!(&*Cookie::default_path(\"\/foo\/bar\/baz\/\") == \"\/foo\/bar\/baz\");\n assert!(&*Cookie::default_path(\"\/foo\/\") == \"\/foo\");\n assert!(&*Cookie::default_path(\"\/foo\") == \"\/\");\n assert!(&*Cookie::default_path(\"\/\") == \"\/\");\n assert!(&*Cookie::default_path(\"\") == \"\/\");\n assert!(&*Cookie::default_path(\"foo\") == \"\/\");\n}\n\n#[test]\nfn fn_cookie_constructor() {\n use cookie_storage::CookieSource;\n\n let url = &Url::parse(\"http:\/\/example.com\/foo\").unwrap();\n\n let gov_url = &Url::parse(\"http:\/\/gov.ac\/foo\").unwrap();\n \/\/ cookie name\/value test\n assert!(cookie_rs::Cookie::parse(\" baz \").is_err());\n assert!(cookie_rs::Cookie::parse(\" = bar \").is_err());\n assert!(cookie_rs::Cookie::parse(\" baz = \").is_ok());\n\n \/\/ cookie domains test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar; Domain = \").unwrap();\n assert!(Cookie::new_wrapped(cookie.clone(), url, CookieSource::HTTP).is_some());\n let cookie = Cookie::new_wrapped(cookie, url, CookieSource::HTTP).unwrap();\n assert!(&**cookie.cookie.domain.as_ref().unwrap() == \"example.com\");\n\n \/\/ cookie public domains test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar; Domain = gov.ac\").unwrap();\n assert!(Cookie::new_wrapped(cookie.clone(), url, CookieSource::HTTP).is_none());\n assert!(Cookie::new_wrapped(cookie, gov_url, CookieSource::HTTP).is_some());\n\n \/\/ cookie domain matching test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Domain = bazample.com\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::HTTP).is_none());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Path = \/foo\/bar\/\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::HTTP).is_some());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; HttpOnly\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::NonHTTP).is_none());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Path = \/foo\/bar\/\").unwrap();\n let cookie = Cookie::new_wrapped(cookie, url, CookieSource::HTTP).unwrap();\n assert!(cookie.cookie.value.as_slice() == \"bar\");\n assert!(cookie.cookie.name.as_slice() == \"baz\");\n assert!(cookie.cookie.secure);\n assert!(cookie.cookie.path.as_ref().unwrap().as_slice() == \"\/foo\/bar\/\");\n assert!(cookie.cookie.domain.as_ref().unwrap().as_slice() == 
\"example.com\");\n assert!(cookie.host_only);\n\n let u = &Url::parse(\"http:\/\/example.com\/foobar\").unwrap();\n let cookie = cookie_rs::Cookie::parse(\"foobar=value;path=\/\").unwrap();\n assert!(Cookie::new_wrapped(cookie, u, CookieSource::HTTP).is_some());\n}\n<commit_msg>auto merge of #5403 : frewsxcv\/servo\/cookie-default-path, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Implementation of cookie creation and matching as specified by\n\/\/! http:\/\/tools.ietf.org\/html\/rfc6265\n\nuse cookie_storage::CookieSource;\nuse pub_domains::PUB_DOMAINS;\n\nuse cookie_rs;\nuse time::{Tm, now, at, Timespec};\nuse url::Url;\nuse std::borrow::ToOwned;\nuse std::i64;\nuse std::net::{Ipv4Addr, Ipv6Addr};\nuse std::time::Duration;\nuse std::str::FromStr;\n\n\/\/\/ A stored cookie that wraps the definition in cookie-rs. This is used to implement\n\/\/\/ various behaviours defined in the spec that rely on an associated request URL,\n\/\/\/ which cookie-rs and hyper's header parsing do not support.\n#[derive(Clone, Debug)]\npub struct Cookie {\n pub cookie: cookie_rs::Cookie,\n pub host_only: bool,\n pub persistent: bool,\n pub creation_time: Tm,\n pub last_access: Tm,\n pub expiry_time: Tm,\n}\n\nimpl Cookie {\n \/\/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.3\n pub fn new_wrapped(mut cookie: cookie_rs::Cookie, request: &Url, source: CookieSource)\n -> Option<Cookie> {\n \/\/ Step 3\n let (persistent, expiry_time) = match (&cookie.max_age, &cookie.expires) {\n (&Some(max_age), _) => (true, at(now().to_timespec() + Duration::seconds(max_age as i64))),\n (_, &Some(expires)) => (true, expires),\n _ => (false, at(Timespec::new(i64::MAX, 0)))\n };\n\n let url_host = request.host().map(|host| host.serialize()).unwrap_or(\"\".to_owned());\n\n \/\/ Step 4\n let mut domain = cookie.domain.clone().unwrap_or(\"\".to_owned());\n\n \/\/ Step 5\n match PUB_DOMAINS.iter().find(|&x| domain == *x) {\n Some(val) if *val == url_host => domain = \"\".to_string(),\n Some(_) => return None,\n None => {}\n }\n\n \/\/ Step 6\n let host_only = if !domain.is_empty() {\n if !Cookie::domain_match(&url_host, &domain) {\n return None;\n } else {\n cookie.domain = Some(domain);\n false\n }\n } else {\n cookie.domain = Some(url_host);\n true\n };\n\n \/\/ Step 7\n let mut path = cookie.path.unwrap_or(\"\".to_owned());\n if path.is_empty() || path.char_at(0) != '\/' {\n let url_path = request.serialize_path();\n let url_path = url_path.as_ref().map(|path| &**path);\n path = Cookie::default_path(url_path.unwrap_or(\"\")).to_owned();\n }\n cookie.path = Some(path);\n\n\n \/\/ Step 10\n if cookie.httponly && source != CookieSource::HTTP {\n return None;\n }\n\n Some(Cookie {\n cookie: cookie,\n host_only: host_only,\n persistent: persistent,\n creation_time: now(),\n last_access: now(),\n expiry_time: expiry_time,\n })\n }\n\n pub fn touch(&mut self) {\n self.last_access = now();\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.4\n fn default_path(request_path: &str) -> &str {\n \/\/ Step 2\n if request_path.is_empty() || !request_path.starts_with(\"\/\") {\n return \"\/\";\n }\n\n \/\/ Step 3\n let rightmost_slash_idx = request_path.rfind(\"\/\").unwrap();\n if rightmost_slash_idx == 0 {\n \/\/ There's only one slash; it's the first character\n return \"\/\";\n }\n\n \/\/ Step 4\n 
&request_path[..rightmost_slash_idx]\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.4\n pub fn path_match(request_path: &str, cookie_path: &str) -> bool {\n request_path == cookie_path ||\n ( request_path.starts_with(cookie_path) &&\n ( request_path.ends_with(\"\/\") || request_path.char_at(cookie_path.len() - 1) == '\/' )\n )\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.1.3\n pub fn domain_match(string: &str, domain_string: &str) -> bool {\n if string == domain_string {\n return true;\n }\n if string.ends_with(domain_string)\n && string.char_at(string.len()-domain_string.len()-1) == '.'\n && Ipv4Addr::from_str(string).is_err()\n && Ipv6Addr::from_str(string).is_err() {\n return true;\n }\n false\n }\n\n \/\/ http:\/\/tools.ietf.org\/html\/rfc6265#section-5.4 step 1\n pub fn appropriate_for_url(&self, url: &Url, source: CookieSource) -> bool {\n let domain = url.host().map(|host| host.serialize());\n if self.host_only {\n if self.cookie.domain != domain {\n return false;\n }\n } else {\n if let (Some(ref domain), &Some(ref cookie_domain)) = (domain, &self.cookie.domain) {\n if !Cookie::domain_match(domain, cookie_domain) {\n return false;\n }\n }\n }\n\n if let (Some(ref path), &Some(ref cookie_path)) = (url.serialize_path(), &self.cookie.path) {\n if !Cookie::path_match(path, cookie_path) {\n return false;\n }\n }\n\n if self.cookie.secure && url.scheme != \"https\".to_string() {\n return false;\n }\n if self.cookie.httponly && source == CookieSource::NonHTTP {\n return false;\n }\n\n return true;\n }\n}\n\n#[test]\nfn test_domain_match() {\n assert!(Cookie::domain_match(\"foo.com\", \"foo.com\"));\n assert!(Cookie::domain_match(\"bar.foo.com\", \"foo.com\"));\n assert!(Cookie::domain_match(\"baz.bar.foo.com\", \"foo.com\"));\n\n assert!(!Cookie::domain_match(\"bar.foo.com\", \"bar.com\"));\n assert!(!Cookie::domain_match(\"bar.com\", \"baz.bar.com\"));\n assert!(!Cookie::domain_match(\"foo.com\", \"bar.com\"));\n\n assert!(!Cookie::domain_match(\"bar.com\", \"bbar.com\"));\n assert!(Cookie::domain_match(\"235.132.2.3\", \"235.132.2.3\"));\n assert!(!Cookie::domain_match(\"235.132.2.3\", \"1.1.1.1\"));\n assert!(!Cookie::domain_match(\"235.132.2.3\", \".2.3\"));\n}\n\n#[test]\nfn test_default_path() {\n assert!(&*Cookie::default_path(\"\/foo\/bar\/baz\/\") == \"\/foo\/bar\/baz\");\n assert!(&*Cookie::default_path(\"\/foo\/bar\/baz\") == \"\/foo\/bar\");\n assert!(&*Cookie::default_path(\"\/foo\/\") == \"\/foo\");\n assert!(&*Cookie::default_path(\"\/foo\") == \"\/\");\n assert!(&*Cookie::default_path(\"\/\") == \"\/\");\n assert!(&*Cookie::default_path(\"\") == \"\/\");\n assert!(&*Cookie::default_path(\"foo\") == \"\/\");\n}\n\n#[test]\nfn fn_cookie_constructor() {\n use cookie_storage::CookieSource;\n\n let url = &Url::parse(\"http:\/\/example.com\/foo\").unwrap();\n\n let gov_url = &Url::parse(\"http:\/\/gov.ac\/foo\").unwrap();\n \/\/ cookie name\/value test\n assert!(cookie_rs::Cookie::parse(\" baz \").is_err());\n assert!(cookie_rs::Cookie::parse(\" = bar \").is_err());\n assert!(cookie_rs::Cookie::parse(\" baz = \").is_ok());\n\n \/\/ cookie domains test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar; Domain = \").unwrap();\n assert!(Cookie::new_wrapped(cookie.clone(), url, CookieSource::HTTP).is_some());\n let cookie = Cookie::new_wrapped(cookie, url, CookieSource::HTTP).unwrap();\n assert!(&**cookie.cookie.domain.as_ref().unwrap() == \"example.com\");\n\n \/\/ cookie public domains test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar; 
Domain = gov.ac\").unwrap();\n assert!(Cookie::new_wrapped(cookie.clone(), url, CookieSource::HTTP).is_none());\n assert!(Cookie::new_wrapped(cookie, gov_url, CookieSource::HTTP).is_some());\n\n \/\/ cookie domain matching test\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Domain = bazample.com\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::HTTP).is_none());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Path = \/foo\/bar\/\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::HTTP).is_some());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; HttpOnly\").unwrap();\n assert!(Cookie::new_wrapped(cookie, url, CookieSource::NonHTTP).is_none());\n\n let cookie = cookie_rs::Cookie::parse(\" baz = bar ; Secure; Path = \/foo\/bar\/\").unwrap();\n let cookie = Cookie::new_wrapped(cookie, url, CookieSource::HTTP).unwrap();\n assert!(cookie.cookie.value.as_slice() == \"bar\");\n assert!(cookie.cookie.name.as_slice() == \"baz\");\n assert!(cookie.cookie.secure);\n assert!(cookie.cookie.path.as_ref().unwrap().as_slice() == \"\/foo\/bar\/\");\n assert!(cookie.cookie.domain.as_ref().unwrap().as_slice() == \"example.com\");\n assert!(cookie.host_only);\n\n let u = &Url::parse(\"http:\/\/example.com\/foobar\").unwrap();\n let cookie = cookie_rs::Cookie::parse(\"foobar=value;path=\/\").unwrap();\n assert!(Cookie::new_wrapped(cookie, u, CookieSource::HTTP).is_some());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create an program to demonstrate random programs<commit_after>extern crate rand;\nextern crate nadezhda;\n\nuse nadezhda::grammar::Program;\n\ntrait Length {\n fn length(&self) -> i32;\n}\n\nimpl Length for Program {\n fn length(&self) -> i32 {\n 1 + match *self {\n Program::Forward(ref contained_program) => {\n contained_program.length()\n },\n Program::Backward(ref contained_program) => {\n contained_program.length()\n },\n Program::Stop => 0\n\n }\n }\n}\n\nfn main() {\n let program: Program = rand::random();\n\n println!(\"length of program is {}\", program.length());\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add publish method to query names for `namesp`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reimplement `namesp` structure with explicit ownership relations instead of `Rc`s.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>compact<commit_after><|endoftext|>"} {"text":"<commit_before>\n\/\/! 
Storing sounds, textures, animations etc.\n\n\/\/ Extern crates.\nuse graphics::*;\nuse HashMap = collections::HashMap;\nuse gl;\nuse gl::types::GLuint;\nuse libc::c_void;\nuse std::os::self_exe_path;\n\n\/\/ Local crate.\nuse png;\n\n\/\/\/ Represents a texture in Piston.\npub struct Texture {\n id: GLuint,\n width: u32,\n height: u32,\n}\n\n\/\/\/ A place to store sounds, textures, animations etc.\n\/\/\/\n\/\/\/ The idea is to have one object which the app can use\n\/\/\/ to load assets for the game with a simple interface.\npub struct AssetStore {\n \/\/ The folder to load assets from.\n assets_folder: Option<String>,\n \/\/ List of OpenGL textures.\n textures: Vec<Texture>,\n \/\/ Contains names of loaded textures.\n texture_files: HashMap<String, uint>,\n}\n\nimpl AssetStore {\n \/\/\/ Creates a new `AssetStore` from an assets folder.\n pub fn from_folder(assets_folder: &str) -> AssetStore {\n AssetStore {\n assets_folder: Some(assets_folder.to_string()),\n textures: Vec::new(),\n texture_files: HashMap::new(),\n }\n }\n\n \/\/\/ Creates an empty `AssetStore` with no assets.\n pub fn empty() -> AssetStore {\n AssetStore {\n assets_folder: None,\n textures: Vec::new(),\n texture_files: HashMap::new(),\n }\n }\n\n \/\/\/ Gets OpenGL texture from texture id.\n pub fn get_texture(&self, texture_id: uint) -> GLuint {\n self.textures.get(texture_id).id\n }\n\n \/\/\/ Loads image by relative file name to the asset root.\n pub fn load_image(&mut self, file: &str) -> Result<Image, String> {\n match self.texture_files.find_equiv(&file) {\n None => {},\n Some(&texture_id) => {\n let texture = self.textures.get(texture_id);\n return Ok(Image {\n texture_id: texture_id,\n texture_width: texture.width,\n texture_height: texture.height,\n source_rect: [0, 0, texture.width, texture.height],\n })\n },\n };\n\n let folder = self.assets_folder.as_ref().unwrap();\n let exe_path = self_exe_path();\n let exe_path = match exe_path {\n Some(path) => path,\n None => return Err(\"Could not get the path to executable\".to_string()),\n };\n let path = exe_path.join(Path::new(folder.as_slice())).join(Path::new(file));\n let img = match png::load_png(&path) {\n Ok(img) => img,\n Err(msg) => return Err(format!(\"Could not load '{}': {}\", file, msg)),\n };\n\n match img.color_type {\n png::RGBA8 => {},\n t => fail!(\"Unsupported color type {:?} in png\", t),\n };\n\n let mut id: GLuint = 0;\n unsafe {\n gl::GenTextures(1, &mut id);\n gl::BindTexture(gl::TEXTURE_2D, id);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);\n gl::TexImage2D(\n gl::TEXTURE_2D,\n 0,\n gl::RGBA as i32,\n img.width as i32,\n img.height as i32,\n 0,\n gl::RGBA,\n gl::UNSIGNED_BYTE,\n img.pixels.as_ptr() as *c_void\n );\n }\n let texture = Texture {\n id: id,\n width: img.width,\n height: img.height,\n };\n self.textures.push(texture);\n let texture_id = self.textures.len() - 1;\n\n self.texture_files.insert(file.to_string(), texture_id);\n Ok(Image {\n texture_id: texture_id,\n texture_width: texture.width,\n texture_height: texture.height,\n source_rect: [0, 0, texture.width, texture.height],\n })\n }\n}\n\n<commit_msg>Remove alias in asset_store<commit_after>\n\/\/! 
Storing sounds, textures, animations etc.\n\n\/\/ Extern crates.\nuse graphics::*;\nuse collections::HashMap;\nuse gl;\nuse gl::types::GLuint;\nuse libc::c_void;\nuse std::os::self_exe_path;\n\n\/\/ Local crate.\nuse png;\n\n\/\/\/ Represents a texture in Piston.\npub struct Texture {\n id: GLuint,\n width: u32,\n height: u32,\n}\n\n\/\/\/ A place to store sounds, textures, animations etc.\n\/\/\/\n\/\/\/ The idea is to have one object which the app can use\n\/\/\/ to load assets for the game with a simple interface.\npub struct AssetStore {\n \/\/ The folder to load assets from.\n assets_folder: Option<String>,\n \/\/ List of OpenGL textures.\n textures: Vec<Texture>,\n \/\/ Contains names of loaded textures.\n texture_files: HashMap<String, uint>,\n}\n\nimpl AssetStore {\n \/\/\/ Creates a new `AssetStore` from an assets folder.\n pub fn from_folder(assets_folder: &str) -> AssetStore {\n AssetStore {\n assets_folder: Some(assets_folder.to_string()),\n textures: Vec::new(),\n texture_files: HashMap::new(),\n }\n }\n\n \/\/\/ Creates an empty `AssetStore` with no assets.\n pub fn empty() -> AssetStore {\n AssetStore {\n assets_folder: None,\n textures: Vec::new(),\n texture_files: HashMap::new(),\n }\n }\n\n \/\/\/ Gets OpenGL texture from texture id.\n pub fn get_texture(&self, texture_id: uint) -> GLuint {\n self.textures.get(texture_id).id\n }\n\n \/\/\/ Loads image by relative file name to the asset root.\n pub fn load_image(&mut self, file: &str) -> Result<Image, String> {\n match self.texture_files.find_equiv(&file) {\n None => {},\n Some(&texture_id) => {\n let texture = self.textures.get(texture_id);\n return Ok(Image {\n texture_id: texture_id,\n texture_width: texture.width,\n texture_height: texture.height,\n source_rect: [0, 0, texture.width, texture.height],\n })\n },\n };\n\n let folder = self.assets_folder.as_ref().unwrap();\n let exe_path = self_exe_path();\n let exe_path = match exe_path {\n Some(path) => path,\n None => return Err(\"Could not get the path to executable\".to_string()),\n };\n let path = exe_path.join(Path::new(folder.as_slice())).join(Path::new(file));\n let img = match png::load_png(&path) {\n Ok(img) => img,\n Err(msg) => return Err(format!(\"Could not load '{}': {}\", file, msg)),\n };\n\n match img.color_type {\n png::RGBA8 => {},\n t => fail!(\"Unsupported color type {:?} in png\", t),\n };\n\n let mut id: GLuint = 0;\n unsafe {\n gl::GenTextures(1, &mut id);\n gl::BindTexture(gl::TEXTURE_2D, id);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);\n gl::TexImage2D(\n gl::TEXTURE_2D,\n 0,\n gl::RGBA as i32,\n img.width as i32,\n img.height as i32,\n 0,\n gl::RGBA,\n gl::UNSIGNED_BYTE,\n img.pixels.as_ptr() as *c_void\n );\n }\n let texture = Texture {\n id: id,\n width: img.width,\n height: img.height,\n };\n self.textures.push(texture);\n let texture_id = self.textures.len() - 1;\n\n self.texture_files.insert(file.to_string(), texture_id);\n Ok(Image {\n texture_id: texture_id,\n texture_width: texture.width,\n texture_height: texture.height,\n source_rect: [0, 0, texture.width, texture.height],\n })\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg(not(test))]\n\n#[macro_use]\nextern crate log;\nextern crate rustfmt;\nextern crate toml;\nextern crate env_logger;\nextern crate getopts;\n\nuse rustfmt::{WriteMode, run, run_from_stdin};\nuse rustfmt::config::Config;\n\nuse std::env;\nuse std::fs::{self, File};\nuse std::io::{self, Read, Write};\nuse std::path::{Path, PathBuf};\n\nuse getopts::Options;\n\n\/\/\/ Rustfmt operations.\nenum Operation {\n \/\/\/ Format a file and its child modules.\n Format(PathBuf, WriteMode),\n \/\/\/ Print the help message.\n Help,\n \/\/\/ Invalid program input, including reason.\n InvalidInput(String),\n \/\/\/ No file specified, read from stdin\n Stdin(String, WriteMode),\n}\n\n\/\/\/ Try to find a project file in the input file directory and its parents.\nfn lookup_project_file(input_file: &Path) -> io::Result<PathBuf> {\n let mut current = if input_file.is_relative() {\n try!(env::current_dir()).join(input_file)\n } else {\n input_file.to_path_buf()\n };\n\n \/\/ FIXME: We should canonize path to properly handle its parents,\n \/\/ but `canonicalize` function is unstable now (recently added API)\n \/\/ current = try!(fs::canonicalize(current));\n\n loop {\n \/\/ If the current directory has no parent, we're done searching.\n if !current.pop() {\n return Err(io::Error::new(io::ErrorKind::NotFound, \"Config not found\"));\n }\n let config_file = current.join(\"rustfmt.toml\");\n if fs::metadata(&config_file).is_ok() {\n return Ok(config_file);\n }\n }\n}\n\n\/\/\/ Try to find a project file. 
If it's found, read it.\nfn lookup_and_read_project_file(input_file: &Path) -> io::Result<(PathBuf, String)> {\n let path = try!(lookup_project_file(input_file));\n let mut file = try!(File::open(&path));\n let mut toml = String::new();\n try!(file.read_to_string(&mut toml));\n Ok((path, toml))\n}\n\nfn execute() -> i32 {\n let mut opts = Options::new();\n opts.optflag(\"h\", \"help\", \"show this message\");\n opts.optopt(\"\",\n \"write-mode\",\n \"mode to write in (not usable when piping from stdin)\",\n \"[replace|overwrite|display|diff|coverage]\");\n\n let operation = determine_operation(&opts, env::args().skip(1));\n\n match operation {\n Operation::InvalidInput(reason) => {\n print_usage(&opts, &reason);\n 1\n }\n Operation::Help => {\n print_usage(&opts, \"\");\n 0\n }\n Operation::Stdin(input, write_mode) => {\n \/\/ try to read config from local directory\n let config = match lookup_and_read_project_file(&Path::new(\".\")) {\n Ok((path, toml)) => {\n Config::from_toml(&toml)\n }\n Err(_) => Default::default(),\n };\n\n run_from_stdin(input, write_mode, &config);\n 0\n }\n Operation::Format(file, write_mode) => {\n let config = match lookup_and_read_project_file(&file) {\n Ok((path, toml)) => {\n println!(\"Using rustfmt config file: {}\", path.display());\n Config::from_toml(&toml)\n }\n Err(_) => Default::default(),\n };\n\n run(&file, write_mode, &config);\n 0\n }\n }\n}\n\nfn main() {\n let _ = env_logger::init();\n let exit_code = execute();\n\n \/\/ Make sure standard output is flushed before we exit.\n std::io::stdout().flush().unwrap();\n\n \/\/ Exit with given exit code.\n \/\/\n \/\/ NOTE: This immediately terminates the process without doing any cleanup,\n \/\/ so make sure to finish all necessary cleanup before this is called.\n std::process::exit(exit_code);\n}\n\nfn print_usage(opts: &Options, reason: &str) {\n let reason = format!(\"{}\\nusage: {} [options] <file>\",\n reason,\n env::current_exe().unwrap().display());\n println!(\"{}\", opts.usage(&reason));\n Config::print_docs();\n}\n\nfn determine_operation<I>(opts: &Options, args: I) -> Operation\n where I: Iterator<Item = String>\n{\n let matches = match opts.parse(args) {\n Ok(m) => m,\n Err(e) => return Operation::InvalidInput(e.to_string()),\n };\n\n if matches.opt_present(\"h\") {\n return Operation::Help;\n }\n\n \/\/ if no file argument is supplied, read from stdin\n if matches.free.len() == 0 {\n\n let mut buffer = String::new();\n match io::stdin().read_to_string(&mut buffer) {\n Ok(..) => (),\n Err(e) => return Operation::InvalidInput(e.to_string()),\n }\n\n \/\/ WriteMode is always plain for Stdin\n return Operation::Stdin(buffer, WriteMode::Plain);\n }\n\n let write_mode = match matches.opt_str(\"write-mode\") {\n Some(mode) => {\n match mode.parse() {\n Ok(mode) => mode,\n Err(..) => return Operation::InvalidInput(\"Unrecognized write mode\".into()),\n }\n }\n None => WriteMode::Replace,\n };\n\n Operation::Format(PathBuf::from(&matches.free[0]), write_mode)\n}\n<commit_msg>fix unused variable `path` warning<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg(not(test))]\n\n#[macro_use]\nextern crate log;\nextern crate rustfmt;\nextern crate toml;\nextern crate env_logger;\nextern crate getopts;\n\nuse rustfmt::{WriteMode, run, run_from_stdin};\nuse rustfmt::config::Config;\n\nuse std::env;\nuse std::fs::{self, File};\nuse std::io::{self, Read, Write};\nuse std::path::{Path, PathBuf};\n\nuse getopts::Options;\n\n\/\/\/ Rustfmt operations.\nenum Operation {\n \/\/\/ Format a file and its child modules.\n Format(PathBuf, WriteMode),\n \/\/\/ Print the help message.\n Help,\n \/\/\/ Invalid program input, including reason.\n InvalidInput(String),\n \/\/\/ No file specified, read from stdin\n Stdin(String, WriteMode),\n}\n\n\/\/\/ Try to find a project file in the input file directory and its parents.\nfn lookup_project_file(input_file: &Path) -> io::Result<PathBuf> {\n let mut current = if input_file.is_relative() {\n try!(env::current_dir()).join(input_file)\n } else {\n input_file.to_path_buf()\n };\n\n \/\/ FIXME: We should canonize path to properly handle its parents,\n \/\/ but `canonicalize` function is unstable now (recently added API)\n \/\/ current = try!(fs::canonicalize(current));\n\n loop {\n \/\/ If the current directory has no parent, we're done searching.\n if !current.pop() {\n return Err(io::Error::new(io::ErrorKind::NotFound, \"Config not found\"));\n }\n let config_file = current.join(\"rustfmt.toml\");\n if fs::metadata(&config_file).is_ok() {\n return Ok(config_file);\n }\n }\n}\n\n\/\/\/ Try to find a project file. If it's found, read it.\nfn lookup_and_read_project_file(input_file: &Path) -> io::Result<(PathBuf, String)> {\n let path = try!(lookup_project_file(input_file));\n let mut file = try!(File::open(&path));\n let mut toml = String::new();\n try!(file.read_to_string(&mut toml));\n Ok((path, toml))\n}\n\nfn execute() -> i32 {\n let mut opts = Options::new();\n opts.optflag(\"h\", \"help\", \"show this message\");\n opts.optopt(\"\",\n \"write-mode\",\n \"mode to write in (not usable when piping from stdin)\",\n \"[replace|overwrite|display|diff|coverage]\");\n\n let operation = determine_operation(&opts, env::args().skip(1));\n\n match operation {\n Operation::InvalidInput(reason) => {\n print_usage(&opts, &reason);\n 1\n }\n Operation::Help => {\n print_usage(&opts, \"\");\n 0\n }\n Operation::Stdin(input, write_mode) => {\n \/\/ try to read config from local directory\n let config = match lookup_and_read_project_file(&Path::new(\".\")) {\n Ok((_, toml)) => {\n Config::from_toml(&toml)\n }\n Err(_) => Default::default(),\n };\n\n run_from_stdin(input, write_mode, &config);\n 0\n }\n Operation::Format(file, write_mode) => {\n let config = match lookup_and_read_project_file(&file) {\n Ok((path, toml)) => {\n println!(\"Using rustfmt config file: {}\", path.display());\n Config::from_toml(&toml)\n }\n Err(_) => Default::default(),\n };\n\n run(&file, write_mode, &config);\n 0\n }\n }\n}\n\nfn main() {\n let _ = env_logger::init();\n let exit_code = execute();\n\n \/\/ Make sure standard output is flushed before we exit.\n std::io::stdout().flush().unwrap();\n\n \/\/ Exit with given exit code.\n \/\/\n \/\/ NOTE: This immediately terminates the process without doing any cleanup,\n \/\/ so make sure to finish all necessary cleanup before this is called.\n std::process::exit(exit_code);\n}\n\nfn print_usage(opts: &Options, reason: &str) {\n let reason = format!(\"{}\\nusage: {} [options] <file>\",\n reason,\n 
env::current_exe().unwrap().display());\n println!(\"{}\", opts.usage(&reason));\n Config::print_docs();\n}\n\nfn determine_operation<I>(opts: &Options, args: I) -> Operation\n where I: Iterator<Item = String>\n{\n let matches = match opts.parse(args) {\n Ok(m) => m,\n Err(e) => return Operation::InvalidInput(e.to_string()),\n };\n\n if matches.opt_present(\"h\") {\n return Operation::Help;\n }\n\n \/\/ if no file argument is supplied, read from stdin\n if matches.free.len() == 0 {\n\n let mut buffer = String::new();\n match io::stdin().read_to_string(&mut buffer) {\n Ok(..) => (),\n Err(e) => return Operation::InvalidInput(e.to_string()),\n }\n\n \/\/ WriteMode is always plain for Stdin\n return Operation::Stdin(buffer, WriteMode::Plain);\n }\n\n let write_mode = match matches.opt_str(\"write-mode\") {\n Some(mode) => {\n match mode.parse() {\n Ok(mode) => mode,\n Err(..) => return Operation::InvalidInput(\"Unrecognized write mode\".into()),\n }\n }\n None => WriteMode::Replace,\n };\n\n Operation::Format(PathBuf::from(&matches.free[0]), write_mode)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>simplify endpoint format selection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for #2324<commit_after>\/\/ nested function calls with cast.\nfn main() {\n self.ptr\n .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);\n self.ptr\n .set(intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize) as *mut u8);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add more system calls.<commit_after>use core::cell::RefCell;\nuse core::cell::Cell;\nuse hil::{AppId,Driver,Callback,AppSlice,Shared,NUM_PROCS};\nuse hil::spi_master::{SpiMaster,SpiCallback};\nuse core::cmp;\n\n\n\/* SPI operations are handled by coping into a kernel buffer for\n * writes and copying out of a kernel buffer for reads.\n *\n * If the application buffer is larger than the kernel buffer,\n * the driver issues multiple HAL operations. The len field\n * of an application keeps track of the length of the desired\n * operation, while the index variable keeps track of the \n * index an ongoing operation is at in the buffers. *\/\n\nstruct App {\n callback: Option<Callback>,\n app_read: Option<AppSlice<Shared, u8>>,\n app_write: Option<AppSlice<Shared, u8>>,\n len: Cell<usize>,\n index: Cell<usize>,\n}\n\npub struct Spi<'a, S: SpiMaster + 'a> {\n spi_master: &'a mut S,\n busy: Cell<bool>,\n apps: [RefCell<Option<App>>; NUM_PROCS],\n kernel_read: RefCell<Option<&'static mut [u8]>>,\n kernel_write: RefCell<Option<&'static mut [u8]>>,\n kernel_len: Cell<usize>\n}\n\nimpl<'a, S: SpiMaster> Spi<'a, S> {\n pub fn new(spi_master: &'a mut S) -> Spi<S> {\n Spi {\n spi_master: spi_master,\n busy: Cell::new(false),\n apps: [RefCell::new(None)],\n kernel_len: Cell::new(0),\n kernel_read : RefCell::new(None),\n kernel_write : RefCell::new(None),\n }\n }\n\n pub fn config_buffers(&mut self,\n read: &'static mut [u8],\n write: &'static mut [u8]) {\n let len = cmp::min(read.len(), write.len());\n self.kernel_len.set(len);\n self.kernel_read = RefCell::new(Some(read));\n self.kernel_write = RefCell::new(Some(write));\n }\n\n \/\/ Assumes checks for busy\/etc. 
already done\n \/\/ Updates app.index to be index + length of op \n fn do_next_read_write(&self, app: &mut App) {\n let start = app.index.get();\n let len = cmp::min(app.len.get() - start, self.kernel_len.get());\n let end = start + len;\n app.index.set(end);\n let mut kwrite = self.kernel_write.borrow_mut();\n let mut kread = self.kernel_read.borrow_mut();\n {\n use core::slice::bytes::copy_memory;\n let src = app.app_write.as_mut().unwrap();\n let mut kwbuf = kwrite.as_mut().unwrap();\n copy_memory(&src.as_ref()[start .. end], kwbuf);\n }\n let reading = app.app_read.is_some();\n if reading {\n self.spi_master.read_write_bytes(kwrite.take(), kread.take(), len);\n } else {\n self.spi_master.read_write_bytes(kwrite.take(), None, len);\n }\n }\n}\n\nimpl<'a, S: SpiMaster> Driver for Spi<'a, S> {\n fn allow(&self, appid: AppId,\n allow_num: usize, slice: AppSlice<Shared, u8>) -> isize {\n let app = appid.idx();\n match allow_num {\n 0 => {\n let mut appc = self.apps[app].borrow_mut();\n if appc.is_none() {\n *appc = Some(App {\n callback: None,\n app_read: Some(slice),\n app_write: None,\n len: Cell::new(0),\n index: Cell::new(0),\n })\n } else {\n appc.as_mut().map(|app| {\n app.app_read = Some(slice);\n });\n }\n 0\n },\n 1 => {\n let mut appc = self.apps[app].borrow_mut();\n if appc.is_none() {\n *appc = Some(App {\n callback: None,\n app_read: None,\n app_write: Some(slice),\n len: Cell::new(0),\n index: Cell::new(0),\n })\n } else {\n appc.as_mut().map(|app| app.app_write = Some(slice));\n }\n 0\n }\n _ => -1\n }\n }\n\n #[inline(never)]\n fn subscribe(&self, subscribe_num: usize, callback: Callback) -> isize {\n match subscribe_num {\n 0 \/* read_write *\/ => {\n let mut app = self.apps[0].borrow_mut();\n if app.is_none() {\n *app = Some(App {\n callback: Some(callback),\n app_read: None,\n app_write: None,\n len: Cell::new(0),\n index: Cell::new(0),\n });\n } else {\n app.as_mut().map(|a| a.callback = Some(callback));\n }\n 0\n },\n _ => -1\n }\n }\n \/*\n * 0: read\/write a single byte (blocking)\n * 1: read\/write buffers\n * - requires write buffer registered with allow\n * - read buffer optional\n * 2: set chip select\n * - valid values are 0-3\n * - invalid value will result in no chip select\n * 3: get chip select\n * - returns current selected peripheral\n * - If none selected, returns 255\n * x: lock spi\n * - if you perform an operation without the lock,\n * it implicitly acquires the lock before the\n * operation and releases it after\n * - while an app holds the lock no other app can issue\n * operations on SPI (they are buffered)\n * x+1: unlock spi\n * - does nothing if lock not held\n *\/\n\n fn command(&self, cmd_num: usize, arg1: usize) -> isize {\n match cmd_num {\n 0 \/* read_write_byte *\/ => { \n self.spi_master.read_write_byte(arg1 as u8) as isize\n },\n 1 \/* read_write_bytes *\/ => { \n if self.busy.get() {\n return -1;\n }\n let mut app = self.apps[0].borrow_mut();\n if app.is_none() {\n return -1;\n }\n app.as_mut().map(|mut a| {\n \/\/ If no write buffer, return\n if a.app_write.is_none() {\n return -1;\n }\n let mut mlen = 0;\n \/\/ If write buffer too small, return\n a.app_write.as_mut().map(|w| {\n mlen = w.len();\n });\n a.app_read.as_mut().map(|r| {\n mlen = cmp::min(mlen, r.len());\n });\n if mlen < arg1 {\n return -1;\n }\n a.len.set(arg1);\n a.index.set(0);\n self.busy.set(true);\n self.do_next_read_write(&mut a as &mut App);\n 0\n }); \n -1\n }\n 2 \/* set chip select *\/ => {\n let cs = arg1 as u8;\n if cs <= 3 {\n 
self.spi_master.set_chip_select(cs);\n 0\n } else {\n -1\n }\n }\n 3 \/* get chip select *\/ => {\n self.spi_master.get_chip_select() as isize\n }\n _ => -1\n }\n }\n}\n\n#[allow(dead_code)]\nfn each_some<'a, T, I, F>(lst: I, f: F)\n where T: 'a, I: Iterator<Item=&'a RefCell<Option<T>>>, F: Fn(&mut T) {\n for item in lst {\n item.borrow_mut().as_mut().map(|i| f(i));\n }\n}\n\nimpl<'a, S: SpiMaster> SpiCallback for Spi<'a, S> {\n fn read_write_done(&self, \n writebuf: Option<&'static mut [u8]>, \n readbuf: Option<&'static mut [u8]>,\n length: usize) {\n self.apps[0].borrow_mut().as_mut().map(|app| {\n if app.app_read.is_some() {\n use core::slice::bytes::copy_memory;\n let src = readbuf.as_ref().unwrap();\n let dest = app.app_read.as_mut().unwrap(); \n let start = app.index.get() - length;\n let end = start + length;\n copy_memory(&src[0 .. length], &mut dest.as_mut()[start .. end]);\n }\n\n *self.kernel_read.borrow_mut() = readbuf;\n *self.kernel_write.borrow_mut() = writebuf;\n\n if app.index.get() == app.len.get() {\n self.busy.set(false);\n app.len.set(0);\n app.index.set(0);\n app.callback.take().map(|mut cb| {\n cb.schedule(app.len.get(), 0, 0);\n });\n } else {\n self.do_next_read_write(app);\n }\n });\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fetch\/git: add basic version check to disable unsupported features<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed update_branch bug where checkout was only current branch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add meh solution to problem 68<commit_after>#![feature(collections)]\n#[macro_use] extern crate libeuler;\n\nuse std::usize;\n\/\/\/ Consider the following \"magic\" 3-gon ring, filled with the numbers 1 to 6, and each line adding\n\/\/\/ to nine.\n\/\/\/\n\/\/\/ Working clockwise, and starting from the group of three with the numerically lowest external\n\/\/\/ node (4,3,2 in this example), each solution can be described uniquely. For example, the above\n\/\/\/ solution can be described by the set: 4,3,2; 6,2,1; 5,1,3.\n\/\/\/\n\/\/\/ It is possible to complete the ring with four different totals: 9, 10, 11, and 12. There are\n\/\/\/ eight solutions in total.\n\/\/\/ Total Solution Set\n\/\/\/ 9 4,2,3; 5,3,1; 6,1,2\n\/\/\/ 9 4,3,2; 6,2,1; 5,1,3\n\/\/\/ 10 2,3,5; 4,5,1; 6,1,3\n\/\/\/ 10 2,5,3; 6,3,1; 4,1,5\n\/\/\/ 11 1,4,6; 3,6,2; 5,2,4\n\/\/\/ 11 1,6,4; 5,4,2; 3,2,6\n\/\/\/ 12 1,5,6; 2,6,4; 3,4,5\n\/\/\/ 12 1,6,5; 3,5,4; 2,4,6\n\/\/\/\n\/\/\/ By concatenating each group it is possible to form 9-digit strings; the maximum string for a\n\/\/\/ 3-gon ring is 432621513.\n\/\/\/\n\/\/\/ Using the numbers 1 to 10, and depending on arrangements, it is possible to form 16- and\n\/\/\/ 17-digit strings. What is the maximum 16-digit string for a \"magic\" 5-gon ring?\nfn main() {\n solutions! 
{\n inputs: (size: usize = 5)\n\n sol naive {\n let mut n: Vec<_> = (0..(size*2)).map(|d| d + 1).collect();\n let mut minlen = usize::MAX;\n let mut max = 0;\n\n \/\/ [r1, r2, r3, r4, r5, e1, e2, e3, e4, e5]\n while n.next_permutation() {\n let ring = &n[0..5];\n\n if ring.iter().any(|&i| i == 10) {\n continue;\n }\n\n let sides: Vec<_> = (0..size)\n .map(|ix| (n[ix + size], n[ix], n[(ix + 1) % size]))\n .collect();\n\n let minside = sides.iter().map(|&(a, _, _)| a).min().unwrap();\n\n if sides[0].0 != minside {\n continue;\n }\n\n let (sameness, total) = sides.iter()\n .map(|&(a, b, c)| a+b+c)\n .fold((true, None), |(c, prev), v| {\n let t = c && (prev.is_none() || prev.unwrap() == v);\n\n (t, Some(v))\n });\n\n if sameness {\n println!(\"{:?} {:?} {}\", total, sides, sameness);\n\n let d = sides.iter()\n .map(|&(a, b, c)| format!(\"{}{}{}\", a, b, c))\n .collect::<Vec<_>>().concat();\n\n println!(\"{} {} \/ {} {}\", d, d.len(), max, minlen);\n\n let v: i64 = d.parse().unwrap();\n if d.len() < minlen || (d.len() == minlen && v > max) {\n max = v;\n minlen = d.len();\n }\n }\n }\n\n max\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add an example of parsing floats with CompleteStr<commit_after>#[macro_use]\nextern crate nom;\n\nuse nom::digit;\nuse nom::types::CompleteStr;\n\nuse std::str;\nuse std::str::FromStr;\n\n#[macro_export]\nmacro_rules! complete_named (\n ($name:ident, $submac:ident!( $($args:tt)* )) => (\n fn $name<'a>( i: CompleteStr<'a> ) -> nom::IResult<CompleteStr<'a>, CompleteStr<'a>, u32> {\n $submac!(i, $($args)*)\n }\n );\n ($name:ident<$o:ty>, $submac:ident!( $($args:tt)* )) => (\n fn $name<'a>( i: CompleteStr<'a> ) -> nom::IResult<CompleteStr<'a>, $o, u32> {\n $submac!(i, $($args)*)\n }\n );\n);\n\n\ncomplete_named!(unsigned_float <f32>,\n flat_map!(\n recognize!(\n alt!(\n delimited!(digit, tag!(\".\"), opt!(digit)) |\n delimited!(opt!(digit), tag!(\".\"), digit)\n )\n ),\n parse_to!(f32)\n )\n);\n\ncomplete_named!(float <f32>, map!(\n pair!(\n opt!(alt!(tag!(\"+\") | tag!(\"-\"))),\n unsigned_float\n ),\n |(sign, value): (Option<CompleteStr>, f32)| {\n sign.and_then(|s| s.0.chars().next()).and_then(|c| if c == '-' { Some(-1f32) } else { None }).unwrap_or(1f32) * value\n }\n));\n\n#[test]\nfn unsigned_float_test() {\n assert_eq!(unsigned_float(CompleteStr(\"123.456\")), Ok((CompleteStr(\"\"), 123.456)));\n assert_eq!(unsigned_float(CompleteStr(\"0.123\")), Ok((CompleteStr(\"\"), 0.123)));\n assert_eq!(unsigned_float(CompleteStr(\"123.0\")), Ok((CompleteStr(\"\"), 123.0)));\n assert_eq!(unsigned_float(CompleteStr(\"123.\")), Ok((CompleteStr(\"\"), 123.0)));\n assert_eq!(unsigned_float(CompleteStr(\".123\")), Ok((CompleteStr(\"\"), 0.123)));\n}\n\n#[test]\nfn float_test() {\n assert_eq!(float(CompleteStr(\"123.456\")), Ok((CompleteStr(\"\"), 123.456)));\n assert_eq!(float(CompleteStr(\"+123.456\")), Ok((CompleteStr(\"\"), 123.456)));\n assert_eq!(float(CompleteStr(\"-123.456\")), Ok((CompleteStr(\"\"), -123.456)));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Minor<commit_after><|endoftext|>"} {"text":"<commit_before>use super::SDTHeader;\n\n#[repr(packed)]\n#[derive(Clone, Copy, Debug, Default)]\npub struct DSDT {\n pub header: SDTHeader,\n pub data: &'static [u8]\n\n}\n\nimpl DSDT {\n pub fn new(header: *const SDTHeader) -> Option<Self> {\n if unsafe { (*header).valid(\"DSDT\") } {\n Some(DSDT {\n header: unsafe { *header },\n data: unsafe { (*header).data() }\n })\n } else {\n None\n }\n }\n}\n<commit_msg>Simplify `DSDT::new()` with fewer 
`unsafe` blocks<commit_after>use super::SDTHeader;\n\n#[repr(packed)]\n#[derive(Clone, Copy, Debug, Default)]\npub struct DSDT {\n pub header: SDTHeader,\n pub data: &'static [u8]\n\n}\n\nimpl DSDT {\n pub fn new(header: *const SDTHeader) -> Option<Self> {\n let header = unsafe { *header };\n if header.valid(\"DSDT\") {\n Some(DSDT {\n header: header,\n data: header.data(),\n })\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Log tuning.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Work on function type inference.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Must derive `Debug` so `println!` can be used.\n\/\/ `allow` required to silence warnings because only\n\/\/ one variant is used.\n#[allow(dead_code)]\n#[derive(Debug)]\nenum Color {\n \/\/ These 3 are specified solely by their name.\n Red,\n Blue,\n Green,\n \/\/ This requires 3 `i32`s and a name.\n RGB(i32, i32, i32),\n}\n\nfn main() {\n let color = Color::RGB(122, 17, 40);\n \/\/ TODO ^ Try different variants for `color`\n\n println!(\"What color is it?\");\n \/\/ An `enum` can be destructured using a `match`.\n match color {\n Color::Red => println!(\"The color is Red!\"),\n Color::Blue => println!(\"The color is Blue!\"),\n Color::Green => println!(\"The color is Green!\"),\n Color::RGB(r, g, b) => {\n println!(\"Red: {:?}, green: {:?}, and blue: {:?}!:\", r, g, b);\n },\n \/\/ Don't need another arm because all variants have been examined\n }\n}\n<commit_msg>flow control\/match\/destructuring\/enums: fix typo<commit_after>\/\/ Must derive `Debug` so `println!` can be used.\n\/\/ `allow` required to silence warnings because only\n\/\/ one variant is used.\n#[allow(dead_code)]\n#[derive(Debug)]\nenum Color {\n \/\/ These 3 are specified solely by their name.\n Red,\n Blue,\n Green,\n \/\/ This requires 3 `i32`s and a name.\n RGB(i32, i32, i32),\n}\n\nfn main() {\n let color = Color::RGB(122, 17, 40);\n \/\/ TODO ^ Try different variants for `color`\n\n println!(\"What color is it?\");\n \/\/ An `enum` can be destructured using a `match`.\n match color {\n Color::Red => println!(\"The color is Red!\"),\n Color::Blue => println!(\"The color is Blue!\"),\n Color::Green => println!(\"The color is Green!\"),\n Color::RGB(r, g, b) => {\n println!(\"Red: {:?}, green: {:?}, and blue: {:?}!\", r, g, b);\n },\n \/\/ Don't need another arm because all variants have been examined\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>pp: add test for raw strs in non-expression positions<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ pp-exact\n\n#[cfg = r#\"just parse this\"#]\nextern mod blah = r##\"blah\"##;\n\nfn main() { unsafe { asm!(r###\"blah\"###); } }\n<|endoftext|>"} {"text":"<commit_before>use std::{fmt, mem, ptr, isize};\nuse std::num::Int;\nuse std::ops::{Index, IndexMut};\nuse alloc::heap;\nuse os::token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n mem: *mut Entry<T>,\n \/\/ Number of elements currently in the slab\n len: isize,\n \/\/ The total number of elements that the slab can hold\n cap: isize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: isize,\n \/\/ The total number of slots that were initialized\n init: isize,\n}\n\nconst MAX: usize = isize::MAX as usize;\n\n\/\/ When Entry.nxt is set to this, the entry is in use\nconst IN_USE: isize = -1;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let size = cap.checked_mul(mem::size_of::<Entry<T>>())\n .expect(\"capacity overflow\");\n\n let ptr = unsafe { heap::allocate(size, mem::min_align_of::<Entry<T>>()) };\n\n Slab {\n mem: ptr as *mut Entry<T>,\n cap: cap as isize,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n init: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.cap - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n return self.entry(idx).in_use();\n }\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n assert!(self.contains(idx), \"slab does not contain token `{:?}`\", idx);\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n let entry = self.entry(idx);\n\n if entry.in_use() {\n return Some(&entry.val);\n }\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n let mut entry = self.mut_entry(idx);\n\n if entry.in_use() {\n return Some(&mut entry.val);\n }\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = self.nxt;\n\n if idx == self.init {\n \/\/ Using an uninitialized entry\n if idx == self.cap {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.cap);\n return Err(val);\n }\n\n self.mut_entry(idx).put(val, true);\n\n self.init += 1;\n self.len = self.init;\n self.nxt = self.init;\n\n debug!(\"inserting into new slot; idx={}\", idx);\n }\n else {\n self.len += 1;\n self.nxt = self.mut_entry(idx).put(val, false);\n\n debug!(\"inserting into reused slot; idx={}\", idx);\n }\n\n 
Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n debug!(\"removing value; idx={:?}\", idx);\n\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > MAX {\n return None;\n }\n\n let idx = idx as isize;\n\n \/\/ Ensure index is within capacity of slab\n if idx >= self.init {\n return None;\n }\n\n let nxt = self.nxt;\n\n match self.mut_entry(idx).remove(nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n\n #[inline]\n fn entry(&self, idx: isize) -> &Entry<T> {\n unsafe { &*self.mem.offset(idx) }\n }\n\n #[inline]\n fn mut_entry(&mut self, idx: isize) -> &mut Entry<T> {\n unsafe { &mut *self.mem.offset(idx) }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> isize {\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n return idx;\n }\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.cap);\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: isize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: &Token) -> &'a T {\n let idx = self.token_to_idx(*idx);\n let idx = self.validate_idx(idx);\n\n let e = self.entry(idx);\n\n if !e.in_use() {\n panic!(\"invalid index; idx={}\", idx);\n }\n\n &e.val\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: &Token) -> &'a mut T {\n let idx = self.token_to_idx(*idx);\n let idx = self.validate_idx(idx);\n\n let e = self.mut_entry(idx);\n\n if !e.in_use() {\n panic!(\"invalid index; idx={}\", idx);\n }\n\n &mut e.val\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.cap)\n }\n}\n\n#[unsafe_destructor]\nimpl<T> Drop for Slab<T> {\n fn drop(&mut self) {\n \/\/ TODO: check whether or not this is needed with intrinsics::needs_drop\n let mut i = 0;\n\n while i < self.init {\n self.mut_entry(i).release();\n i += 1;\n }\n\n let cap = self.cap as usize;\n let size = cap.checked_mul(mem::size_of::<Entry<T>>()).unwrap();\n unsafe { heap::deallocate(self.mem as *mut u8, size, mem::min_align_of::<Entry<T>>()) };\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: isize,\n val: T\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T, init: bool) -> isize {\n assert!(init || self.nxt != IN_USE);\n\n let ret = self.nxt;\n\n unsafe { ptr::write(&mut self.val as *mut T, val); }\n self.nxt = IN_USE;\n\n \/\/ Could be uninitialized memory, but the caller (Slab) should guard\n \/\/ not use the return value in those cases.\n ret\n }\n\n fn remove(&mut self, nxt: isize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n Some(unsafe { ptr::read(&self.val as *const T) })\n } else {\n None\n }\n }\n\n fn release(&mut self) {\n if self.in_use() {\n let _ = Some(unsafe { ptr::read(&self.val as *const T) });\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.nxt == IN_USE\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: isize,\n yielded: isize\n}\n\nimpl <'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn 
next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n let entry = self.slab.entry(self.cur_idx);\n self.cur_idx += 1;\n if entry.in_use() {\n self.yielded += 1;\n return Some(&entry.val);\n }\n }\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n slab: &'a mut Slab<T>,\n cur_idx: isize,\n yielded: isize\n}\n\nimpl <'a, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'a mut T;\n\n fn next(&mut self) -> Option<&'a mut T> {\n while self.yielded < self.slab.len {\n let entry = unsafe { &mut *self.slab.mem.offset(self.cur_idx) };\n self.cur_idx += 1;\n if entry.in_use() {\n self.yielded += 1;\n return Some(&mut entry.val);\n }\n }\n None\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in range(0, 10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in range(0, 10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(slab[t1].as_slice(), \"foobar\");\n }\n\n #[test]\n #[should_fail]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_fail]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n 
#[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in range(0, 4) {\n slab.insert(i).unwrap();\n }\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![0, 1, 2, 3]);\n slab.remove(Token(1));\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in range(0, 4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![1, 2, 3, 4]);\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![2, 3, 5]);\n }\n}\n<commit_msg>Fix deprecation warning in tests<commit_after>use std::{fmt, mem, ptr, isize};\nuse std::num::Int;\nuse std::ops::{Index, IndexMut};\nuse alloc::heap;\nuse os::token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n mem: *mut Entry<T>,\n \/\/ Number of elements currently in the slab\n len: isize,\n \/\/ The total number of elements that the slab can hold\n cap: isize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: isize,\n \/\/ The total number of slots that were initialized\n init: isize,\n}\n\nconst MAX: usize = isize::MAX as usize;\n\n\/\/ When Entry.nxt is set to this, the entry is in use\nconst IN_USE: isize = -1;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let size = cap.checked_mul(mem::size_of::<Entry<T>>())\n .expect(\"capacity overflow\");\n\n let ptr = unsafe { heap::allocate(size, mem::min_align_of::<Entry<T>>()) };\n\n Slab {\n mem: ptr as *mut Entry<T>,\n cap: cap as isize,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n init: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.cap - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n return self.entry(idx).in_use();\n }\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n assert!(self.contains(idx), \"slab does not contain token `{:?}`\", idx);\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n let entry = self.entry(idx);\n\n if entry.in_use() {\n return Some(&entry.val);\n }\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n let mut entry = self.mut_entry(idx);\n\n if entry.in_use() {\n return Some(&mut entry.val);\n }\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = 
self.nxt;\n\n if idx == self.init {\n \/\/ Using an uninitialized entry\n if idx == self.cap {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.cap);\n return Err(val);\n }\n\n self.mut_entry(idx).put(val, true);\n\n self.init += 1;\n self.len = self.init;\n self.nxt = self.init;\n\n debug!(\"inserting into new slot; idx={}\", idx);\n }\n else {\n self.len += 1;\n self.nxt = self.mut_entry(idx).put(val, false);\n\n debug!(\"inserting into reused slot; idx={}\", idx);\n }\n\n Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n debug!(\"removing value; idx={:?}\", idx);\n\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > MAX {\n return None;\n }\n\n let idx = idx as isize;\n\n \/\/ Ensure index is within capacity of slab\n if idx >= self.init {\n return None;\n }\n\n let nxt = self.nxt;\n\n match self.mut_entry(idx).remove(nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n\n #[inline]\n fn entry(&self, idx: isize) -> &Entry<T> {\n unsafe { &*self.mem.offset(idx) }\n }\n\n #[inline]\n fn mut_entry(&mut self, idx: isize) -> &mut Entry<T> {\n unsafe { &mut *self.mem.offset(idx) }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> isize {\n if idx <= MAX {\n let idx = idx as isize;\n\n if idx < self.init {\n return idx;\n }\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.cap);\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: isize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: &Token) -> &'a T {\n let idx = self.token_to_idx(*idx);\n let idx = self.validate_idx(idx);\n\n let e = self.entry(idx);\n\n if !e.in_use() {\n panic!(\"invalid index; idx={}\", idx);\n }\n\n &e.val\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: &Token) -> &'a mut T {\n let idx = self.token_to_idx(*idx);\n let idx = self.validate_idx(idx);\n\n let e = self.mut_entry(idx);\n\n if !e.in_use() {\n panic!(\"invalid index; idx={}\", idx);\n }\n\n &mut e.val\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.cap)\n }\n}\n\n#[unsafe_destructor]\nimpl<T> Drop for Slab<T> {\n fn drop(&mut self) {\n \/\/ TODO: check whether or not this is needed with intrinsics::needs_drop\n let mut i = 0;\n\n while i < self.init {\n self.mut_entry(i).release();\n i += 1;\n }\n\n let cap = self.cap as usize;\n let size = cap.checked_mul(mem::size_of::<Entry<T>>()).unwrap();\n unsafe { heap::deallocate(self.mem as *mut u8, size, mem::min_align_of::<Entry<T>>()) };\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: isize,\n val: T\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T, init: bool) -> isize {\n assert!(init || self.nxt != IN_USE);\n\n let ret = self.nxt;\n\n unsafe { ptr::write(&mut self.val as *mut T, val); }\n self.nxt = IN_USE;\n\n \/\/ Could be uninitialized memory, but the caller (Slab) should guard\n \/\/ not use the return value in those cases.\n ret\n }\n\n fn remove(&mut self, nxt: 
isize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n Some(unsafe { ptr::read(&self.val as *const T) })\n } else {\n None\n }\n }\n\n fn release(&mut self) {\n if self.in_use() {\n let _ = Some(unsafe { ptr::read(&self.val as *const T) });\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.nxt == IN_USE\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: isize,\n yielded: isize\n}\n\nimpl <'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n let entry = self.slab.entry(self.cur_idx);\n self.cur_idx += 1;\n if entry.in_use() {\n self.yielded += 1;\n return Some(&entry.val);\n }\n }\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n slab: &'a mut Slab<T>,\n cur_idx: isize,\n yielded: isize\n}\n\nimpl <'a, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'a mut T;\n\n fn next(&mut self) -> Option<&'a mut T> {\n while self.yielded < self.slab.len {\n let entry = unsafe { &mut *self.slab.mem.offset(self.cur_idx) };\n self.cur_idx += 1;\n if entry.in_use() {\n self.yielded += 1;\n return Some(&mut entry.val);\n }\n }\n None\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in range(0, 10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in range(0, 10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(slab[t1].as_slice(), \"foobar\");\n }\n\n #[test]\n #[should_panic]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n 
assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_panic]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n #[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in range(0, 4) {\n slab.insert(i).unwrap();\n }\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![0, 1, 2, 3]);\n slab.remove(Token(1));\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in range(0, 4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![1, 2, 3, 4]);\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n assert_eq!(slab.iter().map(|r| *r).collect(), vec![2, 3, 5]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust trait alias example<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>empty message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Example for match_if_let<commit_after>#![feature(phase)]\n\n#[phase(plugin)]\nextern crate rust_clippy;\n\n\nfn main(){\n let x = Some(1u);\n match x {\n Some(y) => println!(\"{}\", y),\n _ => ()\n }\n \/\/ Not linted\n match x {\n Some(y) => println!(\"{}\", y),\n None => ()\n }\n let z = (1u,1u);\n match z {\n (2...3, 7...9) => println!(\"{}\", z),\n _ => {}\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>More VIP state cleanup<commit_after><|endoftext|>"} {"text":"<commit_before>use std::iter::Peekable;\nuse std::num::ParseIntError;\nuse std::collections::HashMap;\n\nuse xml::reader::events::*;\nuse xml::reader::Events;\nuse hyper::client::response::*;\n\n\/\/\/ generic Error for XML parsing\n#[derive(Debug)]\npub struct XmlParseError(pub String);\n\nimpl XmlParseError {\n\tpub fn new(msg: &str) -> XmlParseError {\n\t\tXmlParseError(msg.to_string())\n\t}\n}\n\n\/\/\/ syntactic sugar for the XML event stack we pass around\npub type XmlStack<'a> = Peekable<Events<'a, Response>>;\n\nimpl From<ParseIntError> for XmlParseError{\n fn from(_e:ParseIntError) -> XmlParseError { XmlParseError::new(\"ParseIntError\") }\n}\n\n\/\/\/ parse Some(String) if the next tag has the right name, otherwise None\npub fn optional_string_field(field_name: &str, stack: &mut XmlStack) -> Result<Option<String>, XmlParseError> {\n\tif try!(peek_at_name(stack)) == field_name {\n\t\tlet val = try!(string_field(field_name, stack));\n\t\tOk(Some(val))\n\t} else {\n\t\tOk(None)\n\t}\n}\n\n\/\/\/ return a string field with the right name or throw a parse error\npub fn string_field(name: &str, stack: &mut XmlStack) -> Result<String, XmlParseError> {\n\ttry!(start_element(name, stack));\n\tlet value = try!(characters(stack));\n\ttry!(end_element(name, stack));\n\tOk(value)\n}\n\n\/\/\/ return some XML Characters\npub fn characters(stack: &mut XmlStack ) -> Result<String, XmlParseError> {\n\tif let Some(XmlEvent::Characters(data)) = stack.next() {\n\t\tOk(data.to_string())\n\t} else {\n\t\tErr(XmlParseError::new(\"Expected characters\"))\n\t}\n\t\n}\n\n\/\/\/ get the name of the 
current element in the stack. throw a parse error if it's not a StartElement\npub fn peek_at_name(stack: &mut XmlStack) -> Result<String, XmlParseError> {\n\tlet current = stack.peek();\n\tif let Some(&XmlEvent::StartElement{ref name, ..}) = current {\n\t\tOk(name.local_name.to_string())\t\n\t} else {\t\t \n\t\tOk(\"\".to_string())\n\t}\t\n}\n\n\/\/\/ consume a StartElement with a specific name or throw an XmlParseError\npub fn start_element(element_name: &str, stack: &mut XmlStack) -> Result<HashMap<String, String>, XmlParseError> {\n\tlet next = stack.next();\n\tif let Some(XmlEvent::StartElement { name, attributes, .. }) = next {\n\t\tif name.local_name != element_name {\n\t\t\tErr(XmlParseError::new(&format!(\"Expected {} got {}\", element_name, name.local_name)))\n\t\t} else {\n\t\t\tlet mut attr_map = HashMap::new();\n\t\t\tfor attr in attributes {\n\t\t\t\tattr_map.insert(attr.name.local_name, attr.value);\n\t\t\t}\n\t\t\tOk(attr_map)\n\t\t}\n\t}else {\n \n \/\/ \tprintln!(\"{:#?}\", next);\t\t\n\t\tErr(XmlParseError::new(&format!(\"Expected StartElement {}\", element_name)))\n\t}\n}\t\n\t\t\n\/\/\/ consume an EndElement with a specific name or throw an XmlParseError\t\t\npub fn end_element(element_name: &str, stack: &mut XmlStack) -> Result<(), XmlParseError> {\n\tlet next = stack.next();\n\tif let Some(XmlEvent::EndElement { name, .. }) = next {\n\t\tif name.local_name != element_name {\n\t\t\tErr(XmlParseError::new(&format!(\"Expected {} got {}\", element_name, name.local_name)))\n\t\t} else {\n\t\t\tOk(())\n\t\t}\n\t}else {\n\t\tErr(XmlParseError::new(&format!(\"Expected EndElement {} got {:?}\", element_name, next)))\n\t}\n}\t\n\t\t\t\n<commit_msg>Starts refactoring of xmlstack to be more testable.<commit_after>use std::iter::Peekable;\nuse std::num::ParseIntError;\nuse std::collections::HashMap;\nuse std::io::{self, Read};\nuse xml::reader::events::*;\nuse xml::reader::Events;\nuse hyper::client::response::*;\n\n\/\/\/ generic Error for XML parsing\n#[derive(Debug)]\npub struct XmlParseError(pub String);\n\nimpl XmlParseError {\n\tpub fn new(msg: &str) -> XmlParseError {\n\t\tXmlParseError(msg.to_string())\n\t}\n}\n\n\/\/\/ syntactic sugar for the XML event stack we pass around\npub type XmlStack<'a> = Peekable<Events<'a, Response>>;\n\n\/\/ Wraps the Hyper Response type\npub struct XmlResponseFromAws<'b> {\n\tresponse: Response, \/\/ Hyper response type\n\txml_stack: Peekable<Events<'b, Response>> \/\/ refactor to use XmlStack type?\n}\n\nimpl <'b> Read for XmlResponseFromAws<'b> {\n\t\/\/ implement read. 
See https:\/\/github.com\/hyperium\/hyper\/blob\/master\/src\/client\/response.rs\n\tfn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\t\treturn self.response.read(buf);\n\t}\n}\n\n\/\/ TODO: move to tests\/xmlutils.rs\npub struct XmlResponseFromFile {\n\tfile_location: String\n}\n\nimpl Read for XmlResponseFromFile {\n\t#[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\t\t\/\/ Get a Result reader from our specified file\n panic!(\"Not implemented.\");\n }\n}\n\/\/ \/move to tests\/xmlutils.rs\n\n\nimpl From<ParseIntError> for XmlParseError{\n fn from(_e:ParseIntError) -> XmlParseError { XmlParseError::new(\"ParseIntError\") }\n}\n\n\/\/\/ parse Some(String) if the next tag has the right name, otherwise None\npub fn optional_string_field(field_name: &str, stack: &mut XmlStack) -> Result<Option<String>, XmlParseError> {\n\tif try!(peek_at_name(stack)) == field_name {\n\t\tlet val = try!(string_field(field_name, stack));\n\t\tOk(Some(val))\n\t} else {\n\t\tOk(None)\n\t}\n}\n\n\/\/\/ return a string field with the right name or throw a parse error\npub fn string_field(name: &str, stack: &mut XmlStack) -> Result<String, XmlParseError> {\n\ttry!(start_element(name, stack));\n\tlet value = try!(characters(stack));\n\ttry!(end_element(name, stack));\n\tOk(value)\n}\n\n\/\/\/ return some XML Characters\npub fn characters(stack: &mut XmlStack ) -> Result<String, XmlParseError> {\n\tif let Some(XmlEvent::Characters(data)) = stack.next() {\n\t\tOk(data.to_string())\n\t} else {\n\t\tErr(XmlParseError::new(\"Expected characters\"))\n\t}\n\n}\n\n\/\/\/ get the name of the current element in the stack. throw a parse error if it's not a StartElement\npub fn peek_at_name(stack: &mut XmlStack) -> Result<String, XmlParseError> {\n\tlet current = stack.peek();\n\tif let Some(&XmlEvent::StartElement{ref name, ..}) = current {\n\t\tOk(name.local_name.to_string())\n\t} else {\n\t\tOk(\"\".to_string())\n\t}\n}\n\n\/\/\/ consume a StartElement with a specific name or throw an XmlParseError\npub fn start_element(element_name: &str, stack: &mut XmlStack) -> Result<HashMap<String, String>, XmlParseError> {\n\tlet next = stack.next();\n\tif let Some(XmlEvent::StartElement { name, attributes, .. }) = next {\n\t\tif name.local_name != element_name {\n\t\t\tErr(XmlParseError::new(&format!(\"Expected {} got {}\", element_name, name.local_name)))\n\t\t} else {\n\t\t\tlet mut attr_map = HashMap::new();\n\t\t\tfor attr in attributes {\n\t\t\t\tattr_map.insert(attr.name.local_name, attr.value);\n\t\t\t}\n\t\t\tOk(attr_map)\n\t\t}\n\t}else {\n\n \/\/ \tprintln!(\"{:#?}\", next);\n\t\tErr(XmlParseError::new(&format!(\"Expected StartElement {}\", element_name)))\n\t}\n}\n\n\/\/\/ consume an EndElement with a specific name or throw an XmlParseError\npub fn end_element(element_name: &str, stack: &mut XmlStack) -> Result<(), XmlParseError> {\n\tlet next = stack.next();\n\tif let Some(XmlEvent::EndElement { name, .. 
}) = next {\n\t\tif name.local_name != element_name {\n\t\t\tErr(XmlParseError::new(&format!(\"Expected {} got {}\", element_name, name.local_name)))\n\t\t} else {\n\t\t\tOk(())\n\t\t}\n\t}else {\n\t\tErr(XmlParseError::new(&format!(\"Expected EndElement {} got {:?}\", element_name, next)))\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>vga: Testing something again.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add smoke test<commit_after>extern crate rudy;\n\n#[test]\nfn smoke_test() {\n use rudy::rudymap::RudyMap;\n\n let mut map: RudyMap<u32, u32> = RudyMap::new();\n let n = 10_000;\n\n for i in 0..n {\n assert!(map.insert(i, i).is_none());\n }\n\n for i in 0..n {\n assert_eq!(map.remove(i), Some(i));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add functionality to flush the internal cache<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add quicksort algorithm<commit_after><|endoftext|>"} {"text":"<commit_before>use time;\nuse std::io::timer::sleep;\n\nuse GameWindow;\nuse keyboard;\nuse mouse;\nuse event;\n\nuse std::cmp;\n\n\/\/\/ Render argument.\n#[deriving(Clone)]\npub struct RenderArgs {\n \/\/\/ Extrapolated time in seconds, used to do smooth animation.\n pub ext_dt: f64,\n \/\/\/ The width of rendered area.\n pub width: u32,\n \/\/\/ The height of rendered area.\n pub height: u32,\n}\n\n\/\/\/ Update argument.\n#[deriving(Clone)]\npub struct UpdateArgs {\n \/\/\/ Delta time in seconds.\n pub dt: f64,\n}\n\n\/\/\/ Key press arguments.\n#[deriving(Clone)]\npub struct KeyPressArgs {\n \/\/\/ Keyboard key.\n pub key: keyboard::Key,\n}\n\n\/\/\/ Key release arguments.\n#[deriving(Clone)]\npub struct KeyReleaseArgs {\n \/\/\/ Keyboard key.\n pub key: keyboard::Key,\n}\n\n\/\/\/ Mouse press arguments.\n#[deriving(Clone)]\npub struct MousePressArgs {\n \/\/\/ Mouse button.\n pub button: mouse::Button,\n}\n\n\/\/\/ Mouse release arguments.\n#[deriving(Clone)]\npub struct MouseReleaseArgs {\n \/\/\/ Mouse button.\n pub button: mouse::Button,\n}\n\n\/\/\/ Mouse move arguments.\n#[deriving(Clone)]\npub struct MouseMoveArgs {\n \/\/\/ x.\n pub x: f64,\n \/\/\/ y.\n pub y: f64,\n}\n\n\/\/\/ Mouse relative move arguments.\n#[deriving(Clone)]\npub struct MouseRelativeMoveArgs {\n \/\/\/ Delta x.\n pub dx: f64,\n \/\/\/ Delta y.\n pub dy: f64,\n}\n\n\/\/\/ Mouse scroll arguments.\n#[deriving(Clone)]\npub struct MouseScrollArgs {\n \/\/\/ x.\n pub x: f64,\n \/\/\/ y.\n pub y: f64,\n}\n\n\/\/\/ Contains the different game events.\n#[deriving(Clone)]\npub enum GameEvent {\n \/\/\/ Render graphics.\n Render(RenderArgs),\n \/\/\/ Update physical state of the game.\n Update(UpdateArgs),\n \/\/\/ Pressed a keyboard key.\n KeyPress(KeyPressArgs),\n \/\/\/ Released a keyboard key.\n KeyRelease(KeyReleaseArgs),\n \/\/\/ Pressed a mouse button.\n MousePress(MousePressArgs),\n \/\/\/ Released a mouse button.\n MouseRelease(MouseReleaseArgs),\n \/\/\/ Moved mouse cursor.\n MouseMove(MouseMoveArgs),\n \/\/\/ Moved mouse relative, not bounded by cursor.\n MouseRelativeMove(MouseRelativeMoveArgs),\n \/\/\/ Scrolled mouse.\n MouseScroll(MouseScrollArgs)\n}\n\n#[deriving(Show)]\nenum GameIteratorState {\n RenderState,\n SwapBuffersState,\n UpdateLoopState,\n HandleEventsState,\n MouseRelativeMoveState(f64, f64),\n UpdateState,\n}\n\n\/\/\/ Settings for the game loop behavior.\n#[deriving(Clone)]\npub struct GameIteratorSettings {\n \/\/\/ The number of updates per second (UPS).\n pub updates_per_second: u64,\n \/\/\/ The maximum number of frames 
per second (FPS target).\n pub max_frames_per_second: u64,\n}\n\n\/\/\/ A game loop iterator.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```Rust\n\/\/\/ let game_iter_settings = GameIteratorSettings {\n\/\/\/ updates_per_second: 120,\n\/\/\/ max_frames_per_second: 60,\n\/\/\/ };\n\/\/\/ let ref mut gl = Gl::new();\n\/\/\/ for e in GameIterator::new(&mut window, &game_iter_settings) {\n\/\/\/ match e {\n\/\/\/ Render(args) => {\n\/\/\/ \/\/ Set the viewport in window to render graphics.\n\/\/\/ gl.viewport(0, 0, args.width as i32, args.height as i32);\n\/\/\/ \/\/ Create graphics context with absolute coordinates.\n\/\/\/ let c = Context::abs(args.width as f64, args.height as f64);\n\/\/\/ \/\/ Do rendering here.\n\/\/\/ },\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct GameIterator<'a, W> {\n \/\/\/ The game window used by iterator.\n pub game_window: &'a mut W,\n state: GameIteratorState,\n last_update: u64,\n last_frame: u64,\n dt_update_in_ns: u64,\n dt_frame_in_ns: u64,\n dt: f64,\n}\n\nstatic billion: u64 = 1_000_000_000;\n\nimpl<'a, W: GameWindow> GameIterator<'a, W> {\n \/\/\/ Creates a new game iterator.\n pub fn new(\n game_window: &'a mut W, \n settings: &GameIteratorSettings\n ) -> GameIterator<'a, W> {\n let updates_per_second: u64 = settings.updates_per_second;\n let max_frames_per_second: u64 = settings.max_frames_per_second;\n\n let start = time::precise_time_ns();\n GameIterator {\n game_window: game_window,\n state: RenderState,\n last_update: start,\n last_frame: start,\n dt_update_in_ns: billion \/ updates_per_second,\n dt_frame_in_ns: billion \/ max_frames_per_second,\n dt: 1.0 \/ updates_per_second as f64,\n }\n }\n}\n\nimpl<'a, W: GameWindow> \nIterator<GameEvent> \nfor GameIterator<'a, W> {\n \/\/\/ Returns the next game event.\n fn next(&mut self) -> Option<GameEvent> {\n loop {\n match self.state {\n RenderState => {\n if self.game_window.should_close() { return None; }\n\n let start_render = time::precise_time_ns();\n let jump_frames = cmp::max(1, (start_render - self.last_frame) \/ self.dt_frame_in_ns);\n self.last_frame += self.dt_frame_in_ns * jump_frames;\n\n let (w, h) = self.game_window.get_size();\n if w != 0 && h != 0 {\n \/\/ Swap buffers next time.\n self.state = SwapBuffersState;\n return Some(Render(RenderArgs {\n \/\/ Extrapolate time forward to allow smooth motion.\n ext_dt: (start_render - self.last_update) as f64 \/ billion as f64,\n width: w,\n height: h,\n }\n ));\n }\n\n self.state = UpdateLoopState;\n },\n SwapBuffersState => {\n self.game_window.swap_buffers();\n self.state = UpdateLoopState;\n },\n UpdateLoopState => {\n let current_time = time::precise_time_ns();\n let next_frame = self.last_frame + self.dt_frame_in_ns;\n let next_update = self.last_update + self.dt_update_in_ns;\n let next_event = cmp::min(next_frame, next_update);\n if next_event > current_time {\n sleep( (next_event - current_time) \/ 1_000_000 );\n } else if next_event == next_frame {\n self.state = RenderState;\n } else {\n self.state = HandleEventsState;\n }\n },\n HandleEventsState => {\n \/\/ Handle all events before updating.\n return match self.game_window.poll_event() {\n event::KeyPressed(key) => {\n Some(KeyPress(KeyPressArgs {\n key: key,\n }))\n },\n event::KeyReleased(key) => {\n Some(KeyRelease(KeyReleaseArgs {\n key: key,\n }))\n },\n event::MouseButtonPressed(mouse_button) => {\n Some(MousePress(MousePressArgs {\n button: mouse_button,\n }))\n },\n event::MouseButtonReleased(mouse_button) => {\n Some(MouseRelease(MouseReleaseArgs {\n button: mouse_button,\n 
}))\n },\n event::MouseMoved(x, y, relative_move) => {\n match relative_move {\n Some((dx, dy)) =>\n self.state = MouseRelativeMoveState(dx, dy),\n None => {},\n };\n Some(MouseMove(MouseMoveArgs {\n x: x,\n y: y,\n }))\n },\n event::MouseScrolled(x, y) => {\n Some(MouseScroll(MouseScrollArgs {\n x: x,\n y: y\n }))\n },\n event::NoEvent => {\n self.state = UpdateState;\n continue;\n },\n }\n },\n MouseRelativeMoveState(dx, dy) => {\n self.state = HandleEventsState;\n return Some(MouseRelativeMove(MouseRelativeMoveArgs {\n dx: dx,\n dy: dy,\n }));\n },\n UpdateState => {\n self.state = UpdateLoopState;\n self.last_update += self.dt_update_in_ns;\n return Some(Update(UpdateArgs{\n dt: self.dt,\n }));\n },\n };\n }\n }\n}\n<commit_msg>Set `last_frame` directly<commit_after>use time;\nuse std::io::timer::sleep;\n\nuse GameWindow;\nuse keyboard;\nuse mouse;\nuse event;\n\nuse std::cmp;\n\n\/\/\/ Render argument.\n#[deriving(Clone)]\npub struct RenderArgs {\n \/\/\/ Extrapolated time in seconds, used to do smooth animation.\n pub ext_dt: f64,\n \/\/\/ The width of rendered area.\n pub width: u32,\n \/\/\/ The height of rendered area.\n pub height: u32,\n}\n\n\/\/\/ Update argument.\n#[deriving(Clone)]\npub struct UpdateArgs {\n \/\/\/ Delta time in seconds.\n pub dt: f64,\n}\n\n\/\/\/ Key press arguments.\n#[deriving(Clone)]\npub struct KeyPressArgs {\n \/\/\/ Keyboard key.\n pub key: keyboard::Key,\n}\n\n\/\/\/ Key release arguments.\n#[deriving(Clone)]\npub struct KeyReleaseArgs {\n \/\/\/ Keyboard key.\n pub key: keyboard::Key,\n}\n\n\/\/\/ Mouse press arguments.\n#[deriving(Clone)]\npub struct MousePressArgs {\n \/\/\/ Mouse button.\n pub button: mouse::Button,\n}\n\n\/\/\/ Mouse release arguments.\n#[deriving(Clone)]\npub struct MouseReleaseArgs {\n \/\/\/ Mouse button.\n pub button: mouse::Button,\n}\n\n\/\/\/ Mouse move arguments.\n#[deriving(Clone)]\npub struct MouseMoveArgs {\n \/\/\/ x.\n pub x: f64,\n \/\/\/ y.\n pub y: f64,\n}\n\n\/\/\/ Mouse relative move arguments.\n#[deriving(Clone)]\npub struct MouseRelativeMoveArgs {\n \/\/\/ Delta x.\n pub dx: f64,\n \/\/\/ Delta y.\n pub dy: f64,\n}\n\n\/\/\/ Mouse scroll arguments.\n#[deriving(Clone)]\npub struct MouseScrollArgs {\n \/\/\/ x.\n pub x: f64,\n \/\/\/ y.\n pub y: f64,\n}\n\n\/\/\/ Contains the different game events.\n#[deriving(Clone)]\npub enum GameEvent {\n \/\/\/ Render graphics.\n Render(RenderArgs),\n \/\/\/ Update physical state of the game.\n Update(UpdateArgs),\n \/\/\/ Pressed a keyboard key.\n KeyPress(KeyPressArgs),\n \/\/\/ Released a keyboard key.\n KeyRelease(KeyReleaseArgs),\n \/\/\/ Pressed a mouse button.\n MousePress(MousePressArgs),\n \/\/\/ Released a mouse button.\n MouseRelease(MouseReleaseArgs),\n \/\/\/ Moved mouse cursor.\n MouseMove(MouseMoveArgs),\n \/\/\/ Moved mouse relative, not bounded by cursor.\n MouseRelativeMove(MouseRelativeMoveArgs),\n \/\/\/ Scrolled mouse.\n MouseScroll(MouseScrollArgs)\n}\n\n#[deriving(Show)]\nenum GameIteratorState {\n RenderState,\n SwapBuffersState,\n UpdateLoopState,\n HandleEventsState,\n MouseRelativeMoveState(f64, f64),\n UpdateState,\n}\n\n\/\/\/ Settings for the game loop behavior.\n#[deriving(Clone)]\npub struct GameIteratorSettings {\n \/\/\/ The number of updates per second (UPS).\n pub updates_per_second: u64,\n \/\/\/ The maximum number of frames per second (FPS target).\n pub max_frames_per_second: u64,\n}\n\n\/\/\/ A game loop iterator.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```Rust\n\/\/\/ let game_iter_settings = GameIteratorSettings {\n\/\/\/ 
updates_per_second: 120,\n\/\/\/ max_frames_per_second: 60,\n\/\/\/ };\n\/\/\/ let ref mut gl = Gl::new();\n\/\/\/ for e in GameIterator::new(&mut window, &game_iter_settings) {\n\/\/\/ match e {\n\/\/\/ Render(args) => {\n\/\/\/ \/\/ Set the viewport in window to render graphics.\n\/\/\/ gl.viewport(0, 0, args.width as i32, args.height as i32);\n\/\/\/ \/\/ Create graphics context with absolute coordinates.\n\/\/\/ let c = Context::abs(args.width as f64, args.height as f64);\n\/\/\/ \/\/ Do rendering here.\n\/\/\/ },\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct GameIterator<'a, W> {\n \/\/\/ The game window used by iterator.\n pub game_window: &'a mut W,\n state: GameIteratorState,\n last_update: u64,\n last_frame: u64,\n dt_update_in_ns: u64,\n dt_frame_in_ns: u64,\n dt: f64,\n}\n\nstatic billion: u64 = 1_000_000_000;\n\nimpl<'a, W: GameWindow> GameIterator<'a, W> {\n \/\/\/ Creates a new game iterator.\n pub fn new(\n game_window: &'a mut W, \n settings: &GameIteratorSettings\n ) -> GameIterator<'a, W> {\n let updates_per_second: u64 = settings.updates_per_second;\n let max_frames_per_second: u64 = settings.max_frames_per_second;\n\n let start = time::precise_time_ns();\n GameIterator {\n game_window: game_window,\n state: RenderState,\n last_update: start,\n last_frame: start,\n dt_update_in_ns: billion \/ updates_per_second,\n dt_frame_in_ns: billion \/ max_frames_per_second,\n dt: 1.0 \/ updates_per_second as f64,\n }\n }\n}\n\nimpl<'a, W: GameWindow> \nIterator<GameEvent> \nfor GameIterator<'a, W> {\n \/\/\/ Returns the next game event.\n fn next(&mut self) -> Option<GameEvent> {\n loop {\n match self.state {\n RenderState => {\n if self.game_window.should_close() { return None; }\n\n let start_render = time::precise_time_ns();\n self.last_frame = start_render;\n\n let (w, h) = self.game_window.get_size();\n if w != 0 && h != 0 {\n \/\/ Swap buffers next time.\n self.state = SwapBuffersState;\n return Some(Render(RenderArgs {\n \/\/ Extrapolate time forward to allow smooth motion.\n ext_dt: (start_render - self.last_update) as f64 \/ billion as f64,\n width: w,\n height: h,\n }\n ));\n }\n\n self.state = UpdateLoopState;\n },\n SwapBuffersState => {\n self.game_window.swap_buffers();\n self.state = UpdateLoopState;\n },\n UpdateLoopState => {\n let current_time = time::precise_time_ns();\n let next_frame = self.last_frame + self.dt_frame_in_ns;\n let next_update = self.last_update + self.dt_update_in_ns;\n let next_event = cmp::min(next_frame, next_update);\n if next_event > current_time {\n sleep( (next_event - current_time) \/ 1_000_000 );\n } else if next_event == next_frame {\n self.state = RenderState;\n } else {\n self.state = HandleEventsState;\n }\n },\n HandleEventsState => {\n \/\/ Handle all events before updating.\n return match self.game_window.poll_event() {\n event::KeyPressed(key) => {\n Some(KeyPress(KeyPressArgs {\n key: key,\n }))\n },\n event::KeyReleased(key) => {\n Some(KeyRelease(KeyReleaseArgs {\n key: key,\n }))\n },\n event::MouseButtonPressed(mouse_button) => {\n Some(MousePress(MousePressArgs {\n button: mouse_button,\n }))\n },\n event::MouseButtonReleased(mouse_button) => {\n Some(MouseRelease(MouseReleaseArgs {\n button: mouse_button,\n }))\n },\n event::MouseMoved(x, y, relative_move) => {\n match relative_move {\n Some((dx, dy)) =>\n self.state = MouseRelativeMoveState(dx, dy),\n None => {},\n };\n Some(MouseMove(MouseMoveArgs {\n x: x,\n y: y,\n }))\n },\n event::MouseScrolled(x, y) => {\n Some(MouseScroll(MouseScrollArgs {\n x: x,\n y: y\n }))\n 
},\n event::NoEvent => {\n self.state = UpdateState;\n continue;\n },\n }\n },\n MouseRelativeMoveState(dx, dy) => {\n self.state = HandleEventsState;\n return Some(MouseRelativeMove(MouseRelativeMoveArgs {\n dx: dx,\n dy: dy,\n }));\n },\n UpdateState => {\n self.state = UpdateLoopState;\n self.last_update += self.dt_update_in_ns;\n return Some(Update(UpdateArgs{\n dt: self.dt,\n }));\n },\n };\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>future fix<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>inclusive range stable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use Once instead of RwLock<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>more starts<commit_after>\/\/this is the start of the new user login shell\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: Hidato.<commit_after>\/\/! Hidato.\n\/\/!\n\/\/! https:\/\/en.wikipedia.org\/wiki\/Hidato\n\nextern crate puzzle_solver;\n\nuse puzzle_solver::{Puzzle,Solution,Val,VarToken};\n\nconst WIDTH: usize = 8;\nconst HEIGHT: usize = 8;\nconst NA: i32 = -1;\ntype Board = [[i32; WIDTH]; HEIGHT];\n\nfn make_hidato(board: &Board) -> (Puzzle, Vec<VarToken>) {\n let mut sys = Puzzle::new();\n let mut pos = Vec::new();\n let mut count = 0;\n\n for y in 0..HEIGHT {\n for x in 0..WIDTH {\n if board[y][x] != NA {\n pos.push((WIDTH * y + x) as Val);\n count = count + 1;\n }\n }\n }\n\n let vars = sys.new_vars_with_candidates_1d(count, &pos);\n\n for y in 0..HEIGHT {\n for x in 0..WIDTH {\n if board[y][x] > 0 {\n let idx = (board[y][x] - 1) as usize;\n sys.set_value(vars[idx], (WIDTH * y + x) as Val);\n }\n }\n }\n\n sys.all_different(&vars);\n\n let stride = WIDTH as Val;\n let deltas = [\n -stride - 1, -stride, -stride + 1,\n -1, 1,\n stride - 1, stride, stride + 1 ];\n\n for i in 1..vars.len() {\n let step = sys.new_var_with_candidates(&deltas);\n sys.equals(vars[i], vars[i - 1] + step);\n }\n\n (sys, vars)\n}\n\nfn print_hidato(dict: &Solution, vars: &Vec<VarToken>) {\n let mut board = [[NA; WIDTH]; HEIGHT];\n\n for (idx, &var) in vars.iter().enumerate() {\n let x = (dict[var] as usize) % WIDTH;\n let y = (dict[var] as usize) \/ WIDTH;\n board[y][x] = (idx as i32) + 1;\n }\n\n for y in 0..HEIGHT {\n for x in 0..WIDTH {\n if board[y][x] == NA {\n print!(\" --\");\n } else {\n print!(\" {:2}\", board[y][x]);\n }\n }\n println!();\n }\n}\n\nfn verify_hidato(dict: &Solution, vars: &Vec<VarToken>, expected: &Board) {\n for (idx, &var) in vars.iter().enumerate() {\n let x = (dict[var] as usize) % WIDTH;\n let y = (dict[var] as usize) \/ WIDTH;\n assert_eq!((idx as i32) + 1, expected[y][x]);\n }\n}\n\n#[test]\nfn hidato_wikipedia() {\n let puzzle = [\n [ 0, 33, 35, 0, 0, NA, NA, NA ],\n [ 0, 0, 24, 22, 0, NA, NA, NA ],\n [ 0, 0, 0, 21, 0, 0, NA, NA ],\n [ 0, 26, 0, 13, 40, 11, NA, NA ],\n [ 27, 0, 0, 0, 9, 0, 1, NA ],\n [ NA, NA, 0, 0, 18, 0, 0, NA ],\n [ NA, NA, NA, NA, 0, 7, 0, 0 ],\n [ NA, NA, NA, NA, NA, NA, 5, 0 ] ];\n\n let expected = [\n [ 32, 33, 35, 36, 37, NA, NA, NA ],\n [ 31, 34, 24, 22, 38, NA, NA, NA ],\n [ 30, 25, 23, 21, 12, 39, NA, NA ],\n [ 29, 26, 20, 13, 40, 11, NA, NA ],\n [ 27, 28, 14, 19, 9, 10, 1, NA ],\n [ NA, NA, 15, 16, 18, 8, 2, NA ],\n [ NA, NA, NA, NA, 17, 7, 6, 3 ],\n [ NA, NA, NA, NA, NA, NA, 5, 4 ] ];\n\n let (mut sys, vars) = make_hidato(&puzzle);\n let dict = sys.solve_any().expect(\"solution\");\n print_hidato(&dict, &vars);\n verify_hidato(&dict, &vars, &expected);\n println!(\"hidato_wikipedia: {} guesses\", 
sys.num_guesses());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Changed<commit_after>+ blah\n- Yep, blah.\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::io::Write;\nuse std::io::stderr;\n\n\/\/\/ Print an Error type and its cause recursively\n\/\/\/\n\/\/\/ The error is printed with \"Error NNNN :\" as prefix, where \"NNNN\" is a number which increases\n\/\/\/ which each recursion into the errors cause. The error description is used to visualize what\n\/\/\/ failed and if there is a cause \"-- caused by:\" is appended, and the cause is printed on the next\n\/\/\/ line.\n\/\/\/\n\/\/\/ Example output:\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ Error 1 : Some error -- caused by:\n\/\/\/ Error 2 : Some other error -- caused by:\n\/\/\/ Error 3 : Yet another Error -- caused by:\n\/\/\/ ...\n\/\/\/\n\/\/\/ Error <NNNN> : <Error description>\n\/\/\/ ```\npub fn trace_error(e: &Error) {\n print_trace_maxdepth(count_error_causes(e), e, ::std::u64::MAX);\n write!(stderr(), \"\\n\").ok();\n}\n\n\/\/\/ Print an Error type and its cause recursively, but only `max` levels\n\/\/\/\n\/\/\/ Output is the same as for `trace_error()`, though there are only `max` levels printed.\npub fn trace_error_maxdepth(e: &Error, max: u64) {\n let n = count_error_causes(e);\n write!(stderr(),\n \"{}\/{} Levels of errors will be printed\\n\", (if max > n { n } else { max }), n).ok();\n print_trace_maxdepth(n, e, max);\n write!(stderr(), \"\").ok();\n}\n\n\/\/\/ Print an Error type and its cause recursively with the debug!() macro\n\/\/\/\n\/\/\/ Output is the same as for `trace_error()`.\npub fn trace_error_dbg(e: &Error) {\n print_trace_dbg(0, e);\n}\n\n\/\/\/ Helper function for `trace_error()` and `trace_error_maxdepth()`.\n\/\/\/\n\/\/\/ Returns the cause of the last processed error in the recursion, so `None` if all errors where\n\/\/\/ processed.\nfn print_trace_maxdepth(idx: u64, e: &Error, max: u64) -> Option<&Error> {\n if e.cause().is_some() && idx > 0 {\n match print_trace_maxdepth(idx - 1, e.cause().unwrap(), max) {\n None => write!(stderr(), \"\\n\").ok(),\n Some(_) => write!(stderr(), \" -- caused:\\n\").ok(),\n };\n } else {\n write!(stderr(), \"\\n\").ok();\n }\n write!(stderr(), \"ERROR[{:>4}]: {}\", idx, e.description()).ok();\n e.cause()\n}\n\n\/\/\/ Count errors in Error::cause() recursively\nfn count_error_causes(e: &Error) -> u64 {\n 1 + if e.cause().is_some() { count_error_causes(e.cause().unwrap()) } else { 0 }\n}\n\nfn print_trace_dbg(idx: u64, e: &Error) {\n debug!(\"ERROR[{:>4}]: {}\", idx, e.description());\n if e.cause().is_some() {\n debug!(\" -- caused by:\");\n print_trace_dbg(idx + 1, e.cause().unwrap());\n }\n}\n\n<commit_msg>Remove \"caused by\" marker<commit_after>use std::error::Error;\nuse std::io::Write;\nuse std::io::stderr;\n\n\/\/\/ Print an Error type and its cause recursively\n\/\/\/\n\/\/\/ The error is printed with \"Error NNNN :\" as prefix, where \"NNNN\" is a number which increases\n\/\/\/ which each recursion into the errors cause. 
The error description is used to visualize what\n\/\/\/ failed and if there is a cause \"-- caused by:\" is appended, and the cause is printed on the next\n\/\/\/ line.\n\/\/\/\n\/\/\/ Example output:\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ Error 1 : Some error -- caused by:\n\/\/\/ Error 2 : Some other error -- caused by:\n\/\/\/ Error 3 : Yet another Error -- caused by:\n\/\/\/ ...\n\/\/\/\n\/\/\/ Error <NNNN> : <Error description>\n\/\/\/ ```\npub fn trace_error(e: &Error) {\n print_trace_maxdepth(count_error_causes(e), e, ::std::u64::MAX);\n write!(stderr(), \"\\n\").ok();\n}\n\n\/\/\/ Print an Error type and its cause recursively, but only `max` levels\n\/\/\/\n\/\/\/ Output is the same as for `trace_error()`, though there are only `max` levels printed.\npub fn trace_error_maxdepth(e: &Error, max: u64) {\n let n = count_error_causes(e);\n write!(stderr(),\n \"{}\/{} Levels of errors will be printed\\n\", (if max > n { n } else { max }), n).ok();\n print_trace_maxdepth(n, e, max);\n write!(stderr(), \"\").ok();\n}\n\n\/\/\/ Print an Error type and its cause recursively with the debug!() macro\n\/\/\/\n\/\/\/ Output is the same as for `trace_error()`.\npub fn trace_error_dbg(e: &Error) {\n print_trace_dbg(0, e);\n}\n\n\/\/\/ Helper function for `trace_error()` and `trace_error_maxdepth()`.\n\/\/\/\n\/\/\/ Returns the cause of the last processed error in the recursion, so `None` if all errors where\n\/\/\/ processed.\nfn print_trace_maxdepth(idx: u64, e: &Error, max: u64) -> Option<&Error> {\n if e.cause().is_some() && idx > 0 {\n match print_trace_maxdepth(idx - 1, e.cause().unwrap(), max) {\n None => write!(stderr(), \"\\n\").ok(),\n Some(_) => write!(stderr(), \" -- caused:\\n\").ok(),\n };\n } else {\n write!(stderr(), \"\\n\").ok();\n }\n write!(stderr(), \"ERROR[{:>4}]: {}\", idx, e.description()).ok();\n e.cause()\n}\n\n\/\/\/ Count errors in Error::cause() recursively\nfn count_error_causes(e: &Error) -> u64 {\n 1 + if e.cause().is_some() { count_error_causes(e.cause().unwrap()) } else { 0 }\n}\n\nfn print_trace_dbg(idx: u64, e: &Error) {\n debug!(\"ERROR[{:>4}]: {}\", idx, e.description());\n if e.cause().is_some() {\n print_trace_dbg(idx + 1, e.cause().unwrap());\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>file size!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Custom example server, unfinished but working<commit_after>\/\/ This test is not part of the original libmodbus lib!\n\/\/\n\/\/ It shows how to use the ModbusTCPPI context.\n\/\/\nextern crate libmodbus_rs;\n\nuse libmodbus_rs::{Modbus, ModbusMapping, ModbusServer, ModbusTCPPI, MODBUS_MAX_ADU_LENGTH};\nuse libmodbus_rs::errors::*; \/\/ for the `Result<T>` type\n\n\nfn run() -> Result<()> {\n let mut modbus = Modbus::new_tcp_pi(\"::0\", \"1502\")?;\n let mut socket = modbus.tcp_pi_listen(1)?;\n modbus.tcp_pi_accept(&mut socket)?;\n\n let modbus_mapping = ModbusMapping::new(500, 500, 500, 500)?;\n let mut query = vec![0u8; MODBUS_MAX_ADU_LENGTH as usize];\n\n loop {\n let request_len = modbus.receive(&mut query)?;\n modbus.reply(&query, request_len, &modbus_mapping)?;\n }\n}\n\nfn main() {\n if let Err(ref err) = run() {\n println!(\"Error: {}\", err);\n\n std::process::exit(1)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Clean out old code for 551, get ready for a faster solution<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add some tests for the typechecker (certainly not exhaustive yet)<commit_after>#[macro_use]\nextern crate interpreter;\n\n#[test]\nfn 
reassign_different_type() {\n run_test!(\n should_warn(typecheck)\n => r\"\n x = 1;\n x = true;\n \");\n}\n\n#[test]\nfn shadow_function() {\n run_test!(\n should_warn(typecheck)\n => r\"\n fn x { x }\n fn x { x*2 }\n \");\n}\n\n#[test]\nfn call_non_function() {\n run_test!(\n should_fail(typecheck)\n => r\"\n x = 1;\n y = x(1, 2, 3);\n \");\n}\n\n#[test]\nfn wrong_num_arguments() {\n run_test!(\n should_fail(typecheck)\n => r\"\n f x { x }\n x = f(1, 2, 3);\n \");\n run_test!(\n should_fail(typecheck)\n => r\"\n f x { x }\n x = f();\n \");\n}\n\n#[test]\nfn misspelled_argument() {\n run_test!(\n should_fail(typecheck)\n => r\"\n fn x { x }\n x = fn(y=5);\n \");\n}\n\n#[test]\nfn ambiguous_recursion() {\n run_test!(\n should_fail(typecheck)\n => r\"\n fn x { fn(x) }\n y = fn(1);\n \");\n}\n\n#[test]\nfn wrong_arg_type() {\n run_test!(\n should_fail(typecheck)\n => r\"\n fn x { x }\n x = fn(1);\n y = fn(true);\n \");\n run_test!(\n should_fail(typecheck)\n => r\"\n fn x { x }\n z = fn;\n x = z(1);\n y = fn(true);\n \");\n}\n\n#[test]\nfn wrong_block_type() {\n run_test!(\n should_fail(typecheck)\n => r\"\n x = {\n true;\n false;\n };\n \");\n}\n\n#[test]\nfn non_boolean_condition() {\n run_test!(\n should_fail(typecheck)\n => r\"\n x = true if 1 else false;\n \");\n}\n\n#[test]\nfn reassignment() {\n run_test!(\n should_pass(typecheck)\n => r\"\n x = true;\n y = x ^^ true;\n x = 1;\n y = x * 5;\n \");\n}\n\n#[test]\nfn variable_not_in_scope() {\n run_test!(\n should_fail(typecheck)\n => r\"\n x = a;\n \");\n run_test!(\n should_fail(typecheck)\n => r\"\n fn {\n a = 5;\n }\n x = a;\n \");\n}\n\n#[test]\nfn mismatched_conditional_types() {\n run_test!(\n should_fail(typecheck)\n => r\"\n x = 1 if true else false;\n \");\n}\n\n#[test]\nfn wrong_type_numerical_op() {\n run_test!(\n should_fail(typecheck)\n => r\"\n a = 5 + true;\n \");\n}\n\n#[test]\nfn wrong_type_comparison_op() {\n run_test!(\n should_fail(typecheck)\n => r\"\n a = 5 + true;\n \");\n}\n\n#[test]\nfn wrong_type_equality_op() {\n run_test!(\n should_fail(typecheck)\n => r\"\n a = 5 == true;\n \");\n}\n\n#[test]\nfn wrong_type_boolean_op() {\n run_test!(\n should_fail(typecheck)\n => r\"\n a = 5 && true;\n \");\n}\n\n#[test]\nfn wrong_type_prefix_op() {\n run_test!(\n should_fail(typecheck)\n => r\"\n a = -true;\n \");\n run_test!(\n should_fail(typecheck)\n => r\"\n a = !5;\n \");\n}\n\n#[test]\nfn internal_block_shadowing_captured_vars() {\n run_test!(\n should_pass(typecheck)\n => r\"\n x = 5;\n c = \\{x+5};\n z = {\n x = true;\n c();\n }\n \");\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Commitent<commit_after><|endoftext|>"} {"text":"<commit_before>use {Handler, Evented, Poll, Token};\nuse os::event::{IoEvent, Interest, PollOpt};\nuse notify::Notify;\nuse timer::{Timer, Timeout, TimerResult};\nuse std::default::Default;\nuse std::time::duration::Duration;\nuse std::{io, fmt, usize};\n\n\/\/\/ Configure EventLoop runtime details\n#[derive(Copy, Clone, Debug)]\npub struct EventLoopConfig {\n pub io_poll_timeout_ms: usize,\n\n \/\/ == Notifications ==\n pub notify_capacity: usize,\n pub messages_per_tick: usize,\n\n \/\/ == Timer ==\n pub timer_tick_ms: u64,\n pub timer_wheel_size: usize,\n pub timer_capacity: usize,\n}\n\nimpl Default for EventLoopConfig {\n fn default() -> EventLoopConfig {\n EventLoopConfig {\n io_poll_timeout_ms: 1_000,\n notify_capacity: 1_024,\n messages_per_tick: 64,\n timer_tick_ms: 100,\n timer_wheel_size: 1_024,\n timer_capacity: 65_536,\n }\n }\n}\n\n\/\/\/ Single threaded IO event 
loop.\npub struct EventLoop<H: Handler> {\n run: bool,\n poll: Poll,\n timer: Timer<H::Timeout>,\n notify: Notify<H::Message>,\n config: EventLoopConfig,\n}\n\n\/\/ Token used to represent notifications\nconst NOTIFY: Token = Token(usize::MAX);\n\nimpl<H: Handler> EventLoop<H> {\n\n \/\/\/ Initializes a new event loop using default configuration settings. The\n \/\/\/ event loop will not be running yet.\n pub fn new() -> io::Result<EventLoop<H>> {\n EventLoop::configured(Default::default())\n }\n\n pub fn configured(config: EventLoopConfig) -> io::Result<EventLoop<H>> {\n \/\/ Create the IO poller\n let mut poll = try!(Poll::new());\n\n \/\/ Create the timer\n let mut timer = Timer::new(\n config.timer_tick_ms,\n config.timer_wheel_size,\n config.timer_capacity);\n\n \/\/ Create cross thread notification queue\n let notify = try!(Notify::with_capacity(config.notify_capacity));\n\n \/\/ Register the notification wakeup FD with the IO poller\n try!(poll.register(¬ify, NOTIFY, Interest::readable() | Interest::writable() , PollOpt::edge()));\n\n \/\/ Set the timer's starting time reference point\n timer.setup();\n\n Ok(EventLoop {\n run: true,\n poll: poll,\n timer: timer,\n notify: notify,\n config: config,\n })\n }\n\n \/\/\/ Returns a sender that allows sending messages to the event loop in a\n \/\/\/ thread-safe way, waking up the event loop if needed.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use std::thread::Thread;\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = ();\n \/\/\/ type Message = u32;\n \/\/\/\n \/\/\/ fn notify(&mut self, event_loop: &mut EventLoop<MyHandler>, msg: u32) {\n \/\/\/ assert_eq!(msg, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let sender = event_loop.channel();\n \/\/\/\n \/\/\/ \/\/ Send the notification from another thread\n \/\/\/ Thread::spawn(move || {\n \/\/\/ let _ = sender.send(123);\n \/\/\/ });\n \/\/\/\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Implementation Details\n \/\/\/\n \/\/\/ Each [EventLoop](#) contains a lock-free queue with a pre-allocated\n \/\/\/ buffer size. The size can be changed by modifying\n \/\/\/ [EventLoopConfig.notify_capacity](struct.EventLoopConfig.html#structfield.notify_capacity).\n \/\/\/ When a message is sent to the EventLoop, it is first pushed on to the\n \/\/\/ queue. Then, if the EventLoop is currently running, an atomic flag is\n \/\/\/ set to indicate that the next loop iteration should be started without\n \/\/\/ waiting.\n \/\/\/\n \/\/\/ If the loop is blocked waiting for IO events, then it is woken up. The\n \/\/\/ strategy for waking up the event loop is platform dependent. For\n \/\/\/ example, on a modern Linux OS, eventfd is used. On older OSes, a pipe\n \/\/\/ is used.\n \/\/\/\n \/\/\/ The strategy of setting an atomic flag if the event loop is not already\n \/\/\/ sleeping allows avoiding an expensive wakeup operation if at all possible.\n pub fn channel(&self) -> EventLoopSender<H::Message> {\n EventLoopSender::new(self.notify.clone())\n }\n\n \/\/\/ Schedules a timeout after the requested time interval. 
When the\n \/\/\/ duration has been reached,\n \/\/\/ [Handler::timeout](trait.Handler.html#method.timeout) will be invoked\n \/\/\/ passing in the supplied token.\n \/\/\/\n \/\/\/ Returns a handle to the timeout that can be used to cancel the timeout\n \/\/\/ using [#clear_timeout](#method.clear_timeout).\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/ use std::time::Duration;\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = u32;\n \/\/\/ type Message = ();\n \/\/\/\n \/\/\/ fn timeout(&mut self, event_loop: &mut EventLoop<MyHandler>, timeout: u32) {\n \/\/\/ assert_eq!(timeout, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let timeout = event_loop.timeout(123, Duration::milliseconds(300)).unwrap();\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n pub fn timeout(&mut self, token: H::Timeout, delay: Duration) -> TimerResult<Timeout> {\n self.timer.timeout(token, delay)\n }\n\n \/\/\/ If the supplied timeout has not been triggered, cancel it such that it\n \/\/\/ will not be triggered in the future.\n pub fn clear_timeout(&mut self, timeout: Timeout) -> bool {\n self.timer.clear(timeout)\n }\n\n \/\/\/ Tells the event loop to exit after it is done handling all events in the\n \/\/\/ current iteration.\n pub fn shutdown(&mut self) {\n self.run = false;\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register<E: Evented>(&mut self, io: &E, token: Token) -> io::Result<()> {\n self.poll.register(io, token, Interest::readable(), PollOpt::level())\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register_opt<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.register(io, token, interest, opt)\n }\n\n \/\/\/ Re-Registers an IO handle with the event loop.\n pub fn reregister<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.reregister(io, token, interest, opt)\n }\n\n \/\/\/ Keep spinning the event loop indefinitely, and notify the handler whenever\n \/\/\/ any of the registered handles are ready.\n pub fn run(&mut self, handler: &mut H) -> io::Result<()> {\n self.run = true;\n\n while self.run {\n \/\/ Execute ticks as long as the event loop is running\n try!(self.run_once(handler));\n }\n\n Ok(())\n }\n\n \/\/\/ Deregisters an IO handle with the event loop.\n pub fn deregister<E: Evented>(&mut self, io: &E) -> io::Result<()> {\n self.poll.deregister(io)\n }\n\n \/\/\/ Spin the event loop once, with a timeout of one second, and notify the\n \/\/\/ handler if any of the registered handles become ready during that\n \/\/\/ time.\n pub fn run_once(&mut self, handler: &mut H) -> io::Result<()> {\n let mut messages;\n let mut pending;\n\n debug!(\"event loop tick\");\n\n \/\/ Check the notify channel for any pending messages. If there are any,\n \/\/ avoid blocking when polling for IO events. Messages will be\n \/\/ processed after IO events.\n messages = self.notify.check(self.config.messages_per_tick, true);\n pending = messages > 0;\n\n \/\/ Check the registered IO handles for any new events. 
Each poll\n \/\/ is for one second, so a shutdown request can last as long as\n \/\/ one second before it takes effect.\n let events = match self.io_poll(pending) {\n Ok(e) => e,\n Err(err) => {\n if err.kind() == io::ErrorKind::Interrupted {\n handler.interrupted(self);\n 0\n } else {\n return Err(err);\n }\n }\n };\n\n if !pending {\n \/\/ Indicate that the sleep period is over, also grab any additional\n \/\/ messages\n let remaining = self.config.messages_per_tick - messages;\n messages += self.notify.check(remaining, false);\n }\n\n self.io_process(handler, events);\n self.notify(handler, messages);\n self.timer_process(handler);\n\n Ok(())\n }\n\n #[inline]\n fn io_poll(&mut self, immediate: bool) -> io::Result<usize> {\n if immediate {\n self.poll.poll(0)\n } else {\n let mut sleep = self.timer.next_tick_in_ms() as usize;\n\n if sleep > self.config.io_poll_timeout_ms {\n sleep = self.config.io_poll_timeout_ms;\n }\n\n self.poll.poll(sleep)\n }\n }\n\n \/\/ Process IO events that have been previously polled\n fn io_process(&mut self, handler: &mut H, cnt: usize) {\n let mut i = 0;\n\n \/\/ Iterate over the notifications. Each event provides the token\n \/\/ it was registered with (which usually represents, at least, the\n \/\/ handle that the event is about) as well as information about\n \/\/ what kind of event occurred (readable, writable, signal, etc.)\n while i < cnt {\n let evt = self.poll.event(i);\n\n debug!(\"event={:?}\", evt);\n\n match evt.token() {\n NOTIFY => self.notify.cleanup(),\n _ => self.io_event(handler, evt)\n }\n\n i += 1;\n }\n }\n\n fn io_event(&mut self, handler: &mut H, evt: IoEvent) {\n let tok = evt.token();\n\n if evt.is_readable() {\n handler.readable(self, tok, evt.read_hint());\n }\n\n if evt.is_writable() {\n handler.writable(self, tok);\n }\n\n if evt.is_error() {\n println!(\" + ERROR\");\n }\n }\n\n fn notify(&mut self, handler: &mut H, mut cnt: usize) {\n while cnt > 0 {\n let msg = self.notify.poll()\n .expect(\"[BUG] at this point there should always be a message\");\n\n handler.notify(self, msg);\n cnt -= 1;\n }\n }\n\n fn timer_process(&mut self, handler: &mut H) {\n let now = self.timer.now();\n\n loop {\n match self.timer.tick_to(now) {\n Some(t) => handler.timeout(self, t),\n _ => return\n }\n }\n }\n}\n\nunsafe impl<H: Handler> Sync for EventLoop<H> { }\n\n\/\/\/ Sends messages to the EventLoop from other threads.\npub struct EventLoopSender<M: Send> {\n notify: Notify<M>\n}\n\nimpl<M: Send> Clone for EventLoopSender<M> {\n fn clone(&self) -> EventLoopSender<M> {\n EventLoopSender { notify: self.notify.clone() }\n }\n}\n\nimpl<M: Send> fmt::Debug for EventLoopSender<M> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"EventLoopSender<?> {{ ... 
}}\")\n }\n}\n\nunsafe impl<M: Send> Sync for EventLoopSender<M> { }\n\nimpl<M: Send> EventLoopSender<M> {\n fn new(notify: Notify<M>) -> EventLoopSender<M> {\n EventLoopSender { notify: notify }\n }\n\n pub fn send(&self, msg: M) -> Result<(), M> {\n self.notify.notify(msg)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::str;\n use std::sync::Arc;\n use std::sync::atomic::AtomicIsize;\n use std::sync::atomic::Ordering::SeqCst;\n use super::EventLoop;\n use {io, buf, Buf, Handler, Token, TryRead, TryWrite};\n use os::event;\n\n #[test]\n pub fn test_event_loop_size() {\n use std::mem;\n assert!(512 >= mem::size_of::<EventLoop<Funtimes>>());\n }\n\n struct Funtimes {\n rcount: Arc<AtomicIsize>,\n wcount: Arc<AtomicIsize>\n }\n\n impl Funtimes {\n fn new(rcount: Arc<AtomicIsize>, wcount: Arc<AtomicIsize>) -> Funtimes {\n Funtimes {\n rcount: rcount,\n wcount: wcount\n }\n }\n }\n\n impl Handler for Funtimes {\n type Timeout = usize;\n type Message = ();\n\n fn readable(&mut self, _event_loop: &mut EventLoop<Funtimes>, token: Token, _hint: event::ReadHint) {\n (*self.rcount).fetch_add(1, SeqCst);\n assert_eq!(token, Token(10));\n }\n }\n\n #[test]\n pub fn test_readable() {\n let mut event_loop = EventLoop::new().ok().expect(\"Couldn't make event loop\");\n\n let (mut reader, mut writer) = io::pipe().unwrap();\n\n let rcount = Arc::new(AtomicIsize::new(0));\n let wcount = Arc::new(AtomicIsize::new(0));\n let mut handler = Funtimes::new(rcount.clone(), wcount.clone());\n\n writer.write(&mut buf::SliceBuf::wrap(\"hello\".as_bytes())).unwrap();\n event_loop.register(&reader, Token(10)).unwrap();\n\n let _ = event_loop.run_once(&mut handler);\n let mut b = buf::ByteBuf::mut_with_capacity(16);\n\n assert_eq!((*rcount).load(SeqCst), 1);\n\n reader.read(&mut b).unwrap();\n\n assert_eq!(str::from_utf8(b.flip().bytes()).unwrap(), \"hello\");\n }\n}\n<commit_msg>Correctly notify handler of errors<commit_after>use {Handler, Evented, Poll, Token};\nuse os::event::{IoEvent, Interest, PollOpt};\nuse notify::Notify;\nuse timer::{Timer, Timeout, TimerResult};\nuse std::default::Default;\nuse std::time::duration::Duration;\nuse std::{io, fmt, usize};\n\n\/\/\/ Configure EventLoop runtime details\n#[derive(Copy, Clone, Debug)]\npub struct EventLoopConfig {\n pub io_poll_timeout_ms: usize,\n\n \/\/ == Notifications ==\n pub notify_capacity: usize,\n pub messages_per_tick: usize,\n\n \/\/ == Timer ==\n pub timer_tick_ms: u64,\n pub timer_wheel_size: usize,\n pub timer_capacity: usize,\n}\n\nimpl Default for EventLoopConfig {\n fn default() -> EventLoopConfig {\n EventLoopConfig {\n io_poll_timeout_ms: 1_000,\n notify_capacity: 1_024,\n messages_per_tick: 64,\n timer_tick_ms: 100,\n timer_wheel_size: 1_024,\n timer_capacity: 65_536,\n }\n }\n}\n\n\/\/\/ Single threaded IO event loop.\npub struct EventLoop<H: Handler> {\n run: bool,\n poll: Poll,\n timer: Timer<H::Timeout>,\n notify: Notify<H::Message>,\n config: EventLoopConfig,\n}\n\n\/\/ Token used to represent notifications\nconst NOTIFY: Token = Token(usize::MAX);\n\nimpl<H: Handler> EventLoop<H> {\n\n \/\/\/ Initializes a new event loop using default configuration settings. 
The\n \/\/\/ event loop will not be running yet.\n pub fn new() -> io::Result<EventLoop<H>> {\n EventLoop::configured(Default::default())\n }\n\n pub fn configured(config: EventLoopConfig) -> io::Result<EventLoop<H>> {\n \/\/ Create the IO poller\n let mut poll = try!(Poll::new());\n\n \/\/ Create the timer\n let mut timer = Timer::new(\n config.timer_tick_ms,\n config.timer_wheel_size,\n config.timer_capacity);\n\n \/\/ Create cross thread notification queue\n let notify = try!(Notify::with_capacity(config.notify_capacity));\n\n \/\/ Register the notification wakeup FD with the IO poller\n try!(poll.register(¬ify, NOTIFY, Interest::readable() | Interest::writable() , PollOpt::edge()));\n\n \/\/ Set the timer's starting time reference point\n timer.setup();\n\n Ok(EventLoop {\n run: true,\n poll: poll,\n timer: timer,\n notify: notify,\n config: config,\n })\n }\n\n \/\/\/ Returns a sender that allows sending messages to the event loop in a\n \/\/\/ thread-safe way, waking up the event loop if needed.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use std::thread::Thread;\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = ();\n \/\/\/ type Message = u32;\n \/\/\/\n \/\/\/ fn notify(&mut self, event_loop: &mut EventLoop<MyHandler>, msg: u32) {\n \/\/\/ assert_eq!(msg, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let sender = event_loop.channel();\n \/\/\/\n \/\/\/ \/\/ Send the notification from another thread\n \/\/\/ Thread::spawn(move || {\n \/\/\/ let _ = sender.send(123);\n \/\/\/ });\n \/\/\/\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Implementation Details\n \/\/\/\n \/\/\/ Each [EventLoop](#) contains a lock-free queue with a pre-allocated\n \/\/\/ buffer size. The size can be changed by modifying\n \/\/\/ [EventLoopConfig.notify_capacity](struct.EventLoopConfig.html#structfield.notify_capacity).\n \/\/\/ When a message is sent to the EventLoop, it is first pushed on to the\n \/\/\/ queue. Then, if the EventLoop is currently running, an atomic flag is\n \/\/\/ set to indicate that the next loop iteration should be started without\n \/\/\/ waiting.\n \/\/\/\n \/\/\/ If the loop is blocked waiting for IO events, then it is woken up. The\n \/\/\/ strategy for waking up the event loop is platform dependent. For\n \/\/\/ example, on a modern Linux OS, eventfd is used. On older OSes, a pipe\n \/\/\/ is used.\n \/\/\/\n \/\/\/ The strategy of setting an atomic flag if the event loop is not already\n \/\/\/ sleeping allows avoiding an expensive wakeup operation if at all possible.\n pub fn channel(&self) -> EventLoopSender<H::Message> {\n EventLoopSender::new(self.notify.clone())\n }\n\n \/\/\/ Schedules a timeout after the requested time interval. 
When the\n \/\/\/ duration has been reached,\n \/\/\/ [Handler::timeout](trait.Handler.html#method.timeout) will be invoked\n \/\/\/ passing in the supplied token.\n \/\/\/\n \/\/\/ Returns a handle to the timeout that can be used to cancel the timeout\n \/\/\/ using [#clear_timeout](#method.clear_timeout).\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/ use std::time::Duration;\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = u32;\n \/\/\/ type Message = ();\n \/\/\/\n \/\/\/ fn timeout(&mut self, event_loop: &mut EventLoop<MyHandler>, timeout: u32) {\n \/\/\/ assert_eq!(timeout, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let timeout = event_loop.timeout(123, Duration::milliseconds(300)).unwrap();\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n pub fn timeout(&mut self, token: H::Timeout, delay: Duration) -> TimerResult<Timeout> {\n self.timer.timeout(token, delay)\n }\n\n \/\/\/ If the supplied timeout has not been triggered, cancel it such that it\n \/\/\/ will not be triggered in the future.\n pub fn clear_timeout(&mut self, timeout: Timeout) -> bool {\n self.timer.clear(timeout)\n }\n\n \/\/\/ Tells the event loop to exit after it is done handling all events in the\n \/\/\/ current iteration.\n pub fn shutdown(&mut self) {\n self.run = false;\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register<E: Evented>(&mut self, io: &E, token: Token) -> io::Result<()> {\n self.poll.register(io, token, Interest::readable(), PollOpt::level())\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register_opt<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.register(io, token, interest, opt)\n }\n\n \/\/\/ Re-Registers an IO handle with the event loop.\n pub fn reregister<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.reregister(io, token, interest, opt)\n }\n\n \/\/\/ Keep spinning the event loop indefinitely, and notify the handler whenever\n \/\/\/ any of the registered handles are ready.\n pub fn run(&mut self, handler: &mut H) -> io::Result<()> {\n self.run = true;\n\n while self.run {\n \/\/ Execute ticks as long as the event loop is running\n try!(self.run_once(handler));\n }\n\n Ok(())\n }\n\n \/\/\/ Deregisters an IO handle with the event loop.\n pub fn deregister<E: Evented>(&mut self, io: &E) -> io::Result<()> {\n self.poll.deregister(io)\n }\n\n \/\/\/ Spin the event loop once, with a timeout of one second, and notify the\n \/\/\/ handler if any of the registered handles become ready during that\n \/\/\/ time.\n pub fn run_once(&mut self, handler: &mut H) -> io::Result<()> {\n let mut messages;\n let mut pending;\n\n debug!(\"event loop tick\");\n\n \/\/ Check the notify channel for any pending messages. If there are any,\n \/\/ avoid blocking when polling for IO events. Messages will be\n \/\/ processed after IO events.\n messages = self.notify.check(self.config.messages_per_tick, true);\n pending = messages > 0;\n\n \/\/ Check the registered IO handles for any new events. 
Each poll\n \/\/ is for one second, so a shutdown request can last as long as\n \/\/ one second before it takes effect.\n let events = match self.io_poll(pending) {\n Ok(e) => e,\n Err(err) => {\n if err.kind() == io::ErrorKind::Interrupted {\n handler.interrupted(self);\n 0\n } else {\n return Err(err);\n }\n }\n };\n\n if !pending {\n \/\/ Indicate that the sleep period is over, also grab any additional\n \/\/ messages\n let remaining = self.config.messages_per_tick - messages;\n messages += self.notify.check(remaining, false);\n }\n\n self.io_process(handler, events);\n self.notify(handler, messages);\n self.timer_process(handler);\n\n Ok(())\n }\n\n #[inline]\n fn io_poll(&mut self, immediate: bool) -> io::Result<usize> {\n if immediate {\n self.poll.poll(0)\n } else {\n let mut sleep = self.timer.next_tick_in_ms() as usize;\n\n if sleep > self.config.io_poll_timeout_ms {\n sleep = self.config.io_poll_timeout_ms;\n }\n\n self.poll.poll(sleep)\n }\n }\n\n \/\/ Process IO events that have been previously polled\n fn io_process(&mut self, handler: &mut H, cnt: usize) {\n let mut i = 0;\n\n \/\/ Iterate over the notifications. Each event provides the token\n \/\/ it was registered with (which usually represents, at least, the\n \/\/ handle that the event is about) as well as information about\n \/\/ what kind of event occurred (readable, writable, signal, etc.)\n while i < cnt {\n let evt = self.poll.event(i);\n\n debug!(\"event={:?}\", evt);\n\n match evt.token() {\n NOTIFY => self.notify.cleanup(),\n _ => self.io_event(handler, evt)\n }\n\n i += 1;\n }\n }\n\n fn io_event(&mut self, handler: &mut H, evt: IoEvent) {\n let tok = evt.token();\n\n if evt.is_readable() | evt.is_error() {\n handler.readable(self, tok, evt.read_hint());\n }\n\n if evt.is_writable() {\n handler.writable(self, tok);\n }\n }\n\n fn notify(&mut self, handler: &mut H, mut cnt: usize) {\n while cnt > 0 {\n let msg = self.notify.poll()\n .expect(\"[BUG] at this point there should always be a message\");\n\n handler.notify(self, msg);\n cnt -= 1;\n }\n }\n\n fn timer_process(&mut self, handler: &mut H) {\n let now = self.timer.now();\n\n loop {\n match self.timer.tick_to(now) {\n Some(t) => handler.timeout(self, t),\n _ => return\n }\n }\n }\n}\n\nunsafe impl<H: Handler> Sync for EventLoop<H> { }\n\n\/\/\/ Sends messages to the EventLoop from other threads.\npub struct EventLoopSender<M: Send> {\n notify: Notify<M>\n}\n\nimpl<M: Send> Clone for EventLoopSender<M> {\n fn clone(&self) -> EventLoopSender<M> {\n EventLoopSender { notify: self.notify.clone() }\n }\n}\n\nimpl<M: Send> fmt::Debug for EventLoopSender<M> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"EventLoopSender<?> {{ ... 
}}\")\n }\n}\n\nunsafe impl<M: Send> Sync for EventLoopSender<M> { }\n\nimpl<M: Send> EventLoopSender<M> {\n fn new(notify: Notify<M>) -> EventLoopSender<M> {\n EventLoopSender { notify: notify }\n }\n\n pub fn send(&self, msg: M) -> Result<(), M> {\n self.notify.notify(msg)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::str;\n use std::sync::Arc;\n use std::sync::atomic::AtomicIsize;\n use std::sync::atomic::Ordering::SeqCst;\n use super::EventLoop;\n use {io, buf, Buf, Handler, Token, TryRead, TryWrite};\n use os::event;\n\n #[test]\n pub fn test_event_loop_size() {\n use std::mem;\n assert!(512 >= mem::size_of::<EventLoop<Funtimes>>());\n }\n\n struct Funtimes {\n rcount: Arc<AtomicIsize>,\n wcount: Arc<AtomicIsize>\n }\n\n impl Funtimes {\n fn new(rcount: Arc<AtomicIsize>, wcount: Arc<AtomicIsize>) -> Funtimes {\n Funtimes {\n rcount: rcount,\n wcount: wcount\n }\n }\n }\n\n impl Handler for Funtimes {\n type Timeout = usize;\n type Message = ();\n\n fn readable(&mut self, _event_loop: &mut EventLoop<Funtimes>, token: Token, _hint: event::ReadHint) {\n (*self.rcount).fetch_add(1, SeqCst);\n assert_eq!(token, Token(10));\n }\n }\n\n #[test]\n pub fn test_readable() {\n let mut event_loop = EventLoop::new().ok().expect(\"Couldn't make event loop\");\n\n let (mut reader, mut writer) = io::pipe().unwrap();\n\n let rcount = Arc::new(AtomicIsize::new(0));\n let wcount = Arc::new(AtomicIsize::new(0));\n let mut handler = Funtimes::new(rcount.clone(), wcount.clone());\n\n writer.write(&mut buf::SliceBuf::wrap(\"hello\".as_bytes())).unwrap();\n event_loop.register(&reader, Token(10)).unwrap();\n\n let _ = event_loop.run_once(&mut handler);\n let mut b = buf::ByteBuf::mut_with_capacity(16);\n\n assert_eq!((*rcount).load(SeqCst), 1);\n\n reader.read(&mut b).unwrap();\n\n assert_eq!(str::from_utf8(b.flip().bytes()).unwrap(), \"hello\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: adds tests for FlagBuilder Display<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>improvements<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix breakage due to language changes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>starting opinion dynamics model<commit_after>extern crate rand;\nextern crate djinn;\nextern crate redis;\nextern crate rustc_serialize;\n\nuse rand::Rng;\nuse redis::Client;\nuse djinn::{Agent, Manager, Simulation, Population, Updates, Redis, run};\nuse rand::distributions::{Weighted, WeightedChoice, IndependentSample};\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nstruct Opinion {\n polarity: f64,\n priority: f64,\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nstruct Person {\n opinions: Vec<Opinion>,\n medias: Vec<Edge>,\n friends: Vec<Edge>,\n}\n\nimpl Person {\n pub fn new(opinions: Vec<Opinion>) -> Person {\n Person {\n opinions: opinions,\n medias: Vec::new(),\n friends: Vec::new(),\n }\n }\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nstruct Edge {\n to: u64,\n weight: u32,\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nenum EdgeType {\n Media,\n Friend,\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nstruct Media {\n opinions: Vec<Opinion>,\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nenum State {\n Person(Person),\n Media(Media),\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nenum PersonUpdate {\n OpinionShift {\n idx: usize,\n 
polarity: f64,\n priority: f64,\n },\n TrustShift {\n id: u64,\n shift: f64,\n edgeType: EdgeType,\n },\n Meet { id: u64, trust: f64 },\n Discover { id: u64, trust: f64 },\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nenum MediaUpdate {\n OpinionShift {\n idx: usize,\n polarity: f64,\n priority: f64,\n },\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nenum Update {\n Person(PersonUpdate),\n Media(MediaUpdate),\n}\n\n#[derive(RustcDecodable, RustcEncodable, Debug, PartialEq, Clone)]\nstruct World {\n weather: String,\n}\n\n#[derive(Clone)]\nstruct OpinionDynamicsSim;\n\nimpl OpinionDynamicsSim {\n fn decide_person<R: Redis>(&self,\n id: u64,\n person: &Person,\n pop: &Population<Self, R>,\n updates: &mut Updates<Self>)\n -> () {\n let mut rng = rand::weak_rng();\n\n \/\/ talk to a person or consume media?\n if rng.gen::<f64>() < 0.5 {\n \/\/ choose a media.\n \/\/ the less media this person is familiar with,\n \/\/ the more likely they will encounter a random one.\n \/\/ otherwise, they choose one with probability based on how much they trust it.\n let p_rand_media = (person.medias.len() as f64) \/ 2.; \/\/ TODO denom should be a config val\n let media = if rng.gen::<f64>() < p_rand_media {\n let m = pop.random(\"media\");\n\n \/\/ create an edge\n \/\/ TODO check if already has edge to this one\n updates.queue(id,\n Update::Person(PersonUpdate::Discover {\n id: id,\n trust: 0., \/\/ TODO bootstrap trust in some way\n }));\n m\n } else {\n let mut items: Vec<Weighted<u64>> = person.medias\n .iter()\n .map(|e| {\n Weighted {\n item: e.to,\n weight: e.weight,\n }\n })\n .collect();\n let wc = WeightedChoice::new(&mut items);\n let id = wc.ind_sample(&mut rng);\n pop.get_agent(id).unwrap()\n };\n } else {\n \/\/ choose a person to talk to.\n let p_rand_person = (person.friends.len() as f64) \/ 2.; \/\/ TODO denom should be a config val\n let person = if rng.gen::<f64>() < p_rand_person {\n let p = pop.random(\"person\"); \/\/ TODO prob shouldnt be themselves\n \/\/ create an edge\n \/\/ TODO check if already has edge to this one\n updates.queue(id,\n Update::Person(PersonUpdate::Meet {\n id: id,\n trust: 0., \/\/ TODO bootstrap trust in some way\n }));\n p\n } else {\n let mut items: Vec<Weighted<u64>> = person.friends\n .iter()\n .map(|e| {\n Weighted {\n item: e.to,\n weight: e.weight,\n }\n })\n .collect();\n let wc = WeightedChoice::new(&mut items);\n let id = wc.ind_sample(&mut rng);\n pop.get_agent(id).unwrap()\n };\n }\n }\n}\n\nimpl Simulation for OpinionDynamicsSim {\n type State = State;\n type Update = Update;\n type World = World;\n\n fn decide<R: Redis>(&self,\n agent: &Agent<Self::State>,\n world: &Self::World,\n pop: &Population<Self, R>,\n updates: &mut Updates<Self>)\n -> () {\n match agent.state {\n State::Person(ref p) => {\n self.decide_person(agent.id, p, pop, updates);\n }\n State::Media(ref m) => {\n \/\/ TODO\n }\n }\n }\n\n fn update(&self, mut state: &mut Self::State, updates: Vec<Self::Update>) -> bool {\n \/\/ TODO\n false\n }\n\n fn on_spawns<R: Redis>(&self,\n agents: Vec<Agent<Self::State>>,\n population: &Population<Self, R>)\n -> () {\n \/\/ index newly created media\/people\n \/\/ so we can sample them later\n let mut media = Vec::new();\n let mut people = Vec::new();\n let _: Vec<()> = agents.iter()\n .map(|a| {\n match a.state {\n State::Person(_) => people.push(a.id),\n State::Media(_) => media.push(a.id),\n }\n })\n .collect();\n let _: () = population.indexes(\"media\", media);\n let _: () = 
population.indexes(\"people\", people);\n }\n}\n\n\/\/ TODO\n\/\/ person step:\n\/\/ - sample a person to talk to, based on trust. or encounter random.\n\/\/ - read a story from media, based on opinion alignment and importance\n\/\/ - talk to person\n\/\/ - change opinion based on media\n\/\/ - change opinion based on talking to person\n\/\/\n\nfn main() {\n \/\/ TODO\n \/\/ - bootstrap social network\n \/\/ - people more likely talk to likeminded friends\n \/\/ - randomly meet new people\n\n let sim = OpinionDynamicsSim {};\n\n \/\/ TODO\n let world = World { weather: \"sunny\".to_string() };\n\n \/\/ Setup the manager\n let addr = \"redis:\/\/127.0.0.1\/\";\n let pop_client = Client::open(addr).unwrap();\n let mut manager = Manager::new(addr, pop_client, sim.clone());\n\n let mut medias = vec![Media {\n opinions: vec![Opinion {\n polarity: -1.,\n priority: 0.5,\n },\n Opinion {\n polarity: -0.5,\n priority: 1.,\n }],\n },\n Media {\n opinions: vec![Opinion {\n polarity: 1.,\n priority: 0.8,\n },\n Opinion {\n polarity: -0.2,\n priority: 0.4,\n }],\n }];\n let media_ids = manager.spawns(medias.drain(..).map(|m| State::Media(m)).collect());\n\n let mut people = vec![Person::new(vec![Opinion {\n polarity: 1.,\n priority: 1.,\n },\n Opinion {\n polarity: 0.,\n priority: 0.,\n }]),\n Person::new(vec![Opinion {\n polarity: -1.,\n priority: 1.,\n },\n Opinion {\n polarity: 1.,\n priority: 0.,\n }])];\n let people_ids = manager.spawns(people.drain(..).map(|m| State::Person(m)).collect());\n\n \/\/ run(sim, world, manager, 4, 10);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add unit tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix packet id.<commit_after><|endoftext|>"} {"text":"<commit_before>use super::{Region, RegionIndex};\nuse std::mem;\nuse rustc::infer::InferCtxt;\nuse rustc::mir::{Location, Mir};\nuse rustc_data_structures::indexed_vec::{Idx, IndexVec};\nuse rustc_data_structures::fx::FxHashSet;\n\npub struct InferenceContext {\n definitions: IndexVec<RegionIndex, VarDefinition>,\n constraints: IndexVec<ConstraintIndex, Constraint>,\n errors: IndexVec<InferenceErrorIndex, InferenceError>,\n}\n\npub struct InferenceError {\n pub constraint_point: Location,\n pub name: (), \/\/ FIXME(nashenas88) RegionName\n}\n\nnewtype_index!(InferenceErrorIndex);\n\nstruct VarDefinition {\n name: (), \/\/ FIXME(nashenas88) RegionName\n value: Region,\n capped: bool,\n}\n\nimpl VarDefinition {\n pub fn new(value: Region) -> Self {\n Self {\n name: (),\n value,\n capped: false,\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\npub struct Constraint {\n sub: RegionIndex,\n sup: RegionIndex,\n point: Location,\n}\n\nnewtype_index!(ConstraintIndex);\n\nimpl InferenceContext {\n pub fn new(values: IndexVec<RegionIndex, Region>) -> Self {\n Self {\n definitions: values.into_iter().map(VarDefinition::new).collect(),\n constraints: IndexVec::new(),\n errors: IndexVec::new(),\n }\n }\n\n #[allow(dead_code)]\n pub fn cap_var(&mut self, v: RegionIndex) {\n self.definitions[v].capped = true;\n }\n\n #[allow(dead_code)]\n pub fn add_live_point(&mut self, v: RegionIndex, point: Location) {\n debug!(\"add_live_point({:?}, {:?})\", v, point);\n let definition = &mut self.definitions[v];\n if definition.value.add_point(point) {\n if definition.capped {\n self.errors.push(InferenceError {\n constraint_point: point,\n name: definition.name,\n });\n }\n }\n }\n\n #[allow(dead_code)]\n pub fn add_outlives(&mut self, sup: RegionIndex, sub: RegionIndex, point: Location) {\n 
debug!(\"add_outlives({:?}: {:?} @ {:?}\", sup, sub, point);\n self.constraints.push(Constraint { sup, sub, point });\n }\n\n #[allow(dead_code)]\n pub fn region(&self, v: RegionIndex) -> &Region {\n &self.definitions[v].value\n }\n\n pub fn solve<'a, 'gcx, 'tcx>(\n &mut self,\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n ) -> IndexVec<InferenceErrorIndex, InferenceError>\n where\n 'gcx: 'tcx + 'a,\n 'tcx: 'a,\n {\n let mut changed = true;\n let mut dfs = Dfs::new(infcx, mir);\n while changed {\n changed = false;\n for constraint in &self.constraints {\n let sub = &self.definitions[constraint.sub].value.clone();\n let sup_def = &mut self.definitions[constraint.sup];\n debug!(\"constraint: {:?}\", constraint);\n debug!(\" sub (before): {:?}\", sub);\n debug!(\" sup (before): {:?}\", sup_def.value);\n\n if dfs.copy(sub, &mut sup_def.value, constraint.point) {\n changed = true;\n if sup_def.capped {\n \/\/ This is kind of a hack, but when we add a\n \/\/ constraint, the \"point\" is always the point\n \/\/ AFTER the action that induced the\n \/\/ constraint. So report the error on the\n \/\/ action BEFORE that.\n assert!(constraint.point.statement_index > 0);\n let p = Location {\n block: constraint.point.block,\n statement_index: constraint.point.statement_index - 1,\n };\n\n self.errors.push(InferenceError {\n constraint_point: p,\n name: sup_def.name,\n });\n }\n }\n\n debug!(\" sup (after) : {:?}\", sup_def.value);\n debug!(\" changed : {:?}\", changed);\n }\n debug!(\"\\n\");\n }\n\n mem::replace(&mut self.errors, IndexVec::new())\n }\n}\n\nstruct Dfs<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> {\n #[allow(dead_code)]\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n}\n\nimpl<'a, 'gcx: 'tcx, 'tcx: 'a> Dfs<'a, 'gcx, 'tcx> {\n fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self {\n Self { infcx, mir }\n }\n\n fn copy(\n &mut self,\n from_region: &Region,\n to_region: &mut Region,\n start_point: Location,\n ) -> bool {\n let mut changed = false;\n\n let mut stack = vec![];\n let mut visited = FxHashSet();\n\n stack.push(start_point);\n while let Some(p) = stack.pop() {\n debug!(\" dfs: p={:?}\", p);\n\n if !from_region.may_contain(p) {\n debug!(\" not in from-region\");\n continue;\n }\n\n if !visited.insert(p) {\n debug!(\" already visited\");\n continue;\n }\n\n changed |= to_region.add_point(p);\n\n let block_data = &self.mir[p.block];\n let successor_points = if p.statement_index < block_data.statements.len() {\n vec![Location {\n statement_index: p.statement_index + 1,\n ..p\n }]\n } else {\n block_data.terminator()\n .successors()\n .iter()\n .map(|&basic_block| Location {\n statement_index: 0,\n block: basic_block,\n })\n .collect::<Vec<_>>()\n };\n\n if successor_points.is_empty() {\n \/\/ FIXME handle free regions\n \/\/ If we reach the END point in the graph, then copy\n \/\/ over any skolemized end points in the `from_region`\n \/\/ and make sure they are included in the `to_region`.\n \/\/ for region_decl in self.infcx.tcx.tables.borrow().free_region_map() {\n \/\/ \/\/ FIXME(nashenas88) figure out skolemized_end points\n \/\/ let block = self.env.graph.skolemized_end(region_decl.name);\n \/\/ let skolemized_end_point = Location {\n \/\/ block,\n \/\/ statement_index: 0,\n \/\/ };\n \/\/ changed |= to_region.add_point(skolemized_end_point);\n \/\/ }\n } else {\n stack.extend(successor_points);\n }\n }\n\n changed\n }\n}\n<commit_msg>Add License to infer.rs<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse super::{Region, RegionIndex};\nuse std::mem;\nuse rustc::infer::InferCtxt;\nuse rustc::mir::{Location, Mir};\nuse rustc_data_structures::indexed_vec::{Idx, IndexVec};\nuse rustc_data_structures::fx::FxHashSet;\n\npub struct InferenceContext {\n definitions: IndexVec<RegionIndex, VarDefinition>,\n constraints: IndexVec<ConstraintIndex, Constraint>,\n errors: IndexVec<InferenceErrorIndex, InferenceError>,\n}\n\npub struct InferenceError {\n pub constraint_point: Location,\n pub name: (), \/\/ FIXME(nashenas88) RegionName\n}\n\nnewtype_index!(InferenceErrorIndex);\n\nstruct VarDefinition {\n name: (), \/\/ FIXME(nashenas88) RegionName\n value: Region,\n capped: bool,\n}\n\nimpl VarDefinition {\n pub fn new(value: Region) -> Self {\n Self {\n name: (),\n value,\n capped: false,\n }\n }\n}\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\npub struct Constraint {\n sub: RegionIndex,\n sup: RegionIndex,\n point: Location,\n}\n\nnewtype_index!(ConstraintIndex);\n\nimpl InferenceContext {\n pub fn new(values: IndexVec<RegionIndex, Region>) -> Self {\n Self {\n definitions: values.into_iter().map(VarDefinition::new).collect(),\n constraints: IndexVec::new(),\n errors: IndexVec::new(),\n }\n }\n\n #[allow(dead_code)]\n pub fn cap_var(&mut self, v: RegionIndex) {\n self.definitions[v].capped = true;\n }\n\n #[allow(dead_code)]\n pub fn add_live_point(&mut self, v: RegionIndex, point: Location) {\n debug!(\"add_live_point({:?}, {:?})\", v, point);\n let definition = &mut self.definitions[v];\n if definition.value.add_point(point) {\n if definition.capped {\n self.errors.push(InferenceError {\n constraint_point: point,\n name: definition.name,\n });\n }\n }\n }\n\n #[allow(dead_code)]\n pub fn add_outlives(&mut self, sup: RegionIndex, sub: RegionIndex, point: Location) {\n debug!(\"add_outlives({:?}: {:?} @ {:?}\", sup, sub, point);\n self.constraints.push(Constraint { sup, sub, point });\n }\n\n #[allow(dead_code)]\n pub fn region(&self, v: RegionIndex) -> &Region {\n &self.definitions[v].value\n }\n\n pub fn solve<'a, 'gcx, 'tcx>(\n &mut self,\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n ) -> IndexVec<InferenceErrorIndex, InferenceError>\n where\n 'gcx: 'tcx + 'a,\n 'tcx: 'a,\n {\n let mut changed = true;\n let mut dfs = Dfs::new(infcx, mir);\n while changed {\n changed = false;\n for constraint in &self.constraints {\n let sub = &self.definitions[constraint.sub].value.clone();\n let sup_def = &mut self.definitions[constraint.sup];\n debug!(\"constraint: {:?}\", constraint);\n debug!(\" sub (before): {:?}\", sub);\n debug!(\" sup (before): {:?}\", sup_def.value);\n\n if dfs.copy(sub, &mut sup_def.value, constraint.point) {\n changed = true;\n if sup_def.capped {\n \/\/ This is kind of a hack, but when we add a\n \/\/ constraint, the \"point\" is always the point\n \/\/ AFTER the action that induced the\n \/\/ constraint. 
So report the error on the\n \/\/ action BEFORE that.\n assert!(constraint.point.statement_index > 0);\n let p = Location {\n block: constraint.point.block,\n statement_index: constraint.point.statement_index - 1,\n };\n\n self.errors.push(InferenceError {\n constraint_point: p,\n name: sup_def.name,\n });\n }\n }\n\n debug!(\" sup (after) : {:?}\", sup_def.value);\n debug!(\" changed : {:?}\", changed);\n }\n debug!(\"\\n\");\n }\n\n mem::replace(&mut self.errors, IndexVec::new())\n }\n}\n\nstruct Dfs<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> {\n #[allow(dead_code)]\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n}\n\nimpl<'a, 'gcx: 'tcx, 'tcx: 'a> Dfs<'a, 'gcx, 'tcx> {\n fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self {\n Self { infcx, mir }\n }\n\n fn copy(\n &mut self,\n from_region: &Region,\n to_region: &mut Region,\n start_point: Location,\n ) -> bool {\n let mut changed = false;\n\n let mut stack = vec![];\n let mut visited = FxHashSet();\n\n stack.push(start_point);\n while let Some(p) = stack.pop() {\n debug!(\" dfs: p={:?}\", p);\n\n if !from_region.may_contain(p) {\n debug!(\" not in from-region\");\n continue;\n }\n\n if !visited.insert(p) {\n debug!(\" already visited\");\n continue;\n }\n\n changed |= to_region.add_point(p);\n\n let block_data = &self.mir[p.block];\n let successor_points = if p.statement_index < block_data.statements.len() {\n vec![Location {\n statement_index: p.statement_index + 1,\n ..p\n }]\n } else {\n block_data.terminator()\n .successors()\n .iter()\n .map(|&basic_block| Location {\n statement_index: 0,\n block: basic_block,\n })\n .collect::<Vec<_>>()\n };\n\n if successor_points.is_empty() {\n \/\/ FIXME handle free regions\n \/\/ If we reach the END point in the graph, then copy\n \/\/ over any skolemized end points in the `from_region`\n \/\/ and make sure they are included in the `to_region`.\n \/\/ for region_decl in self.infcx.tcx.tables.borrow().free_region_map() {\n \/\/ \/\/ FIXME(nashenas88) figure out skolemized_end points\n \/\/ let block = self.env.graph.skolemized_end(region_decl.name);\n \/\/ let skolemized_end_point = Location {\n \/\/ block,\n \/\/ statement_index: 0,\n \/\/ };\n \/\/ changed |= to_region.add_point(skolemized_end_point);\n \/\/ }\n } else {\n stack.extend(successor_points);\n }\n }\n\n changed\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create resources\/score.rs<commit_after>use super::params::*;\n\nuse super::rs_es::Client;\nuse super::rs_es::query::Query;\nuse super::rs_es::operations::search::SearchHitsHitsResult;\nuse super::rs_es::operations::bulk::{BulkResult, Action};\nuse super::rs_es::operations::delete::DeleteResult;\nuse super::rs_es::operations::mapping::*;\nuse super::rs_es::error::EsError;\n\nuse resource::*;\n\n\/\/\/ The type that we use in ElasticSearch for defining a `Score`.\nconst ES_TYPE: &'static str = \"score\";\n\n\/\/\/ A collection of `Score`s.\n#[derive(Serialize, Deserialize, Debug, Clone)]\npub struct SearchResults {\n pub total: u64,\n pub scores: Vec<Score>,\n}\n\n\/\/\/ The representation of the score that will be indexed into ElasticSearch.\n#[derive(Serialize, Deserialize, Debug, Clone)]\npub struct Score {\n pub match_id: String,\n pub job_id: u32,\n pub talent_id: u32,\n pub score: f32\n}\n\n\/\/\/ Convert an ElasticSearch result into a `Score`.\nimpl From<SearchHitsHitsResult<Score>> for Score {\n fn from(hit: SearchHitsHitsResult<Score>) -> Score {\n *hit.source.unwrap()\n }\n}\n\nimpl Score {\n pub fn search_filters(params: &Map) -> 
Query {\n let job_id = match params.get(\"job_id\") {\n Some(&Value::U64(ref job_id)) => *job_id,\n _ => 0\n };\n\n let talent_id = match params.get(\"talent_id\") {\n Some(&Value::U64(ref talent_id)) => *talent_id,\n _ => 0\n };\n\n Query::build_bool()\n .with_must(\n vec![\n Query::build_term(\"job_id\", job_id).build(),\n Query::build_term(\"talent_id\", talent_id).build()\n ])\n .build()\n }\n\n pub fn search(es: &mut Client, default_index: &str, params: &Map) -> SearchResults {\n let index: Vec<&str> = match params.get(\"index\") {\n Some(&Value::String(ref index)) => vec![&index[..]],\n _ => vec![default_index]\n };\n\n let result = es.search_query()\n .with_indexes(&*index)\n .with_query(&Score::search_filters(params))\n .send::<Score>();\n\n match result {\n Ok(result) => {\n let scores: Vec<Score> = result.hits.hits.into_iter()\n .map(Score::from)\n .collect();\n\n SearchResults {\n total: result.hits.total,\n scores: scores\n }\n },\n Err(err) => {\n error!(\"{:?}\", err);\n SearchResults { total: 0, scores: vec![] }\n }\n }\n }\n\n fn delete(&self, es: &mut Client, index: &str) -> Result<DeleteResult, EsError> {\n es.delete(index, ES_TYPE, &*self.match_id)\n .send()\n }\n}\n\nimpl Resource for Score {\n type Results = SearchResults;\n\n \/\/\/ Populate the ElasticSearch index with `Vec<Score>`\n fn index(es: &mut Client, index: &str, resources: Vec<Self>) -> Result<BulkResult, EsError> {\n es.bulk(&resources.into_iter()\n .map(|r| {\n let match_id = r.match_id.to_owned();\n Action::index(r).with_id(match_id)\n })\n .collect::<Vec<Action<Score>>>())\n .with_index(index)\n .with_doc_type(ES_TYPE)\n .send()\n }\n\n \/\/\/ We'll call this one from `talent` as a normal function, we won't expose it outside.\n fn search(_es: &mut Client, _default_index: &str, _params: &Map) -> Self::Results {\n unimplemented!();\n }\n\n \/\/\/ We'll call this one from `talent` as a normal function, we won't expose it outside.\n fn delete(_es: &mut Client, _id: &str, _index: &str) -> Result<DeleteResult, EsError> {\n unimplemented!();\n }\n\n \/\/\/ We leave ES to create the mapping by inferring it from the input.\n #[allow(unused_must_use)]\n fn reset_index(_es: &mut Client, _index: &str) -> Result<MappingResult, EsError> {\n unimplemented!();\n }\n}\n\n#[cfg(test)]\nmod tests {\n extern crate rs_es;\n use self::rs_es::Client;\n\n extern crate params;\n use self::params::*;\n\n use resource::*;\n\n use resources::{Score, Talent};\n use resources::score::SearchResults;\n use resources::tests::*;\n\n pub fn populate_index(mut client: &mut Client) -> bool {\n let scores = vec![\n Score {\n match_id: \"515ec9bb-0511-4464-92bb-bd21c5ed7b22\".to_owned(),\n job_id: 1,\n talent_id: 10,\n score: 0.545\n }\n ];\n\n Score::index(&mut client, &config.es.index, scores).is_ok()\n }\n\n fn refresh_index(client: &mut Client) {\n client.refresh()\n .with_indexes(&[&config.es.index])\n .send()\n .unwrap();\n }\n\n impl SearchResults {\n pub fn match_ids(&self) -> Vec<String> {\n self.scores.iter().map(|s| s.match_id.to_owned()).collect()\n }\n }\n\n #[test]\n fn test_search() {\n let mut client = make_client();\n\n assert!(Talent::reset_index(&mut client, &*config.es.index).is_ok());\n refresh_index(&mut client);\n\n assert!(populate_index(&mut client));\n refresh_index(&mut client);\n\n \/\/ no parameters are given\n {\n let results = Score::search(&mut client, &*config.es.index, &Map::new());\n assert_eq!(0, results.total);\n assert!(results.scores.is_empty());\n }\n\n \/\/ given parameters have an unexpected type\n 
{\n let mut map = Map::new();\n map.assign(\"job_id\", Value::String(\"2B\".into())).unwrap();\n map.assign(\"talent_id\", Value::String(\"9S\".into())).unwrap();\n\n let results = Score::search(&mut client, &*config.es.index, &map);\n assert_eq!(0, results.total);\n assert!(results.scores.is_empty());\n }\n\n \/\/ job_id and talent_id are given\n {\n let mut map = Map::new();\n map.assign(\"job_id\", Value::U64(1)).unwrap();\n map.assign(\"talent_id\", Value::U64(10)).unwrap();\n\n let results = Score::search(&mut client, &*config.es.index, &map);\n assert_eq!(1, results.total);\n assert_eq!(vec![\"515ec9bb-0511-4464-92bb-bd21c5ed7b22\"], results.match_ids());\n assert_ne!(vec![\"2a-2b-9s\"], results.match_ids());\n }\n\n \/\/ delete between searches\n {\n let mut map = Map::new();\n map.assign(\"job_id\", Value::U64(1)).unwrap();\n map.assign(\"talent_id\", Value::U64(10)).unwrap();\n\n let results = Score::search(&mut client, &*config.es.index, &map);\n assert_eq!(1, results.total);\n\n let score = &results.scores[0];\n score.delete(&mut client, &*config.es.index).unwrap();\n\n refresh_index(&mut client);\n\n let results = Score::search(&mut client, &*config.es.index, &map);\n assert_eq!(0, results.total);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create PID Variable To Store Shell's PID<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>substitution cipher<commit_after>use std::os::args;\nuse std::vec;\n\nfn main() {\n let input = args()[1];\n let shifts = build_vec(26, ~[]);\n do shifts.map |e| {\n println(fmt!(\"%?: %?\", e, input.map_chars(substitute(*e))));\n };\n}\n\nfn substitute(shift : int) -> @fn(char) -> char {\n |c:char| {\n ((((c as int) - ('A' as int) + shift) % 26) + ('A' as int)) as char\n }\n}\n\nfn build_vec(n : int, v : ~[int]) -> ~[int] {\n match n {\n 0 => v,\n _ => {\n let nv = vec::append(v, ~[n]);\n build_vec(n-1, nv)\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>implementation of Point struct added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a basic test for specialization.<commit_after>#![feature(custom_attribute, specialization)]\n#![allow(dead_code, unused_attributes)]\n\ntrait IsUnit {\n fn is_unit() -> bool;\n}\n\nimpl<T> IsUnit for T {\n default fn is_unit() -> bool { false }\n}\n\nimpl IsUnit for () {\n fn is_unit() -> bool { true }\n}\n\n#[miri_run]\nfn specialization() -> (bool, bool) {\n (i32::is_unit(), <()>::is_unit())\n}\n<|endoftext|>"} {"text":"<commit_before>\nuse mix = sdl2_mixer;\nuse {\n AudioBackEnd,\n Music,\n Sound,\n};\n\n\/\/\/ Wraps SDL2_mixer chunk data.\npub struct SoundSDL2 {\n chunk: mix::Chunk,\n loops: int,\n}\n\nimpl SoundSDL2 {\n \/\/\/ Loads sound by relative file name to the asset root.\n pub fn from_path(path: &Path) -> Result<SoundSDL2, String> {\n match mix::Chunk::from_file(path) {\n Ok(chunk) => {\n Ok(SoundSDL2 {\n chunk: chunk,\n loops: 1,\n })\n },\n Err(msg) => {\n Err(format!(\"Could not load '{}': {}\", path.filename_str().unwrap(), msg))\n },\n }\n }\n}\n\nimpl Sound for SoundSDL2 {\n}\n\n\/\/\/ Wraps SDL2_mixer music data.\npub struct MusicSDL2 {\n music: mix::Music,\n loops: int,\n}\n\nimpl MusicSDL2 {\n \/\/\/ Loads music by relative file name to the asset root.\n pub fn from_path(path: &Path) -> Result<MusicSDL2, String> {\n match mix::Music::from_file(path) {\n Ok(music) => {\n Ok(MusicSDL2 {\n music: music,\n loops: 1,\n })\n },\n Err(msg) => {\n Err(format!(\"Could not load '{}': {}\", path.filename_str().unwrap(), msg))\n },\n }\n 
}\n}\n\nimpl Music for MusicSDL2 {\n}\n\n\/\/\/ An audio back end using SDL2_mixer\npub struct AudioSDL2;\n\nimpl AudioSDL2 {\n \/\/\/ Create a new instance\n pub fn new() -> AudioSDL2 {\n mix::init(mix::InitMp3 | mix::InitFlac\n | mix::InitMod | mix::InitFluidSynth\n | mix::InitModPlug | mix::InitOgg);\n mix::open_audio(mix::DEFAULT_FREQUENCY,\n mix::DEFAULT_FORMAT,\n mix::DEFAULT_CHANNELS,\n 1024).unwrap();\n mix::allocate_channels(mix::DEFAULT_CHANNELS);\n AudioSDL2\n }\n}\n\nimpl Drop for AudioSDL2 {\n fn drop(&mut self) {\n mix::quit();\n }\n}\n\nimpl AudioBackEnd<MusicSDL2, SoundSDL2> for AudioSDL2 {\n fn play_sound(&self, sound: &SoundSDL2) {\n match mix::Channel::all().play(&sound.chunk, sound.loops) {\n Err(msg) => {\n println!(\"Warning: {}\", msg);\n },\n _ => {}\n }\n }\n\n fn play_music(&self, music: &MusicSDL2) {\n match music.music.play(music.loops) {\n Err(msg) => {\n println!(\"Warning: {}\", msg);\n },\n _ => {}\n }\n }\n}\n\n<commit_msg>Removed 'Drop' for AudioSDL2<commit_after>\nuse mix = sdl2_mixer;\nuse {\n AudioBackEnd,\n Music,\n Sound,\n};\n\n\/\/\/ Wraps SDL2_mixer chunk data.\npub struct SoundSDL2 {\n chunk: mix::Chunk,\n loops: int,\n}\n\nimpl SoundSDL2 {\n \/\/\/ Loads sound by relative file name to the asset root.\n pub fn from_path(path: &Path) -> Result<SoundSDL2, String> {\n match mix::Chunk::from_file(path) {\n Ok(chunk) => {\n Ok(SoundSDL2 {\n chunk: chunk,\n loops: 1,\n })\n },\n Err(msg) => {\n Err(format!(\"Could not load '{}': {}\", path.filename_str().unwrap(), msg))\n },\n }\n }\n}\n\nimpl Sound for SoundSDL2 {\n}\n\n\/\/\/ Wraps SDL2_mixer music data.\npub struct MusicSDL2 {\n music: mix::Music,\n loops: int,\n}\n\nimpl MusicSDL2 {\n \/\/\/ Loads music by relative file name to the asset root.\n pub fn from_path(path: &Path) -> Result<MusicSDL2, String> {\n match mix::Music::from_file(path) {\n Ok(music) => {\n Ok(MusicSDL2 {\n music: music,\n loops: 1,\n })\n },\n Err(msg) => {\n Err(format!(\"Could not load '{}': {}\", path.filename_str().unwrap(), msg))\n },\n }\n }\n}\n\nimpl Music for MusicSDL2 {\n}\n\n\/\/\/ An audio back end using SDL2_mixer\npub struct AudioSDL2;\n\nimpl AudioSDL2 {\n \/\/\/ Create a new instance\n pub fn new() -> AudioSDL2 {\n mix::init(mix::InitMp3 | mix::InitFlac\n | mix::InitMod | mix::InitFluidSynth\n | mix::InitModPlug | mix::InitOgg);\n mix::open_audio(mix::DEFAULT_FREQUENCY,\n mix::DEFAULT_FORMAT,\n mix::DEFAULT_CHANNELS,\n 1024).unwrap();\n mix::allocate_channels(mix::DEFAULT_CHANNELS);\n AudioSDL2\n }\n}\n\nimpl AudioBackEnd<MusicSDL2, SoundSDL2> for AudioSDL2 {\n fn play_sound(&self, sound: &SoundSDL2) {\n match mix::Channel::all().play(&sound.chunk, sound.loops) {\n Err(msg) => {\n println!(\"Warning: {}\", msg);\n },\n _ => {}\n }\n }\n\n fn play_music(&self, music: &MusicSDL2) {\n match music.music.play(music.loops) {\n Err(msg) => {\n println!(\"Warning: {}\", msg);\n },\n _ => {}\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor one-time-use command-buffers -- use an auxiliary command buffer wherever an immediate action is required<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>starting to add boruvka data structures.<commit_after>extern mod std;\nextern mod extra;\n\nstruct ChildNode {\n r: int;\n g: int;\n b: int;\n x: int;\n y: int;\n \n}\n\nimpl ChildNode {\n\n}\n\nstruct ParentNode {\n \/\/ will need a port,chan here\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>initial commit<commit_after>use std::str;\nuse std::vec;\nuse std::rt::io::{Reader, 
Writer};\nuse std::rt::io::net::tcp::TcpStream;\nuse std::rt::io::net::ip::{Ipv4Addr, SocketAddr};\nuse std::rt::io::buffered::BufferedStream;\n\nfn main() {\n let ServerAddress = Ipv4Addr(127, 0, 0, 1); \/\/ Connect to localhost\n let ServerPort = 6667;\n let SocketAddress = SocketAddr { ip: ServerAddress, port: ServerPort };\n \n let tcp_stream = TcpStream::connect(SocketAddress).unwrap();\n \n let mut buf: ~[u8] = vec::from_elem(1024, 0u8);\n let mut stream = BufferedStream::new(tcp_stream);\n\n stream.write(bytes!(\"NICK rustbot\\r\\n\"));\n stream.write(bytes!(\"USER rustbot 8 * :rustbot\\r\\n\"));\n stream.flush();\n \n let mut g = 1;\n while g > 0 {\n let r = stream.read(buf);\n let s = str::from_utf8_slice(buf);\n match r { \n Some(nread) => {\n if s.contains(\"004\") { \n bot_loop() \/\/ This indicates we've connected to the server\n } else { \/\/ successfully.\n match s {\n _ => {\n ()\n \/* println(fmt!(\"Read %u bytes\", nread));\n * println(fmt!(\"%s\", s));\n *\/\n }\n }\n }\n },\n None => {\n println!(\"End of Stream!\");\n g = 0;\n }\n }\n }\n\n println(\"Connection timed out!\");\n}\n\nfn bot_loop() {\n println!(\"You made it, yay!\");\n loop {};\n}\n\n\/*\n * ^(?:[:@]([^\\\\s]+) )?([^\\\\s]+)(?: ((?:[^:\\\\s][^\\\\s]* ?)*))?(?: ?:(.*))?$\n * regex for parsing irc messages\n *\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>bbox own file<commit_after>use types::*;\n\nstruct BBox {\n ll: FVec2,\n ur: FVec2,\n}\n\nimpl BBox {\n fn max_dim(&self) -> f32 {\n (self.ur.x - self.ll.x).max(self.ur.y - self.ll.y)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Play with documentation for dice<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a constructor to Poly from Rect<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add option to show files size with print<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove useless enum<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added main function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactoring<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added usage<commit_after><|endoftext|>"} {"text":"<commit_before>use serde_json::builder::ObjectBuilder;\nuse serde_json::Value;\nuse std::sync::mpsc::{\n Receiver as MpscReceiver,\n Sender as MpscSender,\n TryRecvError,\n};\nuse std::sync::{Arc, Mutex};\nuse std::time::{Duration as StdDuration, Instant};\nuse std::{env, thread};\nuse super::super::ClientError;\nuse super::{GatewayError, GatewayStatus};\nuse time::{self, Duration};\nuse websocket::client::request::Url as RequestUrl;\nuse websocket::client::{Receiver, Sender};\nuse websocket::stream::WebSocketStream;\nuse ::constants::{self, LARGE_THRESHOLD, OpCode};\nuse ::error::{Error, Result};\nuse ::internal::ws_impl::{ReceiverExt, SenderExt};\nuse ::model::event::{Event, GatewayEvent, ReadyEvent};\n\n#[inline]\npub fn parse_ready(event: GatewayEvent,\n tx: &MpscSender<GatewayStatus>,\n receiver: &mut Receiver<WebSocketStream>,\n identification: Value)\n -> Result<(ReadyEvent, u64)> {\n match event {\n GatewayEvent::Dispatch(seq, Event::Ready(event)) => {\n Ok((event, seq))\n },\n GatewayEvent::InvalidateSession => {\n debug!(\"Session invalidation\");\n\n let _ = tx.send(GatewayStatus::SendMessage(identification));\n\n match receiver.recv_json(GatewayEvent::decode)? 
{\n GatewayEvent::Dispatch(seq, Event::Ready(event)) => {\n Ok((event, seq))\n },\n other => {\n debug!(\"Unexpected event: {:?}\", other);\n\n Err(Error::Gateway(GatewayError::InvalidHandshake))\n },\n }\n },\n other => {\n debug!(\"Unexpected event: {:?}\", other);\n\n Err(Error::Gateway(GatewayError::InvalidHandshake))\n },\n }\n}\n\npub fn identify(token: &str, shard_info: Option<[u64; 2]>) -> Value {\n ObjectBuilder::new()\n .insert(\"op\", OpCode::Identify.num())\n .insert_object(\"d\", |mut object| {\n object = identify_compression(object)\n .insert(\"large_threshold\", LARGE_THRESHOLD) \/\/ max value\n .insert_object(\"properties\", |object| object\n .insert(\"$browser\", \"Ergonomic and high-level Rust library\")\n .insert(\"$device\", \"serenity\")\n .insert(\"$os\", env::consts::OS))\n .insert(\"token\", token)\n .insert(\"v\", constants::GATEWAY_VERSION);\n\n if let Some(shard_info) = shard_info {\n object = object.insert_array(\"shard\", |a| a\n .push(shard_info[0])\n .push(shard_info[1]));\n }\n\n object\n })\n .build()\n}\n\n#[inline(always)]\npub fn identify_compression(object: ObjectBuilder) -> ObjectBuilder {\n object.insert(\"compression\", !cfg!(feature=\"debug\"))\n}\n\npub fn build_gateway_url(base: &str) -> Result<RequestUrl> {\n RequestUrl::parse(&format!(\"{}?v={}\", base, constants::GATEWAY_VERSION))\n .map_err(|_| Error::Client(ClientError::Gateway))\n}\n\npub fn keepalive(interval: u64,\n heartbeat_sent: Arc<Mutex<Instant>>,\n mut sender: Sender<WebSocketStream>,\n channel: MpscReceiver<GatewayStatus>) {\n let mut base_interval = Duration::milliseconds(interval as i64);\n let mut next_tick = time::get_time() + base_interval;\n\n let mut last_sequence = 0;\n let mut last_successful = false;\n\n 'outer: loop {\n thread::sleep(StdDuration::from_millis(100));\n\n loop {\n match channel.try_recv() {\n Ok(GatewayStatus::Interval(interval)) => {\n base_interval = Duration::milliseconds(interval as i64);\n },\n Ok(GatewayStatus::Sender(new_sender)) => {\n sender = new_sender;\n },\n Ok(GatewayStatus::SendMessage(val)) => {\n if let Err(why) = sender.send_json(&val) {\n warn!(\"Error sending message: {:?}\", why);\n }\n },\n Ok(GatewayStatus::Sequence(seq)) => {\n last_sequence = seq;\n },\n Err(TryRecvError::Empty) => break,\n Err(TryRecvError::Disconnected) => break 'outer,\n }\n }\n\n if time::get_time() >= next_tick {\n next_tick = next_tick + base_interval;\n\n let map = ObjectBuilder::new()\n .insert(\"d\", last_sequence)\n .insert(\"op\", OpCode::Heartbeat.num())\n .build();\n\n trace!(\"Sending heartbeat d: {}\", last_sequence);\n\n match sender.send_json(&map) {\n Ok(_) => {\n let now = Instant::now();\n\n *heartbeat_sent.lock().unwrap() = now;\n },\n Err(why) => {\n warn!(\"Error sending keepalive: {:?}\", why);\n\n if last_successful {\n debug!(\"If next keepalive fails, closing\");\n } else {\n break;\n }\n\n last_successful = false;\n },\n }\n }\n }\n\n debug!(\"Closing keepalive\");\n\n match sender.shutdown_all() {\n Ok(_) => debug!(\"Successfully shutdown sender\/receiver\"),\n Err(why) => warn!(\"Failed to shutdown sender\/receiver: {:?}\", why),\n }\n}\n<commit_msg>Log only unexpected keepalive errors<commit_after>use serde_json::builder::ObjectBuilder;\nuse serde_json::Value;\nuse std::sync::mpsc::{\n Receiver as MpscReceiver,\n Sender as MpscSender,\n TryRecvError,\n};\nuse std::sync::{Arc, Mutex};\nuse std::time::{Duration as StdDuration, Instant};\nuse std::{env, thread};\nuse super::super::ClientError;\nuse super::{GatewayError, 
GatewayStatus};\nuse time::{self, Duration};\nuse websocket::client::request::Url as RequestUrl;\nuse websocket::client::{Receiver, Sender};\nuse websocket::result::WebSocketError as WsError;\nuse websocket::stream::WebSocketStream;\nuse ::constants::{self, LARGE_THRESHOLD, OpCode};\nuse ::error::{Error, Result};\nuse ::internal::ws_impl::{ReceiverExt, SenderExt};\nuse ::model::event::{Event, GatewayEvent, ReadyEvent};\n\n#[inline]\npub fn parse_ready(event: GatewayEvent,\n tx: &MpscSender<GatewayStatus>,\n receiver: &mut Receiver<WebSocketStream>,\n identification: Value)\n -> Result<(ReadyEvent, u64)> {\n match event {\n GatewayEvent::Dispatch(seq, Event::Ready(event)) => {\n Ok((event, seq))\n },\n GatewayEvent::InvalidateSession => {\n debug!(\"Session invalidation\");\n\n let _ = tx.send(GatewayStatus::SendMessage(identification));\n\n match receiver.recv_json(GatewayEvent::decode)? {\n GatewayEvent::Dispatch(seq, Event::Ready(event)) => {\n Ok((event, seq))\n },\n other => {\n debug!(\"Unexpected event: {:?}\", other);\n\n Err(Error::Gateway(GatewayError::InvalidHandshake))\n },\n }\n },\n other => {\n debug!(\"Unexpected event: {:?}\", other);\n\n Err(Error::Gateway(GatewayError::InvalidHandshake))\n },\n }\n}\n\npub fn identify(token: &str, shard_info: Option<[u64; 2]>) -> Value {\n ObjectBuilder::new()\n .insert(\"op\", OpCode::Identify.num())\n .insert_object(\"d\", |mut object| {\n object = identify_compression(object)\n .insert(\"large_threshold\", LARGE_THRESHOLD) \/\/ max value\n .insert_object(\"properties\", |object| object\n .insert(\"$browser\", \"Ergonomic and high-level Rust library\")\n .insert(\"$device\", \"serenity\")\n .insert(\"$os\", env::consts::OS))\n .insert(\"token\", token)\n .insert(\"v\", constants::GATEWAY_VERSION);\n\n if let Some(shard_info) = shard_info {\n object = object.insert_array(\"shard\", |a| a\n .push(shard_info[0])\n .push(shard_info[1]));\n }\n\n object\n })\n .build()\n}\n\n#[inline(always)]\npub fn identify_compression(object: ObjectBuilder) -> ObjectBuilder {\n object.insert(\"compression\", !cfg!(feature=\"debug\"))\n}\n\npub fn build_gateway_url(base: &str) -> Result<RequestUrl> {\n RequestUrl::parse(&format!(\"{}?v={}\", base, constants::GATEWAY_VERSION))\n .map_err(|_| Error::Client(ClientError::Gateway))\n}\n\npub fn keepalive(interval: u64,\n heartbeat_sent: Arc<Mutex<Instant>>,\n mut sender: Sender<WebSocketStream>,\n channel: MpscReceiver<GatewayStatus>) {\n let mut base_interval = Duration::milliseconds(interval as i64);\n let mut next_tick = time::get_time() + base_interval;\n\n let mut last_sequence = 0;\n let mut last_successful = false;\n\n 'outer: loop {\n thread::sleep(StdDuration::from_millis(100));\n\n loop {\n match channel.try_recv() {\n Ok(GatewayStatus::Interval(interval)) => {\n base_interval = Duration::milliseconds(interval as i64);\n },\n Ok(GatewayStatus::Sender(new_sender)) => {\n sender = new_sender;\n },\n Ok(GatewayStatus::SendMessage(val)) => {\n if let Err(why) = sender.send_json(&val) {\n warn!(\"Error sending message: {:?}\", why);\n }\n },\n Ok(GatewayStatus::Sequence(seq)) => {\n last_sequence = seq;\n },\n Err(TryRecvError::Empty) => break,\n Err(TryRecvError::Disconnected) => break 'outer,\n }\n }\n\n if time::get_time() >= next_tick {\n next_tick = next_tick + base_interval;\n\n let map = ObjectBuilder::new()\n .insert(\"d\", last_sequence)\n .insert(\"op\", OpCode::Heartbeat.num())\n .build();\n\n trace!(\"Sending heartbeat d: {}\", last_sequence);\n\n match sender.send_json(&map) {\n Ok(_) => {\n 
let now = Instant::now();\n\n *heartbeat_sent.lock().unwrap() = now;\n },\n Err(why) => {\n match why {\n Error::WebSocket(WsError::IoError(err)) => {\n if err.raw_os_error() != Some(32) {\n debug!(\"Err w\/ keepalive: {:?}\", err);\n }\n },\n other => warn!(\"Other err w\/ keepalive: {:?}\", other),\n }\n\n if last_successful {\n debug!(\"If next keepalive fails, closing\");\n } else {\n break;\n }\n\n last_successful = false;\n },\n }\n }\n }\n\n debug!(\"Closing keepalive\");\n\n match sender.shutdown_all() {\n Ok(_) => debug!(\"Successfully shutdown sender\/receiver\"),\n Err(why) => {\n \/\/ This can fail if the receiver already shutdown.\n if why.raw_os_error() != Some(107) {\n warn!(\"Failed to shutdown sender\/receiver: {:?}\", why);\n }\n },\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate alloc;\n\nuse self::alloc::boxed::*;\n\nuse core::ops::DerefMut;\n\nuse common::event::*;\nuse common::queue::*;\nuse common::scheduler;\nuse common::string::*;\n\nuse super::color::*;\nuse super::display::*;\nuse super::point::*;\nuse super::size::*;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Display,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: Queue<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = Box::new(Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::new(255, 255, 255),\n border_color: Color::alpha(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: Queue::new(),\n ptr: 0 as *mut Window,\n });\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option = self.events.pop();\n scheduler::end_no_ints(reenable);\n }\n\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n self.content.flip();\n RedrawEvent { redraw: REDRAW_ALL }.to_event().trigger();\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display) {\n if self.focused {\n self.border_color = Color::alpha(128, 128, 128, 192);\n } else {\n self.border_color = Color::alpha(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::new(0, 0, 0);\n } else {\n self.title_color = Color::new(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n 
Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2,\n self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize,\n self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const u32,\n Size::new(self.content.width, self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch {\n if mouse_event.left_button {\n if !self.minimized && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 &&\n mouse_event.y < self.size.height as isize + 2 {\n caught = true;\n }\n\n if !self.last_mouse_event.left_button && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 && mouse_event.y < 0 {\n self.dragging = true;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.minimized && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 &&\n mouse_event.y < self.size.height as isize + 2 {\n caught = true;\n }\n\n if !self.last_mouse_event.right_button && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 && mouse_event.y < 0 {\n self.minimized = !self.minimized;\n caught = true;\n }\n }\n\n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<commit_msg>Fix crash<commit_after>extern crate alloc;\n\nuse self::alloc::boxed::*;\n\nuse core::ops::DerefMut;\n\nuse common::event::*;\nuse common::queue::*;\nuse common::scheduler;\nuse common::string::*;\n\nuse super::color::*;\nuse super::display::*;\nuse super::point::*;\nuse super::size::*;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Display,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: Queue<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn 
new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = box Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::new(255, 255, 255),\n border_color: Color::alpha(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: Queue::new(),\n ptr: 0 as *mut Window,\n };\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option = self.events.pop();\n scheduler::end_no_ints(reenable);\n }\n\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n self.content.flip();\n RedrawEvent { redraw: REDRAW_ALL }.to_event().trigger();\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display) {\n if self.focused {\n self.border_color = Color::alpha(128, 128, 128, 192);\n } else {\n self.border_color = Color::alpha(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::new(0, 0, 0);\n } else {\n self.title_color = Color::new(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2,\n self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize,\n self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const u32,\n Size::new(self.content.width, self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch {\n if mouse_event.left_button {\n if !self.minimized && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 &&\n mouse_event.y < self.size.height as isize + 2 {\n caught = true;\n }\n\n if !self.last_mouse_event.left_button && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 && mouse_event.y < 0 {\n self.dragging = true;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.minimized && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 &&\n mouse_event.y < 
self.size.height as isize + 2 {\n caught = true;\n }\n\n if !self.last_mouse_event.right_button && mouse_event.x >= -2 &&\n mouse_event.x < self.size.width as isize + 4 &&\n mouse_event.y >= -18 && mouse_event.y < 0 {\n self.minimized = !self.minimized;\n caught = true;\n }\n }\n\n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>route mob attack logic to gamestate handler<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add title bar setting, add ability to produce any character using hex code<commit_after><|endoftext|>"} {"text":"<commit_before>use programs::common::*;\n\npub struct Application;\n\nimpl SessionItem for Application {\n fn main(&mut self, url: URL){\n let mut window = Window::new(Point::new((rand() % 400 + 50) as isize, (rand() % 300 + 50) as isize), Size::new(640, 480), \"Example Game\".to_string());\n let start_time = Duration::monotonic();\n let mut last_time = start_time;\n let mut running = true;\n while running {\n let current_time = Duration::monotonic();\n let run_time = current_time - start_time;\n let frame_time = current_time - last_time;\n last_time = current_time;\n\n loop {\n match window.poll() {\n EventOption::Key(key_event) => {\n if key_event.pressed && key_event.scancode == K_ESC {\n running = false;\n break;\n }\n },\n EventOption::None => break,\n _ => ()\n }\n }\n\n let content = &mut window.content;\n content.set(Color::new(255, 255, 255));\n\n { \/\/Draw\n let mut y = 0;\n content.text(Point::new(0, y), &\"Running Time\".to_string(), Color::new(50, 50, 50));\n y += 16;\n content.text(Point::new(8, y), &String::from_num_signed(run_time.secs as isize), Color::new(50, 50, 50));\n y += 16;\n content.text(Point::new(8, y), &String::from_num(run_time.nanos as usize), Color::new(50, 50, 50));\n y += 32;\n\n content.text(Point::new(0, y), &\"Frame Time\".to_string(), Color::new(50, 50, 50));\n y += 16;\n content.text(Point::new(8, y), &String::from_num_signed(frame_time.secs as isize), Color::new(50, 50, 50));\n y += 16;\n content.text(Point::new(8, y), &String::from_num(frame_time.nanos as usize), Color::new(50, 50, 50));\n y += 32;\n }\n\n content.flip();\n\n RedrawEvent {\n redraw: REDRAW_ALL\n }.trigger();\n\n sys_yield();\n }\n }\n}\n\nimpl Application {\n pub fn new() -> Application {\n Application\n }\n}\n<commit_msg>Game will load sprites, and sleep<commit_after>use programs::common::*;\n\nuse graphics::bmp::*;\n\npub fn sleep(duration: Duration){\n let start_time = Duration::monotonic();\n loop {\n let elapsed = Duration::monotonic() - start_time;\n if elapsed > duration {\n break;\n }else{\n sys_yield();\n }\n }\n}\n\npub struct Sprite {\n point: Point,\n image: BMP\n}\n\nimpl Sprite {\n pub fn draw(&self, content: &mut Display){\n content.image_alpha(self.point, self.image.data, self.image.size);\n }\n}\n\npub struct Application;\n\nimpl SessionItem for Application {\n fn main(&mut self, url: URL){\n let mut window = 
Window::new(Point::new((rand() % 400 + 50) as isize, (rand() % 300 + 50) as isize), Size::new(640, 480), \"Example Game (Loading)\".to_string());\n\n\n let mut player;\n {\n let mut image = URL::from_string(&\"file:\/\/\/game\/ninjaroofront.bmp\".to_string()).open();\n let mut bytes: Vec<u8> = Vec::new();\n image.read_to_end(&mut bytes);\n player = Sprite {\n point: Point::new(200, 200),\n image: unsafe{ BMP::from_data(bytes.as_ptr() as usize) }\n };\n }\n\n window.title = \"Example Game\".to_string();\n\n let mut keys: Vec<u8> = Vec::new();\n let mut redraw = true;\n let mut running = true;\n while running {\n loop {\n match window.poll() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => {\n running = false;\n break;\n },\n _ => ()\n }\n\n let mut found = false;\n for key in keys.iter() {\n if *key == key_event.scancode {\n found = true;\n break;\n }\n }\n if ! found {\n keys.push(key_event.scancode);\n }\n }else{\n let mut i = 0;\n while i < keys.len() {\n let mut remove = false;\n if let Option::Some(key) = keys.get(i) {\n if *key == key_event.scancode {\n remove = true;\n }\n }\n if remove {\n keys.remove(i);\n }else{\n i += 1;\n }\n }\n }\n },\n EventOption::None => break,\n _ => ()\n }\n }\n\n for key in keys.iter() {\n match *key {\n K_LEFT => {\n player.point.x = max(0, player.point.x - 1);\n redraw = true;\n },\n K_RIGHT => {\n player.point.x = min(window.content.width as isize - 1, player.point.x + 1);\n redraw = true;\n },\n K_UP => {\n player.point.y = max(0, player.point.y - 1);\n redraw = true;\n },\n K_DOWN => {\n player.point.y = min(window.content.height as isize - 1, player.point.y + 1);\n redraw = true;\n },\n _ => ()\n }\n }\n\n if redraw {\n redraw = false;\n\n let content = &mut window.content;\n content.set(Color::new(128, 128, 255));\n\n player.draw(content);\n\n content.flip();\n\n RedrawEvent {\n redraw: REDRAW_ALL\n }.trigger();\n }\n\n sleep(Duration {\n secs: 0,\n nanos: 1000000000\/120\n });\n }\n }\n}\n\nimpl Application {\n pub fn new() -> Application {\n Application\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io::{IoResult, File};\nuse std::io::util::copy;\nuse http;\nuse http::headers::content_type::MediaType;\nuse time;\nuse mimes::get_media_type;\n\n\/\/\/A container for the response\npub struct Response<'a, 'b> {\n \/\/\/the original `http::server::ResponseWriter`\n pub origin: &'a mut http::server::ResponseWriter<'b>,\n}\n\nimpl<'a, 'b> Response<'a, 'b> {\n\n pub fn from_internal<'c, 'd>(response: &'c mut http::server::ResponseWriter<'d>) -> Response<'c, 'd> {\n Response {\n origin: response\n }\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ response.send(\"hello world\");\n \/\/\/ ```\n pub fn send (&mut self, text: &str) {\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n Response::set_headers(self.origin);\n let _ = self.origin.write(text.as_bytes());\n }\n\n fn set_headers(response_writer: &mut http::server::ResponseWriter) {\n response_writer.headers.date = Some(time::now_utc());\n\n \/\/ we don't need to set this https:\/\/github.com\/Ogeon\/rustful\/issues\/3#issuecomment-44787613\n response_writer.headers.content_length = None;\n response_writer.headers.content_type = Some(MediaType {\n type_: String::from_str(\"text\"),\n subtype: String::from_str(\"plain\"),\n parameters: vec!((String::from_str(\"charset\"), String::from_str(\"UTF-8\")))\n });\n 
response_writer.headers.server = Some(String::from_str(\"Nickel\"));\n }\n\n pub fn send_file(&mut self, path: &Path) -> IoResult<()> {\n let mut file = try!(File::open(path));\n self.origin.headers.content_length = None;\n\n self.origin.headers.content_type = path.extension_str().and_then(get_media_type);\n self.origin.headers.server = Some(String::from_str(\"Nickel\"));\n copy(&mut file, self.origin)\n }\n}\n\n#[test]\nfn matches_content_type () {\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension_str().and_then(get_media_type).unwrap();\n\n assert_eq!(content_type.type_.as_slice(), \"text\");\n assert_eq!(content_type.subtype.as_slice(), \"plain\");\n}\n<commit_msg>=BG= set default content type when not already set<commit_after>use std::io::{IoResult, File};\nuse std::io::util::copy;\nuse http;\nuse time;\nuse mimes::get_media_type;\n\n\/\/\/A container for the response\npub struct Response<'a, 'b> {\n \/\/\/the original `http::server::ResponseWriter`\n pub origin: &'a mut http::server::ResponseWriter<'b>,\n}\n\nimpl<'a, 'b> Response<'a, 'b> {\n\n pub fn from_internal<'c, 'd>(response: &'c mut http::server::ResponseWriter<'d>) -> Response<'c, 'd> {\n Response {\n origin: response\n }\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ response.send(\"hello world\");\n \/\/\/ ```\n pub fn send (&mut self, text: &str) {\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n Response::set_headers(self.origin);\n let _ = self.origin.write(text.as_bytes());\n }\n\n fn set_headers(response_writer: &mut http::server::ResponseWriter) {\n response_writer.headers.date = Some(time::now_utc());\n\n \/\/ we don't need to set this https:\/\/github.com\/Ogeon\/rustful\/issues\/3#issuecomment-44787613\n response_writer.headers.content_length = None;\n response_writer.headers.content_type = Some(response_writer.headers.content_type\n .clone()\n .unwrap_or(get_media_type(\"txt\").unwrap()));\n response_writer.headers.server = Some(String::from_str(\"Nickel\"));\n }\n\n pub fn send_file(&mut self, path: &Path) -> IoResult<()> {\n let mut file = try!(File::open(path));\n self.origin.headers.content_length = None;\n\n self.origin.headers.content_type = path.extension_str().and_then(get_media_type);\n self.origin.headers.server = Some(String::from_str(\"Nickel\"));\n copy(&mut file, self.origin)\n }\n}\n\n#[test]\nfn matches_content_type () {\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension_str().and_then(get_media_type).unwrap();\n\n assert_eq!(content_type.type_.as_slice(), \"text\");\n assert_eq!(content_type.subtype.as_slice(), \"plain\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::nll::region_infer::values::ToElementIndex;\nuse borrow_check::nll::region_infer::{ConstraintIndex, RegionInferenceContext};\nuse borrow_check::nll::type_check::Locations;\nuse rustc::hir::def_id::DefId;\nuse rustc::infer::error_reporting::nice_region_error::NiceRegionError;\nuse rustc::infer::InferCtxt;\nuse rustc::mir::{self, Location, Mir, Place, Rvalue, StatementKind, TerminatorKind};\nuse rustc::ty::RegionVid;\nuse rustc_data_structures::fx::FxHashSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse std::fmt;\nuse syntax_pos::Span;\n\nmod region_name;\nmod var_name;\n\n\/\/\/ Constraints that are considered interesting can be categorized to\n\/\/\/ determine why they are interesting. Order of variants indicates\n\/\/\/ sort order of the category, thereby influencing diagnostic output.\n#[derive(Debug, Eq, PartialEq, PartialOrd, Ord)]\nenum ConstraintCategory {\n Cast,\n Assignment,\n AssignmentToUpvar,\n Return,\n CallArgument,\n Other,\n Boring,\n}\n\nimpl fmt::Display for ConstraintCategory {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n ConstraintCategory::Assignment |\n ConstraintCategory::AssignmentToUpvar => write!(f, \"assignment\"),\n ConstraintCategory::Return => write!(f, \"return\"),\n ConstraintCategory::Cast => write!(f, \"cast\"),\n ConstraintCategory::CallArgument => write!(f, \"argument\"),\n _ => write!(f, \"free region\"),\n }\n }\n}\n\nimpl<'tcx> RegionInferenceContext<'tcx> {\n \/\/\/ Walks the graph of constraints (where `'a: 'b` is considered\n \/\/\/ an edge `'a -> 'b`) to find all paths from `from_region` to\n \/\/\/ `to_region`. The paths are accumulated into the vector\n \/\/\/ `results`. The paths are stored as a series of\n \/\/\/ `ConstraintIndex` values -- in other words, a list of *edges*.\n fn find_constraint_paths_between_regions(\n &self,\n from_region: RegionVid,\n target_test: impl Fn(RegionVid) -> bool,\n ) -> Vec<Vec<ConstraintIndex>> {\n let mut results = vec![];\n self.find_constraint_paths_between_regions_helper(\n from_region,\n from_region,\n &target_test,\n &mut FxHashSet::default(),\n &mut vec![],\n &mut results,\n );\n results\n }\n\n \/\/\/ Helper for `find_constraint_paths_between_regions`.\n fn find_constraint_paths_between_regions_helper(\n &self,\n from_region: RegionVid,\n current_region: RegionVid,\n target_test: &impl Fn(RegionVid) -> bool,\n visited: &mut FxHashSet<RegionVid>,\n stack: &mut Vec<ConstraintIndex>,\n results: &mut Vec<Vec<ConstraintIndex>>,\n ) {\n \/\/ Check if we already visited this region.\n if !visited.insert(current_region) {\n return;\n }\n\n \/\/ Check if we reached the region we were looking for.\n if target_test(current_region) {\n if !stack.is_empty() {\n assert_eq!(self.constraints[stack[0]].sup, from_region);\n results.push(stack.clone());\n }\n return;\n }\n\n for constraint in self.constraint_graph.outgoing_edges(current_region) {\n assert_eq!(self.constraints[constraint].sup, current_region);\n stack.push(constraint);\n self.find_constraint_paths_between_regions_helper(\n from_region,\n self.constraints[constraint].sub,\n target_test,\n visited,\n stack,\n results,\n );\n stack.pop();\n }\n }\n\n \/\/\/ This function will return true if a constraint is interesting and false if a constraint\n \/\/\/ is not. 
It is useful in filtering constraint paths to only interesting points.\n fn constraint_is_interesting(&self, index: ConstraintIndex) -> bool {\n let constraint = self.constraints[index];\n debug!(\n \"constraint_is_interesting: locations={:?} constraint={:?}\",\n constraint.locations, constraint\n );\n if let Locations::Interesting(_) = constraint.locations {\n true\n } else {\n false\n }\n }\n\n \/\/\/ This function classifies a constraint from a location.\n fn classify_constraint(\n &self,\n index: ConstraintIndex,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n ) -> (ConstraintCategory, Span) {\n let constraint = self.constraints[index];\n debug!(\"classify_constraint: constraint={:?}\", constraint);\n let span = constraint.locations.span(mir);\n let location = constraint.locations.from_location().unwrap_or(Location::START);\n\n if !self.constraint_is_interesting(index) {\n return (ConstraintCategory::Boring, span);\n }\n\n let data = &mir[location.block];\n debug!(\"classify_constraint: location={:?} data={:?}\", location, data);\n let category = if location.statement_index == data.statements.len() {\n if let Some(ref terminator) = data.terminator {\n debug!(\"classify_constraint: terminator.kind={:?}\", terminator.kind);\n match terminator.kind {\n TerminatorKind::DropAndReplace { .. } => ConstraintCategory::Assignment,\n TerminatorKind::Call { .. } => ConstraintCategory::CallArgument,\n _ => ConstraintCategory::Other,\n }\n } else {\n ConstraintCategory::Other\n }\n } else {\n let statement = &data.statements[location.statement_index];\n debug!(\"classify_constraint: statement.kind={:?}\", statement.kind);\n match statement.kind {\n StatementKind::Assign(ref place, ref rvalue) => {\n debug!(\"classify_constraint: place={:?} rvalue={:?}\", place, rvalue);\n let initial_category = if *place == Place::Local(mir::RETURN_PLACE) {\n ConstraintCategory::Return\n } else {\n match rvalue {\n Rvalue::Cast(..) => ConstraintCategory::Cast,\n Rvalue::Use(..) |\n Rvalue::Aggregate(..) => ConstraintCategory::Assignment,\n _ => ConstraintCategory::Other,\n }\n };\n\n if initial_category == ConstraintCategory::Assignment\n && place.is_upvar_field_projection(mir, &infcx.tcx).is_some() {\n ConstraintCategory::AssignmentToUpvar\n } else {\n initial_category\n }\n }\n _ => ConstraintCategory::Other,\n }\n };\n\n (category, span)\n }\n\n \/\/\/ Report an error because the universal region `fr` was required to outlive\n \/\/\/ `outlived_fr` but it is not known to do so. 
For example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }\n \/\/\/ ```\n \/\/\/\n \/\/\/ Here we would be invoked with `fr = 'a` and `outlived_fr = `'b`.\n pub(super) fn report_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n mir_def_id: DefId,\n fr: RegionVid,\n outlived_fr: RegionVid,\n blame_span: Span,\n ) {\n debug!(\"report_error(fr={:?}, outlived_fr={:?})\", fr, outlived_fr);\n\n if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {\n let tables = infcx.tcx.typeck_tables_of(mir_def_id);\n let nice = NiceRegionError::new_from_span(infcx.tcx, blame_span, o, f, Some(tables));\n if let Some(_error_reported) = nice.try_report() {\n return;\n }\n }\n\n \/\/ Find all paths\n let constraint_paths = self.find_constraint_paths_between_regions(fr, |r| r == outlived_fr);\n debug!(\"report_error: constraint_paths={:#?}\", constraint_paths);\n\n \/\/ Find the shortest such path.\n let path = constraint_paths.iter().min_by_key(|p| p.len()).unwrap();\n debug!(\"report_error: shortest_path={:?}\", path);\n\n \/\/ Classify each of the constraints along the path.\n let mut categorized_path: Vec<(ConstraintCategory, Span)> = path.iter()\n .map(|&index| self.classify_constraint(index, mir, infcx))\n .collect();\n debug!(\"report_error: categorized_path={:?}\", categorized_path);\n\n \/\/ Find what appears to be the most interesting path to report to the user.\n categorized_path.sort_by(|p0, p1| p0.0.cmp(&p1.0));\n debug!(\"report_error: sorted_path={:?}\", categorized_path);\n\n \/\/ Get a span\n let (category, span) = categorized_path.first().unwrap();\n\n match category {\n ConstraintCategory::AssignmentToUpvar =>\n self.report_closure_error(mir, infcx, fr, outlived_fr, span),\n _ =>\n self.report_general_error(mir, infcx, mir_def_id, fr, outlived_fr, category, span),\n }\n }\n\n fn report_closure_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n fr: RegionVid,\n outlived_fr: RegionVid,\n span: &Span,\n ) {\n let diag = &mut infcx.tcx.sess.struct_span_err(\n *span, &format!(\"borrowed data escapes outside of closure\"),\n );\n\n let (outlived_fr_name, outlived_fr_span) = self.get_var_name_and_span_for_region(\n infcx.tcx, mir, outlived_fr);\n\n if let Some(name) = outlived_fr_name {\n diag.span_label(\n outlived_fr_span,\n format!(\"`{}` is declared here, outside of the closure body\", name),\n );\n }\n\n let (fr_name, fr_span) = self.get_var_name_and_span_for_region(infcx.tcx, mir, fr);\n\n if let Some(name) = fr_name {\n diag.span_label(\n fr_span,\n format!(\"`{}` is a reference that is only valid in the closure body\", name),\n );\n\n diag.span_label(*span, format!(\"`{}` escapes the closure body here\", name));\n }\n\n diag.emit();\n }\n\n fn report_general_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n mir_def_id: DefId,\n fr: RegionVid,\n outlived_fr: RegionVid,\n category: &ConstraintCategory,\n span: &Span,\n ) {\n let diag = &mut infcx.tcx.sess.struct_span_err(\n *span, &format!(\"unsatisfied lifetime constraints\"), \/\/ FIXME\n );\n\n let counter = &mut 1;\n let fr_name = self.give_region_a_name(\n infcx.tcx, mir, mir_def_id, fr, counter, diag);\n let outlived_fr_name = self.give_region_a_name(\n infcx.tcx, mir, mir_def_id, outlived_fr, counter, diag);\n\n diag.span_label(*span, format!(\n \"{} requires that `{}` must outlive `{}`\",\n category, fr_name, outlived_fr_name,\n ));\n\n diag.emit();\n }\n\n \/\/ Find some constraint `X: Y` where:\n \/\/ - 
`fr1: X` transitively\n \/\/ - and `Y` is live at `elem`\n crate fn find_constraint(&self, fr1: RegionVid, elem: Location) -> RegionVid {\n let index = self.blame_constraint(fr1, elem);\n self.constraints[index].sub\n }\n\n \/\/\/ Tries to finds a good span to blame for the fact that `fr1`\n \/\/\/ contains `fr2`.\n pub(super) fn blame_constraint(\n &self,\n fr1: RegionVid,\n elem: impl ToElementIndex,\n ) -> ConstraintIndex {\n \/\/ Find everything that influenced final value of `fr`.\n let influenced_fr1 = self.dependencies(fr1);\n\n \/\/ Try to find some outlives constraint `'X: fr2` where `'X`\n \/\/ influenced `fr1`. Blame that.\n \/\/\n \/\/ NB, this is a pretty bad choice most of the time. In\n \/\/ particular, the connection between `'X` and `fr1` may not\n \/\/ be obvious to the user -- not to mention the naive notion\n \/\/ of dependencies, which doesn't account for the locations of\n \/\/ contraints at all. But it will do for now.\n let relevant_constraint = self.constraints\n .iter_enumerated()\n .filter_map(|(i, constraint)| {\n if !self.liveness_constraints.contains(constraint.sub, elem) {\n None\n } else {\n influenced_fr1[constraint.sup]\n .map(|distance| (distance, i))\n }\n })\n .min() \/\/ constraining fr1 with fewer hops *ought* to be more obvious\n .map(|(_dist, i)| i);\n\n relevant_constraint.unwrap_or_else(|| {\n bug!(\n \"could not find any constraint to blame for {:?}: {:?}\",\n fr1,\n elem,\n );\n })\n }\n\n \/\/\/ Finds all regions whose values `'a` may depend on in some way.\n \/\/\/ For each region, returns either `None` (does not influence\n \/\/\/ `'a`) or `Some(d)` which indicates that it influences `'a`\n \/\/\/ with distinct `d` (minimum number of edges that must be\n \/\/\/ traversed).\n \/\/\/\n \/\/\/ Used during error reporting, extremely naive and inefficient.\n fn dependencies(&self, r0: RegionVid) -> IndexVec<RegionVid, Option<usize>> {\n let mut result_set = IndexVec::from_elem(None, &self.definitions);\n let mut changed = true;\n result_set[r0] = Some(0); \/\/ distance 0 from `r0`\n\n while changed {\n changed = false;\n for constraint in self.constraints.iter() {\n if let Some(n) = result_set[constraint.sup] {\n let m = n + 1;\n if result_set[constraint.sub]\n .map(|distance| m < distance)\n .unwrap_or(true)\n {\n result_set[constraint.sub] = Some(m);\n changed = true;\n }\n }\n }\n }\n\n result_set\n }\n}\n<commit_msg>Modified how constraint classification happens to upvars, can now handle function call case.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::nll::region_infer::values::ToElementIndex;\nuse borrow_check::nll::region_infer::{ConstraintIndex, RegionInferenceContext};\nuse borrow_check::nll::type_check::Locations;\nuse rustc::hir::def_id::DefId;\nuse rustc::infer::error_reporting::nice_region_error::NiceRegionError;\nuse rustc::infer::InferCtxt;\nuse rustc::mir::{self, Location, Mir, Place, Rvalue, StatementKind, TerminatorKind};\nuse rustc::ty::RegionVid;\nuse rustc_data_structures::fx::FxHashSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse std::fmt;\nuse syntax_pos::Span;\n\nmod region_name;\nmod var_name;\n\n\/\/\/ Constraints that are considered interesting can be categorized to\n\/\/\/ determine why they are interesting. Order of variants indicates\n\/\/\/ sort order of the category, thereby influencing diagnostic output.\n#[derive(Debug, Eq, PartialEq, PartialOrd, Ord)]\nenum ConstraintCategory {\n Cast,\n Assignment,\n AssignmentToUpvar,\n Return,\n CallArgumentToUpvar,\n CallArgument,\n Other,\n Boring,\n}\n\nimpl fmt::Display for ConstraintCategory {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n ConstraintCategory::Assignment |\n ConstraintCategory::AssignmentToUpvar => write!(f, \"assignment\"),\n ConstraintCategory::Return => write!(f, \"return\"),\n ConstraintCategory::Cast => write!(f, \"cast\"),\n ConstraintCategory::CallArgument |\n ConstraintCategory::CallArgumentToUpvar => write!(f, \"argument\"),\n _ => write!(f, \"free region\"),\n }\n }\n}\n\nimpl<'tcx> RegionInferenceContext<'tcx> {\n \/\/\/ Walks the graph of constraints (where `'a: 'b` is considered\n \/\/\/ an edge `'a -> 'b`) to find all paths from `from_region` to\n \/\/\/ `to_region`. The paths are accumulated into the vector\n \/\/\/ `results`. The paths are stored as a series of\n \/\/\/ `ConstraintIndex` values -- in other words, a list of *edges*.\n fn find_constraint_paths_between_regions(\n &self,\n from_region: RegionVid,\n target_test: impl Fn(RegionVid) -> bool,\n ) -> Vec<Vec<ConstraintIndex>> {\n let mut results = vec![];\n self.find_constraint_paths_between_regions_helper(\n from_region,\n from_region,\n &target_test,\n &mut FxHashSet::default(),\n &mut vec![],\n &mut results,\n );\n results\n }\n\n \/\/\/ Helper for `find_constraint_paths_between_regions`.\n fn find_constraint_paths_between_regions_helper(\n &self,\n from_region: RegionVid,\n current_region: RegionVid,\n target_test: &impl Fn(RegionVid) -> bool,\n visited: &mut FxHashSet<RegionVid>,\n stack: &mut Vec<ConstraintIndex>,\n results: &mut Vec<Vec<ConstraintIndex>>,\n ) {\n \/\/ Check if we already visited this region.\n if !visited.insert(current_region) {\n return;\n }\n\n \/\/ Check if we reached the region we were looking for.\n if target_test(current_region) {\n if !stack.is_empty() {\n assert_eq!(self.constraints[stack[0]].sup, from_region);\n results.push(stack.clone());\n }\n return;\n }\n\n for constraint in self.constraint_graph.outgoing_edges(current_region) {\n assert_eq!(self.constraints[constraint].sup, current_region);\n stack.push(constraint);\n self.find_constraint_paths_between_regions_helper(\n from_region,\n self.constraints[constraint].sub,\n target_test,\n visited,\n stack,\n results,\n );\n stack.pop();\n }\n }\n\n \/\/\/ This function will return true if a constraint is interesting and false if a constraint\n \/\/\/ is not. 
It is useful in filtering constraint paths to only interesting points.\n fn constraint_is_interesting(&self, index: ConstraintIndex) -> bool {\n let constraint = self.constraints[index];\n debug!(\n \"constraint_is_interesting: locations={:?} constraint={:?}\",\n constraint.locations, constraint\n );\n if let Locations::Interesting(_) = constraint.locations {\n true\n } else {\n false\n }\n }\n\n \/\/\/ This function classifies a constraint from a location.\n fn classify_constraint(\n &self,\n index: ConstraintIndex,\n mir: &Mir<'tcx>,\n _infcx: &InferCtxt<'_, '_, 'tcx>,\n ) -> (ConstraintCategory, Span) {\n let constraint = self.constraints[index];\n debug!(\"classify_constraint: constraint={:?}\", constraint);\n let span = constraint.locations.span(mir);\n let location = constraint.locations.from_location().unwrap_or(Location::START);\n\n if !self.constraint_is_interesting(index) {\n return (ConstraintCategory::Boring, span);\n }\n\n let data = &mir[location.block];\n debug!(\"classify_constraint: location={:?} data={:?}\", location, data);\n let category = if location.statement_index == data.statements.len() {\n if let Some(ref terminator) = data.terminator {\n debug!(\"classify_constraint: terminator.kind={:?}\", terminator.kind);\n match terminator.kind {\n TerminatorKind::DropAndReplace { .. } => ConstraintCategory::Assignment,\n TerminatorKind::Call { .. } => ConstraintCategory::CallArgument,\n _ => ConstraintCategory::Other,\n }\n } else {\n ConstraintCategory::Other\n }\n } else {\n let statement = &data.statements[location.statement_index];\n debug!(\"classify_constraint: statement.kind={:?}\", statement.kind);\n match statement.kind {\n StatementKind::Assign(ref place, ref rvalue) => {\n debug!(\"classify_constraint: place={:?} rvalue={:?}\", place, rvalue);\n if *place == Place::Local(mir::RETURN_PLACE) {\n ConstraintCategory::Return\n } else {\n match rvalue {\n Rvalue::Cast(..) => ConstraintCategory::Cast,\n Rvalue::Use(..) |\n Rvalue::Aggregate(..) => ConstraintCategory::Assignment,\n _ => ConstraintCategory::Other,\n }\n }\n }\n _ => ConstraintCategory::Other,\n }\n };\n\n (category, span)\n }\n\n \/\/\/ Report an error because the universal region `fr` was required to outlive\n \/\/\/ `outlived_fr` but it is not known to do so. 
For example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }\n \/\/\/ ```\n \/\/\/\n \/\/\/ Here we would be invoked with `fr = 'a` and `outlived_fr = `'b`.\n pub(super) fn report_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n mir_def_id: DefId,\n fr: RegionVid,\n outlived_fr: RegionVid,\n blame_span: Span,\n ) {\n debug!(\"report_error(fr={:?}, outlived_fr={:?})\", fr, outlived_fr);\n\n if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {\n let tables = infcx.tcx.typeck_tables_of(mir_def_id);\n let nice = NiceRegionError::new_from_span(infcx.tcx, blame_span, o, f, Some(tables));\n if let Some(_error_reported) = nice.try_report() {\n return;\n }\n }\n\n \/\/ Find all paths\n let constraint_paths = self.find_constraint_paths_between_regions(fr, |r| r == outlived_fr);\n debug!(\"report_error: constraint_paths={:#?}\", constraint_paths);\n\n \/\/ Find the shortest such path.\n let path = constraint_paths.iter().min_by_key(|p| p.len()).unwrap();\n debug!(\"report_error: shortest_path={:?}\", path);\n\n \/\/ Classify each of the constraints along the path.\n let mut categorized_path: Vec<(ConstraintCategory, Span)> = path.iter()\n .map(|&index| self.classify_constraint(index, mir, infcx))\n .collect();\n debug!(\"report_error: categorized_path={:?}\", categorized_path);\n\n \/\/ Find what appears to be the most interesting path to report to the user.\n categorized_path.sort_by(|p0, p1| p0.0.cmp(&p1.0));\n debug!(\"report_error: sorted_path={:?}\", categorized_path);\n\n \/\/ Get a span\n let (category, span) = categorized_path.first().unwrap();\n\n let category = match (\n category,\n self.universal_regions.is_local_free_region(fr),\n self.universal_regions.is_local_free_region(outlived_fr),\n ) {\n (ConstraintCategory::Assignment, true, false) =>\n &ConstraintCategory::AssignmentToUpvar,\n (ConstraintCategory::CallArgument, true, false) =>\n &ConstraintCategory::CallArgumentToUpvar,\n (category, _, _) => category,\n };\n\n debug!(\"report_error: category={:?}\", category);\n match category {\n ConstraintCategory::AssignmentToUpvar |\n ConstraintCategory::CallArgumentToUpvar =>\n self.report_closure_error(mir, infcx, fr, outlived_fr, span),\n _ =>\n self.report_general_error(mir, infcx, mir_def_id, fr, outlived_fr, category, span),\n }\n }\n\n fn report_closure_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n fr: RegionVid,\n outlived_fr: RegionVid,\n span: &Span,\n ) {\n let diag = &mut infcx.tcx.sess.struct_span_err(\n *span, &format!(\"borrowed data escapes outside of closure\"),\n );\n\n let (outlived_fr_name, outlived_fr_span) = self.get_var_name_and_span_for_region(\n infcx.tcx, mir, outlived_fr);\n\n if let Some(name) = outlived_fr_name {\n diag.span_label(\n outlived_fr_span,\n format!(\"`{}` is declared here, outside of the closure body\", name),\n );\n }\n\n let (fr_name, fr_span) = self.get_var_name_and_span_for_region(infcx.tcx, mir, fr);\n\n if let Some(name) = fr_name {\n diag.span_label(\n fr_span,\n format!(\"`{}` is a reference that is only valid in the closure body\", name),\n );\n\n diag.span_label(*span, format!(\"`{}` escapes the closure body here\", name));\n }\n\n diag.emit();\n }\n\n fn report_general_error(\n &self,\n mir: &Mir<'tcx>,\n infcx: &InferCtxt<'_, '_, 'tcx>,\n mir_def_id: DefId,\n fr: RegionVid,\n outlived_fr: RegionVid,\n category: &ConstraintCategory,\n span: &Span,\n ) {\n let diag = &mut infcx.tcx.sess.struct_span_err(\n *span, 
&format!(\"unsatisfied lifetime constraints\"), \/\/ FIXME\n );\n\n let counter = &mut 1;\n let fr_name = self.give_region_a_name(\n infcx.tcx, mir, mir_def_id, fr, counter, diag);\n let outlived_fr_name = self.give_region_a_name(\n infcx.tcx, mir, mir_def_id, outlived_fr, counter, diag);\n\n diag.span_label(*span, format!(\n \"{} requires that `{}` must outlive `{}`\",\n category, fr_name, outlived_fr_name,\n ));\n\n diag.emit();\n }\n\n \/\/ Find some constraint `X: Y` where:\n \/\/ - `fr1: X` transitively\n \/\/ - and `Y` is live at `elem`\n crate fn find_constraint(&self, fr1: RegionVid, elem: Location) -> RegionVid {\n let index = self.blame_constraint(fr1, elem);\n self.constraints[index].sub\n }\n\n \/\/\/ Tries to finds a good span to blame for the fact that `fr1`\n \/\/\/ contains `fr2`.\n pub(super) fn blame_constraint(\n &self,\n fr1: RegionVid,\n elem: impl ToElementIndex,\n ) -> ConstraintIndex {\n \/\/ Find everything that influenced final value of `fr`.\n let influenced_fr1 = self.dependencies(fr1);\n\n \/\/ Try to find some outlives constraint `'X: fr2` where `'X`\n \/\/ influenced `fr1`. Blame that.\n \/\/\n \/\/ NB, this is a pretty bad choice most of the time. In\n \/\/ particular, the connection between `'X` and `fr1` may not\n \/\/ be obvious to the user -- not to mention the naive notion\n \/\/ of dependencies, which doesn't account for the locations of\n \/\/ contraints at all. But it will do for now.\n let relevant_constraint = self.constraints\n .iter_enumerated()\n .filter_map(|(i, constraint)| {\n if !self.liveness_constraints.contains(constraint.sub, elem) {\n None\n } else {\n influenced_fr1[constraint.sup]\n .map(|distance| (distance, i))\n }\n })\n .min() \/\/ constraining fr1 with fewer hops *ought* to be more obvious\n .map(|(_dist, i)| i);\n\n relevant_constraint.unwrap_or_else(|| {\n bug!(\n \"could not find any constraint to blame for {:?}: {:?}\",\n fr1,\n elem,\n );\n })\n }\n\n \/\/\/ Finds all regions whose values `'a` may depend on in some way.\n \/\/\/ For each region, returns either `None` (does not influence\n \/\/\/ `'a`) or `Some(d)` which indicates that it influences `'a`\n \/\/\/ with distinct `d` (minimum number of edges that must be\n \/\/\/ traversed).\n \/\/\/\n \/\/\/ Used during error reporting, extremely naive and inefficient.\n fn dependencies(&self, r0: RegionVid) -> IndexVec<RegionVid, Option<usize>> {\n let mut result_set = IndexVec::from_elem(None, &self.definitions);\n let mut changed = true;\n result_set[r0] = Some(0); \/\/ distance 0 from `r0`\n\n while changed {\n changed = false;\n for constraint in self.constraints.iter() {\n if let Some(n) = result_set[constraint.sup] {\n let m = n + 1;\n if result_set[constraint.sub]\n .map(|distance| m < distance)\n .unwrap_or(true)\n {\n result_set[constraint.sub] = Some(m);\n changed = true;\n }\n }\n }\n }\n\n result_set\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add reader benchmark (#147)<commit_after>\/\/ Licensed to the Apache Software Foundation (ASF) under one\n\/\/ or more contributor license agreements. See the NOTICE file\n\/\/ distributed with this work for additional information\n\/\/ regarding copyright ownership. The ASF licenses this file\n\/\/ to you under the Apache License, Version 2.0 (the\n\/\/ \"License\"); you may not use this file except in compliance\n\/\/ with the License. 
You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing,\n\/\/ software distributed under the License is distributed on an\n\/\/ \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\/\/ KIND, either express or implied. See the License for the\n\/\/ specific language governing permissions and limitations\n\/\/ under the License.\n\n#![feature(test)]\nextern crate parquet;\nextern crate test;\n\nuse std::fs::File;\nuse std::path::Path;\n\nuse parquet::file::reader::{FileReader, SerializedFileReader};\nuse test::Bencher;\n\n#[bench]\nfn record_reader_10k_collect(bench: &mut Bencher) {\n let path = Path::new(\"data\/10k-v2.parquet\");\n let file = File::open(&path).unwrap();\n let len = file.metadata().unwrap().len();\n let parquet_reader = SerializedFileReader::new(file).unwrap();\n\n bench.bytes = len;\n bench.iter(|| {\n let iter = parquet_reader.get_row_iter(None).unwrap();\n let _ = iter.collect::<Vec<_>>();\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore(examples): add ping\/pong low level frame access example<commit_after>\/\/\/ An example demonstrating how to send and recieve a custom ping\/pong frame.\n\/\/\/ This example also shows using a separate thread to represent another\n\/\/\/ component of your system as well as how to use a custom Factory.\nextern crate ws;\nextern crate env_logger;\nextern crate time;\n\nuse std::thread;\nuse std::thread::sleep;\nuse std::time::Duration;\nuse std::str::from_utf8;\nuse std::sync::mpsc::channel;\nuse std::sync::mpsc::Sender as ThreadOut;\n\nuse ws::{Builder, CloseCode, OpCode, Sender, Frame, Factory, Handler, Message, Result};\n\nfn main () {\n\n \/\/ Setup logging\n env_logger::init().unwrap();\n\n \/\/ Set timer channel\n let (tx, rx) = channel();\n\n \/\/ Create WebSocket\n let socket = ws::Builder::new().build(ServerFactory { timer: tx }).unwrap();\n\n \/\/ Get broadcaster for timer\n let all = socket.broadcaster();\n\n \/\/ Start timer thread\n let timer = thread::spawn(move || {\n loop {\n \/\/ Test latency every 5 seconds\n sleep(Duration::from_secs(5));\n\n \/\/ Check if timer needs to go down\n if let Ok(_) = rx.try_recv() {\n println!(\"Timer is going down.\");\n break;\n }\n\n \/\/ Ping all connections with current time\n if let Err(err) = all.ping(time::precise_time_ns().to_string().into()) {\n println!(\"Unable to ping connections: {:?}\", err);\n println!(\"Timer is going down.\");\n break;\n }\n }\n });\n\n \/\/ Start WebSocket server\n socket.listen(\"127.0.0.1:3012\").unwrap();\n\n \/\/ Once the server is done, wait for the timer to shutdown as well\n timer.join().unwrap();\n}\n\nstruct ServerFactory {\n timer: ThreadOut<usize>,\n}\n\nimpl Factory for ServerFactory {\n type Handler = Server;\n\n fn connection_made(&mut self, out: Sender) -> Self::Handler {\n Server { out: out }\n }\n\n fn on_shutdown(&mut self) {\n if let Err(err) = self.timer.send(0) {\n println!(\"Unable to shut down timer: {:?}\", err)\n }\n }\n\n}\n\n\/\/ For accessing the default handler implementation\nstruct DefaultHandler;\n\nimpl Handler for DefaultHandler {}\n\n\/\/ Server WebSocket handler\nstruct Server {\n out: Sender,\n}\n\nimpl Handler for Server {\n\n fn on_message(&mut self, msg: Message) -> Result<()> {\n println!(\"Server got message '{}'. 
\", msg);\n self.out.send(msg)\n }\n\n fn on_close(&mut self, code: CloseCode, reason: &str) {\n println!(\"WebSocket closing for ({:?}) {}\", code, reason);\n println!(\"Shutting down server after first connection closes.\");\n self.out.shutdown().unwrap();\n }\n\n fn on_frame(&mut self, frame: Frame) -> Result<Option<Frame>> {\n if frame.opcode() == OpCode::Pong {\n if let Ok(pong) = try!(from_utf8(frame.payload())).parse::<u64>() {\n let now = time::precise_time_ns();\n println!(\"Latency is {:.3}ms.\", (now - pong) as f64 \/ 1_000_000f64);\n } else {\n println!(\"Received bad pong.\");\n }\n }\n\n \/\/ Run default frame validation\n DefaultHandler.on_frame(frame)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks if the correct registers are being used to pass arguments\n\/\/ when the sysv64 ABI is specified.\n\n\/\/ compile-flags: -Zepoch=2018\n\npub trait Foo {}\n\n\/\/ should compile without the dyn trait feature flag\nfn foo(x: &dyn Foo) {}\n\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-pass\n\n\/\/ Bastion of the Turbofish\n\/\/ ------------------------\n\/\/ Beware travellers, lest you venture into waters callous and unforgiving,\n\/\/ where hope must be abandoned, ere it is cruelly torn from you. For here\n\/\/ stands the bastion of the Turbofish: an impenetrable fortress holding\n\/\/ unshaking against those who would dare suggest the supererogation of the\n\/\/ Turbofish.\n\/\/\n\/\/ Once I was young and foolish and had the impudence to imagine that I could\n\/\/ shake free from the coils by which that creature had us tightly bound. I\n\/\/ dared to suggest that there was a better way: a brighter future, in which\n\/\/ Rustaceans both new and old could be rid of that vile beast. But alas! In\n\/\/ my foolhardiness my ignorance was unveiled and my dreams were dashed\n\/\/ unforgivingly against the rock of syntactic ambiguity.\n\/\/\n\/\/ This humble program, small and insignificant though it might seem,\n\/\/ demonstrates that to which we had previously cast a blind eye: an ambiguity\n\/\/ in permitting generic arguments to be provided without the consent of the\n\/\/ Great Turbofish. Should you be so naïve as to try to revolt against its\n\/\/ mighty clutches, here shall its wrath be indomitably displayed. This\n\/\/ program must pass for all eternity, fundamentally at odds with an impetuous\n\/\/ rebellion against the Turbofish.\n\/\/\n\/\/ My heart aches in sorrow, for I know I am defeated. 
Let this be a warning\n\/\/ to all those who come after. Here stands the bastion of the Turbofish.\n\nfn main() {\n let (oh, woe, is, me) = (\"the\", \"Turbofish\", \"remains\", \"undefeated\");\n let _: (bool, bool) = (oh<woe, is>(me));\n}\n<commit_msg>Rollup merge of #53562 - varkor:bastion-of-the-turbofish, r=nagisa<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-pass\n\n\/\/ Bastion of the Turbofish\n\/\/ ------------------------\n\/\/ Beware travellers, lest you venture into waters callous and unforgiving,\n\/\/ where hope must be abandoned, ere it is cruelly torn from you. For here\n\/\/ stands the bastion of the Turbofish: an impenetrable fortress holding\n\/\/ unshaking against those who would dare suggest the supererogation of the\n\/\/ Turbofish.\n\/\/\n\/\/ Once I was young and foolish and had the impudence to imagine that I could\n\/\/ shake free from the coils by which that creature had us tightly bound. I\n\/\/ dared to suggest that there was a better way: a brighter future, in which\n\/\/ Rustaceans both new and old could be rid of that vile beast. But alas! In\n\/\/ my foolhardiness my ignorance was unveiled and my dreams were dashed\n\/\/ unforgivingly against the rock of syntactic ambiguity.\n\/\/\n\/\/ This humble program, small and insignificant though it might seem,\n\/\/ demonstrates that to which we had previously cast a blind eye: an ambiguity\n\/\/ in permitting generic arguments to be provided without the consent of the\n\/\/ Great Turbofish. Should you be so naïve as to try to revolt against its\n\/\/ mighty clutches, here shall its wrath be indomitably displayed. This\n\/\/ program must pass for all eternity, fundamentally at odds with an impetuous\n\/\/ rebellion against the Turbofish.\n\/\/\n\/\/ My heart aches in sorrow, for I know I am defeated. Let this be a warning\n\/\/ to all those who come after. 
Here stands the bastion of the Turbofish.\n\n\/\/ See https:\/\/github.com\/rust-lang\/rust\/pull\/53562\n\/\/ and https:\/\/github.com\/rust-lang\/rfcs\/pull\/2527\n\/\/ for context.\n\nfn main() {\n let (oh, woe, is, me) = (\"the\", \"Turbofish\", \"remains\", \"undefeated\");\n let _: (bool, bool) = (oh<woe, is>(me));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Long Jump + Test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial tests<commit_after><|endoftext|>"} {"text":"<commit_before>\nuse std;\nuse input;\nuse {\n Action,\n After,\n AlwaysSucceed,\n Behavior,\n Event,\n Failure,\n If,\n Input,\n Fail,\n Pressed,\n Released,\n Running,\n Select,\n Sequence,\n Status,\n Success,\n Update,\n UpdateArgs,\n UpdateEvent,\n Wait,\n WaitForever,\n WhenAll,\n WhenAny,\n While,\n};\n\n\/\/\/ Keeps track of a behavior.\n#[deriving(Clone, PartialEq)]\npub enum State<A, S> {\n \/\/\/ Returns `Success` when button is pressed.\n PressedState(input::Button),\n \/\/\/ Returns `Success` when button is released.\n ReleasedState(input::Button),\n \/\/\/ Executes an action.\n ActionState(A, Option<S>),\n \/\/\/ Converts `Success` into `Failure` and vice versa.\n FailState(Box<State<A, S>>),\n \/\/\/ Ignores failures and always return `Success`.\n AlwaysSucceedState(Box<State<A, S>>),\n \/\/\/ Keeps track of waiting for a period of time before continuing.\n \/\/\/\n \/\/\/ f64: Total time in seconds to wait\n \/\/\/\n \/\/\/ f64: Time elapsed in seconds\n WaitState(f64, f64),\n \/\/\/ Waits forever.\n WaitForeverState,\n \/\/\/ Keeps track of an `If` behavior.\n \/\/\/ If status is `Running`, then it evaluates the condition.\n \/\/\/ If status is `Success`, then it evaluates the success behavior.\n \/\/\/ If status is `Failure`, then it evaluates the failure behavior.\n IfState(Box<Behavior<A>>, Box<Behavior<A>>, Status, Box<State<A, S>>),\n \/\/\/ Keeps track of a `Select` behavior.\n SelectState(Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of an `Sequence` behavior.\n SequenceState(Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of a `While` behavior.\n WhileState(Box<State<A, S>>, Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of a `WhenAll` behavior.\n WhenAllState(Vec<Option<State<A, S>>>),\n \/\/\/ Keeps track of a `WhenAny` behavior.\n WhenAnyState(Vec<Option<State<A, S>>>),\n \/\/\/ Keeps track of an `After` behavior.\n AfterState(uint, Vec<State<A, S>>),\n}\n\n\/\/ `Sequence` and `Select` share same algorithm.\n\/\/\n\/\/ `Sequence` fails if any fails and succeeds when all succeeds.\n\/\/ `Select` succeeds if any succeeds and fails when all fails.\nfn sequence<A: Clone, S>(\n select: bool,\n seq: &Vec<Behavior<A>>,\n i: &mut uint,\n cursor: &mut Box<State<A, S>>,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n) -> (Status, f64) {\n let (status, inv_status) = if select {\n \/\/ `Select`\n (Failure, Success)\n } else {\n \/\/ `Sequence`\n (Success, Failure)\n };\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cursor.event(\n match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n },\n |dt, a, s| f(dt, a, s)) {\n (Running, _) => { break; },\n (s, new_dt) if s == inv_status => {\n return (inv_status, new_dt);\n }\n (s, new_dt) if s == status => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta 
time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n \/\/ If this is the last event, then the sequence succeeded.\n _ => if *i == seq.len() - 1 {\n return (status, new_dt)\n } else {\n return (Running, 0.0)\n }\n }\n }\n _ => unreachable!()\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (status, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cursor = State::new(seq[*i].clone());\n }\n (Running, 0.0)\n}\n\n\/\/ `WhenAll` and `WhenAny` share same algorithm.\n\/\/\n\/\/ `WhenAll` fails if any fails and succeeds when all succeeds.\n\/\/ `WhenAny` succeeds if any succeeds and fails when all fails.\nfn when_all<A: Clone, S>(\n any: bool,\n cursors: &mut Vec<Option<State<A, S>>>,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n) -> (Status, f64) {\n let (status, inv_status) = if any {\n \/\/ `WhenAny`\n (Failure, Success)\n } else {\n \/\/ `WhenAll`\n (Success, Failure)\n };\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n \/\/ Count number of terminated events.\n let mut terminated = 0;\n for cur in cursors.iter_mut() {\n match *cur {\n None => {}\n Some(ref mut cur) => {\n match cur.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => { continue; },\n (s, new_dt) if s == inv_status => {\n \/\/ Fail for `WhenAll`.\n \/\/ Succeed for `WhenAny`.\n return (inv_status, new_dt);\n }\n (s, new_dt) if s == status => {\n min_dt = min_dt.min(new_dt);\n }\n _ => unreachable!()\n }\n }\n }\n\n terminated += 1;\n *cur = None;\n }\n match terminated {\n \/\/ If there are no events, there is a whole 'dt' left.\n 0 if cursors.len() == 0 => (status, match *e {\n Update(UpdateArgs { dt }) => dt,\n \/\/ Other kind of events happen instantly.\n _ => 0.0\n }),\n \/\/ If all events terminated, the least delta time is left.\n n if cursors.len() == n => (status, min_dt),\n _ => (Running, 0.0)\n }\n}\n\nimpl<A: Clone, S> State<A, S> {\n \/\/\/ Creates a state from a behavior.\n pub fn new(behavior: Behavior<A>) -> State<A, S> {\n match behavior {\n Pressed(button) => PressedState(button),\n Released(button) => ReleasedState(button),\n Action(action) => ActionState(action, None),\n Fail(ev) => FailState(box State::new(*ev)),\n AlwaysSucceed(ev) => AlwaysSucceedState(box State::new(*ev)),\n Wait(dt) => WaitState(dt, 0.0),\n WaitForever => WaitForeverState,\n If(condition, success, failure) => {\n let state = State::new(*condition);\n IfState(success, failure, Running, box state)\n }\n Select(sel) => {\n let state = State::new(sel[0].clone());\n SelectState(sel, 0, box state)\n }\n Sequence(seq) => {\n let state = State::new(seq[0].clone());\n SequenceState(seq, 0, box state)\n }\n While(ev, rep) => {\n let state = State::new(rep[0].clone());\n WhileState(box State::new(*ev), rep, 0, box state)\n }\n WhenAll(all)\n => WhenAllState(all.into_iter().map(\n |ev| Some(State::new(ev))).collect()),\n WhenAny(all)\n => WhenAnyState(all.into_iter().map(\n |ev| Some(State::new(ev))).collect()),\n After(seq)\n => AfterState(0, seq.into_iter().map(\n |ev| State::new(ev)).collect()),\n }\n }\n\n \/\/\/ Updates the cursor that tracks an event.\n \/\/\/\n \/\/\/ The action need to return status and remaining delta time.\n \/\/\/ Returns status and the remaining delta time.\n pub fn event(\n &mut self,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n ) -> (Status, f64) {\n 
match (e, self) {\n (&Input(input::Press(button_pressed)), &PressedState(button))\n if button_pressed == button => {\n \/\/ Button press is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Input(input::Release(button_released)), &ReleasedState(button))\n if button_released == button => {\n \/\/ Button release is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Update(UpdateArgs { dt }),\n &ActionState(ref action, ref mut state)) => {\n \/\/ Execute action.\n f(dt, action, state)\n }\n (_, &FailState(ref mut cur)) => {\n match cur.event(e, f) {\n (Running, dt) => (Running, dt),\n (Failure, dt) => (Success, dt),\n (Success, dt) => (Failure, dt),\n }\n }\n (_, &AlwaysSucceedState(ref mut cur)) => {\n match cur.event(e, f) {\n (Running, dt) => (Running, dt),\n (_, dt) => (Success, dt),\n }\n }\n (&Update(UpdateArgs { dt }), &WaitState(wait_t, ref mut t)) => {\n if *t + dt >= wait_t {\n let remaining_dt = *t + dt - wait_t;\n *t = wait_t;\n (Success, remaining_dt)\n } else {\n *t += dt;\n (Running, 0.0)\n }\n }\n (_, &IfState(ref success, ref failure,\n ref mut status, ref mut state)) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n \/\/ Run in a loop to evaluate success or failure with\n \/\/ remaining delta time after condition.\n loop {\n *status = match *status {\n Running => {\n match state.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, dt) => { return (Running, dt); },\n (Success, dt) => {\n **state = State::new((**success).clone());\n remaining_dt = dt;\n Success\n }\n (Failure, dt) => {\n **state = State::new((**failure).clone());\n remaining_dt = dt;\n Failure\n }\n }\n }\n _ => {\n return state.event(match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(\n remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n }, |dt, a, s| f(dt, a, s));\n }\n }\n }\n }\n (_, &SelectState(ref seq, ref mut i, ref mut cursor)) => {\n let select = true;\n sequence(select, seq, i, cursor, e, f)\n }\n (_, &SequenceState(ref seq, ref mut i, ref mut cursor)) => {\n let select = false;\n sequence(select, seq, i, cursor, e, f)\n }\n (_, &WhileState(ref mut ev_cursor, ref rep, ref mut i,\n ref mut cursor)) => {\n \/\/ If the event terminates, do not execute the loop.\n match ev_cursor.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => {}\n x => return x,\n };\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n loop {\n match cur.event(match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(\n remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n },\n |dt, a, s| f(dt, a, s)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n _ => return (Running, 0.0)\n }\n }\n };\n *i += 1;\n \/\/ If end of repeated events,\n \/\/ start over from the first one.\n if *i >= rep.len() { *i = 0; }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(rep[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &WhenAllState(ref mut cursors)) => {\n let any = false;\n when_all(any, cursors, e, f)\n }\n (_, &WhenAnyState(ref mut cursors)) => {\n let any = true;\n 
when_all(any, cursors, e, f)\n }\n (_, &AfterState(ref mut i, ref mut cursors)) => {\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n for j in range(*i, cursors.len()) {\n match cursors.get_mut(j).event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => { min_dt = 0.0; }\n (Success, new_dt) => {\n \/\/ Remaining delta time must be less to succeed.\n if *i == j && new_dt < min_dt {\n *i += 1;\n min_dt = new_dt;\n } else {\n \/\/ Return least delta time because\n \/\/ that is when failure is detected.\n return (Failure, min_dt.min(new_dt));\n }\n }\n (Failure, new_dt) => {\n return (Failure, new_dt);\n }\n };\n }\n if *i == cursors.len() {\n (Success, min_dt)\n } else {\n (Running, 0.0)\n }\n }\n _ => (Running, 0.0)\n }\n }\n}\n<commit_msg>Generic press and release<commit_after>\nuse std;\nuse input;\nuse {\n Action,\n After,\n AlwaysSucceed,\n Behavior,\n Event,\n Failure,\n If,\n Fail,\n Pressed,\n PressEvent,\n Released,\n ReleaseEvent,\n Running,\n Select,\n Sequence,\n Status,\n Success,\n Update,\n UpdateArgs,\n UpdateEvent,\n Wait,\n WaitForever,\n WhenAll,\n WhenAny,\n While,\n};\n\npub static RUNNING: (Status, f64) = (Running, 0.0);\n\n\/\/\/ Keeps track of a behavior.\n#[deriving(Clone, PartialEq)]\npub enum State<A, S> {\n \/\/\/ Returns `Success` when button is pressed.\n PressedState(input::Button),\n \/\/\/ Returns `Success` when button is released.\n ReleasedState(input::Button),\n \/\/\/ Executes an action.\n ActionState(A, Option<S>),\n \/\/\/ Converts `Success` into `Failure` and vice versa.\n FailState(Box<State<A, S>>),\n \/\/\/ Ignores failures and always return `Success`.\n AlwaysSucceedState(Box<State<A, S>>),\n \/\/\/ Keeps track of waiting for a period of time before continuing.\n \/\/\/\n \/\/\/ f64: Total time in seconds to wait\n \/\/\/\n \/\/\/ f64: Time elapsed in seconds\n WaitState(f64, f64),\n \/\/\/ Waits forever.\n WaitForeverState,\n \/\/\/ Keeps track of an `If` behavior.\n \/\/\/ If status is `Running`, then it evaluates the condition.\n \/\/\/ If status is `Success`, then it evaluates the success behavior.\n \/\/\/ If status is `Failure`, then it evaluates the failure behavior.\n IfState(Box<Behavior<A>>, Box<Behavior<A>>, Status, Box<State<A, S>>),\n \/\/\/ Keeps track of a `Select` behavior.\n SelectState(Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of an `Sequence` behavior.\n SequenceState(Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of a `While` behavior.\n WhileState(Box<State<A, S>>, Vec<Behavior<A>>, uint, Box<State<A, S>>),\n \/\/\/ Keeps track of a `WhenAll` behavior.\n WhenAllState(Vec<Option<State<A, S>>>),\n \/\/\/ Keeps track of a `WhenAny` behavior.\n WhenAnyState(Vec<Option<State<A, S>>>),\n \/\/\/ Keeps track of an `After` behavior.\n AfterState(uint, Vec<State<A, S>>),\n}\n\n\/\/ `Sequence` and `Select` share same algorithm.\n\/\/\n\/\/ `Sequence` fails if any fails and succeeds when all succeeds.\n\/\/ `Select` succeeds if any succeeds and fails when all fails.\nfn sequence<A: Clone, S>(\n select: bool,\n seq: &Vec<Behavior<A>>,\n i: &mut uint,\n cursor: &mut Box<State<A, S>>,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n) -> (Status, f64) {\n let (status, inv_status) = if select {\n \/\/ `Select`\n (Failure, Success)\n } else {\n \/\/ `Sequence`\n (Success, Failure)\n };\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cursor.event(\n 
match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n },\n |dt, a, s| f(dt, a, s)) {\n (Running, _) => { break; },\n (s, new_dt) if s == inv_status => {\n return (inv_status, new_dt);\n }\n (s, new_dt) if s == status => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n \/\/ If this is the last event, then the sequence succeeded.\n _ => if *i == seq.len() - 1 {\n return (status, new_dt)\n } else {\n return RUNNING\n }\n }\n }\n _ => unreachable!()\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (status, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cursor = State::new(seq[*i].clone());\n }\n RUNNING\n}\n\n\/\/ `WhenAll` and `WhenAny` share same algorithm.\n\/\/\n\/\/ `WhenAll` fails if any fails and succeeds when all succeeds.\n\/\/ `WhenAny` succeeds if any succeeds and fails when all fails.\nfn when_all<A: Clone, S>(\n any: bool,\n cursors: &mut Vec<Option<State<A, S>>>,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n) -> (Status, f64) {\n let (status, inv_status) = if any {\n \/\/ `WhenAny`\n (Failure, Success)\n } else {\n \/\/ `WhenAll`\n (Success, Failure)\n };\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n \/\/ Count number of terminated events.\n let mut terminated = 0;\n for cur in cursors.iter_mut() {\n match *cur {\n None => {}\n Some(ref mut cur) => {\n match cur.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => { continue; },\n (s, new_dt) if s == inv_status => {\n \/\/ Fail for `WhenAll`.\n \/\/ Succeed for `WhenAny`.\n return (inv_status, new_dt);\n }\n (s, new_dt) if s == status => {\n min_dt = min_dt.min(new_dt);\n }\n _ => unreachable!()\n }\n }\n }\n\n terminated += 1;\n *cur = None;\n }\n match terminated {\n \/\/ If there are no events, there is a whole 'dt' left.\n 0 if cursors.len() == 0 => (status, match *e {\n Update(UpdateArgs { dt }) => dt,\n \/\/ Other kind of events happen instantly.\n _ => 0.0\n }),\n \/\/ If all events terminated, the least delta time is left.\n n if cursors.len() == n => (status, min_dt),\n _ => RUNNING\n }\n}\n\nimpl<A: Clone, S> State<A, S> {\n \/\/\/ Creates a state from a behavior.\n pub fn new(behavior: Behavior<A>) -> State<A, S> {\n match behavior {\n Pressed(button) => PressedState(button),\n Released(button) => ReleasedState(button),\n Action(action) => ActionState(action, None),\n Fail(ev) => FailState(box State::new(*ev)),\n AlwaysSucceed(ev) => AlwaysSucceedState(box State::new(*ev)),\n Wait(dt) => WaitState(dt, 0.0),\n WaitForever => WaitForeverState,\n If(condition, success, failure) => {\n let state = State::new(*condition);\n IfState(success, failure, Running, box state)\n }\n Select(sel) => {\n let state = State::new(sel[0].clone());\n SelectState(sel, 0, box state)\n }\n Sequence(seq) => {\n let state = State::new(seq[0].clone());\n SequenceState(seq, 0, box state)\n }\n While(ev, rep) => {\n let state = State::new(rep[0].clone());\n WhileState(box State::new(*ev), rep, 0, box state)\n }\n WhenAll(all)\n => WhenAllState(all.into_iter().map(\n |ev| Some(State::new(ev))).collect()),\n WhenAny(all)\n => WhenAnyState(all.into_iter().map(\n |ev| Some(State::new(ev))).collect()),\n After(seq)\n => AfterState(0, seq.into_iter().map(\n |ev| 
State::new(ev)).collect()),\n }\n }\n\n \/\/\/ Updates the cursor that tracks an event.\n \/\/\/\n \/\/\/ The action need to return status and remaining delta time.\n \/\/\/ Returns status and the remaining delta time.\n pub fn event(\n &mut self,\n e: &Event,\n f: |dt: f64, action: &A, state: &mut Option<S>| -> (Status, f64)\n ) -> (Status, f64) {\n match (e, self) {\n (_, &PressedState(button)) => {\n e.press(|button_pressed| {\n if button_pressed != button { return RUNNING; }\n\n \/\/ Button press is considered to happen instantly.\n \/\/ There is no remaining delta time because\n \/\/ this is input event.\n (Success, 0.0)\n }).unwrap_or(RUNNING)\n }\n (_, &ReleasedState(button)) => {\n e.release(|button_released| {\n if button_released != button { return RUNNING; }\n\n \/\/ Button release is considered to happen instantly.\n \/\/ There is no remaining delta time because\n \/\/ this is input event.\n (Success, 0.0)\n }).unwrap_or(RUNNING)\n }\n (&Update(UpdateArgs { dt }),\n &ActionState(ref action, ref mut state)) => {\n \/\/ Execute action.\n f(dt, action, state)\n }\n (_, &FailState(ref mut cur)) => {\n match cur.event(e, f) {\n (Running, dt) => (Running, dt),\n (Failure, dt) => (Success, dt),\n (Success, dt) => (Failure, dt),\n }\n }\n (_, &AlwaysSucceedState(ref mut cur)) => {\n match cur.event(e, f) {\n (Running, dt) => (Running, dt),\n (_, dt) => (Success, dt),\n }\n }\n (&Update(UpdateArgs { dt }), &WaitState(wait_t, ref mut t)) => {\n if *t + dt >= wait_t {\n let remaining_dt = *t + dt - wait_t;\n *t = wait_t;\n (Success, remaining_dt)\n } else {\n *t += dt;\n RUNNING\n }\n }\n (_, &IfState(ref success, ref failure,\n ref mut status, ref mut state)) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n \/\/ Run in a loop to evaluate success or failure with\n \/\/ remaining delta time after condition.\n loop {\n *status = match *status {\n Running => {\n match state.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, dt) => { return (Running, dt); },\n (Success, dt) => {\n **state = State::new((**success).clone());\n remaining_dt = dt;\n Success\n }\n (Failure, dt) => {\n **state = State::new((**failure).clone());\n remaining_dt = dt;\n Failure\n }\n }\n }\n _ => {\n return state.event(match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(\n remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n }, |dt, a, s| f(dt, a, s));\n }\n }\n }\n }\n (_, &SelectState(ref seq, ref mut i, ref mut cursor)) => {\n let select = true;\n sequence(select, seq, i, cursor, e, f)\n }\n (_, &SequenceState(ref seq, ref mut i, ref mut cursor)) => {\n let select = false;\n sequence(select, seq, i, cursor, e, f)\n }\n (_, &WhileState(ref mut ev_cursor, ref rep, ref mut i,\n ref mut cursor)) => {\n \/\/ If the event terminates, do not execute the loop.\n match ev_cursor.event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => {}\n x => return x,\n };\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n loop {\n match cur.event(match *e {\n Update(_) => {\n remaining_e = UpdateEvent::from_dt(\n remaining_dt).unwrap();\n &remaining_e\n }\n _ => e\n },\n |dt, a, s| f(dt, a, s)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n _ => return RUNNING\n }\n }\n 
};\n *i += 1;\n \/\/ If end of repeated events,\n \/\/ start over from the first one.\n if *i >= rep.len() { *i = 0; }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(rep[*i].clone());\n }\n RUNNING\n }\n (_, &WhenAllState(ref mut cursors)) => {\n let any = false;\n when_all(any, cursors, e, f)\n }\n (_, &WhenAnyState(ref mut cursors)) => {\n let any = true;\n when_all(any, cursors, e, f)\n }\n (_, &AfterState(ref mut i, ref mut cursors)) => {\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n for j in range(*i, cursors.len()) {\n match cursors.get_mut(j).event(e, |dt, a, s| f(dt, a, s)) {\n (Running, _) => { min_dt = 0.0; }\n (Success, new_dt) => {\n \/\/ Remaining delta time must be less to succeed.\n if *i == j && new_dt < min_dt {\n *i += 1;\n min_dt = new_dt;\n } else {\n \/\/ Return least delta time because\n \/\/ that is when failure is detected.\n return (Failure, min_dt.min(new_dt));\n }\n }\n (Failure, new_dt) => {\n return (Failure, new_dt);\n }\n };\n }\n if *i == cursors.len() {\n (Success, min_dt)\n } else {\n RUNNING\n }\n }\n _ => RUNNING\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013-2014 Simon Sapin.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse std::char;\nuse std::num::from_str_radix;\nuse std::path;\nuse super::{UrlParser, Url, SchemeData, RelativeSchemeData, Host};\n\n\n#[test]\nfn url_parsing() {\n for test in parse_test_data(include_str!(\"urltestdata.txt\")).into_iter() {\n let Test {\n input,\n base,\n scheme: expected_scheme,\n username: expected_username,\n password: expected_password,\n host: expected_host,\n port: expected_port,\n path: expected_path,\n query: expected_query,\n fragment: expected_fragment,\n expected_failure,\n } = test;\n let base = match Url::parse(base.as_slice()) {\n Ok(base) => base,\n Err(message) => panic!(\"Error parsing base {}: {}\", base, message)\n };\n let url = UrlParser::new().base_url(&base).parse(input.as_slice());\n if expected_scheme.is_none() {\n if url.is_ok() && !expected_failure {\n panic!(\"Expected a parse error for URL {}\", input);\n }\n continue\n }\n let Url { scheme, scheme_data, query, fragment, .. } = match url {\n Ok(url) => url,\n Err(message) => {\n if expected_failure {\n continue\n } else {\n panic!(\"Error parsing URL {}: {}\", input, message)\n }\n }\n };\n\n macro_rules! 
assert_eq {\n ($a: expr, $b: expr) => {\n {\n let a = $a;\n let b = $b;\n if a != b {\n if expected_failure {\n continue\n } else {\n panic!(\"{:?} != {:?}\", a, b)\n }\n }\n }\n }\n }\n\n assert_eq!(Some(scheme), expected_scheme);\n match scheme_data {\n SchemeData::Relative(RelativeSchemeData {\n username, password, host, port, default_port: _, path,\n }) => {\n assert_eq!(username, expected_username);\n assert_eq!(password, expected_password);\n let host = host.serialize();\n assert_eq!(host, expected_host);\n assert_eq!(port, expected_port);\n assert_eq!(Some(format!(\"\/{}\", path.connect(\"\/\"))), expected_path);\n },\n SchemeData::NonRelative(scheme_data) => {\n assert_eq!(Some(scheme_data), expected_path);\n assert_eq!(String::new(), expected_username);\n assert_eq!(None, expected_password);\n assert_eq!(String::new(), expected_host);\n assert_eq!(None, expected_port);\n },\n }\n fn opt_prepend(prefix: &str, opt_s: Option<String>) -> Option<String> {\n opt_s.map(|s| format!(\"{}{}\", prefix, s))\n }\n assert_eq!(opt_prepend(\"?\", query), expected_query);\n assert_eq!(opt_prepend(\"#\", fragment), expected_fragment);\n\n assert!(!expected_failure, \"Unexpected success for {}\", input);\n }\n}\n\nstruct Test {\n input: String,\n base: String,\n scheme: Option<String>,\n username: String,\n password: Option<String>,\n host: String,\n port: Option<u16>,\n path: Option<String>,\n query: Option<String>,\n fragment: Option<String>,\n expected_failure: bool,\n}\n\nfn parse_test_data(input: &str) -> Vec<Test> {\n let mut tests: Vec<Test> = Vec::new();\n for line in input.lines() {\n if line == \"\" || line.starts_with(\"#\") {\n continue\n }\n let mut pieces = line.split(' ').collect::<Vec<&str>>();\n let expected_failure = pieces[0] == \"XFAIL\";\n if expected_failure {\n pieces.remove(0);\n }\n let input = unescape(pieces.remove(0));\n let mut test = Test {\n input: input,\n base: if pieces.is_empty() || pieces[0] == \"\" {\n tests.last().unwrap().base.clone()\n } else {\n unescape(pieces.remove(0))\n },\n scheme: None,\n username: String::new(),\n password: None,\n host: String::new(),\n port: None,\n path: None,\n query: None,\n fragment: None,\n expected_failure: expected_failure,\n };\n for piece in pieces.into_iter() {\n if piece == \"\" || piece.starts_with(\"#\") {\n continue\n }\n let colon = piece.find(':').unwrap();\n let value = unescape(&piece[colon + 1..]);\n match &piece[..colon] {\n \"s\" => test.scheme = Some(value),\n \"u\" => test.username = value,\n \"pass\" => test.password = Some(value),\n \"h\" => test.host = value,\n \"port\" => test.port = Some(value.parse().unwrap()),\n \"p\" => test.path = Some(value),\n \"q\" => test.query = Some(value),\n \"f\" => test.fragment = Some(value),\n _ => panic!(\"Invalid token\")\n }\n }\n tests.push(test)\n }\n tests\n}\n\nfn unescape(input: &str) -> String {\n let mut output = String::new();\n let mut chars = input.chars();\n loop {\n match chars.next() {\n None => return output,\n Some(c) => output.push(\n if c == '\\\\' {\n match chars.next().unwrap() {\n '\\\\' => '\\\\',\n 'n' => '\\n',\n 'r' => '\\r',\n 's' => ' ',\n 't' => '\\t',\n 'f' => '\\x0C',\n 'u' => {\n let mut hex = String::new();\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n from_str_radix(hex.as_slice(), 16).ok()\n .and_then(char::from_u32).unwrap()\n }\n _ => panic!(\"Invalid test data input\"),\n }\n } else {\n c\n }\n )\n }\n }\n}\n\n\n#[test]\nfn file_paths() {\n 
assert_eq!(Url::from_file_path(&path::posix::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::posix::Path::new(\"..\/relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"..\\relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"\\drive-relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"\\\\ucn\\\")), Err(()));\n\n let mut url = Url::from_file_path(&path::posix::Path::new(\"\/foo\/bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"foo\".to_string(), \"bar\".to_string()].as_slice()));\n assert!(url.to_file_path() == Ok(path::posix::Path::new(\"\/foo\/bar\")));\n\n url.path_mut().unwrap()[1] = \"ba\\0r\".to_string();\n assert!(url.to_file_path::<path::posix::Path>() == Err(()));\n\n url.path_mut().unwrap()[1] = \"ba%00r\".to_string();\n assert!(url.to_file_path::<path::posix::Path>() == Err(()));\n\n \/\/ Invalid UTF-8\n url.path_mut().unwrap()[1] = \"ba%80r\".to_string();\n assert!(url.to_file_path() == Ok(path::posix::Path::new(\n \/* note: byte string, invalid UTF-8 *\/ b\"\/foo\/ba\\x80r\")));\n\n let mut url = Url::from_file_path(&path::windows::Path::new(r\"C:\\foo\\bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"C:\".to_string(), \"foo\".to_string(), \"bar\".to_string()].as_slice()));\n assert!(url.to_file_path::<path::windows::Path>()\n == Ok(path::windows::Path::new(r\"C:\\foo\\bar\")));\n\n url.path_mut().unwrap()[2] = \"ba\\0r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n\n url.path_mut().unwrap()[2] = \"ba%00r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n\n \/\/ Invalid UTF-8\n url.path_mut().unwrap()[2] = \"ba%80r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n}\n\n\n#[test]\nfn directory_paths() {\n assert_eq!(Url::from_directory_path(&path::posix::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::posix::Path::new(\"..\/relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"..\\relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"\\drive-relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"\\\\ucn\\\")), Err(()));\n\n let url = Url::from_directory_path(&path::posix::Path::new(\"\/foo\/bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"foo\".to_string(), \"bar\".to_string(), \"\".to_string()].as_slice()));\n\n let url = Url::from_directory_path(&path::windows::Path::new(r\"C:\\foo\\bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\n \"C:\".to_string(), \"foo\".to_string(), \"bar\".to_string(), \"\".to_string()].as_slice()));\n}\n<commit_msg>Fix old_path import in tests.<commit_after>\/\/ Copyright 2013-2014 Simon Sapin.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse std::char;\nuse std::num::from_str_radix;\nuse std::old_path as path;\nuse super::{UrlParser, Url, SchemeData, RelativeSchemeData, Host};\n\n\n#[test]\nfn url_parsing() {\n for test in parse_test_data(include_str!(\"urltestdata.txt\")).into_iter() {\n let Test {\n input,\n base,\n scheme: expected_scheme,\n username: expected_username,\n password: expected_password,\n host: expected_host,\n port: expected_port,\n path: expected_path,\n query: expected_query,\n fragment: expected_fragment,\n expected_failure,\n } = test;\n let base = match Url::parse(base.as_slice()) {\n Ok(base) => base,\n Err(message) => panic!(\"Error parsing base {}: {}\", base, message)\n };\n let url = UrlParser::new().base_url(&base).parse(input.as_slice());\n if expected_scheme.is_none() {\n if url.is_ok() && !expected_failure {\n panic!(\"Expected a parse error for URL {}\", input);\n }\n continue\n }\n let Url { scheme, scheme_data, query, fragment, .. } = match url {\n Ok(url) => url,\n Err(message) => {\n if expected_failure {\n continue\n } else {\n panic!(\"Error parsing URL {}: {}\", input, message)\n }\n }\n };\n\n macro_rules! assert_eq {\n ($a: expr, $b: expr) => {\n {\n let a = $a;\n let b = $b;\n if a != b {\n if expected_failure {\n continue\n } else {\n panic!(\"{:?} != {:?}\", a, b)\n }\n }\n }\n }\n }\n\n assert_eq!(Some(scheme), expected_scheme);\n match scheme_data {\n SchemeData::Relative(RelativeSchemeData {\n username, password, host, port, default_port: _, path,\n }) => {\n assert_eq!(username, expected_username);\n assert_eq!(password, expected_password);\n let host = host.serialize();\n assert_eq!(host, expected_host);\n assert_eq!(port, expected_port);\n assert_eq!(Some(format!(\"\/{}\", path.connect(\"\/\"))), expected_path);\n },\n SchemeData::NonRelative(scheme_data) => {\n assert_eq!(Some(scheme_data), expected_path);\n assert_eq!(String::new(), expected_username);\n assert_eq!(None, expected_password);\n assert_eq!(String::new(), expected_host);\n assert_eq!(None, expected_port);\n },\n }\n fn opt_prepend(prefix: &str, opt_s: Option<String>) -> Option<String> {\n opt_s.map(|s| format!(\"{}{}\", prefix, s))\n }\n assert_eq!(opt_prepend(\"?\", query), expected_query);\n assert_eq!(opt_prepend(\"#\", fragment), expected_fragment);\n\n assert!(!expected_failure, \"Unexpected success for {}\", input);\n }\n}\n\nstruct Test {\n input: String,\n base: String,\n scheme: Option<String>,\n username: String,\n password: Option<String>,\n host: String,\n port: Option<u16>,\n path: Option<String>,\n query: Option<String>,\n fragment: Option<String>,\n expected_failure: bool,\n}\n\nfn parse_test_data(input: &str) -> Vec<Test> {\n let mut tests: Vec<Test> = Vec::new();\n for line in input.lines() {\n if line == \"\" || line.starts_with(\"#\") {\n continue\n }\n let mut pieces = line.split(' ').collect::<Vec<&str>>();\n let expected_failure = pieces[0] == \"XFAIL\";\n if expected_failure {\n pieces.remove(0);\n }\n let input = unescape(pieces.remove(0));\n let mut test = Test {\n input: input,\n base: if pieces.is_empty() || pieces[0] == \"\" {\n tests.last().unwrap().base.clone()\n } else {\n unescape(pieces.remove(0))\n },\n scheme: None,\n username: String::new(),\n password: None,\n host: String::new(),\n port: None,\n path: None,\n query: None,\n fragment: None,\n expected_failure: expected_failure,\n };\n for piece in pieces.into_iter() {\n if piece == \"\" || piece.starts_with(\"#\") {\n 
continue\n }\n let colon = piece.find(':').unwrap();\n let value = unescape(&piece[colon + 1..]);\n match &piece[..colon] {\n \"s\" => test.scheme = Some(value),\n \"u\" => test.username = value,\n \"pass\" => test.password = Some(value),\n \"h\" => test.host = value,\n \"port\" => test.port = Some(value.parse().unwrap()),\n \"p\" => test.path = Some(value),\n \"q\" => test.query = Some(value),\n \"f\" => test.fragment = Some(value),\n _ => panic!(\"Invalid token\")\n }\n }\n tests.push(test)\n }\n tests\n}\n\nfn unescape(input: &str) -> String {\n let mut output = String::new();\n let mut chars = input.chars();\n loop {\n match chars.next() {\n None => return output,\n Some(c) => output.push(\n if c == '\\\\' {\n match chars.next().unwrap() {\n '\\\\' => '\\\\',\n 'n' => '\\n',\n 'r' => '\\r',\n 's' => ' ',\n 't' => '\\t',\n 'f' => '\\x0C',\n 'u' => {\n let mut hex = String::new();\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n hex.push(chars.next().unwrap());\n from_str_radix(hex.as_slice(), 16).ok()\n .and_then(char::from_u32).unwrap()\n }\n _ => panic!(\"Invalid test data input\"),\n }\n } else {\n c\n }\n )\n }\n }\n}\n\n\n#[test]\nfn file_paths() {\n assert_eq!(Url::from_file_path(&path::posix::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::posix::Path::new(\"..\/relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"..\\relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"\\drive-relative\")), Err(()));\n assert_eq!(Url::from_file_path(&path::windows::Path::new(r\"\\\\ucn\\\")), Err(()));\n\n let mut url = Url::from_file_path(&path::posix::Path::new(\"\/foo\/bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"foo\".to_string(), \"bar\".to_string()].as_slice()));\n assert!(url.to_file_path() == Ok(path::posix::Path::new(\"\/foo\/bar\")));\n\n url.path_mut().unwrap()[1] = \"ba\\0r\".to_string();\n assert!(url.to_file_path::<path::posix::Path>() == Err(()));\n\n url.path_mut().unwrap()[1] = \"ba%00r\".to_string();\n assert!(url.to_file_path::<path::posix::Path>() == Err(()));\n\n \/\/ Invalid UTF-8\n url.path_mut().unwrap()[1] = \"ba%80r\".to_string();\n assert!(url.to_file_path() == Ok(path::posix::Path::new(\n \/* note: byte string, invalid UTF-8 *\/ b\"\/foo\/ba\\x80r\")));\n\n let mut url = Url::from_file_path(&path::windows::Path::new(r\"C:\\foo\\bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"C:\".to_string(), \"foo\".to_string(), \"bar\".to_string()].as_slice()));\n assert!(url.to_file_path::<path::windows::Path>()\n == Ok(path::windows::Path::new(r\"C:\\foo\\bar\")));\n\n url.path_mut().unwrap()[2] = \"ba\\0r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n\n url.path_mut().unwrap()[2] = \"ba%00r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n\n \/\/ Invalid UTF-8\n url.path_mut().unwrap()[2] = \"ba%80r\".to_string();\n assert!(url.to_file_path::<path::windows::Path>() == Err(()));\n}\n\n\n#[test]\nfn directory_paths() {\n assert_eq!(Url::from_directory_path(&path::posix::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::posix::Path::new(\"..\/relative\")), Err(()));\n 
assert_eq!(Url::from_directory_path(&path::windows::Path::new(\"relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"..\\relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"\\drive-relative\")), Err(()));\n assert_eq!(Url::from_directory_path(&path::windows::Path::new(r\"\\\\ucn\\\")), Err(()));\n\n let url = Url::from_directory_path(&path::posix::Path::new(\"\/foo\/bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\"foo\".to_string(), \"bar\".to_string(), \"\".to_string()].as_slice()));\n\n let url = Url::from_directory_path(&path::windows::Path::new(r\"C:\\foo\\bar\")).unwrap();\n assert_eq!(url.host(), Some(&Host::Domain(\"\".to_string())));\n assert_eq!(url.path(), Some([\n \"C:\".to_string(), \"foo\".to_string(), \"bar\".to_string(), \"\".to_string()].as_slice()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>22 - mutability<commit_after>#[allow(dead_code)]\nstruct Book {\n \/\/ `&'static str` is a reference to a string allocated in read only memory\n author: &'static str,\n title: &'static str,\n year: uint,\n}\n\n\/\/ This function takes a reference to a book\nfn borrow_book(book: &Book) {\n println!(\"I borrowed {} {} edition\", book.title, book.year);\n}\n\n\/\/ This function takes a reference to a mutable book\nfn new_edition(book: &mut Book) {\n \/\/ the fields of the book can be modified\n book.year = 2014;\n}\n\nfn main() {\n \/\/ An immutable Book\n let geb = Book {\n \/\/ string literals have type `&'static str`\n author: \"Douglas Hofstadter\",\n title: \"Gödel, Escher, Bach\",\n year: 1979,\n };\n\n \/\/ Immutably borrow `geb`\n borrow_book(&geb);\n\n \/\/ Error! Can't borrow an immutable object as mutable\n new_edition(&mut geb);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `mutable_geb` is a mutable copy of `geb`\n let mut mutable_geb = geb;\n\n \/\/ Borrow a mutable object as mutable\n new_edition(&mut mutable_geb);\n\n \/\/ Mutable objects can be immutably borrowed\n borrow_book(&mutable_geb);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change the include path.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Export request and response fields as public<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add framework for ARM Instruction decoding<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>\tmodified: bin\/dr-daemon.rs<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::{fs, IoResult};\nuse file::{File, GREY};\n\n#[cfg(feature=\"git\")] use ansi_term::ANSIString;\n#[cfg(feature=\"git\")] use ansi_term::Colour::*;\n#[cfg(feature=\"git\")] use git2;\n\n\/\/\/ A **Dir** provides a cached list of the file paths in a directory that's\n\/\/\/ being listed.\n\/\/\/\n\/\/\/ This object gets passed to the Files themselves, in order for them to\n\/\/\/ check the existence of surrounding files, then highlight themselves\n\/\/\/ accordingly. (See `File#get_source_files`)\npub struct Dir {\n contents: Vec<Path>,\n path: Path,\n git: Option<Git>,\n}\n\nimpl Dir {\n \/\/\/ Create a new Dir object filled with all the files in the directory\n \/\/\/ pointed to by the given path. 
Fails if the directory can't be read, or\n \/\/\/ isn't actually a directory.\n pub fn readdir(path: Path) -> IoResult<Dir> {\n fs::readdir(&path).map(|paths| Dir {\n contents: paths,\n path: path.clone(),\n git: Git::scan(&path).ok(),\n })\n }\n\n \/\/\/ Produce a vector of File objects from an initialised directory,\n \/\/\/ printing out an error if any of the Files fail to be created.\n pub fn files(&self) -> Vec<File> {\n let mut files = vec![];\n\n for path in self.contents.iter() {\n match File::from_path(path, Some(self)) {\n Ok(file) => files.push(file),\n Err(e) => println!(\"{}: {}\", path.display(), e),\n }\n }\n\n files\n }\n\n \/\/\/ Whether this directory contains a file with the given path.\n pub fn contains(&self, path: &Path) -> bool {\n self.contents.contains(path)\n }\n\n \/\/\/ Append a path onto the path specified by this directory.\n pub fn join(&self, child: Path) -> Path {\n self.path.join(child)\n }\n\n \/\/\/ Return whether there's a Git repository on or above this directory.\n pub fn has_git_repo(&self) -> bool {\n self.git.is_some()\n }\n\n \/\/\/ Get a string describing the Git status of the given file.\n pub fn git_status(&self, path: &Path, prefix_lookup: bool) -> String {\n match (&self.git, prefix_lookup) {\n (&Some(ref git), false) => git.status(path),\n (&Some(ref git), true) => git.dir_status(path),\n (&None, _) => GREY.paint(\"--\").to_string(),\n }\n }\n}\n\n\/\/\/ Container of Git statuses for all the files in this folder's Git repository.\n#[cfg(feature=\"git\")]\nstruct Git {\n statuses: Vec<(String, git2::Status)>,\n}\n\n#[cfg(feature=\"git\")]\nimpl Git {\n\n \/\/\/ Discover a Git repository on or above this directory, scanning it for\n \/\/\/ the files' statuses if one is found.\n fn scan(path: &Path) -> Result<Git, git2::Error> {\n let repo = try!(git2::Repository::discover(path));\n let statuses = try!(repo.statuses(None));\n\n Ok(Git { statuses: statuses.iter().map(|e| (e.path().unwrap().to_string(), e.status())).collect() })\n }\n\n \/\/\/ Get the status for the file at the given path, if present.\n fn status(&self, path: &Path) -> String {\n let status = self.statuses.iter()\n .find(|p| p.0 == path.as_str().unwrap());\n\n match status {\n Some(&(_, s)) => format!(\"{}{}\", Git::index_status(s), Git::working_tree_status(s)),\n None => GREY.paint(\"--\").to_string(),\n }\n }\n\n \/\/\/ Get the combined status for all the files whose paths begin with the\n \/\/\/ path that gets passed in. 
This is used for getting the status of\n \/\/\/ directories, which don't really have an 'official' status.\n fn dir_status(&self, dir: &Path) -> String {\n let status = self.statuses.iter()\n .filter(|p| p.0.starts_with(dir.as_str().unwrap()))\n .fold(git2::Status::empty(), |a, b| a | b.1);\n match status {\n s => format!(\"{}{}\", Git::index_status(s), Git::working_tree_status(s)),\n }\n }\n\n \/\/\/ The character to display if the file has been modified, but not staged.\n fn working_tree_status(status: git2::Status) -> ANSIString<'static> {\n match status {\n s if s.contains(git2::STATUS_WT_NEW) => Green.paint(\"A\"),\n s if s.contains(git2::STATUS_WT_MODIFIED) => Blue.paint(\"M\"),\n s if s.contains(git2::STATUS_WT_DELETED) => Red.paint(\"D\"),\n s if s.contains(git2::STATUS_WT_RENAMED) => Yellow.paint(\"R\"),\n s if s.contains(git2::STATUS_WT_TYPECHANGE) => Purple.paint(\"T\"),\n _ => GREY.paint(\"-\"),\n }\n }\n\n \/\/\/ The character to display if the file has been modified, and the change\n \/\/\/ has been staged.\n fn index_status(status: git2::Status) -> ANSIString<'static> {\n match status {\n s if s.contains(git2::STATUS_INDEX_NEW) => Green.paint(\"A\"),\n s if s.contains(git2::STATUS_INDEX_MODIFIED) => Blue.paint(\"M\"),\n s if s.contains(git2::STATUS_INDEX_DELETED) => Red.paint(\"D\"),\n s if s.contains(git2::STATUS_INDEX_RENAMED) => Yellow.paint(\"R\"),\n s if s.contains(git2::STATUS_INDEX_TYPECHANGE) => Purple.paint(\"T\"),\n _ => GREY.paint(\"-\"),\n }\n }\n}\n\n#[cfg(not(feature=\"git\"))]\nstruct Git;\n\n#[cfg(not(feature=\"git\"))]\nimpl Git {\n fn scan(_: &Path) -> Result<Git, ()> {\n \/\/ Don't do anything without Git support\n Err(())\n }\n\n fn status(&self, _: &Path) -> String {\n \/\/ The Err above means that this should never happen\n panic!(\"Tried to access a Git repo without Git support!\");\n }\n}\n<commit_msg>Compare vectors, not strings<commit_after>use std::io::{fs, IoResult};\nuse file::{File, GREY};\n\n#[cfg(feature=\"git\")] use ansi_term::ANSIString;\n#[cfg(feature=\"git\")] use ansi_term::Colour::*;\n#[cfg(feature=\"git\")] use git2;\n\n\/\/\/ A **Dir** provides a cached list of the file paths in a directory that's\n\/\/\/ being listed.\n\/\/\/\n\/\/\/ This object gets passed to the Files themselves, in order for them to\n\/\/\/ check the existence of surrounding files, then highlight themselves\n\/\/\/ accordingly. (See `File#get_source_files`)\npub struct Dir {\n contents: Vec<Path>,\n path: Path,\n git: Option<Git>,\n}\n\nimpl Dir {\n \/\/\/ Create a new Dir object filled with all the files in the directory\n \/\/\/ pointed to by the given path. 
Fails if the directory can't be read, or\n \/\/\/ isn't actually a directory.\n pub fn readdir(path: Path) -> IoResult<Dir> {\n fs::readdir(&path).map(|paths| Dir {\n contents: paths,\n path: path.clone(),\n git: Git::scan(&path).ok(),\n })\n }\n\n \/\/\/ Produce a vector of File objects from an initialised directory,\n \/\/\/ printing out an error if any of the Files fail to be created.\n pub fn files(&self) -> Vec<File> {\n let mut files = vec![];\n\n for path in self.contents.iter() {\n match File::from_path(path, Some(self)) {\n Ok(file) => files.push(file),\n Err(e) => println!(\"{}: {}\", path.display(), e),\n }\n }\n\n files\n }\n\n \/\/\/ Whether this directory contains a file with the given path.\n pub fn contains(&self, path: &Path) -> bool {\n self.contents.contains(path)\n }\n\n \/\/\/ Append a path onto the path specified by this directory.\n pub fn join(&self, child: Path) -> Path {\n self.path.join(child)\n }\n\n \/\/\/ Return whether there's a Git repository on or above this directory.\n pub fn has_git_repo(&self) -> bool {\n self.git.is_some()\n }\n\n \/\/\/ Get a string describing the Git status of the given file.\n pub fn git_status(&self, path: &Path, prefix_lookup: bool) -> String {\n match (&self.git, prefix_lookup) {\n (&Some(ref git), false) => git.status(path),\n (&Some(ref git), true) => git.dir_status(path),\n (&None, _) => GREY.paint(\"--\").to_string(),\n }\n }\n}\n\n\/\/\/ Container of Git statuses for all the files in this folder's Git repository.\n#[cfg(feature=\"git\")]\nstruct Git {\n statuses: Vec<(Vec<u8>, git2::Status)>,\n}\n\n#[cfg(feature=\"git\")]\nimpl Git {\n\n \/\/\/ Discover a Git repository on or above this directory, scanning it for\n \/\/\/ the files' statuses if one is found.\n fn scan(path: &Path) -> Result<Git, git2::Error> {\n let repo = try!(git2::Repository::discover(path));\n let statuses = try!(repo.statuses(None)).iter()\n .map(|e| (e.path_bytes().to_vec(), e.status()))\n .collect();\n Ok(Git { statuses: statuses })\n }\n\n \/\/\/ Get the status for the file at the given path, if present.\n fn status(&self, path: &Path) -> String {\n let status = self.statuses.iter()\n .find(|p| p.0 == path.as_vec());\n match status {\n Some(&(_, s)) => format!(\"{}{}\", Git::index_status(s), Git::working_tree_status(s)),\n None => GREY.paint(\"--\").to_string(),\n }\n }\n\n \/\/\/ Get the combined status for all the files whose paths begin with the\n \/\/\/ path that gets passed in. 
This is used for getting the status of\n \/\/\/ directories, which don't really have an 'official' status.\n fn dir_status(&self, dir: &Path) -> String {\n let status = self.statuses.iter()\n .filter(|p| p.0.starts_with(dir.as_vec()))\n .fold(git2::Status::empty(), |a, b| a | b.1);\n match status {\n s => format!(\"{}{}\", Git::index_status(s), Git::working_tree_status(s)),\n }\n }\n\n \/\/\/ The character to display if the file has been modified, but not staged.\n fn working_tree_status(status: git2::Status) -> ANSIString<'static> {\n match status {\n s if s.contains(git2::STATUS_WT_NEW) => Green.paint(\"A\"),\n s if s.contains(git2::STATUS_WT_MODIFIED) => Blue.paint(\"M\"),\n s if s.contains(git2::STATUS_WT_DELETED) => Red.paint(\"D\"),\n s if s.contains(git2::STATUS_WT_RENAMED) => Yellow.paint(\"R\"),\n s if s.contains(git2::STATUS_WT_TYPECHANGE) => Purple.paint(\"T\"),\n _ => GREY.paint(\"-\"),\n }\n }\n\n \/\/\/ The character to display if the file has been modified, and the change\n \/\/\/ has been staged.\n fn index_status(status: git2::Status) -> ANSIString<'static> {\n match status {\n s if s.contains(git2::STATUS_INDEX_NEW) => Green.paint(\"A\"),\n s if s.contains(git2::STATUS_INDEX_MODIFIED) => Blue.paint(\"M\"),\n s if s.contains(git2::STATUS_INDEX_DELETED) => Red.paint(\"D\"),\n s if s.contains(git2::STATUS_INDEX_RENAMED) => Yellow.paint(\"R\"),\n s if s.contains(git2::STATUS_INDEX_TYPECHANGE) => Purple.paint(\"T\"),\n _ => GREY.paint(\"-\"),\n }\n }\n}\n\n#[cfg(not(feature=\"git\"))]\nstruct Git;\n\n#[cfg(not(feature=\"git\"))]\nimpl Git {\n fn scan(_: &Path) -> Result<Git, ()> {\n \/\/ Don't do anything without Git support\n Err(())\n }\n\n fn status(&self, _: &Path) -> String {\n \/\/ The Err above means that this should never happen\n panic!(\"Tried to access a Git repo without Git support!\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: `Response.token` has type u64 instead of i64<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor fixes to idiomatic code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>os specific clean<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chg comments for documentation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Made queue_destroy require &self not &mut self<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"rusoto\"]\n#![crate_type = \"lib\"]\n#![cfg_attr(feature = \"unstable\", feature(custom_derive, plugin))]\n#![cfg_attr(feature = \"unstable\", plugin(serde_macros))]\n#![cfg_attr(feature = \"nightly-testing\", plugin(clippy))]\n#![cfg_attr(feature = \"nightly-testing\", allow(used_underscore_binding, ptr_arg))]\n#![allow(dead_code)]\n#![cfg_attr(not(feature = \"unstable\"), deny(warnings))]\n\n\/\/! Rusoto is an [AWS](https:\/\/aws.amazon.com\/) SDK for Rust.\n\/\/! A high level overview is available in `README.md` at https:\/\/github.com\/rusoto\/rusoto.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! The following code shows a simple example of using Rusoto's DynamoDB API to\n\/\/! list the names of all tables in a database.\n\/\/!\n\/\/! ```\n\/\/! use std::default::Default;\n\/\/!\n\/\/! use rusoto::{DefaultCredentialsProvider, Region};\n\/\/! use rusoto::dynamodb::{DynamoDbClient, ListTablesInput};\n\/\/!\n\/\/! let provider = DefaultCredentialsProvider::new().unwrap();\n\/\/! let client = DynamoDbClient::new(provider, Region::UsEast1);\n\/\/! let list_tables_input: ListTablesInput = Default::default();\n\/\/!\n\/\/! 
match client.list_tables(&list_tables_input) {\n\/\/! Ok(output) => {\n\/\/! match output.table_names {\n\/\/! Some(table_name_list) => {\n\/\/! println!(\"Tables in database:\");\n\/\/!\n\/\/! for table_name in table_name_list {\n\/\/! println!(\"{}\", table_name);\n\/\/! }\n\/\/! },\n\/\/! None => println!(\"No tables in database!\"),\n\/\/! }\n\/\/! },\n\/\/! Err(error) => {\n\/\/! println!(\"Error: {:?}\", error);\n\/\/! },\n\/\/! }\n\nextern crate chrono;\nextern crate hyper;\n#[macro_use] extern crate log;\nextern crate openssl;\nextern crate regex;\nextern crate rustc_serialize;\nextern crate serde;\nextern crate serde_json;\nextern crate time;\nextern crate url;\nextern crate xml;\n\npub use credential::{\n AwsCredentials,\n ChainProvider,\n EnvironmentProvider,\n IamProvider,\n ProfileProvider,\n ProvideAwsCredentials,\n DefaultCredentialsProvider,\n DefaultCredentialsProviderSync,\n};\npub use region::{ParseRegionError, Region};\npub use request::{DispatchSignedRequest, HttpResponse, HttpDispatchError};\n\nmod credential;\nmod param;\nmod region;\nmod request;\nmod xmlerror;\nmod xmlutil;\nmod serialization;\n#[macro_use] mod signature;\n\n\n#[cfg(feature = \"acm\")]\npub mod acm;\n#[cfg(feature = \"cloudhsm\")]\npub mod cloudhsm;\n#[cfg(feature = \"cloudtrail\")]\npub mod cloudtrail;\n#[cfg(feature = \"codecommit\")]\npub mod codecommit;\n#[cfg(feature = \"codedeploy\")]\npub mod codedeploy;\n#[cfg(feature = \"codepipeline\")]\npub mod codepipeline;\n#[cfg(feature = \"cognito-identity\")]\npub mod cognitoidentity;\n#[cfg(feature = \"config\")]\npub mod config;\n#[cfg(feature = \"datapipeline\")]\npub mod datapipeline;\n#[cfg(feature = \"devicefarm\")]\npub mod devicefarm;\n#[cfg(feature = \"directconnect\")]\npub mod directconnect;\n#[cfg(feature = \"ds\")]\npub mod ds;\n#[cfg(feature = \"dynamodb\")]\npub mod dynamodb;\n#[cfg(feature = \"dynamodbstreams\")]\npub mod dynamodbstreams;\n#[cfg(feature = \"ec2\")]\npub mod ec2;\n#[cfg(feature = \"ecr\")]\npub mod ecr;\n#[cfg(feature = \"ecs\")]\npub mod ecs;\n#[cfg(feature = \"emr\")]\npub mod emr;\n#[cfg(feature = \"elastictranscoder\")]\npub mod elastictranscoder;\n#[cfg(feature = \"events\")]\npub mod events;\n#[cfg(feature = \"firehose\")]\npub mod firehose;\n#[cfg(feature = \"inspector\")]\npub mod inspector;\n#[cfg(feature = \"iot\")]\npub mod iot;\n#[cfg(feature = \"kinesis\")]\npub mod kinesis;\n#[cfg(feature = \"kms\")]\npub mod kms;\n#[cfg(feature = \"logs\")]\npub mod logs;\n#[cfg(feature = \"machinelearning\")]\npub mod machinelearning;\n#[cfg(feature = \"marketplacecommerceanalytics\")]\npub mod marketplacecommerceanalytics;\n#[cfg(feature = \"opsworks\")]\npub mod opsworks;\n#[cfg(feature = \"route53domains\")]\npub mod route53domains;\n#[cfg(feature = \"s3\")]\npub mod s3;\n#[cfg(feature = \"sqs\")]\npub mod sqs;\n#[cfg(feature = \"ssm\")]\npub mod ssm;\n#[cfg(feature = \"storagegateway\")]\npub mod storagegateway;\n#[cfg(feature = \"swf\")]\npub mod swf;\n#[cfg(feature = \"waf\")]\npub mod waf;\n#[cfg(feature = \"workspaces\")]\npub mod workspaces;\n\n\/*\n#[cfg(feature = \"gamelift\")]\npub mod gamelift;\n#[cfg(feature = \"support\")]\npub mod support;\n*\/\n\n<commit_msg>Annotates Dynamodb example to not compile.<commit_after>#![crate_name = \"rusoto\"]\n#![crate_type = \"lib\"]\n#![cfg_attr(feature = \"unstable\", feature(custom_derive, plugin))]\n#![cfg_attr(feature = \"unstable\", plugin(serde_macros))]\n#![cfg_attr(feature = \"nightly-testing\", plugin(clippy))]\n#![cfg_attr(feature = \"nightly-testing\", 
allow(used_underscore_binding, ptr_arg))]\n#![allow(dead_code)]\n#![cfg_attr(not(feature = \"unstable\"), deny(warnings))]\n\n\/\/! Rusoto is an [AWS](https:\/\/aws.amazon.com\/) SDK for Rust.\n\/\/! A high level overview is available in `README.md` at https:\/\/github.com\/rusoto\/rusoto.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! The following code shows a simple example of using Rusoto's DynamoDB API to\n\/\/! list the names of all tables in a database.\n\/\/!\n\/\/! ```rust,ignore\n\/\/! use std::default::Default;\n\/\/!\n\/\/! use rusoto::{DefaultCredentialsProvider, Region};\n\/\/! use rusoto::dynamodb::{DynamoDbClient, ListTablesInput};\n\/\/!\n\/\/! let provider = DefaultCredentialsProvider::new().unwrap();\n\/\/! let client = DynamoDbClient::new(provider, Region::UsEast1);\n\/\/! let list_tables_input: ListTablesInput = Default::default();\n\/\/!\n\/\/! match client.list_tables(&list_tables_input) {\n\/\/! Ok(output) => {\n\/\/! match output.table_names {\n\/\/! Some(table_name_list) => {\n\/\/! println!(\"Tables in database:\");\n\/\/!\n\/\/! for table_name in table_name_list {\n\/\/! println!(\"{}\", table_name);\n\/\/! }\n\/\/! },\n\/\/! None => println!(\"No tables in database!\"),\n\/\/! }\n\/\/! },\n\/\/! Err(error) => {\n\/\/! println!(\"Error: {:?}\", error);\n\/\/! },\n\/\/! }\n\nextern crate chrono;\nextern crate hyper;\n#[macro_use] extern crate log;\nextern crate openssl;\nextern crate regex;\nextern crate rustc_serialize;\nextern crate serde;\nextern crate serde_json;\nextern crate time;\nextern crate url;\nextern crate xml;\n\npub use credential::{\n AwsCredentials,\n ChainProvider,\n EnvironmentProvider,\n IamProvider,\n ProfileProvider,\n ProvideAwsCredentials,\n DefaultCredentialsProvider,\n DefaultCredentialsProviderSync,\n};\npub use region::{ParseRegionError, Region};\npub use request::{DispatchSignedRequest, HttpResponse, HttpDispatchError};\n\nmod credential;\nmod param;\nmod region;\nmod request;\nmod xmlerror;\nmod xmlutil;\nmod serialization;\n#[macro_use] mod signature;\n\n\n#[cfg(feature = \"acm\")]\npub mod acm;\n#[cfg(feature = \"cloudhsm\")]\npub mod cloudhsm;\n#[cfg(feature = \"cloudtrail\")]\npub mod cloudtrail;\n#[cfg(feature = \"codecommit\")]\npub mod codecommit;\n#[cfg(feature = \"codedeploy\")]\npub mod codedeploy;\n#[cfg(feature = \"codepipeline\")]\npub mod codepipeline;\n#[cfg(feature = \"cognito-identity\")]\npub mod cognitoidentity;\n#[cfg(feature = \"config\")]\npub mod config;\n#[cfg(feature = \"datapipeline\")]\npub mod datapipeline;\n#[cfg(feature = \"devicefarm\")]\npub mod devicefarm;\n#[cfg(feature = \"directconnect\")]\npub mod directconnect;\n#[cfg(feature = \"ds\")]\npub mod ds;\n#[cfg(feature = \"dynamodb\")]\npub mod dynamodb;\n#[cfg(feature = \"dynamodbstreams\")]\npub mod dynamodbstreams;\n#[cfg(feature = \"ec2\")]\npub mod ec2;\n#[cfg(feature = \"ecr\")]\npub mod ecr;\n#[cfg(feature = \"ecs\")]\npub mod ecs;\n#[cfg(feature = \"emr\")]\npub mod emr;\n#[cfg(feature = \"elastictranscoder\")]\npub mod elastictranscoder;\n#[cfg(feature = \"events\")]\npub mod events;\n#[cfg(feature = \"firehose\")]\npub mod firehose;\n#[cfg(feature = \"inspector\")]\npub mod inspector;\n#[cfg(feature = \"iot\")]\npub mod iot;\n#[cfg(feature = \"kinesis\")]\npub mod kinesis;\n#[cfg(feature = \"kms\")]\npub mod kms;\n#[cfg(feature = \"logs\")]\npub mod logs;\n#[cfg(feature = \"machinelearning\")]\npub mod machinelearning;\n#[cfg(feature = \"marketplacecommerceanalytics\")]\npub mod marketplacecommerceanalytics;\n#[cfg(feature = \"opsworks\")]\npub mod 
opsworks;\n#[cfg(feature = \"route53domains\")]\npub mod route53domains;\n#[cfg(feature = \"s3\")]\npub mod s3;\n#[cfg(feature = \"sqs\")]\npub mod sqs;\n#[cfg(feature = \"ssm\")]\npub mod ssm;\n#[cfg(feature = \"storagegateway\")]\npub mod storagegateway;\n#[cfg(feature = \"swf\")]\npub mod swf;\n#[cfg(feature = \"waf\")]\npub mod waf;\n#[cfg(feature = \"workspaces\")]\npub mod workspaces;\n\n\/*\n#[cfg(feature = \"gamelift\")]\npub mod gamelift;\n#[cfg(feature = \"support\")]\npub mod support;\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #209 - servo:doc, r=SimonSapin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>A potentially functional implementation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Additional account test<commit_after><|endoftext|>"} {"text":"<commit_before>#![deny(missing_docs)]\n\n\/\/! The official Piston window back-end for the Piston game engine\n\nextern crate glutin_window;\nextern crate piston;\nextern crate gfx;\nextern crate gfx_device_gl;\nextern crate gfx_graphics;\nextern crate graphics;\n\nuse std::cell::RefCell;\nuse std::rc::Rc;\nuse std::any::Any;\n\nuse glutin_window::GlutinWindow;\nuse piston::{ event, window };\nuse gfx::traits::*;\nuse gfx_graphics::{ Gfx2d, GfxGraphics };\nuse graphics::Context;\n\n\/\/\/ The type of event emitted from event loop.\npub type PistonEvent = event::Event<<GlutinWindow as window::Window>::Event>;\n\n\/\/\/ Contains everything required for controlling window, graphics, event loop.\n#[derive(Clone)]\npub struct PistonWindow<T = ()> {\n \/\/\/ The window.\n pub window: Rc<RefCell<GlutinWindow>>,\n \/\/\/ The gfx data.\n pub gfx: Rc<RefCell<Gfx>>,\n \/\/\/ The event loop.\n pub events: Rc<RefCell<event::events::Events<GlutinWindow, PistonEvent>>>,\n \/\/\/ The event.\n pub event: Option<PistonEvent>,\n \/\/\/ Application structure.\n pub app: Rc<RefCell<T>>,\n}\n\n\/\/\/ Contains Gfx data.\npub struct Gfx {\n \/\/\/ The device.\n pub device: gfx_device_gl::Device,\n \/\/\/ The output.\n pub output: gfx_device_gl::Output,\n \/\/\/ The factory.\n pub factory: gfx_device_gl::Factory,\n \/\/\/ Renderer.\n pub renderer: gfx::render::Renderer<gfx_device_gl::Resources,\n gfx_device_gl::CommandBuffer>,\n \/\/\/ Gfx2d.\n pub g2d: Gfx2d<gfx_device_gl::Resources>,\n}\n\nimpl<T> PistonWindow<T> {\n \/\/\/ Creates a new piston object.\n pub fn new(window: Rc<RefCell<GlutinWindow>>, app: Rc<RefCell<T>>) -> Self {\n use piston::event::Events;\n use piston::window::{ OpenGLWindow, Window };\n\n let (mut device, mut factory) = gfx_device_gl::create(|s| window.borrow_mut().get_proc_address(s));\n let size = window.borrow().size();\n let output = factory.make_fake_output(size.width as u16, size.height as u16);\n let renderer = factory.create_renderer();\n let g2d = Gfx2d::new(&mut device, &mut factory);\n\n PistonWindow {\n window: window.clone(),\n gfx: Rc::new(RefCell::new(Gfx {\n device: device,\n output: output,\n factory: factory,\n renderer: renderer,\n g2d: g2d,\n })),\n events: Rc::new(RefCell::new(window.events())),\n event: None,\n app: app,\n }\n }\n\n \/\/\/ Changes application structure.\n pub fn app<U>(self, app: Rc<RefCell<U>>) -> PistonWindow<U> {\n PistonWindow {\n window: self.window,\n gfx: self.gfx,\n events: self.events,\n event: self.event,\n app: app,\n }\n }\n\n \/\/\/ Renders 2D graphics.\n pub fn draw_2d<F>(&self, f: F)\n where F: FnMut(Context, &mut GfxGraphics<\n gfx_device_gl::Resources, gfx_device_gl::CommandBuffer,\n gfx_device_gl::Output>)\n {\n use 
piston::event::RenderEvent;\n\n if let Some(ref e) = self.event {\n if let Some(args) = e.render_args() {\n let &mut Gfx { ref mut device, ref mut renderer, ref mut output,\n ref mut g2d, .. } = &mut *self.gfx.borrow_mut();\n g2d.draw(renderer, output, args.viewport(), f);\n device.submit(renderer.as_buffer());\n renderer.reset();\n }\n }\n }\n\n \/\/\/ Renders 3D graphics.\n pub fn draw_3d<F>(&self, mut f: F)\n where F: FnMut(&mut Gfx)\n {\n use piston::event::RenderEvent;\n\n if let Some(ref e) = self.event {\n if let Some(_) = e.render_args() {\n f(&mut *self.gfx.borrow_mut())\n }\n }\n }\n}\n\nimpl Iterator for PistonWindow {\n type Item = PistonWindow;\n\n fn next(&mut self) -> Option<PistonWindow> {\n use piston::event::AfterRenderEvent;\n\n if let Some(e) = self.events.borrow_mut().next() {\n if let Some(_) = e.after_render_args() {\n \/\/ After swapping buffers.\n let &mut Gfx {\n ref mut device,\n ref mut factory,\n ..\n } = &mut *self.gfx.borrow_mut();\n device.after_frame();\n factory.cleanup();\n }\n\n Some(PistonWindow {\n window: self.window.clone(),\n gfx: self.gfx.clone(),\n events: self.events.clone(),\n event: Some(e),\n app: self.app.clone(),\n })\n } else { None }\n }\n}\n\nimpl event::GenericEvent for PistonWindow {\n fn event_id(&self) -> event::EventId {\n match self.event {\n Some(ref e) => e.event_id(),\n None => event::EventId(\"\")\n }\n }\n\n fn with_args<'a, F, U>(&'a self, f: F) -> U\n where F: FnMut(&Any) -> U\n {\n self.event.as_ref().unwrap().with_args(f)\n }\n\n fn from_args(event_id: event::EventId, any: &Any, old_event: &Self) -> Option<Self> {\n if let Some(ref e) = old_event.event {\n match event::GenericEvent::from_args(event_id, any, e) {\n Some(e) => {\n Some(PistonWindow {\n window: old_event.window.clone(),\n gfx: old_event.gfx.clone(),\n events: old_event.events.clone(),\n event: Some(e),\n app: old_event.app.clone(),\n })\n }\n None => None\n }\n } else { None }\n }\n}\n\nimpl window::Window for PistonWindow {\n type Event = <GlutinWindow as window::Window>::Event;\n\n fn should_close(&self) -> bool { self.window.borrow().should_close() }\n fn size(&self) -> window::Size { self.window.borrow().size() }\n fn draw_size(&self) -> window::Size { self.window.borrow().draw_size() }\n fn swap_buffers(&mut self) { self.window.borrow_mut().swap_buffers() }\n fn poll_event(&mut self) -> Option<Self::Event> {\n window::Window::poll_event(&mut *self.window.borrow_mut())\n }\n}\n\nimpl window::AdvancedWindow for PistonWindow {\n fn get_title(&self) -> String { self.window.borrow().get_title() }\n fn set_title(&mut self, title: String) {\n self.window.borrow_mut().set_title(title)\n }\n fn get_exit_on_esc(&self) -> bool { self.window.borrow().get_exit_on_esc() }\n fn set_exit_on_esc(&mut self, value: bool) {\n self.window.borrow_mut().set_exit_on_esc(value)\n }\n fn set_capture_cursor(&mut self, value: bool) {\n self.window.borrow_mut().set_capture_cursor(value)\n }\n}\n\n\/\/\/ Creates a new empty application.\npub fn empty_app() -> Rc<RefCell<()>> { Rc::new(RefCell::new(())) }\n<commit_msg>Updated `Gfx::output` on resize<commit_after>#![deny(missing_docs)]\n\n\/\/! 
The official Piston window back-end for the Piston game engine\n\nextern crate glutin_window;\nextern crate piston;\nextern crate gfx;\nextern crate gfx_device_gl;\nextern crate gfx_graphics;\nextern crate graphics;\n\nuse std::cell::RefCell;\nuse std::rc::Rc;\nuse std::any::Any;\n\nuse glutin_window::GlutinWindow;\nuse piston::{ event, window };\nuse gfx::traits::*;\nuse gfx_graphics::{ Gfx2d, GfxGraphics };\nuse graphics::Context;\n\n\/\/\/ The type of event emitted from event loop.\npub type PistonEvent = event::Event<<GlutinWindow as window::Window>::Event>;\n\n\/\/\/ Contains everything required for controlling window, graphics, event loop.\n#[derive(Clone)]\npub struct PistonWindow<T = ()> {\n \/\/\/ The window.\n pub window: Rc<RefCell<GlutinWindow>>,\n \/\/\/ The gfx data.\n pub gfx: Rc<RefCell<Gfx>>,\n \/\/\/ The event loop.\n pub events: Rc<RefCell<event::events::Events<GlutinWindow, PistonEvent>>>,\n \/\/\/ The event.\n pub event: Option<PistonEvent>,\n \/\/\/ Application structure.\n pub app: Rc<RefCell<T>>,\n}\n\n\/\/\/ Contains Gfx data.\npub struct Gfx {\n \/\/\/ The device.\n pub device: gfx_device_gl::Device,\n \/\/\/ The output.\n pub output: gfx_device_gl::Output,\n \/\/\/ The factory.\n pub factory: gfx_device_gl::Factory,\n \/\/\/ Renderer.\n pub renderer: gfx::render::Renderer<gfx_device_gl::Resources,\n gfx_device_gl::CommandBuffer>,\n \/\/\/ Gfx2d.\n pub g2d: Gfx2d<gfx_device_gl::Resources>,\n}\n\nimpl<T> PistonWindow<T> {\n \/\/\/ Creates a new piston object.\n pub fn new(window: Rc<RefCell<GlutinWindow>>, app: Rc<RefCell<T>>) -> Self {\n use piston::event::Events;\n use piston::window::{ OpenGLWindow, Window };\n\n let (mut device, mut factory) = gfx_device_gl::create(|s| window.borrow_mut().get_proc_address(s));\n let size = window.borrow().size();\n let output = factory.make_fake_output(size.width as u16, size.height as u16);\n let renderer = factory.create_renderer();\n let g2d = Gfx2d::new(&mut device, &mut factory);\n\n PistonWindow {\n window: window.clone(),\n gfx: Rc::new(RefCell::new(Gfx {\n device: device,\n output: output,\n factory: factory,\n renderer: renderer,\n g2d: g2d,\n })),\n events: Rc::new(RefCell::new(window.events())),\n event: None,\n app: app,\n }\n }\n\n \/\/\/ Changes application structure.\n pub fn app<U>(self, app: Rc<RefCell<U>>) -> PistonWindow<U> {\n PistonWindow {\n window: self.window,\n gfx: self.gfx,\n events: self.events,\n event: self.event,\n app: app,\n }\n }\n\n \/\/\/ Renders 2D graphics.\n pub fn draw_2d<F>(&self, f: F)\n where F: FnMut(Context, &mut GfxGraphics<\n gfx_device_gl::Resources, gfx_device_gl::CommandBuffer,\n gfx_device_gl::Output>)\n {\n use piston::event::RenderEvent;\n\n if let Some(ref e) = self.event {\n if let Some(args) = e.render_args() {\n let &mut Gfx { ref mut device, ref mut renderer, ref mut output,\n ref mut g2d, .. 
} = &mut *self.gfx.borrow_mut();\n g2d.draw(renderer, output, args.viewport(), f);\n device.submit(renderer.as_buffer());\n renderer.reset();\n }\n }\n }\n\n \/\/\/ Renders 3D graphics.\n pub fn draw_3d<F>(&self, mut f: F)\n where F: FnMut(&mut Gfx)\n {\n use piston::event::RenderEvent;\n\n if let Some(ref e) = self.event {\n if let Some(_) = e.render_args() {\n f(&mut *self.gfx.borrow_mut())\n }\n }\n }\n}\n\nimpl Iterator for PistonWindow {\n type Item = PistonWindow;\n\n fn next(&mut self) -> Option<PistonWindow> {\n use piston::event::*;\n\n if let Some(e) = self.events.borrow_mut().next() {\n if let Some(_) = e.after_render_args() {\n \/\/ After swapping buffers.\n let &mut Gfx {\n ref mut device,\n ref mut factory,\n ..\n } = &mut *self.gfx.borrow_mut();\n device.after_frame();\n factory.cleanup();\n }\n\n if let Some(size) = e.resize_args() {\n let &mut Gfx {\n ref mut output,\n ref mut factory,\n ..\n } = &mut *self.gfx.borrow_mut();\n *output = factory.make_fake_output(size[0] as u16, size[1] as u16);\n }\n\n Some(PistonWindow {\n window: self.window.clone(),\n gfx: self.gfx.clone(),\n events: self.events.clone(),\n event: Some(e),\n app: self.app.clone(),\n })\n } else { None }\n }\n}\n\nimpl event::GenericEvent for PistonWindow {\n fn event_id(&self) -> event::EventId {\n match self.event {\n Some(ref e) => e.event_id(),\n None => event::EventId(\"\")\n }\n }\n\n fn with_args<'a, F, U>(&'a self, f: F) -> U\n where F: FnMut(&Any) -> U\n {\n self.event.as_ref().unwrap().with_args(f)\n }\n\n fn from_args(event_id: event::EventId, any: &Any, old_event: &Self) -> Option<Self> {\n if let Some(ref e) = old_event.event {\n match event::GenericEvent::from_args(event_id, any, e) {\n Some(e) => {\n Some(PistonWindow {\n window: old_event.window.clone(),\n gfx: old_event.gfx.clone(),\n events: old_event.events.clone(),\n event: Some(e),\n app: old_event.app.clone(),\n })\n }\n None => None\n }\n } else { None }\n }\n}\n\nimpl window::Window for PistonWindow {\n type Event = <GlutinWindow as window::Window>::Event;\n\n fn should_close(&self) -> bool { self.window.borrow().should_close() }\n fn size(&self) -> window::Size { self.window.borrow().size() }\n fn draw_size(&self) -> window::Size { self.window.borrow().draw_size() }\n fn swap_buffers(&mut self) { self.window.borrow_mut().swap_buffers() }\n fn poll_event(&mut self) -> Option<Self::Event> {\n window::Window::poll_event(&mut *self.window.borrow_mut())\n }\n}\n\nimpl window::AdvancedWindow for PistonWindow {\n fn get_title(&self) -> String { self.window.borrow().get_title() }\n fn set_title(&mut self, title: String) {\n self.window.borrow_mut().set_title(title)\n }\n fn get_exit_on_esc(&self) -> bool { self.window.borrow().get_exit_on_esc() }\n fn set_exit_on_esc(&mut self, value: bool) {\n self.window.borrow_mut().set_exit_on_esc(value)\n }\n fn set_capture_cursor(&mut self, value: bool) {\n self.window.borrow_mut().set_capture_cursor(value)\n }\n}\n\n\/\/\/ Creates a new empty application.\npub fn empty_app() -> Rc<RefCell<()>> { Rc::new(RefCell::new(())) }\n<|endoftext|>"} {"text":"<commit_before>#![feature(core, collections, io,\n std_misc, box_syntax, unsafe_destructor)]\n#![deny(missing_docs)]\n#![cfg_attr(test, deny(warnings))]\n#![cfg_attr(test, feature(alloc, test))]\n\n\/\/! # Hyper\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! 
Hyper offers both an HTTP\/S client an HTTP server which can be used to drive\n\/\/! complex web applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapper over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations can\n\/\/! be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! `match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's header representation is likely the most complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/server`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A `Handler` in Hyper accepts a `Request` and `Response`. This is where\n\/\/! user-code can handle each connection. The server accepts connections in a\n\/\/! task pool with a customizable number of threads, and passes the Request \/\n\/\/! Response to the handler.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. 
`Response`'s `Writer`\n\/\/! implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! in the documentation for sever Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate \"rustc-serialize\" as serialize;\nextern crate time;\nextern crate url;\nextern crate openssl;\nextern crate cookie;\nextern crate unicase;\nextern crate httparse;\nextern crate num_cpus;\n\n#[macro_use]\nextern crate log;\n\n#[cfg(test)]\nextern crate test;\n\n\npub use mimewrapper::mime;\npub use url::Url;\npub use client::Client;\npub use error::{HttpResult, HttpError};\npub use method::Method::{Get, Head, Post, Delete};\npub use status::StatusCode::{Ok, BadRequest, NotFound};\npub use server::Server;\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n trace!(\"TODO: {:?}\", format_args!($($arg)*))\n })\n);\n\nmacro_rules! 
inspect(\n ($name:expr, $value:expr) => ({\n let v = $value;\n trace!(\"inspect: {:?} = {:?}\", $name, v);\n v\n })\n);\n\n#[cfg(test)]\n#[macro_use]\nmod mock;\n\npub mod client;\npub mod error;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\nmod mimewrapper {\n \/\/\/ Re-exporting the mime crate, for convenience.\n extern crate mime;\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_send<T: Send>() {\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n}\n<commit_msg>docs(hyper): Tag docs for reexports<commit_after>#![doc(html_root_url = \"https:\/\/hyperium.github.io\/hyper\/hyper\/index.html\")]\n#![feature(core, collections, io,\n std_misc, box_syntax, unsafe_destructor)]\n#![deny(missing_docs)]\n#![cfg_attr(test, deny(warnings))]\n#![cfg_attr(test, feature(alloc, test))]\n\n\/\/! # Hyper\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! Hyper offers both an HTTP\/S client an HTTP server which can be used to drive\n\/\/! complex web applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapper over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations can\n\/\/! be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! `match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's header representation is likely the most complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! 
Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/server`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A `Handler` in Hyper accepts a `Request` and `Response`. This is where\n\/\/! user-code can handle each connection. The server accepts connections in a\n\/\/! task pool with a customizable number of threads, and passes the Request \/\n\/\/! Response to the handler.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. `Response`'s `Writer`\n\/\/! implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! in the documentation for sever Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! 
implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate \"rustc-serialize\" as serialize;\nextern crate time;\nextern crate url;\nextern crate openssl;\nextern crate cookie;\nextern crate unicase;\nextern crate httparse;\nextern crate num_cpus;\n\n#[macro_use]\nextern crate log;\n\n#[cfg(test)]\nextern crate test;\n\n\npub use mimewrapper::mime;\npub use url::Url;\npub use client::Client;\npub use error::{HttpResult, HttpError};\npub use method::Method::{Get, Head, Post, Delete};\npub use status::StatusCode::{Ok, BadRequest, NotFound};\npub use server::Server;\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n trace!(\"TODO: {:?}\", format_args!($($arg)*))\n })\n);\n\nmacro_rules! inspect(\n ($name:expr, $value:expr) => ({\n let v = $value;\n trace!(\"inspect: {:?} = {:?}\", $name, v);\n v\n })\n);\n\n#[cfg(test)]\n#[macro_use]\nmod mock;\n\npub mod client;\npub mod error;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\nmod mimewrapper {\n \/\/\/ Re-exporting the mime crate, for convenience.\n extern crate mime;\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_send<T: Send>() {\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add transform to Pattern::Surface to properly draw images<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>WIP using Plank's method for GF(2^4)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>v8 rub complete<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Print first interface name (proof of concept)<commit_after>extern crate libc;\n\nuse libc::{c_char, c_int, c_ushort, c_uint, c_void};\nuse std::c_str::CString;\n\nextern {\n fn printf(s: *const libc::c_char) -> libc::c_int;\n}\n\nextern {\n \/\/ takes a pointer to the first item in an array of ifaddr pointers (in\n \/\/ other words, a pointer to a pointer to an ifaddr)\n fn getifaddrs(addrs: *mut *mut ifaddrs) -> c_int;\n}\n\n\/\/ TODO: try to call getifaddrs (and freeifaddrs, too) to get the list of interfaces, just like in Tony's code\n\nstruct sockaddr {\n sa_family: c_ushort,\n sa_data: [c_char, ..14]\n}\n\nstruct ifaddrs {\n ifa_next: *const ifaddrs,\n ifa_name: *const c_char,\n ifa_flags: c_uint,\n ifa_addr: *const sockaddr,\n ifa_netmask: *const sockaddr,\n ifa_ifu: *const sockaddr,\n ifa_data: *const c_void\n}\n\nfn main() {\n let x =\n unsafe {\n printf(\"Hello, world!\\n\".to_c_str().as_ptr())\n };\n\n println!(\"{}\", x);\n\n \/\/ let addrs = ifaddrs {\n \/\/ ifa_next : std::ptr::mut_null(),\n \/\/ ifa_name : std::ptr::mut_null(),\n \/\/ ifa_flags : 0,\n \/\/ ifa_addr : std::ptr::mut_null(),\n \/\/ ifa_netmask : std::ptr::mut_null(),\n \/\/ ifa_ifu : std::ptr::mut_null(),\n \/\/ ifa_data : std::ptr::mut_null(),\n \/\/ };\n \/\/ TOOD: figure out a better default capacity than 20 (see the ptr module: I think I just need to give it a mutable pointer; no allocation needed)\n let mut addrs : Vec<*mut ifaddrs> = Vec::with_capacity(1);\n\n unsafe {\n let ptr = addrs.as_mut_ptr(); \/\/ *mut *mut ifaddr\n let ret = getifaddrs(ptr);\n println!(\"return code: {}\", ret);\n\n let first_ptr = *ptr;\n println!(\"{}\", first_ptr);\n let first_addr = *first_ptr;\n println!(\"{}\", first_addr.ifa_next);\n println!(\"{}\", CString::new(first_addr.ifa_name, false).as_str());\n\n\n\n \/\/ let first_ptr = addrs.get(0);\n \/\/ let first_addr = 
*first_ptr;\n \/\/ let first_addr_addr = *first_addr;\n \/\/ first_addr_addr.ifa_flags;\n \/\/ println!(\"{}\", first_addr_addr.ifa_name);\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>String -> &str ('a lifetime).<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`IXAudio2Voice::SetEffectParameters()`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement `recv()` call for datagram sockets<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Providing the features between \"full\" and \"derive\" of syn.\n\/\/!\n\/\/! This crate provides the following two unique data structures.\n\/\/!\n\/\/! * [`syn_mid::ItemFn`] -- A function whose body is not parsed.\n\/\/!\n\/\/! ```text\n\/\/! fn process(n: usize) -> Result<()> { ... }\n\/\/! ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^ ^\n\/\/! ```\n\/\/!\n\/\/! * [`syn_mid::Block`] -- A block whose body is not parsed.\n\/\/!\n\/\/! ```text\n\/\/! { ... }\n\/\/! ^ ^\n\/\/! ```\n\/\/!\n\/\/! Other data structures are the same as data structures of [syn]. These are defined in this crate\n\/\/! because they cannot be used in [syn] without \"full\" feature.\n\/\/!\n\/\/! ## Optional features\n\/\/!\n\/\/! syn-mid in the default features aims to provide the features between \"full\"\n\/\/! and \"derive\" of [syn].\n\/\/!\n\/\/! * **`clone-impls`** — Clone impls for all syntax tree types.\n\/\/!\n\/\/! [`syn_mid::ItemFn`]: struct.ItemFn.html\n\/\/! [`syn_mid::Block`]: struct.Block.html\n\/\/! [syn]: https:\/\/github.com\/dtolnay\/syn\n\/\/!\n\n#![doc(html_root_url = \"https:\/\/docs.rs\/syn-mid\/0.4.0\")]\n#![doc(test(\n no_crate_inject,\n attr(\n deny(warnings, rust_2018_idioms, single_use_lifetimes),\n allow(dead_code)\n )\n))]\n#![warn(unsafe_code)]\n#![warn(rust_2018_idioms, unreachable_pub)]\n#![warn(single_use_lifetimes)]\n#![warn(clippy::all, clippy::pedantic)]\n#![allow(\n clippy::eval_order_dependence,\n clippy::large_enum_variant,\n clippy::module_name_repetitions,\n clippy::use_self\n)]\n\n\/\/ Many of the code contained in this crate are copies from https:\/\/github.com\/dtolnay\/syn.\n\n#[macro_use]\nmod macros;\n\nmod arg;\nmod pat;\nmod path;\n\npub use self::arg::*;\npub use self::pat::*;\n\nuse proc_macro2::TokenStream;\nuse syn::{\n punctuated::Punctuated, token, Abi, Attribute, Generics, Ident, ReturnType, Token, Visibility,\n};\n\nast_struct! {\n \/\/\/ A braced block containing Rust statements.\n pub struct Block {\n pub brace_token: token::Brace,\n \/\/\/ Statements in a block\n pub stmts: TokenStream,\n }\n}\n\nast_struct! 
{\n \/\/\/ A free-standing function: `fn process(n: usize) -> Result<()> { ...\n \/\/\/ }`.\n pub struct ItemFn {\n pub attrs: Vec<Attribute>,\n pub vis: Visibility,\n pub constness: Option<Token![const]>,\n pub asyncness: Option<Token![async]>,\n pub unsafety: Option<Token![unsafe]>,\n pub abi: Option<Abi>,\n pub fn_token: Token![fn],\n pub ident: Ident,\n pub generics: Generics,\n pub paren_token: token::Paren,\n pub inputs: Punctuated<FnArg, Token![,]>,\n pub output: ReturnType,\n pub block: Block,\n }\n}\n\nmod parsing {\n use syn::{\n braced, parenthesized,\n parse::{Parse, ParseStream, Result},\n Abi, Attribute, Generics, Ident, ReturnType, Token, Visibility, WhereClause,\n };\n\n use super::{Block, FnArg, ItemFn};\n\n impl Parse for Block {\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n let content;\n Ok(Self {\n brace_token: braced!(content in input),\n stmts: content.parse()?,\n })\n }\n }\n\n impl Parse for ItemFn {\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n let attrs = input.call(Attribute::parse_outer)?;\n let vis: Visibility = input.parse()?;\n let constness: Option<Token![const]> = input.parse()?;\n let asyncness: Option<Token![async]> = input.parse()?;\n let unsafety: Option<Token![unsafe]> = input.parse()?;\n let abi: Option<Abi> = input.parse()?;\n let fn_token: Token![fn] = input.parse()?;\n let ident: Ident = input.parse()?;\n let generics: Generics = input.parse()?;\n\n let content;\n let paren_token = parenthesized!(content in input);\n let inputs = content.parse_terminated(FnArg::parse)?;\n\n let output: ReturnType = input.parse()?;\n let where_clause: Option<WhereClause> = input.parse()?;\n\n let block = input.parse()?;\n\n Ok(Self {\n attrs,\n vis,\n constness,\n asyncness,\n unsafety,\n abi,\n fn_token,\n ident,\n generics: Generics {\n where_clause,\n ..generics\n },\n paren_token,\n inputs,\n output,\n block,\n })\n }\n }\n}\n\nmod printing {\n use proc_macro2::TokenStream;\n use quote::{ToTokens, TokenStreamExt};\n\n use super::{Block, ItemFn};\n\n impl ToTokens for Block {\n fn to_tokens(&self, tokens: &mut TokenStream) {\n self.brace_token.surround(tokens, |tokens| {\n tokens.append_all(self.stmts.clone());\n });\n }\n }\n\n impl ToTokens for ItemFn {\n fn to_tokens(&self, tokens: &mut TokenStream) {\n tokens.append_all(&self.attrs);\n self.vis.to_tokens(tokens);\n self.constness.to_tokens(tokens);\n self.asyncness.to_tokens(tokens);\n self.unsafety.to_tokens(tokens);\n self.abi.to_tokens(tokens);\n self.fn_token.to_tokens(tokens);\n self.ident.to_tokens(tokens);\n self.generics.to_tokens(tokens);\n self.paren_token.surround(tokens, |tokens| {\n self.inputs.to_tokens(tokens);\n });\n self.output.to_tokens(tokens);\n self.generics.where_clause.to_tokens(tokens);\n self.block.brace_token.surround(tokens, |tokens| {\n tokens.append_all(self.block.stmts.clone());\n });\n }\n }\n}\n<commit_msg>Tweak lints<commit_after>\/\/! Providing the features between \"full\" and \"derive\" of syn.\n\/\/!\n\/\/! This crate provides the following two unique data structures.\n\/\/!\n\/\/! * [`syn_mid::ItemFn`] -- A function whose body is not parsed.\n\/\/!\n\/\/! ```text\n\/\/! fn process(n: usize) -> Result<()> { ... }\n\/\/! ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^ ^\n\/\/! ```\n\/\/!\n\/\/! * [`syn_mid::Block`] -- A block whose body is not parsed.\n\/\/!\n\/\/! ```text\n\/\/! { ... }\n\/\/! ^ ^\n\/\/! ```\n\/\/!\n\/\/! Other data structures are the same as data structures of [syn]. These are defined in this crate\n\/\/! 
because they cannot be used in [syn] without \"full\" feature.\n\/\/!\n\/\/! ## Optional features\n\/\/!\n\/\/! syn-mid in the default features aims to provide the features between \"full\"\n\/\/! and \"derive\" of [syn].\n\/\/!\n\/\/! * **`clone-impls`** — Clone impls for all syntax tree types.\n\/\/!\n\/\/! [`syn_mid::ItemFn`]: struct.ItemFn.html\n\/\/! [`syn_mid::Block`]: struct.Block.html\n\/\/! [syn]: https:\/\/github.com\/dtolnay\/syn\n\/\/!\n\n#![doc(html_root_url = \"https:\/\/docs.rs\/syn-mid\/0.4.0\")]\n#![doc(test(\n no_crate_inject,\n attr(\n deny(warnings, rust_2018_idioms, single_use_lifetimes),\n allow(dead_code)\n )\n))]\n#![warn(unsafe_code)]\n#![warn(rust_2018_idioms, single_use_lifetimes, unreachable_pub)]\n#![warn(clippy::all, clippy::pedantic)]\n#![allow(\n clippy::eval_order_dependence,\n clippy::large_enum_variant,\n clippy::module_name_repetitions,\n clippy::use_self\n)]\n\n\/\/ Many of the code contained in this crate are copies from https:\/\/github.com\/dtolnay\/syn.\n\n#[macro_use]\nmod macros;\n\nmod arg;\nmod pat;\nmod path;\n\npub use self::arg::*;\npub use self::pat::*;\n\nuse proc_macro2::TokenStream;\nuse syn::{\n punctuated::Punctuated, token, Abi, Attribute, Generics, Ident, ReturnType, Token, Visibility,\n};\n\nast_struct! {\n \/\/\/ A braced block containing Rust statements.\n pub struct Block {\n pub brace_token: token::Brace,\n \/\/\/ Statements in a block\n pub stmts: TokenStream,\n }\n}\n\nast_struct! {\n \/\/\/ A free-standing function: `fn process(n: usize) -> Result<()> { ...\n \/\/\/ }`.\n pub struct ItemFn {\n pub attrs: Vec<Attribute>,\n pub vis: Visibility,\n pub constness: Option<Token![const]>,\n pub asyncness: Option<Token![async]>,\n pub unsafety: Option<Token![unsafe]>,\n pub abi: Option<Abi>,\n pub fn_token: Token![fn],\n pub ident: Ident,\n pub generics: Generics,\n pub paren_token: token::Paren,\n pub inputs: Punctuated<FnArg, Token![,]>,\n pub output: ReturnType,\n pub block: Block,\n }\n}\n\nmod parsing {\n use syn::{\n braced, parenthesized,\n parse::{Parse, ParseStream, Result},\n Abi, Attribute, Generics, Ident, ReturnType, Token, Visibility, WhereClause,\n };\n\n use super::{Block, FnArg, ItemFn};\n\n impl Parse for Block {\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n let content;\n Ok(Self {\n brace_token: braced!(content in input),\n stmts: content.parse()?,\n })\n }\n }\n\n impl Parse for ItemFn {\n fn parse(input: ParseStream<'_>) -> Result<Self> {\n let attrs = input.call(Attribute::parse_outer)?;\n let vis: Visibility = input.parse()?;\n let constness: Option<Token![const]> = input.parse()?;\n let asyncness: Option<Token![async]> = input.parse()?;\n let unsafety: Option<Token![unsafe]> = input.parse()?;\n let abi: Option<Abi> = input.parse()?;\n let fn_token: Token![fn] = input.parse()?;\n let ident: Ident = input.parse()?;\n let generics: Generics = input.parse()?;\n\n let content;\n let paren_token = parenthesized!(content in input);\n let inputs = content.parse_terminated(FnArg::parse)?;\n\n let output: ReturnType = input.parse()?;\n let where_clause: Option<WhereClause> = input.parse()?;\n\n let block = input.parse()?;\n\n Ok(Self {\n attrs,\n vis,\n constness,\n asyncness,\n unsafety,\n abi,\n fn_token,\n ident,\n generics: Generics {\n where_clause,\n ..generics\n },\n paren_token,\n inputs,\n output,\n block,\n })\n }\n }\n}\n\nmod printing {\n use proc_macro2::TokenStream;\n use quote::{ToTokens, TokenStreamExt};\n\n use super::{Block, ItemFn};\n\n impl ToTokens for Block {\n fn to_tokens(&self, tokens: 
&mut TokenStream) {\n self.brace_token.surround(tokens, |tokens| {\n tokens.append_all(self.stmts.clone());\n });\n }\n }\n\n impl ToTokens for ItemFn {\n fn to_tokens(&self, tokens: &mut TokenStream) {\n tokens.append_all(&self.attrs);\n self.vis.to_tokens(tokens);\n self.constness.to_tokens(tokens);\n self.asyncness.to_tokens(tokens);\n self.unsafety.to_tokens(tokens);\n self.abi.to_tokens(tokens);\n self.fn_token.to_tokens(tokens);\n self.ident.to_tokens(tokens);\n self.generics.to_tokens(tokens);\n self.paren_token.surround(tokens, |tokens| {\n self.inputs.to_tokens(tokens);\n });\n self.output.to_tokens(tokens);\n self.generics.where_clause.to_tokens(tokens);\n self.block.brace_token.surround(tokens, |tokens| {\n tokens.append_all(self.block.stmts.clone());\n });\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Allow staged_experimental<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>update import<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add new testcase to show the nature of our exciting block-expr \/ trailing unop interaction.<commit_after>\/*\n *\n * When you write a block-expression thing followed by\n * a lone unary operator, you can get a surprising parse:\n *\n * if (...) { ... }\n * -num;\n *\n * for example, or:\n *\n * if (...) { ... }\n * *box;\n *\n * These will parse as subtraction and multiplication binops.\n * To get them to parse \"the way you want\" you need to brace\n * the leading unops:\n\n * if (...) { ... }\n * {-num};\n *\n * or alternatively, semi-separate them:\n *\n * if (...) { ... };\n * -num;\n *\n * This seems a little wonky, but the alternative is to lower\n * precedence of such block-like exprs to the point where\n * you have to parenthesize them to get them to occur in the\n * RHS of a binop. 
For example, you'd have to write:\n *\n * 12 + (if (foo) { 13 } else { 14 });\n *\n * rather than:\n *\n * 12 + if (foo) { 13 } else { 14 };\n *\n * Since we want to maintain the ability to write the latter,\n * we leave the parens-burden on the trailing unop case.\n *\n *\/\n\nfn main() {\n\n auto num = 12;\n\n assert if (true) { 12 } else { 12 } - num == 0;\n assert 12 - if (true) { 12 } else { 12 } == 0;\n if (true) { 12 } {-num};\n if (true) { 12 }; {-num};\n if (true) { 12 };;; -num;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ pub mod bufstream;\n\/\/ mod buf;\n\/\/ pub mod channel;\n\/\/ pub mod mio;\n\/\/ pub mod stream;\nmod cell;\nmod slot;\nmod util;\n\nmod error;\npub use error::{PollError, PollResult, FutureError, FutureResult};\n\n\/\/ Primitive futures\nmod done;\nmod empty;\nmod failed;\nmod finished;\nmod lazy;\nmod promise;\npub use done::{done, Done};\npub use empty::{empty, Empty};\npub use failed::{failed, Failed};\npub use finished::{finished, Finished};\npub use lazy::{lazy, Lazy};\npub use promise::{promise, Promise, Complete};\n\n\/\/ combinators\nmod and_then;\nmod chain;\nmod flatten;\nmod impls;\nmod join;\nmod map;\nmod map_err;\nmod or_else;\nmod select;\nmod then;\npub use and_then::AndThen;\npub use flatten::Flatten;\npub use join::Join;\npub use map::Map;\npub use map_err::MapErr;\npub use or_else::OrElse;\npub use select::Select;\npub use then::Then;\n\nmod collect;\npub use collect::{collect, Collect};\n\n\/\/ streams\n\/\/ pub mod stream;\n\n\/\/ TODO: Send + 'static is annoying, but required by cancel and_then, document\n\/\/ TODO: not object safe\n\/\/\n\/\/ FINISH CONDITIONS\n\/\/ - poll() return Some\n\/\/ - await() is called\n\/\/ - schedule() is called\n\/\/ - schedule_boxed() is called\n\/\/\n\/\/ BAD:\n\/\/ - doing any finish condition after an already called finish condition\n\/\/\n\/\/ WHAT HAPPENS\n\/\/ - panic?\npub trait Future: Send + 'static {\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn poll(&mut self) -> Option<PollResult<Self::Item, Self::Error>>;\n\n \/\/ TODO: why is this not drop()\n \/\/ well what if you schedule() then drop, HUH?!\n fn cancel(&mut self);\n\n fn schedule<F>(&mut self, f: F)\n where F: FnOnce(PollResult<Self::Item, Self::Error>) + Send + 'static,\n Self: Sized;\n\n fn schedule_boxed(&mut self, f: Box<Callback<Self::Item, Self::Error>>);\n\n \/\/ TODO: why can't this be in this lib?\n \/\/ fn await(&mut self) -> FutureResult<Self::Item, Self::Error>;\n\n fn boxed(self) -> Box<Future<Item=Self::Item, Error=Self::Error>>\n where Self: Sized\n {\n Box::new(self)\n }\n\n \/\/ TODO: compare this to `.then(|x| x.map(f))`\n fn map<F, U>(self, f: F) -> Map<Self, F>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n Self: Sized,\n {\n map::new(self, f)\n }\n\n \/\/ TODO: compare this to `.then(|x| x.map_err(f))`\n fn map_err<F, E>(self, f: F) -> MapErr<Self, F>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n map_err::new(self, f)\n }\n\n fn then<F, B>(self, f: F) -> Then<Self, B, F>\n where F: FnOnce(Result<Self::Item, Self::Error>) -> B + Send + 'static,\n B: IntoFuture,\n Self: Sized,\n {\n then::new(self, f)\n }\n\n \/\/ TODO: compare this to\n \/\/ ```\n \/\/ .then(|res| {\n \/\/ match res {\n \/\/ Ok(e) => Either::First(f(e).into_future()),\n \/\/ Err(e) => Either::Second(failed(e)),\n \/\/ }\n \/\/ })\n \/\/ ```\n fn and_then<F, B>(self, f: F) -> AndThen<Self, B, F>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = 
Self::Error>,\n Self: Sized,\n {\n and_then::new(self, f)\n }\n\n \/\/ TODO: compare this to\n \/\/ ```\n \/\/ .then(|res| {\n \/\/ match res {\n \/\/ Ok(e) => Either::First(finished(e)),\n \/\/ Err(e) => Either::Second(f(e).into_future()),\n \/\/ }\n \/\/ })\n \/\/ ```\n fn or_else<F, B>(self, f: F) -> OrElse<Self, B, F>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n or_else::new(self, f)\n }\n\n fn select<B>(self, other: B) -> Select<Self, B::Future>\n where B: IntoFuture<Item=Self::Item, Error=Self::Error>,\n Self: Sized,\n {\n select::new(self, other.into_future())\n }\n\n fn join<B>(self, other: B) -> Join<Self, B::Future>\n where B: IntoFuture<Error=Self::Error>,\n Self: Sized,\n {\n join::new(self, other.into_future())\n }\n\n \/\/ TODO: check this is the same as `and_then(|x| x)`\n fn flatten(self) -> Flatten<Self>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n flatten::new(self)\n }\n}\n\npub trait Callback<T, E>: Send + 'static {\n fn call(self: Box<Self>, result: PollResult<T, E>);\n}\n\nimpl<F, T, E> Callback<T, E> for F\n where F: FnOnce(PollResult<T, E>) + Send + 'static\n{\n fn call(self: Box<F>, result: PollResult<T, E>) {\n (*self)(result)\n }\n}\n\npub trait IntoFuture: Send + 'static {\n type Future: Future<Item=Self::Item, Error=Self::Error>;\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn into_future(self) -> Self::Future;\n}\n\nimpl<F: Future> IntoFuture for F {\n type Future = F;\n type Item = F::Item;\n type Error = F::Error;\n\n fn into_future(self) -> F {\n self\n }\n}\n<commit_msg>Hey look, you can implement almost everything with then()<commit_after>\/\/ pub mod bufstream;\n\/\/ mod buf;\n\/\/ pub mod channel;\n\/\/ pub mod mio;\n\/\/ pub mod stream;\nmod cell;\nmod slot;\nmod util;\n\nmod error;\npub use error::{PollError, PollResult, FutureError, FutureResult};\n\n\/\/ Primitive futures\nmod collect;\nmod done;\nmod empty;\nmod failed;\nmod finished;\nmod lazy;\nmod promise;\npub use collect::{collect, Collect};\npub use done::{done, Done};\npub use empty::{empty, Empty};\npub use failed::{failed, Failed};\npub use finished::{finished, Finished};\npub use lazy::{lazy, Lazy};\npub use promise::{promise, Promise, Complete};\n\n\/\/ combinators\nmod and_then;\nmod chain;\nmod flatten;\nmod impls;\nmod join;\nmod map;\nmod map_err;\nmod or_else;\nmod select;\nmod then;\npub use and_then::AndThen;\npub use flatten::Flatten;\npub use join::Join;\npub use map::Map;\npub use map_err::MapErr;\npub use or_else::OrElse;\npub use select::Select;\npub use then::Then;\n\n\/\/ streams\n\/\/ pub mod stream;\n\n\/\/ TODO: Send + 'static is annoying, but required by cancel and_then, document\n\/\/ TODO: not object safe\n\/\/\n\/\/ FINISH CONDITIONS\n\/\/ - poll() return Some\n\/\/ - await() is called\n\/\/ - schedule() is called\n\/\/ - schedule_boxed() is called\n\/\/\n\/\/ BAD:\n\/\/ - doing any finish condition after an already called finish condition\n\/\/\n\/\/ WHAT HAPPENS\n\/\/ - panic?\npub trait Future: Send + 'static {\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn poll(&mut self) -> Option<PollResult<Self::Item, Self::Error>>;\n\n \/\/ TODO: why is this not drop()\n \/\/ well what if you schedule() then drop, HUH?!\n fn cancel(&mut self);\n\n fn schedule<F>(&mut self, f: F)\n where F: FnOnce(PollResult<Self::Item, Self::Error>) + Send + 'static,\n Self: Sized;\n\n fn 
schedule_boxed(&mut self, f: Box<Callback<Self::Item, Self::Error>>);\n\n \/\/ TODO: why can't this be in this lib?\n \/\/ fn await(&mut self) -> FutureResult<Self::Item, Self::Error>;\n\n fn boxed(self) -> Box<Future<Item=Self::Item, Error=Self::Error>>\n where Self: Sized\n {\n Box::new(self)\n }\n\n fn map<F, U>(self, f: F) -> Map<Self, F>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n assert_future::<U, Self::Error, _>(map::new(self, f))\n }\n\n fn map2<F, U>(self, f: F) -> Box<Future<Item=U, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> U + Send + 'static,\n U: Send + 'static,\n Self: Sized,\n {\n self.then(|r| r.map(f)).boxed()\n }\n\n fn map_err<F, E>(self, f: F) -> MapErr<Self, F>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n assert_future::<Self::Item, E, _>(map_err::new(self, f))\n }\n\n fn map_err2<F, E>(self, f: F) -> Box<Future<Item=Self::Item, Error=E>>\n where F: FnOnce(Self::Error) -> E + Send + 'static,\n E: Send + 'static,\n Self: Sized,\n {\n self.then(|res| res.map_err(f)).boxed()\n }\n\n fn then<F, B>(self, f: F) -> Then<Self, B, F>\n where F: FnOnce(Result<Self::Item, Self::Error>) -> B + Send + 'static,\n B: IntoFuture,\n Self: Sized,\n {\n assert_future::<B::Item, B::Error, _>(then::new(self, f))\n }\n\n fn and_then<F, B>(self, f: F) -> AndThen<Self, B, F>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n assert_future::<B::Item, Self::Error, _>(and_then::new(self, f))\n }\n\n fn and_then2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=Self::Error>>\n where F: FnOnce(Self::Item) -> B + Send + 'static,\n B: IntoFuture<Error = Self::Error>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => f(e).into_future().boxed(),\n Err(e) => failed(e).boxed(),\n }\n }).boxed()\n }\n\n fn or_else<F, B>(self, f: F) -> OrElse<Self, B, F>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n assert_future::<Self::Item, B::Error, _>(or_else::new(self, f))\n }\n\n fn or_else2<F, B>(self, f: F) -> Box<Future<Item=B::Item, Error=B::Error>>\n where F: FnOnce(Self::Error) -> B + Send + 'static,\n B: IntoFuture<Item = Self::Item>,\n Self: Sized,\n {\n self.then(|res| {\n match res {\n Ok(e) => finished(e).boxed(),\n Err(e) => f(e).into_future().boxed(),\n }\n }).boxed()\n }\n\n fn select<B>(self, other: B) -> Select<Self, B::Future>\n where B: IntoFuture<Item=Self::Item, Error=Self::Error>,\n Self: Sized,\n {\n let f = select::new(self, other.into_future());\n assert_future::<Self::Item, Self::Error, _>(f)\n }\n\n fn join<B>(self, other: B) -> Join<Self, B::Future>\n where B: IntoFuture<Error=Self::Error>,\n Self: Sized,\n {\n let f = join::new(self, other.into_future());\n assert_future::<(Self::Item, B::Item), Self::Error, _>(f)\n }\n\n fn flatten(self) -> Flatten<Self>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n let f = flatten::new(self);\n assert_future::<<<Self as Future>::Item as IntoFuture>::Item,\n <<Self as Future>::Item as IntoFuture>::Error,\n _>(f)\n }\n\n fn flatten2(self) -> Box<Future<Item=<<Self as Future>::Item as IntoFuture>::Item,\n Error=<<Self as Future>::Item as IntoFuture>::Error>>\n where Self::Item: IntoFuture,\n <<Self as Future>::Item as IntoFuture>::Error:\n From<<Self as Future>::Error>,\n Self: Sized\n {\n self.then(|res| {\n match 
res {\n Ok(e) => e.into_future().boxed(),\n Err(e) => failed(From::from(e)).boxed(),\n }\n })\n }\n}\n\nfn assert_future<A, B, F>(t: F) -> F\n where F: Future<Item=A, Error=B>,\n A: Send + 'static,\n B: Send + 'static,\n{\n t\n}\n\npub trait Callback<T, E>: Send + 'static {\n fn call(self: Box<Self>, result: PollResult<T, E>);\n}\n\nimpl<F, T, E> Callback<T, E> for F\n where F: FnOnce(PollResult<T, E>) + Send + 'static\n{\n fn call(self: Box<F>, result: PollResult<T, E>) {\n (*self)(result)\n }\n}\n\npub trait IntoFuture: Send + 'static {\n type Future: Future<Item=Self::Item, Error=Self::Error>;\n type Item: Send + 'static;\n type Error: Send + 'static;\n\n fn into_future(self) -> Self::Future;\n}\n\nimpl<F: Future> IntoFuture for F {\n type Future = F;\n type Item = F::Item;\n type Error = F::Error;\n\n fn into_future(self) -> F {\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Re-enable lints<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename some things<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Digit slicing<commit_after>use std::slice;\nuse std::mem;\nuse num::bigint::{BigUint, BigDigit};\n\n\/\/\/ Return a little-endian byte slice corresponding to an in-memory unsigned\n\/\/\/ integer value.\n\/\/\/\n\/\/\/ Will fail horribly if your hardware is not little-endian.\npub trait DigitSlice {\n fn as_digits<'a>(&'a self) -> &'a [u8];\n}\n\nimpl DigitSlice for BigUint {\n fn as_digits<'a>(&'a self) -> &'a [u8] {\n let n_bytes = (self.bits() + 7) \/ 8;\n\n unsafe {\n let ptr = mem::transmute::<&BigUint, &Vec<BigDigit>>(self)\n .as_ptr() as *const u8;\n slice::from_raw_parts(ptr, n_bytes)\n }\n }\n}\n\n\nmacro_rules! primitive_impl {\n ($t:ty) => {\n impl DigitSlice for $t {\n fn as_digits<'a>(&'a self) -> &'a[u8] {\n let n_bytes = (mem::size_of::<$t>() * 8 - self.leading_zeros() as usize + 7) \/ 8;\n\n unsafe {\n let ptr: *const u8 = mem::transmute(self);\n slice::from_raw_parts(ptr, n_bytes)\n }\n }\n }\n }\n}\n\nprimitive_impl!(u8);\nprimitive_impl!(u16);\nprimitive_impl!(u32);\nprimitive_impl!(u64);\nprimitive_impl!(usize);\n\n#[cfg(test)]\nmod tests {\n use num::{BigUint, FromPrimitive};\n use super::DigitSlice;\n\n #[test]\n fn test_slice() {\n assert_eq!(0u8.as_digits(), &[]);\n assert_eq!(1u8.as_digits(), &[1]);\n\n assert_eq!(BigUint::from_u32(12345).unwrap().as_digits(),\n 12345u32.as_digits());\n\n assert_eq!(256u32.as_digits(), &[0, 1]);\n\n assert_eq!(BigUint::parse_bytes(b\"112233445566778899\", 16)\n .unwrap()\n .as_digits(),\n &[0x99, 0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make (most) struct fields public.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Camera.view_matrix()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>actually added texture class source code file<commit_after>use std::path::{Path, PathBuf};\nuse std::collections::HashMap;\nuse opengl_graphics::Texture;\n\npub struct TextureCache {\n cache : HashMap<String, Texture>,\n base_path : PathBuf\n}\n\nimpl TextureCache {\n pub fn new(path : String) -> TextureCache {\n TextureCache {\n cache : HashMap::new(),\n base_path : Path::new(&path).to_path_buf()\n }\n }\n\n pub fn get_asset(&mut self, key : &String) -> &Texture {\n if !self.cache.contains_key(key) {\n self.load_asset(key);\n }\n self.cache.get(key).unwrap()\n }\n\n fn load_asset(&mut self, key : &String) {\n let full_path = self.base_path.join(key);\n let texture = 
Texture::from_path(full_path);\n\n match texture {\n Ok(res) => {\n self.cache.insert(key.clone(), res);\n },\n Err(err) => panic!(\"attempted to load texture {:?} from path {:?}, got error {:?}\", key, self.base_path.to_str(), err)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ui test of improper suffix on array len<commit_after>#[cxx::bridge]\nmod ffi {\n unsafe extern \"C++\" {\n fn array() -> [String; 12u16];\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Day 4 in rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add wget (WIP)<commit_after>use std::env;\nuse std::fs::File;\nuse std::io::{stderr, Read, Write};\nuse std::process;\nuse std::str;\n\nfn main() {\n if let Some(url) = env::args().nth(1) {\n let (scheme, reference) = url.split_at(url.find(':').unwrap_or(0));\n if scheme == \"http\" {\n let mut parts = reference.split('\/').skip(2); \/\/skip first two slashes\n let remote = parts.next().unwrap_or(\"\");\n let path = parts.next().unwrap_or(\"\/\");\n\n let mut remote_parts = remote.split(':');\n let host = remote_parts.next().unwrap_or(\"127.0.0.1\");\n let port = remote_parts.next().unwrap_or(\"80\");\n\n let tcp = format!(\"tcp:{}:{}\", host, port);\n write!(stderr(), \"{}\\n\", tcp).unwrap();\n\n let mut stream = File::open(tcp).unwrap();\n write!(stream, \"GET {} HTTP\/1.0\\r\\n\\r\\n\", path).unwrap();\n stream.flush().unwrap();\n\n let mut bytes = [0; 65536];\n let count = stream.read(&mut bytes).unwrap();\n\n println!(\"{}\", unsafe { str::from_utf8_unchecked(&bytes[.. count]) });\n } else {\n write!(stderr(), \"wget: unknown scheme '{}'\\n\", scheme).unwrap();\n process::exit(1);\n }\n } else {\n write!(stderr(), \"wget: http:\/\/host:port\/path\\n\").unwrap();\n process::exit(1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a bitfield type (thanks to @danopia)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Documentação dos comandos<commit_after>\/\/! Esse modulo serve somente para conter informações sobre os comandos\n\nuse parser::kw::*;\n\npub fn doc_move() -> String {\n format!(\"Comando:\\n\\t \\\"{0}\\\".\\nArgumentos:\\n\\t Variavel, Valor.\\nExemplo:\\n\\t {0}: A, \\\n B\\nDescrição:\\n\\tEsse comando copia o conteudo de B para a variavel A. 
B pode ser \\\n um valor ou uma variavel (seu valor é usado)\",\n KW_MOVE)\n}\n\npub fn doc_clear() -> String {\n format!(\"Comando:\\n\\t \\\"{0}\\\".\\nArgumentos:\\n\\t Variavel.\\nExemplo:\\n\\t {0}: \\\n A\\nDescrição:\\n\\tEsse comando limpa o valor da variavel A\",\n KW_CLEAR)\n}\n\npub fn doc_xor() -> String {\n format!(\"Comando:\\n\\t \\\"{0}\\\".\\nArgumentos:\\n\\t Variavel, Valor.\\nExemplo:\\n\\t {0}: A, \\\n B\\nDescrição:\\n\\tEsse comando faz a operação binária XOR (eXclusive OR) na variavel \\\n A com o valor B.\",\n KW_XOR)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a failing case for the producers that I can't work out<commit_after>#[macro_use]\nextern crate nom;\nextern crate pcapng;\n\nuse std::env;\nuse nom::{FileProducer,MemProducer,Producer};\nuse nom::{ConsumerState,Consumer};\nuse nom::{IResult};\nuse pcapng::block::{RawBlock,parse_blocks,parse_block};\n\nstruct DebugConsumer<'a> {\n pub blocks: Vec<RawBlock<'a>>,\n}\n\nimpl<'a> Consumer for DebugConsumer<'a> {\n fn consume(&mut self, input: &[u8]) -> ConsumerState {\n println!(\"Consum called\");\n \/\/ if let IResult::Done(_, blocks) = pcapng::block::parse_blocks(input) {\n match pcapng::block::parse_blocks(input) {\n IResult::Done(_, blocks) => {\n for i in blocks {\n println!(\"consuming: {:?}\", i);\n \/\/ self.blocks.push(i);\n }\n }\n IResult::Error(e) => panic!(\"Error: {:?}\", e),\n IResult::Incomplete(_) => panic!(\"Incomplete\"),\n }\n ConsumerState::ConsumerDone\n }\n\n fn end(&mut self) {\n println!(\"Done!\");\n }\n}\n\nnamed!(printer,\n chain!(\n block: parse_block ,\n ||{\n println!(\"Got a blocks\");\n \/\/ for i in blocks {\n \/\/ println!(\"{:?}\", i);\n \/\/ }\n println!(\"{:?}\", block);\n &[]\n }\n ));\n\npusher!(print, printer);\n\n\n\nfn main() {\n let args: Vec<_> = env::args().collect();\n if args.len() != 2 {\n println!(\"Usage: {} <foo.pcapng>\", args[0]);\n return;\n }\n\n \/\/ This works, and prints my packet\n let input = b\"\\n\\r\\r\\n\\x1c\\x00\\x00\\x00M<+\\x1a\\x01\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\";\n match pcapng::block::parse_blocks(input) {\n IResult::Done(_, blocks) => {\n for i in blocks {\n println!(\"consuming: {:?}\", i);\n \/\/ self.blocks.push(i);\n }\n }\n IResult::Error(e) => panic!(\"Error: {:?}\", e),\n IResult::Incomplete(_) => panic!(\"Incomplete\"),\n }\n\n \/\/ This explodes, hitting the Error case in debug consumer\n let mut producer = MemProducer::new(b\"\\n\\r\\r\\n\\x1c\\x00\\x00\\x00M<+\\x1a\\x01\\x00\\x00\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x1c\\x00\\x00\\x00\", 8);\n let mut c = DebugConsumer { blocks: vec![] };\n println!(\"running memproducer\");\n c.run(&mut producer);\n for i in c.blocks {\n \/\/ println!(\"{:?}\", i.parse());\n println!(\"{:?}\", i);\n }\n\n \/\/ As does this\n let mut producer = FileProducer::new(&args[1][..], 64).unwrap();\n let mut c = DebugConsumer { blocks: vec![] };\n println!(\"Running fileproducer\");\n c.run(&mut producer);\n for i in c.blocks {\n \/\/ println!(\"{:?}\", i.parse());\n println!(\"{:?}\", i);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Keep track of emulated cycles, not ns<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>#[allow(unused_variables)]\n#[allow(unused_assignments)]\nfn main() {\n let mut x: i32 = 1;\n x = 7;\n let x = x;\n println!(\"{}\", x);\n\n let y = 4;\n let y = \"I can also be bound to text!\";\n println!(\"{}\", y);\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n print_number(5);\n}\n\nfn print_number(x: i32) {\n println!(\"x is {}\", x);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Fasta reading and writing.\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\n\n\npub struct FastaReader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String\n}\n\n\nimpl<R: io::Read> FastaReader<R> {\n \/\/\/ Create a new FastQ reader.\n pub fn new(reader: R) -> Self {\n FastaReader { reader: io::BufReader::new(reader), line: String::new() }\n }\n\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with(\">\") {\n return Err(io::Error::new(\n io::ErrorKind::Other,\n \"Expected > at record start.\",\n None,\n ));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n record.seq.push_str(&self.line.trim_right());\n if self.line.is_empty() || self.line.starts_with(\">\") {\n break;\n }\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\nimpl<R: io::Read + io::Seek> FastaReader<R> {\n \/\/\/ Seek to a given offset. Intended for internal use by IndexedFastaReader.\n fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {\n self.reader.get_mut().seek(pos)\n }\n}\n\n\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n pub fn new() -> Self {\n Record { header: String::new(), seq: String::new() }\n }\n\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].words().next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Vec<&str> {\n self.header[1..].words().skip(1).collect()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> &[u8] {\n self.seq.as_bytes()\n }\n\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: FastaReader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err))\n }\n }\n}\n\n\n\/\/\/ A Fasta writer.\npub struct FastaWriter<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl<W: io::Write> FastaWriter<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n FastaWriter { writer: io::BufWriter::new(writer) }\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> 
io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), &record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given values.\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ * `id` - the record id\n \/\/\/ * `desc` - the optional descriptions\n \/\/\/ * `seq` - the sequence\n pub fn write(&mut self, id: &str, desc: &[&str], seq: &[u8]) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if !desc.is_empty() {\n for d in desc {\n try!(self.writer.write(b\" \"));\n try!(self.writer.write(d.as_bytes()));\n }\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n\";\n\n #[test]\n fn test_reader() {\n let reader = FastaReader::new(FASTA_FILE);\n let records: Vec<io::Result<Record>> = reader.records().collect();\n assert!(records.len() == 1);\n for res in records {\n let record = res.ok().unwrap();\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), Some(\"id\"));\n assert_eq!(record.desc(), [\"desc\"]);\n assert_eq!(record.seq(), b\"ACCGTAGGCTGA\");\n }\n }\n\n #[test]\n fn test_writer() {\n let mut writer = FastaWriter::new(Vec::new());\n writer.write(\"id\", &[\"desc\"], b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n assert_eq!(writer.writer.get_ref(), &FASTA_FILE);\n }\n}\n<commit_msg>Temporarily allow dead code.<commit_after>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Fasta reading and writing.\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\n\n\npub struct FastaReader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String\n}\n\n\nimpl<R: io::Read> FastaReader<R> {\n \/\/\/ Create a new FastQ reader.\n pub fn new(reader: R) -> Self {\n FastaReader { reader: io::BufReader::new(reader), line: String::new() }\n }\n\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with(\">\") {\n return Err(io::Error::new(\n io::ErrorKind::Other,\n \"Expected > at record start.\",\n None,\n ));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n record.seq.push_str(&self.line.trim_right());\n if self.line.is_empty() || self.line.starts_with(\">\") {\n break;\n }\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\nimpl<R: io::Read + io::Seek> FastaReader<R> {\n \/\/\/ Seek to a given offset. 
Intended for internal use by IndexedFastaReader.\n #[allow(dead_code)]\n fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {\n self.reader.get_mut().seek(pos)\n }\n}\n\n\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n pub fn new() -> Self {\n Record { header: String::new(), seq: String::new() }\n }\n\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].words().next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Vec<&str> {\n self.header[1..].words().skip(1).collect()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> &[u8] {\n self.seq.as_bytes()\n }\n\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: FastaReader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err))\n }\n }\n}\n\n\n\/\/\/ A Fasta writer.\npub struct FastaWriter<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl<W: io::Write> FastaWriter<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n FastaWriter { writer: io::BufWriter::new(writer) }\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), &record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given values.\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ * `id` - the record id\n \/\/\/ * `desc` - the optional descriptions\n \/\/\/ * `seq` - the sequence\n pub fn write(&mut self, id: &str, desc: &[&str], seq: &[u8]) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if !desc.is_empty() {\n for d in desc {\n try!(self.writer.write(b\" \"));\n try!(self.writer.write(d.as_bytes()));\n }\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n\";\n\n #[test]\n fn test_reader() {\n let reader = FastaReader::new(FASTA_FILE);\n let records: Vec<io::Result<Record>> = reader.records().collect();\n assert!(records.len() == 1);\n for res in records {\n let record = res.ok().unwrap();\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), Some(\"id\"));\n assert_eq!(record.desc(), [\"desc\"]);\n assert_eq!(record.seq(), b\"ACCGTAGGCTGA\");\n }\n }\n\n #[test]\n fn test_writer() {\n let mut writer = FastaWriter::new(Vec::new());\n writer.write(\"id\", &[\"desc\"], b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n 
assert_eq!(writer.writer.get_ref(), &FASTA_FILE);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse libimagerror::trace::MapErrTrace;\nuse libimagutil::debug_result::DebugResult;\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, String, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map_dbg_err_str(\"Error while importing task\")\n .map_err_dbg_trace()\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, line, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. 
The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map_dbg_err_str(\"Error while importing task\")\n .map_err_dbg_trace()\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .and_then(|store_id| store.get(store_id))\n .map(|o| o.map(Task::new))\n .map_err_into(TodoErrorKind::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::retrieve_from_string(store, line)\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map_dbg_err_str(\"Error while importing task\")\n .map_err_dbg_trace()\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_imports<R: BufRead>(store: &Store, r: R) -> Result<()> {\n use serde_json::ser::to_string as serde_to_string;\n use task_hookrs::status::TaskStatus;\n\n for (counter, res_ttask) in import_tasks(r).into_iter().enumerate() {\n match res_ttask {\n Ok(ttask) => {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_to_string(&ttask).map_err_into(TodoErrorKind::ImportError) {\n \/\/ use println!() here, as we talk with TW\n Ok(val) => println!(\"{}\", val),\n Err(e) => return Err(e),\n }\n\n \/\/ Taskwarrior does not have the concept of deleted tasks, but only modified\n \/\/ ones.\n \/\/\n \/\/ Here we check if the status of a task is deleted and if yes, we delete it\n \/\/ from the store.\n if *ttask.status() == TaskStatus::Deleted {\n match Task::delete_by_uuid(store, *ttask.uuid()) {\n Ok(_) => info!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => return Err(e),\n }\n }\n } \/\/ end if c % 2\n },\n Err(e) => return Err(e).map_err_into(TodoErrorKind::ImportError),\n }\n }\n Ok(())\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .and_then(|id| store.delete(id))\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n use toml_query::read::TomlValueReadExt;\n use toml_query::set::TomlValueSetExt;\n use libimagerror::into::IntoError;\n\n \/\/ Helper for toml_query::read::TomlValueReadExt::read() return value, which does only\n \/\/ return Result<T> instead of Result<Option<T>>, which is a real inconvenience.\n \/\/\n let no_identifier = |e: &::toml_query::error::Error| -> bool {\n is_match!(e.kind(), &::toml_query::error::ErrorKind::IdentifierNotFoundInDocument(_))\n };\n\n let uuid = self.uuid();\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .map_err_into(TodoErrorKind::StoreIdError)\n .and_then(|id| {\n store.retrieve(id)\n .map_err_into(TodoErrorKind::StoreError)\n .and_then(|mut fle| {\n {\n let mut hdr = fle.get_header_mut();\n let todo_query = String::from(\"todo\");\n\n if let Err(e) = hdr.read(&todo_query) {\n if no_identifier(&e) {\n try!(hdr\n .set(&String::from(\"todo\"), Value::Table(BTreeMap::new()))\n .map_err_into(TodoErrorKind::StoreError));\n } else {\n let e = Box::new(e);\n return Err(TodoErrorKind::StoreError.into_error_with_cause(e))\n }\n }\n\n try!(hdr.set(&String::from(\"todo.uuid\"),\n 
Value::String(format!(\"{}\", uuid)))\n .map_err_into(TodoErrorKind::StoreError));\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n })\n })\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<commit_msg>Cleanup codebase to be more readable<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoErrorKind as TEK, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, String, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TEK::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, line, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. 
That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n where R: BufRead\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TEK::ImportError)\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .and_then(|store_id| store.get(store_id))\n .map(|o| o.map(Task::new))\n .map_err_into(TEK::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::retrieve_from_string(store, line)\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TEK::ImportError)\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_imports<R: BufRead>(store: &Store, r: R) -> Result<()> {\n use serde_json::ser::to_string as serde_to_string;\n use task_hookrs::status::TaskStatus;\n\n for (counter, res_ttask) in import_tasks(r).into_iter().enumerate() {\n match res_ttask {\n Ok(ttask) => {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_to_string(&ttask).map_err_into(TEK::ImportError) {\n \/\/ use println!() here, as we talk with TW\n Ok(val) => println!(\"{}\", val),\n Err(e) => return Err(e),\n }\n\n \/\/ Taskwarrior does not have the concept of deleted tasks, but only modified\n \/\/ ones.\n \/\/\n \/\/ Here we check if the status of a task is deleted and if yes, we delete it\n \/\/ from the store.\n if *ttask.status() == TaskStatus::Deleted {\n match Task::delete_by_uuid(store, *ttask.uuid()) {\n Ok(_) => info!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => return Err(e),\n }\n }\n } \/\/ end if c % 2\n },\n Err(e) => return Err(e).map_err_into(TEK::ImportError),\n }\n }\n Ok(())\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .and_then(|id| store.delete(id))\n .map_err_into(TEK::StoreError)\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err_into(TEK::StoreError)\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n use toml_query::read::TomlValueReadExt;\n use toml_query::set::TomlValueSetExt;\n\n \/\/ Helper for toml_query::read::TomlValueReadExt::read() return value, which does only\n \/\/ return Result<T> instead of Result<Option<T>>, which is a real inconvenience.\n \/\/\n let no_identifier = |e: &::toml_query::error::Error| -> bool {\n is_match!(e.kind(), &::toml_query::error::ErrorKind::IdentifierNotFoundInDocument(_))\n };\n\n let uuid = self.uuid();\n ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid))\n .into_storeid()\n .map_err_into(TEK::StoreIdError)\n .and_then(|id| {\n store.retrieve(id)\n .map_err_into(TEK::StoreError)\n .and_then(|mut fle| {\n {\n let mut hdr = fle.get_header_mut();\n if try!(hdr.read(\"todo\").map_err_into(TEK::StoreError)).is_none() {\n try!(hdr\n .set(\"todo\", Value::Table(BTreeMap::new()))\n .map_err_into(TEK::StoreError));\n }\n\n try!(hdr.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid)))\n .map_err_into(TEK::StoreError));\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n })\n })\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, 
id: StoreId) -> Result<Task<'b>> {\n store.retrieve(id)\n .map_err_into(TEK::StoreError)\n .map(Task::new)\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse rustc_serialize::Decodable;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse rustc_serialize::json::{\n self,\n Decoder,\n Json,\n};\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, Default, PartialEq)]\npub struct Claims {\n pub reg: Registered,\n pub private: BTreeMap<String, Json>,\n}\n\n#[derive(Debug, Default, PartialEq, RustcDecodable, RustcEncodable)]\npub struct Registered {\n pub iss: Option<String>,\n pub sub: Option<String>,\n pub aud: Option<String>,\n pub exp: Option<u64>,\n pub nbf: Option<u64>,\n pub iat: Option<u64>,\n pub jti: Option<String>,\n}\n\nimpl Claims {\n pub fn new(reg: Registered) -> Claims {\n Claims {\n reg: reg,\n private: BTreeMap::new(),\n }\n }\n\n pub fn parse(raw: &str) -> Result<Claims, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n const FIELDS: [&'static str; 7] = [\n \"iss\", \"sub\", \"aud\",\n \"exp\", \"nbf\", \"iat\",\n \"jti\",\n ];\n\n let (reg, pri): (BTreeMap<_, _>, BTreeMap<_, _>) = tree.into_iter()\n .partition(|&(ref key, _)| {\n FIELDS.iter().any(|f| f == key)\n });\n\n let mut decoder = Decoder::new(Json::Object(reg));\n let reg_claims: Registered = try!(Decodable::decode(&mut decoder));\n\n Ok(Claims{\n reg: reg_claims,\n private: pri,\n })\n }\n\n pub fn encode(&self) -> Result<String, Error> {\n \/\/ Extremely inefficient\n let s = try!(json::encode(&self.reg));\n let mut tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n tree.extend(self.private.clone());\n\n let s = try!(json::encode(&tree));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::default::Default;\n use claims::Claims;\n\n #[test]\n fn parse() {\n let enc = \"ew0KICAiaXNzIjogIm1pa2t5YW5nLmNvbSIsDQogICJleHAiOiAxMzAyMzE5MTAwLA0KICAibmFtZSI6ICJNaWNoYWVsIFlhbmciLA0KICAiYWRtaW4iOiB0cnVlDQp9\";\n let claims = Claims::parse(enc).unwrap();\n\n assert_eq!(claims.reg.iss.unwrap(), \"mikkyang.com\");\n assert_eq!(claims.reg.exp.unwrap(), 1302319100);\n }\n\n #[test]\n fn roundtrip() {\n let mut claims: Claims = Default::default();\n claims.reg.iss = Some(\"mikkyang.com\".into());\n claims.reg.exp = Some(1302319100);\n let enc = claims.encode().unwrap();\n assert_eq!(claims, Claims::parse(&*enc).unwrap());\n }\n}\n<commit_msg>Add documentation<commit_after>use std::collections::BTreeMap;\nuse rustc_serialize::Decodable;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse rustc_serialize::json::{\n self,\n Decoder,\n Json,\n};\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, Default, PartialEq)]\npub struct Claims {\n pub reg: Registered,\n pub private: BTreeMap<String, Json>,\n}\n\n#[derive(Debug, Default, PartialEq, 
RustcDecodable, RustcEncodable)]\npub struct Registered {\n pub iss: Option<String>,\n pub sub: Option<String>,\n pub aud: Option<String>,\n pub exp: Option<u64>,\n pub nbf: Option<u64>,\n pub iat: Option<u64>,\n pub jti: Option<String>,\n}\n\n\/\/\/ JWT Claims. Registered claims are directly accessible via the `Registered`\n\/\/\/ struct embedded, while private fields are a map that contains `Json`\n\/\/\/ values.\nimpl Claims {\n pub fn new(reg: Registered) -> Claims {\n Claims {\n reg: reg,\n private: BTreeMap::new(),\n }\n }\n\n pub fn parse(raw: &str) -> Result<Claims, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n const FIELDS: [&'static str; 7] = [\n \"iss\", \"sub\", \"aud\",\n \"exp\", \"nbf\", \"iat\",\n \"jti\",\n ];\n\n let (reg, pri): (BTreeMap<_, _>, BTreeMap<_, _>) = tree.into_iter()\n .partition(|&(ref key, _)| {\n FIELDS.iter().any(|f| f == key)\n });\n\n let mut decoder = Decoder::new(Json::Object(reg));\n let reg_claims: Registered = try!(Decodable::decode(&mut decoder));\n\n Ok(Claims{\n reg: reg_claims,\n private: pri,\n })\n }\n\n pub fn encode(&self) -> Result<String, Error> {\n \/\/ Extremely inefficient\n let s = try!(json::encode(&self.reg));\n let mut tree = match try!(Json::from_str(&*s)) {\n Json::Object(x) => x,\n _ => return Err(Error::Format),\n };\n\n tree.extend(self.private.clone());\n\n let s = try!(json::encode(&tree));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::default::Default;\n use claims::Claims;\n\n #[test]\n fn parse() {\n let enc = \"ew0KICAiaXNzIjogIm1pa2t5YW5nLmNvbSIsDQogICJleHAiOiAxMzAyMzE5MTAwLA0KICAibmFtZSI6ICJNaWNoYWVsIFlhbmciLA0KICAiYWRtaW4iOiB0cnVlDQp9\";\n let claims = Claims::parse(enc).unwrap();\n\n assert_eq!(claims.reg.iss.unwrap(), \"mikkyang.com\");\n assert_eq!(claims.reg.exp.unwrap(), 1302319100);\n }\n\n #[test]\n fn roundtrip() {\n let mut claims: Claims = Default::default();\n claims.reg.iss = Some(\"mikkyang.com\".into());\n claims.reg.exp = Some(1302319100);\n let enc = claims.encode().unwrap();\n assert_eq!(claims, Claims::parse(&*enc).unwrap());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add message assembling.<commit_after>\/\/! Construction of DNS messages for sending\n\/\/!\n\nuse std::collections::HashMap;\nuse std::io;\nuse std::mem;\nuse std::net;\nuse std::ptr;\nuse super::header::Header;\nuse super::name;\nuse super::record::{RecordData, RecordDataAssembly};\n\n\/\/------------ Assembly -----------------------------------------------------\n\n\/\/\/ A type to assemble an outgoing DNS message.\n\/\/\/\n\/\/\/ When assembling messages you sadly need to know what is going on later\n\/\/\/ or come back to parts written earlier. The Assembly type does exactly\n\/\/\/ that.\n\/\/\/\n\/\/\/ Use this type directly if you already know what to put into the message\n\/\/\/ before starting to create it. Otherwise, use `Message` which allows you\n\/\/\/ put things together step by step.\n\/\/\/\n#[derive(Debug)]\npub struct Assembly {\n \/\/ The message's data.\n inner: Vec<u8>,\n\n \/\/ If we are writing to a stream, the message actually starts with two\n \/\/ octets for the length, but references for name pointers and such\n \/\/ are relative to after that. 
To make the math easier, we simply keep\n \/\/ the index of the real start of the message in here.\n origin: usize,\n\n \/\/ Maximum size of the resulting package (and thus, `self.inner`).\n \/\/\n \/\/ This is limited to 65535 due either to the maximum UDP datagram\n \/\/ size or the 16 bit length marker in a stream. We still use usize\n \/\/ here because with origin we can overrun the 16 bit boundary.\n maxlen: usize,\n\n \/\/ If we overrun our maxlen while writing, we set this flag. The\n \/\/ higher level methods know what to do in this case.\n tc: bool,\n\n \/\/ We need a copy of the header because we may have to modify the\n \/\/ tc bit when we are done.\n header: Header,\n\n \/\/ If we do compression, then we will store labels in this here map.\n \/\/ If we don't do compression, there is no map.\n compress: Option<HashMap<name::DomainNameBuf, u16>>,\n}\n\n\/\/ Management\n\/\/\nimpl Assembly {\n\n \/\/\/ Creates a new assembly.\n \/\/\/\n \/\/\/ The maximum package size is given in `maxlen`. If more data is\n \/\/\/ pushed to the assembly, it will be truncated at a record boundary\n \/\/\/ and the TC bit will be set in the header.\n \/\/\/\n \/\/\/ If the assembly is for a stream, set `for_stream` to true. In this\n \/\/\/ case the first two octets of the resulting data will be the size\n \/\/\/ of the message in network order.\n \/\/\/\n \/\/\/ Set `compress` to false, if you want domain names never to be\n \/\/\/ compressed.\n \/\/\/\n pub fn new(maxlen: u16, for_stream: bool, compress: bool) -> Assembly {\n let map = match compress {\n true => Some(HashMap::new()),\n false => None\n };\n if for_stream {\n let mut res = Assembly { inner: Vec::new(),\n maxlen: (maxlen as usize) + 2,\n origin: 2, tc: false,\n header: Header::new(),\n compress: map };\n res.push_u16(0);\n res\n }\n else {\n Assembly { inner: Vec::new(), maxlen: maxlen as usize, origin: 0,\n tc: false, header: Header::new(), compress: map }\n }\n }\n}\n\n\/\/ Low level writing -- use this in record data implementations.\n\/\/\nimpl Assembly {\n\n \/\/\/ Pushes a u8 to the end of the assembly.\n \/\/\/\n pub fn push_u8(&mut self, data: u8) {\n if self.keep_pushing(1) {\n self.inner.push(data)\n }\n }\n\n \/\/\/ Pushes a u16 to the end of the assembly.\n \/\/\/\n pub fn push_u16(&mut self, data: u16) {\n if self.keep_pushing(2) {\n let data = data.to_be();\n let bytes: [u8; 2] = unsafe { mem::transmute(data) };\n self.inner.extend(&bytes);\n }\n }\n\n \/\/\/ Pushes a u32 to the end of the assembly.\n \/\/\/\n pub fn push_u32(&mut self, data: u32) {\n if self.keep_pushing(4) {\n let data = data.to_be();\n let bytes: [u8; 4] = unsafe { mem::transmute(data) };\n self.inner.extend(&bytes);\n }\n }\n\n \/\/\/ Pushes `data` to the end of the assembly.\n \/\/\/\n pub fn push_bytes(&mut self, data: &[u8]) {\n if self.keep_pushing(data.len()) {\n self.inner.extend(data);\n }\n }\n\n \/\/\/ Pushes a domain name to the end of the assembly.\n \/\/\/\n \/\/\/ The domain name will not be compressed, even if compression is\n \/\/\/ enabled for the assembly. This is the default because RFC 3597\n \/\/\/ limits name compression to those record types intially defined in\n \/\/\/ RFC 1035. 
So, if you implement a `RecordDataAssembly`, you likely\n \/\/\/ want to use uncompressed name pushage.\n \/\/\/\n pub fn push_name(&mut self, name: &name::DomainName) {\n assert!(!name.is_empty());\n\n if self.compress.is_none() {\n self._push_name_simple(name);\n }\n else {\n self._push_name_uncompressed(name);\n }\n }\n\n \/\/\/ Pushes a domain name to the end of the assembly employing compression.\n \/\/\/\n \/\/\/ This is similar to `push_name()` except that name compression will\n \/\/\/ be used if it has been enabled for the assembly.\n \/\/\/\n pub fn push_name_compressed(&mut self, name: &name::DomainName) {\n assert!(!name.is_empty());\n\n if self.compress.is_none() {\n self._push_name_simple(name);\n }\n else {\n self._push_name_compressed(name);\n }\n }\n\n \/\/ Simply pushes `name`'s bytes. Luckily, our internal domain name\n \/\/ encoding is actually the wire format. Smart, huh?!\n fn _push_name_simple(&mut self, name: &name::DomainName) {\n if self.keep_pushing(name.as_bytes().len()) {\n self.push_bytes(name.as_bytes());\n }\n }\n\n \/\/ Pushes `name` storing the label positions for later referencing\n \/\/ if compression is enabled.\n fn _push_name_uncompressed(&mut self, name: &name::DomainName) {\n let mut name = name;\n while !name.is_empty() {\n name = self._push_label(name);\n }\n }\n\n \/\/ Pushes `name`, using a reference for the longest known tail.\n fn _push_name_compressed(&mut self, name: &name::DomainName) {\n let mut name = name;\n while !name.is_empty() {\n let pos;\n {\n pos = match self.compress {\n None => None,\n Some(ref map) => {\n match map.get(name) {\n None => None,\n Some(&pos) => Some(pos)\n }\n }\n }\n }\n match pos {\n None => name = self._push_label(name),\n Some(pos) => {\n if self.keep_pushing(2) {\n self.push_u16(pos | 0xC000);\n }\n return\n }\n }\n }\n }\n\n \/\/ Pushes the first label in `name`, store a reference to `name` for\n \/\/ later use in compression.\n fn _push_label<'a>(&mut self, name: &'a name::DomainName)\n -> &'a name::DomainName\n {\n let pos = self.pos();\n match self.compress {\n None => { },\n Some(ref mut map) => {\n map.insert(name.to_owned(), (pos - self.origin) as u16);\n }\n }\n let (head, tail) = name.split_first().unwrap();\n assert!(head.len() < 64);\n if self.keep_pushing(head.as_bytes().len() + 1) {\n self.push_u8(head.len() as u8);\n self.push_bytes(head.as_bytes());\n }\n tail\n }\n\n \/\/ Returns whether it is fine to push `len` octets.\n \/\/\n fn keep_pushing(&mut self, len: usize) -> bool {\n if self.inner.len() + len > self.maxlen {\n self.tc = true;\n false\n }\n else { true }\n }\n\n \/\/\/ Returns the current position of the assembly.\n \/\/\/\n \/\/\/ You must use the returned position for later use in the replace\n \/\/\/ methods only. Do not assign any other meaning to it.\n \/\/\/\n pub fn pos(&self) -> usize {\n self.inner.len()\n }\n\n \/\/\/ Replaces the u8 at position `pos` with `data`.\n \/\/\/\n pub fn replace_u8(&mut self, pos: usize, data: u8) {\n assert!(pos < self.inner.len());\n self.inner[pos] = data;\n }\n\n \/\/\/ Replaces the u16 beginning at position `pos` with `data`.\n \/\/\/\n pub fn replace_u16(&mut self, pos: usize, data: u16) {\n let data = data.to_be();\n assert!(pos + 1 < self.inner.len());\n unsafe {\n let src: [u8; 2] = mem::transmute(data);\n ptr::copy_nonoverlapping(&src.as_ptr(),\n &mut self.inner[pos .. 
pos + 2].as_ptr(),\n 2);\n }\n }\n\n \/\/\/ Replaces the u32 beginning at position `pos` with `data`.\n \/\/\/\n pub fn replace_u32(&mut self, pos: usize, data: u32) {\n let data = data.to_be();\n assert!(pos + 1 < self.inner.len());\n unsafe {\n let src: [u8; 4] = mem::transmute(data);\n ptr::copy_nonoverlapping(&src.as_ptr(),\n &mut self.inner[pos .. pos + 4].as_ptr(),\n 4);\n }\n }\n}\n\n\/\/ High-level writing\n\/\/\nimpl Assembly {\n \/\/\/ Pushes the message header to the assembly.\n \/\/\/\n \/\/\/ A call to this method must be the first thing on a new assembly.\n \/\/\/ Later there must not be any more calls to it on threat of a panic.\n \/\/\/\n \/\/\/ The arguments are the actual header, followed by the number of\n \/\/\/ elements in each seection. You must follow up this call with the\n \/\/\/ correct number of calls to `push_question()` and `push_rr()` or\n \/\/\/ `push_raw_rr()`. Finally, you must finish with calling `finish()`.\n \/\/\/\n pub fn push_header(&mut self, header: &Header, qdcount: u16, ancount: u16,\n nscount: u16, arcount: u16)\n {\n assert!(self.pos() == self.origin);\n self.header = header.clone();\n self.header.set_tc(false);\n let header_bytes = self.header.as_u32();\n self.push_u32(header_bytes);\n self.push_u16(qdcount);\n self.push_u16(ancount);\n self.push_u16(nscount);\n self.push_u16(arcount);\n }\n\n \/\/\/ Finishes the assembly.\n \/\/\/\n \/\/\/ This must be the last thing you do with the assembly. To make this\n \/\/\/ clear, the method moves the inner vec back to the caller. This\n \/\/\/ returned vec can be sent over the wire as is.\n \/\/\/\n pub fn finish(mut self) -> Vec<u8> {\n if self.origin == 2 {\n let len = (self.inner.len() - self.origin) as u16;\n self.replace_u16(0, len);\n }\n if self.tc {\n self.header.set_tc(true);\n let pos = self.origin;\n let header_bytes = self.header.as_u32();\n self.replace_u32(pos, header_bytes);\n }\n self.inner\n }\n\n \/\/\/ Pushes a question to the assembly.\n \/\/\/\n \/\/\/ If doing so would exceed the maximum length, nothing will be\n \/\/\/ written, the TC bit will be set in the header later, and false\n \/\/\/ will be returned now.\n \/\/\/\n \/\/\/ If all goes well, true will be returned.\n \/\/\/\n pub fn push_question(&mut self, qname: &name::DomainName, qtype: u16,\n qclass: u16) -> bool\n {\n if self.tc { return false }\n let pos = self.pos();\n self.push_name_compressed(qname);\n self.push_u16(qtype);\n self.push_u16(qclass);\n if self.tc {\n self.inner.truncate(pos);\n }\n !self.tc\n }\n\n \/\/\/ Pushes a raw resource record to the assembly.\n \/\/\/\n \/\/\/ For our purposes, a raw resource record is one for which the type\n \/\/\/ is explicitely given and record data is already given in its wire\n \/\/\/ format.\n \/\/\/\n \/\/\/ Returns true if the entire record could be pushed to the assembly.\n \/\/\/ If this would exceed the maximum length, nothing is pushed at all\n \/\/\/ and false is returned. 
Upon finishing the assembly, the TC bit will\n \/\/\/ be set in the header.\n \/\/\/\n pub fn push_raw_rr(&mut self, name: &name::DomainName, rtype: u16,\n rclass: u16, ttl: u32, rdata: &[u8]) -> bool\n {\n assert!(rdata.len() <= ::std::u16::MAX as usize); \n if self.tc { return false }\n let pos = self.pos();\n self.push_name_compressed(name);\n self.push_u16(rtype);\n self.push_u16(rclass);\n self.push_u32(ttl);\n self.push_u16(rdata.len() as u16);\n self.push_bytes(rdata);\n if self.tc {\n self.inner.truncate(pos);\n }\n !self.tc\n }\n\n \/\/\/ Pushes a resource record to the assembly.\n \/\/\/\n \/\/\/ The record data is being assembled by calling `assembly()` on the\n \/\/\/ trait object `rdata`. If pushing\n \/\/\/ the record would exceed the maximum length, it is removed again and\n \/\/\/ the TC bit will later be set in the header when the assembly is\n \/\/\/ being finished. The method returns whether this did not happen.\n \/\/\/\n pub fn push_rr(&mut self, name: &name::DomainName, rtype: u16,\n rclass: u16, ttl: u32,\n rdata: &RecordDataAssembly) -> bool\n {\n if self.tc { return false }\n let pos = self.pos();\n self.push_name_compressed(name);\n self.push_u16(rtype);\n self.push_u16(rclass);\n self.push_u32(ttl);\n let rdlen_pos = self.pos();\n self.push_u16(0);\n rdata.assemble(self);\n if self.tc {\n self.inner.truncate(pos);\n }\n else {\n let rdlen = self.pos() - rdlen_pos;\n self.replace_u16(rdlen_pos, rdlen as u16);\n }\n !self.tc\n }\n\n}\n\n\/\/------------ Message ------------------------------------------------------\n\n\/\/\/ An outgoing DNS message.\n\/\/\/\n\/\/\/ The message consists of four sections that are vecs over questions or\n\/\/\/ resource records, keeping their own data.\n\/\/\/\n\/\/\/ Once you have all data collected, you can have the actual wire data\n\/\/\/ assembled or even sent out directly.\n\/\/\/\n#[derive(Debug)]\npub struct Message {\n header: Header, \n question: QuestionSection,\n answer: RecordSection,\n authority: RecordSection,\n additional: RecordSection\n}\n\nimpl Message {\n \/\/\/ Creates a new, empty message.\n \/\/\/\n pub fn new() -> Message {\n Message { header: Header::new(), question: QuestionSection::new(),\n answer: RecordSection::new(),\n authority: RecordSection::new(),\n additional: RecordSection::new() }\n }\n\n \/\/\/ Gives access to the message header.\n \/\/\/\n pub fn header(&mut self) -> &mut Header {\n &mut self.header\n }\n\n \/\/\/ Gives access to the question section.\n \/\/\/\n \/\/\/ The question section contains the question being asked in a query.\n \/\/\/\n pub fn question(&mut self) -> &mut QuestionSection {\n &mut self.question\n }\n\n \/\/\/ Gives access to the zone section of an Update query.\n \/\/\/\n \/\/\/ In an update query, the question section is actually the zone\n \/\/\/ section specifing the zone to be updated.\n \/\/\/\n pub fn zone(&mut self) -> &mut QuestionSection {\n &mut self.question\n }\n\n \/\/\/ Gives access to the answer section.\n \/\/\/\n \/\/\/ The answer sections contains those resource records that answer\n \/\/\/ the question.\n \/\/\/\n \/\/\/ In an Update query, this \n \/\/\/\n pub fn answer(&mut self) -> &mut RecordSection {\n &mut self.answer\n }\n\n \/\/\/ Gives access to the prerequiste section of an Update query.\n \/\/\/\n \/\/\/ The prerequisite section contains resource records or resource\n \/\/\/ record sets that must or must not preexist.\n \/\/\/\n pub fn prerequisite(&mut self) -> &mut RecordSection {\n &mut self.answer\n }\n\n \/\/\/ Gives access to the 
authority section.\n \/\/\/\n \/\/\/ The authority section contains resource records identifying the\n \/\/\/ authoritative name servers responsible for answering the question.\n \/\/\/\n pub fn authority(&mut self) -> &mut RecordSection {\n &mut self.authority\n }\n\n \/\/\/ Gives access to the update section of an Update query.\n \/\/\/\n \/\/\/ The update section contains resource records or resource record\n \/\/\/ sets to be added or delete.\n \/\/\/\n pub fn update(&mut self) -> &mut RecordSection {\n &mut self.authority\n }\n\n \/\/\/ Gives access to the additional section.\n \/\/\/\n \/\/\/ The additional section contains resource records that help making\n \/\/\/ sense of the answer. What exactly ought to be in here depends on\n \/\/\/ the record type being asked for.\n \/\/\/\n pub fn additional(&mut self) -> &mut RecordSection {\n &mut self.additional\n }\n\n \/\/--- Finish it off ...\n \n \/\/\/ Assembles the message.\n \/\/\/\n \/\/\/ Returns a vector containing the wire data of the message and whether\n \/\/\/ the wire message had to be truncated because it would have been\n \/\/\/ longer than `maxlen`.\n \/\/\/\n \/\/\/ If you plan on writing the data into a stream, such as a\n \/\/\/ `TcpStream`, set `for_stream` to true. In this case, the resulting\n \/\/\/ data will start with the required length indication. If you wish\n \/\/\/ domain names to be compressed where applicable, set `compress` to\n \/\/\/ true.\n pub fn assemble(&self, maxlen: u16, for_stream: bool, compress: bool)\n -> (Vec<u8>, bool)\n {\n let mut asm = Assembly::new(maxlen, for_stream, compress);\n asm.push_header(&self.header, self.question.len(),\n self.answer.len(), self.authority.len(),\n self.additional.len());\n let complete = self.question.assemble(&mut asm)\n && self.answer.assemble(&mut asm)\n && self.authority.assemble(&mut asm)\n && self.additional.assemble(&mut asm);\n (asm.finish(), complete)\n }\n\n pub fn to_udp<A: net::ToSocketAddrs>(&self, sock: &net::UdpSocket,\n addr: A, maxlen: u16, compress: bool)\n -> io::Result<(usize, bool)>\n {\n let (vec, complete) = self.assemble(maxlen, false, compress);\n sock.send_to(&vec, addr).map(|x| (x, complete))\n }\n\n pub fn to_writer<W: io::Write>(&self, w: &mut W, compress: bool)\n -> io::Result<(usize, bool)>\n {\n let (vec, complete) = self.assemble(::std::u16::MAX, true, compress);\n w.write(&vec).map(|x| (x, complete))\n }\n}\n\n\n\/\/------------ QuestionSection ----------------------------------------------\n\n#[derive(Debug)]\nstruct Question {\n qname: name::DomainNameBuf,\n qtype: u16,\n qclass: u16,\n}\n\n\n\/\/\/ Collects the questions for a message.\n\/\/\/\n#[derive(Debug)]\npub struct QuestionSection {\n inner: Vec<Question>,\n}\n\nimpl QuestionSection {\n fn new() -> QuestionSection {\n QuestionSection { inner: Vec::new() }\n }\n\n \/\/\/ Adds a questions.\n \/\/\/\n pub fn push(&mut self, qname: name::DomainNameBuf, qtype: u16,\n qclass: u16)\n {\n assert!(self.inner.len() < ::std::u16::MAX as usize);\n self.inner.push(Question { qname: qname, qtype: qtype,\n qclass: qclass })\n }\n\n \/\/\/ Returns the length of the question section.\n \/\/\/\n pub fn len(&self) -> u16 {\n self.inner.len() as u16\n }\n\n fn assemble(&self, asm: &mut Assembly) -> bool {\n for q in self.inner.iter() {\n if !asm.push_question(&q.qname, q.qtype, q.qclass) {\n return false\n }\n }\n true\n }\n}\n\n\n\/\/------------ RecordSection ------------------------------------------------\n\n#[derive(Debug)]\nstruct Record {\n name: name::DomainNameBuf,\n rtype: 
u16,\n rclass: u16,\n ttl: u32,\n rdata: Box<RecordDataAssembly>\n}\n\n\n\/\/\/ Collects the resource records of one of the three record sections.\n\/\/\/\n#[derive(Debug)]\npub struct RecordSection {\n inner: Vec<Record>,\n}\n\nimpl RecordSection {\n fn new() -> RecordSection {\n RecordSection { inner: Vec::new() }\n }\n\n pub fn push<R: RecordData + 'static>(&mut self, name: name::DomainNameBuf,\n rclass: u16, ttl: u32,\n data: R)\n {\n self.inner.push(Record { name: name, rtype: R::rtype(),\n rclass: rclass, ttl: ttl,\n rdata: Box::new(data) })\n }\n\n \/\/\/ Returns the number of records in the section.\n \/\/\/\n pub fn len(&self) -> u16 {\n self.inner.len() as u16\n }\n\n fn assemble(&self, asm: &mut Assembly) -> bool {\n for rr in self.inner.iter() {\n if !asm.push_rr(&rr.name, rr.rtype, rr.rclass, rr.ttl, &*rr.rdata)\n {\n return false\n }\n }\n true\n }\n}\n\n\/\/------------ Tests --------------------------------------------------------\n\n#[cfg(test)]\nmod tests {\n\n #[test]\n fn test_it() {\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use parse::Executable;\nuse instruction;\nuse instruction::{Instruction, Condition, Operand};\nuse port;\nuse port::GenericPort;\n\n#[derive(Default)]\npub struct CpuState {\n pub acc: i32,\n pub bak: i32,\n pc: i32,\n}\n\npub struct CpuPorts {\n up: GenericPort,\n down: GenericPort,\n left: GenericPort,\n right: GenericPort,\n}\n\nimpl CpuPorts {\n fn new() -> Self {\n CpuPorts {\n up: GenericPort::create(port::CpuPort::new()),\n down: GenericPort::create(port::CpuPort::new()),\n left: GenericPort::create(port::CpuPort::new()),\n right: GenericPort::create(port::CpuPort::new()),\n }\n }\n\n fn write_port(&mut self, port: instruction::Port, val: i32) -> bool {\n match port {\n instruction::Port::Up => &self.up,\n instruction::Port::Down => &self.down,\n instruction::Port::Left => &self.left,\n instruction::Port::Right => &self.right,\n }.write(val)\n }\n\n fn read_port(&mut self, port: instruction::Port) -> Option<i32> {\n panic!(\"Unimplemented port read\");\n }\n}\n\npub struct Cpu {\n state: CpuState,\n outports: CpuPorts,\n inports: CpuPorts,\n executable: Executable,\n}\n\nfn get_operand(state: &CpuState, ports: &mut CpuPorts, op: &Operand) -> Option<i32> {\n match op {\n &Operand::Lit(i) => Some(i),\n &Operand::ACC => Some(state.acc),\n &Operand::Port(ref p) => ports.read_port(p.to_owned()),\n }\n}\n\nimpl Cpu {\n pub fn new(executable: Executable) -> Cpu {\n Self::with_ports(CpuPorts::new(), executable)\n }\n\n pub fn with_ports(out: CpuPorts, executable: Executable) -> Cpu {\n Cpu { state: Default::default(),\n outports: out,\n inports: CpuPorts::new(),\n executable: executable,\n }\n }\n\n pub fn execute(&mut self) -> bool {\n if self.executable.len() == 0 {\n return false;\n }\n\n let advance_pc = match *self.executable.insn_at(self.pc()) {\n Instruction::NOP => true,\n Instruction::MOV { ref src, ref dst } => {\n match get_operand(&self.state, &mut self.inports, src) {\n Some(i) => match dst {\n &Operand::Lit(_) => panic!(\"Cannot store to a literal\"),\n &Operand::ACC => { self.state.acc = i; true },\n &Operand::Port(ref p) => self.outports.write_port(p.to_owned(), i),\n },\n None => false\n }\n },\n Instruction::SWP => {\n let tmp = self.state.acc;\n self.state.acc = self.state.bak;\n self.state.bak = tmp;\n true\n },\n Instruction::SAV => {\n self.state.bak = self.state.acc;\n true\n },\n Instruction::ADD { ref addend } => {\n match get_operand(&self.state, &mut self.inports, addend) {\n Some(i) => { self.state.acc += i; true 
},\n None => false\n }\n },\n Instruction::SUB { ref subtrahend } => {\n match get_operand(&self.state, &mut self.inports, subtrahend) {\n Some(i) => { self.state.acc -= i; true },\n None => false\n }\n },\n Instruction::NEG => {\n self.state.acc = -self.state.acc;\n true\n },\n Instruction::J { ref cond, ref dst } => {\n if match *cond {\n Condition::Unconditional => true,\n Condition::Ez => self.state.acc == 0,\n Condition::Nz => self.state.acc != 0,\n Condition::Gz => self.state.acc > 0,\n Condition::Lz => self.state.acc < 0,\n } {\n self.state.pc = self.executable.label_line(dst) as i32;\n false\n } else {\n true\n }\n },\n Instruction::JRO { ref dst } => {\n match get_operand(&self.state, &mut self.inports, dst) {\n Some(i) => { self.state.pc += i; true },\n None => false\n }\n },\n };\n if advance_pc {\n self.state.pc += 1;\n }\n \/* Handle wrapping at the end and via JRO *\/\n self.state.pc %= self.executable.len() as i32;\n if self.state.pc < 0 {\n self.state.pc = self.executable.len() as i32 + self.state.pc;\n }\n true\n }\n\n pub fn current_line(&self) -> u32 {\n self.executable.srcline_at(self.pc())\n }\n\n fn pc(&self) -> usize {\n self.state.pc as usize\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Cpu, CpuPorts};\n use port::Port;\n use parse;\n\n #[test]\n fn test_cpu_wrapping() {\n let e = parse::parse(\"TOP: NOP\\nNOP\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n cpu.execute();\n assert_eq!(cpu.current_line(), 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n cpu.execute();\n }\n\n #[test]\n fn test_mov() {\n let e = parse::parse(\"MOV 10 ACC\\nNOP\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n assert_eq!(cpu.state.acc, 10);\n }\n\n #[test]\n fn test_add_sub() {\n let e = parse::parse(\"ADD 10\\nADD -20\\nSUB 10\\nSUB -30\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n assert_eq!(cpu.state.acc, 10);\n cpu.execute();\n assert_eq!(cpu.current_line(), 2);\n assert_eq!(cpu.state.acc, -10);\n cpu.execute();\n assert_eq!(cpu.current_line(), 3);\n assert_eq!(cpu.state.acc, -20);\n cpu.execute();\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 10);\n }\n\n #[test]\n fn test_ports() {\n let e = parse::parse(\"MOV 10 DOWN\").unwrap();\n let mut cpu = Cpu::new(e);\n cpu.execute();\n assert_eq!(cpu.outports.down.read().unwrap(), 10);\n }\n\n #[test]\n fn port_borrow() {\n let e = parse::parse(\"MOV 10 DOWN\").unwrap();\n let ports = CpuPorts::new();\n let down = ports.down.clone();\n let mut cpu = Cpu::with_ports(ports, e);\n cpu.execute();\n assert_eq!(down.read().unwrap(), 10);\n }\n}\n<commit_msg>cpu: implement port read<commit_after>use parse::Executable;\nuse instruction;\nuse instruction::{Instruction, Condition, Operand};\nuse port;\nuse port::GenericPort;\n\n#[derive(Default)]\npub struct CpuState {\n pub acc: i32,\n pub bak: i32,\n pc: i32,\n}\n\npub struct CpuPorts {\n up: GenericPort,\n down: GenericPort,\n left: GenericPort,\n right: GenericPort,\n}\n\nimpl CpuPorts {\n fn new() -> Self {\n CpuPorts {\n up: GenericPort::create(port::CpuPort::new()),\n down: GenericPort::create(port::CpuPort::new()),\n left: GenericPort::create(port::CpuPort::new()),\n right: GenericPort::create(port::CpuPort::new()),\n }\n }\n\n \/\/\/ 
Index ports structure by instruction port enum\n fn match_port(&mut self, port: instruction::Port) -> &mut GenericPort {\n match port {\n instruction::Port::Up => &mut self.up,\n instruction::Port::Down => &mut self.down,\n instruction::Port::Left => &mut self.left,\n instruction::Port::Right => &mut self.right,\n }\n }\n\n fn write_port(&mut self, port: instruction::Port, val: i32) -> bool {\n self.match_port(port).write(val)\n }\n\n fn read_port(&mut self, port: instruction::Port) -> Option<i32> {\n self.match_port(port).read()\n }\n}\n\npub struct Cpu {\n state: CpuState,\n outports: CpuPorts,\n inports: CpuPorts,\n executable: Executable,\n}\n\nfn get_operand(state: &CpuState, ports: &mut CpuPorts, op: &Operand) -> Option<i32> {\n match op {\n &Operand::Lit(i) => Some(i),\n &Operand::ACC => Some(state.acc),\n &Operand::Port(ref p) => ports.read_port(p.to_owned()),\n }\n}\n\nimpl Cpu {\n pub fn new(executable: Executable) -> Cpu {\n Self::with_ports(CpuPorts::new(), executable)\n }\n\n pub fn with_ports(out: CpuPorts, executable: Executable) -> Cpu {\n Cpu { state: Default::default(),\n outports: out,\n inports: CpuPorts::new(),\n executable: executable,\n }\n }\n\n pub fn execute(&mut self) -> bool {\n if self.executable.len() == 0 {\n return false;\n }\n\n let advance_pc = match *self.executable.insn_at(self.pc()) {\n Instruction::NOP => true,\n Instruction::MOV { ref src, ref dst } => {\n match get_operand(&self.state, &mut self.inports, src) {\n Some(i) => match dst {\n &Operand::Lit(_) => panic!(\"Cannot store to a literal\"),\n &Operand::ACC => { self.state.acc = i; true },\n &Operand::Port(ref p) => self.outports.write_port(p.to_owned(), i),\n },\n None => false\n }\n },\n Instruction::SWP => {\n let tmp = self.state.acc;\n self.state.acc = self.state.bak;\n self.state.bak = tmp;\n true\n },\n Instruction::SAV => {\n self.state.bak = self.state.acc;\n true\n },\n Instruction::ADD { ref addend } => {\n match get_operand(&self.state, &mut self.inports, addend) {\n Some(i) => { self.state.acc += i; true },\n None => false\n }\n },\n Instruction::SUB { ref subtrahend } => {\n match get_operand(&self.state, &mut self.inports, subtrahend) {\n Some(i) => { self.state.acc -= i; true },\n None => false\n }\n },\n Instruction::NEG => {\n self.state.acc = -self.state.acc;\n true\n },\n Instruction::J { ref cond, ref dst } => {\n if match *cond {\n Condition::Unconditional => true,\n Condition::Ez => self.state.acc == 0,\n Condition::Nz => self.state.acc != 0,\n Condition::Gz => self.state.acc > 0,\n Condition::Lz => self.state.acc < 0,\n } {\n self.state.pc = self.executable.label_line(dst) as i32;\n false\n } else {\n true\n }\n },\n Instruction::JRO { ref dst } => {\n match get_operand(&self.state, &mut self.inports, dst) {\n Some(i) => { self.state.pc += i; true },\n None => false\n }\n },\n };\n if advance_pc {\n self.state.pc += 1;\n }\n \/* Handle wrapping at the end and via JRO *\/\n self.state.pc %= self.executable.len() as i32;\n if self.state.pc < 0 {\n self.state.pc = self.executable.len() as i32 + self.state.pc;\n }\n true\n }\n\n pub fn current_line(&self) -> u32 {\n self.executable.srcline_at(self.pc())\n }\n\n fn pc(&self) -> usize {\n self.state.pc as usize\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Cpu, CpuPorts};\n use port::Port;\n use parse;\n\n #[test]\n fn test_cpu_wrapping() {\n let e = parse::parse(\"TOP: NOP\\nNOP\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n 
cpu.execute();\n assert_eq!(cpu.current_line(), 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n cpu.execute();\n }\n\n #[test]\n fn test_mov() {\n let e = parse::parse(\"MOV 10 ACC\\nNOP\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n assert_eq!(cpu.state.acc, 10);\n }\n\n #[test]\n fn test_add_sub() {\n let e = parse::parse(\"ADD 10\\nADD -20\\nSUB 10\\nSUB -30\").unwrap();\n let mut cpu = Cpu::new(e);\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 0);\n cpu.execute();\n assert_eq!(cpu.current_line(), 1);\n assert_eq!(cpu.state.acc, 10);\n cpu.execute();\n assert_eq!(cpu.current_line(), 2);\n assert_eq!(cpu.state.acc, -10);\n cpu.execute();\n assert_eq!(cpu.current_line(), 3);\n assert_eq!(cpu.state.acc, -20);\n cpu.execute();\n assert_eq!(cpu.current_line(), 0);\n assert_eq!(cpu.state.acc, 10);\n }\n\n #[test]\n fn test_ports() {\n let e = parse::parse(\"MOV 10 DOWN\").unwrap();\n let mut cpu = Cpu::new(e);\n cpu.execute();\n assert_eq!(cpu.outports.down.read().unwrap(), 10);\n }\n\n #[test]\n fn port_borrow() {\n let e = parse::parse(\"MOV 10 DOWN\").unwrap();\n let ports = CpuPorts::new();\n let down = ports.down.clone();\n let mut cpu = Cpu::with_ports(ports, e);\n cpu.execute();\n assert_eq!(down.read().unwrap(), 10);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing test for unreachable! macro<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern:internal error: entered unreachable code\nfn main() { unreachable!() }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add span information to token streams when possible.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting for chunk_view<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add some lifetime annotations<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![warn(dead_code)]\n\n\/\/! 
A user friendly graphics engine.\n\nextern crate time;\nextern crate graphics;\nextern crate log;\nextern crate collections;\nextern crate gl;\nextern crate glfw;\nextern crate sdl2;\nextern crate image;\nextern crate libc;\nextern crate debug;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use Gl = gl_back_end::Gl;\npub use AssetStore = asset_store::AssetStore;\npub use Texture = texture::Texture;\n\npub mod shader_utils;\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_sdl2;\nmod game_window_glfw;\nmod game_window_settings;\nmod gl_back_end;\nmod asset_store;\nmod texture;\n\n<commit_msg>Added conditional compilation for GLFW and SDL2<commit_after>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![warn(dead_code)]\n\n\/\/! 
A user friendly graphics engine.\n#[cfg(glfw)]\nextern crate glfw;\n#[cfg(sdl2)]\nextern crate sdl2;\n\nextern crate time;\nextern crate graphics;\nextern crate log;\nextern crate collections;\nextern crate gl;\nextern crate image;\nextern crate libc;\nextern crate debug;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\n#[cfg(sdl2)]\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\n#[cfg(glfw)]\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use Gl = gl_back_end::Gl;\npub use AssetStore = asset_store::AssetStore;\npub use Texture = texture::Texture;\n\npub mod shader_utils;\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_settings;\nmod gl_back_end;\nmod asset_store;\nmod texture;\n\n#[cfg(sdl2)]\nmod game_window_sdl2;\n#[cfg(glfw)]\nmod game_window_glfw;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(doctest): fix fallout of recent changes in operator dispatch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Restore unsigned.redacted_because<commit_after><|endoftext|>"} {"text":"<commit_before>use std::fmt::{self, Debug, Formatter};\n\n\/\/\/ A data-structure for storing a sequence of 4-bit values.\n\/\/\/\n\/\/\/ Values are stored in a `Vec<u8>`, with two values per byte.\n\/\/\/\n\/\/\/ Values at even indices are stored in the most-significant half of their byte,\n\/\/\/ while values at odd indices are stored in the least-significant half.\n\/\/\/\n\/\/\/ Imagine a vector of MSB bytes, and you'll be right.\n\/\/\/\n\/\/\/ n = [_ _ | _ _ | _ _]\n\/\/\/\n\/\/\/ # Invariants\n\/\/\/ * If the length is odd, then the second half of the last byte must be 0.\npub struct NibbleVec {\n length: usize,\n data: Vec<u8>\n}\n\nimpl NibbleVec {\n \/\/\/ Create an empty nibble vector.\n pub fn new() -> NibbleVec {\n NibbleVec {\n length: 0,\n data: Vec::new()\n }\n }\n\n \/\/\/ Create a nibble vector from a vector of bytes.\n \/\/\/\n \/\/\/ Each byte is split into two 4-bit entries (MSB, LSB).\n pub fn from_byte_vec(vec: Vec<u8>) -> NibbleVec {\n let length = 2 * vec.len();\n NibbleVec {\n length: length,\n data: vec\n }\n }\n\n \/\/\/ Get the number of elements stored in the vector.\n pub fn len(&self) -> usize {\n self.length\n }\n\n \/\/\/ Fetch a single entry from the vector.\n \/\/\/\n \/\/\/ 
Guaranteed to be a value in the interval [0, 15].\n \/\/\/\n \/\/\/ **Panics** if `idx >= self.len()`.\n pub fn get(&self, idx: usize) -> u8 {\n if idx >= self.length {\n panic!(\"attempted access beyond vector end. len is {}, index is {}\", self.length, idx);\n }\n let vec_idx = idx \/ 2;\n match idx % 2 {\n \/\/ If the index is even, take the first (most significant) half of the stored byte.\n 0 => self.data[vec_idx] >> 4,\n \/\/ If the index is odd, take the second (least significant) half.\n _ => self.data[vec_idx] & 0x0F\n }\n }\n\n \/\/\/ Add a single nibble to the vector.\n \/\/\/\n \/\/\/ Only the 4 least-significant bits of the value are used.\n pub fn push(&mut self, val: u8) {\n if self.length % 2 == 0 {\n self.data.push(val << 4);\n } else {\n let vec_len = self.data.len();\n\n \/\/ Zero the second half of the last byte just to be safe.\n self.data[vec_len - 1] &= 0xF0;\n\n \/\/ Write the new value.\n self.data[vec_len - 1] |= val & 0x0F;\n }\n self.length += 1;\n }\n\n \/\/\/ Split the vector into two parts.\n \/\/\/\n \/\/\/ All elements at or following the given index are returned in a new `NibbleVec`,\n \/\/\/ with exactly `idx` elements remaining in this vector.\n \/\/\/\n \/\/\/ **Panics** if `idx > self.len()`.\n pub fn split(&mut self, idx: usize) -> NibbleVec {\n if idx > self.length {\n panic!(\"attempted to split past vector end. len is {}, index is {}\", self.length, idx);\n } else if idx == self.length {\n NibbleVec::new()\n } else if idx % 2 == 0 {\n self.split_even(idx)\n } else {\n self.split_odd(idx)\n }\n }\n\n \/\/\/ Split function for odd *indices*.\n #[inline(always)]\n fn split_odd(&mut self, idx: usize) -> NibbleVec {\n let tail_vec_size = (self.length - idx) \/ 2;\n let mut tail = NibbleVec::from_byte_vec(Vec::with_capacity(tail_vec_size));\n\n \/\/ Perform an overlap copy, copying the last nibble of the original vector only if\n \/\/ the length of the new tail is *odd*.\n let tail_length = self.length - idx;\n let take_last = tail_length % 2 == 1;\n self.overlap_copy(idx \/ 2, self.data.len(), &mut tail.data, &mut tail.length, take_last);\n\n \/\/ Remove the copied bytes, being careful to skip the idx byte.\n for _ in range(idx \/ 2 + 1, self.data.len()) {\n self.data.pop();\n }\n\n \/\/ Zero the second half of the index byte so as to maintain the last-nibble invariant.\n self.data[idx \/ 2] &= 0xF0;\n\n \/\/ Update the length of the first NibbleVec.\n self.length = idx;\n\n tail\n }\n\n \/\/\/ Split function for even *indices*.\n #[inline(always)]\n fn split_even(&mut self, idx: usize) -> NibbleVec {\n \/\/ Avoid allocating a temporary vector by copying all the bytes in order, then popping them.\n let tail_vec_size = (self.length - idx) \/ 2;\n let mut tail = NibbleVec::from_byte_vec(Vec::with_capacity(tail_vec_size));\n\n \/\/ Copy the bytes.\n for i in range(idx \/ 2, self.data.len()) {\n tail.data.push(self.data[i]);\n }\n\n \/\/ Pop the same bytes.\n for _ in range(0, tail_vec_size) {\n self.data.pop();\n }\n\n \/\/ Update lengths.\n tail.length = self.length - idx;\n self.length = idx;\n\n tail\n }\n\n \/\/\/ Copy data between the second half of self.data[start] and\n \/\/\/ self.data[end - 1]. 
The second half of the last entry is included\n \/\/\/ if include_last is true.\n #[inline(always)]\n fn overlap_copy(&self, start: usize, end: usize, vec: &mut Vec<u8>, length: &mut usize, include_last: bool) {\n \/\/ Copy up to the first half of the last byte.\n for i in range(start, end - 1) {\n \/\/ The first half is the second half of the old entry.\n let first_half = self.data[i] & 0x0f;\n\n \/\/ The second half is the first half of the next entry.\n let second_half = self.data[i + 1] >> 4;\n\n vec.push((first_half << 4) | second_half);\n *length += 2;\n }\n\n if include_last {\n let last = self.data[end - 1] & 0x0f;\n vec.push(last << 4);\n *length += 1;\n }\n }\n\n \/\/\/ Append another nibble vector.\n pub fn join(&mut self, other: &NibbleVec) {\n \/\/ If the length is even, we can append directly.\n if self.length % 2 == 0 {\n self.length += other.length;\n self.data.push_all(other.data.as_slice());\n return;\n }\n\n \/\/ If the other vector is empty, bail out.\n if other.len() == 0 {\n return;\n }\n\n \/\/ If the length is odd, we have to perform an overlap copy.\n \/\/ Copy the first half of the first element, to make the vector an even length.\n self.push(other.get(0));\n\n \/\/ Copy the rest of the vector using an overlap copy.\n let take_last = other.len() % 2 == 0;\n other.overlap_copy(0, other.data.len(), &mut self.data, &mut self.length, take_last);\n }\n}\n\nimpl PartialEq<[u8]> for NibbleVec {\n fn eq(&self, other: &[u8]) -> bool {\n if other.len() != self.len() {\n return false;\n }\n\n for (i, x) in other.iter().enumerate() {\n if self.get(i) != *x {\n return false;\n }\n }\n true\n }\n}\n\nimpl Clone for NibbleVec {\n fn clone(&self) -> NibbleVec {\n NibbleVec {\n length: self.length,\n data: self.data.clone()\n }\n }\n}\n\nimpl Debug for NibbleVec {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"NibbleVec [\"));\n\n if self.len() > 0 {\n try!(write!(fmt, \"{}\", self.get(0)));\n }\n\n for i in range(1, self.len()) {\n try!(write!(fmt, \", {}\", self.get(i)));\n }\n write!(fmt, \"]\")\n }\n}\n\n#[cfg(test)]\nmod test {\n use NibbleVec;\n\n fn v8_7_6_5() -> NibbleVec {\n NibbleVec::from_byte_vec(vec![8 << 4 | 7, 6 << 4 | 5])\n }\n\n fn v11_10_9() -> NibbleVec {\n let mut result = NibbleVec::from_byte_vec(vec![11 << 4 | 10]);\n result.push(9);\n result\n }\n\n #[test]\n fn get() {\n let nv = NibbleVec::from_byte_vec(vec![3 << 4 | 7]);\n assert_eq!(nv.get(0), 3u8);\n assert_eq!(nv.get(1), 7u8);\n }\n\n #[test]\n fn push() {\n let mut nv = NibbleVec::new();\n let data = vec![0, 1, 3, 5, 7, 9, 11, 15];\n for val in data.iter() {\n nv.push(*val);\n }\n\n for (i, val) in data.iter().enumerate() {\n assert_eq!(nv.get(i), *val);\n }\n }\n\n fn split_test( nibble_vec: &NibbleVec,\n idx: usize,\n first: Vec<u8>,\n second: Vec<u8>) {\n let mut init = nibble_vec.clone();\n let tail = init.split(idx);\n assert!(init == first[..]);\n assert!(tail == second[..]);\n }\n\n #[test]\n fn split_even_length() {\n let even_length = v8_7_6_5();\n split_test(&even_length, 0, vec![], vec![8, 7, 6, 5]);\n split_test(&even_length, 1, vec![8], vec![7, 6, 5]);\n split_test(&even_length, 2, vec![8, 7], vec![6, 5]);\n split_test(&even_length, 4, vec![8, 7, 6, 5], vec![]);\n }\n\n #[test]\n fn split_odd_length() {\n let odd_length = v11_10_9();\n split_test(&odd_length, 0, vec![], vec![11, 10, 9]);\n split_test(&odd_length, 1, vec![11], vec![10, 9]);\n split_test(&odd_length, 2, vec![11, 10], vec![9]);\n split_test(&odd_length, 3, vec![11, 10, 9], vec![]);\n }\n\n \/\/\/ 
Join vec2 onto vec1 and ensure that the results matches the one expected.\n fn join_test(vec1: &NibbleVec, vec2: &NibbleVec, result: Vec<u8>) {\n let mut joined = vec1.clone();\n joined.join(vec2);\n assert!(joined == result[..]);\n }\n\n #[test]\n fn join_even_length() {\n let v1 = v8_7_6_5();\n let v2 = v11_10_9();\n join_test(&v1, &v2, vec![8, 7, 6, 5, 11, 10, 9]);\n join_test(&v1, &v1, vec![8, 7, 6, 5, 8, 7, 6, 5]);\n join_test(&v1, &NibbleVec::new(), vec![8, 7, 6, 5]);\n join_test(&NibbleVec::new(), &v1, vec![8, 7, 6, 5]);\n }\n\n #[test]\n fn join_odd_length() {\n let v1 = v8_7_6_5();\n let v2 = v11_10_9();\n join_test(&v2, &v1, vec![11, 10, 9, 8, 7, 6, 5]);\n join_test(&v2, &v2, vec![11, 10, 9, 11, 10, 9]);\n join_test(&v2, &NibbleVec::new(), vec![11, 10, 9]);\n }\n\n \/\/\/ Ensure that the last nibble is zeroed before reuse.\n #[test]\n fn memory_reuse() {\n let mut vec = NibbleVec::new();\n vec.push(10);\n vec.push(1);\n\n \/\/ Pushing.\n vec.split(1);\n vec.push(2);\n assert_eq!(vec.get(1), 2);\n\n \/\/ Joining.\n vec.split(1);\n vec.join(&NibbleVec::from_byte_vec(vec![1 << 4 | 3, 5 << 4]));\n assert_eq!(vec.get(1), 1);\n }\n}\n<commit_msg>Make join take self by value to allow chaining.<commit_after>use std::fmt::{self, Debug, Formatter};\n\n\/\/\/ A data-structure for storing a sequence of 4-bit values.\n\/\/\/\n\/\/\/ Values are stored in a `Vec<u8>`, with two values per byte.\n\/\/\/\n\/\/\/ Values at even indices are stored in the most-significant half of their byte,\n\/\/\/ while values at odd indices are stored in the least-significant half.\n\/\/\/\n\/\/\/ Imagine a vector of MSB bytes, and you'll be right.\n\/\/\/\n\/\/\/ n = [_ _ | _ _ | _ _]\n\/\/\/\n\/\/\/ # Invariants\n\/\/\/ * If the length is odd, then the second half of the last byte must be 0.\npub struct NibbleVec {\n length: usize,\n data: Vec<u8>\n}\n\nimpl NibbleVec {\n \/\/\/ Create an empty nibble vector.\n pub fn new() -> NibbleVec {\n NibbleVec {\n length: 0,\n data: Vec::new()\n }\n }\n\n \/\/\/ Create a nibble vector from a vector of bytes.\n \/\/\/\n \/\/\/ Each byte is split into two 4-bit entries (MSB, LSB).\n pub fn from_byte_vec(vec: Vec<u8>) -> NibbleVec {\n let length = 2 * vec.len();\n NibbleVec {\n length: length,\n data: vec\n }\n }\n\n \/\/\/ Get the number of elements stored in the vector.\n pub fn len(&self) -> usize {\n self.length\n }\n\n \/\/\/ Fetch a single entry from the vector.\n \/\/\/\n \/\/\/ Guaranteed to be a value in the interval [0, 15].\n \/\/\/\n \/\/\/ **Panics** if `idx >= self.len()`.\n pub fn get(&self, idx: usize) -> u8 {\n if idx >= self.length {\n panic!(\"attempted access beyond vector end. 
len is {}, index is {}\", self.length, idx);\n }\n let vec_idx = idx \/ 2;\n match idx % 2 {\n \/\/ If the index is even, take the first (most significant) half of the stored byte.\n 0 => self.data[vec_idx] >> 4,\n \/\/ If the index is odd, take the second (least significant) half.\n _ => self.data[vec_idx] & 0x0F\n }\n }\n\n \/\/\/ Add a single nibble to the vector.\n \/\/\/\n \/\/\/ Only the 4 least-significant bits of the value are used.\n pub fn push(&mut self, val: u8) {\n if self.length % 2 == 0 {\n self.data.push(val << 4);\n } else {\n let vec_len = self.data.len();\n\n \/\/ Zero the second half of the last byte just to be safe.\n self.data[vec_len - 1] &= 0xF0;\n\n \/\/ Write the new value.\n self.data[vec_len - 1] |= val & 0x0F;\n }\n self.length += 1;\n }\n\n \/\/\/ Split the vector into two parts.\n \/\/\/\n \/\/\/ All elements at or following the given index are returned in a new `NibbleVec`,\n \/\/\/ with exactly `idx` elements remaining in this vector.\n \/\/\/\n \/\/\/ **Panics** if `idx > self.len()`.\n pub fn split(&mut self, idx: usize) -> NibbleVec {\n if idx > self.length {\n panic!(\"attempted to split past vector end. len is {}, index is {}\", self.length, idx);\n } else if idx == self.length {\n NibbleVec::new()\n } else if idx % 2 == 0 {\n self.split_even(idx)\n } else {\n self.split_odd(idx)\n }\n }\n\n \/\/\/ Split function for odd *indices*.\n #[inline(always)]\n fn split_odd(&mut self, idx: usize) -> NibbleVec {\n let tail_vec_size = (self.length - idx) \/ 2;\n let mut tail = NibbleVec::from_byte_vec(Vec::with_capacity(tail_vec_size));\n\n \/\/ Perform an overlap copy, copying the last nibble of the original vector only if\n \/\/ the length of the new tail is *odd*.\n let tail_length = self.length - idx;\n let take_last = tail_length % 2 == 1;\n self.overlap_copy(idx \/ 2, self.data.len(), &mut tail.data, &mut tail.length, take_last);\n\n \/\/ Remove the copied bytes, being careful to skip the idx byte.\n for _ in range(idx \/ 2 + 1, self.data.len()) {\n self.data.pop();\n }\n\n \/\/ Zero the second half of the index byte so as to maintain the last-nibble invariant.\n self.data[idx \/ 2] &= 0xF0;\n\n \/\/ Update the length of the first NibbleVec.\n self.length = idx;\n\n tail\n }\n\n \/\/\/ Split function for even *indices*.\n #[inline(always)]\n fn split_even(&mut self, idx: usize) -> NibbleVec {\n \/\/ Avoid allocating a temporary vector by copying all the bytes in order, then popping them.\n let tail_vec_size = (self.length - idx) \/ 2;\n let mut tail = NibbleVec::from_byte_vec(Vec::with_capacity(tail_vec_size));\n\n \/\/ Copy the bytes.\n for i in range(idx \/ 2, self.data.len()) {\n tail.data.push(self.data[i]);\n }\n\n \/\/ Pop the same bytes.\n for _ in range(0, tail_vec_size) {\n self.data.pop();\n }\n\n \/\/ Update lengths.\n tail.length = self.length - idx;\n self.length = idx;\n\n tail\n }\n\n \/\/\/ Copy data between the second half of self.data[start] and\n \/\/\/ self.data[end - 1]. 
The second half of the last entry is included\n \/\/\/ if include_last is true.\n #[inline(always)]\n fn overlap_copy(&self, start: usize, end: usize, vec: &mut Vec<u8>, length: &mut usize, include_last: bool) {\n \/\/ Copy up to the first half of the last byte.\n for i in range(start, end - 1) {\n \/\/ The first half is the second half of the old entry.\n let first_half = self.data[i] & 0x0f;\n\n \/\/ The second half is the first half of the next entry.\n let second_half = self.data[i + 1] >> 4;\n\n vec.push((first_half << 4) | second_half);\n *length += 2;\n }\n\n if include_last {\n let last = self.data[end - 1] & 0x0f;\n vec.push(last << 4);\n *length += 1;\n }\n }\n\n \/\/\/ Append another nibble vector.\n pub fn join(mut self, other: &NibbleVec) -> NibbleVec {\n \/\/ If the length is even, we can append directly.\n if self.length % 2 == 0 {\n self.length += other.length;\n self.data.push_all(other.data.as_slice());\n return self;\n }\n\n \/\/ If the other vector is empty, bail out.\n if other.len() == 0 {\n return self;\n }\n\n \/\/ If the length is odd, we have to perform an overlap copy.\n \/\/ Copy the first half of the first element, to make the vector an even length.\n self.push(other.get(0));\n\n \/\/ Copy the rest of the vector using an overlap copy.\n let take_last = other.len() % 2 == 0;\n other.overlap_copy(0, other.data.len(), &mut self.data, &mut self.length, take_last);\n\n self\n }\n}\n\nimpl PartialEq<[u8]> for NibbleVec {\n fn eq(&self, other: &[u8]) -> bool {\n if other.len() != self.len() {\n return false;\n }\n\n for (i, x) in other.iter().enumerate() {\n if self.get(i) != *x {\n return false;\n }\n }\n true\n }\n}\n\nimpl Clone for NibbleVec {\n fn clone(&self) -> NibbleVec {\n NibbleVec {\n length: self.length,\n data: self.data.clone()\n }\n }\n}\n\nimpl Debug for NibbleVec {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"NibbleVec [\"));\n\n if self.len() > 0 {\n try!(write!(fmt, \"{}\", self.get(0)));\n }\n\n for i in range(1, self.len()) {\n try!(write!(fmt, \", {}\", self.get(i)));\n }\n write!(fmt, \"]\")\n }\n}\n\n#[cfg(test)]\nmod test {\n use NibbleVec;\n\n fn v8_7_6_5() -> NibbleVec {\n NibbleVec::from_byte_vec(vec![8 << 4 | 7, 6 << 4 | 5])\n }\n\n fn v11_10_9() -> NibbleVec {\n let mut result = NibbleVec::from_byte_vec(vec![11 << 4 | 10]);\n result.push(9);\n result\n }\n\n #[test]\n fn get() {\n let nv = NibbleVec::from_byte_vec(vec![3 << 4 | 7]);\n assert_eq!(nv.get(0), 3u8);\n assert_eq!(nv.get(1), 7u8);\n }\n\n #[test]\n fn push() {\n let mut nv = NibbleVec::new();\n let data = vec![0, 1, 3, 5, 7, 9, 11, 15];\n for val in data.iter() {\n nv.push(*val);\n }\n\n for (i, val) in data.iter().enumerate() {\n assert_eq!(nv.get(i), *val);\n }\n }\n\n fn split_test( nibble_vec: &NibbleVec,\n idx: usize,\n first: Vec<u8>,\n second: Vec<u8>) {\n let mut init = nibble_vec.clone();\n let tail = init.split(idx);\n assert!(init == first[..]);\n assert!(tail == second[..]);\n }\n\n #[test]\n fn split_even_length() {\n let even_length = v8_7_6_5();\n split_test(&even_length, 0, vec![], vec![8, 7, 6, 5]);\n split_test(&even_length, 1, vec![8], vec![7, 6, 5]);\n split_test(&even_length, 2, vec![8, 7], vec![6, 5]);\n split_test(&even_length, 4, vec![8, 7, 6, 5], vec![]);\n }\n\n #[test]\n fn split_odd_length() {\n let odd_length = v11_10_9();\n split_test(&odd_length, 0, vec![], vec![11, 10, 9]);\n split_test(&odd_length, 1, vec![11], vec![10, 9]);\n split_test(&odd_length, 2, vec![11, 10], vec![9]);\n split_test(&odd_length, 3, vec![11, 
10, 9], vec![]);\n }\n\n \/\/\/ Join vec2 onto vec1 and ensure that the results matches the one expected.\n fn join_test(vec1: &NibbleVec, vec2: &NibbleVec, result: Vec<u8>) {\n let joined = vec1.clone().join(vec2);\n assert!(joined == result[..]);\n }\n\n #[test]\n fn join_even_length() {\n let v1 = v8_7_6_5();\n let v2 = v11_10_9();\n join_test(&v1, &v2, vec![8, 7, 6, 5, 11, 10, 9]);\n join_test(&v1, &v1, vec![8, 7, 6, 5, 8, 7, 6, 5]);\n join_test(&v1, &NibbleVec::new(), vec![8, 7, 6, 5]);\n join_test(&NibbleVec::new(), &v1, vec![8, 7, 6, 5]);\n }\n\n #[test]\n fn join_odd_length() {\n let v1 = v8_7_6_5();\n let v2 = v11_10_9();\n join_test(&v2, &v1, vec![11, 10, 9, 8, 7, 6, 5]);\n join_test(&v2, &v2, vec![11, 10, 9, 11, 10, 9]);\n join_test(&v2, &NibbleVec::new(), vec![11, 10, 9]);\n }\n\n \/\/\/ Ensure that the last nibble is zeroed before reuse.\n #[test]\n fn memory_reuse() {\n let mut vec = NibbleVec::new();\n vec.push(10);\n vec.push(1);\n\n \/\/ Pushing.\n vec.split(1);\n vec.push(2);\n assert_eq!(vec.get(1), 2);\n\n \/\/ Joining.\n vec.split(1);\n vec = vec.join(&NibbleVec::from_byte_vec(vec![1 << 4 | 3, 5 << 4]));\n assert_eq!(vec.get(1), 1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test_sort_zero_sized_type should test pdqsort, not rust's<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor, clean up, optimize, tweak<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Return IO errors that aren't NotFound when reading parent env files.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify `ArgParser` API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>make structs cloneable<commit_after><|endoftext|>"} {"text":"<commit_before>\/*!\nThis is an OAuth 2.0 library for the Rust language. It defines types and traits useful\nfor implementing an OAuth 2.0 compliant system as specified by RFC 6749.\n\nWARNING: We've just started. It is practically useless at this point.\n\n<h2>Applicability<\/h2>\n\nOAuth 2.0 is a protocol framework that allows an application (the <em>Client<\/em>, usually a web\nsite) to obtain limited access to an HTTP service (the <em>Resource Server<\/em>) on behalf of\na <em>Resource Owner<\/em> which interacts with the Client via a <em>User-Agent<\/em> (browser).\nThis is mediated via an <em>Authorization Server<\/em> (which could be th Resource Server\nitself, or separated from it).\n\n<blockquote>\nThe term \"client\" can be confusing here. The client of the OAuth service is typically a\nweb site. The client of that web site is the user-agent (browser). To minimize\nconfusion, the user-agent will not be referred to as a client.\n<\/blockquote>\n\n<h2>OAuth is a Framework Only<\/h2>\n\nOAuth 2.0 is an Authorization Framework. In order to get something usable, you must supply\nthe missing pieces. And there are quite a few missing pieces which you will need to implement\nin order to get a working system. These include:\n\n<ul>\n<li>Issuing and receiving HTTP requests (e.g. you'll need to plug in some HTTP\n library, we won't pick one for you).<\/li>\n<li>Storing state. Often database tables are used. Manytimes the Authorization\n Server and Resource Server use the same database, or perhaps are the same\n server. This is out of scope, and left up to you.<\/li>\n<li>Initial client registration (between the Client and the Authorization\n Server). 
Often people just use config files, but this is for you to decide.<\/li>\n<li>Client authentication (by the Authorization Server)<\/li>\n<li>User-Agent session management (by the Client). Usually via a session cookie,\n but we leave this up to you.<\/li>\n<li>User-Agent authentication and authorization (by the Authorization Server)<\/li>\n<li>Perhaps more<\/li>\n<\/ul>\n\n<h2>Sample Implementation<\/h2>\n\nFIXME: A sample implementation is intended to be supplied to demonstrate how to use this\nlibrary.\n\n<h2>Coverage and Standard Support<\/h2>\n\nWe do not (and likely will not) support every standard compliant way to use OAuth 2.0.\nBut we do try to be as flexible as possible. That being said, the following limitations\napply:\n\n<ul>\n<li>All HTTP traffic is required to be TLS protected. All endpoints must use the\n <em>https<\/em> scheme. The standard only requires this of most traffic.<\/li>\n<li>All IDs and tokens are taken to be respresented in UTF-8 encodings. We will not\n work with other encodings. The standard is silent on most encoding issues.<\/li>\n<li>FIXME: More limitations will be added to this list as the development progresses.<\/li>\n<\/ul>\n*\/\n\nextern crate url;\n\nuse std::fmt;\nuse std::fmt::Display;\n\npub mod syntax;\npub mod resource_server;\npub mod authorization_server;\npub mod client;\n\n\/\/\/ Client Identifier, issued to Clients by Authorization Servers when registering\n\/\/\/\n\/\/\/ See RFC 6749 Section 2.2. In particular:\n\/\/\/ <ul>\n\/\/\/ <li>The authorization server issues this to the client at registration, and uses it\n\/\/\/ to look up details about the client during the main protocol.<\/li>\n\/\/\/ <li>It is not a secret.<\/li>\n\/\/\/ <\/ul>\n\/\/\n\/\/\/ Charset validator ```syntax::valid_client_id_str```\npub type ClientId = String;\n\n\n\/\/\/ Client Type, either 'confidential' or 'public'.\n\/\/\/\n\/\/\/ See RFC 6749 Section 2.2. In particular:\n\/\/\/ <ul>\n\/\/\/ <li>If the client cannot be trusted with secrets, it is 'public'. This usually includes\n\/\/\/ all clients in end-user hands like javascript ones, but strictly speaking it depends\n\/\/\/ on your security model.<\/li>\n\/\/\/ <\/ul>\npub enum ClientType {\n ConfidentialClient,\n PublicClient,\n}\nimpl Display for ClientType {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result\n {\n match *self {\n ClientType::ConfidentialClient => write!(f, \"confidential\"),\n ClientType::PublicClient => write!(f, \"public\"),\n }\n }\n}\n<commit_msg>minor<commit_after>\/*!\nThis is an OAuth 2.0 library for the Rust language. It defines types and traits useful\nfor implementing an OAuth 2.0 compliant system as specified by RFC 6749.\n\nWARNING: We've just started. It is practically useless at this point.\n\n<h2>Applicability<\/h2>\n\nOAuth 2.0 is a protocol framework that allows an application (the <em>Client<\/em>, usually a web\nsite) to obtain limited access to an HTTP service (the <em>Resource Server<\/em>) on behalf of\na <em>Resource Owner<\/em> which interacts with the Client via a <em>User-Agent<\/em> (browser).\nThis is mediated via an <em>Authorization Server<\/em> (which could be the Resource Server\nitself, or separated from it).\n\n<blockquote>\nThe term \"client\" can be confusing here. The client of the OAuth service is typically a\nweb site. The client of that web site is the user-agent (browser). To minimize\nconfusion, the user-agent will not be referred to as a client.\n<\/blockquote>\n\n<h2>OAuth is a Framework Only<\/h2>\n\nOAuth 2.0 is an Authorization Framework. 
In order to get something usable, you must supply\nthe missing pieces. And there are quite a few missing pieces which you will need to implement\nin order to get a working system. These include:\n\n<ul>\n<li>Issuing and receiving HTTP requests (e.g. you'll need to work with HTTP somehow,\n perhaps with hyper or other HTTP related library, or directly).<\/li>\n<li>Storing state. Often database tables are used. Manytimes the Authorization\n Server and Resource Server use the same database, or perhaps are the same\n server. This is out of scope, and left up to you.<\/li>\n<li>Initial client registration (between the Client and the Authorization\n Server). Often people just use config files, but this is for you to decide.<\/li>\n<li>Client authentication (by the Authorization Server)<\/li>\n<li>User-Agent session management (by the Client). Usually via a session cookie,\n but we leave this up to you.<\/li>\n<li>User-Agent authentication and authorization (by the Authorization Server)<\/li>\n<li>Perhaps more<\/li>\n<\/ul>\n\n<h2>Sample Implementation<\/h2>\n\nFIXME: A sample implementation is intended to be supplied to demonstrate how to use this\nlibrary.\n\n<h2>Coverage and Standard Support<\/h2>\n\nWe do not (and likely will not) support every standard compliant way to use OAuth 2.0.\nBut we do try to be as flexible as possible. That being said, the following limitations\napply:\n\n<ul>\n<li>All HTTP traffic is required to be TLS protected. All endpoints must use the\n <em>https<\/em> scheme. The standard only requires this of most traffic.<\/li>\n<li>All IDs and tokens are taken to be respresented in UTF-8 encodings. We will not\n work with other encodings. The standard is silent on most encoding issues.<\/li>\n<li>FIXME: More limitations will be added to this list as the development progresses.<\/li>\n<\/ul>\n*\/\n\nextern crate url;\n\nuse std::fmt;\nuse std::fmt::Display;\n\npub mod syntax;\npub mod resource_server;\npub mod authorization_server;\npub mod client;\n\n\/\/\/ Client Identifier, issued to Clients by Authorization Servers when registering\n\/\/\/\n\/\/\/ See RFC 6749 Section 2.2. In particular:\n\/\/\/ <ul>\n\/\/\/ <li>The authorization server issues this to the client at registration, and uses it\n\/\/\/ to look up details about the client during the main protocol.<\/li>\n\/\/\/ <li>It is not a secret.<\/li>\n\/\/\/ <\/ul>\n\/\/\n\/\/\/ Charset validator ```syntax::valid_client_id_str```\npub type ClientId = String;\n\n\n\/\/\/ Client Type, either 'confidential' or 'public'.\n\/\/\/\n\/\/\/ See RFC 6749 Section 2.2. In particular:\n\/\/\/ <ul>\n\/\/\/ <li>If the client cannot be trusted with secrets, it is 'public'. 
This usually includes\n\/\/\/ all clients in end-user hands like javascript ones, but strictly speaking it depends\n\/\/\/ on your security model.<\/li>\n\/\/\/ <\/ul>\npub enum ClientType {\n ConfidentialClient,\n PublicClient,\n}\nimpl Display for ClientType {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result\n {\n match *self {\n ClientType::ConfidentialClient => write!(f, \"confidential\"),\n ClientType::PublicClient => write!(f, \"public\"),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement number_of_keys for DbPage<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libc;\n\nuse std::ffi::CString;\nuse std::memory::transmute;\n\n#[link(name = \"rust\")]\nextern {\n fn klee_make_symbolic(data: *const libc::c_void, length: usize, name: *const libc::c_char);\n}\n\npub fn any(data: *const libc::c_void, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(data, length, name_cstr.as_ptr());\n }\n}\n\npub fn i32(data: *const i32, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(transmute(data), length, name_cstr.as_ptr());\n }\n}\n\npub fn u32(data: *const u32, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(transmute(data), length, name_cstr.as_ptr());\n }\n}\n<commit_msg>Update lib.rs<commit_after>extern crate libc;\n\nuse std::ffi::CString;\nuse std::mem::transmute;\n\n#[link(name = \"rust\")]\nextern {\n fn klee_make_symbolic(data: *const libc::c_void, length: usize, name: *const libc::c_char);\n}\n\npub fn any(data: *const libc::c_void, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(data, length, name_cstr.as_ptr());\n }\n}\n\npub fn i32(data: *const i32, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(transmute(data), length, name_cstr.as_ptr());\n }\n}\n\npub fn u32(data: *const u32, length: usize, name: &str) {\n let name_cstr = CString::new(name).unwrap();\n unsafe {\n klee_make_symbolic(transmute(data), length, name_cstr.as_ptr());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use quote::{ToTokens, Tokens};\n\nuse metadata::Metadata;\nuse parse::Entry;\nuse request::Request;\nuse response::Response;\n\n#[derive(Debug)]\npub struct Api {\n metadata: Metadata,\n request: Request,\n response: Response,\n}\n\nimpl ToTokens for Api {\n fn to_tokens(&self, tokens: &mut Tokens) {\n let description = &self.metadata.description;\n let method = &self.metadata.method;\n let name = &self.metadata.name;\n let path = &self.metadata.path;\n let rate_limited = &self.metadata.rate_limited;\n let requires_authentication = &self.metadata.requires_authentication;\n\n let request_types = {\n let mut tokens = Tokens::new();\n self.request.to_tokens(&mut tokens);\n tokens\n };\n let response_types = {\n let mut tokens = Tokens::new();\n self.response.to_tokens(&mut tokens);\n tokens\n };\n\n let add_body_to_request = if self.request.has_body_fields() {\n let request_body_init_fields = self.request.request_body_init_fields();\n\n quote! {\n let request_body = RequestBody {\n #request_body_init_fields\n };\n\n hyper_request.set_body(\n ::serde_json::to_vec(&request_body)\n .expect(\"failed to serialize request body to JSON\")\n );\n }\n } else {\n Tokens::new()\n };\n\n let deserialize_response_body = if self.response.has_body_fields() {\n quote! 
{\n let bytes = hyper_response.body().fold::<_, _, Result<_, ::hyper::Error>>(\n Vec::new(),\n |mut bytes, chunk| {\n bytes.write_all(&chunk).expect(\"failed to append body chunk\");\n\n Ok(bytes)\n }).wait().expect(\"failed to read response body chunks into byte vector\");\n\n let response_body: ResponseBody = ::serde_json::from_slice(bytes.as_slice())\n .expect(\"failed to deserialize body\");\n }\n } else {\n Tokens::new()\n };\n\n let response_init_fields = if self.response.has_fields() {\n self.response.init_fields()\n } else {\n Tokens::new()\n };\n\n tokens.append(quote! {\n use std::io::Write as _Write;\n\n use ::futures::{Future as _Future, Stream as _Stream};\n use ::ruma_api::Endpoint as _RumaApiEndpoint;\n\n \/\/\/ The API endpoint.\n #[derive(Debug)]\n pub struct Endpoint;\n\n #request_types\n\n impl ::std::convert::TryFrom<Request> for ::hyper::Request {\n type Error = ();\n\n #[allow(unused_mut, unused_variables)]\n fn try_from(request: Request) -> Result<Self, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let mut hyper_request = ::hyper::Request::new(\n metadata.method,\n metadata.path.parse().expect(\"failed to parse request URI\"),\n );\n\n #add_body_to_request\n\n Ok(hyper_request)\n }\n }\n\n #response_types\n\n impl ::std::convert::TryFrom<::hyper::Response> for Response {\n type Error = ();\n\n fn try_from(hyper_response: ::hyper::Response) -> Result<Self, Self::Error> {\n #deserialize_response_body\n\n let response = Response {\n #response_init_fields\n };\n\n Ok(response)\n }\n }\n\n impl ::ruma_api::Endpoint for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: ::ruma_api::Metadata = ::ruma_api::Metadata {\n description: #description,\n method: ::hyper::#method,\n name: #name,\n path: #path,\n rate_limited: #rate_limited,\n requires_authentication: #requires_authentication,\n };\n }\n });\n }\n}\n\nimpl From<Vec<Entry>> for Api {\n fn from(entries: Vec<Entry>) -> Api {\n if entries.len() != 3 {\n panic!(\"ruma_api! expects 3 blocks: metadata, request, and response\");\n }\n\n let mut metadata = None;\n let mut request = None;\n let mut response = None;\n\n for entry in entries {\n match entry {\n Entry::Metadata(fields) => metadata = Some(Metadata::from(fields)),\n Entry::Request(fields) => request = Some(Request::from(fields)),\n Entry::Response(fields) => response = Some(Response::from(fields)),\n }\n }\n\n Api {\n metadata: metadata.expect(\"ruma_api! is missing metadata\"),\n request: request.expect(\"ruma_api! is missing request\"),\n response: response.expect(\"ruma_api! 
is missing response\"),\n }\n }\n}\n<commit_msg>Propagate errors instead of panicking.<commit_after>use quote::{ToTokens, Tokens};\n\nuse metadata::Metadata;\nuse parse::Entry;\nuse request::Request;\nuse response::Response;\n\n#[derive(Debug)]\npub struct Api {\n metadata: Metadata,\n request: Request,\n response: Response,\n}\n\nimpl ToTokens for Api {\n fn to_tokens(&self, tokens: &mut Tokens) {\n let description = &self.metadata.description;\n let method = &self.metadata.method;\n let name = &self.metadata.name;\n let path = &self.metadata.path;\n let rate_limited = &self.metadata.rate_limited;\n let requires_authentication = &self.metadata.requires_authentication;\n\n let request_types = {\n let mut tokens = Tokens::new();\n self.request.to_tokens(&mut tokens);\n tokens\n };\n let response_types = {\n let mut tokens = Tokens::new();\n self.response.to_tokens(&mut tokens);\n tokens\n };\n\n let add_body_to_request = if self.request.has_body_fields() {\n let request_body_init_fields = self.request.request_body_init_fields();\n\n quote! {\n let request_body = RequestBody {\n #request_body_init_fields\n };\n\n hyper_request.set_body(::serde_json::to_vec(&request_body)?);\n }\n } else {\n Tokens::new()\n };\n\n let deserialize_response_body = if self.response.has_body_fields() {\n let mut tokens = Tokens::new();\n\n tokens.append(quote! {\n hyper_response.body()\n .fold::<_, _, Result<_, ::std::io::Error>>(Vec::new(), |mut bytes, chunk| {\n bytes.write_all(&chunk)?;\n\n Ok(bytes)\n })\n .map_err(::ruma_api::Error::from)\n .and_then(|bytes| {\n ::serde_json::from_slice::<ResponseBody>(bytes.as_slice())\n .map_err(::ruma_api::Error::from)\n })\n });\n\n tokens.append(\".and_then(|response_body| {\");\n\n tokens\n } else {\n let mut tokens = Tokens::new();\n\n tokens.append(quote! {\n ::futures::future::ok(())\n });\n\n tokens.append(\".and_then(|_| {\");\n\n tokens\n };\n\n let mut closure_end = Tokens::new();\n closure_end.append(\"})\");\n\n let response_init_fields = if self.response.has_fields() {\n self.response.init_fields()\n } else {\n Tokens::new()\n };\n\n tokens.append(quote! 
{\n use std::io::Write as _Write;\n\n use ::futures::{Future as _Future, Stream as _Stream};\n use ::ruma_api::Endpoint as _RumaApiEndpoint;\n\n \/\/\/ The API endpoint.\n #[derive(Debug)]\n pub struct Endpoint;\n\n #request_types\n\n impl ::std::convert::TryFrom<Request> for ::hyper::Request {\n type Error = ::ruma_api::Error;\n\n #[allow(unused_mut, unused_variables)]\n fn try_from(request: Request) -> Result<Self, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let mut hyper_request = ::hyper::Request::new(\n metadata.method,\n metadata.path.parse()?,\n );\n\n #add_body_to_request\n\n Ok(hyper_request)\n }\n }\n\n #response_types\n\n impl ::std::convert::TryFrom<::hyper::Response> for Response {\n type Error = ::ruma_api::Error;\n\n fn try_from(hyper_response: ::hyper::Response) -> Result<Self, Self::Error> {\n #deserialize_response_body\n\n let response = Response {\n #response_init_fields\n };\n\n Ok(response)\n #closure_end\n .wait()\n }\n }\n\n impl ::ruma_api::Endpoint for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: ::ruma_api::Metadata = ::ruma_api::Metadata {\n description: #description,\n method: ::hyper::#method,\n name: #name,\n path: #path,\n rate_limited: #rate_limited,\n requires_authentication: #requires_authentication,\n };\n }\n });\n }\n}\n\nimpl From<Vec<Entry>> for Api {\n fn from(entries: Vec<Entry>) -> Api {\n if entries.len() != 3 {\n panic!(\"ruma_api! expects 3 blocks: metadata, request, and response\");\n }\n\n let mut metadata = None;\n let mut request = None;\n let mut response = None;\n\n for entry in entries {\n match entry {\n Entry::Metadata(fields) => metadata = Some(Metadata::from(fields)),\n Entry::Request(fields) => request = Some(Request::from(fields)),\n Entry::Response(fields) => response = Some(Response::from(fields)),\n }\n }\n\n Api {\n metadata: metadata.expect(\"ruma_api! is missing metadata\"),\n request: request.expect(\"ruma_api! is missing request\"),\n response: response.expect(\"ruma_api! is missing response\"),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test example.<commit_after>#![allow(unused_variables)]\n#[macro_use]\nextern crate cargonauts;\n\nuse cargonauts::api;\n\nroutes! 
{\n resource User => [\"get\"] {\n related Photo: \"has-many\";\n }\n resource Photo => [\"get\", \"index\"] {\n related User: \"has-one\";\n }\n}\n\nstruct User;\n\nimpl cargonauts::Serialize for User {\n fn serialize<S: cargonauts::Serializer>(&self, serializer: &mut S) -> Result<(), S::Error> {\n unimplemented!()\n }\n}\n\nimpl api::Resource for User {\n type Id = u32;\n\n fn id(&self) -> u32 {\n unimplemented!()\n }\n\n fn resource() -> &'static str {\n \"user\"\n }\n}\n\nimpl api::Get for User {\n fn get(id: Self::Id) -> Option<User> {\n unimplemented!()\n }\n}\n\nimpl api::HasMany<Photo> for User {\n fn has_many(id: &Self::Id) -> Vec<Photo> {\n unimplemented!()\n }\n}\n\nstruct Photo;\n\nimpl cargonauts::Serialize for Photo {\n fn serialize<S: cargonauts::Serializer>(&self, serializer: &mut S) -> Result<(), S::Error> {\n unimplemented!()\n }\n}\n\nimpl api::Resource for Photo {\n type Id = u32;\n\n fn id(&self) -> u32 {\n unimplemented!()\n }\n\n fn resource() -> &'static str {\n \"photo\"\n }\n}\n\nimpl api::Get for Photo {\n fn get(id: Self::Id) -> Option<Photo> {\n unimplemented!()\n }\n}\n\nimpl api::Index for Photo {\n fn index() -> Vec<Photo> {\n unimplemented!()\n }\n}\n\nimpl api::HasOne<User> for Photo {\n fn has_one(id: &Self::Id) -> Option<User> {\n unimplemented!()\n }\n}\n\n#[test]\nfn it_compiles() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement search_dbpage_from_root method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>broken build: fiddling with action space for flash cursor input<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(macro_rules, phase, default_type_params)]\n#![deny(missing_doc)]\n#![deny(warnings)]\n#![experimental]\n\n\/\/! # Hyper\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! Hyper offers both an HTTP\/S client an HTTP server which can be used to drive\n\/\/! complex web applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapped over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations can\n\/\/! be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! `match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's header representation is likely the most complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! 
Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/serer`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A Handler in Hyper just accepts an Iterator of `(Request, Response)` pairs and\n\/\/! does whatever it wants with it. This gives Handlers maximum flexibility to decide\n\/\/! on concurrency strategy and exactly how they want to distribute the work of\n\/\/! dealing with `Request` and `Response.`\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. `Response`'s `Writer`\n\/\/! implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! 
in the documentation for sever Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate time;\nextern crate url;\nextern crate openssl;\n#[phase(plugin,link)] extern crate log;\n#[cfg(test)] extern crate test;\nextern crate \"unsafe-any\" as uany;\nextern crate \"move-acceptor\" as macceptor;\nextern crate intertwine;\nextern crate typeable;\n\npub use std::io::net::ip::{SocketAddr, IpAddr, Ipv4Addr, Ipv6Addr, Port};\npub use mimewrapper::mime;\npub use url::Url;\npub use method::{Get, Head, Post, Delete};\npub use status::{Ok, BadRequest, NotFound};\npub use server::Server;\n\nuse std::fmt;\nuse std::io::IoError;\n\nuse std::rt::backtrace;\n\n\nmacro_rules! try_io(\n ($e:expr) => (match $e { Ok(v) => v, Err(e) => return Err(::HttpIoError(e)) })\n)\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n format_args!(|args| log!(5, \"TODO: {}\", args), $($arg)*)\n })\n)\n\n#[allow(dead_code)]\nstruct Trace;\n\nimpl fmt::Show for Trace {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let _ = backtrace::write(fmt);\n ::std::result::Ok(())\n }\n}\n\nmacro_rules! trace(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n format_args!(|args| log!(5, \"{}\\n{}\", args, ::Trace), $($arg)*)\n })\n)\n\npub mod client;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\nmod mimewrapper {\n \/\/\/ Re-exporting the mime crate, for convenience.\n extern crate mime;\n}\n\n\n\/\/\/ Result type often returned from methods that can have `HttpError`s.\npub type HttpResult<T> = Result<T, HttpError>;\n\n\/\/\/ A set of errors that can occur parsing HTTP streams.\n#[deriving(Show, PartialEq, Clone)]\npub enum HttpError {\n \/\/\/ An invalid `Method`, such as `GE,T`.\n HttpMethodError,\n \/\/\/ An invalid `RequestUri`, such as `exam ple.domain`.\n HttpUriError,\n \/\/\/ An invalid `HttpVersion`, such as `HTP\/1.1`\n HttpVersionError,\n \/\/\/ An invalid `Header`.\n HttpHeaderError,\n \/\/\/ An invalid `Status`, such as `1337 ELITE`.\n HttpStatusError,\n \/\/\/ An `IoError` that occured while trying to read or write to a network stream.\n HttpIoError(IoError),\n}\n\n\/\/FIXME: when Opt-in Built-in Types becomes a thing, we can force these structs\n\/\/to be Send. For now, this has the compiler do a static check.\nfn _assert_send<T: Send>() {\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n\n _assert_send::<server::Request>();\n _assert_send::<server::Response<net::Fresh>>();\n}\n<commit_msg>Fix typo in documentation<commit_after>#![feature(macro_rules, phase, default_type_params)]\n#![deny(missing_doc)]\n#![deny(warnings)]\n#![experimental]\n\n\/\/! # Hyper\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! Hyper offers both an HTTP\/S client an HTTP server which can be used to drive\n\/\/! 
complex web applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapped over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations can\n\/\/! be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! `match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's header representation is likely the most complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/server`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A Handler in Hyper just accepts an Iterator of `(Request, Response)` pairs and\n\/\/! does whatever it wants with it. This gives Handlers maximum flexibility to decide\n\/\/! on concurrency strategy and exactly how they want to distribute the work of\n\/\/! dealing with `Request` and `Response.`\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. `Response`'s `Writer`\n\/\/! 
implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! in the documentation for sever Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate time;\nextern crate url;\nextern crate openssl;\n#[phase(plugin,link)] extern crate log;\n#[cfg(test)] extern crate test;\nextern crate \"unsafe-any\" as uany;\nextern crate \"move-acceptor\" as macceptor;\nextern crate intertwine;\nextern crate typeable;\n\npub use std::io::net::ip::{SocketAddr, IpAddr, Ipv4Addr, Ipv6Addr, Port};\npub use mimewrapper::mime;\npub use url::Url;\npub use method::{Get, Head, Post, Delete};\npub use status::{Ok, BadRequest, NotFound};\npub use server::Server;\n\nuse std::fmt;\nuse std::io::IoError;\n\nuse std::rt::backtrace;\n\n\nmacro_rules! try_io(\n ($e:expr) => (match $e { Ok(v) => v, Err(e) => return Err(::HttpIoError(e)) })\n)\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n format_args!(|args| log!(5, \"TODO: {}\", args), $($arg)*)\n })\n)\n\n#[allow(dead_code)]\nstruct Trace;\n\nimpl fmt::Show for Trace {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let _ = backtrace::write(fmt);\n ::std::result::Ok(())\n }\n}\n\nmacro_rules! 
trace(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n format_args!(|args| log!(5, \"{}\\n{}\", args, ::Trace), $($arg)*)\n })\n)\n\npub mod client;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\nmod mimewrapper {\n \/\/\/ Re-exporting the mime crate, for convenience.\n extern crate mime;\n}\n\n\n\/\/\/ Result type often returned from methods that can have `HttpError`s.\npub type HttpResult<T> = Result<T, HttpError>;\n\n\/\/\/ A set of errors that can occur parsing HTTP streams.\n#[deriving(Show, PartialEq, Clone)]\npub enum HttpError {\n \/\/\/ An invalid `Method`, such as `GE,T`.\n HttpMethodError,\n \/\/\/ An invalid `RequestUri`, such as `exam ple.domain`.\n HttpUriError,\n \/\/\/ An invalid `HttpVersion`, such as `HTP\/1.1`\n HttpVersionError,\n \/\/\/ An invalid `Header`.\n HttpHeaderError,\n \/\/\/ An invalid `Status`, such as `1337 ELITE`.\n HttpStatusError,\n \/\/\/ An `IoError` that occured while trying to read or write to a network stream.\n HttpIoError(IoError),\n}\n\n\/\/FIXME: when Opt-in Built-in Types becomes a thing, we can force these structs\n\/\/to be Send. For now, this has the compiler do a static check.\nfn _assert_send<T: Send>() {\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n\n _assert_send::<server::Request>();\n _assert_send::<server::Response<net::Fresh>>();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use triple slash for documentation on Url::origin method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>A few more changes to work on x86, confirmed functioning.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>better error reporting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add replace() methods set() does not return the old value anymore.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::convert::Into;\nuse std::fmt::{Display, Formatter, Error as FmtError};\n\nuse chrono::naive::datetime::NaiveDateTime;\nuse chrono::naive::time::NaiveTime;\nuse chrono::naive::date::NaiveDate;\nuse chrono::Datelike;\nuse chrono::Timelike;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::Result as StoreResult;\n\nuse error::DiaryError as DE;\nuse error::DiaryErrorKind as DEK;\n\nuse module_path::ModuleEntryPath;\n\n#[derive(Debug, Clone)]\npub struct DiaryId {\n name: String,\n year: i32,\n month: u32,\n day: u32,\n hour: u32,\n minute: u32,\n}\n\nimpl DiaryId {\n\n pub fn new(name: String, y: i32, m: u32, d: u32, h: u32, min: u32) -> DiaryId {\n DiaryId {\n name: name,\n year: y,\n month: m,\n day: d,\n hour: h,\n minute: min,\n }\n }\n\n pub fn from_datetime<DT: Datelike + Timelike>(diary_name: String, dt: DT) -> DiaryId {\n DiaryId::new(diary_name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n pub fn diary_name(&self) -> &String {\n &self.name\n }\n\n pub fn year(&self) -> i32 {\n self.year\n }\n\n pub fn month(&self) -> u32 {\n self.month\n }\n\n pub fn day(&self) -> u32 {\n self.day\n }\n\n pub fn hour(&self) -> u32 {\n self.hour\n }\n\n pub fn minute(&self) -> u32 {\n self.minute\n }\n\n pub fn with_diary_name(mut self, name: String) -> DiaryId {\n self.name = name;\n self\n }\n\n pub fn with_year(mut self, year: i32) -> DiaryId {\n self.year = year;\n self\n }\n\n pub fn with_month(mut self, month: u32) -> DiaryId {\n self.month = month;\n self\n }\n\n pub fn with_day(mut self, day: u32) -> DiaryId {\n self.day = day;\n self\n }\n\n pub fn with_hour(mut self, hour: u32) -> DiaryId {\n self.hour = hour;\n self\n }\n\n pub fn with_minute(mut self, minute: u32) -> DiaryId {\n self.minute = minute;\n self\n }\n\n pub fn now(name: String) -> DiaryId {\n use chrono::offset::local::Local;\n\n let now = Local::now();\n let now_date = now.date().naive_local();\n let now_time = now.time();\n let dt = NaiveDateTime::new(now_date, now_time);\n\n DiaryId::new(name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n}\n\nimpl Default for DiaryId {\n\n \/\/\/ Create a default DiaryId which is a diaryid for a diary named \"default\" with\n \/\/\/ time = 0000-00-00 00:00:00\n fn default() -> DiaryId {\n let dt = NaiveDateTime::new(NaiveDate::from_ymd(0, 0, 0), NaiveTime::from_hms(0, 0, 0));\n DiaryId::from_datetime(String::from(\"default\"), dt)\n }\n}\n\nimpl IntoStoreId for DiaryId {\n\n fn into_storeid(self) -> StoreResult<StoreId> {\n let s : String = self.into();\n ModuleEntryPath::new(s).into_storeid()\n }\n\n}\n\nimpl Into<String> for DiaryId {\n\n fn into(self) -> String {\n format!(\"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Display for DiaryId {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n write!(fmt, \"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Into<NaiveDateTime> for DiaryId {\n\n fn into(self) -> NaiveDateTime {\n let d = NaiveDate::from_ymd(self.year, self.month, self.day);\n let t = 
NaiveTime::from_hms(self.hour, self.minute, 0);\n NaiveDateTime::new(d, t)\n }\n\n}\n\npub trait FromStoreId : Sized {\n\n fn from_storeid(&StoreId) -> Result<Self, DE>;\n\n}\n\nuse std::path::Component;\n\nfn component_to_str<'a>(com: Component<'a>) -> Result<&'a str, DE> {\n match com {\n Component::Normal(s) => Some(s),\n _ => None,\n }.and_then(|s| s.to_str())\n .ok_or(DE::new(DEK::ParseError, None))\n}\n\nimpl FromStoreId for DiaryId {\n\n fn from_storeid(s: &StoreId) -> Result<DiaryId, DE> {\n use std::str::FromStr;\n\n let mut cmps = s.components().rev();\n let (hour, minute) = try!(cmps.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str)\n .and_then(|time| {\n let mut time = time.split(\":\");\n let hour = time.next().and_then(|s| FromStr::from_str(s).ok());\n let minute = time.next()\n .and_then(|s| s.split(\"~\").next())\n .and_then(|s| FromStr::from_str(s).ok());\n\n debug!(\"Hour = {:?}\", hour);\n debug!(\"Minute = {:?}\", minute);\n\n match (hour, minute) {\n (Some(h), Some(m)) => Ok((h, m)),\n _ => return Err(DE::new(DEK::ParseError, None)),\n }\n }));\n\n let day: Result<u32,_> = cmps.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str)\n .and_then(|s| s.parse::<u32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let month: Result<u32,_> = cmps.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str)\n .and_then(|s| s.parse::<u32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let year: Result<i32,_> = cmps.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str)\n .and_then(|s| s.parse::<i32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let name = cmps.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str).map(String::from);\n\n debug!(\"Day = {:?}\", day);\n debug!(\"Month = {:?}\", month);\n debug!(\"Year = {:?}\", year);\n debug!(\"Name = {:?}\", name);\n\n let day = try!(day);\n let month = try!(month);\n let year = try!(year);\n let name = try!(name);\n\n Ok(DiaryId::new(name, year, month, day, hour, minute))\n }\n\n}\n\n<commit_msg>Refactor fetching of next component into function<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::convert::Into;\nuse std::fmt::{Display, Formatter, Error as FmtError};\n\nuse chrono::naive::datetime::NaiveDateTime;\nuse chrono::naive::time::NaiveTime;\nuse chrono::naive::date::NaiveDate;\nuse chrono::Datelike;\nuse chrono::Timelike;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::Result as StoreResult;\n\nuse error::DiaryError as DE;\nuse error::DiaryErrorKind as DEK;\n\nuse module_path::ModuleEntryPath;\n\n#[derive(Debug, Clone)]\npub struct DiaryId {\n name: String,\n year: i32,\n month: u32,\n day: u32,\n hour: u32,\n minute: u32,\n}\n\nimpl DiaryId {\n\n pub fn new(name: String, y: i32, m: u32, d: u32, h: u32, min: u32) -> DiaryId {\n DiaryId {\n name: name,\n year: y,\n month: m,\n day: d,\n hour: h,\n minute: min,\n }\n }\n\n pub fn from_datetime<DT: Datelike + Timelike>(diary_name: String, dt: DT) -> DiaryId {\n DiaryId::new(diary_name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n pub fn diary_name(&self) -> &String {\n &self.name\n }\n\n pub fn year(&self) -> i32 {\n self.year\n }\n\n pub fn month(&self) -> u32 {\n self.month\n }\n\n pub fn day(&self) -> u32 {\n self.day\n }\n\n pub fn hour(&self) -> u32 {\n self.hour\n }\n\n pub fn minute(&self) -> u32 {\n self.minute\n }\n\n pub fn with_diary_name(mut self, name: String) -> DiaryId {\n self.name = name;\n self\n }\n\n pub fn with_year(mut self, year: i32) -> DiaryId {\n self.year = year;\n self\n }\n\n pub fn with_month(mut self, month: u32) -> DiaryId {\n self.month = month;\n self\n }\n\n pub fn with_day(mut self, day: u32) -> DiaryId {\n self.day = day;\n self\n }\n\n pub fn with_hour(mut self, hour: u32) -> DiaryId {\n self.hour = hour;\n self\n }\n\n pub fn with_minute(mut self, minute: u32) -> DiaryId {\n self.minute = minute;\n self\n }\n\n pub fn now(name: String) -> DiaryId {\n use chrono::offset::local::Local;\n\n let now = Local::now();\n let now_date = now.date().naive_local();\n let now_time = now.time();\n let dt = NaiveDateTime::new(now_date, now_time);\n\n DiaryId::new(name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n}\n\nimpl Default for DiaryId {\n\n \/\/\/ Create a default DiaryId which is a diaryid for a diary named \"default\" with\n \/\/\/ time = 0000-00-00 00:00:00\n fn default() -> DiaryId {\n let dt = NaiveDateTime::new(NaiveDate::from_ymd(0, 0, 0), NaiveTime::from_hms(0, 0, 0));\n DiaryId::from_datetime(String::from(\"default\"), dt)\n }\n}\n\nimpl IntoStoreId for DiaryId {\n\n fn into_storeid(self) -> StoreResult<StoreId> {\n let s : String = self.into();\n ModuleEntryPath::new(s).into_storeid()\n }\n\n}\n\nimpl Into<String> for DiaryId {\n\n fn into(self) -> String {\n format!(\"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Display for DiaryId {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n write!(fmt, \"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Into<NaiveDateTime> for DiaryId {\n\n fn into(self) -> NaiveDateTime {\n let d = NaiveDate::from_ymd(self.year, self.month, self.day);\n let t = 
NaiveTime::from_hms(self.hour, self.minute, 0);\n NaiveDateTime::new(d, t)\n }\n\n}\n\npub trait FromStoreId : Sized {\n\n fn from_storeid(&StoreId) -> Result<Self, DE>;\n\n}\n\nuse std::path::Component;\n\nfn component_to_str<'a>(com: Component<'a>) -> Result<&'a str, DE> {\n match com {\n Component::Normal(s) => Some(s),\n _ => None,\n }.and_then(|s| s.to_str())\n .ok_or(DE::new(DEK::ParseError, None))\n}\n\nimpl FromStoreId for DiaryId {\n\n fn from_storeid(s: &StoreId) -> Result<DiaryId, DE> {\n use std::str::FromStr;\n\n use std::path::Components;\n use std::iter::Rev;\n\n fn next_component<'a>(components: &'a mut Rev<Components>) -> Result<&'a str, DE> {\n components.next()\n .ok_or(DE::new(DEK::ParseError, None))\n .and_then(component_to_str)\n }\n\n let mut cmps = s.components().rev();\n\n let (hour, minute) = try!(next_component(&mut cmps).and_then(|time| {\n let mut time = time.split(\":\");\n let hour = time.next().and_then(|s| FromStr::from_str(s).ok());\n let minute = time.next()\n .and_then(|s| s.split(\"~\").next())\n .and_then(|s| FromStr::from_str(s).ok());\n\n debug!(\"Hour = {:?}\", hour);\n debug!(\"Minute = {:?}\", minute);\n\n match (hour, minute) {\n (Some(h), Some(m)) => Ok((h, m)),\n _ => return Err(DE::new(DEK::ParseError, None)),\n }\n }));\n\n let day: Result<u32,_> = next_component(&mut cmps)\n .and_then(|s| s.parse::<u32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let month: Result<u32,_> = next_component(&mut cmps)\n .and_then(|s| s.parse::<u32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let year: Result<i32,_> = next_component(&mut cmps)\n .and_then(|s| s.parse::<i32>()\n .map_err(|e| DE::new(DEK::ParseError, Some(Box::new(e)))));\n\n let name = next_component(&mut cmps).map(String::from);\n\n debug!(\"Day = {:?}\", day);\n debug!(\"Month = {:?}\", month);\n debug!(\"Year = {:?}\", year);\n debug!(\"Name = {:?}\", name);\n\n let day = try!(day);\n let month = try!(month);\n let year = try!(year);\n let name = try!(name);\n\n Ok(DiaryId::new(name, year, month, day, hour, minute))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>#![cfg_attr(feature = \"clippy\", feature(plugin))]\n#![cfg_attr(feature = \"clippy\", plugin(clippy))]\n#![cfg_attr(not(feature = \"clippy\"), allow(unknown_lints))]\n\nextern crate devicemapper;\nextern crate clap;\n#[macro_use]\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\n#[macro_use]\nextern crate log;\n\n#[cfg(test)]\nextern crate quickcheck;\n\npub mod engine;\npub mod dbus_api;\npub mod stratis;\n\n\n#[macro_use]\nextern crate custom_derive;\n#[macro_use]\nextern crate enum_derive;\n<commit_msg>Ignore clippy doc-markdown remarks<commit_after>#![cfg_attr(feature = \"clippy\", feature(plugin))]\n#![cfg_attr(feature = \"clippy\", plugin(clippy))]\n#![cfg_attr(not(feature = \"clippy\"), allow(unknown_lints))]\n\n#![allow(doc_markdown)]\n\nextern crate devicemapper;\nextern crate clap;\n#[macro_use]\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\n#[macro_use]\nextern crate log;\n\n#[cfg(test)]\nextern crate quickcheck;\n\npub mod engine;\npub mod dbus_api;\npub mod 
stratis;\n\n\n#[macro_use]\nextern crate custom_derive;\n#[macro_use]\nextern crate enum_derive;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #63379 - jackh726:issue-53096, r=Centril,oli-obk<commit_after>\/\/ check-pass\n#![feature(const_fn)]\n#![feature(type_alias_impl_trait)]\n\ntype Foo = impl Fn() -> usize;\nconst fn bar() -> Foo { || 0usize }\nconst BAZR: Foo = bar();\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix warnings on latest nightly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix cvt.sw rounding errors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>loading textures may be working<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Redundant complexity for generator (review remark) Simplified<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>update compact.rs<commit_after>extern crate timely;\nextern crate differential_dataflow;\n\nuse differential_dataflow::input::Input;\nuse differential_dataflow::operators::Threshold;\n\nfn main() {\n\n let large: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n let small: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n let total: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n\n \/\/ define a new timely dataflow computation.\n timely::execute_from_args(std::env::args().skip(3), move |worker| {\n\n let timer = ::std::time::Instant::now();\n\n let mut probe = timely::dataflow::operators::probe::Handle::new();\n\n \/\/ create a dataflow managing an ever-changing edge collection.\n \tlet mut handle = worker.dataflow(|scope| {\n let (handle, input) = scope.new_collection();\n input.distinct().probe_with(&mut probe);\n handle\n });\n\n println!(\"{:?}:\\tloading edges\", timer.elapsed());\n\n let mut next = batch;\n let mut value = worker.index();\n while value < total {\n if value >= next {\n handle.advance_to(next);\n handle.flush();\n next += batch;\n while probe.less_than(handle.time()) { worker.step(); }\n \/\/ println!(\"{:?}\\tround {} loaded\", timer.elapsed(), next);\n }\n handle.advance_to(value);\n handle.insert(value % large);\n handle.insert(value % small);\n value += worker.peers();\n }\n\n handle.advance_to(total);\n handle.flush();\n while probe.less_than(handle.time()) { worker.step(); }\n\n println!(\"{:?}\\tdata loaded\", timer.elapsed());\n\n let mut next = batch;\n let mut value = worker.index();\n while value < total {\n if value >= next {\n handle.advance_to(total + next);\n handle.flush();\n next += batch;\n while probe.less_than(handle.time()) { worker.step(); }\n \/\/ println!(\"{:?}\\tround {} unloaded\", timer.elapsed(), next);\n }\n handle.advance_to(total + value);\n handle.remove(value % large);\n handle.remove(value % small);\n value += worker.peers();\n }\n\n handle.advance_to(total + total);\n handle.flush();\n while probe.less_than(handle.time()) { worker.step(); }\n\n println!(\"{:?}\\tdata unloaded\", timer.elapsed());\n\n while worker.step() { }\n\n }).unwrap();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>re-enabled lto_build test on 32-bit MSVC<commit_after><|endoftext|>"} {"text":"<commit_before>use core::ptr;\n\nuse super::SDTHeader;\n\n#[repr(packed)]\n#[derive(Clone, Copy, Debug, Default)]\npub struct SSDT {\n pub header: SDTHeader,\n pub data: &'static [u8]\n\n}\n\nimpl SSDT {\n pub fn new(header: 
*const SDTHeader) -> Option<Self> {\n if unsafe { (*header).valid(\"SSDT\") } {\n Some(SSDT {\n header: unsafe { *header },\n data: unsafe { (*header).data() }\n })\n } else {\n None\n }\n }\n}\n<commit_msg>Simplify `SSDT::new()` with fewer `unsafe` blocks<commit_after>use core::ptr;\n\nuse super::SDTHeader;\n\n#[repr(packed)]\n#[derive(Clone, Copy, Debug, Default)]\npub struct SSDT {\n pub header: SDTHeader,\n pub data: &'static [u8]\n\n}\n\nimpl SSDT {\n pub fn new(header: *const SDTHeader) -> Option<Self> {\n let header = unsafe { *header };\n if header.valid(\"SSDT\") {\n Some(SSDT {\n header: header,\n data: header.data(),\n })\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary Clone bounds<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rust? Why not? Let's try it out!<commit_after>extern crate curl;\nextern crate regex;\n\nuse curl::http;\nuse regex::Regex;\nuse std::io::prelude::*;\nuse std::fs::File;\n\nstatic URL: &'static str = \"http:\/\/alpha.wallhaven.cc\/random\";\n\nstatic REX: &'static str = r#\"<img [^>]*? src=\".*?th-(\\d+\\..{3})\"[^>]*?>\"#;\n\nfn get_body(url : &str) -> String {\n let resp = http::handle()\n .get(url)\n .exec()\n .unwrap_or_else(|e| {\n panic!(\"Failed to get {}; error is {}\", url, e);\n });\n let body = std::str::from_utf8(resp.get_body()).unwrap_or_else(|e| {\n panic!(\"Failed to parse response from {}; error is {}\", url, e);\n });\n let ret = body.to_string();\n return ret; \n}\n\nfn get_img_name(html : &str) -> String {\n let re = Regex::new(REX).unwrap();\n let cap = re.captures(html).unwrap();\n let mut ret_a = \"http:\/\/wallpapers.wallhaven.cc\/wallpapers\/full\/wallhaven-\".to_string();\n let ret_b = cap.at(1).unwrap_or(\"\");\n let ret = ret_a+ret_b; \/\/ :0\n return ret;\n}\n\n\/\/fn download_img(url : str) {\n\/\/ let mut f = try!(File::create(\"foo.jpg\"));\n\/\/}\n\nfn main() {\n let html = get_body(URL);\n let img = get_img_name(&html);\n println!(\"{}\", img); \n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>labels for loops added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test-tools\/gen_lines.rs<commit_after>use std::env;\n\nfn main() {\n let os_args = env::args();\n let args: Vec<_> = os_args.collect();\n\n if args.len() != 4 {\n println!(\"usage : {} start numline width\", args[0]);\n return;\n }\n\n let start_num = args[1].trim_right().parse::<u64>().unwrap_or(0);\n let stop_num = args[2].trim_right().parse::<u64>().unwrap_or(0);\n let width_num = args[3].trim_right().parse::<u64>().unwrap_or(0);\n\n gen_lines(start_num, stop_num, width_num);\n}\n\nfn gen_lines(start: u64, stop: u64, linewidth: u64) -> () {\n let string = gen_line(linewidth);\n\n for x in start..start + stop + 1 {\n print!(\"{:012} {}\", x, string);\n }\n}\n\nfn gen_line(linewidth: u64) -> String {\n let mut string = String::new();\n\n let table = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',\n 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',\n 'w', 'x', 'y', 'z'];\n\n for x in 0..linewidth {\n string.push(table[x as usize % table.len()]);\n }\n string.push('\\n');\n\n return string;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\nuse rustc::middle::region::CodeExtent;\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the lval: Mutability and region.\nfn lval_context<'a, 'tcx, D>(\n lval: &Lvalue<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<CodeExtent>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Lvalue::*;\n\n match *lval {\n Local { .. } => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, tam) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, tam.mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = lval_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => lval_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n if !tcx.sess.opts.debugging_opts.mir_emit_validate {\n return;\n }\n\n let local_decls = mir.local_decls.clone(); \/\/ FIXME: Find a way to get rid of this clone.\n\n \/\/\/ Convert an lvalue to a validation operand.\n let lval_to_operand = |lval: Lvalue<'tcx>| -> ValidationOperand<'tcx, Lvalue<'tcx>> {\n let (re, mutbl) = lval_context(&lval, &local_decls, tcx);\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n ValidationOperand { lval, ty, re, mutbl }\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n 
let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, _)| lval_to_operand(Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new();\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) =>\n Some(lval_to_operand(lval.clone())),\n &Operand::Constant(..) => { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(lval.clone())]),\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref. Again an iterator does not seem very suited\n \/\/ as we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. 
To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval, Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => continue,\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n }\n }\n}\n<commit_msg>after a Ref, only acquire the Deref'd destination<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\nuse rustc::middle::region::CodeExtent;\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the lval: Mutability and region.\nfn lval_context<'a, 'tcx, D>(\n lval: &Lvalue<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<CodeExtent>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Lvalue::*;\n\n match *lval {\n Local { .. 
} => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, tam) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, tam.mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = lval_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => lval_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n if !tcx.sess.opts.debugging_opts.mir_emit_validate {\n return;\n }\n\n let local_decls = mir.local_decls.clone(); \/\/ FIXME: Find a way to get rid of this clone.\n\n \/\/\/ Convert an lvalue to a validation operand.\n let lval_to_operand = |lval: Lvalue<'tcx>| -> ValidationOperand<'tcx, Lvalue<'tcx>> {\n let (re, mutbl) = lval_context(&lval, &local_decls, tcx);\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n ValidationOperand { lval, ty, re, mutbl }\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, _)| lval_to_operand(Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new();\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) =>\n Some(lval_to_operand(lval.clone())),\n &Operand::Constant(..) 
=> { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(lval.clone())]),\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref. Again an iterator does not seem very suited\n \/\/ as we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval, Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => continue,\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n \/\/ Do an acquire of the result -- but only what it points to, so add a Deref\n \/\/ projection.\n let dest_lval = Projection { base: dest_lval, elem: ProjectionElem::Deref };\n let dest_lval = Lvalue::Projection(Box::new(dest_lval));\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add new test for object method numbering mismatches.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test for using an object with an associated type binding as the\n\/\/ instantiation for a generic type with a bound.\n\ntrait SomeTrait {\n type SomeType;\n\n fn get(&self) -> Self::SomeType;\n}\n\nfn get_int<T:SomeTrait<SomeType=i32>+?Sized>(x: &T) -> i32 {\n x.get()\n}\n\nimpl SomeTrait for i32 {\n type SomeType = i32;\n fn get(&self) -> i32 {\n *self\n }\n}\n\nfn main() {\n let x = 22_i32;\n let x1: &SomeTrait<SomeType=i32> = &x;\n let y = get_int(x1);\n assert_eq!(x, y);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for 'packet loss' decode calls<commit_after>\/\/! Test that supplying empty packets does forward error correction.\n\nextern crate opus;\nuse opus::*;\n\n#[test]\nfn blah() {\n let mut opus = Decoder::new(48000, Channels::Mono).unwrap();\n\n let mut output = vec![0i16; 5760];\n let size = opus.decode(&[], &mut output[..], true).unwrap();\n assert_eq!(size, 5760);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Rename 'find' to 'get', fixing warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add decode tests for ALU\/status transfer operations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Started firework example<commit_after>extern crate pancurses;\r\n\r\nuse pancurses::*;\r\n\r\nfn main() {\r\n let window = initialize();\r\n window.set_nodelay(true);\r\n if (has_colors()) { start_color(); }\r\n noecho();\r\n end();\r\n}\r\n<|endoftext|>"} {"text":"<commit_before>\/\/! Crate ruma_api contains core types used to define the requests and responses for each endpoint\n\/\/! in the various [Matrix](https:\/\/matrix.org) API specifications.\n\/\/! These types can be shared by client and server code for all Matrix APIs.\n\/\/!\n\/\/! When implementing a new Matrix API, each endpoint has a type that implements `Endpoint`, plus\n\/\/! the necessary associated types.\n\/\/! An implementation of `Endpoint` contains all the information about the HTTP method, the path and\n\/\/! input parameters for requests, and the structure of a successful response.\n\/\/! Such types can then be used by client code to make requests, and by server code to fulfill\n\/\/! 
those requests.\n#![deny(missing_debug_implementations)]\n#![deny(missing_docs)]\n#![feature(try_from)]\n\nextern crate futures;\nextern crate http;\nextern crate hyper;\n#[cfg(test)]\nextern crate ruma_identifiers;\n#[cfg(test)]\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\nextern crate serde_urlencoded;\n\nuse std::convert::TryInto;\nuse std::io;\n\nuse futures::future::FutureFrom;\nuse http::{Method, Request, Response, StatusCode};\n\n\/\/\/ A Matrix API endpoint.\npub trait Endpoint<T, U> {\n \/\/\/ Data needed to make a request to the endpoint.\n type Request: TryInto<Request<T>, Error = Error>;\n \/\/\/ Data returned from the endpoint.\n type Response: FutureFrom<Response<U>, Error = Error>;\n\n \/\/\/ Metadata about the endpoint.\n const METADATA: Metadata;\n}\n\n\/\/\/ An error when converting an `Endpoint::Request` to a `http::Request` or a `http::Response` to\n\/\/\/ an `Endpoint::Response`.\n#[derive(Debug)]\npub enum Error {\n \/\/\/ An HTTP error.\n Http(http::Error),\n \/\/\/ An Hyper error.\n Hyper(hyper::Error),\n \/\/\/ A I\/O error.\n Io(io::Error),\n \/\/\/ A Serde JSON error.\n SerdeJson(serde_json::Error),\n \/\/\/ A Serde URL encoding error.\n SerdeUrlEncoded(serde_urlencoded::ser::Error),\n \/\/\/ An HTTP status code indicating error.\n StatusCode(StatusCode),\n \/\/\/ Standard hack to prevent exhaustive matching.\n \/\/\/ This will be replaced by the #[non_exhaustive] feature when available.\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl From<http::Error> for Error {\n fn from(error: http::Error) -> Self {\n Error::Http(error)\n }\n}\n\nimpl From<hyper::Error> for Error {\n fn from(error: hyper::Error) -> Self {\n Error::Hyper(error)\n }\n}\n\nimpl From<io::Error> for Error {\n fn from(error: io::Error) -> Self {\n Error::Io(error)\n }\n}\n\nimpl From<serde_json::Error> for Error {\n fn from(error: serde_json::Error) -> Self {\n Error::SerdeJson(error)\n }\n}\n\nimpl From<serde_urlencoded::ser::Error> for Error {\n fn from(error: serde_urlencoded::ser::Error) -> Self {\n Error::SerdeUrlEncoded(error)\n }\n}\n\n\/\/\/ Metadata about an API endpoint.\n#[derive(Clone, Debug)]\npub struct Metadata {\n \/\/\/ A human-readable description of the endpoint.\n pub description: &'static str,\n \/\/\/ The HTTP method used by this endpoint.\n pub method: Method,\n \/\/\/ A unique identifier for this endpoint.\n pub name: &'static str,\n \/\/\/ The path of this endpoint's URL, with variable names where path parameters should be filled\n \/\/\/ in during a request.\n pub path: &'static str,\n \/\/\/ Whether or not this endpoint is rate limited by the server.\n pub rate_limited: bool,\n \/\/\/ Whether or not the server requires an authenticated user for this endpoint.\n pub requires_authentication: bool,\n}\n\n#[cfg(test)]\nmod tests {\n \/\/\/ PUT \/_matrix\/client\/r0\/directory\/room\/:room_alias\n pub mod create {\n use std::convert::TryFrom;\n\n use futures::future::{err, ok, FutureFrom, FutureResult};\n use http::method::Method;\n use http::{Request as HttpRequest, Response as HttpResponse};\n use ruma_identifiers::{RoomAliasId, RoomId};\n use serde_json;\n\n use super::super::{Endpoint as ApiEndpoint, Error, Metadata};\n\n #[derive(Debug)]\n pub struct Endpoint;\n\n impl ApiEndpoint<Vec<u8>, Vec<u8>> for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: Metadata = Metadata {\n description: \"Add an alias to a room.\",\n method: Method::PUT,\n name: \"create_alias\",\n path: 
\"\/_matrix\/client\/r0\/directory\/room\/:room_alias\",\n rate_limited: false,\n requires_authentication: true,\n };\n }\n\n \/\/\/ A request to create a new room alias.\n #[derive(Debug)]\n pub struct Request {\n pub room_id: RoomId, \/\/ body\n pub room_alias: RoomAliasId, \/\/ path\n }\n\n #[derive(Debug, Serialize)]\n struct RequestBody {\n room_id: RoomId,\n }\n\n impl TryFrom<Request> for HttpRequest<Vec<u8>> {\n type Error = Error;\n\n fn try_from(request: Request) -> Result<HttpRequest<Vec<u8>>, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let path = metadata\n .path\n .to_string()\n .replace(\":room_alias\", &request.room_alias.to_string());\n\n let request_body = RequestBody {\n room_id: request.room_id,\n };\n\n let http_request = HttpRequest::builder()\n .method(metadata.method)\n .uri(path.as_ref())\n .body(serde_json::to_vec(&request_body).map_err(Error::from)?)?;\n\n Ok(http_request)\n }\n }\n\n \/\/\/ The response to a request to create a new room alias.\n pub struct Response;\n\n impl FutureFrom<HttpResponse<Vec<u8>>> for Response {\n type Future = FutureResult<Self, Self::Error>;\n type Error = Error;\n\n fn future_from(\n http_response: HttpResponse<Vec<u8>>,\n ) -> FutureResult<Self, Self::Error> {\n if http_response.status().is_success() {\n ok(Response)\n } else {\n err(Error::StatusCode(http_response.status().clone()))\n }\n }\n }\n }\n}\n<commit_msg>Use `Hyper::Body` as default for `Endpoint`<commit_after>\/\/! Crate ruma_api contains core types used to define the requests and responses for each endpoint\n\/\/! in the various [Matrix](https:\/\/matrix.org) API specifications.\n\/\/! These types can be shared by client and server code for all Matrix APIs.\n\/\/!\n\/\/! When implementing a new Matrix API, each endpoint has a type that implements `Endpoint`, plus\n\/\/! the necessary associated types.\n\/\/! An implementation of `Endpoint` contains all the information about the HTTP method, the path and\n\/\/! input parameters for requests, and the structure of a successful response.\n\/\/! Such types can then be used by client code to make requests, and by server code to fulfill\n\/\/! 
those requests.\n#![deny(missing_debug_implementations)]\n#![deny(missing_docs)]\n#![feature(try_from)]\n\nextern crate futures;\nextern crate http;\nextern crate hyper;\n#[cfg(test)]\nextern crate ruma_identifiers;\n#[cfg(test)]\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\nextern crate serde_urlencoded;\n\nuse std::convert::TryInto;\nuse std::io;\n\nuse futures::future::FutureFrom;\nuse http::{Method, Request, Response, StatusCode};\nuse hyper::Body;\n\n\/\/\/ A Matrix API endpoint.\npub trait Endpoint<T = Body, U = Body> {\n \/\/\/ Data needed to make a request to the endpoint.\n type Request: TryInto<Request<T>, Error = Error>;\n \/\/\/ Data returned from the endpoint.\n type Response: FutureFrom<Response<U>, Error = Error>;\n\n \/\/\/ Metadata about the endpoint.\n const METADATA: Metadata;\n}\n\n\/\/\/ An error when converting an `Endpoint::Request` to a `http::Request` or a `http::Response` to\n\/\/\/ an `Endpoint::Response`.\n#[derive(Debug)]\npub enum Error {\n \/\/\/ An HTTP error.\n Http(http::Error),\n \/\/\/ An Hyper error.\n Hyper(hyper::Error),\n \/\/\/ A I\/O error.\n Io(io::Error),\n \/\/\/ A Serde JSON error.\n SerdeJson(serde_json::Error),\n \/\/\/ A Serde URL encoding error.\n SerdeUrlEncoded(serde_urlencoded::ser::Error),\n \/\/\/ An HTTP status code indicating error.\n StatusCode(StatusCode),\n \/\/\/ Standard hack to prevent exhaustive matching.\n \/\/\/ This will be replaced by the #[non_exhaustive] feature when available.\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl From<http::Error> for Error {\n fn from(error: http::Error) -> Self {\n Error::Http(error)\n }\n}\n\nimpl From<hyper::Error> for Error {\n fn from(error: hyper::Error) -> Self {\n Error::Hyper(error)\n }\n}\n\nimpl From<io::Error> for Error {\n fn from(error: io::Error) -> Self {\n Error::Io(error)\n }\n}\n\nimpl From<serde_json::Error> for Error {\n fn from(error: serde_json::Error) -> Self {\n Error::SerdeJson(error)\n }\n}\n\nimpl From<serde_urlencoded::ser::Error> for Error {\n fn from(error: serde_urlencoded::ser::Error) -> Self {\n Error::SerdeUrlEncoded(error)\n }\n}\n\n\/\/\/ Metadata about an API endpoint.\n#[derive(Clone, Debug)]\npub struct Metadata {\n \/\/\/ A human-readable description of the endpoint.\n pub description: &'static str,\n \/\/\/ The HTTP method used by this endpoint.\n pub method: Method,\n \/\/\/ A unique identifier for this endpoint.\n pub name: &'static str,\n \/\/\/ The path of this endpoint's URL, with variable names where path parameters should be filled\n \/\/\/ in during a request.\n pub path: &'static str,\n \/\/\/ Whether or not this endpoint is rate limited by the server.\n pub rate_limited: bool,\n \/\/\/ Whether or not the server requires an authenticated user for this endpoint.\n pub requires_authentication: bool,\n}\n\n#[cfg(test)]\nmod tests {\n \/\/\/ PUT \/_matrix\/client\/r0\/directory\/room\/:room_alias\n pub mod create {\n use std::convert::TryFrom;\n\n use futures::future::{err, ok, FutureFrom, FutureResult};\n use http::method::Method;\n use http::{Request as HttpRequest, Response as HttpResponse};\n use ruma_identifiers::{RoomAliasId, RoomId};\n use serde_json;\n\n use super::super::{Endpoint as ApiEndpoint, Error, Metadata};\n\n #[derive(Debug)]\n pub struct Endpoint;\n\n impl ApiEndpoint<Vec<u8>, Vec<u8>> for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: Metadata = Metadata {\n description: \"Add an alias to a room.\",\n method: Method::PUT,\n name: \"create_alias\",\n path: 
\"\/_matrix\/client\/r0\/directory\/room\/:room_alias\",\n rate_limited: false,\n requires_authentication: true,\n };\n }\n\n \/\/\/ A request to create a new room alias.\n #[derive(Debug)]\n pub struct Request {\n pub room_id: RoomId, \/\/ body\n pub room_alias: RoomAliasId, \/\/ path\n }\n\n #[derive(Debug, Serialize)]\n struct RequestBody {\n room_id: RoomId,\n }\n\n impl TryFrom<Request> for HttpRequest<Vec<u8>> {\n type Error = Error;\n\n fn try_from(request: Request) -> Result<HttpRequest<Vec<u8>>, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let path = metadata\n .path\n .to_string()\n .replace(\":room_alias\", &request.room_alias.to_string());\n\n let request_body = RequestBody {\n room_id: request.room_id,\n };\n\n let http_request = HttpRequest::builder()\n .method(metadata.method)\n .uri(path.as_ref())\n .body(serde_json::to_vec(&request_body).map_err(Error::from)?)?;\n\n Ok(http_request)\n }\n }\n\n \/\/\/ The response to a request to create a new room alias.\n pub struct Response;\n\n impl FutureFrom<HttpResponse<Vec<u8>>> for Response {\n type Future = FutureResult<Self, Self::Error>;\n type Error = Error;\n\n fn future_from(\n http_response: HttpResponse<Vec<u8>>,\n ) -> FutureResult<Self, Self::Error> {\n if http_response.status().is_success() {\n ok(Response)\n } else {\n err(Error::StatusCode(http_response.status().clone()))\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added first extern function.<commit_after>extern crate libc;\nuse std::str;\nuse std::ptr;\nuse std::ffi::CStr;\n\nstruct Loc;\nstruct Errno;\n\n#[link(name = \"spsps\")]\nextern{\n\t\/\/This returns and owned C string.\n\tfn spsps_loc_to_string(ptr: *mut Loc) -> *mut libc::c_char;\n}\n\nfn loc_to_string(loc: &mut Loc) -> String{\n\tlet tmp = unsafe { CStr::from_ptr(spsps_loc_to_string(loc)) };\n\tlet retval = str::from_utf8(tmp.to_bytes()).unwrap_or(\"\").to_owned();\n\tunsafe {libc::free(tmp.as_ptr() as *mut libc::c_void);}\n\tretval\n }<|endoftext|>"} {"text":"<commit_before><commit_msg>export cert<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lib: add xtensis lib<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Disable functor and monad for now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reform API structure.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate zip;\nextern crate quick_xml;\nextern crate encoding;\nextern crate byteorder;\n#[macro_use]\nextern crate error_chain;\n\n#[macro_use]\nextern crate log;\n\nmod errors;\npub mod vba;\n\nuse std::path::Path;\nuse std::fs::File;\nuse std::io::BufReader;\nuse std::collections::HashMap;\nuse std::slice::Chunks;\n\npub use errors::*;\nuse vba::VbaProject;\n\nuse zip::read::{ZipFile, ZipArchive};\nuse zip::result::ZipError;\nuse quick_xml::{XmlReader, Event, AsStr};\n\nmacro_rules! 
unexp {\n ($pat: expr) => {\n {\n return Err($pat.into());\n }\n };\n ($pat: expr, $($args: expr)* ) => {\n {\n return Err(format!($pat, $($args)*).into());\n }\n };\n}\n\n#[derive(Debug, Clone)]\npub enum DataType {\n Int(i64),\n Float(f64),\n String(String),\n Empty,\n}\n\nenum FileType {\n \/\/\/ Compound File Binary Format [MS-CFB]\n CFB(File),\n Zip(ZipArchive<File>),\n}\n\npub struct Excel {\n zip: FileType,\n strings: Vec<String>,\n \/\/\/ Map of sheet names\/sheet path within zip archive\n sheets: HashMap<String, String>,\n}\n\n#[derive(Debug, Default)]\npub struct Range {\n position: (u32, u32),\n size: (usize, usize),\n inner: Vec<DataType>,\n}\n\n\/\/\/ An iterator to read `Range` struct row by row\npub struct Rows<'a> {\n inner: Chunks<'a, DataType>,\n}\n\nimpl Excel {\n\n \/\/\/ Opens a new workbook\n pub fn open<P: AsRef<Path>>(path: P) -> Result<Excel> {\n let f = try!(File::open(&path));\n let zip = match path.as_ref().extension().and_then(|s| s.to_str()) {\n Some(\"xls\") | Some(\"xla\") => FileType::CFB(f),\n Some(\"xlsb\") | Some(\"xlsm\") | Some(\"xlam\") => FileType::Zip(try!(ZipArchive::new(f))),\n Some(e) => return Err(format!(\"unrecognized extension: {:?}\", e).into()),\n None => return Err(\"expecting a file with an extension\".into()),\n };\n Ok(Excel { zip: zip, strings: vec![], sheets: HashMap::new() })\n }\n\n \/\/\/ Does the workbook contain a vba project\n pub fn has_vba(&mut self) -> bool {\n match self.zip {\n FileType::CFB(_) => true,\n FileType::Zip(ref mut z) => z.by_name(\"xl\/vbaProject.bin\").is_ok()\n }\n }\n\n \/\/\/ Gets vba project\n pub fn vba_project(&mut self) -> Result<VbaProject> {\n match self.zip {\n FileType::CFB(ref mut f) => {\n let len = try!(f.metadata()).len() as usize;\n VbaProject::new(f, len)\n },\n FileType::Zip(ref mut z) => {\n let f = try!(z.by_name(\"xl\/vbaProject.bin\"));\n let len = f.size() as usize;\n VbaProject::new(f, len)\n }\n }\n }\n\n \/\/\/ Get all data from `Worksheet`\n pub fn worksheet_range(&mut self, name: &str) -> Result<Range> {\n try!(self.read_shared_strings());\n try!(self.read_sheets_names());\n let strings = &self.strings;\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"worksheet_range not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n let ws = match self.sheets.get(name) {\n Some(p) => try!(z.by_name(p)),\n None => unexp!(\"Sheet '{}' does not exist\", name),\n };\n Range::from_worksheet(ws, strings)\n }\n\n \/\/\/ Loop through all archive files and opens 'xl\/worksheets' files\n \/\/\/ Store sheet name and path into self.sheets\n fn read_sheets_names(&mut self) -> Result<()> {\n if self.sheets.is_empty() {\n let sheets = {\n let mut sheets = HashMap::new();\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"read_sheet_names not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n for i in 0..z.len() {\n let f = try!(z.by_index(i));\n let name = f.name().to_string();\n if name.starts_with(\"xl\/worksheets\/\") {\n let xml = XmlReader::from_reader(BufReader::new(f))\n .with_check(false)\n .trim_text(false);\n 'xml_loop: for res_event in xml {\n if let Ok(Event::Start(ref e)) = res_event {\n if e.name() == b\"sheetPr\" {\n for a in e.attributes() {\n if let Ok((b\"codeName\", v)) = a {\n sheets.insert(try!(v.as_str()).to_string(), name);\n break 'xml_loop;\n }\n }\n }\n }\n }\n }\n }\n sheets\n };\n self.sheets = sheets;\n }\n Ok(())\n }\n\n \/\/\/ Read shared string list\n fn read_shared_strings(&mut self) -> Result<()> {\n if 
self.strings.is_empty() {\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"read_shared_strings not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n match z.by_name(\"xl\/sharedStrings.xml\") {\n Ok(f) => {\n let mut xml = XmlReader::from_reader(BufReader::new(f))\n .with_check(false)\n .trim_text(false);\n\n let mut strings = Vec::new();\n while let Some(res_event) = xml.next() {\n match res_event {\n Ok(Event::Start(ref e)) if e.name() == b\"t\" => {\n strings.push(try!(xml.read_text(b\"t\")));\n }\n Err(e) => return Err(e.into()),\n _ => (),\n }\n }\n self.strings = strings;\n },\n Err(ZipError::FileNotFound) => (),\n Err(e) => return Err(e.into()),\n }\n }\n\n Ok(())\n }\n\n}\n\nimpl Range {\n\n \/\/\/ open a xml `ZipFile` reader and read content of *sheetData* and *dimension* node\n fn from_worksheet(xml: ZipFile, strings: &[String]) -> Result<Range> {\n let mut xml = XmlReader::from_reader(BufReader::new(xml))\n .with_check(false)\n .trim_text(false);\n let mut data = Range::default();\n while let Some(res_event) = xml.next() {\n match res_event {\n Err(e) => return Err(e.into()),\n Ok(Event::Start(ref e)) => {\n match e.name() {\n b\"dimension\" => match e.attributes().filter_map(|a| a.ok())\n .find(|&(key, _)| key == b\"ref\") {\n Some((_, dim)) => {\n let (position, size) = try!(get_dimension(try!(dim.as_str())));\n data.position = position;\n data.size = (size.0 as usize, size.1 as usize);\n data.inner.reserve_exact(data.size.0 * data.size.1);\n },\n None => unexp!(\"Expecting dimension, got {:?}\", e),\n },\n b\"sheetData\" => {\n let _ = try!(data.read_sheet_data(&mut xml, strings));\n }\n _ => (),\n }\n },\n _ => (),\n }\n }\n data.inner.shrink_to_fit();\n Ok(data)\n }\n \n \/\/\/ get worksheet position (row, column)\n pub fn get_position(&self) -> (u32, u32) {\n self.position\n }\n\n \/\/\/ get size\n pub fn get_size(&self) -> (usize, usize) {\n self.size\n }\n\n \/\/\/ get cell value\n pub fn get_value(&self, i: usize, j: usize) -> &DataType {\n let idx = i * self.size.0 + j;\n &self.inner[idx]\n }\n\n \/\/\/ get an iterator over inner rows\n pub fn rows(&self) -> Rows {\n let width = self.size.0;\n Rows { inner: self.inner.chunks(width) }\n }\n\n \/\/\/ read sheetData node\n fn read_sheet_data(&mut self, xml: &mut XmlReader<BufReader<ZipFile>>, strings: &[String]) \n -> Result<()> \n {\n while let Some(res_event) = xml.next() {\n match res_event {\n Err(e) => return Err(e.into()),\n Ok(Event::Start(ref c_element)) => {\n if c_element.name() == b\"c\" {\n loop {\n match xml.next() {\n Some(Err(e)) => return Err(e.into()),\n Some(Ok(Event::Start(ref e))) => {\n if e.name() == b\"v\" {\n let v = try!(xml.read_text(b\"v\"));\n let value = match c_element.attributes()\n .filter_map(|a| a.ok())\n .find(|&(k, _)| k == b\"t\") {\n Some((_, b\"s\")) => {\n let idx: usize = try!(v.parse());\n DataType::String(strings[idx].clone())\n },\n \/\/ TODO: check in styles to know which type is\n \/\/ supposed to be used\n _ => match v.parse() {\n Ok(i) => DataType::Int(i),\n Err(_) => try!(v.parse()\n .map(DataType::Float)),\n },\n };\n self.inner.push(value);\n break;\n } else {\n unexp!(\"not v node\");\n }\n },\n Some(Ok(Event::End(ref e))) => {\n if e.name() == b\"c\" {\n self.inner.push(DataType::Empty);\n break;\n }\n }\n None => unexp!(\"End of xml\"),\n _ => (),\n }\n }\n }\n },\n Ok(Event::End(ref e)) if e.name() == b\"sheetData\" => return Ok(()),\n _ => (),\n }\n }\n unexp!(\"Could not find <\/sheetData>\")\n }\n\n}\n\nimpl<'a> Iterator for 
Rows<'a> {\n type Item = &'a [DataType];\n fn next(&mut self) -> Option<&'a [DataType]> {\n self.inner.next()\n }\n}\n\n\/\/\/ converts a text representation (e.g. \"A6:G67\") of a dimension into integers\n\/\/\/ - top left (row, column), \n\/\/\/ - size (width, height)\nfn get_dimension(dimension: &str) -> Result<((u32, u32), (u32, u32))> {\n match dimension.chars().position(|c| c == ':') {\n None => {\n get_row_column(dimension).map(|position| (position, (1, 1)))\n }, \n Some(p) => {\n let top_left = try!(get_row_column(&dimension[..p]));\n let bottom_right = try!(get_row_column(&dimension[p + 1..]));\n Ok((top_left, (bottom_right.0 - top_left.0 + 1, bottom_right.1 - top_left.1 + 1)))\n }\n }\n}\n\n\/\/\/ converts a text range name into its position (row, column)\nfn get_row_column(range: &str) -> Result<(u32, u32)> {\n let mut col = 0;\n let mut pow = 1;\n let mut rowpos = range.len();\n let mut readrow = true;\n for c in range.chars().rev() {\n match c {\n '0'...'9' => {\n if readrow {\n rowpos -= 1;\n } else {\n unexp!(\"Numeric character are only allowed at the end of the range: {}\", c);\n }\n }\n c @ 'A'...'Z' => {\n readrow = false;\n col += ((c as u8 - b'A') as u32 + 1) * pow;\n pow *= 26;\n },\n c @ 'a'...'z' => {\n readrow = false;\n col += ((c as u8 - b'a') as u32 + 1) * pow;\n pow *= 26;\n },\n _ => unexp!(\"Expecting alphanumeric character, got {:?}\", c),\n }\n }\n let row = try!(range[rowpos..].parse());\n Ok((row, col))\n}\n\n#[cfg(test)]\nmod tests {\n\n extern crate env_logger;\n\n use super::Excel;\n use std::fs::File;\n use super::vba::VbaProject;\n\n #[test]\n fn test_range_sample() {\n let mut xl = Excel::open(\"\/home\/jtuffe\/download\/DailyValo_FX_Rates_Credit_05 25 16.xlsm\")\n .expect(\"cannot open excel file\");\n println!(\"{:?}\", xl.sheets);\n let data = xl.worksheet_range(\"Sheet1\");\n assert!(data.is_ok());\n for (i, r) in data.unwrap().rows().enumerate() {\n println!(\"Row {}: {:?}\", i, r);\n }\n }\n \n #[test]\n fn test_vba() {\n\n env_logger::init().unwrap();\n\n\/\/ let path = \"\/home\/jtuffe\/download\/test_vba.xlsm\";\n let path = \"\/home\/jtuffe\/download\/Extractions Simples.xlsb\";\n let path = \"\/home\/jtuffe\/download\/test_xl\/ReportRDM_CVA VF_v3.xlsm\";\n let path = \"\/home\/jtuffe\/download\/KelvinsAutoEmailer.xls\";\n let f = File::open(path).unwrap();\n let len = f.metadata().unwrap().len() as usize;\n let vba_project = VbaProject::new(f, len).unwrap();\n let vba = vba_project.read_vba();\n let (references, modules) = vba.unwrap();\n println!(\"references: {:#?}\", references);\n for module in &modules {\n let data = vba_project.read_module(module).unwrap();\n println!(\"module {}:\\r\\n{}\", module.name, data);\n }\n\n }\n}\n<commit_msg>do not discard xlsx<commit_after>extern crate zip;\nextern crate quick_xml;\nextern crate encoding;\nextern crate byteorder;\n#[macro_use]\nextern crate error_chain;\n\n#[macro_use]\nextern crate log;\n\nmod errors;\npub mod vba;\n\nuse std::path::Path;\nuse std::fs::File;\nuse std::io::BufReader;\nuse std::collections::HashMap;\nuse std::slice::Chunks;\n\npub use errors::*;\nuse vba::VbaProject;\n\nuse zip::read::{ZipFile, ZipArchive};\nuse zip::result::ZipError;\nuse quick_xml::{XmlReader, Event, AsStr};\n\nmacro_rules! 
unexp {\n ($pat: expr) => {\n {\n return Err($pat.into());\n }\n };\n ($pat: expr, $($args: expr)* ) => {\n {\n return Err(format!($pat, $($args)*).into());\n }\n };\n}\n\n#[derive(Debug, Clone)]\npub enum DataType {\n Int(i64),\n Float(f64),\n String(String),\n Empty,\n}\n\nenum FileType {\n \/\/\/ Compound File Binary Format [MS-CFB]\n CFB(File),\n Zip(ZipArchive<File>),\n}\n\npub struct Excel {\n zip: FileType,\n strings: Vec<String>,\n \/\/\/ Map of sheet names\/sheet path within zip archive\n sheets: HashMap<String, String>,\n}\n\n#[derive(Debug, Default)]\npub struct Range {\n position: (u32, u32),\n size: (usize, usize),\n inner: Vec<DataType>,\n}\n\n\/\/\/ An iterator to read `Range` struct row by row\npub struct Rows<'a> {\n inner: Chunks<'a, DataType>,\n}\n\nimpl Excel {\n\n \/\/\/ Opens a new workbook\n pub fn open<P: AsRef<Path>>(path: P) -> Result<Excel> {\n let f = try!(File::open(&path));\n let zip = match path.as_ref().extension().and_then(|s| s.to_str()) {\n Some(\"xls\") | Some(\"xla\") => FileType::CFB(f),\n Some(\"xlsx\") | Some(\"xlsb\") | Some(\"xlsm\") | \n Some(\"xlam\") => FileType::Zip(try!(ZipArchive::new(f))),\n Some(e) => return Err(format!(\"unrecognized extension: {:?}\", e).into()),\n None => return Err(\"expecting a file with an extension\".into()),\n };\n Ok(Excel { zip: zip, strings: vec![], sheets: HashMap::new() })\n }\n\n \/\/\/ Does the workbook contain a vba project\n pub fn has_vba(&mut self) -> bool {\n match self.zip {\n FileType::CFB(_) => true,\n FileType::Zip(ref mut z) => z.by_name(\"xl\/vbaProject.bin\").is_ok()\n }\n }\n\n \/\/\/ Gets vba project\n pub fn vba_project(&mut self) -> Result<VbaProject> {\n match self.zip {\n FileType::CFB(ref mut f) => {\n let len = try!(f.metadata()).len() as usize;\n VbaProject::new(f, len)\n },\n FileType::Zip(ref mut z) => {\n let f = try!(z.by_name(\"xl\/vbaProject.bin\"));\n let len = f.size() as usize;\n VbaProject::new(f, len)\n }\n }\n }\n\n \/\/\/ Get all data from `Worksheet`\n pub fn worksheet_range(&mut self, name: &str) -> Result<Range> {\n try!(self.read_shared_strings());\n try!(self.read_sheets_names());\n let strings = &self.strings;\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"worksheet_range not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n let ws = match self.sheets.get(name) {\n Some(p) => try!(z.by_name(p)),\n None => unexp!(\"Sheet '{}' does not exist\", name),\n };\n Range::from_worksheet(ws, strings)\n }\n\n \/\/\/ Loop through all archive files and opens 'xl\/worksheets' files\n \/\/\/ Store sheet name and path into self.sheets\n fn read_sheets_names(&mut self) -> Result<()> {\n if self.sheets.is_empty() {\n let sheets = {\n let mut sheets = HashMap::new();\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"read_sheet_names not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n for i in 0..z.len() {\n let f = try!(z.by_index(i));\n let name = f.name().to_string();\n if name.starts_with(\"xl\/worksheets\/\") {\n let xml = XmlReader::from_reader(BufReader::new(f))\n .with_check(false)\n .trim_text(false);\n 'xml_loop: for res_event in xml {\n if let Ok(Event::Start(ref e)) = res_event {\n if e.name() == b\"sheetPr\" {\n for a in e.attributes() {\n if let Ok((b\"codeName\", v)) = a {\n sheets.insert(try!(v.as_str()).to_string(), name);\n break 'xml_loop;\n }\n }\n }\n }\n }\n }\n }\n sheets\n };\n self.sheets = sheets;\n }\n Ok(())\n }\n\n \/\/\/ Read shared string list\n fn read_shared_strings(&mut self) 
-> Result<()> {\n if self.strings.is_empty() {\n let z = match self.zip {\n FileType::CFB(_) => return Err(\"read_shared_strings not implemented for CFB files\".into()),\n FileType::Zip(ref mut z) => z\n };\n match z.by_name(\"xl\/sharedStrings.xml\") {\n Ok(f) => {\n let mut xml = XmlReader::from_reader(BufReader::new(f))\n .with_check(false)\n .trim_text(false);\n\n let mut strings = Vec::new();\n while let Some(res_event) = xml.next() {\n match res_event {\n Ok(Event::Start(ref e)) if e.name() == b\"t\" => {\n strings.push(try!(xml.read_text(b\"t\")));\n }\n Err(e) => return Err(e.into()),\n _ => (),\n }\n }\n self.strings = strings;\n },\n Err(ZipError::FileNotFound) => (),\n Err(e) => return Err(e.into()),\n }\n }\n\n Ok(())\n }\n\n}\n\nimpl Range {\n\n \/\/\/ open a xml `ZipFile` reader and read content of *sheetData* and *dimension* node\n fn from_worksheet(xml: ZipFile, strings: &[String]) -> Result<Range> {\n let mut xml = XmlReader::from_reader(BufReader::new(xml))\n .with_check(false)\n .trim_text(false);\n let mut data = Range::default();\n while let Some(res_event) = xml.next() {\n match res_event {\n Err(e) => return Err(e.into()),\n Ok(Event::Start(ref e)) => {\n match e.name() {\n b\"dimension\" => match e.attributes().filter_map(|a| a.ok())\n .find(|&(key, _)| key == b\"ref\") {\n Some((_, dim)) => {\n let (position, size) = try!(get_dimension(try!(dim.as_str())));\n data.position = position;\n data.size = (size.0 as usize, size.1 as usize);\n data.inner.reserve_exact(data.size.0 * data.size.1);\n },\n None => unexp!(\"Expecting dimension, got {:?}\", e),\n },\n b\"sheetData\" => {\n let _ = try!(data.read_sheet_data(&mut xml, strings));\n }\n _ => (),\n }\n },\n _ => (),\n }\n }\n data.inner.shrink_to_fit();\n Ok(data)\n }\n \n \/\/\/ get worksheet position (row, column)\n pub fn get_position(&self) -> (u32, u32) {\n self.position\n }\n\n \/\/\/ get size\n pub fn get_size(&self) -> (usize, usize) {\n self.size\n }\n\n \/\/\/ get cell value\n pub fn get_value(&self, i: usize, j: usize) -> &DataType {\n let idx = i * self.size.0 + j;\n &self.inner[idx]\n }\n\n \/\/\/ get an iterator over inner rows\n pub fn rows(&self) -> Rows {\n let width = self.size.0;\n Rows { inner: self.inner.chunks(width) }\n }\n\n \/\/\/ read sheetData node\n fn read_sheet_data(&mut self, xml: &mut XmlReader<BufReader<ZipFile>>, strings: &[String]) \n -> Result<()> \n {\n while let Some(res_event) = xml.next() {\n match res_event {\n Err(e) => return Err(e.into()),\n Ok(Event::Start(ref c_element)) => {\n if c_element.name() == b\"c\" {\n loop {\n match xml.next() {\n Some(Err(e)) => return Err(e.into()),\n Some(Ok(Event::Start(ref e))) => {\n if e.name() == b\"v\" {\n let v = try!(xml.read_text(b\"v\"));\n let value = match c_element.attributes()\n .filter_map(|a| a.ok())\n .find(|&(k, _)| k == b\"t\") {\n Some((_, b\"s\")) => {\n let idx: usize = try!(v.parse());\n DataType::String(strings[idx].clone())\n },\n \/\/ TODO: check in styles to know which type is\n \/\/ supposed to be used\n _ => match v.parse() {\n Ok(i) => DataType::Int(i),\n Err(_) => try!(v.parse()\n .map(DataType::Float)),\n },\n };\n self.inner.push(value);\n break;\n } else {\n unexp!(\"not v node\");\n }\n },\n Some(Ok(Event::End(ref e))) => {\n if e.name() == b\"c\" {\n self.inner.push(DataType::Empty);\n break;\n }\n }\n None => unexp!(\"End of xml\"),\n _ => (),\n }\n }\n }\n },\n Ok(Event::End(ref e)) if e.name() == b\"sheetData\" => return Ok(()),\n _ => (),\n }\n }\n unexp!(\"Could not find <\/sheetData>\")\n 
}\n\n}\n\nimpl<'a> Iterator for Rows<'a> {\n type Item = &'a [DataType];\n fn next(&mut self) -> Option<&'a [DataType]> {\n self.inner.next()\n }\n}\n\n\/\/\/ converts a text representation (e.g. \"A6:G67\") of a dimension into integers\n\/\/\/ - top left (row, column), \n\/\/\/ - size (width, height)\nfn get_dimension(dimension: &str) -> Result<((u32, u32), (u32, u32))> {\n match dimension.chars().position(|c| c == ':') {\n None => {\n get_row_column(dimension).map(|position| (position, (1, 1)))\n }, \n Some(p) => {\n let top_left = try!(get_row_column(&dimension[..p]));\n let bottom_right = try!(get_row_column(&dimension[p + 1..]));\n Ok((top_left, (bottom_right.0 - top_left.0 + 1, bottom_right.1 - top_left.1 + 1)))\n }\n }\n}\n\n\/\/\/ converts a text range name into its position (row, column)\nfn get_row_column(range: &str) -> Result<(u32, u32)> {\n let mut col = 0;\n let mut pow = 1;\n let mut rowpos = range.len();\n let mut readrow = true;\n for c in range.chars().rev() {\n match c {\n '0'...'9' => {\n if readrow {\n rowpos -= 1;\n } else {\n unexp!(\"Numeric character are only allowed at the end of the range: {}\", c);\n }\n }\n c @ 'A'...'Z' => {\n readrow = false;\n col += ((c as u8 - b'A') as u32 + 1) * pow;\n pow *= 26;\n },\n c @ 'a'...'z' => {\n readrow = false;\n col += ((c as u8 - b'a') as u32 + 1) * pow;\n pow *= 26;\n },\n _ => unexp!(\"Expecting alphanumeric character, got {:?}\", c),\n }\n }\n let row = try!(range[rowpos..].parse());\n Ok((row, col))\n}\n\n#[cfg(test)]\nmod tests {\n\n extern crate env_logger;\n\n use super::Excel;\n use std::fs::File;\n use super::vba::VbaProject;\n\n #[test]\n fn test_range_sample() {\n let mut xl = Excel::open(\"\/home\/jtuffe\/download\/DailyValo_FX_Rates_Credit_05 25 16.xlsm\")\n .expect(\"cannot open excel file\");\n println!(\"{:?}\", xl.sheets);\n let data = xl.worksheet_range(\"Sheet1\");\n assert!(data.is_ok());\n for (i, r) in data.unwrap().rows().enumerate() {\n println!(\"Row {}: {:?}\", i, r);\n }\n }\n \n #[test]\n fn test_vba() {\n\n env_logger::init().unwrap();\n\n\/\/ let path = \"\/home\/jtuffe\/download\/test_vba.xlsm\";\n let path = \"\/home\/jtuffe\/download\/Extractions Simples.xlsb\";\n let path = \"\/home\/jtuffe\/download\/test_xl\/ReportRDM_CVA VF_v3.xlsm\";\n let path = \"\/home\/jtuffe\/download\/KelvinsAutoEmailer.xls\";\n let f = File::open(path).unwrap();\n let len = f.metadata().unwrap().len() as usize;\n let vba_project = VbaProject::new(f, len).unwrap();\n let vba = vba_project.read_vba();\n let (references, modules) = vba.unwrap();\n println!(\"references: {:#?}\", references);\n for module in &modules {\n let data = vba_project.read_module(module).unwrap();\n println!(\"module {}:\\r\\n{}\", module.name, data);\n }\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>impl Hash for Vec<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change a comment to a doc comment.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Redox support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Stylistic nit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>use std::ops::Add;\n\n#[derive(Debug)]\nstruct Year {\n num: i32,\n leap: bool,\n}\n\nimpl Year {\n fn new(y: i32) -> Self {\n Year {\n num: y,\n leap: Self::judge_leap(y),\n }\n }\n\n fn judge_leap(y: i32) -> bool {\n if (y % 4) != 0 {\n return false;\n } else if (y % 100) != 0 {\n return true;\n } else if (y % 400) != 0 {\n return false;\n } else {\n true\n }\n 
}\n}\n\n#[derive(Debug)]\nstruct Month(u8);\n\nimpl Month {\n fn new(m: u8) -> Result<Self, String> {\n if m < 1 || m > 12 {\n return Err(String::from(\"Are you kidding me\"));\n }\n\n Ok(Month(m))\n }\n}\n\n#[derive(Debug)]\nstruct Days(i32);\n\nimpl Days {\n fn new(d:i32) -> \n}\n\n#[derive(Debug)]\nstruct Date {\n Y: Year,\n M: Month,\n D: Days,\n}\n\nimpl Date {\n fn new(y: i32, m: u8, d: i32) -> Result<Self, String> {\n Ok(Date {\n Y: Year::new(y),\n M: Month::new(m)?,\n D: Days::new(d),\n })\n }\n}\n\nimpl Add<u32> for Date {\n type Output = Self;\n fn add(self, other: u32) -> Self {\n Date::new()\n }\n}\n\nfn date_calculation(d: Date, days: i32) -> Date {\n Date::new()\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix minor typo\/bug<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename Pure to Leaf and Roll to Nest<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>A compelling argument from outside in implementation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Read and write loops<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>mouse movement is still missing, no documentation from x11 so tough to call<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>No comment....<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Append a 'read' prefix to associated Archive functions that read from memory<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove old tests module<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cargo build --release to optimize.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #211 - servo:url-username, r=SimonSapin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor logic simplification on cookie matching.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Problem 06<commit_after>enum List<T> {\n Cons(T, ~List<T>),\n Nil\n}\n\nimpl<T: Eq> Eq for List<T> {\n fn eq(&self, ys: &List<T>) -> bool {\n match (self, ys) {\n (&Nil, &Nil) => true,\n (&Cons(ref x, ~ref rest_x), &Cons(ref y, ~ref rest_y)) if x == y =>\n rest_x == rest_y,\n _ => false\n }\n }\n}\n\nfn is_palindrome<T: Clone+Eq>(list: &List<T>) -> bool {\n fn rev<T: Clone>(list: &List<T>) -> List<T> {\n fn rev_aux<T: Clone>(list: &List<T>, acc: List<T>) -> List<T> {\n match *list {\n Nil => acc,\n Cons(ref elem, ~ref rest) => rev_aux(rest, Cons((*elem).clone(), ~acc))\n }\n }\n rev_aux(list, Nil)\n }\n list == &rev(list)\n}\n\nfn main() {\n let kayak: List<char> =\n Cons('k', ~Cons('a', ~Cons('y', ~Cons('a', ~Cons('k', ~Nil)))));\n let list: List<char> =\n Cons('l', ~Cons('i', ~Cons('s', ~Cons('t', ~Nil))));\n\n assert!(is_palindrome(&kayak));\n assert!(!is_palindrome(&list));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skipped tests do not get counted as failed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Build both dynamic and static libraries<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate orbclient;\nextern crate sinulation;\n\nuse super::start;\n\n#[cfg(target_os = \"redox\")] \/\/ if os is redox use these trig functions instead of the ones from standard lib\nuse sinulation::Trig;\n\npub struct Shader {\n pub id: u16,\n pub shader: Box<Fn(&Triangle, &mut start::Window, &Shader)>,\n pub image_data: orbclient::BmpFile,\n}\n\nimpl Shader {\n pub fn new(id: u16, shader: Box<Fn(&Triangle, &mut start::Window, &Shader)>) -> Shader {\n Shader {\n 
id: id,\n shader: shader,\n image_data: orbclient::BmpFile::default(),\n }\n }\n\n pub fn null() -> Shader {\n Shader {\n id: 0,\n shader: Box::new(|triangle: &Triangle, window: &mut start::Window, wrapper: &Shader| {}),\n image_data: orbclient::BmpFile::default(),\n }\n }\n\n \/*pub fn apply(self, triangle: &Triangle) {\n (self.shader)(triangle);\n }*\/\n}\n\n\/\/\/ Color struct. Stores colors in 8-bit RGB.\n#[derive(Debug, Clone, Copy)]\npub struct Color {\n pub r: u8,\n pub g: u8,\n pub b: u8,\n}\n\nimpl Color {\n pub fn new(r: u8, g: u8, b: u8) -> Color {\n Color {\n r: r,\n g: g,\n b: b,\n }\n }\n\n \/\/\/ Converts tetrahedrane colors to ones accepted by orbclient library.\n pub fn orb_color(&self) -> orbclient::color::Color {\n orbclient::color::Color::rgb(self.r, self.g, self.b)\n }\n}\n\n\/\/\/ 2D point. Coordinates are screen pixels.\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: i32,\n pub y: i32,\n}\n\n\/\/\/ 3D point. Coordinates are floating point, similar to OpenGL coordinates. \n\/\/\/\n\/\/\/ 0.0 is center, -1.0 is left, 1.0 is right etc.\n\/\/\/\n#[derive(Clone, Copy, Debug)]\npub struct DepthPoint {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n last_x_y: f32,\n last_x_z: f32,\n last_y_z: f32,\n}\n\nimpl DepthPoint {\n \/\/\/ Creates a new 3D point.\n pub fn new(x: f32, y: f32, z: f32) -> DepthPoint {\n DepthPoint {\n x: x, \n y: y,\n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n last_x_y: 0.0,\n last_x_z: 0.0,\n last_y_z: 0.0,\n }\n }\n\n \/\/\/ Converts into 2D point, with perspective.\n pub fn flat_point(&mut self, engine_scr_x: u32, engine_scr_y: u32, offset_x: f32, offset_y: f32, offset_z: f32) -> FlatPoint { \n if self.z > -0.01 && self.z < 0.0 {\n self.z = 0.001\n }\n\n else if self.z < 0.1 { \/\/ Prevents division by nearly 0, that cause integer overflow\/underflow\n self.z = 0.11;\n }\n\n FlatPoint {\n x: ((engine_scr_x as f32 * (self.x + offset_x) as f32\/(self.z + offset_z)) + engine_scr_x as f32 \/ 2.0) as i32, \n y: ((engine_scr_x as f32 * (self.y + offset_y) as f32\/(self.z + offset_z)) + engine_scr_y as f32 \/ 2.0) as i32,\n }\n }\n\n \/\/\/ Applies camera rotations from variables `x_y`, `x_z` and `y_z`\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32::consts::PI;\n\n #[cfg(target_os = \"redox\")]\n const PI: f32 = 3.141592653589793;\n \n let x_y = self.x_y;\n let x_z = self.x_z;\n let y_z = self.y_z;\n\n let last_x_y = self.last_x_y;\n let last_x_z = self.last_x_z;\n let last_y_z = self.last_y_z;\n\n self.camera_rotate_x_y(&engine, x_y - last_x_y);\n self.camera_rotate_x_z(&engine, x_z - last_x_z);\n self.camera_rotate_y_z(&engine, y_z - last_y_z);\n\n self.last_x_y = x_y;\n self.last_x_z = x_z;\n self.last_y_z = y_z;\n\n \/\/normalize rotations\n if self.x_z > (PI * 2.0) {\n self.x_z -= (PI * 2.0);\n }\n\n if self.x_y > (PI * 2.0) {\n self.x_y -= (PI * 2.0);\n }\n\n if self.y_z > (PI * 2.0) {\n self.y_z -= (PI * 2.0);\n } \n }\n\n pub fn camera_rotate_x_y(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.y -= engine.camera_y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + engine.camera_x;\n self.y = new_y + engine.camera_y;\n }\n \n pub fn camera_rotate_x_z(&mut self, engine: &start::Window, angle: f32) {\n 
#[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.z -= engine.camera_z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + engine.camera_x;\n self.z = new_z + engine.camera_z;\n }\n\n pub fn camera_rotate_y_z(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= engine.camera_y;\n self.z -= engine.camera_z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + engine.camera_y;\n self.z = new_z + engine.camera_z;\n } \n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.y -= y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + x;\n self.y = new_y + y;\n }\n \n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.z -= z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + x;\n self.z = new_z + z;\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= y;\n self.z -= z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + y;\n self.z = new_z + z;\n } \n}\n\n\/\/\/ Triangle. 
`p1`, `p2`, `p3` are it's vertexes.\n#[derive(Clone, Copy)]\npub struct Triangle {\n pub p1: DepthPoint,\n pub p2: DepthPoint,\n pub p3: DepthPoint,\n\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n pub color: Color,\n\n pub shader_ids: [u16; 8],\n}\n\nimpl Triangle {\n \/\/\/ Creates a new triangle\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: DepthPoint, x: f32, y: f32, z: f32, color: Color) -> Triangle {\n Triangle {\n p1: p1,\n p2: p2, \n p3: p3,\n\n x: x,\n y: y, \n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n color: color,\n\n shader_ids: [0; 8],\n }\n }\n\n \/\/\/ Applies camera rotations from variables `x_y`, `x_z` and `y_z`\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n self.p1.x_y += self.x_y;\n self.p1.x_z += self.x_z;\n self.p1.y_z += self.y_z;\n\n self.p2.x_y += self.x_y;\n self.p2.x_z += self.x_z;\n self.p2.y_z += self.y_z;\n\n self.p3.x_y += self.x_y;\n self.p3.x_z += self.x_z;\n self.p3.y_z += self.y_z;\n\n self.p1.apply_camera_rotations(&engine);\n self.p2.apply_camera_rotations(&engine);\n self.p3.apply_camera_rotations(&engine);\n\n self.p1.x_y -= self.x_y;\n self.p1.x_z -= self.x_z;\n self.p1.y_z -= self.y_z;\n\n self.p2.x_y -= self.x_y;\n self.p2.x_z -= self.x_z;\n self.p2.y_z -= self.y_z;\n\n self.p3.x_y -= self.x_y;\n self.p3.x_z -= self.x_z;\n self.p3.y_z -= self.y_z;\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.coord_rotate_x_y(x, y, angle);\n self.p2.coord_rotate_x_y(x, y, angle);\n self.p3.coord_rotate_x_y(x, y, angle);\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_x_z(x, z, angle);\n self.p2.coord_rotate_x_z(x, z, angle);\n self.p3.coord_rotate_x_z(x, z, angle);\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_y_z(y, z, angle);\n self.p2.coord_rotate_y_z(y, z, angle);\n self.p3.coord_rotate_y_z(y, z, angle);\n }\n}\n\n\/\/\/ A group of triangles.\n\/\/#[derive(Clone)]\npub struct TriangleGroup {\n pub triangles: Vec<Triangle>,\n pub shader_ids: [u16; 8],\n}\n\nimpl TriangleGroup {\n \/\/\/ Create a new group of triangles.\n pub fn new(triangles: Vec<Triangle>) -> TriangleGroup {\n TriangleGroup {\n triangles: triangles,\n shader_ids: [0; 8],\n }\n }\n}\n<commit_msg>Added a flag field to shaders<commit_after>extern crate orbclient;\nextern crate sinulation;\n\nuse super::start;\n\n#[cfg(target_os = \"redox\")] \/\/ if os is redox use these trig functions instead of the ones from standard lib\nuse sinulation::Trig;\n\npub struct Shader {\n pub id: u16,\n pub shader: Box<Fn(&Triangle, &mut start::Window, &Shader)>,\n pub image_data: orbclient::BmpFile,\n pub flags: [f32; 8],\n}\n\nimpl Shader {\n pub fn new(id: u16, shader: Box<Fn(&Triangle, &mut start::Window, &Shader)>) -> Shader {\n Shader {\n id: id,\n shader: shader,\n image_data: orbclient::BmpFile::default(),\n flags: [0.0; 8],\n }\n }\n\n pub fn null() -> Shader {\n Shader {\n id: 0,\n shader: Box::new(|triangle: &Triangle, window: &mut start::Window, wrapper: &Shader| {}),\n image_data: orbclient::BmpFile::default(),\n flags: [0.0; 8],\n }\n }\n\n \/*pub fn apply(self, triangle: &Triangle) {\n (self.shader)(triangle);\n }*\/\n}\n\n\/\/\/ Color struct. 
Stores colors in 8-bit RGB.\n#[derive(Debug, Clone, Copy)]\npub struct Color {\n pub r: u8,\n pub g: u8,\n pub b: u8,\n}\n\nimpl Color {\n pub fn new(r: u8, g: u8, b: u8) -> Color {\n Color {\n r: r,\n g: g,\n b: b,\n }\n }\n\n \/\/\/ Converts tetrahedrane colors to ones accepted by orbclient library.\n pub fn orb_color(&self) -> orbclient::color::Color {\n orbclient::color::Color::rgb(self.r, self.g, self.b)\n }\n}\n\n\/\/\/ 2D point. Coordinates are screen pixels.\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: i32,\n pub y: i32,\n}\n\n\/\/\/ 3D point. Coordinates are floating point, similar to OpenGL coordinates. \n\/\/\/\n\/\/\/ 0.0 is center, -1.0 is left, 1.0 is right etc.\n\/\/\/\n#[derive(Clone, Copy, Debug)]\npub struct DepthPoint {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n last_x_y: f32,\n last_x_z: f32,\n last_y_z: f32,\n}\n\nimpl DepthPoint {\n \/\/\/ Creates a new 3D point.\n pub fn new(x: f32, y: f32, z: f32) -> DepthPoint {\n DepthPoint {\n x: x, \n y: y,\n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n last_x_y: 0.0,\n last_x_z: 0.0,\n last_y_z: 0.0,\n }\n }\n\n \/\/\/ Converts into 2D point, with perspective.\n pub fn flat_point(&mut self, engine_scr_x: u32, engine_scr_y: u32, offset_x: f32, offset_y: f32, offset_z: f32) -> FlatPoint { \n if self.z > -0.01 && self.z < 0.0 {\n self.z = 0.001\n }\n\n else if self.z < 0.1 { \/\/ Prevents division by nearly 0, that cause integer overflow\/underflow\n self.z = 0.11;\n }\n\n FlatPoint {\n x: ((engine_scr_x as f32 * (self.x + offset_x) as f32\/(self.z + offset_z)) + engine_scr_x as f32 \/ 2.0) as i32, \n y: ((engine_scr_x as f32 * (self.y + offset_y) as f32\/(self.z + offset_z)) + engine_scr_y as f32 \/ 2.0) as i32,\n }\n }\n\n \/\/\/ Applies camera rotations from variables `x_y`, `x_z` and `y_z`\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32::consts::PI;\n\n #[cfg(target_os = \"redox\")]\n const PI: f32 = 3.141592653589793;\n \n let x_y = self.x_y;\n let x_z = self.x_z;\n let y_z = self.y_z;\n\n let last_x_y = self.last_x_y;\n let last_x_z = self.last_x_z;\n let last_y_z = self.last_y_z;\n\n self.camera_rotate_x_y(&engine, x_y - last_x_y);\n self.camera_rotate_x_z(&engine, x_z - last_x_z);\n self.camera_rotate_y_z(&engine, y_z - last_y_z);\n\n self.last_x_y = x_y;\n self.last_x_z = x_z;\n self.last_y_z = y_z;\n\n \/\/normalize rotations\n if self.x_z > (PI * 2.0) {\n self.x_z -= (PI * 2.0);\n }\n\n if self.x_y > (PI * 2.0) {\n self.x_y -= (PI * 2.0);\n }\n\n if self.y_z > (PI * 2.0) {\n self.y_z -= (PI * 2.0);\n } \n }\n\n pub fn camera_rotate_x_y(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.y -= engine.camera_y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + engine.camera_x;\n self.y = new_y + engine.camera_y;\n }\n \n pub fn camera_rotate_x_z(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.z -= engine.camera_z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + engine.camera_x;\n self.z = new_z + engine.camera_z;\n }\n\n pub fn camera_rotate_y_z(&mut self, engine: &start::Window, angle: 
f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= engine.camera_y;\n self.z -= engine.camera_z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + engine.camera_y;\n self.z = new_z + engine.camera_z;\n } \n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.y -= y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + x;\n self.y = new_y + y;\n }\n \n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.z -= z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + x;\n self.z = new_z + z;\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= y;\n self.z -= z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + y;\n self.z = new_z + z;\n } \n}\n\n\/\/\/ Triangle. `p1`, `p2`, `p3` are it's vertexes.\n#[derive(Clone, Copy)]\npub struct Triangle {\n pub p1: DepthPoint,\n pub p2: DepthPoint,\n pub p3: DepthPoint,\n\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n pub color: Color,\n\n pub shader_ids: [u16; 8],\n}\n\nimpl Triangle {\n \/\/\/ Creates a new triangle\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: DepthPoint, x: f32, y: f32, z: f32, color: Color) -> Triangle {\n Triangle {\n p1: p1,\n p2: p2, \n p3: p3,\n\n x: x,\n y: y, \n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n color: color,\n\n shader_ids: [0; 8],\n }\n }\n\n \/\/\/ Applies camera rotations from variables `x_y`, `x_z` and `y_z`\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n self.p1.x_y += self.x_y;\n self.p1.x_z += self.x_z;\n self.p1.y_z += self.y_z;\n\n self.p2.x_y += self.x_y;\n self.p2.x_z += self.x_z;\n self.p2.y_z += self.y_z;\n\n self.p3.x_y += self.x_y;\n self.p3.x_z += self.x_z;\n self.p3.y_z += self.y_z;\n\n self.p1.apply_camera_rotations(&engine);\n self.p2.apply_camera_rotations(&engine);\n self.p3.apply_camera_rotations(&engine);\n\n self.p1.x_y -= self.x_y;\n self.p1.x_z -= self.x_z;\n self.p1.y_z -= self.y_z;\n\n self.p2.x_y -= self.x_y;\n self.p2.x_z -= self.x_z;\n self.p2.y_z -= self.y_z;\n\n self.p3.x_y -= self.x_y;\n self.p3.x_z -= self.x_z;\n self.p3.y_z -= self.y_z;\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.coord_rotate_x_y(x, y, angle);\n self.p2.coord_rotate_x_y(x, y, angle);\n self.p3.coord_rotate_x_y(x, y, angle);\n }\n\n \/\/\/ Rotates the point around provided coordinates by the angle.\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_x_z(x, z, angle);\n self.p2.coord_rotate_x_z(x, z, angle);\n self.p3.coord_rotate_x_z(x, z, angle);\n }\n\n \/\/\/ Rotates the point around 
provided coordinates by the angle.\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_y_z(y, z, angle);\n self.p2.coord_rotate_y_z(y, z, angle);\n self.p3.coord_rotate_y_z(y, z, angle);\n }\n}\n\n\/\/\/ A group of triangles.\n\/\/#[derive(Clone)]\npub struct TriangleGroup {\n pub triangles: Vec<Triangle>,\n pub shader_ids: [u16; 8],\n}\n\nimpl TriangleGroup {\n \/\/\/ Create a new group of triangles.\n pub fn new(triangles: Vec<Triangle>) -> TriangleGroup {\n TriangleGroup {\n triangles: triangles,\n shader_ids: [0; 8],\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Removed Clone from FlagArg<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::Write;\nuse serde::ser::{self, Impossible, Serialize};\nuse error::Error;\n\n\n\/\/\/ An XML Serializer.\npub struct Serializer<W>\n where W: Write\n{\n writer: W,\n}\n\nimpl<W> Serializer<W>\n where W: Write\n{\n pub fn new(writer: W) -> Self {\n Self { writer: writer }\n }\n}\n\n\n#[allow(unused_variables)]\nimpl<'w, W> ser::Serializer for &'w mut Serializer<W>\n where W: Write\n{\n type Ok = ();\n type Error = Error;\n\n type SerializeSeq = Impossible<Self::Ok, Self::Error>;\n type SerializeTuple = Impossible<Self::Ok, Self::Error>;\n type SerializeTupleStruct = Impossible<Self::Ok, Self::Error>;\n type SerializeTupleVariant = Impossible<Self::Ok, Self::Error>;\n type SerializeMap = Map<'w, W>;\n type SerializeStruct = Struct<'w, W>;\n type SerializeStructVariant = Impossible<Self::Ok, Self::Error>;\n\n fn serialize_bool(mut self, v: bool) -> Result<Self::Ok, Self::Error> {\n if v {\n write!(self.writer, \"true\")?;\n } else {\n write!(self.writer, \"false\")?;\n }\n\n Ok(())\n }\n\n fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_i8\".to_string()))\n }\n\n fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_i16\".to_string()))\n }\n\n fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_i32\".to_string()))\n }\n\n fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_i64\".to_string()))\n }\n\n fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_u8\".to_string()))\n }\n\n fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_u16\".to_string()))\n }\n\n fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {\n write!(self.writer, \"{}\", v)\n .map(|_| ())\n .map_err(|e| e.into())\n }\n\n fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_u64\".to_string()))\n }\n\n fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_f32\".to_string()))\n }\n\n fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_f64\".to_string()))\n }\n\n fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_char\".to_string()))\n }\n\n fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {\n write!(self.writer, \"{}\", value)\n .map(|_| ())\n .map_err(|e| e.into())\n }\n\n fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {\n 
Err(Error::UnsupportedOperation(\"serialize_bytes\".to_string()))\n }\n\n fn serialize_none(self) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_none\".to_string()))\n }\n\n fn serialize_some<T: ?Sized + Serialize>(self, value: &T) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_some\".to_string()))\n }\n\n fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_unit\".to_string()))\n }\n\n fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_unit_struct\".to_string()))\n }\n\n fn serialize_unit_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str)\n -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_unit_variant\".to_string()))\n }\n\n fn serialize_newtype_struct<T: ?Sized + Serialize>(self,\n name: &'static str,\n value: &T)\n -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_newtype_struct\".to_string()))\n }\n\n fn serialize_newtype_variant<T: ?Sized + Serialize>(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n value: &T)\n -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_newtype_variant\".to_string()))\n }\n\n fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_seq\".to_string()))\n }\n\n fn serialize_seq_fixed_size(self, size: usize) -> Result<Self::SerializeSeq, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_seq_fixed_size\".to_string()))\n }\n\n fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple\".to_string()))\n }\n\n fn serialize_tuple_struct(self,\n name: &'static str,\n len: usize)\n -> Result<Self::SerializeTupleStruct, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple_struct\".to_string()))\n }\n\n fn serialize_tuple_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n len: usize)\n -> Result<Self::SerializeTupleVariant, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple_variant\".to_string()))\n }\n\n fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {\n Ok(Map { parent: self })\n }\n\n fn serialize_struct(self,\n name: &'static str,\n len: usize)\n -> Result<Self::SerializeStruct, Self::Error> {\n write!(self.writer, \"<{}>\", name)?;\n Ok(Struct {\n parent: self,\n name: name,\n })\n }\n\n fn serialize_struct_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n len: usize)\n -> Result<Self::SerializeStructVariant, Self::Error> {\n Err(Error::UnsupportedOperation(\"Result\".to_string()))\n }\n}\n\n\/\/\/ An implementation of SerializeStruct for serializing to XML.\npub struct Struct<'w, W>\n where W: 'w + Write\n{\n parent: &'w mut Serializer<W>,\n name: &'w str,\n}\n\nimpl<'w, W> ser::SerializeStruct for Struct<'w, W>\n where W: 'w + Write\n{\n type Ok = ();\n type Error = Error;\n\n fn serialize_field<T: ?Sized + Serialize>(&mut self,\n key: &'static str,\n value: &T)\n -> Result<(), Self::Error> {\n write!(self.parent.writer, \"<{}>\", key)?;\n value.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \"<\/{}>\", key)?;\n Ok(())\n }\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n 
write!(self.parent.writer, \"<\/{}>\", self.name).map_err(|e| e.into())\n }\n}\n\n\/\/\/ An implementation of SerializeMap for serializing to XML.\npub struct Map<'w, W>\n where W: 'w + Write\n{\n parent: &'w mut Serializer<W>,\n}\n\nimpl<'w, W> ser::SerializeMap for Map<'w, W>\n where W: 'w + Write\n{\n type Ok = ();\n type Error = Error;\n\n fn serialize_key<T: ?Sized + Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {\n panic!(\"impossible to serialize just the key, please use serialize_entry()\")\n }\n\n fn serialize_value<T: ?Sized + Serialize>(&mut self, value: &T) -> Result<(), Self::Error> {\n value.serialize(&mut *self.parent)\n }\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n Ok(())\n }\n\n fn serialize_entry<K: ?Sized + Serialize, V: ?Sized + Serialize>(&mut self,\n key: &K,\n value: &V)\n -> Result<(), Self::Error> {\n \/\/ TODO: Is it possible to ensure our key is never a composite type?\n \/\/ Anything which isn't a \"primitive\" would lead to malformed XML here...\n write!(self.parent.writer, \"<\")?;\n key.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \">\")?;\n\n value.serialize(&mut *self.parent)?;\n\n write!(self.parent.writer, \"<\/\")?;\n key.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \">\")?;\n Ok(())\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use serde::Serializer as SerSerializer;\n use serde::ser::{SerializeMap, SerializeStruct};\n\n #[test]\n fn test_serialize_bool() {\n let inputs = vec![(true, \"true\"), (false, \"false\")];\n\n for (src, should_be) in inputs {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n ser.serialize_bool(src).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n }\n\n #[test]\n fn test_start_serialize_struct() {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let _ = ser.serialize_struct(\"foo\", 0).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, \"<foo>\");\n }\n\n #[test]\n fn test_serialize_struct_field() {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let mut struct_ser = Struct {\n parent: &mut ser,\n name: \"baz\",\n };\n struct_ser.serialize_field(\"foo\", \"bar\").unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, \"<foo>bar<\/foo>\");\n }\n\n #[test]\n fn test_serialize_struct() {\n #[derive(Serialize)]\n struct Person {\n name: String,\n age: u32,\n }\n\n let bob = Person {\n name: \"Bob\".to_string(),\n age: 42,\n };\n let should_be = \"<Person><name>Bob<\/name><age>42<\/age><\/Person>\";\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n bob.serialize(&mut ser).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n\n #[test]\n fn test_serialize_map_entries() {\n let should_be = \"<name>Bob<\/name><age>5<\/age>\";\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let mut map = Map { parent: &mut ser };\n map.serialize_entry(\"name\", \"Bob\").unwrap();\n map.serialize_entry(\"age\", \"5\").unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n}\n<commit_msg>Added implementations for most primitives<commit_after>use std::io::Write;\nuse std::fmt::Display;\nuse serde::ser::{self, Impossible, Serialize};\nuse error::Error;\n\n\n\/\/\/ An XML Serializer.\npub struct Serializer<W>\n where W: 
Write\n{\n writer: W,\n}\n\nimpl<W> Serializer<W>\n where W: Write\n{\n pub fn new(writer: W) -> Self {\n Self { writer: writer }\n }\n\n fn write_primitive<P: Display>(&mut self, primitive: P) -> Result<(), Error> {\n write!(self.writer, \"{}\", primitive)\n .map(|_| ())\n .map_err(|e| e.into())\n }\n}\n\n\n#[allow(unused_variables)]\nimpl<'w, W> ser::Serializer for &'w mut Serializer<W>\n where W: Write\n{\n type Ok = ();\n type Error = Error;\n\n type SerializeSeq = Impossible<Self::Ok, Self::Error>;\n type SerializeTuple = Impossible<Self::Ok, Self::Error>;\n type SerializeTupleStruct = Impossible<Self::Ok, Self::Error>;\n type SerializeTupleVariant = Impossible<Self::Ok, Self::Error>;\n type SerializeMap = Map<'w, W>;\n type SerializeStruct = Struct<'w, W>;\n type SerializeStructVariant = Impossible<Self::Ok, Self::Error>;\n\n fn serialize_bool(mut self, v: bool) -> Result<Self::Ok, Self::Error> {\n if v {\n write!(self.writer, \"true\")?;\n } else {\n write!(self.writer, \"false\")?;\n }\n\n Ok(())\n }\n\n fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(v)\n }\n\n fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {\n self.write_primitive(value)\n }\n\n fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {\n \/\/ TODO: I imagine you'd want to use base64 here.\n \/\/ Not sure how to roundtrip effectively though...\n Err(Error::UnsupportedOperation(\"serialize_bytes\".to_string()))\n }\n\n fn serialize_none(self) -> Result<Self::Ok, Self::Error> {\n Ok(())\n }\n\n fn serialize_some<T: ?Sized + Serialize>(self, value: &T) -> Result<Self::Ok, Self::Error> {\n value.serialize(self)\n }\n\n fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {\n self.serialize_none()\n }\n\n fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok, Self::Error> {\n write!(self.writer, \"<{0}><\/{0}>\", name)\n .map(|_| ())\n .map_err(|e| e.into())\n }\n\n fn serialize_unit_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str)\n -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_unit_variant\".to_string()))\n }\n\n fn serialize_newtype_struct<T: ?Sized + Serialize>(self,\n name: &'static str,\n value: &T)\n -> Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_newtype_struct\".to_string()))\n }\n\n fn serialize_newtype_variant<T: ?Sized + Serialize>(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n value: &T)\n -> 
Result<Self::Ok, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_newtype_variant\".to_string()))\n }\n\n fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {\n \/\/ TODO: Figure out how to constrain the things written to only be composites\n Err(Error::UnsupportedOperation(\"serialize_seq\".to_string()))\n }\n\n fn serialize_seq_fixed_size(self, size: usize) -> Result<Self::SerializeSeq, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_seq_fixed_size\".to_string()))\n }\n\n fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple\".to_string()))\n }\n\n fn serialize_tuple_struct(self,\n name: &'static str,\n len: usize)\n -> Result<Self::SerializeTupleStruct, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple_struct\".to_string()))\n }\n\n fn serialize_tuple_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n len: usize)\n -> Result<Self::SerializeTupleVariant, Self::Error> {\n Err(Error::UnsupportedOperation(\"serialize_tuple_variant\".to_string()))\n }\n\n fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {\n Ok(Map { parent: self })\n }\n\n fn serialize_struct(self,\n name: &'static str,\n len: usize)\n -> Result<Self::SerializeStruct, Self::Error> {\n write!(self.writer, \"<{}>\", name)?;\n Ok(Struct {\n parent: self,\n name: name,\n })\n }\n\n fn serialize_struct_variant(self,\n name: &'static str,\n variant_index: usize,\n variant: &'static str,\n len: usize)\n -> Result<Self::SerializeStructVariant, Self::Error> {\n Err(Error::UnsupportedOperation(\"Result\".to_string()))\n }\n}\n\n\/\/\/ An implementation of SerializeStruct for serializing to XML.\npub struct Struct<'w, W>\n where W: 'w + Write\n{\n parent: &'w mut Serializer<W>,\n name: &'w str,\n}\n\nimpl<'w, W> ser::SerializeStruct for Struct<'w, W>\n where W: 'w + Write\n{\n type Ok = ();\n type Error = Error;\n\n fn serialize_field<T: ?Sized + Serialize>(&mut self,\n key: &'static str,\n value: &T)\n -> Result<(), Self::Error> {\n write!(self.parent.writer, \"<{}>\", key)?;\n value.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \"<\/{}>\", key)?;\n Ok(())\n }\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n write!(self.parent.writer, \"<\/{}>\", self.name).map_err(|e| e.into())\n }\n}\n\n\/\/\/ An implementation of SerializeMap for serializing to XML.\npub struct Map<'w, W>\n where W: 'w + Write\n{\n parent: &'w mut Serializer<W>,\n}\n\nimpl<'w, W> ser::SerializeMap for Map<'w, W>\n where W: 'w + Write\n{\n type Ok = ();\n type Error = Error;\n\n fn serialize_key<T: ?Sized + Serialize>(&mut self, _: &T) -> Result<(), Self::Error> {\n panic!(\"impossible to serialize just the key, please use serialize_entry()\")\n }\n\n fn serialize_value<T: ?Sized + Serialize>(&mut self, value: &T) -> Result<(), Self::Error> {\n value.serialize(&mut *self.parent)\n }\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n Ok(())\n }\n\n fn serialize_entry<K: ?Sized + Serialize, V: ?Sized + Serialize>(&mut self,\n key: &K,\n value: &V)\n -> Result<(), Self::Error> {\n \/\/ TODO: Is it possible to ensure our key is never a composite type?\n \/\/ Anything which isn't a \"primitive\" would lead to malformed XML here...\n write!(self.parent.writer, \"<\")?;\n key.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \">\")?;\n\n value.serialize(&mut *self.parent)?;\n\n write!(self.parent.writer, 
\"<\/\")?;\n key.serialize(&mut *self.parent)?;\n write!(self.parent.writer, \">\")?;\n Ok(())\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use serde::Serializer as SerSerializer;\n use serde::ser::{SerializeMap, SerializeStruct};\n\n #[test]\n fn test_serialize_bool() {\n let inputs = vec![(true, \"true\"), (false, \"false\")];\n\n for (src, should_be) in inputs {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n ser.serialize_bool(src).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n }\n\n #[test]\n fn test_start_serialize_struct() {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let _ = ser.serialize_struct(\"foo\", 0).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, \"<foo>\");\n }\n\n #[test]\n fn test_serialize_struct_field() {\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let mut struct_ser = Struct {\n parent: &mut ser,\n name: \"baz\",\n };\n struct_ser.serialize_field(\"foo\", \"bar\").unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, \"<foo>bar<\/foo>\");\n }\n\n #[test]\n fn test_serialize_struct() {\n #[derive(Serialize)]\n struct Person {\n name: String,\n age: u32,\n }\n\n let bob = Person {\n name: \"Bob\".to_string(),\n age: 42,\n };\n let should_be = \"<Person><name>Bob<\/name><age>42<\/age><\/Person>\";\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n bob.serialize(&mut ser).unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n\n #[test]\n fn test_serialize_map_entries() {\n let should_be = \"<name>Bob<\/name><age>5<\/age>\";\n let mut buffer = Vec::new();\n\n {\n let mut ser = Serializer::new(&mut buffer);\n let mut map = Map { parent: &mut ser };\n map.serialize_entry(\"name\", \"Bob\").unwrap();\n map.serialize_entry(\"age\", \"5\").unwrap();\n }\n\n let got = String::from_utf8(buffer).unwrap();\n assert_eq!(got, should_be);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Recursive Aliases Closes #610<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"uu_test\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) mahkoh (ju.orth [at] gmail [dot] com)\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\nextern crate libc;\n#[cfg(target_os = \"redox\")]\nextern crate syscall;\n\nuse std::collections::HashMap;\nuse std::ffi::OsString;\nuse std::env::args_os;\nuse std::str::from_utf8;\n\nstatic NAME: &'static str = \"test\";\n\n\/\/ TODO: decide how to handle non-UTF8 input for all the utils\n\/\/ Definitely don't use [u8], try keeping it as OsStr or OsString instead\npub fn uumain(_: Vec<String>) -> i32 {\n let args = args_os().collect::<Vec<OsString>>();\n \/\/ This is completely disregarding valid windows paths that aren't valid unicode\n let args = args.iter()\n .map(|a| a.to_str().unwrap().as_bytes())\n .collect::<Vec<&[u8]>>();\n if args.is_empty() {\n return 2;\n }\n let args = if !args[0].ends_with(NAME.as_bytes()) {\n &args[1..]\n } else {\n &args[..]\n };\n let args = match args[0] {\n b\"[\" => match args[args.len() - 1] {\n b\"]\" => &args[1..args.len() - 1],\n _ => return 2,\n },\n _ => &args[1..args.len()],\n };\n let mut error = false;\n let retval = 1 - parse_expr(args, &mut error) as i32;\n if error 
{\n 2\n } else {\n retval\n }\n}\n\nfn one(args: &[&[u8]]) -> bool {\n args[0].len() > 0\n}\n\nfn two(args: &[&[u8]], error: &mut bool) -> bool {\n match args[0] {\n b\"!\" => !one(&args[1..]),\n b\"-b\" => path(args[1], PathCondition::BlockSpecial),\n b\"-c\" => path(args[1], PathCondition::CharacterSpecial),\n b\"-d\" => path(args[1], PathCondition::Directory),\n b\"-e\" => path(args[1], PathCondition::Exists),\n b\"-f\" => path(args[1], PathCondition::Regular),\n b\"-g\" => path(args[1], PathCondition::GroupIDFlag),\n b\"-h\" => path(args[1], PathCondition::SymLink),\n b\"-L\" => path(args[1], PathCondition::SymLink),\n b\"-n\" => one(&args[1..]),\n b\"-p\" => path(args[1], PathCondition::FIFO),\n b\"-r\" => path(args[1], PathCondition::Readable),\n b\"-S\" => path(args[1], PathCondition::Socket),\n b\"-s\" => path(args[1], PathCondition::NonEmpty),\n b\"-t\" => isatty(args[1]),\n b\"-u\" => path(args[1], PathCondition::UserIDFlag),\n b\"-w\" => path(args[1], PathCondition::Writable),\n b\"-x\" => path(args[1], PathCondition::Executable),\n b\"-z\" => !one(&args[1..]),\n _ => {\n *error = true;\n false\n }\n }\n}\n\nfn three(args: &[&[u8]], error: &mut bool) -> bool {\n match args[1] {\n b\"=\" => args[0] == args[2],\n b\"==\" => args[0] == args[2],\n b\"!=\" => args[0] != args[2],\n b\"-eq\" => integers(args[0], args[2], IntegerCondition::Equal),\n b\"-ne\" => integers(args[0], args[2], IntegerCondition::Unequal),\n b\"-gt\" => integers(args[0], args[2], IntegerCondition::Greater),\n b\"-ge\" => integers(args[0], args[2], IntegerCondition::GreaterEqual),\n b\"-lt\" => integers(args[0], args[2], IntegerCondition::Less),\n b\"-le\" => integers(args[0], args[2], IntegerCondition::LessEqual),\n _ => match args[0] {\n b\"!\" => !two(&args[1..], error),\n _ => {\n *error = true;\n false\n }\n },\n }\n}\n\nfn four(args: &[&[u8]], error: &mut bool) -> bool {\n match args[0] {\n b\"!\" => !three(&args[1..], error),\n _ => {\n *error = true;\n false\n }\n }\n}\n\nenum IntegerCondition {\n Equal,\n Unequal,\n Greater,\n GreaterEqual,\n Less,\n LessEqual,\n}\n\nfn integers(a: &[u8], b: &[u8], cond: IntegerCondition) -> bool {\n let (a, b): (&str, &str) = match (from_utf8(a), from_utf8(b)) {\n (Ok(a), Ok(b)) => (a, b),\n _ => return false,\n };\n let (a, b): (i64, i64) = match (a.parse(), b.parse()) {\n (Ok(a), Ok(b)) => (a, b),\n _ => return false,\n };\n match cond {\n IntegerCondition::Equal => a == b,\n IntegerCondition::Unequal => a != b,\n IntegerCondition::Greater => a > b,\n IntegerCondition::GreaterEqual => a >= b,\n IntegerCondition::Less => a < b,\n IntegerCondition::LessEqual => a <= b,\n }\n}\n\nfn isatty(fd: &[u8]) -> bool {\n from_utf8(fd)\n .ok()\n .and_then(|s| s.parse().ok())\n .map_or(false, |i| {\n #[cfg(not(target_os = \"redox\"))]\n unsafe { libc::isatty(i) == 1 }\n #[cfg(target_os = \"redox\")]\n syscall::dup(i, b\"termios\").map(syscall::close).is_ok()\n })\n}\n\nfn dispatch(args: &mut &[&[u8]], error: &mut bool) -> bool {\n let (val, idx) = match args.len() {\n 0 => {\n *error = true;\n (false, 0)\n }\n 1 => (one(*args), 1),\n 2 => dispatch_two(args, error),\n 3 => dispatch_three(args, error),\n _ => dispatch_four(args, error),\n };\n *args = &(*args)[idx..];\n val\n}\n\nfn dispatch_two(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = two(*args, error);\n if *error {\n *error = false;\n (one(*args), 1)\n } else {\n (val, 2)\n }\n}\n\nfn dispatch_three(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = three(*args, error);\n if *error 
{\n *error = false;\n dispatch_two(args, error)\n } else {\n (val, 3)\n }\n}\n\nfn dispatch_four(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = four(*args, error);\n if *error {\n *error = false;\n dispatch_three(args, error)\n } else {\n (val, 4)\n }\n}\n\n#[derive(Clone, Copy)]\nenum Precedence {\n Unknown = 0,\n Paren, \/\/ FIXME: this is useless (parentheses have not been implemented)\n Or,\n And,\n BUnOp,\n BinOp,\n UnOp,\n}\n\nfn parse_expr(mut args: &[&[u8]], error: &mut bool) -> bool {\n if args.len() == 0 {\n false\n } else {\n let hashmap = setup_hashmap();\n let lhs = dispatch(&mut args, error);\n\n if args.len() > 0 {\n parse_expr_helper(&hashmap, &mut args, lhs, Precedence::Unknown, error)\n } else {\n lhs\n }\n }\n}\n\nfn parse_expr_helper<'a>(\n hashmap: &HashMap<&'a [u8], Precedence>,\n args: &mut &[&'a [u8]],\n mut lhs: bool,\n min_prec: Precedence,\n error: &mut bool,\n) -> bool {\n let mut prec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n while !*error && args.len() > 0 && prec as usize >= min_prec as usize {\n let op = args[0];\n *args = &(*args)[1..];\n let mut rhs = dispatch(args, error);\n while args.len() > 0 {\n let subprec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n if subprec as usize <= prec as usize || *error {\n break;\n }\n rhs = parse_expr_helper(hashmap, args, rhs, subprec, error);\n }\n lhs = match prec {\n Precedence::UnOp | Precedence::BUnOp => {\n *error = true;\n false\n }\n Precedence::And => lhs && rhs,\n Precedence::Or => lhs || rhs,\n Precedence::BinOp => three(\n &[\n if lhs { b\" \" } else { b\"\" },\n op,\n if rhs { b\" \" } else { b\"\" },\n ],\n error,\n ),\n Precedence::Paren => unimplemented!(), \/\/ TODO: implement parentheses\n _ => unreachable!(),\n };\n if args.len() > 0 {\n prec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n }\n }\n lhs\n}\n\n#[inline]\nfn setup_hashmap<'a>() -> HashMap<&'a [u8], Precedence> {\n let mut hashmap = HashMap::<&'a [u8], Precedence>::new();\n\n hashmap.insert(b\"-b\", Precedence::UnOp);\n hashmap.insert(b\"-c\", Precedence::UnOp);\n hashmap.insert(b\"-d\", Precedence::UnOp);\n hashmap.insert(b\"-e\", Precedence::UnOp);\n hashmap.insert(b\"-f\", Precedence::UnOp);\n hashmap.insert(b\"-g\", Precedence::UnOp);\n hashmap.insert(b\"-h\", Precedence::UnOp);\n hashmap.insert(b\"-L\", Precedence::UnOp);\n hashmap.insert(b\"-n\", Precedence::UnOp);\n hashmap.insert(b\"-p\", Precedence::UnOp);\n hashmap.insert(b\"-r\", Precedence::UnOp);\n hashmap.insert(b\"-S\", Precedence::UnOp);\n hashmap.insert(b\"-s\", Precedence::UnOp);\n hashmap.insert(b\"-t\", Precedence::UnOp);\n hashmap.insert(b\"-u\", Precedence::UnOp);\n hashmap.insert(b\"-w\", Precedence::UnOp);\n hashmap.insert(b\"-x\", Precedence::UnOp);\n hashmap.insert(b\"-z\", Precedence::UnOp);\n\n hashmap.insert(b\"=\", Precedence::BinOp);\n hashmap.insert(b\"!=\", Precedence::BinOp);\n hashmap.insert(b\"-eq\", Precedence::BinOp);\n hashmap.insert(b\"-ne\", Precedence::BinOp);\n hashmap.insert(b\"-gt\", Precedence::BinOp);\n hashmap.insert(b\"-ge\", Precedence::BinOp);\n hashmap.insert(b\"-lt\", Precedence::BinOp);\n hashmap.insert(b\"-le\", Precedence::BinOp);\n\n hashmap.insert(b\"!\", Precedence::BUnOp);\n\n hashmap.insert(b\"-a\", Precedence::And);\n hashmap.insert(b\"-o\", Precedence::Or);\n\n hashmap.insert(b\"(\", Precedence::Paren);\n hashmap.insert(b\")\", Precedence::Paren);\n\n hashmap\n}\n\n#[derive(Eq, PartialEq)]\nenum 
PathCondition {\n BlockSpecial,\n CharacterSpecial,\n Directory,\n Exists,\n Regular,\n GroupIDFlag,\n SymLink,\n FIFO,\n Readable,\n Socket,\n NonEmpty,\n UserIDFlag,\n Writable,\n Executable,\n}\n\n#[cfg(not(windows))]\nfn path(path: &[u8], cond: PathCondition) -> bool {\n use std::os::unix::fs::{MetadataExt, FileTypeExt};\n use std::os::unix::ffi::OsStrExt;\n use std::fs::{self, Metadata};\n use std::ffi::OsStr;\n\n let path = OsStr::from_bytes(path);\n\n static S_ISUID: u32 = 0o4000;\n static S_ISGID: u32 = 0o2000;\n\n enum Permission {\n Read = 0o4,\n Write = 0o2,\n Execute = 0o1,\n }\n\n let perm = |metadata: Metadata, p: Permission| {\n #[cfg(not(target_os = \"redox\"))]\n let (uid, gid) = unsafe { (libc::getuid(), libc::getgid()) };\n #[cfg(target_os = \"redox\")]\n let (uid, gid) = (syscall::getuid().unwrap() as u32,\n syscall::getgid().unwrap() as u32);\n\n if uid == metadata.uid() {\n metadata.mode() & ((p as u32) << 6) != 0\n } else if gid == metadata.gid() {\n metadata.mode() & ((p as u32) << 3) != 0\n } else {\n metadata.mode() & ((p as u32)) != 0\n }\n };\n\n let metadata = if cond == PathCondition::SymLink {\n fs::symlink_metadata(path)\n } else {\n fs::metadata(path)\n };\n\n let metadata = match metadata {\n Ok(metadata) => metadata,\n Err(_) => { return false; }\n };\n\n let file_type = metadata.file_type();\n\n match cond {\n PathCondition::BlockSpecial => file_type.is_block_device(),\n PathCondition::CharacterSpecial => file_type.is_char_device(),\n PathCondition::Directory => file_type.is_dir(),\n PathCondition::Exists => true,\n PathCondition::Regular => file_type.is_file(),\n PathCondition::GroupIDFlag => metadata.mode() & S_ISGID != 0,\n PathCondition::SymLink => metadata.file_type().is_symlink(),\n PathCondition::FIFO => file_type.is_fifo(),\n PathCondition::Readable => perm(metadata, Permission::Read),\n PathCondition::Socket => file_type.is_socket(),\n PathCondition::NonEmpty => metadata.size() > 0,\n PathCondition::UserIDFlag => metadata.mode() & S_ISUID != 0,\n PathCondition::Writable => perm(metadata, Permission::Write),\n PathCondition::Executable => perm(metadata, Permission::Execute),\n }\n}\n\n#[cfg(windows)]\nfn path(path: &[u8], cond: PathCondition) -> bool {\n use std::fs::metadata;\n let path = from_utf8(path).unwrap();\n let stat = match metadata(path) {\n Ok(s) => s,\n _ => return false,\n };\n match cond {\n PathCondition::BlockSpecial => false,\n PathCondition::CharacterSpecial => false,\n PathCondition::Directory => stat.is_dir(),\n PathCondition::Exists => true,\n PathCondition::Regular => stat.is_file(),\n PathCondition::GroupIDFlag => false,\n PathCondition::SymLink => false,\n PathCondition::FIFO => false,\n PathCondition::Readable => false, \/\/ TODO\n PathCondition::Socket => false,\n PathCondition::NonEmpty => stat.len() > 0,\n PathCondition::UserIDFlag => false,\n PathCondition::Writable => false, \/\/ TODO\n PathCondition::Executable => false, \/\/ TODO\n }\n}\n<commit_msg>Change static to const<commit_after>#![crate_name = \"uu_test\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) mahkoh (ju.orth [at] gmail [dot] com)\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\nextern crate libc;\n#[cfg(target_os = \"redox\")]\nextern crate syscall;\n\nuse std::collections::HashMap;\nuse std::ffi::OsString;\nuse std::env::args_os;\nuse std::str::from_utf8;\n\nstatic NAME: &'static str = \"test\";\n\n\/\/ TODO: decide how to 
handle non-UTF8 input for all the utils\n\/\/ Definitely don't use [u8], try keeping it as OsStr or OsString instead\npub fn uumain(_: Vec<String>) -> i32 {\n let args = args_os().collect::<Vec<OsString>>();\n \/\/ This is completely disregarding valid windows paths that aren't valid unicode\n let args = args.iter()\n .map(|a| a.to_str().unwrap().as_bytes())\n .collect::<Vec<&[u8]>>();\n if args.is_empty() {\n return 2;\n }\n let args = if !args[0].ends_with(NAME.as_bytes()) {\n &args[1..]\n } else {\n &args[..]\n };\n let args = match args[0] {\n b\"[\" => match args[args.len() - 1] {\n b\"]\" => &args[1..args.len() - 1],\n _ => return 2,\n },\n _ => &args[1..args.len()],\n };\n let mut error = false;\n let retval = 1 - parse_expr(args, &mut error) as i32;\n if error {\n 2\n } else {\n retval\n }\n}\n\nfn one(args: &[&[u8]]) -> bool {\n args[0].len() > 0\n}\n\nfn two(args: &[&[u8]], error: &mut bool) -> bool {\n match args[0] {\n b\"!\" => !one(&args[1..]),\n b\"-b\" => path(args[1], PathCondition::BlockSpecial),\n b\"-c\" => path(args[1], PathCondition::CharacterSpecial),\n b\"-d\" => path(args[1], PathCondition::Directory),\n b\"-e\" => path(args[1], PathCondition::Exists),\n b\"-f\" => path(args[1], PathCondition::Regular),\n b\"-g\" => path(args[1], PathCondition::GroupIDFlag),\n b\"-h\" => path(args[1], PathCondition::SymLink),\n b\"-L\" => path(args[1], PathCondition::SymLink),\n b\"-n\" => one(&args[1..]),\n b\"-p\" => path(args[1], PathCondition::FIFO),\n b\"-r\" => path(args[1], PathCondition::Readable),\n b\"-S\" => path(args[1], PathCondition::Socket),\n b\"-s\" => path(args[1], PathCondition::NonEmpty),\n b\"-t\" => isatty(args[1]),\n b\"-u\" => path(args[1], PathCondition::UserIDFlag),\n b\"-w\" => path(args[1], PathCondition::Writable),\n b\"-x\" => path(args[1], PathCondition::Executable),\n b\"-z\" => !one(&args[1..]),\n _ => {\n *error = true;\n false\n }\n }\n}\n\nfn three(args: &[&[u8]], error: &mut bool) -> bool {\n match args[1] {\n b\"=\" => args[0] == args[2],\n b\"==\" => args[0] == args[2],\n b\"!=\" => args[0] != args[2],\n b\"-eq\" => integers(args[0], args[2], IntegerCondition::Equal),\n b\"-ne\" => integers(args[0], args[2], IntegerCondition::Unequal),\n b\"-gt\" => integers(args[0], args[2], IntegerCondition::Greater),\n b\"-ge\" => integers(args[0], args[2], IntegerCondition::GreaterEqual),\n b\"-lt\" => integers(args[0], args[2], IntegerCondition::Less),\n b\"-le\" => integers(args[0], args[2], IntegerCondition::LessEqual),\n _ => match args[0] {\n b\"!\" => !two(&args[1..], error),\n _ => {\n *error = true;\n false\n }\n },\n }\n}\n\nfn four(args: &[&[u8]], error: &mut bool) -> bool {\n match args[0] {\n b\"!\" => !three(&args[1..], error),\n _ => {\n *error = true;\n false\n }\n }\n}\n\nenum IntegerCondition {\n Equal,\n Unequal,\n Greater,\n GreaterEqual,\n Less,\n LessEqual,\n}\n\nfn integers(a: &[u8], b: &[u8], cond: IntegerCondition) -> bool {\n let (a, b): (&str, &str) = match (from_utf8(a), from_utf8(b)) {\n (Ok(a), Ok(b)) => (a, b),\n _ => return false,\n };\n let (a, b): (i64, i64) = match (a.parse(), b.parse()) {\n (Ok(a), Ok(b)) => (a, b),\n _ => return false,\n };\n match cond {\n IntegerCondition::Equal => a == b,\n IntegerCondition::Unequal => a != b,\n IntegerCondition::Greater => a > b,\n IntegerCondition::GreaterEqual => a >= b,\n IntegerCondition::Less => a < b,\n IntegerCondition::LessEqual => a <= b,\n }\n}\n\nfn isatty(fd: &[u8]) -> bool {\n from_utf8(fd)\n .ok()\n .and_then(|s| s.parse().ok())\n .map_or(false, |i| {\n 
#[cfg(not(target_os = \"redox\"))]\n unsafe { libc::isatty(i) == 1 }\n #[cfg(target_os = \"redox\")]\n syscall::dup(i, b\"termios\").map(syscall::close).is_ok()\n })\n}\n\nfn dispatch(args: &mut &[&[u8]], error: &mut bool) -> bool {\n let (val, idx) = match args.len() {\n 0 => {\n *error = true;\n (false, 0)\n }\n 1 => (one(*args), 1),\n 2 => dispatch_two(args, error),\n 3 => dispatch_three(args, error),\n _ => dispatch_four(args, error),\n };\n *args = &(*args)[idx..];\n val\n}\n\nfn dispatch_two(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = two(*args, error);\n if *error {\n *error = false;\n (one(*args), 1)\n } else {\n (val, 2)\n }\n}\n\nfn dispatch_three(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = three(*args, error);\n if *error {\n *error = false;\n dispatch_two(args, error)\n } else {\n (val, 3)\n }\n}\n\nfn dispatch_four(args: &mut &[&[u8]], error: &mut bool) -> (bool, usize) {\n let val = four(*args, error);\n if *error {\n *error = false;\n dispatch_three(args, error)\n } else {\n (val, 4)\n }\n}\n\n#[derive(Clone, Copy)]\nenum Precedence {\n Unknown = 0,\n Paren, \/\/ FIXME: this is useless (parentheses have not been implemented)\n Or,\n And,\n BUnOp,\n BinOp,\n UnOp,\n}\n\nfn parse_expr(mut args: &[&[u8]], error: &mut bool) -> bool {\n if args.len() == 0 {\n false\n } else {\n let hashmap = setup_hashmap();\n let lhs = dispatch(&mut args, error);\n\n if args.len() > 0 {\n parse_expr_helper(&hashmap, &mut args, lhs, Precedence::Unknown, error)\n } else {\n lhs\n }\n }\n}\n\nfn parse_expr_helper<'a>(\n hashmap: &HashMap<&'a [u8], Precedence>,\n args: &mut &[&'a [u8]],\n mut lhs: bool,\n min_prec: Precedence,\n error: &mut bool,\n) -> bool {\n let mut prec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n while !*error && args.len() > 0 && prec as usize >= min_prec as usize {\n let op = args[0];\n *args = &(*args)[1..];\n let mut rhs = dispatch(args, error);\n while args.len() > 0 {\n let subprec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n if subprec as usize <= prec as usize || *error {\n break;\n }\n rhs = parse_expr_helper(hashmap, args, rhs, subprec, error);\n }\n lhs = match prec {\n Precedence::UnOp | Precedence::BUnOp => {\n *error = true;\n false\n }\n Precedence::And => lhs && rhs,\n Precedence::Or => lhs || rhs,\n Precedence::BinOp => three(\n &[\n if lhs { b\" \" } else { b\"\" },\n op,\n if rhs { b\" \" } else { b\"\" },\n ],\n error,\n ),\n Precedence::Paren => unimplemented!(), \/\/ TODO: implement parentheses\n _ => unreachable!(),\n };\n if args.len() > 0 {\n prec = *hashmap.get(&args[0]).unwrap_or_else(|| {\n *error = true;\n &min_prec\n });\n }\n }\n lhs\n}\n\n#[inline]\nfn setup_hashmap<'a>() -> HashMap<&'a [u8], Precedence> {\n let mut hashmap = HashMap::<&'a [u8], Precedence>::new();\n\n hashmap.insert(b\"-b\", Precedence::UnOp);\n hashmap.insert(b\"-c\", Precedence::UnOp);\n hashmap.insert(b\"-d\", Precedence::UnOp);\n hashmap.insert(b\"-e\", Precedence::UnOp);\n hashmap.insert(b\"-f\", Precedence::UnOp);\n hashmap.insert(b\"-g\", Precedence::UnOp);\n hashmap.insert(b\"-h\", Precedence::UnOp);\n hashmap.insert(b\"-L\", Precedence::UnOp);\n hashmap.insert(b\"-n\", Precedence::UnOp);\n hashmap.insert(b\"-p\", Precedence::UnOp);\n hashmap.insert(b\"-r\", Precedence::UnOp);\n hashmap.insert(b\"-S\", Precedence::UnOp);\n hashmap.insert(b\"-s\", Precedence::UnOp);\n hashmap.insert(b\"-t\", Precedence::UnOp);\n hashmap.insert(b\"-u\", 
Precedence::UnOp);\n hashmap.insert(b\"-w\", Precedence::UnOp);\n hashmap.insert(b\"-x\", Precedence::UnOp);\n hashmap.insert(b\"-z\", Precedence::UnOp);\n\n hashmap.insert(b\"=\", Precedence::BinOp);\n hashmap.insert(b\"!=\", Precedence::BinOp);\n hashmap.insert(b\"-eq\", Precedence::BinOp);\n hashmap.insert(b\"-ne\", Precedence::BinOp);\n hashmap.insert(b\"-gt\", Precedence::BinOp);\n hashmap.insert(b\"-ge\", Precedence::BinOp);\n hashmap.insert(b\"-lt\", Precedence::BinOp);\n hashmap.insert(b\"-le\", Precedence::BinOp);\n\n hashmap.insert(b\"!\", Precedence::BUnOp);\n\n hashmap.insert(b\"-a\", Precedence::And);\n hashmap.insert(b\"-o\", Precedence::Or);\n\n hashmap.insert(b\"(\", Precedence::Paren);\n hashmap.insert(b\")\", Precedence::Paren);\n\n hashmap\n}\n\n#[derive(Eq, PartialEq)]\nenum PathCondition {\n BlockSpecial,\n CharacterSpecial,\n Directory,\n Exists,\n Regular,\n GroupIDFlag,\n SymLink,\n FIFO,\n Readable,\n Socket,\n NonEmpty,\n UserIDFlag,\n Writable,\n Executable,\n}\n\n#[cfg(not(windows))]\nfn path(path: &[u8], cond: PathCondition) -> bool {\n use std::os::unix::fs::{MetadataExt, FileTypeExt};\n use std::os::unix::ffi::OsStrExt;\n use std::fs::{self, Metadata};\n use std::ffi::OsStr;\n\n let path = OsStr::from_bytes(path);\n\n const S_ISUID: u32 = 0o4000;\n const S_ISGID: u32 = 0o2000;\n\n enum Permission {\n Read = 0o4,\n Write = 0o2,\n Execute = 0o1,\n }\n\n let perm = |metadata: Metadata, p: Permission| {\n #[cfg(not(target_os = \"redox\"))]\n let (uid, gid) = unsafe { (libc::getuid(), libc::getgid()) };\n #[cfg(target_os = \"redox\")]\n let (uid, gid) = (syscall::getuid().unwrap() as u32,\n syscall::getgid().unwrap() as u32);\n\n if uid == metadata.uid() {\n metadata.mode() & ((p as u32) << 6) != 0\n } else if gid == metadata.gid() {\n metadata.mode() & ((p as u32) << 3) != 0\n } else {\n metadata.mode() & ((p as u32)) != 0\n }\n };\n\n let metadata = if cond == PathCondition::SymLink {\n fs::symlink_metadata(path)\n } else {\n fs::metadata(path)\n };\n\n let metadata = match metadata {\n Ok(metadata) => metadata,\n Err(_) => { return false; }\n };\n\n let file_type = metadata.file_type();\n\n match cond {\n PathCondition::BlockSpecial => file_type.is_block_device(),\n PathCondition::CharacterSpecial => file_type.is_char_device(),\n PathCondition::Directory => file_type.is_dir(),\n PathCondition::Exists => true,\n PathCondition::Regular => file_type.is_file(),\n PathCondition::GroupIDFlag => metadata.mode() & S_ISGID != 0,\n PathCondition::SymLink => metadata.file_type().is_symlink(),\n PathCondition::FIFO => file_type.is_fifo(),\n PathCondition::Readable => perm(metadata, Permission::Read),\n PathCondition::Socket => file_type.is_socket(),\n PathCondition::NonEmpty => metadata.size() > 0,\n PathCondition::UserIDFlag => metadata.mode() & S_ISUID != 0,\n PathCondition::Writable => perm(metadata, Permission::Write),\n PathCondition::Executable => perm(metadata, Permission::Execute),\n }\n}\n\n#[cfg(windows)]\nfn path(path: &[u8], cond: PathCondition) -> bool {\n use std::fs::metadata;\n let path = from_utf8(path).unwrap();\n let stat = match metadata(path) {\n Ok(s) => s,\n _ => return false,\n };\n match cond {\n PathCondition::BlockSpecial => false,\n PathCondition::CharacterSpecial => false,\n PathCondition::Directory => stat.is_dir(),\n PathCondition::Exists => true,\n PathCondition::Regular => stat.is_file(),\n PathCondition::GroupIDFlag => false,\n PathCondition::SymLink => false,\n PathCondition::FIFO => false,\n PathCondition::Readable => false, \/\/ TODO\n 
PathCondition::Socket => false,\n PathCondition::NonEmpty => stat.len() > 0,\n PathCondition::UserIDFlag => false,\n PathCondition::Writable => false, \/\/ TODO\n PathCondition::Executable => false, \/\/ TODO\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>\/*fn replace_words<'a>(words: &'a Vec<&'a str>, sentence: &'a str) -> Vec<String> {\n let temp = sentence.split(' ').collect();\n let mut result: Vec<String>;\n for w in temp {}\n}*\/\n\nfn match_head_of_str(a: &str, b: &str) -> bool {\n let len1 = a.chars().count();\n let len2 = b.chars().count();\n\n if len1 >= len2 {\n return a.starts_with(b);\n } else {\n return b.starts_with(a);\n }\n}\n\nfn main() {\n let words = vec![\"cat\", \"bat\", \"rat\"];\n let sentence = \"the cattle was rattled by the battery\";\n \/\/println!(\"{:?}\", replace_words(&words, &sentence));\n println!(\"{:?}\", match_head_of_str(\"aa\", \"aaab\"));\n}\n<|endoftext|>"} {"text":"<commit_before>use std::ops::DerefMut;\n\nuse runtime::Runtime;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Entry;\n\npub type EditResult<T> = Result<T, RuntimeError>;\n\npub trait Edit {\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()>;\n}\n\nimpl Edit for Entry {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n edit_in_tmpfile(rt, self.get_content_mut())\n .map(|_| ())\n }\n\n}\n\nimpl<'a> Edit for FileLockEntry<'a> {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n self.deref_mut().edit_content(rt)\n }\n\n}\n\npub fn edit_in_tmpfile(rt: &Runtime, s: &mut String) -> EditResult<()> {\n use tempfile::NamedTempFile;\n use std::io::Seek;\n use std::io::Read;\n use std::io::SeekFrom;\n use std::io::Write;\n\n let file = try!(NamedTempFile::new());\n let file_path = file.path();\n let mut file = try!(file.reopen());\n\n file.write_all(&s.clone().into_bytes()[..]);\n\n try!(file.sync_data());\n\n if let Some(mut editor) = rt.editor() {\n let exit_status = editor.arg(file_path).status();\n\n match exit_status.map(|s| s.success()) {\n Ok(true) => {\n file.sync_data()\n .and_then(|_| file.seek(SeekFrom::Start(0)))\n .and_then(|_| file.read_to_string(s))\n .map(|_| ())\n .map_err(|e| RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e))))\n },\n Ok(_) => Err(RuntimeError::new(RuntimeErrorKind::ProcessExitFailure, None)),\n Err(e) => Err(RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e)))),\n }\n } else {\n Err(RuntimeError::new(RuntimeErrorKind::Instantiate, None))\n }\n}\n<commit_msg>We can match for true\/false instead of true\/_<commit_after>use std::ops::DerefMut;\n\nuse runtime::Runtime;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\n\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Entry;\n\npub type EditResult<T> = Result<T, RuntimeError>;\n\npub trait Edit {\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()>;\n}\n\nimpl Edit for Entry {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n edit_in_tmpfile(rt, self.get_content_mut())\n .map(|_| ())\n }\n\n}\n\nimpl<'a> Edit for FileLockEntry<'a> {\n\n fn edit_content(&mut self, rt: &Runtime) -> EditResult<()> {\n self.deref_mut().edit_content(rt)\n }\n\n}\n\npub fn edit_in_tmpfile(rt: &Runtime, s: &mut String) -> EditResult<()> {\n use tempfile::NamedTempFile;\n use std::io::Seek;\n use std::io::Read;\n use std::io::SeekFrom;\n use std::io::Write;\n\n let file = try!(NamedTempFile::new());\n let file_path 
= file.path();\n let mut file = try!(file.reopen());\n\n file.write_all(&s.clone().into_bytes()[..]);\n\n try!(file.sync_data());\n\n if let Some(mut editor) = rt.editor() {\n let exit_status = editor.arg(file_path).status();\n\n match exit_status.map(|s| s.success()) {\n Ok(true) => {\n file.sync_data()\n .and_then(|_| file.seek(SeekFrom::Start(0)))\n .and_then(|_| file.read_to_string(s))\n .map(|_| ())\n .map_err(|e| RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e))))\n },\n Ok(false) => Err(RuntimeError::new(RuntimeErrorKind::ProcessExitFailure, None)),\n Err(e) => Err(RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(e)))),\n }\n } else {\n Err(RuntimeError::new(RuntimeErrorKind::Instantiate, None))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust - 19: Add solution for part 2<commit_after>\/\/ adventofcode - day 19\n\/\/ part 2\n\nuse std::io::prelude::*;\nuse std::fs::File;\n\nstruct Replacement {\n from: String,\n to: String,\n}\n\nimpl Replacement {\n fn print(&self) {\n println!(\"Replacement: {} -> {}\", self.from, self.to);\n }\n}\n\nfn main(){\n println!(\"Advent of Code - day 19 | part 2\");\n\n \/\/ import data\n let data = import_data();\n\n let (replacements, molecule) = parse_data(data);\n\n println!(\"Replacements possible:\");\n for replacement in &replacements {\n replacement.print();\n }\n\n println!(\"\\nMolecule: {}\\n\", molecule);\n\n let steps = dissolve_molecule(&molecule, &replacements, \"e\");\n\n println!(\"It took {} steps to create the molecule.\", steps);\n}\n\nfn dissolve_molecule(base: &str, repls: &Vec<Replacement>, goal: &str) -> i32 {\n\n let mut mol = base.to_string();\n let mut ctr = 0;\n while mol != goal {\n for ref repl in repls {\n if mol.contains(&repl.to) {\n mol = replace_first(&mol, &repl.to, &repl.from);\n ctr += 1;\n }\n }\n }\n\n ctr\n}\n\nfn replace_first(string: &str, from: &str, to: &str) -> String {\n let matches: Vec<_> = string.match_indices(&from).collect();\n\n return if matches.len() > 0 {\n let (idx, _) = matches[0];\n\n \/\/ split the string into 3 strings:\n \/\/ a: substring BEFORE our element\n \/\/ _: the substring to replace\n \/\/ b: substring AFTER our element\n let (a, tmp) = string.split_at( idx );\n let (_, b) = tmp.split_at( from.len() );\n\n let mut result = a.to_string();\n result.push_str(&to);\n result.push_str(b);\n\n result\n } else {\n string.to_string()\n }\n}\n\nfn parse_data(data: String) -> (Vec<Replacement>, String) {\n\n let mut lines = data.lines();\n\n let mut replacements = Vec::new();\n loop {\n let line = lines.next().unwrap();\n \/\/ an empty line marks the end of the replacements, next line will be\n \/\/ our molecule\n if line == \"\" {\n break;\n }\n\n let replacement = string_to_replacement(line);\n replacements.push( replacement );\n }\n\n let molecule = lines.next().unwrap();\n\n ( replacements, molecule.parse::<String>().unwrap() )\n}\n\nfn string_to_replacement(string: &str) -> Replacement {\n let properties = string.split(\" => \")\n .map(|s| s.parse::<String>().unwrap())\n .collect::<Vec<String>>();\n\n Replacement { from: properties[0].clone(), to: properties[1].clone() }\n}\n\n\/\/ This function simply imports the data set from a file called input.txt\nfn import_data() -> String {\n let mut file = match File::open(\"..\/..\/inputs\/19.txt\") {\n Ok(f) => f,\n Err(e) => panic!(\"file error: {}\", e),\n };\n\n let mut data = String::new();\n match file.read_to_string(&mut data){\n Ok(_) => {},\n Err(e) => panic!(\"file error: {}\", e),\n };\n\n 
    data.pop();\n    data\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Minor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add animation code<commit_after>use num::rational::Ratio;\n\nuse buffer::RgbaImage;\n\n\/\/\/ Hold the frames of the animated image\npub struct Frames {\n    frames: Vec<Frame>,\n    current_frame: uint,\n}\n\nimpl Frames {\n    \/\/\/ Constructs a new frame iterator\n    pub fn new(frames: Vec<Frame>) -> Frames {\n        Frames {\n            frames: frames,\n            current_frame: 0\n        }\n    }\n}\n\n\/\/\/ A single animation frame\n#[deriving(Clone)]\npub struct Frame {\n    \/\/\/ Delay between the frames in s\n    delay: Ratio<u16>,\n    buffer: RgbaImage,\n}\n\nimpl Frame {\n    \/\/\/ Constructs a new frame\n    pub fn new(buffer: RgbaImage) -> Frame {\n        Frame {\n            delay: Ratio::from_integer(0),\n            buffer: buffer\n        }\n    }\n}\n\nimpl<'a> Iterator<Frame> for Frames {\n    fn next(&mut self) -> Option<Frame> {\n        let frame = self.current_frame;\n        self.current_frame += 1;\n        self.frames.get(frame).map(|v| v.clone())\n    }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>day 3 solved.<commit_after>use std::collections::HashSet;\nuse std::io::prelude::*;\nuse std::fs::File;\nuse std::path::Path;\n\nfn main() {\n    let mut f = File::open(Path::new(\"\/Users\/PetarV\/rust-proj\/advent-of-rust\/target\/input.txt\"))\n    \t.ok()\n    \t.expect(\"Failed to open the input file!\");\n\n    let mut input = String::new();\n\n\tf.read_to_string(&mut input)\n\t\t.ok()\n\t\t.expect(\"Failed to read from the input file!\");\n\n    let mut pos_x = 0;\n    let mut pos_y = 0;\n    let mut visited = HashSet::new();\n\n    visited.insert((0, 0));\n\n    for ch in input.chars() {\n        match ch {\n            'v' => pos_x = pos_x + 1,\n            '>' => pos_y = pos_y + 1,\n            '^' => pos_x = pos_x - 1,\n            '<' => pos_y = pos_y - 1,\n            _ => ()\n        };\n        visited.insert((pos_x, pos_y));\n    }\n\n    let ret = visited.len();\n\n    println!(\"The Santa has visited {} houses.\", ret);\n\n    let mut pos_x = [0, 0];\n    let mut pos_y = [0, 0];\n    let mut visited = HashSet::new();\n\n    visited.insert((0, 0));\n\n    for (i, ch) in input.chars().enumerate() {\n        let ind = i % 2;\n        match ch {\n            'v' => pos_x[ind] = pos_x[ind] + 1,\n            '>' => pos_y[ind] = pos_y[ind] + 1,\n            '^' => pos_x[ind] = pos_x[ind] - 1,\n            '<' => pos_y[ind] = pos_y[ind] - 1,\n            _ => ()\n        };\n        visited.insert((pos_x[ind], pos_y[ind]));\n    }\n\n    let ret = visited.len();\n\n    println!(\"The Santa and Robo-Santa have visited {} houses.\", ret);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>core view<commit_after>use std::path::PathBuf;\nuse std::fs::File;\nuse std::io::{BufReader, BufRead, Error as IoError};\n\n#[derive(Debug)]\npub struct View {\n    path: Option<PathBuf>,\n    lines: Vec<Line>\n}\n\n#[derive(Debug)]\npub struct Line {\n    text: String\n}\n\nimpl Line {\n    fn new(text: String) -> Line {\n        Line {\n            text: text\n        }\n    }\n}\n\nimpl View {\n    pub fn new() -> View {\n        View {\n            path: None,\n            lines: Vec::new()\n        }\n    }\n\n    pub fn open(path: PathBuf) -> Result<View, IoError> {\n        let mut lines = Vec::new();\n        for text in BufReader::new(try!(File::open(&path))).lines() {\n            lines.push(Line::new(try!(text)));\n        }\n        Ok(View {\n            path: Some(path),\n            lines: lines\n        })\n    }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Convert to new closure syntax: &fn() -> ||<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>- Added test for cases when there are creatures without an assigned action. 
- Fix popping logic<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>heap and stack<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Placeholder for pattern tests<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Handles syntax highlighting and other styling.\n\/\/!\n\/\/! Plugins provide syntax highlighting information in the form of 'scopes'.\n\/\/! Scope information originating from any number of plugins can be resolved\n\/\/! into styles using a theme, augmented with additional style definitions.\n\nuse std::collections::BTreeMap;\nuse syntect::parsing::Scope;\nuse syntect::highlighting::StyleModifier;\n\nuse xi_rope::interval::Interval;\nuse xi_rope::spans::{Spans, SpansBuilder};\n\nuse tabs::DocumentCtx;\nuse styles::Style;\nuse plugins::PluginPid;\n\n\/\/\/ A collection of layers containing scope information.\n#[derive(Default)]\n\/\/TODO: rename. Probably to `Layers`\npub struct Scopes {\n layers: BTreeMap<PluginPid, ScopeLayer>,\n merged: Spans<Style>,\n}\n\n\/\/\/ A collection of scope spans from a single source.\npub struct ScopeLayer {\n stack_lookup: Vec<Vec<Scope>>,\n style_lookup: Vec<Style>,\n \/\/ TODO: this might be efficient (in memory at least) if we use\n \/\/ a prefix tree.\n \/\/\/ style state of existing scope spans, so we can more efficiently\n \/\/\/ compute styles of child spans.\n style_cache: BTreeMap<Vec<Scope>, StyleModifier>,\n \/\/\/ Human readable scope names, for debugging\n name_lookup: Vec<Vec<String>>,\n scope_spans: Spans<u32>,\n style_spans: Spans<Style>,\n}\n\nimpl Scopes {\n\n pub fn get_merged(&self) -> &Spans<Style> {\n &self.merged\n }\n\n \/\/\/ Adds the provided scopes to the layer's lookup table.\n pub fn add_scopes(&mut self, layer: PluginPid, scopes: Vec<Vec<String>>,\n doc_ctx: &DocumentCtx) {\n self.create_if_missing(layer);\n self.layers.get_mut(&layer).unwrap().add_scopes(scopes, doc_ctx);\n }\n\n \/\/\/ Inserts empty spans at the given interval for all layers.\n \/\/\/\n \/\/\/ This is useful for clearing spans, and for updating spans\n \/\/\/ as edits occur.\n pub fn update_all(&mut self, iv: Interval, len: usize) {\n self.merged.edit(iv, SpansBuilder::new(len).build());\n let empty_spans = SpansBuilder::new(len).build();\n for layer in self.layers.values_mut() {\n layer.update_scopes(iv, &empty_spans);\n }\n self.resolve_styles(iv);\n }\n\n \/\/\/ Updates the scope spans for a given layer.\n pub fn update_layer(&mut self, layer: PluginPid, iv: Interval, spans: Spans<u32>) {\n self.create_if_missing(layer);\n self.layers.get_mut(&layer).unwrap().update_scopes(iv, &spans);\n self.resolve_styles(iv);\n }\n\n \/\/\/ Removes a given layer. 
This will remove all styles derived from\n \/\/\/ that layer's scopes.\n pub fn remove_layer(&mut self, layer: PluginPid) -> Option<ScopeLayer> {\n let layer = self.layers.remove(&layer);\n if layer.is_some() {\n let iv_all = Interval::new_closed_closed(0, self.merged.len());\n \/\/TODO: should Spans<T> have a clear() method?\n self.merged = SpansBuilder::new(self.merged.len()).build();\n self.resolve_styles(iv_all);\n }\n layer\n }\n\n pub fn theme_changed(&mut self, doc_ctx: &DocumentCtx) {\n for layer in self.layers.values_mut() {\n layer.theme_changed(doc_ctx);\n }\n self.merged = SpansBuilder::new(self.merged.len()).build();\n let iv_all = Interval::new_closed_closed(0, self.merged.len());\n self.resolve_styles(iv_all);\n }\n\n \/\/\/ Resolves styles from all layers for the given interval, updating\n \/\/\/ the master style spans.\n fn resolve_styles(&mut self, iv: Interval) {\n if self.layers.is_empty() {\n return\n }\n let mut layer_iter = self.layers.values();\n let mut resolved = layer_iter.next().unwrap().style_spans.subseq(iv);\n\n for other in layer_iter {\n let spans = other.style_spans.subseq(iv);\n assert_eq!(resolved.len(), spans.len());\n resolved = resolved.merge(&spans, |a, b| {\n match b {\n Some(b) => a.merge(b),\n None => a.to_owned(),\n }\n });\n }\n self.merged.edit(iv, resolved);\n }\n\n \/\/\/ Prints scopes and style information for the given `Interval`.\n pub fn debug_print_spans(&self, iv: Interval) {\n for (id, layer) in self.layers.iter() {\n let spans = layer.scope_spans.subseq(iv);\n let styles = layer.style_spans.subseq(iv);\n if spans.iter().next().is_some() {\n eprintln!(\"scopes for layer {:?}:\", id);\n for (iv, val) in spans.iter() {\n eprintln!(\"{}: {:?}\", iv, layer.name_lookup[*val as usize]);\n }\n eprintln!(\"styles:\");\n for (iv, val) in styles.iter() {\n eprintln!(\"{}: {:?}\", iv, val);\n }\n }\n }\n }\n\n\n fn create_if_missing(&mut self, layer_id: PluginPid) {\n if !self.layers.contains_key(&layer_id) {\n self.layers.insert(layer_id, ScopeLayer::new(self.merged.len()));\n }\n }\n}\n\nimpl Default for ScopeLayer {\n fn default() -> Self {\n ScopeLayer {\n stack_lookup: Vec::new(),\n style_lookup: Vec::new(),\n name_lookup: Vec::new(),\n style_cache: BTreeMap::new(),\n scope_spans: Spans::default(),\n style_spans: Spans::default(),\n }\n }\n}\n\nimpl ScopeLayer {\n\n pub fn new(len: usize) -> Self {\n ScopeLayer {\n stack_lookup: Vec::new(),\n style_lookup: Vec::new(),\n name_lookup: Vec::new(),\n style_cache: BTreeMap::new(),\n scope_spans: SpansBuilder::new(len).build(),\n style_spans: SpansBuilder::new(len).build(),\n }\n }\n\n fn theme_changed(&mut self, doc_ctx: &DocumentCtx) {\n \/\/ recompute styles with the new theme\n let cur_stacks = self.stack_lookup.clone();\n self.style_lookup = self.styles_for_stacks(&cur_stacks, doc_ctx);\n let iv_all = Interval::new_closed_closed(0, self.style_spans.len());\n self.style_spans = SpansBuilder::new(self.style_spans.len()).build();\n \/\/ this feels unnecessary but we can't pass in a reference to self\n \/\/ and I don't want to get fancy unless there's an actual perf problem\n let scopes = self.scope_spans.clone();\n self.update_styles(iv_all, &scopes)\n }\n\n fn add_scopes(&mut self, scopes: Vec<Vec<String>>,\n doc_ctx: &DocumentCtx) {\n let mut stacks = Vec::with_capacity(scopes.len());\n for stack in scopes {\n let scopes = stack.iter().map(|s| Scope::new(&s))\n .filter(|result| match *result {\n Err(ref err) => {\n eprintln!(\"failed to resolve scope {}\\nErr: {:?}\",\n &stack.join(\" \"),\n 
err);\n false\n }\n _ => true\n })\n .map(|s| s.unwrap())\n .collect::<Vec<_>>();\n stacks.push(scopes);\n self.name_lookup.push(stack);\n }\n\n let mut new_styles = self.styles_for_stacks(stacks.as_slice(), doc_ctx);\n self.stack_lookup.append(&mut stacks);\n self.style_lookup.append(&mut new_styles);\n }\n\n fn styles_for_stacks(&mut self, stacks: &[Vec<Scope>],\n doc_ctx: &DocumentCtx) -> Vec<Style> {\n let style_map = doc_ctx.get_style_map().lock().unwrap();\n let highlighter = style_map.get_highlighter();\n let mut new_styles = Vec::new();\n\n for stack in stacks {\n let mut last_style: Option<StyleModifier> = None;\n let mut upper_bound_of_last = stack.len() as usize;\n\n \/\/ walk backwards through stack to see if we have an existing\n \/\/ style for any child stacks.\n for i in 0..stack.len()-1 {\n let prev_range = 0..stack.len() - (i + 1);\n if let Some(s) = self.style_cache.get(&stack[prev_range]) {\n last_style = Some(*s);\n upper_bound_of_last = stack.len() - (i + 1);\n break\n }\n }\n let mut base_style_mod = last_style.unwrap_or_default();\n\n \/\/ apply the stack, generating children as needed.\n for i in upper_bound_of_last..stack.len() {\n let style_mod = highlighter.get_style(&stack[0..i+1]);\n base_style_mod = base_style_mod.apply(style_mod);\n }\n\n let style = Style::from_syntect_style_mod(&base_style_mod);\n self.style_cache.insert(stack.clone(), base_style_mod);\n\n new_styles.push(style);\n }\n new_styles\n }\n\n fn update_scopes(&mut self, iv: Interval, spans: &Spans<u32>) {\n self.scope_spans.edit(iv, spans.to_owned());\n self.update_styles(iv, spans);\n }\n\n \/\/\/ Updates `self.style_spans`, mapping scopes to styles and combining\n \/\/\/ adjacent and equal spans.\n fn update_styles(&mut self, iv: Interval, spans: &Spans<u32>) {\n\n \/\/ NOTE: This is a tradeoff. Keeping both u32 and Style spans for each\n \/\/ layer makes debugging simpler and reduces the total number of spans\n \/\/ on the wire (because we combine spans that resolve to the same style)\n \/\/ but it does require additional computation + memory up front.\n let mut sb = SpansBuilder::new(spans.len());\n let mut spans_iter = spans.iter();\n let mut prev = spans_iter.next();\n {\n \/\/ distinct adjacent scopes can often resolve to the same style,\n \/\/ so we combine them when building the styles.\n let style_eq = |i1: &u32, i2: &u32| {\n self.style_lookup[*i1 as usize] == self.style_lookup[*i2 as usize]\n };\n\n while let Some((p_iv, p_val)) = prev {\n match spans_iter.next() {\n Some((n_iv, n_val)) if n_iv.start() == p_iv.end() && style_eq(p_val, n_val) => {\n prev = Some((p_iv.union(n_iv), p_val));\n }\n other => {\n sb.add_span(p_iv, self.style_lookup[*p_val as usize].to_owned());\n prev = other;\n }\n }\n }\n }\n self.style_spans.edit(iv, sb.build());\n }\n}\n<commit_msg>Remove redundant name lookup from ScopeLayer<commit_after>\/\/ Copyright 2017 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
Handles syntax highlighting and other styling.\n\/\/!\n\/\/! Plugins provide syntax highlighting information in the form of 'scopes'.\n\/\/! Scope information originating from any number of plugins can be resolved\n\/\/! into styles using a theme, augmented with additional style definitions.\n\nuse std::collections::BTreeMap;\nuse syntect::parsing::Scope;\nuse syntect::highlighting::StyleModifier;\n\nuse xi_rope::interval::Interval;\nuse xi_rope::spans::{Spans, SpansBuilder};\n\nuse tabs::DocumentCtx;\nuse styles::Style;\nuse plugins::PluginPid;\n\n\/\/\/ A collection of layers containing scope information.\n#[derive(Default)]\n\/\/TODO: rename. Probably to `Layers`\npub struct Scopes {\n    layers: BTreeMap<PluginPid, ScopeLayer>,\n    merged: Spans<Style>,\n}\n\n\/\/\/ A collection of scope spans from a single source.\npub struct ScopeLayer {\n    stack_lookup: Vec<Vec<Scope>>,\n    style_lookup: Vec<Style>,\n    \/\/ TODO: this might be efficient (in memory at least) if we use\n    \/\/ a prefix tree.\n    \/\/\/ style state of existing scope spans, so we can more efficiently\n    \/\/\/ compute styles of child spans.\n    style_cache: BTreeMap<Vec<Scope>, StyleModifier>,\n    scope_spans: Spans<u32>,\n    style_spans: Spans<Style>,\n}\n\nimpl Scopes {\n\n    pub fn get_merged(&self) -> &Spans<Style> {\n        &self.merged\n    }\n\n    \/\/\/ Adds the provided scopes to the layer's lookup table.\n    pub fn add_scopes(&mut self, layer: PluginPid, scopes: Vec<Vec<String>>,\n                      doc_ctx: &DocumentCtx) {\n        self.create_if_missing(layer);\n        self.layers.get_mut(&layer).unwrap().add_scopes(scopes, doc_ctx);\n    }\n\n    \/\/\/ Inserts empty spans at the given interval for all layers.\n    \/\/\/\n    \/\/\/ This is useful for clearing spans, and for updating spans\n    \/\/\/ as edits occur.\n    pub fn update_all(&mut self, iv: Interval, len: usize) {\n        self.merged.edit(iv, SpansBuilder::new(len).build());\n        let empty_spans = SpansBuilder::new(len).build();\n        for layer in self.layers.values_mut() {\n            layer.update_scopes(iv, &empty_spans);\n        }\n        self.resolve_styles(iv);\n    }\n\n    \/\/\/ Updates the scope spans for a given layer.\n    pub fn update_layer(&mut self, layer: PluginPid, iv: Interval, spans: Spans<u32>) {\n        self.create_if_missing(layer);\n        self.layers.get_mut(&layer).unwrap().update_scopes(iv, &spans);\n        self.resolve_styles(iv);\n    }\n\n    \/\/\/ Removes a given layer. 
This will remove all styles derived from\n \/\/\/ that layer's scopes.\n pub fn remove_layer(&mut self, layer: PluginPid) -> Option<ScopeLayer> {\n let layer = self.layers.remove(&layer);\n if layer.is_some() {\n let iv_all = Interval::new_closed_closed(0, self.merged.len());\n \/\/TODO: should Spans<T> have a clear() method?\n self.merged = SpansBuilder::new(self.merged.len()).build();\n self.resolve_styles(iv_all);\n }\n layer\n }\n\n pub fn theme_changed(&mut self, doc_ctx: &DocumentCtx) {\n for layer in self.layers.values_mut() {\n layer.theme_changed(doc_ctx);\n }\n self.merged = SpansBuilder::new(self.merged.len()).build();\n let iv_all = Interval::new_closed_closed(0, self.merged.len());\n self.resolve_styles(iv_all);\n }\n\n \/\/\/ Resolves styles from all layers for the given interval, updating\n \/\/\/ the master style spans.\n fn resolve_styles(&mut self, iv: Interval) {\n if self.layers.is_empty() {\n return\n }\n let mut layer_iter = self.layers.values();\n let mut resolved = layer_iter.next().unwrap().style_spans.subseq(iv);\n\n for other in layer_iter {\n let spans = other.style_spans.subseq(iv);\n assert_eq!(resolved.len(), spans.len());\n resolved = resolved.merge(&spans, |a, b| {\n match b {\n Some(b) => a.merge(b),\n None => a.to_owned(),\n }\n });\n }\n self.merged.edit(iv, resolved);\n }\n\n \/\/\/ Prints scopes and style information for the given `Interval`.\n pub fn debug_print_spans(&self, iv: Interval) {\n for (id, layer) in self.layers.iter() {\n let spans = layer.scope_spans.subseq(iv);\n let styles = layer.style_spans.subseq(iv);\n if spans.iter().next().is_some() {\n eprintln!(\"scopes for layer {:?}:\", id);\n for (iv, val) in spans.iter() {\n eprintln!(\"{}: {:?}\", iv, layer.stack_lookup[*val as usize]);\n }\n eprintln!(\"styles:\");\n for (iv, val) in styles.iter() {\n eprintln!(\"{}: {:?}\", iv, val);\n }\n }\n }\n }\n\n\n fn create_if_missing(&mut self, layer_id: PluginPid) {\n if !self.layers.contains_key(&layer_id) {\n self.layers.insert(layer_id, ScopeLayer::new(self.merged.len()));\n }\n }\n}\n\nimpl Default for ScopeLayer {\n fn default() -> Self {\n ScopeLayer {\n stack_lookup: Vec::new(),\n style_lookup: Vec::new(),\n style_cache: BTreeMap::new(),\n scope_spans: Spans::default(),\n style_spans: Spans::default(),\n }\n }\n}\n\nimpl ScopeLayer {\n\n pub fn new(len: usize) -> Self {\n ScopeLayer {\n stack_lookup: Vec::new(),\n style_lookup: Vec::new(),\n style_cache: BTreeMap::new(),\n scope_spans: SpansBuilder::new(len).build(),\n style_spans: SpansBuilder::new(len).build(),\n }\n }\n\n fn theme_changed(&mut self, doc_ctx: &DocumentCtx) {\n \/\/ recompute styles with the new theme\n let cur_stacks = self.stack_lookup.clone();\n self.style_lookup = self.styles_for_stacks(&cur_stacks, doc_ctx);\n let iv_all = Interval::new_closed_closed(0, self.style_spans.len());\n self.style_spans = SpansBuilder::new(self.style_spans.len()).build();\n \/\/ this feels unnecessary but we can't pass in a reference to self\n \/\/ and I don't want to get fancy unless there's an actual perf problem\n let scopes = self.scope_spans.clone();\n self.update_styles(iv_all, &scopes)\n }\n\n fn add_scopes(&mut self, scopes: Vec<Vec<String>>,\n doc_ctx: &DocumentCtx) {\n let mut stacks = Vec::with_capacity(scopes.len());\n for stack in scopes {\n let scopes = stack.iter().map(|s| Scope::new(&s))\n .filter(|result| match *result {\n Err(ref err) => {\n eprintln!(\"failed to resolve scope {}\\nErr: {:?}\",\n &stack.join(\" \"),\n err);\n false\n }\n _ => true\n })\n .map(|s| 
s.unwrap())\n .collect::<Vec<_>>();\n stacks.push(scopes);\n }\n\n let mut new_styles = self.styles_for_stacks(stacks.as_slice(), doc_ctx);\n self.stack_lookup.append(&mut stacks);\n self.style_lookup.append(&mut new_styles);\n }\n\n fn styles_for_stacks(&mut self, stacks: &[Vec<Scope>],\n doc_ctx: &DocumentCtx) -> Vec<Style> {\n let style_map = doc_ctx.get_style_map().lock().unwrap();\n let highlighter = style_map.get_highlighter();\n let mut new_styles = Vec::new();\n\n for stack in stacks {\n let mut last_style: Option<StyleModifier> = None;\n let mut upper_bound_of_last = stack.len() as usize;\n\n \/\/ walk backwards through stack to see if we have an existing\n \/\/ style for any child stacks.\n for i in 0..stack.len()-1 {\n let prev_range = 0..stack.len() - (i + 1);\n if let Some(s) = self.style_cache.get(&stack[prev_range]) {\n last_style = Some(*s);\n upper_bound_of_last = stack.len() - (i + 1);\n break\n }\n }\n let mut base_style_mod = last_style.unwrap_or_default();\n\n \/\/ apply the stack, generating children as needed.\n for i in upper_bound_of_last..stack.len() {\n let style_mod = highlighter.get_style(&stack[0..i+1]);\n base_style_mod = base_style_mod.apply(style_mod);\n }\n\n let style = Style::from_syntect_style_mod(&base_style_mod);\n self.style_cache.insert(stack.clone(), base_style_mod);\n\n new_styles.push(style);\n }\n new_styles\n }\n\n fn update_scopes(&mut self, iv: Interval, spans: &Spans<u32>) {\n self.scope_spans.edit(iv, spans.to_owned());\n self.update_styles(iv, spans);\n }\n\n \/\/\/ Updates `self.style_spans`, mapping scopes to styles and combining\n \/\/\/ adjacent and equal spans.\n fn update_styles(&mut self, iv: Interval, spans: &Spans<u32>) {\n\n \/\/ NOTE: This is a tradeoff. Keeping both u32 and Style spans for each\n \/\/ layer makes debugging simpler and reduces the total number of spans\n \/\/ on the wire (because we combine spans that resolve to the same style)\n \/\/ but it does require additional computation + memory up front.\n let mut sb = SpansBuilder::new(spans.len());\n let mut spans_iter = spans.iter();\n let mut prev = spans_iter.next();\n {\n \/\/ distinct adjacent scopes can often resolve to the same style,\n \/\/ so we combine them when building the styles.\n let style_eq = |i1: &u32, i2: &u32| {\n self.style_lookup[*i1 as usize] == self.style_lookup[*i2 as usize]\n };\n\n while let Some((p_iv, p_val)) = prev {\n match spans_iter.next() {\n Some((n_iv, n_val)) if n_iv.start() == p_iv.end() && style_eq(p_val, n_val) => {\n prev = Some((p_iv.union(n_iv), p_val));\n }\n other => {\n sb.add_span(p_iv, self.style_lookup[*p_val as usize].to_owned());\n prev = other;\n }\n }\n }\n }\n self.style_spans.edit(iv, sb.build());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>how to read a file<commit_after>\/*\n * How to read a file.\n * Future work: as a variant, we may use the C bindings to call mmap\/munmap\n *\/\n\n\/* read the file path by calling the read_whole_file_str function *\/\nfn read_file_whole(path: ~str) -> ~str {\n let res = io::read_whole_file_str(&Path(path));\n if result::is_err(&res) {\n fail ~\"file_reader error: \" + result::get_err(&res);\n }\n res.get()\n}\n\n\/* read the file path line by line *\/\nfn read_file_lines(path: ~str) -> ~str {\n let res = io::file_reader(&Path(path));\n if result::is_err(&res) {\n fail ~\"file_reader error: \" + result::get_err(&res);\n }\n\n let mut content = ~\"\";\n let reader = res.get();\n loop {\n let line = (reader as io::ReaderUtil).read_line();\n if reader.eof() {\n 
break;\n }\n \/\/ read_line does not return the '\\n', so we add it\n content += line + ~\"\\n\";\n }\n\n content\n}\n\n\nfn main() {\n let filename = ~\"read_file.rs\";\n \/\/let content = read_file_whole(copy filename);\n let content = read_file_lines(copy filename);\n io::println(~\"the content of \" + filename + ~\" is [\\n\" + content + ~\"]\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added expression example<commit_after>fn main() {\n let x = 5u32;\n\n let y = {\n let x_squared = x * x;\n let x_cube = x_squared * x;\n\n \/\/ This expression will be assigned to `y`\n x_cube + x_squared + x\n };\n\n let z = {\n \/\/ The semicolon suppresses this expression and `()` is assigned to `z`\n 2 * x;\n };\n\n println!(\"x is {:?}\", x);\n println!(\"y is {:?}\", y);\n println!(\"z is {:?}\", z);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for a big enum<commit_after>#[macro_use]\nextern crate enum_map;\n#[macro_use]\nextern crate enum_map_derive;\n\n#[derive(EnumMap)]\nenum Example {\n A,\n B,\n C,\n D,\n E,\n F,\n G,\n H,\n I,\n J,\n K,\n L,\n M,\n N,\n O,\n P,\n Q,\n R,\n S,\n T,\n U,\n V,\n W,\n X,\n Y,\n Z,\n Aa,\n Bb,\n Cc,\n Dd,\n Ee,\n Ff,\n Gg,\n Hh,\n Ii,\n Jj,\n Kk,\n Ll,\n Mm,\n Nn,\n Oo,\n Pp,\n Qq,\n Rr,\n Ss,\n Tt,\n Uu,\n Vv,\n Ww,\n Xx,\n Yy,\n Zz,\n}\n\n#[test]\nfn huge_enum() {\n let map = enum_map! { _ => 2 };\n assert_eq!(map[Example::Xx], 2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add unit tests for mesh creation.<commit_after>use crate::tests;\nuse crate::*;\n\nconst TRIANGLE_VERTS: &[graphics::Vertex] = &[\n graphics::Vertex {\n pos: [0.0, 0.0],\n uv: [0.0, 0.0],\n color: [1.0, 1.0, 1.0, 1.0],\n },\n graphics::Vertex {\n pos: [0.0, 0.0],\n uv: [0.0, 0.0],\n color: [1.0, 1.0, 1.0, 1.0],\n },\n graphics::Vertex {\n pos: [0.0, 0.0],\n uv: [0.0, 0.0],\n color: [1.0, 1.0, 1.0, 1.0],\n },\n ];\n\n\/\/\/ Mesh creation fails if verts or indices are empty.\n#[test]\nfn test_mesh_verts_empty() {\n let (mut ctx, _ev) = tests::make_context();\n let verts: Vec<graphics::Vertex> = vec![\n ];\n let indices: Vec<u32> = vec![\n ];\n let m = graphics::Mesh::from_raw(&mut ctx, &verts, &indices, None);\n assert!(m.is_err());\n\n let m = graphics::Mesh::from_raw(&mut ctx, TRIANGLE_VERTS, &indices, None);\n assert!(m.is_err());\n\n}\n\n\/\/\/ Mesh creation fails if not enough indices to make a triangle.\n#[test]\nfn test_mesh_verts_invalid_count() {\n let (mut ctx, _ev) = tests::make_context();\n let indices: Vec<u32> = vec![\n 0, 1\n ];\n let m = graphics::Mesh::from_raw(&mut ctx, TRIANGLE_VERTS, &indices, None);\n assert!(m.is_err());\n\n let indices: Vec<u32> = vec![\n 0, 1, 2, 0\n ];\n let m = graphics::Mesh::from_raw(&mut ctx, TRIANGLE_VERTS, &indices, None);\n assert!(m.is_err());\n}\n\n\n\/\/\/ TODO: This is actually tricky to test for well...\n#[test]\nfn test_mesh_points_clockwise() {\n let (mut ctx, _ev) = tests::make_context();\n\n \/\/ Points in CCW order\n let points: Vec<graphics::Point2> = vec![\n graphics::Point2::new(0.0, 0.0),\n graphics::Point2::new(0.0, -1.0),\n graphics::Point2::new(-1.0, -1.0),\n ];\n \n let trapezoid_mesh = graphics::Mesh::new_polygon(\n &mut ctx, \n graphics::DrawMode::fill(), \n &points,\n [0.0, 0.0, 1.0, 1.0].into()\n );\n\n assert!(trapezoid_mesh.is_err());\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt;\nuse std::io::{self, Read};\nuse std::sync::{Arc, Mutex};\n\nuse hyper::client::IntoUrl;\nuse 
hyper::header::{Headers, ContentType, Location, Referer, UserAgent, Accept};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse hyper::{Url};\n\nuse serde::{Deserialize, Serialize};\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\nuse ::redirect::{RedirectPolicy, check_redirect};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\n#[derive(Clone)]\npub struct Client {\n inner: Arc<ClientRef>, \/\/::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: Arc::new(ClientRef {\n hyper: client,\n redirect_policy: Mutex::new(RedirectPolicy::default()),\n }),\n })\n }\n\n \/\/\/ Set a `RedirectPolicy` for this client.\n pub fn redirect(&mut self, policy: RedirectPolicy) {\n *self.inner.redirect_policy.lock().unwrap() = policy;\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Convenience method to make a `HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self.inner.clone(),\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\nimpl fmt::Debug for Client {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Client\")\n .field(\"redirect_policy\", &self.inner.redirect_policy)\n .finish()\n }\n}\n\nstruct ClientRef {\n hyper: ::hyper::Client,\n redirect_policy: Mutex<RedirectPolicy>,\n}\n\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use tls::TlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(try!(TlsClient::new()))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder {\n client: Arc<ClientRef>,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl RequestBuilder {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + 
::header::HeaderFormat>(mut self, header: H) -> RequestBuilder {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n if !self.headers.has::<Accept>() {\n self.headers.set(Accept::star());\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut urls = Vec::new();\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.hyper.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n\n let should_redirect = match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found |\n StatusCode::SeeOther => {\n body = None;\n match method {\n Method::Get | Method::Head => {},\n _ => {\n method = Method::Get;\n }\n }\n true\n },\n StatusCode::TemporaryRedirect |\n StatusCode::PermanentRedirect => {\n if let Some(ref body) = body {\n body::can_reset(body)\n } else {\n true\n }\n },\n _ => false,\n };\n\n if should_redirect {\n let loc = {\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(loc) => {\n 
headers.set(Referer(url.to_string()));\n urls.push(url);\n if check_redirect(&client.redirect_policy.lock().unwrap(), &loc, &urls)? {\n loc\n } else {\n debug!(\"redirect_policy disallowed redirection to '{}'\", loc);\n return Ok(Response {\n inner: res\n })\n }\n },\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to {:?} '{}'\", method, url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n } else {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n}\n\nimpl fmt::Debug for RequestBuilder {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"RequestBuilder\")\n .field(\"method\", &self.method)\n .field(\"url\", &self.url)\n .field(\"headers\", &self.headers)\n .finish()\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n #[inline]\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n #[inline]\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n\n \/\/\/ Try and deserialize the response body as JSON.\n #[inline]\n pub fn json<T: Deserialize>(&mut self) -> ::Result<T> {\n serde_json::from_reader(self).map_err(::Error::from)\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n\nimpl fmt::Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Response\")\n .field(\"status\", self.status())\n .field(\"headers\", self.headers())\n .field(\"version\", self.version())\n .finish()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use ::body;\n use hyper::method::Method;\n use hyper::Url;\n use hyper::header::{Host, Headers, ContentType};\n use std::collections::HashMap;\n use serde_urlencoded;\n use serde_json;\n\n #[test]\n fn basic_get_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.get(some_url);\n\n assert_eq!(r.method, Method::Get);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_head_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.head(some_url);\n\n assert_eq!(r.method, Method::Head);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_post_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.post(some_url);\n\n assert_eq!(r.method, Method::Post);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn add_header() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n \/\/ Add a copy of the header to the request builder\n r = r.header(header.clone());\n\n \/\/ then check it was actually added\n assert_eq!(r.headers.get::<Host>(), Some(&header));\n }\n\n #[test]\n fn add_headers() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n let mut headers = Headers::new();\n 
headers.set(header);\n\n \/\/ Add a copy of the headers to the request builder\n r = r.headers(headers.clone());\n\n \/\/ then make sure they were added correctly\n assert_eq!(r.headers, headers);\n }\n\n #[test]\n fn add_body() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let body = \"Some interesting content\";\n\n r = r.body(body);\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n assert_eq!(buf, body);\n }\n\n #[test]\n fn add_form() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut form_data = HashMap::new();\n form_data.insert(\"foo\", \"bar\");\n\n r = r.form(&form_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::form_url_encoded()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_urlencoded::to_string(&form_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n\n #[test]\n fn add_json() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut json_data = HashMap::new();\n json_data.insert(\"foo\", \"bar\");\n\n r = r.json(&json_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::json()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_json::to_string(&json_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n}\n<commit_msg>remove Clone impl from Client<commit_after>use std::fmt;\nuse std::io::{self, Read};\nuse std::sync::{Arc, Mutex};\n\nuse hyper::client::IntoUrl;\nuse hyper::header::{Headers, ContentType, Location, Referer, UserAgent, Accept};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse hyper::{Url};\n\nuse serde::{Deserialize, Serialize};\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\nuse ::redirect::{RedirectPolicy, check_redirect};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\npub struct Client {\n inner: Arc<ClientRef>, \/\/::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: Arc::new(ClientRef {\n hyper: client,\n redirect_policy: Mutex::new(RedirectPolicy::default()),\n }),\n })\n }\n\n \/\/\/ Set a `RedirectPolicy` for this client.\n pub fn redirect(&mut self, policy: RedirectPolicy) {\n *self.inner.redirect_policy.lock().unwrap() = policy;\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Convenience method to make a 
`HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self.inner.clone(),\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\nimpl fmt::Debug for Client {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Client\")\n .field(\"redirect_policy\", &self.inner.redirect_policy)\n .finish()\n }\n}\n\nstruct ClientRef {\n hyper: ::hyper::Client,\n redirect_policy: Mutex<RedirectPolicy>,\n}\n\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use tls::TlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(try!(TlsClient::new()))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder {\n client: Arc<ClientRef>,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl RequestBuilder {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + ::header::HeaderFormat>(mut self, header: H) -> RequestBuilder {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ 
.json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n if !self.headers.has::<Accept>() {\n self.headers.set(Accept::star());\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut urls = Vec::new();\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.hyper.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n\n let should_redirect = match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found |\n StatusCode::SeeOther => {\n body = None;\n match method {\n Method::Get | Method::Head => {},\n _ => {\n method = Method::Get;\n }\n }\n true\n },\n StatusCode::TemporaryRedirect |\n StatusCode::PermanentRedirect => {\n if let Some(ref body) = body {\n body::can_reset(body)\n } else {\n true\n }\n },\n _ => false,\n };\n\n if should_redirect {\n let loc = {\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(loc) => {\n headers.set(Referer(url.to_string()));\n urls.push(url);\n if check_redirect(&client.redirect_policy.lock().unwrap(), &loc, &urls)? 
{\n loc\n } else {\n debug!(\"redirect_policy disallowed redirection to '{}'\", loc);\n return Ok(Response {\n inner: res\n })\n }\n },\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to {:?} '{}'\", method, url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n } else {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n}\n\nimpl fmt::Debug for RequestBuilder {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"RequestBuilder\")\n .field(\"method\", &self.method)\n .field(\"url\", &self.url)\n .field(\"headers\", &self.headers)\n .finish()\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n #[inline]\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n #[inline]\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n\n \/\/\/ Try and deserialize the response body as JSON.\n #[inline]\n pub fn json<T: Deserialize>(&mut self) -> ::Result<T> {\n serde_json::from_reader(self).map_err(::Error::from)\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n\nimpl fmt::Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Response\")\n .field(\"status\", self.status())\n .field(\"headers\", self.headers())\n .field(\"version\", self.version())\n .finish()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use ::body;\n use hyper::method::Method;\n use hyper::Url;\n use hyper::header::{Host, Headers, ContentType};\n use std::collections::HashMap;\n use serde_urlencoded;\n use serde_json;\n\n #[test]\n fn basic_get_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.get(some_url);\n\n assert_eq!(r.method, Method::Get);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_head_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.head(some_url);\n\n assert_eq!(r.method, Method::Head);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_post_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.post(some_url);\n\n assert_eq!(r.method, Method::Post);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn add_header() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n \/\/ Add a copy of the header to the request builder\n r = r.header(header.clone());\n\n \/\/ then check it was actually added\n assert_eq!(r.headers.get::<Host>(), Some(&header));\n }\n\n #[test]\n fn add_headers() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n let mut headers = Headers::new();\n headers.set(header);\n\n \/\/ Add a copy of the headers to the request builder\n r = r.headers(headers.clone());\n\n \/\/ then make 
sure they were added correctly\n assert_eq!(r.headers, headers);\n }\n\n #[test]\n fn add_body() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let body = \"Some interesting content\";\n\n r = r.body(body);\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n assert_eq!(buf, body);\n }\n\n #[test]\n fn add_form() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut form_data = HashMap::new();\n form_data.insert(\"foo\", \"bar\");\n\n r = r.form(&form_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::form_url_encoded()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_urlencoded::to_string(&form_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n\n #[test]\n fn add_json() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut json_data = HashMap::new();\n json_data.insert(\"foo\", \"bar\");\n\n r = r.json(&json_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::json()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_json::to_string(&json_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Basic logging<commit_after>\n#[macro_export]\nmacro_rules! debug {\n\t($fm:expr) => ({\n\t\tprintln!(\"[DEBUG]: {}\", format!($fm))\n\t});\n\t($fm:expr, $($arg:expr),*) => ({\n\t\tprintln!(\"[DEBUG]: {}\", format!($fm, $($arg,)* ))\n\t});\n}\n\n#[macro_export]\nmacro_rules! info {\n\t($fm:expr) => ({\n\t\tprintln!(\"[INFO]: {}\", format!($fm))\n\t});\n\t($fm:expr, $($arg:expr)*) => ({\n\t\tprintln!(\"[INFO]: {}\", format!($fm, $($arg,)* ))\n\t});\n}\n\n#[macro_export]\nmacro_rules! warn {\n\t($fm:expr) => ({\n\t\tprintln!(\"[WARN]: {}\", format!($fm))\n\t});\n\t($fm:expr, $($arg:expr),*) => ({\n\t\tprintln!(\"[WARN]: {}\", format!($fm, $($arg,)* ))\n\t});\n}\n\n#[macro_export]\nmacro_rules! 
error {\n\t($fm:expr) => ({\n\t\tprintln!(\"[ERROR]: {}\", format!($fm))\n\t});\n\t($fm:expr, $($arg:expr)*) => ({\n\t\tpanic!(\"[ERROR]: {}\", format!($fm, $($arg,)* ))\n\t});\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added whitespace to `src\/macros.rs` so that `rustfmt` doesn't trash the opening comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add parser.rs file<commit_after>\/\/\/ For some reason, Pest cannot find the grammar file if listed in parser\/mod.rs, so I listed it here\nuse pest::Parser;\n\nuse ast::Expression;\n\nuse std::f64;\n\nconst _GRAMMAR: &'static str = include_str!(\"grammar\/grammar.pest\");\n\n#[derive(Parser)]\n#[grammar = \"grammar\/grammar.pest\"]\npub struct ExpressionParser;\n\npub fn parse(input: &'static str) -> Expression {\n \/\/ TODO: Remove unwrap\n let mut pairs = ExpressionParser::parse_str(Rule::expression_literal, input).unwrap();\n\n let first = pairs.nth(0).unwrap().into_inner().nth(0).unwrap();\n\n match first.as_rule() {\n Rule::boolean_literal =>\n Expression::Boolean(\n first.into_span().as_str().parse::<bool>().unwrap()\n ),\n \n Rule::identifier_literal =>\n Expression::Identifier(first.into_span().as_str().to_string()),\n\n \/\/ TODO: For now, just conver the value into a normal f64. No formatting!\n Rule::decimal_digits_literal =>\n Expression::Number(\n first.into_span().as_str().parse::<f64>().unwrap()\n ),\n\n Rule::binary_digits_literal =>\n \/\/ The parse function does not support binary digits, so do it the hard way\n Expression::Number(\n f64::from(i32::from_str_radix(&first.into_span().as_str()[2..], 2).unwrap())\n ),\n\n Rule::oct_digits_literal =>\n \/\/ The parse function does not support octal digits, so do it the hard way\n Expression::Number(\n f64::from(i32::from_str_radix(&first.into_span().as_str()[2..], 8).unwrap())\n ),\n\n Rule::hex_digits_literal =>\n \/\/ The parse function does not support hexadecimal digits, so do it the hard way\n Expression::Number(\n f64::from(i32::from_str_radix(&first.into_span().as_str()[2..], 16).unwrap())\n ),\n\n \/\/ Temporary\n _ => unreachable!()\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove use of std::num::ToPrimitive<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>First tranche of SPIR-V reflection -- read uniform offsets and binding points, then update uniform offsets<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for network<commit_after>\/\/\n\/\/ Sysinfo\n\/\/\n\/\/ Copyright (c) 2020 Guillaume Gomez\n\/\/\n\n\/\/ This test is used to ensure that the processors are loaded whatever the method\n\/\/ used to initialize `System`.\n\nextern crate sysinfo;\n\n#[test]\nfn test_processor() {\n use sysinfo::{NetworksExt, SystemExt};\n\n let s = sysinfo::System::new();\n assert_eq!(s.get_networks().iter().count(), 0);\n let s = sysinfo::System::new_all();\n assert!(s.get_networks().iter().count() > 0);\n}\n<|endoftext|>"} {"text":"<commit_before>use libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\n\ngenerate_error_module!(\n generate_error_types!(GitHookError, GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n NoConfigError => \"No Configuration\",\n ConfigTypeError => \"Configuration value type wrong\",\n RuntimeInformationSetupError => \"Couldn't setup runtime information for git hook\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => 
\"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature object\"\n );\n);\n\nimpl GitHookError {\n\n pub fn inside_of<T>(self, h: HEK) -> HookResult<T> {\n Err(HE::new(h, Some(Box::new(self))))\n }\n\n}\n\nimpl From<GitHookError> for HE {\n\n fn from(he: GitHookError) -> HE {\n HE::new(HEK::HookExecutionError, Some(Box::new(he)))\n }\n\n}\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<commit_msg>Add error kind and impl From<Git2Error> for HE<commit_after>use git2::Error as Git2Error;\n\nuse libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\n\ngenerate_error_module!(\n generate_error_types!(GitHookError, GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n NoConfigError => \"No Configuration\",\n ConfigTypeError => \"Configuration value type wrong\",\n RuntimeInformationSetupError => \"Couldn't setup runtime information for git hook\",\n RepositoryBackendError => \"Error in the git library\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => \"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature object\"\n );\n);\n\nimpl GitHookError {\n\n pub fn inside_of<T>(self, h: HEK) -> HookResult<T> {\n Err(HE::new(h, Some(Box::new(self))))\n }\n\n}\n\nimpl From<GitHookError> for HE {\n\n fn from(he: GitHookError) -> HE {\n HE::new(HEK::HookExecutionError, Some(Box::new(he)))\n }\n\n}\n\nimpl From<Git2Error> for GitHookError {\n\n fn from(ge: Git2Error) -> GitHookError {\n GitHookError::new(GitHookErrorKind::RepositoryBackendError, Some(Box::new(ge)))\n }\n\n}\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<|endoftext|>"} {"text":"<commit_before>use std::fmt::{Debug, Formatter};\nuse std::fmt;\nuse std::ops::Deref;\n\nuse clap::ArgMatches;\n\nuse runtime::Runtime;\nuse module::Module;\n\nuse 
storage::parser::FileHeaderParser;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\nuse module::helpers::cli::create_tag_filter;\nuse module::helpers::cli::create_hash_filter;\nuse module::helpers::cli::create_text_header_field_grep_filter;\nuse module::helpers::cli::CliFileFilter;\n\nmod header;\n\nuse self::header::get_url_from_header;\nuse self::header::get_tags_from_header;\n\npub struct BM<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> BM<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> BM<'a> {\n BM {\n rt: rt,\n }\n }\n\n \/**\n * Subcommand: add\n *\/\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Yellow, Red};\n use std::process::exit;\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let url = matches.value_of(\"url\").map(String::from).unwrap(); \/\/ clap ensures this is present\n\n if !self.validate_url(&url, &parser) {\n error!(\"URL validation failed, exiting.\");\n exit(1);\n } else {\n debug!(\"Verification succeeded\");\n }\n\n let tags = matches.value_of(\"tags\").and_then(|s| {\n Some(s.split(\",\").map(String::from).collect())\n }).unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" url = '{:?}'\", url);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(url, tags);\n\n let fileid = self.rt\n .store()\n .new_file_with_header(self, header);\n\n let result = self.rt\n .store()\n .load(self, &parser, &fileid)\n .map(|file| {\n info!(\"{}\", Yellow.paint(format!(\"Created file in memory: {}\", fileid)));\n self.rt\n .store()\n .persist(&parser, file)\n })\n .unwrap_or(false);\n\n if result {\n info!(\"{}\", Red.paint(\"Adding worked\"));\n } else {\n info!(\"{}\", Green.paint(\"Adding failed\"));\n }\n\n result\n }\n\n fn validate_url<HP>(&self, url: &String, parser: &Parser<HP>) -> bool\n where HP: FileHeaderParser\n {\n use util::is_url;\n\n if !is_url(url) {\n error!(\"Url '{}' is not a valid URL. 
Will not store.\", url);\n return false;\n }\n\n let is_in_store = self.rt\n .store()\n .load_for_module(self, parser)\n .iter()\n .any(|file| {\n let f = file.deref().borrow();\n get_url_from_header(f.header()).map(|url_in_store| {\n &url_in_store == url\n }).unwrap_or(false)\n });\n\n if is_in_store {\n error!(\"URL '{}' seems to be in the store already\", url);\n return false;\n }\n\n return true;\n }\n\n \/**\n * Subcommand: list\n *\/\n fn command_list(&self, matches: &ArgMatches) -> bool {\n use ui::file::{FilePrinter, TablePrinter};\n use std::ops::Deref;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let filter = {\n let hash_filter = create_hash_filter(matches, \"id\", true);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", true);\n let tags_filter = create_tag_filter(matches, \"tags\", true);\n hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter))\n };\n\n let files = self.rt\n .store()\n .load_for_module(self, &parser)\n .into_iter()\n .filter(|file| filter.filter_file(file));\n let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());\n\n printer.print_files_custom(files,\n &|file| {\n let fl = file.deref().borrow();\n let hdr = fl.header();\n let url = get_url_from_header(hdr).unwrap_or(String::from(\"Parser error\"));\n let tags = get_tags_from_header(hdr);\n\n debug!(\"Custom printer field: url = '{:?}'\", url);\n debug!(\"Custom printer field: tags = '{:?}'\", tags);\n\n vec![url, tags.join(\", \")]\n }\n );\n true\n }\n\n \/**\n * Subcommand: open\n *\/\n fn command_open(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Red};\n use open;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let filter : Box<CliFileFilter> = {\n let hash_filter = create_hash_filter(matches, \"id\", true);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", true);\n let tags_filter = create_tag_filter(matches, \"tags\", true);\n Box::new(hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter)))\n };\n let result = self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| filter.filter_file(file))\n .map(|file| {\n debug!(\"File loaded, can open now: {:?}\", file);\n let f = file.deref().borrow();\n get_url_from_header(f.header()).map(|url| {\n if open::that(&url[..]).is_ok() {\n info!(\"{}\", Green.paint(format!(\"open({})\", url)));\n true\n } else {\n info!(\"{}\", Red.paint(format!(\"could not open({})\", url)));\n false\n }\n })\n .unwrap_or(false)\n })\n .fold((0, 0), |acc, succeeded| {\n let (worked, failed) = acc;\n if succeeded {\n (worked + 1, failed)\n } else {\n (worked, failed + 1)\n }\n });\n\n let (succ, fail) = result;\n info!(\"{}\", Green.paint(format!(\"open() succeeded for {} files\", succ)));\n info!(\"{}\", Red.paint(format!( \"open() failed for {} files\", fail)));\n return fail == 0;\n }\n\n \/**\n * Subcommand: remove\n *\/\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Red};\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let filter = {\n let hash_filter = create_hash_filter(matches, \"id\", false);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", false);\n let tags_filter = create_tag_filter(matches, \"tags\", false);\n hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))\n };\n\n let result = self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| 
filter.filter_file(file))\n .map(|file| {\n debug!(\"File loaded, can remove now: {:?}\", file);\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n })\n .fold((0, 0), |acc, succeeded| {\n let (worked, failed) = acc;\n if succeeded {\n (worked + 1, failed)\n } else {\n (worked, failed + 1)\n }\n });\n\n let (worked, failed) = result;\n\n info!(\"{}\", Green.paint(format!(\"Removing succeeded for {} files\", worked)));\n info!(\"{}\", Red.paint(format!( \"Removing failed for {} files\", failed)));\n\n return failed == 0;\n }\n\n \/**\n * Subcommand: add_tags\n *\/\n fn command_add_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {\n let mut new_tags = old_tags.clone();\n new_tags.append(&mut cli_tags.clone());\n new_tags\n }, rebuild_header_with_tags)\n }\n\n \/**\n * Subcommand: rm_tags\n *\/\n fn command_rm_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {\n old_tags.clone()\n .into_iter()\n .filter(|tag| !cli_tags.contains(tag))\n .collect()\n }, rebuild_header_with_tags)\n }\n\n \/**\n * Subcommand: set_tags\n *\/\n fn command_set_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |_, cli_tags| {\n cli_tags.clone()\n }, rebuild_header_with_tags)\n }\n\n}\n\n\/**\n * Trait implementation for BM module\n *\/\nimpl<'a> Module<'a> for BM<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Yellow, Red};\n\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"open\") => {\n self.command_open(matches.subcommand_matches(\"open\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(\"add_tags\") => {\n self.command_add_tags(matches.subcommand_matches(\"add_tags\").unwrap())\n },\n\n Some(\"rm_tags\") => {\n self.command_rm_tags(matches.subcommand_matches(\"rm_tags\").unwrap())\n },\n\n Some(\"set_tags\") => {\n self.command_set_tags(matches.subcommand_matches(\"set_tags\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"{}\", Red.bold().paint(\"No command given, doing nothing\"));\n false\n },\n }\n }\n\n fn name(&self) -> &'static str {\n \"bookmark\"\n }\n\n fn runtime(&self) -> &Runtime {\n self.rt\n }\n}\n\nimpl<'a> Debug for BM<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"BM\"));\n Ok(())\n }\n\n}\n\n<commit_msg>Remove unused import<commit_after>use std::fmt::{Debug, Formatter};\nuse std::fmt;\nuse std::ops::Deref;\n\nuse clap::ArgMatches;\n\nuse runtime::Runtime;\nuse module::Module;\n\nuse storage::parser::FileHeaderParser;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\nuse module::helpers::cli::create_tag_filter;\nuse 
module::helpers::cli::create_hash_filter;\nuse module::helpers::cli::create_text_header_field_grep_filter;\nuse module::helpers::cli::CliFileFilter;\n\nmod header;\n\nuse self::header::get_url_from_header;\nuse self::header::get_tags_from_header;\n\npub struct BM<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> BM<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> BM<'a> {\n BM {\n rt: rt,\n }\n }\n\n \/**\n * Subcommand: add\n *\/\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Yellow, Red};\n use std::process::exit;\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let url = matches.value_of(\"url\").map(String::from).unwrap(); \/\/ clap ensures this is present\n\n if !self.validate_url(&url, &parser) {\n error!(\"URL validation failed, exiting.\");\n exit(1);\n } else {\n debug!(\"Verification succeeded\");\n }\n\n let tags = matches.value_of(\"tags\").and_then(|s| {\n Some(s.split(\",\").map(String::from).collect())\n }).unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" url = '{:?}'\", url);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(url, tags);\n\n let fileid = self.rt\n .store()\n .new_file_with_header(self, header);\n\n let result = self.rt\n .store()\n .load(self, &parser, &fileid)\n .map(|file| {\n info!(\"{}\", Yellow.paint(format!(\"Created file in memory: {}\", fileid)));\n self.rt\n .store()\n .persist(&parser, file)\n })\n .unwrap_or(false);\n\n if result {\n info!(\"{}\", Red.paint(\"Adding worked\"));\n } else {\n info!(\"{}\", Green.paint(\"Adding failed\"));\n }\n\n result\n }\n\n fn validate_url<HP>(&self, url: &String, parser: &Parser<HP>) -> bool\n where HP: FileHeaderParser\n {\n use util::is_url;\n\n if !is_url(url) {\n error!(\"Url '{}' is not a valid URL. 
Will not store.\", url);\n return false;\n }\n\n let is_in_store = self.rt\n .store()\n .load_for_module(self, parser)\n .iter()\n .any(|file| {\n let f = file.deref().borrow();\n get_url_from_header(f.header()).map(|url_in_store| {\n &url_in_store == url\n }).unwrap_or(false)\n });\n\n if is_in_store {\n error!(\"URL '{}' seems to be in the store already\", url);\n return false;\n }\n\n return true;\n }\n\n \/**\n * Subcommand: list\n *\/\n fn command_list(&self, matches: &ArgMatches) -> bool {\n use ui::file::{FilePrinter, TablePrinter};\n use std::ops::Deref;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let filter = {\n let hash_filter = create_hash_filter(matches, \"id\", true);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", true);\n let tags_filter = create_tag_filter(matches, \"tags\", true);\n hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter))\n };\n\n let files = self.rt\n .store()\n .load_for_module(self, &parser)\n .into_iter()\n .filter(|file| filter.filter_file(file));\n let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());\n\n printer.print_files_custom(files,\n &|file| {\n let fl = file.deref().borrow();\n let hdr = fl.header();\n let url = get_url_from_header(hdr).unwrap_or(String::from(\"Parser error\"));\n let tags = get_tags_from_header(hdr);\n\n debug!(\"Custom printer field: url = '{:?}'\", url);\n debug!(\"Custom printer field: tags = '{:?}'\", tags);\n\n vec![url, tags.join(\", \")]\n }\n );\n true\n }\n\n \/**\n * Subcommand: open\n *\/\n fn command_open(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Red};\n use open;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let filter : Box<CliFileFilter> = {\n let hash_filter = create_hash_filter(matches, \"id\", true);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", true);\n let tags_filter = create_tag_filter(matches, \"tags\", true);\n Box::new(hash_filter.and(Box::new(text_filter)).and(Box::new(tags_filter)))\n };\n let result = self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| filter.filter_file(file))\n .map(|file| {\n debug!(\"File loaded, can open now: {:?}\", file);\n let f = file.deref().borrow();\n get_url_from_header(f.header()).map(|url| {\n if open::that(&url[..]).is_ok() {\n info!(\"{}\", Green.paint(format!(\"open({})\", url)));\n true\n } else {\n info!(\"{}\", Red.paint(format!(\"could not open({})\", url)));\n false\n }\n })\n .unwrap_or(false)\n })\n .fold((0, 0), |acc, succeeded| {\n let (worked, failed) = acc;\n if succeeded {\n (worked + 1, failed)\n } else {\n (worked, failed + 1)\n }\n });\n\n let (succ, fail) = result;\n info!(\"{}\", Green.paint(format!(\"open() succeeded for {} files\", succ)));\n info!(\"{}\", Red.paint(format!( \"open() failed for {} files\", fail)));\n return fail == 0;\n }\n\n \/**\n * Subcommand: remove\n *\/\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::{Green, Red};\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let filter = {\n let hash_filter = create_hash_filter(matches, \"id\", false);\n let text_filter = create_text_header_field_grep_filter(matches, \"match\", \"URL\", false);\n let tags_filter = create_tag_filter(matches, \"tags\", false);\n hash_filter.or(Box::new(text_filter)).or(Box::new(tags_filter))\n };\n\n let result = self.rt\n .store()\n .load_for_module(self, &parser)\n .iter()\n .filter(|file| 
filter.filter_file(file))\n .map(|file| {\n debug!(\"File loaded, can remove now: {:?}\", file);\n let f = file.deref().borrow();\n self.rt.store().remove(f.id().clone())\n })\n .fold((0, 0), |acc, succeeded| {\n let (worked, failed) = acc;\n if succeeded {\n (worked + 1, failed)\n } else {\n (worked, failed + 1)\n }\n });\n\n let (worked, failed) = result;\n\n info!(\"{}\", Green.paint(format!(\"Removing succeeded for {} files\", worked)));\n info!(\"{}\", Red.paint(format!( \"Removing failed for {} files\", failed)));\n\n return failed == 0;\n }\n\n \/**\n * Subcommand: add_tags\n *\/\n fn command_add_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {\n let mut new_tags = old_tags.clone();\n new_tags.append(&mut cli_tags.clone());\n new_tags\n }, rebuild_header_with_tags)\n }\n\n \/**\n * Subcommand: rm_tags\n *\/\n fn command_rm_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |old_tags, cli_tags| {\n old_tags.clone()\n .into_iter()\n .filter(|tag| !cli_tags.contains(tag))\n .collect()\n }, rebuild_header_with_tags)\n }\n\n \/**\n * Subcommand: set_tags\n *\/\n fn command_set_tags(&self, matches: &ArgMatches) -> bool {\n use module::helpers::header::tags::data::alter_tags_in_files;\n use self::header::rebuild_header_with_tags;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n alter_tags_in_files(self, matches, &parser, |_, cli_tags| {\n cli_tags.clone()\n }, rebuild_header_with_tags)\n }\n\n}\n\n\/**\n * Trait implementation for BM module\n *\/\nimpl<'a> Module<'a> for BM<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n use ansi_term::Colour::Red;\n\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"open\") => {\n self.command_open(matches.subcommand_matches(\"open\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(\"add_tags\") => {\n self.command_add_tags(matches.subcommand_matches(\"add_tags\").unwrap())\n },\n\n Some(\"rm_tags\") => {\n self.command_rm_tags(matches.subcommand_matches(\"rm_tags\").unwrap())\n },\n\n Some(\"set_tags\") => {\n self.command_set_tags(matches.subcommand_matches(\"set_tags\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"{}\", Red.bold().paint(\"No command given, doing nothing\"));\n false\n },\n }\n }\n\n fn name(&self) -> &'static str {\n \"bookmark\"\n }\n\n fn runtime(&self) -> &Runtime {\n self.rt\n }\n}\n\nimpl<'a> Debug for BM<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"BM\"));\n Ok(())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add numeral sign<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add actual trait -.-<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test: simple globals<commit_after>#[macro_use]\nextern crate wayland_client as wayc;\nextern crate wayland_server as ways;\n\nmod helpers;\n\nmod server_utils {\n use ways::{Client, 
EventLoop, EventLoopHandle, GlobalHandler};\n use ways::protocol::wl_compositor::WlCompositor;\n\n struct CompositorHandler;\n\n impl GlobalHandler<WlCompositor> for CompositorHandler {\n fn bind(&mut self, evlh: &mut EventLoopHandle, client: &Client, global: WlCompositor) {}\n }\n\n \/\/ max supported version: 4\n pub fn insert_compositor(event_loop: &mut EventLoop, v: i32) {\n let hid = event_loop.add_handler(CompositorHandler);\n let _ = event_loop.register_global::<WlCompositor, CompositorHandler>(hid, v);\n }\n}\n\nwayland_env!(ClientEnv);\n\nmod client_utils {\n use super::ClientEnv;\n use wayc::{EnvHandler, EventQueue};\n use wayc::protocol::wl_registry::WlRegistry;\n\n pub fn insert_handler(event_queue: &mut EventQueue, registry: &WlRegistry) -> usize {\n let hid = event_queue.add_handler(EnvHandler::<ClientEnv>::new());\n event_queue.register::<_, EnvHandler<ClientEnv>>(registry, hid);\n hid\n }\n}\n\n#[test]\nfn simple_global() {\n \/\/ server setup\n \/\/\n let (mut server_display, mut server_event_loop) = ways::create_display();\n let socket_name = server_display\n .add_socket_auto()\n .expect(\"Failed to create a server socket.\");\n println!(\"{:?}\", socket_name);\n self::server_utils::insert_compositor(&mut server_event_loop, 1);\n\n \/\/ client setup\n \/\/\n let (mut client_display, mut client_event_queue) =\n wayc::connect_to(&socket_name).expect(\"Failed to connect to server.\");\n let client_registry = client_display.get_registry();\n let client_handler_hid = self::client_utils::insert_handler(&mut client_event_queue, &client_registry);\n\n \/\/ message passing\n \/\/\n client_display.flush().unwrap();\n \/\/ for some reason, two dispatches are needed\n server_event_loop.dispatch(Some(10)).unwrap();\n server_event_loop.dispatch(Some(10)).unwrap();\n server_display.flush_clients();\n client_event_queue.dispatch().unwrap();\n\n \/\/ result assertions\n \/\/\n let state = client_event_queue.state();\n let env = state.get_handler::<wayc::EnvHandler<ClientEnv>>(client_handler_hid);\n let globals = env.globals();\n assert!(globals.len() == 1);\n assert_eq!(globals[0], (1, \"wl_compositor\".into(), 1));\n}\n\n#[test]\nfn multi_versions() {\n \/\/ server setup\n \/\/\n let (mut server_display, mut server_event_loop) = ways::create_display();\n let socket_name = server_display\n .add_socket_auto()\n .expect(\"Failed to create a server socket.\");\n println!(\"{:?}\", socket_name);\n self::server_utils::insert_compositor(&mut server_event_loop, 4);\n self::server_utils::insert_compositor(&mut server_event_loop, 2);\n self::server_utils::insert_compositor(&mut server_event_loop, 3);\n self::server_utils::insert_compositor(&mut server_event_loop, 1);\n\n \/\/ client setup\n \/\/\n let (mut client_display, mut client_event_queue) =\n wayc::connect_to(&socket_name).expect(\"Failed to connect to server.\");\n let client_registry = client_display.get_registry();\n let client_handler_hid = self::client_utils::insert_handler(&mut client_event_queue, &client_registry);\n\n \/\/ message passing\n \/\/\n client_display.flush().unwrap();\n \/\/ for some reason, two dispatches are needed\n server_event_loop.dispatch(Some(10)).unwrap();\n server_event_loop.dispatch(Some(10)).unwrap();\n server_display.flush_clients();\n client_event_queue.dispatch().unwrap();\n\n \/\/ result assertions\n \/\/\n let state = client_event_queue.state();\n let env = state.get_handler::<wayc::EnvHandler<ClientEnv>>(client_handler_hid);\n let globals = env.globals();\n assert!(globals.len() == 4);\n let mut seen = 
[false; 4];\n for &(id, ref interface, version) in globals {\n assert!(interface == \"wl_compositor\");\n seen[version as usize - 1] = true;\n }\n assert_eq!(seen, [true, true, true, true]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fetch\/git: remove unused, pointless variable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a 'main.rs' file as example.<commit_after>\nextern crate \"xor-rs\" as xorfn;\nextern crate collections;\n\nfn main() {\n let source = [95, 80, 96, 71, 120, 25, 44, 92, 120, 71, 96, 79, 54];\n let result = xorfn::xor(source, [23, 53, 12, 43]);\n\n match collections::str::from_utf8(result.as_slice()) {\n Some(string) => println!(\"{}\", string),\n _ => {}\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>DRY fix - UI text<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Corrected implementation of FlattenedEmoji PartialEq<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style(Typo): Correct a spelling mistake<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added dual-buffer for AqDataDesc as well as the data vec.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Better level management<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>split up functions instead of chaining them, replace returns with prints<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Quit closes AgMD1 driver<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add unit tests for math module<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>examples: Make example for SureNode code<commit_after>\/\/\/ Walking example.\n\nuse naming::Naming;\nuse rsure::{\n Estimate,\n Result,\n node::{\n self,\n fs,\n load,\n HashUpdater,\n NodeWriter,\n Source,\n SureNode,\n },\n};\nuse std::{\n path::Path,\n};\n\nfn main() -> Result<()> {\n rsure::log_init();\n\n let base = \".\";\n\n let mut naming = Naming::new(\".\", \"haha\", \"dat\", true);\n\n let mut estimate = Estimate { files: 0, bytes: 0 };\n let tmp_name = {\n let mut nf = naming.new_temp(true)?;\n naming.add_cleanup(nf.name.clone());\n let src = fs::scan_fs(base)?\n .inspect(|node| {\n match node {\n Ok(n @ SureNode::File { .. 
}) => {\n if n.needs_hash() {\n estimate.files += 1;\n estimate.bytes += n.size();\n }\n }\n _ => (),\n }\n });\n node::save_to(&mut nf.writer, src)?;\n nf.name\n };\n println!(\"name: {:?}\", tmp_name);\n\n \/\/ Update the hashes.\n let loader = Loader { name: &tmp_name };\n let hu = HashUpdater::new(loader, &mut naming);\n let hm = hu.compute(base, &estimate)?;\n let nf = naming.new_temp(true)?;\n hm.merge(&mut NodeWriter::new(nf.writer)?)?;\n\n naming.rename_to_main(&nf.name)?;\n\n Ok(())\n}\n\nstruct Loader<'a> {\n name: &'a Path,\n}\n\nimpl<'a> Source for Loader<'a> {\n fn iter(&mut self) -> Result<Box<dyn Iterator<Item = Result<SureNode>> + Send>> {\n Ok(Box::new(load(self.name)?))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests\/mcfg.rs: added test_example_mcfg_to_negra()<commit_after>extern crate log_domain;\nextern crate rustomata;\n\nuse log_domain::LogDomain;\nuse std::fs::File;\nuse std::io::Read;\n\nuse rustomata::pmcfg::{PMCFG, PMCFGRule, separate_terminal_rules};\nuse rustomata::pmcfg::negra::to_negra;\nuse rustomata::recognisable::Recognisable;\nuse rustomata::tree_stack_automaton::*;\nuse rustomata::tree_stack_automaton::{PosState, TreeStackAutomaton};\n\nfn example_tree_stack_automaton()\n -> TreeStackAutomaton<PosState<PMCFGRule<String, String, LogDomain<f64>>>, String, LogDomain<f64>>\n{\n let mut grammar_file = File::open(\"examples\/example.mcfg\").unwrap();\n let mut grammar_string = String::new();\n let _ = grammar_file.read_to_string(&mut grammar_string);\n let grammar: PMCFG<String, String, _> = grammar_string.parse().unwrap();\n\n let automaton = TreeStackAutomaton::from(grammar);\n automaton\n}\n\n#[test]\nfn test_example_mcfg_to_negra() {\n let automaton = example_tree_stack_automaton();\n let tree_stack = automaton.recognise(vec![\n String::from(\"a\"), String::from(\"a\"), String::from(\"b\"),\n String::from(\"c\"), String::from(\"c\"), String::from(\"d\")\n ]).next().unwrap().0;\n\n let syntax_tree = to_abstract_syntax_tree(tree_stack.storage.to_tree());\n let separated_syntax_tree = separate_terminal_rules(&syntax_tree);\n\n let negra_string = to_negra(&separated_syntax_tree, 0);\n let negra_control_string = String::from(\n \"#BOS 0\\n\\\n a\\ta\\t--\\t--\\t1\\n\\\n a\\ta\\t--\\t--\\t2\\n\\\n b\\tb\\t--\\t--\\t3\\n\\\n c\\tc\\t--\\t--\\t1\\n\\\n c\\tc\\t--\\t--\\t2\\n\\\n d\\td\\t--\\t--\\t3\\n\\\n #1\\tA\\t--\\t--\\t4\\n\\\n #2\\tA\\t--\\t--\\t1\\n\\\n #3\\tB\\t--\\t--\\t4\\n\\\n #4\\tS\\t--\\t--\\t0\\n\\\n #EOS 0\"\n );\n\n assert_eq!(negra_control_string, negra_string);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: xkcd knapsack problem.<commit_after>\/\/! xkcd Knapsack Problem.\n\/\/!\n\/\/! https:\/\/en.wikipedia.org\/wiki\/Knapsack_problem\n\/\/! 
https:\/\/xkcd.com\/287\/\n\nextern crate num_rational;\nextern crate num_traits;\nextern crate puzzle_solver;\n\nuse num_rational::Ratio;\nuse num_traits::ToPrimitive;\nuse puzzle_solver::{LinExpr,Puzzle,Val};\n\n#[test]\nfn xkcd_knapsack() {\n let menu = [\n (Ratio::new(2_15, 100), \"Mixed Fruit\"),\n (Ratio::new(2_75, 100), \"French Fries\"),\n (Ratio::new(3_35, 100), \"Side Salad\"),\n (Ratio::new(3_55, 100), \"Hot Wings\"),\n (Ratio::new(4_20, 100), \"Mozzarella Sticks\"),\n (Ratio::new(5_80, 100), \"Sampler Plate\") ];\n\n let mut sys = Puzzle::new();\n let mut vars = Vec::with_capacity(menu.len());\n let total = Ratio::new(15_05, 100);\n\n for &(cost, _) in menu.iter() {\n let num = (total \/ cost).floor().to_integer();\n let var = sys.new_var_with_candidates(&(0..(num + 1)).collect::<Vec<Val>>());\n vars.push(var)\n }\n\n sys.equals(total, vars.iter().zip(menu.iter()).fold(LinExpr::from(0),\n |sum, (&var, &(cost, _))| sum + var * cost));\n\n let solutions = sys.solve_all();\n assert_eq!(solutions.len(), 2);\n\n for dict in solutions.iter() {\n println!(\"\");\n for (&var, &(cost, string)) in vars.iter().zip(menu.iter()) {\n let numer = cost.numer().to_f32().unwrap();\n let denom = cost.denom().to_f32().unwrap();\n println!(\" {} x {:.2} {}\", dict[var], numer \/ denom, string);\n }\n }\n\n println!(\"xkcd_knapsack: {} guesses\", sys.num_guesses());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added 7b<commit_after><|endoftext|>"} {"text":"<commit_before>use std::str::raw::from_c_str;\nuse std::ptr::read;\nuse std::collections::hashmap::HashMap;\n\nmod c {\n #![allow(non_camel_case_types)]\n extern crate libc;\n pub use self::libc::{\n c_char,\n c_int,\n uid_t,\n gid_t,\n time_t\n };\n\n pub struct c_passwd {\n pub pw_name: *const c_char, \/\/ login name\n pub pw_passwd: *const c_char,\n pub pw_uid: c_int, \/\/ user ID\n pub pw_gid: c_int, \/\/ group ID\n pub pw_change: time_t,\n pub pw_class: *const c_char,\n pub pw_gecos: *const c_char, \/\/ full name\n pub pw_dir: *const c_char, \/\/ login dir\n pub pw_shell: *const c_char, \/\/ login shell\n pub pw_expire: time_t \/\/ password expiry time\n }\n\n pub struct c_group {\n pub gr_name: *const c_char, \/\/ group name\n pub gr_passwd: *const c_char, \/\/ password\n pub gr_gid: gid_t, \/\/ group id\n pub gr_mem: *const *const c_char, \/\/ names of users in the group\n }\n\n extern {\n pub fn getpwuid(uid: c_int) -> *const c_passwd;\n pub fn getgrgid(gid: uid_t) -> *const c_group;\n pub fn getuid() -> libc::c_int;\n }\n}\n\npub struct Unix {\n user_names: HashMap<u32, Option<String>>, \/\/ mapping of user IDs to user names\n group_names: HashMap<u32, Option<String>>, \/\/ mapping of groups IDs to group names\n groups: HashMap<u32, bool>, \/\/ mapping of group IDs to whether the current user is a member\n pub uid: u32, \/\/ current user's ID\n pub username: String, \/\/ current user's name\n}\n\nimpl Unix {\n pub fn empty_cache() -> Unix {\n let uid = unsafe { c::getuid() };\n let infoptr = unsafe { c::getpwuid(uid as i32) };\n let info = unsafe { infoptr.to_option().unwrap() }; \/\/ the user has to have a name\n\n let username = unsafe { from_c_str(info.pw_name) };\n\n let mut user_names = HashMap::new();\n user_names.insert(uid as u32, Some(username.clone()));\n\n \/\/ Unix groups work like this: every group has a list of\n \/\/ users, referred to by their names. But, every user also has\n \/\/ a primary group, which isn't in this list. 
So handle this\n \/\/ case immediately after we look up the user's details.\n let mut groups = HashMap::new();\n groups.insert(info.pw_gid as u32, true);\n\n Unix {\n user_names: user_names,\n group_names: HashMap::new(),\n uid: uid as u32,\n username: username,\n groups: groups,\n }\n }\n\n pub fn get_user_name(&self, uid: u32) -> Option<String> {\n self.user_names[uid].clone()\n }\n\n pub fn get_group_name(&self, gid: u32) -> Option<String> {\n self.group_names[gid].clone()\n }\n\n pub fn is_group_member(&self, gid: u32) -> bool {\n self.groups[gid]\n }\n\n pub fn load_user(&mut self, uid: u32) {\n let pw = unsafe { c::getpwuid(uid as i32) };\n if pw.is_not_null() {\n let username = unsafe { Some(from_c_str(read(pw).pw_name)) };\n self.user_names.insert(uid, username);\n }\n else {\n self.user_names.insert(uid, None);\n }\n }\n\n fn group_membership(group: *const *const i8, uname: &String) -> bool {\n let mut i = 0;\n\n \/\/ The list of members is a pointer to a pointer of\n \/\/ characters, terminated by a null pointer. So the first call\n \/\/ to `to_option` will always succeed, as that memory is\n \/\/ guaranteed to be there (unless we go past the end of RAM).\n \/\/ The second call will return None if it's a null pointer.\n\n loop {\n match unsafe { group.offset(i).as_ref().unwrap().as_ref() } {\n Some(username) => {\n if unsafe { from_c_str(username) } == *uname {\n return true;\n }\n }\n None => {\n return false;\n }\n }\n i += 1;\n }\n }\n\n pub fn load_group(&mut self, gid: u32) {\n match unsafe { c::getgrgid(gid).to_option() } {\n None => {\n self.group_names.find_or_insert(gid, None);\n self.groups.find_or_insert(gid, false);\n },\n Some(r) => {\n let group_name = unsafe { Some(from_c_str(r.gr_name)) };\n self.groups.find_or_insert(gid, Unix::group_membership(r.gr_mem, &self.username));\n self.group_names.find_or_insert(gid, group_name);\n }\n }\n \n }\n}\n\n<commit_msg>Replace to_option() with as_ref()<commit_after>use std::str::raw::from_c_str;\nuse std::ptr::read;\nuse std::collections::hashmap::HashMap;\n\nmod c {\n #![allow(non_camel_case_types)]\n extern crate libc;\n pub use self::libc::{\n c_char,\n c_int,\n uid_t,\n gid_t,\n time_t\n };\n\n #[repr(C)]\n pub struct c_passwd {\n pub pw_name: *const c_char, \/\/ login name\n pub pw_passwd: *const c_char,\n pub pw_uid: c_int, \/\/ user ID\n pub pw_gid: c_int, \/\/ group ID\n pub pw_change: time_t,\n pub pw_class: *const c_char,\n pub pw_gecos: *const c_char, \/\/ full name\n pub pw_dir: *const c_char, \/\/ login dir\n pub pw_shell: *const c_char, \/\/ login shell\n pub pw_expire: time_t \/\/ password expiry time\n }\n\n #[repr(C)]\n pub struct c_group {\n pub gr_name: *const c_char, \/\/ group name\n pub gr_passwd: *const c_char, \/\/ password\n pub gr_gid: gid_t, \/\/ group id\n pub gr_mem: *const *const c_char, \/\/ names of users in the group\n }\n\n extern {\n pub fn getpwuid(uid: c_int) -> *const c_passwd;\n pub fn getgrgid(gid: uid_t) -> *const c_group;\n pub fn getuid() -> libc::c_int;\n }\n}\n\npub struct Unix {\n user_names: HashMap<u32, Option<String>>, \/\/ mapping of user IDs to user names\n group_names: HashMap<u32, Option<String>>, \/\/ mapping of groups IDs to group names\n groups: HashMap<u32, bool>, \/\/ mapping of group IDs to whether the current user is a member\n pub uid: u32, \/\/ current user's ID\n pub username: String, \/\/ current user's name\n}\n\nimpl Unix {\n pub fn empty_cache() -> Unix {\n let uid = unsafe { c::getuid() };\n let infoptr = unsafe { c::getpwuid(uid as i32) };\n let info = unsafe 
{ infoptr.as_ref().unwrap() }; \/\/ the user has to have a name\n\n let username = unsafe { from_c_str(info.pw_name) };\n\n let mut user_names = HashMap::new();\n user_names.insert(uid as u32, Some(username.clone()));\n\n \/\/ Unix groups work like this: every group has a list of\n \/\/ users, referred to by their names. But, every user also has\n \/\/ a primary group, which isn't in this list. So handle this\n \/\/ case immediately after we look up the user's details.\n let mut groups = HashMap::new();\n groups.insert(info.pw_gid as u32, true);\n\n Unix {\n user_names: user_names,\n group_names: HashMap::new(),\n uid: uid as u32,\n username: username,\n groups: groups,\n }\n }\n\n pub fn get_user_name(&self, uid: u32) -> Option<String> {\n self.user_names[uid].clone()\n }\n\n pub fn get_group_name(&self, gid: u32) -> Option<String> {\n self.group_names[gid].clone()\n }\n\n pub fn is_group_member(&self, gid: u32) -> bool {\n self.groups[gid]\n }\n\n pub fn load_user(&mut self, uid: u32) {\n let pw = unsafe { c::getpwuid(uid as i32) };\n if pw.is_not_null() {\n let username = unsafe { Some(from_c_str(read(pw).pw_name)) };\n self.user_names.insert(uid, username);\n }\n else {\n self.user_names.insert(uid, None);\n }\n }\n\n fn group_membership(group: *const *const i8, uname: &String) -> bool {\n let mut i = 0;\n\n \/\/ The list of members is a pointer to a pointer of\n \/\/ characters, terminated by a null pointer. So the first call\n \/\/ to `as_ref` will always succeed, as that memory is\n \/\/ guaranteed to be there (unless we go past the end of RAM).\n \/\/ The second call will return None if it's a null pointer.\n\n loop {\n match unsafe { group.offset(i).as_ref().unwrap().as_ref() } {\n Some(username) => {\n if unsafe { from_c_str(username) } == *uname {\n return true;\n }\n }\n None => {\n return false;\n }\n }\n i += 1;\n }\n }\n\n pub fn load_group(&mut self, gid: u32) {\n match unsafe { c::getgrgid(gid).as_ref() } {\n None => {\n self.group_names.find_or_insert(gid, None);\n self.groups.find_or_insert(gid, false);\n },\n Some(r) => {\n let group_name = unsafe { Some(from_c_str(r.gr_name)) };\n self.groups.find_or_insert(gid, Unix::group_membership(r.gr_mem, &self.username));\n self.group_names.find_or_insert(gid, group_name);\n }\n }\n \n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>extern crate sdl2;\nextern crate sdl2_sys;\nextern crate libc;\n\nextern crate sdl3d;\n\nuse sdl3d::vid::*;\nuse sdl3d::start;\n\nfn main() {\n use sdl2::pixels::Color::RGB;\n\n let mut engine = start::Engine::new(1280, 720, \"Hello!\".to_string(), 1 as usize);\n\n engine.renderer.set_draw_color(RGB(20, 40, 60));\n engine.renderer.clear();\n engine.renderer.set_draw_color(RGB(200, 200, 200));\n\n let point1 = DepthPoint::new(0.0, -0.5, 2.0);\n let point2 = DepthPoint::new(0.5, 0.5, 2.0);\n let point3 = DepthPoint::new(-0.5, 0.5, 2.0);\n\n let triangle = Triangle::new(point1, point2, point3, 0.0, 0.0, 0.0);\n\n 'game_loop: loop {\n engine.camera_z += 0.01;\n engine.renderer.set_draw_color(RGB(20, 40, 60));\n engine.renderer.clear();\n engine.renderer.set_draw_color(RGB(200, 200, 200));\n\n engine.render_queue.push(triangle);\n\n engine.render();\n\n engine.renderer.present();\n std::thread::sleep(std::time::Duration::from_millis(33));\n }\n}<commit_msg>Made triangle test compatible with the current library<commit_after>extern crate orbclient;\n\nextern crate sdl3d;\n\nuse sdl3d::vid::*;\nuse sdl3d::start;\n\nfn main() {\n\n let mut engine = start::Engine::new(1280, 720, \"Hello!\", 1 as usize);\n\n 
engine.window.set(Color::new(20, 40, 60).orb_color());\n\n let point1 = DepthPoint::new(0.0, -0.5, 2.0);\n let point2 = DepthPoint::new(0.5, 0.5, 2.0);\n let point3 = DepthPoint::new(-0.5, 0.5, 2.0);\n\n let triangle = Triangle::new(point1, point2, point3, 0.0, 0.0, 0.0, Color::new(200, 200, 200));\n\n 'game_loop: loop {\n engine.window.set(Color::new(20, 40, 60).orb_color());\n\n\n engine.camera_z += 0.01;\n\n engine.render_queue.push(triangle);\n\n engine.render();\n\n engine.window.sync();\n std::thread::sleep(std::time::Duration::from_millis(33));\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Change window title.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Moving thread spawning into functions themselves to reduce duplicate variable types and clean things up<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor some redundant code in command dispatch.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented some error handling<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Comment out debugger stuff<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug module<commit_after>\/\/\/ Write to console if the debug option is set (with newline)\n#[macro_export]\nmacro_rules! debugln {\n ($e:expr, $($arg:tt)*) => ({\n if $e.options.debug {\n println!($($arg)*);\n }\n });\n}\n\n\/\/\/ Write to console if the debug option is set\n#[macro_export]\nmacro_rules! debug {\n ($e:expr, $($arg:tt)*) => ({\n if $e.options.debug {\n print!($($arg)*);\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::string::{String, ToString};\n\nuse graphics::display::Display;\n\nuse schemes::{KScheme, Resource, ResourceSeek, URL};\n\npub struct DisplayScheme;\n\n\/\/ Should there only be one display per session?\npub struct DisplayResource {\n\tpub display: Box<Display>,\n\tpub seek: usize,\n}\n\nimpl Resource for DisplayResource {\n\t\/\/ can't think of when you would wish to duplicate a display\n\tfn dup(&self) -> Option<Box<Resource>> {\n\t\tNone\n\t}\n\n\t\/\/\/ Return the URL for display resource\n\tfn url(&self) -> URL {\n\t\treturn URL::from_string(&(\"display:\/\/\".to_string()));\n\t}\n\n\t\/\/ not sure what to return here\n\tfn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n\t\tNone\n\t}\n\n\n\tfn write(&mut self, buf: &[u8]) -> Option<usize> {\n\t\tlet display = &mut self.display;\n\n\t\tlet size = cmp::min(display.size - self.seek, buf.len());\n\t\tunsafe {\n\t\t\tDisplay::copy_run(buf.as_ptr() as usize,\n\t\t\t display.offscreen + self.seek,\n\t\t\t size);\n\t\t}\n\t\tself.seek += size;\n\t\treturn Some(size);\n\t}\n\n\tfn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n\t\tlet end = self.display.size;\n\n\t\tself.seek = match pos {\n\t\t\tResourceSeek::Start(offset) => cmp::min(end, cmp::max(0, offset)),\n\t\t\tResourceSeek::Current(offset) => cmp::min(end, cmp::max(0, self.seek as isize + offset) as usize),\n\t\t\tResourceSeek::End(offset) => cmp::min(end, cmp::max(0, end as isize + offset) as usize),\n\t\t};\n\n\t\treturn Some(self.seek);\n\t}\n\n\tfn sync(&mut self) -> bool {\n\t\tself.display.flip();\n\t\treturn true;\n\t}\n}\n\nimpl KScheme for DisplayScheme {\n\tfn scheme(&self) -> String {\n\t\treturn \"display\".to_string();\n\t}\n\n\tfn open(&mut self, url: &URL) -> Option<Box<Resource>> {\n\t\t\/\/ TODO: ponder these things:\n\t\t\/\/ - 
should display:\/\/ be the only only valid url\n\t\t\/\/ for this scheme?\n\t\t\/\/ - maybe \"read\" should support displays at some other location\n\t\t\/\/ like built in screen sharing capability or something\n\t\tunsafe {\n\t\t\treturn Some(box DisplayResource {\n\t\t\t display: Display::root(),\n\t\t\t seek: 0,\n\t\t\t });\n\t\t}\n\t}\n}\n<commit_msg>Convert tabs to spaces in `kernel::schemes::display`<commit_after>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::string::{String, ToString};\n\nuse graphics::display::Display;\n\nuse schemes::{KScheme, Resource, ResourceSeek, URL};\n\npub struct DisplayScheme;\n\n\/\/ Should there only be one display per session?\npub struct DisplayResource {\n pub display: Box<Display>,\n pub seek: usize,\n}\n\nimpl Resource for DisplayResource {\n \/\/ can't think of when you would wish to duplicate a display\n fn dup(&self) -> Option<Box<Resource>> {\n None\n }\n\n \/\/\/ Return the URL for display resource\n fn url(&self) -> URL {\n return URL::from_string(&(\"display:\/\/\".to_string()));\n }\n\n \/\/ not sure what to return here\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n None\n }\n\n\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let display = &mut self.display;\n\n let size = cmp::min(display.size - self.seek, buf.len());\n unsafe {\n Display::copy_run(buf.as_ptr() as usize,\n display.offscreen + self.seek,\n size);\n }\n self.seek += size;\n return Some(size);\n }\n\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n let end = self.display.size;\n\n self.seek = match pos {\n ResourceSeek::Start(offset) => cmp::min(end, cmp::max(0, offset)),\n ResourceSeek::Current(offset) => cmp::min(end, cmp::max(0, self.seek as isize + offset) as usize),\n ResourceSeek::End(offset) => cmp::min(end, cmp::max(0, end as isize + offset) as usize),\n };\n\n return Some(self.seek);\n }\n\n fn sync(&mut self) -> bool {\n self.display.flip();\n return true;\n }\n}\n\nimpl KScheme for DisplayScheme {\n fn scheme(&self) -> String {\n return \"display\".to_string();\n }\n\n fn open(&mut self, url: &URL) -> Option<Box<Resource>> {\n \/\/ TODO: ponder these things:\n \/\/ - should display:\/\/ be the only only valid url\n \/\/ for this scheme?\n \/\/ - maybe \"read\" should support displays at some other location\n \/\/ like built in screen sharing capability or something\n unsafe {\n return Some(box DisplayResource {\n display: Display::root(),\n seek: 0,\n });\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::arc::Arc;\n\nuse arch::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch, context_userspace, Context, ContextMemory};\nuse arch::elf::Elf;\nuse arch::memory;\nuse arch::regs::Regs;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse common::slice::GetSlice;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse fs::Url;\n\nuse system::error::{Error, Result, ENOEXEC};\n\nfn execute_inner(url: Url) -> Result<(*mut Context, usize)> {\n let mut vec: Vec<u8> = Vec::new();\n\n {\n let mut resource = try!(url.open());\n 'reading: loop {\n let mut bytes = [0; 4096];\n match resource.read(&mut bytes) {\n Ok(0) => break 'reading,\n Ok(count) => vec.push_all(bytes.get_slice(.. 
count)),\n Err(err) => return Err(err)\n }\n }\n }\n\n match Elf::from(&vec) {\n Ok(executable) => {\n let entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n\n let offset = virtual_address % 4096;\n\n let physical_address = memory::alloc(virtual_size + offset);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy((physical_address + offset) as *mut u8,\n (executable.data.as_ptr() as usize + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n if segment.mem_len > segment.file_len {\n ::memset((physical_address + offset + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n }\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address - offset,\n virtual_size: virtual_size + offset,\n writeable: segment.flags & 2 == 2,\n allocated: true,\n });\n }\n }\n }\n\n if entry > 0 && ! memory.is_empty() {\n let mut contexts = ::env().contexts.lock();\n let mut context = try!(contexts.current_mut());\n\n \/\/debugln!(\"{}: {}: execute {}\", context.pid, context.name, url.string);\n\n context.name = url.string;\n context.cwd = Arc::new(UnsafeCell::new(unsafe { (*context.cwd.get()).clone() }));\n\n unsafe { context.unmap() };\n context.memory = Arc::new(UnsafeCell::new(memory));\n unsafe { context.map() };\n\n Ok((context.deref_mut(), entry))\n } else {\n Err(Error::new(ENOEXEC))\n }\n },\n Err(msg) => {\n debugln!(\"execute: failed to exec '{}': {}\", url.string, msg);\n Err(Error::new(ENOEXEC))\n }\n }\n}\n\npub fn execute_outer(context_ptr: *mut Context, entry: usize, mut args: Vec<String>) -> ! {\n Context::spawn(\"kexec\".to_string(), box move || {\n let context = unsafe { &mut *context_ptr };\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n while let Some(mut arg) = args.pop() {\n if ! 
arg.ends_with('\\0') {\n arg.push('\\0');\n }\n\n let physical_address = arg.as_ptr() as usize;\n let virtual_address = context.next_mem();\n let virtual_size = arg.len();\n\n mem::forget(arg);\n\n unsafe {\n (*context.memory.get()).push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: false,\n allocated: true,\n });\n }\n\n context_args.push(virtual_address as usize);\n argc += 1;\n }\n context_args.push(argc);\n\n \/\/TODO: No default heap, fix brk\n {\n let virtual_address = context.next_mem();\n let virtual_size = 4096;\n let physical_address = unsafe { memory::alloc(virtual_size) };\n if physical_address > 0 {\n unsafe {\n (*context.memory.get()).push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: true,\n allocated: true\n });\n }\n }\n }\n\n context.regs = Regs::default();\n context.regs.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: unsafe { memory::alloc(CONTEXT_STACK_SIZE) },\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true,\n allocated: true,\n });\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n unsafe { ptr::write(sp as *mut usize, *arg) };\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n unsafe {\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(1 << 9);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n\n if let Some(vfork) = context.vfork.take() {\n unsafe { (*vfork).blocked = false; }\n }\n });\n\n loop {\n unsafe { context_switch() };\n }\n}\n\n\/\/\/ Execute an executable\npub fn execute(args: Vec<String>) -> Result<usize> {\n let contexts = ::env().contexts.lock();\n let current = try!(contexts.current());\n\n if let Ok((context_ptr, entry)) = execute_inner(Url::from_string(current.canonicalize(args.get(0).map_or(\"\", |p| &p)))) {\n execute_outer(context_ptr, entry, args);\n }else{\n let (context_ptr, entry) = try!(execute_inner(Url::from_string(\"file:\/bin\/\".to_string() + args.get(0).map_or(\"\", |p| &p))));\n execute_outer(context_ptr, entry, args);\n }\n}\n<commit_msg>Add hashbang uspport<commit_after>use alloc::arc::Arc;\n\nuse arch::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch, context_userspace, Context, ContextMemory};\nuse arch::elf::Elf;\nuse arch::memory;\nuse arch::regs::Regs;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse common::slice::GetSlice;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr, str};\n\nuse fs::Url;\n\nuse system::error::{Error, Result, ENOEXEC};\n\npub fn execute_thread(context_ptr: *mut Context, entry: usize, mut args: Vec<String>) -> ! {\n Context::spawn(\"kexec\".to_string(), box move || {\n let context = unsafe { &mut *context_ptr };\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n while let Some(mut arg) = args.pop() {\n if ! 
arg.ends_with('\\0') {\n arg.push('\\0');\n }\n\n let physical_address = arg.as_ptr() as usize;\n let virtual_address = context.next_mem();\n let virtual_size = arg.len();\n\n mem::forget(arg);\n\n unsafe {\n (*context.memory.get()).push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: false,\n allocated: true,\n });\n }\n\n context_args.push(virtual_address as usize);\n argc += 1;\n }\n context_args.push(argc);\n\n \/\/TODO: No default heap, fix brk\n {\n let virtual_address = context.next_mem();\n let virtual_size = 4096;\n let physical_address = unsafe { memory::alloc(virtual_size) };\n if physical_address > 0 {\n unsafe {\n (*context.memory.get()).push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: true,\n allocated: true\n });\n }\n }\n }\n\n context.regs = Regs::default();\n context.regs.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: unsafe { memory::alloc(CONTEXT_STACK_SIZE) },\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true,\n allocated: true,\n });\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n unsafe { ptr::write(sp as *mut usize, *arg) };\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n unsafe {\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(1 << 9);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n\n if let Some(vfork) = context.vfork.take() {\n unsafe { (*vfork).blocked = false; }\n }\n });\n\n loop {\n unsafe { context_switch() };\n }\n}\n\n\/\/\/ Execute an executable\npub fn execute(mut args: Vec<String>) -> Result<usize> {\n let contexts = ::env().contexts.lock();\n let current = try!(contexts.current());\n\n let mut vec: Vec<u8> = Vec::new();\n\n let mut url = Url::from_string(current.canonicalize(args.get(0).map_or(\"\", |p| &p)));\n {\n let mut resource = if let Ok(resource) = url.open() {\n resource\n } else {\n url = Url::from_string(\"file:\/bin\/\".to_string() + args.get(0).map_or(\"\", |p| &p));\n try!(url.open())\n };\n\n 'reading: loop {\n let mut bytes = [0; 4096];\n match resource.read(&mut bytes) {\n Ok(0) => break 'reading,\n Ok(count) => vec.push_all(bytes.get_slice(.. count)),\n Err(err) => return Err(err)\n }\n }\n }\n\n if vec.starts_with(b\"#!\") {\n if let Some(mut arg) = args.get_mut(0) {\n *arg = url.string;\n }\n if let Some(line) = unsafe { str::from_utf8_unchecked(&vec[2..]) }.lines().next() {\n let mut i = 0;\n for arg in line.trim().split(' ') {\n if ! 
arg.is_empty() {\n args.insert(i, arg.to_string());\n i += 1;\n }\n }\n if i == 0 {\n args.insert(i, \"\/bin\/sh\".to_string());\n }\n execute(args)\n } else {\n Err(Error::new(ENOEXEC))\n }\n } else {\n match Elf::from(&vec) {\n Ok(executable) => {\n let entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n\n let offset = virtual_address % 4096;\n\n let physical_address = memory::alloc(virtual_size + offset);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy((physical_address + offset) as *mut u8,\n (executable.data.as_ptr() as usize + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n if segment.mem_len > segment.file_len {\n ::memset((physical_address + offset + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n }\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address - offset,\n virtual_size: virtual_size + offset,\n writeable: segment.flags & 2 == 2,\n allocated: true,\n });\n }\n }\n }\n\n if entry > 0 && ! memory.is_empty() {\n let mut contexts = ::env().contexts.lock();\n let mut context = try!(contexts.current_mut());\n\n \/\/debugln!(\"{}: {}: execute {}\", context.pid, context.name, url.string);\n\n context.name = url.string;\n context.cwd = Arc::new(UnsafeCell::new(unsafe { (*context.cwd.get()).clone() }));\n\n unsafe { context.unmap() };\n context.memory = Arc::new(UnsafeCell::new(memory));\n unsafe { context.map() };\n\n execute_thread(context.deref_mut(), entry, args);\n } else {\n Err(Error::new(ENOEXEC))\n }\n },\n Err(msg) => {\n debugln!(\"execute: failed to exec '{}': {}\", url.string, msg);\n Err(Error::new(ENOEXEC))\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nfn main() {\n \/\/ Create a path to the desired file\n let path = Path::new(\"hello.txt\");\n let display = path.display();\n\n \/\/ Open the path in read-only mode, returns `io::Result<File>`\n let mut file = match File::open(&path) {\n \/\/ The `description` method of `io::Error` returns a string that describes the error\n Err(why) => panic!(\"couldn't open {}: {}\", display,\n Error::description(&why)),\n Ok(file) => file,\n };\n\n \/\/ Read the file contents into a string, returns `io::Result<usize>`\n let mut s = String::new();\n match file.read_to_string(&mut s) {\n Err(why) => panic!(\"couldn't read {}: {}\", display,\n Error::description(&why)),\n Ok(_) => print!(\"{} contains:\\n{}\", display, s),\n }\n\n \/\/ `file` goes out of scope, and the \"hello.txt\" file gets closed\n}\n<commit_msg>file\/open: add a newline<commit_after>use std::error::Error;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nfn main() {\n \/\/ Create a path to the desired file\n let path = Path::new(\"hello.txt\");\n let display = path.display();\n\n \/\/ Open the path in read-only mode, returns `io::Result<File>`\n let mut file = match File::open(&path) {\n \/\/ The `description` method of `io::Error` returns a string that\n \/\/ describes the error\n Err(why) => panic!(\"couldn't open {}: {}\", display,\n Error::description(&why)),\n Ok(file) => file,\n };\n\n \/\/ Read the file contents into a string, returns `io::Result<usize>`\n let mut s = String::new();\n match 
file.read_to_string(&mut s) {\n Err(why) => panic!(\"couldn't read {}: {}\", display,\n Error::description(&why)),\n Ok(_) => print!(\"{} contains:\\n{}\", display, s),\n }\n\n \/\/ `file` goes out of scope, and the \"hello.txt\" file gets closed\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example that demonstrates custom error callbacks<commit_after>\/\/ Copyright 2014 The GLFW-RS Developers. For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Demonstrates how custom error callbacks with user data can be created\n\n#![feature(phase)]\n\nextern crate native;\nextern crate glfw;\n#[phase(syntax, link)] extern crate log;\n\nuse std::cell::Cell;\n\n#[start]\nfn start(argc: int, argv: **u8) -> int {\n native::start(argc, argv, main)\n}\n\nfn main() {\n let glfw = glfw::init(Some(\n glfw::Callback {\n f: error_callback,\n data: Cell::new(0),\n }\n )).unwrap();\n\n \/\/ Force the error callback to be triggered\n glfw.window_hint(glfw::ContextVersion(40000, 3000)); \/\/ Ridiculous!\n let _ = glfw.create_window(300, 300, \"Hey this won't work.\", glfw::Windowed);\n let _ = glfw.create_window(300, 300, \"Nope, not working.\", glfw::Windowed);\n let _ = glfw.create_window(300, 300, \"Stop it! :(\", glfw::Windowed);\n}\n\nfn error_callback(_: glfw::Error, description: ~str, error_count: &Cell<uint>) {\n error!(\"GLFW error {}: {}\", error_count.get(), description);\n error_count.set(error_count.get() + 1);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add some trace logging for debugging mappings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a simple main.rs file to test everything alltogether<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>finish the core elements of the game. bomb placement and user input.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix #4: Implement generate_md5() function<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `make clean` in rustbuild.\n\/\/!\n\/\/! Responsible for cleaning out a build directory of all old and stale\n\/\/! artifacts to prepare for a fresh build. Currently doesn't remove the\n\/\/! `build\/cache` directory (download cache) or the `build\/$target\/llvm`\n\/\/! 
directory as we want that cached between builds.\n\nuse std::fs;\nuse std::path::Path;\n\nuse build::Build;\n\npub fn clean(build: &Build) {\n for host in build.config.host.iter() {\n\n let out = build.out.join(host);\n\n rm_rf(build, &out.join(\"compiler-rt\"));\n rm_rf(build, &out.join(\"doc\"));\n\n for stage in 0..4 {\n rm_rf(build, &out.join(format!(\"stage{}\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-std\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-rustc\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-tools\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-test\", stage)));\n }\n }\n}\n\nfn rm_rf(build: &Build, path: &Path) {\n if path.exists() {\n build.verbose(&format!(\"removing `{}`\", path.display()));\n t!(fs::remove_dir_all(path));\n }\n}\n<commit_msg>rustbuild: Clean out tmp directory on `make clean`<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `make clean` in rustbuild.\n\/\/!\n\/\/! Responsible for cleaning out a build directory of all old and stale\n\/\/! artifacts to prepare for a fresh build. Currently doesn't remove the\n\/\/! `build\/cache` directory (download cache) or the `build\/$target\/llvm`\n\/\/! directory as we want that cached between builds.\n\nuse std::fs;\nuse std::path::Path;\n\nuse build::Build;\n\npub fn clean(build: &Build) {\n rm_rf(build, \"tmp\".as_ref());\n rm_rf(build, &build.out.join(\"tmp\"));\n\n for host in build.config.host.iter() {\n\n let out = build.out.join(host);\n\n rm_rf(build, &out.join(\"compiler-rt\"));\n rm_rf(build, &out.join(\"doc\"));\n\n for stage in 0..4 {\n rm_rf(build, &out.join(format!(\"stage{}\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-std\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-rustc\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-tools\", stage)));\n rm_rf(build, &out.join(format!(\"stage{}-test\", stage)));\n }\n }\n}\n\nfn rm_rf(build: &Build, path: &Path) {\n if path.exists() {\n build.verbose(&format!(\"removing `{}`\", path.display()));\n t!(fs::remove_dir_all(path));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Alter formatting for words in Option::cloned doc comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Replace .next() with .eat(1) where appropriate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update serde attrs in call<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>impl Display for Cell<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix issue #3 - index out of bounds<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Revert some more<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`if` isn't surrounded by empty lines<commit_after><|endoftext|>"} {"text":"<commit_before>use std::sync::{Mutex, Arc};\nuse std::sync::atomics::{AtomicUint, SeqCst};\n\n\npub struct EWMA {\n pub uncounted: AtomicUint, \/\/ This tracks uncounted events\n alpha: f64,\n rate: Mutex<f64>,\n init: bool,\n}\n\n\npub struct EWMASnapshot {\n value: 
f64\n}\n\n\nimpl EWMASnapshot {\n fn rate(&self) -> f64 {\n return self.value;\n }\n}\n\nimpl EWMA {\n pub fn rate(&self) -> f64 {\n let r = self.rate.lock();\n *r * (1e9 as f64)\n }\n\n pub fn snapshot(&self) -> EWMASnapshot {\n return EWMASnapshot{ value: self.rate() };\n }\n\n pub fn tick(&mut self) {\n let counter: uint = self.uncounted.load(SeqCst);\n\n self.uncounted.fetch_sub(counter, SeqCst); \/\/ Broken atm\n\n let mut rate = self.rate.lock();\n let i_rate = (counter as f64) \/ (5e9);\n\n if self.init {\n *rate += self.alpha * (i_rate - *rate);\n } else {\n self.init = true;\n *rate = i_rate;\n }\n\n rate.cond.signal();\n }\n\n pub fn update(&self, n: uint) {\n self.uncounted.fetch_add(n, SeqCst);\n }\n\n pub fn new(alpha: f64) -> EWMA {\n return EWMA{\n uncounted: AtomicUint::new(0u),\n alpha: alpha,\n rate: Mutex::new(0f64),\n init: false,\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use ewma::EWMA;\n use std::num::Float;\n\n \/\/ Tick a minute\n fn elapse_minute(e: &mut EWMA) {\n for i in range(0i, 12i) {\n e.tick();\n }\n }\n\n \/\/ Returns whether the rate() is within 0.0001 of expected after ticking a minute\n fn within(e: &mut EWMA, expected: f64) -> bool {\n elapse_minute(e);\n let r: f64 = e.rate();\n (r - expected).abs() < 0.0001\n }\n\n #[test]\n fn ewma1() {\n let i = -5.0f64\/60.0f64\/1f64;\n let mut e = EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let mut r: f64;\n\n \/\/ initial\n r = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.22072766470286553f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.08120116994196772f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.029872241020718428f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.01098938333324054f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.004042768199451294f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.0014872513059998212f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.0005471291793327122f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.00020127757674150815f64), true);\n\n \/\/ 9 minute\n assert_eq!(within(&mut e, 7.404588245200814e-05f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 2.7239957857491083e-05f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 1.0021020474147462e-05f64), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 3.6865274119969525e-06f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 1.3561976441886433e-06f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 4.989172314621449e-07f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 1.8354139230109722e-07f64), true);\n }\n\n #[test]\n fn ewma5() {\n let i = -5.0f64\/60.0f64\/5f64;\n let mut e = EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let r: f64 = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.49123845184678905f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.4021920276213837f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.32928698165641596f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.269597378470333f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.2207276647028654f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.18071652714732128f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.14795817836496392f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.12113791079679326f64), true);\n\n \/\/ 9 minute\n assert_eq!(within(&mut e, 
0.09917933293295193f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 0.08120116994196763f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 0.06648189501740036), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 0.05443077197364752f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 0.04456414692860035f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 0.03648603757513079f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 0.0298722410207183831020718428f64), true);\n }\n\n #[test]\n fn ewma15() {\n let i = -5.0f64\/60.0f64\/15f64;\n let mut e = EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let r: f64 = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.5613041910189706f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.5251039914257684f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.4912384518467888184678905f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.459557003018789f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.4299187863442732f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.4021920276213831f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.37625345116383313f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.3519877317060185f64), true);\n\n \/\/ 9 minute\n assert_eq!(within(&mut e, 0.3292869816564153165641596f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 0.3080502714195546f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 0.2881831806538789f64), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 0.26959737847033216f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 0.2522102307052083f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 0.23594443252115815f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 0.2207276647028646247028654470286553f64), true);\n }\n}<commit_msg>Should be pub<commit_after>use std::sync::{Mutex, Arc};\nuse std::sync::atomics::{AtomicUint, SeqCst};\n\n\npub struct EWMA {\n pub uncounted: AtomicUint, \/\/ This tracks uncounted events\n alpha: f64,\n rate: Mutex<f64>,\n init: bool,\n}\n\n\npub struct EWMASnapshot {\n value: f64\n}\n\n\nimpl EWMASnapshot {\n pub fn rate(&self) -> f64 {\n return self.value;\n }\n}\n\nimpl EWMA {\n pub fn rate(&self) -> f64 {\n let r = self.rate.lock();\n *r * (1e9 as f64)\n }\n\n pub fn snapshot(&self) -> EWMASnapshot {\n return EWMASnapshot{ value: self.rate() };\n }\n\n pub fn tick(&mut self) {\n let counter: uint = self.uncounted.load(SeqCst);\n\n self.uncounted.fetch_sub(counter, SeqCst); \/\/ Broken atm\n\n let mut rate = self.rate.lock();\n let i_rate = (counter as f64) \/ (5e9);\n\n if self.init {\n *rate += self.alpha * (i_rate - *rate);\n } else {\n self.init = true;\n *rate = i_rate;\n }\n\n rate.cond.signal();\n }\n\n pub fn update(&self, n: uint) {\n self.uncounted.fetch_add(n, SeqCst);\n }\n\n pub fn new(alpha: f64) -> EWMA {\n return EWMA{\n uncounted: AtomicUint::new(0u),\n alpha: alpha,\n rate: Mutex::new(0f64),\n init: false,\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use ewma::EWMA;\n use std::num::Float;\n\n \/\/ Tick a minute\n fn elapse_minute(e: &mut EWMA) {\n for i in range(0i, 12i) {\n e.tick();\n }\n }\n\n \/\/ Returns whether the rate() is within 0.0001 of expected after ticking a minute\n fn within(e: &mut EWMA, expected: f64) -> bool {\n elapse_minute(e);\n let r: f64 = e.rate();\n (r - expected).abs() < 0.0001\n }\n\n #[test]\n fn ewma1() {\n let i = -5.0f64\/60.0f64\/1f64;\n let mut e = 
EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let mut r: f64;\n\n \/\/ initial\n r = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.22072766470286553f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.08120116994196772f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.029872241020718428f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.01098938333324054f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.004042768199451294f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.0014872513059998212f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.0005471291793327122f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.00020127757674150815f64), true);\n\n \/\/ 9 minute\n assert_eq!(within(&mut e, 7.404588245200814e-05f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 2.7239957857491083e-05f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 1.0021020474147462e-05f64), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 3.6865274119969525e-06f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 1.3561976441886433e-06f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 4.989172314621449e-07f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 1.8354139230109722e-07f64), true);\n }\n\n #[test]\n fn ewma5() {\n let i = -5.0f64\/60.0f64\/5f64;\n let mut e = EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let r: f64 = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.49123845184678905f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.4021920276213837f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.32928698165641596f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.269597378470333f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.2207276647028654f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.18071652714732128f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.14795817836496392f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.12113791079679326f64), true);\n\n \/\/ 9 minute\n assert_eq!(within(&mut e, 0.09917933293295193f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 0.08120116994196763f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 0.06648189501740036), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 0.05443077197364752f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 0.04456414692860035f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 0.03648603757513079f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 0.0298722410207183831020718428f64), true);\n }\n\n #[test]\n fn ewma15() {\n let i = -5.0f64\/60.0f64\/15f64;\n let mut e = EWMA::new(1f64 - i.exp());\n e.update(3u);\n e.tick();\n\n let r: f64 = e.rate();\n assert_eq!(r, 0.6f64);\n\n \/\/ 1 minute\n assert_eq!(within(&mut e, 0.5613041910189706f64), true);\n\n \/\/ 2 minute\n assert_eq!(within(&mut e, 0.5251039914257684f64), true);\n\n \/\/ 3 minute\n assert_eq!(within(&mut e, 0.4912384518467888184678905f64), true);\n\n \/\/ 4 minute\n assert_eq!(within(&mut e, 0.459557003018789f64), true);\n\n \/\/ 5 minute\n assert_eq!(within(&mut e, 0.4299187863442732f64), true);\n\n \/\/ 6 minute\n assert_eq!(within(&mut e, 0.4021920276213831f64), true);\n\n \/\/ 7 minute\n assert_eq!(within(&mut e, 0.37625345116383313f64), true);\n\n \/\/ 8 minute\n assert_eq!(within(&mut e, 0.3519877317060185f64), true);\n\n \/\/ 9 minute\n 
assert_eq!(within(&mut e, 0.3292869816564153165641596f64), true);\n\n \/\/ 10 minute\n assert_eq!(within(&mut e, 0.3080502714195546f64), true);\n\n \/\/ 11 minute\n assert_eq!(within(&mut e, 0.2881831806538789f64), true);\n\n \/\/ 12 minute\n assert_eq!(within(&mut e, 0.26959737847033216f64), true);\n\n \/\/ 13 minute\n assert_eq!(within(&mut e, 0.2522102307052083f64), true);\n\n \/\/ 14 minute\n assert_eq!(within(&mut e, 0.23594443252115815f64), true);\n\n \/\/ 15 minute\n assert_eq!(within(&mut e, 0.2207276647028646247028654470286553f64), true);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Added ZDT1 synthetic test function<commit_after>\/\/\/ ZDT1 bi-objective test function\n\/\/\/\n\/\/\/ Evaluates solution parameters using the ZDT1 [1] synthetic test\n\/\/\/ test function to produce two objective values.\n\/\/\/\n\/\/\/ [1 ]E. Zitzler, K. Deb, and L. Thiele. Comparison of Multiobjective\n\/\/\/ Evolutionary Algorithms: Empirical Results. Evolutionary\n\/\/\/ Computation, 8(2):173-195, 2000\npub fn zdt1(parameters: [f32; 30]) -> [f32; 2] {\n\n \/\/ objective function 1\n let f1 = parameters[0];\n \/\/ objective function 2\n let mut g = 1_f32;\n\n \/\/ g(x)\n for i in 1..parameters.len() {\n g = g + ((9_f32 \/ (parameters.len() as f32 - 1_f32)) * parameters[i]);\n }\n\n \/\/ h(f1, x)\n let h = 1_f32 - (f1 \/ g).sqrt();\n\n \/\/ f2(x)\n let f2 = g * h;\n\n return [f1, f2];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a derivation of the equality trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Public times are optional for passing points.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added options for reloading assets<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Factor connect() call into edge helper function.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blaaaah<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Basics eval (no priority), only support OR<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added app level help \/ versioning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ADD: main.rs, to make println!() debug output easier<commit_after>extern crate compression;\n\nfn main() {\n\tuse std::io::{ Cursor, Read };\n\tuse compression::gzip::Decompressor;\n\n\tlet gzip_stream = Cursor::new(vec![0x1fu8, 0x8bu8, 0x08u8, 0x08u8, 0x9fu8, 0x30u8, 0x04u8, 0x56u8, 0x00u8, 0x03u8, 0x78u8, 0x78u8, 0x78u8, 0x78u8, 0x78u8, 0x79u8, 0x79u8, 0x79u8, 0x79u8, 0x79u8, 0x2eu8, 0x74u8, 0x78u8, 0x74u8, 0x00u8, 0xabu8, 0xa8u8, 0x00u8, 0x82u8, 0x4au8, 0x10u8, 0x00u8, 0x00u8, 0x42u8, 0x62u8, 0xddu8, 0x64u8, 0x0au8, 0x00u8, 0x00u8, 0x00u8]);\n\tlet mut decompressed = &mut String::new();\n\n\tlet _ = Decompressor::new(gzip_stream).read_to_string(&mut decompressed);\n\n\tassert_eq!(\"xxxxxyyyyy\", decompressed);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create main.rs; retested successfully against kernel module<commit_after>extern crate rsnl;\n\n\/\/use rsnl::{socket};\n\n\nfn main() {\n\tlet nls = rsnl::socket::new();\n\tnls.connect(rsnl::NetlinkProtocol::zu);\n\tlet buf = 0;\n\n\tnls.send_simple(0xfa, 0, &buf, 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>slight misunderstanding of how related numbers work, rename for clarity<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Added Mount struct.<commit_after>use 
http::server::request::{AbsolutePath};\nuse regex::Regex;\n\nuse iron::{Iron, Middleware, Request, Response, Alloy, Furnace};\nuse iron::middleware::{Status, Continue, Unwind};\n\n#[deriving(Clone)]\npub struct Mount<F> {\n route: String,\n matches: Regex,\n iron: Iron<F>\n}\n\nimpl<F> Mount<F> {\n pub fn new(route: &str, iron: Iron<F>) -> Mount<F> {\n Mount {\n route: route.to_string(),\n iron: iron,\n matches: to_regex(route)\n }\n }\n}\n\nfn to_regex(route: &str) -> Regex {\n Regex::new(\"^\".to_string().append(route).as_slice()).unwrap()\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add unfinished test: sphinx_packet_unwrap_test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove useless conversion<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove parsing REPL code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added -i option<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test impl of new Plan<commit_after>use error::*;\nuse ffi::*;\nuse types::*;\n\nuse std::marker::PhantomData;\n\npub struct Plan<A, B, Plan: PlanSpec> {\n plan: Plan,\n alignment: Alignment,\n phantom: PhantomData<(A, B)>,\n}\n\nimpl<A, B, P: PlanSpec> Drop for Plan<A, B, P> {\n fn drop(&mut self) {\n self.plan.destroy();\n }\n}\n\npub trait C2CPlan {\n type Complex;\n fn new(shape: &[usize], in_: &mut [Self::Complex], out: &mut [Self::Complex], sign: Sign, flag: Flag) -> Result<Self>;\n fn c2c(&mut self, in_: &mut [Self::Complex], out: &mut [Self::Complex]);\n}\n\nimpl C2CPlan for Plan<c64, c64, Plan64> {\n pub fn new(\n shape: &[usize],\n in_: &mut [c64],\n out: &mut [c64],\n sign: Sign,\n flag: Flag,\n ) -> Result<Self> {\n let plan = excall!{ fftw_plan_dft(\n shape.len() as i32,\n shape.to_cint().as_mut_ptr() as *mut _,\n in_.as_mut_ptr(),\n out.as_mut_ptr(),\n sign as i32, flag.into())\n }.validate()?;\n Ok(Self {\n plan,\n alignment: Alignment::new(in_, out),\n phantom: PhantomData,\n })\n }\n}\n\npub type Plan64 = fftw_plan;\npub type Plan32 = fftwf_plan;\n\npub trait PlanSpec: Clone + Copy {\n fn validate(self) -> Result<Self>;\n fn destroy(self);\n fn print(self);\n}\n\nimpl PlanSpec for Plan64 {\n fn validate(self) -> Result<Self> {\n if self.is_null() {\n Err(InvalidPlanError::new().into())\n } else {\n Ok(self)\n }\n }\n fn destroy(self) {\n excall!{ fftw_destroy_plan(self) }\n }\n fn print(self) {\n excall!{ fftw_print_plan(self) }\n }\n}\n\nimpl PlanSpec for Plan32 {\n fn validate(self) -> Result<Self> {\n if self.is_null() {\n Err(InvalidPlanError::new().into())\n } else {\n Ok(self)\n }\n }\n fn destroy(self) {\n excall!{ fftwf_destroy_plan(self) }\n }\n fn print(self) {\n excall!{ fftwf_print_plan(self) }\n }\n}\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\nstruct Alignment {\n in_: i32,\n out: i32,\n n_in_: usize,\n n_out: usize,\n}\n\nfn alignment_of<T>(a: &[T]) -> i32 {\n unsafe { fftw_alignment_of(a.as_ptr() as *mut _) }\n}\n\nimpl Alignment {\n fn new<A, B>(in_: &[A], out: &[B]) -> Self {\n Self {\n in_: alignment_of(in_),\n out: alignment_of(out),\n n_in_: in_.len(),\n n_out: out.len(),\n }\n }\n\n fn check<A, B>(&self, in_: &[A], out: &[B]) -> Result<()> {\n let args = Self::new(in_, out);\n if *self != args {\n Err(InputMismatchError {\n origin: *self,\n args,\n }.into())\n } else {\n Ok(())\n }\n }\n}\n\n#[derive(Debug)]\npub struct InputMismatchError {\n origin: Alignment,\n args: Alignment,\n}\n\ntrait ToCInt {\n fn to_cint(&self) -> Vec<i32>;\n}\n\nimpl ToCInt for [usize] {\n fn to_cint(&self) -> 
Vec<i32> {\n self.iter().map(|&x| x as i32).collect()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic REPL<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Read lines of a file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>back to parsing I guess. make it work before making it pretty<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>:hammer: Remove invalid unwrap process<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(collections, convert, core, exit_status, file_type, fs_ext, fs_mode)]\n#![feature(metadata_ext, raw_ext, scoped, symlink_metadata)]\n\nextern crate ansi_term;\nextern crate datetime;\nextern crate getopts;\nextern crate libc;\nextern crate locale;\nextern crate natord;\nextern crate num_cpus;\nextern crate number_prefix;\nextern crate pad;\nextern crate users;\nextern crate unicode_width;\n\n#[cfg(feature=\"git\")]\nextern crate git2;\n\nuse std::env;\nuse std::fs;\nuse std::path::{Component, Path, PathBuf};\nuse std::sync::mpsc::{channel, sync_channel};\nuse std::thread;\n\nuse dir::Dir;\nuse file::File;\nuse options::{Options, View};\n\nmod colours;\nmod column;\nmod dir;\nmod feature;\nmod file;\nmod filetype;\nmod options;\nmod output;\nmod term;\n\n#[cfg(not(test))]\nstruct Exa<'dir> {\n count: usize,\n options: Options,\n dirs: Vec<PathBuf>,\n files: Vec<File<'dir>>,\n}\n\n#[cfg(not(test))]\nimpl<'dir> Exa<'dir> {\n fn new(options: Options) -> Exa<'dir> {\n Exa {\n count: 0,\n options: options,\n dirs: Vec::new(),\n files: Vec::new(),\n }\n }\n\n fn load(&mut self, files: &[String]) {\n \/\/ Separate the user-supplied paths into directories and files.\n \/\/ Files are shown first, and then each directory is expanded\n \/\/ and listed second.\n\n let is_tree = self.options.dir_action.is_tree() || self.options.dir_action.is_as_file();\n let total_files = files.len();\n\n \/\/ Denotes the maxinum number of concurrent threads\n let (thread_capacity_tx, thread_capacity_rs) = sync_channel(8 * num_cpus::get());\n\n \/\/ Communication between consumer thread and producer threads\n enum StatResult<'dir> {\n File(File<'dir>),\n Path(PathBuf),\n Error\n }\n\n let (results_tx, results_rx) = channel();\n\n \/\/ Spawn consumer thread\n let _consumer = thread::scoped(move || {\n for _ in 0..total_files {\n\n \/\/ Make room for more producer threads\n let _ = thread_capacity_rs.recv();\n\n \/\/ Receive a producer's result\n match results_rx.recv() {\n Ok(result) => match result {\n StatResult::File(file) => self.files.push(file),\n StatResult::Path(path) => self.dirs.push(path),\n StatResult::Error => ()\n },\n Err(_) => unreachable!(),\n }\n self.count += 1;\n }\n });\n\n for file in files.iter() {\n let file = file.clone();\n let results_tx = results_tx.clone();\n\n \/\/ Block until there is room for another thread\n let _ = thread_capacity_tx.send(());\n\n \/\/ Spawn producer thread\n thread::spawn(move || {\n let path = Path::new(&*file);\n let _ = results_tx.send(match fs::metadata(&path) {\n Ok(metadata) => {\n if !metadata.is_dir() {\n StatResult::File(File::with_metadata(metadata, &path, None, false))\n }\n else if is_tree {\n StatResult::File(File::with_metadata(metadata, &path, None, true))\n }\n else {\n StatResult::Path(path.to_path_buf())\n }\n }\n Err(e) => {\n println!(\"{}: {}\", file, e);\n StatResult::Error\n }\n });\n });\n }\n }\n\n fn print_files(&self) {\n if !self.files.is_empty() {\n self.print(None, &self.files[..]);\n }\n }\n\n fn print_dirs(&mut self) {\n let mut first = 
self.files.is_empty();\n\n \/\/ Directories are put on a stack rather than just being iterated through,\n \/\/ as the vector can change as more directories are added.\n loop {\n let dir_path = match self.dirs.pop() {\n None => break,\n Some(f) => f,\n };\n\n \/\/ Put a gap between directories, or between the list of files and the\n \/\/ first directory.\n if first {\n first = false;\n }\n else {\n print!(\"\\n\");\n }\n\n match Dir::readdir(&dir_path) {\n Ok(ref dir) => {\n let mut files = dir.files(false);\n self.options.transform_files(&mut files);\n\n \/\/ When recursing, add any directories to the dirs stack\n \/\/ backwards: the *last* element of the stack is used each\n \/\/ time, so by inserting them backwards, they get displayed in\n \/\/ the correct sort order.\n if let Some(recurse_opts) = self.options.dir_action.recurse_options() {\n let depth = dir_path.components().filter(|&c| c != Component::CurDir).count() + 1;\n if !recurse_opts.tree && !recurse_opts.is_too_deep(depth) {\n for dir in files.iter().filter(|f| f.is_directory()).rev() {\n self.dirs.push(dir.path.clone());\n }\n }\n }\n\n if self.count > 1 {\n println!(\"{}:\", dir_path.display());\n }\n self.count += 1;\n\n self.print(Some(dir), &files[..]);\n }\n Err(e) => {\n println!(\"{}: {}\", dir_path.display(), e);\n return;\n }\n };\n }\n }\n\n fn print(&self, dir: Option<&Dir>, files: &[File]) {\n match self.options.view {\n View::Grid(g) => g.view(files),\n View::Details(d) => d.view(dir, files),\n View::Lines(l) => l.view(files),\n }\n }\n}\n\n#[cfg(not(test))]\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n match Options::getopts(args.tail()) {\n Ok((options, paths)) => {\n let mut exa = Exa::new(options);\n exa.load(&paths);\n exa.print_files();\n exa.print_dirs();\n },\n Err(e) => {\n println!(\"{}\", e);\n env::set_exit_status(e.error_code());\n },\n };\n}\n<commit_msg>StatResult::Path -> Dir<commit_after>#![feature(collections, convert, core, exit_status, file_type, fs_ext, fs_mode)]\n#![feature(metadata_ext, raw_ext, scoped, symlink_metadata)]\n\nextern crate ansi_term;\nextern crate datetime;\nextern crate getopts;\nextern crate libc;\nextern crate locale;\nextern crate natord;\nextern crate num_cpus;\nextern crate number_prefix;\nextern crate pad;\nextern crate users;\nextern crate unicode_width;\n\n#[cfg(feature=\"git\")]\nextern crate git2;\n\nuse std::env;\nuse std::fs;\nuse std::path::{Component, Path, PathBuf};\nuse std::sync::mpsc::{channel, sync_channel};\nuse std::thread;\n\nuse dir::Dir;\nuse file::File;\nuse options::{Options, View};\n\nmod colours;\nmod column;\nmod dir;\nmod feature;\nmod file;\nmod filetype;\nmod options;\nmod output;\nmod term;\n\n#[cfg(not(test))]\nstruct Exa<'dir> {\n count: usize,\n options: Options,\n dirs: Vec<PathBuf>,\n files: Vec<File<'dir>>,\n}\n\n#[cfg(not(test))]\nimpl<'dir> Exa<'dir> {\n fn new(options: Options) -> Exa<'dir> {\n Exa {\n count: 0,\n options: options,\n dirs: Vec::new(),\n files: Vec::new(),\n }\n }\n\n fn load(&mut self, files: &[String]) {\n \/\/ Separate the user-supplied paths into directories and files.\n \/\/ Files are shown first, and then each directory is expanded\n \/\/ and listed second.\n\n let is_tree = self.options.dir_action.is_tree() || self.options.dir_action.is_as_file();\n let total_files = files.len();\n\n \/\/ Denotes the maxinum number of concurrent threads\n let (thread_capacity_tx, thread_capacity_rs) = sync_channel(8 * num_cpus::get());\n\n \/\/ Communication between consumer thread and producer threads\n enum 
StatResult<'dir> {\n File(File<'dir>),\n Dir(PathBuf),\n Error\n }\n\n let (results_tx, results_rx) = channel();\n\n \/\/ Spawn consumer thread\n let _consumer = thread::scoped(move || {\n for _ in 0..total_files {\n\n \/\/ Make room for more producer threads\n let _ = thread_capacity_rs.recv();\n\n \/\/ Receive a producer's result\n match results_rx.recv() {\n Ok(result) => match result {\n StatResult::File(file) => self.files.push(file),\n StatResult::Dir(path) => self.dirs.push(path),\n StatResult::Error => ()\n },\n Err(_) => unreachable!(),\n }\n self.count += 1;\n }\n });\n\n for file in files.iter() {\n let file = file.clone();\n let results_tx = results_tx.clone();\n\n \/\/ Block until there is room for another thread\n let _ = thread_capacity_tx.send(());\n\n \/\/ Spawn producer thread\n thread::spawn(move || {\n let path = Path::new(&*file);\n let _ = results_tx.send(match fs::metadata(&path) {\n Ok(metadata) => {\n if !metadata.is_dir() {\n StatResult::File(File::with_metadata(metadata, &path, None, false))\n }\n else if is_tree {\n StatResult::File(File::with_metadata(metadata, &path, None, true))\n }\n else {\n StatResult::Dir(path.to_path_buf())\n }\n }\n Err(e) => {\n println!(\"{}: {}\", file, e);\n StatResult::Error\n }\n });\n });\n }\n }\n\n fn print_files(&self) {\n if !self.files.is_empty() {\n self.print(None, &self.files[..]);\n }\n }\n\n fn print_dirs(&mut self) {\n let mut first = self.files.is_empty();\n\n \/\/ Directories are put on a stack rather than just being iterated through,\n \/\/ as the vector can change as more directories are added.\n loop {\n let dir_path = match self.dirs.pop() {\n None => break,\n Some(f) => f,\n };\n\n \/\/ Put a gap between directories, or between the list of files and the\n \/\/ first directory.\n if first {\n first = false;\n }\n else {\n print!(\"\\n\");\n }\n\n match Dir::readdir(&dir_path) {\n Ok(ref dir) => {\n let mut files = dir.files(false);\n self.options.transform_files(&mut files);\n\n \/\/ When recursing, add any directories to the dirs stack\n \/\/ backwards: the *last* element of the stack is used each\n \/\/ time, so by inserting them backwards, they get displayed in\n \/\/ the correct sort order.\n if let Some(recurse_opts) = self.options.dir_action.recurse_options() {\n let depth = dir_path.components().filter(|&c| c != Component::CurDir).count() + 1;\n if !recurse_opts.tree && !recurse_opts.is_too_deep(depth) {\n for dir in files.iter().filter(|f| f.is_directory()).rev() {\n self.dirs.push(dir.path.clone());\n }\n }\n }\n\n if self.count > 1 {\n println!(\"{}:\", dir_path.display());\n }\n self.count += 1;\n\n self.print(Some(dir), &files[..]);\n }\n Err(e) => {\n println!(\"{}: {}\", dir_path.display(), e);\n return;\n }\n };\n }\n }\n\n fn print(&self, dir: Option<&Dir>, files: &[File]) {\n match self.options.view {\n View::Grid(g) => g.view(files),\n View::Details(d) => d.view(dir, files),\n View::Lines(l) => l.view(files),\n }\n }\n}\n\n#[cfg(not(test))]\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n match Options::getopts(args.tail()) {\n Ok((options, paths)) => {\n let mut exa = Exa::new(options);\n exa.load(&paths);\n exa.print_files();\n exa.print_dirs();\n },\n Err(e) => {\n println!(\"{}\", e);\n env::set_exit_status(e.error_code());\n },\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Clean up unneeded imports<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cross bar general layout<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Derived 
Debug for Config<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor: extract methods<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added condition variable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>short detour<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[Rust] The hash of &str is same with the hash of String<commit_after>use std::collections::hash_map::DefaultHasher;\nuse std::hash::Hash;\nuse std::hash::Hasher;\n\nfn concat(a: String, b: String) -> String {\n format!(\"{}{}\", a, b)\n}\n\nfn main() {\n let a: &'static str = \"ab\";\n let b: String = concat(\"a\".to_string(), \"b\".to_string());\n let c: &str = b.as_str();\n\n let mut a_hasher = DefaultHasher::new();\n a.hash(&mut a_hasher);\n\n let mut b_hasher = DefaultHasher::new();\n b.hash(&mut b_hasher);\n\n let mut c_hasher = DefaultHasher::new();\n c.hash(&mut c_hasher);\n\n assert_eq!(a_hasher.finish(), b_hasher.finish());\n assert_eq!(a_hasher.finish(), c_hasher.finish());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed up memset, added memcpy<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Some lints that are built in to the compiler.\n\/\/!\n\/\/! These are the built-in lints that are emitted direct in the main\n\/\/! compiler code, rather than using their own custom pass. Those\n\/\/! lints are all available in `rustc_lint::builtin`.\n\nuse lint::{LintPass, LateLintPass, LintArray};\n\ndeclare_lint! {\n pub CONST_ERR,\n Warn,\n \"constant evaluation detected erroneous expression\"\n}\n\ndeclare_lint! {\n pub UNUSED_IMPORTS,\n Warn,\n \"imports that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_EXTERN_CRATES,\n Allow,\n \"extern crates that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_QUALIFICATIONS,\n Allow,\n \"detects unnecessarily qualified names\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_LINTS,\n Warn,\n \"unrecognized lint attribute\"\n}\n\ndeclare_lint! {\n pub UNUSED_VARIABLES,\n Warn,\n \"detect variables which are not used in any way\"\n}\n\ndeclare_lint! {\n pub UNUSED_ASSIGNMENTS,\n Warn,\n \"detect assignments that will never be read\"\n}\n\ndeclare_lint! {\n pub DEAD_CODE,\n Warn,\n \"detect unused, unexported items\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_CODE,\n Warn,\n \"detects unreachable code paths\"\n}\n\ndeclare_lint! {\n pub WARNINGS,\n Warn,\n \"mass-change the level for lints which produce warnings\"\n}\n\ndeclare_lint! {\n pub UNUSED_FEATURES,\n Warn,\n \"unused or unknown features found in crate-level #[feature] directives\"\n}\n\ndeclare_lint! {\n pub STABLE_FEATURES,\n Warn,\n \"stable features found in #[feature] directive\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_CRATE_TYPES,\n Deny,\n \"unknown crate type found in #[crate_type] directive\"\n}\n\ndeclare_lint! {\n pub FAT_PTR_TRANSMUTES,\n Allow,\n \"detects transmutes of fat pointers\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_CASTS,\n Allow,\n \"detects trivial casts which could be removed\"\n}\n\ndeclare_lint! 
{\n pub TRIVIAL_NUMERIC_CASTS,\n Allow,\n \"detects trivial casts of numeric types which could be removed\"\n}\n\ndeclare_lint! {\n pub PRIVATE_IN_PUBLIC,\n Warn,\n \"detect private items in public interfaces not caught by the old implementation\"\n}\n\ndeclare_lint! {\n pub INACCESSIBLE_EXTERN_CRATE,\n Warn,\n \"use of inaccessible extern crate erroneously allowed\"\n}\n\ndeclare_lint! {\n pub INVALID_TYPE_PARAM_DEFAULT,\n Warn,\n \"type parameter default erroneously allowed in invalid location\"\n}\n\ndeclare_lint! {\n pub ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN,\n Warn,\n \"floating-point constants cannot be used in patterns\"\n}\n\ndeclare_lint! {\n pub ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN,\n Warn,\n \"constants of struct or enum type can only be used in a pattern if \\\n the struct or enum has `#[derive(PartialEq, Eq)]`\"\n}\n\ndeclare_lint! {\n pub MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,\n Deny,\n \"unit struct or enum variant erroneously allowed to match via path::ident(..)\"\n}\n\ndeclare_lint! {\n pub RAW_POINTER_DERIVE,\n Warn,\n \"uses of #[derive] with raw pointers are rarely correct\"\n}\n\ndeclare_lint! {\n pub TRANSMUTE_FROM_FN_ITEM_TYPES,\n Warn,\n \"transmute from function item type to pointer-sized type erroneously allowed\"\n}\n\ndeclare_lint! {\n pub HR_LIFETIME_IN_ASSOC_TYPE,\n Warn,\n \"binding for associated type references higher-ranked lifetime \\\n that does not appear in the trait input types\"\n}\n\ndeclare_lint! {\n pub OVERLAPPING_INHERENT_IMPLS,\n Warn,\n \"two overlapping inherent impls define an item with the same name were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub RENAMED_AND_REMOVED_LINTS,\n Warn,\n \"lints that have been renamed or removed\"\n}\n\ndeclare_lint! {\n pub SUPER_OR_SELF_IN_GLOBAL_PATH,\n Warn,\n \"detects super or self keywords at the beginning of global path\"\n}\n\ndeclare_lint! {\n pub UNSIZED_IN_TUPLE,\n Warn,\n \"unsized types in the interior of a tuple were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub OBJECT_UNSAFE_FRAGMENT,\n Warn,\n \"object-unsafe non-principal fragments in object types were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub LIFETIME_UNDERSCORE,\n Warn,\n \"lifetimes or labels named `'_` were erroneously allowed\"\n}\n\n\/\/\/ Does nothing as a lint pass, but registers some `Lint`s\n\/\/\/ which are used by other parts of the compiler.\n#[derive(Copy, Clone)]\npub struct HardwiredLints;\n\nimpl LintPass for HardwiredLints {\n fn get_lints(&self) -> LintArray {\n lint_array!(\n UNUSED_IMPORTS,\n UNUSED_EXTERN_CRATES,\n UNUSED_QUALIFICATIONS,\n UNKNOWN_LINTS,\n UNUSED_VARIABLES,\n UNUSED_ASSIGNMENTS,\n DEAD_CODE,\n UNREACHABLE_CODE,\n WARNINGS,\n UNUSED_FEATURES,\n STABLE_FEATURES,\n UNKNOWN_CRATE_TYPES,\n FAT_PTR_TRANSMUTES,\n TRIVIAL_CASTS,\n TRIVIAL_NUMERIC_CASTS,\n PRIVATE_IN_PUBLIC,\n INACCESSIBLE_EXTERN_CRATE,\n INVALID_TYPE_PARAM_DEFAULT,\n ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN,\n ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN,\n MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,\n CONST_ERR,\n RAW_POINTER_DERIVE,\n TRANSMUTE_FROM_FN_ITEM_TYPES,\n OVERLAPPING_INHERENT_IMPLS,\n RENAMED_AND_REMOVED_LINTS,\n SUPER_OR_SELF_IN_GLOBAL_PATH,\n UNSIZED_IN_TUPLE,\n OBJECT_UNSAFE_FRAGMENT,\n HR_LIFETIME_IN_ASSOC_TYPE,\n LIFETIME_UNDERSCORE\n )\n }\n}\n\nimpl LateLintPass for HardwiredLints {}\n<commit_msg>Deny (by default) transmuting from fn item types to pointer-sized types.<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Some lints that are built in to the compiler.\n\/\/!\n\/\/! These are the built-in lints that are emitted direct in the main\n\/\/! compiler code, rather than using their own custom pass. Those\n\/\/! lints are all available in `rustc_lint::builtin`.\n\nuse lint::{LintPass, LateLintPass, LintArray};\n\ndeclare_lint! {\n pub CONST_ERR,\n Warn,\n \"constant evaluation detected erroneous expression\"\n}\n\ndeclare_lint! {\n pub UNUSED_IMPORTS,\n Warn,\n \"imports that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_EXTERN_CRATES,\n Allow,\n \"extern crates that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_QUALIFICATIONS,\n Allow,\n \"detects unnecessarily qualified names\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_LINTS,\n Warn,\n \"unrecognized lint attribute\"\n}\n\ndeclare_lint! {\n pub UNUSED_VARIABLES,\n Warn,\n \"detect variables which are not used in any way\"\n}\n\ndeclare_lint! {\n pub UNUSED_ASSIGNMENTS,\n Warn,\n \"detect assignments that will never be read\"\n}\n\ndeclare_lint! {\n pub DEAD_CODE,\n Warn,\n \"detect unused, unexported items\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_CODE,\n Warn,\n \"detects unreachable code paths\"\n}\n\ndeclare_lint! {\n pub WARNINGS,\n Warn,\n \"mass-change the level for lints which produce warnings\"\n}\n\ndeclare_lint! {\n pub UNUSED_FEATURES,\n Warn,\n \"unused or unknown features found in crate-level #[feature] directives\"\n}\n\ndeclare_lint! {\n pub STABLE_FEATURES,\n Warn,\n \"stable features found in #[feature] directive\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_CRATE_TYPES,\n Deny,\n \"unknown crate type found in #[crate_type] directive\"\n}\n\ndeclare_lint! {\n pub FAT_PTR_TRANSMUTES,\n Allow,\n \"detects transmutes of fat pointers\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_CASTS,\n Allow,\n \"detects trivial casts which could be removed\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_NUMERIC_CASTS,\n Allow,\n \"detects trivial casts of numeric types which could be removed\"\n}\n\ndeclare_lint! {\n pub PRIVATE_IN_PUBLIC,\n Warn,\n \"detect private items in public interfaces not caught by the old implementation\"\n}\n\ndeclare_lint! {\n pub INACCESSIBLE_EXTERN_CRATE,\n Warn,\n \"use of inaccessible extern crate erroneously allowed\"\n}\n\ndeclare_lint! {\n pub INVALID_TYPE_PARAM_DEFAULT,\n Warn,\n \"type parameter default erroneously allowed in invalid location\"\n}\n\ndeclare_lint! {\n pub ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN,\n Warn,\n \"floating-point constants cannot be used in patterns\"\n}\n\ndeclare_lint! {\n pub ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN,\n Warn,\n \"constants of struct or enum type can only be used in a pattern if \\\n the struct or enum has `#[derive(PartialEq, Eq)]`\"\n}\n\ndeclare_lint! {\n pub MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,\n Deny,\n \"unit struct or enum variant erroneously allowed to match via path::ident(..)\"\n}\n\ndeclare_lint! {\n pub RAW_POINTER_DERIVE,\n Warn,\n \"uses of #[derive] with raw pointers are rarely correct\"\n}\n\ndeclare_lint! 
{\n pub TRANSMUTE_FROM_FN_ITEM_TYPES,\n Deny,\n \"transmute from function item type to pointer-sized type erroneously allowed\"\n}\n\ndeclare_lint! {\n pub HR_LIFETIME_IN_ASSOC_TYPE,\n Warn,\n \"binding for associated type references higher-ranked lifetime \\\n that does not appear in the trait input types\"\n}\n\ndeclare_lint! {\n pub OVERLAPPING_INHERENT_IMPLS,\n Warn,\n \"two overlapping inherent impls define an item with the same name were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub RENAMED_AND_REMOVED_LINTS,\n Warn,\n \"lints that have been renamed or removed\"\n}\n\ndeclare_lint! {\n pub SUPER_OR_SELF_IN_GLOBAL_PATH,\n Warn,\n \"detects super or self keywords at the beginning of global path\"\n}\n\ndeclare_lint! {\n pub UNSIZED_IN_TUPLE,\n Warn,\n \"unsized types in the interior of a tuple were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub OBJECT_UNSAFE_FRAGMENT,\n Warn,\n \"object-unsafe non-principal fragments in object types were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub LIFETIME_UNDERSCORE,\n Warn,\n \"lifetimes or labels named `'_` were erroneously allowed\"\n}\n\n\/\/\/ Does nothing as a lint pass, but registers some `Lint`s\n\/\/\/ which are used by other parts of the compiler.\n#[derive(Copy, Clone)]\npub struct HardwiredLints;\n\nimpl LintPass for HardwiredLints {\n fn get_lints(&self) -> LintArray {\n lint_array!(\n UNUSED_IMPORTS,\n UNUSED_EXTERN_CRATES,\n UNUSED_QUALIFICATIONS,\n UNKNOWN_LINTS,\n UNUSED_VARIABLES,\n UNUSED_ASSIGNMENTS,\n DEAD_CODE,\n UNREACHABLE_CODE,\n WARNINGS,\n UNUSED_FEATURES,\n STABLE_FEATURES,\n UNKNOWN_CRATE_TYPES,\n FAT_PTR_TRANSMUTES,\n TRIVIAL_CASTS,\n TRIVIAL_NUMERIC_CASTS,\n PRIVATE_IN_PUBLIC,\n INACCESSIBLE_EXTERN_CRATE,\n INVALID_TYPE_PARAM_DEFAULT,\n ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN,\n ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN,\n MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,\n CONST_ERR,\n RAW_POINTER_DERIVE,\n TRANSMUTE_FROM_FN_ITEM_TYPES,\n OVERLAPPING_INHERENT_IMPLS,\n RENAMED_AND_REMOVED_LINTS,\n SUPER_OR_SELF_IN_GLOBAL_PATH,\n UNSIZED_IN_TUPLE,\n OBJECT_UNSAFE_FRAGMENT,\n HR_LIFETIME_IN_ASSOC_TYPE,\n LIFETIME_UNDERSCORE\n )\n }\n}\n\nimpl LateLintPass for HardwiredLints {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Avoid unnecessary buffer requests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add iter() method and implement Drop trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Disable timer interrupt if zero status is cleared<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary newline<commit_after><|endoftext|>"} {"text":"<commit_before>use crate::prelude::*;\n\nfn codegen_print(fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>, msg: &str) {\n let puts = fx\n .module\n .declare_function(\n \"puts\",\n Linkage::Import,\n &Signature {\n call_conv: CallConv::triple_default(fx.triple()),\n params: vec![AbiParam::new(pointer_ty(fx.tcx))],\n returns: vec![],\n },\n )\n .unwrap();\n let puts = fx.module.declare_func_in_func(puts, &mut fx.bcx.func);\n #[cfg(debug_assertions)]\n {\n fx.add_entity_comment(puts, \"puts\");\n }\n\n let symbol_name = fx.tcx.symbol_name(fx.instance);\n let real_msg = format!(\"trap at {:?} ({}): {}\\0\", fx.instance, symbol_name, msg);\n let mut data_ctx = DataContext::new();\n data_ctx.define(real_msg.as_bytes().to_vec().into_boxed_slice());\n let msg_id = fx\n .module\n .declare_data(\n &(symbol_name.name.as_str().to_string() + msg),\n Linkage::Local,\n false,\n false,\n None,\n )\n .unwrap();\n\n \/\/ 
Ignore DuplicateDefinition error, as the data will be the same\n let _ = fx.module.define_data(msg_id, &data_ctx);\n\n let local_msg_id = fx.module.declare_data_in_func(msg_id, fx.bcx.func);\n #[cfg(debug_assertions)]\n {\n fx.add_entity_comment(local_msg_id, msg);\n }\n let msg_ptr = fx.bcx.ins().global_value(pointer_ty(fx.tcx), local_msg_id);\n fx.bcx.ins().call(puts, &[msg_ptr]);\n}\n\n\/\/\/ Use this when `rustc_codegen_llvm` would insert a call to the panic handler.\n\/\/\/\n\/\/\/ Trap code: user0\npub fn trap_panic(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n fx.bcx.ins().trap(TrapCode::User(0));\n}\n\n\/\/\/ Use this for example when a function call should never return. This will fill the current block,\n\/\/\/ so you can **not** add instructions to it afterwards.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unreachable(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);\n}\n\n\/\/\/ Use this when something is unimplemented, but `libcore` or `libstd` requires it to codegen.\n\/\/\/ Unlike `trap_unreachable` this will not fill the current block, so you **must** add instructions\n\/\/\/ to it afterwards.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unimplemented(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n let true_ = fx.bcx.ins().iconst(types::I32, 1);\n fx.bcx.ins().trapnz(true_, TrapCode::User(!0));\n}\n\n\/\/\/ Like `trap_unreachable` but returns a fake value of the specified type.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unreachable_ret_value<'tcx>(\n fx: &mut FunctionCx<'_, 'tcx, impl cranelift_module::Backend>,\n dest_layout: TyLayout<'tcx>,\n msg: impl AsRef<str>,\n) -> CValue<'tcx> {\n trap_unimplemented(fx, msg);\n CValue::by_ref(Pointer::const_addr(fx, 0), dest_layout)\n}\n<commit_msg>Hash trap message symbol nameThis saves ~7% of disk space<commit_after>use std::collections::hash_map::DefaultHasher;\nuse std::hash::{Hash, Hasher};\n\nuse crate::prelude::*;\n\nfn codegen_print(fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>, msg: &str) {\n let puts = fx\n .module\n .declare_function(\n \"puts\",\n Linkage::Import,\n &Signature {\n call_conv: CallConv::triple_default(fx.triple()),\n params: vec![AbiParam::new(pointer_ty(fx.tcx))],\n returns: vec![],\n },\n )\n .unwrap();\n let puts = fx.module.declare_func_in_func(puts, &mut fx.bcx.func);\n #[cfg(debug_assertions)]\n {\n fx.add_entity_comment(puts, \"puts\");\n }\n\n let symbol_name = fx.tcx.symbol_name(fx.instance);\n let real_msg = format!(\"trap at {:?} ({}): {}\\0\", fx.instance, symbol_name, msg);\n let mut hasher = DefaultHasher::new();\n real_msg.hash(&mut hasher);\n let msg_hash = hasher.finish();\n let mut data_ctx = DataContext::new();\n data_ctx.define(real_msg.as_bytes().to_vec().into_boxed_slice());\n let msg_id = fx\n .module\n .declare_data(\n &format!(\"__trap_{:08x}\", msg_hash),\n Linkage::Local,\n false,\n false,\n None,\n )\n .unwrap();\n\n \/\/ Ignore DuplicateDefinition error, as the data will be the same\n let _ = fx.module.define_data(msg_id, &data_ctx);\n\n let local_msg_id = fx.module.declare_data_in_func(msg_id, fx.bcx.func);\n #[cfg(debug_assertions)]\n {\n fx.add_entity_comment(local_msg_id, msg);\n }\n let msg_ptr = fx.bcx.ins().global_value(pointer_ty(fx.tcx), 
local_msg_id);\n fx.bcx.ins().call(puts, &[msg_ptr]);\n}\n\n\/\/\/ Use this when `rustc_codegen_llvm` would insert a call to the panic handler.\n\/\/\/\n\/\/\/ Trap code: user0\npub fn trap_panic(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n fx.bcx.ins().trap(TrapCode::User(0));\n}\n\n\/\/\/ Use this for example when a function call should never return. This will fill the current block,\n\/\/\/ so you can **not** add instructions to it afterwards.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unreachable(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);\n}\n\n\/\/\/ Use this when something is unimplemented, but `libcore` or `libstd` requires it to codegen.\n\/\/\/ Unlike `trap_unreachable` this will not fill the current block, so you **must** add instructions\n\/\/\/ to it afterwards.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unimplemented(\n fx: &mut FunctionCx<'_, '_, impl cranelift_module::Backend>,\n msg: impl AsRef<str>,\n) {\n codegen_print(fx, msg.as_ref());\n let true_ = fx.bcx.ins().iconst(types::I32, 1);\n fx.bcx.ins().trapnz(true_, TrapCode::User(!0));\n}\n\n\/\/\/ Like `trap_unreachable` but returns a fake value of the specified type.\n\/\/\/\n\/\/\/ Trap code: user65535\npub fn trap_unreachable_ret_value<'tcx>(\n fx: &mut FunctionCx<'_, 'tcx, impl cranelift_module::Backend>,\n dest_layout: TyLayout<'tcx>,\n msg: impl AsRef<str>,\n) -> CValue<'tcx> {\n trap_unimplemented(fx, msg);\n CValue::by_ref(Pointer::const_addr(fx, 0), dest_layout)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Equip Now hover<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Networking primitives for TCP\/UDP communication.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse prelude::v1::*;\n\nuse io::{self, Error, ErrorKind};\nuse sys_common::net as net_imp;\n\npub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope};\npub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};\npub use self::tcp::{TcpStream, TcpListener, Incoming};\npub use self::udp::UdpSocket;\npub use self::parser::AddrParseError;\n\nmod ip;\nmod addr;\nmod tcp;\nmod udp;\nmod parser;\n#[cfg(test)] mod test;\n\n\/\/\/ Possible values which can be passed to the `shutdown` method of `TcpStream`\n\/\/\/ and `UdpSocket`.\n#[derive(Copy, Clone, PartialEq, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Shutdown {\n \/\/\/ Indicates that the reading portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future reads will return `Ok(0)`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Read,\n \/\/\/ Indicates that the writing portion of this stream\/socket should be shut\n \/\/\/ down. 
All currently blocked and future writes will return an error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Write,\n \/\/\/ Shut down both the reading and writing portions of this stream.\n \/\/\/\n \/\/\/ See `Shutdown::Read` and `Shutdown::Write` for more information.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Both,\n}\n\n#[doc(hidden)]\ntrait NetInt {\n fn from_be(i: Self) -> Self;\n fn to_be(&self) -> Self;\n}\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl NetInt for $t {\n fn from_be(i: Self) -> Self { <$t>::from_be(i) }\n fn to_be(&self) -> Self { <$t>::to_be(*self) }\n })*)\n}\ndoit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }\n\nfn hton<I: NetInt>(i: I) -> I { i.to_be() }\nfn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }\n\nfn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>\n where F: FnMut(&SocketAddr) -> io::Result<T>\n{\n let mut last_err = None;\n for addr in try!(addr.to_socket_addrs()) {\n match f(&addr) {\n Ok(l) => return Ok(l),\n Err(e) => last_err = Some(e),\n }\n }\n Err(last_err.unwrap_or_else(|| {\n Error::new(ErrorKind::InvalidInput,\n \"could not resolve to any addresses\")\n }))\n}\n\n\/\/\/ An iterator over `SocketAddr` values returned from a host lookup operation.\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\npub struct LookupHost(net_imp::LookupHost);\n\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\nimpl Iterator for LookupHost {\n type Item = io::Result<SocketAddr>;\n fn next(&mut self) -> Option<io::Result<SocketAddr>> { self.0.next() }\n}\n\n\/\/\/ Resolve the host specified by `host` as a number of `SocketAddr` instances.\n\/\/\/\n\/\/\/ This method may perform a DNS query to resolve `host` and may also inspect\n\/\/\/ system configuration to resolve the specified hostname.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ # #![feature(lookup_host)]\n\/\/\/ use std::net;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ for host in try!(net::lookup_host(\"rust-lang.org\")) {\n\/\/\/ println!(\"found address: {}\", try!(host));\n\/\/\/ }\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\npub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n net_imp::lookup_host(host).map(LookupHost)\n}\n\n\/\/\/ Resolve the given address to a hostname.\n\/\/\/\n\/\/\/ This function may perform a DNS query to resolve `addr` and may also inspect\n\/\/\/ system configuration to resolve the specified address. If the address\n\/\/\/ cannot be resolved, it is returned in string format.\n#[unstable(feature = \"lookup_addr\", reason = \"recent addition\")]\npub fn lookup_addr(addr: &IpAddr) -> io::Result<String> {\n net_imp::lookup_addr(addr)\n}\n<commit_msg>Auto merge of #26457 - meqif:master, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Networking primitives for TCP\/UDP communication.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse prelude::v1::*;\n\nuse io::{self, Error, ErrorKind};\nuse sys_common::net as net_imp;\n\npub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope};\npub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};\npub use self::tcp::{TcpStream, TcpListener, Incoming};\npub use self::udp::UdpSocket;\npub use self::parser::AddrParseError;\n\nmod ip;\nmod addr;\nmod tcp;\nmod udp;\nmod parser;\n#[cfg(test)] mod test;\n\n\/\/\/ Possible values which can be passed to the `shutdown` method of `TcpStream`.\n#[derive(Copy, Clone, PartialEq, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Shutdown {\n \/\/\/ Indicates that the reading portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future reads will return `Ok(0)`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Read,\n \/\/\/ Indicates that the writing portion of this stream\/socket should be shut\n \/\/\/ down. All currently blocked and future writes will return an error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Write,\n \/\/\/ Shut down both the reading and writing portions of this stream.\n \/\/\/\n \/\/\/ See `Shutdown::Read` and `Shutdown::Write` for more information.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Both,\n}\n\n#[doc(hidden)]\ntrait NetInt {\n fn from_be(i: Self) -> Self;\n fn to_be(&self) -> Self;\n}\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl NetInt for $t {\n fn from_be(i: Self) -> Self { <$t>::from_be(i) }\n fn to_be(&self) -> Self { <$t>::to_be(*self) }\n })*)\n}\ndoit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }\n\nfn hton<I: NetInt>(i: I) -> I { i.to_be() }\nfn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }\n\nfn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>\n where F: FnMut(&SocketAddr) -> io::Result<T>\n{\n let mut last_err = None;\n for addr in try!(addr.to_socket_addrs()) {\n match f(&addr) {\n Ok(l) => return Ok(l),\n Err(e) => last_err = Some(e),\n }\n }\n Err(last_err.unwrap_or_else(|| {\n Error::new(ErrorKind::InvalidInput,\n \"could not resolve to any addresses\")\n }))\n}\n\n\/\/\/ An iterator over `SocketAddr` values returned from a host lookup operation.\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\npub struct LookupHost(net_imp::LookupHost);\n\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\nimpl Iterator for LookupHost {\n type Item = io::Result<SocketAddr>;\n fn next(&mut self) -> Option<io::Result<SocketAddr>> { self.0.next() }\n}\n\n\/\/\/ Resolve the host specified by `host` as a number of `SocketAddr` instances.\n\/\/\/\n\/\/\/ This method may perform a DNS query to resolve `host` and may also inspect\n\/\/\/ system configuration to resolve the specified hostname.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ # #![feature(lookup_host)]\n\/\/\/ use std::net;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ for host in try!(net::lookup_host(\"rust-lang.org\")) {\n\/\/\/ println!(\"found address: {}\", try!(host));\n\/\/\/ }\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[unstable(feature = \"lookup_host\", reason = \"unsure about the returned \\\n iterator and returning socket \\\n addresses\")]\npub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n 
net_imp::lookup_host(host).map(LookupHost)\n}\n\n\/\/\/ Resolve the given address to a hostname.\n\/\/\/\n\/\/\/ This function may perform a DNS query to resolve `addr` and may also inspect\n\/\/\/ system configuration to resolve the specified address. If the address\n\/\/\/ cannot be resolved, it is returned in string format.\n#[unstable(feature = \"lookup_addr\", reason = \"recent addition\")]\npub fn lookup_addr(addr: &IpAddr) -> io::Result<String> {\n net_imp::lookup_addr(addr)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add benchmark for #844<commit_after>\/\/! for #844\n#![feature(test)]\nextern crate racer_testutils;\nextern crate test;\nuse test::Bencher;\n\nuse racer_testutils::*;\n\n#[bench]\nfn glob_imports5(b: &mut Bencher) {\n let src = r\"\nuse a::*;\nuse b::*;\nuse c::*;\nuse d::*;\nuse e::*;\npub fn foo() -> () {\n Whatever::~\n}\n\";\n let mut var = vec![];\n b.iter(|| {\n var = get_all_completions(src, None);\n })\n}\n\n#[bench]\nfn glob_imports6(b: &mut Bencher) {\n let src = r\"\nuse a::*;\nuse b::*;\nuse c::*;\nuse d::*;\nuse e::*;\nuse f::*;\npub fn foo() -> () {\n Whatever::~\n}\n\";\n let mut var = vec![];\n b.iter(|| {\n var = get_all_completions(src, None);\n })\n}\n\n#[bench]\nfn glob_imports7(b: &mut Bencher) {\n let src = r\"\nuse a::*;\nuse b::*;\nuse c::*;\nuse d::*;\nuse e::*;\nuse f::*;\nuse g::*;\npub fn foo() -> () {\n Whatever::~\n}\n\";\n let mut var = vec![];\n b.iter(|| {\n var = get_all_completions(src, None);\n })\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ See LICENSE file for copyright and license details.\n\n#![feature(core, collections, box_syntax)] \/\/ TODO\n\nextern crate cgmath;\nextern crate rand;\nextern crate common;\n\npub mod geom;\npub mod map;\npub mod ai;\npub mod command;\npub mod player;\npub mod unit;\npub mod object;\npub mod dir;\npub mod game_state;\npub mod core;\npub mod pathfinder;\npub mod fov;\n\nmod fow;\nmod internal_state;\n\n\/\/ vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:\n<commit_msg>core\/lib.rs: Made 'object', 'ai', 'player' and 'fov' private<commit_after>\/\/ See LICENSE file for copyright and license details.\n\n#![feature(core, collections, box_syntax)] \/\/ TODO\n\nextern crate cgmath;\nextern crate rand;\nextern crate common;\n\npub mod geom;\npub mod map;\npub mod command;\npub mod unit;\npub mod dir;\npub mod game_state;\npub mod core;\npub mod pathfinder;\n\nmod object;\nmod ai;\nmod player;\nmod fov;\nmod fow;\nmod internal_state;\n\n\/\/ vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:\n<|endoftext|>"} {"text":"<commit_before>use toml::Table;\n\n\/**\n * EntryHeader\n *\n * This is basically a wrapper around toml::Table which provides convenience to the user of the\n * librray.\n *\/\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\nimpl EntryHeader {\n\n \/**\n * Get a new header object with a already-filled toml table\n *\/\n pub fn new(toml: Table) -> EntryHeader {\n EntryHeader {\n toml: toml,\n }\n }\n\n \/**\n * Get the table which lives in the background\n *\/\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n}\n<commit_msg>Add: EntryHeader::parse()<commit_after>use std::error::Error;\nuse std::result::Result as RResult;\n\nuse toml::Table;\n\npub mod error {\n use std::fmt::{Debug, Display, Formatter};\n use std::fmt;\n use std::error::Error;\n use toml;\n\n #[derive(Clone)]\n pub enum ParserErrorKind {\n TOMLParserErrors,\n MissingMainSection,\n }\n\n pub struct ParserError {\n kind: ParserErrorKind,\n cause: Option<Box<Error>>,\n 
}\n\n impl ParserError {\n\n pub fn new(k: ParserErrorKind, cause: Option<Box<Error>>) -> ParserError {\n ParserError {\n kind: k,\n cause: cause,\n }\n }\n\n }\n\n impl Debug for ParserError {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{:?}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Display for ParserError {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Error for ParserError {\n\n fn description(&self) -> &str {\n match self.kind {\n ParserErrorKind::MissingMainSection => \"Missing main section\",\n ParserErrorKind::TOMLParserErrors => \"Several TOML-Parser-Errors\",\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n}\n\n\nuse self::error::ParserErrorKind;\nuse self::error::ParserError;\n\n\/**\n * EntryHeader\n *\n * This is basically a wrapper around toml::Table which provides convenience to the user of the\n * librray.\n *\/\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\npub type Result<V> = RResult<V, error::ParserError>;\n\n\/**\n * Wrapper type around file header (TOML) object\n *\/\nimpl EntryHeader {\n\n \/**\n * Get a new header object with a already-filled toml table\n *\/\n pub fn new(toml: Table) -> EntryHeader {\n EntryHeader {\n toml: toml,\n }\n }\n\n \/**\n * Get the table which lives in the background\n *\/\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n pub fn parse(s: &str) -> Result<EntryHeader> {\n use toml::Parser;\n\n let mut parser = Parser::new(s);\n parser.parse()\n .ok_or(ParserError::new(ParserErrorKind::TOMLParserErrors, None))\n .map(|table| EntryHeader::new(table))\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add EMR integration tests<commit_after>#![cfg(feature = \"emr\")]\n\nextern crate rusoto;\n\nuse rusoto::emr::{EmrClient, ListClustersInput, DescribeJobFlowsInput, DescribeJobFlowsError};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_list_clusters() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = EmrClient::new(credentials, Region::UsEast1);\n\n let request = ListClustersInput::default();\n\n match client.list_clusters(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true)\n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n#[test]\nfn should_handle_deprecation_gracefully() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = EmrClient::new(credentials, Region::UsEast1);\n\n let request = DescribeJobFlowsInput::default();\n\n match client.describe_job_flows(&request) {\n Err(DescribeJobFlowsError::Validation(msg)) => assert!(msg.contains(\"DescribeJobFlows API is deprecated.\")),\n err @ _ => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>panic!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add simple integration test<commit_after>extern crate time;\nextern crate sqlite3;\n\nuse time::Timespec;\n\n\nuse sqlite3::{DatabaseConnection, Row, Error, Done, SqliteResult};\n\n#[deriving(Show)]\nstruct Person {\n id: i32,\n name: String,\n time_created: Timespec,\n \/\/ TODO: data: Option<Vec<u8>>\n}\n\npub fn main() {\n println!(\"hello!\");\n match io() {\n Ok(_) => (),\n Err(oops) => fail!(oops)\n }\n}\n\nfn io() -> SqliteResult<()> {\n let mut conn = try!(DatabaseConnection::new());\n\n 
try!(conn.exec(\"CREATE TABLE person (\n id SERIAL PRIMARY KEY,\n name VARCHAR NOT NULL,\n time_created TIMESTAMP NOT NULL\n )\"));\n println!(\"created\");\n\n let me = Person {\n id: 0,\n name: \"Dan\".to_string(),\n time_created: time::get_time(),\n };\n {\n let mut tx = try!(conn.prepare(\"INSERT INTO person (name, time_created)\n VALUES ($1, $2)\"));\n let changes = try!(tx.update([&me.name, &me.time_created]));\n println!(\"inserted {} {}\", changes, me);\n }\n\n let mut stmt = try!(conn.prepare(\"SELECT id, name, time_created FROM person\"));\n let mut rows = try!(stmt.query([]));\n println!(\"selecting\");\n loop {\n match rows.step() {\n Row(ref mut row) => {\n println!(\"type of row 0: {}\", row.column_type(0));\n println!(\"type of row 1: {}\", row.column_type(1));\n println!(\"type of row 2: {}\", row.column_type(2));\n println!(\"text of row 2: {}\", row.column_text(2));\n\n let person = Person {\n id: row.get(0u),\n name: row.get(1u),\n time_created: row.get(2u)\n };\n println!(\"Found person {}\", person);\n },\n Error(oops) => return Err(oops),\n Done(_) => break\n }\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>shadowing in pattern matching done<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Character manipulation.\n\/\/!\n\/\/! For more details, see ::rustc_unicode::char (a.k.a. 
std::char)\n\n#![allow(non_snake_case)]\n#![doc(primitive = \"char\")]\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse iter::Iterator;\nuse mem::transmute;\nuse option::Option::{None, Some};\nuse option::Option;\nuse slice::SliceExt;\n\n\/\/ UTF-8 ranges and tags for encoding characters\nconst TAG_CONT: u8 = 0b1000_0000;\nconst TAG_TWO_B: u8 = 0b1100_0000;\nconst TAG_THREE_B: u8 = 0b1110_0000;\nconst TAG_FOUR_B: u8 = 0b1111_0000;\nconst MAX_ONE_B: u32 = 0x80;\nconst MAX_TWO_B: u32 = 0x800;\nconst MAX_THREE_B: u32 = 0x10000;\n\n\/*\n Lu Uppercase_Letter an uppercase letter\n Ll Lowercase_Letter a lowercase letter\n Lt Titlecase_Letter a digraphic character, with first part uppercase\n Lm Modifier_Letter a modifier letter\n Lo Other_Letter other letters, including syllables and ideographs\n Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)\n Mc Spacing_Mark a spacing combining mark (positive advance width)\n Me Enclosing_Mark an enclosing combining mark\n Nd Decimal_Number a decimal digit\n Nl Letter_Number a letterlike numeric character\n No Other_Number a numeric character of other type\n Pc Connector_Punctuation a connecting punctuation mark, like a tie\n Pd Dash_Punctuation a dash or hyphen punctuation mark\n Ps Open_Punctuation an opening punctuation mark (of a pair)\n Pe Close_Punctuation a closing punctuation mark (of a pair)\n Pi Initial_Punctuation an initial quotation mark\n Pf Final_Punctuation a final quotation mark\n Po Other_Punctuation a punctuation mark of other type\n Sm Math_Symbol a symbol of primarily mathematical use\n Sc Currency_Symbol a currency sign\n Sk Modifier_Symbol a non-letterlike modifier symbol\n So Other_Symbol a symbol of other type\n Zs Space_Separator a space character (of various non-zero widths)\n Zl Line_Separator U+2028 LINE SEPARATOR only\n Zp Paragraph_Separator U+2029 PARAGRAPH SEPARATOR only\n Cc Control a C0 or C1 control code\n Cf Format a format control character\n Cs Surrogate a surrogate code point\n Co Private_Use a private-use character\n Cn Unassigned a reserved unassigned code point or a noncharacter\n*\/\n\n\/\/\/ The highest valid code point\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub const MAX: char = '\\u{10ffff}';\n\n\/\/\/ Converts a `u32` to an `Option<char>`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::char;\n\/\/\/\n\/\/\/ assert_eq!(char::from_u32(0x2764), Some('❤'));\n\/\/\/ assert_eq!(char::from_u32(0x110000), None); \/\/ invalid character\n\/\/\/ ```\n#[inline]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn from_u32(i: u32) -> Option<char> {\n \/\/ catch out-of-bounds and surrogates\n if (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF) {\n None\n } else {\n Some(unsafe { transmute(i) })\n }\n}\n\n\/\/\/ Converts a number to the character representing it.\n\/\/\/\n\/\/\/ # Return value\n\/\/\/\n\/\/\/ Returns `Some(char)` if `num` represents one digit under `radix`,\n\/\/\/ using one character of `0-9` or `a-z`, or `None` if it doesn't.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if given an `radix` > 36.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::char;\n\/\/\/\n\/\/\/ let c = char::from_digit(4, 10);\n\/\/\/\n\/\/\/ assert_eq!(c, Some('4'));\n\/\/\/ ```\n#[inline]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn from_digit(num: u32, radix: u32) -> Option<char> {\n if radix > 36 {\n panic!(\"from_digit: radix is too high (maximum 36)\");\n }\n if num < radix {\n unsafe {\n if num < 10 {\n Some(transmute('0' as u32 + 
num))\n } else {\n Some(transmute('a' as u32 + num - 10))\n }\n }\n } else {\n None\n }\n}\n\n\/\/ NB: the stabilization and documentation for this trait is in\n\/\/ unicode\/char.rs, not here\n#[allow(missing_docs)] \/\/ docs in libunicode\/u_char.rs\n#[doc(hidden)]\n#[unstable(feature = \"core_char_ext\",\n reason = \"the stable interface is `impl char` in later crate\")]\npub trait CharExt {\n fn is_digit(self, radix: u32) -> bool;\n fn to_digit(self, radix: u32) -> Option<u32>;\n fn escape_unicode(self) -> EscapeUnicode;\n fn escape_default(self) -> EscapeDefault;\n fn len_utf8(self) -> usize;\n fn len_utf16(self) -> usize;\n fn encode_utf8(self, dst: &mut [u8]) -> Option<usize>;\n fn encode_utf16(self, dst: &mut [u16]) -> Option<usize>;\n}\n\nimpl CharExt for char {\n #[inline]\n fn is_digit(self, radix: u32) -> bool {\n self.to_digit(radix).is_some()\n }\n\n #[inline]\n fn to_digit(self, radix: u32) -> Option<u32> {\n if radix > 36 {\n panic!(\"to_digit: radix is too high (maximum 36)\");\n }\n let val = match self {\n '0' ... '9' => self as u32 - '0' as u32,\n 'a' ... 'z' => self as u32 - 'a' as u32 + 10,\n 'A' ... 'Z' => self as u32 - 'A' as u32 + 10,\n _ => return None,\n };\n if val < radix { Some(val) }\n else { None }\n }\n\n #[inline]\n fn escape_unicode(self) -> EscapeUnicode {\n EscapeUnicode { c: self, state: EscapeUnicodeState::Backslash }\n }\n\n #[inline]\n fn escape_default(self) -> EscapeDefault {\n let init_state = match self {\n '\\t' => EscapeDefaultState::Backslash('t'),\n '\\r' => EscapeDefaultState::Backslash('r'),\n '\\n' => EscapeDefaultState::Backslash('n'),\n '\\\\' => EscapeDefaultState::Backslash('\\\\'),\n '\\'' => EscapeDefaultState::Backslash('\\''),\n '\"' => EscapeDefaultState::Backslash('\"'),\n '\\x20' ... '\\x7e' => EscapeDefaultState::Char(self),\n _ => EscapeDefaultState::Unicode(self.escape_unicode())\n };\n EscapeDefault { state: init_state }\n }\n\n #[inline]\n fn len_utf8(self) -> usize {\n let code = self as u32;\n if code < MAX_ONE_B {\n 1\n } else if code < MAX_TWO_B {\n 2\n } else if code < MAX_THREE_B {\n 3\n } else {\n 4\n }\n }\n\n #[inline]\n fn len_utf16(self) -> usize {\n let ch = self as u32;\n if (ch & 0xFFFF) == ch { 1 } else { 2 }\n }\n\n #[inline]\n fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> {\n encode_utf8_raw(self as u32, dst)\n }\n\n #[inline]\n fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> {\n encode_utf16_raw(self as u32, dst)\n }\n}\n\n\/\/\/ Encodes a raw u32 value as UTF-8 into the provided byte buffer,\n\/\/\/ and then returns the number of bytes written.\n\/\/\/\n\/\/\/ If the buffer is not large enough, nothing will be written into it\n\/\/\/ and a `None` will be returned.\n#[inline]\n#[unstable(feature = \"char_internals\",\n reason = \"this function should not be exposed publicly\")]\npub fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> Option<usize> {\n \/\/ Marked #[inline] to allow llvm optimizing it away\n if code < MAX_ONE_B && !dst.is_empty() {\n dst[0] = code as u8;\n Some(1)\n } else if code < MAX_TWO_B && dst.len() >= 2 {\n dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;\n dst[1] = (code & 0x3F) as u8 | TAG_CONT;\n Some(2)\n } else if code < MAX_THREE_B && dst.len() >= 3 {\n dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;\n dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n dst[2] = (code & 0x3F) as u8 | TAG_CONT;\n Some(3)\n } else if dst.len() >= 4 {\n dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;\n dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n dst[2] = (code >> 6 & 0x3F) as u8 | 
TAG_CONT;\n dst[3] = (code & 0x3F) as u8 | TAG_CONT;\n Some(4)\n } else {\n None\n }\n}\n\n\/\/\/ Encodes a raw u32 value as UTF-16 into the provided `u16` buffer,\n\/\/\/ and then returns the number of `u16`s written.\n\/\/\/\n\/\/\/ If the buffer is not large enough, nothing will be written into it\n\/\/\/ and a `None` will be returned.\n#[inline]\n#[unstable(feature = \"char_internals\",\n reason = \"this function should not be exposed publicly\")]\npub fn encode_utf16_raw(mut ch: u32, dst: &mut [u16]) -> Option<usize> {\n \/\/ Marked #[inline] to allow llvm optimizing it away\n if (ch & 0xFFFF) == ch && !dst.is_empty() {\n \/\/ The BMP falls through (assuming non-surrogate, as it should)\n dst[0] = ch as u16;\n Some(1)\n } else if dst.len() >= 2 {\n \/\/ Supplementary planes break into surrogates.\n ch -= 0x1_0000;\n dst[0] = 0xD800 | ((ch >> 10) as u16);\n dst[1] = 0xDC00 | ((ch as u16) & 0x3FF);\n Some(2)\n } else {\n None\n }\n}\n\n\/\/\/ An iterator over the characters that represent a `char`, as escaped by\n\/\/\/ Rust's unicode escaping rules.\n#[derive(Clone)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct EscapeUnicode {\n c: char,\n state: EscapeUnicodeState\n}\n\n#[derive(Clone)]\nenum EscapeUnicodeState {\n Backslash,\n Type,\n LeftBrace,\n Value(usize),\n RightBrace,\n Done,\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Iterator for EscapeUnicode {\n type Item = char;\n\n fn next(&mut self) -> Option<char> {\n match self.state {\n EscapeUnicodeState::Backslash => {\n self.state = EscapeUnicodeState::Type;\n Some('\\\\')\n }\n EscapeUnicodeState::Type => {\n self.state = EscapeUnicodeState::LeftBrace;\n Some('u')\n }\n EscapeUnicodeState::LeftBrace => {\n let mut n = 0;\n while (self.c as u32) >> (4 * (n + 1)) != 0 {\n n += 1;\n }\n self.state = EscapeUnicodeState::Value(n);\n Some('{')\n }\n EscapeUnicodeState::Value(offset) => {\n let v = match ((self.c as i32) >> (offset * 4)) & 0xf {\n i @ 0 ... 9 => '0' as i32 + i,\n i => 'a' as i32 + (i - 10)\n };\n if offset == 0 {\n self.state = EscapeUnicodeState::RightBrace;\n } else {\n self.state = EscapeUnicodeState::Value(offset - 1);\n }\n Some(unsafe { transmute(v) })\n }\n EscapeUnicodeState::RightBrace => {\n self.state = EscapeUnicodeState::Done;\n Some('}')\n }\n EscapeUnicodeState::Done => None,\n }\n }\n}\n\n\/\/\/ An iterator over the characters that represent a `char`, escaped\n\/\/\/ for maximum portability.\n#[derive(Clone)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct EscapeDefault {\n state: EscapeDefaultState\n}\n\n#[derive(Clone)]\nenum EscapeDefaultState {\n Backslash(char),\n Char(char),\n Done,\n Unicode(EscapeUnicode),\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Iterator for EscapeDefault {\n type Item = char;\n\n fn next(&mut self) -> Option<char> {\n match self.state {\n EscapeDefaultState::Backslash(c) => {\n self.state = EscapeDefaultState::Char(c);\n Some('\\\\')\n }\n EscapeDefaultState::Char(c) => {\n self.state = EscapeDefaultState::Done;\n Some(c)\n }\n EscapeDefaultState::Done => None,\n EscapeDefaultState::Unicode(ref mut iter) => iter.next()\n }\n }\n}\n<commit_msg>std: Update stable since for `core::char`<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Character manipulation.\n\/\/!\n\/\/! For more details, see ::rustc_unicode::char (a.k.a. std::char)\n\n#![allow(non_snake_case)]\n#![doc(primitive = \"char\")]\n#![stable(feature = \"core_char\", since = \"1.2.0\")]\n\nuse iter::Iterator;\nuse mem::transmute;\nuse option::Option::{None, Some};\nuse option::Option;\nuse slice::SliceExt;\n\n\/\/ UTF-8 ranges and tags for encoding characters\nconst TAG_CONT: u8 = 0b1000_0000;\nconst TAG_TWO_B: u8 = 0b1100_0000;\nconst TAG_THREE_B: u8 = 0b1110_0000;\nconst TAG_FOUR_B: u8 = 0b1111_0000;\nconst MAX_ONE_B: u32 = 0x80;\nconst MAX_TWO_B: u32 = 0x800;\nconst MAX_THREE_B: u32 = 0x10000;\n\n\/*\n Lu Uppercase_Letter an uppercase letter\n Ll Lowercase_Letter a lowercase letter\n Lt Titlecase_Letter a digraphic character, with first part uppercase\n Lm Modifier_Letter a modifier letter\n Lo Other_Letter other letters, including syllables and ideographs\n Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)\n Mc Spacing_Mark a spacing combining mark (positive advance width)\n Me Enclosing_Mark an enclosing combining mark\n Nd Decimal_Number a decimal digit\n Nl Letter_Number a letterlike numeric character\n No Other_Number a numeric character of other type\n Pc Connector_Punctuation a connecting punctuation mark, like a tie\n Pd Dash_Punctuation a dash or hyphen punctuation mark\n Ps Open_Punctuation an opening punctuation mark (of a pair)\n Pe Close_Punctuation a closing punctuation mark (of a pair)\n Pi Initial_Punctuation an initial quotation mark\n Pf Final_Punctuation a final quotation mark\n Po Other_Punctuation a punctuation mark of other type\n Sm Math_Symbol a symbol of primarily mathematical use\n Sc Currency_Symbol a currency sign\n Sk Modifier_Symbol a non-letterlike modifier symbol\n So Other_Symbol a symbol of other type\n Zs Space_Separator a space character (of various non-zero widths)\n Zl Line_Separator U+2028 LINE SEPARATOR only\n Zp Paragraph_Separator U+2029 PARAGRAPH SEPARATOR only\n Cc Control a C0 or C1 control code\n Cf Format a format control character\n Cs Surrogate a surrogate code point\n Co Private_Use a private-use character\n Cn Unassigned a reserved unassigned code point or a noncharacter\n*\/\n\n\/\/\/ The highest valid code point\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub const MAX: char = '\\u{10ffff}';\n\n\/\/\/ Converts a `u32` to an `Option<char>`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::char;\n\/\/\/\n\/\/\/ assert_eq!(char::from_u32(0x2764), Some('❤'));\n\/\/\/ assert_eq!(char::from_u32(0x110000), None); \/\/ invalid character\n\/\/\/ ```\n#[inline]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn from_u32(i: u32) -> Option<char> {\n \/\/ catch out-of-bounds and surrogates\n if (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF) {\n None\n } else {\n Some(unsafe { transmute(i) })\n }\n}\n\n\/\/\/ Converts a number to the character representing it.\n\/\/\/\n\/\/\/ # Return value\n\/\/\/\n\/\/\/ Returns `Some(char)` if `num` represents one digit under `radix`,\n\/\/\/ using one character of `0-9` or 
`a-z`, or `None` if it doesn't.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if given an `radix` > 36.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::char;\n\/\/\/\n\/\/\/ let c = char::from_digit(4, 10);\n\/\/\/\n\/\/\/ assert_eq!(c, Some('4'));\n\/\/\/ ```\n#[inline]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn from_digit(num: u32, radix: u32) -> Option<char> {\n if radix > 36 {\n panic!(\"from_digit: radix is too high (maximum 36)\");\n }\n if num < radix {\n unsafe {\n if num < 10 {\n Some(transmute('0' as u32 + num))\n } else {\n Some(transmute('a' as u32 + num - 10))\n }\n }\n } else {\n None\n }\n}\n\n\/\/ NB: the stabilization and documentation for this trait is in\n\/\/ unicode\/char.rs, not here\n#[allow(missing_docs)] \/\/ docs in libunicode\/u_char.rs\n#[doc(hidden)]\n#[unstable(feature = \"core_char_ext\",\n reason = \"the stable interface is `impl char` in later crate\")]\npub trait CharExt {\n fn is_digit(self, radix: u32) -> bool;\n fn to_digit(self, radix: u32) -> Option<u32>;\n fn escape_unicode(self) -> EscapeUnicode;\n fn escape_default(self) -> EscapeDefault;\n fn len_utf8(self) -> usize;\n fn len_utf16(self) -> usize;\n fn encode_utf8(self, dst: &mut [u8]) -> Option<usize>;\n fn encode_utf16(self, dst: &mut [u16]) -> Option<usize>;\n}\n\nimpl CharExt for char {\n #[inline]\n fn is_digit(self, radix: u32) -> bool {\n self.to_digit(radix).is_some()\n }\n\n #[inline]\n fn to_digit(self, radix: u32) -> Option<u32> {\n if radix > 36 {\n panic!(\"to_digit: radix is too high (maximum 36)\");\n }\n let val = match self {\n '0' ... '9' => self as u32 - '0' as u32,\n 'a' ... 'z' => self as u32 - 'a' as u32 + 10,\n 'A' ... 'Z' => self as u32 - 'A' as u32 + 10,\n _ => return None,\n };\n if val < radix { Some(val) }\n else { None }\n }\n\n #[inline]\n fn escape_unicode(self) -> EscapeUnicode {\n EscapeUnicode { c: self, state: EscapeUnicodeState::Backslash }\n }\n\n #[inline]\n fn escape_default(self) -> EscapeDefault {\n let init_state = match self {\n '\\t' => EscapeDefaultState::Backslash('t'),\n '\\r' => EscapeDefaultState::Backslash('r'),\n '\\n' => EscapeDefaultState::Backslash('n'),\n '\\\\' => EscapeDefaultState::Backslash('\\\\'),\n '\\'' => EscapeDefaultState::Backslash('\\''),\n '\"' => EscapeDefaultState::Backslash('\"'),\n '\\x20' ... 
'\\x7e' => EscapeDefaultState::Char(self),\n _ => EscapeDefaultState::Unicode(self.escape_unicode())\n };\n EscapeDefault { state: init_state }\n }\n\n #[inline]\n fn len_utf8(self) -> usize {\n let code = self as u32;\n if code < MAX_ONE_B {\n 1\n } else if code < MAX_TWO_B {\n 2\n } else if code < MAX_THREE_B {\n 3\n } else {\n 4\n }\n }\n\n #[inline]\n fn len_utf16(self) -> usize {\n let ch = self as u32;\n if (ch & 0xFFFF) == ch { 1 } else { 2 }\n }\n\n #[inline]\n fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> {\n encode_utf8_raw(self as u32, dst)\n }\n\n #[inline]\n fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> {\n encode_utf16_raw(self as u32, dst)\n }\n}\n\n\/\/\/ Encodes a raw u32 value as UTF-8 into the provided byte buffer,\n\/\/\/ and then returns the number of bytes written.\n\/\/\/\n\/\/\/ If the buffer is not large enough, nothing will be written into it\n\/\/\/ and a `None` will be returned.\n#[inline]\n#[unstable(feature = \"char_internals\",\n reason = \"this function should not be exposed publicly\")]\n#[doc(hidden)]\npub fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> Option<usize> {\n \/\/ Marked #[inline] to allow llvm optimizing it away\n if code < MAX_ONE_B && !dst.is_empty() {\n dst[0] = code as u8;\n Some(1)\n } else if code < MAX_TWO_B && dst.len() >= 2 {\n dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;\n dst[1] = (code & 0x3F) as u8 | TAG_CONT;\n Some(2)\n } else if code < MAX_THREE_B && dst.len() >= 3 {\n dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;\n dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n dst[2] = (code & 0x3F) as u8 | TAG_CONT;\n Some(3)\n } else if dst.len() >= 4 {\n dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;\n dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n dst[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n dst[3] = (code & 0x3F) as u8 | TAG_CONT;\n Some(4)\n } else {\n None\n }\n}\n\n\/\/\/ Encodes a raw u32 value as UTF-16 into the provided `u16` buffer,\n\/\/\/ and then returns the number of `u16`s written.\n\/\/\/\n\/\/\/ If the buffer is not large enough, nothing will be written into it\n\/\/\/ and a `None` will be returned.\n#[inline]\n#[unstable(feature = \"char_internals\",\n reason = \"this function should not be exposed publicly\")]\n#[doc(hidden)]\npub fn encode_utf16_raw(mut ch: u32, dst: &mut [u16]) -> Option<usize> {\n \/\/ Marked #[inline] to allow llvm optimizing it away\n if (ch & 0xFFFF) == ch && !dst.is_empty() {\n \/\/ The BMP falls through (assuming non-surrogate, as it should)\n dst[0] = ch as u16;\n Some(1)\n } else if dst.len() >= 2 {\n \/\/ Supplementary planes break into surrogates.\n ch -= 0x1_0000;\n dst[0] = 0xD800 | ((ch >> 10) as u16);\n dst[1] = 0xDC00 | ((ch as u16) & 0x3FF);\n Some(2)\n } else {\n None\n }\n}\n\n\/\/\/ An iterator over the characters that represent a `char`, as escaped by\n\/\/\/ Rust's unicode escaping rules.\n#[derive(Clone)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct EscapeUnicode {\n c: char,\n state: EscapeUnicodeState\n}\n\n#[derive(Clone)]\nenum EscapeUnicodeState {\n Backslash,\n Type,\n LeftBrace,\n Value(usize),\n RightBrace,\n Done,\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Iterator for EscapeUnicode {\n type Item = char;\n\n fn next(&mut self) -> Option<char> {\n match self.state {\n EscapeUnicodeState::Backslash => {\n self.state = EscapeUnicodeState::Type;\n Some('\\\\')\n }\n EscapeUnicodeState::Type => {\n self.state = EscapeUnicodeState::LeftBrace;\n Some('u')\n }\n EscapeUnicodeState::LeftBrace => {\n let mut n = 
0;\n while (self.c as u32) >> (4 * (n + 1)) != 0 {\n n += 1;\n }\n self.state = EscapeUnicodeState::Value(n);\n Some('{')\n }\n EscapeUnicodeState::Value(offset) => {\n let v = match ((self.c as i32) >> (offset * 4)) & 0xf {\n i @ 0 ... 9 => '0' as i32 + i,\n i => 'a' as i32 + (i - 10)\n };\n if offset == 0 {\n self.state = EscapeUnicodeState::RightBrace;\n } else {\n self.state = EscapeUnicodeState::Value(offset - 1);\n }\n Some(unsafe { transmute(v) })\n }\n EscapeUnicodeState::RightBrace => {\n self.state = EscapeUnicodeState::Done;\n Some('}')\n }\n EscapeUnicodeState::Done => None,\n }\n }\n}\n\n\/\/\/ An iterator over the characters that represent a `char`, escaped\n\/\/\/ for maximum portability.\n#[derive(Clone)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct EscapeDefault {\n state: EscapeDefaultState\n}\n\n#[derive(Clone)]\nenum EscapeDefaultState {\n Backslash(char),\n Char(char),\n Done,\n Unicode(EscapeUnicode),\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Iterator for EscapeDefault {\n type Item = char;\n\n fn next(&mut self) -> Option<char> {\n match self.state {\n EscapeDefaultState::Backslash(c) => {\n self.state = EscapeDefaultState::Char(c);\n Some('\\\\')\n }\n EscapeDefaultState::Char(c) => {\n self.state = EscapeDefaultState::Done;\n Some(c)\n }\n EscapeDefaultState::Done => None,\n EscapeDefaultState::Unicode(ref mut iter) => iter.next()\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test suite for nested subcommands (texitoi\/structopt#1)<commit_after>\/\/ Copyright (c) 2017 Guillaume Pinot <texitoi(a)texitoi.eu>\n\/\/\n\/\/ This work is free. You can redistribute it and\/or modify it under\n\/\/ the terms of the Do What The Fuck You Want To Public License,\n\/\/ Version 2, as published by Sam Hocevar. 
See the COPYING file for\n\/\/ more details.\n\nextern crate structopt;\n#[macro_use] extern crate structopt_derive;\n\nuse structopt::StructOpt;\n\n#[derive(StructOpt, PartialEq, Debug)]\nstruct Opt {\n #[structopt(short = \"f\", long = \"force\")]\n force: bool,\n #[structopt(short = \"v\", long = \"verbose\")]\n verbose: u64,\n #[structopt(subcommand)]\n cmd: Sub\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\nenum Sub {\n #[structopt(name = \"fetch\")]\n Fetch {},\n #[structopt(name = \"add\")]\n Add {}\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\nstruct Opt2 {\n #[structopt(short = \"f\", long = \"force\")]\n force: bool,\n #[structopt(short = \"v\", long = \"verbose\")]\n verbose: u64,\n #[structopt(subcommand)]\n cmd: Option<Sub>\n}\n\n#[test]\nfn test_no_cmd() {\n let result = Opt::clap().get_matches_from_safe(&[\"test\"]);\n assert!(result.is_err());\n\n assert_eq!(Opt2 { force: false, verbose: 0, cmd: None },\n Opt2::from_clap(Opt2::clap().get_matches_from(&[\"test\"])));\n}\n\n#[test]\nfn test_fetch() {\n assert_eq!(Opt { force: false, verbose: 3, cmd: Sub::Fetch {} },\n Opt::from_clap(Opt::clap().get_matches_from(&[\"test\", \"-vvv\", \"fetch\"])));\n assert_eq!(Opt { force: true, verbose: 0, cmd: Sub::Fetch {} },\n Opt::from_clap(Opt::clap().get_matches_from(&[\"test\", \"--force\", \"fetch\"])));\n}\n\n#[test]\nfn test_add() {\n assert_eq!(Opt { force: false, verbose: 0, cmd: Sub::Add {} },\n Opt::from_clap(Opt::clap().get_matches_from(&[\"test\", \"add\"])));\n assert_eq!(Opt { force: false, verbose: 2, cmd: Sub::Add {} },\n Opt::from_clap(Opt::clap().get_matches_from(&[\"test\", \"-vv\", \"add\"])));\n}\n\n#[test]\nfn test_badinput() {\n let result = Opt::clap().get_matches_from_safe(&[\"test\", \"badcmd\"]);\n assert!(result.is_err());\n let result = Opt::clap().get_matches_from_safe(&[\"test\", \"add\", \"--verbose\"]);\n assert!(result.is_err());\n let result = Opt::clap().get_matches_from_safe(&[\"test\", \"--badopt\", \"add\"]);\n assert!(result.is_err());\n let result = Opt::clap().get_matches_from_safe(&[\"test\", \"add\", \"--badopt\"]);\n assert!(result.is_err());\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\nstruct Opt3 {\n #[structopt(short = \"a\", long = \"all\")]\n all: bool,\n #[structopt(subcommand)]\n cmd: Sub2\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\nenum Sub2 {\n #[structopt(name = \"foo\")]\n Foo {\n file: String,\n #[structopt(subcommand)]\n cmd: Sub3\n },\n #[structopt(name = \"bar\")]\n Bar {\n }\n}\n\n#[derive(StructOpt, PartialEq, Debug)]\nenum Sub3 {\n #[structopt(name = \"baz\")]\n Baz {},\n #[structopt(name = \"quux\")]\n Quux {}\n}\n\n#[test]\nfn test_subsubcommand() {\n assert_eq!(\n Opt3 {\n all: true,\n cmd: Sub2::Foo { file: \"lib.rs\".to_string(), cmd: Sub3::Quux {} }\n },\n Opt3::from_clap(Opt3::clap().get_matches_from(&[\"test\", \"--all\", \"foo\", \"lib.rs\", \"quux\"]))\n );\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Build configuration for Rust's release channels.\n\/\/!\n\/\/! 
Implements the stable\/beta\/nightly channel distinctions by setting various\n\/\/! flags like the `unstable_features`, calculating variables like `release` and\n\/\/! `package_vers`, and otherwise indicating to the compiler what it should\n\/\/! print out as part of its version information.\n\nuse std::path::Path;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\nuse config::Config;\n\n\/\/ The version number\npub const CFG_RELEASE_NUM: &str = \"1.23.0\";\n\n\/\/ An optional number to put after the label, e.g. '.2' -> '-beta.2'\n\/\/ Be sure to make this starts with a dot to conform to semver pre-release\n\/\/ versions (section 9)\npub const CFG_PRERELEASE_VERSION: &str = \".1\";\n\npub struct GitInfo {\n inner: Option<Info>,\n}\n\nstruct Info {\n commit_date: String,\n sha: String,\n short_sha: String,\n}\n\nimpl GitInfo {\n pub fn new(config: &Config, dir: &Path) -> GitInfo {\n \/\/ See if this even begins to look like a git dir\n if config.ignore_git || !dir.join(\".git\").exists() {\n return GitInfo { inner: None }\n }\n\n \/\/ Make sure git commands work\n let out = Command::new(\"git\")\n .arg(\"rev-parse\")\n .current_dir(dir)\n .output()\n .expect(\"failed to spawn git\");\n if !out.status.success() {\n return GitInfo { inner: None }\n }\n\n \/\/ Ok, let's scrape some info\n let ver_date = output(Command::new(\"git\").current_dir(dir)\n .arg(\"log\").arg(\"-1\")\n .arg(\"--date=short\")\n .arg(\"--pretty=format:%cd\"));\n let ver_hash = output(Command::new(\"git\").current_dir(dir)\n .arg(\"rev-parse\").arg(\"HEAD\"));\n let short_ver_hash = output(Command::new(\"git\")\n .current_dir(dir)\n .arg(\"rev-parse\")\n .arg(\"--short=9\")\n .arg(\"HEAD\"));\n GitInfo {\n inner: Some(Info {\n commit_date: ver_date.trim().to_string(),\n sha: ver_hash.trim().to_string(),\n short_sha: short_ver_hash.trim().to_string(),\n }),\n }\n }\n\n pub fn sha(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.sha[..])\n }\n\n pub fn sha_short(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.short_sha[..])\n }\n\n pub fn commit_date(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.commit_date[..])\n }\n\n pub fn version(&self, build: &Build, num: &str) -> String {\n let mut version = build.release(num);\n if let Some(ref inner) = self.inner {\n version.push_str(\" (\");\n version.push_str(&inner.short_sha);\n version.push_str(\" \");\n version.push_str(&inner.commit_date);\n version.push_str(\")\");\n }\n version\n }\n\n pub fn is_git(&self) -> bool {\n self.inner.is_some()\n }\n}\n<commit_msg>Increment Nightly version to 1.24.0<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Build configuration for Rust's release channels.\n\/\/!\n\/\/! Implements the stable\/beta\/nightly channel distinctions by setting various\n\/\/! flags like the `unstable_features`, calculating variables like `release` and\n\/\/! `package_vers`, and otherwise indicating to the compiler what it should\n\/\/! 
print out as part of its version information.\n\nuse std::path::Path;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\nuse config::Config;\n\n\/\/ The version number\npub const CFG_RELEASE_NUM: &str = \"1.24.0\";\n\n\/\/ An optional number to put after the label, e.g. '.2' -> '-beta.2'\n\/\/ Be sure to make this starts with a dot to conform to semver pre-release\n\/\/ versions (section 9)\npub const CFG_PRERELEASE_VERSION: &str = \".1\";\n\npub struct GitInfo {\n inner: Option<Info>,\n}\n\nstruct Info {\n commit_date: String,\n sha: String,\n short_sha: String,\n}\n\nimpl GitInfo {\n pub fn new(config: &Config, dir: &Path) -> GitInfo {\n \/\/ See if this even begins to look like a git dir\n if config.ignore_git || !dir.join(\".git\").exists() {\n return GitInfo { inner: None }\n }\n\n \/\/ Make sure git commands work\n let out = Command::new(\"git\")\n .arg(\"rev-parse\")\n .current_dir(dir)\n .output()\n .expect(\"failed to spawn git\");\n if !out.status.success() {\n return GitInfo { inner: None }\n }\n\n \/\/ Ok, let's scrape some info\n let ver_date = output(Command::new(\"git\").current_dir(dir)\n .arg(\"log\").arg(\"-1\")\n .arg(\"--date=short\")\n .arg(\"--pretty=format:%cd\"));\n let ver_hash = output(Command::new(\"git\").current_dir(dir)\n .arg(\"rev-parse\").arg(\"HEAD\"));\n let short_ver_hash = output(Command::new(\"git\")\n .current_dir(dir)\n .arg(\"rev-parse\")\n .arg(\"--short=9\")\n .arg(\"HEAD\"));\n GitInfo {\n inner: Some(Info {\n commit_date: ver_date.trim().to_string(),\n sha: ver_hash.trim().to_string(),\n short_sha: short_ver_hash.trim().to_string(),\n }),\n }\n }\n\n pub fn sha(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.sha[..])\n }\n\n pub fn sha_short(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.short_sha[..])\n }\n\n pub fn commit_date(&self) -> Option<&str> {\n self.inner.as_ref().map(|s| &s.commit_date[..])\n }\n\n pub fn version(&self, build: &Build, num: &str) -> String {\n let mut version = build.release(num);\n if let Some(ref inner) = self.inner {\n version.push_str(\" (\");\n version.push_str(&inner.short_sha);\n version.push_str(\" \");\n version.push_str(&inner.commit_date);\n version.push_str(\")\");\n }\n version\n }\n\n pub fn is_git(&self) -> bool {\n self.inner.is_some()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add assembler start<commit_after>pub struct Instruction {\n label: String,\n operation: String,\n operands: Vec<String>\n}\n\npub fn parse_instruction(instruction: String) -> Instruction {\n let mut instruction_iterator = instruction.split(\" \");\n let instruction_vector = instruction_iterator.collect::<Vec<&str>>();\n\n let mut operands_iterator = instruction_vector[1].split(\",\");\n let operands_vector = operands_iterator.map(|x| x.to_string()).collect::<Vec<String>>();\n\n Instruction {\n label: \"\".to_string(),\n operation: instruction_vector[0].to_string(),\n operands: operands_vector\n }\n}\n\npub fn assemble(program: String) -> Vec<Instruction> {\n let mut instruction_iterator = program.split(\"\\n\");\n let mut vec: Vec<Instruction> = Vec::with_capacity(2);\n for line in instruction_iterator {\n println!(\"{}\", line);\n vec.push(parse_instruction(line.to_string()));\n }\n vec\n}\n\n#[test]\nfn can_assemble() {\n let program = \"ldi r0,$0f\\ninc r0\".to_string();\n let instructions = assemble(program);\n\n assert_eq!(instructions.len(), 2);\n\n let ldi = &instructions[0];\n assert_eq!(\"\", ldi.label);\n assert_eq!(\"ldi\", ldi.operation);\n 
assert_eq!(vec![\"r0\",\"$0f\"], ldi.operands);\n\n let inc = &instructions[1];\n assert_eq!(\"\", inc.label);\n assert_eq!(\"inc\", inc.operation);\n assert_eq!(vec![\"r0\"], inc.operands);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add struct for subframe data<commit_after>pub enum Data {\n Constant(i32),\n Verbatim(Vec<i32>),\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test to reproduce link errors reported in gz\/rust-x86#1<commit_after>\/\/ Verify that we can be linked against an appliction which only uses\n\/\/ libcore, which is common in kernel space.\n\n#![feature(no_std, lang_items)]\n#![no_std]\n\nextern crate x86;\n\nfn main() {\n}\n\n\/\/ We want to supply these definitions ourselves, and not have them\n\/\/ accidentally pulled in via the x86 crate.\n#[lang = \"eh_personality\"]\nextern \"C\" fn eh_personality() {\n}\n\n#[lang = \"panic_fmt\"]\nextern \"C\" fn panic_fmt(\n args: ::core::fmt::Arguments, file: &str, line: usize)\n -> !\n{\n loop {}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ functions to ask the user for data, with crate:spinner\n\nuse std::io::stdin;\nuse std::io::BufRead;\nuse std::io::BufReader;\n\nuse regex::Regex;\nuse ansi_term::Colour::*;\n\n\/\/\/ Ask the user for a Yes\/No answer. Optionally provide a default value. If none is provided, this\n\/\/\/ keeps loop{}ing\npub fn ask_bool(s: &str, default: Option<bool>) -> bool {\n ask_bool_(s, default, &mut BufReader::new(stdin()))\n}\n\nfn ask_bool_<R: BufRead>(s: &str, default: Option<bool>, input: &mut R) -> bool {\n lazy_static! {\n static ref R_YES: Regex = Regex::new(r\"^[Yy]$\").unwrap();\n static ref R_NO: Regex = Regex::new(r\"^[Nn]$\").unwrap();\n }\n\n loop {\n ask_question(s, false);\n if match default { Some(s) => s, _ => true } {\n println!(\" [Yn]: \");\n } else {\n println!(\" [yN]: \");\n }\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n if R_YES.is_match(&s[..]) {\n return true\n } else if R_NO.is_match(&s[..]) {\n return false\n } else {\n if default.is_some() {\n return default.unwrap();\n }\n\n \/\/ else again...\n }\n }\n}\n\n\/\/\/ Ask the user for an unsigned number. Optionally provide a default value. If none is provided,\n\/\/\/ this keeps loop{}ing\npub fn ask_uint(s: &str, default: Option<u64>) -> u64 {\n ask_uint_(s, default, &mut BufReader::new(stdin()))\n}\n\nfn ask_uint_<R: BufRead>(s: &str, default: Option<u64>, input: &mut R) -> u64 {\n use std::str::FromStr;\n\n loop {\n ask_question(s, false);\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n let u : Result<u64, _> = FromStr::from_str(&s[..]);\n match u {\n Ok(u) => { return u; },\n Err(_) => {\n if default.is_some() {\n return default.unwrap();\n } \/\/ else keep looping\n }\n }\n }\n}\n\n\/\/\/ Ask the user for a String.\n\/\/\/\n\/\/\/ If `permit_empty` is set to false, the default value will be returned if the user inserts an\n\/\/\/ empty string.\n\/\/\/\n\/\/\/ If the `permit_empty` value is true, the `default` value is never returned.\n\/\/\/\n\/\/\/ If the `permit_multiline` is set to true, the `prompt` will be displayed before each input line.\n\/\/\/\n\/\/\/ If the `eof` parameter is `None`, the input ends as soon as there is an empty line input from\n\/\/\/ the user. 
If the parameter is `Some(text)`, the input ends if the input line is equal to `text`.\npub fn ask_string(s: &str,\n default: Option<String>,\n permit_empty: bool,\n permit_multiline: bool,\n eof: Option<&str>,\n prompt: &str)\n -> String\n{\n ask_string_(s,\n default,\n permit_empty,\n permit_multiline,\n eof,\n prompt,\n &mut BufReader::new(stdin()))\n}\n\npub fn ask_string_<R: BufRead>(s: &str,\n default: Option<String>,\n permit_empty: bool,\n permit_multiline: bool,\n eof: Option<&str>,\n prompt: &str,\n input: &mut R)\n -> String\n{\n let mut v = vec![];\n loop {\n ask_question(s, true);\n print!(\"{}\", prompt);\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n if permit_multiline {\n if permit_multiline && eof.map(|e| e == s).unwrap_or(false) {\n return v.join(\"\\n\");\n }\n\n if permit_empty || v.len() != 0 {\n v.push(s);\n }\n print!(\"{}\", prompt);\n } else {\n if s.len() == 0 && permit_empty {\n return s;\n } else if s.len() == 0 && !permit_empty {\n if default.is_some() {\n return default.unwrap();\n } else {\n continue;\n }\n } else {\n return s;\n }\n }\n }\n}\n\n\/\/\/ Helper function to print a imag question string. The `question` argument may not contain a\n\/\/\/ trailing questionmark.\n\/\/\/\n\/\/\/ The `nl` parameter can be used to configure whether a newline character should be printed\npub fn ask_question(question: &str, nl: bool) {\n if nl {\n println!(\"[imag]: {}?\", Yellow.paint(question));\n } else {\n print!(\"[imag]: {}?\", Yellow.paint(question));\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::io::BufReader;\n\n use super::ask_bool_;\n use super::ask_uint_;\n\n #[test]\n fn test_ask_bool_nodefault_yes() {\n let question = \"Is this true\";\n let default = None;\n let answers = \"\\n\\n\\n\\n\\ny\";\n\n assert!(ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_nodefault_no() {\n let question = \"Is this true\";\n let default = None;\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"y\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes_answer_no() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no_answer_yes() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"y\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no_without_answer() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"\\n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes_without_answer() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"\\n\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn 
test_ask_uint_nodefault() {\n let question = \"Is this 1\";\n let default = None;\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default() {\n let question = \"Is this 1\";\n let default = Some(1);\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_input_1() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_noinput() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_several_noinput() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\\n\\n\\n\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_wrong_input() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\\n\\nasfb\\nsakjf\\naskjf\\n-2\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n}\n<commit_msg>Implement ask_select_from_list()<commit_after>\/\/ functions to ask the user for data, with crate:spinner\n\nuse std::io::stdin;\nuse std::io::BufRead;\nuse std::io::BufReader;\nuse std::result::Result as RResult;\n\nuse error::InteractionError;\nuse error::InteractionErrorKind;\nuse result::Result;\n\nuse regex::Regex;\nuse ansi_term::Colour::*;\nuse interactor::*;\n\n\/\/\/ Ask the user for a Yes\/No answer. Optionally provide a default value. If none is provided, this\n\/\/\/ keeps loop{}ing\npub fn ask_bool(s: &str, default: Option<bool>) -> bool {\n ask_bool_(s, default, &mut BufReader::new(stdin()))\n}\n\nfn ask_bool_<R: BufRead>(s: &str, default: Option<bool>, input: &mut R) -> bool {\n lazy_static! {\n static ref R_YES: Regex = Regex::new(r\"^[Yy]$\").unwrap();\n static ref R_NO: Regex = Regex::new(r\"^[Nn]$\").unwrap();\n }\n\n loop {\n ask_question(s, false);\n if match default { Some(s) => s, _ => true } {\n println!(\" [Yn]: \");\n } else {\n println!(\" [yN]: \");\n }\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n if R_YES.is_match(&s[..]) {\n return true\n } else if R_NO.is_match(&s[..]) {\n return false\n } else {\n if default.is_some() {\n return default.unwrap();\n }\n\n \/\/ else again...\n }\n }\n}\n\n\/\/\/ Ask the user for an unsigned number. Optionally provide a default value. 
If none is provided,\n\/\/\/ this keeps loop{}ing\npub fn ask_uint(s: &str, default: Option<u64>) -> u64 {\n ask_uint_(s, default, &mut BufReader::new(stdin()))\n}\n\nfn ask_uint_<R: BufRead>(s: &str, default: Option<u64>, input: &mut R) -> u64 {\n use std::str::FromStr;\n\n loop {\n ask_question(s, false);\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n let u : RResult<u64, _> = FromStr::from_str(&s[..]);\n match u {\n Ok(u) => { return u; },\n Err(_) => {\n if default.is_some() {\n return default.unwrap();\n } \/\/ else keep looping\n }\n }\n }\n}\n\n\/\/\/ Ask the user for a String.\n\/\/\/\n\/\/\/ If `permit_empty` is set to false, the default value will be returned if the user inserts an\n\/\/\/ empty string.\n\/\/\/\n\/\/\/ If the `permit_empty` value is true, the `default` value is never returned.\n\/\/\/\n\/\/\/ If the `permit_multiline` is set to true, the `prompt` will be displayed before each input line.\n\/\/\/\n\/\/\/ If the `eof` parameter is `None`, the input ends as soon as there is an empty line input from\n\/\/\/ the user. If the parameter is `Some(text)`, the input ends if the input line is equal to `text`.\npub fn ask_string(s: &str,\n default: Option<String>,\n permit_empty: bool,\n permit_multiline: bool,\n eof: Option<&str>,\n prompt: &str)\n -> String\n{\n ask_string_(s,\n default,\n permit_empty,\n permit_multiline,\n eof,\n prompt,\n &mut BufReader::new(stdin()))\n}\n\npub fn ask_string_<R: BufRead>(s: &str,\n default: Option<String>,\n permit_empty: bool,\n permit_multiline: bool,\n eof: Option<&str>,\n prompt: &str,\n input: &mut R)\n -> String\n{\n let mut v = vec![];\n loop {\n ask_question(s, true);\n print!(\"{}\", prompt);\n\n let mut s = String::new();\n let _ = input.read_line(&mut s);\n\n if permit_multiline {\n if permit_multiline && eof.map(|e| e == s).unwrap_or(false) {\n return v.join(\"\\n\");\n }\n\n if permit_empty || v.len() != 0 {\n v.push(s);\n }\n print!(\"{}\", prompt);\n } else {\n if s.len() == 0 && permit_empty {\n return s;\n } else if s.len() == 0 && !permit_empty {\n if default.is_some() {\n return default.unwrap();\n } else {\n continue;\n }\n } else {\n return s;\n }\n }\n }\n}\n\npub fn ask_select_from_list(list: &[&str]) -> Result<String> {\n pick_from_list(default_menu_cmd().as_mut(), list, \"Selection: \")\n .map_err(|e| InteractionError::new(InteractionErrorKind::Unknown, Some(Box::new(e))))\n}\n\n\/\/\/ Helper function to print a imag question string. 
The `question` argument may not contain a\n\/\/\/ trailing questionmark.\n\/\/\/\n\/\/\/ The `nl` parameter can be used to configure whether a newline character should be printed\npub fn ask_question(question: &str, nl: bool) {\n if nl {\n println!(\"[imag]: {}?\", Yellow.paint(question));\n } else {\n print!(\"[imag]: {}?\", Yellow.paint(question));\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::io::BufReader;\n\n use super::ask_bool_;\n use super::ask_uint_;\n\n #[test]\n fn test_ask_bool_nodefault_yes() {\n let question = \"Is this true\";\n let default = None;\n let answers = \"\\n\\n\\n\\n\\ny\";\n\n assert!(ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_nodefault_no() {\n let question = \"Is this true\";\n let default = None;\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"y\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes_answer_no() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no_answer_yes() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"y\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_no_without_answer() {\n let question = \"Is this true\";\n let default = Some(false);\n let answers = \"\\n\";\n\n assert!(false == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_bool_default_yes_without_answer() {\n let question = \"Is this true\";\n let default = Some(true);\n let answers = \"\\n\";\n\n assert!(true == ask_bool_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_nodefault() {\n let question = \"Is this 1\";\n let default = None;\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default() {\n let question = \"Is this 1\";\n let default = Some(1);\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_input_1() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"1\";\n\n assert!(1 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_noinput() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_several_noinput() {\n let question = \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\\n\\n\\n\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n #[test]\n fn test_ask_uint_default_2_wrong_input() {\n let question 
= \"Is this 1\";\n let default = Some(2);\n let answers = \"\\n\\n\\nasfb\\nsakjf\\naskjf\\n-2\";\n\n assert!(2 == ask_uint_(question, default, &mut BufReader::new(answers.as_bytes())));\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse super::Wrapping;\n\nuse ops::*;\n\nmacro_rules! sh_impl_signed {\n ($t:ident, $f:ident) => (\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shl<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shl(self, other: $f) -> Wrapping<$t> {\n if other < 0 {\n Wrapping(self.0.wrapping_shr((-other & self::shift_max::$t as $f) as u32))\n } else {\n Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))\n }\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShlAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shl_assign(&mut self, other: $f) {\n *self = *self << other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shr<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shr(self, other: $f) -> Wrapping<$t> {\n if other < 0 {\n Wrapping(self.0.wrapping_shl((-other & self::shift_max::$t as $f) as u32))\n } else {\n Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))\n }\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShrAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shr_assign(&mut self, other: $f) {\n *self = *self >> other;\n }\n }\n )\n}\n\nmacro_rules! sh_impl_unsigned {\n ($t:ident, $f:ident) => (\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shl<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shl(self, other: $f) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShlAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shl_assign(&mut self, other: $f) {\n *self = *self << other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shr<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shr(self, other: $f) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShrAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shr_assign(&mut self, other: $f) {\n *self = *self >> other;\n }\n }\n )\n}\n\n\/\/ FIXME (#23545): uncomment the remaining impls\nmacro_rules! sh_impl_all {\n ($($t:ident)*) => ($(\n \/\/sh_impl_unsigned! { $t, u8 }\n \/\/sh_impl_unsigned! { $t, u16 }\n \/\/sh_impl_unsigned! { $t, u32 }\n \/\/sh_impl_unsigned! { $t, u64 }\n sh_impl_unsigned! { $t, usize }\n\n \/\/sh_impl_signed! { $t, i8 }\n \/\/sh_impl_signed! { $t, i16 }\n \/\/sh_impl_signed! { $t, i32 }\n \/\/sh_impl_signed! { $t, i64 }\n \/\/sh_impl_signed! { $t, isize }\n )*)\n}\n\nsh_impl_all! 
{ u8 u16 u32 u64 usize i8 i16 i32 i64 isize }\n\n\/\/ FIXME(30524): impl Op<T> for Wrapping<T>, impl OpAssign<T> for Wrapping<T>\nmacro_rules! wrapping_impl {\n ($($t:ty)*) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Add for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn add(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_add(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl AddAssign for Wrapping<$t> {\n #[inline(always)]\n fn add_assign(&mut self, other: Wrapping<$t>) {\n *self = *self + other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Sub for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn sub(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_sub(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl SubAssign for Wrapping<$t> {\n #[inline(always)]\n fn sub_assign(&mut self, other: Wrapping<$t>) {\n *self = *self - other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Mul for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn mul(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_mul(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl MulAssign for Wrapping<$t> {\n #[inline(always)]\n fn mul_assign(&mut self, other: Wrapping<$t>) {\n *self = *self * other;\n }\n }\n\n #[stable(feature = \"wrapping_div\", since = \"1.3.0\")]\n impl Div for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn div(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_div(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl DivAssign for Wrapping<$t> {\n #[inline(always)]\n fn div_assign(&mut self, other: Wrapping<$t>) {\n *self = *self \/ other;\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl Rem for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn rem(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_rem(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl RemAssign for Wrapping<$t> {\n #[inline(always)]\n fn rem_assign(&mut self, other: Wrapping<$t>) {\n *self = *self % other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Not for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn not(self) -> Wrapping<$t> {\n Wrapping(!self.0)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl BitXor for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitxor(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 ^ other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitXorAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitxor_assign(&mut self, other: Wrapping<$t>) {\n *self = *self ^ other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl BitOr for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitor(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 | other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitOrAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitor_assign(&mut self, other: Wrapping<$t>) {\n *self = *self | other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl 
BitAnd for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitand(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 & other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitAndAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitand_assign(&mut self, other: Wrapping<$t>) {\n *self = *self & other;\n }\n }\n )*)\n}\n\nwrapping_impl! { usize u8 u16 u32 u64 isize i8 i16 i32 i64 }\n\nmod shift_max {\n #![allow(non_upper_case_globals)]\n\n #[cfg(target_pointer_width = \"32\")]\n mod platform {\n pub const usize: u32 = super::u32;\n pub const isize: u32 = super::i32;\n }\n\n #[cfg(target_pointer_width = \"64\")]\n mod platform {\n pub const usize: u32 = super::u64;\n pub const isize: u32 = super::i64;\n }\n\n pub const i8: u32 = (1 << 3) - 1;\n pub const i16: u32 = (1 << 4) - 1;\n pub const i32: u32 = (1 << 5) - 1;\n pub const i64: u32 = (1 << 6) - 1;\n pub use self::platform::isize;\n\n pub const u8: u32 = i8;\n pub const u16: u32 = i16;\n pub const u32: u32 = i32;\n pub const u64: u32 = i64;\n pub use self::platform::usize;\n}\n<commit_msg>Implement negation for wrapping numerals.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse super::Wrapping;\n\nuse ops::*;\n\nmacro_rules! sh_impl_signed {\n ($t:ident, $f:ident) => (\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shl<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shl(self, other: $f) -> Wrapping<$t> {\n if other < 0 {\n Wrapping(self.0.wrapping_shr((-other & self::shift_max::$t as $f) as u32))\n } else {\n Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))\n }\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShlAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shl_assign(&mut self, other: $f) {\n *self = *self << other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shr<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shr(self, other: $f) -> Wrapping<$t> {\n if other < 0 {\n Wrapping(self.0.wrapping_shl((-other & self::shift_max::$t as $f) as u32))\n } else {\n Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))\n }\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShrAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shr_assign(&mut self, other: $f) {\n *self = *self >> other;\n }\n }\n )\n}\n\nmacro_rules! 
sh_impl_unsigned {\n ($t:ident, $f:ident) => (\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shl<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shl(self, other: $f) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShlAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shl_assign(&mut self, other: $f) {\n *self = *self << other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Shr<$f> for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn shr(self, other: $f) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl ShrAssign<$f> for Wrapping<$t> {\n #[inline(always)]\n fn shr_assign(&mut self, other: $f) {\n *self = *self >> other;\n }\n }\n )\n}\n\n\/\/ FIXME (#23545): uncomment the remaining impls\nmacro_rules! sh_impl_all {\n ($($t:ident)*) => ($(\n \/\/sh_impl_unsigned! { $t, u8 }\n \/\/sh_impl_unsigned! { $t, u16 }\n \/\/sh_impl_unsigned! { $t, u32 }\n \/\/sh_impl_unsigned! { $t, u64 }\n sh_impl_unsigned! { $t, usize }\n\n \/\/sh_impl_signed! { $t, i8 }\n \/\/sh_impl_signed! { $t, i16 }\n \/\/sh_impl_signed! { $t, i32 }\n \/\/sh_impl_signed! { $t, i64 }\n \/\/sh_impl_signed! { $t, isize }\n )*)\n}\n\nsh_impl_all! { u8 u16 u32 u64 usize i8 i16 i32 i64 isize }\n\n\/\/ FIXME(30524): impl Op<T> for Wrapping<T>, impl OpAssign<T> for Wrapping<T>\nmacro_rules! wrapping_impl {\n ($($t:ty)*) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Add for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn add(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_add(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl AddAssign for Wrapping<$t> {\n #[inline(always)]\n fn add_assign(&mut self, other: Wrapping<$t>) {\n *self = *self + other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Sub for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn sub(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_sub(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl SubAssign for Wrapping<$t> {\n #[inline(always)]\n fn sub_assign(&mut self, other: Wrapping<$t>) {\n *self = *self - other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Mul for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn mul(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_mul(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl MulAssign for Wrapping<$t> {\n #[inline(always)]\n fn mul_assign(&mut self, other: Wrapping<$t>) {\n *self = *self * other;\n }\n }\n\n #[stable(feature = \"wrapping_div\", since = \"1.3.0\")]\n impl Div for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn div(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_div(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl DivAssign for Wrapping<$t> {\n #[inline(always)]\n fn div_assign(&mut self, other: Wrapping<$t>) {\n *self = *self \/ other;\n }\n }\n\n #[stable(feature = \"wrapping_impls\", since = \"1.7.0\")]\n impl Rem for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n 
#[inline(always)]\n fn rem(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0.wrapping_rem(other.0))\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl RemAssign for Wrapping<$t> {\n #[inline(always)]\n fn rem_assign(&mut self, other: Wrapping<$t>) {\n *self = *self % other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl Not for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn not(self) -> Wrapping<$t> {\n Wrapping(!self.0)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl BitXor for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitxor(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 ^ other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitXorAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitxor_assign(&mut self, other: Wrapping<$t>) {\n *self = *self ^ other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl BitOr for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitor(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 | other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitOrAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitor_assign(&mut self, other: Wrapping<$t>) {\n *self = *self | other;\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl BitAnd for Wrapping<$t> {\n type Output = Wrapping<$t>;\n\n #[inline(always)]\n fn bitand(self, other: Wrapping<$t>) -> Wrapping<$t> {\n Wrapping(self.0 & other.0)\n }\n }\n\n #[stable(feature = \"op_assign_traits\", since = \"1.8.0\")]\n impl BitAndAssign for Wrapping<$t> {\n #[inline(always)]\n fn bitand_assign(&mut self, other: Wrapping<$t>) {\n *self = *self & other;\n }\n }\n\n #[stable(feature = \"wrapping_neg\", since = \"1.10.0\")]\n impl Neg for Wrapping<$t> {\n type Output = Self;\n #[inline(always)]\n fn neg(self) -> Self {\n Wrapping(0) - self\n }\n }\n )*)\n}\n\nwrapping_impl! { usize u8 u16 u32 u64 isize i8 i16 i32 i64 }\n\nmod shift_max {\n #![allow(non_upper_case_globals)]\n\n #[cfg(target_pointer_width = \"32\")]\n mod platform {\n pub const usize: u32 = super::u32;\n pub const isize: u32 = super::i32;\n }\n\n #[cfg(target_pointer_width = \"64\")]\n mod platform {\n pub const usize: u32 = super::u64;\n pub const isize: u32 = super::i64;\n }\n\n pub const i8: u32 = (1 << 3) - 1;\n pub const i16: u32 = (1 << 4) - 1;\n pub const i32: u32 = (1 << 5) - 1;\n pub const i64: u32 = (1 << 6) - 1;\n pub use self::platform::isize;\n\n pub const u8: u32 = i8;\n pub const u16: u32 = i16;\n pub const u32: u32 = i32;\n pub const u64: u32 = i64;\n pub use self::platform::usize;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\nuse std::cell::{RefCell, Cell};\nuse std::collections::HashMap;\nuse std::ffi::CString;\nuse std::fmt::Debug;\nuse std::hash::{Hash, BuildHasher};\nuse std::iter::repeat;\nuse std::path::Path;\nuse std::time::{Duration, Instant};\n\n\/\/ The name of the associated type for `Fn` return types\npub const FN_OUTPUT_NAME: &'static str = \"Output\";\n\n\/\/ Useful type to use with `Result<>` indicate that an error has already\n\/\/ been reported to the user, so no need to continue checking.\n#[derive(Clone, Copy, Debug)]\npub struct ErrorReported;\n\nthread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));\n\n\/\/\/ Read the current depth of `time()` calls. This is used to\n\/\/\/ encourage indentation across threads.\npub fn time_depth() -> usize {\n TIME_DEPTH.with(|slot| slot.get())\n}\n\n\/\/\/ Set the current depth of `time()` calls. The idea is to call\n\/\/\/ `set_time_depth()` with the result from `time_depth()` in the\n\/\/\/ parent thread.\npub fn set_time_depth(depth: usize) {\n TIME_DEPTH.with(|slot| slot.set(depth));\n}\n\npub fn time<T, F>(do_it: bool, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n if !do_it { return f(); }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n let start = Instant::now();\n let rv = f();\n let dur = start.elapsed();\n\n let mem_string = match get_resident() {\n Some(n) => {\n let mb = n as f64 \/ 1_000_000.0;\n format!(\"; rss: {}MB\", mb.round() as usize)\n }\n None => \"\".to_owned(),\n };\n println!(\"{}time: {}{}\\t{}\",\n repeat(\" \").take(old).collect::<String>(),\n duration_to_secs_str(dur),\n mem_string,\n what);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n\n rv\n}\n\n\/\/ Hack up our own formatting for the duration to make it easier for scripts\n\/\/ to parse (always use the same number of decimal places and the same unit).\npub fn duration_to_secs_str(dur: Duration) -> String {\n const NANOS_PER_SEC: f64 = 1_000_000_000.0;\n let secs = dur.as_secs() as f64 +\n dur.subsec_nanos() as f64 \/ NANOS_PER_SEC;\n\n format!(\"{:.3}\", secs)\n}\n\npub fn to_readable_str(mut val: usize) -> String {\n let mut groups = vec![];\n loop {\n let group = val % 1000;\n\n val \/= 1000;\n\n if val == 0 {\n groups.push(format!(\"{}\", group));\n break;\n } else {\n groups.push(format!(\"{:03}\", group));\n }\n }\n\n groups.reverse();\n\n groups.join(\"_\")\n}\n\npub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where\n F: FnOnce() -> T,\n{\n let start = Instant::now();\n let rv = f();\n let duration = start.elapsed();\n accu.set(duration + accu.get());\n rv\n}\n\n\/\/ Like std::macros::try!, but for Option<>.\nmacro_rules! 
option_try(\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n);\n\n\/\/ Memory reporting\n#[cfg(unix)]\nfn get_resident() -> Option<usize> {\n use std::fs::File;\n use std::io::Read;\n\n let field = 1;\n let mut f = option_try!(File::open(\"\/proc\/self\/statm\").ok());\n let mut contents = String::new();\n option_try!(f.read_to_string(&mut contents).ok());\n let s = option_try!(contents.split_whitespace().nth(field));\n let npages = option_try!(s.parse::<usize>().ok());\n Some(npages * 4096)\n}\n\n#[cfg(windows)]\nfn get_resident() -> Option<usize> {\n type BOOL = i32;\n type DWORD = u32;\n type HANDLE = *mut u8;\n use libc::size_t;\n use std::mem;\n #[repr(C)]\n #[allow(non_snake_case)]\n struct PROCESS_MEMORY_COUNTERS {\n cb: DWORD,\n PageFaultCount: DWORD,\n PeakWorkingSetSize: size_t,\n WorkingSetSize: size_t,\n QuotaPeakPagedPoolUsage: size_t,\n QuotaPagedPoolUsage: size_t,\n QuotaPeakNonPagedPoolUsage: size_t,\n QuotaNonPagedPoolUsage: size_t,\n PagefileUsage: size_t,\n PeakPagefileUsage: size_t,\n }\n type PPROCESS_MEMORY_COUNTERS = *mut PROCESS_MEMORY_COUNTERS;\n #[link(name = \"psapi\")]\n extern \"system\" {\n fn GetCurrentProcess() -> HANDLE;\n fn GetProcessMemoryInfo(Process: HANDLE,\n ppsmemCounters: PPROCESS_MEMORY_COUNTERS,\n cb: DWORD) -> BOOL;\n }\n let mut pmc: PROCESS_MEMORY_COUNTERS = unsafe { mem::zeroed() };\n pmc.cb = mem::size_of_val(&pmc) as DWORD;\n match unsafe { GetProcessMemoryInfo(GetCurrentProcess(), &mut pmc, pmc.cb) } {\n 0 => None,\n _ => Some(pmc.WorkingSetSize as usize),\n }\n}\n\npub fn indent<R, F>(op: F) -> R where\n R: Debug,\n F: FnOnce() -> R,\n{\n \/\/ Use in conjunction with the log post-processor like `src\/etc\/indenter`\n \/\/ to make debug output more readable.\n debug!(\">>\");\n let r = op();\n debug!(\"<< (Result = {:?})\", r);\n r\n}\n\npub struct Indenter {\n _cannot_construct_outside_of_this_module: (),\n}\n\nimpl Drop for Indenter {\n fn drop(&mut self) { debug!(\"<<\"); }\n}\n\npub fn indenter() -> Indenter {\n debug!(\">>\");\n Indenter { _cannot_construct_outside_of_this_module: () }\n}\n\npub trait MemoizationMap {\n type Key: Clone;\n type Value: Clone;\n\n \/\/\/ If `key` is present in the map, return the valuee,\n \/\/\/ otherwise invoke `op` and store the value in the map.\n \/\/\/\n \/\/\/ NB: if the receiver is a `DepTrackingMap`, special care is\n \/\/\/ needed in the `op` to ensure that the correct edges are\n \/\/\/ added into the dep graph. 
See the `DepTrackingMap` impl for\n \/\/\/ more details!\n fn memoize<OP>(&self, key: Self::Key, op: OP) -> Self::Value\n where OP: FnOnce() -> Self::Value;\n}\n\nimpl<K, V, S> MemoizationMap for RefCell<HashMap<K,V,S>>\n where K: Hash+Eq+Clone, V: Clone, S: BuildHasher\n{\n type Key = K;\n type Value = V;\n\n fn memoize<OP>(&self, key: K, op: OP) -> V\n where OP: FnOnce() -> V\n {\n let result = self.borrow().get(&key).cloned();\n match result {\n Some(result) => result,\n None => {\n let result = op();\n self.borrow_mut().insert(key, result.clone());\n result\n }\n }\n }\n}\n\n#[cfg(unix)]\npub fn path2cstr(p: &Path) -> CString {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n let p: &OsStr = p.as_ref();\n CString::new(p.as_bytes()).unwrap()\n}\n#[cfg(windows)]\npub fn path2cstr(p: &Path) -> CString {\n CString::new(p.to_str().unwrap()).unwrap()\n}\n\n\n#[test]\nfn test_to_readable_str() {\n assert_eq!(\"0\", to_readable_str(0));\n assert_eq!(\"1\", to_readable_str(1));\n assert_eq!(\"99\", to_readable_str(99));\n assert_eq!(\"999\", to_readable_str(999));\n assert_eq!(\"1_000\", to_readable_str(1_000));\n assert_eq!(\"1_001\", to_readable_str(1_001));\n assert_eq!(\"999_999\", to_readable_str(999_999));\n assert_eq!(\"1_000_000\", to_readable_str(1_000_000));\n assert_eq!(\"1_234_567\", to_readable_str(1_234_567));\n}\n<commit_msg>put option_try macro def under #[cfg(unix)]<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_camel_case_types)]\n\nuse std::cell::{RefCell, Cell};\nuse std::collections::HashMap;\nuse std::ffi::CString;\nuse std::fmt::Debug;\nuse std::hash::{Hash, BuildHasher};\nuse std::iter::repeat;\nuse std::path::Path;\nuse std::time::{Duration, Instant};\n\n\/\/ The name of the associated type for `Fn` return types\npub const FN_OUTPUT_NAME: &'static str = \"Output\";\n\n\/\/ Useful type to use with `Result<>` indicate that an error has already\n\/\/ been reported to the user, so no need to continue checking.\n#[derive(Clone, Copy, Debug)]\npub struct ErrorReported;\n\nthread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));\n\n\/\/\/ Read the current depth of `time()` calls. This is used to\n\/\/\/ encourage indentation across threads.\npub fn time_depth() -> usize {\n TIME_DEPTH.with(|slot| slot.get())\n}\n\n\/\/\/ Set the current depth of `time()` calls. 
The idea is to call\n\/\/\/ `set_time_depth()` with the result from `time_depth()` in the\n\/\/\/ parent thread.\npub fn set_time_depth(depth: usize) {\n TIME_DEPTH.with(|slot| slot.set(depth));\n}\n\npub fn time<T, F>(do_it: bool, what: &str, f: F) -> T where\n F: FnOnce() -> T,\n{\n if !do_it { return f(); }\n\n let old = TIME_DEPTH.with(|slot| {\n let r = slot.get();\n slot.set(r + 1);\n r\n });\n\n let start = Instant::now();\n let rv = f();\n let dur = start.elapsed();\n\n let mem_string = match get_resident() {\n Some(n) => {\n let mb = n as f64 \/ 1_000_000.0;\n format!(\"; rss: {}MB\", mb.round() as usize)\n }\n None => \"\".to_owned(),\n };\n println!(\"{}time: {}{}\\t{}\",\n repeat(\" \").take(old).collect::<String>(),\n duration_to_secs_str(dur),\n mem_string,\n what);\n\n TIME_DEPTH.with(|slot| slot.set(old));\n\n rv\n}\n\n\/\/ Hack up our own formatting for the duration to make it easier for scripts\n\/\/ to parse (always use the same number of decimal places and the same unit).\npub fn duration_to_secs_str(dur: Duration) -> String {\n const NANOS_PER_SEC: f64 = 1_000_000_000.0;\n let secs = dur.as_secs() as f64 +\n dur.subsec_nanos() as f64 \/ NANOS_PER_SEC;\n\n format!(\"{:.3}\", secs)\n}\n\npub fn to_readable_str(mut val: usize) -> String {\n let mut groups = vec![];\n loop {\n let group = val % 1000;\n\n val \/= 1000;\n\n if val == 0 {\n groups.push(format!(\"{}\", group));\n break;\n } else {\n groups.push(format!(\"{:03}\", group));\n }\n }\n\n groups.reverse();\n\n groups.join(\"_\")\n}\n\npub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where\n F: FnOnce() -> T,\n{\n let start = Instant::now();\n let rv = f();\n let duration = start.elapsed();\n accu.set(duration + accu.get());\n rv\n}\n\n\/\/ Like std::macros::try!, but for Option<>.\n#[cfg(unix)]\nmacro_rules! 
option_try(\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n);\n\n\/\/ Memory reporting\n#[cfg(unix)]\nfn get_resident() -> Option<usize> {\n use std::fs::File;\n use std::io::Read;\n\n let field = 1;\n let mut f = option_try!(File::open(\"\/proc\/self\/statm\").ok());\n let mut contents = String::new();\n option_try!(f.read_to_string(&mut contents).ok());\n let s = option_try!(contents.split_whitespace().nth(field));\n let npages = option_try!(s.parse::<usize>().ok());\n Some(npages * 4096)\n}\n\n#[cfg(windows)]\nfn get_resident() -> Option<usize> {\n type BOOL = i32;\n type DWORD = u32;\n type HANDLE = *mut u8;\n use libc::size_t;\n use std::mem;\n #[repr(C)]\n #[allow(non_snake_case)]\n struct PROCESS_MEMORY_COUNTERS {\n cb: DWORD,\n PageFaultCount: DWORD,\n PeakWorkingSetSize: size_t,\n WorkingSetSize: size_t,\n QuotaPeakPagedPoolUsage: size_t,\n QuotaPagedPoolUsage: size_t,\n QuotaPeakNonPagedPoolUsage: size_t,\n QuotaNonPagedPoolUsage: size_t,\n PagefileUsage: size_t,\n PeakPagefileUsage: size_t,\n }\n type PPROCESS_MEMORY_COUNTERS = *mut PROCESS_MEMORY_COUNTERS;\n #[link(name = \"psapi\")]\n extern \"system\" {\n fn GetCurrentProcess() -> HANDLE;\n fn GetProcessMemoryInfo(Process: HANDLE,\n ppsmemCounters: PPROCESS_MEMORY_COUNTERS,\n cb: DWORD) -> BOOL;\n }\n let mut pmc: PROCESS_MEMORY_COUNTERS = unsafe { mem::zeroed() };\n pmc.cb = mem::size_of_val(&pmc) as DWORD;\n match unsafe { GetProcessMemoryInfo(GetCurrentProcess(), &mut pmc, pmc.cb) } {\n 0 => None,\n _ => Some(pmc.WorkingSetSize as usize),\n }\n}\n\npub fn indent<R, F>(op: F) -> R where\n R: Debug,\n F: FnOnce() -> R,\n{\n \/\/ Use in conjunction with the log post-processor like `src\/etc\/indenter`\n \/\/ to make debug output more readable.\n debug!(\">>\");\n let r = op();\n debug!(\"<< (Result = {:?})\", r);\n r\n}\n\npub struct Indenter {\n _cannot_construct_outside_of_this_module: (),\n}\n\nimpl Drop for Indenter {\n fn drop(&mut self) { debug!(\"<<\"); }\n}\n\npub fn indenter() -> Indenter {\n debug!(\">>\");\n Indenter { _cannot_construct_outside_of_this_module: () }\n}\n\npub trait MemoizationMap {\n type Key: Clone;\n type Value: Clone;\n\n \/\/\/ If `key` is present in the map, return the valuee,\n \/\/\/ otherwise invoke `op` and store the value in the map.\n \/\/\/\n \/\/\/ NB: if the receiver is a `DepTrackingMap`, special care is\n \/\/\/ needed in the `op` to ensure that the correct edges are\n \/\/\/ added into the dep graph. 
See the `DepTrackingMap` impl for\n \/\/\/ more details!\n fn memoize<OP>(&self, key: Self::Key, op: OP) -> Self::Value\n where OP: FnOnce() -> Self::Value;\n}\n\nimpl<K, V, S> MemoizationMap for RefCell<HashMap<K,V,S>>\n where K: Hash+Eq+Clone, V: Clone, S: BuildHasher\n{\n type Key = K;\n type Value = V;\n\n fn memoize<OP>(&self, key: K, op: OP) -> V\n where OP: FnOnce() -> V\n {\n let result = self.borrow().get(&key).cloned();\n match result {\n Some(result) => result,\n None => {\n let result = op();\n self.borrow_mut().insert(key, result.clone());\n result\n }\n }\n }\n}\n\n#[cfg(unix)]\npub fn path2cstr(p: &Path) -> CString {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n let p: &OsStr = p.as_ref();\n CString::new(p.as_bytes()).unwrap()\n}\n#[cfg(windows)]\npub fn path2cstr(p: &Path) -> CString {\n CString::new(p.to_str().unwrap()).unwrap()\n}\n\n\n#[test]\nfn test_to_readable_str() {\n assert_eq!(\"0\", to_readable_str(0));\n assert_eq!(\"1\", to_readable_str(1));\n assert_eq!(\"99\", to_readable_str(99));\n assert_eq!(\"999\", to_readable_str(999));\n assert_eq!(\"1_000\", to_readable_str(1_000));\n assert_eq!(\"1_001\", to_readable_str(1_001));\n assert_eq!(\"999_999\", to_readable_str(999_999));\n assert_eq!(\"1_000_000\", to_readable_str(1_000_000));\n assert_eq!(\"1_234_567\", to_readable_str(1_234_567));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #36840 - eulerdisk:incr_test_for_hash_enum, r=michaelwoerister<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove outdated comment FIXME.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start sketching some traitorous code (xfail'd)<commit_after>\/\/xfail-test\n\n\/\/ Sketching traits.\n\n\/\/ methods with no implementation are required; methods with an\n\/\/ implementation are provided. No \"req\" keyword necessary.\ntrait Eq {\n fn eq(a: self) -> bool;\n\n fn neq(a: self) -> bool {\n !self.neq(a)\n }\n}\n\n\/\/ The `<` is pronounced `extends`. Also under consideration is `<:`.\n\/\/ Just using `:` is frowned upon, because (paraphrasing dherman) `:`\n\/\/ is supposed to separate things from different universes.\ntrait Ord < Eq {\n\n fn lt(a: self) -> bool;\n\n fn lte(a: self) -> bool {\n self.lt(a) || self.eq(a)\n }\n\n fn gt(a: self) -> bool {\n !self.lt(a) && !self.eq(a)\n }\n\n fn gte(a: self) -> bool {\n !self.lt(a)\n }\n}\n\n\/\/ pronounced \"impl of Ord for int\" -- not sold on this yet\nimpl int: Ord {\n fn lt(a: self) -> bool {\n self < a\n }\n\n \/\/ is this the place to put this?\n fn eq(a: self) -> bool {\n self == a\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(filtering): insert filtering into hashmap<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Definition of the Shared combinator, a future that is cloneable,\n\/\/! and can be polled in multiple threads.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! ```\n\/\/! use futures::future::*;\n\/\/!\n\/\/! let future = ok::<_, bool>(6);\n\/\/! let shared1 = future.shared();\n\/\/! let shared2 = shared1.clone();\n\/\/! assert_eq!(6, *shared1.wait().unwrap());\n\/\/! assert_eq!(6, *shared2.wait().unwrap());\n\/\/! 
```\n\nuse std::mem;\nuse std::vec::Vec;\nuse std::sync::{Arc, RwLock};\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\n\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A future that is cloneable and can be polled in multiple threads.\n\/\/\/ Use Future::shared() method to convert any future into a `Shared` future.\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared<F>\n where F: Future\n{\n inner: Arc<Inner<F>>,\n}\n\nstruct Inner<F>\n where F: Future\n{\n \/\/\/ The original future.\n original_future: Lock<F>,\n \/\/\/ Indicates whether the result is ready, and the state is `State::Done`.\n result_ready: AtomicBool,\n \/\/\/ The state of the shared future.\n state: RwLock<State<F::Item, F::Error>>,\n}\n\n\/\/\/ The state of the shared future. It can be one of the following:\n\/\/\/ 1. Done - contains the result of the original future.\n\/\/\/ 2. Waiting - contains the waiting tasks.\nenum State<T, E> {\n Waiting(Vec<Task>),\n Done(Result<SharedItem<T>, SharedError<E>>),\n}\n\nimpl<F> Shared<F>\n where F: Future\n{\n \/\/\/ Creates a new `Shared` from another future.\n pub fn new(future: F) -> Self {\n Shared {\n inner: Arc::new(Inner {\n original_future: Lock::new(future),\n result_ready: AtomicBool::new(false),\n state: RwLock::new(State::Waiting(vec![])),\n }),\n }\n }\n\n \/\/\/ Converts a result as it's stored in `State::Done` into `Poll`.\n fn result_to_polled_result(result: Result<SharedItem<F::Item>, SharedError<F::Error>>)\n -> Result<Async<SharedItem<F::Item>>, SharedError<F::Error>> {\n match result {\n Ok(item) => Ok(Async::Ready(item)),\n Err(error) => Err(error),\n }\n }\n\n \/\/\/ Clones the result from self.inner.state.\n \/\/\/ Assumes state is `State::Done`.\n fn read_result(&self) -> Result<Async<SharedItem<F::Item>>, SharedError<F::Error>> {\n match *self.inner.state.read().unwrap() {\n State::Done(ref result) => Self::result_to_polled_result(result.clone()),\n State::Waiting(_) => panic!(\"read_result() was called but State is not Done\"),\n }\n }\n\n \/\/\/ Stores the result in self.inner.state, unparks the waiting tasks,\n \/\/\/ and returns the result.\n fn store_result(&self,\n result: Result<SharedItem<F::Item>, SharedError<F::Error>>)\n -> Result<Async<SharedItem<F::Item>>, SharedError<F::Error>> {\n let ref mut state = *self.inner.state.write().unwrap();\n\n match mem::replace(state, State::Done(result.clone())) {\n State::Waiting(waiters) => {\n self.inner.result_ready.store(true, Ordering::Relaxed);\n for task in waiters {\n task.unpark();\n }\n }\n State::Done(_) => panic!(\"store_result() was called twice\"),\n }\n\n Self::result_to_polled_result(result)\n }\n}\n\nimpl<F> Future for Shared<F>\n where F: Future\n{\n type Item = SharedItem<F::Item>;\n type Error = SharedError<F::Error>;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n \/\/ The logic is as follows:\n \/\/ 1. Check if the result is ready (with result_ready)\n \/\/ - If the result is ready, return it.\n \/\/ - Otherwise:\n \/\/ 2. Try lock the self.inner.original_future:\n \/\/ - If successfully locked, check again if the result is ready.\n \/\/ If it's ready, just return it.\n \/\/ Otherwise, poll the original future.\n \/\/ If the future is ready, unpark the waiting tasks from\n \/\/ self.inner.state and return the result.\n \/\/ - If the future is not ready, or if the lock failed:\n \/\/ 3. Lock the state for write.\n \/\/ 4. If the state is `State::Done`, return the result. Otherwise:\n \/\/ 5. 
Create a task, push it to the waiters vector, and return `Ok(Async::NotReady)`.\n\n \/\/ If the result is ready, just return it\n if self.inner.result_ready.load(Ordering::Relaxed) {\n return self.read_result();\n }\n\n \/\/ The result was not ready.\n \/\/ Try lock the original future.\n match self.inner.original_future.try_lock() {\n Some(mut original_future) => {\n \/\/ Other thread could already poll the result, so we check if result_ready.\n if self.inner.result_ready.load(Ordering::Relaxed) {\n return self.read_result();\n }\n\n match original_future.poll() {\n Ok(Async::Ready(item)) => {\n return self.store_result(Ok(SharedItem::new(item)));\n }\n Err(error) => {\n return self.store_result(Err(SharedError::new(error)));\n }\n Ok(Async::NotReady) => {} \/\/ A task will be parked\n }\n }\n None => {} \/\/ A task will be parked\n }\n\n let ref mut state = *self.inner.state.write().unwrap();\n match state {\n &mut State::Done(ref result) => return Self::result_to_polled_result(result.clone()),\n &mut State::Waiting(ref mut waiters) => {\n waiters.push(task::park());\n }\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl<F> Clone for Shared<F>\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared { inner: self.inner.clone() }\n }\n}\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem<T> {\n item: Arc<T>,\n}\n\nimpl<T> SharedItem<T> {\n fn new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl<T> Clone for SharedItem<T> {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl<T> Deref for SharedItem<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError<E> {\n error: Arc<E>,\n}\n\nimpl<E> SharedError<E> {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\nimpl<T> Clone for SharedError<T> {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl<E> Deref for SharedError<E> {\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n<commit_msg>Removed result_to_polled_result, use map() instead<commit_after>\/\/! Definition of the Shared combinator, a future that is cloneable,\n\/\/! and can be polled in multiple threads.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! ```\n\/\/! use futures::future::*;\n\/\/!\n\/\/! let future = ok::<_, bool>(6);\n\/\/! let shared1 = future.shared();\n\/\/! let shared2 = shared1.clone();\n\/\/! assert_eq!(6, *shared1.wait().unwrap());\n\/\/! assert_eq!(6, *shared2.wait().unwrap());\n\/\/! 
```\n\nuse std::mem;\nuse std::vec::Vec;\nuse std::sync::{Arc, RwLock};\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\n\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A future that is cloneable and can be polled in multiple threads.\n\/\/\/ Use Future::shared() method to convert any future into a `Shared` future.\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared<F>\n where F: Future\n{\n inner: Arc<Inner<F>>,\n}\n\nstruct Inner<F>\n where F: Future\n{\n \/\/\/ The original future.\n original_future: Lock<F>,\n \/\/\/ Indicates whether the result is ready, and the state is `State::Done`.\n result_ready: AtomicBool,\n \/\/\/ The state of the shared future.\n state: RwLock<State<F::Item, F::Error>>,\n}\n\n\/\/\/ The state of the shared future. It can be one of the following:\n\/\/\/ 1. Done - contains the result of the original future.\n\/\/\/ 2. Waiting - contains the waiting tasks.\nenum State<T, E> {\n Waiting(Vec<Task>),\n Done(Result<SharedItem<T>, SharedError<E>>),\n}\n\nimpl<F> Shared<F>\n where F: Future\n{\n \/\/\/ Creates a new `Shared` from another future.\n pub fn new(future: F) -> Self {\n Shared {\n inner: Arc::new(Inner {\n original_future: Lock::new(future),\n result_ready: AtomicBool::new(false),\n state: RwLock::new(State::Waiting(vec![])),\n }),\n }\n }\n\n \/\/\/ Clones the result from self.inner.state.\n \/\/\/ Assumes state is `State::Done`.\n fn read_result(&self) -> Result<Async<SharedItem<F::Item>>, SharedError<F::Error>> {\n match *self.inner.state.read().unwrap() {\n State::Done(ref result) => result.clone().map(Async::Ready),\n State::Waiting(_) => panic!(\"read_result() was called but State is not Done\"),\n }\n }\n\n \/\/\/ Stores the result in self.inner.state, unparks the waiting tasks,\n \/\/\/ and returns the result.\n fn store_result(&self,\n result: Result<SharedItem<F::Item>, SharedError<F::Error>>)\n -> Result<Async<SharedItem<F::Item>>, SharedError<F::Error>> {\n let ref mut state = *self.inner.state.write().unwrap();\n\n match mem::replace(state, State::Done(result.clone())) {\n State::Waiting(waiters) => {\n self.inner.result_ready.store(true, Ordering::Relaxed);\n for task in waiters {\n task.unpark();\n }\n }\n State::Done(_) => panic!(\"store_result() was called twice\"),\n }\n\n result.clone().map(Async::Ready)\n }\n}\n\nimpl<F> Future for Shared<F>\n where F: Future\n{\n type Item = SharedItem<F::Item>;\n type Error = SharedError<F::Error>;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n \/\/ The logic is as follows:\n \/\/ 1. Check if the result is ready (with result_ready)\n \/\/ - If the result is ready, return it.\n \/\/ - Otherwise:\n \/\/ 2. Try lock the self.inner.original_future:\n \/\/ - If successfully locked, check again if the result is ready.\n \/\/ If it's ready, just return it.\n \/\/ Otherwise, poll the original future.\n \/\/ If the future is ready, unpark the waiting tasks from\n \/\/ self.inner.state and return the result.\n \/\/ - If the future is not ready, or if the lock failed:\n \/\/ 3. Lock the state for write.\n \/\/ 4. If the state is `State::Done`, return the result. Otherwise:\n \/\/ 5. 
Create a task, push it to the waiters vector, and return `Ok(Async::NotReady)`.\n\n \/\/ If the result is ready, just return it\n if self.inner.result_ready.load(Ordering::Relaxed) {\n return self.read_result();\n }\n\n \/\/ The result was not ready.\n \/\/ Try lock the original future.\n match self.inner.original_future.try_lock() {\n Some(mut original_future) => {\n \/\/ Other thread could already poll the result, so we check if result_ready.\n if self.inner.result_ready.load(Ordering::Relaxed) {\n return self.read_result();\n }\n\n match original_future.poll() {\n Ok(Async::Ready(item)) => {\n return self.store_result(Ok(SharedItem::new(item)));\n }\n Err(error) => {\n return self.store_result(Err(SharedError::new(error)));\n }\n Ok(Async::NotReady) => {} \/\/ A task will be parked\n }\n }\n None => {} \/\/ A task will be parked\n }\n\n let ref mut state = *self.inner.state.write().unwrap();\n match state {\n &mut State::Done(ref result) => return result.clone().map(Async::Ready),\n &mut State::Waiting(ref mut waiters) => {\n waiters.push(task::park());\n }\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl<F> Clone for Shared<F>\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared { inner: self.inner.clone() }\n }\n}\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem<T> {\n item: Arc<T>,\n}\n\nimpl<T> SharedItem<T> {\n fn new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl<T> Clone for SharedItem<T> {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl<T> Deref for SharedItem<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError<E> {\n error: Arc<E>,\n}\n\nimpl<E> SharedError<E> {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\nimpl<T> Clone for SharedError<T> {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl<E> Deref for SharedError<E> {\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add helpers - implement get_block_hash<commit_after>\/*\n * Collection of helper functions used in the state transition modules\n *\/\nuse super::active_state::ActiveState;\nuse super::block::Block;\nuse super::chain_config::ChainConfig;\nuse super::utils::errors::ParameterError;\nuse super::utils::types::Hash256;\n\n\/*\n pub fn get_signed_parent_hashes(\n active_state: &ActiveState,\n block: &Block,\n attestation: &AttestationRecord,\n chain_config: &ChainConfig)\n -> Vec<Hash256> {\n }\n *\/\n\npub fn get_block_hash(\n active_state_recent_block_hashes: &Vec<Hash256>,\n current_block_slot: &u64,\n slot: &u64,\n cycle_length: &u64, \/\/ convert from standard u8\n) -> Result<Hash256, ParameterError> {\n \/\/ active_state must have at 2*cycle_length hashes\n assert_error!(\n active_state_recent_block_hashes.len() as u64 == cycle_length * 2,\n ParameterError::InvalidInput(String::from(\n \"active state has incorrect number of block hashes\"\n ))\n );\n\n let state_start_slot = (*current_block_slot)\n .checked_sub(cycle_length * 2)\n .unwrap_or(0);\n\n assert_error!(\n (state_start_slot <= *slot) && (*slot < *current_block_slot),\n ParameterError::InvalidInput(String::from(\"incorrect slot number\"))\n );\n\n let index = 2 * 
cycle_length + (*slot) - *current_block_slot; \/\/ should always be positive\n Ok(active_state_recent_block_hashes[index as usize])\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_get_block_hash() {\n let block_slot: u64 = 10;\n let slot: u64 = 3;\n let cycle_length: u64 = 8;\n\n let mut block_hashes: Vec<Hash256> = Vec::new();\n for _i in 0..2 * cycle_length {\n block_hashes.push(Hash256::random());\n }\n\n let result = get_block_hash(&block_hashes, &block_slot, &slot, &cycle_length).unwrap();\n\n assert_eq!(\n result,\n block_hashes[(2 * cycle_length + slot - block_slot) as usize]\n );\n\n println!(\"{:?}\", result);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixed cargo build error<commit_after>pub fn estimate_price(mileage: u32, theta0: f32, theta1: f32) -> u32 {\n (theta0 + (theta1 * mileage as f32)) as u32\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>moved row builder<commit_after>use std::collections::HashMap;\nuse super::value::Value;\n\npub struct RowBuilder {\n pub data: HashMap<String, Value>,\n}\n\nimpl RowBuilder {\n pub fn new() -> RowBuilder {\n RowBuilder{data:HashMap::new()}\n }\n pub fn set_string(&mut self, key: &str, val: &str) -> &mut RowBuilder {\n self.data.insert(key.to_string(), Value::from(val));\n self\n }\n pub fn set_int(&mut self, key: &str, val: i64) -> &mut RowBuilder {\n self.data.insert(key.to_string(), Value::from(val));\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update page<commit_after>extern crate diesel;\nextern crate dotenv;\nextern crate kuchiki;\nextern crate server;\n\nuse diesel::pg::PgConnection;\nuse diesel::prelude::*;\nuse dotenv::dotenv;\nuse kuchiki::traits::*;\nuse server::models::Ad;\nuse server::schema::ads::*;\nuse server::start_logging;\nuse std::env;\n\nfn main() {\n dotenv().ok();\n start_logging();\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n let conn = PgConnection::establish(&database_url).unwrap();\n let dbads: Vec<Ad> = ads.order(create_at.desc())\n .filter(page.is_null())\n .load::<Ad>(&*conn)\n .expect(\"Couldn't get ads.\");\n for ad in dbads {\n let document = kuchiki::parse_html().one(ad.html.clone());\n\n let html_page = get_advertiser_link(&document).ok().and_then(|l| {\n l.attributes.borrow().get(\"href\").map(|i| i.to_string())\n });\n if html_page.is_ok() {\n diesel::update(ads.find(ad.id))\n .set((page.eq(html_page.unwrap())))\n .execute(&conn)\n .unwrap();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor error handling<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Readded import test<commit_after>\n\/\/ Copyright 2017 The gltf Library Developers\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate gltf;\n\nuse std::{fs, io, path};\n\nfn try_import(path: &path::Path) {\n let _ = gltf::import(&path).map_err(|err| {\n println!(\"{:?}: {:#?}\", path, err);\n panic!();\n });\n}\n\nfn run() -> io::Result<()> {\n let sample_dir_path = path::Path::new(\".\/glTF-Sample-Models\/2.0\");\n for entry in fs::read_dir(&sample_dir_path)? 
{\n let entry = entry?;\n let metadata = entry.metadata()?;\n if metadata.is_dir() {\n let entry_path = entry.path();\n if let Some(file_name) = entry_path.file_name() {\n let mut gltf_path = entry_path.join(\"glTF\").join(file_name);\n gltf_path.set_extension(\"gltf\");\n try_import(&gltf_path);\n }\n }\n }\n Ok(())\n}\n\n#[test]\nfn import() {\n \/\/ Import all 'standard' glTF in the glTF-Sample-Models\/2.0 directory.\n run().expect(\"No I\/O errors\");\n\n \/\/ Minimal example taken from https:\/\/github.com\/javagl\/glTF-Tutorials\/blob\/master\/gltfTutorial\/gltfTutorial_003_MinimalGltfFile.md\n try_import(path::Path::new(\"tests\/minimal.gltf\"));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test with non-Copy data<commit_after>use std::borrow::ToOwned;\n\n#[macro_use]\nextern crate literator;\n\n#[test]\nfn test_non_copy() {\n let v: Vec<String> = container![\"foo\".to_owned(), \"bar\".to_owned()];\n assert_eq!(&v, &[\"foo\", \"bar\"]);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\n\/\/! A widget for displaying both trees and lists\n\nuse gtk;\nuse gtk::ffi::{mod, FFIWidget};\nuse gtk::traits;\nuse gtk::cast::GTK_TREE_VIEW;\n\n\/\/\/ TreeView — A widget for displaying both trees and lists\nstruct_Widget!(TreeView)\n\nimpl TreeView {\n pub fn new() -> Option<TreeView> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_new() };\n check_pointer!(tmp_pointer, TreeView)\n }\n\n pub fn new_with_model(model: >k::TreeModel) -> Option<TreeView> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_new_with_model(model.get_pointer()) };\n check_pointer!(tmp_pointer, TreeView)\n }\n\n pub fn get_headers_visible(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_headers_visible(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_headers_visible(&mut self, visible: bool) {\n unsafe {\n ffi::gtk_tree_view_set_headers_visible(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(visible))\n }\n }\n\n pub fn columns_autosize(&mut self) {\n unsafe {\n ffi::gtk_tree_view_columns_autosize(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_headers_clickable(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_headers_clickable(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_headers_clickable(&mut self, setting: bool) {\n unsafe {\n ffi::gtk_tree_view_set_headers_clickable(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n pub fn get_rules_hint(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_rules_hint(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_rules_hint(&mut self, setting: bool) {\n unsafe {\n ffi::gtk_tree_view_set_rules_hint(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n #[cfg(any(GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn get_activate_on_single_click(&self) -> bool {\n unsafe {\n 
ffi::to_bool(ffi::gtk_tree_view_get_activate_on_single_click(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n #[cfg(any(GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn set_activate_on_single_click(&mut self, setting: bool) {\n unsafe {\n ffi::gtk_tree_view_set_activate_on_single_click(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n #[cfg(any(GTK_3_4, GTK_3_6, GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn get_n_columns(&self) -> uint {\n unsafe {\n ffi::gtk_tree_view_get_n_columns(GTK_TREE_VIEW(self.pointer)) as uint\n }\n }\n\n pub fn scroll_to_point(&mut self, tree_x: i32, tree_y: i32) {\n unsafe {\n ffi::gtk_tree_view_scroll_to_point(GTK_TREE_VIEW(self.pointer), tree_x, tree_y)\n }\n }\n\n pub fn expand_all(&mut self) {\n unsafe {\n ffi::gtk_tree_view_expand_all(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn collapse_all(&mut self) {\n unsafe {\n ffi::gtk_tree_view_collapse_all(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_reorderable(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_reorderable(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_reorderable(&mut self, reorderable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_reorderable(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(reorderable))\n }\n }\n\n pub fn unset_rows_drag_source(&mut self) {\n unsafe {\n ffi::gtk_tree_view_unset_rows_drag_source(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn unset_rows_drag_dest(&mut self) {\n unsafe {\n ffi::gtk_tree_view_unset_rows_drag_dest(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_enable_search(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_enable_search(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_enable_search(&mut self, enable_search: bool) {\n unsafe {\n ffi::gtk_tree_view_set_enable_search(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable_search))\n }\n }\n\n pub fn get_search_column(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_search_column(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_search_column(&mut self, column: i32) {\n unsafe {\n ffi::gtk_tree_view_set_search_column(GTK_TREE_VIEW(self.pointer), column)\n }\n }\n\n pub fn get_search_entry(&self) -> gtk::Entry {\n unsafe {\n ffi::FFIWidget::wrap(ffi::gtk_tree_view_get_search_entry(GTK_TREE_VIEW(self.pointer))\n as *mut ffi::C_GtkWidget)\n }\n }\n\n pub fn set_search_entry(&mut self, entry: &mut gtk::Entry) {\n unsafe {\n ffi::gtk_tree_view_set_search_entry(GTK_TREE_VIEW(self.pointer),\n entry.get_widget() as *mut ffi::C_GtkEntry)\n }\n }\n\n pub fn widget_to_tree_coords(&self, wx: i32, wy: i32) -> (i32, i32) {\n let mut tx = 0i32;\n let mut ty = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n wx, wy, &mut tx, &mut ty);\n }\n (tx, ty)\n }\n\n pub fn tree_to_widget_coords(&self, tx: i32, ty: i32) -> (i32, i32) {\n let mut wx = 0i32;\n let mut wy = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n tx, ty, &mut wx, &mut wy);\n }\n (wx, wy)\n }\n\n pub fn widget_to_bin_window_coords(&self, wx: i32, wy: i32) -> (i32, i32) {\n let mut bx = 0i32;\n let mut by = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n wx, wy, &mut bx, &mut by);\n }\n (bx, by)\n }\n\n pub fn bin_window_to_widget_coords(&self, bx: i32, by: i32) -> (i32, i32) {\n let mut wx = 0i32;\n let mut wy = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n bx, by, &mut wx, &mut wy);\n }\n (wx, wy)\n 
}\n\n pub fn tree_to_bin_window_coords(&self, tx: i32, ty: i32) -> (i32, i32) {\n let mut bx = 0i32;\n let mut by = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n tx, ty, &mut bx, &mut by);\n }\n (bx, by)\n }\n\n pub fn bin_window_to_tree_coords(&self, bx: i32, by: i32) -> (i32, i32) {\n let mut tx = 0i32;\n let mut ty = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n bx, by, &mut tx, &mut ty);\n }\n (tx, ty)\n }\n\n pub fn get_fixed_height_mode(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_fixed_height_mode(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_fixed_height_mode(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_fixed_height_mode(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_hover_selection(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_hover_selection(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_hover_selection(&mut self, hover: bool) {\n unsafe {\n ffi::gtk_tree_view_set_hover_selection(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(hover))\n }\n }\n\n pub fn get_hover_expand(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_hover_expand(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_hover_expand(&mut self, expand: bool) {\n unsafe {\n ffi::gtk_tree_view_set_hover_expand(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(expand))\n }\n }\n\n pub fn get_rubber_banding(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_rubber_banding(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_rubber_banding(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_rubber_banding(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn is_rubber_banding_active(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_is_rubber_banding_active(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn get_grid_lines(&self) -> gtk::TreeViewGridLines {\n unsafe {\n ffi::gtk_tree_view_get_grid_lines(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_grid_lines(&mut self, grid_lines: gtk::TreeViewGridLines) {\n unsafe {\n ffi::gtk_tree_view_set_grid_lines(GTK_TREE_VIEW(self.pointer), grid_lines)\n }\n }\n\n pub fn get_enable_tree_lines(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_enable_tree_lines(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_enable_tree_lines(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_enable_tree_lines(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_show_expanders(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_show_expanders(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_show_expanders(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_show_expanders(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_level_indentation(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_level_indentation(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_level_indentation(&mut self, indentation: i32) {\n unsafe {\n ffi::gtk_tree_view_set_level_indentation(GTK_TREE_VIEW(self.pointer),\n indentation)\n }\n }\n\n pub fn get_tooltip_column(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_tooltip_column(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_tooltip_column(&mut self, column: i32) {\n unsafe {\n ffi::gtk_tree_view_set_tooltip_column(GTK_TREE_VIEW(self.pointer),\n column)\n }\n }\n\n pub 
fn get_model(&self) -> Option<gtk::TreeModel> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_get_model(GTK_TREE_VIEW(self.pointer)) };\n\n if tmp_pointer.is_null() {\n None\n } else {\n Some(gtk::TreeModel::wrap_pointer(tmp_pointer))\n }\n }\n\n pub fn set_model(&mut self, model: >k::TreeModel) {\n unsafe {\n ffi::gtk_tree_view_set_model(GTK_TREE_VIEW(self.pointer),\n model.get_pointer())\n }\n }\n\n pub fn append_column(&mut self, column: >k::TreeViewColumn) -> i32 {\n unsafe { ffi::gtk_tree_view_append_column(GTK_TREE_VIEW(self.pointer),\n column.get_pointer()) }\n }\n}\n\nimpl_drop!(TreeView)\nimpl_TraitWidget!(TreeView)\n\nimpl traits::Container for TreeView {}\nimpl traits::Scrollable for TreeView {}\n<commit_msg>Add convenience function<commit_after>\/\/ This file is part of rgtk.\n\/\/\n\/\/ rgtk is free software: you can redistribute it and\/or modify\n\/\/ it under the terms of the GNU Lesser General Public License as published by\n\/\/ the Free Software Foundation, either version 3 of the License, or\n\/\/ (at your option) any later version.\n\/\/\n\/\/ rgtk is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\/\/ GNU Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public License\n\/\/ along with rgtk. If not, see <http:\/\/www.gnu.org\/licenses\/>.\n\n\/\/! A widget for displaying both trees and lists\n\nuse gtk;\nuse gtk::ffi::{mod, FFIWidget};\nuse gtk::traits;\nuse gtk::cast::GTK_TREE_VIEW;\n\n\/\/\/ TreeView — A widget for displaying both trees and lists\nstruct_Widget!(TreeView)\n\nimpl TreeView {\n pub fn new() -> Option<TreeView> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_new() };\n check_pointer!(tmp_pointer, TreeView)\n }\n\n pub fn new_with_model(model: >k::TreeModel) -> Option<TreeView> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_new_with_model(model.get_pointer()) };\n check_pointer!(tmp_pointer, TreeView)\n }\n\n pub fn get_headers_visible(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_headers_visible(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_headers_visible(&mut self, visible: bool) {\n unsafe {\n ffi::gtk_tree_view_set_headers_visible(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(visible))\n }\n }\n\n pub fn columns_autosize(&mut self) {\n unsafe {\n ffi::gtk_tree_view_columns_autosize(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_headers_clickable(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_headers_clickable(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_headers_clickable(&mut self, setting: bool) {\n unsafe {\n ffi::gtk_tree_view_set_headers_clickable(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n pub fn get_rules_hint(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_rules_hint(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_rules_hint(&mut self, setting: bool) {\n unsafe {\n ffi::gtk_tree_view_set_rules_hint(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n #[cfg(any(GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn get_activate_on_single_click(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_activate_on_single_click(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n #[cfg(any(GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn set_activate_on_single_click(&mut self, setting: bool) {\n unsafe {\n 
ffi::gtk_tree_view_set_activate_on_single_click(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(setting))\n }\n }\n\n #[cfg(any(GTK_3_4, GTK_3_6, GTK_3_8, GTK_3_10, GTK_3_12))]\n pub fn get_n_columns(&self) -> uint {\n unsafe {\n ffi::gtk_tree_view_get_n_columns(GTK_TREE_VIEW(self.pointer)) as uint\n }\n }\n\n pub fn scroll_to_point(&mut self, tree_x: i32, tree_y: i32) {\n unsafe {\n ffi::gtk_tree_view_scroll_to_point(GTK_TREE_VIEW(self.pointer), tree_x, tree_y)\n }\n }\n\n pub fn expand_all(&mut self) {\n unsafe {\n ffi::gtk_tree_view_expand_all(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn collapse_all(&mut self) {\n unsafe {\n ffi::gtk_tree_view_collapse_all(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_reorderable(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_reorderable(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_reorderable(&mut self, reorderable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_reorderable(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(reorderable))\n }\n }\n\n pub fn unset_rows_drag_source(&mut self) {\n unsafe {\n ffi::gtk_tree_view_unset_rows_drag_source(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn unset_rows_drag_dest(&mut self) {\n unsafe {\n ffi::gtk_tree_view_unset_rows_drag_dest(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn get_enable_search(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_enable_search(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_enable_search(&mut self, enable_search: bool) {\n unsafe {\n ffi::gtk_tree_view_set_enable_search(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable_search))\n }\n }\n\n pub fn get_search_column(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_search_column(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_search_column(&mut self, column: i32) {\n unsafe {\n ffi::gtk_tree_view_set_search_column(GTK_TREE_VIEW(self.pointer), column)\n }\n }\n\n pub fn get_search_entry(&self) -> gtk::Entry {\n unsafe {\n ffi::FFIWidget::wrap(ffi::gtk_tree_view_get_search_entry(GTK_TREE_VIEW(self.pointer))\n as *mut ffi::C_GtkWidget)\n }\n }\n\n pub fn set_search_entry(&mut self, entry: &mut gtk::Entry) {\n unsafe {\n ffi::gtk_tree_view_set_search_entry(GTK_TREE_VIEW(self.pointer),\n entry.get_widget() as *mut ffi::C_GtkEntry)\n }\n }\n\n pub fn widget_to_tree_coords(&self, wx: i32, wy: i32) -> (i32, i32) {\n let mut tx = 0i32;\n let mut ty = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n wx, wy, &mut tx, &mut ty);\n }\n (tx, ty)\n }\n\n pub fn tree_to_widget_coords(&self, tx: i32, ty: i32) -> (i32, i32) {\n let mut wx = 0i32;\n let mut wy = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n tx, ty, &mut wx, &mut wy);\n }\n (wx, wy)\n }\n\n pub fn widget_to_bin_window_coords(&self, wx: i32, wy: i32) -> (i32, i32) {\n let mut bx = 0i32;\n let mut by = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n wx, wy, &mut bx, &mut by);\n }\n (bx, by)\n }\n\n pub fn bin_window_to_widget_coords(&self, bx: i32, by: i32) -> (i32, i32) {\n let mut wx = 0i32;\n let mut wy = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n bx, by, &mut wx, &mut wy);\n }\n (wx, wy)\n }\n\n pub fn tree_to_bin_window_coords(&self, tx: i32, ty: i32) -> (i32, i32) {\n let mut bx = 0i32;\n let mut by = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n tx, ty, 
&mut bx, &mut by);\n }\n (bx, by)\n }\n\n pub fn bin_window_to_tree_coords(&self, bx: i32, by: i32) -> (i32, i32) {\n let mut tx = 0i32;\n let mut ty = 0i32;\n unsafe {\n ffi::gtk_tree_view_convert_widget_to_tree_coords(GTK_TREE_VIEW(self.pointer),\n bx, by, &mut tx, &mut ty);\n }\n (tx, ty)\n }\n\n pub fn get_fixed_height_mode(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_fixed_height_mode(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_fixed_height_mode(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_fixed_height_mode(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_hover_selection(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_hover_selection(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_hover_selection(&mut self, hover: bool) {\n unsafe {\n ffi::gtk_tree_view_set_hover_selection(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(hover))\n }\n }\n\n pub fn get_hover_expand(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_hover_expand(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_hover_expand(&mut self, expand: bool) {\n unsafe {\n ffi::gtk_tree_view_set_hover_expand(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(expand))\n }\n }\n\n pub fn get_rubber_banding(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_rubber_banding(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_rubber_banding(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_rubber_banding(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn is_rubber_banding_active(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_is_rubber_banding_active(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn get_grid_lines(&self) -> gtk::TreeViewGridLines {\n unsafe {\n ffi::gtk_tree_view_get_grid_lines(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_grid_lines(&mut self, grid_lines: gtk::TreeViewGridLines) {\n unsafe {\n ffi::gtk_tree_view_set_grid_lines(GTK_TREE_VIEW(self.pointer), grid_lines)\n }\n }\n\n pub fn get_enable_tree_lines(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_enable_tree_lines(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_enable_tree_lines(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_enable_tree_lines(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_show_expanders(&self) -> bool {\n unsafe {\n ffi::to_bool(ffi::gtk_tree_view_get_show_expanders(GTK_TREE_VIEW(self.pointer)))\n }\n }\n\n pub fn set_show_expanders(&mut self, enable: bool) {\n unsafe {\n ffi::gtk_tree_view_set_show_expanders(GTK_TREE_VIEW(self.pointer),\n ffi::to_gboolean(enable))\n }\n }\n\n pub fn get_level_indentation(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_level_indentation(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_level_indentation(&mut self, indentation: i32) {\n unsafe {\n ffi::gtk_tree_view_set_level_indentation(GTK_TREE_VIEW(self.pointer),\n indentation)\n }\n }\n\n pub fn get_tooltip_column(&self) -> i32 {\n unsafe {\n ffi::gtk_tree_view_get_tooltip_column(GTK_TREE_VIEW(self.pointer))\n }\n }\n\n pub fn set_tooltip_column(&mut self, column: i32) {\n unsafe {\n ffi::gtk_tree_view_set_tooltip_column(GTK_TREE_VIEW(self.pointer),\n column)\n }\n }\n\n pub fn get_model(&self) -> Option<gtk::TreeModel> {\n let tmp_pointer = unsafe { ffi::gtk_tree_view_get_model(GTK_TREE_VIEW(self.pointer)) };\n\n if tmp_pointer.is_null() {\n None\n } else {\n 
Some(gtk::TreeModel::wrap_pointer(tmp_pointer))\n }\n }\n\n pub fn set_model(&mut self, model: >k::TreeModel) {\n unsafe {\n ffi::gtk_tree_view_set_model(GTK_TREE_VIEW(self.pointer),\n model.get_pointer())\n }\n }\n\n pub fn append_column(&mut self, column: >k::TreeViewColumn) -> i32 {\n unsafe { ffi::gtk_tree_view_append_column(GTK_TREE_VIEW(self.pointer),\n column.get_pointer()) }\n }\n\n pub fn append_text_column(&mut self, column: >k::TreeViewColumn) -> i32 {\n let cell = gtk::CellRendererText::new().unwrap();\n column.pack_start(&cell, true);\n column.add_attribute(&cell, \"text\", 0);\n self.append_column(column)\n }\n}\n\nimpl_drop!(TreeView)\nimpl_TraitWidget!(TreeView)\n\nimpl traits::Container for TreeView {}\nimpl traits::Scrollable for TreeView {}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\nuse std::io::Error as IOError;\n\ngenerate_error_types!(RuntimeError, RuntimeErrorKind,\n Instantiate => \"Could not instantiate\",\n IOError => \"IO Error\",\n ProcessExitFailure => \"Process exited with failure\"\n);\n\nimpl From<IOError> for RuntimeError {\n\n fn from(ioe: IOError) -> RuntimeError {\n RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(ioe)))\n }\n\n}\n\n<commit_msg>libimagrt: Replace error module imports with macro helper<commit_after>generate_error_imports!();\nuse std::io::Error as IOError;\n\ngenerate_error_types!(RuntimeError, RuntimeErrorKind,\n Instantiate => \"Could not instantiate\",\n IOError => \"IO Error\",\n ProcessExitFailure => \"Process exited with failure\"\n);\n\nimpl From<IOError> for RuntimeError {\n\n fn from(ioe: IOError) -> RuntimeError {\n RuntimeError::new(RuntimeErrorKind::IOError, Some(Box::new(ioe)))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use static maps for simple color lookups<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Array Checks<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add testcase for #11493<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This file must never have a trailing newline\n\nfn main() {\n let x = Some(3);\n let y = x.as_ref().unwrap_or(&5); \/\/~ ERROR: borrowed value does not live long enough\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>FS notify now uses secondary thread to batch up FS events and do sync<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added bitboard tests<commit_after>use board::sjadam_move_gen::BitBoard;\nuse board::std_board;\nuse board::std_board::ChessBoard;\nuse board::std_move::ChessMove;\nuse search_algorithms::board::EvalBoard;\nuse search_algorithms::game_move::Move;\n\n#[test]\nfn rotate() {\n let start_board = BitBoard::from_board(&ChessBoard::start_board(), |p| !p.is_empty());\n let mut empty = BitBoard::empty();\n empty.rotate();\n assert_eq!(BitBoard::empty(), empty);\n let mut board = start_board.clone();\n board.rotate();\n println!(\"{:?}\", board);\n board.rotate();\n assert_eq!(start_board, board);\n \n board.rotate();\n board.rotate();\n assert_eq!(start_board, board);\n\n board.rotate();\n for rank in 0..8 {\n for &file in [0, 1, 6, 7].iter() {\n assert!(board.get(std_board::Square::from_ints(file, rank)));\n }\n for &file in [2, 3, 4, 5].iter() {\n assert!(!board.get(std_board::Square::from_ints(file, rank)),\n \"Found piece in the middle of board \\n{:?}\", board);\n }\n }\n\n board = BitBoard::from_u64(0b01000100_01001000_01010000_01010000_01110000_01000100_01000100_01111000);\n board.rotate();\n board.rotate();\n board.rotate();\n board.rotate();\n assert_eq!(board, BitBoard::from_u64(0b01000100_01001000_01010000_01010000_01110000_01000100_01000100_01111000));\n}\n\n#[test]\nfn rank() {\n let start_board = BitBoard::all_from_board(&ChessBoard::start_board());\n println!(\"{:?}\", start_board);\n for rank in 0..8 {\n println!(\"{}\", start_board.rank(rank));\n }\n assert_eq!(start_board.rank(0), 255);\n assert_eq!(start_board.rank(1), 255);\n assert_eq!(start_board.rank(2), 0);\n assert_eq!(start_board.rank(6), 255);\n}\n\n#[test]\nfn file() {\n let start_board = BitBoard::all_from_board(&ChessBoard::start_board());\n assert_eq!(start_board.file(0), 0b1100_0011);\n let mut chess_board = ChessBoard::start_board();\n chess_board.do_move(ChessMove::from_alg(\"a2a4\").unwrap());\n let board = BitBoard::all_from_board(&chess_board);\n assert_eq!(board.file(0), 0b1100_1001);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a new \"ui\" example<commit_after>extern crate rustty;\n\nuse rustty::{\n Terminal,\n Event,\n};\nuse rustty::ui::{\n Widget,\n Painter,\n Alignable,\n HorizontalAlign,\n VerticalAlign,\n create_button,\n};\n\nfn create_maindlg() -> Widget {\n let mut maindlg = Widget::new(60, 10);\n let s = \"Hello! 
This is a showcase of the ui module!\";\n let x = maindlg.halign_line(s, HorizontalAlign::Middle, 1);\n maindlg.printline(x, 2, s);\n let mut b = create_button(\"Quit\", Some('q'));\n b.align(&maindlg, HorizontalAlign::Middle, VerticalAlign::Bottom, 1);\n b.draw_into(&mut maindlg);\n maindlg.draw_box();\n maindlg\n}\n\nfn main() {\n let mut term = Terminal::new().unwrap();\n let mut maindlg = create_maindlg();\n maindlg.align(&term, HorizontalAlign::Middle, VerticalAlign::Middle, 0);\n 'main: loop {\n while let Some(Event::Key(ch)) = term.get_event(0).unwrap() {\n match ch {\n 'q' | 'Q' => break 'main,\n _ => {},\n }\n }\n\n maindlg.draw_into(&mut term);\n term.swap_buffers().unwrap();\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tutorial-15_3-do.rs<commit_after>\/**\n * 15.3 Do syntax\n * http:\/\/static.rust-lang.org\/doc\/master\/tutorial.html#do-syntax\n *\n * @license MIT license <http:\/\/www.opensource.org\/licenses\/mit-license.php>\n *\/\n\/\/ The do expression makes it easier to call functions that take procedures as arguments.\n\n\/\/ A function that takes a procedure as argument:\nfn call_it(op: proc(v: int)) {\n\t op(10)\n}\n\nfn main() {\n\t\/\/ As a caller, if we use a closure to provide the final operator argument, we can write it in a way that has a pleasant, block-like structure.\n\tcall_it(proc(n) {\n\t\t println(n.to_str());\n\t});\n\n\t\/\/ This is such a useful pattern that Rust has a special form of function call for these functions.\n\tdo call_it() |n| {\n\t\t println(n.to_str());\n\t}\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Clarify segment handling for '..'.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::create::CreateHook as GitCreateHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::retrieve::RetrieveHook as GitRetrieveHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = 
!matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n (Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitCreateHook::new(sp.clone(), HP::PostCreate)) , \"vcs\", HP::PostCreate),\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PreDelete)) , \"vcs\", HP::PreDelete),\n (Box::new(GitRetrieveHook::new(sp.clone(), HP::PostRetrieve)), \"vcs\", HP::PostRetrieve),\n (Box::new(GitUpdateHook::new(sp, HP::PostUpdate)) , \"vcs\", HP::PostUpdate),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n 
.map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. 
Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<commit_msg>Remove hook initialization from runtime setup<commit_after>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse 
libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = !matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n 
(Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PreDelete)) , \"vcs\", HP::PreDelete),\n (Box::new(GitUpdateHook::new(sp, HP::PostUpdate)) , \"vcs\", HP::PostUpdate),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. 
Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix git http.proxy config setting.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(feature = \"futures_api\",\n reason = \"futures in libcore are unstable\",\n issue = \"50547\")]\n\nuse fmt;\nuse marker::Unpin;\nuse ptr::NonNull;\n\n\/\/\/ A `Waker` is a handle for waking up a task by notifying its executor that it\n\/\/\/ is ready to be run.\n\/\/\/\n\/\/\/ This handle contains a trait object pointing to an instance of the `UnsafeWake`\n\/\/\/ trait, allowing notifications to get routed through it.\n#[repr(transparent)]\npub struct Waker {\n inner: NonNull<UnsafeWake>,\n}\n\nimpl Unpin for Waker {}\nunsafe impl Send for Waker {}\nunsafe impl Sync for Waker {}\n\nimpl Waker {\n \/\/\/ Constructs a new `Waker` directly.\n \/\/\/\n \/\/\/ Note that most code will not need to call this. Implementers of the\n \/\/\/ `UnsafeWake` trait will typically provide a wrapper that calls this\n \/\/\/ but you otherwise shouldn't call it directly.\n \/\/\/\n \/\/\/ If you're working with the standard library then it's recommended to\n \/\/\/ use the `Waker::from` function instead which works with the safe\n \/\/\/ `Arc` type and the safe `Wake` trait.\n #[inline]\n pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {\n Waker { inner: inner }\n }\n\n \/\/\/ Wake up the task associated with this `Waker`.\n #[inline]\n pub fn wake(&self) {\n unsafe { self.inner.as_ref().wake() }\n }\n\n \/\/\/ Returns whether or not this `Waker` and `other` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `Waker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `Waker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake(&self, other: &Waker) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl Clone for Waker {\n #[inline]\n fn clone(&self) -> Self {\n unsafe {\n self.inner.as_ref().clone_raw()\n }\n }\n}\n\nimpl fmt::Debug for Waker {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Waker\")\n .finish()\n }\n}\n\nimpl Drop for Waker {\n #[inline]\n fn drop(&mut self) {\n unsafe {\n self.inner.as_ref().drop_raw()\n }\n }\n}\n\n\/\/\/ A `LocalWaker` is a handle for waking up a task by notifying its executor that it\n\/\/\/ is ready to be run.\n\/\/\/\n\/\/\/ This is similar to the `Waker` type, but cannot be sent across threads.\n\/\/\/ Task executors can use this type to implement more optimized singlethreaded wakeup\n\/\/\/ behavior.\n#[repr(transparent)]\npub struct LocalWaker {\n inner: NonNull<UnsafeWake>,\n}\n\nimpl Unpin for LocalWaker {}\nimpl !Send for LocalWaker {}\nimpl !Sync for LocalWaker {}\n\nimpl LocalWaker {\n \/\/\/ Constructs a new `LocalWaker` directly.\n \/\/\/\n \/\/\/ Note that most code will not need to call this. 
Implementers of the\n \/\/\/ `UnsafeWake` trait will typically provide a wrapper that calls this\n \/\/\/ but you otherwise shouldn't call it directly.\n \/\/\/\n \/\/\/ If you're working with the standard library then it's recommended to\n \/\/\/ use the `LocalWaker::from` function instead which works with the safe\n \/\/\/ `Rc` type and the safe `LocalWake` trait.\n \/\/\/\n \/\/\/ For this function to be used safely, it must be sound to call `inner.wake_local()`\n \/\/\/ on the current thread.\n #[inline]\n pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {\n LocalWaker { inner: inner }\n }\n\n \/\/\/ Wake up the task associated with this `LocalWaker`.\n #[inline]\n pub fn wake(&self) {\n unsafe { self.inner.as_ref().wake_local() }\n }\n\n \/\/\/ Returns whether or not this `LocalWaker` and `other` `LocalWaker` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `LocalWaker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `LocalWaker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake(&self, other: &LocalWaker) -> bool {\n self.inner == other.inner\n }\n\n \/\/\/ Returns whether or not this `LocalWaker` and `other` `Waker` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `Waker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `LocalWaker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake_nonlocal(&self, other: &Waker) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl From<LocalWaker> for Waker {\n #[inline]\n fn from(local_waker: LocalWaker) -> Self {\n Waker { inner: local_waker.inner }\n }\n}\n\nimpl Clone for LocalWaker {\n #[inline]\n fn clone(&self) -> Self {\n unsafe {\n LocalWaker { inner: self.inner.as_ref().clone_raw().inner }\n }\n }\n}\n\nimpl fmt::Debug for LocalWaker {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Waker\")\n .finish()\n }\n}\n\nimpl Drop for LocalWaker {\n #[inline]\n fn drop(&mut self) {\n unsafe {\n self.inner.as_ref().drop_raw()\n }\n }\n}\n\n\/\/\/ An unsafe trait for implementing custom memory management for a `Waker` or `LocalWaker`.\n\/\/\/\n\/\/\/ A `Waker` conceptually is a cloneable trait object for `Wake`, and is\n\/\/\/ most often essentially just `Arc<dyn Wake>`. However, in some contexts\n\/\/\/ (particularly `no_std`), it's desirable to avoid `Arc` in favor of some\n\/\/\/ custom memory management strategy. This trait is designed to allow for such\n\/\/\/ customization.\n\/\/\/\n\/\/\/ When using `std`, a default implementation of the `UnsafeWake` trait is provided for\n\/\/\/ `Arc<T>` where `T: Wake` and `Rc<T>` where `T: LocalWake`.\n\/\/\/\n\/\/\/ Although the methods on `UnsafeWake` take pointers rather than references,\npub unsafe trait UnsafeWake: Send + Sync {\n \/\/\/ Creates a clone of this `UnsafeWake` and stores it behind a `Waker`.\n \/\/\/\n \/\/\/ This function will create a new uniquely owned handle that under the\n \/\/\/ hood references the same notification instance. 
In other words calls\n \/\/\/ to `wake` on the returned handle should be equivalent to calls to\n \/\/\/ `wake` on this handle.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped.\n unsafe fn clone_raw(&self) -> Waker;\n\n \/\/\/ Drops this instance of `UnsafeWake`, deallocating resources\n \/\/\/ associated with it.\n \/\/\/\n \/\/\/ FIXME(cramertj)\n \/\/\/ This method is intended to have a signature such as:\n \/\/\/\n \/\/\/ ```ignore (not-a-doctest)\n \/\/\/ fn drop_raw(self: *mut Self);\n \/\/\/ ```\n \/\/\/\n \/\/\/ Unfortunately in Rust today that signature is not object safe.\n \/\/\/ Nevertheless it's recommended to implement this function *as if* that\n \/\/\/ were its signature. As such it is not safe to call on an invalid\n \/\/\/ pointer, nor is the validity of the pointer guaranteed after this\n \/\/\/ function returns.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped.\n unsafe fn drop_raw(&self);\n\n \/\/\/ Indicates that the associated task is ready to make progress and should\n \/\/\/ be `poll`ed.\n \/\/\/\n \/\/\/ Executors generally maintain a queue of \"ready\" tasks; `wake` should place\n \/\/\/ the associated task onto this queue.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Implementations should avoid panicking, but clients should also be prepared\n \/\/\/ for panics.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped.\n unsafe fn wake(&self);\n\n \/\/\/ Indicates that the associated task is ready to make progress and should\n \/\/\/ be `poll`ed. This function is the same as `wake`, but can only be called\n \/\/\/ from the thread that this `UnsafeWake` is \"local\" to. This allows for\n \/\/\/ implementors to provide specialized wakeup behavior specific to the current\n \/\/\/ thread. This function is called by `LocalWaker::wake`.\n \/\/\/\n \/\/\/ Executors generally maintain a queue of \"ready\" tasks; `wake_local` should place\n \/\/\/ the associated task onto this queue.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Implementations should avoid panicking, but clients should also be prepared\n \/\/\/ for panics.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped, and that the\n \/\/\/ `UnsafeWake` hasn't moved from the thread on which it was created.\n unsafe fn wake_local(&self) {\n self.wake()\n }\n}\n<commit_msg>task: remove wrong comments about non-existent LocalWake trait<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(feature = \"futures_api\",\n reason = \"futures in libcore are unstable\",\n issue = \"50547\")]\n\nuse fmt;\nuse marker::Unpin;\nuse ptr::NonNull;\n\n\/\/\/ A `Waker` is a handle for waking up a task by notifying its executor that it\n\/\/\/ is ready to be run.\n\/\/\/\n\/\/\/ This handle contains a trait object pointing to an instance of the `UnsafeWake`\n\/\/\/ trait, allowing notifications to get routed through it.\n#[repr(transparent)]\npub struct Waker {\n inner: NonNull<UnsafeWake>,\n}\n\nimpl Unpin for Waker {}\nunsafe impl Send for Waker {}\nunsafe impl Sync for Waker {}\n\nimpl Waker {\n \/\/\/ Constructs a new `Waker` directly.\n \/\/\/\n \/\/\/ Note that most code will not need to call this. Implementers of the\n \/\/\/ `UnsafeWake` trait will typically provide a wrapper that calls this\n \/\/\/ but you otherwise shouldn't call it directly.\n \/\/\/\n \/\/\/ If you're working with the standard library then it's recommended to\n \/\/\/ use the `Waker::from` function instead which works with the safe\n \/\/\/ `Arc` type and the safe `Wake` trait.\n #[inline]\n pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {\n Waker { inner: inner }\n }\n\n \/\/\/ Wake up the task associated with this `Waker`.\n #[inline]\n pub fn wake(&self) {\n unsafe { self.inner.as_ref().wake() }\n }\n\n \/\/\/ Returns whether or not this `Waker` and `other` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `Waker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `Waker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake(&self, other: &Waker) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl Clone for Waker {\n #[inline]\n fn clone(&self) -> Self {\n unsafe {\n self.inner.as_ref().clone_raw()\n }\n }\n}\n\nimpl fmt::Debug for Waker {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Waker\")\n .finish()\n }\n}\n\nimpl Drop for Waker {\n #[inline]\n fn drop(&mut self) {\n unsafe {\n self.inner.as_ref().drop_raw()\n }\n }\n}\n\n\/\/\/ A `LocalWaker` is a handle for waking up a task by notifying its executor that it\n\/\/\/ is ready to be run.\n\/\/\/\n\/\/\/ This is similar to the `Waker` type, but cannot be sent across threads.\n\/\/\/ Task executors can use this type to implement more optimized singlethreaded wakeup\n\/\/\/ behavior.\n#[repr(transparent)]\npub struct LocalWaker {\n inner: NonNull<UnsafeWake>,\n}\n\nimpl Unpin for LocalWaker {}\nimpl !Send for LocalWaker {}\nimpl !Sync for LocalWaker {}\n\nimpl LocalWaker {\n \/\/\/ Constructs a new `LocalWaker` directly.\n \/\/\/\n \/\/\/ Note that most code will not need to call this. 
Implementers of the\n \/\/\/ `UnsafeWake` trait will typically provide a wrapper that calls this\n \/\/\/ but you otherwise shouldn't call it directly.\n \/\/\/\n \/\/\/ If you're working with the standard library then it's recommended to\n \/\/\/ use the `local_waker_from_nonlocal` or `local_waker` to convert a `Waker`\n \/\/\/ into a `LocalWaker`.\n \/\/\/\n \/\/\/ For this function to be used safely, it must be sound to call `inner.wake_local()`\n \/\/\/ on the current thread.\n #[inline]\n pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {\n LocalWaker { inner: inner }\n }\n\n \/\/\/ Wake up the task associated with this `LocalWaker`.\n #[inline]\n pub fn wake(&self) {\n unsafe { self.inner.as_ref().wake_local() }\n }\n\n \/\/\/ Returns whether or not this `LocalWaker` and `other` `LocalWaker` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `LocalWaker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `LocalWaker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake(&self, other: &LocalWaker) -> bool {\n self.inner == other.inner\n }\n\n \/\/\/ Returns whether or not this `LocalWaker` and `other` `Waker` awaken the same task.\n \/\/\/\n \/\/\/ This function works on a best-effort basis, and may return false even\n \/\/\/ when the `Waker`s would awaken the same task. However, if this function\n \/\/\/ returns true, it is guaranteed that the `LocalWaker`s will awaken the same\n \/\/\/ task.\n \/\/\/\n \/\/\/ This function is primarily used for optimization purposes.\n #[inline]\n pub fn will_wake_nonlocal(&self, other: &Waker) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl From<LocalWaker> for Waker {\n #[inline]\n fn from(local_waker: LocalWaker) -> Self {\n Waker { inner: local_waker.inner }\n }\n}\n\nimpl Clone for LocalWaker {\n #[inline]\n fn clone(&self) -> Self {\n unsafe {\n LocalWaker { inner: self.inner.as_ref().clone_raw().inner }\n }\n }\n}\n\nimpl fmt::Debug for LocalWaker {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Waker\")\n .finish()\n }\n}\n\nimpl Drop for LocalWaker {\n #[inline]\n fn drop(&mut self) {\n unsafe {\n self.inner.as_ref().drop_raw()\n }\n }\n}\n\n\/\/\/ An unsafe trait for implementing custom memory management for a `Waker` or `LocalWaker`.\n\/\/\/\n\/\/\/ A `Waker` conceptually is a cloneable trait object for `Wake`, and is\n\/\/\/ most often essentially just `Arc<dyn Wake>`. However, in some contexts\n\/\/\/ (particularly `no_std`), it's desirable to avoid `Arc` in favor of some\n\/\/\/ custom memory management strategy. This trait is designed to allow for such\n\/\/\/ customization.\n\/\/\/\n\/\/\/ When using `std`, a default implementation of the `UnsafeWake` trait is provided for\n\/\/\/ `Arc<T>` where `T: Wake`.\npub unsafe trait UnsafeWake: Send + Sync {\n \/\/\/ Creates a clone of this `UnsafeWake` and stores it behind a `Waker`.\n \/\/\/\n \/\/\/ This function will create a new uniquely owned handle that under the\n \/\/\/ hood references the same notification instance. In other words calls\n \/\/\/ to `wake` on the returned handle should be equivalent to calls to\n \/\/\/ `wake` on this handle.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. 
hasn't been dropped.\n unsafe fn clone_raw(&self) -> Waker;\n\n \/\/\/ Drops this instance of `UnsafeWake`, deallocating resources\n \/\/\/ associated with it.\n \/\/\/\n \/\/\/ FIXME(cramertj)\n \/\/\/ This method is intended to have a signature such as:\n \/\/\/\n \/\/\/ ```ignore (not-a-doctest)\n \/\/\/ fn drop_raw(self: *mut Self);\n \/\/\/ ```\n \/\/\/\n \/\/\/ Unfortunately in Rust today that signature is not object safe.\n \/\/\/ Nevertheless it's recommended to implement this function *as if* that\n \/\/\/ were its signature. As such it is not safe to call on an invalid\n \/\/\/ pointer, nor is the validity of the pointer guaranteed after this\n \/\/\/ function returns.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped.\n unsafe fn drop_raw(&self);\n\n \/\/\/ Indicates that the associated task is ready to make progress and should\n \/\/\/ be `poll`ed.\n \/\/\/\n \/\/\/ Executors generally maintain a queue of \"ready\" tasks; `wake` should place\n \/\/\/ the associated task onto this queue.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Implementations should avoid panicking, but clients should also be prepared\n \/\/\/ for panics.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. hasn't been dropped.\n unsafe fn wake(&self);\n\n \/\/\/ Indicates that the associated task is ready to make progress and should\n \/\/\/ be `poll`ed. This function is the same as `wake`, but can only be called\n \/\/\/ from the thread that this `UnsafeWake` is \"local\" to. This allows for\n \/\/\/ implementors to provide specialized wakeup behavior specific to the current\n \/\/\/ thread. This function is called by `LocalWaker::wake`.\n \/\/\/\n \/\/\/ Executors generally maintain a queue of \"ready\" tasks; `wake_local` should place\n \/\/\/ the associated task onto this queue.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Implementations should avoid panicking, but clients should also be prepared\n \/\/\/ for panics.\n \/\/\/\n \/\/\/ # Unsafety\n \/\/\/\n \/\/\/ This function is unsafe to call because it's asserting the `UnsafeWake`\n \/\/\/ value is in a consistent state, i.e. 
hasn't been dropped, and that the\n \/\/\/ `UnsafeWake` hasn't moved from the thread on which it was created.\n unsafe fn wake_local(&self) {\n self.wake()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] bin\/core\/imag: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Same example for the complex parser<commit_after>extern crate nom;\nextern crate chrono;\nextern crate iso8601;\n\nuse iso8601::datetime;\nuse std::env;\nuse nom::IResult::*;\nuse chrono::LocalResult;\n\nfn main() {\n let mut args = env::args();\n let _program = args.next().unwrap();\n\n for arg in args {\n let t = datetime(arg.as_bytes());\n match t {\n Done(_, dt) => {\n match dt.to_chrono() {\n LocalResult::Single(s) => println!(\"Date: {:?}\", s),\n LocalResult::Ambiguous(a,b) => println!(\"Date ambiguous: {:?} - {:?}\", a, b),\n LocalResult::None => println!(\"Invalid datetime string: {:?}\", arg),\n }\n }\n _ => { println!(\"Can't parse {:?}\", arg); }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix #719: Completions for reference fields<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix grammar of expression and pattern.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix typo in 'type' parameter for command 'hab origin key export'<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Use retrieve in retrieve variant<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>it works, and stack should be big enough<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor 'proverb'.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Try alternative approaches to some functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor imag-view to new store iterator interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #874 - RalfJung:zst, r=RalfJung<commit_after>fn main() {\n let p = {\n let b = Box::new(42);\n &*b as *const i32 as *const ()\n };\n let _x = unsafe { *p }; \/\/~ ERROR dangling pointer was dereferenced\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Traversals over the DOM and flow trees, running the layout computations.\n\nuse css::node_style::StyledNode;\nuse css::matching::{ApplicableDeclarations, MatchMethods, StyleSharingResult};\nuse construct::FlowConstructor;\nuse context::LayoutContext;\nuse flow::{Flow, MutableFlowUtils};\nuse flow::{PreorderFlowTraversal, PostorderFlowTraversal};\nuse flow;\nuse incremental::{RestyleDamage, BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW};\nuse wrapper::{layout_node_to_unsafe_layout_node, LayoutNode};\nuse wrapper::{PostorderNodeMutTraversal, ThreadSafeLayoutNode, UnsafeLayoutNode};\nuse wrapper::{PreorderDomTraversal, PostorderDomTraversal};\n\nuse servo_util::bloom::BloomFilter;\nuse servo_util::opts;\nuse servo_util::tid::tid;\nuse style::node::TNode;\n\nuse std::cell::RefCell;\nuse std::mem;\n\n\/\/\/ Every time we do another layout, the old bloom filters are invalid. This is\n\/\/\/ detected by ticking a generation number every layout.\ntype Generation = uint;\n\n\/\/\/ A pair of the bloom filter used for css selector matching, and the node to\n\/\/\/ which it applies. 
This is used to efficiently do `Descendant` selector\n\/\/\/ matches. Thanks to the bloom filter, we can avoid walking up the tree\n\/\/\/ looking for ancestors that aren't there in the majority of cases.\n\/\/\/\n\/\/\/ As we walk down the DOM tree a task-local bloom filter is built of all the\n\/\/\/ CSS `SimpleSelector`s which are part of a `Descendant` compound selector\n\/\/\/ (i.e. paired with a `Descendant` combinator, in the `next` field of a\n\/\/\/ `CompoundSelector`.\n\/\/\/\n\/\/\/ Before a `Descendant` selector match is tried, it's compared against the\n\/\/\/ bloom filter. If the bloom filter can exclude it, the selector is quickly\n\/\/\/ rejected.\n\/\/\/\n\/\/\/ When done styling a node, all selectors previously inserted into the filter\n\/\/\/ are removed.\n\/\/\/\n\/\/\/ Since a work-stealing queue is used for styling, sometimes, the bloom filter\n\/\/\/ will no longer be the for the parent of the node we're currently on. When\n\/\/\/ this happens, the task local bloom filter will be thrown away and rebuilt.\nthread_local!(static STYLE_BLOOM: RefCell<Option<(Box<BloomFilter>, UnsafeLayoutNode, Generation)>> = RefCell::new(None));\n\n\/\/\/ Returns the task local bloom filter.\n\/\/\/\n\/\/\/ If one does not exist, a new one will be made for you. If it is out of date,\n\/\/\/ it will be thrown out and a new one will be made for you.\nfn take_task_local_bloom_filter(parent_node: Option<LayoutNode>, layout_context: &LayoutContext)\n -> Box<BloomFilter> {\n STYLE_BLOOM.with(|style_bloom| {\n match (parent_node, style_bloom.borrow_mut().take()) {\n \/\/ Root node. Needs new bloom filter.\n (None, _ ) => {\n debug!(\"[{}] No parent, but new bloom filter!\", tid());\n box BloomFilter::new()\n }\n \/\/ No bloom filter for this thread yet.\n (Some(parent), None) => {\n let mut bloom_filter = box BloomFilter::new();\n insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context);\n bloom_filter\n }\n \/\/ Found cached bloom filter.\n (Some(parent), Some((mut bloom_filter, old_node, old_generation))) => {\n \/\/ Hey, the cached parent is our parent! We can reuse the bloom filter.\n if old_node == layout_node_to_unsafe_layout_node(&parent) &&\n old_generation == layout_context.shared.generation {\n debug!(\"[{}] Parent matches (={}). Reusing bloom filter.\", tid(), old_node.0);\n bloom_filter.clone()\n } else {\n \/\/ Oh no. the cached parent is stale. I guess we need a new one. 
Reuse the existing\n \/\/ allocation to avoid malloc churn.\n *bloom_filter = BloomFilter::new();\n insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context);\n bloom_filter\n }\n },\n }\n })\n}\n\nfn put_task_local_bloom_filter(bf: Box<BloomFilter>,\n unsafe_node: &UnsafeLayoutNode,\n layout_context: &LayoutContext) {\n let bf: *mut BloomFilter = unsafe { mem::transmute(bf) };\n STYLE_BLOOM.with(|style_bloom| {\n assert!(style_bloom.borrow().is_none(),\n \"Putting into a never-taken task-local bloom filter\");\n let bf: Box<BloomFilter> = unsafe { mem::transmute(bf) };\n *style_bloom.borrow_mut() = Some((bf, *unsafe_node, layout_context.shared.generation));\n })\n}\n\n\/\/\/ \"Ancestors\" in this context is inclusive of ourselves.\nfn insert_ancestors_into_bloom_filter(bf: &mut Box<BloomFilter>,\n mut n: LayoutNode,\n layout_context: &LayoutContext) {\n debug!(\"[{}] Inserting ancestors.\", tid());\n let mut ancestors = 0u;\n loop {\n ancestors += 1;\n\n n.insert_into_bloom_filter(&mut **bf);\n n = match n.layout_parent_node(layout_context.shared) {\n None => break,\n Some(p) => p,\n };\n }\n debug!(\"[{}] Inserted {} ancestors.\", tid(), ancestors);\n}\n\n\/\/\/ The recalc-style-for-node traversal, which styles each node and must run before\n\/\/\/ layout computation. This computes the styles applied to each node.\n#[derive(Copy)]\npub struct RecalcStyleForNode<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> {\n #[inline]\n fn process(&self, node: LayoutNode) {\n \/\/ Initialize layout data.\n \/\/\n \/\/ FIXME(pcwalton): Stop allocating here. Ideally this should just be done by the HTML\n \/\/ parser.\n node.initialize_layout_data(self.layout_context.shared.layout_chan.clone());\n\n \/\/ Get the parent node.\n let parent_opt = node.layout_parent_node(self.layout_context.shared);\n\n \/\/ Get the style bloom filter.\n let bf = take_task_local_bloom_filter(parent_opt, self.layout_context);\n\n \/\/ Just needs to be wrapped in an option for `match_node`.\n let some_bf = Some(bf);\n\n let nonincremental_layout = opts::get().nonincremental_layout;\n if nonincremental_layout || node.is_dirty() {\n \/\/ Remove existing CSS styles from nodes whose content has changed (e.g. 
text changed),\n \/\/ to force non-incremental reflow.\n if node.has_changed() {\n let node = ThreadSafeLayoutNode::new(&node);\n node.unstyle();\n }\n\n \/\/ Check to see whether we can share a style with someone.\n let style_sharing_candidate_cache =\n self.layout_context.style_sharing_candidate_cache();\n let sharing_result = unsafe {\n node.share_style_if_possible(style_sharing_candidate_cache,\n parent_opt.clone())\n };\n \/\/ Otherwise, match and cascade selectors.\n match sharing_result {\n StyleSharingResult::CannotShare(mut shareable) => {\n let mut applicable_declarations = ApplicableDeclarations::new();\n\n if node.is_element() {\n \/\/ Perform the CSS selector matching.\n let stylist = unsafe { &*self.layout_context.shared.stylist };\n node.match_node(stylist,\n &some_bf,\n &mut applicable_declarations,\n &mut shareable);\n } else {\n ThreadSafeLayoutNode::new(&node).set_restyle_damage(RestyleDamage::all())\n }\n\n \/\/ Perform the CSS cascade.\n unsafe {\n node.cascade_node(parent_opt,\n &applicable_declarations,\n self.layout_context.applicable_declarations_cache());\n }\n\n \/\/ Add ourselves to the LRU cache.\n if shareable {\n style_sharing_candidate_cache.insert_if_possible(&node);\n }\n }\n StyleSharingResult::StyleWasShared(index, damage) => {\n style_sharing_candidate_cache.touch(index);\n ThreadSafeLayoutNode::new(&node).set_restyle_damage(damage);\n }\n }\n }\n\n let mut bf = some_bf.unwrap();\n\n let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node);\n\n \/\/ Before running the children, we need to insert our nodes into the bloom\n \/\/ filter.\n debug!(\"[{}] + {:X}\", tid(), unsafe_layout_node.0);\n node.insert_into_bloom_filter(&mut *bf);\n\n \/\/ NB: flow construction updates the bloom filter on the way up.\n put_task_local_bloom_filter(bf, &unsafe_layout_node, self.layout_context);\n }\n}\n\n\/\/\/ The flow construction traversal, which builds flows for styled nodes.\n#[derive(Copy)]\npub struct ConstructFlows<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderDomTraversal for ConstructFlows<'a> {\n #[inline]\n fn process(&self, node: LayoutNode) {\n \/\/ Construct flows for this node.\n {\n let tnode = ThreadSafeLayoutNode::new(&node);\n\n \/\/ Always reconstruct if incremental layout is turned off.\n let nonincremental_layout = opts::get().nonincremental_layout;\n if nonincremental_layout || node.has_dirty_descendants() {\n let mut flow_constructor = FlowConstructor::new(self.layout_context);\n if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {\n flow_constructor.process(&tnode);\n debug!(\"Constructed flow for {:x}: {:x}\",\n tnode.debug_id(),\n tnode.flow_debug_id());\n }\n }\n\n \/\/ Reset the layout damage in this node. 
It's been propagated to the\n \/\/ flow by the flow constructor.\n tnode.set_restyle_damage(RestyleDamage::empty());\n }\n\n unsafe {\n node.set_changed(false);\n node.set_dirty(false);\n node.set_dirty_siblings(false);\n node.set_dirty_descendants(false);\n }\n\n let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node);\n\n let (mut bf, old_node, old_generation) =\n STYLE_BLOOM.with(|style_bloom| {\n mem::replace(&mut *style_bloom.borrow_mut(), None)\n .expect(\"The bloom filter should have been set by style recalc.\")\n });\n\n assert_eq!(old_node, unsafe_layout_node);\n assert_eq!(old_generation, self.layout_context.shared.generation);\n\n match node.layout_parent_node(self.layout_context.shared) {\n None => {\n debug!(\"[{}] - {:X}, and deleting BF.\", tid(), unsafe_layout_node.0);\n \/\/ If this is the reflow root, eat the task-local bloom filter.\n }\n Some(parent) => {\n \/\/ Otherwise, put it back, but remove this node.\n node.remove_from_bloom_filter(&mut *bf);\n let unsafe_parent = layout_node_to_unsafe_layout_node(&parent);\n put_task_local_bloom_filter(bf, &unsafe_parent, self.layout_context);\n },\n };\n }\n}\n\n\/\/\/ The flow tree verification traversal. This is only on in debug builds.\n#[cfg(debug)]\nstruct FlowTreeVerification;\n\n#[cfg(debug)]\nimpl PreorderFlow for FlowTreeVerification {\n #[inline]\n fn process(&mut self, flow: &mut Flow) {\n let base = flow::base(flow);\n if !base.flags.is_leaf() && !base.flags.is_nonleaf() {\n println!(\"flow tree verification failed: flow wasn't a leaf or a nonleaf!\");\n flow.dump();\n panic!(\"flow tree verification failed\")\n }\n }\n}\n\n\/\/\/ The bubble-inline-sizes traversal, the first part of layout computation. This computes\n\/\/\/ preferred and intrinsic inline-sizes and bubbles them up the tree.\npub struct BubbleISizes<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for BubbleISizes<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.bubble_inline_sizes();\n flow::mut_base(flow).restyle_damage.remove(BUBBLE_ISIZES);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.contains(BUBBLE_ISIZES)\n }\n}\n\n\/\/\/ The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.\n#[derive(Copy)]\npub struct AssignISizes<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderFlowTraversal for AssignISizes<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.assign_inline_sizes(self.layout_context);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW)\n }\n}\n\n\/\/\/ The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of\n\/\/\/ layout computation. Determines the final block-sizes for all layout objects, computes\n\/\/\/ positions, and computes overflow regions. 
In Gecko this corresponds to `Reflow` and\n\/\/\/ `FinishAndStoreOverflow`.\n#[derive(Copy)]\npub struct AssignBSizesAndStoreOverflow<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for AssignBSizesAndStoreOverflow<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n \/\/ Can't do anything with flows impacted by floats until we reach their inorder parent.\n \/\/ NB: We must return without resetting the restyle bits for these, as we haven't actually\n \/\/ reflowed anything!\n if flow::base(flow).flags.impacted_by_floats() {\n return\n }\n\n flow.assign_block_size(self.layout_context);\n flow.store_overflow(self.layout_context);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW)\n }\n}\n\n#[derive(Copy)]\npub struct ComputeAbsolutePositions<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderFlowTraversal for ComputeAbsolutePositions<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.compute_absolute_position();\n }\n}\n\n#[derive(Copy)]\npub struct BuildDisplayList<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for BuildDisplayList<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.build_display_list(self.layout_context);\n }\n}\n<commit_msg>Remove transmutes from put_task_local_bloom_filter.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Traversals over the DOM and flow trees, running the layout computations.\n\nuse css::node_style::StyledNode;\nuse css::matching::{ApplicableDeclarations, MatchMethods, StyleSharingResult};\nuse construct::FlowConstructor;\nuse context::LayoutContext;\nuse flow::{Flow, MutableFlowUtils};\nuse flow::{PreorderFlowTraversal, PostorderFlowTraversal};\nuse flow;\nuse incremental::{RestyleDamage, BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW};\nuse wrapper::{layout_node_to_unsafe_layout_node, LayoutNode};\nuse wrapper::{PostorderNodeMutTraversal, ThreadSafeLayoutNode, UnsafeLayoutNode};\nuse wrapper::{PreorderDomTraversal, PostorderDomTraversal};\n\nuse servo_util::bloom::BloomFilter;\nuse servo_util::opts;\nuse servo_util::tid::tid;\nuse style::node::TNode;\n\nuse std::cell::RefCell;\nuse std::mem;\n\n\/\/\/ Every time we do another layout, the old bloom filters are invalid. This is\n\/\/\/ detected by ticking a generation number every layout.\ntype Generation = uint;\n\n\/\/\/ A pair of the bloom filter used for css selector matching, and the node to\n\/\/\/ which it applies. This is used to efficiently do `Descendant` selector\n\/\/\/ matches. Thanks to the bloom filter, we can avoid walking up the tree\n\/\/\/ looking for ancestors that aren't there in the majority of cases.\n\/\/\/\n\/\/\/ As we walk down the DOM tree a task-local bloom filter is built of all the\n\/\/\/ CSS `SimpleSelector`s which are part of a `Descendant` compound selector\n\/\/\/ (i.e. paired with a `Descendant` combinator, in the `next` field of a\n\/\/\/ `CompoundSelector`.\n\/\/\/\n\/\/\/ Before a `Descendant` selector match is tried, it's compared against the\n\/\/\/ bloom filter. 
If the bloom filter can exclude it, the selector is quickly\n\/\/\/ rejected.\n\/\/\/\n\/\/\/ When done styling a node, all selectors previously inserted into the filter\n\/\/\/ are removed.\n\/\/\/\n\/\/\/ Since a work-stealing queue is used for styling, sometimes, the bloom filter\n\/\/\/ will no longer be the for the parent of the node we're currently on. When\n\/\/\/ this happens, the task local bloom filter will be thrown away and rebuilt.\nthread_local!(static STYLE_BLOOM: RefCell<Option<(Box<BloomFilter>, UnsafeLayoutNode, Generation)>> = RefCell::new(None));\n\n\/\/\/ Returns the task local bloom filter.\n\/\/\/\n\/\/\/ If one does not exist, a new one will be made for you. If it is out of date,\n\/\/\/ it will be thrown out and a new one will be made for you.\nfn take_task_local_bloom_filter(parent_node: Option<LayoutNode>, layout_context: &LayoutContext)\n -> Box<BloomFilter> {\n STYLE_BLOOM.with(|style_bloom| {\n match (parent_node, style_bloom.borrow_mut().take()) {\n \/\/ Root node. Needs new bloom filter.\n (None, _ ) => {\n debug!(\"[{}] No parent, but new bloom filter!\", tid());\n box BloomFilter::new()\n }\n \/\/ No bloom filter for this thread yet.\n (Some(parent), None) => {\n let mut bloom_filter = box BloomFilter::new();\n insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context);\n bloom_filter\n }\n \/\/ Found cached bloom filter.\n (Some(parent), Some((mut bloom_filter, old_node, old_generation))) => {\n \/\/ Hey, the cached parent is our parent! We can reuse the bloom filter.\n if old_node == layout_node_to_unsafe_layout_node(&parent) &&\n old_generation == layout_context.shared.generation {\n debug!(\"[{}] Parent matches (={}). Reusing bloom filter.\", tid(), old_node.0);\n bloom_filter.clone()\n } else {\n \/\/ Oh no. the cached parent is stale. I guess we need a new one. Reuse the existing\n \/\/ allocation to avoid malloc churn.\n *bloom_filter = BloomFilter::new();\n insert_ancestors_into_bloom_filter(&mut bloom_filter, parent, layout_context);\n bloom_filter\n }\n },\n }\n })\n}\n\nfn put_task_local_bloom_filter(bf: Box<BloomFilter>,\n unsafe_node: &UnsafeLayoutNode,\n layout_context: &LayoutContext) {\n STYLE_BLOOM.with(move |style_bloom| {\n assert!(style_bloom.borrow().is_none(),\n \"Putting into a never-taken task-local bloom filter\");\n *style_bloom.borrow_mut() = Some((bf, *unsafe_node, layout_context.shared.generation));\n })\n}\n\n\/\/\/ \"Ancestors\" in this context is inclusive of ourselves.\nfn insert_ancestors_into_bloom_filter(bf: &mut Box<BloomFilter>,\n mut n: LayoutNode,\n layout_context: &LayoutContext) {\n debug!(\"[{}] Inserting ancestors.\", tid());\n let mut ancestors = 0u;\n loop {\n ancestors += 1;\n\n n.insert_into_bloom_filter(&mut **bf);\n n = match n.layout_parent_node(layout_context.shared) {\n None => break,\n Some(p) => p,\n };\n }\n debug!(\"[{}] Inserted {} ancestors.\", tid(), ancestors);\n}\n\n\/\/\/ The recalc-style-for-node traversal, which styles each node and must run before\n\/\/\/ layout computation. This computes the styles applied to each node.\n#[derive(Copy)]\npub struct RecalcStyleForNode<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderDomTraversal for RecalcStyleForNode<'a> {\n #[inline]\n fn process(&self, node: LayoutNode) {\n \/\/ Initialize layout data.\n \/\/\n \/\/ FIXME(pcwalton): Stop allocating here. 
Ideally this should just be done by the HTML\n \/\/ parser.\n node.initialize_layout_data(self.layout_context.shared.layout_chan.clone());\n\n \/\/ Get the parent node.\n let parent_opt = node.layout_parent_node(self.layout_context.shared);\n\n \/\/ Get the style bloom filter.\n let bf = take_task_local_bloom_filter(parent_opt, self.layout_context);\n\n \/\/ Just needs to be wrapped in an option for `match_node`.\n let some_bf = Some(bf);\n\n let nonincremental_layout = opts::get().nonincremental_layout;\n if nonincremental_layout || node.is_dirty() {\n \/\/ Remove existing CSS styles from nodes whose content has changed (e.g. text changed),\n \/\/ to force non-incremental reflow.\n if node.has_changed() {\n let node = ThreadSafeLayoutNode::new(&node);\n node.unstyle();\n }\n\n \/\/ Check to see whether we can share a style with someone.\n let style_sharing_candidate_cache =\n self.layout_context.style_sharing_candidate_cache();\n let sharing_result = unsafe {\n node.share_style_if_possible(style_sharing_candidate_cache,\n parent_opt.clone())\n };\n \/\/ Otherwise, match and cascade selectors.\n match sharing_result {\n StyleSharingResult::CannotShare(mut shareable) => {\n let mut applicable_declarations = ApplicableDeclarations::new();\n\n if node.is_element() {\n \/\/ Perform the CSS selector matching.\n let stylist = unsafe { &*self.layout_context.shared.stylist };\n node.match_node(stylist,\n &some_bf,\n &mut applicable_declarations,\n &mut shareable);\n } else {\n ThreadSafeLayoutNode::new(&node).set_restyle_damage(RestyleDamage::all())\n }\n\n \/\/ Perform the CSS cascade.\n unsafe {\n node.cascade_node(parent_opt,\n &applicable_declarations,\n self.layout_context.applicable_declarations_cache());\n }\n\n \/\/ Add ourselves to the LRU cache.\n if shareable {\n style_sharing_candidate_cache.insert_if_possible(&node);\n }\n }\n StyleSharingResult::StyleWasShared(index, damage) => {\n style_sharing_candidate_cache.touch(index);\n ThreadSafeLayoutNode::new(&node).set_restyle_damage(damage);\n }\n }\n }\n\n let mut bf = some_bf.unwrap();\n\n let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node);\n\n \/\/ Before running the children, we need to insert our nodes into the bloom\n \/\/ filter.\n debug!(\"[{}] + {:X}\", tid(), unsafe_layout_node.0);\n node.insert_into_bloom_filter(&mut *bf);\n\n \/\/ NB: flow construction updates the bloom filter on the way up.\n put_task_local_bloom_filter(bf, &unsafe_layout_node, self.layout_context);\n }\n}\n\n\/\/\/ The flow construction traversal, which builds flows for styled nodes.\n#[derive(Copy)]\npub struct ConstructFlows<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderDomTraversal for ConstructFlows<'a> {\n #[inline]\n fn process(&self, node: LayoutNode) {\n \/\/ Construct flows for this node.\n {\n let tnode = ThreadSafeLayoutNode::new(&node);\n\n \/\/ Always reconstruct if incremental layout is turned off.\n let nonincremental_layout = opts::get().nonincremental_layout;\n if nonincremental_layout || node.has_dirty_descendants() {\n let mut flow_constructor = FlowConstructor::new(self.layout_context);\n if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {\n flow_constructor.process(&tnode);\n debug!(\"Constructed flow for {:x}: {:x}\",\n tnode.debug_id(),\n tnode.flow_debug_id());\n }\n }\n\n \/\/ Reset the layout damage in this node. 
It's been propagated to the\n \/\/ flow by the flow constructor.\n tnode.set_restyle_damage(RestyleDamage::empty());\n }\n\n unsafe {\n node.set_changed(false);\n node.set_dirty(false);\n node.set_dirty_siblings(false);\n node.set_dirty_descendants(false);\n }\n\n let unsafe_layout_node = layout_node_to_unsafe_layout_node(&node);\n\n let (mut bf, old_node, old_generation) =\n STYLE_BLOOM.with(|style_bloom| {\n mem::replace(&mut *style_bloom.borrow_mut(), None)\n .expect(\"The bloom filter should have been set by style recalc.\")\n });\n\n assert_eq!(old_node, unsafe_layout_node);\n assert_eq!(old_generation, self.layout_context.shared.generation);\n\n match node.layout_parent_node(self.layout_context.shared) {\n None => {\n debug!(\"[{}] - {:X}, and deleting BF.\", tid(), unsafe_layout_node.0);\n \/\/ If this is the reflow root, eat the task-local bloom filter.\n }\n Some(parent) => {\n \/\/ Otherwise, put it back, but remove this node.\n node.remove_from_bloom_filter(&mut *bf);\n let unsafe_parent = layout_node_to_unsafe_layout_node(&parent);\n put_task_local_bloom_filter(bf, &unsafe_parent, self.layout_context);\n },\n };\n }\n}\n\n\/\/\/ The flow tree verification traversal. This is only on in debug builds.\n#[cfg(debug)]\nstruct FlowTreeVerification;\n\n#[cfg(debug)]\nimpl PreorderFlow for FlowTreeVerification {\n #[inline]\n fn process(&mut self, flow: &mut Flow) {\n let base = flow::base(flow);\n if !base.flags.is_leaf() && !base.flags.is_nonleaf() {\n println!(\"flow tree verification failed: flow wasn't a leaf or a nonleaf!\");\n flow.dump();\n panic!(\"flow tree verification failed\")\n }\n }\n}\n\n\/\/\/ The bubble-inline-sizes traversal, the first part of layout computation. This computes\n\/\/\/ preferred and intrinsic inline-sizes and bubbles them up the tree.\npub struct BubbleISizes<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for BubbleISizes<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.bubble_inline_sizes();\n flow::mut_base(flow).restyle_damage.remove(BUBBLE_ISIZES);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.contains(BUBBLE_ISIZES)\n }\n}\n\n\/\/\/ The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.\n#[derive(Copy)]\npub struct AssignISizes<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderFlowTraversal for AssignISizes<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.assign_inline_sizes(self.layout_context);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW)\n }\n}\n\n\/\/\/ The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of\n\/\/\/ layout computation. Determines the final block-sizes for all layout objects, computes\n\/\/\/ positions, and computes overflow regions. 
In Gecko this corresponds to `Reflow` and\n\/\/\/ `FinishAndStoreOverflow`.\n#[derive(Copy)]\npub struct AssignBSizesAndStoreOverflow<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for AssignBSizesAndStoreOverflow<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n \/\/ Can't do anything with flows impacted by floats until we reach their inorder parent.\n \/\/ NB: We must return without resetting the restyle bits for these, as we haven't actually\n \/\/ reflowed anything!\n if flow::base(flow).flags.impacted_by_floats() {\n return\n }\n\n flow.assign_block_size(self.layout_context);\n flow.store_overflow(self.layout_context);\n }\n\n #[inline]\n fn should_process(&self, flow: &mut Flow) -> bool {\n flow::base(flow).restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW)\n }\n}\n\n#[derive(Copy)]\npub struct ComputeAbsolutePositions<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PreorderFlowTraversal for ComputeAbsolutePositions<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.compute_absolute_position();\n }\n}\n\n#[derive(Copy)]\npub struct BuildDisplayList<'a> {\n pub layout_context: &'a LayoutContext<'a>,\n}\n\nimpl<'a> PostorderFlowTraversal for BuildDisplayList<'a> {\n #[inline]\n fn process(&self, flow: &mut Flow) {\n flow.build_display_list(self.layout_context);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add logic to determine implicit constructors.<commit_after>\/\/ Copyright 2022 Google LLC\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ https:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
Module which understands C++ constructor synthesis rules.\n\n#[cfg_attr(test, derive(Eq, PartialEq))]\npub(super) struct ImplicitConstructorsNeeded {\n default_constructor: bool,\n copy_constructor_taking_t: bool,\n copy_constructor_taking_const_t: bool,\n move_constructor: bool,\n}\n\npub(super) struct ExplicitItemsFound {\n move_constructor: bool,\n copy_constructor: bool,\n any_other_constructor: bool,\n all_bases_have_const_copy_constructors: bool,\n all_fields_have_const_copy_constructors: bool,\n destructor: bool,\n copy_assignment_operator: bool,\n move_assignment_operator: bool,\n}\n\n#[cfg(test)]\nimpl Default for ExplicitItemsFound {\n fn default() -> Self {\n Self {\n move_constructor: false,\n copy_constructor: false,\n any_other_constructor: false,\n all_bases_have_const_copy_constructors: true,\n all_fields_have_const_copy_constructors: true,\n destructor: false,\n copy_assignment_operator: false,\n move_assignment_operator: false,\n }\n }\n}\n\npub(super) fn determine_implicit_constructors(\n explicits: ExplicitItemsFound,\n) -> ImplicitConstructorsNeeded {\n let any_constructor =\n explicits.copy_constructor || explicits.move_constructor || explicits.any_other_constructor;\n \/\/ If no user-declared constructors of any kind are provided for a class type (struct, class, or union), the compiler will always declare a default constructor as an inline public member of its class.\n let default_constructor = !any_constructor;\n\n \/\/ If no user-defined copy constructors are provided for a class type (struct, class, or union), the compiler will always declare a copy constructor as a non-explicit inline public member of its class\n let (copy_constructor_taking_const_t, copy_constructor_taking_t) = if explicits.copy_constructor\n {\n (false, false)\n } else if explicits.all_bases_have_const_copy_constructors\n && explicits.all_fields_have_const_copy_constructors\n {\n (true, false)\n } else {\n (false, true)\n };\n\n \/\/ If no user-defined move constructors are provided for a class type (struct, class, or union), and all of the following is true:\n \/\/ there are no user-declared copy constructors;\n \/\/ there are no user-declared copy assignment operators;\n \/\/ there are no user-declared move assignment operators;\n \/\/ there is no user-declared destructor.\n \/\/ then the compiler will declare a move constructor\n let move_constructor = !(explicits.move_constructor\n || explicits.copy_constructor\n || explicits.destructor\n || explicits.copy_assignment_operator\n || explicits.move_assignment_operator);\n\n ImplicitConstructorsNeeded {\n default_constructor,\n copy_constructor_taking_t,\n copy_constructor_taking_const_t,\n move_constructor,\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::determine_implicit_constructors;\n\n use super::ExplicitItemsFound;\n\n #[test]\n fn test_simple() {\n let inputs = ExplicitItemsFound::default();\n let outputs = determine_implicit_constructors(inputs);\n assert_eq!(true, outputs.default_constructor);\n assert_eq!(true, outputs.copy_constructor_taking_const_t);\n assert_eq!(false, outputs.copy_constructor_taking_t);\n assert_eq!(true, outputs.move_constructor);\n }\n\n #[test]\n fn test_with_destructor() {\n let inputs = ExplicitItemsFound {\n destructor: true,\n ..Default::default()\n };\n let outputs = determine_implicit_constructors(inputs);\n assert_eq!(true, outputs.default_constructor);\n assert_eq!(true, outputs.copy_constructor_taking_const_t);\n assert_eq!(false, outputs.copy_constructor_taking_t);\n assert_eq!(false, 
outputs.move_constructor);\n }\n\n #[test]\n fn test_with_pesky_base() {\n let inputs = ExplicitItemsFound {\n all_bases_have_const_copy_constructors: false,\n ..Default::default()\n };\n let outputs = determine_implicit_constructors(inputs);\n assert_eq!(true, outputs.default_constructor);\n assert_eq!(false, outputs.copy_constructor_taking_const_t);\n assert_eq!(true, outputs.copy_constructor_taking_t);\n assert_eq!(true, outputs.move_constructor);\n }\n\n #[test]\n fn test_with_user_defined_move_constructor() {\n let inputs = ExplicitItemsFound {\n move_constructor: true,\n ..Default::default()\n };\n let outputs = determine_implicit_constructors(inputs);\n assert_eq!(false, outputs.default_constructor);\n assert_eq!(true, outputs.copy_constructor_taking_const_t);\n assert_eq!(false, outputs.copy_constructor_taking_t);\n assert_eq!(false, outputs.move_constructor);\n }\n\n #[test]\n fn test_with_user_defined_misc_constructor() {\n let inputs = ExplicitItemsFound {\n any_other_constructor: true,\n ..Default::default()\n };\n let outputs = determine_implicit_constructors(inputs);\n assert_eq!(false, outputs.default_constructor);\n assert_eq!(true, outputs.copy_constructor_taking_const_t);\n assert_eq!(false, outputs.copy_constructor_taking_t);\n assert_eq!(true, outputs.move_constructor);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Closes #2: Capture total payload length for captured messages<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>destructuring tuple using let<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ip2country unit test.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add disablelist command<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Optimize default<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse llvm;\nuse llvm::ValueRef;\nuse common::*;\nuse rustc::ty::Ty;\n\npub fn slice_for_each<'a, 'tcx, F>(\n bcx: &BlockAndBuilder<'a, 'tcx>,\n data_ptr: ValueRef,\n unit_ty: Ty<'tcx>,\n len: ValueRef,\n f: F\n) -> BlockAndBuilder<'a, 'tcx> where F: FnOnce(&BlockAndBuilder<'a, 'tcx>, ValueRef) {\n \/\/ Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)\n let zst = type_is_zero_size(bcx.ccx(), unit_ty);\n let add = |bcx: &BlockAndBuilder, a, b| if zst {\n bcx.add(a, b)\n } else {\n bcx.inbounds_gep(a, &[b])\n };\n\n let body_bcx = bcx.fcx().build_new_block(\"slice_loop_body\");\n let next_bcx = bcx.fcx().build_new_block(\"slice_loop_next\");\n let header_bcx = bcx.fcx().build_new_block(\"slice_loop_header\");\n\n let start = if zst {\n C_uint(bcx.ccx(), 0usize)\n } else {\n data_ptr\n };\n let end = add(&bcx, start, len);\n\n bcx.br(header_bcx.llbb());\n let current = header_bcx.phi(val_ty(start), &[start], &[bcx.llbb()]);\n\n let keep_going = header_bcx.icmp(llvm::IntNE, current, end);\n header_bcx.cond_br(keep_going, body_bcx.llbb(), next_bcx.llbb());\n\n f(&body_bcx, if zst { data_ptr } else { current });\n let next = add(&body_bcx, current, C_uint(bcx.ccx(), 1usize));\n body_bcx.add_incoming_to_phi(current, next, body_bcx.llbb());\n body_bcx.br(header_bcx.llbb());\n next_bcx\n}\n<commit_msg>Make add_incoming_to_phi call slightly less confusing.<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse llvm;\nuse llvm::ValueRef;\nuse common::*;\nuse rustc::ty::Ty;\n\npub fn slice_for_each<'a, 'tcx, F>(\n bcx: &BlockAndBuilder<'a, 'tcx>,\n data_ptr: ValueRef,\n unit_ty: Ty<'tcx>,\n len: ValueRef,\n f: F\n) -> BlockAndBuilder<'a, 'tcx> where F: FnOnce(&BlockAndBuilder<'a, 'tcx>, ValueRef) {\n \/\/ Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)\n let zst = type_is_zero_size(bcx.ccx(), unit_ty);\n let add = |bcx: &BlockAndBuilder, a, b| if zst {\n bcx.add(a, b)\n } else {\n bcx.inbounds_gep(a, &[b])\n };\n\n let body_bcx = bcx.fcx().build_new_block(\"slice_loop_body\");\n let next_bcx = bcx.fcx().build_new_block(\"slice_loop_next\");\n let header_bcx = bcx.fcx().build_new_block(\"slice_loop_header\");\n\n let start = if zst {\n C_uint(bcx.ccx(), 0usize)\n } else {\n data_ptr\n };\n let end = add(&bcx, start, len);\n\n bcx.br(header_bcx.llbb());\n let current = header_bcx.phi(val_ty(start), &[start], &[bcx.llbb()]);\n\n let keep_going = header_bcx.icmp(llvm::IntNE, current, end);\n header_bcx.cond_br(keep_going, body_bcx.llbb(), next_bcx.llbb());\n\n f(&body_bcx, if zst { data_ptr } else { current });\n let next = add(&body_bcx, current, C_uint(bcx.ccx(), 1usize));\n header_bcx.add_incoming_to_phi(current, next, body_bcx.llbb());\n body_bcx.br(header_bcx.llbb());\n next_bcx\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse prelude::v1::*;\n\nuse ffi::CStr;\nuse io;\nuse libc::{self, c_int, size_t};\nuse net::{SocketAddr, Shutdown};\nuse str;\nuse sys::fd::FileDesc;\nuse sys_common::{AsInner, FromInner, IntoInner};\nuse sys_common::net::{getsockopt, setsockopt};\nuse time::Duration;\n\npub use sys::{cvt, cvt_r};\npub use libc as netc;\n\npub type wrlen_t = size_t;\n\npub struct Socket(FileDesc);\n\npub fn init() {}\n\npub fn cvt_gai(err: c_int) -> io::Result<()> {\n if err == 0 { return Ok(()) }\n\n let detail = unsafe {\n str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap()\n .to_owned()\n };\n Err(io::Error::new(io::ErrorKind::Other,\n &format!(\"failed to lookup address information: {}\",\n detail)[..]))\n}\n\nimpl Socket {\n pub fn new(addr: &SocketAddr, ty: c_int) -> io::Result<Socket> {\n let fam = match *addr {\n SocketAddr::V4(..) => libc::AF_INET,\n SocketAddr::V6(..) => libc::AF_INET6,\n };\n unsafe {\n let fd = try!(cvt(libc::socket(fam, ty, 0)));\n let fd = FileDesc::new(fd);\n fd.set_cloexec();\n Ok(Socket(fd))\n }\n }\n\n pub fn accept(&self, storage: *mut libc::sockaddr,\n len: *mut libc::socklen_t) -> io::Result<Socket> {\n let fd = try!(cvt_r(|| unsafe {\n libc::accept(self.0.raw(), storage, len)\n }));\n let fd = FileDesc::new(fd);\n fd.set_cloexec();\n Ok(Socket(fd))\n }\n\n pub fn duplicate(&self) -> io::Result<Socket> {\n self.0.duplicate().map(Socket)\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n\n pub fn set_timeout(&self, dur: Option<Duration>, kind: libc::c_int) -> io::Result<()> {\n let timeout = match dur {\n Some(dur) => {\n if dur.as_secs() == 0 && dur.subsec_nanos() == 0 {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot set a 0 duration timeout\"));\n }\n\n let secs = if dur.as_secs() > libc::time_t::max_value() as u64 {\n libc::time_t::max_value()\n } else {\n dur.as_secs() as libc::time_t\n };\n let mut timeout = libc::timeval {\n tv_sec: secs,\n tv_usec: (dur.subsec_nanos() \/ 1000) as libc::suseconds_t,\n };\n if timeout.tv_sec == 0 && timeout.tv_usec == 0 {\n timeout.tv_usec = 1;\n }\n timeout\n }\n None => {\n libc::timeval {\n tv_sec: 0,\n tv_usec: 0,\n }\n }\n };\n setsockopt(self, libc::SOL_SOCKET, kind, timeout)\n }\n\n pub fn timeout(&self, kind: libc::c_int) -> io::Result<Option<Duration>> {\n let raw: libc::timeval = try!(getsockopt(self, libc::SOL_SOCKET, kind));\n if raw.tv_sec == 0 && raw.tv_usec == 0 {\n Ok(None)\n } else {\n let sec = raw.tv_sec as u64;\n let nsec = (raw.tv_usec as u32) * 1000;\n Ok(Some(Duration::new(sec, nsec)))\n }\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n try!(cvt(unsafe { libc::shutdown(self.0.raw(), how) }));\n Ok(())\n }\n}\n\nimpl AsInner<c_int> for Socket {\n fn as_inner(&self) -> &c_int { self.0.as_inner() }\n}\n\nimpl FromInner<c_int> for Socket {\n fn from_inner(fd: c_int) -> Socket { Socket(FileDesc::new(fd)) }\n}\n\nimpl 
IntoInner<c_int> for Socket {\n fn into_inner(self) -> c_int { self.0.into_raw() }\n}\n<commit_msg>std: Atomically set CLOEXEC for sockets if possible<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse prelude::v1::*;\n\nuse ffi::CStr;\nuse io;\nuse libc::{self, c_int, size_t};\nuse net::{SocketAddr, Shutdown};\nuse str;\nuse sys::fd::FileDesc;\nuse sys_common::{AsInner, FromInner, IntoInner};\nuse sys_common::net::{getsockopt, setsockopt};\nuse time::Duration;\n\npub use sys::{cvt, cvt_r};\npub use libc as netc;\n\npub type wrlen_t = size_t;\n\n\/\/ See below for the usage of SOCK_CLOEXEC, but this constant is only defined on\n\/\/ Linux currently (e.g. support doesn't exist on other platforms). In order to\n\/\/ get name resolution to work and things to compile we just define a dummy\n\/\/ SOCK_CLOEXEC here for other platforms. Note that the dummy constant isn't\n\/\/ actually ever used (the blocks below are wrapped in `if cfg!` as well.\n#[cfg(target_os = \"linux\")]\nuse libc::SOCK_CLOEXEC;\n#[cfg(not(target_os = \"linux\"))]\nconst SOCK_CLOEXEC: c_int = 0;\n\npub struct Socket(FileDesc);\n\npub fn init() {}\n\npub fn cvt_gai(err: c_int) -> io::Result<()> {\n if err == 0 { return Ok(()) }\n\n let detail = unsafe {\n str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap()\n .to_owned()\n };\n Err(io::Error::new(io::ErrorKind::Other,\n &format!(\"failed to lookup address information: {}\",\n detail)[..]))\n}\n\nimpl Socket {\n pub fn new(addr: &SocketAddr, ty: c_int) -> io::Result<Socket> {\n let fam = match *addr {\n SocketAddr::V4(..) => libc::AF_INET,\n SocketAddr::V6(..) => libc::AF_INET6,\n };\n unsafe {\n \/\/ On linux we first attempt to pass the SOCK_CLOEXEC flag to\n \/\/ atomically create the socket and set it as CLOEXEC. 
Support for\n \/\/ this option, however, was added in 2.6.27, and we still support\n \/\/ 2.6.18 as a kernel, so if the returned error is EINVAL we\n \/\/ fallthrough to the fallback.\n if cfg!(target_os = \"linux\") {\n match cvt(libc::socket(fam, ty | SOCK_CLOEXEC, 0)) {\n Ok(fd) => return Ok(Socket(FileDesc::new(fd))),\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {}\n Err(e) => return Err(e),\n }\n }\n\n let fd = try!(cvt(libc::socket(fam, ty, 0)));\n let fd = FileDesc::new(fd);\n fd.set_cloexec();\n Ok(Socket(fd))\n }\n }\n\n pub fn accept(&self, storage: *mut libc::sockaddr,\n len: *mut libc::socklen_t) -> io::Result<Socket> {\n let fd = try!(cvt_r(|| unsafe {\n libc::accept(self.0.raw(), storage, len)\n }));\n let fd = FileDesc::new(fd);\n fd.set_cloexec();\n Ok(Socket(fd))\n }\n\n pub fn duplicate(&self) -> io::Result<Socket> {\n self.0.duplicate().map(Socket)\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n\n pub fn set_timeout(&self, dur: Option<Duration>, kind: libc::c_int) -> io::Result<()> {\n let timeout = match dur {\n Some(dur) => {\n if dur.as_secs() == 0 && dur.subsec_nanos() == 0 {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot set a 0 duration timeout\"));\n }\n\n let secs = if dur.as_secs() > libc::time_t::max_value() as u64 {\n libc::time_t::max_value()\n } else {\n dur.as_secs() as libc::time_t\n };\n let mut timeout = libc::timeval {\n tv_sec: secs,\n tv_usec: (dur.subsec_nanos() \/ 1000) as libc::suseconds_t,\n };\n if timeout.tv_sec == 0 && timeout.tv_usec == 0 {\n timeout.tv_usec = 1;\n }\n timeout\n }\n None => {\n libc::timeval {\n tv_sec: 0,\n tv_usec: 0,\n }\n }\n };\n setsockopt(self, libc::SOL_SOCKET, kind, timeout)\n }\n\n pub fn timeout(&self, kind: libc::c_int) -> io::Result<Option<Duration>> {\n let raw: libc::timeval = try!(getsockopt(self, libc::SOL_SOCKET, kind));\n if raw.tv_sec == 0 && raw.tv_usec == 0 {\n Ok(None)\n } else {\n let sec = raw.tv_sec as u64;\n let nsec = (raw.tv_usec as u32) * 1000;\n Ok(Some(Duration::new(sec, nsec)))\n }\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n try!(cvt(unsafe { libc::shutdown(self.0.raw(), how) }));\n Ok(())\n }\n}\n\nimpl AsInner<c_int> for Socket {\n fn as_inner(&self) -> &c_int { self.0.as_inner() }\n}\n\nimpl FromInner<c_int> for Socket {\n fn from_inner(fd: c_int) -> Socket { Socket(FileDesc::new(fd)) }\n}\n\nimpl IntoInner<c_int> for Socket {\n fn into_inner(self) -> c_int { self.0.into_raw() }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Useful synchronization primitives.\n\/\/!\n\/\/! ## The need for synchronization\n\/\/!\n\/\/! Conceptually, a Rust program is simply a series of operations which will\n\/\/! be executed on a computer. The timeline of events happening in the program\n\/\/! is consistent with the order of the operations in the code.\n\/\/!\n\/\/! 
Considering the following code, operating on some global static variables:\n\/\/!\n\/\/! ```rust\n\/\/! static mut A: u32 = 0;\n\/\/! static mut B: u32 = 0;\n\/\/! static mut C: u32 = 0;\n\/\/!\n\/\/! fn main() {\n\/\/! unsafe {\n\/\/! A = 3;\n\/\/! B = 4;\n\/\/! A = A + B;\n\/\/! C = B;\n\/\/! println!(\"{} {} {}\", A, B, C);\n\/\/! C = A;\n\/\/! }\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! It appears _as if_ some variables stored in memory are changed, an addition\n\/\/! is performed, result is stored in `A` and the variable `C` is modified twice.\n\/\/! When only a single thread is involved, the results are as expected:\n\/\/! the line `7 4 4` gets printed.\n\/\/!\n\/\/! As for what happens behind the scenes, when optimizations are enabled the\n\/\/! final generated machine code might look very different from the code:\n\/\/!\n\/\/! - The first store to `C` might be moved before the store to `A` or `B`,\n\/\/! _as if_ we had written `C = 4; A = 3; B = 4`.\n\/\/!\n\/\/! - Assignment of `A + B` to `A` might be removed, since the sum can be stored\n\/\/! in a temporary location until it gets printed, with the global variable\n\/\/! never getting updated.\n\/\/!\n\/\/! - The final result could be determined just by looking at the code at compile time,\n\/\/! so [constant folding] might turn the whole block into a simple `println!(\"7 4 4\")`.\n\/\/!\n\/\/! The compiler is allowed to perform any combination of these optimizations, as long\n\/\/! as the final optimized code, when executed, produces the same results as the one\n\/\/! without optimizations.\n\/\/!\n\/\/! When multiprocessing is involved (either multiple CPU cores, or multiple\n\/\/! physical CPUs), access to global variables (which are shared between threads)\n\/\/! could lead to nondeterministic results, **even if** compiler optimizations\n\/\/! are disabled.\n\/\/!\n\/\/! Note that thanks to Rust's safety guarantees, accessing global (static)\n\/\/! variables requires `unsafe` code, assuming we don't use any of the\n\/\/! synchronization primitives in this module.\n\/\/!\n\/\/! [constant folding]: https:\/\/en.wikipedia.org\/wiki\/Constant_folding\n\/\/!\n\/\/! ## Out-of-order execution\n\/\/!\n\/\/! Instructions can execute in a different order from the one we define, due to\n\/\/! various reasons:\n\/\/!\n\/\/! - **Compiler** reordering instructions: if the compiler can issue an\n\/\/! instruction at an earlier point, it will try to do so. For example, it\n\/\/! might hoist memory loads at the top of a code block, so that the CPU can\n\/\/! start [prefetching] the values from memory.\n\/\/!\n\/\/! In single-threaded scenarios, this can cause issues when writing\n\/\/! signal handlers or certain kinds of low-level code.\n\/\/! Use [compiler fences] to prevent this reordering.\n\/\/!\n\/\/! - **Single processor** executing instructions [out-of-order]: modern CPUs are\n\/\/! capable of [superscalar] execution, i.e. multiple instructions might be\n\/\/! executing at the same time, even though the machine code describes a\n\/\/! sequential process.\n\/\/!\n\/\/! This kind of reordering is handled transparently by the CPU.\n\/\/!\n\/\/! - **Multiprocessor** system, where multiple hardware threads run at the same time.\n\/\/! In multi-threaded scenarios, you can use two kinds of primitives to deal\n\/\/! with synchronization:\n\/\/! - [memory fences] to ensure memory accesses are made visibile to other\n\/\/! CPUs in the right order.\n\/\/! - [atomic operations] to ensure simultaneous access to the same memory\n\/\/! 
location doesn't lead to undefined behavior.\n\/\/!\n\/\/! [prefetching]: https:\/\/en.wikipedia.org\/wiki\/Cache_prefetching\n\/\/! [compiler fences]: atomic::compiler_fence\n\/\/! [out-of-order]: https:\/\/en.wikipedia.org\/wiki\/Out-of-order_execution\n\/\/! [superscalar]: https:\/\/en.wikipedia.org\/wiki\/Superscalar_processor\n\/\/! [memory fences]: atomic::fence\n\/\/! [atomics operations]: atomic\n\/\/!\n\/\/! ## Higher-level synchronization objects\n\/\/!\n\/\/! Most of the low-level synchronization primitives are quite error-prone and\n\/\/! inconvenient to use, which is why the standard library also exposes some\n\/\/! higher-level synchronization objects.\n\/\/!\n\/\/! These abstractions can be built out of lower-level primitives. For efficiency,\n\/\/! the sync objects in the standard library are usually implemented with help\n\/\/! from the operating system's kernel, which is able to reschedule the threads\n\/\/! while they are blocked on acquiring a lock.\n\/\/!\n\/\/! ## Efficiency\n\/\/!\n\/\/! Higher-level synchronization mechanisms are usually heavy-weight.\n\/\/! While most atomic operations can execute instantaneously, acquiring a\n\/\/! [`Mutex`] can involve blocking until another thread releases it.\n\/\/! For [`RwLock`], while! any number of readers may acquire it without\n\/\/! blocking, each writer will have exclusive access.\n\/\/!\n\/\/! On the other hand, communication over [channels] can provide a fairly\n\/\/! high-level interface without sacrificing performance, at the cost of\n\/\/! somewhat more memory.\n\/\/!\n\/\/! The more synchronization exists between CPUs, the smaller the performance\n\/\/! gains from multithreading will be.\n\/\/!\n\/\/! [channels]: mpsc\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use alloc_crate::sync::{Arc, Weak};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::sync::atomic;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::barrier::{Barrier, BarrierWaitResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::condvar::{Condvar, WaitTimeoutResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::mutex::{Mutex, MutexGuard};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::once::{Once, OnceState, ONCE_INIT};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\n\npub mod mpsc;\n\nmod barrier;\nmod condvar;\nmod mutex;\nmod once;\nmod rwlock;\n<commit_msg>Fix broken links<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Useful synchronization primitives.\n\/\/!\n\/\/! ## The need for synchronization\n\/\/!\n\/\/! Conceptually, a Rust program is simply a series of operations which will\n\/\/! be executed on a computer. The timeline of events happening in the program\n\/\/! 
is consistent with the order of the operations in the code.\n\/\/!\n\/\/! Considering the following code, operating on some global static variables:\n\/\/!\n\/\/! ```rust\n\/\/! static mut A: u32 = 0;\n\/\/! static mut B: u32 = 0;\n\/\/! static mut C: u32 = 0;\n\/\/!\n\/\/! fn main() {\n\/\/! unsafe {\n\/\/! A = 3;\n\/\/! B = 4;\n\/\/! A = A + B;\n\/\/! C = B;\n\/\/! println!(\"{} {} {}\", A, B, C);\n\/\/! C = A;\n\/\/! }\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! It appears _as if_ some variables stored in memory are changed, an addition\n\/\/! is performed, result is stored in `A` and the variable `C` is modified twice.\n\/\/! When only a single thread is involved, the results are as expected:\n\/\/! the line `7 4 4` gets printed.\n\/\/!\n\/\/! As for what happens behind the scenes, when optimizations are enabled the\n\/\/! final generated machine code might look very different from the code:\n\/\/!\n\/\/! - The first store to `C` might be moved before the store to `A` or `B`,\n\/\/! _as if_ we had written `C = 4; A = 3; B = 4`.\n\/\/!\n\/\/! - Assignment of `A + B` to `A` might be removed, since the sum can be stored\n\/\/! in a temporary location until it gets printed, with the global variable\n\/\/! never getting updated.\n\/\/!\n\/\/! - The final result could be determined just by looking at the code at compile time,\n\/\/! so [constant folding] might turn the whole block into a simple `println!(\"7 4 4\")`.\n\/\/!\n\/\/! The compiler is allowed to perform any combination of these optimizations, as long\n\/\/! as the final optimized code, when executed, produces the same results as the one\n\/\/! without optimizations.\n\/\/!\n\/\/! When multiprocessing is involved (either multiple CPU cores, or multiple\n\/\/! physical CPUs), access to global variables (which are shared between threads)\n\/\/! could lead to nondeterministic results, **even if** compiler optimizations\n\/\/! are disabled.\n\/\/!\n\/\/! Note that thanks to Rust's safety guarantees, accessing global (static)\n\/\/! variables requires `unsafe` code, assuming we don't use any of the\n\/\/! synchronization primitives in this module.\n\/\/!\n\/\/! [constant folding]: https:\/\/en.wikipedia.org\/wiki\/Constant_folding\n\/\/!\n\/\/! ## Out-of-order execution\n\/\/!\n\/\/! Instructions can execute in a different order from the one we define, due to\n\/\/! various reasons:\n\/\/!\n\/\/! - **Compiler** reordering instructions: if the compiler can issue an\n\/\/! instruction at an earlier point, it will try to do so. For example, it\n\/\/! might hoist memory loads at the top of a code block, so that the CPU can\n\/\/! start [prefetching] the values from memory.\n\/\/!\n\/\/! In single-threaded scenarios, this can cause issues when writing\n\/\/! signal handlers or certain kinds of low-level code.\n\/\/! Use [compiler fences] to prevent this reordering.\n\/\/!\n\/\/! - **Single processor** executing instructions [out-of-order]: modern CPUs are\n\/\/! capable of [superscalar] execution, i.e. multiple instructions might be\n\/\/! executing at the same time, even though the machine code describes a\n\/\/! sequential process.\n\/\/!\n\/\/! This kind of reordering is handled transparently by the CPU.\n\/\/!\n\/\/! - **Multiprocessor** system, where multiple hardware threads run at the same time.\n\/\/! In multi-threaded scenarios, you can use two kinds of primitives to deal\n\/\/! with synchronization:\n\/\/! - [memory fences] to ensure memory accesses are made visibile to other\n\/\/! CPUs in the right order.\n\/\/! 
- [atomic operations] to ensure simultaneous access to the same memory\n\/\/! location doesn't lead to undefined behavior.\n\/\/!\n\/\/! [prefetching]: https:\/\/en.wikipedia.org\/wiki\/Cache_prefetching\n\/\/! [compiler fences]: crate::sync::atomic::compiler_fence\n\/\/! [out-of-order]: https:\/\/en.wikipedia.org\/wiki\/Out-of-order_execution\n\/\/! [superscalar]: https:\/\/en.wikipedia.org\/wiki\/Superscalar_processor\n\/\/! [memory fences]: crate::sync::atomic::fence\n\/\/! [atomic operations]: crate::sync::atomic\n\/\/!\n\/\/! ## Higher-level synchronization objects\n\/\/!\n\/\/! Most of the low-level synchronization primitives are quite error-prone and\n\/\/! inconvenient to use, which is why the standard library also exposes some\n\/\/! higher-level synchronization objects.\n\/\/!\n\/\/! These abstractions can be built out of lower-level primitives. For efficiency,\n\/\/! the sync objects in the standard library are usually implemented with help\n\/\/! from the operating system's kernel, which is able to reschedule the threads\n\/\/! while they are blocked on acquiring a lock.\n\/\/!\n\/\/! ## Efficiency\n\/\/!\n\/\/! Higher-level synchronization mechanisms are usually heavy-weight.\n\/\/! While most atomic operations can execute instantaneously, acquiring a\n\/\/! [`Mutex`] can involve blocking until another thread releases it.\n\/\/! For [`RwLock`], while any number of readers may acquire it without\n\/\/! blocking, each writer will have exclusive access.\n\/\/!\n\/\/! On the other hand, communication over [channels] can provide a fairly\n\/\/! high-level interface without sacrificing performance, at the cost of\n\/\/! somewhat more memory.\n\/\/!\n\/\/! The more synchronization exists between CPUs, the smaller the performance\n\/\/! gains from multithreading will be.\n\/\/!\n\/\/! [`Mutex`]: crate::sync::Mutex\n\/\/! [`RwLock`]: crate::sync::RwLock\n\/\/! 
[channels]: crate::sync::mpsc\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use alloc_crate::sync::{Arc, Weak};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::sync::atomic;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::barrier::{Barrier, BarrierWaitResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::condvar::{Condvar, WaitTimeoutResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::mutex::{Mutex, MutexGuard};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::once::{Once, OnceState, ONCE_INIT};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\n\npub mod mpsc;\n\nmod barrier;\nmod condvar;\nmod mutex;\nmod once;\nmod rwlock;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>hello rust<commit_after>fn main() {\n println!(\"hello\");\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse yaml_rust::Yaml;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct YamlHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl YamlHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> YamlHeaderParser {\n YamlHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for YamlHeaderParser {\n\n fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError> {\n use yaml_rust::YamlLoader;\n if string.is_some() {\n let s = string.unwrap();\n YamlLoader::load_from_str(&s[..])\n .map(|mut vec_yaml| {\n vec_yaml.pop().map(|f| {\n visit_yaml(f)\n }).unwrap()\n })\n .map_err(|e| {\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&s[..], s.clone(), 0)\n })\n } else {\n Ok(FileHeaderData::Null)\n }\n\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n use yaml_rust::YamlEmitter;\n\n let mut buffer = String::new();\n let result = {\n let mut emitter = YamlEmitter::new(&mut buffer);\n emitter.dump(&visit_header(data))\n };\n result\n .map_err(|e| {\n error!(\"Error emitting YAML.\");\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&buffer[..], buffer.clone(), 0)\n })\n .map(|_| buffer)\n }\n\n}\n\nfn visit_yaml(v: Yaml) -> FileHeaderData {\n use std::process::exit;\n\n match v {\n Yaml::Real(_) => FileHeaderData::Float(v.as_f64().unwrap()),\n Yaml::Integer(i) => {\n if i > 0 {\n debug!(\"Castring {} : i64 -> u64\", i);\n FileHeaderData::UInteger(i as u64)\n } else {\n FileHeaderData::Integer(i)\n }\n },\n Yaml::String(s) => FileHeaderData::Text(s),\n Yaml::Boolean(b) => FileHeaderData::Bool(b),\n\n Yaml::Array(vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_yaml(i)).collect())\n }\n },\n\n Yaml::Hash(btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n 
FileHeaderData::Key {\n name: String::from(k.as_str().unwrap()),\n value: Box::new(visit_yaml(v)),\n }\n ).collect()\n }\n },\n\n Yaml::Alias(_) => {\n warn!(\"YAML::ALIAS is not yet fully supported by rust-yaml\");\n FileHeaderData::Null\n },\n\n Yaml::Null => FileHeaderData::Null,\n\n Yaml::BadValue => {\n warn!(\"YAML parsing error\");\n exit(1);\n },\n }\n}\n\nfn visit_header(h: &FileHeaderData) -> Yaml {\n use std::ops::Deref;\n use std::collections::BTreeMap;\n use std::process::exit;\n\n match h {\n &FileHeaderData::Null => Yaml::Null,\n &FileHeaderData::Float(f) => Yaml::Real(format!(\"{}\", f)),\n &FileHeaderData::Integer(i) => Yaml::Integer(i),\n &FileHeaderData::UInteger(u) => {\n debug!(\"Might be losing data now: u64 -> i64 cast\");\n Yaml::Integer(u as i64)\n },\n &FileHeaderData::Text(ref s) => Yaml::String(s.clone()),\n &FileHeaderData::Bool(b) => Yaml::Boolean(b),\n\n &FileHeaderData::Array{values: ref a} => {\n Yaml::Array(a.deref().into_iter().map(|e| visit_header(e)).collect())\n },\n\n &FileHeaderData::Key{name: _, value: _} => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n },\n\n &FileHeaderData::Map{ref keys} => {\n let mut map : BTreeMap<Yaml, Yaml> = BTreeMap::new();\n\n let failed = keys.into_iter().map(|key| {\n match key {\n &FileHeaderData::Key{ref name, ref value} => {\n let k = Yaml::String(name.clone());\n let v = visit_header(value.deref());\n\n map.insert(k, v).is_none()\n },\n\n _ => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n }\n }\n })\n .fold(0, |acc, succeeded : bool| {\n if !succeeded { acc + 1 } else { acc }\n });\n\n debug!(\"Failed to insert {} keys\", failed);\n Yaml::Hash(map)\n },\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::ops::Deref;\n\n use super::YamlHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"a: 1\\nb: 2\");\n let spec = FHS::Array { allowed_types: vec![\n FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n ]\n }\n ]\n };\n\n let parser = YamlHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n debug!(\"Parsed: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{ref keys}) => {\n keys.into_iter().map(|k| {\n match k {\n &FHD::Key{ref name, ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert!(u == 1 || u == 2),\n &FHD::Integer(_) => assert!(false, \"Found Integer, expected UInteger\"),\n _ => assert!(false, \"Integers are not here\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n };\n })\n .all(|x| x == ());\n },\n _ => assert!(false, \"Map is not a Map\"),\n }\n }\n}\n\n<commit_msg>Add deserialize->serialize test<commit_after>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse yaml_rust::Yaml;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct YamlHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl YamlHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> YamlHeaderParser {\n YamlHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut 
Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for YamlHeaderParser {\n\n fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError> {\n use yaml_rust::YamlLoader;\n if string.is_some() {\n let s = string.unwrap();\n YamlLoader::load_from_str(&s[..])\n .map(|mut vec_yaml| {\n vec_yaml.pop().map(|f| {\n visit_yaml(f)\n }).unwrap()\n })\n .map_err(|e| {\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&s[..], s.clone(), 0)\n })\n } else {\n Ok(FileHeaderData::Null)\n }\n\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n use yaml_rust::YamlEmitter;\n\n let mut buffer = String::new();\n let result = {\n let mut emitter = YamlEmitter::new(&mut buffer);\n emitter.dump(&visit_header(data))\n };\n result\n .map_err(|e| {\n error!(\"Error emitting YAML.\");\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&buffer[..], buffer.clone(), 0)\n })\n .map(|_| buffer)\n }\n\n}\n\nfn visit_yaml(v: Yaml) -> FileHeaderData {\n use std::process::exit;\n\n match v {\n Yaml::Real(_) => FileHeaderData::Float(v.as_f64().unwrap()),\n Yaml::Integer(i) => {\n if i > 0 {\n debug!(\"Castring {} : i64 -> u64\", i);\n FileHeaderData::UInteger(i as u64)\n } else {\n FileHeaderData::Integer(i)\n }\n },\n Yaml::String(s) => FileHeaderData::Text(s),\n Yaml::Boolean(b) => FileHeaderData::Bool(b),\n\n Yaml::Array(vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_yaml(i)).collect())\n }\n },\n\n Yaml::Hash(btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: String::from(k.as_str().unwrap()),\n value: Box::new(visit_yaml(v)),\n }\n ).collect()\n }\n },\n\n Yaml::Alias(_) => {\n warn!(\"YAML::ALIAS is not yet fully supported by rust-yaml\");\n FileHeaderData::Null\n },\n\n Yaml::Null => FileHeaderData::Null,\n\n Yaml::BadValue => {\n warn!(\"YAML parsing error\");\n exit(1);\n },\n }\n}\n\nfn visit_header(h: &FileHeaderData) -> Yaml {\n use std::ops::Deref;\n use std::collections::BTreeMap;\n use std::process::exit;\n\n match h {\n &FileHeaderData::Null => Yaml::Null,\n &FileHeaderData::Float(f) => Yaml::Real(format!(\"{}\", f)),\n &FileHeaderData::Integer(i) => Yaml::Integer(i),\n &FileHeaderData::UInteger(u) => {\n debug!(\"Might be losing data now: u64 -> i64 cast\");\n Yaml::Integer(u as i64)\n },\n &FileHeaderData::Text(ref s) => Yaml::String(s.clone()),\n &FileHeaderData::Bool(b) => Yaml::Boolean(b),\n\n &FileHeaderData::Array{values: ref a} => {\n Yaml::Array(a.deref().into_iter().map(|e| visit_header(e)).collect())\n },\n\n &FileHeaderData::Key{name: _, value: _} => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n },\n\n &FileHeaderData::Map{ref keys} => {\n let mut map : BTreeMap<Yaml, Yaml> = BTreeMap::new();\n\n let failed = keys.into_iter().map(|key| {\n match key {\n &FileHeaderData::Key{ref name, ref value} => {\n let k = Yaml::String(name.clone());\n let v = visit_header(value.deref());\n\n map.insert(k, v).is_none()\n },\n\n _ => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n }\n }\n })\n .fold(0, |acc, succeeded : bool| {\n if !succeeded { acc + 1 } else { acc }\n });\n\n debug!(\"Failed to insert {} 
keys\", failed);\n Yaml::Hash(map)\n },\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::ops::Deref;\n\n use super::YamlHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"a: 1\\nb: 2\");\n let spec = FHS::Array { allowed_types: vec![\n FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n ]\n }\n ]\n };\n\n let parser = YamlHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n debug!(\"Parsed: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{ref keys}) => {\n keys.into_iter().map(|k| {\n match k {\n &FHD::Key{ref name, ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert!(u == 1 || u == 2),\n &FHD::Integer(_) => assert!(false, \"Found Integer, expected UInteger\"),\n _ => assert!(false, \"Integers are not here\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n };\n })\n .all(|x| x == ());\n },\n _ => assert!(false, \"Map is not a Map\"),\n }\n }\n\n #[test]\n fn test_desser() {\n use yaml_rust::YamlLoader;\n\n let text = String::from(\"a: [1, 32, 42]\\nb: -2\");\n let parser = YamlHeaderParser::new(None);\n\n let des = parser.read(Some(text.clone()));\n assert!(des.is_ok(), \"Deserializing failed\");\n\n let ser = parser.write(&des.unwrap());\n assert!(ser.is_ok(), \"Parser error when serializing deserialized text\");\n\n let yaml_text = YamlLoader::load_from_str(&text[..]);\n let yaml_ser = YamlLoader::load_from_str(&ser.unwrap()[..]);\n\n assert!(yaml_text.is_ok(), \"Could not use yaml_rust to serialize text for comparison\");\n assert!(yaml_ser.is_ok(), \"Could not use yaml_rust to serialize serialized-deserialized text for comparison\");\n assert_eq!(yaml_text.unwrap(), yaml_ser.unwrap());\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add error type for store<commit_after>use std::error::Error;\nuse std::fmt::Debug;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\nuse std::clone::Clone;\n\nuse std::io::Error as IOError;\n\npub struct StoreError {\n name: Option<&'static str>,\n expl: Option<&'static str>,\n cause: Option<Box<Error>>,\n}\n\nimpl StoreError {\n\n pub fn new() -> StoreError {\n StoreError {\n name: None,\n expl: None,\n cause: None,\n }\n }\n\n pub fn with_name(mut self, n: &'static str) -> StoreError {\n self.name = Some(n);\n self\n }\n\n pub fn with_expl(mut self, e: &'static str) -> StoreError {\n self.expl = Some(e);\n self\n }\n\n pub fn with_cause(mut self, e: Box<Error>) -> StoreError {\n self.cause = Some(e);\n self\n }\n\n}\n\nimpl Debug for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{:?}]: {:?}, caused: {:?}\", self.name, self.expl, self.cause));\n Ok(())\n }\n\n}\n\nimpl Display for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{}]: {}\",\n self.name.unwrap_or(\"StoreError\"),\n self.expl.unwrap_or(\"\")));\n Ok(())\n }\n\n}\n\nimpl Error for StoreError {\n\n fn description(&self) -> &str {\n self.expl.unwrap_or(\"\")\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic REPL shell for 
Forth<commit_after>extern crate forth;\n\nuse std::io;\nuse std::io::prelude::*;\nuse forth::Forth;\n\nfn main() {\n let mut forth = Forth::new();\n\n loop {\n print!(\"> \");\n io::stdout().flush().expect(\"Failed to flush output\");\n\n let mut line = String::new();\n io::stdin().read_line(&mut line).expect(\n \"Failed to read from input\",\n );\n\n if let Err(e) = forth.eval(&line) {\n println!(\"Error: {:?}\", e);\n }\n\n println!(\"{:?}\", forth.stack());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #125<commit_after>#[crate_type = \"rlib\"];\n\nextern mod math;\n\nuse std::iter;\nuse std::hashmap::HashSet;\nuse math::{arith, numconv};\n\npub static EXPECTED_ANSWER: &'static str = \"2906969179\";\n\nfn palindromic_sum_set(limit: uint) -> HashSet<uint> {\n let mut set = HashSet::new();\n let mut sq_sums: ~[uint] = ~[];\n\n let mut it = iter::count(1u, 1)\n .map(|n| n * n)\n .take_while(|&pow| pow < limit);\n\n for pow in it {\n for j in range(0, sq_sums.len()).invert() {\n let s = sq_sums[j] + pow;\n if s >= limit { break; }\n\n if numconv::is_palindromic(s, 10) { set.insert(s); }\n sq_sums[j] = s;\n }\n sq_sums.push(pow);\n }\n\n set\n}\n\npub fn solve() -> ~str {\n let limit = arith::pow(10, 8);\n let set = palindromic_sum_set(limit);\n set.iter().fold(0, |x, &y| x + y).to_str()\n}\n\n#[cfg(test)]\nmod test {\n #[test]\n fn palindromic_sum_below_1000() {\n let set = super::palindromic_sum_set(1000);\n assert_eq!(4164, set.iter().fold(0, |x, &y| x + y));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! HTTP\/HTTPS URL type for Iron.\n\nuse url::{Host, RelativeSchemeData};\nuse url::{whatwg_scheme_type_mapper};\nuse url::{self, SchemeData, SchemeType};\nuse url::format::{PathFormatter, UserInfoFormatter};\nuse std::fmt;\n\n\/\/\/ HTTP\/HTTPS URL type for Iron.\n#[derive(PartialEq, Eq, Clone, Debug)]\npub struct Url {\n \/\/\/ The lower-cased scheme of the URL, typically \"http\" or \"https\".\n pub scheme: String,\n\n \/\/\/ The host field of the URL, probably a domain.\n pub host: Host,\n\n \/\/\/ The connection port.\n pub port: u16,\n\n \/\/\/ The URL path, the resource to be accessed.\n \/\/\/\n \/\/\/ A *non-empty* vector encoding the parts of the URL path.\n \/\/\/ Empty entries of `\"\"` correspond to trailing slashes.\n pub path: Vec<String>,\n\n \/\/\/ The URL username field, from the userinfo section of the URL.\n \/\/\/\n \/\/\/ `None` if the `@` character was not part of the input OR\n \/\/\/ if a blank username was provided.\n \/\/\/ Otherwise, a non-empty string.\n pub username: Option<String>,\n\n \/\/\/ The URL password field, from the userinfo section of the URL.\n \/\/\/\n \/\/\/ `None` if the `@` character was not part of the input OR\n \/\/\/ if a blank password was provided.\n \/\/\/ Otherwise, a non-empty string.\n pub password: Option<String>,\n\n \/\/\/ The URL query string.\n \/\/\/\n \/\/\/ `None` if the `?` character was not part of the input.\n \/\/\/ Otherwise, a possibly empty, percent encoded string.\n pub query: Option<String>,\n\n \/\/\/ The URL fragment.\n \/\/\/\n \/\/\/ `None` if the `#` character was not part of the input.\n \/\/\/ Otherwise, a possibly empty, percent encoded string.\n pub fragment: Option<String>\n}\n\nimpl Url {\n \/\/\/ Create a URL from a string.\n \/\/\/\n \/\/\/ The input must be a valid URL with a special scheme for this to succeed.\n \/\/\/\n \/\/\/ HTTP and HTTPS are special schemes.\n \/\/\/\n \/\/\/ See: http:\/\/url.spec.whatwg.org\/#special-scheme\n pub fn parse(input: &str) -> 
Result<Url, String> {\n \/\/ Parse the string using rust-url, then convert.\n match url::Url::parse(input) {\n Ok(raw_url) => Url::from_generic_url(raw_url),\n Err(e) => Err(format!(\"{}\", e))\n }\n }\n\n \/\/\/ Create a `Url` from a `rust-url` `Url`.\n pub fn from_generic_url(raw_url: url::Url) -> Result<Url, String> {\n \/\/ Create an Iron URL by extracting the special scheme data.\n match raw_url.scheme_data {\n SchemeData::Relative(data) => {\n \/\/ Extract the port as a 16-bit unsigned integer.\n let port: u16 = match data.port {\n \/\/ If explicitly defined, unwrap it.\n Some(port) => port,\n\n \/\/ Otherwise, use the scheme's default port.\n None => {\n match whatwg_scheme_type_mapper(&raw_url.scheme) {\n SchemeType::Relative(port) => port,\n _ => return Err(format!(\"Invalid special scheme: `{}`\",\n raw_url.scheme))\n }\n }\n };\n\n \/\/ Map empty usernames to None.\n let username = match &*data.username {\n \"\" => None,\n _ => Some(data.username)\n };\n\n \/\/ Map empty passwords to None.\n let password = match data.password {\n None => None,\n Some(ref x) if x.is_empty() => None,\n Some(password) => Some(password)\n };\n\n Ok(Url {\n scheme: raw_url.scheme,\n host: data.host,\n port: port,\n path: data.path,\n username: username,\n password: password,\n query: raw_url.query,\n fragment: raw_url.fragment\n })\n },\n _ => Err(format!(\"Not a special scheme: `{}`\", raw_url.scheme))\n }\n }\n\n \/\/\/ Create a `rust-url` `Url` from a `Url`.\n pub fn into_generic_url(self) -> url::Url {\n let default_port = whatwg_scheme_type_mapper(&self.scheme).default_port();\n\n url::Url {\n scheme: self.scheme,\n scheme_data: SchemeData::Relative(\n RelativeSchemeData {\n username: self.username.unwrap_or(\"\".to_string()),\n password: self.password,\n host: self.host,\n port: Some(self.port),\n default_port: default_port,\n path: self.path\n }\n ),\n query: self.query,\n fragment: self.fragment\n }\n }\n}\n\nimpl fmt::Display for Url {\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Write the scheme.\n try!(self.scheme.fmt(formatter));\n try!(\":\/\/\".fmt(formatter));\n\n \/\/ Write the user info.\n try!(write!(formatter, \"{}\", UserInfoFormatter {\n username: self.username.as_ref().map(|s| &**s).unwrap_or(\"\"),\n password: self.password.as_ref().map(|s| &**s)\n }));\n\n \/\/ Write the host.\n try!(self.host.fmt(formatter));\n\n \/\/ Write the port.\n try!(\":\".fmt(formatter));\n try!(self.port.fmt(formatter));\n\n \/\/ Write the path.\n try!(write!(formatter, \"{}\", PathFormatter { path: &self.path }));\n\n \/\/ Write the query.\n match self.query {\n Some(ref query) => {\n try!(\"?\".fmt(formatter));\n try!(query.fmt(formatter));\n },\n None => ()\n }\n\n \/\/ Write the fragment.\n match self.fragment {\n Some(ref fragment) => {\n try!(\"#\".fmt(formatter));\n try!(fragment.fmt(formatter));\n },\n None => ()\n }\n\n Ok(())\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::Url;\n\n #[test]\n fn test_default_port() {\n assert_eq!(Url::parse(\"http:\/\/example.com\/wow\").unwrap().port, 80u16);\n assert_eq!(Url::parse(\"https:\/\/example.com\/wow\").unwrap().port, 443u16);\n }\n\n #[test]\n fn test_explicit_port() {\n assert_eq!(Url::parse(\"http:\/\/localhost:3097\").unwrap().port, 3097u16);\n }\n\n #[test]\n fn test_empty_username() {\n assert!(Url::parse(\"http:\/\/@example.com\").unwrap().username.is_none());\n assert!(Url::parse(\"http:\/\/:password@example.com\").unwrap().username.is_none());\n }\n\n #[test]\n fn test_empty_password() {\n 
assert!(Url::parse(\"http:\/\/michael@example.com\").unwrap().password.is_none());\n assert!(Url::parse(\"http:\/\/:@example.com\").unwrap().password.is_none());\n }\n\n #[test]\n fn test_formatting() {\n assert_eq!(Url::parse(\"http:\/\/michael@example.com\/path\/?q=wow\").unwrap().to_string(),\n \"http:\/\/michael@example.com:80\/path\/?q=wow\".to_string());\n }\n\n #[test]\n fn test_conversion() {\n let url_str = \"https:\/\/user:password@iron.com:8080\/path?q=wow#fragment\";\n let url = Url::parse(url_str).unwrap();\n\n \/\/ Convert to a generic URL and check fidelity.\n let raw_url = url.clone().into_generic_url();\n assert_eq!(::url::Url::parse(url_str).unwrap(), raw_url);\n\n \/\/ Convert back to an Iron URL and check fidelity.\n let new_url = Url::from_generic_url(raw_url).unwrap();\n assert_eq!(url, new_url);\n }\n}\n<commit_msg>Add more tests for username and password in module<commit_after>\/\/! HTTP\/HTTPS URL type for Iron.\n\nuse url::{Host, RelativeSchemeData};\nuse url::{whatwg_scheme_type_mapper};\nuse url::{self, SchemeData, SchemeType};\nuse url::format::{PathFormatter, UserInfoFormatter};\nuse std::fmt;\n\n\/\/\/ HTTP\/HTTPS URL type for Iron.\n#[derive(PartialEq, Eq, Clone, Debug)]\npub struct Url {\n \/\/\/ The lower-cased scheme of the URL, typically \"http\" or \"https\".\n pub scheme: String,\n\n \/\/\/ The host field of the URL, probably a domain.\n pub host: Host,\n\n \/\/\/ The connection port.\n pub port: u16,\n\n \/\/\/ The URL path, the resource to be accessed.\n \/\/\/\n \/\/\/ A *non-empty* vector encoding the parts of the URL path.\n \/\/\/ Empty entries of `\"\"` correspond to trailing slashes.\n pub path: Vec<String>,\n\n \/\/\/ The URL username field, from the userinfo section of the URL.\n \/\/\/\n \/\/\/ `None` if the `@` character was not part of the input OR\n \/\/\/ if a blank username was provided.\n \/\/\/ Otherwise, a non-empty string.\n pub username: Option<String>,\n\n \/\/\/ The URL password field, from the userinfo section of the URL.\n \/\/\/\n \/\/\/ `None` if the `@` character was not part of the input OR\n \/\/\/ if a blank password was provided.\n \/\/\/ Otherwise, a non-empty string.\n pub password: Option<String>,\n\n \/\/\/ The URL query string.\n \/\/\/\n \/\/\/ `None` if the `?` character was not part of the input.\n \/\/\/ Otherwise, a possibly empty, percent encoded string.\n pub query: Option<String>,\n\n \/\/\/ The URL fragment.\n \/\/\/\n \/\/\/ `None` if the `#` character was not part of the input.\n \/\/\/ Otherwise, a possibly empty, percent encoded string.\n pub fragment: Option<String>\n}\n\nimpl Url {\n \/\/\/ Create a URL from a string.\n \/\/\/\n \/\/\/ The input must be a valid URL with a special scheme for this to succeed.\n \/\/\/\n \/\/\/ HTTP and HTTPS are special schemes.\n \/\/\/\n \/\/\/ See: http:\/\/url.spec.whatwg.org\/#special-scheme\n pub fn parse(input: &str) -> Result<Url, String> {\n \/\/ Parse the string using rust-url, then convert.\n match url::Url::parse(input) {\n Ok(raw_url) => Url::from_generic_url(raw_url),\n Err(e) => Err(format!(\"{}\", e))\n }\n }\n\n \/\/\/ Create a `Url` from a `rust-url` `Url`.\n pub fn from_generic_url(raw_url: url::Url) -> Result<Url, String> {\n \/\/ Create an Iron URL by extracting the special scheme data.\n match raw_url.scheme_data {\n SchemeData::Relative(data) => {\n \/\/ Extract the port as a 16-bit unsigned integer.\n let port: u16 = match data.port {\n \/\/ If explicitly defined, unwrap it.\n Some(port) => port,\n\n \/\/ Otherwise, use the scheme's default 
port.\n None => {\n match whatwg_scheme_type_mapper(&raw_url.scheme) {\n SchemeType::Relative(port) => port,\n _ => return Err(format!(\"Invalid special scheme: `{}`\",\n raw_url.scheme))\n }\n }\n };\n\n \/\/ Map empty usernames to None.\n let username = match &*data.username {\n \"\" => None,\n _ => Some(data.username)\n };\n\n \/\/ Map empty passwords to None.\n let password = match data.password {\n None => None,\n Some(ref x) if x.is_empty() => None,\n Some(password) => Some(password)\n };\n\n Ok(Url {\n scheme: raw_url.scheme,\n host: data.host,\n port: port,\n path: data.path,\n username: username,\n password: password,\n query: raw_url.query,\n fragment: raw_url.fragment\n })\n },\n _ => Err(format!(\"Not a special scheme: `{}`\", raw_url.scheme))\n }\n }\n\n \/\/\/ Create a `rust-url` `Url` from a `Url`.\n pub fn into_generic_url(self) -> url::Url {\n let default_port = whatwg_scheme_type_mapper(&self.scheme).default_port();\n\n url::Url {\n scheme: self.scheme,\n scheme_data: SchemeData::Relative(\n RelativeSchemeData {\n username: self.username.unwrap_or(\"\".to_string()),\n password: self.password,\n host: self.host,\n port: Some(self.port),\n default_port: default_port,\n path: self.path\n }\n ),\n query: self.query,\n fragment: self.fragment\n }\n }\n}\n\nimpl fmt::Display for Url {\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Write the scheme.\n try!(self.scheme.fmt(formatter));\n try!(\":\/\/\".fmt(formatter));\n\n \/\/ Write the user info.\n try!(write!(formatter, \"{}\", UserInfoFormatter {\n username: self.username.as_ref().map(|s| &**s).unwrap_or(\"\"),\n password: self.password.as_ref().map(|s| &**s)\n }));\n\n \/\/ Write the host.\n try!(self.host.fmt(formatter));\n\n \/\/ Write the port.\n try!(\":\".fmt(formatter));\n try!(self.port.fmt(formatter));\n\n \/\/ Write the path.\n try!(write!(formatter, \"{}\", PathFormatter { path: &self.path }));\n\n \/\/ Write the query.\n match self.query {\n Some(ref query) => {\n try!(\"?\".fmt(formatter));\n try!(query.fmt(formatter));\n },\n None => ()\n }\n\n \/\/ Write the fragment.\n match self.fragment {\n Some(ref fragment) => {\n try!(\"#\".fmt(formatter));\n try!(fragment.fmt(formatter));\n },\n None => ()\n }\n\n Ok(())\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::Url;\n\n #[test]\n fn test_default_port() {\n assert_eq!(Url::parse(\"http:\/\/example.com\/wow\").unwrap().port, 80u16);\n assert_eq!(Url::parse(\"https:\/\/example.com\/wow\").unwrap().port, 443u16);\n }\n\n #[test]\n fn test_explicit_port() {\n assert_eq!(Url::parse(\"http:\/\/localhost:3097\").unwrap().port, 3097u16);\n }\n\n #[test]\n fn test_empty_username() {\n assert!(Url::parse(\"http:\/\/@example.com\").unwrap().username.is_none());\n assert!(Url::parse(\"http:\/\/:password@example.com\").unwrap().username.is_none());\n }\n\n #[test]\n fn test_not_empty_username() {\n let user = Url::parse(\"http:\/\/john:pass@example.com\").unwrap().username;\n assert_eq!(user.unwrap(), \"john\");\n\n let user = Url::parse(\"http:\/\/john:@example.com\").unwrap().username;\n assert_eq!(user.unwrap(), \"john\");\n }\n\n #[test]\n fn test_empty_password() {\n assert!(Url::parse(\"http:\/\/michael@example.com\").unwrap().password.is_none());\n assert!(Url::parse(\"http:\/\/:@example.com\").unwrap().password.is_none());\n }\n\n #[test]\n fn test_not_empty_password() {\n let pass = Url::parse(\"http:\/\/michael:pass@example.com\").unwrap().password;\n assert_eq!(pass.unwrap(), \"pass\");\n\n let pass = 
Url::parse(\"http:\/\/:pass@example.com\").unwrap().password;\n assert_eq!(pass.unwrap(), \"pass\");\n }\n\n #[test]\n fn test_formatting() {\n assert_eq!(Url::parse(\"http:\/\/michael@example.com\/path\/?q=wow\").unwrap().to_string(),\n \"http:\/\/michael@example.com:80\/path\/?q=wow\".to_string());\n }\n\n #[test]\n fn test_conversion() {\n let url_str = \"https:\/\/user:password@iron.com:8080\/path?q=wow#fragment\";\n let url = Url::parse(url_str).unwrap();\n\n \/\/ Convert to a generic URL and check fidelity.\n let raw_url = url.clone().into_generic_url();\n assert_eq!(::url::Url::parse(url_str).unwrap(), raw_url);\n\n \/\/ Convert back to an Iron URL and check fidelity.\n let new_url = Url::from_generic_url(raw_url).unwrap();\n assert_eq!(url, new_url);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2423<commit_after>\/\/ https:\/\/leetcode.com\/problems\/remove-letter-to-equalize-frequency\/\npub fn equal_frequency(word: String) -> bool {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", equal_frequency(\"abcc\".to_string())); \/\/ true\n println!(\"{}\", equal_frequency(\"aazz\".to_string())); \/\/ false\n}\n<|endoftext|>"} {"text":"<commit_before>use std::hash::{Hash, Hasher};\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc::mir;\nuse rustc::mir::interpret::{AllocId, Pointer, Scalar, ScalarMaybeUndef, Relocations, Allocation, UndefMask};\nuse rustc::ty;\nuse rustc::ty::layout::Align;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\nuse syntax::ast::Mutability;\nuse syntax::source_map::Span;\n\nuse super::eval_context::{LocalValue, StackPopCleanup};\nuse super::{Frame, Memory, Machine, Operand, MemPlace, Place, PlaceExtra, Value};\n\ntrait SnapshotContext<'a> {\n type To;\n type From;\n fn resolve(&'a self, id: &Self::From) -> Option<&'a Self::To>;\n}\n\ntrait Snapshot<'a, Ctx: SnapshotContext<'a>> {\n type Item;\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for AllocId\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocIdSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))\n }\n}\n\ntype PointerSnapshot<'a> = Pointer<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Pointer\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PointerSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Pointer{ alloc_id, offset } = self;\n\n Pointer {\n alloc_id: alloc_id.snapshot(ctx),\n offset: *offset,\n }\n }\n}\n\ntype ScalarSnapshot<'a> = Scalar<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Scalar\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),\n Scalar::Bits{ size, bits } => Scalar::Bits{\n size: *size,\n bits: *bits,\n },\n }\n }\n}\n\ntype ScalarMaybeUndefSnapshot<'a> = ScalarMaybeUndef<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for ScalarMaybeUndef\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarMaybeUndefSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n 
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.snapshot(ctx)),\n ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,\n }\n }\n}\n\ntype MemPlaceSnapshot<'a> = MemPlace<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for MemPlace\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = MemPlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let MemPlace{ ptr, extra, align } = self;\n\n MemPlaceSnapshot{\n ptr: ptr.snapshot(ctx),\n extra: extra.snapshot(ctx),\n align: *align,\n }\n }\n}\n\ntype PlaceSnapshot<'a> = Place<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Place\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),\n\n Place::Local{ frame, local } => Place::Local{\n frame: *frame,\n local: *local,\n },\n }\n }\n}\n\ntype PlaceExtraSnapshot<'a> = PlaceExtra<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for PlaceExtra\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceExtraSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n PlaceExtra::Vtable(p) => PlaceExtra::Vtable(p.snapshot(ctx)),\n PlaceExtra::Length(l) => PlaceExtra::Length(*l),\n PlaceExtra::None => PlaceExtra::None,\n }\n }\n}\n\ntype ValueSnapshot<'a> = Value<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Value\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Value::Scalar(s) => Value::Scalar(s.snapshot(ctx)),\n Value::ScalarPair(a, b) => Value::ScalarPair(a.snapshot(ctx), b.snapshot(ctx)),\n }\n }\n}\n\ntype OperandSnapshot<'a> = Operand<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Operand\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = OperandSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Operand::Immediate(v) => Operand::Immediate(v.snapshot(ctx)),\n Operand::Indirect(m) => Operand::Indirect(m.snapshot(ctx)),\n }\n }\n}\n\ntype LocalValueSnapshot<'a> = LocalValue<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for LocalValue\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = LocalValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n LocalValue::Live(v) => LocalValue::Live(v.snapshot(ctx)),\n LocalValue::Dead => LocalValue::Dead,\n }\n }\n}\n\ntype RelocationsSnapshot<'a> = Relocations<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Relocations\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = RelocationsSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n Relocations::from_presorted(self.iter().map(|(size, id)| (*size, id.snapshot(ctx))).collect())\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocationSnapshot<'a> {\n bytes: &'a [u8],\n relocations: RelocationsSnapshot<'a>,\n undef_mask: &'a UndefMask,\n align: &'a Align,\n runtime_mutability: &'a Mutability,\n}\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocationSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Allocation { bytes, relocations, undef_mask, align, runtime_mutability } = self;\n\n 
AllocationSnapshot {\n bytes,\n undef_mask,\n align,\n runtime_mutability,\n relocations: relocations.snapshot(ctx),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct FrameSnapshot<'a, 'tcx> {\n instance: &'a ty::Instance<'tcx>,\n span: &'a Span,\n return_to_block: &'a StackPopCleanup,\n return_place: PlaceSnapshot<'a>,\n locals: IndexVec<mir::Local, LocalValueSnapshot<'a>>,\n block: &'a mir::BasicBlock,\n stmt: usize,\n}\n\nimpl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = FrameSnapshot<'a, 'tcx>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Frame {\n mir: _,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n FrameSnapshot {\n instance,\n span,\n return_to_block,\n block,\n stmt: *stmt,\n return_place: return_place.snapshot(ctx),\n locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct MemorySnapshot<'a, 'mir: 'a, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx> + 'a> {\n data: &'a M::MemoryData,\n}\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n#[derive(Eq, PartialEq)]\npub struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {\n machine: M,\n memory: Memory<'a, 'mir, 'tcx, M>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx, M> EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n pub fn new(machine: &M, memory: &Memory<'a, 'mir, 'tcx, M>, stack: &[Frame<'mir, 'tcx>]) -> Self {\n EvalSnapshot {\n machine: machine.clone(),\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n}\n\nimpl<'a, 'mir, 'tcx, M> Hash for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx, M> HashStable<StableHashingContext<'b>> for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'b>, hasher: &mut StableHasher<W>) {\n let EvalSnapshot{ machine, memory, stack } = self;\n (machine, &memory.data, stack).hash_stable(hcx, hasher);\n }\n}\n<commit_msg>Impl SnapshotContext for Memory<commit_after>use std::hash::{Hash, Hasher};\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc::mir;\nuse rustc::mir::interpret::{AllocId, Pointer, Scalar, ScalarMaybeUndef, Relocations, Allocation, UndefMask};\nuse rustc::ty;\nuse rustc::ty::layout::Align;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\nuse syntax::ast::Mutability;\nuse syntax::source_map::Span;\n\nuse super::eval_context::{LocalValue, StackPopCleanup};\nuse super::{Frame, Memory, Machine, Operand, MemPlace, Place, PlaceExtra, Value};\n\ntrait SnapshotContext<'a> {\n type To;\n type From;\n fn resolve(&'a self, id: &Self::From) -> Option<&'a Self::To>;\n}\n\ntrait Snapshot<'a, Ctx: SnapshotContext<'a>> {\n type Item;\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for AllocId\n where 
Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocIdSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))\n }\n}\n\ntype PointerSnapshot<'a> = Pointer<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Pointer\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PointerSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Pointer{ alloc_id, offset } = self;\n\n Pointer {\n alloc_id: alloc_id.snapshot(ctx),\n offset: *offset,\n }\n }\n}\n\ntype ScalarSnapshot<'a> = Scalar<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Scalar\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),\n Scalar::Bits{ size, bits } => Scalar::Bits{\n size: *size,\n bits: *bits,\n },\n }\n }\n}\n\ntype ScalarMaybeUndefSnapshot<'a> = ScalarMaybeUndef<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for ScalarMaybeUndef\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ScalarMaybeUndefSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.snapshot(ctx)),\n ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,\n }\n }\n}\n\ntype MemPlaceSnapshot<'a> = MemPlace<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for MemPlace\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = MemPlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let MemPlace{ ptr, extra, align } = self;\n\n MemPlaceSnapshot{\n ptr: ptr.snapshot(ctx),\n extra: extra.snapshot(ctx),\n align: *align,\n }\n }\n}\n\ntype PlaceSnapshot<'a> = Place<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Place\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),\n\n Place::Local{ frame, local } => Place::Local{\n frame: *frame,\n local: *local,\n },\n }\n }\n}\n\ntype PlaceExtraSnapshot<'a> = PlaceExtra<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for PlaceExtra\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = PlaceExtraSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n PlaceExtra::Vtable(p) => PlaceExtra::Vtable(p.snapshot(ctx)),\n PlaceExtra::Length(l) => PlaceExtra::Length(*l),\n PlaceExtra::None => PlaceExtra::None,\n }\n }\n}\n\ntype ValueSnapshot<'a> = Value<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Value\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = ValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Value::Scalar(s) => Value::Scalar(s.snapshot(ctx)),\n Value::ScalarPair(a, b) => Value::ScalarPair(a.snapshot(ctx), b.snapshot(ctx)),\n }\n }\n}\n\ntype OperandSnapshot<'a> = Operand<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Operand\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = OperandSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Operand::Immediate(v) => Operand::Immediate(v.snapshot(ctx)),\n Operand::Indirect(m) => 
Operand::Indirect(m.snapshot(ctx)),\n }\n }\n}\n\ntype LocalValueSnapshot<'a> = LocalValue<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for LocalValue\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = LocalValueSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n LocalValue::Live(v) => LocalValue::Live(v.snapshot(ctx)),\n LocalValue::Dead => LocalValue::Dead,\n }\n }\n}\n\ntype RelocationsSnapshot<'a> = Relocations<AllocIdSnapshot<'a>>;\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Relocations\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = RelocationsSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n Relocations::from_presorted(self.iter().map(|(size, id)| (*size, id.snapshot(ctx))).collect())\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocationSnapshot<'a> {\n bytes: &'a [u8],\n relocations: RelocationsSnapshot<'a>,\n undef_mask: &'a UndefMask,\n align: &'a Align,\n runtime_mutability: &'a Mutability,\n}\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = AllocationSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Allocation { bytes, relocations, undef_mask, align, runtime_mutability } = self;\n\n AllocationSnapshot {\n bytes,\n undef_mask,\n align,\n runtime_mutability,\n relocations: relocations.snapshot(ctx),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct FrameSnapshot<'a, 'tcx> {\n instance: &'a ty::Instance<'tcx>,\n span: &'a Span,\n return_to_block: &'a StackPopCleanup,\n return_place: PlaceSnapshot<'a>,\n locals: IndexVec<mir::Local, LocalValueSnapshot<'a>>,\n block: &'a mir::BasicBlock,\n stmt: usize,\n}\n\nimpl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>\n where Ctx: SnapshotContext<'a, To=Allocation, From=AllocId>,\n{\n type Item = FrameSnapshot<'a, 'tcx>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Frame {\n mir: _,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n FrameSnapshot {\n instance,\n span,\n return_to_block,\n block,\n stmt: *stmt,\n return_place: return_place.snapshot(ctx),\n locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct MemorySnapshot<'a, 'mir: 'a, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx> + 'a> {\n data: &'a M::MemoryData,\n}\n\nimpl<'a, 'b, 'mir, 'tcx, M> SnapshotContext<'b> for Memory<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n type To = Allocation;\n type From = AllocId;\n fn resolve(&'b self, id: &Self::From) -> Option<&'b Self::To> {\n self.get(*id).ok()\n }\n}\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n#[derive(Eq, PartialEq)]\npub struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {\n machine: M,\n memory: Memory<'a, 'mir, 'tcx, M>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx, M> EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n pub fn new(machine: &M, memory: &Memory<'a, 'mir, 'tcx, M>, stack: &[Frame<'mir, 'tcx>]) -> Self {\n EvalSnapshot {\n machine: machine.clone(),\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n}\n\nimpl<'a, 'mir, 'tcx, M> Hash for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = 
self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx, M> HashStable<StableHashingContext<'b>> for EvalSnapshot<'a, 'mir, 'tcx, M>\n where M: Machine<'mir, 'tcx>,\n{\n fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'b>, hasher: &mut StableHasher<W>) {\n let EvalSnapshot{ machine, memory, stack } = self;\n (machine, &memory.data, stack).hash_stable(hcx, hasher);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Upgrade variables to 64 bit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Cleanup Reliability functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Finish RakNet packet?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add run-pass test for From<NonZero><commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n use std::num::NonZeroU32;\n let nz = NonZeroU32::new(5).unwrap();\n let num: u32 = nz.into();\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n#![allow(dead_code)]\n\nmod foo {\n pub use self::bar::T;\n mod bar {\n pub trait T {\n fn f(&self) {}\n }\n impl T for () {}\n }\n}\n\nfn g() {\n use foo::T;\n ().f(); \/\/ Check that this does not trigger a privacy error\n}\n\n#[rustc_error]\nfn main() {} \/\/~ ERROR compilation successful\n<commit_msg>Add regression test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs, get_type_id)]\n#![allow(dead_code)]\n\nmod foo {\n pub use self::bar::T;\n mod bar {\n pub trait T {\n fn f(&self) {}\n }\n impl T for () {}\n }\n}\n\nfn g() {\n use foo::T;\n ().f(); \/\/ Check that this does not trigger a privacy error\n}\n\nfn f() {\n let error = ::std::thread::spawn(|| {}).join().unwrap_err();\n error.get_type_id(); \/\/ Regression test for #21670\n}\n\n#[rustc_error]\nfn main() {} \/\/~ ERROR compilation successful\n<|endoftext|>"} {"text":"<commit_before>use inflector::Inflector;\n\nuse std::collections::HashMap;\n\nuse botocore::{Operation, Service};\nuse super::GenerateProtocol;\n\npub struct JsonGenerator;\n\nimpl GenerateProtocol for JsonGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n\n let output_type = operation.output_shape_or(\"()\");\n\n format!(\"\n {documentation}\n pub fn {method_name}(&self, input: &{input_type}) -> {result_type} {{\n let encoded = serde_json::to_string(input).unwrap();\n let mut request = SignedRequest::new(\\\"{http_method}\\\", \\\"{endpoint_prefix}\\\", self.region, \\\"{request_uri}\\\");\n request.set_content_type(\\\"application\/x-amz-json-{json_version}\\\".to_owned());\n request.add_header(\\\"x-amz-target\\\", \\\"{target_prefix}.{name}\\\");\n request.set_payload(Some(encoded.as_bytes()));\n let mut result = request.sign_and_execute(try!(self.credentials_provider.credentials()));\n let status = result.status.to_u16();\n let mut body = String::new();\n result.read_to_string(&mut body).unwrap();\n match status {{\n 200 => {{\n {ok_response}\n }}\n _ => {err_response},\n }}\n }}\n \",\n documentation = generate_documentation(operation).unwrap_or(\"\".to_owned()),\n endpoint_prefix = service.metadata.endpoint_prefix,\n http_method = operation.http.method,\n input_type = operation.input_shape(),\n method_name = operation.name.to_snake_case(),\n name = operation.name,\n ok_response = generate_ok_response(operation, output_type),\n err_response = generate_err_response(service, operation),\n result_type = generate_result_type(service, operation, output_type),\n request_uri = operation.http.request_uri,\n target_prefix = service.metadata.target_prefix.as_ref().unwrap(),\n json_version = service.metadata.json_version.as_ref().unwrap(),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self, service: &Service) -> String {\n format!(\n \"use std::io::Read;\n\n use serde_json;\n\n use credential::ProvideAwsCredentials;\n use region;\n use signature::SignedRequest;\n\n {error_imports}\",\n error_imports = generate_error_imports(service))\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default, Deserialize, Serialize)]\".to_owned()\n }\n\n fn generate_error_types(&self, service: &Service) -> Option<String>{\n if service.typed_errors() {\n\n \/\/ grab error type documentation for use with error enums in generated code\n \/\/ botocore presents errors as structs. 
we filter those out in generate_types.\n let mut error_documentation = HashMap::new();\n\n for (name, shape) in service.shapes.iter() {\n if shape.exception() && shape.documentation.is_some() {\n error_documentation.insert(name, shape.documentation.as_ref().unwrap());\n }\n }\n\n Some(service.operations.iter()\n .filter_map(|(_, operation)| generate_error_type(operation, &error_documentation) )\n .collect::<Vec<String>>()\n .join(\"\\n\")\n )\n } else {\n None\n }\n }\n\n fn timestamp_type(&self) -> &'static str {\n \"f64\"\n }\n\n}\n\n\npub fn generate_error_type(operation: &Operation, error_documentation: &HashMap<&String, &String>,) -> Option<String> {\n\n let error_type_name = operation.error_type_name();\n\n Some(format!(\"\n #[derive(Debug, PartialEq)]\n pub enum {type_name} {{\n {error_types}\n }}\n\n impl {type_name} {{\n pub fn from_body(body: &str) -> {type_name} {{\n match from_str::<SerdeJsonValue>(body) {{\n Ok(json) => {{\n let error_type: &str = match json.find(\\\"__type\\\") {{\n Some(error_type) => error_type.as_string().unwrap_or(\\\"Unknown\\\"),\n None => \\\"Unknown\\\",\n }};\n\n match error_type {{\n {type_matchers}\n }}\n }},\n Err(_) => {type_name}::Unknown(String::from(body))\n }}\n }}\n }}\n impl From<AwsError> for {type_name} {{\n fn from(err: AwsError) -> {type_name} {{\n {type_name}::Unknown(err.message)\n }}\n }}\n impl fmt::Display for {type_name} {{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {{\n write!(f, \\\"{{}}\\\", self.description())\n }}\n }}\n impl Error for {type_name} {{\n fn description(&self) -> &str {{\n match *self {{\n {description_matchers}\n }}\n }}\n }}\n \",\n type_name = error_type_name,\n error_types = generate_error_enum_types(operation, error_documentation).unwrap_or(String::from(\"\")),\n type_matchers = generate_error_type_matchers(operation).unwrap_or(String::from(\"\")),\n description_matchers = generate_error_description_matchers(operation).unwrap_or(String::from(\"\"))))\n}\n\nfn generate_error_enum_types(operation: &Operation, error_documentation: &HashMap<&String, &String>) -> Option<String> {\n let mut enum_types: Vec<String> = Vec::new();\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n enum_types.push(format!(\"\\n\/\/\/{}\\n{}(String)\",\n error_documentation.get(&error.shape).unwrap_or(&&String::from(\"\")),\n error.idiomatic_error_name()));\n }\n }\n\n enum_types.push(\"\/\/\/ A validation error occurred. Details from AWS are provided.\\nValidation(String)\".to_string());\n enum_types.push(\"\/\/\/ An unknown error occurred. 
The raw HTTP response is provided.\\nUnknown(String)\".to_string());\n Some(enum_types.join(\",\"))\n}\n\nfn generate_error_type_matchers(operation: &Operation) -> Option<String> {\n let mut type_matchers: Vec<String> = Vec::new();\n let error_type = operation.error_type_name();\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n type_matchers.push(format!(\"\\\"{error_shape}\\\" => {error_type}::{error_name}(String::from(body))\",\n error_shape = error.shape,\n error_type = error_type,\n error_name = error.idiomatic_error_name()))\n }\n }\n\n type_matchers.push(format!(\"\\\"Validation\\\" => {error_type}::Validation(String::from(body))\", error_type = error_type));\n type_matchers.push(format!(\"_ => {error_type}::Unknown(String::from(body))\", error_type = error_type));\n Some(type_matchers.join(\",\"))\n}\n\nfn generate_error_description_matchers(operation: &Operation) -> Option<String> {\n let mut type_matchers: Vec<String> = Vec::new();\n let error_type = operation.error_type_name();\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n type_matchers.push(format!(\"{error_type}::{error_shape}(ref cause) => cause\",\n error_type = operation.error_type_name(),\n error_shape = error.idiomatic_error_name()))\n }\n }\n\n type_matchers.push(format!(\"{error_type}::Validation(ref cause) => cause\", error_type = error_type));\n type_matchers.push(format!(\"{error_type}::Unknown(ref cause) => cause\", error_type = error_type));\n Some(type_matchers.join(\",\"))\n}\n\nfn generate_result_type<'a>(service: &Service, operation: &Operation, output_type: &'a str) -> String {\n if service.typed_errors() {\n format!(\"Result<{}, {}>\", output_type, operation.error_type_name())\n } else {\n format!(\"AwsResult<{}>\", output_type)\n }\n}\n\nfn generate_error_imports(service: &Service) -> &'static str {\n if service.typed_errors() {\n \"use error::AwsError;\n use std::error::Error;\n use std::fmt;\n use serde_json::Value as SerdeJsonValue;\n use serde_json::from_str;\"\n } else {\n \"use error::{AwsResult, parse_json_protocol_error};\"\n }\n}\n\nfn generate_documentation(operation: &Operation) -> Option<String> {\n operation.documentation.as_ref().map(|docs| {\n format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\"))\n })\n}\n\nfn generate_ok_response(operation: &Operation, output_type: &str) -> String {\n if operation.output.is_some() {\n format!(\"Ok(serde_json::from_str::<{}>(&body).unwrap())\", output_type)\n } else {\n \"Ok(())\".to_owned()\n }\n}\n\nfn generate_err_response(service: &Service, operation: &Operation) -> String {\n if service.typed_errors() {\n format!(\"Err({}::from_body(&body))\", operation.error_type_name())\n } else {\n String::from(\"Err(parse_json_protocol_error(&body))\") \n }\n}\n<commit_msg>don't add a Validation type to the error enum if the protocol being generated alredy defines one<commit_after>use inflector::Inflector;\n\nuse std::collections::HashMap;\n\nuse botocore::{Operation, Service};\nuse super::GenerateProtocol;\n\npub struct JsonGenerator;\n\nimpl GenerateProtocol for JsonGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n\n let output_type = operation.output_shape_or(\"()\");\n\n format!(\"\n {documentation}\n pub fn {method_name}(&self, input: &{input_type}) -> {result_type} {{\n let encoded = serde_json::to_string(input).unwrap();\n let mut request = 
SignedRequest::new(\\\"{http_method}\\\", \\\"{endpoint_prefix}\\\", self.region, \\\"{request_uri}\\\");\n request.set_content_type(\\\"application\/x-amz-json-{json_version}\\\".to_owned());\n request.add_header(\\\"x-amz-target\\\", \\\"{target_prefix}.{name}\\\");\n request.set_payload(Some(encoded.as_bytes()));\n let mut result = request.sign_and_execute(try!(self.credentials_provider.credentials()));\n let status = result.status.to_u16();\n let mut body = String::new();\n result.read_to_string(&mut body).unwrap();\n match status {{\n 200 => {{\n {ok_response}\n }}\n _ => {err_response},\n }}\n }}\n \",\n documentation = generate_documentation(operation).unwrap_or(\"\".to_owned()),\n endpoint_prefix = service.metadata.endpoint_prefix,\n http_method = operation.http.method,\n input_type = operation.input_shape(),\n method_name = operation.name.to_snake_case(),\n name = operation.name,\n ok_response = generate_ok_response(operation, output_type),\n err_response = generate_err_response(service, operation),\n result_type = generate_result_type(service, operation, output_type),\n request_uri = operation.http.request_uri,\n target_prefix = service.metadata.target_prefix.as_ref().unwrap(),\n json_version = service.metadata.json_version.as_ref().unwrap(),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self, service: &Service) -> String {\n format!(\n \"use std::io::Read;\n\n use serde_json;\n\n use credential::ProvideAwsCredentials;\n use region;\n use signature::SignedRequest;\n\n {error_imports}\",\n error_imports = generate_error_imports(service))\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default, Deserialize, Serialize)]\".to_owned()\n }\n\n fn generate_error_types(&self, service: &Service) -> Option<String>{\n if service.typed_errors() {\n\n \/\/ grab error type documentation for use with error enums in generated code\n \/\/ botocore presents errors as structs. 
we filter those out in generate_types.\n let mut error_documentation = HashMap::new();\n\n for (name, shape) in service.shapes.iter() {\n if shape.exception() && shape.documentation.is_some() {\n error_documentation.insert(name, shape.documentation.as_ref().unwrap());\n }\n }\n\n Some(service.operations.iter()\n .filter_map(|(_, operation)| generate_error_type(operation, &error_documentation) )\n .collect::<Vec<String>>()\n .join(\"\\n\")\n )\n } else {\n None\n }\n }\n\n fn timestamp_type(&self) -> &'static str {\n \"f64\"\n }\n\n}\n\n\npub fn generate_error_type(operation: &Operation, error_documentation: &HashMap<&String, &String>,) -> Option<String> {\n\n let error_type_name = operation.error_type_name();\n\n Some(format!(\"\n #[derive(Debug, PartialEq)]\n pub enum {type_name} {{\n {error_types}\n }}\n\n impl {type_name} {{\n pub fn from_body(body: &str) -> {type_name} {{\n match from_str::<SerdeJsonValue>(body) {{\n Ok(json) => {{\n let error_type: &str = match json.find(\\\"__type\\\") {{\n Some(error_type) => error_type.as_string().unwrap_or(\\\"Unknown\\\"),\n None => \\\"Unknown\\\",\n }};\n\n match error_type {{\n {type_matchers}\n }}\n }},\n Err(_) => {type_name}::Unknown(String::from(body))\n }}\n }}\n }}\n impl From<AwsError> for {type_name} {{\n fn from(err: AwsError) -> {type_name} {{\n {type_name}::Unknown(err.message)\n }}\n }}\n impl fmt::Display for {type_name} {{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {{\n write!(f, \\\"{{}}\\\", self.description())\n }}\n }}\n impl Error for {type_name} {{\n fn description(&self) -> &str {{\n match *self {{\n {description_matchers}\n }}\n }}\n }}\n \",\n type_name = error_type_name,\n error_types = generate_error_enum_types(operation, error_documentation).unwrap_or(String::from(\"\")),\n type_matchers = generate_error_type_matchers(operation).unwrap_or(String::from(\"\")),\n description_matchers = generate_error_description_matchers(operation).unwrap_or(String::from(\"\"))))\n}\n\nfn generate_error_enum_types(operation: &Operation, error_documentation: &HashMap<&String, &String>) -> Option<String> {\n let mut enum_types: Vec<String> = Vec::new();\n let mut add_validation = true;\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n let error_name = error.idiomatic_error_name();\n\n enum_types.push(format!(\"\\n\/\/\/{}\\n{}(String)\",\n error_documentation.get(&error.shape).unwrap_or(&&String::from(\"\")),\n error_name));\n\n if error_name == \"Validation\" {\n add_validation = false;\n }\n }\n }\n\n if add_validation {\n enum_types.push(\"\/\/\/ A validation error occurred. Details from AWS are provided.\\nValidation(String)\".to_string());\n }\n\n enum_types.push(\"\/\/\/ An unknown error occurred. 
The raw HTTP response is provided.\\nUnknown(String)\".to_string());\n Some(enum_types.join(\",\"))\n}\n\nfn generate_error_type_matchers(operation: &Operation) -> Option<String> {\n let mut type_matchers: Vec<String> = Vec::new();\n let error_type = operation.error_type_name();\n let mut add_validation = true;\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n let error_name = error.idiomatic_error_name();\n\n type_matchers.push(format!(\"\\\"{error_shape}\\\" => {error_type}::{error_name}(String::from(body))\",\n error_shape = error.shape,\n error_type = error_type,\n error_name = error_name));\n\n if error_name == \"Validation\" {\n add_validation = false;\n }\n\n }\n }\n\n if add_validation {\n type_matchers.push(format!(\"\\\"Validation\\\" => {error_type}::Validation(String::from(body))\", error_type = error_type));\n }\n\n type_matchers.push(format!(\"_ => {error_type}::Unknown(String::from(body))\", error_type = error_type));\n Some(type_matchers.join(\",\"))\n}\n\nfn generate_error_description_matchers(operation: &Operation) -> Option<String> {\n let mut type_matchers: Vec<String> = Vec::new();\n let error_type = operation.error_type_name();\n let mut add_validation = true;\n\n if operation.errors.is_some() {\n for error in operation.errors.as_ref().unwrap().iter() {\n let error_name = error.idiomatic_error_name();\n type_matchers.push(format!(\"{error_type}::{error_shape}(ref cause) => cause\",\n error_type = operation.error_type_name(),\n error_shape = error_name));\n\n if error_name == \"Validation\" {\n add_validation = false;\n }\n\n }\n }\n\n if add_validation {\n type_matchers.push(format!(\"{error_type}::Validation(ref cause) => cause\", error_type = error_type));\n }\n\n type_matchers.push(format!(\"{error_type}::Unknown(ref cause) => cause\", error_type = error_type));\n Some(type_matchers.join(\",\"))\n}\n\nfn generate_result_type<'a>(service: &Service, operation: &Operation, output_type: &'a str) -> String {\n if service.typed_errors() {\n format!(\"Result<{}, {}>\", output_type, operation.error_type_name())\n } else {\n format!(\"AwsResult<{}>\", output_type)\n }\n}\n\nfn generate_error_imports(service: &Service) -> &'static str {\n if service.typed_errors() {\n \"use error::AwsError;\n use std::error::Error;\n use std::fmt;\n use serde_json::Value as SerdeJsonValue;\n use serde_json::from_str;\"\n } else {\n \"use error::{AwsResult, parse_json_protocol_error};\"\n }\n}\n\nfn generate_documentation(operation: &Operation) -> Option<String> {\n operation.documentation.as_ref().map(|docs| {\n format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\"))\n })\n}\n\nfn generate_ok_response(operation: &Operation, output_type: &str) -> String {\n if operation.output.is_some() {\n format!(\"Ok(serde_json::from_str::<{}>(&body).unwrap())\", output_type)\n } else {\n \"Ok(())\".to_owned()\n }\n}\n\nfn generate_err_response(service: &Service, operation: &Operation) -> String {\n if service.typed_errors() {\n format!(\"Err({}::from_body(&body))\", operation.error_type_name())\n } else {\n String::from(\"Err(parse_json_protocol_error(&body))\") \n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Modify prompt<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the \"canonicalizer\" itself.\n\/\/!\n\/\/! For an overview of what canonicaliation is and how it fits into\n\/\/! rustc, check out the [chapter in the rustc guide][c].\n\/\/!\n\/\/! [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html\n\nuse infer::canonical::{\n Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, Canonicalized,\n SmallCanonicalVarValues,\n};\nuse infer::InferCtxt;\nuse std::sync::atomic::Ordering;\nuse ty::fold::{TypeFoldable, TypeFolder};\nuse ty::subst::Kind;\nuse ty::{self, CanonicalVar, Lift, List, Ty, TyCtxt, TypeFlags};\n\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::Idx;\nuse smallvec::SmallVec;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n \/\/\/ Canonicalizes a query value `V`. When we canonicalize a query,\n \/\/\/ we not only canonicalize unbound inference variables, but we\n \/\/\/ *also* replace all free regions whatsoever. So for example a\n \/\/\/ query like `T: Trait<'static>` would be canonicalized to\n \/\/\/\n \/\/\/ ```text\n \/\/\/ T: Trait<'?0>\n \/\/\/ ```\n \/\/\/\n \/\/\/ with a mapping M that maps `'?0` to `'static`.\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#canonicalizing-the-query\n pub fn canonicalize_query<V>(\n &self,\n value: &V,\n var_values: &mut SmallCanonicalVarValues<'tcx>\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n self.tcx\n .sess\n .perf_stats\n .queries_canonicalized\n .fetch_add(1, Ordering::Relaxed);\n\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: true,\n other_free_regions: true,\n },\n var_values,\n )\n }\n\n \/\/\/ Canonicalizes a query *response* `V`. When we canonicalize a\n \/\/\/ query response, we only canonicalize unbound inference\n \/\/\/ variables, and we leave other free regions alone. So,\n \/\/\/ continuing with the example from `canonicalize_query`, if\n \/\/\/ there was an input query `T: Trait<'static>`, it would have\n \/\/\/ been canonicalized to\n \/\/\/\n \/\/\/ ```text\n \/\/\/ T: Trait<'?0>\n \/\/\/ ```\n \/\/\/\n \/\/\/ with a mapping M that maps `'?0` to `'static`. But if we found that there\n \/\/\/ exists only one possible impl of `Trait`, and it looks like\n \/\/\/\n \/\/\/ impl<T> Trait<'static> for T { .. }\n \/\/\/\n \/\/\/ then we would prepare a query result R that (among other\n \/\/\/ things) includes a mapping to `'?0 := 'static`. 
When\n \/\/\/ canonicalizing this query result R, we would leave this\n \/\/\/ reference to `'static` alone.\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#canonicalizing-the-query-result\n pub fn canonicalize_response<V>(\n &self,\n value: &V,\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n let mut var_values = SmallVec::new();\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: false,\n other_free_regions: false,\n },\n &mut var_values\n )\n }\n\n \/\/\/ A hacky variant of `canonicalize_query` that does not\n \/\/\/ canonicalize `'static`. Unfortunately, the existing leak\n \/\/\/ check treaks `'static` differently in some cases (see also\n \/\/\/ #33684), so if we are performing an operation that may need to\n \/\/\/ prove \"leak-check\" related things, we leave `'static`\n \/\/\/ alone.\n \/\/\/\n \/\/\/ FIXME(#48536) -- once we have universes, we can remove this and just use\n \/\/\/ `canonicalize_query`.\n pub fn canonicalize_hr_query_hack<V>(\n &self,\n value: &V,\n var_values: &mut SmallCanonicalVarValues<'tcx>\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n self.tcx\n .sess\n .perf_stats\n .queries_canonicalized\n .fetch_add(1, Ordering::Relaxed);\n\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: false,\n other_free_regions: true,\n },\n var_values\n )\n }\n}\n\n\/\/\/ If this flag is true, then all free regions will be replaced with\n\/\/\/ a canonical var. This is used to make queries as generic as\n\/\/\/ possible. For example, the query `F: Foo<'static>` would be\n\/\/\/ canonicalized to `F: Foo<'0>`.\nstruct CanonicalizeRegionMode {\n static_region: bool,\n other_free_regions: bool,\n}\n\nimpl CanonicalizeRegionMode {\n fn any(&self) -> bool {\n self.static_region || self.other_free_regions\n }\n}\n\nstruct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n variables: SmallVec<[CanonicalVarInfo; 8]>,\n var_values: &'cx mut SmallCanonicalVarValues<'tcx>,\n \/\/ Note that indices is only used once `var_values` is big enough to be\n \/\/ heap-allocated.\n indices: FxHashMap<Kind<'tcx>, CanonicalVar>,\n canonicalize_region_mode: CanonicalizeRegionMode,\n needs_canonical_flags: TypeFlags,\n}\n\nimpl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {\n fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {\n self.tcx\n }\n\n fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {\n match *r {\n ty::ReLateBound(..) 
=> {\n \/\/ leave bound regions alone\n r\n }\n\n ty::ReVar(vid) => {\n let r = self\n .infcx\n .unwrap()\n .borrow_region_constraints()\n .opportunistic_resolve_var(self.tcx, vid);\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n debug!(\n \"canonical: region var found with vid {:?}, \\\n opportunistically resolved to {:?}\",\n vid, r\n );\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n }\n\n ty::ReStatic => {\n if self.canonicalize_region_mode.static_region {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n } else {\n r\n }\n }\n\n ty::ReEarlyBound(..)\n | ty::ReFree(_)\n | ty::ReScope(_)\n | ty::RePlaceholder(..)\n | ty::ReEmpty\n | ty::ReErased => {\n if self.canonicalize_region_mode.other_free_regions {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n } else {\n r\n }\n }\n\n ty::ReClosureBound(..) | ty::ReCanonical(_) => {\n bug!(\"canonical region encountered during canonicalization\")\n }\n }\n }\n\n fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {\n match t.sty {\n ty::Infer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),\n\n ty::Infer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),\n\n ty::Infer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),\n\n ty::Infer(ty::FreshTy(_))\n | ty::Infer(ty::FreshIntTy(_))\n | ty::Infer(ty::FreshFloatTy(_)) => {\n bug!(\"encountered a fresh type during canonicalization\")\n }\n\n ty::Infer(ty::CanonicalTy(_)) => {\n bug!(\"encountered a canonical type during canonicalization\")\n }\n\n ty::Closure(..)\n | ty::Generator(..)\n | ty::GeneratorWitness(..)\n | ty::Bool\n | ty::Char\n | ty::Int(..)\n | ty::Uint(..)\n | ty::Float(..)\n | ty::Adt(..)\n | ty::Str\n | ty::Error\n | ty::Array(..)\n | ty::Slice(..)\n | ty::RawPtr(..)\n | ty::Ref(..)\n | ty::FnDef(..)\n | ty::FnPtr(_)\n | ty::Dynamic(..)\n | ty::Never\n | ty::Tuple(..)\n | ty::Projection(..)\n | ty::UnnormalizedProjection(..)\n | ty::Foreign(..)\n | ty::Param(..)\n | ty::Opaque(..) 
=> {\n if t.flags.intersects(self.needs_canonical_flags) {\n t.super_fold_with(self)\n } else {\n t\n }\n }\n }\n }\n}\n\nimpl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {\n \/\/\/ The main `canonicalize` method, shared impl of\n \/\/\/ `canonicalize_query` and `canonicalize_response`.\n fn canonicalize<V>(\n value: &V,\n infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n canonicalize_region_mode: CanonicalizeRegionMode,\n var_values: &'cx mut SmallCanonicalVarValues<'tcx>\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n debug_assert!(\n !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),\n \"canonicalizing a canonical value: {:?}\",\n value,\n );\n\n let needs_canonical_flags = if canonicalize_region_mode.any() {\n TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX\n } else {\n TypeFlags::KEEP_IN_LOCAL_TCX\n };\n\n let gcx = tcx.global_tcx();\n\n \/\/ Fast path: nothing that needs to be canonicalized.\n if !value.has_type_flags(needs_canonical_flags) {\n let out_value = gcx.lift(value).unwrap();\n let canon_value = Canonical {\n variables: List::empty(),\n value: out_value,\n };\n return canon_value;\n }\n\n let mut canonicalizer = Canonicalizer {\n infcx,\n tcx,\n canonicalize_region_mode,\n needs_canonical_flags,\n variables: SmallVec::new(),\n var_values,\n indices: FxHashMap::default(),\n };\n let out_value = value.fold_with(&mut canonicalizer);\n\n \/\/ Once we have canonicalized `out_value`, it should not\n \/\/ contain anything that ties it to this inference context\n \/\/ anymore, so it should live in the global arena.\n let out_value = gcx.lift(&out_value).unwrap_or_else(|| {\n bug!(\n \"failed to lift `{:?}`, canonicalized from `{:?}`\",\n out_value,\n value\n )\n });\n\n let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables);\n\n Canonical {\n variables: canonical_variables,\n value: out_value,\n }\n }\n\n \/\/\/ Creates a canonical variable replacing `kind` from the input,\n \/\/\/ or returns an existing variable if `kind` has already been\n \/\/\/ seen. `kind` is expected to be an unbound variable (or\n \/\/\/ potentially a free region).\n fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {\n let Canonicalizer {\n variables,\n var_values,\n indices,\n ..\n } = self;\n\n \/\/ This code is hot. `variables` and `var_values` are usually small\n \/\/ (fewer than 8 elements ~95% of the time). They are SmallVec's to\n \/\/ avoid allocations in those cases. We also don't use `indices` to\n \/\/ determine if a kind has been seen before until the limit of 8 has\n \/\/ been exceeded, to also avoid allocations for `indices`.\n if !var_values.spilled() {\n \/\/ `var_values` is stack-allocated. `indices` isn't used yet. Do a\n \/\/ direct linear search of `var_values`.\n if let Some(idx) = var_values.iter().position(|&k| k == kind) {\n \/\/ `kind` is already present in `var_values`.\n CanonicalVar::new(idx)\n } else {\n \/\/ `kind` isn't present in `var_values`. Append it. 
Likewise\n \/\/ for `info` and `variables`.\n variables.push(info);\n var_values.push(kind);\n assert_eq!(variables.len(), var_values.len());\n\n \/\/ If `var_values` has become big enough to be heap-allocated,\n \/\/ fill up `indices` to facilitate subsequent lookups.\n if var_values.spilled() {\n assert!(indices.is_empty());\n *indices =\n var_values.iter()\n .enumerate()\n .map(|(i, &kind)| (kind, CanonicalVar::new(i)))\n .collect();\n }\n \/\/ The cv is the index of the appended element.\n CanonicalVar::new(var_values.len() - 1)\n }\n } else {\n \/\/ `var_values` is large. Do a hashmap search via `indices`.\n *indices\n .entry(kind)\n .or_insert_with(|| {\n variables.push(info);\n var_values.push(kind);\n assert_eq!(variables.len(), var_values.len());\n CanonicalVar::new(variables.len() - 1)\n })\n }\n }\n\n \/\/\/ Given a type variable `ty_var` of the given kind, first check\n \/\/\/ if `ty_var` is bound to anything; if so, canonicalize\n \/\/\/ *that*. Otherwise, create a new canonical variable for\n \/\/\/ `ty_var`.\n fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {\n let infcx = self.infcx.expect(\"encountered ty-var without infcx\");\n let bound_to = infcx.shallow_resolve(ty_var);\n if bound_to != ty_var {\n self.fold_ty(bound_to)\n } else {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Ty(ty_kind),\n };\n let cvar = self.canonical_var(info, ty_var.into());\n self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))\n }\n }\n}\n<commit_msg>canonicalizer.rs: rustfmt<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the \"canonicalizer\" itself.\n\/\/!\n\/\/! For an overview of what canonicaliation is and how it fits into\n\/\/! rustc, check out the [chapter in the rustc guide][c].\n\/\/!\n\/\/! [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html\n\nuse infer::canonical::{\n Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, Canonicalized,\n SmallCanonicalVarValues,\n};\nuse infer::InferCtxt;\nuse std::sync::atomic::Ordering;\nuse ty::fold::{TypeFoldable, TypeFolder};\nuse ty::subst::Kind;\nuse ty::{self, CanonicalVar, Lift, List, Ty, TyCtxt, TypeFlags};\n\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::Idx;\nuse smallvec::SmallVec;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n \/\/\/ Canonicalizes a query value `V`. When we canonicalize a query,\n \/\/\/ we not only canonicalize unbound inference variables, but we\n \/\/\/ *also* replace all free regions whatsoever. 
So for example a\n \/\/\/ query like `T: Trait<'static>` would be canonicalized to\n \/\/\/\n \/\/\/ ```text\n \/\/\/ T: Trait<'?0>\n \/\/\/ ```\n \/\/\/\n \/\/\/ with a mapping M that maps `'?0` to `'static`.\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#canonicalizing-the-query\n pub fn canonicalize_query<V>(\n &self,\n value: &V,\n var_values: &mut SmallCanonicalVarValues<'tcx>,\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n self.tcx\n .sess\n .perf_stats\n .queries_canonicalized\n .fetch_add(1, Ordering::Relaxed);\n\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: true,\n other_free_regions: true,\n },\n var_values,\n )\n }\n\n \/\/\/ Canonicalizes a query *response* `V`. When we canonicalize a\n \/\/\/ query response, we only canonicalize unbound inference\n \/\/\/ variables, and we leave other free regions alone. So,\n \/\/\/ continuing with the example from `canonicalize_query`, if\n \/\/\/ there was an input query `T: Trait<'static>`, it would have\n \/\/\/ been canonicalized to\n \/\/\/\n \/\/\/ ```text\n \/\/\/ T: Trait<'?0>\n \/\/\/ ```\n \/\/\/\n \/\/\/ with a mapping M that maps `'?0` to `'static`. But if we found that there\n \/\/\/ exists only one possible impl of `Trait`, and it looks like\n \/\/\/\n \/\/\/ impl<T> Trait<'static> for T { .. }\n \/\/\/\n \/\/\/ then we would prepare a query result R that (among other\n \/\/\/ things) includes a mapping to `'?0 := 'static`. When\n \/\/\/ canonicalizing this query result R, we would leave this\n \/\/\/ reference to `'static` alone.\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#canonicalizing-the-query-result\n pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n let mut var_values = SmallVec::new();\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: false,\n other_free_regions: false,\n },\n &mut var_values,\n )\n }\n\n \/\/\/ A hacky variant of `canonicalize_query` that does not\n \/\/\/ canonicalize `'static`. Unfortunately, the existing leak\n \/\/\/ check treaks `'static` differently in some cases (see also\n \/\/\/ #33684), so if we are performing an operation that may need to\n \/\/\/ prove \"leak-check\" related things, we leave `'static`\n \/\/\/ alone.\n \/\/\/\n \/\/\/ FIXME(#48536) -- once we have universes, we can remove this and just use\n \/\/\/ `canonicalize_query`.\n pub fn canonicalize_hr_query_hack<V>(\n &self,\n value: &V,\n var_values: &mut SmallCanonicalVarValues<'tcx>,\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n self.tcx\n .sess\n .perf_stats\n .queries_canonicalized\n .fetch_add(1, Ordering::Relaxed);\n\n Canonicalizer::canonicalize(\n value,\n Some(self),\n self.tcx,\n CanonicalizeRegionMode {\n static_region: false,\n other_free_regions: true,\n },\n var_values,\n )\n }\n}\n\n\/\/\/ If this flag is true, then all free regions will be replaced with\n\/\/\/ a canonical var. This is used to make queries as generic as\n\/\/\/ possible. 
For example, the query `F: Foo<'static>` would be\n\/\/\/ canonicalized to `F: Foo<'0>`.\nstruct CanonicalizeRegionMode {\n static_region: bool,\n other_free_regions: bool,\n}\n\nimpl CanonicalizeRegionMode {\n fn any(&self) -> bool {\n self.static_region || self.other_free_regions\n }\n}\n\nstruct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n variables: SmallVec<[CanonicalVarInfo; 8]>,\n var_values: &'cx mut SmallCanonicalVarValues<'tcx>,\n \/\/ Note that indices is only used once `var_values` is big enough to be\n \/\/ heap-allocated.\n indices: FxHashMap<Kind<'tcx>, CanonicalVar>,\n canonicalize_region_mode: CanonicalizeRegionMode,\n needs_canonical_flags: TypeFlags,\n}\n\nimpl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {\n fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {\n self.tcx\n }\n\n fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {\n match *r {\n ty::ReLateBound(..) => {\n \/\/ leave bound regions alone\n r\n }\n\n ty::ReVar(vid) => {\n let r = self.infcx\n .unwrap()\n .borrow_region_constraints()\n .opportunistic_resolve_var(self.tcx, vid);\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n debug!(\n \"canonical: region var found with vid {:?}, \\\n opportunistically resolved to {:?}\",\n vid, r\n );\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n }\n\n ty::ReStatic => {\n if self.canonicalize_region_mode.static_region {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n } else {\n r\n }\n }\n\n ty::ReEarlyBound(..)\n | ty::ReFree(_)\n | ty::ReScope(_)\n | ty::RePlaceholder(..)\n | ty::ReEmpty\n | ty::ReErased => {\n if self.canonicalize_region_mode.other_free_regions {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Region,\n };\n let cvar = self.canonical_var(info, r.into());\n self.tcx().mk_region(ty::ReCanonical(cvar))\n } else {\n r\n }\n }\n\n ty::ReClosureBound(..) | ty::ReCanonical(_) => {\n bug!(\"canonical region encountered during canonicalization\")\n }\n }\n }\n\n fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {\n match t.sty {\n ty::Infer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),\n\n ty::Infer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),\n\n ty::Infer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),\n\n ty::Infer(ty::FreshTy(_))\n | ty::Infer(ty::FreshIntTy(_))\n | ty::Infer(ty::FreshFloatTy(_)) => {\n bug!(\"encountered a fresh type during canonicalization\")\n }\n\n ty::Infer(ty::CanonicalTy(_)) => {\n bug!(\"encountered a canonical type during canonicalization\")\n }\n\n ty::Closure(..)\n | ty::Generator(..)\n | ty::GeneratorWitness(..)\n | ty::Bool\n | ty::Char\n | ty::Int(..)\n | ty::Uint(..)\n | ty::Float(..)\n | ty::Adt(..)\n | ty::Str\n | ty::Error\n | ty::Array(..)\n | ty::Slice(..)\n | ty::RawPtr(..)\n | ty::Ref(..)\n | ty::FnDef(..)\n | ty::FnPtr(_)\n | ty::Dynamic(..)\n | ty::Never\n | ty::Tuple(..)\n | ty::Projection(..)\n | ty::UnnormalizedProjection(..)\n | ty::Foreign(..)\n | ty::Param(..)\n | ty::Opaque(..) 
=> {\n if t.flags.intersects(self.needs_canonical_flags) {\n t.super_fold_with(self)\n } else {\n t\n }\n }\n }\n }\n}\n\nimpl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {\n \/\/\/ The main `canonicalize` method, shared impl of\n \/\/\/ `canonicalize_query` and `canonicalize_response`.\n fn canonicalize<V>(\n value: &V,\n infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n canonicalize_region_mode: CanonicalizeRegionMode,\n var_values: &'cx mut SmallCanonicalVarValues<'tcx>,\n ) -> Canonicalized<'gcx, V>\n where\n V: TypeFoldable<'tcx> + Lift<'gcx>,\n {\n debug_assert!(\n !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),\n \"canonicalizing a canonical value: {:?}\",\n value,\n );\n\n let needs_canonical_flags = if canonicalize_region_mode.any() {\n TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX\n } else {\n TypeFlags::KEEP_IN_LOCAL_TCX\n };\n\n let gcx = tcx.global_tcx();\n\n \/\/ Fast path: nothing that needs to be canonicalized.\n if !value.has_type_flags(needs_canonical_flags) {\n let out_value = gcx.lift(value).unwrap();\n let canon_value = Canonical {\n variables: List::empty(),\n value: out_value,\n };\n return canon_value;\n }\n\n let mut canonicalizer = Canonicalizer {\n infcx,\n tcx,\n canonicalize_region_mode,\n needs_canonical_flags,\n variables: SmallVec::new(),\n var_values,\n indices: FxHashMap::default(),\n };\n let out_value = value.fold_with(&mut canonicalizer);\n\n \/\/ Once we have canonicalized `out_value`, it should not\n \/\/ contain anything that ties it to this inference context\n \/\/ anymore, so it should live in the global arena.\n let out_value = gcx.lift(&out_value).unwrap_or_else(|| {\n bug!(\n \"failed to lift `{:?}`, canonicalized from `{:?}`\",\n out_value,\n value\n )\n });\n\n let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables);\n\n Canonical {\n variables: canonical_variables,\n value: out_value,\n }\n }\n\n \/\/\/ Creates a canonical variable replacing `kind` from the input,\n \/\/\/ or returns an existing variable if `kind` has already been\n \/\/\/ seen. `kind` is expected to be an unbound variable (or\n \/\/\/ potentially a free region).\n fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {\n let Canonicalizer {\n variables,\n var_values,\n indices,\n ..\n } = self;\n\n \/\/ This code is hot. `variables` and `var_values` are usually small\n \/\/ (fewer than 8 elements ~95% of the time). They are SmallVec's to\n \/\/ avoid allocations in those cases. We also don't use `indices` to\n \/\/ determine if a kind has been seen before until the limit of 8 has\n \/\/ been exceeded, to also avoid allocations for `indices`.\n if !var_values.spilled() {\n \/\/ `var_values` is stack-allocated. `indices` isn't used yet. Do a\n \/\/ direct linear search of `var_values`.\n if let Some(idx) = var_values.iter().position(|&k| k == kind) {\n \/\/ `kind` is already present in `var_values`.\n CanonicalVar::new(idx)\n } else {\n \/\/ `kind` isn't present in `var_values`. Append it. 
Likewise\n \/\/ for `info` and `variables`.\n variables.push(info);\n var_values.push(kind);\n assert_eq!(variables.len(), var_values.len());\n\n \/\/ If `var_values` has become big enough to be heap-allocated,\n \/\/ fill up `indices` to facilitate subsequent lookups.\n if var_values.spilled() {\n assert!(indices.is_empty());\n *indices = var_values\n .iter()\n .enumerate()\n .map(|(i, &kind)| (kind, CanonicalVar::new(i)))\n .collect();\n }\n \/\/ The cv is the index of the appended element.\n CanonicalVar::new(var_values.len() - 1)\n }\n } else {\n \/\/ `var_values` is large. Do a hashmap search via `indices`.\n *indices.entry(kind).or_insert_with(|| {\n variables.push(info);\n var_values.push(kind);\n assert_eq!(variables.len(), var_values.len());\n CanonicalVar::new(variables.len() - 1)\n })\n }\n }\n\n \/\/\/ Given a type variable `ty_var` of the given kind, first check\n \/\/\/ if `ty_var` is bound to anything; if so, canonicalize\n \/\/\/ *that*. Otherwise, create a new canonical variable for\n \/\/\/ `ty_var`.\n fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {\n let infcx = self.infcx.expect(\"encountered ty-var without infcx\");\n let bound_to = infcx.shallow_resolve(ty_var);\n if bound_to != ty_var {\n self.fold_ty(bound_to)\n } else {\n let info = CanonicalVarInfo {\n kind: CanonicalVarKind::Ty(ty_kind),\n };\n let cvar = self.canonical_var(info, ty_var.into());\n self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #17<commit_after>fn to_word_under10(n: uint) -> ~str {\n return match n {\n 0 => ~\"zero\",\n 1 => ~\"one\",\n 2 => ~\"two\",\n 3 => ~\"three\",\n 4 => ~\"four\",\n 5 => ~\"five\",\n 6 => ~\"six\",\n 7 => ~\"seven\",\n 8 => ~\"eight\",\n 9 => ~\"nine\",\n _ => fail\n };\n}\n\nfn to_word_under20(n: uint) -> ~str {\n assert n < 20;\n if n < 10 { return to_word_under10(n); }\n return match n {\n 10 => ~\"ten\",\n 11 => ~\"eleven\",\n 12 => ~\"twelve\",\n 13 => ~\"thirteen\",\n 14 => ~\"fourteen\",\n 15 => ~\"fifteen\",\n 16 => ~\"sixteen\",\n 17 => ~\"seventeen\",\n 18 => ~\"eighteen\",\n 19 => ~\"nineteen\",\n _ => fail\n };\n}\n\nfn to_word_under100(n: uint) -> ~str {\n assert n < 100;\n if n < 20 { return to_word_under20(n); }\n\n let prefix = match n \/ 10 {\n 0 | 1 => fail,\n 2 => ~\"twenty\",\n 3 => ~\"thirty\",\n 4 => ~\"forty\",\n 5 => ~\"fifty\",\n 6 => ~\"sixty\",\n 7 => ~\"seventy\",\n 8 => ~\"eighty\",\n 9 => ~\"ninety\",\n _ => fail\n };\n if n % 10 != 0 {\n return prefix + \"-\" + to_word_under10(n % 10);\n } else {\n return prefix;\n }\n}\n\nfn to_word_under1000(n: uint) -> ~str {\n assert n < 1000;\n if n < 100 { return to_word_under100(n); }\n\n let prefix = to_word_under10(n \/ 100) + \" hundred\";\n if n % 100 != 0 {\n return prefix + \" and \" + to_word_under100(n % 100);\n } else {\n return prefix;\n }\n}\n\nfn to_word(n: uint) -> ~str {\n assert n <= 1000;\n if n < 1000 { return to_word_under1000(n); }\n return ~\"one thousand\";\n}\n\nfn main() {\n let mut sum = 0;\n for 1000.timesi |i| {\n let n = i + 1;\n io::println(fmt!(\"%4u => %s\", n, to_word(n)));\n for str::each(to_word(n)) |b| {\n if b != ('-' as u8) && b != (' ' as u8) {\n sum += 1;\n }\n }\n }\n io::println(fmt!(\"%u\", sum));\n}<|endoftext|>"} {"text":"<commit_before>extern crate hyper;\nextern crate time;\n\nuse hyper::Client;\nuse hyper::header::Connection;\nuse time::*;\nuse std::thread;\nuse std::sync::{Arc, Mutex};\n\nstruct Request {\n elapsed_time: 
f64\n}\n\nimpl Request{\n fn new(elapsed_time: f64) -> Request{\n Request {\n elapsed_time: elapsed_time,\n }\n }\n}\n\nfn main() {\n let requests = Arc::new(Mutex::new(Vec::new()));\n let threads = Arc::new(Mutex::new(Vec::new()));\n\n for _x in 0..100 {\n println!(\"Spawning thread: {}\", _x);\n\n let mut client = Client::new();\n let thread_items = requests.clone();\n\n let handle = thread::spawn(move || {\n for _y in 0..100 {\n println!(\"Firing requests: {}\", _y);\n\n let start = time::precise_time_s();\n \n let _res = client.get(\"http:\/\/jacob.uk.com\")\n .header(Connection::close()) \n .send().unwrap();\n\n let end = time::precise_time_s();\n \n thread_items.lock().unwrap().push((Request::new(end-start)));\n }\n });\n\n threads.lock().unwrap().push((handle));\n }\n\n for t in threads.iter(){\n println!(\"Hello World\");\n }\n}\n<commit_msg>lock the thread before we unwrap and iterate over it<commit_after>extern crate hyper;\nextern crate time;\n\nuse hyper::Client;\nuse hyper::header::Connection;\nuse time::*;\nuse std::thread;\nuse std::sync::{Arc, Mutex};\n\nstruct Request {\n elapsed_time: f64\n}\n\nimpl Request{\n fn new(elapsed_time: f64) -> Request{\n Request {\n elapsed_time: elapsed_time,\n }\n }\n}\n\nfn main() {\n let requests = Arc::new(Mutex::new(Vec::new()));\n let threads = Arc::new(Mutex::new(Vec::new()));\n\n for _x in 0..100 {\n println!(\"Spawning thread: {}\", _x);\n\n let mut client = Client::new();\n let thread_items = requests.clone();\n\n let handle = thread::spawn(move || {\n for _y in 0..100 {\n println!(\"Firing requests: {}\", _y);\n\n let start = time::precise_time_s();\n \n let _res = client.get(\"http:\/\/jacob.uk.com\")\n .header(Connection::close()) \n .send().unwrap();\n\n let end = time::precise_time_s();\n \n thread_items.lock().unwrap().push((Request::new(end-start)));\n }\n });\n\n threads.lock().unwrap().push((handle));\n }\n\n for t in threads.lock().unwrap().iter() {\n \/\/error: cannot move out of borrowed content\n \/\/t.join();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Basic map generation without numbers<commit_after><|endoftext|>"} {"text":"<commit_before>trait Normalize {\n fn normalize(&self) -> Self;\n}\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\nenum Const {\n Data,\n Codata,\n}\n\nimpl Normalize for Const {\n fn normalize(&self) -> Const {\n *self\n }\n}\n\nimpl ToString for Const {\n fn to_string(&self) -> String {\n match *self {\n Const::Data => \"data\".to_string(),\n Const::Codata => \"codata\".to_string(),\n }\n }\n}\n\n#[derive(Clone, Debug, Eq, PartialEq)]\nstruct Var {\n name: String,\n idx: i32,\n}\n\nimpl Var {\n pub fn new(name: &str, idx: i32) -> Var {\n Var {\n name: name.to_string(),\n idx: idx,\n }\n }\n}\n\nimpl Normalize for Var {\n fn normalize(&self) -> Var {\n self.clone()\n }\n}\n\nimpl ToString for Var {\n fn to_string(&self) -> String {\n if self.idx == 0 {\n self.name.clone()\n } else {\n self.name.clone() + \"@\" + &self.idx.to_string()\n }\n }\n}\n\n#[derive(Clone, Debug, Eq, PartialEq)]\nenum Expr {\n \/\/ Type system constants\n Const(Const),\n \/\/ Bound variables\n Var(Var),\n \/\/ Lambda\n Lam(Var, Box<Expr>, Box<Expr>),\n \/\/ \"forall\"\n Pi(Var, Box<Expr>, Box<Expr>),\n \/\/ Function application\n App(Box<Expr>, Box<Expr>),\n}\n\nimpl Expr {\n pub fn constant(constant: Const) -> Expr {\n Expr::Const(constant)\n }\n pub fn var(v: &Var) -> Expr {\n Expr::Var(v.clone())\n }\n pub fn lam(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::Lam(var, Box::new(ty), Box::new(body))\n 
}\n pub fn pi(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::Pi(var, Box::new(ty), Box::new(body))\n }\n pub fn app(f: Expr, arg: Expr) -> Expr {\n Expr::App(Box::new(f), Box::new(arg))\n }\n\n pub fn is_constant(&self) -> bool {\n match *self {\n Expr::Const(_) => true,\n _ => false,\n }\n }\n pub fn is_var(&self) -> bool {\n match *self {\n Expr::Var(_) => true,\n _ => false,\n }\n }\n pub fn is_lam(&self) -> bool {\n match *self {\n Expr::Lam(_, _, _) => true,\n _ => false,\n }\n }\n pub fn is_pi(&self) -> bool {\n match *self {\n Expr::Pi(_, _, _) => true,\n _ => false,\n }\n }\n pub fn is_app(&self) -> bool {\n match *self {\n Expr::App(_, _) => true,\n _ => false,\n }\n }\n}\n\nfn constant(c: Const) -> Expr {\n Expr::constant(c)\n}\nfn var(v: &Var) -> Expr {\n Expr::var(v)\n}\nfn lam(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::lam(var, ty, body)\n}\nfn pi(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::pi(var, ty, body)\n}\nfn app(f: Expr, arg: Expr) -> Expr {\n Expr::app(f, arg)\n}\n\nfn replace(val: &Var, with: &Expr, body: &Expr) -> Expr {\n use Expr::*;\n match *body {\n Const(ref constant) => body.clone(),\n Var(ref var) => {\n if *var == *val {\n with.clone()\n } else {\n body.clone()\n }\n }\n Lam(ref var, ref ty, ref body) => {\n \/\/ TODO: Check var == val\n let ty = Box::new(replace(val, with, &ty));\n let body = Box::new(replace(val, with, &body));\n Lam(var.clone(), ty, body)\n }\n Pi(ref var, ref ty, ref body) => {\n \/\/ TODO: Check var == val\n let ty = Box::new(replace(val, with, &ty));\n let body = Box::new(replace(val, with, &body));\n Pi(var.clone(), ty, body)\n }\n App(ref f, ref arg) => {\n let f = Box::new(replace(val, with, &f));\n let arg = Box::new(replace(val, with, &arg));\n App(f, arg)\n }\n }\n}\n\nimpl Normalize for Expr {\n fn normalize(&self) -> Expr {\n use Expr::*;\n \/\/ println!(\"normalize {}\", self.to_string());\n match *self {\n Const(ref constant) => Const(constant.normalize()),\n Var(ref var) => Var(var.normalize()),\n Lam(ref var, ref ty, ref body) => {\n let l = &Lam(var.normalize(),\n Box::new(ty.normalize()),\n Box::new(body.normalize()));\n l.clone()\n }\n Pi(ref var, ref ty, ref body) => {\n let p = &Pi(var.normalize(),\n Box::new(ty.normalize()),\n Box::new(body.normalize()));\n p.clone()\n }\n App(ref f, ref arg) => {\n let f = f.normalize();\n let arg = arg.normalize();\n if let Lam(var, ty, body) = f {\n replace(&var, &arg, &body)\n } else if let Pi(var, ty, body) = f {\n replace(&var, &arg, &body)\n } else {\n panic!(\"f isn't a function {}\", f.to_string())\n }\n }\n }\n }\n}\n\nimpl ToString for Expr {\n fn to_string(&self) -> String {\n use Expr::*;\n match *self {\n Const(ref constant) => constant.to_string(),\n Var(ref var) => var.to_string(),\n Lam(ref var, ref ty, ref body) => {\n \"(\".to_string() + &var.to_string() + \" : \" + &ty.to_string() + \") -> \" +\n &body.to_string()\n }\n Pi(ref var, ref ty, ref body) => {\n \"forall (\".to_string() + &var.to_string() + \" : \" + &ty.to_string() +\n \") -> \" + &body.to_string()\n }\n App(ref f, ref arg) => {\n if f.is_lam() || f.is_pi() {\n \"(\".to_string() + &f.to_string() + \") \" + &arg.to_string()\n } else {\n f.to_string() + \" \" + &arg.to_string()\n }\n }\n }\n }\n}\n\nfn main() {\n println!(\"{}\", Const::Data.to_string());\n let a = Var::new(\"a\", 0);\n let x = Var::new(\"x\", 0);\n let expra = var(&a);\n let exprx = var(&x);\n println!(\"{}\", x.to_string());\n let id = pi(a.clone(), constant(Const::Data), lam(x, expra, exprx));\n println!(\"{}\", 
id.to_string());\n println!(\"{}\", id.normalize().to_string());\n let apply_int = app(id.clone(), var(&Var::new(\"int\", 0)));\n println!(\"{}\", apply_int.to_string());\n println!(\"{}\", apply_int.normalize().to_string());\n let apply_id = app(app(id, var(&Var::new(\"int\", 0))), var(&Var::new(\"1\", 0)));\n println!(\"{}\", apply_id.to_string());\n println!(\"{}\", apply_id.normalize().to_string());\n}\n\n#[test]\nfn test_to_string() {\n let codata = Const::Codata;\n println!(\"{}\", codata.to_string());\n assert_eq!(\"codata\", codata.to_string());\n let a = Var::new(\"a\", 0);\n let x = Var::new(\"x\", 0);\n let expra = var(&a);\n let exprx = var(&x);\n println!(\"{}\", x.to_string());\n assert_eq!(\"x\", x.to_string());\n let id = pi(a.clone(), constant(Const::Data), lam(x, expra, exprx));\n println!(\"{}\", id.to_string());\n assert_eq!(\"forall (a : data) -> (x : a) -> x\", id.to_string());\n let apply_id = app(app(id, var(&Var::new(\"int\", 0))), var(&Var::new(\"1\", 0)));\n println!(\"{}\", apply_id.to_string());\n assert_eq!(\"(forall (a : data) -> (x : a) -> x) int 1\",\n apply_id.to_string());\n}\n<commit_msg>Begin testing applying id to itself.<commit_after>trait Normalize {\n fn normalize(&self) -> Self;\n}\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\nenum Const {\n Data,\n Codata,\n}\n\nimpl Normalize for Const {\n fn normalize(&self) -> Const {\n *self\n }\n}\n\nimpl ToString for Const {\n fn to_string(&self) -> String {\n match *self {\n Const::Data => \"data\".to_string(),\n Const::Codata => \"codata\".to_string(),\n }\n }\n}\n\n#[derive(Clone, Debug, Eq, PartialEq)]\nstruct Var {\n name: String,\n idx: i32,\n}\n\nimpl Var {\n pub fn new(name: &str, idx: i32) -> Var {\n Var {\n name: name.to_string(),\n idx: idx,\n }\n }\n\n pub fn shift(&self) -> Var {\n Var {\n name: self.name.clone(),\n idx: self.idx + 1,\n }\n }\n}\n\nimpl Normalize for Var {\n fn normalize(&self) -> Var {\n self.clone()\n }\n}\n\nimpl ToString for Var {\n fn to_string(&self) -> String {\n if self.name.len() == 0 {\n \"\".to_string()\n } else if self.idx == 0 {\n self.name.clone()\n } else {\n self.name.clone() + \"@\" + &self.idx.to_string()\n }\n }\n}\n\n#[derive(Clone, Debug, Eq, PartialEq)]\nenum Expr {\n \/\/ Type system constants\n Const(Const),\n \/\/ Bound variables\n Var(Var),\n \/\/ Lambda\n Lam(Var, Box<Expr>, Box<Expr>),\n \/\/ \"forall\"\n Pi(Var, Box<Expr>, Box<Expr>),\n \/\/ Function application\n App(Box<Expr>, Box<Expr>),\n}\n\nimpl Expr {\n pub fn constant(constant: Const) -> Expr {\n Expr::Const(constant)\n }\n pub fn var(v: &Var) -> Expr {\n Expr::Var(v.clone())\n }\n pub fn lam(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::Lam(var, Box::new(ty), Box::new(body))\n }\n pub fn pi(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::Pi(var, Box::new(ty), Box::new(body))\n }\n pub fn app(f: Expr, arg: Expr) -> Expr {\n Expr::App(Box::new(f), Box::new(arg))\n }\n\n pub fn is_constant(&self) -> bool {\n match *self {\n Expr::Const(_) => true,\n _ => false,\n }\n }\n pub fn is_var(&self) -> bool {\n match *self {\n Expr::Var(_) => true,\n _ => false,\n }\n }\n pub fn is_lam(&self) -> bool {\n match *self {\n Expr::Lam(_, _, _) => true,\n _ => false,\n }\n }\n pub fn is_pi(&self) -> bool {\n match *self {\n Expr::Pi(_, _, _) => true,\n _ => false,\n }\n }\n pub fn is_app(&self) -> bool {\n match *self {\n Expr::App(_, _) => true,\n _ => false,\n }\n }\n}\n\nfn constant(c: Const) -> Expr {\n Expr::constant(c)\n}\nfn var(v: &Var) -> Expr {\n Expr::var(v)\n}\nfn lam(var: Var, ty: Expr, 
body: Expr) -> Expr {\n Expr::lam(var, ty, body)\n}\nfn pi(var: Var, ty: Expr, body: Expr) -> Expr {\n Expr::pi(var, ty, body)\n}\nfn app(f: Expr, arg: Expr) -> Expr {\n Expr::app(f, arg)\n}\n\nfn replace(val: &Var, with: &Expr, body: &Expr) -> Expr {\n use Expr::*;\n match *body {\n Const(ref constant) => body.clone(),\n Var(ref var) => {\n if *var == *val {\n with.clone()\n } else {\n body.clone()\n }\n }\n Lam(ref var, ref ty, ref body) => {\n \/\/ TODO: Check var == val\n \/\/ let val = if *var == *val {\n \/\/ &val\n \/\/ } else {\n \/\/ &val\n \/\/ };\n let ty = Box::new(replace(val, with, &ty));\n let body = Box::new(replace(val, with, &body));\n Lam(var.clone(), ty, body)\n }\n Pi(ref var, ref ty, ref body) => {\n \/\/ TODO: Check var == val\n let ty = Box::new(replace(val, with, &ty));\n let body = Box::new(replace(val, with, &body));\n Pi(var.clone(), ty, body)\n }\n App(ref f, ref arg) => {\n let f = Box::new(replace(val, with, &f));\n let arg = Box::new(replace(val, with, &arg));\n App(f, arg)\n }\n }\n}\n\nimpl Normalize for Expr {\n fn normalize(&self) -> Expr {\n use Expr::*;\n \/\/ println!(\"normalize {}\", self.to_string());\n match *self {\n Const(ref constant) => Const(constant.normalize()),\n Var(ref var) => Var(var.normalize()),\n Lam(ref var, ref ty, ref body) => {\n let l = &Lam(var.normalize(),\n Box::new(ty.normalize()),\n Box::new(body.normalize()));\n l.clone()\n }\n Pi(ref var, ref ty, ref body) => {\n let p = &Pi(var.normalize(),\n Box::new(ty.normalize()),\n Box::new(body.normalize()));\n p.clone()\n }\n App(ref f, ref arg) => {\n let f = f.normalize();\n let arg = arg.normalize();\n if let Lam(var, ty, body) = f {\n replace(&var, &arg, &body)\n } else if let Pi(var, ty, body) = f {\n replace(&var, &arg, &body)\n } else {\n panic!(\"f isn't a function {}\", f.to_string())\n }\n }\n }\n }\n}\n\nimpl ToString for Expr {\n fn to_string(&self) -> String {\n use Expr::*;\n match *self {\n Const(ref constant) => constant.to_string(),\n Var(ref var) => var.to_string(),\n Lam(ref var, ref ty, ref body) => {\n let vars = var.to_string();\n if vars.len() == 0 {\n ty.to_string() + \" -> \" + &body.to_string()\n } else {\n \"(\".to_string() + &var.to_string() + \" : \" + &ty.to_string() +\n \") -> \" + &body.to_string()\n }\n }\n Pi(ref var, ref ty, ref body) => {\n let vars = var.to_string();\n if vars.len() == 0 {\n \"forall (\".to_string() + &ty.to_string() + \") -> \" + &body.to_string()\n } else {\n \"forall (\".to_string() + &var.to_string() + \" : \" +\n &ty.to_string() + \") -> \" + &body.to_string()\n }\n }\n App(ref f, ref arg) => {\n let mut s = String::new();\n if f.is_lam() || f.is_pi() {\n s.push('(');\n s.push_str(&f.to_string()[..]);\n s.push(')');\n } else {\n s.push_str(&f.to_string()[..]);\n }\n s.push(' ');\n if arg.is_lam() || arg.is_pi() {\n s.push('(');\n s.push_str(&arg.to_string()[..]);\n s.push(')');\n } else {\n s.push_str(&arg.to_string()[..]);\n }\n \/\/ + \" \".to_string() + (if arg.is_lam() || arg.is_pi() {\n \/\/ \"(\".to_string() + &arg.to_string() + \")\"\n \/\/ } else {\n \/\/ &arg.to_string()\n \/\/ })\n s\n }\n }\n }\n}\n\nfn main() {\n \/\/ println!(\"{}\", Const::Data.to_string());\n \/\/ let a = Var::new(\"a\", 0);\n \/\/ let x = Var::new(\"x\", 0);\n \/\/ let expra = var(&a);\n \/\/ let exprx = var(&x);\n \/\/ println!(\"{}\", x.to_string());\n \/\/ let id = pi(a.clone(), constant(Const::Data), lam(x, expra, exprx));\n \/\/ println!(\"{}\", id.to_string());\n \/\/ println!(\"{}\", id.normalize().to_string());\n \/\/ let apply_int 
= app(id.clone(), var(&Var::new(\"int\", 0)));\n \/\/ println!(\"{}\", apply_int.to_string());\n \/\/ println!(\"{}\", apply_int.normalize().to_string());\n \/\/ let apply_id = app(app(id, var(&Var::new(\"int\", 0))), var(&Var::new(\"1\", 0)));\n \/\/ println!(\"{}\", apply_id.to_string());\n \/\/ println!(\"{}\", apply_id.normalize().to_string());\n\n \/\/ ( \\(id : forall (a : *) -> a -> a)\n \/\/ -> id (forall (a : *) -> a -> a) id -- Apply the identity function to itself\n \/\/ )\n \/\/\n \/\/ -- id\n \/\/ (\\(a : *) -> \\(x : a) -> x)\n\n let a = Var::new(\"a\", 0);\n let x = Var::new(\"x\", 0);\n let id = Var::new(\"id\", 0);\n let unused = Var::new(\"\", 0);\n let ty = pi(a.clone(),\n constant(Const::Data),\n lam(unused, Expr::var(&a), Expr::var(&a)));\n println!(\"{}\", ty.to_string());\n let id_impl = pi(a.clone(),\n constant(Const::Data),\n lam(x.clone(), Expr::var(&a), Expr::var(&x)));\n println!(\"{}\", id_impl.to_string());\n let id2 = lam(id.clone(),\n ty.clone(),\n app(app(Expr::var(&id.clone()), ty.clone()),\n Expr::var(&id.clone())));\n println!(\"{}\", id2.to_string());\n let id2app = app(lam(id.clone(),\n ty.clone(),\n app(app(Expr::var(&id.clone()), ty.clone()),\n Expr::var(&id.clone()))),\n id_impl.clone());\n println!(\"{}\", id2app.to_string());\n println!(\"{}\", id2app.normalize().to_string());\n}\n\n#[test]\nfn test_to_string() {\n let codata = Const::Codata;\n println!(\"{}\", codata.to_string());\n assert_eq!(\"codata\", codata.to_string());\n let a = Var::new(\"a\", 0);\n let x = Var::new(\"x\", 0);\n let expra = var(&a);\n let exprx = var(&x);\n println!(\"{}\", x.to_string());\n assert_eq!(\"x\", x.to_string());\n let id = pi(a.clone(), constant(Const::Data), lam(x, expra, exprx));\n println!(\"{}\", id.to_string());\n assert_eq!(\"forall (a : data) -> (x : a) -> x\", id.to_string());\n let apply_id = app(app(id, var(&Var::new(\"int\", 0))), var(&Var::new(\"1\", 0)));\n println!(\"{}\", apply_id.to_string());\n assert_eq!(\"(forall (a : data) -> (x : a) -> x) int 1\",\n apply_id.to_string());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 Matthew Collins\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![recursion_limit=\"300\"]\n#![feature(const_fn)]\n\nextern crate sdl2;\nextern crate image;\nextern crate time;\nextern crate byteorder;\nextern crate serde_json;\nextern crate openssl;\nextern crate hyper;\nextern crate flate2;\nextern crate rand;\nextern crate rustc_serialize;\nextern crate cgmath;\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate lazy_static;\nextern crate collision;\npub extern crate steven_blocks;\nextern crate steven_shared as shared;\n\n#[macro_use]\npub mod macros;\n\npub mod ecs;\npub mod protocol;\npub mod format;\npub mod nbt;\npub mod item;\npub mod gl;\npub mod types;\npub mod resources;\npub mod render;\npub mod ui;\npub mod screen;\npub mod settings;\n#[macro_use]\npub mod console;\npub mod server;\npub mod world;\npub mod chunk_builder;\npub mod auth;\npub 
mod model;\npub mod entity;\n\nuse std::sync::{Arc, RwLock, Mutex};\nuse std::marker::PhantomData;\nuse std::thread;\nuse std::sync::mpsc;\n\nconst CL_BRAND: console::CVar<String> = console::CVar {\n ty: PhantomData,\n name: \"cl_brand\",\n description: \"cl_brand has the value of the clients current 'brand'. e.g. \\\"Steven\\\" or \\\n \\\"Vanilla\\\"\",\n mutable: false,\n serializable: false,\n default: &|| \"Steven\".to_owned(),\n};\n\npub struct Game {\n renderer: render::Renderer,\n screen_sys: screen::ScreenSystem,\n resource_manager: Arc<RwLock<resources::Manager>>,\n console: Arc<Mutex<console::Console>>,\n should_close: bool,\n\n server: server::Server,\n focused: bool,\n chunk_builder: chunk_builder::ChunkBuilder,\n\n connect_reply: Option<mpsc::Receiver<Result<server::Server, protocol::Error>>>,\n}\n\nimpl Game {\n pub fn connect_to(&mut self, address: &str) {\n let (tx, rx) = mpsc::channel();\n self.connect_reply = Some(rx);\n let address = address.to_owned();\n let resources = self.resource_manager.clone();\n let console = self.console.clone();\n thread::spawn(move || {\n tx.send(server::Server::connect(resources, console, &address)).unwrap();\n });\n }\n\n pub fn tick(&mut self, delta: f64) {\n if !self.server.is_connected() {\n self.renderer.camera.yaw += 0.005 * delta;\n if self.renderer.camera.yaw > ::std::f64::consts::PI * 2.0 {\n self.renderer.camera.yaw = 0.0;\n }\n }\n\n if let Some(disconnect_reason) = self.server.disconnect_reason.take() {\n self.screen_sys.replace_screen(Box::new(screen::ServerList::new(\n Some(disconnect_reason)\n )));\n }\n if !self.server.is_connected() {\n self.focused = false;\n }\n\n let mut clear_reply = false;\n if let Some(ref recv) = self.connect_reply {\n if let Ok(server) = recv.try_recv() {\n clear_reply = true;\n match server {\n Ok(val) => {\n self.screen_sys.pop_screen();\n self.focused = true;\n self.server.remove(&mut self.renderer);\n self.server = val;\n },\n Err(err) => {\n let msg = match err {\n protocol::Error::Disconnect(val) => val,\n err => {\n let mut msg = format::TextComponent::new(&format!(\"{}\", err));\n msg.modifier.color = Some(format::Color::Red);\n format::Component::Text(msg)\n },\n };\n self.screen_sys.replace_screen(Box::new(screen::ServerList::new(\n Some(msg)\n )));\n }\n }\n }\n }\n if clear_reply {\n self.connect_reply = None;\n }\n }\n}\n\nfn main() {\n let con = Arc::new(Mutex::new(console::Console::new()));\n {\n let mut con = con.lock().unwrap();\n con.register(CL_BRAND);\n auth::register_vars(&mut con);\n settings::register_vars(&mut con);\n con.load_config();\n con.save_config();\n }\n\n let proxy = console::ConsoleProxy::new(con.clone());\n\n log::set_logger(|max_log_level| {\n max_log_level.set(log::LogLevelFilter::Trace);\n Box::new(proxy)\n }).unwrap();\n\n info!(\"test compile\");\n\n let resource_manager = Arc::new(RwLock::new(resources::Manager::new()));\n {\n resource_manager.write().unwrap().tick();\n }\n\n let sdl = sdl2::init().unwrap();\n let sdl_video = sdl.video().unwrap();\n\n sdl_video.gl_set_swap_interval(1);\n\n let window = sdl2::video::WindowBuilder::new(&sdl_video, \"Steven\", 854, 480)\n .opengl()\n .resizable()\n .build()\n .expect(\"Could not create sdl window.\");\n let gl_attr = sdl_video.gl_attr();\n gl_attr.set_stencil_size(0);\n gl_attr.set_depth_size(24);\n gl_attr.set_context_major_version(3);\n gl_attr.set_context_minor_version(2);\n gl_attr.set_context_profile(sdl2::video::GLProfile::Core);\n\n let gl_context = window.gl_create_context().unwrap();\n 
window.gl_make_current(&gl_context).expect(\"Could not set current context.\");\n\n gl::init(&sdl_video);\n\n let renderer = render::Renderer::new(resource_manager.clone());\n let mut ui_container = ui::Container::new();\n\n let mut last_frame = time::now();\n let frame_time = (time::Duration::seconds(1).num_nanoseconds().unwrap() as f64) \/ 60.0;\n\n let mut screen_sys = screen::ScreenSystem::new();\n screen_sys.add_screen(Box::new(screen::Login::new(con.clone())));\n\n let textures = renderer.get_textures();\n let mut game = Game {\n server: server::Server::dummy_server(resource_manager.clone(), con.clone()),\n focused: false,\n renderer: renderer,\n screen_sys: screen_sys,\n resource_manager: resource_manager.clone(),\n console: con,\n should_close: false,\n chunk_builder: chunk_builder::ChunkBuilder::new(resource_manager, textures),\n connect_reply: None,\n };\n game.renderer.camera.pos = cgmath::Point3::new(0.5, 13.2, 0.5);\n\n let mut events = sdl.event_pump().unwrap();\n while !game.should_close {\n let version = {\n let mut res = game.resource_manager.write().unwrap();\n res.tick();\n res.version()\n };\n\n let now = time::now();\n let diff = now - last_frame;\n last_frame = now;\n let delta = (diff.num_nanoseconds().unwrap() as f64) \/ frame_time;\n let (width, height) = window.drawable_size();\n\n game.tick(delta);\n game.server.tick(&mut game.renderer, delta);\n\n game.renderer.update_camera(width, height);\n game.server.world.compute_render_list(&mut game.renderer);\n game.chunk_builder.tick(&mut game.server.world, &mut game.renderer, version);\n\n game.screen_sys.tick(delta, &mut game.renderer, &mut ui_container);\n game.console\n .lock()\n .unwrap()\n .tick(&mut ui_container, &mut game.renderer, delta, width as f64);\n ui_container.tick(&mut game.renderer, delta, width as f64, height as f64);\n game.renderer.tick(&mut game.server.world, delta, width, height);\n\n window.gl_swap_window();\n\n for event in events.poll_iter() {\n handle_window_event(&window, &mut game, &mut ui_container, event)\n }\n }\n}\n\nfn handle_window_event(window: &sdl2::video::Window,\n game: &mut Game,\n ui_container: &mut ui::Container,\n event: sdl2::event::Event) {\n use sdl2::event::Event;\n use sdl2::keyboard::Keycode;\n use sdl2::mouse::Mouse;\n use std::f64::consts::PI;\n\n let mouse = window.subsystem().sdl().mouse();\n\n match event {\n Event::Quit{..} => game.should_close = true,\n\n Event::MouseMotion{x, y, xrel, yrel, ..} => {\n let (width, height) = window.size();\n if game.focused {\n if !mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(true);\n }\n if let Some(player) = game.server.player {\n let s = 2000.0 + 0.01;\n let (rx, ry) = (xrel as f64 \/ s, yrel as f64 \/ s);\n let rotation = game.server.entities.get_component_mut(player, game.server.rotation).unwrap();\n rotation.yaw -= rx;\n rotation.pitch -= ry;\n if rotation.pitch < (PI\/2.0) + 0.01 {\n rotation.pitch = (PI\/2.0) + 0.01;\n }\n if rotation.pitch > (PI\/2.0)*3.0 - 0.01 {\n rotation.pitch = (PI\/2.0)*3.0 - 0.01;\n }\n }\n } else {\n if mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(false);\n }\n ui_container.hover_at(game, x as f64, y as f64, width as f64, height as f64);\n }\n }\n Event::MouseButtonUp{mouse_btn: Mouse::Left, x, y, ..} => {\n let (width, height) = window.size();\n\n if game.server.is_connected() && !game.focused && !game.screen_sys.is_current_closable() {\n game.focused = true;\n if !mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(true);\n }\n return;\n }\n if 
!game.focused {\n if mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(false);\n }\n ui_container.click_at(game, x as f64, y as f64, width as f64, height as f64);\n }\n }\n Event::MouseWheel{x, y, ..} => {\n game.screen_sys.on_scroll(x as f64, y as f64);\n }\n Event::KeyUp{keycode: Some(Keycode::Escape), ..} => {\n if game.focused {\n mouse.set_relative_mouse_mode(false);\n game.focused = false;\n game.screen_sys.replace_screen(Box::new(screen::SettingsMenu::new(game.console.clone(), true)));\n } else if game.screen_sys.is_current_closable() {\n mouse.set_relative_mouse_mode(true);\n game.focused = true;\n game.screen_sys.pop_screen();\n }\n }\n Event::KeyDown{keycode: Some(Keycode::Backquote), ..} => {\n game.console.lock().unwrap().toggle();\n }\n Event::KeyDown{keycode: Some(key), ..} => {\n if game.focused {\n let console = game.console.lock().unwrap();\n if let Some(steven_key) = settings::Stevenkey::get_by_keycode(key, &console) {\n game.server.key_press(true, steven_key);\n }\n } else {\n ui_container.key_press(game, key, true);\n }\n }\n Event::KeyUp{keycode: Some(key), ..} => {\n if game.focused {\n let console = game.console.lock().unwrap();\n if let Some(steven_key) = settings::Stevenkey::get_by_keycode(key, &console) {\n game.server.key_press(false, steven_key);\n }\n } else {\n ui_container.key_press(game, key, false);\n }\n }\n Event::TextInput{text, ..} => {\n if !game.focused {\n for c in text.chars() {\n ui_container.key_type(game, c);\n }\n }\n }\n _ => (),\n }\n}\n<commit_msg>Updat1<commit_after>\/\/ Copyright 2016 Matthew Collins\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#![recursion_limit=\"300\"]\n#![feature(const_fn)]\n\nextern crate sdl2;\nextern crate image;\nextern crate time;\nextern crate byteorder;\nextern crate serde_json;\nextern crate openssl;\nextern crate hyper;\nextern crate flate2;\nextern crate rand;\nextern crate rustc_serialize;\nextern crate cgmath;\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate lazy_static;\nextern crate collision;\npub extern crate steven_blocks;\nextern crate steven_shared as shared;\n\n#[macro_use]\npub mod macros;\n\npub mod ecs;\npub mod protocol;\npub mod format;\npub mod nbt;\npub mod item;\npub mod gl;\npub mod types;\npub mod resources;\npub mod render;\npub mod ui;\npub mod screen;\npub mod settings;\n#[macro_use]\npub mod console;\npub mod server;\npub mod world;\npub mod chunk_builder;\npub mod auth;\npub mod model;\npub mod entity;\n\nuse std::sync::{Arc, RwLock, Mutex};\nuse std::marker::PhantomData;\nuse std::thread;\nuse std::sync::mpsc;\n\nconst CL_BRAND: console::CVar<String> = console::CVar {\n ty: PhantomData,\n name: \"cl_brand\",\n description: \"cl_brand has the value of the clients current 'brand'. e.g. 
\\\"Steven\\\" or \\\n \\\"Vanilla\\\"\",\n mutable: false,\n serializable: false,\n default: &|| \"Steven\".to_owned(),\n};\n\npub struct Game {\n renderer: render::Renderer,\n screen_sys: screen::ScreenSystem,\n resource_manager: Arc<RwLock<resources::Manager>>,\n console: Arc<Mutex<console::Console>>,\n should_close: bool,\n\n server: server::Server,\n focused: bool,\n chunk_builder: chunk_builder::ChunkBuilder,\n\n connect_reply: Option<mpsc::Receiver<Result<server::Server, protocol::Error>>>,\n}\n\nimpl Game {\n pub fn connect_to(&mut self, address: &str) {\n let (tx, rx) = mpsc::channel();\n self.connect_reply = Some(rx);\n let address = address.to_owned();\n let resources = self.resource_manager.clone();\n let console = self.console.clone();\n thread::spawn(move || {\n tx.send(server::Server::connect(resources, console, &address)).unwrap();\n });\n }\n\n pub fn tick(&mut self, delta: f64) {\n if !self.server.is_connected() {\n self.renderer.camera.yaw += 0.005 * delta;\n if self.renderer.camera.yaw > ::std::f64::consts::PI * 2.0 {\n self.renderer.camera.yaw = 0.0;\n }\n }\n\n if let Some(disconnect_reason) = self.server.disconnect_reason.take() {\n self.screen_sys.replace_screen(Box::new(screen::ServerList::new(\n Some(disconnect_reason)\n )));\n }\n if !self.server.is_connected() {\n self.focused = false;\n }\n\n let mut clear_reply = false;\n if let Some(ref recv) = self.connect_reply {\n if let Ok(server) = recv.try_recv() {\n clear_reply = true;\n match server {\n Ok(val) => {\n self.screen_sys.pop_screen();\n self.focused = true;\n self.server.remove(&mut self.renderer);\n self.server = val;\n },\n Err(err) => {\n let msg = match err {\n protocol::Error::Disconnect(val) => val,\n err => {\n let mut msg = format::TextComponent::new(&format!(\"{}\", err));\n msg.modifier.color = Some(format::Color::Red);\n format::Component::Text(msg)\n },\n };\n self.screen_sys.replace_screen(Box::new(screen::ServerList::new(\n Some(msg)\n )));\n }\n }\n }\n }\n if clear_reply {\n self.connect_reply = None;\n }\n }\n}\n\nfn main() {\n let con = Arc::new(Mutex::new(console::Console::new()));\n {\n let mut con = con.lock().unwrap();\n con.register(CL_BRAND);\n auth::register_vars(&mut con);\n settings::register_vars(&mut con);\n con.load_config();\n con.save_config();\n }\n\n let proxy = console::ConsoleProxy::new(con.clone());\n\n log::set_logger(|max_log_level| {\n max_log_level.set(log::LogLevelFilter::Trace);\n Box::new(proxy)\n }).unwrap();\n\n info!(\"test compi\");\n\n let resource_manager = Arc::new(RwLock::new(resources::Manager::new()));\n {\n resource_manager.write().unwrap().tick();\n }\n\n let sdl = sdl2::init().unwrap();\n let sdl_video = sdl.video().unwrap();\n\n sdl_video.gl_set_swap_interval(1);\n\n let window = sdl2::video::WindowBuilder::new(&sdl_video, \"Steven\", 854, 480)\n .opengl()\n .resizable()\n .build()\n .expect(\"Could not create sdl window.\");\n let gl_attr = sdl_video.gl_attr();\n gl_attr.set_stencil_size(0);\n gl_attr.set_depth_size(24);\n gl_attr.set_context_major_version(3);\n gl_attr.set_context_minor_version(2);\n gl_attr.set_context_profile(sdl2::video::GLProfile::Core);\n\n let gl_context = window.gl_create_context().unwrap();\n window.gl_make_current(&gl_context).expect(\"Could not set current context.\");\n\n gl::init(&sdl_video);\n\n let renderer = render::Renderer::new(resource_manager.clone());\n let mut ui_container = ui::Container::new();\n\n let mut last_frame = time::now();\n let frame_time = (time::Duration::seconds(1).num_nanoseconds().unwrap() as 
f64) \/ 60.0;\n\n let mut screen_sys = screen::ScreenSystem::new();\n screen_sys.add_screen(Box::new(screen::Login::new(con.clone())));\n\n let textures = renderer.get_textures();\n let mut game = Game {\n server: server::Server::dummy_server(resource_manager.clone(), con.clone()),\n focused: false,\n renderer: renderer,\n screen_sys: screen_sys,\n resource_manager: resource_manager.clone(),\n console: con,\n should_close: false,\n chunk_builder: chunk_builder::ChunkBuilder::new(resource_manager, textures),\n connect_reply: None,\n };\n game.renderer.camera.pos = cgmath::Point3::new(0.5, 13.2, 0.5);\n\n let mut events = sdl.event_pump().unwrap();\n while !game.should_close {\n let version = {\n let mut res = game.resource_manager.write().unwrap();\n res.tick();\n res.version()\n };\n\n let now = time::now();\n let diff = now - last_frame;\n last_frame = now;\n let delta = (diff.num_nanoseconds().unwrap() as f64) \/ frame_time;\n let (width, height) = window.drawable_size();\n\n game.tick(delta);\n game.server.tick(&mut game.renderer, delta);\n\n game.renderer.update_camera(width, height);\n game.server.world.compute_render_list(&mut game.renderer);\n game.chunk_builder.tick(&mut game.server.world, &mut game.renderer, version);\n\n game.screen_sys.tick(delta, &mut game.renderer, &mut ui_container);\n game.console\n .lock()\n .unwrap()\n .tick(&mut ui_container, &mut game.renderer, delta, width as f64);\n ui_container.tick(&mut game.renderer, delta, width as f64, height as f64);\n game.renderer.tick(&mut game.server.world, delta, width, height);\n\n window.gl_swap_window();\n\n for event in events.poll_iter() {\n handle_window_event(&window, &mut game, &mut ui_container, event)\n }\n }\n}\n\nfn handle_window_event(window: &sdl2::video::Window,\n game: &mut Game,\n ui_container: &mut ui::Container,\n event: sdl2::event::Event) {\n use sdl2::event::Event;\n use sdl2::keyboard::Keycode;\n use sdl2::mouse::Mouse;\n use std::f64::consts::PI;\n\n let mouse = window.subsystem().sdl().mouse();\n\n match event {\n Event::Quit{..} => game.should_close = true,\n\n Event::MouseMotion{x, y, xrel, yrel, ..} => {\n let (width, height) = window.size();\n if game.focused {\n if !mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(true);\n }\n if let Some(player) = game.server.player {\n let s = 2000.0 + 0.01;\n let (rx, ry) = (xrel as f64 \/ s, yrel as f64 \/ s);\n let rotation = game.server.entities.get_component_mut(player, game.server.rotation).unwrap();\n rotation.yaw -= rx;\n rotation.pitch -= ry;\n if rotation.pitch < (PI\/2.0) + 0.01 {\n rotation.pitch = (PI\/2.0) + 0.01;\n }\n if rotation.pitch > (PI\/2.0)*3.0 - 0.01 {\n rotation.pitch = (PI\/2.0)*3.0 - 0.01;\n }\n }\n } else {\n if mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(false);\n }\n ui_container.hover_at(game, x as f64, y as f64, width as f64, height as f64);\n }\n }\n Event::MouseButtonUp{mouse_btn: Mouse::Left, x, y, ..} => {\n let (width, height) = window.size();\n\n if game.server.is_connected() && !game.focused && !game.screen_sys.is_current_closable() {\n game.focused = true;\n if !mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(true);\n }\n return;\n }\n if !game.focused {\n if mouse.relative_mouse_mode() {\n mouse.set_relative_mouse_mode(false);\n }\n ui_container.click_at(game, x as f64, y as f64, width as f64, height as f64);\n }\n }\n Event::MouseWheel{x, y, ..} => {\n game.screen_sys.on_scroll(x as f64, y as f64);\n }\n Event::KeyUp{keycode: Some(Keycode::Escape), ..} => {\n if game.focused 
{\n mouse.set_relative_mouse_mode(false);\n game.focused = false;\n game.screen_sys.replace_screen(Box::new(screen::SettingsMenu::new(game.console.clone(), true)));\n } else if game.screen_sys.is_current_closable() {\n mouse.set_relative_mouse_mode(true);\n game.focused = true;\n game.screen_sys.pop_screen();\n }\n }\n Event::KeyDown{keycode: Some(Keycode::Backquote), ..} => {\n game.console.lock().unwrap().toggle();\n }\n Event::KeyDown{keycode: Some(key), ..} => {\n if game.focused {\n let console = game.console.lock().unwrap();\n if let Some(steven_key) = settings::Stevenkey::get_by_keycode(key, &console) {\n game.server.key_press(true, steven_key);\n }\n } else {\n ui_container.key_press(game, key, true);\n }\n }\n Event::KeyUp{keycode: Some(key), ..} => {\n if game.focused {\n let console = game.console.lock().unwrap();\n if let Some(steven_key) = settings::Stevenkey::get_by_keycode(key, &console) {\n game.server.key_press(false, steven_key);\n }\n } else {\n ui_container.key_press(game, key, false);\n }\n }\n Event::TextInput{text, ..} => {\n if !game.focused {\n for c in text.chars() {\n ui_container.key_type(game, c);\n }\n }\n }\n _ => (),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add new example file_info<commit_after>extern crate zip;\n\nuse std::fs;\n\nfn main() {\n std::process::exit(real_main());\n}\n\nfn real_main() -> i32 {\n let args: Vec<_> = std::env::args().collect();\n if args.len() < 2 {\n println!(\"Usage: {} <filename>\", args[0]);\n return 1;\n }\n let fname = std::path::Path::new(&*args[1]);\n let file = fs::File::open(&fname).unwrap();\n\n let mut archive = zip::ZipArchive::new(file).unwrap();\n\n for i in 0..archive.len() {\n let file = archive.by_index(i).unwrap();\n let outpath = file.sanitized_name();\n\n {\n let comment = file.comment();\n if !comment.is_empty() {\n println!(\"Entry {} comment: {}\", i, comment);\n }\n }\n\n if (&*file.name()).ends_with('\/') {\n println!(\"Entry {} is a directory with name \\\"{}\\\"\", i, outpath.as_path().display());\n } else {\n println!(\"Entry {} is a file with name \\\"{}\\\" ({} bytes)\", i, outpath.as_path().display(), file.size());\n }\n }\n return 0;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adds Rust solution for #006<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add code documentation to shell::completer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add remaining docs for Store.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tests<commit_after>use super::*;\n\n#[test]\nfn queue() {\n\tlet mut q = Queue::new();\n\tassert_eq!(q.queue(\"hello\"), Ok(1));\n}\n\n#[test]\nfn dequeue() {\n\tlet mut q = Queue::new();\n\tq.queue(\"hello\").unwrap();\n\tassert_eq!(q.dequeue(), Some(\"hello\"));\n}\n\n#[test]\nfn no_capacity() {\n\tlet q: Queue<u8> = Queue::new();\n\tassert_eq!(q.capacity(), None);\n}\n\n#[test]\nfn some_capacity() {\n\tlet q: Queue<u8> = Queue::with_capacity(12);\n\tassert_eq!(q.capacity(), Some(12));\n}\n\n#[test]\nfn queue_full_capacity() {\n\tlet mut q: Queue<u8> = Queue::with_capacity(0);\n\tassert_eq!(q.queue(3), Err(()));\n}\n\n#[test]\nfn peek_at_empty() {\n\tlet q: Queue<u8> = Queue::with_capacity(1);\n\tassert_eq!(q.peek(), None);\n}\n\n#[test]\nfn peek_at_something() {\n\tlet mut q = Queue::new();\n\tq.queue(1).unwrap();\n\tassert_eq!(q.peek(), Some(1));\n}\n\n#[test]\nfn no_len() {\n\tlet q: Queue<u8> = Queue::new();\n\tassert_eq!(q.len(), 0);\n}\n\n#[test]\nfn some_len() {\n\tlet mut q = 
Queue::new();\n\tq.queue(1).unwrap();\n\tq.queue(2).unwrap();\n\tassert_eq!(q.len(), 2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a function to QuadTree to get objects that have been inserted<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add `alternative_names.rs` example<commit_after>#[macro_use(o,slog_log,slog_trace,slog_debug,slog_info,slog_warn,slog_error,slog_crit)]\nextern crate slog;\n\nextern crate slog_term;\n\nuse slog::drain::IntoLogger;\n\nfn main() {\n let log = slog_term::stderr().into_logger(o!());\n\n slog_trace!(log, \"logging a trace message\");\n slog_debug!(log, \"debug values\", \"x\" => 1, \"y\" => -1);\n slog_info!(log, \"some interesting info\", \"where\" => \"right here\");\n slog_warn!(log, \"be cautious!\", \"why\" => \"you never know...\");\n slog_error!(log, \"something's wrong\", \"type\" => \"unknown\");\n slog_crit!(log, \"abandoning test\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #21562<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(core)]\n\nextern crate core;\nuse core::marker::Sync;\n\nstatic SARRAY: [i32; 1] = [11];\n\nstruct MyStruct {\n pub arr: *const [i32],\n}\nunsafe impl Sync for MyStruct {}\n\nstatic mystruct: MyStruct = MyStruct {\n arr: &SARRAY\n};\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rustify nvpairs.h for Tedsta<commit_after>enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String, \/\/ TODO: What to name this string type?\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n Hrtime,\n Nvlist, \/\/ TODO: What to name this ?\n NvlistArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array\n}\n\nstruct NvPair {\n nvp_size: i32, \/\/ size of this nvpair\n nvp_name_sz: i16, \/\/ length of name string\n nvp_reserve: i16, \/\/ not used\n nvp_value_elem: i32, \/\/ number of elements for array types\n nvp_type: DataType, \/\/ type of value\n \/\/ name string\n \/\/ aligned ptr array for string arrays\n \/\/ aligned array of data for value\n}\n\n\/\/ nvlist header\nstruct NvList {\n nvl_version: i32\n nvl_nvflag: u32 \/\/ persistent flags\n nvl_priv: u64 \/\/ ptr to private data if not packed\n nvl_flag: u32\n nvl_pad: i32 \/\/ currently not used, for alignment\n}\n\n\/\/ nvp implementation version\nconst NV_VERSION: i32 = 0;\n\n\/\/ nvlist pack encoding\nconst NV_ENCODE_NATIVE: u8 = 0;\nconst NV_ENCODE_XDR: u8 = 1;\n\n\/\/ nvlist persistent unique name flags, stored in nvl_nvflags\nconst NV_UNIQUE_NAME: u32 = 0x1;\nconst NV_UNIQUE_NAME_TYPE: u32 = 0x2;\n\n\/\/ nvlist lookup pairs related flags\nconst NV_FLAG_NOENTOK: isize = 0x1;\n\n\/* What to do about these macros?\n\/\/ convenience macros\n#define NV_ALIGN(x) (((ulong_t)(x) + 7ul) & ~7ul)\n#define NV_ALIGN4(x) (((x) + 3) & ~3)\n\n#define NVP_SIZE(nvp) ((nvp)->nvp_size)\n#define NVP_NAME(nvp) ((char *)(nvp) + sizeof (nvpair_t))\n#define NVP_TYPE(nvp) 
((nvp)->nvp_type)\n#define NVP_NELEM(nvp) ((nvp)->nvp_value_elem)\n#define NVP_VALUE(nvp) ((char *)(nvp) + NV_ALIGN(sizeof (nvpair_t) \\\n + (nvp)->nvp_name_sz))\n\n#define NVL_VERSION(nvl) ((nvl)->nvl_version)\n#define NVL_SIZE(nvl) ((nvl)->nvl_size)\n#define NVL_FLAG(nvl) ((nvl)->nvl_flag)\n*\/\n\n\/\/ NV allocator framework\nstruct NvAllocOps;\n\nstruct NvAlloc<> {\n nva_ops: &'static NvAllocOps,\n nva_arg: Any, \/\/ This was a void pointer type.\n \/\/ Not sure if Any is the correct type.\n}\n\nstruct NvAllocOps {\n int (*nv_ao_init)(nv_alloc_t *, __va_list);\n void (*nv_ao_fini)(nv_alloc_t *);\n void *(*nv_ao_alloc)(nv_alloc_t *, size_t);\n void (*nv_ao_free)(nv_alloc_t *, void *, size_t);\n void (*nv_ao_reset)(nv_alloc_t *);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs(examples): add hello world<commit_after>use rocks::prelude::*;\n\nfn main() {\n let opt = Options::default().map_db_options(|db_opt| db_opt.create_if_missing(true));\n\n let db = DB::open(opt, \".\/data\").unwrap();\n\n assert!(db.put(WriteOptions::default_instance(), b\"hello\", b\"world\").is_ok());\n match db.get(ReadOptions::default_instance(), b\"hello\") {\n Ok(ref value) => println!(\"hello: {:?}\", value),\n Err(e) => eprintln!(\"error: {}\", e),\n }\n let _ = db.delete(&WriteOptions::default(), b\"hello\").unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example that instantly removes the port it added.<commit_after>use std::net::SocketAddrV4;\nuse std::env;\n\nextern crate igd;\n\nfn main() {\n match igd::search_gateway() {\n Err(ref err) => match *err {\n igd::SearchError::IoError(ref ioe) => println!(\"IoError: {}\", ioe),\n _ => println!(\"{:?}\", err),\n },\n Ok(gateway) => {\n let args: Vec<_> = env::args().collect();\n if args.len() != 4 {\n println!(\"Usage: add_remove <local_ip> <local_port> <remote_port>\");\n return;\n }\n let local_ip = args[1].parse().expect(\"Invalid IP address\");\n let local_port = args[2].parse().expect(\"Invalid local port\");\n let remote_port = args[3].parse().expect(\"Invalid remote port\");\n\n let local_addr = SocketAddrV4::new(local_ip, local_port);\n\n match gateway.add_port(igd::PortMappingProtocol::TCP, remote_port,\n local_addr, 60, \"crust\") {\n Err(ref err) => println!(\"{:?}\", err),\n Ok(()) => {\n println!(\"AddPortMapping successful.\");\n match gateway.remove_port(igd::PortMappingProtocol::TCP, remote_port) {\n Err(ref err) => println!(\"Error removing: {:?}\", err),\n Ok(_) => println!(\"DeletePortMapping successful.\"),\n }\n },\n }\n },\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::str::FromStr;\nuse std::ascii::AsciiExt;\n\nbitflags! 
{\n flags Flags: u8 {\n const REQUIRED = 0b00000001,\n const MULTIPLE = 0b00000010,\n const EMPTY_VALS = 0b00000100,\n const GLOBAL = 0b00001000,\n const HIDDEN = 0b00010000,\n const TAKES_VAL = 0b00100000,\n const USE_DELIM = 0b01000000,\n const NEXT_LINE_HELP = 0b10000000,\n }\n}\n\n#[doc(hidden)]\n#[derive(Debug, Clone)]\npub struct ArgFlags(Flags);\n\nimpl ArgFlags {\n pub fn new() -> Self {\n ArgFlags(EMPTY_VALS | USE_DELIM)\n }\n\n impl_settings!{ArgSettings,\n Required => REQUIRED,\n Multiple => MULTIPLE,\n EmptyValues => EMPTY_VALS,\n Global => GLOBAL,\n Hidden => HIDDEN,\n TakesValue => TAKES_VAL,\n UseValueDelimiter => USE_DELIM,\n NextLineHelp => NEXT_LINE_HELP\n }\n}\n\nimpl Default for ArgFlags {\n fn default() -> Self {\n ArgFlags::new()\n }\n}\n\n\/\/\/ Various settings that apply to arguments and may be set, unset, and checked via getter\/setter\n\/\/\/ methods `Arg::set`, `Arg::unset`, and `Arg::is_set`\n#[derive(Debug, PartialEq, Copy, Clone)]\n#[doc(hidden)]\npub enum ArgSettings {\n \/\/\/ The argument must be used\n Required,\n \/\/\/ The argument may be used multiple times such as `--flag --flag`\n Multiple,\n \/\/\/ The argument allows empty values such as `--option \"\"`\n EmptyValues,\n \/\/\/ The argument should be propagated down through all child subcommands\n Global,\n \/\/\/ The argument should **not** be shown in help text\n Hidden,\n \/\/\/ The argument accepts a value, such as `--option <value>`\n TakesValue,\n \/\/\/ Determines if the argument allows values to be grouped via a delimter\n UseValueDelimiter,\n \/\/\/ Prints the help text on the line after the argument\n NextLineHelp,\n}\n\nimpl FromStr for ArgSettings {\n type Err = String;\n fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {\n match &*s.to_ascii_lowercase() {\n \"required\" => Ok(ArgSettings::Required),\n \"multiple\" => Ok(ArgSettings::Multiple),\n \"global\" => Ok(ArgSettings::Global),\n \"emptyvalues\" => Ok(ArgSettings::EmptyValues),\n \"hidden\" => Ok(ArgSettings::Hidden),\n \"takesvalue\" => Ok(ArgSettings::TakesValue),\n \"usevaluedelimiter\" => Ok(ArgSettings::UseValueDelimiter),\n \"nextlinehelp\" => Ok(ArgSettings::NextLineHelp),\n _ => Err(\"unknown ArgSetting, cannot convert from str\".to_owned()),\n }\n }\n}\n<commit_msg>sprinkle #[derive(Copy)]<commit_after>use std::str::FromStr;\nuse std::ascii::AsciiExt;\n\nbitflags! 
{\n flags Flags: u8 {\n const REQUIRED = 0b00000001,\n const MULTIPLE = 0b00000010,\n const EMPTY_VALS = 0b00000100,\n const GLOBAL = 0b00001000,\n const HIDDEN = 0b00010000,\n const TAKES_VAL = 0b00100000,\n const USE_DELIM = 0b01000000,\n const NEXT_LINE_HELP = 0b10000000,\n }\n}\n\n#[doc(hidden)]\n#[derive(Debug, Clone, Copy)]\npub struct ArgFlags(Flags);\n\nimpl ArgFlags {\n pub fn new() -> Self {\n ArgFlags(EMPTY_VALS | USE_DELIM)\n }\n\n impl_settings!{ArgSettings,\n Required => REQUIRED,\n Multiple => MULTIPLE,\n EmptyValues => EMPTY_VALS,\n Global => GLOBAL,\n Hidden => HIDDEN,\n TakesValue => TAKES_VAL,\n UseValueDelimiter => USE_DELIM,\n NextLineHelp => NEXT_LINE_HELP\n }\n}\n\nimpl Default for ArgFlags {\n fn default() -> Self {\n ArgFlags::new()\n }\n}\n\n\/\/\/ Various settings that apply to arguments and may be set, unset, and checked via getter\/setter\n\/\/\/ methods `Arg::set`, `Arg::unset`, and `Arg::is_set`\n#[derive(Debug, PartialEq, Copy, Clone)]\n#[doc(hidden)]\npub enum ArgSettings {\n \/\/\/ The argument must be used\n Required,\n \/\/\/ The argument may be used multiple times such as `--flag --flag`\n Multiple,\n \/\/\/ The argument allows empty values such as `--option \"\"`\n EmptyValues,\n \/\/\/ The argument should be propagated down through all child subcommands\n Global,\n \/\/\/ The argument should **not** be shown in help text\n Hidden,\n \/\/\/ The argument accepts a value, such as `--option <value>`\n TakesValue,\n \/\/\/ Determines if the argument allows values to be grouped via a delimter\n UseValueDelimiter,\n \/\/\/ Prints the help text on the line after the argument\n NextLineHelp,\n}\n\nimpl FromStr for ArgSettings {\n type Err = String;\n fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {\n match &*s.to_ascii_lowercase() {\n \"required\" => Ok(ArgSettings::Required),\n \"multiple\" => Ok(ArgSettings::Multiple),\n \"global\" => Ok(ArgSettings::Global),\n \"emptyvalues\" => Ok(ArgSettings::EmptyValues),\n \"hidden\" => Ok(ArgSettings::Hidden),\n \"takesvalue\" => Ok(ArgSettings::TakesValue),\n \"usevaluedelimiter\" => Ok(ArgSettings::UseValueDelimiter),\n \"nextlinehelp\" => Ok(ArgSettings::NextLineHelp),\n _ => Err(\"unknown ArgSetting, cannot convert from str\".to_owned()),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::str::FromStr;\nuse std::ascii::AsciiExt;\n\nbitflags! 
{\n flags Flags: u8 {\n const REQUIRED = 0b00000001,\n const MULTIPLE = 0b00000010,\n const EMPTY_VALS = 0b00000100,\n const GLOBAL = 0b00001000,\n const HIDDEN = 0b00010000,\n const TAKES_VAL = 0b00100000,\n const USE_DELIM = 0b01000000,\n const NEXT_LINE_HELP = 0b10000000,\n }\n}\n\n#[doc(hidden)]\n#[derive(Debug, Clone)]\npub struct ArgFlags(Flags);\n\nimpl ArgFlags {\n pub fn new() -> Self {\n ArgFlags(EMPTY_VALS | USE_DELIM)\n }\n\n impl_settings!{ArgSettings,\n Required => REQUIRED,\n Multiple => MULTIPLE,\n EmptyValues => EMPTY_VALS,\n Global => GLOBAL,\n Hidden => HIDDEN,\n TakesValue => TAKES_VAL,\n UseValueDelimiter => USE_DELIM,\n NextLineHelp => NEXT_LINE_HELP\n }\n}\n\nimpl Default for ArgFlags {\n fn default() -> Self {\n ArgFlags::new()\n }\n}\n\n\/\/\/ Various settings that apply to arguments and may be set, unset, and checked via getter\/setter\n\/\/\/ methods `Arg::set`, `Arg::unset`, and `Arg::is_set`\n#[derive(Debug, PartialEq, Copy, Clone)]\npub enum ArgSettings {\n \/\/\/ The argument must be used\n Required,\n \/\/\/ The argument may be used multiple times such as `--flag --flag`\n Multiple,\n \/\/\/ The argument allows empty values such as `--option \"\"`\n EmptyValues,\n \/\/\/ The argument should be propagated down through all child subcommands\n Global,\n \/\/\/ The argument should **not** be shown in help text\n Hidden,\n \/\/\/ The argument accepts a value, such as `--option <value>`\n TakesValue,\n \/\/\/ Determines if the argument allows values to be grouped via a delimter\n UseValueDelimiter,\n \/\/\/ Prints the help text on the line after the argument\n NextLineHelp,\n}\n\nimpl FromStr for ArgSettings {\n type Err = String;\n fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {\n match &*s.to_ascii_lowercase() {\n \"required\" => Ok(ArgSettings::Required),\n \"multiple\" => Ok(ArgSettings::Multiple),\n \"global\" => Ok(ArgSettings::Global),\n \"emptyvalues\" => Ok(ArgSettings::EmptyValues),\n \"hidden\" => Ok(ArgSettings::Hidden),\n \"takesvalue\" => Ok(ArgSettings::TakesValue),\n \"usevaluedelimiter\" => Ok(ArgSettings::UseValueDelimiter),\n \"nextlinehelp\" => Ok(ArgSettings::NextLineHelp),\n _ => Err(\"unknown ArgSetting, cannot convert from str\".to_owned()),\n }\n }\n}\n<commit_msg>chore: hides ArgSettings in docs<commit_after>use std::str::FromStr;\nuse std::ascii::AsciiExt;\n\nbitflags! 
{\n flags Flags: u8 {\n const REQUIRED = 0b00000001,\n const MULTIPLE = 0b00000010,\n const EMPTY_VALS = 0b00000100,\n const GLOBAL = 0b00001000,\n const HIDDEN = 0b00010000,\n const TAKES_VAL = 0b00100000,\n const USE_DELIM = 0b01000000,\n const NEXT_LINE_HELP = 0b10000000,\n }\n}\n\n#[doc(hidden)]\n#[derive(Debug, Clone)]\npub struct ArgFlags(Flags);\n\nimpl ArgFlags {\n pub fn new() -> Self {\n ArgFlags(EMPTY_VALS | USE_DELIM)\n }\n\n impl_settings!{ArgSettings,\n Required => REQUIRED,\n Multiple => MULTIPLE,\n EmptyValues => EMPTY_VALS,\n Global => GLOBAL,\n Hidden => HIDDEN,\n TakesValue => TAKES_VAL,\n UseValueDelimiter => USE_DELIM,\n NextLineHelp => NEXT_LINE_HELP\n }\n}\n\nimpl Default for ArgFlags {\n fn default() -> Self {\n ArgFlags::new()\n }\n}\n\n\/\/\/ Various settings that apply to arguments and may be set, unset, and checked via getter\/setter\n\/\/\/ methods `Arg::set`, `Arg::unset`, and `Arg::is_set`\n#[derive(Debug, PartialEq, Copy, Clone)]\n#[doc(hidden)]\npub enum ArgSettings {\n \/\/\/ The argument must be used\n Required,\n \/\/\/ The argument may be used multiple times such as `--flag --flag`\n Multiple,\n \/\/\/ The argument allows empty values such as `--option \"\"`\n EmptyValues,\n \/\/\/ The argument should be propagated down through all child subcommands\n Global,\n \/\/\/ The argument should **not** be shown in help text\n Hidden,\n \/\/\/ The argument accepts a value, such as `--option <value>`\n TakesValue,\n \/\/\/ Determines if the argument allows values to be grouped via a delimter\n UseValueDelimiter,\n \/\/\/ Prints the help text on the line after the argument\n NextLineHelp,\n}\n\nimpl FromStr for ArgSettings {\n type Err = String;\n fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {\n match &*s.to_ascii_lowercase() {\n \"required\" => Ok(ArgSettings::Required),\n \"multiple\" => Ok(ArgSettings::Multiple),\n \"global\" => Ok(ArgSettings::Global),\n \"emptyvalues\" => Ok(ArgSettings::EmptyValues),\n \"hidden\" => Ok(ArgSettings::Hidden),\n \"takesvalue\" => Ok(ArgSettings::TakesValue),\n \"usevaluedelimiter\" => Ok(ArgSettings::UseValueDelimiter),\n \"nextlinehelp\" => Ok(ArgSettings::NextLineHelp),\n _ => Err(\"unknown ArgSetting, cannot convert from str\".to_owned()),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>initial commit of graph example<commit_after>struct Graph<T> {\n nodes: Vec<Node<T>>,\n}\n\nfn main() {\n \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add OpsWorks integration tests<commit_after>#![cfg(feature = \"opsworks\")]\n\nextern crate rusoto;\n\nuse rusoto::opsworks::{OpsWorksClient, DescribeStacksRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_describe_stacks() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = OpsWorksClient::new(credentials, Region::UsEast1);\n\n let request = DescribeStacksRequest::default();\n\n match client.describe_stacks(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n#[test]\nfn should_describe_my_user_profile() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = OpsWorksClient::new(credentials, Region::UsEast1);\n\n match client.describe_my_user_profile() {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}<|endoftext|>"} {"text":"<commit_before>use core::mem;\n\nuse 
{Future, IntoFuture, Async, Poll};\nuse stream::Stream;\n\n\/\/\/ Creates a `Stream` from a seed and a closure returning a `Future`.\n\/\/\/\n\/\/\/ This function is the dual for the `Stream::fold()` adapter: while `Stream:fold()` reduces a\n\/\/\/ `Stream` to one single value, `unfold()` creates a `Stream` from a seed value.\n\/\/\/\n\/\/\/ `unfold()` will call the provided closure with the provided seed, then wait for the\n\/\/\/ returned `Future` to complete with `(a, b)`. It will then yield the value `a`, and use `b`\n\/\/\/ as the next internal state.\n\/\/\/\n\/\/\/ If the closure returns `None` instead of `Some(Future)`, then the `unfold()` will stop\n\/\/\/ producing items and return `Ok(Async::Ready(None))` in future calls to `poll()`.\n\/\/\/\n\/\/\/ In case of error generated by the returned `Future`, the error will be returned by the `Stream`.\n\/\/\/ The `Stream` will then yield `Ok(Async::Ready(None))` in future calls to `poll()`.\n\/\/\/\n\/\/\/ This function can typically be used when wanting to go from the \"world of futures\" to the\n\/\/\/ \"world of streams\": the provided closure can build a `Future` using other library functions\n\/\/\/ working on futures, and `unfold()` will turn it into a `Stream` by repeating the operation.\n\/\/\/\n\/\/\/ # Example\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use futures::*;\n\/\/\/\n\/\/\/ let mut stream = stream::unfold(0, |state| {\n\/\/\/ if state <= 2 {\n\/\/\/ let next_state = state + 1;\n\/\/\/ let yielded = state * 2;\n\/\/\/ let fut = futures::finished::<_, u32>((yielded, next_state));\n\/\/\/ Some(fut)\n\/\/\/ } else {\n\/\/\/ None\n\/\/\/ }\n\/\/\/ });\n\/\/\/\n\/\/\/ let result = stream.collect().wait();\n\/\/\/ assert_eq!(result, Ok(vec![0, 2, 4]));\n\/\/\/ ```\npub fn unfold<T, F, Fut, It>(init: T, f: F) -> Unfold<T, F, Fut>\nwhere F: FnMut(T) -> Option<Fut>,\n Fut: IntoFuture<Item = (It, T)>,\n{\n Unfold {\n f: f,\n state: State::Ready(init),\n }\n}\n\n\/\/\/ A stream which creates futures, polls them and return their result\n\/\/\/\n\/\/\/ This stream is returned by the `futures::stream::unfold` method\n#[must_use = \"streams do nothing unless polled\"]\npub struct Unfold<T, F, Fut> where Fut: IntoFuture {\n f: F,\n state: State<T, Fut::Future>,\n}\n\nimpl <T, F, Fut, It> Stream for Unfold<T, F, Fut>\nwhere F: FnMut(T) -> Option<Fut>,\n Fut: IntoFuture<Item = (It, T)>,\n{\n type Item = It;\n type Error = Fut::Error;\n\n fn poll(&mut self) -> Poll<Option<It>, Fut::Error> {\n loop {\n match mem::replace(&mut self.state, State::Empty) {\n \/\/ State::Empty may happen if the future returned an error\n State::Empty => { return Ok(Async::Ready(None)); }\n State::Ready(state) => {\n match (self.f)(state) {\n Some(fut) => { self.state = State::Processing(fut.into_future()); }\n None => { return Ok(Async::Ready(None)); }\n }\n }\n State::Processing(mut fut) => {\n match try!(fut.poll()) {\n Async:: Ready((item, next_state)) => {\n self.state = State::Ready(next_state);\n return Ok(Async::Ready(Some(item)));\n }\n Async::NotReady => {\n self.state = State::Processing(fut);\n return Ok(Async::NotReady);\n }\n }\n }\n }\n }\n }\n}\n\nenum State<T, F> where F: Future {\n \/\/\/ Placeholder state when doing work, or when the returned Future generated an error\n Empty,\n\n \/\/\/ Ready to generate new future; current internal state is the `T`\n Ready(T),\n\n \/\/\/ Working on a future generated previously\n Processing(F),\n}\n<commit_msg>Touch up style of unfold a bit<commit_after>use core::mem;\n\nuse {Future, IntoFuture, Async, Poll};\nuse 
stream::Stream;\n\n\/\/\/ Creates a `Stream` from a seed and a closure returning a `Future`.\n\/\/\/\n\/\/\/ This function is the dual for the `Stream::fold()` adapter: while\n\/\/\/ `Stream:fold()` reduces a `Stream` to one single value, `unfold()` creates a\n\/\/\/ `Stream` from a seed value.\n\/\/\/\n\/\/\/ `unfold()` will call the provided closure with the provided seed, then wait\n\/\/\/ for the returned `Future` to complete with `(a, b)`. It will then yield the\n\/\/\/ value `a`, and use `b` as the next internal state.\n\/\/\/\n\/\/\/ If the closure returns `None` instead of `Some(Future)`, then the `unfold()`\n\/\/\/ will stop producing items and return `Ok(Async::Ready(None))` in future\n\/\/\/ calls to `poll()`.\n\/\/\/\n\/\/\/ In case of error generated by the returned `Future`, the error will be\n\/\/\/ returned by the `Stream`. The `Stream` will then yield\n\/\/\/ `Ok(Async::Ready(None))` in future calls to `poll()`.\n\/\/\/\n\/\/\/ This function can typically be used when wanting to go from the \"world of\n\/\/\/ futures\" to the \"world of streams\": the provided closure can build a\n\/\/\/ `Future` using other library functions working on futures, and `unfold()`\n\/\/\/ will turn it into a `Stream` by repeating the operation.\n\/\/\/\n\/\/\/ # Example\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use futures::*;\n\/\/\/\n\/\/\/ let mut stream = stream::unfold(0, |state| {\n\/\/\/ if state <= 2 {\n\/\/\/ let next_state = state + 1;\n\/\/\/ let yielded = state * 2;\n\/\/\/ let fut = futures::finished::<_, u32>((yielded, next_state));\n\/\/\/ Some(fut)\n\/\/\/ } else {\n\/\/\/ None\n\/\/\/ }\n\/\/\/ });\n\/\/\/\n\/\/\/ let result = stream.collect().wait();\n\/\/\/ assert_eq!(result, Ok(vec![0, 2, 4]));\n\/\/\/ ```\npub fn unfold<T, F, Fut, It>(init: T, f: F) -> Unfold<T, F, Fut>\n where F: FnMut(T) -> Option<Fut>,\n Fut: IntoFuture<Item = (It, T)>,\n{\n Unfold {\n f: f,\n state: State::Ready(init),\n }\n}\n\n\/\/\/ A stream which creates futures, polls them and return their result\n\/\/\/\n\/\/\/ This stream is returned by the `futures::stream::unfold` method\n#[must_use = \"streams do nothing unless polled\"]\npub struct Unfold<T, F, Fut> where Fut: IntoFuture {\n f: F,\n state: State<T, Fut::Future>,\n}\n\nimpl <T, F, Fut, It> Stream for Unfold<T, F, Fut>\n where F: FnMut(T) -> Option<Fut>,\n Fut: IntoFuture<Item = (It, T)>,\n{\n type Item = It;\n type Error = Fut::Error;\n\n fn poll(&mut self) -> Poll<Option<It>, Fut::Error> {\n loop {\n match mem::replace(&mut self.state, State::Empty) {\n \/\/ State::Empty may happen if the future returned an error\n State::Empty => { return Ok(Async::Ready(None)); }\n State::Ready(state) => {\n match (self.f)(state) {\n Some(fut) => { self.state = State::Processing(fut.into_future()); }\n None => { return Ok(Async::Ready(None)); }\n }\n }\n State::Processing(mut fut) => {\n match try!(fut.poll()) {\n Async:: Ready((item, next_state)) => {\n self.state = State::Ready(next_state);\n return Ok(Async::Ready(Some(item)));\n }\n Async::NotReady => {\n self.state = State::Processing(fut);\n return Ok(Async::NotReady);\n }\n }\n }\n }\n }\n }\n}\n\nenum State<T, F> where F: Future {\n \/\/\/ Placeholder state when doing work, or when the returned Future generated an error\n Empty,\n\n \/\/\/ Ready to generate new future; current internal state is the `T`\n Ready(T),\n\n \/\/\/ Working on a future generated previously\n Processing(F),\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>implement builder interface for conv2d<commit_after>use 
nn::modules::module::*;\nuse nn::parameter::Parameter;\n\n#[derive(ModParse)]\npub struct Conv2d<'a> {\n delegate: Module<'a>,\n\tweight: Parameter<'a>\n}\n\nimpl <'a>Conv2d<'a> {\n\tpub fn build(in_channels: u32, out_channels: u32, kernel_size: u32) -> Conv2dArgsBuilder {\n\t\tConv2dArgsBuilder::default()\n\t\t\t.in_channels(in_channels)\n\t\t\t.out_channels(out_channels)\n\t\t\t.kernel_size(kernel_size)\n\t}\n\tpub fn new(args: Conv2dArgs) -> Conv2d<'a> {\n\t\tConv2d {delegate: Module::new(), weight: Parameter::default()}\n\t}\n}\n\n#[builder(pattern=\"owned\")]\n#[derive(Builder)]\npub struct Conv2dArgs {\n\tin_channels: u32,\n\tout_channels: u32,\n\tkernel_size: u32,\n\t#[builder(default=\"1\")]\n\tstride: u32,\n\t#[builder(default=\"0\")]\n\tpadding: u32,\n\t#[builder(default=\"1\")]\n\tdilation: u32,\n\t#[builder(default=\"1\")]\n\tgroups: u32,\n\t#[builder(default=\"true\")]\n\tbias: bool,\n}\nimpl Conv2dArgsBuilder {\n\tpub fn done<'a>(self) -> Conv2d<'a> {\n\t\tlet args = self.build().unwrap();\n\t\tConv2d::new(args)\n\t}\n}<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse std::uint;\n\nenum RangeRelation {\n OverlapsBegin(\/* overlap *\/ uint),\n OverlapsEnd(\/* overlap *\/ uint),\n ContainedBy,\n Contains,\n Coincides,\n EntirelyBefore,\n EntirelyAfter\n}\n\npub struct Range {\n priv off: uint,\n priv len: uint\n}\n\nimpl Range {\n pub fn new(off: uint, len: uint) -> Range {\n Range { off: off, len: len }\n }\n\n pub fn empty() -> Range {\n Range::new(0, 0)\n }\n}\n\nimpl Range {\n pub fn begin(&self) -> uint { self.off }\n pub fn length(&self) -> uint { self.len }\n pub fn end(&self) -> uint { self.off + self.len }\n\n pub fn eachi(&self, callback: &fn(uint) -> bool) -> bool {\n for uint::range(self.off, self.off + self.len) |i| {\n if !callback(i) {\n break\n }\n }\n true\n }\n\n pub fn contains(&self, i: uint) -> bool {\n i >= self.begin() && i < self.end()\n }\n\n pub fn is_valid_for_string(&self, s: &str) -> bool {\n self.begin() < s.len() && self.end() <= s.len() && self.length() <= s.len()\n }\n\n pub fn shift_by(&mut self, i: int) { \n self.off = ((self.off as int) + i) as uint;\n }\n\n pub fn extend_by(&mut self, i: int) { \n self.len = ((self.len as int) + i) as uint;\n }\n\n pub fn extend_to(&mut self, i: uint) { \n self.len = i - self.off;\n }\n\n pub fn adjust_by(&mut self, off_i: int, len_i: int) {\n self.off = ((self.off as int) + off_i) as uint;\n self.len = ((self.len as int) + len_i) as uint;\n }\n\n pub fn reset(&mut self, off_i: uint, len_i: uint) {\n self.off = off_i;\n self.len = len_i;\n }\n\n \/\/\/ Computes the relationship between two ranges (`self` and `other`),\n \/\/\/ from the point of view of `self`. 
So, 'EntirelyBefore' means\n \/\/\/ that the `self` range is entirely before `other` range.\n pub fn relation_to_range(&self, other: &Range) -> RangeRelation {\n if other.begin() > self.end() {\n return EntirelyBefore;\n }\n if self.begin() > other.end() {\n return EntirelyAfter;\n } \n if self.begin() == other.begin() && self.end() == other.end() {\n return Coincides;\n }\n if self.begin() <= other.begin() && self.end() >= other.end() {\n return Contains;\n }\n if self.begin() >= other.begin() && self.end() <= other.end() {\n return ContainedBy;\n }\n if self.begin() < other.begin() && self.end() < other.end() {\n let overlap = self.end() - other.begin();\n return OverlapsBegin(overlap);\n }\n if self.begin() > other.begin() && self.end() > other.end() {\n let overlap = other.end() - self.begin();\n return OverlapsEnd(overlap);\n }\n fail!(fmt!(\"relation_to_range(): didn't classify self=%?, other=%?\",\n self, other));\n }\n\n pub fn repair_after_coalesced_range(&mut self, other: &Range) {\n let relation = self.relation_to_range(other);\n debug!(\"repair_after_coalesced_range: possibly repairing range %?\", self);\n debug!(\"repair_after_coalesced_range: relation of original range and coalesced range(%?): %?\",\n other, relation);\n match relation {\n EntirelyBefore => { },\n EntirelyAfter => { self.shift_by(-(other.length() as int)); },\n Coincides | ContainedBy => { self.reset(other.begin(), 1); },\n Contains => { self.extend_by(-(other.length() as int)); },\n OverlapsBegin(overlap) => { self.extend_by(1 - (overlap as int)); },\n OverlapsEnd(overlap) => {\n let len = self.length() - overlap + 1;\n self.reset(other.begin(), len);\n }\n };\n debug!(\"repair_after_coalesced_range: new range: ---- %?\", self);\n }\n}\n<commit_msg>Add utility methods to Range<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse std::uint;\nuse std::cmp::{max, min};\n\nenum RangeRelation {\n OverlapsBegin(\/* overlap *\/ uint),\n OverlapsEnd(\/* overlap *\/ uint),\n ContainedBy,\n Contains,\n Coincides,\n EntirelyBefore,\n EntirelyAfter\n}\n\npub struct Range {\n priv off: uint,\n priv len: uint\n}\n\nimpl Range {\n pub fn new(off: uint, len: uint) -> Range {\n Range { off: off, len: len }\n }\n\n pub fn empty() -> Range {\n Range::new(0, 0)\n }\n}\n\nimpl Range {\n pub fn begin(&self) -> uint { self.off }\n pub fn length(&self) -> uint { self.len }\n pub fn end(&self) -> uint { self.off + self.len }\n\n pub fn eachi(&self, callback: &fn(uint) -> bool) -> bool {\n for uint::range(self.off, self.off + self.len) |i| {\n if !callback(i) {\n break\n }\n }\n true\n }\n\n pub fn contains(&self, i: uint) -> bool {\n i >= self.begin() && i < self.end()\n }\n\n pub fn is_valid_for_string(&self, s: &str) -> bool {\n self.begin() < s.len() && self.end() <= s.len() && self.length() <= s.len()\n }\n\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n pub fn shift_by(&mut self, i: int) { \n self.off = ((self.off as int) + i) as uint;\n }\n\n pub fn extend_by(&mut self, i: int) { \n self.len = ((self.len as int) + i) as uint;\n }\n\n pub fn extend_to(&mut self, i: uint) { \n self.len = i - self.off;\n }\n\n pub fn adjust_by(&mut self, off_i: int, len_i: int) {\n self.off = ((self.off as int) + off_i) as uint;\n self.len = ((self.len as int) + len_i) as uint;\n }\n\n pub fn reset(&mut self, off_i: uint, len_i: uint) {\n self.off = off_i;\n self.len = len_i;\n }\n\n pub fn intersect(&self, other: &Range) -> Range {\n let begin = max(self.begin(), other.begin());\n let end = min(self.end(), other.end());\n\n if end < begin {\n Range::empty()\n } else {\n Range::new(begin, end - begin)\n }\n }\n\n \/\/\/ Computes the relationship between two ranges (`self` and `other`),\n \/\/\/ from the point of view of `self`. 
So, 'EntirelyBefore' means\n \/\/\/ that the `self` range is entirely before `other` range.\n pub fn relation_to_range(&self, other: &Range) -> RangeRelation {\n if other.begin() > self.end() {\n return EntirelyBefore;\n }\n if self.begin() > other.end() {\n return EntirelyAfter;\n } \n if self.begin() == other.begin() && self.end() == other.end() {\n return Coincides;\n }\n if self.begin() <= other.begin() && self.end() >= other.end() {\n return Contains;\n }\n if self.begin() >= other.begin() && self.end() <= other.end() {\n return ContainedBy;\n }\n if self.begin() < other.begin() && self.end() < other.end() {\n let overlap = self.end() - other.begin();\n return OverlapsBegin(overlap);\n }\n if self.begin() > other.begin() && self.end() > other.end() {\n let overlap = other.end() - self.begin();\n return OverlapsEnd(overlap);\n }\n fail!(fmt!(\"relation_to_range(): didn't classify self=%?, other=%?\",\n self, other));\n }\n\n pub fn repair_after_coalesced_range(&mut self, other: &Range) {\n let relation = self.relation_to_range(other);\n debug!(\"repair_after_coalesced_range: possibly repairing range %?\", self);\n debug!(\"repair_after_coalesced_range: relation of original range and coalesced range(%?): %?\",\n other, relation);\n match relation {\n EntirelyBefore => { },\n EntirelyAfter => { self.shift_by(-(other.length() as int)); },\n Coincides | ContainedBy => { self.reset(other.begin(), 1); },\n Contains => { self.extend_by(-(other.length() as int)); },\n OverlapsBegin(overlap) => { self.extend_by(1 - (overlap as int)); },\n OverlapsEnd(overlap) => {\n let len = self.length() - overlap + 1;\n self.reset(other.begin(), len);\n }\n };\n debug!(\"repair_after_coalesced_range: new range: ---- %?\", self);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use collections::borrow::ToOwned;\nuse collections::String;\n\nuse common::time::Duration;\n\n#[derive(Copy, Clone)]\npub enum LogLevel {\n Critical,\n Error,\n Warning,\n Info,\n Debug,\n}\n\n\/\/\/ Add message to kernel logs with format\n#[macro_export]\nmacro_rules! syslog {\n ($level:expr, $($arg:tt)*) => ({\n $crate::logging::syslog_inner($level, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_debug {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Debug, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_info {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Info, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_warning {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Warning, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_critical {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Critical, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! 
syslog_error {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Error, format!($($arg)*));\n });\n}\n\n\/\/\/ Add `message` to the kernel logs, with a priority level of `level`\npub fn syslog(level: LogLevel, message: &str) {\n syslog_inner(level, message.to_owned());\n}\n\n\/\/TODO: Limit log message size\npub fn syslog_inner(level: LogLevel, message: String) {\n let time = Duration::monotonic();\n let logs = unsafe { &mut *::env().logs.get() };\n while logs.len() >= 4096 {\n logs.pop_front();\n }\n logs.push_back((time, level, message));\n \/\/TODO: Print messages that are above priority\n}\n<commit_msg>Print out syslog messages<commit_after>use collections::borrow::ToOwned;\nuse collections::String;\n\nuse common::time::Duration;\n\n#[derive(Copy, Clone)]\npub enum LogLevel {\n Critical,\n Error,\n Warning,\n Info,\n Debug,\n}\n\n\/\/\/ Add message to kernel logs with format\n#[macro_export]\nmacro_rules! syslog {\n ($level:expr, $($arg:tt)*) => ({\n $crate::logging::syslog_inner($level, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_debug {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Debug, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_info {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Info, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_warning {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Warning, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_critical {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Critical, format!($($arg)*));\n });\n}\n\n#[macro_export]\nmacro_rules! syslog_error {\n ($($arg:tt)*) => ({\n $crate::logging::syslog_inner($crate::logging::LogLevel::Error, format!($($arg)*));\n });\n}\n\n\/\/\/ Add `message` to the kernel logs, with a priority level of `level`\npub fn syslog(level: LogLevel, message: &str) {\n syslog_inner(level, message.to_owned());\n}\n\n\/\/TODO: Limit log message size\npub fn syslog_inner(level: LogLevel, message: String) {\n let time = Duration::monotonic();\n\n let prefix: &str = match level {\n LogLevel::Debug => \"DEBUG \",\n LogLevel::Info => \"INFO \",\n LogLevel::Warning => \"WARN \",\n LogLevel::Error => \"ERROR \",\n LogLevel::Critical => \"CRIT \",\n };\n debugln!(\"[{}.{:>03}] {}{}\", time.secs, time.nanos\/1000000, prefix, message);\n\n let logs = unsafe { &mut *::env().logs.get() };\n while logs.len() >= 4096 {\n logs.pop_front();\n }\n logs.push_back((time, level, message));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add the start to a basic integration test<commit_after>extern crate thoughtfuck;\n\nuse thoughtfuck::vm::*;\nuse thoughtfuck::program::*;\nuse thoughtfuck::parse::parse;\n\n#[test]\nfn hello_world () {\n const SOURCE: &str = \"++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.\";\n let commands = parse(SOURCE);\n\n let mut program = Program::new();\n program.append(&commands);\n\n let mut vm = VM::new(None);\n program.execute(&mut vm);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"alloc_jemalloc\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![allocator]\n#![unstable(feature = \"alloc_jemalloc\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(staged_api)]\n\nextern crate libc;\n\nuse libc::{c_int, c_void, size_t};\n\n\/\/ Linkage directives to pull in jemalloc and its dependencies.\n\/\/\n\/\/ On some platforms we need to be sure to link in `pthread` which jemalloc\n\/\/ depends on, and specifically on android we need to also link to libgcc.\n\/\/ Currently jemalloc is compiled with gcc which will generate calls to\n\/\/ intrinsics that are libgcc specific (e.g. those intrinsics aren't present in\n\/\/ libcompiler-rt), so link that in to get that support.\n#[link(name = \"jemalloc\", kind = \"static\")]\n#[cfg_attr(target_os = \"android\", link(name = \"gcc\"))]\n#[cfg_attr(all(not(windows),\n not(target_os = \"android\"),\n not(target_env = \"musl\")),\n link(name = \"pthread\"))]\n#[cfg(not(cargobuild))]\nextern {}\n\n\/\/ Note that the symbols here are prefixed by default on OSX (we don't\n\/\/ explicitly request it), and on Android and DragonFly we explicitly request\n\/\/ it as unprefixing cause segfaults (mismatches in allocators).\nextern {\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\"),\n link_name = \"je_mallocx\")]\n fn mallocx(size: size_t, flags: c_int) -> *mut c_void;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\"),\n link_name = \"je_rallocx\")]\n fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\"),\n link_name = \"je_xallocx\")]\n fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\"),\n link_name = \"je_sdallocx\")]\n fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\"),\n link_name = \"je_nallocx\")]\n fn nallocx(size: size_t, flags: c_int) -> size_t;\n}\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc64\")))]\nconst MIN_ALIGN: usize = 16;\n\n\/\/ MALLOCX_ALIGN(a) macro\nfn mallocx_align(a: usize) -> c_int {\n a.trailing_zeros() as c_int\n}\n\nfn align_to_flags(align: usize) -> c_int {\n if align <= MIN_ALIGN {\n 0\n } else {\n mallocx_align(align)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { mallocx(size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate_inplace(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n let flags = align_to_flags(align);\n unsafe { xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n let flags = align_to_flags(align);\n unsafe { sdallocx(ptr as *mut c_void, old_size as size_t, flags) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_usable_size(size: usize, align: usize) -> usize {\n let flags = align_to_flags(align);\n unsafe { nallocx(size as size_t, flags) as usize }\n}\n\n\/\/ These symbols are used by jemalloc on android but the really old android\n\/\/ we're building on doesn't have them defined, so just make sure the symbols\n\/\/ are available.\n#[no_mangle]\n#[cfg(target_os = \"android\")]\npub extern fn pthread_atfork(_prefork: *mut u8,\n _postfork_parent: *mut u8,\n _postfork_child: *mut u8) -> i32 {\n 0\n}\n<commit_msg>Auto merge of #33308 - ollie27:wingnu_jemalloc, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"alloc_jemalloc\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![allocator]\n#![unstable(feature = \"alloc_jemalloc\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(staged_api)]\n\nextern crate libc;\n\nuse libc::{c_int, c_void, size_t};\n\n\/\/ Linkage directives to pull in jemalloc and its dependencies.\n\/\/\n\/\/ On some platforms we need to be sure to link in `pthread` which jemalloc\n\/\/ depends on, and specifically on android we need to also link to libgcc.\n\/\/ Currently jemalloc is compiled with gcc which will generate calls to\n\/\/ intrinsics that are libgcc specific (e.g. 
those intrinsics aren't present in\n\/\/ libcompiler-rt), so link that in to get that support.\n#[link(name = \"jemalloc\", kind = \"static\")]\n#[cfg_attr(target_os = \"android\", link(name = \"gcc\"))]\n#[cfg_attr(all(not(windows),\n not(target_os = \"android\"),\n not(target_env = \"musl\")),\n link(name = \"pthread\"))]\n#[cfg(not(cargobuild))]\nextern {}\n\n\/\/ Note that the symbols here are prefixed by default on OSX and Windows (we\n\/\/ don't explicitly request it), and on Android and DragonFly we explicitly\n\/\/ request it as unprefixing cause segfaults (mismatches in allocators).\nextern {\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\", target_os = \"windows\"),\n link_name = \"je_mallocx\")]\n fn mallocx(size: size_t, flags: c_int) -> *mut c_void;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\", target_os = \"windows\"),\n link_name = \"je_rallocx\")]\n fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\", target_os = \"windows\"),\n link_name = \"je_xallocx\")]\n fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\", target_os = \"windows\"),\n link_name = \"je_sdallocx\")]\n fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);\n #[cfg_attr(any(target_os = \"macos\", target_os = \"android\", target_os = \"ios\",\n target_os = \"dragonfly\", target_os = \"windows\"),\n link_name = \"je_nallocx\")]\n fn nallocx(size: size_t, flags: c_int) -> size_t;\n}\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc64\")))]\nconst MIN_ALIGN: usize = 16;\n\n\/\/ MALLOCX_ALIGN(a) macro\nfn mallocx_align(a: usize) -> c_int {\n a.trailing_zeros() as c_int\n}\n\nfn align_to_flags(align: usize) -> c_int {\n if align <= MIN_ALIGN {\n 0\n } else {\n mallocx_align(align)\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { mallocx(size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> *mut u8 {\n let flags = align_to_flags(align);\n unsafe { rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate_inplace(ptr: *mut u8,\n _old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n let flags = align_to_flags(align);\n unsafe { xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n let flags = align_to_flags(align);\n unsafe { sdallocx(ptr as *mut c_void, old_size as size_t, flags) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_usable_size(size: usize, align: usize) -> usize {\n let flags = align_to_flags(align);\n unsafe { nallocx(size as size_t, flags) as usize }\n}\n\n\/\/ These symbols are used by jemalloc on android but the really old android\n\/\/ we're building on doesn't have them defined, so just make sure the symbols\n\/\/ are available.\n#[no_mangle]\n#[cfg(target_os = \"android\")]\npub extern fn pthread_atfork(_prefork: *mut u8,\n _postfork_parent: *mut u8,\n _postfork_child: *mut u8) -> i32 {\n 0\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse dot;\nuse rustc::mir::repr::*;\nuse rustc::middle::ty;\nuse std::fmt::Debug;\nuse std::io::{self, Write};\n\n\/\/\/ Write a graphviz DOT graph for the given MIR.\npub fn write_mir_graphviz<W: Write>(mir: &Mir, w: &mut W) -> io::Result<()> {\n try!(writeln!(w, \"digraph Mir {{\"));\n\n \/\/ Global graph properties\n try!(writeln!(w, r#\" graph [fontname=\"monospace\"];\"#));\n try!(writeln!(w, r#\" node [fontname=\"monospace\"];\"#));\n try!(writeln!(w, r#\" edge [fontname=\"monospace\"];\"#));\n\n \/\/ Graph label\n try!(write_graph_label(mir, w));\n\n \/\/ Nodes\n for block in mir.all_basic_blocks() {\n try!(write_node(block, mir, w));\n }\n\n \/\/ Edges\n for source in mir.all_basic_blocks() {\n try!(write_edges(source, mir, w));\n }\n\n writeln!(w, \"}}\")\n}\n\n\/\/\/ Write a graphviz DOT node for the given basic block.\nfn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {\n let data = mir.basic_block_data(block);\n\n \/\/ Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.\n try!(write!(w, r#\" {} [shape=\"none\", label=<\"#, node(block)));\n try!(write!(w, r#\"<table border=\"0\" cellborder=\"1\" cellspacing=\"0\">\"#));\n\n \/\/ Basic block number at the top.\n try!(write!(w, r#\"<tr><td bgcolor=\"gray\" align=\"center\">{}<\/td><\/tr>\"#, block.index()));\n\n \/\/ List of statements in the middle.\n if !data.statements.is_empty() {\n try!(write!(w, r#\"<tr><td align=\"left\" balign=\"left\">\"#));\n for statement in &data.statements {\n try!(write!(w, \"{}<br\/>\", escape(statement)));\n }\n try!(write!(w, \"<\/td><\/tr>\"));\n }\n\n \/\/ Terminator head at the bottom, not including the list of successor blocks. Those will be\n \/\/ displayed as labels on the edges between blocks.\n let mut terminator_head = String::new();\n data.terminator.fmt_head(&mut terminator_head).unwrap();\n try!(write!(w, r#\"<tr><td align=\"left\">{}<\/td><\/tr>\"#, dot::escape_html(&terminator_head)));\n\n \/\/ Close the table, node label, and the node itself.\n writeln!(w, \"<\/table>>];\")\n}\n\n\/\/\/ Write graphviz DOT edges with labels between the given basic block and all of its successors.\nfn write_edges<W: Write>(source: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {\n let terminator = &mir.basic_block_data(source).terminator;\n let labels = terminator.fmt_successor_labels();\n\n for (&target, label) in terminator.successors().iter().zip(labels) {\n try!(writeln!(w, r#\" {} -> {} [label=\"{}\"];\"#, node(source), node(target), label));\n }\n\n Ok(())\n}\n\n\/\/\/ Write the graphviz DOT label for the overall graph. 
This is essentially a block of text that\n\/\/\/ will appear below the graph, showing the type of the `fn` this MIR represents and the types of\n\/\/\/ all the variables and temporaries.\nfn write_graph_label<W: Write>(mir: &Mir, w: &mut W) -> io::Result<()> {\n try!(write!(w, \" label=<fn(\"));\n\n \/\/ fn argument types.\n for (i, arg) in mir.arg_decls.iter().enumerate() {\n if i > 0 {\n try!(write!(w, \", \"));\n }\n try!(write!(w, \"{:?}: {}\", Lvalue::Arg(i as u32), escape(&arg.ty)));\n }\n\n try!(write!(w, \") -> \"));\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => try!(write!(w, \"{}\", escape(ty))),\n ty::FnOutput::FnDiverging => try!(write!(w, \"!\")),\n }\n\n try!(write!(w, r#\"<br align=\"left\"\/>\"#));\n\n \/\/ User variable types (including the user's name in a comment).\n for (i, var) in mir.var_decls.iter().enumerate() {\n try!(write!(w, \"let \"));\n if var.mutability == Mutability::Mut {\n try!(write!(w, \"mut \"));\n }\n try!(write!(w, r#\"{:?}: {}; \/\/ {}<br align=\"left\"\/>\"#,\n Lvalue::Var(i as u32), escape(&var.ty), var.name));\n }\n\n \/\/ Compiler-introduced temporary types.\n for (i, temp) in mir.temp_decls.iter().enumerate() {\n try!(write!(w, r#\"let {:?}: {};<br align=\"left\"\/>\"#,\n Lvalue::Temp(i as u32), escape(&temp.ty)));\n }\n\n writeln!(w, \">;\")\n}\n\nfn node(block: BasicBlock) -> String {\n format!(\"bb{}\", block.index())\n}\n\nfn escape<T: Debug>(t: &T) -> String {\n dot::escape_html(&format!(\"{:?}\", t))\n}\n<commit_msg>Add 'mut' to temporary vars in MIR graphviz output.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse dot;\nuse rustc::mir::repr::*;\nuse rustc::middle::ty;\nuse std::fmt::Debug;\nuse std::io::{self, Write};\n\n\/\/\/ Write a graphviz DOT graph for the given MIR.\npub fn write_mir_graphviz<W: Write>(mir: &Mir, w: &mut W) -> io::Result<()> {\n try!(writeln!(w, \"digraph Mir {{\"));\n\n \/\/ Global graph properties\n try!(writeln!(w, r#\" graph [fontname=\"monospace\"];\"#));\n try!(writeln!(w, r#\" node [fontname=\"monospace\"];\"#));\n try!(writeln!(w, r#\" edge [fontname=\"monospace\"];\"#));\n\n \/\/ Graph label\n try!(write_graph_label(mir, w));\n\n \/\/ Nodes\n for block in mir.all_basic_blocks() {\n try!(write_node(block, mir, w));\n }\n\n \/\/ Edges\n for source in mir.all_basic_blocks() {\n try!(write_edges(source, mir, w));\n }\n\n writeln!(w, \"}}\")\n}\n\n\/\/\/ Write a graphviz DOT node for the given basic block.\nfn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {\n let data = mir.basic_block_data(block);\n\n \/\/ Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.\n try!(write!(w, r#\" {} [shape=\"none\", label=<\"#, node(block)));\n try!(write!(w, r#\"<table border=\"0\" cellborder=\"1\" cellspacing=\"0\">\"#));\n\n \/\/ Basic block number at the top.\n try!(write!(w, r#\"<tr><td bgcolor=\"gray\" align=\"center\">{}<\/td><\/tr>\"#, block.index()));\n\n \/\/ List of statements in the middle.\n if !data.statements.is_empty() {\n try!(write!(w, r#\"<tr><td align=\"left\" balign=\"left\">\"#));\n for statement in &data.statements {\n try!(write!(w, \"{}<br\/>\", escape(statement)));\n }\n try!(write!(w, \"<\/td><\/tr>\"));\n }\n\n \/\/ Terminator head at the bottom, not including the list of successor blocks. Those will be\n \/\/ displayed as labels on the edges between blocks.\n let mut terminator_head = String::new();\n data.terminator.fmt_head(&mut terminator_head).unwrap();\n try!(write!(w, r#\"<tr><td align=\"left\">{}<\/td><\/tr>\"#, dot::escape_html(&terminator_head)));\n\n \/\/ Close the table, node label, and the node itself.\n writeln!(w, \"<\/table>>];\")\n}\n\n\/\/\/ Write graphviz DOT edges with labels between the given basic block and all of its successors.\nfn write_edges<W: Write>(source: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {\n let terminator = &mir.basic_block_data(source).terminator;\n let labels = terminator.fmt_successor_labels();\n\n for (&target, label) in terminator.successors().iter().zip(labels) {\n try!(writeln!(w, r#\" {} -> {} [label=\"{}\"];\"#, node(source), node(target), label));\n }\n\n Ok(())\n}\n\n\/\/\/ Write the graphviz DOT label for the overall graph. 
This is essentially a block of text that\n\/\/\/ will appear below the graph, showing the type of the `fn` this MIR represents and the types of\n\/\/\/ all the variables and temporaries.\nfn write_graph_label<W: Write>(mir: &Mir, w: &mut W) -> io::Result<()> {\n try!(write!(w, \" label=<fn(\"));\n\n \/\/ fn argument types.\n for (i, arg) in mir.arg_decls.iter().enumerate() {\n if i > 0 {\n try!(write!(w, \", \"));\n }\n try!(write!(w, \"{:?}: {}\", Lvalue::Arg(i as u32), escape(&arg.ty)));\n }\n\n try!(write!(w, \") -> \"));\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => try!(write!(w, \"{}\", escape(ty))),\n ty::FnOutput::FnDiverging => try!(write!(w, \"!\")),\n }\n\n try!(write!(w, r#\"<br align=\"left\"\/>\"#));\n\n \/\/ User variable types (including the user's name in a comment).\n for (i, var) in mir.var_decls.iter().enumerate() {\n try!(write!(w, \"let \"));\n if var.mutability == Mutability::Mut {\n try!(write!(w, \"mut \"));\n }\n try!(write!(w, r#\"{:?}: {}; \/\/ {}<br align=\"left\"\/>\"#,\n Lvalue::Var(i as u32), escape(&var.ty), var.name));\n }\n\n \/\/ Compiler-introduced temporary types.\n for (i, temp) in mir.temp_decls.iter().enumerate() {\n try!(write!(w, r#\"let mut {:?}: {};<br align=\"left\"\/>\"#,\n Lvalue::Temp(i as u32), escape(&temp.ty)));\n }\n\n writeln!(w, \">;\")\n}\n\nfn node(block: BasicBlock) -> String {\n format!(\"bb{}\", block.index())\n}\n\nfn escape<T: Debug>(t: &T) -> String {\n dot::escape_html(&format!(\"{:?}\", t))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added logging and tracking of custom emojis<commit_after><|endoftext|>"} {"text":"<commit_before>use std::mem;\nuse std::slice;\nuse std::default::Default;\n\nuse color;\nuse color:: {\n Pixel,\n ColorType\n};\n\n\/\/\/ An enumeration of Image Errors\n#[deriving(Show, PartialEq, Eq)]\npub enum ImageError {\n \/\/\/The Image is not formatted properly\n FormatError,\n\n \/\/\/The Image's dimensions are either too small or too large\n DimensionError,\n\n \/\/\/The Decoder does not support this image format\n UnsupportedError,\n\n \/\/\/The Decoder does not support this color type\n UnsupportedColor,\n\n \/\/\/Not enough data was provided to the Decoder\n \/\/\/to decode the image\n NotEnoughData,\n\n \/\/\/An I\/O Error occurred while decoding the image\n IoError,\n\n \/\/\/The end of the image has been reached\n ImageEnd\n}\n\npub type ImageResult<T> = Result<T, ImageError>;\n\n\/\/\/ An enumeration of supported image formats.\n\/\/\/ Not all formats support both encoding and decoding.\n#[deriving(PartialEq, Eq, Show)]\npub enum ImageFormat {\n \/\/\/ An Image in PNG Format\n PNG,\n\n \/\/\/ An Image in JPEG Format\n JPEG,\n\n \/\/\/ An Image in GIF Format\n GIF,\n\n \/\/\/ An Image in WEBP Format\n WEBP,\n\n \/\/\/ An Image in PPM Format\n PPM\n}\n\n\/\/\/ The trait that all decoders implement\npub trait ImageDecoder {\n \/\/\/Return a tuple containing the width and height of the image\n fn dimensions(&mut self) -> ImageResult<(u32, u32)>;\n\n \/\/\/Return the color type of the image e.g RGB(8) (8bit RGB)\n fn colortype(&mut self) -> ImageResult<ColorType>;\n\n \/\/\/Returns the length in bytes of one decoded row of the image\n fn row_len(&mut self) -> ImageResult<uint>;\n\n \/\/\/Read one row from the image into buf\n \/\/\/Returns the row index\n fn read_scanline(&mut self, buf: &mut [u8]) -> ImageResult<u32>;\n\n \/\/\/Decode the entire image and return it as a Vector\n fn read_image(&mut self) -> ImageResult<Vec<u8>>;\n\n \/\/\/Decode a 
specific region of the image, represented by the rectangle\n \/\/\/starting from ```x``` and ```y``` and having ```length``` and ```width```\n fn load_rect(&mut self, x: u32, y: u32, length: u32, width: u32) -> ImageResult<Vec<u8>> {\n let (w, h) = try!(self.dimensions());\n\n if length > h || width > w || x > w || y > h {\n return Err(DimensionError)\n }\n\n let c = try!(self.colortype());\n\n let bpp = color::bits_per_pixel(c) \/ 8;\n\n let rowlen = try!(self.row_len());\n\n let mut buf = Vec::from_elem(length as uint * width as uint * bpp, 0u8);\n let mut tmp = Vec::from_elem(rowlen, 0u8);\n\n loop {\n let row = try!(self.read_scanline(tmp.as_mut_slice()));\n\n if row - 1 == y {\n break\n }\n }\n\n for i in range(0, length as uint) {\n {\n let from = tmp.slice_from(x as uint * bpp)\n .slice_to(width as uint * bpp);\n\n let to = buf.mut_slice_from(i * width as uint * bpp)\n .mut_slice_to(width as uint * bpp);\n\n slice::bytes::copy_memory(to, from);\n }\n\n let _ = try!(self.read_scanline(tmp.as_mut_slice()));\n }\n\n Ok(buf)\n }\n}\n\n\/\/\/ Immutable pixel iterator\npub struct Pixels<'a, I> {\n image: &'a I,\n x: u32,\n y: u32,\n width: u32,\n height: u32\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> Iterator<(u32, u32, P)> for Pixels<'a, I> {\n fn next(&mut self) -> Option<(u32, u32, P)> {\n if self.x >= self.width {\n self.x = 0;\n self.y += 1;\n }\n\n if self.y >= self.height {\n None\n } else {\n let pixel = self.image.get_pixel(self.x, self.y);\n let p = (self.x, self.y, pixel);\n\n self.x += 1;\n\n Some(p)\n }\n }\n}\n\n\/\/\/ Mutable pixel iterator\npub struct MutPixels<'a, I> {\n image: &'a mut I,\n x: u32,\n y: u32,\n width: u32,\n height: u32\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: MutableRefImage<P>> Iterator<(u32, u32, &'a mut P)> for MutPixels<'a, I> {\n fn next(&mut self) -> Option<(u32, u32, &'a mut P)> {\n if self.x >= self.width {\n self.x = 0;\n self.y += 1;\n }\n\n if self.y >= self.height {\n None\n } else {\n let tmp = self.image.get_mut_pixel(self.x, self.y);\n\n \/\/error: lifetime of `self` is too short to guarantee its contents\n \/\/ can be safely reborrowed...\n let ptr = unsafe {\n mem::transmute(tmp)\n };\n\n let p = (self.x, self.y, ptr);\n\n self.x += 1;\n\n Some(p)\n }\n }\n}\n\n\/\/\/A trait for manipulating images.\npub trait GenericImage<P> {\n \/\/\/The width and height of this image.\n fn dimensions(&self) -> (u32, u32);\n\n \/\/\/The bounding rectangle of this image.\n fn bounds(&self) -> (u32, u32, u32, u32);\n\n \/\/\/Return the pixel located at (x, y)\n fn get_pixel(&self, x: u32, y: u32) -> P;\n\n \/\/\/Put a pixel at location (x, y)\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P);\n\n \/\/\/Return an Iterator over the pixels of this image.\n \/\/\/The iterator yields the coordinates of each pixel\n \/\/\/along with their value\n fn pixels(&self) -> Pixels<Self> {\n let (width, height) = self.dimensions();\n\n Pixels {\n image: self,\n x: 0,\n y: 0,\n width: width,\n height: height,\n }\n }\n}\n\n\/\/\/A trait for images that allow providing mutable references to pixels.\npub trait MutableRefImage<P>: GenericImage<P> {\n \/\/\/Return a mutable reference to the pixel located at (x, y)\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P;\n\n \/\/\/Return an Iterator over mutable pixels of this image.\n \/\/\/The iterator yields the coordinates of each pixel\n \/\/\/along with a mutable reference to them.\n fn mut_pixels(&mut self) -> MutPixels<Self> {\n let (width, height) = self.dimensions();\n\n MutPixels {\n image: 
self,\n x: 0,\n y: 0,\n width: width,\n height: height,\n }\n }\n}\n\n\/\/\/An Image whose pixels are contained within a vector\n#[deriving(Clone)]\npub struct ImageBuf<P> {\n pixels: Vec<P>,\n width: u32,\n height: u32,\n}\n\nimpl<T: Primitive, P: Pixel<T>> ImageBuf<P> {\n \/\/\/Construct a new ImageBuf with the specified width and height.\n pub fn new(width: u32, height: u32) -> ImageBuf<P> {\n let pixel: P = Default::default();\n let pixels = Vec::from_elem((width * height) as uint, pixel.clone());\n\n ImageBuf {\n pixels: pixels,\n width: width,\n height: height,\n }\n }\n\n \/\/\/Construct a new ImageBuf by repeated application of the supplied function.\n \/\/\/The arguments to the function are the pixel's x and y coordinates.\n pub fn from_fn(width: u32, height: u32, f: | u32, u32 | -> P) -> ImageBuf<P> {\n let mut pixels: Vec<P> = Vec::with_capacity((width * height) as uint);\n\n for y in range(0, height) {\n for x in range(0, width) {\n pixels.insert((y * width + x) as uint, f(x, y));\n }\n }\n\n ImageBuf::from_pixels(pixels, width, height)\n }\n\n \/\/\/Construct a new ImageBuf from a vector of pixels.\n pub fn from_pixels(pixels: Vec<P>, width: u32, height: u32) -> ImageBuf<P> {\n ImageBuf {\n pixels: pixels,\n width: width,\n height: height,\n }\n }\n\n \/\/\/Construct a new ImageBuf from a pixel.\n pub fn from_pixel(width: u32, height: u32, pixel: P) -> ImageBuf<P> {\n let buf = Vec::from_elem(width as uint * height as uint, pixel.clone());\n\n ImageBuf::from_pixels(buf, width, height)\n }\n\n \/\/\/Return an immutable reference to this image's pixel buffer\n pub fn pixelbuf(&self) -> & [P] {\n self.pixels.as_slice()\n }\n\n \/\/\/Return a mutable reference to this image's pixel buffer\n pub fn mut_pixelbuf(&mut self) -> &mut [P] {\n self.pixels.as_mut_slice()\n }\n}\n\nimpl<T: Primitive, P: Pixel<T> + Clone + Copy> GenericImage<P> for ImageBuf<P> {\n fn dimensions(&self) -> (u32, u32) {\n (self.width, self.height)\n }\n\n fn bounds(&self) -> (u32, u32, u32, u32) {\n (0, 0, self.width, self.height)\n }\n\n fn get_pixel(&self, x: u32, y: u32) -> P {\n let index = y * self.width + x;\n\n self.pixels[index as uint]\n }\n\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P) {\n let index = y * self.width + x;\n let buf = self.pixels.as_mut_slice();\n\n buf[index as uint] = pixel;\n }\n}\n\nimpl<T: Primitive, P: Pixel<T> + Clone + Copy> MutableRefImage<P> for ImageBuf<P> {\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P {\n let index = y * self.width + x;\n\n self.pixels.get_mut(index as uint)\n }\n}\n\nimpl<T: Primitive, P: Pixel<T>> Index<(u32, u32), P> for ImageBuf<P> {\n fn index(&self, coords: &(u32, u32)) -> &P {\n let &(x, y) = coords;\n let index = y * self.width + x;\n\n &self.pixels[index as uint]\n }\n}\n\n\/\/\/ A View into another image\npub struct SubImage <'a, I> {\n image: &'a mut I,\n xoffset: u32,\n yoffset: u32,\n xstride: u32,\n ystride: u32,\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> SubImage<'a, I> {\n \/\/\/Construct a new subimage\n pub fn new(image: &mut I, x: u32, y: u32, width: u32, height: u32) -> SubImage<I> {\n SubImage {\n image: image,\n xoffset: x,\n yoffset: y,\n xstride: width,\n ystride: height,\n }\n }\n\n \/\/\/Return a mutable reference to the wrapped image.\n pub fn mut_inner(&mut self) -> &mut I {\n &mut (*self.image)\n }\n\n \/\/\/Change the coordinates of this subimage.\n pub fn change_bounds(&mut self, x: u32, y: u32, width: u32, height: u32) {\n self.xoffset = x;\n self.yoffset = y;\n self.xstride = width;\n 
self.ystride = height;\n }\n\n \/\/\/Convert this subimage to an ImageBuf\n pub fn to_image(&self) -> ImageBuf<P> {\n let p: P = Default::default();\n let mut out = ImageBuf::from_pixel(self.xstride, self.ystride, p.clone());\n\n for y in range(0, self.ystride) {\n for x in range(0, self.xstride) {\n let p = self.get_pixel(x, y);\n out.put_pixel(x, y, p);\n }\n }\n\n out\n }\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> GenericImage<P> for SubImage<'a, I> {\n fn dimensions(&self) -> (u32, u32) {\n (self.xstride, self.ystride)\n }\n\n fn bounds(&self) -> (u32, u32, u32, u32) {\n (self.xoffset, self.yoffset, self.xstride, self.ystride)\n }\n\n fn get_pixel(&self, x: u32, y: u32) -> P {\n self.image.get_pixel(x + self.xoffset, y + self.yoffset)\n }\n\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P) {\n self.image.put_pixel(x + self.xoffset, y + self.yoffset, pixel)\n }\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: MutableRefImage<P>> MutableRefImage<P> for SubImage<'a, I> {\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P {\n self.image.get_mut_pixel(x + self.xoffset, y + self.yoffset)\n }\n}\n<commit_msg>Add ImageBuf::into_vec<commit_after>use std::mem;\nuse std::slice;\nuse std::default::Default;\n\nuse color;\nuse color:: {\n Pixel,\n ColorType\n};\n\n\/\/\/ An enumeration of Image Errors\n#[deriving(Show, PartialEq, Eq)]\npub enum ImageError {\n \/\/\/The Image is not formatted properly\n FormatError,\n\n \/\/\/The Image's dimensions are either too small or too large\n DimensionError,\n\n \/\/\/The Decoder does not support this image format\n UnsupportedError,\n\n \/\/\/The Decoder does not support this color type\n UnsupportedColor,\n\n \/\/\/Not enough data was provided to the Decoder\n \/\/\/to decode the image\n NotEnoughData,\n\n \/\/\/An I\/O Error occurred while decoding the image\n IoError,\n\n \/\/\/The end of the image has been reached\n ImageEnd\n}\n\npub type ImageResult<T> = Result<T, ImageError>;\n\n\/\/\/ An enumeration of supported image formats.\n\/\/\/ Not all formats support both encoding and decoding.\n#[deriving(PartialEq, Eq, Show)]\npub enum ImageFormat {\n \/\/\/ An Image in PNG Format\n PNG,\n\n \/\/\/ An Image in JPEG Format\n JPEG,\n\n \/\/\/ An Image in GIF Format\n GIF,\n\n \/\/\/ An Image in WEBP Format\n WEBP,\n\n \/\/\/ An Image in PPM Format\n PPM\n}\n\n\/\/\/ The trait that all decoders implement\npub trait ImageDecoder {\n \/\/\/Return a tuple containing the width and height of the image\n fn dimensions(&mut self) -> ImageResult<(u32, u32)>;\n\n \/\/\/Return the color type of the image e.g RGB(8) (8bit RGB)\n fn colortype(&mut self) -> ImageResult<ColorType>;\n\n \/\/\/Returns the length in bytes of one decoded row of the image\n fn row_len(&mut self) -> ImageResult<uint>;\n\n \/\/\/Read one row from the image into buf\n \/\/\/Returns the row index\n fn read_scanline(&mut self, buf: &mut [u8]) -> ImageResult<u32>;\n\n \/\/\/Decode the entire image and return it as a Vector\n fn read_image(&mut self) -> ImageResult<Vec<u8>>;\n\n \/\/\/Decode a specific region of the image, represented by the rectangle\n \/\/\/starting from ```x``` and ```y``` and having ```length``` and ```width```\n fn load_rect(&mut self, x: u32, y: u32, length: u32, width: u32) -> ImageResult<Vec<u8>> {\n let (w, h) = try!(self.dimensions());\n\n if length > h || width > w || x > w || y > h {\n return Err(DimensionError)\n }\n\n let c = try!(self.colortype());\n\n let bpp = color::bits_per_pixel(c) \/ 8;\n\n let rowlen = try!(self.row_len());\n\n let mut buf = 
Vec::from_elem(length as uint * width as uint * bpp, 0u8);\n let mut tmp = Vec::from_elem(rowlen, 0u8);\n\n loop {\n let row = try!(self.read_scanline(tmp.as_mut_slice()));\n\n if row - 1 == y {\n break\n }\n }\n\n for i in range(0, length as uint) {\n {\n let from = tmp.slice_from(x as uint * bpp)\n .slice_to(width as uint * bpp);\n\n let to = buf.mut_slice_from(i * width as uint * bpp)\n .mut_slice_to(width as uint * bpp);\n\n slice::bytes::copy_memory(to, from);\n }\n\n let _ = try!(self.read_scanline(tmp.as_mut_slice()));\n }\n\n Ok(buf)\n }\n}\n\n\/\/\/ Immutable pixel iterator\npub struct Pixels<'a, I> {\n image: &'a I,\n x: u32,\n y: u32,\n width: u32,\n height: u32\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> Iterator<(u32, u32, P)> for Pixels<'a, I> {\n fn next(&mut self) -> Option<(u32, u32, P)> {\n if self.x >= self.width {\n self.x = 0;\n self.y += 1;\n }\n\n if self.y >= self.height {\n None\n } else {\n let pixel = self.image.get_pixel(self.x, self.y);\n let p = (self.x, self.y, pixel);\n\n self.x += 1;\n\n Some(p)\n }\n }\n}\n\n\/\/\/ Mutable pixel iterator\npub struct MutPixels<'a, I> {\n image: &'a mut I,\n x: u32,\n y: u32,\n width: u32,\n height: u32\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: MutableRefImage<P>> Iterator<(u32, u32, &'a mut P)> for MutPixels<'a, I> {\n fn next(&mut self) -> Option<(u32, u32, &'a mut P)> {\n if self.x >= self.width {\n self.x = 0;\n self.y += 1;\n }\n\n if self.y >= self.height {\n None\n } else {\n let tmp = self.image.get_mut_pixel(self.x, self.y);\n\n \/\/error: lifetime of `self` is too short to guarantee its contents\n \/\/ can be safely reborrowed...\n let ptr = unsafe {\n mem::transmute(tmp)\n };\n\n let p = (self.x, self.y, ptr);\n\n self.x += 1;\n\n Some(p)\n }\n }\n}\n\n\/\/\/A trait for manipulating images.\npub trait GenericImage<P> {\n \/\/\/The width and height of this image.\n fn dimensions(&self) -> (u32, u32);\n\n \/\/\/The bounding rectangle of this image.\n fn bounds(&self) -> (u32, u32, u32, u32);\n\n \/\/\/Return the pixel located at (x, y)\n fn get_pixel(&self, x: u32, y: u32) -> P;\n\n \/\/\/Put a pixel at location (x, y)\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P);\n\n \/\/\/Return an Iterator over the pixels of this image.\n \/\/\/The iterator yields the coordinates of each pixel\n \/\/\/along with their value\n fn pixels(&self) -> Pixels<Self> {\n let (width, height) = self.dimensions();\n\n Pixels {\n image: self,\n x: 0,\n y: 0,\n width: width,\n height: height,\n }\n }\n}\n\n\/\/\/A trait for images that allow providing mutable references to pixels.\npub trait MutableRefImage<P>: GenericImage<P> {\n \/\/\/Return a mutable reference to the pixel located at (x, y)\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P;\n\n \/\/\/Return an Iterator over mutable pixels of this image.\n \/\/\/The iterator yields the coordinates of each pixel\n \/\/\/along with a mutable reference to them.\n fn mut_pixels(&mut self) -> MutPixels<Self> {\n let (width, height) = self.dimensions();\n\n MutPixels {\n image: self,\n x: 0,\n y: 0,\n width: width,\n height: height,\n }\n }\n}\n\n\/\/\/An Image whose pixels are contained within a vector\n#[deriving(Clone)]\npub struct ImageBuf<P> {\n pixels: Vec<P>,\n width: u32,\n height: u32,\n}\n\nimpl<T: Primitive, P: Pixel<T>> ImageBuf<P> {\n \/\/\/Construct a new ImageBuf with the specified width and height.\n pub fn new(width: u32, height: u32) -> ImageBuf<P> {\n let pixel: P = Default::default();\n let pixels = Vec::from_elem((width * height) as uint, 
pixel.clone());\n\n ImageBuf {\n pixels: pixels,\n width: width,\n height: height,\n }\n }\n\n \/\/\/Construct a new ImageBuf by repeated application of the supplied function.\n \/\/\/The arguments to the function are the pixel's x and y coordinates.\n pub fn from_fn(width: u32, height: u32, f: | u32, u32 | -> P) -> ImageBuf<P> {\n let mut pixels: Vec<P> = Vec::with_capacity((width * height) as uint);\n\n for y in range(0, height) {\n for x in range(0, width) {\n pixels.insert((y * width + x) as uint, f(x, y));\n }\n }\n\n ImageBuf::from_pixels(pixels, width, height)\n }\n\n \/\/\/Construct a new ImageBuf from a vector of pixels.\n pub fn from_pixels(pixels: Vec<P>, width: u32, height: u32) -> ImageBuf<P> {\n ImageBuf {\n pixels: pixels,\n width: width,\n height: height,\n }\n }\n\n \/\/\/Construct a new ImageBuf from a pixel.\n pub fn from_pixel(width: u32, height: u32, pixel: P) -> ImageBuf<P> {\n let buf = Vec::from_elem(width as uint * height as uint, pixel.clone());\n\n ImageBuf::from_pixels(buf, width, height)\n }\n\n \/\/\/Return an immutable reference to this image's pixel buffer\n pub fn pixelbuf(&self) -> & [P] {\n self.pixels.as_slice()\n }\n\n \/\/\/Return a mutable reference to this image's pixel buffer\n pub fn mut_pixelbuf(&mut self) -> &mut [P] {\n self.pixels.as_mut_slice()\n }\n\n \/\/\/Destroy this ImageBuf, returning the internal vector\n pub fn into_vec(self) -> Vec<P> {\n self.pixels\n }\n}\n\nimpl<T: Primitive, P: Pixel<T> + Clone + Copy> GenericImage<P> for ImageBuf<P> {\n fn dimensions(&self) -> (u32, u32) {\n (self.width, self.height)\n }\n\n fn bounds(&self) -> (u32, u32, u32, u32) {\n (0, 0, self.width, self.height)\n }\n\n fn get_pixel(&self, x: u32, y: u32) -> P {\n let index = y * self.width + x;\n\n self.pixels[index as uint]\n }\n\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P) {\n let index = y * self.width + x;\n let buf = self.pixels.as_mut_slice();\n\n buf[index as uint] = pixel;\n }\n}\n\nimpl<T: Primitive, P: Pixel<T> + Clone + Copy> MutableRefImage<P> for ImageBuf<P> {\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P {\n let index = y * self.width + x;\n\n self.pixels.get_mut(index as uint)\n }\n}\n\nimpl<T: Primitive, P: Pixel<T>> Index<(u32, u32), P> for ImageBuf<P> {\n fn index(&self, coords: &(u32, u32)) -> &P {\n let &(x, y) = coords;\n let index = y * self.width + x;\n\n &self.pixels[index as uint]\n }\n}\n\n\/\/\/ A View into another image\npub struct SubImage <'a, I> {\n image: &'a mut I,\n xoffset: u32,\n yoffset: u32,\n xstride: u32,\n ystride: u32,\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> SubImage<'a, I> {\n \/\/\/Construct a new subimage\n pub fn new(image: &mut I, x: u32, y: u32, width: u32, height: u32) -> SubImage<I> {\n SubImage {\n image: image,\n xoffset: x,\n yoffset: y,\n xstride: width,\n ystride: height,\n }\n }\n\n \/\/\/Return a mutable reference to the wrapped image.\n pub fn mut_inner(&mut self) -> &mut I {\n &mut (*self.image)\n }\n\n \/\/\/Change the coordinates of this subimage.\n pub fn change_bounds(&mut self, x: u32, y: u32, width: u32, height: u32) {\n self.xoffset = x;\n self.yoffset = y;\n self.xstride = width;\n self.ystride = height;\n }\n\n \/\/\/Convert this subimage to an ImageBuf\n pub fn to_image(&self) -> ImageBuf<P> {\n let p: P = Default::default();\n let mut out = ImageBuf::from_pixel(self.xstride, self.ystride, p.clone());\n\n for y in range(0, self.ystride) {\n for x in range(0, self.xstride) {\n let p = self.get_pixel(x, y);\n out.put_pixel(x, y, p);\n }\n }\n\n out\n 
}\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: GenericImage<P>> GenericImage<P> for SubImage<'a, I> {\n fn dimensions(&self) -> (u32, u32) {\n (self.xstride, self.ystride)\n }\n\n fn bounds(&self) -> (u32, u32, u32, u32) {\n (self.xoffset, self.yoffset, self.xstride, self.ystride)\n }\n\n fn get_pixel(&self, x: u32, y: u32) -> P {\n self.image.get_pixel(x + self.xoffset, y + self.yoffset)\n }\n\n fn put_pixel(&mut self, x: u32, y: u32, pixel: P) {\n self.image.put_pixel(x + self.xoffset, y + self.yoffset, pixel)\n }\n}\n\nimpl<'a, T: Primitive, P: Pixel<T>, I: MutableRefImage<P>> MutableRefImage<P> for SubImage<'a, I> {\n fn get_mut_pixel(&mut self, x: u32, y: u32) -> &mut P {\n self.image.get_mut_pixel(x + self.xoffset, y + self.yoffset)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Codeforces 740A<commit_after>use std::io::stdin;\nuse std::cmp::min;\n\nfn main() {\n let mut input_line = String::new();\n stdin().read_line(&mut input_line);\n\n let mut input_iter = input_line.\n split_whitespace().\n map(|x| x.parse::<u64>().unwrap());\n\n let owned = input_iter.next().unwrap();\n let price1 = input_iter.next().unwrap();\n let price2 = input_iter.next().unwrap();\n let price3 = input_iter.next().unwrap();\n\n if owned % 4 == 0 {\n \/\/ Already meet criteria\n println!(\"0\");\n return;\n }\n\n let purchase = 4 - owned % 4;\n let result = match purchase {\n 1 => min(price1, min(price3 * 3, price2 + price3)),\n 2 => min(price2, min(price1 * 2, price3 * 2)),\n 3 => min(price3, min(price1 * 3, price2 + price1)),\n _ => u64::max_value()\n };\n\n println!(\"{}\", result);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor: refactor `Param.process` method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add examples.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>First, not working, version of unit-test-server.rs<commit_after>extern crate libmodbus_rs;\n\nuse libmodbus_rs::{Modbus, ModbusServer};\nuse libmodbus_rs::errors::*; \/\/ for the `Result<T>` type\nuse std::env;\n\n\nenum Backend {\n TCP,\n TCPPI,\n RTU,\n}\n\nfn run() -> Result<()> {\n let backend;\n let modbus: Modbus;\n\n let args: Vec<_> = env::args().collect();\n if args.len() > 1 {\n match args[1].to_lowercase().as_ref() {\n \"tcp\" => backend = Backend::TCP,\n \"tcppi\" => backend = Backend::TCPPI,\n \"rtu\" => backend = Backend::RTU,\n _ => {\n println!(\"Usage:\\n {} [tcp|tcppi|rtu] - Modbus server for unit testing\\n\\n\", args[0]);\n std::process::exit(-1);\n }\n }\n } else {\n \/\/ By default\n backend = Backend::TCP;\n }\n\n let mut modbus = match backend {\n Backend::TCP => {\n use libmodbus_rs::ModbusTCP;\n\n Modbus::new_tcp(\"127.0.0.1\", 1502)\n }\n Backend::TCPPI => {\n use libmodbus_rs::ModbusTCPPI;\n\n Modbus::new_tcp_pi(\"::0\", \"1502\")\n }\n Backend::RTU => {\n use libmodbus_rs::ModbusRTU;\n\n Modbus::new_rtu(\"\/dev\/ttyUSB0\", 115200, 'N', 8, 1)\n }\n }?;\n\n let header_lenght = modbus.get_header_length();\n println!(\"{:?}\", header_lenght);\n\n let socket = modbus.get_socket();\n println!(\"{:?}\", socket);\n\n Ok(())\n}\n\n\nfn main() {\n if let Err(ref err) = run() {\n println!(\"Error: {}\", err);\n\n std::process::exit(1)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Handles invoking external binaries.\n\/\/ This module assumes that, for a given machine, there is only one place\n\/\/ where the desired executable might be installed. It expects the engine\n\/\/ to identify that place at its initialization by invoking verify_binaries(),\n\/\/ and to exit immediately if verify_binaries() return an error. If this\n\/\/ protocol is followed then when any command is executed the unique absolute\n\/\/ path of the binary for this machine will already have been identified.\n\/\/ However stratisd may run for a while and it is possible for the binary\n\/\/ to be caused to be uninstalled while stratisd is being run. Therefore,\n\/\/ the existence of the file is checked before the command is invoked, and\n\/\/ an explicit error is returned if the executable can not be found.\n\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\nuse std::process::Command;\n\nuse uuid::Uuid;\n\nuse stratis::{StratisError, StratisResult};\n\n\/\/\/ Find the binary with the given name by looking in likely locations.\n\/\/\/ Return None if no binary was found.\n\/\/\/ Search an explicit list of directories rather than the user's PATH\n\/\/\/ environment variable. stratisd may be running when there is no PATH\n\/\/\/ variable set.\nfn find_binary(name: &str) -> Option<PathBuf> {\n [\"\/usr\/sbin\", \"\/sbin\", \"\/usr\/bin\", \"\/bin\"]\n .iter()\n .map(|pre| [pre, name].iter().collect::<PathBuf>())\n .find(|path| path.exists())\n}\n\n\/\/ These are the external binaries that stratisd relies on.\n\/\/ Any change in this list requires a corresponding change to BINARIES,\n\/\/ and vice-versa.\nconst MKFS_XFS: &str = \"mkfs.xfs\";\nconst THIN_CHECK: &str = \"thin_check\";\nconst THIN_REPAIR: &str = \"thin_repair\";\nconst XFS_DB: &str = \"xfs_db\";\nconst XFS_GROWFS: &str = \"xfs_growfs\";\n\nlazy_static! {\n static ref BINARIES: HashMap<String, Option<PathBuf>> = [\n (MKFS_XFS.to_string(), find_binary(MKFS_XFS)),\n (THIN_CHECK.to_string(), find_binary(THIN_CHECK)),\n (THIN_REPAIR.to_string(), find_binary(THIN_REPAIR)),\n (XFS_DB.to_string(), find_binary(XFS_DB)),\n (XFS_GROWFS.to_string(), find_binary(XFS_GROWFS)),\n ].iter()\n .cloned()\n .collect();\n}\n\n\/\/\/ Verify that all binaries that the engine might invoke are available at some\n\/\/\/ path. Return an error if any are missing. Required to be called on engine\n\/\/\/ initialization.\npub fn verify_binaries() -> StratisResult<()> {\n match BINARIES.iter().find(|&(_, ref path)| path.is_none()) {\n None => Ok(()),\n Some((ref name, _)) => Err(StratisError::Error(format!(\n \"Unable to find absolute path for \\\"{}\\\"\",\n name\n ))),\n }\n}\n\n\/\/\/ Invoke the specified command. 
Return an error if invoking the command\n\/\/\/ fails or if the command itself fails.\nfn execute_cmd(cmd: &mut Command) -> StratisResult<()> {\n match cmd.output() {\n Err(err) => Err(StratisError::Error(format!(\n \"Failed to execute command {:?}, err: {:?}\",\n cmd, err\n ))),\n Ok(result) => {\n if result.status.success() {\n Ok(())\n } else {\n let std_out_txt = String::from_utf8_lossy(&result.stdout);\n let std_err_txt = String::from_utf8_lossy(&result.stderr);\n let err_msg = format!(\n \"Command failed: cmd: {:?}, stdout: {} stderr: {}\",\n cmd, std_out_txt, std_err_txt\n );\n Err(StratisError::Error(err_msg))\n }\n }\n }\n}\n\n\/\/\/ Get an absolute path for the executable with the given name.\n\/\/\/ Precondition: verify_binaries() has already been invoked.\nfn get_executable(name: &str) -> &Path {\n BINARIES\n .get(name)\n .expect(\"name arguments are all constants defined with BINARIES, lookup can not fail\")\n .as_ref()\n .expect(\"verify_binaries() was previously called and returned no error\")\n}\n\n\/\/\/ Create a filesystem on devnode.\npub fn create_fs(devnode: &Path, uuid: Uuid) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(MKFS_XFS).as_os_str())\n .arg(\"-f\")\n .arg(\"-q\")\n .arg(&devnode)\n .arg(\"-m\")\n .arg(format!(\"uuid={}\", uuid)),\n )\n}\n\n\/\/\/ Use the xfs_growfs command to expand a filesystem mounted at the given\n\/\/\/ mount point.\npub fn xfs_growfs(mount_point: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(XFS_GROWFS).as_os_str())\n .arg(mount_point)\n .arg(\"-d\"),\n )\n}\n\n\/\/\/ Set a new UUID for filesystem on the devnode.\npub fn set_uuid(devnode: &Path, uuid: Uuid) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(XFS_DB).as_os_str())\n .arg(format!(\"-c uuid {}\", uuid))\n .arg(&devnode),\n )\n}\n\n\/\/\/ Call thin_check on a thinpool\npub fn thin_check(devnode: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(THIN_CHECK).as_os_str())\n .arg(\"-q\")\n .arg(devnode),\n )\n}\n\n\/\/\/ Call thin_repair on a thinpool\npub fn thin_repair(meta_dev: &Path, new_meta_dev: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(THIN_REPAIR).as_os_str())\n .arg(\"-i\")\n .arg(meta_dev)\n .arg(\"-o\")\n .arg(new_meta_dev),\n )\n}\n\n\/\/\/ Call udevadm settle\n#[cfg(test)]\npub fn udev_settle() -> StratisResult<()> {\n execute_cmd(Command::new(\"udevadm\").arg(\"settle\"))\n}\n\n#[cfg(test)]\npub fn create_ext3_fs(devnode: &Path) -> StratisResult<()> {\n execute_cmd(Command::new(\"mkfs.ext3\").arg(&devnode))\n}\n<commit_msg>Use \"-x\", expert flag<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Handles invoking external binaries.\n\/\/ This module assumes that, for a given machine, there is only one place\n\/\/ where the desired executable might be installed. It expects the engine\n\/\/ to identify that place at its initialization by invoking verify_binaries(),\n\/\/ and to exit immediately if verify_binaries() return an error. If this\n\/\/ protocol is followed then when any command is executed the unique absolute\n\/\/ path of the binary for this machine will already have been identified.\n\/\/ However stratisd may run for a while and it is possible for the binary\n\/\/ to be caused to be uninstalled while stratisd is being run. 
Therefore,\n\/\/ the existence of the file is checked before the command is invoked, and\n\/\/ an explicit error is returned if the executable can not be found.\n\nuse std::collections::HashMap;\nuse std::path::{Path, PathBuf};\nuse std::process::Command;\n\nuse uuid::Uuid;\n\nuse stratis::{StratisError, StratisResult};\n\n\/\/\/ Find the binary with the given name by looking in likely locations.\n\/\/\/ Return None if no binary was found.\n\/\/\/ Search an explicit list of directories rather than the user's PATH\n\/\/\/ environment variable. stratisd may be running when there is no PATH\n\/\/\/ variable set.\nfn find_binary(name: &str) -> Option<PathBuf> {\n [\"\/usr\/sbin\", \"\/sbin\", \"\/usr\/bin\", \"\/bin\"]\n .iter()\n .map(|pre| [pre, name].iter().collect::<PathBuf>())\n .find(|path| path.exists())\n}\n\n\/\/ These are the external binaries that stratisd relies on.\n\/\/ Any change in this list requires a corresponding change to BINARIES,\n\/\/ and vice-versa.\nconst MKFS_XFS: &str = \"mkfs.xfs\";\nconst THIN_CHECK: &str = \"thin_check\";\nconst THIN_REPAIR: &str = \"thin_repair\";\nconst XFS_DB: &str = \"xfs_db\";\nconst XFS_GROWFS: &str = \"xfs_growfs\";\n\nlazy_static! {\n static ref BINARIES: HashMap<String, Option<PathBuf>> = [\n (MKFS_XFS.to_string(), find_binary(MKFS_XFS)),\n (THIN_CHECK.to_string(), find_binary(THIN_CHECK)),\n (THIN_REPAIR.to_string(), find_binary(THIN_REPAIR)),\n (XFS_DB.to_string(), find_binary(XFS_DB)),\n (XFS_GROWFS.to_string(), find_binary(XFS_GROWFS)),\n ].iter()\n .cloned()\n .collect();\n}\n\n\/\/\/ Verify that all binaries that the engine might invoke are available at some\n\/\/\/ path. Return an error if any are missing. Required to be called on engine\n\/\/\/ initialization.\npub fn verify_binaries() -> StratisResult<()> {\n match BINARIES.iter().find(|&(_, ref path)| path.is_none()) {\n None => Ok(()),\n Some((ref name, _)) => Err(StratisError::Error(format!(\n \"Unable to find absolute path for \\\"{}\\\"\",\n name\n ))),\n }\n}\n\n\/\/\/ Invoke the specified command. 
Return an error if invoking the command\n\/\/\/ fails or if the command itself fails.\nfn execute_cmd(cmd: &mut Command) -> StratisResult<()> {\n match cmd.output() {\n Err(err) => Err(StratisError::Error(format!(\n \"Failed to execute command {:?}, err: {:?}\",\n cmd, err\n ))),\n Ok(result) => {\n if result.status.success() {\n Ok(())\n } else {\n let std_out_txt = String::from_utf8_lossy(&result.stdout);\n let std_err_txt = String::from_utf8_lossy(&result.stderr);\n let err_msg = format!(\n \"Command failed: cmd: {:?}, stdout: {} stderr: {}\",\n cmd, std_out_txt, std_err_txt\n );\n Err(StratisError::Error(err_msg))\n }\n }\n }\n}\n\n\/\/\/ Get an absolute path for the executable with the given name.\n\/\/\/ Precondition: verify_binaries() has already been invoked.\nfn get_executable(name: &str) -> &Path {\n BINARIES\n .get(name)\n .expect(\"name arguments are all constants defined with BINARIES, lookup can not fail\")\n .as_ref()\n .expect(\"verify_binaries() was previously called and returned no error\")\n}\n\n\/\/\/ Create a filesystem on devnode.\npub fn create_fs(devnode: &Path, uuid: Uuid) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(MKFS_XFS).as_os_str())\n .arg(\"-f\")\n .arg(\"-q\")\n .arg(&devnode)\n .arg(\"-m\")\n .arg(format!(\"uuid={}\", uuid)),\n )\n}\n\n\/\/\/ Use the xfs_growfs command to expand a filesystem mounted at the given\n\/\/\/ mount point.\npub fn xfs_growfs(mount_point: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(XFS_GROWFS).as_os_str())\n .arg(mount_point)\n .arg(\"-d\"),\n )\n}\n\n\/\/\/ Set a new UUID for filesystem on the devnode.\npub fn set_uuid(devnode: &Path, uuid: Uuid) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(XFS_DB).as_os_str())\n .arg(\"-x\")\n .arg(format!(\"-c uuid {}\", uuid))\n .arg(&devnode),\n )\n}\n\n\/\/\/ Call thin_check on a thinpool\npub fn thin_check(devnode: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(THIN_CHECK).as_os_str())\n .arg(\"-q\")\n .arg(devnode),\n )\n}\n\n\/\/\/ Call thin_repair on a thinpool\npub fn thin_repair(meta_dev: &Path, new_meta_dev: &Path) -> StratisResult<()> {\n execute_cmd(\n Command::new(get_executable(THIN_REPAIR).as_os_str())\n .arg(\"-i\")\n .arg(meta_dev)\n .arg(\"-o\")\n .arg(new_meta_dev),\n )\n}\n\n\/\/\/ Call udevadm settle\n#[cfg(test)]\npub fn udev_settle() -> StratisResult<()> {\n execute_cmd(Command::new(\"udevadm\").arg(\"settle\"))\n}\n\n#[cfg(test)]\npub fn create_ext3_fs(devnode: &Path) -> StratisResult<()> {\n execute_cmd(Command::new(\"mkfs.ext3\").arg(&devnode))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>07 - if else<commit_after>fn main() {\n let n = 5i;\n\n if n < 0 {\n print!(\"{} is negative\", n);\n } else if n > 0 {\n print!(\"{} is positive\", n);\n } else {\n print!(\"{} is zero\", n);\n }\n\n let big_n =\n if n < 10 && n > -10 {\n println!(\", and is a small number, increase ten-fold\");\n\n \/\/ This expression returns an `int`\n 10 * n\n } else {\n println!(\", and is a big number, reduce by two\");\n\n \/\/ This expression must return an `int` as well\n n \/ 2\n \/\/ Try suppressing this expression with a semicolon\n };\n \/\/ Don't forget to put a semicolon here! 
All the `let` bindings need it\n\n println!(\"{} -> {}\", n, big_n);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #103<commit_after>#[link(name = \"prob0103\", vers = \"0.0\")];\n#[crate_type = \"lib\"];\n\nextern mod extra;\nextern mod common;\n\nuse std::{uint, vec};\nuse extra::priority_queue::PriorityQueue;\nuse common::problem::Problem;\n\npub static problem: Problem<'static> = Problem {\n id: 103,\n answer: \"20313839404245\",\n solver: solve\n};\n\nstruct SSSElem {\n avg: float,\n sss: ~[uint],\n sums: ~[uint]\n}\n\nimpl Ord for SSSElem {\n #[inline(always)]\n fn lt(&self, other: &SSSElem) -> bool { self.avg > other.avg }\n #[inline(always)]\n fn le(&self, other: &SSSElem) -> bool { self.avg >= other.avg }\n #[inline(always)]\n fn gt(&self, other: &SSSElem) -> bool { self.avg < other.avg }\n #[inline(always)]\n fn ge(&self, other: &SSSElem) -> bool { self.avg <= other.avg }\n}\n\nimpl SSSElem {\n #[inline]\n pub fn new_pair(a: uint, b: uint) -> SSSElem {\n assert!(a < b);\n return SSSElem {\n avg: ((a + b) as float) \/ 2f,\n sss: ~[a, b],\n sums: ~[0, a, b, a + b]\n };\n }\n\n pub fn add_num(&self, n: uint) -> Option<SSSElem> {\n let mut i = 0;\n let mut j = 0;\n let len = self.sums.len();\n let mut sums = vec::with_capacity(len * 2);\n\n while i < len {\n assert!(j <= i);\n\n match self.sums[i].cmp(&(self.sums[j] + n)) {\n Equal => { return None; }\n Less => {\n sums.push(self.sums[i]);\n i += 1;\n }\n Greater => {\n sums.push(self.sums[j] + n);\n j += 1;\n }\n }\n }\n\n while j < len {\n sums.push(self.sums[j] + n);\n j += 1;\n }\n\n let avg = (self.avg * (len as float) + n as float) \/ ((len as float) + 1f);\n let sss = self.sss + [n];\n return Some(SSSElem { avg: avg, sss: sss, sums: sums });\n }\n\n \/\/ 6: [a, b, c, d, e, f] => (a + b + c + d) - (e + f) - 1\n \/\/ 5: [a, b, c, d, e] => (a + b + c) - e - 1\n \/\/ 4: [a, b, c, d] => (a + b + c) - d - 1\n \/\/ 3: [a, b, c] => (a + b) - 1\n \/\/ 2: [a, b] => (a + b) - 1\n #[inline]\n pub fn max_addable(&self) -> uint {\n let len = self.sss.len();\n let add_len = len \/ 2 + 1;\n let sub_len = len \/ 2 - 1;\n \n let add = self.sss.slice(0, add_len).iter().fold(0, |a, &b| a + b);\n let sub = self.sss.slice(len - sub_len, len).iter().fold(0, |a, &b| a + b);\n return add - sub - 1;\n }\n\n #[inline(always)]\n pub fn each_next(&self, f: &fn(SSSElem) -> bool) -> bool {\n if self.sss.len() == 2 {\n let (a, b) = (self.sss[0], self.sss[1]);\n if !f(SSSElem::new_pair(a, b + 1)) { return false; }\n if a == b - 1 && !f(SSSElem::new_pair(a + 1, b + 1)) { return false; }\n }\n\n for uint::range(self.sss.last() + 1, self.max_addable() + 1) |n| {\n match self.add_num(n) {\n Some(x) => {\n if !f(x) { return false; }\n }\n None => {}\n }\n }\n return true;\n }\n}\n\n#[inline(always)]\npub fn each_sss(f: &fn(&SSSElem) -> bool) -> bool {\n let mut pq = PriorityQueue::new();\n pq.push(SSSElem::new_pair(1, 2));\n while !pq.is_empty() {\n let e = pq.pop();\n if !f(&e) { return false; }\n for e.each_next |next| {\n pq.push(next);\n }\n }\n return true;\n}\n\n\/\/ (a, b) => SSS if a > b\n\/\/ (a, b, c) => SSS if a > b > c && a + b > c\n\/\/ (a, b, c, d) +> SSS if a > b > c > d && a + b > d && \npub fn solve() -> ~str {\n for each_sss |&sss| {\n if sss.sss.len() == 7 {\n return sss.sss.map(|&n| n.to_str()).concat();\n }\n }\n fail!();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse session::config::Options;\n\nuse std::fs;\nuse std::io::{self, StderrLock, Write};\nuse std::time::{Duration, Instant};\n\nmacro_rules! define_categories {\n ($($name:ident,)*) => {\n #[derive(Clone, Copy, Debug, PartialEq, Eq)]\n pub enum ProfileCategory {\n $($name),*\n }\n\n #[allow(nonstandard_style)]\n struct Categories<T> {\n $($name: T),*\n }\n\n impl<T: Default> Categories<T> {\n fn new() -> Categories<T> {\n Categories {\n $($name: T::default()),*\n }\n }\n }\n\n impl<T> Categories<T> {\n fn get(&self, category: ProfileCategory) -> &T {\n match category {\n $(ProfileCategory::$name => &self.$name),*\n }\n }\n\n fn set(&mut self, category: ProfileCategory, value: T) {\n match category {\n $(ProfileCategory::$name => self.$name = value),*\n }\n }\n }\n\n struct CategoryData {\n times: Categories<u64>,\n query_counts: Categories<(u64, u64)>,\n }\n\n impl CategoryData {\n fn new() -> CategoryData {\n CategoryData {\n times: Categories::new(),\n query_counts: Categories::new(),\n }\n }\n\n fn print(&self, lock: &mut StderrLock<'_>) {\n writeln!(lock, \"| Phase | Time (ms) | Queries | Hits (%) |\")\n .unwrap();\n writeln!(lock, \"| ---------------- | -------------- | -------------- | -------- |\")\n .unwrap();\n\n $(\n let (hits, total) = self.query_counts.$name;\n let (hits, total) = if total > 0 {\n (format!(\"{:.2}\",\n (((hits as f32) \/ (total as f32)) * 100.0)), total.to_string())\n } else {\n (String::new(), String::new())\n };\n\n writeln!(\n lock,\n \"| {0: <16} | {1: <14} | {2: <14} | {3: <8} |\",\n stringify!($name),\n self.times.$name \/ 1_000_000,\n total,\n hits\n ).unwrap();\n )*\n }\n\n fn json(&self) -> String {\n let mut json = String::from(\"[\");\n\n $(\n let (hits, total) = self.query_counts.$name;\n\n \/\/normalize hits to 0%\n let hit_percent =\n if total > 0 {\n ((hits as f32) \/ (total as f32)) * 100.0\n } else {\n 0.0\n };\n\n json.push_str(&format!(\n \"{{ \\\"category\\\": \\\"{}\\\", \\\"time_ms\\\": {},\\\n \\\"query_count\\\": {}, \\\"query_hits\\\": {} }},\",\n stringify!($name),\n self.times.$name \/ 1_000_000,\n total,\n format!(\"{:.2}\", hit_percent)\n ));\n )*\n\n \/\/remove the trailing ',' character\n json.pop();\n\n json.push(']');\n\n json\n }\n }\n }\n}\n\ndefine_categories! 
{\n Parsing,\n Expansion,\n TypeChecking,\n BorrowChecking,\n Codegen,\n Linking,\n Other,\n}\n\npub struct SelfProfiler {\n timer_stack: Vec<ProfileCategory>,\n data: CategoryData,\n current_timer: Instant,\n}\n\nimpl SelfProfiler {\n pub fn new() -> SelfProfiler {\n let mut profiler = SelfProfiler {\n timer_stack: Vec::new(),\n data: CategoryData::new(),\n current_timer: Instant::now(),\n };\n\n profiler.start_activity(ProfileCategory::Other);\n\n profiler\n }\n\n pub fn start_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.last().cloned() {\n None => {\n self.current_timer = Instant::now();\n },\n Some(current_category) if current_category == category => {\n \/\/since the current category is the same as the new activity's category,\n \/\/we don't need to do anything with the timer, we just need to push it on the stack\n }\n Some(current_category) => {\n let elapsed = self.stop_timer();\n\n \/\/record the current category's time\n let new_time = self.data.times.get(current_category) + elapsed;\n self.data.times.set(current_category, new_time);\n }\n }\n\n \/\/push the new category\n self.timer_stack.push(category);\n }\n\n pub fn record_query(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits, total + 1));\n }\n\n pub fn record_query_hit(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits + 1, total));\n }\n\n pub fn end_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.pop() {\n None => bug!(\"end_activity() was called but there was no running activity\"),\n Some(c) =>\n assert!(\n c == category,\n \"end_activity() was called but a different activity was running\"),\n }\n\n \/\/check if the new running timer is in the same category as this one\n \/\/if it is, we don't need to do anything\n if let Some(c) = self.timer_stack.last() {\n if *c == category {\n return;\n }\n }\n\n \/\/the new timer is different than the previous,\n \/\/so record the elapsed time and start a new timer\n let elapsed = self.stop_timer();\n let new_time = self.data.times.get(category) + elapsed;\n self.data.times.set(category, new_time);\n }\n\n fn stop_timer(&mut self) -> u64 {\n let elapsed = if cfg!(windows) {\n \/\/ On Windows, timers don't always appear to be monotonic (see #51648)\n \/\/ which can lead to panics when calculating elapsed time.\n \/\/ Work around this by testing to see if the current time is less than\n \/\/ our recorded time, and if it is, just returning 0.\n let now = Instant::now();\n if self.current_timer >= now {\n Duration::new(0, 0)\n } else {\n self.current_timer.elapsed()\n }\n } else {\n self.current_timer.elapsed()\n };\n\n self.current_timer = Instant::now();\n\n (elapsed.as_secs() * 1_000_000_000) + (elapsed.subsec_nanos() as u64)\n }\n\n pub fn print_results(&mut self, opts: &Options) {\n self.end_activity(ProfileCategory::Other);\n\n assert!(\n self.timer_stack.is_empty(),\n \"there were timers running when print_results() was called\");\n\n let out = io::stderr();\n let mut lock = out.lock();\n\n let crate_name =\n opts.crate_name\n .as_ref()\n .map(|n| format!(\" for {}\", n))\n .unwrap_or_default();\n\n writeln!(lock, \"Self profiling results{}:\", crate_name).unwrap();\n writeln!(lock).unwrap();\n\n self.data.print(&mut lock);\n\n writeln!(lock).unwrap();\n writeln!(lock, \"Optimization level: {:?}\", opts.optimize).unwrap();\n\n let 
incremental = if opts.incremental.is_some() { \"on\" } else { \"off\" };\n writeln!(lock, \"Incremental: {}\", incremental).unwrap();\n }\n\n pub fn save_results(&self, opts: &Options) {\n let category_data = self.data.json();\n let compilation_options =\n format!(\"{{ \\\"optimization_level\\\": \\\"{:?}\\\", \\\"incremental\\\": {} }}\",\n opts.optimize,\n if opts.incremental.is_some() { \"true\" } else { \"false\" });\n\n let json = format!(\"{{ \\\"category_data\\\": {}, \\\"compilation_options\\\": {} }}\",\n category_data,\n compilation_options);\n\n fs::write(\"self_profiler_results.json\", json).unwrap();\n }\n}\n<commit_msg>Rollup merge of #56702 - wesleywiser:calc_total_time_stats, r=michaelwoerister<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse session::config::Options;\n\nuse std::fs;\nuse std::io::{self, StderrLock, Write};\nuse std::time::{Duration, Instant};\n\nmacro_rules! define_categories {\n ($($name:ident,)*) => {\n #[derive(Clone, Copy, Debug, PartialEq, Eq)]\n pub enum ProfileCategory {\n $($name),*\n }\n\n #[allow(nonstandard_style)]\n struct Categories<T> {\n $($name: T),*\n }\n\n impl<T: Default> Categories<T> {\n fn new() -> Categories<T> {\n Categories {\n $($name: T::default()),*\n }\n }\n }\n\n impl<T> Categories<T> {\n fn get(&self, category: ProfileCategory) -> &T {\n match category {\n $(ProfileCategory::$name => &self.$name),*\n }\n }\n\n fn set(&mut self, category: ProfileCategory, value: T) {\n match category {\n $(ProfileCategory::$name => self.$name = value),*\n }\n }\n }\n\n struct CategoryData {\n times: Categories<u64>,\n query_counts: Categories<(u64, u64)>,\n }\n\n impl CategoryData {\n fn new() -> CategoryData {\n CategoryData {\n times: Categories::new(),\n query_counts: Categories::new(),\n }\n }\n\n fn print(&self, lock: &mut StderrLock<'_>) {\n writeln!(lock, \"| Phase | Time (ms) \\\n | Time (%) | Queries | Hits (%)\")\n .unwrap();\n writeln!(lock, \"| ---------------- | -------------- \\\n | -------- | -------------- | --------\")\n .unwrap();\n\n let total_time = ($(self.times.$name + )* 0) as f32;\n\n $(\n let (hits, total) = self.query_counts.$name;\n let (hits, total) = if total > 0 {\n (format!(\"{:.2}\",\n (((hits as f32) \/ (total as f32)) * 100.0)), total.to_string())\n } else {\n (String::new(), String::new())\n };\n\n writeln!(\n lock,\n \"| {0: <16} | {1: <14} | {2: <8.2} | {3: <14} | {4: <8}\",\n stringify!($name),\n self.times.$name \/ 1_000_000,\n ((self.times.$name as f32) \/ total_time) * 100.0,\n total,\n hits,\n ).unwrap();\n )*\n }\n\n fn json(&self) -> String {\n let mut json = String::from(\"[\");\n\n $(\n let (hits, total) = self.query_counts.$name;\n\n \/\/normalize hits to 0%\n let hit_percent =\n if total > 0 {\n ((hits as f32) \/ (total as f32)) * 100.0\n } else {\n 0.0\n };\n\n json.push_str(&format!(\n \"{{ \\\"category\\\": \\\"{}\\\", \\\"time_ms\\\": {},\\\n \\\"query_count\\\": {}, \\\"query_hits\\\": {} }},\",\n stringify!($name),\n self.times.$name \/ 1_000_000,\n total,\n format!(\"{:.2}\", hit_percent)\n ));\n )*\n\n \/\/remove the 
trailing ',' character\n json.pop();\n\n json.push(']');\n\n json\n }\n }\n }\n}\n\ndefine_categories! {\n Parsing,\n Expansion,\n TypeChecking,\n BorrowChecking,\n Codegen,\n Linking,\n Other,\n}\n\npub struct SelfProfiler {\n timer_stack: Vec<ProfileCategory>,\n data: CategoryData,\n current_timer: Instant,\n}\n\nimpl SelfProfiler {\n pub fn new() -> SelfProfiler {\n let mut profiler = SelfProfiler {\n timer_stack: Vec::new(),\n data: CategoryData::new(),\n current_timer: Instant::now(),\n };\n\n profiler.start_activity(ProfileCategory::Other);\n\n profiler\n }\n\n pub fn start_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.last().cloned() {\n None => {\n self.current_timer = Instant::now();\n },\n Some(current_category) if current_category == category => {\n \/\/since the current category is the same as the new activity's category,\n \/\/we don't need to do anything with the timer, we just need to push it on the stack\n }\n Some(current_category) => {\n let elapsed = self.stop_timer();\n\n \/\/record the current category's time\n let new_time = self.data.times.get(current_category) + elapsed;\n self.data.times.set(current_category, new_time);\n }\n }\n\n \/\/push the new category\n self.timer_stack.push(category);\n }\n\n pub fn record_query(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits, total + 1));\n }\n\n pub fn record_query_hit(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits + 1, total));\n }\n\n pub fn end_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.pop() {\n None => bug!(\"end_activity() was called but there was no running activity\"),\n Some(c) =>\n assert!(\n c == category,\n \"end_activity() was called but a different activity was running\"),\n }\n\n \/\/check if the new running timer is in the same category as this one\n \/\/if it is, we don't need to do anything\n if let Some(c) = self.timer_stack.last() {\n if *c == category {\n return;\n }\n }\n\n \/\/the new timer is different than the previous,\n \/\/so record the elapsed time and start a new timer\n let elapsed = self.stop_timer();\n let new_time = self.data.times.get(category) + elapsed;\n self.data.times.set(category, new_time);\n }\n\n fn stop_timer(&mut self) -> u64 {\n let elapsed = if cfg!(windows) {\n \/\/ On Windows, timers don't always appear to be monotonic (see #51648)\n \/\/ which can lead to panics when calculating elapsed time.\n \/\/ Work around this by testing to see if the current time is less than\n \/\/ our recorded time, and if it is, just returning 0.\n let now = Instant::now();\n if self.current_timer >= now {\n Duration::new(0, 0)\n } else {\n self.current_timer.elapsed()\n }\n } else {\n self.current_timer.elapsed()\n };\n\n self.current_timer = Instant::now();\n\n (elapsed.as_secs() * 1_000_000_000) + (elapsed.subsec_nanos() as u64)\n }\n\n pub fn print_results(&mut self, opts: &Options) {\n self.end_activity(ProfileCategory::Other);\n\n assert!(\n self.timer_stack.is_empty(),\n \"there were timers running when print_results() was called\");\n\n let out = io::stderr();\n let mut lock = out.lock();\n\n let crate_name =\n opts.crate_name\n .as_ref()\n .map(|n| format!(\" for {}\", n))\n .unwrap_or_default();\n\n writeln!(lock, \"Self profiling results{}:\", crate_name).unwrap();\n writeln!(lock).unwrap();\n\n self.data.print(&mut lock);\n\n 
writeln!(lock).unwrap();\n writeln!(lock, \"Optimization level: {:?}\", opts.optimize).unwrap();\n\n let incremental = if opts.incremental.is_some() { \"on\" } else { \"off\" };\n writeln!(lock, \"Incremental: {}\", incremental).unwrap();\n }\n\n pub fn save_results(&self, opts: &Options) {\n let category_data = self.data.json();\n let compilation_options =\n format!(\"{{ \\\"optimization_level\\\": \\\"{:?}\\\", \\\"incremental\\\": {} }}\",\n opts.optimize,\n if opts.incremental.is_some() { \"true\" } else { \"false\" });\n\n let json = format!(\"{{ \\\"category_data\\\": {}, \\\"compilation_options\\\": {} }}\",\n category_data,\n compilation_options);\n\n fs::write(\"self_profiler_results.json\", json).unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Optimize neighbor temporary variable declarations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added benches\/benches.rs<commit_after>#![feature(test)]\n#[macro_use]\nextern crate adapton ;\n\nuse std::mem::replace;\nuse std::rc::Rc;\n\nuse adapton::adapton_syntax::* ;\nuse adapton::adapton_sigs::* ;\nuse adapton::adapton_state::* ;\n\npub fn fact<'r> (st:&'r mut AdaptonState, x:Rc<u64>, _:() ) -> Rc<u64> {\n if *x == 0 { Rc::new(1) } else {\n let res = fact(st, Rc::new(*x-1), ());\n Rc::new(*x * *res)\n }\n}\n\npub fn run_fact (x:u64) -> u64 {\n let mut st = AdaptonState::new() ;\n let t = st.thunk(ArtIdChoice::Structural,\n prog_pt!(fact),\n Rc::new(Box::new(fact)),\n Rc::new(x), ()) ;\n *(st.force(&t))\n}\n\n#[cfg(test)]\nmod tests {\n extern crate test;\n use super::*;\n use self::test::Bencher;\n \n #[test]\n fn it_works() {\n assert_eq!(120 as u64, run_fact(5));\n }\n \n #[bench]\n fn bench_fact_5(b: &mut Bencher) {\n b.iter(|| run_fact(5));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement SuccDouble for type int<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add `sine` example<commit_after>extern crate rsoundio;\n\nuse std::f32::consts::PI;\nuse std::thread;\nuse std::time::Duration;\n\nfn main() {\n let f = 440f32;\n let cycle_len = (48_000 as f32 \/ f) as usize;\n let phi = 2.0 * f * PI \/ 48_000 as f32;\n let mut pos = 0;\n let samples: Vec<f32> = (pos..cycle_len)\n .map(|i| (phi * i as f32).sin())\n .collect();\n \/\/ create an audio context\n let sio = rsoundio::SoundIo::new();\n sio.set_app_name(\"rsoundio\").unwrap();\n \/\/ connect to the default audio backend\n sio.connect().unwrap();\n println!(\"Connected to backend: {}\", sio.current_backend().unwrap());\n sio.flush_events();\n \/\/ get default output device\n let dev = sio.default_output_device().unwrap();\n assert!(dev.probe_error().is_none());\n println!(\"Using output device: {}\", dev);\n \/\/ create output stream\n let mut out = dev.create_outstream().unwrap();\n assert!(out.set_name(\"rsoundio-example-sine\").is_ok());\n out.set_format(rsoundio::ffi::SioFormat::Float32LE).unwrap();\n println!(\"Output format: {}\", out.format().unwrap());\n\n \/\/ register callbacks\n out.register_write_callback(Box::new(|out: rsoundio::OutStream,\n min_frame_count: i32,\n max_frame_count: i32| {\n let l: Vec<f32> = samples.iter()\n .cycle()\n .take(max_frame_count as usize + pos)\n .skip(pos)\n .map(|s| *s)\n .collect();\n pos = (max_frame_count as usize + pos) % cycle_len;\n let r = l.clone();\n let frames = vec![l, r];\n out.write_stream_f32(min_frame_count, &frames).unwrap();\n }));\n out.register_underflow_callback(Box::new(|out: rsoundio::OutStream| {\n println!(\"Underflow in {} occured!\", 
out.name().unwrap())\n }));\n out.register_error_callback(Box::new(|out: rsoundio::OutStream,\n err: rsoundio::ffi::SioError| {\n println!(\"{} error: {}\", out.name().unwrap(), err)\n }));\n\n \/\/ open output stream\n out.open().unwrap();\n let sr = out.sample_rate();\n println!(\"Sample rate: {}\", sr);\n\n \/\/out.layout_error().unwrap();\n let layout = out.layout();\n println!(\"Output channel layout: {}\", layout);\n \/\/ start audio output (now the `write_callback` will be called periodically)\n assert!(out.start().is_none());\n thread::sleep(Duration::new(3, 0));\n println!(\"Pause for 1s\");\n out.pause();\n thread::sleep(Duration::new(1, 0));\n println!(\"Unpausing\");\n out.unpause();\n thread::sleep(Duration::new(3, 0));\n out.destroy()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a snowman example<commit_after>extern crate turtle;\n\nuse std::f64::consts::PI;\n\nuse turtle::Turtle;\n\nfn main() {\n let mut turtle = Turtle::new();\n\n turtle.pen_up();\n turtle.set_speed(\"fastest\");\n turtle.backward(250.0);\n turtle.left(90.0);\n turtle.pen_down();\n turtle.set_speed(6);\n\n for &radius in [120.0, 80.0, 60.0].into_iter() {\n circle(&mut turtle, radius);\n\n turtle.set_speed(\"fastest\");\n turtle.pen_up();\n turtle.right(90.0);\n turtle.forward(radius * 2.0);\n turtle.left(90.0);\n turtle.pen_down();\n turtle.set_speed(6);\n }\n\n turtle.hide();\n}\n\nfn circle(turtle: &mut Turtle, radius: f64) {\n let degrees = 180.0;\n\n let circumference = 2.0*PI*radius;\n let step = circumference \/ degrees;\n let rotation = 360.0 \/ degrees;\n\n for _ in 0..degrees as i32 {\n turtle.forward(step);\n turtle.right(rotation);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #12<commit_after>use std;\n\nfn gen_triangles(&trigs: [uint]) {\n alt vec::len(trigs) {\n 0u { trigs = [1u]; }\n x { trigs += [trigs[x - 1u] + x + 1u]; }\n }\n}\n\nfn gen_prime(&primes: [u64]) {\n let num = alt vec::last(primes) {\n none { primes = [2u64]; ret }\n some(2u64) { primes += [3u64]; ret }\n some(x) { x + 2u64 }\n };\n\n while true {\n for p in primes {\n if p * p > num {\n primes += [num];\n ret;\n }\n if num % p == 0u64 {\n break;\n }\n }\n num += 2u64;\n }\n fail;\n}\n\nfn div_mult(&num: u64, f: u64) -> u64 {\n let exp = 0u64;\n while (num % f == 0u64) {\n exp += 1u64;\n num \/= f;\n }\n ret exp;\n}\n\nfn factorize(num: u64, &primes: [u64]) -> [(u64, u64)] {\n let itr = num;\n let result = [];\n\n for p in primes {\n let exp = div_mult(itr, p);\n if exp > 0u64 {\n result += [(p, exp)];\n }\n }\n\n while itr != 1u64 {\n gen_prime(primes);\n let p = vec::last_total(primes);\n let exp = div_mult(itr, p);\n if exp > 0u64 {\n result += [(p, exp)];\n }\n }\n\n ret result;\n}\n\nfn num_factors(num: u64, &primes: [u64]) -> u64 {\n let facts = factorize(num, primes);\n ret vec::foldl(1u, facts) { |prod, tuple|\n let (_base, exp) = tuple;\n prod * (exp + 1u)\n };\n}\n\nfn main() {\n let trigs = [];\n let primes = [];\n while true {\n gen_triangles(trigs);\n let t = vec::last_total(trigs);\n let num = num_factors(t, primes);\n if num > 500u {\n std::io::println(#fmt(\"%u -> %u\", t, num_factors(t, primes)));\n break;\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ID reporting in imag-grep<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Utilities related to FFI bindings.\n\/\/!\n\/\/! This module provides utilities to handle data across non-Rust\n\/\/! interfaces, like other programming languages and the underlying\n\/\/! operating system. It is mainly of use for FFI (Foreign Function\n\/\/! Interface) bindings and code that needs to exchange C-like strings\n\/\/! with other languages.\n\/\/!\n\/\/! # Overview\n\/\/!\n\/\/! Rust represents owned strings with the [`String`] type, and\n\/\/! borrowed slices of strings with the [`str`] primitive. Both are\n\/\/! always in UTF-8 encoding, and may contain nul bytes in the middle,\n\/\/! i.e. if you look at the bytes that make up the string, there may\n\/\/! be a `\\0` among them. Both `String` and `str` store their length\n\/\/! explicitly; there are no nul terminators at the end of strings\n\/\/! like in C.\n\/\/!\n\/\/! C strings are different from Rust strings:\n\/\/!\n\/\/! * **Encodings** - Rust strings are UTF-8, but C strings may use\n\/\/! other encodings. If you are using a string from C, you should\n\/\/! check its encoding explicitly, rather than just assuming that it\n\/\/! is UTF-8 like you can do in Rust.\n\/\/!\n\/\/! * **Character size** - C strings may use `char` or `wchar_t`-sized\n\/\/! characters; please **note** that C's `char` is different from Rust's.\n\/\/! The C standard leaves the actual sizes of those types open to\n\/\/! interpretation, but defines different APIs for strings made up of\n\/\/! each character type. Rust strings are always UTF-8, so different\n\/\/! Unicode characters will be encoded in a variable number of bytes\n\/\/! each. The Rust type [`char`] represents a '[Unicode scalar\n\/\/! value]', which is similar to, but not the same as, a '[Unicode\n\/\/! code point]'.\n\/\/!\n\/\/! * **Nul terminators and implicit string lengths** - Often, C\n\/\/! strings are nul-terminated, i.e. they have a `\\0` character at the\n\/\/! end. The length of a string buffer is not stored, but has to be\n\/\/! calculated; to compute the length of a string, C code must\n\/\/! manually call a function like `strlen()` for `char`-based strings,\n\/\/! or `wcslen()` for `wchar_t`-based ones. Those functions return\n\/\/! the number of characters in the string excluding the nul\n\/\/! terminator, so the buffer length is really `len+1` characters.\n\/\/! Rust strings don't have a nul terminator; their length is always\n\/\/! stored and does not need to be calculated. While in Rust\n\/\/! accessing a string's length is a O(1) operation (because the\n\/\/! length is stored); in C it is an O(length) operation because the\n\/\/! length needs to be computed by scanning the string for the nul\n\/\/! terminator.\n\/\/!\n\/\/! * **Internal nul characters** - When C strings have a nul\n\/\/! terminator character, this usually means that they cannot have nul\n\/\/! characters in the middle — a nul character would essentially\n\/\/! truncate the string. Rust strings *can* have nul characters in\n\/\/! the middle, because nul does not have to mark the end of the\n\/\/! string in Rust.\n\/\/!\n\/\/! 
# Representations of non-Rust strings\n\/\/!\n\/\/! [`CString`] and [`CStr`] are useful when you need to transfer\n\/\/! UTF-8 strings to and from languages with a C ABI, like Python.\n\/\/!\n\/\/! * **From Rust to C:** [`CString`] represents an owned, C-friendly\n\/\/! string: it is nul-terminated, and has no internal nul characters.\n\/\/! Rust code can create a `CString` out of a normal string (provided\n\/\/! that the string doesn't have nul characters in the middle), and\n\/\/! then use a variety of methods to obtain a raw `*mut u8` that can\n\/\/! then be passed as an argument to functions which use the C\n\/\/! conventions for strings.\n\/\/!\n\/\/! * **From C to Rust:** [`CStr`] represents a borrowed C string; it\n\/\/! is what you would use to wrap a raw `*const u8` that you got from\n\/\/! a C function. A `CStr` is guaranteed to be a nul-terminated array\n\/\/! of bytes. Once you have a `CStr`, you can convert it to a Rust\n\/\/! `&str` if it's valid UTF-8, or lossily convert it by adding\n\/\/! replacement characters.\n\/\/!\n\/\/! [`OsString`] and [`OsStr`] are useful when you need to transfer\n\/\/! strings to and from the operating system itself, or when capturing\n\/\/! the output of external commands. Conversions between `OsString`,\n\/\/! `OsStr` and Rust strings work similarly to those for [`CString`]\n\/\/! and [`CStr`].\n\/\/!\n\/\/! * [`OsString`] represents an owned string in whatever\n\/\/! representation the operating system prefers. In the Rust standard\n\/\/! library, various APIs that transfer strings to\/from the operating\n\/\/! system use `OsString` instead of plain strings. For example,\n\/\/! [`env::var_os()`] is used to query environment variables; it\n\/\/! returns an `Option<OsString>`. If the environment variable exists\n\/\/! you will get a `Some(os_string)`, which you can *then* try to\n\/\/! convert to a Rust string. This yields a [`Result<>`], so that\n\/\/! your code can detect errors in case the environment variable did\n\/\/! not in fact contain valid Unicode data.\n\/\/!\n\/\/! * [`OsStr`] represents a borrowed reference to a string in a\n\/\/! format that can be passed to the operating system. It can be\n\/\/! converted into an UTF-8 Rust string slice in a similar way to\n\/\/! `OsString`.\n\/\/!\n\/\/! # Conversions\n\/\/!\n\/\/! ## On Unix\n\/\/!\n\/\/! On Unix, [`OsStr`] implements the\n\/\/! `std::os::unix:ffi::`[`OsStrExt`][unix.OsStrExt] trait, which\n\/\/! augments it with two methods, [`from_bytes`] and [`as_bytes`].\n\/\/! These do inexpensive conversions from and to UTF-8 byte slices.\n\/\/!\n\/\/! Additionally, on Unix [`OsString`] implements the\n\/\/! `std::os::unix:ffi::`[`OsStringExt`][unix.OsStringExt] trait,\n\/\/! which provides [`from_vec`] and [`into_vec`] methods that consume\n\/\/! their arguments, and take or produce vectors of [`u8`].\n\/\/!\n\/\/! ## On Windows\n\/\/!\n\/\/! On Windows, [`OsStr`] implements the\n\/\/! `std::os::windows::ffi::`[`OsStrExt`][windows.OsStrExt] trait,\n\/\/! which provides an [`encode_wide`] method. This provides an\n\/\/! iterator that can be [`collect`]ed into a vector of [`u16`].\n\/\/!\n\/\/! Additionally, on Windows [`OsString`] implements the\n\/\/! `std::os::windows:ffi::`[`OsStringExt`][windows.OsStringExt]\n\/\/! trait, which provides a [`from_wide`] method. The result of this\n\/\/! method is an `OsString` which can be round-tripped to a Windows\n\/\/! string losslessly.\n\/\/!\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`str`]: ..\/primitive.str.html\n\/\/! 
[`char`]: ..\/primitive.char.html\n\/\/! [`u8`]: ..\/primitive.u8.html\n\/\/! [`u16`]: ..\/primitive.u16.html\n\/\/! [Unicode scalar value]: http:\/\/www.unicode.org\/glossary\/#unicode_scalar_value\n\/\/! [Unicode code point]: http:\/\/www.unicode.org\/glossary\/#code_point\n\/\/! [`CString`]: struct.CString.html\n\/\/! [`CStr`]: struct.CStr.html\n\/\/! [`OsString`]: struct.OsString.html\n\/\/! [`OsStr`]: struct.OsStr.html\n\/\/! [`env::set_var()`]: ..\/env\/fn.set_var.html\n\/\/! [`env::var_os()`]: ..\/env\/fn.var_os.html\n\/\/! [`Result<>`]: ..\/result\/enum.Result.html\n\/\/! [unix.OsStringExt]: ..\/os\/unix\/ffi\/trait.OsStringExt.html\n\/\/! [`from_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.from_vec\n\/\/! [`into_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.into_vec\n\/\/! [unix.OsStrExt]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [`from_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.from_bytes\n\/\/! [`as_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.as_bytes\n\/\/! [`OsStrExt`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [windows.OsStrExt]: ..\/os\/windows\/ffi\/trait.OsStrExt.html\n\/\/! [`encode_wide`]: ..\/os\/windows\/ffi\/trait.OsStrExt.html#tymethod.encode_wide\n\/\/! [`collect`]: ..\/iter\/trait.Iterator.html#method.collect\n\/\/! [windows.OsStringExt]: ..\/os\/windows\/ffi\/trait.OsStringExt.html\n\/\/! [`from_wide`]: ..\/os\/windows\/ffi\/trait.OsStringExt.html#tymethod.from_wide\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::c_str::{CString, CStr, NulError, IntoStringError};\n#[stable(feature = \"cstr_from_bytes\", since = \"1.10.0\")]\npub use self::c_str::{FromBytesWithNulError};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::os_str::{OsString, OsStr};\n\n#[stable(feature = \"raw_os\", since = \"1.1.0\")]\npub use core::ffi::c_void;\n\nmod c_str;\nmod os_str;\n<commit_msg>Fix a typo in the documentation of std::ffi<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Utilities related to FFI bindings.\n\/\/!\n\/\/! This module provides utilities to handle data across non-Rust\n\/\/! interfaces, like other programming languages and the underlying\n\/\/! operating system. It is mainly of use for FFI (Foreign Function\n\/\/! Interface) bindings and code that needs to exchange C-like strings\n\/\/! with other languages.\n\/\/!\n\/\/! # Overview\n\/\/!\n\/\/! Rust represents owned strings with the [`String`] type, and\n\/\/! borrowed slices of strings with the [`str`] primitive. Both are\n\/\/! always in UTF-8 encoding, and may contain nul bytes in the middle,\n\/\/! i.e. if you look at the bytes that make up the string, there may\n\/\/! be a `\\0` among them. Both `String` and `str` store their length\n\/\/! explicitly; there are no nul terminators at the end of strings\n\/\/! like in C.\n\/\/!\n\/\/! C strings are different from Rust strings:\n\/\/!\n\/\/! * **Encodings** - Rust strings are UTF-8, but C strings may use\n\/\/! other encodings. 
If you are using a string from C, you should\n\/\/! check its encoding explicitly, rather than just assuming that it\n\/\/! is UTF-8 like you can do in Rust.\n\/\/!\n\/\/! * **Character size** - C strings may use `char` or `wchar_t`-sized\n\/\/! characters; please **note** that C's `char` is different from Rust's.\n\/\/! The C standard leaves the actual sizes of those types open to\n\/\/! interpretation, but defines different APIs for strings made up of\n\/\/! each character type. Rust strings are always UTF-8, so different\n\/\/! Unicode characters will be encoded in a variable number of bytes\n\/\/! each. The Rust type [`char`] represents a '[Unicode scalar\n\/\/! value]', which is similar to, but not the same as, a '[Unicode\n\/\/! code point]'.\n\/\/!\n\/\/! * **Nul terminators and implicit string lengths** - Often, C\n\/\/! strings are nul-terminated, i.e. they have a `\\0` character at the\n\/\/! end. The length of a string buffer is not stored, but has to be\n\/\/! calculated; to compute the length of a string, C code must\n\/\/! manually call a function like `strlen()` for `char`-based strings,\n\/\/! or `wcslen()` for `wchar_t`-based ones. Those functions return\n\/\/! the number of characters in the string excluding the nul\n\/\/! terminator, so the buffer length is really `len+1` characters.\n\/\/! Rust strings don't have a nul terminator; their length is always\n\/\/! stored and does not need to be calculated. While in Rust\n\/\/! accessing a string's length is a O(1) operation (because the\n\/\/! length is stored); in C it is an O(length) operation because the\n\/\/! length needs to be computed by scanning the string for the nul\n\/\/! terminator.\n\/\/!\n\/\/! * **Internal nul characters** - When C strings have a nul\n\/\/! terminator character, this usually means that they cannot have nul\n\/\/! characters in the middle — a nul character would essentially\n\/\/! truncate the string. Rust strings *can* have nul characters in\n\/\/! the middle, because nul does not have to mark the end of the\n\/\/! string in Rust.\n\/\/!\n\/\/! # Representations of non-Rust strings\n\/\/!\n\/\/! [`CString`] and [`CStr`] are useful when you need to transfer\n\/\/! UTF-8 strings to and from languages with a C ABI, like Python.\n\/\/!\n\/\/! * **From Rust to C:** [`CString`] represents an owned, C-friendly\n\/\/! string: it is nul-terminated, and has no internal nul characters.\n\/\/! Rust code can create a `CString` out of a normal string (provided\n\/\/! that the string doesn't have nul characters in the middle), and\n\/\/! then use a variety of methods to obtain a raw `*mut u8` that can\n\/\/! then be passed as an argument to functions which use the C\n\/\/! conventions for strings.\n\/\/!\n\/\/! * **From C to Rust:** [`CStr`] represents a borrowed C string; it\n\/\/! is what you would use to wrap a raw `*const u8` that you got from\n\/\/! a C function. A `CStr` is guaranteed to be a nul-terminated array\n\/\/! of bytes. Once you have a `CStr`, you can convert it to a Rust\n\/\/! `&str` if it's valid UTF-8, or lossily convert it by adding\n\/\/! replacement characters.\n\/\/!\n\/\/! [`OsString`] and [`OsStr`] are useful when you need to transfer\n\/\/! strings to and from the operating system itself, or when capturing\n\/\/! the output of external commands. Conversions between `OsString`,\n\/\/! `OsStr` and Rust strings work similarly to those for [`CString`]\n\/\/! and [`CStr`].\n\/\/!\n\/\/! * [`OsString`] represents an owned string in whatever\n\/\/! 
representation the operating system prefers. In the Rust standard\n\/\/! library, various APIs that transfer strings to\/from the operating\n\/\/! system use `OsString` instead of plain strings. For example,\n\/\/! [`env::var_os()`] is used to query environment variables; it\n\/\/! returns an `Option<OsString>`. If the environment variable exists\n\/\/! you will get a `Some(os_string)`, which you can *then* try to\n\/\/! convert to a Rust string. This yields a [`Result<>`], so that\n\/\/! your code can detect errors in case the environment variable did\n\/\/! not in fact contain valid Unicode data.\n\/\/!\n\/\/! * [`OsStr`] represents a borrowed reference to a string in a\n\/\/! format that can be passed to the operating system. It can be\n\/\/! converted into an UTF-8 Rust string slice in a similar way to\n\/\/! `OsString`.\n\/\/!\n\/\/! # Conversions\n\/\/!\n\/\/! ## On Unix\n\/\/!\n\/\/! On Unix, [`OsStr`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStrExt`][unix.OsStrExt] trait, which\n\/\/! augments it with two methods, [`from_bytes`] and [`as_bytes`].\n\/\/! These do inexpensive conversions from and to UTF-8 byte slices.\n\/\/!\n\/\/! Additionally, on Unix [`OsString`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStringExt`][unix.OsStringExt] trait,\n\/\/! which provides [`from_vec`] and [`into_vec`] methods that consume\n\/\/! their arguments, and take or produce vectors of [`u8`].\n\/\/!\n\/\/! ## On Windows\n\/\/!\n\/\/! On Windows, [`OsStr`] implements the\n\/\/! `std::os::windows::ffi::`[`OsStrExt`][windows.OsStrExt] trait,\n\/\/! which provides an [`encode_wide`] method. This provides an\n\/\/! iterator that can be [`collect`]ed into a vector of [`u16`].\n\/\/!\n\/\/! Additionally, on Windows [`OsString`] implements the\n\/\/! `std::os::windows:ffi::`[`OsStringExt`][windows.OsStringExt]\n\/\/! trait, which provides a [`from_wide`] method. The result of this\n\/\/! method is an `OsString` which can be round-tripped to a Windows\n\/\/! string losslessly.\n\/\/!\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`str`]: ..\/primitive.str.html\n\/\/! [`char`]: ..\/primitive.char.html\n\/\/! [`u8`]: ..\/primitive.u8.html\n\/\/! [`u16`]: ..\/primitive.u16.html\n\/\/! [Unicode scalar value]: http:\/\/www.unicode.org\/glossary\/#unicode_scalar_value\n\/\/! [Unicode code point]: http:\/\/www.unicode.org\/glossary\/#code_point\n\/\/! [`CString`]: struct.CString.html\n\/\/! [`CStr`]: struct.CStr.html\n\/\/! [`OsString`]: struct.OsString.html\n\/\/! [`OsStr`]: struct.OsStr.html\n\/\/! [`env::set_var()`]: ..\/env\/fn.set_var.html\n\/\/! [`env::var_os()`]: ..\/env\/fn.var_os.html\n\/\/! [`Result<>`]: ..\/result\/enum.Result.html\n\/\/! [unix.OsStringExt]: ..\/os\/unix\/ffi\/trait.OsStringExt.html\n\/\/! [`from_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.from_vec\n\/\/! [`into_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.into_vec\n\/\/! [unix.OsStrExt]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [`from_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.from_bytes\n\/\/! [`as_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.as_bytes\n\/\/! [`OsStrExt`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [windows.OsStrExt]: ..\/os\/windows\/ffi\/trait.OsStrExt.html\n\/\/! [`encode_wide`]: ..\/os\/windows\/ffi\/trait.OsStrExt.html#tymethod.encode_wide\n\/\/! [`collect`]: ..\/iter\/trait.Iterator.html#method.collect\n\/\/! [windows.OsStringExt]: ..\/os\/windows\/ffi\/trait.OsStringExt.html\n\/\/! 
[`from_wide`]: ..\/os\/windows\/ffi\/trait.OsStringExt.html#tymethod.from_wide\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::c_str::{CString, CStr, NulError, IntoStringError};\n#[stable(feature = \"cstr_from_bytes\", since = \"1.10.0\")]\npub use self::c_str::{FromBytesWithNulError};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::os_str::{OsString, OsStr};\n\n#[stable(feature = \"raw_os\", since = \"1.1.0\")]\npub use core::ffi::c_void;\n\nmod c_str;\nmod os_str;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Used AttemptResult for everything now<commit_after><|endoftext|>"} {"text":"<commit_before>#[crate_type = \"rlib\"];\n#[crate_id=\"github.com\/xsleonard\/sdl2_image-rs#sdl2_image:0.1\"];\n#[desc = \"SDL2_image bindings and wrappers\"];\n#[comment = \"SDL2_image bindings and wrappers\"];\n#[license = \"MIT\"];\n\nextern mod sdl2;\n\nuse std::libc::{c_int, c_char};\nuse std::ptr;\nuse std::cast;\nuse std::io;\nuse sdl2::surface::Surface;\nuse sdl2::render::Texture;\nuse sdl2::render::Renderer;\nuse sdl2::get_error;\n\n\/\/ Setup linking for all targets.\n#[cfg(target_os=\"macos\")]\nmod mac {\n #[cfg(mac_framework)]\n #[link(kind=\"framework\", name=\"SDL2_image\")]\n extern {}\n\n #[cfg(not(mac_framework))]\n #[link(name=\"SDL2_image\")]\n extern {}\n}\n\n#[cfg(target_os=\"win32\")]\n#[cfg(target_os=\"linux\")]\n#[cfg(target_os=\"freebsd\")]\nmod others {\n #[link(name=\"SDL2_image\")]\n extern {}\n}\n\nmod ffi;\n\n#[deriving(Clone, Eq, IterBytes, ToStr)]\npub enum InitFlag {\n InitJpg = ffi::IMG_INIT_JPG as int,\n InitPng = ffi::IMG_INIT_PNG as int,\n InitTif = ffi::IMG_INIT_TIF as int,\n InitWebp = ffi::IMG_INIT_WEBP as int,\n}\n\n#[deriving(Eq, Clone)]\npub struct ImageVersion {\n major: int,\n minor: int,\n patch: int,\n}\n\nimpl ToStr for ImageVersion {\n fn to_str(&self) -> ~str {\n format!(\"{}.{}.{}\", self.major, self.minor, self.patch)\n }\n}\n\nimpl ImageVersion {\n fn from_sdl_version(sv: *ffi::SDL_version) -> ImageVersion {\n unsafe {\n let v = *sv;\n ImageVersion{ major: v.major, minor: v.minor, patch: v.patch }\n }\n }\n}\n\npub trait ImageLoader {\n fn from_file(filename: &str) -> Result<~Surface, ~str>;\n fn from_xpm_array(xpm: **i8) -> Result<~Surface, ~str>;\n}\n\npub trait ImageSaver {\n fn save(&self, filename: &str) -> Result<(), ~str>;\n}\n\nimpl ImageLoader for Surface {\n fn from_file(filename: &str) -> Result<~Surface, ~str> {\n unsafe {\n let raw = ffi::IMG_Load(filename.to_c_str().unwrap());\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Surface { raw: raw, owned: true })\n }\n }\n }\n\n fn from_xpm_array(xpm: **i8) -> Result<~Surface, ~str> {\n unsafe {\n let raw = ffi::IMG_ReadXPMFromArray(xpm as **c_char);\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Surface { raw: raw, owned: true })\n }\n }\n }\n}\n\nimpl ImageSaver for Surface {\n fn save(&self, filename: &str) -> Result<(), ~str> {\n unsafe {\n let status = ffi::IMG_SavePNG(self.raw,\n filename.to_c_str().unwrap());\n if status != 0 {\n Err(get_error())\n } else {\n Ok(())\n }\n }\n }\n}\n\npub trait TextureLoader {\n fn load_texture_from_file(&self, filename: &str) -> Result<~Texture, ~str>;\n}\n\nimpl TextureLoader for Renderer {\n fn load_texture_from_file(&self,\n filename: &str) -> Result<~Texture, ~str> {\n unsafe {\n let raw = ffi::IMG_LoadTexture(self.raw,\n filename.to_c_str().unwrap());\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Texture{ 
raw: raw, owned: true })\n }\n }\n }\n}\n\npub fn init(flags: &[InitFlag]) -> ~[InitFlag] {\n \/\/! Initializes SDL2_image with InitFlags and returns which\n \/\/! InitFlags were actually used.\n let mut used = ~[];\n unsafe {\n let used_flags = ffi::IMG_Init(\n flags.iter().fold(0, |flags, &flag| {\n flags | flag as ffi::IMG_InitFlags\n })\n );\n for flag in flags.iter() {\n if used_flags & *flag as c_int != 0 {\n used.push(*flag)\n }\n }\n }\n used\n}\n\npub fn quit() {\n \/\/! Teardown the SDL2_Image subsystem\n unsafe { ffi::IMG_Quit(); }\n}\n\npub fn get_linked_version() -> ImageVersion {\n \/\/! Returns the version of the dynamically linked SDL_image library\n unsafe {\n ImageVersion::from_sdl_version(ffi::IMG_Linked_Version())\n }\n}\n\n\/\/ TODO -- this should be in rust-sdl2\n\/\/ Most of the sdl2_image API relies on SDL_RWops.\n\n\/\/ #[deriving(Eq)]\n\/\/ pub struct RWops {\n\/\/ raw: *SDL_RWops;\n\/\/ owned: bool;\n\/\/ }\n\n\/\/ impl Drop for RWops {\n\/\/ fn drop(&mut self) {\n\/\/ if self.owned {\n\/\/ unsafe {\n\/\/ \/\/ TODO -- close() returns a c_int error status.\n\/\/ \/\/ How do we deal with errors in the destructor?\n\/\/ \/\/ Probably either kill the task, or don't implement this\n\/\/ \/\/ as a destructor\n\/\/ self.raw.close()\n\/\/ }\n\/\/ }\n\/\/ }\n\/\/ }\n<commit_msg>use Self in trait ImageLoader's static methods to type hint the compiler<commit_after>#[crate_type = \"rlib\"];\n#[crate_id=\"github.com\/xsleonard\/sdl2_image-rs#sdl2_image:0.1\"];\n#[desc = \"SDL2_image bindings and wrappers\"];\n#[comment = \"SDL2_image bindings and wrappers\"];\n#[license = \"MIT\"];\n\nextern mod sdl2;\n\nuse std::libc::{c_int, c_char};\nuse std::ptr;\nuse std::cast;\nuse std::io;\nuse sdl2::surface::Surface;\nuse sdl2::render::Texture;\nuse sdl2::render::Renderer;\nuse sdl2::get_error;\n\n\/\/ Setup linking for all targets.\n#[cfg(target_os=\"macos\")]\nmod mac {\n #[cfg(mac_framework)]\n #[link(kind=\"framework\", name=\"SDL2_image\")]\n extern {}\n\n #[cfg(not(mac_framework))]\n #[link(name=\"SDL2_image\")]\n extern {}\n}\n\n#[cfg(target_os=\"win32\")]\n#[cfg(target_os=\"linux\")]\n#[cfg(target_os=\"freebsd\")]\nmod others {\n #[link(name=\"SDL2_image\")]\n extern {}\n}\n\nmod ffi;\n\n#[deriving(Clone, Eq, IterBytes, ToStr)]\npub enum InitFlag {\n InitJpg = ffi::IMG_INIT_JPG as int,\n InitPng = ffi::IMG_INIT_PNG as int,\n InitTif = ffi::IMG_INIT_TIF as int,\n InitWebp = ffi::IMG_INIT_WEBP as int,\n}\n\n#[deriving(Eq, Clone)]\npub struct ImageVersion {\n major: int,\n minor: int,\n patch: int,\n}\n\nimpl ToStr for ImageVersion {\n fn to_str(&self) -> ~str {\n format!(\"{}.{}.{}\", self.major, self.minor, self.patch)\n }\n}\n\nimpl ImageVersion {\n fn from_sdl_version(sv: *ffi::SDL_version) -> ImageVersion {\n unsafe {\n let v = *sv;\n ImageVersion{ major: v.major, minor: v.minor, patch: v.patch }\n }\n }\n}\n\npub trait ImageLoader {\n \/\/ Self is only returned here to type hint to the compiler.\n \/\/ The syntax for type hinting in this case is not yet defined.\n \/\/ The intended return value is Result<~Surface, ~str>.\n fn from_file(filename: &str) -> Result<~Self, ~str>;\n fn from_xpm_array(xpm: **i8) -> Result<~Self, ~str>;\n}\n\npub trait ImageSaver {\n fn save(&self, filename: &str) -> Result<(), ~str>;\n}\n\nimpl ImageLoader for Surface {\n fn from_file(filename: &str) -> Result<~Surface, ~str> {\n unsafe {\n let raw = ffi::IMG_Load(filename.to_c_str().unwrap());\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Surface { raw: raw, owned: true })\n }\n 
}\n }\n\n fn from_xpm_array(xpm: **i8) -> Result<~Surface, ~str> {\n unsafe {\n let raw = ffi::IMG_ReadXPMFromArray(xpm as **c_char);\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Surface { raw: raw, owned: true })\n }\n }\n }\n}\n\nimpl ImageSaver for Surface {\n fn save(&self, filename: &str) -> Result<(), ~str> {\n unsafe {\n let status = ffi::IMG_SavePNG(self.raw,\n filename.to_c_str().unwrap());\n if status != 0 {\n Err(get_error())\n } else {\n Ok(())\n }\n }\n }\n}\n\npub trait TextureLoader {\n fn load_texture_from_file(&self, filename: &str) -> Result<~Texture, ~str>;\n}\n\nimpl TextureLoader for Renderer {\n fn load_texture_from_file(&self,\n filename: &str) -> Result<~Texture, ~str> {\n unsafe {\n let raw = ffi::IMG_LoadTexture(self.raw,\n filename.to_c_str().unwrap());\n if raw == ptr::null() {\n Err(get_error())\n } else {\n Ok(~Texture{ raw: raw, owned: true })\n }\n }\n }\n}\n\npub fn init(flags: &[InitFlag]) -> ~[InitFlag] {\n \/\/! Initializes SDL2_image with InitFlags and returns which\n \/\/! InitFlags were actually used.\n let mut used = ~[];\n unsafe {\n let used_flags = ffi::IMG_Init(\n flags.iter().fold(0, |flags, &flag| {\n flags | flag as ffi::IMG_InitFlags\n })\n );\n for flag in flags.iter() {\n if used_flags & *flag as c_int != 0 {\n used.push(*flag)\n }\n }\n }\n used\n}\n\npub fn quit() {\n \/\/! Teardown the SDL2_Image subsystem\n unsafe { ffi::IMG_Quit(); }\n}\n\npub fn get_linked_version() -> ImageVersion {\n \/\/! Returns the version of the dynamically linked SDL_image library\n unsafe {\n ImageVersion::from_sdl_version(ffi::IMG_Linked_Version())\n }\n}\n\n\/\/ TODO -- this should be in rust-sdl2\n\/\/ Most of the sdl2_image API relies on SDL_RWops.\n\n\/\/ #[deriving(Eq)]\n\/\/ pub struct RWops {\n\/\/ raw: *SDL_RWops;\n\/\/ owned: bool;\n\/\/ }\n\n\/\/ impl Drop for RWops {\n\/\/ fn drop(&mut self) {\n\/\/ if self.owned {\n\/\/ unsafe {\n\/\/ \/\/ TODO -- close() returns a c_int error status.\n\/\/ \/\/ How do we deal with errors in the destructor?\n\/\/ \/\/ Probably either kill the task, or don't implement this\n\/\/ \/\/ as a destructor\n\/\/ self.raw.close()\n\/\/ }\n\/\/ }\n\/\/ }\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Handling log-probabilities.\n\nuse std::mem;\nuse std::f64;\n\npub use stats::{Prob, LogProb};\n\n\n\/\/\/ A factor to convert log-probabilities to PHRED-scale (phred = p * LOG_TO_PHRED_FACTOR).\nconst LOG_TO_PHRED_FACTOR: f64 = -4.3429448190325175; \/\/ -10 * 1 \/ ln(10)\n\n\n\/\/\/ A factor to convert PHRED-scale to log-probabilities (p = phred * PHRED_TO_LOG_FACTOR).\nconst PHRED_TO_LOG_FACTOR: f64 = -0.23025850929940456; \/\/ 1 \/ (-10 * log10(e))\n\n\n\/\/\/ Calculate log(1 - p) with p given in log space without loss of precision as described in\n\/\/\/ http:\/\/cran.r-project.org\/web\/packages\/Rmpfr\/vignettes\/log1mexp-note.pdf.\npub fn ln_1m_exp(p: LogProb) -> LogProb {\n if p < -0.693 {\n (-p.exp()).ln_1p()\n }\n else {\n (-p.exp_m1()).ln()\n }\n}\n\n\n\/\/\/ Convert log scale probability to PHRED scale.\npub fn log_to_phred(p: LogProb) -> f64 {\n p * LOG_TO_PHRED_FACTOR\n}\n\n\n\/\/\/ Convert PHRED scale probability to log scale.\npub fn phred_to_log(p: f64) -> LogProb {\n p * PHRED_TO_LOG_FACTOR\n}\n\n\n\/\/\/ Calculate the sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn sum(probs: &[LogProb]) -> LogProb {\n if probs.is_empty() {\n f64::NEG_INFINITY\n }\n else {\n let mut pmax = probs[0];\n let mut imax = 0;\n for (i, &p) in probs.iter().enumerate().skip(1) {\n if p > pmax {\n pmax = p;\n imax = i;\n }\n }\n if pmax == f64::NEG_INFINITY {\n f64::NEG_INFINITY\n }\n else if pmax == f64::INFINITY {\n f64::INFINITY\n }\n else {\n \/\/ TODO use sum() once it has been stabilized: .sum::<usize>()\n pmax + (probs.iter().enumerate().filter_map(|(i, p)| if i != imax { Some((p - pmax).exp()) } else { None }).fold(0.0, |s, e| s + e)).ln_1p()\n }\n }\n}\n\n\n\/\/\/ Calculate the sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn add(mut p0: LogProb, mut p1: LogProb) -> LogProb {\n if p1 > p0 {\n mem::swap(&mut p0, &mut p1);\n }\n if p0 == f64::NEG_INFINITY {\n f64::NEG_INFINITY\n }\n else if p0 == f64::INFINITY {\n f64::INFINITY\n }\n else {\n p0 + (p1 - p0).exp().ln_1p()\n }\n}\n\n\n\/\/\/ Calculate the cumulative sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn cumsum<'a, I: Iterator<Item=&'a LogProb>>(probs: I) -> Vec<LogProb> {\n probs.scan(f64::NEG_INFINITY, |s, p| {\n *s = add(*s, *p);\n Some(*s)\n }).collect()\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::f64;\n\n #[test]\n fn test_sum() {\n let probs = [f64::NEG_INFINITY, 0.0, f64::NEG_INFINITY];\n assert_eq!(sum(&probs), 0.0);\n }\n\n #[test]\n fn test_empty_sum() {\n assert_eq!(sum(&[]), f64::NEG_INFINITY);\n }\n}\n<commit_msg>Added test for cumsum and take probs by value.<commit_after>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Handling log-probabilities.\n\nuse std::mem;\nuse std::f64;\n\npub use stats::{Prob, LogProb};\n\n\n\/\/\/ A factor to convert log-probabilities to PHRED-scale (phred = p * LOG_TO_PHRED_FACTOR).\nconst LOG_TO_PHRED_FACTOR: f64 = -4.3429448190325175; \/\/ -10 * 1 \/ ln(10)\n\n\n\/\/\/ A factor to convert PHRED-scale to log-probabilities (p = phred * PHRED_TO_LOG_FACTOR).\nconst PHRED_TO_LOG_FACTOR: f64 = -0.23025850929940456; \/\/ 1 \/ (-10 * log10(e))\n\n\n\/\/\/ Calculate log(1 - p) with p given in log space without loss of precision as described in\n\/\/\/ http:\/\/cran.r-project.org\/web\/packages\/Rmpfr\/vignettes\/log1mexp-note.pdf.\npub fn ln_1m_exp(p: LogProb) -> LogProb {\n if p < -0.693 {\n (-p.exp()).ln_1p()\n }\n else {\n (-p.exp_m1()).ln()\n }\n}\n\n\n\/\/\/ Convert log scale probability to PHRED scale.\npub fn log_to_phred(p: LogProb) -> f64 {\n p * LOG_TO_PHRED_FACTOR\n}\n\n\n\/\/\/ Convert PHRED scale probability to log scale.\npub fn phred_to_log(p: f64) -> LogProb {\n p * PHRED_TO_LOG_FACTOR\n}\n\n\n\/\/\/ Calculate the sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn sum(probs: &[LogProb]) -> LogProb {\n if probs.is_empty() {\n f64::NEG_INFINITY\n }\n else {\n let mut pmax = probs[0];\n let mut imax = 0;\n for (i, &p) in probs.iter().enumerate().skip(1) {\n if p > pmax {\n pmax = p;\n imax = i;\n }\n }\n if pmax == f64::NEG_INFINITY {\n f64::NEG_INFINITY\n }\n else if pmax == f64::INFINITY {\n f64::INFINITY\n }\n else {\n \/\/ TODO use sum() once it has been stabilized: .sum::<usize>()\n pmax + (probs.iter().enumerate().filter_map(|(i, p)| if i != imax { Some((p - pmax).exp()) } else { None }).fold(0.0, |s, e| s + e)).ln_1p()\n }\n }\n}\n\n\n\/\/\/ Calculate the sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn add(mut p0: LogProb, mut p1: LogProb) -> LogProb {\n if p1 > p0 {\n mem::swap(&mut p0, &mut p1);\n }\n if p0 == f64::NEG_INFINITY {\n f64::NEG_INFINITY\n }\n else if p0 == f64::INFINITY {\n f64::INFINITY\n }\n else {\n p0 + (p1 - p0).exp().ln_1p()\n }\n}\n\n\n\/\/\/ Calculate the cumulative sum of the given probabilities in a numerically stable way (Durbin 1998).\npub fn cumsum<'a, I: Iterator<Item=LogProb>>(probs: I) -> Vec<LogProb> {\n probs.scan(f64::NEG_INFINITY, |s, p| {\n *s = add(*s, p);\n Some(*s)\n }).collect()\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::f64;\n\n #[test]\n fn test_sum() {\n let probs = [f64::NEG_INFINITY, 0.0, f64::NEG_INFINITY];\n assert_eq!(sum(&probs), 0.0);\n }\n\n #[test]\n fn test_empty_sum() {\n assert_eq!(sum(&[]), f64::NEG_INFINITY);\n }\n\n #[test]\n fn test_cumsum() {\n let probs = vec![0.0f64.ln(), 0.01f64.ln(), 0.001f64.ln()];\n assert_eq!(cumsum(probs.into_iter()), [0.0f64.ln(), 0.01f64.ln(), 0.011f64.ln()]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use a parametric lifetime in 'Responder' example.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>example of interpret with multiple aguments and fn name<commit_after>extern crate parity_wasm;\n\nuse std::env::args;\n\nuse parity_wasm::{interpreter, ModuleInstanceInterface, RuntimeValue};\n\nfn main() {\n let args: Vec<_> = args().collect();\n if args.len() < 3 {\n println!(\"Usage: {} <wasm file> <exported func> [<arg>...]\", args[0]);\n return;\n }\n let func_name = &args[2];\n let (_, program_args) = args.split_at(3);\n let program_args: Vec<_> = program_args.iter().enumerate()\n .map(|(i, arg)| RuntimeValue::I32(arg.parse().expect(&format!(\"Invalid i32 arg at 
index {}\", i))))\n .collect();\n\n let program = parity_wasm::ProgramInstance::with_env_params(\n interpreter::EnvParams {\n total_stack: 128*1024,\n total_memory: 2*1024*1024,\n allow_memory_growth: false,\n }\n ).expect(\"Failed to load program\");\n let module = parity_wasm::deserialize_file(&args[1]).expect(\"Failed to load module\");\n let module = program.add_module(\"main\", module, None).expect(\"Failed to initialize module\");\n\n println!(\"Result: {:?}\", module.execute_export(func_name, program_args.into()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat: add simple example<commit_after>extern crate futures;\nextern crate telegram_bot;\nextern crate tokio_core;\n\nuse std::env;\n\nuse futures::Stream;\nuse tokio_core::reactor::Core;\nuse telegram_bot::*;\n\nfn main() {\n let mut core = Core::new().unwrap();\n\n let token = env::var(\"TELEGRAM_BOT_TOKEN\").unwrap();\n let api = Api::from_token(&core.handle(), &token).unwrap();\n\n \/\/ Fetch new updates via long poll method\n let future = api.stream().for_each(|update| {\n\n \/\/ If the received update contains a new message...\n if let UpdateKind::Message(message) = update.kind {\n\n \/\/ Get sender's first name if available.\n let first_name = match message.from.as_ref() {\n Some(from) => &from.first_name,\n None => return Ok(()) \/\/ Skip a message.\n };\n\n if let MessageKind::Text {ref data, ..} = message.kind {\n \/\/ Print received text message to stdout.\n println!(\"<{}>: {}\", first_name, data);\n\n \/\/ Answer message with \"Hi\".\n api.spawn(&message.text_reply(\n format!(\"Hi, {}! You just wrote '{}'\", first_name, data)\n ));\n }\n }\n\n Ok(())\n });\n\n core.run(future).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make derive block cipher key take fixed size input<commit_after><|endoftext|>"} {"text":"<commit_before>export OSMain;\nexport Msg, BeginDrawing, Draw, AddKeyHandler, Exit;\n\nimport azure::*;\nimport azure::bindgen::*;\nimport azure::cairo;\nimport azure::cairo::bindgen::*;\nimport comm::*;\nimport dvec::{dvec, extensions};\nimport azure::cairo::cairo_surface_t;\nimport gfx::renderer::{Sink};\nimport dom::event::{Event, ResizeEvent};\nimport layers::ImageLayer;\nimport geom::size::Size2D;\nimport std::cmp::fuzzy_eq;\nimport vec::push;\n\ntype OSMain = chan<Msg>;\n\nenum Msg {\n BeginDrawing(chan<AzDrawTargetRef>),\n Draw(chan<AzDrawTargetRef>, AzDrawTargetRef),\n AddKeyHandler(chan<()>),\n AddEventListener(chan<Event>),\n Exit\n}\n\nfn OSMain() -> OSMain {\n do on_osmain::<Msg> |po| {\n do platform::runmain {\n #debug(\"preparing to enter main loop\");\n\t mainloop(po);\n }\n }\n}\n\nfn mainloop(po: port<Msg>) {\n let key_handlers: @dvec<chan<()>> = @dvec();\n let event_listeners: @dvec<chan<Event>> = @dvec();\n\n glut::init();\n glut::init_display_mode(glut::DOUBLE);\n\n let surfaces = @surface_set();\n\n let window = glut::create_window(\"Servo\");\n glut::reshape_window(window, 800, 600);\n\n let context = layers::rendergl::init_render_context();\n\n let image = @layers::layers::Image(0, 0, layers::layers::RGB24Format, ~[]);\n let image_layer = @layers::layers::ImageLayer(image);\n image_layer.common.set_transform\n (image_layer.common.transform.scale(800.0f32, 600.0f32, 1.0f32));\n\n let scene = @mut layers::scene::Scene(layers::layers::ImageLayerKind(image_layer),\n Size2D(800.0f32, 600.0f32));\n\n let done = @mut false;\n\n let check_for_messages = fn@() {\n \/\/ Handle messages\n #debug(\"osmain: peeking\");\n if po.peek() {\n alt po.recv() {\n 
AddKeyHandler(key_ch) {\n key_handlers.push(key_ch);\n }\n AddEventListener(event_listener) {\n event_listeners.push(event_listener);\n }\n BeginDrawing(sender) {\n lend_surface(*surfaces, sender);\n }\n Draw(sender, dt) {\n #debug(\"osmain: received new frame\");\n return_surface(*surfaces, dt);\n lend_surface(*surfaces, sender);\n\n let mut image_data;\n unsafe {\n let buffer = cairo_image_surface_get_data(surfaces.s1.surf.cairo_surf);\n image_data = vec::unsafe::from_buf(buffer, 800 * 600 * 4);\n }\n\n let image =\n @layers::layers::Image(800, 600, layers::layers::ARGB32Format, image_data);\n image_layer.set_image(image);\n }\n exit {\n *done = true;\n }\n }\n }\n };\n\n do glut::reshape_func(window) |width, height| {\n check_for_messages();\n\n #debug(\"osmain: window resized to %d,%d\", width as int, height as int);\n for event_listeners.each |event_listener| {\n event_listener.send(ResizeEvent(width as int, height as int));\n }\n }\n\n do glut::display_func() {\n check_for_messages();\n\n #debug(\"osmain: drawing to screen\");\n\n layers::rendergl::render_scene(context, *scene);\n glut::swap_buffers();\n glut::post_redisplay();\n }\n\n while !*done {\n #debug(\"osmain: running GLUT check loop\");\n glut::check_loop();\n }\n\n destroy_surface(surfaces.s1.surf);\n destroy_surface(surfaces.s2.surf);\n}\n\n#[doc = \"\nImplementation to allow the osmain channel to be used as a graphics\nsink for the renderer\n\"]\nimpl OSMain of Sink for OSMain {\n fn begin_drawing(next_dt: chan<AzDrawTargetRef>) {\n self.send(BeginDrawing(next_dt))\n }\n fn draw(next_dt: chan<AzDrawTargetRef>, draw_me: AzDrawTargetRef) {\n self.send(Draw(next_dt, draw_me))\n }\n fn add_event_listener(listener: chan<Event>) {\n self.send(AddEventListener(listener));\n }\n}\n\ntype surface_set = {\n mut s1: {\n surf: surface,\n have: bool\n },\n mut s2: {\n surf: surface,\n have: bool\n }\n};\n\nfn lend_surface(surfaces: surface_set, recvr: chan<AzDrawTargetRef>) {\n \/\/ We are in a position to lend out the surface?\n assert surfaces.s1.have;\n \/\/ Ok then take it\n let dt1 = surfaces.s1.surf.az_target;\n #debug(\"osmain: lending surface %?\", dt1);\n recvr.send(dt1);\n \/\/ Now we don't have it\n surfaces.s1 = {\n have: false\n with surfaces.s1\n };\n \/\/ But we (hopefully) have another!\n surfaces.s1 <-> surfaces.s2;\n \/\/ Let's look\n assert surfaces.s1.have;\n}\n\nfn return_surface(surfaces: surface_set, dt: AzDrawTargetRef) {\n #debug(\"osmain: returning surface %?\", dt);\n \/\/ We have room for a return\n assert surfaces.s1.have;\n assert !surfaces.s2.have;\n assert surfaces.s2.surf.az_target == dt;\n \/\/ Now we have it again\n surfaces.s2 = {\n have: true\n with surfaces.s2\n };\n}\n\nfn surface_set() -> surface_set {\n {\n mut s1: {\n surf: mk_surface(),\n have: true\n },\n mut s2: {\n surf: mk_surface(),\n have: true\n }\n }\n}\n\ntype surface = {\n cairo_surf: *cairo_surface_t,\n az_target: AzDrawTargetRef\n};\n\nfn mk_surface() -> surface {\n let cairo_surf = cairo_image_surface_create(cairo::CAIRO_FORMAT_RGB24, 800, 600);\n\n assert !ptr::is_null(cairo_surf);\n\n let azure_target = AzCreateDrawTargetForCairoSurface(cairo_surf);\n assert !ptr::is_null(azure_target);\n\n {\n cairo_surf: cairo_surf,\n az_target: azure_target\n }\n}\n\nfn destroy_surface(+surface: surface) {\n AzReleaseDrawTarget(surface.az_target);\n cairo_surface_destroy(surface.cairo_surf);\n}\n\n#[doc = \"A function for spawning into the platform's main thread\"]\nfn on_osmain<T: send>(+f: fn~(comm::port<T>)) -> comm::chan<T> {\n let 
builder = task::builder();\n let opts = {\n sched: some({\n mode: task::osmain,\n foreign_stack_size: none\n })\n with task::get_opts(builder)\n };\n task::set_opts(builder, opts);\n ret task::run_listener(builder, f);\n}\n\n\/\/ #[cfg(target_os = \"linux\")]\nmod platform {\n fn runmain(f: fn()) {\n f()\n }\n}\n\n<commit_msg>Add some timing for compositing<commit_after>export OSMain;\nexport Msg, BeginDrawing, Draw, AddKeyHandler, Exit;\n\nimport azure::*;\nimport azure::bindgen::*;\nimport azure::cairo;\nimport azure::cairo::bindgen::*;\nimport comm::*;\nimport dvec::{dvec, extensions};\nimport azure::cairo::cairo_surface_t;\nimport gfx::renderer::{Sink};\nimport dom::event::{Event, ResizeEvent};\nimport layers::ImageLayer;\nimport geom::size::Size2D;\nimport std::cmp::fuzzy_eq;\nimport std::time::precise_time_ns;\nimport vec::push;\n\ntype OSMain = chan<Msg>;\n\nenum Msg {\n BeginDrawing(chan<AzDrawTargetRef>),\n Draw(chan<AzDrawTargetRef>, AzDrawTargetRef),\n AddKeyHandler(chan<()>),\n AddEventListener(chan<Event>),\n Exit\n}\n\nfn OSMain() -> OSMain {\n do on_osmain::<Msg> |po| {\n do platform::runmain {\n #debug(\"preparing to enter main loop\");\n\t mainloop(po);\n }\n }\n}\n\nfn time(msg: str, callback: fn()) {\n let start_time = precise_time_ns();\n callback();\n let end_time = precise_time_ns();\n #debug(\"%s took %u ms\", msg, ((end_time - start_time) \/ 1000000u64) as uint);\n}\n\nfn mainloop(po: port<Msg>) {\n let key_handlers: @dvec<chan<()>> = @dvec();\n let event_listeners: @dvec<chan<Event>> = @dvec();\n\n glut::init();\n glut::init_display_mode(glut::DOUBLE);\n\n let surfaces = @surface_set();\n\n let window = glut::create_window(\"Servo\");\n glut::reshape_window(window, 800, 600);\n\n let context = layers::rendergl::init_render_context();\n\n let image = @layers::layers::Image(0, 0, layers::layers::RGB24Format, ~[]);\n let image_layer = @layers::layers::ImageLayer(image);\n image_layer.common.set_transform\n (image_layer.common.transform.scale(800.0f32, 600.0f32, 1.0f32));\n\n let scene = @mut layers::scene::Scene(layers::layers::ImageLayerKind(image_layer),\n Size2D(800.0f32, 600.0f32));\n\n let done = @mut false;\n\n let check_for_messages = fn@() {\n \/\/ Handle messages\n #debug(\"osmain: peeking\");\n if po.peek() {\n alt po.recv() {\n AddKeyHandler(key_ch) {\n key_handlers.push(key_ch);\n }\n AddEventListener(event_listener) {\n event_listeners.push(event_listener);\n }\n BeginDrawing(sender) {\n lend_surface(*surfaces, sender);\n }\n Draw(sender, dt) {\n #debug(\"osmain: received new frame\");\n return_surface(*surfaces, dt);\n lend_surface(*surfaces, sender);\n\n let mut image_data;\n unsafe {\n let buffer = cairo_image_surface_get_data(surfaces.s1.surf.cairo_surf);\n image_data = vec::unsafe::from_buf(buffer, 800 * 600 * 4);\n }\n\n let image =\n @layers::layers::Image(800, 600, layers::layers::ARGB32Format, image_data);\n image_layer.set_image(image);\n }\n exit {\n *done = true;\n }\n }\n }\n };\n\n do glut::reshape_func(window) |width, height| {\n check_for_messages();\n\n #debug(\"osmain: window resized to %d,%d\", width as int, height as int);\n for event_listeners.each |event_listener| {\n event_listener.send(ResizeEvent(width as int, height as int));\n }\n }\n\n do glut::display_func() {\n check_for_messages();\n\n #debug(\"osmain: drawing to screen\");\n\n do time(\"compositing\") {\n layers::rendergl::render_scene(context, *scene);\n }\n\n glut::swap_buffers();\n glut::post_redisplay();\n }\n\n while !*done {\n #debug(\"osmain: running GLUT check 
loop\");\n glut::check_loop();\n }\n\n destroy_surface(surfaces.s1.surf);\n destroy_surface(surfaces.s2.surf);\n}\n\n#[doc = \"\nImplementation to allow the osmain channel to be used as a graphics\nsink for the renderer\n\"]\nimpl OSMain of Sink for OSMain {\n fn begin_drawing(next_dt: chan<AzDrawTargetRef>) {\n self.send(BeginDrawing(next_dt))\n }\n fn draw(next_dt: chan<AzDrawTargetRef>, draw_me: AzDrawTargetRef) {\n self.send(Draw(next_dt, draw_me))\n }\n fn add_event_listener(listener: chan<Event>) {\n self.send(AddEventListener(listener));\n }\n}\n\ntype surface_set = {\n mut s1: {\n surf: surface,\n have: bool\n },\n mut s2: {\n surf: surface,\n have: bool\n }\n};\n\nfn lend_surface(surfaces: surface_set, recvr: chan<AzDrawTargetRef>) {\n \/\/ We are in a position to lend out the surface?\n assert surfaces.s1.have;\n \/\/ Ok then take it\n let dt1 = surfaces.s1.surf.az_target;\n #debug(\"osmain: lending surface %?\", dt1);\n recvr.send(dt1);\n \/\/ Now we don't have it\n surfaces.s1 = {\n have: false\n with surfaces.s1\n };\n \/\/ But we (hopefully) have another!\n surfaces.s1 <-> surfaces.s2;\n \/\/ Let's look\n assert surfaces.s1.have;\n}\n\nfn return_surface(surfaces: surface_set, dt: AzDrawTargetRef) {\n #debug(\"osmain: returning surface %?\", dt);\n \/\/ We have room for a return\n assert surfaces.s1.have;\n assert !surfaces.s2.have;\n assert surfaces.s2.surf.az_target == dt;\n \/\/ Now we have it again\n surfaces.s2 = {\n have: true\n with surfaces.s2\n };\n}\n\nfn surface_set() -> surface_set {\n {\n mut s1: {\n surf: mk_surface(),\n have: true\n },\n mut s2: {\n surf: mk_surface(),\n have: true\n }\n }\n}\n\ntype surface = {\n cairo_surf: *cairo_surface_t,\n az_target: AzDrawTargetRef\n};\n\nfn mk_surface() -> surface {\n let cairo_surf = cairo_image_surface_create(cairo::CAIRO_FORMAT_RGB24, 800, 600);\n\n assert !ptr::is_null(cairo_surf);\n\n let azure_target = AzCreateDrawTargetForCairoSurface(cairo_surf);\n assert !ptr::is_null(azure_target);\n\n {\n cairo_surf: cairo_surf,\n az_target: azure_target\n }\n}\n\nfn destroy_surface(+surface: surface) {\n AzReleaseDrawTarget(surface.az_target);\n cairo_surface_destroy(surface.cairo_surf);\n}\n\n#[doc = \"A function for spawning into the platform's main thread\"]\nfn on_osmain<T: send>(+f: fn~(comm::port<T>)) -> comm::chan<T> {\n let builder = task::builder();\n let opts = {\n sched: some({\n mode: task::osmain,\n foreign_stack_size: none\n })\n with task::get_opts(builder)\n };\n task::set_opts(builder, opts);\n ret task::run_listener(builder, f);\n}\n\n\/\/ #[cfg(target_os = \"linux\")]\nmod platform {\n fn runmain(f: fn()) {\n f()\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add prefix file<commit_after>#![allow(non_upper_case_globals, missing_docs)]\npub const exa: f64 = 1e18;\npub const peta: f64 = 1e15;\npub const tera: f64 = 1e12;\npub const giga: f64 = 1e9;\npub const mega: f64 = 1e6;\npub const kilo: f64 = 1e3;\npub const hecto: f64 = 1e2;\npub const deca: f64 = 1e1;\n\npub const deci: f64 = 1e-1;\npub const centi: f64 = 1e-2;\npub const milli: f64 = 1e-3;\npub const micro: f64 = 1e-6;\npub const nano: f64 = 1e-9;\npub const pico: f64 = 1e-12;\npub const femto: f64 = 1e-15;\npub const atto: f64 = 1e-18;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Allow backspacing across lines<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use Iterator in Calc's tokenize() Function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Advent day6<commit_after><|endoftext|>"} 
{"text":"<commit_before>extern crate handlebars;\nextern crate rustc_serialize;\nextern crate pulldown_cmark;\n\nuse renderer::html_handlebars::helpers;\nuse renderer::Renderer;\nuse book::MDBook;\nuse book::bookitem::BookItem;\nuse {utils, theme};\n\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::error::Error;\nuse std::io::{self, Read, Write};\nuse std::collections::BTreeMap;\n\nuse self::handlebars::{Handlebars, JsonRender};\nuse self::rustc_serialize::json::{Json, ToJson};\nuse self::pulldown_cmark::{Parser, html};\n\npub struct HtmlHandlebars;\n\nimpl HtmlHandlebars {\n pub fn new() -> Self {\n HtmlHandlebars\n }\n}\n\nimpl Renderer for HtmlHandlebars {\n fn render(&self, book: &MDBook) -> Result<(), Box<Error>> {\n debug!(\"[fn]: render\");\n let mut handlebars = Handlebars::new();\n\n \/\/ Load theme\n let theme = theme::Theme::new(book.get_src());\n\n \/\/ Register template\n debug!(\"[*]: Register handlebars template\");\n try!(handlebars.register_template_string(\"index\", try!(String::from_utf8(theme.index))));\n\n \/\/ Register helpers\n debug!(\"[*]: Register handlebars helpers\");\n handlebars.register_helper(\"toc\", Box::new(helpers::toc::RenderToc));\n handlebars.register_helper(\"previous\", Box::new(helpers::navigation::previous));\n handlebars.register_helper(\"next\", Box::new(helpers::navigation::next));\n\n let mut data = try!(make_data(book));\n\n \/\/ Print version\n let mut print_content: String = String::new();\n\n \/\/ Check if dest directory exists\n debug!(\"[*]: Check if destination directory exists\");\n if let Err(_) = utils::create_path(book.get_dest()) {\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Unexpected error when constructing destination path\")))\n }\n\n \/\/ Render a file for every entry in the book\n let mut index = true;\n for item in book.iter() {\n\n match *item {\n BookItem::Chapter(_, ref ch) | BookItem::Affix(ref ch) => {\n if ch.path != PathBuf::new() {\n\n let path = book.get_src().join(&ch.path);\n\n debug!(\"[*]: Opening file: {:?}\", path);\n let mut f = try!(File::open(&path));\n let mut content: String = String::new();\n\n debug!(\"[*]: Reading file\");\n try!(f.read_to_string(&mut content));\n\n \/\/ Render markdown using the pulldown-cmark crate\n content = render_html(&content);\n print_content.push_str(&content);\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"path\");\n match ch.path.to_str() {\n Some(p) => { data.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"content\");\n data.insert(\"content\".to_owned(), content.to_json());\n\n \/\/ Remove path to root from previous file and render content for this one\n data.remove(\"path_to_root\");\n data.insert(\"path_to_root\".to_owned(), utils::path_to_root(&ch.path).to_json());\n\n \/\/ Rendere the handlebars template with the data\n debug!(\"[*]: Render template\");\n let rendered = try!(handlebars.render(\"index\", &data));\n\n debug!(\"[*]: Create file {:?}\", &book.get_dest().join(&ch.path).with_extension(\"html\"));\n \/\/ Write to file\n let mut file = try!(utils::create_file(&book.get_dest().join(&ch.path).with_extension(\"html\")));\n output!(\"[*] Creating {:?} ✓\", &book.get_dest().join(&ch.path).with_extension(\"html\"));\n\n try!(file.write_all(&rendered.into_bytes()));\n\n \/\/ Create an index.html 
from the first element in SUMMARY.md\n if index {\n debug!(\"[*]: index.html\");\n\n let mut index_file = try!(File::create(book.get_dest().join(\"index.html\")));\n let mut content = String::new();\n let _source = try!(File::open(book.get_dest().join(&ch.path.with_extension(\"html\"))))\n .read_to_string(&mut content);\n\n \/\/ This could cause a problem when someone displays code containing <base href=...>\n \/\/ on the front page, however this case should be very very rare...\n content = content.lines().filter(|line| !line.contains(\"<base href=\")).collect();\n\n try!(index_file.write_all(content.as_bytes()));\n\n output!(\n \"[*] Creating index.html from {:?} ✓\",\n book.get_dest().join(&ch.path.with_extension(\"html\"))\n );\n index = false;\n }\n }\n }\n _ => {}\n }\n }\n\n \/\/ Print version\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"path\");\n data.insert(\"path\".to_owned(), \"print.md\".to_json());\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"content\");\n data.insert(\"content\".to_owned(), print_content.to_json());\n\n \/\/ Remove path to root from previous file and render content for this one\n data.remove(\"path_to_root\");\n data.insert(\"path_to_root\".to_owned(), utils::path_to_root(Path::new(\"print.md\")).to_json());\n\n \/\/ Rendere the handlebars template with the data\n debug!(\"[*]: Render template\");\n let rendered = try!(handlebars.render(\"index\", &data));\n let mut file = try!(utils::create_file(&book.get_dest().join(\"print\").with_extension(\"html\")));\n try!(file.write_all(&rendered.into_bytes()));\n output!(\"[*] Creating print.html ✓\");\n\n \/\/ Copy static files (js, css, images, ...)\n\n debug!(\"[*] Copy static files\");\n \/\/ JavaScript\n let mut js_file = try!(File::create(book.get_dest().join(\"book.js\")));\n try!(js_file.write_all(&theme.js));\n\n \/\/ Css\n let mut css_file = try!(File::create(book.get_dest().join(\"book.css\")));\n try!(css_file.write_all(&theme.css));\n\n \/\/ JQuery local fallback\n let mut jquery = try!(File::create(book.get_dest().join(\"jquery.js\")));\n try!(jquery.write_all(&theme.jquery));\n\n \/\/ Font Awesome local fallback\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/css\/font-awesome\").with_extension(\"css\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.eot\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_EOT));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.svg\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_SVG));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.ttf\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_TTF));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.woff\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_WOFF));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.woff2\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_WOFF2));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/FontAwesome.ttf\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_TTF));\n\n \/\/ syntax highlighting\n let mut highlight_css 
= try!(File::create(book.get_dest().join(\"highlight.css\")));\n try!(highlight_css.write_all(&theme.highlight_css));\n let mut tomorrow_night_css = try!(File::create(book.get_dest().join(\"tomorrow-night.css\")));\n try!(tomorrow_night_css.write_all(&theme.tomorrow_night_css));\n let mut highlight_js = try!(File::create(book.get_dest().join(\"highlight.js\")));\n try!(highlight_js.write_all(&theme.highlight_js));\n\n\n \/\/ Copy all remaining files\n try!(utils::copy_files_except_ext(book.get_src(), book.get_dest(), true, &[\"md\"]));\n\n Ok(())\n }\n}\n\nfn make_data(book: &MDBook) -> Result<BTreeMap<String,Json>, Box<Error>> {\n debug!(\"[fn]: make_data\");\n\n let mut data = BTreeMap::new();\n data.insert(\"language\".to_owned(), \"en\".to_json());\n data.insert(\"title\".to_owned(), book.get_title().to_json());\n\n let mut chapters = vec![];\n\n for item in book.iter() {\n \/\/ Create the data to inject in the template\n let mut chapter = BTreeMap::new();\n\n match *item {\n BookItem::Affix(ref ch) => {\n chapter.insert(\"name\".to_owned(), ch.name.to_json());\n match ch.path.to_str() {\n Some(p) => { chapter.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n },\n BookItem::Chapter(ref s, ref ch) => {\n chapter.insert(\"section\".to_owned(), s.to_json());\n chapter.insert(\"name\".to_owned(), ch.name.to_json());\n match ch.path.to_str() {\n Some(p) => { chapter.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n },\n BookItem::Spacer => {\n chapter.insert(\"spacer\".to_owned(), \"_spacer_\".to_json());\n }\n\n }\n\n chapters.push(chapter);\n }\n\n data.insert(\"chapters\".to_owned(), chapters.to_json());\n\n debug!(\"[*]: JSON constructed\");\n Ok(data)\n}\n\nfn render_html(text: &str) -> String {\n let mut s = String::with_capacity(text.len() * 3 \/ 2);\n let p = Parser::new(&text);\n html::push_html(&mut s, p);\n s\n}\n<commit_msg>Add newlines back in to generated index.html files.<commit_after>extern crate handlebars;\nextern crate rustc_serialize;\nextern crate pulldown_cmark;\n\nuse renderer::html_handlebars::helpers;\nuse renderer::Renderer;\nuse book::MDBook;\nuse book::bookitem::BookItem;\nuse {utils, theme};\n\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::error::Error;\nuse std::io::{self, Read, Write};\nuse std::collections::BTreeMap;\n\nuse self::handlebars::{Handlebars, JsonRender};\nuse self::rustc_serialize::json::{Json, ToJson};\nuse self::pulldown_cmark::{Parser, html};\n\npub struct HtmlHandlebars;\n\nimpl HtmlHandlebars {\n pub fn new() -> Self {\n HtmlHandlebars\n }\n}\n\nimpl Renderer for HtmlHandlebars {\n fn render(&self, book: &MDBook) -> Result<(), Box<Error>> {\n debug!(\"[fn]: render\");\n let mut handlebars = Handlebars::new();\n\n \/\/ Load theme\n let theme = theme::Theme::new(book.get_src());\n\n \/\/ Register template\n debug!(\"[*]: Register handlebars template\");\n try!(handlebars.register_template_string(\"index\", try!(String::from_utf8(theme.index))));\n\n \/\/ Register helpers\n debug!(\"[*]: Register handlebars helpers\");\n handlebars.register_helper(\"toc\", Box::new(helpers::toc::RenderToc));\n handlebars.register_helper(\"previous\", Box::new(helpers::navigation::previous));\n handlebars.register_helper(\"next\", Box::new(helpers::navigation::next));\n\n let mut data = try!(make_data(book));\n\n \/\/ Print version\n let 
mut print_content: String = String::new();\n\n \/\/ Check if dest directory exists\n debug!(\"[*]: Check if destination directory exists\");\n if let Err(_) = utils::create_path(book.get_dest()) {\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Unexpected error when constructing destination path\")))\n }\n\n \/\/ Render a file for every entry in the book\n let mut index = true;\n for item in book.iter() {\n\n match *item {\n BookItem::Chapter(_, ref ch) | BookItem::Affix(ref ch) => {\n if ch.path != PathBuf::new() {\n\n let path = book.get_src().join(&ch.path);\n\n debug!(\"[*]: Opening file: {:?}\", path);\n let mut f = try!(File::open(&path));\n let mut content: String = String::new();\n\n debug!(\"[*]: Reading file\");\n try!(f.read_to_string(&mut content));\n\n \/\/ Render markdown using the pulldown-cmark crate\n content = render_html(&content);\n print_content.push_str(&content);\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"path\");\n match ch.path.to_str() {\n Some(p) => { data.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"content\");\n data.insert(\"content\".to_owned(), content.to_json());\n\n \/\/ Remove path to root from previous file and render content for this one\n data.remove(\"path_to_root\");\n data.insert(\"path_to_root\".to_owned(), utils::path_to_root(&ch.path).to_json());\n\n \/\/ Rendere the handlebars template with the data\n debug!(\"[*]: Render template\");\n let rendered = try!(handlebars.render(\"index\", &data));\n\n debug!(\"[*]: Create file {:?}\", &book.get_dest().join(&ch.path).with_extension(\"html\"));\n \/\/ Write to file\n let mut file = try!(utils::create_file(&book.get_dest().join(&ch.path).with_extension(\"html\")));\n output!(\"[*] Creating {:?} ✓\", &book.get_dest().join(&ch.path).with_extension(\"html\"));\n\n try!(file.write_all(&rendered.into_bytes()));\n\n \/\/ Create an index.html from the first element in SUMMARY.md\n if index {\n debug!(\"[*]: index.html\");\n\n let mut index_file = try!(File::create(book.get_dest().join(\"index.html\")));\n let mut content = String::new();\n let _source = try!(File::open(book.get_dest().join(&ch.path.with_extension(\"html\"))))\n .read_to_string(&mut content);\n\n \/\/ This could cause a problem when someone displays code containing <base href=...>\n \/\/ on the front page, however this case should be very very rare...\n content = content.lines().filter(|line| !line.contains(\"<base href=\")).map(|line| line.to_string() + \"\\n\").collect();\n\n try!(index_file.write_all(content.as_bytes()));\n\n output!(\n \"[*] Creating index.html from {:?} ✓\",\n book.get_dest().join(&ch.path.with_extension(\"html\"))\n );\n index = false;\n }\n }\n }\n _ => {}\n }\n }\n\n \/\/ Print version\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"path\");\n data.insert(\"path\".to_owned(), \"print.md\".to_json());\n\n \/\/ Remove content from previous file and render content for this one\n data.remove(\"content\");\n data.insert(\"content\".to_owned(), print_content.to_json());\n\n \/\/ Remove path to root from previous file and render content for this one\n data.remove(\"path_to_root\");\n data.insert(\"path_to_root\".to_owned(), utils::path_to_root(Path::new(\"print.md\")).to_json());\n\n \/\/ Rendere the handlebars template with 
the data\n debug!(\"[*]: Render template\");\n let rendered = try!(handlebars.render(\"index\", &data));\n let mut file = try!(utils::create_file(&book.get_dest().join(\"print\").with_extension(\"html\")));\n try!(file.write_all(&rendered.into_bytes()));\n output!(\"[*] Creating print.html ✓\");\n\n \/\/ Copy static files (js, css, images, ...)\n\n debug!(\"[*] Copy static files\");\n \/\/ JavaScript\n let mut js_file = try!(File::create(book.get_dest().join(\"book.js\")));\n try!(js_file.write_all(&theme.js));\n\n \/\/ Css\n let mut css_file = try!(File::create(book.get_dest().join(\"book.css\")));\n try!(css_file.write_all(&theme.css));\n\n \/\/ JQuery local fallback\n let mut jquery = try!(File::create(book.get_dest().join(\"jquery.js\")));\n try!(jquery.write_all(&theme.jquery));\n\n \/\/ Font Awesome local fallback\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/css\/font-awesome\").with_extension(\"css\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.eot\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_EOT));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.svg\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_SVG));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.ttf\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_TTF));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.woff\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_WOFF));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/fontawesome-webfont.woff2\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_WOFF2));\n let mut font_awesome = try!(utils::create_file(&book.get_dest().join(\"_FontAwesome\/fonts\/FontAwesome.ttf\")));\n try!(font_awesome.write_all(theme::FONT_AWESOME_TTF));\n\n \/\/ syntax highlighting\n let mut highlight_css = try!(File::create(book.get_dest().join(\"highlight.css\")));\n try!(highlight_css.write_all(&theme.highlight_css));\n let mut tomorrow_night_css = try!(File::create(book.get_dest().join(\"tomorrow-night.css\")));\n try!(tomorrow_night_css.write_all(&theme.tomorrow_night_css));\n let mut highlight_js = try!(File::create(book.get_dest().join(\"highlight.js\")));\n try!(highlight_js.write_all(&theme.highlight_js));\n\n\n \/\/ Copy all remaining files\n try!(utils::copy_files_except_ext(book.get_src(), book.get_dest(), true, &[\"md\"]));\n\n Ok(())\n }\n}\n\nfn make_data(book: &MDBook) -> Result<BTreeMap<String,Json>, Box<Error>> {\n debug!(\"[fn]: make_data\");\n\n let mut data = BTreeMap::new();\n data.insert(\"language\".to_owned(), \"en\".to_json());\n data.insert(\"title\".to_owned(), book.get_title().to_json());\n\n let mut chapters = vec![];\n\n for item in book.iter() {\n \/\/ Create the data to inject in the template\n let mut chapter = BTreeMap::new();\n\n match *item {\n BookItem::Affix(ref ch) => {\n chapter.insert(\"name\".to_owned(), ch.name.to_json());\n match ch.path.to_str() {\n Some(p) => { chapter.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n },\n BookItem::Chapter(ref s, ref ch) => {\n chapter.insert(\"section\".to_owned(), s.to_json());\n 
chapter.insert(\"name\".to_owned(), ch.name.to_json());\n match ch.path.to_str() {\n Some(p) => { chapter.insert(\"path\".to_owned(), p.to_json()); },\n None => return Err(Box::new(io::Error::new(io::ErrorKind::Other, \"Could not convert path to str\"))),\n }\n },\n BookItem::Spacer => {\n chapter.insert(\"spacer\".to_owned(), \"_spacer_\".to_json());\n }\n\n }\n\n chapters.push(chapter);\n }\n\n data.insert(\"chapters\".to_owned(), chapters.to_json());\n\n debug!(\"[*]: JSON constructed\");\n Ok(data)\n}\n\nfn render_html(text: &str) -> String {\n let mut s = String::with_capacity(text.len() * 3 \/ 2);\n let p = Parser::new(&text);\n html::push_html(&mut s, p);\n s\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>bind the range to a name<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Show we basically import journeys too.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>result map<commit_after>use std::num::ParseIntError;\n\nfn double_number(s: &str) -> Result<i32, ParseIntError> {\n match s.parse::<i32>() {\n Ok(n) => Ok(n * 2),\n Err(e) => Err(e)\n }\n}\n\nfn double_with_map(s: &str) -> Result<i32, ParseIntError> {\n s.parse::<i32>().map(|n| n*2)\n}\n\ntype AliasedResult<T> = Result<T, ParseIntError>;\n\nfn double_with_alias(s: &str) -> AliasedResult<i32> {\n s.parse::<i32>().map(|n| n*2)\n}\n\nfn main() {\n println!(\"Double 10: {:?}\", double_number(\"10\"));\n println!(\"Double tt: {:?}\", double_number(\"tt\"));\n\n println!(\"Double 20: {:?}\", double_number(\"20\"));\n\n println!(\"Double 20: {:?}\", double_with_alias(\"20\"));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before>#![warn(clippy::all, rust_2018_idioms)]\n\nuse cargo_registry::{boot, App, Env};\nuse std::{\n fs::File,\n sync::{mpsc::channel, Arc, Mutex},\n thread,\n time::Duration,\n};\n\nuse civet::Server as CivetServer;\nuse conduit_hyper::Service;\nuse futures_util::future::FutureExt;\nuse reqwest::blocking::Client;\n\nenum Server {\n Civet(CivetServer),\n Hyper(tokio::runtime::Runtime, tokio::task::JoinHandle<()>),\n}\n\nuse Server::*;\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n \/\/ Initialize logging\n env_logger::init();\n\n let config = cargo_registry::Config::default();\n let client = Client::new();\n\n let app = App::new(&config, Some(client));\n let app = cargo_registry::build_handler(Arc::new(app));\n\n \/\/ On every server restart, ensure the categories available in the database match\n \/\/ the information in *src\/categories.toml*.\n let categories_toml = include_str!(\"..\/boot\/categories.toml\");\n boot::categories::sync(categories_toml).unwrap();\n\n let heroku = dotenv::var(\"HEROKU\").is_ok();\n let fastboot = dotenv::var(\"USE_FASTBOOT\").is_ok();\n\n let port = if heroku {\n 8888\n } else {\n dotenv::var(\"PORT\")\n .ok()\n .and_then(|s| s.parse().ok())\n .unwrap_or(8888)\n };\n let threads = dotenv::var(\"SERVER_THREADS\")\n .map(|s| s.parse().expect(\"SERVER_THREADS was not a valid number\"))\n .unwrap_or_else(|_| {\n if config.env == Env::Development {\n 5\n } else {\n 50\n }\n });\n\n let server = if dotenv::var(\"USE_HYPER\").is_ok() {\n use tokio::io::AsyncWriteExt;\n use tokio::signal::unix::{signal, SignalKind};\n\n println!(\"Booting with a hyper based server\");\n\n let mut rt = tokio::runtime::Builder::new()\n .threaded_scheduler()\n .enable_all()\n .build()\n .unwrap();\n\n let 
handler = Arc::new(conduit_hyper::BlockingHandler::new(app, threads as usize));\n let make_service =\n hyper::service::make_service_fn(move |socket: &hyper::server::conn::AddrStream| {\n let addr = socket.remote_addr();\n let handler = handler.clone();\n async move { Service::from_blocking(handler, addr) }\n });\n\n let addr = ([127, 0, 0, 1], port).into();\n let server = rt.block_on(async { hyper::Server::bind(&addr).serve(make_service) });\n\n let mut sig_int = rt.block_on(async { signal(SignalKind::interrupt()) })?;\n let mut sig_term = rt.block_on(async { signal(SignalKind::terminate()) })?;\n\n let server = server.with_graceful_shutdown(async move {\n \/\/ Wait for either signal\n futures_util::select! {\n _ = sig_int.recv().fuse() => (),\n _ = sig_term.recv().fuse() => (),\n };\n let mut stdout = tokio::io::stdout();\n stdout.write_all(b\"Starting graceful shutdown\\n\").await.ok();\n });\n\n let server = rt.spawn(async { server.await.unwrap() });\n Hyper(rt, server)\n } else {\n println!(\"Booting with a civet based server\");\n let mut cfg = civet::Config::new();\n cfg.port(port).threads(threads).keep_alive(true);\n Civet(CivetServer::start(cfg, app).unwrap())\n };\n\n println!(\"listening on port {}\", port);\n\n \/\/ Give tokio a chance to spawn the first worker thread\n thread::sleep(Duration::from_millis(10));\n\n \/\/ Creating this file tells heroku to tell nginx that the application is ready\n \/\/ to receive traffic.\n if heroku {\n let path = if fastboot {\n \"\/tmp\/backend-initialized\"\n } else {\n \"\/tmp\/app-initialized\"\n };\n println!(\"Writing to {}\", path);\n File::create(path).unwrap();\n }\n\n \/\/ Block the main thread until the server has shutdown\n match server {\n Hyper(mut rt, server) => {\n rt.block_on(async { server.await.unwrap() });\n }\n Civet(server) => {\n let (tx, rx) = channel::<()>();\n ctrlc_handler(move || tx.send(()).unwrap_or(()));\n rx.recv().unwrap();\n drop(server);\n }\n }\n\n println!(\"Server has gracefully shutdown!\");\n Ok(())\n}\n\nfn ctrlc_handler<F>(f: F)\nwhere\n F: FnOnce() + Send + 'static,\n{\n let call_once = Mutex::new(Some(f));\n\n ctrlc::set_handler(move || {\n if let Some(f) = call_once.lock().unwrap().take() {\n println!(\"Starting graceful shutdown\");\n f();\n } else {\n println!(\"Already sent signal to start graceful shutdown\");\n }\n })\n .unwrap();\n}\n<commit_msg>Allow large enum variant for Server enum<commit_after>#![warn(clippy::all, rust_2018_idioms)]\n\nuse cargo_registry::{boot, App, Env};\nuse std::{\n fs::File,\n sync::{mpsc::channel, Arc, Mutex},\n thread,\n time::Duration,\n};\n\nuse civet::Server as CivetServer;\nuse conduit_hyper::Service;\nuse futures_util::future::FutureExt;\nuse reqwest::blocking::Client;\n\n#[allow(clippy::large_enum_variant)]\nenum Server {\n Civet(CivetServer),\n Hyper(tokio::runtime::Runtime, tokio::task::JoinHandle<()>),\n}\n\nuse Server::*;\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n \/\/ Initialize logging\n env_logger::init();\n\n let config = cargo_registry::Config::default();\n let client = Client::new();\n\n let app = App::new(&config, Some(client));\n let app = cargo_registry::build_handler(Arc::new(app));\n\n \/\/ On every server restart, ensure the categories available in the database match\n \/\/ the information in *src\/categories.toml*.\n let categories_toml = include_str!(\"..\/boot\/categories.toml\");\n boot::categories::sync(categories_toml).unwrap();\n\n let heroku = dotenv::var(\"HEROKU\").is_ok();\n let fastboot = 
dotenv::var(\"USE_FASTBOOT\").is_ok();\n\n let port = if heroku {\n 8888\n } else {\n dotenv::var(\"PORT\")\n .ok()\n .and_then(|s| s.parse().ok())\n .unwrap_or(8888)\n };\n let threads = dotenv::var(\"SERVER_THREADS\")\n .map(|s| s.parse().expect(\"SERVER_THREADS was not a valid number\"))\n .unwrap_or_else(|_| {\n if config.env == Env::Development {\n 5\n } else {\n 50\n }\n });\n\n let server = if dotenv::var(\"USE_HYPER\").is_ok() {\n use tokio::io::AsyncWriteExt;\n use tokio::signal::unix::{signal, SignalKind};\n\n println!(\"Booting with a hyper based server\");\n\n let mut rt = tokio::runtime::Builder::new()\n .threaded_scheduler()\n .enable_all()\n .build()\n .unwrap();\n\n let handler = Arc::new(conduit_hyper::BlockingHandler::new(app, threads as usize));\n let make_service =\n hyper::service::make_service_fn(move |socket: &hyper::server::conn::AddrStream| {\n let addr = socket.remote_addr();\n let handler = handler.clone();\n async move { Service::from_blocking(handler, addr) }\n });\n\n let addr = ([127, 0, 0, 1], port).into();\n let server = rt.block_on(async { hyper::Server::bind(&addr).serve(make_service) });\n\n let mut sig_int = rt.block_on(async { signal(SignalKind::interrupt()) })?;\n let mut sig_term = rt.block_on(async { signal(SignalKind::terminate()) })?;\n\n let server = server.with_graceful_shutdown(async move {\n \/\/ Wait for either signal\n futures_util::select! {\n _ = sig_int.recv().fuse() => (),\n _ = sig_term.recv().fuse() => (),\n };\n let mut stdout = tokio::io::stdout();\n stdout.write_all(b\"Starting graceful shutdown\\n\").await.ok();\n });\n\n let server = rt.spawn(async { server.await.unwrap() });\n Hyper(rt, server)\n } else {\n println!(\"Booting with a civet based server\");\n let mut cfg = civet::Config::new();\n cfg.port(port).threads(threads).keep_alive(true);\n Civet(CivetServer::start(cfg, app).unwrap())\n };\n\n println!(\"listening on port {}\", port);\n\n \/\/ Give tokio a chance to spawn the first worker thread\n thread::sleep(Duration::from_millis(10));\n\n \/\/ Creating this file tells heroku to tell nginx that the application is ready\n \/\/ to receive traffic.\n if heroku {\n let path = if fastboot {\n \"\/tmp\/backend-initialized\"\n } else {\n \"\/tmp\/app-initialized\"\n };\n println!(\"Writing to {}\", path);\n File::create(path).unwrap();\n }\n\n \/\/ Block the main thread until the server has shutdown\n match server {\n Hyper(mut rt, server) => {\n rt.block_on(async { server.await.unwrap() });\n }\n Civet(server) => {\n let (tx, rx) = channel::<()>();\n ctrlc_handler(move || tx.send(()).unwrap_or(()));\n rx.recv().unwrap();\n drop(server);\n }\n }\n\n println!(\"Server has gracefully shutdown!\");\n Ok(())\n}\n\nfn ctrlc_handler<F>(f: F)\nwhere\n F: FnOnce() + Send + 'static,\n{\n let call_once = Mutex::new(Some(f));\n\n ctrlc::set_handler(move || {\n if let Some(f) = call_once.lock().unwrap().take() {\n println!(\"Starting graceful shutdown\");\n f();\n } else {\n println!(\"Already sent signal to start graceful shutdown\");\n }\n })\n .unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>use core::clone::Clone;\nuse core::iter::Iterator;\nuse core::ops::Drop;\nuse core::option::Option;\nuse core::ptr;\nuse core::slice::{self, SliceExt};\n\nuse common::memory::*;\n\n\/\/\/ Create a vector with some predefined elements\n#[macro_export]\nmacro_rules! 
vec {\n ($($x:expr),*) => (\n Vec::from_slice(&[$($x),*])\n );\n ($($x:expr,)*) => (vec![$($x),*])\n}\n\n\/\/\/ An iterator over a vec\npub struct VecIterator<'a, T: 'a> {\n \/\/\/ The vector to iterate over\n vec: &'a Vec<T>,\n \/\/\/ The offset\n offset: usize,\n}\n\nimpl <'a, T> Iterator for VecIterator<'a, T> {\n type Item = &'a mut T;\n fn next(&mut self) -> Option<Self::Item> {\n match self.vec.get(self.offset) {\n Option::Some(item) => {\n self.offset += 1;\n Option::Some(item)\n }\n Option::None => {\n Option::None\n }\n }\n }\n}\n\n\/\/\/ A owned, heap allocated list of elements\npub struct Vec<T> {\n \/\/\/ The vector data\n pub mem: Memory<T>, \/\/ TODO: Option<Memory>\n \/\/\/ The length of the vector\n pub length: usize,\n}\n\nimpl <T> Vec<T> {\n \/\/\/ Create a empty vector\n pub fn new() -> Self {\n Vec {\n mem: Memory { ptr: 0 as *mut T \/* TODO: Option::None *\/ },\n length: 0,\n }\n }\n\n \/\/\/ Convert to pointer\n pub unsafe fn as_ptr(&self) -> *const T {\n self.mem.ptr\n }\n\n \/\/\/ Convert from a raw (unsafe) buffer\n pub unsafe fn from_raw_buf(ptr: *const T, len: usize) -> Self {\n match Memory::new(len) {\n Option::Some(mem) => {\n ptr::copy(ptr, mem.ptr, len);\n\n return Vec {\n mem: mem,\n length: len,\n };\n }\n Option::None => {\n return Self::new();\n }\n }\n }\n\n \/\/\/ Create a vector from a slice\n pub fn from_slice(slice: &[T]) -> Self {\n match Memory::new(slice.len()) {\n Option::Some(mem) => {\n unsafe { ptr::copy(slice.as_ptr(), mem.ptr, slice.len()) };\n\n return Vec {\n mem: mem,\n length: slice.len(),\n };\n }\n Option::None => {\n return Vec::new();\n }\n }\n }\n\n\n \/\/\/ Get the nth element. Returns None if out of bounds.\n pub fn get(&self, i: usize) -> Option<&mut T> {\n if i >= self.length {\n Option::None\n } else {\n unsafe { Option::Some(&mut *self.mem.ptr.offset(i as isize)) }\n }\n }\n\n \/\/\/ Set the nth element\n pub fn set(&self, i: usize, value: T) {\n if i <= self.length {\n unsafe { ptr::write(self.mem.ptr.offset(i as isize), value) };\n }\n }\n\n \/\/\/ Insert element at a given position\n pub fn insert(&mut self, i: usize, value: T) {\n if i <= self.length {\n let new_length = self.length + 1;\n if self.mem.renew(new_length) {\n self.length = new_length;\n\n \/\/ Move all things ahead of insert forward one\n let mut j = self.length - 1;\n while j > i {\n unsafe {\n ptr::write(self.mem.ptr.offset(j as isize),\n ptr::read(self.mem.ptr.offset(j as isize - 1)));\n }\n j -= 1;\n }\n\n unsafe { ptr::write(self.mem.ptr.offset(i as isize), value) };\n }\n }\n }\n\n \/\/\/ Remove a element and return it as a Option\n pub fn remove(&mut self, i: usize) -> Option<T> {\n if i < self.length {\n self.length -= 1;\n\n let item = unsafe { ptr::read(self.mem.ptr.offset(i as isize)) };\n\n \/\/ Move all things ahead of remove back one\n let mut j = i;\n while j < self.length {\n unsafe {\n ptr::write(self.mem.ptr.offset(j as isize),\n ptr::read(self.mem.ptr.offset(j as isize + 1)));\n }\n j += 1;\n }\n\n self.mem.renew(self.length);\n\n Option::Some(item)\n } else {\n Option::None\n }\n }\n\n \/\/\/ Push an element to a vector\n pub fn push(&mut self, value: T) {\n let new_length = self.length + 1;\n if self.mem.renew(new_length) {\n self.length = new_length;\n\n unsafe { ptr::write(self.mem.ptr.offset(self.length as isize - 1), value) };\n }\n }\n\n \/\/\/ Pop the last element\n pub fn pop(&mut self) -> Option<T> {\n if self.length > 0 {\n self.length -= 1;\n\n let item = unsafe { ptr::read(self.mem.ptr.offset(self.length as isize)) };\n\n 
self.mem.renew(self.length);\n\n return Option::Some(item);\n }\n\n Option::None\n }\n\n \/\/\/ Get the length of the vector\n pub fn len(&self) -> usize {\n self.length\n }\n\n \/\/\/ Create an iterator\n pub fn iter(&self) -> VecIterator<T> {\n VecIterator {\n vec: self,\n offset: 0,\n }\n }\n\n \/\/\/ Get a \"subvector\" of a vector\n \/\/\/ (Is not recommended use slice instead)\n \/\/ TODO: Consider returning a slice instead\n pub fn sub(&self, start: usize, count: usize) -> Self {\n let mut i = start;\n if i > self.len() {\n i = self.len();\n }\n\n let mut j = i + count;\n if j > self.len() {\n j = self.len();\n }\n\n let length = j - i;\n if length == 0 {\n return Vec::new();\n }\n\n match Memory::new(length) {\n Option::Some(mem) => {\n for k in i..j {\n unsafe {\n ptr::write(mem.ptr.offset((k - i) as isize),\n ptr::read(self.mem.ptr.offset(k as isize)))\n };\n }\n\n return Vec {\n mem: mem,\n length: length,\n };\n }\n Option::None => {\n return Self::new();\n }\n }\n }\n\n \/\/\/ Get a slice of the whole vector\n pub fn as_slice(&self) -> &[T] {\n if self.length > 0 {\n unsafe { slice::from_raw_parts(self.mem.ptr, self.length) }\n } else {\n &[]\n }\n }\n}\n\nimpl<T> Vec<T> where T: Clone {\n \/\/\/ Append a vector to another vector\n pub fn push_all(&mut self, vec: &Self) {\n let mut i = self.length as isize;\n let new_length = self.length + vec.len();\n if self.mem.renew(new_length) {\n self.length = new_length;\n\n for value in vec.iter() {\n unsafe { ptr::write(self.mem.ptr.offset(i), value.clone()) };\n i += 1;\n }\n }\n }\n}\n\nimpl<T> Clone for Vec<T> where T: Clone {\n fn clone(&self) -> Self {\n let mut ret = Self::new();\n ret.push_all(self);\n ret\n }\n}\n\nimpl<T> Drop for Vec<T> {\n fn drop(&mut self) {\n unsafe {\n for i in 0..self.len() {\n ptr::read(self.mem.ptr.offset(i as isize));\n }\n }\n }\n}\n<commit_msg>Remove kvec as it is buggy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Inspect a fixed range of bytes<commit_after>use std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse std::str;\nuse std::str::FromStr;\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n let filename = &args[1];\n\n let mut f = File::open(filename).expect(\"file not found\");\n let mut buffer: Vec<u8> = vec!();\n let _ = f.read_to_end(&mut buffer);\n\n let result: &[u8]= &buffer;\n\n println!(\"{:?}\", str::from_utf8(&result[2*2880..10*2880]).expect(\"should be utf8\"));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove redundant where clause<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Much more elegant dictionary lookup handling.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test for #954<commit_after>fn main() {\n log 3-2;\n assert 3-2 == 1;\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix missing check in cuboid::closest_point<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add `clone_from` and `deep_clone_from`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added low level example<commit_after>use std::ptr;\nuse std::fs;\nuse std::io::{Write, SeekFrom, Seek};\nuse std::os::unix::prelude::AsRawFd;\nuse mmap::{MemoryMap, MapOption};\nuse std::thread;\n\n\/\/ from crates.io\nextern crate mmap;\nextern crate libc;\n\nfn toggle(data: *mut u8, led_pin: u32) {\n unsafe {\n 
*(data.offset(0x40000000 + 0x30) as *mut u32) ^= 1 << led_pin;\n }\n}\n\nfn main() {\n let size: usize = 0x407FFFFF; \/\/ i want to access the memory from 0x40000000 to 0x407FFFFF\n\n let mut f = fs::OpenOptions::new().read(true)\n .write(true)\n \/\/.create(true) ... i guess this should be disables!?\n .open(\"\/dev\/mem\")\n .unwrap();\n\n \/\/ Allocate space in the file first\n \/\/f.seek(SeekFrom::Start(size as u64)).unwrap();\n \/\/f.write_all(&[0]).unwrap();\n \/\/f.seek(SeekFrom::Start(0)).unwrap();\n\n let mmap_opts = &[\n \/\/ Then make the mapping *public* so it is written back to the file\n MapOption::MapNonStandardFlags(libc::consts::os::posix88::MAP_SHARED),\n MapOption::MapReadable,\n MapOption::MapWritable,\n MapOption::MapFd(f.as_raw_fd()),\n ];\n\n let mmap = MemoryMap::new(size, mmap_opts).unwrap();\n\n let data = mmap.data();\n\n if data.is_null() {\n panic!(\"Could not access data from memory mapped file\")\n }\n else {\n println!(\"successful data access to memory mapped file\");\n }\n\n \/\/let src = \"Hello!\";\n \/\/let src_data = src.as_bytes();\n\n \/\/unsafe {\n \/\/ ptr::copy(src_data.as_ptr(), data, src_data.len());\n \/\/}\n\n while (true) {\n toggle(data, 3);\n thread::sleep_ms(200);\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse io::{self, SeekFrom, Read, Initializer, Write, Seek, BufRead, Error, ErrorKind};\nuse fmt;\nuse mem;\n\n\/\/ =============================================================================\n\/\/ Forwarding implementations\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, R: Read + ?Sized> Read for &'a mut R {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n (**self).initializer()\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, W: Write + ?Sized> Write for &'a mut W {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, S: Seek + ?Sized> Seek for &'a mut S {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: BufRead + ?Sized> BufRead for &'a mut B {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut 
self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<R: Read + ?Sized> Read for Box<R> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n (**self).initializer()\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<W: Write + ?Sized> Write for Box<W> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<S: Seek + ?Sized> Seek for Box<S> {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<B: BufRead + ?Sized> BufRead for Box<B> {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n\/\/ =============================================================================\n\/\/ In-memory buffer implementations\n\n\/\/\/ Read is implemented for `&[u8]` by copying from the slice.\n\/\/\/\n\/\/\/ Note that reading updates the slice to point to the yet unread part.\n\/\/\/ The slice will be empty when EOF is reached.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Read for &'a [u8] {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let amt = cmp::min(buf.len(), self.len());\n let (a, b) = self.split_at(amt);\n\n \/\/ First check if the amount of bytes we want to read is small:\n \/\/ `copy_from_slice` will generally expand to a call to `memcpy`, and\n \/\/ for a single byte the overhead is significant.\n if amt == 1 {\n buf[0] = a[0];\n } else {\n buf[..amt].copy_from_slice(a);\n }\n\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n Initializer::nop()\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n if buf.len() > self.len() {\n return Err(Error::new(ErrorKind::UnexpectedEof,\n \"failed to fill whole buffer\"));\n }\n let (a, b) = self.split_at(buf.len());\n\n \/\/ First check if the amount of bytes we want to read is small:\n \/\/ `copy_from_slice` will generally expand to a call to `memcpy`, and\n \/\/ for a single byte the overhead is significant.\n if buf.len() == 1 {\n buf[0] = a[0];\n } else {\n 
buf.copy_from_slice(a);\n }\n\n *self = b;\n Ok(())\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> BufRead for &'a [u8] {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(*self) }\n\n #[inline]\n fn consume(&mut self, amt: usize) { *self = &self[amt..]; }\n}\n\n\/\/\/ Write is implemented for `&mut [u8]` by copying into the slice, overwriting\n\/\/\/ its data.\n\/\/\/\n\/\/\/ Note that writing updates the slice to point to the yet unwritten part.\n\/\/\/ The slice will be empty when it has been completely overwritten.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Write for &'a mut [u8] {\n #[inline]\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n let amt = cmp::min(data.len(), self.len());\n let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);\n a.copy_from_slice(&data[..amt]);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn write_all(&mut self, data: &[u8]) -> io::Result<()> {\n if self.write(data)? == data.len() {\n Ok(())\n } else {\n Err(Error::new(ErrorKind::WriteZero, \"failed to write whole buffer\"))\n }\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n\/\/\/ Write is implemented for `Vec<u8>` by appending to the vector.\n\/\/\/ The vector will grow as needed.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Vec<u8> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.extend_from_slice(buf);\n Ok(buf.len())\n }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n self.extend_from_slice(buf);\n Ok(())\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use io::prelude::*;\n use test;\n\n #[bench]\n fn bench_read_slice(b: &mut test::Bencher) {\n let buf = [5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_slice(b: &mut test::Bencher) {\n let mut buf = [0; 1024];\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n\n #[bench]\n fn bench_read_vec(b: &mut test::Bencher) {\n let buf = vec![5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_vec(b: &mut test::Bencher) {\n let mut buf = Vec::with_capacity(1024);\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n}\n<commit_msg>Rollup merge of #45083 - fhartwig:slice-read-to-end, r=bluss<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse cmp;\nuse io::{self, SeekFrom, Read, Initializer, Write, Seek, BufRead, Error, ErrorKind};\nuse fmt;\nuse mem;\n\n\/\/ =============================================================================\n\/\/ Forwarding implementations\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, R: Read + ?Sized> Read for &'a mut R {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n (**self).initializer()\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, W: Write + ?Sized> Write for &'a mut W {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, S: Seek + ?Sized> Seek for &'a mut S {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: BufRead + ?Sized> BufRead for &'a mut B {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<R: Read + ?Sized> Read for Box<R> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n (**self).initializer()\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_to_end(buf)\n }\n\n #[inline]\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_to_string(buf)\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n (**self).read_exact(buf)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<W: Write + ?Sized> Write for Box<W> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { (**self).flush() }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n (**self).write_all(buf)\n }\n\n #[inline]\n fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {\n (**self).write_fmt(fmt)\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<S: Seek + ?Sized> Seek for Box<S> {\n #[inline]\n fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<B: BufRead + ?Sized> BufRead for Box<B> {\n 
#[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }\n\n #[inline]\n fn consume(&mut self, amt: usize) { (**self).consume(amt) }\n\n #[inline]\n fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {\n (**self).read_until(byte, buf)\n }\n\n #[inline]\n fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {\n (**self).read_line(buf)\n }\n}\n\n\/\/ =============================================================================\n\/\/ In-memory buffer implementations\n\n\/\/\/ Read is implemented for `&[u8]` by copying from the slice.\n\/\/\/\n\/\/\/ Note that reading updates the slice to point to the yet unread part.\n\/\/\/ The slice will be empty when EOF is reached.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Read for &'a [u8] {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let amt = cmp::min(buf.len(), self.len());\n let (a, b) = self.split_at(amt);\n\n \/\/ First check if the amount of bytes we want to read is small:\n \/\/ `copy_from_slice` will generally expand to a call to `memcpy`, and\n \/\/ for a single byte the overhead is significant.\n if amt == 1 {\n buf[0] = a[0];\n } else {\n buf[..amt].copy_from_slice(a);\n }\n\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n Initializer::nop()\n }\n\n #[inline]\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n if buf.len() > self.len() {\n return Err(Error::new(ErrorKind::UnexpectedEof,\n \"failed to fill whole buffer\"));\n }\n let (a, b) = self.split_at(buf.len());\n\n \/\/ First check if the amount of bytes we want to read is small:\n \/\/ `copy_from_slice` will generally expand to a call to `memcpy`, and\n \/\/ for a single byte the overhead is significant.\n if buf.len() == 1 {\n buf[0] = a[0];\n } else {\n buf.copy_from_slice(a);\n }\n\n *self = b;\n Ok(())\n }\n\n #[inline]\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n buf.extend_from_slice(*self);\n let len = self.len();\n *self = &self[len..];\n Ok(len)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> BufRead for &'a [u8] {\n #[inline]\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(*self) }\n\n #[inline]\n fn consume(&mut self, amt: usize) { *self = &self[amt..]; }\n}\n\n\/\/\/ Write is implemented for `&mut [u8]` by copying into the slice, overwriting\n\/\/\/ its data.\n\/\/\/\n\/\/\/ Note that writing updates the slice to point to the yet unwritten part.\n\/\/\/ The slice will be empty when it has been completely overwritten.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a> Write for &'a mut [u8] {\n #[inline]\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n let amt = cmp::min(data.len(), self.len());\n let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);\n a.copy_from_slice(&data[..amt]);\n *self = b;\n Ok(amt)\n }\n\n #[inline]\n fn write_all(&mut self, data: &[u8]) -> io::Result<()> {\n if self.write(data)? 
== data.len() {\n Ok(())\n } else {\n Err(Error::new(ErrorKind::WriteZero, \"failed to write whole buffer\"))\n }\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n\/\/\/ Write is implemented for `Vec<u8>` by appending to the vector.\n\/\/\/ The vector will grow as needed.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Vec<u8> {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.extend_from_slice(buf);\n Ok(buf.len())\n }\n\n #[inline]\n fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {\n self.extend_from_slice(buf);\n Ok(())\n }\n\n #[inline]\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use io::prelude::*;\n use test;\n\n #[bench]\n fn bench_read_slice(b: &mut test::Bencher) {\n let buf = [5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_slice(b: &mut test::Bencher) {\n let mut buf = [0; 1024];\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n\n #[bench]\n fn bench_read_vec(b: &mut test::Bencher) {\n let buf = vec![5; 1024];\n let mut dst = [0; 128];\n\n b.iter(|| {\n let mut rd = &buf[..];\n for _ in 0..8 {\n let _ = rd.read(&mut dst);\n test::black_box(&dst);\n }\n })\n }\n\n #[bench]\n fn bench_write_vec(b: &mut test::Bencher) {\n let mut buf = Vec::with_capacity(1024);\n let src = [5; 128];\n\n b.iter(|| {\n let mut wr = &mut buf[..];\n for _ in 0..8 {\n let _ = wr.write_all(&src);\n test::black_box(&wr);\n }\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2406<commit_after>\/\/ https:\/\/leetcode.com\/problems\/divide-intervals-into-minimum-number-of-groups\/\npub fn min_groups(intervals: Vec<Vec<i32>>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\n \"{}\",\n min_groups(vec![\n vec![5, 10],\n vec![6, 8],\n vec![1, 5],\n vec![2, 3],\n vec![1, 10]\n ])\n ); \/\/ 3\n println!(\n \"{}\",\n min_groups(vec![vec![1, 3], vec![5, 6], vec![8, 10], vec![11, 13]])\n ); \/\/ 1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reorder imports<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>typo fix<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>trait added for i32<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add StoreIdIteratorWithStore::without_store()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add PowerPC (32-bit big-endian) ioctl definitions<commit_after>\/* automatically generated by rust-bindgen *\/\n\npub type __u32 = :: std :: os :: raw :: c_uint ; pub const _HIDIOCGRDESCSIZE : __u32 = 1074022401 ; pub const _HIDIOCGRDESC : __u32 = 1342457858 ;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reinstate Vulkan object destructors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add newline after enabling GL_KHR_vulkan_glsl<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add optimization codegen tests<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ no-system-llvm\n\/\/ compile-flags: -O\n#![crate_type=\"lib\"]\n\npub enum Three { First, Second, Third }\nuse Three::*;\n\npub enum Four { First, Second, Third, Fourth }\nuse Four::*;\n\n#[no_mangle]\npub fn three_valued(x: Three) -> Three {\n \/\/ CHECK-LABEL: @three_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n First => First,\n Second => Second,\n Third => Third,\n }\n}\n\n#[no_mangle]\npub fn four_valued(x: Four) -> Four {\n \/\/ CHECK-LABEL: @four_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n First => First,\n Second => Second,\n Third => Third,\n Fourth => Fourth,\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Code for applying CSS styles to the DOM.\n\/\/!\n\/\/! This is not very interesting at the moment. It will get much more\n\/\/! complicated if I add support for compound selectors.\n\nuse dom::{Node, Element, ElementData};\nuse css::{Stylesheet, Rule, Selector, Simple, SimpleSelector, Value, Keyword, Specificity};\nuse std::collections::hashmap::HashMap;\n\n\/\/\/ Map from CSS property names to values.\npub type PropertyMap = HashMap<String, Value>;\n\n\/\/\/ A node with associated style data.\npub struct StyledNode<'a> {\n pub node: &'a Node,\n pub specified_values: PropertyMap,\n pub children: Vec<StyledNode<'a>>,\n}\n\n#[deriving(PartialEq)]\npub enum Display {\n Inline,\n Block,\n None,\n}\n\nimpl<'a> StyledNode<'a> {\n \/\/\/ Return the specified value of a property if it exists, otherwise `None`.\n pub fn value(&self, name: &str) -> Option<Value> {\n self.specified_values.find_equiv(&name).map(|v| v.clone())\n }\n\n \/\/\/ Return the specified value of property `name`, or property `fallback_name` if that doesn't\n \/\/\/ exist. or value `default` if neither does.\n pub fn lookup(&self, name: &str, fallback_name: &str, default: &Value) -> Value {\n self.value(name).unwrap_or_else(|| self.value(fallback_name)\n .unwrap_or_else(|| default.clone()))\n }\n\n \/\/\/ The value of the `display` property (defaults to `Block`).\n pub fn display(&self) -> Display {\n match self.value(\"display\") {\n Some(Keyword(s)) => match s.as_slice() {\n \"inline\" => Inline,\n \"none\" => None,\n _ => Block\n },\n _ => Block\n }\n }\n}\n\n\/\/\/ Apply a stylesheet to an entire DOM tree, returning a StyledNode tree.\n\/\/\/\n\/\/\/ This finds only the specified values at the moment. 
Eventually it should be extended to find the\n\/\/\/ computed values too, including inherited values.\npub fn style_tree<'a>(root: &'a Node, stylesheet: &'a Stylesheet) -> StyledNode<'a> {\n StyledNode {\n node: root,\n specified_values: match root.node_type {\n Element(ref elem) => specified_values(elem, stylesheet),\n _ => HashMap::new(),\n },\n children: root.children.iter().map(|child| style_tree(child, stylesheet)).collect(),\n }\n}\n\n\/\/\/ Apply styles to a single element, returning the specified styles.\n\/\/\/\n\/\/\/ To do: Allow multiple UA\/author\/user stylesheets, and implement the cascade.\nfn specified_values(elem: &ElementData, stylesheet: &Stylesheet) -> PropertyMap {\n let mut values = HashMap::new();\n let mut rules = matching_rules(elem, stylesheet);\n\n \/\/ Go through the rules from lowest to highest specificity.\n rules.sort_by(|&(a, _), &(b, _)| a.cmp(&b));\n for &(_, rule) in rules.iter() {\n for declaration in rule.declarations.iter() {\n values.insert(declaration.name.clone(), declaration.value.clone());\n }\n }\n values\n}\n\n\/\/\/ A single CSS rule and the highest-specificity selector that resulted in a given match.\ntype MatchedRule<'a> = (Specificity, &'a Rule);\n\n\/\/\/ Find all CSS rules that match the given element.\nfn matching_rules<'a>(elem: &ElementData, stylesheet: &'a Stylesheet) -> Vec<MatchedRule<'a>> {\n \/\/ For now, we just do a linear scan of all the rules. For large\n \/\/ documents, it would be more efficient to store the rules in hash tables\n \/\/ based on tag name, id, class, etc.\n stylesheet.rules.iter()\n .filter_map(|rule| {\n \/\/ Find the first (highest-specificity) matching selector.\n rule.selectors.iter().find(|selector| matches(elem, *selector))\n .map(|selector| (selector.specificity(), rule))\n }).collect()\n}\n\n\/\/\/ Selector matching:\nfn matches(elem: &ElementData, selector: &Selector) -> bool {\n match *selector {\n Simple(ref simple_selector) => matches_simple_selector(elem, simple_selector)\n }\n}\n\nfn matches_simple_selector(elem: &ElementData, selector: &SimpleSelector) -> bool {\n \/\/ Check type selector\n if selector.tag_name.iter().any(|name| elem.tag_name != *name) {\n return false;\n }\n\n \/\/ Check ID selector\n if selector.id.iter().any(|id| elem.id() != Some(id)) {\n return false;\n }\n\n \/\/ Check class selectors\n let elem_classes = elem.classes();\n if selector.class.iter().any(|class| !elem_classes.contains(&class.as_slice())) {\n return false;\n }\n\n \/\/ We didn't find any non-matching selector components.\n return true;\n}\n<commit_msg>Refactor matching_rules<commit_after>\/\/! Code for applying CSS styles to the DOM.\n\/\/!\n\/\/! This is not very interesting at the moment. It will get much more\n\/\/! 
complicated if I add support for compound selectors.\n\nuse dom::{Node, Element, ElementData};\nuse css::{Stylesheet, Rule, Selector, Simple, SimpleSelector, Value, Keyword, Specificity};\nuse std::collections::hashmap::HashMap;\n\n\/\/\/ Map from CSS property names to values.\npub type PropertyMap = HashMap<String, Value>;\n\n\/\/\/ A node with associated style data.\npub struct StyledNode<'a> {\n pub node: &'a Node,\n pub specified_values: PropertyMap,\n pub children: Vec<StyledNode<'a>>,\n}\n\n#[deriving(PartialEq)]\npub enum Display {\n Inline,\n Block,\n None,\n}\n\nimpl<'a> StyledNode<'a> {\n \/\/\/ Return the specified value of a property if it exists, otherwise `None`.\n pub fn value(&self, name: &str) -> Option<Value> {\n self.specified_values.find_equiv(&name).map(|v| v.clone())\n }\n\n \/\/\/ Return the specified value of property `name`, or property `fallback_name` if that doesn't\n \/\/\/ exist. or value `default` if neither does.\n pub fn lookup(&self, name: &str, fallback_name: &str, default: &Value) -> Value {\n self.value(name).unwrap_or_else(|| self.value(fallback_name)\n .unwrap_or_else(|| default.clone()))\n }\n\n \/\/\/ The value of the `display` property (defaults to `Block`).\n pub fn display(&self) -> Display {\n match self.value(\"display\") {\n Some(Keyword(s)) => match s.as_slice() {\n \"inline\" => Inline,\n \"none\" => None,\n _ => Block\n },\n _ => Block\n }\n }\n}\n\n\/\/\/ Apply a stylesheet to an entire DOM tree, returning a StyledNode tree.\n\/\/\/\n\/\/\/ This finds only the specified values at the moment. Eventually it should be extended to find the\n\/\/\/ computed values too, including inherited values.\npub fn style_tree<'a>(root: &'a Node, stylesheet: &'a Stylesheet) -> StyledNode<'a> {\n StyledNode {\n node: root,\n specified_values: match root.node_type {\n Element(ref elem) => specified_values(elem, stylesheet),\n _ => HashMap::new(),\n },\n children: root.children.iter().map(|child| style_tree(child, stylesheet)).collect(),\n }\n}\n\n\/\/\/ Apply styles to a single element, returning the specified styles.\n\/\/\/\n\/\/\/ To do: Allow multiple UA\/author\/user stylesheets, and implement the cascade.\nfn specified_values(elem: &ElementData, stylesheet: &Stylesheet) -> PropertyMap {\n let mut values = HashMap::new();\n let mut rules = matching_rules(elem, stylesheet);\n\n \/\/ Go through the rules from lowest to highest specificity.\n rules.sort_by(|&(a, _), &(b, _)| a.cmp(&b));\n for &(_, rule) in rules.iter() {\n for declaration in rule.declarations.iter() {\n values.insert(declaration.name.clone(), declaration.value.clone());\n }\n }\n values\n}\n\n\/\/\/ A single CSS rule and the specificity of its most specific matching selector.\ntype MatchedRule<'a> = (Specificity, &'a Rule);\n\n\/\/\/ Find all CSS rules that match the given element.\nfn matching_rules<'a>(elem: &ElementData, stylesheet: &'a Stylesheet) -> Vec<MatchedRule<'a>> {\n \/\/ For now, we just do a linear scan of all the rules. 
For large\n \/\/ documents, it would be more efficient to store the rules in hash tables\n \/\/ based on tag name, id, class, etc.\n stylesheet.rules.iter().filter_map(|rule| match_rule(elem, rule)).collect()\n}\n\nfn match_rule<'a>(elem: &ElementData, rule: &'a Rule) -> Option<MatchedRule<'a>> {\n \/\/ Find the first (highest-specificity) matching selector.\n rule.selectors.iter().find(|selector| matches(elem, *selector))\n .map(|selector| (selector.specificity(), rule))\n}\n\n\/\/\/ Selector matching:\nfn matches(elem: &ElementData, selector: &Selector) -> bool {\n match *selector {\n Simple(ref simple_selector) => matches_simple_selector(elem, simple_selector)\n }\n}\n\nfn matches_simple_selector(elem: &ElementData, selector: &SimpleSelector) -> bool {\n \/\/ Check type selector\n if selector.tag_name.iter().any(|name| elem.tag_name != *name) {\n return false;\n }\n\n \/\/ Check ID selector\n if selector.id.iter().any(|id| elem.id() != Some(id)) {\n return false;\n }\n\n \/\/ Check class selectors\n let elem_classes = elem.classes();\n if selector.class.iter().any(|class| !elem_classes.contains(&class.as_slice())) {\n return false;\n }\n\n \/\/ We didn't find any non-matching selector components.\n return true;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>retag<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Second sets of problems<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Improve error message when unable to initialize git index repo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2336<commit_after>\/\/ https:\/\/leetcode.com\/problems\/smallest-number-in-infinite-set\/\nstruct SmallestInfiniteSet {}\n\nimpl SmallestInfiniteSet {\n fn new() -> Self {\n todo!()\n }\n\n fn pop_smallest(&self) -> i32 {\n todo!()\n }\n\n fn add_back(&self, num: i32) {\n todo!()\n }\n}\n\nfn main() {\n let s = SmallestInfiniteSet::new();\n s.add_back(2);\n println!(\"{}\", s.pop_smallest()); \/\/ 1\n println!(\"{}\", s.pop_smallest()); \/\/ 2\n println!(\"{}\", s.pop_smallest()); \/\/ 3\n s.add_back(1);\n println!(\"{}\", s.pop_smallest()); \/\/ 1\n println!(\"{}\", s.pop_smallest()); \/\/ 4\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2396<commit_after>\/\/ https:\/\/leetcode.com\/problems\/strictly-palindromic-number\/\npub fn is_strictly_palindromic(n: i32) -> bool {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", is_strictly_palindromic(9)); \/\/ false\n println!(\"{}\", is_strictly_palindromic(4)); \/\/ false\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[rust] Small test script<commit_after>\/\/\/Lazy git tool\r\nuse std::env;\r\nuse std::process::Command;\r\n\r\n\/\/\/Executes git command with arguments\r\nmacro_rules! 
exec_git_cmd {\r\n ($args:expr) => {\r\n Command::new(\"git\").args($args).status().unwrap();\r\n }\r\n}\r\n\r\n\/\/\/Amend changes\r\nfn git_amend(args: &[String]) {\r\n git_add();\r\n if (args.len() != 0) && (args[0] == \"edit\") {\r\n exec_git_cmd!(&[\"commit\", \"--amend\"]);\r\n }\r\n else {\r\n exec_git_cmd!(&[\"commit\", \"--amend\", \"--no-edit\"]);\r\n }\r\n}\r\n\r\n\/\/\/Fetch changes from upstream\r\nfn git_fetch() {\r\n exec_git_cmd!(&[\"checkout\", \"master\"]);\r\n exec_git_cmd!(&[\"fetch\", \"upstream\"]);\r\n exec_git_cmd!(&[\"merge\", \"upstream\/master\"]);\r\n}\r\n\r\n\/\/\/Push the current branch\r\nfn git_push() {\r\n exec_git_cmd!(&[\"push\", \"origin\", \"HEAD\"]);\r\n}\r\n\r\n\/\/\/Add all changes\r\nfn git_add() {\r\n exec_git_cmd!(&[\"add\", \"--all\"]);\r\n}\r\n\r\n\/\/\/Unexpected argument handler\r\nfn unexpected(arg: &str) {\r\n println!(\"Unexpected argument: {}\", arg);\r\n}\r\n\r\nfn main() {\r\n let args: Vec<String> = env::args().collect();\r\n \/\/exclude file name\r\n let s_args = &args[1..];\r\n let is_repo = Command::new(\"git\").arg(\"rev-parse\").arg(\"-q\").output().unwrap();\r\n\r\n if is_repo.status.code().unwrap() != 0 {\r\n println!(\"Not a git repository\");\r\n return;\r\n }\r\n else if s_args.len() == 0 {\r\n println!(\"lgit [option]\\n\");\r\n println!(\"options:\");\r\n println!(\" amend [edit] - amend all changes into last commit\");\r\n println!(\" add - add all changes\");\r\n println!(\" push - push current branch\");\r\n println!(\" fetch - get updates from upstream\\n\");\r\n return;\r\n }\r\n\r\n match s_args[0].as_ref() {\r\n \"amend\" => git_amend(&args[2..]),\r\n \"fetch\" => git_fetch(),\r\n \"push\" => git_push(),\r\n \"add\" => git_add(),\r\n _ => unexpected(s_args[0].as_ref()),\r\n }\r\n}\r\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for Nth<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add compile-fail test for #43196<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n |\n}\n\/\/~^ ERROR expected `|`, found `}`\n|\n\/\/~^ ERROR expected item, found `|`\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>something something<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add cycle.rs<commit_after>use std::fmt;\n\n\/\/ +-> Test.0() -+\n\/\/ | |\n\/\/ +-------------+\nstruct Test(Box<dyn Fn() -> Test>);\n\n\/\/for those cold lonely nights\n\/\/aka \"VRRRRRRR\" -cpu fan\n\/\/also apparently uses a lot of power\nfn main() -> ! {\n println!(\"Hello, world! {}\", Test::from(0).0().0().0());\n println!(\"You should Ctrl+C this program. 
It is nothing but an infinite loop.\");\n let infinite = Test::from(1);\n let _sum: u32 = infinite.map(|_| 1).sum();\n unreachable!()\n}\n\n\/\/this is to get the ball rolling\n\/\/can't make a closure that refers to the binding it's in:\n\/\/let test = Test(Box::new(|| test)); \/\/invalid\nimpl From<i32> for Test {\n fn from(arg: i32) -> Test {\n Test(Box::new(move || arg.into()))\n }\n}\n\n\/\/dummy display impl to quantify the result, since you can't \"display\" a closure\nimpl fmt::Display for Test {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n writeln!(f, \"test\")\n }\n}\n\n\/\/why not, the sky's the limit\nimpl Iterator for Test {\n type Item = Test;\n\n fn next(&mut self) -> Option<Self::Item> {\n Some(self.0())\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"wc\"]\n#![allow(unstable)]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Boden Garman <bpgarman@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\nextern crate getopts;\nextern crate libc;\n\nuse std::ascii::AsciiExt;\nuse std::str::from_utf8;\nuse std::io::{print, File, BufferedReader};\nuse std::io::fs::PathExtensions;\nuse std::io::stdio::stdin_raw;\nuse std::result::Result as StdResult;\nuse getopts::Matches;\n\n#[path = \"..\/common\/util.rs\"]\n#[macro_use]\nmod util;\n\nstruct Result {\n filename: String,\n bytes: usize,\n chars: usize,\n lines: usize,\n words: usize,\n max_line_length: usize,\n}\n\nstatic NAME: &'static str = \"wc\";\n\npub fn uumain(args: Vec<String>) -> isize {\n let program = args[0].clone();\n let opts = [\n getopts::optflag(\"c\", \"bytes\", \"print the byte counts\"),\n getopts::optflag(\"m\", \"chars\", \"print the character counts\"),\n getopts::optflag(\"l\", \"lines\", \"print the newline counts\"),\n getopts::optflag(\"L\", \"max-line-length\", \"print the length of the longest line\"),\n getopts::optflag(\"w\", \"words\", \"print the word counts\"),\n getopts::optflag(\"h\", \"help\", \"display this help and exit\"),\n getopts::optflag(\"V\", \"version\", \"output version information and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => {\n crash!(1, \"Invalid options\\n{}\", f)\n }\n };\n\n if matches.opt_present(\"help\") {\n println!(\"Usage:\");\n println!(\" {0} [OPTION]... 
[FILE]...\", program);\n println!(\"\");\n print(getopts::usage(\"Print newline, word and byte counts for each FILE\", &opts).as_slice());\n println!(\"\");\n println!(\"With no FILE, or when FILE is -, read standard input.\");\n return 0;\n }\n\n if matches.opt_present(\"version\") {\n println!(\"wc 1.0.0\");\n return 0;\n }\n\n let mut files = matches.free.clone();\n if files.is_empty() {\n files = vec!(\"-\".to_string());\n }\n\n match wc(files, &matches) {\n Ok(()) => ( \/* pass *\/ ),\n Err(e) => return e\n }\n\n 0\n}\n\nconst CR: u8 = '\\r' as u8;\nconst LF: u8 = '\\n' as u8;\nconst SPACE: u8 = ' ' as u8;\nconst TAB: u8 = '\\t' as u8;\nconst SYN: u8 = 0x16 as u8;\nconst FF: u8 = 0x0C as u8;\n\n#[inline(always)]\nfn is_word_seperator(byte: u8) -> bool {\n byte == SPACE || byte == TAB || byte == CR || byte == SYN || byte == FF\n}\n\npub fn wc(files: Vec<String>, matches: &Matches) -> StdResult<(), isize> {\n let mut total_line_count: usize = 0;\n let mut total_word_count: usize = 0;\n let mut total_char_count: usize = 0;\n let mut total_byte_count: usize = 0;\n let mut total_longest_line_length: usize = 0;\n\n let mut results = vec!();\n let mut max_str_len: usize = 0;\n\n for path in files.iter() {\n let mut reader = try!(open(path.as_slice()));\n\n let mut line_count: usize = 0;\n let mut word_count: usize = 0;\n let mut byte_count: usize = 0;\n let mut char_count: usize = 0;\n let mut current_char_count: usize = 0;\n let mut longest_line_length: usize = 0;\n\n loop {\n \/\/ reading from a TTY seems to raise a condition on, rather than return Some(0) like a file.\n \/\/ hence the option wrapped in a result here\n match reader.read_until(LF) {\n Ok(raw_line) => {\n \/\/ GNU 'wc' only counts lines that end in LF as lines\n if *raw_line.last().unwrap() == LF {\n line_count += 1;\n }\n\n byte_count += raw_line.len();\n\n \/\/ try and convert the bytes to UTF-8 first\n match from_utf8(raw_line.as_slice()) {\n Ok(line) => {\n word_count += line.words().count();\n current_char_count = line.chars().count();\n char_count += current_char_count;\n },\n Err(..) 
=> {\n word_count += raw_line.as_slice().split(|&x| is_word_seperator(x)).count();\n for byte in raw_line.iter() {\n match byte.is_ascii() {\n true => {\n current_char_count += 1;\n }\n false => { }\n }\n }\n char_count += current_char_count;\n }\n }\n\n if current_char_count > longest_line_length {\n \/\/ we subtract one here because `line.len()` includes the LF\n \/\/ matches GNU 'wc' behaviour\n longest_line_length = current_char_count - 1;\n }\n },\n _ => break\n }\n\n }\n\n results.push(Result {\n filename: path.to_string(),\n bytes: byte_count,\n chars: char_count,\n lines: line_count,\n words: word_count,\n max_line_length: longest_line_length,\n });\n\n total_line_count += line_count;\n total_word_count += word_count;\n total_char_count += char_count;\n total_byte_count += byte_count;\n\n if longest_line_length > total_longest_line_length {\n total_longest_line_length = longest_line_length;\n }\n\n \/\/ used for formatting\n max_str_len = total_byte_count.to_string().len();\n }\n\n for result in results.iter() {\n print_stats(result.filename.as_slice(), result.lines, result.words, result.chars, result.bytes, result.max_line_length, matches, max_str_len);\n }\n\n if files.len() > 1 {\n print_stats(\"total\", total_line_count, total_word_count, total_char_count, total_byte_count, total_longest_line_length, matches, max_str_len);\n }\n\n Ok(())\n}\n\nfn print_stats(filename: &str, line_count: usize, word_count: usize, char_count: usize,\n byte_count: usize, longest_line_length: usize, matches: &Matches, max_str_len: usize) {\n if matches.opt_present(\"lines\") {\n print!(\"{:1$}\", line_count, max_str_len);\n }\n if matches.opt_present(\"words\") {\n print!(\"{:1$}\", word_count, max_str_len);\n }\n if matches.opt_present(\"bytes\") {\n print!(\"{:1$}\", byte_count, max_str_len);\n }\n if matches.opt_present(\"chars\") {\n print!(\"{:1$}\", char_count, max_str_len);\n }\n if matches.opt_present(\"max-line-length\") {\n print!(\"{:1$}\", longest_line_length, max_str_len);\n }\n\n \/\/ defaults\n if !matches.opt_present(\"bytes\")\n && !matches.opt_present(\"chars\")\n && !matches.opt_present(\"lines\")\n && !matches.opt_present(\"words\")\n && !matches.opt_present(\"max-line-length\") {\n print!(\"{:1$}\", line_count, max_str_len);\n print!(\"{:1$}\", word_count, max_str_len + 1);\n print!(\"{:1$}\", byte_count, max_str_len + 1);\n }\n\n if filename != \"-\" {\n println!(\" {}\", filename.as_slice());\n }\n else {\n println!(\"\");\n }\n}\n\nfn open(path: &str) -> StdResult<BufferedReader<Box<Reader+'static>>, isize> {\n if \"-\" == path {\n let reader = Box::new(stdin_raw()) as Box<Reader>;\n return Ok(BufferedReader::new(reader));\n }\n\n let fpath = Path::new(path);\n if fpath.is_dir() {\n show_info!(\"{}: is a directory\", path);\n }\n match File::open(&fpath) {\n Ok(fd) => {\n let reader = Box::new(fd) as Box<Reader>;\n Ok(BufferedReader::new(reader))\n }\n Err(e) => {\n show_error!(\"wc: {}: {}\", path, e);\n Err(1)\n }\n }\n}\n<commit_msg>wc: reset current_char_count after each line.<commit_after>#![crate_name = \"wc\"]\n#![allow(unstable)]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Boden Garman <bpgarman@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\nextern crate getopts;\nextern crate libc;\n\nuse std::ascii::AsciiExt;\nuse std::str::from_utf8;\nuse std::io::{print, File, BufferedReader};\nuse std::io::fs::PathExtensions;\nuse 
std::io::stdio::stdin_raw;\nuse std::result::Result as StdResult;\nuse getopts::Matches;\n\n#[path = \"..\/common\/util.rs\"]\n#[macro_use]\nmod util;\n\nstruct Result {\n filename: String,\n bytes: usize,\n chars: usize,\n lines: usize,\n words: usize,\n max_line_length: usize,\n}\n\nstatic NAME: &'static str = \"wc\";\n\npub fn uumain(args: Vec<String>) -> isize {\n let program = args[0].clone();\n let opts = [\n getopts::optflag(\"c\", \"bytes\", \"print the byte counts\"),\n getopts::optflag(\"m\", \"chars\", \"print the character counts\"),\n getopts::optflag(\"l\", \"lines\", \"print the newline counts\"),\n getopts::optflag(\"L\", \"max-line-length\", \"print the length of the longest line\"),\n getopts::optflag(\"w\", \"words\", \"print the word counts\"),\n getopts::optflag(\"h\", \"help\", \"display this help and exit\"),\n getopts::optflag(\"V\", \"version\", \"output version information and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => {\n crash!(1, \"Invalid options\\n{}\", f)\n }\n };\n\n if matches.opt_present(\"help\") {\n println!(\"Usage:\");\n println!(\" {0} [OPTION]... [FILE]...\", program);\n println!(\"\");\n print(getopts::usage(\"Print newline, word and byte counts for each FILE\", &opts).as_slice());\n println!(\"\");\n println!(\"With no FILE, or when FILE is -, read standard input.\");\n return 0;\n }\n\n if matches.opt_present(\"version\") {\n println!(\"wc 1.0.0\");\n return 0;\n }\n\n let mut files = matches.free.clone();\n if files.is_empty() {\n files = vec!(\"-\".to_string());\n }\n\n match wc(files, &matches) {\n Ok(()) => ( \/* pass *\/ ),\n Err(e) => return e\n }\n\n 0\n}\n\nconst CR: u8 = '\\r' as u8;\nconst LF: u8 = '\\n' as u8;\nconst SPACE: u8 = ' ' as u8;\nconst TAB: u8 = '\\t' as u8;\nconst SYN: u8 = 0x16 as u8;\nconst FF: u8 = 0x0C as u8;\n\n#[inline(always)]\nfn is_word_seperator(byte: u8) -> bool {\n byte == SPACE || byte == TAB || byte == CR || byte == SYN || byte == FF\n}\n\npub fn wc(files: Vec<String>, matches: &Matches) -> StdResult<(), isize> {\n let mut total_line_count: usize = 0;\n let mut total_word_count: usize = 0;\n let mut total_char_count: usize = 0;\n let mut total_byte_count: usize = 0;\n let mut total_longest_line_length: usize = 0;\n\n let mut results = vec!();\n let mut max_str_len: usize = 0;\n\n for path in files.iter() {\n let mut reader = try!(open(path.as_slice()));\n\n let mut line_count: usize = 0;\n let mut word_count: usize = 0;\n let mut byte_count: usize = 0;\n let mut char_count: usize = 0;\n let mut longest_line_length: usize = 0;\n\n loop {\n \/\/ reading from a TTY seems to raise a condition on, rather than return Some(0) like a file.\n \/\/ hence the option wrapped in a result here\n match reader.read_until(LF) {\n Ok(raw_line) => {\n \/\/ GNU 'wc' only counts lines that end in LF as lines\n if *raw_line.last().unwrap() == LF {\n line_count += 1;\n }\n\n byte_count += raw_line.len();\n\n \/\/ try and convert the bytes to UTF-8 first\n let mut current_char_count = 0;\n match from_utf8(raw_line.as_slice()) {\n Ok(line) => {\n word_count += line.words().count();\n current_char_count = line.chars().count();\n char_count += current_char_count;\n },\n Err(..) 
=> {\n word_count += raw_line.as_slice().split(|&x| is_word_seperator(x)).count();\n for byte in raw_line.iter() {\n match byte.is_ascii() {\n true => {\n current_char_count += 1;\n }\n false => { }\n }\n }\n char_count += current_char_count;\n }\n }\n\n if current_char_count > longest_line_length {\n \/\/ we subtract one here because `line.len()` includes the LF\n \/\/ matches GNU 'wc' behaviour\n longest_line_length = current_char_count - 1;\n }\n },\n _ => break\n }\n\n }\n\n results.push(Result {\n filename: path.to_string(),\n bytes: byte_count,\n chars: char_count,\n lines: line_count,\n words: word_count,\n max_line_length: longest_line_length,\n });\n\n total_line_count += line_count;\n total_word_count += word_count;\n total_char_count += char_count;\n total_byte_count += byte_count;\n\n if longest_line_length > total_longest_line_length {\n total_longest_line_length = longest_line_length;\n }\n\n \/\/ used for formatting\n max_str_len = total_byte_count.to_string().len();\n }\n\n for result in results.iter() {\n print_stats(result.filename.as_slice(), result.lines, result.words, result.chars, result.bytes, result.max_line_length, matches, max_str_len);\n }\n\n if files.len() > 1 {\n print_stats(\"total\", total_line_count, total_word_count, total_char_count, total_byte_count, total_longest_line_length, matches, max_str_len);\n }\n\n Ok(())\n}\n\nfn print_stats(filename: &str, line_count: usize, word_count: usize, char_count: usize,\n byte_count: usize, longest_line_length: usize, matches: &Matches, max_str_len: usize) {\n if matches.opt_present(\"lines\") {\n print!(\"{:1$}\", line_count, max_str_len);\n }\n if matches.opt_present(\"words\") {\n print!(\"{:1$}\", word_count, max_str_len);\n }\n if matches.opt_present(\"bytes\") {\n print!(\"{:1$}\", byte_count, max_str_len);\n }\n if matches.opt_present(\"chars\") {\n print!(\"{:1$}\", char_count, max_str_len);\n }\n if matches.opt_present(\"max-line-length\") {\n print!(\"{:1$}\", longest_line_length, max_str_len);\n }\n\n \/\/ defaults\n if !matches.opt_present(\"bytes\")\n && !matches.opt_present(\"chars\")\n && !matches.opt_present(\"lines\")\n && !matches.opt_present(\"words\")\n && !matches.opt_present(\"max-line-length\") {\n print!(\"{:1$}\", line_count, max_str_len);\n print!(\"{:1$}\", word_count, max_str_len + 1);\n print!(\"{:1$}\", byte_count, max_str_len + 1);\n }\n\n if filename != \"-\" {\n println!(\" {}\", filename.as_slice());\n }\n else {\n println!(\"\");\n }\n}\n\nfn open(path: &str) -> StdResult<BufferedReader<Box<Reader+'static>>, isize> {\n if \"-\" == path {\n let reader = Box::new(stdin_raw()) as Box<Reader>;\n return Ok(BufferedReader::new(reader));\n }\n\n let fpath = Path::new(path);\n if fpath.is_dir() {\n show_info!(\"{}: is a directory\", path);\n }\n match File::open(&fpath) {\n Ok(fd) => {\n let reader = Box::new(fd) as Box<Reader>;\n Ok(BufferedReader::new(reader))\n }\n Err(e) => {\n show_error!(\"wc: {}: {}\", path, e);\n Err(1)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\/! Basic CSS block layout.\n\nuse style::{StyledNode, Inline, Block, DisplayNone};\nuse css::{Keyword, Length, Px};\nuse std::default::Default;\nuse std::iter::AdditiveIterator; \/\/ for `sum`\n\n\/\/ CSS box model. 
All sizes are in px.\n\n#[deriving(Default, Show)]\npub struct Dimensions {\n \/\/ Top left corner of the content area, relative to the document origin:\n pub x: f32,\n pub y: f32,\n\n \/\/ Content area size:\n pub width: f32,\n pub height: f32,\n\n \/\/ Surrounding edges:\n pub padding: EdgeSizes,\n pub border: EdgeSizes,\n pub margin: EdgeSizes,\n}\n\n#[deriving(Default, Show)]\npub struct EdgeSizes {\n pub left: f32,\n pub right: f32,\n pub top: f32,\n pub bottom: f32,\n}\n\n\/\/\/ A node in the layout tree.\npub struct LayoutBox<'a> {\n pub dimensions: Dimensions,\n pub box_type: BoxType<'a>,\n pub children: Vec<LayoutBox<'a>>,\n}\n\npub enum BoxType<'a> {\n BlockNode(&'a StyledNode<'a>),\n InlineNode(&'a StyledNode<'a>),\n AnonymousBlock,\n}\n\nimpl<'a> LayoutBox<'a> {\n fn new(box_type: BoxType) -> LayoutBox {\n LayoutBox {\n box_type: box_type,\n dimensions: Default::default(),\n children: Vec::new(),\n }\n }\n\n fn get_style_node(&self) -> &'a StyledNode<'a> {\n match self.box_type {\n BlockNode(node) => node,\n InlineNode(node) => node,\n AnonymousBlock => fail!(\"Anonymous block box has no style node\")\n }\n }\n}\n\n\/\/\/ Transform a style tree into a layout tree.\npub fn layout_tree<'a>(node: &'a StyledNode<'a>, containing_block: Dimensions) -> LayoutBox<'a> {\n let mut root_box = build_layout_tree(node);\n root_box.layout(containing_block);\n return root_box;\n}\n\n\/\/\/ Build the tree of LayoutBoxes, but don't perform any layout calculations yet.\nfn build_layout_tree<'a>(style_node: &'a StyledNode<'a>) -> LayoutBox<'a> {\n \/\/ Create the root box.\n let mut root = LayoutBox::new(match style_node.display() {\n Block => BlockNode(style_node),\n Inline => InlineNode(style_node),\n DisplayNone => fail!(\"Root node has display: none.\")\n });\n\n \/\/ Create the descendant boxes.\n for child in style_node.children.iter() {\n match child.display() {\n Block => root.children.push(build_layout_tree(child)),\n Inline => root.get_inline_container().children.push(build_layout_tree(child)),\n DisplayNone => {} \/\/ Don't lay out nodes with `display: none;`\n }\n }\n return root;\n}\n\nimpl<'a> LayoutBox<'a> {\n \/\/\/ Lay out a box and its descendants.\n fn layout(&mut self, containing_block: Dimensions) {\n match self.box_type {\n BlockNode(_) => self.layout_block(containing_block),\n InlineNode(_) => {} \/\/ TODO\n AnonymousBlock => {} \/\/ TODO\n }\n }\n\n \/\/\/ Lay out a block-level element and its descendants.\n fn layout_block(&mut self, containing_block: Dimensions) {\n \/\/ Child width can depend on parent width, so we need to calculate this node's width before\n \/\/ laying out its children.\n self.calculate_block_width(containing_block);\n\n \/\/ Determine where the block is located within its container.\n self.calculate_block_position(containing_block);\n\n \/\/ Recursively lay out the children of this node within its content area.\n self.layout_block_children();\n\n \/\/ Parent height can depend on child height, so `calculate_height` must be called after the\n \/\/ content layout is finished.\n self.calculate_block_height();\n }\n\n \/\/\/ Calculate the width of a block-level non-replaced element in normal flow.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#blockwidth\n \/\/\/\n \/\/\/ Sets the horizontal margin\/padding\/border dimensions, and the `width`.\n fn calculate_block_width(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n\n \/\/ `width` has initial value `auto`.\n let auto = Keyword(\"auto\".to_string());\n let 
mut width = style.value(\"width\").unwrap_or(auto.clone());\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n let mut margin_left = style.lookup(\"margin-left\", \"margin\", &zero);\n let mut margin_right = style.lookup(\"margin-right\", \"margin\", &zero);\n\n let border_left = style.lookup(\"border-left-width\", \"border-width\", &zero);\n let border_right = style.lookup(\"border-right-width\", \"border-width\", &zero);\n\n let padding_left = style.lookup(\"padding-left\", \"padding\", &zero);\n let padding_right = style.lookup(\"padding-right\", \"padding\", &zero);\n\n let total = [&margin_left, &margin_right, &border_left, &border_right,\n &padding_left, &padding_right, &width].iter().map(|v| v.to_px()).sum();\n\n \/\/ If width is not auto and the total is wider than the container, treat auto margins as 0.\n if width != auto && total > containing_block.width {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n }\n\n \/\/ Adjust used values so that the above sum equals `containing_block.width`.\n \/\/ Each arm of the `match` should increase the total width by exactly `underflow`,\n \/\/ and afterward all values should be absolute lengths in px.\n let underflow = containing_block.width - total;\n match (width == auto, margin_left == auto, margin_right == auto) {\n \/\/ If the values are overconstrained, calculate margin_right.\n (false, false, false) => {\n margin_right = Length(margin_right.to_px() + underflow, Px);\n }\n \/\/ If exactly one value is auto, its used value follows from the equality.\n (false, false, true) => {\n margin_right = Length(underflow, Px);\n }\n (false, true, false) => {\n margin_left = Length(underflow, Px);\n }\n \/\/ If width is set to auto, any other auto values become 0.\n (true, _, _) => {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n width = Length(underflow, Px);\n }\n (false, true, true) => {\n \/\/ If margin-left and margin-right are both auto, their used values are equal.\n margin_left = Length(underflow \/ 2.0, Px);\n margin_right = Length(underflow \/ 2.0, Px);\n }\n }\n\n let d = &mut self.dimensions;\n d.width = width.to_px();\n\n d.padding.left = padding_left.to_px();\n d.padding.right = padding_right.to_px();\n\n d.border.left = border_left.to_px();\n d.border.right = border_right.to_px();\n\n d.margin.left = margin_left.to_px();\n d.margin.right = margin_right.to_px();\n }\n\n \/\/\/ Finish calculating the block's edge sizes, and position it within its containing block.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#normal-block\n \/\/\/\n \/\/\/ Sets the vertical margin\/padding\/border dimensions, and the `x`, `y` values.\n fn calculate_block_position(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n let d = &mut self.dimensions;\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n \/\/ If margin-top or margin-bottom is `auto`, the used value is zero.\n d.margin.top = style.lookup(\"margin-top\", \"margin\", &zero).to_px();\n d.margin.bottom = style.lookup(\"margin-bottom\", \"margin\", &zero).to_px();\n\n d.border.top = style.lookup(\"border-top-width\", \"border-width\", &zero).to_px();\n d.border.bottom = style.lookup(\"border-bottom-width\", \"border-width\", &zero).to_px();\n\n d.padding.top = style.lookup(\"padding-top\", \"padding\", 
&zero).to_px();\n d.padding.bottom = style.lookup(\"padding-bottom\", \"padding\", &zero).to_px();\n\n \/\/ Position the box below all the previous boxes in the container.\n d.x = containing_block.x +\n d.margin.left + d.border.left + d.padding.left;\n d.y = containing_block.y + containing_block.height +\n d.margin.top + d.border.top + d.padding.top;\n }\n\n \/\/\/ Lay out the node's children within its content area and return the content height.\n \/\/\/\n \/\/\/ Set `height` to the total content height.\n fn layout_block_children(&mut self) {\n let d = &mut self.dimensions;\n for child in self.children.mut_iter() {\n child.layout(*d);\n \/\/ Increment the height so each child is laid out below the previous one.\n d.height = d.height + child.dimensions.margin_box_height();\n }\n }\n\n \/\/\/ Height of a block-level non-replaced element in normal flow with overflow visible.\n fn calculate_block_height(&mut self) {\n \/\/ If the height is set to an explicit length, use that exact length.\n \/\/ Otherwise, just keep the value set by `layout_block_children`.\n match self.get_style_node().value(\"height\") {\n Some(Length(h, Px)) => { self.dimensions.height = h; }\n _ => {}\n }\n }\n\n \/\/\/ Where a new inline child should go.\n fn get_inline_container(&mut self) -> &mut LayoutBox<'a> {\n match self.box_type {\n InlineNode(_) | AnonymousBlock => self,\n BlockNode(_) => {\n \/\/ If we've just generated an anonymous block box, keep using it.\n \/\/ Otherwise, create a new one.\n match self.children.last() {\n Some(&LayoutBox { box_type: AnonymousBlock,..}) => {}\n _ => self.children.push(LayoutBox::new(AnonymousBlock))\n }\n self.children.mut_last().unwrap()\n }\n }\n }\n}\n\nimpl Dimensions {\n \/\/\/ Total height of a box including its margins, border, and padding.\n fn margin_box_height(&self) -> f32 {\n self.height + self.padding.top + self.padding.bottom\n + self.border.top + self.border.bottom\n + self.margin.top + self.margin.bottom\n }\n}\n<commit_msg>Style fix<commit_after>\/\/\/! Basic CSS block layout.\n\nuse style::{StyledNode, Inline, Block, DisplayNone};\nuse css::{Keyword, Length, Px};\nuse std::default::Default;\nuse std::iter::AdditiveIterator; \/\/ for `sum`\n\n\/\/ CSS box model. 
All sizes are in px.\n\n#[deriving(Default, Show)]\npub struct Dimensions {\n \/\/ Top left corner of the content area, relative to the document origin:\n pub x: f32,\n pub y: f32,\n\n \/\/ Content area size:\n pub width: f32,\n pub height: f32,\n\n \/\/ Surrounding edges:\n pub padding: EdgeSizes,\n pub border: EdgeSizes,\n pub margin: EdgeSizes,\n}\n\n#[deriving(Default, Show)]\npub struct EdgeSizes {\n pub left: f32,\n pub right: f32,\n pub top: f32,\n pub bottom: f32,\n}\n\n\/\/\/ A node in the layout tree.\npub struct LayoutBox<'a> {\n pub dimensions: Dimensions,\n pub box_type: BoxType<'a>,\n pub children: Vec<LayoutBox<'a>>,\n}\n\npub enum BoxType<'a> {\n BlockNode(&'a StyledNode<'a>),\n InlineNode(&'a StyledNode<'a>),\n AnonymousBlock,\n}\n\nimpl<'a> LayoutBox<'a> {\n fn new(box_type: BoxType) -> LayoutBox {\n LayoutBox {\n box_type: box_type,\n dimensions: Default::default(),\n children: Vec::new(),\n }\n }\n\n fn get_style_node(&self) -> &'a StyledNode<'a> {\n match self.box_type {\n BlockNode(node) => node,\n InlineNode(node) => node,\n AnonymousBlock => fail!(\"Anonymous block box has no style node\")\n }\n }\n}\n\n\/\/\/ Transform a style tree into a layout tree.\npub fn layout_tree<'a>(node: &'a StyledNode<'a>, containing_block: Dimensions) -> LayoutBox<'a> {\n let mut root_box = build_layout_tree(node);\n root_box.layout(containing_block);\n return root_box;\n}\n\n\/\/\/ Build the tree of LayoutBoxes, but don't perform any layout calculations yet.\nfn build_layout_tree<'a>(style_node: &'a StyledNode<'a>) -> LayoutBox<'a> {\n \/\/ Create the root box.\n let mut root = LayoutBox::new(match style_node.display() {\n Block => BlockNode(style_node),\n Inline => InlineNode(style_node),\n DisplayNone => fail!(\"Root node has display: none.\")\n });\n\n \/\/ Create the descendant boxes.\n for child in style_node.children.iter() {\n match child.display() {\n Block => root.children.push(build_layout_tree(child)),\n Inline => root.get_inline_container().children.push(build_layout_tree(child)),\n DisplayNone => {} \/\/ Don't lay out nodes with `display: none;`\n }\n }\n return root;\n}\n\nimpl<'a> LayoutBox<'a> {\n \/\/\/ Lay out a box and its descendants.\n fn layout(&mut self, containing_block: Dimensions) {\n match self.box_type {\n BlockNode(_) => self.layout_block(containing_block),\n InlineNode(_) => {} \/\/ TODO\n AnonymousBlock => {} \/\/ TODO\n }\n }\n\n \/\/\/ Lay out a block-level element and its descendants.\n fn layout_block(&mut self, containing_block: Dimensions) {\n \/\/ Child width can depend on parent width, so we need to calculate this node's width before\n \/\/ laying out its children.\n self.calculate_block_width(containing_block);\n\n \/\/ Determine where the block is located within its container.\n self.calculate_block_position(containing_block);\n\n \/\/ Recursively lay out the children of this node within its content area.\n self.layout_block_children();\n\n \/\/ Parent height can depend on child height, so `calculate_height` must be called after the\n \/\/ content layout is finished.\n self.calculate_block_height();\n }\n\n \/\/\/ Calculate the width of a block-level non-replaced element in normal flow.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#blockwidth\n \/\/\/\n \/\/\/ Sets the horizontal margin\/padding\/border dimensions, and the `width`.\n fn calculate_block_width(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n\n \/\/ `width` has initial value `auto`.\n let auto = Keyword(\"auto\".to_string());\n let 
mut width = style.value(\"width\").unwrap_or(auto.clone());\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n let mut margin_left = style.lookup(\"margin-left\", \"margin\", &zero);\n let mut margin_right = style.lookup(\"margin-right\", \"margin\", &zero);\n\n let border_left = style.lookup(\"border-left-width\", \"border-width\", &zero);\n let border_right = style.lookup(\"border-right-width\", \"border-width\", &zero);\n\n let padding_left = style.lookup(\"padding-left\", \"padding\", &zero);\n let padding_right = style.lookup(\"padding-right\", \"padding\", &zero);\n\n let total = [&margin_left, &margin_right, &border_left, &border_right,\n &padding_left, &padding_right, &width].iter().map(|v| v.to_px()).sum();\n\n \/\/ If width is not auto and the total is wider than the container, treat auto margins as 0.\n if width != auto && total > containing_block.width {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n }\n\n \/\/ Adjust used values so that the above sum equals `containing_block.width`.\n \/\/ Each arm of the `match` should increase the total width by exactly `underflow`,\n \/\/ and afterward all values should be absolute lengths in px.\n let underflow = containing_block.width - total;\n match (width == auto, margin_left == auto, margin_right == auto) {\n \/\/ If the values are overconstrained, calculate margin_right.\n (false, false, false) => {\n margin_right = Length(margin_right.to_px() + underflow, Px);\n }\n \/\/ If exactly one value is auto, its used value follows from the equality.\n (false, false, true) => {\n margin_right = Length(underflow, Px);\n }\n (false, true, false) => {\n margin_left = Length(underflow, Px);\n }\n \/\/ If width is set to auto, any other auto values become 0.\n (true, _, _) => {\n if margin_left == auto {\n margin_left = Length(0.0, Px);\n }\n if margin_right == auto {\n margin_right = Length(0.0, Px);\n }\n width = Length(underflow, Px);\n }\n \/\/ If margin-left and margin-right are both auto, their used values are equal.\n (false, true, true) => {\n margin_left = Length(underflow \/ 2.0, Px);\n margin_right = Length(underflow \/ 2.0, Px);\n }\n }\n\n let d = &mut self.dimensions;\n d.width = width.to_px();\n\n d.padding.left = padding_left.to_px();\n d.padding.right = padding_right.to_px();\n\n d.border.left = border_left.to_px();\n d.border.right = border_right.to_px();\n\n d.margin.left = margin_left.to_px();\n d.margin.right = margin_right.to_px();\n }\n\n \/\/\/ Finish calculating the block's edge sizes, and position it within its containing block.\n \/\/\/\n \/\/\/ http:\/\/www.w3.org\/TR\/CSS2\/visudet.html#normal-block\n \/\/\/\n \/\/\/ Sets the vertical margin\/padding\/border dimensions, and the `x`, `y` values.\n fn calculate_block_position(&mut self, containing_block: Dimensions) {\n let style = self.get_style_node();\n let d = &mut self.dimensions;\n\n \/\/ margin, border, and padding have initial value 0.\n let zero = Length(0.0, Px);\n\n \/\/ If margin-top or margin-bottom is `auto`, the used value is zero.\n d.margin.top = style.lookup(\"margin-top\", \"margin\", &zero).to_px();\n d.margin.bottom = style.lookup(\"margin-bottom\", \"margin\", &zero).to_px();\n\n d.border.top = style.lookup(\"border-top-width\", \"border-width\", &zero).to_px();\n d.border.bottom = style.lookup(\"border-bottom-width\", \"border-width\", &zero).to_px();\n\n d.padding.top = style.lookup(\"padding-top\", \"padding\", 
&zero).to_px();\n d.padding.bottom = style.lookup(\"padding-bottom\", \"padding\", &zero).to_px();\n\n \/\/ Position the box below all the previous boxes in the container.\n d.x = containing_block.x +\n d.margin.left + d.border.left + d.padding.left;\n d.y = containing_block.y + containing_block.height +\n d.margin.top + d.border.top + d.padding.top;\n }\n\n \/\/\/ Lay out the node's children within its content area and return the content height.\n \/\/\/\n \/\/\/ Set `height` to the total content height.\n fn layout_block_children(&mut self) {\n let d = &mut self.dimensions;\n for child in self.children.mut_iter() {\n child.layout(*d);\n \/\/ Increment the height so each child is laid out below the previous one.\n d.height = d.height + child.dimensions.margin_box_height();\n }\n }\n\n \/\/\/ Height of a block-level non-replaced element in normal flow with overflow visible.\n fn calculate_block_height(&mut self) {\n \/\/ If the height is set to an explicit length, use that exact length.\n \/\/ Otherwise, just keep the value set by `layout_block_children`.\n match self.get_style_node().value(\"height\") {\n Some(Length(h, Px)) => { self.dimensions.height = h; }\n _ => {}\n }\n }\n\n \/\/\/ Where a new inline child should go.\n fn get_inline_container(&mut self) -> &mut LayoutBox<'a> {\n match self.box_type {\n InlineNode(_) | AnonymousBlock => self,\n BlockNode(_) => {\n \/\/ If we've just generated an anonymous block box, keep using it.\n \/\/ Otherwise, create a new one.\n match self.children.last() {\n Some(&LayoutBox { box_type: AnonymousBlock,..}) => {}\n _ => self.children.push(LayoutBox::new(AnonymousBlock))\n }\n self.children.mut_last().unwrap()\n }\n }\n }\n}\n\nimpl Dimensions {\n \/\/\/ Total height of a box including its margins, border, and padding.\n fn margin_box_height(&self) -> f32 {\n self.height + self.padding.top + self.padding.bottom\n + self.border.top + self.border.bottom\n + self.margin.top + self.margin.bottom\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>made a seperate interface for retrieving only applicable referenced table<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Chore(trash): fix import warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>more benchmarks<commit_after>#[cfg(feature = \"verbose\")]\nmacro_rules! debugln {\n ($fmt:expr) => (println!($fmt));\n ($fmt:expr, $($arg:tt)*) => (println!($fmt, $($arg)*));\n}\n\n#[cfg(not(feature = \"verbose\"))]\nmacro_rules! 
debugln {\n ($fmt:expr) => ();\n ($fmt:expr, $($arg:tt)*) => ();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Body is Measurable<commit_after><|endoftext|>"} {"text":"<commit_before>#![allow(unused_variables)]\n\nextern crate orbclient;\nextern crate sinulation;\n\n#[cfg(target_os = \"redox\")]\nuse sinulation::Trig;\n\nuse super::vid;\n\n\/\/ Had to store this somewhere.\n\/*cargo build --example tetrahedrane_example --target i386-unknown-redox.json -- -C no-prepopulate-passes -C no-stack-check -C opt-level=2 -Z no-landing-pads -A dead_code\n*\/\n\n\/\/\/ Stores data about rendering, window and camera.\npub struct Window {\n pub screen_x: u32,\n pub screen_y: u32,\n\n pub camera_x: f32,\n pub camera_y: f32,\n pub camera_z: f32,\n\n pub camera_x_y: f32,\n pub camera_x_z: f32,\n pub camera_y_z: f32,\n\n pub window: Box<orbclient::window::Window>,\n\n pub render_queue: Vec<vid::Triangle>, \n}\n\nimpl Window {\n \/\/\/ Create a new window.\n \/\/\/\n \/\/\/ * `triangle_space` - how much space to preallocate for the triangles\n pub fn new(screen_x: u32, screen_y: u32, window_name: &str, triangle_space: usize) -> Window {\n let win = orbclient::window::Window::new_flags(10, 10, screen_x, screen_y, window_name, true).unwrap();\n\n Window {\n screen_x: screen_x,\n screen_y: screen_y,\n\n camera_x: 0.0,\n camera_y: 0.0,\n camera_z: 0.0,\n\n camera_x_y: 0.0,\n camera_x_z: 0.0,\n camera_y_z: 0.0,\n\n window: win,\n\n render_queue: Vec::with_capacity(triangle_space),\n }\n }\n\n \/\/\/ Renders triangles onto the framebuffer.\n pub fn render(&mut self, triangle: vid::Triangle, shaders: &Vec<vid::Shader>) {\n for shader_id in triangle.shader_ids.clone().iter() {\n let mut assoc_shader = shaders.iter().find(|&shader| shader.id == shader_id.clone());\n if assoc_shader.is_none() {\n continue;\n }\n\n let mut unwrapped_shader = assoc_shader.unwrap();\n (unwrapped_shader.shader)(&triangle, self, unwrapped_shader);\n }\n\n let used_space = self.render_queue.len();\n\n self.render_queue = Vec::with_capacity(used_space);\n }\n\n \/\/\/ Push a triangle onto the render queue.\n pub fn push(&mut self, triangle: vid::Triangle) {\n self.render_queue.push(triangle);\n }\n\n \/\/\/ Push a group of triangles onto the render queue.\n \/*pub fn push_group(&mut self, group: &vid::TriangleGroup) {\n for triangle in &group.triangles {\n self.push(triangle.clone());\n }\n }*\/\n\n \/\/\/ Normalize the camera rotations.\n pub fn normalize_camera(&mut self) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32::consts::PI;\n\n #[cfg(target_os = \"redox\")]\n const PI: f32 = 3.141592653589793;\n\n if self.camera_x_z > (PI * 2.0) {\n self.camera_x_z -= (PI * 2.0);\n }\n\n if self.camera_x_y > (PI * 2.0) {\n self.camera_x_y -= (PI * 2.0);\n }\n\n if self.camera_y_z > (PI * 2.0) {\n self.camera_y_z -= (PI * 2.0);\n }\n }\n}\n<commit_msg>Added group drawing method<commit_after>#![allow(unused_variables)]\n\nextern crate orbclient;\nextern crate sinulation;\n\n#[cfg(target_os = \"redox\")]\nuse sinulation::Trig;\n\nuse super::vid;\n\n\/\/ Had to store this somewhere.\n\/*cargo build --example tetrahedrane_example --target i386-unknown-redox.json -- -C no-prepopulate-passes -C no-stack-check -C opt-level=2 -Z no-landing-pads -A dead_code\n*\/\n\n\/\/\/ Stores data about rendering, window and camera.\npub struct Window {\n pub screen_x: u32,\n pub screen_y: u32,\n\n pub camera_x: f32,\n pub camera_y: f32,\n pub camera_z: f32,\n\n pub camera_x_y: f32,\n pub camera_x_z: f32,\n pub camera_y_z: f32,\n\n pub window: 
Box<orbclient::window::Window>,\n\n pub render_queue: Vec<vid::Triangle>, \n}\n\nimpl Window {\n \/\/\/ Create a new window.\n \/\/\/\n \/\/\/ * `triangle_space` - how much space to preallocate for the triangles\n pub fn new(screen_x: u32, screen_y: u32, window_name: &str, triangle_space: usize) -> Window {\n let win = orbclient::window::Window::new_flags(10, 10, screen_x, screen_y, window_name, true).unwrap();\n\n Window {\n screen_x: screen_x,\n screen_y: screen_y,\n\n camera_x: 0.0,\n camera_y: 0.0,\n camera_z: 0.0,\n\n camera_x_y: 0.0,\n camera_x_z: 0.0,\n camera_y_z: 0.0,\n\n window: win,\n\n render_queue: Vec::with_capacity(triangle_space),\n }\n }\n\n \/\/\/ Renders triangles onto the framebuffer.\n pub fn render(&mut self, triangle: vid::Triangle, shaders: &Vec<vid::Shader>) {\n for shader_id in triangle.shader_ids.clone().iter() {\n let mut assoc_shader = shaders.iter().find(|&shader| shader.id == shader_id.clone());\n if assoc_shader.is_none() {\n continue;\n }\n\n let mut unwrapped_shader = assoc_shader.unwrap();\n (unwrapped_shader.shader)(&triangle, self, unwrapped_shader);\n }\n\n let used_space = self.render_queue.len();\n\n self.render_queue = Vec::with_capacity(used_space);\n }\n\n pub fn render_group(&mut self, group: vid::TriangleGroup, shaders: &Vec<vid::Shader>) {\n let group_shaders = group.shader_ids.clone();\n for triangle in group.triangles {\n self.render(triangle, shaders);\n }\n }\n\n\n \/\/\/ Push a triangle onto the render queue.\n pub fn push(&mut self, triangle: vid::Triangle) {\n self.render_queue.push(triangle);\n }\n\n \/\/\/ Push a group of triangles onto the render queue.\n \/*pub fn push_group(&mut self, group: &vid::TriangleGroup) {\n for triangle in &group.triangles {\n self.push(triangle.clone());\n }\n }*\/\n\n \/\/\/ Normalize the camera rotations.\n pub fn normalize_camera(&mut self) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32::consts::PI;\n\n #[cfg(target_os = \"redox\")]\n const PI: f32 = 3.141592653589793;\n\n if self.camera_x_z > (PI * 2.0) {\n self.camera_x_z -= (PI * 2.0);\n }\n\n if self.camera_x_y > (PI * 2.0) {\n self.camera_x_y -= (PI * 2.0);\n }\n\n if self.camera_y_z > (PI * 2.0) {\n self.camera_y_z -= (PI * 2.0);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix cursor not showing after program closes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added an example ini parser<commit_after>\/\/! 
Parser example for INI files.\nextern crate combine;\n\nuse std::collections::HashMap;\n\nuse combine::*;\nuse combine::primitives::{Error, SourcePosition, Stream};\n\n#[derive(PartialEq, Debug)]\npub struct Ini {\n pub global: HashMap<String, String>,\n pub sections: HashMap<String, HashMap<String, String>>\n}\n\nfn property<I>(input: State<I>) -> ParseResult<(String, String), I>\nwhere I: Stream<Item=char> {\n (\n many1(satisfy(|c| c != '=' && c != '[' && c != ';')),\n token('='),\n many1(satisfy(|c| c != '\\n' && c != ';'))\n )\n .map(|(key, _, value)| (key, value))\n .expected(\"property\")\n .parse_state(input)\n}\n\nfn whitespace<I>(input: State<I>) -> ParseResult<(), I>\nwhere I: Stream<Item=char> {\n let comment = (token(';'), skip_many(satisfy(|c| c != '\\n')))\n .map(|_| ());\n \/\/Wrap the `spaces().or(comment)` in `skip_many` so that it skips alternating whitespace and comments\n skip_many(skip_many1(space()).or(comment))\n .parse_state(input)\n}\n\nfn properties<I>(input: State<I>) -> ParseResult<HashMap<String, String>, I>\nwhere I: Stream<Item=char> {\n \/\/After each property we skip any whitespace that followed it\n many(parser(property).skip(parser(whitespace)))\n .parse_state(input)\n}\n\nfn section<I>(input: State<I>) -> ParseResult<(String, HashMap<String, String>), I>\nwhere I: Stream<Item=char> {\n (\n between(token('['), token(']'), many(satisfy(|c| c != ']'))),\n parser(whitespace),\n parser(properties)\n )\n .map(|(name, _, properties)| (name, properties))\n .expected(\"section\")\n .parse_state(input)\n}\n\nfn ini<I>(input: State<I>) -> ParseResult<Ini, I>\nwhere I: Stream<Item=char> {\n (parser(whitespace), parser(properties), many(parser(section)))\n .map(|(_, global, sections)| Ini { global: global, sections: sections })\n .parse_state(input)\n}\n\n#[test]\nfn ini_ok() {\n let text = r#\"\nlanguage=rust\n\n[section]\nname=combine; Comment\ntype=LL(1)\n\n\"#;\n let mut expected = Ini {\n global: HashMap::new(),\n sections: HashMap::new()\n };\n expected.global.insert(String::from(\"language\"), String::from(\"rust\"));\n\n let mut section = HashMap::new();\n section.insert(String::from(\"name\"), String::from(\"combine\"));\n section.insert(String::from(\"type\"), String::from(\"LL(1)\"));\n expected.sections.insert(String::from(\"section\"), section);\n\n let result = parser(ini)\n .parse(text)\n .map(|t| t.0);\n assert_eq!(result, Ok(expected));\n}\n\n#[test]\nfn ini_error() {\n let text = \"[error\";\n let result = parser(ini)\n .parse(text)\n .map(|t| t.0);\n assert_eq!(result, Err(ParseError {\n position: SourcePosition { line: 1, column: 7 },\n errors: vec![\n Error::end_of_input(),\n Error::Expected(\"section\".into()),\n ]\n }));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test for all workspace members<commit_after>extern crate meta;\n\nuse std::path::PathBuf;\nuse std::process::Command;\n\n\/\/\/ Calls `cargo test` in every workspace member.\n\/\/\/\n\/\/\/ FIXME: This test should be removed once `cargo` has a way to run tests for all crates in a\n\/\/\/ workspace.\n#[test]\nfn test_all_workspace_members() {\n for task in meta::local::parse_tasks(\"Cargo.toml\") {\n let status = Command::new(\"cargo\")\n .current_dir(PathBuf::from(\"tasks\").join(&task.crate_name()))\n .arg(\"test\")\n .status()\n .unwrap();\n\n assert!(status.success());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add EC2 Simple Systems Manager tests<commit_after>#![cfg(feature = \"ssm\")]\n\nextern crate rusoto;\n\nuse rusoto::ssm::{SsmClient, 
ListDocumentsRequest, ListCommandsRequest, ListCommandInvocationsRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_list_documents() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = SsmClient::new(credentials, Region::UsEast1);\n\n let request = ListDocumentsRequest::default();\n\n match client.list_documents(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n#[test]\nfn should_list_commands() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = SsmClient::new(credentials, Region::UsEast1);\n\n let request = ListCommandsRequest::default();\n\n match client.list_commands(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n#[test]\nfn should_list_command_invocations() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = SsmClient::new(credentials, Region::UsEast1);\n\n let request = ListCommandInvocationsRequest::default();\n\n match client.list_command_invocations(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Tests: Add code to generate Add docs<commit_after>#![allow(dead_code)]\n#[macro_use]\nextern crate derive_more;\n\n#[derive(Add)]\nstruct MyInts(i32, i32);\n\n#[derive(Add)]\nstruct Point2D {\n x: i32,\n y: i32,\n}\n\n#[derive(Add)]\nenum MixedInts {\n SmallInt(i32),\n BigInt(i64),\n TwoSmallInts(i32, i32),\n NamedSmallInts { x: i32, y: i32 },\n UnsignedOne(u32),\n UnsignedTwo(u32),\n Unit,\n}\n<|endoftext|>"} {"text":"<commit_before>use buffer::{Content, Buffer, BufferAny, BufferType, BufferMode, BufferCreationError};\nuse buffer::{BufferSlice, BufferMutSlice};\nuse uniforms::{AsUniformValue, UniformBlock, UniformValue, LayoutMismatchError};\nuse program;\n\nuse std::ops::{Deref, DerefMut};\n\nuse backend::Facade;\n\n\/\/\/ Buffer that contains a uniform block.\n#[derive(Debug)]\npub struct UniformBuffer<T: ?Sized> where T: Content {\n buffer: Buffer<T>,\n}\n\n\/\/\/ Same as `UniformBuffer` but doesn't contain any information about the type.\n#[derive(Debug)]\npub struct TypelessUniformBuffer {\n buffer: BufferAny,\n}\n\nimpl<T> UniformBuffer<T> where T: Copy {\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn new<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Default)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn dynamic<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Dynamic)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn persistent<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Persistent)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn immutable<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Immutable)\n }\n\n #[inline]\n fn new_impl<F>(facade: &F, data: T, mode: BufferMode)\n -> 
Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::new(facade, &data, BufferType::UniformBuffer, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError> where F: Facade {\n UniformBuffer::empty_impl(facade, BufferMode::Default)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_dynamic<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_impl(facade, BufferMode::Dynamic)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_persistent<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_impl(facade, BufferMode::Persistent)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_immutable<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_impl(facade, BufferMode::Immutable)\n }\n\n #[inline]\n fn empty_impl<F>(facade: &F, mode: BufferMode) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::empty(facade, BufferType::UniformBuffer, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n}\n\nimpl<T: ?Sized> UniformBuffer<T> where T: Content {\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Default)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_dynamic<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Dynamic)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_persistent<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Persistent)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_immutable<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Immutable)\n }\n\n #[inline]\n fn empty_unsized_impl<F>(facade: &F, size: usize, mode: BufferMode)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::empty_unsized(facade, BufferType::UniformBuffer, size, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n}\n\nimpl<T: ?Sized> Deref for UniformBuffer<T> where T: Content {\n type Target = Buffer<T>;\n\n #[inline]\n fn deref(&self) -> &Buffer<T> {\n &self.buffer\n }\n}\n\nimpl<T: ?Sized> DerefMut for UniformBuffer<T> where T: Content {\n #[inline]\n fn deref_mut(&mut self) -> &mut Buffer<T> {\n &mut self.buffer\n 
}\n}\n\nimpl<'a, T: ?Sized> From<&'a UniformBuffer<T>> for BufferSlice<'a, T> where T: Content {\n #[inline]\n fn from(b: &'a UniformBuffer<T>) -> BufferSlice<'a, T> {\n b.buffer.as_slice()\n }\n}\n\nimpl<'a, T: ?Sized> From<&'a mut UniformBuffer<T>> for BufferMutSlice<'a, T> where T: Content {\n #[inline]\n fn from(b: &'a mut UniformBuffer<T>) -> BufferMutSlice<'a, T> {\n b.buffer.as_mut_slice()\n }\n}\n\nimpl<'a, T: ?Sized> AsUniformValue for &'a UniformBuffer<T> where T: UniformBlock + Content {\n #[inline]\n fn as_uniform_value(&self) -> UniformValue {\n #[inline]\n fn f<T: ?Sized>(block: &program::UniformBlock)\n -> Result<(), LayoutMismatchError> where T: UniformBlock + Content\n {\n \/\/ TODO: more checks?\n T::matches(&block.layout, 0)\n }\n\n UniformValue::Block(self.buffer.as_slice_any(), f::<T>)\n }\n}\n<commit_msg>forgotten uniform::Buffer GLObject Trait implementation<commit_after>use buffer::{Content, Buffer, BufferAny, BufferType, BufferMode, BufferCreationError};\nuse buffer::{BufferSlice, BufferMutSlice};\nuse uniforms::{AsUniformValue, UniformBlock, UniformValue, LayoutMismatchError};\nuse program;\n\nuse gl;\nuse GlObject;\n\nuse std::ops::{Deref, DerefMut};\n\nuse backend::Facade;\n\n\/\/\/ Buffer that contains a uniform block.\n#[derive(Debug)]\npub struct UniformBuffer<T: ?Sized> where T: Content {\n buffer: Buffer<T>,\n}\n\n\/\/\/ Same as `UniformBuffer` but doesn't contain any information about the type.\n#[derive(Debug)]\npub struct TypelessUniformBuffer {\n buffer: BufferAny,\n}\n\nimpl<T: Copy> GlObject for UniformBuffer<T> {\n type Id = gl::types::GLuint;\n\n #[inline]\n fn get_id(&self) -> gl::types::GLuint {\n self.buffer.get_id()\n }\n}\n\nimpl<T> UniformBuffer<T> where T: Copy {\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn new<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Default)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn dynamic<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Dynamic)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn persistent<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Persistent)\n }\n\n \/\/\/ Uploads data in the uniforms buffer.\n #[inline]\n pub fn immutable<F>(facade: &F, data: T) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::new_impl(facade, data, BufferMode::Immutable)\n }\n\n #[inline]\n fn new_impl<F>(facade: &F, data: T, mode: BufferMode)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::new(facade, &data, BufferType::UniformBuffer, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError> where F: Facade {\n UniformBuffer::empty_impl(facade, BufferMode::Default)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_dynamic<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_impl(facade, BufferMode::Dynamic)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_persistent<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n 
UniformBuffer::empty_impl(facade, BufferMode::Persistent)\n }\n\n \/\/\/ Creates an empty buffer.\n #[inline]\n pub fn empty_immutable<F>(facade: &F) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_impl(facade, BufferMode::Immutable)\n }\n\n #[inline]\n fn empty_impl<F>(facade: &F, mode: BufferMode) -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::empty(facade, BufferType::UniformBuffer, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n}\n\nimpl<T: ?Sized> UniformBuffer<T> where T: Content {\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Default)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_dynamic<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Dynamic)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_persistent<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Persistent)\n }\n\n \/\/\/ Creates an empty buffer.\n \/\/\/\n \/\/\/ # Panic\n \/\/\/\n \/\/\/ Panicks if the size passed as parameter is not suitable for the type of data.\n \/\/\/\n #[inline]\n pub fn empty_unsized_immutable<F>(facade: &F, size: usize)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n UniformBuffer::empty_unsized_impl(facade, size, BufferMode::Immutable)\n }\n\n #[inline]\n fn empty_unsized_impl<F>(facade: &F, size: usize, mode: BufferMode)\n -> Result<UniformBuffer<T>, BufferCreationError>\n where F: Facade\n {\n let buffer = try!(Buffer::empty_unsized(facade, BufferType::UniformBuffer, size, mode));\n\n Ok(UniformBuffer {\n buffer: buffer,\n })\n }\n}\n\nimpl<T: ?Sized> Deref for UniformBuffer<T> where T: Content {\n type Target = Buffer<T>;\n\n #[inline]\n fn deref(&self) -> &Buffer<T> {\n &self.buffer\n }\n}\n\nimpl<T: ?Sized> DerefMut for UniformBuffer<T> where T: Content {\n #[inline]\n fn deref_mut(&mut self) -> &mut Buffer<T> {\n &mut self.buffer\n }\n}\n\nimpl<'a, T: ?Sized> From<&'a UniformBuffer<T>> for BufferSlice<'a, T> where T: Content {\n #[inline]\n fn from(b: &'a UniformBuffer<T>) -> BufferSlice<'a, T> {\n b.buffer.as_slice()\n }\n}\n\nimpl<'a, T: ?Sized> From<&'a mut UniformBuffer<T>> for BufferMutSlice<'a, T> where T: Content {\n #[inline]\n fn from(b: &'a mut UniformBuffer<T>) -> BufferMutSlice<'a, T> {\n b.buffer.as_mut_slice()\n }\n}\n\nimpl<'a, T: ?Sized> AsUniformValue for &'a UniformBuffer<T> where T: UniformBlock + Content {\n #[inline]\n fn as_uniform_value(&self) -> UniformValue {\n #[inline]\n fn f<T: ?Sized>(block: &program::UniformBlock)\n -> Result<(), LayoutMismatchError> where T: UniformBlock + Content\n {\n \/\/ TODO: more checks?\n T::matches(&block.layout, 0)\n }\n\n 
UniformValue::Block(self.buffer.as_slice_any(), f::<T>)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>prepare outline of helper function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add empty parser module<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>initial commit<commit_after>#![recursion_limit=\"5000\"]\n\/\/#![feature(trace_macros)] trace_macros!(true);\n\nmacro_rules! bf {\n \/\/ these rules handle the end of the instruction stream -- either the real end of the program,\n \/\/ or the end of a loop body\n \n \/\/ no loop stack and no tail stack => goodbye\n (@run () [$cur_:tt $left_:tt $right_:tt] [] [] [] $out_:tt $_in:tt) => {\n bf!(@out $cur_ [] $left_ $right_ $out_)\n };\n \/\/ counter is zero, pop the loop stack to exit the loop\n (@run () [() $left_:tt $right_:tt] [] [$loops_head:tt $($loops_tail:tt)*] [[$head:tt $($tail:tt)*] $($tails_:tt)*] $out_:tt $in_:tt) => {\n bf!(@run $head [() $left_ $right_] [$($tail)*] [$($loops_tail)*] [$($tails_)*] $out_ $in_)\n };\n \/\/ counter is nonzero, restart the loop\n (@run () $state_:tt [] [[$head:tt $($tail:tt)*] $($loops:tt)*] $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head $state_ [$($tail)*] [[$head $($tail)*] $($loops)*] $tails_ $out_ $in_)\n };\n\n \/\/ the next few rules deal with multi-character tokens which are parsed together, but we want\n \/\/ the individual tokens, so just split them up and push the pieces back onto the program\n\n \/\/ >> is actually > >\n (@run >> $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run > $state_ [> $($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ << is actually < <\n (@run << $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run < $state_ [< $($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ .. is actually . .\n (@run .. $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run . $state_ [. $($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ ... is actually . . .\n (@run ... $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run . $state_ [. . 
$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ <- is actually < -\n (@run <- $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run < $state_ [- $($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ -> is actually - >\n (@run -> $state_:tt [$($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $out_:tt) => {\n bf!(@run - $state_ [> $($tail)*] $loops_ $tails_ $out_ $in_)\n };\n\n \/\/ now the instructions themselves!\n \n \/\/ add one to the current cell: $cur => ($cur)\n (@run + [$cur:tt $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [($cur) $left_ $right_] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \n \/\/ subtract one from the current cell: ($cur) => (), () => () (that is, underflow is a no-op)\n (@run - [() $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {{\n bf!(@run $head [() $left_ $right_] [$($tail)*] $loops_ $tails_ $out_ $in_)\n }};\n (@run - [($cur:tt) $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [$cur $left_ $right_] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \n \/\/ move the cell pointer to the right\n \/\/ the memory expands if necessary (new cell initialized to zero)\n (@run > [$cur:tt [$($lmore:tt)*] [$right:tt $($rmore:tt)*]] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [$right [$cur $($lmore)*] [$($rmore)*]] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n (@run > [$cur:tt [$($lmore:tt)*] []] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [() [$cur $($lmore)*] []] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \n \/\/ move the cell pointer to the left\n \/\/ the memory expands if necessary (new cell initialized to zero)\n (@run < [$cur:tt [$left:tt $($lmore:tt)*] [$($rmore:tt)*]] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [$left [$($lmore)*] [$cur $($rmore)*]] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n (@run < [$cur:tt [] [$($rmore:tt)*]] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [() [] [$cur $($rmore)*]] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \n \/\/ loops!\n \n \/\/ counter is zero, skip the loop\n (@run [$inner_head:tt $($inner_tail:tt)*] [() $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head [() $left_ $right_] [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n \/\/ counter is nonzero, push the current tail + loop and enter the loop\n (@run [$inner_head:tt $($inner_tail:tt)*] $state_:tt $tail_:tt [$($loops:tt)*] [$($tails:tt)*] $out_:tt $in_:tt) => {\n bf!(@run $inner_head $state_ [$($inner_tail)* ()] [[$inner_head $($inner_tail)* ()] $($loops)*] [$tail_ $($tails)*] $out_ $in_)\n };\n \n \/\/ output the current cell: just push $cur onto $out\n \/\/ note: the Rust \"program\" will crash at runtime if the BF program outputs invalid UTF-8\n (@run . 
[$cur:tt $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt [$($out:tt)*] $in_:tt) => {\n bf!(@run $head [$cur $left_ $right_] [$($tail)*] $loops_ $tails_ [$($out)* $cur] $in_)\n };\n\n \/\/ input to the current cell: do nothing on EOF, otherwise pop $in and parse it\n (@run , [$cur:tt $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt []) => {\n bf!(@run $head [$cur $left_ $right_] [$($tail)*] $loops_ $tails_ $out_ [])\n };\n (@run , [$cur:tt $left_:tt $right_:tt] [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt [$in_head:tt $($in_tail:tt)*]) => {\n bf!(@in @revconv $in_head [] [[$head $left_ $right_] [[$($tail)*] $loops_ $tails_ $out_ [$($in_tail)*]]])\n };\n\n (@in @revconv [] [$($digit:tt)*] $stuff:tt) => { bf!(@in @unary [] [] [()] [$($digit)*] $stuff) };\n (@in @revconv [0 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [() $($digit)*] $stuff) };\n (@in @revconv [1 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [(()) $($digit)*] $stuff) };\n (@in @revconv [2 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [((())) $($digit)*] $stuff) };\n (@in @revconv [3 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [(((()))) $($digit)*] $stuff) };\n (@in @revconv [4 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [((((())))) $($digit)*] $stuff) };\n (@in @revconv [5 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [(((((()))))) $($digit)*] $stuff) };\n (@in @revconv [6 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [((((((())))))) $($digit)*] $stuff) };\n (@in @revconv [7 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [(((((((()))))))) $($digit)*] $stuff) };\n (@in @revconv [8 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [((((((((())))))))) $($digit)*] $stuff) };\n (@in @revconv [9 $($tail:tt)*] [$($digit:tt)*] $stuff:tt) => { bf!(@in @revconv [$($tail)*] [(((((((((()))))))))) $($digit)*] $stuff) };\n (@in @unary $fullacc:tt $digitacc:tt $pv:tt [] $stuff:tt) => { bf!(@in @zem () $fullacc $stuff) };\n (@in @unary [$($fullacc:tt)*] [$($digitacc:tt)*] [$($pv:tt)*] [() $($digits:tt)*] $stuff:tt) => {\n bf!(@in @unary [$($fullacc)* $($digitacc)*] [] [$($pv)* $($pv)* $($pv)* $($pv)* $($pv)*\n $($pv)* $($pv)* $($pv)* $($pv)* $($pv)*] [$($digits)*] $stuff)\n };\n (@in @unary $fullacc:tt [$($digitacc:tt)*] [$($pv:tt)*] [($digit:tt) $($digits:tt)*] $stuff:tt) => {\n bf!(@in @unary $fullacc [$($digitacc)* $($pv)*] [$($pv)*] [$digit $($digits)*] $stuff)\n };\n (@in @zem $acc:tt [] [[$head:tt $left:tt $right:tt] [$($other_stuff:tt)*]]) => { bf!(@run $head [$acc $left $right] $($other_stuff)*) };\n (@in @zem $acc:tt [$head:tt $($tail:tt)*] $stuff:tt) => { bf!(@in @zem ($acc) [$($tail)*] $stuff) };\n\n \/\/ invalid instruction is a no-op\n (@run $_instr:tt $state_:tt [$head:tt $($tail:tt)*] $loops_:tt $tails_:tt $out_:tt $in_:tt) => {\n bf!(@run $head $state_ [$($tail)*] $loops_ $tails_ $out_ $in_)\n };\n\n \/\/ the program is over! 
we now output the memory state and the output to Rust\n \/\/ first we need to unzip the memory, which involves reversing the left-hand side\n\n \/\/ the reversal is finished: convert each memory cell and output character to arithmetic and\n \/\/ output them into arrays\n (@out $cur:tt [$($left:tt)*] [] [$($right:tt)*] [$($out:tt)*]) => {\n (&[$(bf!(@count $left),)* bf!(@count $cur), $(bf!(@count $right),)*], &[$(bf!(@count $out),)*])\n };\n \/\/ reverse the left-hand side of the memory zipper for output\n (@out $cur_:tt [$($left_:tt)*] [$left_head:tt $($left_tail:tt)*] $right_:tt $out_:tt) => {\n bf!(@out $cur_ [$left_head $($left_)*] [$($left_tail)*] $right_ $out_)\n };\n \n \/\/ recursively convert a Zermelo numeral to an arithmetic expression\n \/\/ note: only the first and last line should be necessary, but the compiler has a stack\n \/\/ overflow on a too-long chain of additions, so we do a little unrolling\n (@count ()) => { 0u8 };\n (@count ((($inner:tt)))) => { 3u8 + bf!(@count $inner) };\n (@count (($inner:tt))) => { 2u8 + bf!(@count $inner) };\n (@count ($inner:tt)) => { 1u8 + bf!(@count $inner) };\n\n \/\/ entry point: given a sequence of instructions, launch the machine\n ({$head:tt $($tail:tt)*} {$($input:tt)*}) => { bf!(@run $head [() [] []] [$($tail)* ()] [] [] [] [$($input)*]) }\n \/\/ | | | | | | | ^ input\n \/\/ | | | | | | ^ output\n \/\/ | | | | | ^ tail stack\n \/\/ | | | | ^ loop stack\n \/\/ | | | ^ sentinel added to mark the end of the program\n \/\/ | | ^ remainder of program\n \/\/ | ^ memory zipper [cur [left cells, adjacent first] [right cells, adjacent first]]\n \/\/ ^ current instruction\n}\n\n\/\/ the BF program is interpreted by the macro system at compile time and turned into a constant\n\/\/ arithmetic expression (see expanded.rs)\n\/\/ MACHINE is a tuple where MACHINE.0 is the memory state and MACHINE.1 is the output\nconst MACHINE: (&'static [u8], &'static [u8]) =\n bf!({ ,[+.[-],] } { [6 4] [6 5] [6 6] [6 7] [6 8] [6 9] });\n \/\/bf!({ ++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.+++++++..+++.>++.<<+++++++++++++++.>.+++.------.--------.>+.>. 
} {});\n \/\/bf!({ + } { });\n\nfn main() {\n \/\/ all we do at runtime is print the memory state and the output (converted to UTF-8)\n println!(\"memory={:?}\", MACHINE.0);\n println!(\"output:\");\n println!(\"{}\", std::str::from_utf8(MACHINE.1).unwrap());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Do only check entries which are a ref<commit_after><|endoftext|>"} {"text":"<commit_before>extern mod extra;\nextern mod glfw;\nextern mod gl;\nextern mod cgmath;\nextern mod noise;\n\nuse std::cast;\nuse std::ptr;\nuse std::hashmap::HashMap;\nuse std;\n\nuse gl::types::*;\n\nuse cgmath::vector::Vector;\nuse cgmath::vector::Vec3;\nuse cgmath::vector::Vec4;\n\nuse noise::Perlin;\n\nuse CHUNK_SIZE;\nuse WORLD_SIZE;\nuse GraphicsResources;\n\nstatic MAX_CHUNKS : uint = 32;\n\npub struct ChunkLoader<'a> {\n seed : u32,\n graphics_resources : &'a GraphicsResources,\n cache : HashMap<(i64, i64), ~Chunk>\n}\n\nimpl<'a> ChunkLoader<'a> {\n pub fn new<'a>(seed : u32, graphics_resources : &'a GraphicsResources) -> ChunkLoader<'a> {\n ChunkLoader {\n seed: seed,\n graphics_resources : graphics_resources,\n cache: HashMap::new(),\n }\n }\n\n pub fn load(&mut self, cx : i64, cz: i64) {\n println!(\"loading chunk ({}, {})\", cx, cz);\n let chunk = chunk_gen(self.graphics_resources, self.seed, cx, cz);\n self.cache.insert((cx, cz), chunk);\n\n while self.cache.len() > MAX_CHUNKS {\n let (&k, _) = self.cache.iter().min_by(|&(_, chunk)| chunk.used_time).unwrap();\n self.cache.remove(&k);\n }\n }\n}\n\npub struct Chunk {\n x: i64,\n z: i64,\n map: ~Map,\n vao: GLuint,\n vertex_buffer: GLuint,\n normal_buffer: GLuint,\n element_buffer: GLuint,\n num_elements: uint,\n used_time: u64,\n}\n\nimpl Chunk {\n pub fn touch(&mut self) {\n self.used_time = extra::time::precise_time_ns();\n }\n}\n\nimpl Drop for Chunk {\n fn drop(&mut self) {\n unsafe {\n println!(\"unloading chunk ({}, {})\", self.x, self.z);\n gl::DeleteBuffers(1, &self.vertex_buffer);\n gl::DeleteBuffers(1, &self.normal_buffer);\n gl::DeleteBuffers(1, &self.element_buffer);\n gl::DeleteVertexArrays(1, &self.vao);\n }\n }\n}\n\nstruct Block {\n visible: bool,\n}\n\nstruct Map {\n blocks: [[[Block, ..CHUNK_SIZE], ..CHUNK_SIZE], ..CHUNK_SIZE],\n}\n\nstruct Face {\n normal: Vec3<f32>,\n vertices: [Vec3<f32>, ..4],\n}\n\n#[inline]\nfn fast_add_vec3f(a: Vec3<f32>, b: Vec3<f32>) -> Vec3<f32> {\n Vec3{x: a.x + b.x, y: a.y + b.y, z: a.z + b.z }\n}\n\npub fn chunk_gen(res: &GraphicsResources, seed: u32, chunk_x: i64, chunk_z: i64) -> ~Chunk {\n let def_block = Block { visible: false };\n let mut map = ~Map {\n blocks: [[[def_block, ..CHUNK_SIZE], ..CHUNK_SIZE], ..CHUNK_SIZE],\n };\n\n let block_exists = |x: int, y: int, z: int| -> bool {\n if x < 0 || x >= CHUNK_SIZE as int || y < 0 || y >= CHUNK_SIZE as int || z < 0 || z >= CHUNK_SIZE as int {\n return false;\n }\n\n map.blocks[x][y][z].visible\n };\n\n let start_time = extra::time::precise_time_ns();\n\n let perlin = Perlin::from_seed([seed as uint]);\n\n for block_x in range(0, CHUNK_SIZE) {\n for block_z in range(0, CHUNK_SIZE) {\n let noise = perlin.gen([\n (chunk_x + block_x as i64) as f64 * 0.1,\n (chunk_z + block_z as i64) as f64 * 0.1\n ]);\n let height = ((noise + 1.0) * (CHUNK_SIZE as f64 \/ 8.0)) as uint;\n for y in range(0, height) {\n map.blocks[block_x][y][block_z] = Block { visible: true };\n }\n }\n }\n\n let after_noise_time = extra::time::precise_time_ns();\n\n let mut vertices : ~[Vec3<f32>] = ~[];\n let mut normals : ~[Vec3<f32>] = ~[];\n let mut elements : ~[GLuint] = 
~[];\n\n let mut idx = 0;\n\n for x in range(0, CHUNK_SIZE) {\n for y in range(0, CHUNK_SIZE) {\n for z in range(0, CHUNK_SIZE) {\n let block = &map.blocks[x][y][z];\n\n if (!block.visible) {\n continue;\n }\n\n let block_position = Vec3 { x: x as f32, y: y as f32, z: z as f32 };\n\n for face in faces.iter() {\n let neighbor_position = fast_add_vec3f(block_position, face.normal);\n if block_exists(neighbor_position.x as int, neighbor_position.y as int, neighbor_position.z as int) {\n continue;\n }\n\n for &v in face.vertices.iter() {\n vertices.push(fast_add_vec3f(v, block_position));\n normals.push(face.normal);\n }\n\n for e in face_elements.iter() {\n elements.push((idx * face.vertices.len()) as GLuint + *e);\n }\n\n idx += 1;\n }\n }\n }\n }\n\n let after_mesh_time = extra::time::precise_time_ns();\n\n let mut vao = 0;\n let mut vertex_buffer = 0;\n let mut normal_buffer = 0;\n let mut element_buffer = 0;\n\n unsafe {\n \/\/ Create Vertex Array Object\n gl::GenVertexArrays(1, &mut vao);\n gl::BindVertexArray(vao);\n\n \/\/ Create a Vertex Buffer Object and copy the vertex data to it\n gl::GenBuffers(1, &mut vertex_buffer);\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);\n gl::BufferData(gl::ARRAY_BUFFER,\n (vertices.len() * std::mem::size_of::<Vec3<f32>>()) as GLsizeiptr,\n cast::transmute(&vertices[0]),\n gl::STATIC_DRAW);\n\n \/\/ Create a Vertex Buffer Object and copy the normal data to it\n gl::GenBuffers(1, &mut normal_buffer);\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer);\n gl::BufferData(gl::ARRAY_BUFFER,\n (normals.len() * std::mem::size_of::<Vec3<f32>>()) as GLsizeiptr,\n cast::transmute(&normals[0]),\n gl::STATIC_DRAW);\n\n \/\/ Create a Vertex Buffer Object and copy the element data to it\n gl::GenBuffers(1, &mut element_buffer);\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, element_buffer);\n gl::BufferData(gl::ELEMENT_ARRAY_BUFFER,\n (elements.len() * std::mem::size_of::<GLuint>()) as GLsizeiptr,\n cast::transmute(&elements[0]),\n gl::STATIC_DRAW);\n\n \/\/ Specify the layout of the vertex data\n let vert_attr = \"position\".with_c_str(|ptr| gl::GetAttribLocation(res.program, ptr));\n assert!(vert_attr as u32 != gl::INVALID_VALUE);\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);\n gl::EnableVertexAttribArray(vert_attr as GLuint);\n gl::VertexAttribPointer(vert_attr as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n\n let normal_attr = \"normal\".with_c_str(|ptr| gl::GetAttribLocation(res.program, ptr));\n assert!(normal_attr as u32 != gl::INVALID_VALUE);\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer);\n gl::EnableVertexAttribArray(normal_attr as GLuint);\n gl::VertexAttribPointer(normal_attr as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n }\n\n gl::BindVertexArray(0);\n\n let after_buffer_time = extra::time::precise_time_ns();\n\n println!(\"chunk loading profile (us): noise={} mesh={} buffer={}\",\n (after_noise_time - start_time)\/1000,\n (after_mesh_time - after_noise_time)\/1000,\n (after_buffer_time - after_mesh_time)\/1000)\n\n return ~Chunk {\n x: chunk_x,\n z: chunk_z,\n map: map,\n vao: vao,\n vertex_buffer: vertex_buffer,\n normal_buffer: normal_buffer,\n element_buffer: element_buffer,\n num_elements: elements.len(),\n used_time: extra::time::precise_time_ns(),\n };\n}\n\nstatic face_elements : [GLuint, ..6] = [\n 0, 1, 2, 3, 2, 1,\n];\n\nstatic faces : [Face, ..6] = [\n \/* front *\/\n Face {\n normal: Vec3 { x: 0.0, y: 0.0, z: 1.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* bottom left *\/\n Vec3 
{ x: 1.0, y: 0.0, z: 1.0 }, \/* bottom right *\/\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* top left *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* top right *\/\n ],\n },\n\n \/* back *\/\n Face {\n normal: Vec3 { x: 0.0, y: 0.0, z: -1.0 },\n vertices: [\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* bottom right *\/\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* bottom left *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* top right *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* top left *\/\n ],\n },\n\n \/* right *\/\n Face {\n normal: Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n vertices: [\n Vec3 { x: 1.0, y: 0.0, z: 1.0 }, \/* bottom front *\/\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* bottom back *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* top front *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* top back *\/\n ],\n },\n\n \/* left *\/\n Face {\n normal: Vec3 { x: -1.0, y: 0.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* bottom back *\/\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* bottom front *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* top back *\/\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* top front *\/\n ],\n },\n\n \/* top *\/\n Face {\n normal: Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* front left *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* front right *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* back left *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* back right *\/\n ],\n },\n\n \/* bottom *\/\n Face {\n normal: Vec3 { x: 0.0, y: -1.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* back left *\/\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* back right *\/\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* front left *\/\n Vec3 { x: 1.0, y: 0.0, z: 1.0 }, \/* front right *\/\n ],\n },\n];\n<commit_msg>Revert \"hack to optimize meshing\"<commit_after>extern mod extra;\nextern mod glfw;\nextern mod gl;\nextern mod cgmath;\nextern mod noise;\n\nuse std::cast;\nuse std::ptr;\nuse std::hashmap::HashMap;\nuse std;\n\nuse gl::types::*;\n\nuse cgmath::vector::Vector;\nuse cgmath::vector::Vec3;\nuse cgmath::vector::Vec4;\n\nuse noise::Perlin;\n\nuse CHUNK_SIZE;\nuse WORLD_SIZE;\nuse GraphicsResources;\n\nstatic MAX_CHUNKS : uint = 32;\n\npub struct ChunkLoader<'a> {\n seed : u32,\n graphics_resources : &'a GraphicsResources,\n cache : HashMap<(i64, i64), ~Chunk>\n}\n\nimpl<'a> ChunkLoader<'a> {\n pub fn new<'a>(seed : u32, graphics_resources : &'a GraphicsResources) -> ChunkLoader<'a> {\n ChunkLoader {\n seed: seed,\n graphics_resources : graphics_resources,\n cache: HashMap::new(),\n }\n }\n\n pub fn load(&mut self, cx : i64, cz: i64) {\n println!(\"loading chunk ({}, {})\", cx, cz);\n let chunk = chunk_gen(self.graphics_resources, self.seed, cx, cz);\n self.cache.insert((cx, cz), chunk);\n\n while self.cache.len() > MAX_CHUNKS {\n let (&k, _) = self.cache.iter().min_by(|&(_, chunk)| chunk.used_time).unwrap();\n self.cache.remove(&k);\n }\n }\n}\n\npub struct Chunk {\n x: i64,\n z: i64,\n map: ~Map,\n vao: GLuint,\n vertex_buffer: GLuint,\n normal_buffer: GLuint,\n element_buffer: GLuint,\n num_elements: uint,\n used_time: u64,\n}\n\nimpl Chunk {\n pub fn touch(&mut self) {\n self.used_time = extra::time::precise_time_ns();\n }\n}\n\nimpl Drop for Chunk {\n fn drop(&mut self) {\n unsafe {\n println!(\"unloading chunk ({}, {})\", self.x, self.z);\n gl::DeleteBuffers(1, &self.vertex_buffer);\n gl::DeleteBuffers(1, &self.normal_buffer);\n gl::DeleteBuffers(1, &self.element_buffer);\n gl::DeleteVertexArrays(1, &self.vao);\n }\n }\n}\n\nstruct Block {\n visible: bool,\n}\n\nstruct Map {\n blocks: 
[[[Block, ..CHUNK_SIZE], ..CHUNK_SIZE], ..CHUNK_SIZE],\n}\n\nstruct Face {\n normal: Vec3<f32>,\n vertices: [Vec3<f32>, ..4],\n}\n\npub fn chunk_gen(res: &GraphicsResources, seed: u32, chunk_x: i64, chunk_z: i64) -> ~Chunk {\n let def_block = Block { visible: false };\n let mut map = ~Map {\n blocks: [[[def_block, ..CHUNK_SIZE], ..CHUNK_SIZE], ..CHUNK_SIZE],\n };\n\n let block_exists = |x: int, y: int, z: int| -> bool {\n if x < 0 || x >= CHUNK_SIZE as int || y < 0 || y >= CHUNK_SIZE as int || z < 0 || z >= CHUNK_SIZE as int {\n return false;\n }\n\n map.blocks[x][y][z].visible\n };\n\n let start_time = extra::time::precise_time_ns();\n\n let perlin = Perlin::from_seed([seed as uint]);\n\n for block_x in range(0, CHUNK_SIZE) {\n for block_z in range(0, CHUNK_SIZE) {\n let noise = perlin.gen([\n (chunk_x + block_x as i64) as f64 * 0.1,\n (chunk_z + block_z as i64) as f64 * 0.1\n ]);\n let height = ((noise + 1.0) * (CHUNK_SIZE as f64 \/ 8.0)) as uint;\n for y in range(0, height) {\n map.blocks[block_x][y][block_z] = Block { visible: true };\n }\n }\n }\n\n let after_noise_time = extra::time::precise_time_ns();\n\n let mut vertices : ~[Vec3<f32>] = ~[];\n let mut normals : ~[Vec3<f32>] = ~[];\n let mut elements : ~[GLuint] = ~[];\n\n let mut idx = 0;\n\n for x in range(0, CHUNK_SIZE) {\n for y in range(0, CHUNK_SIZE) {\n for z in range(0, CHUNK_SIZE) {\n let block = &map.blocks[x][y][z];\n\n if (!block.visible) {\n continue;\n }\n\n let block_position = Vec3 { x: x as f32, y: y as f32, z: z as f32 };\n\n for face in faces.iter() {\n let neighbor_position = block_position.add_v(&face.normal);\n if block_exists(neighbor_position.x as int, neighbor_position.y as int, neighbor_position.z as int) {\n continue;\n }\n\n for v in face.vertices.iter() {\n vertices.push(v.add_v(&block_position));\n normals.push(face.normal);\n }\n\n for e in face_elements.iter() {\n elements.push((idx * face.vertices.len()) as GLuint + *e);\n }\n\n idx += 1;\n }\n }\n }\n }\n\n let after_mesh_time = extra::time::precise_time_ns();\n\n let mut vao = 0;\n let mut vertex_buffer = 0;\n let mut normal_buffer = 0;\n let mut element_buffer = 0;\n\n unsafe {\n \/\/ Create Vertex Array Object\n gl::GenVertexArrays(1, &mut vao);\n gl::BindVertexArray(vao);\n\n \/\/ Create a Vertex Buffer Object and copy the vertex data to it\n gl::GenBuffers(1, &mut vertex_buffer);\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);\n gl::BufferData(gl::ARRAY_BUFFER,\n (vertices.len() * std::mem::size_of::<Vec3<f32>>()) as GLsizeiptr,\n cast::transmute(&vertices[0]),\n gl::STATIC_DRAW);\n\n \/\/ Create a Vertex Buffer Object and copy the normal data to it\n gl::GenBuffers(1, &mut normal_buffer);\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer);\n gl::BufferData(gl::ARRAY_BUFFER,\n (normals.len() * std::mem::size_of::<Vec3<f32>>()) as GLsizeiptr,\n cast::transmute(&normals[0]),\n gl::STATIC_DRAW);\n\n \/\/ Create a Vertex Buffer Object and copy the element data to it\n gl::GenBuffers(1, &mut element_buffer);\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, element_buffer);\n gl::BufferData(gl::ELEMENT_ARRAY_BUFFER,\n (elements.len() * std::mem::size_of::<GLuint>()) as GLsizeiptr,\n cast::transmute(&elements[0]),\n gl::STATIC_DRAW);\n\n \/\/ Specify the layout of the vertex data\n let vert_attr = \"position\".with_c_str(|ptr| gl::GetAttribLocation(res.program, ptr));\n assert!(vert_attr as u32 != gl::INVALID_VALUE);\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);\n gl::EnableVertexAttribArray(vert_attr as GLuint);\n gl::VertexAttribPointer(vert_attr 
as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n\n let normal_attr = \"normal\".with_c_str(|ptr| gl::GetAttribLocation(res.program, ptr));\n assert!(normal_attr as u32 != gl::INVALID_VALUE);\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer);\n gl::EnableVertexAttribArray(normal_attr as GLuint);\n gl::VertexAttribPointer(normal_attr as GLuint, 3, gl::FLOAT,\n gl::FALSE as GLboolean, 0, ptr::null());\n }\n\n gl::BindVertexArray(0);\n\n let after_buffer_time = extra::time::precise_time_ns();\n\n println!(\"chunk loading profile (us): noise={} mesh={} buffer={}\",\n (after_noise_time - start_time)\/1000,\n (after_mesh_time - after_noise_time)\/1000,\n (after_buffer_time - after_mesh_time)\/1000)\n\n return ~Chunk {\n x: chunk_x,\n z: chunk_z,\n map: map,\n vao: vao,\n vertex_buffer: vertex_buffer,\n normal_buffer: normal_buffer,\n element_buffer: element_buffer,\n num_elements: elements.len(),\n used_time: extra::time::precise_time_ns(),\n };\n}\n\nstatic face_elements : [GLuint, ..6] = [\n 0, 1, 2, 3, 2, 1,\n];\n\nstatic faces : [Face, ..6] = [\n \/* front *\/\n Face {\n normal: Vec3 { x: 0.0, y: 0.0, z: 1.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* bottom left *\/\n Vec3 { x: 1.0, y: 0.0, z: 1.0 }, \/* bottom right *\/\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* top left *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* top right *\/\n ],\n },\n\n \/* back *\/\n Face {\n normal: Vec3 { x: 0.0, y: 0.0, z: -1.0 },\n vertices: [\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* bottom right *\/\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* bottom left *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* top right *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* top left *\/\n ],\n },\n\n \/* right *\/\n Face {\n normal: Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n vertices: [\n Vec3 { x: 1.0, y: 0.0, z: 1.0 }, \/* bottom front *\/\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* bottom back *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* top front *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* top back *\/\n ],\n },\n\n \/* left *\/\n Face {\n normal: Vec3 { x: -1.0, y: 0.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* bottom back *\/\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* bottom front *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* top back *\/\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* top front *\/\n ],\n },\n\n \/* top *\/\n Face {\n normal: Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 1.0, z: 1.0 }, \/* front left *\/\n Vec3 { x: 1.0, y: 1.0, z: 1.0 }, \/* front right *\/\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }, \/* back left *\/\n Vec3 { x: 1.0, y: 1.0, z: 0.0 }, \/* back right *\/\n ],\n },\n\n \/* bottom *\/\n Face {\n normal: Vec3 { x: 0.0, y: -1.0, z: 0.0 },\n vertices: [\n Vec3 { x: 0.0, y: 0.0, z: 0.0 }, \/* back left *\/\n Vec3 { x: 1.0, y: 0.0, z: 0.0 }, \/* back right *\/\n Vec3 { x: 0.0, y: 0.0, z: 1.0 }, \/* front left *\/\n Vec3 { x: 1.0, y: 0.0, z: 1.0 }, \/* front right *\/\n ],\n },\n];\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added an example that reconnects on error using IrcReactor.<commit_after>extern crate irc;\n\nuse std::default::Default;\nuse irc::error;\nuse irc::client::prelude::*;\n\nfn main() {\n let cfg1 = Config {\n nickname: Some(\"pickles\".to_owned()),\n server: Some(\"irc.fyrechat.net\".to_owned()),\n channels: Some(vec![\"#irc-crate\".to_owned()]),\n ..Default::default()\n };\n\n let cfg2 = Config {\n nickname: Some(\"bananas\".to_owned()),\n server: Some(\"irc.fyrechat.net\".to_owned()),\n channels: Some(vec![\"#irc-crate\".to_owned()]),\n 
..Default::default()\n };\n\n let configs = vec![cfg1, cfg2];\n\n let mut reactor = IrcReactor::new().unwrap();\n\n loop {\n let res = configs.iter().fold(Ok(()), |acc, config| {\n acc.and(\n reactor.prepare_server_and_connect(config).and_then(|server| {\n server.identify().and(Ok(server))\n }).and_then(|server| {\n reactor.register_server_with_handler(server, process_msg);\n Ok(())\n })\n )\n }).and_then(|()| reactor.run());\n\n match res {\n Ok(_) => break,\n Err(e) => eprintln!(\"{}\", e),\n }\n }\n}\n\nfn process_msg(server: &IrcServer, message: Message) -> error::Result<()> {\n print!(\"{}\", message);\n match message.command {\n Command::PRIVMSG(ref target, ref msg) => {\n if msg.contains(\"pickles\") {\n server.send_privmsg(target, \"Hi!\")?;\n } else if msg.contains(\"quit\") {\n server.send_quit(\"bye\")?;\n }\n }\n _ => (),\n }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>debuginfo: Add test case for issue #8513.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-win32 Broken because of LLVM bug: http:\/\/llvm.org\/bugs\/show_bug.cgi?id=16249\n\n\/\/ compile-flags:-Z extra-debug-info\n\/\/ debugger:run\n\n\/\/ Nothing to do here really, just make sure it compiles. See issue #8513.\nfn main() {\n let _ = ||();\n}\n\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n }\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n 
self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n let click_time = Duration::realtime();\n if self.selected == i {\n if click_time - self.click_time < 
Duration::new(0, 500 * NANOS_PER_MILLI) {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n self.click_time = Duration::new(0, 0);\n }\n } else {\n self.selected = i;\n self.click_time = click_time;\n }\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n \n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<commit_msg>WIP double click<commit_after>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n }\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if 
string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n let click_time = Duration::realtime();\n if self.selected == i {\n if click_time - self.click_time < Duration::new(0, 500 * NANOS_PER_MILLI) {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n self.click_time = Duration::new(0, 0);\n }\n } else {\n self.selected = i;\n self.click_time = click_time;\n }\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n\n if mouse_event.left_button \n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => 
FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A shareable mutable container for the DOM.\n\nuse dom::bindings::trace::JSTraceable;\nuse js::jsapi::{JSTracer};\n\nuse util::task_state;\nuse util::task_state::{SCRIPT, IN_GC};\n\nuse std::cell::{BorrowState, RefCell, Ref, RefMut};\n\n\/\/\/ A mutable field in the DOM.\n\/\/\/\n\/\/\/ This extends the API of `core::cell::RefCell` to allow unsafe access in\n\/\/\/ certain situations, with dynamic checking in debug builds.\npub struct DOMRefCell<T> {\n value: RefCell<T>,\n}\n\n\/\/ Functionality specific to Servo's `DOMRefCell` type\n\/\/ ===================================================\n\nimpl<T> DOMRefCell<T> {\n \/\/\/ Return a reference to the contents.\n \/\/\/\n \/\/\/ For use in the layout task only.\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {\n debug_assert!(task_state::get().is_layout());\n &*self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Borrow the contents for the purpose of GC tracing.\n \/\/\/\n \/\/\/ This succeeds even if the object is mutably borrowed,\n \/\/\/ so you have to be careful in trace code!\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {\n debug_assert!(task_state::get().contains(SCRIPT | IN_GC));\n &*self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Borrow the contents for the purpose of script deallocation.\n \/\/\/\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_script_deallocation<'a>(&'a self) -> &'a mut T {\n debug_assert!(task_state::get().contains(SCRIPT));\n &mut *self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Is the cell mutably borrowed?\n \/\/\/\n \/\/\/ For safety checks in debug builds only.\n pub fn is_mutably_borrowed(&self) -> bool {\n self.value.borrow_state() == BorrowState::Writing\n }\n\n \/\/\/ Attempts to immutably borrow the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `Ref` exits scope. Multiple\n \/\/\/ immutable borrows can be taken out at the same time.\n \/\/\/\n \/\/\/ Returns `None` if the value is currently mutably borrowed.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {\n debug_assert!(task_state::get().is_script());\n match self.value.borrow_state() {\n BorrowState::Writing => None,\n _ => Some(self.value.borrow()),\n }\n }\n\n \/\/\/ Mutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `RefMut` exits scope. 
The value\n \/\/\/ cannot be borrowed while this borrow is active.\n \/\/\/\n \/\/\/ Returns `None` if the value is currently borrowed.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {\n debug_assert!(task_state::get().is_script());\n match self.value.borrow_state() {\n BorrowState::Unused => Some(self.value.borrow_mut()),\n _ => None,\n }\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for DOMRefCell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n unsafe {\n (*self).borrow_for_gc_trace().trace(trc)\n }\n }\n}\n\n\/\/ Functionality duplicated with `core::cell::RefCell`\n\/\/ ===================================================\nimpl<T> DOMRefCell<T> {\n \/\/\/ Create a new `DOMRefCell` containing `value`.\n pub fn new(value: T) -> DOMRefCell<T> {\n DOMRefCell {\n value: RefCell::new(value),\n }\n }\n\n\n \/\/\/ Immutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `Ref` exits scope. Multiple\n \/\/\/ immutable borrows can be taken out at the same time.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n \/\/\/\n \/\/\/ Panics if the value is currently mutably borrowed.\n pub fn borrow<'a>(&'a self) -> Ref<'a, T> {\n match self.try_borrow() {\n Some(ptr) => ptr,\n None => panic!(\"DOMRefCell<T> already mutably borrowed\")\n }\n }\n\n \/\/\/ Mutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `RefMut` exits scope. The value\n \/\/\/ cannot be borrowed while this borrow is active.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n \/\/\/\n \/\/\/ Panics if the value is currently borrowed.\n pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {\n match self.try_borrow_mut() {\n Some(ptr) => ptr,\n None => panic!(\"DOMRefCell<T> already borrowed\")\n }\n }\n}\n<commit_msg>Utilize Option::expect<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
A shareable mutable container for the DOM.\n\nuse dom::bindings::trace::JSTraceable;\nuse js::jsapi::{JSTracer};\n\nuse util::task_state;\nuse util::task_state::{SCRIPT, IN_GC};\n\nuse std::cell::{BorrowState, RefCell, Ref, RefMut};\n\n\/\/\/ A mutable field in the DOM.\n\/\/\/\n\/\/\/ This extends the API of `core::cell::RefCell` to allow unsafe access in\n\/\/\/ certain situations, with dynamic checking in debug builds.\npub struct DOMRefCell<T> {\n value: RefCell<T>,\n}\n\n\/\/ Functionality specific to Servo's `DOMRefCell` type\n\/\/ ===================================================\n\nimpl<T> DOMRefCell<T> {\n \/\/\/ Return a reference to the contents.\n \/\/\/\n \/\/\/ For use in the layout task only.\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {\n debug_assert!(task_state::get().is_layout());\n &*self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Borrow the contents for the purpose of GC tracing.\n \/\/\/\n \/\/\/ This succeeds even if the object is mutably borrowed,\n \/\/\/ so you have to be careful in trace code!\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {\n debug_assert!(task_state::get().contains(SCRIPT | IN_GC));\n &*self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Borrow the contents for the purpose of script deallocation.\n \/\/\/\n #[allow(unsafe_code)]\n pub unsafe fn borrow_for_script_deallocation<'a>(&'a self) -> &'a mut T {\n debug_assert!(task_state::get().contains(SCRIPT));\n &mut *self.value.as_unsafe_cell().get()\n }\n\n \/\/\/ Is the cell mutably borrowed?\n \/\/\/\n \/\/\/ For safety checks in debug builds only.\n pub fn is_mutably_borrowed(&self) -> bool {\n self.value.borrow_state() == BorrowState::Writing\n }\n\n \/\/\/ Attempts to immutably borrow the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `Ref` exits scope. Multiple\n \/\/\/ immutable borrows can be taken out at the same time.\n \/\/\/\n \/\/\/ Returns `None` if the value is currently mutably borrowed.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {\n debug_assert!(task_state::get().is_script());\n match self.value.borrow_state() {\n BorrowState::Writing => None,\n _ => Some(self.value.borrow()),\n }\n }\n\n \/\/\/ Mutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `RefMut` exits scope. The value\n \/\/\/ cannot be borrowed while this borrow is active.\n \/\/\/\n \/\/\/ Returns `None` if the value is currently borrowed.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {\n debug_assert!(task_state::get().is_script());\n match self.value.borrow_state() {\n BorrowState::Unused => Some(self.value.borrow_mut()),\n _ => None,\n }\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for DOMRefCell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n unsafe {\n (*self).borrow_for_gc_trace().trace(trc)\n }\n }\n}\n\n\/\/ Functionality duplicated with `core::cell::RefCell`\n\/\/ ===================================================\nimpl<T> DOMRefCell<T> {\n \/\/\/ Create a new `DOMRefCell` containing `value`.\n pub fn new(value: T) -> DOMRefCell<T> {\n DOMRefCell {\n value: RefCell::new(value),\n }\n }\n\n\n \/\/\/ Immutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `Ref` exits scope. 
Multiple\n \/\/\/ immutable borrows can be taken out at the same time.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n \/\/\/\n \/\/\/ Panics if the value is currently mutably borrowed.\n pub fn borrow<'a>(&'a self) -> Ref<'a, T> {\n self.try_borrow().expect(\"DOMRefCell<T> already mutably borrowed\")\n }\n\n \/\/\/ Mutably borrows the wrapped value.\n \/\/\/\n \/\/\/ The borrow lasts until the returned `RefMut` exits scope. The value\n \/\/\/ cannot be borrowed while this borrow is active.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics if this is called off the script thread.\n \/\/\/\n \/\/\/ Panics if the value is currently borrowed.\n pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {\n self.try_borrow_mut().expect(\"DOMRefCell<T> already borrowed\")\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>practise rust version<commit_after>fn two_sum(nums: Vec<i32>, target: i32) -> Vec<u32> {\n let len = nums.len();\n for i in 0..len {\n for j in i + 1..len {\n if nums[i] + nums[j] == target {\n return vec![i as u32, j as u32];\n }\n }\n }\n return vec![];\n}\n\nfn main() {\n let testcase0 = vec![2, 7, 11, 15];\n let testcase1 = vec![0, 4, 3, 0];\n let testcase2 = vec![-3, 4, 3, 90];\n\n println!(\"{:?}\", two_sum(testcase0, 9));\n println!(\"{:?}\", two_sum(testcase1, 0));\n println!(\"{:?}\", two_sum(testcase2, 0));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add support for template specialization. Add support for `mod` item without body. Add support for tuple patterns. Fix bug parsing template application with lifetimes. Fix bug parsing single arg item in an enum. Simplify some codes.<commit_after><|endoftext|>"} {"text":"<commit_before>pub use winit::os::unix::x11::{XError, XNotSupported, XConnection};\n\nuse api::glx::ffi;\n\nuse CreationError;\nuse std::{mem, ptr};\nuse std::sync::{Arc};\n\nuse winit;\nuse winit::os::unix::WindowExt;\nuse winit::os::unix::WindowBuilderExt;\nuse winit::os::unix::get_x11_xconnection;\nuse winit::NativeMonitorId;\n\nuse Api;\nuse ContextError;\nuse GlAttributes;\nuse GlContext;\nuse GlRequest;\nuse PixelFormat;\nuse PixelFormatRequirements;\n\nuse std::ffi::CString;\n\nuse api::glx::Context as GlxContext;\nuse api::egl;\nuse api::egl::Context as EglContext;\nuse api::glx::ffi::glx::Glx;\nuse api::egl::ffi::egl::Egl;\nuse api::dlopen;\n\nstruct GlxOrEgl {\n glx: Option<Glx>,\n egl: Option<Egl>,\n}\n\nimpl GlxOrEgl {\n fn new() -> GlxOrEgl {\n \/\/ TODO: use something safer than raw \"dlopen\"\n let glx = {\n let mut libglx = unsafe {\n dlopen::dlopen(b\"libGL.so.1\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n if libglx.is_null() {\n libglx = unsafe {\n dlopen::dlopen(b\"libGL.so\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n }\n if libglx.is_null() {\n None\n } else {\n Some(Glx::load_with(|sym| {\n let sym = CString::new(sym).unwrap();\n unsafe { dlopen::dlsym(libglx, sym.as_ptr()) }\n }))\n }\n };\n \/\/ TODO: use something safer than raw \"dlopen\"\n let egl = {\n let mut libegl = unsafe {\n dlopen::dlopen(b\"libEGL.so.1\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n if libegl.is_null() {\n libegl = unsafe {\n dlopen::dlopen(b\"libEGL.so\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n }\n if libegl.is_null() {\n None\n } else {\n Some(Egl::load_with(|sym| {\n let sym = CString::new(sym).unwrap();\n unsafe { dlopen::dlsym(libegl, sym.as_ptr()) }\n }))\n }\n };\n GlxOrEgl {\n glx: glx,\n egl: egl,\n }\n }\n}\n\nenum Context {\n Glx(GlxContext),\n 
Egl(EglContext),\n None,\n}\n\npub struct Window {\n display: Arc<XConnection>,\n colormap: ffi::Colormap,\n context: Context,\n}\n\nunsafe impl Send for Window {}\nunsafe impl Sync for Window {}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n \/\/ we don't call MakeCurrent(0, 0) because we are not sure that the context\n \/\/ is still the current one\n self.context = Context::None;\n\n (self.display.xlib.XFreeColormap)(self.display.display, self.colormap);\n }\n }\n}\n\nimpl Window {\n pub fn new(\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>,\n winit_builder: winit::WindowBuilder,\n ) -> Result<(Window, winit::Window), CreationError> {\n let display = get_x11_xconnection().unwrap();\n let screen_id = match winit_builder.window.monitor {\n Some(ref m) => match m.get_native_identifier() {\n NativeMonitorId::Numeric(monitor) => monitor as i32,\n _ => panic!(),\n },\n _ => unsafe { (display.xlib.XDefaultScreen)(display.display) },\n };\n\n \/\/ start the context building process\n enum Prototype<'a> {\n Glx(::api::glx::ContextPrototype<'a>),\n Egl(::api::egl::ContextPrototype<'a>),\n }\n let builder_clone_opengl_glx = opengl.clone().map_sharing(|_| unimplemented!()); \/\/ FIXME:\n let builder_clone_opengl_egl = opengl.clone().map_sharing(|_| unimplemented!()); \/\/ FIXME:\n let backend = GlxOrEgl::new();\n let context = match opengl.version {\n GlRequest::Latest | GlRequest::Specific(Api::OpenGl, _) | GlRequest::GlThenGles { .. } => {\n \/\/ GLX should be preferred over EGL, otherwise crashes may occur\n \/\/ on X11 – issue #314\n if let Some(ref glx) = backend.glx {\n Prototype::Glx(try!(GlxContext::new(\n glx.clone(),\n &display.xlib,\n pf_reqs,\n &builder_clone_opengl_glx,\n display.display,\n screen_id,\n )))\n } else if let Some(ref egl) = backend.egl {\n Prototype::Egl(try!(EglContext::new(\n egl.clone(),\n pf_reqs,\n &builder_clone_opengl_egl,\n egl::NativeDisplay::X11(Some(display.display as *const _)),\n )))\n } else {\n return Err(CreationError::NotSupported);\n }\n },\n GlRequest::Specific(Api::OpenGlEs, _) => {\n if let Some(ref egl) = backend.egl {\n Prototype::Egl(try!(EglContext::new(\n egl.clone(),\n pf_reqs,\n &builder_clone_opengl_egl,\n egl::NativeDisplay::X11(Some(display.display as *const _)),\n )))\n } else {\n return Err(CreationError::NotSupported);\n }\n },\n GlRequest::Specific(_, _) => {\n return Err(CreationError::NotSupported);\n },\n };\n\n \/\/ getting the `visual_infos` (a struct that contains information about the visual to use)\n let visual_infos = match context {\n Prototype::Glx(ref p) => p.get_visual_infos().clone(),\n Prototype::Egl(ref p) => {\n unsafe {\n let mut template: ffi::XVisualInfo = mem::zeroed();\n template.visualid = p.get_native_visual_id() as ffi::VisualID;\n\n let mut num_visuals = 0;\n let vi = (display.xlib.XGetVisualInfo)(display.display, ffi::VisualIDMask,\n &mut template, &mut num_visuals);\n display.check_errors().expect(\"Failed to call XGetVisualInfo\");\n assert!(!vi.is_null());\n assert!(num_visuals == 1);\n\n let vi_copy = ptr::read(vi as *const _);\n (display.xlib.XFree)(vi as *mut _);\n vi_copy\n }\n },\n };\n\n let winit_window = winit_builder\n .with_x11_visual(&visual_infos as *const _)\n .with_x11_screen(screen_id)\n .build().unwrap();\n\n let xlib_window = winit_window.get_xlib_window().unwrap();\n \/\/ finish creating the OpenGL context\n let context = match context {\n Prototype::Glx(ctxt) => {\n Context::Glx(try!(ctxt.finish(xlib_window as _)))\n },\n Prototype::Egl(ctxt) => {\n 
Context::Egl(try!(ctxt.finish(xlib_window)))\n },\n };\n\n \/\/ getting the root window\n let root = unsafe { (display.xlib.XDefaultRootWindow)(display.display) };\n display.check_errors().expect(\"Failed to get root window\");\n\n \/\/ creating the color map\n let cmap = unsafe {\n let cmap = (display.xlib.XCreateColormap)(display.display, root,\n visual_infos.visual as *mut _,\n ffi::AllocNone);\n display.check_errors().expect(\"Failed to call XCreateColormap\");\n cmap\n };\n\n Ok((Window {\n display: display.clone(),\n context: context,\n colormap: cmap,\n },\n winit_window))\n }\n}\n\nimpl GlContext for Window {\n #[inline]\n unsafe fn make_current(&self) -> Result<(), ContextError> {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.make_current(),\n Context::Egl(ref ctxt) => ctxt.make_current(),\n Context::None => Ok(())\n }\n }\n\n #[inline]\n fn is_current(&self) -> bool {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.is_current(),\n Context::Egl(ref ctxt) => ctxt.is_current(),\n Context::None => panic!()\n }\n }\n\n #[inline]\n fn get_proc_address(&self, addr: &str) -> *const () {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_proc_address(addr),\n Context::Egl(ref ctxt) => ctxt.get_proc_address(addr),\n Context::None => ptr::null()\n }\n }\n\n #[inline]\n fn swap_buffers(&self) -> Result<(), ContextError> {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.swap_buffers(),\n Context::Egl(ref ctxt) => ctxt.swap_buffers(),\n Context::None => Ok(())\n }\n }\n\n #[inline]\n fn get_api(&self) -> Api {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_api(),\n Context::Egl(ref ctxt) => ctxt.get_api(),\n Context::None => panic!()\n }\n }\n\n #[inline]\n fn get_pixel_format(&self) -> PixelFormat {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_pixel_format(),\n Context::Egl(ref ctxt) => ctxt.get_pixel_format(),\n Context::None => panic!()\n }\n }\n}\n<commit_msg>Replace unwrap call with proper error handling<commit_after>pub use winit::os::unix::x11::{XError, XNotSupported, XConnection};\n\nuse api::glx::ffi;\n\nuse CreationError;\nuse std::{mem, ptr, fmt, error};\nuse std::sync::{Arc};\n\nuse winit;\nuse winit::os::unix::WindowExt;\nuse winit::os::unix::WindowBuilderExt;\nuse winit::os::unix::get_x11_xconnection;\nuse winit::NativeMonitorId;\n\nuse Api;\nuse ContextError;\nuse GlAttributes;\nuse GlContext;\nuse GlRequest;\nuse PixelFormat;\nuse PixelFormatRequirements;\n\nuse std::ffi::CString;\n\nuse api::glx::Context as GlxContext;\nuse api::egl;\nuse api::egl::Context as EglContext;\nuse api::glx::ffi::glx::Glx;\nuse api::egl::ffi::egl::Egl;\nuse api::dlopen;\n\n#[derive(Debug)]\nstruct NoX11Connection;\n\nimpl error::Error for NoX11Connection {\n fn description(&self) -> &str {\n \"failed to get x11 connection\"\n }\n}\n\nimpl fmt::Display for NoX11Connection {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.write_str(error::Error::description(self))\n }\n}\n\nstruct GlxOrEgl {\n glx: Option<Glx>,\n egl: Option<Egl>,\n}\n\nimpl GlxOrEgl {\n fn new() -> GlxOrEgl {\n \/\/ TODO: use something safer than raw \"dlopen\"\n let glx = {\n let mut libglx = unsafe {\n dlopen::dlopen(b\"libGL.so.1\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n if libglx.is_null() {\n libglx = unsafe {\n dlopen::dlopen(b\"libGL.so\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n }\n if libglx.is_null() {\n None\n } else {\n Some(Glx::load_with(|sym| {\n let sym = CString::new(sym).unwrap();\n unsafe { dlopen::dlsym(libglx, 
sym.as_ptr()) }\n }))\n }\n };\n \/\/ TODO: use something safer than raw \"dlopen\"\n let egl = {\n let mut libegl = unsafe {\n dlopen::dlopen(b\"libEGL.so.1\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n if libegl.is_null() {\n libegl = unsafe {\n dlopen::dlopen(b\"libEGL.so\\0\".as_ptr() as *const _, dlopen::RTLD_NOW)\n };\n }\n if libegl.is_null() {\n None\n } else {\n Some(Egl::load_with(|sym| {\n let sym = CString::new(sym).unwrap();\n unsafe { dlopen::dlsym(libegl, sym.as_ptr()) }\n }))\n }\n };\n GlxOrEgl {\n glx: glx,\n egl: egl,\n }\n }\n}\n\nenum Context {\n Glx(GlxContext),\n Egl(EglContext),\n None,\n}\n\npub struct Window {\n display: Arc<XConnection>,\n colormap: ffi::Colormap,\n context: Context,\n}\n\nunsafe impl Send for Window {}\nunsafe impl Sync for Window {}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n \/\/ we don't call MakeCurrent(0, 0) because we are not sure that the context\n \/\/ is still the current one\n self.context = Context::None;\n\n (self.display.xlib.XFreeColormap)(self.display.display, self.colormap);\n }\n }\n}\n\nimpl Window {\n pub fn new(\n pf_reqs: &PixelFormatRequirements,\n opengl: &GlAttributes<&Window>,\n winit_builder: winit::WindowBuilder,\n ) -> Result<(Window, winit::Window), CreationError> {\n let display = match get_x11_xconnection() {\n Some(display) => display,\n None => return Err(CreationError::NoBackendAvailable(Box::new(NoX11Connection))),\n };\n let screen_id = match winit_builder.window.monitor {\n Some(ref m) => match m.get_native_identifier() {\n NativeMonitorId::Numeric(monitor) => monitor as i32,\n _ => panic!(),\n },\n _ => unsafe { (display.xlib.XDefaultScreen)(display.display) },\n };\n\n \/\/ start the context building process\n enum Prototype<'a> {\n Glx(::api::glx::ContextPrototype<'a>),\n Egl(::api::egl::ContextPrototype<'a>),\n }\n let builder_clone_opengl_glx = opengl.clone().map_sharing(|_| unimplemented!()); \/\/ FIXME:\n let builder_clone_opengl_egl = opengl.clone().map_sharing(|_| unimplemented!()); \/\/ FIXME:\n let backend = GlxOrEgl::new();\n let context = match opengl.version {\n GlRequest::Latest | GlRequest::Specific(Api::OpenGl, _) | GlRequest::GlThenGles { .. 
} => {\n \/\/ GLX should be preferred over EGL, otherwise crashes may occur\n \/\/ on X11 – issue #314\n if let Some(ref glx) = backend.glx {\n Prototype::Glx(try!(GlxContext::new(\n glx.clone(),\n &display.xlib,\n pf_reqs,\n &builder_clone_opengl_glx,\n display.display,\n screen_id,\n )))\n } else if let Some(ref egl) = backend.egl {\n Prototype::Egl(try!(EglContext::new(\n egl.clone(),\n pf_reqs,\n &builder_clone_opengl_egl,\n egl::NativeDisplay::X11(Some(display.display as *const _)),\n )))\n } else {\n return Err(CreationError::NotSupported);\n }\n },\n GlRequest::Specific(Api::OpenGlEs, _) => {\n if let Some(ref egl) = backend.egl {\n Prototype::Egl(try!(EglContext::new(\n egl.clone(),\n pf_reqs,\n &builder_clone_opengl_egl,\n egl::NativeDisplay::X11(Some(display.display as *const _)),\n )))\n } else {\n return Err(CreationError::NotSupported);\n }\n },\n GlRequest::Specific(_, _) => {\n return Err(CreationError::NotSupported);\n },\n };\n\n \/\/ getting the `visual_infos` (a struct that contains information about the visual to use)\n let visual_infos = match context {\n Prototype::Glx(ref p) => p.get_visual_infos().clone(),\n Prototype::Egl(ref p) => {\n unsafe {\n let mut template: ffi::XVisualInfo = mem::zeroed();\n template.visualid = p.get_native_visual_id() as ffi::VisualID;\n\n let mut num_visuals = 0;\n let vi = (display.xlib.XGetVisualInfo)(display.display, ffi::VisualIDMask,\n &mut template, &mut num_visuals);\n display.check_errors().expect(\"Failed to call XGetVisualInfo\");\n assert!(!vi.is_null());\n assert!(num_visuals == 1);\n\n let vi_copy = ptr::read(vi as *const _);\n (display.xlib.XFree)(vi as *mut _);\n vi_copy\n }\n },\n };\n\n let winit_window = winit_builder\n .with_x11_visual(&visual_infos as *const _)\n .with_x11_screen(screen_id)\n .build().unwrap();\n\n let xlib_window = winit_window.get_xlib_window().unwrap();\n \/\/ finish creating the OpenGL context\n let context = match context {\n Prototype::Glx(ctxt) => {\n Context::Glx(try!(ctxt.finish(xlib_window as _)))\n },\n Prototype::Egl(ctxt) => {\n Context::Egl(try!(ctxt.finish(xlib_window)))\n },\n };\n\n \/\/ getting the root window\n let root = unsafe { (display.xlib.XDefaultRootWindow)(display.display) };\n display.check_errors().expect(\"Failed to get root window\");\n\n \/\/ creating the color map\n let cmap = unsafe {\n let cmap = (display.xlib.XCreateColormap)(display.display, root,\n visual_infos.visual as *mut _,\n ffi::AllocNone);\n display.check_errors().expect(\"Failed to call XCreateColormap\");\n cmap\n };\n\n Ok((Window {\n display: display.clone(),\n context: context,\n colormap: cmap,\n },\n winit_window))\n }\n}\n\nimpl GlContext for Window {\n #[inline]\n unsafe fn make_current(&self) -> Result<(), ContextError> {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.make_current(),\n Context::Egl(ref ctxt) => ctxt.make_current(),\n Context::None => Ok(())\n }\n }\n\n #[inline]\n fn is_current(&self) -> bool {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.is_current(),\n Context::Egl(ref ctxt) => ctxt.is_current(),\n Context::None => panic!()\n }\n }\n\n #[inline]\n fn get_proc_address(&self, addr: &str) -> *const () {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_proc_address(addr),\n Context::Egl(ref ctxt) => ctxt.get_proc_address(addr),\n Context::None => ptr::null()\n }\n }\n\n #[inline]\n fn swap_buffers(&self) -> Result<(), ContextError> {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.swap_buffers(),\n Context::Egl(ref ctxt) => 
ctxt.swap_buffers(),\n Context::None => Ok(())\n }\n }\n\n #[inline]\n fn get_api(&self) -> Api {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_api(),\n Context::Egl(ref ctxt) => ctxt.get_api(),\n Context::None => panic!()\n }\n }\n\n #[inline]\n fn get_pixel_format(&self) -> PixelFormat {\n match self.context {\n Context::Glx(ref ctxt) => ctxt.get_pixel_format(),\n Context::Egl(ref ctxt) => ctxt.get_pixel_format(),\n Context::None => panic!()\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue #81193<commit_after>\/\/ check-pass\n\n#![feature(associated_type_bounds)]\n\ntrait A<'a, 'b> {}\n\ntrait B<'a, 'b, 'c> {}\n\nfn err<'u, 'a, F>()\nwhere\n for<'b> F: Iterator<Item: for<'c> B<'a, 'b, 'c> + for<'c> A<'a, 'c>>,\n{\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add manual page to seq, -h and --help options.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor to not call exit() anywhere<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>:hammer: Fix build warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>git work<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\nextern crate toml;\nextern crate toml_query;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\nuse std::collections::BTreeMap;\n\nuse walkdir::WalkDir;\nuse clap::{Arg, ArgMatches, AppSettings, SubCommand};\nuse toml::Value;\nuse toml_query::read::TomlValueReadExt;\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\n\/\/\/ Returns the helptext, putting the Strings in cmds as possible\n\/\/\/ subcommands into it\nfn help_text(cmds: Vec<String>) -> String {\n format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". 
Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#,\n imagbins = cmds\n .into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }))\n}\n\n\/\/\/ Returns the list of imag-* executables found in $PATH\nfn get_commands() -> Vec<String> {\n match env::var(\"PATH\") {\n Err(e) => {\n println!(\"PATH error: {:?}\", e);\n exit(1);\n },\n\n Ok(path) => path\n .split(\":\")\n .flat_map(|elem| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| match *path {\n Ok(ref p) => p.file_name().to_str().map_or(false, |f| f.starts_with(\"imag-\")),\n Err(_) => false,\n })\n .filter_map(Result::ok)\n .filter_map(|path| path\n .file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n )\n })\n .collect()\n }\n}\n\n\nfn main() {\n \/\/ Initialize the Runtime and build the CLI\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands.clone());\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands, AppSettings::ArgRequiredElseHelp])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n .unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. 
Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n \/\/ Begin checking for arguments\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n commands\n .iter()\n .map(|command| {\n match Command::new(format!(\"imag-{}\", command))\n .arg(\"--version\")\n .output()\n .map(|v| v.stdout)\n {\n Ok(s) => match String::from_utf8(s) {\n Ok(s) => format!(\"{:10} -> {}\", command, s),\n Err(e) => format!(\"UTF8 Error while working with output of imag{}: {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling imag-{} -> {:?}\", command, e),\n }\n })\n .fold((), |_, line| {\n \/\/ The amount of newlines may differ depending on the subprocess\n println!(\"{}\", line.trim());\n });\n\n exit(0);\n }\n\n let aliases = match fetch_aliases(&rt) {\n Ok(aliases) => aliases,\n Err(e) => {\n println!(\"Error while fetching aliases from configuration file\");\n debug!(\"Error = {:?}\", e);\n println!(\"Aborting\");\n exit(1);\n }\n };\n\n \/\/ Matches any subcommand given\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n \/\/ Get all given arguments and further subcommands to pass to\n \/\/ the imag-<> binary\n \/\/ Providing no arguments is OK, and is therefore ignored here\n let mut subcommand_args : Vec<String> = match scmd.values_of(\"\") {\n Some(values) => values.map(String::from).collect(),\n None => Vec::new()\n };\n\n forward_commandline_arguments(&matches, &mut subcommand_args);\n\n let subcommand = String::from(subcommand);\n let subcommand = aliases.get(&subcommand).cloned().unwrap_or(subcommand);\n\n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n \/\/ Create a Command, and pass it the gathered arguments\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut c| c.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(1));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(1);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1);\n }\n }\n }\n }\n },\n \/\/ Calling for example 'imag --versions' will lead here, as this option does not exit.\n \/\/ There's nothing to do in such a case\n _ => {},\n }\n}\n\nfn fetch_aliases(rt: &Runtime) -> Result<BTreeMap<String, String>, String> {\n let cfg = try!(rt.config().ok_or_else(|| String::from(\"No configuration found\")));\n let value = cfg\n .config()\n .read(\"imag.aliases\")\n .map_err(|_| String::from(\"Reading from config failed\"));\n\n match try!(value) {\n None => Ok(BTreeMap::new()),\n Some(&Value::Table(ref tbl)) => {\n let mut alias_mappings = BTreeMap::new();\n\n for (k, v) in tbl {\n match v {\n &Value::String(ref alias) => {\n alias_mappings.insert(alias.clone(), k.clone());\n },\n &Value::Array(ref aliases) => {\n 
for alias in aliases {\n match alias {\n &Value::String(ref s) => {\n alias_mappings.insert(s.clone(), k.clone());\n },\n _ => {\n let e = format!(\"Not all values are a String in 'imag.aliases.{}'\", k);\n return Err(e);\n }\n }\n }\n },\n\n _ => {\n let msg = format!(\"Type Error: 'imag.aliases.{}' is not a table or string\", k);\n return Err(msg);\n },\n }\n }\n\n Ok(alias_mappings)\n },\n\n Some(_) => Err(String::from(\"Type Error: 'imag.aliases' is not a table\")),\n }\n}\n\nfn forward_commandline_arguments(m: &ArgMatches, scmd: &mut Vec<String>) {\n let push = |flag: Option<&str>, val_name: &str, m: &ArgMatches, v: &mut Vec<String>| {\n let _ = m\n .value_of(val_name)\n .map(|val| {\n let flag = format!(\"--{}\", flag.unwrap_or(val_name));\n v.insert(0, String::from(val));\n v.insert(0, flag);\n });\n };\n\n push(Some(\"verbose\"),\n Runtime::arg_verbosity_name(), m , scmd);\n\n push(Some(\"debug\"),\n Runtime::arg_debugging_name(), m , scmd);\n\n push(Some(\"no-color\"),\n Runtime::arg_no_color_output_name(), m , scmd);\n\n push(Some(\"config\"),\n Runtime::arg_config_name(), m , scmd);\n\n push(Some(\"override-config\"),\n Runtime::arg_config_override_name(), m , scmd);\n\n push(Some(\"rtp\"),\n Runtime::arg_runtimepath_name(), m , scmd);\n\n push(Some(\"store\"),\n Runtime::arg_storepath_name(), m , scmd);\n\n push(Some(\"editor\"),\n Runtime::arg_editor_name(), m , scmd);\n\n push(Some(\"generate-commandline-completion\"),\n Runtime::arg_generate_compl(), m , scmd);\n\n push(None , Runtime::arg_logdest_name() , m , scmd);\n push(None , Runtime::arg_override_module_logging_setting_name() , m , scmd);\n push(None , Runtime::arg_override_trace_logging_format() , m , scmd);\n push(None , Runtime::arg_override_debug_logging_format() , m , scmd);\n push(None , Runtime::arg_override_info_logging_format() , m , scmd);\n push(None , Runtime::arg_override_warn_logging_format() , m , scmd);\n push(None , Runtime::arg_override_error_logging_format() , m , scmd);\n\n\n}\n\n<commit_msg>Remove override passing in imag binary command<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\nextern crate toml;\nextern crate toml_query;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\nuse std::collections::BTreeMap;\n\nuse walkdir::WalkDir;\nuse clap::{Arg, ArgMatches, AppSettings, SubCommand};\nuse toml::Value;\nuse toml_query::read::TomlValueReadExt;\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\n\/\/\/ Returns the helptext, putting the Strings in cmds as possible\n\/\/\/ subcommands into it\nfn help_text(cmds: Vec<String>) -> String {\n format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. 
It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#,\n imagbins = cmds\n .into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }))\n}\n\n\/\/\/ Returns the list of imag-* executables found in $PATH\nfn get_commands() -> Vec<String> {\n match env::var(\"PATH\") {\n Err(e) => {\n println!(\"PATH error: {:?}\", e);\n exit(1);\n },\n\n Ok(path) => path\n .split(\":\")\n .flat_map(|elem| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| match *path {\n Ok(ref p) => p.file_name().to_str().map_or(false, |f| f.starts_with(\"imag-\")),\n Err(_) => false,\n })\n .filter_map(Result::ok)\n .filter_map(|path| path\n .file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n )\n })\n .collect()\n }\n}\n\n\nfn main() {\n \/\/ Initialize the Runtime and build the CLI\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands.clone());\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands, AppSettings::ArgRequiredElseHelp])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n .unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. 
Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n \/\/ Begin checking for arguments\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n commands\n .iter()\n .map(|command| {\n match Command::new(format!(\"imag-{}\", command))\n .arg(\"--version\")\n .output()\n .map(|v| v.stdout)\n {\n Ok(s) => match String::from_utf8(s) {\n Ok(s) => format!(\"{:10} -> {}\", command, s),\n Err(e) => format!(\"UTF8 Error while working with output of imag{}: {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling imag-{} -> {:?}\", command, e),\n }\n })\n .fold((), |_, line| {\n \/\/ The amount of newlines may differ depending on the subprocess\n println!(\"{}\", line.trim());\n });\n\n exit(0);\n }\n\n let aliases = match fetch_aliases(&rt) {\n Ok(aliases) => aliases,\n Err(e) => {\n println!(\"Error while fetching aliases from configuration file\");\n debug!(\"Error = {:?}\", e);\n println!(\"Aborting\");\n exit(1);\n }\n };\n\n \/\/ Matches any subcommand given\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n \/\/ Get all given arguments and further subcommands to pass to\n \/\/ the imag-<> binary\n \/\/ Providing no arguments is OK, and is therefore ignored here\n let mut subcommand_args : Vec<String> = match scmd.values_of(\"\") {\n Some(values) => values.map(String::from).collect(),\n None => Vec::new()\n };\n\n forward_commandline_arguments(&matches, &mut subcommand_args);\n\n let subcommand = String::from(subcommand);\n let subcommand = aliases.get(&subcommand).cloned().unwrap_or(subcommand);\n\n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n \/\/ Create a Command, and pass it the gathered arguments\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut c| c.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(1));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(1);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1);\n }\n }\n }\n }\n },\n \/\/ Calling for example 'imag --versions' will lead here, as this option does not exit.\n \/\/ There's nothing to do in such a case\n _ => {},\n }\n}\n\nfn fetch_aliases(rt: &Runtime) -> Result<BTreeMap<String, String>, String> {\n let cfg = try!(rt.config().ok_or_else(|| String::from(\"No configuration found\")));\n let value = cfg\n .config()\n .read(\"imag.aliases\")\n .map_err(|_| String::from(\"Reading from config failed\"));\n\n match try!(value) {\n None => Ok(BTreeMap::new()),\n Some(&Value::Table(ref tbl)) => {\n let mut alias_mappings = BTreeMap::new();\n\n for (k, v) in tbl {\n match v {\n &Value::String(ref alias) => {\n alias_mappings.insert(alias.clone(), k.clone());\n },\n &Value::Array(ref aliases) => {\n 
for alias in aliases {\n match alias {\n &Value::String(ref s) => {\n alias_mappings.insert(s.clone(), k.clone());\n },\n _ => {\n let e = format!(\"Not all values are a String in 'imag.aliases.{}'\", k);\n return Err(e);\n }\n }\n }\n },\n\n _ => {\n let msg = format!(\"Type Error: 'imag.aliases.{}' is not a table or string\", k);\n return Err(msg);\n },\n }\n }\n\n Ok(alias_mappings)\n },\n\n Some(_) => Err(String::from(\"Type Error: 'imag.aliases' is not a table\")),\n }\n}\n\nfn forward_commandline_arguments(m: &ArgMatches, scmd: &mut Vec<String>) {\n let push = |flag: Option<&str>, val_name: &str, m: &ArgMatches, v: &mut Vec<String>| {\n let _ = m\n .value_of(val_name)\n .map(|val| {\n let flag = format!(\"--{}\", flag.unwrap_or(val_name));\n v.insert(0, String::from(val));\n v.insert(0, flag);\n });\n };\n\n push(Some(\"verbose\"),\n Runtime::arg_verbosity_name(), m , scmd);\n\n push(Some(\"debug\"),\n Runtime::arg_debugging_name(), m , scmd);\n\n push(Some(\"no-color\"),\n Runtime::arg_no_color_output_name(), m , scmd);\n\n push(Some(\"config\"),\n Runtime::arg_config_name(), m , scmd);\n\n push(Some(\"override-config\"),\n Runtime::arg_config_override_name(), m , scmd);\n\n push(Some(\"rtp\"),\n Runtime::arg_runtimepath_name(), m , scmd);\n\n push(Some(\"store\"),\n Runtime::arg_storepath_name(), m , scmd);\n\n push(Some(\"editor\"),\n Runtime::arg_editor_name(), m , scmd);\n\n push(Some(\"generate-commandline-completion\"),\n Runtime::arg_generate_compl(), m , scmd);\n\n push(None , Runtime::arg_logdest_name() , m , scmd);\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n\/\/\/ Reference-counted pointers to flows.\n\/\/\/\n\/\/\/ Eventually, with dynamically sized types in Rust, much of this code will be superfluous.\n\nuse flow::Flow;\nuse flow;\n\nuse std::mem;\nuse std::ops::{Deref, DerefMut};\nuse std::ptr;\nuse std::raw;\nuse std::sync::atomic::Ordering;\n\n#[unsafe_no_drop_flag]\npub struct FlowRef {\n object: raw::TraitObject,\n}\n\nunsafe impl Send for FlowRef {}\nunsafe impl Sync for FlowRef {}\n\nimpl FlowRef {\n pub fn new(mut flow: Box<Flow>) -> FlowRef {\n unsafe {\n let result = {\n let flow_ref: &mut Flow = &mut *flow;\n let object = mem::transmute::<&mut Flow, raw::TraitObject>(flow_ref);\n FlowRef { object: object }\n };\n mem::forget(flow);\n result\n }\n }\n}\n\nimpl<'a> Deref for FlowRef {\n type Target = Flow + 'a;\n fn deref(&self) -> &(Flow + 'a) {\n unsafe {\n mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)\n }\n }\n}\n\nimpl DerefMut for FlowRef {\n fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {\n unsafe {\n mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)\n }\n }\n}\n\nimpl Drop for FlowRef {\n fn drop(&mut self) {\n unsafe {\n if self.object.vtable.is_null() {\n return\n }\n if flow::base(&**self).ref_count().fetch_sub(1, Ordering::SeqCst) > 1 {\n return\n }\n let flow_ref: FlowRef = mem::replace(self, FlowRef {\n object: raw::TraitObject {\n vtable: ptr::null_mut(),\n data: ptr::null_mut(),\n }\n });\n drop(mem::transmute::<raw::TraitObject, Box<Flow>>(flow_ref.object));\n mem::forget(flow_ref);\n self.object.vtable = ptr::null_mut();\n self.object.data = ptr::null_mut();\n }\n }\n}\n\nimpl Clone for FlowRef {\n fn clone(&self) -> FlowRef {\n unsafe {\n drop(flow::base(&**self).ref_count().fetch_add(1, Ordering::SeqCst));\n FlowRef {\n object: raw::TraitObject {\n vtable: self.object.vtable,\n data: self.object.data,\n }\n }\n }\n }\n}\n<commit_msg>Fix the documentation comment syntax in flow_ref.rs.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Reference-counted pointers to flows.\n\/\/!\n\/\/! 
Eventually, with dynamically sized types in Rust, much of this code will be superfluous.\n\nuse flow::Flow;\nuse flow;\n\nuse std::mem;\nuse std::ops::{Deref, DerefMut};\nuse std::ptr;\nuse std::raw;\nuse std::sync::atomic::Ordering;\n\n#[unsafe_no_drop_flag]\npub struct FlowRef {\n object: raw::TraitObject,\n}\n\nunsafe impl Send for FlowRef {}\nunsafe impl Sync for FlowRef {}\n\nimpl FlowRef {\n pub fn new(mut flow: Box<Flow>) -> FlowRef {\n unsafe {\n let result = {\n let flow_ref: &mut Flow = &mut *flow;\n let object = mem::transmute::<&mut Flow, raw::TraitObject>(flow_ref);\n FlowRef { object: object }\n };\n mem::forget(flow);\n result\n }\n }\n}\n\nimpl<'a> Deref for FlowRef {\n type Target = Flow + 'a;\n fn deref(&self) -> &(Flow + 'a) {\n unsafe {\n mem::transmute_copy::<raw::TraitObject, &(Flow + 'a)>(&self.object)\n }\n }\n}\n\nimpl DerefMut for FlowRef {\n fn deref_mut<'a>(&mut self) -> &mut (Flow + 'a) {\n unsafe {\n mem::transmute_copy::<raw::TraitObject, &mut (Flow + 'a)>(&self.object)\n }\n }\n}\n\nimpl Drop for FlowRef {\n fn drop(&mut self) {\n unsafe {\n if self.object.vtable.is_null() {\n return\n }\n if flow::base(&**self).ref_count().fetch_sub(1, Ordering::SeqCst) > 1 {\n return\n }\n let flow_ref: FlowRef = mem::replace(self, FlowRef {\n object: raw::TraitObject {\n vtable: ptr::null_mut(),\n data: ptr::null_mut(),\n }\n });\n drop(mem::transmute::<raw::TraitObject, Box<Flow>>(flow_ref.object));\n mem::forget(flow_ref);\n self.object.vtable = ptr::null_mut();\n self.object.data = ptr::null_mut();\n }\n }\n}\n\nimpl Clone for FlowRef {\n fn clone(&self) -> FlowRef {\n unsafe {\n drop(flow::base(&**self).ref_count().fetch_add(1, Ordering::SeqCst));\n FlowRef {\n object: raw::TraitObject {\n vtable: self.object.vtable,\n data: self.object.data,\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Smoke test<commit_after>extern crate bytes;\nextern crate regex;\nextern crate futures;\nextern crate httpbis;\nextern crate tokio_core;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\n\nmod test_misc;\n\nuse futures::future;\nuse futures::future::Future;\n\nuse httpbis::Client;\nuse test_misc::*;\n\n#[test]\nfn smoke() {\n env_logger::init().ok();\n\n let server = ServerTest::new();\n\n let client: Client =\n Client::new_plain(\"::1\", server.port, Default::default()).expect(\"client\");\n\n let mut futures = Vec::new();\n for _ in 0..10 {\n futures.push(client.start_get(\"\/blocks\/200000\/5\", \"localhost\").collect());\n }\n\n let r = future::join_all(futures).wait().expect(\"wait\");\n for rr in r {\n assert_eq!(200000 * 5, rr.body.len());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add offline_and_locked_and_no_frozen test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>random test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use Windows 10 instead of 7 in user agent string<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: Killer Sudoku.<commit_after>\/\/! Killer Sudoku.\n\/\/!\n\/\/! 
https:\/\/en.wikipedia.org\/wiki\/Killer_sudoku\n\nextern crate puzzle_solver;\n\nuse puzzle_solver::{LinExpr,Puzzle,Solution,Val,VarToken};\n\nconst SQRT_SIZE: usize = 3;\nconst SIZE: usize = 9;\ntype Board = [[Val; SIZE]; SIZE];\ntype Point = (usize, usize);\n\nfn make_killer_sudoku(board: &[(Val, Vec<Point>)]) -> (Puzzle, Vec<Vec<VarToken>>) {\n let mut sys = Puzzle::new();\n let vars = sys.new_vars_with_candidates_2d(SIZE, SIZE, &[1,2,3,4,5,6,7,8,9]);\n\n for y in 0..SIZE {\n sys.all_different(&vars[y]);\n }\n\n for x in 0..SIZE {\n sys.all_different(vars.iter().map(|row| &row[x]));\n }\n\n for block in 0..SIZE {\n let x0 = SQRT_SIZE * (block % SQRT_SIZE);\n let y0 = SQRT_SIZE * (block \/ SQRT_SIZE);\n sys.all_different((0..SIZE).map(|n|\n &vars[y0 + (n \/ SQRT_SIZE)][x0 + (n % SQRT_SIZE)]));\n }\n\n for &(total, ref points) in board.iter() {\n sys.equals(total, points.iter().fold(LinExpr::from(0), |sum, &(x,y)| sum + vars[y][x]));\n }\n\n (sys, vars)\n}\n\nfn print_sudoku(dict: &Solution, vars: &Vec<Vec<VarToken>>) {\n for y in 0..SIZE {\n if y % SQRT_SIZE == 0 {\n println!();\n }\n\n for x in 0..SIZE {\n print!(\"{}{}\",\n if x % SQRT_SIZE == 0 { \" \" } else { \"\" },\n dict[vars[y][x]]);\n }\n println!();\n }\n}\n\nfn verify_sudoku(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {\n for y in 0..SIZE {\n for x in 0..SIZE {\n assert_eq!(dict[vars[y][x]], expected[y][x]);\n }\n }\n}\n\n#[test]\nfn killersudoku_wikipedia() {\n let puzzle = [\n ( 3, vec![(0,0), (1,0)]),\n (15, vec![(2,0), (3,0), (4,0)]),\n (22, vec![(5,0), (4,1), (5,1), (4,2)]),\n ( 4, vec![(6,0), (6,1)]),\n (16, vec![(7,0), (7,1)]),\n (15, vec![(8,0), (8,1), (8,2), (8,3)]),\n (25, vec![(0,1), (1,1), (0,2), (1,2)]),\n (17, vec![(2,1), (3,1)]),\n ( 9, vec![(2,2), (3,2), (3,3)]),\n ( 8, vec![(5,2), (5,3), (5,4)]),\n (20, vec![(6,2), (7,2), (6,3)]),\n ( 6, vec![(0,3), (0,4)]),\n (14, vec![(1,3), (2,3)]),\n (17, vec![(4,3), (4,4), (4,5)]),\n (17, vec![(7,3), (6,4), (7,4)]),\n (13, vec![(1,4), (2,4), (1,5)]),\n (20, vec![(3,4), (3,5), (3,6)]),\n (12, vec![(8,4), (8,5)]),\n (27, vec![(0,5), (0,6), (0,7), (0,8)]),\n ( 6, vec![(2,5), (1,6), (2,6)]),\n (20, vec![(5,5), (5,6), (6,6)]),\n ( 6, vec![(6,5), (7,5)]),\n (10, vec![(4,6), (3,7), (4,7), (3,8)]),\n (14, vec![(7,6), (8,6), (7,7), (8,7)]),\n ( 8, vec![(1,7), (1,8)]),\n (16, vec![(2,7), (2,8)]),\n (15, vec![(5,7), (6,7)]),\n (13, vec![(4,8), (5,8), (6,8)]),\n (17, vec![(7,8), (8,8)]),\n ];\n\n let expected = [\n [ 2,1,5, 6,4,7, 3,9,8 ],\n [ 3,6,8, 9,5,2, 1,7,4 ],\n [ 7,9,4, 3,8,1, 6,5,2 ],\n\n [ 5,8,6, 2,7,4, 9,3,1 ],\n [ 1,4,2, 5,9,3, 8,6,7 ],\n [ 9,7,3, 8,1,6, 4,2,5 ],\n\n [ 8,2,1, 7,3,9, 5,4,6 ],\n [ 6,5,9, 4,2,8, 7,1,3 ],\n [ 4,3,7, 1,6,5, 2,8,9 ] ];\n\n let (mut sys, vars) = make_killer_sudoku(&puzzle);\n let dict = sys.solve_any().expect(\"solution\");\n print_sudoku(&dict, &vars);\n verify_sudoku(&dict, &vars, &expected);\n println!(\"killersudoku_wikipedia: {} guesses\", sys.num_guesses());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>next step is hover and events handling<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added new example<commit_after>extern crate tetrahedrane;\n\nfn main() {\n let mut window = tetrahedrane::init::Window::new(20, 20, 640, 480, &\"Hello World\");\n let mut framebuffer = tetrahedrane::render::Framebuffer::new(640, 480);\n\n let red = tetrahedrane::render::Color::new(255, 0, 0);\n\n loop {\n for x in 0..100 {\n for y in 0..100 {\n framebuffer.set_pixel(x, y, &red);\n }\n }\n\n 
window.apply_buf(&framebuffer);\n\n window.sync();\n\n std::thread::sleep(std::time::Duration::from_millis(33));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Subcommand is named \"deleted\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removed excess comments<commit_after><|endoftext|>"} {"text":"<commit_before>use alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\nuse sync::Intex;\n\n\/\/\/ Execute an executable\npub fn execute(url: Url, mut args: Vec<String>) {\n let mut context_ptr: *mut Context = 0 as *mut Context;\n let mut entry: usize = 0;\n\n if let Ok(mut resource) = url.open() {\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n\n \/\/TODO: Warning: Investigate this hack!\n let hack = virtual_address % 4096;\n\n let physical_address = memory::alloc(virtual_size + hack);\n\n if physical_address > 0 {\n debugln!(\"VADDR: {:X} OFF: {:X} FLG: {:X} HACK: {:X}\", segment.vaddr, segment.off, segment.flags, hack);\n\n \/\/ Copy progbits\n ::memcpy((physical_address + hack) as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n if segment.mem_len > segment.file_len {\n debugln!(\"BSS: {:X} {}\", segment.vaddr + segment.file_len, segment.mem_len - segment.file_len);\n ::memset((physical_address + hack + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n }\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address - hack,\n virtual_size: virtual_size + hack,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n }\n\n if entry > 0 && ! memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut contexts = ::env().contexts.lock();\n if let Some(mut context) = contexts.current_mut() {\n context.name = url.string;\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new(unsafe { (*context.cwd.get()).clone() }));\n\n unsafe { context.unmap() };\n context.memory = Arc::new(UnsafeCell::new(memory));\n unsafe { context.map() };\n\n context_ptr = context.deref_mut();\n }\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n\n if context_ptr as usize > 0 {\n Context::spawn(\"kexec\".to_string(), box move || {\n unsafe {\n let _intex = Intex::static_lock();\n\n let context = &mut *context_ptr;\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..(*context.args.get()).len() {\n let reverse_i = (*context.args.get()).len() - i - 1;\n if let Some(ref mut arg) = (*context.args.get()).get_mut(reverse_i) {\n if ! 
arg.ends_with('\\0') {\n arg.push('\\0');\n }\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: memory::alloc(CONTEXT_STACK_SIZE),\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n ptr::write(sp as *mut usize, *arg);\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(\/*1 << 9*\/ 0);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n });\n\n loop {\n unsafe { context_switch(false) };\n }\n }\n}\n<commit_msg>Segment unsafe code from safe code in executor.rs<commit_after>use alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\nuse sync::Intex;\n\n\/\/\/ Execute an executable\npub fn execute(url: Url, mut args: Vec<String>) {\n let mut context_ptr: *mut Context = 0 as *mut Context;\n let mut entry: usize = 0;\n\n if let Ok(mut resource) = url.open() {\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n\n \/\/TODO: Warning: Investigate this hack!\n let hack = virtual_address % 4096;\n\n let physical_address = memory::alloc(virtual_size + hack);\n\n if physical_address > 0 {\n debugln!(\"VADDR: {:X} OFF: {:X} FLG: {:X} HACK: {:X}\", segment.vaddr, segment.off, segment.flags, hack);\n\n \/\/ Copy progbits\n ::memcpy((physical_address + hack) as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n if segment.mem_len > segment.file_len {\n debugln!(\"BSS: {:X} {}\", segment.vaddr + segment.file_len, segment.mem_len - segment.file_len);\n ::memset((physical_address + hack + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n }\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address - hack,\n virtual_size: virtual_size + hack,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n }\n\n if entry > 0 && ! 
memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut contexts = ::env().contexts.lock();\n if let Some(mut context) = contexts.current_mut() {\n context.name = url.string;\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new(unsafe { (*context.cwd.get()).clone() }));\n\n unsafe { context.unmap() };\n context.memory = Arc::new(UnsafeCell::new(memory));\n unsafe { context.map() };\n\n context_ptr = context.deref_mut();\n }\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n\n if context_ptr as usize > 0 {\n Context::spawn(\"kexec\".to_string(), box move || {\n let _intex = Intex::static_lock();\n\n let context = unsafe { &mut *context_ptr };\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..unsafe { (*context.args.get()).len() } {\n let reverse_i = unsafe { (*context.args.get()).len() } - i - 1;\n if let Some(ref mut arg) = unsafe { (*context.args.get()).get_mut(reverse_i) } {\n if ! arg.ends_with('\\0') {\n arg.push('\\0');\n }\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: unsafe { memory::alloc(CONTEXT_STACK_SIZE) },\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n unsafe { ptr::write(sp as *mut usize, *arg) };\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n unsafe {\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(\/*1 << 9*\/ 0);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n });\n\n loop {\n unsafe { context_switch(false) };\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagstore::storeid::StoreId;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive_editor' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n 
warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive_editor'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value, action: &'a StoreAction) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.message' must be a String.\",\n action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.message'\", action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template(action: &StoreAction, id: &StoreId) -> String {\n format!(r#\"\n# Please commit your changes and remove these lines.\n#\n# You're about to commit changes via the {action} Hook\n#\n# Altered file: {id}\n#\n \"#,\n action = action,\n id = id.local().display())\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the `commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction, id: &StoreId) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config, &action) {\n if commit_with_editor(config, &action) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = commit_template(&action, id);\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config, &action)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: &Value) -> bool {\n get_bool_cfg(Some(cfg), \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean value from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n 
cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n<commit_msg>Add config helper to check whether hook is enabled or not<commit_after>use toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagstore::storeid::StoreId;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive_editor' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive_editor'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value, action: &'a StoreAction) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.message' must be a String.\",\n action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.message'\", action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template(action: &StoreAction, id: &StoreId) -> String {\n format!(r#\"\n# Please commit your changes and remove these lines.\n#\n# You're about to commit changes via the {action} Hook\n#\n# Altered file: {id}\n#\n \"#,\n action = action,\n id = id.local().display())\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the `commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction, id: &StoreId) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config, &action) {\n if commit_with_editor(config, 
&action) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = commit_template(&action, id);\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config, &action)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: &Value) -> bool {\n get_bool_cfg(Some(cfg), \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean value from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n\/\/\/ Check whether the hook is enabled or not. 
If the config is not there, the hook is _enabled_ by\n\/\/\/ default.\npub fn is_enabled(cfg: &Value) -> bool {\n get_bool_cfg(Some(cfg), \"enabled\", true, true)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Check before dumping data in kill_stream.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libimagstore;\n\npub mod error;\npub mod result;\npub mod viewer;\n\n<commit_msg>Enable lints<commit_after>#![deny(\n dead_code,\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\nextern crate libimagstore;\n\npub mod error;\npub mod result;\npub mod viewer;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added files via upload<commit_after>\n\nfn t() {\n\n let s = \"Hello World\";\n\n assert_eq!(s, \"Hello World\");\n\n\n}\n\n#[test] fn non_macro() { t() }\n\n\n\/\/#[test] fn _macro() { t(\"hello_world_macro\") }\n<|endoftext|>"} {"text":"<commit_before>extern crate byteorder;\n\nuse std::error::Error;\nuse std::io::prelude::*;\n\nuse backend::Backend;\nuse self::byteorder::{ReadBytesExt, WriteBytesExt, BigEndian};\nuse types::{self, FromSql, ToSql, IsNull};\n\nimpl<DB: Backend<RawValue=[u8]>> FromSql<types::Float, DB> for f32 {\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n let mut bytes = not_none!(bytes);\n bytes.read_f32::<BigEndian>().map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend> ToSql<types::Float, DB> for f32 {\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n out.write_f32::<BigEndian>(*self)\n .map(|_| IsNull::No)\n .map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend<RawValue=[u8]>> FromSql<types::Double, DB> for f64 {\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n let mut bytes = not_none!(bytes);\n bytes.read_f64::<BigEndian>().map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend> ToSql<types::Double, DB> for f64 {\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n out.write_f64::<BigEndian>(*self)\n .map(|_| IsNull::No)\n .map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n<commit_msg>Add the same debug check we do for integers to floats<commit_after>extern crate byteorder;\n\nuse std::error::Error;\nuse std::io::prelude::*;\n\nuse backend::Backend;\nuse self::byteorder::{ReadBytesExt, WriteBytesExt, BigEndian};\nuse types::{self, FromSql, ToSql, IsNull};\n\nimpl<DB: Backend<RawValue=[u8]>> FromSql<types::Float, DB> for f32 {\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n let mut bytes = not_none!(bytes);\n debug_assert!(bytes.len() <= 4, \"Received more than 4 bytes while decoding \\\n an f32. 
Was a double accidentally marked as float?\");\n bytes.read_f32::<BigEndian>().map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend> ToSql<types::Float, DB> for f32 {\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n out.write_f32::<BigEndian>(*self)\n .map(|_| IsNull::No)\n .map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend<RawValue=[u8]>> FromSql<types::Double, DB> for f64 {\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n let mut bytes = not_none!(bytes);\n debug_assert!(bytes.len() <= 8, \"Received more than 8 bytes while decoding \\\n an f64. Was a numeric accidentally marked as dobule?\");\n bytes.read_f64::<BigEndian>().map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n\nimpl<DB: Backend> ToSql<types::Double, DB> for f64 {\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n out.write_f64::<BigEndian>(*self)\n .map(|_| IsNull::No)\n .map_err(|e| Box::new(e) as Box<Error+Send+Sync>)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>gtk testing sample<commit_after><|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::{volatile_load, volatile_store};\nuse core::mem::size_of;\nuse core::ptr::{self, read, write};\nuse core::slice;\n\nuse common::debug;\nuse common::memory;\nuse common::time::{self, Duration};\n\nuse drivers::mmio::Mmio;\nuse drivers::pciconfig::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Qtd {\n next: u32,\n next_alt: u32,\n token: u32,\n buffers: [u32; 5],\n}\n\n#[repr(packed)]\nstruct QueueHead {\n next: u32,\n characteristics: u32,\n capabilities: u32,\n qtd_ptr: u32,\n qtd: Qtd,\n}\n\npub struct Ehci {\n pub pci: PciConfig,\n pub base: usize,\n pub irq: u8,\n}\n\nimpl KScheme for Ehci {\n #[allow(non_snake_case)]\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ debug::d(\"EHCI handle\");\n\n unsafe {\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n \/\/ debug::d(\" usb_sts \");\n \/\/ debug::dh(*usb_sts as usize);\n\n usb_sts.writef(0b111111, true);\n\n \/\/ debug::d(\" usb_sts \");\n \/\/ debug::dh(*usb_sts as usize);\n\n \/\/ let FRINDEX = (opbase + 0xC) as *mut Mmio<u32>;\n \/\/ debug::d(\" FRINDEX \");\n \/\/ debug::dh(*FRINDEX as usize);\n }\n\n \/\/ debug::dl();\n }\n }\n}\n\nimpl Ehci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n let mut module = box Ehci {\n pci: pci,\n base: pci.read(0x10) as usize & 0xFFFFFFF0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n module\n }\n\n #[allow(non_snake_case)]\n pub unsafe fn init(&mut self) {\n debug!(\"EHCI on: {:X}, IRQ {:X}\", self.base, self.irq);\n\n self.pci.flag(4, 4, true); \/\/ Bus master\n\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n let hcs_params = &mut *((self.base + 4) as *mut Mmio<u32>);\n let hcc_params = &mut *((self.base + 8) as *mut Mmio<u32>);\n\n let ports = (hcs_params.read() & 0b1111) as usize;\n debug::d(\" PORTS \");\n debug::dd(ports);\n\n let eecp = (hcc_params.read() >> 8) as u8;\n debug::d(\" EECP \");\n debug::dh(eecp as usize);\n\n debug::dl();\n\n if eecp > 0 {\n if self.pci.read(eecp) & (1 << 24 | 1 << 16) == 1 << 16 {\n debug::d(\"Taking Ownership\");\n debug::d(\" 
\");\n debug::dh(self.pci.read(eecp) as usize);\n\n self.pci.flag(eecp, 1 << 24, true);\n\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n debug::dl();\n\n debug::d(\"Waiting\");\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n\n while self.pci.read(eecp) & (1 << 24 | 1 << 16) != 1 << 24 {}\n\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n debug::dl();\n }\n }\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_cmd = &mut *(op_base as *mut Mmio<u32>);\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n let usb_intr = &mut *((op_base + 8) as *mut Mmio<u32>);\n let config_flag = &mut *((op_base + 0x40) as *mut Mmio<u32>);\n let port_scs = &mut slice::from_raw_parts_mut((op_base + 0x44) as *mut Mmio<u32>, ports);\n\n \/*\n let FRINDEX = (opbase + 0xC) as *mut Mmio<u32>;\n let CTRLDSSEGMENT = (opbase + 0x10) as *mut Mmio<u32>;\n let PERIODICLISTBASE = (opbase + 0x14) as *mut Mmio<u32>;\n let ASYNCLISTADDR = (opbase + 0x18) as *mut Mmio<u32>;\n *\/\n\n \/\/Halt\n if usb_sts.read() & 1 << 12 == 0 {\n usb_cmd.writef(0xF, false);\n while ! usb_sts.readf(1 << 12) {}\n }\n\n \/\/Reset\n usb_cmd.writef(1 << 1, true);\n while usb_cmd.readf(1 << 1) {}\n\n \/\/Enable\n usb_intr.write(0b111111);\n usb_cmd.writef(1, true);\n config_flag.write(1);\n while usb_sts.readf(1 << 12) {}\n\n for i in 0..port_scs.len() {\n let port_sc = &mut port_scs[i];\n if port_sc.readf(1) {\n debugln!(\"Device on port {}: {:X}\", i, port_sc.read());\n\n if port_sc.readf(1 << 1) {\n debugln!(\"Connection Change\");\n\n port_sc.writef(1 << 1, true);\n }\n\n if ! port_sc.readf(1 << 2) {\n debugln!(\"Reset\");\n\n while ! port_sc.readf(1 << 8) {\n port_sc.writef(1 << 8, true);\n }\n\n let mut spin = 1000000000;\n while spin > 0 {\n spin -= 1;\n }\n\n while port_sc.readf(1 << 8) {\n port_sc.writef(1 << 8, false);\n }\n }\n\n debugln!(\"Port Enabled {:X}\", port_sc.read());\n\n self.device(i as u8 + 1);\n }\n }\n }\n}\n\nimpl UsbHci for Ehci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Qtd) as u32,\n None => 1\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: (size_of::<Setup>() as u32) << 16 | 0b10 << 8 | 1 << 7,\n buffers: [(setup as *const Setup) as u32, 0, 0, 0, 0]\n }),\n UsbMsg::In(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b01 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::InIso(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b01 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::Out(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b00 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::OutIso(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b00 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n })\n }\n }\n\n let mut count = 0;\n\n if ! 
tds.is_empty() {\n unsafe {\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_cmd = &mut *(op_base as *mut Mmio<u32>);\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n let async_list = &mut *((op_base + 0x18) as *mut Mmio<u32>);\n\n let queuehead = box QueueHead {\n next: 1,\n characteristics: 1024 << 16 | 1 << 15 | 1 << 14 | 0b10 << 12 | (endpoint as u32) << 8 | address as u32,\n capabilities: 0b01 << 30,\n qtd_ptr: (tds.last().unwrap() as *const Qtd) as u32,\n qtd: *tds.last().unwrap()\n };\n\n async_list.write((&*queuehead as *const QueueHead) as u32 | 2);\n usb_cmd.writef(1 << 5 | 1, true);\n\n \/*\n for td in tds.iter().rev() {\n while unsafe { volatile_load(td as *const Qtd).token } & 1 << 7 == 1 << 7 {\n \/\/unsafe { context_switch(false) };\n }\n }\n *\/\n\n while usb_sts.readf(0xA000) {}\n\n usb_cmd.writef(1 << 5 | 1, false);\n async_list.write(0);\n }\n }\n\n count\n }\n}\n<commit_msg>EHCI works with touchscreen<commit_after>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::{volatile_load, volatile_store};\nuse core::mem::size_of;\nuse core::ptr::{self, read, write};\nuse core::slice;\n\nuse common::debug;\nuse common::memory;\nuse common::time::{self, Duration};\n\nuse drivers::mmio::Mmio;\nuse drivers::pciconfig::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Qtd {\n next: u32,\n next_alt: u32,\n token: u32,\n buffers: [u32; 5],\n}\n\n#[repr(packed)]\nstruct QueueHead {\n next: u32,\n characteristics: u32,\n capabilities: u32,\n qtd_ptr: u32,\n qtd: Qtd,\n}\n\npub struct Ehci {\n pub pci: PciConfig,\n pub base: usize,\n pub irq: u8,\n}\n\nimpl KScheme for Ehci {\n #[allow(non_snake_case)]\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ debug::d(\"EHCI handle\");\n\n unsafe {\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n \/\/ debug::d(\" usb_sts \");\n \/\/ debug::dh(*usb_sts as usize);\n\n usb_sts.writef(0b111111, true);\n\n \/\/ debug::d(\" usb_sts \");\n \/\/ debug::dh(*usb_sts as usize);\n\n \/\/ let FRINDEX = (opbase + 0xC) as *mut Mmio<u32>;\n \/\/ debug::d(\" FRINDEX \");\n \/\/ debug::dh(*FRINDEX as usize);\n }\n\n \/\/ debug::dl();\n }\n }\n}\n\nimpl Ehci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n let mut module = box Ehci {\n pci: pci,\n base: pci.read(0x10) as usize & 0xFFFFFFF0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n module\n }\n\n #[allow(non_snake_case)]\n pub unsafe fn init(&mut self) {\n debug!(\"EHCI on: {:X}, IRQ {:X}\", self.base, self.irq);\n\n self.pci.flag(4, 4, true); \/\/ Bus master\n\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n let hcs_params = &mut *((self.base + 4) as *mut Mmio<u32>);\n let hcc_params = &mut *((self.base + 8) as *mut Mmio<u32>);\n\n let ports = (hcs_params.read() & 0b1111) as usize;\n debug::d(\" PORTS \");\n debug::dd(ports);\n\n let eecp = (hcc_params.read() >> 8) as u8;\n debug::d(\" EECP \");\n debug::dh(eecp as usize);\n\n debug::dl();\n\n if eecp > 0 {\n if self.pci.read(eecp) & (1 << 24 | 1 << 16) == 1 << 16 {\n debug::d(\"Taking Ownership\");\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n\n self.pci.flag(eecp, 1 << 24, true);\n\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n 
debug::dl();\n\n debug::d(\"Waiting\");\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n\n while self.pci.read(eecp) & (1 << 24 | 1 << 16) != 1 << 24 {}\n\n debug::d(\" \");\n debug::dh(self.pci.read(eecp) as usize);\n debug::dl();\n }\n }\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_cmd = &mut *(op_base as *mut Mmio<u32>);\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n let usb_intr = &mut *((op_base + 8) as *mut Mmio<u32>);\n let config_flag = &mut *((op_base + 0x40) as *mut Mmio<u32>);\n let port_scs = &mut slice::from_raw_parts_mut((op_base + 0x44) as *mut Mmio<u32>, ports);\n\n \/*\n let FRINDEX = (opbase + 0xC) as *mut Mmio<u32>;\n let CTRLDSSEGMENT = (opbase + 0x10) as *mut Mmio<u32>;\n let PERIODICLISTBASE = (opbase + 0x14) as *mut Mmio<u32>;\n let ASYNCLISTADDR = (opbase + 0x18) as *mut Mmio<u32>;\n *\/\n\n \/\/Halt\n if usb_sts.read() & 1 << 12 == 0 {\n usb_cmd.writef(0xF, false);\n while ! usb_sts.readf(1 << 12) {}\n }\n\n \/\/Reset\n usb_cmd.writef(1 << 1, true);\n while usb_cmd.readf(1 << 1) {}\n\n \/\/Enable\n usb_intr.write(0b111111);\n usb_cmd.writef(1, true);\n config_flag.write(1);\n while usb_sts.readf(1 << 12) {}\n\n for i in 0..port_scs.len() {\n let port_sc = &mut port_scs[i];\n if port_sc.readf(1) {\n debugln!(\"Device on port {}: {:X}\", i, port_sc.read());\n\n if port_sc.readf(1 << 1) {\n debugln!(\"Connection Change\");\n\n port_sc.writef(1 << 1, true);\n }\n\n if ! port_sc.readf(1 << 2) {\n debugln!(\"Reset\");\n\n while ! port_sc.readf(1 << 8) {\n port_sc.writef(1 << 8, true);\n }\n\n let mut spin = 1000000000;\n while spin > 0 {\n spin -= 1;\n }\n\n while port_sc.readf(1 << 8) {\n port_sc.writef(1 << 8, false);\n }\n }\n\n debugln!(\"Port Enabled {:X}\", port_sc.read());\n\n self.device(i as u8 + 1);\n }\n }\n }\n}\n\nimpl UsbHci for Ehci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Qtd) as u32,\n None => 1\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: (size_of::<Setup>() as u32) << 16 | 0b10 << 8 | 1 << 7,\n buffers: [(setup as *const Setup) as u32, 0, 0, 0, 0]\n }),\n UsbMsg::In(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b01 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::InIso(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b01 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::Out(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b00 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n }),\n UsbMsg::OutIso(ref data) => tds.push(Qtd {\n next: link_ptr,\n next_alt: 1,\n token: ((data.len() as u32) & 0x7FFF) << 16 | 0b00 << 8 | 1 << 7,\n buffers: [data.as_ptr() as u32, 0, 0, 0, 0]\n })\n }\n }\n\n let mut count = 0;\n\n if ! 
tds.is_empty() {\n unsafe {\n let cap_length = &mut *(self.base as *mut Mmio<u8>);\n\n let op_base = self.base + cap_length.read() as usize;\n\n let usb_cmd = &mut *(op_base as *mut Mmio<u32>);\n let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);\n let async_list = &mut *((op_base + 0x18) as *mut Mmio<u32>);\n\n let queuehead = box QueueHead {\n next: 1,\n characteristics: 1024 << 16 | 1 << 15 | 1 << 14 | 0b10 << 12 | (endpoint as u32) << 8 | address as u32,\n capabilities: 0b01 << 30,\n qtd_ptr: (tds.last().unwrap() as *const Qtd) as u32,\n qtd: *tds.last().unwrap()\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.token as usize >> 16) & 0x7FFF;\n }\n\n async_list.write((&*queuehead as *const QueueHead) as u32 | 2);\n usb_cmd.writef(1 << 5 | 1, true);\n\n let mut i = 0;\n for td in tds.iter().rev() {\n while unsafe { volatile_load(td as *const Qtd).token } & 1 << 7 == 1 << 7 {\n \/\/unsafe { context_switch(false) };\n }\n }\n\n usb_cmd.writef(1 << 5 | 1, false);\n async_list.write(0);\n }\n }\n\n count\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut 
OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n let ctrl = self.regs.control.read();\n self.regs.control.write(ctrl & (0xFFFFFFFF - CTRL_HCFS) | 0b10 << 6);\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! tds.is_empty() {\n let ed = box Ed {\n flags: 1024 << 16 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n \/*\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! 
self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 {\n \/\/unsafe { context_switch(false) };\n }\n }\n\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n *\/\n }\n\n count\n }\n}\n<commit_msg>Getting stall condition<commit_after>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n debugln!(\"Reset: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = ctrl & (0xFFFFFFFF - CTRL_HCFS);\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Enable: {:X}\", 
self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Port Enumeration: {:X}\", self.regs.control.read());\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! tds.is_empty() {\n let ed = box Ed {\n \/\/TODO: Remove 1 << 13, it sets it to low speed\n flags: 0x3FF << 16 | 1 << 13 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 {\n \/\/unsafe { context_switch(false) };\n }\n let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;\n if condition != 0 {\n debugln!(\"Condition: {:X}\", condition);\n break;\n }\n }\n\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n }\n\n count\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to problem 60<commit_after>#![feature(core)]\n#[macro_use] extern crate libeuler;\n\nuse libeuler::prime::SieveOfAtkin;\n\/\/\/ The primes 3, 7, 109, and 673, are quite remarkable. By taking any two primes and concatenating\n\/\/\/ them in any order the result will always be prime. 
For example, taking 7 and 109, both 7109 and\n\/\/\/ 1097 are prime. The sum of these four primes, 792, represents the lowest sum for a set of four\n\/\/\/ primes with this property.\n\/\/\/\n\/\/\/ Find the lowest sum for a set of five primes for which any two primes concatenate to\n\/\/\/ produce another prime.\nfn main() {\n solutions! {\n sol naive {\n let sieve = SieveOfAtkin::new(100_000_000);\n\n match find_remarkable_set(&sieve, vec![13], 1000, 5) {\n Some(set) => set.iter().map(|&a| a).sum(),\n _ => 0u64\n }\n }\n }\n}\n\nfn find_remarkable_set(sieve: &SieveOfAtkin, mut set: Vec<u64>, start: u64, needed: usize) -> Option<Vec<u64>> {\n for prim in sieve.iter().filter(|&p| p > start).filter(|&p| p < 9000) {\n \/\/ print!(\"Add {} to {:?}? \", prim, set);\n let mut valid = true;\n for i in set.iter() {\n if !sieve.is_prime(format!(\"{}{}\", i, prim).parse().unwrap()) ||\n !sieve.is_prime(format!(\"{}{}\", prim, i).parse().unwrap()) {\n valid = false;\n \/\/ println!(\"No\");\n break;\n }\n }\n\n if valid {\n \/\/ println!(\"Yes\");\n set.push(prim);\n\n if set.len() >= needed {\n return Some(set);\n }\n\n match find_remarkable_set(sieve, set.clone(), prim, needed) {\n Some(v) => return Some(v),\n _ => {}\n }\n\n set.pop();\n }\n }\n\n None\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>hello world<commit_after>fn main() {\n println!(\"Hello World!\");\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Private callback support functions.\n\nuse libc::{c_double, c_int, c_uint};\nuse std::mem;\nuse std::sync::mpsc::Sender;\n\nuse super::*;\n\nmacro_rules! 
callback(\n (\n type Args = ($($arg:ident: $arg_ty:ty),*);\n type Callback = $Callback:ident;\n let ext_set = $ext_set:expr;\n fn callback($($ext_arg:ident: $ext_arg_ty:ty),*) $call:expr\n ) => (\n thread_local!(static CALLBACK_KEY: RefCell<Option<Box<Object<Args> + 'static>>> = RefCell::new(None));\n\n type Args = ($($arg_ty),*,);\n\n trait Object<T> {\n fn call(&self, args: T);\n }\n\n impl<UserData> Object<Args> for ::Callback<fn($($arg_ty),*, &UserData), UserData> {\n fn call(&self, ($($arg),*,): Args) {\n (self.f)($($arg),*, &self.data);\n }\n }\n\n pub fn set<UserData: 'static>(f: ::$Callback<UserData>) {\n let mut boxed_cb = Some(box f as Box<Object<Args> + 'static>);\n CALLBACK_KEY.with(|cb| {\n *cb.borrow_mut() = boxed_cb.take();\n });\n ($ext_set)(Some(callback as extern \"C\" fn($($ext_arg: $ext_arg_ty),*)));\n }\n\n pub fn unset() {\n CALLBACK_KEY.with(|cb| {\n *cb.borrow_mut() = None;\n });\n ($ext_set)(None);\n }\n\n extern \"C\" fn callback($($ext_arg: $ext_arg_ty),*) {\n CALLBACK_KEY.with(|cb| {\n match *cb.borrow() {\n Some(ref cb) => unsafe { cb.call($call) },\n _ => {}\n }\n })\n }\n )\n);\n\npub mod error {\n use libc::{c_int, c_char};\n use std::cell::RefCell;\n use std::mem;\n\n callback!(\n type Args = (error: ::Error, description: String);\n type Callback = ErrorCallback;\n let ext_set = |&: cb| unsafe { ::ffi::glfwSetErrorCallback(cb) };\n fn callback(error: c_int, description: *const c_char) {\n (mem::transmute(error), ::string_from_c_str(description))\n }\n );\n}\n\npub mod monitor {\n use libc::{c_int};\n use std::cell::RefCell;\n use std::mem;\n\n callback!(\n type Args = (monitor: ::Monitor, event: ::MonitorEvent);\n type Callback = MonitorCallback;\n let ext_set = |&: cb| unsafe { ::ffi::glfwSetMonitorCallback(cb) };\n fn callback(monitor: *mut ::ffi::GLFWmonitor, event: c_int) {\n let monitor = ::Monitor {\n ptr: monitor\n };\n (monitor, mem::transmute(event))\n }\n );\n}\n\nunsafe fn get_sender<'a>(window: &'a *mut ffi::GLFWwindow) -> &'a Sender<(f64, WindowEvent)> {\n mem::transmute(ffi::glfwGetWindowUserPointer(*window))\n}\n\n\/\/ Note that this macro creates a static function pointer rather than a plain function.\n\/\/ This makes it more ergonomic to embed in an Option; see set_window_callback! in lib.rs\nmacro_rules! 
window_callback(\n (fn $name:ident () => $event:ident) => (\n pub static $name: (extern \"C\" fn(window: *mut ffi::GLFWwindow)) = {\n extern \"C\" fn actual_callback(window: *mut ffi::GLFWwindow) {\n unsafe { get_sender(&window).send((ffi::glfwGetTime() as f64, WindowEvent::$event)).unwrap();}\n }\n actual_callback\n };\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident($($arg_conv:expr),*)) => (\n pub static $name: (extern \"C\" fn(window: *mut ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*)) = {\n extern \"C\" fn actual_callback(window: *mut ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n unsafe { get_sender(&window).send((ffi::glfwGetTime() as f64, WindowEvent::$event($($arg_conv),*))).unwrap(); }\n }\n actual_callback\n };\n );\n);\n\nwindow_callback!(fn window_pos_callback(xpos: c_int, ypos: c_int) => Pos(xpos as i32, ypos as i32));\nwindow_callback!(fn window_size_callback(width: c_int, height: c_int) => Size(width as i32, height as i32));\nwindow_callback!(fn window_close_callback() => Close);\nwindow_callback!(fn window_refresh_callback() => Refresh);\nwindow_callback!(fn window_focus_callback(focused: c_int) => Focus(focused == ffi::TRUE));\nwindow_callback!(fn window_iconify_callback(iconified: c_int) => Iconify(iconified == ffi::TRUE));\nwindow_callback!(fn framebuffer_size_callback(width: c_int, height: c_int) => FramebufferSize(width as i32, height as i32));\nwindow_callback!(fn mouse_button_callback(button: c_int, action: c_int, mods: c_int) => MouseButton(mem::transmute(button), mem::transmute(action), Modifiers::from_bits(mods).unwrap()));\nwindow_callback!(fn cursor_pos_callback(xpos: c_double, ypos: c_double) => CursorPos(xpos as f64, ypos as f64));\nwindow_callback!(fn cursor_enter_callback(entered: c_int) => CursorEnter(entered == ffi::TRUE));\nwindow_callback!(fn scroll_callback(xpos: c_double, ypos: c_double) => Scroll(xpos as f64, ypos as f64));\nwindow_callback!(fn key_callback(key: c_int, scancode: c_int, action: c_int, mods: c_int) => Key(mem::transmute(key), scancode, mem::transmute(action), Modifiers::from_bits(mods).unwrap()));\nwindow_callback!(fn char_callback(character: c_uint) => Char(::std::char::from_u32(character).unwrap()));\n<commit_msg>closure inference fix for the current nightly<commit_after>\/\/ Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Private callback support functions.\n\nuse libc::{c_double, c_int, c_uint};\nuse std::mem;\nuse std::sync::mpsc::Sender;\n\nuse super::*;\n\nmacro_rules! 
callback(\n (\n type Args = ($($arg:ident: $arg_ty:ty),*);\n type Callback = $Callback:ident;\n let ext_set = $ext_set:expr;\n fn callback($($ext_arg:ident: $ext_arg_ty:ty),*) $call:expr\n ) => (\n thread_local!(static CALLBACK_KEY: RefCell<Option<Box<Object<Args> + 'static>>> = RefCell::new(None));\n\n type Args = ($($arg_ty),*,);\n\n trait Object<T> {\n fn call(&self, args: T);\n }\n\n impl<UserData> Object<Args> for ::Callback<fn($($arg_ty),*, &UserData), UserData> {\n fn call(&self, ($($arg),*,): Args) {\n (self.f)($($arg),*, &self.data);\n }\n }\n\n pub fn set<UserData: 'static>(f: ::$Callback<UserData>) {\n let mut boxed_cb = Some(box f as Box<Object<Args> + 'static>);\n CALLBACK_KEY.with(|cb| {\n *cb.borrow_mut() = boxed_cb.take();\n });\n ($ext_set)(Some(callback as extern \"C\" fn($($ext_arg: $ext_arg_ty),*)));\n }\n\n pub fn unset() {\n CALLBACK_KEY.with(|cb| {\n *cb.borrow_mut() = None;\n });\n ($ext_set)(None);\n }\n\n extern \"C\" fn callback($($ext_arg: $ext_arg_ty),*) {\n CALLBACK_KEY.with(|cb| {\n match *cb.borrow() {\n Some(ref cb) => unsafe { cb.call($call) },\n _ => {}\n }\n })\n }\n )\n);\n\npub mod error {\n use libc::{c_int, c_char};\n use std::cell::RefCell;\n use std::mem;\n\n callback!(\n type Args = (error: ::Error, description: String);\n type Callback = ErrorCallback;\n let ext_set = |cb| unsafe { ::ffi::glfwSetErrorCallback(cb) };\n fn callback(error: c_int, description: *const c_char) {\n (mem::transmute(error), ::string_from_c_str(description))\n }\n );\n}\n\npub mod monitor {\n use libc::{c_int};\n use std::cell::RefCell;\n use std::mem;\n\n callback!(\n type Args = (monitor: ::Monitor, event: ::MonitorEvent);\n type Callback = MonitorCallback;\n let ext_set = |cb| unsafe { ::ffi::glfwSetMonitorCallback(cb) };\n fn callback(monitor: *mut ::ffi::GLFWmonitor, event: c_int) {\n let monitor = ::Monitor {\n ptr: monitor\n };\n (monitor, mem::transmute(event))\n }\n );\n}\n\nunsafe fn get_sender<'a>(window: &'a *mut ffi::GLFWwindow) -> &'a Sender<(f64, WindowEvent)> {\n mem::transmute(ffi::glfwGetWindowUserPointer(*window))\n}\n\n\/\/ Note that this macro creates a static function pointer rather than a plain function.\n\/\/ This makes it more ergonomic to embed in an Option; see set_window_callback! in lib.rs\nmacro_rules! 
window_callback(\n (fn $name:ident () => $event:ident) => (\n pub static $name: (extern \"C\" fn(window: *mut ffi::GLFWwindow)) = {\n extern \"C\" fn actual_callback(window: *mut ffi::GLFWwindow) {\n unsafe { get_sender(&window).send((ffi::glfwGetTime() as f64, WindowEvent::$event)).unwrap();}\n }\n actual_callback\n };\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident($($arg_conv:expr),*)) => (\n pub static $name: (extern \"C\" fn(window: *mut ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*)) = {\n extern \"C\" fn actual_callback(window: *mut ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n unsafe { get_sender(&window).send((ffi::glfwGetTime() as f64, WindowEvent::$event($($arg_conv),*))).unwrap(); }\n }\n actual_callback\n };\n );\n);\n\nwindow_callback!(fn window_pos_callback(xpos: c_int, ypos: c_int) => Pos(xpos as i32, ypos as i32));\nwindow_callback!(fn window_size_callback(width: c_int, height: c_int) => Size(width as i32, height as i32));\nwindow_callback!(fn window_close_callback() => Close);\nwindow_callback!(fn window_refresh_callback() => Refresh);\nwindow_callback!(fn window_focus_callback(focused: c_int) => Focus(focused == ffi::TRUE));\nwindow_callback!(fn window_iconify_callback(iconified: c_int) => Iconify(iconified == ffi::TRUE));\nwindow_callback!(fn framebuffer_size_callback(width: c_int, height: c_int) => FramebufferSize(width as i32, height as i32));\nwindow_callback!(fn mouse_button_callback(button: c_int, action: c_int, mods: c_int) => MouseButton(mem::transmute(button), mem::transmute(action), Modifiers::from_bits(mods).unwrap()));\nwindow_callback!(fn cursor_pos_callback(xpos: c_double, ypos: c_double) => CursorPos(xpos as f64, ypos as f64));\nwindow_callback!(fn cursor_enter_callback(entered: c_int) => CursorEnter(entered == ffi::TRUE));\nwindow_callback!(fn scroll_callback(xpos: c_double, ypos: c_double) => Scroll(xpos as f64, ypos as f64));\nwindow_callback!(fn key_callback(key: c_int, scancode: c_int, action: c_int, mods: c_int) => Key(mem::transmute(key), scancode, mem::transmute(action), Modifiers::from_bits(mods).unwrap()));\nwindow_callback!(fn char_callback(character: c_uint) => Char(::std::char::from_u32(character).unwrap()));\n<|endoftext|>"} {"text":"<commit_before><commit_msg>removed commented code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>missing file<commit_after>use ffi::*;\nuse libc::{self, int32_t};\nuse std::ffi::CStr;\nuse petgraph;\nuse daggy::{Dag,EdgeIndex,NodeIndex};\n\n\/\/pub struct Graph{}\npub struct Node {}\npub type Graph = Dag<Node,EdgeKind>;\n\npub fn print_to_stdout(c: *mut Context, g: &Graph) -> bool {\n true\n}\n\npub fn create(context: *mut Context,\n max_edges: u32,\n max_nodes: u32,\n max_info_bytes: u32,\n growth_factor: f32)\n -> Graph {\n Graph::with_capacity(max_nodes as usize, max_edges as usize)\n}\n\npub fn edge_create(c: *mut Context,\n g: &mut Graph,\n from: i32,\n to: i32,\n kind: EdgeKind) -> i32 {\n \/\/edges are nodeindex, not just u32\n \/\/FIXME: error management. 
We should return something else than i32\n \/\/ we should also get index as U32 instead of i32\n g.add_edge(NodeIndex::new(from as usize), NodeIndex::new(to as usize), kind).unwrap_or(EdgeIndex::new(0usize)).index() as i32\n}\n\npub fn node_create_decoder(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n placeholder_id: i32) -> i32 {\n 0\n}\n\npub fn node_create_canvas(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n format: PixelFormat,\n width: usize,\n height: usize,\n bgcolor: u32)\n -> i32 {\n 0\n}\n\npub fn node_create_scale(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n width: usize,\n height: usize,\n downscale_filter: i32,\n upscale_filter: i32,\n flags: usize,\n sharpen: f32)\n -> i32 {\n 0\n}\n\npub fn node_create_expand_canvas(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n left: u32,\n top: u32,\n right: u32,\n bottom: u32,\n canvas_color_srgb: u32)\n -> i32 {\n 0\n}\n\npub fn node_create_fill_rect(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n x1: u32,\n y1: u32,\n x2: u32,\n y2: u32,\n color_srgb: u32)\n -> i32 { 0 }\n\npub fn node_create_rotate_90(c: *mut Context, g: &mut Graph, prev_node: i32) -> i32 { 0 }\npub fn node_create_rotate_180(c: *mut Context, g: &mut Graph, prev_node: i32) -> i32 { 0 }\npub fn node_create_rotate_270(c: *mut Context, g: &mut Graph, prev_node: i32) -> i32 { 0 }\n\npub fn node_create_transpose(c: *mut Context, g: &mut Graph, prev_node: i32) -> i32 { 0 }\n\npub fn node_create_primitive_copy_rect_to_canvas(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n from_x: u32,\n from_y: u32,\n width: u32,\n height: u32,\n x: u32,\n y: u32)\n -> i32 { 0 }\n\npub fn node_create_encoder(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n placeholder_id: i32,\n desired_encoder_id: i64,\n hints: *const EncoderHints)\n -> i32 { 0 }\n\npub fn node_create_primitive_flip_vertical(c: *mut Context,\n g: &mut Graph,\n prev_node: i32)\n -> i32 { 0 }\n\npub fn node_create_primitive_flip_horizontal(c: *mut Context,\n g: &mut Graph,\n prev_node: i32)\n -> i32 { 0 }\n\npub fn node_create_primitive_crop(c: *mut Context,\n g: &mut Graph,\n prev_node: i32,\n x1: u32,\n y1: u32,\n x2: u32,\n y2: u32)\n -> i32 { 0 }\n\nextern \"C\" {\n\/*\n fn flow_graph_print_to_stdout(c: *mut Context, g: *const Graph) -> bool;\n\n fn flow_graph_create(context: *mut Context,\n max_edges: u32,\n max_nodes: u32,\n max_info_bytes: u32,\n growth_factor: f32)\n -> *mut Graph;\n\n\n fn flow_edge_create(c: *mut Context,\n g: *mut *mut Graph,\n from: i32,\n to: i32,\n kind: EdgeKind)\n -> i32;\n fn flow_node_create_decoder(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n placeholder_id: i32)\n -> i32;\n fn flow_node_create_canvas(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n format: PixelFormat,\n width: usize,\n height: usize,\n bgcolor: u32)\n -> i32;\n\n fn flow_node_create_scale(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n width: usize,\n height: usize,\n downscale_filter: i32,\n upscale_filter: i32,\n flags: usize,\n sharpen: f32)\n -> i32;\n\n fn flow_node_create_expand_canvas(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n left: u32,\n top: u32,\n right: u32,\n bottom: u32,\n canvas_color_srgb: u32)\n -> i32;\n\n fn flow_node_create_fill_rect(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n x1: u32,\n y1: u32,\n x2: u32,\n y2: u32,\n color_srgb: u32)\n -> i32;\n\n fn flow_node_create_rotate_90(c: *mut Context, g: *mut *mut Graph, prev_node: i32) -> i32;\n fn flow_node_create_rotate_180(c: *mut Context, g: 
*mut *mut Graph, prev_node: i32) -> i32;\n fn flow_node_create_rotate_270(c: *mut Context, g: *mut *mut Graph, prev_node: i32) -> i32;\n\n fn flow_node_create_transpose(c: *mut Context, g: *mut *mut Graph, prev_node: i32) -> i32;\n\n fn flow_node_create_primitive_copy_rect_to_canvas(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n from_x: u32,\n from_y: u32,\n width: u32,\n height: u32,\n x: u32,\n y: u32)\n -> i32;\n\n fn flow_node_create_encoder(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n placeholder_id: i32,\n desired_encoder_id: i64,\n hints: *const EncoderHints)\n -> i32;\n\n fn flow_node_create_primitive_flip_vertical(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32)\n -> i32;\n\n fn flow_node_create_primitive_flip_horizontal(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32)\n -> i32;\n\n fn flow_node_create_primitive_crop(c: *mut Context,\n g: *mut *mut Graph,\n prev_node: i32,\n x1: u32,\n y1: u32,\n x2: u32,\n y2: u32)\n -> i32;\n*\/\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>dynamically generate example assets with cfg! platform switch<commit_after><|endoftext|>"} {"text":"<commit_before>use std::env;\n\nuse serenity::{\n async_trait,\n model::{channel::Message, gateway::Ready},\n prelude::*,\n};\n\nstruct Handler;\n\n#[async_trait]\nimpl EventHandler for Handler {\n async fn message(&self, ctx: Context, msg: Message) {\n if msg.content == \"!hello\" {\n \/\/ The create message builder allows you to easily create embeds and messages\n \/\/ using a builder syntax.\n \/\/ This example will create a message that says \"Hello, World!\", with an embed that has\n \/\/ a title, description, an image, three fields, and a footer.\n let msg = msg\n .channel_id\n .send_message(&ctx.http, |m| {\n m.content(\"Hello, World!\")\n .embed(|e| {\n e.title(\"This is a title\")\n .description(\"This is a description\")\n .image(\"attachment:\/\/ferris_eyes.png\")\n .fields(vec![\n (\"This is the first field\", \"This is a field body\", true),\n (\"This is the second field\", \"Both fields are inline\", true),\n ])\n .field(\"This is the third field\", \"This is not an inline field\", false)\n .footer(|f| f.text(\"This is a footer\"))\n \/\/ Add a timestamp for the current time\n \/\/ This also accepts a rfc3339 Timestamp\n .timestamp(chrono::Utc::now())\n })\n .add_file(\".\/ferris_eyes.png\")\n })\n .await;\n\n if let Err(why) = msg {\n println!(\"Error sending message: {:?}\", why);\n }\n }\n }\n\n async fn ready(&self, _: Context, ready: Ready) {\n println!(\"{} is connected!\", ready.user.name);\n }\n}\n\n#[tokio::main]\nasync fn main() {\n \/\/ Configure the client with your Discord bot token in the environment.\n let token = env::var(\"DISCORD_TOKEN\").expect(\"Expected a token in the environment\");\n let mut client =\n Client::builder(&token).event_handler(Handler).await.expect(\"Err creating client\");\n\n if let Err(why) = client.start().await {\n println!(\"Client error: {:?}\", why);\n }\n}\n<commit_msg>Fix the `e09_create_message_builder` example when the `time` feature is enabled (#1693)<commit_after>use std::env;\n\nuse serenity::{\n async_trait,\n model::Timestamp,\n model::{channel::Message, gateway::Ready},\n prelude::*,\n};\n\nstruct Handler;\n\n#[async_trait]\nimpl EventHandler for Handler {\n async fn message(&self, ctx: Context, msg: Message) {\n if msg.content == \"!hello\" {\n \/\/ The create message builder allows you to easily create embeds and messages\n \/\/ using a builder syntax.\n \/\/ This example will create a 
message that says \"Hello, World!\", with an embed that has\n \/\/ a title, description, an image, three fields, and a footer.\n let msg = msg\n .channel_id\n .send_message(&ctx.http, |m| {\n m.content(\"Hello, World!\")\n .embed(|e| {\n e.title(\"This is a title\")\n .description(\"This is a description\")\n .image(\"attachment:\/\/ferris_eyes.png\")\n .fields(vec![\n (\"This is the first field\", \"This is a field body\", true),\n (\"This is the second field\", \"Both fields are inline\", true),\n ])\n .field(\"This is the third field\", \"This is not an inline field\", false)\n .footer(|f| f.text(\"This is a footer\"))\n \/\/ Add a timestamp for the current time\n \/\/ This also accepts a rfc3339 Timestamp\n .timestamp(Timestamp::now())\n })\n .add_file(\".\/ferris_eyes.png\")\n })\n .await;\n\n if let Err(why) = msg {\n println!(\"Error sending message: {:?}\", why);\n }\n }\n }\n\n async fn ready(&self, _: Context, ready: Ready) {\n println!(\"{} is connected!\", ready.user.name);\n }\n}\n\n#[tokio::main]\nasync fn main() {\n \/\/ Configure the client with your Discord bot token in the environment.\n let token = env::var(\"DISCORD_TOKEN\").expect(\"Expected a token in the environment\");\n let mut client =\n Client::builder(&token).event_handler(Handler).await.expect(\"Err creating client\");\n\n if let Err(why) = client.start().await {\n println!(\"Client error: {:?}\", why);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};\nuse gdk_pixbuf::{Colorspace, Pixbuf};\n\nuse librsvg::surface_utils::shared_surface::SharedImageSurface;\n\nfn bench_surface_from_pixbuf(c: &mut Criterion) {\n let mut group = c.benchmark_group(\"surface_from_pixbuf\");\n\n for input in [false, true].iter() {\n group.bench_with_input(\n BenchmarkId::from_parameter(format!(\"{:?}\", input)),\n input,\n |b, alpha| {\n let pixbuf = Pixbuf::new(Colorspace::Rgb, *alpha, 8, 256, 256).unwrap();\n\n \/\/ Fill the surface with interesting data\n for y in 0..pixbuf.get_width() {\n for x in 0..pixbuf.get_height() {\n pixbuf.put_pixel(x, y, x as u8, y as u8, x.max(y) as u8, 0xff);\n }\n }\n\n b.iter(|| SharedImageSurface::from_pixbuf(&pixbuf, None, None).unwrap())\n },\n );\n }\n}\n\ncriterion_group!(benches, bench_surface_from_pixbuf);\ncriterion_main!(benches);\n<commit_msg>Fix the surface_from_pixbuf benchmark<commit_after>use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};\nuse gdk_pixbuf::{Colorspace, Pixbuf};\n\nuse librsvg::surface_utils::shared_surface::SharedImageSurface;\n\nfn bench_surface_from_pixbuf(c: &mut Criterion) {\n let mut group = c.benchmark_group(\"surface_from_pixbuf\");\n\n for input in [false, true].iter() {\n group.bench_with_input(\n BenchmarkId::from_parameter(format!(\"{:?}\", input)),\n input,\n |b, alpha| {\n let pixbuf = Pixbuf::new(Colorspace::Rgb, *alpha, 8, 256, 256).unwrap();\n\n \/\/ Fill the surface with interesting data\n for y in 0..pixbuf.get_width() {\n for x in 0..pixbuf.get_height() {\n pixbuf.put_pixel(\n x as u32,\n y as u32,\n x as u8,\n y as u8,\n x.max(y) as u8,\n 0xff,\n );\n }\n }\n\n b.iter(|| SharedImageSurface::from_pixbuf(&pixbuf, None, None).unwrap())\n },\n );\n }\n}\n\ncriterion_group!(benches, bench_surface_from_pixbuf);\ncriterion_main!(benches);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Operation from_str done<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{io, fs};\n\nmacro_rules! 
readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str){\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n match &command[..]\n {\n \"panic\" => panic!(\"Test panic\"),\n \"ls\" => {\n \/\/ TODO: when libredox is completed\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n _ => println!(\"Commands: panic\"),\n }\n }\n }\n}\n<commit_msg>Use an array to label all commands and add `ptr_write` command<commit_after>#![feature(alloc)]\n#![feature(core)]\n\nextern crate alloc;\nextern crate core;\n\nuse alloc::boxed::Box;\nuse std::{io, fs, rand};\nuse core::ptr;\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str){\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n let console_commands = [\"panic\", \"ls\", \"ptr_write\"];\n\n match &command[..]\n {\n command if command == console_commands[0] => panic!(\"Test panic\"),\n command if command == console_commands[1] => {\n \/\/ TODO: import std::fs functions into libredox\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n command if command == console_commands[2] => {\n let a_ptr = rand() as *mut u8;\n \/\/ TODO: import Box::{from_raw, to_raw} methods in libredox\n \/\/let mut a_box = Box::new(rand() as u8);\n unsafe {\n ptr::write(a_ptr, rand() as u8);\n \/\/ptr::write(a_box.to_raw(), rand() as u8);\n }\n }\n _ => println!(\"Commands: {}\", console_commands.join(\" \")),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Unused error variant removed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing file<commit_after>use std::sync::{Arc};\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse mioco::{CoroutineControl, Handler, Scheduler, SchedulerThread};\nuse mioco::mio::{EventLoop};\n\n\npub struct BalancingScheduler {\n\tthread_counter: AtomicUsize,\n\tlength_refs: Arc<Vec<AtomicUsize>>,\n}\n\nimpl BalancingScheduler {\n\tpub fn new(num_threads: usize) -> BalancingScheduler {\n\t\tlet mut length_refs = Vec::new();\n\t\tfor _ in 0..num_threads { length_refs.push(AtomicUsize::new(0)); }\n\n\t\tBalancingScheduler {\n\t\t\tthread_counter: AtomicUsize::new(0),\n\t\t\tlength_refs: Arc::new(length_refs),\n\t\t}\n\t}\n}\n\nunsafe impl Send for BalancingScheduler {}\nunsafe impl Sync for BalancingScheduler {}\n\nimpl Scheduler for BalancingScheduler {\n\tfn spawn_thread(&self) -> Box<SchedulerThread> {\n\t\tlet thread_id = self.thread_counter.fetch_add(1, Ordering::Relaxed);\n\n\t\tlet thread = Box::new(BalancingSchedulerThread {\n\t\t\tcounter: 0,\n\t\t\tthread_id: thread_id,\n\t\t\tq: Vec::new(),\n\t\t\tlength_refs: self.length_refs.clone(),\n\t\t});\n\n\t\tthread\n\t}\n}\n\nstruct BalancingSchedulerThread {\n\tcounter: usize,\n\tthread_id: usize,\n\tq: 
Vec<CoroutineControl>,\n\tlength_refs: Arc<Vec<AtomicUsize>>,\n}\n\nimpl SchedulerThread for BalancingSchedulerThread {\n\tfn spawned(&mut self, event_loop: &mut EventLoop<Handler>, coroutine_ctrl: CoroutineControl) {\n\t\tlet (thread_id, min) = self.length_refs.iter().enumerate().fold(\n\t\t\t(0, usize::max_value()),\n\t\t\t|(min_id, min), (thread_id, length)| {\n\t\t\t\tlet len = length.load(Ordering::Relaxed);\n\n\t\t\t\tif len < min { (thread_id, len) }\n\t\t\t\telse { (min_id, min) }\n\t\t\t}\n\t\t);\n\n\t\tlet thread_id = if min == 0 {\n\t\t\tself.counter = (self.counter + 1) % self.length_refs.len();\n\t\t\tself.counter\n\t\t} else {\n\t\t\tthread_id\n\t\t};\n\n\t\tif thread_id == self.thread_id { coroutine_ctrl.resume(event_loop); }\n\t\telse { coroutine_ctrl.migrate(event_loop, thread_id); }\n\t}\n\n\tfn ready(&mut self, event_loop: &mut EventLoop<Handler>, coroutine_ctrl: CoroutineControl) {\n\t\tif coroutine_ctrl.is_yielding() {\n\t\t\tself.q.push(coroutine_ctrl);\n\t\t\tself.length_refs[self.thread_id].fetch_add(1, Ordering::Relaxed);\n\t\t} else {\n\/*\t\t\tself.counter = (self.counter + 1) % self.length_refs.len();\n\t\t\tif self.counter == self.thread_id { coroutine_ctrl.resume(event_loop); }\n\t\t\telse { coroutine_ctrl.migrate(event_loop, self.counter); }\n*\/\t\t\tcoroutine_ctrl.resume(event_loop);\n\t\t}\n\t}\n\n\tfn tick(&mut self, event_loop: &mut EventLoop<Handler>) {\n\t\twhile let Some(coroutine_ctrl) = self.q.pop() {\n\t\t\tcoroutine_ctrl.resume(event_loop); \/\/ or migrate them here\n\t\t\tself.length_refs[self.thread_id].fetch_sub(1, Ordering::Relaxed);\n\t\t}\n\t}\n\n\tfn timeout(&mut self) -> Option<u64> { None }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fixing tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>incr.comp.: Add test case for symbol visibility changes<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ revisions: rpass1 rpass2\n\n#![feature(rustc_attrs)]\n#![allow(private_no_mangle_fns)]\n\n#![rustc_partition_reused(module=\"change_symbol_export_status\", cfg=\"rpass2\")]\n#![rustc_partition_translated(module=\"change_symbol_export_status-mod1\", cfg=\"rpass2\")]\n\n\n\/\/ This test case makes sure that a change in symbol visibility is detected by\n\/\/ our dependency tracking. We do this by changing a module's visibility to\n\/\/ `private` in rpass2, causing the contained function to go from `default` to\n\/\/ `hidden` visibility.\n\/\/ The function is marked with #[no_mangle] so it is considered for exporting\n\/\/ even from an executable. 
Plain Rust functions are only exported from Rust\n\/\/ libraries, which our test infrastructure does not support.\n\n#[cfg(rpass1)]\npub mod mod1 {\n #[no_mangle]\n pub fn foo() {}\n}\n\n#[cfg(rpass2)]\nmod mod1 {\n #[no_mangle]\n pub fn foo() {}\n}\n\nfn main() {\n mod1::foo();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make the number of pregenerated dice rolls public<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove PmError<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Always inline DepFn::call<commit_after><|endoftext|>"} {"text":"<commit_before>use std::fmt;\nuse std::io;\n\/\/use std::net::SocketAddr;\n\nuse futures::{Future, Poll, Async};\nuse tokio_io::{AsyncRead, AsyncWrite};\nuse tokio::reactor::Handle;\nuse tokio::net::{TcpStream, TcpStreamNew};\nuse tokio_service::Service;\nuse Uri;\n\nuse super::dns;\n\n\/\/\/ A connector creates an Io to a remote address..\n\/\/\/\n\/\/\/ This trait is not implemented directly, and only exists to make\n\/\/\/ the intent clearer. A connector should implement `Service` with\n\/\/\/ `Request=Uri` and `Response: Io` instead.\npub trait Connect: Service<Request=Uri, Error=io::Error> + 'static {\n \/\/\/ The connected Io Stream.\n type Output: AsyncRead + AsyncWrite + 'static;\n \/\/\/ A Future that will resolve to the connected Stream.\n type Future: Future<Item=Self::Output, Error=io::Error> + 'static;\n \/\/\/ Connect to a remote address.\n fn connect(&self, Uri) -> <Self as Connect>::Future;\n}\n\nimpl<T> Connect for T\nwhere T: Service<Request=Uri, Error=io::Error> + 'static,\n T::Response: AsyncRead + AsyncWrite,\n T::Future: Future<Error=io::Error>,\n{\n type Output = T::Response;\n type Future = T::Future;\n\n fn connect(&self, url: Uri) -> <Self as Connect>::Future {\n self.call(url)\n }\n}\n\n\/\/\/ A connector for the `http` scheme.\n#[derive(Clone)]\npub struct HttpConnector {\n dns: dns::Dns,\n handle: Handle,\n}\n\nimpl HttpConnector {\n\n \/\/\/ Construct a new HttpConnector.\n \/\/\/\n \/\/\/ Takes number of DNS worker threads.\n pub fn new(threads: usize, handle: &Handle) -> HttpConnector {\n HttpConnector {\n dns: dns::Dns::new(threads),\n handle: handle.clone(),\n }\n }\n}\n\nimpl fmt::Debug for HttpConnector {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"HttpConnector\")\n .finish()\n }\n}\n\nimpl Service for HttpConnector {\n type Request = Uri;\n type Response = TcpStream;\n type Error = io::Error;\n type Future = HttpConnecting;\n\n fn call(&self, url: Uri) -> Self::Future {\n debug!(\"Http::connect({:?})\", url);\n let host = match url.host() {\n Some(s) => s,\n None => return HttpConnecting {\n state: State::Error(Some(io::Error::new(io::ErrorKind::InvalidInput, \"invalid url\"))),\n handle: self.handle.clone(),\n },\n };\n let port = url.port().unwrap_or(80);\n\n HttpConnecting {\n state: State::Resolving(self.dns.resolve(host.into(), port)),\n handle: self.handle.clone(),\n }\n }\n\n}\n\n\/\/\/ A Future representing work to connect to a URL.\npub struct HttpConnecting {\n state: State,\n handle: Handle,\n}\n\nenum State {\n Resolving(dns::Query),\n Connecting(ConnectingTcp),\n Error(Option<io::Error>),\n}\n\nimpl Future for HttpConnecting {\n type Item = TcpStream;\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n loop {\n let state;\n match self.state {\n State::Resolving(ref mut query) => {\n match try!(query.poll()) {\n Async::NotReady => return Ok(Async::NotReady),\n Async::Ready(addrs) => {\n state 
= State::Connecting(ConnectingTcp {\n addrs: addrs,\n current: None,\n })\n }\n };\n },\n State::Connecting(ref mut c) => return c.poll(&self.handle).map_err(From::from),\n State::Error(ref mut e) => return Err(e.take().expect(\"polled more than once\")),\n }\n self.state = state;\n }\n }\n}\n\nimpl fmt::Debug for HttpConnecting {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"HttpConnecting\")\n }\n}\n\nstruct ConnectingTcp {\n addrs: dns::IpAddrs,\n current: Option<TcpStreamNew>,\n}\n\nimpl ConnectingTcp {\n \/\/ not a Future, since passing a &Handle to poll\n fn poll(&mut self, handle: &Handle) -> Poll<TcpStream, io::Error> {\n let mut err = None;\n loop {\n if let Some(ref mut current) = self.current {\n match current.poll() {\n Ok(ok) => return Ok(ok),\n Err(e) => {\n trace!(\"connect error {:?}\", e);\n err = Some(e);\n if let Some(addr) = self.addrs.next() {\n debug!(\"connecting to {:?}\", addr);\n *current = TcpStream::connect(&addr, handle);\n continue;\n }\n }\n }\n } else if let Some(addr) = self.addrs.next() {\n debug!(\"connecting to {:?}\", addr);\n self.current = Some(TcpStream::connect(&addr, handle));\n continue;\n }\n\n return Err(err.take().expect(\"missing connect error\"));\n }\n }\n}\n\n\/*\nimpl<S: SslClient> HttpsConnector<S> {\n \/\/\/ Create a new connector using the provided SSL implementation.\n pub fn new(s: S) -> HttpsConnector<S> {\n HttpsConnector {\n http: HttpConnector::default(),\n ssl: s,\n }\n }\n}\n*\/\n\n#[cfg(test)]\nmod tests {\n use std::io;\n use tokio::reactor::Core;\n use super::{Connect, HttpConnector};\n\n #[test]\n fn test_non_http_url() {\n let mut core = Core::new().unwrap();\n let url = \"\/foo\/bar?baz\".parse().unwrap();\n let connector = HttpConnector::new(1, &core.handle());\n\n assert_eq!(core.run(connector.connect(url)).unwrap_err().kind(), io::ErrorKind::InvalidInput);\n }\n\n}\n<commit_msg>fix(client): get default port for https with Uri<commit_after>use std::fmt;\nuse std::io;\n\/\/use std::net::SocketAddr;\n\nuse futures::{Future, Poll, Async};\nuse tokio_io::{AsyncRead, AsyncWrite};\nuse tokio::reactor::Handle;\nuse tokio::net::{TcpStream, TcpStreamNew};\nuse tokio_service::Service;\nuse Uri;\n\nuse super::dns;\n\n\/\/\/ A connector creates an Io to a remote address..\n\/\/\/\n\/\/\/ This trait is not implemented directly, and only exists to make\n\/\/\/ the intent clearer. 
A connector should implement `Service` with\n\/\/\/ `Request=Uri` and `Response: Io` instead.\npub trait Connect: Service<Request=Uri, Error=io::Error> + 'static {\n \/\/\/ The connected Io Stream.\n type Output: AsyncRead + AsyncWrite + 'static;\n \/\/\/ A Future that will resolve to the connected Stream.\n type Future: Future<Item=Self::Output, Error=io::Error> + 'static;\n \/\/\/ Connect to a remote address.\n fn connect(&self, Uri) -> <Self as Connect>::Future;\n}\n\nimpl<T> Connect for T\nwhere T: Service<Request=Uri, Error=io::Error> + 'static,\n T::Response: AsyncRead + AsyncWrite,\n T::Future: Future<Error=io::Error>,\n{\n type Output = T::Response;\n type Future = T::Future;\n\n fn connect(&self, url: Uri) -> <Self as Connect>::Future {\n self.call(url)\n }\n}\n\n\/\/\/ A connector for the `http` scheme.\n#[derive(Clone)]\npub struct HttpConnector {\n dns: dns::Dns,\n handle: Handle,\n}\n\nimpl HttpConnector {\n\n \/\/\/ Construct a new HttpConnector.\n \/\/\/\n \/\/\/ Takes number of DNS worker threads.\n pub fn new(threads: usize, handle: &Handle) -> HttpConnector {\n HttpConnector {\n dns: dns::Dns::new(threads),\n handle: handle.clone(),\n }\n }\n}\n\nimpl fmt::Debug for HttpConnector {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"HttpConnector\")\n .finish()\n }\n}\n\nimpl Service for HttpConnector {\n type Request = Uri;\n type Response = TcpStream;\n type Error = io::Error;\n type Future = HttpConnecting;\n\n fn call(&self, uri: Uri) -> Self::Future {\n debug!(\"Http::connect({:?})\", uri);\n let host = match uri.host() {\n Some(s) => s,\n None => return HttpConnecting {\n state: State::Error(Some(io::Error::new(io::ErrorKind::InvalidInput, \"invalid url\"))),\n handle: self.handle.clone(),\n },\n };\n let port = match uri.port() {\n Some(port) => port,\n None => match uri.scheme() {\n Some(\"http\") => 80,\n Some(\"https\") => 443,\n _ => 80,\n },\n };\n\n HttpConnecting {\n state: State::Resolving(self.dns.resolve(host.into(), port)),\n handle: self.handle.clone(),\n }\n }\n\n}\n\n\/\/\/ A Future representing work to connect to a URL.\npub struct HttpConnecting {\n state: State,\n handle: Handle,\n}\n\nenum State {\n Resolving(dns::Query),\n Connecting(ConnectingTcp),\n Error(Option<io::Error>),\n}\n\nimpl Future for HttpConnecting {\n type Item = TcpStream;\n type Error = io::Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n loop {\n let state;\n match self.state {\n State::Resolving(ref mut query) => {\n match try!(query.poll()) {\n Async::NotReady => return Ok(Async::NotReady),\n Async::Ready(addrs) => {\n state = State::Connecting(ConnectingTcp {\n addrs: addrs,\n current: None,\n })\n }\n };\n },\n State::Connecting(ref mut c) => return c.poll(&self.handle).map_err(From::from),\n State::Error(ref mut e) => return Err(e.take().expect(\"polled more than once\")),\n }\n self.state = state;\n }\n }\n}\n\nimpl fmt::Debug for HttpConnecting {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"HttpConnecting\")\n }\n}\n\nstruct ConnectingTcp {\n addrs: dns::IpAddrs,\n current: Option<TcpStreamNew>,\n}\n\nimpl ConnectingTcp {\n \/\/ not a Future, since passing a &Handle to poll\n fn poll(&mut self, handle: &Handle) -> Poll<TcpStream, io::Error> {\n let mut err = None;\n loop {\n if let Some(ref mut current) = self.current {\n match current.poll() {\n Ok(ok) => return Ok(ok),\n Err(e) => {\n trace!(\"connect error {:?}\", e);\n err = Some(e);\n if let Some(addr) = self.addrs.next() {\n debug!(\"connecting to 
{:?}\", addr);\n *current = TcpStream::connect(&addr, handle);\n continue;\n }\n }\n }\n } else if let Some(addr) = self.addrs.next() {\n debug!(\"connecting to {:?}\", addr);\n self.current = Some(TcpStream::connect(&addr, handle));\n continue;\n }\n\n return Err(err.take().expect(\"missing connect error\"));\n }\n }\n}\n\n\/*\nimpl<S: SslClient> HttpsConnector<S> {\n \/\/\/ Create a new connector using the provided SSL implementation.\n pub fn new(s: S) -> HttpsConnector<S> {\n HttpsConnector {\n http: HttpConnector::default(),\n ssl: s,\n }\n }\n}\n*\/\n\n#[cfg(test)]\nmod tests {\n use std::io;\n use tokio::reactor::Core;\n use super::{Connect, HttpConnector};\n\n #[test]\n fn test_non_http_url() {\n let mut core = Core::new().unwrap();\n let url = \"\/foo\/bar?baz\".parse().unwrap();\n let connector = HttpConnector::new(1, &core.handle());\n\n assert_eq!(core.run(connector.connect(url)).unwrap_err().kind(), io::ErrorKind::InvalidInput);\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Created the pwd builtin<commit_after>use rsh::State;\n\npub fn pwd(s: &mut State) -> i32 {\n println!(\"{}\", s.cwd.display());\n\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix ABI storing in AST.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added todo comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Tweak values of default simulation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove default constructor from RethinkDb<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Cosmetic changes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>else-if conditional added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example.<commit_after>extern crate hlc;\nuse hlc::Clock;\nuse std::thread::sleep;\nuse std::time::Duration;\n\nfn main() {\n let mut clock_a = Clock::wall();\n let mut clock_b = Clock::wall();\n\n let b0 = clock_b.on_send();\n let a0 = clock_a.on_send();\n let a1 = clock_a.on_recv(&b0);\n\n println!(\"a0: {:?}\", a0);\n println!(\"b0: {:?}\", b0);\n println!(\"recv {:?} -> {:?}\", b0, a1);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::process::exit;\n\nuse libimagdiary::diary::Diary;\nuse libimagdiary::error::DiaryError as DE;\nuse libimagdiary::error::DiaryErrorKind as DEK;\nuse libimagentrylist::listers::core::CoreLister;\nuse libimagentrylist::lister::Lister;\nuse libimagrt::runtime::Runtime;\nuse libimagstore::store::Entry;\nuse libimagstore::storeid::StoreId;\nuse libimagerror::trace::trace_error;\n\nuse util::get_diary_name;\n\npub fn list(rt: &Runtime) {\n let diaryname = get_diary_name(rt);\n if diaryname.is_none() {\n warn!(\"No diary selected. 
Use either the configuration file or the commandline option\");\n exit(1);\n }\n let diaryname = diaryname.unwrap();\n\n fn location_to_listing_string(id: &StoreId, base: &PathBuf) -> String {\n id.strip_prefix(base)\n .map_err(|e| trace_error(&e))\n .ok()\n .and_then(|p| p.to_str().map(String::from))\n .unwrap_or(String::from(\"<<Path Parsing Error>>\"))\n }\n\n let diary = Diary::open(rt.store(), &diaryname[..]);\n debug!(\"Diary opened: {:?}\", diary);\n diary.entries()\n .and_then(|es| {\n debug!(\"Iterator for listing: {:?}\", es);\n\n let es = es.filter_map(|a| {\n debug!(\"Filtering: {:?}\", a);\n a.ok()\n }).map(|e| e.into());\n\n let base = rt.store().path();\n\n CoreLister::new(&move |e: &Entry| location_to_listing_string(e.get_location(), base))\n .list(es) \/\/ TODO: Do not ignore non-ok()s\n .map_err(|e| DE::new(DEK::IOError, Some(Box::new(e))))\n })\n .map(|_| debug!(\"Ok\"))\n .map_err(|e| trace_error(&e))\n .ok();\n}\n\n<commit_msg>Fix imag-diary::list::* for new StoreId Interface<commit_after>use std::process::exit;\n\nuse libimagdiary::diary::Diary;\nuse libimagdiary::error::DiaryError as DE;\nuse libimagdiary::error::DiaryErrorKind as DEK;\nuse libimagentrylist::listers::core::CoreLister;\nuse libimagentrylist::lister::Lister;\nuse libimagrt::runtime::Runtime;\nuse libimagstore::store::Entry;\nuse libimagerror::trace::trace_error;\n\nuse util::get_diary_name;\n\npub fn list(rt: &Runtime) {\n let diaryname = get_diary_name(rt);\n if diaryname.is_none() {\n warn!(\"No diary selected. Use either the configuration file or the commandline option\");\n exit(1);\n }\n let diaryname = diaryname.unwrap();\n\n fn entry_to_location_listing_string(e: &Entry) -> String {\n e.get_location().clone()\n .without_base()\n .to_str()\n .map_err(|e| trace_error(&e))\n .unwrap_or(String::from(\"<<Path Parsing Error>>\"))\n }\n\n let diary = Diary::open(rt.store(), &diaryname[..]);\n debug!(\"Diary opened: {:?}\", diary);\n diary.entries()\n .and_then(|es| {\n debug!(\"Iterator for listing: {:?}\", es);\n\n let es = es.filter_map(|a| {\n debug!(\"Filtering: {:?}\", a);\n a.ok()\n }).map(|e| e.into());\n\n CoreLister::new(&entry_to_location_listing_string)\n .list(es) \/\/ TODO: Do not ignore non-ok()s\n .map_err(|e| DE::new(DEK::IOError, Some(Box::new(e))))\n })\n .map(|_| debug!(\"Ok\"))\n .map_err(|e| trace_error(&e))\n .ok();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>command line work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Revert \"Make Debug fancy again and handle #[cfg] properly.\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove iter from trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add pretty metadata in json<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adjust comments for ty::fun stuff<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added separate and expanded tests for vector monomials, including vector_mons_of_deg_le tests.<commit_after>use common::*;\nuse monomial::{Mon2d, Mon3d, Mon4d};\nuse vector_monomial::*;\n\n#[test]\nfn test_construction() {\n let x = Mon2d { exps: [Deg(1), Deg(0)] };\n let _: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), x);\n}\n\n#[test]\n#[should_fail]\nfn test_improper_construction() {\n let x = Mon2d { exps: [Deg(1), Deg(0)] };\n let _: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(3), x);\n}\n\n#[test]\nfn test_divergence() {\n let one = Mon2d { exps: [Deg(0), Deg(0)] };\n let x = Mon2d { exps: [Deg(1), Deg(0)] };\n let 
y = Mon2d { exps: [Deg(0), Deg(1)] };\n \n let x_dim0_vmon: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), x);\n assert_eq!(x_dim0_vmon.divergence_coef_and_mon(), (1.,one));\n \n let y_dim0_vmon: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), y);\n assert_eq!(y_dim0_vmon.divergence_coef_and_mon(), (0.,y));\n \n let y_dim1_vmon: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), y);\n assert_eq!(y_dim1_vmon.divergence_coef_and_mon(), (1.,one));\n \n let xy_dim0_vmon: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), x*y);\n assert_eq!(xy_dim0_vmon.divergence_coef_and_mon(), (1.,y));\n\n let x2y3_dim1_vmon: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), x*x*y*y*y);\n assert_eq!(x2y3_dim1_vmon.divergence_coef_and_mon(), (3., x*x*y*y));\n}\n\n#[test]\nfn test_vector_mons_of_deg_le_2d() {\n let one = Mon2d { exps: [Deg(0), Deg(0)] };\n let x = Mon2d { exps: [Deg(1), Deg(0)] };\n let y = Mon2d { exps: [Deg(0), Deg(1)] };\n let one_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), one);\n let y_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), y);\n let y2_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), y*y);\n let x_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), x);\n let xy_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), x*y);\n let x2_dim0: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(0), x*x);\n let one_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), one);\n let y_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), y);\n let y2_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), y*y);\n let x_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), x);\n let xy_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), x*y);\n let x2_dim1: VectorMonomial<Mon2d> = VectorMonomial::new(Dim(1), x*x);\n \n let vmons_deg0: ~[VectorMonomial<Mon2d>] = VectorMonomial::vector_mons_of_deg_le(Deg(0));\n assert_eq!(&vmons_deg0, &~[one_dim0, one_dim1]);\n\n let vmons_deg1: ~[VectorMonomial<Mon2d>] = VectorMonomial::vector_mons_of_deg_le(Deg(1));\n assert_eq!(&vmons_deg1,\n &~[one_dim0, y_dim0, x_dim0, \n one_dim1, y_dim1, x_dim1]);\n \n let vmons_deg2: ~[VectorMonomial<Mon2d>] = VectorMonomial::vector_mons_of_deg_le(Deg(2));\n assert_eq!(&vmons_deg2,\n &~[one_dim0, y_dim0, y2_dim0, x_dim0, xy_dim0, x2_dim0,\n one_dim1, y_dim1, y2_dim1, x_dim1, xy_dim1, x2_dim1]);\n}\n\n#[test]\nfn test_vector_mons_of_deg_le_3d() {\n let one = Mon3d { exps: [Deg(0), Deg(0), Deg(0)] };\n let x = Mon3d { exps: [Deg(1), Deg(0), Deg(0)] };\n let y = Mon3d { exps: [Deg(0), Deg(1), Deg(0)] };\n let z = Mon3d { exps: [Deg(0), Deg(0), Deg(1)] };\n let one_dim0: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(0), one);\n let z_dim0: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(0), z);\n let y_dim0: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(0), y);\n let x_dim0: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(0), x);\n let one_dim1: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(1), one);\n let z_dim1: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(1), z);\n let y_dim1: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(1), y);\n let x_dim1: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(1), x);\n let one_dim2: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(2), one);\n let z_dim2: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(2), z);\n let y_dim2: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(2), y);\n let x_dim2: VectorMonomial<Mon3d> = VectorMonomial::new(Dim(2), x);\n \n let vmons_deg0: ~[VectorMonomial<Mon3d>] = 
VectorMonomial::vector_mons_of_deg_le(Deg(0));\n assert_eq!(&vmons_deg0, &~[one_dim0, one_dim1, one_dim2]);\n\n let vmons_deg1: ~[VectorMonomial<Mon3d>] = VectorMonomial::vector_mons_of_deg_le(Deg(1));\n assert_eq!(&vmons_deg1,\n &~[one_dim0, z_dim0, y_dim0, x_dim0, \n one_dim1, z_dim1, y_dim1, x_dim1,\n one_dim2, z_dim2, y_dim2, x_dim2]);\n}\n\n#[test]\nfn test_vector_mons_of_deg_le_4d() {\n let one = Mon4d { exps: [Deg(0), Deg(0), Deg(0), Deg(0)] };\n let x = Mon4d { exps: [Deg(1), Deg(0), Deg(0), Deg(0)] };\n let y = Mon4d { exps: [Deg(0), Deg(1), Deg(0), Deg(0)] };\n let z = Mon4d { exps: [Deg(0), Deg(0), Deg(1), Deg(0)] };\n let t = Mon4d { exps: [Deg(0), Deg(0), Deg(0), Deg(1)] };\n let one_dim0: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(0), one);\n let t_dim0: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(0), t);\n let z_dim0: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(0), z);\n let y_dim0: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(0), y);\n let x_dim0: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(0), x);\n let one_dim1: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(1), one);\n let t_dim1: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(1), t);\n let z_dim1: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(1), z);\n let y_dim1: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(1), y);\n let x_dim1: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(1), x);\n let one_dim2: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(2), one);\n let t_dim2: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(2), t);\n let z_dim2: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(2), z);\n let y_dim2: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(2), y);\n let x_dim2: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(2), x);\n let one_dim3: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(3), one);\n let t_dim3: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(3), t);\n let z_dim3: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(3), z);\n let y_dim3: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(3), y);\n let x_dim3: VectorMonomial<Mon4d> = VectorMonomial::new(Dim(3), x);\n \n let vmons_deg0: ~[VectorMonomial<Mon4d>] = VectorMonomial::vector_mons_of_deg_le(Deg(0));\n assert_eq!(&vmons_deg0, &~[one_dim0, one_dim1, one_dim2, one_dim3]);\n\n let vmons_deg1: ~[VectorMonomial<Mon4d>] = VectorMonomial::vector_mons_of_deg_le(Deg(1));\n assert_eq!(&vmons_deg1,\n &~[one_dim0, t_dim0, z_dim0, y_dim0, x_dim0, \n one_dim1, t_dim1, z_dim1, y_dim1, x_dim1,\n one_dim2, t_dim2, z_dim2, y_dim2, x_dim2,\n one_dim3, t_dim3, z_dim3, y_dim3, x_dim3]);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic tests for Matrix::try_inverse<commit_after>#[macro_use]\nextern crate approx;\n\nextern crate nalgebra as na;\n\nuse na::{Matrix1, Matrix2, Matrix3, Matrix5};\n\n#[test]\nfn matrix1_try_inverse() {\n let a = Matrix1::new(3.0);\n let a_inv = a.try_inverse().expect(\"Matrix is invertible\");\n\n assert_relative_eq!(a_inv, Matrix1::new(1.0 \/ 3.0));\n}\n\n#[test]\nfn matrix2_try_inverse() {\n let a = Matrix2::new( 5.0, -2.0,\n -10.0, 1.0);\n let expected_inverse = Matrix2::new(-0.2 \/ 3.0, -2.0 \/ 15.0,\n -2.0 \/ 3.0, -1.0 \/ 3.0);\n let a_inv = a.try_inverse()\n .expect(\"Matrix is invertible\");\n\n assert_relative_eq!(a_inv, expected_inverse);\n}\n\n#[test]\nfn matrix3_try_inverse() {\n let a = Matrix3::new(-3.0, 2.0, 0.0,\n -6.0, 9.0, -2.0,\n 9.0, -6.0, 4.0);\n let expected_inverse = Matrix3::new(-0.40, 0.4 \/ 3.0, 0.2 \/ 3.00,\n -0.10, 0.2, 0.10,\n 0.75, 0.0, 0.25);\n let a_inv = 
a.try_inverse()\n .expect(\"Matrix is invertible\");\n\n assert_relative_eq!(a_inv, expected_inverse);\n}\n\n#[test]\nfn matrix5_try_inverse() {\n \/\/ Dimension 5 is chosen so that the inversion\n \/\/ happens by Gaussian elimination\n \/\/ (at the time of writing dimensions <= 3 are implemented\n \/\/ as analytic formulas, but we choose 5 in the case that 4\n \/\/ also gets an analytic implementation)\n let a = Matrix5::new(-2.0, 0.0, 2.0, 5.0, -5.0,\n -6.0, 4.0, 4.0, 13.0, -15.0,\n 4.0, 16.0, -14.0, -19.0, 12.0,\n 12.0, 12.0, -22.0, -35.0, 34.0,\n -8.0, 4.0, 12.0, 27.0, -31.0);\n let expected_inverse = Matrix5::new(\n 3.9333e+00, -1.5667e+00, 2.6667e-01, 6.6667e-02, 3.0000e-01,\n -1.2033e+01, 3.9667e+00, -1.1167e+00, 2.8333e-01, -1.0000e-01,\n -1.8233e+01, 5.7667e+00, -1.5667e+00, 2.3333e-01, -2.0000e-01,\n -4.3333e+00, 1.6667e+00, -6.6667e-01, 3.3333e-01, -4.6950e-19,\n -1.3400e+01, 4.6000e+00, -1.4000e+00, 4.0000e-01, -2.0000e-01);\n let a_inv = a.try_inverse().expect(\"Matrix is invertible\");\n\n assert_relative_eq!(a_inv, expected_inverse, max_relative=1e-4);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #114 - achanda:is_nan, r=hauleth<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>reorganized control is working with avatar attack<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add ui test with pinned extern Rust type<commit_after>use std::marker::PhantomPinned;\n\n#[cxx::bridge]\nmod ffi {\n extern \"Rust\" {\n type Pinned;\n }\n}\n\npub struct Pinned {\n _pinned: PhantomPinned,\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a JSON bench<commit_after>\/\/#![feature(trace_macros)]\n#![feature(test)]\nextern crate test;\n\n#[macro_use]\nextern crate nom;\n\nuse nom::{digit, alphanumeric};\n\nuse test::Bencher;\n\nuse std::str::{self,FromStr};\nuse std::collections::HashMap;\n\n#[derive(Debug,PartialEq)]\npub enum JsonValue {\n Str(String),\n Num(f32),\n Array(Vec<JsonValue>),\n Object(HashMap<String,JsonValue>)\n}\n\n\/\/ FIXME: since we already parsed a serie of digits and dots,\n\/\/ we know it is correct UTF-8. 
no need to use from_utf8 to\n\/\/ verify it is correct\n\/\/ FIXME: use alt_complete (implement ws for alt_complete)\nnamed!(unsigned_float <f32>, map_res!(\n map_res!(\n recognize!(\n alt_complete!(\n delimited!(digit, tag!(\".\"), opt!(complete!(digit))) |\n delimited!(opt!(digit), tag!(\".\"), digit) |\n digit\n )\n ),\n str::from_utf8\n ),\n FromStr::from_str\n));\nnamed!(float<f32>, map!(\n pair!(\n opt!(alt!(tag!(\"+\") | tag!(\"-\"))),\n unsigned_float\n ),\n |(sign, value): (Option<&[u8]>, f32)| {\n sign.and_then(|s| if s[0] == ('-' as u8) { Some(-1f32) } else { None }).unwrap_or(1f32) * value\n }\n));\n\n\/\/FIXME: verify how json strings are formatted\nnamed!(string<&str>,\n delimited!(\n tag!(\"\\\"\"),\n map_res!(escaped!(call!(alphanumeric), '\\\\', is_a!(\"\\\"n\\\\\")), str::from_utf8),\n tag!(\"\\\"\")\n )\n);\n\nnamed!(array < Vec<JsonValue> >,\n ws!(\n delimited!(\n tag!(\"[\"),\n separated_list!(tag!(\",\"), value),\n tag!(\"]\")\n )\n )\n);\n\nnamed!(key_value<(&str,JsonValue)>,\n ws!(\n separated_pair!(\n string,\n tag!(\":\"),\n value\n )\n )\n);\n\nnamed!(hash< HashMap<String,JsonValue> >,\n ws!(\n map!(\n delimited!(\n tag!(\"{\"),\n separated_list!(tag!(\",\"), key_value),\n tag!(\"}\")\n ),\n |tuple_vec| {\n let mut h: HashMap<String, JsonValue> = HashMap::new();\n for (k, v) in tuple_vec {\n h.insert(String::from(k), v);\n }\n h\n }\n )\n )\n);\n\nnamed!(value<JsonValue>,\n ws!(\n alt!(\n hash => { |h| JsonValue::Object(h) } |\n array => { |v| JsonValue::Array(v) } |\n string => { |s| JsonValue::Str(String::from(s)) } |\n float => { |num| JsonValue::Num(num) }\n )\n )\n);\n\n#[bench]\nfn json_bench(b: &mut Bencher) {\n let data = &b\" { \\\"a\\\"\\t: 42,\n \\\"b\\\": [ \\\"x\\\", \\\"y\\\", 12 ] ,\n \\\"c\\\": { \\\"hello\\\" : \\\"world\\\" \n }\n } \";\n\n \/\/println!(\"data:\\n{:?}\", value(&data[..]));\n b.iter(||{\n value(&data[..])\n });\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>removed uncesesary comments of old code, modified the history comments in load_history and save_history, and made the cmd\/args line cleaner<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_type = \"bin\"]\n\nextern crate metrics;\n\nuse metrics::gauge;\nuse metrics::counter::StdCounter;\nuse metrics::counter::Counter;\n\n\nfn main() {\n \/\/ Create a metric w val 0\n let mut g1: gauge::Gauge = gauge::Gauge{value: 0};\n println!(\"{}\", g1.value);\n g1.update(100);\n\n \/\/ Get a snapshot of it to g2\n let g2: gauge::Gauge = g1.snapshot();\n\n \/\/ Update g1 to 200\n g1.update(200);\n\n println!(\"g1 {} g2 {}\", g1.value, g2.value);\n\n println!(\"{}\", g1.value);\n\n let mut c1 = StdCounter{value: 0};\n c1.inc(1);\n c1.inc(5);\n println!(\"{}\", c1.value)\n}<commit_msg>Fixup according to latest changes<commit_after>#![crate_type = \"bin\"]\n\nextern crate metrics;\n\nuse metrics::gauge;\nuse metrics::counter::{Counter,StdCounter};\n\nuse metrics::meter::{Meter,StdMeter};\nuse metrics::metric::Metric;\n\n\nfn main() {\n \/\/ Create a metric w val 0\n let mut g1: gauge::Gauge = gauge::Gauge{value: 0};\n println!(\"{}\", g1.value);\n g1.update(100);\n\n \/\/ Get a snapshot of it to g2\n let g2: gauge::Gauge = g1.snapshot();\n\n \/\/ Update g1 to 200\n g1.update(200);\n\n println!(\"g1 {} g2 {}\", g1.value, g2.value);\n\n println!(\"{}\", g1.value);\n\n let mut c1 = StdCounter::new();\n c1.inc(1);\n c1.inc(5);\n println!(\"{}\", c1.value);\n\n let mut meter = StdMeter::new();\n meter.mark(3i64);\n meter.tick();\n println!(\"Meter count: {}\", 
meter.count());\n println!(\"Meter rate: {}\", meter.rate(1));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Correction to mix hop unwrapped meta array ref boundary<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, AppSettings, SubCommand};\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\nfn help_text(cmds: Vec<String>) -> String {\n let text = format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#, imagbins = cmds.into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }));\n text\n}\n\n\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\n\nfn main() {\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands);\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n 
.unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n let mut result = vec![];\n for command in get_commands().iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(command).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n println!(\"{}\", versionstring);\n }\n }\n\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n debug!(\"Calling with subcommand: {}\", subcommand);\n let subcommand_args : Vec<&str> = match scmd.values_of(\"\") {\n Some(values) => values.collect(),\n None => Vec::new()\n };\n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(42));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1337);\n }\n }\n }\n }\n },\n \/\/ clap ensures we have valid input by exiting if not.\n \/\/ The above case is a catch-all for subcommands,\n \/\/ so nothing else needs to be expexted.\n _ => unreachable!(),\n }\n}\n<commit_msg>Fix exit codes<commit_after>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, AppSettings, SubCommand};\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\nfn help_text(cmds: Vec<String>) -> String {\n let text = format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". 
Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#, imagbins = cmds.into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }));\n text\n}\n\n\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\n\nfn main() {\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands);\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n .unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. 
Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n let mut result = vec![];\n for command in get_commands().iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(command).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n println!(\"{}\", versionstring);\n }\n }\n\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n debug!(\"Calling with subcommand: {}\", subcommand);\n let subcommand_args : Vec<&str> = match scmd.values_of(\"\") {\n Some(values) => values.collect(),\n None => Vec::new()\n };\n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(1));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1);\n }\n }\n }\n }\n },\n \/\/ clap ensures we have valid input by exiting if not.\n \/\/ The above case is a catch-all for subcommands,\n \/\/ so nothing else needs to be expexted.\n _ => unreachable!(),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[No-auto] bin\/core\/tag: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate rustfmt;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::Read;\nuse std::thread;\nuse rustfmt::*;\n\n\/\/ For now, the only supported regression tests are idempotent tests - the input and\n\/\/ output must match exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at least report.\n#[test]\nfn idempotent_tests() {\n println!(\"Idempotent tests:\");\n\n \/\/ Get all files in the tests\/idem directory\n let files = fs::read_dir(\"tests\/idem\").unwrap();\n let files = files.chain(fs::read_dir(\"tests\").unwrap());\n let files = files.chain(fs::read_dir(\"src\/bin\").unwrap());\n \/\/ turn a DirEntry into a String that represents the relative path to the file\n let files = files.map(|e| e.unwrap().path().to_str().unwrap().to_owned());\n \/\/ hack because there's no `IntoIterator` impl for `[T; N]`\n let files = files.chain(Some(\"src\/lib.rs\".to_owned()).into_iter());\n\n \/\/ For each file, run rustfmt and collect the output\n let mut count = 0;\n let mut fails = 0;\n for file_name in files.filter(|f| f.ends_with(\".rs\")) {\n println!(\"Testing '{}'...\", file_name);\n match idempotent_check(file_name) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n }\n\n \/\/ Display results\n println!(\"Ran {} idempotent tests; {} failures.\", count, fails);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Compare output to input.\nfn print_mismatches(result: HashMap<String, String>) {\n for (file_name, fmt_text) in result {\n println!(\"Mismatch in {}.\", file_name);\n println!(\"{}\", fmt_text);\n }\n}\n\n\/\/ Ick, just needed to get a &'static to handle_result.\nstatic HANDLE_RESULT: &'static Fn(HashMap<String, String>) = &handle_result;\n\npub fn idempotent_check(filename: String) -> Result<(), HashMap<String, String>> {\n let args = vec![\"rustfmt\".to_owned(), filename];\n \/\/ this thread is not used for concurrency, but rather to workaround the issue that the passed\n \/\/ function handle needs to have static lifetime. Instead of using a global RefCell, we use\n \/\/ panic to return a result in case of failure. This has the advantage of smoothing the road to\n \/\/ multithreaded rustfmt\n thread::spawn(move || {\n run(args, WriteMode::Return(HANDLE_RESULT));\n }).join().map_err(|any|\n \/\/ i know it is a hashmap\n *any.downcast().unwrap()\n )\n}\n\n\/\/ Compare output to input.\nfn handle_result(result: HashMap<String, String>) {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n let mut f = fs::File::open(&file_name).unwrap();\n let mut text = String::new();\n \/\/ TODO: speedup by running through bytes iterator\n f.read_to_string(&mut text).unwrap();\n if fmt_text != text {\n failures.insert(file_name, fmt_text);\n }\n }\n if !failures.is_empty() {\n panic!(failures);\n }\n}\n<commit_msg>don't create a thread just for panic-isolation<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(catch_panic)]\n\nextern crate rustfmt;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::Read;\nuse std::thread;\nuse rustfmt::*;\n\n\/\/ For now, the only supported regression tests are idempotent tests - the input and\n\/\/ output must match exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at least report.\n#[test]\nfn idempotent_tests() {\n println!(\"Idempotent tests:\");\n\n \/\/ Get all files in the tests\/idem directory\n let files = fs::read_dir(\"tests\/idem\").unwrap();\n let files = files.chain(fs::read_dir(\"tests\").unwrap());\n let files = files.chain(fs::read_dir(\"src\/bin\").unwrap());\n \/\/ turn a DirEntry into a String that represents the relative path to the file\n let files = files.map(|e| e.unwrap().path().to_str().unwrap().to_owned());\n \/\/ hack because there's no `IntoIterator` impl for `[T; N]`\n let files = files.chain(Some(\"src\/lib.rs\".to_owned()).into_iter());\n\n \/\/ For each file, run rustfmt and collect the output\n let mut count = 0;\n let mut fails = 0;\n for file_name in files.filter(|f| f.ends_with(\".rs\")) {\n println!(\"Testing '{}'...\", file_name);\n match idempotent_check(file_name) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n }\n\n \/\/ Display results\n println!(\"Ran {} idempotent tests; {} failures.\", count, fails);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Compare output to input.\nfn print_mismatches(result: HashMap<String, String>) {\n for (file_name, fmt_text) in result {\n println!(\"Mismatch in {}.\", file_name);\n println!(\"{}\", fmt_text);\n }\n}\n\n\/\/ Ick, just needed to get a &'static to handle_result.\nstatic HANDLE_RESULT: &'static Fn(HashMap<String, String>) = &handle_result;\n\npub fn idempotent_check(filename: String) -> Result<(), HashMap<String, String>> {\n let args = vec![\"rustfmt\".to_owned(), filename];\n \/\/ this thread is not used for concurrency, but rather to workaround the issue that the passed\n \/\/ function handle needs to have static lifetime. Instead of using a global RefCell, we use\n \/\/ panic to return a result in case of failure. This has the advantage of smoothing the road to\n \/\/ multithreaded rustfmt\n thread::catch_panic(move || {\n run(args, WriteMode::Return(HANDLE_RESULT));\n }).map_err(|any|\n \/\/ i know it is a hashmap\n *any.downcast().unwrap()\n )\n}\n\n\/\/ Compare output to input.\nfn handle_result(result: HashMap<String, String>) {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n let mut f = fs::File::open(&file_name).unwrap();\n let mut text = String::new();\n \/\/ TODO: speedup by running through bytes iterator\n f.read_to_string(&mut text).unwrap();\n if fmt_text != text {\n failures.insert(file_name, fmt_text);\n }\n }\n if !failures.is_empty() {\n panic!(failures);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(core, io)]\nextern crate hyper;\n#[macro_use] extern crate log;\n\nuse std::old_io::util::copy;\nuse std::old_io::net::ip::Ipv4Addr;\n\nuse hyper::{Get, Post};\nuse hyper::header::ContentLength;\nuse hyper::server::{Server, Request, Response};\nuse hyper::uri::RequestUri::AbsolutePath;\n\nmacro_rules! 
try_return(\n ($e:expr) => {{\n match $e {\n Ok(v) => v,\n Err(e) => { error!(\"Error: {}\", e); return; }\n }\n }}\n);\n\nfn echo(mut req: Request, mut res: Response) {\n match req.uri {\n AbsolutePath(ref path) => match (&req.method, &path[]) {\n (&Get, \"\/\") | (&Get, \"\/echo\") => {\n let out = b\"Try POST \/echo\";\n\n res.headers_mut().set(ContentLength(out.len() as u64));\n let mut res = try_return!(res.start());\n try_return!(res.write_all(out));\n try_return!(res.end());\n return;\n },\n (&Post, \"\/echo\") => (), \/\/ fall through, fighting mutable borrows\n _ => {\n *res.status_mut() = hyper::NotFound;\n try_return!(res.start().and_then(|res| res.end()));\n return;\n }\n },\n _ => {\n try_return!(res.start().and_then(|res| res.end()));\n return;\n }\n };\n\n let mut res = try_return!(res.start());\n try_return!(copy(&mut req, &mut res));\n try_return!(res.end());\n}\n\nfn main() {\n let server = Server::http(Ipv4Addr(127, 0, 0, 1), 1337);\n let mut listening = server.listen(echo).unwrap();\n println!(\"Listening on http:\/\/127.0.0.1:1337\");\n listening.await();\n}\n<commit_msg>fix(rustup): fix unused_feature warning in example server<commit_after>#![feature(io)]\nextern crate hyper;\n#[macro_use] extern crate log;\n\nuse std::old_io::util::copy;\nuse std::old_io::net::ip::Ipv4Addr;\n\nuse hyper::{Get, Post};\nuse hyper::header::ContentLength;\nuse hyper::server::{Server, Request, Response};\nuse hyper::uri::RequestUri::AbsolutePath;\n\nmacro_rules! try_return(\n ($e:expr) => {{\n match $e {\n Ok(v) => v,\n Err(e) => { error!(\"Error: {}\", e); return; }\n }\n }}\n);\n\nfn echo(mut req: Request, mut res: Response) {\n match req.uri {\n AbsolutePath(ref path) => match (&req.method, &path[]) {\n (&Get, \"\/\") | (&Get, \"\/echo\") => {\n let out = b\"Try POST \/echo\";\n\n res.headers_mut().set(ContentLength(out.len() as u64));\n let mut res = try_return!(res.start());\n try_return!(res.write_all(out));\n try_return!(res.end());\n return;\n },\n (&Post, \"\/echo\") => (), \/\/ fall through, fighting mutable borrows\n _ => {\n *res.status_mut() = hyper::NotFound;\n try_return!(res.start().and_then(|res| res.end()));\n return;\n }\n },\n _ => {\n try_return!(res.start().and_then(|res| res.end()));\n return;\n }\n };\n\n let mut res = try_return!(res.start());\n try_return!(copy(&mut req, &mut res));\n try_return!(res.end());\n}\n\nfn main() {\n let server = Server::http(Ipv4Addr(127, 0, 0, 1), 1337);\n let mut listening = server.listen(echo).unwrap();\n println!(\"Listening on http:\/\/127.0.0.1:1337\");\n listening.await();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>show all elements<commit_after><|endoftext|>"} {"text":"<commit_before>pub mod path;\npub mod file;\npub mod parser;\npub mod json;\n<commit_msg>Add Store type<commit_after>use std::cell::RefCell;\nuse std::collections::HashMap;\n\npub mod path;\npub mod file;\npub mod parser;\npub mod json;\n\nuse storage::file::File;\nuse storage::file::id::FileID;\n\ntype Cache<'a> = HashMap<FileID, RefCell<File<'a>>>;\n\npub struct Store<'a> {\n cache : RefCell<Cache<'a>>,\n}\n\nimpl<'a> Store<'a> {\n\n pub fn new() -> Store<'a> {\n Store {\n cache: RefCell::new(HashMap::new()),\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(asm)]\n#![feature(no_std)]\n#![feature(staged_api)]\n\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\nextern {\n fn memmove(dst: *mut u8, src: *const u8, size: usize);\n fn __rust_allocate(size: usize, align: usize) -> *mut u8;\n fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);\n fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;\n fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize;\n fn __rust_usable_size(size: usize, align: usize) -> usize;\n }\n<commit_msg>feature(no_std) no longer required<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(asm)]\n#![feature(staged_api)]\n\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\nextern {\n fn memmove(dst: *mut u8, src: *const u8, size: usize);\n fn __rust_allocate(size: usize, align: usize) -> *mut u8;\n fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);\n fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;\n fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize;\n fn __rust_usable_size(size: usize, align: usize) -> usize;\n }\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. 
The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n store.get(store_id)\n .map(|o| o.map(Task::new))\n .map_err_into(TodoErrorKind::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::retrieve_from_string(store, line)\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let 
store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<commit_msg>Return the imported string as well<commit_after>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, String, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, line, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. 
The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n store.get(store_id)\n .map(|o| o.map(Task::new))\n .map_err_into(TodoErrorKind::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::retrieve_from_string(store, line)\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let 
store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Suppress a warning when compiling the Rust code<commit_after><|endoftext|>"} {"text":"<commit_before>use scaly::containers::Ref;\nuse scaly::io::Stream;\nuse scaly::memory::Region;\nuse scaly::Equal;\nuse scaly::{Page, String, StringBuilder};\n\npub struct Lexer {\n pub token: Ref<Token>,\n character: char,\n stream: *mut Stream,\n is_at_end: bool,\n previous_line: usize,\n previous_column: usize,\n pub line: usize,\n pub column: usize,\n}\n\nimpl Lexer {\n pub fn new(_pr: &Region, _rp: *mut Page, stream: *mut Stream) -> Ref<Lexer> {\n let _r = Region::create(_pr);\n let _token_page = unsafe { (*_rp).allocate_exclusive_page() };\n unsafe { (*_token_page).reset() };\n let mut lexer = Ref::new(\n _rp,\n Lexer {\n stream: stream,\n is_at_end: false,\n token: Ref::new(_token_page, Token::InvalidToken),\n character: 0 as char,\n previous_line: 1,\n previous_column: 0,\n line: 1,\n column: 0,\n },\n );\n lexer.read_character();\n lexer.advance(&_r);\n lexer\n }\n\n fn read_character(&mut self) {\n unsafe {\n let read_result: i32 = (*self.stream).read_byte();\n if read_result == -1 {\n self.is_at_end = true;\n self.character = 0 as char;\n } else {\n self.character = read_result as u8 as char;\n }\n }\n }\n\n pub fn advance(&mut self, _pr: &Region) {\n let _r = Region::create(_pr);\n self.skip_whitespace();\n self.previous_line = self.line;\n self.previous_column = self.previous_column;\n if self.is_at_end {\n return;\n }\n\n let c = self.character;\n\n if ((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_identifier(&_r, _token_page);\n }\n return;\n }\n\n if (c >= '0') && (c <= '9') {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n 
(*_token_page).reset();\n }\n self.token = self.scan_numeric_literal(&_r, _token_page);\n }\n return;\n }\n\n match c {\n '+' | '-' | '*' | '\/' | '=' | '%' | '&' | '|' | '^' | '~' | '<' | '>' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_operator(&_r, _token_page);\n }\n\n '\\\"' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_string_literal(&_r, _token_page);\n }\n\n '\\'' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_character_literal(&_r, _token_page);\n }\n\n '{' | '}' | '(' | ')' | '[' | ']' | '.' | ',' | ':' | ';' | '?' | '!' | '@' | '#'\n | '$' | '_' | '`' => {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = Ref::new(\n _token_page,\n Token::Punctuation(String::from_character(Page::own(self), c)),\n );\n }\n self.read_character();\n self.column = self.column + 1;\n }\n\n _ => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = Ref::new(_token_page, Token::InvalidToken);\n }\n }\n }\n\n fn scan_identifier(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut name: Ref<StringBuilder> = StringBuilder::from_character(_r.page, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Identifier(name.to_string(_rp)));\n }\n\n let c = self.character;\n if ((c >= 'a') && (c <= 'z'))\n || ((c >= 'A') && (c <= 'Z'))\n || ((c >= '0') && (c <= '9'))\n || (c == '_')\n {\n name.append_character(c);\n } else {\n return Ref::new(_rp, Token::Identifier(name.to_string(_rp)));\n }\n }\n }\n\n fn scan_operator(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut operation: Ref<StringBuilder> =\n StringBuilder::from_character(_r.page, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Identifier(operation.to_string(_rp)));\n }\n\n match self.character {\n '+' | '-' | '*' | '\/' | '=' | '%' | '&' | '|' | '^' | '~' | '<' | '>' => {\n operation.append_character(self.character)\n }\n\n _ => return Ref::new(_rp, Token::Identifier(operation.to_string(_rp))),\n }\n }\n }\n\n fn scan_string_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::new(_r.page);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::InvalidToken);\n }\n\n match self.character {\n '\\\"' => {\n self.read_character();\n self.column = self.column + 1;\n return Ref::new(_rp, Token::Literal(Literal::String(value.to_string(_rp))));\n }\n\n '\\\\' => {\n self.read_character();\n self.column = self.column + 1;\n match self.character {\n '\\\"' | '\\\\' | '\\'' => {\n value.append_character('\\\\');\n value.append_character(self.character);\n }\n 'n' => {\n value.append_character('\\\\');\n value.append_character('n');\n }\n 'r' => {\n value.append_character('\\\\');\n value.append_character('r');\n }\n 't' => {\n value.append_character('\\\\');\n value.append_character('t');\n }\n '0' => {\n value.append_character('\\\\');\n value.append_character('0');\n }\n _ => return Ref::new(_rp, Token::InvalidToken),\n 
}\n }\n _ => value.append_character(self.character),\n }\n }\n }\n\n fn scan_character_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::new(_r.page);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::InvalidToken);\n }\n\n match self.character {\n '\\'' => {\n self.read_character();\n self.column = self.column + 1;\n return Ref::new(\n _rp,\n Token::Literal(Literal::Character(value.to_string(_rp))),\n );\n }\n\n '\\\\' => {\n self.read_character();\n self.column = self.column + 1;\n match self.character {\n '\\\"' | '\\\\' | '\\'' => {\n value.append_character('\\\\');\n value.append_character(self.character);\n }\n 'n' => {\n value.append_character('\\\\');\n value.append_character('n');\n }\n 'r' => {\n value.append_character('\\\\');\n value.append_character('r');\n }\n 't' => {\n value.append_character('\\\\');\n value.append_character('t');\n }\n '0' => {\n value.append_character('\\\\');\n value.append_character('0');\n }\n _ => return Ref::new(_rp, Token::InvalidToken),\n }\n }\n _ => value.append_character(self.character),\n }\n }\n }\n\n fn scan_numeric_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::from_character(_rp, self.character);\n\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n if self.character == 'x' {\n return self.scan_hex_literal(&_r, _rp);\n } else {\n self.read_character();\n self.column = self.column + 1;\n }\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n\n let c = self.character;\n if (c >= '0') && (c <= '9') {\n value.append_character(self.character)\n } else {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n }\n }\n\n fn scan_hex_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::from_character(_rp, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Hex(value.to_string(_rp))));\n }\n\n let c = self.character;\n if ((c >= '0') && (c <= '9'))\n || ((c >= 'a') && (c <= 'f'))\n || ((c >= 'A') && (c <= 'F'))\n {\n value.append_character(self.character)\n } else {\n return Ref::new(_rp, Token::Literal(Literal::Hex(value.to_string(_rp))));\n }\n }\n }\n\n fn skip_whitespace(&mut self) {\n loop {\n if self.is_at_end {\n return;\n }\n\n match self.character {\n ' ' => {\n self.read_character();;\n self.column = self.column + 1;\n continue;\n }\n\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n continue;\n }\n\n '\/' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end {\n return;\n }\n\n if self.character == '\/' {\n self.handle_single_line_comment();\n } else {\n if self.character == '*' {\n self.handle_multi_line_comment();\n } else {\n return;\n }\n }\n }\n\n _ => return,\n }\n }\n }\n\n fn 
handle_single_line_comment(&mut self) {\n loop {\n if self.is_at_end() {\n return;\n }\n\n match self.character {\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n return;\n }\n\n _ => {\n self.read_character();\n self.column = self.column + 1;\n continue;\n }\n }\n }\n }\n\n fn handle_multi_line_comment(&mut self) {\n loop {\n if self.is_at_end() {\n return;\n }\n\n match self.character {\n '\/' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return;\n } else {\n if self.character == '*' {\n self.handle_multi_line_comment();\n } else {\n return;\n }\n }\n }\n\n '*' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return;\n } else {\n if self.character == '\/' {\n self.read_character();\n self.column = self.column + 1;\n return;\n }\n }\n }\n\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n return;\n }\n\n _ => {\n self.read_character();\n self.column = self.column + 1;\n continue;\n }\n }\n }\n }\n\n pub fn parse_keyword(&self, fixed_string: String) -> bool {\n match *self.token {\n Token::Identifier(name) => return name.equals(&fixed_string),\n _ => return false,\n }\n }\n\n pub fn parse_identifier(&self, _rp: *mut Page) -> Option<String> {\n match *self.token {\n Token::Identifier(name) => return Some(String::copy(_rp, name)),\n _ => return None,\n }\n }\n\n pub fn parse_punctuation(&self, fixed_string: String) -> bool {\n match *self.token {\n Token::Punctuation(name) => return name.equals(&fixed_string),\n _ => return false,\n }\n }\n\n pub fn parse_literal(&self, _rp: *mut Page) -> Option<Literal> {\n match *self.token {\n Token::Literal(Literal::String(name)) => Some(Literal::String(String::copy(_rp, name))),\n Token::Literal(Literal::Character(name)) => {\n Some(Literal::Character(String::copy(_rp, name)))\n }\n Token::Literal(Literal::Numeric(name)) => {\n Some(Literal::Numeric(String::copy(_rp, name)))\n }\n Token::Literal(Literal::Hex(name)) => Some(Literal::Hex(String::copy(_rp, name))),\n _ => None,\n }\n }\n\n pub fn is_at_end(&self) -> bool {\n self.is_at_end\n }\n\n pub fn get_position(&self) -> Position {\n Position {\n line: self.line,\n column: self.column,\n }\n }\n\n pub fn get_previous_position(&self) -> Position {\n Position {\n line: self.previous_line,\n column: self.previous_column,\n }\n }\n}\n\n#[derive(Copy, Clone)]\npub struct Position {\n pub line: usize,\n pub column: usize,\n}\n\n#[derive(Copy, Clone)]\npub enum Token {\n InvalidToken,\n Identifier(String),\n Literal(Literal),\n Punctuation(String),\n}\n\n#[derive(Copy, Clone)]\npub enum Literal {\n String(String),\n Character(String),\n Numeric(String),\n Hex(String),\n}\n<commit_msg>small lexer fixes<commit_after>use scaly::containers::Ref;\nuse scaly::io::Stream;\nuse scaly::memory::Region;\nuse scaly::Equal;\nuse scaly::{Page, String, StringBuilder};\n\npub struct Lexer {\n pub token: Ref<Token>,\n character: char,\n stream: *mut Stream,\n is_at_end: bool,\n previous_line: usize,\n previous_column: usize,\n pub line: usize,\n pub column: usize,\n}\n\nimpl Lexer {\n pub fn new(_pr: &Region, _rp: *mut Page, stream: *mut Stream) -> Ref<Lexer> {\n let _r = 
Region::create(_pr);\n let _token_page = unsafe { (*_rp).allocate_exclusive_page() };\n unsafe { (*_token_page).reset() };\n let mut lexer = Ref::new(\n _rp,\n Lexer {\n stream: stream,\n is_at_end: false,\n token: Ref::new(_token_page, Token::InvalidToken),\n character: 0 as char,\n previous_line: 1,\n previous_column: 0,\n line: 1,\n column: 0,\n },\n );\n lexer.read_character();\n lexer.advance(&_r);\n lexer\n }\n\n fn read_character(&mut self) {\n unsafe {\n let read_result: i32 = (*self.stream).read_byte();\n if read_result == -1 {\n self.is_at_end = true;\n self.character = 0 as char;\n } else {\n self.character = read_result as u8 as char;\n }\n }\n }\n\n pub fn advance(&mut self, _pr: &Region) {\n let _r = Region::create(_pr);\n self.skip_whitespace();\n self.previous_line = self.line;\n self.previous_column = self.column;\n if self.is_at_end {\n return;\n }\n\n let c = self.character;\n\n if ((c >= 'a') && (c <= 'z')) || ((c >= 'A') && (c <= 'Z')) {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_identifier(&_r, _token_page);\n }\n return;\n }\n\n if (c >= '0') && (c <= '9') {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_numeric_literal(&_r, _token_page);\n }\n return;\n }\n\n match c {\n '+' | '-' | '*' | '\/' | '=' | '%' | '&' | '|' | '^' | '~' | '<' | '>' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_operator(&_r, _token_page);\n }\n\n '\\\"' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_string_literal(&_r, _token_page);\n }\n\n '\\'' => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = self.scan_character_literal(&_r, _token_page);\n }\n\n '{' | '}' | '(' | ')' | '[' | ']' | '.' | ',' | ':' | ';' | '?' | '!' 
| '@' | '#'\n | '$' | '_' | '`' => {\n {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = Ref::new(\n _token_page,\n Token::Punctuation(String::from_character(Page::own(self), c)),\n );\n }\n self.read_character();\n self.column = self.column + 1;\n }\n\n _ => {\n let _token_page = Page::own(&self.token);\n unsafe {\n (*_token_page).reset();\n }\n self.token = Ref::new(_token_page, Token::InvalidToken);\n }\n }\n }\n\n fn scan_identifier(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut name: Ref<StringBuilder> = StringBuilder::from_character(_r.page, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Identifier(name.to_string(_rp)));\n }\n\n let c = self.character;\n if ((c >= 'a') && (c <= 'z'))\n || ((c >= 'A') && (c <= 'Z'))\n || ((c >= '0') && (c <= '9'))\n || (c == '_')\n {\n name.append_character(c);\n } else {\n return Ref::new(_rp, Token::Identifier(name.to_string(_rp)));\n }\n }\n }\n\n fn scan_operator(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut operation: Ref<StringBuilder> =\n StringBuilder::from_character(_r.page, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Identifier(operation.to_string(_rp)));\n }\n\n match self.character {\n '+' | '-' | '*' | '\/' | '=' | '%' | '&' | '|' | '^' | '~' | '<' | '>' => {\n operation.append_character(self.character)\n }\n\n _ => return Ref::new(_rp, Token::Identifier(operation.to_string(_rp))),\n }\n }\n }\n\n fn scan_string_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::new(_r.page);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::InvalidToken);\n }\n\n match self.character {\n '\\\"' => {\n self.read_character();\n self.column = self.column + 1;\n return Ref::new(_rp, Token::Literal(Literal::String(value.to_string(_rp))));\n }\n\n '\\\\' => {\n self.read_character();\n self.column = self.column + 1;\n match self.character {\n '\\\"' | '\\\\' | '\\'' => {\n value.append_character('\\\\');\n value.append_character(self.character);\n }\n 'n' => {\n value.append_character('\\\\');\n value.append_character('n');\n }\n 'r' => {\n value.append_character('\\\\');\n value.append_character('r');\n }\n 't' => {\n value.append_character('\\\\');\n value.append_character('t');\n }\n '0' => {\n value.append_character('\\\\');\n value.append_character('0');\n }\n _ => return Ref::new(_rp, Token::InvalidToken),\n }\n }\n _ => value.append_character(self.character),\n }\n }\n }\n\n fn scan_character_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::new(_r.page);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::InvalidToken);\n }\n\n match self.character {\n '\\'' => {\n self.read_character();\n self.column = self.column + 1;\n return Ref::new(\n _rp,\n Token::Literal(Literal::Character(value.to_string(_rp))),\n );\n }\n\n '\\\\' => {\n self.read_character();\n self.column = self.column + 1;\n match self.character {\n '\\\"' | '\\\\' | '\\'' => {\n 
value.append_character('\\\\');\n value.append_character(self.character);\n }\n 'n' => {\n value.append_character('\\\\');\n value.append_character('n');\n }\n 'r' => {\n value.append_character('\\\\');\n value.append_character('r');\n }\n 't' => {\n value.append_character('\\\\');\n value.append_character('t');\n }\n '0' => {\n value.append_character('\\\\');\n value.append_character('0');\n }\n _ => return Ref::new(_rp, Token::InvalidToken),\n }\n }\n _ => value.append_character(self.character),\n }\n }\n }\n\n fn scan_numeric_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::from_character(_rp, self.character);\n\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n if self.character == 'x' {\n return self.scan_hex_literal(&_r, _rp);\n }\n\n loop {\n let c = self.character;\n if (c >= '0') && (c <= '9') {\n value.append_character(self.character)\n } else {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Numeric(value.to_string(_rp))));\n }\n }\n }\n\n fn scan_hex_literal(&mut self, _pr: &Region, _rp: *mut Page) -> Ref<Token> {\n let _r = Region::create(_pr);\n let mut value: Ref<StringBuilder> = StringBuilder::from_character(_rp, self.character);\n\n loop {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return Ref::new(_rp, Token::Literal(Literal::Hex(value.to_string(_rp))));\n }\n\n let c = self.character;\n if ((c >= '0') && (c <= '9'))\n || ((c >= 'a') && (c <= 'f'))\n || ((c >= 'A') && (c <= 'F'))\n {\n value.append_character(self.character)\n } else {\n return Ref::new(_rp, Token::Literal(Literal::Hex(value.to_string(_rp))));\n }\n }\n }\n\n fn skip_whitespace(&mut self) {\n loop {\n if self.is_at_end {\n return;\n }\n\n match self.character {\n ' ' => {\n self.read_character();;\n self.column = self.column + 1;\n continue;\n }\n\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n continue;\n }\n\n '\/' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end {\n return;\n }\n\n if self.character == '\/' {\n self.handle_single_line_comment();\n } else {\n if self.character == '*' {\n self.handle_multi_line_comment();\n } else {\n return;\n }\n }\n }\n\n _ => return,\n }\n }\n }\n\n fn handle_single_line_comment(&mut self) {\n loop {\n if self.is_at_end() {\n return;\n }\n\n match self.character {\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n return;\n }\n\n _ => {\n self.read_character();\n self.column = self.column + 1;\n continue;\n }\n }\n }\n }\n\n fn handle_multi_line_comment(&mut self) {\n loop {\n if self.is_at_end() {\n return;\n }\n\n match self.character {\n '\/' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return;\n } else {\n if self.character == '*' {\n self.handle_multi_line_comment();\n } else {\n return;\n }\n }\n 
}\n\n '*' => {\n self.read_character();\n self.column = self.column + 1;\n\n if self.is_at_end() {\n return;\n } else {\n if self.character == '\/' {\n self.read_character();\n self.column = self.column + 1;\n return;\n }\n }\n }\n\n '\\t' => {\n self.read_character();\n self.column = self.column + 4;\n continue;\n }\n\n '\\r' => {\n self.read_character();\n continue;\n }\n\n '\\n' => {\n self.read_character();\n self.column = 1;\n self.line = self.line + 1;\n return;\n }\n\n _ => {\n self.read_character();\n self.column = self.column + 1;\n continue;\n }\n }\n }\n }\n\n pub fn parse_keyword(&self, fixed_string: String) -> bool {\n match *self.token {\n Token::Identifier(name) => return name.equals(&fixed_string),\n _ => return false,\n }\n }\n\n pub fn parse_identifier(&self, _rp: *mut Page) -> Option<String> {\n match *self.token {\n Token::Identifier(name) => return Some(String::copy(_rp, name)),\n _ => return None,\n }\n }\n\n pub fn parse_punctuation(&self, fixed_string: String) -> bool {\n match *self.token {\n Token::Punctuation(name) => return name.equals(&fixed_string),\n _ => return false,\n }\n }\n\n pub fn parse_literal(&self, _rp: *mut Page) -> Option<Literal> {\n match *self.token {\n Token::Literal(Literal::String(name)) => Some(Literal::String(String::copy(_rp, name))),\n Token::Literal(Literal::Character(name)) => {\n Some(Literal::Character(String::copy(_rp, name)))\n }\n Token::Literal(Literal::Numeric(name)) => {\n Some(Literal::Numeric(String::copy(_rp, name)))\n }\n Token::Literal(Literal::Hex(name)) => Some(Literal::Hex(String::copy(_rp, name))),\n _ => None,\n }\n }\n\n pub fn is_at_end(&self) -> bool {\n self.is_at_end\n }\n\n pub fn get_position(&self) -> Position {\n Position {\n line: self.line,\n column: self.column,\n }\n }\n\n pub fn get_previous_position(&self) -> Position {\n Position {\n line: self.previous_line,\n column: self.previous_column,\n }\n }\n}\n\n#[derive(Copy, Clone)]\npub struct Position {\n pub line: usize,\n pub column: usize,\n}\n\n#[derive(Copy, Clone)]\npub enum Token {\n InvalidToken,\n Identifier(String),\n Literal(Literal),\n Punctuation(String),\n}\n\n#[derive(Copy, Clone)]\npub enum Literal {\n String(String),\n Character(String),\n Numeric(String),\n Hex(String),\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Characters and their corresponding confusables were collected from\n\/\/ http:\/\/www.unicode.org\/Public\/security\/revision-06\/confusables.txt\n\nuse codemap::mk_sp as make_span;\nuse errors::DiagnosticBuilder;\nuse super::StringReader;\n\nconst UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[\n ('ߺ', \"Nko Lajanyalan\", '_'),\n ('﹍', \"Dashed Low Line\", '_'),\n ('﹎', \"Centreline Low Line\", '_'),\n ('﹏', \"Wavy Low Line\", '_'),\n ('‐', \"Hyphen\", '-'),\n ('‑', \"Non-Breaking Hyphen\", '-'),\n ('‒', \"Figure Dash\", '-'),\n ('–', \"En Dash\", '-'),\n ('—', \"Em Dash\", '-'),\n ('﹘', \"Small Em Dash\", '-'),\n ('⁃', \"Hyphen Bullet\", '-'),\n ('˗', \"Modifier Letter Minus Sign\", '-'),\n ('−', \"Minus Sign\", '-'),\n ('ー', \"Katakana-Hiragana Prolonged Sound Mark\", '-'),\n ('٫', \"Arabic Decimal Separator\", ','),\n ('‚', \"Single Low-9 Quotation Mark\", ','),\n ('ꓹ', \"Lisu Letter Tone Na Po\", ','),\n (',', \"Fullwidth Comma\", ','),\n (';', \"Greek Question Mark\", ';'),\n (';', \"Fullwidth Semicolon\", ';'),\n ('ः', \"Devanagari Sign Visarga\", ':'),\n ('ઃ', \"Gujarati Sign Visarga\", ':'),\n (':', \"Fullwidth Colon\", ':'),\n ('։', \"Armenian Full Stop\", ':'),\n ('܃', \"Syriac Supralinear Colon\", ':'),\n ('܄', \"Syriac Sublinear Colon\", ':'),\n ('︰', \"Presentation Form For Vertical Two Dot Leader\", ':'),\n ('᠃', \"Mongolian Full Stop\", ':'),\n ('᠉', \"Mongolian Manchu Full Stop\", ':'),\n ('⁚', \"Two Dot Punctuation\", ':'),\n ('׃', \"Hebrew Punctuation Sof Pasuq\", ':'),\n ('˸', \"Modifier Letter Raised Colon\", ':'),\n ('꞉', \"Modifier Letter Colon\", ':'),\n ('∶', \"Ratio\", ':'),\n ('ː', \"Modifier Letter Triangular Colon\", ':'),\n ('ꓽ', \"Lisu Letter Tone Mya Jeu\", ':'),\n ('!', \"Fullwidth Exclamation Mark\", '!'),\n ('ǃ', \"Latin Letter Retroflex Click\", '!'),\n ('ʔ', \"Latin Letter Glottal Stop\", '?'),\n ('ॽ', \"Devanagari Letter Glottal Stop\", '?'),\n ('Ꭾ', \"Cherokee Letter He\", '?'),\n ('?', \"Fullwidth Question Mark\", '?'),\n ('𝅭', \"Musical Symbol Combining Augmentation Dot\", '.'),\n ('․', \"One Dot Leader\", '.'),\n ('۔', \"Arabic Full Stop\", '.'),\n ('܁', \"Syriac Supralinear Full Stop\", '.'),\n ('܂', \"Syriac Sublinear Full Stop\", '.'),\n ('꘎', \"Vai Full Stop\", '.'),\n ('𐩐', \"Kharoshthi Punctuation Dot\", '.'),\n ('·', \"Middle Dot\", '.'),\n ('٠', \"Arabic-Indic Digit Zero\", '.'),\n ('۰', \"Extended Arabic-Indic Digit Zero\", '.'),\n ('ꓸ', \"Lisu Letter Tone Mya Ti\", '.'),\n ('。', \"Ideographic Full Stop\", '.'),\n ('・', \"Katakana Middle Dot\", '.'),\n ('՝', \"Armenian Comma\", '\\''),\n (''', \"Fullwidth Apostrophe\", '\\''),\n ('‘', \"Left Single Quotation Mark\", '\\''),\n ('’', \"Right Single Quotation Mark\", '\\''),\n ('‛', \"Single High-Reversed-9 Quotation Mark\", '\\''),\n ('′', \"Prime\", '\\''),\n ('‵', \"Reversed Prime\", '\\''),\n ('՚', \"Armenian Apostrophe\", '\\''),\n ('׳', \"Hebrew Punctuation Geresh\", '\\''),\n ('`', \"Greek Varia\", '\\''),\n ('`', \"Fullwidth Grave Accent\", '\\''),\n ('΄', \"Greek Tonos\", '\\''),\n ('´', \"Greek Oxia\", '\\''),\n ('᾽', \"Greek Koronis\", '\\''),\n ('᾿', \"Greek Psili\", '\\''),\n ('῾', \"Greek Dasia\", '\\''),\n ('ʹ', \"Modifier Letter Prime\", '\\''),\n ('ʹ', \"Greek Numeral Sign\", '\\''),\n ('ˊ', \"Modifier Letter Acute Accent\", '\\''),\n ('ˋ', \"Modifier Letter Grave Accent\", '\\''),\n ('˴', \"Modifier Letter Middle Grave Accent\", '\\''),\n ('ʻ', \"Modifier Letter 
Turned Comma\", '\\''),\n ('ʽ', \"Modifier Letter Reversed Comma\", '\\''),\n ('ʼ', \"Modifier Letter Apostrophe\", '\\''),\n ('ʾ', \"Modifier Letter Right Half Ring\", '\\''),\n ('ꞌ', \"Latin Small Letter Saltillo\", '\\''),\n ('י', \"Hebrew Letter Yod\", '\\''),\n ('ߴ', \"Nko High Tone Apostrophe\", '\\''),\n ('ߵ', \"Nko Low Tone Apostrophe\", '\\''),\n ('"', \"Fullwidth Quotation Mark\", '\"'),\n ('“', \"Left Double Quotation Mark\", '\"'),\n ('”', \"Right Double Quotation Mark\", '\"'),\n ('‟', \"Double High-Reversed-9 Quotation Mark\", '\"'),\n ('″', \"Double Prime\", '\"'),\n ('‶', \"Reversed Double Prime\", '\"'),\n ('〃', \"Ditto Mark\", '\"'),\n ('״', \"Hebrew Punctuation Gershayim\", '\"'),\n ('˝', \"Double Acute Accent\", '\"'),\n ('ʺ', \"Modifier Letter Double Prime\", '\"'),\n ('˶', \"Modifier Letter Middle Double Acute Accent\", '\"'),\n ('˵', \"Modifier Letter Middle Double Grave Accent\", '\"'),\n ('ˮ', \"Modifier Letter Double Apostrophe\", '\"'),\n ('ײ', \"Hebrew Ligature Yiddish Double Yod\", '\"'),\n ('❞', \"Heavy Double Comma Quotation Mark Ornament\", '\"'),\n ('❝', \"Heavy Double Turned Comma Quotation Mark Ornament\", '\"'),\n ('[', \"Fullwidth Left Square Bracket\", '('),\n ('❨', \"Medium Left Parenthesis Ornament\", '('),\n ('❲', \"Light Left Tortoise Shell Bracket Ornament\", '('),\n ('〔', \"Left Tortoise Shell Bracket\", '('),\n ('﴾', \"Ornate Left Parenthesis\", '('),\n ('(', \"Fullwidth Left Parenthesis\", '('),\n (']', \"Fullwidth Right Square Bracket\", ')'),\n ('❩', \"Medium Right Parenthesis Ornament\", ')'),\n ('❳', \"Light Right Tortoise Shell Bracket Ornament\", ')'),\n ('〕', \"Right Tortoise Shell Bracket\", ')'),\n ('﴿', \"Ornate Right Parenthesis\", ')'),\n (')', \"Fullwidth Right Parenthesis\", ')'),\n ('❴', \"Medium Left Curly Bracket Ornament\", '{'),\n ('❵', \"Medium Right Curly Bracket Ornament\", '}'),\n ('⁎', \"Low Asterisk\", '*'),\n ('٭', \"Arabic Five Pointed Star\", '*'),\n ('∗', \"Asterisk Operator\", '*'),\n ('᜵', \"Philippine Single Punctuation\", '\/'),\n ('⁁', \"Caret Insertion Point\", '\/'),\n ('∕', \"Division Slash\", '\/'),\n ('⁄', \"Fraction Slash\", '\/'),\n ('╱', \"Box Drawings Light Diagonal Upper Right To Lower Left\", '\/'),\n ('⟋', \"Mathematical Rising Diagonal\", '\/'),\n ('⧸', \"Big Solidus\", '\/'),\n ('㇓', \"Cjk Stroke Sp\", '\/'),\n ('〳', \"Vertical Kana Repeat Mark Upper Half\", '\/'),\n ('丿', \"Cjk Unified Ideograph-4E3F\", '\/'),\n ('⼃', \"Kangxi Radical Slash\", '\/'),\n ('\', \"Fullwidth Reverse Solidus\", '\\\\'),\n ('﹨', \"Small Reverse Solidus\", '\\\\'),\n ('∖', \"Set Minus\", '\\\\'),\n ('⟍', \"Mathematical Falling Diagonal\", '\\\\'),\n ('⧵', \"Reverse Solidus Operator\", '\\\\'),\n ('⧹', \"Big Reverse Solidus\", '\\\\'),\n ('、', \"Ideographic Comma\", '\\\\'),\n ('ヽ', \"Katakana Iteration Mark\", '\\\\'),\n ('㇔', \"Cjk Stroke D\", '\\\\'),\n ('丶', \"Cjk Unified Ideograph-4E36\", '\\\\'),\n ('⼂', \"Kangxi Radical Dot\", '\\\\'),\n ('ꝸ', \"Latin Small Letter Um\", '&'),\n ('﬩', \"Hebrew Letter Alternative Plus Sign\", '+'),\n ('‹', \"Single Left-Pointing Angle Quotation Mark\", '<'),\n ('❮', \"Heavy Left-Pointing Angle Quotation Mark Ornament\", '<'),\n ('˂', \"Modifier Letter Left Arrowhead\", '<'),\n ('〈', \"Left Angle Bracket\", '<'),\n ('《', \"Left Double Angle Bracket\", '<'),\n ('꓿', \"Lisu Punctuation Full Stop\", '='),\n ('›', \"Single Right-Pointing Angle Quotation Mark\", '>'),\n ('❯', \"Heavy Right-Pointing Angle Quotation Mark Ornament\", '>'),\n ('˃', \"Modifier Letter Right Arrowhead\", '>'),\n 
('〉', \"Right Angle Bracket\", '>'),\n ('》', \"Right Double Angle Bracket\", '>'),\n ('Ⲻ', \"Coptic Capital Letter Dialect-P Ni\", '-'),\n ('Ɂ', \"Latin Capital Letter Glottal Stop\", '?'),\n ('Ⳇ', \"Coptic Capital Letter Old Coptic Esh\", '\/'), ];\n\nconst ASCII_ARRAY: &'static [(char, &'static str)] = &[\n ('_', \"Underscore\"),\n ('-', \"Minus\/Hyphen\"),\n (',', \"Comma\"),\n (';', \"Semicolon\"),\n (':', \"Colon\"),\n ('!', \"Exclamation Mark\"),\n ('?', \"Question Mark\"),\n ('.', \"Period\"),\n ('\\'', \"Single Quote\"),\n ('\"', \"Quotation Mark\"),\n ('(', \"Left Parenthesis\"),\n (')', \"Right Parenthesis\"),\n ('{', \"Left Curly Brace\"),\n ('}', \"Right Curly Brace\"),\n ('*', \"Asterisk\"),\n ('\/', \"Slash\"),\n ('\\\\', \"Backslash\"),\n ('&', \"Ampersand\"),\n ('+', \"Plus Sign\"),\n ('<', \"Less-Than Sign\"),\n ('=', \"Equals Sign\"),\n ('>', \"Greater-Than Sign\"), ];\n\npub fn check_for_substitution<'a>(reader: &StringReader<'a>,\n ch: char,\n err: &mut DiagnosticBuilder<'a>) {\n UNICODE_ARRAY\n .iter()\n .find(|&&(c, _, _)| c == ch)\n .map(|&(_, u_name, ascii_char)| {\n let span = make_span(reader.last_pos, reader.pos);\n match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {\n Some(&(ascii_char, ascii_name)) => {\n let msg =\n format!(\"unicode character '{}' ({}) looks much like '{}' ({}), but it's not\",\n ch, u_name, ascii_char, ascii_name);\n err.span_help(span, &msg);\n },\n None => {\n reader\n .span_diagnostic\n .span_bug_no_panic(span,\n &format!(\"substitution character not found for '{}'\", ch));\n }\n }\n });\n}\n<commit_msg>add confusable space characters<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Characters and their corresponding confusables were collected from\n\/\/ http:\/\/www.unicode.org\/Public\/security\/revision-06\/confusables.txt\n\nuse codemap::mk_sp as make_span;\nuse errors::DiagnosticBuilder;\nuse super::StringReader;\n\nconst UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[\n (' ', \"No-Break Space\", ' '),\n (' ', \"Ogham Space Mark\", ' '),\n (' ', \"En Quad\", ' '),\n (' ', \"Em Quad\", ' '),\n (' ', \"En Space\", ' '),\n (' ', \"Em Space\", ' '),\n (' ', \"Three-Per-Em Space\", ' '),\n (' ', \"Four-Per-Em Space\", ' '),\n (' ', \"Six-Per-Em Space\", ' '),\n (' ', \"Figure Space\", ' '),\n (' ', \"Punctuation Space\", ' '),\n (' ', \"Thin Space\", ' '),\n (' ', \"Hair Space\", ' '),\n (' ', \"Narrow No-Break Space\", ' '),\n (' ', \"Medium Mathematical Space\", ' '),\n (' ', \"Ideographic Space\", ' '),\n ('ߺ', \"Nko Lajanyalan\", '_'),\n ('﹍', \"Dashed Low Line\", '_'),\n ('﹎', \"Centreline Low Line\", '_'),\n ('﹏', \"Wavy Low Line\", '_'),\n ('‐', \"Hyphen\", '-'),\n ('‑', \"Non-Breaking Hyphen\", '-'),\n ('‒', \"Figure Dash\", '-'),\n ('–', \"En Dash\", '-'),\n ('—', \"Em Dash\", '-'),\n ('﹘', \"Small Em Dash\", '-'),\n ('⁃', \"Hyphen Bullet\", '-'),\n ('˗', \"Modifier Letter Minus Sign\", '-'),\n ('−', \"Minus Sign\", '-'),\n ('ー', \"Katakana-Hiragana Prolonged Sound Mark\", '-'),\n ('٫', \"Arabic Decimal Separator\", ','),\n ('‚', \"Single Low-9 Quotation Mark\", ','),\n ('ꓹ', \"Lisu Letter Tone Na Po\", ','),\n (',', \"Fullwidth Comma\", ','),\n (';', \"Greek Question Mark\", ';'),\n (';', \"Fullwidth Semicolon\", ';'),\n ('ः', \"Devanagari Sign Visarga\", ':'),\n ('ઃ', \"Gujarati Sign Visarga\", ':'),\n (':', \"Fullwidth Colon\", ':'),\n ('։', \"Armenian Full Stop\", ':'),\n ('܃', \"Syriac Supralinear Colon\", ':'),\n ('܄', \"Syriac Sublinear Colon\", ':'),\n ('︰', \"Presentation Form For Vertical Two Dot Leader\", ':'),\n ('᠃', \"Mongolian Full Stop\", ':'),\n ('᠉', \"Mongolian Manchu Full Stop\", ':'),\n ('⁚', \"Two Dot Punctuation\", ':'),\n ('׃', \"Hebrew Punctuation Sof Pasuq\", ':'),\n ('˸', \"Modifier Letter Raised Colon\", ':'),\n ('꞉', \"Modifier Letter Colon\", ':'),\n ('∶', \"Ratio\", ':'),\n ('ː', \"Modifier Letter Triangular Colon\", ':'),\n ('ꓽ', \"Lisu Letter Tone Mya Jeu\", ':'),\n ('!', \"Fullwidth Exclamation Mark\", '!'),\n ('ǃ', \"Latin Letter Retroflex Click\", '!'),\n ('ʔ', \"Latin Letter Glottal Stop\", '?'),\n ('ॽ', \"Devanagari Letter Glottal Stop\", '?'),\n ('Ꭾ', \"Cherokee Letter He\", '?'),\n ('?', \"Fullwidth Question Mark\", '?'),\n ('𝅭', \"Musical Symbol Combining Augmentation Dot\", '.'),\n ('․', \"One Dot Leader\", '.'),\n ('۔', \"Arabic Full Stop\", '.'),\n ('܁', \"Syriac Supralinear Full Stop\", '.'),\n ('܂', \"Syriac Sublinear Full Stop\", '.'),\n ('꘎', \"Vai Full Stop\", '.'),\n ('𐩐', \"Kharoshthi Punctuation Dot\", '.'),\n ('·', \"Middle Dot\", '.'),\n ('٠', \"Arabic-Indic Digit Zero\", '.'),\n ('۰', \"Extended Arabic-Indic Digit Zero\", '.'),\n ('ꓸ', \"Lisu Letter Tone Mya Ti\", '.'),\n ('。', \"Ideographic Full Stop\", '.'),\n ('・', \"Katakana Middle Dot\", '.'),\n ('՝', \"Armenian Comma\", '\\''),\n (''', \"Fullwidth Apostrophe\", '\\''),\n ('‘', \"Left Single Quotation Mark\", '\\''),\n ('’', \"Right Single Quotation Mark\", '\\''),\n ('‛', \"Single High-Reversed-9 Quotation Mark\", '\\''),\n ('′', \"Prime\", '\\''),\n ('‵', \"Reversed Prime\", '\\''),\n ('՚', \"Armenian Apostrophe\", '\\''),\n ('׳', \"Hebrew 
Punctuation Geresh\", '\\''),\n ('`', \"Greek Varia\", '\\''),\n ('`', \"Fullwidth Grave Accent\", '\\''),\n ('΄', \"Greek Tonos\", '\\''),\n ('´', \"Greek Oxia\", '\\''),\n ('᾽', \"Greek Koronis\", '\\''),\n ('᾿', \"Greek Psili\", '\\''),\n ('῾', \"Greek Dasia\", '\\''),\n ('ʹ', \"Modifier Letter Prime\", '\\''),\n ('ʹ', \"Greek Numeral Sign\", '\\''),\n ('ˊ', \"Modifier Letter Acute Accent\", '\\''),\n ('ˋ', \"Modifier Letter Grave Accent\", '\\''),\n ('˴', \"Modifier Letter Middle Grave Accent\", '\\''),\n ('ʻ', \"Modifier Letter Turned Comma\", '\\''),\n ('ʽ', \"Modifier Letter Reversed Comma\", '\\''),\n ('ʼ', \"Modifier Letter Apostrophe\", '\\''),\n ('ʾ', \"Modifier Letter Right Half Ring\", '\\''),\n ('ꞌ', \"Latin Small Letter Saltillo\", '\\''),\n ('י', \"Hebrew Letter Yod\", '\\''),\n ('ߴ', \"Nko High Tone Apostrophe\", '\\''),\n ('ߵ', \"Nko Low Tone Apostrophe\", '\\''),\n ('"', \"Fullwidth Quotation Mark\", '\"'),\n ('“', \"Left Double Quotation Mark\", '\"'),\n ('”', \"Right Double Quotation Mark\", '\"'),\n ('‟', \"Double High-Reversed-9 Quotation Mark\", '\"'),\n ('″', \"Double Prime\", '\"'),\n ('‶', \"Reversed Double Prime\", '\"'),\n ('〃', \"Ditto Mark\", '\"'),\n ('״', \"Hebrew Punctuation Gershayim\", '\"'),\n ('˝', \"Double Acute Accent\", '\"'),\n ('ʺ', \"Modifier Letter Double Prime\", '\"'),\n ('˶', \"Modifier Letter Middle Double Acute Accent\", '\"'),\n ('˵', \"Modifier Letter Middle Double Grave Accent\", '\"'),\n ('ˮ', \"Modifier Letter Double Apostrophe\", '\"'),\n ('ײ', \"Hebrew Ligature Yiddish Double Yod\", '\"'),\n ('❞', \"Heavy Double Comma Quotation Mark Ornament\", '\"'),\n ('❝', \"Heavy Double Turned Comma Quotation Mark Ornament\", '\"'),\n ('[', \"Fullwidth Left Square Bracket\", '('),\n ('❨', \"Medium Left Parenthesis Ornament\", '('),\n ('❲', \"Light Left Tortoise Shell Bracket Ornament\", '('),\n ('〔', \"Left Tortoise Shell Bracket\", '('),\n ('﴾', \"Ornate Left Parenthesis\", '('),\n ('(', \"Fullwidth Left Parenthesis\", '('),\n (']', \"Fullwidth Right Square Bracket\", ')'),\n ('❩', \"Medium Right Parenthesis Ornament\", ')'),\n ('❳', \"Light Right Tortoise Shell Bracket Ornament\", ')'),\n ('〕', \"Right Tortoise Shell Bracket\", ')'),\n ('﴿', \"Ornate Right Parenthesis\", ')'),\n (')', \"Fullwidth Right Parenthesis\", ')'),\n ('❴', \"Medium Left Curly Bracket Ornament\", '{'),\n ('❵', \"Medium Right Curly Bracket Ornament\", '}'),\n ('⁎', \"Low Asterisk\", '*'),\n ('٭', \"Arabic Five Pointed Star\", '*'),\n ('∗', \"Asterisk Operator\", '*'),\n ('᜵', \"Philippine Single Punctuation\", '\/'),\n ('⁁', \"Caret Insertion Point\", '\/'),\n ('∕', \"Division Slash\", '\/'),\n ('⁄', \"Fraction Slash\", '\/'),\n ('╱', \"Box Drawings Light Diagonal Upper Right To Lower Left\", '\/'),\n ('⟋', \"Mathematical Rising Diagonal\", '\/'),\n ('⧸', \"Big Solidus\", '\/'),\n ('㇓', \"Cjk Stroke Sp\", '\/'),\n ('〳', \"Vertical Kana Repeat Mark Upper Half\", '\/'),\n ('丿', \"Cjk Unified Ideograph-4E3F\", '\/'),\n ('⼃', \"Kangxi Radical Slash\", '\/'),\n ('\', \"Fullwidth Reverse Solidus\", '\\\\'),\n ('﹨', \"Small Reverse Solidus\", '\\\\'),\n ('∖', \"Set Minus\", '\\\\'),\n ('⟍', \"Mathematical Falling Diagonal\", '\\\\'),\n ('⧵', \"Reverse Solidus Operator\", '\\\\'),\n ('⧹', \"Big Reverse Solidus\", '\\\\'),\n ('、', \"Ideographic Comma\", '\\\\'),\n ('ヽ', \"Katakana Iteration Mark\", '\\\\'),\n ('㇔', \"Cjk Stroke D\", '\\\\'),\n ('丶', \"Cjk Unified Ideograph-4E36\", '\\\\'),\n ('⼂', \"Kangxi Radical Dot\", '\\\\'),\n ('ꝸ', \"Latin Small Letter Um\", '&'),\n ('﬩', 
\"Hebrew Letter Alternative Plus Sign\", '+'),\n ('‹', \"Single Left-Pointing Angle Quotation Mark\", '<'),\n ('❮', \"Heavy Left-Pointing Angle Quotation Mark Ornament\", '<'),\n ('˂', \"Modifier Letter Left Arrowhead\", '<'),\n ('〈', \"Left Angle Bracket\", '<'),\n ('《', \"Left Double Angle Bracket\", '<'),\n ('꓿', \"Lisu Punctuation Full Stop\", '='),\n ('›', \"Single Right-Pointing Angle Quotation Mark\", '>'),\n ('❯', \"Heavy Right-Pointing Angle Quotation Mark Ornament\", '>'),\n ('˃', \"Modifier Letter Right Arrowhead\", '>'),\n ('〉', \"Right Angle Bracket\", '>'),\n ('》', \"Right Double Angle Bracket\", '>'),\n ('Ⲻ', \"Coptic Capital Letter Dialect-P Ni\", '-'),\n ('Ɂ', \"Latin Capital Letter Glottal Stop\", '?'),\n ('Ⳇ', \"Coptic Capital Letter Old Coptic Esh\", '\/'), ];\n\nconst ASCII_ARRAY: &'static [(char, &'static str)] = &[\n (' ', \"Space\"),\n ('_', \"Underscore\"),\n ('-', \"Minus\/Hyphen\"),\n (',', \"Comma\"),\n (';', \"Semicolon\"),\n (':', \"Colon\"),\n ('!', \"Exclamation Mark\"),\n ('?', \"Question Mark\"),\n ('.', \"Period\"),\n ('\\'', \"Single Quote\"),\n ('\"', \"Quotation Mark\"),\n ('(', \"Left Parenthesis\"),\n (')', \"Right Parenthesis\"),\n ('{', \"Left Curly Brace\"),\n ('}', \"Right Curly Brace\"),\n ('*', \"Asterisk\"),\n ('\/', \"Slash\"),\n ('\\\\', \"Backslash\"),\n ('&', \"Ampersand\"),\n ('+', \"Plus Sign\"),\n ('<', \"Less-Than Sign\"),\n ('=', \"Equals Sign\"),\n ('>', \"Greater-Than Sign\"), ];\n\npub fn check_for_substitution<'a>(reader: &StringReader<'a>,\n ch: char,\n err: &mut DiagnosticBuilder<'a>) {\n UNICODE_ARRAY\n .iter()\n .find(|&&(c, _, _)| c == ch)\n .map(|&(_, u_name, ascii_char)| {\n let span = make_span(reader.last_pos, reader.pos);\n match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {\n Some(&(ascii_char, ascii_name)) => {\n let msg =\n format!(\"unicode character '{}' ({}) looks much like '{}' ({}), but it's not\",\n ch, u_name, ascii_char, ascii_name);\n err.span_help(span, &msg);\n },\n None => {\n reader\n .span_diagnostic\n .span_bug_no_panic(span,\n &format!(\"substitution character not found for '{}'\", ch));\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before>generate_error_imports!();\n\ngenerate_error_types!(StoreError, StoreErrorKind,\n BackendError => \"Backend Error\",\n NoCommandlineCall => \"No commandline call\"\n);\n\n<commit_msg>imag-store: use generate_error_module!() macro and reexport generated types<commit_after>generate_error_module!(\n generate_error_types!(StoreError, StoreErrorKind,\n BackendError => \"Backend Error\",\n NoCommandlineCall => \"No commandline call\"\n );\n);\n\npub use self::error::StoreError;\npub use self::error::StoreErrorKind;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for category<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to enforce various stylistic guidelines on the Rust codebase.\n\/\/!\n\/\/! Example checks are:\n\/\/!\n\/\/! * No lines over 100 characters\n\/\/! * No tabs\n\/\/! * No trailing whitespace\n\/\/! 
* No CR characters\n\/\/! * No `TODO` or `XXX` directives\n\/\/! * A valid license header is at the top\n\/\/! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests\n\/\/!\n\/\/! A number of these checks can be opted-out of with various directives like\n\/\/! `\/\/ ignore-tidy-linelength`.\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nconst COLS: usize = 100;\nconst LICENSE: &'static str = \"\\\nCopyright <year> The Rust Project Developers. See the COPYRIGHT\nfile at the top-level directory of this distribution and at\nhttp:\/\/rust-lang.org\/COPYRIGHT.\n\nLicensed under the Apache License, Version 2.0 <LICENSE-APACHE or\nhttp:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n<LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\noption. This file may not be copied, modified, or distributed\nexcept according to those terms.\";\n\nconst UNEXPLAINED_IGNORE_DOCTEST_INFO: &str = r#\"unexplained \"```ignore\" doctest; try one:\n\n* make the test actually pass, by adding necessary imports and declarations, or\n* use \"```text\", if the code is not Rust code, or\n* use \"```compile_fail,Ennnn\", if the code is expected to fail at compile time, or\n* use \"```should_panic\", if the code is expected to fail at run time, or\n* use \"```no_run\", if the code should type-check but not necessary linkable\/runnable, or\n* explain it like \"```ignore (cannot-test-this-because-xxxx)\", if the annotation cannot be avoided.\n\n\"#;\n\nconst LLVM_UNREACHABLE_INFO: &str = r\"\\\nC++ code used llvm_unreachable, which triggers undefined behavior\nwhen executed when assertions are disabled.\nUse llvm::report_fatal_error for increased robustness.\";\n\n\/\/\/ Parser states for line_is_url.\n#[derive(PartialEq)]\n#[allow(non_camel_case_types)]\nenum LIUState { EXP_COMMENT_START,\n EXP_LINK_LABEL_OR_URL,\n EXP_URL,\n EXP_END }\n\n\/\/\/ True if LINE appears to be a line comment containing an URL,\n\/\/\/ possibly with a Markdown link label in front, and nothing else.\n\/\/\/ The Markdown link label, if present, may not contain whitespace.\n\/\/\/ Lines of this form are allowed to be overlength, because Markdown\n\/\/\/ offers no way to split a line in the middle of a URL, and the lengths\n\/\/\/ of URLs to external references are beyond our control.\nfn line_is_url(line: &str) -> bool {\n use self::LIUState::*;\n let mut state: LIUState = EXP_COMMENT_START;\n\n for tok in line.split_whitespace() {\n match (state, tok) {\n (EXP_COMMENT_START, \"\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/!\") => state = EXP_LINK_LABEL_OR_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.len() >= 4 && w.starts_with(\"[\") && w.ends_with(\"]:\")\n => state = EXP_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\")\n => state = EXP_END,\n\n (EXP_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\") || w.starts_with(\"..\/\")\n => state = EXP_END,\n\n (_, _) => return false,\n }\n }\n\n state == EXP_END\n}\n\n\/\/\/ True if LINE is allowed to be longer than the normal limit.\n\/\/\/ Currently there is only one exception, for long URLs, but more\n\/\/\/ may be added in the future.\nfn long_line_is_ok(line: &str) -> bool {\n if line_is_url(line) {\n return true;\n }\n\n false\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let mut contents = String::new();\n super::walk(path, &mut super::filter_dirs, &mut |file| {\n let 
filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".rs\", \".py\", \".js\", \".sh\", \".c\", \".cpp\", \".h\"];\n if extensions.iter().all(|e| !filename.ends_with(e)) ||\n filename.starts_with(\".#\") {\n return\n }\n\n contents.truncate(0);\n t!(t!(File::open(file), file).read_to_string(&mut contents));\n\n if contents.is_empty() {\n tidy_error!(bad, \"{}: empty file\", file.display());\n }\n\n let skip_cr = contents.contains(\"ignore-tidy-cr\");\n let skip_tab = contents.contains(\"ignore-tidy-tab\");\n let skip_length = contents.contains(\"ignore-tidy-linelength\");\n let skip_end_whitespace = contents.contains(\"ignore-tidy-end-whitespace\");\n let mut trailing_new_lines = 0;\n for (i, line) in contents.split('\\n').enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n if !skip_length && line.chars().count() > COLS\n && !long_line_is_ok(line) {\n err(&format!(\"line longer than {} chars\", COLS));\n }\n if line.contains(\"\\t\") && !skip_tab {\n err(\"tab character\");\n }\n if !skip_end_whitespace && (line.ends_with(\" \") || line.ends_with(\"\\t\")) {\n err(\"trailing whitespace\");\n }\n if line.contains(\"\\r\") && !skip_cr {\n err(\"CR character\");\n }\n if filename != \"style.rs\" {\n if line.contains(\"TODO\") {\n err(\"TODO is deprecated; use FIXME\")\n }\n if line.contains(\"\/\/\") && line.contains(\" XXX\") {\n err(\"XXX is deprecated; use FIXME\")\n }\n }\n if line.ends_with(\"```ignore\") || line.ends_with(\"```rust,ignore\") {\n err(UNEXPLAINED_IGNORE_DOCTEST_INFO);\n }\n if filename.ends_with(\".cpp\") && line.contains(\"llvm_unreachable\") {\n err(LLVM_UNREACHABLE_INFO);\n }\n if line.is_empty() {\n trailing_new_lines += 1;\n } else {\n trailing_new_lines = 0;\n }\n }\n if !licenseck(file, &contents) {\n tidy_error!(bad, \"{}: incorrect license\", file.display());\n }\n match trailing_new_lines {\n 0 => tidy_error!(bad, \"{}: missing trailing newline\", file.display()),\n 1 | 2 => {}\n n => tidy_error!(bad, \"{}: too many trailing newlines ({})\", file.display(), n),\n };\n })\n}\n\nfn licenseck(file: &Path, contents: &str) -> bool {\n if contents.contains(\"ignore-license\") {\n return true\n }\n let exceptions = [\n \"libstd\/sync\/mpsc\/mpsc_queue.rs\",\n \"libstd\/sync\/mpsc\/spsc_queue.rs\",\n ];\n if exceptions.iter().any(|f| file.ends_with(f)) {\n return true\n }\n\n \/\/ Skip the BOM if it's there\n let bom = \"\\u{feff}\";\n let contents = if contents.starts_with(bom) {&contents[3..]} else {contents};\n\n \/\/ See if the license shows up in the first 100 lines\n let lines = contents.lines().take(100).collect::<Vec<_>>();\n lines.windows(LICENSE.lines().count()).any(|window| {\n let offset = if window.iter().all(|w| w.starts_with(\"\/\/\")) {\n 2\n } else if window.iter().all(|w| w.starts_with('#')) {\n 1\n } else if window.iter().all(|w| w.starts_with(\" *\")) {\n 2\n } else {\n return false\n };\n window.iter().map(|a| a[offset..].trim())\n .zip(LICENSE.lines()).all(|(a, b)| {\n a == b || match b.find(\"<year>\") {\n Some(i) => a.starts_with(&b[..i]) && a.ends_with(&b[i+6..]),\n None => false,\n }\n })\n })\n\n}\n<commit_msg>tidy: Stop requiring a license header<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to enforce various stylistic guidelines on the Rust codebase.\n\/\/!\n\/\/! Example checks are:\n\/\/!\n\/\/! * No lines over 100 characters\n\/\/! * No tabs\n\/\/! * No trailing whitespace\n\/\/! * No CR characters\n\/\/! * No `TODO` or `XXX` directives\n\/\/! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests\n\/\/!\n\/\/! A number of these checks can be opted-out of with various directives like\n\/\/! `\/\/ ignore-tidy-linelength`.\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nconst COLS: usize = 100;\n\nconst UNEXPLAINED_IGNORE_DOCTEST_INFO: &str = r#\"unexplained \"```ignore\" doctest; try one:\n\n* make the test actually pass, by adding necessary imports and declarations, or\n* use \"```text\", if the code is not Rust code, or\n* use \"```compile_fail,Ennnn\", if the code is expected to fail at compile time, or\n* use \"```should_panic\", if the code is expected to fail at run time, or\n* use \"```no_run\", if the code should type-check but not necessary linkable\/runnable, or\n* explain it like \"```ignore (cannot-test-this-because-xxxx)\", if the annotation cannot be avoided.\n\n\"#;\n\nconst LLVM_UNREACHABLE_INFO: &str = r\"\\\nC++ code used llvm_unreachable, which triggers undefined behavior\nwhen executed when assertions are disabled.\nUse llvm::report_fatal_error for increased robustness.\";\n\n\/\/\/ Parser states for line_is_url.\n#[derive(PartialEq)]\n#[allow(non_camel_case_types)]\nenum LIUState { EXP_COMMENT_START,\n EXP_LINK_LABEL_OR_URL,\n EXP_URL,\n EXP_END }\n\n\/\/\/ True if LINE appears to be a line comment containing an URL,\n\/\/\/ possibly with a Markdown link label in front, and nothing else.\n\/\/\/ The Markdown link label, if present, may not contain whitespace.\n\/\/\/ Lines of this form are allowed to be overlength, because Markdown\n\/\/\/ offers no way to split a line in the middle of a URL, and the lengths\n\/\/\/ of URLs to external references are beyond our control.\nfn line_is_url(line: &str) -> bool {\n use self::LIUState::*;\n let mut state: LIUState = EXP_COMMENT_START;\n\n for tok in line.split_whitespace() {\n match (state, tok) {\n (EXP_COMMENT_START, \"\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/!\") => state = EXP_LINK_LABEL_OR_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.len() >= 4 && w.starts_with(\"[\") && w.ends_with(\"]:\")\n => state = EXP_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\")\n => state = EXP_END,\n\n (EXP_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\") || w.starts_with(\"..\/\")\n => state = EXP_END,\n\n (_, _) => return false,\n }\n }\n\n state == EXP_END\n}\n\n\/\/\/ True if LINE is allowed to be longer than the normal limit.\n\/\/\/ Currently there is only one exception, for long URLs, but more\n\/\/\/ may be added in the future.\nfn long_line_is_ok(line: &str) -> bool {\n if line_is_url(line) {\n return true;\n }\n\n false\n}\n\npub fn check(path: 
&Path, bad: &mut bool) {\n let mut contents = String::new();\n super::walk(path, &mut super::filter_dirs, &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".rs\", \".py\", \".js\", \".sh\", \".c\", \".cpp\", \".h\"];\n if extensions.iter().all(|e| !filename.ends_with(e)) ||\n filename.starts_with(\".#\") {\n return\n }\n\n contents.truncate(0);\n t!(t!(File::open(file), file).read_to_string(&mut contents));\n\n if contents.is_empty() {\n tidy_error!(bad, \"{}: empty file\", file.display());\n }\n\n let skip_cr = contents.contains(\"ignore-tidy-cr\");\n let skip_tab = contents.contains(\"ignore-tidy-tab\");\n let skip_length = contents.contains(\"ignore-tidy-linelength\");\n let skip_end_whitespace = contents.contains(\"ignore-tidy-end-whitespace\");\n let mut trailing_new_lines = 0;\n for (i, line) in contents.split('\\n').enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n if !skip_length && line.chars().count() > COLS\n && !long_line_is_ok(line) {\n err(&format!(\"line longer than {} chars\", COLS));\n }\n if line.contains(\"\\t\") && !skip_tab {\n err(\"tab character\");\n }\n if !skip_end_whitespace && (line.ends_with(\" \") || line.ends_with(\"\\t\")) {\n err(\"trailing whitespace\");\n }\n if line.contains(\"\\r\") && !skip_cr {\n err(\"CR character\");\n }\n if filename != \"style.rs\" {\n if line.contains(\"TODO\") {\n err(\"TODO is deprecated; use FIXME\")\n }\n if line.contains(\"\/\/\") && line.contains(\" XXX\") {\n err(\"XXX is deprecated; use FIXME\")\n }\n }\n if line.ends_with(\"```ignore\") || line.ends_with(\"```rust,ignore\") {\n err(UNEXPLAINED_IGNORE_DOCTEST_INFO);\n }\n if filename.ends_with(\".cpp\") && line.contains(\"llvm_unreachable\") {\n err(LLVM_UNREACHABLE_INFO);\n }\n if line.is_empty() {\n trailing_new_lines += 1;\n } else {\n trailing_new_lines = 0;\n }\n }\n match trailing_new_lines {\n 0 => tidy_error!(bad, \"{}: missing trailing newline\", file.display()),\n 1 | 2 => {}\n n => tidy_error!(bad, \"{}: too many trailing newlines ({})\", file.display(), n),\n };\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::{c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::{c, cvt};\nuse sys::handle::Handle;\n\npub fn errno() -> i32 {\n unsafe { c::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as c::DWORD;\n\n let mut buf = [0 as c::WCHAR; 2048];\n\n unsafe {\n let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |\n c::FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as c::DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as c::DWORD,\n ptr::null()) as usize;\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n match String::from_utf16(&buf[..res]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) => format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: c::LPWCH,\n cur: c::LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur;\n let mut len = 0;\n while *(p as *const u16).offset(len) != 0 {\n len += 1;\n }\n let p = p as *const u16;\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n let (k, v) = match s.iter().position(|&b| b == '=' as u16) {\n Some(n) => (&s[..n], &s[n+1..]),\n None => (s, &[][..]),\n };\n Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v)))\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { c::FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = c::GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. 
Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::<Vec<u16>>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.push_all(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.push_all(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::<Vec<_>>();\n p.push(0);\n\n cvt(unsafe {\n c::SetCurrentDirectoryW(p.as_ptr())\n }).map(|_| ())\n}\n\npub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {\n let k = super::to_utf16_os(k);\n let res = super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n });\n match res {\n Ok(value) => Ok(Some(value)),\n Err(e) => {\n if e.raw_os_error() == Some(c::ERROR_ENVVAR_NOT_FOUND as i32) {\n Ok(None)\n } else {\n Err(e)\n }\n }\n }\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {\n let k = super::to_utf16_os(k);\n let v = super::to_utf16_os(v);\n\n cvt(unsafe {\n c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())\n }).map(|_| ())\n}\n\npub fn unsetenv(n: &OsStr) -> io::Result<()> {\n let v = super::to_utf16_os(n);\n cvt(unsafe {\n c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())\n }).map(|_| ())\n}\n\npub struct Args {\n range: Range<isize>,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option<OsString> {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { 
self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option<PathBuf> {\n ::env::var_os(\"HOME\").or_else(|| {\n ::env::var_os(\"USERPROFILE\")\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if c::GetLastError() != 0 => 0,\n 0 => sz,\n n => n as c::DWORD,\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as c::UINT) }\n}\n<commit_msg>Ignore malformed environment variables on Windows too<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::{c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::{c, cvt};\nuse sys::handle::Handle;\n\npub fn errno() -> i32 {\n unsafe { c::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as c::DWORD;\n\n let mut buf = [0 as c::WCHAR; 2048];\n\n unsafe {\n let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |\n c::FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as c::DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as c::DWORD,\n ptr::null()) as usize;\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n match String::from_utf16(&buf[..res]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) 
=> format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: c::LPWCH,\n cur: c::LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n loop {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur as *const u16;\n let mut len = 0;\n while *p.offset(len) != 0 {\n len += 1;\n }\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n \/\/ Windows allows environment variables to start with an equals\n \/\/ symbol (in any other position, this is the separator between\n \/\/ variable name and value). Since`s` has at least length 1 at\n \/\/ this point (because the empty string terminates the array of\n \/\/ environment variables), we can safely slice.\n let pos = match s[1..].iter().position(|&u| u == b'=' as u16).map(|p| p + 1) {\n Some(p) => p,\n None => continue,\n }\n return Some((\n OsStringExt::from_wide(&s[..pos]),\n OsStringExt::from_wide(&s[pos+1..]),\n ))\n }\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { c::FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = c::GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. 
Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::<Vec<u16>>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.push_all(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.push_all(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::<Vec<_>>();\n p.push(0);\n\n cvt(unsafe {\n c::SetCurrentDirectoryW(p.as_ptr())\n }).map(|_| ())\n}\n\npub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {\n let k = super::to_utf16_os(k);\n let res = super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n });\n match res {\n Ok(value) => Ok(Some(value)),\n Err(e) => {\n if e.raw_os_error() == Some(c::ERROR_ENVVAR_NOT_FOUND as i32) {\n Ok(None)\n } else {\n Err(e)\n }\n }\n }\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {\n let k = super::to_utf16_os(k);\n let v = super::to_utf16_os(v);\n\n cvt(unsafe {\n c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())\n }).map(|_| ())\n}\n\npub fn unsetenv(n: &OsStr) -> io::Result<()> {\n let v = super::to_utf16_os(n);\n cvt(unsafe {\n c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())\n }).map(|_| ())\n}\n\npub struct Args {\n range: Range<isize>,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option<OsString> {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { 
self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option<PathBuf> {\n ::env::var_os(\"HOME\").or_else(|| {\n ::env::var_os(\"USERPROFILE\")\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if c::GetLastError() != 0 => 0,\n 0 => sz,\n n => n as c::DWORD,\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as c::UINT) }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf, Component};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args_os().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nfn small_url_encode(s: &str) -> String {\n s.replace(\"<\", \"%3C\")\n .replace(\">\", \"%3E\")\n .replace(\" \", \"%20\")\n .replace(\"?\", \"%3F\")\n .replace(\"'\", \"%27\")\n .replace(\"&\", \"%26\")\n .replace(\",\", \"%2C\")\n .replace(\":\", \"%3A\")\n .replace(\";\", \"%3B\")\n .replace(\"[\", \"%5B\")\n .replace(\"]\", \"%5D\")\n .replace(\"\\\"\", \"%22\")\n}\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i, _| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n let encoded = small_url_encode(&frag);\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n \/\/ Just in case, we also add the encoded id.\n self.ids.insert(encoded);\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n errors: &mut bool)\n -> Option<PathBuf> {\n \/\/ Ignore none HTML files.\n if file.extension().and_then(|s| s.to_str()) != Some(\"html\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") ||\n file.ends_with(\"interpret\/struct.ValTy.html\") ||\n file.ends_with(\"symbol\/struct.InternedString.html\") ||\n file.ends_with(\"ast\/struct.ThinVec.html\") ||\n file.ends_with(\"util\/struct.ThinVec.html\") ||\n file.ends_with(\"util\/struct.RcSlice.html\") ||\n file.ends_with(\"layout\/struct.TyLayout.html\") ||\n file.ends_with(\"ty\/struct.Slice.html\") ||\n file.ends_with(\"ty\/enum.Attributes.html\") ||\n file.ends_with(\"ty\/struct.SymbolName.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"struct.BTreeSet.html\") ||\n file.ends_with(\"btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"hash_map\/struct.HashMap.html\") ||\n file.ends_with(\"hash_set\/struct.HashSet.html\") ||\n file.ends_with(\"sync\/struct.Lrc.html\") ||\n file.ends_with(\"sync\/struct.RwLock.html\") {\n return None;\n }\n\n let res = load_file(cache, root, 
file, SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file)\n .unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i, base| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\") || url.starts_with(\"https:\") ||\n url.starts_with(\"javascript:\") || url.starts_with(\"ftp:\") ||\n url.starts_with(\"irc:\") || url.starts_with(\"data:\") {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !base.is_empty() || !url.is_empty() {\n path.pop();\n for part in Path::new(base).join(url).components() {\n match part {\n Component::Prefix(_) |\n Component::RootDir => panic!(),\n Component::CurDir => {}\n Component::ParentDir => { path.pop(); }\n Component::Normal(s) => { path.push(s); }\n }\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display());\n return;\n }\n if let Some(extension) = path.extension() {\n \/\/ Ignore none HTML files.\n if extension != \"html\" {\n return;\n }\n }\n let res = load_file(cache, root, &path, FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => {\n panic!(\"error loading {}: {}\", path.display(), err);\n }\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(),\n i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: &Path,\n redirect: Redirect)\n -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n }\n Entry::Vacant(entry) => {\n let mut fp = 
File::open(file).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.to_path_buf(), err)\n } else {\n LoadError::IOError(err)\n }\n })?;\n fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n }\n };\n match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {\n Some(redirect_file) => {\n load_file(cache, root, &redirect_file, FromRedirect(true))\n }\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {\n let mut base = \"\";\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n \/\/ The base tag should always be the first link in the document so\n \/\/ we can get away with using one pass.\n let is_base = line[..j].ends_with(\"<base\");\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n if is_base {\n base = url;\n continue;\n }\n f(url, i, base)\n }\n }\n}\n<commit_msg>Rollup merge of #47387 - Rantanen:linkchecker-error-msg, r=steveklabnik<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! 
destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf, Component};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse Redirect::*;\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args_os().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nfn small_url_encode(s: &str) -> String {\n s.replace(\"<\", \"%3C\")\n .replace(\">\", \"%3E\")\n .replace(\" \", \"%20\")\n .replace(\"?\", \"%3F\")\n .replace(\"'\", \"%27\")\n .replace(\"&\", \"%26\")\n .replace(\",\", \"%2C\")\n .replace(\":\", \"%3A\")\n .replace(\";\", \"%3B\")\n .replace(\"[\", \"%5B\")\n .replace(\"]\", \"%5D\")\n .replace(\"\\\"\", \"%22\")\n}\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i, _| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n let encoded = small_url_encode(&frag);\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n \/\/ Just in case, we also add the encoded id.\n self.ids.insert(encoded);\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n errors: &mut bool)\n -> Option<PathBuf> {\n \/\/ Ignore none HTML files.\n if file.extension().and_then(|s| s.to_str()) != Some(\"html\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") ||\n file.ends_with(\"interpret\/struct.ValTy.html\") ||\n file.ends_with(\"symbol\/struct.InternedString.html\") ||\n file.ends_with(\"ast\/struct.ThinVec.html\") ||\n file.ends_with(\"util\/struct.ThinVec.html\") ||\n file.ends_with(\"util\/struct.RcSlice.html\") ||\n file.ends_with(\"layout\/struct.TyLayout.html\") ||\n file.ends_with(\"ty\/struct.Slice.html\") ||\n file.ends_with(\"ty\/enum.Attributes.html\") ||\n file.ends_with(\"ty\/struct.SymbolName.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if 
file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"struct.BTreeSet.html\") ||\n file.ends_with(\"btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"hash_map\/struct.HashMap.html\") ||\n file.ends_with(\"hash_set\/struct.HashSet.html\") ||\n file.ends_with(\"sync\/struct.Lrc.html\") ||\n file.ends_with(\"sync\/struct.RwLock.html\") {\n return None;\n }\n\n let res = load_file(cache, root, file, SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file)\n .unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i, base| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\") || url.starts_with(\"https:\") ||\n url.starts_with(\"javascript:\") || url.starts_with(\"ftp:\") ||\n url.starts_with(\"irc:\") || url.starts_with(\"data:\") {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !base.is_empty() || !url.is_empty() {\n path.pop();\n for part in Path::new(base).join(url).components() {\n match part {\n Component::Prefix(_) |\n Component::RootDir => {\n \/\/ Avoid absolute paths as they make the docs not\n \/\/ relocatable by making assumptions on where the docs\n \/\/ are hosted relative to the site root.\n *errors = true;\n println!(\"{}:{}: absolute path - {}\",\n pretty_file.display(),\n i + 1,\n Path::new(base).join(url).display());\n return;\n }\n Component::CurDir => {}\n Component::ParentDir => { path.pop(); }\n Component::Normal(s) => { path.push(s); }\n }\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! 
If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display());\n return;\n }\n if let Some(extension) = path.extension() {\n \/\/ Ignore none HTML files.\n if extension != \"html\" {\n return;\n }\n }\n let res = load_file(cache, root, &path, FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => {\n panic!(\"error loading {}: {}\", path.display(), err);\n }\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(),\n i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: &Path,\n redirect: Redirect)\n -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n }\n Entry::Vacant(entry) => {\n let mut fp = File::open(file).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.to_path_buf(), err)\n } else {\n LoadError::IOError(err)\n }\n })?;\n fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n }\n };\n match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {\n Some(redirect_file) => {\n load_file(cache, root, &redirect_file, FromRedirect(true))\n }\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {\n let mut base = \"\";\n for (i, mut line) in 
contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n \/\/ The base tag should always be the first link in the document so\n \/\/ we can get away with using one pass.\n let is_base = line[..j].ends_with(\"<base\");\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n if is_base {\n base = url;\n continue;\n }\n f(url, i, base)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nextern crate url;\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse url::{Url, UrlParser};\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut url = Url::from_file_path(&docs).unwrap();\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self,\n file: &Path,\n contents: &str,\n errors: &mut bool)\n{\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\",\n file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache,\n root: &Path,\n dir: &Path,\n url: &mut Url,\n errors: &mut bool)\n{\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());\n if kind.is_dir() {\n walk(cache, root, &path, url, errors);\n } else {\n let pretty_path = check(cache, root, &path, url, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n url.path_mut().unwrap().pop();\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n base: &Url,\n errors: &mut bool) -> Option<PathBuf>\n{\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n if file.ends_with(\"std\/sys\/ext\/index.html\") {\n return None;\n }\n\n if let Some(file) = file.to_str() {\n \/\/ FIXME(#31948)\n if file.contains(\"ParseFloatError\") {\n return None;\n }\n \/\/ weird reexports, but this module is on its way out, so chalk it up to\n \/\/ \"rustdoc weirdness\" and move on from there\n if file.contains(\"scoped_tls\") {\n return None;\n }\n }\n\n let mut parser = UrlParser::new();\n parser.base_url(base);\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ 
]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path. If either of these fail then we\n \/\/ just keep going.\n let (parsed_url, path) = match url_to_file_path(&parser, url) {\n Some((url, path)) => (url, PathBuf::from(path)),\n None => return,\n };\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => panic!(format!(\"{}\", err)),\n Err(LoadError::BrokenRedirect(target, _)) => {\n print!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(), i + 1, target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = parsed_url.fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\",\n fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: PathBuf,\n redirect: Redirect) -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n },\n Entry::Vacant(entry) => {\n let mut fp = try!(File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n }));\n try!(fp.read_to_string(&mut contents)\n .map_err(|err| LoadError::IOError(err)));\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n },\n };\n let base = Url::from_file_path(&file).unwrap();\n let mut parser = UrlParser::new();\n parser.base_url(&base);\n\n match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {\n Some((_, redirect_file)) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents))\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n 
})\n}\n\nfn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {\n parser.parse(url).ok().and_then(|parsed_url| {\n parsed_url.to_file_path().ok().map(|f| (parsed_url, f))\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str,\n attr: &str,\n mut f: F)\n{\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len() ..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<commit_msg>Auto merge of #34039 - ollie27:linkchecker_dirs, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nextern crate url;\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse url::{Url, UrlParser};\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut url = Url::from_file_path(&docs).unwrap();\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut url, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self,\n file: &Path,\n contents: &str,\n errors: &mut bool)\n{\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\",\n file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache,\n root: &Path,\n dir: &Path,\n url: &mut Url,\n errors: &mut bool)\n{\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());\n if kind.is_dir() {\n walk(cache, root, &path, url, errors);\n } else {\n let pretty_path = check(cache, root, &path, url, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n url.path_mut().unwrap().pop();\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n base: &Url,\n errors: &mut bool) -> Option<PathBuf>\n{\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n if file.ends_with(\"std\/sys\/ext\/index.html\") {\n return None;\n }\n\n if let Some(file) = file.to_str() {\n \/\/ FIXME(#31948)\n if file.contains(\"ParseFloatError\") {\n return None;\n }\n \/\/ weird reexports, but this module is on its way out, so chalk it up to\n \/\/ \"rustdoc weirdness\" and move on from there\n if file.contains(\"scoped_tls\") {\n return None;\n }\n }\n\n let mut parser = UrlParser::new();\n parser.base_url(base);\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file).unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ 
]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path. If either of these fail then we\n \/\/ just keep going.\n let (parsed_url, path) = match url_to_file_path(&parser, url) {\n Some((url, path)) => (url, PathBuf::from(path)),\n None => return,\n };\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}:{}: directory link - {}\", pretty_file.display(),\n i + 1, pretty_path.display());\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => panic!(format!(\"{}\", err)),\n Err(LoadError::BrokenRedirect(target, _)) => {\n print!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(), i + 1, target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = parsed_url.fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(), i + 1);\n println!(\"`#{}` pointing to `{}`\",\n fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n file: PathBuf,\n redirect: Redirect) -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n },\n Entry::Vacant(entry) => {\n let mut fp = try!(File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n }));\n try!(fp.read_to_string(&mut contents)\n .map_err(|err| LoadError::IOError(err)));\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n },\n };\n let base = Url::from_file_path(&file).unwrap();\n let mut parser = UrlParser::new();\n parser.base_url(&base);\n\n match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {\n Some((_, redirect_file)) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents))\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let 
mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {\n parser.parse(url).ok().and_then(|parsed_url| {\n parsed_url.to_file_path().ok().map(|f| (parsed_url, f))\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str,\n attr: &str,\n mut f: F)\n{\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len() ..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #5366 : tedhorst\/rust\/threadring, r=brson<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Based on threadring.erlang by Jira Isa\n\nfn start(n_tasks: int, token: int) {\n let mut (p, ch1) = comm::stream();\n ch1.send(token);\n \/\/ XXX could not get this to work with a range closure\n let mut i = 2;\n while i <= n_tasks {\n let (next_p, ch) = comm::stream();\n let imm_i = i;\n let imm_p = p;\n do task::spawn {\n roundtrip(imm_i, n_tasks, &imm_p, &ch);\n };\n p = next_p;\n i += 1;\n }\n let imm_p = p;\n let imm_ch = ch1;\n do task::spawn {\n roundtrip(1, n_tasks, &imm_p, &imm_ch);\n }\n}\n\nfn roundtrip(id: int, n_tasks: int, p: &comm::Port<int>, ch: &comm::Chan<int>) {\n while (true) {\n match p.recv() {\n 1 => {\n io::println(fmt!(\"%d\\n\", id));\n return;\n }\n token => {\n debug!(\"thread: %d got token: %d\", id, token);\n ch.send(token - 1);\n if token <= n_tasks {\n return;\n }\n }\n }\n }\n}\n\nfn main() {\n let args = if os::getenv(~\"RUST_BENCH\").is_some() {\n ~[~\"\", ~\"2000000\", ~\"503\"]\n }\n else {\n os::args()\n };\n let token = if args.len() > 1u {\n int::from_str(args[1]).get()\n }\n else {\n 1000\n };\n let n_tasks = if args.len() > 2u {\n int::from_str(args[2]).get()\n }\n else {\n 503\n };\n start(n_tasks, token);\n\n}\n<|endoftext|>"} {"text":"<commit_before>\nuse regex::Regex;\n\nuse std::collections::VecDeque;\n\n#[derive(Debug, Clone)]\npub enum ParseError {\n UnclosedList,\n}\n\npub type ParseResult = Result<Token, ParseError>;\n\n#[derive(Debug, Clone)]\npub enum Token {\n Number(f32),\n StrLit(String),\n\n Sym(String),\n\n List(Vec<Token>),\n}\n\nfn preprocess(code: &str) -> VecDeque<String> {\n let string_re = r#\"\"[^\"]*\"\"#;\n let sym_re = r\"_*-*\\w+_*-*\\w*_*-*\";\n let num_re = r\"\\d+\\.*\\d*\";\n let list_re = r\"\\(|\\)\";\n let op_re = r\"\\+|-|\\*|\/|\\^|&|\\||=\";\n \n let regex = format!(\"{}|{}|{}|{}|{}\", string_re, num_re, sym_re, list_re, op_re);\n\n let re = match Regex::new(®ex) {\n Ok(re) => re,\n Err(e) => return VecDeque::new(),\n };\n\n let spaced_code = code.to_string()\n .replace(\"(\", \" ( \")\n .replace(\")\", \" ) \");\n\n let spaced_code = spaced_code.trim();\n\n let mut token_strs = VecDeque::new();\n\n for cap in re.captures_iter(&spaced_code) {\n let match_str = cap.at(0).unwrap();\n token_strs.push_back(match_str.to_string());\n }\n\n return token_strs;\n}\n\npub fn tokenize_str(code: &str) -> ParseResult {\n let mut seperated_code = preprocess(code);\n\n tokenize(&mut seperated_code)\n}\n\nfn tokenize(list: &mut VecDeque<String>) -> ParseResult {\n let token_str = list.remove(0).unwrap_or(format!(\"42\"));\n\n match &token_str[..] {\n \"(\" => {\n let mut tokens = Vec::new();\n\n while let Some(item_str) = list.remove(0) {\n if &item_str[..] == \")\" {\n return Ok(Token::List(tokens));\n }\n\n list.push_front(item_str);\n tokens.push(try!(tokenize(list)));\n }\n\n Err(ParseError::UnclosedList)\n },\n atom => {\n Ok(tokenize_atom(atom.to_string()))\n }\n }\n}\n\nfn tokenize_atom(atom: String) -> Token {\n if let Ok(n) = atom.parse() {\n Token::Number(n)\n } else if let Some(lit) = string_lit(&atom[..]) {\n Token::StrLit(lit) \n } else {\n Token::Sym(atom.to_string())\n }\n}\n\nfn string_lit(slice: &str) -> Option<String> {\n let len = slice.len();\n let end = len - 1;\n\n if &slice[0..1] == \"\\\"\" && &slice[end .. 
len] == \"\\\"\" {\n let lit = &slice[1..end];\n return Some(lit.to_string());\n }\n\n return None;\n}\n<commit_msg>Use proper functions to pop items from front of VecDeque<commit_after>\nuse regex::Regex;\n\nuse std::collections::VecDeque;\n\n#[derive(Debug, Clone)]\npub enum ParseError {\n UnclosedList,\n}\n\npub type ParseResult = Result<Token, ParseError>;\n\n#[derive(Debug, Clone)]\npub enum Token {\n Number(f32),\n StrLit(String),\n\n Sym(String),\n\n List(Vec<Token>),\n}\n\nfn preprocess(code: &str) -> VecDeque<String> {\n let string_re = r#\"\"[^\"]*\"\"#;\n let sym_re = r\"_*-*\\w+_*-*\\w*_*-*\";\n let num_re = r\"\\d+\\.*\\d*\";\n let list_re = r\"\\(|\\)\";\n let op_re = r\"\\+|-|\\*|\/|\\^|&|\\||=\";\n \n let regex = format!(\"{}|{}|{}|{}|{}\", string_re, num_re, sym_re, list_re, op_re);\n\n let re = match Regex::new(®ex) {\n Ok(re) => re,\n Err(e) => return VecDeque::new(),\n };\n\n let spaced_code = code.to_string()\n .replace(\"(\", \" ( \")\n .replace(\")\", \" ) \");\n\n let spaced_code = spaced_code.trim();\n\n let mut token_strs = VecDeque::new();\n\n for cap in re.captures_iter(&spaced_code) {\n let match_str = cap.at(0).unwrap();\n token_strs.push_back(match_str.to_string());\n }\n\n return token_strs;\n}\n\npub fn tokenize_str(code: &str) -> ParseResult {\n let mut seperated_code = preprocess(code);\n\n tokenize(&mut seperated_code)\n}\n\nfn tokenize(list: &mut VecDeque<String>) -> ParseResult {\n let token_str = list.pop_front().unwrap_or(format!(\"42\"));\n\n match &token_str[..] {\n \"(\" => {\n let mut tokens = Vec::new();\n\n while let Some(item_str) = list.pop_front() {\n if &item_str[..] == \")\" {\n return Ok(Token::List(tokens));\n }\n\n list.push_front(item_str);\n tokens.push(try!(tokenize(list)));\n }\n\n Err(ParseError::UnclosedList)\n },\n atom => {\n Ok(tokenize_atom(atom.to_string()))\n }\n }\n}\n\nfn tokenize_atom(atom: String) -> Token {\n if let Ok(n) = atom.parse() {\n Token::Number(n)\n } else if let Some(lit) = string_lit(&atom[..]) {\n Token::StrLit(lit) \n } else {\n Token::Sym(atom.to_string())\n }\n}\n\nfn string_lit(slice: &str) -> Option<String> {\n let len = slice.len();\n let end = len - 1;\n\n if &slice[0..1] == \"\\\"\" && &slice[end .. 
len] == \"\\\"\" {\n let lit = &slice[1..end];\n return Some(lit.to_string());\n }\n\n return None;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added example implementing types for custom type<commit_after>#![allow(dead_code, unused)]\n\n#[macro_use]\nextern crate kinder;\nuse kinder::lift::*;\n\n\/\/We'll define a very simply custom struct and then implement HOT's for it\n#[derive(Debug)] \/\/for printing nicely\nstruct Holder<T> {\n elem: T, \/\/simply holds a value of type T\n}\n\n\/\/now we lift it to a higher type\nlift!(Holder);\n\nimpl<A,B> Functor<A> for Holder<B> {\n fn fmap<F>(&self, f: F) -> Holder<A> \n where F: Fn(&B) -> A\n {\n Holder{ elem: f(&self.elem) } \/\/just applies f to elem\n }\n}\n\nimpl<A, B> Monad<A> for Holder<B> {\n fn lift(x:A) -> <Self as Higher<A>>::C {\n Holder { elem: x }\n }\n\n fn bind<F>(&self, mut f: F) -> Holder<A>\n where F: FnMut(&B) -> Holder<A>\n {\n f(&self.elem) \/\/apply f to elem -> returns a holder\n }\n}\n\n\nfn main() {\n let test = Holder::lift(2);\n println!(\"{:?}\", test); \/\/prints Holder { elem: 2 }\n \n let mapped = test.fmap(|x| x+4);\n println!(\"{:?}\", mapped); \/\/prints Holder { elem: 6 }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Ignore \"<quote\\ expansion\" entries.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test for changing pub inherent method signature<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test where we change the body of a private method in an impl.\n\/\/ We then test what sort of functions must be rebuilt as a result.\n\n\/\/ revisions:rpass1 rpass2\n\/\/ compile-flags: -Z query-dep-graph\n\n#![feature(rustc_attrs)]\n#![feature(stmt_expr_attributes)]\n#![feature(static_in_const)]\n#![allow(dead_code)]\n\n\/\/ These are expected to require translation.\n#![rustc_partition_translated(module=\"struct_point-point\", cfg=\"rpass2\")]\n#![rustc_partition_translated(module=\"struct_point-fn_calls_changed_method\", cfg=\"rpass2\")]\n\n\/\/ FIXME(#36349) -- this gets recompiled because we don't separate items from impl\n#![rustc_partition_translated(module=\"struct_point-fn_calls_another_method\", cfg=\"rpass2\")]\n\n#![rustc_partition_reused(module=\"struct_point-fn_make_struct\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_read_field\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_write_field\", cfg=\"rpass2\")]\n\nmod point {\n pub struct Point {\n pub x: f32,\n pub y: f32,\n }\n\n impl Point {\n #[cfg(rpass1)]\n pub fn distance_from_point(&self, p: Option<Point>) -> f32 {\n let p = p.unwrap_or(Point { x: 0.0, y: 0.0 });\n let x_diff = self.x - p.x;\n let y_diff = self.y - p.y;\n return x_diff * x_diff + y_diff * y_diff;\n }\n\n #[cfg(rpass2)]\n pub fn distance_from_point(&self, p: Option<&Point>) -> f32 {\n const ORIGIN: &Point = &Point { x: 0.0, y: 0.0 };\n let p = p.unwrap_or(ORIGIN);\n let x_diff = self.x - p.x;\n let y_diff = self.y - p.y;\n return x_diff * x_diff + y_diff * y_diff;\n }\n\n pub fn x(&self) -> f32 {\n self.x\n }\n }\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from the same impl which changed\nmod fn_calls_changed_method {\n use point::Point;\n\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let p = Point { x: 2.0, y: 2.0 };\n p.distance_from_point(None);\n }\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from the same impl which changed\nmod fn_calls_another_method {\n use point::Point;\n\n \/\/ FIXME(#36349) -- this gets recompiled because we don't separate items from impl\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let p = Point { x: 2.0, y: 2.0 };\n p.x();\n }\n}\n\n\/\/\/ A fn item that makes an instance of `Point` but does not invoke methods\nmod fn_make_struct {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn make_origin() -> Point {\n Point { x: 2.0, y: 2.0 }\n }\n}\n\n\/\/\/ A fn item that reads fields from `Point` but does not invoke methods\nmod fn_read_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn get_x(p: Point) -> f32 {\n p.x\n }\n}\n\n\/\/\/ A fn item that writes to a field of `Point` but does not invoke methods\nmod fn_write_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn inc_x(p: &mut Point) {\n p.x += 1.0;\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add build.rs to help with local builds on macos<commit_after>fn main() {\n if cfg!(target_os = \"macos\") {\n \/\/ if we're on Mac OS X we'll kindly add DYLD_LIBRARY_PATH to rustc's\n \/\/ linker search path\n if let Some(dyld_paths) = option_env!(\"DYLD_LIBRARY_PATH\") {\n for path in dyld_paths.split(':') {\n println!(\"cargo:rustc-link-search=native={}\", path)\n }\n }\n 
\/\/ if we're on Mac OS X we'll kindly add DYLD_FALLBACK_LIBRARY_PATH to rustc's\n \/\/ linker search path\n if let Some(dyld_fallback_paths) = option_env!(\"DYLD_FALLBACK_LIBRARY_PATH\") {\n for path in dyld_fallback_paths.split(':') {\n println!(\"cargo:rustc-link-search=native={}\", path)\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>sort category<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n\/\/ TODO: Structure using loops\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. 
times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Fix while loops<commit_after>use redox::*;\n\n\/\/ TODO: Structure using loops\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n 
if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset >= 0 {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rustify nvpairs.h for Tedsta<commit_after>enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String, \/\/ TODO: What to name this string type?\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n Hrtime,\n Nvlist, \/\/ TODO: What to name this ?\n NvlistArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array\n}\n\nstruct NvPair {\n nvp_size: i32, \/\/ size of this nvpair\n nvp_name_sz: i16, \/\/ length of name string\n nvp_reserve: i16, \/\/ not used\n nvp_value_elem: i32, \/\/ number of elements for array types\n nvp_type: DataType, \/\/ type of value\n \/\/ name string\n \/\/ aligned ptr array for string arrays\n \/\/ aligned array of data for value\n}\n\n\/\/ nvlist header\nstruct NvList {\n nvl_version: i32\n nvl_nvflag: u32 \/\/ persistent flags\n nvl_priv: u64 \/\/ ptr to private data if not packed\n nvl_flag: u32\n nvl_pad: i32 \/\/ currently not used, for alignment\n}\n\n\/\/ nvp implementation version\nconst NV_VERSION: i32 = 0;\n\n\/\/ nvlist pack encoding\nconst NV_ENCODE_NATIVE: u8 = 0;\nconst NV_ENCODE_XDR: u8 = 1;\n\n\/\/ nvlist persistent unique name flags, stored in nvl_nvflags\nconst NV_UNIQUE_NAME: u32 = 0x1;\nconst NV_UNIQUE_NAME_TYPE: u32 = 0x2;\n\n\/\/ nvlist lookup pairs related flags\nconst NV_FLAG_NOENTOK: isize = 0x1;\n\n\/* What to do about these macros?\n\/\/ convenience macros\n#define NV_ALIGN(x) (((ulong_t)(x) + 7ul) & ~7ul)\n#define NV_ALIGN4(x) (((x) + 3) & ~3)\n\n#define NVP_SIZE(nvp) ((nvp)->nvp_size)\n#define NVP_NAME(nvp) ((char *)(nvp) + sizeof (nvpair_t))\n#define NVP_TYPE(nvp) ((nvp)->nvp_type)\n#define NVP_NELEM(nvp) ((nvp)->nvp_value_elem)\n#define NVP_VALUE(nvp) ((char *)(nvp) + NV_ALIGN(sizeof (nvpair_t) \\\n + (nvp)->nvp_name_sz))\n\n#define 
NVL_VERSION(nvl) ((nvl)->nvl_version)\n#define NVL_SIZE(nvl) ((nvl)->nvl_size)\n#define NVL_FLAG(nvl) ((nvl)->nvl_flag)\n*\/\n\n\/\/ NV allocator framework\nstruct NvAllocOps;\n\nstruct NvAlloc<> {\n nva_ops: &'static NvAllocOps,\n nva_arg: Any, \/\/ This was a void pointer type.\n \/\/ Not sure if Any is the correct type.\n}\n\nstruct NvAllocOps {\n int (*nv_ao_init)(nv_alloc_t *, __va_list);\n void (*nv_ao_fini)(nv_alloc_t *);\n void *(*nv_ao_alloc)(nv_alloc_t *, size_t);\n void (*nv_ao_free)(nv_alloc_t *, void *, size_t);\n void (*nv_ao_reset)(nv_alloc_t *);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\nuse std::fmt::Debug;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagutil::debug_result::*;\n\nuse toml_query::read::TomlValueReadExt;\nuse toml_query::insert::TomlValueInsertExt;\n\nuse error::LinkErrorKind as LEK;\nuse error::LinkError as LE;\nuse error::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\nuse error::ResultExt;\n\nuse self::iter::*;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\npub trait Link {\n\n fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>>;\n\n fn get_url(&self) -> Result<Option<Url>>;\n\n}\n\nimpl Link for Entry {\n\n fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>> {\n self.get_header()\n .read(\"links.external.url\")\n .chain_err(|| LEK::EntryHeaderReadError)\n .and_then(|opt| match opt {\n Some(&Value::String(ref s)) => {\n debug!(\"Found url, parsing: {:?}\", s);\n Url::parse(&s[..]).chain_err(|| LEK::InvalidUri).map(Some)\n },\n Some(_) => Err(LE::from_kind(LEK::LinkParserFieldTypeError)),\n None => Ok(None),\n })\n }\n\n fn get_url(&self) -> Result<Option<Url>> {\n match self.get_header().read(\"links.external.url\") {\n Ok(Some(&Value::String(ref s))) => {\n Url::parse(&s[..])\n .map(Some)\n .chain_err(|| LEK::EntryHeaderReadError)\n 
},\n Ok(None) => Ok(None),\n _ => Err(LE::from_kind(LEK::EntryHeaderReadError))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links<'a>(&self, store: &'a Store) -> Result<UrlIter<'a>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\npub mod iter {\n \/\/! Iterator helpers for external linking stuff\n \/\/!\n \/\/! Contains also helpers to filter iterators for external\/internal links\n \/\/!\n \/\/!\n \/\/! # Warning\n \/\/!\n \/\/! This module uses `internal::Link` as link type, so we operate on _store ids_ here.\n \/\/!\n \/\/! Not to confuse with `external::Link` which is a real `FileLockEntry` under the hood.\n \/\/!\n\n use libimagutil::debug_result::*;\n use libimagstore::store::Store;\n\n use internal::Link;\n use internal::iter::LinkIter;\n use error::LinkErrorKind as LEK;\n use error::ResultExt;\n use error::Result;\n\n use url::Url;\n\n \/\/\/ Helper for building `OnlyExternalIter` and `NoExternalIter`\n \/\/\/\n \/\/\/ The boolean value defines, how to interpret the `is_external_link_storeid()` return value\n \/\/\/ (here as \"pred\"):\n \/\/\/\n \/\/\/ pred | bool | xor | take?\n \/\/\/ ---- | ---- | --- | ----\n \/\/\/ 0 | 0 | 0 | 1\n \/\/\/ 0 | 1 | 1 | 0\n \/\/\/ 1 | 0 | 1 | 0\n \/\/\/ 1 | 1 | 0 | 1\n \/\/\/\n \/\/\/ If `bool` says \"take if return value is false\", we take the element if the `pred` returns\n \/\/\/ false... 
and so on.\n \/\/\/\n \/\/\/ As we can see, the operator between these two operants is `!(a ^ b)`.\n pub struct ExternalFilterIter(LinkIter, bool);\n\n impl Iterator for ExternalFilterIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::is_external_link_storeid;\n\n while let Some(elem) = self.0.next() {\n if !(self.1 ^ is_external_link_storeid(&elem)) {\n return Some(elem);\n }\n }\n None\n }\n }\n\n \/\/\/ Helper trait to be implemented on `LinkIter` to select or deselect all external links\n \/\/\/\n \/\/\/ # See also\n \/\/\/\n \/\/\/ Also see `OnlyExternalIter` and `NoExternalIter` and the helper traits\/functions\n \/\/\/ `OnlyInteralLinks`\/`only_internal_links()` and `OnlyExternalLinks`\/`only_external_links()`.\n pub trait SelectExternal {\n fn select_external_links(self, b: bool) -> ExternalFilterIter;\n }\n\n impl SelectExternal for LinkIter {\n fn select_external_links(self, b: bool) -> ExternalFilterIter {\n ExternalFilterIter(self, b)\n }\n }\n\n\n pub struct OnlyExternalIter(ExternalFilterIter);\n\n impl OnlyExternalIter {\n pub fn new(li: LinkIter) -> OnlyExternalIter {\n OnlyExternalIter(ExternalFilterIter(li, true))\n }\n\n pub fn urls<'a>(self, store: &'a Store) -> UrlIter<'a> {\n UrlIter(self, store)\n }\n }\n\n impl Iterator for OnlyExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub struct NoExternalIter(ExternalFilterIter);\n\n impl NoExternalIter {\n pub fn new(li: LinkIter) -> NoExternalIter {\n NoExternalIter(ExternalFilterIter(li, false))\n }\n }\n\n impl Iterator for NoExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub trait OnlyExternalLinks : Sized {\n fn only_external_links(self) -> OnlyExternalIter ;\n\n fn no_internal_links(self) -> OnlyExternalIter {\n self.only_external_links()\n }\n }\n\n impl OnlyExternalLinks for LinkIter {\n fn only_external_links(self) -> OnlyExternalIter {\n OnlyExternalIter::new(self)\n }\n }\n\n pub trait OnlyInternalLinks : Sized {\n fn only_internal_links(self) -> NoExternalIter;\n\n fn no_external_links(self) -> NoExternalIter {\n self.only_internal_links()\n }\n }\n\n impl OnlyInternalLinks for LinkIter {\n fn only_internal_links(self) -> NoExternalIter {\n NoExternalIter::new(self)\n }\n }\n\n pub struct UrlIter<'a>(OnlyExternalIter, &'a Store);\n\n impl<'a> Iterator for UrlIter<'a> {\n type Item = Result<Url>;\n\n fn next(&mut self) -> Option<Self::Item> {\n use external::Link;\n\n loop {\n let next = self.0\n .next()\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n self.1\n .retrieve(id.clone())\n .chain_err(|| LEK::StoreReadError)\n .map_dbg_err(|_| format!(\"Retrieving entry for id: '{:?}' failed\", id))\n .and_then(|f| {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n f.get_link_uri_from_filelockentry()\n .map_dbg_err(|e| format!(\"URL -> Err = {:?}\", e))\n })\n });\n\n match next {\n Some(Ok(Some(link))) => return Some(Ok(link)),\n Some(Ok(None)) => continue,\n Some(Err(e)) => return Some(Err(e)),\n None => return None\n }\n }\n }\n\n }\n\n}\n\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid<A: AsRef<StoreId> + Debug>(id: A) -> bool {\n debug!(\"Checking whether this is a 'links\/external\/': '{:?}'\", id);\n id.as_ref().local().starts_with(\"links\/external\")\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that 
there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links<'a>(&self, store: &'a Store) -> Result<UrlIter<'a>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .chain_err(|| LEK::StoreReadError)\n .map(|iter| {\n debug!(\"Getting external links\");\n iter.only_external_links().urls(store)\n })\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = try!(\n ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid()\n .chain_err(|| LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to build StoreId for this hash '{:?}'\", hash)\n })\n );\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let mut file = try!(store\n .retrieve(file_id.clone())\n .chain_err(|| LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to create or retrieve an file for this link '{:?}'\", link)\n }));\n\n debug!(\"Generating header content!\");\n {\n let hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"links.external.content\") {\n Ok(Some(&Value::Table(ref table))) => table.clone(),\n Ok(Some(_)) => {\n warn!(\"There is a value at 'links.external.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(e).chain_err(|| LEK::StoreWriteError),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.insert(\"links.external.content\", Value::Table(table)) {\n return Err(e).chain_err(|| LEK::StoreWriteError);\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(e).chain_err(|| LEK::StoreWriteError);\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|links| {\n \/\/ TODO: Do not ignore errors here\n let mut links = links.filter_map(Result::ok).collect::<Vec<_>>();\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n 
debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}'\", link);\n let links = links\n .filter_map(Result::ok)\n .filter(|l| l.as_str() != link.as_str())\n .collect::<Vec<_>>();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<commit_msg>Add test to test external linking<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\nuse std::fmt::Debug;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagutil::debug_result::*;\n\nuse toml_query::read::TomlValueReadExt;\nuse toml_query::insert::TomlValueInsertExt;\n\nuse error::LinkErrorKind as LEK;\nuse error::LinkError as LE;\nuse error::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\nuse error::ResultExt;\n\nuse self::iter::*;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\npub trait Link {\n\n fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>>;\n\n fn get_url(&self) -> Result<Option<Url>>;\n\n}\n\nimpl Link for Entry {\n\n fn get_link_uri_from_filelockentry(&self) -> Result<Option<Url>> {\n self.get_header()\n .read(\"links.external.url\")\n .chain_err(|| LEK::EntryHeaderReadError)\n .and_then(|opt| match opt {\n Some(&Value::String(ref s)) => {\n debug!(\"Found url, parsing: {:?}\", s);\n Url::parse(&s[..]).chain_err(|| LEK::InvalidUri).map(Some)\n },\n Some(_) => Err(LE::from_kind(LEK::LinkParserFieldTypeError)),\n None => Ok(None),\n })\n }\n\n fn get_url(&self) -> Result<Option<Url>> {\n match self.get_header().read(\"links.external.url\") {\n Ok(Some(&Value::String(ref s))) => {\n Url::parse(&s[..])\n .map(Some)\n .chain_err(|| 
LEK::EntryHeaderReadError)\n },\n Ok(None) => Ok(None),\n _ => Err(LE::from_kind(LEK::EntryHeaderReadError))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links<'a>(&self, store: &'a Store) -> Result<UrlIter<'a>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\npub mod iter {\n \/\/! Iterator helpers for external linking stuff\n \/\/!\n \/\/! Contains also helpers to filter iterators for external\/internal links\n \/\/!\n \/\/!\n \/\/! # Warning\n \/\/!\n \/\/! This module uses `internal::Link` as link type, so we operate on _store ids_ here.\n \/\/!\n \/\/! Not to confuse with `external::Link` which is a real `FileLockEntry` under the hood.\n \/\/!\n\n use libimagutil::debug_result::*;\n use libimagstore::store::Store;\n\n use internal::Link;\n use internal::iter::LinkIter;\n use error::LinkErrorKind as LEK;\n use error::ResultExt;\n use error::Result;\n\n use url::Url;\n\n \/\/\/ Helper for building `OnlyExternalIter` and `NoExternalIter`\n \/\/\/\n \/\/\/ The boolean value defines, how to interpret the `is_external_link_storeid()` return value\n \/\/\/ (here as \"pred\"):\n \/\/\/\n \/\/\/ pred | bool | xor | take?\n \/\/\/ ---- | ---- | --- | ----\n \/\/\/ 0 | 0 | 0 | 1\n \/\/\/ 0 | 1 | 1 | 0\n \/\/\/ 1 | 0 | 1 | 0\n \/\/\/ 1 | 1 | 0 | 1\n \/\/\/\n \/\/\/ If `bool` says \"take if return value is false\", we take the element if the `pred` returns\n \/\/\/ false... 
and so on.\n \/\/\/\n \/\/\/ As we can see, the operator between these two operants is `!(a ^ b)`.\n pub struct ExternalFilterIter(LinkIter, bool);\n\n impl Iterator for ExternalFilterIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::is_external_link_storeid;\n\n while let Some(elem) = self.0.next() {\n if !(self.1 ^ is_external_link_storeid(&elem)) {\n return Some(elem);\n }\n }\n None\n }\n }\n\n \/\/\/ Helper trait to be implemented on `LinkIter` to select or deselect all external links\n \/\/\/\n \/\/\/ # See also\n \/\/\/\n \/\/\/ Also see `OnlyExternalIter` and `NoExternalIter` and the helper traits\/functions\n \/\/\/ `OnlyInteralLinks`\/`only_internal_links()` and `OnlyExternalLinks`\/`only_external_links()`.\n pub trait SelectExternal {\n fn select_external_links(self, b: bool) -> ExternalFilterIter;\n }\n\n impl SelectExternal for LinkIter {\n fn select_external_links(self, b: bool) -> ExternalFilterIter {\n ExternalFilterIter(self, b)\n }\n }\n\n\n pub struct OnlyExternalIter(ExternalFilterIter);\n\n impl OnlyExternalIter {\n pub fn new(li: LinkIter) -> OnlyExternalIter {\n OnlyExternalIter(ExternalFilterIter(li, true))\n }\n\n pub fn urls<'a>(self, store: &'a Store) -> UrlIter<'a> {\n UrlIter(self, store)\n }\n }\n\n impl Iterator for OnlyExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub struct NoExternalIter(ExternalFilterIter);\n\n impl NoExternalIter {\n pub fn new(li: LinkIter) -> NoExternalIter {\n NoExternalIter(ExternalFilterIter(li, false))\n }\n }\n\n impl Iterator for NoExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub trait OnlyExternalLinks : Sized {\n fn only_external_links(self) -> OnlyExternalIter ;\n\n fn no_internal_links(self) -> OnlyExternalIter {\n self.only_external_links()\n }\n }\n\n impl OnlyExternalLinks for LinkIter {\n fn only_external_links(self) -> OnlyExternalIter {\n OnlyExternalIter::new(self)\n }\n }\n\n pub trait OnlyInternalLinks : Sized {\n fn only_internal_links(self) -> NoExternalIter;\n\n fn no_external_links(self) -> NoExternalIter {\n self.only_internal_links()\n }\n }\n\n impl OnlyInternalLinks for LinkIter {\n fn only_internal_links(self) -> NoExternalIter {\n NoExternalIter::new(self)\n }\n }\n\n pub struct UrlIter<'a>(OnlyExternalIter, &'a Store);\n\n impl<'a> Iterator for UrlIter<'a> {\n type Item = Result<Url>;\n\n fn next(&mut self) -> Option<Self::Item> {\n use external::Link;\n\n loop {\n let next = self.0\n .next()\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n self.1\n .retrieve(id.clone())\n .chain_err(|| LEK::StoreReadError)\n .map_dbg_err(|_| format!(\"Retrieving entry for id: '{:?}' failed\", id))\n .and_then(|f| {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n f.get_link_uri_from_filelockentry()\n .map_dbg_err(|e| format!(\"URL -> Err = {:?}\", e))\n })\n });\n\n match next {\n Some(Ok(Some(link))) => return Some(Ok(link)),\n Some(Ok(None)) => continue,\n Some(Err(e)) => return Some(Err(e)),\n None => return None\n }\n }\n }\n\n }\n\n}\n\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid<A: AsRef<StoreId> + Debug>(id: A) -> bool {\n debug!(\"Checking whether this is a 'links\/external\/': '{:?}'\", id);\n id.as_ref().local().starts_with(\"links\/external\")\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that 
there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links<'a>(&self, store: &'a Store) -> Result<UrlIter<'a>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .chain_err(|| LEK::StoreReadError)\n .map(|iter| {\n debug!(\"Getting external links\");\n iter.only_external_links().urls(store)\n })\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = try!(\n ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid()\n .chain_err(|| LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to build StoreId for this hash '{:?}'\", hash)\n })\n );\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let mut file = try!(store\n .retrieve(file_id.clone())\n .chain_err(|| LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to create or retrieve an file for this link '{:?}'\", link)\n }));\n\n debug!(\"Generating header content!\");\n {\n let hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"links.external.content\") {\n Ok(Some(&Value::Table(ref table))) => table.clone(),\n Ok(Some(_)) => {\n warn!(\"There is a value at 'links.external.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(e).chain_err(|| LEK::StoreWriteError),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.insert(\"links.external.content\", Value::Table(table)) {\n return Err(e).chain_err(|| LEK::StoreWriteError);\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(e).chain_err(|| LEK::StoreWriteError);\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|links| {\n \/\/ TODO: Do not ignore errors here\n let mut links = links.filter_map(Result::ok).collect::<Vec<_>>();\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n 
debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}'\", link);\n let links = links\n .filter_map(Result::ok)\n .filter(|l| l.as_str() != link.as_str())\n .collect::<Vec<_>>();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::path::PathBuf;\n\n use libimagstore::store::Store;\n\n fn setup_logging() {\n use env_logger;\n let _ = env_logger::init().unwrap_or(());\n }\n\n pub fn get_store() -> Store {\n use libimagstore::file_abstraction::InMemoryFileAbstraction;\n let backend = Box::new(InMemoryFileAbstraction::new());\n Store::new_with_backend(PathBuf::from(\"\/\"), None, backend).unwrap()\n }\n\n\n #[test]\n fn test_simple() {\n setup_logging();\n let store = get_store();\n let mut e = store.retrieve(PathBuf::from(\"base-test_simple\")).unwrap();\n let url = Url::parse(\"http:\/\/google.de\").unwrap();\n\n assert!(e.add_external_link(&store, url.clone()).is_ok());\n\n assert_eq!(1, e.get_external_links(&store).unwrap().count());\n assert_eq!(url, e.get_external_links(&store).unwrap().next().unwrap().unwrap());\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Ensure exhaustiveness of convert functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>removed build script<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for `impl Trait` in argument position<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(universal_impl_trait)]\n#![crate_name = \"foo\"]\n\n\/\/ @has foo\/fn.foo.html\n\/\/ @has - \/\/pre 'foo('\n\/\/ @matches - '_x: impl <a class=\"trait\" href=\"[^\"]+\/trait\\.Clone\\.html\"'\n\/\/ @matches - '_z: .+impl.+trait\\.Copy\\.html.+, impl.+trait\\.Clone\\.html'\npub fn foo(_x: impl Clone, _y: i32, _z: (impl Copy, impl Clone)) {\n}\n\npub trait Trait {\n \/\/ @has foo\/trait.Trait.html\n \/\/ @has - 'method<\/a>('\n \/\/ @matches - '_x: impl <a class=\"trait\" href=\"[^\"]+\/trait\\.Debug\\.html\"'\n fn method(&self, _x: impl std::fmt::Debug) {\n }\n}\n\npub struct S<T>(T);\n\nimpl<T> S<T> {\n \/\/ @has foo\/struct.S.html\n \/\/ @has - 'bar<\/a>('\n \/\/ @matches - '_bar: impl <a class=\"trait\" href=\"[^\"]+\/trait\\.Copy\\.html\"'\n pub fn bar(_bar: impl Copy) {\n }\n\n \/\/ @has - 'baz<\/a>('\n \/\/ @matches - '_baz:.+struct\\.S\\.html.+impl .+trait\\.Clone\\.html'\n pub fn baz(_baz: S<impl Clone>) {\n }\n}\n\n\/\/ @has - 'method<\/a>('\n\/\/ @matches - '_x: impl <a class=\"trait\" href=\"[^\"]+\/trait\\.Debug\\.html\"'\nimpl<T> Trait for S<T> {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cargo fmt<commit_after><|endoftext|>"} {"text":"<commit_before>#[macro_use]\nextern crate cnx;\nextern crate env_logger;\n#[macro_use]\nextern crate error_chain;\nextern crate log;\n\nuse std::env;\n\nuse env_logger::Builder;\nuse log::LevelFilter;\n\nuse cnx::text::*;\nuse cnx::widgets::*;\nuse cnx::*;\n\nmod errors {\n error_chain! {\n links {\n Cnx(::cnx::errors::Error, ::cnx::errors::ErrorKind);\n }\n foreign_links {\n SetLogger(::log::SetLoggerError);\n }\n }\n}\n\nfn init_log() -> errors::Result<()> {\n let mut builder = Builder::new();\n builder.filter(Some(\"cnx\"), LevelFilter::Trace);\n if let Ok(rust_log) = env::var(\"RUST_LOG\") {\n builder.parse(&rust_log);\n }\n builder.try_init()?;\n Ok(())\n}\n\nfn main() -> errors::Result<()> {\n init_log()?;\n\n let attr = Attributes {\n font: Font::new(\"SourceCodePro 21\"),\n fg_color: Color::white(),\n bg_color: None,\n padding: Padding::new(8.0, 8.0, 0.0, 0.0),\n };\n let mut active_attr = attr.clone();\n active_attr.bg_color = Some(Color::blue());\n\n let mut cnx = Cnx::new(Position::Bottom)?;\n\n cnx_add_widget!(cnx, Pager::new(&cnx, active_attr, attr.clone()));\n cnx_add_widget!(cnx, ActiveWindowTitle::new(&cnx, attr.clone()));\n cnx_add_widget!(\n cnx,\n Sensors::new(&cnx, attr.clone(), vec![\"Core 0\", \"Core 1\"])\n );\n #[cfg(feature = \"volume-widget\")]\n cnx_add_widget!(cnx, Volume::new(&cnx, attr.clone()));\n cnx_add_widget!(cnx, Battery::new(&cnx, attr.clone(), Color::red()));\n cnx_add_widget!(cnx, Clock::new(&cnx, attr.clone()));\n\n cnx.run()?;\n\n Ok(())\n}\n<commit_msg>Default bar to top, for easy debugging.<commit_after>#[macro_use]\nextern crate cnx;\nextern crate env_logger;\n#[macro_use]\nextern crate error_chain;\nextern crate log;\n\nuse std::env;\n\nuse env_logger::Builder;\nuse log::LevelFilter;\n\nuse cnx::text::*;\nuse cnx::widgets::*;\nuse cnx::*;\n\nmod errors {\n error_chain! 
{\n links {\n Cnx(::cnx::errors::Error, ::cnx::errors::ErrorKind);\n }\n foreign_links {\n SetLogger(::log::SetLoggerError);\n }\n }\n}\n\nfn init_log() -> errors::Result<()> {\n let mut builder = Builder::new();\n builder.filter(Some(\"cnx\"), LevelFilter::Trace);\n if let Ok(rust_log) = env::var(\"RUST_LOG\") {\n builder.parse(&rust_log);\n }\n builder.try_init()?;\n Ok(())\n}\n\nfn main() -> errors::Result<()> {\n init_log()?;\n\n let attr = Attributes {\n font: Font::new(\"SourceCodePro 21\"),\n fg_color: Color::white(),\n bg_color: None,\n padding: Padding::new(8.0, 8.0, 0.0, 0.0),\n };\n let mut active_attr = attr.clone();\n active_attr.bg_color = Some(Color::blue());\n\n let mut cnx = Cnx::new(Position::Top)?;\n\n cnx_add_widget!(cnx, Pager::new(&cnx, active_attr, attr.clone()));\n cnx_add_widget!(cnx, ActiveWindowTitle::new(&cnx, attr.clone()));\n cnx_add_widget!(\n cnx,\n Sensors::new(&cnx, attr.clone(), vec![\"Core 0\", \"Core 1\"])\n );\n #[cfg(feature = \"volume-widget\")]\n cnx_add_widget!(cnx, Volume::new(&cnx, attr.clone()));\n cnx_add_widget!(cnx, Battery::new(&cnx, attr.clone(), Color::red()));\n cnx_add_widget!(cnx, Clock::new(&cnx, attr.clone()));\n\n cnx.run()?;\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! History API\n\n#[cfg(unix)]\nuse libc;\nuse std::collections::vec_deque;\nuse std::collections::VecDeque;\nuse std::fs::File;\nuse std::iter::DoubleEndedIterator;\nuse std::ops::Index;\nuse std::path::Path;\n\nuse super::Result;\nuse crate::config::{Config, HistoryDuplicates};\n\n\/\/\/ Search direction\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub enum Direction {\n Forward,\n Reverse,\n}\n\n\/\/\/ Current state of the history.\n#[derive(Default)]\npub struct History {\n entries: VecDeque<String>,\n max_len: usize,\n pub(crate) ignore_space: bool,\n pub(crate) ignore_dups: bool,\n}\n\nimpl History {\n pub fn new() -> Self {\n Self::with_config(Config::default())\n }\n\n pub fn with_config(config: Config) -> Self {\n Self {\n entries: VecDeque::new(),\n max_len: config.max_history_size(),\n ignore_space: config.history_ignore_space(),\n ignore_dups: config.history_duplicates() == HistoryDuplicates::IgnoreConsecutive,\n }\n }\n\n \/\/\/ Return the history entry at position `index`, starting from 0.\n pub fn get(&self, index: usize) -> Option<&String> {\n self.entries.get(index)\n }\n\n \/\/\/ Return the last history entry (i.e. previous command)\n pub fn last(&self) -> Option<&String> {\n self.entries.back()\n }\n\n \/\/\/ Add a new entry in the history.\n pub fn add<S: AsRef<str> + Into<String>>(&mut self, line: S) -> bool {\n if self.max_len == 0 {\n return false;\n }\n if line.as_ref().is_empty()\n || (self.ignore_space\n && line\n .as_ref()\n .chars()\n .next()\n .map_or(true, char::is_whitespace))\n {\n return false;\n }\n if self.ignore_dups {\n if let Some(s) = self.entries.back() {\n if s == line.as_ref() {\n return false;\n }\n }\n }\n if self.entries.len() == self.max_len {\n self.entries.pop_front();\n }\n self.entries.push_back(line.into());\n true\n }\n\n \/\/\/ Return the number of entries in the history.\n pub fn len(&self) -> usize {\n self.entries.len()\n }\n\n \/\/\/ Return true if the history has no entry.\n pub fn is_empty(&self) -> bool {\n self.entries.is_empty()\n }\n\n \/\/\/ Set the maximum length for the history. 
This function can be called even\n \/\/\/ if there is already some history, the function will make sure to retain\n \/\/\/ just the latest `len` elements if the new history length value is\n \/\/\/ smaller than the amount of items already inside the history.\n \/\/\/\n \/\/\/ Like [stifle_history](http:\/\/cnswww.cns.cwru.\n \/\/\/ edu\/php\/chet\/readline\/history.html#IDX11).\n pub fn set_max_len(&mut self, len: usize) {\n self.max_len = len;\n if len == 0 {\n self.entries.clear();\n return;\n }\n loop {\n if self.entries.len() <= len {\n break;\n }\n self.entries.pop_front();\n }\n }\n\n \/\/\/ Save the history in the specified file.\n \/\/ TODO append_history\n \/\/ http:\/\/cnswww.cns.cwru.edu\/php\/chet\/readline\/history.html#IDX30\n \/\/ TODO history_truncate_file\n \/\/ http:\/\/cnswww.cns.cwru.edu\/php\/chet\/readline\/history.html#IDX31\n pub fn save<P: AsRef<Path> + ?Sized>(&self, path: &P) -> Result<()> {\n use std::io::{BufWriter, Write};\n\n if self.is_empty() {\n return Ok(());\n }\n let old_umask = umask();\n let f = File::create(path);\n restore_umask(old_umask);\n let file = f?;\n fix_perm(&file);\n let mut wtr = BufWriter::new(file);\n for entry in &self.entries {\n wtr.write_all(entry.as_bytes())?;\n wtr.write_all(b\"\\n\")?;\n }\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/32677#issuecomment-204833485\n wtr.flush()?;\n Ok(())\n }\n\n \/\/\/ Load the history from the specified file.\n \/\/\/\n \/\/\/ # Errors\n \/\/\/ Will return `Err` if path does not already exist or could not be read.\n pub fn load<P: AsRef<Path> + ?Sized>(&mut self, path: &P) -> Result<()> {\n use std::io::{BufRead, BufReader};\n\n let file = File::open(&path)?;\n let rdr = BufReader::new(file);\n for line in rdr.lines() {\n self.add(line?); \/\/ TODO truncate to MAX_LINE\n }\n Ok(())\n }\n\n \/\/\/ Clear history\n pub fn clear(&mut self) {\n self.entries.clear()\n }\n\n \/\/\/ Search history (start position inclusive [0, len-1]).\n \/\/\/\n \/\/\/ Return the absolute index of the nearest history entry that matches\n \/\/\/ `term`.\n \/\/\/\n \/\/\/ Return None if no entry contains `term` between [start, len -1] for\n \/\/\/ forward search\n \/\/\/ or between [0, start] for reverse search.\n pub fn search(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {\n let test = |entry: &String| entry.contains(term);\n self.search_match(term, start, dir, test)\n }\n\n \/\/\/ Anchored search\n pub fn starts_with(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {\n let test = |entry: &String| entry.starts_with(term);\n self.search_match(term, start, dir, test)\n }\n\n fn search_match<F>(&self, term: &str, start: usize, dir: Direction, test: F) -> Option<usize>\n where\n F: Fn(&String) -> bool,\n {\n if term.is_empty() || start >= self.len() {\n return None;\n }\n match dir {\n Direction::Reverse => {\n let index = self\n .entries\n .iter()\n .rev()\n .skip(self.entries.len() - 1 - start)\n .position(test);\n index.and_then(|index| Some(start - index))\n }\n Direction::Forward => {\n let index = self.entries.iter().skip(start).position(test);\n index.and_then(|index| Some(index + start))\n }\n }\n }\n\n \/\/\/ Return a forward iterator.\n pub fn iter(&self) -> Iter<'_> {\n Iter(self.entries.iter())\n }\n}\n\nimpl Index<usize> for History {\n type Output = String;\n\n fn index(&self, index: usize) -> &String {\n &self.entries[index]\n }\n}\n\nimpl<'a> IntoIterator for &'a History {\n type IntoIter = Iter<'a>;\n type Item = &'a String;\n\n fn into_iter(self) -> Iter<'a> 
{\n self.iter()\n }\n}\n\n\/\/\/ History iterator.\npub struct Iter<'a>(vec_deque::Iter<'a, String>);\n\nimpl<'a> Iterator for Iter<'a> {\n type Item = &'a String;\n\n fn next(&mut self) -> Option<&'a String> {\n self.0.next()\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.0.size_hint()\n }\n}\n\nimpl<'a> DoubleEndedIterator for Iter<'a> {\n fn next_back(&mut self) -> Option<&'a String> {\n self.0.next_back()\n }\n}\n\n#[cfg(windows)]\nfn umask() -> u16 {\n 0\n}\n#[cfg(unix)]\nfn umask() -> libc::mode_t {\n unsafe { libc::umask(libc::S_IXUSR | libc::S_IRWXG | libc::S_IRWXO) }\n}\n#[cfg(windows)]\nfn restore_umask(_: u16) {}\n#[cfg(unix)]\nfn restore_umask(old_umask: libc::mode_t) {\n unsafe {\n libc::umask(old_umask);\n }\n}\n\n#[cfg(windows)]\nfn fix_perm(_: &File) {}\n#[cfg(unix)]\nfn fix_perm(file: &File) {\n use std::os::unix::io::AsRawFd;\n unsafe {\n libc::fchmod(file.as_raw_fd(), libc::S_IRUSR | libc::S_IWUSR);\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Direction, History};\n use crate::config::Config;\n use std::path::Path;\n use tempdir;\n\n fn init() -> History {\n let mut history = History::new();\n assert!(history.add(\"line1\"));\n assert!(history.add(\"line2\"));\n assert!(history.add(\"line3\"));\n history\n }\n\n #[test]\n fn new() {\n let history = History::new();\n assert_eq!(0, history.entries.len());\n }\n\n #[test]\n fn add() {\n let config = Config::builder().history_ignore_space(true).build();\n let mut history = History::with_config(config);\n assert_eq!(config.max_history_size(), history.max_len);\n assert!(history.add(\"line1\"));\n assert!(history.add(\"line2\"));\n assert!(!history.add(\"line2\"));\n assert!(!history.add(\"\"));\n assert!(!history.add(\" line3\"));\n }\n\n #[test]\n fn set_max_len() {\n let mut history = init();\n history.set_max_len(1);\n assert_eq!(1, history.entries.len());\n assert_eq!(Some(&\"line3\".to_owned()), history.last());\n }\n\n #[test]\n fn save() {\n let mut history = init();\n let td = tempdir::TempDir::new_in(&Path::new(\".\"), \"histo\").unwrap();\n let history_path = td.path().join(\".history\");\n\n history.save(&history_path).unwrap();\n history.load(&history_path).unwrap();\n td.close().unwrap();\n }\n\n #[test]\n fn search() {\n let history = init();\n assert_eq!(None, history.search(\"\", 0, Direction::Forward));\n assert_eq!(None, history.search(\"none\", 0, Direction::Forward));\n assert_eq!(None, history.search(\"line\", 3, Direction::Forward));\n\n assert_eq!(Some(0), history.search(\"line\", 0, Direction::Forward));\n assert_eq!(Some(1), history.search(\"line\", 1, Direction::Forward));\n assert_eq!(Some(2), history.search(\"line3\", 1, Direction::Forward));\n }\n\n #[test]\n fn reverse_search() {\n let history = init();\n assert_eq!(None, history.search(\"\", 2, Direction::Reverse));\n assert_eq!(None, history.search(\"none\", 2, Direction::Reverse));\n assert_eq!(None, history.search(\"line\", 3, Direction::Reverse));\n\n assert_eq!(Some(2), history.search(\"line\", 2, Direction::Reverse));\n assert_eq!(Some(1), history.search(\"line\", 1, Direction::Reverse));\n assert_eq!(Some(0), history.search(\"line1\", 1, Direction::Reverse));\n }\n}\n<commit_msg>Fix clippy warnings<commit_after>\/\/! 
History API\n\n#[cfg(unix)]\nuse libc;\nuse std::collections::vec_deque;\nuse std::collections::VecDeque;\nuse std::fs::File;\nuse std::iter::DoubleEndedIterator;\nuse std::ops::Index;\nuse std::path::Path;\n\nuse super::Result;\nuse crate::config::{Config, HistoryDuplicates};\n\n\/\/\/ Search direction\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub enum Direction {\n Forward,\n Reverse,\n}\n\n\/\/\/ Current state of the history.\n#[derive(Default)]\npub struct History {\n entries: VecDeque<String>,\n max_len: usize,\n pub(crate) ignore_space: bool,\n pub(crate) ignore_dups: bool,\n}\n\nimpl History {\n pub fn new() -> Self {\n Self::with_config(Config::default())\n }\n\n pub fn with_config(config: Config) -> Self {\n Self {\n entries: VecDeque::new(),\n max_len: config.max_history_size(),\n ignore_space: config.history_ignore_space(),\n ignore_dups: config.history_duplicates() == HistoryDuplicates::IgnoreConsecutive,\n }\n }\n\n \/\/\/ Return the history entry at position `index`, starting from 0.\n pub fn get(&self, index: usize) -> Option<&String> {\n self.entries.get(index)\n }\n\n \/\/\/ Return the last history entry (i.e. previous command)\n pub fn last(&self) -> Option<&String> {\n self.entries.back()\n }\n\n \/\/\/ Add a new entry in the history.\n pub fn add<S: AsRef<str> + Into<String>>(&mut self, line: S) -> bool {\n if self.max_len == 0 {\n return false;\n }\n if line.as_ref().is_empty()\n || (self.ignore_space\n && line\n .as_ref()\n .chars()\n .next()\n .map_or(true, char::is_whitespace))\n {\n return false;\n }\n if self.ignore_dups {\n if let Some(s) = self.entries.back() {\n if s == line.as_ref() {\n return false;\n }\n }\n }\n if self.entries.len() == self.max_len {\n self.entries.pop_front();\n }\n self.entries.push_back(line.into());\n true\n }\n\n \/\/\/ Return the number of entries in the history.\n pub fn len(&self) -> usize {\n self.entries.len()\n }\n\n \/\/\/ Return true if the history has no entry.\n pub fn is_empty(&self) -> bool {\n self.entries.is_empty()\n }\n\n \/\/\/ Set the maximum length for the history. 
This function can be called even\n \/\/\/ if there is already some history, the function will make sure to retain\n \/\/\/ just the latest `len` elements if the new history length value is\n \/\/\/ smaller than the amount of items already inside the history.\n \/\/\/\n \/\/\/ Like [stifle_history](http:\/\/cnswww.cns.cwru.\n \/\/\/ edu\/php\/chet\/readline\/history.html#IDX11).\n pub fn set_max_len(&mut self, len: usize) {\n self.max_len = len;\n if len == 0 {\n self.entries.clear();\n return;\n }\n loop {\n if self.entries.len() <= len {\n break;\n }\n self.entries.pop_front();\n }\n }\n\n \/\/\/ Save the history in the specified file.\n \/\/ TODO append_history\n \/\/ http:\/\/cnswww.cns.cwru.edu\/php\/chet\/readline\/history.html#IDX30\n \/\/ TODO history_truncate_file\n \/\/ http:\/\/cnswww.cns.cwru.edu\/php\/chet\/readline\/history.html#IDX31\n pub fn save<P: AsRef<Path> + ?Sized>(&self, path: &P) -> Result<()> {\n use std::io::{BufWriter, Write};\n\n if self.is_empty() {\n return Ok(());\n }\n let old_umask = umask();\n let f = File::create(path);\n restore_umask(old_umask);\n let file = f?;\n fix_perm(&file);\n let mut wtr = BufWriter::new(file);\n for entry in &self.entries {\n wtr.write_all(entry.as_bytes())?;\n wtr.write_all(b\"\\n\")?;\n }\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/32677#issuecomment-204833485\n wtr.flush()?;\n Ok(())\n }\n\n \/\/\/ Load the history from the specified file.\n \/\/\/\n \/\/\/ # Errors\n \/\/\/ Will return `Err` if path does not already exist or could not be read.\n pub fn load<P: AsRef<Path> + ?Sized>(&mut self, path: &P) -> Result<()> {\n use std::io::{BufRead, BufReader};\n\n let file = File::open(&path)?;\n let rdr = BufReader::new(file);\n for line in rdr.lines() {\n self.add(line?); \/\/ TODO truncate to MAX_LINE\n }\n Ok(())\n }\n\n \/\/\/ Clear history\n pub fn clear(&mut self) {\n self.entries.clear()\n }\n\n \/\/\/ Search history (start position inclusive [0, len-1]).\n \/\/\/\n \/\/\/ Return the absolute index of the nearest history entry that matches\n \/\/\/ `term`.\n \/\/\/\n \/\/\/ Return None if no entry contains `term` between [start, len -1] for\n \/\/\/ forward search\n \/\/\/ or between [0, start] for reverse search.\n pub fn search(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {\n let test = |entry: &String| entry.contains(term);\n self.search_match(term, start, dir, test)\n }\n\n \/\/\/ Anchored search\n pub fn starts_with(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {\n let test = |entry: &String| entry.starts_with(term);\n self.search_match(term, start, dir, test)\n }\n\n fn search_match<F>(&self, term: &str, start: usize, dir: Direction, test: F) -> Option<usize>\n where\n F: Fn(&String) -> bool,\n {\n if term.is_empty() || start >= self.len() {\n return None;\n }\n match dir {\n Direction::Reverse => {\n let index = self\n .entries\n .iter()\n .rev()\n .skip(self.entries.len() - 1 - start)\n .position(test);\n index.map(|index| start - index)\n }\n Direction::Forward => {\n let index = self.entries.iter().skip(start).position(test);\n index.map(|index| index + start)\n }\n }\n }\n\n \/\/\/ Return a forward iterator.\n pub fn iter(&self) -> Iter<'_> {\n Iter(self.entries.iter())\n }\n}\n\nimpl Index<usize> for History {\n type Output = String;\n\n fn index(&self, index: usize) -> &String {\n &self.entries[index]\n }\n}\n\nimpl<'a> IntoIterator for &'a History {\n type IntoIter = Iter<'a>;\n type Item = &'a String;\n\n fn into_iter(self) -> Iter<'a> {\n self.iter()\n 
}\n}\n\n\/\/\/ History iterator.\npub struct Iter<'a>(vec_deque::Iter<'a, String>);\n\nimpl<'a> Iterator for Iter<'a> {\n type Item = &'a String;\n\n fn next(&mut self) -> Option<&'a String> {\n self.0.next()\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.0.size_hint()\n }\n}\n\nimpl<'a> DoubleEndedIterator for Iter<'a> {\n fn next_back(&mut self) -> Option<&'a String> {\n self.0.next_back()\n }\n}\n\n#[cfg(windows)]\nfn umask() -> u16 {\n 0\n}\n#[cfg(unix)]\nfn umask() -> libc::mode_t {\n unsafe { libc::umask(libc::S_IXUSR | libc::S_IRWXG | libc::S_IRWXO) }\n}\n#[cfg(windows)]\nfn restore_umask(_: u16) {}\n#[cfg(unix)]\nfn restore_umask(old_umask: libc::mode_t) {\n unsafe {\n libc::umask(old_umask);\n }\n}\n\n#[cfg(windows)]\nfn fix_perm(_: &File) {}\n#[cfg(unix)]\nfn fix_perm(file: &File) {\n use std::os::unix::io::AsRawFd;\n unsafe {\n libc::fchmod(file.as_raw_fd(), libc::S_IRUSR | libc::S_IWUSR);\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Direction, History};\n use crate::config::Config;\n use std::path::Path;\n use tempdir;\n\n fn init() -> History {\n let mut history = History::new();\n assert!(history.add(\"line1\"));\n assert!(history.add(\"line2\"));\n assert!(history.add(\"line3\"));\n history\n }\n\n #[test]\n fn new() {\n let history = History::new();\n assert_eq!(0, history.entries.len());\n }\n\n #[test]\n fn add() {\n let config = Config::builder().history_ignore_space(true).build();\n let mut history = History::with_config(config);\n assert_eq!(config.max_history_size(), history.max_len);\n assert!(history.add(\"line1\"));\n assert!(history.add(\"line2\"));\n assert!(!history.add(\"line2\"));\n assert!(!history.add(\"\"));\n assert!(!history.add(\" line3\"));\n }\n\n #[test]\n fn set_max_len() {\n let mut history = init();\n history.set_max_len(1);\n assert_eq!(1, history.entries.len());\n assert_eq!(Some(&\"line3\".to_owned()), history.last());\n }\n\n #[test]\n fn save() {\n let mut history = init();\n let td = tempdir::TempDir::new_in(&Path::new(\".\"), \"histo\").unwrap();\n let history_path = td.path().join(\".history\");\n\n history.save(&history_path).unwrap();\n history.load(&history_path).unwrap();\n td.close().unwrap();\n }\n\n #[test]\n fn search() {\n let history = init();\n assert_eq!(None, history.search(\"\", 0, Direction::Forward));\n assert_eq!(None, history.search(\"none\", 0, Direction::Forward));\n assert_eq!(None, history.search(\"line\", 3, Direction::Forward));\n\n assert_eq!(Some(0), history.search(\"line\", 0, Direction::Forward));\n assert_eq!(Some(1), history.search(\"line\", 1, Direction::Forward));\n assert_eq!(Some(2), history.search(\"line3\", 1, Direction::Forward));\n }\n\n #[test]\n fn reverse_search() {\n let history = init();\n assert_eq!(None, history.search(\"\", 2, Direction::Reverse));\n assert_eq!(None, history.search(\"none\", 2, Direction::Reverse));\n assert_eq!(None, history.search(\"line\", 3, Direction::Reverse));\n\n assert_eq!(Some(2), history.search(\"line\", 2, Direction::Reverse));\n assert_eq!(Some(1), history.search(\"line\", 1, Direction::Reverse));\n assert_eq!(Some(0), history.search(\"line1\", 1, Direction::Reverse));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:shirt: Comment out unused functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>src\/addressbook: cargo fmt expects documentation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement apple_music client<commit_after>use scraping::{Html, Selector};\nuse 
scraping::element_ref::ElementRef;\nuse hyper::Client;\nuse hyper::header::Connection;\nuse hyper::header::ConnectionOption;\nuse std::io::Read;\nuse std::error;\nuse std::fmt;\nuse regex::Regex;\n\nstatic BASE_URL: &'static str = \"http:\/\/tools.applemusic.com\/embed\/v1\/\";\nstatic MUSIC_URL: &'static str = r#\"musicUrl = \"([\\x00-\\x21\\x23-\\x7F]+)\"\"#; \/\/ except \\x22(\")\n\n#[derive(Debug)]\npub struct Song {\n pub id: String,\n pub country: String,\n pub title: String,\n pub artwork_url: String,\n pub artist: String,\n pub audio_url: String,\n pub music_url: String,\n}\n\n#[derive(Debug)]\npub struct Album {\n pub id: String,\n pub country: String,\n pub title: String,\n pub artwork_url: String,\n pub album_artist: String,\n pub music_url: String,\n pub genre: String,\n}\n\n#[derive(Debug)]\npub struct Playlist {\n pub id: String,\n pub country: String,\n pub title: String,\n pub curator: String,\n pub description: String,\n pub artwork_url: String,\n pub music_url: String,\n pub count: String,\n}\n\n#[derive(Debug)]\npub struct Track {\n pub title: String,\n pub artwork_url: String,\n pub artist: String,\n pub audio_url: String,\n}\n\n#[derive(Debug)]\npub struct ScrapeError {\n reason: String,\n}\n\nimpl fmt::Display for ScrapeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}\", self.reason)\n }\n}\n\ntype ScrapeResult<T> = Result<T, ScrapeError>;\n\nimpl error::Error for ScrapeError {\n fn description(&self) -> &str {\n &self.reason\n }\n}\n\npub fn fetch_song(id: &str, country: &str) -> ScrapeResult<Song> {\n let client = Client::new();\n let url = format!(\"{}\/song\/{}?country={}\", BASE_URL, id, country);\n let mut res = client.get(&url)\n .header(Connection(vec![ConnectionOption::Close]))\n .send()\n .unwrap();\n if !res.status.is_success() {\n return Err(ScrapeError { reason: \"network error\".to_string() })\n }\n let mut body = String::new();\n res.read_to_string(&mut body).unwrap();\n let fragment = Html::parse_fragment(&body);\n\n let artwork_url = try!(extract_artwork_url(fragment.clone()));\n let title = try!(extract_song_title(fragment.clone()));\n let artist = try!(extract_song_artist(fragment.clone()));\n let audio_url = try!(extract_audio_url(fragment.clone()));\n let music_url = try!(extract_music_url(fragment.clone()));\n Ok(Song {\n id: id.to_string(),\n country: country.to_string(),\n title: title,\n artist: artist,\n artwork_url: artwork_url,\n audio_url: audio_url,\n music_url: music_url,\n })\n}\n\npub fn fetch_album(id: &str, country: &str) -> ScrapeResult<Album> {\n let client = Client::new();\n let url = format!(\"{}\/album\/{}?country={}\", BASE_URL, id, country);\n let mut res = client.get(&url)\n .header(Connection(vec![ConnectionOption::Close]))\n .send()\n .unwrap();\n if !res.status.is_success() {\n return Err(ScrapeError { reason: \"network error\".to_string() })\n }\n let mut body = String::new();\n res.read_to_string(&mut body).unwrap();\n let fragment = Html::parse_fragment(&body);\n\n let artwork_url = try!(extract_artwork_url(fragment.clone()));\n let title = try!(extract_title(fragment.clone()));\n let album_artist = try!(extract_album_artist(fragment.clone()));\n let count = try!(extract_count(fragment.clone()));\n let music_url = try!(extract_music_url(fragment.clone()));\n let mut tracks = Vec::new();\n let tracks_selector = Selector::parse(\".track\").unwrap();\n for node in fragment.select(&tracks_selector) {\n tracks.push(extract_track(node));\n }\n Ok(Album {\n id: id.to_string(),\n country: 
country.to_string(),\n title: title,\n album_artist: album_artist,\n artwork_url: artwork_url,\n music_url: music_url,\n genre: count,\n })\n}\n\npub fn fetch_playlist(id: &str, country: &str) -> ScrapeResult<Playlist> {\n let client = Client::new();\n let url = format!(\"{}\/playlist\/{}?country={}\", BASE_URL, id, country);\n let mut res = client.get(&url)\n .header(Connection(vec![ConnectionOption::Close]))\n .send()\n .unwrap();\n if !res.status.is_success() {\n return Err(ScrapeError { reason: \"network error\".to_string() })\n }\n let mut body = String::new();\n res.read_to_string(&mut body).unwrap();\n let fragment = Html::parse_fragment(&body);\n\n let artwork_url = try!(extract_artwork_url(fragment.clone()));\n let title = try!(extract_title(fragment.clone()));\n let description = try!(extract_description(fragment.clone()));\n let curator = try!(extract_curator(fragment.clone()));\n let count = try!(extract_count(fragment.clone()));\n let music_url = try!(extract_music_url(fragment.clone()));\n let mut tracks = Vec::new();\n let tracks_selector = Selector::parse(\".track\").unwrap();\n for node in fragment.select(&tracks_selector) {\n let track = extract_track(node);\n tracks.push(track);\n }\n Ok(Playlist {\n id: id.to_string(),\n country: country.to_string(),\n title: title,\n curator: curator,\n description: description,\n artwork_url: artwork_url,\n music_url: music_url,\n count: count,\n })\n}\n\nfn extract_music_url(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\"script\").unwrap();\n html.select(&selector)\n .last()\n .and_then(|img| img.text().next())\n .and_then(|script| match Regex::new(MUSIC_URL) {\n Ok(re) => match re.captures(script) {\n Some(cap) => Some(cap[1].to_string()),\n None => None\n },\n Err(_) => None\n })\n .ok_or(ScrapeError { reason: \"music url is not found\".to_string() })\n}\n\nfn extract_artwork_url(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\"#heroArtImage > img\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|img| img.value().attr(\"src\"))\n .map(|url| url.trim().to_string())\n .ok_or(ScrapeError { reason: \"artwork url is not found\".to_string() })\n}\n\nfn extract_audio_url(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\"#heroArtImage > .song-audio\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|img| img.value().attr(\"data-url\"))\n .map(|url| url.trim().to_string())\n .ok_or(ScrapeError { reason: \"audio url is not found\".to_string() })\n}\n\nfn extract_description(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\"#description\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|div| div.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"description is not found\".to_string() })\n}\n\nfn extract_title(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\".heroMeta > .title > a\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"title is not found\".to_string() })\n}\n\nfn extract_song_title(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\".heroMeta > .details > .title-explicit > .title > a\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"title is not found\".to_string() })\n}\n\nfn extract_curator(html: Html) -> ScrapeResult<String> 
{\n let selector = Selector::parse(\".heroMeta > .curator > a\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"curator is not found\".to_string() })\n}\n\nfn extract_album_artist(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\".heroMeta > .album-artist > a\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"album artist is not found\".to_string() })\n}\n\nfn extract_song_artist(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\".heroMeta > .details > .song-artist > a\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"song artist is not found\".to_string() })\n}\n\nfn extract_count(html: Html) -> ScrapeResult<String> {\n let selector = Selector::parse(\".heroMeta > .count\").unwrap();\n html.select(&selector)\n .next()\n .and_then(|a| a.text().next())\n .map(|text| text.trim().to_string())\n .ok_or(ScrapeError { reason: \"count is not found\".to_string() })\n}\n\nfn extract_track(node: ElementRef) -> Track {\n let img_selector = Selector::parse(\".artworkImage > img\").unwrap();\n let title_selector = Selector::parse(\".title\").unwrap();\n let artist_selector = Selector::parse(\".artist\").unwrap();\n let audio_selector = Selector::parse(\".playlist-audio\").unwrap();\n\n let mut title: String = \"\".to_string();\n let mut artwork_url: String = \"\".to_string();\n let mut artist: String = \"\".to_string();\n let mut audio_url: String = \"\".to_string();\n if let Some(n) = node.select(&title_selector).next() {\n if let Some(text) = n.text().next() {\n title = text.trim().to_string();\n }\n }\n\n if let Some(n) = node.select(&artist_selector).next() {\n if let Some(text) = n.text().next() {\n artist = text.trim().to_string();\n }\n }\n\n if let Some(n) = node.select(&img_selector).next() {\n if let Some(url) = n.value().attr(\"src\") {\n artwork_url = url.trim().to_string();\n }\n }\n\n if let Some(n) = node.select(&audio_selector).next() {\n if let Some(url) = n.value().attr(\"data-url\") {\n audio_url = url.trim().to_string();\n }\n }\n\n Track {\n title: title,\n artwork_url: artwork_url,\n artist: artist,\n audio_url: audio_url,\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::fetch_playlist;\n use super::fetch_album;\n use super::fetch_song;\n #[test]\n fn test_fetch_playlist() {\n let playlist = fetch_playlist(\"pl.2ff0e502db0c44a598a7cb2261a5e6b2\", \"jp\").unwrap();\n assert_eq!(playlist.id, \"pl.2ff0e502db0c44a598a7cb2261a5e6b2\");\n assert_eq!(playlist.music_url, \"https:\/\/itunes.apple.com\/jp\/playlist\/lili-limit-ga-xuanbu-maipureirisuto\/idpl.2ff0e502db0c44a598a7cb2261a5e6b2?app=music\");\n }\n #[test]\n fn test_fetch_album() {\n let album = fetch_album(\"1160715126\", \"jp\").unwrap();\n assert_eq!(album.id, \"1160715126\");\n }\n #[test]\n fn test_fetch_song() {\n let song = fetch_song(\"1160715431\", \"jp\").unwrap();\n assert_eq!(song.id, \"1160715431\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added tracechain tests<commit_after>extern crate rustyprefetch;\nuse rustyprefetch::librp::tracechain;\nuse rustyprefetch::librp::utils;\n\n#[test]\nfn tracechain_v17_test() {\n let v17_tracechain_buffer: &[u8] = &[\n 0x01,0x00,0x00,0x00,0x00,0x04,0x00,0x00,0xFA,0xFF,0x07,0x00\n ];\n\n let v17_tracechain_entry = 
match tracechain::TraceChainEntryV17::new(v17_tracechain_buffer){\n Ok(v17_tracechain_entry) => v17_tracechain_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v17_tracechain_entry.next_index, 1);\n assert_eq!(v17_tracechain_entry.block_load_count, 1024);\n assert_eq!(v17_tracechain_entry.unknown1, 250);\n assert_eq!(v17_tracechain_entry.unknown2, 255);\n assert_eq!(v17_tracechain_entry.unknown3, 7);\n}\n\n#[test]\nfn tracechain_v26_test() {\n let v26_tracechain_buffer: &[u8] = &[\n 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x01,0x16,0x00\n ];\n\n let v26_tracechain_entry = match tracechain::TraceChainEntryV26::new(v26_tracechain_buffer){\n Ok(v26_tracechain_entry) => v26_tracechain_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v26_tracechain_entry.next_index, 1);\n assert_eq!(v26_tracechain_entry.block_load_count, 0);\n assert_eq!(v26_tracechain_entry.unknown1, 3);\n assert_eq!(v26_tracechain_entry.unknown2, 1);\n assert_eq!(v26_tracechain_entry.unknown3, 22);\n}\n\n#[test]\nfn tracechain_v30_test() {\n let v30_tracechain_buffer: &[u8] = &[\n 0x00,0x00,0x00,0x00,0x06,0x00,0x00,0x00\n ];\n\n let v30_tracechain_entry = match tracechain::TraceChainEntryV30::new(v30_tracechain_buffer){\n Ok(v30_tracechain_entry) => v30_tracechain_entry,\n Err(error) => panic!(error)\n };\n\n assert_eq!(v30_tracechain_entry.block_load_count, 0);\n assert_eq!(v30_tracechain_entry.unknown1,6);\n assert_eq!(v30_tracechain_entry.unknown2, 0);\n assert_eq!(v30_tracechain_entry.unknown3, 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a TUI benchmark program<commit_after>\/\/ A program that initializes the TUI and adds lines in the file (given as first argument) to it,\n\/\/ with a draw() call after every line added.\n\/\/\n\/\/ After adding all lines the program just quits.\n\/\/\n\/\/ Useful for benchmarking TUI::draw().\n\nuse libtiny_tui::{Colors, TUI};\nuse libtiny_ui::*;\nuse std::fs::File;\nuse std::io::{BufRead, BufReader};\nuse tokio::runtime::current_thread::Runtime;\n\nfn main() {\n let args = std::env::args().collect::<Vec<_>>();\n let file_path = &args[1];\n let file = File::open(file_path).unwrap();\n let file_buffered = BufReader::new(file);\n let lines = file_buffered.lines().map(Result::unwrap).collect();\n\n let mut executor = Runtime::new().unwrap();\n let (tui, _) = TUI::run(Colors::default(), &mut executor);\n\n tui.new_server_tab(\"test\");\n tui.draw();\n\n executor.block_on(bench_task(tui, lines));\n\n \/\/ executor.run();\n}\n\nasync fn bench_task(tui: TUI, lines: Vec<String>) {\n let msg_target = MsgTarget::Server { serv: \"test\" };\n let time = time::now();\n\n for line in &lines {\n tui.add_privmsg(\"server\", line, time, &msg_target, false, false);\n tui.draw();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use core::clone::Clone;\nuse core::mem::size_of;\nuse core::option::Option;\n\nuse alloc::boxed::*;\n\nuse common::debug::*;\nuse common::memory::*;\nuse common::random::*;\nuse common::string::*;\nuse common::vec::*;\n\nuse network::common::*;\n\n#[derive(Copy, Clone)]\npub struct TCPHeader {\n pub src: n16,\n pub dst: n16,\n pub sequence: n32,\n pub ack_num: n32,\n pub flags: n16,\n pub window_size: n16,\n pub checksum: Checksum,\n pub urgent_pointer: n16\n}\n\npub struct TCP {\n header: TCPHeader,\n options: Vec<u8>,\n data: Vec<u8>,\n src_ip: IPv4Addr,\n dst_ip: IPv4Addr\n}\n\nimpl ToBytes for TCP {\n fn to_bytes(&self) -> Vec<u8> {\n unsafe{\n let header_ptr: *const TCPHeader = &self.header;\n let mut ret = Vec::from_raw_buf(header_ptr as 
*const u8, size_of::<TCPHeader>());\n ret.push_all(&self.options);\n ret.push_all(&self.data);\n return ret;\n }\n }\n}\n\nconst TCP_FIN: u16 = 1;\nconst TCP_SYN: u16 = 1 << 1;\nconst TCP_RST: u16 = 1 << 2;\nconst TCP_PSH: u16 = 1 << 3;\nconst TCP_ACK: u16 = 1 << 4;\n\n#[allow(trivial_casts)]\nimpl Response for TCP {\n fn respond(&self, callback: Box<FnBox(Vec<Vec<u8>>)>){\n if cfg!(debug_network){\n d(\" \");\n self.d();\n dl();\n }\n\n let allow;\n match self.header.dst.get() {\n 80 => allow = true,\n _ => allow = false\n }\n\n if allow {\n if self.header.flags.get() & TCP_SYN != 0 {\n if cfg!(debug_network){\n d(\" TCP SYN\\n\");\n }\n let mut response = TCP {\n header: self.header,\n options: self.options.clone(),\n data: Vec::new(),\n src_ip: IP_ADDR,\n dst_ip: self.src_ip\n };\n\n response.header.src = self.header.dst;\n response.header.dst = self.header.src;\n response.header.flags.set(self.header.flags.get() | TCP_ACK);\n response.header.ack_num.set(self.header.sequence.get() + 1);\n response.header.sequence.set(rand() as u32);\n\n unsafe{\n response.header.checksum.data = 0;\n\n let proto = n16::new(0x06);\n let segment_len = n16::new((size_of::<TCPHeader>() + response.options.len() + response.data.len()) as u16);\n response.header.checksum.data = Checksum::compile(\n Checksum::sum((&response.src_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&response.dst_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&response.header as *const TCPHeader) as usize, size_of::<TCPHeader>()) +\n Checksum::sum(response.options.as_ptr() as usize, response.options.len()) +\n Checksum::sum(response.data.as_ptr() as usize, response.data.len())\n );\n }\n\n let mut ret: Vec<Vec<u8>> = Vec::new();\n ret.push(response.to_bytes());\n callback(ret);\n }else if self.header.flags.get() & TCP_PSH != 0{\n if cfg!(debug_network){\n d(\" TCP PSH\\n\");\n }\n \/\/Send TCP_ACK_PSH_FIN in one statement\n {\n let tcp_header = self.header;\n let tcp_options = self.options.clone();\n let tcp_dst_ip = self.src_ip;\n let tcp_data = self.data.clone();\n let tcp_callback = box move |data: String|{\n let mut response = TCP {\n header: tcp_header,\n options: tcp_options.clone(),\n data: Vec::new(),\n src_ip: IP_ADDR,\n dst_ip: tcp_dst_ip\n };\n\n response.header.src = tcp_header.dst;\n response.header.dst = tcp_header.src;\n response.header.flags.set(tcp_header.flags.get() | TCP_FIN);\n response.header.ack_num.set(tcp_header.sequence.get() + tcp_data.len() as u32);\n response.header.sequence.set(tcp_header.ack_num.get());\n\n unsafe{\n let data_ptr = data.to_c_str();\n response.data = Vec::from_raw_buf(data_ptr, data.len());\n unalloc(data_ptr as usize);\n }\n\n response.header.checksum.data = 0;\n\n let proto = n16::new(0x06);\n let segment_len = n16::new((size_of::<TCPHeader>() + response.options.len() + response.data.len()) as u16);\n unsafe{\n response.header.checksum.data = Checksum::compile(\n Checksum::sum((&response.src_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&response.dst_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&response.header as *const TCPHeader) as usize, size_of::<TCPHeader>()) +\n 
Checksum::sum(response.options.as_ptr() as usize, response.options.len()) +\n Checksum::sum(response.data.as_ptr() as usize, response.data.len())\n );\n }\n\n let mut ret: Vec<Vec<u8>> = Vec::new();\n ret.push(response.to_bytes());\n callback(ret);\n };\n\n match self.header.dst.get() {\n 80 => {\n let request = String::from_c_slice(self.data.as_slice());\n\n let mut path = \"\/\".to_string();\n\n for row in request.split(\"\\r\\n\".to_string()) {\n let mut i = 0;\n for col in row.split(\" \".to_string()) {\n match i {\n 1 => path = col,\n _ => ()\n }\n i += 1;\n }\n break;\n }\n\n \/\/session.request(&URL::from_string(\"http:\/\/\".to_string() + path), tcp_callback);\n },\n _ => ()\n }\n }\n }\n }else{\n d(\" TCP RST TODO\\n\");\n }\n }\n}\n\nimpl TCP {\n pub fn from_bytes_ipv4(bytes: Vec<u8>, src_ip: IPv4Addr, dst_ip: IPv4Addr) -> Option<TCP> {\n if bytes.len() >= size_of::<TCPHeader>() {\n unsafe {\n let header = *(bytes.as_ptr() as *const TCPHeader);\n let header_len = ((header.flags.get() & 0xF000) >> 10) as usize;\n\n return Option::Some(TCP {\n header: header,\n options: bytes.sub(size_of::<TCPHeader>(), header_len - size_of::<TCPHeader>()),\n data: bytes.sub(header_len, bytes.len() - header_len),\n src_ip: src_ip,\n dst_ip: dst_ip\n });\n }\n }\n return Option::None;\n }\n\n pub fn d(&self){\n d(\"TCP from \");\n dd(self.header.src.get() as usize);\n d(\" to \");\n dd(self.header.dst.get() as usize);\n d(\" options \");\n dd(self.options.len());\n d(\" data \");\n dd(self.data.len());\n }\n}\n<commit_msg>Allow HTTP response<commit_after>use core::clone::Clone;\nuse core::mem::size_of;\nuse core::option::Option;\n\nuse alloc::boxed::*;\n\nuse common::debug::*;\nuse common::memory::*;\nuse common::random::*;\nuse common::resource::*;\nuse common::string::*;\nuse common::vec::*;\n\nuse network::common::*;\n\n#[derive(Copy, Clone)]\npub struct TCPHeader {\n pub src: n16,\n pub dst: n16,\n pub sequence: n32,\n pub ack_num: n32,\n pub flags: n16,\n pub window_size: n16,\n pub checksum: Checksum,\n pub urgent_pointer: n16\n}\n\npub struct TCP {\n header: TCPHeader,\n options: Vec<u8>,\n data: Vec<u8>,\n src_ip: IPv4Addr,\n dst_ip: IPv4Addr\n}\n\nimpl ToBytes for TCP {\n fn to_bytes(&self) -> Vec<u8> {\n unsafe{\n let header_ptr: *const TCPHeader = &self.header;\n let mut ret = Vec::from_raw_buf(header_ptr as *const u8, size_of::<TCPHeader>());\n ret.push_all(&self.options);\n ret.push_all(&self.data);\n return ret;\n }\n }\n}\n\nconst TCP_FIN: u16 = 1;\nconst TCP_SYN: u16 = 1 << 1;\nconst TCP_RST: u16 = 1 << 2;\nconst TCP_PSH: u16 = 1 << 3;\nconst TCP_ACK: u16 = 1 << 4;\n\n#[allow(trivial_casts)]\nimpl Response for TCP {\n fn respond(&self, callback: Box<FnBox(Vec<Vec<u8>>)>){\n if cfg!(debug_network){\n d(\" \");\n self.d();\n dl();\n }\n\n let allow;\n match self.header.dst.get() {\n 80 => allow = true,\n _ => allow = false\n }\n\n if allow {\n if self.header.flags.get() & TCP_SYN != 0 {\n if cfg!(debug_network){\n d(\" TCP SYN\\n\");\n }\n let mut response = TCP {\n header: self.header,\n options: self.options.clone(),\n data: Vec::new(),\n src_ip: IP_ADDR,\n dst_ip: self.src_ip\n };\n\n response.header.src = self.header.dst;\n response.header.dst = self.header.src;\n response.header.flags.set(self.header.flags.get() | TCP_ACK);\n response.header.ack_num.set(self.header.sequence.get() + 1);\n response.header.sequence.set(rand() as u32);\n\n unsafe{\n response.header.checksum.data = 0;\n\n let proto = n16::new(0x06);\n let segment_len = n16::new((size_of::<TCPHeader>() + 
response.options.len() + response.data.len()) as u16);\n response.header.checksum.data = Checksum::compile(\n Checksum::sum((&response.src_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&response.dst_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&response.header as *const TCPHeader) as usize, size_of::<TCPHeader>()) +\n Checksum::sum(response.options.as_ptr() as usize, response.options.len()) +\n Checksum::sum(response.data.as_ptr() as usize, response.data.len())\n );\n }\n\n let mut ret: Vec<Vec<u8>> = Vec::new();\n ret.push(response.to_bytes());\n callback(ret);\n }else if self.header.flags.get() & TCP_PSH != 0{\n if cfg!(debug_network){\n d(\" TCP PSH\\n\");\n }\n \/\/Send TCP_ACK_PSH_FIN in one statement\n {\n match self.header.dst.get() {\n 80 => {\n let request = String::from_c_slice(self.data.as_slice());\n\n let mut path = \"\/\".to_string();\n\n for row in request.split(\"\\r\\n\".to_string()) {\n let mut i = 0;\n for col in row.split(\" \".to_string()) {\n match i {\n 1 => path = col,\n _ => ()\n }\n i += 1;\n }\n break;\n }\n\n let tcp_header = self.header;\n let tcp_options = self.options.clone();\n let tcp_dst_ip = self.src_ip;\n let tcp_data = self.data.clone();\n URL::from_string(\"http:\/\/\".to_string() + path).open_async(box move |mut resource: Box<Resource>|{\n let mut vec: Vec<u8> = Vec::new();\n\n match resource.read_to_end(&mut vec) {\n Option::Some(len) => (),\n Option::None => ()\n }\n\n let mut response = TCP {\n header: tcp_header,\n options: tcp_options.clone(),\n data: Vec::new(),\n src_ip: IP_ADDR,\n dst_ip: tcp_dst_ip\n };\n\n response.header.src = tcp_header.dst;\n response.header.dst = tcp_header.src;\n response.header.flags.set(tcp_header.flags.get() | TCP_FIN);\n response.header.ack_num.set(tcp_header.sequence.get() + tcp_data.len() as u32);\n response.header.sequence.set(tcp_header.ack_num.get());\n\n response.data = vec;\n\n response.header.checksum.data = 0;\n\n let proto = n16::new(0x06);\n let segment_len = n16::new((size_of::<TCPHeader>() + response.options.len() + response.data.len()) as u16);\n unsafe{\n response.header.checksum.data = Checksum::compile(\n Checksum::sum((&response.src_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&response.dst_ip as *const IPv4Addr) as usize, size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, size_of::<n16>()) +\n Checksum::sum((&response.header as *const TCPHeader) as usize, size_of::<TCPHeader>()) +\n Checksum::sum(response.options.as_ptr() as usize, response.options.len()) +\n Checksum::sum(response.data.as_ptr() as usize, response.data.len())\n );\n }\n\n let mut ret: Vec<Vec<u8>> = Vec::new();\n ret.push(response.to_bytes());\n callback(ret);\n });\n },\n _ => ()\n }\n }\n }\n }else{\n d(\" TCP RST TODO\\n\");\n }\n }\n}\n\nimpl TCP {\n pub fn from_bytes_ipv4(bytes: Vec<u8>, src_ip: IPv4Addr, dst_ip: IPv4Addr) -> Option<TCP> {\n if bytes.len() >= size_of::<TCPHeader>() {\n unsafe {\n let header = *(bytes.as_ptr() as *const TCPHeader);\n let header_len = ((header.flags.get() & 0xF000) >> 10) as usize;\n\n return Option::Some(TCP {\n header: header,\n options: bytes.sub(size_of::<TCPHeader>(), header_len - size_of::<TCPHeader>()),\n data: bytes.sub(header_len, bytes.len() - header_len),\n 
src_ip: src_ip,\n dst_ip: dst_ip\n });\n }\n }\n return Option::None;\n }\n\n pub fn d(&self){\n d(\"TCP from \");\n dd(self.header.src.get() as usize);\n d(\" to \");\n dd(self.header.dst.get() as usize);\n d(\" options \");\n dd(self.options.len());\n d(\" data \");\n dd(self.data.len());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Command Buffer device interface\n\nuse std::marker::PhantomData;\nuse std::ops::{Deref, DerefMut};\nuse std::collections::hash_set::{self, HashSet};\nuse {Backend, Resources, IndexType, InstanceCount, VertexCount,\n SubmissionResult, SubmissionError};\nuse {state, target, pso, shade, texture, handle};\n\n\/\/\/ A universal clear color supporting integet formats\n\/\/\/ as well as the standard floating-point.\n#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]\n#[cfg_attr(feature = \"serialize\", derive(Serialize, Deserialize))]\npub enum ClearColor {\n \/\/\/ Standard floating-point vec4 color\n Float([f32; 4]),\n \/\/\/ Integer vector to clear ivec4 targets.\n Int([i32; 4]),\n \/\/\/ Unsigned int vector to clear uvec4 targets.\n Uint([u32; 4]),\n}\n\n\/\/\/ Optional instance parameters: (instance count, buffer offset)\npub type InstanceParams = (InstanceCount, VertexCount);\n\n\/\/\/ Thread-safe finished command buffer for submission.\npub struct Submit<B: Backend>(B::SubmitInfo);\nimpl<B: Backend> Submit<B> {\n #[doc(hidden)]\n pub unsafe fn get_info(&self) -> &B::SubmitInfo {\n &self.0\n }\n}\n\n\/\/\/ Encoder for a command buffer.\n\/\/\/\n\/\/\/ Pools will always return an Encoder on `acquire_command_buffer` to provide a safe interface.\n#[derive(Debug)]\npub struct Encoder<B: Backend, C: CommandBuffer<B>>(C, PhantomData<B>);\n\nimpl<B, C> Deref for Encoder<B, C>\n where B: Backend, C: CommandBuffer<B>\n{\n type Target = C;\n fn deref(&self) -> &C {\n &self.0\n }\n}\n\nimpl<B, C> DerefMut for Encoder<B, C>\n where B: Backend, C: CommandBuffer<B>\n{\n fn deref_mut(&mut self) -> &mut C {\n &mut self.0\n }\n}\n\nimpl<B, C> Encoder<B, C>\n where B: Backend, C: CommandBuffer<B>\n{\n #[doc(hidden)]\n pub unsafe fn new(buffer: C) -> Self {\n Encoder(buffer, PhantomData)\n }\n\n \/\/\/ Finish recording commands to the command buffers.\n \/\/\/\n \/\/\/ The command buffer will be consumed and can't be modified further.\n \/\/\/ The command pool must be reset to re-record the command buffer.\n pub fn finish(mut self) -> Submit<B> {\n Submit(unsafe { self.0.end() })\n }\n}\n\n\/\/\/ Base trait for all CommandBuffers\npub trait CommandBuffer<B: Backend> {\n #[doc(hidden)]\n unsafe fn end(&mut self) -> B::SubmitInfo;\n}\n\n\/\/\/ Command buffer with graphics, compute and transfer functionality.\npub struct GeneralCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for GeneralCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> 
B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/ TODO: temporary derefs, remove once command buffers will be reworked\nimpl<'a, B: Backend> Deref for GeneralCommandBuffer<'a, B> {\n type Target = B::RawCommandBuffer;\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl<'a, B: Backend> DerefMut for GeneralCommandBuffer<'a, B> {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n}\n\n\/\/\/ Command buffer with graphics and transfer functionality.\npub struct GraphicsCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for GraphicsCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/ TODO: temporary derefs, remove once command buffers will be reworked\nimpl<'a, B: Backend> Deref for GraphicsCommandBuffer<'a, B> {\n type Target = B::RawCommandBuffer;\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl<'a, B: Backend> DerefMut for GraphicsCommandBuffer<'a, B> {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n}\n\n\/\/\/ Command buffer with compute and transfer functionality.\npub struct ComputeCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for ComputeCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/\/ Command buffer with transfer functionality.\npub struct TransferCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for TransferCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\/\/\/ An interface of the abstract command buffer. 
It collects commands in an\n\/\/\/ efficient API-specific manner, to be ready for execution on the device.\n#[allow(missing_docs)]\npub trait Buffer<R: Resources>: 'static + Send {\n \/\/\/ Reset the command buffer contents, retain the allocated storage\n fn reset(&mut self);\n \/\/\/ Bind a pipeline state object\n fn bind_pipeline_state(&mut self, R::PipelineStateObject);\n \/\/\/ Bind a complete set of vertex buffers\n fn bind_vertex_buffers(&mut self, pso::VertexBufferSet<R>);\n \/\/\/ Bind a complete set of constant buffers\n fn bind_constant_buffers(&mut self, &[pso::ConstantBufferParam<R>]);\n \/\/\/ Bind a global constant\n fn bind_global_constant(&mut self, shade::Location, shade::UniformValue);\n \/\/\/ Bind a complete set of shader resource views\n fn bind_resource_views(&mut self, &[pso::ResourceViewParam<R>]);\n \/\/\/ Bind a complete set of unordered access views\n fn bind_unordered_views(&mut self, &[pso::UnorderedViewParam<R>]);\n \/\/\/ Bind a complete set of samplers\n fn bind_samplers(&mut self, &[pso::SamplerParam<R>]);\n \/\/\/ Bind a complete set of pixel targets, including multiple\n \/\/\/ colors views and an optional depth\/stencil view.\n fn bind_pixel_targets(&mut self, pso::PixelTargetSet<R>);\n \/\/\/ Bind an index buffer\n fn bind_index(&mut self, R::Buffer, IndexType);\n \/\/\/ Set scissor rectangle\n fn set_scissor(&mut self, target::Rect);\n \/\/\/ Set reference values for the blending and stencil front\/back\n fn set_ref_values(&mut self, state::RefValues);\n \/\/\/ Copy part of a buffer to another\n fn copy_buffer(&mut self, src: R::Buffer, dst: R::Buffer,\n src_offset_bytes: usize, dst_offset_bytes: usize,\n size_bytes: usize);\n \/\/\/ Copy part of a buffer to a texture\n fn copy_buffer_to_texture(&mut self,\n src: R::Buffer, src_offset_bytes: usize,\n dst: R::Texture, texture::Kind,\n Option<texture::CubeFace>, texture::RawImageInfo);\n \/\/\/ Copy part of a texture to a buffer\n fn copy_texture_to_buffer(&mut self,\n src: R::Texture, texture::Kind,\n Option<texture::CubeFace>, texture::RawImageInfo,\n dst: R::Buffer, dst_offset_bytes: usize);\n \/\/\/ Update a vertex\/index\/uniform buffer\n fn update_buffer(&mut self, R::Buffer, data: &[u8], offset: usize);\n \/\/\/ Update a texture\n fn update_texture(&mut self, R::Texture, texture::Kind, Option<texture::CubeFace>,\n data: &[u8], texture::RawImageInfo);\n fn generate_mipmap(&mut self, R::ShaderResourceView);\n \/\/\/ Clear color target\n fn clear_color(&mut self, R::RenderTargetView, ClearColor);\n fn clear_depth_stencil(&mut self, R::DepthStencilView,\n Option<target::Depth>, Option<target::Stencil>);\n \/\/\/ Draw a primitive\n fn call_draw(&mut self, VertexCount, VertexCount, Option<InstanceParams>);\n \/\/\/ Draw a primitive with index buffer\n fn call_draw_indexed(&mut self, VertexCount, VertexCount, VertexCount, Option<InstanceParams>);\n}\n\nmacro_rules! impl_clear {\n { $( $ty:ty = $sub:ident[$a:expr, $b:expr, $c:expr, $d:expr], )* } => {\n $(\n impl From<$ty> for ClearColor {\n fn from(v: $ty) -> ClearColor {\n ClearColor::$sub([v[$a], v[$b], v[$c], v[$d]])\n }\n }\n )*\n }\n}\n\nimpl_clear! 
{\n [f32; 4] = Float[0, 1, 2, 3],\n [f32; 3] = Float[0, 1, 2, 0],\n [f32; 2] = Float[0, 1, 0, 0],\n [i32; 4] = Int [0, 1, 2, 3],\n [i32; 3] = Int [0, 1, 2, 0],\n [i32; 2] = Int [0, 1, 0, 0],\n [u32; 4] = Uint [0, 1, 2, 3],\n [u32; 3] = Uint [0, 1, 2, 0],\n [u32; 2] = Uint [0, 1, 0, 0],\n}\n\nimpl From<f32> for ClearColor {\n fn from(v: f32) -> ClearColor {\n ClearColor::Float([v, 0.0, 0.0, 0.0])\n }\n}\nimpl From<i32> for ClearColor {\n fn from(v: i32) -> ClearColor {\n ClearColor::Int([v, 0, 0, 0])\n }\n}\nimpl From<u32> for ClearColor {\n fn from(v: u32) -> ClearColor {\n ClearColor::Uint([v, 0, 0, 0])\n }\n}\n\n\/\/\/ Informations about what is accessed by a bunch of commands.\n#[derive(Clone, Debug, Eq, PartialEq)]\npub struct AccessInfo<R: Resources> {\n mapped_reads: HashSet<handle::RawBuffer<R>>,\n mapped_writes: HashSet<handle::RawBuffer<R>>,\n}\n\nimpl<R: Resources> AccessInfo<R> {\n \/\/\/ Creates empty access informations\n pub fn new() -> Self {\n AccessInfo {\n mapped_reads: HashSet::new(),\n mapped_writes: HashSet::new(),\n }\n }\n\n \/\/\/ Clear access informations\n pub fn clear(&mut self) {\n self.mapped_reads.clear();\n self.mapped_writes.clear();\n }\n\n \/\/\/ Register a buffer read access\n pub fn buffer_read(&mut self, buffer: &handle::RawBuffer<R>) {\n if buffer.is_mapped() {\n self.mapped_reads.insert(buffer.clone());\n }\n }\n\n \/\/\/ Register a buffer write access\n pub fn buffer_write(&mut self, buffer: &handle::RawBuffer<R>) {\n if buffer.is_mapped() {\n self.mapped_writes.insert(buffer.clone());\n }\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will read from\n pub fn mapped_reads(&self) -> AccessInfoBuffers<R> {\n self.mapped_reads.iter()\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will write to\n pub fn mapped_writes(&self) -> AccessInfoBuffers<R> {\n self.mapped_writes.iter()\n }\n\n \/\/\/ Is there any mapped buffer reads ?\n pub fn has_mapped_reads(&self) -> bool {\n !self.mapped_reads.is_empty()\n }\n\n \/\/\/ Is there any mapped buffer writes ?\n pub fn has_mapped_writes(&self) -> bool {\n !self.mapped_writes.is_empty()\n }\n\n \/\/\/ Takes all the accesses necessary for submission\n pub fn take_accesses(&self) -> SubmissionResult<AccessGuard<R>> {\n for buffer in self.mapped_reads().chain(self.mapped_writes()) {\n unsafe {\n if !buffer.mapping().unwrap().take_access() {\n return Err(SubmissionError::AccessOverlap);\n }\n }\n }\n Ok(AccessGuard { inner: self })\n }\n}\n\n#[allow(missing_docs)]\npub type AccessInfoBuffers<'a, R> = hash_set::Iter<'a, handle::RawBuffer<R>>;\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuard<'a, R: Resources> {\n inner: &'a AccessInfo<R>,\n}\n\n#[allow(missing_docs)]\nimpl<'a, R: Resources> AccessGuard<'a, R> {\n \/\/\/ Returns the mapped buffers that The GPU will read from,\n \/\/\/ with exclusive acces to their mapping\n pub fn access_mapped_reads(&mut self) -> AccessGuardBuffers<R> {\n AccessGuardBuffers {\n buffers: self.inner.mapped_reads()\n }\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will write to,\n \/\/\/ with exclusive acces to their mapping\n pub fn access_mapped_writes(&mut self) -> AccessGuardBuffers<R> {\n AccessGuardBuffers {\n buffers: self.inner.mapped_writes()\n }\n }\n\n pub fn access_mapped(&mut self) -> AccessGuardBuffersChain<R> {\n AccessGuardBuffersChain {\n fst: self.inner.mapped_reads(),\n snd: self.inner.mapped_writes(),\n }\n }\n}\n\nimpl<'a, R: Resources> Deref for AccessGuard<'a, R> {\n type Target = AccessInfo<R>;\n fn deref(&self) -> 
&Self::Target {\n &self.inner\n }\n}\n\nimpl<'a, R: Resources> Drop for AccessGuard<'a, R> {\n fn drop(&mut self) {\n for buffer in self.inner.mapped_reads().chain(self.inner.mapped_writes()) {\n unsafe {\n buffer.mapping().unwrap().release_access();\n }\n }\n }\n}\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuardBuffers<'a, R: Resources> {\n buffers: AccessInfoBuffers<'a, R>\n}\n\nimpl<'a, R: Resources> Iterator for AccessGuardBuffers<'a, R> {\n type Item = (&'a handle::RawBuffer<R>, &'a mut R::Mapping);\n\n fn next(&mut self) -> Option<Self::Item> {\n self.buffers.next().map(|buffer| unsafe {\n (buffer, buffer.mapping().unwrap().use_access())\n })\n }\n}\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuardBuffersChain<'a, R: Resources> {\n fst: AccessInfoBuffers<'a, R>,\n snd: AccessInfoBuffers<'a, R>\n}\n\nimpl<'a, R: Resources> Iterator for AccessGuardBuffersChain<'a, R> {\n type Item = (&'a handle::RawBuffer<R>, &'a mut R::Mapping);\n\n fn next(&mut self) -> Option<Self::Item> {\n self.fst.next().or_else(|| self.snd.next())\n .map(|buffer| unsafe {\n (buffer, buffer.mapping().unwrap().use_access())\n })\n }\n}\n<commit_msg>[ll] core: Remove trait bounds for Encoder<commit_after>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
Command Buffer device interface\n\nuse std::marker::PhantomData;\nuse std::ops::{Deref, DerefMut};\nuse std::collections::hash_set::{self, HashSet};\nuse {Backend, Resources, IndexType, InstanceCount, VertexCount,\n SubmissionResult, SubmissionError};\nuse {state, target, pso, shade, texture, handle};\n\n\/\/\/ A universal clear color supporting integet formats\n\/\/\/ as well as the standard floating-point.\n#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]\n#[cfg_attr(feature = \"serialize\", derive(Serialize, Deserialize))]\npub enum ClearColor {\n \/\/\/ Standard floating-point vec4 color\n Float([f32; 4]),\n \/\/\/ Integer vector to clear ivec4 targets.\n Int([i32; 4]),\n \/\/\/ Unsigned int vector to clear uvec4 targets.\n Uint([u32; 4]),\n}\n\n\/\/\/ Optional instance parameters: (instance count, buffer offset)\npub type InstanceParams = (InstanceCount, VertexCount);\n\n\/\/\/ Thread-safe finished command buffer for submission.\npub struct Submit<B: Backend>(B::SubmitInfo);\nimpl<B: Backend> Submit<B> {\n #[doc(hidden)]\n pub unsafe fn get_info(&self) -> &B::SubmitInfo {\n &self.0\n }\n}\n\n\/\/\/ Encoder for a command buffer.\n\/\/\/\n\/\/\/ Pools will always return an Encoder on `acquire_command_buffer` to provide a safe interface.\n#[derive(Debug)]\npub struct Encoder<B, C>(C, PhantomData<B>);\n\nimpl<B, C> Deref for Encoder<B, C> {\n type Target = C;\n fn deref(&self) -> &C {\n &self.0\n }\n}\n\nimpl<B, C> DerefMut for Encoder<B, C> {\n fn deref_mut(&mut self) -> &mut C {\n &mut self.0\n }\n}\n\nimpl<B, C> Encoder<B, C>\n where B: Backend, C: CommandBuffer<B>\n{\n #[doc(hidden)]\n pub unsafe fn new(buffer: C) -> Self {\n Encoder(buffer, PhantomData)\n }\n\n \/\/\/ Finish recording commands to the command buffers.\n \/\/\/\n \/\/\/ The command buffer will be consumed and can't be modified further.\n \/\/\/ The command pool must be reset to re-record the command buffer.\n pub fn finish(mut self) -> Submit<B> {\n Submit(unsafe { self.0.end() })\n }\n}\n\n\/\/\/ Base trait for all CommandBuffers\npub trait CommandBuffer<B: Backend> {\n #[doc(hidden)]\n unsafe fn end(&mut self) -> B::SubmitInfo;\n}\n\n\/\/\/ Command buffer with graphics, compute and transfer functionality.\npub struct GeneralCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for GeneralCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/ TODO: temporary derefs, remove once command buffers will be reworked\nimpl<'a, B: Backend> Deref for GeneralCommandBuffer<'a, B> {\n type Target = B::RawCommandBuffer;\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl<'a, B: Backend> DerefMut for GeneralCommandBuffer<'a, B> {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n}\n\n\/\/\/ Command buffer with graphics and transfer functionality.\npub struct GraphicsCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for GraphicsCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/ TODO: temporary derefs, remove once command buffers will be reworked\nimpl<'a, B: Backend> Deref for GraphicsCommandBuffer<'a, B> {\n type Target = B::RawCommandBuffer;\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl<'a, B: Backend> DerefMut for GraphicsCommandBuffer<'a, B> {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n 
}\n}\n\n\/\/\/ Command buffer with compute and transfer functionality.\npub struct ComputeCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for ComputeCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\n\/\/\/ Command buffer with transfer functionality.\npub struct TransferCommandBuffer<'a, B: Backend>(pub(crate) &'a mut B::RawCommandBuffer)\nwhere B::RawCommandBuffer: 'a;\n\nimpl<'a, B: Backend> CommandBuffer<B> for TransferCommandBuffer<'a, B> {\n unsafe fn end(&mut self) -> B::SubmitInfo {\n self.0.end()\n }\n}\n\/\/\/ An interface of the abstract command buffer. It collects commands in an\n\/\/\/ efficient API-specific manner, to be ready for execution on the device.\n#[allow(missing_docs)]\npub trait Buffer<R: Resources>: 'static + Send {\n \/\/\/ Reset the command buffer contents, retain the allocated storage\n fn reset(&mut self);\n \/\/\/ Bind a pipeline state object\n fn bind_pipeline_state(&mut self, R::PipelineStateObject);\n \/\/\/ Bind a complete set of vertex buffers\n fn bind_vertex_buffers(&mut self, pso::VertexBufferSet<R>);\n \/\/\/ Bind a complete set of constant buffers\n fn bind_constant_buffers(&mut self, &[pso::ConstantBufferParam<R>]);\n \/\/\/ Bind a global constant\n fn bind_global_constant(&mut self, shade::Location, shade::UniformValue);\n \/\/\/ Bind a complete set of shader resource views\n fn bind_resource_views(&mut self, &[pso::ResourceViewParam<R>]);\n \/\/\/ Bind a complete set of unordered access views\n fn bind_unordered_views(&mut self, &[pso::UnorderedViewParam<R>]);\n \/\/\/ Bind a complete set of samplers\n fn bind_samplers(&mut self, &[pso::SamplerParam<R>]);\n \/\/\/ Bind a complete set of pixel targets, including multiple\n \/\/\/ colors views and an optional depth\/stencil view.\n fn bind_pixel_targets(&mut self, pso::PixelTargetSet<R>);\n \/\/\/ Bind an index buffer\n fn bind_index(&mut self, R::Buffer, IndexType);\n \/\/\/ Set scissor rectangle\n fn set_scissor(&mut self, target::Rect);\n \/\/\/ Set reference values for the blending and stencil front\/back\n fn set_ref_values(&mut self, state::RefValues);\n \/\/\/ Copy part of a buffer to another\n fn copy_buffer(&mut self, src: R::Buffer, dst: R::Buffer,\n src_offset_bytes: usize, dst_offset_bytes: usize,\n size_bytes: usize);\n \/\/\/ Copy part of a buffer to a texture\n fn copy_buffer_to_texture(&mut self,\n src: R::Buffer, src_offset_bytes: usize,\n dst: R::Texture, texture::Kind,\n Option<texture::CubeFace>, texture::RawImageInfo);\n \/\/\/ Copy part of a texture to a buffer\n fn copy_texture_to_buffer(&mut self,\n src: R::Texture, texture::Kind,\n Option<texture::CubeFace>, texture::RawImageInfo,\n dst: R::Buffer, dst_offset_bytes: usize);\n \/\/\/ Update a vertex\/index\/uniform buffer\n fn update_buffer(&mut self, R::Buffer, data: &[u8], offset: usize);\n \/\/\/ Update a texture\n fn update_texture(&mut self, R::Texture, texture::Kind, Option<texture::CubeFace>,\n data: &[u8], texture::RawImageInfo);\n fn generate_mipmap(&mut self, R::ShaderResourceView);\n \/\/\/ Clear color target\n fn clear_color(&mut self, R::RenderTargetView, ClearColor);\n fn clear_depth_stencil(&mut self, R::DepthStencilView,\n Option<target::Depth>, Option<target::Stencil>);\n \/\/\/ Draw a primitive\n fn call_draw(&mut self, VertexCount, VertexCount, Option<InstanceParams>);\n \/\/\/ Draw a primitive with index buffer\n fn call_draw_indexed(&mut self, VertexCount, VertexCount, 
VertexCount, Option<InstanceParams>);\n}\n\nmacro_rules! impl_clear {\n { $( $ty:ty = $sub:ident[$a:expr, $b:expr, $c:expr, $d:expr], )* } => {\n $(\n impl From<$ty> for ClearColor {\n fn from(v: $ty) -> ClearColor {\n ClearColor::$sub([v[$a], v[$b], v[$c], v[$d]])\n }\n }\n )*\n }\n}\n\nimpl_clear! {\n [f32; 4] = Float[0, 1, 2, 3],\n [f32; 3] = Float[0, 1, 2, 0],\n [f32; 2] = Float[0, 1, 0, 0],\n [i32; 4] = Int [0, 1, 2, 3],\n [i32; 3] = Int [0, 1, 2, 0],\n [i32; 2] = Int [0, 1, 0, 0],\n [u32; 4] = Uint [0, 1, 2, 3],\n [u32; 3] = Uint [0, 1, 2, 0],\n [u32; 2] = Uint [0, 1, 0, 0],\n}\n\nimpl From<f32> for ClearColor {\n fn from(v: f32) -> ClearColor {\n ClearColor::Float([v, 0.0, 0.0, 0.0])\n }\n}\nimpl From<i32> for ClearColor {\n fn from(v: i32) -> ClearColor {\n ClearColor::Int([v, 0, 0, 0])\n }\n}\nimpl From<u32> for ClearColor {\n fn from(v: u32) -> ClearColor {\n ClearColor::Uint([v, 0, 0, 0])\n }\n}\n\n\/\/\/ Informations about what is accessed by a bunch of commands.\n#[derive(Clone, Debug, Eq, PartialEq)]\npub struct AccessInfo<R: Resources> {\n mapped_reads: HashSet<handle::RawBuffer<R>>,\n mapped_writes: HashSet<handle::RawBuffer<R>>,\n}\n\nimpl<R: Resources> AccessInfo<R> {\n \/\/\/ Creates empty access informations\n pub fn new() -> Self {\n AccessInfo {\n mapped_reads: HashSet::new(),\n mapped_writes: HashSet::new(),\n }\n }\n\n \/\/\/ Clear access informations\n pub fn clear(&mut self) {\n self.mapped_reads.clear();\n self.mapped_writes.clear();\n }\n\n \/\/\/ Register a buffer read access\n pub fn buffer_read(&mut self, buffer: &handle::RawBuffer<R>) {\n if buffer.is_mapped() {\n self.mapped_reads.insert(buffer.clone());\n }\n }\n\n \/\/\/ Register a buffer write access\n pub fn buffer_write(&mut self, buffer: &handle::RawBuffer<R>) {\n if buffer.is_mapped() {\n self.mapped_writes.insert(buffer.clone());\n }\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will read from\n pub fn mapped_reads(&self) -> AccessInfoBuffers<R> {\n self.mapped_reads.iter()\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will write to\n pub fn mapped_writes(&self) -> AccessInfoBuffers<R> {\n self.mapped_writes.iter()\n }\n\n \/\/\/ Is there any mapped buffer reads ?\n pub fn has_mapped_reads(&self) -> bool {\n !self.mapped_reads.is_empty()\n }\n\n \/\/\/ Is there any mapped buffer writes ?\n pub fn has_mapped_writes(&self) -> bool {\n !self.mapped_writes.is_empty()\n }\n\n \/\/\/ Takes all the accesses necessary for submission\n pub fn take_accesses(&self) -> SubmissionResult<AccessGuard<R>> {\n for buffer in self.mapped_reads().chain(self.mapped_writes()) {\n unsafe {\n if !buffer.mapping().unwrap().take_access() {\n return Err(SubmissionError::AccessOverlap);\n }\n }\n }\n Ok(AccessGuard { inner: self })\n }\n}\n\n#[allow(missing_docs)]\npub type AccessInfoBuffers<'a, R> = hash_set::Iter<'a, handle::RawBuffer<R>>;\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuard<'a, R: Resources> {\n inner: &'a AccessInfo<R>,\n}\n\n#[allow(missing_docs)]\nimpl<'a, R: Resources> AccessGuard<'a, R> {\n \/\/\/ Returns the mapped buffers that The GPU will read from,\n \/\/\/ with exclusive acces to their mapping\n pub fn access_mapped_reads(&mut self) -> AccessGuardBuffers<R> {\n AccessGuardBuffers {\n buffers: self.inner.mapped_reads()\n }\n }\n\n \/\/\/ Returns the mapped buffers that The GPU will write to,\n \/\/\/ with exclusive acces to their mapping\n pub fn access_mapped_writes(&mut self) -> AccessGuardBuffers<R> {\n AccessGuardBuffers {\n buffers: self.inner.mapped_writes()\n 
}\n }\n\n pub fn access_mapped(&mut self) -> AccessGuardBuffersChain<R> {\n AccessGuardBuffersChain {\n fst: self.inner.mapped_reads(),\n snd: self.inner.mapped_writes(),\n }\n }\n}\n\nimpl<'a, R: Resources> Deref for AccessGuard<'a, R> {\n type Target = AccessInfo<R>;\n fn deref(&self) -> &Self::Target {\n &self.inner\n }\n}\n\nimpl<'a, R: Resources> Drop for AccessGuard<'a, R> {\n fn drop(&mut self) {\n for buffer in self.inner.mapped_reads().chain(self.inner.mapped_writes()) {\n unsafe {\n buffer.mapping().unwrap().release_access();\n }\n }\n }\n}\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuardBuffers<'a, R: Resources> {\n buffers: AccessInfoBuffers<'a, R>\n}\n\nimpl<'a, R: Resources> Iterator for AccessGuardBuffers<'a, R> {\n type Item = (&'a handle::RawBuffer<R>, &'a mut R::Mapping);\n\n fn next(&mut self) -> Option<Self::Item> {\n self.buffers.next().map(|buffer| unsafe {\n (buffer, buffer.mapping().unwrap().use_access())\n })\n }\n}\n\n#[allow(missing_docs)]\n#[derive(Debug)]\npub struct AccessGuardBuffersChain<'a, R: Resources> {\n fst: AccessInfoBuffers<'a, R>,\n snd: AccessInfoBuffers<'a, R>\n}\n\nimpl<'a, R: Resources> Iterator for AccessGuardBuffersChain<'a, R> {\n type Item = (&'a handle::RawBuffer<R>, &'a mut R::Mapping);\n\n fn next(&mut self) -> Option<Self::Item> {\n self.fst.next().or_else(|| self.snd.next())\n .map(|buffer| unsafe {\n (buffer, buffer.mapping().unwrap().use_access())\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Start on 64-bit IDT<commit_after>use core::mem;\nuse spin::Mutex;\n\nextern {\n \/\/\/ Offset of the 64-bit GDT main code segment.\n \/\/\/ This should be exported by ASM.\n static gdt64_offset: u16;\n}\n\nconst IDT_ENTRIES: usize = 256;\ntype Handler = unsafe extern \"C\" fn() -> ();\n\n\/\/\/ An IDT entry is called a gate.\n\/\/\/\n\/\/\/ Based on code from the OS Dev Wiki\n\/\/\/ http:\/\/wiki.osdev.org\/Interrupt_Descriptor_Table#Structure\n#[repr(C, packed)]\nstruct Gate { offset_lower: u16\n , selector: u16\n , zero: u8\n , type_attr: u8\n , offest_mid: u16\n , offset_upper: u32\n , reserved: u32\n }\n\nimpl Gate {\n \/\/\/ Creates a new IDT gate marked as `absent`.\n \/\/\/\n \/\/\/ This is basically just for filling the new IDT table\n \/\/\/ with valid (but useless) gates upon init.\n const fn absent() -> Self {\n Gate { offset_lower: 0\n , selector: 0\n , zero: 0\n , type_attr: 0b0000_1110\n , offset_mid: 0\n , offset_upper: 0\n , reserved: 0\n }\n }\n\n fn new(handler: Handler) -> Gate {\n unsafe {\n \/\/ `mem::transmute()` is glorious black magic\n let (low, mid, high): (u16, u16, u32)\n = mem::transmute(handler)\n\n Gate { offset_lower: low\n , selector: gdt64_offset\n , zero: 0\n , type_attr: 0b1000_1110\n , offset_mid: mid\n , offset_upper: high\n , reserved: 0\n }\n }\n }\n}\n\n\nstruct Idt([Gate; IDT_ENTRIES]);\n\n\/\/\/ This is the format that `lidt` expects for the pointer to the IDT.\n\/\/\/ ...apparently.\n#[repr(C, packed)]\nstruct IdtPtr { limit: u16\n , base: u64\n }\n\nimpl IdtPtr {\n \/\/\/ Load the IDT at the given location.\n \/\/\/ This just calls `lidt`.\n pub unsafe fn load(&self) {\n asm!( \"lidt ($0)\"\n :: \"{rax}\"(self)\n :: \"volatile\" );\n }\n}\n\n\/\/\/ Global Interrupt Descriptor Table instance\n\/\/\/ Our global IDT.\nstatic IDT: Mutex<Idt>\n = Mutex::new(Idt([Gate::absent(); IDT_ENTRIES]));\n<|endoftext|>"} {"text":"<commit_before>use module::helpers::header as headerhelpers;\nuse storage::file::header::data::FileHeaderData as FHD;\nuse 
storage::file::header::spec::FileHeaderSpec as FHS;\n\npub fn get_spec() -> FHS {\n FHS::Map {\n keys: vec![\n headerhelpers::tags::spec::url_key(),\n headerhelpers::tags::spec::tags_key(),\n ]\n }\n}\n\npub fn build_header(url: String, tags: Vec<String>) -> FHD {\n FHD::Map {\n keys: vec![\n FHD::Key {\n name: String::from(\"URL\"),\n value: Box::new(FHD::Text(url.clone()))\n },\n FHD::Key {\n name: String::from(\"TAGS\"),\n value: Box::new(headerhelpers::tags::data::build_tag_array(tags))\n }\n ]\n }\n}\n\npub fn get_tags_from_header(header: &FHD) -> Vec<String> {\n headerhelpers::tags::data::get_tags_from_header(header)\n}\n\npub fn get_url_from_header(header: &FHD) -> Option<String> {\n headerhelpers::data::get_url_from_header(header)\n}\n\n<commit_msg>BM header helpers: Add set_tags_in_header()<commit_after>use module::helpers::header as headerhelpers;\nuse storage::file::header::data::FileHeaderData as FHD;\nuse storage::file::header::spec::FileHeaderSpec as FHS;\n\npub fn get_spec() -> FHS {\n FHS::Map {\n keys: vec![\n headerhelpers::tags::spec::url_key(),\n headerhelpers::tags::spec::tags_key(),\n ]\n }\n}\n\npub fn build_header(url: String, tags: Vec<String>) -> FHD {\n FHD::Map {\n keys: vec![\n FHD::Key {\n name: String::from(\"URL\"),\n value: Box::new(FHD::Text(url.clone()))\n },\n FHD::Key {\n name: String::from(\"TAGS\"),\n value: Box::new(headerhelpers::tags::data::build_tag_array(tags))\n }\n ]\n }\n}\n\npub fn get_tags_from_header(header: &FHD) -> Vec<String> {\n headerhelpers::tags::data::get_tags_from_header(header)\n}\n\npub fn set_tags_in_header(header: &FHD, tags: Vec<String>) {\n headerhelpers::tags::data::set_tags_in_header(header, tags)\n}\n\npub fn get_url_from_header(header: &FHD) -> Option<String> {\n headerhelpers::data::get_url_from_header(header)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>mmu: refactoring.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add url parameter to the servosrc gstreamer plugin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Finish implementing tar archivers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>collections::pub use { ... }<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Another benchmark<commit_after>\/\/ This test creates a bunch of tasks that simultaneously send to each\n\/\/ other in a ring. The messages should all be basically\n\/\/ independent. 
It's designed to hammer the global kernel lock, so\n\/\/ that things will look really good once we get that lock out of the\n\/\/ message path.\n\nimport comm::*;\nimport future::future;\n\nuse std;\nimport std::time;\n\nfn thread_ring(i: uint,\n count: uint,\n num_chan: comm::chan<uint>,\n num_port: comm::port<uint>) {\n \/\/ Send\/Receive lots of messages.\n for uint::range(0u, count) {|j|\n num_chan.send(i * j);\n num_port.recv();\n };\n}\n\nfn main(args: [str]) {\n let args = if os::getenv(\"RUST_BENCH\").is_some() {\n [\"\", \"100\", \"10000\"]\n } else if args.len() <= 1u {\n [\"\", \"100\", \"1000\"]\n } else {\n args\n }; \n\n let num_tasks = option::get(uint::from_str(args[1]));\n let msg_per_task = option::get(uint::from_str(args[2]));\n\n let num_port = port();\n let mut num_chan = chan(num_port);\n\n let start = time::precise_time_s();\n\n \/\/ create the ring\n let mut futures = [];\n\n for uint::range(1u, num_tasks) {|i|\n let get_chan = port();\n let get_chan_chan = chan(get_chan);\n\n futures += [future::spawn {|copy num_chan, move get_chan_chan|\n let p = port();\n get_chan_chan.send(chan(p));\n thread_ring(i, msg_per_task, num_chan, p)\n }];\n \n num_chan = get_chan.recv();\n };\n\n \/\/ do our iteration\n thread_ring(0u, msg_per_task, num_chan, num_port);\n\n \/\/ synchronize\n for futures.each {|f| f.get() };\n\n let stop = time::precise_time_s();\n\n \/\/ all done, report stats.\n let num_msgs = num_tasks * msg_per_task;\n let elapsed = (stop - start);\n let rate = (num_msgs as float) \/ elapsed;\n\n io::println(#fmt(\"Sent %? messages in %? seconds\",\n num_msgs, elapsed));\n io::println(#fmt(\" %? messages \/ second\", rate));\n io::println(#fmt(\" %? μs \/ message\", 1000000. \/ rate));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Workaround for https:\/\/github.com\/rust-lang\/rust\/issues\/15763<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added partial MBC1 impl<commit_after>use super::MemoryBankController;\n\n\/\/\/ MBC1 is the first MBC chip for the Gameboy. It can address a maximum of\n\/\/\/ 2MB ROM (divided in 125 banks of size 16KB) and\/or 32KB RAM.\n\/\/\/\n\/\/\/ The first 16KB in the rom (0 to 0x3FFF) are always pointing to the first\n\/\/\/ ROM bank. 
The following 16KB refer to the selected ROM bank.\n\/\/\/ Banks are numbered from 0 to 0x7F (128), but bank numbers 0x20, 0x40 and\n\/\/\/ 0x60 are not usable, thus the total bank number is 125.\nstruct MBC1 {\n data: Vec<u8>,\n ram: [u8; 32768], \/\/ 32KB RAM\n selected_rom_bank: u8,\n selected_ram_bank: u8,\n}\n\nimpl MemoryBankController for MBC1 {\n\n fn read(&self, addr: u16) -> u8 {\n match addr {\n 0...0x3FFF => self.data[addr as usize], \/\/ bank 0\n 0x4000...0x7FFF => {\n \/\/ the selected bank data (banks 1 to 0x7F)\n let base_address = self.selected_rom_bank as u16 * 0x4000;\n self.data[(base_address + addr) as usize]\n },\n 0xA000...0xBFFF => {\n \/\/ the selected RAM bank data (banks 0 to 3)\n self.ram[(addr - 0xA000) as usize]\n }\n _ => panic!(\"RomOnly MBC attempted read at ${:04x}\", addr),\n }\n }\n\n fn write(&mut self, addr: u16, data: u8) {\n \/\/ TODO finish this\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>watcher fix<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start of heap<commit_after>\/\/ SPDX-License-Identifier: Unlicense\r\n\r\n#![feature(custom_test_frameworks)]\r\n#![no_main]\r\n#![no_std]\r\n#![reexport_test_harness_main = \"test_main\"]\r\n#![test_runner(libkernel::util::testing::test_runner)]\r\n#![feature(format_args_nl)] \/\/ for debug macros\r\n\r\n#[allow(unused_imports)]\r\n#[macro_use]\r\nextern crate libkernel;\r\n\r\nuse test_macros::kernel_test;\r\n\r\n#[no_mangle]\r\nfn kernel_init() {\r\n use libkernel::{handler, pager};\r\n\r\n fn next() -> ! {\r\n test_main();\r\n unreachable!()\r\n }\r\n\r\n handler::init().expect(\"handler::init\");\r\n pager::init(next);\r\n}\r\n\r\n#[kernel_test]\r\nfn device_init() {\r\n use libkernel::device;\r\n\r\n major!(\"initialising device\");\r\n device::init().expect(\"device::init\");\r\n debug!(\"returned\");\r\n}\r\n<|endoftext|>"} {"text":"<commit_before>use std::io::{Read, Write};\nuse std::mem;\nuse std::net::Shutdown;\nuse std::os::unix::prelude::*;\nuse std::path::Path;\n\nuse libc;\n\nuse {io, Evented, Ready, Poll, PollOpt, Token};\nuse sys::unix::{cvt, Io};\nuse sys::unix::io::{set_nonblock, set_cloexec};\n\ntrait MyInto<T> {\n fn my_into(self) -> T;\n}\n\nimpl MyInto<u32> for usize {\n fn my_into(self) -> u32 { self as u32 }\n}\n\nimpl MyInto<usize> for usize {\n fn my_into(self) -> usize { self }\n}\n\nunsafe fn sockaddr_un(path: &Path)\n -> io::Result<(libc::sockaddr_un, libc::socklen_t)> {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n\n let bytes = path.as_os_str().as_bytes();\n\n if bytes.len() >= addr.sun_path.len() {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"path must be shorter than SUN_LEN\"))\n }\n for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) {\n *dst = *src as libc::c_char;\n }\n \/\/ null byte for pathname addresses is already there because we zeroed the\n \/\/ struct\n\n let mut len = sun_path_offset() + bytes.len();\n match bytes.get(0) {\n Some(&0) | None => {}\n Some(_) => len += 1,\n }\n Ok((addr, len as libc::socklen_t))\n}\n\nfn sun_path_offset() -> usize {\n unsafe {\n \/\/ Work with an actual instance of the type since using a null pointer is UB\n let addr: libc::sockaddr_un = mem::uninitialized();\n let base = &addr as *const _ as usize;\n let path = &addr.sun_path as *const _ as usize;\n path - base\n }\n}\n\n#[derive(Debug)]\npub struct UnixSocket {\n io: Io,\n}\n\nimpl UnixSocket {\n \/\/\/ Returns a new, unbound, non-blocking Unix domain socket\n 
pub fn stream() -> io::Result<UnixSocket> {\n #[cfg(target_os = \"linux\")]\n use libc::{SOCK_CLOEXEC, SOCK_NONBLOCK};\n #[cfg(not(target_os = \"linux\"))]\n const SOCK_CLOEXEC: libc::c_int = 0;\n #[cfg(not(target_os = \"linux\"))]\n const SOCK_NONBLOCK: libc::c_int = 0;\n\n unsafe {\n if cfg!(target_os = \"linux\") {\n let flags = libc::SOCK_STREAM | SOCK_CLOEXEC | SOCK_NONBLOCK;\n match cvt(libc::socket(libc::AF_UNIX, flags, 0)) {\n Ok(fd) => return Ok(UnixSocket::from_raw_fd(fd)),\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {}\n Err(e) => return Err(e),\n }\n }\n\n let fd = try!(cvt(libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0)));\n let fd = UnixSocket::from_raw_fd(fd);\n try!(set_cloexec(fd.as_raw_fd()));\n try!(set_nonblock(fd.as_raw_fd()));\n Ok(fd)\n }\n }\n\n \/\/\/ Connect the socket to the specified address\n pub fn connect<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {\n unsafe {\n let (addr, len) = try!(sockaddr_un(addr.as_ref()));\n try!(cvt(libc::connect(self.as_raw_fd(),\n &addr as *const _ as *const _,\n len)));\n Ok(())\n }\n }\n\n \/\/\/ Listen for incoming requests\n pub fn listen(&self, backlog: usize) -> io::Result<()> {\n unsafe {\n try!(cvt(libc::listen(self.as_raw_fd(), backlog as i32)));\n Ok(())\n }\n }\n\n pub fn accept(&self) -> io::Result<UnixSocket> {\n unsafe {\n let fd = try!(cvt(libc::accept(self.as_raw_fd(),\n 0 as *mut _,\n 0 as *mut _)));\n let fd = Io::from_raw_fd(fd);\n try!(set_cloexec(fd.as_raw_fd()));\n try!(set_nonblock(fd.as_raw_fd()));\n Ok(UnixSocket { io: fd })\n }\n }\n\n \/\/\/ Bind the socket to the specified address\n pub fn bind<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {\n unsafe {\n let (addr, len) = try!(sockaddr_un(addr.as_ref()));\n try!(cvt(libc::bind(self.as_raw_fd(),\n &addr as *const _ as *const _,\n len)));\n Ok(())\n }\n }\n\n pub fn try_clone(&self) -> io::Result<UnixSocket> {\n Ok(UnixSocket { io: try!(self.io.try_clone()) })\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n unsafe {\n try!(cvt(libc::shutdown(self.as_raw_fd(), how)));\n Ok(())\n }\n }\n\n pub fn read_recv_fd(&mut self, buf: &mut [u8]) -> io::Result<(usize, Option<RawFd>)> {\n unsafe {\n let mut iov = libc::iovec {\n iov_base: buf.as_mut_ptr() as *mut _,\n iov_len: buf.len(),\n };\n struct Cmsg {\n hdr: libc::cmsghdr,\n data: [libc::c_int; 1],\n }\n let mut cmsg: Cmsg = mem::zeroed();\n let mut msg = libc::msghdr {\n msg_name: 0 as *mut _,\n msg_namelen: 0,\n msg_iov: &mut iov,\n msg_iovlen: 1,\n msg_control: &mut cmsg as *mut _ as *mut _,\n msg_controllen: mem::size_of_val(&cmsg).my_into(),\n msg_flags: 0,\n };\n let bytes = try!(cvt(libc::recvmsg(self.as_raw_fd(), &mut msg, 0)));\n\n const SCM_RIGHTS: libc::c_int = 1;\n\n let fd = if cmsg.hdr.cmsg_level == libc::SOL_SOCKET &&\n cmsg.hdr.cmsg_type == SCM_RIGHTS {\n Some(cmsg.data[0])\n } else {\n None\n };\n Ok((bytes as usize, fd))\n }\n }\n\n pub fn write_send_fd(&mut self, buf: &[u8], fd: RawFd) -> io::Result<usize> {\n unsafe {\n let mut iov = libc::iovec {\n iov_base: buf.as_ptr() as *mut _,\n iov_len: buf.len(),\n };\n struct Cmsg {\n hdr: libc::cmsghdr,\n data: [libc::c_int; 1],\n }\n let mut cmsg: Cmsg = mem::zeroed();\n cmsg.hdr.cmsg_len = mem::size_of_val(&cmsg).my_into();\n cmsg.hdr.cmsg_level = libc::SOL_SOCKET;\n cmsg.hdr.cmsg_type = 1; \/\/ SCM_RIGHTS\n cmsg.data[0] = fd;\n let msg = 
libc::msghdr {\n msg_name: 0 as *mut _,\n msg_namelen: 0,\n msg_iov: &mut iov,\n msg_iovlen: 1,\n msg_control: &mut cmsg as *mut _ as *mut _,\n msg_controllen: mem::size_of_val(&cmsg).my_into(),\n msg_flags: 0,\n };\n let bytes = try!(cvt(libc::sendmsg(self.as_raw_fd(), &msg, 0)));\n Ok(bytes as usize)\n }\n }\n}\n\nimpl Read for UnixSocket {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.io.read(buf)\n }\n}\n\nimpl Write for UnixSocket {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.io.write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.io.flush()\n }\n}\n\nimpl Evented for UnixSocket {\n fn register(&self, poll: &Poll, token: Token, interest: Ready, opts: PollOpt) -> io::Result<()> {\n self.io.register(poll, token, interest, opts)\n }\n\n fn reregister(&self, poll: &Poll, token: Token, interest: Ready, opts: PollOpt) -> io::Result<()> {\n self.io.reregister(poll, token, interest, opts)\n }\n\n fn deregister(&self, poll: &Poll) -> io::Result<()> {\n self.io.deregister(poll)\n }\n}\n\n\nimpl From<Io> for UnixSocket {\n fn from(io: Io) -> UnixSocket {\n UnixSocket { io: io }\n }\n}\n\nimpl FromRawFd for UnixSocket {\n unsafe fn from_raw_fd(fd: RawFd) -> UnixSocket {\n UnixSocket { io: Io::from_raw_fd(fd) }\n }\n}\n\nimpl IntoRawFd for UnixSocket {\n fn into_raw_fd(self) -> RawFd {\n self.io.into_raw_fd()\n }\n}\n\nimpl AsRawFd for UnixSocket {\n fn as_raw_fd(&self) -> RawFd {\n self.io.as_raw_fd()\n }\n}\n<commit_msg>Fix compile on musl<commit_after>use std::io::{Read, Write};\nuse std::mem;\nuse std::net::Shutdown;\nuse std::os::unix::prelude::*;\nuse std::path::Path;\n\nuse libc;\n\nuse {io, Evented, Ready, Poll, PollOpt, Token};\nuse sys::unix::{cvt, Io};\nuse sys::unix::io::{set_nonblock, set_cloexec};\n\ntrait MyInto<T> {\n fn my_into(self) -> T;\n}\n\nimpl MyInto<u32> for usize {\n fn my_into(self) -> u32 { self as u32 }\n}\n\nimpl MyInto<usize> for usize {\n fn my_into(self) -> usize { self }\n}\n\nunsafe fn sockaddr_un(path: &Path)\n -> io::Result<(libc::sockaddr_un, libc::socklen_t)> {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n\n let bytes = path.as_os_str().as_bytes();\n\n if bytes.len() >= addr.sun_path.len() {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"path must be shorter than SUN_LEN\"))\n }\n for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) {\n *dst = *src as libc::c_char;\n }\n \/\/ null byte for pathname addresses is already there because we zeroed the\n \/\/ struct\n\n let mut len = sun_path_offset() + bytes.len();\n match bytes.get(0) {\n Some(&0) | None => {}\n Some(_) => len += 1,\n }\n Ok((addr, len as libc::socklen_t))\n}\n\nfn sun_path_offset() -> usize {\n unsafe {\n \/\/ Work with an actual instance of the type since using a null pointer is UB\n let addr: libc::sockaddr_un = mem::uninitialized();\n let base = &addr as *const _ as usize;\n let path = &addr.sun_path as *const _ as usize;\n path - base\n }\n}\n\n#[derive(Debug)]\npub struct UnixSocket {\n io: Io,\n}\n\nimpl UnixSocket {\n \/\/\/ Returns a new, unbound, non-blocking Unix domain socket\n pub fn stream() -> io::Result<UnixSocket> {\n #[cfg(target_os = \"linux\")]\n use libc::{SOCK_CLOEXEC, SOCK_NONBLOCK};\n #[cfg(not(target_os = \"linux\"))]\n const SOCK_CLOEXEC: libc::c_int = 0;\n #[cfg(not(target_os = \"linux\"))]\n const SOCK_NONBLOCK: libc::c_int = 0;\n\n unsafe {\n if cfg!(target_os = \"linux\") {\n let flags = libc::SOCK_STREAM | SOCK_CLOEXEC | 
SOCK_NONBLOCK;\n match cvt(libc::socket(libc::AF_UNIX, flags, 0)) {\n Ok(fd) => return Ok(UnixSocket::from_raw_fd(fd)),\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {}\n Err(e) => return Err(e),\n }\n }\n\n let fd = try!(cvt(libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0)));\n let fd = UnixSocket::from_raw_fd(fd);\n try!(set_cloexec(fd.as_raw_fd()));\n try!(set_nonblock(fd.as_raw_fd()));\n Ok(fd)\n }\n }\n\n \/\/\/ Connect the socket to the specified address\n pub fn connect<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {\n unsafe {\n let (addr, len) = try!(sockaddr_un(addr.as_ref()));\n try!(cvt(libc::connect(self.as_raw_fd(),\n &addr as *const _ as *const _,\n len)));\n Ok(())\n }\n }\n\n \/\/\/ Listen for incoming requests\n pub fn listen(&self, backlog: usize) -> io::Result<()> {\n unsafe {\n try!(cvt(libc::listen(self.as_raw_fd(), backlog as i32)));\n Ok(())\n }\n }\n\n pub fn accept(&self) -> io::Result<UnixSocket> {\n unsafe {\n let fd = try!(cvt(libc::accept(self.as_raw_fd(),\n 0 as *mut _,\n 0 as *mut _)));\n let fd = Io::from_raw_fd(fd);\n try!(set_cloexec(fd.as_raw_fd()));\n try!(set_nonblock(fd.as_raw_fd()));\n Ok(UnixSocket { io: fd })\n }\n }\n\n \/\/\/ Bind the socket to the specified address\n pub fn bind<P: AsRef<Path> + ?Sized>(&self, addr: &P) -> io::Result<()> {\n unsafe {\n let (addr, len) = try!(sockaddr_un(addr.as_ref()));\n try!(cvt(libc::bind(self.as_raw_fd(),\n &addr as *const _ as *const _,\n len)));\n Ok(())\n }\n }\n\n pub fn try_clone(&self) -> io::Result<UnixSocket> {\n Ok(UnixSocket { io: try!(self.io.try_clone()) })\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n unsafe {\n try!(cvt(libc::shutdown(self.as_raw_fd(), how)));\n Ok(())\n }\n }\n\n pub fn read_recv_fd(&mut self, buf: &mut [u8]) -> io::Result<(usize, Option<RawFd>)> {\n unsafe {\n let mut iov = libc::iovec {\n iov_base: buf.as_mut_ptr() as *mut _,\n iov_len: buf.len(),\n };\n struct Cmsg {\n hdr: libc::cmsghdr,\n data: [libc::c_int; 1],\n }\n let mut cmsg: Cmsg = mem::zeroed();\n let mut msg: libc::msghdr = mem::zeroed();\n msg.msg_iov = &mut iov;\n msg.msg_iovlen = 1;\n msg.msg_control = &mut cmsg as *mut _ as *mut _;\n msg.msg_controllen = mem::size_of_val(&cmsg).my_into();\n let bytes = try!(cvt(libc::recvmsg(self.as_raw_fd(), &mut msg, 0)));\n\n const SCM_RIGHTS: libc::c_int = 1;\n\n let fd = if cmsg.hdr.cmsg_level == libc::SOL_SOCKET &&\n cmsg.hdr.cmsg_type == SCM_RIGHTS {\n Some(cmsg.data[0])\n } else {\n None\n };\n Ok((bytes as usize, fd))\n }\n }\n\n pub fn write_send_fd(&mut self, buf: &[u8], fd: RawFd) -> io::Result<usize> {\n unsafe {\n let mut iov = libc::iovec {\n iov_base: buf.as_ptr() as *mut _,\n iov_len: buf.len(),\n };\n struct Cmsg {\n hdr: libc::cmsghdr,\n data: [libc::c_int; 1],\n }\n let mut cmsg: Cmsg = mem::zeroed();\n cmsg.hdr.cmsg_len = mem::size_of_val(&cmsg).my_into();\n cmsg.hdr.cmsg_level = libc::SOL_SOCKET;\n cmsg.hdr.cmsg_type = 1; \/\/ SCM_RIGHTS\n cmsg.data[0] = fd;\n let mut msg: libc::msghdr = mem::zeroed();\n msg.msg_iov = &mut iov;\n msg.msg_iovlen = 1;\n msg.msg_control = &mut cmsg as *mut _ as *mut _;\n msg.msg_controllen = mem::size_of_val(&cmsg).my_into();\n let bytes = try!(cvt(libc::sendmsg(self.as_raw_fd(), &msg, 0)));\n Ok(bytes as usize)\n }\n }\n}\n\nimpl Read for UnixSocket {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.io.read(buf)\n 
}\n}\n\nimpl Write for UnixSocket {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.io.write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.io.flush()\n }\n}\n\nimpl Evented for UnixSocket {\n fn register(&self, poll: &Poll, token: Token, interest: Ready, opts: PollOpt) -> io::Result<()> {\n self.io.register(poll, token, interest, opts)\n }\n\n fn reregister(&self, poll: &Poll, token: Token, interest: Ready, opts: PollOpt) -> io::Result<()> {\n self.io.reregister(poll, token, interest, opts)\n }\n\n fn deregister(&self, poll: &Poll) -> io::Result<()> {\n self.io.deregister(poll)\n }\n}\n\n\nimpl From<Io> for UnixSocket {\n fn from(io: Io) -> UnixSocket {\n UnixSocket { io: io }\n }\n}\n\nimpl FromRawFd for UnixSocket {\n unsafe fn from_raw_fd(fd: RawFd) -> UnixSocket {\n UnixSocket { io: Io::from_raw_fd(fd) }\n }\n}\n\nimpl IntoRawFd for UnixSocket {\n fn into_raw_fd(self) -> RawFd {\n self.io.into_raw_fd()\n }\n}\n\nimpl AsRawFd for UnixSocket {\n fn as_raw_fd(&self) -> RawFd {\n self.io.as_raw_fd()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for g_print* functions<commit_after>extern crate glib;\n\nuse glib::*;\n\nuse std::sync::{Arc, Mutex};\n\n\/\/ Funny thing: we can't put those two tests in two different functions, otherwise they might\n\/\/ conflict with the results of the other one (or it would be mandatory to run the tests on only\n\/\/ one thread).\n#[test]\nfn check_print_handler() {\n \/\/\n \/\/ g_print check part\n \/\/\n let count = Arc::new(Mutex::new(0));\n set_print_handler(clone!(@weak count => move |_| {\n \/\/ we don't care about the message in here!\n *count.lock().expect(\"failed to lock 1\") += 1;\n }));\n g_print!(\"test\");\n assert_eq!(*count.lock().expect(\"failed to lock 2\"), 1);\n g_printerr!(\"one\");\n assert_eq!(*count.lock().expect(\"failed to lock 3\"), 1);\n g_print!(\"another\");\n assert_eq!(*count.lock().expect(\"failed to lock 4\"), 2);\n unset_print_handler();\n g_print!(\"tadam\");\n assert_eq!(*count.lock().expect(\"failed to lock 5\"), 2);\n g_printerr!(\"toudoum\");\n assert_eq!(*count.lock().expect(\"failed to lock 6\"), 2);\n\n \/\/\n \/\/ g_printerr check part\n \/\/\n let count = Arc::new(Mutex::new(0));\n set_printerr_handler(clone!(@weak count => move |_| {\n \/\/ we don't care about the message in here!\n *count.lock().expect(\"failed to lock a\") += 1;\n }));\n g_printerr!(\"test\");\n assert_eq!(*count.lock().expect(\"failed to lock b\"), 1);\n g_print!(\"one\");\n assert_eq!(*count.lock().expect(\"failed to lock c\"), 1);\n g_printerr!(\"another\");\n assert_eq!(*count.lock().expect(\"failed to lock d\"), 2);\n unset_printerr_handler();\n g_printerr!(\"tadam\");\n assert_eq!(*count.lock().expect(\"failed to lock e\"), 2);\n g_print!(\"toudoum\");\n assert_eq!(*count.lock().expect(\"failed to lock f\"), 2);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::{fmt, mem, usize};\nuse std::ops::{Index, IndexMut};\nuse token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n entries: Vec<Entry<T>>,\n \/\/ Number of elements currently in the slab\n len: usize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. 
Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: usize,\n}\n\nconst MAX: usize = usize::MAX;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\n\/\/ TODO: Once NonZero lands, use it to optimize the layout\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let entries = Vec::with_capacity(cap);\n\n Slab {\n entries: entries,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.entries.capacity() - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx < self.entries.len() {\n return self.entries[idx].in_use();\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n assert!(self.contains(idx), \"slab does not contain token `{:?}`\", idx);\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_ref();\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_mut();\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = self.nxt;\n\n if idx == self.entries.len() {\n \/\/ Using an uninitialized entry\n if idx == self.entries.capacity() {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.entries.capacity());\n return Err(val);\n }\n\n self.entries.push(Entry {\n nxt: MAX,\n val: Some(val),\n });\n\n self.len += 1;\n self.nxt = self.len;\n }\n else {\n self.len += 1;\n self.nxt = self.entries[idx].put(val);\n }\n\n Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > self.entries.len() {\n return None;\n }\n\n match self.entries[idx].remove(self.nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter { iter: self.iter() }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> usize {\n if idx < self.entries.len() {\n return idx;\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.entries.capacity());\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: usize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: Token) -> &'a T {\n let idx = self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_ref()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: Token) -> &'a mut T {\n let idx = 
self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_mut()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.entries.capacity())\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: usize,\n val: Option<T>,\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T) -> usize{\n let ret = self.nxt;\n self.val = Some(val);\n ret\n }\n\n fn remove(&mut self, nxt: usize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n self.val.take()\n } else {\n None\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.val.is_some()\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: usize,\n yielded: usize\n}\n\nimpl<'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n match self.slab.entries[self.cur_idx].val {\n Some(ref v) => {\n self.cur_idx += 1;\n self.yielded += 1;\n return Some(v);\n }\n None => {\n self.cur_idx += 1;\n }\n }\n }\n\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n iter: SlabIter<'a, T>,\n}\n\nimpl<'a, 'b, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'b mut T;\n\n fn next(&mut self) -> Option<&'b mut T> {\n unsafe { mem::transmute(self.iter.next()) }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(&slab[t1][..], \"foobar\");\n }\n\n #[test]\n #[should_panic]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = 
slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_panic]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n #[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 1, 2, 3]);\n\n slab.remove(Token(1));\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![1, 2, 3, 4]);\n\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![2, 3, 5]);\n }\n}\n<commit_msg>Implement IntoInterator instances for Slab<commit_after>use std::{fmt, mem, usize};\nuse std::iter::IntoIterator;\nuse std::ops::{Index, IndexMut};\nuse token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n entries: Vec<Entry<T>>,\n \/\/ Number of elements currently in the slab\n len: usize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. 
Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: usize,\n}\n\nconst MAX: usize = usize::MAX;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\n\/\/ TODO: Once NonZero lands, use it to optimize the layout\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let entries = Vec::with_capacity(cap);\n\n Slab {\n entries: entries,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.entries.capacity() - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx < self.entries.len() {\n return self.entries[idx].in_use();\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n assert!(self.contains(idx), \"slab does not contain token `{:?}`\", idx);\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_ref();\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_mut();\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = self.nxt;\n\n if idx == self.entries.len() {\n \/\/ Using an uninitialized entry\n if idx == self.entries.capacity() {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.entries.capacity());\n return Err(val);\n }\n\n self.entries.push(Entry {\n nxt: MAX,\n val: Some(val),\n });\n\n self.len += 1;\n self.nxt = self.len;\n }\n else {\n self.len += 1;\n self.nxt = self.entries[idx].put(val);\n }\n\n Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > self.entries.len() {\n return None;\n }\n\n match self.entries[idx].remove(self.nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter { iter: self.iter() }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> usize {\n if idx < self.entries.len() {\n return idx;\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.entries.capacity());\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: usize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: Token) -> &'a T {\n let idx = self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_ref()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: Token) -> &'a mut T {\n let idx = 
self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_mut()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.entries.capacity())\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: usize,\n val: Option<T>,\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T) -> usize{\n let ret = self.nxt;\n self.val = Some(val);\n ret\n }\n\n fn remove(&mut self, nxt: usize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n self.val.take()\n } else {\n None\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.val.is_some()\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: usize,\n yielded: usize\n}\n\nimpl<'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n match self.slab.entries[self.cur_idx].val {\n Some(ref v) => {\n self.cur_idx += 1;\n self.yielded += 1;\n return Some(v);\n }\n None => {\n self.cur_idx += 1;\n }\n }\n }\n\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n iter: SlabIter<'a, T>,\n}\n\nimpl<'a, 'b, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'b mut T;\n\n fn next(&mut self) -> Option<&'b mut T> {\n unsafe { mem::transmute(self.iter.next()) }\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Slab<T> {\n type Item = &'a T;\n type IntoIter = SlabIter<'a, T>;\n\n fn into_iter(self) -> SlabIter<'a, T> {\n self.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Slab<T> {\n type Item = &'a mut T;\n type IntoIter = SlabMutIter<'a, T>;\n\n fn into_iter(self) -> SlabMutIter<'a, T> {\n self.iter_mut()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(&slab[t1][..], \"foobar\");\n }\n\n #[test]\n #[should_panic]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n 
assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_panic]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n #[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 1, 2, 3]);\n\n slab.remove(Token(1));\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![1, 2, 3, 4]);\n\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![2, 3, 5]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock which is suitable for\n\/\/\/ measuring the amount of time that an operation takes.\n\/\/\/\n\/\/\/ Instants are guaranteed always be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. 
There is\n\/\/\/ no method to get \"the number of seconds\" from an instant but instead it only\n\/\/\/ allow learning the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps such as those\n\/\/\/ on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. By calculating the duration from this\n\/\/\/ fixed point in time a `SystemTime` can be converted to a human-readable time\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. 
Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this instant\n \/\/\/ which can happen if an `Instant` is produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. 
If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second duration, `earlier`, actually represents a\n \/\/\/ point later in time than the `self` of the method call. This function\n \/\/\/ will extract and return the amount of time later `earlier` actually is.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! 
assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 1) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<commit_msg>Auto merge of #30061 - tshepang:doc-time, r=brson<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock.\n\/\/\/\n\/\/\/ Instants are always guaranteed to be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words, each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. There is\n\/\/\/ no method to get \"the number of seconds\" from an instant. Instead, it only\n\/\/\/ allows measuring the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps such as those\n\/\/\/ on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. By calculating the duration from this\n\/\/\/ fixed point in time, a `SystemTime` can be converted to a human-readable time,\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. 
Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this\n \/\/\/ instant, which is something that can happen if an `Instant` is\n \/\/\/ produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(Duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. 
If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second system time represents a point later\n \/\/\/ in time than the `self` of the method call.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! 
assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 1) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create c_str.rs<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! C-string manipulation and management\n\/\/!\n\/\/! This modules provides the basic methods for creating and manipulating\n\/\/! null-terminated strings for use with FFI calls (back to C). Most C APIs require\n\/\/! that the string being passed to them is null-terminated, and by default rust's\n\/\/! string types are *not* null terminated.\n\/\/!\n\/\/! The other problem with translating Rust strings to C strings is that Rust\n\/\/! strings can validly contain a null-byte in the middle of the string (0 is a\n\/\/! 
valid Unicode codepoint). This means that not all Rust strings can actually be\n\/\/! translated to C strings.\n\/\/!\n\/\/! # Creation of a C string\n\/\/!\n\/\/! A C string is managed through the `CString` type defined in this module. It\n\/\/! \"owns\" the internal buffer of characters and will automatically deallocate the\n\/\/! buffer when the string is dropped. The `ToCStr` trait is implemented for `&str`\n\/\/! and `&[u8]`, but the conversions can fail due to some of the limitations\n\/\/! explained above.\n\/\/!\n\/\/! This also means that currently whenever a C string is created, an allocation\n\/\/! must be performed to place the data elsewhere (the lifetime of the C string is\n\/\/! not tied to the lifetime of the original string\/data buffer). If C strings are\n\/\/! heavily used in applications, then caching may be advisable to prevent\n\/\/! unnecessary amounts of allocations.\n\/\/!\n\/\/! Be carefull to remember that the memory is managed by C allocator API and not\n\/\/! by Rust allocator API.\n\/\/! That means that the CString pointers should be freed with C allocator API\n\/\/! if you intend to do that on your own, as the behaviour if you free them with\n\/\/! Rust's allocator API is not well defined\n\/\/!\n\/\/! An example of creating and using a C string would be:\n\/\/!\n\/\/! ```rust\n\/\/! extern crate libc;\n\/\/!\n\/\/! use std::c_str::ToCStr;\n\/\/!\n\/\/! extern {\n\/\/! fn puts(s: *const libc::c_char);\n\/\/! }\n\/\/!\n\/\/! fn main() {\n\/\/! let my_string = \"Hello, world!\";\n\/\/!\n\/\/! \/\/ Allocate the C string with an explicit local that owns the string. The\n\/\/! \/\/ `c_buffer` pointer will be deallocated when `my_c_string` goes out of scope.\n\/\/! let my_c_string = my_string.to_c_str();\n\/\/! unsafe {\n\/\/! puts(my_c_string.as_ptr());\n\/\/! }\n\/\/!\n\/\/! \/\/ Don't save\/return the pointer to the C string, the `c_buffer` will be\n\/\/! \/\/ deallocated when this block returns!\n\/\/! my_string.with_c_str(|c_buffer| {\n\/\/! unsafe { puts(c_buffer); }\n\/\/! });\n\/\/! }\n\/\/! ```\n\nuse core::prelude::*;\nuse libc;\n\nuse cmp::Ordering;\nuse fmt;\nuse hash;\nuse mem;\nuse ptr;\nuse slice::{self, IntSliceExt};\nuse str;\nuse string::String;\nuse core::kinds::marker;\n\n\/\/\/ The representation of a C String.\n\/\/\/\n\/\/\/ This structure wraps a `*libc::c_char`, and will automatically free the\n\/\/\/ memory it is pointing to when it goes out of scope.\n#[allow(missing_copy_implementations)]\npub struct CString {\n buf: *const libc::c_char,\n owns_buffer_: bool,\n}\n\nunsafe impl Send for CString { }\nunsafe impl Sync for CString { }\n\nimpl Clone for CString {\n \/\/\/ Clone this CString into a new, uniquely owned CString. 
For safety\n \/\/\/ reasons, this is always a deep clone with the memory allocated\n \/\/\/ with C's allocator API, rather than the usual shallow clone.\n fn clone(&self) -> CString {\n let len = self.len() + 1;\n let buf = unsafe { libc::malloc(len as libc::size_t) } as *mut libc::c_char;\n if buf.is_null() { ::alloc::oom() }\n unsafe { ptr::copy_nonoverlapping_memory(buf, self.buf, len); }\n CString { buf: buf as *const libc::c_char, owns_buffer_: true }\n }\n}\n\nimpl PartialEq for CString {\n fn eq(&self, other: &CString) -> bool {\n \/\/ Check if the two strings share the same buffer\n if self.buf as uint == other.buf as uint {\n true\n } else {\n unsafe {\n libc::strcmp(self.buf, other.buf) == 0\n }\n }\n }\n}\n\nimpl PartialOrd for CString {\n #[inline]\n fn partial_cmp(&self, other: &CString) -> Option<Ordering> {\n self.as_bytes().partial_cmp(other.as_bytes())\n }\n}\n\nimpl Eq for CString {}\n\nimpl<S: hash::Writer> hash::Hash<S> for CString {\n #[inline]\n fn hash(&self, state: &mut S) {\n self.as_bytes().hash(state)\n }\n}\n\nimpl CString {\n \/\/\/ Create a C String from a pointer, with memory managed by C's allocator\n \/\/\/ API, so avoid calling it with a pointer to memory managed by Rust's\n \/\/\/ allocator API, as the behaviour would not be well defined.\n \/\/\/\n \/\/\/# Panics\n \/\/\/\n \/\/\/ Panics if `buf` is null\n pub unsafe fn new(buf: *const libc::c_char, owns_buffer: bool) -> CString {\n assert!(!buf.is_null());\n CString { buf: buf, owns_buffer_: owns_buffer }\n }\n\n \/\/\/ Return a pointer to the NUL-terminated string data.\n \/\/\/\n \/\/\/ `.as_ptr` returns an internal pointer into the `CString`, and\n \/\/\/ may be invalidated when the `CString` falls out of scope (the\n \/\/\/ destructor will run, freeing the allocation if there is\n \/\/\/ one).\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use std::c_str::ToCStr;\n \/\/\/\n \/\/\/ let foo = \"some string\";\n \/\/\/\n \/\/\/ \/\/ right\n \/\/\/ let x = foo.to_c_str();\n \/\/\/ let p = x.as_ptr();\n \/\/\/\n \/\/\/ \/\/ wrong (the CString will be freed, invalidating `p`)\n \/\/\/ let p = foo.to_c_str().as_ptr();\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ extern crate libc;\n \/\/\/\n \/\/\/ use std::c_str::ToCStr;\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let c_str = \"foo bar\".to_c_str();\n \/\/\/ unsafe {\n \/\/\/ libc::puts(c_str.as_ptr());\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n pub fn as_ptr(&self) -> *const libc::c_char {\n self.buf\n }\n\n \/\/\/ Return a mutable pointer to the NUL-terminated string data.\n \/\/\/\n \/\/\/ `.as_mut_ptr` returns an internal pointer into the `CString`, and\n \/\/\/ may be invalidated when the `CString` falls out of scope (the\n \/\/\/ destructor will run, freeing the allocation if there is\n \/\/\/ one).\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use std::c_str::ToCStr;\n \/\/\/\n \/\/\/ let foo = \"some string\";\n \/\/\/\n \/\/\/ \/\/ right\n \/\/\/ let mut x = foo.to_c_str();\n \/\/\/ let p = x.as_mut_ptr();\n \/\/\/\n \/\/\/ \/\/ wrong (the CString will be freed, invalidating `p`)\n \/\/\/ let p = foo.to_c_str().as_mut_ptr();\n \/\/\/ ```\n pub fn as_mut_ptr(&mut self) -> *mut libc::c_char {\n self.buf as *mut _\n }\n\n \/\/\/ Returns whether or not the `CString` owns the buffer.\n pub fn owns_buffer(&self) -> bool {\n self.owns_buffer_\n }\n\n \/\/\/ Converts the CString into a `&[u8]` without copying.\n \/\/\/ Includes the terminating NUL byte.\n #[inline]\n pub fn as_bytes<'a>(&'a self) -> &'a [u8] {\n unsafe {\n slice::from_raw_buf(&self.buf, 
self.len() + 1).as_unsigned()\n }\n }\n\n \/\/\/ Converts the CString into a `&[u8]` without copying.\n \/\/\/ Does not include the terminating NUL byte.\n #[inline]\n pub fn as_bytes_no_nul<'a>(&'a self) -> &'a [u8] {\n unsafe {\n slice::from_raw_buf(&self.buf, self.len()).as_unsigned()\n }\n }\n\n \/\/\/ Converts the CString into a `&str` without copying.\n \/\/\/ Returns None if the CString is not UTF-8.\n #[inline]\n pub fn as_str<'a>(&'a self) -> Option<&'a str> {\n let buf = self.as_bytes_no_nul();\n str::from_utf8(buf).ok()\n }\n\n \/\/\/ Return a CString iterator.\n pub fn iter<'a>(&'a self) -> CChars<'a> {\n CChars {\n ptr: self.buf,\n marker: marker::ContravariantLifetime,\n }\n }\n\n \/\/\/ Unwraps the wrapped `*libc::c_char` from the `CString` wrapper.\n \/\/\/\n \/\/\/ Any ownership of the buffer by the `CString` wrapper is\n \/\/\/ forgotten, meaning that the backing allocation of this\n \/\/\/ `CString` is not automatically freed if it owns the\n \/\/\/ allocation. In this case, a user of `.unwrap()` should ensure\n \/\/\/ the allocation is freed, to avoid leaking memory. You should\n \/\/\/ use libc's memory allocator in this case.\n \/\/\/\n \/\/\/ Prefer `.as_ptr()` when just retrieving a pointer to the\n \/\/\/ string data, as that does not relinquish ownership.\n pub unsafe fn into_inner(mut self) -> *const libc::c_char {\n self.owns_buffer_ = false;\n self.buf\n }\n\n \/\/\/ Return the number of bytes in the CString (not including the NUL\n \/\/\/ terminator).\n #[inline]\n pub fn len(&self) -> uint {\n unsafe { libc::strlen(self.buf) as uint }\n }\n\n \/\/\/ Returns if there are no bytes in this string\n #[inline]\n pub fn is_empty(&self) -> bool { self.len() == 0 }\n}\n\nimpl Drop for CString {\n fn drop(&mut self) {\n if self.owns_buffer_ {\n unsafe {\n libc::free(self.buf as *mut libc::c_void)\n }\n }\n }\n}\n\nimpl fmt::Show for CString {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n String::from_utf8_lossy(self.as_bytes_no_nul()).fmt(f)\n }\n}\n\n\/\/\/ A generic trait for converting a value to a CString.\npub trait ToCStr for Sized? 
{\n \/\/\/ Copy the receiver into a CString.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics the task if the receiver has an interior null.\n fn to_c_str(&self) -> CString;\n\n \/\/\/ Unsafe variant of `to_c_str()` that doesn't check for nulls.\n unsafe fn to_c_str_unchecked(&self) -> CString;\n\n \/\/\/ Work with a temporary CString constructed from the receiver.\n \/\/\/ The provided `*libc::c_char` will be freed immediately upon return.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ extern crate libc;\n \/\/\/\n \/\/\/ use std::c_str::ToCStr;\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let s = \"PATH\".with_c_str(|path| unsafe {\n \/\/\/ libc::getenv(path)\n \/\/\/ });\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Panics the task if the receiver has an interior null.\n #[inline]\n fn with_c_str<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n let c_str = self.to_c_str();\n f(c_str.as_ptr())\n }\n\n \/\/\/ Unsafe variant of `with_c_str()` that doesn't check for nulls.\n #[inline]\n unsafe fn with_c_str_unchecked<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n let c_str = self.to_c_str_unchecked();\n f(c_str.as_ptr())\n }\n}\n\nimpl ToCStr for str {\n #[inline]\n fn to_c_str(&self) -> CString {\n self.as_bytes().to_c_str()\n }\n\n #[inline]\n unsafe fn to_c_str_unchecked(&self) -> CString {\n self.as_bytes().to_c_str_unchecked()\n }\n\n #[inline]\n fn with_c_str<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n self.as_bytes().with_c_str(f)\n }\n\n #[inline]\n unsafe fn with_c_str_unchecked<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n self.as_bytes().with_c_str_unchecked(f)\n }\n}\n\nimpl ToCStr for String {\n #[inline]\n fn to_c_str(&self) -> CString {\n self.as_bytes().to_c_str()\n }\n\n #[inline]\n unsafe fn to_c_str_unchecked(&self) -> CString {\n self.as_bytes().to_c_str_unchecked()\n }\n\n #[inline]\n fn with_c_str<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n self.as_bytes().with_c_str(f)\n }\n\n #[inline]\n unsafe fn with_c_str_unchecked<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n self.as_bytes().with_c_str_unchecked(f)\n }\n}\n\n\/\/ The length of the stack allocated buffer for `vec.with_c_str()`\nconst BUF_LEN: uint = 128;\n\nimpl ToCStr for [u8] {\n fn to_c_str(&self) -> CString {\n let mut cs = unsafe { self.to_c_str_unchecked() };\n check_for_null(self, cs.as_mut_ptr());\n cs\n }\n\n unsafe fn to_c_str_unchecked(&self) -> CString {\n let self_len = self.len();\n let buf = libc::malloc(self_len as libc::size_t + 1) as *mut u8;\n if buf.is_null() { ::alloc::oom() }\n\n ptr::copy_memory(buf, self.as_ptr(), self_len);\n *buf.offset(self_len as int) = 0;\n\n CString::new(buf as *const libc::c_char, true)\n }\n\n fn with_c_str<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n unsafe { with_c_str(self, true, f) }\n }\n\n unsafe fn with_c_str_unchecked<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n with_c_str(self, false, f)\n }\n}\n\nimpl<'a, Sized? 
T: ToCStr> ToCStr for &'a T {\n #[inline]\n fn to_c_str(&self) -> CString {\n (**self).to_c_str()\n }\n\n #[inline]\n unsafe fn to_c_str_unchecked(&self) -> CString {\n (**self).to_c_str_unchecked()\n }\n\n #[inline]\n fn with_c_str<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n (**self).with_c_str(f)\n }\n\n #[inline]\n unsafe fn with_c_str_unchecked<T, F>(&self, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n {\n (**self).with_c_str_unchecked(f)\n }\n}\n\n\/\/ Unsafe function that handles possibly copying the &[u8] into a stack array.\nunsafe fn with_c_str<T, F>(v: &[u8], checked: bool, f: F) -> T where\n F: FnOnce(*const libc::c_char) -> T,\n{\n let c_str = if v.len() < BUF_LEN {\n let mut buf: [u8; BUF_LEN] = mem::uninitialized();\n slice::bytes::copy_memory(&mut buf, v);\n buf[v.len()] = 0;\n\n let buf = buf.as_mut_ptr();\n if checked {\n check_for_null(v, buf as *mut libc::c_char);\n }\n\n return f(buf as *const libc::c_char)\n } else if checked {\n v.to_c_str()\n } else {\n v.to_c_str_unchecked()\n };\n\n f(c_str.as_ptr())\n}\n\n#[inline]\nfn check_for_null(v: &[u8], buf: *mut libc::c_char) {\n for i in range(0, v.len()) {\n unsafe {\n let p = buf.offset(i as int);\n assert!(*p != 0);\n }\n }\n}\n\n\/\/\/ External iterator for a CString's bytes.\n\/\/\/\n\/\/\/ Use with the `std::iter` module.\n#[allow(raw_pointer_deriving)]\n#[derive(Clone)]\npub struct CChars<'a> {\n ptr: *const libc::c_char,\n marker: marker::ContravariantLifetime<'a>,\n}\n\nimpl<'a> Iterator for CChars<'a> {\n type Item = libc::c_char;\n\n fn next(&mut self) -> Option<libc::c_char> {\n let ch = unsafe { *self.ptr };\n if ch == 0 {\n None\n } else {\n self.ptr = unsafe { self.ptr.offset(1) };\n Some(ch)\n }\n }\n}\n\n\/\/\/ Parses a C \"multistring\", eg windows env values or\n\/\/\/ the req->ptr result in a uv_fs_readdir() call.\n\/\/\/\n\/\/\/ Optionally, a `count` can be passed in, limiting the\n\/\/\/ parsing to only being done `count`-times.\n\/\/\/\n\/\/\/ The specified closure is invoked with each string that\n\/\/\/ is found, and the number of strings found is returned.\npub unsafe fn from_c_multistring<F>(buf: *const libc::c_char,\n count: Option<uint>,\n mut f: F)\n -> uint where\n F: FnMut(&CString),\n{\n\n let mut curr_ptr: uint = buf as uint;\n let mut ctr = 0;\n let (limited_count, limit) = match count {\n Some(limit) => (true, limit),\n None => (false, 0)\n };\n while ((limited_count && ctr < limit) || !limited_count)\n && *(curr_ptr as *const libc::c_char) != 0 as libc::c_char {\n let cstr = CString::new(curr_ptr as *const libc::c_char, false);\n f(&cstr);\n curr_ptr += cstr.len() + 1;\n ctr += 1;\n }\n return ctr;\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n use super::*;\n use ptr;\n use thread::Thread;\n use libc;\n\n #[test]\n fn test_str_multistring_parsing() {\n unsafe {\n let input = b\"zero\\0one\\0\\0\";\n let ptr = input.as_ptr();\n let expected = [\"zero\", \"one\"];\n let mut it = expected.iter();\n let result = from_c_multistring(ptr as *const libc::c_char, None, |c| {\n let cbytes = c.as_bytes_no_nul();\n assert_eq!(cbytes, it.next().unwrap().as_bytes());\n });\n assert_eq!(result, 2);\n assert!(it.next().is_none());\n }\n }\n\n #[test]\n fn test_str_to_c_str() {\n let c_str = \"\".to_c_str();\n unsafe {\n assert_eq!(*c_str.as_ptr().offset(0), 0);\n }\n\n let c_str = \"hello\".to_c_str();\n let buf = c_str.as_ptr();\n unsafe {\n assert_eq!(*buf.offset(0), 'h' as libc::c_char);\n assert_eq!(*buf.offset(1), 'e' as libc::c_char);\n 
assert_eq!(*buf.offset(2), 'l' as libc::c_char);\n assert_eq!(*buf.offset(3), 'l' as libc::c_char);\n assert_eq!(*buf.offset(4), 'o' as libc::c_char);\n assert_eq!(*buf.offset(5), 0);\n }\n }\n\n #[test]\n fn test_vec_to_c_str() {\n let b: &[u8] = &[];\n let c_str = b.to_c_str();\n unsafe {\n assert_eq!(*c_str.as_ptr().offset(0), 0);\n }\n\n let c_str = b\"hello\".to_c_str();\n let buf = c_str.as_ptr();\n unsafe {\n assert_eq!(*buf.offset(0), 'h' as libc::c_char);\n assert_eq!(*buf.offset(1), 'e' as libc::c_char);\n assert_eq!(*buf.offset(2), 'l' as libc::c_char);\n assert_eq!(*buf.offset(3), 'l' as libc::c_char);\n assert_eq!(*buf.offset(4), 'o' as libc::c_char);\n assert_eq!(*buf.offset(5), 0);\n }\n\n let c_str = b\"foo\\xFF\".to_c_str();\n let buf = c_str.as_ptr();\n unsafe {\n assert_eq!(*buf.offset(0), 'f' as libc::c_char);\n assert_eq!(*buf.offset(1), 'o' as libc::c_char);\n assert_eq!(*buf.offset(2), 'o' as libc::c_char);\n assert_eq!(*buf.offset(3), 0xffu8 as libc::c_char);\n assert_eq!(*buf.offset(4), 0);\n }\n }\n\n #[test]\n fn test_unwrap() {\n let c_str = \"hello\".to_c_str();\n unsafe { libc::free(c_str.into_inner() as *mut libc::c_void) }\n }\n\n #[test]\n fn test_as_ptr() {\n let c_str = \"hello\".to_c_str();\n let len = unsafe { libc::strlen(c_str.as_ptr()) };\n assert_eq!(len, 5);\n }\n\n #[test]\n fn test_iterator() {\n let c_str = \"\".to_c_str();\n let mut iter = c_str.iter();\n assert_eq!(iter.next(), None);\n\n let c_str = \"hello\".to_c_str();\n let mut iter = c_str.iter();\n assert_eq!(iter.next(), Some('h' as libc::c_char));\n assert_eq!(iter.next(), Some('e' as libc::c_char));\n assert_eq!(iter.next(), Some('l' as libc::c_char));\n assert_eq!(iter.next(), Some('l' as libc::c_char));\n assert_eq!(iter.next(), Some('o' as libc::c_char));\n assert_eq!(iter.next(), None);\n }\n\n #[test]\n fn test_to_c_str_fail() {\n assert!(Thread::spawn(move|| { \"he\\x00llo\".to_c_str() }).join().is_err());\n }\n\n #[test]\n fn test_to_c_str_unchecked() {\n unsafe {\n let c_string = \"he\\x00llo\".to_c_str_unchecked();\n let buf = c_string.as_ptr();\n assert_eq!(*buf.offset(0), 'h' as libc::c_char);\n assert_eq!(*buf.offset(1), 'e' as libc::c_char);\n assert_eq!(*buf.offset(2), 0);\n assert_eq!(*buf.offset(3), 'l' as libc::c_char);\n assert_eq!(*buf.offset(4), 'l' as libc::c_char);\n assert_eq!(*buf.offset(5), 'o' as libc::c_char);\n assert_eq!(*buf.offset(6), 0);\n }\n }\n\n #[test]\n fn test_as_bytes() {\n let c_str = \"hello\".to_c_str();\n assert_eq!(c_str.as_bytes(), b\"hello\\0\");\n let c_str = \"\".to_c_str();\n assert_eq!(c_str.as_bytes(), b\"\\0\");\n let c_str = b\"foo\\xFF\".to_c_str();\n assert_eq!(c_str.as_bytes(), b\"foo\\xFF\\0\");\n }\n\n #[test]\n fn test_as_bytes_no_nul() {\n let c_str = \"hello\".to_c_str();\n assert_eq!(c_str.as_bytes_no_nul(), b\"hello\");\n let c_str = \"\".to_c_str();\n let exp: &[u8] = &[];\n assert_eq!(c_str.as_bytes_no_nul(), exp);\n let c_str = b\"foo\\xFF\".to_c_str();\n assert_eq!(c_str.as_bytes_no_nul(), b\"foo\\xFF\");\n }\n\n #[test]\n fn test_as_str() {\n let c_str = \"hello\".to_c_str();\n assert_eq!(c_str.as_str(), Some(\"hello\"));\n let c_str = \"\".to_c_str();\n assert_eq!(c_str.as_str(), Some(\"\"));\n let c_str = b\"foo\\xFF\".to_c_str();\n assert_eq!(c_str.as_str(), None);\n }\n\n #[test]\n #[should_fail]\n fn test_new_fail() {\n let _c_str = unsafe { CString::new(ptr::null(), false) };\n }\n\n #[test]\n fn test_clone() {\n let a = \"hello\".to_c_str();\n let b = a.clone();\n assert!(a == b);\n }\n\n #[test]\n fn 
test_clone_noleak() {\n fn foo<F>(f: F) where F: FnOnce(&CString) {\n let s = \"test\".to_string();\n let c = s.to_c_str();\n \/\/ give the closure a non-owned CString\n let mut c_ = unsafe { CString::new(c.as_ptr(), false) };\n f(&c_);\n \/\/ muck with the buffer for later printing\n unsafe { *c_.as_mut_ptr() = 'X' as libc::c_char }\n }\n\n let mut c_: Option<CString> = None;\n foo(|c| {\n c_ = Some(c.clone());\n c.clone();\n \/\/ force a copy, reading the memory\n c.as_bytes().to_vec();\n });\n let c_ = c_.unwrap();\n \/\/ force a copy, reading the memory\n c_.as_bytes().to_vec();\n }\n}\n\n#[cfg(test)]\nmod bench {\n extern crate test;\n\n use prelude::v1::*;\n use self::test::Bencher;\n use libc;\n use c_str::ToCStr;\n\n #[inline]\n fn check(s: &str, c_str: *const libc::c_char) {\n let s_buf = s.as_ptr();\n for i in range(0, s.len()) {\n unsafe {\n assert_eq!(\n *s_buf.offset(i as int) as libc::c_char,\n *c_str.offset(i as int));\n }\n }\n }\n\n static S_SHORT: &'static str = \"Mary\";\n static S_MEDIUM: &'static str = \"Mary had a little lamb\";\n static S_LONG: &'static str = \"\\\n Mary had a little lamb, Little lamb\n Mary had a little lamb, Little lamb\n Mary had a little lamb, Little lamb\n Mary had a little lamb, Little lamb\n Mary had a little lamb, Little lamb\n Mary had a little lamb, Little lamb\";\n\n fn bench_to_string(b: &mut Bencher, s: &str) {\n b.iter(|| {\n let c_str = s.to_c_str();\n check(s, c_str.as_ptr());\n })\n }\n\n #[bench]\n fn bench_to_c_str_short(b: &mut Bencher) {\n bench_to_string(b, S_SHORT)\n }\n\n #[bench]\n fn bench_to_c_str_medium(b: &mut Bencher) {\n bench_to_string(b, S_MEDIUM)\n }\n\n #[bench]\n fn bench_to_c_str_long(b: &mut Bencher) {\n bench_to_string(b, S_LONG)\n }\n\n fn bench_to_c_str_unchecked(b: &mut Bencher, s: &str) {\n b.iter(|| {\n let c_str = unsafe { s.to_c_str_unchecked() };\n check(s, c_str.as_ptr())\n })\n }\n\n #[bench]\n fn bench_to_c_str_unchecked_short(b: &mut Bencher) {\n bench_to_c_str_unchecked(b, S_SHORT)\n }\n\n #[bench]\n fn bench_to_c_str_unchecked_medium(b: &mut Bencher) {\n bench_to_c_str_unchecked(b, S_MEDIUM)\n }\n\n #[bench]\n fn bench_to_c_str_unchecked_long(b: &mut Bencher) {\n bench_to_c_str_unchecked(b, S_LONG)\n }\n\n fn bench_with_c_str(b: &mut Bencher, s: &str) {\n b.iter(|| {\n s.with_c_str(|c_str_buf| check(s, c_str_buf))\n })\n }\n\n #[bench]\n fn bench_with_c_str_short(b: &mut Bencher) {\n bench_with_c_str(b, S_SHORT)\n }\n\n #[bench]\n fn bench_with_c_str_medium(b: &mut Bencher) {\n bench_with_c_str(b, S_MEDIUM)\n }\n\n #[bench]\n fn bench_with_c_str_long(b: &mut Bencher) {\n bench_with_c_str(b, S_LONG)\n }\n\n fn bench_with_c_str_unchecked(b: &mut Bencher, s: &str) {\n b.iter(|| {\n unsafe {\n s.with_c_str_unchecked(|c_str_buf| check(s, c_str_buf))\n }\n })\n }\n\n #[bench]\n fn bench_with_c_str_unchecked_short(b: &mut Bencher) {\n bench_with_c_str_unchecked(b, S_SHORT)\n }\n\n #[bench]\n fn bench_with_c_str_unchecked_medium(b: &mut Bencher) {\n bench_with_c_str_unchecked(b, S_MEDIUM)\n }\n\n #[bench]\n fn bench_with_c_str_unchecked_long(b: &mut Bencher) {\n bench_with_c_str_unchecked(b, S_LONG)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some tests for attributes<commit_after>#![feature(phase)]\n#![feature(unboxed_closures)]\n\n#[phase(plugin)]\nextern crate glium_macros;\n\nextern crate glutin;\nextern crate glium;\n\nuse glium::Surface;\n\nmod support;\n\n#[test]\nfn attribute_types_matching() {\n let display = support::build_display();\n\n #[vertex_format]\n #[deriving(Copy)]\n 
struct Vertex {\n field1: [f32, ..2],\n field2: [i32, ..3],\n }\n\n let vertex_buffer = glium::VertexBuffer::new(&display, vec![\n Vertex { field1: [0.0, 0.0], field2: [0, 0, 0] }\n ]);\n let index_buffer = glium::IndexBuffer::new(&display,\n glium::index_buffer::PointsList(vec![0u16]));\n\n let program = glium::Program::new(&display,\n \/\/ vertex shader\n \"\n #version 110\n\n attribute vec2 field1;\n attribute ivec3 field2;\n\n void main() {\n float z = 0.0;\n gl_Position = vec4(field1, z, 1.0);\n }\n \",\n \"\n #version 110\n void main() {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/\/ drawing a frame\n let mut target = display.draw();\n target.draw(&vertex_buffer, &index_buffer, &program, &glium::uniforms::EmptyUniforms,\n &std::default::Default::default());\n target.finish();\n \n display.assert_no_error();\n}\n\n#[test]\n#[should_fail(expected = \"The program attributes do not match the vertex format\")]\nfn attribute_types_mismatch() {\n let display = support::build_display();\n\n #[vertex_format]\n #[deriving(Copy)]\n struct Vertex {\n field1: [f32, ..4],\n }\n\n let vertex_buffer = glium::VertexBuffer::new(&display, Vec::<Vertex>::new());\n let index_buffer = glium::IndexBuffer::new(&display,\n glium::index_buffer::PointsList(Vec::<u16>::new()));\n\n let program = glium::Program::new(&display,\n \/\/ vertex shader\n \"\n #version 110\n\n attribute vec2 field1;\n\n void main() {\n gl_Position = vec4(field1, 0.0, 1.0);\n }\n \",\n \"\n #version 110\n void main() {\n gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/\/ drawing a frame\n let mut target = display.draw();\n target.draw(&vertex_buffer, &index_buffer, &program, &glium::uniforms::EmptyUniforms,\n &std::default::Default::default());\n target.finish();\n \n display.assert_no_error();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Force a rebuild when spirv-reflect.cpp changes, rerun-if-changed in build.rs (partial fix for issue #44)<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, 
m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>\n {\n let globstr = self.prefix_of_files_for_module(m) + \"*.imag\";\n debug!(\"Globstring = {}\", globstr);\n glob(&globstr[..])\n .and_then(|globlist| {\n debug!(\"Iterating over globlist\");\n Ok(globlist_to_file_id_vec(globlist).into_iter())\n })\n .map_err(|e| {\n debug!(\"glob() returned error: {:?}\", e);\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n \/\/ Why the hack is Error not implemented for glob::PatternError\n \/\/ serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Result<IntoIter<File<'a>>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n debug!(\"Iterating ids and building files from them\");\n debug!(\" number of ids = {}\", ids.len());\n Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n .map_err(|e| {\n debug!(\"StorageBackend::iter_ids() returned error = {:?}\", e);\n let mut serr = StorageBackendError::new(\"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n use std::ops::Index;\n\n debug!(\"Searching for file with id '{}'\", id);\n\n if id.get_type() == FileIDType::NONE {\n \/\/ We don't know the hash type, so we glob() around a bit.\n debug!(\"Having FileIDType::NONE, so we glob() for the raw ID\");\n\n let id_str = id.get_id().unwrap_or(String::from(\"INVALID\"));\n let globstr = self.prefix_of_files_for_module(m) + \"*\" + &id_str[..] + \".imag\";\n debug!(\"Globbing with globstr = '{}'\", globstr);\n glob(&globstr[..]).map(|globlist| {\n let mut vec = globlist_to_file_id_vec(globlist).into_iter()\n .filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>();\n vec.reverse();\n vec.pop()\n }).unwrap_or({\n debug!(\"No glob matches, actually. We can't do anything at this point\");\n None\n })\n } else {\n \/\/ The (hash)type is already in the FileID object, so we can just\n \/\/ build a path from the information we already have\n debug!(\"We know FileIDType, so we build the path directly now\");\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success opening file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. 
I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] +\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError\n where S: Into<String>\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n\nfn globlist_to_file_id_vec(globlist: Paths) -> Vec<FileID> {\n globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::<Vec<FileID>>()\n}\n<commit_msg>Outsource: ids iterator to files vector<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n 
create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>\n {\n let globstr = self.prefix_of_files_for_module(m) + \"*.imag\";\n debug!(\"Globstring = {}\", globstr);\n glob(&globstr[..])\n .and_then(|globlist| {\n debug!(\"Iterating over globlist\");\n Ok(globlist_to_file_id_vec(globlist).into_iter())\n })\n .map_err(|e| {\n debug!(\"glob() returned error: {:?}\", e);\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n \/\/ Why the hack is Error not implemented for glob::PatternError\n \/\/ serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Result<IntoIter<File<'a>>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n debug!(\"Iterating ids and building files from them\");\n debug!(\" number of ids = {}\", ids.len());\n Ok(self.filter_map_ids_to_files(m, p, ids).into_iter())\n })\n .map_err(|e| {\n debug!(\"StorageBackend::iter_ids() returned error = {:?}\", e);\n let mut serr = StorageBackendError::new(\"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n use std::ops::Index;\n\n debug!(\"Searching for file with id '{}'\", id);\n\n if id.get_type() == FileIDType::NONE {\n \/\/ We don't know the hash type, so we glob() around a bit.\n debug!(\"Having FileIDType::NONE, so we glob() for the raw ID\");\n\n let id_str = id.get_id().unwrap_or(String::from(\"INVALID\"));\n let globstr = self.prefix_of_files_for_module(m) + \"*\" + &id_str[..] + \".imag\";\n debug!(\"Globbing with globstr = '{}'\", globstr);\n glob(&globstr[..]).map(|globlist| {\n let idvec = globlist_to_file_id_vec(globlist).into_iter();\n let mut vec = self.filter_map_ids_to_files(m, p, idvec);\n vec.reverse();\n vec.pop()\n }).unwrap_or({\n debug!(\"No glob matches, actually. We can't do anything at this point\");\n None\n })\n } else {\n \/\/ The (hash)type is already in the FileID object, so we can just\n \/\/ build a path from the information we already have\n debug!(\"We know FileIDType, so we build the path directly now\");\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success opening file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. 
I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] +\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n fn filter_map_ids_to_files<'a, HP>(&self,\n m: &'a Module,\n p: &Parser<HP>,\n ids: IntoIter<FileID>)\n -> Vec<File<'a>>\n where HP: FileHeaderParser\n {\n ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError\n where S: Into<String>\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n\nfn globlist_to_file_id_vec(globlist: Paths) -> Vec<FileID> {\n globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::<Vec<FileID>>()\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>uvcview<commit_after>use std::c_str::CString;\nuse std::cast::transmute;\nuse std::default::Default;\nuse std::fmt;\nuse std::io::{IoResult,IoError,OtherIoError,TypeUnknown,MismatchedFileTypeForOperation};\nuse std::io;\nuse std::libc::consts::os::posix88::{EINVAL};\nuse std::libc::{c_int,O_RDWR};\nuse std::libc;\nuse std::os;\nuse v4l2;\nuse v4l2::{v4l2_capability,v4l2_crop,v4l2_cropcap,v4l2_format,v4l2_ioctl};\n\npub struct UvcView {\n device_path: Path,\n fd: c_int,\n width: u32,\n height: u32,\n}\n\nimpl Default for UvcView {\n fn default() -> UvcView {\n UvcView {\n device_path: Path::new(\"\/dev\/video0\"),\n fd: -1,\n width: 640,\n height: 480,\n }\n }\n}\n\nimpl 
fmt::Show for UvcView {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f.buf, \"device_path : {}\\nfd : {}\\nwidth : {}\\nheight : {}\",\n self.device_path.display(), self.fd, self.width, self.height)\n }\n}\n\npub fn errno_msg() -> ~str {\n let errno = os::errno();\n let err_msg = unsafe {\n CString::new(libc::strerror(errno as c_int), false)\n .as_str().unwrap_or(\"unknown error\");\n };\n format!(\"{} ({}\", err_msg, errno)\n}\n\nimpl UvcView {\n pub fn open<'a>(&'a mut self) -> IoResult<&'a mut UvcView> {\n match self.device_path.stat() {\n Ok(stat) => {\n if stat.kind != io::TypeUnknown\/*TypeCharacter?*\/ {\n return Err(IoError {\n kind: io::MismatchedFileTypeForOperation,\n desc: \"open(): invalid file type\",\n detail: Some(format!(\"{} is not device\", self.device_path.display()))\n });\n }\n }\n Err(mut e) => {\n e.detail = Some(~\"open() failed\");\n return Err(e);\n }\n }\n\n static O_NONBLOCK: c_int = 04000;\n match self.device_path.with_c_str(|path| {\n unsafe { libc::open(path, O_RDWR | O_NONBLOCK, 0) }\n }) {\n -1 => {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"open() failed\",\n detail: Some(errno_msg())\n });\n }\n fd => {\n self.fd = fd;\n return Ok(self);\n }\n }\n }\n\n pub fn init<'a>(&'a mut self) -> IoResult<&'a mut UvcView> {\n let mut cap: v4l2::v4l2_capability = Default::default();\n match v4l2_ioctl(self.fd, v4l2::VIDIOC_QUERYCAP, unsafe { transmute(&mut cap) }) {\n Ok(_) => {\n if (cap.capabilities & v4l2::V4L2_CAP_VIDEO_CAPTURE) == 0 {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"init(): V4L2_CAP_VIDEO_CAPTURE not supported\",\n detail: Some(format!(\"{} is no video capture device\", self.device_path.display()))\n });\n }\n if (cap.capabilities & v4l2::V4L2_CAP_STREAMING) == 0 {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"init(): V4L2_CAP_STREAMING not supported\",\n detail: Some(format!(\"{} dose not support streaming i\/o\", self.device_path.display()))\n });\n }\n }\n Err(e) => {\n if e == EINVAL {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"init(): VIDIOC_QUERYCAP not supported\",\n detail: Some(format!(\"{} is no v4l2 device\", self.device_path.display()))\n });\n } else {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"init(): ioctl() returns -1\",\n detail: Some(errno_msg())\n });\n }\n }\n }\n\n \/\/ Select video input, video standard and tune here.\n\n let mut cropcap: v4l2::v4l2_cropcap = Default::default();\n\n match v4l2_ioctl(self.fd, v4l2::VIDIOC_CROPCAP, unsafe { transmute(&mut cropcap) }) {\n Ok(_) => {\n let mut crop: v4l2::v4l2_crop = Default::default();\n crop._type = v4l2::V4L2_BUF_TYPE_VIDEO_CAPTURE;\n crop.c = cropcap.defrect;\n\n\n match v4l2_ioctl(self.fd, v4l2::VIDIOC_S_CROP, unsafe { transmute(&mut crop) }) {\n Ok(_) => {}\n Err(EINVAL) => {\n \/\/ Cropping not supported.\n }\n Err(_) => {\n \/\/ Errors ignored.\n }\n }\n }\n Err(_) => {\n \/\/ Errors ignored.\n }\n }\n\n let mut fmt: v4l2_format = Default::default();\n fmt._type = v4l2::V4L2_BUF_TYPE_VIDEO_CAPTURE;\n let pix = fmt.fmt.pix();\n unsafe {\n (*pix).width = self.width;\n (*pix).height= self.height;\n (*pix).pixelformat = v4l2::V4L2_PIX_FMT_YUYV;\n (*pix).field = v4l2::V4L2_FIELD_INTERLACED; \/\/ TODO\n }\n\n match v4l2_ioctl(self.fd, v4l2::VIDIOC_S_FMT, unsafe { transmute(&mut fmt) }) {\n Ok(_) => {}\n Err(_) => {\n return Err(IoError {\n kind: io::OtherIoError,\n desc: \"init(): ioctl() returns -1\",\n detail: Some(errno_msg())\n });\n }\n }\n\n \/\/ Note VIDIOC_S_FMT may change 
width and height\n\n \/\/ Buggy driver paranoia\n unsafe {\n let mut min = (*pix).width * 2;\n if (*pix).bytesperline < min {\n (*pix).bytesperline = min;\n }\n min = (*pix).bytesperline * (*pix).height;\n if (*pix).sizeimage < min {\n (*pix).sizeimage = min;\n }\n\n if (*pix).width != self.width {\n self.width = (*pix).width;\n }\n if (*pix).height != self.height {\n self.height = (*pix).height;\n }\n }\n\n return Ok(self);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added exhaustive tuple macros<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Used to run some code when a value goes out of scope.\n\/\/\/ This is sometimes called a 'destructor'.\n\/\/\/\n\/\/\/ When a value goes out of scope, it will have its `drop` method called if\n\/\/\/ its type implements `Drop`. Then, any fields the value contains will also\n\/\/\/ be dropped recursively.\n\/\/\/\n\/\/\/ Because of this recursive dropping, you do not need to implement this trait\n\/\/\/ unless your type needs its own destructor logic.\n\/\/\/\n\/\/\/ Refer to [the chapter on `Drop` in *The Rust Programming Language*][book]\n\/\/\/ for some more elaboration.\n\/\/\/\n\/\/\/ [book]: ..\/..\/book\/second-edition\/ch15-03-drop.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ## Implementing `Drop`\n\/\/\/\n\/\/\/ The `drop` method is called when `_x` goes out of scope, and therefore\n\/\/\/ `main` prints `Dropping!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct HasDrop;\n\/\/\/\n\/\/\/ impl Drop for HasDrop {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _x = HasDrop;\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ ## Dropping is done recursively\n\/\/\/\n\/\/\/ When `outer` goes out of scope, the `drop` method will be called first for\n\/\/\/ `Outer`, then for `Inner`. 
Therefore, `main` prints `Dropping Outer!` and\n\/\/\/ then `Dropping Inner!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct Inner;\n\/\/\/ struct Outer(Inner);\n\/\/\/\n\/\/\/ impl Drop for Inner {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping Inner!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ impl Drop for Outer {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping Outer!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _x = Outer(Inner);\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ ## Variables are dropped in reverse order of declaration\n\/\/\/\n\/\/\/ `_first` is declared first and `_second` is declared second, so `main` will\n\/\/\/ print `Declared second!` and then `Declared first!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct PrintOnDrop(&'static str);\n\/\/\/\n\/\/\/ impl Drop for PrintOnDrop {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"{}\", self.0);\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _first = PrintOnDrop(\"Declared first!\");\n\/\/\/ let _second = PrintOnDrop(\"Declared second!\");\n\/\/\/ }\n\/\/\/ ```\n#[lang = \"drop\"]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Drop {\n \/\/\/ Executes the destructor for this type.\n \/\/\/\n \/\/\/ This method is called implilcitly when the value goes out of scope,\n \/\/\/ and cannot be called explicitly (this is compiler error [E0040]).\n \/\/\/ However, the [`std::mem::drop`] function in the prelude can be\n \/\/\/ used to call the argument's `Drop` implementation.\n \/\/\/\n \/\/\/ When this method has been called, `self` has not yet been deallocated.\n \/\/\/ That only happens after the method is over.\n \/\/\/ If this wasn't the case, `self` would be a dangling reference.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Given that a [`panic!`] will call `drop` as it unwinds, any [`panic!`]\n \/\/\/ in a `drop` implementation will likely abort.\n \/\/\/\n \/\/\/ [E0040]: ..\/..\/error-index.html#E0040\n \/\/\/ [`panic!`]: ..\/macro.panic.html\n \/\/\/ [`std::mem::drop`]: ..\/..\/std\/mem\/fn.drop.html\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn drop(&mut self);\n}\n<commit_msg>Update drop.rs<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Used to run some code when a value goes out of scope.\n\/\/\/ This is sometimes called a 'destructor'.\n\/\/\/\n\/\/\/ When a value goes out of scope, it will have its `drop` method called if\n\/\/\/ its type implements `Drop`. 
Then, any fields the value contains will also\n\/\/\/ be dropped recursively.\n\/\/\/\n\/\/\/ Because of this recursive dropping, you do not need to implement this trait\n\/\/\/ unless your type needs its own destructor logic.\n\/\/\/\n\/\/\/ Refer to [the chapter on `Drop` in *The Rust Programming Language*][book]\n\/\/\/ for some more elaboration.\n\/\/\/\n\/\/\/ [book]: ..\/..\/book\/second-edition\/ch15-03-drop.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ## Implementing `Drop`\n\/\/\/\n\/\/\/ The `drop` method is called when `_x` goes out of scope, and therefore\n\/\/\/ `main` prints `Dropping!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct HasDrop;\n\/\/\/\n\/\/\/ impl Drop for HasDrop {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _x = HasDrop;\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ ## Dropping is done recursively\n\/\/\/\n\/\/\/ When `outer` goes out of scope, the `drop` method will be called first for\n\/\/\/ `Outer`, then for `Inner`. Therefore, `main` prints `Dropping Outer!` and\n\/\/\/ then `Dropping Inner!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct Inner;\n\/\/\/ struct Outer(Inner);\n\/\/\/\n\/\/\/ impl Drop for Inner {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping Inner!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ impl Drop for Outer {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"Dropping Outer!\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _x = Outer(Inner);\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ ## Variables are dropped in reverse order of declaration\n\/\/\/\n\/\/\/ `_first` is declared first and `_second` is declared second, so `main` will\n\/\/\/ print `Declared second!` and then `Declared first!`.\n\/\/\/\n\/\/\/ ```\n\/\/\/ struct PrintOnDrop(&'static str);\n\/\/\/\n\/\/\/ impl Drop for PrintOnDrop {\n\/\/\/ fn drop(&mut self) {\n\/\/\/ println!(\"{}\", self.0);\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let _first = PrintOnDrop(\"Declared first!\");\n\/\/\/ let _second = PrintOnDrop(\"Declared second!\");\n\/\/\/ }\n\/\/\/ ```\n#[lang = \"drop\"]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Drop {\n \/\/\/ Executes the destructor for this type.\n \/\/\/\n \/\/\/ This method is called implicitly when the value goes out of scope,\n \/\/\/ and cannot be called explicitly (this is compiler error [E0040]).\n \/\/\/ However, the [`std::mem::drop`] function in the prelude can be\n \/\/\/ used to call the argument's `Drop` implementation.\n \/\/\/\n \/\/\/ When this method has been called, `self` has not yet been deallocated.\n \/\/\/ That only happens after the method is over.\n \/\/\/ If this wasn't the case, `self` would be a dangling reference.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ Given that a [`panic!`] will call `drop` as it unwinds, any [`panic!`]\n \/\/\/ in a `drop` implementation will likely abort.\n \/\/\/\n \/\/\/ [E0040]: ..\/..\/error-index.html#E0040\n \/\/\/ [`panic!`]: ..\/macro.panic.html\n \/\/\/ [`std::mem::drop`]: ..\/..\/std\/mem\/fn.drop.html\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn drop(&mut self);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Linked failure: Add '100 generations' bench test (#1868)<commit_after>\/\/ Test performance of a task \"spawn ladder\", in which children task have many\n\/\/ many ancestor taskgroups, but with only a few such groups alive at a time.\n\/\/ Each child task has to enlist as a descendant in each of its ancestor\n\/\/ groups, but that shouldn't have to happen for 
already-dead groups.\n\/\/\n\/\/ The filename is a reference; google it in quotes.\n\nfn child_generation(gens_left: uint) {\n \/\/ This used to be O(n^2) in the number of generations that ever existed.\n \/\/ With this code, only as many generations are alive at a time as tasks\n \/\/ alive at a time, \n do task::spawn_supervised {\n if gens_left & 1 == 1 {\n task::yield(); \/\/ shake things up a bit\n }\n if gens_left > 0 {\n child_generation(gens_left - 1); \/\/ recurse\n }\n }\n}\n\nfn main(args: ~[~str]) {\n let args = if os::getenv(~\"RUST_BENCH\").is_some() {\n ~[~\"\", ~\"100000\"]\n } else if args.len() <= 1u {\n ~[~\"\", ~\"100\"]\n } else {\n copy args\n };\n\n child_generation(uint::from_str(args[1]).get());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for the feature gating of naked<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[naked]\n\/\/~^ the `#[naked]` attribute is an experimental feature\nfn naked() {}\n\n#[naked]\n\/\/~^ the `#[naked]` attribute is an experimental feature\nfn naked_2() -> isize {\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>make sure raw ptrs only have to be valid as far as they are used<commit_after>\/\/ Deref a raw ptr to access a field of a large struct, where the field\n\/\/ is allocated but not the entire struct is.\n\/\/ For now, we want to allow this.\n\nfn main() {\n let x = (1, 1);\n let xptr = &x as *const _ as *const (i32, i32, i32);\n let _val = unsafe { (*xptr).1 };\n}\n<|endoftext|>"} {"text":"<commit_before>use inflector::Inflector;\n\nuse botocore::{Service, Shape};\n\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option<String> {\n None\n }\n}\n\npub fn generate_source(service: &Service) -> String {\n match &service.metadata.protocol[..] 
{\n \"json\" => generate(service, JsonGenerator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate<P>(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(),\n types = generate_types(service, &protocol_generator),\n )\n}\n\nfn generate_client<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}<P> where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: Region,\n }}\n\n impl<P> {type_name}<P> where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => {\n match service.metadata.endpoint_prefix {\n ref x if x == \"elastictranscoder\" => \"Amazon Elastic Transcoder\",\n _ => panic!(\"Unable to determine service abbreviation\"),\n }\n },\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, shape.member())\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n shape.key(),\n shape.value(),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec<u8>\",\n \"boolean\" => \"bool\",\n \"double\" | \"timestamp\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n if name == \"String\" {\n return protocol_generator.generate_support_types(name, shape, &service);\n }\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] 
{\n \"structure\" => parts.push(generate_struct(service, name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(name, shape)),\n \"list\" => parts.push(generate_list(name, shape)),\n shape_type => parts.push(generate_primitive_type(name, shape_type)),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_struct<P>(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = member_name.to_snake_case();\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n )]\".to_owned()\n );\n }\n }\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, member.shape));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, member.shape));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, member.shape));\n }\n\n lines.join(\"\\n\")\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n<commit_msg>tell serde to use Default::default() when a blob has no value in the JSON response<commit_after>use inflector::Inflector;\n\nuse botocore::{Service, Shape};\n\nuse self::json::JsonGenerator;\nuse self::query::QueryGenerator;\nuse self::rest_json::RestJsonGenerator;\n\nmod json;\nmod query;\nmod rest_json;\n\npub trait GenerateProtocol {\n fn generate_methods(&self, service: &Service) -> String;\n\n fn generate_prelude(&self) -> String;\n\n fn generate_struct_attributes(&self) -> String;\n\n fn generate_support_types(&self, _name: &str, _shape: &Shape, _service: &Service)\n -> Option<String> {\n None\n }\n}\n\npub fn generate_source(service: &Service) -> String {\n match &service.metadata.protocol[..] 
{\n \"json\" => generate(service, JsonGenerator),\n \"query\" => generate(service, QueryGenerator),\n \"rest-json\" => generate(service, RestJsonGenerator),\n protocol => panic!(\"Unknown protocol {}\", protocol),\n }\n}\n\nfn generate<P>(service: &Service, protocol_generator: P) -> String where P: GenerateProtocol {\n format!(\n \"{prelude}\n\n {types}\n\n {client}\",\n client = generate_client(service, &protocol_generator),\n prelude = &protocol_generator.generate_prelude(),\n types = generate_types(service, &protocol_generator),\n )\n}\n\nfn generate_client<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n format!(\n \"\/\/\/ A client for the {service_name} API.\n pub struct {type_name}<P> where P: ProvideAwsCredentials {{\n credentials_provider: P,\n region: Region,\n }}\n\n impl<P> {type_name}<P> where P: ProvideAwsCredentials {{\n pub fn new(credentials_provider: P, region: Region) -> Self {{\n {type_name} {{\n credentials_provider: credentials_provider,\n region: region,\n }}\n }}\n\n {methods}\n }}\n \",\n methods = protocol_generator.generate_methods(service),\n service_name = match &service.metadata.service_abbreviation {\n &Some(ref service_abbreviation) => service_abbreviation.as_str(),\n &None => {\n match service.metadata.endpoint_prefix {\n ref x if x == \"elastictranscoder\" => \"Amazon Elastic Transcoder\",\n _ => panic!(\"Unable to determine service abbreviation\"),\n }\n },\n },\n type_name = service.client_type_name(),\n )\n}\n\nfn generate_list(name: &str, shape: &Shape) -> String {\n format!(\"pub type {} = Vec<{}>;\", name, shape.member())\n}\n\nfn generate_map(name: &str, shape: &Shape) -> String {\n format!(\n \"pub type {} = ::std::collections::HashMap<{}, {}>;\",\n name,\n shape.key(),\n shape.value(),\n )\n}\n\nfn generate_primitive_type(name: &str, shape_type: &str) -> String {\n let primitive_type = match shape_type {\n \"blob\" => \"Vec<u8>\",\n \"boolean\" => \"bool\",\n \"double\" | \"timestamp\" => \"f64\",\n \"float\" => \"f32\",\n \"integer\" => \"i32\",\n \"long\" => \"i64\",\n \"string\" => \"String\",\n primitive_type => panic!(\"Unknown primitive type: {}\", primitive_type),\n };\n\n format!(\"pub type {} = {};\", name, primitive_type)\n}\n\nfn generate_types<P>(service: &Service, protocol_generator: &P) -> String\nwhere P: GenerateProtocol {\n service.shapes.iter().filter_map(|(name, shape)| {\n if name == \"String\" {\n return protocol_generator.generate_support_types(name, shape, &service);\n }\n\n let mut parts = Vec::with_capacity(3);\n\n if let Some(ref docs) = shape.documentation {\n parts.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n match &shape.shape_type[..] 
{\n \"structure\" => parts.push(generate_struct(service, name, shape, protocol_generator)),\n \"map\" => parts.push(generate_map(name, shape)),\n \"list\" => parts.push(generate_list(name, shape)),\n shape_type => parts.push(generate_primitive_type(name, shape_type)),\n }\n\n if let Some(support_types) = protocol_generator.generate_support_types(name, shape, &service) {\n parts.push(support_types);\n }\n\n Some(parts.join(\"\\n\"))\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_struct<P>(\n service: &Service,\n name: &str,\n shape: &Shape,\n protocol_generator: &P,\n) -> String where P: GenerateProtocol {\n if shape.members.is_none() || shape.members.as_ref().unwrap().is_empty() {\n format!(\n \"{attributes}\n pub struct {name};\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n )\n } else {\n format!(\n \"{attributes}\n pub struct {name} {{\n {struct_fields}\n }}\n \",\n attributes = protocol_generator.generate_struct_attributes(),\n name = name,\n struct_fields = generate_struct_fields(service, shape),\n )\n }\n\n}\n\nfn generate_struct_fields(service: &Service, shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n let mut lines = Vec::with_capacity(4);\n let name = member_name.to_snake_case();\n\n if let Some(ref docs) = member.documentation {\n lines.push(format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")));\n }\n\n lines.push(\"#[allow(unused_attributes)]\".to_owned());\n lines.push(format!(\"#[serde(rename=\\\"{}\\\")]\", member_name));\n\n if let Some(shape_type) = service.shape_type_for_member(member) {\n if shape_type == \"blob\" {\n lines.push(\n \"#[serde(\n deserialize_with=\\\"::serialization::SerdeBlob::deserialize_blob\\\",\n serialize_with=\\\"::serialization::SerdeBlob::serialize_blob\\\",\n default,\n )]\".to_owned()\n );\n }\n }\n\n if shape.required(member_name) {\n lines.push(format!(\"pub {}: {},\", name, member.shape));\n } else if name == \"type\" {\n lines.push(format!(\"pub aws_{}: Option<{}>,\", name, member.shape));\n } else {\n lines.push(format!(\"pub {}: Option<{}>,\", name, member.shape));\n }\n\n lines.join(\"\\n\")\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
Data needed by the layout task.\n\n#![allow(unsafe_blocks)]\n\nuse css::matching::{ApplicableDeclarationsCache, StyleSharingCandidateCache};\n\nuse geom::{Rect, Size2D};\nuse gfx::display_list::OpaqueNode;\nuse gfx::font_context::FontContext;\nuse gfx::font_cache_task::FontCacheTask;\nuse script::layout_interface::LayoutChan;\nuse script_traits::UntrustedNodeAddress;\nuse msg::constellation_msg::ConstellationChan;\nuse net::local_image_cache::LocalImageCache;\nuse servo_util::geometry::Au;\nuse std::cell::Cell;\nuse std::mem;\nuse std::ptr;\nuse std::sync::{Arc, Mutex};\nuse style::selector_matching::Stylist;\nuse url::Url;\n\nstruct LocalLayoutContext {\n font_context: FontContext,\n applicable_declarations_cache: ApplicableDeclarationsCache,\n style_sharing_candidate_cache: StyleSharingCandidateCache,\n}\n\nthread_local!(static LOCAL_CONTEXT_KEY: Cell<*mut LocalLayoutContext> = Cell::new(ptr::null_mut()));\n\nfn create_or_get_local_context(shared_layout_context: &SharedLayoutContext) -> *mut LocalLayoutContext {\n LOCAL_CONTEXT_KEY.with(|ref r| {\n if r.get().is_null() {\n let context = box LocalLayoutContext {\n font_context: FontContext::new(shared_layout_context.font_cache_task.clone()),\n applicable_declarations_cache: ApplicableDeclarationsCache::new(),\n style_sharing_candidate_cache: StyleSharingCandidateCache::new(),\n };\n r.set(unsafe { mem::transmute(context) });\n }\n\n r.get()\n })\n}\n\npub struct SharedLayoutContext {\n \/\/\/ The local image cache.\n pub image_cache: Arc<Mutex<LocalImageCache<UntrustedNodeAddress>>>,\n\n \/\/\/ The current screen size.\n pub screen_size: Size2D<Au>,\n\n \/\/\/ A channel up to the constellation.\n pub constellation_chan: ConstellationChan,\n\n \/\/\/ A channel up to the layout task.\n pub layout_chan: LayoutChan,\n\n \/\/\/ Interface to the font cache task.\n pub font_cache_task: FontCacheTask,\n\n \/\/\/ The CSS selector stylist.\n \/\/\/\n \/\/\/ FIXME(#2604): Make this no longer an unsafe pointer once we have fast `RWArc`s.\n pub stylist: *const Stylist,\n\n \/\/\/ The root node at which we're starting the layout.\n pub reflow_root: OpaqueNode,\n\n \/\/\/ The URL.\n pub url: Url,\n\n \/\/\/ The dirty rectangle, used during display list building.\n pub dirty: Rect<Au>,\n\n \/\/\/ Starts at zero, and increased by one every time a layout completes.\n \/\/\/ This can be used to easily check for invalid stale data.\n pub generation: uint,\n}\n\npub struct SharedLayoutContextWrapper(pub *const SharedLayoutContext);\nunsafe impl Send for SharedLayoutContextWrapper {}\n\npub struct LayoutContext<'a> {\n pub shared: &'a SharedLayoutContext,\n cached_local_layout_context: *mut LocalLayoutContext,\n}\n\nimpl<'a> LayoutContext<'a> {\n pub fn new(shared_layout_context: &'a SharedLayoutContext) -> LayoutContext<'a> {\n\n let local_context = create_or_get_local_context(shared_layout_context);\n\n LayoutContext {\n shared: shared_layout_context,\n cached_local_layout_context: local_context,\n }\n }\n\n #[inline(always)]\n pub fn font_context<'b>(&'b self) -> &'b mut FontContext {\n unsafe {\n let cached_context = &mut *self.cached_local_layout_context;\n &mut cached_context.font_context\n }\n }\n\n #[inline(always)]\n pub fn applicable_declarations_cache<'b>(&'b self) -> &'b mut ApplicableDeclarationsCache {\n unsafe {\n let cached_context = &mut *self.cached_local_layout_context;\n &mut cached_context.applicable_declarations_cache\n }\n }\n\n #[inline(always)]\n pub fn style_sharing_candidate_cache<'b>(&'b self) -> &'b mut StyleSharingCandidateCache 
{\n unsafe {\n let cached_context = &mut *self.cached_local_layout_context;\n &mut cached_context.style_sharing_candidate_cache\n }\n }\n}\n<commit_msg>Use boxed::into_raw in create_or_get_local_context.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Data needed by the layout task.\n\n#![allow(unsafe_blocks)]\n\nuse css::matching::{ApplicableDeclarationsCache, StyleSharingCandidateCache};\n\nuse geom::{Rect, Size2D};\nuse gfx::display_list::OpaqueNode;\nuse gfx::font_context::FontContext;\nuse gfx::font_cache_task::FontCacheTask;\nuse script::layout_interface::LayoutChan;\nuse script_traits::UntrustedNodeAddress;\nuse msg::constellation_msg::ConstellationChan;\nuse net::local_image_cache::LocalImageCache;\nuse servo_util::geometry::Au;\nuse std::boxed;\nuse std::cell::Cell;\nuse std::ptr;\nuse std::sync::{Arc, Mutex};\nuse style::selector_matching::Stylist;\nuse url::Url;\n\nstruct LocalLayoutContext {\n font_context: FontContext,\n applicable_declarations_cache: ApplicableDeclarationsCache,\n style_sharing_candidate_cache: StyleSharingCandidateCache,\n}\n\nthread_local!(static LOCAL_CONTEXT_KEY: Cell<*mut LocalLayoutContext> = Cell::new(ptr::null_mut()));\n\nfn create_or_get_local_context(shared_layout_context: &SharedLayoutContext) -> *mut LocalLayoutContext {\n LOCAL_CONTEXT_KEY.with(|ref r| {\n if r.get().is_null() {\n let context = box LocalLayoutContext {\n font_context: FontContext::new(shared_layout_context.font_cache_task.clone()),\n applicable_declarations_cache: ApplicableDeclarationsCache::new(),\n style_sharing_candidate_cache: StyleSharingCandidateCache::new(),\n };\n r.set(unsafe { boxed::into_raw(context) });\n }\n\n r.get()\n })\n}\n\npub struct SharedLayoutContext {\n \/\/\/ The local image cache.\n pub image_cache: Arc<Mutex<LocalImageCache<UntrustedNodeAddress>>>,\n\n \/\/\/ The current screen size.\n pub screen_size: Size2D<Au>,\n\n \/\/\/ A channel up to the constellation.\n pub constellation_chan: ConstellationChan,\n\n \/\/\/ A channel up to the layout task.\n pub layout_chan: LayoutChan,\n\n \/\/\/ Interface to the font cache task.\n pub font_cache_task: FontCacheTask,\n\n \/\/\/ The CSS selector stylist.\n \/\/\/\n \/\/\/ FIXME(#2604): Make this no longer an unsafe pointer once we have fast `RWArc`s.\n pub stylist: *const Stylist,\n\n \/\/\/ The root node at which we're starting the layout.\n pub reflow_root: OpaqueNode,\n\n \/\/\/ The URL.\n pub url: Url,\n\n \/\/\/ The dirty rectangle, used during display list building.\n pub dirty: Rect<Au>,\n\n \/\/\/ Starts at zero, and increased by one every time a layout completes.\n \/\/\/ This can be used to easily check for invalid stale data.\n pub generation: uint,\n}\n\npub struct SharedLayoutContextWrapper(pub *const SharedLayoutContext);\nunsafe impl Send for SharedLayoutContextWrapper {}\n\npub struct LayoutContext<'a> {\n pub shared: &'a SharedLayoutContext,\n cached_local_layout_context: *mut LocalLayoutContext,\n}\n\nimpl<'a> LayoutContext<'a> {\n pub fn new(shared_layout_context: &'a SharedLayoutContext) -> LayoutContext<'a> {\n\n let local_context = create_or_get_local_context(shared_layout_context);\n\n LayoutContext {\n shared: shared_layout_context,\n cached_local_layout_context: local_context,\n }\n }\n\n #[inline(always)]\n pub fn font_context<'b>(&'b self) -> &'b mut FontContext {\n unsafe {\n let cached_context = &mut 
*self.cached_local_layout_context;\n &mut cached_context.font_context\n }\n }\n\n #[inline(always)]\n pub fn applicable_declarations_cache<'b>(&'b self) -> &'b mut ApplicableDeclarationsCache {\n unsafe {\n let cached_context = &mut *self.cached_local_layout_context;\n &mut cached_context.applicable_declarations_cache\n }\n }\n\n #[inline(always)]\n pub fn style_sharing_candidate_cache<'b>(&'b self) -> &'b mut StyleSharingCandidateCache {\n unsafe {\n let cached_context = &mut *self.cached_local_layout_context;\n &mut cached_context.style_sharing_candidate_cache\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: add test case for partial indent issue #482<commit_after>use handlebars::*;\nuse serde_json::json;\n\n#[test]\nfn test_partial_indent() {\n let outer = r#\" {{> inner inner_solo}}\n\n{{#each inners}}\n {{> inner}}\n{{\/each}}\n\n {{#each inners}}\n {{> inner}}\n {{\/each}}\n\"#;\n let inner = r#\"name: {{name}}\n\"#;\n\n let mut hbs = Handlebars::new();\n\n hbs.register_template_string(\"inner\", inner).unwrap();\n hbs.register_template_string(\"outer\", outer).unwrap();\n\n let result = hbs\n .render(\n \"outer\",\n &json!({\n \"inner_solo\": {\"name\": \"inner_solo\"},\n \"inners\": [\n {\"name\": \"hello\"},\n {\"name\": \"there\"}\n ]\n }),\n )\n .unwrap();\n\n assert_eq!(\n result,\n r#\" name: inner_solo\n\n name: hello\n name: there\n\n name: hello\n name: there\n\"#\n );\n}\n\/\/ Rule::partial_expression should not trim new lines by default\n<|endoftext|>"} {"text":"<commit_before>use quire::validate as V;\n\nuse build_step::{BuildStep, VersionError, StepError, Digest, Config, Guard};\n\n\n\/\/ Build Steps\n#[derive(Debug)]\npub struct Install(Vec<String>);\ntuple_struct_decode!(Install);\n\nimpl Install {\n pub fn config() -> V::Sequence<'static> {\n V::Sequence::new(V::Scalar::new())\n }\n}\n\n#[derive(Debug)]\npub struct BuildDeps(Vec<String>);\ntuple_struct_decode!(BuildDeps);\n\nimpl BuildDeps {\n pub fn config() -> V::Sequence<'static> {\n V::Sequence::new(V::Scalar::new())\n }\n}\n\nimpl BuildStep for Install {\n fn hash(&self, _cfg: &Config, hash: &mut Digest)\n -> Result<(), VersionError>\n {\n hash.sequence(\"Install\", &self.0);\n Ok(())\n }\n fn build(&self, guard: &mut Guard, build: bool)\n -> Result<(), StepError>\n {\n guard.ctx.packages.extend(self.0.clone().into_iter());\n for i in self.0.iter() {\n guard.ctx.build_deps.remove(i);\n }\n if build {\n try!(guard.distro.install(&mut guard.ctx, &self.0));\n }\n Ok(())\n }\n fn is_dependent_on(&self) -> Option<&str> {\n None\n }\n}\n\nimpl BuildStep for BuildDeps {\n fn hash(&self, _cfg: &Config, hash: &mut Digest)\n -> Result<(), VersionError>\n {\n hash.sequence(\"BuildDeps\", &self.0);\n Ok(())\n }\n fn build(&self, guard: &mut Guard, build: bool)\n -> Result<(), StepError>\n {\n for i in self.0.iter() {\n if !guard.ctx.packages.contains(i) {\n guard.ctx.build_deps.insert(i.clone());\n }\n }\n if build {\n try!(guard.distro.install(&mut guard.ctx, &self.0));\n }\n Ok(())\n }\n fn is_dependent_on(&self) -> Option<&str> {\n None\n }\n}\n<commit_msg>Fix edge cases of builddeps (#266)<commit_after>use quire::validate as V;\n\nuse build_step::{BuildStep, VersionError, StepError, Digest, Config, Guard};\n\n\n\/\/ Build Steps\n#[derive(Debug)]\npub struct Install(Vec<String>);\ntuple_struct_decode!(Install);\n\nimpl Install {\n pub fn config() -> V::Sequence<'static> {\n V::Sequence::new(V::Scalar::new())\n }\n}\n\n#[derive(Debug)]\npub struct 
BuildDeps(Vec<String>);\ntuple_struct_decode!(BuildDeps);\n\nimpl BuildDeps {\n pub fn config() -> V::Sequence<'static> {\n V::Sequence::new(V::Scalar::new())\n }\n}\n\nimpl BuildStep for Install {\n fn hash(&self, _cfg: &Config, hash: &mut Digest)\n -> Result<(), VersionError>\n {\n hash.sequence(\"Install\", &self.0);\n Ok(())\n }\n fn build(&self, guard: &mut Guard, build: bool)\n -> Result<(), StepError>\n {\n guard.ctx.packages.extend(self.0.clone().into_iter());\n for i in self.0.iter() {\n guard.ctx.build_deps.remove(i);\n }\n if build {\n try!(guard.distro.install(&mut guard.ctx, &self.0));\n }\n Ok(())\n }\n fn is_dependent_on(&self) -> Option<&str> {\n None\n }\n}\n\nimpl BuildStep for BuildDeps {\n fn hash(&self, _cfg: &Config, hash: &mut Digest)\n -> Result<(), VersionError>\n {\n hash.sequence(\"BuildDeps\", &self.0);\n Ok(())\n }\n fn build(&self, guard: &mut Guard, build: bool)\n -> Result<(), StepError>\n {\n if build {\n for i in self.0.iter() {\n if !guard.ctx.packages.contains(i) {\n guard.ctx.build_deps.insert(i.clone());\n }\n }\n try!(guard.distro.install(&mut guard.ctx, &self.0));\n }\n Ok(())\n }\n fn is_dependent_on(&self) -> Option<&str> {\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>work with the test framework, not against it<commit_after><|endoftext|>"} {"text":"<commit_before>\n#![feature(globs)]\n\nextern crate graphics;\nextern crate piston;\nextern crate sdl2_game_window;\nextern crate opengl_graphics;\n\nuse opengl_graphics::{\n Gl,\n Texture,\n};\nuse sdl2_game_window::GameWindowSDL2;\nuse graphics::*;\nuse piston::{\n AssetStore,\n GameIterator,\n GameIteratorSettings,\n GameWindowSettings,\n Render,\n};\n\n#[start]\nfn start(argc: int, argv: **u8) -> int {\n \/\/ Run gui on the main thread.\n native::start(argc, argv, main)\n}\n\nfn main() {\n let mut window = GameWindowSDL2::new(\n GameWindowSettings {\n title: \"Image\".to_string(),\n size: [300, 300],\n fullscreen: false,\n exit_on_esc: true,\n }\n );\n\n let asset_store = AssetStore::from_folder(\"assets\");\n\n let image = asset_store.path(\"rust-logo.png\").unwrap();\n let image = Texture::from_path(&image).unwrap();\n let game_iter_settings = GameIteratorSettings {\n updates_per_second: 120,\n max_frames_per_second: 60,\n };\n let mut game_iter = GameIterator::new(&mut window, &game_iter_settings);\n let ref mut gl = Gl::new();\n loop {\n match game_iter.next() {\n None => { break },\n Some(e) => match e {\n Render(args) => {\n gl.viewport(0, 0, args.width as i32, args.height as i32);\n\n let c = Context::abs(args.width as f64, args.height as f64);\n c.rgb(1.0, 1.0, 1.0).draw(gl);\n c.image(&image).draw(gl);\n },\n _ => {}, \n },\n }\n }\n}\n\n\n<commit_msg>Removed 'image_iter.rs'<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Pass path to ui builder function when creating mock app<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution for Exercise 6.4<commit_after>\/\/\/ Exercise 6.4 Calculate the latest time that can be represented by the time_t data type.\n\/\/\/ After it wraps around, what happens?\n\nextern crate libc;\n#[macro_use(cstr)]\nextern crate apue;\n\nuse libc::{tm, time_t, c_char, size_t, printf};\nuse std::mem::uninitialized;\n\nuse apue::{CArray};\n\nextern \"C\" {\n fn localtime(time: *const time_t) -> *mut tm;\n fn strftime(s: *mut c_char,\n maxsize: size_t,\n format: *const c_char,\n timeptr: *const tm)\n -> size_t;\n}\n\nfn main() {\n unsafe {\n let buf: [c_char; 1024] = uninitialized();\n let mut t: time_t 
= 1;\n loop {\n println!(\"{:?}\", t - 1);\n strftime(buf.as_char(), 1024, cstr!(\"%a %b %d, %Y\"), localtime(&mut (t - 1)));\n printf(cstr!(\"%s\\n\"), buf.as_ptr());\n t *= 2;\n }\n }\n}\n\n\/\/ Answer: there's a Segmentation fault on strftime. This probably happens when the year\n\/\/ within tm is bigger than 2147483647 (max i32 value). There's no \"wrap around\". Either\n\/\/ the question is intentionally misleading or there are some systems where it wraps\n\/\/ around. On OSX or Linux it doesn't.<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(append)]\n#![feature(exit_status)]\n#![feature(fs_walk)]\n#![feature(path_ext)]\n#![feature(slice_extras)]\n#![feature(slice_patterns)]\n#![feature(test)]\n\n#[macro_use] extern crate bitflags;\nextern crate png;\nextern crate test;\nextern crate url;\nextern crate util;\n\nuse std::env;\nuse std::ffi::OsStr;\nuse std::fs::{PathExt, File, walk_dir};\nuse std::io::{self, Read, Result};\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio};\nuse test::{AutoColor, DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn, ShouldPanic};\nuse test::run_tests_console;\nuse url::Url;\n\nbitflags!(\n flags RenderMode: u32 {\n const CPU_RENDERING = 0x00000001,\n const GPU_RENDERING = 0x00000010,\n const LINUX_TARGET = 0x00000100,\n const MACOS_TARGET = 0x00001000,\n const ANDROID_TARGET = 0x00010000\n }\n);\n\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n let mut parts = args.tail().split(|e| &**e == \"--\");\n\n let harness_args = parts.next().unwrap(); \/\/ .split() is never empty\n let servo_args = parts.next().unwrap_or(&[]);\n\n let (render_mode_string, base_path, testname) = match harness_args {\n [] | [_] => panic!(\"USAGE: cpu|gpu base_path [testname regex]\"),\n [ref render_mode_string, ref base_path] => (render_mode_string, base_path, None),\n [ref render_mode_string, ref base_path, ref testname, ..] 
=>\n (render_mode_string, base_path, Some(testname.clone())),\n };\n\n let mut render_mode = match &**render_mode_string {\n \"cpu\" => CPU_RENDERING,\n \"gpu\" => GPU_RENDERING,\n _ => panic!(\"First argument must specify cpu or gpu as rendering mode\")\n };\n if cfg!(target_os = \"linux\") {\n render_mode.insert(LINUX_TARGET);\n }\n if cfg!(target_os = \"macos\") {\n render_mode.insert(MACOS_TARGET);\n }\n if cfg!(target_os = \"android\") {\n render_mode.insert(ANDROID_TARGET);\n }\n\n let mut all_tests = vec!();\n println!(\"Scanning {} for manifests\\n\", base_path);\n\n for file in walk_dir(base_path).unwrap() {\n let file = file.unwrap().path();\n let maybe_extension = file.extension();\n match maybe_extension {\n Some(extension) => {\n if extension == OsStr::new(\"list\") && file.is_file() {\n let mut tests = parse_lists(&file, servo_args, render_mode, all_tests.len());\n println!(\"\\t{} [{} tests]\", file.display(), tests.len());\n all_tests.append(&mut tests);\n }\n }\n _ => {}\n }\n }\n\n let test_opts = TestOpts {\n filter: testname,\n run_ignored: false,\n logfile: None,\n run_tests: true,\n bench_benchmarks: false,\n nocapture: false,\n color: AutoColor,\n };\n\n match run(test_opts,\n all_tests,\n servo_args.iter().map(|x| x.clone()).collect()) {\n Ok(false) => env::set_exit_status(1), \/\/ tests failed\n Err(_) => env::set_exit_status(2), \/\/ I\/O-related failure\n _ => (),\n }\n}\n\nfn run(test_opts: TestOpts, all_tests: Vec<TestDescAndFn>,\n servo_args: Vec<String>) -> io::Result<bool> {\n \/\/ Verify that we're passing in valid servo arguments. Otherwise, servo\n \/\/ will exit before we've run any tests, and it will appear to us as if\n \/\/ all the tests are failing.\n let output = match Command::new(&servo_path()).args(&servo_args).output() {\n Ok(p) => p,\n Err(e) => panic!(\"failed to execute process: {}\", e),\n };\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n if stderr.contains(\"Unrecognized\") {\n println!(\"Servo: {}\", stderr);\n return Ok(false);\n }\n\n run_tests_console(&test_opts, all_tests)\n}\n\n#[derive(PartialEq)]\nenum ReftestKind {\n Same,\n Different,\n}\n\nstruct Reftest {\n name: String,\n kind: ReftestKind,\n files: [PathBuf; 2],\n id: usize,\n servo_args: Vec<String>,\n render_mode: RenderMode,\n is_flaky: bool,\n experimental: bool,\n fragment_identifier: Option<String>,\n resolution: Option<String>,\n}\n\nstruct TestLine<'a> {\n conditions: &'a str,\n kind: &'a str,\n file_left: &'a str,\n file_right: &'a str,\n}\n\nfn parse_lists(file: &Path, servo_args: &[String], render_mode: RenderMode, id_offset: usize) -> Vec<TestDescAndFn> {\n let mut tests = Vec::new();\n let contents = {\n let mut f = File::open(file).unwrap();\n let mut contents = String::new();\n f.read_to_string(&mut contents).unwrap();\n contents\n };\n\n for line in contents.lines() {\n \/\/ ignore comments or empty lines\n if line.starts_with(\"#\") || line.is_empty() {\n continue;\n }\n\n let parts: Vec<&str> = line.split(' ').filter(|p| !p.is_empty()).collect();\n\n let test_line = match parts.len() {\n 3 => TestLine {\n conditions: \"\",\n kind: parts[0],\n file_left: parts[1],\n file_right: parts[2],\n },\n 4 => TestLine {\n conditions: parts[0],\n kind: parts[1],\n file_left: parts[2],\n file_right: parts[3],\n },\n _ => panic!(\"reftest line: '{}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'\", line),\n };\n\n let kind = match test_line.kind {\n \"==\" => ReftestKind::Same,\n \"!=\" => ReftestKind::Different,\n part => panic!(\"reftest line: '{}' has 
invalid kind '{}'\", line, part)\n };\n\n let base = env::current_dir().unwrap().join(file.parent().unwrap());\n\n let file_left = base.join(test_line.file_left);\n let file_right = base.join(test_line.file_right);\n\n let conditions_list = test_line.conditions.split(',');\n let mut flakiness = RenderMode::empty();\n let mut experimental = false;\n let mut fragment_identifier = None;\n let mut resolution = None;\n for condition in conditions_list {\n match condition {\n \"flaky_cpu\" => flakiness.insert(CPU_RENDERING),\n \"flaky_gpu\" => flakiness.insert(GPU_RENDERING),\n \"flaky_linux\" => flakiness.insert(LINUX_TARGET),\n \"flaky_macos\" => flakiness.insert(MACOS_TARGET),\n \"experimental\" => experimental = true,\n _ => (),\n }\n if condition.starts_with(\"fragment=\") {\n fragment_identifier = Some(condition[\"fragment=\".len()..].to_string());\n }\n if condition.starts_with(\"resolution=\") {\n resolution = Some(condition[\"resolution=\".len() ..].to_string());\n }\n }\n\n let reftest = Reftest {\n name: format!(\"{} {} {}\", test_line.file_left, test_line.kind, test_line.file_right),\n kind: kind,\n files: [file_left, file_right],\n id: id_offset + tests.len(),\n render_mode: render_mode,\n servo_args: servo_args.to_vec(),\n is_flaky: render_mode.intersects(flakiness),\n experimental: experimental,\n fragment_identifier: fragment_identifier,\n resolution: resolution,\n };\n\n tests.push(make_test(reftest));\n }\n tests\n}\n\nfn make_test(reftest: Reftest) -> TestDescAndFn {\n let name = reftest.name.clone();\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(name),\n ignore: false,\n should_panic: ShouldPanic::No,\n },\n testfn: DynTestFn(Box::new(move || {\n check_reftest(reftest);\n })),\n }\n}\n\nfn capture(reftest: &Reftest, side: usize) -> (u32, u32, Vec<u8>) {\n let png_filename = format!(\"\/tmp\/servo-reftest-{:06}-{}.png\", reftest.id, side);\n let mut command = Command::new(&servo_path());\n command\n .stdout(Stdio::null())\n .stderr(Stdio::null())\n .args(&reftest.servo_args[..])\n .arg(\"--user-stylesheet\").arg(util::resource_files::resources_dir_path().join(\"ahem.css\"))\n \/\/ Allows pixel perfect rendering of Ahem font and the HTML canvas for reftests.\n .arg(\"-Z\")\n .arg(\"disable-text-aa,disable-canvas-aa\")\n .args(&[\"-f\", \"-o\"])\n .arg(&png_filename)\n .arg(&{\n let mut url = Url::from_file_path(&*reftest.files[side]).unwrap();\n url.fragment = reftest.fragment_identifier.clone();\n url.to_string()\n });\n \/\/ CPU rendering is the default\n if reftest.render_mode.contains(CPU_RENDERING) {\n command.arg(\"-c\");\n }\n if reftest.render_mode.contains(GPU_RENDERING) {\n command.arg(\"-g\");\n }\n if reftest.experimental {\n command.arg(\"--experimental\");\n }\n if let Some(ref resolution) = reftest.resolution {\n command.arg(\"--resolution\");\n command.arg(resolution);\n }\n let retval = match command.status() {\n Ok(status) => status,\n Err(e) => panic!(\"failed to execute process: {}\", e),\n };\n assert!(retval.success());\n\n let image = png::load_png(&png_filename).unwrap();\n let rgba8_bytes = match image.pixels {\n png::PixelsByColorType::RGBA8(pixels) => pixels,\n _ => panic!(),\n };\n (image.width, image.height, rgba8_bytes)\n}\n\nfn servo_path() -> PathBuf {\n let current_exe = env::current_exe().ok().expect(\"Could not locate current executable\");\n current_exe.parent().unwrap().join(\"servo\")\n}\n\nfn check_reftest(reftest: Reftest) {\n let (left_width, left_height, left_bytes) = capture(&reftest, 0);\n let (right_width, right_height, 
right_bytes) = capture(&reftest, 1);\n\n assert_eq!(left_width, right_width);\n assert_eq!(left_height, right_height);\n\n let left_all_white = left_bytes.iter().all(|&p| p == 255);\n let right_all_white = right_bytes.iter().all(|&p| p == 255);\n\n if left_all_white && right_all_white {\n panic!(\"Both renderings are empty\")\n }\n\n let pixels = left_bytes.iter().zip(right_bytes.iter()).map(|(&a, &b)| {\n if a == b {\n \/\/ White for correct\n 0xFF\n } else {\n \/\/ \"1100\" in the RGBA channel with an error for an incorrect value\n \/\/ This results in some number of C0 and FFs, which is much more\n \/\/ readable (and distinguishable) than the previous difference-wise\n \/\/ scaling but does not require reconstructing the actual RGBA pixel.\n 0xC0\n }\n }).collect::<Vec<u8>>();\n\n if pixels.iter().any(|&a| a < 255) {\n let output = format!(\"\/tmp\/servo-reftest-{:06}-diff.png\", reftest.id);\n\n let mut img = png::Image {\n width: left_width,\n height: left_height,\n pixels: png::PixelsByColorType::RGBA8(pixels),\n };\n let res = png::store_png(&mut img, &output);\n assert!(res.is_ok());\n\n match (reftest.kind, reftest.is_flaky) {\n (ReftestKind::Same, true) => println!(\"flaky test - rendering difference: {}\", output),\n (ReftestKind::Same, false) => panic!(\"rendering difference: {}\", output),\n (ReftestKind::Different, _) => {} \/\/ Result was different and that's what was expected\n }\n } else {\n assert!(reftest.is_flaky || reftest.kind == ReftestKind::Same);\n }\n}\n<commit_msg>Stop using [T]::tail.<commit_after>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(append)]\n#![feature(exit_status)]\n#![feature(fs_walk)]\n#![feature(path_ext)]\n#![feature(slice_patterns)]\n#![feature(test)]\n\n#[macro_use] extern crate bitflags;\nextern crate png;\nextern crate test;\nextern crate url;\nextern crate util;\n\nuse std::env;\nuse std::ffi::OsStr;\nuse std::fs::{PathExt, File, walk_dir};\nuse std::io::{self, Read, Result};\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio};\nuse test::{AutoColor, DynTestName, DynTestFn, TestDesc, TestOpts, TestDescAndFn, ShouldPanic};\nuse test::run_tests_console;\nuse url::Url;\n\nbitflags!(\n flags RenderMode: u32 {\n const CPU_RENDERING = 0x00000001,\n const GPU_RENDERING = 0x00000010,\n const LINUX_TARGET = 0x00000100,\n const MACOS_TARGET = 0x00001000,\n const ANDROID_TARGET = 0x00010000\n }\n);\n\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n let mut parts = args[1..].split(|e| &**e == \"--\");\n\n let harness_args = parts.next().unwrap(); \/\/ .split() is never empty\n let servo_args = parts.next().unwrap_or(&[]);\n\n let (render_mode_string, base_path, testname) = match harness_args {\n [] | [_] => panic!(\"USAGE: cpu|gpu base_path [testname regex]\"),\n [ref render_mode_string, ref base_path] => (render_mode_string, base_path, None),\n [ref render_mode_string, ref base_path, ref testname, ..] 
=>\n (render_mode_string, base_path, Some(testname.clone())),\n };\n\n let mut render_mode = match &**render_mode_string {\n \"cpu\" => CPU_RENDERING,\n \"gpu\" => GPU_RENDERING,\n _ => panic!(\"First argument must specify cpu or gpu as rendering mode\")\n };\n if cfg!(target_os = \"linux\") {\n render_mode.insert(LINUX_TARGET);\n }\n if cfg!(target_os = \"macos\") {\n render_mode.insert(MACOS_TARGET);\n }\n if cfg!(target_os = \"android\") {\n render_mode.insert(ANDROID_TARGET);\n }\n\n let mut all_tests = vec!();\n println!(\"Scanning {} for manifests\\n\", base_path);\n\n for file in walk_dir(base_path).unwrap() {\n let file = file.unwrap().path();\n let maybe_extension = file.extension();\n match maybe_extension {\n Some(extension) => {\n if extension == OsStr::new(\"list\") && file.is_file() {\n let mut tests = parse_lists(&file, servo_args, render_mode, all_tests.len());\n println!(\"\\t{} [{} tests]\", file.display(), tests.len());\n all_tests.append(&mut tests);\n }\n }\n _ => {}\n }\n }\n\n let test_opts = TestOpts {\n filter: testname,\n run_ignored: false,\n logfile: None,\n run_tests: true,\n bench_benchmarks: false,\n nocapture: false,\n color: AutoColor,\n };\n\n match run(test_opts,\n all_tests,\n servo_args.iter().map(|x| x.clone()).collect()) {\n Ok(false) => env::set_exit_status(1), \/\/ tests failed\n Err(_) => env::set_exit_status(2), \/\/ I\/O-related failure\n _ => (),\n }\n}\n\nfn run(test_opts: TestOpts, all_tests: Vec<TestDescAndFn>,\n servo_args: Vec<String>) -> io::Result<bool> {\n \/\/ Verify that we're passing in valid servo arguments. Otherwise, servo\n \/\/ will exit before we've run any tests, and it will appear to us as if\n \/\/ all the tests are failing.\n let output = match Command::new(&servo_path()).args(&servo_args).output() {\n Ok(p) => p,\n Err(e) => panic!(\"failed to execute process: {}\", e),\n };\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n if stderr.contains(\"Unrecognized\") {\n println!(\"Servo: {}\", stderr);\n return Ok(false);\n }\n\n run_tests_console(&test_opts, all_tests)\n}\n\n#[derive(PartialEq)]\nenum ReftestKind {\n Same,\n Different,\n}\n\nstruct Reftest {\n name: String,\n kind: ReftestKind,\n files: [PathBuf; 2],\n id: usize,\n servo_args: Vec<String>,\n render_mode: RenderMode,\n is_flaky: bool,\n experimental: bool,\n fragment_identifier: Option<String>,\n resolution: Option<String>,\n}\n\nstruct TestLine<'a> {\n conditions: &'a str,\n kind: &'a str,\n file_left: &'a str,\n file_right: &'a str,\n}\n\nfn parse_lists(file: &Path, servo_args: &[String], render_mode: RenderMode, id_offset: usize) -> Vec<TestDescAndFn> {\n let mut tests = Vec::new();\n let contents = {\n let mut f = File::open(file).unwrap();\n let mut contents = String::new();\n f.read_to_string(&mut contents).unwrap();\n contents\n };\n\n for line in contents.lines() {\n \/\/ ignore comments or empty lines\n if line.starts_with(\"#\") || line.is_empty() {\n continue;\n }\n\n let parts: Vec<&str> = line.split(' ').filter(|p| !p.is_empty()).collect();\n\n let test_line = match parts.len() {\n 3 => TestLine {\n conditions: \"\",\n kind: parts[0],\n file_left: parts[1],\n file_right: parts[2],\n },\n 4 => TestLine {\n conditions: parts[0],\n kind: parts[1],\n file_left: parts[2],\n file_right: parts[3],\n },\n _ => panic!(\"reftest line: '{}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'\", line),\n };\n\n let kind = match test_line.kind {\n \"==\" => ReftestKind::Same,\n \"!=\" => ReftestKind::Different,\n part => panic!(\"reftest line: '{}' has 
invalid kind '{}'\", line, part)\n };\n\n let base = env::current_dir().unwrap().join(file.parent().unwrap());\n\n let file_left = base.join(test_line.file_left);\n let file_right = base.join(test_line.file_right);\n\n let conditions_list = test_line.conditions.split(',');\n let mut flakiness = RenderMode::empty();\n let mut experimental = false;\n let mut fragment_identifier = None;\n let mut resolution = None;\n for condition in conditions_list {\n match condition {\n \"flaky_cpu\" => flakiness.insert(CPU_RENDERING),\n \"flaky_gpu\" => flakiness.insert(GPU_RENDERING),\n \"flaky_linux\" => flakiness.insert(LINUX_TARGET),\n \"flaky_macos\" => flakiness.insert(MACOS_TARGET),\n \"experimental\" => experimental = true,\n _ => (),\n }\n if condition.starts_with(\"fragment=\") {\n fragment_identifier = Some(condition[\"fragment=\".len()..].to_string());\n }\n if condition.starts_with(\"resolution=\") {\n resolution = Some(condition[\"resolution=\".len() ..].to_string());\n }\n }\n\n let reftest = Reftest {\n name: format!(\"{} {} {}\", test_line.file_left, test_line.kind, test_line.file_right),\n kind: kind,\n files: [file_left, file_right],\n id: id_offset + tests.len(),\n render_mode: render_mode,\n servo_args: servo_args.to_vec(),\n is_flaky: render_mode.intersects(flakiness),\n experimental: experimental,\n fragment_identifier: fragment_identifier,\n resolution: resolution,\n };\n\n tests.push(make_test(reftest));\n }\n tests\n}\n\nfn make_test(reftest: Reftest) -> TestDescAndFn {\n let name = reftest.name.clone();\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(name),\n ignore: false,\n should_panic: ShouldPanic::No,\n },\n testfn: DynTestFn(Box::new(move || {\n check_reftest(reftest);\n })),\n }\n}\n\nfn capture(reftest: &Reftest, side: usize) -> (u32, u32, Vec<u8>) {\n let png_filename = format!(\"\/tmp\/servo-reftest-{:06}-{}.png\", reftest.id, side);\n let mut command = Command::new(&servo_path());\n command\n .stdout(Stdio::null())\n .stderr(Stdio::null())\n .args(&reftest.servo_args[..])\n .arg(\"--user-stylesheet\").arg(util::resource_files::resources_dir_path().join(\"ahem.css\"))\n \/\/ Allows pixel perfect rendering of Ahem font and the HTML canvas for reftests.\n .arg(\"-Z\")\n .arg(\"disable-text-aa,disable-canvas-aa\")\n .args(&[\"-f\", \"-o\"])\n .arg(&png_filename)\n .arg(&{\n let mut url = Url::from_file_path(&*reftest.files[side]).unwrap();\n url.fragment = reftest.fragment_identifier.clone();\n url.to_string()\n });\n \/\/ CPU rendering is the default\n if reftest.render_mode.contains(CPU_RENDERING) {\n command.arg(\"-c\");\n }\n if reftest.render_mode.contains(GPU_RENDERING) {\n command.arg(\"-g\");\n }\n if reftest.experimental {\n command.arg(\"--experimental\");\n }\n if let Some(ref resolution) = reftest.resolution {\n command.arg(\"--resolution\");\n command.arg(resolution);\n }\n let retval = match command.status() {\n Ok(status) => status,\n Err(e) => panic!(\"failed to execute process: {}\", e),\n };\n assert!(retval.success());\n\n let image = png::load_png(&png_filename).unwrap();\n let rgba8_bytes = match image.pixels {\n png::PixelsByColorType::RGBA8(pixels) => pixels,\n _ => panic!(),\n };\n (image.width, image.height, rgba8_bytes)\n}\n\nfn servo_path() -> PathBuf {\n let current_exe = env::current_exe().ok().expect(\"Could not locate current executable\");\n current_exe.parent().unwrap().join(\"servo\")\n}\n\nfn check_reftest(reftest: Reftest) {\n let (left_width, left_height, left_bytes) = capture(&reftest, 0);\n let (right_width, right_height, 
right_bytes) = capture(&reftest, 1);\n\n assert_eq!(left_width, right_width);\n assert_eq!(left_height, right_height);\n\n let left_all_white = left_bytes.iter().all(|&p| p == 255);\n let right_all_white = right_bytes.iter().all(|&p| p == 255);\n\n if left_all_white && right_all_white {\n panic!(\"Both renderings are empty\")\n }\n\n let pixels = left_bytes.iter().zip(right_bytes.iter()).map(|(&a, &b)| {\n if a == b {\n \/\/ White for correct\n 0xFF\n } else {\n \/\/ \"1100\" in the RGBA channel with an error for an incorrect value\n \/\/ This results in some number of C0 and FFs, which is much more\n \/\/ readable (and distinguishable) than the previous difference-wise\n \/\/ scaling but does not require reconstructing the actual RGBA pixel.\n 0xC0\n }\n }).collect::<Vec<u8>>();\n\n if pixels.iter().any(|&a| a < 255) {\n let output = format!(\"\/tmp\/servo-reftest-{:06}-diff.png\", reftest.id);\n\n let mut img = png::Image {\n width: left_width,\n height: left_height,\n pixels: png::PixelsByColorType::RGBA8(pixels),\n };\n let res = png::store_png(&mut img, &output);\n assert!(res.is_ok());\n\n match (reftest.kind, reftest.is_flaky) {\n (ReftestKind::Same, true) => println!(\"flaky test - rendering difference: {}\", output),\n (ReftestKind::Same, false) => panic!(\"rendering difference: {}\", output),\n (ReftestKind::Different, _) => {} \/\/ Result was different and that's what was expected\n }\n } else {\n assert!(reftest.is_flaky || reftest.kind == ReftestKind::Same);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Replace most of the unwrap() instances in shaderglsl.rs with match statements<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ugh rust<commit_after><|endoftext|>"} {"text":"<commit_before>pub use alloc::arc::{Arc, Weak};\npub use core::sync::atomic;\npub use self::mutex::{Mutex, MutexGuard, StaticMutex};\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\npub use self::once::Once;\n\nmod mpsc;\nmod mutex;\nmod once;\nmod rwlock;\n<commit_msg>Public mpsc<commit_after>pub use alloc::arc::{Arc, Weak};\npub use core::sync::atomic;\npub use self::mutex::{Mutex, MutexGuard, StaticMutex};\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\npub use self::once::Once;\n\npub mod mpsc;\nmod mutex;\nmod once;\nmod rwlock;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example of custom potential usage<commit_after>\/\/! 
Using a custom potential in simulations\nextern crate cymbalum;\nuse cymbalum::*;\n\n\/\/\/ Let's define a new version of the LennardJones potential, using the\n\/\/\/ alternative form:\n\/\/\/\n\/\/\/ A B\n\/\/\/ V = ----- - -----\n\/\/\/ r^12 r^6\n\/\/\/\nstruct LJ {\n a: f64,\n b: f64\n}\n\n\/\/\/ All we need to do is to implement the PotentialFunction trait\nimpl PotentialFunction for LJ {\n \/\/\/ The energy function give the energy at distance `r`\n fn energy(&self, r: f64) -> f64 {\n self.a \/ r.powi(12) - self.b \/ r.powi(6)\n }\n\n \/\/\/ The force function give the norm of the force at distance `r`\n fn force(&self, r: f64) -> f64 {\n 12.0 * self.a \/ r.powi(13) - 6.0 * self.b \/ r.powi(7)\n }\n}\n\n\/\/ We want to use our LJ potential as a pair potential.\nimpl PairPotential for LJ {}\n\nfn main() {\n Logger::stdout();\n let mut universe = Universe::new();\n\n universe.add_particle(Particle::new(\"F\"));\n universe[0].position = Vector3D::new(0.0, 0.0, 0.0);\n universe.add_particle(Particle::new(\"F\"));\n universe[1].position = Vector3D::new(1.5, 0.0, 0.0);\n\n \/\/ We can now use our new potential in the universe\n universe.add_pair_interaction(\"F\", \"F\",\n Box::new(LJ{\n a: units::from(675.5, \"kJ\/mol\/A^12\").unwrap(),\n b: units::from(40.26, \"kJ\/mol\/A^6\").unwrap()\n }\n ));\n\n let mut simulation = Simulation::new(MolecularDynamics::new(units::from(1.0, \"fs\").unwrap()));\n simulation.run(&mut universe, 1000);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for intersect_with<commit_after>extern crate roaring;\nuse roaring::RoaringBitmap;\n\n#[test]\nfn array() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter(0..2000);\n let bitmap2: RoaringBitmap = FromIterator::from_iter(1000..3000);\n let bitmap3: RoaringBitmap = FromIterator::from_iter(1000..2000);\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn array_and_bitmap() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter(0..2000);\n let bitmap2: RoaringBitmap = FromIterator::from_iter(1000..8000);\n let bitmap3: RoaringBitmap = FromIterator::from_iter(1000..2000);\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn bitmap_to_bitmap() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter(0..12000);\n let bitmap2: RoaringBitmap = FromIterator::from_iter(6000..18000);\n let bitmap3: RoaringBitmap = FromIterator::from_iter(6000..12000);\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn bitmap_to_array() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter(0..6000);\n let bitmap2: RoaringBitmap = FromIterator::from_iter(3000..9000);\n let bitmap3: RoaringBitmap = FromIterator::from_iter(3000..6000);\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn bitmap_and_array() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter(0..12000);\n let bitmap2: RoaringBitmap = FromIterator::from_iter(7000..9000);\n let bitmap3: RoaringBitmap = FromIterator::from_iter(7000..9000);\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn arrays() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter((0..2000).chain(1000000..1002000).chain(3000000..3001000));\n let bitmap2: RoaringBitmap = FromIterator::from_iter((1000..3000).chain(1001000..1003000).chain(2000000..2001000));\n let bitmap3: RoaringBitmap = FromIterator::from_iter((1000..2000).chain(1001000..1002000));\n\n 
bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n\n#[test]\nfn bitmaps() {\n let mut bitmap1: RoaringBitmap = FromIterator::from_iter((0..6000).chain(1000000..1012000).chain(3000000..3010000));\n let bitmap2: RoaringBitmap = FromIterator::from_iter((3000..9000).chain(1006000..1018000).chain(2000000..2010000));\n let bitmap3: RoaringBitmap = FromIterator::from_iter((3000..6000).chain(1006000..1012000));\n\n bitmap1.intersect_with(&bitmap2);\n\n assert_eq!(bitmap1, bitmap3);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create a hash benchmark<commit_after>\/\/ Benchmark our hashing function.\n\n#![feature(test)]\n\nextern crate rsure;\nextern crate tempdir;\nextern crate test;\n\nuse rsure::{Progress, SureHash};\nuse tempdir::TempDir;\nuse std::fs::File;\nuse std::io::Write;\nuse test::Bencher;\n\n\/\/ To compute hashing speed, use 1 over the benchmark time in seconds. For\n\/\/ example, if the benchmark runs in 1,863,225 ns\/iter, that would be about\n\/\/ 536 MiB\/sec hash performance.\n#[bench]\nfn tree_mb_bench(b: &mut Bencher) {\n let tmp = TempDir::new(\"rsure-bench\").unwrap();\n {\n let mut fd = File::create(tmp.path().join(\"large\")).unwrap();\n let buf = vec![0; 1024];\n for _ in 0 .. 1024 {\n fd.write_all(&buf).unwrap();\n }\n }\n\n b.iter(|| {\n let mut tree = rsure::scan_fs(tmp.path()).unwrap();\n let estimate = tree.hash_estimate();\n let mut progress = Progress::new(estimate.files, estimate.bytes);\n tree.hash_update(tmp.path(), &mut progress);\n \/\/ progress.flush();\n })\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, AppSettings, SubCommand};\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\n\/\/\/ Returns the helptext, putting the Strings in cmds as possible\n\/\/\/ subcommands into it\nfn help_text(cmds: Vec<String>) -> String {\n let text = format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. 
It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#, imagbins = cmds.into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }));\n text\n}\n\n\/\/\/ Returns the list of imag-* executables found in $PATH\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\n\nfn main() {\n \/\/ Initialize the Runtime and build the CLI\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands.clone());\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands, AppSettings::ArgRequiredElseHelp])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n .unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. 
Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n \/\/ Begin checking for arguments\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n let mut result = vec![];\n for command in get_commands().iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(format!(\"imag-{}\",command)).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n println!(\"{}\", versionstring);\n }\n }\n\n \/\/ Matches any subcommand given\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n \/\/ Get all given arguments and further subcommands to pass to\n \/\/ the imag-<> binary\n \/\/ Providing no arguments is OK, and is therefore ignored here\n let subcommand_args : Vec<&str> = match scmd.values_of(\"\") {\n Some(values) => values.collect(),\n None => Vec::new()\n };\n \n \/\/ Typos happen, so check if the given subcommand is one found in $PATH\n if !commands.clone().contains(&String::from(subcommand)) {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n }\n \n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n \/\/ Create a Command, and pass it the gathered arguments\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(1));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n \/\/ With the check above, this absolutely should not happen.\n \/\/ Keeping it to be safe\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1);\n }\n }\n }\n }\n },\n \/\/ Calling for example 'imag --versions' will lead here, as this option does not exit.\n \/\/ There's nothing to do in such a case\n _ => {},\n }\n}\n<commit_msg>Pretty output of --versions<commit_after>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\n#[macro_use] extern crate log;\nextern crate walkdir;\n\nextern crate libimagrt;\nextern crate libimagerror;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, AppSettings, SubCommand};\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error;\n\n\/\/\/ Returns the helptext, putting the Strings in cmds as possible\n\/\/\/ subcommands into 
it\nfn help_text(cmds: Vec<String>) -> String {\n let text = format!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n\n {imagbins}\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#, imagbins = cmds.into_iter()\n .map(|cmd| format!(\"\\t{}\\n\", cmd))\n .fold(String::new(), |s, c| {\n let s = s + c.as_str();\n s\n }));\n text\n}\n\n\/\/\/ Returns the list of imag-* executables found in $PATH\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\n\nfn main() {\n \/\/ Initialize the Runtime and build the CLI\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let helptext = help_text(commands.clone());\n let app = Runtime::get_default_cli_builder(appname, version, about)\n .settings(&[AppSettings::AllowExternalSubcommands, AppSettings::ArgRequiredElseHelp])\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .subcommand(SubCommand::with_name(\"help\").help(\"Show help\"))\n .help(helptext.as_str());\n let rt = Runtime::new(app)\n .unwrap_or_else(|e| {\n println!(\"Runtime couldn't be setup. 
Exiting\");\n trace_error(&e);\n exit(1);\n });\n let matches = rt.cli();\n\n debug!(\"matches: {:?}\", matches);\n\n \/\/ Begin checking for arguments\n\n if matches.is_present(\"version\") {\n debug!(\"Showing version\");\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n debug!(\"Showing versions\");\n let mut result = vec![];\n for command in get_commands().iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(format!(\"imag-{}\",command)).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{:10} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n print!(\"{}\", versionstring);\n }\n }\n\n \/\/ Matches any subcommand given\n match matches.subcommand() {\n (subcommand, Some(scmd)) => {\n \/\/ Get all given arguments and further subcommands to pass to\n \/\/ the imag-<> binary\n \/\/ Providing no arguments is OK, and is therefore ignored here\n let subcommand_args : Vec<&str> = match scmd.values_of(\"\") {\n Some(values) => values.collect(),\n None => Vec::new()\n };\n \n \/\/ Typos happen, so check if the given subcommand is one found in $PATH\n if !commands.clone().contains(&String::from(subcommand)) {\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n }\n \n debug!(\"Calling 'imag-{}' with args: {:?}\", subcommand, subcommand_args);\n\n \/\/ Create a Command, and pass it the gathered arguments\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n debug!(\"{} exited with non-zero exit code: {:?}\", subcommand, exit_status);\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(1));\n }\n debug!(\"Successful exit!\");\n },\n\n Err(e) => {\n debug!(\"Error calling the subcommand\");\n match e.kind() {\n ErrorKind::NotFound => {\n \/\/ With the check above, this absolutely should not happen.\n \/\/ Keeping it to be safe\n println!(\"No such command: 'imag-{}'\", subcommand);\n println!(\"See 'imag --help' for available subcommands\");\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1);\n }\n }\n }\n }\n },\n \/\/ Calling for example 'imag --versions' will lead here, as this option does not exit.\n \/\/ There's nothing to do in such a case\n _ => {},\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>style: convert NS_STYLE_STROKE_LINECAP_* to an enum class in nsStyleConsts.h<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>commands: fix from_bytes. get rid of stupid code duplication... 
what was i thinking when i wrote that.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor Chunk setters+getters to use usize for indeces<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting for World::touch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>WIP Add to_rule<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>21 - ownership and move<commit_after>\/\/ This function takes ownership of the heap allocated memory\nfn destroy_box(c: Box<int>) {\n println!(\"destroying a box that contains {}\", c);\n\n \/\/ `c` will be destroyed in this scope, and the memory will be freed\n}\n\nfn main() {\n \/\/ Stack allocated integer\n let x = 5u;\n\n \/\/ \"Copy\" `x` into `y`, there are no resources to move\n let y = x;\n\n \/\/ Both values can be independently used\n println!(\"x is {}, and y is {}\", x, y);\n\n \/\/ `a` is a pointer to a heap allocated integer\n let a = box 5;\n\n println!(\"a contains: {}\", a);\n\n \/\/ \"Move\" `a` into `b`\n \/\/ Here's what happens under the hood: the pointer `a` gets copied (*not*\n \/\/ the data on the heap, just its address) into `b`. Now both are pointers\n \/\/ to the *same* heap allocated data. But now, `b` *owns* the heap\n \/\/ allocated data; `b` is now in charge of freeing the memory in the heap.\n let b = a;\n\n \/\/ After the previous move, `a` can no longer be used\n \/\/ Error! `a` can no longer access the data, because it no longer owns the\n \/\/ heap memory\n \/\/println!(\"a contains: {}\", a);\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ \"Move\" `b` into the function; `b` gives up ownership of the heap data\n destroy_box(b);\n\n \/\/ Since the heap memory has been freed at this point, this action would\n \/\/ result in dereferencing freed memory, but it's forbidden by the compiler\n \/\/ Error! 
Same reason as the previous Error\n \/\/println!(\"b contains: {}\", b);\n \/\/ TODO ^ Try uncommenting this line\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ls adeheres to new ArgParser interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added TwitchClient, User, and Message types<commit_after>\npub struct TwitchClient {\n\tuser: String,\n\tauth: String,\n\tdefault_channel: String,\n\tlogging: bool,\n}\n\nimpl TwitchClient {\n\tpub fn connect(&self) {\n\t\t\/\/ TODO: Connect!\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify client (closes #16)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add protocol module, in order to retrieve the protocol to retrieve a file<commit_after>use hyper::Url;\n\n#[derive(Debug)]\npub enum Protocol {\n HTTP,\n HTTPS,\n}\n\npub fn get_protocol(url: &str) -> Option<Protocol> {\n match Url::parse(url) {\n Ok(url) => match url.scheme() {\n \"http\" => Some(Protocol::HTTP),\n \"https\" => Some(Protocol::HTTPS),\n _ => None,\n },\n Err(error) => {\n warning!(&format!(\"Canno't extract the protocol from the URL: {}\", error));\n None\n }\n }\n}<|endoftext|>"} {"text":"<commit_before>use super::moves::Plies;\nuse super::pos::Position;\nuse super::fen;\nuse super::legal;\nuse super::make_move::make_move;\nuse super::mate;\n\n#[test]\nfn perf() {\n perf_fen(\"rnbqkbnr\/pppppppp\/8\/8\/8\/8\/PPPPPPPP\/RNBQKBNR w KQkq - 0 1\",\n &[1, 20, 400, 8902, 197281, 4865609]);\n}\n\nfn perf_fen(fen_str: &str, res: &[u64]) {\n let p = fen::fen_to_position(fen_str).unwrap();\n perf_pos(&p, res);\n}\n\nfn perf_pos(p: &Position, res: &[u64]) {\n for (num_plies, expect_num_from_pos) in res.iter().enumerate() {\n assert_eq!(*expect_num_from_pos, num_from_pos(p, Plies(num_plies as u8)));\n }\n}\n\nfn num_from_pos(p: &Position, plies: Plies) -> u64 {\n if plies == Plies(0) {\n return 1;\n }\n if !mate::has_legal_moves(p.clone()) {\n return 0;\n }\n let next_plies = match plies {\n Plies(val) => Plies(val-1),\n };\n let mut ans = 0;\n for m in legal::receive_legal(p.clone()).iter() {\n let new_pos = make_move(p.clone(), &m);\n ans += num_from_pos(&new_pos, next_plies);\n }\n ans\n}\n<commit_msg>Decrease test position.<commit_after>use super::moves::Plies;\nuse super::pos::Position;\nuse super::fen;\nuse super::legal;\nuse super::make_move::make_move;\nuse super::mate;\n\n#[test]\nfn perf() {\n \/\/perf_fen(\"rnbqkbnr\/pppppppp\/8\/8\/8\/8\/PPPPPPPP\/RNBQKBNR w KQkq - 0 1\",\n \/\/ &[1, 20, 400, 8902, 197281, 4865609]);\n perf_fen(\"rnbqkbnr\/pppppppp\/8\/8\/8\/8\/PPPPPPPP\/RNBQKBNR w KQkq - 0 1\",\n &[1, 20, 400, 8902]);\n}\n\nfn perf_fen(fen_str: &str, res: &[u64]) {\n let p = fen::fen_to_position(fen_str).unwrap();\n perf_pos(&p, res);\n}\n\nfn perf_pos(p: &Position, res: &[u64]) {\n for (num_plies, expect_num_from_pos) in res.iter().enumerate() {\n assert_eq!(*expect_num_from_pos, num_from_pos(p, Plies(num_plies as u8)));\n }\n}\n\nfn num_from_pos(p: &Position, plies: Plies) -> u64 {\n if plies == Plies(0) {\n return 1;\n }\n if !mate::has_legal_moves(p.clone()) {\n return 0;\n }\n let next_plies = match plies {\n Plies(val) => Plies(val-1),\n };\n let mut ans = 0;\n for m in legal::receive_legal(p.clone()).iter() {\n let new_pos = make_move(p.clone(), &m);\n ans += num_from_pos(&new_pos, next_plies);\n }\n ans\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add flag removal setter<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change packer::pack's `entries`'s 0th item from String to 
&str<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>do correction after all checks<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix: clap_app! should be gated by unstable, not nightly feature<commit_after><|endoftext|>"} {"text":"<commit_before>use runtime::Runtime;\nuse error::ImagError;\nuse error::ImagErrorBase;\n\npub struct ModuleError {\n base: ImagErrorBase,\n module_name: String,\n}\n\nimpl ModuleError {\n\n pub fn short<T: Module>(module : &T, short : String) -> ModuleError {\n ModuleError::new(module, short, \"\".to_string())\n }\n\n pub fn new<T: Module>(module : &T, short : String, long : String) -> ModuleError {\n ModuleError {\n base: ImagErrorBase {\n shortdesc: short,\n longdesc: long,\n },\n module_name: module.name(),\n }\n }\n\n}\n\nimpl<'a> ImagError<'a> for ModuleError {\n fn print(&self, rt: &Runtime) {\n if self.base.longdesc.is_empty() {\n let s = format!(\"{}: {}\\n\\n{}\\n\\n\",\n self.module_name,\n self.base.shortdesc,\n self.base.longdesc);\n rt.print(&s)\n } else {\n let s = format!(\"{}: {}\\n\",\n self.module_name,\n self.base.shortdesc);\n rt.print(&s)\n }\n }\n\n fn print_short(&self, rt : &Runtime) {\n let s = format!(\"{}: {}\\n\", self.module_name, self.base.shortdesc);\n rt.print(&s)\n }\n\n fn print_long(&self, rt : &Runtime) {\n let s = format!(\"{}: {}\\n\\n{}\\n\\n\",\n self.module_name,\n self.base.shortdesc,\n self.base.longdesc);\n rt.print(&s)\n }\n}\n\npub trait Module {\n\n fn new() -> Self;\n fn load(&self, &rt : Runtime) -> Self;\n fn name(&self) -> String;\n\n fn execute(&self, &rt : Runtime) -> Option<ModuleError>;\n\n}\n<commit_msg>Revert \"Add: impl ImagError for ModuleError\"<commit_after>use runtime::Runtime;\nuse error::ImagErrorBase;\n\npub struct ModuleError {\n base: ImagErrorBase,\n module_name: String,\n}\n\nimpl ModuleError {\n\n pub fn short<T: Module>(module : &T, short : String) -> ModuleError {\n ModuleError::new(module, short, \"\".to_string())\n }\n\n pub fn new<T: Module>(module : &T, short : String, long : String) -> ModuleError {\n ModuleError {\n base: ImagErrorBase {\n shortdesc: short,\n longdesc: long,\n },\n module_name: module.name(),\n }\n }\n\n}\n\npub trait Module {\n\n fn new() -> Self;\n fn load(&self, &rt : Runtime) -> Self;\n fn name(&self) -> String;\n\n fn execute(&self, &rt : Runtime) -> Option<ModuleError>;\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>No need to tokenize incoming path. Router performance is acceptable now.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove unused dependencies<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement `IndexMut` for `Vector<T>` (#38)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> Adds Rust solution for #008<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Finished #1 - Safer Count with RWArc<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>7 done<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test of missing unsafe on extern Rust sig<commit_after>#[cxx::bridge]\nmod ffi {\n extern \"Rust\" {\n fn f(x: i32);\n }\n}\n\nunsafe fn f(_x: i32) {}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use redox::{Vec, String, ToString};\nuse redox::collections::{BTreeMap, VecDeque};\n\nuse super::dvaddr::DVAddr;\nuse super::zio;\n\n\/\/\/ MRU - Most Recently Used cache\nstruct Mru {\n \/\/ TODO: keep track of use counts. 
So mru_map becomes (use_count: u64, Vec<u8>)\n map: BTreeMap<DVAddr, (u64, Vec<u8>)>,\n queue: VecDeque<DVAddr>, \/\/ Oldest DVAddrs are at the end\n size: usize, \/\/ Max mru cache size in bytes\n used: usize, \/\/ Used bytes in mru cache\n}\n\nimpl Mru {\n pub fn new() -> Self {\n Mru {\n map: BTreeMap::new(),\n queue: VecDeque::new(),\n size: 10,\n used: 0,\n }\n }\n}\n\n\/\/\/ MFU - Most Frequently Used cache\nstruct Mfu {\n \/\/ TODO: Keep track of use counts. So mfu_map becomes (use_count: u64, Vec<u8>). Reset the use\n \/\/ count every once in a while. For instance, every 1000 reads. This will probably end up being\n \/\/ a knob for the user.\n \/\/ TODO: Keep track of minimum frequency and corresponding DVA\n map: BTreeMap<DVAddr, (u64, Vec<u8>)>,\n size: usize, \/\/ Max mfu cache size in bytes\n used: usize, \/\/ Used bytes in mfu cache\n}\n\nimpl Mfu {\n pub fn new() -> Self {\n Mfu {\n map: BTreeMap::new(),\n size: 10,\n used: 0,\n }\n }\n}\n\n\/\/ Our implementation of the Adaptive Replacement Cache (ARC) is set up to allocate\n\/\/ its buffer on the heap rather than in a private pool thing. This makes it much\n\/\/ simpler to implement, but defers the fragmentation problem to the heap allocator.\n\/\/ We named the type `ArCache` to avoid confusion with Rust's `Arc` reference type.\npub struct ArCache {\n \/\/ MRU\n \/\/ TODO: keep track of use counts. So mru_map becomes (use_count: u64, Vec<u8>)\n mru_map: BTreeMap<DVAddr, Vec<u8>>, \/\/ Most recently used cache\n mru_queue: VecDeque<DVAddr>, \/\/ Oldest DVAddrs are at the end\n mru_size: usize, \/\/ Max mru cache size in blocks \n mru_used: usize, \/\/ Number of used blocks in mru cache\n\n \/\/ MFU\n \/\/ TODO: Keep track of use counts. So mfu_map becomes (use_count: u64, Vec<u8>). Reset the use\n \/\/ count every once in a while. For instance, every 1000 reads. This will probably end up being\n \/\/ a knob for the user.\n \/\/ TODO: Keep track of minimum frequency and corresponding DVA\n mfu_map: BTreeMap<DVAddr, Vec<u8>>, \/\/ Most frequently used cache\n mfu_size: usize, \/\/ Max mfu cache size in blocks\n mfu_used: usize, \/\/ Number of used bytes in mfu cache\n}\n\nimpl ArCache {\n pub fn new() -> Self {\n ArCache {\n mru_map: BTreeMap::new(),\n mru_queue: VecDeque::new(),\n mru_size: 1000,\n mru_used: 0,\n\n mfu_map: BTreeMap::new(),\n mfu_size: 1000,\n mfu_used: 0,\n }\n }\n\n pub fn read(&mut self, reader: &mut zio::Reader, dva: &DVAddr) -> Result<Vec<u8>, String> {\n if let Some(block) = self.mru_map.get(dva) {\n \/\/ TODO: Keep track of MRU DVA use count. 
If it gets used a second time, move the block into\n \/\/ the MFU cache.\n\n \/\/ Block is cached\n return Ok(block.clone());\n }\n if let Some(block) = self.mfu_map.get(dva) {\n \/\/ TODO: keep track of DVA use count\n \/\/ Block is cached\n return Ok(block.clone());\n }\n\n \/\/ Block isn't cached, have to read it from disk\n let block = reader.read(dva.sector() as usize, dva.asize() as usize);\n\n \/\/ Blocks start in MRU cache\n self.mru_cache_block(dva, block)\n }\n\n fn mru_cache_block(&mut self, dva: &DVAddr, block: Vec<u8>) -> Result<Vec<u8>, String>{\n \/\/ If necessary, make room for the block in the cache\n while self.mru_used + (dva.asize() as usize) > self.mru_size {\n let last_dva =\n match self.mru_queue.pop_back()\n {\n Some(dva) => dva,\n None => return Err(\"No more ARC MRU items to free\".to_string()),\n };\n self.mru_map.remove(&last_dva);\n self.mru_used -= last_dva.asize() as usize;\n }\n\n \/\/ Add the block to the cache\n self.mru_used += dva.asize() as usize;\n self.mru_map.insert(*dva, block);\n self.mru_queue.push_front(*dva);\n Ok(self.mru_map.get(dva).unwrap().clone())\n }\n\n \/\/ TODO: mfu_cache_block. Remove the DVA with the lowest frequency\n}\n<commit_msg>Reorganized code with the `Mru` and `Mfu` types<commit_after>use redox::{Vec, String, ToString};\nuse redox::collections::{BTreeMap, VecDeque};\n\nuse super::dvaddr::DVAddr;\nuse super::zio;\n\n\/\/\/ MRU - Most Recently Used cache\nstruct Mru {\n \/\/ TODO: keep track of use counts. So mru_map becomes (use_count: u64, Vec<u8>)\n map: BTreeMap<DVAddr, (u64, Vec<u8>)>,\n queue: VecDeque<DVAddr>, \/\/ Oldest DVAddrs are at the end\n size: usize, \/\/ Max mru cache size in bytes\n used: usize, \/\/ Used bytes in mru cache\n}\n\nimpl Mru {\n pub fn new() -> Self {\n Mru {\n map: BTreeMap::new(),\n queue: VecDeque::new(),\n size: 1000,\n used: 0,\n }\n }\n\n fn cache_block(&mut self, dva: &DVAddr, block: Vec<u8>) -> Result<Vec<u8>, String> {\n \/\/ If necessary, make room for the block in the cache\n while self.used + block.len() > self.size {\n let last_dva = match self.queue.pop_back() {\n Some(dva) => dva,\n None => return Err(\"No more ARC MRU items to free\".to_string()),\n };\n self.map.remove(&last_dva);\n self.used -= last_dva.asize() as usize;\n }\n\n \/\/ Add the block to the cache\n self.used += block.len();\n self.map.insert(*dva, (1337, block));\n self.queue.push_front(*dva);\n Ok(self.map.get(dva).unwrap().1.clone())\n }\n}\n\n\/\/\/ MFU - Most Frequently Used cache\nstruct Mfu {\n \/\/ TODO: Keep track of use counts. So mfu_map becomes (use_count: u64, Vec<u8>). Reset the use\n \/\/ count every once in a while. For instance, every 1000 reads. This will probably end up being\n \/\/ a knob for the user.\n \/\/ TODO: Keep track of minimum frequency and corresponding DVA\n map: BTreeMap<DVAddr, (u64, Vec<u8>)>,\n size: usize, \/\/ Max mfu cache size in bytes\n used: usize, \/\/ Used bytes in mfu cache\n}\n\nimpl Mfu {\n pub fn new() -> Self {\n Mfu {\n map: BTreeMap::new(),\n size: 1000,\n used: 0,\n }\n }\n\n \/\/ TODO: cache_block. Remove the DVA with the lowest frequency\n fn cache_block(&mut self, dva: &DVAddr, block: Vec<u8>) -> Result<Vec<u8>, String> {\n }\n}\n\n\/\/ Our implementation of the Adaptive Replacement Cache (ARC) is set up to allocate\n\/\/ its buffer on the heap rather than in a private pool thing. 
This makes it much\n\/\/ simpler to implement, but defers the fragmentation problem to the heap allocator.\n\/\/ We named the type `ArCache` to avoid confusion with Rust's `Arc` reference type.\npub struct ArCache {\n mru: Mru,\n mfu: Mfu,\n}\n\nimpl ArCache {\n pub fn new() -> Self {\n ArCache {\n mru: Mru::new(),\n mfu: Mfu::new(),\n }\n }\n\n pub fn read(&mut self, reader: &mut zio::Reader, dva: &DVAddr) -> Result<Vec<u8>, String> {\n if let Some(block) = self.mru.map.get(dva) {\n \/\/ TODO: Keep track of MRU DVA use count. If it gets used a second time, move the block into\n \/\/ the MFU cache.\n\n \/\/ Block is cached\n return Ok(block.1.clone());\n }\n if let Some(block) = self.mfu.map.get(dva) {\n \/\/ TODO: keep track of DVA use count\n \/\/ Block is cached\n return Ok(block.1.clone());\n }\n\n \/\/ Block isn't cached, have to read it from disk\n let block = reader.read(dva.sector() as usize, dva.asize() as usize);\n\n \/\/ Blocks start in MRU cache\n self.mru.cache_block(dva, block)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use collections::VecDeque;\n\/\/ Temporary hack until libredox get hashmaps\nuse redox::*;\n\n#[derive(Clone, PartialEq, Copy, Hash)]\npub enum InsertMode {\n Append,\n Insert,\n Replace,\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\npub struct InsertOptions {\n mode: InsertMode,\n}\n\n\/\/\/ A mode\n#[derive(Clone, PartialEq, Copy, Hash)]\npub enum Mode {\n \/\/\/ A primitive mode (no repeat, no delimiters, no preprocessing)\n Primitive(PrimitiveMode),\n \/\/\/ Command mode\n Command(CommandMode),\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\n\/\/\/ A command mode\npub enum CommandMode {\n\/\/ Visual(VisualOptions),\n \/\/\/ Normal mode\n Normal,\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\n\/\/\/ A primitive mode\npub enum PrimitiveMode {\n \/\/\/ Insert mode\n Insert(InsertOptions),\n}\n\n#[derive(Clone, PartialEq, Hash)]\n\/\/\/ The state of the editor\npub struct Editor {\n \/\/\/ The current cursor\n pub current_cursor: u8,\n \/\/\/ The cursors\n pub cursors: Vec<Cursor>,\n \/\/\/ The text (document)\n pub text: VecDeque<VecDeque<char>>,\n \/\/\/ The x coordinate of the scroll\n pub scroll_x: u32,\n \/\/\/ The y coordinate of the scroll\n pub scroll_y: u32,\n \/\/\/ Number of repeation entered\n pub n: u16,\n}\n\n\nimpl Editor {\n\n \/\/\/ Execute a instruction n times\n pub fn exec(&mut self, n: u16, cmd: char) {\n\n match cmd {\n\n }\n }\n \n \/\/\/ Feed a char to the editor (as input)\n fn feed(&mut self, c: char) {\n match self.cursors[self.current_cursor as usize].mode {\n Mode::Primitive(_) => {\n self.exec(0, c);\n },\n Mode::Command(_) => {\n self.n = match c {\n '0' if self.n != 0 => self.n * 10,\n '1' => self.n * 10 + 1,\n '2' => self.n * 10 + 2,\n '3' => self.n * 10 + 3,\n '4' => self.n * 10 + 4,\n '5' => self.n * 10 + 5,\n '6' => self.n * 10 + 6,\n '7' => self.n * 10 + 7,\n '8' => self.n * 10 + 8,\n '9' => self.n * 10 + 9,\n _ => {\n self.exec(if self.n == 0 { 1 } else { self.n }, c);\n self.n\n }\n }\n\n }\n }\n }\n\n \/\/\/ Insert text\n fn insert(&mut self, c: char) {\n\n }\n\n \/\/\/ Create new default state editor\n fn new() -> Editor {\n Editor {\n current_cursor: 0,\n cursors: Vec::new(),\n text: VecDeque::new(),\n scroll_x: 0,\n scroll_y: 0,\n n: 0,\n }\n }\n}\n\n\/\/\/ A command char\n#[derive(Clone, Copy, Hash, PartialEq)]\npub enum CommandChar {\n \/\/\/ A char\n Char(char),\n \/\/\/ A wildcard\n Wildcard,\n}\n\n\n#[derive(Clone, PartialEq, Hash)]\n\/\/\/ A cursor\npub struct Cursor {\n \/\/\/ The x coordinate of the 
cursor\n pub x: u32,\n \/\/\/ The y coordinate of the cursor\n pub y: u32,\n \/\/\/ The mode of the cursor\n pub mode: Mode,\n \/\/\/ The history of the cursor\n pub history: Vec<Unit>,\n\n<commit_msg>More refactoring<commit_after>use collections::VecDeque;\n\/\/ Temporary hack until libredox get hashmaps\nuse redox::*;\n\n#[derive(Clone)]\n\/\/\/ An instruction\npub Inst(i16, char)\n\n#[derive(Clone, PartialEq, Copy, Hash)]\npub enum InsertMode {\n Append,\n Insert,\n Replace,\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\npub struct InsertOptions {\n mode: InsertMode,\n}\n\n\/\/\/ A mode\n#[derive(Clone, PartialEq, Copy, Hash)]\npub enum Mode {\n \/\/\/ A primitive mode (no repeat, no delimiters, no preprocessing)\n Primitive(PrimitiveMode),\n \/\/\/ Command mode\n Command(CommandMode),\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\n\/\/\/ A command mode\npub enum CommandMode {\n\/\/ Visual(VisualOptions),\n \/\/\/ Normal mode\n Normal,\n}\n\n#[derive(Clone, PartialEq, Copy, Hash)]\n\/\/\/ A primitive mode\npub enum PrimitiveMode {\n \/\/\/ Insert mode\n Insert(InsertOptions),\n}\n\n#[derive(Clone, PartialEq, Hash)]\n\/\/\/ The state of the editor\npub struct Editor<I: Iterator<Item = char>> {\n \/\/\/ The current cursor\n pub current_cursor: u8,\n \/\/\/ The cursors\n pub cursors: Vec<Cursor>,\n \/\/\/ The text (document)\n pub text: VecDeque<VecDeque<char>>,\n \/\/\/ The x coordinate of the scroll\n pub scroll_x: u32,\n \/\/\/ The y coordinate of the scroll\n pub scroll_y: u32,\n \/\/\/ Number of repeation entered\n pub n: u16,\n \/\/\/ The input iterator\n pub iter: I,\n}\n\n\nimpl Editor {\n\n \/\/\/ Execute a instruction\n pub fn exec(&mut self, inst: Inst) {\n\n }\n\n \/\/\/ Feed a char to the editor (as input)\n pub fn feed(&mut self, c: char) {\n match self.cursors[self.current_cursor as usize].mode {\n Mode::Primitive(_) => {\n self.exec(Inst(0, c));\n },\n Mode::Command(_) => {\n self.n = match c {\n '0' if self.n != 0 => self.n * 10,\n '1' => self.n * 10 + 1,\n '2' => self.n * 10 + 2,\n '3' => self.n * 10 + 3,\n '4' => self.n * 10 + 4,\n '5' => self.n * 10 + 5,\n '6' => self.n * 10 + 6,\n '7' => self.n * 10 + 7,\n '8' => self.n * 10 + 8,\n '9' => self.n * 10 + 9,\n _ => {\n self.exec(Inst(if self.n == 0 { 1 } else { self.n },\n c));\n self.n\n },\n }\n\n }\n }\n }\n\n \/\/\/ Initialize the editor\n pub fn init(&mut self) {\n for c in self.iter {\n self.feed(c);\n }\n }\n\n \/\/\/ Insert text\n pub fn insert(&mut self, c: char) {\n \n }\n\n \/\/\/ Create new default state editor\n pub fn new() -> Editor {\n Editor {\n current_cursor: 0,\n cursors: Vec::new(),\n text: VecDeque::new(),\n scroll_x: 0,\n scroll_y: 0,\n n: 0,\n }\n }\n}\n\n\/\/\/ A command char\n#[derive(Clone, Copy, Hash, PartialEq)]\npub enum CommandChar {\n \/\/\/ A char\n Char(char),\n \/\/\/ A wildcard\n Wildcard,\n}\n\n\n#[derive(Clone, PartialEq, Hash)]\n\/\/\/ A cursor\npub struct Cursor {\n \/\/\/ The x coordinate of the cursor\n pub x: u32,\n \/\/\/ The y coordinate of the cursor\n pub y: u32,\n \/\/\/ The mode of the cursor\n pub mode: Mode,\n \/\/\/ The history of the cursor\n pub history: Vec<Inst>,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Invalid source code formating Fixed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create automata.rs<commit_after>\/\/ Transition table for a language consisting of strings having even number of zeroes ( that includes having no zeroes as well )\n\n\/\/____________\n\/\/S1 | 1 -> S1\n\/\/S1 | 0 -> S2\n\/\/S2 | 0 -> S1\n\/\/S2 | 1 -> 
S2\n\/\/------------\n\n\n\/*#[derive(Debug)]\nenum STATE {\n S0,\n S1,\n S2,\n}*\/\n\npub mod even_zeros_dfa {\n\nconst S0:&'static str = \"S0\";\nconst S1:&'static str = \"S1\";\nconst S2:&'static str = \"S2\";\n\n\nfn assert_state(state:&'static str,desired:&'static str) -> bool {\n state == desired\n}\n\n#[allow(unused_assignments)]\nfn dfa(s:&String) -> bool {\n let mut state = S0;\n \/\/ initializing the dfa to the start state with any symbol\n if s.len() > 0 {state=S1;} else {return false;}\n for i in s.chars() {\n if i=='1' && assert_state(&state,S1)\n {state = S1;}\n else if i=='0' && assert_state(&state,S1)\n {state = S2;}\n else if i=='0' && assert_state(&state,S2)\n {state = S1;}\n else if i=='1' && assert_state(&state,S2)\n {state = S2;}\n }\n\n \/\/ automatic boolean return from match arm\n assert_state(&state,S1)\n}\n\npub fn init_fsm(st:&String) {\n if dfa(st) {println!(\"Language accepted by the dfa\");}\n else {println!(\"Language not recognized\");}\n}\n\n#[test]\nfn test_dfa() {\n let string = \"100\".to_string();\n if dfa(&string) {\n println!(\"Language accepted by the dfa\");\n }else {\n println!(\"Language not recognized\");\n }\n}\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add sample<commit_after>extern crate sea_canal;\n\nuse sea_canal::analyzer::{Analyze, Analyzer};\n\nfn main() {\n let s = &[1, 4, 3, 6, 5];\n println!(\"{:?}\", s);\n let analyzer = Analyzer::from_seq(s);\n\n for seq in analyzer.analyze_n(2) {\n println!(\"{}\", seq);\n }\n\n let s = &[1, 2, 4, 8];\n println!(\"\\n{:?}\", s);\n let analyzer = Analyzer::from_seq(s);\n\n for seq in analyzer.analyze_n(1) {\n println!(\"{}\", seq);\n }\n\n let s = &[1, 10, 19, 28];\n println!(\"\\n{:?}\", s);\n let analyzer = Analyzer::from_seq(s);\n\n for seq in analyzer.analyze_n(1) {\n println!(\"{}\", seq);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(globs)]\n#![feature(macro_rules)]\n#![feature(phase)]\n#![feature(slicing_syntax)]\n#![feature(tuple_indexing)]\n\n#[phase(plugin, link)]\nextern crate gl;\nextern crate math;\nextern crate base;\n\nextern crate libc;\n#[phase(plugin, link)]\nextern crate log;\n\npub use render::Renderer;\npub use render::RenderStep;\npub use shader::Shader;\npub use shader::ShaderLoader;\npub use shader::Uniform;\npub use texture::Texture;\npub use vbo::BufferBuilder;\npub use vbo::VertexBuffer;\n\nmod render;\nmod shader;\nmod texture;\nmod vbo;\n<commit_msg>tuple_indexing is no longer gated!<commit_after>#![feature(globs, macro_rules, phase, slicing_syntax)]\n\n#[phase(plugin, link)]\nextern crate gl;\nextern crate math;\nextern crate base;\n\nextern crate libc;\n#[phase(plugin, link)]\nextern crate log;\n\npub use render::Renderer;\npub use render::RenderStep;\npub use shader::Shader;\npub use shader::ShaderLoader;\npub use shader::Uniform;\npub use texture::Texture;\npub use vbo::BufferBuilder;\npub use vbo::VertexBuffer;\n\nmod render;\nmod shader;\nmod texture;\nmod vbo;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change a lot.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adds Rust Problem 551 in progress<commit_after>\/\/\/ Problem 551\n\/\/\/ Let a0, a1, a2, ... 
be an integer sequence defined by:\n\/\/\/ \n\/\/\/ a_0 = 1;\n\/\/\/ for n ≥ 1, a_n is the sum of the digits of all preceding terms.\n\/\/\/\n\/\/\/ The sequence starts with 1, 1, 2, 4, 8, 16, 23, 28, 38, 49, ...\n\/\/\/ You are given a_10^6 = 31054319.\n\/\/\/ \n\/\/\/ a_10^1 = 62\n\/\/\/ a_10^2 = 1205\n\/\/\/ a_10^3 = 16577\n\/\/\/ a_10^4 = 213677\n\/\/\/ a_10^5 = 2609882\n\/\/\/ a_10^6 = 31054319\n\/\/\/ a_10^7 = 355356611\n\/\/\/ a_10^8 = 4047602471\n\/\/\/ a_10^9 = 45063267434\n\/\/\/ a_10^10 = 500834734271\n\/\/\/\n\/\/\/ Find a_10^15.\nfn main() {\n \/\/ In essence, the next term is the current term plus the sum of the\n \/\/ current term's digits. a_7 = 28, sum of digits = 10, a_8 = 38.\n let mut i: u64 = 1;\n let mut a: u64 = 1;\n \/\/ let mut i: u64 = 1000000;\n \/\/ let mut a: u64 = 31054319;\n let mut c: u64 = 0;\n let mut pow: u64 = 10;\n loop {\n \/\/ if c > 25 {\n \/\/ break\n \/\/ }\n \/\/ if i == 1000000 {\n \/\/ if i == 1000000000000000 {\n if i % pow == 0 {\n println!(\"a_{} = {}\", i, a);\n pow *= 10;\n \/\/ break\n }\n \/\/ Take sum of digits of current term\n let mut b = a;\n let mut s = b%10;\n while b > 0 {\n b \/= 10;\n s += b%10;\n }\n \/\/ Increment term\n a += s;\n i += 1;\n c += 1;\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting + remove dead handler code<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n#![feature(append)]\n#![feature(arc_unique)]\n#![feature(as_slice)]\n#![feature(as_unsafe_cell)]\n#![feature(borrow_state)]\n#![feature(box_raw)]\n#![feature(box_syntax)]\n#![feature(core)]\n#![feature(core_intrinsics)]\n#![feature(custom_attribute)]\n#![feature(custom_derive)]\n#![feature(drain)]\n#![feature(hashmap_hasher)]\n#![feature(mpsc_select)]\n#![feature(nonzero)]\n#![feature(owned_ascii_ext)]\n#![feature(plugin)]\n#![feature(rc_unique)]\n#![feature(slice_chars)]\n#![feature(str_utf16)]\n#![feature(unicode)]\n#![feature(vec_push_all)]\n\n#![deny(unsafe_code)]\n#![allow(non_snake_case)]\n\n#![doc=\"The script crate contains all matters DOM.\"]\n\n#![plugin(string_cache_plugin)]\n#![plugin(plugins)]\n\n#[macro_use]\nextern crate log;\n\n#[macro_use] extern crate bitflags;\nextern crate core;\nextern crate devtools_traits;\nextern crate cssparser;\nextern crate euclid;\nextern crate html5ever;\nextern crate encoding;\nextern crate fnv;\nextern crate hyper;\nextern crate ipc_channel;\nextern crate js;\nextern crate layout_traits;\nextern crate libc;\nextern crate msg;\nextern crate net_traits;\nextern crate num;\nextern crate rustc_serialize;\nextern crate rustc_unicode;\nextern crate serde;\nextern crate time;\nextern crate canvas;\nextern crate canvas_traits;\nextern crate rand;\n#[macro_use]\nextern crate profile_traits;\nextern crate script_traits;\nextern crate selectors;\nextern crate smallvec;\nextern crate util;\nextern crate websocket;\n#[macro_use]\nextern crate style;\nextern crate unicase;\nextern crate url;\nextern crate uuid;\nextern crate string_cache;\nextern crate offscreen_gl_context;\nextern crate tendril;\n\npub mod cors;\npub mod document_loader;\n\n#[macro_use]\npub mod dom;\n\npub mod parse;\n\npub mod layout_interface;\nmod network_listener;\npub mod page;\npub mod script_task;\nmod timers;\npub mod textinput;\npub mod clipboard_provider;\nmod devtools;\nmod horribly_inefficient_timers;\nmod 
webdriver_handlers;\n\n#[allow(unsafe_code)]\npub fn init() {\n unsafe {\n assert_eq!(js::jsapi::JS_Init(), 1);\n }\n}\n<commit_msg>Auto merge of #6707 - pcwalton:more-fds, r=larsbergstrom<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n#![feature(append)]\n#![feature(arc_unique)]\n#![feature(as_slice)]\n#![feature(as_unsafe_cell)]\n#![feature(borrow_state)]\n#![feature(box_raw)]\n#![feature(box_syntax)]\n#![feature(core)]\n#![feature(core_intrinsics)]\n#![feature(custom_attribute)]\n#![feature(custom_derive)]\n#![feature(drain)]\n#![feature(hashmap_hasher)]\n#![feature(mpsc_select)]\n#![feature(nonzero)]\n#![feature(owned_ascii_ext)]\n#![feature(plugin)]\n#![feature(rc_unique)]\n#![feature(slice_chars)]\n#![feature(str_utf16)]\n#![feature(unicode)]\n#![feature(vec_push_all)]\n\n#![deny(unsafe_code)]\n#![allow(non_snake_case)]\n\n#![doc=\"The script crate contains all matters DOM.\"]\n\n#![plugin(string_cache_plugin)]\n#![plugin(plugins)]\n\n#[macro_use]\nextern crate log;\n\n#[macro_use] extern crate bitflags;\nextern crate core;\nextern crate devtools_traits;\nextern crate cssparser;\nextern crate euclid;\nextern crate html5ever;\nextern crate encoding;\nextern crate fnv;\nextern crate hyper;\nextern crate ipc_channel;\nextern crate js;\nextern crate layout_traits;\nextern crate libc;\nextern crate msg;\nextern crate net_traits;\nextern crate num;\nextern crate rustc_serialize;\nextern crate rustc_unicode;\nextern crate serde;\nextern crate time;\nextern crate canvas;\nextern crate canvas_traits;\nextern crate rand;\n#[macro_use]\nextern crate profile_traits;\nextern crate script_traits;\nextern crate selectors;\nextern crate smallvec;\nextern crate util;\nextern crate websocket;\n#[macro_use]\nextern crate style;\nextern crate unicase;\nextern crate url;\nextern crate uuid;\nextern crate string_cache;\nextern crate offscreen_gl_context;\nextern crate tendril;\n\npub mod cors;\npub mod document_loader;\n\n#[macro_use]\npub mod dom;\n\npub mod parse;\n\npub mod layout_interface;\nmod network_listener;\npub mod page;\npub mod script_task;\nmod timers;\npub mod textinput;\npub mod clipboard_provider;\nmod devtools;\nmod horribly_inefficient_timers;\nmod webdriver_handlers;\n\n#[cfg(any(target_os=\"linux\", target_os=\"android\"))]\n#[allow(unsafe_code)]\nfn perform_platform_specific_initialization() {\n use std::mem;\n const RLIMIT_NOFILE: libc::c_int = 7;\n\n \/\/ Bump up our number of file descriptors to save us from impending doom caused by an onslaught\n \/\/ of iframes.\n unsafe {\n let mut rlim = mem::uninitialized();\n assert!(libc::getrlimit(RLIMIT_NOFILE, &mut rlim) == 0);\n rlim.rlim_cur = rlim.rlim_max;\n assert!(libc::setrlimit(RLIMIT_NOFILE, &mut rlim) == 0);\n }\n}\n\n#[cfg(not(any(target_os=\"linux\", target_os=\"android\")))]\nfn perform_platform_specific_initialization() {}\n\n#[allow(unsafe_code)]\npub fn init() {\n unsafe {\n assert_eq!(js::jsapi::JS_Init(), 1);\n }\n\n perform_platform_specific_initialization();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix typo: WithField is WithFields<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Init from a func return value<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added very basic example of bindings 
working<commit_after>extern crate celix;\n\nfn main() {\n println!(\"{}\", celix::CELIX_SUCCESS);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More doc!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use Vec::from_raw_parts<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added unary scope<commit_after>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::default::Default;\n\nuse progress::count_map::CountMap;\nuse progress::notificator::Notificator;\nuse progress::{Timestamp, Scope, Antichain};\nuse communication::channels::Data;\nuse communication::exchange::ExchangeReceiver;\nuse communication::channels::{OutputPort, ObserverHelper};\n\npub struct UnaryScopeHandle<T: Timestamp, D1: Data, D2: Data> {\n pub input: ExchangeReceiver<T, D1>,\n pub output: ObserverHelper<OutputPort<T, D2>>,\n pub notificator: Notificator<T>,\n}\n\npub struct UnaryScope<T: Timestamp, D1: Data, D2: Data, L: FnMut(&mut UnaryScopeHandle<T, D1, D2>)> {\n handle: UnaryScopeHandle<T, D1, D2>,\n logic: L,\n}\n\nimpl<T: Timestamp, D1: Data, D2: Data, L: FnMut(&mut UnaryScopeHandle<T, D1, D2>)> UnaryScope<T, D1, D2, L> {\n pub fn new(receiver: ExchangeReceiver<T, D1>, targets: OutputPort<T, D2>, logic: L) -> UnaryScope<T, D1, D2, L> {\n UnaryScope {\n handle: UnaryScopeHandle {\n input: receiver,\n output: ObserverHelper::new(targets.clone(), Rc::new(RefCell::new(CountMap::new()))),\n notificator: Default::default(),\n },\n logic: logic,\n }\n }\n}\n\nimpl<T: Timestamp, D1: Data, D2: Data, L: FnMut(&mut UnaryScopeHandle<T, D1, D2>)> Scope<T> for UnaryScope<T, D1, D2, L> {\n fn inputs(&self) -> u64 { 1 }\n fn outputs(&self) -> u64 { 1 }\n\n fn set_external_summary(&mut self, _summaries: Vec<Vec<Antichain<T::Summary>>>, frontier: &mut Vec<CountMap<T>>) -> () {\n self.handle.notificator.update_frontier_from_cm(&mut frontier[0]);\n frontier[0].clear();\n }\n\n fn push_external_progress(&mut self, external: &mut Vec<CountMap<T>>) -> () {\n \/\/ println!(\"unary.pep: {:?}\", external);\n self.handle.notificator.update_frontier_from_cm(&mut external[0]);\n external[0].clear();\n }\n\n fn pull_internal_progress(&mut self, internal: &mut Vec<CountMap<T>>,\n consumed: &mut Vec<CountMap<T>>,\n produced: &mut Vec<CountMap<T>>) -> bool\n {\n (self.logic)(&mut self.handle);\n\n \/\/ extract what we know about progress from the input and output adapters.\n self.handle.input.pull_progress(&mut consumed[0]);\n self.handle.output.pull_progress(&mut produced[0]);\n self.handle.notificator.pull_progress(&mut internal[0]);\n\n return false; \/\/ no unannounced internal work\n }\n\n fn name(&self) -> String { format!(\"Unary\") }\n fn notify_me(&self) -> bool { true }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::Path;\nuse sdl2::{self};\nuse sdl2_image::{self, LoadTexture, INIT_PNG, INIT_JPG};\nuse sdl2::event::Event;\nuse sdl2::keycode::KeyCode;\n\npub fn main(png: &Path) {\n\n let mut context = sdl2::init().video().unwrap();\n sdl2_image::init(INIT_PNG | INIT_JPG);\n let window = context.window(\"rust-sdl2 demo: Video\", 800, 600)\n .position_centered()\n .opengl()\n .build()\n .unwrap();\n\n let mut renderer = window.renderer().accelerated().build().unwrap();\n let mut texture = renderer.load_texture(png).unwrap();\n\n \/\/ Draws and shows the loaded texture.\n let mut drawer = renderer.drawer();\n drawer.copy(&mut texture, None, None);\n drawer.present();\n\n 'mainloop: loop {\n for event in context.event_pump().poll_iter() {\n match event {\n Event::Quit{..} |\n Event::KeyDown 
{keycode: KeyCode::Escape, ..} =>\n break 'mainloop,\n _ => {}\n }\n }\n }\n\n sdl2_image::quit();\n}\n<commit_msg>fix 'use'<commit_after>use std::path::Path;\nuse sdl2;\nuse sdl2_image::{self, LoadTexture, INIT_PNG, INIT_JPG};\nuse sdl2::event::Event;\nuse sdl2::keycode::KeyCode;\n\npub fn main(png: &Path) {\n\n let mut context = sdl2::init().video().unwrap();\n sdl2_image::init(INIT_PNG | INIT_JPG);\n let window = context.window(\"rust-sdl2 demo: Video\", 800, 600)\n .position_centered()\n .opengl()\n .build()\n .unwrap();\n\n let mut renderer = window.renderer().accelerated().build().unwrap();\n let mut texture = renderer.load_texture(png).unwrap();\n\n \/\/ Draws and shows the loaded texture.\n let mut drawer = renderer.drawer();\n drawer.copy(&mut texture, None, None);\n drawer.present();\n\n 'mainloop: loop {\n for event in context.event_pump().poll_iter() {\n match event {\n Event::Quit{..} |\n Event::KeyDown {keycode: KeyCode::Escape, ..} =>\n break 'mainloop,\n _ => {}\n }\n }\n }\n\n sdl2_image::quit();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add make_exp method to GroupCurve25519<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Started work on number_dialer module<commit_after>\nuse std::from_str::FromStr;\nuse opengl_graphics::Gl;\nuse piston::{\n RenderArgs,\n};\nuse color::Color;\nuse point::Point;\nuse rectangle;\nuse rectangle::RectangleState;\nuse widget::{\n Widget,\n NumberDialer,\n};\nuse ui_context::{\n UIID,\n UIContext,\n};\nuse mouse_state::{\n MouseState,\n Up,\n Down,\n};\nuse label;\nuse label::{\n FontSize,\n IsLabel,\n NoLabel,\n Label,\n};\nuse utils::clamp;\n\nwidget_state!(NumberDialerState, NumberDialerState {\n Normal -> 0,\n Highlighted -> 1,\n Clicked -> 2\n})\n\nimpl NumberDialerState {\n \/\/\/ Return the associated Rectangle state.\n fn as_rectangle_state(&self) -> RectangleState {\n match self {\n &Normal => rectangle::Normal,\n &Highlighted => rectangle::Highlighted,\n &Clicked => rectangle::Clicked,\n }\n }\n}\n\n\/\/\/ Draw the number_dialer. When successfully pressed,\n\/\/\/ or if the value is changed, the given `callback`\n\/\/\/ function will be called.\npub fn draw<T: Num + Copy + Primitive + FromPrimitive + ToPrimitive + ToString + FromStr>\n (args: &RenderArgs,\n gl: &mut Gl,\n uic: &mut UIContext,\n ui_id: UIID,\n pos: Point<f64>,\n font_size: FontSize,\n color: Color,\n label: IsLabel,\n value: T,\n min: T,\n max: T,\n precision: u8,\n callback: |T|) {\n let val = clamp(value, min, max);\n let state = get_state(uic, ui_id);\n let mouse = uic.get_mouse_state();\n let string = create_string(&label, val, precision);\n \/\/println!(\"{}\", string.as_slice());\n \n \/\/ TODO\n \/\/ determine rect dimensions.\n \/\/ draw rect.\n \/\/ draw string.\n \/\/ if being interacted with, determine which glyph captures the mouse.\n \/\/ determine new value by comparing previous state with new state.\n}\n\n\/\/\/ Create the string to be drawn from the given values\n\/\/\/ and precision. 
Combine this with the label string if\n\/\/\/ one is given.\nfn create_string<T: ToString>(label: &IsLabel, val: T, precision: u8) -> String {\n let label_string = match *label {\n NoLabel => String::new(),\n Label(ref text, _, _) => text.to_string().append(\": \"),\n };\n let mut val_string = val.to_string();\n match (val_string.as_slice().chars().position(|ch| ch == '.'), precision) {\n (None, 0u8) => label_string.append(val_string.as_slice()),\n (None, _) => {\n val_string.push_char('.');\n val_string.grow(precision as uint, '0');\n label_string.append(val_string.as_slice())\n },\n (Some(idx), 0u8) => {\n val_string.truncate(idx);\n label_string.append(val_string.as_slice())\n },\n (Some(idx), _) => {\n let (len, desired_len) = (val_string.len(), idx + precision as uint + 1u);\n match len.cmp(&desired_len) {\n Greater => val_string.truncate(desired_len),\n Equal => (),\n Less => val_string.grow(desired_len - len, '0'),\n }\n label_string.append(val_string.as_slice())\n },\n }\n}\n\n\/\/\/ Return a default Widget variant.\nfn default() -> Widget { NumberDialer(Normal) }\n\n\/\/\/ Get a reference to the widget associated with the given UIID.\nfn get_widget(uic: &mut UIContext, ui_id: UIID) -> &mut Widget {\n uic.get_widget(ui_id, default())\n}\n\n\/\/\/ Get the current SliderState for the widget.\nfn get_state(uic: &mut UIContext, ui_id: UIID) -> NumberDialerState {\n match *get_widget(uic, ui_id) {\n NumberDialer(state) => state,\n _ => fail!(\"The Widget variant returned by UIContext is different to the requested.\"),\n }\n}\n\n\/\/\/ Set the state for the widget in the UIContext.\nfn set_state(uic: &mut UIContext, ui_id: UIID, new_state: NumberDialerState) {\n match *get_widget(uic, ui_id) {\n NumberDialer(ref mut state) => { *state = new_state; },\n _ => fail!(\"The Widget variant returned by UIContext is different to the requested.\"),\n }\n}\n\n\n\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/ Ruby -> Toml translation primitives\n\nuse std::collections::BTreeMap;\n\nuse ruru::{Object, AnyObject, Class, RString, Fixnum, Float, Symbol, Hash, Array, VM};\nuse ruru::types::ValueType;\nuse toml::Value;\n\n\npub trait AsToml : Sized {\n fn as_toml(&self) -> Value;\n}\n\npub trait IntoToml : AsToml {\n fn into_toml(self) -> Value {\n self.as_toml()\n }\n}\nimpl<T: AsToml> IntoToml for T { }\n\nimpl AsToml for AnyObject {\n\n fn as_toml(&self) -> Value {\n match self.value().ty() {\n ValueType::None => {\n Value::Boolean(false)\n },\n ValueType::Object => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Class => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Module => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Float =>\n Value::Float(self.try_convert_to::<Float>().unwrap().to_f64()),\n ValueType::RString =>\n Value::String(self.try_convert_to::<RString>().unwrap().to_string()),\n ValueType::Regexp => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Array => {\n let vals = self\n .try_convert_to::<Array>()\n .unwrap()\n .into_iter()\n .map(|v| v.as_toml())\n .collect::<Vec<Value>>();\n\n Value::Array(vals)\n },\n ValueType::Hash => {\n let mut btm = BTreeMap::new();\n self.try_convert_to::<Hash>()\n .unwrap()\n .each(|key, value| {\n let key = match key.as_toml() {\n Value::String(s) => s,\n _ => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Can only have String or Symbol as Key for TOML maps\");\n String::new()\n }\n };\n let value = value.as_toml();\n btm.insert(key, value);\n });\n Value::Table(btm)\n },\n ValueType::Struct => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Bignum => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::File => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Data => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Match => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Complex => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Rational => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Nil => Value::Boolean(false),\n ValueType::True => 
Value::Boolean(true),\n ValueType::False => Value::Boolean(false),\n ValueType::Symbol => Value::String(self.try_convert_to::<Symbol>().unwrap().to_string()),\n ValueType::Fixnum => Value::Integer(self.try_convert_to::<Fixnum>().unwrap().to_i64()),\n ValueType::Undef => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Node => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::IClass => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Zombie => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Mask => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n }\n }\n\n}\n\n<commit_msg>Simplify ruby utils<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/ Ruby -> Toml translation primitives\n\nuse std::collections::BTreeMap;\n\nuse ruru::{Object, AnyObject, Class, RString, Fixnum, Float, Symbol, Hash, Array, VM};\nuse ruru::types::ValueType;\nuse toml::Value;\n\n\npub trait AsToml : Sized {\n fn as_toml(&self) -> Value;\n}\n\npub trait IntoToml : AsToml {\n fn into_toml(self) -> Value {\n self.as_toml()\n }\n}\nimpl<T: AsToml> IntoToml for T { }\n\nimpl AsToml for AnyObject {\n\n fn as_toml(&self) -> Value {\n match self.value().ty() {\n ValueType::None => {\n Value::Boolean(false)\n },\n ValueType::Object => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Class => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Module => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Float => self.try_convert_to::<Float>().unwrap().as_toml(),\n ValueType::RString => self.try_convert_to::<RString>().unwrap().as_toml(),\n ValueType::Regexp => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Array => self.try_convert_to::<Array>().unwrap().as_toml(),\n ValueType::Hash => 
self.try_convert_to::<Hash>().unwrap().as_toml(),\n ValueType::Struct => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Bignum => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::File => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Data => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Match => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Complex => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Rational => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Nil => Value::Boolean(false),\n ValueType::True => Value::Boolean(true),\n ValueType::False => Value::Boolean(false),\n ValueType::Symbol => self.try_convert_to::<Symbol>().unwrap().as_toml(),\n ValueType::Fixnum => self.try_convert_to::<Fixnum>().unwrap().as_toml(),\n ValueType::Undef => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Node => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::IClass => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Zombie => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n ValueType::Mask => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Cannot translate type '' to fit into TOML\");\n Value::Boolean(false)\n },\n }\n }\n\n}\n\nimpl AsToml for Hash {\n\n fn as_toml(&self) -> Value {\n let mut btm = BTreeMap::new();\n self.try_convert_to::<Hash>()\n .unwrap()\n .each(|key, value| {\n let key = match key.as_toml() {\n Value::String(s) => s,\n _ => {\n let rte = Class::from_existing(\"TypeError\");\n VM::raise(rte, \"Can only have String or Symbol as Key for TOML maps\");\n String::new()\n }\n };\n let value = value.as_toml();\n btm.insert(key, value);\n });\n Value::Table(btm)\n }\n\n}\n\nimpl AsToml for Array {\n\n fn as_toml(&self) -> Value {\n let vals = self\n .try_convert_to::<Array>()\n .unwrap()\n .into_iter()\n .map(|v| v.as_toml())\n .collect::<Vec<Value>>();\n\n Value::Array(vals)\n }\n\n}\n\nimpl AsToml for RString {\n\n fn as_toml(&self) -> Value {\n Value::String(self.try_convert_to::<RString>().unwrap().to_string())\n }\n\n}\n\nimpl AsToml for Float {\n\n fn as_toml(&self) -> Value {\n Value::Float(self.try_convert_to::<Float>().unwrap().to_f64())\n }\n\n}\n\nimpl AsToml for Symbol {\n\n fn as_toml(&self) -> Value {\n Value::String(self.try_convert_to::<Symbol>().unwrap().to_string())\n }\n\n}\n\nimpl AsToml for Fixnum {\n\n fn as_toml(&self) -> Value {\n 
Value::Integer(self.try_convert_to::<Fixnum>().unwrap().to_i64())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\nuse dom::activation::Activatable;\nuse dom::attr::AttrValue;\nuse dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;\nuse dom::bindings::codegen::Bindings::HTMLAnchorElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLAnchorElementBinding::HTMLAnchorElementMethods;\nuse dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;\nuse dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;\nuse dom::bindings::codegen::InheritTypes::{HTMLAnchorElementDerived, HTMLImageElementDerived};\nuse dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast};\nuse dom::bindings::codegen::InheritTypes::{MouseEventCast, NodeCast};\nuse dom::bindings::js::{JS, MutNullableHeap, Root};\nuse dom::document::{Document, DocumentHelpers};\nuse dom::domtokenlist::DOMTokenList;\nuse dom::element::{Element, AttributeHandlers, ElementTypeId};\nuse dom::event::Event;\nuse dom::eventtarget::{EventTarget, EventTargetTypeId};\nuse dom::htmlelement::{HTMLElement, HTMLElementTypeId};\nuse dom::node::{Node, NodeHelpers, NodeTypeId, document_from_node, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse dom::window::WindowHelpers;\n\nuse num::ToPrimitive;\nuse std::default::Default;\nuse string_cache::Atom;\nuse util::str::DOMString;\n\n#[dom_struct]\npub struct HTMLAnchorElement {\n htmlelement: HTMLElement,\n rel_list: MutNullableHeap<JS<DOMTokenList>>,\n}\n\nimpl HTMLAnchorElementDerived for EventTarget {\n fn is_htmlanchorelement(&self) -> bool {\n *self.type_id() ==\n EventTargetTypeId::Node(\n NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)))\n }\n}\n\nimpl HTMLAnchorElement {\n fn new_inherited(localName: DOMString,\n prefix: Option<DOMString>,\n document: &Document) -> HTMLAnchorElement {\n HTMLAnchorElement {\n htmlelement:\n HTMLElement::new_inherited(HTMLElementTypeId::HTMLAnchorElement, localName, prefix, document),\n rel_list: Default::default(),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString,\n prefix: Option<DOMString>,\n document: &Document) -> Root<HTMLAnchorElement> {\n let element = HTMLAnchorElement::new_inherited(localName, prefix, document);\n Node::reflect_node(box element, document, HTMLAnchorElementBinding::Wrap)\n }\n}\n\nimpl<'a> VirtualMethods for &'a HTMLAnchorElement {\n fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {\n let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {\n match name {\n &atom!(\"rel\") => AttrValue::from_serialized_tokenlist(value),\n _ => self.super_type().unwrap().parse_plain_attribute(name, value),\n }\n }\n}\n\nimpl<'a> HTMLAnchorElementMethods for &'a HTMLAnchorElement {\n fn Text(self) -> DOMString {\n let node = NodeCast::from_ref(self);\n node.GetTextContent().unwrap()\n }\n\n fn SetText(self, value: DOMString) {\n let node = NodeCast::from_ref(self);\n node.SetTextContent(Some(value))\n }\n\n fn RelList(self) -> Root<DOMTokenList> {\n self.rel_list.or_init(|| {\n DOMTokenList::new(ElementCast::from_ref(self), &atom!(\"rel\"))\n })\n }\n}\n\nimpl<'a> Activatable 
for &'a HTMLAnchorElement {\n fn as_element<'b>(&'b self) -> &'b Element {\n ElementCast::from_ref(*self)\n }\n\n fn is_instance_activatable(&self) -> bool {\n true\n }\n\n\n \/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n fn pre_click_activation(&self) {\n }\n\n \/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/#run-canceled-activation-steps\n fn canceled_activation(&self) {\n }\n\n \/\/https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element:activation-behaviour\n fn activation_behavior(&self, event: &Event, target: &EventTarget) {\n \/\/Step 1. If the node document is not fully active, abort.\n let doc = document_from_node(*self);\n if !doc.r().is_fully_active() {\n return;\n }\n \/\/TODO: Step 2. Check if browsing context is specified and act accordingly.\n \/\/Step 3. Handle <img ismap\/>.\n let element = ElementCast::from_ref(*self);\n let mouse_event = MouseEventCast::to_ref(event).unwrap();\n let mut ismap_suffix = None;\n if let Some(element) = ElementCast::to_ref(target) {\n if target.is_htmlimageelement() && element.has_attribute(&atom!(\"ismap\")) {\n\n let target_node = NodeCast::to_ref(target).unwrap();\n let rect = window_from_node(target_node).r().content_box_query(\n target_node.to_trusted_node_address());\n ismap_suffix = Some(\n format!(\"?{},{}\", mouse_event.ClientX().to_f32().unwrap() - rect.origin.x.to_f32_px(),\n mouse_event.ClientY().to_f32().unwrap() - rect.origin.y.to_f32_px())\n )\n }\n }\n\n \/\/TODO: Step 4. Download the link is `download` attribute is set.\n\n let attr = element.get_attribute(&ns!(\"\"), &atom!(\"href\"));\n match attr {\n Some(ref href) => {\n let value = href.r().Value() + ismap_suffix.as_ref().map(|s| &**s).unwrap_or(\"\");\n debug!(\"clicked on link to {}\", value);\n doc.r().load_anchor_href(value);\n }\n None => ()\n }\n }\n\n \/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n fn implicit_submission(&self, _ctrlKey: bool, _shiftKey: bool, _altKey: bool, _metaKey: bool) {\n }\n}\n<commit_msg>Remove an avoidable null-check from HTMLAnchorElement::activation_behavior.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n\nuse dom::activation::Activatable;\nuse dom::attr::AttrValue;\nuse dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;\nuse dom::bindings::codegen::Bindings::HTMLAnchorElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLAnchorElementBinding::HTMLAnchorElementMethods;\nuse dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;\nuse dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;\nuse dom::bindings::codegen::InheritTypes::{HTMLAnchorElementDerived, HTMLImageElementDerived};\nuse dom::bindings::codegen::InheritTypes::{ElementCast, HTMLElementCast};\nuse dom::bindings::codegen::InheritTypes::{MouseEventCast, NodeCast};\nuse dom::bindings::js::{JS, MutNullableHeap, Root};\nuse dom::document::{Document, DocumentHelpers};\nuse dom::domtokenlist::DOMTokenList;\nuse dom::element::{Element, AttributeHandlers, ElementTypeId};\nuse dom::event::Event;\nuse dom::eventtarget::{EventTarget, EventTargetTypeId};\nuse dom::htmlelement::{HTMLElement, HTMLElementTypeId};\nuse dom::node::{Node, NodeHelpers, NodeTypeId, document_from_node, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse dom::window::WindowHelpers;\n\nuse num::ToPrimitive;\nuse std::default::Default;\nuse string_cache::Atom;\nuse util::str::DOMString;\n\n#[dom_struct]\npub struct HTMLAnchorElement {\n htmlelement: HTMLElement,\n rel_list: MutNullableHeap<JS<DOMTokenList>>,\n}\n\nimpl HTMLAnchorElementDerived for EventTarget {\n fn is_htmlanchorelement(&self) -> bool {\n *self.type_id() ==\n EventTargetTypeId::Node(\n NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)))\n }\n}\n\nimpl HTMLAnchorElement {\n fn new_inherited(localName: DOMString,\n prefix: Option<DOMString>,\n document: &Document) -> HTMLAnchorElement {\n HTMLAnchorElement {\n htmlelement:\n HTMLElement::new_inherited(HTMLElementTypeId::HTMLAnchorElement, localName, prefix, document),\n rel_list: Default::default(),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString,\n prefix: Option<DOMString>,\n document: &Document) -> Root<HTMLAnchorElement> {\n let element = HTMLAnchorElement::new_inherited(localName, prefix, document);\n Node::reflect_node(box element, document, HTMLAnchorElementBinding::Wrap)\n }\n}\n\nimpl<'a> VirtualMethods for &'a HTMLAnchorElement {\n fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {\n let htmlelement: &&HTMLElement = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {\n match name {\n &atom!(\"rel\") => AttrValue::from_serialized_tokenlist(value),\n _ => self.super_type().unwrap().parse_plain_attribute(name, value),\n }\n }\n}\n\nimpl<'a> HTMLAnchorElementMethods for &'a HTMLAnchorElement {\n fn Text(self) -> DOMString {\n let node = NodeCast::from_ref(self);\n node.GetTextContent().unwrap()\n }\n\n fn SetText(self, value: DOMString) {\n let node = NodeCast::from_ref(self);\n node.SetTextContent(Some(value))\n }\n\n fn RelList(self) -> Root<DOMTokenList> {\n self.rel_list.or_init(|| {\n DOMTokenList::new(ElementCast::from_ref(self), &atom!(\"rel\"))\n })\n }\n}\n\nimpl<'a> Activatable for &'a HTMLAnchorElement {\n fn as_element<'b>(&'b self) -> &'b Element {\n ElementCast::from_ref(*self)\n }\n\n fn is_instance_activatable(&self) -> bool {\n true\n }\n\n\n \/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n fn pre_click_activation(&self) {\n }\n\n 
\/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/#run-canceled-activation-steps\n fn canceled_activation(&self) {\n }\n\n \/\/https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element:activation-behaviour\n fn activation_behavior(&self, event: &Event, target: &EventTarget) {\n \/\/Step 1. If the node document is not fully active, abort.\n let doc = document_from_node(*self);\n if !doc.r().is_fully_active() {\n return;\n }\n \/\/TODO: Step 2. Check if browsing context is specified and act accordingly.\n \/\/Step 3. Handle <img ismap\/>.\n let element = ElementCast::from_ref(*self);\n let mouse_event = MouseEventCast::to_ref(event).unwrap();\n let mut ismap_suffix = None;\n if let Some(element) = ElementCast::to_ref(target) {\n if target.is_htmlimageelement() && element.has_attribute(&atom!(\"ismap\")) {\n\n let target_node = NodeCast::from_ref(element);\n let rect = window_from_node(target_node).r().content_box_query(\n target_node.to_trusted_node_address());\n ismap_suffix = Some(\n format!(\"?{},{}\", mouse_event.ClientX().to_f32().unwrap() - rect.origin.x.to_f32_px(),\n mouse_event.ClientY().to_f32().unwrap() - rect.origin.y.to_f32_px())\n )\n }\n }\n\n \/\/TODO: Step 4. Download the link is `download` attribute is set.\n\n let attr = element.get_attribute(&ns!(\"\"), &atom!(\"href\"));\n match attr {\n Some(ref href) => {\n let value = href.r().Value() + ismap_suffix.as_ref().map(|s| &**s).unwrap_or(\"\");\n debug!(\"clicked on link to {}\", value);\n doc.r().load_anchor_href(value);\n }\n None => ()\n }\n }\n\n \/\/TODO:https:\/\/html.spec.whatwg.org\/multipage\/#the-a-element\n fn implicit_submission(&self, _ctrlKey: bool, _shiftKey: bool, _altKey: bool, _metaKey: bool) {\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Improves report display<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A module for working with borrowed data.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse core::cmp::Ordering;\nuse core::hash::{Hash, Hasher};\nuse core::ops::{Add, AddAssign, Deref};\n\nuse fmt;\nuse string::String;\n\nuse self::Cow::*;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::borrow::{Borrow, BorrowMut};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>\n where B: ToOwned,\n <B as ToOwned>::Owned: 'a\n{\n fn borrow(&self) -> &B {\n &**self\n }\n}\n\n\/\/\/ A generalization of `Clone` to borrowed data.\n\/\/\/\n\/\/\/ Some types make it possible to go from borrowed to owned, usually by\n\/\/\/ implementing the `Clone` trait. But `Clone` works only for going from `&T`\n\/\/\/ to `T`. 
The `ToOwned` trait generalizes `Clone` to construct owned data\n\/\/\/ from any borrow of a given type.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait ToOwned {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n type Owned: Borrow<Self>;\n\n \/\/\/ Creates owned data from borrowed data, usually by cloning.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Basic usage:\n \/\/\/\n \/\/\/ ```\n \/\/\/ let s: &str = \"a\";\n \/\/\/ let ss: String = s.to_owned();\n \/\/\/\n \/\/\/ let v: &[i32] = &[1, 2];\n \/\/\/ let vv: Vec<i32> = v.to_owned();\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn to_owned(&self) -> Self::Owned;\n\n \/\/\/ Uses borrowed data to replace owned data, usually by cloning.\n \/\/\/\n \/\/\/ This is borrow-generalized version of `Clone::clone_from`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Basic usage:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # #![feature(toowned_clone_into)]\n \/\/\/ let mut s: String = String::new();\n \/\/\/ \"hello\".clone_into(&mut s);\n \/\/\/\n \/\/\/ let mut v: Vec<i32> = Vec::new();\n \/\/\/ [1, 2][..].clone_into(&mut v);\n \/\/\/ ```\n #[unstable(feature = \"toowned_clone_into\",\n reason = \"recently added\",\n issue = \"41263\")]\n fn clone_into(&self, target: &mut Self::Owned) {\n *target = self.to_owned();\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T> ToOwned for T\n where T: Clone\n{\n type Owned = T;\n fn to_owned(&self) -> T {\n self.clone()\n }\n\n fn clone_into(&self, target: &mut T) {\n target.clone_from(self);\n }\n}\n\n\/\/\/ A clone-on-write smart pointer.\n\/\/\/\n\/\/\/ The type `Cow` is a smart pointer providing clone-on-write functionality: it\n\/\/\/ can enclose and provide immutable access to borrowed data, and clone the\n\/\/\/ data lazily when mutation or ownership is required. The type is designed to\n\/\/\/ work with general borrowed data via the `Borrow` trait.\n\/\/\/\n\/\/\/ `Cow` implements `Deref`, which means that you can call\n\/\/\/ non-mutating methods directly on the data it encloses. 
If mutation\n\/\/\/ is desired, `to_mut` will obtain a mutable reference to an owned\n\/\/\/ value, cloning if necessary.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::borrow::Cow;\n\/\/\/\n\/\/\/ fn abs_all(input: &mut Cow<[i32]>) {\n\/\/\/ for i in 0..input.len() {\n\/\/\/ let v = input[i];\n\/\/\/ if v < 0 {\n\/\/\/ \/\/ Clones into a vector if not already owned.\n\/\/\/ input.to_mut()[i] = -v;\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ \/\/ No clone occurs because `input` doesn't need to be mutated.\n\/\/\/ let slice = [0, 1, 2];\n\/\/\/ let mut input = Cow::from(&slice[..]);\n\/\/\/ abs_all(&mut input);\n\/\/\/\n\/\/\/ \/\/ Clone occurs because `input` needs to be mutated.\n\/\/\/ let slice = [-1, 0, 1];\n\/\/\/ let mut input = Cow::from(&slice[..]);\n\/\/\/ abs_all(&mut input);\n\/\/\/\n\/\/\/ \/\/ No clone occurs because `input` is already owned.\n\/\/\/ let mut input = Cow::from(vec![-1, 0, 1]);\n\/\/\/ abs_all(&mut input);\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Cow<'a, B: ?Sized + 'a>\n where B: ToOwned\n{\n \/\/\/ Borrowed data.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Borrowed(#[stable(feature = \"rust1\", since = \"1.0.0\")]\n &'a B),\n\n \/\/\/ Owned data.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Owned(#[stable(feature = \"rust1\", since = \"1.0.0\")]\n <B as ToOwned>::Owned),\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Clone for Cow<'a, B>\n where B: ToOwned\n{\n fn clone(&self) -> Cow<'a, B> {\n match *self {\n Borrowed(b) => Borrowed(b),\n Owned(ref o) => {\n let b: &B = o.borrow();\n Owned(b.to_owned())\n }\n }\n }\n\n fn clone_from(&mut self, source: &Cow<'a, B>) {\n if let Owned(ref mut dest) = *self {\n if let Owned(ref o) = *source {\n o.borrow().clone_into(dest);\n return;\n }\n }\n\n *self = source.clone();\n }\n}\n\nimpl<'a, B: ?Sized> Cow<'a, B>\n where B: ToOwned\n{\n \/\/\/ Acquires a mutable reference to the owned form of the data.\n \/\/\/\n \/\/\/ Clones the data if it is not already owned.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Cow;\n \/\/\/\n \/\/\/ let mut cow: Cow<[_]> = Cow::Owned(vec![1, 2, 3]);\n \/\/\/\n \/\/\/ let hello = cow.to_mut();\n \/\/\/\n \/\/\/ assert_eq!(hello, &[1, 2, 3]);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {\n match *self {\n Borrowed(borrowed) => {\n *self = Owned(borrowed.to_owned());\n match *self {\n Borrowed(..) 
=> unreachable!(),\n Owned(ref mut owned) => owned,\n }\n }\n Owned(ref mut owned) => owned,\n }\n }\n\n \/\/\/ Extracts the owned data.\n \/\/\/\n \/\/\/ Clones the data if it is not already owned.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Cow;\n \/\/\/\n \/\/\/ let cow: Cow<[_]> = Cow::Owned(vec![1, 2, 3]);\n \/\/\/\n \/\/\/ let hello = cow.into_owned();\n \/\/\/\n \/\/\/ assert_eq!(vec![1, 2, 3], hello);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn into_owned(self) -> <B as ToOwned>::Owned {\n match self {\n Borrowed(borrowed) => borrowed.to_owned(),\n Owned(owned) => owned,\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Deref for Cow<'a, B>\n where B: ToOwned\n{\n type Target = B;\n\n fn deref(&self) -> &B {\n match *self {\n Borrowed(borrowed) => borrowed,\n Owned(ref owned) => owned.borrow(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Ord for Cow<'a, B>\n where B: Ord + ToOwned\n{\n #[inline]\n fn cmp(&self, other: &Cow<'a, B>) -> Ordering {\n Ord::cmp(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>\n where B: PartialEq<C> + ToOwned,\n C: ToOwned\n{\n #[inline]\n fn eq(&self, other: &Cow<'b, C>) -> bool {\n PartialEq::eq(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> PartialOrd for Cow<'a, B>\n where B: PartialOrd + ToOwned\n{\n #[inline]\n fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {\n PartialOrd::partial_cmp(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> fmt::Debug for Cow<'a, B>\n where B: fmt::Debug + ToOwned,\n <B as ToOwned>::Owned: fmt::Debug\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Borrowed(ref b) => fmt::Debug::fmt(b, f),\n Owned(ref o) => fmt::Debug::fmt(o, f),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> fmt::Display for Cow<'a, B>\n where B: fmt::Display + ToOwned,\n <B as ToOwned>::Owned: fmt::Display\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Borrowed(ref b) => fmt::Display::fmt(b, f),\n Owned(ref o) => fmt::Display::fmt(o, f),\n }\n }\n}\n\n#[stable(feature = \"default\", since = \"1.11.0\")]\nimpl<'a, B: ?Sized> Default for Cow<'a, B>\n where B: ToOwned,\n <B as ToOwned>::Owned: Default\n{\n \/\/\/ Creates an owned Cow<'a, B> with the default value for the contained owned value.\n fn default() -> Cow<'a, B> {\n Owned(<B as ToOwned>::Owned::default())\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Hash for Cow<'a, B>\n where B: Hash + ToOwned\n{\n #[inline]\n fn hash<H: Hasher>(&self, state: &mut H) {\n Hash::hash(&**self, state)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\nimpl<'a, T: ?Sized + ToOwned> AsRef<T> for Cow<'a, T> {\n fn as_ref(&self) -> &T {\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> Add<&'a str> for Cow<'a, str> {\n type Output = Cow<'a, str>;\n\n #[inline]\n fn add(mut self, rhs: &'a str) -> Self::Output {\n self += rhs;\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> Add<Cow<'a, str>> for Cow<'a, str> {\n type Output = 
Cow<'a, str>;\n\n #[inline]\n fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {\n self += rhs;\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> AddAssign<&'a str> for Cow<'a, str> {\n fn add_assign(&mut self, rhs: &'a str) {\n if self.is_empty() {\n *self = Cow::Borrowed(rhs)\n } else if rhs.is_empty() {\n return;\n } else {\n if let Cow::Borrowed(lhs) = *self {\n let mut s = String::with_capacity(lhs.len() + rhs.len());\n s.push_str(lhs);\n *self = Cow::Owned(s);\n }\n self.to_mut().push_str(rhs);\n }\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {\n fn add_assign(&mut self, rhs: Cow<'a, str>) {\n if self.is_empty() {\n *self = rhs\n } else if rhs.is_empty() {\n return;\n } else {\n if let Cow::Borrowed(lhs) = *self {\n let mut s = String::with_capacity(lhs.len() + rhs.len());\n s.push_str(lhs);\n *self = Cow::Owned(s);\n }\n self.to_mut().push_str(&rhs);\n }\n }\n}\n<commit_msg>Improve doc examples for `Cow::into_owned`.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A module for working with borrowed data.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse core::cmp::Ordering;\nuse core::hash::{Hash, Hasher};\nuse core::ops::{Add, AddAssign, Deref};\n\nuse fmt;\nuse string::String;\n\nuse self::Cow::*;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::borrow::{Borrow, BorrowMut};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>\n where B: ToOwned,\n <B as ToOwned>::Owned: 'a\n{\n fn borrow(&self) -> &B {\n &**self\n }\n}\n\n\/\/\/ A generalization of `Clone` to borrowed data.\n\/\/\/\n\/\/\/ Some types make it possible to go from borrowed to owned, usually by\n\/\/\/ implementing the `Clone` trait. But `Clone` works only for going from `&T`\n\/\/\/ to `T`. 
The `ToOwned` trait generalizes `Clone` to construct owned data\n\/\/\/ from any borrow of a given type.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait ToOwned {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n type Owned: Borrow<Self>;\n\n \/\/\/ Creates owned data from borrowed data, usually by cloning.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Basic usage:\n \/\/\/\n \/\/\/ ```\n \/\/\/ let s: &str = \"a\";\n \/\/\/ let ss: String = s.to_owned();\n \/\/\/\n \/\/\/ let v: &[i32] = &[1, 2];\n \/\/\/ let vv: Vec<i32> = v.to_owned();\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn to_owned(&self) -> Self::Owned;\n\n \/\/\/ Uses borrowed data to replace owned data, usually by cloning.\n \/\/\/\n \/\/\/ This is borrow-generalized version of `Clone::clone_from`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Basic usage:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # #![feature(toowned_clone_into)]\n \/\/\/ let mut s: String = String::new();\n \/\/\/ \"hello\".clone_into(&mut s);\n \/\/\/\n \/\/\/ let mut v: Vec<i32> = Vec::new();\n \/\/\/ [1, 2][..].clone_into(&mut v);\n \/\/\/ ```\n #[unstable(feature = \"toowned_clone_into\",\n reason = \"recently added\",\n issue = \"41263\")]\n fn clone_into(&self, target: &mut Self::Owned) {\n *target = self.to_owned();\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T> ToOwned for T\n where T: Clone\n{\n type Owned = T;\n fn to_owned(&self) -> T {\n self.clone()\n }\n\n fn clone_into(&self, target: &mut T) {\n target.clone_from(self);\n }\n}\n\n\/\/\/ A clone-on-write smart pointer.\n\/\/\/\n\/\/\/ The type `Cow` is a smart pointer providing clone-on-write functionality: it\n\/\/\/ can enclose and provide immutable access to borrowed data, and clone the\n\/\/\/ data lazily when mutation or ownership is required. The type is designed to\n\/\/\/ work with general borrowed data via the `Borrow` trait.\n\/\/\/\n\/\/\/ `Cow` implements `Deref`, which means that you can call\n\/\/\/ non-mutating methods directly on the data it encloses. 
If mutation\n\/\/\/ is desired, `to_mut` will obtain a mutable reference to an owned\n\/\/\/ value, cloning if necessary.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::borrow::Cow;\n\/\/\/\n\/\/\/ fn abs_all(input: &mut Cow<[i32]>) {\n\/\/\/ for i in 0..input.len() {\n\/\/\/ let v = input[i];\n\/\/\/ if v < 0 {\n\/\/\/ \/\/ Clones into a vector if not already owned.\n\/\/\/ input.to_mut()[i] = -v;\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ \/\/ No clone occurs because `input` doesn't need to be mutated.\n\/\/\/ let slice = [0, 1, 2];\n\/\/\/ let mut input = Cow::from(&slice[..]);\n\/\/\/ abs_all(&mut input);\n\/\/\/\n\/\/\/ \/\/ Clone occurs because `input` needs to be mutated.\n\/\/\/ let slice = [-1, 0, 1];\n\/\/\/ let mut input = Cow::from(&slice[..]);\n\/\/\/ abs_all(&mut input);\n\/\/\/\n\/\/\/ \/\/ No clone occurs because `input` is already owned.\n\/\/\/ let mut input = Cow::from(vec![-1, 0, 1]);\n\/\/\/ abs_all(&mut input);\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub enum Cow<'a, B: ?Sized + 'a>\n where B: ToOwned\n{\n \/\/\/ Borrowed data.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Borrowed(#[stable(feature = \"rust1\", since = \"1.0.0\")]\n &'a B),\n\n \/\/\/ Owned data.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Owned(#[stable(feature = \"rust1\", since = \"1.0.0\")]\n <B as ToOwned>::Owned),\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Clone for Cow<'a, B>\n where B: ToOwned\n{\n fn clone(&self) -> Cow<'a, B> {\n match *self {\n Borrowed(b) => Borrowed(b),\n Owned(ref o) => {\n let b: &B = o.borrow();\n Owned(b.to_owned())\n }\n }\n }\n\n fn clone_from(&mut self, source: &Cow<'a, B>) {\n if let Owned(ref mut dest) = *self {\n if let Owned(ref o) = *source {\n o.borrow().clone_into(dest);\n return;\n }\n }\n\n *self = source.clone();\n }\n}\n\nimpl<'a, B: ?Sized> Cow<'a, B>\n where B: ToOwned\n{\n \/\/\/ Acquires a mutable reference to the owned form of the data.\n \/\/\/\n \/\/\/ Clones the data if it is not already owned.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Cow;\n \/\/\/\n \/\/\/ let mut cow: Cow<[_]> = Cow::Owned(vec![1, 2, 3]);\n \/\/\/\n \/\/\/ let hello = cow.to_mut();\n \/\/\/\n \/\/\/ assert_eq!(hello, &[1, 2, 3]);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {\n match *self {\n Borrowed(borrowed) => {\n *self = Owned(borrowed.to_owned());\n match *self {\n Borrowed(..) 
=> unreachable!(),\n Owned(ref mut owned) => owned,\n }\n }\n Owned(ref mut owned) => owned,\n }\n }\n\n \/\/\/ Extracts the owned data.\n \/\/\/\n \/\/\/ Clones the data if it is not already owned.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Calling `into_owned` on a `Cow::Borrowed` clones the underlying data\n \/\/\/ and becomes a `Cow::Owned`:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Cow;\n \/\/\/\n \/\/\/ let s = \"Hello world!\";\n \/\/\/ let cow = Cow::Borrowed(s);\n \/\/\/\n \/\/\/ assert_eq!(\n \/\/\/ cow.into_owned(),\n \/\/\/ Cow::Owned(String::from(s))\n \/\/\/ );\n \/\/\/ ```\n \/\/\/\n \/\/\/ Calling `into_owned` on a `Cow::Owned` is a no-op:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Cow;\n \/\/\/\n \/\/\/ let s = \"Hello world!\";\n \/\/\/ let cow: Cow<str> = Cow::Owned(String::from(s));\n \/\/\/\n \/\/\/ assert_eq!(\n \/\/\/ cow.into_owned(),\n \/\/\/ Cow::Owned(String::from(s))\n \/\/\/ );\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn into_owned(self) -> <B as ToOwned>::Owned {\n match self {\n Borrowed(borrowed) => borrowed.to_owned(),\n Owned(owned) => owned,\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Deref for Cow<'a, B>\n where B: ToOwned\n{\n type Target = B;\n\n fn deref(&self) -> &B {\n match *self {\n Borrowed(borrowed) => borrowed,\n Owned(ref owned) => owned.borrow(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Ord for Cow<'a, B>\n where B: Ord + ToOwned\n{\n #[inline]\n fn cmp(&self, other: &Cow<'a, B>) -> Ordering {\n Ord::cmp(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>\n where B: PartialEq<C> + ToOwned,\n C: ToOwned\n{\n #[inline]\n fn eq(&self, other: &Cow<'b, C>) -> bool {\n PartialEq::eq(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> PartialOrd for Cow<'a, B>\n where B: PartialOrd + ToOwned\n{\n #[inline]\n fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {\n PartialOrd::partial_cmp(&**self, &**other)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> fmt::Debug for Cow<'a, B>\n where B: fmt::Debug + ToOwned,\n <B as ToOwned>::Owned: fmt::Debug\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Borrowed(ref b) => fmt::Debug::fmt(b, f),\n Owned(ref o) => fmt::Debug::fmt(o, f),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> fmt::Display for Cow<'a, B>\n where B: fmt::Display + ToOwned,\n <B as ToOwned>::Owned: fmt::Display\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Borrowed(ref b) => fmt::Display::fmt(b, f),\n Owned(ref o) => fmt::Display::fmt(o, f),\n }\n }\n}\n\n#[stable(feature = \"default\", since = \"1.11.0\")]\nimpl<'a, B: ?Sized> Default for Cow<'a, B>\n where B: ToOwned,\n <B as ToOwned>::Owned: Default\n{\n \/\/\/ Creates an owned Cow<'a, B> with the default value for the contained owned value.\n fn default() -> Cow<'a, B> {\n Owned(<B as ToOwned>::Owned::default())\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, B: ?Sized> Hash for Cow<'a, B>\n where B: Hash + ToOwned\n{\n #[inline]\n fn hash<H: Hasher>(&self, state: &mut H) {\n Hash::hash(&**self, state)\n }\n}\n\n#[stable(feature = 
\"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\nimpl<'a, T: ?Sized + ToOwned> AsRef<T> for Cow<'a, T> {\n fn as_ref(&self) -> &T {\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> Add<&'a str> for Cow<'a, str> {\n type Output = Cow<'a, str>;\n\n #[inline]\n fn add(mut self, rhs: &'a str) -> Self::Output {\n self += rhs;\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> Add<Cow<'a, str>> for Cow<'a, str> {\n type Output = Cow<'a, str>;\n\n #[inline]\n fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {\n self += rhs;\n self\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> AddAssign<&'a str> for Cow<'a, str> {\n fn add_assign(&mut self, rhs: &'a str) {\n if self.is_empty() {\n *self = Cow::Borrowed(rhs)\n } else if rhs.is_empty() {\n return;\n } else {\n if let Cow::Borrowed(lhs) = *self {\n let mut s = String::with_capacity(lhs.len() + rhs.len());\n s.push_str(lhs);\n *self = Cow::Owned(s);\n }\n self.to_mut().push_str(rhs);\n }\n }\n}\n\n#[stable(feature = \"cow_add\", since = \"1.14.0\")]\nimpl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {\n fn add_assign(&mut self, rhs: Cow<'a, str>) {\n if self.is_empty() {\n *self = rhs\n } else if rhs.is_empty() {\n return;\n } else {\n if let Cow::Borrowed(lhs) = *self {\n let mut s = String::with_capacity(lhs.len() + rhs.len());\n s.push_str(lhs);\n *self = Cow::Owned(s);\n }\n self.to_mut().push_str(&rhs);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move the vertex buffer creation from RendererVk::new to ::flush<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Configuration options for a single run of the servo application. Created\n\/\/! from command line arguments.\n\nuse geometry::ScreenPx;\n\nuse geom::scale_factor::ScaleFactor;\nuse geom::size::TypedSize2D;\nuse layers::geometry::DevicePixel;\nuse getopts;\nuse std::collections::HashSet;\nuse std::cmp;\nuse std::io;\nuse std::mem;\nuse std::os;\nuse std::ptr;\nuse std::rt;\n\n#[deriving(Clone)]\npub enum RenderApi {\n OpenGL,\n Mesa,\n}\n\n\/\/\/ Global flags for Servo, currently set on the command line.\n#[deriving(Clone)]\npub struct Opts {\n \/\/\/ The initial URLs to load.\n pub urls: Vec<String>,\n\n \/\/\/ How many threads to use for CPU rendering (`-t`).\n \/\/\/\n \/\/\/ FIXME(pcwalton): This is not currently used. All rendering is sequential.\n pub n_render_threads: uint,\n\n \/\/\/ True to use GPU painting via Skia-GL, false to use CPU painting via Skia (`-g`). Note that\n \/\/\/ compositing is always done on the GPU.\n pub gpu_painting: bool,\n\n \/\/\/ The maximum size of each tile in pixels (`-s`).\n pub tile_size: uint,\n\n \/\/\/ The ratio of device pixels per px at the default scale. 
If unspecified, will use the\n \/\/\/ platform default setting.\n pub device_pixels_per_px: Option<ScaleFactor<ScreenPx, DevicePixel, f32>>,\n\n \/\/\/ `None` to disable the time profiler or `Some` with an interval in seconds to enable it and\n \/\/\/ cause it to produce output on that interval (`-p`).\n pub time_profiler_period: Option<f64>,\n\n \/\/\/ `None` to disable the memory profiler or `Some` with an interval in seconds to enable it\n \/\/\/ and cause it to produce output on that interval (`-m`).\n pub memory_profiler_period: Option<f64>,\n\n \/\/\/ Enable experimental web features (`-e`).\n pub enable_experimental: bool,\n\n \/\/\/ The number of threads to use for layout (`-y`). Defaults to 1, which results in a recursive\n \/\/\/ sequential algorithm.\n pub layout_threads: uint,\n\n pub nonincremental_layout: bool,\n\n pub output_file: Option<String>,\n pub headless: bool,\n pub hard_fail: bool,\n\n \/\/\/ True if we should bubble intrinsic widths sequentially (`-b`). If this is true, then\n \/\/\/ intrinsic widths are computed as a separate pass instead of during flow construction. You\n \/\/\/ may wish to turn this flag on in order to benchmark style recalculation against other\n \/\/\/ browser engines.\n pub bubble_inline_sizes_separately: bool,\n\n \/\/\/ True if we should show borders on all layers and tiles for\n \/\/\/ debugging purposes (`--show-debug-borders`).\n pub show_debug_borders: bool,\n\n \/\/\/ True if we should show borders on all fragments for debugging purposes (`--show-debug-fragment-borders`).\n pub show_debug_fragment_borders: bool,\n\n \/\/\/ If set with --disable-text-aa, disable antialiasing on fonts. This is primarily useful for reftests\n \/\/\/ where pixel perfect results are required when using fonts such as the Ahem\n \/\/\/ font for layout tests.\n pub enable_text_antialiasing: bool,\n\n \/\/\/ True if each step of layout is traced to an external JSON file\n \/\/\/ for debugging purposes. Settings this implies sequential layout\n \/\/\/ and render.\n pub trace_layout: bool,\n\n \/\/\/ If true, instrument the runtime for each task created and dump\n \/\/\/ that information to a JSON file that can be viewed in the task\n \/\/\/ profile viewer.\n pub profile_tasks: bool,\n\n \/\/\/ `None` to disable devtools or `Some` with a port number to start a server to listen to\n \/\/\/ remote Firefox devtools connections.\n pub devtools_port: Option<u16>,\n\n \/\/\/ The initial requested size of the window.\n pub initial_window_size: TypedSize2D<ScreenPx, uint>,\n\n \/\/\/ An optional string allowing the user agent to be set for testing.\n pub user_agent: Option<String>,\n\n \/\/\/ Dumps the flow tree after a layout.\n pub dump_flow_tree: bool,\n\n \/\/\/ Whether to show an error when display list geometry escapes flow overflow regions.\n pub validate_display_list_geometry: bool,\n\n pub render_api: RenderApi,\n}\n\nfn print_usage(app: &str, opts: &[getopts::OptGroup]) {\n let message = format!(\"Usage: {} [ options ... 
] [URL]\\n\\twhere options include\", app);\n println!(\"{}\", getopts::usage(message.as_slice(), opts));\n}\n\npub fn print_debug_usage(app: &str) {\n fn print_option(name: &str, description: &str) {\n println!(\"\\t{:<35} {}\", name, description);\n }\n\n println!(\"Usage: {} debug option,[options,...]\\n\\twhere options include\\n\\nOptions:\", app);\n\n print_option(\"bubble-widths\", \"Bubble intrinsic widths separately like other engines.\");\n print_option(\"disable-text-aa\", \"Disable antialiasing of rendered text.\");\n print_option(\"dump-flow-tree\", \"Print the flow tree after each layout.\");\n print_option(\"profile-tasks\", \"Instrument each task, writing the output to a file.\");\n print_option(\"show-compositor-borders\", \"Paint borders along layer and tile boundaries.\");\n print_option(\"show-fragment-borders\", \"Paint borders along fragment boundaries.\");\n print_option(\"trace-layout\", \"Write layout trace to an external file for debugging.\");\n print_option(\"validate-display-list-geometry\",\n \"Display an error when display list geometry escapes overflow region.\");\n\n println!(\"\");\n}\n\nfn args_fail(msg: &str) {\n io::stderr().write_line(msg).unwrap();\n os::set_exit_status(1);\n}\n\n\/\/ Always use CPU rendering on android.\n\n#[cfg(target_os=\"android\")]\nstatic FORCE_CPU_PAINTING: bool = true;\n\n#[cfg(not(target_os=\"android\"))]\nstatic FORCE_CPU_PAINTING: bool = false;\n\npub fn default_opts() -> Opts {\n Opts {\n urls: vec!(),\n n_render_threads: 1,\n gpu_painting: false,\n tile_size: 512,\n device_pixels_per_px: None,\n time_profiler_period: None,\n memory_profiler_period: None,\n enable_experimental: false,\n layout_threads: 1,\n nonincremental_layout: false,\n output_file: None,\n headless: true,\n hard_fail: true,\n bubble_inline_sizes_separately: false,\n show_debug_borders: false,\n show_debug_fragment_borders: false,\n enable_text_antialiasing: false,\n trace_layout: false,\n devtools_port: None,\n initial_window_size: TypedSize2D(800, 600),\n user_agent: None,\n dump_flow_tree: false,\n validate_display_list_geometry: false,\n profile_tasks: false,\n render_api: OpenGL,\n }\n}\n\npub fn from_cmdline_args(args: &[String]) -> bool {\n let app_name = args[0].to_string();\n let args = args.tail();\n\n let opts = vec!(\n getopts::optflag(\"c\", \"cpu\", \"CPU painting (default)\"),\n getopts::optflag(\"g\", \"gpu\", \"GPU painting\"),\n getopts::optopt(\"o\", \"output\", \"Output file\", \"output.png\"),\n getopts::optopt(\"s\", \"size\", \"Size of tiles\", \"512\"),\n getopts::optopt(\"\", \"device-pixel-ratio\", \"Device pixels per px\", \"\"),\n getopts::optflag(\"e\", \"experimental\", \"Enable experimental web features\"),\n getopts::optopt(\"t\", \"threads\", \"Number of render threads\", \"1\"),\n getopts::optflagopt(\"p\", \"profile\", \"Profiler flag and output interval\", \"10\"),\n getopts::optflagopt(\"m\", \"memory-profile\", \"Memory profiler flag and output interval\", \"10\"),\n getopts::optflag(\"x\", \"exit\", \"Exit after load flag\"),\n getopts::optopt(\"y\", \"layout-threads\", \"Number of threads to use for layout\", \"1\"),\n getopts::optflag(\"i\", \"nonincremental-layout\", \"Enable to turn off incremental layout.\"),\n getopts::optflag(\"z\", \"headless\", \"Headless mode\"),\n getopts::optflag(\"f\", \"hard-fail\", \"Exit on task failure instead of displaying about:failure\"),\n getopts::optflagopt(\"\", \"devtools\", \"Start remote devtools server on port\", \"6000\"),\n getopts::optopt(\"\", \"resolution\", \"Set 
window resolution.\", \"800x600\"),\n getopts::optopt(\"u\", \"user-agent\", \"Set custom user agent string\", \"NCSA Mosaic\/1.0 (X11;SunOS 4.1.4 sun4m)\"),\n getopts::optopt(\"Z\", \"debug\", \"A comma-separated string of debug options. Pass help to show available options.\", \"\"),\n getopts::optflag(\"h\", \"help\", \"Print this message\"),\n getopts::optopt(\"r\", \"render-api\", \"Set the rendering API to use\", \"gl|mesa\"),\n );\n\n let opt_match = match getopts::getopts(args, opts.as_slice()) {\n Ok(m) => m,\n Err(f) => {\n args_fail(format!(\"{}\", f).as_slice());\n return false;\n }\n };\n\n if opt_match.opt_present(\"h\") || opt_match.opt_present(\"help\") {\n print_usage(app_name.as_slice(), opts.as_slice());\n return false;\n };\n\n let mut debug_options = HashSet::new();\n let debug_string = match opt_match.opt_str(\"Z\") {\n Some(string) => string,\n None => String::new()\n };\n for split in debug_string.as_slice().split(',') {\n debug_options.insert(split.clone());\n }\n if debug_options.contains(&\"help\") {\n print_debug_usage(app_name.as_slice());\n return false;\n }\n\n let urls = if opt_match.free.is_empty() {\n print_usage(app_name.as_slice(), opts.as_slice());\n args_fail(\"servo asks that you provide 1 or more URLs\");\n return false;\n } else {\n opt_match.free.clone()\n };\n\n let tile_size: uint = match opt_match.opt_str(\"s\") {\n Some(tile_size_str) => from_str(tile_size_str.as_slice()).unwrap(),\n None => 512,\n };\n\n let device_pixels_per_px = opt_match.opt_str(\"device-pixel-ratio\").map(|dppx_str|\n ScaleFactor(from_str(dppx_str.as_slice()).unwrap())\n );\n\n let mut n_render_threads: uint = match opt_match.opt_str(\"t\") {\n Some(n_render_threads_str) => from_str(n_render_threads_str.as_slice()).unwrap(),\n None => 1, \/\/ FIXME: Number of cores.\n };\n\n \/\/ If only the flag is present, default to a 5 second period for both profilers.\n let time_profiler_period = opt_match.opt_default(\"p\", \"5\").map(|period| {\n from_str(period.as_slice()).unwrap()\n });\n let memory_profiler_period = opt_match.opt_default(\"m\", \"5\").map(|period| {\n from_str(period.as_slice()).unwrap()\n });\n\n let gpu_painting = !FORCE_CPU_PAINTING && opt_match.opt_present(\"g\");\n\n let mut layout_threads: uint = match opt_match.opt_str(\"y\") {\n Some(layout_threads_str) => from_str(layout_threads_str.as_slice()).unwrap(),\n None => cmp::max(rt::default_sched_threads() * 3 \/ 4, 1),\n };\n\n let nonincremental_layout = opt_match.opt_present(\"i\");\n\n let mut bubble_inline_sizes_separately = debug_options.contains(&\"bubble-widths\");\n let trace_layout = debug_options.contains(&\"trace-layout\");\n if trace_layout {\n n_render_threads = 1;\n layout_threads = 1;\n bubble_inline_sizes_separately = true;\n }\n\n let devtools_port = opt_match.opt_default(\"devtools\", \"6000\").map(|port| {\n from_str(port.as_slice()).unwrap()\n });\n\n let initial_window_size = match opt_match.opt_str(\"resolution\") {\n Some(res_string) => {\n let res: Vec<uint> = res_string.as_slice().split('x').map(|r| from_str(r).unwrap()).collect();\n TypedSize2D(res[0], res[1])\n }\n None => {\n TypedSize2D(800, 600)\n }\n };\n\n let render_api = match opt_match.opt_str(\"r\").unwrap_or(\"gl\".to_string()).as_slice() {\n \"mesa\" => Mesa,\n \"gl\" => OpenGL,\n _ => {\n args_fail(\"Unknown render api specified\");\n return false;\n }\n };\n\n let opts = Opts {\n urls: urls,\n n_render_threads: n_render_threads,\n gpu_painting: gpu_painting,\n tile_size: tile_size,\n device_pixels_per_px: 
device_pixels_per_px,\n time_profiler_period: time_profiler_period,\n memory_profiler_period: memory_profiler_period,\n enable_experimental: opt_match.opt_present(\"e\"),\n layout_threads: layout_threads,\n nonincremental_layout: nonincremental_layout,\n output_file: opt_match.opt_str(\"o\"),\n headless: opt_match.opt_present(\"z\"),\n hard_fail: opt_match.opt_present(\"f\"),\n bubble_inline_sizes_separately: bubble_inline_sizes_separately,\n profile_tasks: debug_options.contains(&\"profile-tasks\"),\n trace_layout: trace_layout,\n devtools_port: devtools_port,\n initial_window_size: initial_window_size,\n user_agent: opt_match.opt_str(\"u\"),\n show_debug_borders: debug_options.contains(&\"show-compositor-borders\"),\n show_debug_fragment_borders: debug_options.contains(&\"show-fragment-borders\"),\n enable_text_antialiasing: !debug_options.contains(&\"disable-text-aa\"),\n dump_flow_tree: debug_options.contains(&\"dump-flow-tree\"),\n validate_display_list_geometry: debug_options.contains(&\"validate-display-list-geometry\"),\n render_api: render_api,\n };\n\n set_opts(opts);\n true\n}\n\nstatic mut EXPERIMENTAL_ENABLED: bool = false;\n\npub fn set_experimental_enabled(new_value: bool) {\n unsafe {\n EXPERIMENTAL_ENABLED = new_value;\n }\n}\n\npub fn experimental_enabled() -> bool {\n unsafe {\n EXPERIMENTAL_ENABLED\n }\n}\n\n\/\/ Make Opts available globally. This saves having to clone and pass\n\/\/ opts everywhere it is used, which gets particularly cumbersome\n\/\/ when passing through the DOM structures.\nstatic mut OPTIONS: *mut Opts = 0 as *mut Opts;\n\npub fn set_opts(opts: Opts) {\n unsafe {\n let box_opts = box opts;\n OPTIONS = mem::transmute(box_opts);\n }\n}\n\n#[inline]\npub fn get<'a>() -> &'a Opts {\n unsafe {\n \/\/ If code attempts to retrieve the options and they haven't\n \/\/ been set by the platform init code, just return a default\n \/\/ set of options. This is mostly useful for unit tests that\n \/\/ run through a code path which queries the cmd line options.\n if OPTIONS == ptr::null_mut() {\n set_opts(default_opts());\n }\n mem::transmute(OPTIONS)\n }\n}\n<commit_msg>Replace render to paint in util\/opts.rs. Some general words are not replaced.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Configuration options for a single run of the servo application. Created\n\/\/! from command line arguments.\n\nuse geometry::ScreenPx;\n\nuse geom::scale_factor::ScaleFactor;\nuse geom::size::TypedSize2D;\nuse layers::geometry::DevicePixel;\nuse getopts;\nuse std::collections::HashSet;\nuse std::cmp;\nuse std::io;\nuse std::mem;\nuse std::os;\nuse std::ptr;\nuse std::rt;\n\n#[deriving(Clone)]\npub enum RenderApi {\n OpenGL,\n Mesa,\n}\n\n\/\/\/ Global flags for Servo, currently set on the command line.\n#[deriving(Clone)]\npub struct Opts {\n \/\/\/ The initial URLs to load.\n pub urls: Vec<String>,\n\n \/\/\/ How many threads to use for CPU painting (`-t`).\n \/\/\/\n \/\/\/ FIXME(pcwalton): This is not currently used. All painting is sequential.\n pub n_paint_threads: uint,\n\n \/\/\/ True to use GPU painting via Skia-GL, false to use CPU painting via Skia (`-g`). 
Note that\n \/\/\/ compositing is always done on the GPU.\n pub gpu_painting: bool,\n\n \/\/\/ The maximum size of each tile in pixels (`-s`).\n pub tile_size: uint,\n\n \/\/\/ The ratio of device pixels per px at the default scale. If unspecified, will use the\n \/\/\/ platform default setting.\n pub device_pixels_per_px: Option<ScaleFactor<ScreenPx, DevicePixel, f32>>,\n\n \/\/\/ `None` to disable the time profiler or `Some` with an interval in seconds to enable it and\n \/\/\/ cause it to produce output on that interval (`-p`).\n pub time_profiler_period: Option<f64>,\n\n \/\/\/ `None` to disable the memory profiler or `Some` with an interval in seconds to enable it\n \/\/\/ and cause it to produce output on that interval (`-m`).\n pub memory_profiler_period: Option<f64>,\n\n \/\/\/ Enable experimental web features (`-e`).\n pub enable_experimental: bool,\n\n \/\/\/ The number of threads to use for layout (`-y`). Defaults to 1, which results in a recursive\n \/\/\/ sequential algorithm.\n pub layout_threads: uint,\n\n pub nonincremental_layout: bool,\n\n pub output_file: Option<String>,\n pub headless: bool,\n pub hard_fail: bool,\n\n \/\/\/ True if we should bubble intrinsic widths sequentially (`-b`). If this is true, then\n \/\/\/ intrinsic widths are computed as a separate pass instead of during flow construction. You\n \/\/\/ may wish to turn this flag on in order to benchmark style recalculation against other\n \/\/\/ browser engines.\n pub bubble_inline_sizes_separately: bool,\n\n \/\/\/ True if we should show borders on all layers and tiles for\n \/\/\/ debugging purposes (`--show-debug-borders`).\n pub show_debug_borders: bool,\n\n \/\/\/ True if we should show borders on all fragments for debugging purposes (`--show-debug-fragment-borders`).\n pub show_debug_fragment_borders: bool,\n\n \/\/\/ If set with --disable-text-aa, disable antialiasing on fonts. This is primarily useful for reftests\n \/\/\/ where pixel perfect results are required when using fonts such as the Ahem\n \/\/\/ font for layout tests.\n pub enable_text_antialiasing: bool,\n\n \/\/\/ True if each step of layout is traced to an external JSON file\n \/\/\/ for debugging purposes. Settings this implies sequential layout\n \/\/\/ and paint.\n pub trace_layout: bool,\n\n \/\/\/ If true, instrument the runtime for each task created and dump\n \/\/\/ that information to a JSON file that can be viewed in the task\n \/\/\/ profile viewer.\n pub profile_tasks: bool,\n\n \/\/\/ `None` to disable devtools or `Some` with a port number to start a server to listen to\n \/\/\/ remote Firefox devtools connections.\n pub devtools_port: Option<u16>,\n\n \/\/\/ The initial requested size of the window.\n pub initial_window_size: TypedSize2D<ScreenPx, uint>,\n\n \/\/\/ An optional string allowing the user agent to be set for testing.\n pub user_agent: Option<String>,\n\n \/\/\/ Dumps the flow tree after a layout.\n pub dump_flow_tree: bool,\n\n \/\/\/ Whether to show an error when display list geometry escapes flow overflow regions.\n pub validate_display_list_geometry: bool,\n\n pub render_api: RenderApi,\n}\n\nfn print_usage(app: &str, opts: &[getopts::OptGroup]) {\n let message = format!(\"Usage: {} [ options ... 
] [URL]\\n\\twhere options include\", app);\n println!(\"{}\", getopts::usage(message.as_slice(), opts));\n}\n\npub fn print_debug_usage(app: &str) {\n fn print_option(name: &str, description: &str) {\n println!(\"\\t{:<35} {}\", name, description);\n }\n\n println!(\"Usage: {} debug option,[options,...]\\n\\twhere options include\\n\\nOptions:\", app);\n\n print_option(\"bubble-widths\", \"Bubble intrinsic widths separately like other engines.\");\n print_option(\"disable-text-aa\", \"Disable antialiasing of rendered text.\");\n print_option(\"dump-flow-tree\", \"Print the flow tree after each layout.\");\n print_option(\"profile-tasks\", \"Instrument each task, writing the output to a file.\");\n print_option(\"show-compositor-borders\", \"Paint borders along layer and tile boundaries.\");\n print_option(\"show-fragment-borders\", \"Paint borders along fragment boundaries.\");\n print_option(\"trace-layout\", \"Write layout trace to an external file for debugging.\");\n print_option(\"validate-display-list-geometry\",\n \"Display an error when display list geometry escapes overflow region.\");\n\n println!(\"\");\n}\n\nfn args_fail(msg: &str) {\n io::stderr().write_line(msg).unwrap();\n os::set_exit_status(1);\n}\n\n\/\/ Always use CPU painting on android.\n\n#[cfg(target_os=\"android\")]\nstatic FORCE_CPU_PAINTING: bool = true;\n\n#[cfg(not(target_os=\"android\"))]\nstatic FORCE_CPU_PAINTING: bool = false;\n\npub fn default_opts() -> Opts {\n Opts {\n urls: vec!(),\n n_paint_threads: 1,\n gpu_painting: false,\n tile_size: 512,\n device_pixels_per_px: None,\n time_profiler_period: None,\n memory_profiler_period: None,\n enable_experimental: false,\n layout_threads: 1,\n nonincremental_layout: false,\n output_file: None,\n headless: true,\n hard_fail: true,\n bubble_inline_sizes_separately: false,\n show_debug_borders: false,\n show_debug_fragment_borders: false,\n enable_text_antialiasing: false,\n trace_layout: false,\n devtools_port: None,\n initial_window_size: TypedSize2D(800, 600),\n user_agent: None,\n dump_flow_tree: false,\n validate_display_list_geometry: false,\n profile_tasks: false,\n render_api: OpenGL,\n }\n}\n\npub fn from_cmdline_args(args: &[String]) -> bool {\n let app_name = args[0].to_string();\n let args = args.tail();\n\n let opts = vec!(\n getopts::optflag(\"c\", \"cpu\", \"CPU painting (default)\"),\n getopts::optflag(\"g\", \"gpu\", \"GPU painting\"),\n getopts::optopt(\"o\", \"output\", \"Output file\", \"output.png\"),\n getopts::optopt(\"s\", \"size\", \"Size of tiles\", \"512\"),\n getopts::optopt(\"\", \"device-pixel-ratio\", \"Device pixels per px\", \"\"),\n getopts::optflag(\"e\", \"experimental\", \"Enable experimental web features\"),\n getopts::optopt(\"t\", \"threads\", \"Number of paint threads\", \"1\"),\n getopts::optflagopt(\"p\", \"profile\", \"Profiler flag and output interval\", \"10\"),\n getopts::optflagopt(\"m\", \"memory-profile\", \"Memory profiler flag and output interval\", \"10\"),\n getopts::optflag(\"x\", \"exit\", \"Exit after load flag\"),\n getopts::optopt(\"y\", \"layout-threads\", \"Number of threads to use for layout\", \"1\"),\n getopts::optflag(\"i\", \"nonincremental-layout\", \"Enable to turn off incremental layout.\"),\n getopts::optflag(\"z\", \"headless\", \"Headless mode\"),\n getopts::optflag(\"f\", \"hard-fail\", \"Exit on task failure instead of displaying about:failure\"),\n getopts::optflagopt(\"\", \"devtools\", \"Start remote devtools server on port\", \"6000\"),\n getopts::optopt(\"\", \"resolution\", \"Set 
window resolution.\", \"800x600\"),\n getopts::optopt(\"u\", \"user-agent\", \"Set custom user agent string\", \"NCSA Mosaic\/1.0 (X11;SunOS 4.1.4 sun4m)\"),\n getopts::optopt(\"Z\", \"debug\", \"A comma-separated string of debug options. Pass help to show available options.\", \"\"),\n getopts::optflag(\"h\", \"help\", \"Print this message\"),\n getopts::optopt(\"r\", \"render-api\", \"Set the rendering API to use\", \"gl|mesa\"),\n );\n\n let opt_match = match getopts::getopts(args, opts.as_slice()) {\n Ok(m) => m,\n Err(f) => {\n args_fail(format!(\"{}\", f).as_slice());\n return false;\n }\n };\n\n if opt_match.opt_present(\"h\") || opt_match.opt_present(\"help\") {\n print_usage(app_name.as_slice(), opts.as_slice());\n return false;\n };\n\n let mut debug_options = HashSet::new();\n let debug_string = match opt_match.opt_str(\"Z\") {\n Some(string) => string,\n None => String::new()\n };\n for split in debug_string.as_slice().split(',') {\n debug_options.insert(split.clone());\n }\n if debug_options.contains(&\"help\") {\n print_debug_usage(app_name.as_slice());\n return false;\n }\n\n let urls = if opt_match.free.is_empty() {\n print_usage(app_name.as_slice(), opts.as_slice());\n args_fail(\"servo asks that you provide 1 or more URLs\");\n return false;\n } else {\n opt_match.free.clone()\n };\n\n let tile_size: uint = match opt_match.opt_str(\"s\") {\n Some(tile_size_str) => from_str(tile_size_str.as_slice()).unwrap(),\n None => 512,\n };\n\n let device_pixels_per_px = opt_match.opt_str(\"device-pixel-ratio\").map(|dppx_str|\n ScaleFactor(from_str(dppx_str.as_slice()).unwrap())\n );\n\n let mut n_paint_threads: uint = match opt_match.opt_str(\"t\") {\n Some(n_paint_threads_str) => from_str(n_paint_threads_str.as_slice()).unwrap(),\n None => 1, \/\/ FIXME: Number of cores.\n };\n\n \/\/ If only the flag is present, default to a 5 second period for both profilers.\n let time_profiler_period = opt_match.opt_default(\"p\", \"5\").map(|period| {\n from_str(period.as_slice()).unwrap()\n });\n let memory_profiler_period = opt_match.opt_default(\"m\", \"5\").map(|period| {\n from_str(period.as_slice()).unwrap()\n });\n\n let gpu_painting = !FORCE_CPU_PAINTING && opt_match.opt_present(\"g\");\n\n let mut layout_threads: uint = match opt_match.opt_str(\"y\") {\n Some(layout_threads_str) => from_str(layout_threads_str.as_slice()).unwrap(),\n None => cmp::max(rt::default_sched_threads() * 3 \/ 4, 1),\n };\n\n let nonincremental_layout = opt_match.opt_present(\"i\");\n\n let mut bubble_inline_sizes_separately = debug_options.contains(&\"bubble-widths\");\n let trace_layout = debug_options.contains(&\"trace-layout\");\n if trace_layout {\n n_paint_threads = 1;\n layout_threads = 1;\n bubble_inline_sizes_separately = true;\n }\n\n let devtools_port = opt_match.opt_default(\"devtools\", \"6000\").map(|port| {\n from_str(port.as_slice()).unwrap()\n });\n\n let initial_window_size = match opt_match.opt_str(\"resolution\") {\n Some(res_string) => {\n let res: Vec<uint> = res_string.as_slice().split('x').map(|r| from_str(r).unwrap()).collect();\n TypedSize2D(res[0], res[1])\n }\n None => {\n TypedSize2D(800, 600)\n }\n };\n\n let render_api = match opt_match.opt_str(\"r\").unwrap_or(\"gl\".to_string()).as_slice() {\n \"mesa\" => Mesa,\n \"gl\" => OpenGL,\n _ => {\n args_fail(\"Unknown render api specified\");\n return false;\n }\n };\n\n let opts = Opts {\n urls: urls,\n n_paint_threads: n_paint_threads,\n gpu_painting: gpu_painting,\n tile_size: tile_size,\n device_pixels_per_px: 
device_pixels_per_px,\n time_profiler_period: time_profiler_period,\n memory_profiler_period: memory_profiler_period,\n enable_experimental: opt_match.opt_present(\"e\"),\n layout_threads: layout_threads,\n nonincremental_layout: nonincremental_layout,\n output_file: opt_match.opt_str(\"o\"),\n headless: opt_match.opt_present(\"z\"),\n hard_fail: opt_match.opt_present(\"f\"),\n bubble_inline_sizes_separately: bubble_inline_sizes_separately,\n profile_tasks: debug_options.contains(&\"profile-tasks\"),\n trace_layout: trace_layout,\n devtools_port: devtools_port,\n initial_window_size: initial_window_size,\n user_agent: opt_match.opt_str(\"u\"),\n show_debug_borders: debug_options.contains(&\"show-compositor-borders\"),\n show_debug_fragment_borders: debug_options.contains(&\"show-fragment-borders\"),\n enable_text_antialiasing: !debug_options.contains(&\"disable-text-aa\"),\n dump_flow_tree: debug_options.contains(&\"dump-flow-tree\"),\n validate_display_list_geometry: debug_options.contains(&\"validate-display-list-geometry\"),\n render_api: render_api,\n };\n\n set_opts(opts);\n true\n}\n\nstatic mut EXPERIMENTAL_ENABLED: bool = false;\n\npub fn set_experimental_enabled(new_value: bool) {\n unsafe {\n EXPERIMENTAL_ENABLED = new_value;\n }\n}\n\npub fn experimental_enabled() -> bool {\n unsafe {\n EXPERIMENTAL_ENABLED\n }\n}\n\n\/\/ Make Opts available globally. This saves having to clone and pass\n\/\/ opts everywhere it is used, which gets particularly cumbersome\n\/\/ when passing through the DOM structures.\nstatic mut OPTIONS: *mut Opts = 0 as *mut Opts;\n\npub fn set_opts(opts: Opts) {\n unsafe {\n let box_opts = box opts;\n OPTIONS = mem::transmute(box_opts);\n }\n}\n\n#[inline]\npub fn get<'a>() -> &'a Opts {\n unsafe {\n \/\/ If code attempts to retrieve the options and they haven't\n \/\/ been set by the platform init code, just return a default\n \/\/ set of options. This is mostly useful for unit tests that\n \/\/ run through a code path which queries the cmd line options.\n if OPTIONS == ptr::null_mut() {\n set_opts(default_opts());\n }\n mem::transmute(OPTIONS)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Another workaround for https:\/\/github.com\/rust-lang\/rust\/issues\/15763<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>refactor `match` statements<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
Environment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n    i: usize\n}\n\nimpl Iterator for Args {\n    \/\/Yes, this is supposed to be String, do not change it!\n    \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n    type Item = String;\n    fn next(&mut self) -> Option<String> {\n        if let Some(arg) = unsafe { (*_args).get(self.i) } {\n            self.i += 1;\n            Some(arg.to_string())\n        } else {\n            None\n        }\n    }\n\n    fn size_hint(&self) -> (usize, Option<usize>) {\n        let len = if self.i <= unsafe { (*_args).len() } {\n            unsafe { (*_args).len() - self.i }\n        } else {\n            0\n        };\n        (len, Some(len))\n    }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n    Args {\n        i: 0\n    }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n    _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n    if _args as usize > 0 {\n        drop(Box::from_raw(_args));\n    }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, an error will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n    match File::open(location) {\n        Ok(file) => {\n            match file.path() {\n                Ok(path) => Ok(path),\n                Err(err) => Err(err),\n            }\n        }\n        Err(err) => Err(err),\n    }\n}\n\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n    let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n        File::open(&path.as_ref().inner)\n    } else {\n        File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n    };\n\n    match file_result {\n        Ok(file) => {\n            match file.path() {\n                Ok(path) => {\n                    if let Some(path_str) = path.to_str() {\n                        let path_c = path_str.to_string() + \"\\0\";\n                        match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n                            Ok(_) => Ok(()),\n                            Err(err) => Err(err),\n                        }\n                    } else {\n                        Err(Error::new(ENOENT))\n                    }\n                }\n                Err(err) => Err(err),\n            }\n        }\n        Err(err) => Err(err),\n    }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n    Ok(\"This is code filler\".to_string())\n}\n<commit_msg>current_dir refactoring<commit_after>\/\/! 
Environment data\n\nuse alloc::boxed::Box;\n\nuse fs::File;\nuse path::{Path, PathBuf};\nuse io::Result;\nuse string::{String, ToString};\nuse vec::Vec;\n\nuse system::error::{Error, ENOENT};\nuse system::syscall::sys_chdir;\n\nstatic mut _args: *mut Vec<&'static str> = 0 as *mut Vec<&'static str>;\n\npub struct Args {\n    i: usize\n}\n\nimpl Iterator for Args {\n    \/\/Yes, this is supposed to be String, do not change it!\n    \/\/Only change it if https:\/\/doc.rust-lang.org\/std\/env\/struct.Args.html changes from String\n    type Item = String;\n    fn next(&mut self) -> Option<String> {\n        if let Some(arg) = unsafe { (*_args).get(self.i) } {\n            self.i += 1;\n            Some(arg.to_string())\n        } else {\n            None\n        }\n    }\n\n    fn size_hint(&self) -> (usize, Option<usize>) {\n        let len = if self.i <= unsafe { (*_args).len() } {\n            unsafe { (*_args).len() - self.i }\n        } else {\n            0\n        };\n        (len, Some(len))\n    }\n}\n\nimpl ExactSizeIterator for Args {}\n\n\/\/\/ Arguments\npub fn args() -> Args {\n    Args {\n        i: 0\n    }\n}\n\n\/\/\/ Initialize arguments\npub unsafe fn args_init(args: Vec<&'static str>) {\n    _args = Box::into_raw(box args);\n}\n\n\/\/\/ Destroy arguments\npub unsafe fn args_destroy() {\n    if _args as usize > 0 {\n        drop(Box::from_raw(_args));\n    }\n}\n\n\/\/\/ Private function to get the path from a custom location\n\/\/\/ If the custom directory cannot be found, an error will be returned\nfn get_path_from(location : &str) -> Result<PathBuf> {\n    match File::open(location) {\n        Ok(file) => {\n            match file.path() {\n                Ok(path) => Ok(path),\n                Err(err) => Err(err),\n            }\n        }\n        Err(err) => Err(err),\n    }\n}\n\n\/\/\/ Method to return the current directory\npub fn current_dir() -> Result<PathBuf> {\n    \/\/ Return the current path\n    get_path_from(\".\/\")\n}\n\/\/\/ Set the current directory\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n    let file_result = if path.as_ref().inner.is_empty() || path.as_ref().inner.ends_with('\/') {\n        File::open(&path.as_ref().inner)\n    } else {\n        File::open(&(path.as_ref().inner.to_string() + \"\/\"))\n    };\n\n    match file_result {\n        Ok(file) => {\n            match file.path() {\n                Ok(path) => {\n                    if let Some(path_str) = path.to_str() {\n                        let path_c = path_str.to_string() + \"\\0\";\n                        match Error::demux(unsafe { sys_chdir(path_c.as_ptr()) }) {\n                            Ok(_) => Ok(()),\n                            Err(err) => Err(err),\n                        }\n                    } else {\n                        Err(Error::new(ENOENT))\n                    }\n                }\n                Err(err) => Err(err),\n            }\n        }\n        Err(err) => Err(err),\n    }\n}\n\n\/\/ TODO: Fully implement `env::var()`\npub fn var(_key: &str) -> Result<String> {\n    Ok(\"This is code filler\".to_string())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add benches<commit_after>#![feature(test)]\n\nextern crate growable;\nextern crate test;\n\nuse std::collections::VecDeque;\nuse std::fmt::Debug;\nuse growable::{GrowablePoolBuilder, Reusable};\nuse test::Bencher;\n\n#[bench]\nfn bench_box(bencher: &mut Bencher) {\n    let mut buffer: VecDeque<Box<Debug>> = VecDeque::with_capacity(1024);\n    bencher.iter(|| {\n        for i in 0 .. 1024 {\n            let item: Box<Debug> = match i % 3 {\n                0 => Box::new(\"Hello World\"),\n                1 => Box::new(365),\n                2 => Box::new(['?'; 24]),\n                _ => unreachable!(),\n            };\n            buffer.push_back(item);\n        }\n        for _ in 0 .. 1024 {\n            let _ = buffer.pop_front().unwrap();\n        }\n    });\n}\n\n#[bench]\nfn bench_growable(bencher: &mut Bencher) {\n    let mut buffer: VecDeque<Reusable<Debug>> = VecDeque::with_capacity(1024);\n    let mut pool = GrowablePoolBuilder::default()\n        .with_default_capacity(24)\n        .with_default_ptr_alignment(8)\n        .with_capacity(1024)\n        .build();\n    bencher.iter(|| {\n        for i in 0 .. 
1024 {\n let item: Reusable<Debug> = match i % 3 {\n 0 => pool.allocate(\"Hello World\"),\n 1 => pool.allocate(365),\n 2 => pool.allocate(['?'; 24]),\n _ => unreachable!(),\n };\n buffer.push_back(item);\n }\n for _ in 0 .. 1024 {\n pool.free(buffer.pop_front().unwrap());\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #32420 - frewsxcv:regression-test-26997, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub struct Foo {\n x: isize,\n y: isize\n}\n\nimpl Foo {\n pub extern fn foo_new() -> Foo {\n Foo { x: 21, y: 33 }\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add framed_hello.rs<commit_after>extern crate log;\nextern crate env_logger;\n\nextern crate actix_http;\nextern crate actix_net;\nextern crate futures;\nextern crate http;\nextern crate bytes;\n\nuse actix_http::{h1, ServiceConfig, SendResponse, Response};\nuse actix_net::framed::IntoFramed;\nuse actix_net::codec::Framed;\nuse actix_net::stream::TakeItem;\nuse actix_net::server::Server;\nuse actix_net::service::NewServiceExt;\nuse futures::Future;\nuse std::env;\n\nfn main() {\n env::set_var(\"RUST_LOG\", \"framed_hello=info\");\n env_logger::init();\n\n Server::new().bind(\"framed_hello\", \"127.0.0.1:8080\", || {\n IntoFramed::new(|| h1::Codec::new(ServiceConfig::default()))\n .and_then(TakeItem::new().map_err(|_| ()))\n .and_then(|(_req, _framed): (_, Framed<_, _>)| {\n SendResponse::send(_framed, Response::Ok().body(\"Hello world!\"))\n .map_err(|_| ())\n .map(|_| ())\n })\n }).unwrap().run();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement imphash (#40)<commit_after>\/*!\nGets the hash of the import library.\n\nReferences:\n\n* https:\/\/github.com\/erocarrera\/pefile\/blob\/4c57c1443bb504281dcc00e1ddec9d62c5e39b35\/pefile.py#L3824\n* https:\/\/www.fireeye.com\/blog\/threat-research\/2014\/01\/tracking-malware-import-hashing.html\n*\/\n\nextern crate pelite;\n\nuse std::env;\nuse std::collections::hash_map::DefaultHasher;\nuse std::hash::{Hash, Hasher};\n\nfn main() {\n\tlet mut args = env::args_os();\n\tif let (Some(_), Some(path), None) = (args.next(), args.next(), args.next()) {\n\t\tmatch pelite::FileMap::open(&path) {\n\t\t\tOk(file_map) => {\n\t\t\t\tlet result = imphash32(file_map.as_ref()).or_else(|_| imphash64(file_map.as_ref()));\n\t\t\t\tmatch result {\n\t\t\t\t\tOk(hash) => {\n\t\t\t\t\t\tprintln!(\"Import hash is {:016X} for {:?}.\", hash, path);\n\t\t\t\t\t},\n\t\t\t\t\tErr(err) => {\n\t\t\t\t\t\teprintln!(\"Error reading {:?} with {}.\", path, err);\n\t\t\t\t\t},\n\t\t\t\t}\n\t\t\t},\n\t\t\tErr(err) => eprintln!(\"Error reading {:?} with {}.\", path, err),\n\t\t};\n\t}\n\telse {\n\t\tprintln!(\"imphash <path.dll>\\nGiven a path to a dll calculates a hash of the import table.\");\n\t}\n}\n\nfn imphash64(image: &[u8]) -> pelite::Result<u64> {\n\tuse pelite::pe64::{Pe, PeFile};\n\tuse pelite::pe64::imports::Import;\n\n\tlet file = PeFile::from_bytes(image)?;\n\tlet imports = match file.imports() {\n\t\tOk(imports) => imports,\n\t\tErr(err) if 
err.is_null() => return Ok(0),\n\t\tErr(err) => return Err(err),\n\t};\n\n\tlet mut h = DefaultHasher::new();\n\n\tfor desc in imports {\n\t\tlet dll_name = desc.dll_name()?;\n\t\tdll_name.hash(&mut h);\n\t\tfor imp in desc.int()? {\n\t\t\tmatch imp? {\n\t\t\t\tImport::ByName { hint: _, name } => {\n\t\t\t\t\tname.hash(&mut h);\n\t\t\t\t},\n\t\t\t\tImport::ByOrdinal { ord } => {\n\t\t\t\t\tord.hash(&mut h);\n\t\t\t\t},\n\t\t\t}\n\t\t}\n\t}\n\n\tOk(h.finish())\n}\n\nfn imphash32(image: &[u8]) -> pelite::Result<u64> {\n\tuse pelite::pe32::{Pe, PeFile};\n\tuse pelite::pe32::imports::Import;\n\n\tlet file = PeFile::from_bytes(image)?;\n\tlet imports = match file.imports() {\n\t\tOk(imports) => imports,\n\t\tErr(err) if err.is_null() => return Ok(0),\n\t\tErr(err) => return Err(err),\n\t};\n\n\tlet mut h = DefaultHasher::new();\n\n\tfor desc in imports {\n\t\tlet dll_name = desc.dll_name()?;\n\t\tdll_name.hash(&mut h);\n\t\tfor imp in desc.int()? {\n\t\t\tmatch imp? {\n\t\t\t\tImport::ByName { hint: _, name } => {\n\t\t\t\t\tname.hash(&mut h);\n\t\t\t\t},\n\t\t\t\tImport::ByOrdinal { ord } => {\n\t\t\t\t\tord.hash(&mut h);\n\t\t\t\t},\n\t\t\t}\n\t\t}\n\t}\n\n\tOk(h.finish())\n}\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ a reference to a string allocated in read only memory\n let pangram: &'static str = \"the quick brow fox jumps over the lazy dog\";\n println!(\"pangram: {}\", pangram);\n\n \/\/ iterate over words in reverse, no new string is allocated\n println!(\"words in reverse\");\n for word in pangram.words().rev() {\n println!(\"{}\", word);\n }\n\n \/\/ copy chars into a vector, sort and remove duplicates\n let mut chars: Vec<char> = pangram.chars().collect();\n chars.sort();\n chars.dedup();\n\n \/\/ a growable `String`\n let mut string: String = String::new();\n for c in chars.move_iter() {\n \/\/ insert a char at the end of string\n string.push_char(c);\n \/\/ insert a string at the end of string\n string.push_str(\", \");\n }\n\n \/\/ the trimmed string is a slice to the original string, hence\n \/\/ no new allocation is performed\n let trimmed_str: &str = string.as_slice().trim_chars(&[',', ' ']);\n println!(\"used characters: {}\", trimmed_str);\n\n \/\/ heap allocate a string\n let alice = String::from_str(\"I like dogs\");\n \/\/ allocate new memory and store the modified string there\n let bob: String = alice.replace(\"dog\", \"cat\");\n\n println!(\"Alice says: {}\", alice);\n println!(\"Bob says: {}\", bob);\n}\n<commit_msg>Fix typo in string pangram<commit_after>fn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ a reference to a string allocated in read only memory\n let pangram: &'static str = \"the quick brown fox jumps over the lazy dog\";\n println!(\"pangram: {}\", pangram);\n\n \/\/ iterate over words in reverse, no new string is allocated\n println!(\"words in reverse\");\n for word in pangram.words().rev() {\n println!(\"{}\", word);\n }\n\n \/\/ copy chars into a vector, sort and remove duplicates\n let mut chars: Vec<char> = pangram.chars().collect();\n chars.sort();\n chars.dedup();\n\n \/\/ a growable `String`\n let mut string: String = String::new();\n for c in chars.move_iter() {\n \/\/ insert a char at the end of string\n string.push_char(c);\n \/\/ insert a string at the end of string\n string.push_str(\", \");\n }\n\n \/\/ the trimmed string is a slice to the original string, hence\n \/\/ no new allocation is performed\n let trimmed_str: &str = 
string.as_slice().trim_chars(&[',', ' ']);\n println!(\"used characters: {}\", trimmed_str);\n\n \/\/ heap allocate a string\n let alice = String::from_str(\"I like dogs\");\n \/\/ allocate new memory and store the modified string there\n let bob: String = alice.replace(\"dog\", \"cat\");\n\n println!(\"Alice says: {}\", alice);\n println!(\"Bob says: {}\", bob);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test demonstrating the issue.<commit_after>\/\/ check-pass\n\/\/\n\/\/ This tests checks that clashing_extern_declarations handles types that are recursive through a\n\/\/ pointer or ref argument. See #75512.\n\n#![crate_type = \"lib\"]\n\nmod raw_ptr_recursion {\n mod a {\n #[repr(C)]\n struct Pointy {\n pointy: *const Pointy,\n }\n\n extern \"C\" {\n fn run_pointy(pointy: Pointy);\n }\n }\n mod b {\n #[repr(C)]\n struct Pointy {\n pointy: *const Pointy,\n }\n\n extern \"C\" {\n fn run_pointy(pointy: Pointy);\n }\n }\n}\n\nmod raw_ptr_recursion_once_removed {\n mod a {\n #[repr(C)]\n struct Pointy1 {\n pointy_two: *const Pointy2,\n }\n\n #[repr(C)]\n struct Pointy2 {\n pointy_one: *const Pointy1,\n }\n\n extern \"C\" {\n fn run_pointy2(pointy: Pointy2);\n }\n }\n\n mod b {\n #[repr(C)]\n struct Pointy1 {\n pointy_two: *const Pointy2,\n }\n\n #[repr(C)]\n struct Pointy2 {\n pointy_one: *const Pointy1,\n }\n\n extern \"C\" {\n fn run_pointy2(pointy: Pointy2);\n }\n }\n}\n\nmod ref_recursion {\n mod a {\n #[repr(C)]\n struct Reffy<'a> {\n reffy: &'a Reffy<'a>,\n }\n\n extern \"C\" {\n fn reffy_recursion(reffy: Reffy);\n }\n }\n mod b {\n #[repr(C)]\n struct Reffy<'a> {\n reffy: &'a Reffy<'a>,\n }\n\n extern \"C\" {\n fn reffy_recursion(reffy: Reffy);\n }\n }\n}\n\nmod ref_recursion_once_removed {\n mod a {\n #[repr(C)]\n struct Reffy1<'a> {\n reffy: &'a Reffy1<'a>,\n }\n\n struct Reffy2<'a> {\n reffy: &'a Reffy2<'a>,\n }\n\n extern \"C\" {\n fn reffy_once_removed(reffy: Reffy1);\n }\n }\n mod b {\n #[repr(C)]\n struct Reffy1<'a> {\n reffy: &'a Reffy1<'a>,\n }\n\n struct Reffy2<'a> {\n reffy: &'a Reffy2<'a>,\n }\n\n extern \"C\" {\n fn reffy_once_removed(reffy: Reffy1);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! 
normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\nuse rustc::middle::region::CodeExtent;\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the lval: Mutability and region.\nfn lval_context<'a, 'tcx, D>(\n lval: &Lvalue<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<CodeExtent>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Lvalue::*;\n\n match *lval {\n Local { .. } => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, tam) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, tam.mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = lval_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => lval_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n if !tcx.sess.opts.debugging_opts.mir_emit_validate {\n return;\n }\n\n let local_decls = mir.local_decls.clone(); \/\/ FIXME: Find a way to get rid of this clone.\n\n \/\/\/ Convert an lvalue to a validation operand.\n let lval_to_operand = |lval: Lvalue<'tcx>| -> ValidationOperand<'tcx, Lvalue<'tcx>> {\n let (re, mutbl) = lval_context(&lval, &local_decls, tcx);\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n ValidationOperand { lval, ty, re, mutbl }\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, _)| lval_to_operand(Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. 
We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new();\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) =>\n Some(lval_to_operand(lval.clone())),\n &Operand::Constant(..) => { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(lval.clone())]),\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref and Cast. Again an iterator does not seem very\n \/\/ suited\n \/\/ as we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. 
To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n match block_data.statements[i].kind {\n \/\/ When the borrow of this ref expires, we need to recover validation.\n StatementKind::Assign(_, Rvalue::Ref(_, _, _)) => {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval,\n Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n \/\/ Do an acquire of the result -- but only what it points to, so add a Deref\n \/\/ projection.\n let dest_lval = Projection { base: dest_lval, elem: ProjectionElem::Deref };\n let dest_lval = Lvalue::Projection(Box::new(dest_lval));\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n \/\/ Casts can change what validation does (e.g. unsizing)\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Consume(_), _))\n if kind != CastKind::Misc =>\n {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_lval, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval,\n Rvalue::Cast(_, Operand::Consume(ref src_lval), _)) =>\n {\n (dest_lval.clone(), src_lval.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n\n \/\/ Acquire of the result\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ Release of the input\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n _ => {},\n }\n }\n }\n }\n}\n<commit_msg>do not use doc comments inside functions<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! 
introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::mir::transform::{MirPass, MirSource};\nuse rustc::middle::region::CodeExtent;\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the lval: Mutability and region.\nfn lval_context<'a, 'tcx, D>(\n lval: &Lvalue<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<CodeExtent>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Lvalue::*;\n\n match *lval {\n Local { .. } => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, tam) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, tam.mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = lval_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => lval_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _: MirSource,\n mir: &mut Mir<'tcx>) {\n if !tcx.sess.opts.debugging_opts.mir_emit_validate {\n return;\n }\n\n let local_decls = mir.local_decls.clone(); \/\/ FIXME: Find a way to get rid of this clone.\n\n \/\/ Convert an lvalue to a validation operand.\n let lval_to_operand = |lval: Lvalue<'tcx>| -> ValidationOperand<'tcx, Lvalue<'tcx>> {\n let (re, mutbl) = lval_context(&lval, &local_decls, tcx);\n let ty = lval.ty(&local_decls, tcx).to_ty(tcx);\n ValidationOperand { lval, ty, re, mutbl }\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n if mir.arg_count > 0 {\n let acquire_stmt = Statement {\n source_info: SourceInfo {\n scope: ARGUMENT_VISIBILITY_SCOPE,\n span: mir.span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n },\n kind: StatementKind::Validate(ValidationOp::Acquire,\n \/\/ Skip return value, go over all the arguments\n mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, _)| lval_to_operand(Lvalue::Local(local))).collect()\n )\n };\n mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. 
We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Lvalue<'tcx>, BasicBlock)> = Vec::new();\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n args.iter().filter_map(|op| {\n match op {\n &Operand::Consume(ref lval) =>\n Some(lval_to_operand(lval.clone())),\n &Operand::Constant(..) => { None },\n }\n }).collect())\n };\n block_data.statements.push(release_stmt);\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(lval.clone())]),\n };\n block_data.statements.push(release_stmt);\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_lval, dest_block) in returns {\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n mir.basic_blocks_mut()[dest_block].statements.insert(0, acquire_stmt);\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref and Cast. Again an iterator does not seem very\n \/\/ suited\n \/\/ as we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. 
To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n match block_data.statements[i].kind {\n \/\/ When the borrow of this ref expires, we need to recover validation.\n StatementKind::Assign(_, Rvalue::Ref(_, _, _)) => {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_lval, re, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval,\n Rvalue::Ref(re, _, ref src_lval)) => {\n (dest_lval.clone(), re, src_lval.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n \/\/ Do an acquire of the result -- but only what it points to, so add a Deref\n \/\/ projection.\n let dest_lval = Projection { base: dest_lval, elem: ProjectionElem::Deref };\n let dest_lval = Lvalue::Projection(Box::new(dest_lval));\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n \/\/ Casts can change what validation does (e.g. unsizing)\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Consume(_), _))\n if kind != CastKind::Misc =>\n {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_lval, src_lval) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_lval,\n Rvalue::Cast(_, Operand::Consume(ref src_lval), _)) =>\n {\n (dest_lval.clone(), src_lval.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n\n \/\/ Acquire of the result\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![lval_to_operand(dest_lval)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ Release of the input\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![lval_to_operand(src_lval)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n _ => {},\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Some small cleanups<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(phase)]\nextern crate regex;\n#[phase(syntax)] extern crate regex_macros;\n\nextern crate getopts;\nuse std::os;\nuse std::io::fs;\n\nuse file::File;\nuse column::defaultColumns;\nuse options::{Options, SortField, Name};\n\npub mod colours;\npub mod column;\npub mod format;\npub mod file;\npub mod unix;\npub mod options;\n\nfn main() {\n let args: Vec<StrBuf> = os::args().iter()\n .map(|x| x.to_strbuf())\n .collect();\n\n let opts = ~[\n getopts::optflag(\"a\", \"all\", \"show dot-files\"),\n getopts::optflag(\"r\", \"reverse\", \"reverse order of files\"),\n getopts::optopt(\"s\", \"sort\", \"field to sort by\", 
\"WORD\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(f) => fail!(\"Invalid options\\n{}\", f.to_err_msg()),\n };\n\n let opts = Options {\n showInvisibles: matches.opt_present(\"all\"),\n reverse: matches.opt_present(\"reverse\"),\n sortField: matches.opt_str(\"sort\").map(|word| SortField::from_word(word)).unwrap_or(Name),\n };\n\n let strs = if matches.free.is_empty() {\n vec!(\".\/\".to_strbuf())\n }\n else {\n matches.free.clone()\n };\n\n for dir in strs.move_iter() {\n list(opts, Path::new(dir))\n }\n}\n\nfn list(options: Options, path: Path) {\n let paths = match fs::readdir(&path) {\n Ok(paths) => paths,\n Err(e) => fail!(\"readdir: {}\", e),\n };\n\n let mut files = paths.iter().map(|path| File::from_path(path)).collect();\n options.sort(&mut files);\n if options.reverse {\n files.reverse();\n }\n\n let columns = defaultColumns();\n let table: Vec<Vec<StrBuf>> = files.iter()\n .filter(|&f| options.show(f))\n .map(|f| columns.iter().map(|c| f.display(c)).collect())\n .collect();\n\n let maxes: Vec<uint> = range(0, columns.len())\n .map(|n| table.iter().map(|row| colours::strip_formatting(row.get(n)).len()).max().unwrap())\n .collect();\n\n for row in table.iter() {\n let mut first = true;\n for (length, cell) in maxes.iter().zip(row.iter()) {\n if first {\n first = false;\n } else {\n print!(\" \");\n }\n print!(\"{}\", cell.as_slice());\n for _ in range(colours::strip_formatting(cell).len(), *length) {\n print!(\" \");\n }\n }\n print!(\"\\n\");\n }\n}\n<commit_msg>Cache the lengths of formatting-striped fields<commit_after>#![feature(phase)]\nextern crate regex;\n#[phase(syntax)] extern crate regex_macros;\n\nextern crate getopts;\nuse std::os;\nuse std::io::fs;\n\nuse file::File;\nuse column::defaultColumns;\nuse options::{Options, SortField, Name};\n\npub mod colours;\npub mod column;\npub mod format;\npub mod file;\npub mod unix;\npub mod options;\n\nfn main() {\n let args: Vec<StrBuf> = os::args().iter()\n .map(|x| x.to_strbuf())\n .collect();\n\n let opts = ~[\n getopts::optflag(\"a\", \"all\", \"show dot-files\"),\n getopts::optflag(\"r\", \"reverse\", \"reverse order of files\"),\n getopts::optopt(\"s\", \"sort\", \"field to sort by\", \"WORD\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(f) => fail!(\"Invalid options\\n{}\", f.to_err_msg()),\n };\n\n let opts = Options {\n showInvisibles: matches.opt_present(\"all\"),\n reverse: matches.opt_present(\"reverse\"),\n sortField: matches.opt_str(\"sort\").map(|word| SortField::from_word(word)).unwrap_or(Name),\n };\n\n let strs = if matches.free.is_empty() {\n vec!(\".\/\".to_strbuf())\n }\n else {\n matches.free.clone()\n };\n\n for dir in strs.move_iter() {\n list(opts, Path::new(dir))\n }\n}\n\nfn list(options: Options, path: Path) {\n let paths = match fs::readdir(&path) {\n Ok(paths) => paths,\n Err(e) => fail!(\"readdir: {}\", e),\n };\n\n let mut files = paths.iter().map(|path| File::from_path(path)).collect();\n options.sort(&mut files);\n if options.reverse {\n files.reverse();\n }\n\n let columns = defaultColumns();\n let num_columns = columns.len();\n\n let table: Vec<Vec<StrBuf>> = files.iter()\n .filter(|&f| options.show(f))\n .map(|f| columns.iter().map(|c| f.display(c)).collect())\n .collect();\n\n let lengths: Vec<Vec<uint>> = table.iter()\n .map(|row| row.iter().map( |col| colours::strip_formatting(col).len() ).collect())\n .collect();\n\n let maxes: Vec<uint> = range(0, num_columns)\n .map(|n| lengths.iter().map(|row| 
*row.get(n)).max().unwrap())\n .collect();\n\n for (field_lengths, row) in lengths.iter().zip(table.iter()) {\n let mut first = true;\n for ((column_length, cell), field_length) in maxes.iter().zip(row.iter()).zip(field_lengths.iter()) {\n if first {\n first = false;\n } else {\n print!(\" \");\n }\n print!(\"{}\", cell.as_slice());\n for _ in range(*field_length, *column_length) {\n print!(\" \");\n }\n }\n print!(\"\\n\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(generators)]\n\nfn bar<'a>() {\n let a: &'static str = \"hi\";\n let b: &'a str = a;\n\n || {\n yield a;\n yield b;\n };\n}\n\nfn main() {}\n<commit_msg>Run yield-subtype test on nll mode too as a regression check<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ revisions:lexical nll\n#![cfg_attr(nll, feature(nll))]\n\n#![feature(generators)]\n\nfn bar<'a>() {\n let a: &'static str = \"hi\";\n let b: &'a str = a;\n\n || {\n yield a;\n yield b;\n };\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make a new common projects path function<commit_after>use args::Args;\n#[cfg(not(test))]\nuse dirs::home_dir;\nuse std::path::PathBuf;\n\npub static MUXED_FOLDER: &str = \".muxed\";\nstatic CONFIG_EXTENSION: &str = \"yml\";\n\npub struct ProjectPaths {\n pub home_directory: PathBuf,\n pub project_directory: PathBuf,\n pub project_file: PathBuf,\n}\n\nimpl ProjectPaths {\n pub fn new(home_directory: PathBuf, project_directory: PathBuf, project_file: PathBuf) -> ProjectPaths {\n ProjectPaths {\n home_directory,\n project_directory,\n project_file,\n }\n }\n\n #[cfg(test)]\n pub fn from_strs(home_directory: &str, project_directory: &str, project_file: &str) -> ProjectPaths {\n let home_directory = PathBuf::from(home_directory);\n let project_directory = home_directory.join(project_directory);\n let project_file = project_directory.join(project_file).with_extension(CONFIG_EXTENSION);\n\n ProjectPaths {\n home_directory,\n project_directory,\n project_file,\n }\n }\n}\n\n\/\/\/ A common method for returning the project directory and filepath. 
The method\n\/\/\/ will check for a passed argument set with -p but if it does not exist will\n\/\/\/ map the path for the .muxed directory in the users home directory and return\n\/\/\/ that as the default.\npub fn project_paths(args: &Args) -> ProjectPaths {\n let homedir = homedir().expect(\"We couldn't find your home directory.\");\n let default_dir = homedir.join(MUXED_FOLDER);\n let project_directory = args.flag_p.as_ref().map_or(default_dir, |p| PathBuf::from(p));\n\n let project_filename = PathBuf::from(&args.arg_project).with_extension(CONFIG_EXTENSION);\n let project_fullpath = project_directory.join(project_filename);\n\n ProjectPaths::new(homedir, project_directory, project_fullpath)\n}\n\n\/\/\/ A Thin wrapper around the home_dir crate. This is so we can swap the default\n\/\/\/ dir out during testing.\n#[cfg(not(test))]\nfn homedir() -> Option<PathBuf> {\n home_dir()\n}\n\n\/\/\/ Return the temp dir as the users home dir during testing.\n#[cfg(test)]\nfn homedir() -> Option<PathBuf> {\n Some(PathBuf::from(\"\/tmp\"))\n}\n\n\n#[cfg(test)]\nmod test {\n #[test]\n fn \n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>need?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add some threading documentation<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n use std::error::Error;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n\n Runtime::init_logger(is_debugging, is_verbose);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map(PathBuf::from)\n .unwrap_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n });\n let storepath = matches.value_of(\"storepath\")\n .map(PathBuf::from)\n .unwrap_or({\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n });\n\n let cfg = Configuration::new(&rtp);\n let cfg = if cfg.is_err() {\n let e = cfg.err().unwrap();\n if e.kind() != ConfigErrorKind::NoConfigFileFound {\n let cause : Option<Box<Error>> = Some(Box::new(e));\n return Err(RuntimeError::new(RuntimeErrorKind::Instantiate, cause));\n } else {\n None\n }\n } else {\n Some(cfg.unwrap())\n };\n\n let store_config = {\n match &cfg {\n &Some(ref c) => c.store_config().map(|c| c.clone()),\n _ => None\n }\n };\n\n Store::new(storepath, store_config).map(|store| {\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err(|e| {\n RuntimeError::new(RuntimeErrorKind::Instantiate, Some(Box::new(e)))\n })\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(\"verbosity\")\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(\"debugging\")\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(\"config\")\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"runtimepath\")\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"storepath\")\n .long(\"store\")\n .help(\"Alternative storepath. 
Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"editor\")\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool) {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Error\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match &self.configuration {\n &Some(ref c) => c.editor().map(|s| s.clone()),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\n\n<commit_msg>Add Runtime::config()<commit_after>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n use std::error::Error;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n\n Runtime::init_logger(is_debugging, is_verbose);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map(PathBuf::from)\n .unwrap_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n });\n let storepath = matches.value_of(\"storepath\")\n .map(PathBuf::from)\n .unwrap_or({\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n });\n\n let cfg = Configuration::new(&rtp);\n let cfg = if cfg.is_err() {\n let e = cfg.err().unwrap();\n if e.kind() != ConfigErrorKind::NoConfigFileFound {\n let cause : Option<Box<Error>> = Some(Box::new(e));\n return Err(RuntimeError::new(RuntimeErrorKind::Instantiate, cause));\n } else {\n None\n }\n } else {\n Some(cfg.unwrap())\n };\n\n let store_config = {\n match &cfg {\n &Some(ref c) => c.store_config().map(|c| c.clone()),\n _ => None\n }\n };\n\n Store::new(storepath, store_config).map(|store| {\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err(|e| {\n RuntimeError::new(RuntimeErrorKind::Instantiate, Some(Box::new(e)))\n })\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(\"verbosity\")\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(\"debugging\")\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(\"config\")\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"runtimepath\")\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"storepath\")\n .long(\"store\")\n .help(\"Alternative storepath. 
Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(\"editor\")\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool) {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Error\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match &self.configuration {\n &Some(ref c) => c.editor().map(|s| s.clone()),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>GH-ISSUE: #68 Finish making package_manager_command() function<commit_after><|endoftext|>"} {"text":"<commit_before>use std::ptr;\nuse gl;\nuse gl::types::*;\nuse nalgebra::na::{Vec2, Vec3, Mat3, Mat4};\nuse nalgebra::na;\nuse resource::Material;\nuse object::ObjectData;\nuse light::{Light, Absolute, StickToCamera};\nuse camera::Camera;\nuse resource::{Mesh, Shader, ShaderAttribute, ShaderUniform};\n\n#[path = \"..\/error.rs\"]\nmod error;\n\n\/\/\/ The default material used to draw objects.\npub struct ObjectMaterial {\n shader: Shader,\n pos: ShaderAttribute<Vec3<f32>>,\n normal: ShaderAttribute<Vec3<f32>>,\n tex_coord: ShaderAttribute<Vec2<f32>>,\n light: ShaderUniform<Vec3<f32>>,\n color: ShaderUniform<Vec3<f32>>,\n transform: ShaderUniform<Mat4<f32>>,\n scale: ShaderUniform<Mat3<f32>>,\n ntransform: ShaderUniform<Mat3<f32>>,\n view: ShaderUniform<Mat4<f32>>,\n tex: ShaderUniform<GLuint>\n}\n\nimpl ObjectMaterial {\n \/\/\/ Creates a new `ObjectMaterial`.\n pub fn new() -> ObjectMaterial {\n \/\/ load the shader\n let mut shader = Shader::new_from_str(OBJECT_VERTEX_SRC, OBJECT_FRAGMENT_SRC);\n\n shader.use_program();\n\n \/\/ get the variables locations\n ObjectMaterial {\n pos: shader.get_attrib(\"position\").unwrap(),\n normal: shader.get_attrib(\"normal\").unwrap(),\n tex_coord: shader.get_attrib(\"tex_coord_v\").unwrap(),\n light: shader.get_uniform(\"light_position\").unwrap(),\n color: shader.get_uniform(\"color\").unwrap(),\n transform: shader.get_uniform(\"transform\").unwrap(),\n scale: shader.get_uniform(\"scale\").unwrap(),\n ntransform: shader.get_uniform(\"ntransform\").unwrap(),\n view: shader.get_uniform(\"view\").unwrap(),\n tex: shader.get_uniform(\"tex\").unwrap(),\n shader: shader\n }\n }\n\n fn activate(&mut self) {\n self.shader.use_program();\n 
self.pos.enable();\n self.normal.enable();\n self.tex_coord.enable();\n }\n\n fn deactivate(&mut self) {\n self.pos.disable();\n self.normal.disable();\n self.tex_coord.disable();\n }\n}\n\nimpl Material for ObjectMaterial {\n fn render(&mut self,\n pass: uint,\n camera: &mut Camera,\n light: &Light,\n data: &ObjectData,\n mesh: &mut Mesh) {\n self.activate();\n\n\n \/*\n *\n * Setup camera and light.\n *\n *\/\n camera.upload(pass, &mut self.view);\n\n let pos = match *light {\n Absolute(ref p) => p.clone(),\n StickToCamera => camera.eye()\n };\n\n self.light.upload(&pos);\n\n \/*\n *\n * Setup object-related stuffs.\n *\n *\/\n let formated_transform: Mat4<f32> = na::to_homogeneous(data.transform());\n let formated_ntransform: Mat3<f32> = *data.transform().rotation.submat();\n\n unsafe {\n self.transform.upload(&formated_transform);\n self.ntransform.upload(&formated_ntransform);\n self.scale.upload(data.scale());\n self.color.upload(data.color());\n\n mesh.bind(&mut self.pos, &mut self.normal, &mut self.tex_coord);\n\n verify!(gl::ActiveTexture(gl::TEXTURE0));\n verify!(gl::BindTexture(gl::TEXTURE_2D, data.texture().id()));\n\n verify!(gl::DrawElements(gl::TRIANGLES,\n mesh.num_pts() as GLint,\n gl::UNSIGNED_INT,\n ptr::null()));\n }\n\n mesh.unbind();\n self.deactivate();\n }\n}\n\n\/\/\/ Vertex shader of the default object material.\npub static OBJECT_VERTEX_SRC: &'static str = A_VERY_LONG_STRING;\n\/\/\/ Fragment shader of the default object material.\npub static OBJECT_FRAGMENT_SRC: &'static str = ANOTHER_VERY_LONG_STRING;\n\nstatic A_VERY_LONG_STRING: &'static str =\n \"#version 120\n attribute vec3 position;\n attribute vec3 normal;\n attribute vec3 color;\n attribute vec2 tex_coord_v;\n varying vec3 ws_normal;\n varying vec3 ws_position;\n varying vec2 tex_coord;\n uniform mat4 view;\n uniform mat4 transform;\n uniform mat3 scale;\n uniform mat3 ntransform;\n void main() {\n mat4 scale4 = mat4(scale);\n vec4 pos4 = transform * scale4 * vec4(position, 1.0);\n tex_coord = tex_coord_v;\n ws_position = pos4.xyz;\n gl_Position = view * pos4;\n ws_normal = normalize(ntransform * scale * normal);\n }\";\n\n\/\/ phong lighting (heavily) inspired\n\/\/ by http:\/\/www.opengl.org\/sdk\/docs\/tutorials\/ClockworkCoders\/lighting.php\nstatic ANOTHER_VERY_LONG_STRING: &'static str =\n \"#version 120\n uniform vec3 color;\n uniform vec3 light_position;\n uniform sampler2D tex;\n varying vec2 tex_coord;\n varying vec3 ws_normal;\n varying vec3 ws_position;\n void main() {\n vec3 L = normalize(light_position - ws_position);\n vec3 E = normalize(-ws_position);\n\n \/\/calculate Ambient Term:\n vec4 Iamb = vec4(1.0, 1.0, 1.0, 1.0);\n\n \/\/calculate Diffuse Term:\n vec4 Idiff1 = vec4(1.0, 1.0, 1.0, 1.0) * max(dot(ws_normal,L), 0.0);\n Idiff1 = clamp(Idiff1, 0.0, 1.0);\n\n \/\/ double sided lighting:\n vec4 Idiff2 = vec4(1.0, 1.0, 1.0, 1.0) * max(dot(-ws_normal,L), 0.0);\n Idiff2 = clamp(Idiff2, 0.0, 1.0);\n\n vec4 tex_color = texture2D(tex, tex_coord);\n gl_FragColor = tex_color * (vec4(color, 1.0) + Iamb + (Idiff1 + Idiff2) \/ 2) \/ 3;\n }\";\n<commit_msg>Sets the ambient lighting to be entirely based on the color of the object, instead of having a white component.<commit_after>use std::ptr;\nuse gl;\nuse gl::types::*;\nuse nalgebra::na::{Vec2, Vec3, Mat3, Mat4};\nuse nalgebra::na;\nuse resource::Material;\nuse object::ObjectData;\nuse light::{Light, Absolute, StickToCamera};\nuse camera::Camera;\nuse resource::{Mesh, Shader, ShaderAttribute, ShaderUniform};\n\n#[path = \"..\/error.rs\"]\nmod 
error;\n\n\/\/\/ The default material used to draw objects.\npub struct ObjectMaterial {\n shader: Shader,\n pos: ShaderAttribute<Vec3<f32>>,\n normal: ShaderAttribute<Vec3<f32>>,\n tex_coord: ShaderAttribute<Vec2<f32>>,\n light: ShaderUniform<Vec3<f32>>,\n color: ShaderUniform<Vec3<f32>>,\n transform: ShaderUniform<Mat4<f32>>,\n scale: ShaderUniform<Mat3<f32>>,\n ntransform: ShaderUniform<Mat3<f32>>,\n view: ShaderUniform<Mat4<f32>>,\n tex: ShaderUniform<GLuint>\n}\n\nimpl ObjectMaterial {\n \/\/\/ Creates a new `ObjectMaterial`.\n pub fn new() -> ObjectMaterial {\n \/\/ load the shader\n let mut shader = Shader::new_from_str(OBJECT_VERTEX_SRC, OBJECT_FRAGMENT_SRC);\n\n shader.use_program();\n\n \/\/ get the variables locations\n ObjectMaterial {\n pos: shader.get_attrib(\"position\").unwrap(),\n normal: shader.get_attrib(\"normal\").unwrap(),\n tex_coord: shader.get_attrib(\"tex_coord_v\").unwrap(),\n light: shader.get_uniform(\"light_position\").unwrap(),\n color: shader.get_uniform(\"color\").unwrap(),\n transform: shader.get_uniform(\"transform\").unwrap(),\n scale: shader.get_uniform(\"scale\").unwrap(),\n ntransform: shader.get_uniform(\"ntransform\").unwrap(),\n view: shader.get_uniform(\"view\").unwrap(),\n tex: shader.get_uniform(\"tex\").unwrap(),\n shader: shader\n }\n }\n\n fn activate(&mut self) {\n self.shader.use_program();\n self.pos.enable();\n self.normal.enable();\n self.tex_coord.enable();\n }\n\n fn deactivate(&mut self) {\n self.pos.disable();\n self.normal.disable();\n self.tex_coord.disable();\n }\n}\n\nimpl Material for ObjectMaterial {\n fn render(&mut self,\n pass: uint,\n camera: &mut Camera,\n light: &Light,\n data: &ObjectData,\n mesh: &mut Mesh) {\n self.activate();\n\n\n \/*\n *\n * Setup camera and light.\n *\n *\/\n camera.upload(pass, &mut self.view);\n\n let pos = match *light {\n Absolute(ref p) => p.clone(),\n StickToCamera => camera.eye()\n };\n\n self.light.upload(&pos);\n\n \/*\n *\n * Setup object-related stuffs.\n *\n *\/\n let formated_transform: Mat4<f32> = na::to_homogeneous(data.transform());\n let formated_ntransform: Mat3<f32> = *data.transform().rotation.submat();\n\n unsafe {\n self.transform.upload(&formated_transform);\n self.ntransform.upload(&formated_ntransform);\n self.scale.upload(data.scale());\n self.color.upload(data.color());\n\n mesh.bind(&mut self.pos, &mut self.normal, &mut self.tex_coord);\n\n verify!(gl::ActiveTexture(gl::TEXTURE0));\n verify!(gl::BindTexture(gl::TEXTURE_2D, data.texture().id()));\n\n verify!(gl::DrawElements(gl::TRIANGLES,\n mesh.num_pts() as GLint,\n gl::UNSIGNED_INT,\n ptr::null()));\n }\n\n mesh.unbind();\n self.deactivate();\n }\n}\n\n\/\/\/ Vertex shader of the default object material.\npub static OBJECT_VERTEX_SRC: &'static str = A_VERY_LONG_STRING;\n\/\/\/ Fragment shader of the default object material.\npub static OBJECT_FRAGMENT_SRC: &'static str = ANOTHER_VERY_LONG_STRING;\n\nstatic A_VERY_LONG_STRING: &'static str =\n \"#version 120\n attribute vec3 position;\n attribute vec3 normal;\n attribute vec3 color;\n attribute vec2 tex_coord_v;\n varying vec3 ws_normal;\n varying vec3 ws_position;\n varying vec2 tex_coord;\n uniform mat4 view;\n uniform mat4 transform;\n uniform mat3 scale;\n uniform mat3 ntransform;\n void main() {\n mat4 scale4 = mat4(scale);\n vec4 pos4 = transform * scale4 * vec4(position, 1.0);\n tex_coord = tex_coord_v;\n ws_position = pos4.xyz;\n gl_Position = view * pos4;\n ws_normal = normalize(ntransform * scale * normal);\n }\";\n\n\/\/ phong lighting (heavily) 
inspired\n\/\/ by http:\/\/www.opengl.org\/sdk\/docs\/tutorials\/ClockworkCoders\/lighting.php\nstatic ANOTHER_VERY_LONG_STRING: &'static str =\n \"#version 120\n uniform vec3 color;\n uniform vec3 light_position;\n uniform sampler2D tex;\n varying vec2 tex_coord;\n varying vec3 ws_normal;\n varying vec3 ws_position;\n void main() {\n vec3 L = normalize(light_position - ws_position);\n vec3 E = normalize(-ws_position);\n\n \/\/calculate Ambient Term:\n vec4 Iamb = vec4(color, 1.0);\n\n \/\/calculate Diffuse Term:\n vec4 Idiff1 = vec4(1.0, 1.0, 1.0, 1.0) * max(dot(ws_normal,L), 0.0);\n Idiff1 = clamp(Idiff1, 0.0, 1.0);\n\n \/\/ double sided lighting:\n vec4 Idiff2 = vec4(1.0, 1.0, 1.0, 1.0) * max(dot(-ws_normal,L), 0.0);\n Idiff2 = clamp(Idiff2, 0.0, 1.0);\n\n vec4 tex_color = texture2D(tex, tex_coord);\n gl_FragColor = tex_color * (Iamb + (Idiff1 + Idiff2) \/ 2) \/ 2;\n }\";\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a changelog example<commit_after>extern crate futures;\nextern crate tokio_core;\n\n#[macro_use]\nextern crate reql;\nextern crate reql_types;\n\n#[macro_use]\nextern crate serde_derive;\nextern crate serde;\nextern crate serde_json;\n\nuse futures::stream::Stream;\nuse tokio_core::reactor::Core;\nuse reql::{Client, Document, Run};\n\n\/**\n * rethinkdb changelog example\n * After running, access your admin panel at localhost:8080\n *\n * setup a \"test\" database, with a \"test\" table, and run:\n *\n * r.db('test').table('test').insert({ test: 1 })\n *\n *\/\n\n#[derive(Debug, Serialize, Deserialize)]\nstruct TestItem {\n test: i32\n}\n\n#[derive(Debug, Serialize, Deserialize)]\nstruct Change {\n \/\/ Upon deserialisation:\n \/\/ We'll change the rethinkdb change \"type\" to avoid naming issues\n #[serde(rename(deserialize = \"type\"))]\n action: String,\n new_val: TestItem,\n old_val: TestItem,\n}\n\nfn main()\n{\n \/\/ Create a new ReQL client\n let r = Client::new();\n\n \/\/ Create an even loop\n let core = Core::new().unwrap();\n\n \/\/ Create a connection pool\n let conn = r.connect(&core.handle()).unwrap();\n\n \/\/ Run the query\n let query = r.db(\"test\")\n .table(\"test\")\n .changes()\n\n \/\/ We want rethinkdb to inform us of the change type\n .with_args(args!({\n include_types: true\n }))\n .run::<Change>(conn)\n .unwrap();\n\n \/\/ Process the results\n let stati = query.and_then(|change| {\n match change {\n \/\/ The server returned the response we were expecting,\n \/\/ and deserialized the data into our Change structure\n Some(Document::Expected(change)) => {\n \/\/ Extract the change type\n print!(\"{} action received\\n\\t=> \", change.action);\n\n \/\/ Match the change type\n match change.action.as_ref() {\n \"add\" => println!(\"{:?}\", change.new_val),\n \"remove\" => println!(\"{:?}\", change.old_val),\n \"change\" => println!(\"from {:?} to {:?}\", change.old_val, change.new_val),\n\n _ => println!(\"{:?}\", change)\n }\n }\n \/\/ We got a response alright, but it wasn't the one we were\n \/\/ expecting plus it's not an error either, otherwise it would\n \/\/ have been returned as such (This simply means that the response\n \/\/ we got couldn't be serialised into the type we were expecting)\n Some(Document::Unexpected(change)) => {\n println!(\"{}\",change)\n }\n \/\/ This is impossible in this particular example since there\n \/\/ needs to be at least one server available to give this\n \/\/ response otherwise we would have run into an error for\n \/\/ failing to connect\n None => {\n println!(\"got no documents in the database\");\n 
}\n }\n Ok(())\n })\n \/\/ Our query ran into an error\n .or_else(|error| {\n println!(\"{:?}\", error);\n Err(())\n });\n\n \/\/ Wait for all the results to be processed\n for _ in stati.wait() {}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Just let String grow on its own<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module provides a simplified abstraction for working with\n\/\/! code blocks identified by their integer node-id. In particular,\n\/\/! it captures a common set of attributes that all \"function-like\n\/\/! things\" (represented by `FnLike` instances) share. For example,\n\/\/! all `FnLike` instances have a type signature (be it explicit or\n\/\/! inferred). And all `FnLike` instances have a body, i.e. the code\n\/\/! that is run when the function-like thing it represents is invoked.\n\/\/!\n\/\/! With the above abstraction in place, one can treat the program\n\/\/! text as a collection of blocks of code (and most such blocks are\n\/\/! nested within a uniquely determined `FnLike`), and users can ask\n\/\/! for the `Code` associated with a particular NodeId.\n\nuse hir as ast;\nuse hir::map::{self, Node};\nuse hir::{Expr, FnDecl};\nuse hir::intravisit::FnKind;\nuse syntax::ast::{Attribute, Ident, Name, NodeId};\nuse syntax_pos::Span;\n\n\/\/\/ An FnLikeNode is a Node that is like a fn, in that it has a decl\n\/\/\/ and a body (as well as a NodeId, a span, etc).\n\/\/\/\n\/\/\/ More specifically, it is one of either:\n\/\/\/\n\/\/\/ - A function item,\n\/\/\/ - A closure expr (i.e. an ExprKind::Closure), or\n\/\/\/ - The default implementation for a trait method.\n\/\/\/\n\/\/\/ To construct one, use the `Code::from_node` function.\n#[derive(Copy, Clone, Debug)]\npub struct FnLikeNode<'a> { node: map::Node<'a> }\n\n\/\/\/ MaybeFnLike wraps a method that indicates if an object\n\/\/\/ corresponds to some FnLikeNode.\npub trait MaybeFnLike { fn is_fn_like(&self) -> bool; }\n\nimpl MaybeFnLike for ast::Item {\n fn is_fn_like(&self) -> bool {\n match self.node { ast::ItemKind::Fn(..) => true, _ => false, }\n }\n}\n\nimpl MaybeFnLike for ast::TraitItem {\n fn is_fn_like(&self) -> bool {\n match self.node {\n ast::TraitItemKind::Method(_, ast::TraitMethod::Provided(_)) => true,\n _ => false,\n }\n }\n}\n\nimpl MaybeFnLike for ast::Expr {\n fn is_fn_like(&self) -> bool {\n match self.node {\n ast::ExprKind::Closure(..) 
=> true,\n _ => false,\n }\n }\n}\n\n\/\/\/ Carries either an FnLikeNode or a Expr, as these are the two\n\/\/\/ constructs that correspond to \"code\" (as in, something from which\n\/\/\/ we can construct a control-flow graph).\n#[derive(Copy, Clone)]\npub enum Code<'a> {\n FnLike(FnLikeNode<'a>),\n Expr(&'a Expr),\n}\n\nimpl<'a> Code<'a> {\n pub fn id(&self) -> NodeId {\n match *self {\n Code::FnLike(node) => node.id(),\n Code::Expr(block) => block.id,\n }\n }\n\n \/\/\/ Attempts to construct a Code from presumed FnLike or Expr node input.\n pub fn from_node(map: &map::Map<'a>, id: NodeId) -> Option<Code<'a>> {\n match map.get(id) {\n map::NodeBlock(_) => {\n \/\/ Use the parent, hopefully an expression node.\n Code::from_node(map, map.get_parent_node(id))\n }\n map::NodeExpr(expr) => Some(Code::Expr(expr)),\n node => FnLikeNode::from_node(node).map(Code::FnLike)\n }\n }\n}\n\n\/\/\/ These are all the components one can extract from a fn item for\n\/\/\/ use when implementing FnLikeNode operations.\nstruct ItemFnParts<'a> {\n name: Name,\n decl: &'a ast::FnDecl,\n header: ast::FnHeader,\n vis: &'a ast::Visibility,\n generics: &'a ast::Generics,\n body: ast::BodyId,\n id: NodeId,\n span: Span,\n attrs: &'a [Attribute],\n}\n\n\/\/\/ These are all the components one can extract from a closure expr\n\/\/\/ for use when implementing FnLikeNode operations.\nstruct ClosureParts<'a> {\n decl: &'a FnDecl,\n body: ast::BodyId,\n id: NodeId,\n span: Span,\n attrs: &'a [Attribute],\n}\n\nimpl<'a> ClosureParts<'a> {\n fn new(d: &'a FnDecl, b: ast::BodyId, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {\n ClosureParts {\n decl: d,\n body: b,\n id,\n span: s,\n attrs,\n }\n }\n}\n\nimpl<'a> FnLikeNode<'a> {\n \/\/\/ Attempts to construct a FnLikeNode from presumed FnLike node input.\n pub fn from_node(node: Node) -> Option<FnLikeNode> {\n let fn_like = match node {\n map::NodeItem(item) => item.is_fn_like(),\n map::NodeTraitItem(tm) => tm.is_fn_like(),\n map::NodeImplItem(_) => true,\n map::NodeExpr(e) => e.is_fn_like(),\n _ => false\n };\n if fn_like {\n Some(FnLikeNode {\n node,\n })\n } else {\n None\n }\n }\n\n pub fn body(self) -> ast::BodyId {\n self.handle(|i: ItemFnParts<'a>| i.body,\n |_, _, _: &'a ast::MethodSig, _, body: ast::BodyId, _, _| body,\n |c: ClosureParts<'a>| c.body)\n }\n\n pub fn decl(self) -> &'a FnDecl {\n self.handle(|i: ItemFnParts<'a>| &*i.decl,\n |_, _, sig: &'a ast::MethodSig, _, _, _, _| &sig.decl,\n |c: ClosureParts<'a>| c.decl)\n }\n\n pub fn span(self) -> Span {\n self.handle(|i: ItemFnParts| i.span,\n |_, _, _: &'a ast::MethodSig, _, _, span, _| span,\n |c: ClosureParts| c.span)\n }\n\n pub fn id(self) -> NodeId {\n self.handle(|i: ItemFnParts| i.id,\n |id, _, _: &'a ast::MethodSig, _, _, _, _| id,\n |c: ClosureParts| c.id)\n }\n\n pub fn constness(self) -> ast::Constness {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.constness,\n FnKind::Method(_, m, ..) => m.header.constness,\n _ => ast::Constness::NotConst\n }\n }\n\n pub fn asyncness(self) -> ast::IsAsync {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.asyncness,\n FnKind::Method(_, m, ..) => m.header.asyncness,\n _ => ast::IsAsync::NotAsync\n }\n }\n\n pub fn unsafety(self) -> ast::Unsafety {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.unsafety,\n FnKind::Method(_, m, ..) 
=> m.header.unsafety,\n _ => ast::Unsafety::Normal\n }\n }\n\n pub fn kind(self) -> FnKind<'a> {\n let item = |p: ItemFnParts<'a>| -> FnKind<'a> {\n FnKind::ItemFn(p.name, p.generics, p.header, p.vis, p.attrs)\n };\n let closure = |c: ClosureParts<'a>| {\n FnKind::Closure(c.attrs)\n };\n let method = |_, ident: Ident, sig: &'a ast::MethodSig, vis, _, _, attrs| {\n FnKind::Method(ident, sig, vis, attrs)\n };\n self.handle(item, method, closure)\n }\n\n fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where\n I: FnOnce(ItemFnParts<'a>) -> A,\n M: FnOnce(NodeId,\n Ident,\n &'a ast::MethodSig,\n Option<&'a ast::Visibility>,\n ast::BodyId,\n Span,\n &'a [Attribute])\n -> A,\n C: FnOnce(ClosureParts<'a>) -> A,\n {\n match self.node {\n map::NodeItem(i) => match i.node {\n ast::ItemKind::Fn(ref decl, header, ref generics, block) =>\n item_fn(ItemFnParts {\n id: i.id,\n name: i.name,\n decl: &decl,\n body: block,\n vis: &i.vis,\n span: i.span,\n attrs: &i.attrs,\n header,\n generics,\n }),\n _ => bug!(\"item FnLikeNode that is not fn-like\"),\n },\n map::NodeTraitItem(ti) => match ti.node {\n ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => {\n method(ti.id, ti.ident, sig, None, body, ti.span, &ti.attrs)\n }\n _ => bug!(\"trait method FnLikeNode that is not fn-like\"),\n },\n map::NodeImplItem(ii) => {\n match ii.node {\n ast::ImplItemKind::Method(ref sig, body) => {\n method(ii.id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs)\n }\n _ => {\n bug!(\"impl method FnLikeNode that is not fn-like\")\n }\n }\n },\n map::NodeExpr(e) => match e.node {\n ast::ExprKind::Closure(_, ref decl, block, _fn_decl_span, _gen) =>\n closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)),\n _ => bug!(\"expr FnLikeNode that is not fn-like\"),\n },\n _ => bug!(\"other FnLikeNode that is not fn-like\"),\n }\n }\n}\n<commit_msg>Only methods are fn-like, not other associated items<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module provides a simplified abstraction for working with\n\/\/! code blocks identified by their integer node-id. In particular,\n\/\/! it captures a common set of attributes that all \"function-like\n\/\/! things\" (represented by `FnLike` instances) share. For example,\n\/\/! all `FnLike` instances have a type signature (be it explicit or\n\/\/! inferred). And all `FnLike` instances have a body, i.e. the code\n\/\/! that is run when the function-like thing it represents is invoked.\n\/\/!\n\/\/! With the above abstraction in place, one can treat the program\n\/\/! text as a collection of blocks of code (and most such blocks are\n\/\/! nested within a uniquely determined `FnLike`), and users can ask\n\/\/! 
for the `Code` associated with a particular NodeId.\n\nuse hir as ast;\nuse hir::map::{self, Node};\nuse hir::{Expr, FnDecl};\nuse hir::intravisit::FnKind;\nuse syntax::ast::{Attribute, Ident, Name, NodeId};\nuse syntax_pos::Span;\n\n\/\/\/ An FnLikeNode is a Node that is like a fn, in that it has a decl\n\/\/\/ and a body (as well as a NodeId, a span, etc).\n\/\/\/\n\/\/\/ More specifically, it is one of either:\n\/\/\/\n\/\/\/ - A function item,\n\/\/\/ - A closure expr (i.e. an ExprKind::Closure), or\n\/\/\/ - The default implementation for a trait method.\n\/\/\/\n\/\/\/ To construct one, use the `Code::from_node` function.\n#[derive(Copy, Clone, Debug)]\npub struct FnLikeNode<'a> { node: map::Node<'a> }\n\n\/\/\/ MaybeFnLike wraps a method that indicates if an object\n\/\/\/ corresponds to some FnLikeNode.\npub trait MaybeFnLike { fn is_fn_like(&self) -> bool; }\n\nimpl MaybeFnLike for ast::Item {\n fn is_fn_like(&self) -> bool {\n match self.node { ast::ItemKind::Fn(..) => true, _ => false, }\n }\n}\n\nimpl MaybeFnLike for ast::ImplItem {\n fn is_fn_like(&self) -> bool {\n match self.node { ast::ImplItemKind::Method(..) => true, _ => false, }\n }\n}\n\nimpl MaybeFnLike for ast::TraitItem {\n fn is_fn_like(&self) -> bool {\n match self.node {\n ast::TraitItemKind::Method(_, ast::TraitMethod::Provided(_)) => true,\n _ => false,\n }\n }\n}\n\nimpl MaybeFnLike for ast::Expr {\n fn is_fn_like(&self) -> bool {\n match self.node {\n ast::ExprKind::Closure(..) => true,\n _ => false,\n }\n }\n}\n\n\/\/\/ Carries either an FnLikeNode or a Expr, as these are the two\n\/\/\/ constructs that correspond to \"code\" (as in, something from which\n\/\/\/ we can construct a control-flow graph).\n#[derive(Copy, Clone)]\npub enum Code<'a> {\n FnLike(FnLikeNode<'a>),\n Expr(&'a Expr),\n}\n\nimpl<'a> Code<'a> {\n pub fn id(&self) -> NodeId {\n match *self {\n Code::FnLike(node) => node.id(),\n Code::Expr(block) => block.id,\n }\n }\n\n \/\/\/ Attempts to construct a Code from presumed FnLike or Expr node input.\n pub fn from_node(map: &map::Map<'a>, id: NodeId) -> Option<Code<'a>> {\n match map.get(id) {\n map::NodeBlock(_) => {\n \/\/ Use the parent, hopefully an expression node.\n Code::from_node(map, map.get_parent_node(id))\n }\n map::NodeExpr(expr) => Some(Code::Expr(expr)),\n node => FnLikeNode::from_node(node).map(Code::FnLike)\n }\n }\n}\n\n\/\/\/ These are all the components one can extract from a fn item for\n\/\/\/ use when implementing FnLikeNode operations.\nstruct ItemFnParts<'a> {\n name: Name,\n decl: &'a ast::FnDecl,\n header: ast::FnHeader,\n vis: &'a ast::Visibility,\n generics: &'a ast::Generics,\n body: ast::BodyId,\n id: NodeId,\n span: Span,\n attrs: &'a [Attribute],\n}\n\n\/\/\/ These are all the components one can extract from a closure expr\n\/\/\/ for use when implementing FnLikeNode operations.\nstruct ClosureParts<'a> {\n decl: &'a FnDecl,\n body: ast::BodyId,\n id: NodeId,\n span: Span,\n attrs: &'a [Attribute],\n}\n\nimpl<'a> ClosureParts<'a> {\n fn new(d: &'a FnDecl, b: ast::BodyId, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {\n ClosureParts {\n decl: d,\n body: b,\n id,\n span: s,\n attrs,\n }\n }\n}\n\nimpl<'a> FnLikeNode<'a> {\n \/\/\/ Attempts to construct a FnLikeNode from presumed FnLike node input.\n pub fn from_node(node: Node) -> Option<FnLikeNode> {\n let fn_like = match node {\n map::NodeItem(item) => item.is_fn_like(),\n map::NodeTraitItem(tm) => tm.is_fn_like(),\n map::NodeImplItem(it) => it.is_fn_like(),\n map::NodeExpr(e) => e.is_fn_like(),\n _ => 
false\n };\n if fn_like {\n Some(FnLikeNode {\n node,\n })\n } else {\n None\n }\n }\n\n pub fn body(self) -> ast::BodyId {\n self.handle(|i: ItemFnParts<'a>| i.body,\n |_, _, _: &'a ast::MethodSig, _, body: ast::BodyId, _, _| body,\n |c: ClosureParts<'a>| c.body)\n }\n\n pub fn decl(self) -> &'a FnDecl {\n self.handle(|i: ItemFnParts<'a>| &*i.decl,\n |_, _, sig: &'a ast::MethodSig, _, _, _, _| &sig.decl,\n |c: ClosureParts<'a>| c.decl)\n }\n\n pub fn span(self) -> Span {\n self.handle(|i: ItemFnParts| i.span,\n |_, _, _: &'a ast::MethodSig, _, _, span, _| span,\n |c: ClosureParts| c.span)\n }\n\n pub fn id(self) -> NodeId {\n self.handle(|i: ItemFnParts| i.id,\n |id, _, _: &'a ast::MethodSig, _, _, _, _| id,\n |c: ClosureParts| c.id)\n }\n\n pub fn constness(self) -> ast::Constness {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.constness,\n FnKind::Method(_, m, ..) => m.header.constness,\n _ => ast::Constness::NotConst\n }\n }\n\n pub fn asyncness(self) -> ast::IsAsync {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.asyncness,\n FnKind::Method(_, m, ..) => m.header.asyncness,\n _ => ast::IsAsync::NotAsync\n }\n }\n\n pub fn unsafety(self) -> ast::Unsafety {\n match self.kind() {\n FnKind::ItemFn(_, _, header, ..) => header.unsafety,\n FnKind::Method(_, m, ..) => m.header.unsafety,\n _ => ast::Unsafety::Normal\n }\n }\n\n pub fn kind(self) -> FnKind<'a> {\n let item = |p: ItemFnParts<'a>| -> FnKind<'a> {\n FnKind::ItemFn(p.name, p.generics, p.header, p.vis, p.attrs)\n };\n let closure = |c: ClosureParts<'a>| {\n FnKind::Closure(c.attrs)\n };\n let method = |_, ident: Ident, sig: &'a ast::MethodSig, vis, _, _, attrs| {\n FnKind::Method(ident, sig, vis, attrs)\n };\n self.handle(item, method, closure)\n }\n\n fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where\n I: FnOnce(ItemFnParts<'a>) -> A,\n M: FnOnce(NodeId,\n Ident,\n &'a ast::MethodSig,\n Option<&'a ast::Visibility>,\n ast::BodyId,\n Span,\n &'a [Attribute])\n -> A,\n C: FnOnce(ClosureParts<'a>) -> A,\n {\n match self.node {\n map::NodeItem(i) => match i.node {\n ast::ItemKind::Fn(ref decl, header, ref generics, block) =>\n item_fn(ItemFnParts {\n id: i.id,\n name: i.name,\n decl: &decl,\n body: block,\n vis: &i.vis,\n span: i.span,\n attrs: &i.attrs,\n header,\n generics,\n }),\n _ => bug!(\"item FnLikeNode that is not fn-like\"),\n },\n map::NodeTraitItem(ti) => match ti.node {\n ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => {\n method(ti.id, ti.ident, sig, None, body, ti.span, &ti.attrs)\n }\n _ => bug!(\"trait method FnLikeNode that is not fn-like\"),\n },\n map::NodeImplItem(ii) => {\n match ii.node {\n ast::ImplItemKind::Method(ref sig, body) => {\n method(ii.id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs)\n }\n _ => {\n bug!(\"impl method FnLikeNode that is not fn-like\")\n }\n }\n },\n map::NodeExpr(e) => match e.node {\n ast::ExprKind::Closure(_, ref decl, block, _fn_decl_span, _gen) =>\n closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)),\n _ => bug!(\"expr FnLikeNode that is not fn-like\"),\n },\n _ => bug!(\"other FnLikeNode that is not fn-like\"),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use `new_all_targets` in cargo-fix when no target selection exists<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reworking on print command<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! * All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(PartialEq)]\nenum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\nstruct Feature {\n level: Status,\n since: String,\n has_gate_test: bool,\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let mut features = collect_lang_features(&path.join(\"libsyntax\/feature_gate.rs\"));\n assert!(!features.is_empty());\n let mut lib_features = HashMap::<String, Feature>::new();\n\n let mut contents = String::new();\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => {\n err(\"malformed stability attribute\");\n continue;\n }\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err(\"malformed stability attribute\");\n continue;\n }\n None => \"None\",\n };\n\n if features.contains_key(feature_name) {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(feature_name) {\n if s.level != level {\n err(\"different stability level than before\");\n }\n if s.since != since {\n err(\"different `since` than before\");\n }\n continue;\n }\n lib_features.insert(feature_name.to_owned(),\n Feature {\n level: level,\n since: since.to_owned(),\n has_gate_test: false,\n });\n }\n });\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == 
\"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n let found_feature = features.get_mut(feature_name)\n .map(|v| { v.has_gate_test = true; () })\n .is_some();\n\n let found_lib_feature = features.get_mut(feature_name)\n .map(|v| { v.has_gate_test = true; () })\n .is_some();\n\n if !(found_feature || found_lib_feature) {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n });\n\n \/\/ FIXME get this whitelist empty.\n let whitelist = vec![\n ];\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .filter(|&(n, _)| !whitelist.contains(&n.as_str()))\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n println!(\"Found {} features without a gate test.\", gate_untested.len());\n *bad = true;\n }\n\n if *bad {\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str,\n features: &mut HashMap<String, Feature>) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\nfn collect_lang_features(path: &Path) -> HashMap<String, Feature> {\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n 
let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n Some((name.to_owned(),\n Feature {\n level: level,\n since: since.to_owned(),\n has_gate_test: false,\n }))\n })\n .collect()\n}\n<commit_msg>Remove the compile-fail feature gates whitelist<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! * All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(PartialEq)]\nenum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\nstruct Feature {\n level: Status,\n since: String,\n has_gate_test: bool,\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let mut features = collect_lang_features(&path.join(\"libsyntax\/feature_gate.rs\"));\n assert!(!features.is_empty());\n let mut lib_features = HashMap::<String, Feature>::new();\n\n let mut contents = String::new();\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => {\n err(\"malformed stability attribute\");\n continue;\n }\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err(\"malformed stability attribute\");\n continue;\n }\n None => \"None\",\n };\n\n if features.contains_key(feature_name) {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(feature_name) {\n if s.level != level {\n err(\"different stability level than before\");\n }\n if s.since != since {\n err(\"different `since` than before\");\n }\n continue;\n }\n lib_features.insert(feature_name.to_owned(),\n Feature {\n level: level,\n since: since.to_owned(),\n 
has_gate_test: false,\n });\n }\n });\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n let found_feature = features.get_mut(feature_name)\n .map(|v| { v.has_gate_test = true; () })\n .is_some();\n\n let found_lib_feature = features.get_mut(feature_name)\n .map(|v| { v.has_gate_test = true; () })\n .is_some();\n\n if !(found_feature || found_lib_feature) {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n println!(\"Found {} features without a gate test.\", gate_untested.len());\n *bad = true;\n }\n\n if *bad {\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str,\n features: &mut HashMap<String, Feature>) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\nfn collect_lang_features(path: &Path) -> HashMap<String, Feature> {\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = 
line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n Some((name.to_owned(),\n Feature {\n level: level,\n since: since.to_owned(),\n has_gate_test: false,\n }))\n })\n .collect()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>and again, forgot the real file..; other was cleanup & mysql testing<commit_after>extern crate mysql;\n\nuse mysql::conn::pool::MyPool;\nuse mysql::conn::pool::MyPooledConn;\nuse mysql::conn::Stmt;\n\nuse std::process::{Command, Stdio};\nuse std::error::Error;\nuse std::io::prelude::*;\nuse std::io::BufReader;\n\n#[derive(Debug)]\npub enum DownloadError {\n\tConsoleError,\n\tRadError,\n\tDMCAError,\n\tInternalError,\n}\n\n\/\/\/Downloads a video, updates the DB\n\/\/\/TODO: get the sql statements out of the class\n\/\/\/TODO: wrap errors\n\/\/\/Doesn't care about DMCAs, will emit errors on them\npub fn download_video(url: &str, quality: i32, qid: i64, folderFormat: &str, pool: MyPool) -> Result<bool,DownloadError> {\n\tlet process = match Command::new(\"youtube-dl\")\n\t\t\t\t\t\t\t\t.arg(\"--newline\")\n\t\t\t\t\t\t\t\t.arg(format!(\"-o {}\",folderFormat))\n\t\t\t\t\t\t\t\t.arg(url)\n\t\t\t\t\t\t\t\t.stdin(Stdio::null())\n .stdout(Stdio::piped())\n .spawn() {\n Err(why) => panic!(\"couldn't spawn cmd: {}\", Error::description(&why)),\n Ok(process) => process,\n };\n\tlet mut s = String::new(); \/\/buffer prep\n\tlet mut stdout = BufReader::new(process.stdout.unwrap());\n\n\tlet mut conn = pool.get_conn().unwrap();\n\tlet statement = prepare_progress_updater(&mut conn);\n\n\tfor line in &mut stdout.lines(){\n\t\tmatch line{\n\t\t\tErr(why) => panic!(\"couldn't read cmd stdout: {}\", Error::description(&why)),\n\t\t\tOk(text) => println!(\"{}\",text),\n\t\t}\n\t}\n\n\tOk(true)\n}\n\nfn prepare_progress_updater<'a>(conn: &'a mut MyPooledConn) -> Stmt<'a> {\n\tconn.prepare(\"UPDATE querydetails SET status = ? WHERE qid = ?\").unwrap()\n}\n\nfn update_progress(stmt: &mut Stmt, progress: i32, qid: i64){\n\tstmt.execute(&[&progress,&qid]);\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Utilities related to FFI bindings.\n\/\/!\n\/\/! This module provides utilities to handle data across non-Rust\n\/\/! interfaces, like other programming languages and the underlying\n\/\/! operating system. It is mainly of use for FFI (Foreign Function\n\/\/! Interface) bindings and code that needs to exchange C-like strings\n\/\/! with other languages.\n\/\/!\n\/\/! # Overview\n\/\/!\n\/\/! Rust represents owned strings with the [`String`] type, and\n\/\/! borrowed slices of strings with the [`str`] primitive. Both are\n\/\/! always in UTF-8 encoding, and may contain nul bytes in the middle,\n\/\/! i.e., if you look at the bytes that make up the string, there may\n\/\/! 
be a `\\0` among them. Both `String` and `str` store their length\n\/\/! explicitly; there are no nul terminators at the end of strings\n\/\/! like in C.\n\/\/!\n\/\/! C strings are different from Rust strings:\n\/\/!\n\/\/! * **Encodings** - Rust strings are UTF-8, but C strings may use\n\/\/! other encodings. If you are using a string from C, you should\n\/\/! check its encoding explicitly, rather than just assuming that it\n\/\/! is UTF-8 like you can do in Rust.\n\/\/!\n\/\/! * **Character size** - C strings may use `char` or `wchar_t`-sized\n\/\/! characters; please **note** that C's `char` is different from Rust's.\n\/\/! The C standard leaves the actual sizes of those types open to\n\/\/! interpretation, but defines different APIs for strings made up of\n\/\/! each character type. Rust strings are always UTF-8, so different\n\/\/! Unicode characters will be encoded in a variable number of bytes\n\/\/! each. The Rust type [`char`] represents a '[Unicode scalar\n\/\/! value]', which is similar to, but not the same as, a '[Unicode\n\/\/! code point]'.\n\/\/!\n\/\/! * **Nul terminators and implicit string lengths** - Often, C\n\/\/! strings are nul-terminated, i.e., they have a `\\0` character at the\n\/\/! end. The length of a string buffer is not stored, but has to be\n\/\/! calculated; to compute the length of a string, C code must\n\/\/! manually call a function like `strlen()` for `char`-based strings,\n\/\/! or `wcslen()` for `wchar_t`-based ones. Those functions return\n\/\/! the number of characters in the string excluding the nul\n\/\/! terminator, so the buffer length is really `len+1` characters.\n\/\/! Rust strings don't have a nul terminator; their length is always\n\/\/! stored and does not need to be calculated. While in Rust\n\/\/! accessing a string's length is a O(1) operation (because the\n\/\/! length is stored); in C it is an O(length) operation because the\n\/\/! length needs to be computed by scanning the string for the nul\n\/\/! terminator.\n\/\/!\n\/\/! * **Internal nul characters** - When C strings have a nul\n\/\/! terminator character, this usually means that they cannot have nul\n\/\/! characters in the middle — a nul character would essentially\n\/\/! truncate the string. Rust strings *can* have nul characters in\n\/\/! the middle, because nul does not have to mark the end of the\n\/\/! string in Rust.\n\/\/!\n\/\/! # Representations of non-Rust strings\n\/\/!\n\/\/! [`CString`] and [`CStr`] are useful when you need to transfer\n\/\/! UTF-8 strings to and from languages with a C ABI, like Python.\n\/\/!\n\/\/! * **From Rust to C:** [`CString`] represents an owned, C-friendly\n\/\/! string: it is nul-terminated, and has no internal nul characters.\n\/\/! Rust code can create a `CString` out of a normal string (provided\n\/\/! that the string doesn't have nul characters in the middle), and\n\/\/! then use a variety of methods to obtain a raw `*mut u8` that can\n\/\/! then be passed as an argument to functions which use the C\n\/\/! conventions for strings.\n\/\/!\n\/\/! * **From C to Rust:** [`CStr`] represents a borrowed C string; it\n\/\/! is what you would use to wrap a raw `*const u8` that you got from\n\/\/! a C function. A `CStr` is guaranteed to be a nul-terminated array\n\/\/! of bytes. Once you have a `CStr`, you can convert it to a Rust\n\/\/! `&str` if it's valid UTF-8, or lossily convert it by adding\n\/\/! replacement characters.\n\/\/!\n\/\/! [`OsString`] and [`OsStr`] are useful when you need to transfer\n\/\/! 
strings to and from the operating system itself, or when capturing\n\/\/! the output of external commands. Conversions between `OsString`,\n\/\/! `OsStr` and Rust strings work similarly to those for [`CString`]\n\/\/! and [`CStr`].\n\/\/!\n\/\/! * [`OsString`] represents an owned string in whatever\n\/\/! representation the operating system prefers. In the Rust standard\n\/\/! library, various APIs that transfer strings to\/from the operating\n\/\/! system use `OsString` instead of plain strings. For example,\n\/\/! [`env::var_os()`] is used to query environment variables; it\n\/\/! returns an `Option<OsString>`. If the environment variable exists\n\/\/! you will get a `Some(os_string)`, which you can *then* try to\n\/\/! convert to a Rust string. This yields a [`Result<>`], so that\n\/\/! your code can detect errors in case the environment variable did\n\/\/! not in fact contain valid Unicode data.\n\/\/!\n\/\/! * [`OsStr`] represents a borrowed reference to a string in a\n\/\/! format that can be passed to the operating system. It can be\n\/\/! converted into an UTF-8 Rust string slice in a similar way to\n\/\/! `OsString`.\n\/\/!\n\/\/! # Conversions\n\/\/!\n\/\/! ## On Unix\n\/\/!\n\/\/! On Unix, [`OsStr`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStrExt`][unix.OsStrExt] trait, which\n\/\/! augments it with two methods, [`from_bytes`] and [`as_bytes`].\n\/\/! These do inexpensive conversions from and to UTF-8 byte slices.\n\/\/!\n\/\/! Additionally, on Unix [`OsString`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStringExt`][unix.OsStringExt] trait,\n\/\/! which provides [`from_vec`] and [`into_vec`] methods that consume\n\/\/! their arguments, and take or produce vectors of [`u8`].\n\/\/!\n\/\/! ## On Windows\n\/\/!\n\/\/! On Windows, [`OsStr`] implements the\n\/\/! `std::os::windows::ffi::`[`OsStrExt`][windows.OsStrExt] trait,\n\/\/! which provides an [`encode_wide`] method. This provides an\n\/\/! iterator that can be [`collect`]ed into a vector of [`u16`].\n\/\/!\n\/\/! Additionally, on Windows [`OsString`] implements the\n\/\/! `std::os::windows:ffi::`[`OsStringExt`][windows.OsStringExt]\n\/\/! trait, which provides a [`from_wide`] method. The result of this\n\/\/! method is an `OsString` which can be round-tripped to a Windows\n\/\/! string losslessly.\n\/\/!\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`str`]: ..\/primitive.str.html\n\/\/! [`char`]: ..\/primitive.char.html\n\/\/! [`u8`]: ..\/primitive.u8.html\n\/\/! [`u16`]: ..\/primitive.u16.html\n\/\/! [Unicode scalar value]: http:\/\/www.unicode.org\/glossary\/#unicode_scalar_value\n\/\/! [Unicode code point]: http:\/\/www.unicode.org\/glossary\/#code_point\n\/\/! [`CString`]: struct.CString.html\n\/\/! [`CStr`]: struct.CStr.html\n\/\/! [`OsString`]: struct.OsString.html\n\/\/! [`OsStr`]: struct.OsStr.html\n\/\/! [`env::set_var()`]: ..\/env\/fn.set_var.html\n\/\/! [`env::var_os()`]: ..\/env\/fn.var_os.html\n\/\/! [`Result<>`]: ..\/result\/enum.Result.html\n\/\/! [unix.OsStringExt]: ..\/os\/unix\/ffi\/trait.OsStringExt.html\n\/\/! [`from_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.from_vec\n\/\/! [`into_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.into_vec\n\/\/! [unix.OsStrExt]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [`from_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.from_bytes\n\/\/! [`as_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.as_bytes\n\/\/! [`OsStrExt`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! 
[windows.OsStrExt]: ..\/os\/windows\/ffi\/trait.OsStrExt.html\n\/\/! [`encode_wide`]: ..\/os\/windows\/ffi\/trait.OsStrExt.html#tymethod.encode_wide\n\/\/! [`collect`]: ..\/iter\/trait.Iterator.html#method.collect\n\/\/! [windows.OsStringExt]: ..\/os\/windows\/ffi\/trait.OsStringExt.html\n\/\/! [`from_wide`]: ..\/os\/windows\/ffi\/trait.OsStringExt.html#tymethod.from_wide\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::c_str::{CString, CStr, NulError, IntoStringError};\n#[stable(feature = \"cstr_from_bytes\", since = \"1.10.0\")]\npub use self::c_str::{FromBytesWithNulError};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::os_str::{OsString, OsStr};\n\n#[stable(feature = \"raw_os\", since = \"1.1.0\")]\npub use core::ffi::c_void;\n\n#[unstable(feature = \"c_variadic\",\n reason = \"the `c_variadic` feature has not been properly tested on \\\n all supported platforms\",\n issue = \"27745\")]\npub use core::ffi::VaList;\n\nmod c_str;\nmod os_str;\n<commit_msg>Rollup merge of #56731 - GuillaumeGomez:ffi-doc-urls, r=Centril<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Utilities related to FFI bindings.\n\/\/!\n\/\/! This module provides utilities to handle data across non-Rust\n\/\/! interfaces, like other programming languages and the underlying\n\/\/! operating system. It is mainly of use for FFI (Foreign Function\n\/\/! Interface) bindings and code that needs to exchange C-like strings\n\/\/! with other languages.\n\/\/!\n\/\/! # Overview\n\/\/!\n\/\/! Rust represents owned strings with the [`String`] type, and\n\/\/! borrowed slices of strings with the [`str`] primitive. Both are\n\/\/! always in UTF-8 encoding, and may contain nul bytes in the middle,\n\/\/! i.e., if you look at the bytes that make up the string, there may\n\/\/! be a `\\0` among them. Both `String` and `str` store their length\n\/\/! explicitly; there are no nul terminators at the end of strings\n\/\/! like in C.\n\/\/!\n\/\/! C strings are different from Rust strings:\n\/\/!\n\/\/! * **Encodings** - Rust strings are UTF-8, but C strings may use\n\/\/! other encodings. If you are using a string from C, you should\n\/\/! check its encoding explicitly, rather than just assuming that it\n\/\/! is UTF-8 like you can do in Rust.\n\/\/!\n\/\/! * **Character size** - C strings may use `char` or `wchar_t`-sized\n\/\/! characters; please **note** that C's `char` is different from Rust's.\n\/\/! The C standard leaves the actual sizes of those types open to\n\/\/! interpretation, but defines different APIs for strings made up of\n\/\/! each character type. Rust strings are always UTF-8, so different\n\/\/! Unicode characters will be encoded in a variable number of bytes\n\/\/! each. The Rust type [`char`] represents a '[Unicode scalar\n\/\/! value]', which is similar to, but not the same as, a '[Unicode\n\/\/! code point]'.\n\/\/!\n\/\/! * **Nul terminators and implicit string lengths** - Often, C\n\/\/! 
strings are nul-terminated, i.e., they have a `\\0` character at the\n\/\/! end. The length of a string buffer is not stored, but has to be\n\/\/! calculated; to compute the length of a string, C code must\n\/\/! manually call a function like `strlen()` for `char`-based strings,\n\/\/! or `wcslen()` for `wchar_t`-based ones. Those functions return\n\/\/! the number of characters in the string excluding the nul\n\/\/! terminator, so the buffer length is really `len+1` characters.\n\/\/! Rust strings don't have a nul terminator; their length is always\n\/\/! stored and does not need to be calculated. While in Rust\n\/\/! accessing a string's length is a O(1) operation (because the\n\/\/! length is stored); in C it is an O(length) operation because the\n\/\/! length needs to be computed by scanning the string for the nul\n\/\/! terminator.\n\/\/!\n\/\/! * **Internal nul characters** - When C strings have a nul\n\/\/! terminator character, this usually means that they cannot have nul\n\/\/! characters in the middle — a nul character would essentially\n\/\/! truncate the string. Rust strings *can* have nul characters in\n\/\/! the middle, because nul does not have to mark the end of the\n\/\/! string in Rust.\n\/\/!\n\/\/! # Representations of non-Rust strings\n\/\/!\n\/\/! [`CString`] and [`CStr`] are useful when you need to transfer\n\/\/! UTF-8 strings to and from languages with a C ABI, like Python.\n\/\/!\n\/\/! * **From Rust to C:** [`CString`] represents an owned, C-friendly\n\/\/! string: it is nul-terminated, and has no internal nul characters.\n\/\/! Rust code can create a [`CString`] out of a normal string (provided\n\/\/! that the string doesn't have nul characters in the middle), and\n\/\/! then use a variety of methods to obtain a raw `*mut `[`u8`] that can\n\/\/! then be passed as an argument to functions which use the C\n\/\/! conventions for strings.\n\/\/!\n\/\/! * **From C to Rust:** [`CStr`] represents a borrowed C string; it\n\/\/! is what you would use to wrap a raw `*const `[`u8`] that you got from\n\/\/! a C function. A [`CStr`] is guaranteed to be a nul-terminated array\n\/\/! of bytes. Once you have a [`CStr`], you can convert it to a Rust\n\/\/! [`&str`][`str`] if it's valid UTF-8, or lossily convert it by adding\n\/\/! replacement characters.\n\/\/!\n\/\/! [`OsString`] and [`OsStr`] are useful when you need to transfer\n\/\/! strings to and from the operating system itself, or when capturing\n\/\/! the output of external commands. Conversions between [`OsString`],\n\/\/! [`OsStr`] and Rust strings work similarly to those for [`CString`]\n\/\/! and [`CStr`].\n\/\/!\n\/\/! * [`OsString`] represents an owned string in whatever\n\/\/! representation the operating system prefers. In the Rust standard\n\/\/! library, various APIs that transfer strings to\/from the operating\n\/\/! system use [`OsString`] instead of plain strings. For example,\n\/\/! [`env::var_os()`] is used to query environment variables; it\n\/\/! returns an [`Option`]`<`[`OsString`]`>`. If the environment variable\n\/\/! exists you will get a [`Some`]`(os_string)`, which you can *then* try to\n\/\/! convert to a Rust string. This yields a [`Result<>`], so that\n\/\/! your code can detect errors in case the environment variable did\n\/\/! not in fact contain valid Unicode data.\n\/\/!\n\/\/! * [`OsStr`] represents a borrowed reference to a string in a\n\/\/! format that can be passed to the operating system. It can be\n\/\/! converted into an UTF-8 Rust string slice in a similar way to\n\/\/! 
[`OsString`].\n\/\/!\n\/\/! # Conversions\n\/\/!\n\/\/! ## On Unix\n\/\/!\n\/\/! On Unix, [`OsStr`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStrExt`][unix.OsStrExt] trait, which\n\/\/! augments it with two methods, [`from_bytes`] and [`as_bytes`].\n\/\/! These do inexpensive conversions from and to UTF-8 byte slices.\n\/\/!\n\/\/! Additionally, on Unix [`OsString`] implements the\n\/\/! `std::os::unix::ffi::`[`OsStringExt`][unix.OsStringExt] trait,\n\/\/! which provides [`from_vec`] and [`into_vec`] methods that consume\n\/\/! their arguments, and take or produce vectors of [`u8`].\n\/\/!\n\/\/! ## On Windows\n\/\/!\n\/\/! On Windows, [`OsStr`] implements the\n\/\/! `std::os::windows::ffi::`[`OsStrExt`][windows.OsStrExt] trait,\n\/\/! which provides an [`encode_wide`] method. This provides an\n\/\/! iterator that can be [`collect`]ed into a vector of [`u16`].\n\/\/!\n\/\/! Additionally, on Windows [`OsString`] implements the\n\/\/! `std::os::windows:ffi::`[`OsStringExt`][windows.OsStringExt]\n\/\/! trait, which provides a [`from_wide`] method. The result of this\n\/\/! method is an [`OsString`] which can be round-tripped to a Windows\n\/\/! string losslessly.\n\/\/!\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`str`]: ..\/primitive.str.html\n\/\/! [`char`]: ..\/primitive.char.html\n\/\/! [`u8`]: ..\/primitive.u8.html\n\/\/! [`u16`]: ..\/primitive.u16.html\n\/\/! [Unicode scalar value]: http:\/\/www.unicode.org\/glossary\/#unicode_scalar_value\n\/\/! [Unicode code point]: http:\/\/www.unicode.org\/glossary\/#code_point\n\/\/! [`CString`]: struct.CString.html\n\/\/! [`CStr`]: struct.CStr.html\n\/\/! [`OsString`]: struct.OsString.html\n\/\/! [`OsStr`]: struct.OsStr.html\n\/\/! [`env::set_var()`]: ..\/env\/fn.set_var.html\n\/\/! [`env::var_os()`]: ..\/env\/fn.var_os.html\n\/\/! [`Result<>`]: ..\/result\/enum.Result.html\n\/\/! [unix.OsStringExt]: ..\/os\/unix\/ffi\/trait.OsStringExt.html\n\/\/! [`from_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.from_vec\n\/\/! [`into_vec`]: ..\/os\/unix\/ffi\/trait.OsStringExt.html#tymethod.into_vec\n\/\/! [unix.OsStrExt]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [`from_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.from_bytes\n\/\/! [`as_bytes`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html#tymethod.as_bytes\n\/\/! [`OsStrExt`]: ..\/os\/unix\/ffi\/trait.OsStrExt.html\n\/\/! [windows.OsStrExt]: ..\/os\/windows\/ffi\/trait.OsStrExt.html\n\/\/! [`encode_wide`]: ..\/os\/windows\/ffi\/trait.OsStrExt.html#tymethod.encode_wide\n\/\/! [`collect`]: ..\/iter\/trait.Iterator.html#method.collect\n\/\/! [windows.OsStringExt]: ..\/os\/windows\/ffi\/trait.OsStringExt.html\n\/\/! [`from_wide`]: ..\/os\/windows\/ffi\/trait.OsStringExt.html#tymethod.from_wide\n\/\/! [`Option`]: ..\/option\/enum.Option.html\n\/\/! 
[`Some`]: ..\/option\/enum.Option.html#variant.Some\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::c_str::{CString, CStr, NulError, IntoStringError};\n#[stable(feature = \"cstr_from_bytes\", since = \"1.10.0\")]\npub use self::c_str::{FromBytesWithNulError};\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::os_str::{OsString, OsStr};\n\n#[stable(feature = \"raw_os\", since = \"1.1.0\")]\npub use core::ffi::c_void;\n\n#[unstable(feature = \"c_variadic\",\n reason = \"the `c_variadic` feature has not been properly tested on \\\n all supported platforms\",\n issue = \"27745\")]\npub use core::ffi::VaList;\n\nmod c_str;\nmod os_str;\n<|endoftext|>"} {"text":"<commit_before>#![cfg(feature = \"glutin\")]\n\/*!\n\nBackend implementation for the glutin library\n\n# Features\n\nOnly available if the 'glutin' feature is enabled.\n\n*\/\nextern crate glutin;\n\nuse libc;\n\nuse DisplayBuild;\nuse Frame;\nuse GliumCreationError;\n\nuse context;\nuse backend;\nuse backend::Context;\nuse backend::Backend;\nuse version::Version;\n\nuse std::cell::{RefCell, Ref};\nuse std::rc::Rc;\nuse std::ops::Deref;\n\n\/\/\/ Facade implementation for glutin. Wraps both glium and glutin.\n#[derive(Clone)]\npub struct GlutinFacade {\n \/\/ contains everything related to the current context and its state\n context: Rc<context::Context>,\n\n \/\/ contains the window\n backend: Rc<Option<RefCell<Rc<GlutinWindowBackend>>>>,\n}\n\nimpl backend::Facade for GlutinFacade {\n fn get_context(&self) -> &Rc<Context> {\n &self.context\n }\n}\n\n\/\/\/ Iterator for all the events received by the window.\npub struct PollEventsIter<'a> {\n window: Option<&'a RefCell<Rc<GlutinWindowBackend>>>,\n}\n\nimpl<'a> Iterator for PollEventsIter<'a> {\n type Item = glutin::Event;\n\n fn next(&mut self) -> Option<glutin::Event> {\n if let Some(window) = self.window.as_ref() {\n window.borrow().poll_events().next()\n } else {\n None\n }\n }\n}\n\n\/\/\/ Blocking iterator over all the events received by the window.\n\/\/\/\n\/\/\/ This iterator polls for events, until the window associated with its context\n\/\/\/ is closed.\npub struct WaitEventsIter<'a> {\n window: Option<&'a RefCell<Rc<GlutinWindowBackend>>>,\n}\n\nimpl<'a> Iterator for WaitEventsIter<'a> {\n type Item = glutin::Event;\n\n fn next(&mut self) -> Option<glutin::Event> {\n if let Some(window) = self.window.as_ref() {\n window.borrow().wait_events().next()\n } else {\n None\n }\n }\n}\n\n\/\/\/ Borrow of the glutin window.\npub struct WinRef<'a>(Ref<'a, Rc<GlutinWindowBackend>>);\n\nimpl<'a> Deref for WinRef<'a> {\n type Target = glutin::Window;\n\n fn deref(&self) -> &glutin::Window {\n self.0.get_window()\n }\n}\n\nimpl GlutinFacade {\n \/\/\/ Reads all events received by the window.\n \/\/\/\n \/\/\/ This iterator polls for events and can be exhausted.\n pub fn poll_events(&self) -> PollEventsIter {\n PollEventsIter {\n window: self.backend.as_ref(),\n }\n }\n\n \/\/\/ Reads all events received by the window.\n pub fn wait_events(&self) -> WaitEventsIter {\n WaitEventsIter {\n window: self.backend.as_ref(),\n }\n }\n\n \/\/\/ Returns true if the window has been closed.\n pub fn is_closed(&self) -> bool {\n self.backend.as_ref().map(|b| b.borrow().is_closed()).unwrap_or(false)\n }\n\n \/\/\/ Returns the underlying window, or `None` if glium uses a headless context.\n pub fn get_window(&self) -> Option<WinRef> {\n self.backend.as_ref().map(|w| WinRef(w.borrow()))\n }\n\n \/\/\/ Returns the 
OpenGL version of the current context.\n \/\/ TODO: change Context so that this function derefs from it as well\n pub fn get_opengl_version(&self) -> Version {\n *self.context.get_version()\n }\n\n \/\/\/ Start drawing on the backbuffer.\n \/\/\/\n \/\/\/ This function returns a `Frame`, which can be used to draw on it. When the `Frame` is\n \/\/\/ destroyed, the buffers are swapped.\n \/\/\/\n \/\/\/ Note that destroying a `Frame` is immediate, even if vsync is enabled.\n pub fn draw(&self) -> Frame {\n Frame::new(self.context.clone(), self.get_framebuffer_dimensions())\n }\n}\n\nimpl Deref for GlutinFacade {\n type Target = Context;\n\n fn deref(&self) -> &Context {\n &self.context\n }\n}\n\nimpl DisplayBuild for glutin::WindowBuilder<'static> {\n type Facade = GlutinFacade;\n type Err = GliumCreationError<glutin::CreationError>;\n\n fn build_glium(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinWindowBackend::new(self)));\n let context = try!(unsafe { context::Context::new(backend.clone(), true) });\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(Some(RefCell::new(backend))),\n };\n\n Ok(display)\n }\n\n unsafe fn build_glium_unchecked(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinWindowBackend::new(self)));\n let context = try!(context::Context::new(backend.clone(), false));\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(Some(RefCell::new(backend))),\n };\n\n Ok(display)\n }\n\n fn rebuild_glium(self, display: &GlutinFacade) -> Result<(), GliumCreationError<glutin::CreationError>> {\n let mut existing_window = display.backend.as_ref()\n .expect(\"can't rebuild a headless display\").borrow_mut();\n let new_backend = Rc::new(try!(existing_window.rebuild(self)));\n try!(unsafe { display.context.rebuild(new_backend.clone()) });\n *existing_window = new_backend;\n Ok(())\n }\n}\n\nimpl DisplayBuild for glutin::HeadlessRendererBuilder {\n type Facade = GlutinFacade;\n type Err = GliumCreationError<glutin::CreationError>;\n\n fn build_glium(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinHeadlessBackend::new(self)));\n let context = try!(unsafe { context::Context::new(backend.clone(), true) });\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(None),\n };\n\n Ok(display)\n }\n\n unsafe fn build_glium_unchecked(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinHeadlessBackend::new(self)));\n let context = try!(context::Context::new(backend.clone(), true));\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(None),\n };\n\n Ok(display)\n }\n\n fn rebuild_glium(self, _: &GlutinFacade) -> Result<(), GliumCreationError<glutin::CreationError>> {\n unimplemented!()\n }\n}\n\n\/\/\/ An implementation of the `Backend` trait for a glutin window.\npub struct GlutinWindowBackend {\n window: glutin::Window,\n}\n\nunsafe impl Backend for GlutinWindowBackend {\n fn swap_buffers(&self) {\n self.window.swap_buffers();\n }\n\n unsafe fn get_proc_address(&self, symbol: &str) -> *const libc::c_void {\n self.window.get_proc_address(symbol)\n }\n\n fn get_framebuffer_dimensions(&self) -> (u32, u32) {\n let (width, height) = self.window.get_inner_size().unwrap_or((800, 600)); \/\/ 
TODO: 800x600 ?\n let scale = self.window.hidpi_factor();\n ((width as f32 * scale) as u32, (height as f32 * scale) as u32)\n }\n\n fn is_current(&self) -> bool {\n self.window.is_current()\n }\n\n unsafe fn make_current(&self) {\n self.window.make_current();\n }\n}\n\n#[allow(missing_docs)]\nimpl GlutinWindowBackend {\n \/\/\/ Builds a new backend from the builder.\n pub fn new(builder: glutin::WindowBuilder)\n -> Result<GlutinWindowBackend, GliumCreationError<glutin::CreationError>>\n {\n let window = try!(builder.build());\n\n Ok(GlutinWindowBackend {\n window: window,\n })\n }\n\n pub fn get_window(&self) -> &glutin::Window {\n &self.window\n }\n\n pub fn is_closed(&self) -> bool {\n self.window.is_closed()\n }\n\n pub fn poll_events(&self) -> glutin::PollEventsIterator {\n self.window.poll_events()\n }\n\n pub fn wait_events(&self) -> glutin::WaitEventsIterator {\n self.window.wait_events()\n }\n\n pub fn rebuild(&self, builder: glutin::WindowBuilder)\n -> Result<GlutinWindowBackend, GliumCreationError<glutin::CreationError>>\n {\n let window = try!(builder.with_shared_lists(&self.window).build());\n\n Ok(GlutinWindowBackend {\n window: window,\n })\n }\n}\n\n\/\/\/ An implementation of the `Backend` trait for a glutin headless context.\npub struct GlutinHeadlessBackend {\n context: glutin::HeadlessContext,\n}\n\nunsafe impl Backend for GlutinHeadlessBackend {\n fn swap_buffers(&self) {\n }\n\n unsafe fn get_proc_address(&self, symbol: &str) -> *const libc::c_void {\n self.context.get_proc_address(symbol)\n }\n\n fn get_framebuffer_dimensions(&self) -> (u32, u32) {\n (800, 600) \/\/ FIXME: these are random\n }\n\n fn is_current(&self) -> bool {\n self.context.is_current()\n }\n\n unsafe fn make_current(&self) {\n self.context.make_current();\n }\n}\n\nimpl GlutinHeadlessBackend {\n \/\/\/ Builds a new backend from the builder.\n pub fn new(builder: glutin::HeadlessRendererBuilder)\n -> Result<GlutinHeadlessBackend, GliumCreationError<glutin::CreationError>>\n {\n let context = try!(builder.build());\n\n Ok(GlutinHeadlessBackend {\n context: context,\n })\n }\n}\n<commit_msg>Fix code for Rust nightly<commit_after>#![cfg(feature = \"glutin\")]\n\/*!\n\nBackend implementation for the glutin library\n\n# Features\n\nOnly available if the 'glutin' feature is enabled.\n\n*\/\nextern crate glutin;\n\nuse libc;\n\nuse DisplayBuild;\nuse Frame;\nuse GliumCreationError;\n\nuse context;\nuse backend;\nuse backend::Context;\nuse backend::Backend;\nuse version::Version;\n\nuse std::cell::{RefCell, Ref};\nuse std::rc::Rc;\nuse std::ops::Deref;\n\n\/\/\/ Facade implementation for glutin. 
Wraps both glium and glutin.\n#[derive(Clone)]\npub struct GlutinFacade {\n \/\/ contains everything related to the current context and its state\n context: Rc<context::Context>,\n\n \/\/ contains the window\n backend: Rc<Option<RefCell<Rc<GlutinWindowBackend>>>>,\n}\n\nimpl backend::Facade for GlutinFacade {\n fn get_context(&self) -> &Rc<Context> {\n &self.context\n }\n}\n\n\/\/\/ Iterator for all the events received by the window.\npub struct PollEventsIter<'a> {\n window: Option<&'a RefCell<Rc<GlutinWindowBackend>>>,\n}\n\nimpl<'a> Iterator for PollEventsIter<'a> {\n type Item = glutin::Event;\n\n fn next(&mut self) -> Option<glutin::Event> {\n if let Some(window) = self.window.as_ref() {\n window.borrow().poll_events().next()\n } else {\n None\n }\n }\n}\n\n\/\/\/ Blocking iterator over all the events received by the window.\n\/\/\/\n\/\/\/ This iterator polls for events, until the window associated with its context\n\/\/\/ is closed.\npub struct WaitEventsIter<'a> {\n window: Option<&'a RefCell<Rc<GlutinWindowBackend>>>,\n}\n\nimpl<'a> Iterator for WaitEventsIter<'a> {\n type Item = glutin::Event;\n\n fn next(&mut self) -> Option<glutin::Event> {\n if let Some(window) = self.window.as_ref() {\n window.borrow().wait_events().next()\n } else {\n None\n }\n }\n}\n\n\/\/\/ Borrow of the glutin window.\npub struct WinRef<'a>(Ref<'a, Rc<GlutinWindowBackend>>);\n\nimpl<'a> Deref for WinRef<'a> {\n type Target = glutin::Window;\n\n fn deref(&self) -> &glutin::Window {\n self.0.get_window()\n }\n}\n\nimpl GlutinFacade {\n \/\/\/ Reads all events received by the window.\n \/\/\/\n \/\/\/ This iterator polls for events and can be exhausted.\n pub fn poll_events(&self) -> PollEventsIter {\n PollEventsIter {\n window: Option::as_ref(&self.backend),\n }\n }\n\n \/\/\/ Reads all events received by the window.\n pub fn wait_events(&self) -> WaitEventsIter {\n WaitEventsIter {\n window: Option::as_ref(&self.backend),\n }\n }\n\n \/\/\/ Returns true if the window has been closed.\n pub fn is_closed(&self) -> bool {\n Option::as_ref(&self.backend).map(|b| b.borrow().is_closed()).unwrap_or(false)\n }\n\n \/\/\/ Returns the underlying window, or `None` if glium uses a headless context.\n pub fn get_window(&self) -> Option<WinRef> {\n Option::as_ref(&self.backend).map(|w| WinRef(w.borrow()))\n }\n\n \/\/\/ Returns the OpenGL version of the current context.\n \/\/ TODO: change Context so that this function derefs from it as well\n pub fn get_opengl_version(&self) -> Version {\n *self.context.get_version()\n }\n\n \/\/\/ Start drawing on the backbuffer.\n \/\/\/\n \/\/\/ This function returns a `Frame`, which can be used to draw on it. 
When the `Frame` is\n \/\/\/ destroyed, the buffers are swapped.\n \/\/\/\n \/\/\/ Note that destroying a `Frame` is immediate, even if vsync is enabled.\n pub fn draw(&self) -> Frame {\n Frame::new(self.context.clone(), self.get_framebuffer_dimensions())\n }\n}\n\nimpl Deref for GlutinFacade {\n type Target = Context;\n\n fn deref(&self) -> &Context {\n &self.context\n }\n}\n\nimpl DisplayBuild for glutin::WindowBuilder<'static> {\n type Facade = GlutinFacade;\n type Err = GliumCreationError<glutin::CreationError>;\n\n fn build_glium(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinWindowBackend::new(self)));\n let context = try!(unsafe { context::Context::new(backend.clone(), true) });\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(Some(RefCell::new(backend))),\n };\n\n Ok(display)\n }\n\n unsafe fn build_glium_unchecked(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinWindowBackend::new(self)));\n let context = try!(context::Context::new(backend.clone(), false));\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(Some(RefCell::new(backend))),\n };\n\n Ok(display)\n }\n\n fn rebuild_glium(self, display: &GlutinFacade) -> Result<(), GliumCreationError<glutin::CreationError>> {\n let mut existing_window = Option::as_ref(&display.backend)\n .expect(\"can't rebuild a headless display\").borrow_mut();\n let new_backend = Rc::new(try!(existing_window.rebuild(self)));\n try!(unsafe { display.context.rebuild(new_backend.clone()) });\n *existing_window = new_backend;\n Ok(())\n }\n}\n\nimpl DisplayBuild for glutin::HeadlessRendererBuilder {\n type Facade = GlutinFacade;\n type Err = GliumCreationError<glutin::CreationError>;\n\n fn build_glium(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinHeadlessBackend::new(self)));\n let context = try!(unsafe { context::Context::new(backend.clone(), true) });\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(None),\n };\n\n Ok(display)\n }\n\n unsafe fn build_glium_unchecked(self) -> Result<GlutinFacade, GliumCreationError<glutin::CreationError>> {\n let backend = Rc::new(try!(backend::glutin_backend::GlutinHeadlessBackend::new(self)));\n let context = try!(context::Context::new(backend.clone(), true));\n\n let display = GlutinFacade {\n context: context,\n backend: Rc::new(None),\n };\n\n Ok(display)\n }\n\n fn rebuild_glium(self, _: &GlutinFacade) -> Result<(), GliumCreationError<glutin::CreationError>> {\n unimplemented!()\n }\n}\n\n\/\/\/ An implementation of the `Backend` trait for a glutin window.\npub struct GlutinWindowBackend {\n window: glutin::Window,\n}\n\nunsafe impl Backend for GlutinWindowBackend {\n fn swap_buffers(&self) {\n self.window.swap_buffers();\n }\n\n unsafe fn get_proc_address(&self, symbol: &str) -> *const libc::c_void {\n self.window.get_proc_address(symbol)\n }\n\n fn get_framebuffer_dimensions(&self) -> (u32, u32) {\n let (width, height) = self.window.get_inner_size().unwrap_or((800, 600)); \/\/ TODO: 800x600 ?\n let scale = self.window.hidpi_factor();\n ((width as f32 * scale) as u32, (height as f32 * scale) as u32)\n }\n\n fn is_current(&self) -> bool {\n self.window.is_current()\n }\n\n unsafe fn make_current(&self) {\n self.window.make_current();\n }\n}\n\n#[allow(missing_docs)]\nimpl 
GlutinWindowBackend {\n \/\/\/ Builds a new backend from the builder.\n pub fn new(builder: glutin::WindowBuilder)\n -> Result<GlutinWindowBackend, GliumCreationError<glutin::CreationError>>\n {\n let window = try!(builder.build());\n\n Ok(GlutinWindowBackend {\n window: window,\n })\n }\n\n pub fn get_window(&self) -> &glutin::Window {\n &self.window\n }\n\n pub fn is_closed(&self) -> bool {\n self.window.is_closed()\n }\n\n pub fn poll_events(&self) -> glutin::PollEventsIterator {\n self.window.poll_events()\n }\n\n pub fn wait_events(&self) -> glutin::WaitEventsIterator {\n self.window.wait_events()\n }\n\n pub fn rebuild(&self, builder: glutin::WindowBuilder)\n -> Result<GlutinWindowBackend, GliumCreationError<glutin::CreationError>>\n {\n let window = try!(builder.with_shared_lists(&self.window).build());\n\n Ok(GlutinWindowBackend {\n window: window,\n })\n }\n}\n\n\/\/\/ An implementation of the `Backend` trait for a glutin headless context.\npub struct GlutinHeadlessBackend {\n context: glutin::HeadlessContext,\n}\n\nunsafe impl Backend for GlutinHeadlessBackend {\n fn swap_buffers(&self) {\n }\n\n unsafe fn get_proc_address(&self, symbol: &str) -> *const libc::c_void {\n self.context.get_proc_address(symbol)\n }\n\n fn get_framebuffer_dimensions(&self) -> (u32, u32) {\n (800, 600) \/\/ FIXME: these are random\n }\n\n fn is_current(&self) -> bool {\n self.context.is_current()\n }\n\n unsafe fn make_current(&self) {\n self.context.make_current();\n }\n}\n\nimpl GlutinHeadlessBackend {\n \/\/\/ Builds a new backend from the builder.\n pub fn new(builder: glutin::HeadlessRendererBuilder)\n -> Result<GlutinHeadlessBackend, GliumCreationError<glutin::CreationError>>\n {\n let context = try!(builder.build());\n\n Ok(GlutinHeadlessBackend {\n context: context,\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use crate::custom::credential::NewAwsCredsForStsCreds;\nuse crate::{AssumeRoleWithWebIdentityRequest, Sts, StsClient};\nuse rusoto_core::credential::{\n AwsCredentials, CredentialsError, ProvideAwsCredentials, Secret, Variable,\n};\nuse rusoto_core::request::HttpClient;\nuse rusoto_core::{Client, Region};\n\nuse async_trait::async_trait;\n\nconst AWS_WEB_IDENTITY_TOKEN_FILE: &str = \"AWS_WEB_IDENTITY_TOKEN_FILE\";\n\nconst AWS_ROLE_ARN: &str = \"AWS_ROLE_ARN\";\n\nconst AWS_ROLE_SESSION_NAME: &str = \"AWS_ROLE_SESSION_NAME\";\n\n\/\/\/ WebIdentityProvider using OpenID Connect bearer token to retrieve AWS IAM credentials.\n\/\/\/\n\/\/\/ See https:\/\/docs.aws.amazon.com\/STS\/latest\/APIReference\/API_AssumeRoleWithWebIdentity.html for\n\/\/\/ more details.\n#[derive(Debug, Clone)]\npub struct WebIdentityProvider {\n \/\/\/ The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity provider.\n \/\/\/ Your application must get this token by authenticating the user who is using your application\n \/\/\/ with a web identity provider before the application makes an AssumeRoleWithWebIdentity call.\n pub web_identity_token: Variable<Secret, CredentialsError>,\n \/\/\/ The Amazon Resource Name (ARN) of the role that the caller is assuming.\n pub role_arn: Variable<String, CredentialsError>,\n \/\/\/ An identifier for the assumed role session. Typically, you pass the name or identifier that is\n \/\/\/ associated with the user who is using your application. That way, the temporary security credentials\n \/\/\/ that your application will use are associated with that user. 
This session name is included as part\n \/\/\/ of the ARN and assumed role ID in the AssumedRoleUser response element.\n pub role_session_name: Variable<String, CredentialsError>,\n}\n\nimpl WebIdentityProvider {\n \/\/\/ Create new WebIdentityProvider by explicitly passing its configuration.\n pub fn new<A, B, C>(web_identity_token: A, role_arn: B, role_session_name: Option<C>) -> Self\n where\n A: Into<Variable<Secret, CredentialsError>>,\n B: Into<Variable<String, CredentialsError>>,\n C: Into<Variable<String, CredentialsError>>,\n {\n Self {\n web_identity_token: web_identity_token.into(),\n role_arn: role_arn.into(),\n role_session_name: role_session_name\n .map(|v| v.into())\n .unwrap_or_else(|| Variable::with_value(Self::create_session_name())),\n }\n }\n\n \/\/\/ Creat a WebIdentityProvider from the following environment variables:\n \/\/\/\n \/\/\/ - `AWS_WEB_IDENTITY_TOKEN_FILE` path to the web identity token file.\n \/\/\/ - `AWS_ROLE_ARN` ARN of the role to assume.\n \/\/\/ - `AWS_ROLE_SESSION_NAME` (optional) name applied to the assume-role session.\n \/\/\/\n \/\/\/ See https:\/\/docs.aws.amazon.com\/eks\/latest\/userguide\/iam-roles-for-service-accounts-technical-overview.html\n \/\/\/ for more information about how IAM Roles for Kubernetes Service Accounts works.\n pub fn from_k8s_env() -> Self {\n Self::_from_k8s_env(\n Variable::from_env_var(AWS_WEB_IDENTITY_TOKEN_FILE),\n Variable::from_env_var(AWS_ROLE_ARN),\n Some(Variable::from_env_var(AWS_ROLE_SESSION_NAME)),\n )\n }\n\n \/\/\/ Used by unit testing\n pub(crate) fn _from_k8s_env(\n token_file: Variable<String, CredentialsError>,\n role: Variable<String, CredentialsError>,\n session_name: Option<Variable<String, CredentialsError>>,\n ) -> Self {\n Self::new(\n Variable::dynamic(move || Variable::from_text_file(token_file.resolve()?).resolve()),\n role,\n session_name,\n )\n }\n\n #[cfg(test)]\n pub(crate) fn load_token(&self) -> Result<Secret, CredentialsError> {\n self.web_identity_token.resolve()\n }\n\n fn create_session_name() -> String {\n \/\/ TODO can we do better here?\n \/\/ - Pod service account, Pod name and Pod namespace\n \/\/ - EC2 Instance ID if available\n \/\/ - IP address if available\n \/\/ - ...\n \/\/ Having some information in the session name that identifies the client would enable\n \/\/ better correlation analysis in CloudTrail.\n \"WebIdentitySession\".to_string()\n }\n}\n\n#[async_trait]\nimpl ProvideAwsCredentials for WebIdentityProvider {\n async fn credentials(&self) -> Result<AwsCredentials, CredentialsError> {\n let http_client = match HttpClient::new() {\n Ok(c) => c,\n Err(e) => return Err(CredentialsError::new(e)),\n };\n let client = Client::new_not_signing(http_client);\n let sts = StsClient::new_with_client(client, Region::default());\n let mut req = AssumeRoleWithWebIdentityRequest::default();\n\n req.role_arn = self.role_arn.resolve()?;\n req.web_identity_token = self.web_identity_token.resolve()?.to_string();\n req.role_session_name = self.role_session_name.resolve()?;\n\n let assume_role = sts.assume_role_with_web_identity(req).await;\n match assume_role {\n Err(e) => Err(CredentialsError::new(e)),\n Ok(role) => match role.credentials {\n None => Err(CredentialsError::new(format!(\n \"No credentials found in AssumeRoleWithWebIdentityResponse: {:?}\",\n role\n ))),\n Some(c) => AwsCredentials::new_for_credentials(c),\n },\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io::Write;\n use tempfile::NamedTempFile;\n\n #[test]\n fn api_ergonomy() {\n 
WebIdentityProvider::new(Secret::from(\"\".to_string()), \"\", Some(\"\".to_string()));\n }\n\n #[test]\n fn from_k8s_env() -> Result<(), CredentialsError> {\n const TOKEN_VALUE: &str = \"secret\";\n const ROLE_ARN: &str = \"role\";\n const SESSION_NAME: &str = \"session\";\n let mut file = NamedTempFile::new()?;\n \/\/ We use writeln to add an extra newline at the end of the token, which should be\n \/\/ removed by Variable::from_text_file.\n writeln!(file, \"{}\", TOKEN_VALUE)?;\n let p = WebIdentityProvider::_from_k8s_env(\n Variable::with_value(file.path().to_string_lossy().to_string()),\n Variable::with_value(ROLE_ARN.to_string()),\n Some(Variable::with_value(SESSION_NAME.to_string())),\n );\n let token = p.load_token()?;\n assert_eq!(token.as_ref(), TOKEN_VALUE);\n Ok(())\n }\n}\n<commit_msg>Fix serializing jwt token when sending request<commit_after>use crate::custom::credential::NewAwsCredsForStsCreds;\nuse crate::{AssumeRoleWithWebIdentityRequest, Sts, StsClient};\nuse rusoto_core::credential::{\n AwsCredentials, CredentialsError, ProvideAwsCredentials, Secret, Variable,\n};\nuse rusoto_core::request::HttpClient;\nuse rusoto_core::{Client, Region};\n\nuse async_trait::async_trait;\n\nconst AWS_WEB_IDENTITY_TOKEN_FILE: &str = \"AWS_WEB_IDENTITY_TOKEN_FILE\";\n\nconst AWS_ROLE_ARN: &str = \"AWS_ROLE_ARN\";\n\nconst AWS_ROLE_SESSION_NAME: &str = \"AWS_ROLE_SESSION_NAME\";\n\n\/\/\/ WebIdentityProvider using OpenID Connect bearer token to retrieve AWS IAM credentials.\n\/\/\/\n\/\/\/ See https:\/\/docs.aws.amazon.com\/STS\/latest\/APIReference\/API_AssumeRoleWithWebIdentity.html for\n\/\/\/ more details.\n#[derive(Debug, Clone)]\npub struct WebIdentityProvider {\n \/\/\/ The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity provider.\n \/\/\/ Your application must get this token by authenticating the user who is using your application\n \/\/\/ with a web identity provider before the application makes an AssumeRoleWithWebIdentity call.\n pub web_identity_token: Variable<Secret, CredentialsError>,\n \/\/\/ The Amazon Resource Name (ARN) of the role that the caller is assuming.\n pub role_arn: Variable<String, CredentialsError>,\n \/\/\/ An identifier for the assumed role session. Typically, you pass the name or identifier that is\n \/\/\/ associated with the user who is using your application. That way, the temporary security credentials\n \/\/\/ that your application will use are associated with that user. 
This session name is included as part\n \/\/\/ of the ARN and assumed role ID in the AssumedRoleUser response element.\n pub role_session_name: Variable<String, CredentialsError>,\n}\n\nimpl WebIdentityProvider {\n \/\/\/ Create new WebIdentityProvider by explicitly passing its configuration.\n pub fn new<A, B, C>(web_identity_token: A, role_arn: B, role_session_name: Option<C>) -> Self\n where\n A: Into<Variable<Secret, CredentialsError>>,\n B: Into<Variable<String, CredentialsError>>,\n C: Into<Variable<String, CredentialsError>>,\n {\n Self {\n web_identity_token: web_identity_token.into(),\n role_arn: role_arn.into(),\n role_session_name: role_session_name\n .map(|v| v.into())\n .unwrap_or_else(|| Variable::with_value(Self::create_session_name())),\n }\n }\n\n \/\/\/ Creat a WebIdentityProvider from the following environment variables:\n \/\/\/\n \/\/\/ - `AWS_WEB_IDENTITY_TOKEN_FILE` path to the web identity token file.\n \/\/\/ - `AWS_ROLE_ARN` ARN of the role to assume.\n \/\/\/ - `AWS_ROLE_SESSION_NAME` (optional) name applied to the assume-role session.\n \/\/\/\n \/\/\/ See https:\/\/docs.aws.amazon.com\/eks\/latest\/userguide\/iam-roles-for-service-accounts-technical-overview.html\n \/\/\/ for more information about how IAM Roles for Kubernetes Service Accounts works.\n pub fn from_k8s_env() -> Self {\n Self::_from_k8s_env(\n Variable::from_env_var(AWS_WEB_IDENTITY_TOKEN_FILE),\n Variable::from_env_var(AWS_ROLE_ARN),\n Some(Variable::from_env_var(AWS_ROLE_SESSION_NAME)),\n )\n }\n\n \/\/\/ Used by unit testing\n pub(crate) fn _from_k8s_env(\n token_file: Variable<String, CredentialsError>,\n role: Variable<String, CredentialsError>,\n session_name: Option<Variable<String, CredentialsError>>,\n ) -> Self {\n Self::new(\n Variable::dynamic(move || Variable::from_text_file(token_file.resolve()?).resolve()),\n role,\n session_name,\n )\n }\n\n #[cfg(test)]\n pub(crate) fn load_token(&self) -> Result<Secret, CredentialsError> {\n self.web_identity_token.resolve()\n }\n\n fn create_session_name() -> String {\n \/\/ TODO can we do better here?\n \/\/ - Pod service account, Pod name and Pod namespace\n \/\/ - EC2 Instance ID if available\n \/\/ - IP address if available\n \/\/ - ...\n \/\/ Having some information in the session name that identifies the client would enable\n \/\/ better correlation analysis in CloudTrail.\n \"WebIdentitySession\".to_string()\n }\n}\n\n#[async_trait]\nimpl ProvideAwsCredentials for WebIdentityProvider {\n async fn credentials(&self) -> Result<AwsCredentials, CredentialsError> {\n let http_client = match HttpClient::new() {\n Ok(c) => c,\n Err(e) => return Err(CredentialsError::new(e)),\n };\n let client = Client::new_not_signing(http_client);\n let sts = StsClient::new_with_client(client, Region::default());\n let mut req = AssumeRoleWithWebIdentityRequest::default();\n\n req.role_arn = self.role_arn.resolve()?;\n req.web_identity_token = self.web_identity_token.resolve()?.as_ref().to_string();\n req.role_session_name = self.role_session_name.resolve()?;\n\n let assume_role = sts.assume_role_with_web_identity(req).await;\n match assume_role {\n Err(e) => Err(CredentialsError::new(e)),\n Ok(role) => match role.credentials {\n None => Err(CredentialsError::new(format!(\n \"No credentials found in AssumeRoleWithWebIdentityResponse: {:?}\",\n role\n ))),\n Some(c) => AwsCredentials::new_for_credentials(c),\n },\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io::Write;\n use tempfile::NamedTempFile;\n\n #[test]\n fn api_ergonomy() {\n 
WebIdentityProvider::new(Secret::from(\"\".to_string()), \"\", Some(\"\".to_string()));\n }\n\n #[test]\n fn from_k8s_env() -> Result<(), CredentialsError> {\n const TOKEN_VALUE: &str = \"secret\";\n const ROLE_ARN: &str = \"role\";\n const SESSION_NAME: &str = \"session\";\n let mut file = NamedTempFile::new()?;\n \/\/ We use writeln to add an extra newline at the end of the token, which should be\n \/\/ removed by Variable::from_text_file.\n writeln!(file, \"{}\", TOKEN_VALUE)?;\n let p = WebIdentityProvider::_from_k8s_env(\n Variable::with_value(file.path().to_string_lossy().to_string()),\n Variable::with_value(ROLE_ARN.to_string()),\n Some(Variable::with_value(SESSION_NAME.to_string())),\n );\n let token = p.load_token()?;\n assert_eq!(token.as_ref(), TOKEN_VALUE);\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Impl expr parsing.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix imag-notes for new error interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Switch to using LOAD_OP_LOAD for both colour and depth in in renderpasses<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>First tranche of rustdoc comments for renderervk.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ensure validation logic for m.key.verification.start is run when deserializing the event, not just the content.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add autobackup in print<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use a macro for parser errors in expr.<commit_after><|endoftext|>"} {"text":"<commit_before>use super::{LzfResult, LzfError};\nuse std::{cmp, mem};\n\nconst HLOG : usize = 16;\nconst HSIZE : u32 = 1 << HLOG;\nconst MAX_OFF : usize = 1 << 13;\nconst MAX_REF : usize = ((1 << 8) + (1 << 3));\nconst MAX_LIT : i32 = (1 << 5);\n\nfn first(p: &[u8], off: usize) -> u32 {\n ((p[off] as u32) << 8) | p[off+1] as u32\n}\n\nfn next(v: u32, p: &[u8], off: usize) -> u32 {\n (v << 8) | p[off+2] as u32\n}\n\nfn idx(h: u32) -> usize {\n let h = h as u64;\n (\n \/\/ 8 = 3*8-HLOG, but HLOG is constant at 16\n (h.wrapping_shr(8).wrapping_sub(h*5))\n & (HSIZE-1) as u64\n ) as usize\n}\n\nfn not(i: i32) -> i32 {\n if i == 0 {\n 1\n } else {\n 0\n }\n}\n\n\/\/\/ Compress the given data, if possible.\n\/\/\/ The return value will be set to the error if compression fails.\n\/\/\/\n\/\/\/ The buffer is always set to the same size as the input buffer.\n\/\/\/ If that is not enough to hold the lzf-compressed data,\n\/\/\/ an error will be returned.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let data = \"foobar\";\n\/\/\/ let compressed = lzf::compress(data.as_bytes());\n\/\/\/ ```\npub fn compress(data: &[u8]) -> LzfResult<Vec<u8>> {\n let in_len = data.len();\n let out_buf_len = in_len;\n let mut out = Vec::with_capacity(out_buf_len);\n unsafe { out.set_len(out_buf_len) };\n\n let mut out_len: i32 = 1; \/* start run by default *\/\n\n \/* This goes against all of Rust's statically verifiable guarantees,\n * but for the below use-case accessing uninitialized memory is ok,\n * as we have other checks to make sure the read memory is not used.\n *\n * The otherwise happening memset slows down the code by a factor of 20-30\n *\/\n let mut htab: [usize; 1 << HLOG] = unsafe { mem::uninitialized() };\n\n let mut current_offset = 0;\n\n if in_len == 0 {\n return Err(LzfError::DataCorrupted);\n }\n\n let mut lit: i32 = 0;\n\n let mut hval: u32;\n let mut ref_offset;\n\n hval = 
first(data, current_offset);\n\n while current_offset < in_len - 2 {\n hval = next(hval, data, current_offset);\n let hslot_idx = idx(hval);\n\n ref_offset = htab[hslot_idx];\n htab[hslot_idx] = current_offset;\n\n let off = current_offset.wrapping_sub(ref_offset).wrapping_sub(1);\n if off < MAX_OFF && current_offset + 4 < in_len && ref_offset > 0 &&\n data[ref_offset] == data[current_offset] &&\n data[ref_offset+1] == data[current_offset+1] &&\n data[ref_offset+2] == data[current_offset+2] {\n\n let mut len = 2;\n let maxlen = cmp::min(in_len - current_offset - len, MAX_REF);\n\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n out_len -= not(lit); \/* undo run if length is zero *\/\n\n if out_len as i32 + 3 + 1 >= out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n loop {\n len += 1;\n while len < maxlen && data[ref_offset+len] == data[current_offset+len] {\n len += 1;\n }\n break;\n }\n\n len -= 2; \/* len is now #octets - 1 *\/\n current_offset += 1;\n\n if len < 7 {\n out[out_len as usize] = (off >> 8) as u8 + (len << 5) as u8;\n out_len += 1;\n } else {\n out[out_len as usize] = (off >> 8) as u8 + (7 << 5);\n out[out_len as usize + 1] = (len as u8).wrapping_sub(7);\n out_len += 2;\n }\n\n out[out_len as usize] = off as u8;\n out_len += 2; \/* start run *\/\n lit = 0;\n\n \/* we add here, because we later substract from the total length *\/\n current_offset += len - 1;\n\n if current_offset >= in_len {\n break;\n }\n\n hval = first(data, current_offset);\n\n hval = next(hval, data, current_offset);\n htab[idx(hval)] = current_offset;\n current_offset += 1;\n\n hval = next(hval, data, current_offset);\n htab[idx(hval)] = current_offset;\n current_offset += 1;\n } else {\n \/* one more literal byte we must copy *\/\n if out_len >= out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n lit += 1;\n out[out_len as usize] = data[current_offset];\n out_len += 1;\n current_offset += 1;\n\n if lit == MAX_LIT {\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n lit = 0;\n out_len += 1; \/* start run *\/\n }\n }\n }\n\n \/* at most 3 bytes can be missing here *\/\n if out_len + 3 > out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n while current_offset < in_len {\n lit += 1;\n out[out_len as usize] = data[current_offset];\n out_len += 1;\n current_offset += 1;\n\n if lit == MAX_LIT {\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n lit = 0;\n out_len += 1; \/* start run *\/\n }\n }\n\n \/* end run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n out_len -= not(lit); \/* undo run if length is zero *\/\n\n unsafe { out.set_len(out_len as usize) };\n\n Ok(out)\n}\n\n#[test]\nfn test_compress_skips_short() {\n match compress(\"foo\".as_bytes()) {\n Ok(_) => panic!(\"Compression did _something_, which is wrong for 'foo'\"),\n Err(err) => assert_eq!(LzfError::NoCompressionPossible, err),\n }\n}\n\n#[test]\nfn test_compress_lorem() {\n let lorem = \"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod \\\n tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At \\\n vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, \\\n no sea takimata sanctus est Lorem ipsum dolor sit amet. 
Lorem ipsum dolor sit \\\n amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut \\\n labore et dolore magna aliquyam erat, sed diam voluptua.\";\n\n match compress(lorem.as_bytes()) {\n Ok(compressed) => {\n assert_eq!(272, compressed.len())\n }\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n }\n}\n\n#[test]\nfn test_compress_decompress_lorem_round() {\n use super::decompress;\n\n let lorem = \"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod \\\n tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At \\\n vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, \\\n no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit \\\n amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut \\\n labore et dolore magna aliquyam erat, sed diam voluptua.\";\n\n let compressed = match compress(lorem.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n match decompress(&compressed, lorem.len()) {\n Ok(decompressed) => {\n assert_eq!(lorem.len(), decompressed.len());\n assert_eq!(lorem.as_bytes(), &decompressed[..]);\n }\n Err(err) => panic!(\"Decompression failed with error {:?}\", err),\n };\n}\n\n#[test]\nfn test_alice_wonderland_both() {\n let alice = \"\\r\\n\\r\\n\\r\\n\\r\\n ALICE'S ADVENTURES IN WONDERLAND\\r\\n\";\n\n let compressed = match compress(alice.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n let c_compressed = match super::compress(alice.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n assert_eq!(&compressed[..], &c_compressed[..]);\n}\n\n#[test]\nfn quickcheck_found_bug() {\n let inp = vec![0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 3, 0, 0, 4, 0, 1, 1, 0, 1, 2, 0, 1, 3, 0, 1, 4, 0, 0, 5, 0, 0, 6, 0, 0, 7, 0, 0, 8, 0, 0, 9, 0, 0, 10, 0, 0, 11, 0, 1, 5, 0, 1, 6, 0, 1, 7, 0, 1, 8, 0, 1, 9, 0, 1, 10, 0, 0];\n\n assert_eq!(LzfError::NoCompressionPossible, compress(&inp).unwrap_err());\n}\n<commit_msg>fix: Avoid compression if there is not enough data<commit_after>use super::{LzfResult, LzfError};\nuse std::{cmp, mem};\n\nconst HLOG : usize = 16;\nconst HSIZE : u32 = 1 << HLOG;\nconst MAX_OFF : usize = 1 << 13;\nconst MAX_REF : usize = ((1 << 8) + (1 << 3));\nconst MAX_LIT : i32 = (1 << 5);\n\nfn first(p: &[u8], off: usize) -> u32 {\n ((p[off] as u32) << 8) | p[off+1] as u32\n}\n\nfn next(v: u32, p: &[u8], off: usize) -> u32 {\n (v << 8) | p[off+2] as u32\n}\n\nfn idx(h: u32) -> usize {\n let h = h as u64;\n (\n \/\/ 8 = 3*8-HLOG, but HLOG is constant at 16\n (h.wrapping_shr(8).wrapping_sub(h*5))\n & (HSIZE-1) as u64\n ) as usize\n}\n\nfn not(i: i32) -> i32 {\n if i == 0 {\n 1\n } else {\n 0\n }\n}\n\n\/\/\/ Compress the given data, if possible.\n\/\/\/ The return value will be set to the error if compression fails.\n\/\/\/\n\/\/\/ The buffer is always set to the same size as the input buffer.\n\/\/\/ If that is not enough to hold the lzf-compressed data,\n\/\/\/ an error will be returned.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let data = \"foobar\";\n\/\/\/ let compressed = lzf::compress(data.as_bytes());\n\/\/\/ ```\npub fn compress(data: &[u8]) -> LzfResult<Vec<u8>> {\n let in_len = data.len();\n let out_buf_len = in_len;\n let mut out = Vec::with_capacity(out_buf_len);\n unsafe { out.set_len(out_buf_len) };\n\n let mut out_len: i32 = 1; \/* start run by 
default *\/\n\n \/* This goes against all of Rust's statically verifiable guarantees,\n * but for the below use-case accessing uninitialized memory is ok,\n * as we have other checks to make sure the read memory is not used.\n *\n * The otherwise happening memset slows down the code by a factor of 20-30\n *\/\n let mut htab: [usize; 1 << HLOG] = unsafe { mem::uninitialized() };\n\n let mut current_offset = 0;\n\n if in_len < 2 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n let mut lit: i32 = 0;\n\n let mut hval: u32;\n let mut ref_offset;\n\n hval = first(data, current_offset);\n\n while current_offset < in_len - 2 {\n hval = next(hval, data, current_offset);\n let hslot_idx = idx(hval);\n\n ref_offset = htab[hslot_idx];\n htab[hslot_idx] = current_offset;\n\n let off = current_offset.wrapping_sub(ref_offset).wrapping_sub(1);\n if off < MAX_OFF && current_offset + 4 < in_len && ref_offset > 0 &&\n data[ref_offset] == data[current_offset] &&\n data[ref_offset+1] == data[current_offset+1] &&\n data[ref_offset+2] == data[current_offset+2] {\n\n let mut len = 2;\n let maxlen = cmp::min(in_len - current_offset - len, MAX_REF);\n\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n out_len -= not(lit); \/* undo run if length is zero *\/\n\n if out_len as i32 + 3 + 1 >= out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n loop {\n len += 1;\n while len < maxlen && data[ref_offset+len] == data[current_offset+len] {\n len += 1;\n }\n break;\n }\n\n len -= 2; \/* len is now #octets - 1 *\/\n current_offset += 1;\n\n if len < 7 {\n out[out_len as usize] = (off >> 8) as u8 + (len << 5) as u8;\n out_len += 1;\n } else {\n out[out_len as usize] = (off >> 8) as u8 + (7 << 5);\n out[out_len as usize + 1] = (len as u8).wrapping_sub(7);\n out_len += 2;\n }\n\n out[out_len as usize] = off as u8;\n out_len += 2; \/* start run *\/\n lit = 0;\n\n \/* we add here, because we later substract from the total length *\/\n current_offset += len - 1;\n\n if current_offset >= in_len {\n break;\n }\n\n hval = first(data, current_offset);\n\n hval = next(hval, data, current_offset);\n htab[idx(hval)] = current_offset;\n current_offset += 1;\n\n hval = next(hval, data, current_offset);\n htab[idx(hval)] = current_offset;\n current_offset += 1;\n } else {\n \/* one more literal byte we must copy *\/\n if out_len >= out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n lit += 1;\n out[out_len as usize] = data[current_offset];\n out_len += 1;\n current_offset += 1;\n\n if lit == MAX_LIT {\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n lit = 0;\n out_len += 1; \/* start run *\/\n }\n }\n }\n\n \/* at most 3 bytes can be missing here *\/\n if out_len + 3 > out_buf_len as i32 {\n return Err(LzfError::NoCompressionPossible);\n }\n\n while current_offset < in_len {\n lit += 1;\n out[out_len as usize] = data[current_offset];\n out_len += 1;\n current_offset += 1;\n\n if lit == MAX_LIT {\n \/* stop run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n lit = 0;\n out_len += 1; \/* start run *\/\n }\n }\n\n \/* end run *\/\n out[(out_len - lit - 1) as usize] = (lit as u8).wrapping_sub(1);\n out_len -= not(lit); \/* undo run if length is zero *\/\n\n unsafe { out.set_len(out_len as usize) };\n\n Ok(out)\n}\n\n#[test]\nfn test_compress_skips_short() {\n match compress(\"foo\".as_bytes()) {\n Ok(_) => panic!(\"Compression did _something_, which is wrong for 'foo'\"),\n Err(err) => 
assert_eq!(LzfError::NoCompressionPossible, err),\n }\n}\n\n#[test]\nfn test_compress_lorem() {\n let lorem = \"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod \\\n tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At \\\n vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, \\\n no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit \\\n amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut \\\n labore et dolore magna aliquyam erat, sed diam voluptua.\";\n\n match compress(lorem.as_bytes()) {\n Ok(compressed) => {\n assert_eq!(272, compressed.len())\n }\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n }\n}\n\n#[test]\nfn test_compress_decompress_lorem_round() {\n use super::decompress;\n\n let lorem = \"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod \\\n tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At \\\n vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, \\\n no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit \\\n amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut \\\n labore et dolore magna aliquyam erat, sed diam voluptua.\";\n\n let compressed = match compress(lorem.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n match decompress(&compressed, lorem.len()) {\n Ok(decompressed) => {\n assert_eq!(lorem.len(), decompressed.len());\n assert_eq!(lorem.as_bytes(), &decompressed[..]);\n }\n Err(err) => panic!(\"Decompression failed with error {:?}\", err),\n };\n}\n\n#[test]\nfn test_alice_wonderland_both() {\n let alice = \"\\r\\n\\r\\n\\r\\n\\r\\n ALICE'S ADVENTURES IN WONDERLAND\\r\\n\";\n\n let compressed = match compress(alice.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n let c_compressed = match super::compress(alice.as_bytes()) {\n Ok(c) => c,\n Err(err) => panic!(\"Compression failed with error {:?}\", err),\n };\n\n assert_eq!(&compressed[..], &c_compressed[..]);\n}\n\n#[test]\nfn quickcheck_found_bug() {\n let inp = vec![0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 3, 0, 0, 4, 0, 1, 1, 0, 1, 2, 0, 1, 3, 0, 1, 4, 0, 0, 5, 0, 0, 6, 0, 0, 7, 0, 0, 8, 0, 0, 9, 0, 0, 10, 0, 0, 11, 0, 1, 5, 0, 1, 6, 0, 1, 7, 0, 1, 8, 0, 1, 9, 0, 1, 10, 0, 0];\n\n assert_eq!(LzfError::NoCompressionPossible, compress(&inp).unwrap_err());\n}\n\n#[test]\nfn quickcheck_found_bug2() {\n let inp = vec![0];\n\n assert_eq!(LzfError::NoCompressionPossible, compress(&inp).unwrap_err());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015-2016 Intecture Developers. See the COPYRIGHT file at the\n\/\/ top-level directory of this distribution and at\n\/\/ https:\/\/intecture.io\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Mozilla Public License 2.0 <LICENSE or\n\/\/ https:\/\/www.tldrlegal.com\/l\/mpl-2.0>. This file may not be copied,\n\/\/ modified, or distributed except according to those terms.\n\n\/\/! Parser for Intecture data files.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! ```no_run\n\/\/! 
```\n\nmod condition;\n\/\/ pub mod ffi;\n\nuse error::{Error, Result};\nuse serde_json::{self, Value};\nuse std::collections::BTreeMap;\nuse std::fs;\nuse std::path::Path;\n\n\/\/\/ Parser for Intecture data files.\npub struct DataParser;\n\nimpl DataParser {\n \/\/\/ Open a new file and recursively parse its contents.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ ```\n pub fn open<P: AsRef<Path>>(path: P) -> Result<Value> {\n let file = try!(DataFile::new(path));\n Ok(try!(file.merge(Value::Null)))\n }\n}\n\nstruct DataFile {\n v: Value,\n}\n\nimpl DataFile {\n fn new<P: AsRef<Path>>(path: P) -> Result<DataFile> {\n let mut fh = try!(fs::File::open(path.as_ref()));\n let data: Value = try!(serde_json::from_reader(&mut fh));\n\n if !data.is_object() {\n Err(Error::Generic(\"Value is not an object\".into()))\n } else {\n Ok(DataFile {\n v: data,\n })\n }\n }\n\n fn dependencies(&self) -> Result<Vec<DataFile>> {\n let mut deps = Vec::new();\n\n if let Some(inc) = self.v.find(\"_include\") {\n if !inc.is_array() {\n return Err(Error::Generic(\"Value of `_include` is not an array\".into()));\n }\n\n \/\/ Loop in reverse order to get lowest importance first\n for i in inc.as_array().unwrap().iter().rev() {\n deps.push(try!(DataFile::new(i.as_str().unwrap())));\n }\n }\n\n Ok(deps)\n }\n\n fn merge(self, mut last_value: Value) -> Result<Value> {\n for dep in try!(self.dependencies()) {\n last_value = try!(dep.merge(last_value));\n }\n\n Ok(try!(Self::merge_values(self.v, last_value)))\n }\n\n fn merge_values(into: Value, mut from: Value) -> Result<Value> {\n match into {\n Value::Null |\n Value::Bool(_) |\n Value::I64(_) |\n Value::U64(_) |\n Value::F64(_) |\n Value::String(_) => Ok(into),\n Value::Array(mut a) => {\n if from.is_array() {\n a.append(from.as_array_mut().unwrap());\n } else {\n a.push(from);\n }\n\n Ok(Value::Array(a))\n },\n Value::Object(o) => {\n let mut new: BTreeMap<String, Value> = BTreeMap::new();\n\n for (mut key, mut value) in o {\n if key.ends_with(\"?\") || key.ends_with(\"?!\") {\n if key.pop().unwrap() == '!' 
{\n key.pop();\n key.push('!');\n }\n\n value = try!(Self::query_value(&from, value)).unwrap_or(Value::Null);\n }\n\n if key.ends_with(\"!\") {\n key.pop();\n }\n else if let Some(o1) = from.find(&key) {\n value = try!(Self::merge_values(value, o1.clone()));\n }\n\n new.insert(key, value);\n }\n\n \/\/ Insert any missing values\n if let Some(o1) = from.as_object() {\n for (key, value) in o1 {\n if !new.contains_key(key) {\n new.insert(key.clone(), value.clone());\n }\n }\n }\n\n Ok(Value::Object(new))\n }\n }\n }\n\n fn query_value(data: &Value, value: Value) -> Result<Option<Value>> {\n match value {\n Value::Array(a) => {\n for opt in a {\n if let Some(v) = try!(Self::query_value(data, opt)) {\n return Ok(Some(v));\n }\n }\n },\n Value::Object(mut o) => {\n if let Some(v) = o.remove(\"_\") {\n if let Some(q) = o.get(\"?\") {\n match *q {\n Value::String(ref s) if !try!(condition::eval(data, s)) => return Ok(None),\n _ => return Err(Error::Generic(\"Query must be string\".into())),\n };\n }\n\n return Ok(Some(v));\n }\n },\n _ => return Ok(Some(value)),\n }\n\n Ok(None)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use serde_json::{self, Value};\n use std::fs::File;\n use std::io::Write;\n use std::path::PathBuf;\n use super::*;\n use tempdir::TempDir;\n\n #[test]\n fn test_parser() {\n let tempdir = TempDir::new(\"parser_test\").unwrap();\n let mut path = tempdir.path().to_owned();\n let expected_value = create_data(&mut path);\n\n path.push(\"top.json\");\n let value = DataParser::open(&path).unwrap();\n\n assert_eq!(value, expected_value);\n }\n\n fn create_data(path: &mut PathBuf) -> Value {\n path.push(\"top.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(format!(\"{{\n \\\"a\\\": 1,\n \\\"payload\\\": {{\n \\\"b\\\": [ 1, 2 ],\n \\\"c?\\\": [\n {{\n \\\"_\\\": [ 6, 7 ],\n \\\"?\\\": \\\"\/variable = false\\\"\n }},\n {{\n \\\"_\\\": [ 8, 9 ]\n }}\n ],\n \\\"d\\\": [ 123 ]\n }},\n \\\"variable\\\": {{\n \\\"one!\\\": true,\n \\\"two\\\": false\n }},\n \\\"_include\\\": [\n \\\"{0}\/middle.json\\\",\n \\\"{0}\/bottom.json\\\"\n ]\n }}\", &path.to_str().unwrap()).as_bytes()).unwrap();\n\n path.push(\"middle.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(b\"{\n \\\"payload\\\": {\n \\\"b!\\\": [ 3, 4 ],\n \\\"d\\\": [ 987 ]\n },\n \\\"variable\\\": false,\n \\\"d\\\": 4\n }\").unwrap();\n\n path.push(\"bottom.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(b\"{\n \\\"moo\\\": \\\"cow\\\",\n \\\"payload\\\": {\n \\\"b\\\": [ 5 ],\n \\\"d\\\": [ 999 ]\n }\n }\").unwrap();\n\n serde_json::from_str(&format!(\"{{\n \\\"_include\\\": [\n \\\"{0}\/middle.json\\\",\n \\\"{0}\/bottom.json\\\"\n ],\n \\\"a\\\": 1,\n \\\"d\\\": 4,\n \\\"moo\\\": \\\"cow\\\",\n \\\"payload\\\": {{\n \\\"b\\\": [ 1, 2, 3, 4 ],\n \\\"c\\\": [ 6, 7 ],\n \\\"d\\\": [ 123, 987, 999 ]\n }},\n \\\"variable\\\": {{\n \\\"one\\\": true,\n \\\"two\\\": false\n }}\n }}\", &path.to_str().unwrap())).unwrap()\n }\n}\n<commit_msg>Fixed bugs with sharing data with query parser<commit_after>\/\/ Copyright 2015-2016 Intecture Developers. See the COPYRIGHT file at the\n\/\/ top-level directory of this distribution and at\n\/\/ https:\/\/intecture.io\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Mozilla Public License 2.0 <LICENSE or\n\/\/ https:\/\/www.tldrlegal.com\/l\/mpl-2.0>. This file may not be copied,\n\/\/ modified, or distributed except according to those terms.\n\n\/\/! Parser for Intecture data files.\n\/\/!\n\/\/! 
# Examples\n\/\/!\n\/\/! ```no_run\n\/\/! ```\n\nmod condition;\n\/\/ pub mod ffi;\n\nuse error::{Error, Result};\nuse serde_json::{self, Value};\nuse std::collections::BTreeMap;\nuse std::fs;\nuse std::path::Path;\n\n\/\/\/ Parser for Intecture data files.\npub struct DataParser;\n\nimpl DataParser {\n \/\/\/ Open a new file and recursively parse its contents.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ ```\n pub fn open<P: AsRef<Path>>(path: P) -> Result<Value> {\n let file = try!(DataFile::new(path));\n Ok(try!(file.merge(Value::Null)))\n }\n}\n\nstruct DataFile {\n v: Value,\n}\n\nimpl DataFile {\n fn new<P: AsRef<Path>>(path: P) -> Result<DataFile> {\n let mut fh = try!(fs::File::open(path.as_ref()));\n let data: Value = try!(serde_json::from_reader(&mut fh));\n\n if !data.is_object() {\n Err(Error::Generic(\"Value is not an object\".into()))\n } else {\n Ok(DataFile {\n v: data,\n })\n }\n }\n\n fn dependencies(&self) -> Result<Vec<DataFile>> {\n let mut deps = Vec::new();\n\n if let Some(inc) = self.v.find(\"_include\") {\n if !inc.is_array() {\n return Err(Error::Generic(\"Value of `_include` is not an array\".into()));\n }\n\n \/\/ Loop in reverse order to get lowest importance first\n for i in inc.as_array().unwrap().iter().rev() {\n deps.push(try!(DataFile::new(i.as_str().unwrap())));\n }\n }\n\n Ok(deps)\n }\n\n fn merge(self, mut last_value: Value) -> Result<Value> {\n for dep in try!(self.dependencies()) {\n last_value = try!(dep.merge(last_value));\n }\n\n let lv_clone = last_value.clone();\n Ok(try!(Self::merge_values(self.v, last_value, &lv_clone)))\n }\n\n fn merge_values(into: Value, mut from: Value, parent_from: &Value) -> Result<Value> {\n match into {\n Value::Null |\n Value::Bool(_) |\n Value::I64(_) |\n Value::U64(_) |\n Value::F64(_) |\n Value::String(_) => Ok(into),\n Value::Array(mut a) => {\n if from.is_array() {\n a.append(from.as_array_mut().unwrap());\n } else {\n a.push(from);\n }\n\n Ok(Value::Array(a))\n },\n Value::Object(o) => {\n let mut new: BTreeMap<String, Value> = BTreeMap::new();\n\n for (mut key, mut value) in o {\n if key.ends_with(\"?\") || key.ends_with(\"?!\") {\n if key.pop().unwrap() == '!' 
{\n key.pop();\n key.push('!');\n }\n\n value = try!(Self::query_value(&parent_from, value)).unwrap_or(Value::Null);\n }\n\n if key.ends_with(\"!\") {\n key.pop();\n }\n else if let Some(o1) = from.find(&key) {\n value = try!(Self::merge_values(value, o1.clone(), &parent_from));\n }\n\n new.insert(key, value);\n }\n\n \/\/ Insert any missing values\n if let Some(o1) = from.as_object() {\n for (key, value) in o1 {\n if !new.contains_key(key) {\n new.insert(key.clone(), value.clone());\n }\n }\n }\n\n Ok(Value::Object(new))\n }\n }\n }\n\n fn query_value(data: &Value, value: Value) -> Result<Option<Value>> {\n match value {\n Value::Array(a) => {\n for opt in a {\n if let Some(v) = try!(Self::query_value(data, opt)) {\n return Ok(Some(v));\n }\n }\n },\n Value::Object(mut o) => {\n if let Some(v) = o.remove(\"_\") {\n if let Some(q) = o.get(\"?\") {\n match *q {\n Value::String(ref s) => {\n if try!(condition::eval(data, s)) {\n return Ok(Some(v));\n }\n },\n _ => return Err(Error::Generic(\"Query must be string\".into())),\n };\n }\n\n return Ok(Some(v));\n }\n },\n _ => return Ok(Some(value)),\n }\n\n Ok(None)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use serde_json::{self, Value};\n use std::fs::File;\n use std::io::Write;\n use std::path::PathBuf;\n use super::*;\n use tempdir::TempDir;\n\n #[test]\n fn test_parser() {\n let tempdir = TempDir::new(\"parser_test\").unwrap();\n let mut path = tempdir.path().to_owned();\n let expected_value = create_data(&mut path);\n\n path.push(\"top.json\");\n let value = DataParser::open(&path).unwrap();\n\n assert_eq!(value, expected_value);\n }\n\n fn create_data(path: &mut PathBuf) -> Value {\n path.push(\"top.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(format!(\"{{\n \\\"a\\\": 1,\n \\\"payload\\\": {{\n \\\"b\\\": [ 1, 2 ],\n \\\"c?\\\": [\n {{\n \\\"_\\\": [ 6, 7 ],\n \\\"?\\\": \\\"\/variable = false\\\"\n }},\n {{\n \\\"_\\\": [ 8, 9 ]\n }}\n ],\n \\\"d\\\": [ 123 ]\n }},\n \\\"variable\\\": {{\n \\\"one!\\\": true,\n \\\"two\\\": false\n }},\n \\\"_include\\\": [\n \\\"{0}\/middle.json\\\",\n \\\"{0}\/bottom.json\\\"\n ]\n }}\", &path.to_str().unwrap()).as_bytes()).unwrap();\n\n path.push(\"middle.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(b\"{\n \\\"payload\\\": {\n \\\"b!\\\": [ 3, 4 ],\n \\\"d\\\": [ 987 ]\n },\n \\\"variable\\\": false,\n \\\"d\\\": 4\n }\").unwrap();\n\n path.push(\"bottom.json\");\n let mut fh = File::create(&path).unwrap();\n path.pop();\n fh.write_all(b\"{\n \\\"moo\\\": \\\"cow\\\",\n \\\"payload\\\": {\n \\\"b\\\": [ 5 ],\n \\\"d\\\": [ 999 ]\n }\n }\").unwrap();\n\n serde_json::from_str(&format!(\"{{\n \\\"_include\\\": [\n \\\"{0}\/middle.json\\\",\n \\\"{0}\/bottom.json\\\"\n ],\n \\\"a\\\": 1,\n \\\"d\\\": 4,\n \\\"moo\\\": \\\"cow\\\",\n \\\"payload\\\": {{\n \\\"b\\\": [ 1, 2, 3, 4 ],\n \\\"c\\\": [ 6, 7 ],\n \\\"d\\\": [ 123, 987, 999 ]\n }},\n \\\"variable\\\": {{\n \\\"one\\\": true,\n \\\"two\\\": false\n }}\n }}\", &path.to_str().unwrap())).unwrap()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add download.rs<commit_after>use std::ffi::{OsStr, OsString};\nuse std::io::Write;\nuse std::path::{Path, PathBuf};\n\nuse atomicwrites::{AtomicFile, OverwriteBehavior};\nuse rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};\nuse s3::bucket::Bucket;\nuse s3::creds::Credentials;\n\/\/use s3::S3Error;\n\nfn s3_download(bucket: &Bucket, remote_path: &str, local_path: &Path) -> Result<(), anyhow::Error> 
{\n if !local_path.exists() {\n let mut i = 0;\n let contents;\n loop {\n match bucket.get_object_blocking(&remote_path) {\n Ok((c, code)) => {\n contents = c;\n if code != 200 {\n println!(\"{}\", remote_path);\n return Ok(());\n }\n break;\n }\n Err(e) => {\n if i < 20 {\n i += 1;\n continue;\n }\n return Err(e.into());\n }\n }\n }\n\n AtomicFile::new(local_path, OverwriteBehavior::AllowOverwrite)\n .write(|f| f.write_all(&contents))?;\n }\n Ok(())\n}\n\nfn make_vrt(directory: &Path, extension: &OsStr) -> Result<(), anyhow::Error> {\n let files: Vec<OsString> = std::fs::read_dir(directory)?\n .filter_map(Result::ok)\n .filter(|f| f.path().extension() == Some(extension))\n .map(|f| f.file_name())\n .collect();\n\n let mut args = vec![OsString::from(\"merged.vrt\")];\n args.extend(files);\n\n let output = std::process::Command::new(\"gdalbuildvrt\")\n .current_dir(directory)\n .args(args)\n .output()\n .expect(\"Failed to run gdalbuildvrt. Is gdal installed?\");\n\n println!(\"stdout: {}\", String::from_utf8_lossy(&output.stdout));\n println!(\"stderr: {}\", String::from_utf8_lossy(&output.stderr));\n\n Ok(())\n}\n\n\/\/ pub fn download_nasadem(path: &Path) -> Result<(), anyhow::Error> {\n\/\/ let directory = path.join(\"nasadem\");\n\/\/ std::fs::create_dir_all(&directory)?;\n\n\/\/ \/\/ TODO: Actually do the download step\n\n\/\/ let files = std::fs::read_dir(directory)?.filter_map(Result::ok).filter(|f|f.path().extension() == ).collect();\n\n\/\/ Ok(())\n\/\/ }\n\n\/\/ Download global watermask from Copernicus dataset.\n\/\/\n\/\/ | Pixel | Value Meaning |\n\/\/ |-------|---------------|\n\/\/ | 0 | No water |\n\/\/ | 1 | Ocean |\n\/\/ | 2 | Lake |\n\/\/ | 3 | River |\n\/\/ |-------|---------------|\npub fn download_copernicus_wbm(path: &Path) -> Result<(), anyhow::Error> {\n let directory = path.join(\"copernicus-wbm\");\n\n std::fs::create_dir_all(&directory)?;\n\n let bucket =\n Bucket::new(\"copernicus-dem-30m\", \"eu-central-1\".parse()?, Credentials::anonymous()?)?;\n let bucket_fallback =\n Bucket::new(\"copernicus-dem-90m\", \"eu-central-1\".parse()?, Credentials::anonymous()?)?;\n\n let (tile_list, code) = bucket.get_object_blocking(\"tileList.txt\")?;\n assert_eq!(200, code);\n let (missing, code) = bucket.get_object_blocking(\"blacklist.txt\")?;\n assert_eq!(200, code);\n\n let tile_list = String::from_utf8(tile_list)?;\n let tile_list: Vec<_> = tile_list.split_ascii_whitespace().collect();\n tile_list.par_iter().try_for_each(|name| -> Result<(), anyhow::Error> {\n let filename = format!(\"{}WBM.tif\", &name[..name.len() - 3]);\n let local_path = directory.join(&filename);\n let remote_path = format!(\"{}\/AUXFILES\/{}\", name, filename);\n s3_download(&bucket, &remote_path, &local_path)\n })?;\n\n let missing = String::from_utf8(missing)?;\n let missing: Vec<_> = missing.split_ascii_whitespace().collect();\n missing.par_iter().try_for_each(|name| -> Result<(), anyhow::Error> {\n let name = name.replace(\"DSM_10\", \"DSM_COG_30\").replace(\".tif\", \"\");\n let filename = format!(\"{}WBM.tif\", &name[..name.len() - 3]);\n let local_path = directory.join(&filename);\n let remote_path = format!(\"{}\/AUXFILES\/{}\", name, filename);\n s3_download(&bucket_fallback, &remote_path, &local_path)\n })?;\n\n make_vrt(&directory, OsStr::new(\"tif\"))?;\n\n Ok(())\n}\n\n\/\/ Download heights from Copernicus dataset.\n\/\/\n\/\/ See https:\/\/registry.opendata.aws\/copernicus-dem\/\npub fn download_copernicus_hgt(path: &Path) -> Result<(), anyhow::Error> {\n let directory = 
path.join(\"copernicus-hgt\");\n\n std::fs::create_dir_all(&directory)?;\n\n let bucket =\n Bucket::new(\"copernicus-dem-30m\", \"eu-central-1\".parse()?, Credentials::anonymous()?)?;\n let bucket_fallback =\n Bucket::new(\"copernicus-dem-90m\", \"eu-central-1\".parse()?, Credentials::anonymous()?)?;\n\n let (tile_list, code) = bucket.get_object_blocking(\"tileList.txt\")?;\n assert_eq!(200, code);\n let (missing, code) = bucket.get_object_blocking(\"blacklist.txt\")?;\n assert_eq!(200, code);\n\n let tile_list = String::from_utf8(tile_list)?;\n let tile_list: Vec<_> = tile_list.split_ascii_whitespace().collect();\n tile_list.into_par_iter().try_for_each(|name| -> Result<(), anyhow::Error> {\n let filename = format!(\"{}DEM.tif\", &name[..name.len() - 3]);\n let local_path = directory.join(&filename);\n let remote_path = format!(\"{}\/{}\", name, filename);\n s3_download(&bucket, &remote_path, &local_path)\n })?;\n\n let missing = String::from_utf8(missing)?;\n let missing: Vec<_> = missing.split_ascii_whitespace().collect();\n missing.into_par_iter().try_for_each(|name| -> Result<(), anyhow::Error> {\n let name = name.replace(\"DSM_10\", \"DSM_COG_30\").replace(\".tif\", \"\");\n let filename = format!(\"{}DEM.tif\", &name[..name.len() - 3]);\n let local_path = directory.join(&filename);\n let remote_path = format!(\"{}\/{}\", name, filename);\n s3_download(&bucket_fallback, &remote_path, &local_path)\n })?;\n\n make_vrt(&directory, OsStr::new(\"tif\"))?;\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/! FASTA format reading and writing.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use std::io;\n\/\/! use bio::io::fasta;\n\/\/! let reader = fasta::Reader::new(io::stdin());\n\/\/! 
```\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\nuse std::collections;\nuse std::fs;\nuse std::path::Path;\nuse std::convert::AsRef;\nuse std::cmp::min;\n\nuse csv;\n\nuse utils::{TextSlice, Text};\n\n\/\/\/ A FASTA reader.\npub struct Reader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String,\n}\n\n\nimpl Reader<fs::File> {\n \/\/\/ Read FASTA from given file path.\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Self> {\n fs::File::open(path).map(Reader::new)\n }\n}\n\n\nimpl<R: io::Read> Reader<R> {\n \/\/\/ Create a new Fasta reader given an instance of `io::Read`.\n pub fn new(reader: R) -> Self {\n Reader {\n reader: io::BufReader::new(reader),\n line: String::new(),\n }\n }\n\n \/\/\/ Read next FASTA record into the given `Record`.\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with('>') {\n return Err(io::Error::new(io::ErrorKind::Other, \"Expected > at record start.\"));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() || self.line.starts_with('>') {\n break;\n }\n record.seq.push_str(&self.line.trim_right());\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\n\/\/\/ A FASTA index as created by SAMtools (.fai).\npub struct Index {\n inner: collections::HashMap<String, IndexRecord>,\n seqs: Vec<String>,\n}\n\n\nimpl Index {\n \/\/\/ Open a FASTA index from a given `io::Read` instance.\n pub fn new<R: io::Read>(fai: R) -> csv::Result<Self> {\n let mut inner = collections::HashMap::new();\n let mut seqs = vec![];\n let mut fai_reader = csv::Reader::from_reader(fai).delimiter(b'\\t').has_headers(false);\n for row in fai_reader.decode() {\n let (name, record): (String, IndexRecord) = try!(row);\n seqs.push(name.clone());\n inner.insert(name, record);\n }\n Ok(Index {\n inner: inner,\n seqs: seqs,\n })\n }\n\n \/\/\/ Open a FASTA index from a given file path.\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n match fs::File::open(path) {\n Ok(fai) => Self::new(fai),\n Err(e) => Err(csv::Error::Io(e)),\n }\n }\n\n \/\/\/ Open a FASTA index given the corresponding FASTA file path (e.g. for ref.fasta we expect ref.fasta.fai).\n pub fn with_fasta_file<P: AsRef<Path>>(fasta_path: &P) -> csv::Result<Self> {\n let mut ext = fasta_path.as_ref().extension().unwrap().to_str().unwrap().to_owned();\n ext.push_str(\".fai\");\n let fai_path = fasta_path.as_ref().with_extension(ext);\n\n Self::from_file(&fai_path)\n }\n\n \/\/\/ Return a vector of sequences described in the index.\n pub fn sequences(&self) -> Vec<Sequence> {\n self.seqs\n .iter()\n .map(|name| {\n Sequence {\n name: name.clone(),\n len: self.inner.get(name).unwrap().len,\n }\n })\n .collect()\n }\n}\n\n\n\/\/\/ A FASTA reader with an index as created by SAMtools (.fai).\npub struct IndexedReader<R: io::Read + io::Seek> {\n reader: io::BufReader<R>,\n pub index: Index,\n}\n\n\nimpl IndexedReader<fs::File> {\n \/\/\/ Read from a given file path. 
This assumes the index ref.fasta.fai to be present for FASTA ref.fasta.\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n let index = try!(Index::with_fasta_file(path));\n\n match fs::File::open(path) {\n Ok(fasta) => Ok(IndexedReader::with_index(fasta, index)),\n Err(e) => Err(csv::Error::Io(e)),\n }\n }\n}\n\n\nimpl<R: io::Read + io::Seek> IndexedReader<R> {\n \/\/\/ Read from a FASTA and its index, both given as `io::Read`. FASTA has to be `io::Seek` in addition.\n pub fn new<I: io::Read>(fasta: R, fai: I) -> csv::Result<Self> {\n let index = try!(Index::new(fai));\n Ok(IndexedReader {\n reader: io::BufReader::new(fasta),\n index: index,\n })\n }\n\n \/\/\/ Read from a FASTA and its index, the first given as `io::Read`, the second given as index object.\n pub fn with_index(fasta: R, index: Index) -> Self {\n IndexedReader {\n reader: io::BufReader::new(fasta),\n index: index,\n }\n }\n\n \/\/\/ For a given seqname, read the whole sequence into the given vector.\n pub fn read_all(&mut self, seqname: &str, seq: &mut Text) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(&idx) => self.read(seqname, 0, idx.len, seq),\n None => Err(io::Error::new(io::ErrorKind::Other, \"Unknown sequence name.\")),\n }\n }\n\n \/\/\/ Read the given interval of the given seqname into the given vector (stop position is exclusive).\n pub fn read(&mut self,\n seqname: &str,\n start: u64,\n stop: u64,\n seq: &mut Text)\n -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(idx) => {\n seq.clear();\n\n let length = stop - start as u64;\n let mut buf = vec![0u8; idx.line_bases as usize];\n\n loop {\n let current_start = start + seq.len() as u64;\n let line_start = current_start \/ idx.line_bases * idx.line_bytes;\n let line_offset = current_start % idx.line_bases;\n let offset = idx.offset + line_start + line_offset;\n try!(self.reader.seek(io::SeekFrom::Start(offset)));\n\n let left_to_read = length - seq.len() as u64;\n let left_in_line = min(left_to_read, idx.line_bases - line_offset) as usize;\n\n try!(self.reader.read(&mut buf[..left_in_line]));\n seq.extend_from_slice(&buf[..left_in_line]);\n\n if seq.len() as u64 == length\n {\n break;\n }\n }\n\n Ok(())\n }\n None => Err(io::Error::new(io::ErrorKind::Other, \"Unknown sequence name.\")),\n }\n }\n}\n\n\n\/\/\/ Record of a FASTA index.\n#[derive(RustcDecodable, Debug, Copy, Clone)]\nstruct IndexRecord {\n len: u64,\n offset: u64,\n line_bases: u64,\n line_bytes: u64,\n}\n\n\n\/\/\/ A sequence record returned by the FASTA index.\npub struct Sequence {\n pub name: String,\n pub len: u64,\n}\n\n\n\/\/\/ A Fasta writer.\npub struct Writer<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl Writer<fs::File> {\n \/\/\/ Write to the given file path.\n pub fn to_file<P: AsRef<Path>>(path: P) -> io::Result<Self> {\n fs::File::create(path).map(Writer::new)\n }\n}\n\n\nimpl<W: io::Write> Writer<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n Writer { writer: io::BufWriter::new(writer) }\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given id, optional description and sequence.\n pub fn write(&mut self, id: &str, desc: Option<&str>, seq: TextSlice) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if desc.is_some() {\n try!(self.writer.write(b\" \"));\n 
try!(self.writer.write(desc.unwrap().as_bytes()));\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\n\/\/\/ A FASTA record.\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n \/\/\/ Create a new instance.\n pub fn new() -> Self {\n Record {\n header: String::new(),\n seq: String::new(),\n }\n }\n\n \/\/\/ Check if record is empty.\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].trim_right().splitn(2, ' ').next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Option<&str> {\n self.header[1..].trim_right().splitn(2, ' ').skip(1).next()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> TextSlice {\n self.seq.as_bytes()\n }\n\n \/\/\/ Clear the record.\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: Reader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err)),\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\nCCGTAGGCTGAA\nCGTAGGCTGAAA\nGTAGGCTGAAAA\nCCCC\n>id2\nATTGTTGTTTTA\nATTGTTGTTTTA\nATTGTTGTTTTA\nGGGG\n\";\n const FAI_FILE: &'static [u8] = b\"id\\t52\\t9\\t12\\t13\nid2\\t40\\t71\\t12\\t13\n\";\n const WRITE_FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n>id2\nATTGTTGTTTTA\n\";\n\n #[test]\n fn test_reader() {\n let reader = Reader::new(FASTA_FILE);\n let ids = [Some(\"id\"), Some(\"id2\")];\n let descs = [Some(\"desc\"), None];\n let seqs: [&[u8]; 2] = [b\"ACCGTAGGCTGACCGTAGGCTGAACGTAGGCTGAAAGTAGGCTGAAAACCCC\",\n b\"ATTGTTGTTTTAATTGTTGTTTTAATTGTTGTTTTAGGGG\"];\n\n for (i, r) in reader.records().enumerate() {\n let record = r.ok().expect(\"Error reading record\");\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), ids[i]);\n assert_eq!(record.desc(), descs[i]);\n assert_eq!(record.seq(), seqs[i]);\n }\n\n\n \/\/ let record = records.ok().nth(1).unwrap();\n }\n\n #[test]\n fn test_indexed_reader() {\n let mut reader = IndexedReader::new(io::Cursor::new(FASTA_FILE), FAI_FILE)\n .ok()\n .expect(\"Error reading index\");\n let mut seq = Vec::new();\n\n\n \/\/ Test reading various substrings of the sequence\n reader.read(\"id\", 1, 5, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGT\");\n\n reader.read(\"id\", 1, 31, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGTAGGCTGACCGTAGGCTGAACGTAGGC\");\n\n reader.read(\"id\", 13, 23, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CGTAGGCTGA\");\n\n reader.read(\"id\", 36, 52, &mut 
seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"GTAGGCTGAAAACCCC\");\n\n reader.read(\"id2\", 12, 40, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"ATTGTTGTTTTAATTGTTGTTTTAGGGG\");\n }\n\n\n #[test]\n fn test_writer() {\n let mut writer = Writer::new(Vec::new());\n writer.write(\"id\", Some(\"desc\"), b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.write(\"id2\", None, b\"ATTGTTGTTTTA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n assert_eq!(writer.writer.get_ref(), &WRITE_FASTA_FILE);\n }\n}\n<commit_msg>read can read less than the requested number of bytes. use read_exact instead. truncate the stop position to the size of the fasta entry<commit_after>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/! FASTA format reading and writing.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use std::io;\n\/\/! use bio::io::fasta;\n\/\/! let reader = fasta::Reader::new(io::stdin());\n\/\/! ```\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\nuse std::collections;\nuse std::fs;\nuse std::path::Path;\nuse std::convert::AsRef;\nuse std::cmp::min;\n\nuse csv;\n\nuse utils::{TextSlice, Text};\n\n\/\/\/ A FASTA reader.\npub struct Reader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String,\n}\n\n\nimpl Reader<fs::File> {\n \/\/\/ Read FASTA from given file path.\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Self> {\n fs::File::open(path).map(Reader::new)\n }\n}\n\n\nimpl<R: io::Read> Reader<R> {\n \/\/\/ Create a new Fasta reader given an instance of `io::Read`.\n pub fn new(reader: R) -> Self {\n Reader {\n reader: io::BufReader::new(reader),\n line: String::new(),\n }\n }\n\n \/\/\/ Read next FASTA record into the given `Record`.\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with('>') {\n return Err(io::Error::new(io::ErrorKind::Other, \"Expected > at record start.\"));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() || self.line.starts_with('>') {\n break;\n }\n record.seq.push_str(&self.line.trim_right());\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\n\/\/\/ A FASTA index as created by SAMtools (.fai).\npub struct Index {\n inner: collections::HashMap<String, IndexRecord>,\n seqs: Vec<String>,\n}\n\n\nimpl Index {\n \/\/\/ Open a FASTA index from a given `io::Read` instance.\n pub fn new<R: io::Read>(fai: R) -> csv::Result<Self> {\n let mut inner = collections::HashMap::new();\n let mut seqs = vec![];\n let mut fai_reader = csv::Reader::from_reader(fai).delimiter(b'\\t').has_headers(false);\n for row in fai_reader.decode() {\n let (name, record): (String, IndexRecord) = try!(row);\n seqs.push(name.clone());\n inner.insert(name, record);\n }\n Ok(Index {\n inner: inner,\n seqs: seqs,\n })\n }\n\n \/\/\/ Open a FASTA index from a given file path.\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n match 
fs::File::open(path) {\n Ok(fai) => Self::new(fai),\n Err(e) => Err(csv::Error::Io(e)),\n }\n }\n\n \/\/\/ Open a FASTA index given the corresponding FASTA file path (e.g. for ref.fasta we expect ref.fasta.fai).\n pub fn with_fasta_file<P: AsRef<Path>>(fasta_path: &P) -> csv::Result<Self> {\n let mut ext = fasta_path.as_ref().extension().unwrap().to_str().unwrap().to_owned();\n ext.push_str(\".fai\");\n let fai_path = fasta_path.as_ref().with_extension(ext);\n\n Self::from_file(&fai_path)\n }\n\n \/\/\/ Return a vector of sequences described in the index.\n pub fn sequences(&self) -> Vec<Sequence> {\n self.seqs\n .iter()\n .map(|name| {\n Sequence {\n name: name.clone(),\n len: self.inner.get(name).unwrap().len,\n }\n })\n .collect()\n }\n}\n\n\n\/\/\/ A FASTA reader with an index as created by SAMtools (.fai).\npub struct IndexedReader<R: io::Read + io::Seek> {\n reader: io::BufReader<R>,\n pub index: Index,\n}\n\n\nimpl IndexedReader<fs::File> {\n \/\/\/ Read from a given file path. This assumes the index ref.fasta.fai to be present for FASTA ref.fasta.\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n let index = try!(Index::with_fasta_file(path));\n\n match fs::File::open(path) {\n Ok(fasta) => Ok(IndexedReader::with_index(fasta, index)),\n Err(e) => Err(csv::Error::Io(e)),\n }\n }\n}\n\n\nimpl<R: io::Read + io::Seek> IndexedReader<R> {\n \/\/\/ Read from a FASTA and its index, both given as `io::Read`. FASTA has to be `io::Seek` in addition.\n pub fn new<I: io::Read>(fasta: R, fai: I) -> csv::Result<Self> {\n let index = try!(Index::new(fai));\n Ok(IndexedReader {\n reader: io::BufReader::new(fasta),\n index: index,\n })\n }\n\n \/\/\/ Read from a FASTA and its index, the first given as `io::Read`, the second given as index object.\n pub fn with_index(fasta: R, index: Index) -> Self {\n IndexedReader {\n reader: io::BufReader::new(fasta),\n index: index,\n }\n }\n\n \/\/\/ For a given seqname, read the whole sequence into the given vector.\n pub fn read_all(&mut self, seqname: &str, seq: &mut Text) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(&idx) => self.read(seqname, 0, idx.len, seq),\n None => Err(io::Error::new(io::ErrorKind::Other, \"Unknown sequence name.\")),\n }\n }\n\n \/\/\/ Read the given interval of the given seqname into the given vector (stop position is exclusive).\n pub fn read(&mut self,\n seqname: &str,\n start: u64,\n stop: u64,\n seq: &mut Text)\n -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(idx) => {\n seq.clear();\n\n let stop = min(stop, idx.len);\n let length = stop - start as u64;\n let mut buf = vec![0u8; idx.line_bases as usize];\n\n loop {\n let current_start = start + seq.len() as u64;\n let line_start = current_start \/ idx.line_bases * idx.line_bytes;\n let line_offset = current_start % idx.line_bases;\n let offset = idx.offset + line_start + line_offset;\n\n let left_to_read = length - seq.len() as u64;\n let left_in_line = min(left_to_read, idx.line_bases - line_offset) as usize;\n\n try!(self.reader.seek(io::SeekFrom::Start(offset)));\n try!(self.reader.read_exact(&mut buf[..left_in_line]));\n\n seq.extend_from_slice(&buf[..left_in_line]);\n\n if seq.len() as u64 == length\n {\n break;\n }\n }\n\n Ok(())\n }\n None => Err(io::Error::new(io::ErrorKind::Other, \"Unknown sequence name.\")),\n }\n }\n}\n\n\n\/\/\/ Record of a FASTA index.\n#[derive(RustcDecodable, Debug, Copy, Clone)]\nstruct IndexRecord {\n len: u64,\n offset: u64,\n line_bases: u64,\n line_bytes: u64,\n}\n\n\n\/\/\/ A 
sequence record returned by the FASTA index.\npub struct Sequence {\n pub name: String,\n pub len: u64,\n}\n\n\n\/\/\/ A Fasta writer.\npub struct Writer<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl Writer<fs::File> {\n \/\/\/ Write to the given file path.\n pub fn to_file<P: AsRef<Path>>(path: P) -> io::Result<Self> {\n fs::File::create(path).map(Writer::new)\n }\n}\n\n\nimpl<W: io::Write> Writer<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n Writer { writer: io::BufWriter::new(writer) }\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given id, optional description and sequence.\n pub fn write(&mut self, id: &str, desc: Option<&str>, seq: TextSlice) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if desc.is_some() {\n try!(self.writer.write(b\" \"));\n try!(self.writer.write(desc.unwrap().as_bytes()));\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\n\/\/\/ A FASTA record.\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n \/\/\/ Create a new instance.\n pub fn new() -> Self {\n Record {\n header: String::new(),\n seq: String::new(),\n }\n }\n\n \/\/\/ Check if record is empty.\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].trim_right().splitn(2, ' ').next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Option<&str> {\n self.header[1..].trim_right().splitn(2, ' ').skip(1).next()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> TextSlice {\n self.seq.as_bytes()\n }\n\n \/\/\/ Clear the record.\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: Reader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err)),\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\nCCGTAGGCTGAA\nCGTAGGCTGAAA\nGTAGGCTGAAAA\nCCCC\n>id2\nATTGTTGTTTTA\nATTGTTGTTTTA\nATTGTTGTTTTA\nGGGG\n\";\n const FAI_FILE: &'static [u8] = b\"id\\t52\\t9\\t12\\t13\nid2\\t40\\t71\\t12\\t13\n\";\n const WRITE_FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n>id2\nATTGTTGTTTTA\n\";\n\n #[test]\n fn test_reader() {\n let reader = Reader::new(FASTA_FILE);\n let ids = [Some(\"id\"), Some(\"id2\")];\n let descs = [Some(\"desc\"), None];\n let seqs: [&[u8]; 2] = 
[b\"ACCGTAGGCTGACCGTAGGCTGAACGTAGGCTGAAAGTAGGCTGAAAACCCC\",\n b\"ATTGTTGTTTTAATTGTTGTTTTAATTGTTGTTTTAGGGG\"];\n\n for (i, r) in reader.records().enumerate() {\n let record = r.ok().expect(\"Error reading record\");\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), ids[i]);\n assert_eq!(record.desc(), descs[i]);\n assert_eq!(record.seq(), seqs[i]);\n }\n\n\n \/\/ let record = records.ok().nth(1).unwrap();\n }\n\n #[test]\n fn test_indexed_reader() {\n let mut reader = IndexedReader::new(io::Cursor::new(FASTA_FILE), FAI_FILE)\n .ok()\n .expect(\"Error reading index\");\n let mut seq = Vec::new();\n\n\n \/\/ Test reading various substrings of the sequence\n reader.read(\"id\", 1, 5, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGT\");\n\n reader.read(\"id\", 1, 31, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGTAGGCTGACCGTAGGCTGAACGTAGGC\");\n\n reader.read(\"id\", 13, 23, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CGTAGGCTGA\");\n\n reader.read(\"id\", 36, 52, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"GTAGGCTGAAAACCCC\");\n\n reader.read(\"id2\", 12, 40, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"ATTGTTGTTTTAATTGTTGTTTTAGGGG\");\n }\n\n\n #[test]\n fn test_writer() {\n let mut writer = Writer::new(Vec::new());\n writer.write(\"id\", Some(\"desc\"), b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.write(\"id2\", None, b\"ATTGTTGTTTTA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n assert_eq!(writer.writer.get_ref(), &WRITE_FASTA_FILE);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Languages we can hyphenate and their default parameters, as provided by\n\/\/! 
the TeX `hyph-utf8` package.\n\n#![allow(non_camel_case_types)]\n\nuse exception::{Exceptions};\nuse pattern::{Patterns};\n\n\/\/\/ A `Corpus` carries hyphenation data and parameters.\n\/\/\/\n\/\/\/ It comprises the working language, the set of applicable patterns and\n\/\/\/ exceptions, as well as the left and right intra-word hyphenation boundaries.\n#[derive(Clone, Debug)]\npub struct Corpus {\n pub language: Language,\n pub patterns: Patterns,\n pub exceptions: Option<Exceptions>,\n pub left_min: usize,\n pub right_min: usize\n}\n\n\nuse self::Language::*;\n\n\/\/\/ The set of available languages.\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum Language {\n Afrikaans,\n Armenian,\n Assamese,\n Basque,\n Bengali,\n Bulgarian,\n Catalan,\n Chinese,\n Coptic,\n Croatian,\n Czech,\n Danish,\n Dutch,\n English_GB,\n English_US,\n Esperanto,\n Estonian,\n Ethiopic,\n Finnish,\n French,\n Friulan,\n Galician,\n Georgian,\n German_1901,\n German_1996,\n German_Swiss,\n Greek_Ancient,\n Greek_Mono,\n Greek_Poly,\n Gujarati,\n Hindi,\n Hungarian,\n Icelandic,\n Indonesian,\n Interlingua,\n Irish,\n Italian,\n Kannada,\n Kurmanji,\n Latin_Classic,\n Latin,\n Latvian,\n Lithuanian,\n Malayalam,\n Marathi,\n Mongolian,\n Norwegian_Bokmal,\n Norwegian_Nynorsk,\n Occitan,\n Oriya,\n Panjabi,\n Piedmontese,\n Polish,\n Portuguese,\n Romanian,\n Romansh,\n Russian,\n Sanskrit,\n Serbian_Cyrillic,\n Serbocroatian_Cyrillic,\n Serbocroatian_Latin,\n Slavonic_Church,\n Slovak,\n Slovenian,\n Spanish,\n Swedish,\n Tamil,\n Telugu,\n Thai,\n Turkish,\n Turkmen,\n Ukrainian,\n Uppersorbian,\n Welsh\n}\n\n\n\/\/\/ The TeX tag for a given language.\npub fn tag(lang: Language) -> &'static str {\n match lang {\n Afrikaans => \"af\",\n Armenian => \"hy\",\n Assamese => \"as\",\n Basque => \"eu\",\n Bengali => \"bn\",\n Bulgarian => \"bg\",\n Catalan => \"ca\",\n Chinese => \"zh-latn-pinyin\",\n Coptic => \"cop\",\n Croatian => \"hr\",\n Czech => \"cs\",\n Danish => \"da\",\n Dutch => \"nl\",\n English_GB => \"en-gb\",\n English_US => \"en-us\",\n Esperanto => \"eo\",\n Estonian => \"et\",\n Ethiopic => \"mul-ethi\",\n Finnish => \"fi\",\n French => \"fr\",\n Friulan => \"fur\",\n Galician => \"gl\",\n Georgian => \"ka\",\n German_1901 => \"de-1901\",\n German_1996 => \"de-1996\",\n German_Swiss => \"de-ch-1901\",\n Greek_Ancient => \"grc\",\n Greek_Mono => \"el-monoton\",\n Greek_Poly => \"el-polyton\",\n Gujarati => \"gu\",\n Hindi => \"hi\",\n Hungarian => \"hu\",\n Icelandic => \"is\",\n Indonesian => \"id\",\n Interlingua => \"ia\",\n Irish => \"ga\",\n Italian => \"it\",\n Kannada => \"kn\",\n Kurmanji => \"kmr\",\n Latin => \"la\",\n Latin_Classic => \"la-x-classic\",\n Latvian => \"lv\",\n Lithuanian => \"lt\",\n Malayalam => \"ml\",\n Marathi => \"mr\",\n Mongolian => \"mn-cyrl\",\n Norwegian_Bokmal => \"nb\",\n Norwegian_Nynorsk => \"nn\",\n Occitan => \"oc\",\n Oriya => \"or\",\n Panjabi => \"pa\",\n Piedmontese => \"pms\",\n Polish => \"pl\",\n Portuguese => \"pt\",\n Romanian => \"ro\",\n Romansh => \"rm\",\n Russian => \"ru\",\n Sanskrit => \"sa\",\n Serbian_Cyrillic => \"sr-cyrl\",\n Serbocroatian_Cyrillic => \"sh-cyrl\",\n Serbocroatian_Latin => \"sh-latn\",\n Slavonic_Church => \"cu\",\n Slovak => \"sk\",\n Slovenian => \"sl\",\n Spanish => \"es\",\n Swedish => \"sv\",\n Tamil => \"ta\",\n Telugu => \"te\",\n Thai => \"th\",\n Turkish => \"tr\",\n Turkmen => \"tk\",\n Ukrainian => \"uk\",\n Uppersorbian => \"hsb\",\n Welsh => \"cy\"\n }\n}\n\n\/\/\/ The default number of 
characters from the start and end of a word\n\/\/\/ which shall not be hyphenated.\npub fn mins(lang: Language) -> (usize, usize) {\n match lang {\n Afrikaans => (1, 2),\n Armenian => (1, 2),\n Assamese => (1, 1),\n Basque => (2, 2),\n Bengali => (1, 1),\n Bulgarian => (2, 2),\n Catalan => (2, 2),\n Chinese => (1, 1),\n Coptic => (1, 1),\n Croatian => (2, 2),\n Czech => (2, 3),\n Danish => (2, 2),\n Dutch => (2, 2),\n English_GB => (2, 3),\n English_US => (2, 3),\n Esperanto => (2, 2),\n Estonian => (2, 3),\n Ethiopic => (1, 1),\n Finnish => (2, 2),\n French => (2, 3),\n Friulan => (2, 2),\n Galician => (2, 2),\n Georgian => (1, 2),\n German_1901 => (2, 2),\n German_1996 => (2, 2),\n German_Swiss => (2, 2),\n Greek_Ancient => (1, 1),\n Greek_Mono => (1, 1),\n Greek_Poly => (1, 1),\n Gujarati => (1, 1),\n Hindi => (1, 1),\n Hungarian => (2, 2),\n Icelandic => (2, 2),\n Indonesian => (2, 2),\n Interlingua => (2, 2),\n Irish => (2, 3),\n Italian => (2, 2),\n Kannada => (1, 1),\n Kurmanji => (2, 2),\n Latin => (2, 2),\n Latin_Classic => (2, 2),\n Latvian => (2, 2),\n Lithuanian => (2, 2),\n Malayalam => (1, 1),\n Marathi => (1, 1),\n Mongolian => (2, 2),\n Norwegian_Bokmal => (2, 2),\n Norwegian_Nynorsk => (2, 2),\n Occitan => (2, 2),\n Oriya => (1, 1),\n Panjabi => (1, 1),\n Piedmontese => (2, 2),\n Polish => (2, 2),\n Portuguese => (2, 3),\n Romanian => (2, 2),\n Romansh => (2, 2),\n Russian => (2, 2),\n Sanskrit => (1, 3),\n Serbian_Cyrillic => (2, 2),\n Serbocroatian_Cyrillic => (2, 2),\n Serbocroatian_Latin => (2, 2),\n Slavonic_Church => (1, 2),\n Slovak => (2, 3),\n Slovenian => (2, 2),\n Spanish => (2, 2),\n Swedish => (2, 2),\n Tamil => (1, 1),\n Telugu => (1, 1),\n Thai => (2, 3),\n Turkish => (2, 2),\n Turkmen => (2, 2),\n Ukrainian => (2, 2),\n Uppersorbian => (2, 2),\n Welsh => (2, 3)\n }\n}\n<commit_msg>Note: default hyphenation boundaries<commit_after>\/\/! Languages we can hyphenate and their default parameters, as provided by\n\/\/! 
the TeX `hyph-utf8` package.\n\n#![allow(non_camel_case_types)]\n\nuse exception::{Exceptions};\nuse pattern::{Patterns};\n\n\/\/\/ A `Corpus` carries hyphenation data and parameters.\n\/\/\/\n\/\/\/ It comprises the working language, the set of applicable patterns and\n\/\/\/ exceptions, as well as the left and right intra-word hyphenation boundaries.\n#[derive(Clone, Debug)]\npub struct Corpus {\n pub language: Language,\n pub patterns: Patterns,\n pub exceptions: Option<Exceptions>,\n pub left_min: usize,\n pub right_min: usize\n}\n\n\nuse self::Language::*;\n\n\/\/\/ The set of available languages.\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum Language {\n Afrikaans,\n Armenian,\n Assamese,\n Basque,\n Bengali,\n Bulgarian,\n Catalan,\n Chinese,\n Coptic,\n Croatian,\n Czech,\n Danish,\n Dutch,\n English_GB,\n English_US,\n Esperanto,\n Estonian,\n Ethiopic,\n Finnish,\n French,\n Friulan,\n Galician,\n Georgian,\n German_1901,\n German_1996,\n German_Swiss,\n Greek_Ancient,\n Greek_Mono,\n Greek_Poly,\n Gujarati,\n Hindi,\n Hungarian,\n Icelandic,\n Indonesian,\n Interlingua,\n Irish,\n Italian,\n Kannada,\n Kurmanji,\n Latin_Classic,\n Latin,\n Latvian,\n Lithuanian,\n Malayalam,\n Marathi,\n Mongolian,\n Norwegian_Bokmal,\n Norwegian_Nynorsk,\n Occitan,\n Oriya,\n Panjabi,\n Piedmontese,\n Polish,\n Portuguese,\n Romanian,\n Romansh,\n Russian,\n Sanskrit,\n Serbian_Cyrillic,\n Serbocroatian_Cyrillic,\n Serbocroatian_Latin,\n Slavonic_Church,\n Slovak,\n Slovenian,\n Spanish,\n Swedish,\n Tamil,\n Telugu,\n Thai,\n Turkish,\n Turkmen,\n Ukrainian,\n Uppersorbian,\n Welsh\n}\n\n\n\/\/\/ The TeX tag for a given language.\npub fn tag(lang: Language) -> &'static str {\n match lang {\n Afrikaans => \"af\",\n Armenian => \"hy\",\n Assamese => \"as\",\n Basque => \"eu\",\n Bengali => \"bn\",\n Bulgarian => \"bg\",\n Catalan => \"ca\",\n Chinese => \"zh-latn-pinyin\",\n Coptic => \"cop\",\n Croatian => \"hr\",\n Czech => \"cs\",\n Danish => \"da\",\n Dutch => \"nl\",\n English_GB => \"en-gb\",\n English_US => \"en-us\",\n Esperanto => \"eo\",\n Estonian => \"et\",\n Ethiopic => \"mul-ethi\",\n Finnish => \"fi\",\n French => \"fr\",\n Friulan => \"fur\",\n Galician => \"gl\",\n Georgian => \"ka\",\n German_1901 => \"de-1901\",\n German_1996 => \"de-1996\",\n German_Swiss => \"de-ch-1901\",\n Greek_Ancient => \"grc\",\n Greek_Mono => \"el-monoton\",\n Greek_Poly => \"el-polyton\",\n Gujarati => \"gu\",\n Hindi => \"hi\",\n Hungarian => \"hu\",\n Icelandic => \"is\",\n Indonesian => \"id\",\n Interlingua => \"ia\",\n Irish => \"ga\",\n Italian => \"it\",\n Kannada => \"kn\",\n Kurmanji => \"kmr\",\n Latin => \"la\",\n Latin_Classic => \"la-x-classic\",\n Latvian => \"lv\",\n Lithuanian => \"lt\",\n Malayalam => \"ml\",\n Marathi => \"mr\",\n Mongolian => \"mn-cyrl\",\n Norwegian_Bokmal => \"nb\",\n Norwegian_Nynorsk => \"nn\",\n Occitan => \"oc\",\n Oriya => \"or\",\n Panjabi => \"pa\",\n Piedmontese => \"pms\",\n Polish => \"pl\",\n Portuguese => \"pt\",\n Romanian => \"ro\",\n Romansh => \"rm\",\n Russian => \"ru\",\n Sanskrit => \"sa\",\n Serbian_Cyrillic => \"sr-cyrl\",\n Serbocroatian_Cyrillic => \"sh-cyrl\",\n Serbocroatian_Latin => \"sh-latn\",\n Slavonic_Church => \"cu\",\n Slovak => \"sk\",\n Slovenian => \"sl\",\n Spanish => \"es\",\n Swedish => \"sv\",\n Tamil => \"ta\",\n Telugu => \"te\",\n Thai => \"th\",\n Turkish => \"tr\",\n Turkmen => \"tk\",\n Ukrainian => \"uk\",\n Uppersorbian => \"hsb\",\n Welsh => \"cy\"\n }\n}\n\n\/\/\/ The default number of 
characters from the start and end of a word\n\/\/\/ which shall not be hyphenated.\npub fn mins(lang: Language) -> (usize, usize) {\n \/\/ NOTE: These values were taken directly from the relevant TeX packages, but\n \/\/ it is unclear how well they map to the notion of Unicode `char` in Rust.\n \/\/\n \/\/ In the worst case, a language featuring graphemes larger than 1 `char` may\n \/\/ set boundaries mid-grapheme. This should be of no practical consequence,\n \/\/ since well-formed hyphenation patterns only match full graphemes.\n match lang {\n Afrikaans => (1, 2),\n Armenian => (1, 2),\n Assamese => (1, 1),\n Basque => (2, 2),\n Bengali => (1, 1),\n Bulgarian => (2, 2),\n Catalan => (2, 2),\n Chinese => (1, 1),\n Coptic => (1, 1),\n Croatian => (2, 2),\n Czech => (2, 3),\n Danish => (2, 2),\n Dutch => (2, 2),\n English_GB => (2, 3),\n English_US => (2, 3),\n Esperanto => (2, 2),\n Estonian => (2, 3),\n Ethiopic => (1, 1),\n Finnish => (2, 2),\n French => (2, 3),\n Friulan => (2, 2),\n Galician => (2, 2),\n Georgian => (1, 2),\n German_1901 => (2, 2),\n German_1996 => (2, 2),\n German_Swiss => (2, 2),\n Greek_Ancient => (1, 1),\n Greek_Mono => (1, 1),\n Greek_Poly => (1, 1),\n Gujarati => (1, 1),\n Hindi => (1, 1),\n Hungarian => (2, 2),\n Icelandic => (2, 2),\n Indonesian => (2, 2),\n Interlingua => (2, 2),\n Irish => (2, 3),\n Italian => (2, 2),\n Kannada => (1, 1),\n Kurmanji => (2, 2),\n Latin => (2, 2),\n Latin_Classic => (2, 2),\n Latvian => (2, 2),\n Lithuanian => (2, 2),\n Malayalam => (1, 1),\n Marathi => (1, 1),\n Mongolian => (2, 2),\n Norwegian_Bokmal => (2, 2),\n Norwegian_Nynorsk => (2, 2),\n Occitan => (2, 2),\n Oriya => (1, 1),\n Panjabi => (1, 1),\n Piedmontese => (2, 2),\n Polish => (2, 2),\n Portuguese => (2, 3),\n Romanian => (2, 2),\n Romansh => (2, 2),\n Russian => (2, 2),\n Sanskrit => (1, 3),\n Serbian_Cyrillic => (2, 2),\n Serbocroatian_Cyrillic => (2, 2),\n Serbocroatian_Latin => (2, 2),\n Slavonic_Church => (1, 2),\n Slovak => (2, 3),\n Slovenian => (2, 2),\n Spanish => (2, 2),\n Swedish => (2, 2),\n Tamil => (1, 1),\n Telugu => (1, 1),\n Thai => (2, 3),\n Turkish => (2, 2),\n Turkmen => (2, 2),\n Ukrainian => (2, 2),\n Uppersorbian => (2, 2),\n Welsh => (2, 3)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #11357 - hi-rustin:rustin-patch-warnings, r=ehuss<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Format code<commit_after><|endoftext|>"} {"text":"<commit_before>static METRIC_PREFIXES: &'static [&'static str] = &[\n \"\", \"K\", \"M\", \"G\", \"T\", \"P\", \"E\", \"Z\", \"Y\"\n];\n\nstatic IEC_PREFIXES: &'static [&'static str] = &[\n \"\", \"Ki\", \"Mi\", \"Gi\", \"Ti\", \"Pi\", \"Ei\", \"Zi\", \"Yi\"\n];\n\nfn format_bytes(mut amount: u64, kilo: u64, prefixes: &[&str]) -> (String, String) {\n let mut prefix = 0;\n while amount > kilo {\n amount \/= kilo;\n prefix += 1;\n }\n return (format!(\"{}\", amount), prefixes[prefix].to_string());\n}\n\npub fn format_IEC_bytes(amount: u64) -> (String, String) {\n format_bytes(amount, 1024, IEC_PREFIXES)\n}\n\npub fn format_metric_bytes(amount: u64) -> (String, String) {\n format_bytes(amount, 1000, METRIC_PREFIXES)\n}\n<commit_msg>Get rid of silly warning<commit_after>static METRIC_PREFIXES: &'static [&'static str] = &[\n \"\", \"K\", \"M\", \"G\", \"T\", \"P\", \"E\", \"Z\", \"Y\"\n];\n\nstatic IEC_PREFIXES: &'static [&'static str] = &[\n \"\", \"Ki\", \"Mi\", \"Gi\", \"Ti\", \"Pi\", \"Ei\", \"Zi\", \"Yi\"\n];\n\nfn format_bytes(mut amount: u64, kilo: u64, prefixes: 
&[&str]) -> (String, String) {\n let mut prefix = 0;\n while amount > kilo {\n amount \/= kilo;\n prefix += 1;\n }\n return (format!(\"{}\", amount), prefixes[prefix].to_string());\n}\n\n#[allow(non_snake_case_functions)]\npub fn format_IEC_bytes(amount: u64) -> (String, String) {\n format_bytes(amount, 1024, IEC_PREFIXES)\n}\n\npub fn format_metric_bytes(amount: u64) -> (String, String) {\n format_bytes(amount, 1000, METRIC_PREFIXES)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug info test for boxes<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags:-Z extra-debug-info\n\/\/ debugger:set print pretty off\n\/\/ debugger:break 29\n\/\/ debugger:run\n\/\/ debugger:print a->boxed\n\/\/ check:$1 = 1\n\/\/ debugger:print b->boxed\n\/\/ check:$2 = {2, 3.5}\n\/\/ debugger:print c->boxed\n\/\/ check:$3 = 4\n\/\/ debugger:print d->boxed\n\/\/ check:$4 = false\n\nfn main() {\n let a = ~1;\n let b = ~(2, 3.5);\n let c = @4;\n let d = @false;\n let _z = 0;\n}\n<|endoftext|>"} {"text":"<commit_before>pub struct PLPWriter\n{\n\t\/\/\/ If true, tabs will be used to indent. Else, spaces will be used to indent\n\tpub use_tabs: bool,\n\n\t\/\/\/ Number of indent characters to prefix each line of code with.\n\t\/\/\/ Adjusting this value will not affect code that has already been written\n\tpub indent_level: u16,\n\n\t\/\/\/ PLP output of this writer\n\tpub code: String,\n}\n\nimpl PLPWriter\n{\n\tpub fn new() -> PLPWriter\n\t{\n\t\tPLPWriter {\n\t\t\tuse_tabs: true,\n\t\t\tindent_level: 0,\n\t\t\tcode: String::new(),\n\t\t\t}\n\t}\n\n\tfn create_indented_string(&self) -> String\n\t{\n\t\tlet mut string = String::new();\n\t\tlet indent = match self.use_tabs {\n\t\t\t\ttrue => \"\\t\",\n\t\t\t\tfalse => \" \",\n\t\t\t};\n\n\t\tfor index in (0..self.indent_level)\n\t\t{\n\t\t\tstring.push_str(indent);\n\t\t}\n\n\t\tstring\n\t}\n\n\tpub fn reset(&mut self)\n\t{\n\t\tself.indent_level = 0;\n\t\tself.code = String::new();\n\t}\n\n\tpub fn li(&mut self, register: &str, value: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"li \");\n\t\tcode.push_str(register);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(value);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn mov(&mut self, register_to: &str, register_from: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"move \");\n\t\tcode.push_str(register_to);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_from);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn addu(&mut self, register_sum: &str, register_addend1: &str, register_addend2: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"addu \");\n\t\tcode.push_str(register_sum);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn 
code;\n\t}\n\n\tpub fn subu(&mut self, register_difference: &str, register_addend1: &str, register_addend2: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"subu \");\n\t\tcode.push_str(register_difference);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn mullo(&mut self, register_product: &str, register_multiplicand1: &str, register_multiplicand2: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"mullo \");\n\t\tcode.push_str(register_product);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_multiplicand1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_multiplicand2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn sw(&mut self, register_target: &str, offset: u16, register_address: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"sw \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(&*offset.to_string());\n\t\tcode.push_str(\"(\");\n\t\tcode.push_str(register_address);\n\t\tcode.push_str(\")\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn lw(&mut self, register_target: &str, offset: u16, register_address: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"lw \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(&*offset.to_string());\n\t\tcode.push_str(\"(\");\n\t\tcode.push_str(register_address);\n\t\tcode.push_str(\")\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn push(&mut self, register_target: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"push \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn pop(&mut self, register_target: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"pop \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn call(&mut self, function_label: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"call \");\n\t\tcode.push_str(function_label);\n\t\tcode.push_str(\"\\n\");\n\t\tcode.push_str(\"nop\");\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn nop(&mut self) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"nop\");\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn label(&mut self, label: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(label);\n\t\tcode.push_str(\":\");\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn beq(&mut self, register_comparator1: &str, register_comparator2: &str, target_label: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"beq \");\n\t\tcode.push_str(register_comparator1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_comparator2);\n\t\tcode.push_str(\", 
\");\n\t\tcode.push_str(target_label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn bne(&mut self, register_comparator1: &str, register_comparator2: &str, target_label: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"bne \");\n\t\tcode.push_str(register_comparator1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_comparator2);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(target_label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn ret(&mut self) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"return\\nnop\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn j(&mut self, label: &str) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"j \");\n\t\tcode.push_str(label);\n\t\tcode.push_str(\"\\n\");\n\t\tcode.push_str(\"nop\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n\n\tpub fn space(&mut self, amount: u16) -> String\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\".space \");\n\t\tcode.push_str(&*amount.to_string());\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\treturn code;\n\t}\n}\n<commit_msg>Remove unused return values<commit_after>pub struct PLPWriter\n{\n\t\/\/\/ If true, tabs will be used to indent. Else, spaces will be used to indent\n\tpub use_tabs: bool,\n\n\t\/\/\/ Number of indent characters to prefix each line of code with.\n\t\/\/\/ Adjusting this value will not affect code that has already been written\n\tpub indent_level: u16,\n\n\t\/\/\/ PLP output of this writer\n\tpub code: String,\n}\n\nimpl PLPWriter\n{\n\tpub fn new() -> PLPWriter\n\t{\n\t\tPLPWriter {\n\t\t\tuse_tabs: true,\n\t\t\tindent_level: 0,\n\t\t\tcode: String::new(),\n\t\t\t}\n\t}\n\n\tfn create_indented_string(&self) -> String\n\t{\n\t\tlet mut string = String::new();\n\t\tlet indent = match self.use_tabs {\n\t\t\t\ttrue => \"\\t\",\n\t\t\t\tfalse => \" \",\n\t\t\t};\n\n\t\tfor _ in (0..self.indent_level)\n\t\t{\n\t\t\tstring.push_str(indent);\n\t\t}\n\n\t\tstring\n\t}\n\n\t#[allow(dead_code)]\n\tpub fn reset(&mut self)\n\t{\n\t\tself.indent_level = 0;\n\t\tself.code = String::new();\n\t}\n\n\tpub fn li(&mut self, register: &str, value: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"li \");\n\t\tcode.push_str(register);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(value);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn mov(&mut self, register_to: &str, register_from: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"move \");\n\t\tcode.push_str(register_to);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_from);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn addu(&mut self, register_sum: &str, register_addend1: &str, register_addend2: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"addu \");\n\t\tcode.push_str(register_sum);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn subu(&mut self, register_difference: &str, register_addend1: &str, register_addend2: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"subu 
\");\n\t\tcode.push_str(register_difference);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_addend2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn mullo(&mut self, register_product: &str, register_multiplicand1: &str, register_multiplicand2: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"mullo \");\n\t\tcode.push_str(register_product);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_multiplicand1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_multiplicand2);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn sw(&mut self, register_target: &str, offset: u16, register_address: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"sw \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(&*offset.to_string());\n\t\tcode.push_str(\"(\");\n\t\tcode.push_str(register_address);\n\t\tcode.push_str(\")\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn lw(&mut self, register_target: &str, offset: u16, register_address: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"lw \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(&*offset.to_string());\n\t\tcode.push_str(\"(\");\n\t\tcode.push_str(register_address);\n\t\tcode.push_str(\")\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn push(&mut self, register_target: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"push \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn pop(&mut self, register_target: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"pop \");\n\t\tcode.push_str(register_target);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn call(&mut self, function_label: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"call \");\n\t\tcode.push_str(function_label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\tself.nop();\n\t}\n\n\tpub fn nop(&mut self)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"nop\");\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn label(&mut self, label: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(label);\n\t\tcode.push_str(\":\");\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\t#[allow(dead_code)]\n\tpub fn beq(&mut self, register_comparator1: &str, register_comparator2: &str, target_label: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"beq \");\n\t\tcode.push_str(register_comparator1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_comparator2);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(target_label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\t#[allow(dead_code)]\n\tpub fn bne(&mut self, register_comparator1: &str, register_comparator2: &str, target_label: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"bne \");\n\t\tcode.push_str(register_comparator1);\n\t\tcode.push_str(\", \");\n\t\tcode.push_str(register_comparator2);\n\t\tcode.push_str(\", 
\");\n\t\tcode.push_str(target_label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n\n\tpub fn ret(&mut self)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"return\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\tself.nop();\n\t}\n\n\tpub fn j(&mut self, label: &str)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\"j \");\n\t\tcode.push_str(label);\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t\tself.nop();\n\t}\n\n\tpub fn space(&mut self, amount: u16)\n\t{\n\t\tlet mut code = self.create_indented_string();\n\t\tcode.push_str(\".space \");\n\t\tcode.push_str(&*amount.to_string());\n\t\tcode.push_str(\"\\n\");\n\n\t\tself.code.push_str(&*code);\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add file `common_list.rs` to test directory, include each test crate to test library<commit_after>mod str_test;\n<|endoftext|>"} {"text":"<commit_before>#![feature(no_std)]\n#![feature(const_fn)]\n#![no_std]\n#![no_builtins]\n\n#[macro_use]\nextern crate uni;\nextern crate xen;\n\npub mod event;\npub mod arch;\npub mod libc;\n\nextern {\n fn main(_: isize, _: *const *const u8) -> isize;\n}\n\n\/\/ 8KB\nconst STACK_SIZE: usize = 8192;\n\n#[no_mangle]\n#[allow(non_upper_case_globals)]\n#[link_section=\".stack\"]\npub static rust_stack: [u8; STACK_SIZE] = [0; STACK_SIZE];\n\nfn init() {\n self::arch::init();\n\n println!(\"Uni.rs is booting\");\n\n \/\/ Memory initialization is unsafe\n unsafe {\n let (heap_start, heap_size) = arch::init_memory();\n\n uni::alloc::init(heap_start, heap_size);\n }\n\n event::init();\n}\n\n#[no_mangle]\npub extern \"C\" fn uni_rust_entry() -> ! {\n let app_ret;\n\n init();\n\n unsafe {\n app_ret = main(0, core::ptr::null());\n }\n\n uni::console::console().flush();\n\n xen::sched::poweroff(app_ret as xen::defs::Ulong);\n\n panic!(\"Failed to poweroff the machine !\");\n}\n<commit_msg>libboot: Events are now enabled after initialization<commit_after>#![feature(no_std)]\n#![feature(const_fn)]\n#![no_std]\n#![no_builtins]\n\n#[macro_use]\nextern crate uni;\nextern crate xen;\n\npub mod event;\npub mod arch;\npub mod libc;\n\nextern {\n fn main(_: isize, _: *const *const u8) -> isize;\n}\n\n\/\/ 8KB\nconst STACK_SIZE: usize = 8192;\n\n#[no_mangle]\n#[allow(non_upper_case_globals)]\n#[link_section=\".stack\"]\npub static rust_stack: [u8; STACK_SIZE] = [0; STACK_SIZE];\n\nfn init() {\n self::arch::init();\n\n println!(\"Uni.rs is booting\");\n\n \/\/ Memory initialization is unsafe\n unsafe {\n let (heap_start, heap_size) = arch::init_memory();\n\n uni::alloc::init(heap_start, heap_size);\n }\n\n event::init();\n\n xen::enable_upcalls();\n}\n\n#[no_mangle]\npub extern \"C\" fn uni_rust_entry() -> ! 
{\n let app_ret;\n\n init();\n\n unsafe {\n app_ret = main(0, core::ptr::null());\n }\n\n xen::disable_upcalls();\n\n uni::console::console().flush();\n\n xen::sched::poweroff(app_ret as xen::defs::Ulong);\n\n panic!(\"Failed to poweroff the machine !\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cleaned up the last clippy warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for issue 4573<commit_after>\/\/ rustmft-version:Two\n\/\/ rustmft-use_small_heuristics:Max\n\/\/ rustmft-merge_derives:false\n\/\/ These are the same rustfmt configuration options that are used\n\/\/ in the comiler as of ce39461ca75a and 8eb7c58dbb7b\n\/\/ These are commits in https:\/\/github.com\/rust-lang\/rust\n\n#![no_std] \/\/ inner attribute comment\n\/\/ inner attribute comment\n#![no_implicit_prelude]\n\/\/ post inner attribute comment\n\n#[cfg(not(miri))] \/\/ inline comment\n#[no_link]\nextern crate foo;\n\n\/\/ before attributes\n#[no_link]\n\/\/ between attributes\n#[cfg(not(miri))] \/\/ inline comment\nextern crate foo as bar;\n\n#[cfg(not(miri))] \/\/ inline comment\n\/\/ between attribute and use\nuse foo;\n\n#[cfg(not(miri))] \/\/ inline comment\nuse foo;\n\n\/* pre attributre *\/\n#[cfg(not(miri))]\nuse foo::bar;\n\n#[cfg(not(miri))] \/\/ inline comment\nuse foo::bar as FooBar;\n\n#[cfg(not(miri))] \/\/ inline comment\n#[allow(unused)]\n#[deprecated(\n since = \"5.2\", \/\/ inline inner comment\n note = \"FOO was rarely used. Users should instead use BAR\"\n)]\n#[allow(unused)]\nstatic FOO: i32 = 42;\n\n#[used]\n#[export_name = \"FOO\"]\n#[cfg(not(miri))] \/\/ inline comment\n#[deprecated(\n since = \"5.2\",\n note = \"FOO was rarely used. Users should instead use BAR\"\n)]\nstatic FOO: i32 = 42;\n\n#[cfg(not(miri))] \/\/ inline comment\n#[export_name = \"FOO\"]\nstatic BAR: &'static str = \"bar\";\n\n#[cfg(not(miri))] \/\/ inline comment\nconst BAR: i32 = 42;\n\n#[cfg(not(miri))] \/\/ inline comment\n#[no_mangle]\n#[link_section = \".example_section\"]\nfn foo(bar: usize) {\n #[cfg(not(miri))] \/\/ inline comment\n println!(\"hello world!\");\n}\n\n#[cfg(not(miri))] \/\/ inline comment\nmod foo {}\n\n#[cfg(not(miri))] \/\/ inline comment\nextern \"C\" {\n fn my_c_function(x: i32) -> bool;\n}\n\n#[cfg(not(miri))] \/\/ inline comment\n#[link(name = \"CoreFoundation\", kind = \"framework\")]\nextern \"C\" {\n\n #[link_name = \"actual_symbol_name\"] \/\/ inline comment\n \/\/ between attribute and function\n fn my_c_function(x: i32) -> bool;\n}\n\n#[cfg(not(miri))] \/\/ inline comment\npub extern \"C\" fn callable_from_c(x: i32) -> bool {\n x % 3 == 0\n}\n\n#[cfg(not(miri))] \/\/ inline comment\n\/* between attribute block comment *\/\n#[no_mangle]\n\/* between attribute and type *\/\ntype Foo = Bar<u8>;\n\n#[no_mangle]\n#[cfg(not(miri))] \/\/ inline comment\n#[non_exhaustive] \/\/ inline comment\nenum Foo {\n Bar,\n Baz,\n}\n\n#[no_mangle]\n#[cfg(not(miri))] \/* inline comment *\/\nstruct Foo<A> {\n x: A,\n}\n\n#[cfg(not(miri))] \/\/ inline comment\nunion Foo<A, B> {\n x: A,\n y: B,\n}\n\n#[cfg(not(miri))] \/\/ inline comment\ntrait Foo {}\n\n#[cfg(not(miri))] \/\/ inline comment\ntrait Foo = Bar + Quux;\n\n#[cfg(not(miri))] \/\/ inline comment\nimpl Foo {}\n\n#[cfg(not(miri))] \/\/ inline comment\nmacro_rules! 
bar {\n (3) => {};\n}\n\nmod nested {\n #[cfg(not(miri))] \/\/ inline comment\n \/\/ between attribute and use\n use foo;\n\n #[cfg(not(miri))] \/\/ inline comment\n use foo;\n\n #[cfg(not(miri))] \/\/ inline comment\n use foo::bar;\n\n #[cfg(not(miri))] \/\/ inline comment\n use foo::bar as FooBar;\n\n #[cfg(not(miri))] \/\/ inline comment\n static FOO: i32 = 42;\n\n #[cfg(not(miri))] \/\/ inline comment\n static FOO: i32 = 42;\n\n #[cfg(not(miri))] \/\/ inline comment\n static FOO: &'static str = \"bar\";\n\n #[cfg(not(miri))] \/\/ inline comment\n const FOO: i32 = 42;\n\n #[cfg(not(miri))] \/\/ inline comment\n fn foo(bar: usize) {\n #[cfg(not(miri))] \/\/ inline comment\n println!(\"hello world!\");\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n mod foo {}\n\n #[cfg(not(miri))] \/\/ inline comment\n mod foo {}\n\n #[cfg(not(miri))] \/\/ inline comment\n extern \"C\" {\n fn my_c_function(x: i32) -> bool;\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n #[link(name = \"CoreFoundation\", kind = \"framework\")]\n extern \"C\" {\n\n #[link_name = \"actual_symbol_name\"] \/\/ inline comment\n \/\/ between attribute and function\n fn my_c_function(x: i32) -> bool;\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n pub extern \"C\" fn callable_from_c(x: i32) -> bool {\n x % 3 == 0\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n type Foo = Bar<u8>;\n\n #[cfg(not(miri))] \/\/ inline comment\n #[non_exhaustive] \/\/ inline comment\n enum Foo {\n \/\/ comment\n #[attribute_1]\n #[attribute_2] \/\/ comment\n \/\/ comment!\n Bar,\n \/* comment *\/\n #[attribute_1]\n #[attribute_2] \/* comment *\/\n #[attribute_3]\n #[attribute_4]\n \/* comment! *\/\n Baz,\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n struct Foo<A> {\n x: A,\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n union Foo<A, B> {\n #[attribute_1]\n #[attribute_2] \/* comment *\/\n #[attribute_3]\n #[attribute_4] \/\/ comment\n x: A,\n y: B,\n }\n\n #[cfg(not(miri))] \/\/ inline comment\n #[allow(missing_docs)]\n trait Foo {\n #[must_use] \/* comment\n * that wrappes to\n * the next line *\/\n fn bar() {}\n }\n\n #[allow(missing_docs)]\n #[cfg(not(miri))] \/\/ inline comment\n trait Foo = Bar + Quux;\n\n #[allow(missing_docs)]\n #[cfg(not(miri))] \/\/ inline comment\n impl Foo {}\n\n #[cfg(not(miri))] \/\/ inline comment\n macro_rules! 
bar {\n (3) => {};\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>new experiment<commit_after>extern crate rand;\nextern crate timely;\nextern crate differential_dataflow;\nextern crate core_affinity;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse timely::dataflow::*;\nuse timely::dataflow::operators::probe::Handle;\n\nuse differential_dataflow::input::Input;\nuse differential_dataflow::Collection;\nuse differential_dataflow::operators::*;\nuse differential_dataflow::lattice::Lattice;\nuse differential_dataflow::operators::iterate::Variable;\nuse differential_dataflow::operators::arrange::ArrangeByKey;\nuse differential_dataflow::operators::arrange::ArrangeBySelf;\n\ntype Node = usize;\n\nfn main() {\n\n let nodes: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n let rate: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n let goal: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n let queries: usize = std::env::args().nth(5).unwrap().parse().unwrap();\n\n \/\/ Our setting involves four read query types, and two updatable base relations.\n \/\/\n \/\/ Q1: Point lookup: reads \"state\" associated with a node.\n \/\/ Q2: One-hop lookup: reads \"state\" associated with neighbors of a node.\n \/\/ Q3: Two-hop lookup: reads \"state\" associated with n-of-n's of a node.\n \/\/ Q4: Shortest path: reports hop count between two query nodes.\n \/\/\n \/\/ R1: \"State\": a pair of (node, T) for some type T that I don't currently know.\n \/\/ R2: \"Graph\": pairs (node, node) indicating linkage between the two nodes.\n\n timely::execute_from_args(std::env::args().skip(3), move |worker| {\n\n let index = worker.index();\n let peers = worker.peers();\n let timer = ::std::time::Instant::now();\n\n let core_ids = core_affinity::get_core_ids().unwrap();\n core_affinity::set_for_current(core_ids[index % core_ids.len()]);\n\n \/\/ define BFS dataflow; return handles to roots and edges inputs\n let mut probe = Handle::new();\n\n let (mut q1, mut q2, mut q3, mut q4, mut state, mut graph) = worker.dataflow(|scope| {\n\n let (q1_input, q1) = scope.new_collection();\n let (q2_input, q2) = scope.new_collection::<usize,isize>();\n let (q3_input, q3) = scope.new_collection::<usize,isize>();\n let (q4_input, q4) = scope.new_collection();\n\n let (state_input, state) = scope.new_collection();\n let (graph_input, graph) = scope.new_collection();\n\n let state_indexed = state.arrange_by_key();\n let graph_indexed = graph.map(|(src, dst)| (dst, src))\n .concat(&graph)\n .arrange_by_key();\n\n \/\/ Q1: Point lookups on `state`:\n q1 .arrange_by_self()\n .join_core(&state_indexed, |&query, &(), &state| Some((query, state)))\n .probe_with(&mut probe);\n\n \/\/ Q2: One-hop lookups on `state`:\n q2 .arrange_by_self()\n .join_core(&graph_indexed, |&query, &(), &friend| Some((friend, query)))\n .join_core(&state_indexed, |_friend, &query, &state| Some((query, state)))\n .probe_with(&mut probe);\n\n \/\/ Q3: Two-hop lookups on `state`:\n q3 .arrange_by_self()\n .join_core(&graph_indexed, |&query, &(), &friend| Some((friend, query)))\n .join_core(&graph_indexed, |_friend, &query, &friend2| Some((friend2, query)))\n .join_core(&state_indexed, |_friend2, &query, &state| Some((query, state)))\n .probe_with(&mut probe);\n\n \/\/ Q4: Shortest path queries:\n bidijkstra(&graph_indexed, &graph_indexed, &q4)\n .probe_with(&mut probe);\n\n (q1_input, q2_input, q3_input, q4_input, state_input, graph_input)\n });\n\n let 
seed: &[_] = &[1, 2, 3, index];\n let mut rng1: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge additions\n let mut rng2: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge deletions\n let mut rng3: StdRng = SeedableRng::from_seed(seed); \/\/ rng for q1 additions\n let mut rng4: StdRng = SeedableRng::from_seed(seed); \/\/ rng for q1 deletions\n let mut rng5: StdRng = SeedableRng::from_seed(seed); \/\/ rng for q4 additions\n let mut rng6: StdRng = SeedableRng::from_seed(seed); \/\/ rng for q4 deletions\n\n if index == 0 { println!(\"performing workload on random graph with {} nodes, {} edges:\", nodes, edges); }\n\n let worker_edges = edges\/peers + if index < (edges % peers) { 1 } else { 0 };\n for _ in 0 .. worker_edges {\n graph.insert((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)));\n }\n for node in 0 .. nodes {\n if node % peers == index {\n state.insert((node, node));\n }\n }\n\n \/\/ let queries = 1000;\n let worker_window = queries\/peers + if index < (queries % peers) { 1 } else { 0 };\n for _ in 0 .. worker_window {\n q1.insert(rng3.gen_range(0, nodes));\n q4.insert((rng5.gen_range(0, nodes), rng5.gen_range(0, nodes)));\n }\n\n q1.advance_to(1); q1.flush(); \/\/ q1 queries start now.\n q2.advance_to(usize::max_value()); q2.flush(); \/\/ q2 queries start here.\n q3.advance_to(usize::max_value()); q3.flush(); \/\/ q3 queries start here.\n q4.advance_to(1); q4.flush(); \/\/ q4 queries start here.\n state.advance_to(usize::max_value()); state.flush();\n graph.advance_to(1); graph.flush();\n\n \/\/ finish graph loading work.\n while probe.less_than(q1.time()) { worker.step(); }\n\n if index == 0 { println!(\"{:?}\\tgraph loaded\", timer.elapsed()); }\n\n let requests_per_sec = rate \/ 2;\n let ns_per_request = 1_000_000_000 \/ requests_per_sec;\n let mut request_counter = peers + index; \/\/ skip first request for each.\n let mut ack_counter = peers + index;\n\n let mut inserted_ns = 1;\n\n let timer = ::std::time::Instant::now();\n let mut counts = vec![[0usize; 16]; 64];\n\n if index == 0 {\n\n let ack_target = goal * rate;\n while ack_counter < ack_target {\n\n \/\/ Open-loop latency-throughput test, parameterized by offered rate `ns_per_request`.\n let elapsed = timer.elapsed();\n let elapsed_ns: usize = (elapsed.as_secs() * 1_000_000_000 + (elapsed.subsec_nanos() as u64)) as usize;\n\n \/\/ Determine completed ns.\n let acknowledged_ns: usize = probe.with_frontier(|frontier| frontier[0].inner);\n\n \/\/ any un-recorded measurements that are complete should be recorded.\n while (ack_counter * ns_per_request) < acknowledged_ns && ack_counter < ack_target {\n let requested_at = ack_counter * ns_per_request;\n let count_index = (elapsed_ns - requested_at).next_power_of_two().trailing_zeros() as usize;\n if ack_counter > ack_target \/ 2 {\n let low_bits = ((elapsed_ns - requested_at) >> (count_index - 5)) & 0xF;\n counts[count_index][low_bits as usize] += 1;\n }\n ack_counter += 1;\/\/peers;\n }\n\n \/\/ Now, should we introduce more records before stepping the worker?\n \/\/\n \/\/ Thinking: inserted_ns - acknowledged_ns is some amount of time that\n \/\/ is currently outstanding in the system, and we needn't advance our\n \/\/ inputs unless by this order of magnitude.\n \/\/\n \/\/ The more sophisticated plan is: we compute the next power of two\n \/\/ greater than inserted_ns - acknowledged_ns and look for the last\n \/\/ multiple of this number in the interval [inserted_ns, elapsed_ns].\n \/\/ If such a multiple exists, we introduce records to that point and\n 
\/\/ advance the input.\n\n \/\/ let scale = (inserted_ns - acknowledged_ns).next_power_of_two();\n \/\/ let target_ns = elapsed_ns & !(scale - 1);\n\n let mut target_ns = elapsed_ns & !((1 << 20) - 1);\n\n \/\/ let mut target_ns = if acknowledged_ns >= inserted_ns { elapsed_ns } else { inserted_ns };\n\n if target_ns > inserted_ns + 1_000_000_000 { target_ns = inserted_ns + 1_000_000_000; }\n\n if inserted_ns < target_ns {\n\n while (request_counter * ns_per_request) < target_ns {\n match request_counter % 3 {\n 0 => {\n graph.advance_to(request_counter * ns_per_request);\n graph.insert((rng1.gen_range(0, nodes),rng1.gen_range(0, nodes)));\n graph.remove((rng2.gen_range(0, nodes),rng2.gen_range(0, nodes)));\n }\n 1 => {\n q1.advance_to(request_counter * ns_per_request);\n q1.insert(rng3.gen_range(0, nodes));\n q1.remove(rng4.gen_range(0, nodes));\n }\n 2 => {\n q4.advance_to(request_counter * ns_per_request);\n q4.insert((rng5.gen_range(0, nodes),rng5.gen_range(0, nodes)));\n q4.remove((rng6.gen_range(0, nodes),rng6.gen_range(0, nodes)));\n }\n _ => { panic!(\"not how numbers work\"); }\n }\n request_counter += 1;\/\/peers;\n }\n graph.advance_to(target_ns); graph.flush();\n q1.advance_to(target_ns); q1.flush();\n q4.advance_to(target_ns); q4.flush();\n inserted_ns = target_ns;\n }\n\n worker.step();\n }\n }\n\n if index == 0 {\n\n let mut results = Vec::new();\n let total = counts.iter().map(|x| x.iter().sum::<usize>()).sum();\n let mut sum = 0;\n for index in (10 .. counts.len()).rev() {\n for sub in (0 .. 16).rev() {\n if sum > 0 && sum < total {\n let latency = (1 << (index-1)) + (sub << (index-5));\n let fraction = (sum as f64) \/ (total as f64);\n results.push((latency, fraction));\n }\n sum += counts[index][sub];\n }\n }\n for (latency, fraction) in results.drain(..).rev() {\n println!(\"{}\\t{}\", latency, fraction);\n }\n }\n\n }).unwrap();\n}\n\nuse differential_dataflow::trace::implementations::ord::OrdValSpine as DefaultValTrace;\nuse differential_dataflow::operators::arrange::TraceAgent;\nuse differential_dataflow::operators::arrange::Arranged;\n\ntype Arrange<G: Scope, K, V, R> = Arranged<G, K, V, R, TraceAgent<K, V, G::Timestamp, R, DefaultValTrace<K, V, G::Timestamp, R>>>;\n\n\/\/ returns pairs (n, s) indicating node n can be reached from a root in s steps.\nfn bidijkstra<G: Scope>(\n forward_graph: &Arrange<G, Node, Node, isize>,\n reverse_graph: &Arrange<G, Node, Node, isize>,\n goals: &Collection<G, (Node, Node)>) -> Collection<G, ((Node, Node), u32)>\nwhere G::Timestamp: Lattice+Ord {\n\n goals.scope().scoped(|inner| {\n\n \/\/ Our plan is to start evolving distances from both sources and destinations.\n \/\/ The evolution from a source or destination should continue as long as there\n \/\/ is a corresponding destination or source that has not yet been reached.\n\n \/\/ forward and reverse (node, (root, dist))\n let forward = Variable::from(goals.map(|(x,_)| (x,(x,0))).enter(inner));\n let reverse = Variable::from(goals.map(|(_,y)| (y,(y,0))).enter(inner));\n\n let goals = goals.enter(inner);\n let forward_graph = forward_graph.enter(inner);\n let reverse_graph = reverse_graph.enter(inner);\n\n \/\/ Let's determine which (src, dst) pairs are ready to return.\n \/\/\n \/\/ done(src, dst) := forward(src, med), reverse(dst, med), goal(src, dst).\n \/\/\n \/\/ This is a cyclic join, which should scare us a bunch.\n let reached =\n forward\n .join_map(&reverse, |_, &(src,d1), &(dst,d2)| ((src, dst), d1 + d2))\n .group(|_key, s, t| t.push((*s[0].0, 1)))\n 
.semijoin(&goals);\n\n let active =\n reached\n .negate()\n .map(|(srcdst,_)| srcdst)\n .concat(&goals)\n .consolidate();\n\n \/\/ Let's expand out forward queries that are active.\n let forward_active = active.map(|(x,_y)| x).distinct();\n let forward_next =\n forward\n .map(|(med, (src, dist))| (src, (med, dist)))\n .semijoin(&forward_active)\n .map(|(src, (med, dist))| (med, (src, dist)))\n .join_core(&forward_graph, |_med, &(src, dist), &next| Some((next, (src, dist+1))))\n .concat(&forward)\n .map(|(next, (src, dist))| ((next, src), dist))\n .group(|_key, s, t| t.push((*s[0].0, 1)))\n .map(|((next, src), dist)| (next, (src, dist)));\n\n forward.set(&forward_next);\n\n \/\/ Let's expand out reverse queries that are active.\n let reverse_active = active.map(|(_x,y)| y).distinct();\n let reverse_next =\n reverse\n .map(|(med, (rev, dist))| (rev, (med, dist)))\n .semijoin(&reverse_active)\n .map(|(rev, (med, dist))| (med, (rev, dist)))\n .join_core(&reverse_graph, |_med, &(rev, dist), &next| Some((next, (rev, dist+1))))\n .concat(&reverse)\n .map(|(next, (rev, dist))| ((next, rev), dist))\n .group(|_key, s, t| t.push((*s[0].0, 1)))\n .map(|((next,rev), dist)| (next, (rev, dist)));\n\n reverse.set(&reverse_next);\n\n reached.leave()\n })\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>fn sum_vec(nums: &Vec<i32>) -> i32 {\n let mut result = 0;\n for i in nums {\n result += *i;\n }\n result\n}\n\nfn check_sub_array_sum(nums: Vec<i32>, k: i32) -> bool {\n let length = nums.len();\n if length <= 1 {\n return false;\n }\n\n if sum_vec(&nums) == 0 {\n return true;\n }\n\n if k == 0 {\n return false;\n }\n\n if length >= 2 && k > 0 {\n return true;\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unsupported Versions and Protocols<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>12 - super and self<commit_after>fn function() {\n println!(\"called `function()`\");\n}\n\nmod my {\n pub fn indirect_call() {\n \/\/ Let's access all the functions named `function` from this scope\n print!(\"called `my::indirect_call()`, that\\n> \");\n\n \/\/ `my::function` can be called directly\n function();\n\n {\n \/\/ This will bind to the `cool::function` in the *crate* scope\n \/\/ In this case the crate scope is the outermost scope\n use cool::function as root_cool_function;\n\n print!(\"> \");\n root_cool_function();\n }\n\n {\n \/\/ `self` refers to the current module scope, in this case: `my`\n use self::cool::function as my_cool_function;\n\n print!(\"> \")\n my_cool_function();\n }\n\n {\n \/\/ `super` refers to the parent scope, i.e. outside of the `my`\n \/\/ module\n use super::function as root_function;\n\n print!(\"> \");\n root_function();\n }\n }\n\n fn function() {\n println!(\"called `my::function()`\");\n }\n\n mod cool {\n pub fn function() {\n println!(\"called `my::cool::function()`\");\n }\n }\n}\n\nmod cool {\n pub fn function() {\n println!(\"called `cool::function()`\");\n }\n}\n\nfn main() {\n my::indirect_call();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create humain rust<commit_after>struct Human {\n _name: String,\n _age: u8,\n}\n\nimpl Human {\n\n fn getTralalaWith(&self, target: &Human) {\n if (self._age >= 18 && target._age >= 18) {\n target.whine();\n }\n else {\n if (self._age < 18) {\n println!(\"{} is not 18' right now. Stop trying to have tralala before 18.\", self._name);\n }\n else {\n println!(\"{} is not 18' right now. 
Don't touch.\", target._name);\n }\n }\n }\n\n fn say(&self, what: &str) {\n println!(\"{}: {}\", self._name, what);\n }\n\n fn whine(&self) {\n self.say(\"Ohhh yeahh...\");\n }\n}\n\nfn main() {\n let mut h1 = Human {_name: \"Trololo\".to_string(), _age: 20};\n let mut h2 = Human {_name: \"Ponyporn\".to_string(), _age: 20};\n let mut h3 = Human {_name: \"Little Ponyporn\".to_string(), _age: 12};\n h1.getTralalaWith(&h2);\n h1.getTralalaWith(&h3);\n h3.getTralalaWith(&h2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cleanup<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>starting exp example; trying to understand references in data structures<commit_after>use std::hash;\nuse std::hash::Hash;\n\n#[derive(Show)]\npub enum Exp<'x> {\n Value(int),\n Plus(Box<Exp<'x>>, Box<Exp<'x>>),\n Ref(&'x Exp<'x>)\n}\n\n#[test]\npub fn print() {\n let e = Exp::Plus(box Exp::Value(1),\n box Exp::Plus(box Exp::Value(2),\n box Exp::Plus(box Exp::Value(3),\n box Exp::Plus(box Exp::Value(4),\n box Exp::Value(5))))) ;\n print!(\"{}\", e);\n}\n\npub fn main () {\n let e = Exp::Plus(box Exp::Value(1),\n box Exp::Plus(box Exp::Value(2),\n box Exp::Plus(box Exp::Value(3),\n box Exp::Plus(box Exp::Value(4),\n box Exp::Value(5))))) ;\n print!(\"{}\", e);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #33<commit_after>extern mod euler;\nuse euler::calc::{ get_gcd };\n\n\/\/ AB \/ AC => NG (10A+B : 10A+C = B : C => 10AC+BC = 10AB+BC => 10A(C-B) = 0 -> trivial)\n\/\/ BA \/ CA => NG\n\/\/ AB \/ CA => (10A + B : 10C + A = B : C => 10AC + BC = 10BC + AB => A(10C-B) = 9BC)\n\/\/ BA \/ AC => (10B + A : 10A + C = B : C => 10BC + AC = 10AB + BC => A(10B-C) = 9BC)\n\/\/\n\/\/ * AB \/ CA = B \/ C\n\/\/ A = 9BC \/ (10C - B)\n\/\/ C > B\n\/\/\n\/\/ * BA \/ AC = B \/ C\n\/\/ A = 9BC \/ (10B - C)\n\/\/ C > B\n\nfn main() {\n let mut prod_numer = 1;\n let mut prod_denom = 1;\n\n for uint::range(1, 10) |b| {\n for uint::range(b + 1, 10) |c| {\n let a_numer = 9 * b * c;\n let a_denom = 10 * c - b;\n if a_numer % a_denom == 0 && a_numer < 10 * a_denom {\n let a = a_numer \/ a_denom;\n prod_numer *= b;\n prod_denom *= c;\n io::println(fmt!(\"%u%u\/%u%u = %u\/%u\", a, b, c, a, b, c));\n }\n }\n }\n for uint::range(1, 10) |b| {\n for uint::range(b + 1, 10) |c| {\n let a_numer = 9 * b * c;\n let a_denom = 10 * b - c;\n if a_numer % a_denom == 0 && a_numer < 10 * a_denom {\n let a = a_numer \/ a_denom;\n prod_numer *= b;\n prod_denom *= c;\n io::println(fmt!(\"%u%u\/%u%u = %u\/%u\", b, a, a, c, b, c));\n }\n }\n }\n\n let gcd = get_gcd(prod_numer, prod_denom);\n io::println(fmt!(\"%u\/%u\", prod_numer \/ gcd, prod_denom \/ gcd));\n io::println(fmt!(\"answer: %u\", prod_denom \/ gcd));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix(tokenizer): use float numbers<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate gcc;\nextern crate build_helper;\n\nuse std::process::Command;\nuse std::env;\nuse std::path::{PathBuf, Path};\n\nuse build_helper::output;\n\nfn main() {\n println!(\"cargo:rustc-cfg=cargobuild\");\n\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let llvm_config = env::var_os(\"LLVM_CONFIG\")\n .map(PathBuf::from)\n .unwrap_or_else(|| {\n if let Some(dir) = env::var_os(\"CARGO_TARGET_DIR\").map(PathBuf::from) {\n let to_test = dir.parent()\n .unwrap()\n .parent()\n .unwrap()\n .join(&target)\n .join(\"llvm\/bin\/llvm-config\");\n if Command::new(&to_test).output().is_ok() {\n return to_test;\n }\n }\n PathBuf::from(\"llvm-config\")\n });\n\n println!(\"cargo:rerun-if-changed={}\", llvm_config.display());\n\n \/\/ Test whether we're cross-compiling LLVM. This is a pretty rare case\n \/\/ currently where we're producing an LLVM for a different platform than\n \/\/ what this build script is currently running on.\n \/\/\n \/\/ In that case, there's no guarantee that we can actually run the target,\n \/\/ so the build system works around this by giving us the LLVM_CONFIG for\n \/\/ the host platform. This only really works if the host LLVM and target\n \/\/ LLVM are compiled the same way, but for us that's typically the case.\n \/\/\n \/\/ We *want* detect this cross compiling situation by asking llvm-config\n \/\/ what it's host-target is. If that's not the TARGET, then we're cross\n \/\/ compiling. Unfortunately `llvm-config` seems either be buggy, or we're\n \/\/ misconfiguring it, because the `i686-pc-windows-gnu` build of LLVM will\n \/\/ report itself with a `--host-target` of `x86_64-pc-windows-gnu`. This\n \/\/ tricks us into thinking we're doing a cross build when we aren't, so\n \/\/ havoc ensues.\n \/\/\n \/\/ In any case, if we're cross compiling, this generally just means that we\n \/\/ can't trust all the output of llvm-config becaues it might be targeted\n \/\/ for the host rather than the target. As a result a bunch of blocks below\n \/\/ are gated on `if !is_crossed`\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n let is_crossed = target != host;\n\n let optional_components =\n [\"x86\", \"arm\", \"aarch64\", \"mips\", \"powerpc\", \"pnacl\", \"systemz\", \"jsbackend\"];\n\n \/\/ FIXME: surely we don't need all these components, right? 
Stuff like mcjit\n \/\/ or interpreter the compiler itself never uses.\n let required_components = &[\"ipo\",\n \"bitreader\",\n \"bitwriter\",\n \"linker\",\n \"asmparser\",\n \"mcjit\",\n \"interpreter\",\n \"instrumentation\"];\n\n let components = output(Command::new(&llvm_config).arg(\"--components\"));\n let mut components = components.split_whitespace().collect::<Vec<_>>();\n components.retain(|c| optional_components.contains(c) || required_components.contains(c));\n\n for component in required_components {\n if !components.contains(component) {\n panic!(\"require llvm component {} but wasn't found\", component);\n }\n }\n\n for component in components.iter() {\n println!(\"cargo:rustc-cfg=llvm_component=\\\"{}\\\"\", component);\n }\n\n \/\/ Link in our own LLVM shims, compiled with the same flags as LLVM\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--cxxflags\");\n let cxxflags = output(&mut cmd);\n let mut cfg = gcc::Config::new();\n for flag in cxxflags.split_whitespace() {\n \/\/ Ignore flags like `-m64` when we're doing a cross build\n if is_crossed && flag.starts_with(\"-m\") {\n continue;\n }\n cfg.flag(flag);\n }\n\n for component in &components[..] {\n let mut flag = String::from(\"-DLLVM_COMPONENT_\");\n flag.push_str(&component.to_uppercase());\n cfg.flag(&flag);\n }\n\n if env::var_os(\"LLVM_RUSTLLVM\").is_some() {\n cfg.flag(\"-DLLVM_RUSTLLVM\");\n }\n\n cfg.file(\"..\/rustllvm\/PassWrapper.cpp\")\n .file(\"..\/rustllvm\/RustWrapper.cpp\")\n .file(\"..\/rustllvm\/ArchiveWrapper.cpp\")\n .cpp(true)\n .cpp_link_stdlib(None) \/\/ we handle this below\n .compile(\"librustllvm.a\");\n\n \/\/ Link in all LLVM libraries, if we're uwring the \"wrong\" llvm-config then\n \/\/ we don't pick up system libs because unfortunately they're for the host\n \/\/ of llvm-config, not the target that we're attempting to link.\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--libs\");\n if !is_crossed {\n cmd.arg(\"--system-libs\");\n }\n cmd.args(&components[..]);\n\n for lib in output(&mut cmd).split_whitespace() {\n let name = if lib.starts_with(\"-l\") {\n &lib[2..]\n } else if lib.starts_with(\"-\") {\n &lib[1..]\n } else if Path::new(lib).exists() {\n \/\/ On MSVC llvm-config will print the full name to libraries, but\n \/\/ we're only interested in the name part\n let name = Path::new(lib).file_name().unwrap().to_str().unwrap();\n name.trim_right_matches(\".lib\")\n } else if lib.ends_with(\".lib\") {\n \/\/ Some MSVC libraries just come up with `.lib` tacked on, so chop\n \/\/ that off\n lib.trim_right_matches(\".lib\")\n } else {\n continue;\n };\n\n \/\/ Don't need or want this library, but LLVM's CMake build system\n \/\/ doesn't provide a way to disable it, so filter it here even though we\n \/\/ may or may not have built it. We don't reference anything from this\n \/\/ library and it otherwise may just pull in extra dependencies on\n \/\/ libedit which we don't want\n if name == \"LLVMLineEditor\" {\n continue;\n }\n\n let kind = if name.starts_with(\"LLVM\") {\n \"static\"\n } else {\n \"dylib\"\n };\n println!(\"cargo:rustc-link-lib={}={}\", kind, name);\n }\n\n \/\/ LLVM ldflags\n \/\/\n \/\/ If we're a cross-compile of LLVM then unfortunately we can't trust these\n \/\/ ldflags (largely where all the LLVM libs are located). 
Currently just\n \/\/ hack around this by replacing the host triple with the target and pray\n \/\/ that those -L directories are the same!\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--ldflags\");\n for lib in output(&mut cmd).split_whitespace() {\n if lib.starts_with(\"-LIBPATH:\") {\n println!(\"cargo:rustc-link-search=native={}\", &lib[9..]);\n } else if is_crossed {\n if lib.starts_with(\"-L\") {\n println!(\"cargo:rustc-link-search=native={}\",\n lib[2..].replace(&host, &target));\n }\n } else if lib.starts_with(\"-l\") {\n println!(\"cargo:rustc-link-lib={}\", &lib[2..]);\n } else if lib.starts_with(\"-L\") {\n println!(\"cargo:rustc-link-search=native={}\", &lib[2..]);\n }\n }\n\n \/\/ C++ runtime library\n if !target.contains(\"msvc\") {\n if let Some(s) = env::var_os(\"LLVM_STATIC_STDCPP\") {\n assert!(!cxxflags.contains(\"stdlib=libc++\"));\n let path = PathBuf::from(s);\n println!(\"cargo:rustc-link-search=native={}\",\n path.parent().unwrap().display());\n println!(\"cargo:rustc-link-lib=static=stdc++\");\n } else if cxxflags.contains(\"stdlib=libc++\") {\n println!(\"cargo:rustc-link-lib=c++\");\n } else {\n println!(\"cargo:rustc-link-lib=stdc++\");\n }\n }\n}\n<commit_msg>Force static linking of LLVM<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate gcc;\nextern crate build_helper;\n\nuse std::process::Command;\nuse std::env;\nuse std::path::{PathBuf, Path};\n\nuse build_helper::output;\n\nfn main() {\n println!(\"cargo:rustc-cfg=cargobuild\");\n\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let llvm_config = env::var_os(\"LLVM_CONFIG\")\n .map(PathBuf::from)\n .unwrap_or_else(|| {\n if let Some(dir) = env::var_os(\"CARGO_TARGET_DIR\").map(PathBuf::from) {\n let to_test = dir.parent()\n .unwrap()\n .parent()\n .unwrap()\n .join(&target)\n .join(\"llvm\/bin\/llvm-config\");\n if Command::new(&to_test).output().is_ok() {\n return to_test;\n }\n }\n PathBuf::from(\"llvm-config\")\n });\n\n println!(\"cargo:rerun-if-changed={}\", llvm_config.display());\n\n \/\/ Test whether we're cross-compiling LLVM. This is a pretty rare case\n \/\/ currently where we're producing an LLVM for a different platform than\n \/\/ what this build script is currently running on.\n \/\/\n \/\/ In that case, there's no guarantee that we can actually run the target,\n \/\/ so the build system works around this by giving us the LLVM_CONFIG for\n \/\/ the host platform. This only really works if the host LLVM and target\n \/\/ LLVM are compiled the same way, but for us that's typically the case.\n \/\/\n \/\/ We *want* detect this cross compiling situation by asking llvm-config\n \/\/ what it's host-target is. If that's not the TARGET, then we're cross\n \/\/ compiling. Unfortunately `llvm-config` seems either be buggy, or we're\n \/\/ misconfiguring it, because the `i686-pc-windows-gnu` build of LLVM will\n \/\/ report itself with a `--host-target` of `x86_64-pc-windows-gnu`. 
This\n \/\/ tricks us into thinking we're doing a cross build when we aren't, so\n \/\/ havoc ensues.\n \/\/\n \/\/ In any case, if we're cross compiling, this generally just means that we\n \/\/ can't trust all the output of llvm-config becaues it might be targeted\n \/\/ for the host rather than the target. As a result a bunch of blocks below\n \/\/ are gated on `if !is_crossed`\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n let is_crossed = target != host;\n\n let optional_components =\n [\"x86\", \"arm\", \"aarch64\", \"mips\", \"powerpc\", \"pnacl\", \"systemz\", \"jsbackend\"];\n\n \/\/ FIXME: surely we don't need all these components, right? Stuff like mcjit\n \/\/ or interpreter the compiler itself never uses.\n let required_components = &[\"ipo\",\n \"bitreader\",\n \"bitwriter\",\n \"linker\",\n \"asmparser\",\n \"mcjit\",\n \"interpreter\",\n \"instrumentation\"];\n\n let components = output(Command::new(&llvm_config).arg(\"--components\"));\n let mut components = components.split_whitespace().collect::<Vec<_>>();\n components.retain(|c| optional_components.contains(c) || required_components.contains(c));\n\n for component in required_components {\n if !components.contains(component) {\n panic!(\"require llvm component {} but wasn't found\", component);\n }\n }\n\n for component in components.iter() {\n println!(\"cargo:rustc-cfg=llvm_component=\\\"{}\\\"\", component);\n }\n\n \/\/ Link in our own LLVM shims, compiled with the same flags as LLVM\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--cxxflags\");\n let cxxflags = output(&mut cmd);\n let mut cfg = gcc::Config::new();\n for flag in cxxflags.split_whitespace() {\n \/\/ Ignore flags like `-m64` when we're doing a cross build\n if is_crossed && flag.starts_with(\"-m\") {\n continue;\n }\n cfg.flag(flag);\n }\n\n for component in &components[..] 
{\n let mut flag = String::from(\"-DLLVM_COMPONENT_\");\n flag.push_str(&component.to_uppercase());\n cfg.flag(&flag);\n }\n\n if env::var_os(\"LLVM_RUSTLLVM\").is_some() {\n cfg.flag(\"-DLLVM_RUSTLLVM\");\n }\n\n cfg.file(\"..\/rustllvm\/PassWrapper.cpp\")\n .file(\"..\/rustllvm\/RustWrapper.cpp\")\n .file(\"..\/rustllvm\/ArchiveWrapper.cpp\")\n .cpp(true)\n .cpp_link_stdlib(None) \/\/ we handle this below\n .compile(\"librustllvm.a\");\n\n \/\/ Link in all LLVM libraries, if we're uwring the \"wrong\" llvm-config then\n \/\/ we don't pick up system libs because unfortunately they're for the host\n \/\/ of llvm-config, not the target that we're attempting to link.\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--libs\");\n\n \/\/ Force static linking with \"--link-static\" if available.\n let mut version_cmd = Command::new(&llvm_config);\n version_cmd.arg(\"--version\");\n let version_output = output(&mut version_cmd);\n let mut parts = version_output.split('.');\n if let (Some(major), Some(minor)) = (parts.next().and_then(|s| s.parse::<u32>().ok()),\n parts.next().and_then(|s| s.parse::<u32>().ok())) {\n if major > 3 || (major == 3 && minor >= 8) {\n cmd.arg(\"--link-static\");\n }\n }\n\n if !is_crossed {\n cmd.arg(\"--system-libs\");\n }\n cmd.args(&components[..]);\n\n for lib in output(&mut cmd).split_whitespace() {\n let name = if lib.starts_with(\"-l\") {\n &lib[2..]\n } else if lib.starts_with(\"-\") {\n &lib[1..]\n } else if Path::new(lib).exists() {\n \/\/ On MSVC llvm-config will print the full name to libraries, but\n \/\/ we're only interested in the name part\n let name = Path::new(lib).file_name().unwrap().to_str().unwrap();\n name.trim_right_matches(\".lib\")\n } else if lib.ends_with(\".lib\") {\n \/\/ Some MSVC libraries just come up with `.lib` tacked on, so chop\n \/\/ that off\n lib.trim_right_matches(\".lib\")\n } else {\n continue;\n };\n\n \/\/ Don't need or want this library, but LLVM's CMake build system\n \/\/ doesn't provide a way to disable it, so filter it here even though we\n \/\/ may or may not have built it. We don't reference anything from this\n \/\/ library and it otherwise may just pull in extra dependencies on\n \/\/ libedit which we don't want\n if name == \"LLVMLineEditor\" {\n continue;\n }\n\n let kind = if name.starts_with(\"LLVM\") {\n \"static\"\n } else {\n \"dylib\"\n };\n println!(\"cargo:rustc-link-lib={}={}\", kind, name);\n }\n\n \/\/ LLVM ldflags\n \/\/\n \/\/ If we're a cross-compile of LLVM then unfortunately we can't trust these\n \/\/ ldflags (largely where all the LLVM libs are located). 
Currently just\n \/\/ hack around this by replacing the host triple with the target and pray\n \/\/ that those -L directories are the same!\n let mut cmd = Command::new(&llvm_config);\n cmd.arg(\"--ldflags\");\n for lib in output(&mut cmd).split_whitespace() {\n if lib.starts_with(\"-LIBPATH:\") {\n println!(\"cargo:rustc-link-search=native={}\", &lib[9..]);\n } else if is_crossed {\n if lib.starts_with(\"-L\") {\n println!(\"cargo:rustc-link-search=native={}\",\n lib[2..].replace(&host, &target));\n }\n } else if lib.starts_with(\"-l\") {\n println!(\"cargo:rustc-link-lib={}\", &lib[2..]);\n } else if lib.starts_with(\"-L\") {\n println!(\"cargo:rustc-link-search=native={}\", &lib[2..]);\n }\n }\n\n \/\/ C++ runtime library\n if !target.contains(\"msvc\") {\n if let Some(s) = env::var_os(\"LLVM_STATIC_STDCPP\") {\n assert!(!cxxflags.contains(\"stdlib=libc++\"));\n let path = PathBuf::from(s);\n println!(\"cargo:rustc-link-search=native={}\",\n path.parent().unwrap().display());\n println!(\"cargo:rustc-link-lib=static=stdc++\");\n } else if cxxflags.contains(\"stdlib=libc++\") {\n println!(\"cargo:rustc-link-lib=c++\");\n } else {\n println!(\"cargo:rustc-link-lib=stdc++\");\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Infrastructure for compiler plugins.\n\/\/!\n\/\/! Plugins are Rust libraries which extend the behavior of `rustc`\n\/\/! in various ways.\n\/\/!\n\/\/! Plugin authors will use the `Registry` type re-exported by\n\/\/! this module, along with its methods. The rest of the module\n\/\/! is for use by `rustc` itself.\n\/\/!\n\/\/! To define a plugin, build a dylib crate with a\n\/\/! `#[plugin_registrar]` function:\n\/\/!\n\/\/! ```no_run\n\/\/! #![crate_name = \"myplugin\"]\n\/\/! #![crate_type = \"dylib\"]\n\/\/! #![feature(plugin_registrar)]\n\/\/! #![feature(rustc_private)]\n\/\/!\n\/\/! extern crate rustc_plugin;\n\/\/! extern crate syntax;\n\/\/! extern crate syntax_pos;\n\/\/!\n\/\/! use rustc_plugin::Registry;\n\/\/! use syntax::ext::base::{ExtCtxt, MacResult};\n\/\/! use syntax_pos::Span;\n\/\/! use syntax::tokenstream::TokenTree;\n\/\/!\n\/\/! #[plugin_registrar]\n\/\/! pub fn plugin_registrar(reg: &mut Registry) {\n\/\/! reg.register_macro(\"mymacro\", expand_mymacro);\n\/\/! }\n\/\/!\n\/\/! fn expand_mymacro(cx: &mut ExtCtxt, span: Span, tt: &[TokenTree]) -> Box<MacResult> {\n\/\/! unimplemented!()\n\/\/! }\n\/\/!\n\/\/! # fn main() {}\n\/\/! ```\n\/\/!\n\/\/! WARNING: We currently don't check that the registrar function\n\/\/! has the appropriate type!\n\/\/!\n\/\/! To use a plugin while compiling another crate:\n\/\/!\n\/\/! ```rust\n\/\/! #![feature(plugin)]\n\/\/! #![plugin(myplugin)]\n\/\/! ```\n\/\/!\n\/\/! See the [`plugin` feature](..\/unstable-book\/language-features\/plugin.html) of\n\/\/! 
the Unstable Book for more examples.\n\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n\n#![feature(rustc_diagnostic_macros)]\n\n#[macro_use] extern crate syntax;\n\nextern crate rustc;\nextern crate rustc_metadata;\nextern crate syntax_pos;\nextern crate rustc_errors as errors;\n\npub use self::registry::Registry;\n\nmod diagnostics;\npub mod registry;\npub mod load;\npub mod build;\n\n__build_diagnostic_array! { librustc_plugin, DIAGNOSTICS }\n<commit_msg>[nll] librustc_plugin: enable feature(nll) for bootstrap<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Infrastructure for compiler plugins.\n\/\/!\n\/\/! Plugins are Rust libraries which extend the behavior of `rustc`\n\/\/! in various ways.\n\/\/!\n\/\/! Plugin authors will use the `Registry` type re-exported by\n\/\/! this module, along with its methods. The rest of the module\n\/\/! is for use by `rustc` itself.\n\/\/!\n\/\/! To define a plugin, build a dylib crate with a\n\/\/! `#[plugin_registrar]` function:\n\/\/!\n\/\/! ```no_run\n\/\/! #![crate_name = \"myplugin\"]\n\/\/! #![crate_type = \"dylib\"]\n\/\/! #![feature(plugin_registrar)]\n\/\/! #![feature(rustc_private)]\n\/\/!\n\/\/! extern crate rustc_plugin;\n\/\/! extern crate syntax;\n\/\/! extern crate syntax_pos;\n\/\/!\n\/\/! use rustc_plugin::Registry;\n\/\/! use syntax::ext::base::{ExtCtxt, MacResult};\n\/\/! use syntax_pos::Span;\n\/\/! use syntax::tokenstream::TokenTree;\n\/\/!\n\/\/! #[plugin_registrar]\n\/\/! pub fn plugin_registrar(reg: &mut Registry) {\n\/\/! reg.register_macro(\"mymacro\", expand_mymacro);\n\/\/! }\n\/\/!\n\/\/! fn expand_mymacro(cx: &mut ExtCtxt, span: Span, tt: &[TokenTree]) -> Box<MacResult> {\n\/\/! unimplemented!()\n\/\/! }\n\/\/!\n\/\/! # fn main() {}\n\/\/! ```\n\/\/!\n\/\/! WARNING: We currently don't check that the registrar function\n\/\/! has the appropriate type!\n\/\/!\n\/\/! To use a plugin while compiling another crate:\n\/\/!\n\/\/! ```rust\n\/\/! #![feature(plugin)]\n\/\/! #![plugin(myplugin)]\n\/\/! ```\n\/\/!\n\/\/! See the [`plugin` feature](..\/unstable-book\/language-features\/plugin.html) of\n\/\/! the Unstable Book for more examples.\n\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n\n#![cfg_attr(not(stage0), feature(nll))]\n#![feature(rustc_diagnostic_macros)]\n\n#[macro_use] extern crate syntax;\n\nextern crate rustc;\nextern crate rustc_metadata;\nextern crate syntax_pos;\nextern crate rustc_errors as errors;\n\npub use self::registry::Registry;\n\nmod diagnostics;\npub mod registry;\npub mod load;\npub mod build;\n\n__build_diagnostic_array! { librustc_plugin, DIAGNOSTICS }\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse prelude::v1::*;\n\nuse ffi::CStr;\nuse io;\nuse libc::{self, c_int, size_t, sockaddr, socklen_t};\nuse net::{SocketAddr, Shutdown};\nuse str;\nuse sys::fd::FileDesc;\nuse sys_common::{AsInner, FromInner, IntoInner};\nuse sys_common::net::{getsockopt, setsockopt};\nuse time::Duration;\n\npub use sys::{cvt, cvt_r};\npub extern crate libc as netc;\n\npub type wrlen_t = size_t;\n\n\/\/ See below for the usage of SOCK_CLOEXEC, but this constant is only defined on\n\/\/ Linux currently (e.g. support doesn't exist on other platforms). In order to\n\/\/ get name resolution to work and things to compile we just define a dummy\n\/\/ SOCK_CLOEXEC here for other platforms. Note that the dummy constant isn't\n\/\/ actually ever used (the blocks below are wrapped in `if cfg!` as well.\n#[cfg(target_os = \"linux\")]\nuse libc::SOCK_CLOEXEC;\n#[cfg(not(target_os = \"linux\"))]\nconst SOCK_CLOEXEC: c_int = 0;\n\npub struct Socket(FileDesc);\n\npub fn init() {}\n\npub fn cvt_gai(err: c_int) -> io::Result<()> {\n if err == 0 { return Ok(()) }\n\n let detail = unsafe {\n str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap()\n .to_owned()\n };\n Err(io::Error::new(io::ErrorKind::Other,\n &format!(\"failed to lookup address information: {}\",\n detail)[..]))\n}\n\nimpl Socket {\n pub fn new(addr: &SocketAddr, ty: c_int) -> io::Result<Socket> {\n let fam = match *addr {\n SocketAddr::V4(..) => libc::AF_INET,\n SocketAddr::V6(..) => libc::AF_INET6,\n };\n Socket::new_raw(fam, ty)\n }\n\n pub fn new_raw(fam: c_int, ty: c_int) -> io::Result<Socket> {\n unsafe {\n \/\/ On linux we first attempt to pass the SOCK_CLOEXEC flag to\n \/\/ atomically create the socket and set it as CLOEXEC. 
Support for\n \/\/ this option, however, was added in 2.6.27, and we still support\n \/\/ 2.6.18 as a kernel, so if the returned error is EINVAL we\n \/\/ fallthrough to the fallback.\n if cfg!(linux) {\n match cvt(libc::socket(fam, ty | SOCK_CLOEXEC, 0)) {\n Ok(fd) => return Ok(Socket(FileDesc::new(fd))),\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {}\n Err(e) => return Err(e),\n }\n }\n\n let fd = cvt(libc::socket(fam, ty, 0))?;\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(Socket(fd))\n }\n }\n\n pub fn new_pair(fam: c_int, ty: c_int) -> io::Result<(Socket, Socket)> {\n unsafe {\n let mut fds = [0, 0];\n\n \/\/ Like above, see if we can set cloexec atomically\n if cfg!(linux) {\n match cvt(libc::socketpair(fam, ty | SOCK_CLOEXEC, 0, fds.as_mut_ptr())) {\n Ok(_) => {\n return Ok((Socket(FileDesc::new(fds[0])), Socket(FileDesc::new(fds[1]))));\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {},\n Err(e) => return Err(e),\n }\n }\n\n cvt(libc::socketpair(fam, ty, 0, fds.as_mut_ptr()))?;\n let a = FileDesc::new(fds[0]);\n let b = FileDesc::new(fds[1]);\n a.set_cloexec()?;\n b.set_cloexec()?;\n Ok((Socket(a), Socket(b)))\n }\n }\n\n pub fn accept(&self, storage: *mut sockaddr, len: *mut socklen_t)\n -> io::Result<Socket> {\n \/\/ Unfortunately the only known way right now to accept a socket and\n \/\/ atomically set the CLOEXEC flag is to use the `accept4` syscall on\n \/\/ Linux. This was added in 2.6.28, however, and because we support\n \/\/ 2.6.18 we must detect this support dynamically.\n if cfg!(target_os = \"linux\") {\n weak! {\n fn accept4(c_int, *mut sockaddr, *mut socklen_t, c_int) -> c_int\n }\n if let Some(accept) = accept4.get() {\n let res = cvt_r(|| unsafe {\n accept(self.0.raw(), storage, len, SOCK_CLOEXEC)\n });\n match res {\n Ok(fd) => return Ok(Socket(FileDesc::new(fd))),\n Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => {}\n Err(e) => return Err(e),\n }\n }\n }\n\n let fd = cvt_r(|| unsafe {\n libc::accept(self.0.raw(), storage, len)\n })?;\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(Socket(fd))\n }\n\n pub fn duplicate(&self) -> io::Result<Socket> {\n self.0.duplicate().map(Socket)\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n self.0.read_to_end(buf)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result<usize> {\n self.0.write(buf)\n }\n\n pub fn set_timeout(&self, dur: Option<Duration>, kind: libc::c_int) -> io::Result<()> {\n let timeout = match dur {\n Some(dur) => {\n if dur.as_secs() == 0 && dur.subsec_nanos() == 0 {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot set a 0 duration timeout\"));\n }\n\n let secs = if dur.as_secs() > libc::time_t::max_value() as u64 {\n libc::time_t::max_value()\n } else {\n dur.as_secs() as libc::time_t\n };\n let mut timeout = libc::timeval {\n tv_sec: secs,\n tv_usec: (dur.subsec_nanos() \/ 1000) as libc::suseconds_t,\n };\n if timeout.tv_sec == 0 && timeout.tv_usec == 0 {\n timeout.tv_usec = 1;\n }\n timeout\n }\n None => {\n libc::timeval {\n tv_sec: 0,\n tv_usec: 0,\n }\n }\n };\n setsockopt(self, libc::SOL_SOCKET, kind, timeout)\n }\n\n pub fn timeout(&self, kind: libc::c_int) -> io::Result<Option<Duration>> {\n let raw: libc::timeval = getsockopt(self, libc::SOL_SOCKET, kind)?;\n if raw.tv_sec == 0 && raw.tv_usec == 0 {\n Ok(None)\n } else {\n let sec = raw.tv_sec as u64;\n let nsec = (raw.tv_usec as u32) * 1000;\n 
Ok(Some(Duration::new(sec, nsec)))\n }\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n cvt(unsafe { libc::shutdown(self.0.raw(), how) })?;\n Ok(())\n }\n\n pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> {\n setsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY, nodelay as c_int)\n }\n\n pub fn nodelay(&self) -> io::Result<bool> {\n let raw: c_int = getsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY)?;\n Ok(raw != 0)\n }\n\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n let mut nonblocking = nonblocking as libc::c_ulong;\n cvt(unsafe { libc::ioctl(*self.as_inner(), libc::FIONBIO, &mut nonblocking) }).map(|_| ())\n }\n\n pub fn take_error(&self) -> io::Result<Option<io::Error>> {\n let raw: c_int = getsockopt(self, libc::SOL_SOCKET, libc::SO_ERROR)?;\n if raw == 0 {\n Ok(None)\n } else {\n Ok(Some(io::Error::from_raw_os_error(raw as i32)))\n }\n }\n}\n\nimpl AsInner<c_int> for Socket {\n fn as_inner(&self) -> &c_int { self.0.as_inner() }\n}\n\nimpl FromInner<c_int> for Socket {\n fn from_inner(fd: c_int) -> Socket { Socket(FileDesc::new(fd)) }\n}\n\nimpl IntoInner<c_int> for Socket {\n fn into_inner(self) -> c_int { self.0.into_raw() }\n}\n<commit_msg>Auto merge of #34946 - alexcrichton:fix-cfg, r=brson<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse prelude::v1::*;\n\nuse ffi::CStr;\nuse io;\nuse libc::{self, c_int, size_t, sockaddr, socklen_t};\nuse net::{SocketAddr, Shutdown};\nuse str;\nuse sys::fd::FileDesc;\nuse sys_common::{AsInner, FromInner, IntoInner};\nuse sys_common::net::{getsockopt, setsockopt};\nuse time::Duration;\n\npub use sys::{cvt, cvt_r};\npub extern crate libc as netc;\n\npub type wrlen_t = size_t;\n\n\/\/ See below for the usage of SOCK_CLOEXEC, but this constant is only defined on\n\/\/ Linux currently (e.g. support doesn't exist on other platforms). In order to\n\/\/ get name resolution to work and things to compile we just define a dummy\n\/\/ SOCK_CLOEXEC here for other platforms. Note that the dummy constant isn't\n\/\/ actually ever used (the blocks below are wrapped in `if cfg!` as well.\n#[cfg(target_os = \"linux\")]\nuse libc::SOCK_CLOEXEC;\n#[cfg(not(target_os = \"linux\"))]\nconst SOCK_CLOEXEC: c_int = 0;\n\npub struct Socket(FileDesc);\n\npub fn init() {}\n\npub fn cvt_gai(err: c_int) -> io::Result<()> {\n if err == 0 { return Ok(()) }\n\n let detail = unsafe {\n str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap()\n .to_owned()\n };\n Err(io::Error::new(io::ErrorKind::Other,\n &format!(\"failed to lookup address information: {}\",\n detail)[..]))\n}\n\nimpl Socket {\n pub fn new(addr: &SocketAddr, ty: c_int) -> io::Result<Socket> {\n let fam = match *addr {\n SocketAddr::V4(..) => libc::AF_INET,\n SocketAddr::V6(..) 
=> libc::AF_INET6,\n };\n Socket::new_raw(fam, ty)\n }\n\n pub fn new_raw(fam: c_int, ty: c_int) -> io::Result<Socket> {\n unsafe {\n \/\/ On linux we first attempt to pass the SOCK_CLOEXEC flag to\n \/\/ atomically create the socket and set it as CLOEXEC. Support for\n \/\/ this option, however, was added in 2.6.27, and we still support\n \/\/ 2.6.18 as a kernel, so if the returned error is EINVAL we\n \/\/ fallthrough to the fallback.\n if cfg!(target_os = \"linux\") {\n match cvt(libc::socket(fam, ty | SOCK_CLOEXEC, 0)) {\n Ok(fd) => return Ok(Socket(FileDesc::new(fd))),\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {}\n Err(e) => return Err(e),\n }\n }\n\n let fd = cvt(libc::socket(fam, ty, 0))?;\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(Socket(fd))\n }\n }\n\n pub fn new_pair(fam: c_int, ty: c_int) -> io::Result<(Socket, Socket)> {\n unsafe {\n let mut fds = [0, 0];\n\n \/\/ Like above, see if we can set cloexec atomically\n if cfg!(target_os = \"linux\") {\n match cvt(libc::socketpair(fam, ty | SOCK_CLOEXEC, 0, fds.as_mut_ptr())) {\n Ok(_) => {\n return Ok((Socket(FileDesc::new(fds[0])), Socket(FileDesc::new(fds[1]))));\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {},\n Err(e) => return Err(e),\n }\n }\n\n cvt(libc::socketpair(fam, ty, 0, fds.as_mut_ptr()))?;\n let a = FileDesc::new(fds[0]);\n let b = FileDesc::new(fds[1]);\n a.set_cloexec()?;\n b.set_cloexec()?;\n Ok((Socket(a), Socket(b)))\n }\n }\n\n pub fn accept(&self, storage: *mut sockaddr, len: *mut socklen_t)\n -> io::Result<Socket> {\n \/\/ Unfortunately the only known way right now to accept a socket and\n \/\/ atomically set the CLOEXEC flag is to use the `accept4` syscall on\n \/\/ Linux. This was added in 2.6.28, however, and because we support\n \/\/ 2.6.18 we must detect this support dynamically.\n if cfg!(target_os = \"linux\") {\n weak! 
{\n fn accept4(c_int, *mut sockaddr, *mut socklen_t, c_int) -> c_int\n }\n if let Some(accept) = accept4.get() {\n let res = cvt_r(|| unsafe {\n accept(self.0.raw(), storage, len, SOCK_CLOEXEC)\n });\n match res {\n Ok(fd) => return Ok(Socket(FileDesc::new(fd))),\n Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => {}\n Err(e) => return Err(e),\n }\n }\n }\n\n let fd = cvt_r(|| unsafe {\n libc::accept(self.0.raw(), storage, len)\n })?;\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(Socket(fd))\n }\n\n pub fn duplicate(&self) -> io::Result<Socket> {\n self.0.duplicate().map(Socket)\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n self.0.read_to_end(buf)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result<usize> {\n self.0.write(buf)\n }\n\n pub fn set_timeout(&self, dur: Option<Duration>, kind: libc::c_int) -> io::Result<()> {\n let timeout = match dur {\n Some(dur) => {\n if dur.as_secs() == 0 && dur.subsec_nanos() == 0 {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot set a 0 duration timeout\"));\n }\n\n let secs = if dur.as_secs() > libc::time_t::max_value() as u64 {\n libc::time_t::max_value()\n } else {\n dur.as_secs() as libc::time_t\n };\n let mut timeout = libc::timeval {\n tv_sec: secs,\n tv_usec: (dur.subsec_nanos() \/ 1000) as libc::suseconds_t,\n };\n if timeout.tv_sec == 0 && timeout.tv_usec == 0 {\n timeout.tv_usec = 1;\n }\n timeout\n }\n None => {\n libc::timeval {\n tv_sec: 0,\n tv_usec: 0,\n }\n }\n };\n setsockopt(self, libc::SOL_SOCKET, kind, timeout)\n }\n\n pub fn timeout(&self, kind: libc::c_int) -> io::Result<Option<Duration>> {\n let raw: libc::timeval = getsockopt(self, libc::SOL_SOCKET, kind)?;\n if raw.tv_sec == 0 && raw.tv_usec == 0 {\n Ok(None)\n } else {\n let sec = raw.tv_sec as u64;\n let nsec = (raw.tv_usec as u32) * 1000;\n Ok(Some(Duration::new(sec, nsec)))\n }\n }\n\n pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {\n let how = match how {\n Shutdown::Write => libc::SHUT_WR,\n Shutdown::Read => libc::SHUT_RD,\n Shutdown::Both => libc::SHUT_RDWR,\n };\n cvt(unsafe { libc::shutdown(self.0.raw(), how) })?;\n Ok(())\n }\n\n pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> {\n setsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY, nodelay as c_int)\n }\n\n pub fn nodelay(&self) -> io::Result<bool> {\n let raw: c_int = getsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY)?;\n Ok(raw != 0)\n }\n\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n let mut nonblocking = nonblocking as libc::c_ulong;\n cvt(unsafe { libc::ioctl(*self.as_inner(), libc::FIONBIO, &mut nonblocking) }).map(|_| ())\n }\n\n pub fn take_error(&self) -> io::Result<Option<io::Error>> {\n let raw: c_int = getsockopt(self, libc::SOL_SOCKET, libc::SO_ERROR)?;\n if raw == 0 {\n Ok(None)\n } else {\n Ok(Some(io::Error::from_raw_os_error(raw as i32)))\n }\n }\n}\n\nimpl AsInner<c_int> for Socket {\n fn as_inner(&self) -> &c_int { self.0.as_inner() }\n}\n\nimpl FromInner<c_int> for Socket {\n fn from_inner(fd: c_int) -> Socket { Socket(FileDesc::new(fd)) }\n}\n\nimpl IntoInner<c_int> for Socket {\n fn into_inner(self) -> c_int { self.0.into_raw() }\n}\n<|endoftext|>"} {"text":"<commit_before>\/**\n * Mid-level wrapper functions that provide rust-style funtion names. 
Includes public exports of\n * core types and constants.\n *\/\n\nuse core::libc::*;\n\npub use support::consts::*;\npub use support::types::*;\n\npub fn init() -> c_int {\n unsafe { ::ll::glfwInit() }\n}\n\npub fn terminate() {\n unsafe { ::ll::glfwTerminate(); }\n}\n\npub fn get_version() -> (c_int, c_int, c_int) {\n unsafe {\n let mut major = 0,\n minor = 0,\n rev = 0;\n ::ll::glfwGetVersion(&major, &minor, &rev);\n (major, minor, rev)\n }\n}\n\npub fn get_version_string() -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetVersionString()) }\n}\n\npub fn set_error_callback(cbfun: GLFWerrorfun) -> GLFWerrorfun {\n unsafe { ::ll::glfwSetErrorCallback(cbfun) }\n}\n\npub fn get_monitors() -> ~[*GLFWmonitor] {\n unsafe {\n let mut count = 0;\n let ptr = ::ll::glfwGetMonitors(&count);\n vec::from_buf(ptr, count as uint)\n }\n}\n\npub fn get_primary_monitor() -> *GLFWmonitor {\n unsafe { ::ll::glfwGetPrimaryMonitor() }\n}\n\npub fn get_monitor_pos(monitor: *GLFWmonitor) -> (c_int, c_int) {\n unsafe {\n let mut xpos = 0, ypos = 0;\n ::ll::glfwGetMonitorPos(monitor, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn get_monitor_physical_size(monitor: *GLFWmonitor) -> (c_int, c_int) {\n unsafe {\n let mut width = 0, height = 0;\n ::ll::glfwGetMonitorPhysicalSize(monitor, &width, &height);\n (width, height)\n }\n}\n\npub fn get_monitor_name(monitor: *GLFWmonitor) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetMonitorName(monitor)) }\n}\n\npub fn get_video_modes(monitor: *GLFWmonitor) -> ~[GLFWvidmode] {\n unsafe {\n let mut count = 0;\n let ptr = ::ll::glfwGetVideoModes(monitor, &count);\n vec::from_buf(ptr, count as uint)\n }\n}\n\npub fn get_video_mode(monitor: *GLFWmonitor) -> GLFWvidmode {\n unsafe { ::ll::glfwGetVideoMode(monitor) }\n}\n\npub fn set_gamma(monitor: *GLFWmonitor, gamma: c_float) {\n unsafe { ::ll::glfwSetGamma(monitor, gamma); }\n}\n\npub fn get_gamma_ramp(monitor: *GLFWmonitor) -> GLFWgammaramp {\n let mut ramp = ::GammaRamp {\n red: [0, ..GAMMA_RAMP_SIZE],\n green: [0, ..GAMMA_RAMP_SIZE],\n blue: [0, ..GAMMA_RAMP_SIZE],\n };\n unsafe { ::ll::glfwGetGammaRamp(monitor, &ramp); }\n return ramp;\n}\n\npub fn set_gamma_ramp(monitor: *GLFWmonitor, ramp: &GLFWgammaramp) {\n unsafe { ::ll::glfwSetGammaRamp(monitor, ramp); }\n}\n\npub fn set_monitor_callback(cbfun: GLFWmonitorfun) -> GLFWmonitorfun {\n unsafe { ::ll::glfwSetMonitorCallback(cbfun) }\n}\n\npub fn default_window_hints() {\n unsafe { ::ll::glfwDefaultWindowHints(); }\n}\n\npub fn window_hint(target: c_int, hint: c_int) {\n unsafe { ::ll::glfwWindowHint(target, hint); }\n}\n\npub fn create_window(width: c_int, height: c_int, title: &str, monitor: *GLFWmonitor, share: *GLFWwindow) -> *GLFWwindow {\n unsafe { ::ll::glfwCreateWindow(width, height, str::as_c_str(title, |a| a), monitor, share) }\n}\n\npub fn destroy_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwDestroyWindow(window); }\n}\n\npub fn window_should_close(window: *GLFWwindow) -> c_int {\n unsafe { ::ll::glfwWindowShouldClose(window) }\n}\n\npub fn set_window_should_close(window: *GLFWwindow, value: c_int) {\n unsafe { ::ll::glfwSetWindowShouldClose(window, value) }\n}\n\npub fn set_window_title(window: *GLFWwindow, title: &str) {\n unsafe { ::ll::glfwSetWindowTitle(window, str::as_c_str(title, |a| a)) }\n}\n\npub fn get_window_pos(window: *GLFWwindow) -> (c_int, c_int) {\n unsafe {\n let mut xpos = 0, ypos = 0;\n ::ll::glfwGetWindowPos(window, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn set_window_pos(window: *GLFWwindow, xpos: c_int, ypos: c_int) 
{\n unsafe { ::ll::glfwSetWindowPos(window, xpos, ypos); }\n}\n\npub fn get_window_size(window: *GLFWwindow) -> (c_int, c_int) {\n unsafe {\n let mut width = 0, height = 0;\n ::ll::glfwGetWindowSize(window, &width, &height);\n (width, height)\n }\n}\n\npub fn set_window_size(window: *GLFWwindow, width: c_int, height: c_int) {\n unsafe { ::ll::glfwSetWindowSize(window, width, height); }\n}\n\npub fn iconify_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwIconifyWindow(window); }\n}\n\npub fn restore_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwRestoreWindow(window); }\n}\n\npub fn show_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwShowWindow(window); }\n}\n\npub fn hide_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwHideWindow(window); }\n}\n\npub fn get_window_monitor(window: *GLFWwindow) -> *GLFWmonitor {\n unsafe { ::ll::glfwGetWindowMonitor(window) }\n}\n\npub fn get_window_param(window: *GLFWwindow, param: c_int) -> c_int {\n unsafe { ::ll::glfwGetWindowParam(window, param) }\n}\n\npub fn set_window_user_pointer(window: *GLFWwindow, pointer: *c_void) {\n unsafe { ::ll::glfwSetWindowUserPointer(window, pointer); }\n}\n\npub fn get_window_user_pointer(window: *GLFWwindow) -> *c_void {\n unsafe { ::ll::glfwGetWindowUserPointer(window) }\n}\n\npub fn set_window_pos_callback(window: *GLFWwindow, cbfun: GLFWwindowposfun) -> GLFWwindowposfun {\n unsafe { ::ll::glfwSetWindowPosCallback(window, cbfun) }\n}\n\npub fn set_window_size_callback(window: *GLFWwindow, cbfun: GLFWwindowsizefun) -> GLFWwindowsizefun {\n unsafe { ::ll::glfwSetWindowSizeCallback(window, cbfun) }\n}\n\npub fn set_window_close_callback(window: *GLFWwindow, cbfun: GLFWwindowclosefun) -> GLFWwindowclosefun {\n unsafe { ::ll::glfwSetWindowCloseCallback(window, cbfun) }\n}\n\npub fn set_window_refresh_callback(window: *GLFWwindow, cbfun: GLFWwindowrefreshfun) -> GLFWwindowrefreshfun {\n unsafe { ::ll::glfwSetWindowRefreshCallback(window, cbfun) }\n}\n\npub fn set_window_focus_callback(window: *GLFWwindow, cbfun: GLFWwindowfocusfun) -> GLFWwindowfocusfun {\n unsafe { ::ll::glfwSetWindowFocusCallback(window, cbfun) }\n}\n\npub fn set_window_iconify_callback(window: *GLFWwindow, cbfun: GLFWwindowiconifyfun) -> GLFWwindowiconifyfun {\n unsafe { ::ll::glfwSetWindowIconifyCallback(window, cbfun) }\n}\n\npub fn get_input_mode(window: *GLFWwindow, mode: c_int) -> c_int {\n unsafe { ::ll::glfwGetInputMode(window, mode) }\n}\n\npub fn set_input_mode(window: *GLFWwindow, mode: c_int, value: c_int) {\n unsafe { ::ll::glfwSetInputMode(window, mode, value); }\n}\n\npub fn get_key(window: *GLFWwindow, key: c_int) -> c_int {\n unsafe { ::ll::glfwGetKey(window, key) }\n}\n\npub fn get_mouse_button(window: *GLFWwindow, button: c_int) -> c_int {\n unsafe { ::ll::glfwGetMouseButton(window, button) }\n}\n\npub fn get_cursor_pos(window: *GLFWwindow) -> (c_double, c_double) {\n unsafe {\n let mut xpos = 0.0, ypos = 0.0;\n ::ll::glfwGetCursorPos(window, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn set_cursor_pos(window: *GLFWwindow, xpos: c_double, ypos: c_double) {\n unsafe { ::ll::glfwSetCursorPos(window, xpos, ypos); }\n}\n\npub fn set_key_callback(window: *GLFWwindow, cbfun: GLFWkeyfun) -> GLFWkeyfun {\n unsafe { ::ll::glfwSetKeyCallback(window, cbfun) }\n}\n\npub fn set_char_callback(window: *GLFWwindow, cbfun: GLFWcharfun) -> GLFWcharfun {\n unsafe { ::ll::glfwSetCharCallback(window, cbfun) }\n}\n\npub fn set_mouse_button_callback(window: *GLFWwindow, cbfun: GLFWmousebuttonfun) -> GLFWmousebuttonfun {\n unsafe { 
::ll::glfwSetMouseButtonCallback(window, cbfun) }\n}\n\npub fn set_cursor_pos_callback(window: *GLFWwindow, cbfun: GLFWcursorposfun) -> GLFWcursorposfun {\n unsafe { ::ll::glfwSetCursorPosCallback(window, cbfun) }\n}\n\npub fn set_cursor_enter_callback(window: *GLFWwindow, cbfun: GLFWcursorenterfun) -> GLFWcursorenterfun {\n unsafe { ::ll::glfwSetCursorEnterCallback(window, cbfun) }\n}\n\npub fn set_scroll_callback(window: *GLFWwindow, cbfun: GLFWscrollfun) -> GLFWscrollfun {\n unsafe { ::ll::glfwSetScrollCallback(window, cbfun) }\n}\n\npub fn set_clipboard_string(window: *GLFWwindow, string: &str) {\n unsafe { ::ll::glfwSetClipboardString(window, str::as_c_str(string, |a| a)); }\n}\n\npub fn get_clipboard_string(window: *GLFWwindow) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetClipboardString(window)) }\n}\n\npub fn make_context_current(window: *GLFWwindow) {\n unsafe { ::ll::glfwMakeContextCurrent(window); }\n}\n\npub fn swap_buffers(window: *GLFWwindow) {\n unsafe { ::ll::glfwSwapBuffers(window); }\n}\n\npub fn poll_events() {\n unsafe { ::ll::glfwPollEvents(); }\n}\n\npub fn wait_events() {\n unsafe { ::ll::glfwWaitEvents(); }\n}\n\npub fn get_joystick_param(joy: c_int, param: c_int) -> c_int {\n unsafe { ::ll::glfwGetJoystickParam(joy, param) }\n}\n\n\/\/ TODO\n\/\/ pub fn get_joystick_axes(joy: c_int, axes: *c_float, numaxes: c_int) -> ~[c_int] {\n\/\/ unsafe { ::ll::glfwGetJoystickAxes(joy, ...) }\n\/\/ }\n\n\/\/ TODO\n\/\/ pub fn get_joystick_buttons(joy: c_int, buttons: *c_uchar, numbuttons: c_int) -> ~[c_int] {\n\/\/ unsafe { ::ll::glfwGetJoystickButtons(joy, ...) }\n\/\/ }\n\npub fn get_joystick_name(joy: c_int) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetJoystickName(joy)) }\n}\n\npub fn get_time() -> c_double {\n unsafe { ::ll::glfwGetTime() }\n}\n\npub fn set_time(time: c_double) {\n unsafe { ::ll::glfwSetTime(time); }\n}\n\npub fn get_current_context() -> *GLFWwindow {\n unsafe { ::ll::glfwGetCurrentContext() }\n}\n\npub fn set_swap_interval(interval: c_int) {\n unsafe { ::ll::glfwSwapInterval(interval); }\n}\n\npub fn extension_supported(extension: &str) -> c_int {\n unsafe { ::ll::glfwExtensionSupported(str::as_c_str(extension, |a| a)) }\n}\n\npub fn get_proc_address(procname: &str) -> GLFWglproc {\n unsafe { ::ll::glfwGetProcAddress(str::as_c_str(procname, |a| a)) }\n}<commit_msg>Fix mutability warnings<commit_after>\/**\n * Mid-level wrapper functions that provide rust-style funtion names. 
Includes public exports of\n * core types and constants.\n *\/\n\nuse core::libc::*;\n\npub use support::consts::*;\npub use support::types::*;\n\npub fn init() -> c_int {\n unsafe { ::ll::glfwInit() }\n}\n\npub fn terminate() {\n unsafe { ::ll::glfwTerminate(); }\n}\n\npub fn get_version() -> (c_int, c_int, c_int) {\n unsafe {\n let major = 0,\n minor = 0,\n rev = 0;\n ::ll::glfwGetVersion(&major, &minor, &rev);\n (major, minor, rev)\n }\n}\n\npub fn get_version_string() -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetVersionString()) }\n}\n\npub fn set_error_callback(cbfun: GLFWerrorfun) -> GLFWerrorfun {\n unsafe { ::ll::glfwSetErrorCallback(cbfun) }\n}\n\npub fn get_monitors() -> ~[*GLFWmonitor] {\n unsafe {\n let count = 0;\n let ptr = ::ll::glfwGetMonitors(&count);\n vec::from_buf(ptr, count as uint)\n }\n}\n\npub fn get_primary_monitor() -> *GLFWmonitor {\n unsafe { ::ll::glfwGetPrimaryMonitor() }\n}\n\npub fn get_monitor_pos(monitor: *GLFWmonitor) -> (c_int, c_int) {\n unsafe {\n let xpos = 0,\n ypos = 0;\n ::ll::glfwGetMonitorPos(monitor, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn get_monitor_physical_size(monitor: *GLFWmonitor) -> (c_int, c_int) {\n unsafe {\n let width = 0,\n height = 0;\n ::ll::glfwGetMonitorPhysicalSize(monitor, &width, &height);\n (width, height)\n }\n}\n\npub fn get_monitor_name(monitor: *GLFWmonitor) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetMonitorName(monitor)) }\n}\n\npub fn get_video_modes(monitor: *GLFWmonitor) -> ~[GLFWvidmode] {\n unsafe {\n let count = 0;\n let ptr = ::ll::glfwGetVideoModes(monitor, &count);\n vec::from_buf(ptr, count as uint)\n }\n}\n\npub fn get_video_mode(monitor: *GLFWmonitor) -> GLFWvidmode {\n unsafe { ::ll::glfwGetVideoMode(monitor) }\n}\n\npub fn set_gamma(monitor: *GLFWmonitor, gamma: c_float) {\n unsafe { ::ll::glfwSetGamma(monitor, gamma); }\n}\n\npub fn get_gamma_ramp(monitor: *GLFWmonitor) -> GLFWgammaramp {\n unsafe {\n let ramp = ::GammaRamp {\n red: [0, ..GAMMA_RAMP_SIZE],\n green: [0, ..GAMMA_RAMP_SIZE],\n blue: [0, ..GAMMA_RAMP_SIZE],\n };\n ::ll::glfwGetGammaRamp(monitor, &ramp);\n ramp\n }\n}\n\npub fn set_gamma_ramp(monitor: *GLFWmonitor, ramp: &GLFWgammaramp) {\n unsafe { ::ll::glfwSetGammaRamp(monitor, ramp); }\n}\n\npub fn set_monitor_callback(cbfun: GLFWmonitorfun) -> GLFWmonitorfun {\n unsafe { ::ll::glfwSetMonitorCallback(cbfun) }\n}\n\npub fn default_window_hints() {\n unsafe { ::ll::glfwDefaultWindowHints(); }\n}\n\npub fn window_hint(target: c_int, hint: c_int) {\n unsafe { ::ll::glfwWindowHint(target, hint); }\n}\n\npub fn create_window(width: c_int, height: c_int, title: &str, monitor: *GLFWmonitor, share: *GLFWwindow) -> *GLFWwindow {\n unsafe { ::ll::glfwCreateWindow(width, height, str::as_c_str(title, |a| a), monitor, share) }\n}\n\npub fn destroy_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwDestroyWindow(window); }\n}\n\npub fn window_should_close(window: *GLFWwindow) -> c_int {\n unsafe { ::ll::glfwWindowShouldClose(window) }\n}\n\npub fn set_window_should_close(window: *GLFWwindow, value: c_int) {\n unsafe { ::ll::glfwSetWindowShouldClose(window, value) }\n}\n\npub fn set_window_title(window: *GLFWwindow, title: &str) {\n unsafe { ::ll::glfwSetWindowTitle(window, str::as_c_str(title, |a| a)) }\n}\n\npub fn get_window_pos(window: *GLFWwindow) -> (c_int, c_int) {\n unsafe {\n let xpos = 0,\n ypos = 0;\n ::ll::glfwGetWindowPos(window, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn set_window_pos(window: *GLFWwindow, xpos: c_int, ypos: c_int) {\n unsafe { 
::ll::glfwSetWindowPos(window, xpos, ypos); }\n}\n\npub fn get_window_size(window: *GLFWwindow) -> (c_int, c_int) {\n unsafe {\n let width = 0,\n height = 0;\n ::ll::glfwGetWindowSize(window, &width, &height);\n (width, height)\n }\n}\n\npub fn set_window_size(window: *GLFWwindow, width: c_int, height: c_int) {\n unsafe { ::ll::glfwSetWindowSize(window, width, height); }\n}\n\npub fn iconify_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwIconifyWindow(window); }\n}\n\npub fn restore_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwRestoreWindow(window); }\n}\n\npub fn show_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwShowWindow(window); }\n}\n\npub fn hide_window(window: *GLFWwindow) {\n unsafe { ::ll::glfwHideWindow(window); }\n}\n\npub fn get_window_monitor(window: *GLFWwindow) -> *GLFWmonitor {\n unsafe { ::ll::glfwGetWindowMonitor(window) }\n}\n\npub fn get_window_param(window: *GLFWwindow, param: c_int) -> c_int {\n unsafe { ::ll::glfwGetWindowParam(window, param) }\n}\n\npub fn set_window_user_pointer(window: *GLFWwindow, pointer: *c_void) {\n unsafe { ::ll::glfwSetWindowUserPointer(window, pointer); }\n}\n\npub fn get_window_user_pointer(window: *GLFWwindow) -> *c_void {\n unsafe { ::ll::glfwGetWindowUserPointer(window) }\n}\n\npub fn set_window_pos_callback(window: *GLFWwindow, cbfun: GLFWwindowposfun) -> GLFWwindowposfun {\n unsafe { ::ll::glfwSetWindowPosCallback(window, cbfun) }\n}\n\npub fn set_window_size_callback(window: *GLFWwindow, cbfun: GLFWwindowsizefun) -> GLFWwindowsizefun {\n unsafe { ::ll::glfwSetWindowSizeCallback(window, cbfun) }\n}\n\npub fn set_window_close_callback(window: *GLFWwindow, cbfun: GLFWwindowclosefun) -> GLFWwindowclosefun {\n unsafe { ::ll::glfwSetWindowCloseCallback(window, cbfun) }\n}\n\npub fn set_window_refresh_callback(window: *GLFWwindow, cbfun: GLFWwindowrefreshfun) -> GLFWwindowrefreshfun {\n unsafe { ::ll::glfwSetWindowRefreshCallback(window, cbfun) }\n}\n\npub fn set_window_focus_callback(window: *GLFWwindow, cbfun: GLFWwindowfocusfun) -> GLFWwindowfocusfun {\n unsafe { ::ll::glfwSetWindowFocusCallback(window, cbfun) }\n}\n\npub fn set_window_iconify_callback(window: *GLFWwindow, cbfun: GLFWwindowiconifyfun) -> GLFWwindowiconifyfun {\n unsafe { ::ll::glfwSetWindowIconifyCallback(window, cbfun) }\n}\n\npub fn get_input_mode(window: *GLFWwindow, mode: c_int) -> c_int {\n unsafe { ::ll::glfwGetInputMode(window, mode) }\n}\n\npub fn set_input_mode(window: *GLFWwindow, mode: c_int, value: c_int) {\n unsafe { ::ll::glfwSetInputMode(window, mode, value); }\n}\n\npub fn get_key(window: *GLFWwindow, key: c_int) -> c_int {\n unsafe { ::ll::glfwGetKey(window, key) }\n}\n\npub fn get_mouse_button(window: *GLFWwindow, button: c_int) -> c_int {\n unsafe { ::ll::glfwGetMouseButton(window, button) }\n}\n\npub fn get_cursor_pos(window: *GLFWwindow) -> (c_double, c_double) {\n unsafe {\n let xpos = 0.0,\n ypos = 0.0;\n ::ll::glfwGetCursorPos(window, &xpos, &ypos);\n (xpos, ypos)\n }\n}\n\npub fn set_cursor_pos(window: *GLFWwindow, xpos: c_double, ypos: c_double) {\n unsafe { ::ll::glfwSetCursorPos(window, xpos, ypos); }\n}\n\npub fn set_key_callback(window: *GLFWwindow, cbfun: GLFWkeyfun) -> GLFWkeyfun {\n unsafe { ::ll::glfwSetKeyCallback(window, cbfun) }\n}\n\npub fn set_char_callback(window: *GLFWwindow, cbfun: GLFWcharfun) -> GLFWcharfun {\n unsafe { ::ll::glfwSetCharCallback(window, cbfun) }\n}\n\npub fn set_mouse_button_callback(window: *GLFWwindow, cbfun: GLFWmousebuttonfun) -> GLFWmousebuttonfun {\n unsafe { 
::ll::glfwSetMouseButtonCallback(window, cbfun) }\n}\n\npub fn set_cursor_pos_callback(window: *GLFWwindow, cbfun: GLFWcursorposfun) -> GLFWcursorposfun {\n unsafe { ::ll::glfwSetCursorPosCallback(window, cbfun) }\n}\n\npub fn set_cursor_enter_callback(window: *GLFWwindow, cbfun: GLFWcursorenterfun) -> GLFWcursorenterfun {\n unsafe { ::ll::glfwSetCursorEnterCallback(window, cbfun) }\n}\n\npub fn set_scroll_callback(window: *GLFWwindow, cbfun: GLFWscrollfun) -> GLFWscrollfun {\n unsafe { ::ll::glfwSetScrollCallback(window, cbfun) }\n}\n\npub fn set_clipboard_string(window: *GLFWwindow, string: &str) {\n unsafe { ::ll::glfwSetClipboardString(window, str::as_c_str(string, |a| a)); }\n}\n\npub fn get_clipboard_string(window: *GLFWwindow) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetClipboardString(window)) }\n}\n\npub fn make_context_current(window: *GLFWwindow) {\n unsafe { ::ll::glfwMakeContextCurrent(window); }\n}\n\npub fn swap_buffers(window: *GLFWwindow) {\n unsafe { ::ll::glfwSwapBuffers(window); }\n}\n\npub fn poll_events() {\n unsafe { ::ll::glfwPollEvents(); }\n}\n\npub fn wait_events() {\n unsafe { ::ll::glfwWaitEvents(); }\n}\n\npub fn get_joystick_param(joy: c_int, param: c_int) -> c_int {\n unsafe { ::ll::glfwGetJoystickParam(joy, param) }\n}\n\n\/\/ TODO\n\/\/ pub fn get_joystick_axes(joy: c_int, axes: *c_float, numaxes: c_int) -> ~[c_int] {\n\/\/ unsafe { ::ll::glfwGetJoystickAxes(joy, ...) }\n\/\/ }\n\n\/\/ TODO\n\/\/ pub fn get_joystick_buttons(joy: c_int, buttons: *c_uchar, numbuttons: c_int) -> ~[c_int] {\n\/\/ unsafe { ::ll::glfwGetJoystickButtons(joy, ...) }\n\/\/ }\n\npub fn get_joystick_name(joy: c_int) -> ~str {\n unsafe { str::raw::from_c_str(::ll::glfwGetJoystickName(joy)) }\n}\n\npub fn get_time() -> c_double {\n unsafe { ::ll::glfwGetTime() }\n}\n\npub fn set_time(time: c_double) {\n unsafe { ::ll::glfwSetTime(time); }\n}\n\npub fn get_current_context() -> *GLFWwindow {\n unsafe { ::ll::glfwGetCurrentContext() }\n}\n\npub fn set_swap_interval(interval: c_int) {\n unsafe { ::ll::glfwSwapInterval(interval); }\n}\n\npub fn extension_supported(extension: &str) -> c_int {\n unsafe { ::ll::glfwExtensionSupported(str::as_c_str(extension, |a| a)) }\n}\n\npub fn get_proc_address(procname: &str) -> GLFWglproc {\n unsafe { ::ll::glfwGetProcAddress(str::as_c_str(procname, |a| a)) }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add implementation of ToTokenTrees for String<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #10847 - johnmatthiggins:master, r=epage<commit_after><|endoftext|>"} {"text":"<commit_before>use core::cmp::min;\nuse core::intrinsics;\nuse core::mem::size_of;\nuse core::ops::{Index, IndexMut};\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\npub struct Memory<T> {\n pub ptr: *mut T,\n}\n\nimpl<T> Memory<T> {\n pub fn new(count: usize) -> Option<Self> {\n let alloc = unsafe { alloc(count * size_of::<T>()) };\n if alloc > 0 {\n Some(Memory { ptr: alloc as *mut T })\n } else {\n None\n }\n }\n\n pub fn renew(&mut 
self, count: usize) -> bool {\n let address = unsafe { realloc(self.ptr as usize, count * size_of::<T>()) };\n if address > 0 {\n self.ptr = address as *mut T;\n true\n } else {\n false\n }\n }\n\n \/\/ The size in bytes\n pub fn size(&self) -> usize {\n unsafe { alloc_size(self.ptr as usize) }\n }\n\n \/\/ The length in T elements\n pub fn length(&self) -> usize {\n unsafe { alloc_size(self.ptr as usize) \/ size_of::<T>() }\n }\n\n pub unsafe fn address(&self) -> usize {\n self.ptr as usize\n }\n\n pub unsafe fn read(&self, i: usize) -> T {\n ptr::read(self.ptr.offset(i as isize))\n }\n\n pub unsafe fn load(&self, i: usize) -> T {\n intrinsics::atomic_load(self.ptr.offset(i as isize))\n }\n\n pub unsafe fn write(&mut self, i: usize, value: T) {\n ptr::write(self.ptr.offset(i as isize), value);\n }\n\n pub unsafe fn store(&mut self, i: usize, value: T) {\n intrinsics::atomic_store(self.ptr.offset(i as isize), value);\n }\n}\n\nimpl<T> Drop for Memory<T> {\n fn drop(&mut self) {\n unsafe { unalloc(self.ptr as usize) }\n }\n}\n\nimpl<T> Index<usize> for Memory<T> {\n type Output = T;\n\n fn index<'a>(&'a self, _index: usize) -> &'a T {\n unsafe { &*self.ptr.offset(_index as isize) }\n }\n}\n\nimpl<T> IndexMut<usize> for Memory<T> {\n fn index_mut<'a>(&'a mut self, _index: usize) -> &'a mut T {\n unsafe { &mut *self.ptr.offset(_index as isize) }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let 
mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<commit_msg>Use `intrinsics::atomic_singlethreadfence` instead of `atomic_{load,store}`<commit_after>use core::cmp::min;\nuse core::intrinsics;\nuse core::mem::size_of;\nuse core::ops::{Index, IndexMut};\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\npub struct Memory<T> {\n pub ptr: *mut T,\n}\n\nimpl<T> Memory<T> 
{\n pub fn new(count: usize) -> Option<Self> {\n let alloc = unsafe { alloc(count * size_of::<T>()) };\n if alloc > 0 {\n Some(Memory { ptr: alloc as *mut T })\n } else {\n None\n }\n }\n\n pub fn renew(&mut self, count: usize) -> bool {\n let address = unsafe { realloc(self.ptr as usize, count * size_of::<T>()) };\n if address > 0 {\n self.ptr = address as *mut T;\n true\n } else {\n false\n }\n }\n\n \/\/ The size in bytes\n pub fn size(&self) -> usize {\n unsafe { alloc_size(self.ptr as usize) }\n }\n\n \/\/ The length in T elements\n pub fn length(&self) -> usize {\n unsafe { alloc_size(self.ptr as usize) \/ size_of::<T>() }\n }\n\n pub unsafe fn address(&self) -> usize {\n self.ptr as usize\n }\n\n pub unsafe fn read(&self, i: usize) -> T {\n ptr::read(self.ptr.offset(i as isize))\n }\n\n pub unsafe fn load(&self, i: usize) -> T {\n intrinsics::atomic_singlethreadfence();\n ptr::read(self.ptr.offset(i as isize))\n }\n\n pub unsafe fn write(&mut self, i: usize, value: T) {\n ptr::write(self.ptr.offset(i as isize), value);\n }\n\n pub unsafe fn store(&mut self, i: usize, value: T) {\n intrinsics::atomic_singlethreadfence();\n ptr::write(self.ptr.offset(i as isize), value)\n }\n}\n\nimpl<T> Drop for Memory<T> {\n fn drop(&mut self) {\n unsafe { unalloc(self.ptr as usize) }\n }\n}\n\nimpl<T> Index<usize> for Memory<T> {\n type Output = T;\n\n fn index<'a>(&'a self, _index: usize) -> &'a T {\n unsafe { &*self.ptr.offset(_index as isize) }\n }\n}\n\nimpl<T> IndexMut<usize> for Memory<T> {\n fn index_mut<'a>(&'a mut self, _index: usize) -> &'a mut T {\n unsafe { &mut *self.ptr.offset(_index as isize) }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = 
cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(process_mutex): win stub fix<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>led-noblink.rs: Add support for stopping led blinking in Rust.<commit_after>use std::error::Error;\nuse std::fs::File;\nuse std::path::Path;\nuse std::io::Write;\n\n\nfn main() {\n let path = Path::new(\"\/proc\/acpi\/ibm\/led\");\n let pdis = path.display();\n\n let mut fpo = match File::create(&path) {\n Err(why) => panic!(\"Can't open {}: {}\", pdis, why.description()),\n Ok(fpo) => fpo,\n };\n\n match 
fpo.write_all(b\"0 on\\n\") {\n Err(why) => panic!(\"Can't write {}: {}\", pdis, why.description()),\n Ok(_) => (),\n }\n match fpo.flush() {\n Err(why) => panic!(\"Can't flush {}: {}\", pdis, why.description()),\n Ok(_) => (),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that there are no binaries checked into the source tree\n\/\/! by accident.\n\/\/!\n\/\/! In the past we've accidentally checked in test binaries and such which add a\n\/\/! huge amount of bloat to the git history, so it's good to just ensure we\n\/\/! don't do that again :)\n\nuse std::path::Path;\n\n\/\/ All files are executable on Windows, so just check on Unix\n#[cfg(windows)]\npub fn check(_path: &Path, _bad: &mut bool) {}\n\n#[cfg(unix)]\npub fn check(path: &Path, bad: &mut bool) {\n use std::fs;\n use std::io::Read;\n use std::process::{Command, Stdio};\n use std::os::unix::prelude::*;\n\n if let Ok(mut file) = fs::File::open(\"\/proc\/version\") {\n let mut contents = String::new();\n file.read_to_string(&mut contents).unwrap();\n \/\/ Probably on Windows Linux Subsystem, all files will be marked as\n \/\/ executable, so skip checking.\n if contents.contains(\"Microsoft\") {\n return;\n }\n }\n\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/etc\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".py\", \".sh\"];\n if extensions.iter().any(|e| filename.ends_with(e)) {\n return\n }\n\n let metadata = t!(fs::symlink_metadata(&file), &file);\n if metadata.mode() & 0o111 != 0 {\n let rel_path = file.strip_prefix(path).unwrap();\n let git_friendly_path = rel_path.to_str().unwrap().replace(\"\\\\\", \"\/\");\n let ret_code = Command::new(\"git\")\n .arg(\"ls-files\")\n .arg(&git_friendly_path)\n .current_dir(path)\n .stdout(Stdio::null())\n .stderr(Stdio::null())\n .status()\n .unwrap_or_else(|e| {\n panic!(\"could not run git ls-files: {}\", e);\n });\n if ret_code.success() {\n println!(\"binary checked into source: {}\", file.display());\n *bad = true;\n }\n }\n })\n}\n\n<commit_msg>tidy\/bins: fix false positive on non checked-in binary<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that there are no binaries checked into the source tree\n\/\/! by accident.\n\/\/!\n\/\/! In the past we've accidentally checked in test binaries and such which add a\n\/\/! huge amount of bloat to the git history, so it's good to just ensure we\n\/\/! 
don't do that again :)\n\nuse std::path::Path;\n\n\/\/ All files are executable on Windows, so just check on Unix\n#[cfg(windows)]\npub fn check(_path: &Path, _bad: &mut bool) {}\n\n#[cfg(unix)]\npub fn check(path: &Path, bad: &mut bool) {\n use std::fs;\n use std::io::Read;\n use std::process::{Command, Stdio};\n use std::os::unix::prelude::*;\n\n if let Ok(mut file) = fs::File::open(\"\/proc\/version\") {\n let mut contents = String::new();\n file.read_to_string(&mut contents).unwrap();\n \/\/ Probably on Windows Linux Subsystem, all files will be marked as\n \/\/ executable, so skip checking.\n if contents.contains(\"Microsoft\") {\n return;\n }\n }\n\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/etc\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".py\", \".sh\"];\n if extensions.iter().any(|e| filename.ends_with(e)) {\n return;\n }\n\n let metadata = t!(fs::symlink_metadata(&file), &file);\n if metadata.mode() & 0o111 != 0 {\n let rel_path = file.strip_prefix(path).unwrap();\n let git_friendly_path = rel_path.to_str().unwrap().replace(\"\\\\\", \"\/\");\n let output = Command::new(\"git\")\n .arg(\"ls-files\")\n .arg(&git_friendly_path)\n .current_dir(path)\n .stderr(Stdio::null())\n .output()\n .unwrap_or_else(|e| {\n panic!(\"could not run git ls-files: {}\", e);\n });\n let path_bytes = rel_path.as_os_str().as_bytes();\n if output.status.success() && output.stdout.starts_with(path_bytes) {\n println!(\"binary checked into source: {}\", file.display());\n *bad = true;\n }\n }\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. 
It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"ar\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"chalk-engine\"),\n Crate(\"chalk-macros\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"polonius-engine\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-hash\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempdir\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> 
String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n 
.arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<commit_msg>Whitelist datafrog on tidy<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. 
It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"ar\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"chalk-engine\"),\n Crate(\"chalk-macros\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"datafrog\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"polonius-engine\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-hash\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempdir\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub 
fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n 
.arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify some auxiliary functions.<commit_after><|endoftext|>"} {"text":"<commit_before>use nom::{Consumer, FileProducer};\nuse std::io::{Error, ErrorKind, Result};\nuse std::u32;\n\nuse metadata::{\n Block, BlockData,\n StreamInfo, CueSheet, VorbisComment, Picture,\n PictureType,\n};\nuse metadata::types::MetaDataConsumer;\n\n\/\/ Will return true when the unwrapped value of `$option` and `$compare`\n\/\/ match or `$option` is `Option::None`, otherwise false.\nmacro_rules! 
optional_eq (\n ($compare: expr, $option: expr) => (\n $option.map_or(true, |compare| $compare == compare);\n );\n);\n\n\/\/ With the given filename, return all metadata blocks available.\n\/\/\n\/\/ This function expects a flac file, but will return a proper `Result::Err`\n\/\/ when things go wrong.\n\/\/\n\/\/ # Failures\n\/\/\n\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found.\n\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/ isn't valid FLAC data.\npub fn get_metadata(filename: &str) -> Result<Vec<Block>> {\n FileProducer::new(filename, 1024).and_then(|mut producer| {\n let mut consumer = MetaDataConsumer::new();\n\n consumer.run(&mut producer);\n\n if !consumer.data.is_empty() {\n Ok(consumer.data)\n } else {\n let error_str = \"parser: couldn't find any metadata\";\n\n Err(Error::new(ErrorKind::InvalidData, error_str))\n }\n })\n}\n\n\/\/\/ Reads and returns the `StreamInfo` metadata block of the given FLAC\n\/\/\/ file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `StreamInfo` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Handling errors might look something like this:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ match metadata::get_stream_info(\"path\/to\/file.flac\") {\n\/\/\/ Ok(stream_info) => {\n\/\/\/ \/\/ Use the stream_info variable...\n\/\/\/ }\n\/\/\/ Err(error) => println!(\"{}\", error),\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Or just ignore the errors:\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ let stream_info = metadata::get_stream_info(\"path\/to\/file.flac\").unwrap();\n\/\/\/ ```\npub fn get_stream_info(filename: &str) -> Result<StreamInfo> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find StreamInfo\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::StreamInfo(stream_info) = block.data {\n result = Ok(stream_info);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns the `VorbisComment` metadata block of the given FLAC\n\/\/\/ file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `VorbisComment` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\npub fn get_vorbis_comment(filename: &str) -> Result<VorbisComment> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find VorbisComment\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::VorbisComment(vorbis_comment) = block.data {\n result = Ok(vorbis_comment);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns the `CueSheet` metadata block of the given FLAC file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `CueSheet` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\npub fn get_cue_sheet(filename: &str) -> Result<CueSheet> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find CueSheet\";\n let mut result = 
Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::CueSheet(cue_sheet) = block.data {\n result = Ok(cue_sheet);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns a `Picture` metadata block of the given FLAC file.\n\/\/\/\n\/\/\/ There can be more than one `Picture` block in a file and this function\n\/\/\/ takes optional, that being `Option<T>`, parameters that act as\n\/\/\/ constraints to search within. The `Picture` with the largest area\n\/\/\/ matching all constraints will be returned.\n\/\/\/\n\/\/\/ Putting `None` into any of the optional constraints conveys that you\n\/\/\/ want any of that parameter. Otherwise it will try to look for the image\n\/\/\/ that matches within the given constraints.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found,\n\/\/\/ there is no `Picture` within the file, or no `Picture` that fits the\n\/\/\/ given constraints.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\npub fn get_picture(filename: &str,\n picture_type: Option<PictureType>,\n mime_type: Option<&str>,\n description: Option<&str>,\n max_width: Option<u32>,\n max_height: Option<u32>,\n max_depth: Option<u32>,\n max_colors: Option<u32>)\n -> Result<Picture> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find any Picture\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n let mut max_area_seen = 0;\n let mut max_depth_seen = 0;\n\n let max_value = u32::max_value();\n let max_width_num = max_width.unwrap_or(max_value);\n let max_height_num = max_height.unwrap_or(max_value);\n let max_depth_num = max_depth.unwrap_or(max_value);\n let max_colors_num = max_colors.unwrap_or(max_value);\n\n for block in blocks {\n if let BlockData::Picture(picture) = block.data {\n let area = (picture.width as u64) * (picture.height as u64);\n\n if optional_eq!(picture.picture_type, picture_type) &&\n optional_eq!(picture.mime_type, mime_type) &&\n optional_eq!(picture.description, description) &&\n picture.width <= max_width_num &&\n picture.height <= max_height_num &&\n picture.depth <= max_depth_num &&\n picture.colors <= max_colors_num &&\n (area > max_area_seen || (area == max_area_seen &&\n picture.depth > max_depth_seen)) {\n max_area_seen = area;\n max_depth_seen = picture.depth;\n result = Ok(picture);\n }\n }\n }\n\n result\n })\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io::ErrorKind;\n\n #[test]\n #[should_panic]\n fn test_panic_optional_eq() {\n assert!(optional_eq!(0, Some(1)));\n }\n\n #[test]\n fn test_optional_eq() {\n assert!(optional_eq!(0, None), \"Should always return true when None\");\n assert!(optional_eq!(0, Some(0)), \"Should return true (Some(0) == 0)\");\n }\n\n #[test]\n fn test_get_metadata() {\n let not_found = get_metadata(\"non-existent\/file.txt\");\n let invalid_data = get_metadata(\"README.md\");\n let result = get_metadata(\"tests\/assets\/input-SVAUP.flac\");\n\n assert_eq!(not_found.unwrap_err().kind(), ErrorKind::NotFound);\n assert_eq!(invalid_data.unwrap_err().kind(), ErrorKind::InvalidData);\n assert!(result.is_ok());\n }\n}\n<commit_msg>Add examples for `get_vorbis_comment`<commit_after>use nom::{Consumer, FileProducer};\nuse std::io::{Error, ErrorKind, Result};\nuse std::u32;\n\nuse metadata::{\n Block, BlockData,\n StreamInfo, CueSheet, VorbisComment, Picture,\n PictureType,\n};\nuse 
metadata::types::MetaDataConsumer;\n\n\/\/ Will return true when the unwrapped value of `$option` and `$compare`\n\/\/ match or `$option` is `Option::None`, otherwise false.\nmacro_rules! optional_eq (\n ($compare: expr, $option: expr) => (\n $option.map_or(true, |compare| $compare == compare);\n );\n);\n\n\/\/ With the given filename, return all metadata blocks available.\n\/\/\n\/\/ This function expects a flac file, but will return a proper `Result::Err`\n\/\/ when things go wrong.\n\/\/\n\/\/ # Failures\n\/\/\n\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found.\n\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/ isn't valid FLAC data.\npub fn get_metadata(filename: &str) -> Result<Vec<Block>> {\n FileProducer::new(filename, 1024).and_then(|mut producer| {\n let mut consumer = MetaDataConsumer::new();\n\n consumer.run(&mut producer);\n\n if !consumer.data.is_empty() {\n Ok(consumer.data)\n } else {\n let error_str = \"parser: couldn't find any metadata\";\n\n Err(Error::new(ErrorKind::InvalidData, error_str))\n }\n })\n}\n\n\/\/\/ Reads and returns the `StreamInfo` metadata block of the given FLAC\n\/\/\/ file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `StreamInfo` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Handling errors might look something like this:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ match metadata::get_stream_info(\"path\/to\/file.flac\") {\n\/\/\/ Ok(stream_info) => {\n\/\/\/ \/\/ Use the stream_info variable...\n\/\/\/ }\n\/\/\/ Err(error) => println!(\"{}\", error),\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Or just ignore the errors:\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ let stream_info = metadata::get_stream_info(\"path\/to\/file.flac\").unwrap();\n\/\/\/ ```\npub fn get_stream_info(filename: &str) -> Result<StreamInfo> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find StreamInfo\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::StreamInfo(stream_info) = block.data {\n result = Ok(stream_info);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns the `VorbisComment` metadata block of the given FLAC\n\/\/\/ file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `VorbisComment` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Handling errors might look something like this:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ match metadata::get_vorbis_comment(\"path\/to\/file.flac\") {\n\/\/\/ Ok(vorbis_comment) => {\n\/\/\/ \/\/ Use the vorbis_comment variable...\n\/\/\/ }\n\/\/\/ Err(error) => println!(\"{}\", error),\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Or just ignore the errors:\n\/\/\/\n\/\/\/ ```no_run\n\/\/\/ use flac::metadata;\n\/\/\/\n\/\/\/ let vorbis_comment =\n\/\/\/ metadata::get_vorbis_comment(\"path\/to\/file.flac\").unwrap();\n\/\/\/ ```\npub fn get_vorbis_comment(filename: &str) -> Result<VorbisComment> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find 
VorbisComment\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::VorbisComment(vorbis_comment) = block.data {\n result = Ok(vorbis_comment);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns the `CueSheet` metadata block of the given FLAC file.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found\n\/\/\/ or there is no `CueSheet` within the file.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\npub fn get_cue_sheet(filename: &str) -> Result<CueSheet> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find CueSheet\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n for block in blocks {\n if let BlockData::CueSheet(cue_sheet) = block.data {\n result = Ok(cue_sheet);\n break;\n }\n }\n\n result\n })\n}\n\n\/\/\/ Reads and returns a `Picture` metadata block of the given FLAC file.\n\/\/\/\n\/\/\/ There can be more than one `Picture` block in a file and this function\n\/\/\/ takes optional, that being `Option<T>`, parameters that act as\n\/\/\/ constraints to search within. The `Picture` with the largest area\n\/\/\/ matching all constraints will be returned.\n\/\/\/\n\/\/\/ Putting `None` into any of the optional constraints conveys that you\n\/\/\/ want any of that parameter. Otherwise it will try to look for the image\n\/\/\/ that matches within the given constraints.\n\/\/\/\n\/\/\/ # Failures\n\/\/\/\n\/\/\/ * `ErrorKind::NotFound` is returned when the given filename isn't found,\n\/\/\/ there is no `Picture` within the file, or no `Picture` that fits the\n\/\/\/ given constraints.\n\/\/\/ * `ErrorKind::InvalidData` is returned when the data within the file\n\/\/\/ isn't valid FLAC data.\npub fn get_picture(filename: &str,\n picture_type: Option<PictureType>,\n mime_type: Option<&str>,\n description: Option<&str>,\n max_width: Option<u32>,\n max_height: Option<u32>,\n max_depth: Option<u32>,\n max_colors: Option<u32>)\n -> Result<Picture> {\n get_metadata(filename).and_then(|blocks| {\n let error_str = \"metadata: couldn't find any Picture\";\n let mut result = Err(Error::new(ErrorKind::NotFound, error_str));\n\n let mut max_area_seen = 0;\n let mut max_depth_seen = 0;\n\n let max_value = u32::max_value();\n let max_width_num = max_width.unwrap_or(max_value);\n let max_height_num = max_height.unwrap_or(max_value);\n let max_depth_num = max_depth.unwrap_or(max_value);\n let max_colors_num = max_colors.unwrap_or(max_value);\n\n for block in blocks {\n if let BlockData::Picture(picture) = block.data {\n let area = (picture.width as u64) * (picture.height as u64);\n\n if optional_eq!(picture.picture_type, picture_type) &&\n optional_eq!(picture.mime_type, mime_type) &&\n optional_eq!(picture.description, description) &&\n picture.width <= max_width_num &&\n picture.height <= max_height_num &&\n picture.depth <= max_depth_num &&\n picture.colors <= max_colors_num &&\n (area > max_area_seen || (area == max_area_seen &&\n picture.depth > max_depth_seen)) {\n max_area_seen = area;\n max_depth_seen = picture.depth;\n result = Ok(picture);\n }\n }\n }\n\n result\n })\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io::ErrorKind;\n\n #[test]\n #[should_panic]\n fn test_panic_optional_eq() {\n assert!(optional_eq!(0, Some(1)));\n }\n\n #[test]\n fn test_optional_eq() {\n assert!(optional_eq!(0, None), \"Should always return true when 
None\");\n assert!(optional_eq!(0, Some(0)), \"Should return true (Some(0) == 0)\");\n }\n\n #[test]\n fn test_get_metadata() {\n let not_found = get_metadata(\"non-existent\/file.txt\");\n let invalid_data = get_metadata(\"README.md\");\n let result = get_metadata(\"tests\/assets\/input-SVAUP.flac\");\n\n assert_eq!(not_found.unwrap_err().kind(), ErrorKind::NotFound);\n assert_eq!(invalid_data.unwrap_err().kind(), ErrorKind::InvalidData);\n assert!(result.is_ok());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix tests<commit_after>\/\/! Register commands on the registry.\n\nuse std::sync::Arc;\nuse std::process::Command;\nuse std::thread;\nuse std::env;\nuse std::io::prelude::*;\n\nuse registry::{self, RegistryValue, CommandFn};\nuse layout::tree::try_lock_tree;\nuse lua::{self, LuaQuery};\n\n\/\/\/ Register the default commands on the registry.\n\/\/\/\n\/\/\/ Some of this code will be moved to be called after the config,\n\/\/\/ and will be registered dynamically.\npub fn register_defaults() {\n let mut reg = registry::write_lock();\n\n let mut register = |name: &'static str, val: CommandFn| {\n reg.insert(name.to_string(), RegistryValue::new_command(val));\n };\n\n \/\/ Workspace\n register(\"quit\", Arc::new(quit));\n register(\"launch_terminal\", Arc::new(launch_terminal));\n register(\"launch_dmenu\", Arc::new(launch_dmenu));\n register(\"print_pointer\", Arc::new(print_pointer));\n\n register(\"dmenu_eval\", Arc::new(dmenu_eval));\n register(\"dmenu_lua_dofile\", Arc::new(dmenu_lua_dofile));\n\n \/\/register(\"workspace_left\", workspace_left);\n \/\/register(\"workspace_right\", workspace_right);\n\n \/\/\/ Generate switch_workspace methods and register them in $map\n macro_rules! gen_switch_workspace {\n ( $($b:ident, $n:expr);+ ) => {\n $(fn $b() {\n trace!(\"Switching to workspace {}\", $n);\n if let Ok(mut tree) = try_lock_tree() {\n tree.switch_workspace(&$n.to_string());\n }\n }\n register(stringify!($b), Arc::new($b)); )+\n }\n }\n\n\n gen_switch_workspace!(switch_workspace_1, \"1\";\n switch_workspace_2, \"2\";\n switch_workspace_3, \"3\";\n switch_workspace_4, \"4\";\n switch_workspace_5, \"5\";\n switch_workspace_6, \"6\";\n switch_workspace_7, \"7\";\n switch_workspace_8, \"8\";\n switch_workspace_9, \"9\";\n switch_workspace_0, \"0\");\n\n}\n\n\/\/ All of the methods defined should be registered.\n#[deny(dead_code)]\n\nfn launch_terminal() {\n let term = env::var(\"WAYLAND_TERMINAL\")\n .unwrap_or(\"weston-terminal\".to_string());\n\n Command::new(\"sh\").arg(\"-c\")\n .arg(term)\n .spawn().expect(\"Error launching terminal\");\n}\n\nfn launch_dmenu() {\n Command::new(\"sh\").arg(\"-c\")\n .arg(\"dmenu_run\")\n .spawn().expect(\"Error launching terminal\");\n}\n\nfn print_pointer() {\n use lua;\n use lua::LuaQuery;\n\n let code = \"if wm == nil then print('wm table does not exist')\\n\\\n elseif wm.pointer == nil then print('wm.pointer table does not exist')\\n\\\n else\\n\\\n local x, y = wm.pointer.get_position()\\n\\\n print('The cursor is at ' .. x .. ', ' .. 
y)\\n\\\n end\".to_string();\n lua::send(LuaQuery::Execute(code))\n .expect(\"Error telling Lua to get pointer coords\");\n}\n\nfn quit() {\n info!(\"Closing way cooler!!\");\n ::rustwlc::terminate();\n}\n\n#[allow(unused_variables)]\nfn dmenu_lua_dofile() {\n thread::Builder::new().name(\"dmenu_dofile\".to_string()).spawn(|| {\n let child = Command::new(\"dmenu\").arg(\"-p 'Eval Lua file'\")\n .spawn().expect(\"Unable to launch dmenu!\");\n\n \/\/ Write \\d to stdin to prevent options from being given\n let stdin = child.stdin.expect(\"Unable to access stdin\");\n \/\/stdin.write_all(b\"\\d\");\n\n let mut stdout = child.stdout.expect(\"Unable to access stdout\");\n let mut output = String::new();\n stdout.read_to_string(&mut output).expect(\"Unable to read stdout\");\n\n lua::send(LuaQuery::ExecFile(output)).expect(\"unable to contact Lua\");\n }).expect(\"Unable to spawn thread\");\n}\n\n#[allow(unused_variables)]\nfn dmenu_eval() {\n thread::Builder::new().name(\"dmenu_eval\".to_string()).spawn(|| {\n let child = Command::new(\"dmenu\").arg(\"-p 'Eval Lua code'\")\n .spawn().expect(\"Unable to launch dmenu!\");\n\n \/\/ Write \\d to stdin to prevent options from being given\n let stdin = child.stdin.expect(\"Unable to access stdin\");\n \/\/stdin.write_all(b\"\\d\");\n\n let mut stdout = child.stdout.expect(\"Unable to access stdout\");\n let mut output = String::new();\n stdout.read_to_string(&mut output).expect(\"Unable to read stdout\");\n\n lua::send(LuaQuery::Execute(output)).expect(\"Unable to contact Lua\");\n }).expect(\"Unable to spawn thread\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>privatize some needlessly public methods<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reduce macro iterations to 32<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. 
See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) {\n if path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n }\n let have_cmd = |cmd: &OsStr| {\n for path in env::split_paths(&path) {\n let target = path.join(cmd);\n let mut cmd_alt = cmd.to_os_string();\n cmd_alt.push(\".exe\");\n if target.exists() ||\n target.with_extension(\"exe\").exists() ||\n target.join(cmd_alt).exists() {\n return Some(target);\n }\n }\n return None;\n };\n\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n if have_cmd(cmd).is_none() {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n }\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if fs::metadata(build.src.join(\".git\")).is_ok() {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM\n for host in build.config.host.iter() {\n if let Some(config) = build.config.target_config.get(host) {\n if config.llvm_config.is_some() {\n continue\n }\n }\n need_cmd(\"cmake\".as_ref());\n if build.config.ninja {\n need_cmd(\"ninja\".as_ref())\n }\n break\n }\n\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2.7\".as_ref());\n }\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2\".as_ref());\n }\n if build.config.python.is_none() {\n need_cmd(\"python\".as_ref());\n build.config.python = Some(\"python\".into());\n }\n need_cmd(build.config.python.as_ref().unwrap().as_ref());\n\n\n if let Some(ref s) = build.config.nodejs {\n need_cmd(s.as_ref());\n } else {\n \/\/ Look for the nodejs command, needed for emscripten testing\n if let Some(node) = have_cmd(\"node\".as_ref()) {\n build.config.nodejs = Some(node);\n } else if let Some(node) = have_cmd(\"nodejs\".as_ref()) {\n build.config.nodejs = Some(node);\n }\n }\n\n if let Some(ref gdb) = build.config.gdb {\n need_cmd(gdb.as_ref());\n } else {\n build.config.gdb = have_cmd(\"gdb\".as_ref());\n }\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. 
For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n for host in build.config.host.iter() {\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Can't compile for iOS unless we're on OSX\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on OSX\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n match build.musl_root(target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n\n if target.contains(\"arm-linux-android\") {\n need_cmd(\"adb\".as_ref());\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n need_cmd(s.as_ref());\n }\n}\n<commit_msg>Accept ninja-build binary in place of ninja<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashSet;\nuse std::env;\nuse std::ffi::{OsStr, OsString};\nuse std::fs;\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse Build;\n\npub fn check(build: &mut Build) {\n let mut checked = HashSet::new();\n let path = env::var_os(\"PATH\").unwrap_or(OsString::new());\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) {\n if path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n }\n let have_cmd = |cmd: &OsStr| {\n for path in env::split_paths(&path) {\n let target = path.join(cmd);\n let mut cmd_alt = cmd.to_os_string();\n cmd_alt.push(\".exe\");\n if target.exists() ||\n target.with_extension(\"exe\").exists() ||\n target.join(cmd_alt).exists() {\n return Some(target);\n }\n }\n return None;\n };\n\n let mut need_cmd = |cmd: &OsStr| {\n if !checked.insert(cmd.to_owned()) {\n return\n }\n if have_cmd(cmd).is_none() {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd);\n }\n };\n\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if fs::metadata(build.src.join(\".git\")).is_ok() {\n need_cmd(\"git\".as_ref());\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM\n for host in build.config.host.iter() {\n if let Some(config) = build.config.target_config.get(host) {\n if config.llvm_config.is_some() {\n continue\n }\n }\n need_cmd(\"cmake\".as_ref());\n if build.config.ninja {\n \/\/ Some Linux distros rename `ninja` to `ninja-build`.\n \/\/ CMake can work with either binary name.\n if have_cmd(\"ninja-build\".as_ref()).is_none() {\n need_cmd(\"ninja\".as_ref());\n }\n }\n break\n }\n\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2.7\".as_ref());\n }\n if build.config.python.is_none() {\n build.config.python = have_cmd(\"python2\".as_ref());\n }\n if build.config.python.is_none() {\n need_cmd(\"python\".as_ref());\n build.config.python = Some(\"python\".into());\n }\n need_cmd(build.config.python.as_ref().unwrap().as_ref());\n\n\n if let Some(ref s) = build.config.nodejs {\n need_cmd(s.as_ref());\n } else {\n \/\/ Look for the nodejs command, needed for emscripten testing\n if let Some(node) = have_cmd(\"node\".as_ref()) {\n build.config.nodejs = Some(node);\n } else if let Some(node) = 
have_cmd(\"nodejs\".as_ref()) {\n build.config.nodejs = Some(node);\n }\n }\n\n if let Some(ref gdb) = build.config.gdb {\n need_cmd(gdb.as_ref());\n } else {\n build.config.gdb = have_cmd(\"gdb\".as_ref());\n }\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in build.config.target.iter() {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n need_cmd(build.cc(target).as_ref());\n if let Some(ar) = build.ar(target) {\n need_cmd(ar.as_ref());\n }\n }\n for host in build.config.host.iter() {\n need_cmd(build.cxx(host).as_ref());\n }\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n for host in build.config.host.iter() {\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(&build.config.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in build.config.target.iter() {\n \/\/ Can't compile for iOS unless we're on OSX\n if target.contains(\"apple-ios\") &&\n !build.config.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on OSX\");\n }\n\n \/\/ Make sure musl-root is valid if specified\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n match build.musl_root(target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. 
The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n\n if target.contains(\"arm-linux-android\") {\n need_cmd(\"adb\".as_ref());\n }\n }\n\n for host in build.flags.host.iter() {\n if !build.config.host.contains(host) {\n panic!(\"specified host `{}` is not in the .\/configure list\", host);\n }\n }\n for target in build.flags.target.iter() {\n if !build.config.target.contains(target) {\n panic!(\"specified target `{}` is not in the .\/configure list\",\n target);\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n need_cmd(s.as_ref());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:art: Review the owner of methods<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed missing file<commit_after>#![allow(dead_code)]\n#![allow(non_camel_case_types)]\n#![allow(missing_copy_implementations)]\n\nuse std::fmt::Formatter;\nuse std::fmt;\nuse std::fmt::Debug;\nuse std::mem;\nuse std::ffi::CStr;\nuse std::str;\n\n\nuse cql_ffi::types::cass_uint64_t;\nuse cql_ffi::types::cass_uint8_t;\nuse cql_bindgen::CassUuid as _CassUuid;\nuse cql_bindgen::CassUuidGen as _CassUuidGen;\nuse cql_bindgen::cass_uuid_gen_new;\nuse cql_bindgen::cass_uuid_gen_free;\nuse cql_bindgen::cass_uuid_gen_time;\nuse cql_bindgen::cass_uuid_gen_new_with_node;\nuse cql_bindgen::cass_uuid_gen_random;\nuse cql_bindgen::cass_uuid_gen_from_time;\nuse cql_bindgen::cass_uuid_min_from_time;\nuse cql_bindgen::cass_uuid_max_from_time;\nuse cql_bindgen::cass_uuid_timestamp;\nuse cql_bindgen::cass_uuid_version;\nuse cql_bindgen::cass_uuid_string;\nuse cql_bindgen::raw2utf8;\n\/\/use cql_bindgen::cass_uuid_from_string;\n\n\/\/use cql_ffi::error::CassError;\n\nconst CASS_UUID_STRING_LENGTH:usize = 37;\n\n\n#[derive(Copy,Clone)]\npub struct CassUuid(pub _CassUuid);\n\nimpl ::std::default::Default for CassUuid {\n fn default() -> CassUuid { unsafe { ::std::mem::zeroed() } }\n}\n\npub struct CassUuidGen(pub *mut _CassUuidGen);\n\nimpl Drop for CassUuidGen {\n fn drop(&mut self) {\n self.free()\n }\n}\n\nimpl Debug for CassUuid {\n fn fmt(&self, f:&mut Formatter) -> fmt::Result {\n write!(f, \"{:?}\", self.to_string())\n } \n}\n\nimpl CassUuid {\n pub unsafe fn min_from_time(&mut self, time: cass_uint64_t) {cass_uuid_min_from_time(time,&mut self.0)}\n\n pub unsafe fn max_from_time(&mut self, time: cass_uint64_t) {cass_uuid_max_from_time(time,&mut self.0)}\n\n pub unsafe fn timestamp(&self) -> u64 {cass_uuid_timestamp(self.0)}\n\n pub unsafe fn version(&self) -> cass_uint8_t {cass_uuid_version(self.0)}\n \n \/\/FIXME\n pub fn to_string(&self) -> String {unsafe{\n let mut time_str:[i8;CASS_UUID_STRING_LENGTH] = [0;CASS_UUID_STRING_LENGTH];\n \n 
cass_uuid_string(self.0, time_str[..].as_mut_ptr());\n let mut output:i8 = mem::zeroed();\n cass_uuid_string(self.0,&mut output);\n \n let mut output:i8 = mem::zeroed();\n cass_uuid_string(self.0, &mut output);\n let slice = CStr::from_ptr(&output);\n str::from_utf8(slice.to_bytes()).unwrap().to_string()\n }}\n \n \/\/pub unsafe fn from_string(&mut self, str: *const c_char) -> Result<(),CassError> {CassError::build(cass_uuid_from_string(str,&mut self.0))}\n}\n\nimpl CassUuidGen {\n pub fn new() -> Self {unsafe{\n CassUuidGen(cass_uuid_gen_new())\n }}\n \n pub fn new_with_node(node: cass_uint64_t) -> CassUuidGen {unsafe{\n CassUuidGen(cass_uuid_gen_new_with_node(node))\n }}\n \n fn free(&self) {unsafe{\n cass_uuid_gen_free(self.0)\n }}\n \n pub fn get_time(&self) -> CassUuid {unsafe{\n let mut output:_CassUuid = mem::zeroed();\n cass_uuid_gen_time(self.0,&mut output);\n CassUuid(output)\n }}\n \n pub fn fill_random(&self, mut output: CassUuid) {unsafe{\n cass_uuid_gen_random(self.0, &mut output.0)\n }}\n \n pub fn random(&self) -> CassUuid {unsafe{\n let mut output:_CassUuid = mem::zeroed();\n cass_uuid_gen_random(self.0, &mut output);\n CassUuid(output)\n }}\n \n pub fn from_time(&self, timestamp: cass_uint64_t, mut output: CassUuid){unsafe{\n cass_uuid_gen_from_time(self.0,timestamp, &mut output.0)\n }}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix visibility of foreign module<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::{self, Path, PathBuf};\nuse std::ffi::OsString;\nuse std::fs;\nuse std::io;\n\n\/\/ Unfortunately, on windows, it looks like msvcrt.dll is silently translating\n\/\/ verbatim paths under the hood to non-verbatim paths! This manifests itself as\n\/\/ gcc looking like it cannot accept paths of the form `\\\\?\\C:\\...`, but the\n\/\/ real bug seems to lie in msvcrt.dll.\n\/\/\n\/\/ Verbatim paths are generally pretty rare, but the implementation of\n\/\/ `fs::canonicalize` currently generates paths of this form, meaning that we're\n\/\/ going to be passing quite a few of these down to gcc, so we need to deal with\n\/\/ this case.\n\/\/\n\/\/ For now we just strip the \"verbatim prefix\" of `\\\\?\\` from the path. 
This\n\/\/ will probably lose information in some cases, but there's not a whole lot\n\/\/ more we can do with a buggy msvcrt...\n\/\/\n\/\/ For some more information, see this comment:\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/25505#issuecomment-102876737\npub fn fix_windows_verbatim_for_gcc(p: &Path) -> PathBuf {\n if !cfg!(windows) {\n return p.to_path_buf()\n }\n let mut components = p.components();\n let prefix = match components.next() {\n Some(path::Component::Prefix(p)) => p,\n _ => return p.to_path_buf(),\n };\n match prefix.kind() {\n path::Prefix::VerbatimDisk(disk) => {\n let mut base = OsString::from(format!(\"{}:\", disk as char));\n base.push(components.as_path());\n PathBuf::from(base)\n }\n path::Prefix::VerbatimUNC(server, share) => {\n let mut base = OsString::from(r\"\\\\\");\n base.push(server);\n base.push(r\"\\\");\n base.push(share);\n base.push(components.as_path());\n PathBuf::from(base)\n }\n _ => p.to_path_buf(),\n }\n}\n\npub enum LinkOrCopy {\n Link,\n Copy\n}\n\n\/\/\/ Copy `p` into `q`, preferring to use hard-linking if possible. If\n\/\/\/ `q` already exists, it is removed first.\n\/\/\/ The result indicates which of the two operations has been performed.\npub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<LinkOrCopy> {\n let p = p.as_ref();\n let q = q.as_ref();\n if q.exists() {\n fs::remove_file(&q)?;\n }\n\n match fs::hard_link(p, q) {\n Ok(()) => Ok(LinkOrCopy::Link),\n Err(_) => {\n match fs::copy(p, q) {\n Ok(_) => Ok(LinkOrCopy::Copy),\n Err(e) => Err(e)\n }\n }\n }\n}\n\n#[derive(Debug)]\npub enum RenameOrCopyRemove {\n Rename,\n CopyRemove\n}\n\n\/\/\/ Rename `p` into `q`, preferring to use `rename` if possible.\n\/\/\/ If `rename` fails (rename may fail for reasons such as crossing filesystem), fallback to copy & remove\npub fn rename_or_copy_remove<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<RenameOrCopyRemove> {\n let p = p.as_ref();\n let q = q.as_ref();\n match fs::rename(p, q) {\n Ok(()) => Ok(RenameOrCopyRemove::Rename),\n Err(_) => {\n match fs::copy(p, q) {\n Ok(_) => {\n fs::remove_file(p)?;\n Ok(RenameOrCopyRemove::CopyRemove)\n },\n Err(e) => Err(e)\n }\n }\n }\n}\n\n\/\/ Like std::fs::create_dir_all, except handles concurrent calls among multiple\n\/\/ threads or processes.\npub fn create_dir_racy(path: &Path) -> io::Result<()> {\n match fs::create_dir(path) {\n Ok(()) => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}\n Err(e) => return Err(e),\n }\n match path.parent() {\n Some(p) => try!(create_dir_racy(p)),\n None => return Err(io::Error::new(io::ErrorKind::Other,\n \"failed to create whole tree\")),\n }\n match fs::create_dir(path) {\n Ok(()) => Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()),\n Err(e) => Err(e),\n }\n}\n<commit_msg>run rustfmt for librustc\/util\/fs.rs<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::{self, Path, PathBuf};\nuse std::ffi::OsString;\nuse std::fs;\nuse std::io;\n\n\/\/ Unfortunately, on windows, it looks like msvcrt.dll is silently translating\n\/\/ verbatim paths under the hood to non-verbatim paths! This manifests itself as\n\/\/ gcc looking like it cannot accept paths of the form `\\\\?\\C:\\...`, but the\n\/\/ real bug seems to lie in msvcrt.dll.\n\/\/\n\/\/ Verbatim paths are generally pretty rare, but the implementation of\n\/\/ `fs::canonicalize` currently generates paths of this form, meaning that we're\n\/\/ going to be passing quite a few of these down to gcc, so we need to deal with\n\/\/ this case.\n\/\/\n\/\/ For now we just strip the \"verbatim prefix\" of `\\\\?\\` from the path. This\n\/\/ will probably lose information in some cases, but there's not a whole lot\n\/\/ more we can do with a buggy msvcrt...\n\/\/\n\/\/ For some more information, see this comment:\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/25505#issuecomment-102876737\npub fn fix_windows_verbatim_for_gcc(p: &Path) -> PathBuf {\n if !cfg!(windows) {\n return p.to_path_buf();\n }\n let mut components = p.components();\n let prefix = match components.next() {\n Some(path::Component::Prefix(p)) => p,\n _ => return p.to_path_buf(),\n };\n match prefix.kind() {\n path::Prefix::VerbatimDisk(disk) => {\n let mut base = OsString::from(format!(\"{}:\", disk as char));\n base.push(components.as_path());\n PathBuf::from(base)\n }\n path::Prefix::VerbatimUNC(server, share) => {\n let mut base = OsString::from(r\"\\\\\");\n base.push(server);\n base.push(r\"\\\");\n base.push(share);\n base.push(components.as_path());\n PathBuf::from(base)\n }\n _ => p.to_path_buf(),\n }\n}\n\npub enum LinkOrCopy {\n Link,\n Copy,\n}\n\n\/\/\/ Copy `p` into `q`, preferring to use hard-linking if possible. 
If\n\/\/\/ `q` already exists, it is removed first.\n\/\/\/ The result indicates which of the two operations has been performed.\npub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<LinkOrCopy> {\n let p = p.as_ref();\n let q = q.as_ref();\n if q.exists() {\n fs::remove_file(&q)?;\n }\n\n match fs::hard_link(p, q) {\n Ok(()) => Ok(LinkOrCopy::Link),\n Err(_) => {\n match fs::copy(p, q) {\n Ok(_) => Ok(LinkOrCopy::Copy),\n Err(e) => Err(e),\n }\n }\n }\n}\n\n#[derive(Debug)]\npub enum RenameOrCopyRemove {\n Rename,\n CopyRemove,\n}\n\n\/\/\/ Rename `p` into `q`, preferring to use `rename` if possible.\n\/\/\/ If `rename` fails (rename may fail for reasons such as crossing\n\/\/\/ filesystem), fallback to copy & remove\npub fn rename_or_copy_remove<P: AsRef<Path>, Q: AsRef<Path>>(p: P,\n q: Q)\n -> io::Result<RenameOrCopyRemove> {\n let p = p.as_ref();\n let q = q.as_ref();\n match fs::rename(p, q) {\n Ok(()) => Ok(RenameOrCopyRemove::Rename),\n Err(_) => {\n match fs::copy(p, q) {\n Ok(_) => {\n fs::remove_file(p)?;\n Ok(RenameOrCopyRemove::CopyRemove)\n }\n Err(e) => Err(e),\n }\n }\n }\n}\n\n\/\/ Like std::fs::create_dir_all, except handles concurrent calls among multiple\n\/\/ threads or processes.\npub fn create_dir_racy(path: &Path) -> io::Result<()> {\n match fs::create_dir(path) {\n Ok(()) => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}\n Err(e) => return Err(e),\n }\n match path.parent() {\n Some(p) => try!(create_dir_racy(p)),\n None => return Err(io::Error::new(io::ErrorKind::Other, \"failed to create whole tree\")),\n }\n match fs::create_dir(path) {\n Ok(()) => Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()),\n Err(e) => Err(e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use a single transaction per journey observed.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ignore cancelReason for now.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse attr::{AttrMetaMethods, HasAttrs};\nuse errors::Handler;\nuse feature_gate::GatedCfgAttr;\nuse fold::Folder;\nuse {ast, fold, attr};\nuse codemap::{Spanned, respan};\nuse parse::token;\nuse ptr::P;\n\nuse util::small_vector::SmallVector;\n\npub trait CfgFolder: fold::Folder {\n \/\/ Check if a node with the given attributes is in this configuration.\n fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool;\n\n \/\/ Update a node before checking if it is in this configuration (used to implement `cfg_attr`).\n fn process_attrs<T: HasAttrs>(&mut self, node: T) -> T { node }\n\n \/\/ Visit attributes on expression and statements (but not attributes on items in blocks).\n fn visit_stmt_or_expr_attrs(&mut self, _attrs: &[ast::Attribute]) {}\n\n \/\/ Visit unremovable (non-optional) expressions -- c.f. 
`fold_expr` vs `fold_opt_expr`.\n fn visit_unremovable_expr(&mut self, _expr: &ast::Expr) {}\n\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {\n let node = self.process_attrs(node);\n if self.in_cfg(node.attrs()) { Some(node) } else { None }\n }\n}\n\n\/\/\/ A folder that strips out items that do not belong in the current\n\/\/\/ configuration.\npub struct StripUnconfigured<'a> {\n diag: CfgDiagReal<'a, 'a>,\n should_test: bool,\n config: &'a ast::CrateConfig,\n}\n\nimpl<'a> StripUnconfigured<'a> {\n pub fn new(config: &'a ast::CrateConfig,\n should_test: bool,\n diagnostic: &'a Handler,\n feature_gated_cfgs: &'a mut Vec<GatedCfgAttr>)\n -> Self {\n StripUnconfigured {\n config: config,\n should_test: should_test,\n diag: CfgDiagReal { diag: diagnostic, feature_gated_cfgs: feature_gated_cfgs },\n }\n }\n\n fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {\n if !attr.check_name(\"cfg_attr\") {\n return Some(attr);\n }\n\n let attr_list = match attr.meta_item_list() {\n Some(attr_list) => attr_list,\n None => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {\n (2, Some(cfg), Some(mi)) => (cfg, mi),\n _ => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n\n if attr::cfg_matches(self.config, &cfg, &mut self.diag) {\n Some(respan(mi.span, ast::Attribute_ {\n id: attr::mk_attr_id(),\n style: attr.node.style,\n value: mi.clone(),\n is_sugared_doc: false,\n }))\n } else {\n None\n }\n }\n}\n\nimpl<'a> CfgFolder for StripUnconfigured<'a> {\n \/\/ Determine if an item should be translated in the current crate\n \/\/ configuration based on the item's attributes\n fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {\n attrs.iter().all(|attr| {\n \/\/ When not compiling with --test we should not compile the #[test] functions\n if !self.should_test && is_test_or_bench(attr) {\n return false;\n }\n\n let mis = match attr.node.value.node {\n ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis,\n _ => return true\n };\n\n if mis.len() != 1 {\n self.diag.emit_error(|diagnostic| {\n diagnostic.span_err(attr.span, \"expected 1 cfg-pattern\");\n });\n return true;\n }\n\n attr::cfg_matches(self.config, &mis[0], &mut self.diag)\n })\n }\n\n fn process_attrs<T: HasAttrs>(&mut self, node: T) -> T {\n node.map_attrs(|attrs| {\n attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect()\n })\n }\n\n fn visit_stmt_or_expr_attrs(&mut self, attrs: &[ast::Attribute]) {\n \/\/ flag the offending attributes\n for attr in attrs.iter() {\n self.diag.feature_gated_cfgs.push(GatedCfgAttr::GatedAttr(attr.span));\n }\n }\n\n fn visit_unremovable_expr(&mut self, expr: &ast::Expr) {\n if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a) || is_test_or_bench(a)) {\n let msg = \"removing an expression is not supported in this position\";\n self.diag.diag.span_err(attr.span, msg);\n }\n }\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(diagnostic: &Handler, krate: ast::Crate, should_test: bool,\n feature_gated_cfgs: &mut Vec<GatedCfgAttr>)\n -> ast::Crate\n{\n let config = &krate.config.clone();\n StripUnconfigured::new(config, should_test, diagnostic, 
feature_gated_cfgs).fold_crate(krate)\n}\n\nimpl<T: CfgFolder> fold::Folder for T {\n fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {\n ast::ForeignMod {\n abi: foreign_mod.abi,\n items: foreign_mod.items.into_iter().filter_map(|item| {\n self.configure(item).map(|item| fold::noop_fold_foreign_item(item, self))\n }).collect(),\n }\n }\n\n fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {\n let fold_struct = |this: &mut Self, vdata| match vdata {\n ast::VariantData::Struct(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Struct(fields.collect(), id)\n }\n ast::VariantData::Tuple(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Tuple(fields.collect(), id)\n }\n ast::VariantData::Unit(id) => ast::VariantData::Unit(id)\n };\n\n let item = match item {\n ast::ItemKind::Struct(def, generics) => {\n ast::ItemKind::Struct(fold_struct(self, def), generics)\n }\n ast::ItemKind::Enum(def, generics) => {\n let variants = def.variants.into_iter().filter_map(|v| {\n self.configure(v).map(|v| {\n Spanned {\n node: ast::Variant_ {\n name: v.node.name,\n attrs: v.node.attrs,\n data: fold_struct(self, v.node.data),\n disr_expr: v.node.disr_expr,\n },\n span: v.span\n }\n })\n });\n ast::ItemKind::Enum(ast::EnumDef {\n variants: variants.collect(),\n }, generics)\n }\n item => item,\n };\n\n fold::noop_fold_item_kind(item, self)\n }\n\n fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {\n self.visit_stmt_or_expr_attrs(expr.attrs());\n \/\/ If an expr is valid to cfg away it will have been removed by the\n \/\/ outer stmt or expression folder before descending in here.\n \/\/ Anything else is always required, and thus has to error out\n \/\/ in case of a cfg attr.\n \/\/\n \/\/ NB: This is intentionally not part of the fold_expr() function\n \/\/ in order for fold_opt_expr() to be able to avoid this check\n self.visit_unremovable_expr(&expr);\n let expr = self.process_attrs(expr);\n fold_expr(self, expr)\n }\n\n fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {\n self.configure(expr).map(|expr| fold_expr(self, expr))\n }\n\n fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector<ast::Stmt> {\n let is_item = match stmt.node {\n ast::StmtKind::Decl(ref decl, _) => match decl.node {\n ast::DeclKind::Item(_) => true,\n _ => false,\n },\n _ => false,\n };\n\n \/\/ avoid calling `visit_stmt_or_expr_attrs` on items\n if !is_item {\n self.visit_stmt_or_expr_attrs(stmt.attrs());\n }\n\n self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {\n fold::noop_fold_mac(mac, self)\n }\n\n fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {\n self.configure(item).map(|item| fold::noop_fold_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVector<ast::ImplItem> {\n self.configure(item).map(|item| fold::noop_fold_impl_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVector<ast::TraitItem> {\n self.configure(item).map(|item| fold::noop_fold_trait_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal {\n \/\/ Don't configure interpolated AST (c.f. 
#34171).\n \/\/ Interpolated AST will get configured once the surrounding tokens are parsed.\n nt\n }\n}\n\nfn fold_expr<F: CfgFolder>(folder: &mut F, expr: P<ast::Expr>) -> P<ast::Expr> {\n expr.map(|ast::Expr {id, span, node, attrs}| {\n fold::noop_fold_expr(ast::Expr {\n id: id,\n node: match node {\n ast::ExprKind::Match(m, arms) => {\n ast::ExprKind::Match(m, arms.into_iter()\n .filter_map(|a| folder.configure(a))\n .collect())\n }\n _ => node\n },\n span: span,\n attrs: attrs,\n }, folder)\n })\n}\n\nfn is_cfg(attr: &ast::Attribute) -> bool {\n attr.check_name(\"cfg\")\n}\n\nfn is_test_or_bench(attr: &ast::Attribute) -> bool {\n attr.check_name(\"test\") || attr.check_name(\"bench\")\n}\n\npub trait CfgDiag {\n fn emit_error<F>(&mut self, f: F) where F: FnMut(&Handler);\n fn flag_gated<F>(&mut self, f: F) where F: FnMut(&mut Vec<GatedCfgAttr>);\n}\n\npub struct CfgDiagReal<'a, 'b> {\n pub diag: &'a Handler,\n pub feature_gated_cfgs: &'b mut Vec<GatedCfgAttr>,\n}\n\nimpl<'a, 'b> CfgDiag for CfgDiagReal<'a, 'b> {\n fn emit_error<F>(&mut self, mut f: F) where F: FnMut(&Handler) {\n f(self.diag)\n }\n fn flag_gated<F>(&mut self, mut f: F) where F: FnMut(&mut Vec<GatedCfgAttr>) {\n f(self.feature_gated_cfgs)\n }\n}\n<commit_msg>Refactor away the `CfgFolder` trait.<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse attr::{AttrMetaMethods, HasAttrs};\nuse errors::Handler;\nuse feature_gate::GatedCfgAttr;\nuse fold::Folder;\nuse {ast, fold, attr};\nuse codemap::{Spanned, respan};\nuse parse::token;\nuse ptr::P;\n\nuse util::small_vector::SmallVector;\n\n\/\/\/ A folder that strips out items that do not belong in the current configuration.\npub struct StripUnconfigured<'a> {\n diag: CfgDiagReal<'a, 'a>,\n should_test: bool,\n config: &'a ast::CrateConfig,\n}\n\nimpl<'a> StripUnconfigured<'a> {\n pub fn new(config: &'a ast::CrateConfig,\n should_test: bool,\n diagnostic: &'a Handler,\n feature_gated_cfgs: &'a mut Vec<GatedCfgAttr>)\n -> Self {\n StripUnconfigured {\n config: config,\n should_test: should_test,\n diag: CfgDiagReal { diag: diagnostic, feature_gated_cfgs: feature_gated_cfgs },\n }\n }\n\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {\n let node = self.process_cfg_attrs(node);\n if self.in_cfg(node.attrs()) { Some(node) } else { None }\n }\n\n fn process_cfg_attrs<T: HasAttrs>(&mut self, node: T) -> T {\n node.map_attrs(|attrs| {\n attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect()\n })\n }\n\n fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {\n if !attr.check_name(\"cfg_attr\") {\n return Some(attr);\n }\n\n let attr_list = match attr.meta_item_list() {\n Some(attr_list) => attr_list,\n None => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {\n (2, Some(cfg), Some(mi)) => (cfg, mi),\n _ => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n 
self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n\n if attr::cfg_matches(self.config, &cfg, &mut self.diag) {\n Some(respan(mi.span, ast::Attribute_ {\n id: attr::mk_attr_id(),\n style: attr.node.style,\n value: mi.clone(),\n is_sugared_doc: false,\n }))\n } else {\n None\n }\n }\n\n \/\/ Determine if a node with the given attributes should be included in this configuation.\n fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {\n attrs.iter().all(|attr| {\n \/\/ When not compiling with --test we should not compile the #[test] functions\n if !self.should_test && is_test_or_bench(attr) {\n return false;\n }\n\n let mis = match attr.node.value.node {\n ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis,\n _ => return true\n };\n\n if mis.len() != 1 {\n self.diag.emit_error(|diagnostic| {\n diagnostic.span_err(attr.span, \"expected 1 cfg-pattern\");\n });\n return true;\n }\n\n attr::cfg_matches(self.config, &mis[0], &mut self.diag)\n })\n }\n\n \/\/ Visit attributes on expression and statements (but not attributes on items in blocks).\n fn visit_stmt_or_expr_attrs(&mut self, attrs: &[ast::Attribute]) {\n \/\/ flag the offending attributes\n for attr in attrs.iter() {\n self.diag.feature_gated_cfgs.push(GatedCfgAttr::GatedAttr(attr.span));\n }\n }\n\n \/\/ Visit unremovable (non-optional) expressions -- c.f. `fold_expr` vs `fold_opt_expr`.\n fn visit_unremovable_expr(&mut self, expr: &ast::Expr) {\n if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a) || is_test_or_bench(a)) {\n let msg = \"removing an expression is not supported in this position\";\n self.diag.diag.span_err(attr.span, msg);\n }\n }\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(diagnostic: &Handler, krate: ast::Crate, should_test: bool,\n feature_gated_cfgs: &mut Vec<GatedCfgAttr>)\n -> ast::Crate\n{\n let config = &krate.config.clone();\n StripUnconfigured::new(config, should_test, diagnostic, feature_gated_cfgs).fold_crate(krate)\n}\n\nimpl<'a> fold::Folder for StripUnconfigured<'a> {\n fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {\n ast::ForeignMod {\n abi: foreign_mod.abi,\n items: foreign_mod.items.into_iter().filter_map(|item| {\n self.configure(item).map(|item| fold::noop_fold_foreign_item(item, self))\n }).collect(),\n }\n }\n\n fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {\n let fold_struct = |this: &mut Self, vdata| match vdata {\n ast::VariantData::Struct(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Struct(fields.collect(), id)\n }\n ast::VariantData::Tuple(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Tuple(fields.collect(), id)\n }\n ast::VariantData::Unit(id) => ast::VariantData::Unit(id)\n };\n\n let item = match item {\n ast::ItemKind::Struct(def, generics) => {\n ast::ItemKind::Struct(fold_struct(self, def), generics)\n }\n ast::ItemKind::Enum(def, generics) => {\n let variants = def.variants.into_iter().filter_map(|v| {\n self.configure(v).map(|v| {\n Spanned {\n node: ast::Variant_ {\n name: v.node.name,\n attrs: v.node.attrs,\n data: fold_struct(self, v.node.data),\n disr_expr: v.node.disr_expr,\n },\n span: v.span\n }\n })\n });\n ast::ItemKind::Enum(ast::EnumDef {\n variants: variants.collect(),\n }, generics)\n }\n item => item,\n };\n\n 
fold::noop_fold_item_kind(item, self)\n }\n\n fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {\n self.visit_stmt_or_expr_attrs(expr.attrs());\n \/\/ If an expr is valid to cfg away it will have been removed by the\n \/\/ outer stmt or expression folder before descending in here.\n \/\/ Anything else is always required, and thus has to error out\n \/\/ in case of a cfg attr.\n \/\/\n \/\/ NB: This is intentionally not part of the fold_expr() function\n \/\/ in order for fold_opt_expr() to be able to avoid this check\n self.visit_unremovable_expr(&expr);\n let expr = self.process_cfg_attrs(expr);\n fold_expr(self, expr)\n }\n\n fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {\n self.configure(expr).map(|expr| fold_expr(self, expr))\n }\n\n fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector<ast::Stmt> {\n let is_item = match stmt.node {\n ast::StmtKind::Decl(ref decl, _) => match decl.node {\n ast::DeclKind::Item(_) => true,\n _ => false,\n },\n _ => false,\n };\n\n \/\/ avoid calling `visit_stmt_or_expr_attrs` on items\n if !is_item {\n self.visit_stmt_or_expr_attrs(stmt.attrs());\n }\n\n self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {\n fold::noop_fold_mac(mac, self)\n }\n\n fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {\n self.configure(item).map(|item| fold::noop_fold_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVector<ast::ImplItem> {\n self.configure(item).map(|item| fold::noop_fold_impl_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVector<ast::TraitItem> {\n self.configure(item).map(|item| fold::noop_fold_trait_item(item, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal {\n \/\/ Don't configure interpolated AST (c.f. 
#34171).\n \/\/ Interpolated AST will get configured once the surrounding tokens are parsed.\n nt\n }\n}\n\nfn fold_expr(folder: &mut StripUnconfigured, expr: P<ast::Expr>) -> P<ast::Expr> {\n expr.map(|ast::Expr {id, span, node, attrs}| {\n fold::noop_fold_expr(ast::Expr {\n id: id,\n node: match node {\n ast::ExprKind::Match(m, arms) => {\n ast::ExprKind::Match(m, arms.into_iter()\n .filter_map(|a| folder.configure(a))\n .collect())\n }\n _ => node\n },\n span: span,\n attrs: attrs,\n }, folder)\n })\n}\n\nfn is_cfg(attr: &ast::Attribute) -> bool {\n attr.check_name(\"cfg\")\n}\n\nfn is_test_or_bench(attr: &ast::Attribute) -> bool {\n attr.check_name(\"test\") || attr.check_name(\"bench\")\n}\n\npub trait CfgDiag {\n fn emit_error<F>(&mut self, f: F) where F: FnMut(&Handler);\n fn flag_gated<F>(&mut self, f: F) where F: FnMut(&mut Vec<GatedCfgAttr>);\n}\n\npub struct CfgDiagReal<'a, 'b> {\n pub diag: &'a Handler,\n pub feature_gated_cfgs: &'b mut Vec<GatedCfgAttr>,\n}\n\nimpl<'a, 'b> CfgDiag for CfgDiagReal<'a, 'b> {\n fn emit_error<F>(&mut self, mut f: F) where F: FnMut(&Handler) {\n f(self.diag)\n }\n fn flag_gated<F>(&mut self, mut f: F) where F: FnMut(&mut Vec<GatedCfgAttr>) {\n f(self.feature_gated_cfgs)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>kdtree: add Programming Assignment 5: Kd-Trees<commit_after>\/\/ http:\/\/coursera.cs.princeton.edu\/algs4\/assignments\/kdtree.html\nextern crate algs4;\nextern crate rand;\n\nuse std::io::prelude::*;\nuse std::io;\n\nuse algs4::geometric_search::primitive::{Point2D, PointSet, RectHV};\nuse algs4::symbol_tables::ST;\nuse algs4::geometric_search::kd_tree::KdTree;\n\n\nfn run() {\n let mut pset = PointSet::new();\n let mut kt: KdTree<Point2D,()> = KdTree::new();\n\n let npoints = io::BufReader::new(io::stdin())\n .lines()\n .map(|line| line.unwrap().split(' ')\n .map(|xy| xy.parse::<f64>().unwrap())\n .collect::<Vec<f64>>())\n .map(|pt| {\n pset.insert(Point2D::new(pt[0], pt[1]));\n kt.insert(Point2D::new(pt[0], pt[1]));\n })\n .count();\n\n println!(\"got {} points\", npoints);\n let point: Point2D = Point2D::new(0.9, 0.6);\n let rect = RectHV::new(0.4, 0.4, 0.6, 0.6);\n\n println!(\"PSet in Rect: {}\", pset.range_count(rect));\n println!(\"KdTree in Rect: {}\", kt.range_count(rect));\n \/\/println!(\"tree => {:?}\", kt);\n println!(\"nearest to {}\", point);\n println!(\"PointSet => {:?} d = {}\", pset.nearest(point), pset.nearest(point).unwrap().distance_to(point));\n println!(\"KdTree => {:?} d = {}\", kt.nearest(point), kt.nearest(point).unwrap().distance_to(point));\n}\n\n\nfn main() {\n run();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>finished demacrofying integer_utils_tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Optimise scene drawing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] bin\/core\/init: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test for RcMut being Const and Owned, tests #7017 being fixed<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern mod extra;\n\nfn o<T: Owned>(_: &T) {}\nfn c<T: Const>(_: &T) {}\n\nfn main() {\n let x = extra::rc::rc_mut_from_owned(0);\n o(&x); \/\/~ ERROR instantiating a type parameter with an incompatible type `extra::rc::RcMut<int>`, which does not fulfill `Owned`\n c(&x); \/\/~ ERROR instantiating a type parameter with an incompatible type `extra::rc::RcMut<int>`, which does not fulfill `Const`\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore(main): changed logging<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! A buffer is a memory location accessible to the video card.\n\/\/!\n\/\/! The purpose of buffers is to serve as a space where the GPU can read from or write data to.\n\/\/! It can contain a list of vertices, indices, uniform data, etc.\n\/\/!\n\/\/! # Buffers management in glium\n\/\/!\n\/\/! There are three levels of abstraction in glium:\n\/\/!\n\/\/! - A `Buffer` corresponds to an OpenGL buffer object. This type is not public.\n\/\/! - A `Buffer` corresponds to a part of a `Buffer`. One buffer can contain one or multiple\n\/\/! subbuffers.\n\/\/! - The `VertexBuffer`, `IndexBuffer`, `UniformBuffer`, `PixelBuffer`, ... types are\n\/\/! abstractions over a subbuffer indicating their specific purpose. They implement `Deref`\n\/\/! for the subbuffer. These types are in the `vertex`, `index`, ... modules.\n\/\/!\npub use self::view::{Buffer, BufferAny, BufferMutSlice};\npub use self::view::{BufferSlice, BufferAnySlice};\npub use self::alloc::{Mapping, WriteMapping, ReadMapping, ReadError, is_buffer_read_supported};\npub use self::fences::Inserter;\n\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::Buffer as BufferView;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferSlice as BufferViewSlice;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferMutSlice as BufferViewMutSlice;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferAny as BufferViewAny;\n\/\/\/ DEPRECATED. 
Only here for backward compatibility.\npub use self::view::BufferAnySlice as BufferViewAnySlice;\n\nuse gl;\nuse std::mem;\nuse std::slice;\n\nmod alloc;\nmod fences;\nmod view;\n\n\/\/\/ Trait for types of data that can be put inside buffers.\npub unsafe trait Content {\n \/\/\/ A type that holds a sized version of the content.\n type Owned;\n\n \/\/\/ Prepares an output buffer, then turns this buffer into an `Owned`.\n fn read<F, E>(size: usize, F) -> Result<Self::Owned, E>\n where F: FnOnce(&mut Self) -> Result<(), E>;\n\n \/\/\/ Returns the size of each element.\n fn get_elements_size() -> usize;\n\n \/\/\/ Produces a pointer to the data.\n fn to_void_ptr(&self) -> *const ();\n\n \/\/\/ Builds a pointer to this type from a raw pointer.\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut Self>;\n\n \/\/\/ Returns true if the size is suitable to store a type like this.\n fn is_size_suitable(usize) -> bool;\n}\n\nunsafe impl<T> Content for T where T: Copy {\n type Owned = T;\n\n #[inline]\n fn read<F, E>(size: usize, f: F) -> Result<T, E> where F: FnOnce(&mut T) -> Result<(), E> {\n assert!(size == mem::size_of::<T>());\n let mut value = unsafe { mem::uninitialized() };\n try!(f(&mut value));\n Ok(value)\n }\n\n #[inline]\n fn get_elements_size() -> usize {\n mem::size_of::<T>()\n }\n\n #[inline]\n fn to_void_ptr(&self) -> *const () {\n self as *const T as *const ()\n }\n\n #[inline]\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut T> {\n if size != mem::size_of::<T>() {\n return None;\n }\n\n Some(ptr as *mut T)\n }\n\n #[inline]\n fn is_size_suitable(size: usize) -> bool {\n size == mem::size_of::<T>()\n }\n}\n\nunsafe impl<T> Content for [T] where T: Copy {\n type Owned = Vec<T>;\n\n #[inline]\n fn read<F, E>(size: usize, f: F) -> Result<Vec<T>, E>\n where F: FnOnce(&mut [T]) -> Result<(), E>\n {\n assert!(size % mem::size_of::<T>() == 0);\n let len = size \/ mem::size_of::<T>();\n let mut value = Vec::with_capacity(len);\n unsafe { value.set_len(len) };\n try!(f(&mut value));\n Ok(value)\n }\n\n #[inline]\n fn get_elements_size() -> usize {\n mem::size_of::<T>()\n }\n\n #[inline]\n fn to_void_ptr(&self) -> *const () {\n &self[0] as *const T as *const ()\n }\n\n #[inline]\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut [T]> {\n if size % mem::size_of::<T>() != 0 {\n return None;\n }\n\n let ptr = ptr as *mut T;\n let size = size \/ mem::size_of::<T>();\n Some(unsafe { slice::from_raw_parts_mut(&mut *ptr, size) as *mut [T] })\n }\n\n #[inline]\n fn is_size_suitable(size: usize) -> bool {\n size % mem::size_of::<T>() == 0\n }\n}\n\n\/\/\/ Error that can happen when creating a buffer.\n#[derive(Debug, Copy, Clone)]\npub enum BufferCreationError {\n \/\/\/ Not enough memory to create the buffer.\n OutOfMemory,\n\n \/\/\/ This type of buffer is not supported.\n BufferTypeNotSupported,\n}\n\n\/\/\/ How the buffer is created.\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum BufferMode {\n \/\/\/ This is the default mode suitable for any usage. 
Will never be slow, will never be fast\n \/\/\/ either.\n \/\/\/\n \/\/\/ Other modes should always be preferred, but you can use this one if you don't know what\n \/\/\/ will happen to the buffer.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with the `GL_DYNAMIC_STORAGE_BIT` flag.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_STATIC_DRAW`.\n \/\/\/\n Default,\n\n \/\/\/ The mode to use when you modify a buffer multiple times per frame. Simiar to `Default` in\n \/\/\/ that it is suitable for most usages.\n \/\/\/\n \/\/\/ Use this if you do a quick succession of modify the buffer, draw, modify, draw, etc. This\n \/\/\/ is something that you shouldn't do by the way.\n \/\/\/\n \/\/\/ With this mode, the OpenGL driver automatically manages the buffer for us. It will try to\n \/\/\/ find the most appropriate storage depending on how we use it. It is guaranteed to never be\n \/\/\/ too slow, but it won't be too fast either.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with the `GL_DYNAMIC_STORAGE_BIT` and\n \/\/\/ `GL_CLIENT_STORAGE_BIT` flags.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_DYNAMIC_DRAW`.\n \/\/\/\n Dynamic,\n\n \/\/\/ Optimized for when you modify a buffer exactly once per frame. You can modify it more than\n \/\/\/ once per frame, but if you modify it too often things will slow down.\n \/\/\/\n \/\/\/ With this mode, glium automatically handles synchronization to prevent the buffer from\n \/\/\/ being access by both the GPU and the CPU simultaneously. If you try to modify the buffer,\n \/\/\/ the execution will block until the GPU has finished using it. For this reason, a quick\n \/\/\/ succession of modifying and drawing using the same buffer will be very slow.\n \/\/\/\n \/\/\/ When using persistent mapping, it is recommended to use triple buffering. This is done by\n \/\/\/ creating a buffer that has three times the capacity that it would normally have. You modify\n \/\/\/ and draw the first third, then modify and draw the second third, then the last part, then\n \/\/\/ go back to the first third, etc.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with `GL_MAP_PERSISTENT_BIT`. Sync fences are automatically\n \/\/\/ managed by glium.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_DYNAMIC_DRAW`.\n \/\/\/\n Persistent,\n\n \/\/\/ Optimized when you will never touch the content of the buffer.\n \/\/\/\n \/\/\/ Immutable buffers should be created once and never touched again. Modifying their content\n \/\/\/ is permitted, but is very slow.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` without any flag. 
Modifications are done by creating\n \/\/\/ temporary buffers and making the GPU copy the data from the temporary buffer to the real\n \/\/\/ one.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_STATIC_DRAW`.\n \/\/\/\n Immutable,\n}\n\nimpl Default for BufferMode {\n fn default() -> BufferMode {\n BufferMode::Default\n }\n}\n\n\/\/\/ Type of a buffer.\n#[doc(hidden)]\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum BufferType {\n ArrayBuffer,\n PixelPackBuffer,\n PixelUnpackBuffer,\n UniformBuffer,\n CopyReadBuffer,\n CopyWriteBuffer,\n AtomicCounterBuffer,\n DispatchIndirectBuffer,\n DrawIndirectBuffer,\n QueryBuffer,\n ShaderStorageBuffer,\n TextureBuffer,\n TransformFeedbackBuffer,\n ElementArrayBuffer,\n}\n\nimpl BufferType {\n fn to_glenum(&self) -> gl::types::GLenum {\n match *self {\n BufferType::ArrayBuffer => gl::ARRAY_BUFFER,\n BufferType::PixelPackBuffer => gl::PIXEL_PACK_BUFFER,\n BufferType::PixelUnpackBuffer => gl::PIXEL_UNPACK_BUFFER,\n BufferType::UniformBuffer => gl::UNIFORM_BUFFER,\n BufferType::CopyReadBuffer => gl::COPY_READ_BUFFER,\n BufferType::CopyWriteBuffer => gl::COPY_WRITE_BUFFER,\n BufferType::AtomicCounterBuffer => gl::ATOMIC_COUNTER_BUFFER,\n BufferType::DispatchIndirectBuffer => gl::DISPATCH_INDIRECT_BUFFER,\n BufferType::DrawIndirectBuffer => gl::DRAW_INDIRECT_BUFFER,\n BufferType::QueryBuffer => gl::QUERY_BUFFER,\n BufferType::ShaderStorageBuffer => gl::SHADER_STORAGE_BUFFER,\n BufferType::TextureBuffer => gl::TEXTURE_BUFFER,\n BufferType::TransformFeedbackBuffer => gl::TRANSFORM_FEEDBACK_BUFFER,\n BufferType::ElementArrayBuffer => gl::ELEMENT_ARRAY_BUFFER,\n }\n }\n}\n<commit_msg>Improve and update the docs of the \"buffer\" module<commit_after>\/\/! A buffer is a memory location accessible to the video card.\n\/\/!\n\/\/! The purpose of buffers is to serve as a space where the GPU can read from or write data to.\n\/\/! It can contain a list of vertices, indices, uniform data, etc.\n\/\/!\n\/\/! # Buffers management in glium\n\/\/!\n\/\/! There are three levels of abstraction in glium:\n\/\/!\n\/\/! - An `Alloc` corresponds to an OpenGL buffer object and is unsafe to use.\n\/\/! This type is not public.\n\/\/! - A `Buffer` wraps around an `Alloc` and provides safety by handling the data type and fences.\n\/\/! - The `VertexBuffer`, `IndexBuffer`, `UniformBuffer`, `PixelBuffer`, etc. types are\n\/\/! abstractions over a `Buffer` indicating their specific purpose. They implement `Deref`\n\/\/! for the `Buffer`. These types are in the `vertex`, `index`, etc. modules.\n\/\/!\n\/\/! # Unsized types\n\/\/!\n\/\/! In order to put some data in a buffer, it must implement the `Content` trait. This trait is\n\/\/! automatically implemented on all `Sized` types and on slices (like `[u8]`). This means that\n\/\/! you can create a `Buffer<Foo>` (if `Foo` is sized) or a `Buffer<[u8]>` for example without\n\/\/! worrying about it.\n\/\/!\n\/\/! However unsized structs don't automatically implement this trait and you must call the\n\/\/! `implement_buffer_content!` macro on them. You must then use the `empty_unsized` constructor.\n\/\/!\n\/\/! ```no_run\n\/\/! # #[macro_use] extern crate glium; fn main() {\n\/\/! # use std::mem;\n\/\/! # use glium::buffer::{BufferType, BufferMode};\n\/\/! # let display: glium::Display = unsafe { mem::uninitialized() };\n\/\/! struct Data {\n\/\/! data: [f32], \/\/ `[f32]` is unsized, therefore `Data` is unsized too\n\/\/! }\n\/\/!\n\/\/! 
implement_buffer_content!(Data); \/\/ without this, you can't put `Data` in a glium buffer\n\/\/!\n\/\/! \/\/ creates a buffer of 64 bytes, which thus holds 8 f32s\n\/\/! let mut buffer = glium::buffer::Buffer::<Data>::empty_unsized(&display, BufferType::UniformBuffer,\n\/\/! 64, BufferMode::Default).unwrap();\n\/\/!\n\/\/! \/\/ you can then write to it like you normally would\n\/\/! buffer.map().data[4] = 2.1;\n\/\/! # }\n\/\/! ```\n\/\/!\npub use self::view::{Buffer, BufferAny, BufferMutSlice};\npub use self::view::{BufferSlice, BufferAnySlice};\npub use self::alloc::{Mapping, WriteMapping, ReadMapping, ReadError, is_buffer_read_supported};\npub use self::fences::Inserter;\n\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::Buffer as BufferView;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferSlice as BufferViewSlice;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferMutSlice as BufferViewMutSlice;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferAny as BufferViewAny;\n\/\/\/ DEPRECATED. Only here for backward compatibility.\npub use self::view::BufferAnySlice as BufferViewAnySlice;\n\nuse gl;\nuse std::mem;\nuse std::slice;\n\nmod alloc;\nmod fences;\nmod view;\n\n\/\/\/ Trait for types of data that can be put inside buffers.\npub unsafe trait Content {\n \/\/\/ A type that holds a sized version of the content.\n type Owned;\n\n \/\/\/ Prepares an output buffer, then turns this buffer into an `Owned`.\n fn read<F, E>(size: usize, F) -> Result<Self::Owned, E>\n where F: FnOnce(&mut Self) -> Result<(), E>;\n\n \/\/\/ Returns the size of each element.\n fn get_elements_size() -> usize;\n\n \/\/\/ Produces a pointer to the data.\n fn to_void_ptr(&self) -> *const ();\n\n \/\/\/ Builds a pointer to this type from a raw pointer.\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut Self>;\n\n \/\/\/ Returns true if the size is suitable to store a type like this.\n fn is_size_suitable(usize) -> bool;\n}\n\nunsafe impl<T> Content for T where T: Copy {\n type Owned = T;\n\n #[inline]\n fn read<F, E>(size: usize, f: F) -> Result<T, E> where F: FnOnce(&mut T) -> Result<(), E> {\n assert!(size == mem::size_of::<T>());\n let mut value = unsafe { mem::uninitialized() };\n try!(f(&mut value));\n Ok(value)\n }\n\n #[inline]\n fn get_elements_size() -> usize {\n mem::size_of::<T>()\n }\n\n #[inline]\n fn to_void_ptr(&self) -> *const () {\n self as *const T as *const ()\n }\n\n #[inline]\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut T> {\n if size != mem::size_of::<T>() {\n return None;\n }\n\n Some(ptr as *mut T)\n }\n\n #[inline]\n fn is_size_suitable(size: usize) -> bool {\n size == mem::size_of::<T>()\n }\n}\n\nunsafe impl<T> Content for [T] where T: Copy {\n type Owned = Vec<T>;\n\n #[inline]\n fn read<F, E>(size: usize, f: F) -> Result<Vec<T>, E>\n where F: FnOnce(&mut [T]) -> Result<(), E>\n {\n assert!(size % mem::size_of::<T>() == 0);\n let len = size \/ mem::size_of::<T>();\n let mut value = Vec::with_capacity(len);\n unsafe { value.set_len(len) };\n try!(f(&mut value));\n Ok(value)\n }\n\n #[inline]\n fn get_elements_size() -> usize {\n mem::size_of::<T>()\n }\n\n #[inline]\n fn to_void_ptr(&self) -> *const () {\n &self[0] as *const T as *const ()\n }\n\n #[inline]\n fn ref_from_ptr<'a>(ptr: *mut (), size: usize) -> Option<*mut [T]> {\n if size % mem::size_of::<T>() != 0 {\n return None;\n }\n\n let ptr = ptr as *mut T;\n let size = 
size \/ mem::size_of::<T>();\n Some(unsafe { slice::from_raw_parts_mut(&mut *ptr, size) as *mut [T] })\n }\n\n #[inline]\n fn is_size_suitable(size: usize) -> bool {\n size % mem::size_of::<T>() == 0\n }\n}\n\n\/\/\/ Error that can happen when creating a buffer.\n#[derive(Debug, Copy, Clone)]\npub enum BufferCreationError {\n \/\/\/ Not enough memory to create the buffer.\n OutOfMemory,\n\n \/\/\/ This type of buffer is not supported.\n BufferTypeNotSupported,\n}\n\n\/\/\/ How the buffer is created.\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum BufferMode {\n \/\/\/ This is the default mode suitable for any usage. Will never be slow, will never be fast\n \/\/\/ either.\n \/\/\/\n \/\/\/ Other modes should always be preferred, but you can use this one if you don't know what\n \/\/\/ will happen to the buffer.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with the `GL_DYNAMIC_STORAGE_BIT` flag.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_STATIC_DRAW`.\n \/\/\/\n Default,\n\n \/\/\/ The mode to use when you modify a buffer multiple times per frame. Simiar to `Default` in\n \/\/\/ that it is suitable for most usages.\n \/\/\/\n \/\/\/ Use this if you do a quick succession of modify the buffer, draw, modify, draw, etc. This\n \/\/\/ is something that you shouldn't do by the way.\n \/\/\/\n \/\/\/ With this mode, the OpenGL driver automatically manages the buffer for us. It will try to\n \/\/\/ find the most appropriate storage depending on how we use it. It is guaranteed to never be\n \/\/\/ too slow, but it won't be too fast either.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with the `GL_DYNAMIC_STORAGE_BIT` and\n \/\/\/ `GL_CLIENT_STORAGE_BIT` flags.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_DYNAMIC_DRAW`.\n \/\/\/\n Dynamic,\n\n \/\/\/ Optimized for when you modify a buffer exactly once per frame. You can modify it more than\n \/\/\/ once per frame, but if you modify it too often things will slow down.\n \/\/\/\n \/\/\/ With this mode, glium automatically handles synchronization to prevent the buffer from\n \/\/\/ being access by both the GPU and the CPU simultaneously. If you try to modify the buffer,\n \/\/\/ the execution will block until the GPU has finished using it. For this reason, a quick\n \/\/\/ succession of modifying and drawing using the same buffer will be very slow.\n \/\/\/\n \/\/\/ When using persistent mapping, it is recommended to use triple buffering. This is done by\n \/\/\/ creating a buffer that has three times the capacity that it would normally have. You modify\n \/\/\/ and draw the first third, then modify and draw the second third, then the last part, then\n \/\/\/ go back to the first third, etc.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` with `GL_MAP_PERSISTENT_BIT`. Sync fences are automatically\n \/\/\/ managed by glium.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_DYNAMIC_DRAW`.\n \/\/\/\n Persistent,\n\n \/\/\/ Optimized when you will never touch the content of the buffer.\n \/\/\/\n \/\/\/ Immutable buffers should be created once and never touched again. Modifying their content\n \/\/\/ is permitted, but is very slow.\n \/\/\/\n \/\/\/ # Implementation\n \/\/\/\n \/\/\/ Tries to use `glBufferStorage` without any flag. 
Modifications are done by creating\n \/\/\/ temporary buffers and making the GPU copy the data from the temporary buffer to the real\n \/\/\/ one.\n \/\/\/\n \/\/\/ If this function is not available, falls back to `glBufferData` with `GL_STATIC_DRAW`.\n \/\/\/\n Immutable,\n}\n\nimpl Default for BufferMode {\n fn default() -> BufferMode {\n BufferMode::Default\n }\n}\n\n\/\/\/ Type of a buffer.\n#[doc(hidden)]\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\npub enum BufferType {\n ArrayBuffer,\n PixelPackBuffer,\n PixelUnpackBuffer,\n UniformBuffer,\n CopyReadBuffer,\n CopyWriteBuffer,\n AtomicCounterBuffer,\n DispatchIndirectBuffer,\n DrawIndirectBuffer,\n QueryBuffer,\n ShaderStorageBuffer,\n TextureBuffer,\n TransformFeedbackBuffer,\n ElementArrayBuffer,\n}\n\nimpl BufferType {\n fn to_glenum(&self) -> gl::types::GLenum {\n match *self {\n BufferType::ArrayBuffer => gl::ARRAY_BUFFER,\n BufferType::PixelPackBuffer => gl::PIXEL_PACK_BUFFER,\n BufferType::PixelUnpackBuffer => gl::PIXEL_UNPACK_BUFFER,\n BufferType::UniformBuffer => gl::UNIFORM_BUFFER,\n BufferType::CopyReadBuffer => gl::COPY_READ_BUFFER,\n BufferType::CopyWriteBuffer => gl::COPY_WRITE_BUFFER,\n BufferType::AtomicCounterBuffer => gl::ATOMIC_COUNTER_BUFFER,\n BufferType::DispatchIndirectBuffer => gl::DISPATCH_INDIRECT_BUFFER,\n BufferType::DrawIndirectBuffer => gl::DRAW_INDIRECT_BUFFER,\n BufferType::QueryBuffer => gl::QUERY_BUFFER,\n BufferType::ShaderStorageBuffer => gl::SHADER_STORAGE_BUFFER,\n BufferType::TextureBuffer => gl::TEXTURE_BUFFER,\n BufferType::TransformFeedbackBuffer => gl::TRANSFORM_FEEDBACK_BUFFER,\n BufferType::ElementArrayBuffer => gl::ELEMENT_ARRAY_BUFFER,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse clap::{Arg, App, SubCommand};\n\nuse libimagutil::cli_validators::is_existing_path;\n\npub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {\n app\n .subcommand(SubCommand::with_name(\"add\")\n .about(\"Add a reference to a file outside of the store\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"path\")\n .long(\"path\")\n .short(\"p\")\n .takes_value(true)\n .required(true)\n .help(\"The path of the file\")\n .validator(is_existing_path)\n .value_name(\"PATH\"))\n .arg(Arg::with_name(\"track-content\")\n .long(\"content-hash\")\n .short(\"C\")\n .takes_value(false)\n .required(false)\n .help(\"Hash the content for the reference\"))\n .arg(Arg::with_name(\"track-permissions\")\n .long(\"permission-tracking\")\n .short(\"P\")\n .takes_value(false)\n .required(false)\n .help(\"Rememeber the permissions of the referenced file\"))\n )\n\n .subcommand(SubCommand::with_name(\"remove\")\n .about(\"Remove a reference\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"hash\")\n .long(\"hash\")\n .short(\"h\")\n .takes_value(true)\n .required(true)\n .help(\"Remove the reference with this hash\")\n .value_name(\"HASH\"))\n\n .arg(Arg::with_name(\"yes\")\n .long(\"yes\")\n .short(\"y\")\n .help(\"Don't ask whether this really should be done\"))\n )\n\n .subcommand(SubCommand::with_name(\"list\")\n .about(\"List references in the store\")\n .version(\"0.1\")\n\n .arg(Arg::with_name(\"check-dead\")\n .long(\"check-dead\")\n .short(\"d\")\n .help(\"Check each reference whether it is dead\"))\n\n .arg(Arg::with_name(\"check-changed\")\n .long(\"check-changed\")\n .short(\"c\")\n .help(\"Check whether a reference had changed (content or permissions)\"))\n\n .arg(Arg::with_name(\"check-changed-content\")\n .long(\"check-changed-content\")\n .short(\"C\")\n .help(\"Check whether the content of the referenced file changed\"))\n\n .arg(Arg::with_name(\"check-changed-permissions\")\n .long(\"check-changed-perms\")\n .short(\"P\")\n .help(\"Check whether the permissions of the referenced file changed\"))\n\n )\n}\n<commit_msg>Use positional args<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse clap::{Arg, App, SubCommand};\n\nuse libimagutil::cli_validators::is_existing_path;\n\npub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {\n app\n .subcommand(SubCommand::with_name(\"add\")\n .about(\"Add a reference to a file outside of the store\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"path\")\n .index(1)\n .takes_value(true)\n .required(true)\n .help(\"The path of the file\")\n .validator(is_existing_path)\n .value_name(\"PATH\"))\n .arg(Arg::with_name(\"track-content\")\n .long(\"content-hash\")\n .short(\"C\")\n .takes_value(false)\n .required(false)\n .help(\"Hash the content for the reference\"))\n .arg(Arg::with_name(\"track-permissions\")\n .long(\"permission-tracking\")\n .short(\"P\")\n .takes_value(false)\n .required(false)\n .help(\"Rememeber the permissions of the referenced file\"))\n )\n\n .subcommand(SubCommand::with_name(\"remove\")\n .about(\"Remove a reference\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"hash\")\n .index(1)\n .takes_value(true)\n .required(true)\n .help(\"Remove the reference with this hash\")\n .value_name(\"HASH\"))\n\n .arg(Arg::with_name(\"yes\")\n .long(\"yes\")\n .short(\"y\")\n .help(\"Don't ask whether this really should be done\"))\n )\n\n .subcommand(SubCommand::with_name(\"list\")\n .about(\"List references in the store\")\n .version(\"0.1\")\n\n .arg(Arg::with_name(\"check-dead\")\n .long(\"check-dead\")\n .short(\"d\")\n .help(\"Check each reference whether it is dead\"))\n\n .arg(Arg::with_name(\"check-changed\")\n .long(\"check-changed\")\n .short(\"c\")\n .help(\"Check whether a reference had changed (content or permissions)\"))\n\n .arg(Arg::with_name(\"check-changed-content\")\n .long(\"check-changed-content\")\n .short(\"C\")\n .help(\"Check whether the content of the referenced file changed\"))\n\n .arg(Arg::with_name(\"check-changed-permissions\")\n .long(\"check-changed-perms\")\n .short(\"P\")\n .help(\"Check whether the permissions of the referenced file changed\"))\n\n )\n}\n<|endoftext|>"} {"text":"<commit_before>#[cfg(test)]\nuse ::{card, deck, hand};\n\n#[test]\nfn create_hand() {\n let mut deck = deck::Deck::new();\n let mut cards: Vec<card::Card> = vec![];\n for _i in 0..5 {\n cards.push(deck.draw().unwrap());\n }\n let hand = hand::Hand::new(cards);\n\n assert_eq!(hand.cards.len(), 5);\n}\n\n#[test]\nfn has_flush() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Six, card::Suit::Hearts);\n let c = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Queen, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Two, card::Suit::Hearts);\n let cards: Vec<card::Card> = vec![a, b, c, d, e];\n let hand: hand::Hand = hand::Hand::new(cards);\n\n assert_eq!(hand.has_flush(), true);\n}\n\n#[test]\nfn no_flush() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Six, card::Suit::Spades);\n let c = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Queen, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Two, card::Suit::Hearts);\n let cards: Vec<card::Card> = vec![a, b, c, d, e];\n let hand: hand::Hand 
= hand::Hand::new(cards);\n\n assert_eq!(hand.has_flush(), false);\n}\n\n#[test]\nfn high_card_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::HighCard(a);\n\n assert_eq!(poker_hand.raw_value(), 0);\n}\n\n#[test]\nfn one_pair_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let poker_hand = hand::PokerHand::OnePair((a, b));\n\n assert_eq!(poker_hand.raw_value(), 1);\n}\n\n#[test]\nfn two_pair_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let c = card::Card::new(card::Value::King, card::Suit::Spades);\n let d = card::Card::new(card::Value::King, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::TwoPair((a, b), (c, d));\n\n assert_eq!(poker_hand.raw_value(), 2);\n}\n<commit_msg>Added unit tests for all raw values of PokerHands<commit_after>#[cfg(test)]\nuse ::{card, deck, hand};\n\n#[test]\nfn create_hand() {\n let mut deck = deck::Deck::new();\n let mut cards: Vec<card::Card> = vec![];\n for _i in 0..5 {\n cards.push(deck.draw().unwrap());\n }\n let hand = hand::Hand::new(cards);\n\n assert_eq!(hand.cards.len(), 5);\n}\n\n#[test]\nfn has_flush() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Six, card::Suit::Hearts);\n let c = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Queen, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Two, card::Suit::Hearts);\n let cards: Vec<card::Card> = vec![a, b, c, d, e];\n let hand: hand::Hand = hand::Hand::new(cards);\n\n assert_eq!(hand.has_flush(), true);\n}\n\n#[test]\nfn no_flush() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Six, card::Suit::Spades);\n let c = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Queen, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Two, card::Suit::Hearts);\n let cards: Vec<card::Card> = vec![a, b, c, d, e];\n let hand: hand::Hand = hand::Hand::new(cards);\n\n assert_eq!(hand.has_flush(), false);\n}\n\n#[test]\nfn high_card_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::HighCard(a);\n\n assert_eq!(poker_hand.raw_value(), 0);\n}\n\n#[test]\nfn one_pair_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let poker_hand = hand::PokerHand::OnePair((a, b));\n\n assert_eq!(poker_hand.raw_value(), 1);\n}\n\n#[test]\nfn two_pair_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let c = card::Card::new(card::Value::King, card::Suit::Spades);\n let d = card::Card::new(card::Value::King, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::TwoPair((a, b), (c, d));\n\n assert_eq!(poker_hand.raw_value(), 2);\n}\n\n#[test]\nfn trips_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let c = card::Card::new(card::Value::Three, card::Suit::Spades);\n let poker_hand = hand::PokerHand::Trips((a, b, c));\n\n assert_eq!(poker_hand.raw_value(), 
3);\n}\n\n#[test]\nfn straight_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Four, card::Suit::Diamonds);\n let c = card::Card::new(card::Value::Five, card::Suit::Spades);\n let d = card::Card::new(card::Value::Six, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Seven, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::Straight((a, b, c, d, e));\n\n assert_eq!(poker_hand.raw_value(), 4);\n}\n\n#[test]\nfn flush_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::King, card::Suit::Hearts);\n let c = card::Card::new(card::Value::Five, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Jack, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::Flush((a, b, c, d, e));\n\n assert_eq!(poker_hand.raw_value(), 5);\n}\n\n#[test]\nfn full_house_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Three, card::Suit::Spades);\n let c = card::Card::new(card::Value::Three, card::Suit::Clubs);\n let d = card::Card::new(card::Value::Ten, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Ten, card::Suit::Diamonds);\n let poker_hand = hand::PokerHand::FullHouse((a, b, c), (d, e));\n\n assert_eq!(poker_hand.raw_value(), 6);\n}\n\n#[test]\nfn quads_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Diamonds);\n let b = card::Card::new(card::Value::Three, card::Suit::Spades);\n let c = card::Card::new(card::Value::Three, card::Suit::Clubs);\n let d = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::Quads((a, b, c, d));\n\n assert_eq!(poker_hand.raw_value(), 7);\n}\n\n#[test]\nfn straight_flush_raw_value() {\n let a = card::Card::new(card::Value::Three, card::Suit::Hearts);\n let b = card::Card::new(card::Value::Four, card::Suit::Hearts);\n let c = card::Card::new(card::Value::Five, card::Suit::Hearts);\n let d = card::Card::new(card::Value::Six, card::Suit::Hearts);\n let e = card::Card::new(card::Value::Seven, card::Suit::Hearts);\n let poker_hand = hand::PokerHand::StraightFlush((a, b, c, d, e));\n\n assert_eq!(poker_hand.raw_value(), 8);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/! Fasta reading and writing.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use std::io;\n\/\/! use bio::io::fasta;\n\/\/! let reader = fasta::Reader::new(io::stdin());\n\/\/! 
```\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\nuse std::collections;\nuse std::fs;\nuse std::path::Path;\nuse std::convert::AsRef;\nuse std::ffi::AsOsStr;\n\nuse itertools::Itertools;\n\nuse csv;\n\n\npub struct Reader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String\n}\n\n\nimpl<R: io::Read> Reader<R> {\n \/\/\/ Create a new FastQ reader.\n pub fn new(reader: R) -> Self {\n Reader { reader: io::BufReader::new(reader), line: String::new() }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Reader<fs::File>> {\n fs::File::open(path).map(|f| Reader::new(f))\n }\n\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with(\">\") {\n return Err(io::Error::new(\n io::ErrorKind::Other,\n \"Expected > at record start.\"\n ));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n record.seq.push_str(&self.line.trim_right());\n if self.line.is_empty() || self.line.starts_with(\">\") {\n break;\n }\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\npub struct Index {\n inner: collections::BTreeMap<Vec<u8>, IndexRecord>,\n}\n\n\nimpl Index {\n pub fn new<R: io::Read>(fai: R) -> csv::Result<Self> {\n let mut inner = collections::BTreeMap::new();\n let mut fai_reader = csv::Reader::from_reader(fai).delimiter(b'\\t').has_headers(false);\n for row in fai_reader.decode() {\n let (name, record): (String, IndexRecord) = try!(row);\n inner.insert(name.into_bytes(), record);\n }\n Ok(Index { inner: inner })\n }\n\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n match fs::File::open(path) {\n Ok(fai) => Self::new(fai),\n Err(e) => Err(csv::Error::Io(e))\n }\n }\n\n pub fn sequences(&self) -> Vec<Sequence> {\n self.inner.iter().map(|(name, record)| Sequence { name: name.clone(), len: record.len }).collect_vec()\n }\n}\n\n\npub struct IndexedReader<R: io::Read + io::Seek> {\n reader: io::BufReader<R>,\n pub index: Index,\n}\n\n\nimpl<R: io::Read + io::Seek> IndexedReader<R> {\n pub fn new<I: io::Read>(fasta: R, fai: I) -> csv::Result<Self> {\n let index = try!(Index::new(fai));\n Ok(IndexedReader { reader: io::BufReader::new(fasta), index: index })\n }\n\n pub fn with_index(fasta: R, index: Index) -> Self {\n IndexedReader { reader: io::BufReader::new(fasta), index: index }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<IndexedReader<fs::File>> {\n let mut ext = path.as_ref().extension().unwrap_or(\"\".as_os_str()).to_str().unwrap().to_string();\n ext.push_str(\".fai\");\n let fai_path = path.as_ref().with_extension(ext);\n\n let index = try!(Index::from_file(&fai_path));\n match fs::File::open(path) {\n Ok(fasta) => Ok(IndexedReader::with_index(fasta, index)),\n Err(e) => Err(csv::Error::Io(e))\n }\n }\n\n pub fn read_all(&mut self, seqname: &[u8], seq: &mut Vec<u8>) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(&idx) => self.read(seqname, 0, idx.len, seq),\n None => Err(\n io::Error::new(\n io::ErrorKind::Other,\n \"Unknown sequence name.\"\n )\n )\n }\n }\n\n pub fn read(&mut self, seqname: &[u8], start: u64, stop: u64, seq: &mut Vec<u8>) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(idx) => {\n seq.clear();\n \/\/ 
derived from\n \/\/ http:\/\/www.allenyu.info\/item\/24-quickly-fetch-sequence-from-samtools-faidx-indexed-fasta-sequences.html\n let line = start \/ idx.line_bases * idx.line_bytes;\n let line_offset = start % idx.line_bases;\n let offset = idx.offset + line + line_offset;\n let lines = stop \/ idx.line_bases * idx.line_bytes - line;\n let line_stop = stop % idx.line_bases - if lines == 0 { line_offset } else { 0 };\n\n try!(self.reader.seek(io::SeekFrom::Start(offset)));\n let mut buf = vec![0u8; idx.line_bases as usize];\n for _ in 0..lines {\n \/\/ read full lines\n try!(self.reader.read(&mut buf));\n seq.push_all(&buf);\n }\n \/\/ read last line\n println!(\"linestop {}\", line_stop);\n try!(self.reader.read(&mut buf[..line_stop as usize]));\n seq.push_all(&buf[..line_stop as usize]);\n Ok(())\n },\n None => Err(\n io::Error::new(\n io::ErrorKind::Other,\n \"Unknown sequence name.\"\n )\n )\n }\n }\n}\n\n\n#[derive(RustcDecodable, Debug, Copy, Clone)]\nstruct IndexRecord {\n len: u64,\n offset: u64,\n line_bases: u64,\n line_bytes: u64,\n}\n\n\npub struct Sequence {\n pub name: Vec<u8>,\n pub len: u64,\n}\n\n\n\/\/\/ A Fasta writer.\npub struct Writer<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl<W: io::Write> Writer<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n Writer { writer: io::BufWriter::new(writer) }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Writer<fs::File>> {\n fs::File::create(path).map(|f| Writer::new(f))\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), &record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given values.\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ * `id` - the record id\n \/\/\/ * `desc` - the optional descriptions\n \/\/\/ * `seq` - the sequence\n pub fn write(&mut self, id: &str, desc: &[&str], seq: &[u8]) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if !desc.is_empty() {\n for d in desc {\n try!(self.writer.write(b\" \"));\n try!(self.writer.write(d.as_bytes()));\n }\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n pub fn new() -> Self {\n Record { header: String::new(), seq: String::new() }\n }\n\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].words().next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Vec<&str> {\n self.header[1..].words().skip(1).collect()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> &[u8] {\n self.seq.as_bytes()\n }\n\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: Reader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for 
Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err))\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n\";\n const FAI_FILE: &'static [u8] = b\"id\\t12\\t9\\t60\\t61\n\";\n\n #[test]\n fn test_reader() {\n let reader = Reader::new(FASTA_FILE);\n let records: Vec<io::Result<Record>> = reader.records().collect();\n assert!(records.len() == 1);\n for res in records {\n let record = res.ok().unwrap();\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), Some(\"id\"));\n assert_eq!(record.desc(), [\"desc\"]);\n assert_eq!(record.seq(), b\"ACCGTAGGCTGA\");\n }\n }\n\n #[test]\n fn test_indexed_reader() {\n let mut reader = IndexedReader::new(io::Cursor::new(FASTA_FILE), FAI_FILE).ok().expect(\"Error reading index\");\n let mut seq = Vec::new();\n reader.read(b\"id\", 1, 5, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGT\");\n }\n\n #[test]\n fn test_writer() {\n let mut writer = Writer::new(Vec::new());\n writer.write(\"id\", &[\"desc\"], b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n assert_eq!(writer.writer.get_ref(), &FASTA_FILE);\n }\n}\n<commit_msg>Minor.<commit_after>\/\/ Copyright 2014 Johannes Köster, Christopher Schröder.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/! Fasta reading and writing.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use std::io;\n\/\/! use bio::io::fasta;\n\/\/! let reader = fasta::Reader::new(io::stdin());\n\/\/! 
```\n\n\nuse std::io;\nuse std::io::prelude::*;\nuse std::ascii::AsciiExt;\nuse std::collections;\nuse std::fs;\nuse std::path::Path;\nuse std::convert::AsRef;\nuse std::ffi::AsOsStr;\n\nuse itertools::Itertools;\n\nuse csv;\n\n\npub struct Reader<R: io::Read> {\n reader: io::BufReader<R>,\n line: String\n}\n\n\nimpl<R: io::Read> Reader<R> {\n \/\/\/ Create a new FastQ reader.\n pub fn new(reader: R) -> Self {\n Reader { reader: io::BufReader::new(reader), line: String::new() }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Reader<fs::File>> {\n fs::File::open(path).map(|f| Reader::new(f))\n }\n\n pub fn read(&mut self, record: &mut Record) -> io::Result<()> {\n record.clear();\n if self.line.is_empty() {\n try!(self.reader.read_line(&mut self.line));\n if self.line.is_empty() {\n return Ok(());\n }\n }\n\n if !self.line.starts_with(\">\") {\n return Err(io::Error::new(\n io::ErrorKind::Other,\n \"Expected > at record start.\"\n ));\n }\n record.header.push_str(&self.line);\n loop {\n self.line.clear();\n try!(self.reader.read_line(&mut self.line));\n record.seq.push_str(&self.line.trim_right());\n if self.line.is_empty() || self.line.starts_with(\">\") {\n break;\n }\n }\n\n Ok(())\n }\n\n \/\/\/ Return an iterator over the records of this FastQ file.\n pub fn records(self) -> Records<R> {\n Records { reader: self }\n }\n}\n\n\npub struct Index {\n inner: collections::BTreeMap<Vec<u8>, IndexRecord>,\n}\n\n\nimpl Index {\n pub fn new<R: io::Read>(fai: R) -> csv::Result<Self> {\n let mut inner = collections::BTreeMap::new();\n let mut fai_reader = csv::Reader::from_reader(fai).delimiter(b'\\t').has_headers(false);\n for row in fai_reader.decode() {\n let (name, record): (String, IndexRecord) = try!(row);\n inner.insert(name.into_bytes(), record);\n }\n Ok(Index { inner: inner })\n }\n\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<Self> {\n match fs::File::open(path) {\n Ok(fai) => Self::new(fai),\n Err(e) => Err(csv::Error::Io(e))\n }\n }\n\n pub fn with_fasta_file<P: AsRef<Path>>(fasta_path: &P) -> csv::Result<Self> {\n let mut ext = fasta_path.as_ref().extension().unwrap_or(\"\".as_os_str()).to_str().unwrap().to_string();\n ext.push_str(\".fai\");\n let fai_path = fasta_path.as_ref().with_extension(ext);\n\n Self::from_file(&fai_path)\n }\n\n pub fn sequences(&self) -> Vec<Sequence> {\n self.inner.iter().map(|(name, record)| Sequence { name: name.clone(), len: record.len }).collect_vec()\n }\n}\n\n\npub struct IndexedReader<R: io::Read + io::Seek> {\n reader: io::BufReader<R>,\n pub index: Index,\n}\n\n\nimpl<R: io::Read + io::Seek> IndexedReader<R> {\n pub fn new<I: io::Read>(fasta: R, fai: I) -> csv::Result<Self> {\n let index = try!(Index::new(fai));\n Ok(IndexedReader { reader: io::BufReader::new(fasta), index: index })\n }\n\n pub fn with_index(fasta: R, index: Index) -> Self {\n IndexedReader { reader: io::BufReader::new(fasta), index: index }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: &P) -> csv::Result<IndexedReader<fs::File>> {\n let index = try!(Index::with_fasta_file(path));\n\n match fs::File::open(path) {\n Ok(fasta) => Ok(IndexedReader::with_index(fasta, index)),\n Err(e) => Err(csv::Error::Io(e))\n }\n }\n\n pub fn read_all(&mut self, seqname: &[u8], seq: &mut Vec<u8>) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(&idx) => self.read(seqname, 0, idx.len, seq),\n None => Err(\n io::Error::new(\n io::ErrorKind::Other,\n \"Unknown sequence name.\"\n )\n )\n }\n }\n\n pub fn read(&mut self, seqname: &[u8], start: u64, stop: 
u64, seq: &mut Vec<u8>) -> io::Result<()> {\n match self.index.inner.get(seqname) {\n Some(idx) => {\n seq.clear();\n \/\/ derived from\n \/\/ http:\/\/www.allenyu.info\/item\/24-quickly-fetch-sequence-from-samtools-faidx-indexed-fasta-sequences.html\n let line = start \/ idx.line_bases * idx.line_bytes;\n let line_offset = start % idx.line_bases;\n let offset = idx.offset + line + line_offset;\n let lines = stop \/ idx.line_bases * idx.line_bytes - line;\n let line_stop = stop % idx.line_bases - if lines == 0 { line_offset } else { 0 };\n\n try!(self.reader.seek(io::SeekFrom::Start(offset)));\n let mut buf = vec![0u8; idx.line_bases as usize];\n for _ in 0..lines {\n \/\/ read full lines\n try!(self.reader.read(&mut buf));\n seq.push_all(&buf);\n }\n \/\/ read last line\n println!(\"linestop {}\", line_stop);\n try!(self.reader.read(&mut buf[..line_stop as usize]));\n seq.push_all(&buf[..line_stop as usize]);\n Ok(())\n },\n None => Err(\n io::Error::new(\n io::ErrorKind::Other,\n \"Unknown sequence name.\"\n )\n )\n }\n }\n}\n\n\n#[derive(RustcDecodable, Debug, Copy, Clone)]\nstruct IndexRecord {\n len: u64,\n offset: u64,\n line_bases: u64,\n line_bytes: u64,\n}\n\n\npub struct Sequence {\n pub name: Vec<u8>,\n pub len: u64,\n}\n\n\n\/\/\/ A Fasta writer.\npub struct Writer<W: io::Write> {\n writer: io::BufWriter<W>,\n}\n\n\nimpl<W: io::Write> Writer<W> {\n \/\/\/ Create a new Fasta writer.\n pub fn new(writer: W) -> Self {\n Writer { writer: io::BufWriter::new(writer) }\n }\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<Writer<fs::File>> {\n fs::File::create(path).map(|f| Writer::new(f))\n }\n\n \/\/\/ Directly write a Fasta record.\n pub fn write_record(&mut self, record: Record) -> io::Result<()> {\n self.write(record.id().unwrap_or(\"\"), &record.desc(), record.seq())\n }\n\n \/\/\/ Write a Fasta record with given values.\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ * `id` - the record id\n \/\/\/ * `desc` - the optional descriptions\n \/\/\/ * `seq` - the sequence\n pub fn write(&mut self, id: &str, desc: &[&str], seq: &[u8]) -> io::Result<()> {\n try!(self.writer.write(b\">\"));\n try!(self.writer.write(id.as_bytes()));\n if !desc.is_empty() {\n for d in desc {\n try!(self.writer.write(b\" \"));\n try!(self.writer.write(d.as_bytes()));\n }\n }\n try!(self.writer.write(b\"\\n\"));\n try!(self.writer.write(seq));\n try!(self.writer.write(b\"\\n\"));\n\n Ok(())\n }\n\n \/\/\/ Flush the writer, ensuring that everything is written.\n pub fn flush(&mut self) -> io::Result<()> {\n self.writer.flush()\n }\n}\n\n\npub struct Record {\n header: String,\n seq: String,\n}\n\n\nimpl Record {\n pub fn new() -> Self {\n Record { header: String::new(), seq: String::new() }\n }\n\n pub fn is_empty(&self) -> bool {\n self.header.is_empty() && self.seq.is_empty()\n }\n\n \/\/\/ Check validity of Fasta record.\n pub fn check(&self) -> Result<(), &str> {\n if self.id().is_none() {\n return Err(\"Expecting id for FastQ record.\");\n }\n if !self.seq.is_ascii() {\n return Err(\"Non-ascii character found in sequence.\");\n }\n\n Ok(())\n }\n\n \/\/\/ Return the id of the record.\n pub fn id(&self) -> Option<&str> {\n self.header[1..].words().next()\n }\n\n \/\/\/ Return descriptions if present.\n pub fn desc(&self) -> Vec<&str> {\n self.header[1..].words().skip(1).collect()\n }\n\n \/\/\/ Return the sequence of the record.\n pub fn seq(&self) -> &[u8] {\n self.seq.as_bytes()\n }\n\n fn clear(&mut self) {\n self.header.clear();\n self.seq.clear();\n }\n}\n\n\n\/\/\/ An iterator over the 
records of a Fasta file.\npub struct Records<R: io::Read> {\n reader: Reader<R>,\n}\n\n\nimpl<R: io::Read> Iterator for Records<R> {\n type Item = io::Result<Record>;\n\n fn next(&mut self) -> Option<io::Result<Record>> {\n let mut record = Record::new();\n match self.reader.read(&mut record) {\n Ok(()) if record.is_empty() => None,\n Ok(()) => Some(Ok(record)),\n Err(err) => Some(Err(err))\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use std::io;\n\n const FASTA_FILE: &'static [u8] = b\">id desc\nACCGTAGGCTGA\n\";\n const FAI_FILE: &'static [u8] = b\"id\\t12\\t9\\t60\\t61\n\";\n\n #[test]\n fn test_reader() {\n let reader = Reader::new(FASTA_FILE);\n let records: Vec<io::Result<Record>> = reader.records().collect();\n assert!(records.len() == 1);\n for res in records {\n let record = res.ok().unwrap();\n assert_eq!(record.check(), Ok(()));\n assert_eq!(record.id(), Some(\"id\"));\n assert_eq!(record.desc(), [\"desc\"]);\n assert_eq!(record.seq(), b\"ACCGTAGGCTGA\");\n }\n }\n\n #[test]\n fn test_indexed_reader() {\n let mut reader = IndexedReader::new(io::Cursor::new(FASTA_FILE), FAI_FILE).ok().expect(\"Error reading index\");\n let mut seq = Vec::new();\n reader.read(b\"id\", 1, 5, &mut seq).ok().expect(\"Error reading sequence.\");\n assert_eq!(seq, b\"CCGT\");\n }\n\n #[test]\n fn test_writer() {\n let mut writer = Writer::new(Vec::new());\n writer.write(\"id\", &[\"desc\"], b\"ACCGTAGGCTGA\").ok().expect(\"Expected successful write\");\n writer.flush().ok().expect(\"Expected successful write\");\n assert_eq!(writer.writer.get_ref(), &FASTA_FILE);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>REMEMBER TO TRACK THE FILES, TICKI<commit_after>\/\/! This is a temporary implementation of rsa until liboctavo is ported to redox\n\/\/! Note that this is not secure. It's trivial to crack. This implementation is just\n\/\/! 
for testing.\n\nfn mod_pow(b: u64, e: u64, m: u64) -> u64 {\n let mut c = 1;\n let mut e_prime = 0;\n\n loop {\n e_prime += 1;\n c = (b * c) % m;\n\n if e_prime >= e {\n break;\n }\n }\n\n c\n}\n\nfn encrypt(msg: u64, key: (u64, u64)) -> u64 {\n mod_pow(msg, key.1, key.0)\n}\nfn decrypt(enc_msg: u64, key: (u64, u64)) -> u64 {\n mod_pow(enc_msg, key.1, key.0)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Optimize randomizer performance<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[server] Add initial stub server<commit_after>extern crate encoding;\n\nuse std::io::{Read, Write};\nuse std::net::TcpListener;\nuse std::os::unix::io::AsRawFd;\nuse std::thread;\nuse encoding::{Encoding, EncoderTrap, DecoderTrap};\nuse encoding::all::{ASCII, EUC_JP};\n\nfn main() {\n let listener = TcpListener::bind((\"127.0.0.1\", 11178)).unwrap();\n let encoding = EUC_JP;\n\n for stream in listener.incoming() {\n match stream {\n Ok(mut stream) => {\n thread::spawn(move || {\n println!(\"Connected {}\", stream.as_raw_fd());\n let _ = handle_stream(&mut stream, encoding);\n println!(\"Disconnected {}\", stream.as_raw_fd());\n });\n }\n Err(e) => {\n let _ = writeln!(&mut std::io::stderr(), \"{}\", e);\n }\n }\n }\n}\n\nfn handle_stream<T: Read + Write>(stream: &mut T, encoding: &Encoding) -> std::io::Result<()> {\n loop {\n let mut cmd = [0; 1];\n let r = try!(stream.read(&mut cmd));\n if r == 0 {\n break;\n }\n match cmd[0] {\n 0x30 => {\n break;\n }\n 0x31 => {\n let _ = respond_candidate(stream, encoding);\n }\n 0x32 => {\n let _ = respond_version(stream);\n }\n _ => { println!(\"Unknown message {}\", cmd[0]); }\n }\n }\n return Ok(());\n}\n\nfn respond_version(stream: &mut Write) -> std::io::Result<()> {\n let _ = try!(write_with_encoding(stream, \"skkserv-rust:0.1.0\", ASCII));\n try!(stream.flush());\n return Ok(());\n}\n\nfn respond_candidate<T: Read + Write>(stream: &mut T, encoding: &Encoding) -> std::io::Result<()> {\n let input = try!(read_candidate(stream, encoding));\n println!(\"Asked {}\", input);\n let _ = try!(write_with_encoding(stream, \"1\/aiueo\/\\n\", encoding));\n return Ok(());\n}\n\nfn read_candidate(stream: &mut Read, encoding: &Encoding) -> std::io::Result<String> {\n let mut buf = Vec::new();\n\n loop {\n let mut t = [0; 1024];\n let r = try!(stream.read(&mut t));\n if r == 0 {\n panic!(\"Unterminated input\");\n }\n for i in 0..r {\n if t[i] == 0x20 {\n return Ok(encoding.decode(&buf, DecoderTrap::Strict).unwrap());\n }\n buf.push(t[i]);\n \/\/ TODO: unpush rest\n }\n }\n}\n\nfn write_with_encoding(stream: &mut Write, input: &str, encoding: &Encoding) -> std::io::Result<usize> {\n return stream.write(&encoding.encode(input, EncoderTrap::Strict).unwrap());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>argh, forgot to put this file in source control earlier<commit_after>const CX_ASI : u32 = 0x01;\nconst CX_OPERATOR : u32 = 0x02;\n\n#[derive(Debug, Copy, Eq, PartialEq)]\npub struct Context {\n asi: bool,\n operator: bool\n}\n\nimpl Context {\n pub fn new() -> Context {\n Context { asi: false, operator: false }\n }\n pub fn is_asi_possible(&mut self) -> bool { self.asi }\n pub fn is_operator(&mut self) -> bool { self.operator }\n}\n\n\/*\npub struct ParseContext {\n is_asi_possible: bool,\n is_operator: bool\n}\n*\/\n\npub trait ParseContext {\n fn is_asi_possible(&mut self) -> bool;\n fn is_operator(&mut self) -> bool;\n}\n\npub struct SimpleContext;\n\nimpl<'a> ParseContext for &'a SimpleContext {\n fn is_asi_possible(&mut self) -> bool { true }\n 
fn is_operator(&mut self) -> bool { true }\n}\n\nimpl SimpleContext {\n pub fn new() -> SimpleContext {\n SimpleContext\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(unwise): Happy Fourth! Insomnia commit #2. Syncfile error handling cleanup.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor improvements to subcommand summaries.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #78478 - hameerabbasi:const-generics-supertraits, r=lcnr<commit_after>\/\/ run-pass\n\/\/ revisions: full min\n\n#![cfg_attr(full, feature(const_generics))]\n#![cfg_attr(full, allow(incomplete_features))]\n#![cfg_attr(min, feature(min_const_generics))]\n\ntrait Foo<const N: usize> {\n fn myfun(&self) -> usize;\n}\ntrait Bar<const N: usize> : Foo<N> {}\ntrait Baz: Foo<3> {}\n\nstruct FooType<const N: usize>;\nstruct BarType<const N: usize>;\nstruct BazType;\n\nimpl<const N: usize> Foo<N> for FooType<N> {\n fn myfun(&self) -> usize { N }\n}\nimpl<const N: usize> Foo<N> for BarType<N> {\n fn myfun(&self) -> usize { N + 1 }\n}\nimpl<const N: usize> Bar<N> for BarType<N> {}\nimpl Foo<3> for BazType {\n fn myfun(&self) -> usize { 999 }\n}\nimpl Baz for BazType {}\n\ntrait Foz {}\ntrait Boz: Foo<3> + Foz {}\ntrait Bok<const N: usize>: Foo<N> + Foz {}\n\nstruct FozType;\nstruct BozType;\nstruct BokType<const N: usize>;\n\nimpl Foz for FozType {}\n\nimpl Foz for BozType {}\nimpl Foo<3> for BozType {\n fn myfun(&self) -> usize { 9999 }\n}\nimpl Boz for BozType {}\n\nimpl<const N: usize> Foz for BokType<N> {}\nimpl<const N: usize> Foo<N> for BokType<N> {\n fn myfun(&self) -> usize { N + 2 }\n}\nimpl<const N: usize> Bok<N> for BokType<N> {}\n\nfn a<const N: usize>(x: &dyn Foo<N>) -> usize { x.myfun() }\nfn b(x: &dyn Foo<3>) -> usize { x.myfun() }\nfn c<T: Bok<N>, const N: usize>(x: T) -> usize { a::<N>(&x) }\nfn d<T: ?Sized + Foo<3>>(x: &T) -> usize { x.myfun() }\nfn e(x: &dyn Bar<3>) -> usize { d(x) }\n\nfn main() {\n let foo = FooType::<3> {};\n assert!(a(&foo) == 3);\n assert!(b(&foo) == 3);\n assert!(d(&foo) == 3);\n\n let bar = BarType::<3> {};\n assert!(a(&bar) == 4);\n assert!(b(&bar) == 4);\n assert!(d(&bar) == 4);\n assert!(e(&bar) == 4);\n\n let baz = BazType {};\n assert!(a(&baz) == 999);\n assert!(b(&baz) == 999);\n assert!(d(&baz) == 999);\n\n let boz = BozType {};\n assert!(a(&boz) == 9999);\n assert!(b(&boz) == 9999);\n assert!(d(&boz) == 9999);\n\n let bok = BokType::<3> {};\n assert!(a(&bok) == 5);\n assert!(b(&bok) == 5);\n assert!(d(&bok) == 5);\n assert!(c(BokType::<3> {}) == 5);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add globs that cause errors instead of ignoring them and printing an error<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix mmu read fn to use LittleEndian access to the buffer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove redundant length checks.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed some clippy warnings, and adjusted some comments to fit in rustfmt's guidelines.<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::get_slice::GetSlice;\nuse redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ main: box|args: &Vec<String>| {\n\/\/\/ println!(\"Say 'hello' to my command! :-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"cd\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"echo\",\n main: Box::new(|args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n }),\n });\n\n commands.push(Command {\n name: \"else\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"exec\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n let mut args_str: Vec<&str> = Vec::new();\n for arg in args.get_slice(Some(2), None) {\n args_str.push(arg);\n }\n\n File::exec(arg, &args_str);\n }\n }),\n });\n\n commands.push(Command {\n name: \"exit\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"fi\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"if\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"ls\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"mkdir\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(dir_name) => if DirEntry::create(dir_name).is_none() {\n println!(\"Failed to create {}\", dir_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: Box::new(|_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n }),\n });\n\n commands.push(Command {\n name: \"read\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"run\",\n main: 
Box::new(|args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: Box::new(|args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n }),\n });\n\n commands.push(Command {\n name: \"send\",\n main: Box::new(|args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"wget\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n }),\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: Box::new(move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n }),\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An 
application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n self.set_var(name, &value);\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n\n pub fn set_var(&mut self, name: &str, value: &str){\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value.to_string();\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name.to_string(),\n value: value.to_string(),\n });\n }\n }\n\n \/\/\/ Method to return the current directory\n \/\/\/ If the current directory cannot be found, a default string (\"?\") will be returned\n pub fn get_current_directory(&mut self) -> String {\n \/\/ Return the current path\n File::open(\"\")\n .and_then(|file| file.path())\n .unwrap_or(\"?\".to_string())\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n self.on_command(&command);\n }\n\n loop {\n for mode in self.modes.iter().rev() {\n if mode.value {\n print!(\"+ \");\n } else {\n print!(\"- \");\n }\n }\n print!(\"user@redox:{}# \", self.get_current_directory());\n if let Some(command_original) = readln!() {\n let command = command_original.trim();\n if command == \"exit\" {\n println!(\"Exit temporarily blocked (due to using terminal as init)\")\n \/\/break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n } else {\n println!(\"Failed to read from stdin\");\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = Box::new(Application::new());\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Add a 'help' field for each command structure, to display a little man for each command<commit_after>use redox::get_slice::GetSlice;\nuse redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ main: box|args: &Vec<String>| {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command<'a> {\n pub name: &'a str,\n pub help: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"cd\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"echo\",\n main: Box::new(|args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n }),\n });\n\n commands.push(Command {\n name: \"else\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"exec\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n let mut args_str: Vec<&str> = Vec::new();\n for arg in args.get_slice(Some(2), None) {\n args_str.push(arg);\n }\n\n File::exec(arg, &args_str);\n }\n }),\n });\n\n commands.push(Command {\n name: \"exit\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"fi\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"if\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"ls\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"mkdir\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(dir_name) => if DirEntry::create(dir_name).is_none() {\n println!(\"Failed to create {}\", dir_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: Box::new(|_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n }),\n });\n\n commands.push(Command {\n name: \"read\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"run\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: Box::new(|args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n 
Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n }),\n });\n\n commands.push(Command {\n name: \"send\",\n main: Box::new(|args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"wget\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n }),\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: Box::new(move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n }),\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for 
variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n self.set_var(name, &value);\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n\n pub fn set_var(&mut self, name: &str, value: &str){\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value.to_string();\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name.to_string(),\n value: value.to_string(),\n });\n }\n }\n\n \/\/\/ Method to return the current directory\n \/\/\/ If the current directory cannot be found, a default string (\"?\") will be returned\n pub fn get_current_directory(&mut self) -> String {\n \/\/ Return the current path\n File::open(\"\")\n .and_then(|file| file.path())\n .unwrap_or(\"?\".to_string())\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n self.on_command(&command);\n }\n\n loop {\n for mode in self.modes.iter().rev() {\n if mode.value {\n print!(\"+ \");\n } else {\n print!(\"- \");\n }\n }\n print!(\"user@redox:{}# \", self.get_current_directory());\n if let Some(command_original) = readln!() {\n let command = command_original.trim();\n if command == \"exit\" {\n println!(\"Exit temporarily blocked (due to using terminal as init)\")\n \/\/break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n } else {\n println!(\"Failed to read from stdin\");\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = Box::new(Application::new());\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test(all): add tests for parsing, formatting and matching tags<commit_after>extern crate language_tags;\n\nuse std::default::Default;\nuse std::collections::BTreeMap;\n\nuse language_tags::{Error, LanguageTag, Result};\n\n\/\/ All tests here may be completly nonsensical.\n\n#[test]\nfn test_lang_from_str() {\n let a: LanguageTag = \"de\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"de\".to_owned());\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_extlang_from_str() {\n let a: LanguageTag = \"ar-afb\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"ar\".to_owned());\n b.extlang = Some(\"afb\".to_owned());\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_script_from_str() {\n let a: LanguageTag = \"ar-afb-Latn\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"ar\".to_owned());\n b.extlang = Some(\"afb\".to_owned());\n b.script = Some(\"latn\".to_owned());\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_region_from_str() {\n let mut a: LanguageTag = \"ar-DE\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"ar\".to_owned());\n b.region = Some(\"de\".to_owned());\n assert_eq!(a, b);\n\n a = 
\"ar-005\".parse().unwrap();\n b = Default::default();\n b.language = Some(\"ar\".to_owned());\n b.region = Some(\"005\".to_owned());\n assert_eq!(a, b);\n\n a = \"ar-005\".parse().unwrap();\n b = Default::default();\n b.language = Some(\"ar\".to_owned());\n b.region = Some(\"005\".to_owned());\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_variant_from_str() {\n let a: LanguageTag = \"sl-IT-nedis\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"sl\".parse().unwrap());\n b.region = Some(\"it\".parse().unwrap());\n b.variants = vec![\"nedis\".parse().unwrap()];\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_invalid_from_str() {\n assert_eq!(\"sl-07\".parse::<LanguageTag>(), Err(Error));\n}\n\n#[test]\nfn test_strange_case_from_str() {\n \/\/ This is a perfectly valid language code\n let a: LanguageTag = \"SL-AFB-lATN-005-nEdis\".parse().unwrap();\n let b = LanguageTag {\n language: Some(\"sl\".to_owned()),\n extlang: Some(\"afb\".to_owned()),\n script: Some(\"Latn\".to_owned()),\n region: Some(\"005\".to_owned()),\n variants: vec![\"nedis\".to_owned()],\n extensions: BTreeMap::new(),\n privateuse: Vec::new(),\n };\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_fmt() {\n let a: LanguageTag = \"ar-arb-Latn-DE-nedis-foobar\".parse().unwrap();\n assert_eq!(format!(\"{}\", a), \"ar-arb-Latn-DE-nedis-foobar\");\n}\n\n#[test]\nfn test_match() {\n let de_de: LanguageTag = \"de-DE\".parse().unwrap();\n let de: LanguageTag = \"de\".parse().unwrap();\n assert!(de.matches(&de_de));\n assert!(!de_de.matches(&de));\n}\n\n#[test]\nfn test_klingon() {\n let a: LanguageTag = \"i-klingon\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"i-klingon\".to_owned());\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_private_use() {\n let a: LanguageTag = \"es-x-foobar-AT-007\".parse().unwrap();\n let mut b: LanguageTag = Default::default();\n b.language = Some(\"es\".to_owned());\n b.privateuse = vec![\"foobar\".to_owned(), \"AT\".to_owned(), \"007\".to_owned()];\n assert_eq!(a, b);\n}\n\n#[test]\nfn test_unicode() {\n let x: Result<LanguageTag> = \"zh-x-Üńìcødê\".parse();\n assert!(x.is_err());\n}\n\n#[test]\nfn test_format() {\n let x: LanguageTag = \"HkgnmerM-x-e5-zf-VdDjcpz-1V6\".parse().unwrap();\n assert_eq!(format!(\"{}\", x), \"HkgnmerM-x-e5-zf-VdDjcpz-1V6\");\n let y: LanguageTag = \"MgxQa-ywEp-8lcW-7bvT-h-dP1Md-0h7-0Z3ir\".parse().unwrap();\n assert_eq!(format!(\"{}\", y), \"MgxQa-ywEp-8lcW-7bvT-h-dP1Md-0h7-0Z3ir\");\n}\n\n#[test]\nfn test_wellformed_tags() {\n \/\/ Source: http:\/\/www.langtag.net\/test-suites\/well-formed-tags.txt\n let tags = vec![\n \"fr \",\n \"fr-Latn\",\n \"fr-fra\", \/\/ Extended tag\n \"fr-Latn-FR\",\n \"fr-Latn-419\",\n \"fr-FR\",\n \"ax-TZ\", \/\/ Not in the registry, but well-formed\n \"fr-shadok\", \/\/ Variant\n \"fr-y-myext-myext2\",\n \"fra-Latn\", \/\/ ISO 639 can be 3-letters\n \"fra\",\n \"fra-FX\",\n \"i-klingon\", \/\/ grandfathered with singleton\n \"I-kLINgon\", \/\/ tags are case-insensitive...\n \"no-bok\", \/\/ grandfathered without singleton\n \"fr-Lat\", \/\/ Extended\",\n \"mn-Cyrl-MN\",\n \"mN-cYrL-Mn\",\n \"fr-Latn-CA\",\n \"en-US\",\n \"fr-Latn-CA\",\n \"i-enochian\", \/\/ Grand fathered\n \"x-fr-CH \",\n \"sr-Latn-CS\",\n \"es-419\",\n \"sl-nedis\",\n \"de-CH-1996\",\n \"de-Latg-1996\",\n \"sl-IT-nedis\",\n \"en-a-bbb-x-a-ccc\",\n \"de-a-value\",\n \"en-Latn-GB-boont-r-extended-sequence-x-private\",\n \"en-x-US\",\n \"az-Arab-x-AZE-derbend\",\n \"es-Latn-CO-x-private\",\n \"en-US-boont\",\n 
\"ab-x-abc-x-abc\", \/\/ anything goes after x\n \"ab-x-abc-a-a\", \/\/ ditto\",\n \"i-default\", \/\/ grandfathered\",\n \"i-klingon\", \/\/ grandfathered\",\n \"abcd-Latn\", \/\/ Language of 4 chars reserved for future use\n \"AaBbCcDd-x-y-any-x\", \/\/ Language of 5-8 chars, registered\n \"en\",\n \"de-AT\",\n \"es-419\",\n \"de-CH-1901\",\n \"sr-Cyrl\",\n \"sr-Cyrl-CS\",\n \"sl-Latn-IT-rozaj\",\n \"en-US-x-twain\",\n \"zh-cmn\",\n \"zh-cmn-Hant\",\n \"zh-cmn-Hant-HK\",\n \"zh-gan\",\n \"zh-yue-Hant-HK\",\n \"xr-lxs-qut\", \/\/ extlangS\n \"xr-lqt-qu\", \/\/ extlang + region\n \"xr-p-lze\", \/\/ Extension\n ];\n let failed: Vec<&str> = tags.iter().filter(|x| x.parse::<LanguageTag>().is_err()).map(|&x| x).collect();\n println!(\"Number: { } Failed: {:?}\", failed.len(), failed);\n assert!(failed.is_empty());\n}\n\n#[test]\nfn test_broken_tags() {\n \/\/ Source: http:\/\/www.langtag.net\/test-suites\/broken-tags.txt\n let tags = vec![\n \"f\",\n \"f-Latn\",\n \"fr-Latn-F\",\n \"a-value\",\n \"en-a-bbb-a-ccc\", \/\/ 'a' appears twice\n \"tlh-a-b-foo\",\n \"i-notexist\", \/\/ grandfathered but not registered: invalid, even if we only test well-formedness\n \"abcdefghi-012345678\",\n \"ab-abc-abc-abc-abc\",\n \"ab-abcd-abc\",\n \"ab-ab-abc\",\n \"ab-123-abc\",\n \"a-Hant-ZH\",\n \"a1-Hant-ZH\",\n \"ab-abcde-abc\",\n \"ab-1abc-abc\",\n \"ab-ab-abcd\",\n \"ab-123-abcd\",\n \"ab-abcde-abcd\",\n \"ab-1abc-abcd\",\n \"ab-a-b\",\n \"ab-a-x\",\n \"ab--ab\",\n \"ab-abc-\",\n \"-ab-abc\",\n \"ab-c-abc-r-toto-c-abc # 'c' appears twice \",\n \"abcd-efg\",\n \"aabbccddE\",\n ];\n let failed: Vec<(&str, Result<LanguageTag>)> = tags.iter().map(|x| (*x, x.parse::<LanguageTag>())).filter(|x| x.1.is_ok()).collect();\n println!(\"Number: { } Failed: {:?}\", failed.len(), failed);\n assert!(failed.is_empty());\n}\n\n#[test]\nfn test_random_good_tags () {\n \/\/ Source: http:\/\/unicode.org\/repos\/cldr\/trunk\/tools\/java\/org\/unicode\/cldr\/util\/data\/langtagTest.txt\n let tags = vec![\n \"zszLDm-sCVS-es-x-gn762vG-83-S-mlL\",\n \"IIJdFI-cfZv\",\n \"kbAxSgJ-685\",\n \"tbutP\",\n \"hDL-595\",\n \"dUf-iUjq-0hJ4P-5YkF-WD8fk\",\n \"FZAABA-FH\",\n \"xZ-lh-4QfM5z9J-1eG4-x-K-R6VPr2z\",\n \"Fyi\",\n \"SeI-DbaG\",\n \"ch-xwFn\",\n \"OeC-GPVI\",\n \"JLzvUSi\",\n \"Fxh-hLAs\",\n \"pKHzCP-sgaO-554\",\n \"eytqeW-hfgH-uQ\",\n \"ydn-zeOP-PR\",\n \"uoWmBM-yHCf-JE\",\n \"xwYem\",\n \"zie\",\n \"Re-wjSv-Ey-i-XE-E-JjWTEB8-f-DLSH-NVzLH-AtnFGWoH-SIDE\",\n \"Ri-063-c-u6v-ZfhkToTB-C-IFfmv-XT-j-rdyYFMhK-h-pY-D5-Oh6FqBhL-hcXt-v-WdpNx71-K-c74m4-eBTT7-JdH7Q1Z\",\n \"ji\",\n \"IM-487\",\n \"EPZ-zwcB\",\n \"GauwEcwo\",\n \"kDEP\",\n \"FwDYt-TNvo\",\n \"ottqP-KLES-x-9-i9\",\n \"fcflR-grQQ\",\n \"TvFwdu-kYhs\",\n \"WE-336\",\n \"MgxQa-ywEp-8lcW-7bvT-h-dP1Md-0h7-0Z3ir-K-Srkm-kA-7LXM-Z-whb2MiO-2mNsvbLm-W3O-4r-U-KceIxHdI-gvMVgUBV-2uRUni-J0-7C8yTK2\",\n \"Hyr-B-evMtVoB1-mtsVZf-vQMV-gM-I-rr-kvLzg-f-lAUK-Qb36Ne-Z-7eFzOD-mv6kKf-l-miZ7U3-k-XDGtNQG\",\n \"ybrlCpzy\",\n \"PTow-w-cAQ51-8Xd6E-cumicgt-WpkZv3NY-q-ORYPRy-v-A4jL4A-iNEqQZZ-sjKn-W-N1F-pzyc-xP5eWz-LmsCiCcZ\",\n \"ih-DlPR-PE\",\n \"Krf-362\",\n \"WzaD\",\n \"EPaOnB-gHHn\",\n \"XYta\",\n \"NZ-RgOO-tR\",\n \"at-FE\",\n \"Tpc-693\",\n \"YFp\",\n \"gRQrQULo\",\n \"pVomZ-585\",\n \"laSu-ZcAq-338\",\n \"gCW\",\n \"PydSwHRI-TYfF\",\n \"zKmWDD\",\n \"X-bCrL5RL\",\n \"HK\",\n \"YMKGcLY\",\n \"GDJ-nHYa-bw-X-ke-rohH5GfS-LdJKsGVe\",\n \"tfOxdau-yjge-489-a-oB-I8Csb-1ESaK1v-VFNz-N-FT-ZQyn-On2-I-hu-vaW3-jIQb-vg0U-hUl-h-dO6KuJqB-U-tde2L-P3gHUY-vnl5c-RyO-H-gK1-zDPu-VF1oeh8W-kGzzvBbW-yuAJZ\",\n \"LwDux\",\n 
\"Zl-072\",\n \"Ri-Ar\",\n \"vocMSwo-cJnr-288\",\n \"kUWq-gWfQ-794\",\n \"YyzqKL-273\",\n \"Xrw-ZHwH-841-9foT-ESSZF-6OqO-0knk-991U-9p3m-b-JhiV-0Kq7Y-h-cxphLb-cDlXUBOQ-X-4Ti-jty94yPp\",\n \"en-GB-oed\",\n \"LEuZl-so\",\n \"HyvBvFi-cCAl-X-irMQA-Pzt-H\",\n \"uDbsrAA-304\",\n \"wTS\",\n \"IWXS\",\n \"XvDqNkSn-jRDR\",\n \"gX-Ycbb-iLphEks-AQ1aJ5\",\n \"FbSBz-VLcR-VL\",\n \"JYoVQOP-Iytp\",\n \"gDSoDGD-lq-v-7aFec-ag-k-Z4-0kgNxXC-7h\",\n \"Bjvoayy-029\",\n \"qSDJd\",\n \"qpbQov\",\n \"fYIll-516\",\n \"GfgLyfWE-EHtB\",\n \"Wc-ZMtk\",\n \"cgh-VEYK\",\n \"WRZs-AaFd-yQ\",\n \"eSb-CpsZ-788\",\n \"YVwFU\",\n \"JSsHiQhr-MpjT-381\",\n \"LuhtJIQi-JKYt\",\n \"vVTvS-RHcP\",\n \"SY\",\n \"fSf-EgvQfI-ktWoG-8X5z-63PW\",\n \"NOKcy\",\n \"OjJb-550\",\n \"KB\",\n \"qzKBv-zDKk-589\",\n \"Jr\",\n \"Acw-GPXf-088\",\n \"WAFSbos\",\n \"HkgnmerM-x-e5-zf-VdDjcpz-1V6\",\n \"UAfYflJU-uXDc-YV\",\n \"x-CHsHx-VDcOUAur-FqagDTx-H-V0e74R\",\n \"uZIAZ-Xmbh-pd\"\n ];\n let failed: Vec<(&str, Result<LanguageTag>)> = tags.iter().map(|x| (*x, x.parse::<LanguageTag>())).filter(|x| x.1.is_err()).collect();\n println!(\"Number: { } Failed: {:?}\", failed.len(), failed);\n assert!(failed.is_empty());\n}\n\n#[test]\nfn test_random_bad_tags () {\n \/\/ Source: http:\/\/unicode.org\/repos\/cldr\/trunk\/tools\/java\/org\/unicode\/cldr\/util\/data\/langtagTest.txt\n let tags = vec![\n \"EdY-z_H791Xx6_m_kj\",\n \"qWt85_8S0-L_rbBDq0gl_m_O_zsAx_nRS\",\n \"VzyL2\",\n \"T_VFJq-L-0JWuH_u2_VW-hK-kbE\",\n \"u-t\",\n \"Q-f_ZVJXyc-doj_k-i\",\n \"JWB7gNa_K-5GB-25t_W-s-ZbGVwDu1-H3E\",\n \"b-2T-Qob_L-C9v_2CZxK86\",\n \"fQTpX_0_4Vg_L3L_g7VtALh2\",\n \"S-Z-E_J\",\n \"f6wsq-02_i-F\",\n \"9_GcUPq_G\",\n \"QjsIy_9-0-7_Dv2yPV09_D-JXWXM\",\n \"D_se-f-k\",\n \"ON47Wv1_2_W\",\n \"f-z-R_s-ha\",\n \"N3APeiw_195_Bx2-mM-pf-Z-Ip5lXWa-5r\",\n \"IRjxU-E_6kS_D_b1b_H\",\n \"NB-3-5-AyW_FQ-9hB-TrRJg3JV_3C\",\n \"yF-3a_V_FoJQAHeL_Z-Mc-u\",\n \"n_w_bbunOG_1-s-tJMT5je\",\n \"Q-AEWE_X\",\n \"57b1O_k_R6MU_sb\",\n \"hK_65J_i-o_SI-Y\",\n \"wB4B7u_5I2_I_NZPI\",\n \"J24Nb_q_d-zE\",\n \"v6-dHjJmvPS_IEb-x_A-O-i\",\n \"8_8_dl-ZgBr84u-P-E\",\n \"nIn-xD7EVhe_C\",\n \"5_N-6P_x7Of_Lo_6_YX_R\",\n \"0_46Oo0sZ-YNwiU8Wr_d-M-pg1OriV\",\n \"laiY-5\",\n \"K-8Mdd-j_ila0sSpo_aO8_J\",\n \"wNATtSL-Cp4_gPa_fD41_9z\",\n \"H_FGz5V8_n6rrcoz0_1O6d-kH-7-N\",\n \"wDOrnHU-odqJ_vWl\",\n \"gP_qO-I-jH\",\n \"h\",\n \"dJ0hX-o_csBykEhU-F\",\n \"L-Vf7_BV_eRJ5goSF_Kp\",\n \"y-oF-chnavU-H\",\n \"9FkG-8Q-8_v\",\n \"W_l_NDQqI-O_SFSAOVq\",\n \"kDG3fzXw\",\n \"t-nsSp-7-t-mUK2\",\n \"Yw-F\",\n \"1-S_3_l\",\n \"u-v_brn-Y\",\n \"4_ft_3ZPZC5lA_D\",\n \"n_dR-QodsqJnh_e\",\n \"Hwvt-bSwZwj_KL-hxg0m-3_hUG\",\n \"mQHzvcV-UL-o2O_1KhUJQo_G2_uryk3-a\",\n \"b-UTn33HF\",\n \"r-Ep-jY-aFM_N_H\",\n \"K-k-krEZ0gwD_k_ua-9dm3Oy-s_v\",\n \"XS_oS-p\",\n \"EIx_h-zf5\",\n \"p_z-0_i-omQCo3B\",\n \"1_q0N_jo_9\",\n \"0Ai-6-S\",\n \"L-LZEp_HtW\",\n \"Zj-A4JD_2A5Aj7_b-m3\",\n \"x\",\n \"p-qPuXQpp_d-jeKifB-c-7_G-X\",\n \"X94cvJ_A\",\n \"F2D25R_qk_W-w_Okf_kx\",\n \"rc-f\",\n \"D\",\n \"gD_WrDfxmF-wu-E-U4t\",\n \"Z_BN9O4_D9-D_0E_KnCwZF-84b-19\",\n \"T-8_g-u-0_E\",\n \"lXTtys9j_X_A_m-vtNiNMw_X_b-C6Nr\",\n \"V_Ps-4Y-S\",\n \"X5wGEA\",\n \"mIbHFf_ALu4_Jo1Z1\",\n \"ET-TacYx_c\",\n \"Z-Lm5cAP_ri88-d_q_fi8-x\",\n \"rTi2ah-4j_j_4AlxTs6m_8-g9zqncIf-N5\",\n \"FBaLB85_u-0NxhAy-ZU_9c\",\n \"x_j_l-5_aV95_s_tY_jp4\",\n \"PL768_D-m7jNWjfD-Nl_7qvb_bs_8_Vg\",\n \"9-yOc-gbh\",\n \"6DYxZ_SL-S_Ye\",\n \"ZCa-U-muib-6-d-f_oEh_O\",\n \"Qt-S-o8340F_f_aGax-c-jbV0gfK_p\",\n \"WE_SzOI_OGuoBDk-gDp\",\n \"cs-Y_9\",\n \"m1_uj\",\n \"Y-ob_PT\",\n \"li-B\",\n 
\"f-2-7-9m_f8den_J_T_d\",\n \"p-Os0dua-H_o-u\",\n \"L\",\n \"rby-w\"\n ];\n let failed: Vec<(&str, Result<LanguageTag>)> = tags.iter().map(|x| (*x, x.parse::<LanguageTag>())).filter(|x| x.1.is_ok()).collect();\n println!(\"Number: { } Failed: {:?}\", failed.len(), failed);\n assert!(failed.is_empty());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Base directory doesn't work on Windows<commit_after><|endoftext|>"} {"text":"<commit_before>use std::cell::RefCell;\nuse std::iter::Iterator;\nuse std::rc::Rc;\nuse std::ops::Deref;\n\nuse storage::file::File;\n\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, Rc<RefCell<File>>);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n for file in files {\n self.print_file(file);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n info!(\"{}\", f(file).join(\" \"));\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n for file in files {\n self.print_file_custom(file, f);\n }\n }\n\n}\n\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f(file).join(\" \"));\n }\n }\n\n}\n\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", &*f.deref().borrow());\n } else {\n info!(\"[File]: {}\", f.deref().borrow().id());\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n let s = f(file).join(\" \");\n if self.debug {\n debug!(\"{:?}\", s);\n } else if self.verbose {\n info!(\"{}\", s);\n } else {\n info!(\"[File]: {}\", s);\n }\n }\n\n}\n\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n 
}\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\", \"...\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n\n let mut row = Row::new(vec![cell_i, cell_o, cell_id]);\n\n for cell in f(file).iter() {\n debug!(\"Adding custom cell: {:?}\", cell);\n row.add_cell(Cell::new(&cell[..]))\n }\n\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n}\n<commit_msg>Rename table title<commit_after>use std::cell::RefCell;\nuse std::iter::Iterator;\nuse std::rc::Rc;\nuse std::ops::Deref;\n\nuse storage::file::File;\n\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, Rc<RefCell<File>>);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n for file in files {\n self.print_file(file);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n info!(\"{}\", f(file).join(\" \"));\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n for file in files {\n self.print_file_custom(file, f);\n }\n }\n\n}\n\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f(file).join(\" \"));\n }\n }\n\n}\n\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", &*f.deref().borrow());\n } else {\n info!(\"[File]: {}\", f.deref().borrow().id());\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n let s = f(file).join(\" \");\n if self.debug {\n debug!(\"{:?}\", s);\n } else if self.verbose {\n info!(\"{}\", s);\n } else {\n info!(\"[File]: {}\", s);\n }\n }\n\n}\n\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: 
verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"#\", \"Module\", \"ID\", \"...\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n\n let mut row = Row::new(vec![cell_i, cell_o, cell_id]);\n\n for cell in f(file).iter() {\n debug!(\"Adding custom cell: {:?}\", cell);\n row.add_cell(Cell::new(&cell[..]))\n }\n\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Uncomment warning line<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don't panic on VSU reads<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don’t store computed values of logical properties in style structs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add compile-fail test sync-cond-shouldnt-escape.rs<commit_after>\/\/ error-pattern: reference is not valid outside of its lifetime\nfn main() {\n let m = ~sync::new_mutex();\n let mut cond = none;\n do m.lock_cond |c| {\n cond = some(c);\n } \n option::unwrap(cond).signal();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Tests from the image crate to be sure we don’t break them<commit_after>#[macro_use] extern crate enum_primitive;\n\nmod gif {\n enum_from_primitive! {\n \/\/\/ Known block types\n enum Block {\n Image = 0x2C,\n Extension = 0x21,\n Trailer = 0x3B\n }\n }\n\n enum_from_primitive! {\n \/\/\/ Known GIF extensions\n enum Extension {\n Text = 0x01,\n Control = 0xF9,\n Comment = 0xFE,\n Application = 0xFF\n }\n }\n\n enum_from_primitive! {\n \/\/\/ Method to dispose the image\n enum DisposalMethod {\n Undefined = 0,\n None = 1,\n Previous = 2,\n Background = 3\n }\n }\n}\n\nmod png {\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug, PartialEq)]\n enum InterlaceMethod {\n None = 0,\n Adam7 = 1\n }\n }\n\n enum_from_primitive! 
{\n #[derive(Debug)]\n pub enum FilterType {\n NoFilter = 0,\n Sub = 1,\n Up = 2,\n Avg = 3,\n Paeth = 4\n }\n }\n}\n\nmod tiff {\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug, PartialEq)]\n enum PhotometricInterpretation {\n WhiteIsZero = 0,\n BlackIsZero = 1,\n RGB = 2,\n RGBPalette = 3,\n TransparencyMask = 4,\n CMYK = 5,\n YCbCr = 6,\n CIELab = 8,\n }\n }\n\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug)]\n enum CompressionMethod {\n None = 1,\n Huffman = 2,\n Fax3 = 3,\n Fax4 = 4,\n LZW = 5,\n JPEG = 6,\n PackBits = 32773\n }\n }\n\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug)]\n enum PlanarConfiguration {\n Chunky = 1,\n Planar = 2\n }\n }\n\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug)]\n enum Predictor {\n None = 1,\n Horizontal = 2\n }\n }\n\n enum_from_primitive! {\n #[derive(Clone, Copy, Debug)]\n pub enum Type {\n BYTE = 1,\n ASCII = 2,\n SHORT = 3,\n LONG = 4,\n RATIONAL = 5,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Handle Empty Braces<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove allocations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a big random test.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add object display test<commit_after>extern crate bnf;\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn validate_display() {\n let input =\n \"<postal-address> ::= <name-part> <street-address> <zip-part>;\n\n <name-part> ::= <personal-part> <last-name> <opt-suffix-part> <EOL>\n | <personal-part> <name-part>;\n\n <personal-part> ::= <initial> \\\".\\\" | <first-name>;\n\n <street-address> ::= <house-num> <street-name> <opt-apt-num> <EOL>;\n\n <zip-part> ::= <town-name> \\\",\\\" <state-code> <ZIP-code> <EOL>;\n\n <opt-suffix-part> ::= \\\"Sr.\\\" | \\\"Jr.\\\" | <roman-numeral> | \\\"\\\";\n <opt-apt-num> ::= <apt-num> | \\\"\\\";\";\n\n let display_output = \n \"<postal-address> ::= <name-part> <street-address> <zip-part>;\\n\\\n <name-part> ::= <personal-part> <last-name> <opt-suffix-part> <EOL> | <personal-part> <name-part>;\\n\\\n <personal-part> ::= <initial> \\\".\\\" | <first-name>;\\n\\\n <street-address> ::= <house-num> <street-name> <opt-apt-num> <EOL>;\\n\\\n <zip-part> ::= <town-name> \\\",\\\" <state-code> <ZIP-code> <EOL>;\\n\\\n <opt-suffix-part> ::= \\\"Sr.\\\" | \\\"Jr.\\\" | <roman-numeral> | \\\"\\\";\\n\\\n <opt-apt-num> ::= <apt-num> | \\\"\\\";\\n\"; \n \n let grammar = bnf::parse(input); \n\n assert_eq!(grammar.to_string(), display_output);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test case for host: `http:\/\/192.168.0.257`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ignore \".d\" binaries in debug build<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>finish rust version<commit_after>use std::collections::{HashMap, HashSet};\n\npub fn min_reorder(n: i32, connections: Vec<Vec<i32>>) -> i32 {\n let mut result = 0;\n let mut set: HashSet<(i32, i32)> = connections.iter().map(|x| (x[0], x[1])).collect();\n let mut graph: HashMap<i32, Vec<i32>> = HashMap::new();\n\n for road in connections {\n graph.entry(road[0]).or_insert(vec![]).push(road[1]);\n graph.entry(road[1]).or_insert(vec![]).push(road[0]);\n }\n\n dfs(0, -1, &graph, &set, &mut result)\n}\n\nfn dfs(\n i: i32,\n p: i32,\n graph: &HashMap<i32, Vec<i32>>,\n set: &HashSet<(i32, i32)>,\n result: &mut i32,\n) -> i32 {\n match set.get(&(p, i)) {\n Some(_) => *result += 1,\n None => (),\n }\n\n for v in 
graph.get(&i).unwrap() {\n if *v == p {\n continue;\n }\n dfs(*v, i, graph, set, result);\n }\n\n *result\n}\n\nfn main() {\n assert_eq!(\n min_reorder(\n 6,\n vec![vec![0, 1], vec![1, 3], vec![2, 3], vec![4, 0], vec![4, 5]]\n ),\n 3\n );\n\n assert_eq!(\n min_reorder(5, vec![vec![1, 0], vec![1, 2], vec![3, 2], vec![3, 4]]),\n 2\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add: Move common functions in 'install' and 'uninstall' to ops::utils<commit_after>use std::collections::{BTreeMap, BTreeSet};\nuse std::env;\nuse std::io::prelude::*;\nuse std::io::SeekFrom;\nuse std::path::{Path, PathBuf};\n\nuse semver::VersionReq;\nuse serde::{Deserialize, Serialize};\n\nuse crate::core::package::PackageSet;\nuse crate::core::source::SourceMap;\nuse crate::core::PackageId;\nuse crate::core::{Dependency, Package, Source, SourceId};\nuse crate::sources::PathSource;\nuse crate::util::errors::{CargoResult, CargoResultExt};\nuse crate::util::{internal, Config, ToSemver};\nuse crate::util::{FileLock, Filesystem};\n\n#[derive(Deserialize, Serialize)]\n#[serde(untagged)]\npub enum CrateListing {\n V1(CrateListingV1),\n Empty(Empty),\n}\n\n#[derive(Deserialize, Serialize)]\n#[serde(deny_unknown_fields)]\npub struct Empty {}\n\n#[derive(Deserialize, Serialize)]\npub struct CrateListingV1 {\n pub v1: BTreeMap<PackageId, BTreeSet<String>>,\n}\n\npub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {\n let config_root = config.get_path(\"install.root\")?;\n Ok(flag\n .map(PathBuf::from)\n .or_else(|| env::var_os(\"CARGO_INSTALL_ROOT\").map(PathBuf::from))\n .or_else(move || config_root.map(|v| v.val))\n .map(Filesystem::new)\n .unwrap_or_else(|| config.home().clone()))\n}\n\npub fn path_source<'a>(source_id: SourceId, config: &'a Config) -> CargoResult<PathSource<'a>> {\n let path = source_id\n .url()\n .to_file_path()\n .map_err(|()| failure::format_err!(\"path sources must have a valid path\"))?;\n Ok(PathSource::new(&path, source_id, config))\n}\n\npub fn select_pkg<'a, T>(\n mut source: T,\n name: Option<&str>,\n vers: Option<&str>,\n config: &Config,\n needs_update: bool,\n list_all: &mut dyn FnMut(&mut T) -> CargoResult<Vec<Package>>,\n) -> CargoResult<(Package, Box<dyn Source + 'a>)>\nwhere\n T: Source + 'a,\n{\n if needs_update {\n source.update()?;\n }\n\n match name {\n Some(name) => {\n let vers = match vers {\n Some(v) => {\n \/\/ If the version begins with character <, >, =, ^, ~ parse it as a\n \/\/ version range, otherwise parse it as a specific version\n let first = v.chars().nth(0).ok_or_else(|| {\n failure::format_err!(\"no version provided for the `--vers` flag\")\n })?;\n\n match first {\n '<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {\n Ok(v) => Some(v.to_string()),\n Err(_) => failure::bail!(\n \"the `--vers` provided, `{}`, is \\\n not a valid semver version requirement\\n\\n\n Please have a look at \\\n http:\/\/doc.crates.io\/specifying-dependencies.html \\\n for the correct format\",\n v\n ),\n },\n _ => match v.to_semver() {\n Ok(v) => Some(format!(\"={}\", v)),\n Err(_) => {\n let mut msg = format!(\n \"\\\n the `--vers` provided, `{}`, is \\\n not a valid semver version\\n\\n\\\n historically Cargo treated this \\\n as a semver version requirement \\\n accidentally\\nand will continue \\\n to do so, but this behavior \\\n will be removed eventually\",\n v\n );\n\n \/\/ If it is not a valid version but it is a valid version\n \/\/ 
requirement, add a note to the warning\n if v.parse::<VersionReq>().is_ok() {\n msg.push_str(&format!(\n \"\\nif you want to specify semver range, \\\n add an explicit qualifier, like ^{}\",\n v\n ));\n }\n config.shell().warn(&msg)?;\n Some(v.to_string())\n }\n },\n }\n }\n None => None,\n };\n let vers = vers.as_ref().map(|s| &**s);\n let vers_spec = if vers.is_none() && source.source_id().is_registry() {\n \/\/ Avoid pre-release versions from crate.io\n \/\/ unless explicitly asked for\n Some(\"*\")\n } else {\n vers\n };\n let dep = Dependency::parse_no_deprecated(name, vers_spec, source.source_id())?;\n let deps = source.query_vec(&dep)?;\n let pkgid = match deps.iter().map(|p| p.package_id()).max() {\n Some(pkgid) => pkgid,\n None => {\n let vers_info = vers\n .map(|v| format!(\" with version `{}`\", v))\n .unwrap_or_default();\n failure::bail!(\n \"could not find `{}` in {}{}\",\n name,\n source.source_id(),\n vers_info\n )\n }\n };\n\n let pkg = {\n let mut map = SourceMap::new();\n map.insert(Box::new(&mut source));\n PackageSet::new(&[pkgid], map, config)?\n .get_one(pkgid)?\n .clone()\n };\n Ok((pkg, Box::new(source)))\n }\n None => {\n let candidates = list_all(&mut source)?;\n let binaries = candidates\n .iter()\n .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);\n let examples = candidates\n .iter()\n .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);\n let pkg = match one(binaries, |v| multi_err(\"binaries\", v))? {\n Some(p) => p,\n None => match one(examples, |v| multi_err(\"examples\", v))? {\n Some(p) => p,\n None => failure::bail!(\n \"no packages found with binaries or \\\n examples\"\n ),\n },\n };\n return Ok((pkg.clone(), Box::new(source)));\n\n fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {\n pkgs.sort_unstable_by_key(|a| a.name());\n format!(\n \"multiple packages with {} found: {}\",\n kind,\n pkgs.iter()\n .map(|p| p.name().as_str())\n .collect::<Vec<_>>()\n .join(\", \")\n )\n }\n }\n }\n}\n\npub fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>\nwhere\n I: Iterator,\n F: FnOnce(Vec<I::Item>) -> String,\n{\n match (i.next(), i.next()) {\n (Some(i1), Some(i2)) => {\n let mut v = vec![i1, i2];\n v.extend(i);\n Err(failure::format_err!(\"{}\", f(v)))\n }\n (Some(i), None) => Ok(Some(i)),\n (None, _) => Ok(None),\n }\n}\n\npub fn read_crate_list(file: &FileLock) -> CargoResult<CrateListingV1> {\n let listing = (|| -> CargoResult<_> {\n let mut contents = String::new();\n file.file().read_to_string(&mut contents)?;\n let listing =\n toml::from_str(&contents).chain_err(|| internal(\"invalid TOML found for metadata\"))?;\n match listing {\n CrateListing::V1(v1) => Ok(v1),\n CrateListing::Empty(_) => Ok(CrateListingV1 {\n v1: BTreeMap::new(),\n }),\n }\n })()\n .chain_err(|| {\n failure::format_err!(\n \"failed to parse crate metadata at `{}`\",\n file.path().to_string_lossy()\n )\n })?;\n Ok(listing)\n}\n\npub fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()> {\n (|| -> CargoResult<_> {\n let mut file = file.file();\n file.seek(SeekFrom::Start(0))?;\n file.set_len(0)?;\n let data = toml::to_string(&CrateListing::V1(listing))?;\n file.write_all(data.as_bytes())?;\n Ok(())\n })()\n .chain_err(|| {\n failure::format_err!(\n \"failed to write crate metadata at `{}`\",\n file.path().to_string_lossy()\n )\n })?;\n Ok(())\n}\n\npub fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {\n root.open_rw(Path::new(\".crates.toml\"), config, \"crate 
metadata\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>adding default_handler.rs<commit_after>use bt_messages::Message;\n\/\/use bt_messages::Message::{Choke, Unchoke};\nuse buffered_reader::BufferedReader;\nuse std::net::TcpStream;\nuse std::sync::mpsc::Sender;\n\npub struct DefaultHandler;\n\n\/\/\/ Handles messages. This is a cheap way to force reactive style\npub trait Handler {\n type MessageType;\n fn handle(&mut self, message: Self::MessageType, peer: &mut Peer) -> Action;\n}\n\nimpl DefaultHandler {\n pub fn new () -> DefaultHandler {\n DefaultHandler\n }\n}\n\npub enum Action {\n None,\n Respond(Vec<u8>)\n}\n\n\/\/\/ The default algorithm\nimpl Handler for DefaultHandler {\n type MessageType = Message;\n #[inline]\n fn handle (&mut self, message: Message, peer: &mut Peer) -> Action {\n println!(\"{:?}\", message);\n match message {\n Message::Choke => {\n peer.set_choked(true);\n Action::None\n },\n Message::Unchoke => {\n peer.set_choked(false);\n Action::None\n },\n _ => {\n Action::None\n }\n }\n }\n}\n\npub struct Peer {\n pub id: String,\n pub chan: Sender<Action>,\n stream: TcpStream,\n state: State\n}\n\n#[derive(Debug)]\nstruct State {\n choked: bool,\n \/\/the intention is that eventually we will support growable files. so going with vector\n bitfield: Vec<u8>\n}\n\nimpl Peer {\n pub fn new (id:String, chan: Sender<Action>, stream: TcpStream) -> Peer {\n Peer {\n id: id,\n chan: chan,\n stream: stream,\n state: State {\n choked: true,\n bitfield: Vec::new()\n }\n }\n }\n\n fn set_choked (&mut self, choked: bool) {\n self.state.choked = choked;\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::collections::VecDeque;\nuse std::env::{set_current_dir, current_dir, home_dir};\nuse std::path::PathBuf;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct DirectoryStack {\n dirs: VecDeque<PathBuf>, \/\/ The top is always the current directory\n max_size: usize,\n}\n\nimpl DirectoryStack {\n pub fn new() -> Result<DirectoryStack, &'static str> {\n let mut dirs: VecDeque<PathBuf> = VecDeque::new();\n if let Ok(curr_dir) = current_dir() {\n dirs.push_front(curr_dir);\n Ok(DirectoryStack {\n dirs: dirs,\n max_size: 1000, \/\/ TODO don't hardcode this size, make it configurable\n })\n } else {\n Err(\"Failed to get current directory when building directory stack\")\n }\n }\n\n pub fn popd<I: IntoIterator>(&mut self, _: I) -> i32\n where I::Item: AsRef<str>\n {\n if self.dirs.len() < 2 {\n println!(\"Directory stack is empty\");\n return FAILURE;\n }\n if let Some(dir) = self.dirs.get(self.dirs.len() - 2) {\n if let Err(err) = set_current_dir(dir) {\n println!(\"{}: Failed to switch to directory {}\", err, dir.display());\n return FAILURE;\n }\n }\n self.dirs.pop_back();\n self.print_dirs();\n SUCCESS\n }\n\n pub fn pushd<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n if let Some(dir) = args.into_iter().nth(1) {\n let result = self.change_and_push_dir(dir.as_ref());\n self.print_dirs();\n result\n } else {\n println!(\"No directory provided\");\n FAILURE\n }\n }\n\n pub fn cd<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n if let Some(dir) = args.into_iter().nth(1) {\n let dir = dir.as_ref();\n self.change_and_push_dir(dir)\n } else {\n if let Some(home) = home_dir() {\n if let Some(home) = home.to_str() {\n self.change_and_push_dir(home)\n } else {\n println!(\"Failed to convert home directory to str\");\n FAILURE\n }\n } else {\n println!(\"Failed to get home directory\");\n FAILURE\n }\n }\n }\n\n 
pub fn change_and_push_dir(&mut self, dir: &str) -> i32\n {\n match (set_current_dir(dir), current_dir()) {\n (Ok(()), Ok(cur_dir)) => {\n self.push_dir(cur_dir);\n SUCCESS\n }\n (Err(err), _) => {\n println!(\"Failed to set current dir to {}: {}\", dir, err);\n FAILURE\n }\n (_, _) => FAILURE \/\/ This should not happen\n }\n }\n\n fn push_dir(&mut self, path: PathBuf) {\n self.dirs.push_front(path);\n self.dirs.truncate(self.max_size);\n }\n\n pub fn dirs<I: IntoIterator>(&self, _: I) -> i32\n where I::Item: AsRef<str>\n {\n self.print_dirs();\n SUCCESS\n }\n\n fn print_dirs(&self) {\n let dir = self.dirs.iter().fold(String::new(), |acc, dir| {\n acc + \" \" + dir.to_str().unwrap_or(\"No directory found\")\n });\n println!(\"{}\", dir.trim_left());\n }\n}\n<commit_msg>Add `cd -` and fix bug in `popd`<commit_after>use std::collections::VecDeque;\nuse std::env::{set_current_dir, current_dir, home_dir};\nuse std::path::PathBuf;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct DirectoryStack {\n dirs: VecDeque<PathBuf>, \/\/ The top is always the current directory\n max_size: usize,\n}\n\nimpl DirectoryStack {\n pub fn new() -> Result<DirectoryStack, &'static str> {\n let mut dirs: VecDeque<PathBuf> = VecDeque::new();\n if let Ok(curr_dir) = current_dir() {\n dirs.push_front(curr_dir);\n Ok(DirectoryStack {\n dirs: dirs,\n max_size: 1000, \/\/ TODO don't hardcode this size, make it configurable\n })\n } else {\n Err(\"Failed to get current directory when building directory stack\")\n }\n }\n\n pub fn popd<I: IntoIterator>(&mut self, _: I) -> i32\n where I::Item: AsRef<str>\n {\n if let Some(dir) = self.get_previous_dir() {\n if let Err(err) = set_current_dir(dir) {\n println!(\"{}: Failed to switch to directory {}\", err, dir.display());\n return FAILURE;\n }\n } else {\n println!(\"Directory stack is empty\");\n return FAILURE;\n }\n self.dirs.pop_front();\n self.print_dirs();\n SUCCESS\n }\n\n pub fn pushd<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n if let Some(dir) = args.into_iter().nth(1) {\n let result = self.change_and_push_dir(dir.as_ref());\n self.print_dirs();\n result\n } else {\n println!(\"No directory provided\");\n FAILURE\n }\n }\n\n pub fn cd<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n if let Some(dir) = args.into_iter().nth(1) {\n let dir = dir.as_ref();\n if dir == \"-\" {\n self.switch_to_previous_directory()\n } else {\n self.change_and_push_dir(dir)\n }\n } else {\n self.switch_to_home_directory()\n }\n }\n\n fn switch_to_home_directory(&mut self) -> i32 {\n if let Some(home) = home_dir() {\n if let Some(home) = home.to_str() {\n self.change_and_push_dir(home)\n } else {\n println!(\"Failed to convert home directory to str\");\n FAILURE\n }\n } else {\n println!(\"Failed to get home directory\");\n FAILURE\n }\n }\n\n fn switch_to_previous_directory(&mut self) -> i32 {\n if let Some(prev) = self.get_previous_dir()\n .map(|path| path.to_string_lossy().to_string()) {\n println!(\"{}\", prev);\n self.change_and_push_dir(&prev)\n } else {\n println!(\"No previous directory to switch to\");\n FAILURE\n }\n }\n\n fn get_previous_dir(&self) -> Option<&PathBuf> {\n if self.dirs.len() < 2 {\n None\n } else {\n self.dirs.get(1)\n }\n }\n\n pub fn change_and_push_dir(&mut self, dir: &str) -> i32 {\n match (set_current_dir(dir), current_dir()) {\n (Ok(()), Ok(cur_dir)) => {\n self.push_dir(cur_dir);\n SUCCESS\n }\n (Err(err), _) => {\n println!(\"Failed to set current dir to {}: {}\", dir, err);\n FAILURE\n 
}\n (_, _) => FAILURE \/\/ This should not happen\n }\n }\n\n fn push_dir(&mut self, path: PathBuf) {\n self.dirs.push_front(path);\n self.dirs.truncate(self.max_size);\n }\n\n pub fn dirs<I: IntoIterator>(&self, _: I) -> i32\n where I::Item: AsRef<str>\n {\n self.print_dirs();\n SUCCESS\n }\n\n fn print_dirs(&self) {\n let dir = self.dirs.iter().fold(String::new(), |acc, dir| {\n acc + \" \" + dir.to_str().unwrap_or(\"No directory found\")\n });\n println!(\"{}\", dir.trim_left());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#[macro_export]\nmacro_rules! generate_error_imports {\n () => {\n use std::error::Error;\n use std::fmt::Error as FmtError;\n use std::fmt::{Display, Formatter};\n\n use $crate::into::IntoError;\n }\n}\n\n#[macro_export]\nmacro_rules! generate_error_module {\n ( $exprs:item ) => {\n pub mod error {\n generate_error_imports!();\n $exprs\n }\n }\n}\n\n#[macro_export]\nmacro_rules! generate_custom_error_types {\n {\n $name: ident,\n $kindname: ident,\n $customMemberTypeName: ident,\n $($kind:ident => $string:expr),*\n } => {\n #[derive(Clone, Copy, Debug, PartialEq)]\n pub enum $kindname {\n $( $kind ),*\n }\n\n impl Display for $kindname {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s = match *self {\n $( $kindname::$kind => $string ),*\n };\n try!(write!(fmt, \"{}\", s));\n Ok(())\n }\n\n }\n\n impl IntoError for $kindname {\n type Target = $name;\n\n fn into_error(self) -> Self::Target {\n $name::new(self, None)\n }\n\n fn into_error_with_cause(self, cause: Box<Error>) -> Self::Target {\n $name::new(self, Some(cause))\n }\n\n }\n\n #[derive(Debug)]\n pub struct $name {\n err_type: $kindname,\n cause: Option<Box<Error>>,\n custom_data: Option<$customMemberTypeName>,\n }\n\n impl $name {\n\n pub fn new(errtype: $kindname, cause: Option<Box<Error>>) -> $name {\n $name {\n err_type: errtype,\n cause: cause,\n custom_data: None,\n }\n }\n\n pub fn err_type(&self) -> $kindname {\n self.err_type\n }\n\n pub fn with_custom_data(mut self, custom: $customMemberTypeName) -> $name {\n self.custom_data = Some(custom);\n self\n }\n\n }\n\n impl Into<$name> for $kindname {\n\n fn into(self) -> $name {\n $name::new(self, None)\n }\n\n }\n\n impl Display for $name {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{}]\", self.err_type));\n Ok(())\n }\n\n }\n\n impl Error for $name {\n\n fn description(&self) -> &str {\n match self.err_type {\n $( $kindname::$kind => $string ),*\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n }\n}\n\n#[macro_export]\nmacro_rules! generate_result_helper {\n (\n $name: ident,\n $kindname: ident\n ) => {\n \/\/\/ Trait to replace\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.map_err(Box::new).map_err(|e| SomeType::SomeErrorKind.into_error_with_cause(e))\n \/\/\/ \/\/ or:\n \/\/\/ foo.map_err(|e| SomeType::SomeErrorKind.into_error_with_cause(Box::new(e)))\n \/\/\/ ```\n \/\/\/\n \/\/\/ with much nicer\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.map_err_into(SomeType::SomeErrorKind)\n \/\/\/ ```\n \/\/\/\n pub trait MapErrInto<T> {\n fn map_err_into(self, error_kind: $kindname) -> Result<T, $name>;\n }\n\n impl<T, E: Error + 'static> MapErrInto<T> for Result<T, E> {\n\n fn map_err_into(self, error_kind: $kindname) -> Result<T, $name> {\n self.map_err(Box::new)\n .map_err(|e| error_kind.into_error_with_cause(e))\n }\n\n }\n }\n}\n\n#[macro_export]\nmacro_rules! 
generate_option_helper {\n (\n $name: ident,\n $kindname: ident\n ) => {\n \/\/\/ Trait to replace\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.ok_or(SomeType::SomeErrorKind.into_error())\n \/\/\/ ```\n \/\/\/\n \/\/\/ with\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.ok_or_errkind(SomeType::SomeErrorKind)\n \/\/\/ ```\n pub trait OkOrErr<T> {\n fn ok_or_errkind(self, kind: $kindname) -> Result<T, $name>;\n }\n\n impl<T> OkOrErr<T> for Option<T> {\n\n fn ok_or_errkind(self, kind: $kindname) -> Result<T, $name> {\n self.ok_or(kind.into_error())\n }\n\n }\n }\n}\n\n#[macro_export]\nmacro_rules! generate_error_types {\n (\n $name: ident,\n $kindname: ident,\n $($kind:ident => $string:expr),*\n ) => {\n #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\n pub struct SomeNotExistingTypeWithATypeNameNoOneWillEverChoose {}\n generate_custom_error_types!($name, $kindname,\n SomeNotExistingTypeWithATypeNameNoOneWillEverChoose,\n $($kind => $string),*);\n\n generate_result_helper!($name, $kindname);\n generate_option_helper!($name, $kindname);\n }\n}\n\n\n#[cfg(test)]\nmod test {\n\n generate_error_module!(\n generate_error_types!(TestError, TestErrorKind,\n TestErrorKindA => \"testerrorkind a\",\n TestErrorKindB => \"testerrorkind B\");\n );\n\n #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\n pub struct CustomData {\n pub test: i32,\n pub othr: i64,\n }\n\n generate_error_imports!();\n\n generate_custom_error_types!(CustomTestError, CustomTestErrorKind,\n CustomData,\n CustomErrorKindA => \"customerrorkind a\",\n CustomErrorKindB => \"customerrorkind B\");\n\n impl CustomTestError {\n pub fn test(&self) -> i32 {\n match self.custom_data {\n Some(t) => t.test,\n None => 0,\n }\n }\n\n pub fn bar(&self) -> i64 {\n match self.custom_data {\n Some(t) => t.othr,\n None => 0,\n }\n }\n }\n\n\n #[test]\n fn test_a() {\n use self::error::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindA;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n }\n\n #[test]\n fn test_b() {\n use self::error::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e));\n\n }\n\n #[test]\n fn test_ab() {\n use std::error::Error;\n use self::error::{TestError, TestErrorKind};\n\n let kinda = TestErrorKind::TestErrorKindA;\n let kindb = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kinda));\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kindb));\n\n let e = TestError::new(kinda, Some(Box::new(TestError::new(kindb, None))));\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n assert_eq!(TestErrorKind::TestErrorKindA, e.err_type());\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e.cause().unwrap()));\n }\n\n pub mod anothererrormod {\n generate_error_imports!();\n generate_error_types!(TestError, TestErrorKind,\n TestErrorKindA => \"testerrorkind a\",\n TestErrorKindB => \"testerrorkind B\");\n }\n\n #[test]\n fn test_other_a() {\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindA;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n 
assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n }\n\n #[test]\n fn test_other_b() {\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e));\n\n }\n\n #[test]\n fn test_other_ab() {\n use std::error::Error;\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kinda = TestErrorKind::TestErrorKindA;\n let kindb = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kinda));\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kindb));\n\n let e = TestError::new(kinda, Some(Box::new(TestError::new(kindb, None))));\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n assert_eq!(TestErrorKind::TestErrorKindA, e.err_type());\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e.cause().unwrap()));\n }\n\n #[test]\n fn test_error_kind_mapping() {\n use std::io::{Error, ErrorKind};\n use self::error::{OkOrErr, MapErrInto};\n use self::error::TestErrorKind;\n\n let err : Result<(), _> = Err(Error::new(ErrorKind::Other, \"\"));\n let err : Result<(), _> = err.map_err_into(TestErrorKind::TestErrorKindA);\n\n assert!(err.is_err());\n let err = err.unwrap_err();\n\n match err.err_type() {\n TestErrorKind::TestErrorKindA => assert!(true),\n _ => assert!(false),\n }\n }\n\n #[test]\n fn test_error_kind_double_mapping() {\n use std::io::{Error, ErrorKind};\n use self::error::{OkOrErr, MapErrInto};\n use self::error::TestErrorKind;\n\n let err : Result<(), _> = Err(Error::new(ErrorKind::Other, \"\"));\n let err : Result<(), _> = err.map_err_into(TestErrorKind::TestErrorKindA)\n .map_err_into(TestErrorKind::TestErrorKindB);\n\n assert!(err.is_err());\n let err = err.unwrap_err();\n match err.err_type() {\n TestErrorKind::TestErrorKindB => assert!(true),\n _ => assert!(false),\n }\n\n \/\/ not sure how to test that the inner error is of TestErrorKindA, actually...\n match err.cause() {\n Some(_) => assert!(true),\n None => assert!(false),\n }\n\n }\n\n #[test]\n fn test_error_option_good() {\n use self::error::{OkOrErr, MapErrInto};\n use self::error::TestErrorKind;\n\n let something = Some(1);\n match something.ok_or_errkind(TestErrorKind::TestErrorKindA) {\n Ok(1) => assert!(true),\n _ => assert!(false),\n }\n }\n\n #[test]\n fn test_error_option_bad() {\n use self::error::{OkOrErr, MapErrInto};\n use self::error::TestErrorKind;\n\n let something : Option<i32> = None;\n match something.ok_or_errkind(TestErrorKind::TestErrorKindA) {\n Ok(_) => assert!(false),\n Err(e) => assert!(true),\n }\n }\n\n}\n<commit_msg>libimagerror: Eliminate unused imports<commit_after>#[macro_export]\nmacro_rules! generate_error_imports {\n () => {\n use std::error::Error;\n use std::fmt::Error as FmtError;\n use std::fmt::{Display, Formatter};\n\n use $crate::into::IntoError;\n }\n}\n\n#[macro_export]\nmacro_rules! generate_error_module {\n ( $exprs:item ) => {\n pub mod error {\n generate_error_imports!();\n $exprs\n }\n }\n}\n\n#[macro_export]\nmacro_rules! 
generate_custom_error_types {\n {\n $name: ident,\n $kindname: ident,\n $customMemberTypeName: ident,\n $($kind:ident => $string:expr),*\n } => {\n #[derive(Clone, Copy, Debug, PartialEq)]\n pub enum $kindname {\n $( $kind ),*\n }\n\n impl Display for $kindname {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s = match *self {\n $( $kindname::$kind => $string ),*\n };\n try!(write!(fmt, \"{}\", s));\n Ok(())\n }\n\n }\n\n impl IntoError for $kindname {\n type Target = $name;\n\n fn into_error(self) -> Self::Target {\n $name::new(self, None)\n }\n\n fn into_error_with_cause(self, cause: Box<Error>) -> Self::Target {\n $name::new(self, Some(cause))\n }\n\n }\n\n #[derive(Debug)]\n pub struct $name {\n err_type: $kindname,\n cause: Option<Box<Error>>,\n custom_data: Option<$customMemberTypeName>,\n }\n\n impl $name {\n\n pub fn new(errtype: $kindname, cause: Option<Box<Error>>) -> $name {\n $name {\n err_type: errtype,\n cause: cause,\n custom_data: None,\n }\n }\n\n pub fn err_type(&self) -> $kindname {\n self.err_type\n }\n\n pub fn with_custom_data(mut self, custom: $customMemberTypeName) -> $name {\n self.custom_data = Some(custom);\n self\n }\n\n }\n\n impl Into<$name> for $kindname {\n\n fn into(self) -> $name {\n $name::new(self, None)\n }\n\n }\n\n impl Display for $name {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{}]\", self.err_type));\n Ok(())\n }\n\n }\n\n impl Error for $name {\n\n fn description(&self) -> &str {\n match self.err_type {\n $( $kindname::$kind => $string ),*\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n }\n}\n\n#[macro_export]\nmacro_rules! generate_result_helper {\n (\n $name: ident,\n $kindname: ident\n ) => {\n \/\/\/ Trait to replace\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.map_err(Box::new).map_err(|e| SomeType::SomeErrorKind.into_error_with_cause(e))\n \/\/\/ \/\/ or:\n \/\/\/ foo.map_err(|e| SomeType::SomeErrorKind.into_error_with_cause(Box::new(e)))\n \/\/\/ ```\n \/\/\/\n \/\/\/ with much nicer\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.map_err_into(SomeType::SomeErrorKind)\n \/\/\/ ```\n \/\/\/\n pub trait MapErrInto<T> {\n fn map_err_into(self, error_kind: $kindname) -> Result<T, $name>;\n }\n\n impl<T, E: Error + 'static> MapErrInto<T> for Result<T, E> {\n\n fn map_err_into(self, error_kind: $kindname) -> Result<T, $name> {\n self.map_err(Box::new)\n .map_err(|e| error_kind.into_error_with_cause(e))\n }\n\n }\n }\n}\n\n#[macro_export]\nmacro_rules! generate_option_helper {\n (\n $name: ident,\n $kindname: ident\n ) => {\n \/\/\/ Trait to replace\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.ok_or(SomeType::SomeErrorKind.into_error())\n \/\/\/ ```\n \/\/\/\n \/\/\/ with\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ foo.ok_or_errkind(SomeType::SomeErrorKind)\n \/\/\/ ```\n pub trait OkOrErr<T> {\n fn ok_or_errkind(self, kind: $kindname) -> Result<T, $name>;\n }\n\n impl<T> OkOrErr<T> for Option<T> {\n\n fn ok_or_errkind(self, kind: $kindname) -> Result<T, $name> {\n self.ok_or(kind.into_error())\n }\n\n }\n }\n}\n\n#[macro_export]\nmacro_rules! 
generate_error_types {\n (\n $name: ident,\n $kindname: ident,\n $($kind:ident => $string:expr),*\n ) => {\n #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\n pub struct SomeNotExistingTypeWithATypeNameNoOneWillEverChoose {}\n generate_custom_error_types!($name, $kindname,\n SomeNotExistingTypeWithATypeNameNoOneWillEverChoose,\n $($kind => $string),*);\n\n generate_result_helper!($name, $kindname);\n generate_option_helper!($name, $kindname);\n }\n}\n\n\n#[cfg(test)]\nmod test {\n\n generate_error_module!(\n generate_error_types!(TestError, TestErrorKind,\n TestErrorKindA => \"testerrorkind a\",\n TestErrorKindB => \"testerrorkind B\");\n );\n\n #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\n pub struct CustomData {\n pub test: i32,\n pub othr: i64,\n }\n\n generate_error_imports!();\n\n generate_custom_error_types!(CustomTestError, CustomTestErrorKind,\n CustomData,\n CustomErrorKindA => \"customerrorkind a\",\n CustomErrorKindB => \"customerrorkind B\");\n\n impl CustomTestError {\n pub fn test(&self) -> i32 {\n match self.custom_data {\n Some(t) => t.test,\n None => 0,\n }\n }\n\n pub fn bar(&self) -> i64 {\n match self.custom_data {\n Some(t) => t.othr,\n None => 0,\n }\n }\n }\n\n\n #[test]\n fn test_a() {\n use self::error::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindA;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n }\n\n #[test]\n fn test_b() {\n use self::error::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e));\n\n }\n\n #[test]\n fn test_ab() {\n use std::error::Error;\n use self::error::{TestError, TestErrorKind};\n\n let kinda = TestErrorKind::TestErrorKindA;\n let kindb = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kinda));\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kindb));\n\n let e = TestError::new(kinda, Some(Box::new(TestError::new(kindb, None))));\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n assert_eq!(TestErrorKind::TestErrorKindA, e.err_type());\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e.cause().unwrap()));\n }\n\n pub mod anothererrormod {\n generate_error_imports!();\n generate_error_types!(TestError, TestErrorKind,\n TestErrorKindA => \"testerrorkind a\",\n TestErrorKindB => \"testerrorkind B\");\n }\n\n #[test]\n fn test_other_a() {\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindA;\n assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n }\n\n #[test]\n fn test_other_b() {\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kind = TestErrorKind::TestErrorKindB;\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kind));\n\n let e = TestError::new(kind, None);\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e));\n\n }\n\n #[test]\n fn test_other_ab() {\n use std::error::Error;\n use self::anothererrormod::{TestError, TestErrorKind};\n\n let kinda = TestErrorKind::TestErrorKindA;\n let kindb = TestErrorKind::TestErrorKindB;\n 
assert_eq!(String::from(\"testerrorkind a\"), format!(\"{}\", kinda));\n assert_eq!(String::from(\"testerrorkind B\"), format!(\"{}\", kindb));\n\n let e = TestError::new(kinda, Some(Box::new(TestError::new(kindb, None))));\n assert_eq!(String::from(\"[testerrorkind a]\"), format!(\"{}\", e));\n assert_eq!(TestErrorKind::TestErrorKindA, e.err_type());\n assert_eq!(String::from(\"[testerrorkind B]\"), format!(\"{}\", e.cause().unwrap()));\n }\n\n #[test]\n fn test_error_kind_mapping() {\n use std::io::{Error, ErrorKind};\n use self::error::MapErrInto;\n use self::error::TestErrorKind;\n\n let err : Result<(), _> = Err(Error::new(ErrorKind::Other, \"\"));\n let err : Result<(), _> = err.map_err_into(TestErrorKind::TestErrorKindA);\n\n assert!(err.is_err());\n let err = err.unwrap_err();\n\n match err.err_type() {\n TestErrorKind::TestErrorKindA => assert!(true),\n _ => assert!(false),\n }\n }\n\n #[test]\n fn test_error_kind_double_mapping() {\n use std::io::{Error, ErrorKind};\n use self::error::MapErrInto;\n use self::error::TestErrorKind;\n\n let err : Result<(), _> = Err(Error::new(ErrorKind::Other, \"\"));\n let err : Result<(), _> = err.map_err_into(TestErrorKind::TestErrorKindA)\n .map_err_into(TestErrorKind::TestErrorKindB);\n\n assert!(err.is_err());\n let err = err.unwrap_err();\n match err.err_type() {\n TestErrorKind::TestErrorKindB => assert!(true),\n _ => assert!(false),\n }\n\n \/\/ not sure how to test that the inner error is of TestErrorKindA, actually...\n match err.cause() {\n Some(_) => assert!(true),\n None => assert!(false),\n }\n\n }\n\n #[test]\n fn test_error_option_good() {\n use self::error::OkOrErr;\n use self::error::TestErrorKind;\n\n let something = Some(1);\n match something.ok_or_errkind(TestErrorKind::TestErrorKindA) {\n Ok(1) => assert!(true),\n _ => assert!(false),\n }\n }\n\n #[test]\n fn test_error_option_bad() {\n use self::error::OkOrErr;\n use self::error::TestErrorKind;\n\n let something : Option<i32> = None;\n match something.ok_or_errkind(TestErrorKind::TestErrorKindA) {\n Ok(_) => assert!(false),\n Err(e) => assert!(true),\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: associated type within trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>v0.3.3.5 - Testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Move back to a single-type model with an SSL field that is only present when the secure feature flag is used<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! 
A user friendly game engine written in Rust.\n\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx;\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\n\n\/\/ Crates used to reexport.\nextern crate \"ai_behavior\" as ai_behavior_lib;\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"window\" as window_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"current\" as current_lib;\nextern crate \"quack\" as quack_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\n\n\/\/ Reexports.\npub use current_lib as current;\npub use quack_lib as quack;\npub use ai_behavior_lib as ai_behavior;\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use window_lib as window;\npub use cam_lib as cam;\npub use fps_counter_lib as fps_counter;\npub use drag_controller_lib as drag_controller;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n WindowSettings,\n};\n\npub use quack::{\n Action,\n ActOn,\n Get,\n GetFrom,\n Set,\n SetAt,\n};\npub use current::{\n Current,\n CurrentGuard,\n};\n\n#[cfg(feature = \"include_gfx\")]\nuse gfx_graphics::G2D;\n#[cfg(feature = \"include_gfx\")]\nuse gfx::{ DeviceHelper };\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\n\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\nfn start_window(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = Rc::new(RefCell::new(WindowBackEnd::new(\n opengl,\n window_settings,\n )));\n let mut gl = Rc::new(RefCell::new(Gl::new(opengl)));\n let mut fps_counter = Rc::new(RefCell::new(FPSCounter::new()));\n\n let window_guard = CurrentGuard::new(&mut window);\n let gl_guard = CurrentGuard::new(&mut gl);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(gl_guard);\n drop(fps_counter_guard);\n}\n\n#[cfg(feature = \"include_gfx\")]\nfn start_gfx(f: ||) {\n let window = current_window();\n\n let mut device = Rc::new(RefCell::new(gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n })));\n let mut g2d = Rc::new(RefCell::new(G2D::new(device.borrow_mut().deref_mut())));\n let mut renderer = Rc::new(RefCell::new(device.borrow_mut().create_renderer()));\n let event::window::Size([w, h]) = window.get(); \n let mut frame = Rc::new(RefCell::new(gfx::Frame::new(w as u16, h as u16)));\n\n let device_guard = CurrentGuard::new(&mut device);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n\n f();\n \n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(device_guard);\n}\n\n#[cfg(not(feature = \"include_gfx\"))]\nfn start_gfx(f: ||) {\n f();\n}\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n start_window(opengl, window_settings, || {\n if cfg!(feature = \"include_gfx\") {\n start_gfx(|| f());\n } else {\n f();\n }\n });\n}\n\n\/\/\/ The current window\npub fn current_window() -> Rc<RefCell<WindowBackEnd>> {\n unsafe {\n Current::<Rc<RefCell<WindowBackEnd>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx device\n#[cfg(feature = \"include_gfx\")]\npub fn current_gfx_device() -> Rc<RefCell<gfx::GlDevice>> {\n unsafe {\n Current::<Rc<RefCell<gfx::GlDevice>>>::new().clone()\n }\n}\n\/\/\/ The current opengl_graphics back-end\npub fn current_gl() -> Rc<RefCell<Gl>> {\n unsafe {\n Current::<Rc<RefCell<Gl>>>::new().clone()\n }\n}\n\/\/\/ The current gfx_graphics back-end\n#[cfg(feature = \"include_gfx\")]\npub fn current_g2d() -> Rc<RefCell<G2D>> {\n unsafe {\n Current::<Rc<RefCell<G2D>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx renderer\n#[cfg(feature = \"include_gfx\")]\npub fn current_renderer() -> Rc<RefCell<gfx::Renderer<gfx::GlCommandBuffer>>> {\n unsafe {\n Current::<Rc<RefCell<gfx::Renderer<gfx::GlCommandBuffer>>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx frame\n#[cfg(feature = \"include_gfx\")]\npub fn current_frame() -> Rc<RefCell<gfx::Frame>> {\n unsafe {\n Current::<Rc<RefCell<gfx::Frame>>>::new().clone()\n }\n}\n\/\/\/ The current FPS counter\npub fn current_fps_counter() -> Rc<RefCell<FPSCounter>> {\n unsafe {\n Current::<Rc<RefCell<FPSCounter>>>::new().clone()\n }\n}\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events() -> event::Events<Rc<RefCell<WindowBackEnd>>> {\n Events::new(current_window())\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n current_fps_counter().borrow_mut().tick()\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: 
String) {\n current_window().set_mut(window::Title(text));\n}\n\n\/\/\/ Returns true if the current window should be closed.\npub fn should_close() -> bool {\n use window::ShouldClose;\n let ShouldClose(val) = current_window().get();\n val\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\n#[cfg(feature = \"include_gfx\")]\npub fn render_2d_gfx(\n bg_color: Option<[f32, ..4]>, \n f: |graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n let renderer = current_renderer();\n let mut renderer = renderer.borrow_mut();\n let renderer = renderer.deref_mut();\n current_g2d().borrow_mut().draw(\n renderer,\n current_frame().borrow_mut().deref_mut(), \n |c, g| {\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n current_gfx_device().borrow_mut().submit(renderer.as_buffer());\n renderer.reset();\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\n\/\/\/\n\/\/\/ Panics if called nested within the closure\n\/\/\/ to prevent mutable aliases to the graphics back-end.\npub fn render_2d_opengl(\n bg_color: Option<[f32, ..4]>,\n f: |graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n let window::Size([w, h]) = current_window().borrow().deref().get();\n current_gl().borrow_mut().draw([0, 0, w as i32, h as i32], |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n}\n\n<commit_msg>Upgrade to latest Rust<commit_after>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! A user friendly game engine written in Rust.\n\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx;\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\n\n\/\/ Crates used to reexport.\nextern crate \"ai_behavior\" as ai_behavior_lib;\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"window\" as window_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"current\" as current_lib;\nextern crate \"quack\" as quack_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\n\n\/\/ Reexports.\npub use current_lib as current;\npub use quack_lib as quack;\npub use ai_behavior_lib as ai_behavior;\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use window_lib as window;\npub use cam_lib as cam;\npub use fps_counter_lib as fps_counter;\npub use drag_controller_lib as drag_controller;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n WindowSettings,\n};\n\npub use quack::{\n Action,\n ActOn,\n Get,\n GetFrom,\n Set,\n SetAt,\n};\npub use current::{\n Current,\n CurrentGuard,\n};\n\n#[cfg(feature = \"include_gfx\")]\nuse gfx_graphics::G2D;\n#[cfg(feature = \"include_gfx\")]\nuse gfx::{ DeviceHelper };\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\n\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\nfn start_window(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = Rc::new(RefCell::new(WindowBackEnd::new(\n opengl,\n window_settings,\n )));\n let mut gl = Rc::new(RefCell::new(Gl::new(opengl)));\n let mut fps_counter = Rc::new(RefCell::new(FPSCounter::new()));\n\n let window_guard = CurrentGuard::new(&mut window);\n let gl_guard = CurrentGuard::new(&mut gl);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(gl_guard);\n drop(fps_counter_guard);\n}\n\n#[cfg(feature = \"include_gfx\")]\nfn start_gfx(f: ||) {\n let window = current_window();\n\n let mut device = Rc::new(RefCell::new(gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n })));\n let mut g2d = Rc::new(RefCell::new(G2D::new(device.borrow_mut().deref_mut())));\n let mut renderer = Rc::new(RefCell::new(device.borrow_mut().create_renderer()));\n let event::window::Size([w, h]) = window.get(); \n let mut frame = Rc::new(RefCell::new(gfx::Frame::new(w as u16, h as u16)));\n\n let device_guard = CurrentGuard::new(&mut device);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n\n f();\n \n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(device_guard);\n}\n\n#[cfg(not(feature = \"include_gfx\"))]\nfn start_gfx(f: ||) {\n f();\n}\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n start_window(opengl, window_settings, || {\n if cfg!(feature = \"include_gfx\") {\n start_gfx(|| f());\n } else {\n f();\n }\n });\n}\n\n\/\/\/ The current window\npub fn current_window() -> Rc<RefCell<WindowBackEnd>> {\n unsafe {\n Current::<Rc<RefCell<WindowBackEnd>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx device\n#[cfg(feature = \"include_gfx\")]\npub fn current_gfx_device() -> Rc<RefCell<gfx::GlDevice>> {\n unsafe {\n Current::<Rc<RefCell<gfx::GlDevice>>>::new().clone()\n }\n}\n\/\/\/ The current opengl_graphics back-end\npub fn current_gl() -> Rc<RefCell<Gl>> {\n unsafe {\n Current::<Rc<RefCell<Gl>>>::new().clone()\n }\n}\n\/\/\/ The current gfx_graphics back-end\n#[cfg(feature = \"include_gfx\")]\npub fn current_g2d() -> Rc<RefCell<G2D>> {\n unsafe {\n Current::<Rc<RefCell<G2D>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx renderer\n#[cfg(feature = \"include_gfx\")]\npub fn current_renderer() -> Rc<RefCell<gfx::Renderer<gfx::GlCommandBuffer>>> {\n unsafe {\n Current::<Rc<RefCell<gfx::Renderer<gfx::GlCommandBuffer>>>>::new().clone()\n }\n}\n\/\/\/ The current Gfx frame\n#[cfg(feature = \"include_gfx\")]\npub fn current_frame() -> Rc<RefCell<gfx::Frame>> {\n unsafe {\n Current::<Rc<RefCell<gfx::Frame>>>::new().clone()\n }\n}\n\/\/\/ The current FPS counter\npub fn current_fps_counter() -> Rc<RefCell<FPSCounter>> {\n unsafe {\n Current::<Rc<RefCell<FPSCounter>>>::new().clone()\n }\n}\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events<E>() -> event::Events<Rc<RefCell<WindowBackEnd>>, E> {\n Events::new(current_window())\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n current_fps_counter().borrow_mut().tick()\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: 
String) {\n current_window().set_mut(window::Title(text));\n}\n\n\/\/\/ Returns true if the current window should be closed.\npub fn should_close() -> bool {\n use window::ShouldClose;\n let ShouldClose(val) = current_window().get();\n val\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\n#[cfg(feature = \"include_gfx\")]\npub fn render_2d_gfx(\n bg_color: Option<[f32; 4]>, \n f: |graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n let renderer = current_renderer();\n let mut renderer = renderer.borrow_mut();\n let renderer = renderer.deref_mut();\n current_g2d().borrow_mut().draw(\n renderer,\n current_frame().borrow_mut().deref_mut(), \n |c, g| {\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n current_gfx_device().borrow_mut().submit(renderer.as_buffer());\n renderer.reset();\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\n\/\/\/\n\/\/\/ Panics if called nested within the closure\n\/\/\/ to prevent mutable aliases to the graphics back-end.\npub fn render_2d_opengl(\n bg_color: Option<[f32; 4]>,\n f: |graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n use std::ops::Deref;\n\n let window::Size([w, h]) = current_window().borrow().deref().get();\n current_gl().borrow_mut().draw([0, 0, w as i32, h as i32], |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n}\n\n<|endoftext|>"} {"text":"<commit_before>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![warn(dead_code)]\n\n\/\/! A user friendly graphics engine.\n#[cfg(glfw)]\nextern crate glfw;\n#[cfg(sdl2)]\nextern crate sdl2;\n#[cfg(glfw)]\n#[cfg(sdl2)]\nextern crate gl;\nextern crate time;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\n#[cfg(sdl2)]\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\n#[cfg(glfw)]\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use AssetStore = asset_store::AssetStore;\n\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_settings;\nmod asset_store;\n\n#[cfg(sdl2)]\nmod game_window_sdl2;\n#[cfg(glfw)]\nmod game_window_glfw;\n\n<commit_msg>Fix glfw build<commit_after>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![warn(dead_code)]\n\n\/\/! 
A user friendly graphics engine.\n#[cfg(glfw)]\nextern crate glfw;\n#[cfg(sdl2)]\nextern crate sdl2;\n#[cfg(glfw)]\nextern crate collections;\nextern crate gl;\nextern crate time;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\n#[cfg(sdl2)]\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\n#[cfg(glfw)]\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use AssetStore = asset_store::AssetStore;\n\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_settings;\nmod asset_store;\n\n#[cfg(sdl2)]\nmod game_window_sdl2;\n#[cfg(glfw)]\nmod game_window_glfw;\n\n<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"ui\"]\n#![deny(missing_doc)]\n\n\/\/! A flexible structure for user interactions\n\/\/! 
to be used in window frameworks and widgets libraries.\n\n\/\/\/ Time difference in seconds.\npub struct DeltaTime(pub f64);\n\n\/\/\/ Describes a device.\n\/\/\/\n\/\/\/ To create a custom device, use `Device::new`.\npub struct Device(&'static str);\n\npub static MOUSE: Device = Device(\"_mouse\");\npub static KEYBOARD: Device = Device(\"_keyboard\");\n\nimpl Device {\n \/\/\/ Creates a new custom device.\n \/\/\/\n \/\/\/ Returns `Err(String)` if the id is reserved.\n pub fn new(id: &'static str) -> Result<Device, String> {\n let device = Device(id);\n match device {\n MOUSE\n | KEYBOARD\n => Err(format!(\"The device id '{}' is reserved\", id)),\n _ => Ok(device)\n }\n }\n}\n\n\/\/\/ Describes a button on a device.\npub struct Button {\n \/\/\/ The device that contains the button.\n pub device: Device,\n \/\/\/ The key that identifies the button.\n pub key: &'static str,\n \/\/\/ A non-zero id if device contains more than one.\n pub axis_id: uint,\n \/\/\/ The user id.\n pub user_id: uint,\n}\n\n\/\/\/ A user input signal is a 3D data of some sort.\n\/\/\/\n\/\/\/ This can be absolute or relative depending on the usage.\npub struct Signal {\n \/\/\/ The device where the signal came from.\n pub device: Device,\n \/\/\/ The coordinates received from signal.\n pub xyz: [f64, ..3],\n \/\/\/ A non-zero id if device contains more than one.\n pub id: uint,\n}\n\nimpl Signal {\n \/\/\/ Gets the `x` and `y` component.\n pub fn xy(&self) -> (f64, f64) {\n (self.xyz[0], self.xyz[1])\n }\n}\n\n\/\/\/ Can be sent to a widget from window or parent widget.\npub enum Input {\n \/\/\/ Press button or key.\n Press(Button),\n \/\/\/ Release button or key.\n Release(Button),\n \/\/\/ Repeat button or key.\n \/\/\/\n \/\/\/ The frequency of repeating is implementation dependent.\n Repeat(Button),\n \/\/\/ Position input event from device.\n Position(Device, [f64, ..3]),\n \/\/\/ Move input event from device.\n Move(Device, [f64, ..3]),\n \/\/\/ Scroll input event from device.\n Scroll(Device, [f64, ..3]),\n \/\/\/ Resize input event from device.\n Resize(Device, [f64, ..3]),\n \/\/\/ Orient input event from device.\n Orient(Device, [f64, ..3]),\n \/\/\/ Rotate input event from device.\n Rotate(Device, [f64, ..3]),\n \/\/\/ Scale input event from device.\n Scale(Device, [f64, ..3]),\n \/\/\/ Select all items.\n SelectAll,\n \/\/\/ Deselect all items.\n SelectNone,\n \/\/\/ Select an item.\n Select(uint),\n \/\/\/ Deselect an item.\n Deselect(uint),\n \/\/\/ Select range of items.\n SelectRange(uint, uint),\n \/\/\/ Navigate to item.\n NavigateTo(uint),\n \/\/\/ Focus attention on widget.\n Focus,\n \/\/\/ Shift focus away from widget.\n Defocus,\n \/\/\/ Enable the widget for handling user input.\n Enable,\n \/\/\/ Disable the widget from handling user input.\n Disable,\n \/\/\/ Shw the child widgets.\n Expand,\n \/\/\/ Hide the child widgets.\n Collapse,\n \/\/\/ Play video\/audio content.\n Play,\n \/\/\/ Pause video\/audio content.\n Pause,\n \/\/\/ Skip seconds of video\/audio content.\n \/\/\/\n \/\/\/ This can be negative.\n Skip(DeltaTime),\n \/\/\/ Set the current time of video\/audio content.\n SetTime(DeltaTime),\n \/\/\/ Update widget with delta time in seconds.\n Update(DeltaTime),\n \/\/\/ Render the widget with computed extrapolated time for smoothness.\n Render(DeltaTime),\n \/\/\/ Pastes a string in widget.\n PasteString(String),\n \/\/\/ Pastes a blob of data in widget.\n PasteBlob(Vec<u8>),\n \/\/\/ A unicode character, usually handled by text widgets.\n UnicodeChar(char),\n}\n\n\/\/\/ Can 
be sent to parent of a widget.\npub enum Output {\n \/\/\/ Capture a device.\n \/\/\/\n \/\/\/ This is used by games to hide mouse cursor.\n Capture(Device),\n \/\/\/ Free the captured device.\n Free(Device),\n \/\/\/ Put a string on the clipboard.\n CopyString(String),\n \/\/\/ Put a blob of data on the clipboard.\n CopyBlob(Vec<u8>),\n \/\/\/ Request a size for the widget content to fit on screen.\n RequestSize([f64, ..3]),\n \/\/\/ Ask to go to full screen mode.\n RequestFullscreen,\n \/\/\/ Ask to go to window mode.\n RequestWindow,\n \/\/\/ The number of items in widget has changed.\n \/\/\/\n \/\/\/ This is used by widgets that streams data.\n Count(uint),\n \/\/\/ The length of video\/audio content in seconds.\n Length(DeltaTime),\n \/\/\/ A warning message.\n Warning(String),\n \/\/\/ A critical message.\n Alert(String),\n \/\/\/ A message to notify the user.\n Notify(String),\n \/\/\/ An error occured.\n Error(String),\n \/\/\/ Invalid input, used by widgets that require specific format.\n Invalid(String),\n}\n\npub mod mouse;\npub mod keyboard;\n<commit_msg>Renamed from `Signal` to `Motion`<commit_after>#![crate_name = \"ui\"]\n#![deny(missing_doc)]\n\n\/\/! A flexible structure for user interactions\n\/\/! to be used in window frameworks and widgets libraries.\n\n\/\/\/ Time difference in seconds.\npub struct DeltaTime(pub f64);\n\n\/\/\/ Describes a device.\n\/\/\/\n\/\/\/ To create a custom device, use `Device::new`.\npub struct Device(&'static str);\n\npub static MOUSE: Device = Device(\"_mouse\");\npub static KEYBOARD: Device = Device(\"_keyboard\");\n\nimpl Device {\n \/\/\/ Creates a new custom device.\n \/\/\/\n \/\/\/ Returns `Err(String)` if the id is reserved.\n pub fn new(id: &'static str) -> Result<Device, String> {\n let device = Device(id);\n match device {\n MOUSE\n | KEYBOARD\n => Err(format!(\"The device id '{}' is reserved\", id)),\n _ => Ok(device)\n }\n }\n}\n\n\/\/\/ Describes a button on a device.\npub struct Button {\n \/\/\/ The device that contains the button.\n pub device: Device,\n \/\/\/ The key that identifies the button.\n pub key: &'static str,\n \/\/\/ A non-zero id if device contains more than one.\n pub axis_id: uint,\n \/\/\/ The user id.\n pub user_id: uint,\n}\n\n\/\/\/ A device input motion.\n\/\/\/\n\/\/\/ Coordinates can be absolute or relative depending on the usage.\n\/\/\/ The coordinates depends on usage.\npub struct Motion {\n \/\/\/ The device where the motion came from.\n pub device: Device,\n \/\/\/ The coordinates describing the motion.\n \/\/\/\n \/\/\/ For rotation the coordinates are usually yaw, pitch and roll.\n pub xyz: [f64, ..3],\n \/\/\/ A non-zero id if device contains more than one.\n pub axis_id: uint,\n \/\/\/ The user id.\n pub user_id: uint,\n}\n\nimpl Motion {\n \/\/\/ Gets the `x` and `y` component.\n pub fn get_xy(&self) -> (f64, f64) {\n (self.xyz[0], self.xyz[1])\n }\n}\n\n\/\/\/ Can be sent to a widget from window or parent widget.\npub enum Input {\n \/\/\/ Press button or key.\n Press(Button),\n \/\/\/ Release button or key.\n Release(Button),\n \/\/\/ Repeat button or key.\n \/\/\/\n \/\/\/ The frequency of repeating is implementation dependent.\n Repeat(Button),\n \/\/\/ Position input event from device.\n Position(Motion),\n \/\/\/ Move input event from device.\n Move(Motion),\n \/\/\/ Scroll input event from device.\n Scroll(Motion),\n \/\/\/ Resize input event from device.\n Resize(Motion),\n \/\/\/ Orient input event from device.\n Orient(Motion),\n \/\/\/ Rotate input event from device.\n 
Rotate(Motion),\n \/\/\/ Scale input event from device.\n Scale(Motion),\n \/\/\/ Select all items.\n SelectAll,\n \/\/\/ Deselect all items.\n SelectNone,\n \/\/\/ Select an item.\n Select(uint),\n \/\/\/ Deselect an item.\n Deselect(uint),\n \/\/\/ Select range of items.\n SelectRange(uint, uint),\n \/\/\/ Navigate to item.\n NavigateTo(uint),\n \/\/\/ Focus attention on widget.\n Focus,\n \/\/\/ Shift focus away from widget.\n Defocus,\n \/\/\/ Enable the widget for handling user input.\n Enable,\n \/\/\/ Disable the widget from handling user input.\n Disable,\n \/\/\/ Shw the child widgets.\n Expand,\n \/\/\/ Hide the child widgets.\n Collapse,\n \/\/\/ Play video\/audio content.\n Play,\n \/\/\/ Pause video\/audio content.\n Pause,\n \/\/\/ Skip seconds of video\/audio content.\n \/\/\/\n \/\/\/ This can be negative.\n Skip(DeltaTime),\n \/\/\/ Set the current time of video\/audio content.\n SetTime(DeltaTime),\n \/\/\/ Update widget with delta time in seconds.\n Update(DeltaTime),\n \/\/\/ Render the widget with computed extrapolated time for smoothness.\n Render(DeltaTime),\n \/\/\/ Pastes a string in widget.\n PasteString(String),\n \/\/\/ Pastes a blob of data in widget.\n PasteBlob(Vec<u8>),\n \/\/\/ A unicode character, usually handled by text widgets.\n UnicodeChar(char),\n}\n\n\/\/\/ Can be sent to parent of a widget.\npub enum Output {\n \/\/\/ Capture a device.\n \/\/\/\n \/\/\/ This is used by games to hide mouse cursor.\n Capture(Device),\n \/\/\/ Free the captured device.\n Free(Device),\n \/\/\/ Put a string on the clipboard.\n CopyString(String),\n \/\/\/ Put a blob of data on the clipboard.\n CopyBlob(Vec<u8>),\n \/\/\/ Request a size for the widget content to fit on screen.\n RequestSize([f64, ..3]),\n \/\/\/ Ask to go to full screen mode.\n RequestFullscreen,\n \/\/\/ Ask to go to window mode.\n RequestWindow,\n \/\/\/ The number of items in widget has changed.\n \/\/\/\n \/\/\/ This is used by widgets that streams data.\n Count(uint),\n \/\/\/ The length of video\/audio content in seconds.\n Length(DeltaTime),\n \/\/\/ A warning message.\n Warning(String),\n \/\/\/ A critical message.\n Alert(String),\n \/\/\/ A message to notify the user.\n Notify(String),\n \/\/\/ An error occured.\n Error(String),\n \/\/\/ Invalid input, used by widgets that require specific format.\n Invalid(String),\n}\n\npub mod mouse;\npub mod keyboard;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs: Fix byte offsets mentioned in Archive::read_kind<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>command work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>simplified segment chunking<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #239 - frewsxcv:doc-examples, r=SimonSapin<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Relax bounce to FnOnce<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated to pull from my repo by default<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Snake case<commit_after><|endoftext|>"} {"text":"<commit_before>use std::collections::HashMap;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse serde_json::{Value, from_str};\nuse serde_json::error::Result as R;\nuse serde_json::Serializer;\nuse serde::ser::Serialize;\nuse serde::ser::Serializer as Ser;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse 
storage::file::header::data::FileHeaderData;\n\npub struct JsonHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl JsonHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> JsonHeaderParser {\n JsonHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for JsonHeaderParser {\n\n fn read(&self, string: Option<String>)\n -> Result<FileHeaderData, ParserError>\n {\n if string.is_some() {\n let s = string.unwrap();\n debug!(\"Deserializing: {}\", s);\n let fromstr : R<Value> = from_str(&s[..]);\n if let Ok(ref content) = fromstr {\n return Ok(visit_json(&content))\n }\n let oe = fromstr.err().unwrap();\n let s = format!(\"JSON parser error: {}\", oe.description());\n let e = ParserError::short(&s[..], s.clone(), 0);\n Err(e)\n } else {\n Ok(FileHeaderData::Null)\n }\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n let mut s = Vec::<u8>::new();\n {\n let mut ser = Serializer::pretty(&mut s);\n data.serialize(&mut ser).map_err(|e| {\n debug!(\"Serializer error: {:?}\", e);\n }).ok();\n }\n\n String::from_utf8(s).or(\n Err(ParserError::short(\"Cannot parse utf8 bytes\",\n String::from(\"<not printable>\"),\n 0)))\n }\n\n}\n\n\/\/ TODO: This function must be able to return a parser error\nfn visit_json(v: &Value) -> FileHeaderData {\n match v {\n &Value::Null => FileHeaderData::Null,\n &Value::Bool(b) => FileHeaderData::Bool(b),\n &Value::I64(i) => FileHeaderData::Integer(i),\n &Value::U64(u) => FileHeaderData::UInteger(u),\n &Value::F64(f) => FileHeaderData::Float(f),\n &Value::String(ref s) => FileHeaderData::Text(s.clone()),\n &Value::Array(ref vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_json(&i)).collect())\n }\n },\n &Value::Object(ref btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: k,\n value: Box::new(visit_json(&v)),\n }\n ).collect()\n }\n }\n }\n}\n\nimpl Serialize for FileHeaderData {\n\n fn serialize<S>(&self, ser: &mut S) -> Result<(), S::Error>\n where S: Ser\n {\n match self {\n &FileHeaderData::Null => {\n let o : Option<bool> = None;\n o.serialize(ser)\n },\n &FileHeaderData::Bool(ref b) => b.serialize(ser),\n &FileHeaderData::Integer(ref i) => i.serialize(ser),\n &FileHeaderData::UInteger(ref u) => u.serialize(ser),\n &FileHeaderData::Float(ref f) => f.serialize(ser),\n &FileHeaderData::Text(ref s) => (&s[..]).serialize(ser),\n &FileHeaderData::Array{values: ref vs} => vs.serialize(ser),\n &FileHeaderData::Map{keys: ref ks} => {\n let mut hm = HashMap::new();\n\n for key in ks {\n if let &FileHeaderData::Key{name: ref n, value: ref v} = key {\n hm.insert(n, v);\n } else {\n panic!(\"Not a key: {:?}\", key);\n }\n }\n\n hm.serialize(ser)\n },\n &FileHeaderData::Key{name: _, value: _} => unreachable!(),\n\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use std::ops::Deref;\n\n use super::JsonHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let 
spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys: keys}) => {\n for k in keys {\n match k {\n FHD::Key{name: name, value: value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n &FHD::Integer(i) => assert_eq!(i, -2),\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n #[test]\n fn test_deserialization_without_spec() {\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n let parsed = parser.read(Some(text));\n\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys: keys}) => {\n for k in keys {\n match_key(&k);\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n fn match_key(k: &FHD) {\n use std::ops::Deref;\n\n match k {\n &FHD::Key{name: ref name, value: ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::Array{values: ref vs} => {\n for value in vs.iter() {\n match value {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n _ => assert!(false, \"UInt is not an UInt\"),\n }\n }\n }\n\n &FHD::Map{keys: ref ks} => {\n for key in ks.iter() {\n match key {\n &FHD::Key{name: ref name, value: ref value} => {\n match value.deref() {\n &FHD::Integer(i) => {\n assert_eq!(i, -2);\n assert_eq!(name, \"c\");\n },\n _ => assert!(false, \"Int is not an Int\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n }\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key in main Map is not a Key\"),\n }\n }\n\n #[test]\n fn test_desser() {\n use serde_json::error::Result as R;\n use serde_json::{Value, from_str};\n\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n\n let des = parser.read(Some(text.clone()));\n assert!(des.is_ok(), \"Deserializing failed\");\n\n let ser = parser.write(&des.unwrap());\n assert!(ser.is_ok(), \"Parser error when serializing deserialized text\");\n\n let json_text : R<Value> = from_str(&text[..]);\n let json_ser : R<Value> = from_str(&ser.unwrap()[..]);\n\n assert!(json_text.is_ok(), \"Could not use serde to serialize text for comparison\");\n assert!(json_ser.is_ok(), \"Could not use serde to serialize serialized-deserialized text for comparison\");\n assert_eq!(json_text.unwrap(), json_ser.unwrap());\n }\n\n}\n<commit_msg>Remove unneeded shorthand field pattern<commit_after>use std::collections::HashMap;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse serde_json::{Value, from_str};\nuse serde_json::error::Result as R;\nuse serde_json::Serializer;\nuse serde::ser::Serialize;\nuse serde::ser::Serializer as Ser;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct JsonHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl JsonHeaderParser {\n\n pub fn 
new(spec: Option<FileHeaderSpec>) -> JsonHeaderParser {\n JsonHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for JsonHeaderParser {\n\n fn read(&self, string: Option<String>)\n -> Result<FileHeaderData, ParserError>\n {\n if string.is_some() {\n let s = string.unwrap();\n debug!(\"Deserializing: {}\", s);\n let fromstr : R<Value> = from_str(&s[..]);\n if let Ok(ref content) = fromstr {\n return Ok(visit_json(&content))\n }\n let oe = fromstr.err().unwrap();\n let s = format!(\"JSON parser error: {}\", oe.description());\n let e = ParserError::short(&s[..], s.clone(), 0);\n Err(e)\n } else {\n Ok(FileHeaderData::Null)\n }\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n let mut s = Vec::<u8>::new();\n {\n let mut ser = Serializer::pretty(&mut s);\n data.serialize(&mut ser).map_err(|e| {\n debug!(\"Serializer error: {:?}\", e);\n }).ok();\n }\n\n String::from_utf8(s).or(\n Err(ParserError::short(\"Cannot parse utf8 bytes\",\n String::from(\"<not printable>\"),\n 0)))\n }\n\n}\n\n\/\/ TODO: This function must be able to return a parser error\nfn visit_json(v: &Value) -> FileHeaderData {\n match v {\n &Value::Null => FileHeaderData::Null,\n &Value::Bool(b) => FileHeaderData::Bool(b),\n &Value::I64(i) => FileHeaderData::Integer(i),\n &Value::U64(u) => FileHeaderData::UInteger(u),\n &Value::F64(f) => FileHeaderData::Float(f),\n &Value::String(ref s) => FileHeaderData::Text(s.clone()),\n &Value::Array(ref vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_json(&i)).collect())\n }\n },\n &Value::Object(ref btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: k,\n value: Box::new(visit_json(&v)),\n }\n ).collect()\n }\n }\n }\n}\n\nimpl Serialize for FileHeaderData {\n\n fn serialize<S>(&self, ser: &mut S) -> Result<(), S::Error>\n where S: Ser\n {\n match self {\n &FileHeaderData::Null => {\n let o : Option<bool> = None;\n o.serialize(ser)\n },\n &FileHeaderData::Bool(ref b) => b.serialize(ser),\n &FileHeaderData::Integer(ref i) => i.serialize(ser),\n &FileHeaderData::UInteger(ref u) => u.serialize(ser),\n &FileHeaderData::Float(ref f) => f.serialize(ser),\n &FileHeaderData::Text(ref s) => (&s[..]).serialize(ser),\n &FileHeaderData::Array{values: ref vs} => vs.serialize(ser),\n &FileHeaderData::Map{keys: ref ks} => {\n let mut hm = HashMap::new();\n\n for key in ks {\n if let &FileHeaderData::Key{name: ref n, value: ref v} = key {\n hm.insert(n, v);\n } else {\n panic!(\"Not a key: {:?}\", key);\n }\n }\n\n hm.serialize(ser)\n },\n &FileHeaderData::Key{name: _, value: _} => unreachable!(),\n\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use std::ops::Deref;\n\n use super::JsonHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: 
String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys}) => {\n for k in keys {\n match k {\n FHD::Key{name: name, value: value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n &FHD::Integer(i) => assert_eq!(i, -2),\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n #[test]\n fn test_deserialization_without_spec() {\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n let parsed = parser.read(Some(text));\n\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys: keys}) => {\n for k in keys {\n match_key(&k);\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n fn match_key(k: &FHD) {\n use std::ops::Deref;\n\n match k {\n &FHD::Key{name: ref name, value: ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::Array{values: ref vs} => {\n for value in vs.iter() {\n match value {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n _ => assert!(false, \"UInt is not an UInt\"),\n }\n }\n }\n\n &FHD::Map{keys: ref ks} => {\n for key in ks.iter() {\n match key {\n &FHD::Key{name: ref name, value: ref value} => {\n match value.deref() {\n &FHD::Integer(i) => {\n assert_eq!(i, -2);\n assert_eq!(name, \"c\");\n },\n _ => assert!(false, \"Int is not an Int\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n }\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key in main Map is not a Key\"),\n }\n }\n\n #[test]\n fn test_desser() {\n use serde_json::error::Result as R;\n use serde_json::{Value, from_str};\n\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n\n let des = parser.read(Some(text.clone()));\n assert!(des.is_ok(), \"Deserializing failed\");\n\n let ser = parser.write(&des.unwrap());\n assert!(ser.is_ok(), \"Parser error when serializing deserialized text\");\n\n let json_text : R<Value> = from_str(&text[..]);\n let json_ser : R<Value> = from_str(&ser.unwrap()[..]);\n\n assert!(json_text.is_ok(), \"Could not use serde to serialize text for comparison\");\n assert!(json_ser.is_ok(), \"Could not use serde to serialize serialized-deserialized text for comparison\");\n assert_eq!(json_text.unwrap(), json_ser.unwrap());\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add initial network benchmark<commit_after>#![feature(test)]\n\nextern crate test;\nextern crate exonum;\nextern crate time;\n\nuse std::{thread};\nuse std::net::SocketAddr;\n\nuse time::Duration;\nuse test::Bencher;\n\nuse exonum::events::{Events, Reactor, EventsConfiguration, Event, Timeout};\nuse exonum::events::{Network, NetworkConfiguration};\nuse exonum::messages::{MessageWriter, RawMessage};\nuse exonum::crypto::gen_keypair;\n\n\ntrait EventsBench {\n fn with_addr(addr: SocketAddr) -> Events;\n fn wait_for_msg(&mut self) -> Option<RawMessage>;\n fn gen_message(id: u16, len: usize) -> RawMessage;\n fn wait_for_messages(&mut self, mut count: usize, timeout: Duration) -> Result<(), 
String>;\n}\n\nimpl EventsBench for Events {\n fn with_addr(addr: SocketAddr) -> Events {\n let network = Network::with_config(NetworkConfiguration {\n listen_address: addr,\n max_incoming_connections: 128,\n max_outgoing_connections: 128\n });\n Events::with_config(EventsConfiguration::new(), network).unwrap()\n }\n\n fn wait_for_msg(&mut self) -> Option<RawMessage> {\n let time = self.get_time() + Duration::milliseconds(10000);\n self.add_timeout(Timeout::Status, time);\n loop {\n match self.poll() {\n Event::Incoming(msg) => return Some(msg),\n Event::Timeout(_) => return None,\n Event::Error(_) => return None,\n _ => {}\n }\n }\n }\n\n fn wait_for_messages(&mut self, mut count: usize, timeout: Duration) -> Result<(), String> {\n let time = self.get_time() + timeout;\n self.add_timeout(Timeout::Status, time);\n loop {\n match self.poll() {\n Event::Incoming(_) => { count = count - 1; }\n Event::Timeout(_) => return Err(format!(\"Timeout exceeded, {} messages is not received\", count)),\n Event::Error(_) => return Err(format!(\"An error occured, {} messages is not received\", count)),\n _ => {}\n }\n if count == 0 {\n return Ok(())\n }\n }\n }\n\n fn gen_message(id: u16, len: usize) -> RawMessage {\n let writer = MessageWriter::new(id, len);\n RawMessage::new(writer.sign(&gen_keypair().1))\n }\n}\n\nfn bench_network(b: &mut Bencher, addrs: [SocketAddr; 2], times: usize, len: usize) {\n b.iter(|| {\n let mut e1 = Events::with_addr(addrs[0]);\n let mut e2 = Events::with_addr(addrs[1]);\n e1.bind().unwrap();\n e2.bind().unwrap();\n\n let timeout = Duration::seconds(120);\n let t1 = thread::spawn(move || {\n for _ in 0..times {\n let msg = Events::gen_message(0, len);\n e1.send_to(&addrs[1], msg).unwrap();\n }\n e1.wait_for_messages(times, timeout).unwrap();\n });\n let t2 = thread::spawn(move || {\n for _ in 0..times {\n let msg = Events::gen_message(1, len);\n e2.send_to(&addrs[0], msg).unwrap();\n }\n e2.wait_for_messages(times, timeout).unwrap();\n });\n t1.join().unwrap();\n t2.join().unwrap();\n })\n}\n\n#[bench]\nfn bench_msg_short_100(b: &mut Bencher) {\n let addrs = [\n \"127.0.0.1:9990\".parse().unwrap(),\n \"127.0.0.1:9991\".parse().unwrap()\n ];\n bench_network(b, addrs, 100, 100);\n}\n\n#[bench]\nfn bench_msg_short_1000(b: &mut Bencher) {\n let addrs = [\n \"127.0.0.1:9992\".parse().unwrap(),\n \"127.0.0.1:9993\".parse().unwrap()\n ];\n bench_network(b, addrs, 1000, 100);\n}\n\n#[bench]\nfn bench_msg_long_10(b: &mut Bencher) {\n let addrs = [\n \"127.0.0.1:9994\".parse().unwrap(),\n \"127.0.0.1:9995\".parse().unwrap()\n ];\n bench_network(b, addrs, 10, 100000);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>error safe?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fast stdout<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n if self.cur() == '\\n' || self.cur() == '\\0' {\n self.left();\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset >= 1 {\n self.left();\n }\n } else {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n\n while self.cur() != '\\n' &&\n self.offset >= 1 {\n self.left();\n }\n self.right();\n let mut new_offset = 0;\n\n\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n let original_c = self.cur();\n\n while self.offset < self.string.len() &&\n self.cur() != '\\n' &&\n self.cur() != '\\0' {\n self.right();\n }\n self.right();\n\n if original_c == '\\n' {\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset < self.string.len() {\n self.right();\n }\n } else {\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn get_x(&self) -> usize {\n let mut x = 0;\n for (n, c) in self.string.chars().enumerate() {\n if c == '\\n' {\n x = 0;\n } else {\n x += 1;\n }\n if n >= self.offset {\n break;\n }\n }\n x\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut 
swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Add auto indentation<commit_after>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n if self.cur() == '\\n' || self.cur() == '\\0' {\n self.left();\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset >= 1 {\n self.left();\n }\n } else {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n\n while self.cur() != '\\n' &&\n self.offset >= 1 {\n self.left();\n }\n self.right();\n let mut new_offset = 0;\n\n\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let x = self.get_x(); \/\/- if self.cur() == '\\n' { 1 } else { 0 };\n let original_c = self.cur();\n\n while self.offset < self.string.len() &&\n self.cur() != '\\n' &&\n self.cur() != '\\0' {\n self.right();\n }\n self.right();\n\n if original_c == '\\n' {\n while self.cur() != '\\n' &&\n self.cur() != '\\0' &&\n self.offset < self.string.len() {\n self.right();\n }\n } else {\n for _ in 1..x {\n if self.cur() != '\\n' {\n self.right();\n } else {\n break;\n }\n }\n }\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n let ind = if c == '\\n' {\n let mut mov = 0;\n\n for _ in 0..self.get_x() {\n self.left();\n mov += 1;\n }\n\n let mut ind = String::new();\n while (self.cur() == ' ' ||\n self.cur() == '\\t') &&\n self.offset < self.string.len() {\n ind.push(self.cur());\n self.right();\n mov -= 1;\n }\n\n for _ in 0..mov {\n self.right();\n }\n\n ind\n } else {\n String::new()\n };\n\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n\n self.right();\n\n if c == '\\n' {\n for c in ind.chars() {\n self.insert(c, window);\n }\n\n }\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn get_x(&self) -> usize {\n let mut x = 0;\n for (n, c) in self.string.chars().enumerate() {\n if c == '\\n' {\n x = 0;\n } else {\n x += 1;\n }\n if n >= self.offset {\n break;\n }\n }\n x\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n 
cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Address comments from PR<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::HTMLObjectElementBinding;\nuse dom::bindings::utils::ErrorResult;\nuse dom::document::AbstractDocument;\nuse dom::element::HTMLObjectElementTypeId;\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{AbstractNode, Node};\nuse dom::validitystate::ValidityState;\nuse dom::windowproxy::WindowProxy;\nuse servo_util::str::DOMString;\n\npub struct HTMLObjectElement {\n htmlelement: HTMLElement\n}\n\nimpl HTMLObjectElement {\n pub fn new_inherited(localName: DOMString, document: AbstractDocument) -> HTMLObjectElement {\n HTMLObjectElement {\n htmlelement: HTMLElement::new_inherited(HTMLObjectElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: AbstractDocument) -> AbstractNode {\n let element = HTMLObjectElement::new_inherited(localName, document);\n Node::reflect_node(@mut element, document, HTMLObjectElementBinding::Wrap)\n }\n}\n\nimpl HTMLObjectElement {\n pub fn Data(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetData(&mut self, _data: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Type(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetType(&mut self, _type: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Name(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetName(&mut self, _name: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn UseMap(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetUseMap(&mut self, _use_map: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn GetForm(&self) -> Option<AbstractNode> {\n None\n }\n\n pub fn Width(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetWidth(&mut self, _width: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Height(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetHeight(&mut self, _height: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn GetContentDocument(&self) -> Option<AbstractDocument> {\n None\n }\n\n pub fn GetContentWindow(&self) -> Option<@mut WindowProxy> {\n None\n }\n\n pub fn WillValidate(&self) -> bool {\n false\n }\n\n pub fn Validity(&self) -> @mut ValidityState {\n let global = self.htmlelement.element.node.owner_doc().document().window;\n ValidityState::new(global)\n }\n\n pub fn ValidationMessage(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn CheckValidity(&self) -> bool {\n false\n }\n\n pub fn SetCustomValidity(&mut self, _error: DOMString) {\n }\n\n pub fn Align(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetAlign(&mut self, _align: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Archive(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetArchive(&mut self, _archive: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Code(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCode(&mut self, _code: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Declare(&self) -> bool {\n false\n }\n\n pub fn SetDeclare(&mut self, _declare: bool) -> ErrorResult {\n Ok(())\n }\n\n 
pub fn Hspace(&self) -> u32 {\n 0\n }\n\n pub fn SetHspace(&mut self, _hspace: u32) -> ErrorResult {\n Ok(())\n }\n\n pub fn Standby(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetStandby(&mut self, _standby: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Vspace(&self) -> u32 {\n 0\n }\n\n pub fn SetVspace(&mut self, _vspace: u32) -> ErrorResult {\n Ok(())\n }\n\n pub fn CodeBase(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCodeBase(&mut self, _codebase: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn CodeType(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCodeType(&mut self, _codetype: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Border(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetBorder(&mut self, _border: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn GetSVGDocument(&self) -> Option<AbstractDocument> {\n None\n }\n}\n<commit_msg>Support object element data.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::HTMLObjectElementBinding;\nuse dom::bindings::utils::ErrorResult;\nuse dom::document::AbstractDocument;\nuse dom::element::HTMLObjectElementTypeId;\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{AbstractNode, Node};\nuse dom::validitystate::ValidityState;\nuse dom::windowproxy::WindowProxy;\nuse servo_util::str::DOMString;\n\nuse extra::url::Url;\nuse servo_net::image_cache_task;\nuse servo_net::image_cache_task::ImageCacheTask;\nuse servo_util::url::parse_url;\nuse servo_util::namespace::Null;\nuse servo_util::url::is_image_data;\n\npub struct HTMLObjectElement {\n htmlelement: HTMLElement,\n}\n\nimpl HTMLObjectElement {\n pub fn new_inherited(localName: DOMString, document: AbstractDocument) -> HTMLObjectElement {\n HTMLObjectElement {\n htmlelement: HTMLElement::new_inherited(HTMLObjectElementTypeId, localName, document),\n }\n }\n\n pub fn new(localName: DOMString, document: AbstractDocument) -> AbstractNode {\n let element = HTMLObjectElement::new_inherited(localName, document);\n Node::reflect_node(@mut element, document, HTMLObjectElementBinding::Wrap)\n }\n}\n\nimpl HTMLObjectElement {\n\n \/\/ Makes the local `data` member match the status of the `data` attribute and starts\n \/\/\/ prefetching the image. 
This method must be called after `data` is changed.\n pub fn process_data_url(&mut self, image_cache: ImageCacheTask, url: Option<Url>) {\n let elem = &mut self.htmlelement.element;\n\n \/\/ TODO: support other values\n match (elem.get_attribute(Null, \"type\").map(|x| x.Value()),\n elem.get_attribute(Null, \"data\").map(|x| x.Value())) {\n (None, Some(uri)) => {\n if is_image_data(uri) {\n let data_url = parse_url(uri, url);\n \/\/ Issue #84\n image_cache.send(image_cache_task::Prefetch(data_url));\n }\n }\n _ => { }\n }\n }\n\n pub fn AfterSetAttr(&mut self, name: DOMString, _value: DOMString) {\n if \"data\" == name {\n let document = self.htmlelement.element.node.owner_doc();\n let window = document.document().window;\n let url = window.page.url.as_ref().map(|&(ref url, _)| url.clone());\n self.process_data_url(window.image_cache_task.clone(), url);\n }\n }\n\n pub fn Data(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetData(&mut self, _data: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Type(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetType(&mut self, _type: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Name(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetName(&mut self, _name: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn UseMap(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetUseMap(&mut self, _use_map: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn GetForm(&self) -> Option<AbstractNode> {\n None\n }\n\n pub fn Width(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetWidth(&mut self, _width: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Height(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetHeight(&mut self, _height: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn GetContentDocument(&self) -> Option<AbstractDocument> {\n None\n }\n\n pub fn GetContentWindow(&self) -> Option<@mut WindowProxy> {\n None\n }\n\n pub fn WillValidate(&self) -> bool {\n false\n }\n\n pub fn Validity(&self) -> @mut ValidityState {\n let global = self.htmlelement.element.node.owner_doc().document().window;\n ValidityState::new(global)\n }\n\n pub fn ValidationMessage(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn CheckValidity(&self) -> bool {\n false\n }\n\n pub fn SetCustomValidity(&mut self, _error: DOMString) {\n }\n\n pub fn Align(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetAlign(&mut self, _align: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Archive(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetArchive(&mut self, _archive: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Code(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCode(&mut self, _code: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Declare(&self) -> bool {\n false\n }\n\n pub fn SetDeclare(&mut self, _declare: bool) -> ErrorResult {\n Ok(())\n }\n\n pub fn Hspace(&self) -> u32 {\n 0\n }\n\n pub fn SetHspace(&mut self, _hspace: u32) -> ErrorResult {\n Ok(())\n }\n\n pub fn Standby(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetStandby(&mut self, _standby: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Vspace(&self) -> u32 {\n 0\n }\n\n pub fn SetVspace(&mut self, _vspace: u32) -> ErrorResult {\n Ok(())\n }\n\n pub fn CodeBase(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCodeBase(&mut self, _codebase: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn CodeType(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetCodeType(&mut self, _codetype: DOMString) -> ErrorResult {\n Ok(())\n }\n\n pub fn Border(&self) -> DOMString {\n ~\"\"\n }\n\n pub fn SetBorder(&mut self, _border: DOMString) -> 
ErrorResult {\n Ok(())\n }\n\n pub fn GetSVGDocument(&self) -> Option<AbstractDocument> {\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLOutputElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLOutputElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLOutputElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::htmlformelement::HTMLFormElement;\nuse dom::node::{Node, ElementNodeTypeId, window_from_node};\nuse dom::validitystate::ValidityState;\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLOutputElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLOutputElementDerived for EventTarget {\n fn is_htmloutputelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLOutputElementTypeId))\n }\n}\n\nimpl HTMLOutputElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLOutputElement {\n HTMLOutputElement {\n htmlelement: HTMLElement::new_inherited(HTMLOutputElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLOutputElement> {\n let element = HTMLOutputElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLOutputElementBinding::Wrap)\n }\n}\n\npub trait HTMLOutputElementMethods {\n fn GetForm(&self) -> Option<Temporary<HTMLFormElement>>;\n fn Name(&self) -> DOMString;\n fn SetName(&mut self, _name: DOMString) -> ErrorResult;\n fn Type(&self) -> DOMString;\n fn DefaultValue(&self) -> DOMString;\n fn SetDefaultValue(&mut self, _value: DOMString) -> ErrorResult;\n fn Value(&self) -> DOMString;\n fn SetValue(&mut self, _value: DOMString) -> ErrorResult;\n fn WillValidate(&self) -> bool;\n fn SetWillValidate(&mut self, _will_validate: bool);\n fn Validity(&self) -> Temporary<ValidityState>;\n fn ValidationMessage(&self) -> DOMString;\n fn SetValidationMessage(&mut self, _message: DOMString) -> ErrorResult;\n fn CheckValidity(&self) -> bool;\n fn SetCustomValidity(&mut self, _error: DOMString);\n}\n\nimpl<'a> HTMLOutputElementMethods for JSRef<'a, HTMLOutputElement> {\n fn GetForm(&self) -> Option<Temporary<HTMLFormElement>> {\n None\n }\n\n fn Name(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetName(&mut self, _name: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn Type(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn DefaultValue(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetDefaultValue(&mut self, _value: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn Value(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetValue(&mut self, _value: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn WillValidate(&self) -> bool {\n false\n }\n\n fn SetWillValidate(&mut self, _will_validate: bool) {\n }\n\n fn Validity(&self) -> Temporary<ValidityState> {\n let window = window_from_node(self).root();\n ValidityState::new(&*window)\n }\n\n fn ValidationMessage(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetValidationMessage(&mut self, _message: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn CheckValidity(&self) -> bool {\n true\n }\n\n fn 
SetCustomValidity(&mut self, _error: DOMString) {\n }\n}\n<commit_msg>Remove needless '&mut self' from HTMLOutputElementMethods.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLOutputElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLOutputElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLOutputElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::htmlformelement::HTMLFormElement;\nuse dom::node::{Node, ElementNodeTypeId, window_from_node};\nuse dom::validitystate::ValidityState;\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLOutputElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLOutputElementDerived for EventTarget {\n fn is_htmloutputelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLOutputElementTypeId))\n }\n}\n\nimpl HTMLOutputElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLOutputElement {\n HTMLOutputElement {\n htmlelement: HTMLElement::new_inherited(HTMLOutputElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLOutputElement> {\n let element = HTMLOutputElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLOutputElementBinding::Wrap)\n }\n}\n\npub trait HTMLOutputElementMethods {\n fn GetForm(&self) -> Option<Temporary<HTMLFormElement>>;\n fn Name(&self) -> DOMString;\n fn SetName(&self, _name: DOMString) -> ErrorResult;\n fn Type(&self) -> DOMString;\n fn DefaultValue(&self) -> DOMString;\n fn SetDefaultValue(&self, _value: DOMString) -> ErrorResult;\n fn Value(&self) -> DOMString;\n fn SetValue(&self, _value: DOMString) -> ErrorResult;\n fn WillValidate(&self) -> bool;\n fn SetWillValidate(&self, _will_validate: bool);\n fn Validity(&self) -> Temporary<ValidityState>;\n fn ValidationMessage(&self) -> DOMString;\n fn SetValidationMessage(&self, _message: DOMString) -> ErrorResult;\n fn CheckValidity(&self) -> bool;\n fn SetCustomValidity(&self, _error: DOMString);\n}\n\nimpl<'a> HTMLOutputElementMethods for JSRef<'a, HTMLOutputElement> {\n fn GetForm(&self) -> Option<Temporary<HTMLFormElement>> {\n None\n }\n\n fn Name(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetName(&self, _name: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn Type(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn DefaultValue(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetDefaultValue(&self, _value: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn Value(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetValue(&self, _value: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn WillValidate(&self) -> bool {\n false\n }\n\n fn SetWillValidate(&self, _will_validate: bool) {\n }\n\n fn Validity(&self) -> Temporary<ValidityState> {\n let window = window_from_node(self).root();\n ValidityState::new(&*window)\n }\n\n fn ValidationMessage(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetValidationMessage(&self, _message: DOMString) -> ErrorResult {\n Ok(())\n }\n\n fn CheckValidity(&self) -> bool {\n true\n }\n\n fn SetCustomValidity(&self, _error: DOMString) {\n 
}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(associated_consts)]\n\ntrait Lattice {\n const BOTTOM: Self;\n}\n\n\/\/ FIXME(#33573): this should work without the 'static lifetime bound.\nimpl<T: 'static> Lattice for Option<T> {\n const BOTTOM: Option<T> = None;\n}\n\nfn main(){}\n<commit_msg>Auto merge of #42147 - withoutboats:run-pass-test-for-static-in-assoc-const-ty-refs, r=eddyb<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(associated_consts)]\n\ntrait Lattice {\n const BOTTOM: Self;\n}\n\nimpl<T> Lattice for Option<T> {\n const BOTTOM: Option<T> = None;\n}\n\nfn main(){}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(repr_simd, target_feature, cfg_target_feature)]\n\nuse std::process::{Command, ExitStatus};\nuse std::env;\n\nfn main() {\n if let Some(level) = env::args().nth(1) {\n return test::main(&level)\n }\n\n let me = env::current_exe().unwrap();\n for level in [\"sse\", \"avx\", \"avx512\"].iter() {\n let status = Command::new(&me).arg(level).status().unwrap();\n if status.success() {\n println!(\"success with {}\", level);\n continue\n }\n\n \/\/ We don't actually know if our computer has the requisite target features\n \/\/ for the test below. 
Testing for that will get added to libstd later so\n \/\/ for now just asume sigill means this is a machine that can't run this test.\n if is_sigill(status) {\n println!(\"sigill with {}, assuming spurious\", level);\n continue\n }\n panic!(\"invalid status at {}: {}\", level, status);\n }\n}\n\n#[cfg(unix)]\nfn is_sigill(status: ExitStatus) -> bool {\n use std::os::unix::prelude::*;\n status.signal() == Some(4)\n}\n\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n#[allow(bad_style)]\nmod test {\n \/\/ An SSE type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m128i(u64, u64);\n\n \/\/ An AVX type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m256i(u64, u64, u64, u64);\n\n \/\/ An AVX-512 type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m512i(u64, u64, u64, u64, u64, u64, u64, u64);\n\n pub fn main(level: &str) {\n unsafe {\n main_normal(level);\n main_sse(level);\n if level == \"sse\" {\n return\n }\n main_avx(level);\n if level == \"avx\" {\n return\n }\n main_avx512(level);\n }\n }\n\n macro_rules! mains {\n ($(\n $(#[$attr:meta])*\n unsafe fn $main:ident(level: &str) {\n ...\n }\n )*) => ($(\n $(#[$attr])*\n unsafe fn $main(level: &str) {\n let m128 = __m128i(1, 2);\n let m256 = __m256i(3, 4, 5, 6);\n let m512 = __m512i(7, 8, 9, 10, 11, 12, 13, 14);\n assert_eq!(id_sse_128(m128), m128);\n assert_eq!(id_sse_256(m256), m256);\n assert_eq!(id_sse_512(m512), m512);\n\n if level == \"sse\" {\n return\n }\n assert_eq!(id_avx_128(m128), m128);\n assert_eq!(id_avx_256(m256), m256);\n assert_eq!(id_avx_512(m512), m512);\n\n if level == \"avx\" {\n return\n }\n assert_eq!(id_avx512_128(m128), m128);\n assert_eq!(id_avx512_256(m256), m256);\n assert_eq!(id_avx512_512(m512), m512);\n }\n )*)\n }\n\n mains! {\n unsafe fn main_normal(level: &str) { ... }\n #[target_feature(enable = \"sse2\")]\n unsafe fn main_sse(level: &str) { ... }\n #[target_feature(enable = \"avx\")]\n unsafe fn main_avx(level: &str) { ... }\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn main_avx512(level: &str) { ... 
}\n }\n\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n}\n\n#[cfg(not(any(target_arch = \"x86\", target_arch = \"x86_64\")))]\nmod test {\n pub fn main(level: &str) {}\n}\n<commit_msg>Fix a test case on Windows<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(repr_simd, target_feature, cfg_target_feature)]\n\nuse std::process::{Command, ExitStatus};\nuse std::env;\n\nfn main() {\n if let Some(level) = env::args().nth(1) {\n return test::main(&level)\n }\n\n let me = env::current_exe().unwrap();\n for level in [\"sse\", \"avx\", \"avx512\"].iter() {\n let status = Command::new(&me).arg(level).status().unwrap();\n if status.success() {\n println!(\"success with {}\", level);\n continue\n }\n\n \/\/ We don't actually know if our computer has the requisite target features\n \/\/ for the test below. 
Testing for that will get added to libstd later so\n \/\/ for now just asume sigill means this is a machine that can't run this test.\n if is_sigill(status) {\n println!(\"sigill with {}, assuming spurious\", level);\n continue\n }\n panic!(\"invalid status at {}: {}\", level, status);\n }\n}\n\n#[cfg(unix)]\nfn is_sigill(status: ExitStatus) -> bool {\n use std::os::unix::prelude::*;\n status.signal() == Some(4)\n}\n\n#[cfg(windows)]\nfn is_sigill(status: ExitStatus) -> bool {\n status.code() == Some(0xc000001d)\n}\n\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n#[allow(bad_style)]\nmod test {\n \/\/ An SSE type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m128i(u64, u64);\n\n \/\/ An AVX type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m256i(u64, u64, u64, u64);\n\n \/\/ An AVX-512 type\n #[repr(simd)]\n #[derive(PartialEq, Debug, Clone, Copy)]\n struct __m512i(u64, u64, u64, u64, u64, u64, u64, u64);\n\n pub fn main(level: &str) {\n unsafe {\n main_normal(level);\n main_sse(level);\n if level == \"sse\" {\n return\n }\n main_avx(level);\n if level == \"avx\" {\n return\n }\n main_avx512(level);\n }\n }\n\n macro_rules! mains {\n ($(\n $(#[$attr:meta])*\n unsafe fn $main:ident(level: &str) {\n ...\n }\n )*) => ($(\n $(#[$attr])*\n unsafe fn $main(level: &str) {\n let m128 = __m128i(1, 2);\n let m256 = __m256i(3, 4, 5, 6);\n let m512 = __m512i(7, 8, 9, 10, 11, 12, 13, 14);\n assert_eq!(id_sse_128(m128), m128);\n assert_eq!(id_sse_256(m256), m256);\n assert_eq!(id_sse_512(m512), m512);\n\n if level == \"sse\" {\n return\n }\n assert_eq!(id_avx_128(m128), m128);\n assert_eq!(id_avx_256(m256), m256);\n assert_eq!(id_avx_512(m512), m512);\n\n if level == \"avx\" {\n return\n }\n assert_eq!(id_avx512_128(m128), m128);\n assert_eq!(id_avx512_256(m256), m256);\n assert_eq!(id_avx512_512(m512), m512);\n }\n )*)\n }\n\n mains! {\n unsafe fn main_normal(level: &str) { ... }\n #[target_feature(enable = \"sse2\")]\n unsafe fn main_sse(level: &str) { ... }\n #[target_feature(enable = \"avx\")]\n unsafe fn main_avx(level: &str) { ... }\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn main_avx512(level: &str) { ... 
}\n }\n\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"sse2\")]\n unsafe fn id_sse_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"avx\")]\n unsafe fn id_avx_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_128(a: __m128i) -> __m128i {\n assert_eq!(a, __m128i(1, 2));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_256(a: __m256i) -> __m256i {\n assert_eq!(a, __m256i(3, 4, 5, 6));\n a.clone()\n }\n\n #[target_feature(enable = \"avx512bw\")]\n unsafe fn id_avx512_512(a: __m512i) -> __m512i {\n assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14));\n a.clone()\n }\n}\n\n#[cfg(not(any(target_arch = \"x86\", target_arch = \"x86_64\")))]\nmod test {\n pub fn main(level: &str) {}\n}\n<|endoftext|>"} {"text":"<commit_before>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fs::File as FSFile;\nuse std::ops::Deref;\nuse std::io::Write;\nuse std::io::Read;\n\npub mod path;\npub mod file;\npub mod parser;\npub mod json;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::file::File;\nuse storage::file::id::FileID;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file::header::data::FileHeaderData;\n\ntype Cache = HashMap<FileID, Rc<RefCell<File>>>;\n\npub struct Store {\n storepath: String,\n cache : RefCell<Cache>,\n}\n\nimpl Store {\n\n pub fn new(storepath: String) -> Store {\n Store {\n storepath: storepath,\n cache: RefCell::new(HashMap::new()),\n }\n }\n\n fn put_in_cache(&self, f: File) -> FileID {\n let res = f.id().clone();\n self.cache.borrow_mut().insert(f.id().clone(), Rc::new(RefCell::new(f)));\n res\n }\n\n pub fn load_in_cache<HP>(&self, m: &Module, parser: &Parser<HP>, id: FileID)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n let idstr : String = id.clone().into();\n let path = format!(\"{}\/{}-{}.imag\", self.storepath, m.name(), idstr);\n let mut string = String::new();\n\n FSFile::open(&path).map(|mut file| {\n file.read_to_string(&mut string)\n .map_err(|e| error!(\"Failed reading file: '{}'\", path));\n });\n\n parser.read(string).map(|(header, data)| {\n self.new_file_from_parser_result(m, id.clone(), header, data);\n });\n\n self.load(&id)\n }\n\n pub fn new_file(&self, module: &Module)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n\n debug!(\"Create new File object: {:?}\", &f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_from_parser_result(&self,\n module: &Module,\n id: FileID,\n header: FileHeaderData,\n data: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: header,\n data: data,\n id: 
id,\n };\n debug!(\"Create new File object from parser result: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_header(&self,\n module: &Module,\n h: FileHeaderData)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with header: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_data(&self, module: &Module, d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with data: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_content(&self,\n module: &Module,\n h: FileHeaderData,\n d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with content: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn persist<HP>(&self,\n p: &Parser<HP>,\n f: Rc<RefCell<File>>) -> bool\n where HP: FileHeaderParser\n {\n let file = f.deref().borrow();\n let text = p.write(file.contents());\n if text.is_err() {\n error!(\"Error: {}\", text.err().unwrap());\n return false;\n }\n\n let path = {\n let ids : String = file.id().clone().into();\n format!(\"{}\/{}-{}.imag\", self.storepath, file.owning_module_name, ids)\n };\n\n self.ensure_store_path_exists();\n\n FSFile::create(&path).map(|mut fsfile| {\n fsfile.write_all(&text.unwrap().clone().into_bytes()[..])\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n }).and(Ok(true)).unwrap()\n }\n\n fn ensure_store_path_exists(&self) {\n use std::fs::create_dir_all;\n use std::process::exit;\n\n create_dir_all(&self.storepath).unwrap_or_else(|e| {\n error!(\"Could not create store: '{}'\", self.storepath);\n error!(\"Error : '{}'\", e);\n error!(\"Killing myself now\");\n exit(1);\n })\n }\n\n pub fn load(&self, id: &FileID) -> Option<Rc<RefCell<File>>> {\n debug!(\"Loading '{:?}'\", id);\n self.cache.borrow().get(id).cloned()\n }\n\n pub fn load_by_hash<HP>(&self,\n m: &Module,\n parser: &Parser<HP>,\n hash: FileHash)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n macro_rules! 
try_some {\n ($expr:expr) => (match $expr {\n ::std::option::Option::Some(val) => val,\n ::std::option::Option::None => return ::std::option::Option::None,\n });\n\n ($expr:expr => return) => (match $expr {\n ::std::option::Option::Some(val) => val,\n ::std::option::Option::None => return,\n })\n }\n\n use glob::{glob, Paths, PatternError};\n\n let hashstr : String = hash.into();\n let globstr = format!(\"{}\/*-{}.imag\", self.storepath, hashstr);\n debug!(\"glob({})\", globstr);\n\n let globs = glob(&globstr[..]);\n if globs.is_err() {\n return None;\n }\n\n let path = globs.unwrap().last();\n debug!(\"path = {:?}\", path);\n\n let pathbuf = try_some!(path);\n if pathbuf.is_err() { return None; }\n\n let pathbuf_un = pathbuf.unwrap();\n let filename = pathbuf_un.file_name();\n let s = try_some!(filename).to_str();\n let string = String::from(try_some!(s));\n let id = try_some!(FileID::parse(&string));\n\n debug!(\"Loaded ID = '{:?}'\", id);\n\n self.load_in_cache(m, parser, id)\n .map(|file| {\n debug!(\"Loaded File = '{:?}'\", file);\n Some(file)\n }).unwrap_or(None)\n }\n\n pub fn remove(&self, id: FileID) -> bool {\n use std::fs::remove_file;\n\n self.cache\n .borrow_mut()\n .remove(&id)\n .map(|file| {\n let idstr : String = id.into();\n let path = format!(\"{}\/{}-{}.imag\",\n self.storepath,\n file.deref().borrow().owner_name(),\n idstr);\n remove_file(path).is_ok()\n })\n .unwrap_or(false)\n }\n\n pub fn load_for_module<HP>(&self, m: &Module, parser: &Parser<HP>)\n -> Vec<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n use glob::{glob, Paths, PatternError};\n\n let globstr = format!(\"{}\/{}-*.imag\", self.storepath, m.name());\n let mut res = vec![];\n\n glob(&globstr[..]).map(|paths| {\n for path in paths {\n if let Ok(pathbuf) = path {\n let fname = pathbuf.file_name().and_then(|s| s.to_str());\n fname.map(|s| {\n FileID::parse(&String::from(s)).map(|id| {\n self.load_in_cache(m, parser, id).map(|file| {\n res.push(file);\n })\n });\n });\n }\n }\n });\n res\n }\n\n fn get_new_file_id(&self) -> FileID {\n use uuid::Uuid;\n let hash = FileHash::from(Uuid::new_v4().to_hyphenated_string());\n FileID::new(FileIDType::UUID, hash)\n }\n\n}\n<commit_msg>Add debug output to Store::load_in_cache()<commit_after>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fs::File as FSFile;\nuse std::ops::Deref;\nuse std::io::Write;\nuse std::io::Read;\n\npub mod path;\npub mod file;\npub mod parser;\npub mod json;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::file::File;\nuse storage::file::id::FileID;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file::header::data::FileHeaderData;\n\ntype Cache = HashMap<FileID, Rc<RefCell<File>>>;\n\npub struct Store {\n storepath: String,\n cache : RefCell<Cache>,\n}\n\nimpl Store {\n\n pub fn new(storepath: String) -> Store {\n Store {\n storepath: storepath,\n cache: RefCell::new(HashMap::new()),\n }\n }\n\n fn put_in_cache(&self, f: File) -> FileID {\n let res = f.id().clone();\n self.cache.borrow_mut().insert(f.id().clone(), Rc::new(RefCell::new(f)));\n res\n }\n\n pub fn load_in_cache<HP>(&self, m: &Module, parser: &Parser<HP>, id: FileID)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n let idstr : String = id.clone().into();\n let path = format!(\"{}\/{}-{}.imag\", self.storepath, m.name(), idstr);\n debug!(\"Loading path = '{}'\", path);\n let mut string = String::new();\n\n 
FSFile::open(&path).map(|mut file| {\n file.read_to_string(&mut string)\n .map_err(|e| error!(\"Failed reading file: '{}'\", path));\n });\n\n parser.read(string).map(|(header, data)| {\n self.new_file_from_parser_result(m, id.clone(), header, data);\n });\n\n self.load(&id)\n }\n\n pub fn new_file(&self, module: &Module)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n\n debug!(\"Create new File object: {:?}\", &f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_from_parser_result(&self,\n module: &Module,\n id: FileID,\n header: FileHeaderData,\n data: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: header,\n data: data,\n id: id,\n };\n debug!(\"Create new File object from parser result: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_header(&self,\n module: &Module,\n h: FileHeaderData)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with header: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_data(&self, module: &Module, d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with data: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_content(&self,\n module: &Module,\n h: FileHeaderData,\n d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with content: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn persist<HP>(&self,\n p: &Parser<HP>,\n f: Rc<RefCell<File>>) -> bool\n where HP: FileHeaderParser\n {\n let file = f.deref().borrow();\n let text = p.write(file.contents());\n if text.is_err() {\n error!(\"Error: {}\", text.err().unwrap());\n return false;\n }\n\n let path = {\n let ids : String = file.id().clone().into();\n format!(\"{}\/{}-{}.imag\", self.storepath, file.owning_module_name, ids)\n };\n\n self.ensure_store_path_exists();\n\n FSFile::create(&path).map(|mut fsfile| {\n fsfile.write_all(&text.unwrap().clone().into_bytes()[..])\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n }).and(Ok(true)).unwrap()\n }\n\n fn ensure_store_path_exists(&self) {\n use std::fs::create_dir_all;\n use std::process::exit;\n\n create_dir_all(&self.storepath).unwrap_or_else(|e| {\n error!(\"Could not create store: '{}'\", self.storepath);\n error!(\"Error : '{}'\", e);\n error!(\"Killing myself now\");\n exit(1);\n })\n }\n\n pub fn load(&self, id: &FileID) -> Option<Rc<RefCell<File>>> {\n debug!(\"Loading '{:?}'\", id);\n self.cache.borrow().get(id).cloned()\n }\n\n pub fn load_by_hash<HP>(&self,\n m: &Module,\n parser: &Parser<HP>,\n hash: FileHash)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n macro_rules! 
try_some {\n ($expr:expr) => (match $expr {\n ::std::option::Option::Some(val) => val,\n ::std::option::Option::None => return ::std::option::Option::None,\n });\n\n ($expr:expr => return) => (match $expr {\n ::std::option::Option::Some(val) => val,\n ::std::option::Option::None => return,\n })\n }\n\n use glob::{glob, Paths, PatternError};\n\n let hashstr : String = hash.into();\n let globstr = format!(\"{}\/*-{}.imag\", self.storepath, hashstr);\n debug!(\"glob({})\", globstr);\n\n let globs = glob(&globstr[..]);\n if globs.is_err() {\n return None;\n }\n\n let path = globs.unwrap().last();\n debug!(\"path = {:?}\", path);\n\n let pathbuf = try_some!(path);\n if pathbuf.is_err() { return None; }\n\n let pathbuf_un = pathbuf.unwrap();\n let filename = pathbuf_un.file_name();\n let s = try_some!(filename).to_str();\n let string = String::from(try_some!(s));\n let id = try_some!(FileID::parse(&string));\n\n debug!(\"Loaded ID = '{:?}'\", id);\n\n self.load_in_cache(m, parser, id)\n .map(|file| {\n debug!(\"Loaded File = '{:?}'\", file);\n Some(file)\n }).unwrap_or(None)\n }\n\n pub fn remove(&self, id: FileID) -> bool {\n use std::fs::remove_file;\n\n self.cache\n .borrow_mut()\n .remove(&id)\n .map(|file| {\n let idstr : String = id.into();\n let path = format!(\"{}\/{}-{}.imag\",\n self.storepath,\n file.deref().borrow().owner_name(),\n idstr);\n remove_file(path).is_ok()\n })\n .unwrap_or(false)\n }\n\n pub fn load_for_module<HP>(&self, m: &Module, parser: &Parser<HP>)\n -> Vec<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n use glob::{glob, Paths, PatternError};\n\n let globstr = format!(\"{}\/{}-*.imag\", self.storepath, m.name());\n let mut res = vec![];\n\n glob(&globstr[..]).map(|paths| {\n for path in paths {\n if let Ok(pathbuf) = path {\n let fname = pathbuf.file_name().and_then(|s| s.to_str());\n fname.map(|s| {\n FileID::parse(&String::from(s)).map(|id| {\n self.load_in_cache(m, parser, id).map(|file| {\n res.push(file);\n })\n });\n });\n }\n }\n });\n res\n }\n\n fn get_new_file_id(&self) -> FileID {\n use uuid::Uuid;\n let hash = FileHash::from(Uuid::new_v4().to_hyphenated_string());\n FileID::new(FileIDType::UUID, hash)\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary scissor flush<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add readability.rs<commit_after>use std::rc::Rc;\nuse std::path::Path;\nuse std::cell::Cell;\nuse std::collections::BTreeMap;\nuse url::Url;\nuse regex::Regex;\nuse html5ever::tree_builder::TreeSink;\nuse html5ever::rcdom::Node;\nuse html5ever::rcdom::NodeData::{Element, Text};\nuse html5ever::rcdom::Handle;\nuse html5ever::rcdom::NodeData::{\n Document,\n Doctype,\n Comment,\n ProcessingInstruction\n};\nuse html5ever::rcdom::RcDom;\nuse dom;\n\npub static PUNCTUATIONS: &'static str = r\"([、。,.!?]|\\.[^A-Za-z0-9]|,[^0-9]|!|\\?)\";\npub static POSITIVE: &'static str = \"article|body|content|entry|hentry|main|page\\\n |pagination|post|text|blog|story\";\npub static NEGATIVE: &'static str = \"combx|comment|com|contact|foot|footer|footnote\\\n |masthead|media|meta|outbrain|promo|related\\\n |scroll|shoutbox|sidebar|sponsor|shopping\\\n |tags|tool|widget\";\nstatic TAGS: [&'static str; 10] = [\n \"a\", \"blockquote\", \"dl\", \"div\", \"img\", \"ol\", \"p\", \"pre\", \"table\", \"ul\",\n];\n\npub struct Candidate {\n pub node: Rc<Node>,\n pub score: Cell<f32>,\n}\n\npub fn fix_img_path(handle: Handle, url: &Url) -> bool {\n let src = dom::get_attr(\"src\", handle.clone());\n if src.is_none() {\n 
return false\n }\n let s = src.unwrap();\n if !s.starts_with(\"\/\/\") && !s.starts_with(\"http:\/\/\") && s.starts_with(\"https:\/\/\") {\n match url.join(&s) {\n Ok(new_url) => dom::set_attr(\"src\", new_url.as_str(), handle),\n Err(_) => (),\n }\n }\n true\n}\n\npub fn get_link_density(handle: Handle) -> f32 {\n let text_length = dom::text_len(handle.clone()) as f32;\n if text_length == 0.0 {\n return 0.0;\n }\n let mut link_length = 0.0;\n let mut links: Vec<Rc<Node>> = vec![];\n dom::find_node(handle.clone(), \"a\", &mut links);\n for link in links.iter() {\n link_length += dom::text_len(link.clone()) as f32;\n }\n link_length \/ text_length\n}\n\npub fn is_candidate(handle: Handle) -> bool {\n let text_len = dom::text_len(handle.clone());\n if text_len < 20 {\n return false\n }\n let n: &str = &dom::get_tag_name(handle. clone()).unwrap_or(\"\".to_string());\n match n {\n \"p\" => true,\n \"div\" | \"article\" | \"center\" | \"section\" =>\n !dom::has_nodes(handle.clone(), &TAGS.iter().map(|t| *t).collect()),\n _ => false\n }\n}\n\npub fn init_content_score(handle: Handle) -> f32 {\n let tag_name = dom::get_tag_name(handle.clone()).unwrap_or(\"\".to_string());\n match tag_name.as_ref() {\n \"article\" => 10.0,\n \"div\" => 5.0,\n \"blockquote\" => 3.0,\n \"form\" => -3.0,\n \"th\" => 5.0,\n _ => 0.0,\n }\n}\n\npub fn calc_content_score(handle: Handle) -> f32 {\n let mut score: f32 = 1.0;\n score += get_class_weight(handle.clone());\n let mut text = String::new();\n dom::extract_text(handle.clone(), &mut text, true);\n let re = Regex::new(PUNCTUATIONS).unwrap();\n let mat = re.find_iter(&text);\n score += mat.count() as f32;\n score += f32::min(f32::floor(text.chars().count() as f32 \/ 100.0), 3.0);\n return score\n}\n\npub fn get_class_weight(handle: Handle) -> f32 {\n let mut weight: f32 = 0.0;\n match handle.data {\n Element { name: _, ref attrs, .. } => {\n for prop in [\"id\", \"class\"].iter() {\n if let Some(class) = dom::attr(prop, &attrs.borrow()) {\n if Regex::new(POSITIVE).unwrap().is_match(&class) {\n weight += 25.0\n };\n if Regex::new(NEGATIVE).unwrap().is_match(&class) {\n weight -= 25.0\n }\n }\n }\n },\n _ => (),\n };\n weight\n}\n\npub fn clean(mut dom: &mut RcDom, id: &Path, handle: Handle, url: &Url, candidates: &BTreeMap<String, Candidate>) -> bool {\n let mut useless = false;\n match handle.data {\n Document => (),\n Doctype { .. } => (),\n Text { ref contents } => {\n let s = contents.borrow();\n if s.trim().len() == 0 {\n useless = true\n }\n },\n Comment { .. } => useless = true,\n Element { ref name, ref attrs, .. } => {\n let tag_name = name.local.as_ref();\n match tag_name.to_lowercase().as_ref() {\n \"script\" | \"link\" | \"style\" | \"noscript\" | \"meta\"\n | \"h1\" | \"object\" | \"header\" | \"footer\" | \"aside\" => {\n useless = true\n },\n \"form\" | \"table\" | \"ul\" | \"div\" => {\n useless = is_useless(id, handle.clone(), candidates)\n },\n \"img\" => useless = fix_img_path(handle.clone(), url),\n _ => (),\n }\n dom::clean_attr(\"id\" , &mut *attrs.borrow_mut());\n dom::clean_attr(\"class\", &mut *attrs.borrow_mut());\n dom::clean_attr(\"style\", &mut *attrs.borrow_mut());\n },\n ProcessingInstruction { .. 
} => unreachable!()\n }\n let mut useless_nodes = vec![];\n for (i, child) in handle.children.borrow().iter().enumerate() {\n let pid = id.join(i.to_string());\n if clean(&mut dom, pid.as_path(), child.clone(), url, candidates) {\n useless_nodes.push(child.clone());\n }\n }\n for node in useless_nodes.iter() {\n dom.remove_from_parent(node);\n }\n if dom::is_empty(handle) {\n useless = true\n }\n useless\n}\n\npub fn is_useless(id: &Path, handle: Handle, candidates: &BTreeMap<String, Candidate>) -> bool {\n let tag_name = &dom::get_tag_name(handle.clone()).unwrap_or(\"\".to_string());\n let weight = get_class_weight(handle.clone());\n let score = id.to_str()\n .and_then(|id| candidates.get(id))\n .map(|c| c.score.get()).unwrap_or(0.0);\n if weight + score < 0.0 {\n return true\n }\n let mut p_nodes: Vec<Rc<Node>> = vec![];\n let mut img_nodes: Vec<Rc<Node>> = vec![];\n let mut li_nodes: Vec<Rc<Node>> = vec![];\n let mut input_nodes: Vec<Rc<Node>> = vec![];\n let mut embed_nodes: Vec<Rc<Node>> = vec![];\n dom::find_node(handle.clone(), \"p\" , &mut p_nodes);\n dom::find_node(handle.clone(), \"img\" , &mut img_nodes);\n dom::find_node(handle.clone(), \"li\" , &mut li_nodes);\n dom::find_node(handle.clone(), \"input\" , &mut input_nodes);\n dom::find_node(handle.clone(), \"embed\" , &mut embed_nodes);\n let p_count = p_nodes.len();\n let img_count = img_nodes.len();\n let li_count = li_nodes.len() as i32 - 100;\n let input_count = input_nodes.len();\n let embed_count = embed_nodes.len();\n let link_density = get_link_density(handle.clone());\n let content_length = dom::text_len(handle.clone());\n\n if img_count > p_count {\n return true\n }\n if li_count > p_count as i32 && tag_name != \"ul\" && tag_name != \"ol\" {\n return true\n }\n if input_count as f32 > f32::floor(p_count as f32 \/ 3.0) {\n return true\n }\n if content_length < 25 && (img_count == 0 || img_count > 2) {\n return true\n }\n if weight < 25.0 && link_density > 0.2 {\n return true\n }\n if (embed_count == 1 && content_length < 35) || embed_count > 1 {\n return true\n }\n return false\n}\n\npub fn is_link_list(handle: Handle) -> bool {\n let rate = evaluate_list(handle);\n rate > 7.5\n}\n\nfn evaluate_list(handle: Handle) -> f32 {\n let mut hit: i32 = 0;\n let mut len: i32 = 0;\n for child in handle.children.borrow().iter() {\n let c = child.clone();\n match c.data {\n Element { ref name, .. } => {\n let tag_name = name.local.as_ref();\n match tag_name.to_lowercase().as_ref() {\n \"li\" | \"dt\" | \"dd\" => {\n len += 1;\n if dom::has_link(child.clone()) {\n hit += 1\n }\n }\n _ => (),\n }\n },\n _ => ()\n }\n }\n if len == 0 {\n 0.0\n } else {\n 9.0 * ((hit \/ len) as f32).powi(2) + 1.0\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add api-std-hashmap.rs<commit_after>\/**\n * http:\/\/static.rust-lang.org\/doc\/0.7\/std\/hashmap.html\n *\n * @license MIT license <http:\/\/www.opensource.org\/licenses\/mit-license.php>\n *\/\nuse std::hashmap::HashMap;\n\nfn main() {\n\tprintln(\"Using borrowed pointers as keys.\");\n\tlet mut h = HashMap::new::<&str, int>();\n\th.insert(\"foo\", 42);\n\tprintln(fmt!(\"Is there a key foo? => %?\", h.contains_key(& &\"foo\"))); \/\/ => true\n\tprintln(fmt!(\"Is there a key baz? => %?\", h.contains_key(& &\"baz\"))); \/\/ => false\n\tprintln(fmt!(\"The value for foo is => %?\", h.find(& &\"foo\"))); \/\/ => Some(&42)\n\th.insert(key, 1);\n\tprintln(fmt!(\"Is there a key baz? 
=> %?\", h.contains_key(& &\"baz\"))); \/\/ => false\n\n\t\/\/ You don't actually need the HashMap to own the keys (but\n\t\/\/ unless all keys are static, this will be likely to lead\n\t\/\/ to problems, so I don't suggest you do it in reality)\n\n\tprintln(\"Using owned pointers as keys.\");\n\tlet mut h = HashMap::new::<~str, int>();\n\th.insert(~\"foo\", 42);\n\tprintln(fmt!(\"Is there a key foo? => %?\", h.contains_key(&~\"foo\"))); \/\/ => true\n\tprintln(fmt!(\"Is there a key baz? => %?\", h.contains_key(&~\"baz\"))); \/\/ => false\n\tprintln(fmt!(\"The value for foo is => %?\", h.find(&~\"foo\"))); \/\/ => Some(&42)\n\tlet key = \"baz\";\n\th.insert(key.to_owned(), 1);\n\tprintln(fmt!(\"Is there a key baz? => %?\", h.contains_key(&~\"baz\"))); \/\/ => true\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #86208<commit_after>\/\/ This is a regression test for issue #86208.\n\/\/ It is also a general test of macro_rules! display.\n\n#![crate_name = \"foo\"]\n\n\/\/ @has 'foo\/macro.todo.html'\n\/\/ @has - '\/\/span[@class=\"macro\"]' 'macro_rules!'\n\/\/ @has - '\/\/span[@class=\"ident\"]' 'todo'\n\/\/ Note: count = 2 * ('=' + '>') + '+' = 2 * (1 + 1) + 1 = 5\n\/\/ @count - '\/\/span[@class=\"op\"]' 5\n\n\/\/ @has - '{ ()'\n\/\/ @has - '\/\/span[@class=\"op\"]' '='\n\/\/ @has - '\/\/span[@class=\"op\"]' '>'\n\/\/ @has - '{ ... };'\n\n\/\/ @has - '($('\n\/\/ @has - '\/\/span[@class=\"macro-nonterminal\"]' '$'\n\/\/ @has - '\/\/span[@class=\"macro-nonterminal\"]' 'arg'\n\/\/ @has - ':'\n\/\/ @has - '\/\/span[@class=\"ident\"]' 'tt'\n\/\/ @has - '\/\/span[@class=\"op\"]' '+'\n\/\/ @has - ')'\npub use std::todo;\n\nmod mod1 {\n \/\/ @has 'foo\/macro.macro1.html'\n \/\/ @has - 'macro_rules!'\n \/\/ @has - 'macro1'\n \/\/ @has - '{ ()'\n \/\/ @has - '($('\n \/\/ @has - 'arg'\n \/\/ @has - 'expr'\n \/\/ @has - ','\n \/\/ @has - '+'\n \/\/ @has - ')'\n #[macro_export]\n macro_rules! macro1 {\n () => {};\n ($($arg:expr),+) => { stringify!($($arg),+) };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>testsuite: Add passing test for #4735<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse core::cast::transmute;\nuse core::libc::c_void;\n\nstruct NonCopyable(*c_void);\n\nimpl Drop for NonCopyable {\n fn finalize(&self) {\n let p = **self;\n let v = unsafe { transmute::<*c_void, ~int>(p) };\n }\n}\n\nfn main() {\n let t = ~0;\n let p = unsafe { transmute::<~int, *c_void>(t) };\n let z = NonCopyable(p);\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::path::Path;\nuse std::borrow::Borrow;\nuse std::ops::Deref;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n store_location: PathBuf,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn storified(self, store: &Store) -> StoreId {\n if self.starts_with(store.path()) {\n debug!(\"Not storifying {:?}, because it is already.\", self);\n self\n } else {\n debug!(\"Create new store id out of: {:?} and {:?}\", store.path(), self.id);\n\n let new_id = StoreId { store_location: store.path().clone(), self.id };\n\n debug!(\"Created: '{:?}'\", new_id);\n new_id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.store_location;\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::from(self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<commit_msg>Make StoreId.base optional<commit_after>use std::path::PathBuf;\nuse std::path::Path;\nuse std::borrow::Borrow;\nuse std::ops::Deref;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n base: Option<PathBuf>,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn storified(self, store: &Store) -> StoreId {\n if self.starts_with(store.path()) {\n debug!(\"Not storifying {:?}, because it is already.\", self);\n self\n } else {\n debug!(\"Create new store id out of: {:?} and {:?}\", store.path(), self.id);\n\n let new_id = StoreId { base: Some(store.path().clone()), self.id };\n\n debug!(\"Created: '{:?}'\", new_id);\n new_id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.base.unwrap_or(PathBuf::from(\"\/\"));\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => 
write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::from(self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove lifetime specification<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{Box, String};\nuse std::collections::VecDeque;\nuse std::ops::DerefMut;\n\nuse orbital::{Color, Point, Size, Event, KeyEvent, MouseEvent, QuitEvent};\n\nuse super::display::Display;\nuse super::scheduler;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the 
window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Box<Display>,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: VecDeque<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = box Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::rgb(255, 255, 255),\n border_color: Color::rgba(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: VecDeque::new(),\n ptr: 0 as *mut Window,\n };\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*super::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option = self.events.pop_front();\n scheduler::end_no_ints(reenable);\n }\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.content.flip();\n (*super::session_ptr).redraw = true;\n (*super::session_ptr).redraw();\n scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display, font: usize) {\n if self.focused {\n self.border_color = Color::rgba(128, 128, 128, 192);\n } else {\n self.border_color = Color::rgba(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::rgb(0, 0, 0);\n } else {\n self.title_color = Color::rgb(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color, font);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2, self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const Color,\n Size::new(self.content.width, self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n fn on_window_decoration(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= -2 && x < self.size.width as isize + 4 && y >= -18 && y < 0\n }\n\n fn on_window_body(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= 0 && x < self.size.width 
as isize && y >= 0 &&\n y < self.size.height as isize\n }\n\n fn mouse_button_pressed(mouse_event: &MouseEvent) -> bool {\n mouse_event.left_button || mouse_event.middle_button || mouse_event.right_button \n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool, active_window: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch && (active_window || Window::mouse_button_pressed(&mouse_event)) {\n if self.on_window_body(mouse_event.x, mouse_event.y) {\n caught = true;\n } else if self.on_window_decoration(mouse_event.x, mouse_event.y) {\n caught = true;\n\n if mouse_event.left_button {\n if !self.last_mouse_event.left_button {\n self.dragging = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.last_mouse_event.right_button {\n self.minimized = !self.minimized;\n }\n }\n\n if mouse_event.middle_button {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(QuitEvent.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n }\n \n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*super::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<commit_msg>Fix for case when mouse button held down<commit_after>use std::{Box, String};\nuse std::collections::VecDeque;\nuse std::ops::DerefMut;\n\nuse orbital::{Color, Point, Size, Event, KeyEvent, MouseEvent, QuitEvent};\n\nuse super::display::Display;\nuse super::scheduler;\n\n\/\/\/ A window\npub struct Window {\n \/\/\/ The position of the window\n pub point: Point,\n \/\/\/ The size of the window\n pub size: Size,\n \/\/\/ The title of the window\n pub title: String,\n \/\/\/ The content of the window\n pub content: Box<Display>,\n \/\/\/ The color of the window title\n pub title_color: Color,\n \/\/\/ The color of the border\n pub border_color: Color,\n \/\/\/ Is the window focused?\n pub focused: bool,\n \/\/\/ Is the window minimized?\n pub minimized: bool,\n dragging: bool,\n last_mouse_event: MouseEvent,\n events: VecDeque<Event>,\n ptr: *mut Window,\n}\n\nimpl Window {\n \/\/\/ Create a new window\n pub fn new(point: Point, size: Size, title: String) -> Box<Self> {\n let mut ret = box Window {\n point: point,\n size: size,\n title: title,\n content: Display::new(size.width, size.height),\n title_color: Color::rgb(255, 255, 255),\n border_color: Color::rgba(64, 64, 64, 128),\n focused: false,\n minimized: false,\n dragging: false,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n right_button: false,\n middle_button: false,\n },\n events: VecDeque::new(),\n ptr: 0 as *mut Window,\n };\n\n unsafe {\n ret.ptr = ret.deref_mut();\n\n if ret.ptr as usize > 0 {\n (*super::session_ptr).add_window(ret.ptr);\n }\n }\n\n ret\n }\n\n \/\/\/ Poll the window (new)\n pub fn poll(&mut self) -> Option<Event> {\n let event_option;\n unsafe {\n let reenable = scheduler::start_no_ints();\n event_option 
= self.events.pop_front();\n scheduler::end_no_ints(reenable);\n }\n return event_option;\n }\n\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.content.flip();\n (*super::session_ptr).redraw = true;\n (*super::session_ptr).redraw();\n scheduler::end_no_ints(reenable);\n }\n }\n\n \/\/\/ Draw the window using a `Display`\n pub fn draw(&mut self, display: &Display, font: usize) {\n if self.focused {\n self.border_color = Color::rgba(128, 128, 128, 192);\n } else {\n self.border_color = Color::rgba(64, 64, 64, 128);\n }\n\n if self.minimized {\n self.title_color = Color::rgb(0, 0, 0);\n } else {\n self.title_color = Color::rgb(255, 255, 255);\n\n display.rect(Point::new(self.point.x - 2, self.point.y - 18),\n Size::new(self.size.width + 4, 18),\n self.border_color);\n\n let mut cursor = Point::new(self.point.x, self.point.y - 17);\n for c in self.title.chars() {\n if cursor.x + 8 <= self.point.x + self.size.width as isize {\n display.char(cursor, c, self.title_color, font);\n }\n cursor.x += 8;\n }\n\n display.rect(Point::new(self.point.x - 2, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n display.rect(Point::new(self.point.x - 2, self.point.y + self.size.height as isize),\n Size::new(self.size.width + 4, 2),\n self.border_color);\n display.rect(Point::new(self.point.x + self.size.width as isize, self.point.y),\n Size::new(2, self.size.height),\n self.border_color);\n\n unsafe {\n let reenable = scheduler::start_no_ints();\n display.image(self.point,\n self.content.onscreen as *const Color,\n Size::new(self.content.width, self.content.height));\n scheduler::end_no_ints(reenable);\n }\n }\n }\n\n \/\/\/ Called on key press\n pub fn on_key(&mut self, key_event: KeyEvent) {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(key_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n fn on_window_decoration(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= -2 && x < self.size.width as isize + 4 && y >= -18 && y < 0\n }\n\n fn on_window_body(&self, x: isize, y: isize) -> bool {\n !self.minimized && x >= 0 && x < self.size.width as isize && y >= 0 &&\n y < self.size.height as isize\n }\n\n fn mouse_button_pressed(mouse_event: &MouseEvent) -> bool {\n mouse_event.left_button || mouse_event.middle_button || mouse_event.right_button \n }\n\n \/\/\/ Called on mouse movement\n pub fn on_mouse(&mut self, orig_mouse_event: MouseEvent, allow_catch: bool, active_window: bool) -> bool {\n let mut mouse_event = orig_mouse_event;\n\n mouse_event.x -= self.point.x;\n mouse_event.y -= self.point.y;\n\n let mut caught = false;\n\n if allow_catch && (active_window || (Window::mouse_button_pressed(&mouse_event) && !Window::mouse_button_pressed(&self.last_mouse_event))) {\n if self.on_window_body(mouse_event.x, mouse_event.y) {\n caught = true;\n } else if self.on_window_decoration(mouse_event.x, mouse_event.y) {\n caught = true;\n\n if mouse_event.left_button {\n if !self.last_mouse_event.left_button {\n self.dragging = true;\n }\n } else {\n self.dragging = false;\n }\n\n if mouse_event.right_button {\n if !self.last_mouse_event.right_button {\n self.minimized = !self.minimized;\n }\n }\n\n if mouse_event.middle_button {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(QuitEvent.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n }\n \n if self.dragging {\n self.point.x += orig_mouse_event.x - self.last_mouse_event.x;\n 
self.point.y += orig_mouse_event.y - self.last_mouse_event.y;\n caught = true;\n }\n } else {\n self.dragging = false;\n }\n\n self.last_mouse_event = orig_mouse_event;\n\n if caught && !self.dragging {\n unsafe {\n let reenable = scheduler::start_no_ints();\n self.events.push_back(mouse_event.to_event());\n scheduler::end_no_ints(reenable);\n }\n }\n\n caught\n }\n}\n\nimpl Drop for Window {\n fn drop(&mut self) {\n unsafe {\n if self.ptr as usize > 0 {\n (*super::session_ptr).remove_window(self.ptr);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>examples\/print_torrent<commit_after>extern crate lazy_bencoding;\n\nuse std::env;\nuse std::fs::File;\nuse std::io::Read;\n\nuse lazy_bencoding::*;\n\nfn pretty_print<'a>(bencoded: BEncoded<'a>, padding: &str) {\n if let Some(s) = bencoded.get_utf8_string() {\n println!(\"{}{}\", padding, s);\n } else if let Some(bs) = bencoded.get_byte_string() {\n \/\/ Hexdump\n let mut line = String::new();\n for (i, b) in bs.iter().enumerate() {\n line.push_str(&format!(\"{:02X}\", b));\n if i & 15 == 0 {\n println!(\"{}{}\", padding, line);\n line.clear();\n } else if i & 7 == 0 {\n line.push_str(\" \");\n } else if i & 3 == 0 {\n line.push_str(\" \");\n } else {\n line.push_str(\" \");\n }\n }\n if line.len() > 0 {\n println!(\"{}{}\", padding, line);\n }\n } else if let Some(i) = bencoded.get_integer() {\n println!(\"{}{}\", padding, i);\n } else if bencoded.is_dict() {\n for (key, value) in bencoded.dict() {\n if let Some(key) = key.get_utf8_string() {\n println!(\"{}{}:\", padding, key);\n } else {\n pretty_print(key, padding);\n }\n pretty_print(value, &format!(\"{} \", padding));\n }\n } else if bencoded.is_list() {\n for (i, elem) in bencoded.list().enumerate() {\n let padding = if i == 0 {\n format!(\"{} - \", padding)\n } else {\n let space_padding: String = padding.chars()\n .map(|c| ' ').collect();\n format!(\"{} - \", space_padding)\n };\n pretty_print(elem, &padding);\n }\n } else {\n println!(\"{}Weird tokens: {:?}\", padding, bencoded.collect::<Vec<Token<'a>>>());\n }\n}\n\nfn main() {\n for filename in env::args().skip(1) {\n let mut f = match File::open(&filename) {\n Ok(f) => f,\n Err(e) => {\n println!(\"{}: {:?}\", filename, e);\n continue;\n }\n };\n let mut contents = Vec::new();\n f.read_to_end(&mut contents).unwrap();\n\n let bencoded = BEncoded::new(&contents[..]);\n pretty_print(bencoded, \"\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::Bindings::HTMLIFrameElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods;\nuse dom::bindings::codegen::InheritTypes::{ElementCast, HTMLIFrameElementDerived, HTMLElementCast};\nuse dom::bindings::js::{JSRef, Temporary, OptionalRootable};\nuse dom::bindings::trace::Traceable;\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::document::Document;\nuse dom::element::{HTMLIFrameElementTypeId, Element};\nuse dom::element::AttributeHandlers;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse dom::window::Window;\nuse page::IterablePage;\nuse servo_msg::constellation_msg::{PipelineId, SubpageId};\nuse servo_msg::constellation_msg::{IFrameSandboxed, IFrameUnsandboxed};\nuse servo_msg::constellation_msg::{ConstellationChan, LoadIframeUrlMsg};\nuse servo_util::namespace::Null;\nuse servo_util::str::DOMString;\n\nuse std::ascii::StrAsciiExt;\nuse std::cell::Cell;\nuse url::{Url, UrlParser};\n\nenum SandboxAllowance {\n AllowNothing = 0x00,\n AllowSameOrigin = 0x01,\n AllowTopNavigation = 0x02,\n AllowForms = 0x04,\n AllowScripts = 0x08,\n AllowPointerLock = 0x10,\n AllowPopups = 0x20\n}\n\n#[deriving(Encodable)]\npub struct HTMLIFrameElement {\n pub htmlelement: HTMLElement,\n pub size: Traceable<Cell<Option<IFrameSize>>>,\n pub sandbox: Traceable<Cell<Option<u8>>>,\n}\n\nimpl HTMLIFrameElementDerived for EventTarget {\n fn is_htmliframeelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLIFrameElementTypeId))\n }\n}\n\n#[deriving(Encodable)]\npub struct IFrameSize {\n pub pipeline_id: PipelineId,\n pub subpage_id: SubpageId,\n}\n\npub trait HTMLIFrameElementHelpers {\n fn is_sandboxed(&self) -> bool;\n fn get_url(&self) -> Option<Url>;\n}\n\nimpl<'a> HTMLIFrameElementHelpers for JSRef<'a, HTMLIFrameElement> {\n fn is_sandboxed(&self) -> bool {\n self.sandbox.deref().get().is_some()\n }\n\n fn get_url(&self) -> Option<Url> {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_attribute(Null, \"src\").root().and_then(|src| {\n let window = window_from_node(self).root();\n UrlParser::new().base_url(&window.deref().page().get_url())\n .parse(src.deref().value().as_slice()).ok()\n })\n }\n}\n\nimpl HTMLIFrameElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLIFrameElement {\n HTMLIFrameElement {\n htmlelement: HTMLElement::new_inherited(HTMLIFrameElementTypeId, localName, document),\n size: Traceable::new(Cell::new(None)),\n sandbox: Traceable::new(Cell::new(None)),\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLIFrameElement> {\n let element = HTMLIFrameElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLIFrameElementBinding::Wrap)\n }\n}\n\nimpl<'a> HTMLIFrameElementMethods for JSRef<'a, HTMLIFrameElement> {\n fn Src(&self) -> DOMString {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_string_attribute(\"src\")\n }\n\n fn SetSrc(&self, src: DOMString) {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.set_url_attribute(\"src\", src)\n }\n\n fn Sandbox(&self) -> DOMString {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_string_attribute(\"sandbox\")\n }\n\n fn SetSandbox(&self, sandbox: DOMString) {\n let 
element: &JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"sandbox\", sandbox);\n }\n\n fn GetContentWindow(&self) -> Option<Temporary<Window>> {\n self.size.deref().get().and_then(|size| {\n let window = window_from_node(self).root();\n let children = &*window.deref().page.children.deref().borrow();\n let child = children.iter().find(|child| {\n child.subpage_id.unwrap() == size.subpage_id\n });\n child.and_then(|page| {\n page.frame.deref().borrow().as_ref().map(|frame| {\n Temporary::new(frame.window.clone())\n })\n })\n })\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLIFrameElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods+> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_ref(self);\n Some(htmlelement as &VirtualMethods+)\n }\n\n fn after_set_attr(&self, name: DOMString, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(name.clone(), value.clone()),\n _ => (),\n }\n\n if \"sandbox\" == name.as_slice() {\n let mut modes = AllowNothing as u8;\n for word in value.as_slice().split(' ') {\n modes |= match word.to_ascii_lower().as_slice() {\n \"allow-same-origin\" => AllowSameOrigin,\n \"allow-forms\" => AllowForms,\n \"allow-pointer-lock\" => AllowPointerLock,\n \"allow-popups\" => AllowPopups,\n \"allow-scripts\" => AllowScripts,\n \"allow-top-navigation\" => AllowTopNavigation,\n _ => AllowNothing\n } as u8;\n }\n self.deref().sandbox.deref().set(Some(modes));\n }\n }\n\n fn before_remove_attr(&self, name: DOMString, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.before_remove_attr(name.clone(), value),\n _ => (),\n }\n\n if \"sandbox\" == name.as_slice() {\n self.deref().sandbox.deref().set(None);\n }\n }\n\n fn bind_to_tree(&self, tree_in_doc: bool) {\n match self.super_type() {\n Some(ref s) => s.bind_to_tree(tree_in_doc),\n _ => (),\n }\n\n if !tree_in_doc { return; }\n\n match self.get_url() {\n Some(url) => {\n let sandboxed = if self.is_sandboxed() {\n IFrameSandboxed\n } else {\n IFrameUnsandboxed\n };\n\n \/\/ Subpage Id\n let window = window_from_node(self).root();\n let page = window.deref().page();\n let subpage_id = page.get_next_subpage_id();\n\n self.deref().size.deref().set(Some(IFrameSize {\n pipeline_id: page.id,\n subpage_id: subpage_id,\n }));\n\n let ConstellationChan(ref chan) = *page.constellation_chan.deref();\n chan.send(LoadIframeUrlMsg(url, page.id, subpage_id, sandboxed));\n }\n _ => ()\n }\n }\n}\n\nimpl Reflectable for HTMLIFrameElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.htmlelement.reflector()\n }\n}\n<commit_msg> Implement src setter for HTMLIFrameElement #2529<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::Bindings::HTMLIFrameElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods;\nuse dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast};\nuse dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLIFrameElementDerived};\nuse dom::bindings::js::{JSRef, Temporary, OptionalRootable};\nuse dom::bindings::trace::Traceable;\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::document::Document;\nuse dom::element::{HTMLIFrameElementTypeId, Element};\nuse dom::element::AttributeHandlers;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, NodeHelpers, ElementNodeTypeId, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse dom::window::Window;\nuse page::IterablePage;\nuse servo_msg::constellation_msg::{PipelineId, SubpageId};\nuse servo_msg::constellation_msg::{IFrameSandboxed, IFrameUnsandboxed};\nuse servo_msg::constellation_msg::{ConstellationChan, LoadIframeUrlMsg};\nuse servo_util::namespace::Null;\nuse servo_util::str::DOMString;\n\nuse std::ascii::StrAsciiExt;\nuse std::cell::Cell;\nuse url::{Url, UrlParser};\n\nenum SandboxAllowance {\n AllowNothing = 0x00,\n AllowSameOrigin = 0x01,\n AllowTopNavigation = 0x02,\n AllowForms = 0x04,\n AllowScripts = 0x08,\n AllowPointerLock = 0x10,\n AllowPopups = 0x20\n}\n\n#[deriving(Encodable)]\npub struct HTMLIFrameElement {\n pub htmlelement: HTMLElement,\n pub size: Traceable<Cell<Option<IFrameSize>>>,\n pub sandbox: Traceable<Cell<Option<u8>>>,\n}\n\nimpl HTMLIFrameElementDerived for EventTarget {\n fn is_htmliframeelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLIFrameElementTypeId))\n }\n}\n\n#[deriving(Encodable)]\npub struct IFrameSize {\n pub pipeline_id: PipelineId,\n pub subpage_id: SubpageId,\n}\n\npub trait HTMLIFrameElementHelpers {\n fn is_sandboxed(&self) -> bool;\n fn get_url(&self) -> Option<Url>;\n \/\/\/ http:\/\/www.whatwg.org\/html\/#process-the-iframe-attributes\n fn process_the_iframe_attributes(&self);\n}\n\nimpl<'a> HTMLIFrameElementHelpers for JSRef<'a, HTMLIFrameElement> {\n fn is_sandboxed(&self) -> bool {\n self.sandbox.deref().get().is_some()\n }\n\n fn get_url(&self) -> Option<Url> {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_attribute(Null, \"src\").root().and_then(|src| {\n let window = window_from_node(self).root();\n UrlParser::new().base_url(&window.deref().page().get_url())\n .parse(src.deref().value().as_slice()).ok()\n })\n }\n\n fn process_the_iframe_attributes(&self) {\n match self.get_url() {\n Some(url) => {\n let sandboxed = if self.is_sandboxed() {\n IFrameSandboxed\n } else {\n IFrameUnsandboxed\n };\n\n \/\/ Subpage Id\n let window = window_from_node(self).root();\n let page = window.deref().page();\n let subpage_id = page.get_next_subpage_id();\n\n self.deref().size.deref().set(Some(IFrameSize {\n pipeline_id: page.id,\n subpage_id: subpage_id,\n }));\n\n let ConstellationChan(ref chan) = *page.constellation_chan.deref();\n chan.send(LoadIframeUrlMsg(url, page.id, subpage_id, sandboxed));\n }\n _ => ()\n }\n }\n}\n\nimpl HTMLIFrameElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLIFrameElement {\n HTMLIFrameElement {\n htmlelement: HTMLElement::new_inherited(HTMLIFrameElementTypeId, localName, document),\n size: Traceable::new(Cell::new(None)),\n sandbox: Traceable::new(Cell::new(None)),\n }\n }\n\n pub fn new(localName: 
DOMString, document: &JSRef<Document>) -> Temporary<HTMLIFrameElement> {\n let element = HTMLIFrameElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLIFrameElementBinding::Wrap)\n }\n}\n\nimpl<'a> HTMLIFrameElementMethods for JSRef<'a, HTMLIFrameElement> {\n fn Src(&self) -> DOMString {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_string_attribute(\"src\")\n }\n\n fn SetSrc(&self, src: DOMString) {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.set_url_attribute(\"src\", src)\n }\n\n fn Sandbox(&self) -> DOMString {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.get_string_attribute(\"sandbox\")\n }\n\n fn SetSandbox(&self, sandbox: DOMString) {\n let element: &JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"sandbox\", sandbox);\n }\n\n fn GetContentWindow(&self) -> Option<Temporary<Window>> {\n self.size.deref().get().and_then(|size| {\n let window = window_from_node(self).root();\n let children = &*window.deref().page.children.deref().borrow();\n let child = children.iter().find(|child| {\n child.subpage_id.unwrap() == size.subpage_id\n });\n child.and_then(|page| {\n page.frame.deref().borrow().as_ref().map(|frame| {\n Temporary::new(frame.window.clone())\n })\n })\n })\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLIFrameElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods+> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_ref(self);\n Some(htmlelement as &VirtualMethods+)\n }\n\n fn after_set_attr(&self, name: DOMString, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(name.clone(), value.clone()),\n _ => (),\n }\n\n if \"sandbox\" == name.as_slice() {\n let mut modes = AllowNothing as u8;\n for word in value.as_slice().split(' ') {\n modes |= match word.to_ascii_lower().as_slice() {\n \"allow-same-origin\" => AllowSameOrigin,\n \"allow-forms\" => AllowForms,\n \"allow-pointer-lock\" => AllowPointerLock,\n \"allow-popups\" => AllowPopups,\n \"allow-scripts\" => AllowScripts,\n \"allow-top-navigation\" => AllowTopNavigation,\n _ => AllowNothing\n } as u8;\n }\n self.deref().sandbox.deref().set(Some(modes));\n }\n\n if \"src\" == name.as_slice() {\n let node: &JSRef<Node> = NodeCast::from_ref(self);\n if node.is_in_doc() {\n self.process_the_iframe_attributes()\n }\n }\n }\n\n fn before_remove_attr(&self, name: DOMString, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.before_remove_attr(name.clone(), value),\n _ => (),\n }\n\n if \"sandbox\" == name.as_slice() {\n self.deref().sandbox.deref().set(None);\n }\n }\n\n fn bind_to_tree(&self, tree_in_doc: bool) {\n match self.super_type() {\n Some(ref s) => s.bind_to_tree(tree_in_doc),\n _ => (),\n }\n\n if tree_in_doc {\n self.process_the_iframe_attributes();\n }\n }\n}\n\nimpl Reflectable for HTMLIFrameElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.htmlelement.reflector()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::{DOMRefCell, Ref, RefMut};\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::{OnErrorEventHandlerNonNull, EventHandlerNonNull};\nuse dom::bindings::codegen::Bindings::WindowBinding;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::error::{Fallible, InvalidCharacter};\nuse dom::bindings::global;\nuse dom::bindings::js::{MutNullableJS, JSRef, Temporary, OptionalSettable};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::browsercontext::BrowserContext;\nuse dom::console::Console;\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, WindowTypeId, EventTargetHelpers};\nuse dom::location::Location;\nuse dom::navigator::Navigator;\nuse dom::performance::Performance;\nuse dom::screen::Screen;\nuse layout_interface::NoQuery;\nuse page::Page;\nuse script_task::{ExitWindowMsg, ScriptChan, TriggerLoadMsg, TriggerFragmentMsg};\nuse script_task::FromWindow;\nuse script_traits::ScriptControlChan;\nuse timers::{TimerId, TimerManager};\n\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::LoadData;\nuse servo_net::image_cache_task::ImageCacheTask;\nuse servo_util::str::{DOMString,HTML_SPACE_CHARACTERS};\n\nuse js::jsapi::JS_EvaluateUCScript;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_GC, JS_GetRuntime};\nuse js::jsval::{JSVal, UndefinedValue};\nuse js::rust::with_compartment;\nuse url::{Url, UrlParser};\n\nuse libc;\nuse serialize::base64::{FromBase64, ToBase64, STANDARD};\nuse std::default::Default;\nuse std::rc::Rc;\nuse time;\n\n#[dom_struct]\npub struct Window {\n eventtarget: EventTarget,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n console: MutNullableJS<Console>,\n location: MutNullableJS<Location>,\n navigator: MutNullableJS<Navigator>,\n image_cache_task: ImageCacheTask,\n compositor: DOMRefCell<Box<ScriptListener+'static>>,\n browser_context: DOMRefCell<Option<BrowserContext>>,\n page: Rc<Page>,\n performance: MutNullableJS<Performance>,\n navigation_start: u64,\n navigation_start_precise: f64,\n screen: MutNullableJS<Screen>,\n timers: TimerManager\n}\n\nimpl Window {\n pub fn get_cx(&self) -> *mut JSContext {\n let js_info = self.page().js_info();\n (*js_info.as_ref().unwrap().js_context).ptr\n }\n\n pub fn script_chan<'a>(&'a self) -> &'a ScriptChan {\n &self.script_chan\n }\n\n pub fn control_chan<'a>(&'a self) -> &'a ScriptControlChan {\n &self.control_chan\n }\n\n pub fn image_cache_task<'a>(&'a self) -> &'a ImageCacheTask {\n &self.image_cache_task\n }\n\n pub fn compositor(&self) -> RefMut<Box<ScriptListener+'static>> {\n self.compositor.borrow_mut()\n }\n\n pub fn browser_context(&self) -> Ref<Option<BrowserContext>> {\n self.browser_context.borrow()\n }\n\n pub fn page<'a>(&'a self) -> &'a Page {\n &*self.page\n }\n\n pub fn navigation_start(&self) -> u64 {\n self.navigation_start\n }\n\n pub fn navigation_start_precise(&self) -> f64 {\n self.navigation_start_precise\n }\n\n pub fn get_url(&self) -> Url {\n self.page().get_url()\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_btoa(btoa: DOMString) -> Fallible<DOMString> {\n let input = btoa.as_slice();\n \/\/ \"The btoa() method must throw an InvalidCharacterError exception if\n \/\/ the method's first argument contains any character whose code point\n \/\/ is greater than U+00FF.\"\n if input.chars().any(|c: char| c > '\\u00FF') {\n Err(InvalidCharacter)\n } else {\n \/\/ \"Otherwise, the user 
agent must convert that argument to a\n \/\/ sequence of octets whose nth octet is the eight-bit\n \/\/ representation of the code point of the nth character of\n \/\/ the argument,\"\n let octets = input.chars().map(|c: char| c as u8).collect::<Vec<u8>>();\n\n \/\/ \"and then must apply the base64 algorithm to that sequence of\n \/\/ octets, and return the result. [RFC4648]\"\n Ok(octets.as_slice().to_base64(STANDARD))\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_atob(atob: DOMString) -> Fallible<DOMString> {\n \/\/ \"Let input be the string being parsed.\"\n let mut input = atob.as_slice();\n\n \/\/ \"Remove all space characters from input.\"\n \/\/ serialize::base64::from_base64 ignores \\r and \\n,\n \/\/ but it treats the other space characters as\n \/\/ invalid input.\n fn is_html_space(c: char) -> bool {\n HTML_SPACE_CHARACTERS.iter().any(|&m| m == c)\n }\n let without_spaces = input.chars()\n .filter(|&c| ! is_html_space(c))\n .collect::<String>();\n input = without_spaces.as_slice();\n\n \/\/ \"If the length of input divides by 4 leaving no remainder, then:\n \/\/ if input ends with one or two U+003D EQUALS SIGN (=) characters,\n \/\/ remove them from input.\"\n if input.len() % 4 == 0 {\n if input.ends_with(\"==\") {\n input = input.slice_to(input.len() - 2)\n } else if input.ends_with(\"=\") {\n input = input.slice_to(input.len() - 1)\n }\n }\n\n \/\/ \"If the length of input divides by 4 leaving a remainder of 1,\n \/\/ throw an InvalidCharacterError exception and abort these steps.\"\n if input.len() % 4 == 1 {\n return Err(InvalidCharacter)\n }\n\n \/\/ \"If input contains a character that is not in the following list of\n \/\/ characters and character ranges, throw an InvalidCharacterError\n \/\/ exception and abort these steps:\n \/\/\n \/\/ U+002B PLUS SIGN (+)\n \/\/ U+002F SOLIDUS (\/)\n \/\/ Alphanumeric ASCII characters\"\n if input.chars()\n .find(|&c| !(c == '+' || c == '\/' || c.is_alphanumeric()))\n .is_some() {\n return Err(InvalidCharacter)\n }\n\n match input.from_base64() {\n Ok(data) => Ok(data.iter().map(|&b| b as char).collect::<String>()),\n Err(..) 
=> Err(InvalidCharacter)\n }\n}\n\n\nimpl<'a> WindowMethods for JSRef<'a, Window> {\n fn Alert(self, s: DOMString) {\n \/\/ Right now, just print to the console\n println!(\"ALERT: {:s}\", s);\n }\n\n fn Close(self) {\n let ScriptChan(ref chan) = self.script_chan;\n chan.send(ExitWindowMsg(self.page.id.clone()));\n }\n\n fn Document(self) -> Temporary<Document> {\n let frame = self.page().frame();\n Temporary::new(frame.as_ref().unwrap().document.clone())\n }\n\n fn Location(self) -> Temporary<Location> {\n if self.location.get().is_none() {\n let page = self.page.clone();\n let location = Location::new(self, page);\n self.location.assign(Some(location));\n }\n self.location.get().unwrap()\n }\n\n fn Console(self) -> Temporary<Console> {\n if self.console.get().is_none() {\n let console = Console::new(&global::Window(self));\n self.console.assign(Some(console));\n }\n self.console.get().unwrap()\n }\n\n fn Navigator(self) -> Temporary<Navigator> {\n if self.navigator.get().is_none() {\n let navigator = Navigator::new(self);\n self.navigator.assign(Some(navigator));\n }\n self.navigator.get().unwrap()\n }\n\n fn SetTimeout(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n false, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearTimeout(self, handle: i32) {\n self.timers.clear_timeout_or_interval(handle);\n }\n\n fn SetInterval(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n true, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearInterval(self, handle: i32) {\n self.ClearTimeout(handle);\n }\n\n fn Window(self) -> Temporary<Window> {\n Temporary::from_rooted(self)\n }\n\n fn Self(self) -> Temporary<Window> {\n self.Window()\n }\n\n \/\/ http:\/\/www.whatwg.org\/html\/#dom-frames\n fn Frames(self) -> Temporary<Window> {\n self.Window()\n }\n\n fn Parent(self) -> Temporary<Window> {\n \/\/TODO - Once we support iframes correctly this needs to return the parent frame\n self.Window()\n }\n\n fn Performance(self) -> Temporary<Performance> {\n if self.performance.get().is_none() {\n let performance = Performance::new(self);\n self.performance.assign(Some(performance));\n }\n self.performance.get().unwrap()\n }\n\n event_handler!(click, GetOnclick, SetOnclick)\n event_handler!(load, GetOnload, SetOnload)\n event_handler!(unload, GetOnunload, SetOnunload)\n error_event_handler!(error, GetOnerror, SetOnerror)\n\n fn Screen(self) -> Temporary<Screen> {\n if self.screen.get().is_none() {\n let screen = Screen::new(self);\n self.screen.assign(Some(screen));\n }\n self.screen.get().unwrap()\n }\n\n fn Debug(self, message: DOMString) {\n debug!(\"{:s}\", message);\n }\n\n fn Gc(self) {\n unsafe {\n JS_GC(JS_GetRuntime(self.get_cx()));\n }\n }\n\n fn Btoa(self, btoa: DOMString) -> Fallible<DOMString> {\n base64_btoa(btoa)\n }\n\n fn Atob(self, atob: DOMString) -> Fallible<DOMString> {\n base64_atob(atob)\n }\n}\n\nimpl Reflectable for Window {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.eventtarget.reflector()\n }\n}\n\npub trait WindowHelpers {\n fn reflow(self);\n fn flush_layout(self);\n fn wait_until_safe_to_modify_dom(self);\n fn init_browser_context(self, doc: JSRef<Document>);\n fn load_url(self, href: DOMString);\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext);\n fn evaluate_js_with_result(self, code: &str) -> JSVal;\n 
fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal;\n}\n\n\nimpl<'a> WindowHelpers for JSRef<'a, Window> {\n fn evaluate_js_with_result(self, code: &str) -> JSVal {\n self.evaluate_script_with_result(code, \"\")\n }\n\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal {\n let global = self.reflector().get_jsobject();\n let code: Vec<u16> = code.as_slice().utf16_units().collect();\n let mut rval = UndefinedValue();\n let filename = filename.to_c_str();\n let cx = self.get_cx();\n\n with_compartment(cx, global, || {\n unsafe {\n if JS_EvaluateUCScript(cx, global, code.as_ptr(),\n code.len() as libc::c_uint,\n filename.as_ptr(), 1, &mut rval) == 0 {\n debug!(\"error evaluating JS string\");\n }\n rval\n }\n })\n }\n\n fn reflow(self) {\n self.page().damage();\n }\n\n fn flush_layout(self) {\n self.page().flush_layout(NoQuery);\n }\n\n fn wait_until_safe_to_modify_dom(self) {\n \/\/ FIXME: This disables concurrent layout while we are modifying the DOM, since\n \/\/ our current architecture is entirely unsafe in the presence of races.\n self.page().join_layout();\n }\n\n fn init_browser_context(self, doc: JSRef<Document>) {\n *self.browser_context.borrow_mut() = Some(BrowserContext::new(doc));\n }\n\n \/\/\/ Commence a new URL load which will either replace this window or scroll to a fragment.\n fn load_url(self, href: DOMString) {\n let base_url = self.page().get_url();\n debug!(\"current page url is {}\", base_url);\n let url = UrlParser::new().base_url(&base_url).parse(href.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let url = url.unwrap();\n let ScriptChan(ref script_chan) = self.script_chan;\n if href.as_slice().starts_with(\"#\") {\n script_chan.send(TriggerFragmentMsg(self.page.id, url));\n } else {\n script_chan.send(TriggerLoadMsg(self.page.id, LoadData::new(url)));\n }\n }\n\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext) {\n let this_value = self.reflector().get_jsobject();\n self.timers.fire_timer(timer_id, this_value, cx);\n }\n}\n\nimpl Window {\n pub fn new(cx: *mut JSContext,\n page: Rc<Page>,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n compositor: Box<ScriptListener+'static>,\n image_cache_task: ImageCacheTask)\n -> Temporary<Window> {\n let win = box Window {\n eventtarget: EventTarget::new_inherited(WindowTypeId),\n script_chan: script_chan,\n control_chan: control_chan,\n console: Default::default(),\n compositor: DOMRefCell::new(compositor),\n page: page,\n location: Default::default(),\n navigator: Default::default(),\n image_cache_task: image_cache_task,\n browser_context: DOMRefCell::new(None),\n performance: Default::default(),\n navigation_start: time::get_time().sec as u64,\n navigation_start_precise: time::precise_time_s(),\n screen: Default::default(),\n timers: TimerManager::new()\n };\n\n WindowBinding::Wrap(cx, win)\n }\n}\n<commit_msg>Flush layout after executing timers.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::{DOMRefCell, Ref, RefMut};\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::{OnErrorEventHandlerNonNull, EventHandlerNonNull};\nuse dom::bindings::codegen::Bindings::WindowBinding;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::error::{Fallible, InvalidCharacter};\nuse dom::bindings::global;\nuse dom::bindings::js::{MutNullableJS, JSRef, Temporary, OptionalSettable};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::browsercontext::BrowserContext;\nuse dom::console::Console;\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, WindowTypeId, EventTargetHelpers};\nuse dom::location::Location;\nuse dom::navigator::Navigator;\nuse dom::performance::Performance;\nuse dom::screen::Screen;\nuse layout_interface::NoQuery;\nuse page::Page;\nuse script_task::{ExitWindowMsg, ScriptChan, TriggerLoadMsg, TriggerFragmentMsg};\nuse script_task::FromWindow;\nuse script_traits::ScriptControlChan;\nuse timers::{TimerId, TimerManager};\n\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::LoadData;\nuse servo_net::image_cache_task::ImageCacheTask;\nuse servo_util::str::{DOMString,HTML_SPACE_CHARACTERS};\n\nuse js::jsapi::JS_EvaluateUCScript;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_GC, JS_GetRuntime};\nuse js::jsval::{JSVal, UndefinedValue};\nuse js::rust::with_compartment;\nuse url::{Url, UrlParser};\n\nuse libc;\nuse serialize::base64::{FromBase64, ToBase64, STANDARD};\nuse std::default::Default;\nuse std::rc::Rc;\nuse time;\n\n#[dom_struct]\npub struct Window {\n eventtarget: EventTarget,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n console: MutNullableJS<Console>,\n location: MutNullableJS<Location>,\n navigator: MutNullableJS<Navigator>,\n image_cache_task: ImageCacheTask,\n compositor: DOMRefCell<Box<ScriptListener+'static>>,\n browser_context: DOMRefCell<Option<BrowserContext>>,\n page: Rc<Page>,\n performance: MutNullableJS<Performance>,\n navigation_start: u64,\n navigation_start_precise: f64,\n screen: MutNullableJS<Screen>,\n timers: TimerManager\n}\n\nimpl Window {\n pub fn get_cx(&self) -> *mut JSContext {\n let js_info = self.page().js_info();\n (*js_info.as_ref().unwrap().js_context).ptr\n }\n\n pub fn script_chan<'a>(&'a self) -> &'a ScriptChan {\n &self.script_chan\n }\n\n pub fn control_chan<'a>(&'a self) -> &'a ScriptControlChan {\n &self.control_chan\n }\n\n pub fn image_cache_task<'a>(&'a self) -> &'a ImageCacheTask {\n &self.image_cache_task\n }\n\n pub fn compositor(&self) -> RefMut<Box<ScriptListener+'static>> {\n self.compositor.borrow_mut()\n }\n\n pub fn browser_context(&self) -> Ref<Option<BrowserContext>> {\n self.browser_context.borrow()\n }\n\n pub fn page<'a>(&'a self) -> &'a Page {\n &*self.page\n }\n\n pub fn navigation_start(&self) -> u64 {\n self.navigation_start\n }\n\n pub fn navigation_start_precise(&self) -> f64 {\n self.navigation_start_precise\n }\n\n pub fn get_url(&self) -> Url {\n self.page().get_url()\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_btoa(btoa: DOMString) -> Fallible<DOMString> {\n let input = btoa.as_slice();\n \/\/ \"The btoa() method must throw an InvalidCharacterError exception if\n \/\/ the method's first argument contains any character whose code point\n \/\/ is greater than U+00FF.\"\n if input.chars().any(|c: char| c > '\\u00FF') {\n Err(InvalidCharacter)\n } else {\n \/\/ \"Otherwise, the user 
agent must convert that argument to a\n \/\/ sequence of octets whose nth octet is the eight-bit\n \/\/ representation of the code point of the nth character of\n \/\/ the argument,\"\n let octets = input.chars().map(|c: char| c as u8).collect::<Vec<u8>>();\n\n \/\/ \"and then must apply the base64 algorithm to that sequence of\n \/\/ octets, and return the result. [RFC4648]\"\n Ok(octets.as_slice().to_base64(STANDARD))\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_atob(atob: DOMString) -> Fallible<DOMString> {\n \/\/ \"Let input be the string being parsed.\"\n let mut input = atob.as_slice();\n\n \/\/ \"Remove all space characters from input.\"\n \/\/ serialize::base64::from_base64 ignores \\r and \\n,\n \/\/ but it treats the other space characters as\n \/\/ invalid input.\n fn is_html_space(c: char) -> bool {\n HTML_SPACE_CHARACTERS.iter().any(|&m| m == c)\n }\n let without_spaces = input.chars()\n .filter(|&c| ! is_html_space(c))\n .collect::<String>();\n input = without_spaces.as_slice();\n\n \/\/ \"If the length of input divides by 4 leaving no remainder, then:\n \/\/ if input ends with one or two U+003D EQUALS SIGN (=) characters,\n \/\/ remove them from input.\"\n if input.len() % 4 == 0 {\n if input.ends_with(\"==\") {\n input = input.slice_to(input.len() - 2)\n } else if input.ends_with(\"=\") {\n input = input.slice_to(input.len() - 1)\n }\n }\n\n \/\/ \"If the length of input divides by 4 leaving a remainder of 1,\n \/\/ throw an InvalidCharacterError exception and abort these steps.\"\n if input.len() % 4 == 1 {\n return Err(InvalidCharacter)\n }\n\n \/\/ \"If input contains a character that is not in the following list of\n \/\/ characters and character ranges, throw an InvalidCharacterError\n \/\/ exception and abort these steps:\n \/\/\n \/\/ U+002B PLUS SIGN (+)\n \/\/ U+002F SOLIDUS (\/)\n \/\/ Alphanumeric ASCII characters\"\n if input.chars()\n .find(|&c| !(c == '+' || c == '\/' || c.is_alphanumeric()))\n .is_some() {\n return Err(InvalidCharacter)\n }\n\n match input.from_base64() {\n Ok(data) => Ok(data.iter().map(|&b| b as char).collect::<String>()),\n Err(..) 
=> Err(InvalidCharacter)\n }\n}\n\n\nimpl<'a> WindowMethods for JSRef<'a, Window> {\n fn Alert(self, s: DOMString) {\n \/\/ Right now, just print to the console\n println!(\"ALERT: {:s}\", s);\n }\n\n fn Close(self) {\n let ScriptChan(ref chan) = self.script_chan;\n chan.send(ExitWindowMsg(self.page.id.clone()));\n }\n\n fn Document(self) -> Temporary<Document> {\n let frame = self.page().frame();\n Temporary::new(frame.as_ref().unwrap().document.clone())\n }\n\n fn Location(self) -> Temporary<Location> {\n if self.location.get().is_none() {\n let page = self.page.clone();\n let location = Location::new(self, page);\n self.location.assign(Some(location));\n }\n self.location.get().unwrap()\n }\n\n fn Console(self) -> Temporary<Console> {\n if self.console.get().is_none() {\n let console = Console::new(&global::Window(self));\n self.console.assign(Some(console));\n }\n self.console.get().unwrap()\n }\n\n fn Navigator(self) -> Temporary<Navigator> {\n if self.navigator.get().is_none() {\n let navigator = Navigator::new(self);\n self.navigator.assign(Some(navigator));\n }\n self.navigator.get().unwrap()\n }\n\n fn SetTimeout(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n false, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearTimeout(self, handle: i32) {\n self.timers.clear_timeout_or_interval(handle);\n }\n\n fn SetInterval(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n true, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearInterval(self, handle: i32) {\n self.ClearTimeout(handle);\n }\n\n fn Window(self) -> Temporary<Window> {\n Temporary::from_rooted(self)\n }\n\n fn Self(self) -> Temporary<Window> {\n self.Window()\n }\n\n \/\/ http:\/\/www.whatwg.org\/html\/#dom-frames\n fn Frames(self) -> Temporary<Window> {\n self.Window()\n }\n\n fn Parent(self) -> Temporary<Window> {\n \/\/TODO - Once we support iframes correctly this needs to return the parent frame\n self.Window()\n }\n\n fn Performance(self) -> Temporary<Performance> {\n if self.performance.get().is_none() {\n let performance = Performance::new(self);\n self.performance.assign(Some(performance));\n }\n self.performance.get().unwrap()\n }\n\n event_handler!(click, GetOnclick, SetOnclick)\n event_handler!(load, GetOnload, SetOnload)\n event_handler!(unload, GetOnunload, SetOnunload)\n error_event_handler!(error, GetOnerror, SetOnerror)\n\n fn Screen(self) -> Temporary<Screen> {\n if self.screen.get().is_none() {\n let screen = Screen::new(self);\n self.screen.assign(Some(screen));\n }\n self.screen.get().unwrap()\n }\n\n fn Debug(self, message: DOMString) {\n debug!(\"{:s}\", message);\n }\n\n fn Gc(self) {\n unsafe {\n JS_GC(JS_GetRuntime(self.get_cx()));\n }\n }\n\n fn Btoa(self, btoa: DOMString) -> Fallible<DOMString> {\n base64_btoa(btoa)\n }\n\n fn Atob(self, atob: DOMString) -> Fallible<DOMString> {\n base64_atob(atob)\n }\n}\n\nimpl Reflectable for Window {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.eventtarget.reflector()\n }\n}\n\npub trait WindowHelpers {\n fn reflow(self);\n fn flush_layout(self);\n fn wait_until_safe_to_modify_dom(self);\n fn init_browser_context(self, doc: JSRef<Document>);\n fn load_url(self, href: DOMString);\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext);\n fn evaluate_js_with_result(self, code: &str) -> JSVal;\n 
fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal;\n}\n\n\nimpl<'a> WindowHelpers for JSRef<'a, Window> {\n fn evaluate_js_with_result(self, code: &str) -> JSVal {\n self.evaluate_script_with_result(code, \"\")\n }\n\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal {\n let global = self.reflector().get_jsobject();\n let code: Vec<u16> = code.as_slice().utf16_units().collect();\n let mut rval = UndefinedValue();\n let filename = filename.to_c_str();\n let cx = self.get_cx();\n\n with_compartment(cx, global, || {\n unsafe {\n if JS_EvaluateUCScript(cx, global, code.as_ptr(),\n code.len() as libc::c_uint,\n filename.as_ptr(), 1, &mut rval) == 0 {\n debug!(\"error evaluating JS string\");\n }\n rval\n }\n })\n }\n\n fn reflow(self) {\n self.page().damage();\n }\n\n fn flush_layout(self) {\n self.page().flush_layout(NoQuery);\n }\n\n fn wait_until_safe_to_modify_dom(self) {\n \/\/ FIXME: This disables concurrent layout while we are modifying the DOM, since\n \/\/ our current architecture is entirely unsafe in the presence of races.\n self.page().join_layout();\n }\n\n fn init_browser_context(self, doc: JSRef<Document>) {\n *self.browser_context.borrow_mut() = Some(BrowserContext::new(doc));\n }\n\n \/\/\/ Commence a new URL load which will either replace this window or scroll to a fragment.\n fn load_url(self, href: DOMString) {\n let base_url = self.page().get_url();\n debug!(\"current page url is {}\", base_url);\n let url = UrlParser::new().base_url(&base_url).parse(href.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let url = url.unwrap();\n let ScriptChan(ref script_chan) = self.script_chan;\n if href.as_slice().starts_with(\"#\") {\n script_chan.send(TriggerFragmentMsg(self.page.id, url));\n } else {\n script_chan.send(TriggerLoadMsg(self.page.id, LoadData::new(url)));\n }\n }\n\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext) {\n let this_value = self.reflector().get_jsobject();\n self.timers.fire_timer(timer_id, this_value, cx);\n self.flush_layout();\n }\n}\n\nimpl Window {\n pub fn new(cx: *mut JSContext,\n page: Rc<Page>,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n compositor: Box<ScriptListener+'static>,\n image_cache_task: ImageCacheTask)\n -> Temporary<Window> {\n let win = box Window {\n eventtarget: EventTarget::new_inherited(WindowTypeId),\n script_chan: script_chan,\n control_chan: control_chan,\n console: Default::default(),\n compositor: DOMRefCell::new(compositor),\n page: page,\n location: Default::default(),\n navigator: Default::default(),\n image_cache_task: image_cache_task,\n browser_context: DOMRefCell::new(None),\n performance: Default::default(),\n navigation_start: time::get_time().sec as u64,\n navigation_start_precise: time::precise_time_s(),\n screen: Default::default(),\n timers: TimerManager::new()\n };\n\n WindowBinding::Wrap(cx, win)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #91<commit_after>use common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 91,\n answer: \"14234\",\n solver: solve\n};\n\nfn count_right_o(x_max: uint, y_max: uint) -> uint {\n return x_max * y_max;\n}\n\nfn count_right_p(x_max: uint, y_max: uint) -> uint {\n let mut cnt = x_max * y_max; \/\/ (0, y0) - (xi, y0) => xi: [1, x_max], y0: [0, y_max]\n\n for uint::range(1, x_max + 1) |x| {\n for uint::range(1, y_max + 1) |y| {\n let d = x.gcd(&y);\n let (dx, neg_dy) = (y \/ d, x \/ d);\n cnt += uint::min(y \/ neg_dy, 
(x_max - x) \/ dx);\n }\n }\n\n return cnt;\n}\n\n\nfn solve() -> ~str {\n let (x_max, y_max) = (50, 50);\n let answer = count_right_o(x_max, y_max) + count_right_p(x_max, y_max) * 2;\n return answer.to_str();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add testing module for Coord<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for wild fields<commit_after>#![feature(plugin)]\n#![plugin(clippy)]\n\n#![deny(unneeded_field_pattern)]\n#[allow(dead_code, unused)]\n\nstruct Foo {\n a: i32,\n b: i32,\n c: i32,\n}\n\nfn main() {\n let f = Foo { a: 0, b: 0, c: 0 };\n\n match f {\n Foo { a: _, b: 0, .. } => {} \/\/~ERROR You matched a field with a wildcard pattern\n \/\/~^ HELP Try with `Foo { b: 0, .. }`\n Foo { a: _, b: _, c: _ } => {} \/\/~ERROR All the struct fields are matched to a\n \/\/~^ HELP Try with `Foo { .. }`\n }\n match f {\n Foo { b: 0, .. } => {} \/\/ should be OK\n Foo { .. } => {} \/\/ and the Force might be with this one\n }\n}<|endoftext|>"} {"text":"<commit_before>#![doc(html_root_url = \"https:\/\/docs.rs\/prost-codegen\/0.1.1\")]\n\/\/ The `quote!` macro requires deep recursion.\n#![recursion_limit = \"4096\"]\n\nextern crate itertools;\nextern crate proc_macro;\nextern crate syn;\n\n#[macro_use]\nextern crate error_chain;\n#[macro_use]\nextern crate quote;\n\nuse std::str;\n\nuse itertools::Itertools;\nuse proc_macro::TokenStream;\nuse syn::Ident;\n\n\/\/ Proc-macro crates can't export anything, so error chain definitions go in a private module.\nmod error {\n error_chain!();\n}\nuse error::*;\n\nmod field;\nuse field::Field;\n\nfn try_message(input: TokenStream) -> Result<TokenStream> {\n let syn::DeriveInput { ident, generics, body, .. } = syn::parse_derive_input(&input.to_string())?;\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n bail!(\"Message may not be derived for generic type\");\n }\n\n let fields = match body {\n syn::Body::Struct(syn::VariantData::Struct(fields)) => fields,\n syn::Body::Struct(syn::VariantData::Tuple(fields)) => fields,\n syn::Body::Struct(syn::VariantData::Unit) => Vec::new(),\n syn::Body::Enum(..) 
=> bail!(\"Message can not be derived for an enum\"),\n };\n\n let mut fields = fields.into_iter()\n .enumerate()\n .flat_map(|(idx, field)| {\n let field_ident = field.ident\n .unwrap_or_else(|| Ident::new(idx.to_string()));\n match Field::new(field.attrs) {\n Ok(Some(field)) => Some(Ok((field_ident, field))),\n Ok(None) => None,\n Err(err) => Some(Err(err).chain_err(|| {\n format!(\"invalid message field {}.{}\",\n ident, field_ident)\n })),\n }\n })\n .collect::<Result<Vec<(Ident, Field)>>>()?;\n\n \/\/ Sort the fields by tag number so that fields will be encoded in tag order.\n \/\/ TODO: This encodes oneof fields in the position of their lowest tag,\n \/\/ regardless of the currently occupied variant, is that consequential?\n \/\/ See: https:\/\/developers.google.com\/protocol-buffers\/docs\/encoding#order\n fields.sort_by_key(|&(_, ref field)| field.tags().into_iter().min().unwrap());\n let fields = fields;\n\n let mut tags = fields.iter().flat_map(|&(_, ref field)| field.tags()).collect::<Vec<_>>();\n let num_tags = tags.len();\n tags.sort();\n tags.dedup();\n if tags.len() != num_tags {\n bail!(\"message {} has fields with duplicate tags\", ident);\n }\n\n let dummy_const = Ident::new(format!(\"_IMPL_MESSAGE_FOR_{}\", ident));\n\n let encoded_len = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n field.encoded_len(&Ident::new(format!(\"self.{}\", field_ident)))\n });\n\n let encode = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n field.encode(&Ident::new(format!(\"self.{}\", field_ident)))\n });\n\n let merge = fields.iter().map(|&(ref field_ident, ref field)| {\n let merge = field.merge(&Ident::new(format!(\"self.{}\", field_ident)));\n let tags = field.tags().into_iter().map(|tag| quote!(#tag)).intersperse(quote!(|));\n quote!(#(#tags)* => #merge.map_err(|mut error| {\n error.push(stringify!(#ident), stringify!(#field_ident));\n error\n }),)\n });\n\n let default = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n let value = field.default();\n quote!(#field_ident: #value,)\n });\n\n let methods = fields.iter()\n .flat_map(|&(ref field_ident, ref field)| field.methods(field_ident))\n .collect::<Vec<_>>();\n let methods = if methods.is_empty() {\n quote!()\n } else {\n quote! {\n impl #ident {\n #(#methods)*\n }\n }\n };\n\n let expanded = quote! 
{\n #[allow(\n non_upper_case_globals,\n unused_attributes,\n unused_imports,\n unused_qualifications,\n unused_variables\n )]\n const #dummy_const: () = {\n\n extern crate prost as _prost;\n extern crate bytes as _bytes;\n\n #[automatically_derived]\n impl _prost::Message for #ident {\n #[inline]\n fn encode_raw<B>(&self, buf: &mut B) where B: _bytes::BufMut {\n #(#encode)*\n }\n\n #[inline]\n fn merge_field<B>(&mut self, buf: &mut B) -> ::std::result::Result<(), _prost::DecodeError>\n where B: _bytes::Buf {\n let (tag, wire_type) = _prost::encoding::decode_key(buf)?;\n match tag {\n #(#merge)*\n _ => _prost::encoding::skip_field(wire_type, buf),\n }\n }\n\n #[inline]\n fn encoded_len(&self) -> usize {\n 0 #(+ #encoded_len)*\n }\n }\n\n #[automatically_derived]\n impl Default for #ident {\n fn default() -> #ident {\n #ident {\n #(#default)*\n }\n }\n }\n };\n\n #methods\n };\n\n expanded.parse::<TokenStream>().map_err(|err| Error::from(format!(\"{:?}\", err)))\n}\n\n#[proc_macro_derive(Message, attributes(prost))]\npub fn message(input: TokenStream) -> TokenStream {\n try_message(input).unwrap()\n}\n\n#[proc_macro_derive(Enumeration, attributes(prost))]\npub fn enumeration(input: TokenStream) -> TokenStream {\n let syn::DeriveInput { ident, generics, body, .. } =\n syn::parse_derive_input(&input.to_string()).expect(\"unable to parse enumeration type\");\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n panic!(\"Enumeration may not be derived for generic type\");\n }\n\n let variants = match body {\n syn::Body::Struct(..) => panic!(\"Enumeration can not be derived for a struct\"),\n syn::Body::Enum(variants) => variants,\n };\n\n let variants = variants.into_iter().map(|syn::Variant { ident: variant, data, discriminant, .. }| {\n if let syn::VariantData::Unit = data {\n if let Some(discriminant) = discriminant {\n (variant, discriminant)\n } else {\n panic!(\"Enumeration variants must have a discriminant value: {}::{}\", ident, variant);\n }\n } else {\n panic!(\"Enumeration variants may not have fields: {}::{}\", ident, variant);\n }\n }).collect::<Vec<_>>();\n\n if variants.is_empty() {\n panic!(\"Enumeration must have at least one variant: {}\", ident);\n }\n\n let default = variants[0].0.clone();\n\n let dummy_const = Ident::new(format!(\"_IMPL_ENUMERATION_FOR_{}\", ident));\n let is_valid = variants.iter().map(|&(_, ref value)| quote!(#value => true));\n let from = variants.iter().map(|&(ref variant, ref value)| quote!(#value => ::std::option::Option::Some(#ident::#variant)));\n\n let is_valid_doc = format!(\"Returns `true` if `value` is a variant of `{}`.\", ident);\n let from_i32_doc = format!(\"Converts an `i32` to a `{}`, or `None` if `value` is not a valid variant.\", ident);\n\n let expanded = quote! 
{\n #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]\n const #dummy_const: () = {\n extern crate bytes as _bytes;\n extern crate prost as _prost;\n\n #[automatically_derived]\n impl #ident {\n\n #[doc=#is_valid_doc]\n pub fn is_valid(value: i32) -> bool {\n match value {\n #(#is_valid,)*\n _ => false,\n }\n }\n\n #[doc=#from_i32_doc]\n pub fn from_i32(value: i32) -> ::std::option::Option<#ident> {\n match value {\n #(#from,)*\n _ => ::std::option::Option::None,\n }\n }\n }\n\n #[automatically_derived]\n impl ::std::default::Default for #ident {\n fn default() -> #ident {\n #ident::#default\n }\n }\n\n #[automatically_derived]\n impl ::std::convert::From<#ident> for i32 {\n fn from(value: #ident) -> i32 {\n value as i32\n }\n }\n };\n };\n\n expanded.parse().unwrap()\n}\n\nfn try_oneof(input: TokenStream) -> Result<TokenStream> {\n let syn::DeriveInput { ident, generics, body, .. } = syn::parse_derive_input(&input.to_string())?;\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n panic!(\"Oneof may not be derived for generic type\");\n }\n\n let variants = match body {\n syn::Body::Enum(variants) => variants,\n syn::Body::Struct(..) => panic!(\"Oneof can not be derived for a struct\"),\n };\n\n \/\/ Map the variants into 'fields'.\n let fields = variants.into_iter().map(|variant| {\n let variant_ident = variant.ident;\n let attrs = variant.attrs;\n if let syn::VariantData::Tuple(fields) = variant.data {\n if fields.len() != 1 {\n bail!(\"invalid oneof variant {}::{}: oneof variants must have a single field\",\n ident, variant_ident);\n }\n match Field::new_oneof(attrs) {\n Ok(Some(field)) => Ok((variant_ident, field)),\n Ok(None) => bail!(\"invalid oneof variant {}::{}: oneof variants may not be ignored\",\n ident, variant_ident),\n Err(err) => bail!(\"invalid oneof variant {}::{}: {}\", ident, variant_ident, err),\n }\n } else {\n bail!(\"invalid oneof variant {}::{}: oneof variants must have a single field\",\n ident, variant_ident);\n }\n }).collect::<Result<Vec<(Ident, Field)>>>()?;\n\n let mut tags = fields.iter().flat_map(|&(ref variant_ident, ref field)| -> Result<u32> {\n if field.tags().len() > 1 {\n bail!(\"invalid oneof variant {}::{}: oneof variants may only have a single tag\",\n ident, variant_ident);\n }\n Ok(field.tags()[0])\n }).collect::<Vec<_>>();\n tags.sort();\n tags.dedup();\n if tags.len() != fields.len() {\n panic!(\"invalid oneof {}: variants have duplicate tags\", ident);\n }\n\n let dummy_const = Ident::new(format!(\"_IMPL_ONEOF_FOR_{}\", ident));\n\n let encode = fields.iter().map(|&(ref variant_ident, ref field)| {\n let encode = field.encode(&Ident::new(\"*value\"));\n quote!(#ident::#variant_ident(ref value) => { #encode })\n });\n\n let merge = fields.iter().map(|&(ref variant_ident, ref field)| {\n let tag = field.tags()[0];\n let merge = field.merge(&Ident::new(\"value\"));\n quote! {\n #tag => {\n let mut value = ::std::default::Default::default();\n #merge.map(|_| *field = ::std::option::Option::Some(#ident::#variant_ident(value)))\n }\n }\n });\n\n let encoded_len = fields.iter().map(|&(ref variant_ident, ref field)| {\n let encoded_len = field.encoded_len(&Ident::new(\"*value\"));\n quote!(#ident::#variant_ident(ref value) => #encoded_len)\n });\n\n let expanded = quote! 
{\n #[allow(\n non_upper_case_globals,\n unused_attributes,\n unused_imports,\n unused_qualifications,\n unused_variables\n )]\n const #dummy_const: () = {\n extern crate bytes as _bytes;\n extern crate prost as _prost;\n\n impl #ident {\n pub fn encode<B>(&self, buf: &mut B) where B: _bytes::BufMut {\n match *self {\n #(#encode,)*\n }\n }\n\n pub fn merge<B>(field: &mut ::std::option::Option<#ident>,\n tag: u32,\n wire_type: _prost::encoding::WireType,\n buf: &mut B)\n -> ::std::result::Result<(), _prost::DecodeError>\n where B: _bytes::Buf {\n match tag {\n #(#merge,)*\n _ => unreachable!(concat!(\"invalid \", stringify!(#ident), \" tag: {}\"), tag),\n }\n }\n\n pub fn encoded_len(&self) -> usize {\n match *self {\n #(#encoded_len,)*\n }\n }\n }\n };\n };\n\n expanded.parse::<TokenStream>().map_err(|err| Error::from(format!(\"{:?}\", err)))\n}\n\n#[proc_macro_derive(Oneof, attributes(prost))]\npub fn oneof(input: TokenStream) -> TokenStream {\n try_oneof(input).unwrap()\n}\n<commit_msg>Remove inline annotations in derive output<commit_after>#![doc(html_root_url = \"https:\/\/docs.rs\/prost-codegen\/0.1.1\")]\n\/\/ The `quote!` macro requires deep recursion.\n#![recursion_limit = \"4096\"]\n\nextern crate itertools;\nextern crate proc_macro;\nextern crate syn;\n\n#[macro_use]\nextern crate error_chain;\n#[macro_use]\nextern crate quote;\n\nuse std::str;\n\nuse itertools::Itertools;\nuse proc_macro::TokenStream;\nuse syn::Ident;\n\n\/\/ Proc-macro crates can't export anything, so error chain definitions go in a private module.\nmod error {\n error_chain!();\n}\nuse error::*;\n\nmod field;\nuse field::Field;\n\nfn try_message(input: TokenStream) -> Result<TokenStream> {\n let syn::DeriveInput { ident, generics, body, .. } = syn::parse_derive_input(&input.to_string())?;\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n bail!(\"Message may not be derived for generic type\");\n }\n\n let fields = match body {\n syn::Body::Struct(syn::VariantData::Struct(fields)) => fields,\n syn::Body::Struct(syn::VariantData::Tuple(fields)) => fields,\n syn::Body::Struct(syn::VariantData::Unit) => Vec::new(),\n syn::Body::Enum(..) 
=> bail!(\"Message can not be derived for an enum\"),\n };\n\n let mut fields = fields.into_iter()\n .enumerate()\n .flat_map(|(idx, field)| {\n let field_ident = field.ident\n .unwrap_or_else(|| Ident::new(idx.to_string()));\n match Field::new(field.attrs) {\n Ok(Some(field)) => Some(Ok((field_ident, field))),\n Ok(None) => None,\n Err(err) => Some(Err(err).chain_err(|| {\n format!(\"invalid message field {}.{}\",\n ident, field_ident)\n })),\n }\n })\n .collect::<Result<Vec<(Ident, Field)>>>()?;\n\n \/\/ Sort the fields by tag number so that fields will be encoded in tag order.\n \/\/ TODO: This encodes oneof fields in the position of their lowest tag,\n \/\/ regardless of the currently occupied variant, is that consequential?\n \/\/ See: https:\/\/developers.google.com\/protocol-buffers\/docs\/encoding#order\n fields.sort_by_key(|&(_, ref field)| field.tags().into_iter().min().unwrap());\n let fields = fields;\n\n let mut tags = fields.iter().flat_map(|&(_, ref field)| field.tags()).collect::<Vec<_>>();\n let num_tags = tags.len();\n tags.sort();\n tags.dedup();\n if tags.len() != num_tags {\n bail!(\"message {} has fields with duplicate tags\", ident);\n }\n\n let dummy_const = Ident::new(format!(\"_IMPL_MESSAGE_FOR_{}\", ident));\n\n let encoded_len = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n field.encoded_len(&Ident::new(format!(\"self.{}\", field_ident)))\n });\n\n let encode = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n field.encode(&Ident::new(format!(\"self.{}\", field_ident)))\n });\n\n let merge = fields.iter().map(|&(ref field_ident, ref field)| {\n let merge = field.merge(&Ident::new(format!(\"self.{}\", field_ident)));\n let tags = field.tags().into_iter().map(|tag| quote!(#tag)).intersperse(quote!(|));\n quote!(#(#tags)* => #merge.map_err(|mut error| {\n error.push(stringify!(#ident), stringify!(#field_ident));\n error\n }),)\n });\n\n let default = fields.iter()\n .map(|&(ref field_ident, ref field)| {\n let value = field.default();\n quote!(#field_ident: #value,)\n });\n\n let methods = fields.iter()\n .flat_map(|&(ref field_ident, ref field)| field.methods(field_ident))\n .collect::<Vec<_>>();\n let methods = if methods.is_empty() {\n quote!()\n } else {\n quote! {\n impl #ident {\n #(#methods)*\n }\n }\n };\n\n let expanded = quote! 
{\n #[allow(\n non_upper_case_globals,\n unused_attributes,\n unused_imports,\n unused_qualifications,\n unused_variables\n )]\n const #dummy_const: () = {\n\n extern crate prost as _prost;\n extern crate bytes as _bytes;\n\n #[automatically_derived]\n impl _prost::Message for #ident {\n fn encode_raw<B>(&self, buf: &mut B) where B: _bytes::BufMut {\n #(#encode)*\n }\n\n fn merge_field<B>(&mut self, buf: &mut B) -> ::std::result::Result<(), _prost::DecodeError>\n where B: _bytes::Buf {\n let (tag, wire_type) = _prost::encoding::decode_key(buf)?;\n match tag {\n #(#merge)*\n _ => _prost::encoding::skip_field(wire_type, buf),\n }\n }\n\n fn encoded_len(&self) -> usize {\n 0 #(+ #encoded_len)*\n }\n }\n\n #[automatically_derived]\n impl Default for #ident {\n fn default() -> #ident {\n #ident {\n #(#default)*\n }\n }\n }\n };\n\n #methods\n };\n\n expanded.parse::<TokenStream>().map_err(|err| Error::from(format!(\"{:?}\", err)))\n}\n\n#[proc_macro_derive(Message, attributes(prost))]\npub fn message(input: TokenStream) -> TokenStream {\n try_message(input).unwrap()\n}\n\n#[proc_macro_derive(Enumeration, attributes(prost))]\npub fn enumeration(input: TokenStream) -> TokenStream {\n let syn::DeriveInput { ident, generics, body, .. } =\n syn::parse_derive_input(&input.to_string()).expect(\"unable to parse enumeration type\");\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n panic!(\"Enumeration may not be derived for generic type\");\n }\n\n let variants = match body {\n syn::Body::Struct(..) => panic!(\"Enumeration can not be derived for a struct\"),\n syn::Body::Enum(variants) => variants,\n };\n\n let variants = variants.into_iter().map(|syn::Variant { ident: variant, data, discriminant, .. }| {\n if let syn::VariantData::Unit = data {\n if let Some(discriminant) = discriminant {\n (variant, discriminant)\n } else {\n panic!(\"Enumeration variants must have a discriminant value: {}::{}\", ident, variant);\n }\n } else {\n panic!(\"Enumeration variants may not have fields: {}::{}\", ident, variant);\n }\n }).collect::<Vec<_>>();\n\n if variants.is_empty() {\n panic!(\"Enumeration must have at least one variant: {}\", ident);\n }\n\n let default = variants[0].0.clone();\n\n let dummy_const = Ident::new(format!(\"_IMPL_ENUMERATION_FOR_{}\", ident));\n let is_valid = variants.iter().map(|&(_, ref value)| quote!(#value => true));\n let from = variants.iter().map(|&(ref variant, ref value)| quote!(#value => ::std::option::Option::Some(#ident::#variant)));\n\n let is_valid_doc = format!(\"Returns `true` if `value` is a variant of `{}`.\", ident);\n let from_i32_doc = format!(\"Converts an `i32` to a `{}`, or `None` if `value` is not a valid variant.\", ident);\n\n let expanded = quote! 
{\n #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]\n const #dummy_const: () = {\n extern crate bytes as _bytes;\n extern crate prost as _prost;\n\n #[automatically_derived]\n impl #ident {\n\n #[doc=#is_valid_doc]\n pub fn is_valid(value: i32) -> bool {\n match value {\n #(#is_valid,)*\n _ => false,\n }\n }\n\n #[doc=#from_i32_doc]\n pub fn from_i32(value: i32) -> ::std::option::Option<#ident> {\n match value {\n #(#from,)*\n _ => ::std::option::Option::None,\n }\n }\n }\n\n #[automatically_derived]\n impl ::std::default::Default for #ident {\n fn default() -> #ident {\n #ident::#default\n }\n }\n\n #[automatically_derived]\n impl ::std::convert::From<#ident> for i32 {\n fn from(value: #ident) -> i32 {\n value as i32\n }\n }\n };\n };\n\n expanded.parse().unwrap()\n}\n\nfn try_oneof(input: TokenStream) -> Result<TokenStream> {\n let syn::DeriveInput { ident, generics, body, .. } = syn::parse_derive_input(&input.to_string())?;\n\n if !generics.lifetimes.is_empty() ||\n !generics.ty_params.is_empty() ||\n !generics.where_clause.predicates.is_empty() {\n panic!(\"Oneof may not be derived for generic type\");\n }\n\n let variants = match body {\n syn::Body::Enum(variants) => variants,\n syn::Body::Struct(..) => panic!(\"Oneof can not be derived for a struct\"),\n };\n\n \/\/ Map the variants into 'fields'.\n let fields = variants.into_iter().map(|variant| {\n let variant_ident = variant.ident;\n let attrs = variant.attrs;\n if let syn::VariantData::Tuple(fields) = variant.data {\n if fields.len() != 1 {\n bail!(\"invalid oneof variant {}::{}: oneof variants must have a single field\",\n ident, variant_ident);\n }\n match Field::new_oneof(attrs) {\n Ok(Some(field)) => Ok((variant_ident, field)),\n Ok(None) => bail!(\"invalid oneof variant {}::{}: oneof variants may not be ignored\",\n ident, variant_ident),\n Err(err) => bail!(\"invalid oneof variant {}::{}: {}\", ident, variant_ident, err),\n }\n } else {\n bail!(\"invalid oneof variant {}::{}: oneof variants must have a single field\",\n ident, variant_ident);\n }\n }).collect::<Result<Vec<(Ident, Field)>>>()?;\n\n let mut tags = fields.iter().flat_map(|&(ref variant_ident, ref field)| -> Result<u32> {\n if field.tags().len() > 1 {\n bail!(\"invalid oneof variant {}::{}: oneof variants may only have a single tag\",\n ident, variant_ident);\n }\n Ok(field.tags()[0])\n }).collect::<Vec<_>>();\n tags.sort();\n tags.dedup();\n if tags.len() != fields.len() {\n panic!(\"invalid oneof {}: variants have duplicate tags\", ident);\n }\n\n let dummy_const = Ident::new(format!(\"_IMPL_ONEOF_FOR_{}\", ident));\n\n let encode = fields.iter().map(|&(ref variant_ident, ref field)| {\n let encode = field.encode(&Ident::new(\"*value\"));\n quote!(#ident::#variant_ident(ref value) => { #encode })\n });\n\n let merge = fields.iter().map(|&(ref variant_ident, ref field)| {\n let tag = field.tags()[0];\n let merge = field.merge(&Ident::new(\"value\"));\n quote! {\n #tag => {\n let mut value = ::std::default::Default::default();\n #merge.map(|_| *field = ::std::option::Option::Some(#ident::#variant_ident(value)))\n }\n }\n });\n\n let encoded_len = fields.iter().map(|&(ref variant_ident, ref field)| {\n let encoded_len = field.encoded_len(&Ident::new(\"*value\"));\n quote!(#ident::#variant_ident(ref value) => #encoded_len)\n });\n\n let expanded = quote! 
{\n #[allow(\n non_upper_case_globals,\n unused_attributes,\n unused_imports,\n unused_qualifications,\n unused_variables\n )]\n const #dummy_const: () = {\n extern crate bytes as _bytes;\n extern crate prost as _prost;\n\n impl #ident {\n pub fn encode<B>(&self, buf: &mut B) where B: _bytes::BufMut {\n match *self {\n #(#encode,)*\n }\n }\n\n pub fn merge<B>(field: &mut ::std::option::Option<#ident>,\n tag: u32,\n wire_type: _prost::encoding::WireType,\n buf: &mut B)\n -> ::std::result::Result<(), _prost::DecodeError>\n where B: _bytes::Buf {\n match tag {\n #(#merge,)*\n _ => unreachable!(concat!(\"invalid \", stringify!(#ident), \" tag: {}\"), tag),\n }\n }\n\n pub fn encoded_len(&self) -> usize {\n match *self {\n #(#encoded_len,)*\n }\n }\n }\n };\n };\n\n expanded.parse::<TokenStream>().map_err(|err| Error::from(format!(\"{:?}\", err)))\n}\n\n#[proc_macro_derive(Oneof, attributes(prost))]\npub fn oneof(input: TokenStream) -> TokenStream {\n try_oneof(input).unwrap()\n}\n<|endoftext|>"} {"text":"<commit_before>extern mod kiss3d;\nextern mod nalgebra;\nextern mod glfw;\n\n\/\/use nalgebra::mat::Rotation;\nuse nalgebra::na::Vec3;\nuse kiss3d::window;\nuse kiss3d::event::KeyReleased;\nuse kiss3d::camera::{Camera, FirstPersonStereo};\nuse kiss3d::post_processing::post_processing_effect::PostProcessingEffect;\nuse kiss3d::post_processing::oculus_stereo::OculusStereo;\n\/\/use kiss3d::post_processing::grayscales::Grayscales;\n\n#[start]\nfn start(argc: int, argv: **u8) -> int {\n std::rt::start_on_main_thread(argc, argv, main)\n}\n\nfn main() {\n do window::Window::spawn_size(\"kiss3d_stereo\", 1280, 800) |window| {\n let mut c = window.add_cube(1.0, 1.0, 1.0);\n \/\/c.position(\n\n let eye = Vec3::new(00.0f64, 0.0, 10.0);\n let at = Vec3::new(00.0f64, 0.0, 0.0);\n let first_person_stereo = @mut FirstPersonStereo::new(eye, at, 0.3f64);\n let camera = first_person_stereo as @mut Camera;\n window.set_camera(camera);\n\n \/\/ Position the window correctly. -6\/-26 takes care of icewm default\n \/\/ window decoration. 
Should probably just disable decorations (since\n \/\/ the top title is obscured anyway).\n window.glfw_window().set_pos(-6, -26);\n c.set_color(1.0, 0.0, 0.0);\n\n window.set_light(window::StickToCamera);\n\n let effect = Some(@mut OculusStereo::new() as @mut PostProcessingEffect);\n \/\/let effect = Some(@mut Grayscales::new() as @mut PostProcessingEffect);\n window.set_post_processing_effect(effect);\n let mut using_shader = true;\n\n do window.render_loop |w| {\n \/\/c.rotate_by(&Vec3::new(0.0f64, 0.014, 0.0))\n fn update_ipd(camera: @mut FirstPersonStereo, val: f64) -> bool {\n \/\/ cannot borrow `*camera` as immutable because it is also borrowed as mutable\n let ipd = camera.ipd();\n camera.set_ipd(ipd + val);\n println(fmt!(\"ipd = %f\", camera.ipd() as f64));\n true\n }\n do w.poll_events |w, event| {\n match *event {\n KeyReleased(key) => {\n match key {\n glfw::Key1 => {\n update_ipd(first_person_stereo, 0.1f64)\n },\n glfw::Key2 => {\n update_ipd(first_person_stereo, -0.1f64)\n },\n glfw::KeyS => {\n using_shader = match using_shader {\n false => {\n w.set_post_processing_effect(effect);\n true\n },\n true => {\n w.set_post_processing_effect(None);\n false\n },\n };\n false\n },\n _ => {\n false\n },\n }\n }\n _ => { true }\n }\n }\n }\n }\n}\n<commit_msg>Allow default keyboard handling on the stereo demo.<commit_after>extern mod kiss3d;\nextern mod nalgebra;\nextern mod glfw;\n\n\/\/use nalgebra::mat::Rotation;\nuse nalgebra::na::Vec3;\nuse kiss3d::window;\nuse kiss3d::event::KeyReleased;\nuse kiss3d::camera::{Camera, FirstPersonStereo};\nuse kiss3d::post_processing::post_processing_effect::PostProcessingEffect;\nuse kiss3d::post_processing::oculus_stereo::OculusStereo;\n\/\/use kiss3d::post_processing::grayscales::Grayscales;\n\n#[start]\nfn start(argc: int, argv: **u8) -> int {\n std::rt::start_on_main_thread(argc, argv, main)\n}\n\nfn main() {\n do window::Window::spawn_size(\"kiss3d_stereo\", 1280, 800) |window| {\n let mut c = window.add_cube(1.0, 1.0, 1.0);\n \/\/c.position(\n\n let eye = Vec3::new(00.0f64, 0.0, 10.0);\n let at = Vec3::new(00.0f64, 0.0, 0.0);\n let first_person_stereo = @mut FirstPersonStereo::new(eye, at, 0.3f64);\n let camera = first_person_stereo as @mut Camera;\n window.set_camera(camera);\n\n \/\/ Position the window correctly. -6\/-26 takes care of icewm default\n \/\/ window decoration. 
Should probably just disable decorations (since\n \/\/ the top title is obscured anyway).\n window.glfw_window().set_pos(-6, -26);\n c.set_color(1.0, 0.0, 0.0);\n\n window.set_light(window::StickToCamera);\n\n let effect = Some(@mut OculusStereo::new() as @mut PostProcessingEffect);\n \/\/let effect = Some(@mut Grayscales::new() as @mut PostProcessingEffect);\n window.set_post_processing_effect(effect);\n let mut using_shader = true;\n\n do window.render_loop |w| {\n \/\/c.rotate_by(&Vec3::new(0.0f64, 0.014, 0.0))\n fn update_ipd(camera: @mut FirstPersonStereo, val: f64) -> bool {\n \/\/ cannot borrow `*camera` as immutable because it is also borrowed as mutable\n let ipd = camera.ipd();\n camera.set_ipd(ipd + val);\n println(fmt!(\"ipd = %f\", camera.ipd() as f64));\n true\n }\n do w.poll_events |w, event| {\n match *event {\n KeyReleased(key) => {\n match key {\n glfw::Key1 => {\n update_ipd(first_person_stereo, 0.1f64)\n },\n glfw::Key2 => {\n update_ipd(first_person_stereo, -0.1f64)\n },\n glfw::KeyS => {\n using_shader = match using_shader {\n false => {\n w.set_post_processing_effect(effect);\n true\n },\n true => {\n w.set_post_processing_effect(None);\n false\n },\n };\n false\n },\n _ => {\n true\n },\n }\n }\n _ => { true }\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf, Component};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n errors: &mut bool)\n -> Option<PathBuf> {\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file)\n .unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\") || url.starts_with(\"https:\") ||\n url.starts_with(\"javascript:\") || url.starts_with(\"ftp:\") ||\n url.starts_with(\"irc:\") || url.starts_with(\"data:\") {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !url.is_empty() {\n path.pop();\n for 
part in Path::new(url).components() {\n match part {\n Component::Prefix(_) |\n Component::RootDir => panic!(),\n Component::CurDir => {}\n Component::ParentDir => { path.pop(); }\n Component::Normal(s) => { path.push(s); }\n }\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display());\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => panic!(format!(\"{}\", err)),\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(),\n i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n mut file: PathBuf,\n redirect: Redirect)\n -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n }\n Entry::Vacant(entry) => {\n let mut fp = File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n })?;\n fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n }\n };\n file.pop();\n match maybe_redirect.map(|url| file.join(url)) {\n Some(redirect_file) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = 
rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<commit_msg>fix up linkchecker<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Script to check the validity of `href` links in our HTML documentation.\n\/\/!\n\/\/! In the past we've been quite error prone to writing in broken links as most\n\/\/! of them are manually rather than automatically added. As files move over\n\/\/! time or apis change old links become stale or broken. The purpose of this\n\/\/! script is to check all relative links in our documentation to make sure they\n\/\/! actually point to a valid place.\n\/\/!\n\/\/! Currently this doesn't actually do any HTML parsing or anything fancy like\n\/\/! that, it just has a simple \"regex\" to search for `href` and `id` tags.\n\/\/! These values are then translated to file URLs if possible and then the\n\/\/! destination is asserted to exist.\n\/\/!\n\/\/! A few whitelisted exceptions are allowed as there's known bugs in rustdoc,\n\/\/! but this should catch the majority of \"broken link\" cases.\n\nuse std::env;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf, Component};\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::Entry;\n\nuse Redirect::*;\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {:?}\", stringify!($e), e),\n })\n}\n\nfn main() {\n let docs = env::args().nth(1).unwrap();\n let docs = env::current_dir().unwrap().join(docs);\n let mut errors = false;\n walk(&mut HashMap::new(), &docs, &docs, &mut errors);\n if errors {\n panic!(\"found some broken links\");\n }\n}\n\n#[derive(Debug)]\npub enum LoadError {\n IOError(std::io::Error),\n BrokenRedirect(PathBuf, std::io::Error),\n IsRedirect,\n}\n\nenum Redirect {\n SkipRedirect,\n FromRedirect(bool),\n}\n\nstruct FileEntry {\n source: String,\n ids: HashSet<String>,\n}\n\ntype Cache = HashMap<PathBuf, FileEntry>;\n\nimpl FileEntry {\n fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {\n if self.ids.is_empty() {\n with_attrs_in_source(contents, \" id\", |fragment, i| {\n let frag = fragment.trim_left_matches(\"#\").to_owned();\n if !self.ids.insert(frag) {\n *errors = true;\n println!(\"{}:{}: id is not unique: `{}`\", file.display(), i, fragment);\n }\n });\n }\n }\n}\n\nfn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {\n for entry in t!(dir.read_dir()).map(|e| t!(e)) {\n let path = entry.path();\n let kind = t!(entry.file_type());\n if kind.is_dir() {\n walk(cache, root, &path, errors);\n } else {\n let pretty_path = check(cache, root, &path, errors);\n if let Some(pretty_path) = pretty_path {\n let entry = cache.get_mut(&pretty_path).unwrap();\n \/\/ we don't need the source anymore,\n \/\/ so drop to reduce memory-usage\n entry.source = String::new();\n }\n }\n }\n}\n\nfn check(cache: &mut Cache,\n root: &Path,\n file: &Path,\n errors: &mut bool)\n -> Option<PathBuf> {\n \/\/ ignore js files as they are not prone to errors as the rest of the\n \/\/ documentation is and they otherwise bring up false positives.\n if file.extension().and_then(|s| s.to_str()) == Some(\"js\") {\n return None;\n }\n\n \/\/ Unfortunately we're not 100% full of valid links today to we need a few\n \/\/ whitelists to get this past `make check` today.\n \/\/ FIXME(#32129)\n if file.ends_with(\"std\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32553)\n if file.ends_with(\"collections\/string\/struct.String.html\") {\n return None;\n }\n \/\/ FIXME(#32130)\n if file.ends_with(\"btree_set\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/struct.BTreeSet.html\") ||\n file.ends_with(\"collections\/btree_map\/struct.BTreeMap.html\") ||\n file.ends_with(\"collections\/hash_map\/struct.HashMap.html\") {\n return None;\n }\n\n let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);\n let (pretty_file, contents) = match res {\n Ok(res) => res,\n Err(_) => return None,\n };\n {\n cache.get_mut(&pretty_file)\n .unwrap()\n .parse_ids(&pretty_file, &contents, errors);\n }\n\n \/\/ Search for anything that's the regex 'href[ ]*=[ ]*\".*?\"'\n with_attrs_in_source(&contents, \" href\", |url, i| {\n \/\/ Ignore external URLs\n if url.starts_with(\"http:\") || url.starts_with(\"https:\") ||\n url.starts_with(\"javascript:\") || url.starts_with(\"ftp:\") ||\n url.starts_with(\"irc:\") || url.starts_with(\"data:\") {\n return;\n }\n let mut parts = url.splitn(2, \"#\");\n let url = parts.next().unwrap();\n let fragment = parts.next();\n let mut parts = url.splitn(2, \"?\");\n let url = parts.next().unwrap();\n\n \/\/ Once we've plucked out the URL, parse it using our base url and\n \/\/ then try to extract a file path.\n let mut path = file.to_path_buf();\n if !url.is_empty() {\n path.pop();\n for 
part in Path::new(url).components() {\n match part {\n Component::Prefix(_) |\n Component::RootDir => panic!(),\n Component::CurDir => {}\n Component::ParentDir => { path.pop(); }\n Component::Normal(s) => { path.push(s); }\n }\n }\n }\n\n if let Some(extension) = path.extension() {\n \/\/ don't check these files\n if extension == \"png\" {\n return;\n }\n }\n\n \/\/ Alright, if we've found a file name then this file had better\n \/\/ exist! If it doesn't then we register and print an error.\n if path.exists() {\n if path.is_dir() {\n \/\/ Links to directories show as directory listings when viewing\n \/\/ the docs offline so it's best to avoid them.\n *errors = true;\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}:{}: directory link - {}\",\n pretty_file.display(),\n i + 1,\n pretty_path.display());\n return;\n }\n let res = load_file(cache, root, path.clone(), FromRedirect(false));\n let (pretty_path, contents) = match res {\n Ok(res) => res,\n Err(LoadError::IOError(err)) => {\n panic!(format!(\"error loading {}: {}\", path.display(), err));\n }\n Err(LoadError::BrokenRedirect(target, _)) => {\n *errors = true;\n println!(\"{}:{}: broken redirect to {}\",\n pretty_file.display(),\n i + 1,\n target.display());\n return;\n }\n Err(LoadError::IsRedirect) => unreachable!(),\n };\n\n \/\/ we don't check the book for fragments because they're added via JS\n for book in [\"book\/\", \"nomicon\/\"].iter() {\n if !pretty_path.to_str().unwrap().starts_with(book) {\n return;\n }\n }\n\n if let Some(ref fragment) = fragment {\n \/\/ Fragments like `#1-6` are most likely line numbers to be\n \/\/ interpreted by javascript, so we're ignoring these\n if fragment.splitn(2, '-')\n .all(|f| f.chars().all(|c| c.is_numeric())) {\n return;\n }\n\n let entry = &mut cache.get_mut(&pretty_path).unwrap();\n entry.parse_ids(&pretty_path, &contents, errors);\n\n if !entry.ids.contains(*fragment) {\n *errors = true;\n print!(\"{}:{}: broken link fragment \",\n pretty_file.display(),\n i + 1);\n println!(\"`#{}` pointing to `{}`\", fragment, pretty_path.display());\n };\n }\n } else {\n *errors = true;\n print!(\"{}:{}: broken link - \", pretty_file.display(), i + 1);\n let pretty_path = path.strip_prefix(root).unwrap_or(&path);\n println!(\"{}\", pretty_path.display());\n }\n });\n Some(pretty_file)\n}\n\nfn load_file(cache: &mut Cache,\n root: &Path,\n mut file: PathBuf,\n redirect: Redirect)\n -> Result<(PathBuf, String), LoadError> {\n let mut contents = String::new();\n let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));\n\n let maybe_redirect = match cache.entry(pretty_file.clone()) {\n Entry::Occupied(entry) => {\n contents = entry.get().source.clone();\n None\n }\n Entry::Vacant(entry) => {\n let mut fp = File::open(file.clone()).map_err(|err| {\n if let FromRedirect(true) = redirect {\n LoadError::BrokenRedirect(file.clone(), err)\n } else {\n LoadError::IOError(err)\n }\n })?;\n fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;\n\n let maybe = maybe_redirect(&contents);\n if maybe.is_some() {\n if let SkipRedirect = redirect {\n return Err(LoadError::IsRedirect);\n }\n } else {\n entry.insert(FileEntry {\n source: contents.clone(),\n ids: HashSet::new(),\n });\n }\n maybe\n }\n };\n file.pop();\n match maybe_redirect.map(|url| file.join(url)) {\n Some(redirect_file) => {\n let path = PathBuf::from(redirect_file);\n load_file(cache, root, path, FromRedirect(true))\n }\n None => Ok((pretty_file, contents)),\n }\n}\n\nfn 
maybe_redirect(source: &str) -> Option<String> {\n const REDIRECT: &'static str = \"<p>Redirecting to <a href=\";\n\n let mut lines = source.lines();\n let redirect_line = match lines.nth(6) {\n Some(l) => l,\n None => return None,\n };\n\n redirect_line.find(REDIRECT).map(|i| {\n let rest = &redirect_line[(i + REDIRECT.len() + 1)..];\n let pos_quote = rest.find('\"').unwrap();\n rest[..pos_quote].to_owned()\n })\n}\n\nfn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {\n for (i, mut line) in contents.lines().enumerate() {\n while let Some(j) = line.find(attr) {\n let rest = &line[j + attr.len()..];\n line = rest;\n let pos_equals = match rest.find(\"=\") {\n Some(i) => i,\n None => continue,\n };\n if rest[..pos_equals].trim_left_matches(\" \") != \"\" {\n continue;\n }\n\n let rest = &rest[pos_equals + 1..];\n\n let pos_quote = match rest.find(&['\"', '\\''][..]) {\n Some(i) => i,\n None => continue,\n };\n let quote_delim = rest.as_bytes()[pos_quote] as char;\n\n if rest[..pos_quote].trim_left_matches(\" \") != \"\" {\n continue;\n }\n let rest = &rest[pos_quote + 1..];\n let url = match rest.find(quote_delim) {\n Some(i) => &rest[..i],\n None => continue,\n };\n f(url, i)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before> #![feature(core)]\nextern crate eve;\nextern crate getopts;\nextern crate url;\nextern crate core;\n\nuse std::thread;\nuse std::env;\n\nuse getopts::Options;\n\nuse std::net::SocketAddr;\nuse core::str::FromStr;\n\nuse eve::server;\nuse eve::login;\n\n#[allow(dead_code)]\nfn main() {\n\n\t\/\/ handle command line arguments\n\tlet args: Vec<String> = env::args().collect();\n\n\t\/\/ define the command line arguments\n\tlet mut opts = Options::new();\n opts.optopt(\"f\", \"faddress\", \"specify a socket address for the static file server. 
Defaults to 0.0.0.0:8080\",\"SOCKET ADDRESS\");\n opts.optopt(\"s\", \"saves\", \"specify the location of the saves directory\",\"PATH\");\n opts.optflag(\"h\", \"help\", \"prints all options and usage\");\n\n \/\/ parse raw input arguments into options\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => { m }\n Err(f) => { panic!(f.to_string()) }\n };\n\n \/\/ print the help menu\n if matches.opt_present(\"h\") {\n print!(\"{}\", opts.usage(\"\"));\n return;\n }\n\n \/\/ parse static file server address\n let default_addr = SocketAddr::from_str(\"0.0.0.0:8080\").unwrap();\n let addr = match matches.opt_str(\"f\") {\n\t\tSome(ip) => {\n\t\t\tmatch SocketAddr::from_str(&*ip) {\n\t\t\t\tOk(addr) => addr,\n\t\t\t\tErr(_) => {\n\t\t\t\t\tprintln!(\"WARNING: Could not parse static file server address.\\nDefaulting to {:?}\",default_addr);\n\t\t\t\t\tdefault_addr\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tNone => default_addr,\n\t};\n\n\t\/\/ parse the autosave file location\n let default_autosave = \"..\/saves\/\".to_owned();\n let autosave = match matches.opt_str(\"s\") {\n\t\tSome(path) => path,\n\t\tNone => default_autosave,\n\t};\n\n\tthread::spawn(move || login::run(addr.clone()));\n server::run(&*autosave);\n}<commit_msg>rename for clarity<commit_after> #![feature(core)]\nextern crate eve;\nextern crate getopts;\nextern crate url;\nextern crate core;\n\nuse std::thread;\nuse std::env;\n\nuse getopts::Options;\n\nuse std::net::SocketAddr;\nuse core::str::FromStr;\n\nuse eve::server;\nuse eve::login;\n\n#[allow(dead_code)]\nfn main() {\n\n\t\/\/ handle command line arguments\n\tlet args: Vec<String> = env::args().collect();\n\n\t\/\/ define the command line arguments\n\tlet mut opts = Options::new();\n opts.optopt(\"f\", \"faddress\", \"specify a socket address for the static file server. 
Defaults to 0.0.0.0:8080\",\"SOCKET ADDRESS\");\n opts.optopt(\"s\", \"saves\", \"specify the location of the saves directory\",\"PATH\");\n opts.optflag(\"h\", \"help\", \"prints all options and usage\");\n\n \/\/ parse raw input arguments into options\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => { m }\n Err(f) => { panic!(f.to_string()) }\n };\n\n \/\/ print the help menu\n if matches.opt_present(\"h\") {\n print!(\"{}\", opts.usage(\"\"));\n return;\n }\n\n \/\/ parse static file server address\n let default_addr = SocketAddr::from_str(\"0.0.0.0:8080\").unwrap();\n let addr = match matches.opt_str(\"f\") {\n\t\tSome(ip) => {\n\t\t\tmatch SocketAddr::from_str(&*ip) {\n\t\t\t\tOk(addr) => addr,\n\t\t\t\tErr(_) => {\n\t\t\t\t\tprintln!(\"WARNING: Could not parse static file server address.\\nDefaulting to {:?}\",default_addr);\n\t\t\t\t\tdefault_addr\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tNone => default_addr,\n\t};\n\n\t\/\/ parse the autosave file location\n\n let default_saves_dir = \"..\/saves\/\".to_owned();\n let autosave = match matches.opt_str(\"s\") {\n\t\tSome(path) => path,\n\t\tNone => default_saves_dir,\n\t};\n\n\tthread::spawn(move || login::run(addr.clone()));\n server::run(&*autosave);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add problem 5 rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix error where cargo test for curryrs failed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reimplement remove_tag() with get_tags()\/set_tags()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove calls to exit() and replace them with error propagation up to main()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add request and response types for Hotel USB<commit_after>use super::serialize::Serialize;\n\n#[repr(C)]\npub struct DeviceDescriptor {\n pub b_length: u8,\n pub b_descriptor_type: u8,\n pub bcd_usb: u16,\n pub b_device_class: u8,\n pub b_device_sub_class: u8,\n pub b_device_protocol: u8,\n pub b_max_packet_size0: u8,\n pub id_vendor: u16,\n pub id_product: u16,\n pub bcd_device: u16,\n pub i_manufacturer: u8,\n pub i_product: u8,\n pub i_serial_number: u8,\n pub b_num_configurations: u8\n}\n\nunsafe impl Serialize for DeviceDescriptor {}\n\npub struct ConfigurationDescriptor {\n\tpub b_length: u8,\n\tpub b_descriptor_type: u8,\n\tpub w_total_length: u16,\n\tpub b_num_interfaces: u8,\n\tpub b_configuration_value: u8,\n\tpub i_configuration: u8,\n\tpub bm_attributes: u8,\n\tpub b_max_power: u8\n}\n\nimpl ConfigurationDescriptor {\n \/\/\/ Creates an empty configuration with no interface descriptors.\n \/\/\/\n \/\/\/ `bm_attributes` set to self powered, and not remote wakeup\n \/\/\/ `b_max_power` set to 100ma\n pub fn new() -> ConfigurationDescriptor {\n ConfigurationDescriptor {\n b_length: 9,\n b_descriptor_type: 2,\n w_total_length: 9,\n b_num_interfaces: 0,\n b_configuration_value: 1,\n i_configuration: 0,\n bm_attributes: 0b11000000,\n b_max_power: 50\n }\n }\n}\n\nunsafe impl Serialize for ConfigurationDescriptor {}\n\n#[repr(u8)]\n#[derive(Clone, Copy)]\n#[allow(dead_code)]\npub enum SetupRequestType {\n GetStatus = 0,\n ClearFeature = 1,\n\n SetFeature = 3,\n\n SetAddress = 5,\n GetDescriptor = 6,\n SetDescriptor = 7,\n GetConfiguration = 8,\n SetConfiguration = 9,\n GetInterface = 10,\n SetInterface = 11,\n SynchFrame = 12\n}\n\npub struct SetupRequest {\n pub bm_request_type: u8,\n pub b_request: SetupRequestType,\n pub w_value: u16,\n pub w_index: u16,\n pub w_length: 
u16,\n}\n\nimpl SetupRequest {\n pub fn parse(buf: &[u8; 64]) -> &SetupRequest {\n unsafe {\n ::core::mem::transmute(buf.as_ptr())\n }\n }\n\n pub fn data_direction(&self) -> u8 {\n (self.bm_request_type & 0x80) >> 7\n }\n\n pub fn req_type(&self) -> u8 {\n (self.bm_request_type & 0x60) >> 5\n }\n\n pub fn recipient(&self) -> u8 {\n self.bm_request_type & 0x1f\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Empty Arguments = No Arg<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Advent updates<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add clap<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #97<commit_after>use core::num::{ One, Zero };\n\nuse std::bigint::{ BigUint };\n\nuse common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 97,\n answer: \"8739992577\",\n solver: solve\n};\n\n#[inline(always)]\nfn pow_unit(base: &BigUint, exp: &BigUint, unit: &BigUint) -> BigUint {\n let two = BigUint::from_uint(2);\n let mut result = One::one();\n let mut itr = exp.clone();\n let mut pow = base.clone();\n while !itr.is_zero() {\n if itr % two == One::one() {\n result = mul_unit(&result, &pow, unit);\n }\n itr >>= One::one();\n pow = mul_unit(&pow, &pow, unit);\n }\n return result;\n}\n\n#[inline(always)]\nfn mul_unit(a: &BigUint, b: &BigUint, unit: &BigUint) -> BigUint {\n (a * *b) % *unit\n}\n\n#[inline(always)]\nfn add_unit(a: &BigUint, b: &BigUint, unit: &BigUint) -> BigUint {\n (a + *b) % *unit\n}\n\nfn solve() -> ~str {\n let unit = BigUint::from_uint(100_0000_0000);\n return add_unit(\n &mul_unit(&BigUint::from_uint(28433),\n &pow_unit(&BigUint::from_uint(2), &BigUint::from_uint(7830457), &unit),\n &unit),\n &One::one(),\n &unit\n ).to_str();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nfn non_elidable<'a, 'b>(a: &'a u8, b: &'b u8) -> &'a u8 { a }\n\n\/\/ the boundaries of elision\nstatic NON_ELIDABLE_FN : &fn(&u8, &u8) -> &u8 =\n\/\/^ERROR: missing lifetime specifier\n &(non_elidable as fn(&u8, &u8) -> &u8);\n\ntype Baz<'a> = fn(&'a [u8]) -> Option<u8>;\n\nfn baz(e: &[u8]) -> Option<u8> { e.first().map(|x| *x) }\n\nstatic STATIC_BAZ : &Baz<'static> = &(baz as Baz);\nconst CONST_BAZ : &Baz<'static> = &(baz as Baz);\n\nfn main() {\n let y = [1u8, 2, 3];\n\n \/\/surprisingly this appears to work, so lifetime < `'static` is valid\n assert_eq!(Some(1), STATIC_BAZ(y));\n assert_eq!(Some(1), CONST_BAZ(y));\n}\n<commit_msg>fixed compile-fail test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nfn non_elidable<'a, 'b>(a: &'a u8, b: &'b u8) -> &'a u8 { a }\n\n\/\/ the boundaries of elision\nstatic NON_ELIDABLE_FN : &fn(&u8, &u8) -> &u8 =\n\/\/~^ ERROR: missing lifetime specifier\n &(non_elidable as fn(&u8, &u8) -> &u8);\n\ntype Baz<'a> = fn(&'a [u8]) -> Option<u8>;\n\nfn baz(e: &[u8]) -> Option<u8> { e.first().map(|x| *x) }\n\nstatic STATIC_BAZ : &Baz<'static> = &(baz as Baz);\nconst CONST_BAZ : &Baz<'static> = &(baz as Baz);\n\nfn main() {\n let x = &[1u8, 2, 3];\n let y = x;\n\n \/\/surprisingly this appears to work, so lifetime < `'static` is valid\n assert_eq!(Some(1), STATIC_BAZ(y));\n assert_eq!(Some(1), CONST_BAZ(y));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(std_misc)]\n\npub type HANDLE = u32;\npub type DWORD = u32;\npub type SIZE_T = u32;\npub type LPVOID = usize;\npub type BOOL = u8;\n\n#[cfg(windows)]\nmod kernel32 {\n use super::{HANDLE, DWORD, SIZE_T, LPVOID, BOOL};\n\n extern \"system\" {\n pub fn GetProcessHeap() -> HANDLE;\n pub fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T)\n -> LPVOID;\n pub fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;\n }\n}\n\n\n#[cfg(windows)]\npub fn main() {\n let heap = unsafe { kernel32::GetProcessHeap() };\n let mem = unsafe { kernel32::HeapAlloc(heap, 0, 100) };\n assert!(mem != 0);\n let res = unsafe { kernel32::HeapFree(heap, 0, mem) };\n assert!(res != 0);\n}\n\n#[cfg(not(windows))]\npub fn main() { }\n<commit_msg>Correct type definition of HANDLE.<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(std_misc)]\n\npub type HANDLE = usize;\npub type DWORD = u32;\npub type SIZE_T = u32;\npub type LPVOID = usize;\npub type BOOL = u8;\n\n#[cfg(windows)]\nmod kernel32 {\n use super::{HANDLE, DWORD, SIZE_T, LPVOID, BOOL};\n\n extern \"system\" {\n pub fn GetProcessHeap() -> HANDLE;\n pub fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T)\n -> LPVOID;\n pub fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;\n }\n}\n\n\n#[cfg(windows)]\npub fn main() {\n let heap = unsafe { kernel32::GetProcessHeap() };\n let mem = unsafe { kernel32::HeapAlloc(heap, 0, 100) };\n assert!(mem != 0);\n let res = unsafe { kernel32::HeapFree(heap, 0, mem) };\n assert!(res != 0);\n}\n\n#[cfg(not(windows))]\npub fn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test for fake self<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Foo {\n x: i32,\n}\n\nimpl Foo {\n fn foo(&self) -> i32 {\n this.x\n \/\/~^ ERROR cannot find value `this` in this scope\n }\n\n fn bar(&self) -> i32 {\n this.foo()\n \/\/~^ ERROR cannot find value `this` in this scope\n }\n\n fn baz(&self) -> i32 {\n my.bar()\n \/\/~^ ERROR cannot find value `this` in this scope\n }\n}\n\nfn main() {\n let this = vec![1, 2, 3];\n let my = vec![1, 2, 3];\n let len = this.len();\n let len = my.len();\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/**\n * Utility helpers for CLI\n *\/\npub mod cli {\n use clap::ArgMatches;\n\n use runtime::Runtime;\n\n \/**\n * Get a commandline option \"tags\" and split the argument by \",\" to be able to provide a\n * Vec<String> with the argument as array.\n *\/\n pub fn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {\n\n fn reject_if_with_spaces(e: &String) -> bool {\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n false\n } else {\n true\n }\n }\n\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags| {\n Some(tags.split(\",\")\n .into_iter()\n .map(|s| s.to_string())\n .filter(|e| reject_if_with_spaces(e))\n .collect()\n )\n }).or(Some(vec![])).unwrap()\n }\n\n}\n<commit_msg>Remove unused variable<commit_after>\/**\n * Utility helpers for CLI\n *\/\npub mod cli {\n use clap::ArgMatches;\n\n use runtime::Runtime;\n\n \/**\n * Get a commandline option \"tags\" and split the argument by \",\" to be able to provide a\n * Vec<String> with the argument as array.\n *\/\n pub fn get_tags<'a>(sub: &ArgMatches<'a, 'a>) -> Vec<String> {\n\n fn reject_if_with_spaces(e: &String) -> bool {\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n false\n } else {\n true\n }\n }\n\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags| {\n Some(tags.split(\",\")\n .into_iter()\n .map(|s| s.to_string())\n .filter(|e| reject_if_with_spaces(e))\n .collect()\n )\n }).or(Some(vec![])).unwrap()\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Optimize: Do not compute lowercase key twice<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple profile filter rust script (#1065)<commit_after>#!\/bin\/bash\n#![forbid(unsafe_code)]\/* This line is ignored by bash\n# This block is ignored by rustc\nCHANNEL=\"release\"\nsource .\/config.sh\nCG_CLIF_JIT=1 PROFILE=$1 OUTPUT=$2 exec $RUSTC $0 --crate-type bin -Cprefer-dynamic\n#*\/\n\n\/\/! This program filters away uninteresting samples and trims uninteresting frames for stackcollapse\n\/\/! profiles.\n\/\/!\n\/\/! Usage: .\/filter_profile.rs <profile in stackcollapse format> <output file>\n\/\/!\n\/\/! This file is specially crafted to be both a valid bash script and valid rust source file. If\n\/\/! 
executed as bash script this will run the rust source using cg_clif in JIT mode.\n\nuse std::io::Write;\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n let profile_name = std::env::var(\"PROFILE\").unwrap();\n let output_name = std::env::var(\"OUTPUT\").unwrap();\n if profile_name.is_empty() || output_name.is_empty() {\n println!(\"Usage: .\/filter_profile.rs <profile in stackcollapse format> <output file>\");\n std::process::exit(1);\n }\n let profile = std::fs::read_to_string(profile_name)\n .map_err(|err| format!(\"Failed to read profile {}\", err))?;\n let mut output = std::fs::OpenOptions::new()\n .create(true)\n .write(true)\n .truncate(true)\n .open(output_name)?;\n\n for line in profile.lines() {\n let mut stack = &line[..line.rfind(\" \").unwrap()];\n let count = &line[line.rfind(\" \").unwrap() + 1..];\n\n \/\/ Filter away uninteresting samples\n if !stack.contains(\"rustc_codegen_cranelift\") {\n continue;\n }\n\n if stack.contains(\"rustc_mir::monomorphize::partitioning::collect_and_partition_mono_items\")\n || stack.contains(\"rustc_incremental::assert_dep_graph::assert_dep_graph\")\n || stack.contains(\"rustc_symbol_mangling::test::report_symbol_names\")\n {\n continue;\n }\n\n \/\/ Trim start\n if let Some(index) = stack.find(\"rustc_interface::passes::configure_and_expand\") {\n stack = &stack[index..];\n } else if let Some(index) = stack.find(\"rustc_interface::passes::analysis\") {\n stack = &stack[index..];\n } else if let Some(index) = stack.find(\"rustc_interface::passes::start_codegen\") {\n stack = &stack[index..];\n } else if let Some(index) = stack.find(\"rustc_interface::queries::Linker::link\") {\n stack = &stack[index..];\n }\n\n if let Some(index) = stack.find(\"rustc_codegen_cranelift::driver::aot::module_codegen\") {\n stack = &stack[index..];\n }\n\n \/\/ Trim end\n const MALLOC: &str = \"malloc\";\n if let Some(index) = stack.find(MALLOC) {\n stack = &stack[..index + MALLOC.len()];\n }\n\n const FREE: &str = \"free\";\n if let Some(index) = stack.find(FREE) {\n stack = &stack[..index + FREE.len()];\n }\n\n const TYPECK_ITEM_BODIES: &str = \"rustc_typeck::check::typeck_item_bodies\";\n if let Some(index) = stack.find(TYPECK_ITEM_BODIES) {\n stack = &stack[..index + TYPECK_ITEM_BODIES.len()];\n }\n\n const COLLECT_AND_PARTITION_MONO_ITEMS: &str =\n \"rustc_mir::monomorphize::partitioning::collect_and_partition_mono_items\";\n if let Some(index) = stack.find(COLLECT_AND_PARTITION_MONO_ITEMS) {\n stack = &stack[..index + COLLECT_AND_PARTITION_MONO_ITEMS.len()];\n }\n\n const ASSERT_DEP_GRAPH: &str = \"rustc_incremental::assert_dep_graph::assert_dep_graph\";\n if let Some(index) = stack.find(ASSERT_DEP_GRAPH) {\n stack = &stack[..index + ASSERT_DEP_GRAPH.len()];\n }\n\n const REPORT_SYMBOL_NAMES: &str = \"rustc_symbol_mangling::test::report_symbol_names\";\n if let Some(index) = stack.find(REPORT_SYMBOL_NAMES) {\n stack = &stack[..index + REPORT_SYMBOL_NAMES.len()];\n }\n\n const ENCODE_METADATA: &str = \"rustc_middle::ty::context::TyCtxt::encode_metadata\";\n if let Some(index) = stack.find(ENCODE_METADATA) {\n stack = &stack[..index + ENCODE_METADATA.len()];\n }\n\n const SUBST_AND_NORMALIZE_ERASING_REGIONS: &str = \"rustc_middle::ty::normalize_erasing_regions::<impl rustc_middle::ty::context::TyCtxt>::subst_and_normalize_erasing_regions\";\n if let Some(index) = stack.find(SUBST_AND_NORMALIZE_ERASING_REGIONS) {\n stack = &stack[..index + SUBST_AND_NORMALIZE_ERASING_REGIONS.len()];\n }\n\n const NORMALIZE_ERASING_LATE_BOUND_REGIONS: &str = 
\"rustc_middle::ty::normalize_erasing_regions::<impl rustc_middle::ty::context::TyCtxt>::normalize_erasing_late_bound_regions\";\n if let Some(index) = stack.find(NORMALIZE_ERASING_LATE_BOUND_REGIONS) {\n stack = &stack[..index + NORMALIZE_ERASING_LATE_BOUND_REGIONS.len()];\n }\n\n const INST_BUILD: &str = \"<cranelift_frontend::frontend::FuncInstBuilder as cranelift_codegen::ir::builder::InstBuilderBase>::build\";\n if let Some(index) = stack.find(INST_BUILD) {\n stack = &stack[..index + INST_BUILD.len()];\n }\n\n output.write_all(stack.as_bytes())?;\n output.write_all(&*b\" \")?;\n output.write_all(count.as_bytes())?;\n output.write_all(&*b\"\\n\")?;\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>\/*******************************************************************************\n *\n * kit\/kernel\/terminal.rs\n *\n * vim:ft=rust:ts=4:sw=4:et:tw=80\n *\n * Copyright (C) 2015, Devyn Cairns\n * Redistribution of this file is permitted under the terms of the simplified\n * BSD license. See LICENSE for more information.\n *\n ******************************************************************************\/\n\n\/\/! Early text mode 80x25 terminal handler.\n\nuse core::prelude::*;\n\nuse core::fmt;\n\n#[derive(Copy)]\npub enum Color {\n Black = 0,\n Blue = 1,\n Green = 2,\n Cyan = 3,\n Red = 4,\n Magenta = 5,\n Brown = 6,\n LightGrey = 7,\n DarkGrey = 8,\n LightBlue = 9,\n LightGreen = 10,\n LightCyan = 11,\n LightRed = 12,\n LightMagenta = 13,\n LightBrown = 14,\n White = 15,\n}\n\n\/\/\/ A terminal.\npub trait Terminal: fmt::Write {\n fn reset(&mut self) -> fmt::Result;\n fn clear(&mut self) -> fmt::Result;\n\n fn get_cursor(&self) -> (usize, usize);\n fn set_cursor(&mut self, row: usize, col: usize) -> fmt::Result;\n\n fn get_color(&self) -> (Color, Color);\n fn set_color(&mut self, fg: Color, bg: Color) -> fmt::Result;\n\n fn put_raw_byte(&mut self,\n byte: u8,\n fg: Color,\n bg: Color,\n row: usize,\n col: usize) -> fmt::Result;\n\n \/\/\/ Does not flush.\n fn write_raw_byte(&mut self, byte: u8) -> fmt::Result;\n\n \/\/\/ Does not flush.\n fn write_raw_bytes(&mut self, bytes: &[u8]) -> fmt::Result {\n for byte in bytes {\n try!(self.write_raw_byte(*byte));\n }\n\n Ok(())\n }\n\n fn flush(&mut self) -> fmt::Result;\n\n fn write_char(&mut self, ch: char) -> fmt::Result {\n let mut buf = [0u8, 4];\n\n let size = try!(ch.encode_utf8(&mut buf).ok_or(fmt::Error));\n\n try!(self.write_raw_bytes(&buf[0..size]));\n try!(self.flush());\n Ok(())\n }\n}\n\n\/\/\/ Controls a VGA text-mode terminal.\npub struct Vga {\n width: usize,\n height: usize,\n row: usize,\n col: usize,\n fg: Color,\n bg: Color,\n attr: u8,\n buffer: *mut u16,\n port: u16\n}\n\nimpl Vga {\n pub unsafe fn new(width: usize,\n height: usize,\n buffer: *mut u16,\n port: u16)\n -> Vga {\n\n let mut vga = Vga {\n width: width,\n height: height,\n row: 0,\n col: 0,\n fg: Color::LightGrey,\n bg: Color::Black,\n attr: Vga::attr(Color::LightGrey, Color::Black),\n buffer: buffer,\n port: port\n };\n\n vga.reset().unwrap();\n\n vga\n }\n\n pub fn color(c: Color) -> u8 {\n c as u8\n }\n\n pub fn attr(fg: Color, bg: Color) -> u8 {\n Vga::color(fg) | (Vga::color(bg) << 4)\n }\n\n fn update_attr(&mut self) {\n self.attr = Vga::attr(self.fg, self.bg);\n }\n\n fn update_cursor(&mut self) {\n unsafe fn outb(byte: u8, port: u16) {\n \/\/ FIXME: It seems like we have to do this due to a Rust bug where\n \/\/ the \"a\" and \"d\" constraints cause nothing to be generated. 
I\n \/\/ should file a bug report.\n asm!(concat!(\"mov $0, %al;\\n\",\n \"mov $1, %dx;\\n\",\n \"out %al, %dx\")\n :\n : \"r\" (byte), \"r\" (port)\n : \"rax\", \"rdx\"\n : \"volatile\");\n }\n\n let pos: u16 = ((self.row * self.width) + self.col) as u16;\n\n unsafe {\n outb(0x0F, self.port);\n outb(pos as u8, self.port + 1);\n\n outb(0x0E, self.port);\n outb((pos >> 8) as u8, self.port + 1);\n }\n }\n\n pub fn put(&mut self, byte: u8, attr: u8, row: usize, col: usize) {\n unsafe {\n *self.buffer.offset((row * self.width + col) as isize) =\n (byte as u16) | ((attr as u16) << 8);\n }\n }\n\n pub fn put_here(&mut self, byte: u8) {\n let (attr, row, col) = (self.attr, self.row, self.col);\n\n self.put(byte, attr, row, col)\n }\n\n fn new_line(&mut self) {\n \/\/ Clear to the end of the line.\n while self.col < self.width {\n self.put_here(' ' as u8);\n self.col += 1;\n }\n\n \/\/ Go to the next line, scrolling if necessary.\n self.col = 0;\n self.row += 1;\n\n while self.row >= self.height {\n self.scroll();\n self.row -= 1;\n }\n\n self.update_cursor();\n }\n\n fn scroll(&mut self) {\n \/\/ Shift everything one line back.\n for row in 1..self.height {\n for col in 0..self.width {\n let index = (row * self.width + col) as isize;\n\n unsafe {\n *self.buffer.offset(index - self.width as isize) =\n *self.buffer.offset(index);\n }\n }\n }\n\n \/\/ Clear last line.\n let (attr, height) = (self.attr, self.height);\n\n for col in 0..self.width {\n self.put(' ' as u8, attr, height - 1, col);\n }\n }\n}\n\nimpl Terminal for Vga {\n fn reset(&mut self) -> fmt::Result {\n self.fg = Color::LightGrey;\n self.bg = Color::Black;\n self.update_attr();\n self.clear()\n }\n\n fn clear(&mut self) -> fmt::Result {\n self.row = 0;\n self.col = 0;\n\n let attr = self.attr;\n\n for row in 0..self.height {\n for col in 0..self.width {\n self.put(' ' as u8, attr, row, col);\n }\n }\n\n Ok(())\n }\n\n fn get_cursor(&self) -> (usize, usize) {\n (self.row, self.col)\n }\n\n fn set_cursor(&mut self, row: usize, col: usize) -> fmt::Result {\n self.row = row;\n self.col = col;\n\n self.update_cursor();\n Ok(())\n }\n\n fn get_color(&self) -> (Color, Color) {\n (self.fg, self.bg)\n }\n\n fn set_color(&mut self, fg: Color, bg: Color) -> fmt::Result {\n self.fg = fg;\n self.bg = bg;\n self.update_attr();\n Ok(())\n }\n\n fn put_raw_byte(&mut self,\n byte: u8,\n fg: Color,\n bg: Color,\n row: usize,\n col: usize) -> fmt::Result {\n\n self.put(byte, Vga::attr(fg, bg), row, col);\n Ok(())\n }\n\n fn write_raw_byte(&mut self, byte: u8) -> fmt::Result {\n match byte {\n 0x0A \/* newline *\/ => {\n self.new_line();\n },\n\n 0x08 \/* backspace *\/ => {\n if self.col > 0 {\n self.col -= 1;\n }\n\n self.put_here(' ' as u8);\n },\n\n _ => {\n self.put_here(byte);\n\n if self.col + 1 >= self.width {\n self.new_line();\n } else {\n self.col += 1;\n }\n }\n }\n\n Ok(())\n }\n\n fn flush(&mut self) -> fmt::Result {\n self.update_cursor();\n Ok(())\n }\n}\n\nimpl fmt::Write for Vga {\n fn write_str(&mut self, s: &str) -> fmt::Result {\n try!(self.write_raw_bytes(s.as_bytes()));\n try!(self.flush());\n Ok(())\n }\n}\n\nstatic mut CONSOLE: Option<Vga> = None;\n\n\/\/\/ Get the current global console.\npub fn console() -> &'static mut Terminal {\n unsafe {\n if CONSOLE.is_none() {\n CONSOLE = Some(Vga::new(80, 25,\n 0xffffffff800b8000 as *mut u16, 0x3d4));\n }\n\n CONSOLE.as_mut().unwrap()\n }\n}\n\n\/\/\/ C (legacy) interface. 
See `kit\/kernel\/include\/terminal.h`.\npub mod ffi {\n use super::*;\n \n use core::mem;\n use core::slice;\n use core::ptr::PtrExt;\n\n use libc::{c_char, size_t};\n\n #[no_mangle]\n pub extern fn terminal_initialize() {\n console().reset().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_clear() {\n console().clear().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_updatecursor() {\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_getcursor(row: *mut size_t, column: *mut size_t) {\n let (row_us, col_us) = console().get_cursor();\n\n *row = row_us as size_t;\n *column = col_us as size_t;\n }\n\n #[no_mangle]\n pub extern fn terminal_setcursor(row: size_t, column: size_t) {\n console().set_cursor(row as usize, column as usize).unwrap();\n }\n\n #[repr(C)]\n #[derive(Copy)]\n pub enum VgaColor {\n Black = 0,\n Blue = 1,\n Green = 2,\n Cyan = 3,\n Red = 4,\n Magenta = 5,\n Brown = 6,\n LightGrey = 7,\n DarkGrey = 8,\n LightBlue = 9,\n LightGreen = 10,\n LightCyan = 11,\n LightRed = 12,\n LightMagenta = 13,\n LightBrown = 14,\n White = 15,\n }\n\n impl VgaColor {\n pub fn from_color(color: Color) -> VgaColor {\n unsafe { mem::transmute(color as i32) }\n }\n\n pub fn to_color(self) -> Color {\n unsafe { mem::transmute(self as u8) }\n }\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_getcolor(fg: *mut VgaColor,\n bg: *mut VgaColor) {\n let (fg_c, bg_c) = console().get_color();\n\n *fg = VgaColor::from_color(fg_c);\n *bg = VgaColor::from_color(bg_c);\n }\n\n #[no_mangle]\n pub extern fn terminal_setcolor(fg: VgaColor, bg: VgaColor) {\n console().set_color(fg.to_color(), bg.to_color()).unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_putentryat(c: c_char,\n color: u8,\n x: size_t,\n y: size_t) {\n\n let fg_v: VgaColor = mem::transmute((color & 0x0f) as i32);\n let bg_v: VgaColor = mem::transmute((color >> 4) as i32);\n\n console()\n .put_raw_byte(c as u8,\n fg_v.to_color(),\n bg_v.to_color(),\n y as usize,\n x as usize)\n .unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_newline() {\n console().write_raw_byte('\\n' as u8).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_writechar_internal(c: c_char) {\n console().write_raw_byte(c as u8).unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_writechar(c: c_char) {\n console().write_raw_byte(c as u8).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_writebuf(length: u64, buffer: *const u8) {\n let bytes = slice::from_raw_parts(buffer, length as usize);\n\n console().write_raw_bytes(bytes).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_writestring(data: *const u8) {\n let mut data_len = 0usize;\n let mut data_end = data;\n\n while *data_end != 0 {\n data_len += 1;\n data_end = data_end.offset(1);\n }\n\n let bytes = slice::from_raw_parts(data, data_len);\n\n console().write_raw_bytes(bytes).unwrap();\n console().flush().unwrap();\n }\n}\n<commit_msg>[kernel] fix the Rust outb() function to use LLVM style constraints<commit_after>\/*******************************************************************************\n *\n * kit\/kernel\/terminal.rs\n *\n * vim:ft=rust:ts=4:sw=4:et:tw=80\n *\n * Copyright (C) 2015, Devyn Cairns\n * Redistribution of this file is permitted under the terms of the simplified\n * BSD license. See LICENSE for more information.\n *\n ******************************************************************************\/\n\n\/\/! 
Early text mode 80x25 terminal handler.\n\nuse core::prelude::*;\n\nuse core::fmt;\n\n#[derive(Copy)]\npub enum Color {\n Black = 0,\n Blue = 1,\n Green = 2,\n Cyan = 3,\n Red = 4,\n Magenta = 5,\n Brown = 6,\n LightGrey = 7,\n DarkGrey = 8,\n LightBlue = 9,\n LightGreen = 10,\n LightCyan = 11,\n LightRed = 12,\n LightMagenta = 13,\n LightBrown = 14,\n White = 15,\n}\n\n\/\/\/ A terminal.\npub trait Terminal: fmt::Write {\n fn reset(&mut self) -> fmt::Result;\n fn clear(&mut self) -> fmt::Result;\n\n fn get_cursor(&self) -> (usize, usize);\n fn set_cursor(&mut self, row: usize, col: usize) -> fmt::Result;\n\n fn get_color(&self) -> (Color, Color);\n fn set_color(&mut self, fg: Color, bg: Color) -> fmt::Result;\n\n fn put_raw_byte(&mut self,\n byte: u8,\n fg: Color,\n bg: Color,\n row: usize,\n col: usize) -> fmt::Result;\n\n \/\/\/ Does not flush.\n fn write_raw_byte(&mut self, byte: u8) -> fmt::Result;\n\n \/\/\/ Does not flush.\n fn write_raw_bytes(&mut self, bytes: &[u8]) -> fmt::Result {\n for byte in bytes {\n try!(self.write_raw_byte(*byte));\n }\n\n Ok(())\n }\n\n fn flush(&mut self) -> fmt::Result;\n\n fn write_char(&mut self, ch: char) -> fmt::Result {\n let mut buf = [0u8, 4];\n\n let size = try!(ch.encode_utf8(&mut buf).ok_or(fmt::Error));\n\n try!(self.write_raw_bytes(&buf[0..size]));\n try!(self.flush());\n Ok(())\n }\n}\n\n\/\/\/ Controls a VGA text-mode terminal.\npub struct Vga {\n width: usize,\n height: usize,\n row: usize,\n col: usize,\n fg: Color,\n bg: Color,\n attr: u8,\n buffer: *mut u16,\n port: u16\n}\n\nimpl Vga {\n pub unsafe fn new(width: usize,\n height: usize,\n buffer: *mut u16,\n port: u16)\n -> Vga {\n\n let mut vga = Vga {\n width: width,\n height: height,\n row: 0,\n col: 0,\n fg: Color::LightGrey,\n bg: Color::Black,\n attr: Vga::attr(Color::LightGrey, Color::Black),\n buffer: buffer,\n port: port\n };\n\n vga.reset().unwrap();\n\n vga\n }\n\n pub fn color(c: Color) -> u8 {\n c as u8\n }\n\n pub fn attr(fg: Color, bg: Color) -> u8 {\n Vga::color(fg) | (Vga::color(bg) << 4)\n }\n\n fn update_attr(&mut self) {\n self.attr = Vga::attr(self.fg, self.bg);\n }\n\n fn update_cursor(&mut self) {\n unsafe fn outb(byte: u8, port: u16) {\n asm!(\"out %al, %dx\" :: \"{ax}\" (byte), \"{dx}\" (port) :: \"volatile\");\n }\n\n let pos: u16 = ((self.row * self.width) + self.col) as u16;\n\n unsafe {\n outb(0x0F, self.port);\n outb(pos as u8, self.port + 1);\n\n outb(0x0E, self.port);\n outb((pos >> 8) as u8, self.port + 1);\n }\n }\n\n pub fn put(&mut self, byte: u8, attr: u8, row: usize, col: usize) {\n unsafe {\n *self.buffer.offset((row * self.width + col) as isize) =\n (byte as u16) | ((attr as u16) << 8);\n }\n }\n\n pub fn put_here(&mut self, byte: u8) {\n let (attr, row, col) = (self.attr, self.row, self.col);\n\n self.put(byte, attr, row, col)\n }\n\n fn new_line(&mut self) {\n \/\/ Clear to the end of the line.\n while self.col < self.width {\n self.put_here(' ' as u8);\n self.col += 1;\n }\n\n \/\/ Go to the next line, scrolling if necessary.\n self.col = 0;\n self.row += 1;\n\n while self.row >= self.height {\n self.scroll();\n self.row -= 1;\n }\n\n self.update_cursor();\n }\n\n fn scroll(&mut self) {\n \/\/ Shift everything one line back.\n for row in 1..self.height {\n for col in 0..self.width {\n let index = (row * self.width + col) as isize;\n\n unsafe {\n *self.buffer.offset(index - self.width as isize) =\n *self.buffer.offset(index);\n }\n }\n }\n\n \/\/ Clear last line.\n let (attr, height) = (self.attr, self.height);\n\n for col in 0..self.width {\n 
self.put(' ' as u8, attr, height - 1, col);\n }\n }\n}\n\nimpl Terminal for Vga {\n fn reset(&mut self) -> fmt::Result {\n self.fg = Color::LightGrey;\n self.bg = Color::Black;\n self.update_attr();\n self.clear()\n }\n\n fn clear(&mut self) -> fmt::Result {\n self.row = 0;\n self.col = 0;\n\n let attr = self.attr;\n\n for row in 0..self.height {\n for col in 0..self.width {\n self.put(' ' as u8, attr, row, col);\n }\n }\n\n Ok(())\n }\n\n fn get_cursor(&self) -> (usize, usize) {\n (self.row, self.col)\n }\n\n fn set_cursor(&mut self, row: usize, col: usize) -> fmt::Result {\n self.row = row;\n self.col = col;\n\n self.update_cursor();\n Ok(())\n }\n\n fn get_color(&self) -> (Color, Color) {\n (self.fg, self.bg)\n }\n\n fn set_color(&mut self, fg: Color, bg: Color) -> fmt::Result {\n self.fg = fg;\n self.bg = bg;\n self.update_attr();\n Ok(())\n }\n\n fn put_raw_byte(&mut self,\n byte: u8,\n fg: Color,\n bg: Color,\n row: usize,\n col: usize) -> fmt::Result {\n\n self.put(byte, Vga::attr(fg, bg), row, col);\n Ok(())\n }\n\n fn write_raw_byte(&mut self, byte: u8) -> fmt::Result {\n match byte {\n 0x0A \/* newline *\/ => {\n self.new_line();\n },\n\n 0x08 \/* backspace *\/ => {\n if self.col > 0 {\n self.col -= 1;\n }\n\n self.put_here(' ' as u8);\n },\n\n _ => {\n self.put_here(byte);\n\n if self.col + 1 >= self.width {\n self.new_line();\n } else {\n self.col += 1;\n }\n }\n }\n\n Ok(())\n }\n\n fn flush(&mut self) -> fmt::Result {\n self.update_cursor();\n Ok(())\n }\n}\n\nimpl fmt::Write for Vga {\n fn write_str(&mut self, s: &str) -> fmt::Result {\n try!(self.write_raw_bytes(s.as_bytes()));\n try!(self.flush());\n Ok(())\n }\n}\n\nstatic mut CONSOLE: Option<Vga> = None;\n\n\/\/\/ Get the current global console.\npub fn console() -> &'static mut Terminal {\n unsafe {\n if CONSOLE.is_none() {\n CONSOLE = Some(Vga::new(80, 25,\n 0xffffffff800b8000 as *mut u16, 0x3d4));\n }\n\n CONSOLE.as_mut().unwrap()\n }\n}\n\n\/\/\/ C (legacy) interface. 
See `kit\/kernel\/include\/terminal.h`.\npub mod ffi {\n use super::*;\n \n use core::mem;\n use core::slice;\n use core::ptr::PtrExt;\n\n use libc::{c_char, size_t};\n\n #[no_mangle]\n pub extern fn terminal_initialize() {\n console().reset().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_clear() {\n console().clear().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_updatecursor() {\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_getcursor(row: *mut size_t, column: *mut size_t) {\n let (row_us, col_us) = console().get_cursor();\n\n *row = row_us as size_t;\n *column = col_us as size_t;\n }\n\n #[no_mangle]\n pub extern fn terminal_setcursor(row: size_t, column: size_t) {\n console().set_cursor(row as usize, column as usize).unwrap();\n }\n\n #[repr(C)]\n #[derive(Copy)]\n pub enum VgaColor {\n Black = 0,\n Blue = 1,\n Green = 2,\n Cyan = 3,\n Red = 4,\n Magenta = 5,\n Brown = 6,\n LightGrey = 7,\n DarkGrey = 8,\n LightBlue = 9,\n LightGreen = 10,\n LightCyan = 11,\n LightRed = 12,\n LightMagenta = 13,\n LightBrown = 14,\n White = 15,\n }\n\n impl VgaColor {\n pub fn from_color(color: Color) -> VgaColor {\n unsafe { mem::transmute(color as i32) }\n }\n\n pub fn to_color(self) -> Color {\n unsafe { mem::transmute(self as u8) }\n }\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_getcolor(fg: *mut VgaColor,\n bg: *mut VgaColor) {\n let (fg_c, bg_c) = console().get_color();\n\n *fg = VgaColor::from_color(fg_c);\n *bg = VgaColor::from_color(bg_c);\n }\n\n #[no_mangle]\n pub extern fn terminal_setcolor(fg: VgaColor, bg: VgaColor) {\n console().set_color(fg.to_color(), bg.to_color()).unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_putentryat(c: c_char,\n color: u8,\n x: size_t,\n y: size_t) {\n\n let fg_v: VgaColor = mem::transmute((color & 0x0f) as i32);\n let bg_v: VgaColor = mem::transmute((color >> 4) as i32);\n\n console()\n .put_raw_byte(c as u8,\n fg_v.to_color(),\n bg_v.to_color(),\n y as usize,\n x as usize)\n .unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_newline() {\n console().write_raw_byte('\\n' as u8).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_writechar_internal(c: c_char) {\n console().write_raw_byte(c as u8).unwrap();\n }\n\n #[no_mangle]\n pub extern fn terminal_writechar(c: c_char) {\n console().write_raw_byte(c as u8).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_writebuf(length: u64, buffer: *const u8) {\n let bytes = slice::from_raw_parts(buffer, length as usize);\n\n console().write_raw_bytes(bytes).unwrap();\n console().flush().unwrap();\n }\n\n #[no_mangle]\n pub unsafe extern fn terminal_writestring(data: *const u8) {\n let mut data_len = 0usize;\n let mut data_end = data;\n\n while *data_end != 0 {\n data_len += 1;\n data_end = data_end.offset(1);\n }\n\n let bytes = slice::from_raw_parts(data, data_len);\n\n console().write_raw_bytes(bytes).unwrap();\n console().flush().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>End of phone_number_matcher file<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 
of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/! Extension trait for libimagstore::store::Store\n\/\/!\n\/\/! This module contains traits and code for extending the Store with functions that can be used to\n\/\/! create, get and delete events.\n\nuse chrono::NaiveDateTime as NDT;\nuse toml::Value;\nuse toml_query::insert::TomlValueInsertExt;\n\nuse libimagstore::store::Store;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::storeid::StoreId;\nuse libimagentrydatetime::datepath::compiler::DatePathCompiler;\n\nuse result::Result;\nuse constants::*;\nuse error::TimeTrackErrorKind as TTEK;\nuse error::MapErrInto;\nuse iter::get::GetTimeTrackIter;\n\nuse tag::TimeTrackingTag as TTT;\n\npub trait TimeTrackStore<'a> {\n\n fn create_timetracking_now(&'a self, ts: &TTT) -> Result<FileLockEntry<'a>>;\n fn create_timetracking_at(&'a self, start: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>>;\n fn create_timetracking(&'a self, start: &NDT, end: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>>;\n\n fn get_timetrackings<I>(&'a self) -> Result<GetTimeTrackIter<'a>>;\n}\n\nfn now() -> NDT {\n use chrono::offset::local::Local;\n Local::now().naive_local()\n}\n\nlazy_static! {\n static ref COMPILER: DatePathCompiler = {\n use libimagentrydatetime::datepath::accuracy::Accuracy;\n use libimagentrydatetime::datepath::format::Format;\n\n DatePathCompiler::new(Accuracy::Second, Format::ElementIsFolder)\n };\n}\n\nimpl<'a> TimeTrackStore<'a> for Store {\n\n fn create_timetracking_now(&'a self, ts: &TTT) -> Result<FileLockEntry<'a>> {\n self.create_timetracking_at(&now(), ts)\n }\n\n fn create_timetracking_at(&'a self, start: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>> {\n COMPILER.compile(CRATE_NAME, start)\n .map_err_into(TTEK::StoreIdError)\n .and_then(|id| enhance_id_with_tag(id, ts))\n .and_then(|id| self.create(id).map_err_into(TTEK::StoreWriteError))\n .and_then(|mut fle| {\n let v = Value::String(ts.as_str().to_owned());\n fle.get_header_mut()\n .insert(DATE_TIME_TAG_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n .and_then(|mut fle| {\n let v = Value::String(start.format(DATE_TIME_FORMAT).to_string());\n fle.get_header_mut()\n .insert(DATE_TIME_START_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n }\n\n fn create_timetracking(&'a self, start: &NDT, end: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>> {\n self.create_timetracking_at(start, ts)\n .and_then(|mut fle| {\n let v = Value::String(end.format(DATE_TIME_FORMAT).to_string());\n fle.get_header_mut()\n .insert(DATE_TIME_END_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n }\n\n fn get_timetrackings<I>(&'a self) -> Result<GetTimeTrackIter<'a>> {\n self.retrieve_for_module(CRATE_NAME)\n .map_err_into(TTEK::StoreReadError)\n .map(|iter| GetTimeTrackIter::new(iter, self))\n }\n\n}\n\n\/\/\/ TODO: We need a new function on StoreId to do this in a nice way:\n\/\/\/\n\/\/\/ `storeid.append_to_filename(string)`\n\/\/\/\nfn enhance_id_with_tag(s: StoreId, t: &TTT) -> Result<StoreId> {\n 
let mut new = s.local().clone();\n new.push(t.as_str().to_owned());\n StoreId::new_baseless(new).map_err_into(TTEK::StoreIdError)\n}\n\n<commit_msg>Redo path altering<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/! Extension trait for libimagstore::store::Store\n\/\/!\n\/\/! This module contains traits and code for extending the Store with functions that can be used to\n\/\/! create, get and delete events.\n\nuse chrono::NaiveDateTime as NDT;\nuse toml::Value;\nuse toml_query::insert::TomlValueInsertExt;\n\nuse libimagstore::store::Store;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::storeid::StoreId;\nuse libimagentrydatetime::datepath::compiler::DatePathCompiler;\n\nuse result::Result;\nuse constants::*;\nuse error::TimeTrackErrorKind as TTEK;\nuse error::MapErrInto;\nuse iter::get::GetTimeTrackIter;\n\nuse tag::TimeTrackingTag as TTT;\n\npub trait TimeTrackStore<'a> {\n\n fn create_timetracking_now(&'a self, ts: &TTT) -> Result<FileLockEntry<'a>>;\n fn create_timetracking_at(&'a self, start: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>>;\n fn create_timetracking(&'a self, start: &NDT, end: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>>;\n\n fn get_timetrackings<I>(&'a self) -> Result<GetTimeTrackIter<'a>>;\n}\n\nfn now() -> NDT {\n use chrono::offset::local::Local;\n Local::now().naive_local()\n}\n\nlazy_static! 
{\n static ref COMPILER: DatePathCompiler = {\n use libimagentrydatetime::datepath::accuracy::Accuracy;\n use libimagentrydatetime::datepath::format::Format;\n\n DatePathCompiler::new(Accuracy::Second, Format::ElementIsFolder)\n };\n}\n\nimpl<'a> TimeTrackStore<'a> for Store {\n\n fn create_timetracking_now(&'a self, ts: &TTT) -> Result<FileLockEntry<'a>> {\n self.create_timetracking_at(&now(), ts)\n }\n\n fn create_timetracking_at(&'a self, start: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>> {\n use std::path::PathBuf;\n\n COMPILER.compile(CRATE_NAME, start)\n .map_err_into(TTEK::StoreIdError)\n .map(|mut id| {\n id.local_push(PathBuf::from(ts.as_str()));\n id\n })\n .and_then(|id| self.create(id).map_err_into(TTEK::StoreWriteError))\n .and_then(|mut fle| {\n let v = Value::String(ts.as_str().to_owned());\n fle.get_header_mut()\n .insert(DATE_TIME_TAG_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n .and_then(|mut fle| {\n let v = Value::String(start.format(DATE_TIME_FORMAT).to_string());\n fle.get_header_mut()\n .insert(DATE_TIME_START_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n }\n\n fn create_timetracking(&'a self, start: &NDT, end: &NDT, ts: &TTT) -> Result<FileLockEntry<'a>> {\n self.create_timetracking_at(start, ts)\n .and_then(|mut fle| {\n let v = Value::String(end.format(DATE_TIME_FORMAT).to_string());\n fle.get_header_mut()\n .insert(DATE_TIME_END_HEADER_PATH, v)\n .map_err_into(TTEK::HeaderWriteError)\n .map(|_| fle)\n })\n }\n\n fn get_timetrackings<I>(&'a self) -> Result<GetTimeTrackIter<'a>> {\n self.retrieve_for_module(CRATE_NAME)\n .map_err_into(TTEK::StoreReadError)\n .map(|iter| GetTimeTrackIter::new(iter, self))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that an assignment of type ! makes the rest of the block dead code.\n\n#![feature(never_type)]\n#![deny(unused, unreachable_code)]\n\nfn main() {\n let x: ! = panic!(\"aah\"); \/\/~ ERROR unused\n drop(x); \/\/~ ERROR unreachable\n}\n\n<commit_msg>we now get an extra unreachable code warning in this test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that an assignment of type ! makes the rest of the block dead code.\n\n#![feature(never_type)]\n#![deny(unused, unreachable_code)]\n\nfn main() {\n let x: ! 
= panic!(\"aah\"); \/\/~ ERROR unused\n drop(x); \/\/~ ERROR unreachable\n \/\/~^ ERROR unreachable\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[No-auto] bin\/domain\/habit: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>configuration.rs: Add function to fetch editor options<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2477<commit_after>\/\/ https:\/\/leetcode.com\/problems\/minimum-fuel-cost-to-report-to-the-capital\/\npub fn minimum_fuel_cost(roads: Vec<Vec<i32>>, seats: i32) -> i64 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", minimum_fuel_cost(vec![vec![0,1],vec![0,2],vec![0,3]], 5)); \/\/ 3\n println!(\"{}\", minimum_fuel_cost(vec![vec![3,1],vec![3,2],vec![1,0],vec![0,4],vec![0,5],vec![4,6]], 2)); \/\/ 7\n println!(\"{}\", minimum_fuel_cost(vec![], 1)); \/\/ 0\n}\n<|endoftext|>"} {"text":"<commit_before>use core::ops::Deref;\nuse core_collections::borrow::ToOwned;\nuse io::{Read, Error, Result, Write, Seek, SeekFrom};\nuse os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};\nuse mem;\nuse path::{PathBuf, Path};\nuse str;\nuse string::String;\nuse sys_common::AsInner;\nuse vec::Vec;\n\nuse system::syscall::{sys_open, sys_dup, sys_close, sys_fpath, sys_ftruncate, sys_read,\n sys_write, sys_lseek, sys_fsync, sys_mkdir, sys_rmdir, sys_stat, sys_unlink};\nuse system::syscall::{O_RDWR, O_RDONLY, O_WRONLY, O_APPEND, O_CREAT, O_TRUNC, MODE_DIR, MODE_FILE, SEEK_SET, SEEK_CUR, SEEK_END, Stat};\n\n\/\/\/ A Unix-style file\npub struct File {\n \/\/\/ The id for the file\n fd: usize,\n}\n\nimpl File {\n \/\/\/ Open a new file using a path\n pub fn open<P: AsRef<Path>>(path: P) -> Result<File> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_open(path_c.as_ptr(), O_RDONLY, 0).map(|fd| File::from_raw_fd(fd) )\n }.map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Create a new file using a path\n pub fn create<P: AsRef<Path>>(path: P) -> Result<File> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_open(path_c.as_ptr(), O_CREAT | O_RDWR | O_TRUNC, 0).map(|fd| File::from_raw_fd(fd) )\n }.map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Duplicate the file\n pub fn dup(&self) -> Result<File> {\n sys_dup(self.fd).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Get the canonical path of the file\n pub fn path(&self) -> Result<PathBuf> {\n let mut buf: [u8; 4096] = [0; 4096];\n match sys_fpath(self.fd, &mut buf) {\n Ok(count) => Ok(PathBuf::from(unsafe { String::from_utf8_unchecked(Vec::from(&buf[0..count])) })),\n Err(err) => Err(Error::from_sys(err)),\n }\n }\n\n \/\/\/ Flush the file data and metadata\n pub fn sync_all(&mut self) -> Result<()> {\n sys_fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Flush the file data\n pub fn sync_data(&mut self) -> Result<()> {\n sys_fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Truncates the file\n pub fn set_len(&mut self, size: u64) -> Result<()> {\n sys_ftruncate(self.fd, size as usize).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl AsRawFd for File {\n fn as_raw_fd(&self) -> RawFd {\n self.fd\n }\n}\n\nimpl FromRawFd for File {\n unsafe fn from_raw_fd(fd: RawFd) -> Self {\n File {\n fd: fd\n }\n }\n}\n\nimpl IntoRawFd for File {\n fn into_raw_fd(self) -> RawFd {\n let fd = self.fd;\n 
mem::forget(self);\n fd\n }\n}\n\nimpl Read for File {\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n sys_read(self.fd, buf).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl Write for File {\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n sys_write(self.fd, buf).map_err(|x| Error::from_sys(x))\n }\n\n \/\/ TODO buffered fs\n fn flush(&mut self) -> Result<()> { Ok(()) }\n}\n\nimpl Seek for File {\n \/\/\/ Seek a given position\n fn seek(&mut self, pos: SeekFrom) -> Result<u64> {\n let (whence, offset) = match pos {\n SeekFrom::Start(offset) => (SEEK_SET, offset as isize),\n SeekFrom::Current(offset) => (SEEK_CUR, offset as isize),\n SeekFrom::End(offset) => (SEEK_END, offset as isize),\n };\n\n sys_lseek(self.fd, offset, whence).map(|position| position as u64).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl Drop for File {\n fn drop(&mut self) {\n let _ = sys_close(self.fd);\n }\n}\n\npub struct FileType {\n dir: bool,\n file: bool,\n}\n\nimpl FileType {\n pub fn is_dir(&self) -> bool {\n self.dir\n }\n\n pub fn is_file(&self) -> bool {\n self.file\n }\n}\n\npub struct OpenOptions {\n read: bool,\n write: bool,\n append: bool,\n create: bool,\n truncate: bool,\n}\n\nimpl OpenOptions {\n pub fn new() -> OpenOptions {\n OpenOptions {\n read: false,\n write: false,\n append: false,\n create: false,\n truncate: false,\n }\n }\n\n pub fn read(&mut self, read: bool) -> &mut OpenOptions {\n self.read = read;\n self\n }\n\n pub fn write(&mut self, write: bool) -> &mut OpenOptions {\n self.write = write;\n self\n }\n\n pub fn append(&mut self, append: bool) -> &mut OpenOptions {\n self.append = append;\n self\n }\n\n pub fn create(&mut self, create: bool) -> &mut OpenOptions {\n self.create = create;\n self\n }\n\n pub fn truncate(&mut self, truncate: bool) -> &mut OpenOptions {\n self.truncate = truncate;\n self\n }\n\n pub fn open<P: AsRef<Path>>(&self, path: P) -> Result<File> {\n let mut flags = 0;\n\n if self.read && self.write {\n flags |= O_RDWR;\n } else if self.read {\n flags |= O_RDONLY;\n } else if self.write {\n flags |= O_WRONLY;\n }\n\n if self.append {\n flags |= O_APPEND;\n }\n\n if self.create {\n flags |= O_CREAT;\n }\n\n if self.truncate {\n flags |= O_TRUNC;\n }\n\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_open(path_c.as_ptr(), flags, 0).map(|fd| File::from_raw_fd(fd))\n }.map_err(|x| Error::from_sys(x))\n }\n}\n\npub struct Metadata {\n stat: Stat\n}\n\nimpl Metadata {\n pub fn file_type(&self) -> FileType {\n FileType {\n dir: self.stat.st_mode & MODE_DIR == MODE_DIR,\n file: self.stat.st_mode & MODE_FILE == MODE_FILE\n }\n }\n\n pub fn is_dir(&self) -> bool {\n self.stat.st_mode & MODE_DIR == MODE_DIR\n }\n\n pub fn is_file(&self) -> bool {\n self.stat.st_mode & MODE_FILE == MODE_FILE\n }\n\n pub fn len(&self) -> u64 {\n self.stat.st_size\n }\n}\n\npub struct DirEntry {\n path: String,\n dir: bool,\n file: bool,\n}\n\nimpl DirEntry {\n pub fn file_name(&self) -> &Path {\n unsafe { mem::transmute(self.path.deref()) }\n }\n\n pub fn file_type(&self) -> Result<FileType> {\n Ok(FileType {\n dir: self.dir,\n file: self.file,\n })\n }\n\n pub fn path(&self) -> PathBuf {\n PathBuf::from(self.path.clone())\n }\n}\n\npub struct ReadDir {\n file: File,\n}\n\nimpl Iterator for ReadDir {\n type Item = Result<DirEntry>;\n fn next(&mut self) -> Option<Result<DirEntry>> {\n let mut path = String::new();\n let mut buf: [u8; 1] = [0; 1];\n loop {\n match self.file.read(&mut buf) {\n 
Ok(0) => break,\n Ok(count) => {\n if buf[0] == 10 {\n break;\n } else {\n path.push_str(unsafe { str::from_utf8_unchecked(&buf[..count]) });\n }\n }\n Err(_err) => break,\n }\n }\n if path.is_empty() {\n None\n } else {\n let dir = path.ends_with('\/');\n if dir {\n path.pop();\n }\n Some(Ok(DirEntry {\n path: path,\n dir: dir,\n file: !dir,\n }))\n }\n }\n}\n\n\/\/\/ Find the canonical path of a file\npub fn canonicalize<P: AsRef<Path>>(path: P) -> Result<PathBuf> {\n match File::open(path) {\n Ok(file) => {\n match file.path() {\n Ok(realpath) => Ok(realpath),\n Err(err) => Err(err)\n }\n },\n Err(err) => Err(err)\n }\n}\n\npub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {\n let mut stat = Stat {\n st_mode: 0,\n st_size: 0\n };\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n try!(sys_stat(path_c.as_ptr(), &mut stat).map_err(|x| Error::from_sys(x)));\n }\n Ok(Metadata {\n stat: stat\n })\n}\n\n\/\/\/ Create a new directory, using a path\n\/\/\/ The default mode of the directory is 744\npub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_mkdir(path_c.as_ptr(), 755).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n}\n\npub fn read_dir<P: AsRef<Path>>(path: P) -> Result<ReadDir> {\n File::open(path).map(|file| ReadDir { file: file })\n}\n\npub fn remove_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_rmdir(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n}\n\npub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_unlink(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n}\n<commit_msg>Add fs::copy and fs::rename<commit_after>use core::ops::Deref;\nuse core_collections::borrow::ToOwned;\nuse io::{self, Read, Error, Result, Write, Seek, SeekFrom};\nuse os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};\nuse mem;\nuse path::{PathBuf, Path};\nuse str;\nuse string::String;\nuse sys_common::AsInner;\nuse vec::Vec;\n\nuse system::syscall::{sys_open, sys_dup, sys_close, sys_fpath, sys_ftruncate, sys_read,\n sys_write, sys_lseek, sys_fsync, sys_mkdir, sys_rmdir, sys_stat, sys_unlink};\nuse system::syscall::{O_RDWR, O_RDONLY, O_WRONLY, O_APPEND, O_CREAT, O_TRUNC, MODE_DIR, MODE_FILE, SEEK_SET, SEEK_CUR, SEEK_END, Stat};\n\n\/\/\/ A Unix-style file\npub struct File {\n \/\/\/ The id for the file\n fd: usize,\n}\n\nimpl File {\n \/\/\/ Open a new file using a path\n pub fn open<P: AsRef<Path>>(path: P) -> Result<File> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_open(path_c.as_ptr(), O_RDONLY, 0).map(|fd| File::from_raw_fd(fd) )\n }.map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Create a new file using a path\n pub fn create<P: AsRef<Path>>(path: P) -> Result<File> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_open(path_c.as_ptr(), O_CREAT | O_RDWR | O_TRUNC, 0).map(|fd| File::from_raw_fd(fd) )\n }.map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Duplicate the 
file\n pub fn dup(&self) -> Result<File> {\n sys_dup(self.fd).map(|fd| unsafe { File::from_raw_fd(fd) }).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Get the canonical path of the file\n pub fn path(&self) -> Result<PathBuf> {\n let mut buf: [u8; 4096] = [0; 4096];\n match sys_fpath(self.fd, &mut buf) {\n Ok(count) => Ok(PathBuf::from(unsafe { String::from_utf8_unchecked(Vec::from(&buf[0..count])) })),\n Err(err) => Err(Error::from_sys(err)),\n }\n }\n\n \/\/\/ Flush the file data and metadata\n pub fn sync_all(&mut self) -> Result<()> {\n sys_fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Flush the file data\n pub fn sync_data(&mut self) -> Result<()> {\n sys_fsync(self.fd).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n\n \/\/\/ Truncates the file\n pub fn set_len(&mut self, size: u64) -> Result<()> {\n sys_ftruncate(self.fd, size as usize).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl AsRawFd for File {\n fn as_raw_fd(&self) -> RawFd {\n self.fd\n }\n}\n\nimpl FromRawFd for File {\n unsafe fn from_raw_fd(fd: RawFd) -> Self {\n File {\n fd: fd\n }\n }\n}\n\nimpl IntoRawFd for File {\n fn into_raw_fd(self) -> RawFd {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n}\n\nimpl Read for File {\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n sys_read(self.fd, buf).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl Write for File {\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n sys_write(self.fd, buf).map_err(|x| Error::from_sys(x))\n }\n\n \/\/ TODO buffered fs\n fn flush(&mut self) -> Result<()> { Ok(()) }\n}\n\nimpl Seek for File {\n \/\/\/ Seek a given position\n fn seek(&mut self, pos: SeekFrom) -> Result<u64> {\n let (whence, offset) = match pos {\n SeekFrom::Start(offset) => (SEEK_SET, offset as isize),\n SeekFrom::Current(offset) => (SEEK_CUR, offset as isize),\n SeekFrom::End(offset) => (SEEK_END, offset as isize),\n };\n\n sys_lseek(self.fd, offset, whence).map(|position| position as u64).map_err(|x| Error::from_sys(x))\n }\n}\n\nimpl Drop for File {\n fn drop(&mut self) {\n let _ = sys_close(self.fd);\n }\n}\n\npub struct FileType {\n dir: bool,\n file: bool,\n}\n\nimpl FileType {\n pub fn is_dir(&self) -> bool {\n self.dir\n }\n\n pub fn is_file(&self) -> bool {\n self.file\n }\n}\n\npub struct OpenOptions {\n read: bool,\n write: bool,\n append: bool,\n create: bool,\n truncate: bool,\n}\n\nimpl OpenOptions {\n pub fn new() -> OpenOptions {\n OpenOptions {\n read: false,\n write: false,\n append: false,\n create: false,\n truncate: false,\n }\n }\n\n pub fn read(&mut self, read: bool) -> &mut OpenOptions {\n self.read = read;\n self\n }\n\n pub fn write(&mut self, write: bool) -> &mut OpenOptions {\n self.write = write;\n self\n }\n\n pub fn append(&mut self, append: bool) -> &mut OpenOptions {\n self.append = append;\n self\n }\n\n pub fn create(&mut self, create: bool) -> &mut OpenOptions {\n self.create = create;\n self\n }\n\n pub fn truncate(&mut self, truncate: bool) -> &mut OpenOptions {\n self.truncate = truncate;\n self\n }\n\n pub fn open<P: AsRef<Path>>(&self, path: P) -> Result<File> {\n let mut flags = 0;\n\n if self.read && self.write {\n flags |= O_RDWR;\n } else if self.read {\n flags |= O_RDONLY;\n } else if self.write {\n flags |= O_WRONLY;\n }\n\n if self.append {\n flags |= O_APPEND;\n }\n\n if self.create {\n flags |= O_CREAT;\n }\n\n if self.truncate {\n flags |= O_TRUNC;\n }\n\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n 
unsafe {\n sys_open(path_c.as_ptr(), flags, 0).map(|fd| File::from_raw_fd(fd))\n }.map_err(|x| Error::from_sys(x))\n }\n}\n\npub struct Metadata {\n stat: Stat\n}\n\nimpl Metadata {\n pub fn file_type(&self) -> FileType {\n FileType {\n dir: self.stat.st_mode & MODE_DIR == MODE_DIR,\n file: self.stat.st_mode & MODE_FILE == MODE_FILE\n }\n }\n\n pub fn is_dir(&self) -> bool {\n self.stat.st_mode & MODE_DIR == MODE_DIR\n }\n\n pub fn is_file(&self) -> bool {\n self.stat.st_mode & MODE_FILE == MODE_FILE\n }\n\n pub fn len(&self) -> u64 {\n self.stat.st_size\n }\n}\n\npub struct DirEntry {\n path: String,\n dir: bool,\n file: bool,\n}\n\nimpl DirEntry {\n pub fn file_name(&self) -> &Path {\n unsafe { mem::transmute(self.path.deref()) }\n }\n\n pub fn file_type(&self) -> Result<FileType> {\n Ok(FileType {\n dir: self.dir,\n file: self.file,\n })\n }\n\n pub fn path(&self) -> PathBuf {\n PathBuf::from(self.path.clone())\n }\n}\n\npub struct ReadDir {\n file: File,\n}\n\nimpl Iterator for ReadDir {\n type Item = Result<DirEntry>;\n fn next(&mut self) -> Option<Result<DirEntry>> {\n let mut path = String::new();\n let mut buf: [u8; 1] = [0; 1];\n loop {\n match self.file.read(&mut buf) {\n Ok(0) => break,\n Ok(count) => {\n if buf[0] == 10 {\n break;\n } else {\n path.push_str(unsafe { str::from_utf8_unchecked(&buf[..count]) });\n }\n }\n Err(_err) => break,\n }\n }\n if path.is_empty() {\n None\n } else {\n let dir = path.ends_with('\/');\n if dir {\n path.pop();\n }\n Some(Ok(DirEntry {\n path: path,\n dir: dir,\n file: !dir,\n }))\n }\n }\n}\n\n\/\/\/ Find the canonical path of a file\npub fn canonicalize<P: AsRef<Path>>(path: P) -> Result<PathBuf> {\n match File::open(path) {\n Ok(file) => {\n match file.path() {\n Ok(realpath) => Ok(realpath),\n Err(err) => Err(err)\n }\n },\n Err(err) => Err(err)\n }\n}\n\npub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {\n let mut stat = Stat {\n st_mode: 0,\n st_size: 0\n };\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n try!(sys_stat(path_c.as_ptr(), &mut stat).map_err(|x| Error::from_sys(x)));\n }\n Ok(Metadata {\n stat: stat\n })\n}\n\n\/\/\/ Create a new directory, using a path\n\/\/\/ The default mode of the directory is 744\npub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_mkdir(path_c.as_ptr(), 755).and(Ok(())).map_err(|x| Error::from_sys(x))\n }\n}\n\npub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {\n let mut infile = try!(File::open(from));\n let mut outfile = try!(File::create(to));\n io::copy(&mut infile, &mut outfile)\n}\n\npub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> {\n try!(copy(Path::new(from.as_ref()), to));\n remove_file(from)\n}\n\npub fn read_dir<P: AsRef<Path>>(path: P) -> Result<ReadDir> {\n File::open(path).map(|file| ReadDir { file: file })\n}\n\npub fn remove_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n sys_rmdir(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n}\n\npub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {\n let path_str = path.as_ref().as_os_str().as_inner();\n let mut path_c = path_str.to_owned();\n path_c.push_str(\"\\0\");\n unsafe {\n 
sys_unlink(path_c.as_ptr()).and(Ok(()))\n }.map_err(|x| Error::from_sys(x))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #18532<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that overloaded call parameter checking does not ICE\n\/\/ when a type error or unconstrained type variable propagates\n\/\/ into it.\n\n#![feature(overloaded_calls)]\n\nfn main() {\n (return)((),());\n \/\/~^ ERROR the type of this value must be known\n \/\/~^^ ERROR the type of this value must be known\n \/\/~^^^ ERROR cannot use call notation\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add issue 69017 test<commit_after>\/\/ This issue reproduces an ICE on compile\n\/\/ Fails on 2020-02-08 nightly\n\/\/ regressed commit: https:\/\/github.com\/rust-lang\/rust\/commit\/f8fd4624474a68bd26694eff3536b9f3a127b2d3\n\/\/\n\/\/ check-pass\n\n#![feature(generator_trait)]\n#![feature(generators)]\n\nuse std::ops::Generator;\n\nfn gen() -> impl Generator<usize> {\n |_: usize| {\n println!(\"-> {}\", yield);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementations of things like `Eq` for fixed-length arrays\n\/\/! up to a certain length. Eventually we should able to generalize\n\/\/! to all lengths.\n\/\/!\n\/\/! *[See also the array primitive type](..\/primitive.array.html).*\n\n#![unstable(feature = \"fixed_size_array\",\n reason = \"traits and impls are better expressed through generic \\\n integer constants\",\n issue = \"27778\")]\n\nuse borrow::{Borrow, BorrowMut};\nuse clone::Clone;\nuse cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};\nuse convert::{AsRef, AsMut};\nuse default::Default;\nuse fmt;\nuse hash::{Hash, self};\nuse iter::IntoIterator;\nuse marker::{Sized, Unsize};\nuse option::Option;\nuse slice::{Iter, IterMut, SliceExt};\n\n\/\/\/ Utility trait implemented only on arrays of fixed size\n\/\/\/\n\/\/\/ This trait can be used to implement other traits on fixed-size arrays\n\/\/\/ without causing much metadata bloat.\n\/\/\/\n\/\/\/ The trait is marked unsafe in order to restrict implementors to fixed-size\n\/\/\/ arrays. User of this trait can assume that implementors have the exact\n\/\/\/ layout in memory of a fixed size array (for example, for unsafe\n\/\/\/ initialization).\n\/\/\/\n\/\/\/ Note that the traits AsRef and AsMut provide similar methods for types that\n\/\/\/ may not be fixed-size arrays. 
Implementors should prefer those traits\n\/\/\/ instead.\npub unsafe trait FixedSizeArray<T> {\n \/\/\/ Converts the array to immutable slice\n fn as_slice(&self) -> &[T];\n \/\/\/ Converts the array to mutable slice\n fn as_mut_slice(&mut self) -> &mut [T];\n}\n\nunsafe impl<T, A: Unsize<[T]>> FixedSizeArray<T> for A {\n #[inline]\n fn as_slice(&self) -> &[T] {\n self\n }\n #[inline]\n fn as_mut_slice(&mut self) -> &mut [T] {\n self\n }\n}\n\n\/\/ macro for implementing n-ary tuple functions and operations\nmacro_rules! array_impls {\n ($($N:expr)+) => {\n $(\n impl<T> AsRef<[T]> for [T; $N] {\n #[inline]\n fn as_ref(&self) -> &[T] {\n &self[..]\n }\n }\n\n impl<T> AsMut<[T]> for [T; $N] {\n #[inline]\n fn as_mut(&mut self) -> &mut [T] {\n &mut self[..]\n }\n }\n\n #[stable(feature = \"array_borrow\", since = \"1.4.0\")]\n impl<T> Borrow<[T]> for [T; $N] {\n fn borrow(&self) -> &[T] {\n self\n }\n }\n\n #[stable(feature = \"array_borrow\", since = \"1.4.0\")]\n impl<T> BorrowMut<[T]> for [T; $N] {\n fn borrow_mut(&mut self) -> &mut [T] {\n self\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Hash> Hash for [T; $N] {\n fn hash<H: hash::Hasher>(&self, state: &mut H) {\n Hash::hash(&self[..], state)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: fmt::Debug> fmt::Debug for [T; $N] {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(&&self[..], f)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<'a, T> IntoIterator for &'a [T; $N] {\n type Item = &'a T;\n type IntoIter = Iter<'a, T>;\n\n fn into_iter(self) -> Iter<'a, T> {\n self.iter()\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<'a, T> IntoIterator for &'a mut [T; $N] {\n type Item = &'a mut T;\n type IntoIter = IterMut<'a, T>;\n\n fn into_iter(self) -> IterMut<'a, T> {\n self.iter_mut()\n }\n }\n\n \/\/ NOTE: some less important impls are omitted to reduce code bloat\n __impl_slice_eq1! { [A; $N], [B; $N] }\n __impl_slice_eq2! { [A; $N], [B] }\n __impl_slice_eq2! { [A; $N], &'b [B] }\n __impl_slice_eq2! { [A; $N], &'b mut [B] }\n \/\/ __impl_slice_eq2! { [A; $N], &'b [B; $N] }\n \/\/ __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:Eq> Eq for [T; $N] { }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:PartialOrd> PartialOrd for [T; $N] {\n #[inline]\n fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {\n PartialOrd::partial_cmp(&&self[..], &&other[..])\n }\n #[inline]\n fn lt(&self, other: &[T; $N]) -> bool {\n PartialOrd::lt(&&self[..], &&other[..])\n }\n #[inline]\n fn le(&self, other: &[T; $N]) -> bool {\n PartialOrd::le(&&self[..], &&other[..])\n }\n #[inline]\n fn ge(&self, other: &[T; $N]) -> bool {\n PartialOrd::ge(&&self[..], &&other[..])\n }\n #[inline]\n fn gt(&self, other: &[T; $N]) -> bool {\n PartialOrd::gt(&&self[..], &&other[..])\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:Ord> Ord for [T; $N] {\n #[inline]\n fn cmp(&self, other: &[T; $N]) -> Ordering {\n Ord::cmp(&&self[..], &&other[..])\n }\n }\n )+\n }\n}\n\narray_impls! {\n 0 1 2 3 4 5 6 7 8 9\n 10 11 12 13 14 15 16 17 18 19\n 20 21 22 23 24 25 26 27 28 29\n 30 31 32\n}\n\n\/\/ The Default impls cannot be generated using the array_impls! macro because\n\/\/ they require array literals.\n\nmacro_rules! 
array_impl_default {\n {$n:expr, $t:ident $($ts:ident)*} => {\n #[stable(since = \"1.4.0\", feature = \"array_default\")]\n impl<T> Default for [T; $n] where T: Default {\n fn default() -> [T; $n] {\n [$t::default(), $($ts::default()),*]\n }\n }\n array_impl_default!{($n - 1), $($ts)*}\n };\n {$n:expr,} => {\n #[stable(since = \"1.4.0\", feature = \"array_default\")]\n impl<T> Default for [T; $n] {\n fn default() -> [T; $n] { [] }\n }\n };\n}\n\narray_impl_default!{32, T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T}\n\nmacro_rules! array_impl_clone {\n {$n:expr, $i:expr, $($idx:expr,)*} => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Clone> Clone for [T; $n] {\n fn clone(&self) -> [T; $n] {\n let temp = [&self[$i], $(&self[$idx]),*];\n [temp[$i].clone(), $(temp[$idx].clone()),*]\n }\n }\n array_impl_clone!{$i, $($idx,)*}\n };\n {$n:expr,} => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Clone> Clone for [T; 0] {\n fn clone(&self) -> [T; 0] {\n []\n }\n }\n };\n}\n\narray_impl_clone! {\n 32, 31, 30,\n 29, 28, 27, 26, 25, 24, 23, 22, 21, 20,\n 19, 18, 17, 16, 15, 14, 13, 12, 11, 10,\n 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,\n}\n<commit_msg>Don't use an explicit temporary for cloning an array<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementations of things like `Eq` for fixed-length arrays\n\/\/! up to a certain length. Eventually we should able to generalize\n\/\/! to all lengths.\n\/\/!\n\/\/! *[See also the array primitive type](..\/primitive.array.html).*\n\n#![unstable(feature = \"fixed_size_array\",\n reason = \"traits and impls are better expressed through generic \\\n integer constants\",\n issue = \"27778\")]\n\nuse borrow::{Borrow, BorrowMut};\nuse clone::Clone;\nuse cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};\nuse convert::{AsRef, AsMut};\nuse default::Default;\nuse fmt;\nuse hash::{Hash, self};\nuse iter::IntoIterator;\nuse marker::{Sized, Unsize};\nuse option::Option;\nuse slice::{Iter, IterMut, SliceExt};\n\n\/\/\/ Utility trait implemented only on arrays of fixed size\n\/\/\/\n\/\/\/ This trait can be used to implement other traits on fixed-size arrays\n\/\/\/ without causing much metadata bloat.\n\/\/\/\n\/\/\/ The trait is marked unsafe in order to restrict implementors to fixed-size\n\/\/\/ arrays. User of this trait can assume that implementors have the exact\n\/\/\/ layout in memory of a fixed size array (for example, for unsafe\n\/\/\/ initialization).\n\/\/\/\n\/\/\/ Note that the traits AsRef and AsMut provide similar methods for types that\n\/\/\/ may not be fixed-size arrays. 
Implementors should prefer those traits\n\/\/\/ instead.\npub unsafe trait FixedSizeArray<T> {\n \/\/\/ Converts the array to immutable slice\n fn as_slice(&self) -> &[T];\n \/\/\/ Converts the array to mutable slice\n fn as_mut_slice(&mut self) -> &mut [T];\n}\n\nunsafe impl<T, A: Unsize<[T]>> FixedSizeArray<T> for A {\n #[inline]\n fn as_slice(&self) -> &[T] {\n self\n }\n #[inline]\n fn as_mut_slice(&mut self) -> &mut [T] {\n self\n }\n}\n\n\/\/ macro for implementing n-ary tuple functions and operations\nmacro_rules! array_impls {\n ($($N:expr)+) => {\n $(\n impl<T> AsRef<[T]> for [T; $N] {\n #[inline]\n fn as_ref(&self) -> &[T] {\n &self[..]\n }\n }\n\n impl<T> AsMut<[T]> for [T; $N] {\n #[inline]\n fn as_mut(&mut self) -> &mut [T] {\n &mut self[..]\n }\n }\n\n #[stable(feature = \"array_borrow\", since = \"1.4.0\")]\n impl<T> Borrow<[T]> for [T; $N] {\n fn borrow(&self) -> &[T] {\n self\n }\n }\n\n #[stable(feature = \"array_borrow\", since = \"1.4.0\")]\n impl<T> BorrowMut<[T]> for [T; $N] {\n fn borrow_mut(&mut self) -> &mut [T] {\n self\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Hash> Hash for [T; $N] {\n fn hash<H: hash::Hasher>(&self, state: &mut H) {\n Hash::hash(&self[..], state)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: fmt::Debug> fmt::Debug for [T; $N] {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(&&self[..], f)\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<'a, T> IntoIterator for &'a [T; $N] {\n type Item = &'a T;\n type IntoIter = Iter<'a, T>;\n\n fn into_iter(self) -> Iter<'a, T> {\n self.iter()\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<'a, T> IntoIterator for &'a mut [T; $N] {\n type Item = &'a mut T;\n type IntoIter = IterMut<'a, T>;\n\n fn into_iter(self) -> IterMut<'a, T> {\n self.iter_mut()\n }\n }\n\n \/\/ NOTE: some less important impls are omitted to reduce code bloat\n __impl_slice_eq1! { [A; $N], [B; $N] }\n __impl_slice_eq2! { [A; $N], [B] }\n __impl_slice_eq2! { [A; $N], &'b [B] }\n __impl_slice_eq2! { [A; $N], &'b mut [B] }\n \/\/ __impl_slice_eq2! { [A; $N], &'b [B; $N] }\n \/\/ __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:Eq> Eq for [T; $N] { }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:PartialOrd> PartialOrd for [T; $N] {\n #[inline]\n fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {\n PartialOrd::partial_cmp(&&self[..], &&other[..])\n }\n #[inline]\n fn lt(&self, other: &[T; $N]) -> bool {\n PartialOrd::lt(&&self[..], &&other[..])\n }\n #[inline]\n fn le(&self, other: &[T; $N]) -> bool {\n PartialOrd::le(&&self[..], &&other[..])\n }\n #[inline]\n fn ge(&self, other: &[T; $N]) -> bool {\n PartialOrd::ge(&&self[..], &&other[..])\n }\n #[inline]\n fn gt(&self, other: &[T; $N]) -> bool {\n PartialOrd::gt(&&self[..], &&other[..])\n }\n }\n\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T:Ord> Ord for [T; $N] {\n #[inline]\n fn cmp(&self, other: &[T; $N]) -> Ordering {\n Ord::cmp(&&self[..], &&other[..])\n }\n }\n )+\n }\n}\n\narray_impls! {\n 0 1 2 3 4 5 6 7 8 9\n 10 11 12 13 14 15 16 17 18 19\n 20 21 22 23 24 25 26 27 28 29\n 30 31 32\n}\n\n\/\/ The Default impls cannot be generated using the array_impls! macro because\n\/\/ they require array literals.\n\nmacro_rules! 
array_impl_default {\n {$n:expr, $t:ident $($ts:ident)*} => {\n #[stable(since = \"1.4.0\", feature = \"array_default\")]\n impl<T> Default for [T; $n] where T: Default {\n fn default() -> [T; $n] {\n [$t::default(), $($ts::default()),*]\n }\n }\n array_impl_default!{($n - 1), $($ts)*}\n };\n {$n:expr,} => {\n #[stable(since = \"1.4.0\", feature = \"array_default\")]\n impl<T> Default for [T; $n] {\n fn default() -> [T; $n] { [] }\n }\n };\n}\n\narray_impl_default!{32, T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T}\n\nmacro_rules! array_impl_clone {\n {$n:expr, $i:expr, $($idx:expr,)*} => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Clone> Clone for [T; $n] {\n fn clone(&self) -> [T; $n] {\n [self[$i-$i].clone(), $(self[$i-$idx].clone()),*]\n }\n }\n array_impl_clone!{$i, $($idx,)*}\n };\n {$n:expr,} => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl<T: Clone> Clone for [T; 0] {\n fn clone(&self) -> [T; 0] {\n []\n }\n }\n };\n}\n\narray_impl_clone! {\n 32, 31, 30,\n 29, 28, 27, 26, 25, 24, 23, 22, 21, 20,\n 19, 18, 17, 16, 15, 14, 13, 12, 11, 10,\n 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Type inference\n\nuse core::{Match, Src, Scope, Session};\nuse nameres::resolve_path_with_str;\nuse core::Namespace::TypeNamespace;\nuse core;\nuse ast;\nuse scopes;\nuse matchers;\nuse core::SearchType::ExactMatch;\nuse util::txt_matches;\n\nfn find_start_of_function_body(src: &str) -> usize {\n \/\/ TODO: this should ignore anything inside parens so as to skip the arg list\n src.find('{').unwrap()\n}\n\n\/\/ Removes the body of the statement (anything in the braces {...}), leaving just\n\/\/ the header\n\/\/ TODO: this should skip parens (e.g. function arguments)\npub fn generate_skeleton_for_parsing(src: &str) -> String {\n let mut s = String::new();\n let n = src.find('{').unwrap();\n s.push_str(&src[..n+1]);\n s.push_str(\"};\");\n s\n}\n\npub fn first_param_is_self(blob: &str) -> bool {\n \/\/ skip generic arg\n \/\/ consider 'pub fn map<U, F: FnOnce(T) -> U>(self, f: F)'\n \/\/ we have to match the '>'\n match blob.find('(') {\n None => false,\n Some(probable_param_start) => {\n let skip_generic = match blob.find('<') {\n None => 0,\n Some(generic_start) if generic_start < probable_param_start => {\n let mut level = 0;\n let mut prev = ' ';\n let mut skip_generic = 0;\n for (i, c) in blob.char_indices() {\n match c {\n '<' => level += 1,\n '>' if prev == '-' => (),\n '>' => level -= 1,\n _ => (),\n }\n prev = c;\n if level == 0 {\n skip_generic = i;\n }\n }\n skip_generic\n },\n Some(..) 
=> 0,\n };\n while let Some(start) = blob[skip_generic..].find('(') {\n let end = scopes::find_closing_paren(blob, start + 1);\n let is_self = txt_matches(ExactMatch, \"self\", &blob[(start + 1)..end]);\n debug!(\"searching fn args: |{}| {}\",\n &blob[(start + 1)..end],\n is_self);\n return is_self;\n }\n false\n }\n }\n}\n\n#[test]\nfn generates_skeleton_for_mod() {\n let src = \"mod foo { blah };\";\n let out = generate_skeleton_for_parsing(src);\n assert_eq!(\"mod foo {};\", out);\n}\n\nfn get_type_of_self_arg(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n debug!(\"get_type_of_self_arg {:?}\", m);\n scopes::find_impl_start(msrc, m.point, 0).and_then(|start| {\n let decl = generate_skeleton_for_parsing(&msrc.from(start));\n debug!(\"get_type_of_self_arg impl skeleton |{}|\", decl);\n\n if decl.starts_with(\"impl\") {\n let implres = ast::parse_impl(decl);\n debug!(\"get_type_of_self_arg implres |{:?}|\", implres);\n resolve_path_with_str(&implres.name_path.expect(\"failed parsing impl name\"),\n &m.filepath, start,\n ExactMatch, TypeNamespace,\n session).nth(0).map(core::Ty::Match)\n } else {\n \/\/ \/\/ must be a trait\n ast::parse_trait(decl).name.and_then(|name| {\n Some(core::Ty::Match(Match {\n matchstr: name,\n filepath: m.filepath.clone(),\n point: start,\n local: m.local,\n mtype: core::MatchType::Trait,\n contextstr: matchers::first_line(&msrc[start..]),\n generic_args: Vec::new(),\n generic_types: Vec::new(),\n docs: String::new(),\n }))\n })\n }\n })\n}\n\nfn get_type_of_fnarg(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n if m.matchstr == \"self\" {\n return get_type_of_self_arg(m, msrc, session);\n }\n\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let block = msrc.from(stmtstart);\n if let Some((start, end)) = block.iter_stmts().next() {\n let blob = &msrc[(stmtstart+start)..(stmtstart+end)];\n \/\/ wrap in \"impl blah { }\" so that methods get parsed correctly too\n let mut s = String::new();\n s.push_str(\"impl blah {\");\n let impl_header_len = s.len();\n s.push_str(&blob[..(find_start_of_function_body(blob)+1)]);\n s.push_str(\"}}\");\n let argpos = m.point - (stmtstart+start) + impl_header_len;\n return ast::parse_fn_arg_type(s, argpos, Scope::from_match(m), session);\n }\n None\n}\n\nfn get_type_of_let_expr(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n \/\/ ASSUMPTION: this is being called on a let decl\n let point = scopes::find_stmt_start(msrc, m.point).unwrap();\n let src = msrc.from(point);\n\n if let Some((start, end)) = src.iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_let_expr calling parse_let |{}|\", blob);\n\n let pos = m.point - point - start;\n let scope = Scope{ filepath: m.filepath.clone(), point: m.point };\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\nfn get_type_of_let_block_expr(m: &Match, msrc: Src, session: &Session, prefix: &str) -> Option<core::Ty> {\n \/\/ ASSUMPTION: this is being called on an if let or while let decl\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let stmt = msrc.from(stmtstart);\n let point = stmt.find(prefix).unwrap();\n let src = core::new_source(generate_skeleton_for_parsing(&stmt[point..]));\n\n if let Some((start, end)) = src.as_src().iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_let_block_expr calling get_let_type |{}|\", blob);\n\n let pos = m.point - stmtstart - point - start;\n let scope = Scope{ filepath: 
m.filepath.clone(), point: m.point };\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\nfn get_type_of_for_expr(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let stmt = msrc.from(stmtstart);\n let forpos = stmt.find(\"for \").unwrap();\n let inpos = stmt.find(\" in \").unwrap();\n \/\/ XXX: this need not be the correct brace, see generate_skeleton_for_parsing\n let bracepos = stmt.find('{').unwrap();\n let mut src = stmt[..forpos].to_owned();\n src.push_str(\"if let Some(\");\n src.push_str(&stmt[forpos+4..inpos]);\n src.push_str(\") = \");\n src.push_str(&stmt[inpos+4..bracepos]);\n src.push_str(\".into_iter().next() { }}\");\n let src = core::new_source(src);\n\n if let Some((start, end)) = src.as_src().iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_for_expr: |{}| {} {} {} {}\", blob, m.point, stmtstart, forpos, start);\n\n let pos = m.point - stmtstart - forpos - start;\n let scope = Scope{ filepath: m.filepath.clone(), point: m.point };\n\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\npub fn get_struct_field_type(fieldname: &str, structmatch: &Match, session: &Session) -> Option<core::Ty> {\n assert!(structmatch.mtype == core::MatchType::Struct);\n\n let src = session.load_file(&structmatch.filepath);\n\n let opoint = scopes::find_stmt_start(src.as_src(), structmatch.point);\n let structsrc = scopes::end_of_next_scope(&src[opoint.unwrap()..]);\n\n let fields = ast::parse_struct_fields(structsrc.to_owned(), Scope::from_match(structmatch));\n for (field, _, ty) in fields.into_iter() {\n if fieldname == field {\n return ty;\n }\n }\n None\n}\n\npub fn get_tuplestruct_field_type(fieldnum: usize, structmatch: &Match, session: &Session) -> Option<core::Ty> {\n let src = session.load_file(&structmatch.filepath);\n\n let structsrc = if let core::MatchType::EnumVariant = structmatch.mtype {\n \/\/ decorate the enum variant src to make it look like a tuple struct\n let to = (&src[structmatch.point..]).find('(')\n .map(|n| scopes::find_closing_paren(&src, structmatch.point + n+1))\n .unwrap();\n \"struct \".to_owned() + &src[structmatch.point..(to+1)] + \";\"\n } else {\n assert!(structmatch.mtype == core::MatchType::Struct);\n let opoint = scopes::find_stmt_start(src.as_src(), structmatch.point);\n (*get_first_stmt(src.as_src().from(opoint.unwrap()))).to_owned()\n };\n\n debug!(\"get_tuplestruct_field_type structsrc=|{}|\", structsrc);\n\n let fields = ast::parse_struct_fields(structsrc, Scope::from_match(structmatch));\n\n for (i, (_, _, ty)) in fields.into_iter().enumerate() {\n if i == fieldnum {\n return ty;\n }\n }\n None\n}\n\npub fn get_first_stmt(src: Src) -> Src {\n match src.iter_stmts().next() {\n Some((from, to)) => src.from_to(from, to),\n None => src\n }\n}\n\npub fn get_type_of_match(m: Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n debug!(\"get_type_of match {:?} \", m);\n\n match m.mtype {\n core::MatchType::Let => get_type_of_let_expr(&m, msrc, session),\n core::MatchType::IfLet => get_type_of_let_block_expr(&m, msrc, session, \"if let\"),\n core::MatchType::WhileLet => get_type_of_let_block_expr(&m, msrc, session, \"while let\"),\n core::MatchType::For => get_type_of_for_expr(&m, msrc, session),\n core::MatchType::FnArg => get_type_of_fnarg(&m, msrc, session),\n core::MatchType::MatchArm => get_type_from_match_arm(&m, msrc, session),\n core::MatchType::Struct |\n 
core::MatchType::Enum |\n core::MatchType::Function |\n core::MatchType::Module => Some(core::Ty::Match(m)),\n _ => { debug!(\"!!! WARNING !!! Can't get type of {:?}\", m.mtype); None }\n }\n}\n\nmacro_rules! otry {\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n}\n\npub fn get_type_from_match_arm(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n \/\/ We construct a faux match stmt and then parse it. This is because the\n \/\/ match stmt may be incomplete (half written) in the real code\n\n \/\/ skip to end of match arm pattern so we can search backwards\n let arm = otry!((&msrc[m.point..]).find(\"=>\")) + m.point;\n let scopestart = scopes::scope_start(msrc, arm);\n\n let stmtstart = otry!(scopes::find_stmt_start(msrc, scopestart-1));\n debug!(\"PHIL preblock is {} {}\", stmtstart, scopestart);\n let preblock = &msrc[stmtstart..scopestart];\n let matchstart = otry!(preblock.rfind(\"match \")) + stmtstart;\n\n let lhs_start = scopes::get_start_of_pattern(&msrc, arm);\n let lhs = &msrc[lhs_start..arm];\n \/\/ construct faux match statement and recreate point\n let mut fauxmatchstmt = (&msrc[matchstart..scopestart]).to_owned();\n let faux_prefix_size = fauxmatchstmt.len();\n fauxmatchstmt = fauxmatchstmt + lhs + \" => () };\";\n let faux_point = faux_prefix_size + (m.point - lhs_start);\n\n debug!(\"fauxmatchstmt for parsing is pt:{} src:|{}|\", faux_point, fauxmatchstmt);\n\n ast::get_match_arm_type(fauxmatchstmt, faux_point,\n \/\/ scope is used to locate expression, so send\n \/\/ it the start of the match expr\n Scope {\n filepath: m.filepath.clone(),\n point: matchstart,\n }, session)\n}\n\npub fn get_function_declaration(fnmatch: &Match, session: &Session) -> String {\n let src = session.load_file(&fnmatch.filepath);\n let start = scopes::find_stmt_start(src.as_src(), fnmatch.point).unwrap();\n let end = (&src[start..]).find('{').unwrap();\n (&src[start..end+start]).to_owned()\n}\n\npub fn get_return_type_of_function(fnmatch: &Match, session: &Session) -> Option<core::Ty> {\n let src = session.load_file(&fnmatch.filepath);\n let point = scopes::find_stmt_start(src.as_src(), fnmatch.point).unwrap();\n (&src[point..]).find(\"{\").and_then(|n| {\n \/\/ wrap in \"impl blah { }\" so that methods get parsed correctly too\n let mut decl = String::new();\n decl.push_str(\"impl blah {\");\n decl.push_str(&src[point..(point+n+1)]);\n decl.push_str(\"}}\");\n debug!(\"get_return_type_of_function: passing in |{}|\", decl);\n ast::parse_fn_output(decl, Scope::from_match(fnmatch))\n })\n}\n<commit_msg>[master] Early return on first generic close.<commit_after>\/\/ Type inference\n\nuse core::{Match, Src, Scope, Session};\nuse nameres::resolve_path_with_str;\nuse core::Namespace::TypeNamespace;\nuse core;\nuse ast;\nuse scopes;\nuse matchers;\nuse core::SearchType::ExactMatch;\nuse util::txt_matches;\n\nfn find_start_of_function_body(src: &str) -> usize {\n \/\/ TODO: this should ignore anything inside parens so as to skip the arg list\n src.find('{').unwrap()\n}\n\n\/\/ Removes the body of the statement (anything in the braces {...}), leaving just\n\/\/ the header\n\/\/ TODO: this should skip parens (e.g. 
function arguments)\npub fn generate_skeleton_for_parsing(src: &str) -> String {\n let mut s = String::new();\n let n = src.find('{').unwrap();\n s.push_str(&src[..n+1]);\n s.push_str(\"};\");\n s\n}\n\npub fn first_param_is_self(blob: &str) -> bool {\n \/\/ skip generic arg\n \/\/ consider 'pub fn map<U, F: FnOnce(T) -> U>(self, f: F)'\n \/\/ we have to match the '>'\n match blob.find('(') {\n None => false,\n Some(probable_param_start) => {\n let skip_generic = match blob.find('<') {\n None => 0,\n Some(generic_start) if generic_start < probable_param_start => {\n let mut level = 0;\n let mut prev = ' ';\n let mut skip_generic = 0;\n for (i, c) in blob[generic_start..].char_indices() {\n match c {\n '<' => level += 1,\n '>' if prev == '-' => (),\n '>' => level -= 1,\n _ => (),\n }\n prev = c;\n if level == 0 {\n skip_generic = i;\n break;\n }\n }\n skip_generic\n },\n Some(..) => 0,\n };\n while let Some(start) = blob[skip_generic..].find('(') {\n let end = scopes::find_closing_paren(blob, start + 1);\n let is_self = txt_matches(ExactMatch, \"self\", &blob[(start + 1)..end]);\n debug!(\"searching fn args: |{}| {}\",\n &blob[(start + 1)..end],\n is_self);\n return is_self;\n }\n false\n }\n }\n}\n\n#[test]\nfn generates_skeleton_for_mod() {\n let src = \"mod foo { blah };\";\n let out = generate_skeleton_for_parsing(src);\n assert_eq!(\"mod foo {};\", out);\n}\n\nfn get_type_of_self_arg(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n debug!(\"get_type_of_self_arg {:?}\", m);\n scopes::find_impl_start(msrc, m.point, 0).and_then(|start| {\n let decl = generate_skeleton_for_parsing(&msrc.from(start));\n debug!(\"get_type_of_self_arg impl skeleton |{}|\", decl);\n\n if decl.starts_with(\"impl\") {\n let implres = ast::parse_impl(decl);\n debug!(\"get_type_of_self_arg implres |{:?}|\", implres);\n resolve_path_with_str(&implres.name_path.expect(\"failed parsing impl name\"),\n &m.filepath, start,\n ExactMatch, TypeNamespace,\n session).nth(0).map(core::Ty::Match)\n } else {\n \/\/ \/\/ must be a trait\n ast::parse_trait(decl).name.and_then(|name| {\n Some(core::Ty::Match(Match {\n matchstr: name,\n filepath: m.filepath.clone(),\n point: start,\n local: m.local,\n mtype: core::MatchType::Trait,\n contextstr: matchers::first_line(&msrc[start..]),\n generic_args: Vec::new(),\n generic_types: Vec::new(),\n docs: String::new(),\n }))\n })\n }\n })\n}\n\nfn get_type_of_fnarg(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n if m.matchstr == \"self\" {\n return get_type_of_self_arg(m, msrc, session);\n }\n\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let block = msrc.from(stmtstart);\n if let Some((start, end)) = block.iter_stmts().next() {\n let blob = &msrc[(stmtstart+start)..(stmtstart+end)];\n \/\/ wrap in \"impl blah { }\" so that methods get parsed correctly too\n let mut s = String::new();\n s.push_str(\"impl blah {\");\n let impl_header_len = s.len();\n s.push_str(&blob[..(find_start_of_function_body(blob)+1)]);\n s.push_str(\"}}\");\n let argpos = m.point - (stmtstart+start) + impl_header_len;\n return ast::parse_fn_arg_type(s, argpos, Scope::from_match(m), session);\n }\n None\n}\n\nfn get_type_of_let_expr(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n \/\/ ASSUMPTION: this is being called on a let decl\n let point = scopes::find_stmt_start(msrc, m.point).unwrap();\n let src = msrc.from(point);\n\n if let Some((start, end)) = src.iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_let_expr 
calling parse_let |{}|\", blob);\n\n let pos = m.point - point - start;\n let scope = Scope{ filepath: m.filepath.clone(), point: m.point };\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\nfn get_type_of_let_block_expr(m: &Match, msrc: Src, session: &Session, prefix: &str) -> Option<core::Ty> {\n \/\/ ASSUMPTION: this is being called on an if let or while let decl\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let stmt = msrc.from(stmtstart);\n let point = stmt.find(prefix).unwrap();\n let src = core::new_source(generate_skeleton_for_parsing(&stmt[point..]));\n\n if let Some((start, end)) = src.as_src().iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_let_block_expr calling get_let_type |{}|\", blob);\n\n let pos = m.point - stmtstart - point - start;\n let scope = Scope{ filepath: m.filepath.clone(), point: m.point };\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\nfn get_type_of_for_expr(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n let stmtstart = scopes::find_stmt_start(msrc, m.point).unwrap();\n let stmt = msrc.from(stmtstart);\n let forpos = stmt.find(\"for \").unwrap();\n let inpos = stmt.find(\" in \").unwrap();\n \/\/ XXX: this need not be the correct brace, see generate_skeleton_for_parsing\n let bracepos = stmt.find('{').unwrap();\n let mut src = stmt[..forpos].to_owned();\n src.push_str(\"if let Some(\");\n src.push_str(&stmt[forpos+4..inpos]);\n src.push_str(\") = \");\n src.push_str(&stmt[inpos+4..bracepos]);\n src.push_str(\".into_iter().next() { }}\");\n let src = core::new_source(src);\n\n if let Some((start, end)) = src.as_src().iter_stmts().next() {\n let blob = &src[start..end];\n debug!(\"get_type_of_for_expr: |{}| {} {} {} {}\", blob, m.point, stmtstart, forpos, start);\n\n let pos = m.point - stmtstart - forpos - start;\n let scope = Scope{ filepath: m.filepath.clone(), point: m.point };\n\n ast::get_let_type(blob.to_owned(), pos, scope, session)\n } else {\n None\n }\n}\n\npub fn get_struct_field_type(fieldname: &str, structmatch: &Match, session: &Session) -> Option<core::Ty> {\n assert!(structmatch.mtype == core::MatchType::Struct);\n\n let src = session.load_file(&structmatch.filepath);\n\n let opoint = scopes::find_stmt_start(src.as_src(), structmatch.point);\n let structsrc = scopes::end_of_next_scope(&src[opoint.unwrap()..]);\n\n let fields = ast::parse_struct_fields(structsrc.to_owned(), Scope::from_match(structmatch));\n for (field, _, ty) in fields.into_iter() {\n if fieldname == field {\n return ty;\n }\n }\n None\n}\n\npub fn get_tuplestruct_field_type(fieldnum: usize, structmatch: &Match, session: &Session) -> Option<core::Ty> {\n let src = session.load_file(&structmatch.filepath);\n\n let structsrc = if let core::MatchType::EnumVariant = structmatch.mtype {\n \/\/ decorate the enum variant src to make it look like a tuple struct\n let to = (&src[structmatch.point..]).find('(')\n .map(|n| scopes::find_closing_paren(&src, structmatch.point + n+1))\n .unwrap();\n \"struct \".to_owned() + &src[structmatch.point..(to+1)] + \";\"\n } else {\n assert!(structmatch.mtype == core::MatchType::Struct);\n let opoint = scopes::find_stmt_start(src.as_src(), structmatch.point);\n (*get_first_stmt(src.as_src().from(opoint.unwrap()))).to_owned()\n };\n\n debug!(\"get_tuplestruct_field_type structsrc=|{}|\", structsrc);\n\n let fields = ast::parse_struct_fields(structsrc, Scope::from_match(structmatch));\n\n for (i, (_, _, ty)) in 
fields.into_iter().enumerate() {\n if i == fieldnum {\n return ty;\n }\n }\n None\n}\n\npub fn get_first_stmt(src: Src) -> Src {\n match src.iter_stmts().next() {\n Some((from, to)) => src.from_to(from, to),\n None => src\n }\n}\n\npub fn get_type_of_match(m: Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n debug!(\"get_type_of match {:?} \", m);\n\n match m.mtype {\n core::MatchType::Let => get_type_of_let_expr(&m, msrc, session),\n core::MatchType::IfLet => get_type_of_let_block_expr(&m, msrc, session, \"if let\"),\n core::MatchType::WhileLet => get_type_of_let_block_expr(&m, msrc, session, \"while let\"),\n core::MatchType::For => get_type_of_for_expr(&m, msrc, session),\n core::MatchType::FnArg => get_type_of_fnarg(&m, msrc, session),\n core::MatchType::MatchArm => get_type_from_match_arm(&m, msrc, session),\n core::MatchType::Struct |\n core::MatchType::Enum |\n core::MatchType::Function |\n core::MatchType::Module => Some(core::Ty::Match(m)),\n _ => { debug!(\"!!! WARNING !!! Can't get type of {:?}\", m.mtype); None }\n }\n}\n\nmacro_rules! otry {\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n}\n\npub fn get_type_from_match_arm(m: &Match, msrc: Src, session: &Session) -> Option<core::Ty> {\n \/\/ We construct a faux match stmt and then parse it. This is because the\n \/\/ match stmt may be incomplete (half written) in the real code\n\n \/\/ skip to end of match arm pattern so we can search backwards\n let arm = otry!((&msrc[m.point..]).find(\"=>\")) + m.point;\n let scopestart = scopes::scope_start(msrc, arm);\n\n let stmtstart = otry!(scopes::find_stmt_start(msrc, scopestart-1));\n debug!(\"PHIL preblock is {} {}\", stmtstart, scopestart);\n let preblock = &msrc[stmtstart..scopestart];\n let matchstart = otry!(preblock.rfind(\"match \")) + stmtstart;\n\n let lhs_start = scopes::get_start_of_pattern(&msrc, arm);\n let lhs = &msrc[lhs_start..arm];\n \/\/ construct faux match statement and recreate point\n let mut fauxmatchstmt = (&msrc[matchstart..scopestart]).to_owned();\n let faux_prefix_size = fauxmatchstmt.len();\n fauxmatchstmt = fauxmatchstmt + lhs + \" => () };\";\n let faux_point = faux_prefix_size + (m.point - lhs_start);\n\n debug!(\"fauxmatchstmt for parsing is pt:{} src:|{}|\", faux_point, fauxmatchstmt);\n\n ast::get_match_arm_type(fauxmatchstmt, faux_point,\n \/\/ scope is used to locate expression, so send\n \/\/ it the start of the match expr\n Scope {\n filepath: m.filepath.clone(),\n point: matchstart,\n }, session)\n}\n\npub fn get_function_declaration(fnmatch: &Match, session: &Session) -> String {\n let src = session.load_file(&fnmatch.filepath);\n let start = scopes::find_stmt_start(src.as_src(), fnmatch.point).unwrap();\n let end = (&src[start..]).find('{').unwrap();\n (&src[start..end+start]).to_owned()\n}\n\npub fn get_return_type_of_function(fnmatch: &Match, session: &Session) -> Option<core::Ty> {\n let src = session.load_file(&fnmatch.filepath);\n let point = scopes::find_stmt_start(src.as_src(), fnmatch.point).unwrap();\n (&src[point..]).find(\"{\").and_then(|n| {\n \/\/ wrap in \"impl blah { }\" so that methods get parsed correctly too\n let mut decl = String::new();\n decl.push_str(\"impl blah {\");\n decl.push_str(&src[point..(point+n+1)]);\n decl.push_str(\"}}\");\n debug!(\"get_return_type_of_function: passing in |{}|\", decl);\n ast::parse_fn_output(decl, Scope::from_match(fnmatch))\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add decode tests for coprocessor transfer 
operations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix AST module<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>json related changes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`IXAudio2SubmixVoice`, `IXAudio2MasteringVoice`, `IXAudio2EngineCallback`, `IXAudio2VoiceCallback`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix warnings<commit_after><|endoftext|>"} {"text":"<commit_before>use js;\nuse js::rust::Compartment;\nuse js::{JS_ARGV, JSCLASS_HAS_RESERVED_SLOTS, JSPROP_ENUMERATE, JSPROP_SHARED, JSVAL_NULL,\n JS_THIS_OBJECT, JS_SET_RVAL};\nuse js::jsapi::{JSContext, JSVal, JSObject, JSBool, jsid, JSClass, JSFreeOp, JSNative,\n JSFunctionSpec, JSPropertySpec, JSVal};\nuse js::jsapi::bindgen::{JS_ValueToString, JS_GetStringCharsZAndLength, JS_ReportError,\n JS_GetReservedSlot, JS_SetReservedSlot, JS_NewStringCopyN,\n JS_DefineFunctions, JS_DefineProperty, JS_GetContextPrivate,\n JS_GetClass, JS_GetPrototype};\nuse js::glue::{PROPERTY_STUB, STRICT_PROPERTY_STUB, ENUMERATE_STUB, CONVERT_STUB,\n RESOLVE_STUB};\nuse js::glue::bindgen::*;\nuse core::ptr::null;\nuse core::cast;\nuse content::content_task::{Content, task_from_context};\n\npub enum DOMString {\n str(~str),\n null_string\n}\n\npub struct rust_box<T> {\n rc: uint,\n td: *sys::TypeDesc,\n next: *(),\n prev: *(),\n payload: T\n}\n\npub unsafe fn unwrap<T>(obj: *JSObject) -> T {\n let val = JS_GetReservedSlot(obj, 0);\n cast::reinterpret_cast(&RUST_JSVAL_TO_PRIVATE(val))\n}\n\npub unsafe fn squirrel_away<T>(x: @T) -> *rust_box<T> {\n let y: *rust_box<T> = cast::reinterpret_cast(&x);\n cast::forget(x);\n y\n}\n\npub unsafe fn squirrel_away_unique<T>(x: ~T) -> *rust_box<T> {\n let y: *rust_box<T> = cast::reinterpret_cast(&x);\n cast::forget(x);\n y\n}\n\n\/\/XXX very incomplete\npub fn jsval_to_str(cx: *JSContext, v: JSVal) -> Result<~str, ()> {\n let jsstr;\n if RUST_JSVAL_IS_STRING(v) == 1 {\n jsstr = RUST_JSVAL_TO_STRING(v)\n } else {\n jsstr = JS_ValueToString(cx, v);\n if jsstr.is_null() {\n return Err(());\n }\n }\n\n let len = 0;\n let chars = JS_GetStringCharsZAndLength(cx, jsstr, ptr::to_unsafe_ptr(&len));\n return if chars.is_null() {\n Err(())\n } else {\n unsafe {\n let buf = vec::raw::from_buf_raw(chars as *u8, len as uint);\n Ok(str::from_bytes(buf))\n }\n }\n}\n\npub unsafe fn domstring_to_jsval(cx: *JSContext, string: &DOMString) -> JSVal {\n match string {\n &null_string => {\n JSVAL_NULL\n }\n &str(ref s) => {\n str::as_buf(*s, |buf, len| {\n let cbuf = cast::reinterpret_cast(&buf);\n RUST_STRING_TO_JSVAL(JS_NewStringCopyN(cx, cbuf, len as libc::size_t))\n })\n }\n }\n}\n\npub fn get_compartment(cx: *JSContext) -> @mut Compartment {\n unsafe {\n let content = task_from_context(cx);\n let compartment = option::expect((*content).compartment,\n ~\"Should always have compartment when \\\n executing JS code\");\n fail_unless!(cx == compartment.cx.ptr);\n compartment\n }\n}\n\nextern fn has_instance(_cx: *JSContext, obj: **JSObject, v: *JSVal, bp: *mut JSBool) -> JSBool {\n \/\/XXXjdm this is totally broken for non-object values\n let mut o = RUST_JSVAL_TO_OBJECT(unsafe {*v});\n let obj = unsafe {*obj};\n unsafe { *bp = 0; }\n while o.is_not_null() {\n if o == obj {\n unsafe { *bp = 1; }\n break;\n }\n o = JS_GetPrototype(o);\n }\n return 1;\n}\n\npub fn prototype_jsclass(name: ~str) -> @fn(compartment: @mut Compartment) -> JSClass {\n let f: @fn(@mut Compartment) -> JSClass = |compartment: @mut Compartment| {\n JSClass {\n name: 
compartment.add_name(copy name),\n flags: 0,\n addProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n delProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n getProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n setProperty: GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n enumerate: GetJSClassHookStubPointer(ENUMERATE_STUB) as *u8,\n resolve: GetJSClassHookStubPointer(RESOLVE_STUB) as *u8,\n convert: GetJSClassHookStubPointer(CONVERT_STUB) as *u8,\n finalize: null(),\n checkAccess: null(),\n call: null(),\n hasInstance: has_instance,\n construct: null(),\n trace: null(),\n reserved: (null(), null(), null(), null(), null(), \/\/ 05\n null(), null(), null(), null(), null(), \/\/ 10\n null(), null(), null(), null(), null(), \/\/ 15\n null(), null(), null(), null(), null(), \/\/ 20\n null(), null(), null(), null(), null(), \/\/ 25\n null(), null(), null(), null(), null(), \/\/ 30\n null(), null(), null(), null(), null(), \/\/ 35\n null(), null(), null(), null(), null()) \/\/ 40\n }\n };\n return f;\n}\n\npub fn instance_jsclass(name: ~str, finalize: *u8)\n -> @fn(compartment: @mut Compartment) -> JSClass {\n let f: @fn(@mut Compartment) -> JSClass = |compartment: @mut Compartment| {\n JSClass {\n name: compartment.add_name(copy name),\n flags: JSCLASS_HAS_RESERVED_SLOTS(1),\n addProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n delProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n getProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n setProperty: GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n enumerate: GetJSClassHookStubPointer(ENUMERATE_STUB) as *u8,\n resolve: GetJSClassHookStubPointer(RESOLVE_STUB) as *u8,\n convert: GetJSClassHookStubPointer(CONVERT_STUB) as *u8,\n finalize: finalize,\n checkAccess: null(),\n call: null(),\n hasInstance: has_instance,\n construct: null(),\n trace: null(),\n reserved: (null(), null(), null(), null(), null(), \/\/ 05\n null(), null(), null(), null(), null(), \/\/ 10\n null(), null(), null(), null(), null(), \/\/ 15\n null(), null(), null(), null(), null(), \/\/ 20\n null(), null(), null(), null(), null(), \/\/ 25\n null(), null(), null(), null(), null(), \/\/ 30\n null(), null(), null(), null(), null(), \/\/ 35\n null(), null(), null(), null(), null()) \/\/ 40\n }\n };\n return f;\n}\n\n\/\/ FIXME: A lot of string copies here\npub fn define_empty_prototype(name: ~str, proto: Option<~str>, compartment: @mut Compartment)\n -> js::rust::jsobj {\n compartment.register_class(prototype_jsclass(copy name));\n\n \/\/TODO error checking\n let obj = result::unwrap(\n match proto {\n Some(s) => compartment.new_object_with_proto(copy name,\n s, \n compartment.global_obj.ptr),\n None => compartment.new_object(copy name, null(), compartment.global_obj.ptr)\n });\n\n compartment.define_property(copy name, RUST_OBJECT_TO_JSVAL(obj.ptr),\n GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n JSPROP_ENUMERATE);\n compartment.stash_global_proto(name, obj);\n return obj;\n}\n\n\/\/ We use slot 0 for holding the raw object. This is safe for both\n\/\/ globals and non-globals.\nconst DOM_OBJECT_SLOT: uint = 0;\n\n\/\/ NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and\n\/\/ LSetDOMProperty. Those constants need to be changed accordingly if this value\n\/\/ changes.\nconst DOM_PROTO_INSTANCE_CLASS_SLOT: u32 = 0;\n\n\/\/ All DOM globals must have a slot at DOM_PROTOTYPE_SLOT. 
We have to\n\/\/ start at 1 past JSCLASS_GLOBAL_SLOT_COUNT because XPConnect uses\n\/\/ that one.\nconst DOM_PROTOTYPE_SLOT: u32 = js::JSCLASS_GLOBAL_SLOT_COUNT + 1;\n\n\/\/ NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and\n\/\/ LSetDOMProperty. Those constants need to be changed accordingly if this value\n\/\/ changes.\nconst JSCLASS_DOM_GLOBAL: u32 = js::JSCLASS_USERBIT1;\n\npub struct NativeProperties {\n staticMethods: *JSFunctionSpec,\n staticMethodIds: *jsid,\n staticMethodsSpecs: *JSFunctionSpec,\n staticAttributes: *JSPropertySpec,\n staticAttributeIds: *jsid,\n staticAttributeSpecs: *JSPropertySpec,\n methods: *JSFunctionSpec,\n methodIds: *jsid,\n methodsSpecs: *JSFunctionSpec,\n attributes: *JSPropertySpec,\n attributeIds: *jsid,\n attributeSpecs: *JSPropertySpec,\n unforgeableAttributes: *JSPropertySpec,\n unforgeableAttributeIds: *jsid,\n unforgeableAttributeSpecs: *JSPropertySpec,\n constants: *ConstantSpec,\n constantIds: *jsid,\n constantSpecs: *ConstantSpec\n}\n\npub struct NativePropertyHooks {\n resolve_own_property: *u8,\n resolve_property: *u8,\n enumerate_own_properties: *u8,\n enumerate_properties: *u8,\n proto_hooks: *NativePropertyHooks\n}\n\npub struct JSNativeHolder {\n native: js::jsapi::JSNative,\n propertyHooks: *NativePropertyHooks\n}\n\npub struct ConstantSpec {\n name: &str,\n value: JSVal\n}\n\npub struct DOMClass {\n \/\/ A list of interfaces that this object implements, in order of decreasing\n \/\/ derivedness.\n interface_chain: [prototypes::id::Prototype * 1 \/*prototypes::id::_ID_Count*\/],\n\n unused: bool, \/\/ DOMObjectIsISupports (always false)\n native_hooks: *NativePropertyHooks\n}\n\npub struct DOMJSClass {\n base: JSClass,\n dom_class: DOMClass\n}\n\nfn GetProtoOrIfaceArray(global: *JSObject) -> **JSObject {\n unsafe {\n assert ((*JS_GetClass(global)).flags & JSCLASS_DOM_GLOBAL) != 0;\n cast::reinterpret_cast(&JS_GetReservedSlot(global, DOM_PROTOTYPE_SLOT))\n }\n}\n\nmod prototypes {\n mod id {\n pub enum Prototype {\n ClientRect,\n _ID_Count\n }\n }\n}\n\npub fn CreateInterfaceObjects2(cx: *JSContext, global: *JSObject, receiver: *JSObject,\n protoProto: *JSObject, protoClass: *JSClass,\n constructorClass: *JSClass, constructor: JSNative,\n ctorNargs: uint,\n domClass: *DOMClass,\n methods: *JSFunctionSpec,\n properties: *JSPropertySpec,\n constants: *ConstantSpec,\n staticMethods: *JSFunctionSpec,\n name: &str) -> *JSObject {\n unsafe {\n let mut proto = ptr::null();\n if protoClass.is_not_null() {\n proto = \/*CreateInterfacePrototypeObject(cx, global, protoProto,\n protoClass,\n regularProperties,\n chromeOnlyProperties);*\/ptr::null();\n if proto.is_null() {\n return ptr::null();\n }\n \n JS_SetReservedSlot(proto, DOM_PROTO_INSTANCE_CLASS_SLOT,\n RUST_PRIVATE_TO_JSVAL(domClass as *libc::c_void));\n }\n\n let mut interface = ptr::null();\n if constructorClass.is_not_null() || constructor.is_not_null() {\n interface = do str::as_c_str(name) |s| {\n \/*CreateInterfaceObject(cx, global, constructorClass, constructor,\n ctorNargs, proto, properties,\n chromeOnlyProperties, s)*\/ptr::null()\n };\n if interface.is_null() {\n return ptr::null();\n }\n }\n\n if protoClass.is_not_null() {\n proto\n } else {\n interface\n }\n }\n}\n\npub extern fn ThrowingConstructor(cx: *JSContext, argc: uint, vp: *JSVal) -> JSBool {\n \/\/XXX should trigger exception here\n return 0;\n}<commit_msg>Hook up interface and prototype object creation.<commit_after>use js;\nuse js::rust::Compartment;\nuse js::{JS_ARGV, 
JSCLASS_HAS_RESERVED_SLOTS, JSPROP_ENUMERATE, JSPROP_SHARED, JSVAL_NULL,\n JS_THIS_OBJECT, JS_SET_RVAL, JSFUN_CONSTRUCTOR, JS_CALLEE};\nuse js::jsapi::{JSContext, JSVal, JSObject, JSBool, jsid, JSClass, JSFreeOp, JSNative,\n JSFunctionSpec, JSPropertySpec, JSVal, JSString};\nuse js::jsapi::bindgen::{JS_ValueToString, JS_GetStringCharsZAndLength, JS_ReportError,\n JS_GetReservedSlot, JS_SetReservedSlot, JS_NewStringCopyN,\n JS_DefineFunctions, JS_DefineProperty, JS_GetContextPrivate,\n JS_GetClass, JS_GetPrototype, JS_LinkConstructorAndPrototype,\n JS_AlreadyHasOwnProperty, JS_NewObject, JS_NewFunction,\n JS_GetFunctionPrototype, JS_InternString, JS_GetFunctionObject,\n JS_GetInternedStringCharsAndLength};\nuse js::jsfriendapi::bindgen::{DefineFunctionWithReserved, GetObjectJSClass,\n JS_NewObjectWithUniqueType};\nuse js::glue::{PROPERTY_STUB, STRICT_PROPERTY_STUB, ENUMERATE_STUB, CONVERT_STUB,\n RESOLVE_STUB};\nuse js::glue::bindgen::*;\nuse core::ptr::null;\nuse core::cast;\nuse content::content_task::{Content, task_from_context};\n\nconst TOSTRING_CLASS_RESERVED_SLOT: u64 = 0;\nconst TOSTRING_NAME_RESERVED_SLOT: u64 = 1;\n\nextern fn InterfaceObjectToString(cx: *JSContext, argc: uint, vp: *mut JSVal) -> JSBool {\n unsafe {\n let callee = RUST_JSVAL_TO_OBJECT(*JS_CALLEE(cx, cast::transmute(&vp)));\n let obj = JS_THIS_OBJECT(cx, cast::transmute(&vp));\n if obj.is_null() {\n \/\/XXXjdm figure out JSMSG madness\n \/*JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CANT_CONVERT_TO,\n \"null\", \"object\");*\/\n return 0;\n }\n\n let v = GetFunctionNativeReserved(callee, TOSTRING_CLASS_RESERVED_SLOT);\n let clasp: *JSClass = cast::reinterpret_cast(&RUST_JSVAL_TO_PRIVATE(*v));\n\n let v = GetFunctionNativeReserved(callee, TOSTRING_NAME_RESERVED_SLOT);\n let jsname: *JSString = RUST_JSVAL_TO_STRING(*v);\n let length = 0;\n let name = JS_GetInternedStringCharsAndLength(jsname, &length);\n\n if GetObjectJSClass(obj) != clasp {\n \/\/XXXjdm figure out JSMSG madness\n \/*JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_INCOMPATIBLE_PROTO,\n NS_ConvertUTF16toUTF8(name).get(), \"toString\",\n \"object\");*\/\n return 0;\n }\n\n let name = jsval_to_str(cx, *v).get();\n let retval = str(~\"function \" + name + ~\"() {\\n [native code]\\n}\");\n *vp = domstring_to_jsval(cx, &retval);\n return 1;\n }\n}\n\npub enum DOMString {\n str(~str),\n null_string\n}\n\npub struct rust_box<T> {\n rc: uint,\n td: *sys::TypeDesc,\n next: *(),\n prev: *(),\n payload: T\n}\n\npub unsafe fn unwrap<T>(obj: *JSObject) -> T {\n let val = JS_GetReservedSlot(obj, 0);\n cast::reinterpret_cast(&RUST_JSVAL_TO_PRIVATE(val))\n}\n\npub unsafe fn squirrel_away<T>(x: @T) -> *rust_box<T> {\n let y: *rust_box<T> = cast::reinterpret_cast(&x);\n cast::forget(x);\n y\n}\n\npub unsafe fn squirrel_away_unique<T>(x: ~T) -> *rust_box<T> {\n let y: *rust_box<T> = cast::reinterpret_cast(&x);\n cast::forget(x);\n y\n}\n\n\/\/XXX very incomplete\npub fn jsval_to_str(cx: *JSContext, v: JSVal) -> Result<~str, ()> {\n let jsstr;\n if RUST_JSVAL_IS_STRING(v) == 1 {\n jsstr = RUST_JSVAL_TO_STRING(v)\n } else {\n jsstr = JS_ValueToString(cx, v);\n if jsstr.is_null() {\n return Err(());\n }\n }\n\n let len = 0;\n let chars = JS_GetStringCharsZAndLength(cx, jsstr, ptr::to_unsafe_ptr(&len));\n return if chars.is_null() {\n Err(())\n } else {\n unsafe {\n let buf = vec::raw::from_buf_raw(chars as *u8, len as uint);\n Ok(str::from_bytes(buf))\n }\n }\n}\n\npub unsafe fn domstring_to_jsval(cx: *JSContext, string: &DOMString) -> JSVal {\n 
match string {\n &null_string => {\n JSVAL_NULL\n }\n &str(ref s) => {\n str::as_buf(*s, |buf, len| {\n let cbuf = cast::reinterpret_cast(&buf);\n RUST_STRING_TO_JSVAL(JS_NewStringCopyN(cx, cbuf, len as libc::size_t))\n })\n }\n }\n}\n\npub fn get_compartment(cx: *JSContext) -> @mut Compartment {\n unsafe {\n let content = task_from_context(cx);\n let compartment = option::expect((*content).compartment,\n ~\"Should always have compartment when \\\n executing JS code\");\n fail_unless!(cx == compartment.cx.ptr);\n compartment\n }\n}\n\nextern fn has_instance(_cx: *JSContext, obj: **JSObject, v: *JSVal, bp: *mut JSBool) -> JSBool {\n \/\/XXXjdm this is totally broken for non-object values\n let mut o = RUST_JSVAL_TO_OBJECT(unsafe {*v});\n let obj = unsafe {*obj};\n unsafe { *bp = 0; }\n while o.is_not_null() {\n if o == obj {\n unsafe { *bp = 1; }\n break;\n }\n o = JS_GetPrototype(o);\n }\n return 1;\n}\n\npub fn prototype_jsclass(name: ~str) -> @fn(compartment: @mut Compartment) -> JSClass {\n let f: @fn(@mut Compartment) -> JSClass = |compartment: @mut Compartment| {\n JSClass {\n name: compartment.add_name(copy name),\n flags: 0,\n addProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n delProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n getProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n setProperty: GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n enumerate: GetJSClassHookStubPointer(ENUMERATE_STUB) as *u8,\n resolve: GetJSClassHookStubPointer(RESOLVE_STUB) as *u8,\n convert: GetJSClassHookStubPointer(CONVERT_STUB) as *u8,\n finalize: null(),\n checkAccess: null(),\n call: null(),\n hasInstance: has_instance,\n construct: null(),\n trace: null(),\n reserved: (null(), null(), null(), null(), null(), \/\/ 05\n null(), null(), null(), null(), null(), \/\/ 10\n null(), null(), null(), null(), null(), \/\/ 15\n null(), null(), null(), null(), null(), \/\/ 20\n null(), null(), null(), null(), null(), \/\/ 25\n null(), null(), null(), null(), null(), \/\/ 30\n null(), null(), null(), null(), null(), \/\/ 35\n null(), null(), null(), null(), null()) \/\/ 40\n }\n };\n return f;\n}\n\npub fn instance_jsclass(name: ~str, finalize: *u8)\n -> @fn(compartment: @mut Compartment) -> JSClass {\n let f: @fn(@mut Compartment) -> JSClass = |compartment: @mut Compartment| {\n JSClass {\n name: compartment.add_name(copy name),\n flags: JSCLASS_HAS_RESERVED_SLOTS(1),\n addProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n delProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n getProperty: GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n setProperty: GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n enumerate: GetJSClassHookStubPointer(ENUMERATE_STUB) as *u8,\n resolve: GetJSClassHookStubPointer(RESOLVE_STUB) as *u8,\n convert: GetJSClassHookStubPointer(CONVERT_STUB) as *u8,\n finalize: finalize,\n checkAccess: null(),\n call: null(),\n hasInstance: has_instance,\n construct: null(),\n trace: null(),\n reserved: (null(), null(), null(), null(), null(), \/\/ 05\n null(), null(), null(), null(), null(), \/\/ 10\n null(), null(), null(), null(), null(), \/\/ 15\n null(), null(), null(), null(), null(), \/\/ 20\n null(), null(), null(), null(), null(), \/\/ 25\n null(), null(), null(), null(), null(), \/\/ 30\n null(), null(), null(), null(), null(), \/\/ 35\n null(), null(), null(), null(), null()) \/\/ 40\n }\n };\n return f;\n}\n\n\/\/ FIXME: A lot of string copies here\npub fn define_empty_prototype(name: ~str, proto: Option<~str>, 
compartment: @mut Compartment)\n -> js::rust::jsobj {\n compartment.register_class(prototype_jsclass(copy name));\n\n \/\/TODO error checking\n let obj = result::unwrap(\n match proto {\n Some(s) => compartment.new_object_with_proto(copy name,\n s, \n compartment.global_obj.ptr),\n None => compartment.new_object(copy name, null(), compartment.global_obj.ptr)\n });\n\n compartment.define_property(copy name, RUST_OBJECT_TO_JSVAL(obj.ptr),\n GetJSClassHookStubPointer(PROPERTY_STUB) as *u8,\n GetJSClassHookStubPointer(STRICT_PROPERTY_STUB) as *u8,\n JSPROP_ENUMERATE);\n compartment.stash_global_proto(name, obj);\n return obj;\n}\n\n\/\/ We use slot 0 for holding the raw object. This is safe for both\n\/\/ globals and non-globals.\nconst DOM_OBJECT_SLOT: uint = 0;\n\n\/\/ NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and\n\/\/ LSetDOMProperty. Those constants need to be changed accordingly if this value\n\/\/ changes.\nconst DOM_PROTO_INSTANCE_CLASS_SLOT: u32 = 0;\n\n\/\/ All DOM globals must have a slot at DOM_PROTOTYPE_SLOT. We have to\n\/\/ start at 1 past JSCLASS_GLOBAL_SLOT_COUNT because XPConnect uses\n\/\/ that one.\nconst DOM_PROTOTYPE_SLOT: u32 = js::JSCLASS_GLOBAL_SLOT_COUNT + 1;\n\n\/\/ NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and\n\/\/ LSetDOMProperty. Those constants need to be changed accordingly if this value\n\/\/ changes.\nconst JSCLASS_DOM_GLOBAL: u32 = js::JSCLASS_USERBIT1;\n\npub struct NativeProperties {\n staticMethods: *JSFunctionSpec,\n staticMethodIds: *jsid,\n staticMethodsSpecs: *JSFunctionSpec,\n staticAttributes: *JSPropertySpec,\n staticAttributeIds: *jsid,\n staticAttributeSpecs: *JSPropertySpec,\n methods: *JSFunctionSpec,\n methodIds: *jsid,\n methodsSpecs: *JSFunctionSpec,\n attributes: *JSPropertySpec,\n attributeIds: *jsid,\n attributeSpecs: *JSPropertySpec,\n unforgeableAttributes: *JSPropertySpec,\n unforgeableAttributeIds: *jsid,\n unforgeableAttributeSpecs: *JSPropertySpec,\n constants: *ConstantSpec,\n constantIds: *jsid,\n constantSpecs: *ConstantSpec\n}\n\npub struct NativePropertyHooks {\n resolve_own_property: *u8,\n resolve_property: *u8,\n enumerate_own_properties: *u8,\n enumerate_properties: *u8,\n proto_hooks: *NativePropertyHooks\n}\n\npub struct JSNativeHolder {\n native: js::jsapi::JSNative,\n propertyHooks: *NativePropertyHooks\n}\n\npub struct ConstantSpec {\n name: &str,\n value: JSVal\n}\n\npub struct DOMClass {\n \/\/ A list of interfaces that this object implements, in order of decreasing\n \/\/ derivedness.\n interface_chain: [prototypes::id::Prototype * 1 \/*prototypes::id::_ID_Count*\/],\n\n unused: bool, \/\/ DOMObjectIsISupports (always false)\n native_hooks: *NativePropertyHooks\n}\n\npub struct DOMJSClass {\n base: JSClass,\n dom_class: DOMClass\n}\n\nfn GetProtoOrIfaceArray(global: *JSObject) -> **JSObject {\n unsafe {\n assert ((*JS_GetClass(global)).flags & JSCLASS_DOM_GLOBAL) != 0;\n cast::reinterpret_cast(&JS_GetReservedSlot(global, DOM_PROTOTYPE_SLOT))\n }\n}\n\nmod prototypes {\n mod id {\n pub enum Prototype {\n ClientRect,\n _ID_Count\n }\n }\n}\n\npub fn CreateInterfaceObjects2(cx: *JSContext, global: *JSObject, receiver: *JSObject,\n protoProto: *JSObject, protoClass: *JSClass,\n constructorClass: *JSClass, constructor: JSNative,\n ctorNargs: u32,\n domClass: *DOMClass,\n methods: *JSFunctionSpec,\n properties: *JSPropertySpec,\n constants: *ConstantSpec,\n staticMethods: *JSFunctionSpec,\n name: &str) -> *JSObject {\n unsafe {\n let mut proto = ptr::null();\n 
if protoClass.is_not_null() {\n proto = CreateInterfacePrototypeObject(cx, global, protoProto,\n protoClass, methods,\n properties, constants);\n if proto.is_null() {\n return ptr::null();\n }\n \n JS_SetReservedSlot(proto, DOM_PROTO_INSTANCE_CLASS_SLOT,\n RUST_PRIVATE_TO_JSVAL(domClass as *libc::c_void));\n }\n\n let mut interface = ptr::null();\n if constructorClass.is_not_null() || constructor.is_not_null() {\n interface = do str::as_c_str(name) |s| {\n CreateInterfaceObject(cx, global, receiver, constructorClass,\n constructor, ctorNargs, proto,\n staticMethods, constants, s)\n };\n if interface.is_null() {\n return ptr::null();\n }\n }\n\n if protoClass.is_not_null() {\n proto\n } else {\n interface\n }\n }\n}\n\nfn CreateInterfaceObject(cx: *JSContext, global: *JSObject, receiver: *JSObject,\n constructorClass: *JSClass, constructorNative: JSNative,\n ctorNargs: u32, proto: *JSObject,\n staticMethods: *JSFunctionSpec,\n constants: *ConstantSpec,\n name: *libc::c_char) -> *JSObject {\n unsafe {\n let constructor = if constructorClass.is_not_null() {\n let functionProto = JS_GetFunctionPrototype(cx, global);\n if functionProto.is_null() {\n ptr::null()\n } else {\n JS_NewObject(cx, constructorClass, functionProto, global)\n }\n } else {\n assert constructorNative.is_not_null();\n let fun = JS_NewFunction(cx, constructorNative, ctorNargs,\n JSFUN_CONSTRUCTOR, global, name);\n if fun.is_null() {\n ptr::null()\n } else {\n JS_GetFunctionObject(fun)\n }\n };\n\n if constructor.is_null() {\n return ptr::null();\n }\n\n if staticMethods.is_not_null() \/*&&\n !DefinePrefable(cx, constructor, staticMethods)*\/ {\n return ptr::null();\n }\n\n if constructorClass.is_not_null() {\n let toString = do str::as_c_str(\"toString\") |s| {\n DefineFunctionWithReserved(cx, constructor, s,\n InterfaceObjectToString,\n 0, 0)\n };\n if toString.is_null() {\n return ptr::null();\n }\n\n let toStringObj = JS_GetFunctionObject(toString);\n SetFunctionNativeReserved(toStringObj, TOSTRING_CLASS_RESERVED_SLOT,\n &RUST_PRIVATE_TO_JSVAL(constructorClass as *libc::c_void));\n let s = JS_InternString(cx, name);\n if s.is_null() {\n return ptr::null();\n }\n SetFunctionNativeReserved(toStringObj, TOSTRING_NAME_RESERVED_SLOT,\n &RUST_STRING_TO_JSVAL(s));\n }\n\n if constants.is_not_null() \/*&&\n !DefinePrefable(cx, constructor, constants)*\/ {\n return ptr::null();\n }\n\n if proto.is_not_null() && JS_LinkConstructorAndPrototype(cx, constructor, proto) == 0 {\n return ptr::null();\n }\n\n let alreadyDefined = 0;\n if JS_AlreadyHasOwnProperty(cx, receiver, name, &alreadyDefined) == 0 {\n return ptr::null();\n }\n\n if alreadyDefined == 0 &&\n JS_DefineProperty(cx, receiver, name, RUST_OBJECT_TO_JSVAL(constructor),\n ptr::null(), ptr::null(), 0) == 0 {\n return ptr::null();\n }\n\n return constructor;\n }\n}\n\nfn CreateInterfacePrototypeObject(cx: *JSContext, global: *JSObject,\n parentProto: *JSObject, protoClass: *JSClass,\n methods: *JSFunctionSpec,\n properties: *JSPropertySpec,\n constants: *ConstantSpec) -> *JSObject {\n let ourProto = JS_NewObjectWithUniqueType(cx, protoClass, parentProto, global);\n if ourProto.is_null() {\n return ptr::null();\n }\n\n if methods.is_not_null() \/*&& !DefinePrefable(cx, ourProto, methods)*\/ {\n return ptr::null();\n }\n\n if properties.is_not_null() \/*&& !DefinePrefable(cx, ourProto, properties)*\/ {\n return ptr::null();\n }\n\n if constants.is_not_null() \/*&& !DefinePrefable(cx, ourProto, constants)*\/ {\n return ptr::null();\n }\n\n return ourProto;\n}\n\npub extern fn 
ThrowingConstructor(cx: *JSContext, argc: uint, vp: *JSVal) -> JSBool {\n \/\/XXX should trigger exception here\n return 0;\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add more tests for whitespace control<commit_after>extern crate liquid;\n\nuse liquid::LiquidOptions;\nuse liquid::Renderable;\nuse liquid::Context;\nuse liquid::parse;\nuse std::default::Default;\n\nmacro_rules! compare {\n ($input:expr, $output:expr) => {\n let input = $input.replace(\"…\", \" \");\n let expected = $output.replace(\"…\", \" \");\n let options: LiquidOptions = Default::default();\n let template = parse(&input, options).unwrap();\n\n let mut data = Context::new();\n let output = template.render(&mut data);\n assert_eq!(output.unwrap(), Some(expected));\n }\n}\n\n#[test]\npub fn no_whitespace_control() {\n compare!(\"\ntopic1\n……{% assign foo = \\\"bar\\\" %}\n……{% if foo %}\n…………-……{{ foo }}\n……{% endif %}\n\",\n \"\ntopic1\n……\n……\n…………-……bar\n……\n\");\n}\n\n#[test]\npub fn simple_whitespace_control() {\n compare!(\"\ntopic1\n……{% assign foo = \\\"bar\\\" -%}\n……{% if foo -%}\n…………-……{{- foo }}\n……{%- endif %}\n\",\n \"\ntopic1\n……-bar\n\");\n}\n\n#[test]\npub fn double_sided_whitespace_control() {\n compare!(\"\ntopic1\n……{%- assign foo = \\\"bar\\\" -%}\n……-……{{- foo -}}……\n\n\",\n \"\ntopic1-bar\\\n\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adding a test case for #2548<commit_after>\/\/ A test case for #2548.\n\n\/\/ xfail-test\n\nstruct foo {\n x: @mut int;\n\n new(x: @mut int) { self.x = x; }\n\n drop {\n io::println(\"Goodbye, World!\");\n *self.x += 1;\n }\n}\n\nfn main() {\n let x = @mut 0;\n\n {\n let mut res = foo(x);\n \n let mut v = ~[mut];\n v <- ~[mut res] + v;\n }\n\n assert *x == 1;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test case for vec growth<commit_after>\/\/ xfail-stage0\n\nfn main() {\n auto v = vec(1);\n v += vec(2);\n v += vec(3);\n v += vec(4);\n v += vec(5);\n check (v.(0) == 1);\n check (v.(1) == 2);\n check (v.(2) == 3);\n check (v.(3) == 4);\n check (v.(4) == 5);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>We should expect a subscription to finish.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>some improvements<commit_after><|endoftext|>"} {"text":"<commit_before>use clap::{Arg, ArgMatches, App, SubCommand};\n\nuse tag::Tag;\n\n\/\/\/ Generates a `clap::SubCommand` to be integrated in the commandline-ui builder for building a\n\/\/\/ \"tags --add foo --remove bar\" subcommand to do tagging action.\npub fn tag_subcommand<'a, 'b>() -> App<'a, 'b> {\n SubCommand::with_name(tag_subcommand_name())\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .version(\"0.1\")\n .about(\"Add or remove tags\")\n .arg(tag_add_arg())\n .arg(tag_remove_arg())\n}\n\npub fn tag_add_arg<'a, 'b>() -> Arg<'a, 'b> {\n Arg::with_name(tag_subcommand_add_arg_name())\n .short(\"a\")\n .long(\"add\")\n .takes_value(true)\n .multiple(true)\n .help(\"Add tags, seperated by comma or by specifying multiple times\")\n}\n\npub fn tag_remove_arg<'a, 'b>() -> Arg<'a, 'b> {\n Arg::with_name(tag_subcommand_remove_arg_name())\n .short(\"r\")\n .long(\"remove\")\n .takes_value(true)\n .multiple(true)\n .help(\"Remove tags, seperated by comma or by specifying multiple times\")\n}\n\npub fn tag_subcommand_name() -> &'static str {\n \"tags\"\n}\n\npub fn tag_subcommand_add_arg_name() -> &'static str {\n \"add-tags\"\n}\n\npub fn tag_subcommand_remove_arg_name() -> &'static str {\n \"remove-tags\"\n}\n\npub fn tag_subcommand_names() 
-> Vec<&'static str> {\n vec![tag_subcommand_add_arg_name(), tag_subcommand_remove_arg_name()]\n}\n\n\/\/\/ Generates a `clap::Arg` which can be integrated into the commandline-ui builder for building a\n\/\/\/ \"-t\" or \"--tags\" argument which takes values for tagging actions (add, remove)\npub fn tag_argument<'a, 'b>() -> Arg<'a, 'b> {\n Arg::with_name(tag_argument_name())\n .short(\"t\")\n .long(\"tags\")\n .takes_value(true)\n .multiple(true)\n .help(\"Add or remove tags, prefixed by '+' (for adding) or '-' (for removing)\")\n}\n\npub fn tag_argument_name() -> &'static str {\n \"specify-tags\"\n}\n\n\/\/\/ Get the tags which should be added from the commandline\n\/\/\/\n\/\/\/ Returns none if the argument was not specified\npub fn get_add_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {\n if let Some(v) = extract_tags(matches, tag_subcommand_add_arg_name(), '+') {\n return Some(v);\n } else {\n matches\n .values_of(tag_subcommand_add_arg_name())\n .map(|values| values.map(String::from).collect())\n }\n}\n\n\/\/\/ Get the tags which should be removed from the commandline\n\/\/\/\n\/\/\/ Returns none if the argument was not specified\npub fn get_remove_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {\n if let Some(v) = extract_tags(matches, tag_subcommand_remove_arg_name(), '-') {\n return Some(v);\n } else {\n matches\n .values_of(tag_subcommand_remove_arg_name())\n .map(|values| values.map(String::from).collect())\n }\n}\n\nfn extract_tags(matches: &ArgMatches, specifier: &str, specchar: char) -> Option<Vec<Tag>> {\n if let Some(submatch) = matches.subcommand_matches(\"tags\") {\n submatch.values_of(specifier)\n .map(|values| values.map(String::from).collect())\n } else {\n matches.values_of(\"specify-tags\")\n .map(|argmatches| {\n argmatches\n .map(String::from)\n .filter(|s| s.starts_with(specchar))\n .map(|s| {\n String::from(s.split_at(1).1)\n })\n .collect()\n })\n }\n}\n\n<commit_msg>Shorten code in get_add_tags()<commit_after>use clap::{Arg, ArgMatches, App, SubCommand};\n\nuse tag::Tag;\n\n\/\/\/ Generates a `clap::SubCommand` to be integrated in the commandline-ui builder for building a\n\/\/\/ \"tags --add foo --remove bar\" subcommand to do tagging action.\npub fn tag_subcommand<'a, 'b>() -> App<'a, 'b> {\n SubCommand::with_name(tag_subcommand_name())\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .version(\"0.1\")\n .about(\"Add or remove tags\")\n .arg(tag_add_arg())\n .arg(tag_remove_arg())\n}\n\npub fn tag_add_arg<'a, 'b>() -> Arg<'a, 'b> {\n Arg::with_name(tag_subcommand_add_arg_name())\n .short(\"a\")\n .long(\"add\")\n .takes_value(true)\n .multiple(true)\n .help(\"Add tags, seperated by comma or by specifying multiple times\")\n}\n\npub fn tag_remove_arg<'a, 'b>() -> Arg<'a, 'b> {\n Arg::with_name(tag_subcommand_remove_arg_name())\n .short(\"r\")\n .long(\"remove\")\n .takes_value(true)\n .multiple(true)\n .help(\"Remove tags, seperated by comma or by specifying multiple times\")\n}\n\npub fn tag_subcommand_name() -> &'static str {\n \"tags\"\n}\n\npub fn tag_subcommand_add_arg_name() -> &'static str {\n \"add-tags\"\n}\n\npub fn tag_subcommand_remove_arg_name() -> &'static str {\n \"remove-tags\"\n}\n\npub fn tag_subcommand_names() -> Vec<&'static str> {\n vec![tag_subcommand_add_arg_name(), tag_subcommand_remove_arg_name()]\n}\n\n\/\/\/ Generates a `clap::Arg` which can be integrated into the commandline-ui builder for building a\n\/\/\/ \"-t\" or \"--tags\" argument which takes values for tagging actions (add, remove)\npub fn tag_argument<'a, 'b>() -> 
Arg<'a, 'b> {\n Arg::with_name(tag_argument_name())\n .short(\"t\")\n .long(\"tags\")\n .takes_value(true)\n .multiple(true)\n .help(\"Add or remove tags, prefixed by '+' (for adding) or '-' (for removing)\")\n}\n\npub fn tag_argument_name() -> &'static str {\n \"specify-tags\"\n}\n\n\/\/\/ Get the tags which should be added from the commandline\n\/\/\/\n\/\/\/ Returns none if the argument was not specified\npub fn get_add_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {\n let add = tag_subcommand_add_arg_name();\n extract_tags(matches, add, '+')\n .or_else(|| matches.values_of(add).map(|values| values.map(String::from).collect()))\n}\n\n\/\/\/ Get the tags which should be removed from the commandline\n\/\/\/\n\/\/\/ Returns none if the argument was not specified\npub fn get_remove_tags(matches: &ArgMatches) -> Option<Vec<Tag>> {\n if let Some(v) = extract_tags(matches, tag_subcommand_remove_arg_name(), '-') {\n return Some(v);\n } else {\n matches\n .values_of(tag_subcommand_remove_arg_name())\n .map(|values| values.map(String::from).collect())\n }\n}\n\nfn extract_tags(matches: &ArgMatches, specifier: &str, specchar: char) -> Option<Vec<Tag>> {\n if let Some(submatch) = matches.subcommand_matches(\"tags\") {\n submatch.values_of(specifier)\n .map(|values| values.map(String::from).collect())\n } else {\n matches.values_of(\"specify-tags\")\n .map(|argmatches| {\n argmatches\n .map(String::from)\n .filter(|s| s.starts_with(specchar))\n .map(|s| {\n String::from(s.split_at(1).1)\n })\n .collect()\n })\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove main.rs from top-level directory of \/rs\/trpl<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>insertionsort1<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #10372 - weihanglo:issue-9325, r=ehuss<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse io;\nuse libc::{self, c_int, size_t, c_void};\nuse mem;\nuse sys::cvt;\nuse sys_common::AsInner;\n\npub struct FileDesc {\n fd: c_int,\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = try!(cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n buf.len() as size_t)\n }));\n Ok(ret as usize)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result<usize> {\n let ret = try!(cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n buf.len() as size_t)\n }));\n Ok(ret as usize)\n }\n\n #[cfg(not(target_env = \"newlib\"))]\n pub fn set_cloexec(&self) {\n unsafe {\n let ret = libc::ioctl(self.fd, libc::FIOCLEX);\n debug_assert_eq!(ret, 0);\n }\n }\n #[cfg(target_env = \"newlib\")]\n pub fn set_cloexec(&self) {\n unsafe {\n let previous = libc::fnctl(self.fd, libc::F_GETFD);\n let ret = libc::fnctl(self.fd, libc::F_SETFD, previous | libc::FD_CLOEXEC);\n debug_assert_eq!(ret, 0);\n }\n }\n}\n\nimpl AsInner<c_int> for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ (opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\n<commit_msg>Fix a typo in `fd.rs`. Fixes #30231.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse io;\nuse libc::{self, c_int, size_t, c_void};\nuse mem;\nuse sys::cvt;\nuse sys_common::AsInner;\n\npub struct FileDesc {\n fd: c_int,\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = try!(cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n buf.len() as size_t)\n }));\n Ok(ret as usize)\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result<usize> {\n let ret = try!(cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n buf.len() as size_t)\n }));\n Ok(ret as usize)\n }\n\n #[cfg(not(target_env = \"newlib\"))]\n pub fn set_cloexec(&self) {\n unsafe {\n let ret = libc::ioctl(self.fd, libc::FIOCLEX);\n debug_assert_eq!(ret, 0);\n }\n }\n #[cfg(target_env = \"newlib\")]\n pub fn set_cloexec(&self) {\n unsafe {\n let previous = libc::fcntl(self.fd, libc::F_GETFD);\n let ret = libc::fcntl(self.fd, libc::F_SETFD, previous | libc::FD_CLOEXEC);\n debug_assert_eq!(ret, 0);\n }\n }\n}\n\nimpl AsInner<c_int> for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ (opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tidy: Use an inclusive range rather than a +1 bound<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>just use full-normalization when for the impl trait ref<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>feat: allows waiting for user input on error<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tests(fmt): removes fmt tests from Windows builds<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix some incorrect indentation in tests::archive_from_bytes_zero_length_memmap<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lib: update builder functions, add temp split<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add push_back<commit_after><|endoftext|>"} {"text":"<commit_before>\/*\nCopyright 2014 Benjamin Elder from https:\/\/github.com\/BenTheElder\/slack-rs\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n\thttp:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n#![feature(core)]\nextern crate hyper;\nextern crate websocket;\nextern crate openssl;\nextern crate \"rustc-serialize\" as rustc_serialize;\n\nuse rustc_serialize::json::{Json};\nuse std::sync::mpsc::{Sender,Receiver,channel};\nuse 
std::thread;\nuse std::io::Read;\nuse std::sync::atomic::{AtomicIsize, Ordering};\nuse websocket::Client;\npub use websocket::message::Message;\nuse websocket::Sender as WsSender;\nuse websocket::Receiver as WsReceiver;\nuse websocket::dataframe::DataFrame;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\n\npub type WsClient = Client<websocket::dataframe::DataFrame,\n websocket::client::sender::Sender<websocket::stream::WebSocketStream>,\n websocket::client::receiver::Receiver<websocket::stream::WebSocketStream>>;\n\n\n\n\/\/\/Implement this trait in your code to handle message events\npub trait MessageHandler {\n\t\/\/\/When a message is received this will be called with self, the slack client,\n\t\/\/\/and the json encoded string payload.\n\tfn on_receive(&mut self, cli: &mut RtmClient, json_str: &str);\n\n\t\/\/\/Called when a ping is received; you do NOT need to handle the reply pong,\n\t\/\/\/but you may use this event to track the connection as a keep-alive.\n\tfn on_ping(&mut self, cli: &mut RtmClient);\n\n\t\/\/\/Called when the connection is closed for any reason.\n\tfn on_close(&mut self, cli: &mut RtmClient);\n\n\t\/\/\/Called when the connection is opened.\n\tfn on_connect(&mut self, cli: &mut RtmClient);\n}\n\n\n\/\/\/Contains information about the team the bot is logged into.\npub struct Team {\n\tname : String,\n\tid : String\n}\n\nimpl Team {\n\t\/\/\/private, create empty team.\n\tfn new() -> Team {\n\t\tTeam{name: String::new(), id: String::new()}\n\t}\n\n\t\/\/\/Returns the team's name as a String\n\tpub fn get_name(&self) -> String {\n\t\tself.name.clone()\n\t}\n\n\t\/\/\/Returns the team's id as a String\n\tpub fn get_id(&self) -> String {\n\t\tself.id.clone()\n\t}\n}\n\nimpl Clone for Team {\n\tfn clone(&self) -> Self {\n\t\tTeam{\n\t\t\tname: self.name.clone(),\n\t\t\tid: self.id.clone()\n\t\t}\n\t}\n\n\tfn clone_from(&mut self, source: &Self) {\n\t\tself.name = source.name.clone();\n\t\tself.id = source.id.clone();\n\t}\n}\n\n\/\/\/The actual messaging client.\npub struct RtmClient{\n\tname : String,\n\tid : String,\n\tteam : Team,\n\tmsg_num: AtomicIsize,\n\touts : Option<Sender<Message>>\n}\n\n\/\/\/Error string. 
(FIXME: better error return values\/ custom error type)\nstatic RTM_INVALID : &'static str = \"Invalid data returned from slack (rtm.start)\";\n\n\nimpl RtmClient {\n\n\t\/\/\/Creates a new empty client.\n\tpub fn new() -> RtmClient {\n\t\tRtmClient{\n\t\t\tname : String::new(),\n\t\t\tid : String::new(),\n\t\t\tteam : Team::new(),\n\t\t\tmsg_num: AtomicIsize::new(0),\n\t\t\touts : None\n\t\t}\n\t}\n\n\n\tpub fn get_outs(&self) -> Option<Sender<Message>> {\n\t\tself.outs.clone()\n\t}\n\n\t\/\/\/Returns the name of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_name(&self) -> String {\n\t\treturn self.name.clone();\n\t}\n\n\t\/\/\/Returns the id of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_id(&self) -> String {\n\t\treturn self.id.clone();\n\t}\n\n\t\/\/\/Returns the Team struct of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_team<'a>(&'a self) -> &'a Team {\n\t\t&self.team\n\t}\n\n\t\/\/\/Returns a unique identifier to be used in the 'id' field of a message\n\t\/\/\/sent to slack.\n\tpub fn get_msg_uid(&self) -> isize {\n\t\tself.msg_num.fetch_add(1, Ordering::SeqCst)\n\t}\n\n\n\t\/\/\/Allows sending a json string message over the websocket connection.\n\t\/\/\/Note that this only passes the message over a channel to the\n\t\/\/\/Messaging task, and therfore a succesful return value does not\n\t\/\/\/mean the message has been actually put on the wire yet.\n\t\/\/\/Note that you will need to form a valid json reply yourself if you\n\t\/\/\/use this method, and you will also need to retrieve a unique id for\n\t\/\/\/the message via RtmClient.get_msg_uid()\n\t\/\/\/Only valid after login.\n\tpub fn send(&mut self, s : &str) -> Result<(),String> {\n\t\tlet tx = match self.outs {\n\t\t\tSome(ref tx) => tx,\n\t\t\tNone => return Err(\"Failed to get tx!\".to_string())\n\t\t};\n\t\tmatch tx.send(Message::Text(s.to_string())) {\n\t\t\tOk(_) => {},\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t}\n\t\tOk(())\n\t}\n\n\t\/\/\/Allows sending a textual string message over the websocket connection,\n\t\/\/\/to the requested channel id. Ideal usage would be EG:\n\t\/\/\/extract the channel in on_receive and then send back a message to the channel.\n\t\/\/\/Note that this only passes the message over a channel to the\n\t\/\/\/Messaging task, and therfore a succesful return value does not\n\t\/\/\/mean the message has been actually put on the wire yet.\n\t\/\/\/This method also handles getting a unique id and formatting the actual json\n\t\/\/\/sent.\n\t\/\/\/Only valid after login.\n\tpub fn send_message(&self, chan: &str, msg: &str) -> Result<(),String>{\n\t\tlet n = self.get_msg_uid();\n\t\tlet mstr = \"{\".to_string()+format!(r#\"\"id\": {},\"type\": \"message\",\"channel\": \"{}\",\"text\": \"{}\"\"#,n,chan,msg).as_slice()+\"}\";\n\t\tlet tx = match self.outs {\n\t\t\tSome(ref tx) => tx,\n\t\t\tNone => return Err(\"Failed to get tx!\".to_string())\n\t\t};\n\t\tmatch tx.send(Message::Text(mstr)) {\n\t\t\tOk(_) => {},\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t}\n\t\tOk(())\n\t}\n\n\t\/\/\/Logs in to slack. 
Call this before calling run.\n\t\/\/\/Alternatively use login_and_run\n\tpub fn login(&mut self, token: &str) -> Result<(WsClient,Receiver<Message>),String> {\n\t\t\/\/Slack real time api url\n\t\tlet url = \"https:\/\/slack.com\/api\/rtm.start?token=\".to_string()+token;\n\n\t\t\/\/Create http client and send request to slack\n\t\tlet mut client = hyper::Client::new();\n\t\tlet mut res = match client.get(url.as_slice()).send() {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => return Err(format!(\"Hyper Error: {:?}\", err))\n\t\t};\n\n\t\t\/\/Read result string\n\t\tlet mut res_str = String::new();\n\n\t\tmatch res.read_to_string(&mut res_str) {\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err)),\n\t\t\t_ => {},\n\t\t};\n\n\n\t\t\/\/Start parsing json. We do not map to a structure,\n\t\t\/\/because slack makes no guarantee that there won't be extra fields.\n\t\tlet js = match Json::from_str(res_str.as_slice()) {\n\t\t\tOk(js) => js,\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t};\n\n\t\tif !js.is_object() {\n\t\t\treturn Err(format!(\"{} : json is not an object.\", RTM_INVALID))\n\t\t}\n\t\tlet jo = js.as_object().unwrap();\n\n\t\tmatch jo.get(\"ok\") {\n\t\t\tSome(v) => {\n\t\t\t\tif !(v.is_boolean() && v.as_boolean().unwrap() == true) {\n\t\t\t\t\treturn Err(format!(\"{} : js.get(\\\"ok\\\") != true : {:?}\", RTM_INVALID, jo))\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(format!(\"{} : jo.get(\\\"ok\\\") returned None. : {:?}\", RTM_INVALID, jo))\n\t\t}\n\n\t\tlet wss_url_string = match jo.get(\"url\") {\n\t\t\tSome(wss_url) => {\n\t\t\t\tif wss_url.is_string() {\n\t\t\t\t\twss_url.as_string().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(format!(\"{} : jo.get(\\\"url\\\") failed! : {:?}\", RTM_INVALID, jo))\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(format!(\"{} : jo.get(\\\"url\\\") returned None. 
: {:?}\", RTM_INVALID, jo))\n\t\t};\n\n\t\tlet wss_url = match Url::parse(wss_url_string) {\n\t\t\tOk(url) => url,\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t};\n\n\t\tlet jself = match jo.get(\"self\") {\n\t\t\tSome(jself) => {\n\t\t\t\tif jself.is_object() {\n\t\t\t\t\tjself.as_object().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t};\n\t\tmatch jself.get(\"name\") {\n\t\t\tSome(jname) => {\n\t\t\t\tif jname.is_string() {\n\t\t\t\t\tself.name = jname.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\t\tmatch jself.get(\"id\") {\n\t\t\tSome(jid) => {\n\t\t\t\tif jid.is_string() {\n\t\t\t\t\tself.id = jid.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\n\t\tlet jteam = match jo.get(\"team\") {\n\t\t\tSome(jteam) => {\n\t\t\t\tif jteam.is_object() {\n\t\t\t\t\tjteam.as_object().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t};\n\t\tmatch jteam.get(\"name\") {\n\t\t\tSome(jtname) => {\n\t\t\t\tif jtname.is_string() {\n\t\t\t\t\tself.team.name = jtname.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t}\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\t\tmatch jteam.get(\"id\") {\n\t\t\tSome(jtid) => {\n\t\t\t\tif jtid.is_string() {\n\t\t\t\t\tself.team.id = jtid.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t}\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\n\n\t\tlet req = match websocket::client::Client::connect(wss_url.clone()) {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => return Err(format!(\"{:?}, Websocket request to `{:?}` failed\", err, wss_url))\n\t\t};\n\n\t\t\/\/Connect via tls, do websocket handshake.\n\t\tlet res = match req.send() {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => {\n\t\t\t\treturn Err(format!(\"{:?}, Websocket request to `{:?}` failed\", err, wss_url))\n\t\t\t}\n\t\t};\n\n\n\t\tmatch res.validate() {\n\t\t\tOk(()) => { }\n\t\t\tErr(err) => {\n\t\t\t\treturn Err(format!(\"Error: res.validate(): {:?}\", err))\n\t\t\t}\n\t\t}\n\n\t\tlet (tx,rx) = channel::<Message>();\n\t\tself.outs = Some(tx.clone());\n\t\tOk((res.begin(),rx))\n\t}\n\n\t\/\/\/Runs the message receive loop\n\tpub fn run<T: MessageHandler>(&mut self, handler: &mut T, client: WsClient, rx: Receiver<Message>) -> Result<(),String> {\n\t\t\/\/for sending messages\n\t\tlet tx = match self.outs {\n\t\t\tSome(ref mut tx) => { tx.clone() },\n\t\t\tNone => { return Err(\"No tx!\".to_string()); }\n\t\t};\n\n\t\tlet (mut sender, mut receiver) = client.split();\n\n\t\thandler.on_connect(self);\n\t\t\/\/websocket send loop\n\t\tlet guard = thread::scoped(move || -> () {\n\t\t\tloop {\n\t\t\t\tlet msg = match rx.recv() {\n\t\t\t\t\tOk(m) => { m },\n\t\t\t\t\tErr(_) => { return; }\n\t\t\t\t};\n\n\t\t\t\tlet closing = match msg {\n\t\t\t\t\tMessage::Close(_) => { true },\n\t\t\t\t\t_ => { false }\n\t\t\t\t};\n\t\t\t\tmatch sender.send_message(msg) {\n\t\t\t\t\tOk(_) => {},\n\t\t\t\t\tErr(_) => { return; }\/\/panic!(format!(\"{:?}\", err))\n\t\t\t\t}\n\t\t\t\tif closing 
{\n\t\t\t\t\tdrop(rx);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\n\t\t\/\/receive loop\n\t\tfor message in receiver.incoming_messages() {\n\t\t\tlet message = match message {\n\t\t\t\tOk(message) => message,\n\t\t\t\tErr(err) => {\n\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tmatch message {\n\t\t\t\tMessage::Text(data) => {\n\t\t\t\t\thandler.on_receive(self, data.as_slice());\n\t\t\t\t},\n\t\t\t\tMessage::Ping(data) => {\n\t\t\t\t\thandler.on_ping(self);\n\t\t\t\t\tlet message = Message::Pong(data);\n\t\t\t\t\tmatch tx.send(message) {\n\t\t\t\t\t\tOk(_) => {},\n\t\t\t\t\t\tErr(err) => {\n\t\t\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t\tMessage::Close(data) => {\n\t\t\t\t\thandler.on_close(self);\n\t\t\t\t\tlet message = Message::Close(data);\n\t\t\t\t\tmatch tx.send(message) {\n\t\t\t\t\t\tOk(_) => {},\n\t\t\t\t\t\tErr(err) => {\n\t\t\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\treturn Ok(());\n\t\t\t\t},\n\t\t\t\t_ => {}\n\t\t\t}\n\t\t}\n\t\tlet _ = guard.join();\n\t\tOk(())\n\t}\n\n\n\t\/\/\/Runs the main loop for the client after logging in to slack,\n\t\/\/\/returns an error if the process fails at an point, or an Ok(()) on succesful\n\t\/\/\/close.\n\t\/\/\/Takes a MessageHandler (implemented by the user) to call events handlers on.\n\t\/\/\/once the first on_receive() or on_ping is called on the MessageHandler, you\n\t\/\/\/can soon the 'Only valid after login' methods are safe to use.\n\t\/\/\/Sending is run in a thread in parallel while the receive loop runs on the main thread.\n\t\/\/\/Both loops should end on return.\n\t\/\/\/Sending should be thread safe as the messages are passed in via a channel in\n\t\/\/\/RtmClient.send and RtmClient.send_message\n\tpub fn login_and_run<T: MessageHandler>(&mut self, handler: &mut T, token : &str) -> Result<(),String> {\n\t\tlet (client,rx) = match self.login(token) {\n\t\t\tOk((c,r)) => { (c,r) },\n\t\t\tErr(err) => { return Err(format!(\"{:?}\",err)); }\n\t\t};\n\t\tself.run(handler, client, rx)\n\t}\n}\n<commit_msg>Use rustc_serialize instead of \"rustc-serialize\"<commit_after>\/*\nCopyright 2014 Benjamin Elder from https:\/\/github.com\/BenTheElder\/slack-rs\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n\thttp:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*\/\n#![feature(core)]\nextern crate hyper;\nextern crate websocket;\nextern crate openssl;\nextern crate rustc_serialize;\n\nuse rustc_serialize::json::{Json};\nuse std::sync::mpsc::{Sender,Receiver,channel};\nuse std::thread;\nuse std::io::Read;\nuse std::sync::atomic::{AtomicIsize, Ordering};\nuse websocket::Client;\npub use websocket::message::Message;\nuse websocket::Sender as WsSender;\nuse websocket::Receiver as WsReceiver;\nuse websocket::dataframe::DataFrame;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\n\npub type WsClient = Client<websocket::dataframe::DataFrame,\n websocket::client::sender::Sender<websocket::stream::WebSocketStream>,\n 
websocket::client::receiver::Receiver<websocket::stream::WebSocketStream>>;\n\n\n\n\/\/\/Implement this trait in your code to handle message events\npub trait MessageHandler {\n\t\/\/\/When a message is received this will be called with self, the slack client,\n\t\/\/\/and the json encoded string payload.\n\tfn on_receive(&mut self, cli: &mut RtmClient, json_str: &str);\n\n\t\/\/\/Called when a ping is received; you do NOT need to handle the reply pong,\n\t\/\/\/but you may use this event to track the connection as a keep-alive.\n\tfn on_ping(&mut self, cli: &mut RtmClient);\n\n\t\/\/\/Called when the connection is closed for any reason.\n\tfn on_close(&mut self, cli: &mut RtmClient);\n\n\t\/\/\/Called when the connection is opened.\n\tfn on_connect(&mut self, cli: &mut RtmClient);\n}\n\n\n\/\/\/Contains information about the team the bot is logged into.\npub struct Team {\n\tname : String,\n\tid : String\n}\n\nimpl Team {\n\t\/\/\/private, create empty team.\n\tfn new() -> Team {\n\t\tTeam{name: String::new(), id: String::new()}\n\t}\n\n\t\/\/\/Returns the team's name as a String\n\tpub fn get_name(&self) -> String {\n\t\tself.name.clone()\n\t}\n\n\t\/\/\/Returns the team's id as a String\n\tpub fn get_id(&self) -> String {\n\t\tself.id.clone()\n\t}\n}\n\nimpl Clone for Team {\n\tfn clone(&self) -> Self {\n\t\tTeam{\n\t\t\tname: self.name.clone(),\n\t\t\tid: self.id.clone()\n\t\t}\n\t}\n\n\tfn clone_from(&mut self, source: &Self) {\n\t\tself.name = source.name.clone();\n\t\tself.id = source.id.clone();\n\t}\n}\n\n\/\/\/The actual messaging client.\npub struct RtmClient{\n\tname : String,\n\tid : String,\n\tteam : Team,\n\tmsg_num: AtomicIsize,\n\touts : Option<Sender<Message>>\n}\n\n\/\/\/Error string. (FIXME: better error return values\/ custom error type)\nstatic RTM_INVALID : &'static str = \"Invalid data returned from slack (rtm.start)\";\n\n\nimpl RtmClient {\n\n\t\/\/\/Creates a new empty client.\n\tpub fn new() -> RtmClient {\n\t\tRtmClient{\n\t\t\tname : String::new(),\n\t\t\tid : String::new(),\n\t\t\tteam : Team::new(),\n\t\t\tmsg_num: AtomicIsize::new(0),\n\t\t\touts : None\n\t\t}\n\t}\n\n\n\tpub fn get_outs(&self) -> Option<Sender<Message>> {\n\t\tself.outs.clone()\n\t}\n\n\t\/\/\/Returns the name of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_name(&self) -> String {\n\t\treturn self.name.clone();\n\t}\n\n\t\/\/\/Returns the id of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_id(&self) -> String {\n\t\treturn self.id.clone();\n\t}\n\n\t\/\/\/Returns the Team struct of the bot\/user connected to the client.\n\t\/\/\/Only valid after login.\n\tpub fn get_team<'a>(&'a self) -> &'a Team {\n\t\t&self.team\n\t}\n\n\t\/\/\/Returns a unique identifier to be used in the 'id' field of a message\n\t\/\/\/sent to slack.\n\tpub fn get_msg_uid(&self) -> isize {\n\t\tself.msg_num.fetch_add(1, Ordering::SeqCst)\n\t}\n\n\n\t\/\/\/Allows sending a json string message over the websocket connection.\n\t\/\/\/Note that this only passes the message over a channel to the\n\t\/\/\/Messaging task, and therfore a succesful return value does not\n\t\/\/\/mean the message has been actually put on the wire yet.\n\t\/\/\/Note that you will need to form a valid json reply yourself if you\n\t\/\/\/use this method, and you will also need to retrieve a unique id for\n\t\/\/\/the message via RtmClient.get_msg_uid()\n\t\/\/\/Only valid after login.\n\tpub fn send(&mut self, s : &str) -> Result<(),String> {\n\t\tlet tx = match 
self.outs {\n\t\t\tSome(ref tx) => tx,\n\t\t\tNone => return Err(\"Failed to get tx!\".to_string())\n\t\t};\n\t\tmatch tx.send(Message::Text(s.to_string())) {\n\t\t\tOk(_) => {},\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t}\n\t\tOk(())\n\t}\n\n\t\/\/\/Allows sending a textual string message over the websocket connection,\n\t\/\/\/to the requested channel id. Ideal usage would be EG:\n\t\/\/\/extract the channel in on_receive and then send back a message to the channel.\n\t\/\/\/Note that this only passes the message over a channel to the\n\t\/\/\/Messaging task, and therfore a succesful return value does not\n\t\/\/\/mean the message has been actually put on the wire yet.\n\t\/\/\/This method also handles getting a unique id and formatting the actual json\n\t\/\/\/sent.\n\t\/\/\/Only valid after login.\n\tpub fn send_message(&self, chan: &str, msg: &str) -> Result<(),String>{\n\t\tlet n = self.get_msg_uid();\n\t\tlet mstr = \"{\".to_string()+format!(r#\"\"id\": {},\"type\": \"message\",\"channel\": \"{}\",\"text\": \"{}\"\"#,n,chan,msg).as_slice()+\"}\";\n\t\tlet tx = match self.outs {\n\t\t\tSome(ref tx) => tx,\n\t\t\tNone => return Err(\"Failed to get tx!\".to_string())\n\t\t};\n\t\tmatch tx.send(Message::Text(mstr)) {\n\t\t\tOk(_) => {},\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t}\n\t\tOk(())\n\t}\n\n\t\/\/\/Logs in to slack. Call this before calling run.\n\t\/\/\/Alternatively use login_and_run\n\tpub fn login(&mut self, token: &str) -> Result<(WsClient,Receiver<Message>),String> {\n\t\t\/\/Slack real time api url\n\t\tlet url = \"https:\/\/slack.com\/api\/rtm.start?token=\".to_string()+token;\n\n\t\t\/\/Create http client and send request to slack\n\t\tlet mut client = hyper::Client::new();\n\t\tlet mut res = match client.get(url.as_slice()).send() {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => return Err(format!(\"Hyper Error: {:?}\", err))\n\t\t};\n\n\t\t\/\/Read result string\n\t\tlet mut res_str = String::new();\n\n\t\tmatch res.read_to_string(&mut res_str) {\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err)),\n\t\t\t_ => {},\n\t\t};\n\n\n\t\t\/\/Start parsing json. We do not map to a structure,\n\t\t\/\/because slack makes no guarantee that there won't be extra fields.\n\t\tlet js = match Json::from_str(res_str.as_slice()) {\n\t\t\tOk(js) => js,\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t};\n\n\t\tif !js.is_object() {\n\t\t\treturn Err(format!(\"{} : json is not an object.\", RTM_INVALID))\n\t\t}\n\t\tlet jo = js.as_object().unwrap();\n\n\t\tmatch jo.get(\"ok\") {\n\t\t\tSome(v) => {\n\t\t\t\tif !(v.is_boolean() && v.as_boolean().unwrap() == true) {\n\t\t\t\t\treturn Err(format!(\"{} : js.get(\\\"ok\\\") != true : {:?}\", RTM_INVALID, jo))\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(format!(\"{} : jo.get(\\\"ok\\\") returned None. : {:?}\", RTM_INVALID, jo))\n\t\t}\n\n\t\tlet wss_url_string = match jo.get(\"url\") {\n\t\t\tSome(wss_url) => {\n\t\t\t\tif wss_url.is_string() {\n\t\t\t\t\twss_url.as_string().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(format!(\"{} : jo.get(\\\"url\\\") failed! : {:?}\", RTM_INVALID, jo))\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(format!(\"{} : jo.get(\\\"url\\\") returned None. 
: {:?}\", RTM_INVALID, jo))\n\t\t};\n\n\t\tlet wss_url = match Url::parse(wss_url_string) {\n\t\t\tOk(url) => url,\n\t\t\tErr(err) => return Err(format!(\"{:?}\", err))\n\t\t};\n\n\t\tlet jself = match jo.get(\"self\") {\n\t\t\tSome(jself) => {\n\t\t\t\tif jself.is_object() {\n\t\t\t\t\tjself.as_object().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t};\n\t\tmatch jself.get(\"name\") {\n\t\t\tSome(jname) => {\n\t\t\t\tif jname.is_string() {\n\t\t\t\t\tself.name = jname.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\t\tmatch jself.get(\"id\") {\n\t\t\tSome(jid) => {\n\t\t\t\tif jid.is_string() {\n\t\t\t\t\tself.id = jid.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\n\t\tlet jteam = match jo.get(\"team\") {\n\t\t\tSome(jteam) => {\n\t\t\t\tif jteam.is_object() {\n\t\t\t\t\tjteam.as_object().unwrap()\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t},\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t};\n\t\tmatch jteam.get(\"name\") {\n\t\t\tSome(jtname) => {\n\t\t\t\tif jtname.is_string() {\n\t\t\t\t\tself.team.name = jtname.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t}\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\t\tmatch jteam.get(\"id\") {\n\t\t\tSome(jtid) => {\n\t\t\t\tif jtid.is_string() {\n\t\t\t\t\tself.team.id = jtid.as_string().unwrap().to_string();\n\t\t\t\t}else{\n\t\t\t\t\treturn Err(RTM_INVALID.to_string())\n\t\t\t\t}\n\t\t\t}\n\t\t\tNone => return Err(RTM_INVALID.to_string())\n\t\t}\n\n\n\t\tlet req = match websocket::client::Client::connect(wss_url.clone()) {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => return Err(format!(\"{:?}, Websocket request to `{:?}` failed\", err, wss_url))\n\t\t};\n\n\t\t\/\/Connect via tls, do websocket handshake.\n\t\tlet res = match req.send() {\n\t\t\tOk(res) => res,\n\t\t\tErr(err) => {\n\t\t\t\treturn Err(format!(\"{:?}, Websocket request to `{:?}` failed\", err, wss_url))\n\t\t\t}\n\t\t};\n\n\n\t\tmatch res.validate() {\n\t\t\tOk(()) => { }\n\t\t\tErr(err) => {\n\t\t\t\treturn Err(format!(\"Error: res.validate(): {:?}\", err))\n\t\t\t}\n\t\t}\n\n\t\tlet (tx,rx) = channel::<Message>();\n\t\tself.outs = Some(tx.clone());\n\t\tOk((res.begin(),rx))\n\t}\n\n\t\/\/\/Runs the message receive loop\n\tpub fn run<T: MessageHandler>(&mut self, handler: &mut T, client: WsClient, rx: Receiver<Message>) -> Result<(),String> {\n\t\t\/\/for sending messages\n\t\tlet tx = match self.outs {\n\t\t\tSome(ref mut tx) => { tx.clone() },\n\t\t\tNone => { return Err(\"No tx!\".to_string()); }\n\t\t};\n\n\t\tlet (mut sender, mut receiver) = client.split();\n\n\t\thandler.on_connect(self);\n\t\t\/\/websocket send loop\n\t\tlet guard = thread::scoped(move || -> () {\n\t\t\tloop {\n\t\t\t\tlet msg = match rx.recv() {\n\t\t\t\t\tOk(m) => { m },\n\t\t\t\t\tErr(_) => { return; }\n\t\t\t\t};\n\n\t\t\t\tlet closing = match msg {\n\t\t\t\t\tMessage::Close(_) => { true },\n\t\t\t\t\t_ => { false }\n\t\t\t\t};\n\t\t\t\tmatch sender.send_message(msg) {\n\t\t\t\t\tOk(_) => {},\n\t\t\t\t\tErr(_) => { return; }\/\/panic!(format!(\"{:?}\", err))\n\t\t\t\t}\n\t\t\t\tif closing 
{\n\t\t\t\t\tdrop(rx);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\n\t\t\/\/receive loop\n\t\tfor message in receiver.incoming_messages() {\n\t\t\tlet message = match message {\n\t\t\t\tOk(message) => message,\n\t\t\t\tErr(err) => {\n\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tmatch message {\n\t\t\t\tMessage::Text(data) => {\n\t\t\t\t\thandler.on_receive(self, data.as_slice());\n\t\t\t\t},\n\t\t\t\tMessage::Ping(data) => {\n\t\t\t\t\thandler.on_ping(self);\n\t\t\t\t\tlet message = Message::Pong(data);\n\t\t\t\t\tmatch tx.send(message) {\n\t\t\t\t\t\tOk(_) => {},\n\t\t\t\t\t\tErr(err) => {\n\t\t\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t\tMessage::Close(data) => {\n\t\t\t\t\thandler.on_close(self);\n\t\t\t\t\tlet message = Message::Close(data);\n\t\t\t\t\tmatch tx.send(message) {\n\t\t\t\t\t\tOk(_) => {},\n\t\t\t\t\t\tErr(err) => {\n\t\t\t\t\t\t\treturn Err(format!(\"{:?}\", err));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\treturn Ok(());\n\t\t\t\t},\n\t\t\t\t_ => {}\n\t\t\t}\n\t\t}\n\t\tlet _ = guard.join();\n\t\tOk(())\n\t}\n\n\n\t\/\/\/Runs the main loop for the client after logging in to slack,\n\t\/\/\/returns an error if the process fails at an point, or an Ok(()) on succesful\n\t\/\/\/close.\n\t\/\/\/Takes a MessageHandler (implemented by the user) to call events handlers on.\n\t\/\/\/once the first on_receive() or on_ping is called on the MessageHandler, you\n\t\/\/\/can soon the 'Only valid after login' methods are safe to use.\n\t\/\/\/Sending is run in a thread in parallel while the receive loop runs on the main thread.\n\t\/\/\/Both loops should end on return.\n\t\/\/\/Sending should be thread safe as the messages are passed in via a channel in\n\t\/\/\/RtmClient.send and RtmClient.send_message\n\tpub fn login_and_run<T: MessageHandler>(&mut self, handler: &mut T, token : &str) -> Result<(),String> {\n\t\tlet (client,rx) = match self.login(token) {\n\t\t\tOk((c,r)) => { (c,r) },\n\t\t\tErr(err) => { return Err(format!(\"{:?}\",err)); }\n\t\t};\n\t\tself.run(handler, client, rx)\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>`IXAudio2Voice::SetChannelVolumes()`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>run_command API update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename parameters for clarity<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Forgot to add value.rs file.<commit_after>\/* This module collects value functions. *\/\n\nuse std::str::FromStr;\n\nuse game;\nuse game::{PlayerColor, LineState};\n\n#[derive(Clone, Copy, Debug)]\npub enum Simple {\n Subsets,\n WinOnly,\n}\n\nimpl Simple {\n pub fn value_of(self,\n structure: &game::Structure,\n state: &game::State,\n my_color: PlayerColor)\n -> i32 {\n match self {\n Simple::Subsets => subsets(structure, state, my_color),\n Simple::WinOnly => win_only(structure, state, my_color),\n }\n }\n}\n\nimpl FromStr for Simple {\n type Err = ();\n fn from_str(s: &str) -> Result<Simple, ()> {\n match s {\n \"subsets\" => Ok(Simple::Subsets),\n \"win\" => Ok(Simple::WinOnly),\n _ => Err(())\n }\n }\n}\n\n\/\/ For each possible winning subset, this adds some score.\n\/\/ One piece on a line => 1 Point\n\/\/ Two pieces on a line => 4 Points\n\/\/ Three pieces on a line => 9 Points\npub fn subsets(structure: &game::Structure, state: &game::State, my_color: PlayerColor) -> i32 {\n if let game::VictoryState::Win { winner, .. 
} = state.victory_state {\n if winner == my_color {\n return 1000;\n } else {\n return -1000;\n }\n }\n\n let mut score = 0;\n\n for subset in &structure.source {\n score += match subset.win_state(state) {\n LineState::Empty => 0,\n LineState::Mixed => 0,\n LineState::Pure { color, count } => {\n if color == my_color {\n (count * count) as i32\n } else {\n -(count * count) as i32\n }\n }\n LineState::Win(_) => {\n panic!(\"If the game is already won this should be caught earlier.\")\n }\n }\n }\n\n score\n}\n\n#[test]\nfn test_subsets_values() {\n use game::{Structure, State, Position2};\n use game::PlayerColor::White;\n use constants::LINES;\n\n let structure = Structure::new(&LINES);\n\n let mut state = State::new();\n assert_eq!(0, subsets(&structure, &state, White));\n\n state.insert(&structure, Position2::new(0, 0));\n assert_eq!(7, subsets(&structure, &state, White));\n\n state.insert(&structure, Position2::new(0, 3));\n assert_eq!(0, subsets(&structure, &state, White));\n}\n\n\/\/ This value function only checks if you won the game already.\npub fn win_only(structure: &game::Structure, state: &game::State, my_color: PlayerColor) -> i32 {\n if let game::VictoryState::Win { winner, .. } = state.victory_state {\n if winner == my_color {\n return 1;\n } else {\n return -1;\n }\n } else {\n 0\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #137<commit_after>\/\/! [Problem 137](https:\/\/projecteuler.net\/problem=137) solver.\n\/\/!\n\/\/! ```math\n\/\/! A_F(x) = x F_1 + x^2 F_2 + x^3 F_3 + \\dots\n\/\/! x A_F(x) = x^2 F_1 + x^3 F_2 + x^4 F_3 + \\dots\n\/\/! x^2 A_F(x) = x^3 F_1 + x^4 F_2 + x^5 F_3 + \\dots\n\/\/! (1 - x - x^2) A_F(x) = x F_1 + x^2 (F_2 - F_1) + x^3 (F_3 - F_2 - F_1) + \\dots\n\/\/! ```\n\/\/!\n\/\/! `F_k = F_{k-1} + F_k`, `F_1 = F_2 = 1` より、\n\/\/!\n\/\/! ```math\n\/\/! (1 - x - x^2) A_F(x) = x\n\/\/! ```\n\/\/!\n\/\/! `A_F(x)` は正の整数なので `n := A_F(x) > 0` とおくと、以下の二次方程式を得る。\n\/\/!\n\/\/! ```math\n\/\/! n x^2 + (n + 1) x - n = 0\n\/\/! ```\n\/\/!\n\/\/! この方程式が有理数解をもつのは、判別式 `D` が平方数の場合であり、ある整数 `m` を用いると以下のように表せる場合である。\n\/\/!\n\/\/! ```math\n\/\/! D = (n+1)^2 + 4n^2 = m^2\n\/\/! (5n+1)^2 - 5m^2 = -4\n\/\/! ```\n\/\/!\n\/\/! これは Pell 方程式であり、解を列挙すれば良い。\n\/\/!\n\/\/! `p := 5n + 1`, `q := m` とおくと、\n\/\/! `p_0 = 1`, `q_0 = 1` より、\n\/\/! ```math\n\/\/! p_{k+1} = \\frac{3p_k + 5q_k}{2}\n\/\/! q_{k+1} = \\frac{p_k + 3q_k}{2}\n\/\/! ```\n\/\/!\n\/\/! 
となり、これが一般解である。\n\n#![warn(bad_style,\n unused, unused_extern_crates, unused_import_braces,\n unused_qualifications, unused_results)]\n\n#[macro_use(problem)] extern crate common;\nextern crate itertools;\n\nuse itertools::Unfold;\n\nfn compute(i: usize) -> u64 {\n Unfold::new((1, 1), |state| {\n let next = ((3 * state.0 + 5 * state.1) \/ 2,\n (state.0 + 3 * state.1) \/ 2);\n *state = next;\n Some(next)\n }).filter_map(|(p, q)| {\n if p % 5 == 1 {\n Some((p \/ 5, q))\n } else {\n None\n }\n }).nth(i).unwrap().0\n}\n\nfn solve() -> String {\n compute(14).to_string()\n}\n\nproblem!(\"1120149658760\", solve);\n\n#[cfg(test)]\nmod tests {\n #[test]\n fn tenth_sol() {\n assert_eq!(74049690, super::compute(9));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Cleanup imports<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix backwards red\/blue (lol)<commit_after><|endoftext|>"} {"text":"<commit_before>\/*!\nTest supports module.\n\n*\/\n\n#![allow(dead_code)]\n\nuse glutin;\nuse glium::{self, DisplayBuild};\n\nuse std::env;\n\n\/\/\/ Returns true if we are executing headless tests.\npub fn is_headless() -> bool {\n env::var(\"HEADLESS_TESTS\").is_ok()\n}\n\n\/\/\/ Builds a headless display for tests.\n#[cfg(feature = \"headless\")]\npub fn build_display() -> glium::Display {\n let display = if is_headless() {\n glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)\n .build_glium().unwrap()\n } else {\n glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)\n .build_glium().unwrap()\n };\n\n display\n}\n\n\/\/\/ Builds a headless display for tests.\n#[cfg(not(feature = \"headless\"))]\npub fn build_display() -> glium::Display {\n assert!(!is_headless());\n glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)\n .build_glium().unwrap()\n}\n\n\/\/\/ Builds a 2x2 unicolor texture.\npub fn build_unicolor_texture2d(display: &glium::Display, red: f32, green: f32, blue: f32)\n -> glium::Texture2d\n{\n let color = ((red * 255.0) as u8, (green * 255.0) as u8, (blue * 255.0) as u8);\n\n glium::texture::Texture2d::new(display, vec![\n vec![color, color],\n vec![color, color],\n ])\n}\n\n\/\/\/ Builds a vertex buffer, index buffer, and program, to draw red `(1.0, 0.0, 0.0, 1.0)` to the whole screen.\npub fn build_fullscreen_red_pipeline(display: &glium::Display) -> (glium::vertex::VertexBufferAny,\n glium::IndexBuffer, glium::Program)\n{\n #[derive(Copy, Clone)]\n struct Vertex {\n position: [f32; 2],\n }\n\n implement_vertex!(Vertex, position);\n\n (\n glium::VertexBuffer::new(display, vec![\n Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },\n Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },\n ]).into_vertex_buffer_any(),\n\n glium::IndexBuffer::new(display, glium::index::TriangleStrip(vec![0u8, 1, 2, 3])),\n\n glium::Program::from_source(display,\n \"\n #version 110\n\n attribute vec2 position;\n\n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n \",\n \"\n #version 110\n\n void main() {\n gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n }\n \",\n None).unwrap()\n )\n}\n\n\/\/\/ Builds a vertex buffer and an index buffer corresponding to a rectangle.\n\/\/\/\n\/\/\/ The vertex buffer has the \"position\" attribute of type \"vec2\".\npub fn build_rectangle_vb_ib(display: &glium::Display)\n -> (glium::vertex::VertexBufferAny, glium::IndexBuffer)\n{\n #[derive(Copy, Clone)]\n struct Vertex {\n position: [f32; 2],\n }\n\n implement_vertex!(Vertex, position);\n\n (\n 
glium::VertexBuffer::new(display, vec![\n Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },\n Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },\n ]).into_vertex_buffer_any(),\n\n glium::IndexBuffer::new(display, glium::index::TriangleStrip(vec![0u8, 1, 2, 3])),\n )\n}\n\n\/\/\/ Builds a texture suitable for rendering.\npub fn build_renderable_texture(display: &glium::Display) -> glium::Texture2d {\n glium::Texture2d::empty(display, 1024, 1024)\n}\n<commit_msg>Make tests support module more flexible with the facade<commit_after>\/*!\nTest supports module.\n\n*\/\n\n#![allow(dead_code)]\n\nuse glutin;\nuse glium::{self, DisplayBuild};\nuse glium::backend::Facade;\n\nuse std::env;\n\n\/\/\/ Returns true if we are executing headless tests.\npub fn is_headless() -> bool {\n env::var(\"HEADLESS_TESTS\").is_ok()\n}\n\n\/\/\/ Builds a headless display for tests.\n#[cfg(feature = \"headless\")]\npub fn build_display() -> glium::Display {\n let display = if is_headless() {\n glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)\n .build_glium().unwrap()\n } else {\n glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)\n .build_glium().unwrap()\n };\n\n display\n}\n\n\/\/\/ Builds a headless display for tests.\n#[cfg(not(feature = \"headless\"))]\npub fn build_display() -> glium::Display {\n assert!(!is_headless());\n glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)\n .build_glium().unwrap()\n}\n\n\/\/\/ Builds a 2x2 unicolor texture.\npub fn build_unicolor_texture2d<F>(facade: &F, red: f32, green: f32, blue: f32)\n -> glium::Texture2d where F: Facade\n{\n let color = ((red * 255.0) as u8, (green * 255.0) as u8, (blue * 255.0) as u8);\n\n glium::texture::Texture2d::new(facade, vec![\n vec![color, color],\n vec![color, color],\n ])\n}\n\n\/\/\/ Builds a vertex buffer, index buffer, and program, to draw red `(1.0, 0.0, 0.0, 1.0)` to the whole screen.\npub fn build_fullscreen_red_pipeline<F>(facade: &F) -> (glium::vertex::VertexBufferAny,\n glium::IndexBuffer, glium::Program) where F: Facade\n{\n #[derive(Copy, Clone)]\n struct Vertex {\n position: [f32; 2],\n }\n\n implement_vertex!(Vertex, position);\n\n (\n glium::VertexBuffer::new(facade, vec![\n Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },\n Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },\n ]).into_vertex_buffer_any(),\n\n glium::IndexBuffer::new(facade, glium::index::TriangleStrip(vec![0u8, 1, 2, 3])),\n\n glium::Program::from_source(facade,\n \"\n #version 110\n\n attribute vec2 position;\n\n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n \",\n \"\n #version 110\n\n void main() {\n gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n }\n \",\n None).unwrap()\n )\n}\n\n\/\/\/ Builds a vertex buffer and an index buffer corresponding to a rectangle.\n\/\/\/\n\/\/\/ The vertex buffer has the \"position\" attribute of type \"vec2\".\npub fn build_rectangle_vb_ib<F>(facade: &F)\n -> (glium::vertex::VertexBufferAny, glium::IndexBuffer) where F: Facade\n{\n #[derive(Copy, Clone)]\n struct Vertex {\n position: [f32; 2],\n }\n\n implement_vertex!(Vertex, position);\n\n (\n glium::VertexBuffer::new(facade, vec![\n Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },\n Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },\n ]).into_vertex_buffer_any(),\n\n glium::IndexBuffer::new(facade, glium::index::TriangleStrip(vec![0u8, 1, 2, 3])),\n )\n}\n\n\/\/\/ Builds a texture 
suitable for rendering.\npub fn build_renderable_texture<F>(facade: &F) -> glium::Texture2d where F: Facade {\n glium::Texture2d::empty(facade, 1024, 1024)\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::HashMap;\n\nuse metric::Metric;\n\npub trait Registry<'a> {\n fn get(&'a self, name: &'a str) -> &'a Metric;\n\n fn insert<T: Metric + 'a>(&mut self, name: &'a str, metric: T);\n}\n\npub struct StdRegistry<'a> {\n metrics: HashMap<&'a str, Box<Metric + 'a>>,\n}\n\n\/\/ Specific stuff for registry goes here\nimpl<'a> Registry<'a> for StdRegistry<'a> {\n fn get(&'a self, name: &'a str) -> &'a Metric {\n &*self.metrics[name]\n }\n\n fn insert<T: Metric + 'a>(&mut self, name: &'a str, metric: T) {\n let boxed = Box::new(metric);\n\n self.metrics.insert(name, boxed);\n }\n}\n\n\/\/ General StdRegistry\nimpl<'a> StdRegistry<'a> {\n fn new() -> StdRegistry<'a> {\n StdRegistry{\n metrics: HashMap::new()\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use metric::Metric;\n use meter::StdMeter;\n use registry::{Registry, StdRegistry};\n\n #[test]\n fn meter() {\n let mut r: StdRegistry = StdRegistry::new();\n let mut m: StdMeter = StdMeter::new();\n\n r.insert(\"foo\", m);\n r.get(\"foo\");\n }\n}\n<commit_msg>quiet warnings<commit_after>use std::collections::HashMap;\n\nuse metric::Metric;\n\npub trait Registry<'a> {\n fn get(&'a self, name: &'a str) -> &'a Metric;\n\n fn insert<T: Metric + 'a>(&mut self, name: &'a str, metric: T);\n}\n\npub struct StdRegistry<'a> {\n metrics: HashMap<&'a str, Box<Metric + 'a>>,\n}\n\n\/\/ Specific stuff for registry goes here\nimpl<'a> Registry<'a> for StdRegistry<'a> {\n fn get(&'a self, name: &'a str) -> &'a Metric {\n &*self.metrics[name]\n }\n\n fn insert<T: Metric + 'a>(&mut self, name: &'a str, metric: T) {\n let boxed = Box::new(metric);\n\n self.metrics.insert(name, boxed);\n }\n}\n\n\/\/ General StdRegistry\nimpl<'a> StdRegistry<'a> {\n #[allow(dead_code)]\n fn new() -> StdRegistry<'a> {\n StdRegistry{\n metrics: HashMap::new()\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use meter::StdMeter;\n use registry::{Registry, StdRegistry};\n\n #[test]\n fn meter() {\n let mut r: StdRegistry = StdRegistry::new();\n let m: StdMeter = StdMeter::new();\n\n r.insert(\"foo\", m);\n r.get(\"foo\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove double prompting for password of new user.<commit_after><|endoftext|>"} {"text":"<commit_before>#![allow(dead_code)]\nextern crate rusoto;\nextern crate xml;\nextern crate time;\nextern crate regex;\nextern crate rustc_serialize;\nuse rusoto::credentials::*;\nuse rusoto::error::*;\nuse rusoto::sqs::*;\nuse rusoto::s3::*;\nuse time::*;\nuse std::fs::File;\nuse std::io::Write;\nuse std::io::Read;\n\/\/ use std::thread;\n\nfn main() {\n\tlet mut provider = DefaultAWSCredentialsProviderChain::new();\n\n\t\/\/ println!(\"Creds in main: {}, {}, {}.\", creds.get_aws_secret_key(), creds.get_aws_secret_key(),\n\t\/\/ \tcreds.get_token());\n\n\tmatch sqs_roundtrip_tests(&provider.get_credentials()) {\n\t\tOk(_) => { println!(\"Everything worked.\"); },\n\t\tErr(err) => { println!(\"Got error: {:#?}\", err); }\n\t}\n\n\n\tlet bucket_name = format!(\"rusoto{}\", get_time().sec);\n\n\tmatch s3_list_buckets_tests(&provider.get_credentials()) {\n\t\tOk(_) => { println!(\"Everything worked for S3 list buckets.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 list buckets: {:#?}\", err); }\n\t}\n\n\tmatch s3_create_bucket_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => { println!(\"Everything worked 
for S3 create bucket.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 create bucket: {:#?}\", err); }\n\t}\n\n\tmatch s3_put_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(result) => {\n\t\t\tprintln!(\"Everything worked for S3 put object.\");\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 put object: {:#?}\", err); }\n\t}\n\n\tmatch s3_get_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(result) => {\n\t\t\tprintln!(\"Everything worked for S3 get object.\");\n\t\t\tlet mut f = File::create(\"s3-sample-creds\").unwrap();\n\t\t\tf.write(&(result.body));\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 get object: {:#?}\", err); }\n\t}\n\n\tmatch s3_delete_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(result) => {\n\t\t\tprintln!(\"Everything worked for S3 delete object.\");\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 delete object: {:#?}\", err); }\n\t}\n\n\tmatch s3_delete_bucket_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => { println!(\"Everything worked for S3 delete bucket.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 delete bucket: {:#?}\", err); }\n\t}\n}\n\nfn s3_list_buckets_tests(creds: &AWSCredentials) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.list_buckets());\n\t\/\/ println!(\"response is {:?}\", response);\n\tfor q in response.buckets {\n\t\tprintln!(\"Existing bucket: {:?}\", q.name);\n\t}\n\n\tOk(())\n}\n\nfn s3_get_object_test(creds: &AWSCredentials, bucket: &str) -> Result<GetObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.get_object(bucket, \"sample-credentials\"));\n\t\/\/ println!(\"get object response is {:?}\", response);\n\tOk(response)\n}\n\nfn s3_delete_object_test(creds: &AWSCredentials, bucket: &str) -> Result<DeleteObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.delete_object(bucket, \"sample-credentials\"));\n\t\/\/ println!(\"delete object response is {:?}\", response);\n\tOk(response)\n}\n\nfn s3_put_object_test(creds: &AWSCredentials, bucket: &str) -> Result<PutObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet mut f = File::open(\"src\/sample-credentials\").unwrap();\n\tlet mut contents = Vec::new();\n\tf.read_to_end(&mut contents);\n\n\tlet response = try!(s3.put_object(bucket, \"sample-credentials\", &contents));\n\t\/\/ println!(\"put object response is {:?}\", response);\n\tOk(response)\n}\n\nfn s3_create_bucket_test(creds: &AWSCredentials, bucket: &str) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.create_bucket_in_region(bucket, \"us-east-1\"));\n\n\t\/\/ println!(\"Create bucket response is {:?}\", response);\n\tOk(())\n}\n\nfn s3_delete_bucket_test(creds: &AWSCredentials, bucket: &str) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.delete_bucket(bucket));\n\t\/\/ println!(\"Delete bucket response is {:?}\", response);\n\tOk(())\n}\n\nfn sqs_roundtrip_tests(creds: &AWSCredentials) -> Result<(), AWSError> {\n\tlet sqs = SQSHelper::new(&creds, \"us-east-1\");\n\n\t\/\/ list existing queues\n\tlet response = try!(sqs.list_queues());\n\tfor q in response.queue_urls {\n\t\tprintln!(\"Existing queue: {}\", q);\n\t}\n\n\t\/\/ create a new queue\n\tlet q_name = &format!(\"test_q_{}\", get_time().sec);\n\tlet response = 
try!(sqs.create_queue(q_name));\n\tprintln!(\"Created queue {} with url {}\", q_name, response.queue_url);\n\n\t\/\/ query it by name\n\tlet response = try!(sqs.get_queue_url(q_name));\n\tlet queue_url = &response.queue_url;\n\tprintln!(\"Verified queue url {} for queue name {}\", queue_url, q_name);\n\n\t\/\/ send it a message\n\tlet msg_str = \"lorem ipsum dolor sit amet\";\n\tlet response = try!(sqs.send_message(queue_url, msg_str));\n\tprintln!(\"Send message with body '{}' and created message_id {}\", msg_str, response.message_id);\n\n\t\/\/ receive a message\n\tlet response = try!(sqs.receive_message(queue_url));\n\tfor msg in response.messages {\n\t\tprintln!(\"Received message '{}' with id {}\", msg.body, msg.message_id);\n\t\ttry!(sqs.delete_message(queue_url, &msg.receipt_handle));\n\t}\n\n\t\/\/ delete the queue\n\ttry!(sqs.delete_queue(queue_url));\n\tprintln!(\"Queue {} deleted\", queue_url);\n\n\tOk(())\n}\n<commit_msg>Refactors main to not have compiler warnings.<commit_after>#![allow(dead_code)]\nextern crate rusoto;\nextern crate xml;\nextern crate time;\nextern crate regex;\nextern crate rustc_serialize;\nuse rusoto::credentials::*;\nuse rusoto::error::*;\nuse rusoto::sqs::*;\nuse rusoto::s3::*;\nuse time::*;\nuse std::fs::File;\nuse std::io::Write;\nuse std::io::Read;\n\/\/ use std::thread;\n\nfn main() {\n\tlet mut provider = DefaultAWSCredentialsProviderChain::new();\n\n\t\/\/ println!(\"Creds in main: {}, {}, {}.\", creds.get_aws_secret_key(), creds.get_aws_secret_key(),\n\t\/\/ \tcreds.get_token());\n\n\tmatch sqs_roundtrip_tests(&provider.get_credentials()) {\n\t\tOk(_) => { println!(\"Everything worked.\"); },\n\t\tErr(err) => { println!(\"Got error: {:#?}\", err); }\n\t}\n\n\n\tlet bucket_name = format!(\"rusoto{}\", get_time().sec);\n\n\tmatch s3_list_buckets_tests(&provider.get_credentials()) {\n\t\tOk(_) => { println!(\"Everything worked for S3 list buckets.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 list buckets: {:#?}\", err); }\n\t}\n\n\tmatch s3_create_bucket_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => { println!(\"Everything worked for S3 create bucket.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 create bucket: {:#?}\", err); }\n\t}\n\n\tmatch s3_put_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => {\n\t\t\tprintln!(\"Everything worked for S3 put object.\");\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 put object: {:#?}\", err); }\n\t}\n\n\tmatch s3_get_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(result) => {\n\t\t\tprintln!(\"Everything worked for S3 get object.\");\n\t\t\tlet mut f = File::create(\"s3-sample-creds\").unwrap();\n\t\t\tmatch f.write(&(result.body)) {\n\t\t\t\tErr(why) => println!(\"Couldn't create file to save object from S3: {}\", why),\n\t\t\t\tOk(_) => return,\n\t\t\t}\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 get object: {:#?}\", err); }\n\t}\n\n\tmatch s3_delete_object_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => {\n\t\t\tprintln!(\"Everything worked for S3 delete object.\");\n\t\t}\n\t\tErr(err) => { println!(\"Got error in s3 delete object: {:#?}\", err); }\n\t}\n\n\tmatch s3_delete_bucket_test(&provider.get_credentials(), &bucket_name) {\n\t\tOk(_) => { println!(\"Everything worked for S3 delete bucket.\"); },\n\t\tErr(err) => { println!(\"Got error in s3 delete bucket: {:#?}\", err); }\n\t}\n}\n\nfn s3_list_buckets_tests(creds: &AWSCredentials) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, 
\"us-east-1\");\n\n\tlet response = try!(s3.list_buckets());\n\t\/\/ println!(\"response is {:?}\", response);\n\tfor q in response.buckets {\n\t\tprintln!(\"Existing bucket: {:?}\", q.name);\n\t}\n\n\tOk(())\n}\n\nfn s3_get_object_test(creds: &AWSCredentials, bucket: &str) -> Result<GetObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.get_object(bucket, \"sample-credentials\"));\n\t\/\/ println!(\"get object response is {:?}\", response);\n\tOk(response)\n}\n\nfn s3_delete_object_test(creds: &AWSCredentials, bucket: &str) -> Result<DeleteObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet response = try!(s3.delete_object(bucket, \"sample-credentials\"));\n\t\/\/ println!(\"delete object response is {:?}\", response);\n\tOk(response)\n}\n\nfn s3_put_object_test(creds: &AWSCredentials, bucket: &str) -> Result<PutObjectOutput, AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\tlet mut f = File::open(\"src\/sample-credentials\").unwrap();\n\tlet mut contents = Vec::new();\n\tmatch f.read_to_end(&mut contents) {\n\t\tErr(why) => return Err(AWSError::new(format!(\"Error opening file to send to S3: {}\", why))),\n\t\tOk(_) => {\n\t\t\tlet response = try!(s3.put_object(bucket, \"sample-credentials\", &contents));\n\t\t\tOk(response)\n\t\t}\n\t}\n}\n\nfn s3_create_bucket_test(creds: &AWSCredentials, bucket: &str) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\ttry!(s3.create_bucket_in_region(bucket, \"us-east-1\"));\n\n\tOk(())\n}\n\nfn s3_delete_bucket_test(creds: &AWSCredentials, bucket: &str) -> Result<(), AWSError> {\n\tlet s3 = S3Helper::new(&creds, \"us-east-1\");\n\n\ttry!(s3.delete_bucket(bucket));\n\tOk(())\n}\n\nfn sqs_roundtrip_tests(creds: &AWSCredentials) -> Result<(), AWSError> {\n\tlet sqs = SQSHelper::new(&creds, \"us-east-1\");\n\n\t\/\/ list existing queues\n\tlet response = try!(sqs.list_queues());\n\tfor q in response.queue_urls {\n\t\tprintln!(\"Existing queue: {}\", q);\n\t}\n\n\t\/\/ create a new queue\n\tlet q_name = &format!(\"test_q_{}\", get_time().sec);\n\tlet response = try!(sqs.create_queue(q_name));\n\tprintln!(\"Created queue {} with url {}\", q_name, response.queue_url);\n\n\t\/\/ query it by name\n\tlet response = try!(sqs.get_queue_url(q_name));\n\tlet queue_url = &response.queue_url;\n\tprintln!(\"Verified queue url {} for queue name {}\", queue_url, q_name);\n\n\t\/\/ send it a message\n\tlet msg_str = \"lorem ipsum dolor sit amet\";\n\tlet response = try!(sqs.send_message(queue_url, msg_str));\n\tprintln!(\"Send message with body '{}' and created message_id {}\", msg_str, response.message_id);\n\n\t\/\/ receive a message\n\tlet response = try!(sqs.receive_message(queue_url));\n\tfor msg in response.messages {\n\t\tprintln!(\"Received message '{}' with id {}\", msg.body, msg.message_id);\n\t\ttry!(sqs.delete_message(queue_url, &msg.receipt_handle));\n\t}\n\n\t\/\/ delete the queue\n\ttry!(sqs.delete_queue(queue_url));\n\tprintln!(\"Queue {} deleted\", queue_url);\n\n\tOk(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test case for derived-tydescs-before-dynamic-allocas<commit_after>\/\/ xfail-stage0\n\ntag option[T] {\n some(T);\n none;\n}\n\ntype r[T] = rec(mutable (option[T])[] v);\n\nfn f[T]() -> T[] {\n ret ~[];\n}\n\nfn main() {\n let r[int] r = rec(mutable v=~[]);\n r.v = f();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #25287 - jdm:google-load, 
r=pcwalton<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed Indentation.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! An experimental pass that scources for `#[rustc_mir]` attributes,\n\/\/! builds the resulting MIR, and dumps it out into a file for inspection.\n\/\/!\n\/\/! The attribute formats that are currently accepted are:\n\/\/!\n\/\/! - `#[rustc_mir(graphviz=\"file.gv\")]`\n\/\/! - `#[rustc_mir(pretty=\"file.mir\")]`\n\nextern crate syntax;\n\nuse build;\nuse rustc::dep_graph::DepNode;\nuse rustc::mir::repr::Mir;\nuse pretty;\nuse hair::cx::Cx;\n\nuse rustc::mir::mir_map::MirMap;\nuse rustc::infer;\nuse rustc::traits::ProjectionMode;\nuse rustc::ty::{self, Ty, TyCtxt};\nuse rustc::util::common::ErrorReported;\nuse rustc::util::nodemap::NodeMap;\nuse rustc::hir;\nuse rustc::hir::intravisit::{self, Visitor};\nuse syntax::abi::Abi;\nuse syntax::ast;\nuse syntax::attr::AttrMetaMethods;\nuse syntax::codemap::Span;\n\npub fn build_mir_for_crate<'tcx>(tcx: &TyCtxt<'tcx>) -> MirMap<'tcx> {\n let mut map = MirMap {\n map: NodeMap(),\n };\n {\n let mut dump = OuterDump {\n tcx: tcx,\n map: &mut map,\n };\n tcx.visit_all_items_in_krate(DepNode::MirMapConstruction, &mut dump);\n }\n map\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ OuterDump -- walks a crate, looking for fn items and methods to build MIR from\n\nstruct OuterDump<'a, 'tcx: 'a> {\n tcx: &'a TyCtxt<'tcx>,\n map: &'a mut MirMap<'tcx>,\n}\n\nimpl<'a, 'tcx> OuterDump<'a, 'tcx> {\n fn visit_mir<OP>(&mut self, attributes: &'a [ast::Attribute], mut walk_op: OP)\n where OP: for<'m> FnMut(&mut InnerDump<'a, 'm, 'tcx>)\n {\n let mut closure_dump = InnerDump {\n tcx: self.tcx,\n attr: None,\n map: &mut *self.map,\n };\n for attr in attributes {\n if attr.check_name(\"rustc_mir\") {\n closure_dump.attr = Some(attr);\n }\n }\n walk_op(&mut closure_dump);\n }\n}\n\n\nimpl<'a, 'tcx> Visitor<'tcx> for OuterDump<'a, 'tcx> {\n fn visit_item(&mut self, item: &'tcx hir::Item) {\n self.visit_mir(&item.attrs, |c| intravisit::walk_item(c, item));\n intravisit::walk_item(self, item);\n }\n\n fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {\n match trait_item.node {\n hir::MethodTraitItem(_, Some(_)) => {\n self.visit_mir(&trait_item.attrs, |c| intravisit::walk_trait_item(c, trait_item));\n }\n hir::MethodTraitItem(_, None) |\n hir::ConstTraitItem(..) |\n hir::TypeTraitItem(..) => {}\n }\n intravisit::walk_trait_item(self, trait_item);\n }\n\n fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {\n match impl_item.node {\n hir::ImplItemKind::Method(..) => {\n self.visit_mir(&impl_item.attrs, |c| intravisit::walk_impl_item(c, impl_item));\n }\n hir::ImplItemKind::Const(..) | hir::ImplItemKind::Type(..) 
=> {}\n }\n intravisit::walk_impl_item(self, impl_item);\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ InnerDump -- dumps MIR for a single fn and its contained closures\n\nstruct InnerDump<'a, 'm, 'tcx: 'a + 'm> {\n tcx: &'a TyCtxt<'tcx>,\n map: &'m mut MirMap<'tcx>,\n attr: Option<&'a ast::Attribute>,\n}\n\nimpl<'a, 'm, 'tcx> Visitor<'tcx> for InnerDump<'a,'m,'tcx> {\n fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) {\n \/\/ ignore methods; the outer dump will call us for them independently\n }\n\n fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) {\n \/\/ ignore methods; the outer dump will call us for them independently\n }\n\n fn visit_fn(&mut self,\n fk: intravisit::FnKind<'tcx>,\n decl: &'tcx hir::FnDecl,\n body: &'tcx hir::Block,\n span: Span,\n id: ast::NodeId) {\n let implicit_arg_tys = if let intravisit::FnKind::Closure(..) = fk {\n vec![closure_self_ty(&self.tcx, id, body.id)]\n } else {\n vec![]\n };\n\n let param_env = ty::ParameterEnvironment::for_item(self.tcx, id);\n let infcx = infer::new_infer_ctxt(self.tcx,\n &self.tcx.tables,\n Some(param_env),\n ProjectionMode::AnyFinal);\n\n match build_mir(Cx::new(&infcx), implicit_arg_tys, id, span, decl, body) {\n Ok(mir) => assert!(self.map.map.insert(id, mir).is_none()),\n Err(ErrorReported) => {}\n }\n\n intravisit::walk_fn(self, fk, decl, body, span);\n }\n}\n\nfn build_mir<'a,'tcx:'a>(cx: Cx<'a,'tcx>,\n implicit_arg_tys: Vec<Ty<'tcx>>,\n fn_id: ast::NodeId,\n span: Span,\n decl: &'tcx hir::FnDecl,\n body: &'tcx hir::Block)\n -> Result<Mir<'tcx>, ErrorReported> {\n \/\/ fetch the fully liberated fn signature (that is, all bound\n \/\/ types\/lifetimes replaced)\n let fn_sig = match cx.tcx().tables.borrow().liberated_fn_sigs.get(&fn_id) {\n Some(f) => f.clone(),\n None => {\n span_bug!(span, \"no liberated fn sig for {:?}\", fn_id);\n }\n };\n\n let arguments =\n decl.inputs\n .iter()\n .enumerate()\n .map(|(index, arg)| {\n (fn_sig.inputs[index], &*arg.pat)\n })\n .collect();\n\n let (mut mir, scope_auxiliary) =\n build::construct(cx,\n span,\n fn_id,\n body.id,\n implicit_arg_tys,\n arguments,\n fn_sig.output,\n body);\n\n match cx.tcx().node_id_to_type(fn_id).sty {\n ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => {\n \/\/ RustCall pseudo-ABI untuples the last argument.\n if let Some(arg_decl) = mir.arg_decls.last_mut() {\n arg_decl.spread = true;\n }\n }\n _ => {}\n }\n\n pretty::dump_mir(cx.tcx(),\n \"mir_map\",\n &0,\n fn_id,\n &mir,\n Some(&scope_auxiliary));\n\n Ok(mir)\n}\n\nfn closure_self_ty<'a, 'tcx>(tcx: &TyCtxt<'tcx>,\n closure_expr_id: ast::NodeId,\n body_id: ast::NodeId)\n -> Ty<'tcx> {\n let closure_ty = tcx.node_id_to_type(closure_expr_id);\n\n \/\/ We're just hard-coding the idea that the signature will be\n \/\/ &self or &mut self and hence will have a bound region with\n \/\/ number 0, hokey.\n let region = ty::Region::ReFree(ty::FreeRegion {\n scope: tcx.region_maps.item_extent(body_id),\n bound_region: ty::BoundRegion::BrAnon(0),\n });\n let region = tcx.mk_region(region);\n\n match tcx.closure_kind(tcx.map.local_def_id(closure_expr_id)) {\n ty::ClosureKind::Fn =>\n tcx.mk_ref(region,\n ty::TypeAndMut { ty: closure_ty,\n mutbl: hir::MutImmutable }),\n ty::ClosureKind::FnMut =>\n tcx.mk_ref(region,\n ty::TypeAndMut { ty: closure_ty,\n mutbl: hir::MutMutable }),\n ty::ClosureKind::FnOnce =>\n closure_ty\n }\n}\n<commit_msg>mir: remove the unused attribute logic in the MIR map 
construction.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! An experimental pass that scources for `#[rustc_mir]` attributes,\n\/\/! builds the resulting MIR, and dumps it out into a file for inspection.\n\/\/!\n\/\/! The attribute formats that are currently accepted are:\n\/\/!\n\/\/! - `#[rustc_mir(graphviz=\"file.gv\")]`\n\/\/! - `#[rustc_mir(pretty=\"file.mir\")]`\n\nextern crate syntax;\n\nuse build;\nuse rustc::dep_graph::DepNode;\nuse rustc::mir::repr::Mir;\nuse pretty;\nuse hair::cx::Cx;\n\nuse rustc::mir::mir_map::MirMap;\nuse rustc::infer;\nuse rustc::traits::ProjectionMode;\nuse rustc::ty::{self, Ty, TyCtxt};\nuse rustc::util::common::ErrorReported;\nuse rustc::util::nodemap::NodeMap;\nuse rustc::hir;\nuse rustc::hir::intravisit::{self, Visitor};\nuse syntax::abi::Abi;\nuse syntax::ast;\nuse syntax::codemap::Span;\n\npub fn build_mir_for_crate<'tcx>(tcx: &TyCtxt<'tcx>) -> MirMap<'tcx> {\n let mut map = MirMap {\n map: NodeMap(),\n };\n {\n let mut dump = BuildMir {\n tcx: tcx,\n map: &mut map,\n };\n tcx.visit_all_items_in_krate(DepNode::MirMapConstruction, &mut dump);\n }\n map\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ BuildMir -- walks a crate, looking for fn items and methods to build MIR from\n\nstruct BuildMir<'a, 'tcx: 'a> {\n tcx: &'a TyCtxt<'tcx>,\n map: &'a mut MirMap<'tcx>,\n}\n\nimpl<'a, 'tcx> Visitor<'tcx> for BuildMir<'a, 'tcx> {\n fn visit_fn(&mut self,\n fk: intravisit::FnKind<'tcx>,\n decl: &'tcx hir::FnDecl,\n body: &'tcx hir::Block,\n span: Span,\n id: ast::NodeId) {\n let implicit_arg_tys = if let intravisit::FnKind::Closure(..) 
= fk {\n vec![closure_self_ty(&self.tcx, id, body.id)]\n } else {\n vec![]\n };\n\n let param_env = ty::ParameterEnvironment::for_item(self.tcx, id);\n let infcx = infer::new_infer_ctxt(self.tcx,\n &self.tcx.tables,\n Some(param_env),\n ProjectionMode::AnyFinal);\n\n match build_mir(Cx::new(&infcx), implicit_arg_tys, id, span, decl, body) {\n Ok(mir) => assert!(self.map.map.insert(id, mir).is_none()),\n Err(ErrorReported) => {}\n }\n\n intravisit::walk_fn(self, fk, decl, body, span);\n }\n}\n\nfn build_mir<'a,'tcx:'a>(cx: Cx<'a,'tcx>,\n implicit_arg_tys: Vec<Ty<'tcx>>,\n fn_id: ast::NodeId,\n span: Span,\n decl: &'tcx hir::FnDecl,\n body: &'tcx hir::Block)\n -> Result<Mir<'tcx>, ErrorReported> {\n \/\/ fetch the fully liberated fn signature (that is, all bound\n \/\/ types\/lifetimes replaced)\n let fn_sig = match cx.tcx().tables.borrow().liberated_fn_sigs.get(&fn_id) {\n Some(f) => f.clone(),\n None => {\n span_bug!(span, \"no liberated fn sig for {:?}\", fn_id);\n }\n };\n\n let arguments =\n decl.inputs\n .iter()\n .enumerate()\n .map(|(index, arg)| {\n (fn_sig.inputs[index], &*arg.pat)\n })\n .collect();\n\n let (mut mir, scope_auxiliary) =\n build::construct(cx,\n span,\n fn_id,\n body.id,\n implicit_arg_tys,\n arguments,\n fn_sig.output,\n body);\n\n match cx.tcx().node_id_to_type(fn_id).sty {\n ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => {\n \/\/ RustCall pseudo-ABI untuples the last argument.\n if let Some(arg_decl) = mir.arg_decls.last_mut() {\n arg_decl.spread = true;\n }\n }\n _ => {}\n }\n\n pretty::dump_mir(cx.tcx(),\n \"mir_map\",\n &0,\n fn_id,\n &mir,\n Some(&scope_auxiliary));\n\n Ok(mir)\n}\n\nfn closure_self_ty<'a, 'tcx>(tcx: &TyCtxt<'tcx>,\n closure_expr_id: ast::NodeId,\n body_id: ast::NodeId)\n -> Ty<'tcx> {\n let closure_ty = tcx.node_id_to_type(closure_expr_id);\n\n \/\/ We're just hard-coding the idea that the signature will be\n \/\/ &self or &mut self and hence will have a bound region with\n \/\/ number 0, hokey.\n let region = ty::Region::ReFree(ty::FreeRegion {\n scope: tcx.region_maps.item_extent(body_id),\n bound_region: ty::BoundRegion::BrAnon(0),\n });\n let region = tcx.mk_region(region);\n\n match tcx.closure_kind(tcx.map.local_def_id(closure_expr_id)) {\n ty::ClosureKind::Fn =>\n tcx.mk_ref(region,\n ty::TypeAndMut { ty: closure_ty,\n mutbl: hir::MutImmutable }),\n ty::ClosureKind::FnMut =>\n tcx.mk_ref(region,\n ty::TypeAndMut { ty: closure_ty,\n mutbl: hir::MutMutable }),\n ty::ClosureKind::FnOnce =>\n closure_ty\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax::ast;\nuse syntax::ext::base;\nuse syntax::ext::build::AstBuilder;\nuse syntax::symbol::Symbol;\nuse syntax_pos;\nuse syntax::tokenstream;\nuse syntax::print::pprust;\n\nuse std::string::String;\n\npub fn expand_syntax_ext(cx: &mut base::ExtCtxt,\n sp: syntax_pos::Span,\n tts: &[tokenstream::TokenTree])\n -> Box<base::MacResult + 'static> {\n let es = match base::get_exprs_from_tts(cx, sp, tts) {\n Some(e) => e,\n None => return base::DummyResult::expr(sp),\n };\n let mut accumulator = String::new();\n for e in es {\n match e.node {\n ast::ExprKind::Lit(ref lit) => {\n match lit.node {\n ast::LitKind::Str(ref s, _) |\n ast::LitKind::Float(ref s, _) |\n ast::LitKind::FloatUnsuffixed(ref s) => {\n accumulator.push_str(&s.as_str());\n }\n ast::LitKind::Char(c) => {\n accumulator.push(c);\n }\n ast::LitKind::Int(i, ast::LitIntType::Unsigned(_)) |\n ast::LitKind::Int(i, ast::LitIntType::Signed(_)) |\n ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) => {\n accumulator.push_str(&format!(\"{}\", i));\n }\n ast::LitKind::Bool(b) => {\n accumulator.push_str(&format!(\"{}\", b));\n }\n ast::LitKind::Byte(..) |\n ast::LitKind::ByteStr(..) => {\n cx.span_err(e.span, \"cannot concatenate a byte string literal\");\n }\n }\n }\n _ => {\n let mut err = cx.struct_span_err(e.span, \"expected a literal\");\n err.span_suggestion(\n e.span,\n \"consider changing this to\",\n format!(\"\\\"{{}}\\\", {}\", pprust::expr_to_string(&e))\n );\n err.emit();\n }\n }\n }\n let sp = sp.apply_mark(cx.current_expansion.mark);\n base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))\n}\n<commit_msg>add span note<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax::ast;\nuse syntax::ext::base;\nuse syntax::ext::build::AstBuilder;\nuse syntax::symbol::Symbol;\nuse syntax_pos;\nuse syntax::tokenstream;\nuse syntax::print::pprust;\n\nuse std::string::String;\n\npub fn expand_syntax_ext(cx: &mut base::ExtCtxt,\n sp: syntax_pos::Span,\n tts: &[tokenstream::TokenTree])\n -> Box<base::MacResult + 'static> {\n let es = match base::get_exprs_from_tts(cx, sp, tts) {\n Some(e) => e,\n None => return base::DummyResult::expr(sp),\n };\n let mut accumulator = String::new();\n for e in es {\n match e.node {\n ast::ExprKind::Lit(ref lit) => {\n match lit.node {\n ast::LitKind::Str(ref s, _) |\n ast::LitKind::Float(ref s, _) |\n ast::LitKind::FloatUnsuffixed(ref s) => {\n accumulator.push_str(&s.as_str());\n }\n ast::LitKind::Char(c) => {\n accumulator.push(c);\n }\n ast::LitKind::Int(i, ast::LitIntType::Unsigned(_)) |\n ast::LitKind::Int(i, ast::LitIntType::Signed(_)) |\n ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) => {\n accumulator.push_str(&format!(\"{}\", i));\n }\n ast::LitKind::Bool(b) => {\n accumulator.push_str(&format!(\"{}\", b));\n }\n ast::LitKind::Byte(..) |\n ast::LitKind::ByteStr(..) 
=> {\n cx.span_err(e.span, \"cannot concatenate a byte string literal\");\n }\n }\n }\n _ => {\n let mut err = cx.struct_span_err(e.span, \"expected a literal\");\n let msg = cx.codemap().span_to_snippet(e.span).unwrap_or_else(\n |_| pprust::expr_to_string(&e)\n );\n err.span_suggestion(\n e.span,\n \"consider changing this to\",\n format!(\"\\\"{{}}\\\", {}\", msg)\n );\n err.span_note(\n e.span,\n \"you might be missing a string literal to format with\",\n );\n err.emit();\n }\n }\n }\n let sp = sp.apply_mark(cx.current_expansion.mark);\n base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rename the tar writer thing.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added unix platform.<commit_after>\/\/\/ Write a string to the output channel.\npub unsafe fn puts(s: &str) {\n print!(\"{}\", s);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add unicode tests to all images docker-exec\/dexec#27<commit_after>fn main() {\n println!(\"hello unicode 👾\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust primitives<commit_after>use std::fmt::{Display, Formatter, Result};\nuse std::mem;\n\n#[allow(dead_code,unused_variables,unused_mut)]\nfn main() {\n let an_integer0 = 5i8;\n let an_integer1 = 5i16;\n let an_integer2 = 5i32;\n let an_integer3 = 5i64;\n let unsigned_integer0 = 5u8;\n let unsigned_integer1 = 5u16;\n let unsigned_integer2 = 5u32;\n let unsigned_integer3 = 5u64;\n let a_float0: f32 = 1.0;\n let a_float1: f64 = 1.0;\n let char0: char = 'a';\n let char1 = 'α';\n let char2 = '∞'; \/\/ 4 bytes\n let default_float = 3.0; \/\/ f64\n let default_integer = 7; \/\/ i32\n let mut mutable = 12; \/\/ mutable i32\n \/\/ mutable = true; \/\/ type of variable can't be changed\n let logical: bool = true;\n let unit: () = ();\n let array = [1, 2, 3];\n let tuple = (1, true);\n\n \/\/ 2.1\n println!(\"0011 AND 0101 is {:04b}\", 0b0011u32 & 0b0101);\n println!(\"0011 OR 0101 is {:04b}\", 0b0011u32 | 0b0101);\n println!(\"0011 XOR 0101 is {:04b}\", 0b0011u32 ^ 0b0101);\n println!(\"1 << 5 is {}\", 1u32 << 5);\n println!(\"0x80 >> 2 is 0x{:x}\", 0x82u32 >> 2);\n println!(\"One million: {}\", 1_000_000u32);\n\n \/\/ 2.2\n fn reverse(pair: (i32, bool)) -> (bool, i32) {\n let (i, b) = pair;\n (b, i)\n }\n #[derive(Debug)]\n struct Matrix(f32, f32, f32, f32);\n let tuple_of_tuples = ((1u8, 2u16, 3u32), (4u64, -1i8), -2i16);\n println!(\"tuple of tuples {:?}\", tuple_of_tuples);\n println!(\"tuple 1: {:?}\", tuple_of_tuples.0);\n\n let tuple = (1, \"hello\", 4.5, true);\n let (a,b,c,d) = tuple;\n println!(\"{:?} {:?} {:?} {:?}\", a, b, c, d);\n\n let matrix = Matrix(1.1, 1.2, 2.1, 2.2);\n println!(\"{:?}\", matrix);\n impl Display for Matrix {\n fn fmt(&self, f: &mut Formatter) -> Result {\n write!(f, \"( {} {} )\\n( {} {} )\", self.0, self.1, self.2, self.3)\n }\n }\n println!(\"{}\", matrix);\n\n fn transpose(m: Matrix) -> Matrix {\n return Matrix(m.0, m.2, m.1, m.3);\n }\n println!(\"Transpose:\\n{}\", transpose(matrix));\n\n \/\/ 2.3\n let xs: [i32; 5] = [1,2,3,4,5];\n let ys: [i32; 500] = [0; 500]; \/\/ init all elements to 0\n println!(\"first element of the array: {}\", xs[1]);\n println!(\"second element of the array: {}\", xs[2]);\n \/\/println!(\"array out of bound: {}\", xs[5]);\n println!(\"array size: {}\", xs.len());\n println!(\"array occupies {} bytes\", mem::size_of_val(&xs));\n fn analyze_slice(slice: &[i32]) {\n println!(\"slice[0]: {}, size: {}\", slice[0], slice.len());\n }\n 
analyze_slice(&xs);\n analyze_slice(&ys[1..4]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #66756<commit_after>\/\/ Test for <https:\/\/github.com\/rust-lang\/rust\/issues\/66756>\n\n\/\/ check-pass\n\n#![feature(const_if_match)]\n\nenum E {\n A,\n B,\n C\n}\n\nconst fn f(e: E) {\n match e {\n E::A => {}\n E::B => {}\n E::C => {}\n }\n}\n\nconst fn g(e: E) {\n match e {\n _ => {}\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added network module.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Forgot a file.<commit_after>use session::Session;\nuse subscription::Subscription;\nuse frame::Frame;\nuse option_setter::OptionSetter;\nuse std::old_io::IoResult;\n\npub struct SubscriptionBuilder <'a> {\n pub session: &'a mut Session<'a>,\n pub frame: Frame,\n pub subscription: Subscription<'a> \n}\n\nimpl <'a> SubscriptionBuilder <'a> {\n pub fn new(session: &'a mut Session<'a>, frame: Frame, sub: Subscription<'a>) -> SubscriptionBuilder <'a> {\n SubscriptionBuilder {\n session: session,\n frame: frame,\n subscription: sub\n }\n } \n\n #[allow(dead_code)] \n pub fn create(mut self) -> IoResult<String> {\n try!(self.session.send(self.frame));\n debug!(\"Registering callback for subscription id '{}' from builder\", self.subscription.id);\n let id_to_return = self.subscription.id.to_string();\n self.session.subscriptions.insert(self.subscription.id.to_string(), self.subscription);\n Ok(id_to_return)\n }\n\n #[allow(dead_code)] \n pub fn with<T>(self, option_setter: T) -> SubscriptionBuilder<'a> where T: OptionSetter<SubscriptionBuilder<'a>> {\n option_setter.set_option(self) \n } \n}\n\n\n<|endoftext|>"} {"text":"<commit_before>\/* MT19937 Mersenne Twister RNG\n *\n * Official Mersenne Twister page:\n * http:\/\/www.math.sci.hiroshima-u.ac.jp\/~m-mat\/MT\/emt.html\n *\n * Dmitry Vasiliev <dima@hlabs.org>\n *\/\n\nextern crate test;\n\nuse std::cmp::max;\n\n\nstatic N: uint = 624;\nstatic M: uint = 397;\n\nstruct MersenneTwister {\n state: Vec<u32>,\n index: uint\n}\n\nimpl MersenneTwister {\n fn new(init_key: &[u32]) -> MersenneTwister {\n MersenneTwister{state: init_by_vec(init_key), index: N}\n }\n\n fn rand_u32(&mut self) -> u32 {\n if self.index >= N {\n self.init();\n }\n let mut y = self.state[self.index];\n self.index += 1;\n y ^= y >> 11;\n y ^= (y << 7) & 0x9d2c5680;\n y ^= (y << 15) & 0xefc60000;\n y ^ (y >> 18)\n }\n\n fn rand_f64(&mut self) -> f64 {\n self.rand_u32() as f64 \/ 4294967296.0\n }\n\n #[inline]\n fn init(&mut self) {\n for i in range(0, N) {\n let y = (self.state[i] & 0x80000000)\n | (self.state[(i + 1) % N] & 0x7fffffff);\n self.state[i] = self.state[(i + M) % N] ^ (y >> 1);\n if y % 2 != 0 {\n self.state[i] ^= 0x9908b0df;\n }\n }\n self.index = 0;\n }\n}\n\n#[inline]\nfn init_state(seed: u32) -> Vec<u32> {\n range(0, N as u32).scan(seed, |state, i| {\n let prev = *state;\n *state = 1812433253 * (*state ^ (*state >> 30)) + i + 1;\n Some(prev)\n }).collect()\n}\n\n#[inline]\nfn init_by_vec(init_key: &[u32]) -> Vec<u32> {\n let mut state = init_state(19650218);\n let len = init_key.len();\n for i in range(0, max(N, len)) {\n state[i % (N - 1) + 1] = (state[i % (N - 1) + 1]\n ^ ((state[i % (N - 1)] ^ (state[i % (N - 1)] >> 30)) * 1664525))\n + init_key[i % len] + (i % len) as u32;\n if (i + 1) % (N - 1) == 0 {\n state[0] = state[N - 1];\n }\n }\n\n for i in range(1, N) {\n state[i % (N - 1) + 1] = (state[i % (N - 1) + 1]\n ^ ((state[i % (N - 1)] ^ (state[i % (N - 1)] >> 30)) * 1566083941))\n - 
(i % (N - 1) + 1) as u32;\n if (i + 1) % (N - 1) == 0 {\n state[0] = state[N - 1];\n }\n }\n\n state[0] = 0x80000000;\n state\n}\n\n\/*\n * Main entry point\n *\/\n#[cfg(not(test))]\nfn main() {\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n for i in range(0u, 1000) {\n match i % 5 == 4 {\n false => print!(\"{:>10} \", rng.rand_u32()),\n true => println!(\"{:>10}\", rng.rand_u32())\n }\n }\n println!(\"\");\n for i in range(0u, 1000) {\n match i % 5 == 4 {\n false => print!(\"{:>10.8} \", rng.rand_f64()),\n true => println!(\"{:>10.8}\", rng.rand_f64())\n }\n }\n}\n\n\/*\n * Tests\n *\n * Link to the test data:\n * http:\/\/www.math.sci.hiroshima-u.ac.jp\/~m-mat\/MT\/MT2002\/CODES\/mt19937ar.out\n *\/\n#[cfg(test)]\nmod tests {\n use test::Bencher;\n\n use super::MersenneTwister;\n\n #[test]\n fn test_rand_u32() {\n let expected: [u32, ..50] = [\n 1067595299, 955945823, 477289528, 4107218783, 4228976476,\n 3344332714, 3355579695, 227628506, 810200273, 2591290167,\n 2560260675, 3242736208, 646746669, 1479517882, 4245472273,\n 1143372638, 3863670494, 3221021970, 1773610557, 1138697238,\n 1421897700, 1269916527, 2859934041, 1764463362, 3874892047,\n 3965319921, 72549643, 2383988930, 2600218693, 3237492380,\n 2792901476, 725331109, 605841842, 271258942, 715137098,\n 3297999536, 1322965544, 4229579109, 1395091102, 3735697720,\n 2101727825, 3730287744, 2950434330, 1661921839, 2895579582,\n 2370511479, 1004092106, 2247096681, 2111242379, 3237345263,\n ];\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n for (i, &exp) in expected.iter().enumerate() {\n assert_eq!((i, exp), (i, rng.rand_u32()));\n }\n }\n\n #[test]\n fn test_rand_f64() {\n let expected: [f64, ..50] = [\n 0.76275443, 0.99000644, 0.98670464, 0.10143112, 0.27933125,\n 0.69867227, 0.94218740, 0.03427201, 0.78842173, 0.28180608,\n 0.92179002, 0.20785655, 0.54534773, 0.69644020, 0.38107718,\n 0.23978165, 0.65286910, 0.07514568, 0.22765211, 0.94872929,\n 0.74557914, 0.62664415, 0.54708246, 0.90959343, 0.42043116,\n 0.86334511, 0.19189126, 0.14718544, 0.70259889, 0.63426346,\n 0.77408121, 0.04531601, 0.04605807, 0.88595519, 0.69398270,\n 0.05377184, 0.61711170, 0.05565708, 0.10133577, 0.41500776,\n 0.91810699, 0.22320679, 0.23353705, 0.92871862, 0.98897234,\n 0.19786706, 0.80558809, 0.06961067, 0.55840445, 0.90479405,\n ];\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n \/\/ For the test data we use 1000 values should be skipped\n for _ in range(0u, 1000) {\n rng.rand_f64();\n }\n for (i, &exp) in expected.iter().enumerate() {\n assert_eq!((i, true), (i, rng.rand_f64() - exp < 0.00000001f64));\n }\n }\n\n #[bench]\n fn bench_rand_u32(b: &mut Bencher) {\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n b.iter(|| rng.rand_u32());\n }\n}\n<commit_msg>Slightly better code for 3\/21<commit_after>\/* MT19937 Mersenne Twister RNG\n *\n * Official Mersenne Twister page:\n * http:\/\/www.math.sci.hiroshima-u.ac.jp\/~m-mat\/MT\/emt.html\n *\n * Dmitry Vasiliev <dima@hlabs.org>\n *\/\n\nextern crate test;\n\nuse std::cmp::max;\n\n\nstatic N: uint = 624;\nstatic M: uint = 397;\n\nstruct MersenneTwister {\n state: Vec<u32>,\n index: uint\n}\n\nimpl MersenneTwister {\n fn new(init_key: &[u32]) -> MersenneTwister {\n MersenneTwister{state: init_by_vec(init_key), index: N}\n }\n\n fn rand_u32(&mut self) -> u32 {\n if self.index >= N {\n self.init();\n }\n let mut y = self.state[self.index];\n self.index += 1;\n y ^= y >> 11;\n y ^= (y << 7) & 0x9d2c5680;\n y ^= (y << 
15) & 0xefc60000;\n y ^ (y >> 18)\n }\n\n fn rand_f64(&mut self) -> f64 {\n self.rand_u32() as f64 \/ 4294967296.0\n }\n\n #[inline]\n fn init(&mut self) {\n for i in range(0, N) {\n let y = (self.state[i] & 0x80000000)\n | (self.state[(i + 1) % N] & 0x7fffffff);\n self.state[i] = match y % 2 {\n 0 => self.state[(i + M) % N] ^ (y >> 1),\n _ => self.state[(i + M) % N] ^ (y >> 1) ^ 0x9908b0df\n }\n }\n self.index = 0;\n }\n}\n\n#[inline]\nfn init_state(seed: u32) -> Vec<u32> {\n range(0, N as u32).scan(seed, |state, i| {\n let prev = *state;\n *state = 1812433253 * (*state ^ (*state >> 30)) + i + 1;\n Some(prev)\n }).collect()\n}\n\n#[inline]\nfn init_by_vec(init_key: &[u32]) -> Vec<u32> {\n let mut state = init_state(19650218);\n let len = init_key.len();\n let limit = N - 1;\n for i in range(0, max(N, len)) {\n let idx = i % limit;\n state[idx + 1] = (state[idx + 1]\n ^ ((state[idx] ^ (state[idx] >> 30)) * 1664525))\n + init_key[i % len] + (i % len) as u32;\n if (i + 1) % limit == 0 {\n state[0] = state[limit];\n }\n }\n\n for i in range(2, N) {\n state[i] = (state[i]\n ^ ((state[i - 1] ^ (state[i - 1] >> 30)) * 1566083941)) - i as u32;\n }\n\n state[1] = (state[1]\n ^ ((state[N - 1] ^ (state[N - 1] >> 30)) * 1566083941)) - 1;\n state[0] = 0x80000000;\n state\n}\n\n\/*\n * Main entry point\n *\/\n#[cfg(not(test))]\nfn main() {\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n for i in range(0u, 1000) {\n match i % 5 == 4 {\n false => print!(\"{:>10} \", rng.rand_u32()),\n true => println!(\"{:>10}\", rng.rand_u32())\n }\n }\n println!(\"\");\n for i in range(0u, 1000) {\n match i % 5 == 4 {\n false => print!(\"{:>10.8} \", rng.rand_f64()),\n true => println!(\"{:>10.8}\", rng.rand_f64())\n }\n }\n}\n\n\/*\n * Tests\n *\n * Link to the test data:\n * http:\/\/www.math.sci.hiroshima-u.ac.jp\/~m-mat\/MT\/MT2002\/CODES\/mt19937ar.out\n *\/\n#[cfg(test)]\nmod tests {\n use test::Bencher;\n\n use super::MersenneTwister;\n\n #[test]\n fn test_rand_u32() {\n let expected: [u32, ..50] = [\n 1067595299, 955945823, 477289528, 4107218783, 4228976476,\n 3344332714, 3355579695, 227628506, 810200273, 2591290167,\n 2560260675, 3242736208, 646746669, 1479517882, 4245472273,\n 1143372638, 3863670494, 3221021970, 1773610557, 1138697238,\n 1421897700, 1269916527, 2859934041, 1764463362, 3874892047,\n 3965319921, 72549643, 2383988930, 2600218693, 3237492380,\n 2792901476, 725331109, 605841842, 271258942, 715137098,\n 3297999536, 1322965544, 4229579109, 1395091102, 3735697720,\n 2101727825, 3730287744, 2950434330, 1661921839, 2895579582,\n 2370511479, 1004092106, 2247096681, 2111242379, 3237345263,\n ];\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n for (i, &exp) in expected.iter().enumerate() {\n assert_eq!((i, exp), (i, rng.rand_u32()));\n }\n }\n\n #[test]\n fn test_rand_f64() {\n let expected: [f64, ..50] = [\n 0.76275443, 0.99000644, 0.98670464, 0.10143112, 0.27933125,\n 0.69867227, 0.94218740, 0.03427201, 0.78842173, 0.28180608,\n 0.92179002, 0.20785655, 0.54534773, 0.69644020, 0.38107718,\n 0.23978165, 0.65286910, 0.07514568, 0.22765211, 0.94872929,\n 0.74557914, 0.62664415, 0.54708246, 0.90959343, 0.42043116,\n 0.86334511, 0.19189126, 0.14718544, 0.70259889, 0.63426346,\n 0.77408121, 0.04531601, 0.04605807, 0.88595519, 0.69398270,\n 0.05377184, 0.61711170, 0.05565708, 0.10133577, 0.41500776,\n 0.91810699, 0.22320679, 0.23353705, 0.92871862, 0.98897234,\n 0.19786706, 0.80558809, 0.06961067, 0.55840445, 0.90479405,\n ];\n let mut rng = 
MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n \/\/ For the test data we use 1000 values should be skipped\n for _ in range(0u, 1000) {\n rng.rand_f64();\n }\n for (i, &exp) in expected.iter().enumerate() {\n assert_eq!((i, true), (i, rng.rand_f64() - exp < 0.00000001f64));\n }\n }\n\n #[bench]\n fn bench_rand_u32(b: &mut Bencher) {\n let mut rng = MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]);\n b.iter(|| rng.rand_u32());\n }\n\n #[bench]\n fn bench_new(b: &mut Bencher) {\n b.iter(|| MersenneTwister::new(&[0x123, 0x234, 0x345, 0x456]));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Offset is a Monoid<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>optimize permissions conversions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>bit faster, works sometimes, gets stuck most times. needs better selection algorithm<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add main.rs to solve errors<commit_after>extern crate simcom;\n\nuse std::io::{self, Read};\n\nuse simcom::lexer::Lexer;\nuse simcom::parser::Parser;\n\nfn main() {\n let content = {\n let mut buffer = String::new();\n let stdin = io::stdin();\n stdin.lock().read_to_string(&mut buffer).unwrap();\n \n buffer\n };\n\n let ast = Parser::new(Lexer::new(&content));\n\n for node in ast {\n println!(\"{:?}\", node);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>macro usage<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a trailing slash to folder names in the list view<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>dynamic refresh<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse spec::{LinkerFlavor, Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::openbsd_base::opts();\n base.cpu = \"pentium4\".to_string();\n base.max_atomic_width = Some(64);\n base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push(\"-m32\".to_string());\n base.stack_probes = true;\n\n Ok(Target {\n llvm_target: \"i686-unknown-openbsd\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_c_int_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128\".to_string(),\n arch: \"x86\".to_string(),\n target_os: \"openbsd\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: base,\n })\n}\n<commit_msg>openbsd-i686: use lld as linker by default<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse spec::{LinkerFlavor, Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::openbsd_base::opts();\n base.cpu = \"pentium4\".to_string();\n base.max_atomic_width = Some(64);\n base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push(\"-m32\".to_string());\n base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push(\"-fuse-ld=lld\".to_string());\n base.stack_probes = true;\n\n Ok(Target {\n llvm_target: \"i686-unknown-openbsd\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_c_int_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128\".to_string(),\n arch: \"x86\".to_string(),\n target_os: \"openbsd\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: base,\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Don't import M; fix build<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test case.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks that lint attributes work on non-item AST nodes\n\nfn main() {\n #[deny(unreachable_code)]\n loop {\n break;\n \"unreachable\"; \/\/~ ERROR unreachable statement\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add a build-pass test for issue 62220<commit_after>\/\/ build-pass\n#![allow(incomplete_features)]\n\n#![feature(const_generics)]\npub struct Vector<T, const N: usize>([T; N]);\n\npub type TruncatedVector<T, const N: usize> = Vector<T, { N - 1 }>;\n\nimpl<T, const N: usize> Vector<T, { N }> {\n \/\/\/ Drop the last component and return the vector with one fewer dimension.\n pub fn trunc(self) -> (TruncatedVector<T, { N }>, T) {\n unimplemented!()\n }\n}\n\nfn vec4<T>(a: T, b: T, c: T, d: T) -> Vector<T, 4> {\n Vector([a, b, c, d])\n}\n\nfn main() {\n let (_xyz, _w): (TruncatedVector<u32, 4>, u32) = vec4(0u32, 1, 2, 3).trunc();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(syncfile): split write_syncfile_header into functions; add HMAC skeleton<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement PredDouble for positive bin nats<commit_after><|endoftext|>"} {"text":"<commit_before>use data::{Asn1Seq, Asn1SeqField};\nuse parse::{asn1_type_name, asn1_type};\nuse parse::space::{skip_other};\n\nnamed!(pub asn1_seq_field <Asn1SeqField>, chain!(\n skip_other? ~\n name: asn1_type_name ~\n skip_other? ~\n asn1_type: asn1_type,\n || Asn1SeqField {\n name: name,\n asn1_type: asn1_type,\n }\n));\n\nnamed!(pub asn1_seq <Asn1Seq>, chain!(\n tag!(\"SEQUENCE\") ~\n skip_other? ~\n fields: delimited!(\n tag!(\"{\"),\n separated_list!(\n chain!(skip_other? 
~ tag!(\",\"), || ()),\n asn1_seq_field\n ),\n tuple!(opt!(skip_other), tag!(\"}\"))\n ),\n || Asn1Seq {\n fields: fields,\n }\n));\n\n#[test]\nfn test_asn1_sequence_field() {\n let field1 = Asn1SeqField {\n name: \"foo\".into(),\n asn1_type: ::data::Asn1Type::Type(\"Bar\".into()),\n };\n let field2 = Asn1SeqField {\n name: \"asdf\".into(),\n asn1_type: ::data::Asn1Type::Type(\"INTEGER\".into()),\n };\n assert_eq!(\n field1,\n asn1_seq_field(\"foo Bar\".as_bytes()).unwrap().1\n );\n assert_eq!(\n field2,\n asn1_seq_field(\"asdf INTEGER,\".as_bytes()).unwrap().1\n );\n assert_eq!(\n field1,\n asn1_seq_field(\"foo--test\\n Bar\".as_bytes()).unwrap().1\n );\n}\n\n#[test]\nfn test_seq_fields() {\n let seq = Asn1Seq {\n fields: vec![\n Asn1SeqField {\n name: \"foo\".into(),\n asn1_type: ::data::Asn1Type::Type(\"Bar\".into()),\n },\n Asn1SeqField {\n name: \"asdf\".into(),\n asn1_type: ::data::Asn1Type::Type(\"INTEGER\".into()),\n }\n ],\n };\n println!(\"{:#?}\", asn1_seq(\"\\\n SEQUENCE {\\\n foo Bar,\\\n asdf INTEGER\\\n }\\\n \".as_bytes()));\n assert_eq!(\n seq,\n asn1_seq(\"\\\n SEQUENCE {\\\n foo Bar,\\\n asdf INTEGER\\\n }\\\n \".as_bytes()).unwrap().1\n );\n}\n<commit_msg>Improve sequence tests<commit_after>use data::{Asn1Seq, Asn1SeqField};\nuse parse::{asn1_type_name, asn1_type};\nuse parse::space::{skip_other};\n\nnamed!(pub asn1_seq_field <Asn1SeqField>, chain!(\n skip_other? ~\n name: asn1_type_name ~\n skip_other? ~\n asn1_type: asn1_type,\n || Asn1SeqField {\n name: name,\n asn1_type: asn1_type,\n }\n));\n\nnamed!(pub asn1_seq <Asn1Seq>, chain!(\n tag!(\"SEQUENCE\") ~\n skip_other? ~\n fields: delimited!(\n tag!(\"{\"),\n separated_list!(\n chain!(skip_other? ~ tag!(\",\"), || ()),\n asn1_seq_field\n ),\n tuple!(opt!(skip_other), tag!(\"}\"))\n ),\n || Asn1Seq {\n fields: fields,\n }\n));\n\n#[test]\nfn test_asn1_sequence_field() {\n let field1 = Asn1SeqField {\n name: \"foo\".into(),\n asn1_type: ::data::Asn1Type::Type(\"Bar\".into()),\n };\n let field2 = Asn1SeqField {\n name: \"asdf\".into(),\n asn1_type: ::data::Asn1Type::Type(\"INTEGER\".into()),\n };\n assert_eq!(\n field1,\n asn1_seq_field(\"foo Bar\".as_bytes()).unwrap().1\n );\n assert_eq!(\n field2,\n asn1_seq_field(\"asdf INTEGER,\".as_bytes()).unwrap().1\n );\n assert_eq!(\n field1,\n asn1_seq_field(\"foo--test\\n Bar\".as_bytes()).unwrap().1\n );\n}\n\n#[test]\nfn test_seq_fields() {\n let seq = Asn1Seq {\n fields: vec![\n Asn1SeqField {\n name: \"foo\".into(),\n asn1_type: ::data::Asn1Type::Type(\"Bar\".into()),\n },\n Asn1SeqField {\n name: \"asdf\".into(),\n asn1_type: ::data::Asn1Type::Type(\"INTEGER\".into()),\n }\n ],\n };\n assert_eq!(\n seq,\n asn1_seq(\"\\\n SEQUENCE {\\\n foo Bar,\\\n asdf INTEGER\\\n }\\\n \".as_bytes()).unwrap().1\n );\n assert_eq!(\n seq,\n asn1_seq(\"\\\n SEQUENCE {\n foo Bar --,\n , asdf INTEGER\n }\n \".as_bytes()).unwrap().1\n );\n assert!(asn1_seq(\"SEQUENC \".as_bytes()).is_err());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (C) 2014 Josh Stone\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse fuse::FileType;\nuse git2;\nuse libc;\nuse libc::consts::os::posix88;\nuse std::collections::hash_map;\nuse std::default::Default;\nuse std::path::{Path, PathBuf};\n\nuse inode;\n\n\n\/\/\/ Represents a virtual directory in reference paths\n\/\/\/ (e.g. `refs\/heads\/master` needs intermediate `refs\/` and `refs\/heads\/`)\npub struct RefDir {\n entries: hash_map::HashMap<PathBuf, inode::Id>,\n}\n\nimpl RefDir {\n pub fn new() -> Box<inode::Inode+'static> {\n Box::new(RefDir {\n entries: Default::default(),\n })\n }\n}\n\nimpl inode::Inode for RefDir {\n fn lookup(&mut self, _repo: &git2::Repository, name: &Path\n ) -> Result<inode::Id, libc::c_int> {\n self.entries.get(name).cloned().ok_or(posix88::ENOENT)\n }\n\n fn getattr(&mut self, _repo: &git2::Repository, attr: inode::FileAttr\n ) -> Result<inode::FileAttr, libc::c_int> {\n let size = self.entries.len() as u64;\n Ok(inode::FileAttr {\n size: size,\n blocks: inode::st_blocks(size),\n kind: FileType::Directory,\n perm: 0755,\n ..attr\n })\n }\n\n fn readdir<'a>(&mut self, _repo: &git2::Repository, offset: u64,\n mut add: Box<FnMut(inode::Id, FileType, &Path) -> bool + 'a>\n ) -> Result<(), libc::c_int> {\n if offset < self.entries.len() as u64 {\n for (path, &id) in self.entries.iter().skip(offset as usize) {\n if add(id, FileType::Directory, path) {\n break;\n }\n }\n }\n Ok(())\n }\n}\n\n\n\n<commit_msg>Path\/PathBuf workaround for rustc ICE<commit_after>\/\/ Copyright (C) 2014 Josh Stone\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse fuse::FileType;\nuse git2;\nuse libc;\nuse libc::consts::os::posix88;\nuse std::collections::hash_map;\nuse std::default::Default;\nuse std::path::{Path, PathBuf};\n\nuse inode;\n\n\n\/\/\/ Represents a virtual directory in reference paths\n\/\/\/ (e.g. 
`refs\/heads\/master` needs intermediate `refs\/` and `refs\/heads\/`)\npub struct RefDir {\n entries: hash_map::HashMap<PathBuf, inode::Id>,\n}\n\nimpl RefDir {\n pub fn new() -> Box<inode::Inode+'static> {\n Box::new(RefDir {\n entries: Default::default(),\n })\n }\n}\n\nimpl inode::Inode for RefDir {\n fn lookup(&mut self, _repo: &git2::Repository, name: &Path\n ) -> Result<inode::Id, libc::c_int> {\n \/\/ workaround for rust-lang\/rust#23649\n let name_buf = name.to_path_buf();\n let name = &name_buf;\n\n self.entries.get(name).cloned().ok_or(posix88::ENOENT)\n }\n\n fn getattr(&mut self, _repo: &git2::Repository, attr: inode::FileAttr\n ) -> Result<inode::FileAttr, libc::c_int> {\n let size = self.entries.len() as u64;\n Ok(inode::FileAttr {\n size: size,\n blocks: inode::st_blocks(size),\n kind: FileType::Directory,\n perm: 0755,\n ..attr\n })\n }\n\n fn readdir<'a>(&mut self, _repo: &git2::Repository, offset: u64,\n mut add: Box<FnMut(inode::Id, FileType, &Path) -> bool + 'a>\n ) -> Result<(), libc::c_int> {\n if offset < self.entries.len() as u64 {\n for (path, &id) in self.entries.iter().skip(offset as usize) {\n if add(id, FileType::Directory, path) {\n break;\n }\n }\n }\n Ok(())\n }\n}\n\n\n\n<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"uu_sort\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Michael Yin <mikeyin@mikeyin.org>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n#![allow(dead_code)]\n\nextern crate getopts;\nextern crate libc;\nextern crate semver;\n\n#[macro_use]\nextern crate uucore;\n#[macro_use]\nextern crate itertools;\n\nuse std::cmp::Ordering;\nuse std::collections::BinaryHeap;\nuse std::fs::File;\nuse std::io::{BufRead, BufReader, BufWriter, Lines, Read, stdin, stdout, Write};\nuse std::mem::replace;\nuse std::path::Path;\nuse uucore::fs::is_stdin_interactive;\nuse semver::Version;\nuse itertools::Itertools; \/\/ for Iterator::dedup()\n\nstatic NAME: &'static str = \"sort\";\nstatic VERSION: &'static str = env!(\"CARGO_PKG_VERSION\");\n\nstatic DECIMAL_PT: char = '.';\nstatic THOUSANDS_SEP: char = ',';\n\nenum SortMode {\n Numeric,\n HumanNumeric,\n Month,\n Version,\n Default,\n}\n\nstruct Settings {\n mode: SortMode,\n merge: bool,\n reverse: bool,\n outfile: Option<String>,\n stable: bool,\n unique: bool,\n check: bool,\n compare_fns: Vec<fn(&String, &String) -> Ordering>,\n}\n\nimpl Default for Settings {\n fn default() -> Settings {\n Settings {\n mode: SortMode::Default,\n merge: false,\n reverse: false,\n outfile: None,\n stable: false,\n unique: false,\n check: false,\n compare_fns: Vec::new(),\n }\n }\n}\n\nstruct MergeableFile<'a> {\n lines: Lines<BufReader<Box<Read>>>,\n current_line: String,\n settings: &'a Settings,\n}\n\n\/\/ BinaryHeap depends on `Ord`. 
Note that we want to pop smallest items\n\/\/ from the heap first, and BinaryHeap.pop() returns the largest, so we\n\/\/ trick it into the right order by calling reverse() here.\nimpl<'a> Ord for MergeableFile<'a> {\n fn cmp(&self, other: &MergeableFile) -> Ordering {\n compare_by(&self.current_line, &other.current_line, &self.settings).reverse()\n }\n}\n\nimpl<'a> PartialOrd for MergeableFile<'a> {\n fn partial_cmp(&self, other: &MergeableFile) -> Option<Ordering> {\n Some(self.cmp(other))\n }\n}\n\nimpl<'a> PartialEq for MergeableFile<'a> {\n fn eq(&self, other: &MergeableFile) -> bool {\n Ordering::Equal == compare_by(&self.current_line, &other.current_line, &self.settings)\n }\n}\n\nimpl<'a> Eq for MergeableFile<'a> {}\n\nstruct FileMerger<'a> {\n heap: BinaryHeap<MergeableFile<'a>>,\n settings: &'a Settings,\n}\n\nimpl<'a> FileMerger<'a> {\n fn new(settings: &'a Settings) -> FileMerger<'a> {\n FileMerger {\n heap: BinaryHeap::new(),\n settings: settings,\n }\n }\n fn push_file(&mut self, mut lines: Lines<BufReader<Box<Read>>>){\n match lines.next() {\n Some(Ok(next_line)) => {\n let mergeable_file = MergeableFile {\n lines: lines,\n current_line: next_line,\n settings: &self.settings,\n };\n self.heap.push(mergeable_file);\n }\n _ => {}\n }\n }\n}\n\nimpl<'a> Iterator for FileMerger<'a> {\n type Item = String;\n fn next(&mut self) -> Option<String> {\n match self.heap.pop() {\n Some(mut current) => {\n match current.lines.next() {\n Some(Ok(next_line)) => {\n let ret = replace(&mut current.current_line, next_line);\n self.heap.push(current);\n Some(ret)\n },\n _ => {\n \/\/ Don't put it back in the heap (it's empty\/erroring)\n \/\/ but its first line is still valid.\n Some(current.current_line)\n },\n }\n },\n None => None,\n }\n }\n}\n\npub fn uumain(args: Vec<String>) -> i32 {\n let mut settings: Settings = Default::default();\n let mut opts = getopts::Options::new();\n\n opts.optflag(\"n\", \"numeric-sort\", \"compare according to string numerical value\");\n opts.optflag(\"h\", \"human-numeric-sort\", \"compare according to human readable sizes, eg 1M > 100k\");\n opts.optflag(\"M\", \"month-sort\", \"compare according to month name abbreviation\");\n opts.optflag(\"r\", \"reverse\", \"reverse the output\");\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n opts.optflag(\"\", \"version\", \"output version information and exit\");\n opts.optflag(\"m\", \"merge\", \"merge already sorted files; do not sort\");\n opts.optopt(\"o\", \"output\", \"write output to FILENAME instead of stdout\", \"FILENAME\");\n opts.optflag(\"s\", \"stable\", \"stabilize sort by disabling last-resort comparison\");\n opts.optflag(\"u\", \"unique\", \"output only the first of an equal run\");\n opts.optflag(\"V\", \"version-sort\", \"Sort by SemVer version number, eg 1.12.2 > 1.1.2\");\n opts.optflag(\"c\", \"check\", \"check for sorted input; do not sort\");\n\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n if matches.opt_present(\"help\") {\n let msg = format!(\"{0} {1}\n\nUsage:\n {0} [OPTION]... 
[FILE]...\n\nWrite the sorted concatenation of all FILE(s) to standard output.\n\nMandatory arguments for long options are mandatory for short options too.\n\nWith no FILE, or when FILE is -, read standard input.\", NAME, VERSION);\n print!(\"{}\", opts.usage(&msg));\n return 0;\n }\n\n if matches.opt_present(\"version\") {\n println!(\"{} {}\", NAME, VERSION);\n return 0;\n }\n\n settings.mode = if matches.opt_present(\"numeric-sort\") {\n SortMode::Numeric\n } else if matches.opt_present(\"human-numeric-sort\") {\n SortMode::HumanNumeric\n } else if matches.opt_present(\"month-sort\") {\n SortMode::Month\n } else if matches.opt_present(\"version-sort\") {\n SortMode::Version\n } else {\n SortMode::Default\n };\n\n settings.merge = matches.opt_present(\"merge\");\n settings.reverse = matches.opt_present(\"reverse\");\n settings.outfile = matches.opt_str(\"output\");\n settings.stable = matches.opt_present(\"stable\");\n settings.unique = matches.opt_present(\"unique\");\n settings.check = matches.opt_present(\"check\");\n\n let mut files = matches.free;\n if files.is_empty() {\n \/* if no file, default to stdin *\/\n files.push(\"-\".to_owned());\n }\n\n settings.compare_fns.push(match settings.mode {\n SortMode::Numeric => numeric_compare,\n SortMode::HumanNumeric => human_numeric_size_compare,\n SortMode::Month => month_compare,\n SortMode::Version => version_compare,\n SortMode::Default => String::cmp\n });\n\n if !settings.stable {\n match settings.mode {\n SortMode::Default => {}\n _ => settings.compare_fns.push(String::cmp)\n }\n }\n\n exec(files, &settings)\n}\n\nfn exec(files: Vec<String>, settings: &Settings) -> i32 {\n let mut lines = Vec::new();\n let mut file_merger = FileMerger::new(&settings);\n\n for path in &files {\n let (reader, _) = match open(path) {\n Some(x) => x,\n None => continue,\n };\n\n let buf_reader = BufReader::new(reader);\n\n if settings.merge {\n file_merger.push_file(buf_reader.lines());\n }\n else {\n for line in buf_reader.lines() {\n if let Ok(n) = line {\n lines.push(n);\n }\n else {\n break;\n }\n }\n }\n }\n\n let original_lines = lines.to_vec();\n\n sort_by(&mut lines, &settings);\n\n if settings.check {\n for (i, line) in lines.iter().enumerate() {\n if line != &original_lines[i] {\n println!(\"sort: disorder in line {}\", i);\n return 1;\n }\n }\n }\n else if settings.merge {\n if settings.unique {\n print_sorted(file_merger.dedup(), &settings.outfile)\n }\n else {\n print_sorted(file_merger, &settings.outfile)\n }\n }\n else {\n if settings.unique {\n print_sorted(lines.iter().dedup(), &settings.outfile)\n }\n else {\n print_sorted(lines.iter(), &settings.outfile)\n }\n }\n\n 0\n\n}\n\nfn sort_by(lines: &mut Vec<String>, settings: &Settings) {\n lines.sort_by(|a, b| {\n compare_by(a, b, &settings)\n })\n}\n\nfn compare_by(a: &String, b: &String, settings: &Settings) -> Ordering {\n for compare_fn in &settings.compare_fns {\n let cmp = compare_fn(a, b);\n if cmp != Ordering::Equal {\n if settings.reverse {\n return cmp.reverse();\n }\n else {\n return cmp;\n }\n }\n }\n return Ordering::Equal;\n}\n\n\/\/\/ Parse the beginning string into an f64, returning -inf instead of NaN on errors.\nfn permissive_f64_parse(a: &str) -> f64 {\n \/\/ Maybe should be split on non-digit, but then 10e100 won't parse properly.\n \/\/ On the flip side, this will give NEG_INFINITY for \"1,234\", which might be OK\n \/\/ because there's no way to handle both CSV and thousands separators without a new flag.\n \/\/ GNU sort treats \"1,234\" as \"1\" in numeric, so maybe 
it's fine.\n let sa: &str = a.split_whitespace().next().unwrap();\n match sa.parse::<f64>() {\n Ok(a) => a,\n Err(_) => std::f64::NEG_INFINITY\n }\n}\n\n\/\/\/ Compares two floating point numbers, with errors being assumed to be -inf.\n\/\/\/ Stops coercing at the first whitespace char, so 1e2 will parse as 100 but\n\/\/\/ 1,000 will parse as -inf.\nfn numeric_compare(a: &String, b: &String) -> Ordering {\n let fa = permissive_f64_parse(a);\n let fb = permissive_f64_parse(b);\n \/\/ f64::cmp isn't implemented because NaN messes with it\n \/\/ but we sidestep that with permissive_f64_parse so just fake it\n if fa > fb {\n Ordering::Greater\n }\n else if fa < fb {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\nfn human_numeric_convert(a: &String) -> f64 {\n let int_iter = a.chars();\n let suffix_iter = a.chars();\n let int_str: String = int_iter.take_while(|c| c.is_numeric()).collect();\n let suffix = suffix_iter.skip_while(|c| c.is_numeric()).next();\n let int_part = match int_str.parse::<f64>() {\n Ok(i) => i,\n Err(_) => -1f64\n } as f64;\n let suffix: f64 = match suffix.unwrap_or('\\0') {\n 'K' => 1000f64,\n 'M' => 1E6,\n 'G' => 1E9,\n 'T' => 1E12,\n 'P' => 1E15,\n _ => 1f64\n };\n int_part * suffix\n}\n\n\/\/\/ Compare two strings as if they are human readable sizes.\n\/\/\/ AKA 1M > 100k\nfn human_numeric_size_compare(a: &String, b: &String) -> Ordering {\n let fa = human_numeric_convert(a);\n let fb = human_numeric_convert(b);\n if fa > fb {\n Ordering::Greater\n }\n else if fa < fb {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\n#[derive(Eq, Ord, PartialEq, PartialOrd)]\nenum Month {\n Unknown,\n January,\n February,\n March,\n April,\n May,\n June,\n July,\n August,\n September,\n October,\n November,\n December,\n}\n\n\/\/\/ Parse the beginning string into a Month, returning Month::Unknown on errors.\nfn month_parse(line: &String) -> Month {\n match line.split_whitespace().next().unwrap().to_uppercase().as_ref() {\n \"JAN\" => Month::January,\n \"FEB\" => Month::February,\n \"MAR\" => Month::March,\n \"APR\" => Month::April,\n \"MAY\" => Month::May,\n \"JUN\" => Month::June,\n \"JUL\" => Month::July,\n \"AUG\" => Month::August,\n \"SEP\" => Month::September,\n \"OCT\" => Month::October,\n \"NOV\" => Month::November,\n \"DEC\" => Month::December,\n _ => Month::Unknown,\n }\n}\n\nfn month_compare(a: &String, b: &String) -> Ordering {\n month_parse(a).cmp(&month_parse(b))\n}\n\nfn version_compare(a: &String, b: &String) -> Ordering {\n let ver_a = Version::parse(a);\n let ver_b = Version::parse(b);\n if ver_a > ver_b {\n Ordering::Greater\n }\n else if ver_a < ver_b {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\nfn print_sorted<S, T: Iterator<Item=S>>(iter: T, outfile: &Option<String>) where S: std::fmt::Display {\n let mut file: Box<Write> = match *outfile {\n Some(ref filename) => {\n match File::create(Path::new(&filename)) {\n Ok(f) => Box::new(BufWriter::new(f)) as Box<Write>,\n Err(e) => {\n show_error!(\"sort: {0}: {1}\", filename, e.to_string());\n panic!(\"Could not open output file\");\n },\n }\n },\n None => Box::new(stdout()) as Box<Write>,\n };\n\n\n for line in iter {\n let str = format!(\"{}\\n\", line);\n match file.write_all(str.as_bytes()) {\n Err(e) => {\n show_error!(\"sort: {0}\", e.to_string());\n panic!(\"Write failed\");\n },\n Ok(_) => (),\n }\n }\n}\n\n\/\/ from cat.rs\nfn open(path: &str) -> Option<(Box<Read>, bool)> {\n if path == \"-\" {\n let stdin = stdin();\n return Some((Box::new(stdin) as Box<Read>, 
is_stdin_interactive()));\n }\n\n match File::open(Path::new(path)) {\n Ok(f) => Some((Box::new(f) as Box<Read>, false)),\n Err(e) => {\n show_error!(\"sort: {0}: {1}\", path, e.to_string());\n None\n },\n }\n}\n<commit_msg>sort --check refactor to use iterator<commit_after>#![crate_name = \"uu_sort\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Michael Yin <mikeyin@mikeyin.org>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n#![allow(dead_code)]\n\nextern crate getopts;\nextern crate libc;\nextern crate semver;\n\n#[macro_use]\nextern crate uucore;\n#[macro_use]\nextern crate itertools;\n\nuse std::cmp::Ordering;\nuse std::collections::BinaryHeap;\nuse std::fs::File;\nuse std::io::{BufRead, BufReader, BufWriter, Lines, Read, stdin, stdout, Write};\nuse std::mem::replace;\nuse std::path::Path;\nuse uucore::fs::is_stdin_interactive;\nuse semver::Version;\nuse itertools::Itertools; \/\/ for Iterator::dedup()\n\nstatic NAME: &'static str = \"sort\";\nstatic VERSION: &'static str = env!(\"CARGO_PKG_VERSION\");\n\nstatic DECIMAL_PT: char = '.';\nstatic THOUSANDS_SEP: char = ',';\n\nenum SortMode {\n Numeric,\n HumanNumeric,\n Month,\n Version,\n Default,\n}\n\nstruct Settings {\n mode: SortMode,\n merge: bool,\n reverse: bool,\n outfile: Option<String>,\n stable: bool,\n unique: bool,\n check: bool,\n compare_fns: Vec<fn(&String, &String) -> Ordering>,\n}\n\nimpl Default for Settings {\n fn default() -> Settings {\n Settings {\n mode: SortMode::Default,\n merge: false,\n reverse: false,\n outfile: None,\n stable: false,\n unique: false,\n check: false,\n compare_fns: Vec::new(),\n }\n }\n}\n\nstruct MergeableFile<'a> {\n lines: Lines<BufReader<Box<Read>>>,\n current_line: String,\n settings: &'a Settings,\n}\n\n\/\/ BinaryHeap depends on `Ord`. 
Note that we want to pop smallest items\n\/\/ from the heap first, and BinaryHeap.pop() returns the largest, so we\n\/\/ trick it into the right order by calling reverse() here.\nimpl<'a> Ord for MergeableFile<'a> {\n fn cmp(&self, other: &MergeableFile) -> Ordering {\n compare_by(&self.current_line, &other.current_line, &self.settings).reverse()\n }\n}\n\nimpl<'a> PartialOrd for MergeableFile<'a> {\n fn partial_cmp(&self, other: &MergeableFile) -> Option<Ordering> {\n Some(self.cmp(other))\n }\n}\n\nimpl<'a> PartialEq for MergeableFile<'a> {\n fn eq(&self, other: &MergeableFile) -> bool {\n Ordering::Equal == compare_by(&self.current_line, &other.current_line, &self.settings)\n }\n}\n\nimpl<'a> Eq for MergeableFile<'a> {}\n\nstruct FileMerger<'a> {\n heap: BinaryHeap<MergeableFile<'a>>,\n settings: &'a Settings,\n}\n\nimpl<'a> FileMerger<'a> {\n fn new(settings: &'a Settings) -> FileMerger<'a> {\n FileMerger {\n heap: BinaryHeap::new(),\n settings: settings,\n }\n }\n fn push_file(&mut self, mut lines: Lines<BufReader<Box<Read>>>){\n match lines.next() {\n Some(Ok(next_line)) => {\n let mergeable_file = MergeableFile {\n lines: lines,\n current_line: next_line,\n settings: &self.settings,\n };\n self.heap.push(mergeable_file);\n }\n _ => {}\n }\n }\n}\n\nimpl<'a> Iterator for FileMerger<'a> {\n type Item = String;\n fn next(&mut self) -> Option<String> {\n match self.heap.pop() {\n Some(mut current) => {\n match current.lines.next() {\n Some(Ok(next_line)) => {\n let ret = replace(&mut current.current_line, next_line);\n self.heap.push(current);\n Some(ret)\n },\n _ => {\n \/\/ Don't put it back in the heap (it's empty\/erroring)\n \/\/ but its first line is still valid.\n Some(current.current_line)\n },\n }\n },\n None => None,\n }\n }\n}\n\npub fn uumain(args: Vec<String>) -> i32 {\n let mut settings: Settings = Default::default();\n let mut opts = getopts::Options::new();\n\n opts.optflag(\"n\", \"numeric-sort\", \"compare according to string numerical value\");\n opts.optflag(\"h\", \"human-numeric-sort\", \"compare according to human readable sizes, eg 1M > 100k\");\n opts.optflag(\"M\", \"month-sort\", \"compare according to month name abbreviation\");\n opts.optflag(\"r\", \"reverse\", \"reverse the output\");\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n opts.optflag(\"\", \"version\", \"output version information and exit\");\n opts.optflag(\"m\", \"merge\", \"merge already sorted files; do not sort\");\n opts.optopt(\"o\", \"output\", \"write output to FILENAME instead of stdout\", \"FILENAME\");\n opts.optflag(\"s\", \"stable\", \"stabilize sort by disabling last-resort comparison\");\n opts.optflag(\"u\", \"unique\", \"output only the first of an equal run\");\n opts.optflag(\"V\", \"version-sort\", \"Sort by SemVer version number, eg 1.12.2 > 1.1.2\");\n opts.optflag(\"c\", \"check\", \"check for sorted input; do not sort\");\n\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n if matches.opt_present(\"help\") {\n let msg = format!(\"{0} {1}\n\nUsage:\n {0} [OPTION]... 
[FILE]...\n\nWrite the sorted concatenation of all FILE(s) to standard output.\n\nMandatory arguments for long options are mandatory for short options too.\n\nWith no FILE, or when FILE is -, read standard input.\", NAME, VERSION);\n print!(\"{}\", opts.usage(&msg));\n return 0;\n }\n\n if matches.opt_present(\"version\") {\n println!(\"{} {}\", NAME, VERSION);\n return 0;\n }\n\n settings.mode = if matches.opt_present(\"numeric-sort\") {\n SortMode::Numeric\n } else if matches.opt_present(\"human-numeric-sort\") {\n SortMode::HumanNumeric\n } else if matches.opt_present(\"month-sort\") {\n SortMode::Month\n } else if matches.opt_present(\"version-sort\") {\n SortMode::Version\n } else {\n SortMode::Default\n };\n\n settings.merge = matches.opt_present(\"merge\");\n settings.reverse = matches.opt_present(\"reverse\");\n settings.outfile = matches.opt_str(\"output\");\n settings.stable = matches.opt_present(\"stable\");\n settings.unique = matches.opt_present(\"unique\");\n settings.check = matches.opt_present(\"check\");\n\n let mut files = matches.free;\n if files.is_empty() {\n \/* if no file, default to stdin *\/\n files.push(\"-\".to_owned());\n }\n else if settings.check && files.len() != 1 {\n crash!(1, \"sort: extra operand `{}' not allowed with -c\", files[1])\n\n }\n\n settings.compare_fns.push(match settings.mode {\n SortMode::Numeric => numeric_compare,\n SortMode::HumanNumeric => human_numeric_size_compare,\n SortMode::Month => month_compare,\n SortMode::Version => version_compare,\n SortMode::Default => String::cmp\n });\n\n if !settings.stable {\n match settings.mode {\n SortMode::Default => {}\n _ => settings.compare_fns.push(String::cmp)\n }\n }\n\n exec(files, &settings)\n}\n\nfn exec(files: Vec<String>, settings: &Settings) -> i32 {\n let mut lines = Vec::new();\n let mut file_merger = FileMerger::new(&settings);\n\n for path in &files {\n let (reader, _) = match open(path) {\n Some(x) => x,\n None => continue,\n };\n\n let buf_reader = BufReader::new(reader);\n\n if settings.merge {\n file_merger.push_file(buf_reader.lines());\n }\n else if settings.check {\n return exec_check_file(buf_reader.lines(), &settings)\n }\n else {\n for line in buf_reader.lines() {\n if let Ok(n) = line {\n lines.push(n);\n }\n else {\n break;\n }\n }\n }\n }\n\n sort_by(&mut lines, &settings);\n\n if settings.merge {\n if settings.unique {\n print_sorted(file_merger.dedup(), &settings.outfile)\n }\n else {\n print_sorted(file_merger, &settings.outfile)\n }\n }\n else {\n if settings.unique {\n print_sorted(lines.iter().dedup(), &settings.outfile)\n }\n else {\n print_sorted(lines.iter(), &settings.outfile)\n }\n }\n\n 0\n\n}\n\nfn exec_check_file(lines: Lines<BufReader<Box<Read>>>, settings: &Settings) -> i32 {\n \/\/ errors yields the line before each disorder,\n \/\/ plus the last line (quirk of .coalesce())\n let unwrapped_lines = lines.filter_map(|maybe_line| {\n if let Ok(line) = maybe_line {\n Some(line)\n }\n else {\n None\n }\n });\n let mut errors = unwrapped_lines.enumerate().coalesce(\n |(last_i, last_line), (i, line)| {\n if compare_by(&last_line, &line, &settings) == Ordering::Greater {\n Err(((last_i, last_line), (i, line)))\n }\n else {\n Ok((i, line))\n }\n });\n if let Some((first_error_index, _line)) = errors.next() {\n \/\/ Check for a second \"error\", as .coalesce() always returns the last\n \/\/ line, no matter what our merging function does.\n if let Some(_last_line_or_next_error) = errors.next() {\n println!(\"sort: disorder in line {}\", first_error_index);\n return 
1;\n }\n else {\n \/\/ first \"error\" was actually the last line. \n return 0;\n }\n }\n else {\n \/\/ unwrapped_lines was empty. Empty files are defined to be sorted.\n return 0;\n }\n}\n\nfn sort_by(lines: &mut Vec<String>, settings: &Settings) {\n lines.sort_by(|a, b| {\n compare_by(a, b, &settings)\n })\n}\n\nfn compare_by(a: &String, b: &String, settings: &Settings) -> Ordering {\n for compare_fn in &settings.compare_fns {\n let cmp = compare_fn(a, b);\n if cmp != Ordering::Equal {\n if settings.reverse {\n return cmp.reverse();\n }\n else {\n return cmp;\n }\n }\n }\n return Ordering::Equal;\n}\n\n\/\/\/ Parse the beginning string into an f64, returning -inf instead of NaN on errors.\nfn permissive_f64_parse(a: &str) -> f64 {\n \/\/ Maybe should be split on non-digit, but then 10e100 won't parse properly.\n \/\/ On the flip side, this will give NEG_INFINITY for \"1,234\", which might be OK\n \/\/ because there's no way to handle both CSV and thousands separators without a new flag.\n \/\/ GNU sort treats \"1,234\" as \"1\" in numeric, so maybe it's fine.\n let sa: &str = a.split_whitespace().next().unwrap();\n match sa.parse::<f64>() {\n Ok(a) => a,\n Err(_) => std::f64::NEG_INFINITY\n }\n}\n\n\/\/\/ Compares two floating point numbers, with errors being assumed to be -inf.\n\/\/\/ Stops coercing at the first whitespace char, so 1e2 will parse as 100 but\n\/\/\/ 1,000 will parse as -inf.\nfn numeric_compare(a: &String, b: &String) -> Ordering {\n let fa = permissive_f64_parse(a);\n let fb = permissive_f64_parse(b);\n \/\/ f64::cmp isn't implemented because NaN messes with it\n \/\/ but we sidestep that with permissive_f64_parse so just fake it\n if fa > fb {\n Ordering::Greater\n }\n else if fa < fb {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\nfn human_numeric_convert(a: &String) -> f64 {\n let int_iter = a.chars();\n let suffix_iter = a.chars();\n let int_str: String = int_iter.take_while(|c| c.is_numeric()).collect();\n let suffix = suffix_iter.skip_while(|c| c.is_numeric()).next();\n let int_part = match int_str.parse::<f64>() {\n Ok(i) => i,\n Err(_) => -1f64\n } as f64;\n let suffix: f64 = match suffix.unwrap_or('\\0') {\n 'K' => 1000f64,\n 'M' => 1E6,\n 'G' => 1E9,\n 'T' => 1E12,\n 'P' => 1E15,\n _ => 1f64\n };\n int_part * suffix\n}\n\n\/\/\/ Compare two strings as if they are human readable sizes.\n\/\/\/ AKA 1M > 100k\nfn human_numeric_size_compare(a: &String, b: &String) -> Ordering {\n let fa = human_numeric_convert(a);\n let fb = human_numeric_convert(b);\n if fa > fb {\n Ordering::Greater\n }\n else if fa < fb {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\n#[derive(Eq, Ord, PartialEq, PartialOrd)]\nenum Month {\n Unknown,\n January,\n February,\n March,\n April,\n May,\n June,\n July,\n August,\n September,\n October,\n November,\n December,\n}\n\n\/\/\/ Parse the beginning string into a Month, returning Month::Unknown on errors.\nfn month_parse(line: &String) -> Month {\n match line.split_whitespace().next().unwrap().to_uppercase().as_ref() {\n \"JAN\" => Month::January,\n \"FEB\" => Month::February,\n \"MAR\" => Month::March,\n \"APR\" => Month::April,\n \"MAY\" => Month::May,\n \"JUN\" => Month::June,\n \"JUL\" => Month::July,\n \"AUG\" => Month::August,\n \"SEP\" => Month::September,\n \"OCT\" => Month::October,\n \"NOV\" => Month::November,\n \"DEC\" => Month::December,\n _ => Month::Unknown,\n }\n}\n\nfn month_compare(a: &String, b: &String) -> Ordering {\n month_parse(a).cmp(&month_parse(b))\n}\n\nfn version_compare(a: &String, b: 
&String) -> Ordering {\n let ver_a = Version::parse(a);\n let ver_b = Version::parse(b);\n if ver_a > ver_b {\n Ordering::Greater\n }\n else if ver_a < ver_b {\n Ordering::Less\n }\n else {\n Ordering::Equal\n }\n}\n\nfn print_sorted<S, T: Iterator<Item=S>>(iter: T, outfile: &Option<String>) where S: std::fmt::Display {\n let mut file: Box<Write> = match *outfile {\n Some(ref filename) => {\n match File::create(Path::new(&filename)) {\n Ok(f) => Box::new(BufWriter::new(f)) as Box<Write>,\n Err(e) => {\n show_error!(\"sort: {0}: {1}\", filename, e.to_string());\n panic!(\"Could not open output file\");\n },\n }\n },\n None => Box::new(stdout()) as Box<Write>,\n };\n\n\n for line in iter {\n let str = format!(\"{}\\n\", line);\n match file.write_all(str.as_bytes()) {\n Err(e) => {\n show_error!(\"sort: {0}\", e.to_string());\n panic!(\"Write failed\");\n },\n Ok(_) => (),\n }\n }\n}\n\n\/\/ from cat.rs\nfn open(path: &str) -> Option<(Box<Read>, bool)> {\n if path == \"-\" {\n let stdin = stdin();\n return Some((Box::new(stdin) as Box<Read>, is_stdin_interactive()));\n }\n\n match File::open(Path::new(path)) {\n Ok(f) => Some((Box::new(f) as Box<Read>, false)),\n Err(e) => {\n show_error!(\"sort: {0}: {1}\", path, e.to_string());\n None\n },\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate curl;\nextern crate docopt;\n\nuse curl::http::handle::Method;\nuse curl::http;\nuse docopt::Docopt;\nuse super::request::SpagRequest;\nuse super::file;\n\ndocopt!(Args derive Debug, \"\nUsage: spag request [show] <file> [(-H <header>)...]\n spag (get|post|put|patch|delete) <resource> [(-H <header>)...]\n spag env set (<key> <val>)...\n spag env show [<environment>]\n spag history\n spag history show <index>\n\nOptions:\n -h, --help Show this message\n -H, --header Supply a header\n\nArguments:\n <resource> The path of an api resource, like \/v2\/things\n <header> An http header, like 'Content-type: application\/json'\n <environment> The name of an environment, like 'default'\n <index> An index, starting at zero\n\");\n\npub fn main() {\n let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());\n println!(\"{:?}\", args);\n\n if args.cmd_request {\n spag_request(&args);\n } else if args.cmd_history {\n spag_history(&args);\n } else if args.cmd_env {\n spag_env(&args);\n } else if args.cmd_get || args.cmd_post || args.cmd_put || args.cmd_patch || args.cmd_delete {\n spag_method(&args);\n }\n}\n\nfn spag_env(args: &Args) {\n if args.cmd_show {\n spag_env_show(&args);\n } else if args.cmd_set {\n spag_env_set(&args);\n } else {\n panic!(\"BUG: Invalid command\");\n }\n}\n\nfn spag_env_set(args: &Args) {\n println!(\"TODO\");\n let y = file::load_yaml_file(\"active.yml\");\n println!(\"{:?}\", y);\n}\n\nfn spag_env_show(args: &Args) {\n let s = file::read_file(\"active.yml\");\n println!(\"{}\", s.trim());\n}\n\nfn spag_history(args: &Args) {\n println!(\"called spag history\");\n}\n\nfn spag_request(args: &Args) {\n println!(\"called spag request\");\n}\n\nfn spag_method(args: &Args) {\n let method = get_method_from_args(args);\n let endpoint = \"http:\/\/localhost:5000\".to_string();\n let uri = args.arg_resource.to_string();\n let mut req = SpagRequest::new(method, endpoint, uri);\n req.add_headers(args.arg_header.iter());\n do_request(&req);\n}\n\nfn do_request(req: &SpagRequest) {\n println!(\"{:?}\", req);\n let mut handle = http::handle();\n let resp = req.prepare(&mut handle).exec().unwrap();\n println!(\"{}\", resp);\n}\n\nfn get_method_from_args(args: &Args) -> Method {\n if 
args.cmd_get { Method::Get }\n else if args.cmd_post { Method::Post }\n else if args.cmd_put { Method::Put }\n else if args.cmd_patch { Method::Patch }\n else if args.cmd_delete { Method::Delete }\n else { panic!(\"BUG: method not recognized\"); }\n}\n<commit_msg>Improve help<commit_after>extern crate curl;\nextern crate docopt;\n\nuse curl::http::handle::Method;\nuse curl::http;\nuse docopt::Docopt;\nuse super::request::SpagRequest;\nuse super::file;\n\ndocopt!(Args derive Debug, \"\nUsage:\n spag (-h|--help)\n spag env set (<key> <val>)...\n spag env show [<environment>]\n spag (get|post|put|patch|delete) <resource> [(-H <header>)...]\n spag request <file> [(-H <header>)...]\n spag request show <file>\n spag history\n spag history show <index>\n\nOptions:\n -h, --help Show this message\n -H, --header Supply a header\n\nArguments:\n <resource> The path of an api resource, like \/v2\/things\n <header> An http header, like 'Content-type: application\/json'\n <environment> The name of an environment, like 'default'\n <index> An index, starting at zero\n\nCommands:\n env set Set a key-value pair in the active environment\n env show Print out the specified environment\n get An HTTP GET request\n post An HTTP POST request\n put An HTTP PUT request\n patch An HTTP PATCH request\n delete An HTTP DELETE request\n request Make a request using a predefined file\n request show Show the specified request file\n history Print a list of previously made requests\n history show Print out a previous request by its index\n\");\n\npub fn main() {\n let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());\n println!(\"{:?}\", args);\n\n if args.cmd_request {\n spag_request(&args);\n } else if args.cmd_history {\n spag_history(&args);\n } else if args.cmd_env {\n spag_env(&args);\n } else if args.cmd_get || args.cmd_post || args.cmd_put || args.cmd_patch || args.cmd_delete {\n spag_method(&args);\n }\n}\n\nfn spag_env(args: &Args) {\n if args.cmd_show {\n spag_env_show(&args);\n } else if args.cmd_set {\n spag_env_set(&args);\n } else {\n panic!(\"BUG: Invalid command\");\n }\n}\n\nfn spag_env_set(args: &Args) {\n println!(\"TODO\");\n let y = file::load_yaml_file(\"active.yml\");\n println!(\"{:?}\", y);\n}\n\nfn spag_env_show(args: &Args) {\n let s = file::read_file(\"active.yml\");\n println!(\"{}\", s.trim());\n}\n\nfn spag_history(args: &Args) {\n println!(\"called spag history\");\n}\n\nfn spag_request(args: &Args) {\n println!(\"called spag request\");\n}\n\nfn spag_method(args: &Args) {\n let method = get_method_from_args(args);\n let endpoint = \"http:\/\/localhost:5000\".to_string();\n let uri = args.arg_resource.to_string();\n let mut req = SpagRequest::new(method, endpoint, uri);\n req.add_headers(args.arg_header.iter());\n do_request(&req);\n}\n\nfn do_request(req: &SpagRequest) {\n println!(\"{:?}\", req);\n let mut handle = http::handle();\n let resp = req.prepare(&mut handle).exec().unwrap();\n println!(\"{}\", resp);\n}\n\nfn get_method_from_args(args: &Args) -> Method {\n if args.cmd_get { Method::Get }\n else if args.cmd_post { Method::Post }\n else if args.cmd_put { Method::Put }\n else if args.cmd_patch { Method::Patch }\n else if args.cmd_delete { Method::Delete }\n else { panic!(\"BUG: method not recognized\"); }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change GroupCurve25519 method signatures to returns references<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Error 
handle?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented sleep.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate pulldown_cmark;\n\nuse std::path::{Path, Component};\nuse std::error::Error;\nuse std::io;\nuse std::fs::{self, metadata, File};\n\nuse self::pulldown_cmark::{Parser, html, Options, OPTION_ENABLE_TABLES, OPTION_ENABLE_FOOTNOTES};\n\n\/\/\/ Takes a path and returns a path containing just enough `..\/` to point to the root of the given path.\n\/\/\/\n\/\/\/ This is mostly interesting for a relative path to point back to the directory from where the\n\/\/\/ path starts.\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ let mut path = Path::new(\"some\/relative\/path\");\n\/\/\/\n\/\/\/ println!(\"{}\", path_to_root(&path));\n\/\/\/ ```\n\/\/\/\n\/\/\/ **Outputs**\n\/\/\/\n\/\/\/ ```text\n\/\/\/ \"..\/..\/\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ **note:** it's not very fool-proof, if you find a situation where it doesn't return the correct\n\/\/\/ path. Consider [submitting a new issue](https:\/\/github.com\/azerupi\/mdBook\/issues) or a\n\/\/\/ [pull-request](https:\/\/github.com\/azerupi\/mdBook\/pulls) to improve it.\n\npub fn path_to_root(path: &Path) -> String {\n debug!(\"[fn]: path_to_root\");\n \/\/ Remove filename and add \"..\/\" for every directory\n\n path.to_path_buf().parent().expect(\"\")\n .components().fold(String::new(), |mut s, c| {\n match c {\n Component::Normal(_) => s.push_str(\"..\/\"),\n _ => {\n debug!(\"[*]: Other path component... {:?}\", c);\n }\n }\n s\n })\n}\n\n\n\n\/\/\/ This function creates a file and returns it. But before creating the file it checks every\n\/\/\/ directory in the path to see if it exists, and if it does not it will be created.\n\npub fn create_file(path: &Path) -> Result<File, Box<Error>> {\n debug!(\"[fn]: create_file\");\n\n \/\/ Construct path\n if let Some(p) = path.parent() {\n debug!(\"Parent directory is: {:?}\", p);\n\n try!(fs::create_dir_all(p));\n }\n\n debug!(\"[*]: Create file: {:?}\", path);\n let f = match File::create(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"File::create: {}\", e);\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, format!(\"{}\", e))))\n },\n };\n\n Ok(f)\n}\n\n\/\/\/ Removes all the content of a directory but not the directory itself\n\npub fn remove_dir_content(dir: &Path) -> Result<(), Box<Error>> {\n for item in try!(fs::read_dir(dir)) {\n if let Ok(item) = item {\n let item = item.path();\n if item.is_dir() { try!(fs::remove_dir_all(item)); } else { try!(fs::remove_file(item)); }\n }\n }\n Ok(())\n}\n\n\/\/\/\n\/\/\/\n\/\/\/ Copies all files of a directory to another one except the files with the extensions given in the\n\/\/\/ `ext_blacklist` array\n\npub fn copy_files_except_ext(from: &Path, to: &Path, recursive: bool, ext_blacklist: &[&str]) -> Result<(), Box<Error>> {\n debug!(\"[fn] copy_files_except_ext\");\n \/\/ Check that from and to are different\n if from == to { return Ok(()) }\n debug!(\"[*] Loop\");\n for entry in try!(fs::read_dir(from)) {\n let entry = try!(entry);\n debug!(\"[*] {:?}\", entry.path());\n let metadata = try!(entry.metadata());\n\n \/\/ If the entry is a dir and the recursive option is enabled, call itself\n if metadata.is_dir() && recursive {\n if entry.path() == to.to_path_buf() { continue }\n debug!(\"[*] is dir\");\n\n \/\/ check if output dir already exists\n if !to.join(entry.file_name()).exists() {\n try!(fs::create_dir(&to.join(entry.file_name())));\n }\n\n try!(copy_files_except_ext(\n &from.join(entry.file_name()),\n 
&to.join(entry.file_name()),\n true,\n ext_blacklist\n ));\n } else if metadata.is_file() {\n\n \/\/ Check if it is in the blacklist\n if let Some(ext) = entry.path().extension() {\n if ext_blacklist.contains(&ext.to_str().unwrap()) { continue }\n debug!(\"[*] creating path for file: {:?}\", &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n\n output!(\"[*] copying file: {:?}\\n to {:?}\", entry.path(), &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n try!(fs::copy(entry.path(), &to.join(entry.path().file_name().expect(\"a file should have a file name...\"))));\n }\n }\n }\n Ok(())\n}\n\n\n\/\/\/\n\/\/\/\n\/\/\/ Wrapper around the pulldown-cmark parser and renderer to render markdown\n\npub fn render_markdown(text: &str) -> String {\n let mut s = String::with_capacity(text.len() * 3 \/ 2);\n\n let mut opts = Options::empty();\n opts.insert(OPTION_ENABLE_TABLES);\n opts.insert(OPTION_ENABLE_FOOTNOTES);\n\n let p = Parser::new_ext(&text, opts);\n html::push_html(&mut s, p);\n s\n}\n\n\n\n\/\/ ------------------------------------------------------------------------------------------------\n\/\/ ------------------------------------------------------------------------------------------------\n\n\/\/ tests\n\n#[cfg(test)]\nmod tests {\n extern crate tempdir;\n\n use super::copy_files_except_ext;\n use super::PathExt;\n use std::fs;\n\n #[test]\n fn copy_files_except_ext_test() {\n let tmp = match tempdir::TempDir::new(\"\") {\n Ok(t) => t,\n Err(_) => panic!(\"Could not create a temp dir\"),\n };\n\n \/\/ Create a couple of files\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.txt\")) { panic!(\"Could not create file.txt\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.md\")) { panic!(\"Could not create file.md\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.png\")) { panic!(\"Could not create file.png\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir\")) { panic!(\"Could not create sub_dir\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir\/file.png\")) { panic!(\"Could not create sub_dir\/file.png\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir_exists\")) { panic!(\"Could not create sub_dir_exists\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir_exists\/file.txt\")) { panic!(\"Could not create sub_dir_exists\/file.txt\") }\n\n \/\/ Create output dir\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\")) { panic!(\"Could not create output\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\/sub_dir_exists\")) { panic!(\"Could not create output\/sub_dir_exists\") }\n\n match copy_files_except_ext(&tmp.path(), &tmp.path().join(\"output\"), true, &[\"md\"]) {\n Err(e) => panic!(\"Error while executing the function:\\n{:?}\", e),\n Ok(_) => {},\n }\n\n \/\/ Check if the correct files where created\n if !(&tmp.path().join(\"output\/file.txt\")).exists() { panic!(\"output\/file.txt should exist\") }\n if (&tmp.path().join(\"output\/file.md\")).exists() { panic!(\"output\/file.md should not exist\") }\n if !(&tmp.path().join(\"output\/file.png\")).exists() { panic!(\"output\/file.png should exist\") }\n if !(&tmp.path().join(\"output\/sub_dir\/file.png\")).exists() { panic!(\"output\/sub_dir\/file.png should exist\") }\n if !(&tmp.path().join(\"output\/sub_dir_exists\/file.txt\")).exists() { panic!(\"output\/sub_dir\/file.png should exist\") }\n\n }\n}\n<commit_msg>Fix tests after removing PathExt from 
utils<commit_after>extern crate pulldown_cmark;\n\nuse std::path::{Path, Component};\nuse std::error::Error;\nuse std::io;\nuse std::fs::{self, metadata, File};\n\nuse self::pulldown_cmark::{Parser, html, Options, OPTION_ENABLE_TABLES, OPTION_ENABLE_FOOTNOTES};\n\n\/\/\/ Takes a path and returns a path containing just enough `..\/` to point to the root of the given path.\n\/\/\/\n\/\/\/ This is mostly interesting for a relative path to point back to the directory from where the\n\/\/\/ path starts.\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ let mut path = Path::new(\"some\/relative\/path\");\n\/\/\/\n\/\/\/ println!(\"{}\", path_to_root(&path));\n\/\/\/ ```\n\/\/\/\n\/\/\/ **Outputs**\n\/\/\/\n\/\/\/ ```text\n\/\/\/ \"..\/..\/\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ **note:** it's not very fool-proof, if you find a situation where it doesn't return the correct\n\/\/\/ path. Consider [submitting a new issue](https:\/\/github.com\/azerupi\/mdBook\/issues) or a\n\/\/\/ [pull-request](https:\/\/github.com\/azerupi\/mdBook\/pulls) to improve it.\n\npub fn path_to_root(path: &Path) -> String {\n debug!(\"[fn]: path_to_root\");\n \/\/ Remove filename and add \"..\/\" for every directory\n\n path.to_path_buf().parent().expect(\"\")\n .components().fold(String::new(), |mut s, c| {\n match c {\n Component::Normal(_) => s.push_str(\"..\/\"),\n _ => {\n debug!(\"[*]: Other path component... {:?}\", c);\n }\n }\n s\n })\n}\n\n\n\n\/\/\/ This function creates a file and returns it. But before creating the file it checks every\n\/\/\/ directory in the path to see if it exists, and if it does not it will be created.\n\npub fn create_file(path: &Path) -> Result<File, Box<Error>> {\n debug!(\"[fn]: create_file\");\n\n \/\/ Construct path\n if let Some(p) = path.parent() {\n debug!(\"Parent directory is: {:?}\", p);\n\n try!(fs::create_dir_all(p));\n }\n\n debug!(\"[*]: Create file: {:?}\", path);\n let f = match File::create(path) {\n Ok(f) => f,\n Err(e) => {\n debug!(\"File::create: {}\", e);\n return Err(Box::new(io::Error::new(io::ErrorKind::Other, format!(\"{}\", e))))\n },\n };\n\n Ok(f)\n}\n\n\/\/\/ Removes all the content of a directory but not the directory itself\n\npub fn remove_dir_content(dir: &Path) -> Result<(), Box<Error>> {\n for item in try!(fs::read_dir(dir)) {\n if let Ok(item) = item {\n let item = item.path();\n if item.is_dir() { try!(fs::remove_dir_all(item)); } else { try!(fs::remove_file(item)); }\n }\n }\n Ok(())\n}\n\n\/\/\/\n\/\/\/\n\/\/\/ Copies all files of a directory to another one except the files with the extensions given in the\n\/\/\/ `ext_blacklist` array\n\npub fn copy_files_except_ext(from: &Path, to: &Path, recursive: bool, ext_blacklist: &[&str]) -> Result<(), Box<Error>> {\n debug!(\"[fn] copy_files_except_ext\");\n \/\/ Check that from and to are different\n if from == to { return Ok(()) }\n debug!(\"[*] Loop\");\n for entry in try!(fs::read_dir(from)) {\n let entry = try!(entry);\n debug!(\"[*] {:?}\", entry.path());\n let metadata = try!(entry.metadata());\n\n \/\/ If the entry is a dir and the recursive option is enabled, call itself\n if metadata.is_dir() && recursive {\n if entry.path() == to.to_path_buf() { continue }\n debug!(\"[*] is dir\");\n\n \/\/ check if output dir already exists\n if !to.join(entry.file_name()).exists() {\n try!(fs::create_dir(&to.join(entry.file_name())));\n }\n\n try!(copy_files_except_ext(\n &from.join(entry.file_name()),\n &to.join(entry.file_name()),\n true,\n ext_blacklist\n ));\n } else if metadata.is_file() {\n\n \/\/ Check if it is in the 
blacklist\n if let Some(ext) = entry.path().extension() {\n if ext_blacklist.contains(&ext.to_str().unwrap()) { continue }\n debug!(\"[*] creating path for file: {:?}\", &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n\n output!(\"[*] copying file: {:?}\\n to {:?}\", entry.path(), &to.join(entry.path().file_name().expect(\"a file should have a file name...\")));\n try!(fs::copy(entry.path(), &to.join(entry.path().file_name().expect(\"a file should have a file name...\"))));\n }\n }\n }\n Ok(())\n}\n\n\n\/\/\/\n\/\/\/\n\/\/\/ Wrapper around the pulldown-cmark parser and renderer to render markdown\n\npub fn render_markdown(text: &str) -> String {\n let mut s = String::with_capacity(text.len() * 3 \/ 2);\n\n let mut opts = Options::empty();\n opts.insert(OPTION_ENABLE_TABLES);\n opts.insert(OPTION_ENABLE_FOOTNOTES);\n\n let p = Parser::new_ext(&text, opts);\n html::push_html(&mut s, p);\n s\n}\n\n\n\n\/\/ ------------------------------------------------------------------------------------------------\n\/\/ ------------------------------------------------------------------------------------------------\n\n\/\/ tests\n\n#[cfg(test)]\nmod tests {\n extern crate tempdir;\n\n use super::copy_files_except_ext;\n use std::fs;\n\n #[test]\n fn copy_files_except_ext_test() {\n let tmp = match tempdir::TempDir::new(\"\") {\n Ok(t) => t,\n Err(_) => panic!(\"Could not create a temp dir\"),\n };\n\n \/\/ Create a couple of files\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.txt\")) { panic!(\"Could not create file.txt\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.md\")) { panic!(\"Could not create file.md\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"file.png\")) { panic!(\"Could not create file.png\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir\")) { panic!(\"Could not create sub_dir\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir\/file.png\")) { panic!(\"Could not create sub_dir\/file.png\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"sub_dir_exists\")) { panic!(\"Could not create sub_dir_exists\") }\n if let Err(_) = fs::File::create(&tmp.path().join(\"sub_dir_exists\/file.txt\")) { panic!(\"Could not create sub_dir_exists\/file.txt\") }\n\n \/\/ Create output dir\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\")) { panic!(\"Could not create output\") }\n if let Err(_) = fs::create_dir(&tmp.path().join(\"output\/sub_dir_exists\")) { panic!(\"Could not create output\/sub_dir_exists\") }\n\n match copy_files_except_ext(&tmp.path(), &tmp.path().join(\"output\"), true, &[\"md\"]) {\n Err(e) => panic!(\"Error while executing the function:\\n{:?}\", e),\n Ok(_) => {},\n }\n\n \/\/ Check if the correct files where created\n if !(&tmp.path().join(\"output\/file.txt\")).exists() { panic!(\"output\/file.txt should exist\") }\n if (&tmp.path().join(\"output\/file.md\")).exists() { panic!(\"output\/file.md should not exist\") }\n if !(&tmp.path().join(\"output\/file.png\")).exists() { panic!(\"output\/file.png should exist\") }\n if !(&tmp.path().join(\"output\/sub_dir\/file.png\")).exists() { panic!(\"output\/sub_dir\/file.png should exist\") }\n if !(&tmp.path().join(\"output\/sub_dir_exists\/file.txt\")).exists() { panic!(\"output\/sub_dir\/file.png should exist\") }\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::{HashMap, hash_map};\nuse std::iter::IntoIterator;\n\nuse uuid::Uuid;\n\nuse engine::Name;\n\n\/\/\/ Map UUID and name to T items.\n#[derive(Debug)]\npub struct Table<T> {\n name_to_uuid: HashMap<Name, Uuid>,\n items: HashMap<Uuid, (Name, T)>,\n}\n\n\nimpl<T> Default for Table<T> {\n fn default() -> Table<T> {\n Table {\n name_to_uuid: HashMap::default(),\n items: HashMap::default(),\n }\n }\n}\n\npub struct Iter<'a, T: 'a> {\n items: hash_map::Iter<'a, Uuid, (Name, T)>,\n}\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n type Item = (&'a Name, &'a Uuid, &'a T);\n\n #[inline]\n fn next(&mut self) -> Option<(&'a Name, &'a Uuid, &'a T)> {\n self.items\n .next()\n .map(|(uuid, &(ref name, ref item))| (&*name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\npub struct IterMut<'a, T: 'a> {\n items: hash_map::IterMut<'a, Uuid, (Name, T)>,\n}\n\nimpl<'a, T> Iterator for IterMut<'a, T> {\n type Item = (&'a Name, &'a Uuid, &'a mut T);\n\n #[inline]\n fn next(&mut self) -> Option<(&'a Name, &'a Uuid, &'a mut T)> {\n self.items\n .next()\n .map(|(uuid, &mut (ref name, ref mut item))| (&*name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\npub struct IntoIter<T> {\n items: hash_map::IntoIter<Uuid, (Name, T)>,\n}\n\nimpl<T> Iterator for IntoIter<T> {\n type Item = (Name, Uuid, T);\n\n #[inline]\n fn next(&mut self) -> Option<(Name, Uuid, T)> {\n self.items\n .next()\n .map(|(uuid, (name, item))| (name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\nimpl<T> IntoIterator for Table<T> {\n type Item = (Name, Uuid, T);\n type IntoIter = IntoIter<T>;\n\n fn into_iter(self) -> IntoIter<T> {\n self.into_iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Table<T> {\n type Item = (&'a Name, &'a Uuid, &'a T);\n type IntoIter = Iter<'a, T>;\n\n fn into_iter(self) -> Iter<'a, T> {\n self.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Table<T> {\n type Item = (&'a Name, &'a Uuid, &'a mut T);\n type IntoIter = IterMut<'a, T>;\n\n fn into_iter(self) -> IterMut<'a, T> {\n self.iter_mut()\n }\n}\n\n\/\/\/ All operations are O(1), although Name lookups are slightly disadvantaged\n\/\/\/ vs. Uuid lookups. 
In order to rename a T item, it must be removed,\n\/\/\/ renamed, and reinserted under the new name.\nimpl<T> Table<T> {\n pub fn is_empty(&self) -> bool {\n self.items.is_empty()\n }\n\n pub fn len(&self) -> usize {\n self.items.len()\n }\n\n pub fn iter(&self) -> Iter<T> {\n Iter { items: self.items.iter() }\n }\n\n pub fn iter_mut(&mut self) -> IterMut<T> {\n IterMut { items: self.items.iter_mut() }\n }\n\n pub fn into_iter(self) -> IntoIter<T> {\n IntoIter { items: self.items.into_iter() }\n }\n\n \/\/\/ Returns true if map has an item corresponding to this name, else false.\n pub fn contains_name(&self, name: &str) -> bool {\n self.name_to_uuid.contains_key(name)\n }\n\n \/\/\/ Returns true if map has an item corresponding to this uuid, else false.\n pub fn contains_uuid(&self, uuid: Uuid) -> bool {\n self.items.contains_key(&uuid)\n }\n\n \/\/\/ Get item by name.\n pub fn get_by_name(&self, name: &str) -> Option<(Uuid, &T)> {\n self.name_to_uuid\n .get(&*name)\n .and_then(|uuid| self.items.get(uuid).map(|&(_, ref item)| (*uuid, item)))\n }\n\n \/\/\/ Get item by uuid.\n pub fn get_by_uuid(&self, uuid: Uuid) -> Option<(Name, &T)> {\n self.items\n .get(&uuid)\n .map(|&(ref name, ref item)| (name.clone(), item))\n }\n\n \/\/\/ Get mutable item by name.\n pub fn get_mut_by_name(&mut self, name: &str) -> Option<(Uuid, &mut T)> {\n let uuid = match self.name_to_uuid.get(name) {\n Some(uuid) => *uuid,\n None => return None,\n };\n self.items\n .get_mut(&uuid)\n .map(|&mut (_, ref mut item)| (uuid, item))\n }\n\n \/\/\/ Get mutable item by uuid.\n pub fn get_mut_by_uuid(&mut self, uuid: Uuid) -> Option<(Name, &mut T)> {\n self.items\n .get_mut(&uuid)\n .map(|&mut (ref name, ref mut item)| (name.clone(), item))\n }\n\n \/\/\/ Removes the item corresponding to name if there is one.\n pub fn remove_by_name(&mut self, name: &str) -> Option<(Uuid, T)> {\n if let Some(uuid) = self.name_to_uuid.remove(name) {\n self.items.remove(&uuid).map(|(_, item)| (uuid, item))\n } else {\n None\n }\n }\n\n \/\/\/ Removes the item corresponding to the uuid if there is one.\n pub fn remove_by_uuid(&mut self, uuid: Uuid) -> Option<(Name, T)> {\n if let Some((name, item)) = self.items.remove(&uuid) {\n self.name_to_uuid.remove(&name);\n Some((name, item))\n } else {\n None\n }\n }\n\n \/\/\/ Inserts an item for given uuid and name.\n \/\/\/ Possibly returns the item displaced.\n pub fn insert(&mut self, name: Name, uuid: Uuid, item: T) -> Option<(Name, Uuid, T)> {\n match self.name_to_uuid.insert(name.clone(), uuid) {\n Some(old_uuid) => {\n \/\/ (existing name, _)\n match self.items.insert(uuid, (name, item)) {\n \/\/ (existing name, existing uuid)\n Some((old_name, old_item)) => Some((old_name, uuid, old_item)),\n \/\/ (existing name, new uuid)\n None => {\n let (old_name, old_item) =\n self.items.remove(&old_uuid).expect(\"should be there\");\n Some((old_name, old_uuid, old_item))\n }\n }\n }\n None => {\n \/\/ (new name, existing uuid)\n if let Some((old_name, old_item)) = self.items.insert(uuid, (name, item)) {\n let old_uuid = self.name_to_uuid\n .remove(&old_name)\n .expect(\"should be there\");\n Some((old_name, old_uuid, old_item))\n } else {\n \/\/ (new name, new uuid)\n None\n }\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n use rand;\n use uuid::Uuid;\n\n use engine::Name;\n\n use super::Table;\n\n #[derive(Debug)]\n struct TestThing {\n name: String,\n uuid: Uuid,\n stuff: u32,\n }\n\n \/\/ A global invariant checker for the table.\n \/\/ Verifies proper relationship between internal data 
structures.\n fn table_invariant<T>(table: &Table<T>) -> () {\n for (uuid, &(ref name, _)) in &table.items {\n assert_eq!(*uuid, *table.name_to_uuid.get(name).unwrap())\n }\n\n \/\/ No extra garbage\n assert_eq!(table.name_to_uuid.len(), table.items.len())\n }\n\n impl TestThing {\n pub fn new(name: &str, uuid: Uuid) -> TestThing {\n TestThing {\n name: name.to_owned(),\n uuid: uuid.clone(),\n stuff: rand::random::<u32>(),\n }\n }\n }\n\n #[test]\n \/\/\/ Remove a test object by its uuid.\n \/\/\/ Mutate the removed test object.\n \/\/\/ Verify that the table is now empty and that removing by name yields\n \/\/\/ no result.\n fn remove_existing_item() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n t.insert(Name::new(name.to_owned()),\n uuid,\n TestThing::new(&name, uuid));\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_some());\n assert!(t.get_by_uuid(uuid).is_some());\n let thing = t.remove_by_uuid(uuid);\n table_invariant(&t);\n assert!(thing.is_some());\n let mut thing = thing.unwrap();\n thing.1.stuff = 0;\n assert!(t.is_empty());\n assert!(t.remove_by_name(&name).is_none());\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_none());\n assert!(t.get_by_uuid(uuid).is_none());\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with same keys.\n \/\/\/ The previously inserted thing should be returned.\n \/\/\/ You can't insert the identical thing, because that would be a move.\n \/\/\/ This is good, because then you can't have a thing that is both in\n \/\/\/ the table and not in the table.\n fn insert_same_keys() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n\n \/\/ There was nothing previously, so displaced must be empty.\n assert!(displaced.is_none());\n\n \/\/ t now contains the inserted thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n assert!(t.get_by_uuid(uuid).unwrap().1.stuff == thing_key);\n\n \/\/ Add another thing with the same keys.\n let thing2 = TestThing::new(&name, uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing2);\n table_invariant(&t);\n\n \/\/ It has displaced the old thing.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert!(&*displaced_item.0 == name);\n assert!(displaced_item.1 == uuid);\n\n \/\/ But it contains a thing with the same keys.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n assert!(t.get_by_uuid(uuid).unwrap().1.stuff == thing_key2);\n assert!(t.len() == 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same name.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_name() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n assert!(displaced.is_none());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n\n \/\/ Insert new item 
with different UUID.\n let uuid2 = Uuid::new_v4();\n let thing2 = TestThing::new(&name, uuid2);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid2, thing2);\n table_invariant(&t);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert!(&*displaced_item.0 == name);\n assert!(displaced_item.1 == uuid);\n assert!(displaced_item.2.stuff == thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid2));\n assert!(!t.contains_uuid(uuid));\n assert!(t.get_by_uuid(uuid2).unwrap().1.stuff == thing_key2);\n assert!(t.get_by_name(&name).unwrap().1.stuff == thing_key2);\n assert!(t.len() == 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same uuid.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_uuid() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n assert!(displaced.is_none());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n\n \/\/ Insert new item with different UUID.\n let name2 = \"name2\";\n let thing2 = TestThing::new(&name2, uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name2.to_owned()), uuid, thing2);\n table_invariant(&t);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert!(&*displaced_item.0 == name);\n assert!(displaced_item.1 == uuid);\n assert!(displaced_item.2.stuff == thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_uuid(uuid));\n assert!(t.contains_name(name2));\n assert!(!t.contains_name(name));\n assert!(t.get_by_uuid(uuid).unwrap().1.stuff == thing_key2);\n assert!(t.get_by_name(&name2).unwrap().1.stuff == thing_key2);\n assert!(t.len() == 1);\n }\n}\n<commit_msg>Use assert_eq! instead of assert! when possible<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::{HashMap, hash_map};\nuse std::iter::IntoIterator;\n\nuse uuid::Uuid;\n\nuse engine::Name;\n\n\/\/\/ Map UUID and name to T items.\n#[derive(Debug)]\npub struct Table<T> {\n name_to_uuid: HashMap<Name, Uuid>,\n items: HashMap<Uuid, (Name, T)>,\n}\n\n\nimpl<T> Default for Table<T> {\n fn default() -> Table<T> {\n Table {\n name_to_uuid: HashMap::default(),\n items: HashMap::default(),\n }\n }\n}\n\npub struct Iter<'a, T: 'a> {\n items: hash_map::Iter<'a, Uuid, (Name, T)>,\n}\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n type Item = (&'a Name, &'a Uuid, &'a T);\n\n #[inline]\n fn next(&mut self) -> Option<(&'a Name, &'a Uuid, &'a T)> {\n self.items\n .next()\n .map(|(uuid, &(ref name, ref item))| (&*name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\npub struct IterMut<'a, T: 'a> {\n items: hash_map::IterMut<'a, Uuid, (Name, T)>,\n}\n\nimpl<'a, T> Iterator for IterMut<'a, T> {\n type Item = (&'a Name, &'a Uuid, &'a mut T);\n\n #[inline]\n fn next(&mut self) -> Option<(&'a Name, &'a Uuid, &'a mut T)> {\n self.items\n .next()\n .map(|(uuid, &mut (ref name, ref mut item))| (&*name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\npub struct IntoIter<T> {\n items: hash_map::IntoIter<Uuid, (Name, T)>,\n}\n\nimpl<T> Iterator for IntoIter<T> {\n type Item = (Name, Uuid, T);\n\n #[inline]\n fn next(&mut self) -> Option<(Name, Uuid, T)> {\n self.items\n .next()\n .map(|(uuid, (name, item))| (name, uuid, item))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.items.size_hint()\n }\n}\n\nimpl<T> IntoIterator for Table<T> {\n type Item = (Name, Uuid, T);\n type IntoIter = IntoIter<T>;\n\n fn into_iter(self) -> IntoIter<T> {\n self.into_iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Table<T> {\n type Item = (&'a Name, &'a Uuid, &'a T);\n type IntoIter = Iter<'a, T>;\n\n fn into_iter(self) -> Iter<'a, T> {\n self.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Table<T> {\n type Item = (&'a Name, &'a Uuid, &'a mut T);\n type IntoIter = IterMut<'a, T>;\n\n fn into_iter(self) -> IterMut<'a, T> {\n self.iter_mut()\n }\n}\n\n\/\/\/ All operations are O(1), although Name lookups are slightly disadvantaged\n\/\/\/ vs. Uuid lookups. 
In order to rename a T item, it must be removed,\n\/\/\/ renamed, and reinserted under the new name.\nimpl<T> Table<T> {\n pub fn is_empty(&self) -> bool {\n self.items.is_empty()\n }\n\n pub fn len(&self) -> usize {\n self.items.len()\n }\n\n pub fn iter(&self) -> Iter<T> {\n Iter { items: self.items.iter() }\n }\n\n pub fn iter_mut(&mut self) -> IterMut<T> {\n IterMut { items: self.items.iter_mut() }\n }\n\n pub fn into_iter(self) -> IntoIter<T> {\n IntoIter { items: self.items.into_iter() }\n }\n\n \/\/\/ Returns true if map has an item corresponding to this name, else false.\n pub fn contains_name(&self, name: &str) -> bool {\n self.name_to_uuid.contains_key(name)\n }\n\n \/\/\/ Returns true if map has an item corresponding to this uuid, else false.\n pub fn contains_uuid(&self, uuid: Uuid) -> bool {\n self.items.contains_key(&uuid)\n }\n\n \/\/\/ Get item by name.\n pub fn get_by_name(&self, name: &str) -> Option<(Uuid, &T)> {\n self.name_to_uuid\n .get(&*name)\n .and_then(|uuid| self.items.get(uuid).map(|&(_, ref item)| (*uuid, item)))\n }\n\n \/\/\/ Get item by uuid.\n pub fn get_by_uuid(&self, uuid: Uuid) -> Option<(Name, &T)> {\n self.items\n .get(&uuid)\n .map(|&(ref name, ref item)| (name.clone(), item))\n }\n\n \/\/\/ Get mutable item by name.\n pub fn get_mut_by_name(&mut self, name: &str) -> Option<(Uuid, &mut T)> {\n let uuid = match self.name_to_uuid.get(name) {\n Some(uuid) => *uuid,\n None => return None,\n };\n self.items\n .get_mut(&uuid)\n .map(|&mut (_, ref mut item)| (uuid, item))\n }\n\n \/\/\/ Get mutable item by uuid.\n pub fn get_mut_by_uuid(&mut self, uuid: Uuid) -> Option<(Name, &mut T)> {\n self.items\n .get_mut(&uuid)\n .map(|&mut (ref name, ref mut item)| (name.clone(), item))\n }\n\n \/\/\/ Removes the item corresponding to name if there is one.\n pub fn remove_by_name(&mut self, name: &str) -> Option<(Uuid, T)> {\n if let Some(uuid) = self.name_to_uuid.remove(name) {\n self.items.remove(&uuid).map(|(_, item)| (uuid, item))\n } else {\n None\n }\n }\n\n \/\/\/ Removes the item corresponding to the uuid if there is one.\n pub fn remove_by_uuid(&mut self, uuid: Uuid) -> Option<(Name, T)> {\n if let Some((name, item)) = self.items.remove(&uuid) {\n self.name_to_uuid.remove(&name);\n Some((name, item))\n } else {\n None\n }\n }\n\n \/\/\/ Inserts an item for given uuid and name.\n \/\/\/ Possibly returns the item displaced.\n pub fn insert(&mut self, name: Name, uuid: Uuid, item: T) -> Option<(Name, Uuid, T)> {\n match self.name_to_uuid.insert(name.clone(), uuid) {\n Some(old_uuid) => {\n \/\/ (existing name, _)\n match self.items.insert(uuid, (name, item)) {\n \/\/ (existing name, existing uuid)\n Some((old_name, old_item)) => Some((old_name, uuid, old_item)),\n \/\/ (existing name, new uuid)\n None => {\n let (old_name, old_item) =\n self.items.remove(&old_uuid).expect(\"should be there\");\n Some((old_name, old_uuid, old_item))\n }\n }\n }\n None => {\n \/\/ (new name, existing uuid)\n if let Some((old_name, old_item)) = self.items.insert(uuid, (name, item)) {\n let old_uuid = self.name_to_uuid\n .remove(&old_name)\n .expect(\"should be there\");\n Some((old_name, old_uuid, old_item))\n } else {\n \/\/ (new name, new uuid)\n None\n }\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n use rand;\n use uuid::Uuid;\n\n use engine::Name;\n\n use super::Table;\n\n #[derive(Debug)]\n struct TestThing {\n name: String,\n uuid: Uuid,\n stuff: u32,\n }\n\n \/\/ A global invariant checker for the table.\n \/\/ Verifies proper relationship between internal data 
structures.\n fn table_invariant<T>(table: &Table<T>) -> () {\n for (uuid, &(ref name, _)) in &table.items {\n assert_eq!(*uuid, *table.name_to_uuid.get(name).unwrap())\n }\n\n \/\/ No extra garbage\n assert_eq!(table.name_to_uuid.len(), table.items.len())\n }\n\n impl TestThing {\n pub fn new(name: &str, uuid: Uuid) -> TestThing {\n TestThing {\n name: name.to_owned(),\n uuid: uuid.clone(),\n stuff: rand::random::<u32>(),\n }\n }\n }\n\n #[test]\n \/\/\/ Remove a test object by its uuid.\n \/\/\/ Mutate the removed test object.\n \/\/\/ Verify that the table is now empty and that removing by name yields\n \/\/\/ no result.\n fn remove_existing_item() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n t.insert(Name::new(name.to_owned()),\n uuid,\n TestThing::new(&name, uuid));\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_some());\n assert!(t.get_by_uuid(uuid).is_some());\n let thing = t.remove_by_uuid(uuid);\n table_invariant(&t);\n assert!(thing.is_some());\n let mut thing = thing.unwrap();\n thing.1.stuff = 0;\n assert!(t.is_empty());\n assert!(t.remove_by_name(&name).is_none());\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_none());\n assert!(t.get_by_uuid(uuid).is_none());\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with same keys.\n \/\/\/ The previously inserted thing should be returned.\n \/\/\/ You can't insert the identical thing, because that would be a move.\n \/\/\/ This is good, because then you can't have a thing that is both in\n \/\/\/ the table and not in the table.\n fn insert_same_keys() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n\n \/\/ There was nothing previously, so displaced must be empty.\n assert!(displaced.is_none());\n\n \/\/ t now contains the inserted thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n assert_eq!(t.get_by_uuid(uuid).unwrap().1.stuff, thing_key);\n\n \/\/ Add another thing with the same keys.\n let thing2 = TestThing::new(&name, uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing2);\n table_invariant(&t);\n\n \/\/ It has displaced the old thing.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert_eq!(&*displaced_item.0, name);\n assert_eq!(displaced_item.1, uuid);\n\n \/\/ But it contains a thing with the same keys.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n assert_eq!(t.get_by_uuid(uuid).unwrap().1.stuff, thing_key2);\n assert_eq!(t.len(), 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same name.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_name() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n assert!(displaced.is_none());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n\n \/\/ Insert new 
item with different UUID.\n let uuid2 = Uuid::new_v4();\n let thing2 = TestThing::new(&name, uuid2);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name.to_owned()), uuid2, thing2);\n table_invariant(&t);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert_eq!(&*displaced_item.0, name);\n assert_eq!(displaced_item.1, uuid);\n assert_eq!(displaced_item.2.stuff, thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid2));\n assert!(!t.contains_uuid(uuid));\n assert_eq!(t.get_by_uuid(uuid2).unwrap().1.stuff, thing_key2);\n assert_eq!(t.get_by_name(&name).unwrap().1.stuff, thing_key2);\n assert_eq!(t.len(), 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same uuid.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_uuid() {\n let mut t: Table<TestThing> = Table::default();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(Name::new(name.to_owned()), uuid, thing);\n table_invariant(&t);\n assert!(displaced.is_none());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(uuid));\n\n \/\/ Insert new item with different UUID.\n let name2 = \"name2\";\n let thing2 = TestThing::new(&name2, uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(Name::new(name2.to_owned()), uuid, thing2);\n table_invariant(&t);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.is_some());\n let ref displaced_item = displaced.unwrap();\n assert_eq!(&*displaced_item.0, name);\n assert_eq!(displaced_item.1, uuid);\n assert_eq!(displaced_item.2.stuff, thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_uuid(uuid));\n assert!(t.contains_name(name2));\n assert!(!t.contains_name(name));\n assert_eq!(t.get_by_uuid(uuid).unwrap().1.stuff, thing_key2);\n assert_eq!(t.get_by_name(&name2).unwrap().1.stuff, thing_key2);\n assert_eq!(t.len(), 1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added replay trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added an example how to create crosstalk by convolving a matrix with a matrix of impulse responses<commit_after>extern crate hound;\nextern crate basic_dsp;\nextern crate docopt;\n\nuse basic_dsp::*;\nuse basic_dsp::matrix::*;\nuse std::i16;\nuse std::env;\nuse docopt::Docopt;\n\nconst USAGE: &'static str = \"\nThis program takes a source wav file, adds crosstalk\nand then writes the result to the dest file.\n\nCrosstalk means that you will hear parts of channel 1 in\nchannel 2 and vice versa.\n\nUsage: crosstalk <source> <dest>\n crosstalk (--help) \n\nOptions:\n -h, --help Display usage.\n\";\n\nfn main() {\n let argv = env::args();\n let args = Docopt::new(USAGE)\n .and_then(|d| d.argv(argv.into_iter()).parse())\n .unwrap_or_else(|e| e.exit());\n if args.get_bool(\"-h\") || args.get_bool(\"--help\") {\n println!(\"{}\", USAGE);\n std::process::exit(0);\n }\n \n \n let source = args.get_str(\"<source>\");\n let dest = args.get_str(\"<dest>\");\n\n let mut reader = hound::WavReader::open(source).expect(\"Failed to open input waveform\");\n let 
samples: Vec<f32> = reader.samples::<f32>().map(|x|x.expect(\"Failed to read sample\")).collect(); \n assert_eq!(reader.spec().channels, 2);\n assert_eq!(reader.spec().sample_rate, 44100);\n assert_eq!(reader.spec().bits_per_sample, 32);\n \n let mut complex = samples.to_complex_time_vec();\n let mut channel1 = Vec::new().to_real_time_vec();\n let mut channel2 = Vec::new().to_real_time_vec();\n complex.get_real_imag(&mut channel1, &mut channel2);\n \n let mut mat = [channel1, channel2].to_mat();\n \/\/ The attenuation impulse response also adds an echo (expressed\n \/\/ by the value at index 0), but since the echo is only 3 samples and the \n \/\/ sample rate is 44.1 kHz the echo is < 1ms and no one will be able to hear that, \n \/\/ but it might be interesting enough for an example.\n let attenuation = vec!(0.2, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0).to_real_time_vec();\n let crosstalk = vec!(0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.0).to_real_time_vec();\n \n let imp_resp = [\n [&attenuation, &crosstalk],\n [&crosstalk, &attenuation]\n ];\n \n let mut buffer = SingleBuffer::new();\n mat.convolve_vector(&mut buffer, &imp_resp).unwrap();\n \n let rows = &mat.rows();\n complex.set_real_imag(&rows[0], &rows[1])\n .expect(\"Should never fail, this Vec can be resized\");\n \n let spec = hound::WavSpec {\n channels: 2,\n sample_rate: 44100,\n bits_per_sample: 16,\n sample_format: hound::SampleFormat::Int,\n };\n \n let mut writer = hound::WavWriter::create(dest, spec).expect(\"Failed to open output waveform\");\n let mut sample_no = 0;\n for sample in &complex[..] {\n let amplitude = i16::MAX as f32;\n let sample = (sample * amplitude) as i16;\n writer.write_sample(sample).expect(&format!(\"Failed to write sample {}\", sample_no));\n sample_no += 1;\n }\n \n writer.finalize().expect(\"Failed to close output waveform\");\n \n let len = complex.points();\n println!(\"Finished processing {} samples\", len);\n}<|endoftext|>"} {"text":"<commit_before>use header::{Header, Raw, CookiePair, CookieJar};\nuse std::fmt::{self, Display};\nuse std::str::from_utf8;\n\n\n\/\/\/ `Set-Cookie` header, defined [RFC6265](http:\/\/tools.ietf.org\/html\/rfc6265#section-4.1)\n\/\/\/\n\/\/\/ The Set-Cookie HTTP response header is used to send cookies from the\n\/\/\/ server to the user agent.\n\/\/\/\n\/\/\/ Informally, the Set-Cookie response header contains the header name\n\/\/\/ \"Set-Cookie\" followed by a \":\" and a cookie. 
Each cookie begins with\n\/\/\/ a name-value-pair, followed by zero or more attribute-value pairs.\n\/\/\/\n\/\/\/ # ABNF\n\/\/\/ ```plain\n\/\/\/ set-cookie-header = \"Set-Cookie:\" SP set-cookie-string\n\/\/\/ set-cookie-string = cookie-pair *( \";\" SP cookie-av )\n\/\/\/ cookie-pair = cookie-name \"=\" cookie-value\n\/\/\/ cookie-name = token\n\/\/\/ cookie-value = *cookie-octet \/ ( DQUOTE *cookie-octet DQUOTE )\n\/\/\/ cookie-octet = %x21 \/ %x23-2B \/ %x2D-3A \/ %x3C-5B \/ %x5D-7E\n\/\/\/ ; US-ASCII characters excluding CTLs,\n\/\/\/ ; whitespace DQUOTE, comma, semicolon,\n\/\/\/ ; and backslash\n\/\/\/ token = <token, defined in [RFC2616], Section 2.2>\n\/\/\/\n\/\/\/ cookie-av = expires-av \/ max-age-av \/ domain-av \/\n\/\/\/ path-av \/ secure-av \/ httponly-av \/\n\/\/\/ extension-av\n\/\/\/ expires-av = \"Expires=\" sane-cookie-date\n\/\/\/ sane-cookie-date = <rfc1123-date, defined in [RFC2616], Section 3.3.1>\n\/\/\/ max-age-av = \"Max-Age=\" non-zero-digit *DIGIT\n\/\/\/ ; In practice, both expires-av and max-age-av\n\/\/\/ ; are limited to dates representable by the\n\/\/\/ ; user agent.\n\/\/\/ non-zero-digit = %x31-39\n\/\/\/ ; digits 1 through 9\n\/\/\/ domain-av = \"Domain=\" domain-value\n\/\/\/ domain-value = <subdomain>\n\/\/\/ ; defined in [RFC1034], Section 3.5, as\n\/\/\/ ; enhanced by [RFC1123], Section 2.1\n\/\/\/ path-av = \"Path=\" path-value\n\/\/\/ path-value = <any CHAR except CTLs or \";\">\n\/\/\/ secure-av = \"Secure\"\n\/\/\/ httponly-av = \"HttpOnly\"\n\/\/\/ extension-av = <any CHAR except CTLs or \";\">\n\/\/\/ ```\n\/\/\/\n\/\/\/ # Example values\n\/\/\/ * `SID=31d4d96e407aad42`\n\/\/\/ * `lang=en-US; Expires=Wed, 09 Jun 2021 10:18:14 GMT`\n\/\/\/ * `lang=; Expires=Sun, 06 Nov 1994 08:49:37 GMT`\n\/\/\/ * `lang=en-US; Path=\/; Domain=example.com`\n\/\/\/\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ # extern crate hyper;\n\/\/\/ # extern crate cookie;\n\/\/\/ # fn main() {\n\/\/\/ \/\/ extern crate cookie;\n\/\/\/\n\/\/\/ use hyper::header::{Headers, SetCookie};\n\/\/\/ use cookie::Cookie as CookiePair;\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ let mut cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n\/\/\/\n\/\/\/ cookie.path = Some(\"\/path\".to_owned());\n\/\/\/ cookie.domain = Some(\"example.com\".to_owned());\n\/\/\/\n\/\/\/ headers.set(\n\/\/\/ SetCookie(vec![\n\/\/\/ cookie,\n\/\/\/ CookiePair::new(\"baz\".to_owned(), \"quux\".to_owned()),\n\/\/\/ ])\n\/\/\/ );\n\/\/\/ # }\n\/\/\/ ```\n#[derive(Clone, PartialEq, Debug)]\npub struct SetCookie(pub Vec<CookiePair>);\n\n__hyper__deref!(SetCookie => Vec<CookiePair>);\n\nimpl Header for SetCookie {\n fn header_name() -> &'static str {\n static NAME: &'static str = \"Set-Cookie\";\n NAME\n }\n\n fn parse_header(raw: &Raw) -> ::Result<SetCookie> {\n let mut set_cookies = Vec::with_capacity(raw.len());\n for set_cookies_raw in raw {\n if let Ok(s) = from_utf8(&set_cookies_raw[..]) {\n if let Ok(cookie) = s.parse() {\n set_cookies.push(cookie);\n }\n }\n }\n\n if !set_cookies.is_empty() {\n Ok(SetCookie(set_cookies))\n } else {\n Err(::Error::Header)\n }\n }\n\n fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {\n for (i, cookie) in self.0.iter().enumerate() {\n if i != 0 {\n try!(f.write_str(\"\\r\\nSet-Cookie: \"));\n }\n try!(Display::fmt(cookie, f));\n }\n Ok(())\n }\n}\n\n\nimpl SetCookie {\n \/\/\/ Use this to create SetCookie header from CookieJar using\n \/\/\/ calculated delta.\n pub fn from_cookie_jar(jar: &CookieJar) -> SetCookie {\n 
SetCookie(jar.delta())\n }\n\n \/\/\/ Use this on client to apply changes from SetCookie to CookieJar.\n \/\/\/ Note that this will `panic!` if `CookieJar` is not root.\n pub fn apply_to_cookie_jar(&self, jar: &mut CookieJar) {\n for cookie in self.iter() {\n jar.add_original(cookie.clone())\n }\n }\n}\n\n\n#[test]\nfn test_parse() {\n let h = Header::parse_header(&\"foo=bar; HttpOnly\".into());\n let mut c1 = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n c1.httponly = true;\n\n assert_eq!(h.ok(), Some(SetCookie(vec![c1])));\n}\n\n#[test]\nfn test_fmt() {\n use header::Headers;\n\n let mut cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n cookie.httponly = true;\n cookie.path = Some(\"\/p\".to_owned());\n let cookies = SetCookie(vec![cookie, CookiePair::new(\"baz\".to_owned(), \"quux\".to_owned())]);\n let mut headers = Headers::new();\n headers.set(cookies);\n\n assert_eq!(\n &headers.to_string()[..],\n \"Set-Cookie: foo=bar; HttpOnly; Path=\/p\\r\\nSet-Cookie: baz=quux\\r\\n\");\n}\n\n#[test]\nfn cookie_jar() {\n let jar = CookieJar::new(b\"secret\");\n let cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n jar.add(cookie);\n\n let cookies = SetCookie::from_cookie_jar(&jar);\n\n let mut new_jar = CookieJar::new(b\"secret\");\n cookies.apply_to_cookie_jar(&mut new_jar);\n\n assert_eq!(jar.find(\"foo\"), new_jar.find(\"foo\"));\n assert_eq!(jar.iter().collect::<Vec<CookiePair>>(), new_jar.iter().collect::<Vec<CookiePair>>());\n}\n<commit_msg>docs(headers): remove extraneous space<commit_after>use header::{Header, Raw, CookiePair, CookieJar};\nuse std::fmt::{self, Display};\nuse std::str::from_utf8;\n\n\n\/\/\/ `Set-Cookie` header, defined [RFC6265](http:\/\/tools.ietf.org\/html\/rfc6265#section-4.1)\n\/\/\/\n\/\/\/ The Set-Cookie HTTP response header is used to send cookies from the\n\/\/\/ server to the user agent.\n\/\/\/\n\/\/\/ Informally, the Set-Cookie response header contains the header name\n\/\/\/ \"Set-Cookie\" followed by a \":\" and a cookie. 
Each cookie begins with\n\/\/\/ a name-value-pair, followed by zero or more attribute-value pairs.\n\/\/\/\n\/\/\/ # ABNF\n\/\/\/ ```plain\n\/\/\/ set-cookie-header = \"Set-Cookie:\" SP set-cookie-string\n\/\/\/ set-cookie-string = cookie-pair *( \";\" SP cookie-av )\n\/\/\/ cookie-pair = cookie-name \"=\" cookie-value\n\/\/\/ cookie-name = token\n\/\/\/ cookie-value = *cookie-octet \/ ( DQUOTE *cookie-octet DQUOTE )\n\/\/\/ cookie-octet = %x21 \/ %x23-2B \/ %x2D-3A \/ %x3C-5B \/ %x5D-7E\n\/\/\/ ; US-ASCII characters excluding CTLs,\n\/\/\/ ; whitespace DQUOTE, comma, semicolon,\n\/\/\/ ; and backslash\n\/\/\/ token = <token, defined in [RFC2616], Section 2.2>\n\/\/\/\n\/\/\/ cookie-av = expires-av \/ max-age-av \/ domain-av \/\n\/\/\/ path-av \/ secure-av \/ httponly-av \/\n\/\/\/ extension-av\n\/\/\/ expires-av = \"Expires=\" sane-cookie-date\n\/\/\/ sane-cookie-date = <rfc1123-date, defined in [RFC2616], Section 3.3.1>\n\/\/\/ max-age-av = \"Max-Age=\" non-zero-digit *DIGIT\n\/\/\/ ; In practice, both expires-av and max-age-av\n\/\/\/ ; are limited to dates representable by the\n\/\/\/ ; user agent.\n\/\/\/ non-zero-digit = %x31-39\n\/\/\/ ; digits 1 through 9\n\/\/\/ domain-av = \"Domain=\" domain-value\n\/\/\/ domain-value = <subdomain>\n\/\/\/ ; defined in [RFC1034], Section 3.5, as\n\/\/\/ ; enhanced by [RFC1123], Section 2.1\n\/\/\/ path-av = \"Path=\" path-value\n\/\/\/ path-value = <any CHAR except CTLs or \";\">\n\/\/\/ secure-av = \"Secure\"\n\/\/\/ httponly-av = \"HttpOnly\"\n\/\/\/ extension-av = <any CHAR except CTLs or \";\">\n\/\/\/ ```\n\/\/\/\n\/\/\/ # Example values\n\/\/\/ * `SID=31d4d96e407aad42`\n\/\/\/ * `lang=en-US; Expires=Wed, 09 Jun 2021 10:18:14 GMT`\n\/\/\/ * `lang=; Expires=Sun, 06 Nov 1994 08:49:37 GMT`\n\/\/\/ * `lang=en-US; Path=\/; Domain=example.com`\n\/\/\/\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ # extern crate hyper;\n\/\/\/ # extern crate cookie;\n\/\/\/ # fn main() {\n\/\/\/ \/\/ extern crate cookie;\n\/\/\/\n\/\/\/ use hyper::header::{Headers, SetCookie};\n\/\/\/ use cookie::Cookie as CookiePair;\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ let mut cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n\/\/\/\n\/\/\/ cookie.path = Some(\"\/path\".to_owned());\n\/\/\/ cookie.domain = Some(\"example.com\".to_owned());\n\/\/\/\n\/\/\/ headers.set(\n\/\/\/ SetCookie(vec![\n\/\/\/ cookie,\n\/\/\/ CookiePair::new(\"baz\".to_owned(), \"quux\".to_owned()),\n\/\/\/ ])\n\/\/\/ );\n\/\/\/ # }\n\/\/\/ ```\n#[derive(Clone, PartialEq, Debug)]\npub struct SetCookie(pub Vec<CookiePair>);\n\n__hyper__deref!(SetCookie => Vec<CookiePair>);\n\nimpl Header for SetCookie {\n fn header_name() -> &'static str {\n static NAME: &'static str = \"Set-Cookie\";\n NAME\n }\n\n fn parse_header(raw: &Raw) -> ::Result<SetCookie> {\n let mut set_cookies = Vec::with_capacity(raw.len());\n for set_cookies_raw in raw {\n if let Ok(s) = from_utf8(&set_cookies_raw[..]) {\n if let Ok(cookie) = s.parse() {\n set_cookies.push(cookie);\n }\n }\n }\n\n if !set_cookies.is_empty() {\n Ok(SetCookie(set_cookies))\n } else {\n Err(::Error::Header)\n }\n }\n\n fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {\n for (i, cookie) in self.0.iter().enumerate() {\n if i != 0 {\n try!(f.write_str(\"\\r\\nSet-Cookie: \"));\n }\n try!(Display::fmt(cookie, f));\n }\n Ok(())\n }\n}\n\n\nimpl SetCookie {\n \/\/\/ Use this to create SetCookie header from CookieJar using\n \/\/\/ calculated delta.\n pub fn from_cookie_jar(jar: &CookieJar) -> SetCookie {\n 
SetCookie(jar.delta())\n }\n\n \/\/\/ Use this on client to apply changes from SetCookie to CookieJar.\n \/\/\/ Note that this will `panic!` if `CookieJar` is not root.\n pub fn apply_to_cookie_jar(&self, jar: &mut CookieJar) {\n for cookie in self.iter() {\n jar.add_original(cookie.clone())\n }\n }\n}\n\n\n#[test]\nfn test_parse() {\n let h = Header::parse_header(&\"foo=bar; HttpOnly\".into());\n let mut c1 = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n c1.httponly = true;\n\n assert_eq!(h.ok(), Some(SetCookie(vec![c1])));\n}\n\n#[test]\nfn test_fmt() {\n use header::Headers;\n\n let mut cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n cookie.httponly = true;\n cookie.path = Some(\"\/p\".to_owned());\n let cookies = SetCookie(vec![cookie, CookiePair::new(\"baz\".to_owned(), \"quux\".to_owned())]);\n let mut headers = Headers::new();\n headers.set(cookies);\n\n assert_eq!(\n &headers.to_string()[..],\n \"Set-Cookie: foo=bar; HttpOnly; Path=\/p\\r\\nSet-Cookie: baz=quux\\r\\n\");\n}\n\n#[test]\nfn cookie_jar() {\n let jar = CookieJar::new(b\"secret\");\n let cookie = CookiePair::new(\"foo\".to_owned(), \"bar\".to_owned());\n jar.add(cookie);\n\n let cookies = SetCookie::from_cookie_jar(&jar);\n\n let mut new_jar = CookieJar::new(b\"secret\");\n cookies.apply_to_cookie_jar(&mut new_jar);\n\n assert_eq!(jar.find(\"foo\"), new_jar.find(\"foo\"));\n assert_eq!(jar.iter().collect::<Vec<CookiePair>>(), new_jar.iter().collect::<Vec<CookiePair>>());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test that compare-exchange-weak-failure-rate=0.0 means what it says<commit_after>\/\/ compile-flags: -Zmiri-compare-exchange-weak-failure-rate=0.0\nuse std::sync::atomic::{AtomicBool, Ordering::*};\n\n\/\/ Ensure that compare_exchange_weak never fails.\nfn main() {\n let atomic = AtomicBool::new(false);\n let tries = 100;\n for _ in 0..tries {\n let cur = atomic.load(Relaxed);\n \/\/ Try (weakly) to flip the flag.\n if atomic.compare_exchange_weak(cur, !cur, Relaxed, Relaxed).is_err() {\n \/\/ We failed. Avoid panic machinery as that uses atomics\/locks.\n eprintln!(\"compare_exchange_weak failed\");\n std::process::abort();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #32802 - nikomatsakis:issue-32505, r=eddyb<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z parse-only -Z continue-parse-after-error\n\npub fn test() {\n foo(|_|) \/\/~ ERROR unexpected token: `)`\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>04 - mutability<commit_after>fn main() {\n let immutable_variable = 1i;\n let mut mutable_variable = 1i;\n\n println!(\"Before mutation: {}\", mutable_variable);\n\n \/\/ Ok\n mutable_variable += 1;\n\n println!(\"After mutation: {}\", mutable_variable);\n\n println!(\"This is immutable: {}\", immutable_variable);\n\n \/\/ Error!\n \/\/ immutable_variable += 1;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>from_utf8 now returns Result instead of Option<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>preload history cleanup<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add core data structures for Version and VersionReq<commit_after>#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\npub struct Version {\n pub major: u64,\n pub minor: u64,\n pub patch: u64,\n pub pre: String,\n pub build: String,\n}\n\n#[derive(Default, Clone, Eq, PartialEq, Hash, Debug)]\npub struct VersionReq {\n pub comparators: Vec<Comparator>,\n}\n\n#[derive(Clone, Eq, PartialEq, Hash, Debug)]\npub struct Comparator {\n pub op: Op,\n pub major: u64,\n pub minor: Option<u64>,\n pub patch: Option<u64>,\n pub pre: String,\n}\n\n#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]\npub enum Op {\n Exact,\n Greater,\n GreaterEq,\n Less,\n LessEq,\n Tilde,\n Caret,\n Wildcard,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rustfmt lib<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add #![warn(missing_docs)]<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Work on phone_number_util<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(lib): UserRWX -> USER_RWX, add #![feature(slicing_syntax)]<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added files via upload<commit_after>\/\/ \n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add RNA test from Rosalind challenge<commit_after>pub fn dna_to_rna_str(dna: &str ) -> ~str {\n std::str::replace(dna,\"T\", \"U\")\n}\n\n\n\nfn main() {\n let dna = ~\"ACTG\";\n let rna = dna_to_rna_str(dna);\n println!(\"DNA: {:}\", dna);\n println!(\"RNA: {:}\", rna);\n}\n\n#[cfg(test)]\nmod test {\n use super::dna_to_rna_str;\n\n #[test]\n fn first_test(){\n let dna = ~\"GATGGAACTTGACTACGTAAATT\";\n assert_eq!(dna_to_rna_str(dna), ~\"GAUGGAACUUGACUACGUAAAUU\");\n }\n\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Have FtpStream contain an SslContext since it can be cloned, unlike Ssl, and simplify some code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix bug printing incorrect diagnostic lines<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate orbclient;\nextern crate sinulation;\n\nuse super::start;\n\n#[derive(Debug, Clone, Copy)]\npub struct Color {\n pub r: u8,\n pub g: u8,\n pub b: u8,\n}\n\nimpl Color {\n pub fn new(r: u8, g: u8, b: u8) -> Color {\n Color {\n r: r,\n g: g,\n b: b,\n }\n }\n\n pub fn orb_color(&self) -> orbclient::color::Color {\n orbclient::color::Color::rgb(self.r, self.g, self.b)\n }\n}\n\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: i32,\n pub y: i32,\n}\n\n\/*impl FlatPoint {\n pub fn make_sdl(&self) -> sdl2::rect::Point {\n sdl2::rect::Point::new(self.x, self.y)\n }\n}*\/\n\n#[derive(Clone, 
Copy, Debug)]\npub struct DepthPoint {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n last_x_y: f32,\n last_x_z: f32,\n last_y_z: f32,\n}\n\nimpl DepthPoint {\n pub fn new(x: f32, y: f32, z: f32) -> DepthPoint {\n DepthPoint {\n x: x, \n y: y,\n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n last_x_y: 0.0,\n last_x_z: 0.0,\n last_y_z: 0.0,\n }\n }\n\n pub fn flat_point(&mut self, engine_scr_x: u32, engine_scr_y: u32, offset_x: f32, offset_y: f32, offset_z: f32) -> FlatPoint { \n if self.z > -0.01 && self.z < 0.0 {\n self.z = 0.001\n }\n\n else if self.z < 0.1 { \/\/ Prevents division by nearly 0, that cause integer overflow\/underflow\n self.z = 0.11;\n }\n\n FlatPoint {\n x: ((engine_scr_x as f32 * (self.x + offset_x) as f32\/(self.z + offset_z)) + engine_scr_x as f32 \/ 2.0) as i32, \n y: ((engine_scr_x as f32 * (self.y + offset_y) as f32\/(self.z + offset_z)) + engine_scr_y as f32 \/ 2.0) as i32,\n }\n }\n\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n use std::f32::consts::PI;\n\n let x_y = self.x_y;\n let x_z = self.x_z;\n let y_z = self.y_z;\n\n let last_x_y = self.last_x_y;\n let last_x_z = self.last_x_z;\n let last_y_z = self.last_y_z;\n\n self.camera_rotate_x_y(&engine, x_y - last_x_y);\n self.camera_rotate_x_z(&engine, x_z - last_x_z);\n self.camera_rotate_y_z(&engine, y_z - last_y_z);\n\n self.last_x_y = x_y;\n self.last_x_z = x_z;\n self.last_y_z = y_z;\n\n \/\/normalize rotations\n if self.x_z > (PI * 2.0) {\n self.x_z -= (PI * 2.0);\n }\n\n if self.x_y > (PI * 2.0) {\n self.x_y -= (PI * 2.0);\n }\n\n if self.y_z > (PI * 2.0) {\n self.y_z -= (PI * 2.0);\n } \n }\n\n pub fn camera_rotate_x_y(&mut self, engine: &start::Window, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.y -= engine.camera_y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + engine.camera_x;\n self.y = new_y + engine.camera_y;\n }\n \n pub fn camera_rotate_x_z(&mut self, engine: &start::Window, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.z -= engine.camera_z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + engine.camera_x;\n self.z = new_z + engine.camera_z;\n }\n\n pub fn camera_rotate_y_z(&mut self, engine: &start::Window, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= engine.camera_y;\n self.z -= engine.camera_z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + engine.camera_y;\n self.z = new_z + engine.camera_z;\n } \n\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.y -= y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + x;\n self.y = new_y + y;\n }\n \n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.z -= z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + x;\n self.z = new_z + z;\n }\n\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n use std::f32;\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= y;\n self.z -= z;\n\n 
let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + y;\n self.z = new_z + z;\n } \n}\n\n#[derive(Clone, Copy, Debug)]\npub struct Triangle {\n pub p1: DepthPoint,\n pub p2: DepthPoint,\n pub p3: DepthPoint,\n\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n pub color: Color,\n}\n\nimpl Triangle {\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: DepthPoint, x: f32, y: f32, z: f32, color: Color) -> Triangle {\n Triangle {\n p1: p1,\n p2: p2, \n p3: p3,\n\n x: x,\n y: y, \n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n color: color,\n }\n }\n\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n self.p1.x_y += self.x_y;\n self.p1.x_z += self.x_z;\n self.p1.y_z += self.y_z;\n\n self.p2.x_y += self.x_y;\n self.p2.x_z += self.x_z;\n self.p2.y_z += self.y_z;\n\n self.p3.x_y += self.x_y;\n self.p3.x_z += self.x_z;\n self.p3.y_z += self.y_z;\n\n self.p1.apply_camera_rotations(&engine);\n self.p2.apply_camera_rotations(&engine);\n self.p3.apply_camera_rotations(&engine);\n\n self.p1.x_y -= self.x_y;\n self.p1.x_z -= self.x_z;\n self.p1.y_z -= self.y_z;\n\n self.p2.x_y -= self.x_y;\n self.p2.x_z -= self.x_z;\n self.p2.y_z -= self.y_z;\n\n self.p3.x_y -= self.x_y;\n self.p3.x_z -= self.x_z;\n self.p3.y_z -= self.y_z;\n }\n\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.coord_rotate_x_y(x, y, angle);\n self.p2.coord_rotate_x_y(x, y, angle);\n self.p3.coord_rotate_x_y(x, y, angle);\n }\n\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_x_z(x, z, angle);\n self.p2.coord_rotate_x_z(x, z, angle);\n self.p3.coord_rotate_x_z(x, z, angle);\n }\n\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_y_z(y, z, angle);\n self.p2.coord_rotate_y_z(y, z, angle);\n self.p3.coord_rotate_y_z(y, z, angle);\n }\n}\n\n#[derive(Clone, Debug)]\npub struct TriangleGroup {\n pub triangles: Vec<Triangle>,\n}\n\nimpl TriangleGroup {\n pub fn new(triangles: Vec<Triangle>) -> TriangleGroup {\n TriangleGroup {\n triangles: triangles\n }\n }\n}\n<commit_msg>Redox stuff<commit_after>extern crate orbclient;\nextern crate sinulation;\n\nuse super::start;\n\n#[cfg(target_os = \"redox\")]\nuse sinulation::Trig;\n\n#[derive(Debug, Clone, Copy)]\npub struct Color {\n pub r: u8,\n pub g: u8,\n pub b: u8,\n}\n\nimpl Color {\n pub fn new(r: u8, g: u8, b: u8) -> Color {\n Color {\n r: r,\n g: g,\n b: b,\n }\n }\n\n pub fn orb_color(&self) -> orbclient::color::Color {\n orbclient::color::Color::rgb(self.r, self.g, self.b)\n }\n}\n\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: i32,\n pub y: i32,\n}\n\n\/*impl FlatPoint {\n pub fn make_sdl(&self) -> sdl2::rect::Point {\n sdl2::rect::Point::new(self.x, self.y)\n }\n}*\/\n\n#[derive(Clone, Copy, Debug)]\npub struct DepthPoint {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n last_x_y: f32,\n last_x_z: f32,\n last_y_z: f32,\n}\n\nimpl DepthPoint {\n pub fn new(x: f32, y: f32, z: f32) -> DepthPoint {\n DepthPoint {\n x: x, \n y: y,\n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n last_x_y: 0.0,\n last_x_z: 0.0,\n last_y_z: 0.0,\n }\n }\n\n pub fn flat_point(&mut self, engine_scr_x: u32, engine_scr_y: u32, offset_x: f32, offset_y: f32, offset_z: f32) -> FlatPoint { \n if self.z > -0.01 && self.z < 0.0 {\n self.z = 0.001\n }\n\n else if self.z < 0.1 { \/\/ Prevents division by nearly 0, that cause integer 
overflow\/underflow\n self.z = 0.11;\n }\n\n FlatPoint {\n x: ((engine_scr_x as f32 * (self.x + offset_x) as f32\/(self.z + offset_z)) + engine_scr_x as f32 \/ 2.0) as i32, \n y: ((engine_scr_x as f32 * (self.y + offset_y) as f32\/(self.z + offset_z)) + engine_scr_y as f32 \/ 2.0) as i32,\n }\n }\n\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n use std::f32::consts::PI;\n\n let x_y = self.x_y;\n let x_z = self.x_z;\n let y_z = self.y_z;\n\n let last_x_y = self.last_x_y;\n let last_x_z = self.last_x_z;\n let last_y_z = self.last_y_z;\n\n self.camera_rotate_x_y(&engine, x_y - last_x_y);\n self.camera_rotate_x_z(&engine, x_z - last_x_z);\n self.camera_rotate_y_z(&engine, y_z - last_y_z);\n\n self.last_x_y = x_y;\n self.last_x_z = x_z;\n self.last_y_z = y_z;\n\n \/\/normalize rotations\n if self.x_z > (PI * 2.0) {\n self.x_z -= (PI * 2.0);\n }\n\n if self.x_y > (PI * 2.0) {\n self.x_y -= (PI * 2.0);\n }\n\n if self.y_z > (PI * 2.0) {\n self.y_z -= (PI * 2.0);\n } \n }\n\n pub fn camera_rotate_x_y(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.y -= engine.camera_y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + engine.camera_x;\n self.y = new_y + engine.camera_y;\n }\n \n pub fn camera_rotate_x_z(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= engine.camera_x;\n self.z -= engine.camera_z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + engine.camera_x;\n self.z = new_z + engine.camera_z;\n }\n\n pub fn camera_rotate_y_z(&mut self, engine: &start::Window, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= engine.camera_y;\n self.z -= engine.camera_z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + engine.camera_y;\n self.z = new_z + engine.camera_z;\n } \n\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.y -= y;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + x;\n self.y = new_y + y;\n }\n \n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.x -= x;\n self.z -= z;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + x;\n self.z = new_z + z;\n }\n\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n #[cfg(not(target_os = \"redox\"))]\n use std::f32;\n\n let s = f32::sin(angle);\n let c = f32::cos(angle);\n\n self.y -= y;\n self.z -= z;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + y;\n self.z = new_z + z;\n } \n}\n\n#[derive(Clone, Copy, Debug)]\npub struct Triangle {\n pub p1: DepthPoint,\n pub p2: DepthPoint,\n pub p3: DepthPoint,\n\n pub x: f32,\n pub y: f32,\n pub z: f32,\n\n pub x_y: f32,\n pub x_z: f32,\n pub y_z: f32,\n\n pub color: Color,\n}\n\nimpl Triangle {\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: 
DepthPoint, x: f32, y: f32, z: f32, color: Color) -> Triangle {\n Triangle {\n p1: p1,\n p2: p2, \n p3: p3,\n\n x: x,\n y: y, \n z: z,\n\n x_y: 0.0,\n x_z: 0.0,\n y_z: 0.0,\n \n color: color,\n }\n }\n\n pub fn apply_camera_rotations(&mut self, engine: &start::Window) {\n self.p1.x_y += self.x_y;\n self.p1.x_z += self.x_z;\n self.p1.y_z += self.y_z;\n\n self.p2.x_y += self.x_y;\n self.p2.x_z += self.x_z;\n self.p2.y_z += self.y_z;\n\n self.p3.x_y += self.x_y;\n self.p3.x_z += self.x_z;\n self.p3.y_z += self.y_z;\n\n self.p1.apply_camera_rotations(&engine);\n self.p2.apply_camera_rotations(&engine);\n self.p3.apply_camera_rotations(&engine);\n\n self.p1.x_y -= self.x_y;\n self.p1.x_z -= self.x_z;\n self.p1.y_z -= self.y_z;\n\n self.p2.x_y -= self.x_y;\n self.p2.x_z -= self.x_z;\n self.p2.y_z -= self.y_z;\n\n self.p3.x_y -= self.x_y;\n self.p3.x_z -= self.x_z;\n self.p3.y_z -= self.y_z;\n }\n\n pub fn coord_rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.coord_rotate_x_y(x, y, angle);\n self.p2.coord_rotate_x_y(x, y, angle);\n self.p3.coord_rotate_x_y(x, y, angle);\n }\n\n pub fn coord_rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_x_z(x, z, angle);\n self.p2.coord_rotate_x_z(x, z, angle);\n self.p3.coord_rotate_x_z(x, z, angle);\n }\n\n pub fn coord_rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.coord_rotate_y_z(y, z, angle);\n self.p2.coord_rotate_y_z(y, z, angle);\n self.p3.coord_rotate_y_z(y, z, angle);\n }\n}\n\n#[derive(Clone, Debug)]\npub struct TriangleGroup {\n pub triangles: Vec<Triangle>,\n}\n\nimpl TriangleGroup {\n pub fn new(triangles: Vec<Triangle>) -> TriangleGroup {\n TriangleGroup {\n triangles: triangles\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate rustc_serialize;\nextern crate time;\nextern crate crypto;\n\nuse time::Duration;\nuse rustc_serialize::base64;\nuse rustc_serialize::base64::{ToBase64, FromBase64};\nuse rustc_serialize::json;\nuse rustc_serialize::json::{ToJson, Json};\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\nuse std::fmt;\nuse std::fmt::Formatter;\nuse std::fmt::Debug;\n\npub type Payload = BTreeMap<String, String>; \/\/todo replace with &str\n\npub struct Header {\n algorithm: Algorithm,\n ttype: String\n}\n\nimpl Header {\n pub fn new(alg: Algorithm) -> Header {\n Header { algorithm: alg, ttype: Header::std_type() }\n }\n \n pub fn std_type() -> String {\n \"JWT\".to_string()\n }\n}\n\n#[derive(Clone, Copy)]\npub enum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\nimpl ToString for Algorithm {\n fn to_string(&self) -> String {\n match *self {\n Algorithm::HS256 => \"HS256\".to_string(),\n Algorithm::HS384 => \"HS384\".to_string(),\n Algorithm::HS512 => \"HS512\".to_string()\n } \n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nimpl ToJson for Header {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.ttype.to_json());\n map.insert(\"alg\".to_string(), self.algorithm.to_string().to_json());\n Json::Object(map)\n }\n}\n\npub fn encode(header: Header, secret: String, payload: Payload) -> String {\n let signing_input = get_signing_input(payload, &header.algorithm);\n let signature = sign_hmac(&signing_input, secret, header.algorithm);\n format!(\"{}.{}\", signing_input, signature)\n}\n\npub fn 
decode(encoded_token: String, secret: String, algorithm: Algorithm) -> Result<(Header, Payload), Error> {\n match decode_segments(encoded_token) {\n Some((header, payload, signature, signing_input)) => {\n if !verify_signature(algorithm, signing_input, &signature, secret.to_string()) {\n return Err(Error::SignatureInvalid)\n } \n \/\/todo\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n\n \/\/todo\n Ok((header, payload))\n },\n\n None => Err(Error::JWTInvalid)\n }\n}\n\nfn segments_count() -> usize {\n 3\n}\n\nfn get_signing_input(payload: Payload, algorithm: &Algorithm) -> String {\n let header = Header::new(*algorithm);\n let header_json_str = header.to_json();\n let encoded_header = base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n let p = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect();\n let payload_json = Json::Object(p);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\n\nfn sign_hmac(signing_input: &str, secret: String, algorithm: Algorithm) -> String {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n \n hmac.input(signing_input.as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\nfn decode_segments(encoded_token: String) -> Option<(Header, Payload, Vec<u8>, String)> {\n let raw_segments: Vec<&str> = encoded_token.split(\".\").collect();\n if raw_segments.len() != segments_count() {\n return None\n }\n\n let header_segment = raw_segments[0];\n let payload_segment = raw_segments[1];\n let crypto_segment = raw_segments[2];\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n let signature = &crypto_segment.as_bytes().from_base64().unwrap();\n\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Some((header, payload, signature.clone(), signing_input))\n}\n\nfn decode_header_and_payload<'a>(header_segment: &str, payload_segment: &str) -> (Header, Payload) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s = str::from_utf8(&bytes).unwrap();\n Json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n\n println!(\"header_json {:?}\", header_json);\n\n let header_tree = json_to_tree(header_json);\n let alg = header_tree.get(\"alg\").unwrap();\n let header = Header::new(parse_algorithm(alg));\n let payload_json = base64_to_json(payload_segment);\n\n println!(\"payload_json {:?}\", payload_json);\n\n let payload = json_to_tree(payload_json);\n (header, payload)\n}\n\nfn parse_algorithm(alg: &str) -> Algorithm {\n match alg {\n \"HS256\" => Algorithm::HS256,\n \"HS384\" => Algorithm::HS384,\n \"HS512\" => Algorithm::HS512,\n _ => panic!(\"Unknown algorithm\")\n }\n}\n\nfn verify_signature(algorithm: Algorithm, signing_input: String, signature: &[u8], secret: 
String) -> bool {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\nfn create_hmac<'a, D: Digest + 'a>(digest: D, some_str: String) -> Box<Mac + 'a> {\n Box::new(Hmac::new(digest, some_str.as_bytes()))\n}\n\n#[cfg(test)]\nmod tests {\n extern crate time;\n\n use time::Duration;\n\n use super::Header;\n use super::Payload;\n use super::encode;\n use super::decode;\n use super::Algorithm;\n\n use super::secure_compare;\n\n #[test]\n fn test_encode_and_decode_jwt_hs256() {\n let mut p1 = Payload::new();\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n p1.insert(\"key2\".to_string(), \"val2\".to_string());\n p1.insert(\"key3\".to_string(), \"val3\".to_string());\n\n let secret = \"secret123\";\n let header = Header::new(Algorithm::HS256);\n let jwt1 = encode(header, secret.to_string(), p1.clone());\n let maybe_res = decode(jwt1, secret.to_string(), Algorithm::HS256);\n assert!(maybe_res.is_ok());\n } \n\n #[test]\n fn test_decode_valid_jwt_hs256() {\n let mut p1 = Payload::new();\n p1.insert(\"key11\".to_string(), \"val1\".to_string());\n p1.insert(\"key22\".to_string(), \"val2\".to_string());\n let secret = \"secret123\";\n let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n let maybe_res = decode(jwt.to_string(), secret.to_string(), Algorithm::HS256);\n assert!(maybe_res.is_ok());\n }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_expired() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_ok_when_expired_not_verified() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n \n\/\/ #[test]\n\/\/ fn test_secure_compare_same_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(res);\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_secure_compare_different_strings() {\n\/\/ let str1 = \"same same\".as_bytes();\n\/\/ let str2 = \"same same but different\".as_bytes();\n\/\/ let res = secure_compare(str1, str2);\n\/\/ assert!(!res);\n\n\/\/ let str3 = \"same same\".as_bytes();\n\/\/ let str4 = \"same ssss\".as_bytes();\n\/\/ let res2 = secure_compare(str3, str4);\n\/\/ assert!(!res2);\n\/\/ }\n}<commit_msg>fixed2<commit_after>extern crate rustc_serialize;\nextern crate time;\nextern crate 
crypto;\n\nuse time::Duration;\nuse rustc_serialize::base64;\nuse rustc_serialize::base64::{ToBase64, FromBase64};\nuse rustc_serialize::json;\nuse rustc_serialize::json::{ToJson, Json};\nuse std::collections::BTreeMap;\nuse crypto::sha2::{Sha256, Sha384, Sha512};\nuse crypto::hmac::Hmac;\nuse crypto::digest::Digest;\nuse crypto::mac::Mac;\nuse std::str;\nuse std::fmt;\nuse std::fmt::Formatter;\nuse std::fmt::Debug;\n\npub type Payload = BTreeMap<String, String>; \/\/todo replace with &str\n\npub struct Header {\n algorithm: Algorithm,\n ttype: String\n}\n\nimpl Header {\n pub fn new(alg: Algorithm) -> Header {\n Header { algorithm: alg, ttype: Header::std_type() }\n }\n \n pub fn std_type() -> String {\n \"JWT\".to_string()\n }\n}\n\n#[derive(Clone, Copy)]\npub enum Algorithm {\n HS256,\n HS384,\n HS512\n}\n\nimpl ToString for Algorithm {\n fn to_string(&self) -> String {\n match *self {\n Algorithm::HS256 => \"HS256\".to_string(),\n Algorithm::HS384 => \"HS384\".to_string(),\n Algorithm::HS512 => \"HS512\".to_string()\n } \n }\n}\n\npub enum Error {\n SignatureExpired,\n SignatureInvalid,\n JWTInvalid,\n IssuerInvalid,\n ExpirationInvalid,\n AudienceInvalid\n}\n\nimpl ToJson for Header {\n fn to_json(&self) -> json::Json {\n let mut map = BTreeMap::new();\n map.insert(\"typ\".to_string(), self.ttype.to_json());\n map.insert(\"alg\".to_string(), self.algorithm.to_string().to_json());\n Json::Object(map)\n }\n}\n\npub fn encode(header: Header, secret: String, payload: Payload) -> String {\n let signing_input = get_signing_input(payload, &header.algorithm);\n let signature = sign_hmac(&signing_input, secret, header.algorithm);\n format!(\"{}.{}\", signing_input, signature)\n}\n\npub fn decode(encoded_token: String, secret: String, algorithm: Algorithm) -> Result<(Header, Payload), Error> {\n match decode_segments(encoded_token) {\n Some((header, payload, signature, signing_input)) => {\n if !verify_signature(algorithm, signing_input, &signature, secret.to_string()) {\n return Err(Error::SignatureInvalid)\n } \n \/\/todo\n \/\/ verify_issuer(payload_json);\n \/\/ verify_expiration(payload_json);\n \/\/ verify_audience();\n \/\/ verify_subject();\n \/\/ verify_notbefore();\n \/\/ verify_issuedat();\n \/\/ verify_jwtid();\n\n \/\/todo\n Ok((header, payload))\n },\n\n None => Err(Error::JWTInvalid)\n }\n}\n\nfn segments_count() -> usize {\n 3\n}\n\nfn get_signing_input(payload: Payload, algorithm: &Algorithm) -> String {\n let header = Header::new(*algorithm);\n let header_json_str = header.to_json();\n let encoded_header = base64_url_encode(header_json_str.to_string().as_bytes()).to_string();\n let p = payload.into_iter().map(|(k, v)| (k, v.to_json())).collect();\n let payload_json = Json::Object(p);\n let encoded_payload = base64_url_encode(payload_json.to_string().as_bytes()).to_string();\n format!(\"{}.{}\", encoded_header, encoded_payload)\n}\n\n\nfn sign_hmac(signing_input: &str, secret: String, algorithm: Algorithm) -> String {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n \n hmac.input(signing_input.as_bytes());\n base64_url_encode(hmac.result().code())\n}\n\nfn base64_url_encode(bytes: &[u8]) -> String {\n bytes.to_base64(base64::URL_SAFE)\n}\n\nfn json_to_tree(input: Json) -> BTreeMap<String, String> {\n match input {\n Json::Object(json_tree) => json_tree.into_iter().map(|(k, v)| (k, match v {\n Json::String(s) => 
s,\n _ => unreachable!()\n })).collect(),\n _ => unreachable!()\n }\n}\n\nfn decode_segments(encoded_token: String) -> Option<(Header, Payload, Vec<u8>, String)> {\n let raw_segments: Vec<&str> = encoded_token.split(\".\").collect();\n if raw_segments.len() != segments_count() {\n return None\n }\n\n let header_segment = raw_segments[0];\n let payload_segment = raw_segments[1];\n let crypto_segment = raw_segments[2];\n let (header, payload) = decode_header_and_payload(header_segment, payload_segment);\n let signature = &crypto_segment.as_bytes().from_base64().unwrap();\n\n let signing_input = format!(\"{}.{}\", header_segment, payload_segment);\n Some((header, payload, signature.clone(), signing_input))\n}\n\nfn decode_header_and_payload<'a>(header_segment: &str, payload_segment: &str) -> (Header, Payload) {\n fn base64_to_json(input: &str) -> Json {\n let bytes = input.as_bytes().from_base64().unwrap();\n let s = str::from_utf8(&bytes).unwrap();\n Json::from_str(s).unwrap()\n };\n\n let header_json = base64_to_json(header_segment);\n\n println!(\"header_json {:?}\", header_json);\n\n let header_tree = json_to_tree(header_json);\n let alg = header_tree.get(\"alg\").unwrap();\n let header = Header::new(parse_algorithm(alg));\n let payload_json = base64_to_json(payload_segment);\n\n println!(\"payload_json {:?}\", payload_json);\n\n let payload = json_to_tree(payload_json);\n (header, payload)\n}\n\nfn parse_algorithm(alg: &str) -> Algorithm {\n match alg {\n \"HS256\" => Algorithm::HS256,\n \"HS384\" => Algorithm::HS384,\n \"HS512\" => Algorithm::HS512,\n _ => panic!(\"Unknown algorithm\")\n }\n}\n\nfn verify_signature(algorithm: Algorithm, signing_input: String, signature: &[u8], secret: String) -> bool {\n let mut hmac = match algorithm {\n Algorithm::HS256 => create_hmac(Sha256::new(), secret),\n Algorithm::HS384 => create_hmac(Sha384::new(), secret),\n Algorithm::HS512 => create_hmac(Sha512::new(), secret)\n };\n\n hmac.input(signing_input.to_string().as_bytes());\n secure_compare(signature, hmac.result().code())\n}\n\nfn secure_compare(a: &[u8], b: &[u8]) -> bool {\n if a.len() != b.len() {\n return false\n }\n\n let mut res = 0_u8;\n for (&x, &y) in a.iter().zip(b.iter()) {\n res |= x ^ y;\n }\n\n res == 0\n}\n\nfn create_hmac<'a, D: Digest + 'a>(digest: D, some_str: String) -> Box<Mac + 'a> {\n Box::new(Hmac::new(digest, some_str.as_bytes()))\n}\n\n#[cfg(test)]\nmod tests {\n extern crate time;\n\n use time::Duration;\n\n use super::Header;\n use super::Payload;\n use super::encode;\n use super::decode;\n use super::Algorithm;\n use super::secure_compare;\n\n #[test]\n fn test_encode_and_decode_jwt_hs256() {\n let mut p1 = Payload::new();\n p1.insert(\"key1\".to_string(), \"val1\".to_string());\n p1.insert(\"key2\".to_string(), \"val2\".to_string());\n p1.insert(\"key3\".to_string(), \"val3\".to_string());\n\n let secret = \"secret123\";\n let header = Header::new(Algorithm::HS256);\n let jwt1 = encode(header, secret.to_string(), p1.clone());\n let maybe_res = decode(jwt1, secret.to_string(), Algorithm::HS256);\n assert!(maybe_res.is_ok());\n } \n\n #[test]\n fn test_decode_valid_jwt_hs256() {\n let mut p1 = Payload::new();\n p1.insert(\"key11\".to_string(), \"val1\".to_string());\n p1.insert(\"key22\".to_string(), \"val2\".to_string());\n let secret = \"secret123\";\n let jwt = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJrZXkxMSI6InZhbDEiLCJrZXkyMiI6InZhbDIifQ.jrcoVcRsmQqDEzSW9qOhG1HIrzV_n3nMhykNPnGvp9c\";\n let maybe_res = decode(jwt.to_string(), secret.to_string(), 
Algorithm::HS256);\n assert!(maybe_res.is_ok());\n }\n\n #[test]\n fn test_secure_compare_same_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(res);\n }\n\n #[test]\n fn test_fails_when_secure_compare_different_strings() {\n let str1 = \"same same\".as_bytes();\n let str2 = \"same same but different\".as_bytes();\n let res = secure_compare(str1, str2);\n assert!(!res);\n }\n\n\/\/ #[test]\n\/\/ fn test_fails_when_expired() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n\n\/\/ #[test]\n\/\/ fn test_ok_when_expired_not_verified() {\n\/\/ let now = time::get_time();\n\/\/ let past = now + Duration::minutes(-5);\n\/\/ let mut p1 = BTreeMap::new();\n\/\/ p1.insert(\"exp\".to_string(), past.sec.to_string());\n\/\/ p1.insert(\"key1\".to_string(), \"val1\".to_string());\n\/\/ let secret = \"secret123\";\n\/\/ let jwt = sign(secret, Some(p1.clone()), None);\n\/\/ let res = verify(jwt.as_slice(), secret, None);\n\/\/ assert!(res.is_ok());\n\/\/ }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n#![feature(str_escape)]\n#![feature(str_char)]\n#![feature(slice_extras)]\n\n\/\/ TODO we're going to allocate a whole bunch of temp Strings, is it worth\n\/\/ keeping some scratch mem for this and running our own StrPool?\n\/\/ TODO for lint violations of names, emit a refactor script\n\n\n#[macro_use]\nextern crate log;\n\nextern crate getopts;\nextern crate rustc;\nextern crate rustc_driver;\nextern crate syntax;\nextern crate rustc_serialize;\n\nextern crate strings;\n\nuse rustc::session::Session;\nuse rustc::session::config as rustc_config;\nuse rustc::session::config::Input;\nuse rustc_driver::{driver, CompilerCalls, Compilation};\n\nuse syntax::ast;\nuse syntax::codemap::CodeMap;\nuse syntax::diagnostics;\nuse syntax::visit;\n\nuse std::path::PathBuf;\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::mem::swap;\n\nuse issues::{BadIssueSeeker, Issue};\nuse changes::ChangeSet;\nuse visitor::FmtVisitor;\nuse config::Config;\n\n#[macro_use]\nmod config;\n#[macro_use]\nmod utils;\nmod changes;\nmod visitor;\nmod items;\nmod missed_spans;\nmod lists;\nmod types;\nmod expr;\nmod imports;\nmod issues;\nmod rewrite;\nmod string;\nmod comment;\n\nconst MIN_STRING: usize = 10;\n\/\/ When we get scoped annotations, we should have rustfmt::skip.\nconst SKIP_ANNOTATION: &'static str = \"rustfmt_skip\";\n\n#[derive(Copy, Clone)]\npub enum WriteMode {\n Overwrite,\n \/\/ str is the extension of the new file\n NewFile(&'static str),\n \/\/ Write the output to stdout.\n Display,\n \/\/ Return the result as a mapping from filenames to StringBuffers.\n Return(&'static Fn(HashMap<String, String>)),\n}\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum NewlineStyle {\n Windows, \/\/ \\r\\n\n Unix, \/\/ \\n\n}\n\nimpl_enum_decodable!(NewlineStyle, Windows, Unix);\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum BraceStyle {\n AlwaysNextLine,\n PreferSameLine,\n \/\/ Prefer same line except where there is a where clause, in which case force\n \/\/ the brace to the next line.\n SameLineWhere,\n}\n\nimpl_enum_decodable!(BraceStyle, AlwaysNextLine, PreferSameLine, SameLineWhere);\n\n\/\/ How to indent a function's return type.\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum ReturnIndent {\n \/\/ Aligned with the arguments\n WithArgs,\n \/\/ Aligned with the where clause\n WithWhereClause,\n}\n\nimpl_enum_decodable!(ReturnIndent, WithArgs, WithWhereClause);\n\nenum ErrorKind {\n \/\/ Line has exceeded character limit\n LineOverflow,\n \/\/ Line ends in whitespace\n TrailingWhitespace,\n \/\/ TO-DO or FIX-ME item without an issue number\n BadIssue(Issue),\n}\n\nimpl fmt::Display for ErrorKind {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n match *self {\n ErrorKind::LineOverflow => {\n write!(fmt, \"line exceeded maximum length\")\n },\n ErrorKind::TrailingWhitespace => {\n write!(fmt, \"left behind trailing whitespace\")\n },\n ErrorKind::BadIssue(issue) => {\n write!(fmt, \"found {}\", issue)\n },\n }\n }\n}\n\n\/\/ Formatting errors that are identified *after* rustfmt has run\nstruct FormattingError {\n line: u32,\n kind: ErrorKind,\n}\n\nstruct FormatReport {\n \/\/ Maps stringified file paths to their associated formatting errors\n file_error_map: HashMap<String, Vec<FormattingError>>,\n}\n\nimpl fmt::Display for FormatReport {\n \/\/ Prints all the formatting errors.\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), 
fmt::Error> {\n for (file, errors) in self.file_error_map.iter() {\n for error in errors {\n try!(write!(fmt,\n \"Rustfmt failed at {}:{}: {} (sorry)\\n\",\n file,\n error.line,\n error.kind));\n }\n }\n Ok(())\n }\n}\n\n\/\/ Formatting which depends on the AST.\nfn fmt_ast<'a>(krate: &ast::Crate, codemap: &'a CodeMap, config: &'a Config) -> ChangeSet<'a> {\n let mut visitor = FmtVisitor::from_codemap(codemap, config);\n visit::walk_crate(&mut visitor, krate);\n visitor.changes\n}\n\n\/\/ Formatting done on a char by char or line by line basis.\n\/\/ TODO warn on bad license\n\/\/ TODO other stuff for parity with make tidy\nfn fmt_lines(changes: &mut ChangeSet, config: &Config) -> FormatReport {\n let mut truncate_todo = Vec::new();\n let mut report = FormatReport { file_error_map: HashMap::new() };\n\n \/\/ Iterate over the chars in the change set.\n for (f, text) in changes.text() {\n let mut trims = vec![];\n let mut last_wspace: Option<usize> = None;\n let mut line_len = 0;\n let mut cur_line = 1;\n let mut newline_count = 0;\n let mut errors = vec![];\n let mut issue_seeker = BadIssueSeeker::new(config.report_todo,\n config.report_fixme);\n\n for (c, b) in text.chars() {\n if c == '\\r' { continue; }\n\n \/\/ Add warnings for bad todos\/ fixmes\n if let Some(issue) = issue_seeker.inspect(c) {\n errors.push(FormattingError {\n line: cur_line,\n kind: ErrorKind::BadIssue(issue)\n });\n }\n\n if c == '\\n' {\n \/\/ Check for (and record) trailing whitespace.\n if let Some(lw) = last_wspace {\n trims.push((cur_line, lw, b));\n line_len -= b - lw;\n }\n \/\/ Check for any line width errors we couldn't correct.\n if line_len > config.max_width {\n errors.push(FormattingError {\n line: cur_line,\n kind: ErrorKind::LineOverflow\n });\n }\n line_len = 0;\n cur_line += 1;\n newline_count += 1;\n last_wspace = None;\n } else {\n newline_count = 0;\n line_len += 1;\n if c.is_whitespace() {\n if last_wspace.is_none() {\n last_wspace = Some(b);\n }\n } else {\n last_wspace = None;\n }\n }\n }\n\n if newline_count > 1 {\n debug!(\"track truncate: {} {} {}\", f, text.len, newline_count);\n truncate_todo.push((f.to_owned(), text.len - newline_count + 1))\n }\n\n for &(l, _, _) in trims.iter() {\n errors.push(FormattingError {\n line: l,\n kind: ErrorKind::TrailingWhitespace\n });\n }\n\n report.file_error_map.insert(f.to_owned(), errors);\n }\n\n for (f, l) in truncate_todo {\n changes.get_mut(&f).truncate(l);\n }\n\n report\n}\n\nstruct RustFmtCalls {\n input_path: Option<PathBuf>,\n write_mode: WriteMode,\n config: Option<Box<config::Config>>,\n}\n\nimpl<'a> CompilerCalls<'a> for RustFmtCalls {\n fn early_callback(&mut self,\n _: &getopts::Matches,\n _: &diagnostics::registry::Registry)\n -> Compilation {\n Compilation::Continue\n }\n\n fn some_input(&mut self,\n input: Input,\n input_path: Option<PathBuf>)\n -> (Input, Option<PathBuf>) {\n match input_path {\n Some(ref ip) => self.input_path = Some(ip.clone()),\n _ => {\n \/\/ FIXME should handle string input and write to stdout or something\n panic!(\"No input path\");\n }\n }\n (input, input_path)\n }\n\n fn no_input(&mut self,\n _: &getopts::Matches,\n _: &rustc_config::Options,\n _: &Option<PathBuf>,\n _: &Option<PathBuf>,\n _: &diagnostics::registry::Registry)\n -> Option<(Input, Option<PathBuf>)> {\n panic!(\"No input supplied to RustFmt\");\n }\n\n fn late_callback(&mut self,\n _: &getopts::Matches,\n _: &Session,\n _: &Input,\n _: &Option<PathBuf>,\n _: &Option<PathBuf>)\n -> Compilation {\n Compilation::Continue\n }\n\n fn 
build_controller(&mut self, _: &Session) -> driver::CompileController<'a> {\n let write_mode = self.write_mode;\n\n let mut config_option = None;\n swap(&mut self.config, &mut config_option);\n let config = config_option.unwrap();\n\n let mut control = driver::CompileController::basic();\n control.after_parse.stop = Compilation::Stop;\n control.after_parse.callback = Box::new(move |state| {\n let krate = state.krate.unwrap();\n let codemap = state.session.codemap();\n let mut changes = fmt_ast(krate, codemap, &*config);\n \/\/ For some reason, the codemap does not include terminating newlines\n \/\/ so we must add one on for each file. This is sad.\n changes.append_newlines();\n println!(\"{}\", fmt_lines(&mut changes, &*config));\n\n let result = changes.write_all_files(write_mode, &*config);\n\n match result {\n Err(msg) => println!(\"Error writing files: {}\", msg),\n Ok(result) => {\n if let WriteMode::Return(callback) = write_mode {\n callback(result);\n }\n }\n }\n });\n\n control\n }\n}\n\n\/\/ args are the arguments passed on the command line, generally passed through\n\/\/ to the compiler.\n\/\/ write_mode determines what happens to the result of running rustfmt, see\n\/\/ WriteMode.\n\/\/ default_config is a string of toml data to be used to configure rustfmt.\npub fn run(args: Vec<String>, write_mode: WriteMode, default_config: &str) {\n let config = Some(Box::new(config::Config::from_toml(default_config)));\n let mut call_ctxt = RustFmtCalls { input_path: None, write_mode: write_mode, config: config };\n rustc_driver::run_compiler(&args, &mut call_ctxt);\n}\n<commit_msg>Don't apologise if its not our fault in warnings<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n#![feature(str_escape)]\n#![feature(str_char)]\n#![feature(slice_extras)]\n\n\/\/ TODO we're going to allocate a whole bunch of temp Strings, is it worth\n\/\/ keeping some scratch mem for this and running our own StrPool?\n\/\/ TODO for lint violations of names, emit a refactor script\n\n\n#[macro_use]\nextern crate log;\n\nextern crate getopts;\nextern crate rustc;\nextern crate rustc_driver;\nextern crate syntax;\nextern crate rustc_serialize;\n\nextern crate strings;\n\nuse rustc::session::Session;\nuse rustc::session::config as rustc_config;\nuse rustc::session::config::Input;\nuse rustc_driver::{driver, CompilerCalls, Compilation};\n\nuse syntax::ast;\nuse syntax::codemap::CodeMap;\nuse syntax::diagnostics;\nuse syntax::visit;\n\nuse std::path::PathBuf;\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::mem::swap;\n\nuse issues::{BadIssueSeeker, Issue};\nuse changes::ChangeSet;\nuse visitor::FmtVisitor;\nuse config::Config;\n\n#[macro_use]\nmod config;\n#[macro_use]\nmod utils;\nmod changes;\nmod visitor;\nmod items;\nmod missed_spans;\nmod lists;\nmod types;\nmod expr;\nmod imports;\nmod issues;\nmod rewrite;\nmod string;\nmod comment;\n\nconst MIN_STRING: usize = 10;\n\/\/ When we get scoped annotations, we should have rustfmt::skip.\nconst SKIP_ANNOTATION: &'static str = \"rustfmt_skip\";\n\n#[derive(Copy, Clone)]\npub enum WriteMode {\n Overwrite,\n \/\/ str is the extension of the new file\n NewFile(&'static str),\n \/\/ Write the output to stdout.\n Display,\n \/\/ Return the result as a mapping from filenames to StringBuffers.\n Return(&'static Fn(HashMap<String, String>)),\n}\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum NewlineStyle {\n Windows, \/\/ \\r\\n\n Unix, \/\/ \\n\n}\n\nimpl_enum_decodable!(NewlineStyle, Windows, Unix);\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum BraceStyle {\n AlwaysNextLine,\n PreferSameLine,\n \/\/ Prefer same line except where there is a where clause, in which case force\n \/\/ the brace to the next line.\n SameLineWhere,\n}\n\nimpl_enum_decodable!(BraceStyle, AlwaysNextLine, PreferSameLine, SameLineWhere);\n\n\/\/ How to indent a function's return type.\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\npub enum ReturnIndent {\n \/\/ Aligned with the arguments\n WithArgs,\n \/\/ Aligned with the where clause\n WithWhereClause,\n}\n\nimpl_enum_decodable!(ReturnIndent, WithArgs, WithWhereClause);\n\nenum ErrorKind {\n \/\/ Line has exceeded character limit\n LineOverflow,\n \/\/ Line ends in whitespace\n TrailingWhitespace,\n \/\/ TO-DO or FIX-ME item without an issue number\n BadIssue(Issue),\n}\n\nimpl fmt::Display for ErrorKind {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n match *self {\n ErrorKind::LineOverflow => {\n write!(fmt, \"line exceeded maximum length\")\n }\n ErrorKind::TrailingWhitespace => {\n write!(fmt, \"left behind trailing whitespace\")\n }\n ErrorKind::BadIssue(issue) => {\n write!(fmt, \"found {}\", issue)\n }\n }\n }\n}\n\n\/\/ Formatting errors that are identified *after* rustfmt has run\nstruct FormattingError {\n line: u32,\n kind: ErrorKind,\n}\n\nimpl FormattingError {\n fn msg_prefix(&self) -> &str {\n match self.kind {\n ErrorKind::LineOverflow |\n ErrorKind::TrailingWhitespace => \"Rustfmt failed at\",\n ErrorKind::BadIssue(_) => \"WARNING:\",\n }\n }\n\n fn msg_suffix(&self) -> &str {\n match self.kind {\n ErrorKind::LineOverflow |\n 
ErrorKind::TrailingWhitespace => \"(sorry)\",\n ErrorKind::BadIssue(_) => \"\",\n }\n }\n}\n\nstruct FormatReport {\n \/\/ Maps stringified file paths to their associated formatting errors\n file_error_map: HashMap<String, Vec<FormattingError>>,\n}\n\nimpl fmt::Display for FormatReport {\n \/\/ Prints all the formatting errors.\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n for (file, errors) in self.file_error_map.iter() {\n for error in errors {\n try!(write!(fmt,\n \"{} {}:{}: {} {}\\n\",\n error.msg_prefix(),\n file,\n error.line,\n error.kind,\n error.msg_suffix()));\n }\n }\n Ok(())\n }\n}\n\n\/\/ Formatting which depends on the AST.\nfn fmt_ast<'a>(krate: &ast::Crate, codemap: &'a CodeMap, config: &'a Config) -> ChangeSet<'a> {\n let mut visitor = FmtVisitor::from_codemap(codemap, config);\n visit::walk_crate(&mut visitor, krate);\n visitor.changes\n}\n\n\/\/ Formatting done on a char by char or line by line basis.\n\/\/ TODO warn on bad license\n\/\/ TODO other stuff for parity with make tidy\nfn fmt_lines(changes: &mut ChangeSet, config: &Config) -> FormatReport {\n let mut truncate_todo = Vec::new();\n let mut report = FormatReport { file_error_map: HashMap::new() };\n\n \/\/ Iterate over the chars in the change set.\n for (f, text) in changes.text() {\n let mut trims = vec![];\n let mut last_wspace: Option<usize> = None;\n let mut line_len = 0;\n let mut cur_line = 1;\n let mut newline_count = 0;\n let mut errors = vec![];\n let mut issue_seeker = BadIssueSeeker::new(config.report_todo,\n config.report_fixme);\n\n for (c, b) in text.chars() {\n if c == '\\r' { continue; }\n\n \/\/ Add warnings for bad todos\/ fixmes\n if let Some(issue) = issue_seeker.inspect(c) {\n errors.push(FormattingError {\n line: cur_line,\n kind: ErrorKind::BadIssue(issue)\n });\n }\n\n if c == '\\n' {\n \/\/ Check for (and record) trailing whitespace.\n if let Some(lw) = last_wspace {\n trims.push((cur_line, lw, b));\n line_len -= b - lw;\n }\n \/\/ Check for any line width errors we couldn't correct.\n if line_len > config.max_width {\n errors.push(FormattingError {\n line: cur_line,\n kind: ErrorKind::LineOverflow\n });\n }\n line_len = 0;\n cur_line += 1;\n newline_count += 1;\n last_wspace = None;\n } else {\n newline_count = 0;\n line_len += 1;\n if c.is_whitespace() {\n if last_wspace.is_none() {\n last_wspace = Some(b);\n }\n } else {\n last_wspace = None;\n }\n }\n }\n\n if newline_count > 1 {\n debug!(\"track truncate: {} {} {}\", f, text.len, newline_count);\n truncate_todo.push((f.to_owned(), text.len - newline_count + 1))\n }\n\n for &(l, _, _) in trims.iter() {\n errors.push(FormattingError {\n line: l,\n kind: ErrorKind::TrailingWhitespace\n });\n }\n\n report.file_error_map.insert(f.to_owned(), errors);\n }\n\n for (f, l) in truncate_todo {\n changes.get_mut(&f).truncate(l);\n }\n\n report\n}\n\nstruct RustFmtCalls {\n input_path: Option<PathBuf>,\n write_mode: WriteMode,\n config: Option<Box<config::Config>>,\n}\n\nimpl<'a> CompilerCalls<'a> for RustFmtCalls {\n fn early_callback(&mut self,\n _: &getopts::Matches,\n _: &diagnostics::registry::Registry)\n -> Compilation {\n Compilation::Continue\n }\n\n fn some_input(&mut self,\n input: Input,\n input_path: Option<PathBuf>)\n -> (Input, Option<PathBuf>) {\n match input_path {\n Some(ref ip) => self.input_path = Some(ip.clone()),\n _ => {\n \/\/ FIXME should handle string input and write to stdout or something\n panic!(\"No input path\");\n }\n }\n (input, input_path)\n }\n\n fn no_input(&mut self,\n _: 
&getopts::Matches,\n _: &rustc_config::Options,\n _: &Option<PathBuf>,\n _: &Option<PathBuf>,\n _: &diagnostics::registry::Registry)\n -> Option<(Input, Option<PathBuf>)> {\n panic!(\"No input supplied to RustFmt\");\n }\n\n fn late_callback(&mut self,\n _: &getopts::Matches,\n _: &Session,\n _: &Input,\n _: &Option<PathBuf>,\n _: &Option<PathBuf>)\n -> Compilation {\n Compilation::Continue\n }\n\n fn build_controller(&mut self, _: &Session) -> driver::CompileController<'a> {\n let write_mode = self.write_mode;\n\n let mut config_option = None;\n swap(&mut self.config, &mut config_option);\n let config = config_option.unwrap();\n\n let mut control = driver::CompileController::basic();\n control.after_parse.stop = Compilation::Stop;\n control.after_parse.callback = Box::new(move |state| {\n let krate = state.krate.unwrap();\n let codemap = state.session.codemap();\n let mut changes = fmt_ast(krate, codemap, &*config);\n \/\/ For some reason, the codemap does not include terminating newlines\n \/\/ so we must add one on for each file. This is sad.\n changes.append_newlines();\n println!(\"{}\", fmt_lines(&mut changes, &*config));\n\n let result = changes.write_all_files(write_mode, &*config);\n\n match result {\n Err(msg) => println!(\"Error writing files: {}\", msg),\n Ok(result) => {\n if let WriteMode::Return(callback) = write_mode {\n callback(result);\n }\n }\n }\n });\n\n control\n }\n}\n\n\/\/ args are the arguments passed on the command line, generally passed through\n\/\/ to the compiler.\n\/\/ write_mode determines what happens to the result of running rustfmt, see\n\/\/ WriteMode.\n\/\/ default_config is a string of toml data to be used to configure rustfmt.\npub fn run(args: Vec<String>, write_mode: WriteMode, default_config: &str) {\n let config = Some(Box::new(config::Config::from_toml(default_config)));\n let mut call_ctxt = RustFmtCalls { input_path: None, write_mode: write_mode, config: config };\n rustc_driver::run_compiler(&args, &mut call_ctxt);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>\tmodified: dr-daemon.rs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Implements traits from Diesel, allowing identifiers to be used as database fields.\n\nuse std::{convert::TryFrom, error::Error as StdError, io::Write};\n\nuse diesel::{\n backend::Backend,\n deserialize::{FromSql, Result as DeserializeResult},\n serialize::{Output, Result as SerializeResult, ToSql},\n sql_types::Text,\n};\n\nmacro_rules! diesel_impl {\n ($name:ident) => {\n impl<DB> ToSql<Text, DB> for $crate::$name\n where\n DB: Backend,\n {\n fn to_sql<W: Write>(&self, out: &mut Output<W, DB>) -> SerializeResult {\n ToSql::<Text, DB>::to_sql(&self.to_string(), out)\n }\n }\n\n impl<DB> FromSql<Text, DB> for $crate::$name\n where\n String: FromSql<Text, DB>,\n DB: Backend,\n {\n fn from_sql(value: Option<&<DB as Backend>::RawValue>) -> DeserializeResult<Self> {\n let string = <String as FromSql<Text, DB>>::from_sql(value)?;\n Self::try_from(string.as_str())\n .map_err(|error| Box::new(error) as Box<StdError + Send + Sync>)\n }\n }\n };\n}\n\ndiesel_impl!(EventId);\ndiesel_impl!(RoomAliasId);\ndiesel_impl!(RoomId);\ndiesel_impl!(RoomIdOrAliasId);\ndiesel_impl!(RoomVersionId);\ndiesel_impl!(UserId);\n<commit_msg>Fix missing<commit_after>\/\/! 
Implements traits from Diesel, allowing identifiers to be used as database fields.\n\nuse std::{convert::TryFrom, error::Error as StdError, io::Write};\n\nuse diesel::{\n backend::Backend,\n deserialize::{FromSql, Result as DeserializeResult},\n serialize::{Output, Result as SerializeResult, ToSql},\n sql_types::Text,\n};\n\nmacro_rules! diesel_impl {\n ($name:ident) => {\n impl<DB> ToSql<Text, DB> for $crate::$name\n where\n DB: Backend,\n {\n fn to_sql<W: Write>(&self, out: &mut Output<W, DB>) -> SerializeResult {\n ToSql::<Text, DB>::to_sql(&self.to_string(), out)\n }\n }\n\n impl<DB> FromSql<Text, DB> for $crate::$name\n where\n String: FromSql<Text, DB>,\n DB: Backend,\n {\n fn from_sql(value: Option<&<DB as Backend>::RawValue>) -> DeserializeResult<Self> {\n let string = <String as FromSql<Text, DB>>::from_sql(value)?;\n Self::try_from(string.as_str())\n .map_err(|error| Box::new(error) as Box<dyn StdError + Send + Sync>)\n }\n }\n };\n}\n\ndiesel_impl!(EventId);\ndiesel_impl!(RoomAliasId);\ndiesel_impl!(RoomId);\ndiesel_impl!(RoomIdOrAliasId);\ndiesel_impl!(RoomVersionId);\ndiesel_impl!(UserId);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example of reading and printing tags<commit_after>\/\/ Claxon -- A FLAC decoding library in Rust\n\/\/ Copyright 2017 Ruud van Asseldonk\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ A copy of the License has been included in the root of the repository.\n\n\/\/ This file contains a minimal example of using Claxon and Hound to decode a\n\/\/ flac file. This can be done more efficiently, but it is also more verbose.\n\/\/ See the `decode` example for that.\n\nextern crate claxon;\n\nuse std::env;\n\nfn main() {\n for fname in env::args().skip(1) {\n let reader = claxon::FlacReader::open(&fname).expect(\"failed to open FLAC stream\");\n\n \/\/ We can use `tags()` to iterate over all tags. 
When looking for a\n \/\/ specific tag, `get_tag()` may be useful instead.\n for &(ref name, ref value) in reader.tags() {\n \/\/ Print comments in a format similar to what\n \/\/ `metaflac --block-type=VORBIS_COMMENT --list` would print.\n println!(\"{}: {}={}\", fname, name, value);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove logging calls in favour of debug calls<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>switch to the new non-variadic API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tests(unicode): use scope wide attribute<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a counter example<commit_after>extern crate robots;\n\nuse std::any::Any;\nuse std::sync::{Arc, Mutex};\nuse std::time::Duration;\n\nuse robots::actors::{Actor, ActorSystem, ActorCell, Props};\n\nstruct Counter {\n counter: Mutex<u32>,\n}\n\nimpl Actor for Counter {\n fn receive(&self, _message: Box<Any>, _context: ActorCell) {\n let mut count = self.counter.lock().unwrap();\n *count += 1;\n println!(\"count: {}\", *count);\n }\n}\n\nimpl Counter {\n fn new(_dummy: ()) -> Counter {\n Counter {\n counter: Mutex::new(0)\n }\n }\n}\n\nfn main() {\n let actor_system = ActorSystem::new(\"counter\".to_owned());\n\n let props = Props::new(Arc::new(Counter::new),());\n let actor_ref_1 = actor_system.actor_of(props.clone(), \"counter\".to_owned());\n let actor_ref_2 = actor_system.actor_of(props.clone(), \"sender\".to_owned());\n\n actor_ref_1.tell_to(actor_ref_2.clone(), ());\n actor_ref_1.tell_to(actor_ref_2.clone(), ());\n actor_ref_1.tell_to(actor_ref_2.clone(), ());\n\n std::thread::sleep(Duration::from_millis(100));\n actor_system.shutdown();\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::{DOMRefCell, Ref};\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::{OnErrorEventHandlerNonNull, EventHandlerNonNull};\nuse dom::bindings::codegen::Bindings::WindowBinding;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::error::{Fallible, InvalidCharacter};\nuse dom::bindings::global;\nuse dom::bindings::js::{MutNullableJS, JSRef, Temporary, OptionalSettable};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::browsercontext::BrowserContext;\nuse dom::console::Console;\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, WindowTypeId, EventTargetHelpers};\nuse dom::location::Location;\nuse dom::navigator::Navigator;\nuse dom::performance::Performance;\nuse dom::screen::Screen;\nuse layout_interface::{ReflowGoal, ReflowForDisplay};\nuse page::Page;\nuse script_task::{ExitWindowMsg, ScriptChan, TriggerLoadMsg, TriggerFragmentMsg};\nuse script_task::FromWindow;\nuse script_traits::ScriptControlChan;\nuse timers::{TimerId, TimerManager};\n\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::LoadData;\nuse servo_net::image_cache_task::ImageCacheTask;\nuse servo_util::str::{DOMString,HTML_SPACE_CHARACTERS};\n\nuse js::jsapi::JS_EvaluateUCScript;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_GC, JS_GetRuntime};\nuse js::jsval::{JSVal, UndefinedValue};\nuse js::rust::with_compartment;\nuse url::{Url, UrlParser};\n\nuse libc;\nuse serialize::base64::{FromBase64, ToBase64, STANDARD};\nuse std::default::Default;\nuse std::rc::Rc;\nuse time;\n\n#[dom_struct]\npub struct Window {\n eventtarget: EventTarget,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n console: MutNullableJS<Console>,\n location: MutNullableJS<Location>,\n navigator: MutNullableJS<Navigator>,\n image_cache_task: ImageCacheTask,\n compositor: Box<ScriptListener+'static>,\n browser_context: DOMRefCell<Option<BrowserContext>>,\n page: Rc<Page>,\n performance: MutNullableJS<Performance>,\n navigation_start: u64,\n navigation_start_precise: f64,\n screen: MutNullableJS<Screen>,\n timers: TimerManager\n}\n\nimpl Window {\n pub fn get_cx(&self) -> *mut JSContext {\n let js_info = self.page().js_info();\n (*js_info.as_ref().unwrap().js_context).ptr\n }\n\n pub fn script_chan<'a>(&'a self) -> &'a ScriptChan {\n &self.script_chan\n }\n\n pub fn control_chan<'a>(&'a self) -> &'a ScriptControlChan {\n &self.control_chan\n }\n\n pub fn image_cache_task<'a>(&'a self) -> &'a ImageCacheTask {\n &self.image_cache_task\n }\n\n pub fn compositor<'a>(&'a self) -> &'a ScriptListener+'static {\n &*self.compositor\n }\n\n pub fn browser_context(&self) -> Ref<Option<BrowserContext>> {\n self.browser_context.borrow()\n }\n\n pub fn page<'a>(&'a self) -> &'a Page {\n &*self.page\n }\n\n pub fn navigation_start(&self) -> u64 {\n self.navigation_start\n }\n\n pub fn navigation_start_precise(&self) -> f64 {\n self.navigation_start_precise\n }\n\n pub fn get_url(&self) -> Url {\n self.page().get_url()\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_btoa(btoa: DOMString) -> Fallible<DOMString> {\n let input = btoa.as_slice();\n \/\/ \"The btoa() method must throw an InvalidCharacterError exception if\n \/\/ the method's first argument contains any character whose code point\n \/\/ is greater than U+00FF.\"\n if input.chars().any(|c: char| c > '\\u00FF') {\n Err(InvalidCharacter)\n } else {\n \/\/ \"Otherwise, the user agent must 
convert that argument to a\n \/\/ sequence of octets whose nth octet is the eight-bit\n \/\/ representation of the code point of the nth character of\n \/\/ the argument,\"\n let octets = input.chars().map(|c: char| c as u8).collect::<Vec<u8>>();\n\n \/\/ \"and then must apply the base64 algorithm to that sequence of\n \/\/ octets, and return the result. [RFC4648]\"\n Ok(octets.as_slice().to_base64(STANDARD))\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_atob(atob: DOMString) -> Fallible<DOMString> {\n \/\/ \"Let input be the string being parsed.\"\n let mut input = atob.as_slice();\n\n \/\/ \"Remove all space characters from input.\"\n \/\/ serialize::base64::from_base64 ignores \\r and \\n,\n \/\/ but it treats the other space characters as\n \/\/ invalid input.\n fn is_html_space(c: char) -> bool {\n HTML_SPACE_CHARACTERS.iter().any(|&m| m == c)\n }\n let without_spaces = input.chars()\n .filter(|&c| ! is_html_space(c))\n .collect::<String>();\n input = without_spaces.as_slice();\n\n \/\/ \"If the length of input divides by 4 leaving no remainder, then:\n \/\/ if input ends with one or two U+003D EQUALS SIGN (=) characters,\n \/\/ remove them from input.\"\n if input.len() % 4 == 0 {\n if input.ends_with(\"==\") {\n input = input.slice_to(input.len() - 2)\n } else if input.ends_with(\"=\") {\n input = input.slice_to(input.len() - 1)\n }\n }\n\n \/\/ \"If the length of input divides by 4 leaving a remainder of 1,\n \/\/ throw an InvalidCharacterError exception and abort these steps.\"\n if input.len() % 4 == 1 {\n return Err(InvalidCharacter)\n }\n\n \/\/ \"If input contains a character that is not in the following list of\n \/\/ characters and character ranges, throw an InvalidCharacterError\n \/\/ exception and abort these steps:\n \/\/\n \/\/ U+002B PLUS SIGN (+)\n \/\/ U+002F SOLIDUS (\/)\n \/\/ Alphanumeric ASCII characters\"\n if input.chars()\n .find(|&c| !(c == '+' || c == '\/' || c.is_alphanumeric()))\n .is_some() {\n return Err(InvalidCharacter)\n }\n\n match input.from_base64() {\n Ok(data) => Ok(data.iter().map(|&b| b as char).collect::<String>()),\n Err(..) 
=> Err(InvalidCharacter)\n }\n}\n\n\nimpl<'a> WindowMethods for JSRef<'a, Window> {\n fn Alert(self, s: DOMString) {\n \/\/ Right now, just print to the console\n println!(\"ALERT: {:s}\", s);\n }\n\n fn Close(self) {\n let ScriptChan(ref chan) = self.script_chan;\n chan.send(ExitWindowMsg(self.page.id.clone()));\n }\n\n fn Document(self) -> Temporary<Document> {\n let frame = self.page().frame();\n Temporary::new(frame.as_ref().unwrap().document.clone())\n }\n\n fn Location(self) -> Temporary<Location> {\n if self.location.get().is_none() {\n let page = self.page.clone();\n let location = Location::new(self, page);\n self.location.assign(Some(location));\n }\n self.location.get().unwrap()\n }\n\n fn Console(self) -> Temporary<Console> {\n if self.console.get().is_none() {\n let console = Console::new(&global::Window(self));\n self.console.assign(Some(console));\n }\n self.console.get().unwrap()\n }\n\n fn Navigator(self) -> Temporary<Navigator> {\n if self.navigator.get().is_none() {\n let navigator = Navigator::new(self);\n self.navigator.assign(Some(navigator));\n }\n self.navigator.get().unwrap()\n }\n\n fn SetTimeout(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n false, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearTimeout(self, handle: i32) {\n self.timers.clear_timeout_or_interval(handle);\n }\n\n fn SetInterval(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n true, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearInterval(self, handle: i32) {\n self.ClearTimeout(handle);\n }\n\n fn Window(self) -> Temporary<Window> {\n Temporary::from_rooted(self)\n }\n\n fn Self(self) -> Temporary<Window> {\n self.Window()\n }\n\n \/\/ http:\/\/www.whatwg.org\/html\/#dom-frames\n fn Frames(self) -> Temporary<Window> {\n self.Window()\n }\n\n fn Parent(self) -> Temporary<Window> {\n \/\/TODO - Once we support iframes correctly this needs to return the parent frame\n self.Window()\n }\n\n fn Performance(self) -> Temporary<Performance> {\n if self.performance.get().is_none() {\n let performance = Performance::new(self);\n self.performance.assign(Some(performance));\n }\n self.performance.get().unwrap()\n }\n\n event_handler!(click, GetOnclick, SetOnclick)\n event_handler!(load, GetOnload, SetOnload)\n event_handler!(unload, GetOnunload, SetOnunload)\n error_event_handler!(error, GetOnerror, SetOnerror)\n\n fn Screen(self) -> Temporary<Screen> {\n if self.screen.get().is_none() {\n let screen = Screen::new(self);\n self.screen.assign(Some(screen));\n }\n self.screen.get().unwrap()\n }\n\n fn Debug(self, message: DOMString) {\n debug!(\"{:s}\", message);\n }\n\n fn Gc(self) {\n unsafe {\n JS_GC(JS_GetRuntime(self.get_cx()));\n }\n }\n\n fn Btoa(self, btoa: DOMString) -> Fallible<DOMString> {\n base64_btoa(btoa)\n }\n\n fn Atob(self, atob: DOMString) -> Fallible<DOMString> {\n base64_atob(atob)\n }\n}\n\nimpl Reflectable for Window {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.eventtarget.reflector()\n }\n}\n\npub trait WindowHelpers {\n fn reflow(self);\n fn flush_layout(self, goal: ReflowGoal);\n fn wait_until_safe_to_modify_dom(self);\n fn init_browser_context(self, doc: JSRef<Document>);\n fn load_url(self, href: DOMString);\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext);\n fn evaluate_js_with_result(self, code: 
&str) -> JSVal;\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal;\n}\n\n\nimpl<'a> WindowHelpers for JSRef<'a, Window> {\n fn evaluate_js_with_result(self, code: &str) -> JSVal {\n self.evaluate_script_with_result(code, \"\")\n }\n\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal {\n let global = self.reflector().get_jsobject();\n let code: Vec<u16> = code.as_slice().utf16_units().collect();\n let mut rval = UndefinedValue();\n let filename = filename.to_c_str();\n let cx = self.get_cx();\n\n with_compartment(cx, global, || {\n unsafe {\n if JS_EvaluateUCScript(cx, global, code.as_ptr(),\n code.len() as libc::c_uint,\n filename.as_ptr(), 1, &mut rval) == 0 {\n debug!(\"error evaluating JS string\");\n }\n rval\n }\n })\n }\n\n fn reflow(self) {\n self.page().damage();\n \/\/ FIXME This should probably be ReflowForQuery, not Display. All queries currently\n \/\/ currently rely on the display list, which means we can't destroy it by\n \/\/ doing a query reflow.\n self.page().reflow(ReflowForDisplay, self.control_chan.clone(), &*self.compositor);\n }\n\n fn flush_layout(self, goal: ReflowGoal) {\n self.page().flush_layout(goal);\n }\n\n fn wait_until_safe_to_modify_dom(self) {\n \/\/ FIXME: This disables concurrent layout while we are modifying the DOM, since\n \/\/ our current architecture is entirely unsafe in the presence of races.\n self.page().join_layout();\n }\n\n fn init_browser_context(self, doc: JSRef<Document>) {\n *self.browser_context.borrow_mut() = Some(BrowserContext::new(doc));\n }\n\n \/\/\/ Commence a new URL load which will either replace this window or scroll to a fragment.\n fn load_url(self, href: DOMString) {\n let base_url = self.page().get_url();\n debug!(\"current page url is {:?}\", base_url);\n let url = UrlParser::new().base_url(&base_url).parse(href.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let url = url.unwrap();\n let ScriptChan(ref script_chan) = self.script_chan;\n if href.as_slice().starts_with(\"#\") {\n script_chan.send(TriggerFragmentMsg(self.page.id, url));\n } else {\n script_chan.send(TriggerLoadMsg(self.page.id, LoadData::new(url)));\n }\n }\n\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext) {\n let this_value = self.reflector().get_jsobject();\n self.timers.fire_timer(timer_id, this_value, cx);\n }\n}\n\nimpl Window {\n pub fn new(cx: *mut JSContext,\n page: Rc<Page>,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n compositor: Box<ScriptListener+'static>,\n image_cache_task: ImageCacheTask)\n -> Temporary<Window> {\n let win = box Window {\n eventtarget: EventTarget::new_inherited(WindowTypeId),\n script_chan: script_chan,\n control_chan: control_chan,\n console: Default::default(),\n compositor: compositor,\n page: page,\n location: Default::default(),\n navigator: Default::default(),\n image_cache_task: image_cache_task,\n browser_context: DOMRefCell::new(None),\n performance: Default::default(),\n navigation_start: time::get_time().sec as u64,\n navigation_start_precise: time::precise_time_s(),\n screen: Default::default(),\n timers: TimerManager::new()\n };\n\n WindowBinding::Wrap(cx, win)\n }\n}\n<commit_msg>Don't force reflow on content changes<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::{DOMRefCell, Ref};\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::{OnErrorEventHandlerNonNull, EventHandlerNonNull};\nuse dom::bindings::codegen::Bindings::WindowBinding;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::error::{Fallible, InvalidCharacter};\nuse dom::bindings::global;\nuse dom::bindings::js::{MutNullableJS, JSRef, Temporary, OptionalSettable};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::browsercontext::BrowserContext;\nuse dom::console::Console;\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, WindowTypeId, EventTargetHelpers};\nuse dom::location::Location;\nuse dom::navigator::Navigator;\nuse dom::performance::Performance;\nuse dom::screen::Screen;\nuse layout_interface::ReflowGoal;\nuse page::Page;\nuse script_task::{ExitWindowMsg, ScriptChan, TriggerLoadMsg, TriggerFragmentMsg};\nuse script_task::FromWindow;\nuse script_traits::ScriptControlChan;\nuse timers::{TimerId, TimerManager};\n\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::LoadData;\nuse servo_net::image_cache_task::ImageCacheTask;\nuse servo_util::str::{DOMString,HTML_SPACE_CHARACTERS};\n\nuse js::jsapi::JS_EvaluateUCScript;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_GC, JS_GetRuntime};\nuse js::jsval::{JSVal, UndefinedValue};\nuse js::rust::with_compartment;\nuse url::{Url, UrlParser};\n\nuse libc;\nuse serialize::base64::{FromBase64, ToBase64, STANDARD};\nuse std::default::Default;\nuse std::rc::Rc;\nuse time;\n\n#[dom_struct]\npub struct Window {\n eventtarget: EventTarget,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n console: MutNullableJS<Console>,\n location: MutNullableJS<Location>,\n navigator: MutNullableJS<Navigator>,\n image_cache_task: ImageCacheTask,\n compositor: Box<ScriptListener+'static>,\n browser_context: DOMRefCell<Option<BrowserContext>>,\n page: Rc<Page>,\n performance: MutNullableJS<Performance>,\n navigation_start: u64,\n navigation_start_precise: f64,\n screen: MutNullableJS<Screen>,\n timers: TimerManager\n}\n\nimpl Window {\n pub fn get_cx(&self) -> *mut JSContext {\n let js_info = self.page().js_info();\n (*js_info.as_ref().unwrap().js_context).ptr\n }\n\n pub fn script_chan<'a>(&'a self) -> &'a ScriptChan {\n &self.script_chan\n }\n\n pub fn control_chan<'a>(&'a self) -> &'a ScriptControlChan {\n &self.control_chan\n }\n\n pub fn image_cache_task<'a>(&'a self) -> &'a ImageCacheTask {\n &self.image_cache_task\n }\n\n pub fn compositor<'a>(&'a self) -> &'a ScriptListener+'static {\n &*self.compositor\n }\n\n pub fn browser_context(&self) -> Ref<Option<BrowserContext>> {\n self.browser_context.borrow()\n }\n\n pub fn page<'a>(&'a self) -> &'a Page {\n &*self.page\n }\n\n pub fn navigation_start(&self) -> u64 {\n self.navigation_start\n }\n\n pub fn navigation_start_precise(&self) -> f64 {\n self.navigation_start_precise\n }\n\n pub fn get_url(&self) -> Url {\n self.page().get_url()\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_btoa(btoa: DOMString) -> Fallible<DOMString> {\n let input = btoa.as_slice();\n \/\/ \"The btoa() method must throw an InvalidCharacterError exception if\n \/\/ the method's first argument contains any character whose code point\n \/\/ is greater than U+00FF.\"\n if input.chars().any(|c: char| c > '\\u00FF') {\n Err(InvalidCharacter)\n } else {\n \/\/ \"Otherwise, the user agent must convert that argument 
to a\n \/\/ sequence of octets whose nth octet is the eight-bit\n \/\/ representation of the code point of the nth character of\n \/\/ the argument,\"\n let octets = input.chars().map(|c: char| c as u8).collect::<Vec<u8>>();\n\n \/\/ \"and then must apply the base64 algorithm to that sequence of\n \/\/ octets, and return the result. [RFC4648]\"\n Ok(octets.as_slice().to_base64(STANDARD))\n }\n}\n\n\/\/ http:\/\/www.whatwg.org\/html\/#atob\npub fn base64_atob(atob: DOMString) -> Fallible<DOMString> {\n \/\/ \"Let input be the string being parsed.\"\n let mut input = atob.as_slice();\n\n \/\/ \"Remove all space characters from input.\"\n \/\/ serialize::base64::from_base64 ignores \\r and \\n,\n \/\/ but it treats the other space characters as\n \/\/ invalid input.\n fn is_html_space(c: char) -> bool {\n HTML_SPACE_CHARACTERS.iter().any(|&m| m == c)\n }\n let without_spaces = input.chars()\n .filter(|&c| ! is_html_space(c))\n .collect::<String>();\n input = without_spaces.as_slice();\n\n \/\/ \"If the length of input divides by 4 leaving no remainder, then:\n \/\/ if input ends with one or two U+003D EQUALS SIGN (=) characters,\n \/\/ remove them from input.\"\n if input.len() % 4 == 0 {\n if input.ends_with(\"==\") {\n input = input.slice_to(input.len() - 2)\n } else if input.ends_with(\"=\") {\n input = input.slice_to(input.len() - 1)\n }\n }\n\n \/\/ \"If the length of input divides by 4 leaving a remainder of 1,\n \/\/ throw an InvalidCharacterError exception and abort these steps.\"\n if input.len() % 4 == 1 {\n return Err(InvalidCharacter)\n }\n\n \/\/ \"If input contains a character that is not in the following list of\n \/\/ characters and character ranges, throw an InvalidCharacterError\n \/\/ exception and abort these steps:\n \/\/\n \/\/ U+002B PLUS SIGN (+)\n \/\/ U+002F SOLIDUS (\/)\n \/\/ Alphanumeric ASCII characters\"\n if input.chars()\n .find(|&c| !(c == '+' || c == '\/' || c.is_alphanumeric()))\n .is_some() {\n return Err(InvalidCharacter)\n }\n\n match input.from_base64() {\n Ok(data) => Ok(data.iter().map(|&b| b as char).collect::<String>()),\n Err(..) 
=> Err(InvalidCharacter)\n }\n}\n\n\nimpl<'a> WindowMethods for JSRef<'a, Window> {\n fn Alert(self, s: DOMString) {\n \/\/ Right now, just print to the console\n println!(\"ALERT: {:s}\", s);\n }\n\n fn Close(self) {\n let ScriptChan(ref chan) = self.script_chan;\n chan.send(ExitWindowMsg(self.page.id.clone()));\n }\n\n fn Document(self) -> Temporary<Document> {\n let frame = self.page().frame();\n Temporary::new(frame.as_ref().unwrap().document.clone())\n }\n\n fn Location(self) -> Temporary<Location> {\n if self.location.get().is_none() {\n let page = self.page.clone();\n let location = Location::new(self, page);\n self.location.assign(Some(location));\n }\n self.location.get().unwrap()\n }\n\n fn Console(self) -> Temporary<Console> {\n if self.console.get().is_none() {\n let console = Console::new(&global::Window(self));\n self.console.assign(Some(console));\n }\n self.console.get().unwrap()\n }\n\n fn Navigator(self) -> Temporary<Navigator> {\n if self.navigator.get().is_none() {\n let navigator = Navigator::new(self);\n self.navigator.assign(Some(navigator));\n }\n self.navigator.get().unwrap()\n }\n\n fn SetTimeout(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n false, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearTimeout(self, handle: i32) {\n self.timers.clear_timeout_or_interval(handle);\n }\n\n fn SetInterval(self, _cx: *mut JSContext, callback: JSVal, timeout: i32) -> i32 {\n self.timers.set_timeout_or_interval(callback,\n timeout,\n true, \/\/ is_interval\n FromWindow(self.page.id.clone()),\n self.script_chan.clone())\n }\n\n fn ClearInterval(self, handle: i32) {\n self.ClearTimeout(handle);\n }\n\n fn Window(self) -> Temporary<Window> {\n Temporary::from_rooted(self)\n }\n\n fn Self(self) -> Temporary<Window> {\n self.Window()\n }\n\n \/\/ http:\/\/www.whatwg.org\/html\/#dom-frames\n fn Frames(self) -> Temporary<Window> {\n self.Window()\n }\n\n fn Parent(self) -> Temporary<Window> {\n \/\/TODO - Once we support iframes correctly this needs to return the parent frame\n self.Window()\n }\n\n fn Performance(self) -> Temporary<Performance> {\n if self.performance.get().is_none() {\n let performance = Performance::new(self);\n self.performance.assign(Some(performance));\n }\n self.performance.get().unwrap()\n }\n\n event_handler!(click, GetOnclick, SetOnclick)\n event_handler!(load, GetOnload, SetOnload)\n event_handler!(unload, GetOnunload, SetOnunload)\n error_event_handler!(error, GetOnerror, SetOnerror)\n\n fn Screen(self) -> Temporary<Screen> {\n if self.screen.get().is_none() {\n let screen = Screen::new(self);\n self.screen.assign(Some(screen));\n }\n self.screen.get().unwrap()\n }\n\n fn Debug(self, message: DOMString) {\n debug!(\"{:s}\", message);\n }\n\n fn Gc(self) {\n unsafe {\n JS_GC(JS_GetRuntime(self.get_cx()));\n }\n }\n\n fn Btoa(self, btoa: DOMString) -> Fallible<DOMString> {\n base64_btoa(btoa)\n }\n\n fn Atob(self, atob: DOMString) -> Fallible<DOMString> {\n base64_atob(atob)\n }\n}\n\nimpl Reflectable for Window {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.eventtarget.reflector()\n }\n}\n\npub trait WindowHelpers {\n fn reflow(self);\n fn flush_layout(self, goal: ReflowGoal);\n fn wait_until_safe_to_modify_dom(self);\n fn init_browser_context(self, doc: JSRef<Document>);\n fn load_url(self, href: DOMString);\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext);\n fn evaluate_js_with_result(self, code: 
&str) -> JSVal;\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal;\n}\n\n\nimpl<'a> WindowHelpers for JSRef<'a, Window> {\n fn evaluate_js_with_result(self, code: &str) -> JSVal {\n self.evaluate_script_with_result(code, \"\")\n }\n\n fn evaluate_script_with_result(self, code: &str, filename: &str) -> JSVal {\n let global = self.reflector().get_jsobject();\n let code: Vec<u16> = code.as_slice().utf16_units().collect();\n let mut rval = UndefinedValue();\n let filename = filename.to_c_str();\n let cx = self.get_cx();\n\n with_compartment(cx, global, || {\n unsafe {\n if JS_EvaluateUCScript(cx, global, code.as_ptr(),\n code.len() as libc::c_uint,\n filename.as_ptr(), 1, &mut rval) == 0 {\n debug!(\"error evaluating JS string\");\n }\n rval\n }\n })\n }\n\n fn reflow(self) {\n self.page().damage();\n }\n\n fn flush_layout(self, goal: ReflowGoal) {\n self.page().flush_layout(goal);\n }\n\n fn wait_until_safe_to_modify_dom(self) {\n \/\/ FIXME: This disables concurrent layout while we are modifying the DOM, since\n \/\/ our current architecture is entirely unsafe in the presence of races.\n self.page().join_layout();\n }\n\n fn init_browser_context(self, doc: JSRef<Document>) {\n *self.browser_context.borrow_mut() = Some(BrowserContext::new(doc));\n }\n\n \/\/\/ Commence a new URL load which will either replace this window or scroll to a fragment.\n fn load_url(self, href: DOMString) {\n let base_url = self.page().get_url();\n debug!(\"current page url is {:?}\", base_url);\n let url = UrlParser::new().base_url(&base_url).parse(href.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let url = url.unwrap();\n let ScriptChan(ref script_chan) = self.script_chan;\n if href.as_slice().starts_with(\"#\") {\n script_chan.send(TriggerFragmentMsg(self.page.id, url));\n } else {\n script_chan.send(TriggerLoadMsg(self.page.id, LoadData::new(url)));\n }\n }\n\n fn handle_fire_timer(self, timer_id: TimerId, cx: *mut JSContext) {\n let this_value = self.reflector().get_jsobject();\n self.timers.fire_timer(timer_id, this_value, cx);\n }\n}\n\nimpl Window {\n pub fn new(cx: *mut JSContext,\n page: Rc<Page>,\n script_chan: ScriptChan,\n control_chan: ScriptControlChan,\n compositor: Box<ScriptListener+'static>,\n image_cache_task: ImageCacheTask)\n -> Temporary<Window> {\n let win = box Window {\n eventtarget: EventTarget::new_inherited(WindowTypeId),\n script_chan: script_chan,\n control_chan: control_chan,\n console: Default::default(),\n compositor: compositor,\n page: page,\n location: Default::default(),\n navigator: Default::default(),\n image_cache_task: image_cache_task,\n browser_context: DOMRefCell::new(None),\n performance: Default::default(),\n navigation_start: time::get_time().sec as u64,\n navigation_start_precise: time::precise_time_s(),\n screen: Default::default(),\n timers: TimerManager::new()\n };\n\n WindowBinding::Wrap(cx, win)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This module implements the central application object.\n\nuse std::fmt;\nuse std::collections::HashMap;\nuse std::error::Error;\nuse std::fs::File;\nuse std::path::PathBuf;\n\nuse hyper;\nuse hyper::server::Request as HTTPRequest;\nuse hyper::server::Response as HTTPResponse;\n\nuse types::{\n PencilValue,\n PenResponse,\n\n PencilError,\n PenHTTPError,\n PenUserError,\n\n UserError,\n PencilResult,\n ViewFunc,\n HTTPErrorHandler,\n UserErrorHandler,\n BeforeRequestFunc,\n AfterRequestFunc,\n TeardownRequestFunc,\n};\nuse wrappers::{\n Request,\n Response,\n};\nuse 
helpers::{PathBound, send_static_file};\nuse helpers;\nuse config;\nuse logging;\nuse serving::run_server;\nuse routing::{Map, Rule};\nuse testing::PencilClient;\nuse errors::{HTTPError, NotFound, InternalServerError};\n\n\n\/\/\/ The pencil type. It acts as the central application object. Once it is created it\n\/\/\/ will act as a central registry for the view functions, the URL rules and much more.\npub struct Pencil {\n \/\/\/ The path where your application locates.\n pub root_path: String,\n \/\/\/ The name of the application. By default it's guessed from the root path.\n pub name: String,\n \/\/\/ The folder with static files that should be served at `static_url_path`.\n \/\/\/ Defaults to the `\"static\"` folder in the root path of the application.\n pub static_folder: String,\n \/\/\/ The url path for the static files on the web, defaults to be `\"\/static\"`.\n pub static_url_path: String,\n \/\/\/ The folder that contains the templates that should be used for the application.\n \/\/\/ Defaults to `''templates''` folder in the root path of the application.\n pub template_folder: String,\n pub config: config::Config,\n pub url_map: Map,\n \/\/ A dictionary of all view functions registered.\n view_functions: HashMap<String, ViewFunc>,\n before_request_funcs: Vec<BeforeRequestFunc>,\n after_request_funcs: Vec<AfterRequestFunc>,\n teardown_request_funcs: Vec<TeardownRequestFunc>,\n http_error_handlers: HashMap<isize, HTTPErrorHandler>,\n user_error_handlers: HashMap<&'static str, UserErrorHandler>,\n}\n\nimpl Pencil {\n \/\/\/ Create a new pencil object. It is passed the root path of your application.\n \/\/\/ The root path is used to resolve resources from inside it, for more information\n \/\/\/ about resource loading, see method `open_resource`.\n \/\/\/\n \/\/\/ Usually you create a pencil object in your main function like this:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::Pencil;\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/myapp\");\n \/\/\/ }\n \/\/\/ ```\n pub fn new(root_path: &str) -> Pencil {\n Pencil {\n root_path: root_path.to_string(),\n name: root_path.to_string(),\n static_folder: String::from(\"static\"),\n static_url_path: String::from(\"\/static\"),\n template_folder: String::from(\"templates\"),\n config: config::Config::new(),\n url_map: Map::new(),\n view_functions: HashMap::new(),\n before_request_funcs: vec![],\n after_request_funcs: vec![],\n teardown_request_funcs: vec![],\n http_error_handlers: HashMap::new(),\n user_error_handlers: HashMap::new(),\n }\n }\n\n \/\/\/ Set global log level based on the application's debug flag.\n pub fn set_log_level(&self) {\n logging::set_log_level(self);\n }\n\n \/\/\/ A shortcut that is used to register a view function for a given\n \/\/\/ URL rule.\n pub fn route(&mut self, rule: &str, methods: &[&str], endpoint: &str, view_func: ViewFunc) {\n self.add_url_rule(rule, methods, endpoint, view_func);\n }\n\n \/\/\/ Connects a URL rule.\n fn add_url_rule(&mut self, rule: &str, methods: &[&str], endpoint: &str, view_func: ViewFunc) {\n let url_rule = Rule::new(rule, methods, endpoint);\n self.url_map.add(url_rule);\n self.view_functions.insert(endpoint.to_string(), view_func);\n }\n\n \/\/\/ Enables static file handling.\n pub fn enable_static_file_handle(&mut self) {\n let mut rule = self.static_url_path.clone();\n rule = rule + \"\/([^\/].*?)\";\n self.add_url_rule(&rule, &[\"GET\"], \"static\", send_static_file);\n }\n\n \/\/\/ Registers a function to run before each request.\n pub fn 
before_request(&mut self, f: BeforeRequestFunc) {\n self.before_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function to run after each request. Your function\n \/\/\/ must take a response object and modify it.\n pub fn after_request(&mut self, f: AfterRequestFunc) {\n self.after_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function to run at the end of each request,\n \/\/\/ regardless of whether there was an error or not.\n pub fn teardown_request(&mut self, f: TeardownRequestFunc) {\n self.teardown_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function as one http error handler.\n fn register_http_error_handler(&mut self, status_code: isize, f: HTTPErrorHandler) {\n self.http_error_handlers.insert(status_code, f);\n }\n\n \/\/\/ Registers a function as one user error handler.\n fn register_user_error_handler(&mut self, error_desc: &'static str, f: UserErrorHandler) {\n self.user_error_handlers.insert(error_desc, f);\n }\n\n \/\/\/ Registers a function as one http error handler. Example:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::{Pencil, PencilResult, Response, PenResponse};\n \/\/\/ use pencil::HTTPError;\n \/\/\/\n \/\/\/\n \/\/\/ fn page_not_found(error: HTTPError) -> PencilResult {\n \/\/\/ let mut response = Response::new(String::from(\"The page does not exist\"));\n \/\/\/ response.status_code = 404;\n \/\/\/ return Ok(PenResponse(response));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/demo\");\n \/\/\/ app.httperrorhandler(404, page_not_found);\n \/\/\/ }\n \/\/\/ ```\n pub fn httperrorhandler(&mut self, status_code: isize, f: HTTPErrorHandler) {\n self.register_http_error_handler(status_code, f);\n }\n\n \/\/\/ Registers a function as one user error handler. There are two ways to handle\n \/\/\/ user errors currently, you can do it in your own view like this:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::Request;\n \/\/\/ use pencil::{PencilResult, PenString};\n \/\/\/\n \/\/\/\n \/\/\/ #[derive(Clone, Copy)]\n \/\/\/ struct MyErr(isize);\n \/\/\/\n \/\/\/\n \/\/\/ fn some_operation() -> Result<isize, MyErr> {\n \/\/\/ return Err(MyErr(10));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn my_err_handler(_: MyErr) -> PencilResult {\n \/\/\/ Ok(PenString(String::from(\"My err occurred!\")))\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn hello(_: Request) -> PencilResult {\n \/\/\/ match some_operation() {\n \/\/\/ Ok(_) => Ok(PenString(String::from(\"Hello!\"))),\n \/\/\/ Err(e) => my_err_handler(e),\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ The problem with this is that you have to do it in all of your views, it brings\n \/\/\/ a lot of redundance, so pencil provides another solution, currently I still\n \/\/\/ haven't got any better idea on how to store user error handlers, this feature is\n \/\/\/ really just experimental, if you have any good idea, please wake me up. 
Here is\n \/\/\/ one simple example:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use std::convert;\n \/\/\/\n \/\/\/ use pencil::Request;\n \/\/\/ use pencil::{Pencil, PencilResult, PenString};\n \/\/\/ use pencil::{PencilError, PenUserError, UserError};\n \/\/\/\n \/\/\/\n \/\/\/ #[derive(Clone, Copy)]\n \/\/\/ pub struct MyErr(isize);\n \/\/\/\n \/\/\/ impl convert::From<MyErr> for PencilError {\n \/\/\/ fn from(err: MyErr) -> PencilError {\n \/\/\/ let user_error = UserError::new(\"MyErr\");\n \/\/\/ return PenUserError(user_error);\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn my_err_handler(_: UserError) -> PencilResult {\n \/\/\/ Ok(PenString(String::from(\"My err occurred!\")))\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn some_operation() -> Result<String, MyErr> {\n \/\/\/ return Err(MyErr(10));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn hello(_: Request) -> PencilResult {\n \/\/\/ let rv = try!(some_operation());\n \/\/\/ return Ok(PenString(rv));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/demo\");\n \/\/\/ \/\/ Use error description as key to store handlers, really ugly...\n \/\/\/ app.usererrorhandler(\"MyErr\", my_err_handler);\n \/\/\/ }\n \/\/\/ ```\n pub fn usererrorhandler(&mut self, error_desc: &'static str, f: UserErrorHandler) {\n self.register_user_error_handler(error_desc, f);\n }\n\n \/\/\/ Creates a test client for this application, you can use it\n \/\/\/ like this:\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ let client = app.test_client();\n \/\/\/ let response = client.get('\/');\n \/\/\/ assert!(response.code, 200);\n \/\/\/ ```\n pub fn test_client(&self) -> PencilClient {\n PencilClient::new(self)\n }\n\n \/\/\/ Called before the actual request dispatching, you can return value\n \/\/\/ from here and stop the further request handling.\n fn preprocess_request(&self, request: &Request) -> Option<PencilResult> {\n let mut result: Option<PencilResult>;\n for &func in self.before_request_funcs.iter() {\n result = func(request);\n if result.is_some() {\n return result;\n }\n }\n return None;\n }\n\n \/\/\/ Does the request dispatching. 
Matches the URL and returns the return\n \/\/\/ value of the view.\n fn dispatch_request(&self, request: Request) -> PencilResult {\n if request.routing_error.is_some() {\n return Err(PenHTTPError(request.routing_error.unwrap()));\n }\n match self.view_functions.get(&request.endpoint().unwrap()) {\n Some(&view_func) => {\n return view_func(request);\n },\n None => {\n return Err(PenHTTPError(NotFound));\n }\n }\n }\n\n \/\/\/ Converts the return value from a view function to a real\n \/\/\/ response object.\n fn make_response(&self, rv: PencilValue) -> Response {\n return helpers::make_response(rv);\n }\n\n \/\/\/ Modify the response object before it's sent to the HTTP server.\n fn process_response(&self, response: &mut Response) {\n \/\/ TODO: reverse order\n for &func in self.after_request_funcs.iter() {\n func(response);\n }\n }\n\n \/\/\/ Called after the actual request dispatching.\n fn do_teardown_request(&self, e: Option<&PencilError>) {\n \/\/ TODO: reverse order\n for &func in self.teardown_request_funcs.iter() {\n func(e);\n }\n }\n\n \/\/\/ This method is called whenever an error occurs that should be handled.\n fn handle_all_error(&self, e: PencilError) -> PencilResult {\n match e {\n PenHTTPError(e) => self.handle_http_error(e),\n PenUserError(e) => self.handle_user_error(e),\n }\n }\n\n \/\/\/ Handles an User error.\n fn handle_user_error(&self, e: UserError) -> PencilResult {\n match self.user_error_handlers.get(e.description()) {\n Some(&handler) => handler(e),\n None => Err(PenUserError(e)),\n }\n }\n\n \/\/\/ Handles an HTTP error.\n fn handle_http_error(&self, e: HTTPError) -> PencilResult {\n match self.http_error_handlers.get(&e.code()) {\n Some(&handler) => handler(e),\n None => Ok(PenResponse(e.to_response())),\n }\n }\n\n \/\/\/ Default error handing that kicks in when an error occurs that is not\n \/\/\/ handled.\n fn handle_error(&self, e: &PencilError) -> PencilValue {\n self.log_error(e);\n let internal_server_error = InternalServerError;\n match self.http_error_handlers.get(&500) {\n Some(&handler) => {\n match handler(internal_server_error) {\n Ok(value) => value,\n Err(_) => {\n let e = InternalServerError;\n PenResponse(e.to_response())\n }\n }\n },\n None => {\n let e = InternalServerError;\n PenResponse(e.to_response())\n }\n }\n }\n\n \/\/\/ Logs an error.\n fn log_error(&self, e: &PencilError) {\n error!(\"Error: {}\", e.description());\n }\n\n \/\/\/ Dispatches the request and performs request pre and postprocessing\n \/\/\/ as well as HTTP error handling.\n fn full_dispatch_request(&self, request: Request) -> Result<Response, PencilError> {\n let result = match self.preprocess_request(&request) {\n Some(result) => result,\n None => self.dispatch_request(request),\n };\n let rv = match result {\n Ok(value) => Ok(value),\n Err(e) => self.handle_all_error(e),\n };\n match rv {\n Ok(value) => {\n let mut response = self.make_response(value);\n self.process_response(&mut response);\n Ok(response)\n },\n Err(e) => Err(e),\n }\n }\n\n \/\/\/ The actual application handler.\n pub fn handle_request(&self, mut request: Request) -> Response {\n request.match_request();\n match self.full_dispatch_request(request) {\n Ok(response) => {\n self.do_teardown_request(None);\n return response;\n },\n Err(e) => {\n let response = self.make_response(self.handle_error(&e));\n self.do_teardown_request(Some(&e));\n return response;\n }\n };\n }\n\n \/\/\/ Runs the application on a local development server.\n pub fn run(self) {\n run_server(self);\n }\n}\n\nimpl 
hyper::server::Handler for Pencil {\n fn handle(&self, req: HTTPRequest, res: HTTPResponse) {\n let request = Request::new(self, req);\n let request_method = request.method();\n let response = self.handle_request(request);\n response.write(request_method, res);\n }\n}\n\nimpl PathBound for Pencil {\n fn open_resource(&self, resource: &str) -> File {\n let mut pathbuf = PathBuf::from(&self.root_path);\n pathbuf.push(resource);\n return File::open(&pathbuf.as_path()).unwrap();\n }\n}\n\nimpl fmt::Display for Pencil {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"<Pencil application {}>\", self.name)\n }\n}\n\nimpl fmt::Debug for Pencil {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"<Pencil application {}>\", self.name)\n }\n}\n<commit_msg>Make register error handler public<commit_after>\/\/ This module implements the central application object.\n\nuse std::fmt;\nuse std::collections::HashMap;\nuse std::error::Error;\nuse std::fs::File;\nuse std::path::PathBuf;\n\nuse hyper;\nuse hyper::server::Request as HTTPRequest;\nuse hyper::server::Response as HTTPResponse;\n\nuse types::{\n PencilValue,\n PenResponse,\n\n PencilError,\n PenHTTPError,\n PenUserError,\n\n UserError,\n PencilResult,\n ViewFunc,\n HTTPErrorHandler,\n UserErrorHandler,\n BeforeRequestFunc,\n AfterRequestFunc,\n TeardownRequestFunc,\n};\nuse wrappers::{\n Request,\n Response,\n};\nuse helpers::{PathBound, send_static_file};\nuse helpers;\nuse config;\nuse logging;\nuse serving::run_server;\nuse routing::{Map, Rule};\nuse testing::PencilClient;\nuse errors::{HTTPError, NotFound, InternalServerError};\n\n\n\/\/\/ The pencil type. It acts as the central application object. Once it is created it\n\/\/\/ will act as a central registry for the view functions, the URL rules and much more.\npub struct Pencil {\n \/\/\/ The path where your application locates.\n pub root_path: String,\n \/\/\/ The name of the application. By default it's guessed from the root path.\n pub name: String,\n \/\/\/ The folder with static files that should be served at `static_url_path`.\n \/\/\/ Defaults to the `\"static\"` folder in the root path of the application.\n pub static_folder: String,\n \/\/\/ The url path for the static files on the web, defaults to be `\"\/static\"`.\n pub static_url_path: String,\n \/\/\/ The folder that contains the templates that should be used for the application.\n \/\/\/ Defaults to `''templates''` folder in the root path of the application.\n pub template_folder: String,\n pub config: config::Config,\n pub url_map: Map,\n \/\/ A dictionary of all view functions registered.\n view_functions: HashMap<String, ViewFunc>,\n before_request_funcs: Vec<BeforeRequestFunc>,\n after_request_funcs: Vec<AfterRequestFunc>,\n teardown_request_funcs: Vec<TeardownRequestFunc>,\n http_error_handlers: HashMap<isize, HTTPErrorHandler>,\n user_error_handlers: HashMap<&'static str, UserErrorHandler>,\n}\n\nimpl Pencil {\n \/\/\/ Create a new pencil object. 
It is passed the root path of your application.\n \/\/\/ The root path is used to resolve resources from inside it, for more information\n \/\/\/ about resource loading, see method `open_resource`.\n \/\/\/\n \/\/\/ Usually you create a pencil object in your main function like this:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::Pencil;\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/myapp\");\n \/\/\/ }\n \/\/\/ ```\n pub fn new(root_path: &str) -> Pencil {\n Pencil {\n root_path: root_path.to_string(),\n name: root_path.to_string(),\n static_folder: String::from(\"static\"),\n static_url_path: String::from(\"\/static\"),\n template_folder: String::from(\"templates\"),\n config: config::Config::new(),\n url_map: Map::new(),\n view_functions: HashMap::new(),\n before_request_funcs: vec![],\n after_request_funcs: vec![],\n teardown_request_funcs: vec![],\n http_error_handlers: HashMap::new(),\n user_error_handlers: HashMap::new(),\n }\n }\n\n \/\/\/ Set global log level based on the application's debug flag.\n pub fn set_log_level(&self) {\n logging::set_log_level(self);\n }\n\n \/\/\/ A shortcut that is used to register a view function for a given\n \/\/\/ URL rule.\n pub fn route(&mut self, rule: &str, methods: &[&str], endpoint: &str, view_func: ViewFunc) {\n self.add_url_rule(rule, methods, endpoint, view_func);\n }\n\n \/\/\/ Connects a URL rule.\n fn add_url_rule(&mut self, rule: &str, methods: &[&str], endpoint: &str, view_func: ViewFunc) {\n let url_rule = Rule::new(rule, methods, endpoint);\n self.url_map.add(url_rule);\n self.view_functions.insert(endpoint.to_string(), view_func);\n }\n\n \/\/\/ Enables static file handling.\n pub fn enable_static_file_handle(&mut self) {\n let mut rule = self.static_url_path.clone();\n rule = rule + \"\/([^\/].*?)\";\n self.add_url_rule(&rule, &[\"GET\"], \"static\", send_static_file);\n }\n\n \/\/\/ Registers a function to run before each request.\n pub fn before_request(&mut self, f: BeforeRequestFunc) {\n self.before_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function to run after each request. Your function\n \/\/\/ must take a response object and modify it.\n pub fn after_request(&mut self, f: AfterRequestFunc) {\n self.after_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function to run at the end of each request,\n \/\/\/ regardless of whether there was an error or not.\n pub fn teardown_request(&mut self, f: TeardownRequestFunc) {\n self.teardown_request_funcs.push(f);\n }\n\n \/\/\/ Registers a function as one http error handler.\n \/\/\/ Same to `httperrorhandler`.\n pub fn register_http_error_handler(&mut self, status_code: isize, f: HTTPErrorHandler) {\n self.http_error_handlers.insert(status_code, f);\n }\n\n \/\/\/ Registers a function as one user error handler.\n \/\/\/ Same to `usererrorhandler`.\n pub fn register_user_error_handler(&mut self, error_desc: &'static str, f: UserErrorHandler) {\n self.user_error_handlers.insert(error_desc, f);\n }\n\n \/\/\/ Registers a function as one http error handler. 
Example:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::{Pencil, PencilResult, Response, PenResponse};\n \/\/\/ use pencil::HTTPError;\n \/\/\/\n \/\/\/\n \/\/\/ fn page_not_found(error: HTTPError) -> PencilResult {\n \/\/\/ let mut response = Response::new(String::from(\"The page does not exist\"));\n \/\/\/ response.status_code = 404;\n \/\/\/ return Ok(PenResponse(response));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/demo\");\n \/\/\/ app.httperrorhandler(404, page_not_found);\n \/\/\/ }\n \/\/\/ ```\n pub fn httperrorhandler(&mut self, status_code: isize, f: HTTPErrorHandler) {\n self.register_http_error_handler(status_code, f);\n }\n\n \/\/\/ Registers a function as one user error handler. There are two ways to handle\n \/\/\/ user errors currently, you can do it in your own view like this:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use pencil::Request;\n \/\/\/ use pencil::{PencilResult, PenString};\n \/\/\/\n \/\/\/\n \/\/\/ #[derive(Clone, Copy)]\n \/\/\/ struct MyErr(isize);\n \/\/\/\n \/\/\/\n \/\/\/ fn some_operation() -> Result<isize, MyErr> {\n \/\/\/ return Err(MyErr(10));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn my_err_handler(_: MyErr) -> PencilResult {\n \/\/\/ Ok(PenString(String::from(\"My err occurred!\")))\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn hello(_: Request) -> PencilResult {\n \/\/\/ match some_operation() {\n \/\/\/ Ok(_) => Ok(PenString(String::from(\"Hello!\"))),\n \/\/\/ Err(e) => my_err_handler(e),\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ The problem with this is that you have to do it in all of your views, it brings\n \/\/\/ a lot of redundance, so pencil provides another solution, currently I still\n \/\/\/ haven't got any better idea on how to store user error handlers, this feature is\n \/\/\/ really just experimental, if you have any good idea, please wake me up. 
Here is\n \/\/\/ one simple example:\n \/\/\/\n \/\/\/ ```rust,no_run\n \/\/\/ use std::convert;\n \/\/\/\n \/\/\/ use pencil::Request;\n \/\/\/ use pencil::{Pencil, PencilResult, PenString};\n \/\/\/ use pencil::{PencilError, PenUserError, UserError};\n \/\/\/\n \/\/\/\n \/\/\/ #[derive(Clone, Copy)]\n \/\/\/ pub struct MyErr(isize);\n \/\/\/\n \/\/\/ impl convert::From<MyErr> for PencilError {\n \/\/\/ fn from(err: MyErr) -> PencilError {\n \/\/\/ let user_error = UserError::new(\"MyErr\");\n \/\/\/ return PenUserError(user_error);\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn my_err_handler(_: UserError) -> PencilResult {\n \/\/\/ Ok(PenString(String::from(\"My err occurred!\")))\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn some_operation() -> Result<String, MyErr> {\n \/\/\/ return Err(MyErr(10));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn hello(_: Request) -> PencilResult {\n \/\/\/ let rv = try!(some_operation());\n \/\/\/ return Ok(PenString(rv));\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ fn main() {\n \/\/\/ let mut app = Pencil::new(\"\/web\/demo\");\n \/\/\/ \/\/ Use error description as key to store handlers, really ugly...\n \/\/\/ app.usererrorhandler(\"MyErr\", my_err_handler);\n \/\/\/ }\n \/\/\/ ```\n pub fn usererrorhandler(&mut self, error_desc: &'static str, f: UserErrorHandler) {\n self.register_user_error_handler(error_desc, f);\n }\n\n \/\/\/ Creates a test client for this application, you can use it\n \/\/\/ like this:\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ let client = app.test_client();\n \/\/\/ let response = client.get('\/');\n \/\/\/ assert!(response.code, 200);\n \/\/\/ ```\n pub fn test_client(&self) -> PencilClient {\n PencilClient::new(self)\n }\n\n \/\/\/ Called before the actual request dispatching, you can return value\n \/\/\/ from here and stop the further request handling.\n fn preprocess_request(&self, request: &Request) -> Option<PencilResult> {\n let mut result: Option<PencilResult>;\n for &func in self.before_request_funcs.iter() {\n result = func(request);\n if result.is_some() {\n return result;\n }\n }\n return None;\n }\n\n \/\/\/ Does the request dispatching. 
Matches the URL and returns the return\n \/\/\/ value of the view.\n fn dispatch_request(&self, request: Request) -> PencilResult {\n if request.routing_error.is_some() {\n return Err(PenHTTPError(request.routing_error.unwrap()));\n }\n match self.view_functions.get(&request.endpoint().unwrap()) {\n Some(&view_func) => {\n return view_func(request);\n },\n None => {\n return Err(PenHTTPError(NotFound));\n }\n }\n }\n\n \/\/\/ Converts the return value from a view function to a real\n \/\/\/ response object.\n fn make_response(&self, rv: PencilValue) -> Response {\n return helpers::make_response(rv);\n }\n\n \/\/\/ Modify the response object before it's sent to the HTTP server.\n fn process_response(&self, response: &mut Response) {\n \/\/ TODO: reverse order\n for &func in self.after_request_funcs.iter() {\n func(response);\n }\n }\n\n \/\/\/ Called after the actual request dispatching.\n fn do_teardown_request(&self, e: Option<&PencilError>) {\n \/\/ TODO: reverse order\n for &func in self.teardown_request_funcs.iter() {\n func(e);\n }\n }\n\n \/\/\/ This method is called whenever an error occurs that should be handled.\n fn handle_all_error(&self, e: PencilError) -> PencilResult {\n match e {\n PenHTTPError(e) => self.handle_http_error(e),\n PenUserError(e) => self.handle_user_error(e),\n }\n }\n\n \/\/\/ Handles an User error.\n fn handle_user_error(&self, e: UserError) -> PencilResult {\n match self.user_error_handlers.get(e.description()) {\n Some(&handler) => handler(e),\n None => Err(PenUserError(e)),\n }\n }\n\n \/\/\/ Handles an HTTP error.\n fn handle_http_error(&self, e: HTTPError) -> PencilResult {\n match self.http_error_handlers.get(&e.code()) {\n Some(&handler) => handler(e),\n None => Ok(PenResponse(e.to_response())),\n }\n }\n\n \/\/\/ Default error handing that kicks in when an error occurs that is not\n \/\/\/ handled.\n fn handle_error(&self, e: &PencilError) -> PencilValue {\n self.log_error(e);\n let internal_server_error = InternalServerError;\n match self.http_error_handlers.get(&500) {\n Some(&handler) => {\n match handler(internal_server_error) {\n Ok(value) => value,\n Err(_) => {\n let e = InternalServerError;\n PenResponse(e.to_response())\n }\n }\n },\n None => {\n let e = InternalServerError;\n PenResponse(e.to_response())\n }\n }\n }\n\n \/\/\/ Logs an error.\n fn log_error(&self, e: &PencilError) {\n error!(\"Error: {}\", e.description());\n }\n\n \/\/\/ Dispatches the request and performs request pre and postprocessing\n \/\/\/ as well as HTTP error handling.\n fn full_dispatch_request(&self, request: Request) -> Result<Response, PencilError> {\n let result = match self.preprocess_request(&request) {\n Some(result) => result,\n None => self.dispatch_request(request),\n };\n let rv = match result {\n Ok(value) => Ok(value),\n Err(e) => self.handle_all_error(e),\n };\n match rv {\n Ok(value) => {\n let mut response = self.make_response(value);\n self.process_response(&mut response);\n Ok(response)\n },\n Err(e) => Err(e),\n }\n }\n\n \/\/\/ The actual application handler.\n pub fn handle_request(&self, mut request: Request) -> Response {\n request.match_request();\n match self.full_dispatch_request(request) {\n Ok(response) => {\n self.do_teardown_request(None);\n return response;\n },\n Err(e) => {\n let response = self.make_response(self.handle_error(&e));\n self.do_teardown_request(Some(&e));\n return response;\n }\n };\n }\n\n \/\/\/ Runs the application on a local development server.\n pub fn run(self) {\n run_server(self);\n }\n}\n\nimpl 
hyper::server::Handler for Pencil {\n fn handle(&self, req: HTTPRequest, res: HTTPResponse) {\n let request = Request::new(self, req);\n let request_method = request.method();\n let response = self.handle_request(request);\n response.write(request_method, res);\n }\n}\n\nimpl PathBound for Pencil {\n fn open_resource(&self, resource: &str) -> File {\n let mut pathbuf = PathBuf::from(&self.root_path);\n pathbuf.push(resource);\n return File::open(&pathbuf.as_path()).unwrap();\n }\n}\n\nimpl fmt::Display for Pencil {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"<Pencil application {}>\", self.name)\n }\n}\n\nimpl fmt::Debug for Pencil {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"<Pencil application {}>\", self.name)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io;\nuse std::fs;\nuse std::path::{Path, PathBuf};\nuse std::slice::Iter as SliceIter;\n\nuse feature::Git;\nuse file::{File, fields};\n\n\n\/\/\/ A **Dir** provides a cached list of the file paths in a directory that's\n\/\/\/ being listed.\n\/\/\/\n\/\/\/ This object gets passed to the Files themselves, in order for them to\n\/\/\/ check the existence of surrounding files, then highlight themselves\n\/\/\/ accordingly. (See `File#get_source_files`)\npub struct Dir {\n contents: Vec<PathBuf>,\n path: PathBuf,\n git: Option<Git>,\n}\n\nimpl Dir {\n\n \/\/\/ Create a new Dir object filled with all the files in the directory\n \/\/\/ pointed to by the given path. Fails if the directory can't be read, or\n \/\/\/ isn't actually a directory.\n pub fn readdir(path: &Path, git: bool) -> io::Result<Dir> {\n fs::read_dir(path).map(|dir_obj| Dir {\n contents: dir_obj.map(|entry| entry.unwrap().path()).collect(),\n path: path.to_path_buf(),\n git: if git { Git::scan(path).ok() } else { None },\n })\n }\n\n \/\/\/ Produce a vector of File objects from an initialised directory,\n \/\/\/ printing out an error if any of the Files fail to be created.\n \/\/\/\n \/\/\/ Passing in `recurse` means that any directories will be scanned for\n \/\/\/ their contents, as well.\n pub fn files<'dir>(&'dir self) -> Files<'dir> {\n Files {\n inner: self.contents.iter(),\n dir: &self,\n }\n }\n\n \/\/\/ Whether this directory contains a file with the given path.\n pub fn contains(&self, path: &Path) -> bool {\n self.contents.iter().any(|ref p| p.as_path() == path)\n }\n\n \/\/\/ Append a path onto the path specified by this directory.\n pub fn join(&self, child: &Path) -> PathBuf {\n self.path.join(child)\n }\n\n \/\/\/ Return whether there's a Git repository on or above this directory.\n pub fn has_git_repo(&self) -> bool {\n self.git.is_some()\n }\n\n \/\/\/ Get a string describing the Git status of the given file.\n pub fn git_status(&self, path: &Path, prefix_lookup: bool) -> fields::Git {\n match (&self.git, prefix_lookup) {\n (&Some(ref git), false) => git.status(path),\n (&Some(ref git), true) => git.dir_status(path),\n (&None, _) => fields::Git::empty()\n }\n }\n}\n\n\npub struct Files<'dir> {\n inner: SliceIter<'dir, PathBuf>,\n dir: &'dir Dir,\n}\n\nimpl<'dir> Iterator for Files<'dir> {\n type Item = Result<File<'dir>, (PathBuf, io::Error)>;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.inner.next().map(|path| File::from_path(path, Some(self.dir)).map_err(|t| (path.clone(), t)))\n }\n}<commit_msg>Propagate errors that occur during readdir<commit_after>use std::io;\nuse std::fs;\nuse std::path::{Path, PathBuf};\nuse std::slice::Iter as SliceIter;\n\nuse feature::Git;\nuse 
file::{File, fields};\n\n\n\/\/\/ A **Dir** provides a cached list of the file paths in a directory that's\n\/\/\/ being listed.\n\/\/\/\n\/\/\/ This object gets passed to the Files themselves, in order for them to\n\/\/\/ check the existence of surrounding files, then highlight themselves\n\/\/\/ accordingly. (See `File#get_source_files`)\npub struct Dir {\n contents: Vec<PathBuf>,\n path: PathBuf,\n git: Option<Git>,\n}\n\nimpl Dir {\n\n \/\/\/ Create a new Dir object filled with all the files in the directory\n \/\/\/ pointed to by the given path. Fails if the directory can't be read, or\n \/\/\/ isn't actually a directory, or if there's an IO error that occurs\n \/\/\/ while scanning.\n pub fn readdir(path: &Path, git: bool) -> io::Result<Dir> {\n let reader = try!(fs::read_dir(path));\n let contents = try!(reader.map(|e| e.map(|e| e.path())).collect());\n\n Ok(Dir {\n contents: contents,\n path: path.to_path_buf(),\n git: if git { Git::scan(path).ok() } else { None },\n })\n }\n\n \/\/\/ Produce an iterator of IO results of trying to read all the files in\n \/\/\/ this directory.\n pub fn files<'dir>(&'dir self) -> Files<'dir> {\n Files {\n inner: self.contents.iter(),\n dir: &self,\n }\n }\n\n \/\/\/ Whether this directory contains a file with the given path.\n pub fn contains(&self, path: &Path) -> bool {\n self.contents.iter().any(|ref p| p.as_path() == path)\n }\n\n \/\/\/ Append a path onto the path specified by this directory.\n pub fn join(&self, child: &Path) -> PathBuf {\n self.path.join(child)\n }\n\n \/\/\/ Return whether there's a Git repository on or above this directory.\n pub fn has_git_repo(&self) -> bool {\n self.git.is_some()\n }\n\n \/\/\/ Get a string describing the Git status of the given file.\n pub fn git_status(&self, path: &Path, prefix_lookup: bool) -> fields::Git {\n match (&self.git, prefix_lookup) {\n (&Some(ref git), false) => git.status(path),\n (&Some(ref git), true) => git.dir_status(path),\n (&None, _) => fields::Git::empty()\n }\n }\n}\n\n\npub struct Files<'dir> {\n inner: SliceIter<'dir, PathBuf>,\n dir: &'dir Dir,\n}\n\nimpl<'dir> Iterator for Files<'dir> {\n type Item = Result<File<'dir>, (PathBuf, io::Error)>;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.inner.next().map(|path| File::from_path(path, Some(self.dir)).map_err(|t| (path.clone(), t)))\n }\n}<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"ftp\"]\n#![crate_type = \"lib\"]\n#![feature(phase, slicing_syntax)]\n\nextern crate regex;\n\n#[phase(plugin)] extern crate regex_macros;\n\nuse std::io::{IoResult, TcpStream, BufferedReader, BufferedWriter, MemReader, EndOfFile};\nuse std::result::Result;\nuse std::string::String;\nuse std::io::util::copy;\n\n\/\/\/ Stream to interface with the FTP server. 
This interface is only for the command stream.\npub struct FTPStream {\n\tcommand_stream: TcpStream,\n\tpub host: &'static str,\n\tpub command_port: u16\n}\n\nimpl FTPStream {\n\t\n\t\/\/\/ Creates an FTP Stream.\n\tpub fn connect(host: &'static str, port: u16) -> IoResult<FTPStream> {\n\t\tlet connect_string = format!(\"{}:{}\", host, port);\n\t\tlet tcp_stream = try!(TcpStream::connect(connect_string.as_slice()));\n\t\tlet mut ftp_stream = FTPStream {\n\t\t\tcommand_stream: tcp_stream,\n\t\t\thost: host,\n\t\t\tcommand_port: port\n\t\t};\n\t\tmatch ftp_stream.read_response(220) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => println!(\"{}\", e)\n\t\t}\n\t\tOk(ftp_stream)\n\t}\n\n\t\/\/\/ Log in to the FTP server.\n\tpub fn login(&mut self, user: &str, password: &str) -> Result<(), String> {\n\t\tlet user_command = format!(\"USER {}\\r\\n\", user);\n\t\tlet pass_command = format!(\"PASS {}\\r\\n\", password);\n\n\t\tmatch self.command_stream.write_str(user_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(331) {\n\t\t\tOk(_) => {\n\n\t\t\t\tmatch self.command_stream.write_str(pass_command.as_slice()) {\n\t\t\t\t\tOk(_) => (),\n\t\t\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t\t\t}\n\n\t\t\t\tmatch self.read_response(230) {\n\t\t\t\t\tOk(_) => Ok(()),\n\t\t\t\t\tErr(s) => Err(s)\n\t\t\t\t}\n\t\t\t},\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Change the current directory to the path specified.\n\tpub fn change_dir(&mut self, path: &str) -> Result<(), String> {\n\t\tlet cwd_command = format!(\"CWD {}\\r\\n\", path);\n\n\t\tmatch self.command_stream.write_str(cwd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Move the current directory to the parent directory.\n\tpub fn change_dir_to_parent(&mut self) -> Result<(), String> {\n\t\tlet cdup_command = format!(\"CDUP\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(cdup_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Gets the current directory\n\tpub fn current_dir(&mut self) -> Result<String, String> {\n\t\tfn index_of(string: &str, ch: char) -> int {\n\t\t\tlet mut i = -1;\n\t\t\tlet mut index = 0;\n\t\t\tfor c in string.chars() {\n\t\t\t\tif c == ch {\n\t\t\t\t\ti = index;\n\t\t\t\t\treturn i\n\t\t\t\t}\n\t\t\t\tindex+=1;\n\t\t\t}\n\t\t\treturn i;\n\t\t}\n\n\t\tfn last_index_of(string: &str, ch: char) -> int {\n\t\t\tlet mut i = -1;\n\t\t\tlet mut index = 0;\n\t\t\tfor c in string.chars() {\n\t\t\t\tif c == ch {\n\t\t\t\t\ti = index;\n\t\t\t\t}\n\t\t\t\tindex+=1;\n\t\t\t}\n\t\t\treturn i;\n\t\t}\n\t\tlet pwd_command = format!(\"PWD\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(pwd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(format!(\"{}\", e))\n\t\t}\n\n\t\tmatch self.read_response(257) {\n\t\t\tOk((_, line)) => {\n\t\t\t\tlet begin = index_of(line.as_slice(), '\"');\n\t\t\t\tlet end = last_index_of(line.as_slice(), '\"');\n\n\t\t\t\tif begin == -1 || end == -1 {\n\t\t\t\t\treturn Err(format!(\"Invalid PWD Response: {}\", line))\n\t\t\t\t}\n\t\t\t\tlet b = begin as uint;\n\t\t\t\tlet e = end as uint;\n\n\t\t\t\treturn Ok(String::from_str(line.as_slice()[b+1..e]))\n\t\t\t},\n\t\t\tErr(e) 
=> Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ This does nothing. This is usually just used to keep the connection open.\n\tpub fn noop(&mut self) -> Result<(), String> {\n\t\tlet noop_command = format!(\"NOOP\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(noop_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(200) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ This creates new directories on the server.\n\tpub fn make_dir(&mut self, pathname: &str) -> Result<(), String> {\n\t\tlet mkdir_command = format!(\"MKD {}\\r\\n\", pathname);\n\n\t\tmatch self.command_stream.write_str(mkdir_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(257) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Runs the PASV command.\n\tpub fn pasv(&mut self) -> Result<(int), String> {\n\t\tlet pasv_command = format!(\"PASV\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(pasv_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\t\/\/PASV response format : 227 Entering Passive Mode (h1,h2,h3,h4,p1,p2).\n\n\t\tlet response_regex = regex!(r\"(.*)\\(\\d+,\\d+,\\d+,\\d+,(\\d+),(\\d+)\\)(.*)\");\n\n\t\tmatch self.read_response(227) {\n\t\t\tOk((_, line)) => {\n\t\t\t\tlet caps = response_regex.captures(line.as_slice()).unwrap();\n\t\t\t\tlet first_part_port: int = from_str(caps.at(2)).unwrap();\n\t\t\t\tlet second_part_port: int = from_str(caps.at(3)).unwrap();\n\t\t\t\tOk((first_part_port*256)+second_part_port)\n\t\t\t},\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Quits the current FTP session.\n\tpub fn quit(&mut self) -> Result<(int, String), String> {\n\t\tlet quit_command = format!(\"QUIT\\r\\n\");\n\t\t\n\t\tmatch self.command_stream.write_str(quit_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(221) {\n\t\t\tOk((code, message)) => Ok((code, message)),\n\t\t\tErr(message) => Err(message),\n\t\t}\n\t}\n\n\t\/\/\/ Retrieves the file name specified from the server. This method is a more complicated way to retrieve a file. 
The reader returned should be dropped.\n\t\/\/\/ Also you will have to read the response to make sure it has the correct value.\n\tpub fn retr(&mut self, file_name: &str) -> Result<BufferedReader<TcpStream>, String> {\n\t\tlet retr_command = format!(\"RETR {}\\r\\n\", file_name);\n\n\t\tlet port = match self.pasv() {\n\t\t\tOk(p) => p,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet connect_string = format!(\"{}:{}\", self.host, port);\n\t\tlet data_stream = BufferedReader::new(TcpStream::connect(connect_string.as_slice()).unwrap());\n\n\t\tmatch self.command_stream.write_str(retr_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(150) {\n\t\t\tOk(_) => Ok(data_stream),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\tfn simple_retr_(&mut self, file_name: &str) -> Result<MemReader, String> {\n\t\tlet mut data_stream = match self.retr(file_name) {\n\t\t\tOk(s) => s,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet buffer: &mut Vec<u8> = &mut Vec::new();\n\t\tloop {\n\t\t\tlet mut buf = [0, ..256];\n\t\t\tlet len = match data_stream.read(&mut buf) {\n \tOk(len) => len,\n \tErr(ref e) if e.kind == EndOfFile => break,\n \tErr(e) => return Err(format!(\"{}\", e)),\n \t};\n \tmatch buffer.write(buf[..len]) {\n \t\tOk(_) => (),\n \t\tErr(e) => return Err(format!(\"{}\", e))\n \t};\n\t\t}\n\n\t\tdrop(data_stream);\n\n\t\tOk(MemReader::new(buffer.clone()))\n\t}\n\n\t\/\/\/ Simple way to retr a file from the server. This stores the file in memory.\n\tpub fn simple_retr(&mut self, file_name: &str) -> Result<MemReader, String> {\n\t\tlet r = match self.simple_retr_(file_name) {\n\t\t\tOk(reader) => reader,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tmatch self.read_response(226) {\n\t\t\tOk(_) => Ok(r),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Removes the remote pathname from the server.\n\tpub fn remove_dir(&mut self, pathname: &str) -> Result<(), String> {\n\t\tlet rmd_command = format!(\"RMD {}\\r\\n\", pathname);\n\n\t\tmatch self.command_stream.write_str(rmd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\tfn stor_<R: Reader>(&mut self, filename: &str, r: &mut R) -> Result<(), String> {\n\t\tlet stor_command = format!(\"STOR {}\\r\\n\", filename);\n\n\t\tlet port = match self.pasv() {\n\t\t\tOk(p) => p,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet connect_string = format!(\"{}:{}\", self.host, port);\n\t\tlet data_stream: &mut BufferedWriter<TcpStream> = &mut BufferedWriter::new(TcpStream::connect(connect_string.as_slice()).unwrap());\n\n\t\tmatch self.command_stream.write_str(stor_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(150) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(e)\n\t\t}\n\n\t\tmatch copy(r, data_stream) {\n\t\t\tOk(_) => {\n\t\t\t\tdrop(data_stream);\n\t\t\t\tOk(())\n\t\t\t},\n\t\t\tErr(_) => {\n\t\t\t\tdrop(data_stream);\n\t\t\t\tErr(format!(\"Error Writing\"))\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/\/ This stores a file on the server.\n\tpub fn stor<R: Reader>(&mut self, filename: &str, r: &mut R) -> Result<(), String> {\n\t\tmatch self.stor_(filename, r) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tmatch self.read_response(226) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/Retrieve single line 
response\n\tpub fn read_response(&mut self, expected_code: int) -> Result<(int, String), String> {\n\t\t\/\/Carriage return\n\t\tlet cr = 0x0d;\n\t\t\/\/Line Feed\n\t\tlet lf = 0x0a;\n\t\tlet mut line_buffer: Vec<u8> = Vec::new();\n\n\t\twhile line_buffer.len() < 2 || (line_buffer[line_buffer.len()-1] != lf && line_buffer[line_buffer.len()-2] != cr) {\n\t\t\t\tlet byte_buffer: &mut [u8] = &mut [0];\n\t\t\t\tmatch self.command_stream.read(byte_buffer) {\n\t\t\t\t\tOk(_) => {},\n\t\t\t\t\tErr(_) => return Err(format!(\"Error reading response\")),\n\t\t\t\t}\n\t\t\t\tline_buffer.push(byte_buffer[0]);\n\t\t}\n\n\t\tlet response = String::from_utf8(line_buffer).unwrap();\n\t\tlet chars_to_trim: &[char] = &['\\r', '\\n'];\n\t\tlet trimmed_response = response.as_slice().trim_chars(chars_to_trim);\n \tlet trimmed_response_vec: Vec<char> = trimmed_response.chars().collect();\n \tif trimmed_response_vec.len() < 5 || trimmed_response_vec[3] != ' ' {\n \t\treturn Err(format!(\"Invalid response\"));\n \t}\n\n \tlet v: Vec<&str> = trimmed_response.splitn(1, ' ').collect();\n \tlet code: int = from_str(v[0]).unwrap();\n \tlet message = v[1];\n \tif code != expected_code {\n \t\treturn Err(format!(\"Invalid response: {} {}\", code, message))\n \t}\n \tOk((code, String::from_str(message)))\n\t}\n}<commit_msg>Updating ftp client for the current version of rust<commit_after>#![crate_name = \"ftp\"]\n#![crate_type = \"lib\"]\n#![feature(phase, slicing_syntax)]\n\nextern crate regex;\n\n#[phase(plugin)] extern crate regex_macros;\n\nuse std::io::{IoResult, TcpStream, BufferedReader, BufferedWriter, MemReader, EndOfFile};\nuse std::result::Result;\nuse std::string::String;\nuse std::io::util::copy;\n\n\/\/\/ Stream to interface with the FTP server. This interface is only for the command stream.\npub struct FTPStream {\n\tcommand_stream: TcpStream,\n\tpub host: &'static str,\n\tpub command_port: u16\n}\n\nimpl FTPStream {\n\t\n\t\/\/\/ Creates an FTP Stream.\n\tpub fn connect(host: &'static str, port: u16) -> IoResult<FTPStream> {\n\t\tlet connect_string = format!(\"{}:{}\", host, port);\n\t\tlet tcp_stream = try!(TcpStream::connect(connect_string.as_slice()));\n\t\tlet mut ftp_stream = FTPStream {\n\t\t\tcommand_stream: tcp_stream,\n\t\t\thost: host,\n\t\t\tcommand_port: port\n\t\t};\n\t\tmatch ftp_stream.read_response(220) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => println!(\"{}\", e)\n\t\t}\n\t\tOk(ftp_stream)\n\t}\n\n\t\/\/\/ Log in to the FTP server.\n\tpub fn login(&mut self, user: &str, password: &str) -> Result<(), String> {\n\t\tlet user_command = format!(\"USER {}\\r\\n\", user);\n\t\tlet pass_command = format!(\"PASS {}\\r\\n\", password);\n\n\t\tmatch self.command_stream.write_str(user_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(331) {\n\t\t\tOk(_) => {\n\n\t\t\t\tmatch self.command_stream.write_str(pass_command.as_slice()) {\n\t\t\t\t\tOk(_) => (),\n\t\t\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t\t\t}\n\n\t\t\t\tmatch self.read_response(230) {\n\t\t\t\t\tOk(_) => Ok(()),\n\t\t\t\t\tErr(s) => Err(s)\n\t\t\t\t}\n\t\t\t},\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Change the current directory to the path specified.\n\tpub fn change_dir(&mut self, path: &str) -> Result<(), String> {\n\t\tlet cwd_command = format!(\"CWD {}\\r\\n\", path);\n\n\t\tmatch self.command_stream.write_str(cwd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch 
self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Move the current directory to the parent directory.\n\tpub fn change_dir_to_parent(&mut self) -> Result<(), String> {\n\t\tlet cdup_command = format!(\"CDUP\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(cdup_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Gets the current directory\n\tpub fn current_dir(&mut self) -> Result<String, String> {\n\t\tfn index_of(string: &str, ch: char) -> int {\n\t\t\tlet mut i = -1;\n\t\t\tlet mut index = 0;\n\t\t\tfor c in string.chars() {\n\t\t\t\tif c == ch {\n\t\t\t\t\ti = index;\n\t\t\t\t\treturn i\n\t\t\t\t}\n\t\t\t\tindex+=1;\n\t\t\t}\n\t\t\treturn i;\n\t\t}\n\n\t\tfn last_index_of(string: &str, ch: char) -> int {\n\t\t\tlet mut i = -1;\n\t\t\tlet mut index = 0;\n\t\t\tfor c in string.chars() {\n\t\t\t\tif c == ch {\n\t\t\t\t\ti = index;\n\t\t\t\t}\n\t\t\t\tindex+=1;\n\t\t\t}\n\t\t\treturn i;\n\t\t}\n\t\tlet pwd_command = format!(\"PWD\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(pwd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(format!(\"{}\", e))\n\t\t}\n\n\t\tmatch self.read_response(257) {\n\t\t\tOk((_, line)) => {\n\t\t\t\tlet begin = index_of(line.as_slice(), '\"');\n\t\t\t\tlet end = last_index_of(line.as_slice(), '\"');\n\n\t\t\t\tif begin == -1 || end == -1 {\n\t\t\t\t\treturn Err(format!(\"Invalid PWD Response: {}\", line))\n\t\t\t\t}\n\t\t\t\tlet b = begin as uint;\n\t\t\t\tlet e = end as uint;\n\n\t\t\t\treturn Ok(String::from_str(line.as_slice()[b+1..e]))\n\t\t\t},\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ This does nothing. 
This is usually just used to keep the connection open.\n\tpub fn noop(&mut self) -> Result<(), String> {\n\t\tlet noop_command = format!(\"NOOP\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(noop_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(200) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ This creates new directories on the server.\n\tpub fn make_dir(&mut self, pathname: &str) -> Result<(), String> {\n\t\tlet mkdir_command = format!(\"MKD {}\\r\\n\", pathname);\n\n\t\tmatch self.command_stream.write_str(mkdir_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(257) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Runs the PASV command.\n\tpub fn pasv(&mut self) -> Result<(int), String> {\n\t\tlet pasv_command = format!(\"PASV\\r\\n\");\n\n\t\tmatch self.command_stream.write_str(pasv_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\t\/\/PASV response format : 227 Entering Passive Mode (h1,h2,h3,h4,p1,p2).\n\n\t\tlet response_regex = regex!(r\"(.*)\\(\\d+,\\d+,\\d+,\\d+,(\\d+),(\\d+)\\)(.*)\");\n\n\t\tmatch self.read_response(227) {\n\t\t\tOk((_, line)) => {\n\t\t\t\tlet caps = response_regex.captures(line.as_slice()).unwrap();\n\t\t\t\tlet caps_2 = match caps.at(2) {\n\t\t\t\t\tSome(s) => s,\n\t\t\t\t\tNone => return Err(format!(\"Problems parsing reponse\"))\n\t\t\t\t};\n\t\t\t\tlet caps_3 = match caps.at(3) {\n\t\t\t\t\tSome(s) => s,\n\t\t\t\t\tNone => return Err(format!(\"Problems parsing reponse\"))\n\t\t\t\t};\n\t\t\t\tlet first_part_port: int = from_str(caps_2).unwrap();\n\t\t\t\tlet second_part_port: int = from_str(caps_3).unwrap();\n\t\t\t\tOk((first_part_port*256)+second_part_port)\n\t\t\t},\n\t\t\tErr(s) => Err(s)\n\t\t}\n\t}\n\n\t\/\/\/ Quits the current FTP session.\n\tpub fn quit(&mut self) -> Result<(int, String), String> {\n\t\tlet quit_command = format!(\"QUIT\\r\\n\");\n\t\t\n\t\tmatch self.command_stream.write_str(quit_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(221) {\n\t\t\tOk((code, message)) => Ok((code, message)),\n\t\t\tErr(message) => Err(message),\n\t\t}\n\t}\n\n\t\/\/\/ Retrieves the file name specified from the server. This method is a more complicated way to retrieve a file. 
The reader returned should be dropped.\n\t\/\/\/ Also you will have to read the response to make sure it has the correct value.\n\tpub fn retr(&mut self, file_name: &str) -> Result<BufferedReader<TcpStream>, String> {\n\t\tlet retr_command = format!(\"RETR {}\\r\\n\", file_name);\n\n\t\tlet port = match self.pasv() {\n\t\t\tOk(p) => p,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet connect_string = format!(\"{}:{}\", self.host, port);\n\t\tlet data_stream = BufferedReader::new(TcpStream::connect(connect_string.as_slice()).unwrap());\n\n\t\tmatch self.command_stream.write_str(retr_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(150) {\n\t\t\tOk(_) => Ok(data_stream),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\tfn simple_retr_(&mut self, file_name: &str) -> Result<MemReader, String> {\n\t\tlet mut data_stream = match self.retr(file_name) {\n\t\t\tOk(s) => s,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet buffer: &mut Vec<u8> = &mut Vec::new();\n\t\tloop {\n\t\t\tlet mut buf = [0, ..256];\n\t\t\tlet len = match data_stream.read(&mut buf) {\n \tOk(len) => len,\n \tErr(ref e) if e.kind == EndOfFile => break,\n \tErr(e) => return Err(format!(\"{}\", e)),\n \t};\n \tmatch buffer.write(buf[..len]) {\n \t\tOk(_) => (),\n \t\tErr(e) => return Err(format!(\"{}\", e))\n \t};\n\t\t}\n\n\t\tdrop(data_stream);\n\n\t\tOk(MemReader::new(buffer.clone()))\n\t}\n\n\t\/\/\/ Simple way to retr a file from the server. This stores the file in memory.\n\tpub fn simple_retr(&mut self, file_name: &str) -> Result<MemReader, String> {\n\t\tlet r = match self.simple_retr_(file_name) {\n\t\t\tOk(reader) => reader,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tmatch self.read_response(226) {\n\t\t\tOk(_) => Ok(r),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/\/ Removes the remote pathname from the server.\n\tpub fn remove_dir(&mut self, pathname: &str) -> Result<(), String> {\n\t\tlet rmd_command = format!(\"RMD {}\\r\\n\", pathname);\n\n\t\tmatch self.command_stream.write_str(rmd_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(250) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\tfn stor_<R: Reader>(&mut self, filename: &str, r: &mut R) -> Result<(), String> {\n\t\tlet stor_command = format!(\"STOR {}\\r\\n\", filename);\n\n\t\tlet port = match self.pasv() {\n\t\t\tOk(p) => p,\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tlet connect_string = format!(\"{}:{}\", self.host, port);\n\t\tlet data_stream: &mut BufferedWriter<TcpStream> = &mut BufferedWriter::new(TcpStream::connect(connect_string.as_slice()).unwrap());\n\n\t\tmatch self.command_stream.write_str(stor_command.as_slice()) {\n\t\t\tOk(_) => (),\n\t\t\tErr(_) => return Err(format!(\"Write Error\"))\n\t\t}\n\n\t\tmatch self.read_response(150) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(e)\n\t\t}\n\n\t\tmatch copy(r, data_stream) {\n\t\t\tOk(_) => {\n\t\t\t\tdrop(data_stream);\n\t\t\t\tOk(())\n\t\t\t},\n\t\t\tErr(_) => {\n\t\t\t\tdrop(data_stream);\n\t\t\t\tErr(format!(\"Error Writing\"))\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/\/ This stores a file on the server.\n\tpub fn stor<R: Reader>(&mut self, filename: &str, r: &mut R) -> Result<(), String> {\n\t\tmatch self.stor_(filename, r) {\n\t\t\tOk(_) => (),\n\t\t\tErr(e) => return Err(e)\n\t\t};\n\n\t\tmatch self.read_response(226) {\n\t\t\tOk(_) => Ok(()),\n\t\t\tErr(e) => Err(e)\n\t\t}\n\t}\n\n\t\/\/Retrieve single line 
response\n\tpub fn read_response(&mut self, expected_code: int) -> Result<(int, String), String> {\n\t\t\/\/Carriage return\n\t\tlet cr = 0x0d;\n\t\t\/\/Line Feed\n\t\tlet lf = 0x0a;\n\t\tlet mut line_buffer: Vec<u8> = Vec::new();\n\n\t\twhile line_buffer.len() < 2 || (line_buffer[line_buffer.len()-1] != lf && line_buffer[line_buffer.len()-2] != cr) {\n\t\t\t\tlet byte_buffer: &mut [u8] = &mut [0];\n\t\t\t\tmatch self.command_stream.read(byte_buffer) {\n\t\t\t\t\tOk(_) => {},\n\t\t\t\t\tErr(_) => return Err(format!(\"Error reading response\")),\n\t\t\t\t}\n\t\t\t\tline_buffer.push(byte_buffer[0]);\n\t\t}\n\n\t\tlet response = String::from_utf8(line_buffer).unwrap();\n\t\tlet chars_to_trim: &[char] = &['\\r', '\\n'];\n\t\tlet trimmed_response = response.as_slice().trim_chars(chars_to_trim);\n \tlet trimmed_response_vec: Vec<char> = trimmed_response.chars().collect();\n \tif trimmed_response_vec.len() < 5 || trimmed_response_vec[3] != ' ' {\n \t\treturn Err(format!(\"Invalid response\"));\n \t}\n\n \tlet v: Vec<&str> = trimmed_response.splitn(1, ' ').collect();\n \tlet code: int = from_str(v[0]).unwrap();\n \tlet message = v[1];\n \tif code != expected_code {\n \t\treturn Err(format!(\"Invalid response: {} {}\", code, message))\n \t}\n \tOk((code, String::from_str(message)))\n\t}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Simple generics<commit_after>fn print_vec<T: ToStr>(v: &[T]) {\n for i in v.iter() {\n println(i.to_str());\n }\n}\n\nfn main() {\n let vec = [1,2,3];\n\n print_vec(vec);\n\n let str_vec = [~\"hey\", ~\"there\", ~\"yo\"];\n\n print_vec(str_vec);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added usable_cores<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Addition and Multiplication for NonSmallInt<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Begin building the directory structure for cargo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update tests, add doc-tests<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This is a part of rust-chrono.\n\/\/ Copyright (c) 2014, Kang Seonghoon.\n\/\/ See README.md and LICENSE.txt for details.\n\n\/*!\nExperimental date and time handling for Rust.\n*\/\n\n#![comment = \"Date and time library for Rust\"]\n#![license = \"MIT\"]\n\n#![feature(macro_rules)]\n#![deny(missing_doc)]\n\nextern crate num;\nextern crate \"time\" as stdtime;\n\npub use duration::Duration;\npub use offset::{Offset, LocalResult};\npub use offset::{UTC, FixedOffset, Local};\npub use naive::date::NaiveDate;\npub use naive::time::NaiveTime;\npub use naive::datetime::NaiveDateTime;\npub use date::Date;\npub use time::Time;\npub use datetime::DateTime;\n\npub mod duration {\n \/\/! ISO 8601 duration.\n \/\/!\n \/\/! This used to be a part of rust-chrono,\n \/\/! but has been subsequently merged into Rust's standard library.\n pub use std::time::duration::{MIN, MAX, Duration};\n}\npub mod offset;\npub mod naive {\n \/\/! Date and time types which do not concern about the timezones.\n \/\/!\n \/\/! They are primarily building blocks for other types (e.g. `Offset`),\n \/\/! 
but can be also used for the simpler date and time handling.\n pub mod date;\n pub mod time;\n pub mod datetime;\n}\npub mod date;\npub mod time;\npub mod datetime;\npub mod format;\n\n\/\/\/ The day of week (DOW).\n\/\/\/\n\/\/\/ The order of the days of week depends on the context.\n\/\/\/ One should prefer `*_from_monday` or `*_from_sunday` methods to get the correct result.\n#[deriving(PartialEq, Eq, Clone, FromPrimitive, Show)]\npub enum Weekday {\n \/\/\/ Monday.\n Mon = 0,\n \/\/\/ Tuesday.\n Tue = 1,\n \/\/\/ Wednesday.\n Wed = 2,\n \/\/\/ Thursday.\n Thu = 3,\n \/\/\/ Friday.\n Fri = 4,\n \/\/\/ Saturday.\n Sat = 5,\n \/\/\/ Sunday.\n Sun = 6,\n}\n\nimpl Weekday {\n \/\/\/ The next day in the week.\n #[inline]\n pub fn succ(&self) -> Weekday {\n match *self {\n Mon => Tue,\n Tue => Wed,\n Wed => Thu,\n Thu => Fri,\n Fri => Sat,\n Sat => Sun,\n Sun => Mon,\n }\n }\n\n \/\/\/ The previous day in the week.\n #[inline]\n pub fn pred(&self) -> Weekday {\n match *self {\n Mon => Sun,\n Tue => Mon,\n Wed => Tue,\n Thu => Wed,\n Fri => Thu,\n Sat => Fri,\n Sun => Sat,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Monday = 1. (ISO 8601 weekday number)\n #[inline]\n pub fn number_from_monday(&self) -> u32 {\n match *self {\n Mon => 1,\n Tue => 2,\n Wed => 3,\n Thu => 4,\n Fri => 5,\n Sat => 6,\n Sun => 7,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Sunday = 1.\n #[inline]\n pub fn number_from_sunday(&self) -> u32 {\n match *self {\n Mon => 2,\n Tue => 3,\n Wed => 4,\n Thu => 5,\n Fri => 6,\n Sat => 7,\n Sun => 1,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Monday = 0.\n #[inline]\n pub fn num_days_from_monday(&self) -> u32 {\n match *self {\n Mon => 0,\n Tue => 1,\n Wed => 2,\n Thu => 3,\n Fri => 4,\n Sat => 5,\n Sun => 6,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Sunday = 0.\n #[inline]\n pub fn num_days_from_sunday(&self) -> u32 {\n match *self {\n Mon => 1,\n Tue => 2,\n Wed => 3,\n Thu => 4,\n Fri => 5,\n Sat => 6,\n Sun => 0,\n }\n }\n}\n\n\/\/\/ The common set of methods for date component.\npub trait Datelike {\n \/\/\/ Returns the year number.\n fn year(&self) -> i32;\n\n \/\/\/ Returns the absolute year number starting from 1 with a boolean flag,\n \/\/\/ which is false when the year predates the epoch (BCE\/BC) and true otherwise (CE\/AD).\n #[inline]\n fn year_ce(&self) -> (bool, u32) {\n let year = self.year();\n if year < 1 {\n (false, (1 - year) as u32)\n } else {\n (true, year as u32)\n }\n }\n\n \/\/\/ Returns the month number starting from 1.\n fn month(&self) -> u32;\n\n \/\/\/ Returns the month number starting from 0.\n fn month0(&self) -> u32;\n\n \/\/\/ Returns the day of month starting from 1.\n fn day(&self) -> u32;\n\n \/\/\/ Returns the day of month starting from 0.\n fn day0(&self) -> u32;\n\n \/\/\/ Returns the day of year starting from 1.\n fn ordinal(&self) -> u32;\n\n \/\/\/ Returns the day of year starting from 0.\n fn ordinal0(&self) -> u32;\n\n \/\/\/ Returns the day of week.\n fn weekday(&self) -> Weekday;\n\n \/\/\/ Returns the ISO week date: an adjusted year, week number and day of week.\n \/\/\/ The adjusted year may differ from that of the calendar date.\n fn isoweekdate(&self) -> (i32, u32, Weekday);\n\n \/\/\/ Makes a new value with the year number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_year(&self, year: i32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the month number (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting 
value would be invalid.\n fn with_month(&self, month: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the month number (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_month0(&self, month0: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of month (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_day(&self, day: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of month (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_day0(&self, day0: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of year (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_ordinal(&self, ordinal: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of year (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_ordinal0(&self, ordinal0: u32) -> Option<Self>;\n\n \/\/\/ Returns the number of days since January 1, 1 (Day 1) in the proleptic Gregorian calendar.\n fn num_days_from_ce(&self) -> i32 {\n \/\/ we know this wouldn't overflow since year is limited to 1\/2^13 of i32's full range.\n let mut year = self.year() - 1;\n let mut ndays = 0;\n if year < 0 {\n let excess = 1 + (-year) \/ 400;\n year += excess * 400;\n ndays -= excess * 146097;\n }\n let div_100 = year \/ 100;\n ndays += ((year * 1461) >> 2) - div_100 + (div_100 >> 2);\n ndays + self.ordinal() as i32\n }\n}\n\n\/\/\/ The common set of methods for time component.\npub trait Timelike {\n \/\/\/ Returns the hour number from 0 to 23.\n fn hour(&self) -> u32;\n\n \/\/\/ Returns the hour number from 1 to 12 with a boolean flag,\n \/\/\/ which is false for AM and true for PM.\n #[inline]\n fn hour12(&self) -> (bool, u32) {\n let hour = self.hour();\n let mut hour12 = hour % 12;\n if hour12 == 0 { hour12 = 12; }\n (hour >= 12, hour12)\n }\n\n \/\/\/ Returns the minute number from 0 to 59.\n fn minute(&self) -> u32;\n\n \/\/\/ Returns the second number from 0 to 59.\n fn second(&self) -> u32;\n\n \/\/\/ Returns the number of nanoseconds since the whole non-leap second.\n \/\/\/ The range from 1,000,000,000 to 1,999,999,999 represents the leap second.\n fn nanosecond(&self) -> u32;\n\n \/\/\/ Makes a new value with the hour number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_hour(&self, hour: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the minute number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_minute(&self, min: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the second number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_second(&self, sec: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with nanoseconds since the whole non-leap second changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_nanosecond(&self, nano: u32) -> Option<Self>;\n\n \/\/\/ Returns the number of non-leap seconds past the last midnight.\n #[inline]\n fn num_seconds_from_midnight(&self) -> u32 {\n self.hour() * 3600 + self.minute() * 60 + self.second()\n }\n}\n\n#[test]\nfn test_readme_doomsday() {\n use std::iter::range_inclusive;\n\n for y in range_inclusive(naive::date::MIN.year(), naive::date::MAX.year()) {\n \/\/ 
even months\n let d4 = NaiveDate::from_ymd(y, 4, 4);\n let d6 = NaiveDate::from_ymd(y, 6, 6);\n let d8 = NaiveDate::from_ymd(y, 8, 8);\n let d10 = NaiveDate::from_ymd(y, 10, 10);\n let d12 = NaiveDate::from_ymd(y, 12, 12);\n\n \/\/ nine to five, seven-eleven\n let d59 = NaiveDate::from_ymd(y, 5, 9);\n let d95 = NaiveDate::from_ymd(y, 9, 5);\n let d711 = NaiveDate::from_ymd(y, 7, 11);\n let d117 = NaiveDate::from_ymd(y, 11, 7);\n\n \/\/ \"March 0\"\n let d30 = NaiveDate::from_ymd(y, 3, 1).pred();\n\n let weekday = d30.weekday();\n let other_dates = [d4, d6, d8, d10, d12, d59, d95, d711, d117];\n assert!(other_dates.iter().all(|d| d.weekday() == weekday));\n }\n}\n\n<commit_msg>Fix lint warning on deny(missing_doc)<commit_after>\/\/ This is a part of rust-chrono.\n\/\/ Copyright (c) 2014, Kang Seonghoon.\n\/\/ See README.md and LICENSE.txt for details.\n\n\/*!\nExperimental date and time handling for Rust.\n*\/\n\n#![comment = \"Date and time library for Rust\"]\n#![license = \"MIT\"]\n\n#![feature(macro_rules)]\n#![deny(missing_docs)]\n\nextern crate num;\nextern crate \"time\" as stdtime;\n\npub use duration::Duration;\npub use offset::{Offset, LocalResult};\npub use offset::{UTC, FixedOffset, Local};\npub use naive::date::NaiveDate;\npub use naive::time::NaiveTime;\npub use naive::datetime::NaiveDateTime;\npub use date::Date;\npub use time::Time;\npub use datetime::DateTime;\n\npub mod duration {\n \/\/! ISO 8601 duration.\n \/\/!\n \/\/! This used to be a part of rust-chrono,\n \/\/! but has been subsequently merged into Rust's standard library.\n pub use std::time::duration::{MIN, MAX, Duration};\n}\npub mod offset;\npub mod naive {\n \/\/! Date and time types which do not concern about the timezones.\n \/\/!\n \/\/! They are primarily building blocks for other types (e.g. `Offset`),\n \/\/! but can be also used for the simpler date and time handling.\n pub mod date;\n pub mod time;\n pub mod datetime;\n}\npub mod date;\npub mod time;\npub mod datetime;\npub mod format;\n\n\/\/\/ The day of week (DOW).\n\/\/\/\n\/\/\/ The order of the days of week depends on the context.\n\/\/\/ One should prefer `*_from_monday` or `*_from_sunday` methods to get the correct result.\n#[deriving(PartialEq, Eq, Clone, FromPrimitive, Show)]\npub enum Weekday {\n \/\/\/ Monday.\n Mon = 0,\n \/\/\/ Tuesday.\n Tue = 1,\n \/\/\/ Wednesday.\n Wed = 2,\n \/\/\/ Thursday.\n Thu = 3,\n \/\/\/ Friday.\n Fri = 4,\n \/\/\/ Saturday.\n Sat = 5,\n \/\/\/ Sunday.\n Sun = 6,\n}\n\nimpl Weekday {\n \/\/\/ The next day in the week.\n #[inline]\n pub fn succ(&self) -> Weekday {\n match *self {\n Mon => Tue,\n Tue => Wed,\n Wed => Thu,\n Thu => Fri,\n Fri => Sat,\n Sat => Sun,\n Sun => Mon,\n }\n }\n\n \/\/\/ The previous day in the week.\n #[inline]\n pub fn pred(&self) -> Weekday {\n match *self {\n Mon => Sun,\n Tue => Mon,\n Wed => Tue,\n Thu => Wed,\n Fri => Thu,\n Sat => Fri,\n Sun => Sat,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Monday = 1. 
(ISO 8601 weekday number)\n #[inline]\n pub fn number_from_monday(&self) -> u32 {\n match *self {\n Mon => 1,\n Tue => 2,\n Wed => 3,\n Thu => 4,\n Fri => 5,\n Sat => 6,\n Sun => 7,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Sunday = 1.\n #[inline]\n pub fn number_from_sunday(&self) -> u32 {\n match *self {\n Mon => 2,\n Tue => 3,\n Wed => 4,\n Thu => 5,\n Fri => 6,\n Sat => 7,\n Sun => 1,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Monday = 0.\n #[inline]\n pub fn num_days_from_monday(&self) -> u32 {\n match *self {\n Mon => 0,\n Tue => 1,\n Wed => 2,\n Thu => 3,\n Fri => 4,\n Sat => 5,\n Sun => 6,\n }\n }\n\n \/\/\/ Returns a DOW number starting from Sunday = 0.\n #[inline]\n pub fn num_days_from_sunday(&self) -> u32 {\n match *self {\n Mon => 1,\n Tue => 2,\n Wed => 3,\n Thu => 4,\n Fri => 5,\n Sat => 6,\n Sun => 0,\n }\n }\n}\n\n\/\/\/ The common set of methods for date component.\npub trait Datelike {\n \/\/\/ Returns the year number.\n fn year(&self) -> i32;\n\n \/\/\/ Returns the absolute year number starting from 1 with a boolean flag,\n \/\/\/ which is false when the year predates the epoch (BCE\/BC) and true otherwise (CE\/AD).\n #[inline]\n fn year_ce(&self) -> (bool, u32) {\n let year = self.year();\n if year < 1 {\n (false, (1 - year) as u32)\n } else {\n (true, year as u32)\n }\n }\n\n \/\/\/ Returns the month number starting from 1.\n fn month(&self) -> u32;\n\n \/\/\/ Returns the month number starting from 0.\n fn month0(&self) -> u32;\n\n \/\/\/ Returns the day of month starting from 1.\n fn day(&self) -> u32;\n\n \/\/\/ Returns the day of month starting from 0.\n fn day0(&self) -> u32;\n\n \/\/\/ Returns the day of year starting from 1.\n fn ordinal(&self) -> u32;\n\n \/\/\/ Returns the day of year starting from 0.\n fn ordinal0(&self) -> u32;\n\n \/\/\/ Returns the day of week.\n fn weekday(&self) -> Weekday;\n\n \/\/\/ Returns the ISO week date: an adjusted year, week number and day of week.\n \/\/\/ The adjusted year may differ from that of the calendar date.\n fn isoweekdate(&self) -> (i32, u32, Weekday);\n\n \/\/\/ Makes a new value with the year number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_year(&self, year: i32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the month number (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_month(&self, month: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the month number (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_month0(&self, month0: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of month (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_day(&self, day: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of month (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_day0(&self, day0: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of year (starting from 1) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_ordinal(&self, ordinal: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the day of year (starting from 0) changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_ordinal0(&self, ordinal0: u32) -> Option<Self>;\n\n \/\/\/ Returns the number of days since 
January 1, 1 (Day 1) in the proleptic Gregorian calendar.\n fn num_days_from_ce(&self) -> i32 {\n \/\/ we know this wouldn't overflow since year is limited to 1\/2^13 of i32's full range.\n let mut year = self.year() - 1;\n let mut ndays = 0;\n if year < 0 {\n let excess = 1 + (-year) \/ 400;\n year += excess * 400;\n ndays -= excess * 146097;\n }\n let div_100 = year \/ 100;\n ndays += ((year * 1461) >> 2) - div_100 + (div_100 >> 2);\n ndays + self.ordinal() as i32\n }\n}\n\n\/\/\/ The common set of methods for time component.\npub trait Timelike {\n \/\/\/ Returns the hour number from 0 to 23.\n fn hour(&self) -> u32;\n\n \/\/\/ Returns the hour number from 1 to 12 with a boolean flag,\n \/\/\/ which is false for AM and true for PM.\n #[inline]\n fn hour12(&self) -> (bool, u32) {\n let hour = self.hour();\n let mut hour12 = hour % 12;\n if hour12 == 0 { hour12 = 12; }\n (hour >= 12, hour12)\n }\n\n \/\/\/ Returns the minute number from 0 to 59.\n fn minute(&self) -> u32;\n\n \/\/\/ Returns the second number from 0 to 59.\n fn second(&self) -> u32;\n\n \/\/\/ Returns the number of nanoseconds since the whole non-leap second.\n \/\/\/ The range from 1,000,000,000 to 1,999,999,999 represents the leap second.\n fn nanosecond(&self) -> u32;\n\n \/\/\/ Makes a new value with the hour number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_hour(&self, hour: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the minute number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_minute(&self, min: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with the second number changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_second(&self, sec: u32) -> Option<Self>;\n\n \/\/\/ Makes a new value with nanoseconds since the whole non-leap second changed.\n \/\/\/\n \/\/\/ Returns `None` when the resulting value would be invalid.\n fn with_nanosecond(&self, nano: u32) -> Option<Self>;\n\n \/\/\/ Returns the number of non-leap seconds past the last midnight.\n #[inline]\n fn num_seconds_from_midnight(&self) -> u32 {\n self.hour() * 3600 + self.minute() * 60 + self.second()\n }\n}\n\n#[test]\nfn test_readme_doomsday() {\n use std::iter::range_inclusive;\n\n for y in range_inclusive(naive::date::MIN.year(), naive::date::MAX.year()) {\n \/\/ even months\n let d4 = NaiveDate::from_ymd(y, 4, 4);\n let d6 = NaiveDate::from_ymd(y, 6, 6);\n let d8 = NaiveDate::from_ymd(y, 8, 8);\n let d10 = NaiveDate::from_ymd(y, 10, 10);\n let d12 = NaiveDate::from_ymd(y, 12, 12);\n\n \/\/ nine to five, seven-eleven\n let d59 = NaiveDate::from_ymd(y, 5, 9);\n let d95 = NaiveDate::from_ymd(y, 9, 5);\n let d711 = NaiveDate::from_ymd(y, 7, 11);\n let d117 = NaiveDate::from_ymd(y, 11, 7);\n\n \/\/ \"March 0\"\n let d30 = NaiveDate::from_ymd(y, 3, 1).pred();\n\n let weekday = d30.weekday();\n let other_dates = [d4, d6, d8, d10, d12, d59, d95, d711, d117];\n assert!(other_dates.iter().all(|d| d.weekday() == weekday));\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>adding rub buildable support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added preliminary quadtree tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>All known rewarder messages accounted for<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Expose error_chain types.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update 
lib.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Inserted Parsed into alloy<commit_after>#![crate_id = \"bodyparser\"]\n#![license = \"MIT\"]\n\nextern crate iron;\nextern crate serialize;\n\n\/\/use iron::{Iron, Ingot, Furnace, Alloy, Request, Response, ServerT};\nuse iron::{Ingot, Alloy, Request, Response};\nuse iron::ingot::{Status, Continue, Unwind};\n\nuse serialize::json;\n\/\/use serialize::json::{List, Object};\n\n#[deriving(Clone)]\nstruct Parsed(json::Json);\n\n#[deriving(Clone)]\npub struct BodyParser;\n\nimpl BodyParser {\n fn new() -> BodyParser {\n BodyParser\n }\n}\n\nimpl<Rq: Request, Rs: Response> Ingot<Rq, Rs> for BodyParser {\n fn enter(&mut self, _rq: &mut Rq, _rs: &mut Rs, alloy: &mut Alloy) -> Status {\n alloy.insert::<Parsed>(Parsed(parse_body(_rq.body())));\n Continue\n }\n}\n\nfn parse_body(x:&str) -> json::Json {\n let json_object = json::from_str(x.as_slice());\n json_object.clone().unwrap()\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>What....?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Better lifetimes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added many intersection tests<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add code to authenticate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed connection panic. Rewarder connection is being established reliably now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor phrasing updates to API level comments<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix typo in comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>docs update<commit_after><|endoftext|>"} {"text":"<commit_before>#![cfg(any(target_arch=\"x86\", target_arch=\"x86_64\"))]\n\n#![no_std]\n#![crate_name=\"cpu\"]\n#![crate_type=\"rlib\"]\n#![feature(no_std)]\n#![feature(asm)]\n#![feature(core)]\n#![feature(hash)]\n\n#[macro_use]\nextern crate core;\n#[macro_use]\nextern crate bitflags;\n\npub use cpu::*;\n\n#[cfg(target_arch=\"x86\")]\n#[path = \"x86.rs\"]\nmod cpu;\n#[cfg(target_arch=\"x86_64\")]\n#[path = \"x86_64.rs\"]\nmod cpu;\n\npub mod std { pub use core::*; }\n<commit_msg>Remove the now-unused hash feature gate<commit_after>#![cfg(any(target_arch=\"x86\", target_arch=\"x86_64\"))]\n\n#![no_std]\n#![crate_name=\"cpu\"]\n#![crate_type=\"rlib\"]\n#![feature(no_std)]\n#![feature(asm)]\n#![feature(core)]\n\n#[macro_use]\nextern crate core;\n#[macro_use]\nextern crate bitflags;\n\npub use cpu::*;\n\n#[cfg(target_arch=\"x86\")]\n#[path = \"x86.rs\"]\nmod cpu;\n#[cfg(target_arch=\"x86_64\")]\n#[path = \"x86_64.rs\"]\nmod cpu;\n\npub mod std { pub use core::*; }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>adds docs about query string decoding<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate crypto;\nextern crate rustc_serialize;\n\npub mod error;\npub mod header;\npub mod claims;\n<commit_msg>Add signing<commit_after>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::Mac;\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n Newline,\n ToBase64,\n};\n\npub mod error;\npub mod header;\npub mod claims;\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n 
hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>more exports<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make code compilable with rustc revision 7d7e409<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove __free_impl<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make window resizable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add convenience method to acces various pieces of an URL.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Separate FFI bindings to new mod ffi<commit_after>\/\/ The MIT License (MIT)\n\/\/\n\/\/ Copyright (c) 2015 Johan Johansson\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy\n\/\/ of this software and associated documentation files (the \"Software\"), to deal\n\/\/ in the Software without restriction, including without limitation the rights\n\/\/ to use, copy, modify, merge, publish, distribute, sublicense, and\/or sell\n\/\/ copies of the Software, and to permit persons to whom the Software is\n\/\/ furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in\n\/\/ all copies or substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\/\/ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\/\/ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\/\/ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\/\/ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\/\/ THE SOFTWARE.\n\n\/\/! 
FFI bindings for functions related to SHM, shared memory\n\nuse x11::xlib::{ Display, Bool, Visual, XImage, Drawable };\nuse libc::{ c_int, c_char, c_uint, c_ushort, size_t, c_void, c_ulong, time_t, pid_t, uid_t, gid_t, mode_t };\n\npub type ShmSeg = c_ulong;\n\n#[repr(C)]\npub struct XShmSegmentInfo {\n\tpub shmseg: ShmSeg,\n\tpub shmid: c_int,\n\tpub shmaddr: *mut c_char,\n\tpub read_only: Bool,\n}\n\n#[link(name = \"Xext\")]\nextern \"system\" {\n\tpub fn XShmQueryExtension(display: *mut Display) -> Bool;\n\tpub fn XShmQueryVersion(display: *mut Display,\n\t\tmajor_ver: *mut c_int, minor_ver: *mut c_int,\n\t\tshared_pixmaps: *mut Bool) -> Bool;\n\tpub fn XShmCreateImage(display: *mut Display, visual: *mut Visual,\n\t\tdepth: c_uint,\n\t\tformat: c_int, data: *mut c_char,\n\t\tshminfo: *mut XShmSegmentInfo,\n\t\twidth: c_uint, height: c_uint) -> *mut XImage;\n\tpub fn XShmAttach(display: *mut Display, shminfo: *mut XShmSegmentInfo) -> Bool;\n\tpub fn XShmGetImage(display: *mut Display, drawable: Drawable, image: *mut XImage,\n\t\tx: c_int, y: c_int,\n\t\tplane_mask: c_ulong) -> Bool;\n\tpub fn XShmDetach(display: *mut Display, shminfo: *mut XShmSegmentInfo) -> Bool;\n\tfn XShmPixmapFormat(display: *mut Display) -> c_int;\n}\n\npub type key_t = c_int;\npub type shmatt_t = c_ulong;\n\n#[repr(C)]\npub struct ipc_perm {\n\tpub key: key_t,\n\tpub uid: uid_t,\n\tpub gid: gid_t,\n\tpub cuid: uid_t,\n\tpub cgid: gid_t,\n\tpub mode: mode_t,\n\tpub seq: c_ushort,\n}\n\n#[repr(C)]\npub struct shmid_ds {\n\tpub shm_perm: ipc_perm,\n\tpub shm_segsz: size_t,\n\tpub shm_atime: time_t,\n\tpub shm_dtime: time_t,\n\tpub shm_ctime: time_t,\n\tpub shm_cpid: pid_t,\n\tpub shm_lpid: pid_t,\n\tpub shm_nattch: shmatt_t,\n\tshm_unused2: *mut c_void,\n\tshm_unused3: *mut c_void,\n}\n\npub const IPC_PRIVATE: key_t = 0;\npub const IPC_CREAT: c_int = 0o1000;\npub const IPC_RMID: c_int = 0;\n\nextern \"system\" {\n\tpub fn shmget(key: key_t, size: size_t, shm_flag: c_int) -> c_int;\n\tpub fn shmat(shmid: c_int, shmaddr: *const c_void, shm_flag: c_int) -> *mut c_void;\n\tpub fn shmdt(shmaddr: *const c_void) -> c_int;\n\tpub fn shmctl(shmid: c_int, cmd: c_int, buf: *mut shmid_ds) -> c_int;\n}\n\npub const XYBitmap: c_int = 0;\npub const XYPixmap: c_int = 1;\npub const ZPixmap: c_int = 2;\n\npub const AllPlanes: c_ulong = 0;<|endoftext|>"} {"text":"<commit_before>extern crate sdl2;\nextern crate sdl2_sys;\nextern crate libc;\n\npub mod start;\npub mod vid;\n\n\/\/use vid::*;\n\n\/*fn main() {\n let mut rend_contx = start::bootstrap(1280, 720, \"Hello world!\");\n let (mut renderer, mut pump) = rend_contx;\n\n unsafe {sdl2_sys::mouse::SDL_SetRelativeMouseMode(1);}\n\n let screen_w = 1280\/2;\n let screen_h = 720\/2;\n\n let mut camera_x = 0.0;\n let mut camera_y = 0.0;\n let mut camera_z = 3.0; \n\n let mut camera_x_z = 0.0;\n let mut camera_y_z = 0.0;\n let mut camera_x_y = 0.0;\n\n let mut cubes = vec![Cube::gen_new(0.0, 0.0, 4.0, 0.5, 0.5, 0.5),\n Cube::gen_new(0.0, 0.0, 1.0, 2.5, 0.5, 0.5),\n Cube::gen_new(0.0, 0.0, 1.0, 0.5, 0.5, 0.5),\n \/*Cube::gen_new(1.0, 1.0, 1.0, 0.5, 0.5, 0.5),*\/];\n\n let mut lines = Lines::new(vec![\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(-0.5, 0.5, 0.6)],\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n [DepthPoint::new(-0.5, 0.0, 0.6), DepthPoint::new(0.0, 0.0, 0.6)],\n [DepthPoint::new(0.0, 0.0, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n ]);\n\n let mut triangle = Triangle::new(DepthPoint::new(0.0, -1.0, 1.0), DepthPoint::new(-1.0, 1.0, 1.0), 
DepthPoint::new(1.0, 1.0, 1.0));\n\n let horizon = DepthPoint::new(0.0, 0.5, 1.0);\n\n 'game_loop: loop {\n use sdl2::pixels::Color::RGB;\n\n std::thread::sleep(std::time::Duration::from_millis(33));\n \n for event in pump.poll_iter() {\n use sdl2::event::Event::*;\n use sdl2::keyboard::Keycode::*;\n\n match event {\n Quit {..} => {break 'game_loop;},\n KeyDown {keycode, ..} => {\n match keycode {\n Some(Up) => {\n let z = camera_z;\n camera_z = z - 0.05;\n },\n Some(Down) => {\n let z = camera_z;\n camera_z = z + 0.05;\n },\n Some(Left) => {\n let x = camera_x;\n camera_x = x + 0.05;\n },\n Some(Right) => {\n let x = camera_x;\n camera_x = x - 0.05;\n },\n Some(RCtrl) => {\n let y = camera_y;\n camera_y = y + 0.05;\n },\n Some(RShift) => {\n let y = camera_y;\n camera_y = y - 0.05;\n },\n Some(Q) => {\n camera_x_y += 0.1;\n },\n Some(E) => {\n camera_x_y -= 0.1;\n },\n Some(Escape) => {\n break 'game_loop;\n }\n _ => {println!(\"{:?}\", keycode);}\n }\n },\n MouseMotion {xrel, yrel, ..} => {\n camera_x_z = (xrel as f64)\/30.0;\n \/\/camera_y_z = (yrel as f64)\/30.0;\n }\n _ => {}\n }\n }\n\n renderer.set_draw_color(RGB(20, 40, 60));\n renderer.clear();\n renderer.set_draw_color(RGB(200, 200, 200));\n\n \/\/ UNCOMMENT TO TAKE A LOOK AT LINES, IF I REMEMBER CORRECTLY IT WILL MAKE A SHAPE OF LETTER 'P'\n \/\/lines.flat(screen_w, screen_h, &mut renderer,\n \/\/ camera_x, camera_y, camera_z,\n \/\/ camera_x_y, camera_x_z, camera_y_z);\n\n\n \/* \/\/ UNCOMMENT TO TAKE A LOOK AT DRAWING CUBES\n \/\/ should show four cubes, you can define more yourself in cubes vector.\n \/\/ KNOWN BUG: IF A POINT IS OFF SCREEN THEN THE WHOLE LINE ISNT DRAWN AND IT STARTS GOING TO WRONG POINT\n for cube in &mut cubes {\n use std::f64;\n\n cube.flat(screen_w, screen_h, &mut renderer, \n camera_x, camera_y, camera_z, \n camera_x_y, camera_x_z, camera_y_z);\n \n }\/\/ end for cube in cubes *\/\n \n triangle.flat(screen_w, screen_h, &mut renderer,\n camera_x, camera_y, camera_z,\n camera_x_y, camera_x_z, camera_y_z);\n triangle.fill_bottom_flat(screen_w, screen_h);\n\n \/\/ Reset relative mouse move back to 0 as everything was already moved\n camera_x_z = 0.0;\n camera_y_z = 0.0; \n camera_x_y = 0.0;\n \n\n renderer.present(); \n }\n}*\/\n\n<commit_msg>Removed old comments.<commit_after>extern crate sdl2;\nextern crate sdl2_sys;\nextern crate libc;\n\npub mod start;\npub mod vid;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add lib.rs<commit_after>extern crate crypto;\nextern crate rustc_serialize as serialize;\nextern crate rand;\n\nuse std::iter::repeat;\nuse std::fs;\nuse crypto::salsa20::Salsa20;\nuse crypto::symmetriccipher::SynchronousStreamCipher;\nuse serialize::base64;\nuse serialize::base64::{FromBase64, ToBase64};\nuse rand::{OsRng, Rng};\n\npub use model::*;\n\n\nconst DEFAULT_FORMAT: &'static str = \"2\";\nconst DEFAULT_LENGTH: &'static str = \"32\";\nconst SALT_LENGTH: usize = 24;\nconst KEY_LENGTH: usize = 32;\nconst GENERATED_INPUT_LENGTH: usize = 1024;\n\nfn pack(allowed_chars: &str, hash: &[u8]) -> String {\n let source_len = allowed_chars.len();\n let mut output = String::new();\n for &byte in hash {\n let n = (byte % source_len as u8) as usize;\n output.push(allowed_chars.chars().nth(n).unwrap());\n }\n output\n}\n\n\nfn pack_into_password(hash: &[u8], format_choice: u8) -> String {\n match format_choice {\n 1 => {\n pack(\"!\\\"#$%&'()*+,-.\/0123456789:;\\\n <=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\",\n hash)\n }\n 2 => {\n 
pack(\"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789\",\n hash)\n }\n 3 => pack(\"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\", hash),\n 4 => pack(\"0123456789\", hash),\n 5 => pack(\"01\", hash), \/\/ stupider than the above, but not by much\n _ => panic!(\"Invalid format choice {}\", format_choice),\n }\n}\n\npub fn cut_password(pass: Vec<u8>, format: u8, length: u16) -> String {\n let packed_pass = pack_into_password(&*pass, format);\n packed_pass.chars().take(length as usize).collect()\n}\nfn expand_to_at_least(wanted_length: usize, base: Vec<u8>) -> Vec<u8> {\n let mut buf = vec!();\n while buf.len() < wanted_length {\n buf.extend(&base);\n }\n buf\n}\n\n\/\/ Generates a new password bytestream.\n\/\/\n\/\/ Algorithm:\n\/\/\n\/\/ 1. Generate a random SALT_LENGTH byte salt\n\/\/ 2. Repeat title until it reaches at least i bytes\n\/\/ 3. Generate a cipher text using xsalsa20, with above string as input,\n\/\/ using given key and generated salt\n\/\/\n\/\/\nfn generate_password(key: &[u8], salt: Vec<u8>, i: usize) -> Vec<u8> {\n let mut cipher = Salsa20::new_xsalsa20(&key, &salt);\n let clear_text = expand_to_at_least(i, salt);\n\n let mut buf: Vec<u8> = repeat(0).take(i).collect();\n cipher.process(&clear_text[0..i], &mut buf);\n buf\n}\n\nfn generate_salt() -> Vec<u8> {\n let mut rng = OsRng::new().expect(\"OsRng init failed\");\n rng.gen_iter::<u8>().take(SALT_LENGTH).collect()\n}\n\nfn load_or_create_key(filename: &str) -> Vec<u8> {\n match Passwords::load_file(filename) {\n Ok(s) => s.from_base64().expect(\"Key base64 decoding failed\"),\n Err(_) => {\n println!(\"Creating a new key in {}\", filename);\n let mut rng = OsRng::new().expect(\"OsRng init failed\");\n let new_key: Vec<u8> = rng.gen_iter::<u8>().take(KEY_LENGTH).collect();\n let key_base64 = new_key.to_base64(base64::STANDARD);\n Passwords::save_data(&key_base64, filename);\n Passwords::set_file_perms(filename, 0o400);\n new_key\n }\n }\n}\n\nfn create_data_dir(data_dir: &str) {\n fs::create_dir_all(data_dir.to_string())\n .expect(&format!(\"Creating data directory {} failed\", data_dir));\n Passwords::set_file_perms(&data_dir, 0o700);\n}\n\n\n#[test]\nfn test_cut_password() {\n let pass = vec!(1, 1, 88, 240, 120, 150, 13, 21, 34, 55);\n\n assert_eq!(\"11000011\", cut_password(pass.clone(), 5, 8));\n assert_eq!(\"110000\", cut_password(pass.clone(), 5, 6));\n\n assert_eq!(\"11800031\", cut_password(pass.clone(), 4, 8));\n assert_eq!(\"118000\", cut_password(pass.clone(), 4, 6));\n\n assert_eq!(\"bbKGqUnv\", cut_password(pass.clone(), 3, 8));\n assert_eq!(\"bbKGqU\", cut_password(pass.clone(), 3, 6));\n\n assert_eq!(\"bbA26Anv\", cut_password(pass.clone(), 2, 8));\n assert_eq!(\"bbA26A\", cut_password(pass.clone(), 2, 6));\n\n assert_eq!(\"\\\"\\\"yU;Y.6\", cut_password(pass.clone(), 1, 8));\n assert_eq!(\"\\\"\\\"yU;Y\", cut_password(pass.clone(), 1, 6));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove feature core<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests and fix Fast8 for fields with width<8<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test + doc<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Alloc DbPage struct from heap<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add support for Default Opts<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(pkgid): drop cargo:\/\/ support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for 
#33264<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ only-x86_64\n\n#![allow(dead_code, non_upper_case_globals)]\n#![feature(asm)]\n\n#[repr(C)]\npub struct D32x4(f32,f32,f32,f32);\n\nimpl D32x4 {\n fn add(&self, vec: Self) -> Self {\n unsafe {\n let ret: Self;\n asm!(\"\n movaps $1, %xmm1\n movaps $2, %xmm2\n addps %xmm1, %xmm2\n movaps $xmm1, $0\n \"\n : \"=r\"(ret)\n : \"1\"(self), \"2\"(vec)\n : \"xmm1\", \"xmm2\"\n );\n ret\n }\n }\n}\n\nfn main() { }\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add more debug output<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Shim which is passed to Cargo as \"rustdoc\" when running the bootstrap.\n\/\/!\n\/\/! See comments in `src\/bootstrap\/rustc.rs` for more information.\n\n#![deny(warnings)]\n\nextern crate bootstrap;\n\nuse std::env;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n let rustdoc = env::var_os(\"RUSTDOC_REAL\").expect(\"RUSTDOC_REAL was not set\");\n let libdir = env::var_os(\"RUSTC_LIBDIR\").expect(\"RUSTC_LIBDIR was not set\");\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(libdir));\n\n let mut cmd = Command::new(rustdoc);\n cmd.args(&args)\n .arg(\"--cfg\")\n .arg(format!(\"stage{}\", stage))\n .arg(\"--cfg\")\n .arg(\"dox\")\n .arg(\"--sysroot\")\n .arg(sysroot)\n .env(bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap());\n std::process::exit(match cmd.status() {\n Ok(s) => s.code().unwrap_or(1),\n Err(e) => panic!(\"\\n\\nfailed to run {:?}: {}\\n\\n\", cmd, e),\n })\n}\n<commit_msg>rustbuild: Fix compiler docs again<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Shim which is passed to Cargo as \"rustdoc\" when running the bootstrap.\n\/\/!\n\/\/! 
See comments in `src\/bootstrap\/rustc.rs` for more information.\n\n#![deny(warnings)]\n\nextern crate bootstrap;\n\nuse std::env;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n let rustdoc = env::var_os(\"RUSTDOC_REAL\").expect(\"RUSTDOC_REAL was not set\");\n let libdir = env::var_os(\"RUSTC_LIBDIR\").expect(\"RUSTC_LIBDIR was not set\");\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(libdir));\n\n let mut cmd = Command::new(rustdoc);\n cmd.args(&args)\n .arg(\"--cfg\")\n .arg(format!(\"stage{}\", stage))\n .arg(\"--cfg\")\n .arg(\"dox\")\n .arg(\"--sysroot\")\n .arg(sysroot)\n .env(bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap());\n\n \/\/ Pass the `rustbuild` feature flag to crates which rustbuild is\n \/\/ building. See the comment in bootstrap\/lib.rs where this env var is\n \/\/ set for more details.\n if env::var_os(\"RUSTBUILD_UNSTABLE\").is_some() {\n cmd.arg(\"--cfg\").arg(\"rustbuild\");\n }\n\n std::process::exit(match cmd.status() {\n Ok(s) => s.code().unwrap_or(1),\n Err(e) => panic!(\"\\n\\nfailed to run {:?}: {}\\n\\n\", cmd, e),\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Shim which is passed to Cargo as \"rustdoc\" when running the bootstrap.\n\/\/!\n\/\/! 
See comments in `src\/bootstrap\/rustc.rs` for more information.\n\n#![deny(warnings)]\n\nextern crate bootstrap;\n\nuse std::env;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n let rustdoc = env::var_os(\"RUSTDOC_REAL\").expect(\"RUSTDOC_REAL was not set\");\n let libdir = env::var_os(\"RUSTC_LIBDIR\").expect(\"RUSTC_LIBDIR was not set\");\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(libdir));\n\n let mut cmd = Command::new(rustdoc);\n cmd.args(&args)\n .arg(\"--cfg\")\n .arg(format!(\"stage{}\", stage))\n .arg(\"--cfg\")\n .arg(\"dox\")\n .arg(\"--sysroot\")\n .arg(sysroot)\n .env(bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap());\n\n \/\/ Force all crates compiled by this compiler to (a) be unstable and (b)\n \/\/ allow the `rustc_private` feature to link to other unstable crates\n \/\/ also in the sysroot.\n if env::var_os(\"RUSTC_FORCE_UNSTABLE\").is_some() {\n cmd.arg(\"-Z\").arg(\"force-unstable-if-unmarked\");\n }\n if let Some(linker) = env::var_os(\"RUSTC_TARGET_LINKER\") {\n cmd.arg(\"--linker\").arg(linker).arg(\"-Z\").arg(\"unstable-options\");\n }\n\n \/\/ Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick\n \/\/ it up so we can make rustdoc print this into the docs\n if let Some(version) = env::var_os(\"RUSTDOC_CRATE_VERSION\") {\n \/\/ This \"unstable-options\" can be removed when `--crate-version` is stabilized\n cmd.arg(\"-Z\").arg(\"unstable-options\")\n .arg(\"--crate-version\").arg(version);\n }\n\n std::process::exit(match cmd.status() {\n Ok(s) => s.code().unwrap_or(1),\n Err(e) => panic!(\"\\n\\nfailed to run {:?}: {}\\n\\n\", cmd, e),\n })\n}\n<commit_msg>bootstrap: pass --deny-render-differences to rustdoc<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Shim which is passed to Cargo as \"rustdoc\" when running the bootstrap.\n\/\/!\n\/\/! 
See comments in `src\/bootstrap\/rustc.rs` for more information.\n\n#![deny(warnings)]\n\nextern crate bootstrap;\n\nuse std::env;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nfn main() {\n let args = env::args_os().skip(1).collect::<Vec<_>>();\n let rustdoc = env::var_os(\"RUSTDOC_REAL\").expect(\"RUSTDOC_REAL was not set\");\n let libdir = env::var_os(\"RUSTC_LIBDIR\").expect(\"RUSTC_LIBDIR was not set\");\n let stage = env::var(\"RUSTC_STAGE\").expect(\"RUSTC_STAGE was not set\");\n let sysroot = env::var_os(\"RUSTC_SYSROOT\").expect(\"RUSTC_SYSROOT was not set\");\n\n let mut dylib_path = bootstrap::util::dylib_path();\n dylib_path.insert(0, PathBuf::from(libdir));\n\n let mut cmd = Command::new(rustdoc);\n cmd.args(&args)\n .arg(\"--cfg\")\n .arg(format!(\"stage{}\", stage))\n .arg(\"--cfg\")\n .arg(\"dox\")\n .arg(\"--sysroot\")\n .arg(sysroot)\n .env(bootstrap::util::dylib_path_var(),\n env::join_paths(&dylib_path).unwrap());\n\n \/\/ Force all crates compiled by this compiler to (a) be unstable and (b)\n \/\/ allow the `rustc_private` feature to link to other unstable crates\n \/\/ also in the sysroot.\n if env::var_os(\"RUSTC_FORCE_UNSTABLE\").is_some() {\n cmd.arg(\"-Z\").arg(\"force-unstable-if-unmarked\");\n }\n if let Some(linker) = env::var_os(\"RUSTC_TARGET_LINKER\") {\n cmd.arg(\"--linker\").arg(linker).arg(\"-Z\").arg(\"unstable-options\");\n }\n\n \/\/ Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick\n \/\/ it up so we can make rustdoc print this into the docs\n if let Some(version) = env::var_os(\"RUSTDOC_CRATE_VERSION\") {\n \/\/ This \"unstable-options\" can be removed when `--crate-version` is stabilized\n cmd.arg(\"-Z\").arg(\"unstable-options\")\n .arg(\"--crate-version\").arg(version);\n\n \/\/ While we can assume that `-Z unstable-options` is set, let's also force rustdoc to panic\n \/\/ if pulldown rendering differences are found\n cmd.arg(\"--deny-render-differences\");\n }\n\n std::process::exit(match cmd.status() {\n Ok(s) => s.code().unwrap_or(1),\n Err(e) => panic!(\"\\n\\nfailed to run {:?}: {}\\n\\n\", cmd, e),\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Sorry, gods of git<commit_after>use super::*;\nuse redox::*;\nuse core::marker::Sized;\n\npub struct InstructionIterator<'a, I: 'a> {\n pub editor: &'a mut Editor,\n pub iter: &'a mut I,\n}\n\nimpl<'a, I: Iterator<Item = EventOption>> Iterator for InstructionIterator<'a, I> {\n type Item = Inst;\n\n fn next(&mut self) -> Option<Inst> {\n let mut n = 0;\n\n let mut last = '\\0';\n while let Some(EventOption::Key(k)) = self.iter.next() {\n if k.pressed {\n let c = k.character;\n match self.editor.cursor().mode {\n Mode::Primitive(_) => {\n Inst(0, c);\n },\n Mode::Command(_) => {\n n = match c {\n '0' if n != 0 => n * 10,\n '1' => n * 10 + 1,\n '2' => n * 10 + 2,\n '3' => n * 10 + 3,\n '4' => n * 10 + 4,\n '5' => n * 10 + 5,\n '6' => n * 10 + 6,\n '7' => n * 10 + 7,\n '8' => n * 10 + 8,\n '9' => n * 10 + 9,\n _ => {\n last = c;\n break;\n }\n }\n }\n }\n }\n }\n Some(Inst(if n == 0 { 1 } else { n }, last))\n }\n}\n\npub trait ToInstructionIterator\n where Self: Sized {\n fn inst_iter<'a>(&'a mut self, editor: &'a mut Editor) -> InstructionIterator<'a, Self>;\n}\n\nimpl<I> ToInstructionIterator for I\n where I: Iterator<Item = EventOption> + Sized {\n fn inst_iter<'a>(&'a mut self, editor: &'a mut Editor) -> InstructionIterator<'a, Self> {\n InstructionIterator {\n editor: editor,\n iter: self,\n }\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>languages\/rust\/learn-rust\/3-chapter\/16-code.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Exercise 6.3<commit_after>\/\/\/ Exercise 6.3: Write a program that calls uname and prints all the fields\n\/\/\/ in the utsname structure.\n\/\/\/ Compare the output to the output from the uname(1) command.\n\/\/\/\n\/\/\/ Takeaway: couldn't find a method in CStr or CString\n\/\/\/ this came close:\n\/\/\/ https:\/\/gist.github.com\/philippkeller\/89a8a0b47362e86570958dc7a14e84d7\n\/\/\/ but produced Err(FromBytesWithNulError { _a: () })\n\nextern crate libc;\nextern crate itertools;\n\nuse libc::{utsname, uname};\nuse itertools::Itertools;\n\nfn array_to_string(slice: &[i8]) -> String {\n slice.iter().take_while(|&x| *x != 0).map(|&a| a as u8 as char).join(\"\")\n}\n\n#[derive(Debug)]\nstruct UtsName {\n sysname: String,\n nodename: String,\n release: String,\n version: String,\n machine: String,\n}\n\nfn my_uname() -> Option<UtsName> {\n let mut uc: utsname = unsafe { std::mem::uninitialized() };\n if unsafe { uname(&mut uc) } == 0 {\n return Some(UtsName {\n sysname: array_to_string(&uc.sysname),\n nodename: array_to_string(&uc.nodename),\n release: array_to_string(&uc.release),\n version: array_to_string(&uc.version),\n machine: array_to_string(&uc.machine),\n });\n }\n None\n}\n\n\nfn main() {\n println!(\"{:?}\", my_uname().unwrap());\n}\n\n\/\/ Result:\n\/\/\n\/\/ > uname -a\n\/\/ Darwin philippkellr-6.local 15.6.0 Darwin Kernel Version 15.6.0:\n\/\/ Mon Aug 29 20:21:34 PDT 2016;\n\/\/ root:xnu-3248.60.11~1\/RELEASE_X86_64 x86_64\n\/\/\n\/\/ > target\/debug\/e06-03-uname\n\/\/ UtsName { sysname: \"Darwin\",\n\/\/ nodename: \"philippkellr-6.local\",\n\/\/ release: \"15.6.0\",\n\/\/ version: \"Darwin Kernel Version 15.6.0: Mon Aug 29 20:21:34 PDT 2016;\n\/\/ root:xnu-3248.60.11~1\/RELEASE_X86_64\",\n\/\/ machine: \"x86_64\" }\n\/\/\n\/\/ -> it has the same infos, uname -a concatenates the fields with a space inbetween\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make to_string public again<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Define encode sets based on another set.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added Copy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a search-and-replace example.<commit_after>\/\/! Example of basic search-and-replace functionality implemented on top\n\/\/! of a Ropey rope.\n\/\/!\n\/\/! Usage:\n\/\/! search_and_replace <search_pattern> <replacement_text> <input_filepath>\n\/\/!\n\/\/! The file contents with the search-and-replace performed on it is sent to\n\/\/! 
stdout.\n\nextern crate ropey;\n\nuse std::fs::File;\nuse std::io;\n\nuse ropey::{iter::Chars, Rope, RopeSlice};\n\nfn main() {\n \/\/ Get arguments from commandline\n let (search_pattern, replacement_text, filepath) = if std::env::args().count() > 3 {\n (\n std::env::args().nth(1).unwrap(),\n std::env::args().nth(2).unwrap(),\n std::env::args().nth(3).unwrap(),\n )\n } else {\n println!(\n \"Usage:\\n search_and_replace <search_pattern> <replacement_text> <input_filepath>\"\n );\n return;\n };\n\n \/\/ Load file contents into a rope.\n let mut text = Rope::from_reader(io::BufReader::new(File::open(&filepath).unwrap())).expect(\"Cannot read file: either it doesn't exist, file permissions don't allow reading, or is not utf8 text.\");\n\n \/\/ Do the search-and-replace.\n search_and_replace(&mut text, &search_pattern, &replacement_text);\n\n \/\/ Print the new text to stdout.\n println!(\"{}\", text);\n}\n\n\/\/\/ Searches the rope for `search_pattern` and replaces all matches with\n\/\/\/ `replacement_text`.\n\/\/\/\n\/\/\/ There are several ways this could be done: \n\/\/\/\n\/\/\/ 1. Clone the rope and then do the search on the original while replacing\n\/\/\/ on the clone. This isn't as awful as it sounds because the clone\n\/\/\/ operation is constant-time and the two ropes will share most of their\n\/\/\/ storage in typical cases. However, this probably isn't the best\n\/\/\/ general solution because it will use a lot of additional space if a\n\/\/\/ large percentage of the text is bring replaced.\n\/\/\/\n\/\/\/ 2. A two-stage approach: first find and collect all the matches, then\n\/\/\/ do the replacements on the original rope. This is a good solution\n\/\/\/ when a relatively small number of matches are expected. However, if\n\/\/\/ there are a large number of matches then the space to store the\n\/\/\/ matches themselves can become large.\n\/\/\/\n\/\/\/ 3. A piece-meal approach: search for the first match, replace it, then\n\/\/\/ restart the match from the end of the replacement, repeat. This is\n\/\/\/ a good solution for memory-constrained situations. However,\n\/\/\/ computationally it is likely the most expensive when there are a large\n\/\/\/ number of matches and there are costs associated with repeatedly\n\/\/\/ restarting the search.\n\/\/\/\n\/\/\/ 4. Combine approaches #2 and #3: collect a fixed number of matches and\n\/\/\/ replace them, then collect another batch matches and replace them, and\n\/\/\/ so on. This is probably the best general solution, because it\n\/\/\/ combines the best of both worlds: it allows you to collect the matches\n\/\/\/ in a bounded amount of space, and any costs associated with restarting\n\/\/\/ the search are amortized across multiple matches.\n\/\/\/\n\/\/\/ In this implementation we take approach #4 because it seems the\n\/\/\/ all-around best.\nfn search_and_replace(rope: &mut Rope, search_pattern: &str, replacement_text: &str) {\n const BATCH_SIZE: usize = 256;\n\n let replacement_text_len = replacement_text.chars().count();\n\n let mut head = 0; \/\/ Char index of the search\/replace head.\n let mut matches = Vec::with_capacity(BATCH_SIZE);\n loop {\n \/\/ Collect the next batch of matches. 
Note that we don't use\n \/\/ `Iterator::collect()` to collect the batch because we want to\n \/\/ re-use the same Vec to avoid unnecessary allocations.\n matches.clear();\n for m in SearchIter::from_rope_slice(&rope.slice(head..), &search_pattern).take(BATCH_SIZE)\n {\n matches.push(m);\n }\n\n \/\/ If there are no matches, we're done!\n if matches.len() == 0 {\n break;\n }\n\n \/\/ Replace the collected matches.\n let mut index_diff: isize = 0;\n for &(start, end) in matches.iter() {\n \/\/ Get the properly offset indices.\n let start_d = (head as isize + start as isize + index_diff) as usize;\n let end_d = (head as isize + end as isize + index_diff) as usize;\n\n \/\/ Do the replacement.\n rope.remove(start_d..end_d);\n rope.insert(start_d, &replacement_text);\n\n \/\/ Update the index offset.\n let match_len = (end - start) as isize;\n index_diff = index_diff - match_len + replacement_text_len as isize;\n }\n\n \/\/ Update head for next iteration.\n head = (head as isize + index_diff + matches.last().unwrap().1 as isize) as usize;\n }\n}\n\n\/\/\/ An iterator over simple textual matches in a RopeSlice.\n\/\/\/\n\/\/\/ This implementation is somewhat naive, and could be sped up by using a\n\/\/\/ more sophisticated text searching algorithm such as Boyer-Moore or\n\/\/\/ Knuth-Morris-Pratt.\n\/\/\/\n\/\/\/ The important thing, however, is the interface. For example, a regex\n\/\/\/ implementation providing an equivalent interface could easily be dropped\n\/\/\/ in, and the search-and-replace function above would then work with it\n\/\/\/ quite happily.\nstruct SearchIter<'a> {\n char_iter: Chars<'a>,\n search_pattern: &'a str,\n search_pattern_char_len: usize,\n cur_index: usize, \/\/ The current char index of the search head.\n possible_matches: Vec<std::str::Chars<'a>>, \/\/ Tracks where we are in the search pattern for the current possible matches.\n}\n\nimpl<'a> SearchIter<'a> {\n fn from_rope_slice<'b>(slice: &'b RopeSlice, search_pattern: &'b str) -> SearchIter<'b> {\n assert!(\n search_pattern.len() > 0,\n \"Can't search using an empty search pattern.\"\n );\n SearchIter {\n char_iter: slice.chars(),\n search_pattern: search_pattern,\n search_pattern_char_len: search_pattern.chars().count(),\n cur_index: 0,\n possible_matches: Vec::new(),\n }\n }\n}\n\nimpl<'a> Iterator for SearchIter<'a> {\n type Item = (usize, usize);\n\n \/\/ Return the start\/end char indices of the next match.\n fn next(&mut self) -> Option<(usize, usize)> {\n while let Some(next_char) = self.char_iter.next() {\n \/\/ Push current position onto the possible matches list, to seed a\n \/\/ possible beginning of a match.\n self.possible_matches.push(self.search_pattern.chars());\n\n \/\/ Check it against the next character in each of the potential\n \/\/ matches, removing the potential matches that don't match.\n \/\/ We're using indexing instead of iteration here so that we can\n \/\/ remove the possible matches as we go.\n let mut i = 0;\n while i < self.possible_matches.len() {\n let pattern_char = self.possible_matches[i].next().unwrap();\n if next_char == pattern_char {\n if self.possible_matches[i].clone().next() == None {\n self.cur_index += 1;\n let char_match_range = (\n self.cur_index - self.search_pattern_char_len,\n self.cur_index,\n );\n self.possible_matches.clear();\n return Some(char_match_range);\n }\n i += 1;\n } else {\n self.possible_matches.swap_remove(i);\n }\n }\n\n self.cur_index += 1;\n }\n\n return None;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust 
Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse self::InternalDebugLocation::*;\n\nuse super::utils::{debug_context, span_start};\nuse super::metadata::UNKNOWN_COLUMN_NUMBER;\nuse super::FunctionDebugContext;\n\nuse llvm;\nuse llvm::debuginfo::DIScope;\nuse builder::Builder;\n\nuse libc::c_uint;\nuse std::ptr;\nuse syntax_pos::{Span, Pos};\n\n\/\/\/ Sets the current debug location at the beginning of the span.\n\/\/\/\n\/\/\/ Maps to a call to llvm::LLVMSetCurrentDebugLocation(...).\npub fn set_source_location(\n debug_context: &FunctionDebugContext, bx: &Builder, scope: DIScope, span: Span\n) {\n let function_debug_context = match *debug_context {\n FunctionDebugContext::DebugInfoDisabled => return,\n FunctionDebugContext::FunctionWithoutDebugInfo => {\n set_debug_location(bx, UnknownLocation);\n return;\n }\n FunctionDebugContext::RegularContext(ref data) => data\n };\n\n let dbg_loc = if function_debug_context.source_locations_enabled.get() {\n debug!(\"set_source_location: {}\", bx.sess().codemap().span_to_string(span));\n let loc = span_start(bx.cx, span);\n InternalDebugLocation::new(scope, loc.line, loc.col.to_usize())\n } else {\n UnknownLocation\n };\n set_debug_location(bx, dbg_loc);\n}\n\n\/\/\/ Enables emitting source locations for the given functions.\n\/\/\/\n\/\/\/ Since we don't want source locations to be emitted for the function prelude,\n\/\/\/ they are disabled when beginning to codegen a new function. This functions\n\/\/\/ switches source location emitting on and must therefore be called before the\n\/\/\/ first real statement\/expression of the function is codegened.\npub fn start_emitting_source_locations(dbg_context: &FunctionDebugContext) {\n match *dbg_context {\n FunctionDebugContext::RegularContext(ref data) => {\n data.source_locations_enabled.set(true)\n },\n _ => { \/* safe to ignore *\/ }\n }\n}\n\n\n#[derive(Copy, Clone, PartialEq)]\npub enum InternalDebugLocation {\n KnownLocation { scope: DIScope, line: usize, col: usize },\n UnknownLocation\n}\n\nimpl InternalDebugLocation {\n pub fn new(scope: DIScope, line: usize, col: usize) -> InternalDebugLocation {\n KnownLocation {\n scope,\n line,\n col,\n }\n }\n}\n\npub fn set_debug_location(bx: &Builder, debug_location: InternalDebugLocation) {\n let metadata_node = match debug_location {\n KnownLocation { scope, line, .. } => {\n \/\/ Always set the column to zero like Clang and GCC\n let col = UNKNOWN_COLUMN_NUMBER;\n debug!(\"setting debug location to {} {}\", line, col);\n\n unsafe {\n llvm::LLVMRustDIBuilderCreateDebugLocation(\n debug_context(bx.cx).llcontext,\n line as c_uint,\n col as c_uint,\n scope,\n ptr::null_mut())\n }\n }\n UnknownLocation => {\n debug!(\"clearing debug location \");\n ptr::null_mut()\n }\n };\n\n unsafe {\n llvm::LLVMSetCurrentDebugLocation(bx.llbuilder, metadata_node);\n }\n}\n<commit_msg>Rollup merge of #51980 - est31:columns, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse self::InternalDebugLocation::*;\n\nuse super::utils::{debug_context, span_start};\nuse super::metadata::UNKNOWN_COLUMN_NUMBER;\nuse super::FunctionDebugContext;\n\nuse llvm;\nuse llvm::debuginfo::DIScope;\nuse builder::Builder;\n\nuse libc::c_uint;\nuse std::ptr;\nuse syntax_pos::{Span, Pos};\n\n\/\/\/ Sets the current debug location at the beginning of the span.\n\/\/\/\n\/\/\/ Maps to a call to llvm::LLVMSetCurrentDebugLocation(...).\npub fn set_source_location(\n debug_context: &FunctionDebugContext, bx: &Builder, scope: DIScope, span: Span\n) {\n let function_debug_context = match *debug_context {\n FunctionDebugContext::DebugInfoDisabled => return,\n FunctionDebugContext::FunctionWithoutDebugInfo => {\n set_debug_location(bx, UnknownLocation);\n return;\n }\n FunctionDebugContext::RegularContext(ref data) => data\n };\n\n let dbg_loc = if function_debug_context.source_locations_enabled.get() {\n debug!(\"set_source_location: {}\", bx.sess().codemap().span_to_string(span));\n let loc = span_start(bx.cx, span);\n InternalDebugLocation::new(scope, loc.line, loc.col.to_usize())\n } else {\n UnknownLocation\n };\n set_debug_location(bx, dbg_loc);\n}\n\n\/\/\/ Enables emitting source locations for the given functions.\n\/\/\/\n\/\/\/ Since we don't want source locations to be emitted for the function prelude,\n\/\/\/ they are disabled when beginning to codegen a new function. This functions\n\/\/\/ switches source location emitting on and must therefore be called before the\n\/\/\/ first real statement\/expression of the function is codegened.\npub fn start_emitting_source_locations(dbg_context: &FunctionDebugContext) {\n match *dbg_context {\n FunctionDebugContext::RegularContext(ref data) => {\n data.source_locations_enabled.set(true)\n },\n _ => { \/* safe to ignore *\/ }\n }\n}\n\n\n#[derive(Copy, Clone, PartialEq)]\npub enum InternalDebugLocation {\n KnownLocation { scope: DIScope, line: usize, col: usize },\n UnknownLocation\n}\n\nimpl InternalDebugLocation {\n pub fn new(scope: DIScope, line: usize, col: usize) -> InternalDebugLocation {\n KnownLocation {\n scope,\n line,\n col,\n }\n }\n}\n\npub fn set_debug_location(bx: &Builder, debug_location: InternalDebugLocation) {\n let metadata_node = match debug_location {\n KnownLocation { scope, line, col } => {\n \/\/ For MSVC, set the column number to zero.\n \/\/ Otherwise, emit it. 
This mimics clang behaviour.\n \/\/ See discussion in https:\/\/github.com\/rust-lang\/rust\/issues\/42921\n let col_used = if bx.cx.sess().target.target.options.is_like_msvc {\n UNKNOWN_COLUMN_NUMBER\n } else {\n col as c_uint\n };\n debug!(\"setting debug location to {} {}\", line, col);\n\n unsafe {\n llvm::LLVMRustDIBuilderCreateDebugLocation(\n debug_context(bx.cx).llcontext,\n line as c_uint,\n col_used,\n scope,\n ptr::null_mut())\n }\n }\n UnknownLocation => {\n debug!(\"clearing debug location \");\n ptr::null_mut()\n }\n };\n\n unsafe {\n llvm::LLVMSetCurrentDebugLocation(bx.llbuilder, metadata_node);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added basic integration tests for .flatten_ok() without size_hint tests<commit_after>use itertools::{assert_equal, Itertools};\nuse std::{ops::Range, vec::IntoIter};\n\nfn mix_data() -> IntoIter<Result<Range<i32>, bool>> {\n vec![Ok(0..2), Err(false), Ok(2..4), Err(true), Ok(4..6)].into_iter()\n}\n\nfn ok_data() -> IntoIter<Result<Range<i32>, bool>> {\n vec![Ok(0..2), Ok(2..4), Ok(4..6)].into_iter()\n}\n\n#[test]\nfn flatten_ok_mixed_expected_forward() {\n assert_equal(\n mix_data().flatten_ok(),\n vec![\n Ok(0),\n Ok(1),\n Err(false),\n Ok(2),\n Ok(3),\n Err(true),\n Ok(4),\n Ok(5),\n ],\n );\n}\n\n#[test]\nfn flatten_ok_mixed_expected_reverse() {\n assert_equal(\n mix_data().flatten_ok().rev(),\n vec![\n Ok(5),\n Ok(4),\n Err(true),\n Ok(3),\n Ok(2),\n Err(false),\n Ok(1),\n Ok(0),\n ],\n );\n}\n\n#[test]\nfn flatten_ok_collect_mixed_forward() {\n assert_eq!(\n mix_data().flatten_ok().collect::<Result<Vec<_>, _>>(),\n Err(false)\n );\n}\n\n#[test]\nfn flatten_ok_collect_mixed_reverse() {\n assert_eq!(\n mix_data().flatten_ok().rev().collect::<Result<Vec<_>, _>>(),\n Err(true)\n );\n}\n\n#[test]\nfn flatten_ok_collect_ok_forward() {\n assert_eq!(\n ok_data().flatten_ok().collect::<Result<Vec<_>, _>>(),\n Ok((0..6).collect())\n );\n}\n\n#[test]\nfn flatten_ok_collect_ok_reverse() {\n assert_eq!(\n ok_data().flatten_ok().rev().collect::<Result<Vec<_>, _>>(),\n Ok((0..6).rev().collect())\n );\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Headers container, and common header fields.\n\/\/!\n\/\/! hyper has the opinion that Headers should be strongly-typed, because that's\n\/\/! why we're using Rust in the first place. To set or get any header, an object\n\/\/! must implement the `Header` trait from this module. Several common headers\n\/\/! are already provided, such as `Host`, `ContentType`, `UserAgent`, and others.\nuse std::ascii::{AsciiExt, ASCII_LOWER_MAP};\nuse std::fmt::{mod, Show};\nuse std::hash;\nuse std::intrinsics::TypeId;\nuse std::mem::{transmute, transmute_copy};\nuse std::raw::TraitObject;\nuse std::str::{from_utf8, SendStr, Slice, Owned};\nuse std::string::raw;\nuse std::collections::hashmap::{HashMap, Entries};\n\nuse uany::UncheckedAnyDowncast;\nuse typeable::Typeable;\n\nuse http::read_header;\nuse {HttpResult};\n\n\/\/\/ Common Headers\npub mod common;\n\n\/\/\/ A trait for any object that will represent a header field and value.\npub trait Header: Typeable {\n \/\/\/ Returns the name of the header field this belongs to.\n \/\/\/\n \/\/\/ The market `Option` is to hint to the type system which implementation\n \/\/\/ to call. 
This can be done away with once UFCS arrives.\n fn header_name(marker: Option<Self>) -> &'static str;\n \/\/\/ Parse a header from a raw stream of bytes.\n \/\/\/\n \/\/\/ It's possible that a request can include a header field more than once,\n \/\/\/ and in that case, the slice will have a length greater than 1. However,\n \/\/\/ it's not necessarily the case that a Header is *allowed* to have more\n \/\/\/ than one field value. If that's the case, you **should** return `None`\n \/\/\/ if `raw.len() > 1`.\n fn parse_header(raw: &[Vec<u8>]) -> Option<Self>;\n \/\/\/ Format a header to be output into a TcpStream.\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result;\n}\n\n#[doc(hidden)]\ntrait Is {\n fn is<T: 'static>(self) -> bool;\n}\n\nimpl<'a> Is for &'a Header {\n fn is<T: 'static>(self) -> bool {\n self.get_type() == TypeId::of::<T>()\n }\n}\n\nimpl<'a> UncheckedAnyDowncast<'a> for &'a Header {\n #[inline]\n unsafe fn downcast_ref_unchecked<T: 'static>(self) -> &'a T {\n let to: TraitObject = transmute_copy(&self);\n transmute(to.data)\n }\n}\n\nfn header_name<T: Header>() -> &'static str {\n let name = Header::header_name(None::<T>);\n name\n}\n\n\/\/\/ A map of header fields on requests and responses.\npub struct Headers {\n data: HashMap<CaseInsensitive, Item>\n}\n\nimpl Headers {\n\n \/\/\/ Creates a new, empty headers map.\n pub fn new() -> Headers {\n Headers {\n data: HashMap::new()\n }\n }\n\n #[doc(hidden)]\n pub fn from_raw<R: Reader>(rdr: &mut R) -> HttpResult<Headers> {\n let mut headers = Headers::new();\n loop {\n match try!(read_header(rdr)) {\n Some((name, value)) => {\n \/\/ read_header already checks that name is a token, which \n \/\/ means its safe utf8\n let name = unsafe {\n raw::from_utf8(name)\n };\n let name = CaseInsensitive(Owned(name));\n let item = headers.data.find_or_insert(name, Raw(vec![]));\n match *item {\n Raw(ref mut raw) => raw.push(value),\n \/\/ Unreachable\n _ => {}\n };\n },\n None => break,\n }\n }\n Ok(headers)\n }\n\n \/\/\/ Set a header field to the corresponding value.\n \/\/\/\n \/\/\/ The field is determined by the type of the value being set.\n pub fn set<H: Header>(&mut self, value: H) {\n self.data.insert(CaseInsensitive(Slice(header_name::<H>())), Typed(box value as Box<Header>));\n }\n\n \/\/\/ Get a clone of the header field's value, if it exists.\n \/\/\/\n \/\/\/ Example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # use hyper::header::common::ContentType;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let content_type = headers.get::<ContentType>();\n \/\/\/ ```\n pub fn get<H: Header + Clone>(&mut self) -> Option<H> {\n self.get_ref().map(|v: &H| v.clone())\n }\n\n \/\/\/ Access the raw value of a header, if it exists and has not\n \/\/\/ been already parsed.\n \/\/\/\n \/\/\/ If the header field has already been parsed into a typed header,\n \/\/\/ then you *must* access it through that representation.\n \/\/\/\n \/\/\/ Example:\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let raw_content_type = unsafe { headers.get_raw(\"content-type\") };\n \/\/\/ ```\n pub unsafe fn get_raw(&self, name: &'static str) -> Option<&[Vec<u8>]> {\n self.data.find(&CaseInsensitive(Slice(name))).and_then(|item| {\n match *item {\n Raw(ref raw) => Some(raw.as_slice()),\n _ => None\n }\n })\n }\n\n \/\/\/ Get a reference to the header field's value, if it exists.\n pub fn get_ref<H: Header>(&mut self) -> Option<&H> {\n 
self.data.find_mut(&CaseInsensitive(Slice(header_name::<H>()))).and_then(|item| {\n debug!(\"get_ref, name={}, val={}\", header_name::<H>(), item);\n let header = match *item {\n \/\/ Huge borrowck hack here, should be refactored to just return here.\n Typed(ref typed) if typed.is::<H>() => None,\n \/\/ Typed, wrong type\n Typed(_) => return None,\n Raw(ref raw) => match Header::parse_header(raw.as_slice()) {\n Some::<H>(h) => {\n Some(h)\n },\n None => return None\n },\n };\n\n match header {\n Some(header) => {\n *item = Typed(box header as Box<Header>);\n Some(item)\n },\n None => {\n Some(item)\n }\n }\n }).and_then(|item| {\n debug!(\"downcasting {}\", item);\n let ret = match *item {\n Typed(ref val) => {\n unsafe { Some(val.downcast_ref_unchecked()) }\n },\n _ => unreachable!()\n };\n ret\n })\n }\n\n \/\/\/ Returns a boolean of whether a certain header is in the map.\n \/\/\/\n \/\/\/ Example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # use hyper::header::common::ContentType;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let has_type = headers.has::<ContentType>();\n \/\/\/ ```\n pub fn has<H: Header>(&self) -> bool {\n self.data.contains_key(&CaseInsensitive(Slice(header_name::<H>())))\n }\n\n \/\/\/ Removes a header from the map, if one existed.\n \/\/\/ Returns true if a header has been removed.\n pub fn remove<H: Header>(&mut self) -> bool {\n self.data.remove(&CaseInsensitive(Slice(Header::header_name(None::<H>))))\n }\n\n \/\/\/ Returns an iterator over the header fields.\n pub fn iter<'a>(&'a self) -> HeadersItems<'a> {\n HeadersItems {\n inner: self.data.iter()\n }\n }\n}\n\nimpl fmt::Show for Headers {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(\"Headers {\\n\".fmt(fmt));\n for (k, v) in self.iter() {\n try!(write!(fmt, \"\\t{}: {}\\n\", k, v));\n }\n \"}\".fmt(fmt)\n }\n}\n\n\/\/\/ An `Iterator` over the fields in a `Headers` map.\npub struct HeadersItems<'a> {\n inner: Entries<'a, CaseInsensitive, Item>\n}\n\nimpl<'a> Iterator<(&'a str, HeaderView<'a>)> for HeadersItems<'a> {\n fn next(&mut self) -> Option<(&'a str, HeaderView<'a>)> {\n match self.inner.next() {\n Some((k, v)) => Some((k.as_slice(), HeaderView(v))),\n None => None\n }\n }\n}\n\n\/\/\/ Returned with the `HeadersItems` iterator.\npub struct HeaderView<'a>(&'a Item);\n\nimpl<'a> fmt::Show for HeaderView<'a> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let HeaderView(item) = *self;\n item.fmt(fmt)\n }\n}\n\nimpl Collection for Headers {\n fn len(&self) -> uint {\n self.data.len()\n }\n}\n\nimpl Mutable for Headers {\n fn clear(&mut self) {\n self.data.clear()\n }\n}\n\nenum Item {\n Raw(Vec<Vec<u8>>),\n Typed(Box<Header>)\n}\n\nimpl fmt::Show for Item {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Typed(ref h) => h.fmt_header(fmt),\n Raw(ref raw) => {\n for part in raw.iter() {\n try!(fmt.write(part.as_slice()));\n }\n Ok(())\n },\n }\n }\n}\n\nstruct CaseInsensitive(SendStr);\n\nimpl Str for CaseInsensitive {\n fn as_slice(&self) -> &str {\n let CaseInsensitive(ref s) = *self;\n s.as_slice()\n }\n\n}\n\nimpl fmt::Show for CaseInsensitive {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n self.as_slice().fmt(fmt)\n }\n}\n\nimpl PartialEq for CaseInsensitive {\n fn eq(&self, other: &CaseInsensitive) -> bool {\n self.as_slice().eq_ignore_ascii_case(other.as_slice())\n }\n}\n\nimpl Eq for CaseInsensitive {}\n\nimpl<H: hash::Writer> hash::Hash<H> for CaseInsensitive {\n #[inline]\n fn 
hash(&self, hasher: &mut H) {\n for byte in self.as_slice().bytes() {\n hasher.write([ASCII_LOWER_MAP[byte as uint]].as_slice());\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::io::MemReader;\n use std::fmt;\n use std::str::Slice;\n use std::hash::sip::hash;\n use mime::{Mime, Text, Plain};\n use super::CaseInsensitive;\n use super::{Headers, Header};\n use super::common::{ContentLength, ContentType};\n\n fn mem(s: &str) -> MemReader {\n MemReader::new(s.as_bytes().to_vec())\n }\n\n #[test]\n fn test_case_insensitive() {\n let a = CaseInsensitive(Slice(\"foobar\"));\n let b = CaseInsensitive(Slice(\"FOOBAR\"));\n\n assert_eq!(a, b);\n assert_eq!(hash(&a), hash(&b));\n }\n\n #[test]\n fn test_from_raw() {\n let mut headers = Headers::from_raw(&mut mem(\"Content-Length: 10\\r\\n\\r\\n\")).unwrap();\n assert_eq!(headers.get_ref(), Some(&ContentLength(10)));\n }\n\n #[test]\n fn test_content_type() {\n let content_type = Header::parse_header([\"text\/plain\".as_bytes().to_vec()].as_slice());\n assert_eq!(content_type, Some(ContentType(Mime(Text, Plain, vec![]))));\n }\n\n #[deriving(Clone)]\n struct CrazyLength(Option<bool>, uint);\n\n impl Header for CrazyLength {\n fn header_name(_: Option<CrazyLength>) -> &'static str {\n \"content-length\"\n }\n fn parse_header(raw: &[Vec<u8>]) -> Option<CrazyLength> {\n use std::str::from_utf8;\n use std::from_str::FromStr;\n\n if raw.len() != 1 {\n return None;\n }\n \/\/ we JUST checked that raw.len() == 1, so raw[0] WILL exist.\n match from_utf8(unsafe { raw.as_slice().unsafe_get(0).as_slice() }) {\n Some(s) => FromStr::from_str(s),\n None => None\n }.map(|u| CrazyLength(Some(false), u))\n }\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n use std::fmt::Show;\n let CrazyLength(_, ref value) = *self;\n value.fmt(fmt)\n }\n }\n\n #[test]\n fn test_different_structs_for_same_header() {\n let mut headers = Headers::from_raw(&mut mem(\"Content-Length: 10\\r\\n\\r\\n\")).unwrap();\n let ContentLength(_) = headers.get::<ContentLength>().unwrap();\n assert!(headers.get::<CrazyLength>().is_none());\n }\n}\n<commit_msg>Use the entry API instead of find_or_insert.<commit_after>\/\/! Headers container, and common header fields.\n\/\/!\n\/\/! hyper has the opinion that Headers should be strongly-typed, because that's\n\/\/! why we're using Rust in the first place. To set or get any header, an object\n\/\/! must implement the `Header` trait from this module. Several common headers\n\/\/! are already provided, such as `Host`, `ContentType`, `UserAgent`, and others.\nuse std::ascii::{AsciiExt, ASCII_LOWER_MAP};\nuse std::fmt::{mod, Show};\nuse std::hash;\nuse std::intrinsics::TypeId;\nuse std::mem::{transmute, transmute_copy};\nuse std::raw::TraitObject;\nuse std::str::{from_utf8, SendStr, Slice, Owned};\nuse std::string::raw;\nuse std::collections::hashmap::{HashMap, Entries, Occupied, Vacant};\n\nuse uany::UncheckedAnyDowncast;\nuse typeable::Typeable;\n\nuse http::read_header;\nuse {HttpResult};\n\n\/\/\/ Common Headers\npub mod common;\n\n\/\/\/ A trait for any object that will represent a header field and value.\npub trait Header: Typeable {\n \/\/\/ Returns the name of the header field this belongs to.\n \/\/\/\n \/\/\/ The market `Option` is to hint to the type system which implementation\n \/\/\/ to call. 
This can be done away with once UFCS arrives.\n fn header_name(marker: Option<Self>) -> &'static str;\n \/\/\/ Parse a header from a raw stream of bytes.\n \/\/\/\n \/\/\/ It's possible that a request can include a header field more than once,\n \/\/\/ and in that case, the slice will have a length greater than 1. However,\n \/\/\/ it's not necessarily the case that a Header is *allowed* to have more\n \/\/\/ than one field value. If that's the case, you **should** return `None`\n \/\/\/ if `raw.len() > 1`.\n fn parse_header(raw: &[Vec<u8>]) -> Option<Self>;\n \/\/\/ Format a header to be output into a TcpStream.\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result;\n}\n\n#[doc(hidden)]\ntrait Is {\n fn is<T: 'static>(self) -> bool;\n}\n\nimpl<'a> Is for &'a Header {\n fn is<T: 'static>(self) -> bool {\n self.get_type() == TypeId::of::<T>()\n }\n}\n\nimpl<'a> UncheckedAnyDowncast<'a> for &'a Header {\n #[inline]\n unsafe fn downcast_ref_unchecked<T: 'static>(self) -> &'a T {\n let to: TraitObject = transmute_copy(&self);\n transmute(to.data)\n }\n}\n\nfn header_name<T: Header>() -> &'static str {\n let name = Header::header_name(None::<T>);\n name\n}\n\n\/\/\/ A map of header fields on requests and responses.\npub struct Headers {\n data: HashMap<CaseInsensitive, Item>\n}\n\nimpl Headers {\n\n \/\/\/ Creates a new, empty headers map.\n pub fn new() -> Headers {\n Headers {\n data: HashMap::new()\n }\n }\n\n #[doc(hidden)]\n pub fn from_raw<R: Reader>(rdr: &mut R) -> HttpResult<Headers> {\n let mut headers = Headers::new();\n loop {\n match try!(read_header(rdr)) {\n Some((name, value)) => {\n \/\/ read_header already checks that name is a token, which \n \/\/ means its safe utf8\n let name = unsafe {\n raw::from_utf8(name)\n };\n\n let item = match headers.data.entry(CaseInsensitive(Owned(name))) {\n Vacant(entry) => entry.set(Raw(vec![])),\n Occupied(entry) => entry.into_mut()\n };\n\n match *item {\n Raw(ref mut raw) => raw.push(value),\n \/\/ Unreachable\n _ => {}\n };\n },\n None => break,\n }\n }\n Ok(headers)\n }\n\n \/\/\/ Set a header field to the corresponding value.\n \/\/\/\n \/\/\/ The field is determined by the type of the value being set.\n pub fn set<H: Header>(&mut self, value: H) {\n self.data.insert(CaseInsensitive(Slice(header_name::<H>())), Typed(box value as Box<Header>));\n }\n\n \/\/\/ Get a clone of the header field's value, if it exists.\n \/\/\/\n \/\/\/ Example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # use hyper::header::common::ContentType;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let content_type = headers.get::<ContentType>();\n \/\/\/ ```\n pub fn get<H: Header + Clone>(&mut self) -> Option<H> {\n self.get_ref().map(|v: &H| v.clone())\n }\n\n \/\/\/ Access the raw value of a header, if it exists and has not\n \/\/\/ been already parsed.\n \/\/\/\n \/\/\/ If the header field has already been parsed into a typed header,\n \/\/\/ then you *must* access it through that representation.\n \/\/\/\n \/\/\/ Example:\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let raw_content_type = unsafe { headers.get_raw(\"content-type\") };\n \/\/\/ ```\n pub unsafe fn get_raw(&self, name: &'static str) -> Option<&[Vec<u8>]> {\n self.data.find(&CaseInsensitive(Slice(name))).and_then(|item| {\n match *item {\n Raw(ref raw) => Some(raw.as_slice()),\n _ => None\n }\n })\n }\n\n \/\/\/ Get a reference to the header field's value, if it exists.\n pub fn get_ref<H: 
Header>(&mut self) -> Option<&H> {\n self.data.find_mut(&CaseInsensitive(Slice(header_name::<H>()))).and_then(|item| {\n debug!(\"get_ref, name={}, val={}\", header_name::<H>(), item);\n let header = match *item {\n \/\/ Huge borrowck hack here, should be refactored to just return here.\n Typed(ref typed) if typed.is::<H>() => None,\n \/\/ Typed, wrong type\n Typed(_) => return None,\n Raw(ref raw) => match Header::parse_header(raw.as_slice()) {\n Some::<H>(h) => {\n Some(h)\n },\n None => return None\n },\n };\n\n match header {\n Some(header) => {\n *item = Typed(box header as Box<Header>);\n Some(item)\n },\n None => {\n Some(item)\n }\n }\n }).and_then(|item| {\n debug!(\"downcasting {}\", item);\n let ret = match *item {\n Typed(ref val) => {\n unsafe { Some(val.downcast_ref_unchecked()) }\n },\n _ => unreachable!()\n };\n ret\n })\n }\n\n \/\/\/ Returns a boolean of whether a certain header is in the map.\n \/\/\/\n \/\/\/ Example:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # use hyper::header::Headers;\n \/\/\/ # use hyper::header::common::ContentType;\n \/\/\/ # let mut headers = Headers::new();\n \/\/\/ let has_type = headers.has::<ContentType>();\n \/\/\/ ```\n pub fn has<H: Header>(&self) -> bool {\n self.data.contains_key(&CaseInsensitive(Slice(header_name::<H>())))\n }\n\n \/\/\/ Removes a header from the map, if one existed.\n \/\/\/ Returns true if a header has been removed.\n pub fn remove<H: Header>(&mut self) -> bool {\n self.data.remove(&CaseInsensitive(Slice(Header::header_name(None::<H>))))\n }\n\n \/\/\/ Returns an iterator over the header fields.\n pub fn iter<'a>(&'a self) -> HeadersItems<'a> {\n HeadersItems {\n inner: self.data.iter()\n }\n }\n}\n\nimpl fmt::Show for Headers {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(\"Headers {\\n\".fmt(fmt));\n for (k, v) in self.iter() {\n try!(write!(fmt, \"\\t{}: {}\\n\", k, v));\n }\n \"}\".fmt(fmt)\n }\n}\n\n\/\/\/ An `Iterator` over the fields in a `Headers` map.\npub struct HeadersItems<'a> {\n inner: Entries<'a, CaseInsensitive, Item>\n}\n\nimpl<'a> Iterator<(&'a str, HeaderView<'a>)> for HeadersItems<'a> {\n fn next(&mut self) -> Option<(&'a str, HeaderView<'a>)> {\n match self.inner.next() {\n Some((k, v)) => Some((k.as_slice(), HeaderView(v))),\n None => None\n }\n }\n}\n\n\/\/\/ Returned with the `HeadersItems` iterator.\npub struct HeaderView<'a>(&'a Item);\n\nimpl<'a> fmt::Show for HeaderView<'a> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let HeaderView(item) = *self;\n item.fmt(fmt)\n }\n}\n\nimpl Collection for Headers {\n fn len(&self) -> uint {\n self.data.len()\n }\n}\n\nimpl Mutable for Headers {\n fn clear(&mut self) {\n self.data.clear()\n }\n}\n\nenum Item {\n Raw(Vec<Vec<u8>>),\n Typed(Box<Header>)\n}\n\nimpl fmt::Show for Item {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Typed(ref h) => h.fmt_header(fmt),\n Raw(ref raw) => {\n for part in raw.iter() {\n try!(fmt.write(part.as_slice()));\n }\n Ok(())\n },\n }\n }\n}\n\nstruct CaseInsensitive(SendStr);\n\nimpl Str for CaseInsensitive {\n fn as_slice(&self) -> &str {\n let CaseInsensitive(ref s) = *self;\n s.as_slice()\n }\n\n}\n\nimpl fmt::Show for CaseInsensitive {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n self.as_slice().fmt(fmt)\n }\n}\n\nimpl PartialEq for CaseInsensitive {\n fn eq(&self, other: &CaseInsensitive) -> bool {\n self.as_slice().eq_ignore_ascii_case(other.as_slice())\n }\n}\n\nimpl Eq for CaseInsensitive {}\n\nimpl<H: hash::Writer> hash::Hash<H> for 
CaseInsensitive {\n #[inline]\n fn hash(&self, hasher: &mut H) {\n for byte in self.as_slice().bytes() {\n hasher.write([ASCII_LOWER_MAP[byte as uint]].as_slice());\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::io::MemReader;\n use std::fmt;\n use std::str::Slice;\n use std::hash::sip::hash;\n use mime::{Mime, Text, Plain};\n use super::CaseInsensitive;\n use super::{Headers, Header};\n use super::common::{ContentLength, ContentType};\n\n fn mem(s: &str) -> MemReader {\n MemReader::new(s.as_bytes().to_vec())\n }\n\n #[test]\n fn test_case_insensitive() {\n let a = CaseInsensitive(Slice(\"foobar\"));\n let b = CaseInsensitive(Slice(\"FOOBAR\"));\n\n assert_eq!(a, b);\n assert_eq!(hash(&a), hash(&b));\n }\n\n #[test]\n fn test_from_raw() {\n let mut headers = Headers::from_raw(&mut mem(\"Content-Length: 10\\r\\n\\r\\n\")).unwrap();\n assert_eq!(headers.get_ref(), Some(&ContentLength(10)));\n }\n\n #[test]\n fn test_content_type() {\n let content_type = Header::parse_header([\"text\/plain\".as_bytes().to_vec()].as_slice());\n assert_eq!(content_type, Some(ContentType(Mime(Text, Plain, vec![]))));\n }\n\n #[deriving(Clone)]\n struct CrazyLength(Option<bool>, uint);\n\n impl Header for CrazyLength {\n fn header_name(_: Option<CrazyLength>) -> &'static str {\n \"content-length\"\n }\n fn parse_header(raw: &[Vec<u8>]) -> Option<CrazyLength> {\n use std::str::from_utf8;\n use std::from_str::FromStr;\n\n if raw.len() != 1 {\n return None;\n }\n \/\/ we JUST checked that raw.len() == 1, so raw[0] WILL exist.\n match from_utf8(unsafe { raw.as_slice().unsafe_get(0).as_slice() }) {\n Some(s) => FromStr::from_str(s),\n None => None\n }.map(|u| CrazyLength(Some(false), u))\n }\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n use std::fmt::Show;\n let CrazyLength(_, ref value) = *self;\n value.fmt(fmt)\n }\n }\n\n #[test]\n fn test_different_structs_for_same_header() {\n let mut headers = Headers::from_raw(&mut mem(\"Content-Length: 10\\r\\n\\r\\n\")).unwrap();\n let ContentLength(_) = headers.get::<ContentLength>().unwrap();\n assert!(headers.get::<CrazyLength>().is_none());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add run-pass test suggested by @joshtriplett<commit_after>\/\/ run-pass\n\nfn main() {\n\tlet x = 1;\n\n\t#[cfg(FALSE)]\n\tif false {\n\t\tx = 2;\n\t} else if true {\n\t\tx = 3;\n\t} else {\n\t\tx = 4;\n\t}\n\tassert_eq!(x, 1);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test accessing the module level int\/float consts<commit_after>\/\/ run-pass\n\n\/\/ Make sure the module level constants are still there and accessible even after\n\/\/ the corresponding associated constants have been added, and later stabilized.\nuse std::{u16, f32};\n\nfn main() {\n let _ = u16::MAX;\n let _ = f32::EPSILON;\n let _ = std::f64::MANTISSA_DIGITS;\n}\n<|endoftext|>"} {"text":"<commit_before>mod array;\npub mod date_and_time;\npub mod floats;\nmod integers;\nmod primitives;\n#[cfg(feature = \"uuid\")]\nmod uuid;\n\n#[doc(hidden)]\npub mod sql_types {\n #[derive(Debug, Clone, Copy, Default)] pub struct Oid;\n #[derive(Debug, Clone, Copy, Default)] pub struct Array<T>(T);\n pub type SmallSerial = ::types::SmallInt;\n pub type Serial = ::types::Integer;\n pub type BigSerial = ::types::BigInt;\n #[cfg(feature = \"uuid\")]\n #[derive(Debug, Clone, Copy, Default)] pub struct Uuid;\n pub type Bytea = ::types::Binary;\n pub type Bpchar = ::types::VarChar;\n}\n<commit_msg>Hide internal type<commit_after>mod array;\npub mod date_and_time;\npub mod floats;\nmod 
integers;\nmod primitives;\n#[cfg(feature = \"uuid\")]\nmod uuid;\n\n#[doc(hidden)]\npub mod sql_types {\n #[derive(Debug, Clone, Copy, Default)] pub struct Oid;\n #[derive(Debug, Clone, Copy, Default)] pub struct Array<T>(T);\n pub type SmallSerial = ::types::SmallInt;\n pub type Serial = ::types::Integer;\n pub type BigSerial = ::types::BigInt;\n #[cfg(feature = \"uuid\")]\n #[derive(Debug, Clone, Copy, Default)] pub struct Uuid;\n pub type Bytea = ::types::Binary;\n #[doc(hidden)]\n pub type Bpchar = ::types::VarChar;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Module dedicated to handling Subscriber and ReSubscriber functionality<commit_after>\/*\nTristen Horton\ntristen@tristenhorton.com\n2017-04-06\n*\/\n\npub struct NewSubscriber {\n\tchannel: String,\n\tname: String,\n\ttwitch_prime: bool\n}\n\nimpl NewSubscriber {\n\tpub fn from(irc: String) -> NewSubscriber {\n\t\tlet channel = irc.split(\"#\").nth(1).unwrap().split(\" \").nth(0).unwrap().replace(\" \", \"\");\n\t\tlet mut name = irc.split(\":\").nth(2).unwrap().split(\" \").nth(0).unwrap().replace(\" \", \"\");\n\t\tif name.contains(\":\") {\n\t\t\tname = name.split(\":\").nth(0).unwrap().to_owned();\n\t\t}\n\t\tlet prime = irc.contains(\"subscribed with Twitch Prime\");\n\t\tNewSubscriber {\n\t\t\tchannel: channel,\n\t\t\tname: name,\n\t\t\ttwitch_prime: prime\n\t\t}\n\t}\n\n\tpub fn channel(&self) -> &String {\n\t\t&self.channel\n\t}\n\n\tpub fn name(&self) -> &String {\n\t\t&self.name\n\t}\n\n\tpub fn twitch_prime(&self) -> &bool {\n\t\t&self.twitch_prime\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse hir::BodyId;\nuse hir::def_id::DefId;\nuse syntax::ast::NodeId;\nuse ty::TyCtxt;\n\n\/\/\/ The `DepGraphSafe` trait is used to specify what kinds of values\n\/\/\/ are safe to \"leak\" into a task. 
The idea is that this should be\n\/\/\/ only be implemented for things like the tcx as well as various id\n\/\/\/ types, which will create reads in the dep-graph whenever the trait\n\/\/\/ loads anything that might depend on the input program.\npub trait DepGraphSafe {\n}\n\n\/\/\/ A `BodyId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for BodyId {\n}\n\n\/\/\/ A `NodeId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for NodeId {\n}\n\n\/\/\/ A `DefId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for DefId {\n}\n\n\/\/\/ The type context itself can be used to access all kinds of tracked\n\/\/\/ state, but those accesses should always generate read events.\nimpl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {\n}\n\n\/\/\/ Tuples make it easy to build up state.\nimpl<A, B> DepGraphSafe for (A, B)\n where A: DepGraphSafe, B: DepGraphSafe\n{\n}\n\n\/\/\/ Shared ref to dep-graph-safe stuff should still be dep-graph-safe.\nimpl<'a, A> DepGraphSafe for &'a A\n where A: DepGraphSafe,\n{\n}\n\n\/\/\/ No data here! :)\nimpl DepGraphSafe for () {\n}\n\n\/\/\/ A convenient override that lets you pass arbitrary state into a\n\/\/\/ task. Every use should be accompanied by a comment explaining why\n\/\/\/ it makes sense (or how it could be refactored away in the future).\npub struct AssertDepGraphSafe<T>(pub T);\n\nimpl<T> DepGraphSafe for AssertDepGraphSafe<T> {\n}\n<commit_msg>Module doc comment for librustc\/dep_graph\/safe.rs<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The `DepGraphSafe` trait\n\nuse hir::BodyId;\nuse hir::def_id::DefId;\nuse syntax::ast::NodeId;\nuse ty::TyCtxt;\n\n\/\/\/ The `DepGraphSafe` trait is used to specify what kinds of values\n\/\/\/ are safe to \"leak\" into a task. 
The idea is that this should be\n\/\/\/ only be implemented for things like the tcx as well as various id\n\/\/\/ types, which will create reads in the dep-graph whenever the trait\n\/\/\/ loads anything that might depend on the input program.\npub trait DepGraphSafe {\n}\n\n\/\/\/ A `BodyId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for BodyId {\n}\n\n\/\/\/ A `NodeId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for NodeId {\n}\n\n\/\/\/ A `DefId` on its own doesn't give access to any particular state.\n\/\/\/ You must fetch the state from the various maps or generate\n\/\/\/ on-demand queries, all of which create reads.\nimpl DepGraphSafe for DefId {\n}\n\n\/\/\/ The type context itself can be used to access all kinds of tracked\n\/\/\/ state, but those accesses should always generate read events.\nimpl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {\n}\n\n\/\/\/ Tuples make it easy to build up state.\nimpl<A, B> DepGraphSafe for (A, B)\n where A: DepGraphSafe, B: DepGraphSafe\n{\n}\n\n\/\/\/ Shared ref to dep-graph-safe stuff should still be dep-graph-safe.\nimpl<'a, A> DepGraphSafe for &'a A\n where A: DepGraphSafe,\n{\n}\n\n\/\/\/ No data here! :)\nimpl DepGraphSafe for () {\n}\n\n\/\/\/ A convenient override that lets you pass arbitrary state into a\n\/\/\/ task. Every use should be accompanied by a comment explaining why\n\/\/\/ it makes sense (or how it could be refactored away in the future).\npub struct AssertDepGraphSafe<T>(pub T);\n\nimpl<T> DepGraphSafe for AssertDepGraphSafe<T> {\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse session::config::Options;\n\nuse std::fs;\nuse std::io::{self, StdoutLock, Write};\nuse std::time::Instant;\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\npub enum ProfileCategory {\n Parsing,\n Expansion,\n TypeChecking,\n BorrowChecking,\n Codegen,\n Linking,\n Other,\n}\n\nstruct Categories<T> {\n parsing: T,\n expansion: T,\n type_checking: T,\n borrow_checking: T,\n codegen: T,\n linking: T,\n other: T,\n}\n\nimpl<T: Default> Categories<T> {\n fn new() -> Categories<T> {\n Categories {\n parsing: T::default(),\n expansion: T::default(),\n type_checking: T::default(),\n borrow_checking: T::default(),\n codegen: T::default(),\n linking: T::default(),\n other: T::default(),\n }\n }\n}\n\nimpl<T> Categories<T> {\n fn get(&self, category: ProfileCategory) -> &T {\n match category {\n ProfileCategory::Parsing => &self.parsing,\n ProfileCategory::Expansion => &self.expansion,\n ProfileCategory::TypeChecking => &self.type_checking,\n ProfileCategory::BorrowChecking => &self.borrow_checking,\n ProfileCategory::Codegen => &self.codegen,\n ProfileCategory::Linking => &self.linking,\n ProfileCategory::Other => &self.other,\n }\n }\n\n fn set(&mut self, category: ProfileCategory, value: T) {\n match category {\n ProfileCategory::Parsing => self.parsing = value,\n ProfileCategory::Expansion => self.expansion = value,\n ProfileCategory::TypeChecking => self.type_checking = value,\n ProfileCategory::BorrowChecking => self.borrow_checking = value,\n ProfileCategory::Codegen => self.codegen = value,\n ProfileCategory::Linking => self.linking = value,\n ProfileCategory::Other => self.other = value,\n }\n }\n}\n\nstruct CategoryData {\n times: Categories<u64>,\n query_counts: Categories<(u64, u64)>,\n}\n\nimpl CategoryData {\n fn new() -> CategoryData {\n CategoryData {\n times: Categories::new(),\n query_counts: Categories::new(),\n }\n }\n\n fn print(&self, lock: &mut StdoutLock) {\n macro_rules! p {\n ($name:tt, $rustic_name:ident) => {\n let (hits, total) = self.query_counts.$rustic_name;\n let (hits, total) = if total > 0 {\n (format!(\"{:.2}\", (((hits as f32) \/ (total as f32)) * 100.0)), total.to_string())\n } else {\n (\"\".into(), \"\".into())\n };\n\n writeln!(\n lock,\n \"| {0: <16} | {1: <14} | {2: <14} | {3: <8} |\",\n $name,\n self.times.$rustic_name \/ 1_000_000,\n total,\n hits\n ).unwrap();\n };\n }\n\n writeln!(lock, \"| Phase | Time (ms) | Queries | Hits (%) |\").unwrap();\n writeln!(lock, \"| ---------------- | -------------- | -------------- | -------- |\").unwrap();\n\n p!(\"Parsing\", parsing);\n p!(\"Expansion\", expansion);\n p!(\"TypeChecking\", type_checking);\n p!(\"BorrowChecking\", borrow_checking);\n p!(\"Codegen\", codegen);\n p!(\"Linking\", linking);\n p!(\"Other\", other);\n }\n\n fn json(&self) -> String {\n macro_rules! 
j {\n ($category:tt, $rustic_name:ident) => {{\n let (hits, total) = self.query_counts.$rustic_name;\n\n format!(\"{{ \\\"category\\\": {}, \\\"time_ms\\\": {}, \\\"query_count\\\": {}, \\\"query_hits\\\": {} }}\",\n stringify!($category),\n self.times.$rustic_name \/ 1_000_000,\n total,\n format!(\"{:.2}\", (((hits as f32) \/ (total as f32)) * 100.0))\n )\n }}\n }\n\n format!(\"[\n {},\n {},\n {},\n {},\n {},\n {},\n {}\n ]\",\n j!(\"Parsing\", parsing),\n j!(\"Expansion\", expansion),\n j!(\"TypeChecking\", type_checking),\n j!(\"BorrowChecking\", borrow_checking),\n j!(\"Codegen\", codegen),\n j!(\"Linking\", linking),\n j!(\"Other\", other)\n )\n }\n}\n\npub struct SelfProfiler {\n timer_stack: Vec<ProfileCategory>,\n data: CategoryData,\n current_timer: Instant,\n}\n\nimpl SelfProfiler {\n pub fn new() -> SelfProfiler {\n let mut profiler = SelfProfiler {\n timer_stack: Vec::new(),\n data: CategoryData::new(),\n current_timer: Instant::now(),\n };\n\n profiler.start_activity(ProfileCategory::Other);\n\n profiler\n }\n\n pub fn start_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.last().cloned() {\n None => {\n self.current_timer = Instant::now();\n },\n Some(current_category) if current_category == category => {\n \/\/since the current category is the same as the new activity's category,\n \/\/we don't need to do anything with the timer, we just need to push it on the stack\n }\n Some(current_category) => {\n let elapsed = self.stop_timer();\n\n \/\/record the current category's time\n let new_time = self.data.times.get(current_category) + elapsed;\n self.data.times.set(current_category, new_time);\n }\n }\n\n \/\/push the new category\n self.timer_stack.push(category);\n }\n\n pub fn record_query(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits, total + 1));\n }\n\n pub fn record_query_hit(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits + 1, total));\n }\n\n pub fn end_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.pop() {\n None => bug!(\"end_activity() was called but there was no running activity\"),\n Some(c) => \n assert!(\n c == category, \n \"end_activity() was called but a different activity was running\"),\n }\n\n \/\/check if the new running timer is in the same category as this one\n \/\/if it is, we don't need to do anything\n if let Some(c) = self.timer_stack.last() {\n if *c == category {\n return;\n }\n }\n\n \/\/the new timer is different than the previous, so record the elapsed time and start a new timer\n let elapsed = self.stop_timer();\n let new_time = self.data.times.get(category) + elapsed;\n self.data.times.set(category, new_time);\n }\n\n fn stop_timer(&mut self) -> u64 {\n let elapsed = self.current_timer.elapsed();\n\n self.current_timer = Instant::now();\n\n (elapsed.as_secs() * 1_000_000_000) + (elapsed.subsec_nanos() as u64)\n }\n\n pub fn print_results(&mut self, opts: &Options) {\n self.end_activity(ProfileCategory::Other);\n\n assert!(self.timer_stack.is_empty(), \"there were timers running when print_results() was called\");\n\n let out = io::stdout();\n let mut lock = out.lock();\n\n let crate_name = opts.crate_name.as_ref().map(|n| format!(\" for {}\", n)).unwrap_or_default();\n\n writeln!(lock, \"Self profiling results{}:\", crate_name).unwrap();\n writeln!(lock).unwrap();\n\n self.data.print(&mut 
lock);\n\n writeln!(lock).unwrap();\n writeln!(lock, \"Optimization level: {:?}\", opts.optimize).unwrap();\n\n let incremental = if opts.incremental.is_some() { \"on\" } else { \"off\" };\n writeln!(lock, \"Incremental: {}\", incremental).unwrap();\n }\n\n pub fn save_results(&self, opts: &Options) {\n let category_data = self.data.json();\n let compilation_options = format!(\"{{ \\\"optimization_level\\\": \\\"{:?}\\\", \\\"incremental\\\": {} }}\",\n opts.optimize,\n if opts.incremental.is_some() { \"true\" } else { \"false\" });\n\n let json = format!(\"{{ \\\"category_data\\\": {}, \\\"compilation_options\\\": {} }}\",\n category_data,\n compilation_options);\n\n fs::write(\"self_profiler_results.json\", json).unwrap();\n }\n}\n<commit_msg>Fix tidy<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse session::config::Options;\n\nuse std::fs;\nuse std::io::{self, StdoutLock, Write};\nuse std::time::Instant;\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\npub enum ProfileCategory {\n Parsing,\n Expansion,\n TypeChecking,\n BorrowChecking,\n Codegen,\n Linking,\n Other,\n}\n\nstruct Categories<T> {\n parsing: T,\n expansion: T,\n type_checking: T,\n borrow_checking: T,\n codegen: T,\n linking: T,\n other: T,\n}\n\nimpl<T: Default> Categories<T> {\n fn new() -> Categories<T> {\n Categories {\n parsing: T::default(),\n expansion: T::default(),\n type_checking: T::default(),\n borrow_checking: T::default(),\n codegen: T::default(),\n linking: T::default(),\n other: T::default(),\n }\n }\n}\n\nimpl<T> Categories<T> {\n fn get(&self, category: ProfileCategory) -> &T {\n match category {\n ProfileCategory::Parsing => &self.parsing,\n ProfileCategory::Expansion => &self.expansion,\n ProfileCategory::TypeChecking => &self.type_checking,\n ProfileCategory::BorrowChecking => &self.borrow_checking,\n ProfileCategory::Codegen => &self.codegen,\n ProfileCategory::Linking => &self.linking,\n ProfileCategory::Other => &self.other,\n }\n }\n\n fn set(&mut self, category: ProfileCategory, value: T) {\n match category {\n ProfileCategory::Parsing => self.parsing = value,\n ProfileCategory::Expansion => self.expansion = value,\n ProfileCategory::TypeChecking => self.type_checking = value,\n ProfileCategory::BorrowChecking => self.borrow_checking = value,\n ProfileCategory::Codegen => self.codegen = value,\n ProfileCategory::Linking => self.linking = value,\n ProfileCategory::Other => self.other = value,\n }\n }\n}\n\nstruct CategoryData {\n times: Categories<u64>,\n query_counts: Categories<(u64, u64)>,\n}\n\nimpl CategoryData {\n fn new() -> CategoryData {\n CategoryData {\n times: Categories::new(),\n query_counts: Categories::new(),\n }\n }\n\n fn print(&self, lock: &mut StdoutLock) {\n macro_rules! 
p {\n ($name:tt, $rustic_name:ident) => {\n let (hits, total) = self.query_counts.$rustic_name;\n let (hits, total) = if total > 0 {\n (format!(\"{:.2}\",\n (((hits as f32) \/ (total as f32)) * 100.0)), total.to_string())\n } else {\n (\"\".into(), \"\".into())\n };\n\n writeln!(\n lock,\n \"| {0: <16} | {1: <14} | {2: <14} | {3: <8} |\",\n $name,\n self.times.$rustic_name \/ 1_000_000,\n total,\n hits\n ).unwrap();\n };\n }\n\n writeln!(lock, \"| Phase | Time (ms) | Queries | Hits (%) |\")\n .unwrap();\n writeln!(lock, \"| ---------------- | -------------- | -------------- | -------- |\")\n .unwrap();\n\n p!(\"Parsing\", parsing);\n p!(\"Expansion\", expansion);\n p!(\"TypeChecking\", type_checking);\n p!(\"BorrowChecking\", borrow_checking);\n p!(\"Codegen\", codegen);\n p!(\"Linking\", linking);\n p!(\"Other\", other);\n }\n\n fn json(&self) -> String {\n macro_rules! j {\n ($category:tt, $rustic_name:ident) => {{\n let (hits, total) = self.query_counts.$rustic_name;\n\n format!(\n \"{{ \\\"category\\\": {}, \\\"time_ms\\\": {},\n \\\"query_count\\\": {}, \\\"query_hits\\\": {} }}\",\n stringify!($category),\n self.times.$rustic_name \/ 1_000_000,\n total,\n format!(\"{:.2}\", (((hits as f32) \/ (total as f32)) * 100.0))\n )\n }}\n }\n\n format!(\"[\n {},\n {},\n {},\n {},\n {},\n {},\n {}\n ]\",\n j!(\"Parsing\", parsing),\n j!(\"Expansion\", expansion),\n j!(\"TypeChecking\", type_checking),\n j!(\"BorrowChecking\", borrow_checking),\n j!(\"Codegen\", codegen),\n j!(\"Linking\", linking),\n j!(\"Other\", other)\n )\n }\n}\n\npub struct SelfProfiler {\n timer_stack: Vec<ProfileCategory>,\n data: CategoryData,\n current_timer: Instant,\n}\n\nimpl SelfProfiler {\n pub fn new() -> SelfProfiler {\n let mut profiler = SelfProfiler {\n timer_stack: Vec::new(),\n data: CategoryData::new(),\n current_timer: Instant::now(),\n };\n\n profiler.start_activity(ProfileCategory::Other);\n\n profiler\n }\n\n pub fn start_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.last().cloned() {\n None => {\n self.current_timer = Instant::now();\n },\n Some(current_category) if current_category == category => {\n \/\/since the current category is the same as the new activity's category,\n \/\/we don't need to do anything with the timer, we just need to push it on the stack\n }\n Some(current_category) => {\n let elapsed = self.stop_timer();\n\n \/\/record the current category's time\n let new_time = self.data.times.get(current_category) + elapsed;\n self.data.times.set(current_category, new_time);\n }\n }\n\n \/\/push the new category\n self.timer_stack.push(category);\n }\n\n pub fn record_query(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits, total + 1));\n }\n\n pub fn record_query_hit(&mut self, category: ProfileCategory) {\n let (hits, total) = *self.data.query_counts.get(category);\n self.data.query_counts.set(category, (hits + 1, total));\n }\n\n pub fn end_activity(&mut self, category: ProfileCategory) {\n match self.timer_stack.pop() {\n None => bug!(\"end_activity() was called but there was no running activity\"),\n Some(c) =>\n assert!(\n c == category,\n \"end_activity() was called but a different activity was running\"),\n }\n\n \/\/check if the new running timer is in the same category as this one\n \/\/if it is, we don't need to do anything\n if let Some(c) = self.timer_stack.last() {\n if *c == category {\n return;\n }\n }\n\n \/\/the new timer is different than the 
previous,\n \/\/so record the elapsed time and start a new timer\n let elapsed = self.stop_timer();\n let new_time = self.data.times.get(category) + elapsed;\n self.data.times.set(category, new_time);\n }\n\n fn stop_timer(&mut self) -> u64 {\n let elapsed = self.current_timer.elapsed();\n\n self.current_timer = Instant::now();\n\n (elapsed.as_secs() * 1_000_000_000) + (elapsed.subsec_nanos() as u64)\n }\n\n pub fn print_results(&mut self, opts: &Options) {\n self.end_activity(ProfileCategory::Other);\n\n assert!(\n self.timer_stack.is_empty(),\n \"there were timers running when print_results() was called\");\n\n let out = io::stdout();\n let mut lock = out.lock();\n\n let crate_name =\n opts.crate_name\n .as_ref()\n .map(|n| format!(\" for {}\", n))\n .unwrap_or_default();\n\n writeln!(lock, \"Self profiling results{}:\", crate_name).unwrap();\n writeln!(lock).unwrap();\n\n self.data.print(&mut lock);\n\n writeln!(lock).unwrap();\n writeln!(lock, \"Optimization level: {:?}\", opts.optimize).unwrap();\n\n let incremental = if opts.incremental.is_some() { \"on\" } else { \"off\" };\n writeln!(lock, \"Incremental: {}\", incremental).unwrap();\n }\n\n pub fn save_results(&self, opts: &Options) {\n let category_data = self.data.json();\n let compilation_options =\n format!(\"{{ \\\"optimization_level\\\": \\\"{:?}\\\", \\\"incremental\\\": {} }}\",\n opts.optimize,\n if opts.incremental.is_some() { \"true\" } else { \"false\" });\n\n let json = format!(\"{{ \\\"category_data\\\": {}, \\\"compilation_options\\\": {} }}\",\n category_data,\n compilation_options);\n\n fs::write(\"self_profiler_results.json\", json).unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some TODO notes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use if let construct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chore: hashing<commit_after><|endoftext|>"} {"text":"<commit_before>#[allow(dead_code)]\n#[derive(Copy)]\nstruct Book {\n \/\/ `&'static str` is a reference to a string allocated in read only memory\n author: &'static str,\n title: &'static str,\n year: uint,\n}\n\n\/\/ This function takes a reference to a book\nfn borrow_book(book: &Book) {\n println!(\"I borrowed {} {} edition\", book.title, book.year);\n}\n\n\/\/ This function takes a reference to a mutable book\nfn new_edition(book: &mut Book) {\n \/\/ the fields of the book can be modified\n book.year = 2014;\n}\n\nfn main() {\n \/\/ An immutable Book\n let geb = Book {\n \/\/ string literals have type `&'static str`\n author: \"Douglas Hofstadter\",\n title: \"Gödel, Escher, Bach\",\n year: 1979,\n };\n\n \/\/ Immutably borrow `geb`\n borrow_book(&geb);\n\n \/\/ Error! 
Can't borrow an immutable object as mutable\n new_edition(&mut geb);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `mutable_geb` is a mutable copy of `geb`\n let mut mutable_geb = geb;\n\n \/\/ Borrow a mutable object as mutable\n new_edition(&mut mutable_geb);\n\n \/\/ Mutable objects can be immutably borrowed\n borrow_book(&mutable_geb);\n}\n<commit_msg>Fix warning about uint: use u32 instead<commit_after>#[allow(dead_code)]\n#[derive(Copy)]\nstruct Book {\n \/\/ `&'static str` is a reference to a string allocated in read only memory\n author: &'static str,\n title: &'static str,\n year: u32,\n}\n\n\/\/ This function takes a reference to a book\nfn borrow_book(book: &Book) {\n println!(\"I borrowed {} {} edition\", book.title, book.year);\n}\n\n\/\/ This function takes a reference to a mutable book\nfn new_edition(book: &mut Book) {\n \/\/ the fields of the book can be modified\n book.year = 2014;\n}\n\nfn main() {\n \/\/ An immutable Book\n let geb = Book {\n \/\/ string literals have type `&'static str`\n author: \"Douglas Hofstadter\",\n title: \"Gödel, Escher, Bach\",\n year: 1979,\n };\n\n \/\/ Immutably borrow `geb`\n borrow_book(&geb);\n\n \/\/ Error! Can't borrow an immutable object as mutable\n new_edition(&mut geb);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `mutable_geb` is a mutable copy of `geb`\n let mut mutable_geb = geb;\n\n \/\/ Borrow a mutable object as mutable\n new_edition(&mut mutable_geb);\n\n \/\/ Mutable objects can be immutably borrowed\n borrow_book(&mutable_geb);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Colored spiral example<commit_after>extern crate turtle;\n\nuse turtle::{Turtle, Color};\n\nfn main() {\n let mut turtle = Turtle::new();\n\n for i in 0..720 {\n let i = i as f64;\n turtle.set_pen_color(Color::hsl(i % 360.0, 0.5, 0.5).with_alpha(1.0 - i \/ (360.0 * 2.5)));\n turtle.set_pen_size((i + 1.0) \/ 3.0);\n \/\/ Move forward three steps\n turtle.forward(6.0);\n turtle.backward(4.0);\n \/\/ Rotate to the right (clockwise) by 1 degree\n turtle.right(1.5);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Set limit to recursive glob search<commit_after><|endoftext|>"} {"text":"<commit_before>use crate::dsl;\nuse crate::query_builder::combination_clause::{\n All, CombinationClause, Distinct, Except, Intersect, Union,\n};\nuse crate::query_builder::{AsQuery, Query};\nuse crate::Table;\n\n\/\/\/ Extension trait to combine queries using a combinator like `UNION`, `INTERSECT` or `EXPECT`\n\/\/\/ with or without `ALL` rule for duplicates\npub trait CombineDsl {\n \/\/\/ What kind of query does this type represent?\n type Query: Query;\n\n \/\/\/ Combine two queries using a SQL `UNION`\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```rust\n \/\/\/ # extern crate diesel;\n \/\/\/ # include!(\"..\/doctest_setup.rs\");\n \/\/\/ # use schema::{users, animals};\n \/\/\/ # use crate::diesel::query_dsl::positional_order_dsl::PositionalOrderDsl;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ # use self::users::dsl::{users, name as user_name};\n \/\/\/ # use self::animals::dsl::{animals, name as animal_name};\n \/\/\/ # let connection = establish_connection();\n \/\/\/ let data = users.select(user_name.nullable())\n \/\/\/ .union(animals.select(animal_name).filter(animal_name.is_not_null()))\n \/\/\/ # .positional_order_by(1)\n \/\/\/ .load(&connection);\n \/\/\/\n \/\/\/ let expected_data = vec![\n \/\/\/ Some(String::from(\"Jack\")),\n \/\/\/ Some(String::from(\"Sean\")),\n \/\/\/ Some(String::from(\"Tess\")),\n \/\/\/ ];\n \/\/\/ 
assert_eq!(Ok(expected_data), data);\n \/\/\/ # }\n \/\/\/ ```\n fn union<Rhs>(self, rhs: Rhs) -> dsl::Union<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `UNION ALL`\n fn union_all<Rhs>(self, rhs: Rhs) -> dsl::UnionAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `INTERSECT`\n fn intersect<Rhs>(self, rhs: Rhs) -> dsl::Intersect<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `INTERSECT ALL`\n fn intersect_all<Rhs>(self, rhs: Rhs) -> dsl::IntersectAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `EXCEPT`\n fn except<Rhs>(self, rhs: Rhs) -> dsl::Except<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `EXCEPT ALL`\n fn except_all<Rhs>(self, rhs: Rhs) -> dsl::ExceptAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n}\n\nimpl<T: Table> CombineDsl for T {\n type Query = T::Query;\n\n fn union<Rhs>(self, rhs: Rhs) -> dsl::Union<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Union, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn union_all<Rhs>(self, rhs: Rhs) -> dsl::UnionAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Union, All, self.as_query(), rhs.as_query())\n }\n\n fn intersect<Rhs>(self, rhs: Rhs) -> dsl::Intersect<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Intersect, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn intersect_all<Rhs>(self, rhs: Rhs) -> dsl::IntersectAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Intersect, All, self.as_query(), rhs.as_query())\n }\n\n fn except<Rhs>(self, rhs: Rhs) -> dsl::Except<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Except, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn except_all<Rhs>(self, rhs: Rhs) -> dsl::ExceptAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Except, All, self.as_query(), rhs.as_query())\n }\n}\n<commit_msg>Remove extra empty line on top<commit_after>use crate::dsl;\nuse crate::query_builder::combination_clause::{\n All, CombinationClause, Distinct, Except, Intersect, Union,\n};\nuse crate::query_builder::{AsQuery, Query};\nuse crate::Table;\n\n\/\/\/ Extension trait to combine queries using a combinator like `UNION`, `INTERSECT` or `EXPECT`\n\/\/\/ with or without `ALL` rule for duplicates\npub trait CombineDsl {\n \/\/\/ What kind of query does this type represent?\n type Query: Query;\n\n \/\/\/ Combine two queries using a SQL `UNION`\n \/\/\/\n \/\/\/ # Examples\n \/\/\/ ```rust\n \/\/\/ # extern crate diesel;\n \/\/\/ # include!(\"..\/doctest_setup.rs\");\n \/\/\/ # use schema::{users, animals};\n \/\/\/ # use crate::diesel::query_dsl::positional_order_dsl::PositionalOrderDsl;\n \/\/\/ #\n \/\/\/ # fn main() {\n \/\/\/ # use self::users::dsl::{users, name as user_name};\n \/\/\/ # use self::animals::dsl::{animals, name as animal_name};\n \/\/\/ # let connection = establish_connection();\n \/\/\/ let data = 
users.select(user_name.nullable())\n \/\/\/ .union(animals.select(animal_name).filter(animal_name.is_not_null()))\n \/\/\/ # .positional_order_by(1)\n \/\/\/ .load(&connection);\n \/\/\/\n \/\/\/ let expected_data = vec![\n \/\/\/ Some(String::from(\"Jack\")),\n \/\/\/ Some(String::from(\"Sean\")),\n \/\/\/ Some(String::from(\"Tess\")),\n \/\/\/ ];\n \/\/\/ assert_eq!(Ok(expected_data), data);\n \/\/\/ # }\n \/\/\/ ```\n fn union<Rhs>(self, rhs: Rhs) -> dsl::Union<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `UNION ALL`\n fn union_all<Rhs>(self, rhs: Rhs) -> dsl::UnionAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `INTERSECT`\n fn intersect<Rhs>(self, rhs: Rhs) -> dsl::Intersect<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `INTERSECT ALL`\n fn intersect_all<Rhs>(self, rhs: Rhs) -> dsl::IntersectAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `EXCEPT`\n fn except<Rhs>(self, rhs: Rhs) -> dsl::Except<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n\n \/\/\/ Combine two queries using a SQL `EXCEPT ALL`\n fn except_all<Rhs>(self, rhs: Rhs) -> dsl::ExceptAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>;\n}\n\nimpl<T: Table> CombineDsl for T {\n type Query = T::Query;\n\n fn union<Rhs>(self, rhs: Rhs) -> dsl::Union<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Union, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn union_all<Rhs>(self, rhs: Rhs) -> dsl::UnionAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Union, All, self.as_query(), rhs.as_query())\n }\n\n fn intersect<Rhs>(self, rhs: Rhs) -> dsl::Intersect<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Intersect, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn intersect_all<Rhs>(self, rhs: Rhs) -> dsl::IntersectAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Intersect, All, self.as_query(), rhs.as_query())\n }\n\n fn except<Rhs>(self, rhs: Rhs) -> dsl::Except<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Except, Distinct, self.as_query(), rhs.as_query())\n }\n\n fn except_all<Rhs>(self, rhs: Rhs) -> dsl::ExceptAll<Self, Rhs>\n where\n Rhs: AsQuery<SqlType = <Self::Query as Query>::SqlType>,\n {\n CombinationClause::new(Except, All, self.as_query(), rhs.as_query())\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Liveness analysis.\n\n\/\/ FIXME: Make sure this analysis uses proper MIR semantics. 
Also find out what\n\/\/ the MIR semantics are.\n\nuse rustc::mir::*;\nuse rustc::mir::visit::{LvalueContext, Visitor};\nuse rustc_data_structures::indexed_vec::{IndexVec, Idx};\nuse rustc_data_structures::indexed_set::IdxSetBuf;\nuse util::pretty::{write_basic_block, dump_enabled, write_mir_intro};\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::item_path;\nuse std::path::{PathBuf, Path};\nuse std::fs;\nuse rustc::ty::TyCtxt;\nuse std::io::{self, Write};\n\npub type LocalSet = IdxSetBuf<Local>;\n\n#[derive(Eq, PartialEq, Clone)]\nstruct BlockInfo {\n defs: LocalSet,\n uses: LocalSet,\n}\n\nstruct BlockInfoVisitor {\n pre_defs: LocalSet,\n defs: LocalSet,\n uses: LocalSet,\n}\n\nimpl<'tcx> Visitor<'tcx> for BlockInfoVisitor {\n fn visit_lvalue(&mut self,\n lvalue: &Lvalue<'tcx>,\n context: LvalueContext<'tcx>,\n location: Location) {\n if let Lvalue::Local(local) = *lvalue {\n match context {\n LvalueContext::Store | LvalueContext::Call => {\n self.defs.add(&local);\n }\n LvalueContext::Projection(..) |\n LvalueContext::Borrow { .. } |\n LvalueContext::Inspect |\n LvalueContext::Consume |\n LvalueContext::Drop => {\n \/\/ Ignore uses which are already defined in this block\n if !self.pre_defs.contains(&local) {\n self.uses.add(&local);\n }\n }\n LvalueContext::StorageLive | LvalueContext::StorageDead => (),\n }\n }\n\n self.super_lvalue(lvalue, context, location)\n }\n}\n\nfn block<'tcx>(b: &BasicBlockData<'tcx>, locals: usize) -> BlockInfo {\n let mut visitor = BlockInfoVisitor {\n pre_defs: LocalSet::new_empty(locals),\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n let dummy_location = Location { block: BasicBlock::new(0), statement_index: 0 };\n\n for statement in &b.statements {\n visitor.visit_statement(BasicBlock::new(0), statement, dummy_location);\n visitor.pre_defs.union(&visitor.defs);\n }\n visitor.visit_terminator(BasicBlock::new(0), b.terminator(), dummy_location);\n\n BlockInfo {\n defs: visitor.defs,\n uses: visitor.uses,\n }\n}\n\npub struct LivenessResult {\n pub ins: IndexVec<BasicBlock, LocalSet>,\n pub outs: IndexVec<BasicBlock, LocalSet>,\n}\n\npub fn liveness_of_locals<'tcx>(mir: &Mir<'tcx>) -> LivenessResult {\n let locals = mir.local_decls.len();\n let def_use: IndexVec<_, _> = mir.basic_blocks().iter().map(|b| {\n block(b, locals)\n }).collect();\n\n let copy = |from: &IndexVec<BasicBlock, LocalSet>, to: &mut IndexVec<BasicBlock, LocalSet>| {\n for (i, set) in to.iter_enumerated_mut() {\n set.clone_from(&from[i]);\n }\n };\n\n let mut ins: IndexVec<_, _> = mir.basic_blocks()\n .indices()\n .map(|_| LocalSet::new_empty(locals)).collect();\n let mut outs = ins.clone();\n\n let mut ins_ = ins.clone();\n let mut outs_ = outs.clone();\n\n loop {\n copy(&ins, &mut ins_);\n copy(&outs, &mut outs_);\n\n for b in mir.basic_blocks().indices().rev() {\n \/\/ out = ∪ {ins of successors}\n outs[b].clear();\n for &successor in mir.basic_blocks()[b].terminator().successors().into_iter() {\n outs[b].union(&ins[successor]);\n }\n\n \/\/ in = use ∪ (out - def)\n ins[b].clone_from(&outs[b]);\n ins[b].subtract(&def_use[b].defs);\n ins[b].union(&def_use[b].uses);\n }\n\n if ins_ == ins && outs_ == outs {\n break;\n }\n }\n\n LivenessResult {\n ins,\n outs,\n }\n}\n\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes 
on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_name, &node_path,\n source, mir, result);\n}\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n node_path: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n let p = Path::new(file_dir);\n file_path.push(p);\n };\n let file_name = format!(\"rustc.node{}{}-liveness.mir\",\n source.item_id(), pass_name);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR local liveness analysis for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\")?;\n write_mir_fn(tcx, source, mir, &mut file, result)?;\n Ok(())\n });\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n result: &LivenessResult)\n -> io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n let print = |w: &mut Write, prefix, result: &IndexVec<BasicBlock, LocalSet>| {\n let live: Vec<String> = mir.local_decls.indices()\n .filter(|i| result[block].contains(i))\n .map(|i| format!(\"{:?}\", i))\n .collect();\n writeln!(w, \"{} {{{}}}\", prefix, live.join(\", \"))\n };\n print(w, \" \", &result.ins)?;\n write_basic_block(tcx, block, mir, w)?;\n print(w, \" \", &result.outs)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n<commit_msg>Consider StorageDead and StorageLive as gens for liveness analysis<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Liveness analysis.\n\n\/\/ FIXME: Make sure this analysis uses proper MIR semantics. Also find out what\n\/\/ the MIR semantics are.\n\nuse rustc::mir::*;\nuse rustc::mir::visit::{LvalueContext, Visitor};\nuse rustc_data_structures::indexed_vec::{IndexVec, Idx};\nuse rustc_data_structures::indexed_set::IdxSetBuf;\nuse util::pretty::{write_basic_block, dump_enabled, write_mir_intro};\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::item_path;\nuse std::path::{PathBuf, Path};\nuse std::fs;\nuse rustc::ty::TyCtxt;\nuse std::io::{self, Write};\n\npub type LocalSet = IdxSetBuf<Local>;\n\n#[derive(Eq, PartialEq, Clone)]\nstruct BlockInfo {\n defs: LocalSet,\n uses: LocalSet,\n}\n\nstruct BlockInfoVisitor {\n pre_defs: LocalSet,\n defs: LocalSet,\n uses: LocalSet,\n}\n\nimpl<'tcx> Visitor<'tcx> for BlockInfoVisitor {\n fn visit_lvalue(&mut self,\n lvalue: &Lvalue<'tcx>,\n context: LvalueContext<'tcx>,\n location: Location) {\n if let Lvalue::Local(local) = *lvalue {\n match context {\n LvalueContext::Store |\n LvalueContext::Call |\n LvalueContext::StorageLive |\n LvalueContext::StorageDead => {\n self.defs.add(&local);\n }\n LvalueContext::Projection(..) |\n LvalueContext::Borrow { .. 
} |\n LvalueContext::Inspect |\n LvalueContext::Consume |\n LvalueContext::Drop => {\n \/\/ Ignore uses which are already defined in this block\n if !self.pre_defs.contains(&local) {\n self.uses.add(&local);\n }\n }\n }\n }\n\n self.super_lvalue(lvalue, context, location)\n }\n}\n\nfn block<'tcx>(b: &BasicBlockData<'tcx>, locals: usize) -> BlockInfo {\n let mut visitor = BlockInfoVisitor {\n pre_defs: LocalSet::new_empty(locals),\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n let dummy_location = Location { block: BasicBlock::new(0), statement_index: 0 };\n\n for statement in &b.statements {\n visitor.visit_statement(BasicBlock::new(0), statement, dummy_location);\n visitor.pre_defs.union(&visitor.defs);\n }\n visitor.visit_terminator(BasicBlock::new(0), b.terminator(), dummy_location);\n\n BlockInfo {\n defs: visitor.defs,\n uses: visitor.uses,\n }\n}\n\npub struct LivenessResult {\n pub ins: IndexVec<BasicBlock, LocalSet>,\n pub outs: IndexVec<BasicBlock, LocalSet>,\n}\n\npub fn liveness_of_locals<'tcx>(mir: &Mir<'tcx>) -> LivenessResult {\n let locals = mir.local_decls.len();\n let def_use: IndexVec<_, _> = mir.basic_blocks().iter().map(|b| {\n block(b, locals)\n }).collect();\n\n let copy = |from: &IndexVec<BasicBlock, LocalSet>, to: &mut IndexVec<BasicBlock, LocalSet>| {\n for (i, set) in to.iter_enumerated_mut() {\n set.clone_from(&from[i]);\n }\n };\n\n let mut ins: IndexVec<_, _> = mir.basic_blocks()\n .indices()\n .map(|_| LocalSet::new_empty(locals)).collect();\n let mut outs = ins.clone();\n\n let mut ins_ = ins.clone();\n let mut outs_ = outs.clone();\n\n loop {\n copy(&ins, &mut ins_);\n copy(&outs, &mut outs_);\n\n for b in mir.basic_blocks().indices().rev() {\n \/\/ out = ∪ {ins of successors}\n outs[b].clear();\n for &successor in mir.basic_blocks()[b].terminator().successors().into_iter() {\n outs[b].union(&ins[successor]);\n }\n\n \/\/ in = use ∪ (out - def)\n ins[b].clone_from(&outs[b]);\n ins[b].subtract(&def_use[b].defs);\n ins[b].union(&def_use[b].uses);\n }\n\n if ins_ == ins && outs_ == outs {\n break;\n }\n }\n\n LivenessResult {\n ins,\n outs,\n }\n}\n\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_name, &node_path,\n source, mir, result);\n}\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n node_path: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n let p = Path::new(file_dir);\n file_path.push(p);\n };\n let file_name = format!(\"rustc.node{}{}-liveness.mir\",\n source.item_id(), pass_name);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR local liveness analysis for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\")?;\n write_mir_fn(tcx, source, mir, &mut file, result)?;\n Ok(())\n });\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n result: &LivenessResult)\n -> 
io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n let print = |w: &mut Write, prefix, result: &IndexVec<BasicBlock, LocalSet>| {\n let live: Vec<String> = mir.local_decls.indices()\n .filter(|i| result[block].contains(i))\n .map(|i| format!(\"{:?}\", i))\n .collect();\n writeln!(w, \"{} {{{}}}\", prefix, live.join(\", \"))\n };\n print(w, \" \", &result.ins)?;\n write_basic_block(tcx, block, mir, w)?;\n print(w, \" \", &result.outs)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some debugging output<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! * All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if 
!line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn 
collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n };\n if lang_features.contains_key(name) && name != \"proc_macro\" {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(name) {\n if s.level != f.level {\n err(\"different stability level than before\");\n }\n if s.since != f.since {\n err(\"different `since` than before\");\n }\n if s.tracking_issue != f.tracking_issue {\n err(\"different `tracking_issue` than before\");\n }\n }\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! 
err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<commit_msg>Auto merge of #43247 - est31:master, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! 
* All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\nimpl Feature {\n fn check_match(&self, other: &Feature)-> Result<(), Vec<&'static str>> {\n let mut mismatches = Vec::new();\n if self.level != other.level {\n mismatches.push(\"stability level\");\n }\n if self.level == Status::Stable || other.level == Status::Stable {\n \/\/ As long as a feature is unstable, the since field tracks\n \/\/ when the given part of the feature has been implemented.\n \/\/ Mismatches are tolerable as features evolve and functionality\n \/\/ gets added.\n \/\/ Once a feature is stable, the since field tracks the first version\n \/\/ it was part of the stable distribution, and mismatches are disallowed.\n if self.since != other.since {\n mismatches.push(\"since\");\n }\n }\n if self.tracking_issue != other.tracking_issue {\n mismatches.push(\"tracking issue\");\n }\n if mismatches.is_empty() {\n Ok(())\n } else {\n Err(mismatches)\n }\n }\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == 
Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut check_features = |f: &Feature, list: &Features, display: &str| {\n if let Some(ref s) = list.get(name) {\n if let Err(m) = (&f).check_match(s) {\n tidy_error!(bad,\n \"{}:{}: mismatches to {} in: 
{:?}\",\n file.display(),\n line,\n display,\n &m);\n }\n }\n };\n check_features(&f, &lang_features, \"corresponding lang feature\");\n check_features(&f, &lib_features, \"previous\");\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate rustbox;\nextern crate chrono;\n\nuse rustbox::*;\nuse chrono::*;\n\nuse screen::common::*;\nuse utility::string::*;\nuse model::ShowItem;\nuse reply_model::*;\n\npub struct Show<'a> {\n rustbox: &'a rustbox::RustBox,\n scrollY: usize,\n}\n\nimpl<'a> Show<'a> {\n pub fn new(rustbox: &'a rustbox::RustBox) -> Self {\n Show {\n rustbox: &rustbox,\n scrollY: 0,\n }\n }\n pub fn print(&mut self, title: &str, item: &ShowItem) {\n\n self.print_header(&format!(\"{} - {} [{}\/{}]\",\n item.title,\n title,\n item.page,\n item.max_page));\n self.print_body(2, &item);\n }\n\n fn print_header(&mut self, text: &str) {\n let title_len = jks_len(text);\n let padding = (if self.rustbox.width() >= title_len {\n self.rustbox.width() - title_len\n } else 
{\n 0\n }) \/ 2;\n\n let header_bottom = seq_str_gen(0, self.rustbox.width(), \"─\", \"\");\n\n clearline(&self.rustbox, self.rustbox.width(), 0, 0);\n self.rustbox.print(padding,\n 0,\n rustbox::RB_BOLD,\n Color::White,\n Color::Black,\n text);\n self.rustbox.print(0,\n 1,\n rustbox::RB_BOLD,\n Color::Yellow,\n Color::Black,\n &header_bottom);\n }\n\n fn build_separator_arguments(&mut self) -> (usize, usize, String) {\n let width = self.body_width();\n let rustbox = self.rustbox;\n\n let separator_width = if rustbox.width() >= 2 {\n rustbox.width() - 2\n } else {\n 0\n };\n let separator_padding_width = if rustbox.width() > separator_width {\n rustbox.width() - separator_width\n } else {\n 0\n } \/ 2;\n\n let separator_padding = seq_str_gen(0, separator_padding_width, \" \", \"\");\n\n (separator_width, separator_padding_width, separator_padding)\n }\n\n fn build_separator_top(&mut self, replier_name: &str, time: &str) -> String {\n let replier_max_width = 14;\n let time_max_width = 5;\n let (separator_width, separator_padding_width, separator_padding) =\n self.build_separator_arguments();\n make_separator_top(separator_width,\n &separator_padding,\n replier_max_width,\n &replier_name,\n time_max_width,\n &time)\n }\n\n fn build_separator_bottom(&mut self) -> String {\n let (separator_width, separator_padding_width, separator_padding) =\n self.build_separator_arguments();\n make_separator_bottom(separator_width, &separator_padding)\n }\n\n pub fn print_body(&mut self, offset_y: usize, item: &ShowItem) {\n let width = self.body_width();\n let rows = self.body_height();\n let rustbox = self.rustbox;\n let scrollY = self.scrollY;\n\n let mut y = offset_y;\n let now = Local::now();\n\n for (i, reply) in item.replies.iter().take(rows).enumerate() {\n\n let mut m = print_reply(&reply.body, 0, scrollY, y, &rustbox);\n\n if scrollY + 1 < y + m {\n\n let replier_name = reply.username.clone();\n\n let published_at = reply.published_at.clone();\n\n let published_at_dt = match Local.datetime_from_str(&published_at,\n \"%d\/%m\/%Y %H:%M\") {\n Ok(v) => v,\n Err(e) => now,\n };\n let time = published_at_format(&(now - published_at_dt));\n\n rustbox.print(0,\n m + y - scrollY,\n rustbox::RB_NORMAL,\n Color::Green,\n Color::Black,\n &self.build_separator_top(&replier_name, &time));\n }\n m += 1;\n\n if scrollY + 1 < y + m {\n rustbox.print(0,\n m + y - scrollY,\n rustbox::RB_NORMAL,\n Color::Green,\n Color::Black,\n &self.build_separator_bottom());\n }\n m += 1;\n y += m;\n }\n }\n\n pub fn resetY(&mut self) {\n self.scrollY = 0;\n }\n\n pub fn scrollUp(&mut self, value: usize) -> bool {\n let tmp = self.scrollY;\n if tmp > value {\n self.scrollY = tmp - value;\n true\n } else if tmp != 0 {\n self.scrollY = 0;\n true\n } else {\n false\n }\n }\n\n pub fn scrollDown(&mut self, value: usize) -> bool {\n let tmp = self.scrollY;\n if tmp < 10000 {\n self.scrollY = tmp + value;\n return true;\n }\n false\n }\n\n pub fn body_height(&self) -> usize {\n if self.rustbox.height() >= 3 {\n self.rustbox.height() - 3\n } else {\n 0\n }\n }\n\n pub fn body_width(&self) -> usize {\n if self.rustbox.width() >= 2 {\n self.rustbox.width() - 2\n } else {\n 0\n }\n }\n fn print_default(&self, x: usize, y: usize, text: &str) {\n self.rustbox.print(x, y, rustbox::RB_BOLD, Color::White, Color::Black, text);\n }\n}\n\nfn print_default(rustbox: &rustbox::RustBox, x: usize, y: usize, s: String) {\n rustbox.print(0, y, rustbox::RB_NORMAL, Color::White, Color::Black, &s);\n}\n\nfn print_reply(vec: &Vec<NodeType>,\n depth: usize,\n 
scrollY: usize,\n y: usize,\n rustbox: &rustbox::RustBox)\n -> usize {\n let padding = seq_str_gen(0, depth, \"├─\", \"\");\n let mut m = 0;\n let mut recursive_offset = 0;\n let mut total_y = 0;\n let mut line = String::new();\n\n \/\/ clean up lines (end)\n let vec2 = {\n let vec_length = vec.len();\n let vec_check_cleanup = vec.clone();\n\n \/\/ check if last 4 elements match the EMPTY PATTERN\n let is_last4_empty = vec_check_cleanup.iter()\n .rev()\n .take(4)\n .enumerate()\n .all(|(j, node)| match node.clone() {\n NodeType::Br(n) => j == 1 || j == 2 || j == 3,\n NodeType::Text(n) => j == 0 && n.data.is_empty(),\n _ => false,\n });\n\n let vec_short_length = if vec_length > 4 && is_last4_empty {\n vec_length - 4\n } else {\n vec_length\n };\n\n vec.iter().take(vec_short_length)\n };\n\n \/\/ clean up lines (start)\n let vec3 = {\n let vec2_cloned = vec2.clone();\n let mut result: Vec<NodeType> = Vec::new();\n for (j, node) in vec2_cloned.enumerate() {\n let node2 = node.clone();\n let node3 = node.clone();\n match node2 {\n NodeType::Br(n) => {\n if !result.is_empty() {\n result.push(node3);\n }\n }\n _ => result.push(node3),\n }\n }\n result.clone()\n };\n\n let mut is_first = true;\n for (j, node) in vec3.iter().enumerate() {\n total_y = y + m + recursive_offset;\n if scrollY + 1 < total_y {\n let node2 = node.clone();\n match node2 {\n NodeType::Text(n) => {\n if n.data != \"\" {\n line = format!(\"{}{}\", line, n.data);\n }\n }\n NodeType::Image(n) => {\n if n.data != \"\" {\n line = format!(\"{}[img {}]\", line, n.data);\n }\n }\n NodeType::BlockQuote(n) => {\n recursive_offset += print_reply(&n.data, depth + 1, scrollY, total_y, &rustbox);\n is_first = false;\n }\n NodeType::Br(n) => {\n if !line.is_empty() {\n print_default(rustbox,\n 0,\n total_y - scrollY,\n format!(\" {}{}\", padding, line));\n line = String::new();\n is_first = false;\n }\n\n \/\/ prevent first line empty\n if !is_first {\n m += 1;\n }\n\n }\n }\n }\n }\n\n if !line.is_empty() {\n total_y = y + m + recursive_offset;\n print_default(rustbox,\n 0,\n total_y - scrollY,\n format!(\" {}{} \", padding, line));\n line = String::new();\n m += 1;\n }\n\n m + recursive_offset\n}\n\nfn make_separator_replier_name(separator_width: usize,\n separator_padding: &str,\n replier_max_width: usize,\n replier_name: &str)\n -> String {\n let replier_name_len = jks_len(&replier_name);\n let replier_name_spacing_width = replier_max_width - replier_name_len;\n let is_replier_name_spacing_width_odd = replier_name_spacing_width & 1 == 1;\n let replier_name_right_spacing_width = replier_name_spacing_width \/ 2;\n let replier_name_left_spacing_width = if is_replier_name_spacing_width_odd {\n replier_name_right_spacing_width + 1\n } else {\n replier_name_right_spacing_width\n };\n\n let replier_name_left_spacing = seq_str_gen(0, replier_name_left_spacing_width, \"─\", \"\");\n let replier_name_right_spacing = seq_str_gen(0, replier_name_right_spacing_width, \"─\", \"\");\n\n let separator_replier = format!(\"{}{}{}{}{}\",\n \"╭\",\n replier_name_left_spacing,\n replier_name,\n replier_name_right_spacing,\n \"\");\n\n return separator_replier;\n}\n\nfn make_separator_time(separator_width: usize,\n separator_padding: &str,\n time_max_width: usize,\n time: &str)\n -> String {\n let time_len = jks_len(&time);\n let time_spacing_width = if time_max_width > time_len {\n time_max_width - time_len\n } else {\n 0\n };\n\n let is_time_spacing_width_odd = time_spacing_width & 1 == 1;\n let time_right_spacing_width = time_spacing_width \/ 2;\n let 
time_left_spacing_width = if is_time_spacing_width_odd {\n time_right_spacing_width + 1\n } else {\n time_right_spacing_width\n };\n\n let time_left_spacing = seq_str_gen(0, time_left_spacing_width, \"─\", \"\");\n let time_right_spacing = seq_str_gen(0, time_right_spacing_width, \"─\", \"\");\n\n let separator_time = format!(\"{}{}{}{}{}\",\n \"\",\n time_left_spacing,\n time,\n time_right_spacing,\n \"╮\");\n\n\n return separator_time;\n}\n\nfn make_separator_top(separator_width: usize,\n separator_padding: &str,\n replier_max_width: usize,\n replier_name: &str,\n time_max_width: usize,\n time: &str)\n -> String {\n\n let separator_replier = make_separator_replier_name(separator_width,\n &separator_padding,\n replier_max_width,\n &replier_name);\n\n let separator_replier_width = jks_len(&separator_replier);\n\n let separator_time = make_separator_time(separator_width,\n &separator_padding,\n time_max_width,\n &time);\n\n let separator_time_width = jks_len(&separator_time);\n\n let separator_top_middle_width = if separator_width >=\n (separator_replier_width + separator_time_width) {\n separator_width - separator_replier_width - separator_time_width\n } else {\n 0\n };\n\n let separator_top_middle = seq_str_gen(0, separator_top_middle_width, \" \", \"\");\n let separator_top = format!(\"{}{}{}{}{}\",\n separator_padding,\n separator_top_middle,\n separator_replier,\n separator_time,\n separator_padding);\n return separator_top;\n}\n\nfn make_separator_bottom(separator_width: usize, separator_padding: &str) -> String {\n let style_box_width = 1;\n let separator_bottom_middle_width = if separator_width > style_box_width {\n separator_width - style_box_width\n } else {\n 0\n };\n let separator_bottom_middle = seq_str_gen(0, separator_bottom_middle_width, \"─\", \"\");\n\n let separator_bottom = format!(\"{}{}{}{}\",\n separator_padding,\n separator_bottom_middle,\n \"╯\",\n separator_padding);\n return separator_bottom;\n}\n\n\nfn published_at_format(duration: &Duration) -> String {\n let weeks = duration.num_weeks();\n let days = duration.num_days();\n let hours = duration.num_hours();\n let minutes = duration.num_minutes();\n\n if weeks > 0 {\n format!(\"{}w\", weeks)\n } else if days > 0 {\n format!(\"{}d\", days)\n } else if hours > 0 {\n format!(\"{}h\", hours)\n } else if minutes > 0 {\n format!(\"{}m\", minutes)\n } else {\n String::from(\"1m\")\n }\n}\n\nfn seq_str_gen(start: usize, end: usize, sym: &str, join_sym: &str) -> String {\n (start..end).map(|_| sym.clone()).collect::<Vec<_>>().join(&join_sym)\n}\n<commit_msg>show - extract print separators<commit_after>extern crate rustbox;\nextern crate chrono;\n\nuse rustbox::*;\nuse chrono::*;\n\nuse screen::common::*;\nuse utility::string::*;\nuse model::ShowItem;\nuse model::ShowReplyItem;\nuse reply_model::*;\n\npub struct Show<'a> {\n rustbox: &'a rustbox::RustBox,\n scrollY: usize,\n}\n\nimpl<'a> Show<'a> {\n pub fn new(rustbox: &'a rustbox::RustBox) -> Self {\n Show {\n rustbox: &rustbox,\n scrollY: 0,\n }\n }\n pub fn print(&mut self, title: &str, item: &ShowItem) {\n\n self.print_header(&format!(\"{} - {} [{}\/{}]\",\n item.title,\n title,\n item.page,\n item.max_page));\n self.print_body(2, &item);\n }\n\n fn print_separator_top(&mut self, reply: &ShowReplyItem, y: usize) {\n if y > self.scrollY + 1 {\n let (replier_name, time) = self.build_separator_content(&reply);\n self.rustbox.print(0,\n y - self.scrollY,\n rustbox::RB_NORMAL,\n Color::Green,\n Color::Black,\n &self.build_separator_top(&replier_name, &time));\n }\n }\n\n 
fn print_separator_bottom(&mut self, y: usize) {\n if y > self.scrollY + 1 {\n self.rustbox.print(0,\n y - self.scrollY,\n rustbox::RB_NORMAL,\n Color::Green,\n Color::Black,\n &self.build_separator_bottom());\n }\n }\n\n fn print_header(&mut self, text: &str) {\n let title_len = jks_len(text);\n let padding = (if self.rustbox.width() >= title_len {\n self.rustbox.width() - title_len\n } else {\n 0\n }) \/ 2;\n\n let header_bottom = seq_str_gen(0, self.rustbox.width(), \"─\", \"\");\n\n clearline(&self.rustbox, self.rustbox.width(), 0, 0);\n self.rustbox.print(padding,\n 0,\n rustbox::RB_BOLD,\n Color::White,\n Color::Black,\n text);\n self.rustbox.print(0,\n 1,\n rustbox::RB_BOLD,\n Color::Yellow,\n Color::Black,\n &header_bottom);\n }\n\n pub fn print_body(&mut self, offset_y: usize, item: &ShowItem) {\n let width = self.body_width();\n let rows = self.body_height();\n let rustbox = self.rustbox;\n let scrollY = self.scrollY;\n\n let mut y = offset_y;\n\n for (i, reply) in item.replies.iter().take(rows).enumerate() {\n\n y += print_reply(&reply.body, 0, scrollY, y, &rustbox);\n\n self.print_separator_top(&reply, y);\n y += 1;\n\n self.print_separator_bottom(y);\n y += 1;\n }\n }\n\n fn build_separator_content(&mut self, reply: &ShowReplyItem) -> (String, String) {\n let now = Local::now();\n\n let replier_name = reply.username.clone();\n\n let published_at = reply.published_at.clone();\n\n let published_at_dt = match Local.datetime_from_str(&published_at, \"%d\/%m\/%Y %H:%M\") {\n Ok(v) => v,\n Err(e) => now,\n };\n let time = published_at_format(&(now - published_at_dt));\n (replier_name, time)\n }\n\n fn build_separator_arguments(&mut self) -> (usize, usize, String) {\n let width = self.body_width();\n let rustbox = self.rustbox;\n\n let separator_width = if rustbox.width() >= 2 {\n rustbox.width() - 2\n } else {\n 0\n };\n let separator_padding_width = if rustbox.width() > separator_width {\n rustbox.width() - separator_width\n } else {\n 0\n } \/ 2;\n\n let separator_padding = seq_str_gen(0, separator_padding_width, \" \", \"\");\n\n (separator_width, separator_padding_width, separator_padding)\n }\n\n fn build_separator_top(&mut self, replier_name: &str, time: &str) -> String {\n let replier_max_width = 14;\n let time_max_width = 5;\n let (separator_width, separator_padding_width, separator_padding) =\n self.build_separator_arguments();\n make_separator_top(separator_width,\n &separator_padding,\n replier_max_width,\n &replier_name,\n time_max_width,\n &time)\n }\n\n fn build_separator_bottom(&mut self) -> String {\n let (separator_width, separator_padding_width, separator_padding) =\n self.build_separator_arguments();\n make_separator_bottom(separator_width, &separator_padding)\n }\n\n pub fn resetY(&mut self) {\n self.scrollY = 0;\n }\n\n pub fn scrollUp(&mut self, value: usize) -> bool {\n let tmp = self.scrollY;\n if tmp > value {\n self.scrollY = tmp - value;\n true\n } else if tmp != 0 {\n self.scrollY = 0;\n true\n } else {\n false\n }\n }\n\n pub fn scrollDown(&mut self, value: usize) -> bool {\n let tmp = self.scrollY;\n if tmp < 10000 {\n self.scrollY = tmp + value;\n return true;\n }\n false\n }\n\n pub fn body_height(&self) -> usize {\n if self.rustbox.height() >= 3 {\n self.rustbox.height() - 3\n } else {\n 0\n }\n }\n\n pub fn body_width(&self) -> usize {\n if self.rustbox.width() >= 2 {\n self.rustbox.width() - 2\n } else {\n 0\n }\n }\n fn print_default(&self, x: usize, y: usize, text: &str) {\n self.rustbox.print(x, y, rustbox::RB_BOLD, Color::White, Color::Black, 
text);\n }\n}\n\nfn print_default(rustbox: &rustbox::RustBox, x: usize, y: usize, s: String) {\n rustbox.print(0, y, rustbox::RB_NORMAL, Color::White, Color::Black, &s);\n}\n\nfn print_reply(vec: &Vec<NodeType>,\n depth: usize,\n scrollY: usize,\n y: usize,\n rustbox: &rustbox::RustBox)\n -> usize {\n let padding = seq_str_gen(0, depth, \"├─\", \"\");\n let mut m = 0;\n let mut recursive_offset = 0;\n let mut total_y = 0;\n let mut line = String::new();\n\n \/\/ clean up lines (end)\n let vec2 = {\n let vec_length = vec.len();\n let vec_check_cleanup = vec.clone();\n\n \/\/ check if last 4 elements match the EMPTY PATTERN\n let is_last4_empty = vec_check_cleanup.iter()\n .rev()\n .take(4)\n .enumerate()\n .all(|(j, node)| match node.clone() {\n NodeType::Br(n) => j == 1 || j == 2 || j == 3,\n NodeType::Text(n) => j == 0 && n.data.is_empty(),\n _ => false,\n });\n\n let vec_short_length = if vec_length > 4 && is_last4_empty {\n vec_length - 4\n } else {\n vec_length\n };\n\n vec.iter().take(vec_short_length)\n };\n\n \/\/ clean up lines (start)\n let vec3 = {\n let vec2_cloned = vec2.clone();\n let mut result: Vec<NodeType> = Vec::new();\n for (j, node) in vec2_cloned.enumerate() {\n let node2 = node.clone();\n let node3 = node.clone();\n match node2 {\n NodeType::Br(n) => {\n if !result.is_empty() {\n result.push(node3);\n }\n }\n _ => result.push(node3),\n }\n }\n result.clone()\n };\n\n let mut is_first = true;\n for (j, node) in vec3.iter().enumerate() {\n total_y = y + m + recursive_offset;\n if scrollY + 1 < total_y {\n let node2 = node.clone();\n match node2 {\n NodeType::Text(n) => {\n if n.data != \"\" {\n line = format!(\"{}{}\", line, n.data);\n }\n }\n NodeType::Image(n) => {\n if n.data != \"\" {\n line = format!(\"{}[img {}]\", line, n.data);\n }\n }\n NodeType::BlockQuote(n) => {\n recursive_offset += print_reply(&n.data, depth + 1, scrollY, total_y, &rustbox);\n is_first = false;\n }\n NodeType::Br(n) => {\n if !line.is_empty() {\n print_default(rustbox,\n 0,\n total_y - scrollY,\n format!(\" {}{}\", padding, line));\n line = String::new();\n is_first = false;\n }\n\n \/\/ prevent first line empty\n if !is_first {\n m += 1;\n }\n\n }\n }\n }\n }\n\n if !line.is_empty() {\n total_y = y + m + recursive_offset;\n print_default(rustbox,\n 0,\n total_y - scrollY,\n format!(\" {}{} \", padding, line));\n line = String::new();\n m += 1;\n }\n\n m + recursive_offset\n}\n\nfn make_separator_replier_name(separator_width: usize,\n separator_padding: &str,\n replier_max_width: usize,\n replier_name: &str)\n -> String {\n let replier_name_len = jks_len(&replier_name);\n let replier_name_spacing_width = replier_max_width - replier_name_len;\n let is_replier_name_spacing_width_odd = replier_name_spacing_width & 1 == 1;\n let replier_name_right_spacing_width = replier_name_spacing_width \/ 2;\n let replier_name_left_spacing_width = if is_replier_name_spacing_width_odd {\n replier_name_right_spacing_width + 1\n } else {\n replier_name_right_spacing_width\n };\n\n let replier_name_left_spacing = seq_str_gen(0, replier_name_left_spacing_width, \"─\", \"\");\n let replier_name_right_spacing = seq_str_gen(0, replier_name_right_spacing_width, \"─\", \"\");\n\n let separator_replier = format!(\"{}{}{}{}{}\",\n \"╭\",\n replier_name_left_spacing,\n replier_name,\n replier_name_right_spacing,\n \"\");\n\n return separator_replier;\n}\n\nfn make_separator_time(separator_width: usize,\n separator_padding: &str,\n time_max_width: usize,\n time: &str)\n -> String {\n let time_len = jks_len(&time);\n let 
time_spacing_width = if time_max_width > time_len {\n time_max_width - time_len\n } else {\n 0\n };\n\n let is_time_spacing_width_odd = time_spacing_width & 1 == 1;\n let time_right_spacing_width = time_spacing_width \/ 2;\n let time_left_spacing_width = if is_time_spacing_width_odd {\n time_right_spacing_width + 1\n } else {\n time_right_spacing_width\n };\n\n let time_left_spacing = seq_str_gen(0, time_left_spacing_width, \"─\", \"\");\n let time_right_spacing = seq_str_gen(0, time_right_spacing_width, \"─\", \"\");\n\n let separator_time = format!(\"{}{}{}{}{}\",\n \"\",\n time_left_spacing,\n time,\n time_right_spacing,\n \"╮\");\n\n\n return separator_time;\n}\n\nfn make_separator_top(separator_width: usize,\n separator_padding: &str,\n replier_max_width: usize,\n replier_name: &str,\n time_max_width: usize,\n time: &str)\n -> String {\n\n let separator_replier = make_separator_replier_name(separator_width,\n &separator_padding,\n replier_max_width,\n &replier_name);\n\n let separator_replier_width = jks_len(&separator_replier);\n\n let separator_time = make_separator_time(separator_width,\n &separator_padding,\n time_max_width,\n &time);\n\n let separator_time_width = jks_len(&separator_time);\n\n let separator_top_middle_width = if separator_width >=\n (separator_replier_width + separator_time_width) {\n separator_width - separator_replier_width - separator_time_width\n } else {\n 0\n };\n\n let separator_top_middle = seq_str_gen(0, separator_top_middle_width, \" \", \"\");\n let separator_top = format!(\"{}{}{}{}{}\",\n separator_padding,\n separator_top_middle,\n separator_replier,\n separator_time,\n separator_padding);\n return separator_top;\n}\n\nfn make_separator_bottom(separator_width: usize, separator_padding: &str) -> String {\n let style_box_width = 1;\n let separator_bottom_middle_width = if separator_width > style_box_width {\n separator_width - style_box_width\n } else {\n 0\n };\n let separator_bottom_middle = seq_str_gen(0, separator_bottom_middle_width, \"─\", \"\");\n\n let separator_bottom = format!(\"{}{}{}{}\",\n separator_padding,\n separator_bottom_middle,\n \"╯\",\n separator_padding);\n return separator_bottom;\n}\n\n\nfn published_at_format(duration: &Duration) -> String {\n let weeks = duration.num_weeks();\n let days = duration.num_days();\n let hours = duration.num_hours();\n let minutes = duration.num_minutes();\n\n if weeks > 0 {\n format!(\"{}w\", weeks)\n } else if days > 0 {\n format!(\"{}d\", days)\n } else if hours > 0 {\n format!(\"{}h\", hours)\n } else if minutes > 0 {\n format!(\"{}m\", minutes)\n } else {\n String::from(\"1m\")\n }\n}\n\nfn seq_str_gen(start: usize, end: usize, sym: &str, join_sym: &str) -> String {\n (start..end).map(|_| sym.clone()).collect::<Vec<_>>().join(&join_sym)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chotto<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nfn a(&self) { }\n\/\/~^ ERROR unexpected `self` argument in bare function\n\nfn main() { }\n<commit_msg>remove license<commit_after>fn a(&self) { }\n\/\/~^ ERROR unexpected `self` argument in bare function\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>#7: add deny(unsafe_code)<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate clap;\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate lazy_static;\nextern crate env_logger;\nextern crate url;\nextern crate reqwest;\n#[macro_use]\nextern crate serde_derive;\nextern crate serde;\nextern crate serde_json;\nextern crate pbr;\n\nuse std::env;\nuse std::fs::OpenOptions;\nuse std::io::Read;\nuse std::io::Write;\n\nuse clap::{App, AppSettings, Arg};\nuse pbr::{ProgressBar, Units};\nuse reqwest::header::ContentLength;\n\nmod format;\nmod video_info;\n\nuse video_info::YTDL_PROXY_URL;\n\n#[derive(Debug)]\nstruct Options {\n no_progress: bool,\n info_only: bool,\n silent: bool, \n debug: bool,\n append: bool,\n json: bool,\n download_url: bool,\n filter: Vec<String>,\n byte_range: String,\n output_file: String,\n start_offset: i32,\n proxy_url: String,\n}\n\nfn main() {\n env_logger::init().expect(\"env logger init fail\");\n\n let flags = vec![\n Arg::with_name(\"output\")\n .short(\"o\")\n .long(\"output\")\n .value_name(\"FILE\")\n .help(\"write output to a file\")\n .takes_value(true),\n Arg::with_name(\"proxy-url\")\n .short(\"p\")\n .long(\"proxy-url\")\n .value_name(\"PROXY_URL\")\n .help(\"use proxy for the request\")\n .takes_value(true),\n Arg::with_name(\"no-progress\")\n .long(\"no-progress\")\n .help(\"write output to a file\"),\n Arg::with_name(\"range\")\n .short(\"r\")\n .long(\"range\")\n .value_name(\"RANGE\")\n .help(\"download a specific range of bytes of the video, [start]-[end]\")\n .takes_value(true),\n Arg::with_name(\"url\")\n .help(\"youtube url\")\n .required(true)\n .index(1),\n Arg::with_name(\"download-url\")\n .short(\"-u\")\n .long(\"download-url\")\n .help(\"prints download url to stdout\"),\n Arg::with_name(\"json\")\n .short(\"j\")\n .long(\"json\")\n .help(\"print info json to stdout\"),\n Arg::with_name(\"debug\")\n .short(\"d\")\n .long(\"debug\")\n .help(\"output debug log\"),\n Arg::with_name(\"filter\")\n .short(\"f\")\n .long(\"filter\")\n .value_name(\"FILTER\")\n .multiple(true)\n .help(\"filter available formats, syntax: val1 val2 val3\")\n .takes_value(true),\n Arg::with_name(\"append\")\n .short(\"-a\")\n .long(\"--append\")\n .help(\"append to output file instead of overwriting\"),\n Arg::with_name(\"start-offset\")\n .long(\"start-offset\")\n .value_name(\"STARTOFFSET\")\n .help(\"offset the start of the video\")\n .takes_value(true),\n Arg::with_name(\"silent\")\n .short(\"s\")\n .long(\"silent\")\n .help(\"only output error, also diables progressbar\"),\n Arg::with_name(\"info\")\n .short(\"i\")\n .long(\"info\")\n .help(\"only output info\")];\n \n let matches = App::new(\"ytdl\")\n .setting(AppSettings::ArgRequiredElseHelp)\n .version(\"0.0.1\")\n .about(\"download youtube videos\")\n .args(&flags)\n .get_matches();\n\n \n let mut filter = vec![];\n if matches.is_present(\"filter\") {\n filter = matches.values_of(\"filter\").unwrap().map(|x| x.to_string()).collect();\n }\n\n let mut options = Options {\n no_progress: matches.is_present(\"no-progress\"),\n info_only: matches.is_present(\"info\"),\n silent: matches.is_present(\"silent\"),\n debug: 
matches.is_present(\"debug\"),\n append: matches.is_present(\"append\"),\n json: matches.is_present(\"json\"),\n download_url: matches.is_present(\"download-url\"),\n filter: filter,\n output_file: matches.value_of(\"output\").unwrap_or_default().to_string(),\n byte_range: matches.value_of(\"range\").unwrap_or_default().to_string(),\n start_offset: matches.value_of(\"start-offset\").unwrap_or(\"0\").parse::<i32>().unwrap(),\n proxy_url: matches.value_of(\"proxy-url\").unwrap_or_default().to_string(),\n };\n\n if !options.proxy_url.is_empty() {\n env::set_var(YTDL_PROXY_URL, &options.proxy_url);\n }\n\n let identifier = matches.value_of(\"url\").unwrap_or_default();\n if options.filter.is_empty() {\n options.filter = vec![\n format!(\"{}:mp4\", format::FORMAT_EXTENSION_KEY),\n format!(\"!{}:\", format::FORMAT_VIDEO_ENCODING_KEY),\n format!(\"!{}:\", format::FORMAT_AUDIO_ENCODING_KEY),\n format!(\"best\"),\n ];\n }\n\n handler(identifier, &options);\n}\n\nfn handler(identifier: &str, options: &Options) {\n info!(\"fetching video info...\");\n let info = match video_info::get_video_info(identifier) {\n Ok(i) => i,\n Err(e) => {\n println!(\"unable to fetch video info: {}\", e.to_string());\n return;\n }\n };\n\n if options.info_only {\n println!(\"Author: {}\", info.author);\n println!(\"Duration: {}s\", info.duration);\n return\n } else if options.json {\n println!(\"{}\", serde_json::to_string(&info).unwrap_or_default());\n return \n }\n\n let formats = &info.formats;\n for x in &options.filter {\n\n }\n\n if formats.len() == 0 {\n println!(\"no formats available that match criteria\");\n return \n }\n\n let mut download_url = match video_info::get_download_url(&formats[0]) {\n Ok(u) => u,\n Err(e) => {\n println!(\"unable to get download url: {}\", e.to_string());\n return\n }\n };\n\n if options.start_offset != 0 {\n download_url.query_pairs_mut().append_pair(\"begin\", &format!(\"{}\", &options.start_offset * 1000));\n }\n\n if options.download_url {\n println!(\"{}\", download_url.as_str());\n }\n\n \n let filename = if !options.output_file.is_empty() {\n options.output_file.clone()\n } else {\n video_info::get_filename(&info, &formats[0])\n };\n\n let mut file = if options.append {\n OpenOptions::new()\n .write(true)\n .create(true)\n .append(true)\n .open(&filename)\n .expect(\"create output file fail\")\n } else {\n OpenOptions::new()\n .write(true)\n .create(true)\n .open(&filename)\n .expect(\"create output file fail\")\n };\n\n info!(\"download to {}\", filename);\n\n let client = video_info::get_client().expect(\"get request client fail\");\n let mut resp = client\n .get(download_url.as_str())\n .expect(\"download fail\")\n .send()\n .expect(\"download fail\");\n \n let file_size = resp.headers().get::<ContentLength>()\n .map(|l| **l)\n .unwrap_or(0);\n\n let mut pb = ProgressBar::new(file_size);\n pb.format(\"╢▌▌░╟\");\n pb.set_units(Units::Bytes);\n pb.show_percent = true;\n pb.show_speed = true;\n pb.show_time_left = true;\n let mut buf = [0; 128 * 1024];\n\n loop {\n match resp.read(&mut buf) {\n Ok(len) => {\n file.write_all(&buf[..len]).expect(\"write to file fail\");\n if !options.silent && !options.no_progress {\n pb.add(len as u64);\n }\n if len == 0 {\n break;\n }\n }\n Err(e) => panic!(\"{}\", e.to_string()),\n };\n }\n}\n\nfn parse_filter() {\n \n}<commit_msg>feat: version 0.1.1<commit_after>extern crate clap;\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate lazy_static;\nextern crate env_logger;\nextern crate url;\nextern crate 
reqwest;\n#[macro_use]\nextern crate serde_derive;\nextern crate serde;\nextern crate serde_json;\nextern crate pbr;\n\nuse std::env;\nuse std::fs::OpenOptions;\nuse std::io::Read;\nuse std::io::Write;\n\nuse clap::{App, AppSettings, Arg};\nuse pbr::{ProgressBar, Units};\nuse reqwest::header::ContentLength;\n\nmod format;\nmod video_info;\n\nuse video_info::YTDL_PROXY_URL;\n\n#[derive(Debug)]\nstruct Options {\n no_progress: bool,\n info_only: bool,\n silent: bool, \n debug: bool,\n append: bool,\n json: bool,\n download_url: bool,\n filter: Vec<String>,\n byte_range: String,\n output_file: String,\n start_offset: i32,\n proxy_url: String,\n}\n\nfn main() {\n env_logger::init().expect(\"env logger init fail\");\n\n let flags = vec![\n Arg::with_name(\"output\")\n .short(\"o\")\n .long(\"output\")\n .value_name(\"FILE\")\n .help(\"write output to a file\")\n .takes_value(true),\n Arg::with_name(\"proxy-url\")\n .short(\"p\")\n .long(\"proxy-url\")\n .value_name(\"PROXY_URL\")\n .help(\"use proxy for the request\")\n .takes_value(true),\n Arg::with_name(\"no-progress\")\n .long(\"no-progress\")\n .help(\"write output to a file\"),\n Arg::with_name(\"range\")\n .short(\"r\")\n .long(\"range\")\n .value_name(\"RANGE\")\n .help(\"download a specific range of bytes of the video, [start]-[end]\")\n .takes_value(true),\n Arg::with_name(\"url\")\n .help(\"youtube url\")\n .required(true)\n .index(1),\n Arg::with_name(\"download-url\")\n .short(\"-u\")\n .long(\"download-url\")\n .help(\"prints download url to stdout\"),\n Arg::with_name(\"json\")\n .short(\"j\")\n .long(\"json\")\n .help(\"print info json to stdout\"),\n Arg::with_name(\"debug\")\n .short(\"d\")\n .long(\"debug\")\n .help(\"output debug log\"),\n Arg::with_name(\"filter\")\n .short(\"f\")\n .long(\"filter\")\n .value_name(\"FILTER\")\n .multiple(true)\n .help(\"filter available formats, syntax: val1 val2 val3\")\n .takes_value(true),\n Arg::with_name(\"append\")\n .short(\"-a\")\n .long(\"--append\")\n .help(\"append to output file instead of overwriting\"),\n Arg::with_name(\"start-offset\")\n .long(\"start-offset\")\n .value_name(\"STARTOFFSET\")\n .help(\"offset the start of the video\")\n .takes_value(true),\n Arg::with_name(\"silent\")\n .short(\"s\")\n .long(\"silent\")\n .help(\"only output error, also diables progressbar\"),\n Arg::with_name(\"info\")\n .short(\"i\")\n .long(\"info\")\n .help(\"only output info\")];\n \n let matches = App::new(\"ytdl\")\n .setting(AppSettings::ArgRequiredElseHelp)\n .version(\"0.1.1\")\n .about(\"download youtube videos\")\n .args(&flags)\n .get_matches();\n\n \n let mut filter = vec![];\n if matches.is_present(\"filter\") {\n filter = matches.values_of(\"filter\").unwrap().map(|x| x.to_string()).collect();\n }\n\n let mut options = Options {\n no_progress: matches.is_present(\"no-progress\"),\n info_only: matches.is_present(\"info\"),\n silent: matches.is_present(\"silent\"),\n debug: matches.is_present(\"debug\"),\n append: matches.is_present(\"append\"),\n json: matches.is_present(\"json\"),\n download_url: matches.is_present(\"download-url\"),\n filter: filter,\n output_file: matches.value_of(\"output\").unwrap_or_default().to_string(),\n byte_range: matches.value_of(\"range\").unwrap_or_default().to_string(),\n start_offset: matches.value_of(\"start-offset\").unwrap_or(\"0\").parse::<i32>().unwrap(),\n proxy_url: matches.value_of(\"proxy-url\").unwrap_or_default().to_string(),\n };\n\n if !options.proxy_url.is_empty() {\n env::set_var(YTDL_PROXY_URL, &options.proxy_url);\n }\n\n let 
identifier = matches.value_of(\"url\").unwrap_or_default();\n if options.filter.is_empty() {\n options.filter = vec![\n format!(\"{}:mp4\", format::FORMAT_EXTENSION_KEY),\n format!(\"!{}:\", format::FORMAT_VIDEO_ENCODING_KEY),\n format!(\"!{}:\", format::FORMAT_AUDIO_ENCODING_KEY),\n format!(\"best\"),\n ];\n }\n\n handler(identifier, &options);\n}\n\nfn handler(identifier: &str, options: &Options) {\n info!(\"fetching video info...\");\n let info = match video_info::get_video_info(identifier) {\n Ok(i) => i,\n Err(e) => {\n println!(\"unable to fetch video info: {}\", e.to_string());\n return;\n }\n };\n\n if options.info_only {\n println!(\"Author: {}\", info.author);\n println!(\"Duration: {}s\", info.duration);\n return\n } else if options.json {\n println!(\"{}\", serde_json::to_string(&info).unwrap_or_default());\n return \n }\n\n let formats = &info.formats;\n for x in &options.filter {\n\n }\n\n if formats.len() == 0 {\n println!(\"no formats available that match criteria\");\n return \n }\n\n let mut download_url = match video_info::get_download_url(&formats[0]) {\n Ok(u) => u,\n Err(e) => {\n println!(\"unable to get download url: {}\", e.to_string());\n return\n }\n };\n\n if options.start_offset != 0 {\n download_url.query_pairs_mut().append_pair(\"begin\", &format!(\"{}\", &options.start_offset * 1000));\n }\n\n if options.download_url {\n println!(\"{}\", download_url.as_str());\n }\n\n \n let filename = if !options.output_file.is_empty() {\n options.output_file.clone()\n } else {\n video_info::get_filename(&info, &formats[0])\n };\n\n let mut file = if options.append {\n OpenOptions::new()\n .write(true)\n .create(true)\n .append(true)\n .open(&filename)\n .expect(\"create output file fail\")\n } else {\n OpenOptions::new()\n .write(true)\n .create(true)\n .open(&filename)\n .expect(\"create output file fail\")\n };\n\n info!(\"download to {}\", filename);\n\n let client = video_info::get_client().expect(\"get request client fail\");\n let mut resp = client\n .get(download_url.as_str())\n .expect(\"download fail\")\n .send()\n .expect(\"download fail\");\n \n let file_size = resp.headers().get::<ContentLength>()\n .map(|l| **l)\n .unwrap_or(0);\n\n let mut pb = ProgressBar::new(file_size);\n pb.format(\"╢▌▌░╟\");\n pb.set_units(Units::Bytes);\n pb.show_percent = true;\n pb.show_speed = true;\n pb.show_time_left = true;\n let mut buf = [0; 128 * 1024];\n\n loop {\n match resp.read(&mut buf) {\n Ok(len) => {\n file.write_all(&buf[..len]).expect(\"write to file fail\");\n if !options.silent && !options.no_progress {\n pb.add(len as u64);\n }\n if len == 0 {\n break;\n }\n }\n Err(e) => panic!(\"{}\", e.to_string()),\n };\n }\n}\n\nfn parse_filter() {\n \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Help and Help-Mode, add error messages<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>HTTPバージョンをパースするように。<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix title<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove gui for now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed endpoint<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>basic rust solution for problem 001<commit_after>fn sum_all_multiples(limit: i32) -> i32 {\n let mut sum = 0;\n\n for i in 0..limit {\n \tif i % 3 == 0 || i % 5 == 0 {\n \t sum += i;\n \t}\n }\n\n return sum;\n}\n\nfn main() {\n\tprintln!(\"{:?}\", sum_all_multiples(1000));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>feat: initial 
code<commit_after>#![feature(path, io)]\nuse std::old_io as io;\n\nstruct State {\n index: usize,\n memory: [u8; 256]\n}\n\ntype Trace<'a> = Fn(&mut State)+'a;\n\nfn main() {\n let mut reader = reader(&Path::new(\"test.bf\"));\n let mut state = State { index: 0, memory: [0; 256] };\n let mut chars = reader.chars().peekable();\n\n loop {\n let trace = parse(\n |&:| match chars.next() { Some(Ok(c)) => Some(c), _ => None },\n );\n match trace {\n Some(t) => (*t)(&mut state),\n None => return\n }\n }\n}\n\nfn reader(path: &Path) -> io::BufferedReader<io::File> {\n match io::File::open_mode(path, io::Open, io::Read) {\n Ok(f) => io::BufferedReader::new(f),\n Err(e) => panic!(e)\n }\n}\n\nfn parse<'a, F: FnMut() -> Option<char>>(mut next: F) -> Option<Box<Trace<'a>>> {\n match next() {\n Some('+') => Some(Box::new(|s| s.memory[s.index] += 1)),\n Some('-') => Some(Box::new(|s| s.memory[s.index] -= 1)),\n Some('.') => Some(Box::new(|s| print!(\"{}\", s.memory[s.index]))),\n _ => None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added functions from visa<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed bug with tic_vec type<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for word-count case<commit_after>#![feature(ascii_ctype)]\nuse std::ascii::AsciiExt;\nuse std::collections::HashMap;\n\npub fn word_count(text: &str) -> HashMap<String, u32> {\n let iter = text.split(|x: char| x.is_ascii_punctuation() || x == ' ')\n .filter(|x| !x.is_empty())\n .map(|x| x.to_lowercase());\n\n let mut result: HashMap<String, u32> = HashMap::new();\n\n for i in iter {\n let v = result.get(&i).map_or(1, |v| v + 1);\n\n result.insert(i, v);\n }\n\n result\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse net_traits::{ResourceAttribute, ResourceFetchTiming, ResourceTimeValue, ResourceTimingType};\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_nonzero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.fetch_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.fetch_start > 0,\n \"`fetch_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_zero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.fetch_start, 0,\n \"`fetch_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when `fetch_start` == zero\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_nonzero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.fetch_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.fetch_start > 0,\n \"`fetch_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when TAO check failed\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_zero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.fetch_start, 0,\n \"`fetch_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when `fetch_start`==0 and no TAO\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_redirect_start_if_nonzero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.redirect_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.redirect_start > 0,\n \"`redirect_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` succeeds for nonzero `redirect_start`, TAO pass\n 
resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_eq!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_zero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.redirect_start, 0,\n \"`redirect_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if `redirect_start` == 0\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_nonzero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n \/\/ Note: properly-behaved redirect_start should never be nonzero once TAO check has failed\n resource_timing.redirect_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.redirect_start > 0,\n \"`redirect_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if TAO check fails\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_zero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.redirect_start, 0,\n \"`redirect_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if `redirect_start`==0 and no TAO\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n assert_eq!(\n resource_timing.start_time, 0,\n \"initial `start_time` should be zero\"\n );\n\n \/\/ verify setting `start_time` to current time succeeds\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::Now));\n assert!(resource_timing.start_time > 0, \"failed to set `start_time`\");\n}\n#[test]\nfn test_reset_start_time() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n assert_eq!(resource_timing.start_time, 0, \"initial `start_time` = 0\");\n\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n\n \/\/ verify resetting `start_time` (to zero) succeeds\n 
resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::Zero));\n assert_eq!(\n resource_timing.start_time, 0,\n \"failed to reset `start_time`\"\n );\n}\n<commit_msg>Minor assert message edits for consistency<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at https:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse net_traits::{ResourceAttribute, ResourceFetchTiming, ResourceTimeValue, ResourceTimingType};\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_nonzero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.fetch_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.fetch_start > 0,\n \"`fetch_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_zero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.fetch_start, 0,\n \"`fetch_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when `fetch_start` == zero\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_nonzero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.fetch_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.fetch_start > 0,\n \"`fetch_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when TAO check failed\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_fetch_start_if_zero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.fetch_start, 0,\n \"`fetch_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `fetch_start` succeeds even when `fetch_start`==0 and no TAO\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::FetchStart));\n assert_eq!(\n resource_timing.start_time, resource_timing.fetch_start,\n \"`start_time` should equal `fetch_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time_to_redirect_start_if_nonzero_tao() {\n let 
mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.redirect_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.redirect_start > 0,\n \"`redirect_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` succeeds for nonzero `redirect_start`, TAO pass\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_eq!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_zero_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.redirect_start, 0,\n \"`redirect_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if `redirect_start` == 0\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_nonzero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n \/\/ Note: properly-behaved redirect_start should never be nonzero once TAO check has failed\n resource_timing.redirect_start = 1;\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n assert!(\n resource_timing.redirect_start > 0,\n \"`redirect_start` should have a positive value\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if TAO check fails\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_not_set_start_time_to_redirect_start_if_zero_no_tao() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n resource_timing.mark_timing_check_failed();\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n assert_eq!(\n resource_timing.redirect_start, 0,\n \"`redirect_start` should be zero\"\n );\n\n \/\/ verify that setting `start_time` to `redirect_start` fails if `redirect_start`==0 and no TAO\n resource_timing.set_attribute(ResourceAttribute::StartTime(\n ResourceTimeValue::RedirectStart,\n ));\n assert_ne!(\n resource_timing.start_time, resource_timing.redirect_start,\n \"`start_time` should *not* equal `redirect_start`\"\n );\n}\n\n#[test]\nfn test_set_start_time() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n\n \/\/ verify setting `start_time` to current time succeeds\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::Now));\n assert!(resource_timing.start_time > 0, \"failed to set `start_time`\");\n}\n#[test]\nfn 
test_reset_start_time() {\n let mut resource_timing: ResourceFetchTiming =\n ResourceFetchTiming::new(ResourceTimingType::Resource);\n assert_eq!(resource_timing.start_time, 0, \"`start_time` should be zero\");\n\n resource_timing.start_time = 1;\n assert!(\n resource_timing.start_time > 0,\n \"`start_time` should have a positive value\"\n );\n\n \/\/ verify resetting `start_time` (to zero) succeeds\n resource_timing.set_attribute(ResourceAttribute::StartTime(ResourceTimeValue::Zero));\n assert_eq!(\n resource_timing.start_time, 0,\n \"failed to reset `start_time`\"\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix else if branches<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Propogate PrefixFreeDecodeError<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Upgrade variables to 64 bit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>...somehow I was missing this file<commit_after>use capnp::list::{struct_list};\nuse holmes_capnp::holmes;\n\nuse std::str::FromStr;\nuse std::string::{ToString, String};\n\npub type PredId = u64;\n\n#[derive(Copy,PartialEq,Clone)]\npub enum HType {\n UInt64,\n HString,\n Blob\n}\nuse native_types::HType::*;\n\nimpl FromStr for HType {\n fn from_str(s : &str) -> Option<HType> {\n match s {\n \"uint64\" => {Some(UInt64)}\n \"string\" => {Some(HString)}\n \"blob\" => {Some(Blob)}\n _ => {None}\n }\n }\n}\n\nimpl ToString for HType {\n fn to_string(&self) -> String {\n String::from_str(match self {\n &UInt64 => {\"uint64\"}\n &HString => {\"string\"}\n &Blob => {\"blob\"}\n })\n }\n}\n\n#[derive(PartialEq,Clone)]\npub struct Predicate {\n pub name : String,\n pub types : Vec<HType>\n}\n\npub fn convert_types<'a> (types_reader : struct_list::Reader<'a, holmes::h_type::Reader<'a>>)\n -> Vec<HType> {\n let mut types = Vec::new();\n for type_reader in types_reader.iter() {\n match type_reader.which() {\n Some(holmes::h_type::Uint64(())) => {types.push(UInt64);}\n Some(holmes::h_type::String(())) => {types.push(HString);}\n Some(holmes::h_type::Blob(())) => {types.push(Blob);}\n None => { } \/\/TODO: What should we do if there's an unknown type?\n }\n }\n types\n}\n<|endoftext|>"} {"text":"<commit_before>use std::cmp;\nuse std::fmt::{self, Debug, Formatter};\nuse std::fs;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\n\nuse filetime::FileTime;\nuse git2;\nuse glob::Pattern;\n\nuse core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};\nuse ops;\nuse util::{self, CargoResult, internal, internal_error, human, ChainError};\nuse util::Config;\n\npub struct PathSource<'cfg> {\n id: SourceId,\n path: PathBuf,\n updated: bool,\n packages: Vec<Package>,\n config: &'cfg Config,\n}\n\n\/\/ TODO: Figure out if packages should be discovered in new or self should be\n\/\/ mut and packages are discovered in update\nimpl<'cfg> PathSource<'cfg> {\n pub fn for_path(path: &Path, config: &'cfg Config)\n -> CargoResult<PathSource<'cfg>> {\n trace!(\"PathSource::for_path; path={}\", path.display());\n Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config))\n }\n\n \/\/\/ Invoked with an absolute path to a directory that contains a Cargo.toml.\n \/\/\/ The source will read the manifest and find any other packages contained\n \/\/\/ in the directory structure reachable by the root manifest.\n pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)\n -> PathSource<'cfg> {\n trace!(\"new; id={}\", id);\n\n PathSource {\n id: id.clone(),\n path: path.to_path_buf(),\n updated: 
false,\n packages: Vec::new(),\n config: config,\n }\n }\n\n pub fn root_package(&mut self) -> CargoResult<Package> {\n trace!(\"root_package; source={:?}\", self);\n\n try!(self.update());\n\n match self.packages.iter().find(|p| p.root() == &*self.path) {\n Some(pkg) => Ok(pkg.clone()),\n None => Err(internal(\"no package found in source\"))\n }\n }\n\n fn read_packages(&self) -> CargoResult<Vec<Package>> {\n if self.updated {\n Ok(self.packages.clone())\n } else if self.id.is_path() && self.id.precise().is_some() {\n \/\/ If our source id is a path and it's listed with a precise\n \/\/ version, then it means that we're not allowed to have nested\n \/\/ dependencies (they've been rewritten to crates.io dependencies)\n \/\/ In this case we specifically read just one package, not a list of\n \/\/ packages.\n let path = self.path.join(\"Cargo.toml\");\n let (pkg, _) = try!(ops::read_package(&path, &self.id,\n self.config));\n Ok(vec![pkg])\n } else {\n ops::read_packages(&self.path, &self.id, self.config)\n }\n }\n\n \/\/\/ List all files relevant to building this package inside this source.\n \/\/\/\n \/\/\/ This function will use the appropriate methods to determine the\n \/\/\/ set of files underneath this source's directory which are relevant for\n \/\/\/ building `pkg`.\n \/\/\/\n \/\/\/ The basic assumption of this method is that all files in the directory\n \/\/\/ are relevant for building this package, but it also contains logic to\n \/\/\/ use other methods like .gitignore to filter the list of files.\n pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {\n let root = pkg.root();\n\n let parse = |p: &String| {\n Pattern::new(p).map_err(|e| {\n human(format!(\"could not parse pattern `{}`: {}\", p, e))\n })\n };\n let exclude = try!(pkg.manifest().exclude().iter()\n .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());\n let include = try!(pkg.manifest().include().iter()\n .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());\n\n let mut filter = |p: &Path| {\n let relative_path = util::without_prefix(p, &root).unwrap();\n include.iter().any(|p| p.matches_path(&relative_path)) || {\n include.len() == 0 &&\n !exclude.iter().any(|p| p.matches_path(&relative_path))\n }\n };\n\n \/\/ If this package is a git repository, then we really do want to query\n \/\/ the git repository as it takes into account items such as .gitignore.\n \/\/ We're not quite sure where the git repository is, however, so we do a\n \/\/ bit of a probe.\n \/\/\n \/\/ We check all packages in this source that are ancestors of the\n \/\/ specified package (including the same package) to see if they're at\n \/\/ the root of the git repository. This isn't always true, but it'll get\n \/\/ us there most of the time!\n let repo = self.packages.iter()\n .map(|pkg| pkg.root())\n .filter(|path| root.starts_with(path))\n .filter_map(|path| git2::Repository::open(&path).ok())\n .next();\n match repo {\n Some(repo) => self.list_files_git(pkg, repo, &mut filter),\n None => self.list_files_walk(pkg, &mut filter),\n }\n }\n\n fn list_files_git(&self, pkg: &Package, repo: git2::Repository,\n filter: &mut FnMut(&Path) -> bool)\n -> CargoResult<Vec<PathBuf>> {\n warn!(\"list_files_git {}\", pkg.package_id());\n let index = try!(repo.index());\n let root = try!(repo.workdir().chain_error(|| {\n internal_error(\"Can't list files on a bare repository.\", \"\")\n }));\n let pkg_path = pkg.root();\n\n let mut ret = Vec::new();\n\n \/\/ We use information from the git repository to guide us in traversing\n \/\/ its tree. 
The primary purpose of this is to take advantage of the\n \/\/ .gitignore and auto-ignore files that don't matter.\n \/\/\n \/\/ Here we're also careful to look at both tracked and untracked files as\n \/\/ the untracked files are often part of a build and may become relevant\n \/\/ as part of a future commit.\n let index_files = index.iter().map(|entry| {\n use libgit2_sys::git_filemode_t::GIT_FILEMODE_COMMIT;\n let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;\n (join(&root, &entry.path), Some(is_dir))\n });\n let mut opts = git2::StatusOptions::new();\n opts.include_untracked(true);\n if let Some(suffix) = util::without_prefix(pkg_path, &root) {\n opts.pathspec(suffix);\n }\n let statuses = try!(repo.statuses(Some(&mut opts)));\n let untracked = statuses.iter().map(|entry| {\n (join(&root, entry.path_bytes()), None)\n });\n\n 'outer: for (file_path, is_dir) in index_files.chain(untracked) {\n let file_path = try!(file_path);\n\n \/\/ Filter out files outside this package.\n if !file_path.starts_with(pkg_path) { continue }\n\n \/\/ Filter out Cargo.lock and target always\n {\n let fname = file_path.file_name().and_then(|s| s.to_str());\n if fname == Some(\"Cargo.lock\") { continue }\n if fname == Some(\"target\") { continue }\n }\n\n \/\/ Filter out sub-packages of this package\n for other_pkg in self.packages.iter().filter(|p| *p != pkg) {\n let other_path = other_pkg.root();\n if other_path.starts_with(pkg_path) &&\n file_path.starts_with(other_path) {\n continue 'outer;\n }\n }\n\n let is_dir = is_dir.or_else(|| {\n fs::metadata(&file_path).ok().map(|m| m.is_dir())\n }).unwrap_or(false);\n if is_dir {\n warn!(\" found submodule {}\", file_path.display());\n let rel = util::without_prefix(&file_path, &root).unwrap();\n let rel = try!(rel.to_str().chain_error(|| {\n human(format!(\"invalid utf-8 filename: {}\", rel.display()))\n }));\n \/\/ Git submodules are currently only named through `\/` path\n \/\/ separators, explicitly not `\\` which windows uses. Who knew?\n let rel = rel.replace(r\"\\\", \"\/\");\n match repo.find_submodule(&rel).and_then(|s| s.open()) {\n Ok(repo) => {\n let files = try!(self.list_files_git(pkg, repo, filter));\n ret.extend(files.into_iter());\n }\n Err(..) => {\n try!(PathSource::walk(&file_path, &mut ret, false,\n filter));\n }\n }\n } else if (*filter)(&file_path) {\n \/\/ We found a file!\n warn!(\" found {}\", file_path.display());\n ret.push(file_path);\n }\n }\n return Ok(ret);\n\n #[cfg(unix)]\n fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))\n }\n #[cfg(windows)]\n fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {\n use std::str;\n match str::from_utf8(data) {\n Ok(s) => Ok(path.join(s)),\n Err(..) 
=> Err(internal(\"cannot process path in git with a non \\\n unicode filename\")),\n }\n }\n }\n\n fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)\n -> CargoResult<Vec<PathBuf>> {\n let mut ret = Vec::new();\n for pkg in self.packages.iter().filter(|p| *p == pkg) {\n let loc = pkg.root();\n try!(PathSource::walk(loc, &mut ret, true, filter));\n }\n return Ok(ret);\n }\n\n fn walk(path: &Path, ret: &mut Vec<PathBuf>,\n is_root: bool, filter: &mut FnMut(&Path) -> bool) -> CargoResult<()>\n {\n if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {\n if (*filter)(path) {\n ret.push(path.to_path_buf());\n }\n return Ok(())\n }\n \/\/ Don't recurse into any sub-packages that we have\n if !is_root && fs::metadata(&path.join(\"Cargo.toml\")).is_ok() {\n return Ok(())\n }\n for dir in try!(fs::read_dir(path)) {\n let dir = try!(dir).path();\n let name = dir.file_name().and_then(|s| s.to_str());\n \/\/ Skip dotfile directories\n if name.map(|s| s.starts_with(\".\")) == Some(true) {\n continue\n } else if is_root {\n \/\/ Skip cargo artifacts\n match name {\n Some(\"target\") | Some(\"Cargo.lock\") => continue,\n _ => {}\n }\n }\n try!(PathSource::walk(&dir, ret, false, filter));\n }\n return Ok(())\n }\n}\n\nimpl<'cfg> Debug for PathSource<'cfg> {\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n write!(f, \"the paths source\")\n }\n}\n\nimpl<'cfg> Registry for PathSource<'cfg> {\n fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {\n self.packages.query(dep)\n }\n}\n\nimpl<'cfg> Source for PathSource<'cfg> {\n fn update(&mut self) -> CargoResult<()> {\n if !self.updated {\n let packages = try!(self.read_packages());\n self.packages.extend(packages.into_iter());\n self.updated = true;\n }\n\n Ok(())\n }\n\n fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{\n \/\/ TODO: assert! 
that the PackageId is contained by the source\n Ok(())\n }\n\n fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {\n trace!(\"getting packages; ids={:?}\", ids);\n\n Ok(self.packages.iter()\n .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id))\n .map(|pkg| pkg.clone())\n .collect())\n }\n\n fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {\n if !self.updated {\n return Err(internal_error(\"BUG: source was not updated\", \"\"));\n }\n\n let mut max = FileTime::zero();\n for file in try!(self.list_files(pkg)).iter() {\n \/\/ An fs::stat error here is either because path is a\n \/\/ broken symlink, a permissions error, or a race\n \/\/ condition where this path was rm'ed - either way,\n \/\/ we can ignore the error and treat the path's mtime\n \/\/ as 0.\n let mtime = fs::metadata(file).map(|meta| {\n FileTime::from_last_modification_time(&meta)\n }).unwrap_or(FileTime::zero());\n warn!(\"{} {}\", mtime, file.display());\n max = cmp::max(max, mtime);\n }\n trace!(\"fingerprint {}: {}\", self.path.display(), max);\n Ok(max.to_string())\n }\n}\n<commit_msg>Include filename in path fingerprint<commit_after>use std::fmt::{self, Debug, Formatter};\nuse std::fs;\nuse std::io::prelude::*;\nuse std::path::{Path, PathBuf};\n\nuse filetime::FileTime;\nuse git2;\nuse glob::Pattern;\n\nuse core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};\nuse ops;\nuse util::{self, CargoResult, internal, internal_error, human, ChainError};\nuse util::Config;\n\npub struct PathSource<'cfg> {\n id: SourceId,\n path: PathBuf,\n updated: bool,\n packages: Vec<Package>,\n config: &'cfg Config,\n}\n\n\/\/ TODO: Figure out if packages should be discovered in new or self should be\n\/\/ mut and packages are discovered in update\nimpl<'cfg> PathSource<'cfg> {\n pub fn for_path(path: &Path, config: &'cfg Config)\n -> CargoResult<PathSource<'cfg>> {\n trace!(\"PathSource::for_path; path={}\", path.display());\n Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config))\n }\n\n \/\/\/ Invoked with an absolute path to a directory that contains a Cargo.toml.\n \/\/\/ The source will read the manifest and find any other packages contained\n \/\/\/ in the directory structure reachable by the root manifest.\n pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)\n -> PathSource<'cfg> {\n trace!(\"new; id={}\", id);\n\n PathSource {\n id: id.clone(),\n path: path.to_path_buf(),\n updated: false,\n packages: Vec::new(),\n config: config,\n }\n }\n\n pub fn root_package(&mut self) -> CargoResult<Package> {\n trace!(\"root_package; source={:?}\", self);\n\n try!(self.update());\n\n match self.packages.iter().find(|p| p.root() == &*self.path) {\n Some(pkg) => Ok(pkg.clone()),\n None => Err(internal(\"no package found in source\"))\n }\n }\n\n fn read_packages(&self) -> CargoResult<Vec<Package>> {\n if self.updated {\n Ok(self.packages.clone())\n } else if self.id.is_path() && self.id.precise().is_some() {\n \/\/ If our source id is a path and it's listed with a precise\n \/\/ version, then it means that we're not allowed to have nested\n \/\/ dependencies (they've been rewritten to crates.io dependencies)\n \/\/ In this case we specifically read just one package, not a list of\n \/\/ packages.\n let path = self.path.join(\"Cargo.toml\");\n let (pkg, _) = try!(ops::read_package(&path, &self.id,\n self.config));\n Ok(vec![pkg])\n } else {\n ops::read_packages(&self.path, &self.id, self.config)\n }\n }\n\n \/\/\/ List all files relevant to building this package inside this 
source.\n \/\/\/\n \/\/\/ This function will use the appropriate methods to determine the\n \/\/\/ set of files underneath this source's directory which are relevant for\n \/\/\/ building `pkg`.\n \/\/\/\n \/\/\/ The basic assumption of this method is that all files in the directory\n \/\/\/ are relevant for building this package, but it also contains logic to\n \/\/\/ use other methods like .gitignore to filter the list of files.\n pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {\n let root = pkg.root();\n\n let parse = |p: &String| {\n Pattern::new(p).map_err(|e| {\n human(format!(\"could not parse pattern `{}`: {}\", p, e))\n })\n };\n let exclude = try!(pkg.manifest().exclude().iter()\n .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());\n let include = try!(pkg.manifest().include().iter()\n .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());\n\n let mut filter = |p: &Path| {\n let relative_path = util::without_prefix(p, &root).unwrap();\n include.iter().any(|p| p.matches_path(&relative_path)) || {\n include.len() == 0 &&\n !exclude.iter().any(|p| p.matches_path(&relative_path))\n }\n };\n\n \/\/ If this package is a git repository, then we really do want to query\n \/\/ the git repository as it takes into account items such as .gitignore.\n \/\/ We're not quite sure where the git repository is, however, so we do a\n \/\/ bit of a probe.\n \/\/\n \/\/ We check all packages in this source that are ancestors of the\n \/\/ specified package (including the same package) to see if they're at\n \/\/ the root of the git repository. This isn't always true, but it'll get\n \/\/ us there most of the time!\n let repo = self.packages.iter()\n .map(|pkg| pkg.root())\n .filter(|path| root.starts_with(path))\n .filter_map(|path| git2::Repository::open(&path).ok())\n .next();\n match repo {\n Some(repo) => self.list_files_git(pkg, repo, &mut filter),\n None => self.list_files_walk(pkg, &mut filter),\n }\n }\n\n fn list_files_git(&self, pkg: &Package, repo: git2::Repository,\n filter: &mut FnMut(&Path) -> bool)\n -> CargoResult<Vec<PathBuf>> {\n warn!(\"list_files_git {}\", pkg.package_id());\n let index = try!(repo.index());\n let root = try!(repo.workdir().chain_error(|| {\n internal_error(\"Can't list files on a bare repository.\", \"\")\n }));\n let pkg_path = pkg.root();\n\n let mut ret = Vec::new();\n\n \/\/ We use information from the git repository to guide us in traversing\n \/\/ its tree. 
The primary purpose of this is to take advantage of the\n \/\/ .gitignore and auto-ignore files that don't matter.\n \/\/\n \/\/ Here we're also careful to look at both tracked and untracked files as\n \/\/ the untracked files are often part of a build and may become relevant\n \/\/ as part of a future commit.\n let index_files = index.iter().map(|entry| {\n use libgit2_sys::git_filemode_t::GIT_FILEMODE_COMMIT;\n let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;\n (join(&root, &entry.path), Some(is_dir))\n });\n let mut opts = git2::StatusOptions::new();\n opts.include_untracked(true);\n if let Some(suffix) = util::without_prefix(pkg_path, &root) {\n opts.pathspec(suffix);\n }\n let statuses = try!(repo.statuses(Some(&mut opts)));\n let untracked = statuses.iter().map(|entry| {\n (join(&root, entry.path_bytes()), None)\n });\n\n 'outer: for (file_path, is_dir) in index_files.chain(untracked) {\n let file_path = try!(file_path);\n\n \/\/ Filter out files outside this package.\n if !file_path.starts_with(pkg_path) { continue }\n\n \/\/ Filter out Cargo.lock and target always\n {\n let fname = file_path.file_name().and_then(|s| s.to_str());\n if fname == Some(\"Cargo.lock\") { continue }\n if fname == Some(\"target\") { continue }\n }\n\n \/\/ Filter out sub-packages of this package\n for other_pkg in self.packages.iter().filter(|p| *p != pkg) {\n let other_path = other_pkg.root();\n if other_path.starts_with(pkg_path) &&\n file_path.starts_with(other_path) {\n continue 'outer;\n }\n }\n\n let is_dir = is_dir.or_else(|| {\n fs::metadata(&file_path).ok().map(|m| m.is_dir())\n }).unwrap_or(false);\n if is_dir {\n warn!(\" found submodule {}\", file_path.display());\n let rel = util::without_prefix(&file_path, &root).unwrap();\n let rel = try!(rel.to_str().chain_error(|| {\n human(format!(\"invalid utf-8 filename: {}\", rel.display()))\n }));\n \/\/ Git submodules are currently only named through `\/` path\n \/\/ separators, explicitly not `\\` which windows uses. Who knew?\n let rel = rel.replace(r\"\\\", \"\/\");\n match repo.find_submodule(&rel).and_then(|s| s.open()) {\n Ok(repo) => {\n let files = try!(self.list_files_git(pkg, repo, filter));\n ret.extend(files.into_iter());\n }\n Err(..) => {\n try!(PathSource::walk(&file_path, &mut ret, false,\n filter));\n }\n }\n } else if (*filter)(&file_path) {\n \/\/ We found a file!\n warn!(\" found {}\", file_path.display());\n ret.push(file_path);\n }\n }\n return Ok(ret);\n\n #[cfg(unix)]\n fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {\n use std::os::unix::prelude::*;\n use std::ffi::OsStr;\n Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))\n }\n #[cfg(windows)]\n fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {\n use std::str;\n match str::from_utf8(data) {\n Ok(s) => Ok(path.join(s)),\n Err(..) 
=> Err(internal(\"cannot process path in git with a non \\\n unicode filename\")),\n }\n }\n }\n\n fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)\n -> CargoResult<Vec<PathBuf>> {\n let mut ret = Vec::new();\n for pkg in self.packages.iter().filter(|p| *p == pkg) {\n let loc = pkg.root();\n try!(PathSource::walk(loc, &mut ret, true, filter));\n }\n return Ok(ret);\n }\n\n fn walk(path: &Path, ret: &mut Vec<PathBuf>,\n is_root: bool, filter: &mut FnMut(&Path) -> bool) -> CargoResult<()>\n {\n if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {\n if (*filter)(path) {\n ret.push(path.to_path_buf());\n }\n return Ok(())\n }\n \/\/ Don't recurse into any sub-packages that we have\n if !is_root && fs::metadata(&path.join(\"Cargo.toml\")).is_ok() {\n return Ok(())\n }\n for dir in try!(fs::read_dir(path)) {\n let dir = try!(dir).path();\n let name = dir.file_name().and_then(|s| s.to_str());\n \/\/ Skip dotfile directories\n if name.map(|s| s.starts_with(\".\")) == Some(true) {\n continue\n } else if is_root {\n \/\/ Skip cargo artifacts\n match name {\n Some(\"target\") | Some(\"Cargo.lock\") => continue,\n _ => {}\n }\n }\n try!(PathSource::walk(&dir, ret, false, filter));\n }\n return Ok(())\n }\n}\n\nimpl<'cfg> Debug for PathSource<'cfg> {\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n write!(f, \"the paths source\")\n }\n}\n\nimpl<'cfg> Registry for PathSource<'cfg> {\n fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {\n self.packages.query(dep)\n }\n}\n\nimpl<'cfg> Source for PathSource<'cfg> {\n fn update(&mut self) -> CargoResult<()> {\n if !self.updated {\n let packages = try!(self.read_packages());\n self.packages.extend(packages.into_iter());\n self.updated = true;\n }\n\n Ok(())\n }\n\n fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{\n \/\/ TODO: assert! 
that the PackageId is contained by the source\n Ok(())\n }\n\n fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {\n trace!(\"getting packages; ids={:?}\", ids);\n\n Ok(self.packages.iter()\n .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id))\n .map(|pkg| pkg.clone())\n .collect())\n }\n\n fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {\n if !self.updated {\n return Err(internal_error(\"BUG: source was not updated\", \"\"));\n }\n\n let mut max = FileTime::zero();\n let mut max_path = PathBuf::from(\"\");\n for file in try!(self.list_files(pkg)) {\n \/\/ An fs::stat error here is either because path is a\n \/\/ broken symlink, a permissions error, or a race\n \/\/ condition where this path was rm'ed - either way,\n \/\/ we can ignore the error and treat the path's mtime\n \/\/ as 0.\n let mtime = fs::metadata(&file).map(|meta| {\n FileTime::from_last_modification_time(&meta)\n }).unwrap_or(FileTime::zero());\n warn!(\"{} {}\", mtime, file.display());\n if mtime > max {\n max = mtime;\n max_path = file;\n }\n }\n trace!(\"fingerprint {}: {}\", self.path.display(), max);\n Ok(format!(\"{} ({})\", max, max_path.display()))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use resiter::IterInnerOkOrElse instead of libimagerror version<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>feature(io) is now feature(old_io)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>nbt: Small fix for `Display` implementation.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>New test for `cargo rustc --crate-type` with dependency<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example of counting points<commit_after>\/\/! Counts the number of points in a las file.\n\nextern crate las;\n\nuse las::Reader;\n\nfn main() {\n let path = std::env::args().skip(1).next().expect(\n \"Must provide a path to a las file\",\n );\n let mut reader = Reader::from_path(path).expect(\"Unable to open reader\");\n let npoints = reader\n .points()\n .map(|p| p.expect(\"Unable to read point\"))\n .count();\n println!(\"Number of points: {}\", npoints);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add damping generator.<commit_after>use nalgebra::traits::scalar_op::ScalarMul;\nuse object::body::{Body, RigidBody, SoftBody};\nuse integration::integrator::Integrator;\n\npub struct BodyDamping<N, LV, AV, M, II> {\n priv damping_factor: N,\n priv objects: ~[@mut Body<N, LV, AV, M, II>]\n}\n\nimpl<N, LV, AV, M, II> BodyDamping<N, LV, AV, M, II> {\n pub fn new(damping_factor: N) -> BodyDamping<N, LV, AV, M, II> {\n BodyDamping {\n damping_factor: damping_factor,\n objects: ~[]\n }\n }\n}\n\nimpl<N: Clone, LV: Clone + ScalarMul<N>, AV: Clone + ScalarMul<N>, M: Clone, II: Clone>\nIntegrator<N, Body<N, LV, AV, M, II>> for BodyDamping<N, LV, AV, M, II> {\n fn add(&mut self, o: @mut Body<N, LV, AV, M, II>) {\n self.objects.push(o.clone());\n }\n\n fn remove(&mut self, _: @mut Body<N, LV, AV, M, II>) {\n fail!(\"Not yet implemented.\");\n }\n\n fn update(&mut self, _: N) {\n for &o in self.objects.iter() {\n match *o {\n RigidBody(rb) => {\n let new_lin = rb.lin_vel().scalar_mul(&self.damping_factor);\n rb.set_lin_vel(new_lin);\n let new_ang = rb.ang_vel().scalar_mul(&self.damping_factor);\n rb.set_ang_vel(new_ang);\n },\n SoftBody(_) => {\n fail!(\"Not yet implemented.\")\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #66757<commit_after>\/\/ Regression test for 
#66757\n\/\/\n\/\/ Test than when you have a `!` value (e.g., the local variable\n\/\/ never) and an uninferred variable (here the argument to `From`) it\n\/\/ doesn't fallback to `()` but rather `!`.\n\/\/\n\/\/ run-pass\n\n#![feature(never_type)]\n\n\/\/ FIXME(#67225) -- this should be true even without the fallback gate.\n#![feature(never_type_fallback)]\n\nstruct E;\n\nimpl From<!> for E {\n fn from(_: !) -> E {\n E\n }\n}\n\n#[allow(unreachable_code)]\n#[allow(dead_code)]\nfn foo(never: !) {\n <E as From<!>>::from(never); \/\/ Ok\n <E as From<_>>::from(never); \/\/ Inference fails here\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #29800 - Detegr:master, r=nikomatsakis<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[fundamental] \/\/~ ERROR the `#[fundamental]` attribute is an experimental feature\nstruct Fundamental;\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #9515 - pickfire:patch-1, r=ehuss<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Arc1 code added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add a test case for issue #32031<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n\n#[no_mangle]\npub struct F32(f32);\n\n\/\/ CHECK: define float @add_newtype_f32(float, float)\n#[inline(never)]\n#[no_mangle]\npub fn add_newtype_f32(a: F32, b: F32) -> F32 {\n F32(a.0 + b.0)\n}\n\n#[no_mangle]\npub struct F64(f64);\n\n\/\/ CHECK: define double @add_newtype_f64(double, double)\n#[inline(never)]\n#[no_mangle]\npub fn add_newtype_f64(a: F64, b: F64) -> F64 {\n F64(a.0 + b.0)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add xfailed test for #3979<commit_after>\/\/ xfail-test\ntrait Positioned {\n fn SetX(int);\n}\n\ntrait Movable: Positioned {\n fn translate(dx: int) {\n self.SetX(self.X() + dx);\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add validation layer for FPS, and full dump at application debug level 2 and above<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rationalise names ending _raw to starting raw_ in Vulkan renderer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add log message on socket disconnect<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix imag-mail to list from appropriate collection<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_type=\"staticlib\"]\n#![feature(alloc)]\n#![feature(allocator)]\n#![feature(arc_counts)]\n#![feature(augmented_assignments)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(const_fn)]\n#![feature(core_intrinsics)]\n#![feature(core_str_ext)]\n#![feature(core_slice_ext)]\n#![feature(fnbox)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(op_assign_traits)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(unwind_attributes)]\n#![feature(vec_push_all)]\n#![feature(zero_one)]\n#![no_std]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\nextern crate system;\n\nuse acpi::Acpi;\n\nuse alloc::boxed::Box;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::{ptr, mem, usize};\nuse core::slice::SliceExt;\n\nuse common::event::{self, EVENT_KEY, EventOption};\nuse common::memory;\nuse common::paging::Page;\nuse common::time::Duration;\n\nuse drivers::pci;\nuse drivers::io::{Io, Pio};\nuse drivers::ps2::*;\nuse drivers::rtc::*;\nuse drivers::serial::*;\n\nuse env::Environment;\n\nuse graphics::display;\n\nuse scheduler::{Context, Regs, TSS};\nuse scheduler::context::context_switch;\n\nuse schemes::Url;\nuse schemes::context::*;\nuse schemes::debug::*;\nuse schemes::display::*;\nuse schemes::interrupt::*;\nuse schemes::memory::*;\n\nuse syscall::execute::execute;\nuse syscall::handle::*;\n\npub use system::externs::*;\n\n\/\/\/ Common std-like functionality\n#[macro_use]\npub mod common;\n#[macro_use]\npub mod macros;\n\/\/\/ Allocation\npub mod alloc_system;\n\/\/\/ ACPI\npub mod acpi;\n\/\/\/ Disk drivers\npub mod disk;\n\/\/\/ Various drivers\npub mod drivers;\n\/\/\/ Environment\npub mod env;\n\/\/\/ Filesystems\npub mod fs;\n\/\/\/ Various graphical methods\npub mod graphics;\n\/\/\/ Panic\npub mod panic;\n\/\/\/ Schemes\npub mod schemes;\n\/\/\/ Scheduling\npub mod scheduler;\n\/\/\/ Sync primatives\npub mod sync;\n\/\/\/ System calls\npub mod syscall;\n\/\/\/ USB input\/output\npub mod usb;\n\npub static mut TSS_PTR: Option<&'static mut TSS> = None;\npub static mut ENV_PTR: 
Option<&'static mut Environment> = None;\n\npub fn env() -> &'static Environment {\n unsafe {\n match ENV_PTR {\n Some(&mut ref p) => p,\n None => unreachable!(),\n }\n }\n}\n\n\/\/\/ Pit duration\nstatic PIT_DURATION: Duration = Duration {\n secs: 0,\n nanos: 2250286,\n};\n\n\/\/\/ Idle loop (active while idle)\nunsafe fn idle_loop() {\n loop {\n asm!(\"cli\" : : : : \"intel\", \"volatile\");\n\n let mut halt = true;\n\n for i in env().contexts.lock().iter().skip(1) {\n if i.interrupted {\n halt = false;\n break;\n }\n }\n\n\n if halt {\n asm!(\"sti\" : : : : \"intel\", \"volatile\");\n asm!(\"hlt\" : : : : \"intel\", \"volatile\");\n } else {\n asm!(\"sti\" : : : : \"intel\", \"volatile\");\n }\n\n\n context_switch(false);\n }\n}\n\n\/\/\/ Event poll loop\nfn poll_loop() {\n loop {\n env().on_poll();\n\n unsafe { context_switch(false) };\n }\n}\n\n\/\/\/ Event loop\nfn event_loop() {\n {\n let mut console = env().console.lock();\n console.instant = false;\n }\n\n let mut cmd = String::new();\n loop {\n loop {\n let mut console = env().console.lock();\n match env().events.lock().pop_front() {\n Some(event) => {\n if console.draw {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n event::K_F2 => {\n console.draw = false;\n }\n event::K_BKSP => if !cmd.is_empty() {\n console.write(&[8]);\n cmd.pop();\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n console.command = Some(cmd.clone());\n\n cmd.clear();\n console.write(&[10]);\n }\n _ => {\n cmd.push(key_event.character);\n console.write(&[key_event.character as u8]);\n }\n },\n }\n }\n }\n _ => (),\n }\n } else {\n if event.code == EVENT_KEY && event.b as u8 == event::K_F1 && event.c > 0 {\n console.draw = true;\n console.redraw = true;\n } else {\n \/\/ TODO: Magical orbital hack\n }\n }\n }\n None => break,\n }\n }\n\n {\n let mut console = env().console.lock();\n console.instant = false;\n if console.draw && console.redraw {\n console.redraw = false;\n console.display.flip();\n }\n }\n\n unsafe { context_switch(false) };\n }\n}\n\nstatic BSS_TEST_ZERO: usize = 0;\nstatic BSS_TEST_NONZERO: usize = usize::MAX;\n\n\/\/\/ Initialize kernel\nunsafe fn init(tss_data: usize) {\n\n \/\/ Test\n assume!(true);\n\n \/\/ Zero BSS, this initializes statics that are set to 0\n {\n extern {\n static mut __bss_start: u8;\n static mut __bss_end: u8;\n }\n\n let start_ptr = &mut __bss_start;\n let end_ptr = &mut __bss_end;\n\n if start_ptr as *const _ as usize <= end_ptr as *const _ as usize {\n let size = end_ptr as *const _ as usize - start_ptr as *const _ as usize;\n memset(start_ptr, 0, size);\n }\n\n assert_eq!(BSS_TEST_ZERO, 0);\n assert_eq!(BSS_TEST_NONZERO, usize::MAX);\n }\n\n \/\/ Setup paging, this allows for memory allocation\n Page::init();\n memory::cluster_init();\n \/\/ Unmap first page to catch null pointer errors (after reading memory map)\n Page::new(0).unmap();\n\n TSS_PTR = Some(&mut *(tss_data as *mut TSS));\n ENV_PTR = Some(&mut *Box::into_raw(Environment::new()));\n\n match ENV_PTR {\n Some(ref mut env) => {\n env.contexts.lock().push(Context::root());\n env.console.lock().draw = true;\n\n debug!(\"Redox {} bits\\n\", mem::size_of::<usize>() * 8);\n\n if let Some(acpi) = Acpi::new() {\n env.schemes.push(UnsafeCell::new(acpi));\n }\n\n *(env.clock_realtime.lock()) = Rtc::new().time();\n\n env.schemes.push(UnsafeCell::new(Ps2::new()));\n env.schemes.push(UnsafeCell::new(Serial::new(0x3F8, 0x4)));\n\n pci::pci_init(env);\n\n 
env.schemes.push(UnsafeCell::new(DebugScheme::new()));\n env.schemes.push(UnsafeCell::new(box DisplayScheme));\n env.schemes.push(UnsafeCell::new(box ContextScheme));\n env.schemes.push(UnsafeCell::new(box InterruptScheme));\n env.schemes.push(UnsafeCell::new(box MemoryScheme));\n\n Context::spawn(\"kpoll\".to_string(),\n box move || {\n poll_loop();\n });\n\n Context::spawn(\"kevent\".to_string(),\n box move || {\n event_loop();\n });\n\n env.contexts.lock().enabled = true;\n\n Context::spawn(\"kinit\".to_string(),\n box move || {\n {\n let wd_c = \"file:\/\\0\";\n do_sys_chdir(wd_c.as_ptr());\n\n let stdio_c = \"debug:\\0\";\n do_sys_open(stdio_c.as_ptr(), 0);\n do_sys_open(stdio_c.as_ptr(), 0);\n do_sys_open(stdio_c.as_ptr(), 0);\n }\n\n execute(Url::from_str(\"file:\/apps\/init\/main.bin\"), Vec::new());\n debug!(\"INIT: Failed to execute\\n\");\n\n loop {\n context_switch(false);\n }\n });\n },\n None => unreachable!(),\n }\n}\n\n#[cold]\n#[inline(never)]\n#[no_mangle]\n\/\/\/ Take regs for kernel calls and exceptions\npub extern \"cdecl\" fn kernel(interrupt: usize, mut regs: &mut Regs) {\n macro_rules! exception_inner {\n ($name:expr) => ({\n {\n let contexts = ::env().contexts.lock();\n if let Some(context) = contexts.current() {\n debugln!(\"PID {}: {}\", context.pid, context.name);\n }\n }\n\n debugln!(\" INT {:X}: {}\", interrupt, $name);\n debugln!(\" CS: {:08X} IP: {:08X} FLG: {:08X}\", regs.cs, regs.ip, regs.flags);\n debugln!(\" SS: {:08X} SP: {:08X} BP: {:08X}\", regs.ss, regs.sp, regs.bp);\n debugln!(\" AX: {:08X} BX: {:08X} CX: {:08X} DX: {:08X}\", regs.ax, regs.bx, regs.cx, regs.dx);\n debugln!(\" DI: {:08X} SI: {:08X}\", regs.di, regs.di);\n\n let cr0: usize;\n let cr2: usize;\n let cr3: usize;\n let cr4: usize;\n unsafe {\n asm!(\"mov $0, cr0\" : \"=r\"(cr0) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr2\" : \"=r\"(cr2) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr3\" : \"=r\"(cr3) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr4\" : \"=r\"(cr4) : : : \"intel\", \"volatile\");\n }\n debugln!(\" CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}\", cr0, cr2, cr3, cr4);\n\n let sp = regs.sp as *const u32;\n for y in -15..16 {\n debug!(\" {:>3}:\", y * 8 * 4);\n for x in 0..8 {\n debug!(\" {:08X}\", unsafe { ptr::read(sp.offset(-(x + y * 8))) });\n }\n debug!(\"\\n\");\n }\n })\n };\n\n macro_rules! exception {\n ($name:expr) => ({\n exception_inner!($name);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n macro_rules! 
exception_error {\n ($name:expr) => ({\n let error = regs.ip;\n regs.ip = regs.cs;\n regs.cs = regs.flags;\n regs.flags = regs.sp;\n regs.sp = regs.ss;\n regs.ss = 0;\n \/\/regs.ss = regs.error;\n\n exception_inner!($name);\n debugln!(\" ERR: {:08X}\", error);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n if interrupt >= 0x20 && interrupt < 0x30 {\n if interrupt >= 0x28 {\n Pio::<u8>::new(0xA0).write(0x20);\n }\n\n Pio::<u8>::new(0x20).write(0x20);\n }\n\n \/\/Do not catch init interrupt\n if interrupt < 0xFF {\n env().interrupts.lock()[interrupt as usize] += 1;\n }\n\n match interrupt {\n 0x20 => {\n {\n let mut clock_monotonic = env().clock_monotonic.lock();\n *clock_monotonic = *clock_monotonic + PIT_DURATION;\n }\n {\n let mut clock_realtime = env().clock_realtime.lock();\n *clock_realtime = *clock_realtime + PIT_DURATION;\n }\n\n let switch = {\n let mut contexts = ::env().contexts.lock();\n if let Some(mut context) = contexts.current_mut() {\n context.slices -= 1;\n context.slice_total += 1;\n context.slices == 0\n } else {\n false\n }\n };\n\n if switch {\n unsafe { context_switch(true) };\n }\n }\n i @ 0x21 ... 0x2F => env().on_irq(i as u8 - 0x20),\n 0x80 => if !syscall_handle(regs) {\n exception!(\"Unknown Syscall\");\n },\n 0xFF => {\n unsafe {\n init(regs.ax);\n idle_loop();\n }\n },\n 0x0 => exception!(\"Divide by zero exception\"),\n 0x1 => exception!(\"Debug exception\"),\n 0x2 => exception!(\"Non-maskable interrupt\"),\n 0x3 => exception!(\"Breakpoint exception\"),\n 0x4 => exception!(\"Overflow exception\"),\n 0x5 => exception!(\"Bound range exceeded exception\"),\n 0x6 => exception!(\"Invalid opcode exception\"),\n 0x7 => exception!(\"Device not available exception\"),\n 0x8 => exception_error!(\"Double fault\"),\n 0x9 => exception!(\"Coprocessor Segment Overrun\"), \/\/ legacy\n 0xA => exception_error!(\"Invalid TSS exception\"),\n 0xB => exception_error!(\"Segment not present exception\"),\n 0xC => exception_error!(\"Stack-segment fault\"),\n 0xD => exception_error!(\"General protection fault\"),\n 0xE => exception_error!(\"Page fault\"),\n 0x10 => exception!(\"x87 floating-point exception\"),\n 0x11 => exception_error!(\"Alignment check exception\"),\n 0x12 => exception!(\"Machine check exception\"),\n 0x13 => exception!(\"SIMD floating-point exception\"),\n 0x14 => exception!(\"Virtualization exception\"),\n 0x1E => exception_error!(\"Security exception\"),\n _ => exception!(\"Unknown Interrupt\"),\n }\n}\n\n#[test]\nfn get_slice_test() {\n let array = [1, 2, 3, 4, 5];\n assert_eq!(array.get_slice(2, None), array[2..]);\n assert_eq!(array.get_slice(2, array.len()), array[2..array.len()]);\n assert_eq!(array.get_slice(None, 2), array[..2]);\n assert_eq!(array.get_slice(0, 2), array[0..2]);\n assert_eq!(array.get_slice(1, array.len()), array[1..array.len()]);\n assert_eq!(array.get_slice(1, array.len() + 1), array[1..array.len()]);\n assert_eq!(array.get_slice(array.len(), array.len()),\n array[array.len()..array.len()]);\n assert_eq!(array.get_slice(array.len() + 2, array.len() + 2),\n array[array.len()..array.len()]);\n}\n<commit_msg>Format idle loop 
asm<commit_after>#![crate_type=\"staticlib\"]\n#![feature(alloc)]\n#![feature(allocator)]\n#![feature(arc_counts)]\n#![feature(augmented_assignments)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(const_fn)]\n#![feature(core_intrinsics)]\n#![feature(core_str_ext)]\n#![feature(core_slice_ext)]\n#![feature(fnbox)]\n#![feature(fundamental)]\n#![feature(lang_items)]\n#![feature(op_assign_traits)]\n#![feature(unboxed_closures)]\n#![feature(unsafe_no_drop_flag)]\n#![feature(unwind_attributes)]\n#![feature(vec_push_all)]\n#![feature(zero_one)]\n#![no_std]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\nextern crate system;\n\nuse acpi::Acpi;\n\nuse alloc::boxed::Box;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::{ptr, mem, usize};\nuse core::slice::SliceExt;\n\nuse common::event::{self, EVENT_KEY, EventOption};\nuse common::memory;\nuse common::paging::Page;\nuse common::time::Duration;\n\nuse drivers::pci;\nuse drivers::io::{Io, Pio};\nuse drivers::ps2::*;\nuse drivers::rtc::*;\nuse drivers::serial::*;\n\nuse env::Environment;\n\nuse graphics::display;\n\nuse scheduler::{Context, Regs, TSS};\nuse scheduler::context::context_switch;\n\nuse schemes::Url;\nuse schemes::context::*;\nuse schemes::debug::*;\nuse schemes::display::*;\nuse schemes::interrupt::*;\nuse schemes::memory::*;\n\nuse syscall::execute::execute;\nuse syscall::handle::*;\n\npub use system::externs::*;\n\n\/\/\/ Common std-like functionality\n#[macro_use]\npub mod common;\n#[macro_use]\npub mod macros;\n\/\/\/ Allocation\npub mod alloc_system;\n\/\/\/ ACPI\npub mod acpi;\n\/\/\/ Disk drivers\npub mod disk;\n\/\/\/ Various drivers\npub mod drivers;\n\/\/\/ Environment\npub mod env;\n\/\/\/ Filesystems\npub mod fs;\n\/\/\/ Various graphical methods\npub mod graphics;\n\/\/\/ Panic\npub mod panic;\n\/\/\/ Schemes\npub mod schemes;\n\/\/\/ Scheduling\npub mod scheduler;\n\/\/\/ Sync primatives\npub mod sync;\n\/\/\/ System calls\npub mod syscall;\n\/\/\/ USB input\/output\npub mod usb;\n\npub static mut TSS_PTR: Option<&'static mut TSS> = None;\npub static mut ENV_PTR: Option<&'static mut Environment> = None;\n\npub fn env() -> &'static Environment {\n unsafe {\n match ENV_PTR {\n Some(&mut ref p) => p,\n None => unreachable!(),\n }\n }\n}\n\n\/\/\/ Pit duration\nstatic PIT_DURATION: Duration = Duration {\n secs: 0,\n nanos: 2250286,\n};\n\n\/\/\/ Idle loop (active while idle)\nunsafe fn idle_loop() {\n loop {\n asm!(\"cli\" : : : : \"intel\", \"volatile\");\n\n let mut halt = true;\n\n for i in env().contexts.lock().iter().skip(1) {\n if i.interrupted {\n halt = false;\n break;\n }\n }\n\n\n if halt {\n asm!(\"sti\n hlt\"\n :\n :\n :\n : \"intel\", \"volatile\");\n } else {\n asm!(\"sti\"\n :\n :\n :\n : \"intel\", \"volatile\");\n }\n\n\n context_switch(false);\n }\n}\n\n\/\/\/ Event poll loop\nfn poll_loop() {\n loop {\n env().on_poll();\n\n unsafe { context_switch(false) };\n }\n}\n\n\/\/\/ Event loop\nfn event_loop() {\n {\n let mut console = env().console.lock();\n console.instant = false;\n }\n\n let mut cmd = String::new();\n loop {\n loop {\n let mut console = env().console.lock();\n match env().events.lock().pop_front() {\n Some(event) => {\n if console.draw {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n event::K_F2 => {\n console.draw = false;\n }\n event::K_BKSP => if !cmd.is_empty() {\n console.write(&[8]);\n 
cmd.pop();\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n console.command = Some(cmd.clone());\n\n cmd.clear();\n console.write(&[10]);\n }\n _ => {\n cmd.push(key_event.character);\n console.write(&[key_event.character as u8]);\n }\n },\n }\n }\n }\n _ => (),\n }\n } else {\n if event.code == EVENT_KEY && event.b as u8 == event::K_F1 && event.c > 0 {\n console.draw = true;\n console.redraw = true;\n } else {\n \/\/ TODO: Magical orbital hack\n }\n }\n }\n None => break,\n }\n }\n\n {\n let mut console = env().console.lock();\n console.instant = false;\n if console.draw && console.redraw {\n console.redraw = false;\n console.display.flip();\n }\n }\n\n unsafe { context_switch(false) };\n }\n}\n\nstatic BSS_TEST_ZERO: usize = 0;\nstatic BSS_TEST_NONZERO: usize = usize::MAX;\n\n\/\/\/ Initialize kernel\nunsafe fn init(tss_data: usize) {\n\n \/\/ Test\n assume!(true);\n\n \/\/ Zero BSS, this initializes statics that are set to 0\n {\n extern {\n static mut __bss_start: u8;\n static mut __bss_end: u8;\n }\n\n let start_ptr = &mut __bss_start;\n let end_ptr = &mut __bss_end;\n\n if start_ptr as *const _ as usize <= end_ptr as *const _ as usize {\n let size = end_ptr as *const _ as usize - start_ptr as *const _ as usize;\n memset(start_ptr, 0, size);\n }\n\n assert_eq!(BSS_TEST_ZERO, 0);\n assert_eq!(BSS_TEST_NONZERO, usize::MAX);\n }\n\n \/\/ Setup paging, this allows for memory allocation\n Page::init();\n memory::cluster_init();\n \/\/ Unmap first page to catch null pointer errors (after reading memory map)\n Page::new(0).unmap();\n\n TSS_PTR = Some(&mut *(tss_data as *mut TSS));\n ENV_PTR = Some(&mut *Box::into_raw(Environment::new()));\n\n match ENV_PTR {\n Some(ref mut env) => {\n env.contexts.lock().push(Context::root());\n env.console.lock().draw = true;\n\n debug!(\"Redox {} bits\\n\", mem::size_of::<usize>() * 8);\n\n if let Some(acpi) = Acpi::new() {\n env.schemes.push(UnsafeCell::new(acpi));\n }\n\n *(env.clock_realtime.lock()) = Rtc::new().time();\n\n env.schemes.push(UnsafeCell::new(Ps2::new()));\n env.schemes.push(UnsafeCell::new(Serial::new(0x3F8, 0x4)));\n\n pci::pci_init(env);\n\n env.schemes.push(UnsafeCell::new(DebugScheme::new()));\n env.schemes.push(UnsafeCell::new(box DisplayScheme));\n env.schemes.push(UnsafeCell::new(box ContextScheme));\n env.schemes.push(UnsafeCell::new(box InterruptScheme));\n env.schemes.push(UnsafeCell::new(box MemoryScheme));\n\n Context::spawn(\"kpoll\".to_string(),\n box move || {\n poll_loop();\n });\n\n Context::spawn(\"kevent\".to_string(),\n box move || {\n event_loop();\n });\n\n env.contexts.lock().enabled = true;\n\n Context::spawn(\"kinit\".to_string(),\n box move || {\n {\n let wd_c = \"file:\/\\0\";\n do_sys_chdir(wd_c.as_ptr());\n\n let stdio_c = \"debug:\\0\";\n do_sys_open(stdio_c.as_ptr(), 0);\n do_sys_open(stdio_c.as_ptr(), 0);\n do_sys_open(stdio_c.as_ptr(), 0);\n }\n\n execute(Url::from_str(\"file:\/apps\/init\/main.bin\"), Vec::new());\n debug!(\"INIT: Failed to execute\\n\");\n\n loop {\n context_switch(false);\n }\n });\n },\n None => unreachable!(),\n }\n}\n\n#[cold]\n#[inline(never)]\n#[no_mangle]\n\/\/\/ Take regs for kernel calls and exceptions\npub extern \"cdecl\" fn kernel(interrupt: usize, mut regs: &mut Regs) {\n macro_rules! 
exception_inner {\n ($name:expr) => ({\n {\n let contexts = ::env().contexts.lock();\n if let Some(context) = contexts.current() {\n debugln!(\"PID {}: {}\", context.pid, context.name);\n }\n }\n\n debugln!(\" INT {:X}: {}\", interrupt, $name);\n debugln!(\" CS: {:08X} IP: {:08X} FLG: {:08X}\", regs.cs, regs.ip, regs.flags);\n debugln!(\" SS: {:08X} SP: {:08X} BP: {:08X}\", regs.ss, regs.sp, regs.bp);\n debugln!(\" AX: {:08X} BX: {:08X} CX: {:08X} DX: {:08X}\", regs.ax, regs.bx, regs.cx, regs.dx);\n debugln!(\" DI: {:08X} SI: {:08X}\", regs.di, regs.di);\n\n let cr0: usize;\n let cr2: usize;\n let cr3: usize;\n let cr4: usize;\n unsafe {\n asm!(\"mov $0, cr0\" : \"=r\"(cr0) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr2\" : \"=r\"(cr2) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr3\" : \"=r\"(cr3) : : : \"intel\", \"volatile\");\n asm!(\"mov $0, cr4\" : \"=r\"(cr4) : : : \"intel\", \"volatile\");\n }\n debugln!(\" CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}\", cr0, cr2, cr3, cr4);\n\n let sp = regs.sp as *const u32;\n for y in -15..16 {\n debug!(\" {:>3}:\", y * 8 * 4);\n for x in 0..8 {\n debug!(\" {:08X}\", unsafe { ptr::read(sp.offset(-(x + y * 8))) });\n }\n debug!(\"\\n\");\n }\n })\n };\n\n macro_rules! exception {\n ($name:expr) => ({\n exception_inner!($name);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n macro_rules! exception_error {\n ($name:expr) => ({\n let error = regs.ip;\n regs.ip = regs.cs;\n regs.cs = regs.flags;\n regs.flags = regs.sp;\n regs.sp = regs.ss;\n regs.ss = 0;\n \/\/regs.ss = regs.error;\n\n exception_inner!($name);\n debugln!(\" ERR: {:08X}\", error);\n\n loop {\n do_sys_exit(usize::MAX);\n }\n })\n };\n\n if interrupt >= 0x20 && interrupt < 0x30 {\n if interrupt >= 0x28 {\n Pio::<u8>::new(0xA0).write(0x20);\n }\n\n Pio::<u8>::new(0x20).write(0x20);\n }\n\n \/\/Do not catch init interrupt\n if interrupt < 0xFF {\n env().interrupts.lock()[interrupt as usize] += 1;\n }\n\n match interrupt {\n 0x20 => {\n {\n let mut clock_monotonic = env().clock_monotonic.lock();\n *clock_monotonic = *clock_monotonic + PIT_DURATION;\n }\n {\n let mut clock_realtime = env().clock_realtime.lock();\n *clock_realtime = *clock_realtime + PIT_DURATION;\n }\n\n let switch = {\n let mut contexts = ::env().contexts.lock();\n if let Some(mut context) = contexts.current_mut() {\n context.slices -= 1;\n context.slice_total += 1;\n context.slices == 0\n } else {\n false\n }\n };\n\n if switch {\n unsafe { context_switch(true) };\n }\n }\n i @ 0x21 ... 
0x2F => env().on_irq(i as u8 - 0x20),\n 0x80 => if !syscall_handle(regs) {\n exception!(\"Unknown Syscall\");\n },\n 0xFF => {\n unsafe {\n init(regs.ax);\n idle_loop();\n }\n },\n 0x0 => exception!(\"Divide by zero exception\"),\n 0x1 => exception!(\"Debug exception\"),\n 0x2 => exception!(\"Non-maskable interrupt\"),\n 0x3 => exception!(\"Breakpoint exception\"),\n 0x4 => exception!(\"Overflow exception\"),\n 0x5 => exception!(\"Bound range exceeded exception\"),\n 0x6 => exception!(\"Invalid opcode exception\"),\n 0x7 => exception!(\"Device not available exception\"),\n 0x8 => exception_error!(\"Double fault\"),\n 0x9 => exception!(\"Coprocessor Segment Overrun\"), \/\/ legacy\n 0xA => exception_error!(\"Invalid TSS exception\"),\n 0xB => exception_error!(\"Segment not present exception\"),\n 0xC => exception_error!(\"Stack-segment fault\"),\n 0xD => exception_error!(\"General protection fault\"),\n 0xE => exception_error!(\"Page fault\"),\n 0x10 => exception!(\"x87 floating-point exception\"),\n 0x11 => exception_error!(\"Alignment check exception\"),\n 0x12 => exception!(\"Machine check exception\"),\n 0x13 => exception!(\"SIMD floating-point exception\"),\n 0x14 => exception!(\"Virtualization exception\"),\n 0x1E => exception_error!(\"Security exception\"),\n _ => exception!(\"Unknown Interrupt\"),\n }\n}\n\n#[test]\nfn get_slice_test() {\n let array = [1, 2, 3, 4, 5];\n assert_eq!(array.get_slice(2, None), array[2..]);\n assert_eq!(array.get_slice(2, array.len()), array[2..array.len()]);\n assert_eq!(array.get_slice(None, 2), array[..2]);\n assert_eq!(array.get_slice(0, 2), array[0..2]);\n assert_eq!(array.get_slice(1, array.len()), array[1..array.len()]);\n assert_eq!(array.get_slice(1, array.len() + 1), array[1..array.len()]);\n assert_eq!(array.get_slice(array.len(), array.len()),\n array[array.len()..array.len()]);\n assert_eq!(array.get_slice(array.len() + 2, array.len() + 2),\n array[array.len()..array.len()]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #56661 - aelred:issue-55846, r=Mark-Simulacrum<commit_after>\/\/ run-pass\n\n\/\/ Regression test for #55846, which once caused an ICE.\n\nuse std::marker::PhantomData;\n\nstruct Foo;\n\nstruct Bar<A> {\n a: PhantomData<A>,\n}\n\nimpl Fooifier for Foo {\n type Assoc = Foo;\n}\n\ntrait Fooifier {\n type Assoc;\n}\n\ntrait Barifier<H> {\n fn barify();\n}\n\nimpl<H> Barifier<H> for Bar<H> {\n fn barify() {\n println!(\"All correct!\");\n }\n}\n\nimpl Bar<<Foo as Fooifier>::Assoc> {\n fn this_shouldnt_crash() {\n <Self as Barifier<<Foo as Fooifier>::Assoc>>::barify();\n }\n}\n\nfn main() {\n Bar::<Foo>::this_shouldnt_crash();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Okay, the error is now more apparent.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Sort by Unit Names Previously, I was sorting units by their path. 
This change will sort by names of the units instead.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Failed attempt to add an explicit pipeline barrier to fix the rendering<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Replace all VK_NULL_HANDLE_MUT() with ptr::null_mut() for consistency with uses of ptr::null()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Assign a fence for each command buffer to avoid creating and destroying one each time execute_now() is invoked<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Review and tidy the RenderVk fields and construction<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>clear test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix test: Check whether in cache, then get, then check again<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #73137<commit_after>\/\/ Regression test for <https:\/\/github.com\/rust-lang\/rust\/issues\/73137>\n\n\/\/ run-pass\n\/\/ edition:2018\n\n#![allow(dead_code)]\n#![feature(wake_trait)]\nuse std::future::Future;\nuse std::task::{Waker, Wake, Context};\nuse std::sync::Arc;\n\nstruct DummyWaker;\nimpl Wake for DummyWaker {\n fn wake(self: Arc<Self>) {}\n}\n\nstruct Foo {\n a: usize,\n b: &'static u32,\n}\n\n#[inline(never)]\nfn nop<T>(_: T) {}\n\nfn main() {\n let mut fut = Box::pin(async {\n let action = Foo {\n b: &42,\n a: async { 0 }.await,\n };\n\n \/\/ An error in the generator transform caused `b` to be overwritten with `a` when `b` was\n \/\/ borrowed.\n nop(&action.b);\n assert_ne!(0usize, unsafe { std::mem::transmute(action.b) });\n\n async {}.await;\n });\n let waker = Waker::from(Arc::new(DummyWaker));\n let mut cx = Context::from_waker(&waker);\n let _ = fut.as_mut().poll(&mut cx);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: unit test<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Debug;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\nuse std::clone::Clone;\nuse std::convert::From;\n\nuse std::io::Error as IOError;\n\n#[derive(Clone)]\npub enum StoreErrorType {\n Unknown,\n IdNotFound,\n OutOfMemory,\n \/\/ maybe more\n}\n\nimpl From<StoreErrorType> for String {\n\n fn from(e: StoreErrorType) -> String {\n String::from(&e)\n }\n\n}\n\nimpl<'a> From<&'a StoreErrorType> for String {\n\n fn from(e: &'a StoreErrorType) -> String {\n match e {\n &StoreErrorType::Unknown => String::from(\"<Unknown>\"),\n &StoreErrorType::IdNotFound => String::from(\"ID not found\"),\n &StoreErrorType::OutOfMemory => String::from(\"Out of Memory\"),\n }\n }\n\n}\n\nimpl Debug for StoreErrorType {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s : String = self.into();\n try!(write!(fmt, \"{:?}\", s));\n Ok(())\n }\n\n}\n\nimpl Display for StoreErrorType {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s : String = self.into();\n try!(write!(fmt, \"{}\", s));\n Ok(())\n }\n\n}\n\npub struct StoreError {\n err_type: StoreErrorType,\n expl: Option<&'static str>,\n cause: Option<Box<Error>>,\n}\n\nimpl StoreError {\n\n pub fn new() -> StoreError {\n StoreError {\n err_type: StoreErrorType::Unknown,\n expl: None,\n cause: None,\n }\n }\n\n pub fn err_type(&self) -> StoreErrorType {\n self.err_type.clone()\n }\n\n pub fn with_type(mut self, t: StoreErrorType) -> StoreError {\n self.err_type = t;\n self\n }\n\n pub fn with_expl(mut self, e: 
&'static str) -> StoreError {\n self.expl = Some(e);\n self\n }\n\n pub fn with_cause(mut self, e: Box<Error>) -> StoreError {\n self.cause = Some(e);\n self\n }\n\n}\n\nimpl Debug for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{:?}]: {:?}, caused: {:?}\",\n self.err_type, self.expl, self.cause));\n Ok(())\n }\n\n}\n\nimpl Display for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let e : String = self.err_type.clone().into();\n try!(write!(fmt, \"[{}]: {}\",\n e,\n self.expl.unwrap_or(\"\")));\n Ok(())\n }\n\n}\n\nimpl Error for StoreError {\n\n fn description(&self) -> &str {\n self.expl.unwrap_or(\"\")\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n}\n\n<commit_msg>Pass error members on ::new()<commit_after>use std::error::Error;\nuse std::fmt::Debug;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Error as FmtError;\nuse std::clone::Clone;\nuse std::convert::From;\n\nuse std::io::Error as IOError;\n\n#[derive(Clone)]\npub enum StoreErrorType {\n Unknown,\n IdNotFound,\n OutOfMemory,\n \/\/ maybe more\n}\n\nimpl From<StoreErrorType> for String {\n\n fn from(e: StoreErrorType) -> String {\n String::from(&e)\n }\n\n}\n\nimpl<'a> From<&'a StoreErrorType> for String {\n\n fn from(e: &'a StoreErrorType) -> String {\n match e {\n &StoreErrorType::Unknown => String::from(\"<Unknown>\"),\n &StoreErrorType::IdNotFound => String::from(\"ID not found\"),\n &StoreErrorType::OutOfMemory => String::from(\"Out of Memory\"),\n }\n }\n\n}\n\nimpl Debug for StoreErrorType {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s : String = self.into();\n try!(write!(fmt, \"{:?}\", s));\n Ok(())\n }\n\n}\n\nimpl Display for StoreErrorType {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let s : String = self.into();\n try!(write!(fmt, \"{}\", s));\n Ok(())\n }\n\n}\n\npub struct StoreError {\n err_type: StoreErrorType,\n expl: &'static str,\n cause: Option<Box<Error>>,\n}\n\nimpl StoreError {\n\n pub fn new(errtype: StoreErrorType, expl: &'static str, cause: Option<Box<Error>>)\n -> StoreError\n {\n StoreError {\n err_type: errtype,\n expl: expl,\n cause: cause,\n }\n }\n\n pub fn err_type(&self) -> StoreErrorType {\n self.err_type.clone()\n }\n\n}\n\nimpl Debug for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"[{:?}]: {:?}, caused: {:?}\",\n self.err_type, self.expl, self.cause));\n Ok(())\n }\n\n}\n\nimpl Display for StoreError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n let e : String = self.err_type.clone().into();\n try!(write!(fmt, \"[{}]: {}\", e, self.expl));\n Ok(())\n }\n\n}\n\nimpl Error for StoreError {\n\n fn description(&self) -> &str {\n self.expl\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add example<commit_after>extern crate irc;\n\nuse irc::protocol::{Message, Body, IrcMessage};\nuse irc::protocol::command::PrivmsgCommand;\n\nfn main() {\n\n let data = \":abc!abc@example.com PRIVMSG #rust :Ok, thanks guys. :)\";\n let parsed_msg = Message::from_str(data).unwrap();\n\n\n let privmsg = PrivmsgCommand::new(\"#rust\", \"Ok, thanks guys. 
:)\");\n let expected_msg = Message::new(Some(\"abc!abc@example.com\"), Body::command(privmsg));\n\n assert_eq!(expected_msg, parsed_msg);\n\n println!(\"{}\", expected_msg);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Writing diagnostics for frame writing duration<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0538: r##\"\nAttribute contains multiple of the same meta item.\n\nErroneous code example:\n\n```compile_fail,E0538\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\",\n note=\"Second deprecation note.\" \/\/ error: multiple same meta item\n)]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. Each key may only be\nused once in each attribute.\n\nTo fix the problem, remove all but one of the meta items with the same key.\n\nExample:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. 
Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. 
Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\n}\n\nregister_diagnostics! {\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. 
and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n}\n<commit_msg>Slightly better summary for E0538<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0538: r##\"\nAttribute contains same meta item more than once.\n\nErroneous code example:\n\n```compile_fail,E0538\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\",\n note=\"Second deprecation note.\" \/\/ error: multiple same meta item\n)]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. Each key may only be\nused once in each attribute.\n\nTo fix the problem, remove all but one of the meta items with the same key.\n\nExample:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. 
Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. 
Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E0658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\n}\n\nregister_diagnostics! {\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. 
and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix `with_capacity` for zero-size vectors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>dispatch!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reusable Diagnostic Example<commit_after>use codespan_reporting::diagnostic::{Diagnostic, Label};\nuse codespan_reporting::files::SimpleFile;\nuse codespan_reporting::term::termcolor::StandardStream;\nuse codespan_reporting::term::{self, ColorArg};\nuse std::ops::Range;\nuse structopt::StructOpt;\n\n#[derive(Debug, StructOpt)]\n#[structopt(name = \"emit\")]\npub struct Opts {\n\t#[structopt(long = \"color\",\n\t\tparse(try_from_str),\n\t\tdefault_value = \"auto\",\n\t\tpossible_values = ColorArg::VARIANTS,\n\t\tcase_insensitive = true\n\t)]\n\tcolor: ColorArg,\n}\n\nfn main() -> anyhow::Result<()> {\n\tlet file = SimpleFile::new(\n\t\t\"main.rs\",\n\t\tunindent::unindent(\n\t\t\tr#\"\n fn main() {\n let foo: i32 = \"hello, world\";\n foo += 1;\n }\n \"#,\n\t\t),\n\t);\n\n\tlet errors = [\n\t\tError::MismatchType(\n\t\t\tItem::new(20..23, \"i32\"),\n\t\t\tItem::new(31..45, \"\\\"hello, world\\\"\"),\n\t\t),\n\t\tError::MutatingImmutable(Item::new(20..23, \"foo\"), Item::new(51..59, \"foo += 1\")),\n\t];\n\n \/\/ Map all errors into `Diagnostic`\n\tlet diagnostics: Vec<Diagnostic<()>> = errors.iter().map(Error::report).collect();\n\n\tlet opts = Opts::from_args();\n\tlet writer = StandardStream::stderr(opts.color.into());\n\tlet config = codespan_reporting::term::Config::default();\n\tfor diagnostic in &diagnostics {\n\t\tterm::emit(&mut writer.lock(), &config, &file, &diagnostic)?;\n\t}\n\n\tOk(())\n}\n\n\/\/\/ An error enum that represent all possible errors within your program\nenum Error {\n\tMismatchType(Item, Item),\n\tMutatingImmutable(Item, Item),\n}\n\nimpl Error {\n\tfn report(&self) -> Diagnostic<()> {\n\t\tmatch self {\n\t\t\tError::MismatchType(left, right) => Diagnostic::error()\n\t\t\t\t.with_code(\"E0308\")\n\t\t\t\t.with_message(\"mismatch types\")\n\t\t\t\t.with_labels(vec![\n\t\t\t\t\tLabel::primary((), right.range.clone()).with_message(format!(\n\t\t\t\t\t\t\"Expected `{}`, found: `{}`\",\n\t\t\t\t\t\tleft.content, right.content\n\t\t\t\t\t)),\n\t\t\t\t\tLabel::secondary((), left.range.clone()).with_message(\"expected due to this\"),\n\t\t\t\t]),\n\t\t\tError::MutatingImmutable(original, mutating) => Diagnostic::error()\n\t\t\t\t.with_code(\"E0384\")\n\t\t\t\t.with_message(format!(\n\t\t\t\t\t\"cannot mutate immutable variable `{}`\",\n\t\t\t\t\toriginal.content\n\t\t\t\t))\n\t\t\t\t.with_labels(vec![\n\t\t\t\t\tLabel::secondary((), original.range.clone()).with_message(unindent::unindent(\n\t\t\t\t\t\t&format!(\n\t\t\t\t\t\t\tr#\"\n first assignment to `{0}`\n help: make this binding mutable: `mut {0}`\n \"#,\n\t\t\t\t\t\t\toriginal.content\n\t\t\t\t\t\t),\n\t\t\t\t\t)),\n\t\t\t\t\tLabel::primary((), mutating.range.clone())\n\t\t\t\t\t\t.with_message(\"cannot assign twice to immutable variable\"),\n\t\t\t\t]),\n\t\t}\n\t}\n}\n\n\/\/\/ Representing a single error point in `Error` enum\n\/\/\/ In a more complex program it could also contain `FileId` to handle error that occurs inside multiple files\nstruct Item {\n\trange: Range<usize>,\n\tcontent: String,\n}\n\nimpl Item 
{\n\tfn new(range: Range<usize>, content: impl Into<String>) -> Item {\n\t\tlet content = content.into();\n\t\tItem { range, content }\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Algorithm of Horspool.\n\/\/! Window-based, similar to but faster than Boyer-Moore.\n\/\/!\n\/\/! # Idea\n\/\/! Look at a search window m, match pattern backwards.\n\/\/! In case of a mismatch, you can jump behind that.\n\/\/! Best case time complexity: O(n \/ m)\n\/\/! Worst case time complexity: O(n * m)\n\/\/! With a large alphabet, you are likely\n\/\/! around the best case, and faster than the rather\n\/\/! complicated Boyer-Moore.\n\/\/!\n\/\/! The algorithm has two phases (let a be the last symbol in the window):\n\/\/!\n\/\/! 1. test phase: compare the last symbol of the window.\n\/\/! If it matches, compare the whole pattern.\n\/\/! If it does not match, continue with the shift phase.\n\/\/! 2. shift phase: let l[a] be the rightmost position of a in \n\/\/! the pattern without the last symbol. If it does not occur\n\/\/! let l[a] be -1. Shift the window by m - 1 - l[a]. I.e.\n\/\/! we shift the window such that the rightmost a matches\n\/\/! the a at the end of the last window.\n\/\/! If a does not occur in the pattern, we shift by the whole length.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use bio::pattern_matching::horspool::Horspool;\n\/\/! let text = b\"ACGGCTAGGAAAAAGACTGAGGACTGAAAA\";\n\/\/! let pattern = b\"GAAAA\";\n\/\/! let horspool = Horspool::new(pattern);\n\/\/! let occ: Vec<usize> = horspool.find_all(text).collect();\n\/\/! assert_eq!(occ, [8, 25]);\n\/\/! 
```\n\n\npub struct Horspool<'a> {\n shift: Vec<usize>,\n m: usize,\n pattern: &'a [u8]\n}\n\n\nimpl<'a> Horspool<'a> {\n pub fn new(pattern: &'a [u8]) -> Self {\n let m = pattern.len();\n let mut shift = vec![m; 256];\n \/\/ shift is m for all not occurring characters\n \/\/ and m - 1 - j for all others\n for (j, &a) in pattern[..m-1].iter().enumerate() {\n shift[a as usize] = m - 1 - j;\n }\n\n Horspool {\n m: m, shift: shift, pattern: pattern\n }\n }\n\n pub fn find_all<'b>(&'b self, text: &'b [u8]) -> HorspoolMatches {\n HorspoolMatches {\n horspool: self, text: text, n: text.len(),\n last: self.m - 1,\n pattern_last: self.pattern[self.m - 1]\n }\n }\n}\n\n\npub struct HorspoolMatches<'a> {\n horspool: &'a Horspool<'a>,\n text: &'a [u8],\n n: usize,\n last: usize,\n pattern_last: u8\n}\n\n\nimpl<'a> Iterator for HorspoolMatches<'a> {\n type Item = usize;\n\n fn next(&mut self) -> Option<usize> {\n loop {\n \/\/ shift until the last symbol matches\n while self.last < self.n\n && self.text[self.last] != self.pattern_last {\n self.last += self.horspool.shift[self.text[self.last] as usize];\n }\n \/\/ stop if end of text is reached\n if self.last >= self.n {\n return None;\n }\n\n \/\/ putative start position\n let i = self.last - self.horspool.m + 1;\n let j = self.last;\n\n \/\/ shift again (after both match and mismatch, this makes sense)\n self.last += self.horspool.shift[self.pattern_last as usize];\n\n if self.text[i..j] == self.horspool.pattern[..self.horspool.m-1] {\n return Some(i);\n }\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::Horspool;\n\n #[test]\n fn test_shift() {\n let pattern = b\"AACB\";\n let horspool = Horspool::new(pattern);\n assert_eq!(horspool.shift[b'A' as usize], 2);\n assert_eq!(horspool.shift[b'C' as usize], 1);\n assert_eq!(horspool.shift[b'B' as usize], 4);\n assert_eq!(horspool.shift[b'X' as usize], 4);\n }\n}\n<commit_msg>Fixed overflow in horspool.<commit_after>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Algorithm of Horspool.\n\/\/! Window-based, similar to but faster than Boyer-Moore.\n\/\/!\n\/\/! # Idea\n\/\/! Look at a search window m, match pattern backwards.\n\/\/! In case of a mismatch, you can jump behind that.\n\/\/! Best case time complexity: O(n \/ m)\n\/\/! Worst case time complexity: O(n * m)\n\/\/! With a large alphabet, you are likely\n\/\/! around the best case, and faster than the rather\n\/\/! complicated Boyer-Moore.\n\/\/!\n\/\/! The algorithm has two phases (let a be the last symbol in the window):\n\/\/!\n\/\/! 1. test phase: compare the last symbol of the window.\n\/\/! If it matches, compare the whole pattern.\n\/\/! If it does not match, continue with the shift phase.\n\/\/! 2. shift phase: let l[a] be the rightmost position of a in \n\/\/! the pattern without the last symbol. If it does not occur\n\/\/! let l[a] be -1. Shift the window by m - 1 - l[a]. I.e.\n\/\/! we shift the window such that the rightmost a matches\n\/\/! the a at the end of the last window.\n\/\/! If a does not occur in the pattern, we shift by the whole length.\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use bio::pattern_matching::horspool::Horspool;\n\/\/! let text = b\"ACGGCTAGGAAAAAGACTGAGGACTGAAAA\";\n\/\/! let pattern = b\"GAAAA\";\n\/\/! let horspool = Horspool::new(pattern);\n\/\/! let occ: Vec<usize> = horspool.find_all(text).collect();\n\/\/! 
assert_eq!(occ, [8, 25]);\n\/\/! ```\n\n\npub struct Horspool<'a> {\n shift: Vec<usize>,\n m: usize,\n pattern: &'a [u8]\n}\n\n\nimpl<'a> Horspool<'a> {\n pub fn new(pattern: &'a [u8]) -> Self {\n let m = pattern.len();\n let mut shift = vec![m; 256];\n \/\/ shift is m for all not occurring characters\n \/\/ and m - 1 - j for all others\n for (j, &a) in pattern[..m-1].iter().enumerate() {\n shift[a as usize] = m - 1 - j;\n }\n\n Horspool {\n m: m, shift: shift, pattern: pattern\n }\n }\n\n pub fn find_all<'b>(&'b self, text: &'b [u8]) -> HorspoolMatches {\n HorspoolMatches {\n horspool: self, text: text, n: text.len(),\n last: self.m - 1,\n pattern_last: self.pattern[self.m - 1]\n }\n }\n}\n\n\npub struct HorspoolMatches<'a> {\n horspool: &'a Horspool<'a>,\n text: &'a [u8],\n n: usize,\n last: usize,\n pattern_last: u8\n}\n\n\nimpl<'a> Iterator for HorspoolMatches<'a> {\n type Item = usize;\n\n fn next(&mut self) -> Option<usize> {\n loop {\n \/\/ shift until the last symbol matches\n while self.last < self.n\n && self.text[self.last] != self.pattern_last {\n self.last += self.horspool.shift[self.text[self.last] as usize];\n }\n \/\/ stop if end of text is reached\n if self.last >= self.n {\n return None;\n }\n\n \/\/ putative start position\n let i = self.last + 1 - self.horspool.m;\n let j = self.last;\n\n \/\/ shift again (after both match and mismatch, this makes sense)\n self.last += self.horspool.shift[self.pattern_last as usize];\n\n if self.text[i..j] == self.horspool.pattern[..self.horspool.m-1] {\n return Some(i);\n }\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::Horspool;\n\n #[test]\n fn test_shift() {\n let pattern = b\"AACB\";\n let horspool = Horspool::new(pattern);\n assert_eq!(horspool.shift[b'A' as usize], 2);\n assert_eq!(horspool.shift[b'C' as usize], 1);\n assert_eq!(horspool.shift[b'B' as usize], 4);\n assert_eq!(horspool.shift[b'X' as usize], 4);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem 57<commit_after>#[deriving(Clone)]\nenum BinaryTree<T> {\n Node(T, ~BinaryTree<T>, ~BinaryTree<T>),\n Empty\n}\n\nimpl<T: Eq> Eq for BinaryTree<T> {\n fn eq(&self, other: &BinaryTree<T>) -> bool {\n match (self, other) {\n (&Empty, &Empty) => true,\n (&Node(ref a, ref ll, ref lr), &Node(ref b, ref rl, ref rr)) if a == b =>\n ll == rl && lr == rr,\n _ => false\n }\n }\n}\n\nfn construct<T: Ord>(list: ~[T]) -> BinaryTree<T> {\n fn insert<T: Ord>(tree: BinaryTree<T>, elem: T) -> BinaryTree<T> {\n match tree {\n Empty => Node(elem, ~Empty, ~Empty),\n Node(e, ~l, ~r) => if elem < e {\n Node(e, ~insert(l, elem), ~r)\n } else {\n Node(e, ~l, ~insert(r, elem))\n }\n }\n }\n list.move_iter().fold(Empty, |t, e| insert(t, e))\n}\n\nfn main() {\n assert!(construct(~[3, 2, 5, 7, 1]) ==\n Node(3, ~Node(2, ~Node(1, ~Empty, ~Empty), ~Empty),\n ~Node(5, ~Empty, ~Node(7, ~Empty, ~Empty))));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-arm\n\/\/ ignore-aarch64\n\/\/ ignore-powerpc\n\/\/ ignore-aarch64\n\/\/ ignore-wasm\n\/\/ ignore-emscripten\n\/\/ ignore-windows\n\/\/ min-system-llvm-version 5.0\n\/\/ compile-flags: -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n\n#[no_mangle]\npub fn foo() {\n\/\/ CHECK: @foo() unnamed_addr #0\n\/\/ CHECK: attributes #0 = { {{.*}}\"probe-stack\"=\"__rust_probestack\"{{.*}} }\n}\n<commit_msg>test: remove duplicate ignore-aarch64 from stack-probes test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-arm\n\/\/ ignore-aarch64\n\/\/ ignore-powerpc\n\/\/ ignore-wasm\n\/\/ ignore-emscripten\n\/\/ ignore-windows\n\/\/ min-system-llvm-version 5.0\n\/\/ compile-flags: -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n\n#[no_mangle]\npub fn foo() {\n\/\/ CHECK: @foo() unnamed_addr #0\n\/\/ CHECK: attributes #0 = { {{.*}}\"probe-stack\"=\"__rust_probestack\"{{.*}} }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rollup merge of #21808: jfager\/r20454<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::thread;\n\nfn main() {\n thread::Thread::spawn(move || { \/\/ no need for -> ()\n loop {\n println!(\"hello\");\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #29004 - frewsxcv:regression-test-22814, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Test {}\n\nmacro_rules! test {\n( $($name:ident)+) => (\n impl<$($name: Test),*> Test for ($($name,)*) {\n }\n)\n}\n\ntest!(A B C);\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(doc) Add echo example<commit_after>\/\/ An example that echoes the body of the request back as the response.\n\/\/\n\/\/ Shows how to read the request body with error handling and how to return a\n\/\/ response. 
See `helper_macros` example for a different way to handle errors.\n\nextern crate iron;\n\nuse std::io::Read;\n\nuse iron::prelude::*;\nuse iron::status;\n\nfn echo(request: &mut Request) -> IronResult<Response> {\n let mut body = Vec::new();\n request\n .body\n .read_to_end(&mut body)\n .map_err(|e| IronError::new(e, (status::InternalServerError, \"Error reading request\")))?;\n Ok(Response::with((status::Ok, body)))\n}\n\nfn main() {\n Iron::new(echo).http(\"localhost:3000\").unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added fifo example<commit_after>\nextern crate atom;\n\nuse std::thread;\nuse std::sync::{Arc, Barrier};\nuse atom::*;\n\nstruct Link {\n next: AtomSetOnce<Link, Arc<Link>>\n}\n\nfn main() {\n let b = Arc::new(Barrier::new(11));\n\n let head = Arc::new(Link{next: AtomSetOnce::empty()});\n\n for _ in (0..10) {\n let b = b.clone();\n let head = head.clone();\n thread::spawn(move || {\n let mut hptr = &*head;\n\n for _ in (0..100) {\n let mut my_awesome_node = Arc::new(Link {\n next: AtomSetOnce::empty()\n });\n\n\n loop {\n while let Some(h) = hptr.next.get(Ordering::SeqCst) {\n hptr = h;\n }\n\n my_awesome_node = match hptr.next.set_if_none(my_awesome_node, Ordering::SeqCst) {\n Some(v) => v,\n None => break\n };\n }\n }\n b.wait();\n });\n }\n\n b.wait();\n\n let mut hptr = &*head;\n let mut count = 0;\n while let Some(h) = hptr.next.get(Ordering::SeqCst) {\n hptr = h;\n count += 1;\n }\n println!(\"Using {} threads we wrote {} links at the same time!\", 10, count);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #78391 - JulianKnodt:mc_test, r=lcnr<commit_after>\/\/ run-pass\n\n#![feature(min_const_generics)]\n\nconst fn identity<const T: u32>() -> u32 { T }\n\n#[derive(Eq, PartialEq, Debug)]\npub struct ConstU32<const U: u32>;\n\npub fn new() -> ConstU32<{ identity::<3>() }> {\n ConstU32::<{ identity::<3>() }>\n}\n\nfn main() {\n let v = new();\n assert_eq!(v, ConstU32::<3>);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove feature(duration) now that it is stable in nightly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test(basic_syncback): add test (currently failing)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(lib): `unwrap` has been deprecated in favor of `into_inner`<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::{fs, IoResult};\nuse std::io;\n\nuse ansi_term::{ANSIString, Colour, Style};\nuse ansi_term::Style::Plain;\nuse ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan, Fixed};\n\nuse users::Users;\n\nuse number_prefix::{binary_prefix, decimal_prefix, Prefixed, Standalone, PrefixNames};\n\nuse column::{Column, SizeFormat, Cell};\nuse column::Column::*;\nuse dir::Dir;\nuse filetype::HasType;\n\n\/\/\/ This grey value is directly in between white and black, so it's guaranteed\n\/\/\/ to show up on either backgrounded terminal.\npub static GREY: Colour = Fixed(244);\n\n\/\/\/ A **File** is a wrapper around one of Rust's Path objects, along with\n\/\/\/ associated data about the file.\n\/\/\/\n\/\/\/ Each file is definitely going to have its filename displayed at least\n\/\/\/ once, have its file extension extracted at least once, and have its stat\n\/\/\/ information queried at least once, so it makes sense to do all this at the\n\/\/\/ start and hold on to all the information.\npub struct File<'a> {\n pub name: String,\n pub dir: Option<&'a Dir>,\n pub ext: Option<String>,\n pub path: Path,\n pub stat: io::FileStat,\n}\n\nimpl<'a> 
File<'a> {\n \/\/\/ Create a new File object from the given Path, inside the given Dir, if\n \/\/\/ appropriate. Paths specified directly on the command-line have no Dirs.\n \/\/\/\n \/\/\/ This uses lstat instead of stat, which doesn't follow symbolic links.\n pub fn from_path(path: &Path, parent: Option<&'a Dir>) -> IoResult<File<'a>> {\n fs::lstat(path).map(|stat| File::with_stat(stat, path, parent))\n }\n\n \/\/\/ Create a new File object from the given Stat result, and other data.\n pub fn with_stat(stat: io::FileStat, path: &Path, parent: Option<&'a Dir>) -> File<'a> {\n let v = path.filename().unwrap(); \/\/ fails if \/ or . or ..\n let filename = String::from_utf8_lossy(v);\n\n File {\n path: path.clone(),\n dir: parent,\n stat: stat,\n name: filename.to_string(),\n ext: ext(filename.as_slice()),\n }\n }\n\n \/\/\/ Whether this file is a dotfile or not.\n pub fn is_dotfile(&self) -> bool {\n self.name.as_slice().starts_with(\".\")\n }\n\n \/\/\/ Whether this file is a temporary file or not.\n pub fn is_tmpfile(&self) -> bool {\n let name = self.name.as_slice();\n name.ends_with(\"~\") || (name.starts_with(\"#\") && name.ends_with(\"#\"))\n }\n\n \/\/\/ Get the data for a column, formatted as a coloured string.\n pub fn display<U: Users>(&self, column: &Column, users_cache: &mut U) -> Cell {\n match *column {\n Permissions => self.permissions_string(),\n FileName => self.file_name_view(),\n FileSize(f) => self.file_size(f),\n HardLinks => self.hard_links(),\n Inode => self.inode(),\n Blocks => self.blocks(),\n User => self.user(users_cache),\n Group => self.group(users_cache),\n }\n }\n\n \/\/\/ The \"file name view\" is what's displayed in the column and lines\n \/\/\/ views, but *not* in the grid view.\n \/\/\/\n \/\/\/ It consists of the file name coloured in the appropriate style, and,\n \/\/\/ if it's a symlink, an arrow pointing to the file it links to, also\n \/\/\/ coloured in the appropriate style.\n \/\/\/\n \/\/\/ If the symlink target doesn't exist, then instead of displaying\n \/\/\/ an error, highlight the target and arrow in red. 
The error would\n \/\/\/ be shown out of context, and it's almost always because the\n \/\/\/ target doesn't exist.\n pub fn file_name_view(&self) -> Cell {\n let name = &*self.name;\n let style = self.file_colour();\n\n if self.stat.kind == io::FileType::Symlink {\n match fs::readlink(&self.path) {\n Ok(path) => {\n let target_path = match self.dir {\n Some(dir) => dir.path.join(path),\n None => path,\n };\n\n match self.target_file(&target_path) {\n Ok(file) => Cell {\n length: 0, \/\/ These lengths are never actually used...\n text: format!(\"{} {} {}{}{}\",\n style.paint(name),\n GREY.paint(\"=>\"),\n Cyan.paint(target_path.dirname_str().unwrap()),\n Cyan.paint(\"\/\"),\n file.file_colour().paint(file.name.as_slice())),\n },\n Err(filename) => Cell {\n length: 0, \/\/ ...because the rightmost column lengths are ignored!\n text: format!(\"{} {} {}\",\n style.paint(name),\n Red.paint(\"=>\"),\n Red.underline().paint(filename.as_slice())),\n },\n }\n }\n Err(_) => Cell::paint(style, name),\n }\n }\n else {\n Cell::paint(style, name)\n }\n }\n\n \/\/\/ The `ansi_term::Style` that this file's name should be painted.\n pub fn file_colour(&self) -> Style {\n self.get_type().style()\n }\n\n \/\/\/ The Unicode 'display width' of the filename.\n \/\/\/\n \/\/\/ This is related to the number of graphemes in the string: most\n \/\/\/ characters are 1 columns wide, but in some contexts, certain\n \/\/\/ characters are actually 2 columns wide.\n pub fn file_name_width(&self) -> usize {\n self.name.as_slice().width(false)\n }\n\n \/\/\/ Assuming the current file is a symlink, follows the link and\n \/\/\/ returns a File object from the path the link points to.\n \/\/\/\n \/\/\/ If statting the file fails (usually because the file on the\n \/\/\/ other end doesn't exist), returns the *filename* of the file\n \/\/\/ that should be there.\n fn target_file(&self, target_path: &Path) -> Result<File, String> {\n let v = target_path.filename().unwrap();\n let filename = String::from_utf8_lossy(v);\n\n \/\/ Use stat instead of lstat - we *want* to follow links.\n if let Ok(stat) = fs::stat(target_path) {\n Ok(File {\n path: target_path.clone(),\n dir: self.dir,\n stat: stat,\n name: filename.to_string(),\n ext: ext(filename.as_slice()),\n })\n }\n else {\n Err(filename.to_string())\n }\n }\n\n \/\/\/ This file's number of hard links as a coloured string.\n fn hard_links(&self) -> Cell {\n let style = if self.has_multiple_links() { Red.on(Yellow) } else { Red.normal() };\n Cell::paint(style, &*self.stat.unstable.nlink.to_string())\n }\n\n \/\/\/ Whether this is a regular file with more than one link.\n \/\/\/\n \/\/\/ This is important, because a file with multiple links is uncommon,\n \/\/\/ while you can come across directories and other types with multiple\n \/\/\/ links much more often.\n fn has_multiple_links(&self) -> bool {\n self.stat.kind == io::FileType::RegularFile && self.stat.unstable.nlink > 1\n }\n\n \/\/\/ This file's inode as a coloured string.\n fn inode(&self) -> Cell {\n Cell::paint(Purple.normal(), &*self.stat.unstable.inode.to_string())\n }\n\n \/\/\/ This file's number of filesystem blocks (if available) as a coloured string.\n fn blocks(&self) -> Cell {\n if self.stat.kind == io::FileType::RegularFile || self.stat.kind == io::FileType::Symlink {\n Cell::paint(Cyan.normal(), &*self.stat.unstable.blocks.to_string())\n }\n else {\n Cell { text: GREY.paint(\"-\").to_string(), length: 1 }\n }\n }\n\n \/\/\/ This file's owner's username as a coloured string.\n \/\/\/\n \/\/\/ If the user is 
not present, then it formats the uid as a number\n \/\/\/ instead. This usually happens when a user is deleted, but still owns\n \/\/\/ files.\n fn user<U: Users>(&self, users_cache: &mut U) -> Cell {\n let uid = self.stat.unstable.uid as i32;\n\n let user_name = match users_cache.get_user_by_uid(uid) {\n Some(user) => user.name,\n None => uid.to_string(),\n };\n\n let style = if users_cache.get_current_uid() == uid { Yellow.bold() } else { Plain };\n Cell::paint(style, &*user_name)\n }\n\n \/\/\/ This file's group name as a coloured string.\n \/\/\/\n \/\/\/ As above, if not present, it formats the gid as a number instead.\n fn group<U: Users>(&self, users_cache: &mut U) -> Cell {\n let gid = self.stat.unstable.gid as u32;\n let mut style = Plain;\n\n let group_name = match users_cache.get_group_by_gid(gid) {\n Some(group) => {\n let current_uid = users_cache.get_current_uid();\n if let Some(current_user) = users_cache.get_user_by_uid(current_uid) {\n if current_user.primary_group == group.gid || group.members.contains(¤t_user.name) {\n style = Yellow.bold();\n }\n }\n group.name\n },\n None => gid.to_string(),\n };\n\n Cell::paint(style, &*group_name)\n }\n\n \/\/\/ This file's size, formatted using the given way, as a coloured string.\n \/\/\/\n \/\/\/ For directories, no size is given. Although they do have a size on\n \/\/\/ some filesystems, I've never looked at one of those numbers and gained\n \/\/\/ any information from it, so by emitting \"-\" instead, the table is less\n \/\/\/ cluttered with numbers.\n fn file_size(&self, size_format: SizeFormat) -> Cell {\n if self.stat.kind == io::FileType::Directory {\n Cell { text: GREY.paint(\"-\").to_string(), length: 1 }\n }\n else {\n let result = match size_format {\n SizeFormat::DecimalBytes => decimal_prefix(self.stat.size as f64),\n SizeFormat::BinaryBytes => binary_prefix(self.stat.size as f64),\n SizeFormat::JustBytes => return Cell::paint(Green.bold(), &*self.stat.size.to_string())\n };\n\n match result {\n Standalone(bytes) => Cell::paint(Green.bold(), &*bytes.to_string()),\n Prefixed(prefix, n) => {\n let number = if n < 10f64 { format!(\"{:.1}\", n) } else { format!(\"{:.0}\", n) };\n let symbol = prefix.symbol();\n\n Cell {\n text: format!(\"{}{}\", Green.bold().paint(&*number), Green.paint(symbol)),\n length: number.len() + symbol.len(),\n }\n }\n }\n }\n }\n\n \/\/\/ This file's type, represented by a coloured character.\n \/\/\/\n \/\/\/ Although the file type can usually be guessed from the colour of the\n \/\/\/ file, `ls` puts this character there, so people will expect it.\n fn type_char(&self) -> ANSIString {\n return match self.stat.kind {\n io::FileType::RegularFile => Plain.paint(\".\"),\n io::FileType::Directory => Blue.paint(\"d\"),\n io::FileType::NamedPipe => Yellow.paint(\"|\"),\n io::FileType::BlockSpecial => Purple.paint(\"s\"),\n io::FileType::Symlink => Cyan.paint(\"l\"),\n io::FileType::Unknown => Plain.paint(\"?\"),\n }\n }\n\n \/\/\/ Generate the \"rwxrwxrwx\" permissions string, like how ls does it.\n \/\/\/\n \/\/\/ Each character is given its own colour. 
The first three permission\n \/\/\/ bits are bold because they're the ones used most often, and executable\n \/\/\/ files are underlined to make them stand out more.\n fn permissions_string(&self) -> Cell {\n let bits = self.stat.perm;\n let executable_colour = match self.stat.kind {\n io::FileType::RegularFile => Green.bold().underline(),\n _ => Green.bold(),\n };\n\n let string = format!(\"{}{}{}{}{}{}{}{}{}{}\",\n self.type_char(),\n File::permission_bit(&bits, io::USER_READ, \"r\", Yellow.bold()),\n File::permission_bit(&bits, io::USER_WRITE, \"w\", Red.bold()),\n File::permission_bit(&bits, io::USER_EXECUTE, \"x\", executable_colour),\n File::permission_bit(&bits, io::GROUP_READ, \"r\", Yellow.normal()),\n File::permission_bit(&bits, io::GROUP_WRITE, \"w\", Red.normal()),\n File::permission_bit(&bits, io::GROUP_EXECUTE, \"x\", Green.normal()),\n File::permission_bit(&bits, io::OTHER_READ, \"r\", Yellow.normal()),\n File::permission_bit(&bits, io::OTHER_WRITE, \"w\", Red.normal()),\n File::permission_bit(&bits, io::OTHER_EXECUTE, \"x\", Green.normal()),\n );\n\n Cell { text: string, length: 10 }\n }\n\n \/\/\/ Helper method for the permissions string.\n fn permission_bit(bits: &io::FilePermission, bit: io::FilePermission, character: &'static str, style: Style) -> ANSIString<'static> {\n if bits.contains(bit) {\n style.paint(character)\n }\n else {\n GREY.paint(\"-\")\n }\n }\n\n \/\/\/ For this file, return a vector of alternate file paths that, if any of\n \/\/\/ them exist, mean that *this* file should be coloured as `Compiled`.\n \/\/\/\n \/\/\/ The point of this is to highlight compiled files such as `foo.o` when\n \/\/\/ their source file `foo.c` exists in the same directory. It's too\n \/\/\/ dangerous to highlight *all* compiled, so the paths in this vector\n \/\/\/ are checked for existence first: for example, `foo.js` is perfectly\n \/\/\/ valid without `foo.coffee`.\n pub fn get_source_files(&self) -> Vec<Path> {\n if let Some(ref ext) = self.ext {\n match ext.as_slice() {\n \"class\" => vec![self.path.with_extension(\"java\")], \/\/ Java\n \"css\" => vec![self.path.with_extension(\"sass\"), self.path.with_extension(\"less\")], \/\/ SASS, Less\n \"elc\" => vec![self.path.with_extension(\"el\")], \/\/ Emacs Lisp\n \"hi\" => vec![self.path.with_extension(\"hs\")], \/\/ Haskell\n \"js\" => vec![self.path.with_extension(\"coffee\"), self.path.with_extension(\"ts\")], \/\/ CoffeeScript, TypeScript\n \"o\" => vec![self.path.with_extension(\"c\"), self.path.with_extension(\"cpp\")], \/\/ C, C++\n \"pyc\" => vec![self.path.with_extension(\"py\")], \/\/ Python\n\n \"aux\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX: auxiliary file\n \"bbl\" => vec![self.path.with_extension(\"tex\")], \/\/ BibTeX bibliography file\n \"blg\" => vec![self.path.with_extension(\"tex\")], \/\/ BibTeX log file\n \"lof\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX list of figures\n \"log\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX log file\n \"lot\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX list of tables\n \"toc\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX table of contents\n\n _ => vec![], \/\/ No source files if none of the above\n }\n }\n else {\n vec![] \/\/ No source files if there's no extension, either!\n }\n }\n}\n\n\/\/\/ Extract an extension from a string, if one is present.\n\/\/\/\n\/\/\/ The extension is the series of characters after the last dot. 
This\n\/\/\/ deliberately counts dotfiles, so the \".git\" folder has the extension \"git\".\nfn ext<'a>(name: &'a str) -> Option<String> {\n name.rfind('.').map(|p| name[p+1..].to_string())\n}\n<commit_msg>Extract symlink stuff into its own method<commit_after>use std::io::{fs, IoResult};\nuse std::io;\n\nuse ansi_term::{ANSIString, Colour, Style};\nuse ansi_term::Style::Plain;\nuse ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan, Fixed};\n\nuse users::Users;\n\nuse number_prefix::{binary_prefix, decimal_prefix, Prefixed, Standalone, PrefixNames};\n\nuse column::{Column, SizeFormat, Cell};\nuse column::Column::*;\nuse dir::Dir;\nuse filetype::HasType;\n\n\/\/\/ This grey value is directly in between white and black, so it's guaranteed\n\/\/\/ to show up on either backgrounded terminal.\npub static GREY: Colour = Fixed(244);\n\n\/\/\/ A **File** is a wrapper around one of Rust's Path objects, along with\n\/\/\/ associated data about the file.\n\/\/\/\n\/\/\/ Each file is definitely going to have its filename displayed at least\n\/\/\/ once, have its file extension extracted at least once, and have its stat\n\/\/\/ information queried at least once, so it makes sense to do all this at the\n\/\/\/ start and hold on to all the information.\npub struct File<'a> {\n pub name: String,\n pub dir: Option<&'a Dir>,\n pub ext: Option<String>,\n pub path: Path,\n pub stat: io::FileStat,\n}\n\nimpl<'a> File<'a> {\n \/\/\/ Create a new File object from the given Path, inside the given Dir, if\n \/\/\/ appropriate. Paths specified directly on the command-line have no Dirs.\n \/\/\/\n \/\/\/ This uses lstat instead of stat, which doesn't follow symbolic links.\n pub fn from_path(path: &Path, parent: Option<&'a Dir>) -> IoResult<File<'a>> {\n fs::lstat(path).map(|stat| File::with_stat(stat, path, parent))\n }\n\n \/\/\/ Create a new File object from the given Stat result, and other data.\n pub fn with_stat(stat: io::FileStat, path: &Path, parent: Option<&'a Dir>) -> File<'a> {\n let v = path.filename().unwrap(); \/\/ fails if \/ or . 
or ..\n let filename = String::from_utf8_lossy(v);\n\n File {\n path: path.clone(),\n dir: parent,\n stat: stat,\n name: filename.to_string(),\n ext: ext(filename.as_slice()),\n }\n }\n\n \/\/\/ Whether this file is a dotfile or not.\n pub fn is_dotfile(&self) -> bool {\n self.name.as_slice().starts_with(\".\")\n }\n\n \/\/\/ Whether this file is a temporary file or not.\n pub fn is_tmpfile(&self) -> bool {\n let name = self.name.as_slice();\n name.ends_with(\"~\") || (name.starts_with(\"#\") && name.ends_with(\"#\"))\n }\n\n \/\/\/ Get the data for a column, formatted as a coloured string.\n pub fn display<U: Users>(&self, column: &Column, users_cache: &mut U) -> Cell {\n match *column {\n Permissions => self.permissions_string(),\n FileName => self.file_name_view(),\n FileSize(f) => self.file_size(f),\n HardLinks => self.hard_links(),\n Inode => self.inode(),\n Blocks => self.blocks(),\n User => self.user(users_cache),\n Group => self.group(users_cache),\n }\n }\n\n \/\/\/ The \"file name view\" is what's displayed in the column and lines\n \/\/\/ views, but *not* in the grid view.\n \/\/\/\n \/\/\/ It consists of the file name coloured in the appropriate style,\n \/\/\/ with special formatting for a symlink.\n pub fn file_name_view(&self) -> Cell {\n if self.stat.kind == io::FileType::Symlink {\n self.symlink_file_name_view()\n }\n else {\n Cell {\n length: 0, \/\/ This length is ignored (rightmost column)\n text: self.file_colour().paint(&*self.name).to_string(),\n }\n }\n }\n\n \/\/\/ If this file is a symlink, returns a string displaying its name,\n \/\/\/ and an arrow pointing to the file it links to, which is also\n \/\/\/ coloured in the appropriate style.\n \/\/\/\n \/\/\/ If the symlink target doesn't exist, then instead of displaying\n \/\/\/ an error, highlight the target and arrow in red. 
The error would\n \/\/\/ be shown out of context, and it's almost always because the\n \/\/\/ target doesn't exist.\n fn symlink_file_name_view(&self) -> Cell {\n let name = &*self.name;\n let style = self.file_colour();\n\n if let Ok(path) = fs::readlink(&self.path) {\n let target_path = match self.dir {\n Some(dir) => dir.path.join(path),\n None => path,\n };\n\n match self.target_file(&target_path) {\n Ok(file) => Cell {\n length: 0, \/\/ These lengths are never actually used...\n text: format!(\"{} {} {}{}{}\",\n style.paint(name),\n GREY.paint(\"=>\"),\n Cyan.paint(target_path.dirname_str().unwrap()),\n Cyan.paint(\"\/\"),\n file.file_colour().paint(file.name.as_slice())),\n },\n Err(filename) => Cell {\n length: 0, \/\/ ...because the rightmost column lengths are ignored!\n text: format!(\"{} {} {}\",\n style.paint(name),\n Red.paint(\"=>\"),\n Red.underline().paint(filename.as_slice())),\n },\n }\n }\n else {\n Cell::paint(style, name)\n }\n }\n\n \/\/\/ The `ansi_term::Style` that this file's name should be painted.\n pub fn file_colour(&self) -> Style {\n self.get_type().style()\n }\n\n \/\/\/ The Unicode 'display width' of the filename.\n \/\/\/\n \/\/\/ This is related to the number of graphemes in the string: most\n \/\/\/ characters are 1 columns wide, but in some contexts, certain\n \/\/\/ characters are actually 2 columns wide.\n pub fn file_name_width(&self) -> usize {\n self.name.as_slice().width(false)\n }\n\n \/\/\/ Assuming the current file is a symlink, follows the link and\n \/\/\/ returns a File object from the path the link points to.\n \/\/\/\n \/\/\/ If statting the file fails (usually because the file on the\n \/\/\/ other end doesn't exist), returns the *filename* of the file\n \/\/\/ that should be there.\n fn target_file(&self, target_path: &Path) -> Result<File, String> {\n let v = target_path.filename().unwrap();\n let filename = String::from_utf8_lossy(v);\n\n \/\/ Use stat instead of lstat - we *want* to follow links.\n if let Ok(stat) = fs::stat(target_path) {\n Ok(File {\n path: target_path.clone(),\n dir: self.dir,\n stat: stat,\n name: filename.to_string(),\n ext: ext(filename.as_slice()),\n })\n }\n else {\n Err(filename.to_string())\n }\n }\n\n \/\/\/ This file's number of hard links as a coloured string.\n fn hard_links(&self) -> Cell {\n let style = if self.has_multiple_links() { Red.on(Yellow) } else { Red.normal() };\n Cell::paint(style, &*self.stat.unstable.nlink.to_string())\n }\n\n \/\/\/ Whether this is a regular file with more than one link.\n \/\/\/\n \/\/\/ This is important, because a file with multiple links is uncommon,\n \/\/\/ while you can come across directories and other types with multiple\n \/\/\/ links much more often.\n fn has_multiple_links(&self) -> bool {\n self.stat.kind == io::FileType::RegularFile && self.stat.unstable.nlink > 1\n }\n\n \/\/\/ This file's inode as a coloured string.\n fn inode(&self) -> Cell {\n Cell::paint(Purple.normal(), &*self.stat.unstable.inode.to_string())\n }\n\n \/\/\/ This file's number of filesystem blocks (if available) as a coloured string.\n fn blocks(&self) -> Cell {\n if self.stat.kind == io::FileType::RegularFile || self.stat.kind == io::FileType::Symlink {\n Cell::paint(Cyan.normal(), &*self.stat.unstable.blocks.to_string())\n }\n else {\n Cell { text: GREY.paint(\"-\").to_string(), length: 1 }\n }\n }\n\n \/\/\/ This file's owner's username as a coloured string.\n \/\/\/\n \/\/\/ If the user is not present, then it formats the uid as a number\n \/\/\/ instead. 
This usually happens when a user is deleted, but still owns\n \/\/\/ files.\n fn user<U: Users>(&self, users_cache: &mut U) -> Cell {\n let uid = self.stat.unstable.uid as i32;\n\n let user_name = match users_cache.get_user_by_uid(uid) {\n Some(user) => user.name,\n None => uid.to_string(),\n };\n\n let style = if users_cache.get_current_uid() == uid { Yellow.bold() } else { Plain };\n Cell::paint(style, &*user_name)\n }\n\n \/\/\/ This file's group name as a coloured string.\n \/\/\/\n \/\/\/ As above, if not present, it formats the gid as a number instead.\n fn group<U: Users>(&self, users_cache: &mut U) -> Cell {\n let gid = self.stat.unstable.gid as u32;\n let mut style = Plain;\n\n let group_name = match users_cache.get_group_by_gid(gid) {\n Some(group) => {\n let current_uid = users_cache.get_current_uid();\n if let Some(current_user) = users_cache.get_user_by_uid(current_uid) {\n if current_user.primary_group == group.gid || group.members.contains(¤t_user.name) {\n style = Yellow.bold();\n }\n }\n group.name\n },\n None => gid.to_string(),\n };\n\n Cell::paint(style, &*group_name)\n }\n\n \/\/\/ This file's size, formatted using the given way, as a coloured string.\n \/\/\/\n \/\/\/ For directories, no size is given. Although they do have a size on\n \/\/\/ some filesystems, I've never looked at one of those numbers and gained\n \/\/\/ any information from it, so by emitting \"-\" instead, the table is less\n \/\/\/ cluttered with numbers.\n fn file_size(&self, size_format: SizeFormat) -> Cell {\n if self.stat.kind == io::FileType::Directory {\n Cell { text: GREY.paint(\"-\").to_string(), length: 1 }\n }\n else {\n let result = match size_format {\n SizeFormat::DecimalBytes => decimal_prefix(self.stat.size as f64),\n SizeFormat::BinaryBytes => binary_prefix(self.stat.size as f64),\n SizeFormat::JustBytes => return Cell::paint(Green.bold(), &*self.stat.size.to_string())\n };\n\n match result {\n Standalone(bytes) => Cell::paint(Green.bold(), &*bytes.to_string()),\n Prefixed(prefix, n) => {\n let number = if n < 10f64 { format!(\"{:.1}\", n) } else { format!(\"{:.0}\", n) };\n let symbol = prefix.symbol();\n\n Cell {\n text: format!(\"{}{}\", Green.bold().paint(&*number), Green.paint(symbol)),\n length: number.len() + symbol.len(),\n }\n }\n }\n }\n }\n\n \/\/\/ This file's type, represented by a coloured character.\n \/\/\/\n \/\/\/ Although the file type can usually be guessed from the colour of the\n \/\/\/ file, `ls` puts this character there, so people will expect it.\n fn type_char(&self) -> ANSIString {\n return match self.stat.kind {\n io::FileType::RegularFile => Plain.paint(\".\"),\n io::FileType::Directory => Blue.paint(\"d\"),\n io::FileType::NamedPipe => Yellow.paint(\"|\"),\n io::FileType::BlockSpecial => Purple.paint(\"s\"),\n io::FileType::Symlink => Cyan.paint(\"l\"),\n io::FileType::Unknown => Plain.paint(\"?\"),\n }\n }\n\n \/\/\/ Generate the \"rwxrwxrwx\" permissions string, like how ls does it.\n \/\/\/\n \/\/\/ Each character is given its own colour. 
The first three permission\n \/\/\/ bits are bold because they're the ones used most often, and executable\n \/\/\/ files are underlined to make them stand out more.\n fn permissions_string(&self) -> Cell {\n let bits = self.stat.perm;\n let executable_colour = match self.stat.kind {\n io::FileType::RegularFile => Green.bold().underline(),\n _ => Green.bold(),\n };\n\n let string = format!(\"{}{}{}{}{}{}{}{}{}{}\",\n self.type_char(),\n File::permission_bit(&bits, io::USER_READ, \"r\", Yellow.bold()),\n File::permission_bit(&bits, io::USER_WRITE, \"w\", Red.bold()),\n File::permission_bit(&bits, io::USER_EXECUTE, \"x\", executable_colour),\n File::permission_bit(&bits, io::GROUP_READ, \"r\", Yellow.normal()),\n File::permission_bit(&bits, io::GROUP_WRITE, \"w\", Red.normal()),\n File::permission_bit(&bits, io::GROUP_EXECUTE, \"x\", Green.normal()),\n File::permission_bit(&bits, io::OTHER_READ, \"r\", Yellow.normal()),\n File::permission_bit(&bits, io::OTHER_WRITE, \"w\", Red.normal()),\n File::permission_bit(&bits, io::OTHER_EXECUTE, \"x\", Green.normal()),\n );\n\n Cell { text: string, length: 10 }\n }\n\n \/\/\/ Helper method for the permissions string.\n fn permission_bit(bits: &io::FilePermission, bit: io::FilePermission, character: &'static str, style: Style) -> ANSIString<'static> {\n if bits.contains(bit) {\n style.paint(character)\n }\n else {\n GREY.paint(\"-\")\n }\n }\n\n \/\/\/ For this file, return a vector of alternate file paths that, if any of\n \/\/\/ them exist, mean that *this* file should be coloured as `Compiled`.\n \/\/\/\n \/\/\/ The point of this is to highlight compiled files such as `foo.o` when\n \/\/\/ their source file `foo.c` exists in the same directory. It's too\n \/\/\/ dangerous to highlight *all* compiled, so the paths in this vector\n \/\/\/ are checked for existence first: for example, `foo.js` is perfectly\n \/\/\/ valid without `foo.coffee`.\n pub fn get_source_files(&self) -> Vec<Path> {\n if let Some(ref ext) = self.ext {\n match ext.as_slice() {\n \"class\" => vec![self.path.with_extension(\"java\")], \/\/ Java\n \"css\" => vec![self.path.with_extension(\"sass\"), self.path.with_extension(\"less\")], \/\/ SASS, Less\n \"elc\" => vec![self.path.with_extension(\"el\")], \/\/ Emacs Lisp\n \"hi\" => vec![self.path.with_extension(\"hs\")], \/\/ Haskell\n \"js\" => vec![self.path.with_extension(\"coffee\"), self.path.with_extension(\"ts\")], \/\/ CoffeeScript, TypeScript\n \"o\" => vec![self.path.with_extension(\"c\"), self.path.with_extension(\"cpp\")], \/\/ C, C++\n \"pyc\" => vec![self.path.with_extension(\"py\")], \/\/ Python\n\n \"aux\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX: auxiliary file\n \"bbl\" => vec![self.path.with_extension(\"tex\")], \/\/ BibTeX bibliography file\n \"blg\" => vec![self.path.with_extension(\"tex\")], \/\/ BibTeX log file\n \"lof\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX list of figures\n \"log\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX log file\n \"lot\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX list of tables\n \"toc\" => vec![self.path.with_extension(\"tex\")], \/\/ TeX table of contents\n\n _ => vec![], \/\/ No source files if none of the above\n }\n }\n else {\n vec![] \/\/ No source files if there's no extension, either!\n }\n }\n}\n\n\/\/\/ Extract an extension from a string, if one is present.\n\/\/\/\n\/\/\/ The extension is the series of characters after the last dot. 
This\n\/\/\/ deliberately counts dotfiles, so the \".git\" folder has the extension \"git\".\nfn ext<'a>(name: &'a str) -> Option<String> {\n name.rfind('.').map(|p| name[p+1..].to_string())\n}\n<|endoftext|>"} {"text":"<commit_before>use std::sync::Arc;\n\nuse {Future, Wake, Tokens};\n\n\/\/\/ A future which \"fuse\"s an future once it's been resolved.\n\/\/\/\n\/\/\/ Normally futures can behave unpredictable once they're used after a future\n\/\/\/ has been resolved, but `Fuse` is always defined to return `None` from `poll`\n\/\/\/ after it has succeeded, and after it has succeeded all future calls to\n\/\/\/ `schedule` will be ignored.\npub struct Fuse<A> {\n future: A,\n done: bool,\n}\n\npub fn new<A: Future>(f: A) -> Fuse<A> {\n Fuse {\n future: f,\n done: false,\n }\n}\n\nimpl<A: Future> Future for Fuse<A> {\n type Item = A::Item;\n type Error = A::Error;\n\n fn poll(&mut self, tokens: &Tokens) -> Option<Result<A::Item, A::Error>> {\n if self.done {\n None\n } else {\n let res = self.future.poll(tokens);\n self.done = res.is_some();\n return res\n }\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if !self.done {\n self.future.schedule(wake);\n }\n }\n}\n<commit_msg>Drop the future early in Fuse<commit_after>use std::sync::Arc;\n\nuse {Future, Wake, Tokens};\n\n\/\/\/ A future which \"fuse\"s an future once it's been resolved.\n\/\/\/\n\/\/\/ Normally futures can behave unpredictable once they're used after a future\n\/\/\/ has been resolved, but `Fuse` is always defined to return `None` from `poll`\n\/\/\/ after it has succeeded, and after it has succeeded all future calls to\n\/\/\/ `schedule` will be ignored.\npub struct Fuse<A> {\n future: Option<A>,\n}\n\npub fn new<A: Future>(f: A) -> Fuse<A> {\n Fuse {\n future: Some(f),\n }\n}\n\nimpl<A: Future> Future for Fuse<A> {\n type Item = A::Item;\n type Error = A::Error;\n\n fn poll(&mut self, tokens: &Tokens) -> Option<Result<A::Item, A::Error>> {\n let ret = self.future.as_mut().and_then(|f| f.poll(tokens));\n if ret.is_some() {\n self.future = None;\n }\n return ret\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if let Some(ref mut f) = self.future {\n f.schedule(wake);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add free method<commit_after>use libc::c_char;\nuse std::ffi::CString;\n\n#[no_mangle]\npub extern fn free(s: *mut c_char) {\n unsafe {\n if s.is_null() { return }\n CString::from_raw(s)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update main.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed mutable vec warning<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added download support.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libc;\nextern crate neovim;\nextern crate rgtk;\nextern crate \"rustc-serialize\" as rustc_serialize;\n\nuse rgtk::*;\nuse std::collections::HashSet;\nuse std::old_io::fs;\nuse std::old_io::fs::PathExtensions;\n\nmod projects;\nmod ui;\nmod utils;\n\nmod ffi {\n pub use libc::{c_int, c_uchar, c_void};\n pub use libc::funcs::posix88::unistd::{close, pipe, read, write};\n pub use libc::types::os::arch::c95::size_t;\n\n extern \"C\" {\n pub fn fork () -> c_int;\n pub fn kill (pid: c_int, sig: c_int);\n }\n}\n\nfn gui_main(\n pty: &mut gtk::VtePty,\n read_fd: ffi::c_int,\n write_fd: ffi::c_int,\n pid: ffi::c_int)\n{\n gtk::init();\n\n \/\/ constants\n\n let width = 1242;\n let height = 768;\n let editor_height = ((height as f32) * 0.75) as i32;\n\n \/\/ create the window\n\n let title = 
format!(\"SolidOak {}.{}.{}\",\n option_env!(\"CARGO_PKG_VERSION_MAJOR\").unwrap(),\n option_env!(\"CARGO_PKG_VERSION_MINOR\").unwrap(),\n option_env!(\"CARGO_PKG_VERSION_PATCH\").unwrap());\n let mut window = gtk::Window::new(gtk::WindowType::TopLevel).unwrap();\n window.set_title(title.as_slice());\n window.set_window_position(gtk::WindowPosition::Center);\n window.set_default_size(width, height);\n\n window.connect(gtk::signals::DeleteEvent::new(&mut |&: _| {\n unsafe {\n ffi::close(read_fd);\n ffi::close(write_fd);\n ffi::kill(pid, 15);\n }\n gtk::main_quit();\n true\n }));\n\n \/\/ create the panes\n\n let new_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n\n let mut project_buttons = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n project_buttons.set_size_request(-1, -1);\n project_buttons.add(&new_button);\n project_buttons.add(&import_button);\n project_buttons.add(&rename_button);\n project_buttons.add(&remove_button);\n\n let mut project_tree = gtk::TreeView::new().unwrap();\n let selection = project_tree.get_selection().unwrap();\n let column_types = [glib::ffi::g_type_string, glib::ffi::g_type_string];\n let store = gtk::TreeStore::new(&column_types).unwrap();\n let model = store.get_model().unwrap();\n project_tree.set_model(&model);\n project_tree.set_headers_visible(false);\n\n let mut scroll_pane = gtk::ScrolledWindow::new(None, None).unwrap();\n scroll_pane.add(&project_tree);\n\n let column = gtk::TreeViewColumn::new().unwrap();\n let cell = gtk::CellRendererText::new().unwrap();\n column.pack_start(&cell, true);\n column.add_attribute(&cell, \"text\", 0);\n project_tree.append_column(&column);\n\n let mut project_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n project_pane.set_size_request(-1, -1);\n project_pane.pack_start(&project_buttons, false, true, 0);\n project_pane.pack_start(&scroll_pane, true, true, 0);\n\n let mut editor_pane = gtk::VteTerminal::new().unwrap();\n editor_pane.set_size_request(-1, editor_height);\n editor_pane.set_pty(pty);\n editor_pane.watch_child(pid);\n\n let run_button = gtk::Button::new_with_label(\"Run\").unwrap();\n let build_button = gtk::Button::new_with_label(\"Build\").unwrap();\n let test_button = gtk::Button::new_with_label(\"Test\").unwrap();\n let clean_button = gtk::Button::new_with_label(\"Clean\").unwrap();\n let stop_button = gtk::Button::new_with_label(\"Stop\").unwrap();\n\n let mut build_buttons = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n build_buttons.set_size_request(-1, -1);\n build_buttons.add(&run_button);\n build_buttons.add(&build_button);\n build_buttons.add(&test_button);\n build_buttons.add(&clean_button);\n build_buttons.add(&stop_button);\n\n let build_term = gtk::VteTerminal::new().unwrap();\n\n let mut build_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n build_pane.pack_start(&build_buttons, false, true, 0);\n build_pane.pack_start(&build_term, true, true, 0);\n\n let mut content = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n content.pack_start(&editor_pane, false, true, 0);\n content.pack_start(&build_pane, true, true, 0);\n\n let mut hbox = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n hbox.pack_start(&project_pane, false, true, 0);\n hbox.pack_start(&content, true, true, 0);\n 
window.add(&hbox);\n\n \/\/ populate the project tree\n\n let mut state = ::utils::State{\n projects: HashSet::new(),\n expansions: HashSet::new(),\n selection: None,\n tree_model: &model,\n tree_store: &store,\n tree_selection: &selection,\n rename_button: &rename_button,\n remove_button: &remove_button,\n };\n\n ::utils::read_prefs(&mut state);\n ::ui::update_project_tree(&mut state, &mut project_tree);\n\n \/\/ connect to the signals\n\n new_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::new_project(&mut state, &mut project_tree);\n }));\n import_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::import_project(&mut state, &mut project_tree);\n }));\n rename_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::rename_file(&mut state);\n }));\n remove_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::remove_item(&mut state);\n }));\n selection.connect(gtk::signals::Changed::new(&mut || {\n ::projects::save_selection(&mut state);\n }));\n project_tree.connect(gtk::signals::RowCollapsed::new(&mut |iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::remove_expansion(&mut state, &iter);\n }));\n project_tree.connect(gtk::signals::RowExpanded::new(&mut |iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::add_expansion(&mut state, &iter);\n }));\n\n \/\/ show the window\n\n window.show_all();\n gtk::main();\n}\n\nfn nvim_attach(fd: ffi::c_int) {\n let mut arr = neovim::Array::new();\n arr.add_integer(80);\n arr.add_integer(24);\n arr.add_boolean(true);\n let msg = neovim::serialize_message(1, \"ui_attach\", &arr);\n let msg_ptr = msg.as_slice().as_ptr() as *const ffi::c_void;\n unsafe { ffi::write(fd, msg_ptr, msg.len() as ffi::size_t) };\n}\n\nfn nvim_execute(fd: ffi::c_int, command: &str) {\n let mut arr = neovim::Array::new();\n arr.add_string(command);\n let msg = neovim::serialize_message(1, \"vim_command\", &arr);\n let msg_ptr = msg.as_slice().as_ptr() as *const ffi::c_void;\n unsafe { ffi::write(fd, msg_ptr, msg.len() as ffi::size_t) };\n}\n\nfn receive_message(fd: ffi::c_int) -> Option<neovim::Array> {\n let mut buf : [ffi::c_uchar; 1024] = [0; 1024];\n let n = unsafe { ffi::read(fd, buf.as_mut_ptr() as *mut ffi::c_void, 1024) };\n if n < 0 {\n return None;\n }\n unsafe {\n let v = Vec::from_raw_buf(buf.as_ptr(), n as usize);\n let s = String::from_utf8_unchecked(v);\n Some(neovim::deserialize_message(&s))\n }\n}\n\nfn main() {\n \/\/ create data dir\n let home_dir = ::utils::get_home_dir();\n let data_dir = home_dir.join(::utils::DATA_DIR);\n if !data_dir.exists() {\n match fs::mkdir(&data_dir, ::std::old_io::USER_DIR) {\n Ok(_) => {\n for res in ::utils::DATA_CONTENT.iter() {\n let res_path = data_dir.join_many(res.path);\n ::std::old_io::fs::mkdir_recursive(&res_path.dir_path(), ::std::old_io::USER_DIR).ok();\n ::std::old_io::File::create(&res_path).write_all(res.data.as_bytes()).ok();\n }\n println!(\"Created data dir at {}\", data_dir.as_str().unwrap());\n },\n Err(e) => { println!(\"Error creating data dir: {}\", e) }\n }\n }\n\n \/\/ set $VIM to the data dir if it isn't already set\n if ::std::env::var(\"VIM\").is_none() {\n ::std::env::set_var(\"VIM\", data_dir.as_str().unwrap());\n }\n\n \/\/ create config file\n let config_file = home_dir.join(::utils::CONFIG_FILE);\n if !config_file.exists() {\n match ::std::old_io::File::create(&config_file).write_all(::utils::CONFIG_CONTENT.as_bytes()) {\n Ok(_) => { println!(\"Created config file at {}\", 
config_file.as_str().unwrap()) },\n Err(e) => { println!(\"Error creating config file: {}\", e) }\n }\n }\n\n \/\/ takes care of piping stdin\/stdout between the gui and nvim\n let mut pty = gtk::VtePty::new().unwrap();\n\n \/\/ two pairs of anonymous pipes for msgpack-rpc between the gui and nvim\n let mut nvim_gui : [ffi::c_int; 2] = [0; 2]; \/\/ to nvim from gui\n let mut gui_nvim : [ffi::c_int; 2] = [0; 2]; \/\/ to gui from nvim\n unsafe {\n ffi::pipe(nvim_gui.as_mut_ptr());\n ffi::pipe(gui_nvim.as_mut_ptr());\n };\n\n \/\/ split into two processes\n let pid = unsafe { ffi::fork() };\n\n if pid > 0 { \/\/ the gui process\n ::std::thread::Thread::spawn(move || {\n \/\/ start communicating with nvim\n nvim_attach(nvim_gui[1]);\n\n \/\/ listen for bufread events\n nvim_execute(nvim_gui[1], \"au BufRead * call rpcnotify(1, \\\"bufread\\\", bufname(\\\"\\\"))\");\n\n \/\/ receive messages\n while let Some(recv_arr) = receive_message(gui_nvim[0]) {\n if recv_arr.len() > 0 {\n println!(\"Received: {:?}\", recv_arr);\n }\n }\n });\n\n \/\/ start the gui\n gui_main(&mut pty, gui_nvim[0], gui_nvim[1], pid);\n } else { \/\/ the nvim process\n \/\/ prepare this process to be piped into the gui\n pty.child_setup();\n\n \/\/ start nvim\n let mut args = Vec::new();\n for arg in ::std::env::args() {\n args.push(arg.into_string().unwrap());\n }\n args.push_all(&[\"-u\".to_string(), config_file.as_str().unwrap().to_string()]);\n neovim::main_setup(args);\n neovim::channel_from_fds(nvim_gui[0], gui_nvim[1]);\n neovim::main_loop();\n }\n}\n<commit_msg>Pass args as reference<commit_after>extern crate libc;\nextern crate neovim;\nextern crate rgtk;\nextern crate \"rustc-serialize\" as rustc_serialize;\n\nuse rgtk::*;\nuse std::collections::HashSet;\nuse std::old_io::fs;\nuse std::old_io::fs::PathExtensions;\n\nmod projects;\nmod ui;\nmod utils;\n\nmod ffi {\n pub use libc::{c_int, c_uchar, c_void};\n pub use libc::funcs::posix88::unistd::{close, pipe, read, write};\n pub use libc::types::os::arch::c95::size_t;\n\n extern \"C\" {\n pub fn fork () -> c_int;\n pub fn kill (pid: c_int, sig: c_int);\n }\n}\n\nfn gui_main(\n pty: &mut gtk::VtePty,\n read_fd: ffi::c_int,\n write_fd: ffi::c_int,\n pid: ffi::c_int)\n{\n gtk::init();\n\n \/\/ constants\n\n let width = 1242;\n let height = 768;\n let editor_height = ((height as f32) * 0.75) as i32;\n\n \/\/ create the window\n\n let title = format!(\"SolidOak {}.{}.{}\",\n option_env!(\"CARGO_PKG_VERSION_MAJOR\").unwrap(),\n option_env!(\"CARGO_PKG_VERSION_MINOR\").unwrap(),\n option_env!(\"CARGO_PKG_VERSION_PATCH\").unwrap());\n let mut window = gtk::Window::new(gtk::WindowType::TopLevel).unwrap();\n window.set_title(title.as_slice());\n window.set_window_position(gtk::WindowPosition::Center);\n window.set_default_size(width, height);\n\n window.connect(gtk::signals::DeleteEvent::new(&mut |&: _| {\n unsafe {\n ffi::close(read_fd);\n ffi::close(write_fd);\n ffi::kill(pid, 15);\n }\n gtk::main_quit();\n true\n }));\n\n \/\/ create the panes\n\n let new_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n\n let mut project_buttons = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n project_buttons.set_size_request(-1, -1);\n project_buttons.add(&new_button);\n project_buttons.add(&import_button);\n project_buttons.add(&rename_button);\n 
project_buttons.add(&remove_button);\n\n let mut project_tree = gtk::TreeView::new().unwrap();\n let selection = project_tree.get_selection().unwrap();\n let column_types = [glib::ffi::g_type_string, glib::ffi::g_type_string];\n let store = gtk::TreeStore::new(&column_types).unwrap();\n let model = store.get_model().unwrap();\n project_tree.set_model(&model);\n project_tree.set_headers_visible(false);\n\n let mut scroll_pane = gtk::ScrolledWindow::new(None, None).unwrap();\n scroll_pane.add(&project_tree);\n\n let column = gtk::TreeViewColumn::new().unwrap();\n let cell = gtk::CellRendererText::new().unwrap();\n column.pack_start(&cell, true);\n column.add_attribute(&cell, \"text\", 0);\n project_tree.append_column(&column);\n\n let mut project_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n project_pane.set_size_request(-1, -1);\n project_pane.pack_start(&project_buttons, false, true, 0);\n project_pane.pack_start(&scroll_pane, true, true, 0);\n\n let mut editor_pane = gtk::VteTerminal::new().unwrap();\n editor_pane.set_size_request(-1, editor_height);\n editor_pane.set_pty(pty);\n editor_pane.watch_child(pid);\n\n let run_button = gtk::Button::new_with_label(\"Run\").unwrap();\n let build_button = gtk::Button::new_with_label(\"Build\").unwrap();\n let test_button = gtk::Button::new_with_label(\"Test\").unwrap();\n let clean_button = gtk::Button::new_with_label(\"Clean\").unwrap();\n let stop_button = gtk::Button::new_with_label(\"Stop\").unwrap();\n\n let mut build_buttons = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n build_buttons.set_size_request(-1, -1);\n build_buttons.add(&run_button);\n build_buttons.add(&build_button);\n build_buttons.add(&test_button);\n build_buttons.add(&clean_button);\n build_buttons.add(&stop_button);\n\n let build_term = gtk::VteTerminal::new().unwrap();\n\n let mut build_pane = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n build_pane.pack_start(&build_buttons, false, true, 0);\n build_pane.pack_start(&build_term, true, true, 0);\n\n let mut content = gtk::Box::new(gtk::Orientation::Vertical, 0).unwrap();\n content.pack_start(&editor_pane, false, true, 0);\n content.pack_start(&build_pane, true, true, 0);\n\n let mut hbox = gtk::Box::new(gtk::Orientation::Horizontal, 0).unwrap();\n hbox.pack_start(&project_pane, false, true, 0);\n hbox.pack_start(&content, true, true, 0);\n window.add(&hbox);\n\n \/\/ populate the project tree\n\n let mut state = ::utils::State{\n projects: HashSet::new(),\n expansions: HashSet::new(),\n selection: None,\n tree_model: &model,\n tree_store: &store,\n tree_selection: &selection,\n rename_button: &rename_button,\n remove_button: &remove_button,\n };\n\n ::utils::read_prefs(&mut state);\n ::ui::update_project_tree(&mut state, &mut project_tree);\n\n \/\/ connect to the signals\n\n new_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::new_project(&mut state, &mut project_tree);\n }));\n import_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::import_project(&mut state, &mut project_tree);\n }));\n rename_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::rename_file(&mut state);\n }));\n remove_button.connect(gtk::signals::Clicked::new(&mut || {\n ::projects::remove_item(&mut state);\n }));\n selection.connect(gtk::signals::Changed::new(&mut || {\n ::projects::save_selection(&mut state);\n }));\n project_tree.connect(gtk::signals::RowCollapsed::new(&mut |iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n 
::projects::remove_expansion(&mut state, &iter);\n }));\n project_tree.connect(gtk::signals::RowExpanded::new(&mut |iter_raw, _| {\n let iter = gtk::TreeIter::wrap_pointer(iter_raw);\n ::projects::add_expansion(&mut state, &iter);\n }));\n\n \/\/ show the window\n\n window.show_all();\n gtk::main();\n}\n\nfn nvim_attach(fd: ffi::c_int) {\n let mut arr = neovim::Array::new();\n arr.add_integer(80);\n arr.add_integer(24);\n arr.add_boolean(true);\n let msg = neovim::serialize_message(1, \"ui_attach\", &arr);\n let msg_ptr = msg.as_slice().as_ptr() as *const ffi::c_void;\n unsafe { ffi::write(fd, msg_ptr, msg.len() as ffi::size_t) };\n}\n\nfn nvim_execute(fd: ffi::c_int, command: &str) {\n let mut arr = neovim::Array::new();\n arr.add_string(command);\n let msg = neovim::serialize_message(1, \"vim_command\", &arr);\n let msg_ptr = msg.as_slice().as_ptr() as *const ffi::c_void;\n unsafe { ffi::write(fd, msg_ptr, msg.len() as ffi::size_t) };\n}\n\nfn receive_message(fd: ffi::c_int) -> Option<neovim::Array> {\n let mut buf : [ffi::c_uchar; 1024] = [0; 1024];\n let n = unsafe { ffi::read(fd, buf.as_mut_ptr() as *mut ffi::c_void, 1024) };\n if n < 0 {\n return None;\n }\n unsafe {\n let v = Vec::from_raw_buf(buf.as_ptr(), n as usize);\n let s = String::from_utf8_unchecked(v);\n Some(neovim::deserialize_message(&s))\n }\n}\n\nfn main() {\n \/\/ create data dir\n let home_dir = ::utils::get_home_dir();\n let data_dir = home_dir.join(::utils::DATA_DIR);\n if !data_dir.exists() {\n match fs::mkdir(&data_dir, ::std::old_io::USER_DIR) {\n Ok(_) => {\n for res in ::utils::DATA_CONTENT.iter() {\n let res_path = data_dir.join_many(res.path);\n ::std::old_io::fs::mkdir_recursive(&res_path.dir_path(), ::std::old_io::USER_DIR).ok();\n ::std::old_io::File::create(&res_path).write_all(res.data.as_bytes()).ok();\n }\n println!(\"Created data dir at {}\", data_dir.as_str().unwrap());\n },\n Err(e) => { println!(\"Error creating data dir: {}\", e) }\n }\n }\n\n \/\/ set $VIM to the data dir if it isn't already set\n if ::std::env::var(\"VIM\").is_none() {\n ::std::env::set_var(\"VIM\", data_dir.as_str().unwrap());\n }\n\n \/\/ create config file\n let config_file = home_dir.join(::utils::CONFIG_FILE);\n if !config_file.exists() {\n match ::std::old_io::File::create(&config_file).write_all(::utils::CONFIG_CONTENT.as_bytes()) {\n Ok(_) => { println!(\"Created config file at {}\", config_file.as_str().unwrap()) },\n Err(e) => { println!(\"Error creating config file: {}\", e) }\n }\n }\n\n \/\/ takes care of piping stdin\/stdout between the gui and nvim\n let mut pty = gtk::VtePty::new().unwrap();\n\n \/\/ two pairs of anonymous pipes for msgpack-rpc between the gui and nvim\n let mut nvim_gui : [ffi::c_int; 2] = [0; 2]; \/\/ to nvim from gui\n let mut gui_nvim : [ffi::c_int; 2] = [0; 2]; \/\/ to gui from nvim\n unsafe {\n ffi::pipe(nvim_gui.as_mut_ptr());\n ffi::pipe(gui_nvim.as_mut_ptr());\n };\n\n \/\/ split into two processes\n let pid = unsafe { ffi::fork() };\n\n if pid > 0 { \/\/ the gui process\n ::std::thread::Thread::spawn(move || {\n \/\/ start communicating with nvim\n nvim_attach(nvim_gui[1]);\n\n \/\/ listen for bufread events\n nvim_execute(nvim_gui[1], \"au BufRead * call rpcnotify(1, \\\"bufread\\\", bufname(\\\"\\\"))\");\n\n \/\/ receive messages\n while let Some(recv_arr) = receive_message(gui_nvim[0]) {\n if recv_arr.len() > 0 {\n println!(\"Received: {:?}\", recv_arr);\n }\n }\n });\n\n \/\/ start the gui\n gui_main(&mut pty, gui_nvim[0], gui_nvim[1], pid);\n } else { \/\/ the nvim process\n \/\/ 
prepare this process to be piped into the gui\n pty.child_setup();\n\n \/\/ start nvim\n let mut args = Vec::new();\n for arg in ::std::env::args() {\n args.push(arg.into_string().unwrap());\n }\n args.push_all(&[\"-u\".to_string(), config_file.as_str().unwrap().to_string()]);\n neovim::main_setup(&args);\n neovim::channel_from_fds(nvim_gui[0], gui_nvim[1]);\n neovim::main_loop();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>try to fill the numbers. works halfway<commit_after><|endoftext|>"} {"text":"<commit_before>#![deny(warnings)]\n\nextern crate cobalt;\nextern crate getopts;\nextern crate env_logger;\nextern crate notify;\nextern crate ghp;\n\n#[macro_use]\nextern crate nickel;\n\n#[macro_use]\nextern crate log;\n\nuse getopts::Options;\nuse std::env;\nuse std::fs;\nuse cobalt::Config;\nuse log::{LogRecord, LogLevelFilter};\nuse env_logger::LogBuilder;\nuse nickel::{Nickel, Options as NickelOptions, StaticFilesHandler};\nuse ghp::import_dir;\n\nuse notify::{RecommendedWatcher, Error, Watcher};\nuse std::sync::mpsc::channel;\nuse std::thread;\nuse std::path::PathBuf;\n\nfn print_version() {\n println!(\"0.2.0\");\n}\n\nfn print_usage(opts: Options) {\n let usage = concat!(\"\\n\\tbuild -- build the cobalt project at the source dir\",\n \"\\n\\tserve -- build and serve the cobalt project at the source dir\",\n \"\\n\\twatch -- build, serve, and watch the project at the source dir\",\n \"\\n\\timport -- moves the contents of the dest folder to the gh-pages branch\");\n println!(\"{}\", opts.usage(usage));\n}\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n let mut opts = Options::new();\n\n opts.optopt(\"s\", \"source\", \"Source folder, Default: .\/\", \"\");\n opts.optopt(\"d\", \"destination\", \"Destination folder, Default: .\/\", \"\");\n opts.optopt(\"c\",\n \"config\",\n \"Config file to use, Default: .cobalt.yml\",\n \"\");\n opts.optopt(\"l\",\n \"layouts\",\n \"\\tLayout templates folder, Default: _layouts\/\",\n \"\");\n opts.optopt(\"p\", \"posts\", \"Posts folder, Default: _posts\/\", \"\");\n opts.optopt(\"P\", \"port\", \"Port to serve from, Default: 3000\", \"\");\n opts.optopt(\"b\", \"branch\", \"Branch that will be used to import the site to, Default: gh-pages\", \"\");\n opts.optopt(\"m\", \"message\", \"Commit message that will be used on import, Default: cobalt site import\", \"\");\n\n opts.optflag(\"\", \"debug\", \"Log verbose (debug level) information\");\n opts.optflag(\"\", \"trace\", \"Log ultra-verbose (trace level) information\");\n opts.optflag(\"\", \"silent\", \"Suppress all output\");\n opts.optflag(\"i\", \"import\", \"Import after build to gh-pages branch\");\n opts.optflag(\"h\", \"help\", \"Print this help menu\");\n opts.optflag(\"v\", \"version\", \"Display version\");\n\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => m,\n Err(f) => panic!(f.to_string()),\n };\n\n if matches.opt_present(\"h\") {\n print_usage(opts);\n return;\n }\n\n if matches.opt_present(\"version\") {\n print_version();\n return;\n }\n\n let format = |record: &LogRecord| {\n let level = format!(\"[{}]\", record.level()).to_lowercase();\n format!(\"{:8} {}\", level, record.args())\n };\n\n let mut builder = LogBuilder::new();\n builder.format(format);\n builder.filter(None, LogLevelFilter::Info);\n\n if matches.opt_present(\"debug\") {\n builder.filter(None, LogLevelFilter::Debug);\n }\n\n if matches.opt_present(\"trace\") {\n builder.filter(None, LogLevelFilter::Trace);\n }\n\n if matches.opt_present(\"silent\") {\n 
builder.filter(None, LogLevelFilter::Off);\n }\n\n builder.init().unwrap();\n\n let config_path = match matches.opt_str(\"config\") {\n Some(config) => config,\n None => \".\/.cobalt.yml\".to_owned(),\n };\n\n \/\/ Fetch config information if available\n let mut config: Config = if fs::metadata(&config_path).is_ok() {\n info!(\"Using config file {}\", &config_path);\n\n match Config::from_file(&config_path) {\n Ok(config) => config,\n Err(e) => {\n error!(\"Error reading config file:\");\n error!(\"{}\", e);\n std::process::exit(1);\n }\n }\n } else {\n Default::default()\n };\n\n if let Some(source) = matches.opt_str(\"s\") {\n config.source = source;\n };\n\n if let Some(dest) = matches.opt_str(\"d\") {\n config.dest = dest;\n };\n\n if let Some(layouts) = matches.opt_str(\"layouts\") {\n config.layouts = layouts;\n };\n\n if let Some(posts) = matches.opt_str(\"posts\") {\n config.posts = posts;\n };\n\n let command = if !matches.free.is_empty() {\n matches.free[0].clone()\n } else {\n print_usage(opts);\n return;\n };\n\n \/\/ Check for port and set port variable to it\n let port = matches.opt_str(\"port\").unwrap_or(\"3000\".to_owned());\n let branch = matches.opt_str(\"branch\").unwrap_or(\"gh-pages\".to_owned());\n let message = matches.opt_str(\"message\").unwrap_or(\"cobalt site import\".to_owned());\n let should_import = matches.opt_present(\"import\");\n\n match command.as_ref() {\n \"build\" => {\n build(&config);\n if should_import {\n import(&config, &branch, &message);\n }\n }\n\n \"serve\" => {\n build(&config);\n serve(&config.dest, &port);\n }\n\n \"watch\" => {\n build(&config);\n\n let dest = config.dest.clone();\n thread::spawn(move || {\n serve(&dest, &port);\n });\n\n let (tx, rx) = channel();\n let w: Result<RecommendedWatcher, Error> = Watcher::new(tx);\n\n match w {\n Ok(mut watcher) => {\n \/\/ TODO: clean up this unwrap\n watcher.watch(&config.source).unwrap();\n info!(\"Watching {:?} for changes\", &config.source);\n\n loop {\n match rx.recv() {\n Ok(val) => {\n trace!(\"file changed {:?}\", val);\n if let Some(path) = val.path {\n if path.is_absolute() {\n \/\/ get where process was run from\n let cwd = std::env::current_dir().unwrap_or(PathBuf::new());\n \/\/ strip absolute path\n let rel_path = path.strip_prefix(&cwd).unwrap_or(&cwd);\n\n \/\/ check if path starts with the build folder.\n if !rel_path.starts_with(&config.dest) {\n build(&config);\n }\n\n } else {\n \/\/ check if path starts with build folder.\n \/\/ TODO: may want to check if it starts `.\/`\n if path.to_str() != Some(&config.dest) {\n build(&config);\n }\n }\n }\n }\n\n Err(e) => {\n error!(\"[Notify Error]: {}\", e);\n std::process::exit(1);\n }\n }\n }\n }\n Err(e) => {\n error!(\"[Notify Error]: {}\", e);\n std::process::exit(1);\n }\n }\n }\n\n \"import\" => {\n import(&config, &branch, &message);\n }\n\n _ => {\n print_usage(opts);\n return;\n }\n }\n}\n\nfn build(config: &Config) {\n info!(\"Building from {} into {}\", config.source, config.dest);\n match cobalt::build(&config) {\n Ok(_) => info!(\"Build successful\"),\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Build not successful\");\n std::process::exit(1);\n }\n };\n}\n\nfn serve(dest: &str, port: &str) {\n info!(\"Serving {:?} through static file server\", dest);\n let mut server = Nickel::new();\n server.options = NickelOptions::default().output_on_listen(false);\n\n server.utilize(StaticFilesHandler::new(dest));\n\n let ip = \"127.0.0.1:\".to_owned() + port;\n info!(\"Server Listening on {}\", &ip);\n info!(\"Ctrl-c to stop 
the server\");\n server.listen(&*ip);\n}\n\nfn import(config: &Config, branch: &str, message: &str) {\n info!(\"Importing {} to {}\", config.dest, branch);\n\n let meta = match fs::metadata(&config.dest) {\n Ok(data) => data,\n\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n };\n\n if meta.is_dir() {\n match import_dir(&config.dest, branch, message) {\n Ok(_) => info!(\"Import successful\"),\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n }\n } else {\n error!(\"Build dir is not a directory: {}\", config.dest);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n}\n<commit_msg>moved to glob path checking from default path checking<commit_after>#![deny(warnings)]\n\nextern crate cobalt;\nextern crate getopts;\nextern crate env_logger;\nextern crate notify;\nextern crate glob;\nextern crate ghp;\n\n#[macro_use]\nextern crate nickel;\n\n#[macro_use]\nextern crate log;\n\nuse getopts::Options;\nuse std::env;\nuse std::fs;\nuse cobalt::Config;\nuse log::{LogRecord, LogLevelFilter};\nuse env_logger::LogBuilder;\nuse nickel::{Nickel, Options as NickelOptions, StaticFilesHandler};\nuse ghp::import_dir;\nuse glob::Pattern;\n\nuse notify::{RecommendedWatcher, Error, Watcher};\nuse std::sync::mpsc::channel;\nuse std::thread;\nuse std::path::PathBuf;\n\nfn print_version() {\n println!(\"0.2.0\");\n}\n\nfn print_usage(opts: Options) {\n let usage = concat!(\"\\n\\tbuild -- build the cobalt project at the source dir\",\n \"\\n\\tserve -- build and serve the cobalt project at the source dir\",\n \"\\n\\twatch -- build, serve, and watch the project at the source dir\",\n \"\\n\\timport -- moves the contents of the dest folder to the gh-pages branch\");\n println!(\"{}\", opts.usage(usage));\n}\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n let mut opts = Options::new();\n\n opts.optopt(\"s\", \"source\", \"Source folder, Default: .\/\", \"\");\n opts.optopt(\"d\", \"destination\", \"Destination folder, Default: .\/\", \"\");\n opts.optopt(\"c\",\n \"config\",\n \"Config file to use, Default: .cobalt.yml\",\n \"\");\n opts.optopt(\"l\",\n \"layouts\",\n \"\\tLayout templates folder, Default: _layouts\/\",\n \"\");\n opts.optopt(\"p\", \"posts\", \"Posts folder, Default: _posts\/\", \"\");\n opts.optopt(\"P\", \"port\", \"Port to serve from, Default: 3000\", \"\");\n opts.optopt(\"b\", \"branch\", \"Branch that will be used to import the site to, Default: gh-pages\", \"\");\n opts.optopt(\"m\", \"message\", \"Commit message that will be used on import, Default: cobalt site import\", \"\");\n\n opts.optflag(\"\", \"debug\", \"Log verbose (debug level) information\");\n opts.optflag(\"\", \"trace\", \"Log ultra-verbose (trace level) information\");\n opts.optflag(\"\", \"silent\", \"Suppress all output\");\n opts.optflag(\"i\", \"import\", \"Import after build to gh-pages branch\");\n opts.optflag(\"h\", \"help\", \"Print this help menu\");\n opts.optflag(\"v\", \"version\", \"Display version\");\n\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => m,\n Err(f) => panic!(f.to_string()),\n };\n\n if matches.opt_present(\"h\") {\n print_usage(opts);\n return;\n }\n\n if matches.opt_present(\"version\") {\n print_version();\n return;\n }\n\n let format = |record: &LogRecord| {\n let level = format!(\"[{}]\", record.level()).to_lowercase();\n format!(\"{:8} {}\", level, record.args())\n };\n\n let mut builder = LogBuilder::new();\n builder.format(format);\n 
builder.filter(None, LogLevelFilter::Info);\n\n if matches.opt_present(\"debug\") {\n builder.filter(None, LogLevelFilter::Debug);\n }\n\n if matches.opt_present(\"trace\") {\n builder.filter(None, LogLevelFilter::Trace);\n }\n\n if matches.opt_present(\"silent\") {\n builder.filter(None, LogLevelFilter::Off);\n }\n\n builder.init().unwrap();\n\n let config_path = match matches.opt_str(\"config\") {\n Some(config) => config,\n None => \".\/.cobalt.yml\".to_owned(),\n };\n\n \/\/ Fetch config information if available\n let mut config: Config = if fs::metadata(&config_path).is_ok() {\n info!(\"Using config file {}\", &config_path);\n\n match Config::from_file(&config_path) {\n Ok(config) => config,\n Err(e) => {\n error!(\"Error reading config file:\");\n error!(\"{}\", e);\n std::process::exit(1);\n }\n }\n } else {\n Default::default()\n };\n\n if let Some(source) = matches.opt_str(\"s\") {\n config.source = source;\n };\n\n if let Some(dest) = matches.opt_str(\"d\") {\n config.dest = dest;\n };\n\n if let Some(layouts) = matches.opt_str(\"layouts\") {\n config.layouts = layouts;\n };\n\n if let Some(posts) = matches.opt_str(\"posts\") {\n config.posts = posts;\n };\n\n let command = if !matches.free.is_empty() {\n matches.free[0].clone()\n } else {\n print_usage(opts);\n return;\n };\n\n \/\/ Check for port and set port variable to it\n let port = matches.opt_str(\"port\").unwrap_or(\"3000\".to_owned());\n let branch = matches.opt_str(\"branch\").unwrap_or(\"gh-pages\".to_owned());\n let message = matches.opt_str(\"message\").unwrap_or(\"cobalt site import\".to_owned());\n let should_import = matches.opt_present(\"import\");\n\n match command.as_ref() {\n \"build\" => {\n build(&config);\n if should_import {\n import(&config, &branch, &message);\n }\n }\n\n \"serve\" => {\n build(&config);\n serve(&config.dest, &port);\n }\n\n \"watch\" => {\n build(&config);\n\n let dest = config.dest.clone();\n thread::spawn(move || {\n serve(&dest, &port);\n });\n\n let (tx, rx) = channel();\n let w: Result<RecommendedWatcher, Error> = Watcher::new(tx);\n\n match w {\n Ok(mut watcher) => {\n \/\/ TODO: clean up this unwrap\n watcher.watch(&config.source).unwrap();\n info!(\"Watching {:?} for changes\", &config.source);\n\n loop {\n match rx.recv() {\n Ok(val) => {\n trace!(\"file changed {:?}\", val);\n if let Some(path) = val.path {\n if path.is_absolute() {\n \/\/ get where process was run from\n let cwd = std::env::current_dir().unwrap_or(PathBuf::new());\n \/\/ strip absolute path\n let rel_path = path.strip_prefix(&cwd).unwrap_or(&cwd);\n\n \/\/ check if path starts with the build folder.\n if !&config.ignore.iter().any(|pattern| Pattern::matches_path(\n pattern,\n rel_path)) {\n build(&config);\n }\n\n } else {\n \/\/ check if path starts with build folder.\n \/\/ TODO: may want to check if it starts `.\/`\n if !&config.ignore.iter().any(|pattern| Pattern::matches_path(pattern, &path)) {\n build(&config);\n }\n }\n }\n }\n\n Err(e) => {\n error!(\"[Notify Error]: {}\", e);\n std::process::exit(1);\n }\n }\n }\n }\n Err(e) => {\n error!(\"[Notify Error]: {}\", e);\n std::process::exit(1);\n }\n }\n }\n\n \"import\" => {\n import(&config, &branch, &message);\n }\n\n _ => {\n print_usage(opts);\n return;\n }\n }\n}\n\nfn build(config: &Config) {\n info!(\"Building from {} into {}\", config.source, config.dest);\n match cobalt::build(&config) {\n Ok(_) => info!(\"Build successful\"),\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Build not successful\");\n std::process::exit(1);\n }\n };\n}\n\nfn 
serve(dest: &str, port: &str) {\n info!(\"Serving {:?} through static file server\", dest);\n let mut server = Nickel::new();\n server.options = NickelOptions::default().output_on_listen(false);\n\n server.utilize(StaticFilesHandler::new(dest));\n\n let ip = \"127.0.0.1:\".to_owned() + port;\n info!(\"Server Listening on {}\", &ip);\n info!(\"Ctrl-c to stop the server\");\n server.listen(&*ip);\n}\n\nfn import(config: &Config, branch: &str, message: &str) {\n info!(\"Importing {} to {}\", config.dest, branch);\n\n let meta = match fs::metadata(&config.dest) {\n Ok(data) => data,\n\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n };\n\n if meta.is_dir() {\n match import_dir(&config.dest, branch, message) {\n Ok(_) => info!(\"Import successful\"),\n Err(e) => {\n error!(\"{}\", e);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n }\n } else {\n error!(\"Build dir is not a directory: {}\", config.dest);\n error!(\"Import not successful\");\n std::process::exit(1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Proof of concept 2.0.1 (changed paths to use PathBuf)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adding basic layout for new sites and config file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>can now execute programs that expect a tty<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added cache headers.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>For consistency, added in another compiler error.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update REPL to use the new API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add basic functions like memset,..<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This library is derived from rlibc, not intended for general use,\n\/\/! and is superseded by a system libc if one is available. In a\n\/\/! freestanding context, however, common functions such as memset, memcpy,\n\/\/! etc are not implemented. This library provides an implementation of\n\/\/! these functions which are either required by libcore or called by rustc\n\/\/! 
implicitly.\n\n#[no_mangle]\npub unsafe extern \"C\" fn memcpy(dest: *mut u8, src: *const u8, n: usize) -> *mut u8 {\n\tlet mut i = 0;\n\twhile i < n {\n\t\t*dest.offset(i as isize) = *src.offset(i as isize);\n\t\ti += 1;\n\t}\n\treturn dest;\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memmove(dest: *mut u8, src: *const u8, n: usize) -> *mut u8 {\n\tif src < dest as *const u8 {\n\t\t\/\/ copy from end\n\t\tlet mut i = n;\n\t\twhile i != 0 {\n\t\t\ti -= 1;\n\t\t\t*dest.offset(i as isize) = *src.offset(i as isize);\n\t\t}\n\t} else {\n\t\t\/\/ copy from beginning\n\t\tlet mut i = 0;\n\t\twhile i < n {\n\t\t\t*dest.offset(i as isize) = *src.offset(i as isize);\n\t\t\ti += 1;\n\t\t}\n\t}\n\treturn dest;\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memset(s: *mut u8, c: i32, n: usize) -> *mut u8 {\n\tlet mut i = 0;\n\twhile i < n {\n\t\t*s.offset(i as isize) = c as u8;\n\t\ti += 1;\n\t}\n\treturn s;\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32 {\n\tlet mut i = 0;\n\twhile i < n {\n\t\tlet a = *s1.offset(i as isize);\n\t\tlet b = *s2.offset(i as isize);\n\t\tif a != b {\n\t\t\treturn a as i32 - b as i32;\n\t\t}\n\t\ti += 1;\n\t}\n\treturn 0;\n}\n\n#[cfg(test)]\nmod test {\n\tuse super::{memcmp, memcpy, memmove, memset};\n\n\t#[test]\n\tfn memcmp_single_byte_pointers() {\n\t\tunsafe {\n\t\t\tassert_eq!(memcmp(&0xFAu8, &0xFAu8, 1), 0x00);\n\t\t\tassert!(memcmp(&0xEFu8, &0xFEu8, 1) < 0x00);\n\t\t}\n\t}\n\n\t#[test]\n\tfn memcmp_strings() {\n\t\t{\n\t\t\tlet (x, z) = (\"Hello!\", \"Good Bye.\");\n\t\t\tlet l = x.len();\n\t\t\tunsafe {\n\t\t\t\tassert_eq!(memcmp(x.as_ptr(), x.as_ptr(), l), 0);\n\t\t\t\tassert!(memcmp(x.as_ptr(), z.as_ptr(), l) > 0);\n\t\t\t\tassert!(memcmp(z.as_ptr(), x.as_ptr(), l) < 0);\n\t\t\t}\n\t\t}\n\t\t{\n\t\t\tlet (x, z) = (\"hey!\", \"hey.\");\n\t\t\tlet l = x.len();\n\t\t\tunsafe {\n\t\t\t\tassert!(memcmp(x.as_ptr(), z.as_ptr(), l) < 0);\n\t\t\t}\n\t\t}\n\t}\n\n\t#[test]\n\tfn memset_single_byte_pointers() {\n\t\tlet mut x: u8 = 0xFF;\n\t\tunsafe {\n\t\t\tmemset(&mut x, 0xAA, 1);\n\t\t\tassert_eq!(x, 0xAA);\n\t\t\tmemset(&mut x, 0x00, 1);\n\t\t\tassert_eq!(x, 0x00);\n\t\t\tx = 0x01;\n\t\t\tmemset(&mut x, 0x12, 0);\n\t\t\tassert_eq!(x, 0x01);\n\t\t}\n\t}\n\n\t#[test]\n\tfn memset_array() {\n\t\tlet mut buffer = [b'X'; 100];\n\t\tunsafe {\n\t\t\tmemset(buffer.as_mut_ptr(), b'#' as i32, buffer.len());\n\t\t}\n\t\tfor byte in buffer.iter() {\n\t\t\tassert_eq!(*byte, b'#');\n\t\t}\n\t}\n\n\t#[test]\n\tfn memcpy_and_memcmp_arrays() {\n\t\tlet (src, mut dst) = ([b'X'; 100], [b'Y'; 100]);\n\t\tunsafe {\n\t\t\tassert!(memcmp(src.as_ptr(), dst.as_ptr(), 100) != 0);\n\t\t\tlet _ = memcpy(dst.as_mut_ptr(), src.as_ptr(), 100);\n\t\t\tassert_eq!(memcmp(src.as_ptr(), dst.as_ptr(), 100), 0);\n\t\t}\n\t}\n\n\t#[test]\n\tfn memmove_overlapping() {\n\t\t{\n\t\t\tlet mut buffer = [b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9'];\n\t\t\tunsafe {\n\t\t\t\tmemmove(&mut buffer[4], &buffer[0], 6);\n\t\t\t\tlet mut i = 0;\n\t\t\t\tfor byte in b\"0123012345\".iter() {\n\t\t\t\t\tassert_eq!(buffer[i], *byte);\n\t\t\t\t\ti += 1;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t{\n\t\t\tlet mut buffer = [b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9'];\n\t\t\tunsafe {\n\t\t\t\tmemmove(&mut buffer[0], &buffer[4], 6);\n\t\t\t\tlet mut i = 0;\n\t\t\t\tfor byte in b\"4567896789\".iter() {\n\t\t\t\t\tassert_eq!(buffer[i], *byte);\n\t\t\t\t\ti += 1;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add 
data members<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update main.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Colored prompt<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>flood_fill<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed usage of condvar.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>recursive data type with methods for node graph<commit_after>fn main() {\n let list = ~Node(1, ~Node(2, ~Node(3, ~Empty)));\n println!(\"Sum of all values in the list: {:i}.\", list.sum());\n}\n\nenum IntList {\n Node(int, ~IntList),\n Empty\n}\n\nimpl IntList {\n fn sum(~self) -> int {\n \/\/ As in C and C++, pointers are dereferenced with the asterisk `*` operator.\n match *self {\n Node(value, next) => value + next.sum(),\n Empty => 0\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added tests.rs.<commit_after>use std::time;\nuse ::{CANSocket, ShouldRetry};\n\n#[test]\nfn test_nonexistant_device() {\n assert!(CANSocket::open(\"invalid\").is_err());\n}\n\n#[test]\nfn vcan0_timeout() {\n let cs = CANSocket::open(\"vcan1\").unwrap();\n cs.set_read_timeout(time::Duration::from_millis(100)).unwrap();\n assert!(cs.read_frame().should_retry());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>video.reorderAlbums method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Pick more appropriate int types for hmac<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nuse std::collections::{BTreeMap, HashMap};\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\n\nuse self::to_num::ToNum;\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::parse;\nuse self::expansion::expand_variables;\n\npub mod builtin;\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod expansion;\n\npub type Variables = BTreeMap<String, String>;\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n pub variables: Variables,\n pub modes: Vec<Mode>,\n pub directory_stack: DirectoryStack,\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction fails\n pub fn new() -> Self {\n Shell {\n variables: BTreeMap::new(),\n modes: vec![],\n directory_stack: DirectoryStack::new().expect(\"\"),\n }\n }\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box<Fn(&[String], &mut Shell)>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"To change the current directory\\n cd <your_destination>\",\n main: box |args: &[String], _: &mut Shell| {\n builtin::cd(args);\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.dirs(args);\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |_: &[String], _: &mut Shell| {},\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"To read some variables\\n read <my_variable>\",\n main: box |args: &[String], shell: &mut Shell| {\n builtin::read(args, &mut shell.variables);\n },\n });\n\n commands.insert(\"run\",\n Command {\n name: \"run\",\n help: \"Run a script\\n run <script>\",\n main: box |args: &[String], shell: &mut Shell| {\n builtin::run(args, &mut shell.variables);\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.pushd(args);\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.popd(args);\n },\n });\n\n let command_helper: HashMap<String, String> = commands.iter()\n .map(|(k, v)| {\n (k.to_string(),\n v.help.to_string())\n })\n .collect();\n\n commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display a little helper for a given command\\n help ls\",\n main: box move |args: &[String], _: &mut Shell| {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command) {\n match command_helper.get(command) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n },\n });\n\n commands\n }\n}\n\npub struct Mode {\n value: bool,\n}\n\nfn on_command(command_string: &str, commands: &HashMap<&str, Command>, shell: &mut Shell) {\n \/\/ Show variables\n if command_string == \"$\" {\n for (key, value) in shell.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n return;\n }\n\n let mut jobs = parse(command_string);\n expand_variables(&mut jobs, &shell.variables);\n\n \/\/ Execute commands\n for job in jobs.iter() {\n if job.command == \"if\" {\n let mut value = false;\n\n if let Some(left) = job.args.get(0) {\n if let Some(cmp) = job.args.get(1) {\n if let Some(right) = job.args.get(2) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < 
right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n shell.modes.insert(0, Mode { value: value });\n continue;\n }\n\n if job.command == \"else\" {\n if let Some(mode) = shell.modes.get_mut(0) {\n mode.value = !mode.value;\n } else {\n println!(\"Syntax error: else found with no previous if\");\n }\n continue;\n }\n\n if job.command == \"fi\" {\n if !shell.modes.is_empty() {\n shell.modes.remove(0);\n } else {\n println!(\"Syntax error: fi found with no previous if\");\n }\n continue;\n }\n\n let mut skipped: bool = false;\n for mode in shell.modes.iter() {\n if !mode.value {\n skipped = true;\n break;\n }\n }\n if skipped {\n continue;\n }\n\n \/\/ Set variables\n if let Some(i) = job.command.find('=') {\n let name = job.command[0..i].trim();\n let mut value = job.command[i + 1..job.command.len()].trim().to_string();\n\n for i in 0..job.args.len() {\n if let Some(arg) = job.args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n set_var(&mut shell.variables, name, &value);\n continue;\n }\n\n \/\/ Commands\n let mut args = job.args.clone();\n args.insert(0, job.command.clone());\n if let Some(command) = commands.get(&job.command.as_str()) {\n (*command.main)(&args, shell);\n } else {\n run_external_commmand(args, &mut shell.variables);\n }\n }\n}\n\n\npub fn set_var(variables: &mut Variables, name: &str, value: &str) {\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n variables.remove(&name.to_string());\n } else {\n variables.insert(name.to_string(), value.to_string());\n }\n}\n\nfn print_prompt(modes: &[Mode]) {\n let prompt_prefix = modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", prompt_prefix);\n\n let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n\n print!(\"ion:{}# \", cwd);\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n}\n\nfn run_external_commmand(args: Vec<String>, variables: &mut Variables) {\n if let Some(path) = args.get(0) {\n let mut command = process::Command::new(path);\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n command.arg(arg);\n }\n }\n match command.spawn() {\n Ok(mut child) => {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n set_var(variables, \"?\", &code.to_string());\n } else {\n println!(\"{}: No child exit code\", path);\n }\n }\n Err(err) => println!(\"{}: Failed to wait: {}\", path, err),\n }\n }\n Err(err) => println!(\"{}: Failed to execute: {}\", path, err),\n }\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n\n for arg in env::args().skip(1) {\n let mut command_list = String::new();\n if let Ok(mut file) = File::open(&arg) {\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, arg);\n }\n }\n on_command(&command_list, &commands, &mut shell);\n return;\n }\n\n loop {\n\n print_prompt(&shell.modes);\n\n if let Some(command_original) = readln() {\n let command = command_original.trim();\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n on_command(&command, &commands, &mut shell);\n }\n } 
else {\n break;\n }\n }\n}\n<commit_msg>add history<commit_after>#![feature(box_syntax)]\n#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\n\nuse std::collections::{BTreeMap, HashMap, VecDeque};\nuse std::fs::File;\nuse std::io::{stdout, Read, Write};\nuse std::env;\nuse std::process;\n\nuse self::to_num::ToNum;\nuse self::directory_stack::DirectoryStack;\nuse self::input_editor::readln;\nuse self::peg::parse;\nuse self::expansion::expand_variables;\n\npub mod builtin;\npub mod directory_stack;\npub mod to_num;\npub mod input_editor;\npub mod peg;\npub mod expansion;\n\npub type Variables = BTreeMap<String, String>;\n\n\/\/\/ This struct will contain all of the data structures related to this\n\/\/\/ instance of the shell.\npub struct Shell {\n pub variables: Variables,\n pub modes: Vec<Mode>,\n pub directory_stack: DirectoryStack,\n pub history: VecDeque<String>,\n}\n\nimpl Shell {\n \/\/\/ Panics if DirectoryStack construction fails\n pub fn new() -> Self {\n Shell {\n variables: BTreeMap::new(),\n modes: vec![],\n directory_stack: DirectoryStack::new().expect(\"\"),\n history: VecDeque::new(),\n }\n }\n}\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the\n\/\/\/ functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ help: \"Describe what my_command does followed by a newline showing usage\",\n\/\/\/ main: box|args: &[String], &mut Shell| {\n\/\/\/ println!(\"Say 'hello' to my command! :-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command {\n pub name: &'static str,\n pub help: &'static str,\n pub main: Box<Fn(&[String], &mut Shell)>,\n}\n\nimpl Command {\n \/\/\/ Return the map from command names to commands\n pub fn map() -> HashMap<&'static str, Self> {\n let mut commands: HashMap<&str, Self> = HashMap::new();\n\n commands.insert(\"cd\",\n Command {\n name: \"cd\",\n help: \"To change the current directory\\n cd <your_destination>\",\n main: box |args: &[String], _: &mut Shell| {\n builtin::cd(args);\n },\n });\n\n commands.insert(\"dirs\",\n Command {\n name: \"dirs\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.dirs(args);\n },\n });\n\n commands.insert(\"exit\",\n Command {\n name: \"exit\",\n help: \"To exit the curent session\",\n main: box |_: &[String], _: &mut Shell| {},\n });\n\n commands.insert(\"read\",\n Command {\n name: \"read\",\n help: \"To read some variables\\n read <my_variable>\",\n main: box |args: &[String], shell: &mut Shell| {\n builtin::read(args, &mut shell.variables);\n },\n });\n\n commands.insert(\"run\",\n Command {\n name: \"run\",\n help: \"Run a script\\n run <script>\",\n main: box |args: &[String], shell: &mut Shell| {\n builtin::run(args, &mut shell.variables);\n },\n });\n\n commands.insert(\"pushd\",\n Command {\n name: \"pushd\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.pushd(args);\n },\n });\n\n commands.insert(\"popd\",\n Command {\n name: \"popd\",\n help: \"Make a sleep in the current session\\n sleep \\\n <number_of_seconds>\",\n main: box |args: &[String], shell: &mut Shell| {\n shell.directory_stack.popd(args);\n },\n });\n\n commands.insert(\"history\",\n Command {\n name: \"history\",\n help: 
\"Display all commands previously executed\",\n main: box |args: &[String], shell: &mut Shell| {\n for command in shell.history.clone() {\n println!(\"{}\", command);\n }\n },\n });\n\n let command_helper: HashMap<String, String> = commands.iter()\n .map(|(k, v)| {\n (k.to_string(),\n v.help.to_string())\n })\n .collect();\n\n commands.insert(\"help\",\n Command {\n name: \"help\",\n help: \"Display a little helper for a given command\\n help ls\",\n main: box move |args: &[String], _: &mut Shell| {\n if let Some(command) = args.get(1) {\n if command_helper.contains_key(command) {\n match command_helper.get(command) {\n Some(help) => println!(\"{}\", help),\n None => {\n println!(\"Command helper not found [run 'help']...\")\n }\n }\n } else {\n println!(\"Command helper not found [run 'help']...\");\n }\n } else {\n for (command, _help) in command_helper.iter() {\n println!(\"{}\", command);\n }\n }\n },\n });\n\n commands\n }\n}\n\npub struct Mode {\n value: bool,\n}\n\nfn on_command(command_string: &str, commands: &HashMap<&str, Command>, shell: &mut Shell) {\n let max_history: usize = 1000; \/\/ TODO temporary, make this configurable\n if shell.history.len() > max_history {\n shell.history.pop_front();\n }\n shell.history.push_back(command_string.to_string());\n\n \/\/ Show variables\n if command_string == \"$\" {\n for (key, value) in shell.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n return;\n }\n\n let mut jobs = parse(command_string);\n expand_variables(&mut jobs, &shell.variables);\n\n \/\/ Execute commands\n for job in jobs.iter() {\n if job.command == \"if\" {\n let mut value = false;\n\n if let Some(left) = job.args.get(0) {\n if let Some(cmp) = job.args.get(1) {\n if let Some(right) = job.args.get(2) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n shell.modes.insert(0, Mode { value: value });\n continue;\n }\n\n if job.command == \"else\" {\n if let Some(mode) = shell.modes.get_mut(0) {\n mode.value = !mode.value;\n } else {\n println!(\"Syntax error: else found with no previous if\");\n }\n continue;\n }\n\n if job.command == \"fi\" {\n if !shell.modes.is_empty() {\n shell.modes.remove(0);\n } else {\n println!(\"Syntax error: fi found with no previous if\");\n }\n continue;\n }\n\n let mut skipped: bool = false;\n for mode in shell.modes.iter() {\n if !mode.value {\n skipped = true;\n break;\n }\n }\n if skipped {\n continue;\n }\n\n \/\/ Set variables\n if let Some(i) = job.command.find('=') {\n let name = job.command[0..i].trim();\n let mut value = job.command[i + 1..job.command.len()].trim().to_string();\n\n for i in 0..job.args.len() {\n if let Some(arg) = job.args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n set_var(&mut shell.variables, name, &value);\n continue;\n }\n\n \/\/ Commands\n let mut args = job.args.clone();\n args.insert(0, job.command.clone());\n if let Some(command) = commands.get(&job.command.as_str()) {\n (*command.main)(&args, 
shell);\n } else {\n run_external_commmand(args, &mut shell.variables);\n }\n }\n}\n\n\npub fn set_var(variables: &mut Variables, name: &str, value: &str) {\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n variables.remove(&name.to_string());\n } else {\n variables.insert(name.to_string(), value.to_string());\n }\n}\n\nfn print_prompt(modes: &[Mode]) {\n let prompt_prefix = modes.iter().rev().fold(String::new(), |acc, mode| {\n acc +\n if mode.value {\n \"+ \"\n } else {\n \"- \"\n }\n });\n print!(\"{}\", prompt_prefix);\n\n let cwd = env::current_dir().ok().map_or(\"?\".to_string(),\n |ref p| p.to_str().unwrap_or(\"?\").to_string());\n\n print!(\"ion:{}# \", cwd);\n if let Err(message) = stdout().flush() {\n println!(\"{}: failed to flush prompt to stdout\", message);\n }\n}\n\nfn run_external_commmand(args: Vec<String>, variables: &mut Variables) {\n if let Some(path) = args.get(0) {\n let mut command = process::Command::new(path);\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n command.arg(arg);\n }\n }\n match command.spawn() {\n Ok(mut child) => {\n match child.wait() {\n Ok(status) => {\n if let Some(code) = status.code() {\n set_var(variables, \"?\", &code.to_string());\n } else {\n println!(\"{}: No child exit code\", path);\n }\n }\n Err(err) => println!(\"{}: Failed to wait: {}\", path, err),\n }\n }\n Err(err) => println!(\"{}: Failed to execute: {}\", path, err),\n }\n }\n}\n\nfn main() {\n let commands = Command::map();\n let mut shell = Shell::new();\n\n for arg in env::args().skip(1) {\n let mut command_list = String::new();\n if let Ok(mut file) = File::open(&arg) {\n if let Err(message) = file.read_to_string(&mut command_list) {\n println!(\"{}: Failed to read {}\", message, arg);\n }\n }\n on_command(&command_list, &commands, &mut shell);\n return;\n }\n\n loop {\n\n print_prompt(&shell.modes);\n\n if let Some(command_original) = readln() {\n let command = command_original.trim();\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n on_command(&command, &commands, &mut shell);\n }\n } else {\n break;\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Lifetimes corrected<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Missed canceling for node process Added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removed Span::shift_to_zero because it is inconsistent.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>empty message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse yaml_rust::Yaml;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct YamlHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl YamlHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> YamlHeaderParser {\n YamlHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for YamlHeaderParser {\n\n fn read(&self, 
string: Option<String>) -> Result<FileHeaderData, ParserError> {\n use yaml_rust::YamlLoader;\n if string.is_some() {\n let s = string.unwrap();\n YamlLoader::load_from_str(&s[..])\n .map(|mut vec_yaml| {\n vec_yaml.pop().map(|f| {\n visit_yaml(f)\n }).unwrap()\n })\n .map_err(|e| {\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&s[..], s.clone(), 0)\n })\n } else {\n Ok(FileHeaderData::Null)\n }\n\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n use yaml_rust::YamlEmitter;\n\n let mut buffer = String::new();\n let result = {\n let mut emitter = YamlEmitter::new(&mut buffer);\n emitter.dump(&visit_header(data))\n };\n result\n .map_err(|e| {\n error!(\"Error emitting YAML.\");\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&buffer[..], buffer.clone(), 0)\n })\n .map(|_| buffer)\n }\n\n}\n\nfn visit_yaml(v: Yaml) -> FileHeaderData {\n use std::process::exit;\n\n match v {\n Yaml::Real(_) => FileHeaderData::Float(v.as_f64().unwrap()),\n Yaml::Integer(i) => {\n if i > 0 {\n debug!(\"Castring {} : i64 -> u64\", i);\n FileHeaderData::UInteger(i as u64)\n } else {\n FileHeaderData::Integer(i)\n }\n },\n Yaml::String(s) => FileHeaderData::Text(s),\n Yaml::Boolean(b) => FileHeaderData::Bool(b),\n\n Yaml::Array(vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_yaml(i)).collect())\n }\n },\n\n Yaml::Hash(btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: String::from(k.as_str().unwrap()),\n value: Box::new(visit_yaml(v)),\n }\n ).collect()\n }\n },\n\n Yaml::Alias(_) => {\n warn!(\"YAML::ALIAS is not yet fully supported by rust-yaml\");\n FileHeaderData::Null\n },\n\n Yaml::Null => FileHeaderData::Null,\n\n Yaml::BadValue => {\n warn!(\"YAML parsing error\");\n exit(1);\n },\n }\n}\n\nfn visit_header(h: &FileHeaderData) -> Yaml {\n use std::ops::Deref;\n use std::collections::BTreeMap;\n use std::process::exit;\n\n match h {\n &FileHeaderData::Null => Yaml::Null,\n &FileHeaderData::Float(f) => Yaml::Real(format!(\"{}\", f)),\n &FileHeaderData::Integer(i) => Yaml::Integer(i),\n &FileHeaderData::UInteger(u) => {\n debug!(\"Might be losing data now: u64 -> i64 cast\");\n Yaml::Integer(u as i64)\n },\n &FileHeaderData::Text(ref s) => Yaml::String(s.clone()),\n &FileHeaderData::Bool(b) => Yaml::Boolean(b),\n\n &FileHeaderData::Array{values: ref a} => {\n Yaml::Array(a.deref().into_iter().map(|e| visit_header(e)).collect())\n },\n\n &FileHeaderData::Key{name: _, value: _} => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n },\n\n &FileHeaderData::Map{ref keys} => {\n let mut map : BTreeMap<Yaml, Yaml> = BTreeMap::new();\n\n let failed = keys.into_iter().map(|key| {\n match key {\n &FileHeaderData::Key{ref name, ref value} => {\n let k = Yaml::String(name.clone());\n let v = visit_header(value.deref());\n\n map.insert(k, v).is_none()\n },\n\n _ => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n }\n }\n })\n .fold(0, |acc, succeeded : bool| {\n if !succeeded { acc + 1 } else { acc }\n });\n\n debug!(\"Failed to insert {} keys\", failed);\n Yaml::Hash(map)\n },\n }\n}\n<commit_msg>Add simple deserialization test<commit_after>use std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse yaml_rust::Yaml;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse 
storage::file::header::data::FileHeaderData;\n\npub struct YamlHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl YamlHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> YamlHeaderParser {\n YamlHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for YamlHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"YamlHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for YamlHeaderParser {\n\n fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError> {\n use yaml_rust::YamlLoader;\n if string.is_some() {\n let s = string.unwrap();\n YamlLoader::load_from_str(&s[..])\n .map(|mut vec_yaml| {\n vec_yaml.pop().map(|f| {\n visit_yaml(f)\n }).unwrap()\n })\n .map_err(|e| {\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&s[..], s.clone(), 0)\n })\n } else {\n Ok(FileHeaderData::Null)\n }\n\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n use yaml_rust::YamlEmitter;\n\n let mut buffer = String::new();\n let result = {\n let mut emitter = YamlEmitter::new(&mut buffer);\n emitter.dump(&visit_header(data))\n };\n result\n .map_err(|e| {\n error!(\"Error emitting YAML.\");\n debug!(\"YAML parser error: {:?}\", e);\n ParserError::short(&buffer[..], buffer.clone(), 0)\n })\n .map(|_| buffer)\n }\n\n}\n\nfn visit_yaml(v: Yaml) -> FileHeaderData {\n use std::process::exit;\n\n match v {\n Yaml::Real(_) => FileHeaderData::Float(v.as_f64().unwrap()),\n Yaml::Integer(i) => {\n if i > 0 {\n debug!(\"Castring {} : i64 -> u64\", i);\n FileHeaderData::UInteger(i as u64)\n } else {\n FileHeaderData::Integer(i)\n }\n },\n Yaml::String(s) => FileHeaderData::Text(s),\n Yaml::Boolean(b) => FileHeaderData::Bool(b),\n\n Yaml::Array(vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_yaml(i)).collect())\n }\n },\n\n Yaml::Hash(btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: String::from(k.as_str().unwrap()),\n value: Box::new(visit_yaml(v)),\n }\n ).collect()\n }\n },\n\n Yaml::Alias(_) => {\n warn!(\"YAML::ALIAS is not yet fully supported by rust-yaml\");\n FileHeaderData::Null\n },\n\n Yaml::Null => FileHeaderData::Null,\n\n Yaml::BadValue => {\n warn!(\"YAML parsing error\");\n exit(1);\n },\n }\n}\n\nfn visit_header(h: &FileHeaderData) -> Yaml {\n use std::ops::Deref;\n use std::collections::BTreeMap;\n use std::process::exit;\n\n match h {\n &FileHeaderData::Null => Yaml::Null,\n &FileHeaderData::Float(f) => Yaml::Real(format!(\"{}\", f)),\n &FileHeaderData::Integer(i) => Yaml::Integer(i),\n &FileHeaderData::UInteger(u) => {\n debug!(\"Might be losing data now: u64 -> i64 cast\");\n Yaml::Integer(u as i64)\n },\n &FileHeaderData::Text(ref s) => Yaml::String(s.clone()),\n &FileHeaderData::Bool(b) => Yaml::Boolean(b),\n\n &FileHeaderData::Array{values: ref a} => {\n Yaml::Array(a.deref().into_iter().map(|e| visit_header(e)).collect())\n },\n\n &FileHeaderData::Key{name: _, value: _} => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n },\n\n &FileHeaderData::Map{ref keys} => {\n let mut map : BTreeMap<Yaml, Yaml> = BTreeMap::new();\n\n let failed = keys.into_iter().map(|key| {\n match key {\n &FileHeaderData::Key{ref name, ref value} => {\n let k = 
Yaml::String(name.clone());\n let v = visit_header(value.deref());\n\n map.insert(k, v).is_none()\n },\n\n _ => {\n error!(\"Something went terribly wrong when trying to emit YAML\");\n exit(1);\n }\n }\n })\n .fold(0, |acc, succeeded : bool| {\n if !succeeded { acc + 1 } else { acc }\n });\n\n debug!(\"Failed to insert {} keys\", failed);\n Yaml::Hash(map)\n },\n }\n}\n\n#[cfg(test)]\nmod test {\n use std::ops::Deref;\n\n use super::YamlHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"a: 1\\nb: 2\");\n let spec = FHS::Array { allowed_types: vec![\n FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n ]\n }\n ]\n };\n\n let parser = YamlHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n debug!(\"Parsed: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{ref keys}) => {\n keys.into_iter().map(|k| {\n match k {\n &FHD::Key{ref name, ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert!(u == 1 || u == 2),\n &FHD::Integer(_) => assert!(false, \"Found Integer, expected UInteger\"),\n _ => assert!(false, \"Integers are not here\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n };\n })\n .all(|x| x == ());\n },\n _ => assert!(false, \"Map is not a Map\"),\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. 
It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0 rls\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_trans\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n\/\/ Crate(\"ar\"),\n\/\/ Crate(\"arena\"),\n\/\/ Crate(\"backtrace\"),\n\/\/ Crate(\"backtrace-sys\"),\n\/\/ Crate(\"bitflags\"),\n\/\/ Crate(\"build_helper\"),\n\/\/ Crate(\"byteorder\"),\n\/\/ Crate(\"cc\"),\n\/\/ Crate(\"cfg-if\"),\n\/\/ Crate(\"cmake\"),\n\/\/ Crate(\"filetime\"),\n\/\/ Crate(\"flate2\"),\n\/\/ Crate(\"fmt_macros\"),\n\/\/ Crate(\"fuchsia-zircon\"),\n\/\/ Crate(\"fuchsia-zircon-sys\"),\n\/\/ Crate(\"graphviz\"),\n\/\/ Crate(\"jobserver\"),\n\/\/ Crate(\"kernel32-sys\"),\n\/\/ Crate(\"lazy_static\"),\n\/\/ Crate(\"libc\"),\n\/\/ Crate(\"log\"),\n\/\/ Crate(\"log_settings\"),\n\/\/ Crate(\"miniz-sys\"),\n\/\/ Crate(\"num_cpus\"),\n\/\/ Crate(\"owning_ref\"),\n\/\/ Crate(\"parking_lot\"),\n\/\/ Crate(\"parking_lot_core\"),\n\/\/ Crate(\"rand\"),\n\/\/ Crate(\"redox_syscall\"),\n\/\/ Crate(\"rustc\"),\n\/\/ Crate(\"rustc-demangle\"),\n\/\/ Crate(\"rustc_allocator\"),\n\/\/ Crate(\"rustc_apfloat\"),\n\/\/ Crate(\"rustc_back\"),\n\/\/ Crate(\"rustc_binaryen\"),\n\/\/ Crate(\"rustc_const_eval\"),\n\/\/ Crate(\"rustc_const_math\"),\n\/\/ Crate(\"rustc_cratesio_shim\"),\n\/\/ Crate(\"rustc_data_structures\"),\n\/\/ Crate(\"rustc_errors\"),\n\/\/ Crate(\"rustc_incremental\"),\n\/\/ Crate(\"rustc_llvm\"),\n\/\/ Crate(\"rustc_mir\"),\n\/\/ Crate(\"rustc_platform_intrinsics\"),\n\/\/ Crate(\"rustc_trans\"),\n\/\/ Crate(\"rustc_trans_utils\"),\n\/\/ Crate(\"serialize\"),\n\/\/ Crate(\"smallvec\"),\n\/\/ Crate(\"stable_deref_trait\"),\n\/\/ Crate(\"syntax\"),\n\/\/ Crate(\"syntax_pos\"),\n\/\/ Crate(\"tempdir\"),\n\/\/ Crate(\"unicode-width\"),\n\/\/ Crate(\"winapi\"),\n\/\/ Crate(\"winapi-build\"),\n\/\/ Crate(\"winapi-i686-pc-windows-gnu\"),\n\/\/ Crate(\"winapi-x86_64-pc-windows-gnu\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n 
pub fn from_str(s: &'a str) -> Self {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n\n CrateVersion(name, version)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies 
of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let krate = CrateVersion::from_str(dep);\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate);\n\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<commit_msg>Don't check in-tree deps<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0 rls\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_trans\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. 
Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n\/\/ Crate(\"backtrace\"),\n\/\/ Crate(\"backtrace-sys\"),\n\/\/ Crate(\"bitflags\"),\n\/\/ Crate(\"byteorder\"),\n\/\/ Crate(\"cc\"),\n\/\/ Crate(\"cfg-if\"),\n\/\/ Crate(\"flate2\"),\n\/\/ Crate(\"fuchsia-zircon\"),\n\/\/ Crate(\"fuchsia-zircon-sys\"),\n\/\/ Crate(\"jobserver\"),\n\/\/ Crate(\"lazy_static\"),\n\/\/ Crate(\"libc\"),\n\/\/ Crate(\"log\"),\n\/\/ Crate(\"miniz-sys\"),\n\/\/ Crate(\"num_cpus\"),\n\/\/ Crate(\"rand\"),\n\/\/ Crate(\"rustc\"),\n\/\/ Crate(\"rustc-demangle\"),\n\/\/ Crate(\"rustc_trans\"),\n\/\/ Crate(\"tempdir\"),\n\/\/ Crate(\"winapi\"),\n\/\/ Crate(\"winapi-i686-pc-windows-gnu\"),\n\/\/ Crate(\"winapi-x86_64-pc-windows-gnu\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. 
Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n \/\/ We don't check in-tree deps\n if !is_path_dep {\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate);\n unapproved.append(&mut bad);\n }\n }\n\n unapproved\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ZMsgExtended::send_multi() is no longer static<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(Error Status): fixes bug where --help and --version return non-zero exit code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Kill dead code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Clippy Fix: let_underscore_lock<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test items from serde_codegen test suite<commit_after>extern crate item;\n\n#[macro_use]\nextern crate quote;\n\n\/\/\/ These are all of the items from serde_codegen's test suite.\n\/\/\/ Obnoxious whitespace has been added in an attempt to fool the parser.\n#[test]\nfn test_all() {\n for s in ITEMS {\n let ast = item::parse(s);\n let tokens = quote!(#ast).to_string();\n assert_eq!(ast, item::parse(&tokens));\n }\n\n static ITEMS: &'static [&'static str] = &[\n r#\"\n # [ derive ( Serialize ) ]\n #[derive(Deserialize)]\n struct DefaultStruct <A , B, C, D, E> where C : MyDefault , E: MyDefault {\n a1 : A,\n #[serde(default)]\n a2: B,\n #[serde(default = \"MyDefault::my_default\")]\n a3: C,\n #[serde(skip_deserializing)]\n a4: D,\n #[serde(skip_deserializing, default = \"MyDefault::my_default\")]\n a5: E,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum DefaultEnum<A, B, C, D, E> where C: MyDefault, E: MyDefault {\n Struct {\n a1: A,\n #[serde(default)]\n a2: B,\n #[serde(default = \"MyDefault::my_default\")]\n a3: C,\n #[serde(skip_deserializing)]\n a4: D,\n #[serde(skip_deserializing, default = \"MyDefault::my_default\")]\n a5: E,\n },\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct NoStdDefault(i8);\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct ContainsNoStdDefault <A : MyDefault> {\n #[serde(default = \"MyDefault::my_default\")]\n a: A,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct ContainsNotDeserialize<A, B, C: DeserializeWith, E: MyDefault> {\n #[serde(skip_deserializing)]\n a: A,\n #[serde(skip_deserializing, default)]\n b: B,\n #[serde(deserialize_with = \"DeserializeWith::deserialize_with\", default)]\n c: C,\n #[serde(skip_deserializing, default = \"MyDefault::my_default\")]\n e: E,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(deny_unknown_fields)]\n #[derive(Deserialize)]\n struct DenyUnknown {\n a1: i32,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(rename = \"Superhero\")]\n 
#[derive(Deserialize)]\n struct RenameStruct {\n a1: i32,\n #[serde(rename = \"a3\")]\n a2: i32,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(rename(serialize = \"SuperheroSer\", deserialize = \"SuperheroDe\"))]\n #[derive(Deserialize)]\n struct RenameStructSerializeDeserialize {\n a1: i32,\n #[serde(rename(serialize = \"a4\", deserialize = \"a5\"))]\n a2: i32,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(rename = \"Superhero\")]\n #[derive(Deserialize)]\n enum RenameEnum {\n\n #[serde(rename = \"bruce_wayne\")]\n Batman ,\n\n #[serde(rename = \"clark_kent\")]\n Superman ( i8 ) ,\n\n #[serde(rename = \"diana_prince\")]\n WonderWoman ( i8 , i8 ) ,\n\n #[serde(rename = \"barry_allan\")]\n Flash {\n #[serde(rename = \"b\")]\n a : i32,\n } ,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n #[serde ( rename ( serialize = \"SuperheroSer\" , deserialize = \"SuperheroDe\" ) ) ]\n #[derive(Serialize)]\n enum RenameEnumSerializeDeserialize<A> {\n\n #[serde(rename(serialize = \"dick_grayson\", deserialize = \"jason_todd\"))]\n Robin {\n a: i8,\n #[serde(rename(serialize = \"c\"))]\n #[serde(rename(deserialize = \"d\"))]\n b: A,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct SkipSerializingStruct< 'a , B, C> where C : ShouldSkip {\n a: & 'a i8,\n #[serde(skip_serializing)]\n b: B,\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n c: C,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n enum SkipSerializingEnum<'a, B, C> where C: ShouldSkip {\n Struct {\n a: &'a i8,\n #[serde(skip_serializing)]\n _b: B,\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n c: C,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct ContainsNotSerialize<'a, B, C, D> where B: 'a , D: SerializeWith {\n a: &'a Option<i8>,\n #[serde(skip_serializing)]\n b: &'a B,\n #[serde(skip_serializing)]\n c: Option<C>,\n #[serde(serialize_with = \"SerializeWith::serialize_with\")]\n d: D,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct SerializeWithStruct<'a, B> where B: SerializeWith {\n a: &'a i8,\n #[serde(serialize_with = \"SerializeWith::serialize_with\")]\n b: B,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n enum SerializeWithEnum<'a, B> where B: SerializeWith {\n Struct {\n a: &'a i8,\n #[serde(serialize_with = \"SerializeWith::serialize_with\")]\n b: B,\n },\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct DeserializeWithStruct<B> where B: DeserializeWith {\n a: i8,\n #[serde(deserialize_with = \"DeserializeWith::deserialize_with\")]\n b: B,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n enum DeserializeWithEnum<B> where B: DeserializeWith {\n Struct {\n a: i8,\n #[serde(deserialize_with = \"DeserializeWith::deserialize_with\")]\n b: B,\n },\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n enum InvalidLengthEnum {\n A(i32, i32, i32),\n B(\n #[serde(skip_deserializing)]\n i32, i32, i32),\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct TupleStruct(i32, i32, i32);\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct Struct {\n a: i32,\n b: i32,\n #[serde(skip_deserializing)]\n c: i32,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n enum Enum {\n Unit,\n Simple(i32),\n Seq(i32, i32, i32),\n Map {\n a: i32,\n b: i32,\n c: i32,\n },\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct IgnoreBase {\n a: i32,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct With<T> {\n t: T,\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n x: X,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct WithRef<'a, T: 'a> {\n #[serde(skip_deserializing)]\n t: 
Option<&'a T>,\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n x: X,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct PhantomX {\n x: PhantomData<X>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct PhantomT<T> {\n t: PhantomData<T>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct NoBounds<T> {\n t: T,\n option: Option<T>,\n boxed: Box<T>,\n option_boxed: Option<Box<T>>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum EnumWith<T> {\n Unit,\n Newtype(\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n X),\n Tuple(T,\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n X),\n Struct {\n t: T,\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n x: X,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct MultipleRef<'a, 'b, 'c, T> where T: 'c, 'c: 'b, 'b: 'a {\n t: T,\n rrrt: &'a &'b &'c T,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct Newtype(\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n X);\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct Tuple<T>(T,\n #[serde(serialize_with = \"ser_x\", deserialize_with = \"de_x\")]\n X);\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum TreeNode<D> {\n Split {\n left: Box<TreeNode<D>>,\n right: Box<TreeNode<D>>,\n },\n Leaf {\n data: D,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct ListNode<D> {\n data: D,\n next: Box<ListNode<D>>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct RecursiveA {\n b: Box<RecursiveB>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum RecursiveB { A(RecursiveA), }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct RecursiveGenericA<T> {\n t: T,\n b: Box<RecursiveGenericB<T>>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum RecursiveGenericB<T> { T(T), A(RecursiveGenericA<T>), }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct OptionStatic<'a> {\n a: Option<&'a str>,\n b: Option<&'static str>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(bound = \"D: SerializeWith + DeserializeWith\")]\n #[derive(Deserialize)]\n struct WithTraits1<D, E> {\n #[serde(serialize_with = \"SerializeWith::serialize_with\",\n deserialize_with = \"DeserializeWith::deserialize_with\")]\n d: D,\n #[serde(serialize_with = \"SerializeWith::serialize_with\",\n deserialize_with = \"DeserializeWith::deserialize_with\",\n bound = \"E: SerializeWith + DeserializeWith\")]\n e: E,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(bound(serialize = \"D: SerializeWith\",\n deserialize = \"D: DeserializeWith\"))]\n #[derive(Deserialize)]\n struct WithTraits2<D, E> {\n #[serde(serialize_with = \"SerializeWith::serialize_with\",\n deserialize_with = \"DeserializeWith::deserialize_with\")]\n d: D,\n #[serde(serialize_with = \"SerializeWith::serialize_with\",\n bound(serialize = \"E: SerializeWith\"))]\n #[serde(deserialize_with = \"DeserializeWith::deserialize_with\",\n bound(deserialize = \"E: DeserializeWith\"))]\n e: E,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct CowStr<'a>(Cow<'a, str>);\n \"#,\n r#\"\n #[derive(Serialize)]\n #[serde(bound(deserialize = \"T::Owned: Deserialize\"))]\n #[derive(Deserialize)]\n struct CowT < 'a , T : ? 
Sized + 'a + ToOwned > ( Cow < 'a , T > ) ;\n \"#,\n r#\"\n #[derive(Serialize)]\n struct SerNamedTuple<'a, 'b, A: 'a, B: 'b, C>(&'a A, &'b mut B, C);\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct DeNamedTuple<A, B, C>(A, B, C);\n \"#,\n r#\"\n #[derive(Serialize)]\n struct SerNamedMap<'a, 'b, A: 'a, B: 'b, C> {\n a: &'a A,\n b: &'b mut B,\n c: C,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n struct DeNamedMap<A, B, C> {\n a: A,\n b: < Vec < T > as a :: b :: Trait > :: AssociatedItem,\n c: < Vec < T > > :: AssociatedItem,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n enum SerEnum<'a, B: 'a, C: 'a, D> where for < 'a > D: 'a {\n Unit,\n Seq(i8, B, &'a C, &'a mut D),\n Map {\n a: i8,\n b: B,\n c: &'a C,\n d: &'a mut D,\n },\n _Unit2,\n _Seq2(i8, B, &'a C, &'a mut D),\n _Map2 {\n a: i8,\n b: B,\n c: &'a C,\n d: &'a mut D,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n enum DeEnum<B, C, D> {\n Unit,\n Seq(i8, B, C, D),\n Map {\n a: i8,\n b: B,\n c: C,\n d: D,\n },\n _Unit2,\n _Seq2(i8, B, C, D),\n _Map2 {\n a: i8,\n b: B,\n c: C,\n d: D,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n enum Lifetimes<'a> {\n LifetimeSeq(&'a i32),\n NoLifetimeSeq(i32),\n LifetimeMap {\n a: &'a i32,\n },\n NoLifetimeMap {\n a: i32,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n pub struct GenericStruct<T> {\n x: T,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n pub struct GenericNewTypeStruct<T>(T);\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n pub struct GenericTupleStruct<T, U>(T, U);\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n pub enum GenericEnum<T, U: for < 'a > F<'a>> {\n Unit,\n NewType(T),\n Seq(T, U),\n Map {\n x: T,\n y: U,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct DefaultTyParam<T: AssociatedType<X = i32> = i32> {\n phantom: std :: marker :: PhantomData<T>,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n struct UnitStruct;\n \"#,\n r#\"\n #[derive(Serialize)]\n struct TupleStruct(i32, i32, i32);\n \"#,\n r#\"\n #[derive(Serialize)]\n struct Struct {\n a: i32,\n b: i32,\n c: i32,\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n enum Enum {\n Unit,\n One(i32),\n Seq(i32, i32),\n Map {\n a: i32,\n b: i32,\n },\n }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct Bounds<T: Serialize + Deserialize> {\n t: T,\n option: Option<T>,\n boxed: Box<T>,\n option_boxed: Option<Box<T>>,\n }\n \"#,\n r#\"\n #[derive(Deserialize)]\n #[rustc_copy_clone_marker]\n struct UnitStruct;\n \"#,\n r#\"\n #[derive(Serialize)]\n #[allow(dead_code)]\n #[deny(unused_variables)]\n enum Void { }\n \"#,\n r#\"\n #[derive(Serialize)]\n #[derive(Deserialize)]\n struct NamedUnit;\n \"#,\n ];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>unbreak collections::vec::IntoIter::drop<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>global tick will be stapled to game epoch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Nicer formatting for DataProcessing instruction<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>but and -> and<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix the description of --width and --height command-line options<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock.\n\/\/\/\n\/\/\/ Instants are always guaranteed to be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words, each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. There is\n\/\/\/ no method to get \"the number of seconds\" from an instant. Instead, it only\n\/\/\/ allows measuring the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps such as those\n\/\/\/ on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. 
By calculating the duration from this\n\/\/\/ fixed point in time, a `SystemTime` can be converted to a human-readable time,\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this\n \/\/\/ instant, which is something that can happen if an `Instant` is\n \/\/\/ produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(Duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may 
fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second system time represents a point later\n \/\/\/ in time than the `self` of the method call.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! 
assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 100) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n\n let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0);\n let one_second_from_epoch2 = UNIX_EPOCH + Duration::new(0, 500_000_000)\n + Duration::new(0, 500_000_000);\n assert_eq!(one_second_from_epoch, one_second_from_epoch2);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<commit_msg>Instant\/SystemTime doc: add meaning to first line<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Temporal quantification.\n\n#![stable(feature = \"time\", since = \"1.3.0\")]\n\nuse error::Error;\nuse fmt;\nuse ops::{Add, Sub};\nuse sys::time;\n\n#[stable(feature = \"time\", since = \"1.3.0\")]\npub use self::duration::Duration;\n\nmod duration;\n\n\/\/\/ A measurement of a monotonically increasing clock,\n\/\/\/ only meaningful within one process.\n\/\/\/\n\/\/\/ Instants are always guaranteed to be greater than any previously measured\n\/\/\/ instant when created, and are often useful for tasks such as measuring\n\/\/\/ benchmarks or timing how long an operation takes.\n\/\/\/\n\/\/\/ Note, however, that instants are not guaranteed to be **steady**. In other\n\/\/\/ words, each tick of the underlying clock may not be the same length (e.g.\n\/\/\/ some seconds may be longer than others). An instant may jump forwards or\n\/\/\/ experience time dilation (slow down or speed up), but it will never go\n\/\/\/ backwards.\n\/\/\/\n\/\/\/ Instants are opaque types that can only be compared to one another. There is\n\/\/\/ no method to get \"the number of seconds\" from an instant. Instead, it only\n\/\/\/ allows measuring the duration between two instants (or comparing two\n\/\/\/ instants).\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct Instant(time::Instant);\n\n\/\/\/ A measurement of the system clock appropriate for timestamps meaningful\n\/\/\/ outside one process, such as those on files on the filesystem.\n\/\/\/\n\/\/\/ Distinct from the `Instant` type, this time measurement **is not\n\/\/\/ monotonic**. This means that you can save a file to the file system, then\n\/\/\/ save another file to the file system, **and the second file has a\n\/\/\/ `SystemTime` measurement earlier than the second**. In other words, an\n\/\/\/ operation that happens after another operation in real time may have an\n\/\/\/ earlier `SystemTime`!\n\/\/\/\n\/\/\/ Consequently, comparing two `SystemTime` instances to learn about the\n\/\/\/ duration between them returns a `Result` instead of an infallible `Duration`\n\/\/\/ to indicate that this sort of time drift may happen and needs to be handled.\n\/\/\/\n\/\/\/ Although a `SystemTime` cannot be directly inspected, the `UNIX_EPOCH`\n\/\/\/ constant is provided in this module as an anchor in time to learn\n\/\/\/ information about a `SystemTime`. 
By calculating the duration from this\n\/\/\/ fixed point in time, a `SystemTime` can be converted to a human-readable time,\n\/\/\/ or perhaps some other string representation.\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTime(time::SystemTime);\n\n\/\/\/ An error returned from the `duration_from_earlier` method on `SystemTime`,\n\/\/\/ used to learn about why how far in the opposite direction a timestamp lies.\n#[derive(Clone, Debug)]\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub struct SystemTimeError(Duration);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Instant {\n \/\/\/ Returns an instant corresponding to \"now\".\n pub fn now() -> Instant {\n Instant(time::Instant::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from another instant to this one.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if `earlier` is later than `self`, which should\n \/\/\/ only be possible if `earlier` was created after `self`. Because\n \/\/\/ `Instant` is monotonic, the only time that this should happen should be\n \/\/\/ a bug.\n pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {\n self.0.sub_instant(&earlier.0)\n }\n\n \/\/\/ Returns the amount of time elapsed since this instant was created.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function may panic if the current time is earlier than this\n \/\/\/ instant, which is something that can happen if an `Instant` is\n \/\/\/ produced synthetically.\n pub fn elapsed(&self) -> Duration {\n Instant::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for Instant {\n type Output = Instant;\n\n fn add(self, other: Duration) -> Instant {\n Instant(self.0.add_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for Instant {\n type Output = Instant;\n\n fn sub(self, other: Duration) -> Instant {\n Instant(self.0.sub_duration(&other))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for Instant {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTime {\n \/\/\/ Returns the system time corresponding to \"now\".\n pub fn now() -> SystemTime {\n SystemTime(time::SystemTime::now())\n }\n\n \/\/\/ Returns the amount of time elapsed from an earlier point in time.\n \/\/\/\n \/\/\/ This function may fail because measurements taken earlier are not\n \/\/\/ guaranteed to always be before later measurements (due to anomalies such\n \/\/\/ as the system clock being adjusted either forwards or backwards).\n \/\/\/\n \/\/\/ If successful, `Ok(Duration)` is returned where the duration represents\n \/\/\/ the amount of time elapsed from the specified measurement to this one.\n \/\/\/\n \/\/\/ Returns an `Err` if `earlier` is later than `self`, and the error\n \/\/\/ contains how far from `self` the time is.\n pub fn duration_from_earlier(&self, earlier: SystemTime)\n -> Result<Duration, SystemTimeError> {\n self.0.sub_time(&earlier.0).map_err(SystemTimeError)\n }\n\n \/\/\/ Returns the amount of time elapsed since this system time was created.\n \/\/\/\n \/\/\/ This function may 
fail as the underlying system clock is susceptible to\n \/\/\/ drift and updates (e.g. the system clock could go backwards), so this\n \/\/\/ function may not always succeed. If successful, `Ok(duration)` is\n \/\/\/ returned where the duration represents the amount of time elapsed from\n \/\/\/ this time measurement to the current time.\n \/\/\/\n \/\/\/ Returns an `Err` if `self` is later than the current system time, and\n \/\/\/ the error contains how far from the current system time `self` is.\n pub fn elapsed(&self) -> Result<Duration, SystemTimeError> {\n SystemTime::now().duration_from_earlier(*self)\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Add<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn add(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.add_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Sub<Duration> for SystemTime {\n type Output = SystemTime;\n\n fn sub(self, dur: Duration) -> SystemTime {\n SystemTime(self.0.sub_duration(&dur))\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Debug for SystemTime {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.0.fmt(f)\n }\n}\n\n\/\/\/ An anchor in time which can be used to create new `SystemTime` instances or\n\/\/\/ learn about where in time a `SystemTime` lies.\n\/\/\/\n\/\/\/ This constant is defined to be \"1970-01-01 00:00:00 UTC\" on all systems with\n\/\/\/ respect to the system clock. Using `duration_from_earlier` on an existing\n\/\/\/ `SystemTime` instance can tell how far away from this point in time a\n\/\/\/ measurement lies, and using `UNIX_EPOCH + duration` can be used to create a\n\/\/\/ `SystemTime` instance to represent another fixed point in time.\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\npub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH);\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl SystemTimeError {\n \/\/\/ Returns the positive duration which represents how far forward the\n \/\/\/ second system time was from the first.\n \/\/\/\n \/\/\/ A `SystemTimeError` is returned from the `duration_from_earlier`\n \/\/\/ operation whenever the second system time represents a point later\n \/\/\/ in time than the `self` of the method call.\n pub fn duration(&self) -> Duration {\n self.0\n }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl Error for SystemTimeError {\n fn description(&self) -> &str { \"other time was not earlier than self\" }\n}\n\n#[unstable(feature = \"time2\", reason = \"recently added\", issue = \"29866\")]\nimpl fmt::Display for SystemTimeError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"second time provided was later than self\")\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Instant, SystemTime, Duration, UNIX_EPOCH};\n\n macro_rules! 
assert_almost_eq {\n ($a:expr, $b:expr) => ({\n let (a, b) = ($a, $b);\n if a != b {\n let (a, b) = if a > b {(a, b)} else {(b, a)};\n assert!(a - Duration::new(0, 100) <= b);\n }\n })\n }\n\n #[test]\n fn instant_monotonic() {\n let a = Instant::now();\n let b = Instant::now();\n assert!(b >= a);\n }\n\n #[test]\n fn instant_elapsed() {\n let a = Instant::now();\n a.elapsed();\n }\n\n #[test]\n fn instant_math() {\n let a = Instant::now();\n let b = Instant::now();\n let dur = b.duration_from_earlier(a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a - second + second, a);\n }\n\n #[test]\n #[should_panic]\n fn instant_duration_panic() {\n let a = Instant::now();\n (a - Duration::new(1, 0)).duration_from_earlier(a);\n }\n\n #[test]\n fn system_time_math() {\n let a = SystemTime::now();\n let b = SystemTime::now();\n match b.duration_from_earlier(a) {\n Ok(dur) if dur == Duration::new(0, 0) => {\n assert_almost_eq!(a, b);\n }\n Ok(dur) => {\n assert!(b > a);\n assert_almost_eq!(b - dur, a);\n assert_almost_eq!(a + dur, b);\n }\n Err(dur) => {\n let dur = dur.duration();\n assert!(a > b);\n assert_almost_eq!(b + dur, a);\n assert_almost_eq!(b - dur, a);\n }\n }\n\n let second = Duration::new(1, 0);\n assert_almost_eq!(a.duration_from_earlier(a - second).unwrap(), second);\n assert_almost_eq!(a.duration_from_earlier(a + second).unwrap_err()\n .duration(), second);\n\n assert_almost_eq!(a - second + second, a);\n\n let eighty_years = second * 60 * 60 * 24 * 365 * 80;\n assert_almost_eq!(a - eighty_years + eighty_years, a);\n assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a);\n\n let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0);\n let one_second_from_epoch2 = UNIX_EPOCH + Duration::new(0, 500_000_000)\n + Duration::new(0, 500_000_000);\n assert_eq!(one_second_from_epoch, one_second_from_epoch2);\n }\n\n #[test]\n fn system_time_elapsed() {\n let a = SystemTime::now();\n drop(a.elapsed());\n }\n\n #[test]\n fn since_epoch() {\n let ts = SystemTime::now();\n let a = ts.duration_from_earlier(UNIX_EPOCH).unwrap();\n let b = ts.duration_from_earlier(UNIX_EPOCH - Duration::new(1, 0)).unwrap();\n assert!(b > a);\n assert_eq!(b - a, Duration::new(1, 0));\n\n \/\/ let's assume that we're all running computers later than 2000\n let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;\n assert!(a > thirty_years);\n\n \/\/ let's assume that we're all running computers earlier than 2090.\n \/\/ Should give us ~70 years to fix this!\n let hundred_twenty_years = thirty_years * 4;\n assert!(a < hundred_twenty_years);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem 14<commit_after>fn dupli<T: Clone>(list: &[T]) -> ~[T] {\n let mut r = ~[];\n for e in list.iter() {\n r.push(e.clone());\n r.push(e.clone());\n }\n r\n}\n\nfn main() {\n let list = ~['a', 'b', 'c', 'c', 'd'];\n println!(\"{:?}\", dupli(list));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>File dumps and wrapped input<commit_after>use std::fs::File;\nuse std::path::Path;\nuse std::error::Error;\nuse std::io::prelude::*;\n\npub fn read_in(file_path: &str) -> String\n{\n let path = Path::new(file_path);\n let display = path.display();\n\n let mut file = match File::open(&path)\n {\n Err(why) => panic!(\"couldn't open {}: {}\", display, Error::description(&why)),\n Ok(file) => file,\n };\n\n let mut file_as_string = String::new();\n match file.read_to_string(&mut file_as_string) {\n Err(why) => 
panic!(\"couldn't read {}: {}\", display, Error::description(&why)),\n Ok(_) => {},\n }\n\n file_as_string\n}\n\npub fn dump(file_path: &str, data: String)\n{\n let path = Path::new(file_path);\n let display = path.display();\n\n let mut file = match File::create(&path)\n {\n Err(why) => panic!(\"couldn't create {}: {}\", display, Error::description(&why)),\n Ok(file) => file,\n };\n\n match file.write_all(data.as_bytes())\n {\n Err(why) => {panic!(\"couldn't write to {}: {}\", display, Error::description(&why))},\n Ok(_) => println!(\"successfully output to {}\", display),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Intermediate network commit.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>move semantics, chapter 2<commit_after>\/\/ move1.rs\nfn main() {\n let s1 = \"hello dolly\".to_string();\n let s2 = s1;\n println!(\"s1 {}\", s1);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem 20<commit_after>fn drop<T: Clone>(list: &[T], k: uint) -> ~[T] {\n let mut res = ~[];\n res.push_all(list.slice_to(k-1));\n res.push_all(list.slice_from(k));\n res\n}\n\nfn main() {\n let list = ~['a', 'b', 'c', 'd'];\n println!(\"{:?}\", drop(list, 2));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Some change to the blackboard<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[Rust] overflow panicked on debug build<commit_after>fn main() {\n let a: u8 = 0xf0;\n let b: u8 = 0xf0;\n println!(\"{} {} {}\", a, b, a + b);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Ensure we can read proxy params from environment<commit_after>#![cfg(feature = \"reqwest-backend\")]\n\nuse std::env::{remove_var, set_var};\n\nuse env_proxy::for_url;\nuse url::Url;\n\nfn scrub_env() {\n remove_var(\"http_proxy\");\n remove_var(\"https_proxy\");\n remove_var(\"HTTPS_PROXY\");\n remove_var(\"ftp_proxy\");\n remove_var(\"FTP_PROXY\");\n remove_var(\"all_proxy\");\n remove_var(\"ALL_PROXY\");\n remove_var(\"no_proxy\");\n remove_var(\"NO_PROXY\");\n}\n\n\/\/ Tests for correctly retrieving the proxy (host, port) tuple from $https_proxy\n#[test]\nfn read_basic_proxy_params() {\n scrub_env();\n set_var(\"https_proxy\", \"http:\/\/proxy.example.com:8080\");\n let u = Url::parse(\"https:\/\/www.example.org\").ok().unwrap();\n assert_eq!(\n for_url(&u).host_port(),\n Some((\"proxy.example.com\".to_string(), 8080))\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>problem: unsafe scrypt params solution: do unsafe only for particular values<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>play is ready for hog debugging<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added rotation and scaling to polygons<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test that function types are actually zero-sized.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that fn item types are zero-sized.\n\nuse std::mem::{size_of, size_of_val};\n\nfn main() {\n assert_eq!(size_of_val(&main), 0);\n\n let (a, b) = (size_of::<u8>, size_of::<u16>);\n assert_eq!(size_of_val(&a), 0);\n assert_eq!(size_of_val(&b), 0);\n assert_eq!((a(), b()), (1, 2));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added main<commit_after>\nfn main() {\n println!(\"Thunderdome Graph Server starting up...\");\n\n println!(\"Thunderdome Graph Server shutting down\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Correct line drawing and transformation order.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove a few derives to reduce compile times<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>addding new site cmd<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Improve version parsing to handle v12<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor of Print Function (WIP) (#9)<commit_after><|endoftext|>"} {"text":"<commit_before>#[macro_use] extern crate clap;\n#[macro_use] extern crate log;\n#[macro_use] extern crate serde;\n#[macro_use] extern crate serde_json;\n#[macro_use] extern crate glob;\n#[macro_use] extern crate uuid;\n#[macro_use] extern crate regex;\nextern crate config;\n\nuse cli::CliConfig;\nuse configuration::Configuration;\nuse runtime::{ImagLogger, Runtime};\nuse clap::App;\nuse module::Module;\nuse module::ModuleError;\nuse module::CommandEnv;\nuse module::bm::BMModule;\nuse storage::backend::StorageBackend;\n\nmod cli;\nmod configuration;\nmod runtime;\nmod module;\nmod storage;\nmod ui;\n\nfn main() {\n let yaml = load_yaml!(\"..\/etc\/cli.yml\");\n let app = App::from_yaml(yaml);\n let config = CliConfig::new(app);\n let configuration = Configuration::new(&config);\n\n let logger = ImagLogger::init(&configuration, &config);\n debug!(\"Logger created!\");\n\n debug!(\"CliConfig : {:?}\", &config);\n debug!(\"Configuration: {:?}\", &configuration);\n\n let rt = Runtime::new(configuration, config);\n\n debug!(\"Runtime : {:?}\", &rt);\n\n if let Some(matches) = rt.config.cli_matches.subcommand_matches(\"bm\") {\n let module = BMModule::new(&rt);\n let commands = module.get_commands(&rt);\n if let Some(command) = matches.subcommand_name() {\n debug!(\"Subcommand: {}\", command);\n\n let backend = StorageBackend::new(&rt);\n\n let cmdenv = CommandEnv {\n rt: &rt,\n bk: &backend,\n matches: matches.subcommand_matches(command).unwrap(),\n };\n\n let result = match commands.get(command) {\n Some(f) => f(&module, cmdenv),\n None => Err(ModuleError::new(\"No subcommand found\")),\n };\n\n debug!(\"Result of command: {:?}\", result);\n } else {\n debug!(\"No subcommand\");\n }\n\n module.shutdown(&rt);\n } else {\n \/\/ Err(ModuleError::mk(\"No commandline call\"))\n info!(\"No commandline call...\")\n }\n\n\n info!(\"Hello, world!\");\n}\n<commit_msg>Use prettytable to implement TablePrinter<commit_after>#[macro_use] extern crate clap;\n#[macro_use] extern crate log;\n#[macro_use] extern crate serde;\n#[macro_use] extern crate serde_json;\n#[macro_use] extern crate glob;\n#[macro_use] extern crate uuid;\n#[macro_use] extern crate regex;\n#[macro_use] extern crate prettytable;\nextern crate config;\n\nuse cli::CliConfig;\nuse configuration::Configuration;\nuse runtime::{ImagLogger, Runtime};\nuse clap::App;\nuse module::Module;\nuse module::ModuleError;\nuse 
module::CommandEnv;\nuse module::bm::BMModule;\nuse storage::backend::StorageBackend;\n\nmod cli;\nmod configuration;\nmod runtime;\nmod module;\nmod storage;\nmod ui;\n\nfn main() {\n let yaml = load_yaml!(\"..\/etc\/cli.yml\");\n let app = App::from_yaml(yaml);\n let config = CliConfig::new(app);\n let configuration = Configuration::new(&config);\n\n let logger = ImagLogger::init(&configuration, &config);\n debug!(\"Logger created!\");\n\n debug!(\"CliConfig : {:?}\", &config);\n debug!(\"Configuration: {:?}\", &configuration);\n\n let rt = Runtime::new(configuration, config);\n\n debug!(\"Runtime : {:?}\", &rt);\n\n if let Some(matches) = rt.config.cli_matches.subcommand_matches(\"bm\") {\n let module = BMModule::new(&rt);\n let commands = module.get_commands(&rt);\n if let Some(command) = matches.subcommand_name() {\n debug!(\"Subcommand: {}\", command);\n\n let backend = StorageBackend::new(&rt);\n\n let cmdenv = CommandEnv {\n rt: &rt,\n bk: &backend,\n matches: matches.subcommand_matches(command).unwrap(),\n };\n\n let result = match commands.get(command) {\n Some(f) => f(&module, cmdenv),\n None => Err(ModuleError::new(\"No subcommand found\")),\n };\n\n debug!(\"Result of command: {:?}\", result);\n } else {\n debug!(\"No subcommand\");\n }\n\n module.shutdown(&rt);\n } else {\n \/\/ Err(ModuleError::mk(\"No commandline call\"))\n info!(\"No commandline call...\")\n }\n\n\n info!(\"Hello, world!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reverting back to old compile! macro<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The Rust Prelude\n\/\/!\n\/\/! Rust comes with a variety of things in its standard library. However, if\n\/\/! you had to manually import every single thing that you used, it would be\n\/\/! very verbose. But importing a lot of things that a program never uses isn't\n\/\/! good either. A balance needs to be struck.\n\/\/!\n\/\/! The *prelude* is the list of things that Rust automatically imports into\n\/\/! every Rust program. It's kept as small as possible, and is focused on\n\/\/! things, particuarly traits, which are used in almost every single Rust\n\/\/! program.\n\/\/!\n\/\/! On a technical level, Rust inserts\n\/\/!\n\/\/! ```ignore\n\/\/! extern crate std;\n\/\/! ```\n\/\/!\n\/\/! into the crate root of every crate, and\n\/\/!\n\/\/! ```ignore\n\/\/! use std::prelude::v1::*;\n\/\/! ```\n\/\/!\n\/\/! into every module.\n\/\/!\n\/\/! # Other preludes\n\/\/!\n\/\/! Preludes can be seen as a pattern to make using multiple types more\n\/\/! convenient. As such, you'll find other preludes in the standard library,\n\/\/! such as [`std::io::prelude`]. Various libraries in the Rust ecosystem may\n\/\/! also define their own preludes.\n\/\/!\n\/\/! [`std::io::prelude`]: ..\/io\/prelude\/index.html\n\/\/!\n\/\/! The differece between 'the prelude' and these other preludes is that they\n\/\/! are not automatically `use`'d, and must be imported manually. This is still\n\/\/! 
easier than importing all of their consitutent components.\n\/\/!\n\/\/! # Prelude contents\n\/\/!\n\/\/! The current version of the prelude (version 1) lives in\n\/\/! [`std::prelude::v1`], and reexports the following.\n\/\/!\n\/\/! * [`std::marker`]::{[`Copy`], [`Send`], [`Sized`], [`Sync`]}. The marker\n\/\/! traits indicate fundamental properties of types.\n\/\/! * [`std::ops`]::{[`Drop`], [`Fn`], [`FnMut`], [`FnOnce`]}. Various\n\/\/! operations for both destuctors and overloading `()`.\n\/\/! * [`std::mem`]::[`drop`], a convenience function for explicitly dropping a\n\/\/! value.\n\/\/! * [`std::boxed`]::[`Box`], a way to allocate values on the heap.\n\/\/! * [`std::borrow`]::[`ToOwned`], The conversion trait that defines\n\/\/! [`to_owned()`], the generic method for creating an owned type from a\n\/\/! borrowed type.\n\/\/! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines [`clone()`],\n\/\/! the method for producing a copy of a value.\n\/\/! * [`std::cmp`]::{[`PartialEq`], [`PartialOrd`], [`Eq`], [`Ord`] }. The\n\/\/! comparison traits, which implement the comparison operators and are often\n\/\/! seen in trait bounds.\n\/\/! * [`std::convert`]::{[`AsRef`], [`AsMut`], [`Into`], [`From`]}. Generic\n\/\/! conversions, used by savvy API authors to create overloaded methods.\n\/\/! * [`std::default`]::[`Default`], types that have default values.\n\/\/! * [`std::iter`]::{[`Iterator`], [`Extend`], [`IntoIterator`],\n\/\/! [`DoubleEndedIterator`], [`ExactSizeIterator`]}. Iterators of various\n\/\/! kinds.\n\/\/! * [`std::option`]::[`Option`]::{`self`, `Some`, `None`}. A type which\n\/\/! expresses the presence or absence of a value. This type is so commonly\n\/\/! used, its variants are also exported.\n\/\/! * [`std::result`]::[`Result`]::{`self`, `Ok`, `Err`}. A type for functions\n\/\/! that may succeed or fail. Like [`Option`], its variants are exported as\n\/\/! well.\n\/\/! * [`std::slice`]::[`SliceConcatExt`], a trait that exists for technical\n\/\/! reasons, but shouldn't have to exist. It provides a few useful methods on\n\/\/! slices.\n\/\/! * [`std::string`]::{[`String`], [`ToString`]}, heap allocated strings.\n\/\/! * [`std::vec`]::[`Vec`](..\/vec\/struct.Vec.html), a growable, heap-allocated\n\/\/! vector.\n\/\/!\n\/\/! [`AsMut`]: ..\/convert\/trait.AsMut.html\n\/\/! [`AsRef`]: ..\/convert\/trait.AsRef.html\n\/\/! [`Box`]: ..\/boxed\/struct.Box.html\n\/\/! [`Clone`]: ..\/clone\/trait.Clone.html\n\/\/! [`Copy`]: ..\/marker\/trait.Copy.html\n\/\/! [`Default`]: ..\/default\/trait.Default.html\n\/\/! [`DoubleEndedIterator`]: ..\/iter\/trait.DoubleEndedIterator.html\n\/\/! [`Drop`]: ..\/ops\/trait.Drop.html\n\/\/! [`Eq`]: ..\/cmp\/trait.Eq.html\n\/\/! [`ExactSizeIterator`]: ..\/iter\/trait.ExactSizeIterator.html\n\/\/! [`Extend`]: ..\/iter\/trait.Extend.html\n\/\/! [`FnMut`]: ..\/ops\/trait.FnMut.html\n\/\/! [`FnOnce`]: ..\/ops\/trait.FnOnce.html\n\/\/! [`Fn`]: ..\/ops\/trait.Fn.html\n\/\/! [`From`]: ..\/convert\/trait.From.html\n\/\/! [`IntoIterator`]: ..\/iter\/trait.IntoIterator.html\n\/\/! [`Into`]: ..\/convert\/trait.Into.html\n\/\/! [`Iterator`]: ..\/iter\/trait.Iterator.html\n\/\/! [`Option`]: ..\/option\/enum.Option.html\n\/\/! [`Ord`]: ..\/cmp\/trait.Ord.html\n\/\/! [`PartialEq`]: ..\/cmp\/trait.PartialEq.html\n\/\/! [`PartialOrd`]: ..\/cmp\/trait.PartialOrd.html\n\/\/! [`Result`]: ..\/result\/enum.Result.html\n\/\/! [`Send`]: ..\/marker\/trait.Send.html\n\/\/! [`Sized`]: ..\/marker\/trait.Sized.html\n\/\/! 
[`SliceConcatExt`]: ..\/slice\/trait.SliceConcatExt.html\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`Sync`]: ..\/marker\/trait.Sync.html\n\/\/! [`ToOwned`]: ..\/borrow\/trait.ToOwned.html\n\/\/! [`ToString`]: ..\/string\/trait.ToString.html\n\/\/! [`Vec`]: ..\/vec\/struct.Vec.html\n\/\/! [`clone()`]: ..\/clone\/trait.Clone.html#tymethod.clone\n\/\/! [`drop`]: ..\/mem\/fn.drop.html\n\/\/! [`std::borrow`]: ..\/borrow\/index.html\n\/\/! [`std::boxed`]: ..\/boxed\/index.html\n\/\/! [`std::clone`]: ..\/clone\/index.html\n\/\/! [`std::cmp`]: ..\/cmp\/index.html\n\/\/! [`std::convert`]: ..\/convert\/index.html\n\/\/! [`std::default`]: ..\/default\/index.html\n\/\/! [`std::iter`]: ..\/iter\/index.html\n\/\/! [`std::marker`]: ..\/marker\/index.html\n\/\/! [`std::mem`]: ..\/mem\/index.html\n\/\/! [`std::ops`]: ..\/ops\/index.html\n\/\/! [`std::option`]: ..\/option\/index.html\n\/\/! [`std::prelude::v1`]: v1\/index.html\n\/\/! [`std::result`]: ..\/result\/index.html\n\/\/! [`std::slice`]: ..\/slice\/index.html\n\/\/! [`std::string`]: ..\/string\/index.html\n\/\/! [`std::vec`]: ..\/vec\/index.html\n\/\/! [`to_owned()`]: ..\/borrow\/trait.ToOwned.html#tymethod.to_owned\n\/\/! [book-closures]: ..\/..\/book\/closures.html\n\/\/! [book-dtor]: ..\/..\/book\/drop.html\n\/\/! [book-enums]: ..\/..\/book\/enums.html\n\/\/! [book-iter]: ..\/..\/book\/iterators.html\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub mod v1;\n<commit_msg>doc: fix typo<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The Rust Prelude\n\/\/!\n\/\/! Rust comes with a variety of things in its standard library. However, if\n\/\/! you had to manually import every single thing that you used, it would be\n\/\/! very verbose. But importing a lot of things that a program never uses isn't\n\/\/! good either. A balance needs to be struck.\n\/\/!\n\/\/! The *prelude* is the list of things that Rust automatically imports into\n\/\/! every Rust program. It's kept as small as possible, and is focused on\n\/\/! things, particuarly traits, which are used in almost every single Rust\n\/\/! program.\n\/\/!\n\/\/! On a technical level, Rust inserts\n\/\/!\n\/\/! ```ignore\n\/\/! extern crate std;\n\/\/! ```\n\/\/!\n\/\/! into the crate root of every crate, and\n\/\/!\n\/\/! ```ignore\n\/\/! use std::prelude::v1::*;\n\/\/! ```\n\/\/!\n\/\/! into every module.\n\/\/!\n\/\/! # Other preludes\n\/\/!\n\/\/! Preludes can be seen as a pattern to make using multiple types more\n\/\/! convenient. As such, you'll find other preludes in the standard library,\n\/\/! such as [`std::io::prelude`]. Various libraries in the Rust ecosystem may\n\/\/! also define their own preludes.\n\/\/!\n\/\/! [`std::io::prelude`]: ..\/io\/prelude\/index.html\n\/\/!\n\/\/! The difference between 'the prelude' and these other preludes is that they\n\/\/! are not automatically `use`'d, and must be imported manually. This is still\n\/\/! easier than importing all of their consitutent components.\n\/\/!\n\/\/! # Prelude contents\n\/\/!\n\/\/! 
The current version of the prelude (version 1) lives in\n\/\/! [`std::prelude::v1`], and reexports the following.\n\/\/!\n\/\/! * [`std::marker`]::{[`Copy`], [`Send`], [`Sized`], [`Sync`]}. The marker\n\/\/! traits indicate fundamental properties of types.\n\/\/! * [`std::ops`]::{[`Drop`], [`Fn`], [`FnMut`], [`FnOnce`]}. Various\n\/\/! operations for both destuctors and overloading `()`.\n\/\/! * [`std::mem`]::[`drop`], a convenience function for explicitly dropping a\n\/\/! value.\n\/\/! * [`std::boxed`]::[`Box`], a way to allocate values on the heap.\n\/\/! * [`std::borrow`]::[`ToOwned`], The conversion trait that defines\n\/\/! [`to_owned()`], the generic method for creating an owned type from a\n\/\/! borrowed type.\n\/\/! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines [`clone()`],\n\/\/! the method for producing a copy of a value.\n\/\/! * [`std::cmp`]::{[`PartialEq`], [`PartialOrd`], [`Eq`], [`Ord`] }. The\n\/\/! comparison traits, which implement the comparison operators and are often\n\/\/! seen in trait bounds.\n\/\/! * [`std::convert`]::{[`AsRef`], [`AsMut`], [`Into`], [`From`]}. Generic\n\/\/! conversions, used by savvy API authors to create overloaded methods.\n\/\/! * [`std::default`]::[`Default`], types that have default values.\n\/\/! * [`std::iter`]::{[`Iterator`], [`Extend`], [`IntoIterator`],\n\/\/! [`DoubleEndedIterator`], [`ExactSizeIterator`]}. Iterators of various\n\/\/! kinds.\n\/\/! * [`std::option`]::[`Option`]::{`self`, `Some`, `None`}. A type which\n\/\/! expresses the presence or absence of a value. This type is so commonly\n\/\/! used, its variants are also exported.\n\/\/! * [`std::result`]::[`Result`]::{`self`, `Ok`, `Err`}. A type for functions\n\/\/! that may succeed or fail. Like [`Option`], its variants are exported as\n\/\/! well.\n\/\/! * [`std::slice`]::[`SliceConcatExt`], a trait that exists for technical\n\/\/! reasons, but shouldn't have to exist. It provides a few useful methods on\n\/\/! slices.\n\/\/! * [`std::string`]::{[`String`], [`ToString`]}, heap allocated strings.\n\/\/! * [`std::vec`]::[`Vec`](..\/vec\/struct.Vec.html), a growable, heap-allocated\n\/\/! vector.\n\/\/!\n\/\/! [`AsMut`]: ..\/convert\/trait.AsMut.html\n\/\/! [`AsRef`]: ..\/convert\/trait.AsRef.html\n\/\/! [`Box`]: ..\/boxed\/struct.Box.html\n\/\/! [`Clone`]: ..\/clone\/trait.Clone.html\n\/\/! [`Copy`]: ..\/marker\/trait.Copy.html\n\/\/! [`Default`]: ..\/default\/trait.Default.html\n\/\/! [`DoubleEndedIterator`]: ..\/iter\/trait.DoubleEndedIterator.html\n\/\/! [`Drop`]: ..\/ops\/trait.Drop.html\n\/\/! [`Eq`]: ..\/cmp\/trait.Eq.html\n\/\/! [`ExactSizeIterator`]: ..\/iter\/trait.ExactSizeIterator.html\n\/\/! [`Extend`]: ..\/iter\/trait.Extend.html\n\/\/! [`FnMut`]: ..\/ops\/trait.FnMut.html\n\/\/! [`FnOnce`]: ..\/ops\/trait.FnOnce.html\n\/\/! [`Fn`]: ..\/ops\/trait.Fn.html\n\/\/! [`From`]: ..\/convert\/trait.From.html\n\/\/! [`IntoIterator`]: ..\/iter\/trait.IntoIterator.html\n\/\/! [`Into`]: ..\/convert\/trait.Into.html\n\/\/! [`Iterator`]: ..\/iter\/trait.Iterator.html\n\/\/! [`Option`]: ..\/option\/enum.Option.html\n\/\/! [`Ord`]: ..\/cmp\/trait.Ord.html\n\/\/! [`PartialEq`]: ..\/cmp\/trait.PartialEq.html\n\/\/! [`PartialOrd`]: ..\/cmp\/trait.PartialOrd.html\n\/\/! [`Result`]: ..\/result\/enum.Result.html\n\/\/! [`Send`]: ..\/marker\/trait.Send.html\n\/\/! [`Sized`]: ..\/marker\/trait.Sized.html\n\/\/! [`SliceConcatExt`]: ..\/slice\/trait.SliceConcatExt.html\n\/\/! [`String`]: ..\/string\/struct.String.html\n\/\/! [`Sync`]: ..\/marker\/trait.Sync.html\n\/\/! 
[`ToOwned`]: ..\/borrow\/trait.ToOwned.html\n\/\/! [`ToString`]: ..\/string\/trait.ToString.html\n\/\/! [`Vec`]: ..\/vec\/struct.Vec.html\n\/\/! [`clone()`]: ..\/clone\/trait.Clone.html#tymethod.clone\n\/\/! [`drop`]: ..\/mem\/fn.drop.html\n\/\/! [`std::borrow`]: ..\/borrow\/index.html\n\/\/! [`std::boxed`]: ..\/boxed\/index.html\n\/\/! [`std::clone`]: ..\/clone\/index.html\n\/\/! [`std::cmp`]: ..\/cmp\/index.html\n\/\/! [`std::convert`]: ..\/convert\/index.html\n\/\/! [`std::default`]: ..\/default\/index.html\n\/\/! [`std::iter`]: ..\/iter\/index.html\n\/\/! [`std::marker`]: ..\/marker\/index.html\n\/\/! [`std::mem`]: ..\/mem\/index.html\n\/\/! [`std::ops`]: ..\/ops\/index.html\n\/\/! [`std::option`]: ..\/option\/index.html\n\/\/! [`std::prelude::v1`]: v1\/index.html\n\/\/! [`std::result`]: ..\/result\/index.html\n\/\/! [`std::slice`]: ..\/slice\/index.html\n\/\/! [`std::string`]: ..\/string\/index.html\n\/\/! [`std::vec`]: ..\/vec\/index.html\n\/\/! [`to_owned()`]: ..\/borrow\/trait.ToOwned.html#tymethod.to_owned\n\/\/! [book-closures]: ..\/..\/book\/closures.html\n\/\/! [book-dtor]: ..\/..\/book\/drop.html\n\/\/! [book-enums]: ..\/..\/book\/enums.html\n\/\/! [book-iter]: ..\/..\/book\/iterators.html\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\npub mod v1;\n<|endoftext|>"} {"text":"<commit_before>\/\/! Blanket impls for Middleware.\n\/\/! This is pre-implemented for any function which takes a\n\/\/! `Request` and `Response` parameter and returns anything\n\/\/! implementing the `ResponseFinalizer` trait. It is also\n\/\/! implemented for a tuple of a function and a type `T`.\n\/\/! The function must take a `Request`, a `Response` and a\n\/\/! `T`, returning anything that implements `ResponseFinalizer`.\n\/\/! The data of type `T` will then be shared and available\n\/\/! in any request.\n\/\/!\n\/\/! 
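For example, a plain function can serve as a handler (a minimal, illustrative sketch; the name is hypothetical):\n\/\/!\n\/\/! ```ignore\n\/\/! fn hello(_req: &Request, _res: &mut Response) -> &'static str {\n\/\/!     \"Hello world!\"\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! 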
Please see the examples for usage.\n\nuse request::Request;\nuse response::Response;\nuse hyper::status::StatusCode;\nuse http::headers;\nuse std::fmt::Display;\nuse std::num::FromPrimitive;\nuse middleware::{Middleware, MiddlewareResult, Halt};\nuse serialize::json;\nuse mimes::MediaType;\n\nimpl<R> Middleware for fn(&Request, &mut Response) -> R\n where R: ResponseFinalizer {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let r = (*self)(req, res);\n r.respond(res)\n }\n}\n\nimpl<T, R> Middleware for (fn(&Request, &mut Response, &T) -> R, T)\n where T: Send + Sync + 'static, R: ResponseFinalizer + 'static {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let (f, ref data) = *self;\n let r = f(req, res, data);\n r.respond(res)\n }\n}\n\nimpl<R> Middleware for fn(&mut Request, &mut Response) -> R\n where R: ResponseFinalizer {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let r = (*self)(req, res);\n r.respond(res)\n }\n}\n\nimpl<T, R> Middleware for (fn(&mut Request, &mut Response, &T) -> R, T)\n where T: Send + Sync + 'static, R: ResponseFinalizer + 'static {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let (f, ref data) = *self;\n let r = f(req, res, data);\n r.respond(res)\n }\n}\n\n\/\/\/ This trait provides convenience for translating a number\n\/\/\/ of common return types into a `MiddlewareResult` while\n\/\/\/ also modifying the `Response` as required.\n\/\/\/\n\/\/\/ Please see the examples for some uses.\npub trait ResponseFinalizer {\n fn respond(self, &mut Response) -> MiddlewareResult;\n}\n\nimpl ResponseFinalizer for () {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n Ok(Halt)\n }\n}\n\nimpl ResponseFinalizer for MiddlewareResult {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n self\n }\n}\n\nimpl ResponseFinalizer for json::Json {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Json);\n \/\/ FIXME: remove unwrap\n res.send(json::encode(&self).unwrap());\n Ok(Halt)\n }\n}\n\nimpl<'a, S: Display> ResponseFinalizer for &'a [S] {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n res.origin.status = StatusCode::Ok;\n for ref s in self.iter() {\n \/\/ FIXME : failure unhandled\n let _ = write!(res.origin, \"{}\", s);\n }\n Ok(Halt)\n }\n}\n\nmacro_rules! 
dual_impl {\n ($view:ty, $alloc:ty, |$s:ident, $res:ident| $b:block) => (\n impl<'a> ResponseFinalizer for $view {\n fn respond($s, $res: &mut Response) -> MiddlewareResult $b\n }\n\n impl ResponseFinalizer for $alloc {\n fn respond($s, $res: &mut Response) -> MiddlewareResult $b\n }\n )\n}\n\ndual_impl!(&'a str,\n String,\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n res.origin.status = StatusCode::Ok;\n res.send(self);\n Ok(Halt)\n });\n\ndual_impl!((StatusCode, &'a str),\n (StatusCode, String),\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n let (status, data) = self;\n res.origin.status = status;\n res.send(data);\n Ok(Halt)\n });\n\ndual_impl!((usize, &'a str),\n (usize, String),\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n let (status, data) = self;\n match FromPrimitive::from_uint(status) {\n Some(status) => {\n res.origin.status = status;\n res.send(data);\n Ok(Halt)\n }\n \/\/ This is a logic error\n None => panic!(\"Bad status code\")\n }\n });\n\ndual_impl!((StatusCode, &'a str, Vec<headers::response::Header>),\n (StatusCode, String, Vec<headers::response::Header>),\n |self, res| {\n let (status, data, headers) = self;\n\n res.origin.status = status;\n for header in headers.into_iter() {\n res.origin.headers.insert(header);\n }\n maybe_set_type(res, MediaType::Html);\n res.send(data);\n Ok(Halt)\n });\n\nfn maybe_set_type(res: &mut Response, ty: MediaType) {\n if res.origin.headers.content_type.is_none() {\n res.content_type(ty);\n }\n}\n<commit_msg>refactor(hyper): depreciate one of the blanket impls for now<commit_after>\/\/! Blanket impls for Middleware.\n\/\/! This is pre-implemented for any function which takes a\n\/\/! `Request` and `Response` parameter and returns anything\n\/\/! implementing the `ResponseFinalizer` trait. It is also\n\/\/! implemented for a tuple of a function and a type `T`.\n\/\/! The function must take a `Request`, a `Response` and a\n\/\/! `T`, returning anything that implements `ResponseFinalizer`.\n\/\/! The data of type `T` will then be shared and available\n\/\/! in any request.\n\/\/!\n\/\/! 
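For example, a plain function can serve as a handler (a minimal, illustrative sketch; the name is hypothetical):\n\/\/!\n\/\/! ```ignore\n\/\/! fn hello(_req: &Request, _res: &mut Response) -> &'static str {\n\/\/!     \"Hello world!\"\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! 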
Please see the examples for usage.\n\nuse request::Request;\nuse response::Response;\nuse hyper::status::StatusCode;\nuse std::fmt::Display;\nuse std::num::FromPrimitive;\n\/\/ use hyper::header::{Header, HeaderFormat};\nuse middleware::{Middleware, MiddlewareResult, Halt};\nuse serialize::json;\nuse mimes::MediaType;\n\nimpl<R> Middleware for fn(&Request, &mut Response) -> R\n where R: ResponseFinalizer {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let r = (*self)(req, res);\n r.respond(res)\n }\n}\n\nimpl<T, R> Middleware for (fn(&Request, &mut Response, &T) -> R, T)\n where T: Send + Sync + 'static, R: ResponseFinalizer + 'static {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let (f, ref data) = *self;\n let r = f(req, res, data);\n r.respond(res)\n }\n}\n\nimpl<R> Middleware for fn(&mut Request, &mut Response) -> R\n where R: ResponseFinalizer {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let r = (*self)(req, res);\n r.respond(res)\n }\n}\n\nimpl<T, R> Middleware for (fn(&mut Request, &mut Response, &T) -> R, T)\n where T: Send + Sync + 'static, R: ResponseFinalizer + 'static {\n fn invoke<'a, 'b>(&self, req: &mut Request<'a, 'b>, res: &mut Response) -> MiddlewareResult {\n let (f, ref data) = *self;\n let r = f(req, res, data);\n r.respond(res)\n }\n}\n\n\/\/\/ This trait provides convenience for translating a number\n\/\/\/ of common return types into a `MiddlewareResult` while\n\/\/\/ also modifying the `Response` as required.\n\/\/\/\n\/\/\/ Please see the examples for some uses.\npub trait ResponseFinalizer {\n fn respond(self, &mut Response) -> MiddlewareResult;\n}\n\nimpl ResponseFinalizer for () {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n Ok(Halt)\n }\n}\n\nimpl ResponseFinalizer for MiddlewareResult {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n self\n }\n}\n\nimpl ResponseFinalizer for json::Json {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Json);\n \/\/ FIXME: remove unwrap\n res.send(json::encode(&self).unwrap());\n Ok(Halt)\n }\n}\n\nimpl<'a, S: Display> ResponseFinalizer for &'a [S] {\n fn respond(self, res: &mut Response) -> MiddlewareResult {\n maybe_set_type(res, MediaType::Html);\n res.origin.status = StatusCode::Ok;\n for ref s in self.iter() {\n \/\/ FIXME : failure unhandled\n let _ = write!(res.origin, \"{}\", s);\n }\n Ok(Halt)\n }\n}\n\nmacro_rules! 
dual_impl {\n ($view:ty, $alloc:ty, |$s:ident, $res:ident| $b:block) => (\n impl<'a> ResponseFinalizer for $view {\n fn respond($s, $res: &mut Response) -> MiddlewareResult $b\n }\n\n impl ResponseFinalizer for $alloc {\n fn respond($s, $res: &mut Response) -> MiddlewareResult $b\n }\n )\n}\n\ndual_impl!(&'a str,\n String,\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n res.origin.status = StatusCode::Ok;\n res.send(self);\n Ok(Halt)\n });\n\ndual_impl!((StatusCode, &'a str),\n (StatusCode, String),\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n let (status, data) = self;\n res.origin.status = status;\n res.send(data);\n Ok(Halt)\n });\n\ndual_impl!((usize, &'a str),\n (usize, String),\n |self, res| {\n maybe_set_type(res, MediaType::Html);\n let (status, data) = self;\n match FromPrimitive::from_uint(status) {\n Some(status) => {\n res.origin.status = status;\n res.send(data);\n Ok(Halt)\n }\n \/\/ This is a logic error\n None => panic!(\"Bad status code\")\n }\n });\n\n\/\/ FIXME: Hyper uses traits for headers, so this needs to be a Vec of\n\/\/ trait objects. But, a trait object is unable to have Foo + Bar as a bound.\n\/\/\n\/\/ A better\/faster solution would be to impl this for tuples,\n\/\/ where each tuple element implements the Header trait, which would give a\n\/\/ static dispatch.\n\/\/ dual_impl!((StatusCode, &'a str, Vec<Box<ResponseHeader>>),\n\/\/ (StatusCode, String, Vec<Box<ResponseHeader>>)\n\/\/ |self, res| {\n\/\/ let (status, data, headers) = self;\n\n\/\/ res.origin.status = status;\n\/\/ for header in headers.into_iter() {\n\/\/ res.origin.headers_mut().set(header);\n\/\/ }\n\/\/ maybe_set_type(res, MediaType::Html);\n\/\/ res.send(data);\n\/\/ Ok(Halt)\n\/\/ })\n\nfn maybe_set_type(res: &mut Response, ty: MediaType) {\n if res.origin.headers.content_type.is_none() {\n res.content_type(ty);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example code to get a project id (from its iid, the value shown in the URL)<commit_after>extern crate gitlab_api as gitlab;\n\nuse std::env;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\n#[macro_use]\nextern crate clap;\n\n\nuse gitlab::GitLab;\nuse gitlab::issues;\nuse gitlab::Lister;\n\nuse gitlab::errors::*;\n\n\nfn main() {\n if let Err(ref e) = run() {\n println!(\"error: {}\", e);\n\n for e in e.iter().skip(1) {\n println!(\"caused by: {}\", e);\n }\n\n \/\/ The backtrace is not always generated. Try to run this example\n \/\/ with `RUST_BACKTRACE=1`.\n if let Some(backtrace) = e.backtrace() {\n println!(\"backtrace: {:?}\", backtrace);\n }\n\n ::std::process::exit(1);\n }\n}\n\nfn run() -> Result<()> {\n env_logger::init().unwrap();\n info!(\"starting up\");\n\n let hostname = match env::var(\"GITLAB_HOSTNAME\") {\n Ok(val) => val,\n Err(_) => {\n let default = String::from(\"gitlab.com\");\n println!(\"Please set environment variable 'GITLAB_HOSTNAME'. Using default '{}'.\",\n default);\n default\n }\n };\n\n let token = match env::var(\"GITLAB_TOKEN\") {\n Ok(val) => val,\n Err(_) => {\n panic!(\"Please set environment variable 'GITLAB_TOKEN'. 
Take it from \\\n http:\/\/{}\/profile\/account\",\n hostname);\n }\n };\n\n let gl = GitLab::new(&hostname, &token).chain_err(|| \"failure to create GitLab instance\")?;\n \/\/ let gl = GitLab::new(&hostname, &token).chain_err(|| \"failure to create GitLab instance\")?.scheme(\"http\").port(80);\n \/\/ let gl = gl.scheme(\"http\").port(80);\n\n let matches = clap::App::new(\"get_id_project\")\n .version(\"1.0\")\n .author(\"Nicolas Bigaouette <nbigaouette@gmail.com>\")\n .about(\"Get the id of a GitLab project from namespace\/project.\")\n .arg(clap::Arg::with_name(\"namespace\")\n .help(\"The project's namespace (or group)\")\n .long(\"namespace\")\n .short(\"n\")\n .takes_value(true)\n .required(true))\n .arg(clap::Arg::with_name(\"project\")\n .help(\"The project's name\")\n .long(\"project\")\n .short(\"p\")\n .takes_value(true)\n .required(true))\n .get_matches();\n\n let project_namespace = matches.value_of(\"namespace\").unwrap();\n let project_name = matches.value_of(\"project\").unwrap();\n\n let project = gl.get_project(project_namespace, project_name).chain_err(|| \"cannot get project\")?;\n \/\/ println!(\"project: {:?}\", project);\n\n println!(\"Id for {}\/{}: {}\", project_namespace, project_name, project.id);\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::ops::Deref;\nuse std::process::exit;\n\nuse clap::ArgMatches;\nuse regex::Regex;\n\nuse storage::file::File;\nuse storage::file::hash::FileHash;\nuse storage::json::parser::JsonHeaderParser;\nuse storage::parser::FileHeaderParser;\nuse storage::parser::Parser;\n\npub trait CliFileFilter {\n\n fn filter_file(&self, &Rc<RefCell<File>>) -> bool;\n\n fn not(self) -> CliFileFilterNot\n where Self: Sized + 'static\n {\n CliFileFilterNot {\n a: Box::new(self),\n }\n }\n\n fn or(self, other: Box<CliFileFilter>) -> CliFileFilterOr\n where Self: Sized + 'static\n {\n CliFileFilterOr {\n a: Box::new(self),\n b: other\n }\n }\n\n fn and(self, other: Box<CliFileFilter>) -> CliFileFilterAnd\n where Self: Sized + 'static\n {\n CliFileFilterAnd {\n a: Box::new(self),\n b: other\n }\n }\n\n}\n\npub struct CliFileFilterNot {\n a: Box<CliFileFilter>,\n}\n\nimpl CliFileFilter for CliFileFilterNot {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n !self.a.filter_file(f)\n }\n\n}\n\npub struct CliFileFilterOr {\n a: Box<CliFileFilter>,\n b: Box<CliFileFilter>\n}\n\nimpl CliFileFilter for CliFileFilterOr {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n self.a.filter_file(f) || self.b.filter_file(f)\n }\n\n}\n\npub struct CliFileFilterAnd {\n a: Box<CliFileFilter>,\n b: Box<CliFileFilter>\n}\n\nimpl CliFileFilter for CliFileFilterAnd {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n self.a.filter_file(f) && self.b.filter_file(f)\n }\n\n}\n\npub struct CliFileFilterByHash {\n default: bool,\n hash: Option<FileHash>,\n}\n\nimpl CliFileFilter for CliFileFilterByHash {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n self.hash.clone().map(|h| {\n debug!(\"Filtering file with hash = {}\", h);\n let f = file.deref().borrow();\n f.id().get_id() == h\n })\n .unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByDataRegex {\n default: bool,\n regex: Option<Regex>,\n}\n\nimpl CliFileFilter for CliFileFilterByDataRegex {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n self.regex.clone().map(|r| {\n debug!(\"Filtering file with regex = {:?}\", r);\n let f = file.deref().borrow();\n r.is_match(&f.data()[..])\n })\n 
.unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByHeaderRegex {\n default: bool,\n header_field_name: &'static str,\n regex: Option<Regex>,\n}\n\nimpl CliFileFilter for CliFileFilterByHeaderRegex {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n use module::helpers::header::data::get_named_text_from_header;\n\n self.regex.clone().map(|r| {\n debug!(\"Filtering file (header field = {}) with regex = {:?}\", self.header_field_name, r);\n\n let f = file.deref().borrow();\n get_named_text_from_header(self.header_field_name, f.header())\n .map(|headerfield| r.is_match(&headerfield[..]))\n .unwrap_or(self.default)\n })\n .unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByTags {\n default: bool,\n tags: Option<Vec<String>>,\n}\n\nimpl CliFileFilter for CliFileFilterByTags {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n use module::helpers::header::tags::data::get_tags_from_header;\n\n self.tags.clone().map(|ts| {\n debug!(\"Filtering file with tags = {:?}\", ts);\n\n let f = file.deref().borrow();\n get_tags_from_header(f.header())\n .iter()\n .any(|tag| ts.iter().any(|remtag| remtag == tag))\n })\n .unwrap_or(self.default)\n }\n\n}\n\n\/*\n *\n *\n * Functions to generate filters\n *\n *\n *\/\n\npub fn create_hash_filter(matches: &ArgMatches, id_key: &'static str, default: bool) -> CliFileFilterByHash {\n CliFileFilterByHash {\n hash: matches.value_of(id_key).map(FileHash::from),\n default: default\n }\n}\n\npub fn create_content_grep_filter(matches: &ArgMatches, match_key: &'static str, default: bool) -> CliFileFilterByDataRegex {\n use std::process::exit;\n\n CliFileFilterByDataRegex {\n regex: matches.value_of(match_key).map(|m| {\n Regex::new(&m[..]).unwrap_or_else(|e| {\n error!(\"Regex compiler error: {}\", e);\n exit(1);\n })\n }),\n default: default,\n }\n}\n\npub fn create_text_header_field_grep_filter(matches: &ArgMatches,\n match_key: &'static str,\n header_field_name: &'static str,\n default: bool)\n -> CliFileFilterByHeaderRegex\n{\n CliFileFilterByHeaderRegex {\n default: default,\n header_field_name: header_field_name,\n regex: matches.value_of(match_key)\n .map(|m| {\n Regex::new(&m[..]).unwrap_or_else(|e| {\n error!(\"Regex compiler error: {}\", e);\n exit(1);\n })\n }),\n }\n}\n\npub fn create_tag_filter(matches: &ArgMatches, tag_key: &'static str, default: bool) -> CliFileFilterByTags {\n\n CliFileFilterByTags {\n default: default,\n tags: matches.value_of(tag_key)\n .map(|m| m.split(\",\")\n .map(String::from)\n .collect::<Vec<String>>()\n ),\n }\n}\n\n<commit_msg>Remove unused imports<commit_after>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::ops::Deref;\nuse std::process::exit;\n\nuse clap::ArgMatches;\nuse regex::Regex;\n\nuse storage::file::File;\nuse storage::file::hash::FileHash;\n\npub trait CliFileFilter {\n\n fn filter_file(&self, &Rc<RefCell<File>>) -> bool;\n\n fn not(self) -> CliFileFilterNot\n where Self: Sized + 'static\n {\n CliFileFilterNot {\n a: Box::new(self),\n }\n }\n\n fn or(self, other: Box<CliFileFilter>) -> CliFileFilterOr\n where Self: Sized + 'static\n {\n CliFileFilterOr {\n a: Box::new(self),\n b: other\n }\n }\n\n fn and(self, other: Box<CliFileFilter>) -> CliFileFilterAnd\n where Self: Sized + 'static\n {\n CliFileFilterAnd {\n a: Box::new(self),\n b: other\n }\n }\n\n}\n\npub struct CliFileFilterNot {\n a: Box<CliFileFilter>,\n}\n\nimpl CliFileFilter for CliFileFilterNot {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n !self.a.filter_file(f)\n }\n\n}\n\npub struct 
CliFileFilterOr {\n a: Box<CliFileFilter>,\n b: Box<CliFileFilter>\n}\n\nimpl CliFileFilter for CliFileFilterOr {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n self.a.filter_file(f) || self.b.filter_file(f)\n }\n\n}\n\npub struct CliFileFilterAnd {\n a: Box<CliFileFilter>,\n b: Box<CliFileFilter>\n}\n\nimpl CliFileFilter for CliFileFilterAnd {\n\n fn filter_file(&self, f: &Rc<RefCell<File>>) -> bool {\n self.a.filter_file(f) && self.b.filter_file(f)\n }\n\n}\n\npub struct CliFileFilterByHash {\n default: bool,\n hash: Option<FileHash>,\n}\n\nimpl CliFileFilter for CliFileFilterByHash {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n self.hash.clone().map(|h| {\n debug!(\"Filtering file with hash = {}\", h);\n let f = file.deref().borrow();\n f.id().get_id() == h\n })\n .unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByDataRegex {\n default: bool,\n regex: Option<Regex>,\n}\n\nimpl CliFileFilter for CliFileFilterByDataRegex {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n self.regex.clone().map(|r| {\n debug!(\"Filtering file with regex = {:?}\", r);\n let f = file.deref().borrow();\n r.is_match(&f.data()[..])\n })\n .unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByHeaderRegex {\n default: bool,\n header_field_name: &'static str,\n regex: Option<Regex>,\n}\n\nimpl CliFileFilter for CliFileFilterByHeaderRegex {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n use module::helpers::header::data::get_named_text_from_header;\n\n self.regex.clone().map(|r| {\n debug!(\"Filtering file (header field = {}) with regex = {:?}\", self.header_field_name, r);\n\n let f = file.deref().borrow();\n get_named_text_from_header(self.header_field_name, f.header())\n .map(|headerfield| r.is_match(&headerfield[..]))\n .unwrap_or(self.default)\n })\n .unwrap_or(self.default)\n }\n\n}\n\npub struct CliFileFilterByTags {\n default: bool,\n tags: Option<Vec<String>>,\n}\n\nimpl CliFileFilter for CliFileFilterByTags {\n\n fn filter_file(&self, file: &Rc<RefCell<File>>) -> bool {\n use module::helpers::header::tags::data::get_tags_from_header;\n\n self.tags.clone().map(|ts| {\n debug!(\"Filtering file with tags = {:?}\", ts);\n\n let f = file.deref().borrow();\n get_tags_from_header(f.header())\n .iter()\n .any(|tag| ts.iter().any(|remtag| remtag == tag))\n })\n .unwrap_or(self.default)\n }\n\n}\n\n\/*\n *\n *\n * Functions to generate filters\n *\n *\n *\/\n\npub fn create_hash_filter(matches: &ArgMatches, id_key: &'static str, default: bool) -> CliFileFilterByHash {\n CliFileFilterByHash {\n hash: matches.value_of(id_key).map(FileHash::from),\n default: default\n }\n}\n\npub fn create_content_grep_filter(matches: &ArgMatches, match_key: &'static str, default: bool) -> CliFileFilterByDataRegex {\n use std::process::exit;\n\n CliFileFilterByDataRegex {\n regex: matches.value_of(match_key).map(|m| {\n Regex::new(&m[..]).unwrap_or_else(|e| {\n error!(\"Regex compiler error: {}\", e);\n exit(1);\n })\n }),\n default: default,\n }\n}\n\npub fn create_text_header_field_grep_filter(matches: &ArgMatches,\n match_key: &'static str,\n header_field_name: &'static str,\n default: bool)\n -> CliFileFilterByHeaderRegex\n{\n CliFileFilterByHeaderRegex {\n default: default,\n header_field_name: header_field_name,\n regex: matches.value_of(match_key)\n .map(|m| {\n Regex::new(&m[..]).unwrap_or_else(|e| {\n error!(\"Regex compiler error: {}\", e);\n exit(1);\n })\n }),\n }\n}\n\npub fn create_tag_filter(matches: &ArgMatches, tag_key: &'static str, 
default: bool) -> CliFileFilterByTags {\n\n CliFileFilterByTags {\n default: default,\n tags: matches.value_of(tag_key)\n .map(|m| m.split(\",\")\n .map(String::from)\n .collect::<Vec<String>>()\n ),\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added guard example<commit_after>fn main() {\n let pair = (2, -2);\n \/\/ TODO ^ Try different values for `pair`\n\n println!(\"Tell me about {:?}\", pair);\n match pair {\n (x, y) if x == y => println!(\"These are twins\"),\n \/\/ The ^ `if condition` part is a guard\n (x, y) if x + y == 0 => println!(\"Antimatter, kaboom!\"),\n (x, _) if x % 2 == 1 => println!(\"The first one is odd\"),\n _ => println!(\"No correlation...\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs(examples): add simple_example<commit_after>extern crate rocks;\n\nuse rocks::prelude::*;\n\nconst DB_PATH: &str = \"\/tmp\/rocksdb_simple_example\";\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n \/\/ Optimize RocksDB. This is the easiest way to get RocksDB to perform well\n \/\/ NOTE: Is rust, Options is splited into 2 parts.\n let options = Options::default()\n .map_db_options(|db| db.create_if_missing(true).increase_parallelism(16))\n .map_cf_options(|cf| cf.optimize_level_style_compaction(512 * 1024 * 1024));\n\n \/\/ open DB\n let db = DB::open(&options, DB_PATH)?;\n\n \/\/ Put key-value\n db.put(WriteOptions::default_instance(), b\"key1\", b\"value\")?;\n\n \/\/ get value\n let value = db.get(ReadOptions::default_instance(), b\"key1\")?;\n assert_eq!(value, b\"value\");\n\n \/\/ atomically apply a set of updates\n {\n let mut batch = WriteBatch::default();\n batch.delete(b\"key1\");\n batch.put(b\"key2\", &value);\n\n db.write(WriteOptions::default_instance(), &batch)?;\n }\n\n let ret = db.get(ReadOptions::default_instance(), b\"key1\");\n assert!(ret.is_err() && ret.unwrap_err().is_not_found());\n\n let value = db.get(ReadOptions::default_instance(), b\"key2\")?;\n assert_eq!(value, b\"value\");\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(plugin)]\n#![plugin(afl_coverage_plugin)]\n\nextern crate afl_coverage;\nextern crate tar;\nextern crate nom;\n\nuse tar::*;\nuse nom::IResult;\nuse std::fs::File;\nuse std::io::{self, Read};\n\nfn main() {\n let mut contents: Vec<u8> = Vec::new();\n let result = io::stdin().read_to_end(&mut contents).unwrap();\n let tar = &contents[..];\n\n match parse_tar(tar) {\n IResult::Done(_, entries) => {\n for e in entries.iter() {\n \/\/println!(\"{:?}\", e);\n }\n }\n e => {\n \/\/println!(\"error or incomplete: {:?}\", e);\n \/\/panic!(\"cannot parse tar archive\");\n }\n }\n}\n<commit_msg>fuzzed: silence warnings<commit_after>#![feature(plugin)]\n#![plugin(afl_coverage_plugin)]\n\nextern crate afl_coverage;\nextern crate tar;\nextern crate nom;\n\nuse tar::*;\nuse nom::IResult;\nuse std::io::{self, Read};\n\nfn main() {\n let mut contents: Vec<u8> = Vec::new();\n io::stdin().read_to_end(&mut contents).unwrap();\n let tar = &contents[..];\n\n match parse_tar(tar) {\n IResult::Done(_, entries) => {\n for e in entries.iter() {\n println!(\"{:?}\", e);\n }\n }\n e => {\n println!(\"error or incomplete: {:?}\", e);\n panic!(\"cannot parse tar archive\");\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add redact endpoint.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Call 
`sdl2::init` to enable OpenGL 4.1<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an unit test for Builder corresponding to IRBuilder in C++ API.<commit_after>#![feature(libc)]\n\nextern crate libc;\nextern crate llvm;\n\nuse llvm::*;\nuse llvm::Attribute::*;\n\n#[test]\npub fn test() {\n let ctx = Context::new();\n let module = Module::new(\"simple\", &ctx);\n let func = module.add_function(\"fib\", Type::get::<fn(u64) -> u64>(&ctx));\n func.add_attributes(&[NoUnwind, ReadNone]);\n let value = &func[0];\n \n let entry = func.append(\"entry\");\n let then_bb = func.append(\"then_block\");\n let else_bb = func.append(\"else_block\");\n let merge_bb = func.append(\"merge_bb\");\n \n let builder = Builder::new(&ctx);\n builder.position_at_end(entry);\n \n let local1 = builder.create_alloca(Type::get::<u64>(&ctx));\n builder.create_store(8u64.compile(&ctx), local1);\n let local2 = builder.create_alloca(Type::get::<u64>(&ctx));\n builder.create_store(16u64.compile(&ctx), local2);\n \n let cond = builder.create_cmp(value, 5u64.compile(&ctx), Predicate::GreaterThan);\n builder.create_cond_br(cond, then_bb, Some(else_bb));\n \n builder.position_at_end(then_bb);\n let eight = builder.create_load(local1);\n builder.create_br(merge_bb);\n \n builder.position_at_end(else_bb);\n let sixteen = builder.create_load(local2);\n builder.create_br(merge_bb);\n \n builder.position_at_end(merge_bb);\n let phi = builder.create_phi(Type::get::<u64>(&ctx), \"cond\");\n phi.add_incoming(eight, then_bb);\n phi.add_incoming(sixteen, else_bb);\n builder.create_ret(phi);\n \n module.verify().unwrap();\n \n let ee = JitEngine::new(&module, JitOptions {opt_level: 0}).unwrap();\n ee.with_function(func, |fib: extern fn(u64) -> u64| {\n for i in 0..10 {\n println!(\"fib {} = {}\", i, fib(i))\n }\n });\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for the options<commit_after>\/\/ Copyright (c) 2018 Guillaume Pinot <texitoi(a)texitoi.eu>\n\/\/\n\/\/ This work is free. You can redistribute it and\/or modify it under\n\/\/ the terms of the Do What The Fuck You Want To Public License,\n\/\/ Version 2, as published by Sam Hocevar. 
See the COPYING file for\n\/\/ more details.\n\n#[macro_use]\nextern crate structopt;\n\nuse structopt::StructOpt;\n\n#[test]\nfn required_option() {\n #[derive(StructOpt, PartialEq, Debug)]\n struct Opt {\n #[structopt(short = \"a\", long = \"arg\")]\n arg: i32,\n }\n assert_eq!(Opt { arg: 42 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a42\"])));\n assert_eq!(Opt { arg: 42 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a\", \"42\"])));\n assert_eq!(Opt { arg: 42 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"--arg\", \"42\"])));\n assert!(Opt::clap().get_matches_from_safe(&[\"test\"]).is_err());\n assert!(Opt::clap().get_matches_from_safe(&[\"test\", \"-a42\", \"-a24\"]).is_err());\n}\n\n#[test]\nfn optional_option() {\n #[derive(StructOpt, PartialEq, Debug)]\n struct Opt {\n #[structopt(short = \"a\")]\n arg: Option<i32>,\n }\n assert_eq!(Opt { arg: Some(42) },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a42\"])));\n assert_eq!(Opt { arg: None },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\"])));\n assert!(Opt::clap().get_matches_from_safe(&[\"test\", \"-a42\", \"-a24\"]).is_err());\n}\n\n#[test]\nfn option_with_default() {\n #[derive(StructOpt, PartialEq, Debug)]\n struct Opt {\n #[structopt(short = \"a\", default_value = \"42\")]\n arg: i32,\n }\n assert_eq!(Opt { arg: 24 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a24\"])));\n assert_eq!(Opt { arg: 42 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\"])));\n assert!(Opt::clap().get_matches_from_safe(&[\"test\", \"-a42\", \"-a24\"]).is_err());\n}\n\n#[test]\nfn option_with_raw_default() {\n #[derive(StructOpt, PartialEq, Debug)]\n struct Opt {\n #[structopt(short = \"a\", raw(default_value = \"\\\"42\\\"\"))]\n arg: i32,\n }\n assert_eq!(Opt { arg: 24 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a24\"])));\n assert_eq!(Opt { arg: 42 },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\"])));\n assert!(Opt::clap().get_matches_from_safe(&[\"test\", \"-a42\", \"-a24\"]).is_err());\n}\n\n#[test]\nfn options() {\n #[derive(StructOpt, PartialEq, Debug)]\n struct Opt {\n #[structopt(short = \"a\", long = \"arg\")]\n arg: Vec<i32>,\n }\n assert_eq!(Opt { arg: vec![24] },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a24\"])));\n assert_eq!(Opt { arg: vec![] },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\"])));\n assert_eq!(Opt { arg: vec![24, 42] },\n Opt::from_clap(&Opt::clap().get_matches_from(&[\"test\", \"-a24\", \"--arg\", \"42\"])));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add eigen.rs by stivstivsti.<commit_after>\/*\r\neigen.rs 0.2\r\n\r\nThis piece of code is transpiled EigenvalueDecomposition.java from Jama framework.\r\nI had to do this, because I haven't found any buildable rust opensource project\r\nto calculate real eigen values in Rust. 
There are many packages which are usually bound to\r\nopenBLAS, but I can't build them with gnu toolchain, and that's the only toolchain, which\r\nallows gdb (and thus IDE) debug nowadays.\r\n\r\nQuality code is far from perfect, hopefully someone will appreciate my one day of\r\nmanual code conversion nightmare and mention me in the source code.\r\n\r\nStepan Yakovenko,\r\nhttps:\/\/github.com\/stiv-yakovenko\r\n\r\n*\/\r\nuse std::fmt;\r\nuse std::ops::Index;\r\nuse std::collections::VecDeque;\r\nuse std::ops::IndexMut;\r\nuse std::cmp;\r\nextern crate num_traits;\r\n\r\nuse eigen::num_traits::Float;\r\n\/\/use self::num_traits;\r\n\r\npub struct Matrix {\r\n data:VecDeque<f64>,\r\n n:usize\r\n}\r\nimpl Matrix {\r\n pub fn new(n:usize)->Matrix{\r\n let mut data = VecDeque::new();\r\n data.resize(n * n, 0.);\r\n Matrix{data:data,n:n}\r\n }\r\n}\r\nimpl fmt::Debug for Matrix {\r\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\r\n write!(f, \"{{\").ok();\r\n for r in 0..self.n {\r\n for c in 0..self.n {\r\n write!(f, \"{:.3?} \", self[[c,r]]).ok();\r\n }\r\n writeln!(f, \"\").ok();\r\n }\r\n write!(f, \"}}\")\r\n }\r\n}\r\nimpl Index<[usize; 2]> for Matrix{\r\n type Output = f64;\r\n fn index(&self, idx: [usize; 2]) -> &f64 {\r\n &self.data[idx[0]+self.n*idx[1]]\r\n }\r\n}\r\nimpl IndexMut<[usize; 2]> for Matrix{\r\n fn index_mut(&mut self, idx: [usize; 2]) -> &mut f64 {\r\n self.data.get_mut(idx[0]+self.n*idx[1]).unwrap()\r\n }\r\n}\r\n#[allow(dead_code)]\r\nfn cdiv(xr: f64, xi: f64, yr: f64, yi: f64) -> (f64, f64) {\r\n let r: f64;\r\n let d: f64;\r\n if yr.abs() > yi.abs() {\r\n r = yi \/ yr;\r\n d = yr + r * yi;\r\n ((xr + r * xi) \/ d, (xi - r * xr) \/ d)\r\n } else {\r\n r = yr \/ yi;\r\n d = yi + r * yr;\r\n ((r * xr + xi) \/ d, (r * xi - xr) \/ d)\r\n } \r\n}\r\n\r\n#[allow(dead_code)]\r\npub fn hqr2(n_in: usize, h: &mut Matrix, v: &mut Matrix, d: &mut Vec<f64>, e: &mut Vec<f64>) {\r\n \/\/ This is derived from the Algol procedure hqr2,\r\n \/\/ by Martin and Wilkinson, Handbook for Auto. 
Comp.,\r\n \/\/ Vol.ii-Linear Algebra, and the corresponding\r\n \/\/ Fortran subroutine in EISPACK.\r\n \/\/ Initialize\r\n let nn = n_in;\r\n let mut n = nn as i16 - 1;\r\n let low = 0;\r\n let high = nn - 1;\r\n let eps = (2.0).powf(-52.0);\r\n let mut exshift = 0.0;\r\n let mut p = 0.;\r\n let mut q = 0.;\r\n let mut r = 0.;\r\n let mut s = 0.;\r\n let mut z = 0.;\r\n let mut t;\r\n let mut w;\r\n let mut x;\r\n let mut y;\r\n \/\/ Store roots isolated by balanc and compute matrix norm\r\n let mut norm = 0.0;\r\n let mut i = 0 as usize;\r\n while i < nn {\r\n if i < low || i > high {\r\n d[i] = h[[i, i]];\r\n e[i] = 0.0;\r\n }\r\n let mut j = cmp::max(i as i16 - 1, 0) as usize;\r\n while j < nn {\r\n norm = norm + (h[[i, j]]).abs();\r\n j = j + 1;\r\n }\r\n i = i + 1;\r\n }\r\n \/\/ Outer loop over eigenvalue index\r\n let mut iter = 0;\r\n while n >= low as i16 {\r\n \/\/ Look for single small sub-diagonal element\r\n let mut l = n;\r\n while l > low as i16 {\r\n s = (h[[l as usize - 1, l as usize - 1]]).abs() + (h[[l as usize, l as usize]]).abs();\r\n if s == 0.0 {\r\n s = norm;\r\n }\r\n if (h[[l as usize, l as usize - 1]]).abs() < eps * s {\r\n break;\r\n }\r\n l = l - 1;\r\n }\r\n \/\/ Check for convergence\r\n \/\/ One root found\r\n if l == n {\r\n h[[n as usize, n as usize]] = h[[n as usize, n as usize]] + exshift;\r\n d[n as usize] = h[[n as usize, n as usize]];\r\n e[n as usize] = 0.0;\r\n n = n - 1;\r\n iter = 0;\r\n \/\/ Two roots found\r\n } else if l == n - 1 {\r\n w = h[[n as usize, n as usize - 1]] * h[[n as usize - 1, n as usize]];\r\n p = (h[[n as usize - 1, n as usize - 1]] - h[[n as usize, n as usize]]) \/ 2.0;\r\n q = p * p + w;\r\n z = (q).abs().sqrt();\r\n h[[n as usize, n as usize]] = h[[n as usize, n as usize]] + exshift;\r\n h[[n as usize - 1, n as usize - 1]] = h[[n as usize - 1, n as usize - 1]] + exshift;\r\n x = h[[n as usize, n as usize]];\r\n \/\/ Real pair\r\n if q >= 0. {\r\n if p >= 0. 
{\r\n z = p + z;\r\n } else {\r\n z = p - z;\r\n }\r\n d[n as usize - 1] = x + z;\r\n d[n as usize] = d[n as usize - 1];\r\n if z != 0.0 {\r\n d[n as usize] = x - w \/ z;\r\n }\r\n e[n as usize - 1] = 0.0;\r\n e[n as usize] = 0.0;\r\n x = h[[n as usize, n as usize - 1]];\r\n s = (x).abs() + (z).abs();\r\n p = x \/ s;\r\n q = z \/ s;\r\n r = (p * p + q * q).sqrt();\r\n p = p \/ r;\r\n q = q \/ r;\r\n \/\/ Row modification\r\n let mut j = n - 1;\r\n while j < nn as i16 {\r\n z = h[[n as usize - 1, j as usize]];\r\n h[[n as usize - 1, j as usize]] = q * z + p * h[[n as usize, j as usize]];\r\n h[[n as usize, j as usize]] = q * h[[n as usize, j as usize]] - p * z;\r\n j = j + 1;\r\n }\r\n \/\/ Column modification\r\n let mut i = 0;\r\n while i <= n {\r\n z = h[[i as usize, n as usize - 1]];\r\n h[[i as usize, n as usize - 1]] = q * z + p * h[[i as usize, n as usize]];\r\n h[[i as usize, n as usize]] = q * h[[i as usize, n as usize]] - p * z;\r\n i = i + 1;\r\n }\r\n \/\/ Accumulate transformations\r\n let mut i = low;\r\n while i <= high {\r\n z = v[[i as usize, n as usize - 1]];\r\n v[[i as usize, n as usize - 1]] = q * z + p * v[[i as usize, n as usize]];\r\n v[[i as usize, n as usize]] = q * v[[i as usize, n as usize]] - p * z;\r\n i = i + 1;\r\n }\r\n \/\/ Complex pair\r\n } else {\r\n d[n as usize - 1] = x + p;\r\n d[n as usize] = x + p;\r\n e[n as usize - 1] = z;\r\n e[n as usize] = -z;\r\n }\r\n n = n - 2;\r\n iter = 0;\r\n \/\/ No convergence yet\r\n } else {\r\n \/\/ Form shift\r\n x = h[[n as usize, n as usize]];\r\n y = 0.0;\r\n w = 0.0;\r\n if l < n {\r\n y = h[[n as usize - 1, n as usize - 1]];\r\n w = h[[n as usize, n as usize - 1]] * h[[n as usize - 1, n as usize]];\r\n }\r\n \/\/ Wilkinson's original ad hoc shift\r\n if iter == 10 {\r\n exshift += x;\r\n let mut i = low;\r\n while i <= n as usize {\r\n h[[i, i]] -= x;\r\n i = i + 1;\r\n }\r\n s = (h[[n as usize, n as usize - 1]]).abs() + (h[[n as usize - 1, n as usize - 2]]).abs();\r\n y = 0.75 * s;\r\n x = y;\r\n w = -0.4375 * s * s;\r\n }\r\n \/\/ MATLAB's new ad hoc shift\r\n if iter == 30 {\r\n s = (y - x) \/ 2.0;\r\n s = s * s + w;\r\n if s > 0. 
{\r\n s = s.sqrt();\r\n if y < x {\r\n s = -s;\r\n }\r\n s = x - w \/ ((y - x) \/ 2.0 + s);\r\n let mut i = low;\r\n while i <= n as usize {\r\n h[[i, i]] -= s;\r\n i = i + 1;\r\n }\r\n exshift += s;\r\n x = 0.964;\r\n y = x;\r\n w = y;\r\n }\r\n }\r\n iter = iter + 1; \/\/ (Could check iteration count here.)\r\n \/\/ Look for two consecutive small sub-diagonal elements\r\n let mut m = n - 2;\r\n while m >= l {\r\n z = h[[m as usize, m as usize]];\r\n r = x - z;\r\n s = y - z;\r\n p = (r * s - w) \/ h[[m as usize + 1, m as usize]] + h[[m as usize, m as usize + 1]];\r\n q = h[[m as usize + 1, m as usize + 1]] - z - r - s;\r\n r = h[[m as usize + 2, m as usize + 1]];\r\n s = (p).abs() + (q).abs() + (r).abs();\r\n p = p \/ s;\r\n q = q \/ s;\r\n r = r \/ s;\r\n if m == l {\r\n break;\r\n }\r\n if h[[m as usize, m as usize - 1]].abs() * (q).abs() + (r).abs() <\r\n eps * ((p).abs() * ((h[[m as usize - 1, m as usize - 1]]).abs() + (z).abs() +\r\n (h[[m as usize + 1, m as usize + 1]]).abs()\r\n )) {\r\n break;\r\n }\r\n m = m - 1;\r\n }\r\n let mut i = m + 2;\r\n while i <= n {\r\n h[[i as usize, i as usize - 2]] = 0.0;\r\n if i > m + 2 {\r\n h[[i as usize, i as usize - 3]] = 0.0;\r\n }\r\n i = i + 1;\r\n }\r\n \/\/ Double QR step involving rows l:n and columns m:n\r\n let mut k = m;\r\n while k <= n - 1 {\r\n let notlast = if k != n - 1 { true } else { false };\r\n if k != m {\r\n p = h[[k as usize, k as usize - 1]];\r\n q = h[[k as usize + 1, k as usize - 1]];\r\n r = if notlast { h[[k as usize + 2, k as usize - 1]] } else { 0.0 };\r\n x = (p).abs() + (q).abs() + (r).abs();\r\n if x == 0.0 {\r\n k=k+1;\r\n continue;\r\n }\r\n p = p \/ x;\r\n q = q \/ x;\r\n r = r \/ x;\r\n }\r\n s = (p * p + q * q + r * r).sqrt();\r\n if p < 0. {\r\n s = -s;\r\n }\r\n if s != 0. 
{\r\n if k != m {\r\n h[[k as usize, k as usize - 1]] = -s * x;\r\n } else if l != m {\r\n h[[k as usize, k as usize - 1]] = -h[[k as usize, k as usize - 1]];\r\n }\r\n p = p + s;\r\n x = p \/ s;\r\n y = q \/ s;\r\n z = r \/ s;\r\n q = q \/ p;\r\n r = r \/ p;\r\n \/\/ Row modification\r\n let mut j = k;\r\n while j < nn as i16 {\r\n p = h[[k as usize, j as usize]] + q * h[[k as usize + 1, j as usize]];\r\n if notlast {\r\n p = p + r * h[[k as usize + 2, j as usize]];\r\n h[[k as usize + 2, j as usize]] = h[[k as usize + 2, j as usize]] - p * z;\r\n }\r\n h[[k as usize, j as usize]] = h[[k as usize, j as usize]] - p * x;\r\n h[[k as usize + 1, j as usize]] = h[[k as usize + 1, j as usize]] - p * y;\r\n j = j + 1;\r\n }\r\n \/\/ Column modification\r\n let mut i = 0;\r\n while i <= cmp::min(n as usize, k as usize + 3) {\r\n p = x * h[[i, k as usize]] + y * h[[i as usize, k as usize + 1]];\r\n if notlast {\r\n p = p + z * h[[i, k as usize + 2]];\r\n h[[i, k as usize + 2]] = h[[i, k as usize + 2]] - p * r;\r\n }\r\n h[[i, k as usize]] = h[[i, k as usize]] - p;\r\n h[[i, k as usize + 1]] = h[[i, k as usize + 1]] - p * q;\r\n i = i + 1;\r\n }\r\n \/\/ Accumulate transformations\r\n let mut i = low;\r\n while i <= high {\r\n p = x * v[[i, k as usize]] + y * v[[i, k as usize + 1]];\r\n if notlast {\r\n p = p + z * v[[i as usize, k as usize + 2]];\r\n v[[i as usize, k as usize + 2]] = v[[i as usize, k as usize + 2]] - p * r;\r\n }\r\n v[[i, k as usize]] = v[[i, k as usize]] - p;\r\n v[[i, k as usize + 1]] = v[[i, k as usize + 1]] - p * q;\r\n i = i + 1;\r\n }\r\n } \/\/ (s != 0)\r\n k = k + 1;\r\n } \/\/ k loop\r\n } \/\/ check convergence\r\n } \/\/ while n >= low\r\n \/\/ Backsubstitute to find vectors of upper triangular form\r\n if norm == 0.0 {\r\n return;\r\n }\r\n n = nn as i16 - 1;\r\n while n >= 0 {\r\n p = d[n as usize];\r\n q = e[n as usize];\r\n \/\/ Real vector\r\n if q == 0. {\r\n let mut l = n;\r\n h[[n as usize, n as usize]] = 1.0;\r\n let mut i = n as i16 - 1;\r\n while i >= 0 {\r\n w = h[[i as usize, i as usize]] - p;\r\n r = 0.0;\r\n let mut j = l;\r\n while j <= n {\r\n r = r + h[[i as usize, j as usize]] * h[[j as usize, n as usize]];\r\n j = j + 1;\r\n }\r\n if e[i as usize] < 0.0 {\r\n z = w;\r\n s = r;\r\n } else {\r\n l = i;\r\n if e[i as usize] == 0.0 {\r\n if w != 0.0 {\r\n h[[i as usize, n as usize]] = -r \/ w;\r\n } else {\r\n h[[i as usize, n as usize]] = -r \/ (eps * norm);\r\n }\r\n \/\/ Solve real equations\r\n } else {\r\n x = h[[i as usize, i as usize + 1]];\r\n y = h[[i as usize + 1, i as usize]];\r\n q = (d[i as usize] - p) * (d[i as usize] - p) + e[i as usize] * e[i as usize];\r\n t = (x * s - z * r) \/ q;\r\n h[[i as usize, n as usize]] = t;\r\n if (x).abs() > (z).abs() {\r\n h[[i as usize + 1, n as usize]] = (-r - w * t) \/ x;\r\n } else {\r\n h[[i as usize + 1, n as usize]] = (-s - y * t) \/ z;\r\n }\r\n }\r\n \/\/ Overflow control\r\n t = h[[i as usize, n as usize]];\r\n if (eps * t).abs() * t > 1. {\r\n let mut j = i;\r\n while j <= n as i16 {\r\n h[[j as usize, n as usize]] = h[[j as usize, n as usize]] \/ t;\r\n j = j + 1;\r\n }\r\n }\r\n }\r\n i = i - 1;\r\n }\r\n \/\/ Complex vector\r\n } else if q < 0. 
{\r\n let mut l = n - 1;\r\n \/\/ Last vector component imaginary so matrix is triangular\r\n if (h[[n as usize, n as usize - 1]]).abs() > (h[[n as usize - 1, n as usize]]).abs() {\r\n h[[n as usize - 1, n as usize - 1]] = q \/ h[[n as usize, n as usize - 1]];\r\n h[[n as usize - 1, n as usize]] = -(h[[n as usize, n as usize]] - p) \/ h[[n as usize, n as usize - 1]];\r\n } else {\r\n let (cdivr, cdivi) = cdiv(0.0, -h[[n as usize - 1, n as usize]], h[[n as usize - 1, n as usize - 1]] - p, q);\r\n h[[n as usize - 1, n as usize - 1]] = cdivr;\r\n h[[n as usize - 1, n as usize]] = cdivi;\r\n }\r\n h[[n as usize, n as usize - 1]] = 0.0;\r\n h[[n as usize, n as usize]] = 1.0;\r\n let mut i = n - 2;\r\n while i >= 0 {\r\n let mut ra = 0.;\r\n let mut sa = 0.;\r\n let mut vr;\r\n let vi;\r\n let mut j = l;\r\n while j <= n {\r\n ra = ra + h[[i as usize, j as usize]] * h[[j as usize, n as usize - 1]];\r\n sa = sa + h[[i as usize, j as usize]] * h[[j as usize, n as usize]];\r\n j = j + 1;\r\n }\r\n w = h[[i as usize, i as usize]] - p;\r\n if e[i as usize] < 0.0 {\r\n z = w;\r\n r = ra;\r\n s = sa;\r\n } else {\r\n l = i;\r\n if e[i as usize] == 0. {\r\n let (cdivr, cdivi) = cdiv(-ra, -sa, w, q);\r\n h[[i as usize, n as usize - 1]] = cdivr;\r\n h[[i as usize, n as usize]] = cdivi;\r\n } else {\r\n \/\/ Solve complex equations\r\n x = h[[i as usize, i as usize + 1]];\r\n y = h[[i as usize + 1, i as usize]];\r\n vr = (d[i as usize] - p) * (d[i as usize] - p) + e[i as usize] * e[i as usize] - q * q;\r\n vi = (d[i as usize] - p) * 2.0 * q;\r\n if vr == 0.0 && vi == 0.0 {\r\n vr = eps * norm * ((w).abs() + (q).abs() +\r\n (x).abs() + (y).abs() + (z)).abs();\r\n }\r\n let (cdivr, cdivi) = cdiv(x * r - z * ra + q * sa, x * s - z * sa - q * ra, vr, vi);\r\n h[[i as usize, n as usize - 1]] = cdivr;\r\n h[[i as usize, n as usize]] = cdivi;\r\n if (x).abs() > ((z).abs() + (q).abs()) {\r\n h[[i as usize + 1, n as usize - 1]] = (-ra - w * h[[i as usize, n as usize - 1]] + q * h[[i as usize, n as usize]]) \/ x;\r\n h[[i as usize + 1, n as usize]] = (-sa - w * h[[i as usize, n as usize]] - q * h[[i as usize, n as usize - 1]]) \/ x;\r\n } else {\r\n let (cdivr, cdivi) = cdiv(-r - y * h[[i as usize, n as usize - 1]], -s - y * h[[i as usize, n as usize]], z, q);\r\n h[[i as usize + 1, n as usize - 1]] = cdivr;\r\n h[[i as usize + 1, n as usize]] = cdivi;\r\n }\r\n }\r\n \/\/ Overflow control\r\n t = (h[[i as usize, n as usize - 1]]).abs().max(h[[i as usize, n as usize]].abs());\r\n if (eps * t) * t > 1. {\r\n let mut j = i;\r\n while j <= n {\r\n j = j + 1;\r\n h[[j as usize, n as usize - 1]] = h[[j as usize, n as usize - 1]] \/ t;\r\n h[[j as usize, n as usize]] = h[[j as usize, n as usize]] \/ t;\r\n }\r\n }\r\n }\r\n i = i - 1;\r\n }\r\n }\r\n n = n - 1;\r\n }\r\n \/\/ Vectors of isolated roots\r\n let mut i = 0;\r\n while i < nn {\r\n if i < low || i > high {\r\n let mut j = i;\r\n while j < nn {\r\n v[[i, j]] = h[[i, j]];\r\n j = j + 1;\r\n }\r\n }\r\n i = i + 1;\r\n }\r\n \/\/ Back transformation to get eigenvectors of original matrix\r\n let mut j = nn as i16 - 1;\r\n while j >= low as i16 {\r\n let mut i = low;\r\n while i <= high {\r\n z = 0.0;\r\n let mut k = low;\r\n while k <= cmp::min(j as usize, high) {\r\n z = z + v[[i, k]] * h[[k, j as usize]];\r\n k = k + 1;\r\n }\r\n v[[i, j as usize]] = z;\r\n i = i + 1;\r\n }\r\n j = j - 1;\r\n }\r\n}\r\n\r\n\/\/ This is derived from the Algol procedures orthes and ortran,\r\n\/\/ by Martin and Wilkinson, Handbook for Auto. 
Comp.,\r\n\/\/ Vol.ii-Linear Algebra, and the corresponding\r\n\/\/ Fortran subroutines in EISPACK.\r\n#[allow(dead_code)]\r\npub fn orthes(m: &mut Matrix, h_mat: &mut Matrix, v_mat: &mut Matrix) {\r\n let low = 0;\r\n let n = m.n;\r\n let high = n - 1;\r\n let mut m = low + 1;\r\n let mut ort = vec!(0.; n);\r\n while m < high - 1 {\r\n \/\/ Scale column.\r\n let mut scale = 0.0;\r\n let mut i = m;\r\n \/\/for (int i = m; i < = high; i + +)\r\n while i <= high {\r\n scale = scale + (h_mat[[i, m - 1]]).abs();\r\n i = i + 1;\r\n }\r\n if scale != 0.0 {\r\n \/\/ Compute Householder transformation.\r\n let mut h = 0.0;\r\n let mut i = high;\r\n while i >= m {\r\n ort[i] = h_mat[[i, m - 1]] \/ scale;\r\n h += ort[i] * ort[i];\r\n i = i - 1;\r\n }\r\n let mut g = h.sqrt();\r\n if ort[m] > 0. {\r\n g = -g;\r\n }\r\n h = h - ort[m] * g;\r\n ort[m] = ort[m] - g;\r\n \/\/ Apply Householder similarity transformation\r\n \/\/ H = (I-u*u'\/h)*H*(I-u*u')\/h)\r\n let mut j = m;\r\n while j < n {\r\n let mut f = 0.0;\r\n let mut i = high;\r\n while i >= m {\r\n f += ort[i] * h_mat[[i, j]];\r\n i = i - 1;\r\n }\r\n f = f \/ h;\r\n let mut i = m;\r\n while\r\n i <= high {\r\n h_mat[[i, j]] -= f * ort[i];\r\n i = i + 1;\r\n }\r\n j = j + 1;\r\n }\r\n let mut i = 0;\r\n while i <= high {\r\n let mut f = 0.0;\r\n let mut j = high;\r\n while j >= m {\r\n f += ort[j] * h_mat[[i, j]];\r\n j = j - 1;\r\n }\r\n f = f \/ h;\r\n let mut j = m;\r\n while j <= high {\r\n h_mat[[i, j]] -= f * ort[j];\r\n j = j + 1;\r\n }\r\n i = i + 1;\r\n }\r\n ort[m] = scale * ort[m];\r\n h_mat[[m, m - 1]] = scale * g;\r\n }\r\n m = m + 1;\r\n }\r\n \/\/ Accumulate transformations (Algol's ortran).\r\n for i in 0..n {\r\n for j in 0..n {\r\n v_mat[[i, j]] = if i == j { 1.0 } else { 0.0 };\r\n }\r\n }\r\n let mut m = high - 1;\r\n while m >= low + 1 {\r\n if h_mat[[m, m - 1]] != 0.0 {\r\n let mut i = m + 1;\r\n while i <= high {\r\n ort[i] = h_mat[[i, m - 1]];\r\n i = i + 1;\r\n }\r\n let mut j = m;\r\n while j <= high {\r\n let mut g = 0.0;\r\n let mut i = m;\r\n while i <= high {\r\n g += ort[i] * v_mat[[i, j]];\r\n i = i + 1;\r\n }\r\n \/\/ Double division avoids possible underflow\r\n g = (g \/ ort[m]) \/ h_mat[[m, m - 1]];\r\n let mut i = m;\r\n while i <= high {\r\n v_mat[[i, j]] += g * ort[i];\r\n i = i + 1;\r\n }\r\n j = j + 1;\r\n }\r\n }\r\n m = m - 1;\r\n }\r\n}\r\n\r\n#[allow(dead_code)]\r\nfn calc_eigen(m: &mut Matrix) -> Vec<(f64, f64)>{\r\n let n = m.n;\r\n let mut h_mat = Matrix::new(n);\r\n let mut v_mat = Matrix::new(n);\r\n \/\/let mut ort = vec!(0.;n);\r\n let mut d = vec!(0.; n);\r\n let mut e = vec!(0.; n);\r\n for i in 0..n {\r\n for j in 0..n {\r\n h_mat[[i, j]] = m[[i, j]];\r\n }\r\n }\r\n orthes(m, &mut h_mat, &mut v_mat);\r\n hqr2(n, &mut h_mat, &mut v_mat, &mut d, &mut e);\r\n let mut r= vec!((0.,0.); n);\r\n for i in 0..n {\r\n r[i]=(d[i],e[i])\r\n }\r\n r\r\n}\r\n\r\n#[allow(dead_code)]\r\n\/\/ 6 x x x + 9 x x + 3 x + 1 == 0 is vec![1.,3.,9.,6.] \r\npub fn solve_poly(c: Vec<f64>) -> VecDeque<f64> {\r\n let n = c.len();\r\n let mut m = Matrix::new(n);\r\n for i in 0..(n-1) {\r\n m[[i+1,i]]=1.;\r\n }\r\n for i in 0..(n) {\r\n m[[i,n-1]]=-c[i];\r\n }\r\n \/\/println!(\"{:?}\",m);\r\n let ei = calc_eigen(&mut m);\r\n let mut r = VecDeque::new();\r\n for c in ei {\r\n if c.1*c.1==0. 
{\r\n r.push_back(c.0);\r\n }\r\n }\r\n r\r\n}\r\n\r\n\/*fn main() {\r\n let c =vec![1.,3.,9.,6.];\r\n let r =solve_poly(c);\r\n println!(\"{:?}\",r);\r\n \/\/ let m = Matrix::new(10);\r\n \/\/ *m.get(5, 5) = 7.;\r\n}\r\n*\/<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #99<commit_after>use common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 99,\n answer: \"709\",\n solver: solve\n};\n\nfn solve() -> ~str {\n let result = io::file_reader(&Path(\"files\/base_exp.txt\"))\n .map(|input| {\n let mut line_idx = 1u;\n let mut max = 0f;\n let mut max_idx = 1;\n for input.each_line |line| {\n for str::find_char(line, ',').each |&idx| {\n let base = float::from_str(line.slice(0, idx)).get();\n let exp = float::from_str(line.slice(idx + 1, line.len())).get();\n let ln = exp * base.ln();\n if ln > max {\n max = ln;\n max_idx = line_idx;\n }\n line_idx += 1;\n }\n }\n max_idx\n });\n\n match result {\n Err(msg) => fail!(msg),\n Ok(value) => return value.to_str()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[macro_use]\nextern crate serde_json;\n\nextern crate xi_rpc;\nextern crate xi_core_lib;\n\nuse std::io;\n\nuse xi_rpc::{RpcLoop, ReadError};\nuse xi_rpc::test_utils::{make_reader, test_channel};\nuse xi_core_lib::MainState;\n\n#[test]\n\/\/\/ Tests that the handler responds to a standard startup sequence as expected.\nfn test_startup() {\n let mut state = MainState::new();\n let (tx, mut rx) = test_channel();\n let mut rpc_looper = RpcLoop::new(tx);\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n assert_eq!(rx.expect_object().get_method(), Some(\"available_themes\"));\n assert_eq!(rx.expect_object().get_method(), Some(\"theme_changed\"));\n\n let json = make_reader(r#\"{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n assert_eq!(rx.expect_response(), Ok(json!(\"view-id-1\")));\n}\n\n\n#[test]\n\/\/\/ Tests that the handler creates and destroys views and buffers\nfn test_state() {\n let mut state = MainState::new();\n let buffers = state._get_buffers();\n\n let write = io::sink();\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n let mut rpc_looper = RpcLoop::new(write);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 1);\n }\n assert!(buffers.buffer_for_view(&\"view-id-1\".into()).is_some());\n\n let json = make_reader(\n r#\"{\"method\":\"close_view\",\"params\":{\"view_id\":\"view-id-1\"}}\"#);\n 
assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 0);\n }\n\n let json = make_reader(r#\"{\"id\":1,\"method\":\"new_view\",\"params\":{}}\n{\"id\":2,\"method\":\"new_view\",\"params\":{}}\n{\"id\":3,\"method\":\"new_view\",\"params\":{}}\"#);\n\n\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 3);\n }\n}\n\n#[test]\n\/\/\/ Tests that the runloop exits with the correct error when receiving\n\/\/\/ malformed json.\nfn test_malformed_json() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ malformed json, no id: should not receive a response, and connection should close.\n let read = make_reader(r#\"{method:\"client_started\",\"params\":{}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n match rpc_looper.mainloop(|| read, &mut state).err()\n .expect(\"malformed json exits with error\") {\n ReadError::Json(_) => (), \/\/ expected\n err => panic!(\"Unexpected error: {:?}\", err),\n }\n \/\/ read should have ended after first item\n {\n let buffers = state._get_buffers();\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 0);\n }\n}\n\n#[test]\n\/\/\/ Sends all of the cursor movement-related commands, and verifies that\n\/\/\/ they are handled.\n\/\/\/\n\/\/\/\n\/\/\/ Note: this is a test of message parsing, not of editor behaviour.\nfn test_movement_cmds() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(MOVEMENT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n#[test]\n\/\/\/ Sends all the commands which modify the buffer, and verifies that they\n\/\/\/ are handled.\nfn test_text_commands() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(TEXT_EDIT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n#[test]\nfn test_other_edit_commands() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(OTHER_EDIT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n\/\/TODO: test saving rpc\n\/\/TODO: test plugin rpc\n\nconst MOVEMENT_RPCS: &str = 
r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_up\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_down\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_up_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_down_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_left\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_right\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_left_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_right_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_left\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_right\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_left_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_right_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_left_end_of_line\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_left_end_of_line_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_right_end_of_line\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_right_end_of_line_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_document\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_document_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_document\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_document_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll_page_up\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll_page_down\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"page_up_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"page_down_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"select_all\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"add_selection_above\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-
1\",\"method\":\"add_selection_below\",\"params\":[]}}\"#;\n\nconst TEXT_EDIT_RPCS: &str = r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert\",\"params\":{\"chars\":\"a\"}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_word_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_word_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_to_end_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert_newline\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert_tab\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"yank\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"undo\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"redo\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"transpose\",\"params\":[]}}\n{\"id\":2,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"cut\",\"params\":[]}}\"#;\n\nconst OTHER_EDIT_RPCS: &str = r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll\",\"params\":[0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"goto_line\",\"params\":{\"line\":1}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"request_lines\",\"params\":[0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"click\",\"params\":[6,0,0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"drag\",\"params\":[17,15,0]}}\n{\"id\":4,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find\",\"params\":{\"case_sensitive\":false,\"chars\":\"m\"}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find_next\",\"params\":{\"wrap_around\":true}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find_previous\",\"params\":{\"wrap_around\":true}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"debug_rewrap\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"debug_print_spans\",\"params\":[]}}\n{\"id\":3,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"copy\",\"params\":[]}}\"#;\n<commit_msg>Add test case for gesture edit rpc<commit_after>\/\/ Copyright 2017 Google Inc. 
All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[macro_use]\nextern crate serde_json;\n\nextern crate xi_rpc;\nextern crate xi_core_lib;\n\nuse std::io;\n\nuse xi_rpc::{RpcLoop, ReadError};\nuse xi_rpc::test_utils::{make_reader, test_channel};\nuse xi_core_lib::MainState;\n\n#[test]\n\/\/\/ Tests that the handler responds to a standard startup sequence as expected.\nfn test_startup() {\n let mut state = MainState::new();\n let (tx, mut rx) = test_channel();\n let mut rpc_looper = RpcLoop::new(tx);\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n assert_eq!(rx.expect_object().get_method(), Some(\"available_themes\"));\n assert_eq!(rx.expect_object().get_method(), Some(\"theme_changed\"));\n\n let json = make_reader(r#\"{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n assert_eq!(rx.expect_response(), Ok(json!(\"view-id-1\")));\n}\n\n\n#[test]\n\/\/\/ Tests that the handler creates and destroys views and buffers\nfn test_state() {\n let mut state = MainState::new();\n let buffers = state._get_buffers();\n\n let write = io::sink();\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n let mut rpc_looper = RpcLoop::new(write);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 1);\n }\n assert!(buffers.buffer_for_view(&\"view-id-1\".into()).is_some());\n\n let json = make_reader(\n r#\"{\"method\":\"close_view\",\"params\":{\"view_id\":\"view-id-1\"}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 0);\n }\n\n let json = make_reader(r#\"{\"id\":1,\"method\":\"new_view\",\"params\":{}}\n{\"id\":2,\"method\":\"new_view\",\"params\":{}}\n{\"id\":3,\"method\":\"new_view\",\"params\":{}}\"#);\n\n\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n {\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 3);\n }\n}\n\n#[test]\n\/\/\/ Tests that the runloop exits with the correct error when receiving\n\/\/\/ malformed json.\nfn test_malformed_json() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ malformed json, no id: should not receive a response, and connection should close.\n let read = make_reader(r#\"{method:\"client_started\",\"params\":{}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n match rpc_looper.mainloop(|| read, &mut state).err()\n .expect(\"malformed json exits with error\") {\n ReadError::Json(_) => (), \/\/ expected\n err => 
panic!(\"Unexpected error: {:?}\", err),\n }\n \/\/ read should have ended after first item\n {\n let buffers = state._get_buffers();\n let buffers = buffers.lock();\n assert_eq!(buffers.iter_editors().count(), 0);\n }\n}\n\n#[test]\n\/\/\/ Sends all of the cursor movement-related commands, and verifies that\n\/\/\/ they are handled.\n\/\/\/\n\/\/\/\n\/\/\/ Note: this is a test of message parsing, not of editor behaviour.\nfn test_movement_cmds() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(MOVEMENT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n#[test]\n\/\/\/ Sends all the commands which modify the buffer, and verifies that they\n\/\/\/ are handled.\nfn test_text_commands() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(TEXT_EDIT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n#[test]\nfn test_other_edit_commands() {\n let mut state = MainState::new();\n let write = io::sink();\n let mut rpc_looper = RpcLoop::new(write);\n \/\/ init a new view\n let json = make_reader(r#\"{\"method\":\"client_started\",\"params\":{}}\n{\"method\":\"set_theme\",\"params\":{\"theme_name\":\"InspiredGitHub\"}}\n{\"id\":0,\"method\":\"new_view\",\"params\":{}}\"#);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n \n let json = make_reader(OTHER_EDIT_RPCS);\n assert!(rpc_looper.mainloop(|| json, &mut state).is_ok());\n}\n\n\/\/TODO: test saving rpc\n\/\/TODO: test plugin rpc\n\nconst MOVEMENT_RPCS: &str = 
r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_up\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_down\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_up_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_down_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_left\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_right\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_left_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_right_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_left\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_right\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_left_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_word_right_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_left_end_of_line\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_left_end_of_line_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_right_end_of_line\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_right_end_of_line_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_document\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_beginning_of_document_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_document\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"move_to_end_of_document_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll_page_up\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll_page_down\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"page_up_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"page_down_and_modify_selection\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"select_all\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"add_selection_above\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-
1\",\"method\":\"add_selection_below\",\"params\":[]}}\"#;\n\nconst TEXT_EDIT_RPCS: &str = r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert\",\"params\":{\"chars\":\"a\"}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_word_forward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_word_backward\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"delete_to_end_of_paragraph\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert_newline\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"insert_tab\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"yank\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"undo\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"redo\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"transpose\",\"params\":[]}}\n{\"id\":2,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"cut\",\"params\":[]}}\"#;\n\nconst OTHER_EDIT_RPCS: &str = r#\"{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"scroll\",\"params\":[0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"goto_line\",\"params\":{\"line\":1}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"request_lines\",\"params\":[0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"click\",\"params\":[6,0,0,1]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"drag\",\"params\":[17,15,0]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"gesture\",\"params\":{\"line\": 1, \"col\": 2, \"ty\": \"toggle_sel\"}}}\n{\"id\":4,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find\",\"params\":{\"case_sensitive\":false,\"chars\":\"m\"}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find_next\",\"params\":{\"wrap_around\":true}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"find_previous\",\"params\":{\"wrap_around\":true}}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"debug_rewrap\",\"params\":[]}}\n{\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"debug_print_spans\",\"params\":[]}}\n{\"id\":3,\"method\":\"edit\",\"params\":{\"view_id\":\"view-id-1\",\"method\":\"copy\",\"params\":[]}}\"#;\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(issue = \"0\", feature = \"windows_stdio\")]\n\nuse prelude::v1::*;\nuse io::prelude::*;\n\nuse io::{self, Cursor};\nuse ptr;\nuse str;\nuse sync::Mutex;\nuse sys::c;\nuse sys::cvt;\nuse sys::handle::Handle;\nuse sys_common::io::read_to_end_uninitialized;\n\npub struct NoClose(Option<Handle>);\n\npub enum Output {\n Console(NoClose),\n Pipe(NoClose),\n}\n\npub struct Stdin {\n handle: Output,\n utf8: Mutex<io::Cursor<Vec<u8>>>,\n}\npub struct Stdout(Output);\npub struct Stderr(Output);\n\npub fn get(handle: c::DWORD) -> io::Result<Output> {\n let handle = unsafe { c::GetStdHandle(handle) };\n if handle == c::INVALID_HANDLE_VALUE {\n Err(io::Error::last_os_error())\n } else if handle.is_null() {\n Err(io::Error::new(io::ErrorKind::Other,\n \"no stdio handle available for this process\"))\n } else {\n let ret = NoClose::new(handle);\n let mut out = 0;\n match unsafe { c::GetConsoleMode(handle, &mut out) } {\n 0 => Ok(Output::Pipe(ret)),\n _ => Ok(Output::Console(ret)),\n }\n }\n}\n\nfn write(out: &Output, data: &[u8]) -> io::Result<usize> {\n let handle = match *out {\n Output::Console(ref c) => c.get().raw(),\n Output::Pipe(ref p) => return p.get().write(data),\n };\n \/\/ As with stdin on windows, stdout often can't handle writes of large\n \/\/ sizes. For an example, see #14940. For this reason, don't try to\n \/\/ write the entire output buffer on windows.\n \/\/\n \/\/ For some other references, it appears that this problem has been\n \/\/ encountered by others [1] [2]. We choose the number 8K just because\n \/\/ libuv does the same.\n \/\/\n \/\/ [1]: https:\/\/tahoe-lafs.org\/trac\/tahoe-lafs\/ticket\/1232\n \/\/ [2]: http:\/\/www.mail-archive.com\/log4net-dev@logging.apache.org\/msg00661.html\n const OUT_MAX: usize = 8192;\n let (utf16, data_len) = match str::from_utf8(data).ok() {\n Some(mut utf8) => {\n if utf8.len() > OUT_MAX {\n let mut new_len = OUT_MAX;\n while !utf8.is_char_boundary(new_len) {\n new_len -= 1;\n }\n utf8 = &utf8[..new_len];\n }\n (utf8.encode_utf16().collect::<Vec<u16>>(), utf8.len())\n }\n None => return Err(invalid_encoding()),\n };\n let mut written = 0;\n try!(cvt(unsafe {\n c::WriteConsoleW(handle,\n utf16.as_ptr() as c::LPCVOID,\n utf16.len() as u32,\n &mut written,\n ptr::null_mut())\n }));\n\n \/\/ FIXME if this only partially writes the utf16 buffer then we need to\n \/\/ figure out how many bytes of `data` were actually written\n assert_eq!(written as usize, utf16.len());\n Ok(data_len)\n}\n\nimpl Stdin {\n pub fn new() -> io::Result<Stdin> {\n get(c::STD_INPUT_HANDLE).map(|handle| {\n Stdin {\n handle: handle,\n utf8: Mutex::new(Cursor::new(Vec::new())),\n }\n })\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let handle = match self.handle {\n Output::Console(ref c) => c.get().raw(),\n Output::Pipe(ref p) => return p.get().read(buf),\n };\n let mut utf8 = self.utf8.lock().unwrap();\n \/\/ Read more if the buffer is empty\n if utf8.position() as usize == utf8.get_ref().len() {\n let mut utf16 = vec![0u16; 0x1000];\n let mut num = 0;\n try!(cvt(unsafe {\n c::ReadConsoleW(handle,\n utf16.as_mut_ptr() as c::LPVOID,\n utf16.len() as u32,\n &mut num,\n ptr::null_mut())\n }));\n utf16.truncate(num as usize);\n \/\/ FIXME: what to do about this data that has already been read?\n let data = match String::from_utf16(&utf16) {\n Ok(utf8) => utf8.into_bytes(),\n Err(..) 
=> return Err(invalid_encoding()),\n };\n *utf8 = Cursor::new(data);\n }\n\n \/\/ MemReader shouldn't error here since we just filled it\n utf8.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n}\n\n#[unstable(reason = \"not public\", issue = \"0\", feature = \"fd_read\")]\nimpl<'a> Read for &'a Stdin {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n unsafe { read_to_end_uninitialized(self, buf) }\n }\n}\n\nimpl Stdout {\n pub fn new() -> io::Result<Stdout> {\n get(c::STD_OUTPUT_HANDLE).map(Stdout)\n }\n\n pub fn write(&self, data: &[u8]) -> io::Result<usize> {\n write(&self.0, data)\n }\n}\n\nimpl Stderr {\n pub fn new() -> io::Result<Stderr> {\n get(c::STD_ERROR_HANDLE).map(Stderr)\n }\n\n pub fn write(&self, data: &[u8]) -> io::Result<usize> {\n write(&self.0, data)\n }\n}\n\n\/\/ FIXME: right now this raw stderr handle is used in a few places because\n\/\/ std::io::stderr_raw isn't exposed, but once that's exposed this impl\n\/\/ should go away\nimpl io::Write for Stderr {\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n Stderr::write(self, data)\n }\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\nimpl NoClose {\n fn new(handle: c::HANDLE) -> NoClose {\n NoClose(Some(Handle::new(handle)))\n }\n\n fn get(&self) -> &Handle { self.0.as_ref().unwrap() }\n}\n\nimpl Drop for NoClose {\n fn drop(&mut self) {\n self.0.take().unwrap().into_raw();\n }\n}\n\nimpl Output {\n pub fn handle(&self) -> &Handle {\n let nc = match *self {\n Output::Console(ref c) => c,\n Output::Pipe(ref c) => c,\n };\n nc.0.as_ref().unwrap()\n }\n}\n\nfn invalid_encoding() -> io::Error {\n io::Error::new(io::ErrorKind::InvalidData, \"text was not valid unicode\")\n}\n<commit_msg>Further simplify Windows stdout\/stderr<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(issue = \"0\", feature = \"windows_stdio\")]\n\nuse prelude::v1::*;\nuse io::prelude::*;\n\nuse cmp;\nuse io::{self, Cursor};\nuse ptr;\nuse str;\nuse sync::Mutex;\nuse sys::c;\nuse sys::cvt;\nuse sys::handle::Handle;\nuse sys_common::io::read_to_end_uninitialized;\n\npub struct NoClose(Option<Handle>);\n\npub enum Output {\n Console(NoClose),\n Pipe(NoClose),\n}\n\npub struct Stdin {\n handle: Output,\n utf8: Mutex<io::Cursor<Vec<u8>>>,\n}\npub struct Stdout(Output);\npub struct Stderr(Output);\n\npub fn get(handle: c::DWORD) -> io::Result<Output> {\n let handle = unsafe { c::GetStdHandle(handle) };\n if handle == c::INVALID_HANDLE_VALUE {\n Err(io::Error::last_os_error())\n } else if handle.is_null() {\n Err(io::Error::new(io::ErrorKind::Other,\n \"no stdio handle available for this process\"))\n } else {\n let ret = NoClose::new(handle);\n let mut out = 0;\n match unsafe { c::GetConsoleMode(handle, &mut out) } {\n 0 => Ok(Output::Pipe(ret)),\n _ => Ok(Output::Console(ret)),\n }\n }\n}\n\nfn write(out: &Output, data: &[u8]) -> io::Result<usize> {\n let handle = match *out {\n Output::Console(ref c) => c.get().raw(),\n Output::Pipe(ref p) => return p.get().write(data),\n };\n \/\/ As with stdin on windows, stdout often can't handle writes of large\n \/\/ sizes. For an example, see #14940. For this reason, don't try to\n \/\/ write the entire output buffer on windows.\n \/\/\n \/\/ For some other references, it appears that this problem has been\n \/\/ encountered by others [1] [2]. We choose the number 8K just because\n \/\/ libuv does the same.\n \/\/\n \/\/ [1]: https:\/\/tahoe-lafs.org\/trac\/tahoe-lafs\/ticket\/1232\n \/\/ [2]: http:\/\/www.mail-archive.com\/log4net-dev@logging.apache.org\/msg00661.html\n const OUT_MAX: usize = 8192;\n let len = cmp::min(data.len(), OUT_MAX);\n let utf8 = match str::from_utf8(&data[..len]) {\n Ok(s) => s,\n Err(ref e) if e.valid_up_to() == 0 => return Err(invalid_encoding()),\n Err(e) => str::from_utf8(&data[..e.valid_up_to()]).unwrap(),\n };\n let utf16 = utf8.encode_utf16().collect::<Vec<u16>>();\n let mut written = 0;\n try!(cvt(unsafe {\n c::WriteConsoleW(handle,\n utf16.as_ptr() as c::LPCVOID,\n utf16.len() as u32,\n &mut written,\n ptr::null_mut())\n }));\n\n \/\/ FIXME if this only partially writes the utf16 buffer then we need to\n \/\/ figure out how many bytes of `data` were actually written\n assert_eq!(written as usize, utf16.len());\n Ok(utf8.len())\n}\n\nimpl Stdin {\n pub fn new() -> io::Result<Stdin> {\n get(c::STD_INPUT_HANDLE).map(|handle| {\n Stdin {\n handle: handle,\n utf8: Mutex::new(Cursor::new(Vec::new())),\n }\n })\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let handle = match self.handle {\n Output::Console(ref c) => c.get().raw(),\n Output::Pipe(ref p) => return p.get().read(buf),\n };\n let mut utf8 = self.utf8.lock().unwrap();\n \/\/ Read more if the buffer is empty\n if utf8.position() as usize == utf8.get_ref().len() {\n let mut utf16 = vec![0u16; 0x1000];\n let mut num = 0;\n try!(cvt(unsafe {\n c::ReadConsoleW(handle,\n utf16.as_mut_ptr() as c::LPVOID,\n utf16.len() as u32,\n &mut num,\n ptr::null_mut())\n }));\n utf16.truncate(num as usize);\n \/\/ FIXME: what to do about this data that has already been read?\n let data = match String::from_utf16(&utf16) {\n Ok(utf8) => utf8.into_bytes(),\n Err(..) 
=> return Err(invalid_encoding()),\n };\n *utf8 = Cursor::new(data);\n }\n\n \/\/ MemReader shouldn't error here since we just filled it\n utf8.read(buf)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n}\n\n#[unstable(reason = \"not public\", issue = \"0\", feature = \"fd_read\")]\nimpl<'a> Read for &'a Stdin {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n unsafe { read_to_end_uninitialized(self, buf) }\n }\n}\n\nimpl Stdout {\n pub fn new() -> io::Result<Stdout> {\n get(c::STD_OUTPUT_HANDLE).map(Stdout)\n }\n\n pub fn write(&self, data: &[u8]) -> io::Result<usize> {\n write(&self.0, data)\n }\n}\n\nimpl Stderr {\n pub fn new() -> io::Result<Stderr> {\n get(c::STD_ERROR_HANDLE).map(Stderr)\n }\n\n pub fn write(&self, data: &[u8]) -> io::Result<usize> {\n write(&self.0, data)\n }\n}\n\n\/\/ FIXME: right now this raw stderr handle is used in a few places because\n\/\/ std::io::stderr_raw isn't exposed, but once that's exposed this impl\n\/\/ should go away\nimpl io::Write for Stderr {\n fn write(&mut self, data: &[u8]) -> io::Result<usize> {\n Stderr::write(self, data)\n }\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\nimpl NoClose {\n fn new(handle: c::HANDLE) -> NoClose {\n NoClose(Some(Handle::new(handle)))\n }\n\n fn get(&self) -> &Handle { self.0.as_ref().unwrap() }\n}\n\nimpl Drop for NoClose {\n fn drop(&mut self) {\n self.0.take().unwrap().into_raw();\n }\n}\n\nimpl Output {\n pub fn handle(&self) -> &Handle {\n let nc = match *self {\n Output::Console(ref c) => c,\n Output::Pipe(ref c) => c,\n };\n nc.0.as_ref().unwrap()\n }\n}\n\nfn invalid_encoding() -> io::Error {\n io::Error::new(io::ErrorKind::InvalidData, \"text was not valid unicode\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>assert error removed<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
A syntax highlighting plugin based on syntect.\n\nextern crate syntect;\n#[macro_use]\nextern crate xi_plugin_lib;\n\nmod stackmap;\n\nuse xi_plugin_lib::state_cache::{self, PluginCtx};\nuse xi_plugin_lib::plugin_base::ScopeSpan;\nuse syntect::parsing::{ParseState, ScopeStack, SyntaxSet, SCOPE_REPO};\nuse stackmap::{StackMap, LookupResult};\n\n\/\/\/ The state for syntax highlighting of one file.\nstruct PluginState<'a> {\n syntax_set: &'a SyntaxSet,\n stack_idents: StackMap,\n offset: usize,\n initial_state: Option<(ParseState, ScopeStack)>,\n spans_start: usize,\n \/\/ unflushed spans\n spans: Vec<ScopeSpan>,\n new_scopes: Vec<Vec<String>>,\n syntax_name: String,\n}\n\nconst LINES_PER_RPC: usize = 50;\n\n\/\/\/ The syntax highlighting state corresponding to the beginning of a line\n\/\/\/ (as stored in the state cache).\n\/\/ Note: this needs to be option because the caching layer relies on Default.\n\/\/ We can't implement that because the actual initial state depends on the\n\/\/ syntax. There are other ways to handle this, but this will do for now.\ntype State = Option<(ParseState, ScopeStack)>;\n\n\nimpl<'a> PluginState<'a> {\n pub fn new(syntax_set: &'a SyntaxSet) -> Self {\n PluginState {\n syntax_set: syntax_set,\n stack_idents: StackMap::default(),\n offset: 0,\n initial_state: None,\n spans_start: 0,\n spans: Vec::new(),\n new_scopes: Vec::new(),\n syntax_name: String::from(\"None\"),\n }\n }\n\n \/\/ compute syntax for one line, also accumulating the style spans\n fn compute_syntax(&mut self, line: &str, state: State) -> State {\n let (mut parse_state, mut scope_state) = state.or_else(|| self.initial_state.clone()).unwrap();\n let ops = parse_state.parse_line(&line);\n\n let mut prev_cursor = 0;\n let repo = SCOPE_REPO.lock().unwrap();\n for (cursor, batch) in ops {\n if scope_state.len() > 0 {\n let scope_ident = self.stack_idents.get_value(scope_state.as_slice());\n let scope_ident = match scope_ident {\n LookupResult::Existing(id) => id,\n LookupResult::New(id) => {\n let stack_strings = scope_state.as_slice().iter()\n .map(|slice| repo.to_string(*slice))\n .collect::<Vec<_>>();\n self.new_scopes.push(stack_strings);\n id\n }\n };\n\n let start = self.offset - self.spans_start + prev_cursor;\n let end = start + (cursor - prev_cursor);\n if start != end {\n let span = ScopeSpan::new(start, end, scope_ident);\n self.spans.push(span);\n }\n }\n prev_cursor = cursor;\n scope_state.apply(&batch);\n }\n Some((parse_state, scope_state))\n }\n\n #[allow(unused)]\n \/\/ Return true if there's any more work to be done.\n fn highlight_one_line(&mut self, ctx: &mut PluginCtx<State>) -> bool {\n if let Some(line_num) = ctx.get_frontier() {\n \/\/print_err!(\"highlighting {}\", line_num);\n let (line_num, offset, state) = ctx.get_prev(line_num);\n if offset != self.offset {\n self.flush_spans(ctx);\n self.offset = offset;\n self.spans_start = offset;\n }\n let new_frontier = match ctx.get_line(line_num) {\n Ok(\"\") => None,\n Ok(s) => {\n let new_state = self.compute_syntax(s, state);\n self.offset += s.len();\n if s.as_bytes().last() == Some(&b'\\n') {\n Some((new_state, line_num + 1))\n } else {\n None\n }\n }\n Err(_) => None,\n };\n let mut converged = false;\n if let Some((ref new_state, new_line_num)) = new_frontier {\n if let Some(old_state) = ctx.get(new_line_num) {\n converged = old_state.as_ref().unwrap().0 == new_state.as_ref().unwrap().0;\n }\n }\n if !converged {\n if let Some((new_state, new_line_num)) = new_frontier {\n ctx.set(new_line_num, new_state);\n 
ctx.update_frontier(new_line_num);\n return true;\n }\n }\n ctx.close_frontier();\n }\n false\n }\n\n fn flush_spans(&mut self, ctx: &mut PluginCtx<State>) {\n if !self.new_scopes.is_empty() {\n ctx.add_scopes(&self.new_scopes);\n self.new_scopes.clear();\n }\n if self.spans_start != self.offset {\n ctx.update_spans(self.spans_start, self.offset - self.spans_start,\n &self.spans);\n self.spans.clear();\n }\n self.spans_start = self.offset;\n }\n\n fn do_highlighting(&mut self, mut ctx: PluginCtx<State>) {\n let syntax = match ctx.get_path() {\n Some(ref path) => self.syntax_set.find_syntax_for_file(path).unwrap()\n .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text()),\n None => self.syntax_set.find_syntax_plain_text(),\n };\n\n if syntax.name != self.syntax_name {\n self.syntax_name = syntax.name.clone();\n print_err!(\"syntect using {}\", syntax.name);\n }\n\n self.initial_state = Some((ParseState::new(syntax), ScopeStack::new()));\n self.spans = Vec::new();\n self.new_scopes = Vec::new();\n self.offset = 0;\n self.spans_start = 0;\n ctx.reset();\n ctx.schedule_idle(0);\n }\n}\n\nimpl<'a> state_cache::Plugin for PluginState<'a> {\n type State = State;\n\n fn initialize(&mut self, ctx: PluginCtx<State>, _buf_size: usize) {\n self.do_highlighting(ctx);\n }\n\n fn update(&mut self, mut ctx: PluginCtx<State>) {\n ctx.schedule_idle(0);\n }\n\n fn did_save(&mut self, ctx: PluginCtx<State>) {\n \/\/ TODO: use smarter logic to figure out whether we need to re-highlight the whole file\n self.do_highlighting(ctx);\n }\n\n fn idle(&mut self, mut ctx: PluginCtx<State>, _token: usize) {\n \/\/print_err!(\"idle task at offset {}\", self.offset);\n for _ in 0..LINES_PER_RPC {\n if !self.highlight_one_line(&mut ctx) {\n self.flush_spans(&mut ctx);\n return;\n }\n if ctx.request_is_pending() {\n print_err!(\"request pending at offset {}\", self.offset);\n break;\n }\n }\n self.flush_spans(&mut ctx);\n ctx.schedule_idle(0);\n }\n}\n\nfn main() {\n let syntax_set = SyntaxSet::load_defaults_newlines();\n let mut state = PluginState::new(&syntax_set);\n\n state_cache::mainloop(&mut state);\n}\n<commit_msg>Fix #352<commit_after>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
A syntax highlighting plugin based on syntect.\n\nextern crate syntect;\n#[macro_use]\nextern crate xi_plugin_lib;\n\nmod stackmap;\n\nuse std::sync::MutexGuard;\nuse xi_plugin_lib::state_cache::{self, PluginCtx};\nuse xi_plugin_lib::plugin_base::ScopeSpan;\nuse syntect::parsing::{ParseState, ScopeStack, SyntaxSet, SCOPE_REPO, ScopeRepository};\nuse stackmap::{StackMap, LookupResult};\n\n\/\/\/ The state for syntax highlighting of one file.\nstruct PluginState<'a> {\n syntax_set: &'a SyntaxSet,\n stack_idents: StackMap,\n offset: usize,\n initial_state: Option<(ParseState, ScopeStack)>,\n spans_start: usize,\n \/\/ unflushed spans\n spans: Vec<ScopeSpan>,\n new_scopes: Vec<Vec<String>>,\n syntax_name: String,\n}\n\nconst LINES_PER_RPC: usize = 50;\n\ntype LockedRepo = MutexGuard<'static, ScopeRepository>;\n\n\/\/\/ The syntax highlighting state corresponding to the beginning of a line\n\/\/\/ (as stored in the state cache).\n\/\/ Note: this needs to be option because the caching layer relies on Default.\n\/\/ We can't implement that because the actual initial state depends on the\n\/\/ syntax. There are other ways to handle this, but this will do for now.\ntype State = Option<(ParseState, ScopeStack)>;\n\n\nimpl<'a> PluginState<'a> {\n pub fn new(syntax_set: &'a SyntaxSet) -> Self {\n PluginState {\n syntax_set: syntax_set,\n stack_idents: StackMap::default(),\n offset: 0,\n initial_state: None,\n spans_start: 0,\n spans: Vec::new(),\n new_scopes: Vec::new(),\n syntax_name: String::from(\"None\"),\n }\n }\n\n \/\/ compute syntax for one line, also accumulating the style spans\n fn compute_syntax(&mut self, line: &str, state: State) -> State {\n let (mut parse_state, mut scope_state) = state.or_else(|| self.initial_state.clone()).unwrap();\n let ops = parse_state.parse_line(&line);\n\n let mut prev_cursor = 0;\n let repo = SCOPE_REPO.lock().unwrap();\n for (cursor, batch) in ops {\n if scope_state.len() > 0 {\n let scope_ident = self.identifier_for_stack(&scope_state, &repo);\n let start = self.offset - self.spans_start + prev_cursor;\n let end = start + (cursor - prev_cursor);\n if start != end {\n let span = ScopeSpan::new(start, end, scope_ident);\n self.spans.push(span);\n }\n }\n prev_cursor = cursor;\n scope_state.apply(&batch);\n }\n \/\/ add span for final state\n let start = self.offset - self.spans_start + prev_cursor;\n let end = start + (line.len() - prev_cursor);\n let scope_ident = self.identifier_for_stack(&scope_state, &repo);\n let span = ScopeSpan::new(start, end, scope_ident);\n self.spans.push(span);\n Some((parse_state, scope_state))\n }\n\n \/\/\/ Returns the unique identifier for this `ScopeStack`. 
We use identifiers\n \/\/\/ so we aren't constantly sending long stack names to the peer.\n fn identifier_for_stack(&mut self, stack: &ScopeStack, repo: &LockedRepo) -> u32 {\n let identifier = self.stack_idents.get_value(stack.as_slice());\n match identifier {\n LookupResult::Existing(id) => id,\n LookupResult::New(id) => {\n let stack_strings = stack.as_slice().iter()\n .map(|slice| repo.to_string(*slice))\n .collect::<Vec<_>>();\n self.new_scopes.push(stack_strings);\n id\n }\n }\n }\n\n #[allow(unused)]\n \/\/ Return true if there's any more work to be done.\n fn highlight_one_line(&mut self, ctx: &mut PluginCtx<State>) -> bool {\n if let Some(line_num) = ctx.get_frontier() {\n let (line_num, offset, state) = ctx.get_prev(line_num);\n if offset != self.offset {\n self.flush_spans(ctx);\n self.offset = offset;\n self.spans_start = offset;\n }\n let new_frontier = match ctx.get_line(line_num) {\n Ok(\"\") => None,\n Ok(s) => {\n let new_state = self.compute_syntax(s, state);\n self.offset += s.len();\n if s.as_bytes().last() == Some(&b'\\n') {\n Some((new_state, line_num + 1))\n } else {\n None\n }\n }\n Err(_) => None,\n };\n let mut converged = false;\n if let Some((ref new_state, new_line_num)) = new_frontier {\n if let Some(old_state) = ctx.get(new_line_num) {\n converged = old_state.as_ref().unwrap().0 == new_state.as_ref().unwrap().0;\n }\n }\n if !converged {\n if let Some((new_state, new_line_num)) = new_frontier {\n ctx.set(new_line_num, new_state);\n ctx.update_frontier(new_line_num);\n return true;\n }\n }\n ctx.close_frontier();\n }\n false\n }\n\n fn flush_spans(&mut self, ctx: &mut PluginCtx<State>) {\n if !self.new_scopes.is_empty() {\n ctx.add_scopes(&self.new_scopes);\n self.new_scopes.clear();\n }\n if self.spans_start != self.offset {\n ctx.update_spans(self.spans_start, self.offset - self.spans_start,\n &self.spans);\n self.spans.clear();\n }\n self.spans_start = self.offset;\n }\n\n fn do_highlighting(&mut self, mut ctx: PluginCtx<State>) {\n let syntax = match ctx.get_path() {\n Some(ref path) => self.syntax_set.find_syntax_for_file(path).unwrap()\n .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text()),\n None => self.syntax_set.find_syntax_plain_text(),\n };\n\n if syntax.name != self.syntax_name {\n self.syntax_name = syntax.name.clone();\n print_err!(\"syntect using {}\", syntax.name);\n }\n\n self.initial_state = Some((ParseState::new(syntax), ScopeStack::new()));\n self.spans = Vec::new();\n self.new_scopes = Vec::new();\n self.offset = 0;\n self.spans_start = 0;\n ctx.reset();\n ctx.schedule_idle(0);\n }\n}\n\nimpl<'a> state_cache::Plugin for PluginState<'a> {\n type State = State;\n\n fn initialize(&mut self, ctx: PluginCtx<State>, _buf_size: usize) {\n self.do_highlighting(ctx);\n }\n\n fn update(&mut self, mut ctx: PluginCtx<State>) {\n ctx.schedule_idle(0);\n }\n\n fn did_save(&mut self, ctx: PluginCtx<State>) {\n \/\/ TODO: use smarter logic to figure out whether we need to re-highlight the whole file\n self.do_highlighting(ctx);\n }\n\n fn idle(&mut self, mut ctx: PluginCtx<State>, _token: usize) {\n \/\/print_err!(\"idle task at offset {}\", self.offset);\n for _ in 0..LINES_PER_RPC {\n if !self.highlight_one_line(&mut ctx) {\n self.flush_spans(&mut ctx);\n return;\n }\n if ctx.request_is_pending() {\n print_err!(\"request pending at offset {}\", self.offset);\n break;\n }\n }\n self.flush_spans(&mut ctx);\n ctx.schedule_idle(0);\n }\n}\n\nfn main() {\n let syntax_set = SyntaxSet::load_defaults_newlines();\n let mut state = 
PluginState::new(&syntax_set);\n\n state_cache::mainloop(&mut state);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to Advent day 10<commit_after>\/\/ advent10.rs\n\/\/ look-and-say\n\nuse std::io;\n\nfn main() {\n \/\/ Part 1 and 2\n let mut input = String::new();\n\n io::stdin().read_line(&mut input)\n .ok()\n .expect(\"Failed to read line\");\n let mut seq = input.trim().to_string();\n\n \/\/ part 1\n for _ in 0..40 {\n seq = look_and_say(&seq);\n }\n println!(\"length 40:{}\", seq.len());\n\n \/\/ part 2\n for _ in 0..10 {\n seq = look_and_say(&seq);\n }\n println!(\"length 50:{}\", seq.len());\n}\n\n\/\/ Part 1\nfn look_and_say(s: &str) -> String {\n let mut say = String::new();\n if s.len() == 0 {\n return say;\n }\n\n let mut look = s.chars();\n\n let mut prev_char = look.next().unwrap();\n let mut run_length = 1;\n for c in look {\n if c != prev_char {\n say.push_str(&run_length.to_string());\n say.push(prev_char);\n run_length = 0;\n prev_char = c;\n }\n run_length += 1;\n }\n say.push_str(&run_length.to_string());\n say.push(prev_char);\n\n say\n}\n\n#[test]\nfn test_part1() {\n assert_eq!(\"11\", look_and_say(\"1\"));\n assert_eq!(\"21\", look_and_say(\"11\"));\n assert_eq!(\"1211\", look_and_say(\"21\"));\n assert_eq!(\"111221\", look_and_say(\"1211\"));\n assert_eq!(\"312211\", look_and_say(\"111221\"));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added dominates.rs<commit_after>\/\/\/ Returns true if the candidate dominates or equal the parent\npub fn dominates(offspring_objectives: [f32; 2], parent_objectives: [f32; 2]) -> bool {\n if (offspring_objectives[0] <= parent_objectives[0]) &&\n (offspring_objectives[1] <= parent_objectives[1]) {\n return true;\n }\n return false;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ansi mod<commit_after>const ESC: u8 = 27;\n\npub fn escape(sequence: &str) -> Vec<u8> {\n let mut ret = Vec::new();\n ret.push(ESC);\n ret.push_all(String::from_str(\"[\").as_bytes());\n ret.push_all(sequence.as_bytes());\n ret\n}\n\npub fn setpos(line: u16, column: u16) -> Vec<u8> {\n escape(format!(\"{};{}H\", line + 1, column + 1).as_slice())\n}\n\npub fn hide_cursor() -> Vec<u8> {\n escape(\"?25l\")\n}\n\npub fn show_cursor() -> Vec<u8> {\n escape(\"?25h\")\n}\n\npub fn inverse() -> Vec<u8> {\n escape(\"7m\")\n}\n\npub fn reset() -> Vec<u8> {\n escape(\"0m\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test(delete): refactor tests a little; add new ignored test that shows a failing case<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Chore(sync): log names of syncfiles when they can't be read<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(delete): fix bug with delete code not detecting a conflict, add test<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::EventBinding;\nuse dom::bindings::codegen::EventBinding::EventConstants;\nuse dom::bindings::js::JS;\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::bindings::error::Fallible;\nuse dom::eventtarget::EventTarget;\nuse dom::window::Window;\nuse servo_util::str::DOMString;\n\nuse geom::point::Point2D;\n\npub enum Event_ {\n ResizeEvent(uint, uint),\n ReflowEvent,\n ClickEvent(uint, Point2D<f32>),\n MouseDownEvent(uint, Point2D<f32>),\n MouseUpEvent(uint, Point2D<f32>),\n MouseMoveEvent(Point2D<f32>)\n}\n\n#[deriving(Encodable)]\npub enum EventPhase {\n PhaseNone = EventConstants::NONE,\n PhaseCapturing = EventConstants::CAPTURING_PHASE,\n PhaseAtTarget = EventConstants::AT_TARGET,\n PhaseBubbling = EventConstants::BUBBLING_PHASE,\n}\n\n#[deriving(Eq, Encodable)]\npub enum EventTypeId {\n HTMLEventTypeId,\n UIEventTypeId,\n MouseEventTypeId,\n KeyEventTypeId\n}\n\n#[deriving(Encodable)]\npub struct Event {\n type_id: EventTypeId,\n reflector_: Reflector,\n current_target: Option<JS<EventTarget>>,\n target: Option<JS<EventTarget>>,\n type_: DOMString,\n phase: EventPhase,\n canceled: bool,\n stop_propagation: bool,\n stop_immediate: bool,\n cancelable: bool,\n bubbles: bool,\n trusted: bool,\n dispatching: bool,\n initialized: bool,\n}\n\nimpl Event {\n pub fn new_inherited(type_id: EventTypeId) -> Event {\n Event {\n type_id: type_id,\n reflector_: Reflector::new(),\n current_target: None,\n target: None,\n phase: PhaseNone,\n type_: ~\"\",\n canceled: false,\n cancelable: true,\n bubbles: true,\n trusted: false,\n dispatching: false,\n stop_propagation: false,\n stop_immediate: false,\n initialized: false,\n }\n }\n\n pub fn new(window: &JS<Window>) -> JS<Event> {\n reflect_dom_object(~Event::new_inherited(HTMLEventTypeId),\n window,\n EventBinding::Wrap)\n }\n\n pub fn EventPhase(&self) -> u16 {\n self.phase as u16\n }\n\n pub fn Type(&self) -> DOMString {\n self.type_.clone()\n }\n\n pub fn GetTarget(&self) -> Option<JS<EventTarget>> {\n self.target.clone()\n }\n\n pub fn GetCurrentTarget(&self) -> Option<JS<EventTarget>> {\n self.current_target.clone()\n }\n\n pub fn DefaultPrevented(&self) -> bool {\n self.canceled\n }\n\n pub fn PreventDefault(&mut self) {\n if self.cancelable {\n self.canceled = true\n }\n }\n\n pub fn StopPropagation(&mut self) {\n self.stop_propagation = true;\n }\n\n pub fn StopImmediatePropagation(&mut self) {\n self.stop_immediate = true;\n self.stop_propagation = true;\n }\n\n pub fn Bubbles(&self) -> bool {\n self.bubbles\n }\n\n pub fn Cancelable(&self) -> bool {\n self.cancelable\n }\n\n pub fn TimeStamp(&self) -> u64 {\n 0\n }\n\n pub fn InitEvent(&mut self,\n type_: DOMString,\n bubbles: bool,\n cancelable: bool) {\n self.initialized = true;\n if self.dispatching {\n return;\n }\n self.stop_propagation = false;\n self.stop_immediate = false;\n self.canceled = false;\n self.trusted = false;\n self.target = None;\n self.type_ = type_;\n self.bubbles = bubbles;\n self.cancelable = cancelable;\n }\n\n pub fn IsTrusted(&self) -> bool {\n self.trusted\n }\n\n pub fn Constructor(global: &JS<Window>,\n type_: DOMString,\n init: &EventBinding::EventInit) -> Fallible<JS<Event>> {\n let mut ev = Event::new(global);\n ev.get_mut().InitEvent(type_, init.bubbles, init.cancelable);\n Ok(ev)\n }\n}\n\nimpl Reflectable for Event {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n\n fn mut_reflector<'a>(&'a mut self) -> &'a mut Reflector {\n &mut self.reflector_\n 
}\n}\n<commit_msg>#2183 Initialize Event::bubbles to false<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::EventBinding;\nuse dom::bindings::codegen::EventBinding::EventConstants;\nuse dom::bindings::js::JS;\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::bindings::error::Fallible;\nuse dom::eventtarget::EventTarget;\nuse dom::window::Window;\nuse servo_util::str::DOMString;\n\nuse geom::point::Point2D;\n\npub enum Event_ {\n ResizeEvent(uint, uint),\n ReflowEvent,\n ClickEvent(uint, Point2D<f32>),\n MouseDownEvent(uint, Point2D<f32>),\n MouseUpEvent(uint, Point2D<f32>),\n MouseMoveEvent(Point2D<f32>)\n}\n\n#[deriving(Encodable)]\npub enum EventPhase {\n PhaseNone = EventConstants::NONE,\n PhaseCapturing = EventConstants::CAPTURING_PHASE,\n PhaseAtTarget = EventConstants::AT_TARGET,\n PhaseBubbling = EventConstants::BUBBLING_PHASE,\n}\n\n#[deriving(Eq, Encodable)]\npub enum EventTypeId {\n HTMLEventTypeId,\n UIEventTypeId,\n MouseEventTypeId,\n KeyEventTypeId\n}\n\n#[deriving(Encodable)]\npub struct Event {\n type_id: EventTypeId,\n reflector_: Reflector,\n current_target: Option<JS<EventTarget>>,\n target: Option<JS<EventTarget>>,\n type_: DOMString,\n phase: EventPhase,\n canceled: bool,\n stop_propagation: bool,\n stop_immediate: bool,\n cancelable: bool,\n bubbles: bool,\n trusted: bool,\n dispatching: bool,\n initialized: bool,\n}\n\nimpl Event {\n pub fn new_inherited(type_id: EventTypeId) -> Event {\n Event {\n type_id: type_id,\n reflector_: Reflector::new(),\n current_target: None,\n target: None,\n phase: PhaseNone,\n type_: ~\"\",\n canceled: false,\n cancelable: true,\n bubbles: false,\n trusted: false,\n dispatching: false,\n stop_propagation: false,\n stop_immediate: false,\n initialized: false,\n }\n }\n\n pub fn new(window: &JS<Window>) -> JS<Event> {\n reflect_dom_object(~Event::new_inherited(HTMLEventTypeId),\n window,\n EventBinding::Wrap)\n }\n\n pub fn EventPhase(&self) -> u16 {\n self.phase as u16\n }\n\n pub fn Type(&self) -> DOMString {\n self.type_.clone()\n }\n\n pub fn GetTarget(&self) -> Option<JS<EventTarget>> {\n self.target.clone()\n }\n\n pub fn GetCurrentTarget(&self) -> Option<JS<EventTarget>> {\n self.current_target.clone()\n }\n\n pub fn DefaultPrevented(&self) -> bool {\n self.canceled\n }\n\n pub fn PreventDefault(&mut self) {\n if self.cancelable {\n self.canceled = true\n }\n }\n\n pub fn StopPropagation(&mut self) {\n self.stop_propagation = true;\n }\n\n pub fn StopImmediatePropagation(&mut self) {\n self.stop_immediate = true;\n self.stop_propagation = true;\n }\n\n pub fn Bubbles(&self) -> bool {\n self.bubbles\n }\n\n pub fn Cancelable(&self) -> bool {\n self.cancelable\n }\n\n pub fn TimeStamp(&self) -> u64 {\n 0\n }\n\n pub fn InitEvent(&mut self,\n type_: DOMString,\n bubbles: bool,\n cancelable: bool) {\n self.initialized = true;\n if self.dispatching {\n return;\n }\n self.stop_propagation = false;\n self.stop_immediate = false;\n self.canceled = false;\n self.trusted = false;\n self.target = None;\n self.type_ = type_;\n self.bubbles = bubbles;\n self.cancelable = cancelable;\n }\n\n pub fn IsTrusted(&self) -> bool {\n self.trusted\n }\n\n pub fn Constructor(global: &JS<Window>,\n type_: DOMString,\n init: &EventBinding::EventInit) -> Fallible<JS<Event>> {\n let mut ev = Event::new(global);\n 
ev.get_mut().InitEvent(type_, init.bubbles, init.cancelable);\n Ok(ev)\n }\n}\n\nimpl Reflectable for Event {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n\n fn mut_reflector<'a>(&'a mut self) -> &'a mut Reflector {\n &mut self.reflector_\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added function notes<commit_after>\/\/ A complilation of notes and lessons from https:\/\/doc.rust-lang.org\/book\/functions.html\n\nfn main() {\n \/\/ print_number(5);\n print_number(add_two_numbers(5, 5));\n print_number(semicolon_test());\n}\n\n\/\/ Similar to specifically typing variables (bindings) - specify the type of parameters like so\n\/\/ Note that you *must* specify types in the function declaration. You can't just hope the compiler\n\/\/ figures it out (like 'let')\nfn print_number(num : i32) {\n println!(\"Number is {}\", num);\n}\n\n\/\/ If you don't specify a return type it just defaults to something similar to \"void\" in C++\n\/\/ You can specify a return like so\n\/\/ Side note: I'm not a fan of the style of having the brace on the same line as the declaration,\n\/\/ especially with a return type there too since it's harder to see the return type at a glance\n\/\/ when skimming code. However, it appears that's the suggested way of doing it (both in the example\n\/\/ and the Rust syntax in my editor)\nfn add_two_numbers(num1 : i32, num2 : i32) -> i32 {\n \/\/ This is an interesting thing: the last line of a method determines what it'll return\n \/\/ And yes, not having a semicolon is intentional - if we had one we'd get a compilation error\n \/\/ saying that not all control paths return a value\n \/\/ This is because \"num1 + num2\" is an expression while \"num1 + num2;\" is a statement. An expression\n \/\/ returns a value inherantly while a statement does not\n num1 + num2\n}\n\n\/\/ Statements vs. Expressions is fundamental in Rust. Like it's said above, the difference between expressions\n\/\/ and statements is that expressions return a value while statements do not\n\/\/ Using 'let' to assign a value to a variable is *not* an expression and thus doesn't return a value, which\n\/\/ might be useful in chaining in other languages (eg. x = y = z = 5). This is what's called a 'declaration statement'.\n\/\/ Note that assigning to an already bound variable is an expression but the return result is an empty tuple.\n\/\/ So chaining is not useful even then.\n\/\/ However, like seen above you can not include a semicolon to have a statement become an expression and return a value\nfn semicolon_test() -> i32 {\n \/\/ The below returns 34 inherantly. But it must be the last thing in the method. I suppose it's a type of shorthand...\n \/\/ 34\n\n \/\/ This returns in a more traditional way\n \/\/ return 30;\n\n \/\/ This...also returns 24, I guess. Rust is weird.\n return 24\n\n \/\/ A note on the above: using the 'return' keyword is used for early returns. It can be used 'traditionally', like\n \/\/ in C++ or Java or similar, as the last return of a method but that's considered bad style. The Rust way is to\n \/\/ use the 'return' keyword only for early returns\n}\n\n\/\/ Diverging functions are functions in Rust that do not return. Here's an example:\n\/\/ You can set RUST_BACKTRACE environment variable to 1 to see the stack trace at the crash site\nfn diverges() -> ! 
{\n \/\/ Forces a crash\n panic!(\"this method never returns\");\n}\n\n\/\/ We can set a variable binding to a function pointer\nfn fn_pointer() {\n \/\/ Type inference can be used here - here's an example of not using it though\n let pointer1: fn(i32, i32) -> i32 = add_two_numbers;\n pointer1(32, 54);\n\n \/\/ Type inference for a function pointer\n let pointer2 = add_two_numbers;\n pointer2(21, 21);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for parsing HTTP headers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rework on the add subcommand<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>basic fluid simulation works<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed infinite loop<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add primitive bot code<commit_after>extern crate irc;\n\nuse irc::client::prelude::*;\nuse std::default::Default;\n\n\nfn main() {\n\tlet server = IrcServer::from_config(Config{\n\t\towners : Some(vec![\"nabijaczleweli\".to_string()]),\n\t\tnickname : Some(\"NabBot\".to_string()),\n\t\tusername : Some(\"NabBot\".to_string()),\n\t\trealname : Some(\"наб's IRC bot\".to_string()),\n\t\tserver : Some(\"chat.freenode.net\".to_string()),\n\t\tuse_ssl : Some(true),\n\t\tchannels : Some(vec![\"#loungecpp\".to_string()]),\n\t\tuser_info: Some(\"наб's IRC bot\".to_string()),\n\t\t..Default::default()\n\t}).unwrap();\n\tserver.identify().unwrap();\n\n\tfor message in server.iter() {\n\t\tif let Ok(message) = message {\n\t\t\tlet source_nickname = message.get_source_nickname().map(String::from);\n\n\t\t\tif let Ok(Command::PRIVMSG(target, msg)) = Command::from_message_io(Ok(message)) {\n\t\t\t\tif (target == \"NabBot\" && msg == \"Navaer\") || msg == \"Navaer, NabBot\" {\n\t\t\t\t\tserver.send_privmsg(&*&source_nickname.unwrap_or(target), \"Mára mesta\").unwrap();\n\t\t\t\t\tserver.send_quit(\"Mára mesta\").unwrap();\n\t\t\t\t} else if msg.contains(\"isn't\") || msg.contains(\"is not\") {\n\t\t\t\t\tprintln!(\"{}: {:?} -> Not your face\", source_nickname.as_ref().unwrap_or(&\"???\".to_string()), msg);\n\t\t\t\t\tserver.send_privmsg(&target, \"Quite unlike your face\").unwrap();\n\t\t\t\t} else if msg.contains(\"sucks\") || msg.contains(\"is\") {\n\t\t\t\t\tprintln!(\"{}: {:?} -> Your face\", source_nickname.as_ref().unwrap_or(&\"???\".to_string()), msg);\n\t\t\t\t\tserver.send_privmsg(&target, \"Much like your face\").unwrap();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n unsafe {\n let alloc = alloc(size);\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else { None }\n }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n unsafe {\n let realloc = realloc(self.address, size);\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None 
}\n }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unsafe {\n unalloc(self.address)\n }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if 
cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<commit_msg>Another Fix<commit_after>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n let alloc = unsafe { alloc(size) };\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else { None }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n let realloc = unsafe { realloc(self.address, size) };\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unsafe { unalloc(self.address) }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n 
set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += 
CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test module<commit_after>use super::{ IID_IDXGIFactory1, CreateDXGIFactory1 };\nuse libc::c_void;\nuse std::ptr;\n\n#[test]\nfn test() {\n\tlet mut factory: *mut c_void = ptr::null_mut();\n\tassert_eq!(0, unsafe { CreateDXGIFactory1(&IID_IDXGIFactory1, &mut factory) });\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix memset to work with llvm's intrinsic bullshit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>hover stats<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") => list(&rt),\n _ => unimplemented!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let mut stdin = stdin.lock();\n let mut line = String::new();\n\n if let Err(e) = stdin.read_line(&mut line) {\n trace_error(&e);\n exit(1);\n };\n\n if let Ok(ttask) = import_task(&line.as_str()) {\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n let uuid = *ttask.uuid();\n match ttask.into_filelockentry(rt.store()) {\n Ok(val) => {\n println!(\"Task {} stored in imag\", uuid);\n val\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n };\n } else {\n error!(\"No usable input\");\n exit(1);\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n let stdin = stdin.lock();\n\n if let Ok(ttasks) = import_tasks(stdin) {\n for ttask in ttasks {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match libimagtodo::delete::delete(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n _ => {\n }\n } \/\/ end match ttask.status()\n } \/\/ end if c % 2\n counter += 1;\n } \/\/ end for\n } else {\n error!(\"No usable input\");\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match libimagtodo::read::get_todo_iterator(rt.store()) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n Ok(val) => val,\n };\n\n for task in iter {\n match task {\n Ok(val) => {\n let uuid = match val.flentry.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n trace_error(&e);\n continue;\n }\n };\n\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{}\", uuid));\n args.push(format!(\"{}\", \"information\"));\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"failed\");\n });\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to execute: {}\", e);\n });\n println!(\"{}\", outstring);\n } else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n trace_error(&e);\n continue;\n }\n } \/\/ end match task\n } \/\/ end for\n}\n\n<commit_msg>Do not hide the error here, yell it out!<commit_after>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") => list(&rt),\n _ => unimplemented!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let mut stdin = stdin.lock();\n let mut line = String::new();\n\n if let Err(e) = stdin.read_line(&mut 
line) {\n trace_error(&e);\n exit(1);\n };\n\n if let Ok(ttask) = import_task(&line.as_str()) {\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n let uuid = *ttask.uuid();\n match ttask.into_filelockentry(rt.store()) {\n Ok(val) => {\n println!(\"Task {} stored in imag\", uuid);\n val\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n };\n } else {\n error!(\"No usable input\");\n exit(1);\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n let stdin = stdin.lock();\n\n match import_tasks(stdin) {\n Ok(ttasks) => for ttask in ttasks {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match libimagtodo::delete::delete(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n _ => {\n }\n } \/\/ end match ttask.status()\n } \/\/ end if c % 2\n counter += 1;\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match libimagtodo::read::get_todo_iterator(rt.store()) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n Ok(val) => val,\n };\n\n for task in iter {\n match task {\n Ok(val) => {\n let uuid = match val.flentry.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n trace_error(&e);\n continue;\n }\n };\n\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{}\", uuid));\n args.push(format!(\"{}\", \"information\"));\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"failed\");\n });\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to execute: {}\", e);\n });\n println!(\"{}\", outstring);\n } else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n trace_error(&e);\n continue;\n }\n } \/\/ end match task\n } \/\/ end for\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>implement VM executable<commit_after>extern crate lulzvm;\n\nextern crate env_logger;\n\n#[macro_use]\nextern crate clap;\n\nuse clap::{ArgGroup, ArgMatches, App};\nuse lulzvm::vm::VM;\nuse std::env;\nuse std::fs::File;\nuse std::io::{stdin, stdout, Read, Result};\n\nfn main() {\n let matches = App::new(\"LulzVM\")\n .args_from_usage(\"[FILE] 'Bytecode executable'\n -d, --debug 'Enable debug messages'\")\n .group(ArgGroup::with_name(\"required\")\n .args(&[\"FILE\"])\n .required(true))\n 
.get_matches();\n\n match do_checked_main(matches) {\n Ok(_) => (),\n Err(e) => println!(\"Error: {:?}\", e),\n }\n}\n\nfn do_checked_main(matches: ArgMatches) -> Result<()> {\n let executable_filename = matches.value_of(\"FILE\").unwrap();\n\n let mut executable = Vec::new();\n let mut executable_file = try!(File::open(executable_filename));\n let _ = try!(executable_file.read_to_end(&mut executable));\n\n if matches.is_present(\"debug\") {\n env::set_var(\"RUST_LOG\", \"lulzvm::vm=debug\");\n let _ = env_logger::init().unwrap();\n }\n\n let mut vm = VM::new(stdin(), stdout(), executable);\n vm.run();\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>PaddedBytes, problem 9 complete<commit_after>use byte_conversion::*;\n\npub struct PaddedBytes {\n\tciphertext: Vec<u8>\n}\n\nimpl PaddedBytes {\n\tpub fn from_text(text: &str, block_size: usize) -> Result<PaddedBytes, String> {\n\t\tPaddedBytes::from_bytes(&readable_text_to_bytes(&text)[..], block_size)\n\t}\n\n\tpub fn from_bytes(bytes: &[u8], block_size: usize) -> Result<PaddedBytes, String> {\n\t\tlet length = bytes.len();\n\t\tif length == 0 {\n\t\t\treturn Err(\"Attempted to pad 0 bytes.\".to_string());\n\t\t}\n\t\tif length > block_size {\n\t\t\treturn Err(\"Attempted to pad a block larger than the block size.\".to_string());\n\t\t}\n\n\t\tlet mut padded = bytes.to_vec();\n\t\tlet bytes_left: u8 = (block_size - length) as u8;\n\t\twhile padded.len() < block_size {\n\t\t\tpadded.push(bytes_left);\n\t\t}\n\n\t\tOk(\n\t\t\tPaddedBytes {\n\t\t\t\tciphertext: padded\n\t\t\t}\n\t\t)\n\t}\n\n\tpub fn bytes(&self) -> &[u8] {\n\t\t&self.ciphertext[..]\n\t}\n}\n\n\/\/-----------------------------------------------------------------------------\n\n#[cfg(test)]\n#[allow(non_snake_case)]\nmod test {\n\tuse super::*;\n\n\t#[test]\n\tfn PaddedText_empty() {\n\t\tlet text = PaddedBytes::from_text(\"\", 16);\n\n\t\tassert!(text.is_err());\n\t}\n\n\t#[test]\n\tfn PaddedText_yellow_submarine() {\n\t\tlet text = match PaddedBytes::from_text(\"YELLOW SUBMARINE\", 20) {\n\t\t\tOk(x) => x,\n\t\t\tErr(x) => panic!(x),\n\t\t};\n\n\t\tlet expected = ::byte_conversion::readable_text_to_bytes(&\"YELLOW SUBMARINE\\x04\\x04\\x04\\x04\");\n\n\t\tassert_eq!(expected, text.bytes());\n\t}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Keep track of custom emoji usage<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Forgot to add the notex to git<commit_after>\/\/ Notex - it's not a mutex, but it acts like one\nuse core::prelude::*;\nuse core::cell::UnsafeCell;\nuse core::ops::{Deref,DerefMut};\n\npub struct Notex<T> {\n pub data: UnsafeCell<T>,\n}\n\nunsafe impl<T: Send> Send for Notex<T> { }\nunsafe impl<T: Send> Sync for Notex<T> { }\n\npub struct HeldNotex<'a, T: 'a> {\n data: &'a UnsafeCell<T>,\n}\n\nimpl<T> Notex<T> {\n pub fn new(t: T) -> Notex<T> {\n Notex { data: UnsafeCell::new(t) }\n }\n\n #[inline]\n pub fn lock(&self) -> HeldNotex<T> {\n HeldNotex { data: &self.data }\n }\n}\n\nimpl<'lock, T> Deref for HeldNotex<'lock, T> {\n type Target = T;\n\n fn deref<'a>(&'a self) -> &'a T {\n unsafe { &*self.data.get() }\n }\n}\n\nimpl<'lock, T> DerefMut for HeldNotex<'lock, T> {\n fn deref_mut<'a>(&'a mut self) -> &'a mut T {\n unsafe { &mut *self.data.get() }\n }\n}\n\n#[macro_export]\nmacro_rules! 
notex {\n ($val:expr) => (\n $crate::notex::Notex {\n data: ::core::cell::UnsafeCell { value: $val }\n });\n ($ty:ty, $val:expr) => (\n $crate::notex::Notex<$ty> {\n data: ::core::cell::UnsafeCell<$ty> { value: $val }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>unbreak BNode::shunt<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>https:\/\/pt.stackoverflow.com\/q\/428633\/101<commit_after>y + x\ny.plus(x)\ny = &x\ny = AddressOf(x)\n*y = x\nValueOfAddress(y) == x\n\n\/\/https:\/\/pt.stackoverflow.com\/q\/428633\/101\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update curve25519 to use wrapping arithmetic functions<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[doc(keyword = \"fn\")]\n\/\/\n\/\/\/ The `fn` keyword.\n\/\/\/\n\/\/\/ The `fn` keyword is used to declare a function.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ fn some_function() {\n\/\/\/ \/\/ code goes in here\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ For more information about functions, take a look at the [Rust Book][book].\n\/\/\/\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch03-03-how-functions-work.html\nmod fn_keyword { }\n\n#[doc(keyword = \"let\")]\n\/\/\n\/\/\/ The `let` keyword.\n\/\/\/\n\/\/\/ The `let` keyword is used to declare a variable.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # #![allow(unused_assignments)]\n\/\/\/ let x = 3; \/\/ We create a variable named `x` with the value `3`.\n\/\/\/ ```\n\/\/\/\n\/\/\/ By default, all variables are **not** mutable. If you want a mutable variable,\n\/\/\/ you'll have to use the `mut` keyword.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # #![allow(unused_assignments)]\n\/\/\/ let mut x = 3; \/\/ We create a mutable variable named `x` with the value `3`.\n\/\/\/\n\/\/\/ x += 4; \/\/ `x` is now equal to `7`.\n\/\/\/ ```\n\/\/\/\n\/\/\/ For more information about the `let` keyword, take a look at the [Rust Book][book].\n\/\/\/\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch03-01-variables-and-mutability.html\nmod let_keyword { }\n\n#[doc(keyword = \"struct\")]\n\/\/\n\/\/\/ The keyword used to define structs.\n\/\/\/\n\/\/\/ Structs in Rust come in three flavours: Regular structs, tuple structs,\n\/\/\/ and empty structs.\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ struct Regular {\n\/\/\/ field1: f32,\n\/\/\/ field2: String,\n\/\/\/ pub field3: bool\n\/\/\/ }\n\/\/\/\n\/\/\/ struct Tuple(u32, String);\n\/\/\/\n\/\/\/ struct Empty;\n\/\/\/ ```\n\/\/\/\n\/\/\/ Regular structs are the most commonly used. Each field defined within them has a name and a\n\/\/\/ type, and once defined can be accessed using `example_struct.field` syntax. The fields of a\n\/\/\/ struct share its mutability, so `foo.bar = 2;` would only be valid if `foo` was mutable. 
Adding\n\/\/\/ `pub` to a field makes it visible to code in other modules, as well as allowing it to be\n\/\/\/ directly accessed and modified.\n\/\/\/\n\/\/\/ Tuple structs are similar to regular structs, but its fields have no names. They are used like\n\/\/\/ tuples, with deconstruction possible via `let TupleStruct(x, y) = foo;` syntax. For accessing\n\/\/\/ individual variables, the same syntax is used as with regular tuples, namely `foo.0`, `foo.1`,\n\/\/\/ etc, starting at zero.\n\/\/\/\n\/\/\/ Empty structs, or unit-like structs, are most commonly used as markers, for example\n\/\/\/ [`PhantomData`]. Empty structs have a size of zero bytes, but unlike empty enums they can be\n\/\/\/ instantiated, making them similar to the unit type `()`. Unit-like structs are useful when you\n\/\/\/ need to implement a trait on something, but don't need to store any data inside it.\n\/\/\/\n\/\/\/ # Instantiation\n\/\/\/\n\/\/\/ Structs can be instantiated in a manner of different ways, each of which can be mixed and\n\/\/\/ matched as needed. The most common way to make a new struct is via a constructor method such as\n\/\/\/ `new()`, but when that isn't available (or you're writing the constructor itself), struct\n\/\/\/ literal syntax is used:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # struct Foo { field1: f32, field2: String, etc: bool }\n\/\/\/ let example = Foo {\n\/\/\/ field1: 42.0,\n\/\/\/ field2: \"blah\".to_string(),\n\/\/\/ etc: true,\n\/\/\/ };\n\/\/\/ ```\n\/\/\/\n\/\/\/ It's only possible to directly instantiate a struct using struct literal syntax when all of its\n\/\/\/ fields are visible to you.\n\/\/\/\n\/\/\/ There are a handful of shortcuts provided to make writing constructors more convenient, most\n\/\/\/ common of which is the Field Init shorthand. When there is a variable and a field of the same\n\/\/\/ name, the assignment can be simplified from `field: field` into simply `field`. The following\n\/\/\/ example of a hypothetical constructor demonstrates this:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ struct User {\n\/\/\/ name: String,\n\/\/\/ admin: bool,\n\/\/\/ }\n\/\/\/\n\/\/\/ impl User {\n\/\/\/ pub fn new(name: String) -> Self {\n\/\/\/ Self {\n\/\/\/ name,\n\/\/\/ admin: false,\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Another shortcut for struct instantiation is available when you need to make a new struct that\n\/\/\/ shares most of a previous struct's values called struct update syntax:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # struct Foo { field1: String, field2: () }\n\/\/\/ # let thing = Foo { field1: \"\".to_string(), field2: () };\n\/\/\/ let updated_thing = Foo {\n\/\/\/ field1: \"a new value\".to_string(),\n\/\/\/ ..thing\n\/\/\/ };\n\/\/\/ ```\n\/\/\/\n\/\/\/ Tuple structs are instantiated in the same way as tuples themselves, except with the struct's\n\/\/\/ name as a prefix: `Foo(123, false, 0.1)`.\n\/\/\/\n\/\/\/ Empty structs are instantiated with just their name and nothing else. `let thing =\n\/\/\/ EmptyStruct;`\n\/\/\/\n\/\/\/\n\/\/\/ # Style conventions\n\/\/\/\n\/\/\/ Structs are always written in CamelCase, with few exceptions. 
While the trailing comma on a\n\/\/\/ struct's list of fields can be omitted, it's usually kept for convenience in adding and\n\/\/\/ removing fields down the line.\n\/\/\/\n\/\/\/ For more information on structs, take a look at the [Rust Book][book] or the\n\/\/\/ [Reference][reference].\n\/\/\/\n\/\/\/ [`PhantomData`]: marker\/struct.PhantomData.html\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch05-01-defining-structs.html\n\/\/\/ [reference]: https:\/\/doc.rust-lang.org\/reference\/items\/structs.html\n\nmod struct_keyword { }\n<commit_msg>Add docs for `as` keyword<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[doc(keyword = \"as\")]\n\/\/\n\/\/\/ The type coercion keyword\n\/\/\/\n\/\/\/ `as` is most commonly used to turn primitive types into other primitive types, but it has other\n\/\/\/ uses that include turning pointers into addresses, addresses into pointers, and pointers into\n\/\/\/ other pointers.\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let thing1: u8 = 89.0 as u8;\n\/\/\/ assert_eq!('B' as u32, 66);\n\/\/\/ assert_eq!(thing1 as char, 'Y');\n\/\/\/ let thing2: f32 = thing1 as f32 + 10.5;\n\/\/\/ assert_eq!(true as u8 + thing2 as u8, 100);\n\/\/\/ ```\n\/\/\/\n\/\/\/ In general, any coercion that can be performed via writing out type hints can also be done\n\/\/\/ using `as`, so instead of writing `let x: u32 = 123`, you can write `let x = 123 as u32` (Note:\n\/\/\/ `let x = 123u32` would be best in that situation). The same is not true in the other direction,\n\/\/\/ however, explicitly using `as` allows a few more coercions that aren't allowed implicitly, such\n\/\/\/ as changing the type of a raw pointer or turning closures into raw pointers.\n\/\/\/\n\/\/\/ For more information on what `as` is capable of, see the [Reference]\n\/\/\/\n\/\/\/ [Reference]: https:\/\/doc.rust-lang.org\/reference\/expressions\/operator-expr.html#type-cast-expressions\nmod as_keyword { }\n\n#[doc(keyword = \"fn\")]\n\/\/\n\/\/\/ The `fn` keyword.\n\/\/\/\n\/\/\/ The `fn` keyword is used to declare a function.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ fn some_function() {\n\/\/\/ \/\/ code goes in here\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ For more information about functions, take a look at the [Rust Book][book].\n\/\/\/\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch03-03-how-functions-work.html\nmod fn_keyword { }\n\n#[doc(keyword = \"let\")]\n\/\/\n\/\/\/ The `let` keyword.\n\/\/\/\n\/\/\/ The `let` keyword is used to declare a variable.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # #![allow(unused_assignments)]\n\/\/\/ let x = 3; \/\/ We create a variable named `x` with the value `3`.\n\/\/\/ ```\n\/\/\/\n\/\/\/ By default, all variables are **not** mutable. 
If you want a mutable variable,\n\/\/\/ you'll have to use the `mut` keyword.\n\/\/\/\n\/\/\/ Example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # #![allow(unused_assignments)]\n\/\/\/ let mut x = 3; \/\/ We create a mutable variable named `x` with the value `3`.\n\/\/\/\n\/\/\/ x += 4; \/\/ `x` is now equal to `7`.\n\/\/\/ ```\n\/\/\/\n\/\/\/ For more information about the `let` keyword, take a look at the [Rust Book][book].\n\/\/\/\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch03-01-variables-and-mutability.html\nmod let_keyword { }\n\n#[doc(keyword = \"struct\")]\n\/\/\n\/\/\/ The keyword used to define structs.\n\/\/\/\n\/\/\/ Structs in Rust come in three flavours: Regular structs, tuple structs,\n\/\/\/ and empty structs.\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ struct Regular {\n\/\/\/ field1: f32,\n\/\/\/ field2: String,\n\/\/\/ pub field3: bool\n\/\/\/ }\n\/\/\/\n\/\/\/ struct Tuple(u32, String);\n\/\/\/\n\/\/\/ struct Empty;\n\/\/\/ ```\n\/\/\/\n\/\/\/ Regular structs are the most commonly used. Each field defined within them has a name and a\n\/\/\/ type, and once defined can be accessed using `example_struct.field` syntax. The fields of a\n\/\/\/ struct share its mutability, so `foo.bar = 2;` would only be valid if `foo` was mutable. Adding\n\/\/\/ `pub` to a field makes it visible to code in other modules, as well as allowing it to be\n\/\/\/ directly accessed and modified.\n\/\/\/\n\/\/\/ Tuple structs are similar to regular structs, but its fields have no names. They are used like\n\/\/\/ tuples, with deconstruction possible via `let TupleStruct(x, y) = foo;` syntax. For accessing\n\/\/\/ individual variables, the same syntax is used as with regular tuples, namely `foo.0`, `foo.1`,\n\/\/\/ etc, starting at zero.\n\/\/\/\n\/\/\/ Empty structs, or unit-like structs, are most commonly used as markers, for example\n\/\/\/ [`PhantomData`]. Empty structs have a size of zero bytes, but unlike empty enums they can be\n\/\/\/ instantiated, making them similar to the unit type `()`. Unit-like structs are useful when you\n\/\/\/ need to implement a trait on something, but don't need to store any data inside it.\n\/\/\/\n\/\/\/ # Instantiation\n\/\/\/\n\/\/\/ Structs can be instantiated in a manner of different ways, each of which can be mixed and\n\/\/\/ matched as needed. The most common way to make a new struct is via a constructor method such as\n\/\/\/ `new()`, but when that isn't available (or you're writing the constructor itself), struct\n\/\/\/ literal syntax is used:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # struct Foo { field1: f32, field2: String, etc: bool }\n\/\/\/ let example = Foo {\n\/\/\/ field1: 42.0,\n\/\/\/ field2: \"blah\".to_string(),\n\/\/\/ etc: true,\n\/\/\/ };\n\/\/\/ ```\n\/\/\/\n\/\/\/ It's only possible to directly instantiate a struct using struct literal syntax when all of its\n\/\/\/ fields are visible to you.\n\/\/\/\n\/\/\/ There are a handful of shortcuts provided to make writing constructors more convenient, most\n\/\/\/ common of which is the Field Init shorthand. When there is a variable and a field of the same\n\/\/\/ name, the assignment can be simplified from `field: field` into simply `field`. 
The following\n\/\/\/ example of a hypothetical constructor demonstrates this:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ struct User {\n\/\/\/ name: String,\n\/\/\/ admin: bool,\n\/\/\/ }\n\/\/\/\n\/\/\/ impl User {\n\/\/\/ pub fn new(name: String) -> Self {\n\/\/\/ Self {\n\/\/\/ name,\n\/\/\/ admin: false,\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Another shortcut for struct instantiation is available when you need to make a new struct that\n\/\/\/ shares most of a previous struct's values called struct update syntax:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # struct Foo { field1: String, field2: () }\n\/\/\/ # let thing = Foo { field1: \"\".to_string(), field2: () };\n\/\/\/ let updated_thing = Foo {\n\/\/\/ field1: \"a new value\".to_string(),\n\/\/\/ ..thing\n\/\/\/ };\n\/\/\/ ```\n\/\/\/\n\/\/\/ Tuple structs are instantiated in the same way as tuples themselves, except with the struct's\n\/\/\/ name as a prefix: `Foo(123, false, 0.1)`.\n\/\/\/\n\/\/\/ Empty structs are instantiated with just their name and nothing else. `let thing =\n\/\/\/ EmptyStruct;`\n\/\/\/\n\/\/\/\n\/\/\/ # Style conventions\n\/\/\/\n\/\/\/ Structs are always written in CamelCase, with few exceptions. While the trailing comma on a\n\/\/\/ struct's list of fields can be omitted, it's usually kept for convenience in adding and\n\/\/\/ removing fields down the line.\n\/\/\/\n\/\/\/ For more information on structs, take a look at the [Rust Book][book] or the\n\/\/\/ [Reference][reference].\n\/\/\/\n\/\/\/ [`PhantomData`]: marker\/struct.PhantomData.html\n\/\/\/ [book]: https:\/\/doc.rust-lang.org\/book\/second-edition\/ch05-01-defining-structs.html\n\/\/\/ [reference]: https:\/\/doc.rust-lang.org\/reference\/items\/structs.html\n\nmod struct_keyword { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fast print<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test case for issue #2734 (xfailed)<commit_after>\/\/ xfail-test\niface hax { } \nimpl <A> of hax for A { } \n\nfn perform_hax<T>(x: @T) -> hax {\n x as hax \n}\n\nfn deadcode() {\n perform_hax(@\"deadcode\");\n}\n\nfn main() {\n let _ = perform_hax(@42);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[MIR] Add test for `box EXPR` dereferencing<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs, box_syntax)]\n\n#[rustc_mir]\nfn test() -> Box<i32> {\n box 42\n}\n\nfn main() {\n assert_eq!(*test(), 42);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 Nathan Sizemore <nathanrsizemore@gmail.com>\n\/\/\n\/\/ This Source Code Form is subject to the terms of the\n\/\/ Mozilla Public License, v. 2.0. If a copy of the MPL was not\n\/\/ distributed with this file, You can obtain one at\n\/\/ http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\n\/\/! The frame module provides a structred way to send and receive\n\/\/! message through streams.\n\/\/!\n\/\/! ## Data Framing\n\/\/!\n\/\/! ```ignore\n\/\/! 0 1 2 3\n\/\/! 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0\n\/\/! +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\/\/! 
| Frame Start | Payload Len | Payload |\n\/\/! +-----------------------------------------------------------+\n\/\/! | Payload Data Continued | Frame End |\n\/\/! +-----------------------------------------------------------+\n\/\/!\n\/\/! Start Frame: 8 bits, must be 0x01\n\/\/! Payload Len: 16 bits\n\/\/! Payload Data: (Payload Len) bytes\n\/\/! End Frame: 8 bits, must be 0x17\n\/\/! ```\n\n\nuse std::mem;\n\n\npub const START_BYTE: u8 = 0x01;\npub const END_BYTE: u8 = 0x17;\n\n\npub fn new(buf: &[u8]) -> Vec<u8> {\n let buf_len = buf.len() as u16;\n let mut ret_buf = Vec::<u8>::with_capacity(buf.len() + 4);\n\n ret_buf.push(START_BYTE);\n ret_buf.push((buf_len >> 8) as u8);\n ret_buf.push(buf_len as u8);\n ret_buf.extend_from_slice(buf);\n ret_buf.push(END_BYTE);\n\n ret_buf\n}\n\npub fn from_raw_parts(buf: &mut Vec<u8>) -> Option<Vec<u8>> {\n if buf.len() < 5 {\n trace!(\"buf.len not large enough to process\");\n return None;\n }\n\n if buf[0] != START_BYTE {\n trace!(\"buf[0] was not START_BYTE\");\n let mut new_buf = Vec::<u8>::with_capacity(1024);\n mem::swap(&mut new_buf, buf);\n return None;\n }\n\n let mask = 0xFFFFu16;\n let mut payload_len = ((buf[1] as u16) << 8) & mask;\n payload_len |= buf[2] as u16;\n\n trace!(\"Payload Len: {}\", payload_len);\n\n let payload_len = payload_len as usize;\n if (buf.len() - 4) < payload_len {\n trace!(\"Not enough in buf for expected payload\\nExpected: {}\\nActual: {}\",\n payload_len,\n (buf.len() - 4));\n return None;\n }\n\n if buf[payload_len] != END_BYTE {\n trace!(\"END_BYTE was not at expected location. Swapping for a fresh buffer\");\n let mut new_buf = Vec::<u8>::with_capacity(1024);\n mem::swap(&mut new_buf, buf);\n return None;\n }\n\n let mut ret_buf = Vec::<u8>::with_capacity(payload_len);\n ret_buf.extend_from_slice(&buf[3..payload_len]);\n\n let buf_len = buf.len();\n let mut remaining_buf = Vec::<u8>::with_capacity(buf.len() - (payload_len + 4));\n remaining_buf.extend_from_slice(&buf[payload_len..buf_len]);\n mem::swap(buf, &mut remaining_buf);\n\n trace!(\"Complete frame read\");\n\n Some(ret_buf)\n}\n<commit_msg>More debug msgs<commit_after>\/\/ Copyright 2015 Nathan Sizemore <nathanrsizemore@gmail.com>\n\/\/\n\/\/ This Source Code Form is subject to the terms of the\n\/\/ Mozilla Public License, v. 2.0. If a copy of the MPL was not\n\/\/ distributed with this file, You can obtain one at\n\/\/ http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\n\/\/! The frame module provides a structred way to send and receive\n\/\/! message through streams.\n\/\/!\n\/\/! ## Data Framing\n\/\/!\n\/\/! ```ignore\n\/\/! 0 1 2 3\n\/\/! 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0\n\/\/! +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\/\/! | Frame Start | Payload Len | Payload |\n\/\/! +-----------------------------------------------------------+\n\/\/! | Payload Data Continued | Frame End |\n\/\/! +-----------------------------------------------------------+\n\/\/!\n\/\/! Start Frame: 8 bits, must be 0x01\n\/\/! Payload Len: 16 bits\n\/\/! Payload Data: (Payload Len) bytes\n\/\/! End Frame: 8 bits, must be 0x17\n\/\/! 
```\n\n\nuse std::mem;\n\n\npub const START_BYTE: u8 = 0x01;\npub const END_BYTE: u8 = 0x17;\n\n\npub fn new(buf: &[u8]) -> Vec<u8> {\n let buf_len = buf.len() as u16;\n let mut ret_buf = Vec::<u8>::with_capacity(buf.len() + 4);\n\n ret_buf.push(START_BYTE);\n ret_buf.push((buf_len >> 8) as u8);\n ret_buf.push(buf_len as u8);\n ret_buf.extend_from_slice(buf);\n ret_buf.push(END_BYTE);\n\n ret_buf\n}\n\npub fn from_raw_parts(buf: &mut Vec<u8>) -> Option<Vec<u8>> {\n if buf.len() < 5 {\n trace!(\"buf.len not large enough to process\");\n return None;\n }\n\n if buf[0] != START_BYTE {\n trace!(\"buf[0] was not START_BYTE\");\n let mut new_buf = Vec::<u8>::with_capacity(1024);\n mem::swap(&mut new_buf, buf);\n return None;\n }\n\n let mask = 0xFFFFu16;\n let mut payload_len = ((buf[1] as u16) << 8) & mask;\n payload_len |= buf[2] as u16;\n\n trace!(\"buf.len: {}\", buf.len());\n trace!(\"Payload Len: {}\", payload_len);\n\n let payload_len = payload_len as usize;\n if (buf.len() - 4) < payload_len {\n trace!(\"Not enough in buf for expected payload\\nExpected: {}\\nActual: {}\",\n payload_len,\n (buf.len() - 4));\n return None;\n }\n\n if buf[payload_len] != END_BYTE {\n trace!(\"END_BYTE was not at expected location. Swapping for a fresh buffer\");\n let mut new_buf = Vec::<u8>::with_capacity(1024);\n mem::swap(&mut new_buf, buf);\n return None;\n }\n\n let mut ret_buf = Vec::<u8>::with_capacity(payload_len);\n ret_buf.extend_from_slice(&buf[3..payload_len]);\n\n let buf_len = buf.len();\n let mut remaining_buf = Vec::<u8>::with_capacity(buf.len() - (payload_len + 4));\n remaining_buf.extend_from_slice(&buf[payload_len..buf_len]);\n mem::swap(buf, &mut remaining_buf);\n\n trace!(\"Complete frame read\");\n\n Some(ret_buf)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add queries to create tables and insert emoji<commit_after><|endoftext|>"} {"text":"<commit_before>use std::hash::{Hash};\nuse std::collections::HashMap;\nuse std::collections::HashSet;\nuse std::collections::RingBuf;\nuse std::iter::Map;\nuse std::collections::hash_map::{\n Keys,\n Occupied,\n Vacant,\n};\nuse std::slice::{\n Items,\n};\nuse std::fmt;\n\n\/\/\/ **Graph\\<N, E\\>** is a regular graph, with generic node values **N** and edge weights **E**.\n\/\/\/\n\/\/\/ It uses an adjacency list representation, i.e. 
using *O(|V| + |E|)* space.\n\/\/\/\n\/\/\/ The node type must be suitable as a hash table key (Implementing **Eq + Hash**)\n\/\/\/ as well as being a simple type.\n\/\/\/\n\/\/\/ The node type must implement **PartialOrd** so that the implementation can\n\/\/\/ properly order the pair (**a**, **b**) for an edge connecting any two nodes **a** and **b**.\n#[deriving(Clone)]\npub struct Graph<N: Eq + Hash, E> {\n nodes: HashMap<N, Vec<N>>,\n edges: HashMap<(N, N), E>,\n}\n\nimpl<N: Eq + Hash + fmt::Show, E: fmt::Show> fmt::Show for Graph<N, E>\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.nodes.fmt(f)\n }\n}\n\n#[inline]\nfn edge_key<N: Copy + PartialOrd>(a: N, b: N) -> (N, N)\n{\n if a <= b { (a, b) } else { (b, a) }\n}\n\n#[inline]\nfn copy<N: Copy>(n: &N) -> N { *n }\n\nimpl<N, E> Graph<N, E> where N: Copy + PartialOrd + Eq + Hash\n{\n \/\/\/ Create a new **Graph**.\n pub fn new() -> Graph<N, E>\n {\n Graph {\n nodes: HashMap::new(),\n edges: HashMap::new(),\n }\n }\n\n \/\/\/ Add node **n** to the graph.\n pub fn add_node(&mut self, n: N) -> N {\n match self.nodes.entry(n) {\n Occupied(_) => {}\n Vacant(ent) => { ent.set(Vec::new()); }\n }\n n\n }\n\n \/\/\/ Return **true** if node **n** was removed.\n pub fn remove_node(&mut self, n: N) -> bool {\n let successors = match self.nodes.remove(&n) {\n None => return false,\n Some(sus) => sus,\n };\n for succ in successors.into_iter() {\n \/\/ remove all successor links\n self.remove_single_edge(&succ, &n);\n \/\/ Remove all edge values\n self.edges.remove(&edge_key(n, succ));\n }\n true\n }\n\n \/\/\/ Return **true** if the node is contained in the graph.\n pub fn contains_node(&self, n: N) -> bool {\n self.nodes.contains_key(&n)\n }\n\n \/\/\/ Add an edge connecting **a** and **b** to the graph.\n \/\/\/\n \/\/\/ Return **true** if edge did not previously exist.\n pub fn add_edge(&mut self, a: N, b: N, edge: E) -> bool\n {\n \/\/ Use PartialOrd to order the edges\n match self.nodes.entry(a) {\n Occupied(ent) => { ent.into_mut().push(b); }\n Vacant(ent) => { ent.set(vec![b]); }\n }\n match self.nodes.entry(b) {\n Occupied(ent) => { ent.into_mut().push(a); }\n Vacant(ent) => { ent.set(vec![a]); }\n }\n self.edges.insert(edge_key(a, b), edge).is_none()\n }\n\n \/\/\/ Remove successor relation from a to b\n fn remove_single_edge(&mut self, a: &N, b: &N) {\n match self.nodes.get_mut(a) {\n None => {}\n Some(sus) => {\n match sus.iter().position(|elt| elt == b) {\n Some(index) => { sus.swap_remove(index); }\n None => {}\n }\n }\n }\n }\n\n \/\/\/ Remove edge from **a** to **b** from the graph.\n \/\/\/\n \/\/\/ Return **None** if the edge didn't exist.\n pub fn remove_edge(&mut self, a: N, b: N) -> Option<E>\n {\n self.remove_single_edge(&a, &b);\n self.remove_single_edge(&b, &a);\n self.edges.remove(&edge_key(a, b))\n }\n\n \/\/\/ Return **true** if the edge connecting **a** with **b** is contained in the graph.\n pub fn contains_edge(&self, a: N, b: N) -> bool {\n self.edges.contains_key(&edge_key(a, b))\n }\n\n \/\/\/ Return an iterator over the nodes of the graph.\n \/\/\/\n \/\/\/ Iterator element type is **&'a N**.\n pub fn nodes<'a>(&'a self) -> Nodes<'a, N>\n {\n Nodes{iter: self.nodes.keys()}\n }\n\n \/\/\/ Return an iterator over the nodes that are connected with **from** by edges.\n \/\/\/\n \/\/\/ If the node **from** does not exist in the graph, return an empty iterator.\n \/\/\/\n \/\/\/ Iterator element type is **&'a N**.\n pub fn neighbors<'a>(&'a self, from: N) -> Neighbors<'a, N>\n {\n Neighbors{iter:\n 
match self.nodes.get(&from) {\n Some(neigh) => neigh.iter(),\n None => [].iter(),\n }.map(copy)\n }\n }\n\n \/\/\/ Return an iterator over the nodes that are connected with **from** by edges,\n \/\/\/ paired with the edge weight.\n \/\/\/\n \/\/\/ If the node **from** does not exist in the graph, return an empty iterator.\n \/\/\/\n \/\/\/ Iterator element type is **(N, &'a E)**.\n pub fn edges<'a>(&'a self, from: N) -> Edges<'a, N, E>\n {\n Edges {\n from: from,\n iter: self.neighbors(from),\n edges: &self.edges,\n }\n }\n\n \/\/\/ Return a reference to the edge weight connecting **a** with **b**, or\n \/\/\/ **None** if the edge does not exist in the graph.\n pub fn edge<'a>(&'a self, a: N, b: N) -> Option<&'a E>\n {\n self.edges.get(&edge_key(a, b))\n }\n\n \/\/\/ Return a mutable reference to the edge weight connecting **a** with **b**, or\n \/\/\/ **None** if the edge does not exist in the graph.\n pub fn edge_mut<'a>(&'a mut self, a: N, b: N) -> Option<&'a mut E>\n {\n self.edges.get_mut(&edge_key(a, b))\n }\n\n pub fn traverse_breadth_first(&self, start: N) -> BreadthFirstTraversal<N, E>\n {\n BreadthFirstTraversal{\n graph: self,\n stack: {\n let mut stack = RingBuf::new();\n stack.push_back(start);\n stack\n },\n visited: HashSet::new(),\n }\n }\n\n pub fn traverse_depth_first(&self, start: N) -> DepthFirstTraversal<N, E>\n {\n DepthFirstTraversal{\n graph: self,\n stack: vec![start],\n visited: HashSet::new(),\n }\n }\n}\n\nmacro_rules! iterator_methods(\n ($elt_type:ty) => (\n #[inline]\n fn next(&mut self) -> Option<$elt_type>\n {\n self.iter.next()\n }\n\n #[inline]\n fn size_hint(&self) -> (uint, Option<uint>)\n {\n self.iter.size_hint()\n }\n )\n);\n\npub struct Nodes<'a, N: 'a> {\n iter: Keys<'a, N, Vec<N>>\n}\n\nimpl<'a, N: 'a> Iterator<&'a N> for Nodes<'a, N>\n{\n iterator_methods!(&'a N);\n}\n\npub struct Neighbors<'a, N: 'a> {\n iter: Map<&'a N, N, Items<'a, N>, fn(&N) -> N>,\n}\n\nimpl<'a, N> Iterator<N> for Neighbors<'a, N>\n{\n iterator_methods!(N);\n}\n\npub struct Edges<'a, N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a> {\n pub from: N,\n pub edges: &'a HashMap<(N, N), E>,\n pub iter: Neighbors<'a, N>,\n}\n\nimpl<'a, N, E> Iterator<(N, &'a E)> for Edges<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<(N, &'a E)>\n {\n match self.iter.next() {\n None => None,\n Some(b) => {\n let a = self.from;\n match self.edges.get(&edge_key(a, b)) {\n None => unreachable!(),\n Some(edge) => {\n Some((b, edge))\n }\n }\n }\n }\n }\n}\n\npub struct BFT<'a, G, N, F, Neighbors>\n where\n G: 'a,\n N: 'a + Copy + PartialOrd + Eq + Hash,\n F: FnMut(&G, N) -> Neighbors,\n Neighbors: Iterator<N>,\n{\n pub graph: &'a G,\n pub stack: RingBuf<N>,\n pub visited: HashSet<N>,\n pub neighbors: F,\n}\n\nimpl<'a, G, N, F, Neighbors> Iterator<N> for BFT<'a, G, N, F, Neighbors>\n where\n G: 'a,\n N: 'a + Copy + PartialOrd + Eq + Hash,\n F: FnMut(&G, N) -> Neighbors,\n Neighbors: Iterator<N>,\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop_front() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in (self.neighbors)(self.graph, node) {\n if !self.visited.contains(&succ) {\n self.stack.push_back(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n\npub struct BreadthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n graph: &'a Graph<N, E>,\n stack: RingBuf<N>,\n visited: HashSet<N>,\n}\n\nimpl<'a, N, E> Iterator<N> for BreadthFirstTraversal<'a, N, E>\n where N: 'a + Copy 
+ PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop_front() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in self.graph.neighbors(node) {\n if !self.visited.contains(&succ) {\n self.stack.push_back(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n\npub struct DepthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n graph: &'a Graph<N, E>,\n stack: Vec<N>,\n visited: HashSet<N>,\n}\n\nimpl<'a, N, E> Iterator<N> for DepthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in self.graph.neighbors(node) {\n if !self.visited.contains(&succ) {\n self.stack.push(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n<commit_msg>Fix doc<commit_after>use std::hash::{Hash};\nuse std::collections::HashMap;\nuse std::collections::HashSet;\nuse std::collections::RingBuf;\nuse std::iter::Map;\nuse std::collections::hash_map::{\n Keys,\n Occupied,\n Vacant,\n};\nuse std::slice::{\n Items,\n};\nuse std::fmt;\n\n\/\/\/ **Graph\\<N, E\\>** is a regular graph, with generic node values **N** and edge weights **E**.\n\/\/\/\n\/\/\/ It uses an adjacency list representation, i.e. using *O(|V| + |E|)* space.\n\/\/\/\n\/\/\/ The node type must be suitable as a hash table key (Implementing **Eq + Hash**)\n\/\/\/ as well as being a simple type.\n\/\/\/\n\/\/\/ The node type must implement **PartialOrd** so that the implementation can\n\/\/\/ properly order the pair (**a**, **b**) for an edge connecting any two nodes **a** and **b**.\n#[deriving(Clone)]\npub struct Graph<N: Eq + Hash, E> {\n nodes: HashMap<N, Vec<N>>,\n edges: HashMap<(N, N), E>,\n}\n\nimpl<N: Eq + Hash + fmt::Show, E: fmt::Show> fmt::Show for Graph<N, E>\n{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.nodes.fmt(f)\n }\n}\n\n#[inline]\nfn edge_key<N: Copy + PartialOrd>(a: N, b: N) -> (N, N)\n{\n if a <= b { (a, b) } else { (b, a) }\n}\n\n#[inline]\nfn copy<N: Copy>(n: &N) -> N { *n }\n\nimpl<N, E> Graph<N, E> where N: Copy + PartialOrd + Eq + Hash\n{\n \/\/\/ Create a new **Graph**.\n pub fn new() -> Graph<N, E>\n {\n Graph {\n nodes: HashMap::new(),\n edges: HashMap::new(),\n }\n }\n\n \/\/\/ Add node **n** to the graph.\n pub fn add_node(&mut self, n: N) -> N {\n match self.nodes.entry(n) {\n Occupied(_) => {}\n Vacant(ent) => { ent.set(Vec::new()); }\n }\n n\n }\n\n \/\/\/ Return **true** if node **n** was removed.\n pub fn remove_node(&mut self, n: N) -> bool {\n let successors = match self.nodes.remove(&n) {\n None => return false,\n Some(sus) => sus,\n };\n for succ in successors.into_iter() {\n \/\/ remove all successor links\n self.remove_single_edge(&succ, &n);\n \/\/ Remove all edge values\n self.edges.remove(&edge_key(n, succ));\n }\n true\n }\n\n \/\/\/ Return **true** if the node is contained in the graph.\n pub fn contains_node(&self, n: N) -> bool {\n self.nodes.contains_key(&n)\n }\n\n \/\/\/ Add an edge connecting **a** and **b** to the graph.\n \/\/\/\n \/\/\/ Return **true** if edge did not previously exist.\n pub fn add_edge(&mut self, a: N, b: N, edge: E) -> bool\n {\n \/\/ Use PartialOrd to order the edges\n match self.nodes.entry(a) {\n Occupied(ent) => { ent.into_mut().push(b); }\n Vacant(ent) => { ent.set(vec![b]); }\n }\n match self.nodes.entry(b) {\n Occupied(ent) => { ent.into_mut().push(a); }\n Vacant(ent) => { 
ent.set(vec![a]); }\n }\n self.edges.insert(edge_key(a, b), edge).is_none()\n }\n\n \/\/\/ Remove successor relation from a to b\n fn remove_single_edge(&mut self, a: &N, b: &N) {\n match self.nodes.get_mut(a) {\n None => {}\n Some(sus) => {\n match sus.iter().position(|elt| elt == b) {\n Some(index) => { sus.swap_remove(index); }\n None => {}\n }\n }\n }\n }\n\n \/\/\/ Remove edge from **a** to **b** from the graph.\n \/\/\/\n \/\/\/ Return **None** if the edge didn't exist.\n pub fn remove_edge(&mut self, a: N, b: N) -> Option<E>\n {\n self.remove_single_edge(&a, &b);\n self.remove_single_edge(&b, &a);\n self.edges.remove(&edge_key(a, b))\n }\n\n \/\/\/ Return **true** if the edge connecting **a** with **b** is contained in the graph.\n pub fn contains_edge(&self, a: N, b: N) -> bool {\n self.edges.contains_key(&edge_key(a, b))\n }\n\n \/\/\/ Return an iterator over the nodes of the graph.\n \/\/\/\n \/\/\/ Iterator element type is **&'a N**.\n pub fn nodes<'a>(&'a self) -> Nodes<'a, N>\n {\n Nodes{iter: self.nodes.keys()}\n }\n\n \/\/\/ Return an iterator over the nodes that are connected with **from** by edges.\n \/\/\/\n \/\/\/ If the node **from** does not exist in the graph, return an empty iterator.\n \/\/\/\n \/\/\/ Iterator element type is **N**.\n pub fn neighbors<'a>(&'a self, from: N) -> Neighbors<'a, N>\n {\n Neighbors{iter:\n match self.nodes.get(&from) {\n Some(neigh) => neigh.iter(),\n None => [].iter(),\n }.map(copy)\n }\n }\n\n \/\/\/ Return an iterator over the nodes that are connected with **from** by edges,\n \/\/\/ paired with the edge weight.\n \/\/\/\n \/\/\/ If the node **from** does not exist in the graph, return an empty iterator.\n \/\/\/\n \/\/\/ Iterator element type is **(N, &'a E)**.\n pub fn edges<'a>(&'a self, from: N) -> Edges<'a, N, E>\n {\n Edges {\n from: from,\n iter: self.neighbors(from),\n edges: &self.edges,\n }\n }\n\n \/\/\/ Return a reference to the edge weight connecting **a** with **b**, or\n \/\/\/ **None** if the edge does not exist in the graph.\n pub fn edge<'a>(&'a self, a: N, b: N) -> Option<&'a E>\n {\n self.edges.get(&edge_key(a, b))\n }\n\n \/\/\/ Return a mutable reference to the edge weight connecting **a** with **b**, or\n \/\/\/ **None** if the edge does not exist in the graph.\n pub fn edge_mut<'a>(&'a mut self, a: N, b: N) -> Option<&'a mut E>\n {\n self.edges.get_mut(&edge_key(a, b))\n }\n\n pub fn traverse_breadth_first(&self, start: N) -> BreadthFirstTraversal<N, E>\n {\n BreadthFirstTraversal{\n graph: self,\n stack: {\n let mut stack = RingBuf::new();\n stack.push_back(start);\n stack\n },\n visited: HashSet::new(),\n }\n }\n\n pub fn traverse_depth_first(&self, start: N) -> DepthFirstTraversal<N, E>\n {\n DepthFirstTraversal{\n graph: self,\n stack: vec![start],\n visited: HashSet::new(),\n }\n }\n}\n\nmacro_rules! 
iterator_methods(\n ($elt_type:ty) => (\n #[inline]\n fn next(&mut self) -> Option<$elt_type>\n {\n self.iter.next()\n }\n\n #[inline]\n fn size_hint(&self) -> (uint, Option<uint>)\n {\n self.iter.size_hint()\n }\n )\n);\n\npub struct Nodes<'a, N: 'a> {\n iter: Keys<'a, N, Vec<N>>\n}\n\nimpl<'a, N: 'a> Iterator<&'a N> for Nodes<'a, N>\n{\n iterator_methods!(&'a N);\n}\n\npub struct Neighbors<'a, N: 'a> {\n iter: Map<&'a N, N, Items<'a, N>, fn(&N) -> N>,\n}\n\nimpl<'a, N> Iterator<N> for Neighbors<'a, N>\n{\n iterator_methods!(N);\n}\n\npub struct Edges<'a, N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a> {\n pub from: N,\n pub edges: &'a HashMap<(N, N), E>,\n pub iter: Neighbors<'a, N>,\n}\n\nimpl<'a, N, E> Iterator<(N, &'a E)> for Edges<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<(N, &'a E)>\n {\n match self.iter.next() {\n None => None,\n Some(b) => {\n let a = self.from;\n match self.edges.get(&edge_key(a, b)) {\n None => unreachable!(),\n Some(edge) => {\n Some((b, edge))\n }\n }\n }\n }\n }\n}\n\npub struct BFT<'a, G, N, F, Neighbors>\n where\n G: 'a,\n N: 'a + Copy + PartialOrd + Eq + Hash,\n F: FnMut(&G, N) -> Neighbors,\n Neighbors: Iterator<N>,\n{\n pub graph: &'a G,\n pub stack: RingBuf<N>,\n pub visited: HashSet<N>,\n pub neighbors: F,\n}\n\nimpl<'a, G, N, F, Neighbors> Iterator<N> for BFT<'a, G, N, F, Neighbors>\n where\n G: 'a,\n N: 'a + Copy + PartialOrd + Eq + Hash,\n F: FnMut(&G, N) -> Neighbors,\n Neighbors: Iterator<N>,\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop_front() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in (self.neighbors)(self.graph, node) {\n if !self.visited.contains(&succ) {\n self.stack.push_back(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n\npub struct BreadthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n graph: &'a Graph<N, E>,\n stack: RingBuf<N>,\n visited: HashSet<N>,\n}\n\nimpl<'a, N, E> Iterator<N> for BreadthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop_front() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in self.graph.neighbors(node) {\n if !self.visited.contains(&succ) {\n self.stack.push_back(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n\npub struct DepthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n graph: &'a Graph<N, E>,\n stack: Vec<N>,\n visited: HashSet<N>,\n}\n\nimpl<'a, N, E> Iterator<N> for DepthFirstTraversal<'a, N, E>\n where N: 'a + Copy + PartialOrd + Eq + Hash, E: 'a\n{\n fn next(&mut self) -> Option<N>\n {\n while let Some(node) = self.stack.pop() {\n if !self.visited.insert(node) {\n continue;\n }\n\n for succ in self.graph.neighbors(node) {\n if !self.visited.contains(&succ) {\n self.stack.push(succ);\n }\n }\n\n return Some(node);\n }\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Improve the physical device selection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ext: parse toml directly<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n#![feature(if_let)]\n\n\/\/! 
A user friendly game engine written in Rust.\n\nextern crate gfx;\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\nextern crate window;\n\n\/\/ Crates used to reexport.\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"noise\" as noise_lib;\nextern crate \"genmesh\" as genmesh_lib;\nextern crate \"sprite\" as sprite_lib;\nextern crate \"current\" as current_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"wavefront-obj\" as wavefront_obj_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\nextern crate \"texture_packer\" as texture_packer_lib;\nextern crate \"conrod\" as conrod_lib;\nextern crate \"dsp\" as dsp_lib;\nextern crate \"wire\" as wire_lib;\nextern crate \"astar\" as astar_lib;\nextern crate \"img_hash\" as img_hash_lib;\nextern crate \"nalgebra\" as nalgebra_lib;\nextern crate \"ncollide\" as ncollide_lib;\n\n\/\/ Reexports.\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use cam_lib as cam;\npub use noise_lib as noise;\npub use genmesh_lib as genmesh;\npub use sprite_lib as sprite;\npub use current_lib as current;\npub use fps_counter_lib as fps_counter;\npub use wavefront_obj_lib as wavefront_obj;\npub use drag_controller_lib as drag_controller;\npub use texture_packer_lib as texture_packer;\npub use conrod_lib as conrod;\npub use dsp_lib as dsp;\npub use wire_lib as wire;\npub use astar_lib as astar;\npub use img_hash_lib as img_hash;\npub use nalgebra_lib as nalgebra;\npub use ncollide_lib as ncollide;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n Window,\n WindowSettings,\n};\npub use current::{\n Get,\n Set,\n Modifier,\n Current,\n CurrentGuard,\n};\n\nuse gfx_graphics::G2D;\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\nuse gfx::{ DeviceHelper };\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::opengl::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = WindowBackEnd::new(\n opengl,\n window_settings,\n );\n\n let mut device = gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n });\n let mut gl = Gl::new(opengl);\n let mut g2d = G2D::new(&mut device);\n let mut renderer = device.create_renderer();\n let event::window::Size([w, h]) = window.get();\n let mut frame = gfx::Frame::new(w as u16, h as u16);\n let mut fps_counter = FPSCounter::new();\n\n let window_guard = CurrentGuard::new(&mut window);\n let device_guard = CurrentGuard::new(&mut device);\n let gl_guard = CurrentGuard::new(&mut gl);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(device_guard);\n drop(gl_guard);\n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(fps_counter_guard);\n}\n\n\/\/\/ The current window\npub unsafe fn current_window() -> Current<WindowBackEnd> { Current }\n\/\/\/ The current Gfx device\npub unsafe fn current_gfx_device() -> Current<gfx::GlDevice> { Current }\n\/\/\/ The current opengl_graphics back-end\npub unsafe fn current_gl() -> Current<Gl> { Current }\n\/\/\/ The current gfx_graphics back-end\npub unsafe fn current_g2d() -> Current<G2D> { Current }\n\/\/\/ The current Gfx renderer\npub unsafe fn current_renderer() -> Current<gfx::Renderer<gfx::GlCommandBuffer>> { Current }\n\/\/\/ The current Gfx frame\npub unsafe fn current_frame() -> Current<gfx::Frame> { Current }\n\/\/\/ The current FPS counter\npub unsafe fn current_fps_counter() -> Current<FPSCounter> { Current }\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events() -> event::Events<Current<WindowBackEnd>> {\n unsafe {\n Events::new(current_window())\n }\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n unsafe {\n current_fps_counter().tick()\n }\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: String) {\n unsafe {\n current_window().set_mut(window::Title(text));\n }\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\npub fn render_2d_gfx(\n bg_color: Option<[f32, ..4]>, \n f: |&graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n unsafe {\n current_g2d().draw(\n &mut *current_renderer(),\n &*current_frame(), \n |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n c.color(bg_color).draw(g);\n }\n f(&c, g);\n });\n current_gfx_device().submit(current_renderer().as_buffer());\n current_renderer().reset();\n }\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\npub fn render_2d_opengl(\n bg_color: Option<[f32, ..4]>,\n f: |&graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n unsafe {\n use graphics::*;\n let gl = &mut *current_gl();\n let window::Size([w, h]) = current_window().get();\n gl.viewport(0, 0, w as i32, h as i32);\n gl.clear_program();\n let c = Context::abs(w as f64, h as f64);\n if let Some(bg_color) = bg_color {\n c.color(bg_color).draw(gl);\n }\n f(&c, gl);\n }\n}\n\n<commit_msg>Upgraded to latest piston-graphics<commit_after>#![crate_name = 
\"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n#![feature(if_let)]\n\n\/\/! A user friendly game engine written in Rust.\n\nextern crate gfx;\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\nextern crate window;\n\n\/\/ Crates used to reexport.\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"noise\" as noise_lib;\nextern crate \"genmesh\" as genmesh_lib;\nextern crate \"sprite\" as sprite_lib;\nextern crate \"current\" as current_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"wavefront-obj\" as wavefront_obj_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\nextern crate \"texture_packer\" as texture_packer_lib;\nextern crate \"conrod\" as conrod_lib;\nextern crate \"dsp\" as dsp_lib;\nextern crate \"wire\" as wire_lib;\nextern crate \"astar\" as astar_lib;\nextern crate \"img_hash\" as img_hash_lib;\nextern crate \"nalgebra\" as nalgebra_lib;\nextern crate \"ncollide\" as ncollide_lib;\n\n\/\/ Reexports.\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use cam_lib as cam;\npub use noise_lib as noise;\npub use genmesh_lib as genmesh;\npub use sprite_lib as sprite;\npub use current_lib as current;\npub use fps_counter_lib as fps_counter;\npub use wavefront_obj_lib as wavefront_obj;\npub use drag_controller_lib as drag_controller;\npub use texture_packer_lib as texture_packer;\npub use conrod_lib as conrod;\npub use dsp_lib as dsp;\npub use wire_lib as wire;\npub use astar_lib as astar;\npub use img_hash_lib as img_hash;\npub use nalgebra_lib as nalgebra;\npub use ncollide_lib as ncollide;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n Window,\n WindowSettings,\n};\npub use current::{\n Get,\n Set,\n Modifier,\n Current,\n CurrentGuard,\n};\n\nuse gfx_graphics::G2D;\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\nuse gfx::{ DeviceHelper };\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::opengl::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = WindowBackEnd::new(\n opengl,\n window_settings,\n );\n\n let mut device = gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n });\n let mut gl = Gl::new(opengl);\n let mut g2d = G2D::new(&mut device);\n let mut renderer = device.create_renderer();\n let event::window::Size([w, h]) = window.get();\n let mut frame = gfx::Frame::new(w as u16, h as u16);\n let mut fps_counter = FPSCounter::new();\n\n let window_guard = CurrentGuard::new(&mut window);\n let device_guard = CurrentGuard::new(&mut device);\n let gl_guard = CurrentGuard::new(&mut gl);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(device_guard);\n drop(gl_guard);\n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(fps_counter_guard);\n}\n\n\/\/\/ The current window\npub unsafe fn current_window() -> Current<WindowBackEnd> { Current }\n\/\/\/ The current Gfx device\npub unsafe fn current_gfx_device() -> Current<gfx::GlDevice> { Current }\n\/\/\/ The current opengl_graphics back-end\npub unsafe fn current_gl() -> Current<Gl> { Current }\n\/\/\/ The current gfx_graphics back-end\npub unsafe fn current_g2d() -> Current<G2D> { Current }\n\/\/\/ The current Gfx renderer\npub unsafe fn current_renderer() -> Current<gfx::Renderer<gfx::GlCommandBuffer>> { Current }\n\/\/\/ The current Gfx frame\npub unsafe fn current_frame() -> Current<gfx::Frame> { Current }\n\/\/\/ The current FPS counter\npub unsafe fn current_fps_counter() -> Current<FPSCounter> { Current }\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events() -> event::Events<Current<WindowBackEnd>> {\n unsafe {\n Events::new(current_window())\n }\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n unsafe {\n current_fps_counter().tick()\n }\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: String) {\n unsafe {\n current_window().set_mut(window::Title(text));\n }\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\npub fn render_2d_gfx(\n bg_color: Option<[f32, ..4]>, \n f: |graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n unsafe {\n current_g2d().draw(\n &mut *current_renderer(),\n &*current_frame(), \n |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n c.color(bg_color).draw(g);\n }\n f(c, g);\n });\n current_gfx_device().submit(current_renderer().as_buffer());\n current_renderer().reset();\n }\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\npub fn render_2d_opengl(\n bg_color: Option<[f32, ..4]>,\n f: |graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n unsafe {\n use graphics::*;\n let gl = &mut *current_gl();\n let window::Size([w, h]) = current_window().get();\n gl.draw([0, 0, w as i32, h as i32], |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n c.color(bg_color).draw(g);\n }\n f(c, g);\n });\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #37694 - michaelwoerister:test-if-ich, r=brson<commit_after>\/\/ 
Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for if expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\n\/\/ Change condition (if) -------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_condition(x: bool) -> u32 {\n if x {\n return 1\n }\n\n return 0\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_condition(x: bool) -> u32 {\n if !x {\n return 1\n }\n\n return 0\n}\n\n\/\/ Change then branch (if) -----------------------------------------------------\n#[cfg(cfail1)]\npub fn change_then_branch(x: bool) -> u32 {\n if x {\n return 1\n }\n\n return 0\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_then_branch(x: bool) -> u32 {\n if x {\n return 2\n }\n\n return 0\n}\n\n\n\n\/\/ Change else branch (if) -----------------------------------------------------\n#[cfg(cfail1)]\npub fn change_else_branch(x: bool) -> u32 {\n if x {\n 1\n } else {\n 2\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_else_branch(x: bool) -> u32 {\n if x {\n 1\n } else {\n 3\n }\n}\n\n\n\n\/\/ Add else branch (if) --------------------------------------------------------\n#[cfg(cfail1)]\npub fn add_else_branch(x: bool) -> u32 {\n let mut ret = 1;\n\n if x {\n ret += 1;\n }\n\n ret\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_else_branch(x: bool) -> u32 {\n let mut ret = 1;\n\n if x {\n ret += 1;\n } else {\n }\n\n ret\n}\n\n\n\n\/\/ Change condition (if let) ---------------------------------------------------\n#[cfg(cfail1)]\npub fn change_condition_if_let(x: Option<u32>) -> u32 {\n if let Some(_x) = x {\n return 1\n }\n\n 0\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_condition_if_let(x: Option<u32>) -> u32 {\n if let Some(_) = x {\n return 1\n }\n\n 0\n}\n\n\n\n\/\/ Change then branch (if let) 
-------------------------------------------------\n#[cfg(cfail1)]\npub fn change_then_branch_if_let(x: Option<u32>) -> u32 {\n if let Some(x) = x {\n return x\n }\n\n 0\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_then_branch_if_let(x: Option<u32>) -> u32 {\n if let Some(x) = x {\n return x + 1\n }\n\n 0\n}\n\n\n\n\/\/ Change else branch (if let) -------------------------------------------------\n#[cfg(cfail1)]\npub fn change_else_branch_if_let(x: Option<u32>) -> u32 {\n if let Some(x) = x {\n x\n } else {\n 1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_else_branch_if_let(x: Option<u32>) -> u32 {\n if let Some(x) = x {\n x\n } else {\n 2\n }\n}\n\n\n\n\/\/ Add else branch (if let) ----------------------------------------------------\n#[cfg(cfail1)]\npub fn add_else_branch_if_let(x: Option<u32>) -> u32 {\n let mut ret = 1;\n\n if let Some(x) = x {\n ret += x;\n }\n\n ret\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_else_branch_if_let(x: Option<u32>) -> u32 {\n let mut ret = 1;\n\n if let Some(x) = x {\n ret += x;\n } else {\n }\n\n ret\n}\n<|endoftext|>"} {"text":"<commit_before>#![deny(missing_docs)]\n#![deny(missing_copy_implementations)]\n#![feature(globs)]\n#![feature(default_type_params)]\n\n\/\/! Window abstraction\n\nextern crate input;\nextern crate quack;\nextern crate event_loop;\n\nuse input::Input;\nuse quack::{ ActOn, GetFrom, SetAt };\n\n\/\/ Reexport everything from event_loop.\npub use event_loop::*;\n\n\/\/\/ The title of the window.\npub struct Title(pub String);\n\n\/\/\/ The anti-aliasing samples when rendering.\n#[derive(Copy)]\npub struct Samples(pub u8);\n\n\/\/\/ Whether window is opened in full screen mode.\n#[derive(Copy)]\npub struct Fullscreen(pub bool);\n\n\/\/\/ Whether to exit when pressing the Esc keyboard button.\n#[derive(Copy)]\npub struct ExitOnEsc(pub bool);\n\n\/\/\/ Whether to capture the mouse cursor.\n#[derive(Copy)]\npub struct CaptureCursor(pub bool);\n\n\/\/\/ The draw size of the window.\n#[derive(Copy)]\npub struct DrawSize(pub [u32; 2]);\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32; 2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\/\/\/ An implementation of Window that runs without a window at all.\npub struct NoWindow {\n should_close: bool,\n title: String,\n}\n\nimpl NoWindow {\n \/\/\/ Returns a new `NoWindow`.\n pub fn new(settings: WindowSettings) -> NoWindow {\n let title = settings.title.clone();\n 
NoWindow {\n should_close: false,\n title: title,\n }\n }\n}\n\nimpl ActOn<NoWindow, ()> for SwapBuffers {\n fn act_on(self, _window: &mut NoWindow) {}\n}\n\nimpl ActOn<NoWindow, Option<Input>> for PollEvent {\n fn act_on(self, _window: &mut NoWindow) -> Option<Input> { None }\n}\n\nimpl GetFrom<NoWindow> for ShouldClose {\n fn get_from(obj: &NoWindow) -> ShouldClose {\n ShouldClose(obj.should_close)\n }\n}\n\nimpl GetFrom<NoWindow> for Size {\n fn get_from(_obj: &NoWindow) -> Size {\n Size([0, 0])\n }\n}\n\nimpl SetAt<NoWindow> for CaptureCursor {\n fn set_at(self, _window: &mut NoWindow) {}\n}\n\nimpl SetAt<NoWindow> for ShouldClose {\n fn set_at(self, window: &mut NoWindow) {\n let ShouldClose(val) = self;\n window.should_close = val;\n }\n}\n\nimpl GetFrom<NoWindow> for DrawSize {\n fn get_from(obj: &NoWindow) -> DrawSize {\n let Size(val) = GetFrom::get_from(obj);\n DrawSize(val)\n }\n}\n\nimpl GetFrom<NoWindow> for Title {\n fn get_from(obj: &NoWindow) -> Title {\n Title(obj.title.clone())\n }\n}\n\nimpl SetAt<NoWindow> for Title {\n fn set_at(self, window: &mut NoWindow) {\n let Title(val) = self;\n window.title = val;\n }\n}\n\nimpl GetFrom<NoWindow> for ExitOnEsc {\n fn get_from(_obj: &NoWindow) -> ExitOnEsc {\n ExitOnEsc(false)\n }\n}\n\nimpl SetAt<NoWindow> for ExitOnEsc {\n \/\/ Ignore attempt to exit by pressing Esc.\n fn set_at(self, _window: &mut NoWindow) {}\n}\n<commit_msg>Updated to latest piston-quack<commit_after>#![deny(missing_docs)]\n#![deny(missing_copy_implementations)]\n#![allow(unstable)]\n\n\/\/! Window abstraction\n\nextern crate input;\nextern crate quack;\nextern crate event_loop;\n\nuse input::Input;\nuse quack::{ ActOn, GetFrom, SetAt, Me };\n\n\/\/ Reexport everything from event_loop.\npub use event_loop::*;\n\n\/\/\/ The title of the window.\npub struct Title(pub String);\n\n\/\/\/ The anti-aliasing samples when rendering.\n#[derive(Copy)]\npub struct Samples(pub u8);\n\n\/\/\/ Whether window is opened in full screen mode.\n#[derive(Copy)]\npub struct Fullscreen(pub bool);\n\n\/\/\/ Whether to exit when pressing the Esc keyboard button.\n#[derive(Copy)]\npub struct ExitOnEsc(pub bool);\n\n\/\/\/ Whether to capture the mouse cursor.\n#[derive(Copy)]\npub struct CaptureCursor(pub bool);\n\n\/\/\/ The draw size of the window.\n#[derive(Copy)]\npub struct DrawSize(pub [u32; 2]);\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32; 2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\/\/\/ An implementation of Window that runs without a window at all.\npub struct NoWindow {\n should_close: bool,\n title: String,\n}\n\nimpl NoWindow {\n \/\/\/ Returns a new `NoWindow`.\n pub fn new(settings: WindowSettings) -> NoWindow {\n let title = settings.title.clone();\n NoWindow {\n should_close: false,\n title: title,\n }\n }\n}\n\nimpl ActOn<()> for (SwapBuffers, NoWindow) {\n type Action = SwapBuffers;\n type Object = NoWindow;\n\n fn act_on(_: Me<Self>, 
_action: SwapBuffers, _window: &mut NoWindow) {}\n}\n\nimpl ActOn<Option<Input>> for (PollEvent, NoWindow) {\n type Action = PollEvent;\n type Object = NoWindow;\n\n fn act_on(_: Me<Self>, _action: PollEvent, _window: &mut NoWindow) \n -> Option<Input> { None }\n}\n\nimpl GetFrom for (ShouldClose, NoWindow) {\n type Property = ShouldClose;\n type Object = NoWindow;\n\n fn get_from(_: Me<Self>, obj: &NoWindow) -> ShouldClose {\n ShouldClose(obj.should_close)\n }\n}\n\nimpl GetFrom for (Size, NoWindow) {\n type Property = Size;\n type Object = NoWindow;\n\n fn get_from(_: Me<Self>, _obj: &NoWindow) -> Size {\n Size([0, 0])\n }\n}\n\nimpl SetAt for (CaptureCursor, NoWindow) {\n type Property = CaptureCursor;\n type Object = NoWindow;\n\n fn set_at(_: Me<Self>, _val: CaptureCursor, _window: &mut NoWindow) {}\n}\n\nimpl SetAt for (ShouldClose, NoWindow) {\n type Property = ShouldClose;\n type Object = NoWindow;\n\n fn set_at(\n _: Me<Self>, \n ShouldClose(val): ShouldClose, \n window: &mut NoWindow\n ) {\n window.should_close = val;\n }\n}\n\nimpl GetFrom for (DrawSize, NoWindow) {\n type Property = DrawSize;\n type Object = NoWindow;\n\n fn get_from(_: Me<Self>, obj: &NoWindow) -> DrawSize {\n let Size(val) = GetFrom::get_from(Me::<(Size, NoWindow)>, obj);\n DrawSize(val)\n }\n}\n\nimpl GetFrom for (Title, NoWindow) {\n type Property = Title;\n type Object = NoWindow;\n\n fn get_from(_: Me<Self>, obj: &NoWindow) -> Title {\n Title(obj.title.clone())\n }\n}\n\nimpl SetAt for (Title, NoWindow) {\n type Property = Title;\n type Object = NoWindow;\n\n fn set_at(_: Me<Self>, Title(val): Title, window: &mut NoWindow) {\n window.title = val;\n }\n}\n\nimpl GetFrom for (ExitOnEsc, NoWindow) {\n type Property = ExitOnEsc;\n type Object = NoWindow;\n\n fn get_from(_: Me<Self>, _obj: &NoWindow) -> ExitOnEsc {\n ExitOnEsc(false)\n }\n}\n\nimpl SetAt for (ExitOnEsc, NoWindow) {\n type Property = ExitOnEsc;\n type Object = NoWindow;\n\n \/\/ Ignore attempt to exit by pressing Esc.\n fn set_at(_: Me<Self>, _: ExitOnEsc, _window: &mut NoWindow) {}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use field-init-shorthand<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add serde test<commit_after>extern crate crates_index_diff;\nextern crate serde_json;\n\nuse crates_index_diff::*;\n\n#[test]\nfn test_parse_crate_version() {\n let c: CrateVersion = serde_json::from_str(\n r#\"{\n \"name\": \"test\",\n \"vers\": \"1.0.0\",\n \"yanked\": true\n }\"#).unwrap();\n assert_eq!(\n c,\n CrateVersion {\n name: \"test\".to_string(),\n kind: ChangeKind::Yanked,\n version: \"1.0.0\".to_string(),\n }\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixed; draft<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #304 - kbknapp:rustdoc-fix, r=kbknapp<commit_after><|endoftext|>"} {"text":"<commit_before>\/*===============================================================================================*\/\n\/\/ Copyright 2016 Kyle Finlay\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language 
governing permissions and\n\/\/ limitations under the License.\n\/*===============================================================================================*\/\n\n\/\/ Crate imports\nextern crate num_traits;\n\n\/\/ Module imports\nuse self::num_traits::Num;\n\n\/*===============================================================================================*\/\n\/*------CLAMP TRAIT------------------------------------------------------------------------------*\/\n\/*===============================================================================================*\/\n\n\/\/\/ Clamp trait.\npub trait Clamp {\n\n \/\/\/ Clamps a value between two numbers.\n fn clamp (self, min: Self, max: Self) -> Self;\n}\n\n\/*===============================================================================================*\/\n\/*------CLAMP TRAIT IMPLEMENTATIONS--------------------------------------------------------------*\/\n\/*===============================================================================================*\/\n\nimpl<T> Clamp for T where\n T: Num + PartialOrd {\n\n fn clamp (self, min: Self, max: Self) -> Self {\n\n if self < min {return min}\n if self > max {return max}\n self\n }\n}\n<commit_msg>Added debug assert<commit_after>\/*===============================================================================================*\/\n\/\/ Copyright 2016 Kyle Finlay\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\/*===============================================================================================*\/\n\n\/\/ Crate imports\nextern crate num_traits;\n\n\/\/ Module imports\nuse self::num_traits::Num;\n\n\/*===============================================================================================*\/\n\/*------CLAMP TRAIT------------------------------------------------------------------------------*\/\n\/*===============================================================================================*\/\n\n\/\/\/ Clamp trait.\npub trait Clamp {\n\n \/\/\/ Clamps a value between two numbers.\n fn clamp (self, min: Self, max: Self) -> Self;\n}\n\n\/*===============================================================================================*\/\n\/*------CLAMP TRAIT IMPLEMENTATIONS--------------------------------------------------------------*\/\n\/*===============================================================================================*\/\n\nimpl<T> Clamp for T where\n T: Num + PartialOrd {\n\n fn clamp (self, min: Self, max: Self) -> Self {\n\n debug_assert! 
(min < max, \"Min cannot be greater than max.\");\n\n if self < min {return min}\n if self > max {return max}\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use super::{Numeric, Columnar, Tabular};\nuse std::ops::{Add, Sub, Mul, Div, Rem};\nuse std::ops;\nuse std::convert;\nuse std::mem::transmute;\n\n#[cfg(features = \"parallel\")]\nuse super::parallel::*;\n\n#[cfg(features = \"rand\")]\nuse rand::Rand;\n\n#[cfg(test)]\nmod test;\n\n#[macro_use]\nmod macros;\n\npub trait Vector<N>: Sized\nwhere N: Numeric {\n\n #[cfg(features = \"unstable\")]\n fn is_perpendicular_to<M>(self, v_prime: Self) -> bool\n where Self: Mul<Self, Output=M>\n , M: PartialEq\n {\n (self * v_prime) == M::zero()\n }\n}\n\n\/\/\/ A 3D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector3<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n , pub z: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector3<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector3 { x: N::rand(rng)\n , y: N::rand(rng)\n , z: N::rand(rng)\n }\n }\n}\n\n\/\/ impl<N> Columnar for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/\n\/\/ type Column = Vector3<N>;\n\/\/\n\/\/ #[inline] fn ncols(&self) -> usize { 1 }\n\/\/ #[inline] fn column(&self, i: usize) -> Self::Column {\n\/\/ if i == 0 { *self }\n\/\/ else { panic!(\"Index out of bounds!\") }\n\/\/ }\n\/\/ #[inline] fn column_mut(&mut self, i: usize) -> &mut Self::Column {\n\/\/ if i == 0 { self }\n\/\/ else { panic!(\"Index out of bounds!\") }\n\/\/ }\n\/\/\n\/\/ }\n\/\/\n\/\/ impl<N> Tabular for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/\n\/\/ type Row = N;\n\/\/ #[inline] fn nrows(&self) -> usize { 3 }\n\/\/ #[inline] fn row(&self, i: usize) -> Self::Row {\n\/\/ match i { 0 => self.x\n\/\/ , 1 => self.y\n\/\/ , 2 => self.z\n\/\/ , _ => panic!(\"Index out of bounds!\")\n\/\/ }\n\/\/ }\n\/\/ #[inline] fn row_mut(&mut self, i: usize) -> &mut Self::Row {\n\/\/ match i { 0 => &mut self.x\n\/\/ , 1 => &mut self.y\n\/\/ , 2 => &mut self.z\n\/\/ , _ => panic!(\"Index out of bounds!\")\n\/\/ }\n\/\/ }\n\/\/\n\/\/ }\n\n\/\/ impl<N> convert::From<[N; 3]> for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/ fn from(a: [N; 3]) -> Self {\n\/\/ Vector3 { x: a[0], y: a[1], z: a[2] }\n\/\/ }\n\/\/ }\n\/\/\n\/\/ impl<'a, N> convert::From<&'a [N; 3]> for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/ fn from(a: &[N; 3]) -> Self {\n\/\/ Vector3 { x: a[0], y: a[1], z: a[2] }\n\/\/ }\n\/\/ }\n\nimpl_v3_ops!{\n Add, add, +\n Sub, sub, -\n Div, div, \/\n Rem, rem, %\n}\n\nimpl<N> Mul<N> for Vector3<N>\nwhere N: Numeric + Mul<Output = N>\n , N: Copy\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Self {\n Vector3 { x: self.x * rhs\n , y: self.y * rhs\n , z: self.z * rhs\n }\n }\n\n}\n\nimpl<N> Mul<Vector3<N>> for Vector3<N>\nwhere N: Numeric\n , N: Mul<Output = N> + Add<Output = N>\n , N: Copy\n{\n type Output = N;\n fn mul(self, rhs: Self) -> N {\n (self.x * rhs.x) +\n (self.y * rhs.y) +\n (self.z * rhs.z)\n }\n}\n\n#[cfg(features = \"parallel\")]\nimpl<N> Mul<N> for Vector3<N>\nwhere Self: Simdalize<Elem = N>\n , N: Numeric + Mul<Output = N>\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Output { self.simdalize() * N::splat(rhs) }\n}\n\n\n\/\/\/ A 2D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, 
PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector2<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector2<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector2 { x: N::rand(rng)\n , y: N::rand(rng)\n }\n }\n}\n\nimpl_converts! { Vector2, 2\n , Vector3, 3\n }\nimpl_index! { Vector2, Vector3 }\n\nimpl_v2_ops! { Add, add, +\n Sub, sub, -\n Div, div, \/\n Rem, rem, %\n }\n\nimpl<N> Mul<N> for Vector2<N>\nwhere N: Numeric + Mul<Output = N>\n , N: Copy\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Self {\n Vector2 { x: self.x * rhs\n , y: self.y * rhs\n }\n }\n}\n\nimpl<N> Mul<Vector2<N>> for Vector2<N>\nwhere N: Numeric\n , N: Mul<Output = N> + Add<Output = N>\n , N: Copy\n{\n type Output = N;\n fn mul(self, rhs: Self) -> N {\n (self.x * rhs.x) + (self.y * rhs.y)\n }\n}\n\n#[cfg(features = \"parallel\")]\nimpl<N> Mul<N> for Vector2<N>\nwhere Self: Simdalize<Elem = N>\n , N: Numeric + Mul<Output = N>\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Output { self.simdalize() * N::splat(rhs) }\n}\n\npub struct VectorN<'a, N: Numeric + 'a>(&'a [N]);\n<commit_msg>Add 4- and 5-dimensional vectors (arithmetic NYI)<commit_after>use super::{Numeric, Columnar, Tabular};\nuse std::ops::{Add, Sub, Mul, Div, Rem};\nuse std::ops;\nuse std::convert;\nuse std::mem::transmute;\n\n#[cfg(features = \"parallel\")]\nuse super::parallel::*;\n\n#[cfg(features = \"rand\")]\nuse rand::Rand;\n\n#[cfg(test)]\nmod test;\n\n#[macro_use]\nmod macros;\n\npub trait Vector<N>: Sized\nwhere N: Numeric {\n\n #[cfg(features = \"unstable\")]\n fn is_perpendicular_to<M>(self, v_prime: Self) -> bool\n where Self: Mul<Self, Output=M>\n , M: PartialEq\n {\n (self * v_prime) == M::zero()\n }\n}\n\n\/\/\/ A 5D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector5<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n , pub z: N\n , pub w: N\n , pub a: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector5<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector3 { x: N::rand(rng)\n , y: N::rand(rng)\n , z: N::rand(rng)\n , w: N::rand(rng)\n , a: N::rand(rng)\n }\n }\n}\n\n\/\/\/ A 4D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector4<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n , pub z: N\n , pub w: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector4<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector3 { x: N::rand(rng)\n , y: N::rand(rng)\n , z: N::rand(rng)\n , w: N::rand(rng)\n }\n }\n}\n\n\/\/\/ A 3D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector3<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n , pub z: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector3<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector3 { x: N::rand(rng)\n , y: N::rand(rng)\n , z: N::rand(rng)\n }\n }\n}\n\n\/\/ impl<N> Columnar for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/\n\/\/ type Column = Vector3<N>;\n\/\/\n\/\/ #[inline] fn ncols(&self) -> usize { 
1 }\n\/\/ #[inline] fn column(&self, i: usize) -> Self::Column {\n\/\/ if i == 0 { *self }\n\/\/ else { panic!(\"Index out of bounds!\") }\n\/\/ }\n\/\/ #[inline] fn column_mut(&mut self, i: usize) -> &mut Self::Column {\n\/\/ if i == 0 { self }\n\/\/ else { panic!(\"Index out of bounds!\") }\n\/\/ }\n\/\/\n\/\/ }\n\/\/\n\/\/ impl<N> Tabular for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/\n\/\/ type Row = N;\n\/\/ #[inline] fn nrows(&self) -> usize { 3 }\n\/\/ #[inline] fn row(&self, i: usize) -> Self::Row {\n\/\/ match i { 0 => self.x\n\/\/ , 1 => self.y\n\/\/ , 2 => self.z\n\/\/ , _ => panic!(\"Index out of bounds!\")\n\/\/ }\n\/\/ }\n\/\/ #[inline] fn row_mut(&mut self, i: usize) -> &mut Self::Row {\n\/\/ match i { 0 => &mut self.x\n\/\/ , 1 => &mut self.y\n\/\/ , 2 => &mut self.z\n\/\/ , _ => panic!(\"Index out of bounds!\")\n\/\/ }\n\/\/ }\n\/\/\n\/\/ }\n\n\/\/ impl<N> convert::From<[N; 3]> for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/ fn from(a: [N; 3]) -> Self {\n\/\/ Vector3 { x: a[0], y: a[1], z: a[2] }\n\/\/ }\n\/\/ }\n\/\/\n\/\/ impl<'a, N> convert::From<&'a [N; 3]> for Vector3<N>\n\/\/ where N: Numeric\n\/\/ , N: Copy {\n\/\/ fn from(a: &[N; 3]) -> Self {\n\/\/ Vector3 { x: a[0], y: a[1], z: a[2] }\n\/\/ }\n\/\/ }\n\nimpl_v3_ops!{\n Add, add, +\n Sub, sub, -\n Div, div, \/\n Rem, rem, %\n}\n\nimpl<N> Mul<N> for Vector3<N>\nwhere N: Numeric + Mul<Output = N>\n , N: Copy\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Self {\n Vector3 { x: self.x * rhs\n , y: self.y * rhs\n , z: self.z * rhs\n }\n }\n\n}\n\nimpl<N> Mul<Vector3<N>> for Vector3<N>\nwhere N: Numeric\n , N: Mul<Output = N> + Add<Output = N>\n , N: Copy\n{\n type Output = N;\n fn mul(self, rhs: Self) -> N {\n (self.x * rhs.x) +\n (self.y * rhs.y) +\n (self.z * rhs.z)\n }\n}\n\n#[cfg(features = \"parallel\")]\nimpl<N> Mul<N> for Vector3<N>\nwhere Self: Simdalize<Elem = N>\n , N: Numeric + Mul<Output = N>\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Output { self.simdalize() * N::splat(rhs) }\n}\n\n\n\/\/\/ A 2D vector of any numeric type.\n\/\/\/\n\/\/\/ This is the non-SIMD version.\n#[cfg(not(simd))]\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Debug, Default)]\n#[repr(C)]\npub struct Vector2<N>\nwhere N: Numeric\n , N: Copy { pub x: N\n , pub y: N\n }\n\n#[cfg(features = \"rand\")]\nimpl<N> Rand for Vector2<N>\nwhere N: Numeric\n , N: Rand {\n\n fn rand<R: Rng>(rng: &mut R) -> Self {\n Vector2 { x: N::rand(rng)\n , y: N::rand(rng)\n }\n }\n}\n\nimpl_v2_ops! { Add, add, +\n Sub, sub, -\n Div, div, \/\n Rem, rem, %\n }\n\nimpl<N> Mul<N> for Vector2<N>\nwhere N: Numeric + Mul<Output = N>\n , N: Copy\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Self {\n Vector2 { x: self.x * rhs\n , y: self.y * rhs\n }\n }\n}\n\nimpl<N> Mul<Vector2<N>> for Vector2<N>\nwhere N: Numeric\n , N: Mul<Output = N> + Add<Output = N>\n , N: Copy\n{\n type Output = N;\n fn mul(self, rhs: Self) -> N {\n (self.x * rhs.x) + (self.y * rhs.y)\n }\n}\n\n#[cfg(features = \"parallel\")]\nimpl<N> Mul<N> for Vector2<N>\nwhere Self: Simdalize<Elem = N>\n , N: Numeric + Mul<Output = N>\n{\n type Output = Self;\n fn mul(self, rhs: N) -> Output { self.simdalize() * N::splat(rhs) }\n}\n\npub struct VectorN<'a, N: Numeric + 'a>(&'a [N]);\n\nimpl_converts! { Vector2, 2\n , Vector3, 3\n , Vector4, 4\n , Vector5, 5\n }\n\nimpl_index! 
{ Vector2\n , Vector3\n , Vector4\n , Vector5\n }\n<|endoftext|>"} {"text":"<commit_before>use std::collections::hashmap::HashMap;\nuse std::uint;\nuse std::rand;\nuse std::rand::Rng;\nuse cgmath::{Vector2};\nuse calx::color::RGB;\nuse calx::color::consts::*;\nuse world::system::{World, Entity, EngineLogic};\nuse world::spatial::{Location, Position, DIRECTIONS6};\nuse world::mapgen::{AreaSpec};\nuse world::mapgen;\nuse world::area::Area;\nuse world::fov::Fov;\nuse world::dijkstra;\n\n#[deriving(Clone, Show)]\npub struct MobComp {\n pub t: MobType,\n pub max_hp: int,\n pub hp: int,\n pub power: int,\n pub armor: int,\n pub status: int,\n}\n\nimpl MobComp {\n pub fn new(t: MobType) -> MobComp {\n let data = MOB_KINDS[t as uint];\n let status = if t != Player { status::Asleep as int } else { 0 };\n MobComp {\n t: t,\n max_hp: data.power,\n hp: data.power,\n power: data.power,\n armor: 0,\n status: status,\n }\n }\n}\n\npub mod intrinsic {\n#[deriving(Eq, PartialEq, Clone)]\npub enum Intrinsic {\n \/\/\/ Moves 1\/3 slower than usual.\n Slow = 0b1,\n \/\/\/ Moves 1\/3 faster than usual, stacks with Quick status.\n Fast = 0b10,\n \/\/\/ Can manipulate objects and doors.\n Hands = 0b100,\n}\n}\n\npub mod status {\n#[deriving(Eq, PartialEq, Clone)]\npub enum Status {\n \/\/\/ Moves 1\/3 slower than usual.\n Slow = 0b1,\n \/\/\/ Moves 1\/3 faster than usual, stacks with Fast intrinsic.\n Quick = 0b10,\n \/\/\/ Mob is inactive until disturbed.\n Asleep = 0b100,\n \/\/\/ Mob moves erratically.\n Confused = 0b1000,\n}\n}\n\npub struct MobKind {\n pub typ: MobType,\n pub name: &'static str,\n pub power: int,\n pub area_spec: AreaSpec,\n pub sprite: uint,\n pub color: RGB,\n pub intrinsics: int,\n}\n\n\/\/ Intrinsic flag union.\nmacro_rules! f {\n { $($flag:ident),* } => { 0 $( | intrinsic::$flag as int )* }\n}\n\nmacro_rules! mob_data {\n {\n count: $count:expr;\n $($symbol:ident: $power:expr, $depth:expr, $biome:ident, $sprite:expr, $color:expr, $flags:expr;)*\n\n } => {\n#[deriving(Eq, PartialEq, Clone, Show)]\npub enum MobType {\n $($symbol,)*\n}\n\npub static MOB_KINDS: [MobKind, ..$count] = [\n $(MobKind {\n typ: $symbol,\n name: stringify!($symbol),\n power: $power,\n area_spec: AreaSpec {\n depth: $depth,\n biome: mapgen::$biome,\n },\n sprite: $sprite,\n color: $color,\n intrinsics: $flags,\n },)*\n];\n\n\/\/ End macro\n }\n}\n\nmob_data! 
{\n count: 10;\n\/\/ Symbol power, depth, biome, sprite, color, intrinsics\n Player: 6, -1, Anywhere, 51, AZURE, f!();\n Dreg: 1, 1, Anywhere, 72, OLIVE, f!(Hands);\n Snake: 1, 1, Overland, 71, GREEN, f!();\n Ooze: 3, 3, Dungeon, 77, LIGHTSEAGREEN, f!();\n Flayer: 4, 4, Anywhere, 75, INDIANRED, f!();\n Ogre: 6, 5, Anywhere, 73, DARKSLATEGRAY, f!(Hands);\n Wraith: 8, 6, Dungeon, 74, HOTPINK, f!(Hands);\n Octopus: 10, 7, Anywhere, 63, DARKTURQUOISE, f!();\n Efreet: 12, 8, Anywhere, 78, ORANGE, f!();\n Serpent: 15, 9, Dungeon, 94, CORAL, f!();\n}\n\n\n\/\/\/ Trait for entities that are mobile things.\npub trait Mob {\n fn is_active(&self) -> bool;\n fn acts_this_frame(&self) -> bool;\n fn has_intrinsic(&self, f: intrinsic::Intrinsic) -> bool;\n fn has_status(&self, s: status::Status) -> bool;\n fn add_status(&mut self, s: status::Status);\n fn remove_status(&mut self, s: status::Status);\n fn mob_type(&self) -> MobType;\n fn power(&self) -> int;\n fn update_ai(&mut self);\n\n \/\/\/ Try to move the mob in a direction, then try to roll around obstacles\n \/\/\/ if the direction is blocked.\n fn smart_move(&mut self, dir8: uint) -> Option<Vector2<int>>;\n\n fn enemy_at(&self, loc: Location) -> Option<Entity>;\n fn attack(&mut self, loc: Location);\n\n \/\/\/ Something interesting is happening at location. See if the mob needs to\n \/\/\/ wake up and investigate.\n fn alert_at(&mut self, loc: Location);\n}\n\nimpl Mob for Entity {\n fn is_active(&self) -> bool {\n if !self.has::<MobComp>() { return false; }\n if self.has_status(status::Asleep) { return false; }\n return true;\n }\n\n fn acts_this_frame(&self) -> bool {\n if !self.has::<MobComp>() { return false; }\n if !self.is_active() { return false; }\n\n \/\/ Go through a cycle of 5 phases to get 4 possible speeds.\n \/\/ System idea from Jeff Lait.\n let phase = self.world().get_tick() % 5;\n match phase {\n 0 => return true,\n 1 => return self.has_intrinsic(intrinsic::Fast),\n 2 => return true,\n 3 => return self.has_status(status::Quick),\n 4 => return !self.has_intrinsic(intrinsic::Slow)\n && !self.has_status(status::Slow),\n _ => fail!(\"Invalid action phase\"),\n }\n }\n\n fn has_intrinsic(&self, f: intrinsic::Intrinsic) -> bool {\n self.into::<MobComp>().map_or(false,\n |m| MOB_KINDS[m.t as uint].intrinsics as int & f as int != 0)\n }\n\n fn has_status(&self, s: status::Status) -> bool {\n self.into::<MobComp>().map_or(false,\n |m| m.status as int & s as int != 0)\n }\n\n fn add_status(&mut self, s: status::Status) {\n self.into::<MobComp>().as_mut().map(\n |m| m.status |= s as int);\n }\n\n fn remove_status(&mut self, s: status::Status) {\n self.into::<MobComp>().as_mut().map(\n |m| m.status &= !(s as int));\n }\n\n fn mob_type(&self) -> MobType { self.into::<MobComp>().unwrap().t }\n\n fn power(&self) -> int { self.into::<MobComp>().unwrap().power }\n\n fn update_ai(&mut self) {\n if self.world().player().is_some() {\n let player = self.world().player().unwrap();\n let pathing = Pathing::at_loc(&self.world(), player.location());\n\n let move = pathing.towards_from(self.location());\n match move {\n Some(loc) => {\n let move_dir = &self.location().dir6_towards(loc);\n match self.enemy_at(loc) {\n Some(_) => { self.attack(loc); }\n _ => { self.move(move_dir); }\n }\n return;\n }\n _ => ()\n }\n }\n\n \/\/ No target, wander around randomly.\n self.move(rand::task_rng().choose(DIRECTIONS6.as_slice()).unwrap());\n }\n\n fn smart_move(&mut self, dir8: uint) -> Option<Vector2<int>> {\n static SMART_MOVE: &'static [&'static [Vector2<int>]] 
= &[\n &[DIRECTIONS6[0], DIRECTIONS6[5], DIRECTIONS6[1]],\n &[DIRECTIONS6[1], DIRECTIONS6[0], DIRECTIONS6[2]],\n &[DIRECTIONS6[2], DIRECTIONS6[1], DIRECTIONS6[3]],\n &[DIRECTIONS6[3], DIRECTIONS6[2], DIRECTIONS6[4]],\n &[DIRECTIONS6[4], DIRECTIONS6[3], DIRECTIONS6[5]],\n &[DIRECTIONS6[5], DIRECTIONS6[4], DIRECTIONS6[0]],\n\n \/\/ Right sideways move zigzag.\n &[DIRECTIONS6[1], DIRECTIONS6[2]],\n &[DIRECTIONS6[2], DIRECTIONS6[1]],\n\n \/\/ Left sideways move zigzag.\n &[DIRECTIONS6[5], DIRECTIONS6[4]],\n &[DIRECTIONS6[4], DIRECTIONS6[5]],\n ];\n \/\/ \"horizontal\" movement is a zig-zag since there's no natural hex axis\n \/\/ in that direction. Find out the grid column the mob is on and\n \/\/ determine whether to zig or zag based on that.\n let loc = self.location();\n let zag = ((loc.x - loc.y) % 2) as uint;\n\n let deltas = SMART_MOVE[match dir8 {\n 0 => 0,\n 1 => 1,\n 2 => 6 + zag,\n 3 => 2,\n 4 => 3,\n 5 => 4,\n 6 => 8 + zag,\n 7 => 5,\n _ => fail!(\"Invalid dir8\"),\n }];\n\n for delta in deltas.iter() {\n let new_loc = loc + *delta;\n match self.enemy_at(new_loc) {\n Some(_) => {\n self.attack(new_loc);\n return None;\n }\n _ => ()\n }\n if self.move(delta) { return Some(*delta); }\n }\n\n None\n }\n\n fn enemy_at(&self, loc: Location) -> Option<Entity> {\n let targs = self.world().mobs_at(loc);\n \/\/ Nothing to fight.\n if targs.len() == 0 { return None; }\n \/\/ TODO: Alignment check\n Some(targs[0].clone())\n }\n\n fn attack(&mut self, loc: Location) {\n let p = self.power();\n \/\/ No power, can't fight.\n if p == 0 { return; }\n\n let target = match self.enemy_at(loc) {\n None => return,\n Some(t) => t,\n };\n\n \/\/ Every five points of power is one certain hit.\n let full = p \/ 5;\n let partial = (p % 5) as f64 \/ 5.0;\n\n \/\/ TODO: Make some rng utility functions.\n let r = rand::random::<f64>() % 1.0;\n\n let damage = full + if r < partial { 1 } else { 0 };\n\n \/\/ TODO: A deal_damage method.\n let mut tm = target.into::<MobComp>().unwrap();\n tm.hp -= damage;\n\n if tm.hp <= 0 {\n if target.mob_type() == Player {\n println!(\"TODO handle player death\");\n tm.hp = tm.max_hp;\n }\n \/\/ TODO: Whatever extra stuff we want to do when killing a mob.\n \/\/ It's probably a special occasion if it's the player avatar.\n self.world().delete_entity(&target);\n }\n }\n\n fn alert_at(&mut self, loc: Location) {\n \/\/ TODO: More complex logic. 
Check the square for enemies, don't wake\n \/\/ up if enemy is stealthed successfully.\n if self.has_status(status::Asleep) {\n if self.location().dist(loc) < 6 {\n self.remove_status(status::Asleep);\n\n \/\/ XXX: Msging REALLY needs a nicer API.\n self.world().system_mut().fx.msg(format!(\"{} wakes up.\", MOB_KINDS[self.mob_type() as uint].name).as_slice());\n }\n }\n }\n}\n\n\/\/\/ Game world trait for global creature operations.\npub trait Mobs {\n fn mobs_at(&self, loc: Location) -> Vec<Entity>;\n fn mobs(&self) -> Vec<Entity>;\n fn player(&self) -> Option<Entity>;\n fn player_has_turn(&self) -> bool;\n fn clear_npcs(&mut self);\n fn update_mobs(&mut self);\n fn wake_up_mobs(&mut self);\n}\n\nimpl Mobs for World {\n fn mobs_at(&self, loc: Location) -> Vec<Entity> {\n self.entities_at(loc).iter().filter(|e| e.has::<MobComp>())\n .map(|e| e.clone()).collect()\n }\n\n fn mobs(&self) -> Vec<Entity> {\n self.entities().filter(|e| e.has::<MobComp>())\n .map(|e| e.clone()).collect()\n }\n\n fn player(&self) -> Option<Entity> {\n for e in self.mobs().iter() {\n if e.mob_type() == Player {\n return Some(e.clone());\n }\n }\n None\n }\n\n fn player_has_turn(&self) -> bool {\n match self.player() {\n Some(p) => p.acts_this_frame(),\n _ => false\n }\n }\n\n fn clear_npcs(&mut self) {\n for e in self.mobs().mut_iter() {\n if e.mob_type() != Player {\n e.delete();\n }\n }\n }\n\n fn update_mobs(&mut self) {\n self.wake_up_mobs();\n\n for mob in self.mobs().mut_iter() {\n if !mob.acts_this_frame() { continue; }\n if mob.mob_type() == Player { continue; }\n mob.update_ai();\n }\n\n self.advance_frame();\n }\n\n fn wake_up_mobs(&mut self) {\n if self.player().is_none() { return; }\n let player_fov = self.camera().into::<Fov>().unwrap();\n let player_loc = self.player().unwrap().location();\n for &loc in player_fov.deref().seen_locs() {\n for mob in self.mobs_at(loc).mut_iter() {\n mob.alert_at(player_loc);\n }\n }\n }\n}\n\npub struct Pathing {\n gradient: HashMap<Location, uint>,\n}\n\nimpl Pathing {\n \/\/\/ Pathing map towards a single point.\n pub fn at_loc(world: &World, loc: Location) -> Pathing {\n Pathing {\n gradient: dijkstra::build_map(\n vec![loc],\n |&loc| world.open_neighbors(loc, DIRECTIONS6.iter()),\n 256),\n }\n }\n\n fn neighbors(&self, loc: Location) -> Vec<Location> {\n DIRECTIONS6.iter().map(|&d| loc + d).collect()\n }\n\n pub fn towards_from(&self, current: Location) -> Option<Location> {\n let mut ret = None;\n let mut best = uint::MAX;\n for n in self.neighbors(current).iter() {\n match self.gradient.find(n) {\n Some(&weight) if weight <= best => {\n ret = Some(*n);\n best = weight;\n }\n _ => ()\n }\n }\n ret\n }\n}\n<commit_msg>Smaller pathfinding radius for enemy AI<commit_after>use std::collections::hashmap::HashMap;\nuse std::uint;\nuse std::rand;\nuse std::rand::Rng;\nuse cgmath::{Vector2};\nuse calx::color::RGB;\nuse calx::color::consts::*;\nuse world::system::{World, Entity, EngineLogic};\nuse world::spatial::{Location, Position, DIRECTIONS6};\nuse world::mapgen::{AreaSpec};\nuse world::mapgen;\nuse world::area::Area;\nuse world::fov::Fov;\nuse world::dijkstra;\n\n#[deriving(Clone, Show)]\npub struct MobComp {\n pub t: MobType,\n pub max_hp: int,\n pub hp: int,\n pub power: int,\n pub armor: int,\n pub status: int,\n}\n\nimpl MobComp {\n pub fn new(t: MobType) -> MobComp {\n let data = MOB_KINDS[t as uint];\n let status = if t != Player { status::Asleep as int } else { 0 };\n MobComp {\n t: t,\n max_hp: data.power,\n hp: data.power,\n power: data.power,\n armor: 0,\n 
status: status,\n }\n }\n}\n\npub mod intrinsic {\n#[deriving(Eq, PartialEq, Clone)]\npub enum Intrinsic {\n \/\/\/ Moves 1\/3 slower than usual.\n Slow = 0b1,\n \/\/\/ Moves 1\/3 faster than usual, stacks with Quick status.\n Fast = 0b10,\n \/\/\/ Can manipulate objects and doors.\n Hands = 0b100,\n}\n}\n\npub mod status {\n#[deriving(Eq, PartialEq, Clone)]\npub enum Status {\n \/\/\/ Moves 1\/3 slower than usual.\n Slow = 0b1,\n \/\/\/ Moves 1\/3 faster than usual, stacks with Fast intrinsic.\n Quick = 0b10,\n \/\/\/ Mob is inactive until disturbed.\n Asleep = 0b100,\n \/\/\/ Mob moves erratically.\n Confused = 0b1000,\n}\n}\n\npub struct MobKind {\n pub typ: MobType,\n pub name: &'static str,\n pub power: int,\n pub area_spec: AreaSpec,\n pub sprite: uint,\n pub color: RGB,\n pub intrinsics: int,\n}\n\n\/\/ Intrinsic flag union.\nmacro_rules! f {\n { $($flag:ident),* } => { 0 $( | intrinsic::$flag as int )* }\n}\n\nmacro_rules! mob_data {\n {\n count: $count:expr;\n $($symbol:ident: $power:expr, $depth:expr, $biome:ident, $sprite:expr, $color:expr, $flags:expr;)*\n\n } => {\n#[deriving(Eq, PartialEq, Clone, Show)]\npub enum MobType {\n $($symbol,)*\n}\n\npub static MOB_KINDS: [MobKind, ..$count] = [\n $(MobKind {\n typ: $symbol,\n name: stringify!($symbol),\n power: $power,\n area_spec: AreaSpec {\n depth: $depth,\n biome: mapgen::$biome,\n },\n sprite: $sprite,\n color: $color,\n intrinsics: $flags,\n },)*\n];\n\n\/\/ End macro\n }\n}\n\nmob_data! {\n count: 10;\n\/\/ Symbol power, depth, biome, sprite, color, intrinsics\n Player: 6, -1, Anywhere, 51, AZURE, f!();\n Dreg: 1, 1, Anywhere, 72, OLIVE, f!(Hands);\n Snake: 1, 1, Overland, 71, GREEN, f!();\n Ooze: 3, 3, Dungeon, 77, LIGHTSEAGREEN, f!();\n Flayer: 4, 4, Anywhere, 75, INDIANRED, f!();\n Ogre: 6, 5, Anywhere, 73, DARKSLATEGRAY, f!(Hands);\n Wraith: 8, 6, Dungeon, 74, HOTPINK, f!(Hands);\n Octopus: 10, 7, Anywhere, 63, DARKTURQUOISE, f!();\n Efreet: 12, 8, Anywhere, 78, ORANGE, f!();\n Serpent: 15, 9, Dungeon, 94, CORAL, f!();\n}\n\n\n\/\/\/ Trait for entities that are mobile things.\npub trait Mob {\n fn is_active(&self) -> bool;\n fn acts_this_frame(&self) -> bool;\n fn has_intrinsic(&self, f: intrinsic::Intrinsic) -> bool;\n fn has_status(&self, s: status::Status) -> bool;\n fn add_status(&mut self, s: status::Status);\n fn remove_status(&mut self, s: status::Status);\n fn mob_type(&self) -> MobType;\n fn power(&self) -> int;\n fn update_ai(&mut self);\n\n \/\/\/ Try to move the mob in a direction, then try to roll around obstacles\n \/\/\/ if the direction is blocked.\n fn smart_move(&mut self, dir8: uint) -> Option<Vector2<int>>;\n\n fn enemy_at(&self, loc: Location) -> Option<Entity>;\n fn attack(&mut self, loc: Location);\n\n \/\/\/ Something interesting is happening at location. 
See if the mob needs to\n \/\/\/ wake up and investigate.\n fn alert_at(&mut self, loc: Location);\n}\n\nimpl Mob for Entity {\n fn is_active(&self) -> bool {\n if !self.has::<MobComp>() { return false; }\n if self.has_status(status::Asleep) { return false; }\n return true;\n }\n\n fn acts_this_frame(&self) -> bool {\n if !self.has::<MobComp>() { return false; }\n if !self.is_active() { return false; }\n\n \/\/ Go through a cycle of 5 phases to get 4 possible speeds.\n \/\/ System idea from Jeff Lait.\n let phase = self.world().get_tick() % 5;\n match phase {\n 0 => return true,\n 1 => return self.has_intrinsic(intrinsic::Fast),\n 2 => return true,\n 3 => return self.has_status(status::Quick),\n 4 => return !self.has_intrinsic(intrinsic::Slow)\n && !self.has_status(status::Slow),\n _ => fail!(\"Invalid action phase\"),\n }\n }\n\n fn has_intrinsic(&self, f: intrinsic::Intrinsic) -> bool {\n self.into::<MobComp>().map_or(false,\n |m| MOB_KINDS[m.t as uint].intrinsics as int & f as int != 0)\n }\n\n fn has_status(&self, s: status::Status) -> bool {\n self.into::<MobComp>().map_or(false,\n |m| m.status as int & s as int != 0)\n }\n\n fn add_status(&mut self, s: status::Status) {\n self.into::<MobComp>().as_mut().map(\n |m| m.status |= s as int);\n }\n\n fn remove_status(&mut self, s: status::Status) {\n self.into::<MobComp>().as_mut().map(\n |m| m.status &= !(s as int));\n }\n\n fn mob_type(&self) -> MobType { self.into::<MobComp>().unwrap().t }\n\n fn power(&self) -> int { self.into::<MobComp>().unwrap().power }\n\n fn update_ai(&mut self) {\n if self.world().player().is_some() {\n let player = self.world().player().unwrap();\n let pathing = Pathing::at_loc(&self.world(), player.location());\n\n let move = pathing.towards_from(self.location());\n match move {\n Some(loc) => {\n let move_dir = &self.location().dir6_towards(loc);\n match self.enemy_at(loc) {\n Some(_) => { self.attack(loc); }\n _ => { self.move(move_dir); }\n }\n return;\n }\n _ => ()\n }\n }\n\n \/\/ No target, wander around randomly.\n self.move(rand::task_rng().choose(DIRECTIONS6.as_slice()).unwrap());\n }\n\n fn smart_move(&mut self, dir8: uint) -> Option<Vector2<int>> {\n static SMART_MOVE: &'static [&'static [Vector2<int>]] = &[\n &[DIRECTIONS6[0], DIRECTIONS6[5], DIRECTIONS6[1]],\n &[DIRECTIONS6[1], DIRECTIONS6[0], DIRECTIONS6[2]],\n &[DIRECTIONS6[2], DIRECTIONS6[1], DIRECTIONS6[3]],\n &[DIRECTIONS6[3], DIRECTIONS6[2], DIRECTIONS6[4]],\n &[DIRECTIONS6[4], DIRECTIONS6[3], DIRECTIONS6[5]],\n &[DIRECTIONS6[5], DIRECTIONS6[4], DIRECTIONS6[0]],\n\n \/\/ Right sideways move zigzag.\n &[DIRECTIONS6[1], DIRECTIONS6[2]],\n &[DIRECTIONS6[2], DIRECTIONS6[1]],\n\n \/\/ Left sideways move zigzag.\n &[DIRECTIONS6[5], DIRECTIONS6[4]],\n &[DIRECTIONS6[4], DIRECTIONS6[5]],\n ];\n \/\/ \"horizontal\" movement is a zig-zag since there's no natural hex axis\n \/\/ in that direction. 
Find out the grid column the mob is on and\n \/\/ determine whether to zig or zag based on that.\n let loc = self.location();\n let zag = ((loc.x - loc.y) % 2) as uint;\n\n let deltas = SMART_MOVE[match dir8 {\n 0 => 0,\n 1 => 1,\n 2 => 6 + zag,\n 3 => 2,\n 4 => 3,\n 5 => 4,\n 6 => 8 + zag,\n 7 => 5,\n _ => fail!(\"Invalid dir8\"),\n }];\n\n for delta in deltas.iter() {\n let new_loc = loc + *delta;\n match self.enemy_at(new_loc) {\n Some(_) => {\n self.attack(new_loc);\n return None;\n }\n _ => ()\n }\n if self.move(delta) { return Some(*delta); }\n }\n\n None\n }\n\n fn enemy_at(&self, loc: Location) -> Option<Entity> {\n let targs = self.world().mobs_at(loc);\n \/\/ Nothing to fight.\n if targs.len() == 0 { return None; }\n \/\/ TODO: Alignment check\n Some(targs[0].clone())\n }\n\n fn attack(&mut self, loc: Location) {\n let p = self.power();\n \/\/ No power, can't fight.\n if p == 0 { return; }\n\n let target = match self.enemy_at(loc) {\n None => return,\n Some(t) => t,\n };\n\n \/\/ Every five points of power is one certain hit.\n let full = p \/ 5;\n let partial = (p % 5) as f64 \/ 5.0;\n\n \/\/ TODO: Make some rng utility functions.\n let r = rand::random::<f64>() % 1.0;\n\n let damage = full + if r < partial { 1 } else { 0 };\n\n \/\/ TODO: A deal_damage method.\n let mut tm = target.into::<MobComp>().unwrap();\n tm.hp -= damage;\n\n if tm.hp <= 0 {\n if target.mob_type() == Player {\n println!(\"TODO handle player death\");\n tm.hp = tm.max_hp;\n }\n \/\/ TODO: Whatever extra stuff we want to do when killing a mob.\n \/\/ It's probably a special occasion if it's the player avatar.\n self.world().delete_entity(&target);\n }\n }\n\n fn alert_at(&mut self, loc: Location) {\n \/\/ TODO: More complex logic. Check the square for enemies, don't wake\n \/\/ up if enemy is stealthed successfully.\n if self.has_status(status::Asleep) {\n if self.location().dist(loc) < 6 {\n self.remove_status(status::Asleep);\n\n \/\/ XXX: Msging REALLY needs a nicer API.\n self.world().system_mut().fx.msg(format!(\"{} wakes up.\", MOB_KINDS[self.mob_type() as uint].name).as_slice());\n }\n }\n }\n}\n\n\/\/\/ Game world trait for global creature operations.\npub trait Mobs {\n fn mobs_at(&self, loc: Location) -> Vec<Entity>;\n fn mobs(&self) -> Vec<Entity>;\n fn player(&self) -> Option<Entity>;\n fn player_has_turn(&self) -> bool;\n fn clear_npcs(&mut self);\n fn update_mobs(&mut self);\n fn wake_up_mobs(&mut self);\n}\n\nimpl Mobs for World {\n fn mobs_at(&self, loc: Location) -> Vec<Entity> {\n self.entities_at(loc).iter().filter(|e| e.has::<MobComp>())\n .map(|e| e.clone()).collect()\n }\n\n fn mobs(&self) -> Vec<Entity> {\n self.entities().filter(|e| e.has::<MobComp>())\n .map(|e| e.clone()).collect()\n }\n\n fn player(&self) -> Option<Entity> {\n for e in self.mobs().iter() {\n if e.mob_type() == Player {\n return Some(e.clone());\n }\n }\n None\n }\n\n fn player_has_turn(&self) -> bool {\n match self.player() {\n Some(p) => p.acts_this_frame(),\n _ => false\n }\n }\n\n fn clear_npcs(&mut self) {\n for e in self.mobs().mut_iter() {\n if e.mob_type() != Player {\n e.delete();\n }\n }\n }\n\n fn update_mobs(&mut self) {\n self.wake_up_mobs();\n\n for mob in self.mobs().mut_iter() {\n if !mob.acts_this_frame() { continue; }\n if mob.mob_type() == Player { continue; }\n mob.update_ai();\n }\n\n self.advance_frame();\n }\n\n fn wake_up_mobs(&mut self) {\n if self.player().is_none() { return; }\n let player_fov = self.camera().into::<Fov>().unwrap();\n let player_loc = self.player().unwrap().location();\n 
for &loc in player_fov.deref().seen_locs() {\n for mob in self.mobs_at(loc).mut_iter() {\n mob.alert_at(player_loc);\n }\n }\n }\n}\n\npub struct Pathing {\n gradient: HashMap<Location, uint>,\n}\n\nimpl Pathing {\n \/\/\/ Pathing map towards a single point.\n pub fn at_loc(world: &World, loc: Location) -> Pathing {\n Pathing {\n gradient: dijkstra::build_map(\n vec![loc],\n |&loc| world.open_neighbors(loc, DIRECTIONS6.iter()),\n 64),\n }\n }\n\n fn neighbors(&self, loc: Location) -> Vec<Location> {\n DIRECTIONS6.iter().map(|&d| loc + d).collect()\n }\n\n pub fn towards_from(&self, current: Location) -> Option<Location> {\n let mut ret = None;\n let mut best = uint::MAX;\n for n in self.neighbors(current).iter() {\n match self.gradient.find(n) {\n Some(&weight) if weight <= best => {\n ret = Some(*n);\n best = weight;\n }\n _ => ()\n }\n }\n ret\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #78252 - bugadani:issue-45964, r=Mark-Simulacrum<commit_after>\/\/ This test case checks that slice::{r}position functions do not\n\/\/ prevent optimizing away bounds checks\n\n\/\/ compile-flags: -O\n\n#![crate_type=\"rlib\"]\n\n\/\/ CHECK-LABEL: @test\n#[no_mangle]\npub fn test(y: &[u32], x: &u32, z: &u32) -> bool {\n let result = match y.iter().position(|a| a == x) {\n Some(p) => Ok(p),\n None => Err(()),\n };\n\n if let Ok(p) = result {\n \/\/ CHECK-NOT: panic\n y[p] == *z\n } else {\n false\n }\n}\n\n\/\/ CHECK-LABEL: @rtest\n#[no_mangle]\npub fn rtest(y: &[u32], x: &u32, z: &u32) -> bool {\n let result = match y.iter().rposition(|a| a == x) {\n Some(p) => Ok(p),\n None => Err(()),\n };\n\n if let Ok(p) = result {\n \/\/ CHECK-NOT: panic\n y[p] == *z\n } else {\n false\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>compile-pass test for #53606<commit_after>\/\/compile-pass\n\n#![feature(nll)]\n\nfn main() {\n\n let _: &'static usize = &(loop {}, 1).1;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for nonexhaustive parsing<commit_after>extern crate peg;\nuse peg::parser;\n\nparser!{\n pub grammar nonexhaustive() for [u8] {\n pub nonexhaustive rule foo() = \"foo\"\n }\n}\n\nfn main() {\n assert_eq!(nonexhaustive::foo(b\"foobar\"), Ok(()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove uneccessary call to iter()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #27146 - apasel422:issue-14229, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Foo: Sized {\n fn foo(self) {}\n}\n\ntrait Bar: Sized {\n fn bar(self) {}\n}\n\nstruct S;\n\nimpl<'l> Foo for &'l S {}\n\nimpl<T: Foo> Bar for T {}\n\nfn main() {\n let s = S;\n s.foo();\n (&s).bar();\n s.bar();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Stress test for MPSC<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags:--test\n\/\/ ignore-emscripten\n\nuse std::sync::mpsc::channel;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::mpsc::RecvError;\nuse std::sync::mpsc::RecvTimeoutError;\nuse std::sync::Arc;\nuse std::sync::atomic::AtomicUsize;\nuse std::sync::atomic::Ordering;\n\nuse std::thread;\nuse std::time::Duration;\n\n\n\/\/\/ Simple thread synchronization utility\nstruct Barrier {\n \/\/ Not using mutex\/condvar for precision\n shared: Arc<AtomicUsize>,\n count: usize,\n}\n\nimpl Barrier {\n fn new(count: usize) -> Vec<Barrier> {\n let shared = Arc::new(AtomicUsize::new(0));\n (0..count).map(|_| Barrier { shared: shared.clone(), count: count }).collect()\n }\n\n fn new2() -> (Barrier, Barrier) {\n let mut v = Barrier::new(2);\n (v.pop().unwrap(), v.pop().unwrap())\n }\n\n \/\/\/ Returns when `count` threads enter `wait`\n fn wait(self) {\n self.shared.fetch_add(1, Ordering::SeqCst);\n while self.shared.load(Ordering::SeqCst) != self.count {\n }\n }\n}\n\n\nfn shared_close_sender_does_not_lose_messages_iter() {\n let (tb, rb) = Barrier::new2();\n\n let (tx, rx) = channel();\n let _ = tx.clone(); \/\/ convert to shared\n\n thread::spawn(move || {\n tb.wait();\n thread::sleep(Duration::from_micros(1));\n tx.send(17).expect(\"send\");\n drop(tx);\n });\n\n let i = rx.into_iter();\n rb.wait();\n \/\/ Make sure it doesn't return disconnected before returning an element\n assert_eq!(vec![17], i.collect::<Vec<_>>());\n}\n\n#[test]\nfn shared_close_sender_does_not_lose_messages() {\n for _ in 0..10000 {\n shared_close_sender_does_not_lose_messages_iter();\n }\n}\n\n\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/39364\nfn concurrent_recv_timeout_and_upgrade_iter() {\n \/\/ 1 us\n let sleep = Duration::new(0, 1_000);\n\n let (a, b) = Barrier::new2();\n let (tx, rx) = channel();\n let th = thread::spawn(move || {\n a.wait();\n loop {\n match rx.recv_timeout(sleep) {\n Ok(_) => {\n break;\n },\n Err(_) => {},\n }\n }\n });\n b.wait();\n thread::sleep(sleep);\n tx.clone().send(()).expect(\"send\");\n th.join().unwrap();\n}\n\n#[test]\nfn concurrent_recv_timeout_and_upgrade() {\n \/\/ FIXME: fix and enable\n if true { return }\n\n \/\/ at the moment of writing this test fails like this:\n \/\/ thread '<unnamed>' panicked at 'assertion failed: `(left == right)`\n \/\/ left: `4561387584`,\n \/\/ right: `0`', libstd\/sync\/mpsc\/shared.rs:253:13\n\n for _ in 0..10000 {\n concurrent_recv_timeout_and_upgrade_iter();\n }\n}\n\n\nfn concurrent_writes_iter() {\n const THREADS: usize = 4;\n const PER_THR: usize = 100;\n\n let mut bs = Barrier::new(THREADS + 1);\n let (tx, rx) = channel();\n\n let mut threads = Vec::new();\n for j in 0..THREADS {\n let tx = tx.clone();\n let b = bs.pop().unwrap();\n threads.push(thread::spawn(move || {\n b.wait();\n for i in 0..PER_THR {\n tx.send(j * 1000 + i).expect(\"send\");\n }\n }));\n }\n\n let b = bs.pop().unwrap();\n b.wait();\n\n let mut v: Vec<_> = rx.iter().take(THREADS * PER_THR).collect();\n v.sort();\n\n for j in 0..THREADS {\n for i in 0..PER_THR {\n assert_eq!(j * 1000 + i, v[j * PER_THR + 
i]);\n }\n }\n\n for t in threads {\n t.join().unwrap();\n }\n\n let one_us = Duration::new(0, 1000);\n\n assert_eq!(TryRecvError::Empty, rx.try_recv().unwrap_err());\n assert_eq!(RecvTimeoutError::Timeout, rx.recv_timeout(one_us).unwrap_err());\n\n drop(tx);\n\n assert_eq!(RecvError, rx.recv().unwrap_err());\n assert_eq!(RecvTimeoutError::Disconnected, rx.recv_timeout(one_us).unwrap_err());\n assert_eq!(TryRecvError::Disconnected, rx.try_recv().unwrap_err());\n}\n\n#[test]\nfn concurrent_writes() {\n for _ in 0..100 {\n concurrent_writes_iter();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added debug example.<commit_after>\/\/ Derive the `fmt::Debug` implementation for `Structure`. `Structure`\n\/\/ is a structure which contains a single `i32`.\n#[derive(Debug)]\nstruct Structure(i32);\n\n\/\/ Put a `Structure` inside of the structure `Deep`. Make it printable\n\/\/ also.\n#[derive(Debug)]\nstruct Deep(Structure);\n\nfn main() {\n \/\/ Printing with `{:?}` is similar to with `{}`.\n println!(\"{:?} months in a year.\", 12);\n println!(\"{1:?} {0:?} is the {actor:?} name.\",\n \"Slater\",\n \"Christian\",\n actor=\"actor's\");\n\n \/\/ `Structure` is printable!\n println!(\"Now {:?} will print!\", Structure(3));\n\n \/\/ The problem with `derive` is there is no control over how\n \/\/ the results look. What if I want this to just show a `7`?\n println!(\"Now {:?} will print!\", Deep(Structure(7)));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: introduce tokio<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test derived from #74961<commit_after>\/\/ check-pass\n\/\/ edition:2018\n\n\/\/ This test is derived from\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/74961#issuecomment-666893845\n\/\/ by @SNCPlay42\n\n\/\/ This test demonstrates that, in `async fn g()`,\n\/\/ indeed a temporary borrow `y` from `x` is live\n\/\/ while `f().await` is being evaluated.\n\/\/ Thus, `&'_ A` should be included in type signature\n\/\/ of the underlying generator.\n\n#[derive(PartialEq, Eq)]\nstruct A;\n\nasync fn f() -> A {\n A\n}\n\nasync fn g() {\n let x = A;\n match x {\n y if f().await == y => {}\n _ => {}\n }\n}\n\nfn main() {}<|endoftext|>"} {"text":"<commit_before><commit_msg>Force flush echo output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test suite `str.rs` and trivial test<commit_after>\nuse wtools::str;\n\n#[test]\nfn trivial() {\n\n let opts = str::split_fast { src : String::from( \"abc\" ), delimeter : None, preservingEmpty : None, preservingDelimeters : None };\n assert_eq!( opts.src, String::from( \"abc\" ) );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>store agent and mob stats<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a velocity to the Object struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add comment for `&version!()[..]`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated binary tree<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add event queue accessors test<commit_after>extern crate entity_rust;\n\nuse entity_rust::events;\n\n#[test]\nfn event_queue_accessors() {\n let i : i64 = 5;\n events::set_event_queue(\"my_queue\", i);\n match events::get_event_queue_mut(\"my_queue\") {\n Some(queue) => {\n assert!(queue.len() == 1);\n assert!(i == queue[0])\n }\n None => assert!(false)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test that logs are truncated with 
ellipses<commit_after>\/\/ Test that logs add `[...]` to truncated lines\n\/\/ error-pattern:[...]\n\nfn main() {\n fail \"\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\\\n \";\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust: Problem 4<commit_after>\/\/\/ # Largest palindrome product\n\/\/\/ ## Problem 4\n\/\/\/ A palindromic number reads the same both ways. 
The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 × 99.\n\/\/\/ Find the largest palindrome made from the product of two 3-digit numbers.\n\nfn is_palindrome(n: int) -> bool {\n let fwd = n.to_str().into_bytes();\n let mut rev = fwd.clone();\n rev.reverse();\n\n let mut chars = fwd.iter().zip(rev.iter());\n\n chars.all(|(a,b)| a == b)\n}\n\nfn main() {\n let mut largest = 0;\n for a in range(100, 1000) {\n for b in range(100, 1000) {\n let product = a * b;\n if is_palindrome(product) {\n largest = if largest > product {largest} else {product}\n }\n }\n }\n println!(\"largest: {}\", largest);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>server<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple example<commit_after>extern crate lunr;\nextern crate serde_json;\n\nuse lunr::builder;\nuse lunr::index::Index;\nuse lunr::document::{Document, Field};\n\nstruct Quote {\n id: String,\n text: String\n}\n\nimpl<'a> Document<'a> for Quote {\n fn id(&self) -> String {\n self.id.to_owned()\n }\n\n fn fields(&self) -> Vec<Field> {\n vec![Field { name: String::from(\"text\"), text: self.text.to_owned() }]\n }\n}\n\nfn main() {\n let lennon = Quote {\n id: String::from(\"lennon\"),\n text: String::from(\"life is what happens while you are busy making other plans\"),\n };\n\n let wilde = Quote {\n id: String::from(\"wilde\"),\n text: String::from(\"work is the curse of the drinking classes\"),\n };\n\n let mut builder = builder::create();\n\n builder.add(lennon);\n builder.add(wilde);\n\n let index: Index = builder.into();\n\n let json = serde_json::to_string(&index).expect(\"json serialization failed\");\n\n println!(\"{}\", json);\n}<|endoftext|>"} {"text":"<commit_before>\/\/! HTTP Client\n\/\/!\n\/\/! # Usage\n\/\/!\n\/\/! The `Client` API is designed for most people to make HTTP requests.\n\/\/! It utilizes the lower level `Request` API.\n\/\/!\n\/\/! ## GET\n\/\/!\n\/\/! ```no_run\n\/\/! # use hyper::Client;\n\/\/! let mut client = Client::new();\n\/\/!\n\/\/! let res = client.get(\"http:\/\/example.domain\").send().unwrap();\n\/\/! assert_eq!(res.status, hyper::Ok);\n\/\/! ```\n\/\/!\n\/\/! The returned value is a `Response`, which provides easy access to\n\/\/! the `status`, the `headers`, and the response body via the `Read`\n\/\/! trait.\n\/\/!\n\/\/! ## POST\n\/\/!\n\/\/! ```no_run\n\/\/! # use hyper::Client;\n\/\/! let mut client = Client::new();\n\/\/!\n\/\/! let res = client.post(\"http:\/\/example.domain\")\n\/\/! .body(\"foo=bar\")\n\/\/! .send()\n\/\/! .unwrap();\n\/\/! assert_eq!(res.status, hyper::Ok);\n\/\/! 
```\nuse std::default::Default;\nuse std::io::{self, copy, Read};\nuse std::iter::Extend;\n\nuse url::UrlParser;\nuse url::ParseError as UrlError;\n\nuse header::{Headers, Header, HeaderFormat};\nuse header::{ContentLength, Location};\nuse method::Method;\nuse net::{NetworkConnector, NetworkStream, ContextVerifier};\nuse status::StatusClass::Redirection;\nuse {Url};\nuse Error;\n\npub use self::pool::Pool;\npub use self::request::Request;\npub use self::response::Response;\n\npub mod pool;\npub mod request;\npub mod response;\n\n\/\/\/ A Client to use additional features with Requests.\n\/\/\/\n\/\/\/ Clients can handle things such as: redirect policy, connection pooling.\npub struct Client {\n connector: Connector,\n redirect_policy: RedirectPolicy,\n}\n\nimpl Client {\n\n \/\/\/ Create a new Client.\n pub fn new() -> Client {\n Client::with_pool_config(Default::default())\n }\n\n \/\/\/ Create a new Client with a configured Pool Config.\n pub fn with_pool_config(config: pool::Config) -> Client {\n Client::with_connector(Pool::new(config))\n }\n\n \/\/\/ Create a new client with a specific connector.\n pub fn with_connector<C, S>(connector: C) -> Client\n where C: NetworkConnector<Stream=S> + Send + 'static, S: NetworkStream + Send {\n Client {\n connector: with_connector(connector),\n redirect_policy: Default::default()\n }\n }\n\n \/\/\/ Set the SSL verifier callback for use with OpenSSL.\n pub fn set_ssl_verifier(&mut self, verifier: ContextVerifier) {\n self.connector.set_ssl_verifier(verifier);\n }\n\n \/\/\/ Set the RedirectPolicy.\n pub fn set_redirect_policy(&mut self, policy: RedirectPolicy) {\n self.redirect_policy = policy;\n }\n\n \/\/\/ Build a Get request.\n pub fn get<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Build a Head request.\n pub fn head<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Build a Post request.\n pub fn post<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Build a Put request.\n pub fn put<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Put, url)\n }\n\n \/\/\/ Build a Delete request.\n pub fn delete<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Delete, url)\n }\n\n\n \/\/\/ Build a new request using this Client.\n pub fn request<U: IntoUrl>(&mut self, method: Method, url: U) -> RequestBuilder<U> {\n RequestBuilder {\n client: self,\n method: method,\n url: url,\n body: None,\n headers: None,\n }\n }\n}\n\nfn with_connector<C: NetworkConnector<Stream=S> + Send + 'static, S: NetworkStream + Send>(c: C) -> Connector {\n Connector(Box::new(ConnAdapter(c)))\n}\n\nimpl Default for Client {\n fn default() -> Client { Client::new() }\n}\n\nstruct ConnAdapter<C: NetworkConnector + Send>(C);\n\nimpl<C: NetworkConnector<Stream=S> + Send, S: NetworkStream + Send> NetworkConnector for ConnAdapter<C> {\n type Stream = Box<NetworkStream + Send>;\n #[inline]\n fn connect(&self, host: &str, port: u16, scheme: &str)\n -> ::Result<Box<NetworkStream + Send>> {\n Ok(try!(self.0.connect(host, port, scheme)).into())\n }\n #[inline]\n fn set_ssl_verifier(&mut self, verifier: ContextVerifier) {\n self.0.set_ssl_verifier(verifier);\n }\n}\n\nstruct Connector(Box<NetworkConnector<Stream=Box<NetworkStream + Send>> + Send>);\n\nimpl NetworkConnector for Connector {\n type Stream = Box<NetworkStream + Send>;\n #[inline]\n fn connect(&self, host: &str, port: 
u16, scheme: &str)\n -> ::Result<Box<NetworkStream + Send>> {\n Ok(try!(self.0.connect(host, port, scheme)).into())\n }\n #[inline]\n fn set_ssl_verifier(&mut self, verifier: ContextVerifier) {\n self.0.set_ssl_verifier(verifier);\n }\n}\n\n\/\/\/ Options for an individual Request.\n\/\/\/\n\/\/\/ One of these will be built for you if you use one of the convenience\n\/\/\/ methods, such as `get()`, `post()`, etc.\npub struct RequestBuilder<'a, U: IntoUrl> {\n client: &'a Client,\n url: U,\n headers: Option<Headers>,\n method: Method,\n body: Option<Body<'a>>,\n}\n\nimpl<'a, U: IntoUrl> RequestBuilder<'a, U> {\n\n \/\/\/ Set a request body to be sent.\n pub fn body<B: Into<Body<'a>>>(mut self, body: B) -> RequestBuilder<'a, U> {\n self.body = Some(body.into());\n self\n }\n\n \/\/\/ Add additional headers to the request.\n pub fn headers(mut self, headers: Headers) -> RequestBuilder<'a, U> {\n self.headers = Some(headers);\n self\n }\n\n \/\/\/ Add an individual new header to the request.\n pub fn header<H: Header + HeaderFormat>(mut self, header: H) -> RequestBuilder<'a, U> {\n {\n let mut headers = match self.headers {\n Some(ref mut h) => h,\n None => {\n self.headers = Some(Headers::new());\n self.headers.as_mut().unwrap()\n }\n };\n\n headers.set(header);\n }\n self\n }\n\n \/\/\/ Execute this request and receive a Response back.\n pub fn send(self) -> ::Result<Response> {\n let RequestBuilder { client, method, url, headers, body } = self;\n let mut url = try!(url.into_url());\n trace!(\"send {:?} {:?}\", method, url);\n\n let can_have_body = match &method {\n &Method::Get | &Method::Head => false,\n _ => true\n };\n\n let mut body = if can_have_body {\n body\n } else {\n None\n };\n\n loop {\n let mut req = try!(Request::with_connector(method.clone(), url.clone(), &client.connector));\n headers.as_ref().map(|headers| req.headers_mut().extend(headers.iter()));\n\n match (can_have_body, body.as_ref()) {\n (true, Some(body)) => match body.size() {\n Some(size) => req.headers_mut().set(ContentLength(size)),\n None => (), \/\/ chunked, Request will add it automatically\n },\n (true, None) => req.headers_mut().set(ContentLength(0)),\n _ => () \/\/ neither\n }\n let mut streaming = try!(req.start());\n body.take().map(|mut rdr| copy(&mut rdr, &mut streaming));\n let res = try!(streaming.send());\n if res.status.class() != Redirection {\n return Ok(res)\n }\n debug!(\"redirect code {:?} for {}\", res.status, url);\n\n let loc = {\n \/\/ punching borrowck here\n let loc = match res.headers.get::<Location>() {\n Some(&Location(ref loc)) => {\n Some(UrlParser::new().base_url(&url).parse(&loc[..]))\n }\n None => {\n debug!(\"no Location header\");\n \/\/ could be 304 Not Modified?\n None\n }\n };\n match loc {\n Some(r) => r,\n None => return Ok(res)\n }\n };\n url = match loc {\n Ok(u) => u,\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(res);\n }\n };\n match client.redirect_policy {\n \/\/ separate branches because they can't be one\n RedirectPolicy::FollowAll => (), \/\/continue\n RedirectPolicy::FollowIf(cond) if cond(&url) => (), \/\/continue\n _ => return Ok(res),\n }\n }\n }\n}\n\n\/\/\/ An enum of possible body types for a Request.\npub enum Body<'a> {\n \/\/\/ A Reader does not necessarily know it's size, so it is chunked.\n ChunkedBody(&'a mut (Read + 'a)),\n \/\/\/ For Readers that can know their size, like a `File`.\n SizedBody(&'a mut (Read + 'a), u64),\n \/\/\/ A String has a size, and uses Content-Length.\n BufBody(&'a [u8] , 
usize),\n}\n\nimpl<'a> Body<'a> {\n fn size(&self) -> Option<u64> {\n match *self {\n Body::SizedBody(_, len) => Some(len),\n Body::BufBody(_, len) => Some(len as u64),\n _ => None\n }\n }\n}\n\nimpl<'a> Read for Body<'a> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n match *self {\n Body::ChunkedBody(ref mut r) => r.read(buf),\n Body::SizedBody(ref mut r, _) => r.read(buf),\n Body::BufBody(ref mut r, _) => Read::read(r, buf),\n }\n }\n}\n\nimpl<'a> Into<Body<'a>> for &'a [u8] {\n #[inline]\n fn into(self) -> Body<'a> {\n Body::BufBody(self, self.len())\n }\n}\n\nimpl<'a> Into<Body<'a>> for &'a str {\n #[inline]\n fn into(self) -> Body<'a> {\n self.as_bytes().into()\n }\n}\n\nimpl<'a> Into<Body<'a>> for &'a String {\n #[inline]\n fn into(self) -> Body<'a> {\n self.as_bytes().into()\n }\n}\n\nimpl<'a, R: Read> From<&'a mut R> for Body<'a> {\n #[inline]\n fn from(r: &'a mut R) -> Body<'a> {\n Body::ChunkedBody(r)\n }\n}\n\n\/\/\/ A helper trait to convert common objects into a Url.\npub trait IntoUrl {\n \/\/\/ Consumes the object, trying to return a Url.\n fn into_url(self) -> Result<Url, UrlError>;\n}\n\nimpl IntoUrl for Url {\n fn into_url(self) -> Result<Url, UrlError> {\n Ok(self)\n }\n}\n\nimpl<'a> IntoUrl for &'a str {\n fn into_url(self) -> Result<Url, UrlError> {\n Url::parse(self)\n }\n}\n\nimpl<'a> IntoUrl for &'a String {\n fn into_url(self) -> Result<Url, UrlError> {\n Url::parse(self)\n }\n}\n\n\/\/\/ Behavior regarding how to handle redirects within a Client.\n#[derive(Copy)]\npub enum RedirectPolicy {\n \/\/\/ Don't follow any redirects.\n FollowNone,\n \/\/\/ Follow all redirects.\n FollowAll,\n \/\/\/ Follow a redirect if the contained function returns true.\n FollowIf(fn(&Url) -> bool),\n}\n\n\/\/ This is a hack because of upstream typesystem issues.\nimpl Clone for RedirectPolicy {\n fn clone(&self) -> RedirectPolicy {\n *self\n }\n}\n\nimpl Default for RedirectPolicy {\n fn default() -> RedirectPolicy {\n RedirectPolicy::FollowAll\n }\n}\n\nfn get_host_and_port(url: &Url) -> ::Result<(String, u16)> {\n let host = match url.serialize_host() {\n Some(host) => host,\n None => return Err(Error::Uri(UrlError::EmptyHost))\n };\n trace!(\"host={:?}\", host);\n let port = match url.port_or_default() {\n Some(port) => port,\n None => return Err(Error::Uri(UrlError::InvalidPort))\n };\n trace!(\"port={:?}\", port);\n Ok((host, port))\n}\n\n#[cfg(test)]\nmod tests {\n use header::Server;\n use super::{Client, RedirectPolicy};\n use url::Url;\n use mock::ChannelMockConnector;\n use std::sync::mpsc::{self, TryRecvError};\n\n mock_connector!(MockRedirectPolicy {\n \"http:\/\/127.0.0.1\" => \"HTTP\/1.1 301 Redirect\\r\\n\\\n Location: http:\/\/127.0.0.2\\r\\n\\\n Server: mock1\\r\\n\\\n \\r\\n\\\n \"\n \"http:\/\/127.0.0.2\" => \"HTTP\/1.1 302 Found\\r\\n\\\n Location: https:\/\/127.0.0.3\\r\\n\\\n Server: mock2\\r\\n\\\n \\r\\n\\\n \"\n \"https:\/\/127.0.0.3\" => \"HTTP\/1.1 200 OK\\r\\n\\\n Server: mock3\\r\\n\\\n \\r\\n\\\n \"\n });\n\n #[test]\n fn test_redirect_followall() {\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowAll);\n\n let res = client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock3\".to_owned())));\n }\n\n #[test]\n fn test_redirect_dontfollow() {\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowNone);\n let res = 
client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock1\".to_owned())));\n }\n\n #[test]\n fn test_redirect_followif() {\n fn follow_if(url: &Url) -> bool {\n !url.serialize().contains(\"127.0.0.3\")\n }\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowIf(follow_if));\n let res = client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock2\".to_owned())));\n }\n\n \/\/\/ Tests that the `Client::set_ssl_verifier` method does not drop the\n \/\/\/ old connector, but rather delegates the change to the connector itself.\n #[test]\n fn test_client_set_ssl_verifer() {\n let (tx, rx) = mpsc::channel();\n let mut client = Client::with_connector(ChannelMockConnector::new(tx));\n\n client.set_ssl_verifier(Box::new(|_| {}));\n\n \/\/ Make sure that the client called the `set_ssl_verifier` method\n match rx.try_recv() {\n Ok(meth) => {\n assert_eq!(meth, \"set_ssl_verifier\");\n },\n _ => panic!(\"Expected a call to `set_ssl_verifier`\"),\n };\n \/\/ Now make sure that no other method was called, as well as that\n \/\/ the connector is still alive (i.e. wasn't dropped by the client).\n match rx.try_recv() {\n Err(TryRecvError::Empty) => {},\n Err(TryRecvError::Disconnected) => {\n panic!(\"Expected the connector to still be alive.\");\n },\n Ok(_) => panic!(\"Did not expect any more method calls.\"),\n };\n }\n}\n<commit_msg>refactor(client): use a `Protocol` to create a message for a Request<commit_after>\/\/! HTTP Client\n\/\/!\n\/\/! # Usage\n\/\/!\n\/\/! The `Client` API is designed for most people to make HTTP requests.\n\/\/! It utilizes the lower level `Request` API.\n\/\/!\n\/\/! ## GET\n\/\/!\n\/\/! ```no_run\n\/\/! # use hyper::Client;\n\/\/! let mut client = Client::new();\n\/\/!\n\/\/! let res = client.get(\"http:\/\/example.domain\").send().unwrap();\n\/\/! assert_eq!(res.status, hyper::Ok);\n\/\/! ```\n\/\/!\n\/\/! The returned value is a `Response`, which provides easy access to\n\/\/! the `status`, the `headers`, and the response body via the `Read`\n\/\/! trait.\n\/\/!\n\/\/! ## POST\n\/\/!\n\/\/! ```no_run\n\/\/! # use hyper::Client;\n\/\/! let mut client = Client::new();\n\/\/!\n\/\/! let res = client.post(\"http:\/\/example.domain\")\n\/\/! .body(\"foo=bar\")\n\/\/! .send()\n\/\/! .unwrap();\n\/\/! assert_eq!(res.status, hyper::Ok);\n\/\/! 
```\nuse std::default::Default;\nuse std::io::{self, copy, Read};\nuse std::iter::Extend;\n\nuse url::UrlParser;\nuse url::ParseError as UrlError;\n\nuse header::{Headers, Header, HeaderFormat};\nuse header::{ContentLength, Location};\nuse method::Method;\nuse net::{NetworkConnector, NetworkStream, ContextVerifier};\nuse status::StatusClass::Redirection;\nuse {Url};\nuse Error;\n\npub use self::pool::Pool;\npub use self::request::Request;\npub use self::response::Response;\n\npub mod pool;\npub mod request;\npub mod response;\n\nuse message::Protocol;\nuse http11::Http11Protocol;\n\n\/\/\/ A Client to use additional features with Requests.\n\/\/\/\n\/\/\/ Clients can handle things such as: redirect policy, connection pooling.\npub struct Client {\n protocol: Box<Protocol + Send>,\n redirect_policy: RedirectPolicy,\n}\n\nimpl Client {\n\n \/\/\/ Create a new Client.\n pub fn new() -> Client {\n Client::with_pool_config(Default::default())\n }\n\n \/\/\/ Create a new Client with a configured Pool Config.\n pub fn with_pool_config(config: pool::Config) -> Client {\n Client::with_connector(Pool::new(config))\n }\n\n \/\/\/ Create a new client with a specific connector.\n pub fn with_connector<C, S>(connector: C) -> Client\n where C: NetworkConnector<Stream=S> + Send + 'static, S: NetworkStream + Send {\n Client::with_protocol(Http11Protocol::with_connector(connector))\n }\n\n \/\/\/ Create a new client with a specific `Protocol`.\n pub fn with_protocol<P: Protocol + Send + 'static>(protocol: P) -> Client {\n Client {\n protocol: Box::new(protocol),\n redirect_policy: Default::default()\n }\n }\n\n \/\/\/ Set the SSL verifier callback for use with OpenSSL.\n pub fn set_ssl_verifier(&mut self, verifier: ContextVerifier) {\n self.protocol.set_ssl_verifier(verifier);\n }\n\n \/\/\/ Set the RedirectPolicy.\n pub fn set_redirect_policy(&mut self, policy: RedirectPolicy) {\n self.redirect_policy = policy;\n }\n\n \/\/\/ Build a Get request.\n pub fn get<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Build a Head request.\n pub fn head<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Build a Post request.\n pub fn post<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Build a Put request.\n pub fn put<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Put, url)\n }\n\n \/\/\/ Build a Delete request.\n pub fn delete<U: IntoUrl>(&mut self, url: U) -> RequestBuilder<U> {\n self.request(Method::Delete, url)\n }\n\n\n \/\/\/ Build a new request using this Client.\n pub fn request<U: IntoUrl>(&mut self, method: Method, url: U) -> RequestBuilder<U> {\n RequestBuilder {\n client: self,\n method: method,\n url: url,\n body: None,\n headers: None,\n }\n }\n}\n\nimpl Default for Client {\n fn default() -> Client { Client::new() }\n}\n\n\/\/\/ Options for an individual Request.\n\/\/\/\n\/\/\/ One of these will be built for you if you use one of the convenience\n\/\/\/ methods, such as `get()`, `post()`, etc.\npub struct RequestBuilder<'a, U: IntoUrl> {\n client: &'a Client,\n url: U,\n headers: Option<Headers>,\n method: Method,\n body: Option<Body<'a>>,\n}\n\nimpl<'a, U: IntoUrl> RequestBuilder<'a, U> {\n\n \/\/\/ Set a request body to be sent.\n pub fn body<B: Into<Body<'a>>>(mut self, body: B) -> RequestBuilder<'a, U> {\n self.body = Some(body.into());\n self\n }\n\n \/\/\/ Add additional headers to the request.\n pub fn 
headers(mut self, headers: Headers) -> RequestBuilder<'a, U> {\n self.headers = Some(headers);\n self\n }\n\n \/\/\/ Add an individual new header to the request.\n pub fn header<H: Header + HeaderFormat>(mut self, header: H) -> RequestBuilder<'a, U> {\n {\n let mut headers = match self.headers {\n Some(ref mut h) => h,\n None => {\n self.headers = Some(Headers::new());\n self.headers.as_mut().unwrap()\n }\n };\n\n headers.set(header);\n }\n self\n }\n\n \/\/\/ Execute this request and receive a Response back.\n pub fn send(self) -> ::Result<Response> {\n let RequestBuilder { client, method, url, headers, body } = self;\n let mut url = try!(url.into_url());\n trace!(\"send {:?} {:?}\", method, url);\n\n let can_have_body = match &method {\n &Method::Get | &Method::Head => false,\n _ => true\n };\n\n let mut body = if can_have_body {\n body\n } else {\n None\n };\n\n loop {\n let message = {\n let (host, port) = try!(get_host_and_port(&url));\n try!(client.protocol.new_message(&host, port, &*url.scheme))\n };\n let mut req = try!(Request::with_message(method.clone(), url.clone(), message));\n headers.as_ref().map(|headers| req.headers_mut().extend(headers.iter()));\n\n match (can_have_body, body.as_ref()) {\n (true, Some(body)) => match body.size() {\n Some(size) => req.headers_mut().set(ContentLength(size)),\n None => (), \/\/ chunked, Request will add it automatically\n },\n (true, None) => req.headers_mut().set(ContentLength(0)),\n _ => () \/\/ neither\n }\n let mut streaming = try!(req.start());\n body.take().map(|mut rdr| copy(&mut rdr, &mut streaming));\n let res = try!(streaming.send());\n if res.status.class() != Redirection {\n return Ok(res)\n }\n debug!(\"redirect code {:?} for {}\", res.status, url);\n\n let loc = {\n \/\/ punching borrowck here\n let loc = match res.headers.get::<Location>() {\n Some(&Location(ref loc)) => {\n Some(UrlParser::new().base_url(&url).parse(&loc[..]))\n }\n None => {\n debug!(\"no Location header\");\n \/\/ could be 304 Not Modified?\n None\n }\n };\n match loc {\n Some(r) => r,\n None => return Ok(res)\n }\n };\n url = match loc {\n Ok(u) => u,\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(res);\n }\n };\n match client.redirect_policy {\n \/\/ separate branches because they can't be one\n RedirectPolicy::FollowAll => (), \/\/continue\n RedirectPolicy::FollowIf(cond) if cond(&url) => (), \/\/continue\n _ => return Ok(res),\n }\n }\n }\n}\n\n\/\/\/ An enum of possible body types for a Request.\npub enum Body<'a> {\n \/\/\/ A Reader does not necessarily know it's size, so it is chunked.\n ChunkedBody(&'a mut (Read + 'a)),\n \/\/\/ For Readers that can know their size, like a `File`.\n SizedBody(&'a mut (Read + 'a), u64),\n \/\/\/ A String has a size, and uses Content-Length.\n BufBody(&'a [u8] , usize),\n}\n\nimpl<'a> Body<'a> {\n fn size(&self) -> Option<u64> {\n match *self {\n Body::SizedBody(_, len) => Some(len),\n Body::BufBody(_, len) => Some(len as u64),\n _ => None\n }\n }\n}\n\nimpl<'a> Read for Body<'a> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n match *self {\n Body::ChunkedBody(ref mut r) => r.read(buf),\n Body::SizedBody(ref mut r, _) => r.read(buf),\n Body::BufBody(ref mut r, _) => Read::read(r, buf),\n }\n }\n}\n\nimpl<'a> Into<Body<'a>> for &'a [u8] {\n #[inline]\n fn into(self) -> Body<'a> {\n Body::BufBody(self, self.len())\n }\n}\n\nimpl<'a> Into<Body<'a>> for &'a str {\n #[inline]\n fn into(self) -> Body<'a> {\n self.as_bytes().into()\n }\n}\n\nimpl<'a> 
Into<Body<'a>> for &'a String {\n #[inline]\n fn into(self) -> Body<'a> {\n self.as_bytes().into()\n }\n}\n\nimpl<'a, R: Read> From<&'a mut R> for Body<'a> {\n #[inline]\n fn from(r: &'a mut R) -> Body<'a> {\n Body::ChunkedBody(r)\n }\n}\n\n\/\/\/ A helper trait to convert common objects into a Url.\npub trait IntoUrl {\n \/\/\/ Consumes the object, trying to return a Url.\n fn into_url(self) -> Result<Url, UrlError>;\n}\n\nimpl IntoUrl for Url {\n fn into_url(self) -> Result<Url, UrlError> {\n Ok(self)\n }\n}\n\nimpl<'a> IntoUrl for &'a str {\n fn into_url(self) -> Result<Url, UrlError> {\n Url::parse(self)\n }\n}\n\nimpl<'a> IntoUrl for &'a String {\n fn into_url(self) -> Result<Url, UrlError> {\n Url::parse(self)\n }\n}\n\n\/\/\/ Behavior regarding how to handle redirects within a Client.\n#[derive(Copy)]\npub enum RedirectPolicy {\n \/\/\/ Don't follow any redirects.\n FollowNone,\n \/\/\/ Follow all redirects.\n FollowAll,\n \/\/\/ Follow a redirect if the contained function returns true.\n FollowIf(fn(&Url) -> bool),\n}\n\n\/\/ This is a hack because of upstream typesystem issues.\nimpl Clone for RedirectPolicy {\n fn clone(&self) -> RedirectPolicy {\n *self\n }\n}\n\nimpl Default for RedirectPolicy {\n fn default() -> RedirectPolicy {\n RedirectPolicy::FollowAll\n }\n}\n\nfn get_host_and_port(url: &Url) -> ::Result<(String, u16)> {\n let host = match url.serialize_host() {\n Some(host) => host,\n None => return Err(Error::Uri(UrlError::EmptyHost))\n };\n trace!(\"host={:?}\", host);\n let port = match url.port_or_default() {\n Some(port) => port,\n None => return Err(Error::Uri(UrlError::InvalidPort))\n };\n trace!(\"port={:?}\", port);\n Ok((host, port))\n}\n\n#[cfg(test)]\nmod tests {\n use header::Server;\n use super::{Client, RedirectPolicy};\n use url::Url;\n use mock::ChannelMockConnector;\n use std::sync::mpsc::{self, TryRecvError};\n\n mock_connector!(MockRedirectPolicy {\n \"http:\/\/127.0.0.1\" => \"HTTP\/1.1 301 Redirect\\r\\n\\\n Location: http:\/\/127.0.0.2\\r\\n\\\n Server: mock1\\r\\n\\\n \\r\\n\\\n \"\n \"http:\/\/127.0.0.2\" => \"HTTP\/1.1 302 Found\\r\\n\\\n Location: https:\/\/127.0.0.3\\r\\n\\\n Server: mock2\\r\\n\\\n \\r\\n\\\n \"\n \"https:\/\/127.0.0.3\" => \"HTTP\/1.1 200 OK\\r\\n\\\n Server: mock3\\r\\n\\\n \\r\\n\\\n \"\n });\n\n #[test]\n fn test_redirect_followall() {\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowAll);\n\n let res = client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock3\".to_owned())));\n }\n\n #[test]\n fn test_redirect_dontfollow() {\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowNone);\n let res = client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock1\".to_owned())));\n }\n\n #[test]\n fn test_redirect_followif() {\n fn follow_if(url: &Url) -> bool {\n !url.serialize().contains(\"127.0.0.3\")\n }\n let mut client = Client::with_connector(MockRedirectPolicy);\n client.set_redirect_policy(RedirectPolicy::FollowIf(follow_if));\n let res = client.get(\"http:\/\/127.0.0.1\").send().unwrap();\n assert_eq!(res.headers.get(), Some(&Server(\"mock2\".to_owned())));\n }\n\n \/\/\/ Tests that the `Client::set_ssl_verifier` method does not drop the\n \/\/\/ old connector, but rather delegates the change to the connector itself.\n #[test]\n fn test_client_set_ssl_verifer() {\n let (tx, rx) = mpsc::channel();\n let mut 
client = Client::with_connector(ChannelMockConnector::new(tx));\n\n client.set_ssl_verifier(Box::new(|_| {}));\n\n \/\/ Make sure that the client called the `set_ssl_verifier` method\n match rx.try_recv() {\n Ok(meth) => {\n assert_eq!(meth, \"set_ssl_verifier\");\n },\n _ => panic!(\"Expected a call to `set_ssl_verifier`\"),\n };\n \/\/ Now make sure that no other method was called, as well as that\n \/\/ the connector is still alive (i.e. wasn't dropped by the client).\n match rx.try_recv() {\n Err(TryRecvError::Empty) => {},\n Err(TryRecvError::Disconnected) => {\n panic!(\"Expected the connector to still be alive.\");\n },\n Ok(_) => panic!(\"Did not expect any more method calls.\"),\n };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add is_present, get and set for vars<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::convert::Into;\nuse std::ops::Deref;\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::EntryHeader;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse toml::Table;\nuse url::Url;\nuse sodiumoxide::crypto::hash;\nuse sodiumoxide::crypto::hash::sha512::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over FileLockEntry to have some convenience internally.\nstruct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ For interal use only. 
Load an Link from a store id, if this is actually a Link\n fn retrieve(store: &'a Store, id: StoreId) -> Result<Option<Link<'a>>> {\n store.retrieve(id)\n .map(|fle| {\n if let Some(_) = Link::get_link_uri_from_filelockentry(&fle) {\n Some(Link {\n link: fle\n })\n } else {\n None\n }\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Get a link Url object from a FileLockEntry, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.deref()\n .get_header()\n .read(\"imag.content.uri\")\n .ok()\n .and_then(|opt| {\n match opt {\n Some(Value::String(s)) => Url::parse(&s[..]).ok(),\n _ => None\n }\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .deref()\n .get_header()\n .read(\"imag.content.uri\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(|s| Some(s))\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\nfn is_link_store_id(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a \/link\/external\/*: '{:?}'\", id);\n id.starts_with(\"\/link\/external\/\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement ExternalLinker for Entry, hiding the fact that there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .map(|vect| {\n debug!(\"Getting external links\");\n vect.into_iter()\n .filter(is_link_store_id)\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => get_external_link_from_file(&f),\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, 
filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n \/\/\/ Convert a hash::sha512::Digest from sodiumoxide into a StoreId\n fn hash_to_storeid(d: Digest) -> StoreId {\n let hash = &d[..];\n let v = Vec::from(hash);\n let s = String::from_utf8(v).unwrap(); \/\/ TODO: Uncaught unwrap()\n debug!(\"Generating store id for digest: '{:?}' == {}\", d, s);\n\n let id = ModuleEntryPath::new(format!(\"external\/{}\", s)).into_storeid();\n debug!(\"Generted store id: '{:?}'\", id);\n\n id\n }\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = hash::hash(link.serialize().as_bytes());\n let file_id = hash_to_storeid(hash);\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n let mut file = {\n if let Ok(mut file) = store.retrieve(file_id.clone()) { \/\/ retrieve the file from the store\n debug!(\"Woha, there is already a file for this link: '{:?}'\", link);\n file\n } else { \/\/ or\n debug!(\"Generating file for link = '{:?}' on id = {:?}\", link, file_id);\n let res = store.create(file_id) \/\/ create it\n .and_then(|mut file| {\n {\n debug!(\"Generating header content!\");\n let mut hdr = file.deref_mut().get_header_mut();\n\n \/\/ Write the URI into the header\n match hdr.set(\"imag.content\", Value::Table(BTreeMap::new())) {\n Ok(_) => {\n let v = Value::String(link.serialize());\n debug!(\"setting URL = '{:?}\", v);\n hdr.set(\"imag.content.uri\", v);\n Ok(())\n },\n Err(e) => {\n debug!(\"Failed to generate a table in header at 'imag.content'\");\n Err(e)\n },\n }\n }.map(|_| file)\n })\n .map_err(|e| LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n\n \/\/ And if that fails we can error\n if let Err(e) = res {\n debug!(\"Failed to create or retrieve an file for this link '{:?}'\", link);\n return Err(e);\n }\n debug!(\"Success creating or retrieving an file for this link '{:?}'\", link);\n res.unwrap()\n }\n };\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.serialize() != link.serialize())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<commit_msg>Reimplement hash generation with rust-crypto<commit_after>\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, 
there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::convert::Into;\nuse std::ops::Deref;\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::EntryHeader;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse toml::Table;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over FileLockEntry to have some convenience internally.\nstruct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ For interal use only. Load an Link from a store id, if this is actually a Link\n fn retrieve(store: &'a Store, id: StoreId) -> Result<Option<Link<'a>>> {\n store.retrieve(id)\n .map(|fle| {\n if let Some(_) = Link::get_link_uri_from_filelockentry(&fle) {\n Some(Link {\n link: fle\n })\n } else {\n None\n }\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Get a link Url object from a FileLockEntry, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.deref()\n .get_header()\n .read(\"imag.content.uri\")\n .ok()\n .and_then(|opt| {\n match opt {\n Some(Value::String(s)) => Url::parse(&s[..]).ok(),\n _ => None\n }\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .deref()\n .get_header()\n .read(\"imag.content.uri\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(|s| Some(s))\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\nfn is_link_store_id(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a \/link\/external\/*: '{:?}'\", id);\n id.starts_with(\"\/link\/external\/\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement ExternalLinker for Entry, hiding the fact that there is no such thing as an external\n\/\/\/ link in an 
entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .map(|vect| {\n debug!(\"Getting external links\");\n vect.into_iter()\n .filter(is_link_store_id)\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => get_external_link_from_file(&f),\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.serialize()[..]);\n s.result_str()\n };\n let file_id = ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid();\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n let mut file = {\n if let Ok(mut file) = store.retrieve(file_id.clone()) { \/\/ retrieve the file from the store\n debug!(\"Woha, there is already a file for this link: '{:?}'\", link);\n file\n } else { \/\/ or\n debug!(\"Generating file for link = '{:?}' on id = {:?}\", link, file_id);\n let res = store.create(file_id) \/\/ create it\n .and_then(|mut file| {\n {\n debug!(\"Generating header content!\");\n let mut hdr = file.deref_mut().get_header_mut();\n\n \/\/ Write the URI into the header\n match hdr.set(\"imag.content\", Value::Table(BTreeMap::new())) {\n Ok(_) => {\n let v = Value::String(link.serialize());\n debug!(\"setting URL = '{:?}\", v);\n hdr.set(\"imag.content.uri\", v);\n Ok(())\n },\n Err(e) => {\n debug!(\"Failed to generate a table in header at 'imag.content'\");\n Err(e)\n },\n }\n }.map(|_| file)\n })\n .map_err(|e| LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n\n \/\/ And if that fails we can error\n if let Err(e) = res {\n debug!(\"Failed to create or retrieve an file for this link '{:?}'\", link);\n return Err(e);\n }\n debug!(\"Success creating or retrieving an file for this link '{:?}'\", link);\n res.unwrap()\n }\n };\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n 
debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.serialize() != link.serialize())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Changed DFA set operations to take references<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement alloc_dbpage for Cursor<commit_after><|endoftext|>"} {"text":"<commit_before>#![doc(html_root_url = \"https:\/\/hyperium.github.io\/hyper\/\")]\n#![deny(missing_docs)]\n#![cfg_attr(test, deny(warnings))]\n#![cfg_attr(all(test, feature = \"nightly\"), feature(test))]\n\n\/\/! # Hyper\n\/\/!\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! Hyper offers both a [Client](client\/index.html) and a\n\/\/! [Server](server\/index.html) which can be used to drive complex web\n\/\/! applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapper over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations\n\/\/! can be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! `match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's [header](header\/index.html) representation is likely the most\n\/\/! complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! 
These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/server`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A `Handler` in Hyper accepts a `Request` and `Response`. This is where\n\/\/! user-code can handle each connection. The server accepts connections in a\n\/\/! task pool with a customizable number of threads, and passes the Request \/\n\/\/! Response to the handler.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. `Response`'s `Writer`\n\/\/! implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! in the documentation for server Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! 
implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate rustc_serialize as serialize;\nextern crate time;\nextern crate url;\n#[cfg(feature = \"openssl\")]\nextern crate openssl;\nextern crate cookie;\nextern crate unicase;\nextern crate httparse;\nextern crate num_cpus;\nextern crate traitobject;\nextern crate typeable;\nextern crate solicit;\n\n#[macro_use]\nextern crate language_tags;\n\n#[macro_use]\nextern crate mime as mime_crate;\n\n#[macro_use]\nextern crate log;\n\n#[cfg(all(test, feature = \"nightly\"))]\nextern crate test;\n\n\npub use url::Url;\npub use client::Client;\npub use error::{Result, Error};\npub use method::Method::{Get, Head, Post, Delete};\npub use status::StatusCode::{Ok, BadRequest, NotFound};\npub use server::Server;\npub use language_tags::LanguageTag;\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n trace!(\"TODO: {:?}\", format_args!($($arg)*))\n })\n);\n\nmacro_rules! inspect(\n ($name:expr, $value:expr) => ({\n let v = $value;\n trace!(\"inspect: {:?} = {:?}\", $name, v);\n v\n })\n);\n\n#[cfg(test)]\n#[macro_use]\nmod mock;\n#[doc(hidden)]\npub mod buffer;\npub mod client;\npub mod error;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\/\/\/ Re-exporting the mime crate, for convenience.\npub mod mime {\n pub use mime_crate::*;\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_send<T: Send>() {\n _assert_send::<Client>();\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n _assert_send::<error::Error>();\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_sync<T: Sync>() {\n _assert_sync::<Client>();\n _assert_sync::<error::Error>();\n}\n<commit_msg>fix(lint): change deny(missing_docs) to only apply for tests<commit_after>#![doc(html_root_url = \"https:\/\/hyperium.github.io\/hyper\/\")]\n#![cfg_attr(test, deny(missing_docs))]\n#![cfg_attr(test, deny(warnings))]\n#![cfg_attr(all(test, feature = \"nightly\"), feature(test))]\n\n\/\/! # Hyper\n\/\/!\n\/\/! Hyper is a fast, modern HTTP implementation written in and for Rust. It\n\/\/! is a low-level typesafe abstraction over raw HTTP, providing an elegant\n\/\/! layer over \"stringly-typed\" HTTP.\n\/\/!\n\/\/! Hyper offers both a [Client](client\/index.html) and a\n\/\/! [Server](server\/index.html) which can be used to drive complex web\n\/\/! applications written entirely in Rust.\n\/\/!\n\/\/! ## Internal Design\n\/\/!\n\/\/! Hyper is designed as a relatively low-level wrapper over raw HTTP. It should\n\/\/! allow the implementation of higher-level abstractions with as little pain as\n\/\/! possible, and should not irrevocably hide any information from its users.\n\/\/!\n\/\/! ### Common Functionality\n\/\/!\n\/\/! Functionality and code shared between the Server and Client implementations\n\/\/! can be found in `src` directly - this includes `NetworkStream`s, `Method`s,\n\/\/! `StatusCode`, and so on.\n\/\/!\n\/\/! #### Methods\n\/\/!\n\/\/! Methods are represented as a single `enum` to remain as simple as possible.\n\/\/! Extension Methods are represented as raw `String`s. A method's safety and\n\/\/! idempotence can be accessed using the `safe` and `idempotent` methods.\n\/\/!\n\/\/! #### StatusCode\n\/\/!\n\/\/! Status codes are also represented as a single, exhaustive, `enum`. This\n\/\/! representation is efficient, typesafe, and ergonomic as it allows the use of\n\/\/! 
`match` to disambiguate known status codes.\n\/\/!\n\/\/! #### Headers\n\/\/!\n\/\/! Hyper's [header](header\/index.html) representation is likely the most\n\/\/! complex API exposed by Hyper.\n\/\/!\n\/\/! Hyper's headers are an abstraction over an internal `HashMap` and provides a\n\/\/! typesafe API for interacting with headers that does not rely on the use of\n\/\/! \"string-typing.\"\n\/\/!\n\/\/! Each HTTP header in Hyper has an associated type and implementation of the\n\/\/! `Header` trait, which defines an HTTP headers name as a string, how to parse\n\/\/! that header, and how to format that header.\n\/\/!\n\/\/! Headers are then parsed from the string representation lazily when the typed\n\/\/! representation of a header is requested and formatted back into their string\n\/\/! representation when headers are written back to the client.\n\/\/!\n\/\/! #### NetworkStream and NetworkAcceptor\n\/\/!\n\/\/! These are found in `src\/net.rs` and define the interface that acceptors and\n\/\/! streams must fulfill for them to be used within Hyper. They are by and large\n\/\/! internal tools and you should only need to mess around with them if you want to\n\/\/! mock or replace `TcpStream` and `TcpAcceptor`.\n\/\/!\n\/\/! ### Server\n\/\/!\n\/\/! Server-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in in `src\/server`.\n\/\/!\n\/\/! #### Handler + Server\n\/\/!\n\/\/! A `Handler` in Hyper accepts a `Request` and `Response`. This is where\n\/\/! user-code can handle each connection. The server accepts connections in a\n\/\/! task pool with a customizable number of threads, and passes the Request \/\n\/\/! Response to the handler.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! An incoming HTTP Request is represented as a struct containing\n\/\/! a `Reader` over a `NetworkStream`, which represents the body, headers, a remote\n\/\/! address, an HTTP version, and a `Method` - relatively standard stuff.\n\/\/!\n\/\/! `Request` implements `Reader` itself, meaning that you can ergonomically get\n\/\/! the body out of a `Request` using standard `Reader` methods and helpers.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! An outgoing HTTP Response is also represented as a struct containing a `Writer`\n\/\/! over a `NetworkStream` which represents the Response body in addition to\n\/\/! standard items such as the `StatusCode` and HTTP version. `Response`'s `Writer`\n\/\/! implementation provides a streaming interface for sending data over to the\n\/\/! client.\n\/\/!\n\/\/! One of the traditional problems with representing outgoing HTTP Responses is\n\/\/! tracking the write-status of the Response - have we written the status-line,\n\/\/! the headers, the body, etc.? Hyper tracks this information statically using the\n\/\/! type system and prevents you, using the type system, from writing headers after\n\/\/! you have started writing to the body or vice versa.\n\/\/!\n\/\/! Hyper does this through a phantom type parameter in the definition of Response,\n\/\/! which tracks whether you are allowed to write to the headers or the body. This\n\/\/! phantom type can have two values `Fresh` or `Streaming`, with `Fresh`\n\/\/! indicating that you can write the headers and `Streaming` indicating that you\n\/\/! may write to the body, but not the headers.\n\/\/!\n\/\/! ### Client\n\/\/!\n\/\/! Client-specific functionality, such as `Request` and `Response`\n\/\/! representations, are found in `src\/client`.\n\/\/!\n\/\/! #### Request\n\/\/!\n\/\/! 
An outgoing HTTP Request is represented as a struct containing a `Writer` over\n\/\/! a `NetworkStream` which represents the Request body in addition to the standard\n\/\/! information such as headers and the request method.\n\/\/!\n\/\/! Outgoing Requests track their write-status in almost exactly the same way as\n\/\/! outgoing HTTP Responses do on the Server, so we will defer to the explanation\n\/\/! in the documentation for server Response.\n\/\/!\n\/\/! Requests expose an efficient streaming interface instead of a builder pattern,\n\/\/! but they also provide the needed interface for creating a builder pattern over\n\/\/! the API exposed by core Hyper.\n\/\/!\n\/\/! #### Response\n\/\/!\n\/\/! Incoming HTTP Responses are represented as a struct containing a `Reader` over\n\/\/! a `NetworkStream` and contain headers, a status, and an http version. They\n\/\/! implement `Reader` and can be read to get the data out of a `Response`.\n\/\/!\n\nextern crate rustc_serialize as serialize;\nextern crate time;\nextern crate url;\n#[cfg(feature = \"openssl\")]\nextern crate openssl;\nextern crate cookie;\nextern crate unicase;\nextern crate httparse;\nextern crate num_cpus;\nextern crate traitobject;\nextern crate typeable;\nextern crate solicit;\n\n#[macro_use]\nextern crate language_tags;\n\n#[macro_use]\nextern crate mime as mime_crate;\n\n#[macro_use]\nextern crate log;\n\n#[cfg(all(test, feature = \"nightly\"))]\nextern crate test;\n\n\npub use url::Url;\npub use client::Client;\npub use error::{Result, Error};\npub use method::Method::{Get, Head, Post, Delete};\npub use status::StatusCode::{Ok, BadRequest, NotFound};\npub use server::Server;\npub use language_tags::LanguageTag;\n\nmacro_rules! todo(\n ($($arg:tt)*) => (if cfg!(not(ndebug)) {\n trace!(\"TODO: {:?}\", format_args!($($arg)*))\n })\n);\n\nmacro_rules! inspect(\n ($name:expr, $value:expr) => ({\n let v = $value;\n trace!(\"inspect: {:?} = {:?}\", $name, v);\n v\n })\n);\n\n#[cfg(test)]\n#[macro_use]\nmod mock;\n#[doc(hidden)]\npub mod buffer;\npub mod client;\npub mod error;\npub mod method;\npub mod header;\npub mod http;\npub mod net;\npub mod server;\npub mod status;\npub mod uri;\npub mod version;\n\n\/\/\/ Re-exporting the mime crate, for convenience.\npub mod mime {\n pub use mime_crate::*;\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_send<T: Send>() {\n _assert_send::<Client>();\n _assert_send::<client::Request<net::Fresh>>();\n _assert_send::<client::Response>();\n _assert_send::<error::Error>();\n}\n\n#[allow(unconditional_recursion)]\nfn _assert_sync<T: Sync>() {\n _assert_sync::<Client>();\n _assert_sync::<error::Error>();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add interesting case for `url::Url::set_port` doc examples.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update to new extern crate semantics<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chore(lib): deny all warnings<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
Support for Unix domain socket clients and servers.\n#![feature(io, std_misc, path, core)]\n#![warn(missing_docs)]\n#![doc(html_root_url=\"https:\/\/sfackler.github.io\/rust-unix-socket\/doc\")]\n\nextern crate libc;\n\nuse std::cmp;\nuse std::ffi::{OsStr, AsOsStr};\nuse std::io;\nuse std::iter::IntoIterator;\nuse std::mem;\nuse std::num::Int;\nuse std::os::unix::{Fd, OsStrExt, AsRawFd};\nuse std::path::AsPath;\nuse libc::c_int;\nuse std::fmt;\n\nextern \"C\" {\n fn socketpair(domain: c_int, ty: c_int, proto: c_int, sv: *mut [c_int; 2]) -> c_int;\n}\n\nstruct Inner(Fd);\n\nimpl Drop for Inner {\n fn drop(&mut self) {\n unsafe {\n libc::close(self.0);\n }\n }\n}\n\nimpl Inner {\n unsafe fn new() -> io::Result<Inner> {\n let fd = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(Inner(fd))\n }\n }\n\n unsafe fn new_pair() -> io::Result<[Inner; 2]> {\n let mut fds = [0, 0];\n let res = socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, &mut fds);\n if res < 0 {\n return Err(io::Error::last_os_error());\n }\n debug_assert_eq!(res, 0);\n Ok([Inner(fds[0]), Inner(fds[1])])\n }\n\n fn fmt(&self,\n f: unsafe extern \"system\" fn(libc::c_int,\n *mut libc::sockaddr,\n *mut libc::socklen_t) -> libc::c_int,\n fmt: &mut fmt::Formatter) -> fmt::Result {\n unsafe {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n let mut len = mem::size_of::<libc::sockaddr_un>() as libc::socklen_t;\n\n let ret = f(self.0, &mut addr as *mut _ as *mut _ , &mut len as *mut _);\n\n if ret == 0 {\n debug_assert_eq!(addr.sun_family, libc::AF_UNIX as libc::sa_family_t);\n\n try!(write!(fmt, \", address: \"));\n\n let offset = &(*(0 as *const libc::sockaddr_un)).sun_path as *const _ as usize;\n let path_len = len as usize - offset;\n\n if path_len == 0 {\n try!(write!(fmt, \"(unnamed)\"));\n } else {\n let (path, kind) = if addr.sun_path[0] == 0 {\n (&addr.sun_path[1..path_len], \"abstract\")\n } else {\n (&addr.sun_path[..path_len - 1], \"pathname\")\n };\n\n let path: &[u8] = mem::transmute(path);\n let path = OsStr::from_bytes(path).as_path().display();\n try!(write!(fmt, \"{:?} ({})\", path, kind));\n }\n }\n }\n\n Ok(())\n }\n}\n\nunsafe fn sockaddr_un<P: AsPath + ?Sized>(path: &P) -> io::Result<libc::sockaddr_un> {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n\n let bytes = path.as_path().as_os_str().as_bytes();\n if bytes.len() >= addr.sun_path.len() {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"path must be smaller than SUN_LEN\",\n None));\n }\n for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) {\n *dst = *src as libc::c_char;\n }\n \/\/ null byte's already there because we zeroed the struct\n\n Ok(addr)\n}\n\n\/\/\/ A stream which communicates over a Unix domain socket.\npub struct UnixStream {\n inner: Inner,\n}\n\nimpl fmt::Debug for UnixStream {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"UnixStream {{ fd: {}\", self.inner.0));\n try!(self.inner.fmt(libc::getpeername, fmt));\n write!(fmt, \" }}\")\n }\n}\n\nimpl UnixStream {\n \/\/\/ Connect to the socket named by `path`.\n pub fn connect<P: AsPath + ?Sized>(path: &P) -> io::Result<UnixStream> {\n unsafe {\n let inner = try!(Inner::new());\n let addr = try!(sockaddr_un(path));\n\n let ret = libc::connect(inner.0,\n &addr as *const _ as *const _,\n mem::size_of::<libc::sockaddr_un>() as libc::socklen_t);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n 
Ok(UnixStream {\n inner: inner,\n })\n }\n }\n }\n\n \/\/\/ Create an unnamed pair of connected sockets.\n \/\/\/\n \/\/\/ Returns two `UnixStream`s which are connected to each other.\n pub fn unnamed() -> io::Result<[UnixStream; 2]> {\n unsafe {\n let [i1, i2] = try!(Inner::new_pair());\n Ok([UnixStream { inner: i1 }, UnixStream { inner: i2 }])\n }\n }\n\n \/\/\/ Create a new independently owned handle to the underlying socket.\n \/\/\/\n \/\/\/ The returned `UnixStream` is a reference to the same stream that this\n \/\/\/ object references. Both handles will read and write the same stream of\n \/\/\/ data, and options set on one stream will be propogated to the other\n \/\/\/ stream.\n pub fn try_clone(&self) -> io::Result<UnixStream> {\n let fd = unsafe { libc::dup(self.inner.0) };\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(fd)\n })\n }\n }\n}\n\nfn calc_len(buf: &[u8]) -> libc::size_t {\n cmp::min(<libc::size_t as Int>::max_value() as usize, buf.len()) as libc::size_t\n}\n\nimpl io::Read for UnixStream {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = unsafe {\n libc::recv(self.inner.0, buf.as_mut_ptr() as *mut _, calc_len(buf), 0)\n };\n\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(ret as usize)\n }\n }\n}\n\nimpl io::Write for UnixStream {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n let ret = unsafe {\n libc::send(self.inner.0, buf.as_ptr() as *const _, calc_len(buf), 0)\n };\n\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(ret as usize)\n }\n }\n\n fn flush(&mut self) -> io::Result<()> {\n Ok(())\n }\n}\n\nimpl AsRawFd for UnixStream {\n fn as_raw_fd(&self) -> Fd {\n self.inner.0\n }\n}\n\n\/\/\/ A structure representing a Unix domain socket server.\npub struct UnixListener {\n inner: Inner,\n}\n\nimpl fmt::Debug for UnixListener {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"UnixListener {{ fd: {}\", self.inner.0));\n try!(self.inner.fmt(libc::getsockname, fmt));\n write!(fmt, \" }}\")\n }\n}\n\nimpl UnixListener {\n \/\/\/ Creates a new `UnixListener` which will be bound to the specified socket.\n pub fn bind<P: AsPath + ?Sized>(path: &P) -> io::Result<UnixListener> {\n unsafe {\n let inner = try!(Inner::new());\n let addr = try!(sockaddr_un(path));\n\n let ret = libc::bind(inner.0,\n &addr as *const _ as *const _,\n mem::size_of::<libc::sockaddr_un>() as libc::socklen_t);\n if ret < 0 {\n return Err(io::Error::last_os_error());\n }\n\n let ret = libc::listen(inner.0, 128);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixListener {\n inner: inner,\n })\n }\n }\n }\n\n \/\/\/ Accepts a new incoming connection to this listener.\n pub fn accept(&self) -> io::Result<UnixStream> {\n unsafe {\n let ret = libc::accept(self.inner.0, 0 as *mut _, 0 as *mut _);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(ret)\n })\n }\n }\n }\n\n \/\/\/ Create a new independently owned handle to the underlying socket.\n \/\/\/\n \/\/\/ The returned `UnixListener` is a reference to the same socket that this\n \/\/\/ object references. 
Both handles can be used to accept incoming\n \/\/\/ connections and options set on one listener will affect the other.\n pub fn try_clone(&self) -> io::Result<UnixStream> {\n let fd = unsafe { libc::dup(self.inner.0) };\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(fd)\n })\n }\n }\n\n \/\/\/ Returns an iterator over incoming connections.\n \/\/\/\n \/\/\/ The iterator will never return `None`.\n pub fn incoming<'a>(&'a self) -> Incoming<'a> {\n Incoming {\n listener: self\n }\n }\n}\n\nimpl AsRawFd for UnixListener {\n fn as_raw_fd(&self) -> Fd {\n self.inner.0\n }\n}\n\nimpl<'a> IntoIterator for &'a UnixListener {\n type Item = io::Result<UnixStream>;\n type IntoIter = Incoming<'a>;\n\n fn into_iter(self) -> Incoming<'a> {\n self.incoming()\n }\n}\n\n\/\/\/ An iterator over incoming connections to a `UnixListener`.\n\/\/\/\n\/\/\/ It will never return `None`.\n#[derive(Debug)]\npub struct Incoming<'a> {\n listener: &'a UnixListener,\n}\n\nimpl<'a> Iterator for Incoming<'a> {\n type Item = io::Result<UnixStream>;\n\n fn next(&mut self) -> Option<io::Result<UnixStream>> {\n Some(self.listener.accept())\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n (Int::max_value(), None)\n }\n}\n\n#[cfg(test)]\nmod test {\n extern crate temporary;\n\n use std::thread;\n use std::io;\n use std::io::prelude::*;\n\n use {UnixListener, UnixStream};\n\n macro_rules! or_panic {\n ($e:expr) => {\n match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{}\", e),\n }\n }\n }\n\n #[test]\n fn basic() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let socket_path = dir.path().join(\"sock\");\n let msg1 = b\"hello\";\n let msg2 = b\"world!\";\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n let mut stream = or_panic!(listener.accept());\n let mut buf = [0; 5];\n or_panic!(stream.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(stream.write_all(msg2));\n });\n\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n or_panic!(stream.write_all(msg1));\n let mut buf = vec![];\n or_panic!(stream.read_to_end(&mut buf));\n assert_eq!(msg2, buf);\n drop(stream);\n\n thread.join();\n }\n\n #[test]\n fn unnamed() {\n let msg1 = b\"hello\";\n let msg2 = b\"world!\";\n\n let [mut s1, mut s2] = or_panic!(UnixStream::unnamed());\n let thread = thread::scoped(move || {\n \/\/ s1 must be moved in or the test will hang!\n let mut buf = [0; 5];\n or_panic!(s1.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(s1.write_all(msg2));\n });\n\n or_panic!(s2.write_all(msg1));\n let mut buf = vec![];\n or_panic!(s2.read_to_end(&mut buf));\n assert_eq!(msg2, buf);\n drop(s2);\n\n thread.join();\n }\n\n #[test]\n fn try_clone() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let socket_path = dir.path().join(\"sock\");\n let msg1 = b\"hello\";\n let msg2 = b\"world\";\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n let mut stream = or_panic!(listener.accept());\n or_panic!(stream.write_all(msg1));\n or_panic!(stream.write_all(msg2));\n });\n\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n let mut stream2 = or_panic!(stream.try_clone());\n\n let mut buf = [0; 5];\n or_panic!(stream.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(stream2.read(&mut buf));\n assert_eq!(msg2, buf);\n\n thread.join();\n }\n\n #[test]\n fn iter() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let 
socket_path = dir.path().join(\"sock\");\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n for stream in listener.incoming().take(2) {\n let mut stream = or_panic!(stream);\n let mut buf = [0];\n or_panic!(stream.read(&mut buf));\n }\n });\n\n for _ in 0..2 {\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n or_panic!(stream.write_all(&[0]));\n }\n\n thread.join();\n }\n\n #[test]\n fn long_path() {\n let socket_path = \"asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasd\\\n asdfasdfasdfadfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasd\";\n match UnixStream::connect(socket_path) {\n Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {}\n Err(e) => panic!(\"unexpected error {}\", e),\n Ok(_) => panic!(\"unexpected success\"),\n }\n\n match UnixListener::bind(socket_path) {\n Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {}\n Err(e) => panic!(\"unexpected error {}\", e),\n Ok(_) => panic!(\"unexpected success\"),\n }\n }\n}\n<commit_msg>Also display errors in debug formatter<commit_after>\/\/! Support for Unix domain socket clients and servers.\n#![feature(io, std_misc, path, core)]\n#![warn(missing_docs)]\n#![doc(html_root_url=\"https:\/\/sfackler.github.io\/rust-unix-socket\/doc\")]\n\nextern crate libc;\n\nuse std::cmp;\nuse std::ffi::{OsStr, AsOsStr};\nuse std::io;\nuse std::iter::IntoIterator;\nuse std::mem;\nuse std::num::Int;\nuse std::os::unix::{Fd, OsStrExt, AsRawFd};\nuse std::path::AsPath;\nuse libc::c_int;\nuse std::fmt;\n\nextern \"C\" {\n fn socketpair(domain: c_int, ty: c_int, proto: c_int, sv: *mut [c_int; 2]) -> c_int;\n}\n\nstruct Inner(Fd);\n\nimpl Drop for Inner {\n fn drop(&mut self) {\n unsafe {\n libc::close(self.0);\n }\n }\n}\n\nimpl Inner {\n unsafe fn new() -> io::Result<Inner> {\n let fd = libc::socket(libc::AF_UNIX, libc::SOCK_STREAM, 0);\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(Inner(fd))\n }\n }\n\n unsafe fn new_pair() -> io::Result<[Inner; 2]> {\n let mut fds = [0, 0];\n let res = socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, &mut fds);\n if res < 0 {\n return Err(io::Error::last_os_error());\n }\n debug_assert_eq!(res, 0);\n Ok([Inner(fds[0]), Inner(fds[1])])\n }\n\n fn fmt(&self,\n f: unsafe extern \"system\" fn(libc::c_int,\n *mut libc::sockaddr,\n *mut libc::socklen_t) -> libc::c_int,\n fmt: &mut fmt::Formatter) -> fmt::Result {\n unsafe {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n let mut len = mem::size_of::<libc::sockaddr_un>() as libc::socklen_t;\n\n let ret = f(self.0, &mut addr as *mut _ as *mut _ , &mut len as *mut _);\n\n if ret == 0 {\n debug_assert_eq!(addr.sun_family, libc::AF_UNIX as libc::sa_family_t);\n\n let offset = &(*(0 as *const libc::sockaddr_un)).sun_path as *const _ as usize;\n let path_len = len as usize - offset;\n\n if path_len == 0 {\n write!(fmt, \"(unnamed)\")\n } else {\n let (path, kind) = if addr.sun_path[0] == 0 {\n (&addr.sun_path[1..path_len], \"abstract\")\n } else {\n (&addr.sun_path[..path_len - 1], \"pathname\")\n };\n\n let path: &[u8] = mem::transmute(path);\n let path = OsStr::from_bytes(path).as_path().display();\n write!(fmt, \"{:?} ({})\", path, kind)\n }\n } else {\n write!(fmt, \"<{}>\", io::Error::last_os_error())\n }\n }\n }\n}\n\nunsafe fn sockaddr_un<P: AsPath + ?Sized>(path: &P) -> io::Result<libc::sockaddr_un> {\n let mut addr: libc::sockaddr_un = mem::zeroed();\n addr.sun_family = libc::AF_UNIX as libc::sa_family_t;\n\n let bytes = 
path.as_path().as_os_str().as_bytes();\n if bytes.len() >= addr.sun_path.len() {\n return Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"path must be smaller than SUN_LEN\",\n None));\n }\n for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) {\n *dst = *src as libc::c_char;\n }\n \/\/ null byte's already there because we zeroed the struct\n\n Ok(addr)\n}\n\n\/\/\/ A stream which communicates over a Unix domain socket.\npub struct UnixStream {\n inner: Inner,\n}\n\nimpl fmt::Debug for UnixStream {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"UnixStream {{ fd: {}, address: \", self.inner.0));\n try!(self.inner.fmt(libc::getpeername, fmt));\n write!(fmt, \" }}\")\n }\n}\n\nimpl UnixStream {\n \/\/\/ Connect to the socket named by `path`.\n pub fn connect<P: AsPath + ?Sized>(path: &P) -> io::Result<UnixStream> {\n unsafe {\n let inner = try!(Inner::new());\n let addr = try!(sockaddr_un(path));\n\n let ret = libc::connect(inner.0,\n &addr as *const _ as *const _,\n mem::size_of::<libc::sockaddr_un>() as libc::socklen_t);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: inner,\n })\n }\n }\n }\n\n \/\/\/ Create an unnamed pair of connected sockets.\n \/\/\/\n \/\/\/ Returns two `UnixStream`s which are connected to each other.\n pub fn unnamed() -> io::Result<[UnixStream; 2]> {\n unsafe {\n let [i1, i2] = try!(Inner::new_pair());\n Ok([UnixStream { inner: i1 }, UnixStream { inner: i2 }])\n }\n }\n\n \/\/\/ Create a new independently owned handle to the underlying socket.\n \/\/\/\n \/\/\/ The returned `UnixStream` is a reference to the same stream that this\n \/\/\/ object references. Both handles will read and write the same stream of\n \/\/\/ data, and options set on one stream will be propogated to the other\n \/\/\/ stream.\n pub fn try_clone(&self) -> io::Result<UnixStream> {\n let fd = unsafe { libc::dup(self.inner.0) };\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(fd)\n })\n }\n }\n}\n\nfn calc_len(buf: &[u8]) -> libc::size_t {\n cmp::min(<libc::size_t as Int>::max_value() as usize, buf.len()) as libc::size_t\n}\n\nimpl io::Read for UnixStream {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = unsafe {\n libc::recv(self.inner.0, buf.as_mut_ptr() as *mut _, calc_len(buf), 0)\n };\n\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(ret as usize)\n }\n }\n}\n\nimpl io::Write for UnixStream {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n let ret = unsafe {\n libc::send(self.inner.0, buf.as_ptr() as *const _, calc_len(buf), 0)\n };\n\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(ret as usize)\n }\n }\n\n fn flush(&mut self) -> io::Result<()> {\n Ok(())\n }\n}\n\nimpl AsRawFd for UnixStream {\n fn as_raw_fd(&self) -> Fd {\n self.inner.0\n }\n}\n\n\/\/\/ A structure representing a Unix domain socket server.\npub struct UnixListener {\n inner: Inner,\n}\n\nimpl fmt::Debug for UnixListener {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(fmt, \"UnixListener {{ fd: {}, address: \", self.inner.0));\n try!(self.inner.fmt(libc::getsockname, fmt));\n write!(fmt, \" }}\")\n }\n}\n\nimpl UnixListener {\n \/\/\/ Creates a new `UnixListener` which will be bound to the specified socket.\n pub fn bind<P: AsPath + ?Sized>(path: &P) -> io::Result<UnixListener> {\n unsafe {\n let inner = try!(Inner::new());\n let addr = try!(sockaddr_un(path));\n\n let ret = libc::bind(inner.0,\n &addr 
as *const _ as *const _,\n mem::size_of::<libc::sockaddr_un>() as libc::socklen_t);\n if ret < 0 {\n return Err(io::Error::last_os_error());\n }\n\n let ret = libc::listen(inner.0, 128);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixListener {\n inner: inner,\n })\n }\n }\n }\n\n \/\/\/ Accepts a new incoming connection to this listener.\n pub fn accept(&self) -> io::Result<UnixStream> {\n unsafe {\n let ret = libc::accept(self.inner.0, 0 as *mut _, 0 as *mut _);\n if ret < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(ret)\n })\n }\n }\n }\n\n \/\/\/ Create a new independently owned handle to the underlying socket.\n \/\/\/\n \/\/\/ The returned `UnixListener` is a reference to the same socket that this\n \/\/\/ object references. Both handles can be used to accept incoming\n \/\/\/ connections and options set on one listener will affect the other.\n pub fn try_clone(&self) -> io::Result<UnixStream> {\n let fd = unsafe { libc::dup(self.inner.0) };\n if fd < 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(UnixStream {\n inner: Inner(fd)\n })\n }\n }\n\n \/\/\/ Returns an iterator over incoming connections.\n \/\/\/\n \/\/\/ The iterator will never return `None`.\n pub fn incoming<'a>(&'a self) -> Incoming<'a> {\n Incoming {\n listener: self\n }\n }\n}\n\nimpl AsRawFd for UnixListener {\n fn as_raw_fd(&self) -> Fd {\n self.inner.0\n }\n}\n\nimpl<'a> IntoIterator for &'a UnixListener {\n type Item = io::Result<UnixStream>;\n type IntoIter = Incoming<'a>;\n\n fn into_iter(self) -> Incoming<'a> {\n self.incoming()\n }\n}\n\n\/\/\/ An iterator over incoming connections to a `UnixListener`.\n\/\/\/\n\/\/\/ It will never return `None`.\n#[derive(Debug)]\npub struct Incoming<'a> {\n listener: &'a UnixListener,\n}\n\nimpl<'a> Iterator for Incoming<'a> {\n type Item = io::Result<UnixStream>;\n\n fn next(&mut self) -> Option<io::Result<UnixStream>> {\n Some(self.listener.accept())\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n (Int::max_value(), None)\n }\n}\n\n#[cfg(test)]\nmod test {\n extern crate temporary;\n\n use std::thread;\n use std::io;\n use std::io::prelude::*;\n\n use {UnixListener, UnixStream};\n\n macro_rules! 
or_panic {\n ($e:expr) => {\n match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{}\", e),\n }\n }\n }\n\n #[test]\n fn basic() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let socket_path = dir.path().join(\"sock\");\n let msg1 = b\"hello\";\n let msg2 = b\"world!\";\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n let mut stream = or_panic!(listener.accept());\n let mut buf = [0; 5];\n or_panic!(stream.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(stream.write_all(msg2));\n });\n\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n or_panic!(stream.write_all(msg1));\n let mut buf = vec![];\n or_panic!(stream.read_to_end(&mut buf));\n assert_eq!(msg2, buf);\n drop(stream);\n\n thread.join();\n }\n\n #[test]\n fn unnamed() {\n let msg1 = b\"hello\";\n let msg2 = b\"world!\";\n\n let [mut s1, mut s2] = or_panic!(UnixStream::unnamed());\n let thread = thread::scoped(move || {\n \/\/ s1 must be moved in or the test will hang!\n let mut buf = [0; 5];\n or_panic!(s1.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(s1.write_all(msg2));\n });\n\n or_panic!(s2.write_all(msg1));\n let mut buf = vec![];\n or_panic!(s2.read_to_end(&mut buf));\n assert_eq!(msg2, buf);\n drop(s2);\n\n thread.join();\n }\n\n #[test]\n fn try_clone() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let socket_path = dir.path().join(\"sock\");\n let msg1 = b\"hello\";\n let msg2 = b\"world\";\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n let mut stream = or_panic!(listener.accept());\n or_panic!(stream.write_all(msg1));\n or_panic!(stream.write_all(msg2));\n });\n\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n let mut stream2 = or_panic!(stream.try_clone());\n\n let mut buf = [0; 5];\n or_panic!(stream.read(&mut buf));\n assert_eq!(msg1, buf);\n or_panic!(stream2.read(&mut buf));\n assert_eq!(msg2, buf);\n\n thread.join();\n }\n\n #[test]\n fn iter() {\n let dir = or_panic!(temporary::Directory::new(\"unix_socket\"));\n let socket_path = dir.path().join(\"sock\");\n\n let listener = or_panic!(UnixListener::bind(&socket_path));\n let thread = thread::scoped(|| {\n for stream in listener.incoming().take(2) {\n let mut stream = or_panic!(stream);\n let mut buf = [0];\n or_panic!(stream.read(&mut buf));\n }\n });\n\n for _ in 0..2 {\n let mut stream = or_panic!(UnixStream::connect(&socket_path));\n or_panic!(stream.write_all(&[0]));\n }\n\n thread.join();\n }\n\n #[test]\n fn long_path() {\n let socket_path = \"asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasd\\\n asdfasdfasdfadfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasd\";\n match UnixStream::connect(socket_path) {\n Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {}\n Err(e) => panic!(\"unexpected error {}\", e),\n Ok(_) => panic!(\"unexpected success\"),\n }\n\n match UnixListener::bind(socket_path) {\n Err(ref e) if e.kind() == io::ErrorKind::InvalidInput => {}\n Err(e) => panic!(\"unexpected error {}\", e),\n Ok(_) => panic!(\"unexpected success\"),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Disallow whitespace surrounding equals sign<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> change to make all tests run<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add pool for contain iformation about current conections<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>ejonecho<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add interesting unsizing test<commit_after>\/\/ Taken from the rustc test suite; this triggers an interesting case in unsizing.\n\n#![allow(non_upper_case_globals, incomplete_features)]\n#![feature(associated_type_bounds)]\n#![feature(impl_trait_in_bindings)]\n\nuse std::ops::Add;\n\ntrait Tr1 { type As1; fn mk(&self) -> Self::As1; }\ntrait Tr2<'a> { fn tr2(self) -> &'a Self; }\n\nfn assert_copy<T: Copy>(x: T) { let _x = x; let _x = x; }\nfn assert_static<T: 'static>(_: T) {}\nfn assert_forall_tr2<T: for<'a> Tr2<'a>>(_: T) {}\n\n#[derive(Copy, Clone)]\nstruct S1;\n#[derive(Copy, Clone)]\nstruct S2;\nimpl Tr1 for S1 { type As1 = S2; fn mk(&self) -> Self::As1 { S2 } }\n\nconst cdef_et1: &dyn Tr1<As1: Copy> = &S1;\nconst sdef_et1: &dyn Tr1<As1: Copy> = &S1;\npub fn use_et1() { assert_copy(cdef_et1.mk()); assert_copy(sdef_et1.mk()); }\n\nconst cdef_et2: &(dyn Tr1<As1: 'static> + Sync) = &S1;\nstatic sdef_et2: &(dyn Tr1<As1: 'static> + Sync) = &S1;\npub fn use_et2() { assert_static(cdef_et2.mk()); assert_static(sdef_et2.mk()); }\n\nconst cdef_et3: &dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> = {\n struct A;\n impl Tr1 for A {\n type As1 = core::ops::Range<u8>;\n fn mk(&self) -> Self::As1 { 0..10 }\n };\n &A\n};\npub fn use_et3() {\n let _0 = cdef_et3.mk().clone();\n let mut s = 0u8;\n for _1 in _0 {\n let _2 = _1 + 1u8;\n s += _2.into();\n }\n assert_eq!(s, (0..10).map(|x| x + 1).sum());\n}\n\nconst cdef_et4: &(dyn Tr1<As1: for<'a> Tr2<'a>> + Sync) = {\n #[derive(Copy, Clone)]\n struct A;\n impl Tr1 for A {\n type As1 = A;\n fn mk(&self) -> A { A }\n }\n impl<'a> Tr2<'a> for A {\n fn tr2(self) -> &'a Self { &A }\n }\n &A\n};\nstatic sdef_et4: &(dyn Tr1<As1: for<'a> Tr2<'a>> + Sync) = cdef_et4;\npub fn use_et4() { assert_forall_tr2(cdef_et4.mk()); assert_forall_tr2(sdef_et4.mk()); }\n\nfn main() {\n let _ = use_et1();\n let _ = use_et2();\n let _ = use_et3();\n let _ = use_et4();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! A collection of traits abstracting over Listeners and Streams.\nuse std::any::{Any, AnyRefExt};\nuse std::boxed::BoxAny;\nuse std::fmt;\nuse std::intrinsics::TypeId;\nuse std::io::{IoResult, IoError, ConnectionAborted, InvalidInput, OtherIoError,\n Stream, Listener, Acceptor};\nuse std::io::net::ip::{SocketAddr, ToSocketAddr, Port};\nuse std::io::net::tcp::{TcpStream, TcpListener, TcpAcceptor};\nuse std::mem::{mod, transmute, transmute_copy};\nuse std::raw::{mod, TraitObject};\n\nuse uany::UncheckedBoxAnyDowncast;\nuse openssl::ssl::{Ssl, SslStream, SslContext, VerifyCallback};\nuse openssl::ssl::SslVerifyMode::SslVerifyPeer;\nuse openssl::ssl::SslMethod::Sslv23;\nuse openssl::ssl::error::{SslError, StreamError, OpenSslErrors, SslSessionClosed};\n\nuse self::HttpStream::{Http, Https};\n\n\/\/\/ The write-status indicating headers have not been written.\n#[allow(missing_copy_implementations)]\npub struct Fresh;\n\n\/\/\/ The write-status indicating headers have been written.\n#[allow(missing_copy_implementations)]\npub struct Streaming;\n\n\/\/\/ An abstraction to listen for connections on a certain port.\npub trait NetworkListener<S: NetworkStream, A: NetworkAcceptor<S>>: Listener<S, A> {\n \/\/\/ Bind to a socket.\n \/\/\/\n \/\/\/ Note: This does not start listening for connections. 
You must call\n \/\/\/ `listen()` to do that.\n fn bind<To: ToSocketAddr>(addr: To) -> IoResult<Self>;\n\n \/\/\/ Get the address this Listener ended up listening on.\n fn socket_name(&mut self) -> IoResult<SocketAddr>;\n}\n\n\/\/\/ An abstraction to receive `NetworkStream`s.\npub trait NetworkAcceptor<S: NetworkStream>: Acceptor<S> + Clone + Send {\n \/\/\/ Closes the Acceptor, so no more incoming connections will be handled.\n fn close(&mut self) -> IoResult<()>;\n}\n\n\/\/\/ An abstraction over streams that a Server can utilize.\npub trait NetworkStream: Stream + Any + StreamClone + Send {\n \/\/\/ Get the remote address of the underlying connection.\n fn peer_name(&mut self) -> IoResult<SocketAddr>;\n}\n\n#[doc(hidden)]\npub trait StreamClone {\n fn clone_box(&self) -> Box<NetworkStream + Send>;\n}\n\nimpl<T: NetworkStream + Send + Clone> StreamClone for T {\n #[inline]\n fn clone_box(&self) -> Box<NetworkStream + Send> {\n box self.clone()\n }\n}\n\n\/\/\/ A connector creates a NetworkStream.\npub trait NetworkConnector<S: NetworkStream> {\n \/\/\/ Connect to a remote address.\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult<S>;\n}\n\nimpl fmt::Show for Box<NetworkStream + Send> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.pad(\"Box<NetworkStream>\")\n }\n}\n\nimpl Clone for Box<NetworkStream + Send> {\n #[inline]\n fn clone(&self) -> Box<NetworkStream + Send> { self.clone_box() }\n}\n\nimpl Reader for Box<NetworkStream + Send> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { (**self).read(buf) }\n}\n\nimpl Writer for Box<NetworkStream + Send> {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl<'a> Reader for &'a mut NetworkStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { (**self).read(buf) }\n}\n\nimpl<'a> Writer for &'a mut NetworkStream {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl UncheckedBoxAnyDowncast for Box<NetworkStream + Send> {\n unsafe fn downcast_unchecked<T: 'static>(self) -> Box<T> {\n let to = *mem::transmute::<&Box<NetworkStream + Send>, &raw::TraitObject>(&self);\n \/\/ Prevent double-free.\n mem::forget(self);\n mem::transmute(to.data)\n }\n}\n\nimpl<'a> AnyRefExt<'a> for &'a (NetworkStream + 'static) {\n #[inline]\n fn is<T: 'static>(self) -> bool {\n self.get_type_id() == TypeId::of::<T>()\n }\n\n #[inline]\n fn downcast_ref<T: 'static>(self) -> Option<&'a T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\n\nimpl BoxAny for Box<NetworkStream + Send> {\n fn downcast<T: 'static>(self) -> Result<Box<T>, Box<NetworkStream + Send>> {\n if self.is::<T>() {\n Ok(unsafe { self.downcast_unchecked() })\n } else {\n Err(self)\n }\n }\n}\n\n\/\/\/ A `NetworkListener` for `HttpStream`s.\npub struct HttpListener {\n inner: TcpListener\n}\n\nimpl Listener<HttpStream, HttpAcceptor> for HttpListener {\n #[inline]\n fn listen(self) -> IoResult<HttpAcceptor> {\n Ok(HttpAcceptor {\n inner: try!(self.inner.listen())\n })\n }\n}\n\nimpl NetworkListener<HttpStream, HttpAcceptor> for HttpListener {\n #[inline]\n fn bind<To: ToSocketAddr>(addr: To) -> 
IoResult<HttpListener> {\n Ok(HttpListener {\n inner: try!(TcpListener::bind(addr))\n })\n }\n\n #[inline]\n fn socket_name(&mut self) -> IoResult<SocketAddr> {\n self.inner.socket_name()\n }\n}\n\n\/\/\/ A `NetworkAcceptor` for `HttpStream`s.\n#[deriving(Clone)]\npub struct HttpAcceptor {\n inner: TcpAcceptor\n}\n\nimpl Acceptor<HttpStream> for HttpAcceptor {\n #[inline]\n fn accept(&mut self) -> IoResult<HttpStream> {\n Ok(Http(try!(self.inner.accept())))\n }\n}\n\nimpl NetworkAcceptor<HttpStream> for HttpAcceptor {\n #[inline]\n fn close(&mut self) -> IoResult<()> {\n self.inner.close_accept()\n }\n}\n\n\/\/\/ A wrapper around a TcpStream.\n#[deriving(Clone)]\npub enum HttpStream {\n \/\/\/ A stream over the HTTP protocol.\n Http(TcpStream),\n \/\/\/ A stream over the HTTP protocol, protected by SSL.\n Https(SslStream<TcpStream>),\n}\n\nimpl Reader for HttpStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n match *self {\n Http(ref mut inner) => inner.read(buf),\n Https(ref mut inner) => inner.read(buf)\n }\n }\n}\n\nimpl Writer for HttpStream {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> {\n match *self {\n Http(ref mut inner) => inner.write(msg),\n Https(ref mut inner) => inner.write(msg)\n }\n }\n #[inline]\n fn flush(&mut self) -> IoResult<()> {\n match *self {\n Http(ref mut inner) => inner.flush(),\n Https(ref mut inner) => inner.flush(),\n }\n }\n}\n\nimpl NetworkStream for HttpStream {\n fn peer_name(&mut self) -> IoResult<SocketAddr> {\n match *self {\n Http(ref mut inner) => inner.peer_name(),\n Https(ref mut inner) => inner.get_mut().peer_name()\n }\n }\n}\n\n\/\/\/ A connector that will produce HttpStreams.\n#[allow(missing_copy_implementations)]\npub struct HttpConnector(pub Option<VerifyCallback>);\n\nimpl NetworkConnector<HttpStream> for HttpConnector {\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult<HttpStream> {\n let addr = (host, port);\n match scheme {\n \"http\" => {\n debug!(\"http scheme\");\n Ok(Http(try!(TcpStream::connect(addr))))\n },\n \"https\" => {\n debug!(\"https scheme\");\n let stream = try!(TcpStream::connect(addr));\n let mut context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n self.0.as_ref().map(|cb| context.set_verify(SslVerifyPeer, Some(*cb)));\n let ssl = try!(Ssl::new(&context).map_err(lift_ssl_error));\n try!(ssl.set_hostname(host).map_err(lift_ssl_error));\n let stream = try!(SslStream::new(&context, stream).map_err(lift_ssl_error));\n Ok(Https(stream))\n },\n _ => {\n Err(IoError {\n kind: InvalidInput,\n desc: \"Invalid scheme for Http\",\n detail: None\n })\n }\n }\n }\n}\n\nfn lift_ssl_error(ssl: SslError) -> IoError {\n debug!(\"lift_ssl_error: {}\", ssl);\n match ssl {\n StreamError(err) => err,\n SslSessionClosed => IoError {\n kind: ConnectionAborted,\n desc: \"SSL Connection Closed\",\n detail: None\n },\n \/\/ Unfortunately throw this away. 
No way to support this\n \/\/ detail without a better Error abstraction.\n OpenSslErrors(errs) => IoError {\n kind: OtherIoError,\n desc: \"Error in OpenSSL\",\n detail: Some(format!(\"{}\", errs))\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::boxed::BoxAny;\n use uany::UncheckedBoxAnyDowncast;\n\n use mock::MockStream;\n use super::NetworkStream;\n\n #[test]\n fn test_downcast_box_stream() {\n let stream = box MockStream::new() as Box<NetworkStream + Send>;\n\n let mock = stream.downcast::<MockStream>().unwrap();\n assert_eq!(mock, box MockStream::new());\n\n }\n\n #[test]\n fn test_downcast_unchecked_box_stream() {\n let stream = box MockStream::new() as Box<NetworkStream + Send>;\n\n let mock = unsafe { stream.downcast_unchecked::<MockStream>() };\n assert_eq!(mock, box MockStream::new());\n\n }\n\n}\n<commit_msg>Update for unsafe-any changes.<commit_after>\/\/! A collection of traits abstracting over Listeners and Streams.\nuse std::any::{Any, AnyRefExt};\nuse std::boxed::BoxAny;\nuse std::fmt;\nuse std::intrinsics::TypeId;\nuse std::io::{IoResult, IoError, ConnectionAborted, InvalidInput, OtherIoError,\n Stream, Listener, Acceptor};\nuse std::io::net::ip::{SocketAddr, ToSocketAddr, Port};\nuse std::io::net::tcp::{TcpStream, TcpListener, TcpAcceptor};\nuse std::mem::{mod, transmute, transmute_copy};\nuse std::raw::{mod, TraitObject};\n\nuse uany::UncheckedBoxAnyDowncast;\nuse openssl::ssl::{Ssl, SslStream, SslContext, VerifyCallback};\nuse openssl::ssl::SslVerifyMode::SslVerifyPeer;\nuse openssl::ssl::SslMethod::Sslv23;\nuse openssl::ssl::error::{SslError, StreamError, OpenSslErrors, SslSessionClosed};\n\nuse self::HttpStream::{Http, Https};\n\n\/\/\/ The write-status indicating headers have not been written.\n#[allow(missing_copy_implementations)]\npub struct Fresh;\n\n\/\/\/ The write-status indicating headers have been written.\n#[allow(missing_copy_implementations)]\npub struct Streaming;\n\n\/\/\/ An abstraction to listen for connections on a certain port.\npub trait NetworkListener<S: NetworkStream, A: NetworkAcceptor<S>>: Listener<S, A> {\n \/\/\/ Bind to a socket.\n \/\/\/\n \/\/\/ Note: This does not start listening for connections. 
You must call\n \/\/\/ `listen()` to do that.\n fn bind<To: ToSocketAddr>(addr: To) -> IoResult<Self>;\n\n \/\/\/ Get the address this Listener ended up listening on.\n fn socket_name(&mut self) -> IoResult<SocketAddr>;\n}\n\n\/\/\/ An abstraction to receive `NetworkStream`s.\npub trait NetworkAcceptor<S: NetworkStream>: Acceptor<S> + Clone + Send {\n \/\/\/ Closes the Acceptor, so no more incoming connections will be handled.\n fn close(&mut self) -> IoResult<()>;\n}\n\n\/\/\/ An abstraction over streams that a Server can utilize.\npub trait NetworkStream: Stream + Any + StreamClone + Send {\n \/\/\/ Get the remote address of the underlying connection.\n fn peer_name(&mut self) -> IoResult<SocketAddr>;\n}\n\n#[doc(hidden)]\npub trait StreamClone {\n fn clone_box(&self) -> Box<NetworkStream + Send>;\n}\n\nimpl<T: NetworkStream + Send + Clone> StreamClone for T {\n #[inline]\n fn clone_box(&self) -> Box<NetworkStream + Send> {\n box self.clone()\n }\n}\n\n\/\/\/ A connector creates a NetworkStream.\npub trait NetworkConnector<S: NetworkStream> {\n \/\/\/ Connect to a remote address.\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult<S>;\n}\n\nimpl fmt::Show for Box<NetworkStream + Send> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.pad(\"Box<NetworkStream>\")\n }\n}\n\nimpl Clone for Box<NetworkStream + Send> {\n #[inline]\n fn clone(&self) -> Box<NetworkStream + Send> { self.clone_box() }\n}\n\nimpl Reader for Box<NetworkStream + Send> {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { (**self).read(buf) }\n}\n\nimpl Writer for Box<NetworkStream + Send> {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl<'a> Reader for &'a mut NetworkStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { (**self).read(buf) }\n}\n\nimpl<'a> Writer for &'a mut NetworkStream {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> { (**self).write(msg) }\n\n #[inline]\n fn flush(&mut self) -> IoResult<()> { (**self).flush() }\n}\n\nimpl UnsafeAnyExt for NetworkStream + Send {\n unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {\n mem::transmute(mem::transmute::<&NetworkStream + Send,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {\n mem::transmute(mem::transmute::<&mut NetworkStream + Send,\n raw::TraitObject>(self).data)\n }\n\n unsafe fn downcast_unchecked<T: 'static>(self: Box<NetworkStream + Send>) -> Box<T> {\n mem::transmute(mem::transmute::<Box<NetworkStream + Send>,\n raw::TraitObject>(self).data)\n }\n}\n\nimpl<'a> AnyRefExt<'a> for &'a (NetworkStream + 'static) {\n #[inline]\n fn is<T: 'static>(self) -> bool {\n self.get_type_id() == TypeId::of::<T>()\n }\n\n #[inline]\n fn downcast_ref<T: 'static>(self) -> Option<&'a T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute_copy(&self);\n \/\/ Extract the data pointer\n Some(transmute(to.data))\n }\n } else {\n None\n }\n }\n}\n\nimpl BoxAny for Box<NetworkStream + Send> {\n fn downcast<T: 'static>(self) -> Result<Box<T>, Box<NetworkStream + Send>> {\n if self.is::<T>() {\n Ok(unsafe { self.downcast_unchecked() })\n } else {\n Err(self)\n }\n }\n}\n\n\/\/\/ A `NetworkListener` for `HttpStream`s.\npub struct HttpListener {\n inner: TcpListener\n}\n\nimpl Listener<HttpStream, HttpAcceptor> for HttpListener 
{\n #[inline]\n fn listen(self) -> IoResult<HttpAcceptor> {\n Ok(HttpAcceptor {\n inner: try!(self.inner.listen())\n })\n }\n}\n\nimpl NetworkListener<HttpStream, HttpAcceptor> for HttpListener {\n #[inline]\n fn bind<To: ToSocketAddr>(addr: To) -> IoResult<HttpListener> {\n Ok(HttpListener {\n inner: try!(TcpListener::bind(addr))\n })\n }\n\n #[inline]\n fn socket_name(&mut self) -> IoResult<SocketAddr> {\n self.inner.socket_name()\n }\n}\n\n\/\/\/ A `NetworkAcceptor` for `HttpStream`s.\n#[deriving(Clone)]\npub struct HttpAcceptor {\n inner: TcpAcceptor\n}\n\nimpl Acceptor<HttpStream> for HttpAcceptor {\n #[inline]\n fn accept(&mut self) -> IoResult<HttpStream> {\n Ok(Http(try!(self.inner.accept())))\n }\n}\n\nimpl NetworkAcceptor<HttpStream> for HttpAcceptor {\n #[inline]\n fn close(&mut self) -> IoResult<()> {\n self.inner.close_accept()\n }\n}\n\n\/\/\/ A wrapper around a TcpStream.\n#[deriving(Clone)]\npub enum HttpStream {\n \/\/\/ A stream over the HTTP protocol.\n Http(TcpStream),\n \/\/\/ A stream over the HTTP protocol, protected by SSL.\n Https(SslStream<TcpStream>),\n}\n\nimpl Reader for HttpStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n match *self {\n Http(ref mut inner) => inner.read(buf),\n Https(ref mut inner) => inner.read(buf)\n }\n }\n}\n\nimpl Writer for HttpStream {\n #[inline]\n fn write(&mut self, msg: &[u8]) -> IoResult<()> {\n match *self {\n Http(ref mut inner) => inner.write(msg),\n Https(ref mut inner) => inner.write(msg)\n }\n }\n #[inline]\n fn flush(&mut self) -> IoResult<()> {\n match *self {\n Http(ref mut inner) => inner.flush(),\n Https(ref mut inner) => inner.flush(),\n }\n }\n}\n\nimpl NetworkStream for HttpStream {\n fn peer_name(&mut self) -> IoResult<SocketAddr> {\n match *self {\n Http(ref mut inner) => inner.peer_name(),\n Https(ref mut inner) => inner.get_mut().peer_name()\n }\n }\n}\n\n\/\/\/ A connector that will produce HttpStreams.\n#[allow(missing_copy_implementations)]\npub struct HttpConnector(pub Option<VerifyCallback>);\n\nimpl NetworkConnector<HttpStream> for HttpConnector {\n fn connect(&mut self, host: &str, port: Port, scheme: &str) -> IoResult<HttpStream> {\n let addr = (host, port);\n match scheme {\n \"http\" => {\n debug!(\"http scheme\");\n Ok(Http(try!(TcpStream::connect(addr))))\n },\n \"https\" => {\n debug!(\"https scheme\");\n let stream = try!(TcpStream::connect(addr));\n let mut context = try!(SslContext::new(Sslv23).map_err(lift_ssl_error));\n self.0.as_ref().map(|cb| context.set_verify(SslVerifyPeer, Some(*cb)));\n let ssl = try!(Ssl::new(&context).map_err(lift_ssl_error));\n try!(ssl.set_hostname(host).map_err(lift_ssl_error));\n let stream = try!(SslStream::new(&context, stream).map_err(lift_ssl_error));\n Ok(Https(stream))\n },\n _ => {\n Err(IoError {\n kind: InvalidInput,\n desc: \"Invalid scheme for Http\",\n detail: None\n })\n }\n }\n }\n}\n\nfn lift_ssl_error(ssl: SslError) -> IoError {\n debug!(\"lift_ssl_error: {}\", ssl);\n match ssl {\n StreamError(err) => err,\n SslSessionClosed => IoError {\n kind: ConnectionAborted,\n desc: \"SSL Connection Closed\",\n detail: None\n },\n \/\/ Unfortunately throw this away. 
No way to support this\n \/\/ detail without a better Error abstraction.\n OpenSslErrors(errs) => IoError {\n kind: OtherIoError,\n desc: \"Error in OpenSSL\",\n detail: Some(format!(\"{}\", errs))\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::boxed::BoxAny;\n use uany::UncheckedBoxAnyDowncast;\n\n use mock::MockStream;\n use super::NetworkStream;\n\n #[test]\n fn test_downcast_box_stream() {\n let stream = box MockStream::new() as Box<NetworkStream + Send>;\n\n let mock = stream.downcast::<MockStream>().unwrap();\n assert_eq!(mock, box MockStream::new());\n\n }\n\n #[test]\n fn test_downcast_unchecked_box_stream() {\n let stream = box MockStream::new() as Box<NetworkStream + Send>;\n\n let mock = unsafe { stream.downcast_unchecked::<MockStream>() };\n assert_eq!(mock, box MockStream::new());\n\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:green_heart: Add test for fetch_rescheduled_tasks<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-msvc -- sprintf isn't a symbol in msvcrt? maybe a #define?\n\n#![feature(libc, std_misc)]\n\nextern crate libc;\n\nuse std::ffi::{CStr, CString};\nuse libc::{c_char, c_int};\n\n\nextern {\n fn sprintf(s: *mut c_char, format: *const c_char, ...) -> c_int;\n}\n\nunsafe fn check<T, F>(expected: &str, f: F) where F: FnOnce(*mut c_char) -> T {\n let mut x = [0 as c_char; 50];\n f(&mut x[0] as *mut c_char);\n assert_eq!(expected.as_bytes(), CStr::from_ptr(x.as_ptr()).to_bytes());\n}\n\npub fn main() {\n\n unsafe {\n \/\/ Call with just the named parameter\n let c = CString::new(&b\"Hello World\\n\"[..]).unwrap();\n check(\"Hello World\\n\", |s| sprintf(s, c.as_ptr()));\n\n \/\/ Call with variable number of arguments\n let c = CString::new(&b\"%d %f %c %s\\n\"[..]).unwrap();\n check(\"42 42.500000 a %d %f %c %s\\n\\n\", |s| {\n sprintf(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());\n });\n\n \/\/ Make a function pointer\n let x: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int = sprintf;\n\n \/\/ A function that takes a function pointer\n unsafe fn call(p: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int) {\n \/\/ Call with just the named parameter\n let c = CString::new(&b\"Hello World\\n\"[..]).unwrap();\n check(\"Hello World\\n\", |s| sprintf(s, c.as_ptr()));\n\n \/\/ Call with variable number of arguments\n let c = CString::new(&b\"%d %f %c %s\\n\"[..]).unwrap();\n check(\"42 42.500000 a %d %f %c %s\\n\\n\", |s| {\n sprintf(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());\n });\n }\n\n \/\/ Pass sprintf directly\n call(sprintf);\n\n \/\/ Pass sprintf indirectly\n call(x);\n }\n\n}\n<commit_msg>test: Fix missing call of function pointer<commit_after>\/\/ Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-msvc -- sprintf isn't a symbol in msvcrt? maybe a #define?\n\n#![feature(libc, std_misc)]\n\nextern crate libc;\n\nuse std::ffi::{CStr, CString};\nuse libc::{c_char, c_int};\n\n\nextern {\n fn sprintf(s: *mut c_char, format: *const c_char, ...) -> c_int;\n}\n\nunsafe fn check<T, F>(expected: &str, f: F) where F: FnOnce(*mut c_char) -> T {\n let mut x = [0 as c_char; 50];\n f(&mut x[0] as *mut c_char);\n assert_eq!(expected.as_bytes(), CStr::from_ptr(x.as_ptr()).to_bytes());\n}\n\npub fn main() {\n\n unsafe {\n \/\/ Call with just the named parameter\n let c = CString::new(&b\"Hello World\\n\"[..]).unwrap();\n check(\"Hello World\\n\", |s| sprintf(s, c.as_ptr()));\n\n \/\/ Call with variable number of arguments\n let c = CString::new(&b\"%d %f %c %s\\n\"[..]).unwrap();\n check(\"42 42.500000 a %d %f %c %s\\n\\n\", |s| {\n sprintf(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());\n });\n\n \/\/ Make a function pointer\n let x: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int = sprintf;\n\n \/\/ A function that takes a function pointer\n unsafe fn call(fp: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int) {\n \/\/ Call with just the named parameter\n let c = CString::new(&b\"Hello World\\n\"[..]).unwrap();\n check(\"Hello World\\n\", |s| fp(s, c.as_ptr()));\n\n \/\/ Call with variable number of arguments\n let c = CString::new(&b\"%d %f %c %s\\n\"[..]).unwrap();\n check(\"42 42.500000 a %d %f %c %s\\n\\n\", |s| {\n fp(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());\n });\n }\n\n \/\/ Pass sprintf directly\n call(sprintf);\n\n \/\/ Pass sprintf indirectly\n call(x);\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\nuse common::*;\nmod crypto;\nuse crypto::{Sha512, PubKey, RsaSignature};\n\n\/\/\/ Version\n#[derive(Hash, Clone)]\npub enum Version {\n NewerThan(u64),\n OlderThan(u64),\n Between(u64, u64),\n Excatly(u64),\n Newest,\n}\n\n\/\/ TODO: Implement format\n\n#[derive(Hash, Clone)]\n\/\/\/ A package developer\npub struct Developer {\n \/\/\/ The name of the developer\n pub name: String,\n \/\/\/ The public key of the developer\n pub key: PubKey,\n}\n\n#[derive(Hash, Clone)]\n\/\/\/ An installable package for Oxide\npub struct Package {\n \/\/\/ The id of this package\n pub id: Id,\n \/\/\/ Description\n pub desc: String,\n \/\/\/ The developer of the package\n pub dev: Developer,\n \/\/\/ The developer's signature of this package's content (tarball)\n pub dev_sign: RsaSignature,\n \/\/\/ The signatures of this package\n pub sign: Vec<RsaSignature>,\n \/\/\/ The files this package will create on the computer.\n pub files: Vec<String>,\n \/\/\/ Dependencies of this package\n pub deps: Vec<Id>,\n}\n\nimpl Package {\n \/\/\/ Get content\n pub fn get_content(&self) -> Tarball {\n \n }\n\n \/\/\/ Get package from string\n pub fn from_string(s: String) -> Option<Package> {\n\n \/\/ TODO\n\n for i in resp {\n let data = i.substr(5, i.len() - 5);\n let key = i.substr(0, 4);\n\n if key == \"name\" {\n name = data;\n } else if key == \"desc\" {\n desc = data;\n } else if key == \"host\" {\n 
host = data;\n } else if key == \"file\" {\n files = data;\n } else if key == \"vers\" {\n version = data;\n }\n }\n\n Package {\n host: host,\n name: name,\n desc: desc,\n files: files.split(\",\".to_string()).collect::<Vec<_>>(),\n version: version,\n }\n }\n\n \/\/\/ Install package\n pub fn install(&self, col: &Collection) -> Result<u64, InstallError> {\n \/\/ Install deps\n let mut installed = 0;\n for d in self.deps {\n let pkg = d.install();\n if let Err(n) = pkg {\n installed += n;\n if n > 10000 {\n println!(\"Warning: Potential infinite recursion (cyclic dependencies)\");\n }\n } else {\n return pkg;\n }\n\n }\n\n \/\/ TODO install + add to local package list\n }\n\n \/\/\/ Check validity\n pub fn check(&self, col: &Collection) -> TrustLevel {\n let con = self.get_content();\n\n for s in self.sign {\n if s.check(con) && col.keys.contains(s) {\n return TrustLevel::TrustedPackage\n } else if !s.check(con) {\n return TrustLevel::InvalidSignature\n }\n }\n\n if !self.dev_sign.check(con) {\n TrustLevel::InvalidSignature\n } else if !col.devs.contains(self.dev_sign) {\n TrustLevel::UntrustedSignature\n } else {\n TrustLevel::TrustedDev\n }\n }\n\n}\n\n\/\/\/ Trust level\npub enum TrustLevel {\n \/\/\/ 0\n InvalidSignature,\n \/\/\/ 1\n UntrustedSignature,\n \/\/\/ 2\n TrustedDeveloper,\n \/\/\/ 3\n TrustedPackage,\n}\n\nimpl TrustLevel {\n \/\/\/ Is this package trusted?\n pub fn is_trusted(&self) -> bool {\n match self {\n &TrustLevel::TrustedDeveloper | TrustLevel::TrustedPackage => true,\n _ => false,\n }\n }\n}\n\n\/\/\/ An error\npub enum PackageError {\n InvalidSyntax,\n InvalidSignature,\n UntrustedSignature,\n UntrustedDev,\n NotFound,\n E404,\n InfiniteDeps,\n Unknown,\n}\n\n#[derive(Hash, Clone)]\n\/\/\/ An package descriptor\npub struct Id {\n pub name: String,\n pub version: Version,\n pub dist_type: DistType,\n}\n\n\/\/\/ Distribution type\npub enum DistType {\n Binary,\n Source,\n Other,\n}\n\nimpl Id {\n pub fn to_string(&self) -> String {\n format!(\"{}-{}-{}\", self.name, self.dist_type, self.version)\n }\n}\n\n\/\/\/ Database of trusted developers\n#[derive(Hash, Clone)]\npub struct DevDb {\n pub data: HashSet<Developer>,\n}\n\n\/\/\/ Database of trusted keys\n#[derive(Hash, Clone)]\npub struct KeyDb {\n pub data: HashSet<PubKey>,\n}\n\n\/\/\/ An index of packages\n#[derive(Hash, Clone)]\npub struct Index {\n \/\/\/ Where the search queries can be send to\n pub host: String,\n}\n\nimpl Index {\n \/\/\/ Get a given package\n pub fn get(&self, id: Id) -> Result<Package, PackageError> {\n let con = File::open(\"tcp:\/\/\".to_string() + self.host);\n\n con.write(\"GET \/ox\/\".to_string() + id.to_string() + \" HTTP\/1.1\".to_string());\n\n let res = Vec::new();\n con.read_to_end(&mut res);\n\n Package::from_string(String::from_utf8(&res))\n }\n}\n\n\/\/\/ A collection of indexes, trusted keys, and trusted developers (all stored on the users\n\/\/\/ computer)\n#[derive(Hash, Clone)]\npub struct Collection {\n \/\/\/ Indexes\n pub index: Vec<Index>,\n \/\/\/ The trusted devs\n pub devs: DevDb,\n \/\/\/ The trusted keys\n pub keys: KeyDb,\n \/\/\/ The installed packages\n pub installed: Vec<LocalPackage>,\n}\n\n\/\/\/ A package installed locally\npub struct LocalPackage {\n \/\/\/ Files it owns\n pub owns: Vec<String>,\n \/\/\/ Is this package installed as root (i.e. 
isnt just installed as a dep for another package)?\n pub root: bool,\n \/\/\/ The package\n pub package: Package,\n}\n\nimpl LocalPackage {\n pub fn uninstall(&self) -> bool {\n\n }\n}\n\nimpl Collection {\n \/\/\/ Get a given package (guaranteed to be valid)\n pub fn get(&self, id: Id) -> Result<Package, PackageError> {\n for i in self.index {\n if let Ok(p) = i.get(id) {\n if p.check().is_trusted() {\n return Ok(p);\n }\n }\n }\n None\n }\n}\n\n\n<commit_msg>More work on Oxide<commit_after>use redox::*;\nuse common::*;\nmod crypto;\nuse crypto::{Sha512, PubKey, RsaSignature};\nuse redox::time::Duration;\n\n\/\/ TODO: Implement format\n\n#[derive(Hash, Clone)]\n\/\/\/ A package developer\npub struct Developer {\n \/\/\/ The name of the developer\n pub name: String,\n \/\/\/ The public key of the developer\n pub key: PubKey,\n}\n\n#[derive(Hash, Clone)]\n\/\/\/ An installable package for Oxide\npub struct Package {\n \/\/\/ Description\n pub desc: String,\n \/\/\/ The developer of the package\n pub dev: Developer,\n \/\/\/ The developer's signature of this package's content (tarball)\n pub dev_sign: RsaSignature,\n \/\/\/ The signatures of this package\n pub sign: HashSet<RsaSignature>,\n \/\/\/ The files this package will create on the computer.\n pub files: HashSet<String>,\n \/\/\/ Dependencies of this package\n \/\/\/ Making sure the newest (compatible) version is the one used in the deps is up to the\n \/\/\/ package provider (and is thus NOT included in the signature because it's already\n \/\/\/ signed).\n pub deps: HashSet<Id>,\n}\n\nimpl Package {\n \/\/\/ Get content\n pub fn get_content(&self) -> Tarball {\n \n }\n\n \/\/\/ Get package from string\n pub fn from_string(s: String) -> Option<Package> {\n\n \/\/ TODO\n\n for i in resp {\n let data = i.substr(5, i.len() - 5);\n let key = i.substr(0, 4);\n\n if key == \"name\" {\n name = data;\n } else if key == \"desc\" {\n desc = data;\n } else if key == \"host\" {\n host = data;\n } else if key == \"file\" {\n files = data;\n } else if key == \"vers\" {\n version = data;\n }\n }\n\n Package {\n host: host,\n name: name,\n desc: desc,\n files: files.split(\",\".to_string()).collect::<Vec<_>>(),\n version: version,\n }\n }\n\n \/\/\/ Install package\n pub fn install(&self, col: &Collection) -> Result<u64, PackageError> {\n \/\/ Install deps\n let mut installed = 0;\n for d in self.deps {\n let pkg = d.install();\n if let Err(n) = pkg {\n installed += n;\n if n > 10000 {\n println!(\"Warning: Potential infinite recursion (cyclic dependencies)\");\n }\n } else {\n return pkg;\n }\n\n }\n\n \/\/ TODO install + add to local package list\n }\n\n \/\/\/ Update packages\n pub fn update(&self, col: &Collection) -> Result<u64, PackageError> {\n\n \/\/ TODO install + add to local package list\n }\n\n \/\/\/ Check validity\n pub fn check(&self, col: &Collection) -> TrustLevel {\n let con = self.get_content();\n\n for s in self.sign {\n if s.check(con) && col.keys.contains(s) {\n return TrustLevel::TrustedPackage\n } else if !s.check(con) {\n return TrustLevel::InvalidSignature\n }\n }\n\n if !self.dev_sign.check(con) {\n TrustLevel::InvalidSignature\n } else if !col.devs.contains(self.dev_sign) {\n TrustLevel::UntrustedSignature\n } else {\n TrustLevel::TrustedDev\n }\n }\n\n}\n\n\/\/\/ Trust level\npub enum TrustLevel {\n \/\/\/ 0\n InvalidSignature,\n \/\/\/ 1\n UntrustedSignature,\n \/\/\/ 2\n TrustedDeveloper,\n \/\/\/ 3\n TrustedPackage,\n}\n\nimpl TrustLevel {\n \/\/\/ Is this package trusted?\n pub fn is_trusted(&self) -> bool {\n match 
self {\n &TrustLevel::TrustedDeveloper | TrustLevel::TrustedPackage => true,\n _ => false,\n }\n }\n}\n\n\/\/\/ An error\npub enum PackageError {\n InvalidSyntax,\n InvalidSignature,\n UntrustedSignature,\n UntrustedDev,\n NotFound,\n E404,\n InfiniteDeps,\n Unknown,\n}\n\n#[derive(Hash, Clone)]\n\/\/\/ An package descriptor\npub struct Id {\n pub name: String,\n pub version: String,\n pub dist_type: DistType,\n}\n\n\/\/\/ Distribution type\npub enum DistType {\n Binary,\n Source,\n Other,\n}\n\nimpl Id {\n pub fn to_string(&self) -> String {\n format!(\"{}-{}-{}\", self.name, self.dist_type, self.version)\n }\n}\n\n\/\/\/ Database of trusted developers\n#[derive(Hash, Clone)]\npub struct DevDb {\n pub data: HashSet<Developer>,\n}\n\n\/\/\/ Database of trusted keys\n#[derive(Hash, Clone)]\npub struct KeyDb {\n pub data: HashSet<PubKey>,\n}\n\n\/\/\/ An index of packages\n#[derive(Hash, Clone)]\npub struct Index {\n \/\/\/ Where the search queries can be send to\n pub host: String,\n}\n\nimpl Index {\n \/\/\/ Get a given package\n pub fn get(&self, id: Id) -> Result<Package, PackageError> {\n let con = File::open(\"tcp:\/\/\".to_string() + self.host);\n\n con.write(\"GET \/ox\/\".to_string() + id.to_string() + \" HTTP\/1.1\".to_string());\n\n let res = Vec::new();\n con.read_to_end(&mut res);\n\n Package::from_string(String::from_utf8(&res))\n }\n}\n\n\/\/\/ A collection of indexes, trusted keys, and trusted developers (all stored on the users\n\/\/\/ computer)\n#[derive(Hash, Clone)]\npub struct Collection {\n \/\/\/ Indexes\n pub index: Vec<Index>,\n \/\/\/ The trusted devs\n pub devs: DevDb,\n \/\/\/ The trusted keys\n pub keys: KeyDb,\n \/\/\/ The installed packages\n pub installed: HashMap<Id, LocalPackage>,\n \/\/\/ The root packages (packages which are not just installed as dependencies to other packages)\n pub root: HashSet<Id>,\n}\n\n\/\/\/ A package installed locally\npub struct LocalPackage {\n \/\/\/ Files it owns\n pub owns: HashSet<String>,\n \/\/\/ The package\n pub package: Package,\n \/\/\/ Dependency to\n pub dep_to: HashSet<Id>,\n \/\/\/ Dependency for\n pub dep_for: HashSet<Id>,\n}\n\nimpl LocalPackage {\n pub fn uninstall(&self) -> bool {\n\n }\n}\n\nimpl Collection {\n \/\/\/ Get a given package (guaranteed to be valid)\n pub fn get(&self, id: Id) -> Result<Package, PackageError> {\n for i in self.index {\n if let Ok(p) = i.get(id) {\n if p.check().is_trusted() {\n return Ok(p);\n }\n }\n }\n None\n }\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A windowing implementation using GLUT.\n\/\/\/\n\/\/\/ GLUT is a very old and bare-bones toolkit. However, it has good cross-platform support, at\n\/\/\/ least on desktops. 
It is designed for testing Servo without the need of a UI.\n\nuse windowing::{ApplicationMethods, CompositeCallback, LoadUrlCallback, MouseCallback};\nuse windowing::{ResizeCallback, ScrollCallback, WindowMethods, WindowMouseEvent, WindowClickEvent};\nuse windowing::{WindowMouseDownEvent, WindowMouseUpEvent, ZoomCallback};\n\nuse alert::{Alert, AlertMethods};\nuse core::cell::Cell;\nuse core::libc::c_int;\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse gfx::compositor::{IdleRenderState, RenderState, RenderingRenderState};\nuse glut::glut::{ACTIVE_CTRL, DOUBLE, HAVE_PRECISE_MOUSE_WHEEL, WindowHeight, WindowWidth};\nuse glut::glut;\nuse glut::machack;\nuse script::compositor_interface::{FinishedLoading, Loading, PerformingLayout, ReadyState};\n\nstatic THROBBER: [char, ..8] = [ '⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷' ];\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n pub fn new() -> Application {\n glut::init();\n glut::init_display_mode(DOUBLE);\n Application\n }\n}\n\n\/\/\/ The type of a window.\npub struct Window {\n glut_window: glut::Window,\n\n composite_callback: Option<CompositeCallback>,\n resize_callback: Option<ResizeCallback>,\n load_url_callback: Option<LoadUrlCallback>,\n mouse_callback: Option<MouseCallback>,\n scroll_callback: Option<ScrollCallback>,\n zoom_callback: Option<ZoomCallback>,\n\n drag_origin: Point2D<c_int>,\n\n mouse_down_button: @mut c_int,\n mouse_down_point: @mut Point2D<c_int>,\n\n ready_state: ReadyState,\n render_state: RenderState,\n throbber_frame: u8,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n pub fn new(_: &Application) -> @mut Window {\n \/\/ Create the GLUT window.\n let glut_window = glut::create_window(~\"Servo\");\n glut::reshape_window(glut_window, 800, 600);\n\n \/\/ Create our window object.\n let window = @mut Window {\n glut_window: glut_window,\n\n composite_callback: None,\n resize_callback: None,\n load_url_callback: None,\n mouse_callback: None,\n scroll_callback: None,\n zoom_callback: None,\n\n drag_origin: Point2D(0, 0),\n\n mouse_down_button: @mut 0,\n mouse_down_point: @mut Point2D(0, 0),\n\n ready_state: FinishedLoading,\n render_state: IdleRenderState,\n throbber_frame: 0,\n };\n\n \/\/ Spin the event loop every 50 ms to allow the Rust channels to be polled.\n \/\/\n \/\/ This requirement is pretty much the nail in the coffin for GLUT's usefulness.\n \/\/\n \/\/ FIXME(pcwalton): What a mess.\n let register_timer_callback: @mut @fn() = @mut ||{};\n *register_timer_callback = || {\n glut::timer_func(50, *register_timer_callback);\n window.throbber_frame = (window.throbber_frame + 1) % (THROBBER.len() as u8);\n window.update_window_title()\n };\n\n \/\/ Register event handlers.\n do glut::reshape_func(window.glut_window) |width, height| {\n match window.resize_callback {\n None => {}\n Some(callback) => callback(width as uint, height as uint),\n }\n }\n do glut::display_func {\n \/\/ FIXME(pcwalton): This will not work with multiple windows.\n match window.composite_callback {\n None => {}\n Some(callback) => callback(),\n }\n }\n do glut::keyboard_func |key, _, _| {\n window.handle_key(key)\n }\n do glut::mouse_func |button, state, x, y| {\n if button < 3 {\n window.handle_mouse(button, state, x, y);\n }\n }\n do glut::mouse_wheel_func |wheel, direction, x, y| {\n let delta = if HAVE_PRECISE_MOUSE_WHEEL {\n (direction as f32) \/ 10000.0\n } else {\n (direction as f32) 
* 30.0\n };\n\n match wheel {\n 1 => window.handle_scroll(Point2D(delta, 0.0)),\n 2 => window.handle_zoom(delta),\n _ => window.handle_scroll(Point2D(0.0, delta)),\n }\n }\n (*register_timer_callback)();\n\n machack::perform_scroll_wheel_hack();\n\n window\n }\n\n \/\/\/ Returns the size of the window.\n pub fn size(&self) -> Size2D<f32> {\n Size2D(glut::get(WindowWidth) as f32, glut::get(WindowHeight) as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n pub fn present(&mut self) {\n glut::swap_buffers();\n }\n\n \/\/\/ Registers a callback to run when a composite event occurs.\n pub fn set_composite_callback(&mut self, new_composite_callback: CompositeCallback) {\n self.composite_callback = Some(new_composite_callback)\n }\n\n \/\/\/ Registers a callback to run when a resize event occurs.\n pub fn set_resize_callback(&mut self, new_resize_callback: ResizeCallback) {\n self.resize_callback = Some(new_resize_callback)\n }\n\n \/\/\/ Registers a callback to be run when a new URL is to be loaded.\n pub fn set_load_url_callback(&mut self, new_load_url_callback: LoadUrlCallback) {\n self.load_url_callback = Some(new_load_url_callback)\n }\n\n \/\/\/ Registers a callback to be run when a mouse event occurs.\n pub fn set_mouse_callback(&mut self, new_mouse_callback: MouseCallback) {\n self.mouse_callback = Some(new_mouse_callback)\n }\n\n \/\/\/ Registers a callback to be run when the user scrolls.\n pub fn set_scroll_callback(&mut self, new_scroll_callback: ScrollCallback) {\n self.scroll_callback = Some(new_scroll_callback)\n }\n\n \/\/\/ Registers a zoom to be run when the user zooms.\n pub fn set_zoom_callback(&mut self, new_zoom_callback: ZoomCallback) {\n self.zoom_callback = Some(new_zoom_callback)\n }\n\n \/\/\/ Spins the event loop.\n pub fn check_loop(@mut self) {\n glut::check_loop()\n }\n\n \/\/\/ Schedules a redisplay.\n pub fn set_needs_display(@mut self) {\n glut::post_redisplay()\n }\n\n \/\/\/ Sets the ready state.\n pub fn set_ready_state(@mut self, ready_state: ReadyState) {\n self.ready_state = ready_state;\n self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n pub fn set_render_state(@mut self, render_state: RenderState) {\n self.render_state = render_state;\n self.update_window_title()\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n fn update_window_title(&self) {\n let throbber = THROBBER[self.throbber_frame];\n match self.ready_state {\n Loading => {\n glut::set_window_title(self.glut_window, fmt!(\"%c Loading — Servo\", throbber))\n }\n PerformingLayout => {\n glut::set_window_title(self.glut_window,\n fmt!(\"%c Performing Layout — Servo\", throbber))\n }\n FinishedLoading => {\n match self.render_state {\n RenderingRenderState => {\n glut::set_window_title(self.glut_window,\n fmt!(\"%c Rendering — Servo\", throbber))\n }\n IdleRenderState => glut::set_window_title(self.glut_window, \"Servo\"),\n }\n }\n }\n }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: u8) {\n debug!(\"got key: %d\", key as int);\n match key {\n 12 => self.load_url(), \/\/ Ctrl+L\n k if k == ('=' as u8) && (glut::get_modifiers() & ACTIVE_CTRL) != 0 => { \/\/ Ctrl++\n for self.zoom_callback.each |&callback| {\n callback(0.1);\n }\n }\n k if k == 31 && (glut::get_modifiers() & ACTIVE_CTRL) != 0 => { \/\/ Ctrl+-\n for self.zoom_callback.each |&callback| {\n callback(-0.1);\n }\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a click\n fn 
handle_mouse(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f;\n match self.mouse_callback {\n None => {}\n Some(callback) => {\n let event: WindowMouseEvent;\n match state {\n glut::MOUSE_DOWN => {\n event = WindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32));\n *self.mouse_down_point = Point2D(x, y);\n *self.mouse_down_button = button;\n }\n glut::MOUSE_UP => {\n event = WindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32));\n if *self.mouse_down_button == button {\n let pixel_dist = *self.mouse_down_point - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as float).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = WindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n callback(click_event);\n }\n }\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. :-(\")\n };\n callback(event);\n }\n }\n }\n\n \/\/\/ Helper function to handle a scroll.\n fn handle_scroll(&mut self, delta: Point2D<f32>) {\n match self.scroll_callback {\n None => {}\n Some(callback) => callback(delta),\n }\n }\n\n \/\/\/ Helper function to handle a zoom.\n fn handle_zoom(&mut self, magnification: f32) {\n match self.zoom_callback {\n None => {}\n Some(callback) => callback(magnification),\n }\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n match self.load_url_callback {\n None => error!(\"no URL callback registered, doing nothing\"),\n Some(callback) => {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n callback(\"http:\/\/purple.com\/\")\n } else {\n callback(value)\n }\n }\n }\n }\n}\n\n<commit_msg>Get rid of initial reshape.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A windowing implementation using GLUT.\n\/\/\/\n\/\/\/ GLUT is a very old and bare-bones toolkit. However, it has good cross-platform support, at\n\/\/\/ least on desktops. 
It is designed for testing Servo without the need of a UI.\n\nuse windowing::{ApplicationMethods, CompositeCallback, LoadUrlCallback, MouseCallback};\nuse windowing::{ResizeCallback, ScrollCallback, WindowMethods, WindowMouseEvent, WindowClickEvent};\nuse windowing::{WindowMouseDownEvent, WindowMouseUpEvent, ZoomCallback};\n\nuse alert::{Alert, AlertMethods};\nuse core::cell::Cell;\nuse core::libc::c_int;\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse gfx::compositor::{IdleRenderState, RenderState, RenderingRenderState};\nuse glut::glut::{ACTIVE_CTRL, DOUBLE, HAVE_PRECISE_MOUSE_WHEEL, WindowHeight, WindowWidth};\nuse glut::glut;\nuse glut::machack;\nuse script::compositor_interface::{FinishedLoading, Loading, PerformingLayout, ReadyState};\n\nstatic THROBBER: [char, ..8] = [ '⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷' ];\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n pub fn new() -> Application {\n glut::init();\n glut::init_display_mode(DOUBLE);\n Application\n }\n}\n\n\/\/\/ The type of a window.\npub struct Window {\n glut_window: glut::Window,\n\n composite_callback: Option<CompositeCallback>,\n resize_callback: Option<ResizeCallback>,\n load_url_callback: Option<LoadUrlCallback>,\n mouse_callback: Option<MouseCallback>,\n scroll_callback: Option<ScrollCallback>,\n zoom_callback: Option<ZoomCallback>,\n\n drag_origin: Point2D<c_int>,\n\n mouse_down_button: @mut c_int,\n mouse_down_point: @mut Point2D<c_int>,\n\n ready_state: ReadyState,\n render_state: RenderState,\n throbber_frame: u8,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n pub fn new(_: &Application) -> @mut Window {\n \/\/ Create the GLUT window.\n unsafe { glut::bindgen::glutInitWindowSize(800, 600); }\n let glut_window = glut::create_window(~\"Servo\");\n\n \/\/ Create our window object.\n let window = @mut Window {\n glut_window: glut_window,\n\n composite_callback: None,\n resize_callback: None,\n load_url_callback: None,\n mouse_callback: None,\n scroll_callback: None,\n zoom_callback: None,\n\n drag_origin: Point2D(0, 0),\n\n mouse_down_button: @mut 0,\n mouse_down_point: @mut Point2D(0, 0),\n\n ready_state: FinishedLoading,\n render_state: IdleRenderState,\n throbber_frame: 0,\n };\n\n \/\/ Spin the event loop every 50 ms to allow the Rust channels to be polled.\n \/\/\n \/\/ This requirement is pretty much the nail in the coffin for GLUT's usefulness.\n \/\/\n \/\/ FIXME(pcwalton): What a mess.\n let register_timer_callback: @mut @fn() = @mut ||{};\n *register_timer_callback = || {\n glut::timer_func(50, *register_timer_callback);\n window.throbber_frame = (window.throbber_frame + 1) % (THROBBER.len() as u8);\n window.update_window_title()\n };\n\n \/\/ Register event handlers.\n do glut::reshape_func(window.glut_window) |width, height| {\n match window.resize_callback {\n None => {}\n Some(callback) => callback(width as uint, height as uint),\n }\n }\n do glut::display_func {\n \/\/ FIXME(pcwalton): This will not work with multiple windows.\n match window.composite_callback {\n None => {}\n Some(callback) => callback(),\n }\n }\n do glut::keyboard_func |key, _, _| {\n window.handle_key(key)\n }\n do glut::mouse_func |button, state, x, y| {\n if button < 3 {\n window.handle_mouse(button, state, x, y);\n }\n }\n do glut::mouse_wheel_func |wheel, direction, x, y| {\n let delta = if HAVE_PRECISE_MOUSE_WHEEL {\n (direction as f32) \/ 10000.0\n } else {\n 
(direction as f32) * 30.0\n };\n\n match wheel {\n 1 => window.handle_scroll(Point2D(delta, 0.0)),\n 2 => window.handle_zoom(delta),\n _ => window.handle_scroll(Point2D(0.0, delta)),\n }\n }\n (*register_timer_callback)();\n\n machack::perform_scroll_wheel_hack();\n\n window\n }\n\n \/\/\/ Returns the size of the window.\n pub fn size(&self) -> Size2D<f32> {\n Size2D(glut::get(WindowWidth) as f32, glut::get(WindowHeight) as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n pub fn present(&mut self) {\n glut::swap_buffers();\n }\n\n \/\/\/ Registers a callback to run when a composite event occurs.\n pub fn set_composite_callback(&mut self, new_composite_callback: CompositeCallback) {\n self.composite_callback = Some(new_composite_callback)\n }\n\n \/\/\/ Registers a callback to run when a resize event occurs.\n pub fn set_resize_callback(&mut self, new_resize_callback: ResizeCallback) {\n self.resize_callback = Some(new_resize_callback)\n }\n\n \/\/\/ Registers a callback to be run when a new URL is to be loaded.\n pub fn set_load_url_callback(&mut self, new_load_url_callback: LoadUrlCallback) {\n self.load_url_callback = Some(new_load_url_callback)\n }\n\n \/\/\/ Registers a callback to be run when a mouse event occurs.\n pub fn set_mouse_callback(&mut self, new_mouse_callback: MouseCallback) {\n self.mouse_callback = Some(new_mouse_callback)\n }\n\n \/\/\/ Registers a callback to be run when the user scrolls.\n pub fn set_scroll_callback(&mut self, new_scroll_callback: ScrollCallback) {\n self.scroll_callback = Some(new_scroll_callback)\n }\n\n \/\/\/ Registers a zoom to be run when the user zooms.\n pub fn set_zoom_callback(&mut self, new_zoom_callback: ZoomCallback) {\n self.zoom_callback = Some(new_zoom_callback)\n }\n\n \/\/\/ Spins the event loop.\n pub fn check_loop(@mut self) {\n glut::check_loop()\n }\n\n \/\/\/ Schedules a redisplay.\n pub fn set_needs_display(@mut self) {\n glut::post_redisplay()\n }\n\n \/\/\/ Sets the ready state.\n pub fn set_ready_state(@mut self, ready_state: ReadyState) {\n self.ready_state = ready_state;\n self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n pub fn set_render_state(@mut self, render_state: RenderState) {\n self.render_state = render_state;\n self.update_window_title()\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n fn update_window_title(&self) {\n let throbber = THROBBER[self.throbber_frame];\n match self.ready_state {\n Loading => {\n glut::set_window_title(self.glut_window, fmt!(\"%c Loading — Servo\", throbber))\n }\n PerformingLayout => {\n glut::set_window_title(self.glut_window,\n fmt!(\"%c Performing Layout — Servo\", throbber))\n }\n FinishedLoading => {\n match self.render_state {\n RenderingRenderState => {\n glut::set_window_title(self.glut_window,\n fmt!(\"%c Rendering — Servo\", throbber))\n }\n IdleRenderState => glut::set_window_title(self.glut_window, \"Servo\"),\n }\n }\n }\n }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: u8) {\n debug!(\"got key: %d\", key as int);\n match key {\n 12 => self.load_url(), \/\/ Ctrl+L\n k if k == ('=' as u8) && (glut::get_modifiers() & ACTIVE_CTRL) != 0 => { \/\/ Ctrl++\n for self.zoom_callback.each |&callback| {\n callback(0.1);\n }\n }\n k if k == 31 && (glut::get_modifiers() & ACTIVE_CTRL) != 0 => { \/\/ Ctrl+-\n for self.zoom_callback.each |&callback| {\n callback(-0.1);\n }\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a 
click\n fn handle_mouse(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f;\n match self.mouse_callback {\n None => {}\n Some(callback) => {\n let event: WindowMouseEvent;\n match state {\n glut::MOUSE_DOWN => {\n event = WindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32));\n *self.mouse_down_point = Point2D(x, y);\n *self.mouse_down_button = button;\n }\n glut::MOUSE_UP => {\n event = WindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32));\n if *self.mouse_down_button == button {\n let pixel_dist = *self.mouse_down_point - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as float).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = WindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n callback(click_event);\n }\n }\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. :-(\")\n };\n callback(event);\n }\n }\n }\n\n \/\/\/ Helper function to handle a scroll.\n fn handle_scroll(&mut self, delta: Point2D<f32>) {\n match self.scroll_callback {\n None => {}\n Some(callback) => callback(delta),\n }\n }\n\n \/\/\/ Helper function to handle a zoom.\n fn handle_zoom(&mut self, magnification: f32) {\n match self.zoom_callback {\n None => {}\n Some(callback) => callback(magnification),\n }\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n match self.load_url_callback {\n None => error!(\"no URL callback registered, doing nothing\"),\n Some(callback) => {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n callback(\"http:\/\/purple.com\/\")\n } else {\n callback(value)\n }\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test unknown field in remote struct<commit_after>#[macro_use]\nextern crate serde_derive;\n\nmod remote {\n pub struct S {\n pub a: u8,\n }\n}\n\n#[derive(Serialize, Deserialize)]\n#[serde(remote = \"remote::S\")]\nstruct S {\n b: u8, \/\/~^^^ ERROR: no field `b` on type `&remote::S`\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 
17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n debugln!(\"Reset: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = ctrl & (0xFFFFFFFF - CTRL_HCFS);\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Enable: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Port Enumeration: {:X}\", self.regs.control.read());\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! 
self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! tds.is_empty() {\n let ed = box Ed {\n \/\/TODO: Remove 1 << 13, it sets it to low speed\n flags: 0x3FF << 16 | 1 << 13 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! 
self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n let mut spin = 1000000;\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 && spin > 0 {\n spin -= 1;\n \/\/unsafe { context_switch(false) };\n }\n let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;\n if condition != 0 {\n debugln!(\"Condition: {:X}\", condition);\n break;\n }\n }\n\n \/*\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n *\/\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n }\n\n count\n }\n}\n<commit_msg>Remove low speed flag, increase spin count<commit_after>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n debugln!(\"Reset: {:X}\", 
self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = ctrl & (0xFFFFFFFF - CTRL_HCFS);\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Enable: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Port Enumeration: {:X}\", self.regs.control.read());\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! tds.is_empty() {\n let ed = box Ed {\n flags: 0x3FF << 16 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! 
self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n let mut spin = 1000000000;\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 && spin > 0 {\n spin -= 1;\n \/\/unsafe { context_switch(false) };\n }\n let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;\n if condition != 0 {\n debugln!(\"Condition: {:X}\", condition);\n break;\n }\n }\n\n \/*\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n *\/\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n }\n\n count\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate sdl2;\nextern crate sdl2_sys;\nextern crate libc;\n\nextern crate sdl3d;\n\nuse sdl3d::vid::*;\nuse sdl3d::start;\n\nfn main() {\n let mut rend_contx = start::bootstrap(1280, 720, \"Hello world!\");\n let (mut renderer, mut pump) = rend_contx;\n\n unsafe {sdl2_sys::mouse::SDL_SetRelativeMouseMode(1);}\n\n \/\/ Variables, some of them should be static afaik.\n let screen_w = 1280\/2;\n let screen_h = 720\/2;\n\n let mut camera_x = 0.0;\n let mut camera_y = 0.0;\n let mut camera_z = 3.0; \n\n let mut camera_x_z = 0.0;\n let mut camera_y_z = 0.0;\n let mut camera_x_y = 0.0;\n\n \/\/A vector of arrays of points. Each array is a line from first to second point.\n let mut lines = Lines::new(vec![\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(-0.5, 0.5, 0.6)],\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n [DepthPoint::new(-0.5, 0.0, 0.6), DepthPoint::new(0.0, 0.0, 0.6)],\n [DepthPoint::new(0.0, 0.0, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n ]);\n\n 'game_loop: loop {\n use sdl2::pixels::Color::RGB;\n\n std::thread::sleep(std::time::Duration::from_millis(33));\n \n for event in pump.poll_iter() {\n use sdl2::event::Event::*;\n use sdl2::keyboard::Keycode::*;\n\n match event {\n Quit {..} => {break 'game_loop;},\n KeyDown {keycode, ..} => {\n match keycode {\n Some(Up) => {\n let z = camera_z;\n camera_z = z - 0.05;\n },\n Some(Down) => {\n let z = camera_z;\n camera_z = z + 0.05;\n },\n Some(Left) => {\n let x = camera_x;\n camera_x = x + 0.05;\n },\n Some(Right) => {\n let x = camera_x;\n camera_x = x - 0.05;\n },\n Some(RCtrl) => {\n let y = camera_y;\n camera_y = y + 0.05;\n },\n Some(RShift) => {\n let y = camera_y;\n camera_y = y - 0.05;\n },\n Some(Q) => {\n camera_x_y += 0.1;\n },\n Some(E) => {\n camera_x_y -= 0.1;\n },\n Some(Escape) => {\n break 'game_loop;\n }\n _ => {println!(\"{:?}\", keycode);}\n }\n },\n MouseMotion {xrel, yrel, ..} => {\n camera_x_z = (xrel as f64)\/30.0;\n \/\/camera_y_z = (yrel as f64)\/30.0;\n }\n _ => {}\n }\n }\n\n renderer.set_draw_color(RGB(20, 40, 60));\n renderer.clear();\n renderer.set_draw_color(RGB(200, 200, 200));\n\n lines.flat(screen_w, screen_h, &mut renderer,\n camera_x, camera_y, camera_z,\n camera_x_y, camera_x_z, camera_y_z);\n\n \/\/ Reset relative mouse move back to 0 as everything was already moved\n camera_x_z = 0.0;\n camera_y_z = 0.0; \n camera_x_y = 0.0;\n \n renderer.present(); \n }\n}<commit_msg>Added an option to move camera up\/down.<commit_after>extern crate sdl2;\nextern crate sdl2_sys;\nextern crate libc;\n\nextern crate sdl3d;\n\nuse sdl3d::vid::*;\nuse sdl3d::start;\n\nfn main() {\n let mut rend_contx = start::bootstrap(1280, 720, \"Hello world!\");\n let (mut renderer, mut pump) = rend_contx;\n\n unsafe 
{sdl2_sys::mouse::SDL_SetRelativeMouseMode(1);}\n\n \/\/ Variables, some of them should be static afaik.\n let screen_w = 1280\/2;\n let screen_h = 720\/2;\n\n let mut camera_x = 0.0;\n let mut camera_y = 0.0;\n let mut camera_z = 3.0; \n\n let mut camera_x_z = 0.0;\n let mut camera_y_z = 0.0;\n let mut camera_x_y = 0.0;\n\n \/\/A vector of arrays of points. Each array is a line from first to second point.\n let mut lines = Lines::new(vec![\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(-0.5, 0.5, 0.6)],\n [DepthPoint::new(-0.5, -0.5, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n [DepthPoint::new(-0.5, 0.0, 0.6), DepthPoint::new(0.0, 0.0, 0.6)],\n [DepthPoint::new(0.0, 0.0, 0.6), DepthPoint::new(0.0, -0.5, 0.6)],\n ]);\n\n 'game_loop: loop {\n use sdl2::pixels::Color::RGB;\n\n std::thread::sleep(std::time::Duration::from_millis(33));\n \n for event in pump.poll_iter() {\n use sdl2::event::Event::*;\n use sdl2::keyboard::Keycode::*;\n\n match event {\n Quit {..} => {break 'game_loop;},\n KeyDown {keycode, ..} => {\n match keycode {\n Some(Up) => {\n let z = camera_z;\n camera_z = z - 0.05;\n },\n Some(Down) => {\n let z = camera_z;\n camera_z = z + 0.05;\n },\n Some(Left) => {\n let x = camera_x;\n camera_x = x + 0.05;\n },\n Some(Right) => {\n let x = camera_x;\n camera_x = x - 0.05;\n },\n Some(RCtrl) => {\n let y = camera_y;\n camera_y = y + 0.05;\n },\n Some(RShift) => {\n let y = camera_y;\n camera_y = y - 0.05;\n },\n Some(Q) => {\n camera_x_y += 0.1;\n },\n Some(E) => {\n camera_x_y -= 0.1;\n },\n Some(Escape) => {\n break 'game_loop;\n }\n _ => {println!(\"{:?}\", keycode);}\n }\n },\n MouseMotion {xrel, yrel, ..} => {\n camera_x_z = (xrel as f64)\/30.0;\n camera_y_z = (yrel as f64)\/30.0;\n }\n _ => {}\n }\n }\n\n renderer.set_draw_color(RGB(20, 40, 60));\n renderer.clear();\n renderer.set_draw_color(RGB(200, 200, 200));\n\n lines.flat(screen_w, screen_h, &mut renderer,\n camera_x, camera_y, camera_z,\n camera_x_y, camera_x_z, camera_y_z);\n\n \/\/ Reset relative mouse move back to 0 as everything was already moved\n camera_x_z = 0.0;\n camera_y_z = 0.0; \n camera_x_y = 0.0;\n \n renderer.present(); \n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add prototype macros for mock generation<commit_after>extern crate double;\n\ntrait Dependency: Clone {\n fn profit(&self, revenue: u32, costs: u32) -> i32;\n}\n\nmacro_rules! mock_trait {\n ( $mock_name:ident, $($method:ident($retval:ty)($($arg_type:ty),*))* ) => (\n #[derive(Debug, Clone)]\n struct $mock_name {\n $(\n $method: double::Mock<(($($arg_type),*)), $retval>\n )*\n }\n\n impl Default for $mock_name {\n fn default() -> Self {\n $mock_name {\n $(\n $method: double::Mock::default(),\n )*\n }\n }\n }\n );\n}\n\nmacro_rules! 
mock_method {\n ( $method:ident($retval:ty)($($arg_name:ident: $arg_type:ty),*) ) => (\n fn $method(&self, $($arg_name: $arg_type),*) -> $retval {\n self.$method.call(($($arg_name),*))\n }\n )\n}\n\nmock_trait!(MockDependency, profit(i32)(u32, u32));\n\nimpl Dependency for MockDependency {\n mock_method!(profit(i32)(revenue: u32, costs: u32));\n\n \/*fn profit(&self, revenue: u32, costs: u32) -> i32 {\n self.profit.call((revenue, costs))\n }*\/\n}\n\nfn main() {\n \/\/ Test individual return values\n let mock = MockDependency::default();\n mock.profit.return_value(42);\n mock.profit.return_value_for((0, 0), 9001);\n\n let value = mock.profit(10, 20);\n assert_eq!(42, value);\n mock.profit.has_calls_exactly_in_order(vec!((10, 20)));\n\n let value = mock.profit(0, 0);\n assert_eq!(9001, value);\n mock.profit.has_calls_exactly_in_order(vec!((10, 20), (0, 0)));\n\n \/\/ Test sequence of return values\n mock.profit.return_values(vec!(1, 2, 3));\n assert_eq!(1, mock.profit.call((1, 2)));\n assert_eq!(2, mock.profit.call((2, 4)));\n assert_eq!(3, mock.profit.call((3, 6)));\n assert_eq!(42, mock.profit.call((4, 8)));\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate discord;\n\nuse discord::Discord;\nuse discord::voice::{self, VoiceConnection};\nuse discord::model::{Event, ChannelId, ServerId};\nuse std::env;\n\nfn main() {\n\t\/\/ Log in to Discord using the email and password in the environment\n\tlet discord = Discord::new(\n\t\t&env::var(\"DISCORD_EMAIL\").expect(\"DISCORD_EMAIL\"),\n\t\t&env::var(\"DISCORD_PASSWORD\").expect(\"DISCORD_PASSWORD\")\n\t).expect(\"login failed\");\n\n\t\/\/ Establish and use a websocket connection\n\tlet (mut connection, ready) = discord.connect().expect(\"connect failed\");\n\tlet mut voice = VoiceConnection::new(ready.user.id.clone());\n\n\tconnection.voice_connect(\n\t\t&ServerId(env::var(\"DISCORD_SERVER\").expect(\"DISCORD_SERVER\")),\n\t\t&ChannelId(env::var(\"DISCORD_CHANNEL\").expect(\"DISCORD_CHANNEL\")),\n\t);\n\n\tvoice.play(\n\t\tvoice::open_ffmpeg_stream(\n\t\t\t&env::var(\"DISCORD_AUDIO\").expect(\"DISCORD_AUDIO\")\n\t\t).expect(\"File read failed\")\n\t);\n\n\tprintln!(\"Ready.\");\n\tloop {\n\t\tlet event = match connection.recv_event() {\n\t\t\tOk(event) => event,\n\t\t\tErr(err) => {\n\t\t\t\tprintln!(\"Receive error: {:?}\", err);\n\t\t\t\tbreak\n\t\t\t}\n\t\t};\n\t\tvoice.update(&event);\n\t\tmatch event {\n\t\t\tEvent::Closed(n) => {\n\t\t\t\tprintln!(\"Discord closed on us with status {}\", n);\n\t\t\t\tbreak\n\t\t\t}\n\t\t\t_ => {}\n\t\t}\n\t}\n\n\t\/\/ Log out from the API\n\tdiscord.logout().expect(\"logout failed\");\n}\n<commit_msg>Remove subpar voice example, instead see dj example<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cpsr: refactoring + tabs2space<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add a parser benchmark<commit_after>#![feature(test)]\nextern crate test;\n\nextern crate lambda_calculus as lambda;\n\nuse test::Bencher;\nuse lambda::parser::*;\n\n#[bench]\nfn parsing_debruijn(b: &mut Bencher) {\n let blc = \"(λ11)(λλλ1(λλλλ3(λ5(3(λ2(3(λλ3(λ123)))(4(λ4(λ31(21))))))(1(2(λ12))\\\n (λ4(λ4(λ2(14)))5))))(33)2)(λ1((λ11)(λ11)))\";\n b.iter(|| { let _ = parse(&blc, DeBruijn); } );\n}\n\n#[bench]\nfn parsing_classic(b: &mut Bencher) {\n let blc = \"(λa.a a) (λa.λb.λc.c (λd.λe.λf.λg.e (λh.d (f (λi.h (g (λj.λk.i (λl.l k j)))\\\n (f (λj.g (λk.i k (j k)))))) (h (g (λi.i h)) (λi.f (λj.g (λk.j (k h))) e)))) (a a) b)\\\n (λa.a ((λb.b b) (λb.b b)))\";\n b.iter(|| { let _ = parse(&blc, 
Classic); } );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #127<commit_after>#[crate_type = \"rlib\"];\n\nuse std::{iter, vec};\n\npub static EXPECTED_ANSWER: &'static str = \"18407904\";\n\n\/\/ [定理]\n\/\/ a + b = c のとき、\n\/\/ GCD(a, b) = 1 => GCD(a, c) = GCD(b, c) = 1\n\/\/\n\/\/ [証明]\n\/\/ まず、GCD(a, b) = 1 => GCD(a, c) = 1 を示す。\n\/\/ GCD(a, b) のとき、GCD(a, c) = k > 1 であると仮定すると、\n\/\/ 整数 n, m を用いて a = kn, c = km と表すことができる。\n\/\/ b = c - a = km - kn = k(m, n) より、GCD(a, b) >= k となり矛盾。\n\/\/ よって、GCD(a, c) = 1 である。\n\/\/\n\/\/ 次に、GCD(a, b) = 1 => GCD(b, c) = 1 を示す。\n\/\/ GCD(a, b) = 1 のとき、GCD(b, c) = k > 1 であると仮定すると、\n\/\/ 整数 n, m を用いて b = kn, c = km と表すことができる。\n\/\/ ここで、a = c - b = km - kn = k(m - n) より、 GCD(a, b) >= k となり矛盾。\n\/\/ よって、GCD(b, c) = 1 である。\n\n#[deriving(Eq)]\nstruct Rad(uint, ~[uint]);\n\nfn create_rad_vec(n_limit: uint) -> ~[Rad] {\n let mut rad_vec = vec::from_fn(n_limit, |_| Rad(1, ~[]));\n for p in range(2, rad_vec.len()) {\n let Rad(n, _) = rad_vec[p];\n if n != 1 { continue }\n\n for np in iter::count(p, p).take_while(|&np| np < n_limit) {\n let Rad(ref mut n, ref mut facts) = rad_vec[np];\n (*n) *= p;\n facts.push(p);\n }\n }\n rad_vec\n}\n\nfn rad_has_union(&Rad(_, ref a): &Rad, &Rad(_, ref b): &Rad) -> bool {\n let mut i_a = 0;\n let mut i_b = 0;\n\n loop {\n if i_a >= a.len() || i_b >= b.len() { return false }\n match a[i_a].cmp(&b[i_b]) {\n Equal => return true,\n Less => i_a += 1,\n Greater => i_b += 1\n }\n }\n}\n\nfn abc_hits_c_sum(c_limit: uint) -> uint {\n let rad_vec = create_rad_vec(c_limit);\n let mut c_sum = 0;\n\n for c in range(3, c_limit) {\n let Rad(rad_c, _) = rad_vec[c];\n for a in range(1, (c + 1) \/ 2) {\n let b = c - a;\n let Rad(rad_a, _) = rad_vec[a];\n let Rad(rad_b, _) = rad_vec[b];\n let rad_abc = rad_a * rad_b * rad_c;\n if rad_abc >= c || (a > 1 && rad_has_union(&rad_vec[a], &rad_vec[c])) { continue; }\n c_sum += c;\n }\n }\n\n c_sum\n}\n\npub fn solve() -> ~str { abc_hits_c_sum(120000).to_str() }\n\n#[cfg(test)]\nmod test {\n use super::Rad;\n\n #[test]\n fn create_rad_vec() {\n let rad_vec = ~[\n Rad(1, ~[]), Rad(1, ~[]), Rad(2, ~[2]), Rad(3, ~[3]), Rad(2, ~[2]), Rad(5, ~[5]),\n Rad(6, ~[2, 3]), Rad(7, ~[7]), Rad(2, ~[2]), Rad(3, ~[3])];\n assert_eq!(rad_vec, super::create_rad_vec(10))\n }\n\n #[test]\n fn rad_has_union() {\n assert!(super::rad_has_union(&Rad(2, ~[2]), &Rad(2, ~[2])));\n assert!(!super::rad_has_union(&Rad(2, ~[2]), &Rad(3, ~[4])));\n\n assert!(super::rad_has_union(&Rad(3, ~[3]), &Rad(6, ~[2, 3])));\n assert!(super::rad_has_union(&Rad(15, ~[3, 5]), &Rad(10, ~[2, 5])));\n\n assert!(!super::rad_has_union(&Rad(30, ~[2, 3, 5]), &Rad(1001, ~[7, 11, 13])));\n }\n\n #[test]\n fn abc_hits_c_sum() {\n assert_eq!(12523, super::abc_hits_c_sum(1000));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #13259. 
Closes #13259 and #14742<commit_after>extern crate libc;\nuse libc::{c_void, LPVOID, DWORD};\nuse libc::types::os::arch::extra::LPWSTR;\n\nextern \"system\" {\n fn FormatMessageW(flags: DWORD,\n lpSrc: LPVOID,\n msgId: DWORD,\n langId: DWORD,\n buf: LPWSTR,\n nsize: DWORD,\n args: *const c_void)\n -> DWORD;\n}\n\nfn test() {\n let mut buf: [u16, ..50] = [0, ..50];\n let ret = unsafe {\n FormatMessageW(0x1000, 0 as *mut c_void, 1, 0x400,\n buf.as_mut_ptr(), buf.len() as u32, 0 as *const c_void)\n };\n \/\/ On some 32-bit Windowses (Win7-8 at least) this will fail with segmented\n \/\/ stacks taking control of pvArbitrary\n assert!(ret != 0);\n}\nfn main() {\n test()\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #360 - sru:errors-wrong-num-values, r=Vinatorul<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Forgot to add renamed header.rs<commit_after>\/\/ Non-camel case types are used for Stomp Protocol version enum variants\n#![allow(non_camel_case_types)]\n\nuse std::slice::Items;\n\n\/\/ Ideally this would be a simple typedef. However:\n\/\/ See Rust bug #11047: https:\/\/github.com\/mozilla\/rust\/issues\/11047\n\/\/ Cannot call static methods (`with_capacity`) on type aliases (`HeaderList`)\npub struct HeaderList {\n pub headers: Vec<Header>\n}\n\nimpl HeaderList {\n pub fn new() -> HeaderList {\n HeaderList::with_capacity(0)\n }\n pub fn with_capacity(capacity: uint) -> HeaderList {\n HeaderList {\n headers: Vec::with_capacity(capacity)\n }\n }\n\n pub fn push(&mut self, header: Header) {\n self.headers.push(header);\n }\n\n pub fn iter<'a>(&'a self) -> Items<'a, Header> {\n self.headers.iter()\n }\n\n}\n\npub struct Header {\n buffer : String,\n delimiter_index : uint\n}\n\nimpl Header {\n fn from_string(raw_string: &str) -> Option<Header> {\n let delimiter_index = match raw_string.find(':') {\n Some(index) => index,\n None => return None\n };\n let mut header = Header{\n buffer: raw_string.to_string(),\n delimiter_index: delimiter_index\n };\n header = Header::decode_key_value(header.get_key(), header.get_value());\n Some(header)\n }\n\n pub fn encode_string(raw_string: &str) -> Option<Header> {\n let header = Header::from_string(raw_string);\n header.map(|h| Header::encode_key_value(h.get_key(), h.get_value()))\n }\n \n pub fn decode_string(raw_string: &str) -> Option<Header> {\n let header = Header::from_string(raw_string);\n header.map(|h| Header::decode_key_value(h.get_key(), h.get_value()))\n }\n\n pub fn encode_key_value(key: &str, value: &str) -> Header {\n let raw_string = format!(\"{}:{}\", key, Header::encode_value(value));\n Header {\n buffer: raw_string,\n delimiter_index: key.len()\n }\n }\n\n pub fn decode_key_value(key: &str, value: &str) -> Header {\n let raw_string = format!(\"{}:{}\", key, Header::decode_value(value));\n Header {\n buffer: raw_string,\n delimiter_index: key.len()\n }\n }\n\n \/\/TODO: Optimize this method.\n fn decode_value(value: &str) -> String {\n value\n .replace(r\"\\c\", \":\")\n .replace(r\"\\n\", \"\\n\")\n .replace(r\"\\r\", \"\\r\")\n .replace(r\"\\\\\", \"\\\\\")\n }\n\n\/\/TODO: Optimize this method.\n fn encode_value(value: &str) -> String {\n value\n .replace(\"\\\\\", r\"\\\\\") \/\/ Order is significant\n .replace(\"\\r\", r\"\\r\")\n .replace(\"\\n\", r\"\\n\")\n .replace(\":\", r\"\\c\")\n }\n\n pub fn get_raw<'a>(&'a self) -> &'a str {\n self.buffer.as_slice()\n }\n\n pub fn get_key<'a>(&'a self) -> &'a str {\n self.buffer.as_slice().slice_to(self.delimiter_index)\n }\n\n pub fn 
get_value<'a>(&'a self) -> &'a str {\n self.buffer.as_slice().slice_from(self.delimiter_index+1)\n }\n\n}\n\n\/\/ Headers in the Spec\npub struct AcceptVersion(pub Vec<StompVersion>);\npub struct Ack<'a>(pub &'a str);\npub struct ContentLength(pub uint);\npub struct Custom(pub Header);\npub struct Destination<'a> (pub &'a str);\npub struct HeartBeat(pub uint, uint);\npub struct Host<'a>(pub &'a str);\npub struct Id<'a>(pub &'a str);\npub struct Login<'a>(pub &'a str);\npub struct MessageId<'a>(pub &'a str);\npub struct Passcode<'a>(pub &'a str);\npub struct Receipt<'a>(pub &'a str);\npub struct ReceiptId<'a>(pub &'a str);\npub struct Server<'a>(pub &'a str);\npub struct Session<'a> (pub &'a str);\npub struct Subscription<'a>(pub &'a str);\npub struct Transaction<'a>(pub &'a str);\npub struct Version(pub StompVersion);\n\npub enum StompVersion {\n Stomp_v1_0,\n Stomp_v1_1,\n Stomp_v1_2,\n}\n\npub trait StompHeaderSet {\n fn get_content_length(&self) -> Option<ContentLength>;\n fn get_header<'a>(&'a self, key: &str) -> Option<&'a Header>;\n fn get_accept_version<'a>(&'a self) -> Option<Vec<StompVersion>>;\n fn get_ack<'a>(&'a self) -> Option<Ack<'a>>;\n fn get_destination<'a>(&'a self) -> Option<Destination<'a>>;\n fn get_heart_beat(&self) -> Option<HeartBeat>;\n fn get_host<'a>(&'a self) -> Option<Host<'a>>;\n fn get_id<'a>(&'a self) -> Option<Id<'a>>;\n fn get_login<'a>(&'a self) -> Option<Login<'a>>;\n fn get_message_id<'a>(&'a self) -> Option<MessageId<'a>>;\n fn get_passcode<'a>(&'a self) -> Option<Passcode<'a>>;\n fn get_receipt<'a>(&'a self) -> Option<Receipt<'a>>;\n fn get_receipt_id<'a>(&'a self) -> Option<ReceiptId<'a>>;\n fn get_server<'a>(&'a self) -> Option<Server<'a>>;\n fn get_session<'a>(&'a self) -> Option<Session<'a>>;\n fn get_subscription<'a>(&'a self) -> Option<Subscription<'a>>;\n fn get_transaction<'a>(&'a self) -> Option<Transaction<'a>>;\n fn get_version(&self) -> Option<Version>;\n}\n\nimpl StompHeaderSet for HeaderList {\n \n fn get_header<'a>(&'a self, key: &str) -> Option<&'a Header>{\n self.headers.iter().find(|header| \n match **header {\n ref h if h.get_key() == key => true, \n _ => false\n }\n )\n }\n\n fn get_accept_version(&self) -> Option<Vec<StompVersion>> {\n let versions : &str = match self.get_header(\"accept-version\") {\n Some(h) => h.get_value(),\n None => return None\n };\n let versions: Vec<StompVersion> = versions.split(',').filter_map(|v| match v.trim() {\n \"1.0\" => Some(Stomp_v1_0),\n \"1.1\" => Some(Stomp_v1_1),\n \"1.2\" => Some(Stomp_v1_2),\n _ => None\n }).collect();\n Some(versions)\n }\n\n fn get_ack<'a>(&'a self) -> Option<Ack<'a>> {\n match self.get_header(\"ack\") {\n Some(h) => Some(Ack(h.get_value())),\n None => None\n }\n }\n\n fn get_destination<'a>(&'a self) -> Option<Destination<'a>> {\n match self.get_header(\"destination\") {\n Some(h) => Some(Destination(h.get_value())),\n None => return None\n }\n }\n\n fn get_heart_beat(&self) -> Option<HeartBeat> {\n let spec = match self.get_header(\"heart-beat\") {\n Some(h) => h.get_value(), \n None => return None\n };\n let spec_list: Vec<uint> = spec.split(',').filter_map(|str_val| from_str::<uint>(str_val)).collect();\n match spec_list.as_slice() {\n [x, y] => Some(HeartBeat(x, y)),\n _ => None\n }\n }\n\n fn get_host<'a>(&'a self) -> Option<Host<'a>> {\n match self.get_header(\"host\") {\n Some(h) => Some(Host(h.get_value())),\n None => None\n }\n }\n \n fn get_id<'a>(&'a self) -> Option<Id<'a>> {\n match self.get_header(\"id\") {\n Some(h) => Some(Id(h.get_value())),\n None => 
None\n }\n }\n\n fn get_login<'a>(&'a self) -> Option<Login<'a>> {\n match self.get_header(\"login\"){\n Some(h) => Some(Login(h.get_value())),\n None => None\n }\n }\n\n fn get_message_id<'a>(&'a self) -> Option<MessageId<'a>> {\n match self.get_header(\"message-id\"){\n Some(h) => Some(MessageId(h.get_value())),\n None => None\n }\n }\n\n fn get_passcode<'a>(&'a self) -> Option<Passcode<'a>> {\n match self.get_header(\"passcode\"){\n Some(h) => Some(Passcode(h.get_value())),\n None => None\n }\n }\n\n fn get_receipt<'a>(&'a self) -> Option<Receipt<'a>> {\n match self.get_header(\"receipt\"){\n Some(h) => Some(Receipt(h.get_value())),\n None => None\n }\n }\n\n fn get_receipt_id<'a>(&'a self) -> Option<ReceiptId<'a>> {\n match self.get_header(\"receipt-id\"){\n Some(h) => Some(ReceiptId(h.get_value())),\n None => None\n }\n }\n\n fn get_server<'a>(&'a self) -> Option<Server<'a>> {\n match self.get_header(\"server\"){\n Some(h) => Some(Server(h.get_value())),\n None => None\n }\n }\n\n fn get_session<'a>(&'a self) -> Option<Session<'a>> {\n match self.get_header(\"session\"){\n Some(h) => Some(Session(h.get_value())),\n None => None\n }\n }\n\n fn get_subscription<'a>(&'a self) -> Option<Subscription<'a>> {\n match self.get_header(\"subscription\"){\n Some(h) => Some(Subscription(h.get_value())),\n None => None\n }\n }\n\n fn get_transaction<'a>(&'a self) -> Option<Transaction<'a>> {\n match self.get_header(\"transaction\"){\n Some(h) => Some(Transaction(h.get_value())),\n None => None\n }\n }\n\n fn get_version(&self) -> Option<Version> {\n let version = match self.get_header(\"version\"){\n Some(h) => h.get_value(),\n None => return None\n };\n match (version).as_slice() {\n \"1.0\" => Some(Version(Stomp_v1_0)), \/\/ TODO: Impl FromStr for StompVersion\n \"1.1\" => Some(Version(Stomp_v1_1)),\n \"1.2\" => Some(Version(Stomp_v1_2)),\n _ => None\n }\n }\n\n fn get_content_length(&self) -> Option<ContentLength> {\n let length = match self.get_header(\"content-length\") {\n Some(h) => h.get_value(),\n None => return None\n };\n match from_str::<uint>(length) {\n Some(l) => Some(ContentLength(l)),\n None => None\n }\n }\n\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>use the position() method instead<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
Window abstraction\n\nuse std::cell::RefCell;\nuse input::InputEvent;\nuse current::{ Get, Set, Usage };\n\nuse GenericEvent;\n\n\/\/\/ Whether window should close or not.\npub struct ShouldClose(pub bool);\n\n\/\/\/ Work-around trait for `Get<ShouldClose>`.\n\/\/\/ Used to support generic constraints.\npub trait GetShouldClose: Get<ShouldClose> {\n \/\/\/ Returns whether window should close.\n fn get_should_close(&self) -> ShouldClose {\n self.get()\n }\n}\n\nimpl<T: Get<ShouldClose>> GetShouldClose for T {}\n\n\/\/\/ Work-around trait for `Set<ShouldClose>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetShouldClose: Set<ShouldClose> {\n \/\/\/ Sets whether window should close.\n fn set_should_close(&mut self, val: ShouldClose);\n}\n\n\/\/\/ The size of the window.\npub struct Size(pub [u32, ..2]);\n\n\/\/\/ Work-around trait for `Get<Size>`.\n\/\/\/ Used to support generic constraints.\npub trait GetSize: Get<Size> {\n \/\/\/ Returns the size of window.\n fn get_size(&self) -> Size {\n self.get()\n }\n}\n\nimpl<T: Get<Size>> GetSize for T {}\n\n\/\/\/ Work-around trait for `Get<Size>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetSize: Set<Size> {\n \/\/\/ Sets size of window.\n fn set_size(&mut self, val: Size);\n}\n\n#[test]\nfn test_methods() {\n use current::Modifier;\n \n struct Obj;\n\n impl Get<ShouldClose> for Obj {\n fn get(&self) -> ShouldClose { ShouldClose(false) }\n }\n\n impl Modifier<Obj> for ShouldClose {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetShouldClose for Obj {\n fn set_should_close(&mut self, val: ShouldClose) {\n self.set_mut(val);\n }\n }\n\n impl Get<Size> for Obj {\n fn get(&self) -> Size { Size([0, 0]) }\n }\n\n impl Modifier<Obj> for Size {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetSize for Obj {\n fn set_size(&mut self, val: Size) {\n self.set_mut(val);\n }\n }\n\n fn foo<T: GetShouldClose \n + GetSize \n + SetShouldClose\n + SetSize>(_obj: T) {}\n\n foo(Obj);\n}\n\n\/\/\/ Implemented by windows that can swap buffers.\npub trait SwapBuffers {\n \/\/\/ Swaps the buffers.\n fn swap_buffers(&mut self);\n}\n\nimpl<'a, W: 'a + SwapBuffers> SwapBuffers for Usage<'a, W> {\n #[inline(always)]\n fn swap_buffers(&mut self) {\n self.with_unwrap(|window: &RefCell<W>| {\n window.borrow_mut().deref_mut().swap_buffers()\n })\n }\n}\n\nimpl<'a, W: 'a + SwapBuffers> SwapBuffers for &'a RefCell<W> {\n #[inline(always)]\n fn swap_buffers(&mut self) {\n self.borrow_mut().deref_mut().swap_buffers()\n }\n}\n\n\/\/\/ Implemented by windows that can pull events.\npub trait PollEvent<E: GenericEvent> {\n \/\/\/ Polls event from window.\n fn poll_event(&mut self) -> Option<E>;\n}\n\nimpl<'a, W: 'a + PollEvent<I>, I: GenericEvent> PollEvent<I> for Usage<'a, W> {\n #[inline(always)]\n fn poll_event(&mut self) -> Option<I> {\n self.with_unwrap(|window: &RefCell<W>| {\n window.borrow_mut().deref_mut().poll_event()\n })\n }\n}\n\nimpl<'a, W: 'a + PollEvent<I>, I: GenericEvent> PollEvent<I> for &'a RefCell<W> {\n #[inline(always)]\n fn poll_event(&mut self) -> Option<I> {\n self.borrow_mut().deref_mut().poll_event()\n }\n}\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32, ..2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub 
fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\/\/\/ Implemented by window back-end.\npub trait Window<E: GenericEvent = InputEvent>:\n SwapBuffers\n + PollEvent<E>\n + GetShouldClose\n + GetSize {\n \/\/\/ Get the window's settings.\n fn get_settings<'a>(&'a self) -> &'a WindowSettings;\n\n \/\/\/ Inform the window that it should close.\n fn close(&mut self);\n\n \/\/\/ Get the size in drawing coordinates.\n fn get_draw_size(&self) -> (u32, u32);\n\n \/\/\/ When the cursor is captured,\n \/\/\/ it is hidden and the cursor position does not change.\n \/\/\/ Only relative mouse motion is registered.\n fn capture_cursor(&mut self, _enabled: bool);\n}\n\n\/\/\/ An implementation of Window that runs without a window at all.\npub struct NoWindow {\n settings: WindowSettings,\n should_close: bool\n}\n\nimpl NoWindow {\n \/\/\/ Returns a new `NoWindow`.\n pub fn new(settings: WindowSettings) -> NoWindow {\n NoWindow {\n settings: settings,\n should_close: false\n }\n }\n}\n\nimpl SwapBuffers for NoWindow {\n fn swap_buffers(&mut self) {}\n}\n\nimpl PollEvent<InputEvent> for NoWindow {\n fn poll_event(&mut self) -> Option<InputEvent> { None }\n}\n\nimpl Get<ShouldClose> for NoWindow {\n fn get(&self) -> ShouldClose {\n ShouldClose(self.should_close)\n }\n}\n\nimpl Get<Size> for NoWindow {\n fn get(&self) -> Size {\n Size([0, 0])\n }\n}\n\nimpl Window<InputEvent> for NoWindow {\n fn get_settings<'a>(&'a self) -> &'a WindowSettings {\n &self.settings\n }\n\n fn close(&mut self) {\n self.should_close = true\n }\n\n fn get_draw_size(&self) -> (u32, u32) {\n let Size([w, h]) = self.get_size();\n (w, h)\n }\n\n fn capture_cursor(&mut self, _enabled: bool) {}\n}\n<commit_msg>Added property types to window module<commit_after>\/\/! 
Window abstraction\n\nuse std::cell::RefCell;\nuse input::InputEvent;\nuse current::{ Get, Set, Usage };\n\nuse GenericEvent;\n\n\/\/\/ Whether window should close or not.\npub struct ShouldClose(pub bool);\n\n\/\/\/ Work-around trait for `Get<ShouldClose>`.\n\/\/\/ Used to support generic constraints.\npub trait GetShouldClose: Get<ShouldClose> {\n \/\/\/ Returns whether window should close.\n fn get_should_close(&self) -> ShouldClose {\n self.get()\n }\n}\n\nimpl<T: Get<ShouldClose>> GetShouldClose for T {}\n\n\/\/\/ Work-around trait for `Set<ShouldClose>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetShouldClose: Set<ShouldClose> {\n \/\/\/ Sets whether window should close.\n fn set_should_close(&mut self, val: ShouldClose);\n}\n\n\/\/\/ The size of the window.\npub struct Size(pub [u32, ..2]);\n\n\/\/\/ Work-around trait for `Get<Size>`.\n\/\/\/ Used to support generic constraints.\npub trait GetSize: Get<Size> {\n \/\/\/ Returns the size of window.\n fn get_size(&self) -> Size {\n self.get()\n }\n}\n\nimpl<T: Get<Size>> GetSize for T {}\n\n\/\/\/ Work-around trait for `Set<Size>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetSize: Set<Size> {\n \/\/\/ Sets size of window.\n fn set_size(&mut self, val: Size);\n}\n\n\/\/\/ The title of the window.\npub struct Title(pub String);\n\n\/\/\/ Work-around trait for `Get<Title>`.\n\/\/\/ Used to support generic constraints.\npub trait GetTitle: Get<Title> {\n \/\/\/ Returns the title of the window.\n fn get_title(&self) -> Title {\n self.get()\n }\n}\n\nimpl<T: Get<Title>> GetTitle for T {}\n\n\/\/\/ Work-around trait for `Set<Title>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetTitle: Set<Title> {\n \/\/\/ Sets title of window.\n fn set_title(&mut self, val: Title);\n}\n\n\/\/\/ The anti-aliasing samples when rendering.\npub struct Samples(pub u8);\n\n\/\/\/ Work-around trait for `Get<Samples>`.\n\/\/\/ Used to support generic constraints.\npub trait GetSamples: Get<Samples> {\n \/\/\/ Returns the antialiasing samples when rendering.\n fn get_samples(&self) -> Samples {\n self.get()\n }\n}\n\nimpl<T: Get<Samples>> GetSamples for T {}\n\n\/\/\/ Work-around trait for `Set<Samples>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetSamples: Set<Samples> {\n \/\/\/ Sets antialiasing samples of window.\n fn set_samples(&mut self, val: Samples);\n}\n\n\/\/\/ Whether window is opened in full screen mode.\npub struct Fullscreen(pub bool);\n\n\/\/\/ Work-around trait for `Get<Fullscreen>`.\n\/\/\/ Used to support generic constraints.\npub trait GetFullscreen: Get<Fullscreen> {\n \/\/\/ Returns whether window is in full screen mode.\n fn get_fullscreen(&self) -> Fullscreen {\n self.get()\n }\n}\n\nimpl<T: Get<Fullscreen>> GetFullscreen for T {}\n\n\/\/\/ Work-around trait for `Set<Fullscreen>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetFullscreen: Set<Fullscreen> {\n \/\/\/ Sets window to fullscreen mode.\n fn set_fullscreen(&mut self, val: Fullscreen);\n}\n\n\/\/\/ Whether to exit when pressing the Esc keyboard button.\npub struct ExitOnEsc(pub bool);\n\n\/\/\/ Work-around trait for `Get<ExitOnEsc>`.\n\/\/\/ Used to support generic constraints.\npub trait GetExitOnEsc: Get<ExitOnEsc> {\n 
\/\/\/ Returns whether window exits when pressing Esc.\n fn get_exit_on_esc(&self) -> ExitOnEsc {\n self.get()\n }\n}\n\nimpl<T: Get<ExitOnEsc>> GetExitOnEsc for T {}\n\n\/\/\/ Work-around trait for `Set<ExitOnEsc>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetExitOnEsc: Set<ExitOnEsc> {\n \/\/\/ Sets exit when pressing Esc.\n fn set_exit_on_esc(&mut self, val: ExitOnEsc);\n}\n\n\/\/\/ Whether to capture the mouse cursor.\npub struct CaptureCursor(pub bool);\n\n\/\/\/ Work-around trait for `Get<CaptureCursor>`.\n\/\/\/ Used to support generic constraints.\npub trait GetCaptureCursor: Get<CaptureCursor> {\n \/\/\/ Returns whether window captures cursor.\n fn get_capture_cursor(&self) -> CaptureCursor {\n self.get()\n }\n}\n\nimpl<T: Get<CaptureCursor>> GetCaptureCursor for T {}\n\n\/\/\/ Work-around trait for `Set<CaptureCursor>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetCaptureCursor: Set<CaptureCursor> {\n \/\/\/ Sets capture cursor.\n fn set_capture_cursor(&mut self, val: CaptureCursor);\n}\n\n\/\/\/ The draw size of the window.\npub struct DrawSize(pub [u32, ..2]);\n\n\/\/\/ Work-around trait for `Get<DrawSize>`.\n\/\/\/ Used to support generic constraints.\npub trait GetDrawSize: Get<DrawSize> {\n \/\/\/ Returns the draw size of window.\n fn get_draw_size(&self) -> DrawSize {\n self.get()\n }\n}\n\nimpl<T: Get<DrawSize>> GetDrawSize for T {}\n\n\/\/\/ Work-around trait for `Set<DrawSize>`.\n\/\/\/ Used to support generic constraints.\n\/\/\/ This must be implemented for every `Modifier` impl.\npub trait SetDrawSize: Set<DrawSize> {\n \/\/\/ Sets draw size.\n fn set_draw_size(&mut self, val: DrawSize);\n}\n\n#[test]\nfn test_methods() {\n use current::Modifier;\n \n struct Obj;\n\n impl Get<ShouldClose> for Obj {\n fn get(&self) -> ShouldClose { ShouldClose(false) }\n }\n\n impl Modifier<Obj> for ShouldClose {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetShouldClose for Obj {\n fn set_should_close(&mut self, val: ShouldClose) {\n self.set_mut(val);\n }\n }\n\n impl Get<Size> for Obj {\n fn get(&self) -> Size { Size([0, 0]) }\n }\n\n impl Modifier<Obj> for Size {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetSize for Obj {\n fn set_size(&mut self, val: Size) {\n self.set_mut(val);\n }\n }\n\n impl Get<Title> for Obj {\n fn get(&self) -> Title { Title(\"hello\".to_string()) }\n }\n\n impl Modifier<Obj> for Title {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetTitle for Obj {\n fn set_title(&mut self, val: Title) {\n self.set_mut(val);\n }\n }\n\n impl Get<Samples> for Obj {\n fn get(&self) -> Samples { Samples(0) }\n }\n\n impl Modifier<Obj> for Samples {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetSamples for Obj {\n fn set_samples(&mut self, val: Samples) {\n self.set_mut(val);\n }\n }\n\n impl Get<Fullscreen> for Obj {\n fn get(&self) -> Fullscreen { Fullscreen(false) }\n }\n\n impl Modifier<Obj> for Fullscreen {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetFullscreen for Obj {\n fn set_fullscreen(&mut self, val: Fullscreen) {\n self.set_mut(val);\n }\n }\n\n impl Get<ExitOnEsc> for Obj {\n fn get(&self) -> ExitOnEsc { ExitOnEsc(true) }\n }\n\n impl Modifier<Obj> for ExitOnEsc {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetExitOnEsc for Obj {\n fn set_exit_on_esc(&mut self, val: ExitOnEsc) {\n self.set_mut(val);\n }\n }\n\n impl Get<CaptureCursor> for Obj {\n fn get(&self) -> CaptureCursor 
{ CaptureCursor(false) }\n }\n\n impl Modifier<Obj> for CaptureCursor {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetCaptureCursor for Obj {\n fn set_capture_cursor(&mut self, val: CaptureCursor) {\n self.set_mut(val);\n }\n }\n\n impl Get<DrawSize> for Obj {\n fn get(&self) -> DrawSize { DrawSize([0, 0]) }\n }\n\n impl Modifier<Obj> for DrawSize {\n fn modify(self, _obj: &mut Obj) {}\n }\n\n impl SetDrawSize for Obj {\n fn set_draw_size(&mut self, val: DrawSize) {\n self.set_mut(val);\n }\n }\n\n fn foo<T: GetShouldClose + SetShouldClose\n + GetSize + SetSize\n + GetTitle + SetTitle\n + GetSamples + SetSamples\n + GetFullscreen + SetFullscreen\n + GetExitOnEsc + SetExitOnEsc\n + GetCaptureCursor + SetCaptureCursor\n + GetDrawSize + SetDrawSize>(_obj: T) {}\n\n foo(Obj);\n}\n\n\/\/\/ Implemented by windows that can swap buffers.\npub trait SwapBuffers {\n \/\/\/ Swaps the buffers.\n fn swap_buffers(&mut self);\n}\n\nimpl<'a, W: 'a + SwapBuffers> SwapBuffers for Usage<'a, W> {\n #[inline(always)]\n fn swap_buffers(&mut self) {\n self.with_unwrap(|window: &RefCell<W>| {\n window.borrow_mut().deref_mut().swap_buffers()\n })\n }\n}\n\nimpl<'a, W: 'a + SwapBuffers> SwapBuffers for &'a RefCell<W> {\n #[inline(always)]\n fn swap_buffers(&mut self) {\n self.borrow_mut().deref_mut().swap_buffers()\n }\n}\n\n\/\/\/ Implemented by windows that can pull events.\npub trait PollEvent<E: GenericEvent> {\n \/\/\/ Polls event from window.\n fn poll_event(&mut self) -> Option<E>;\n}\n\nimpl<'a, W: 'a + PollEvent<I>, I: GenericEvent> PollEvent<I> for Usage<'a, W> {\n #[inline(always)]\n fn poll_event(&mut self) -> Option<I> {\n self.with_unwrap(|window: &RefCell<W>| {\n window.borrow_mut().deref_mut().poll_event()\n })\n }\n}\n\nimpl<'a, W: 'a + PollEvent<I>, I: GenericEvent> PollEvent<I> for &'a RefCell<W> {\n #[inline(always)]\n fn poll_event(&mut self) -> Option<I> {\n self.borrow_mut().deref_mut().poll_event()\n }\n}\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32, ..2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\/\/\/ Implemented by window back-end.\npub trait Window<E: GenericEvent = InputEvent>:\n SwapBuffers\n + PollEvent<E>\n + GetShouldClose\n + GetSize {\n \/\/\/ Get the window's settings.\n fn get_settings<'a>(&'a self) -> &'a WindowSettings;\n\n \/\/\/ Inform the window that it should close.\n fn close(&mut self);\n\n \/\/\/ Get the size in drawing coordinates.\n fn get_draw_size(&self) -> (u32, u32);\n\n \/\/\/ When the cursor is captured,\n \/\/\/ it is hidden and the cursor position does not change.\n \/\/\/ Only relative mouse motion is registered.\n fn capture_cursor(&mut self, _enabled: bool);\n}\n\n\/\/\/ An implementation of Window that runs without a window at all.\npub struct NoWindow {\n settings: WindowSettings,\n should_close: bool\n}\n\nimpl NoWindow {\n \/\/\/ Returns a new `NoWindow`.\n pub fn new(settings: WindowSettings) -> NoWindow {\n NoWindow 
{\n settings: settings,\n should_close: false\n }\n }\n}\n\nimpl SwapBuffers for NoWindow {\n fn swap_buffers(&mut self) {}\n}\n\nimpl PollEvent<InputEvent> for NoWindow {\n fn poll_event(&mut self) -> Option<InputEvent> { None }\n}\n\nimpl Get<ShouldClose> for NoWindow {\n fn get(&self) -> ShouldClose {\n ShouldClose(self.should_close)\n }\n}\n\nimpl Get<Size> for NoWindow {\n fn get(&self) -> Size {\n Size([0, 0])\n }\n}\n\nimpl Window<InputEvent> for NoWindow {\n fn get_settings<'a>(&'a self) -> &'a WindowSettings {\n &self.settings\n }\n\n fn close(&mut self) {\n self.should_close = true\n }\n\n fn get_draw_size(&self) -> (u32, u32) {\n let Size([w, h]) = self.get_size();\n (w, h)\n }\n\n fn capture_cursor(&mut self, _enabled: bool) {}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Destroying dumb buffer test<commit_after>\nextern crate drm;\nuse drm::Device;\nuse drm::mode::*;\n\n#[test]\nfn destroy_dumb() {\n let dev = Device::first_card().expect(\"Failed to open card\");\n let buf = DumbBuf::<u32>::create(&dev, 640, 480).expect(\"Failed to crate dumbbuf\");\n\n drop(buf);\n\n println!(\"Didn't panic!\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(std_misc)]\n\nextern crate rustfmt;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::Read;\nuse rustfmt::*;\n\n\/\/ For now, the only supported regression tests are idempotent tests - the input and\n\/\/ output must match exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at least report.\n#[test]\nfn idempotent_tests() {\n println!(\"Idempotent tests:\");\n\n \/\/ Get all files in the tests\/idem directory\n let files = fs::read_dir(\"tests\/idem\").unwrap();\n let files2 = fs::read_dir(\"tests\").unwrap();\n let files3 = fs::read_dir(\"src\/bin\").unwrap();\n \/\/ For each file, run rustfmt and collect the output\n\n let mut count = 0;\n let mut fails = 0;\n for entry in files.chain(files2).chain(files3) {\n let path = entry.unwrap().path();\n let file_name = path.to_str().unwrap();\n if !file_name.ends_with(\".rs\") {\n continue;\n }\n println!(\"Testing '{}'...\", file_name);\n match idempotent_check(vec![\"rustfmt\".to_owned(), file_name.to_owned()]) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n }\n \/\/ And also dogfood rustfmt!\n println!(\"Testing 'src\/lib.rs'...\");\n match idempotent_check(vec![\"rustfmt\".to_owned(), \"src\/lib.rs\".to_owned()]) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n\n \/\/ Display results\n println!(\"Ran {} idempotent tests; {} failures.\", count, fails);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Compare output to input.\nfn print_mismatches(result: HashMap<String, String>) {\n for (file_name, fmt_text) in result {\n println!(\"Mismatch in {}.\", file_name);\n println!(\"{}\", fmt_text);\n }\n}\n\n\/\/ Ick, just needed to get a &'static to handle_result.\nstatic HANDLE_RESULT: &'static Fn(HashMap<String, 
String>) = &handle_result;\n\npub fn idempotent_check(args: Vec<String>) -> Result<(), HashMap<String, String>> {\n use std::thread;\n use std::fs;\n use std::io::Read;\n thread::spawn(move || {\n run(args, WriteMode::Return(HANDLE_RESULT));\n }).join().map_err(|mut any|\n any.downcast_mut::<HashMap<String, String>>()\n .unwrap() \/\/ i know it is a hashmap\n .drain() \/\/ i only get a reference :(\n .collect() \/\/ so i need to turn it into an iter and then back\n )\n}\n\n\/\/ Compare output to input.\nfn handle_result(result: HashMap<String, String>) {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n let mut f = fs::File::open(&file_name).unwrap();\n let mut text = String::new();\n \/\/ TODO: speedup by running through bytes iterator\n f.read_to_string(&mut text).unwrap();\n if fmt_text != text {\n failures.insert(file_name, fmt_text);\n }\n }\n if !failures.is_empty() {\n panic!(failures);\n }\n}<commit_msg>pass single filename instead of full argument list to idem_check<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(std_misc)]\n\nextern crate rustfmt;\n\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::Read;\nuse rustfmt::*;\n\n\/\/ For now, the only supported regression tests are idempotent tests - the input and\n\/\/ output must match exactly.\n\/\/ FIXME(#28) would be good to check for error messages and fail on them, or at least report.\n#[test]\nfn idempotent_tests() {\n println!(\"Idempotent tests:\");\n\n \/\/ Get all files in the tests\/idem directory\n let files = fs::read_dir(\"tests\/idem\").unwrap();\n let files2 = fs::read_dir(\"tests\").unwrap();\n let files3 = fs::read_dir(\"src\/bin\").unwrap();\n \/\/ For each file, run rustfmt and collect the output\n\n let mut count = 0;\n let mut fails = 0;\n for entry in files.chain(files2).chain(files3) {\n let path = entry.unwrap().path();\n let file_name = path.to_str().unwrap();\n if !file_name.ends_with(\".rs\") {\n continue;\n }\n println!(\"Testing '{}'...\", file_name);\n match idempotent_check(file_name.to_owned()) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n }\n \/\/ And also dogfood rustfmt!\n println!(\"Testing 'src\/lib.rs'...\");\n match idempotent_check(\"src\/lib.rs\".to_owned()) {\n Ok(()) => {},\n Err(m) => {\n print_mismatches(m);\n fails += 1;\n },\n }\n count += 1;\n\n \/\/ Display results\n println!(\"Ran {} idempotent tests; {} failures.\", count, fails);\n assert!(fails == 0, \"{} idempotent tests failed\", fails);\n}\n\n\/\/ Compare output to input.\nfn print_mismatches(result: HashMap<String, String>) {\n for (file_name, fmt_text) in result {\n println!(\"Mismatch in {}.\", file_name);\n println!(\"{}\", fmt_text);\n }\n}\n\n\/\/ Ick, just needed to get a &'static to handle_result.\nstatic HANDLE_RESULT: &'static Fn(HashMap<String, String>) = &handle_result;\n\npub fn idempotent_check(filename: String) -> Result<(), HashMap<String, String>> {\n use std::thread;\n use std::fs;\n use std::io::Read;\n\tlet args = vec![\"rustfmt\".to_owned(), 
filename];\n thread::spawn(move || {\n run(args, WriteMode::Return(HANDLE_RESULT));\n }).join().map_err(|mut any|\n any.downcast_mut::<HashMap<String, String>>()\n .unwrap() \/\/ i know it is a hashmap\n .drain() \/\/ i only get a reference :(\n .collect() \/\/ so i need to turn it into an iter and then back\n )\n}\n\n\/\/ Compare output to input.\nfn handle_result(result: HashMap<String, String>) {\n let mut failures = HashMap::new();\n\n for (file_name, fmt_text) in result {\n let mut f = fs::File::open(&file_name).unwrap();\n let mut text = String::new();\n \/\/ TODO: speedup by running through bytes iterator\n f.read_to_string(&mut text).unwrap();\n if fmt_text != text {\n failures.insert(file_name, fmt_text);\n }\n }\n if !failures.is_empty() {\n panic!(failures);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>print terms<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>working on factories<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>wip: bunch of code to retrieve-token example<commit_after>#![recursion_limit = \"1024\"]\nextern crate clap;\nextern crate futures;\nextern crate herder;\nextern crate hyper;\nextern crate hyper_tls;\nextern crate rpassword;\nextern crate serde_json;\nextern crate serde_urlencoded;\nextern crate tokio_core;\nextern crate url;\n\n#[macro_use]\nextern crate serde_derive;\n\nuse clap::{Arg, App};\nuse herder::api::oauth::OAuthApp;\nuse herder::errors::*;\nuse hyper::Client as WebClient;\nuse hyper::{Body, Post, Uri};\nuse hyper::client::{FutureResponse, Request, Response};\nuse hyper::header::{Headers, ContentType, Authorization, Bearer};\nuse hyper_tls::HttpsConnector;\nuse futures::{Future, Stream};\nuse futures::future::AndThen;\nuse tokio_core::reactor::Core;\nuse url::Url;\n\nuse std::io;\nuse std::io::Write;\nuse std::fs::File;\nuse std::str::FromStr;\nuse std::sync::{Arc, Mutex};\n\npub enum AuthorizationType {\n ClientCredentials {\n username: String\n },\n Password {\n user: String,\n pass: String\n },\n RequestAuthorization { user: String }\n}\n\nstruct HerderData {\n data: Arc<Mutex<Vec<u8>>>\n}\n\nimpl HerderData {\n fn new() -> Self {\n HerderData { data: Arc::new(Mutex::new(Vec::new())) }\n }\n}\n\n#[derive(Deserialize, Serialize, Debug)]\nstruct APIToken {\n access_token: String,\n token_type: String,\n scope: String,\n created_at: i64\n}\n\nfn main() {\n if let Err(ref e) = run() {\n println!(\"error: {}\", e);\n\n for e in e.iter().skip(1) {\n println!(\"caused by: {}\", e);\n }\n\n \/\/ The backtrace is not always generated. Try to run this example\n \/\/ with `RUST_BACKTRACE=1`.\n if let Some(backtrace) = e.backtrace() {\n println!(\"backtrace: {:?}\", backtrace);\n }\n ::std::process::exit(1);\n }\n}\n\nfn run() -> Result<()> {\n let matches = App::new(\"Herder Mastodon API Token Retriever\")\n .version(\"0.1.5\")\n .author(\"saibatizoku\")\n .about(\"Retrieves a bearer token for a given set of Client credentials.\")\n .arg(Arg::with_name(\"url\")\n .help(\"Sets the URL, https only, for the Mastodon instance. Example: https:\/\/example.com\/\")\n .required(true)\n .takes_value(true)\n .value_name(\"BASE_URL\"))\n .arg(Arg::with_name(\"client\")\n .help(\"Specifies the path to save the JSON results from the server, when successful. 
Defaults to: .\/client.json\")\n .required(true)\n .takes_value(true)\n .value_name(\"CLIENT_JSON_FILE\"))\n .get_matches();\n println!(\"Retrieving bearer token...\");\n println!(\"\\tReading input file...\");\n let base_url = matches.value_of(\"url\").unwrap_or(\"https:\/\/localhost:3000\");\n let client_path = matches.value_of(\"client\").unwrap_or(\"client.json\");\n\n let base_url = Url::from_str(base_url).chain_err(|| \"Invalid URL\")?;\n let end_url = base_url.join(\"\/oauth\/token\")?;\n let mastodon_uri = Uri::from_str(end_url.as_str()).chain_err(|| \"Invalid URL\")?;\n\n println!(\"\\t\\tURL: {}\", base_url);\n println!(\"\\t\\tURI: {}\", mastodon_uri);\n println!(\"\\t\\tClient: {}\", client_path);\n\n let client_json = File::open(client_path).chain_err(|| \"Invalid file path\")?;\n let app: OAuthApp = serde_json::from_reader(client_json).chain_err(|| \"Could not save OAuth to JSON File.\")?;\n println!(\"\\t...Loaded app: {}\", app);\n println!();\n println!(\"\\tUser credentials:\");\n {\n println!(\"username:\");\n let mut user = String::new();\n io::stdin().read_line(&mut user).chain_err(|| \"wrong username\").unwrap();\n\n \/\/let mut pass = rpassword::prompt_password_stdout(\"password?\")\n \/\/ .chain_err(|| \"Could not process password\").unwrap();\n\n user.pop();\n \/\/pass.pop();\n\n let herder_data = HerderData::new();\n let data = herder_data.data;\n\n fetch_token(mastodon_uri.as_ref(), &app, AuthorizationType::RequestAuthorization{ user }, data.clone())?;\n\n {\n let data = data.lock().unwrap();\n if data.is_empty() { panic!(\"Invalid result. Empty\") }\n println!();\n io::stdout().write_all(&data)?;\n println!();\n let api_token: APIToken = serde_json::from_slice(&data).chain_err(|| \"Unexpected JSON error.\")?;\n println!(\"Token: {:?}\", api_token);\n println!();\n }\n }\n Ok(())\n}\nfn fetch_token(url: &str, app: &OAuthApp, grant_type: AuthorizationType, data: Arc<Mutex<Vec<u8>>>) -> Result<()> {\n let mut core = Core::new().expect(\"Could not start client reactor\");\n let client = WebClient::configure()\n .connector(HttpsConnector::new(4, &core.handle()))\n .build(&core.handle());\n\n let mut uri = Url::from_str(url)?;\n \/\/ uri.query_pairs_mut().append_pair(\"id[]\", \"43\");\n \/\/ uri.query_pairs_mut().append_pair(\"id[]\", \"44\");\n \/\/ println!(\"query: {}\", &uri.query().unwrap());\n\n \/\/let params = client_credentials_grant_type(app);\n \/\/let body_str = serde_urlencoded::to_string(params).unwrap();\n let body_str = match grant_type {\n AuthorizationType::ClientCredentials { username: user } => {\n let params = &[\n (\"client_id\", &app.client_id),\n (\"client_secret\", &app.client_secret),\n (\"grant_type\", &String::from(\"client_credentials\")),\n (\"scope\", &\"read write follow\".to_owned())\n ];\n serde_urlencoded::to_string(params).unwrap()\n },\n AuthorizationType::Password { user, pass } => {\n let params = &[\n (\"client_id\", &app.client_id),\n (\"client_secret\", &app.client_secret),\n (\"grant_type\", &\"password\".to_owned()),\n (\"username\", &user),\n (\"password\", &pass)\n ];\n serde_urlencoded::to_string(params).unwrap()\n },\n AuthorizationType::RequestAuthorization { user } => {\n let params = &[\n (\"client_id\", &app.client_id),\n (\"client_secret\", &app.client_secret),\n (\"redirect_uri\", &app.redirect_uri),\n (\"code\", &\"CODE\".to_owned()),\n (\"grant_type\", &\"authorization_code\".to_owned())\n ];\n serde_urlencoded::to_string(params).unwrap()\n }\n };\n let req = build_request(uri.as_str(), None, 
Some(&body_str))?;\n println!(\"Created a new request: {:#?}\", &req);\n \/\/println!(\"Created a herder response: {}\", data.lock().unwrap());\n \/\/println!(\"Request: {:#?}\", &req);\n \/\/println!(\"Body: {:#?}\", &req.body());\n println!(\"Body text: {:?}\", body_str);\n let mut data = data.lock().unwrap();\n let work = client.request(req).and_then(|res: Response| {\n println!(\"response {:#?}\", res);\n res.body().for_each(|chunk| {\n data.extend_from_slice(&chunk);\n Ok(())\n })\n });\n core.run(work).chain_err(|| \"Failed to run registration\")?;\n Ok(())\n}\n\nfn build_request(url: &str, query: Option<&str>, body: Option<&str>) -> Result<Request<Body>> {\n let uri = Uri::from_str(url).chain_err(|| \"Invalid URI for endpoint\")?;\n let mut req: Request<Body> = Request::new(Post, uri);\n let mut headers = Headers::new();\n \/\/headers.set(\n \/\/ Authorization(\n \/\/ Bearer {\n \/\/ token: \"4a96540a231038a1346601e01eecfef36aece0e181a1362037b3a284db731246\".to_owned()\n \/\/ }\n \/\/ )\n \/\/);\n if body.is_some() {\n headers.set(ContentType::form_url_encoded());\n req.set_body(Body::from(body.unwrap().to_owned()));\n }\n req.headers_mut().clone_from(&headers);\n\n Ok(req)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>handle zero-size allocations correctly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial commit, tested rust by attempting to rewrite parts of the C++ code<commit_after>use std::hashmap::HashMap;\n\n#[deriving(Clone)]\n#[deriving(Eq)]\n#[deriving(ToStr)]\nenum Type {\n Variable(int),\n Operator(~str, ~[Type])\n}\n\nstruct TypedExpr {\n expr : Expr<~TypedExpr>,\n typ : @mut Type\n}\n\nimpl TypedExpr {\n fn new(expr : Expr<~TypedExpr>) -> TypedExpr {\n TypedExpr { expr : expr, typ : @mut Variable(0) }\n }\n}\n\nenum Expr<T> {\n Identifier(~str),\n Apply(T, T),\n Number(int),\n Lambda(~str, T),\n Let(~[(~str, T)], T)\n}\n\nenum Instruction {\n Add,\n Sub,\n Push(int),\n PushGlobal(int),\n PushInt(int),\n Mkap,\n Eval,\n Unwind,\n Slide(int),\n}\n\nenum Var {\n StackVariable(int),\n GlobalVariable(int)\n}\n\nstruct SuperCombinator {\n arity : int,\n instructions : ~[Instruction]\n}\nimpl SuperCombinator {\n fn new() -> SuperCombinator {\n SuperCombinator { arity : 0, instructions : ~[] }\n }\n}\n\nstruct Scope {\n variables : HashMap<~str, Var>\n}\n\nstruct ScopedLookup {\n scopes : ~[Scope],\n}\n\nimpl ScopedLookup {\n fn new_scope(&mut self, scope : Scope) {\n self.scopes.push(scope);\n }\n fn pop_scope(&mut self) {\n self.scopes.pop();\n }\n\n fn find(&self, identifier : &str) -> Option<Var> {\n for scope in self.scopes.rev_iter() {\n match scope.variables.find_equiv(&identifier) {\n Some(var) => { return Some(*var); }\n None => {}\n }\n }\n return None\n }\n}\n\nstruct Compiler {\n stackSize : int,\n globals : HashMap<~str, SuperCombinator>,\n variables : HashMap<~str, Var>,\n globalIndex : int\n}\n\nimpl Compiler {\n fn compileBinding(&mut self, identifier : ~str, expr : &TypedExpr) {\n self.variables.insert(identifier.clone(), GlobalVariable(self.globalIndex));\n self.globalIndex += 1;\n\n let mut comb = SuperCombinator::new();\n match &expr.expr {\n &Lambda(_, _) => {\n \n }\n _ => self.compile(expr, &mut comb.instructions)\n }\n self.globals.insert(identifier, comb);\n }\n\n fn compile(&mut self, expr : &TypedExpr, instructions : &mut ~[Instruction]) {\n match &expr.expr {\n &Identifier(ref name) => {\n match self.variables.find(name) {\n None => fail!(\"Undefined variable \" + *name),\n Some(var) => {\n match var {\n 
&StackVariable(index) => instructions.push(Push(index)),\n &GlobalVariable(index) => instructions.push(PushGlobal(index))\n }\n }\n }\n }\n &Number(num) => instructions.push(PushInt(num)),\n &Apply(ref func, ref arg) => {\n self.compile(*arg, instructions);\n self.compile(*func, instructions);\n instructions.push(Mkap);\n instructions.push(Eval);\n }\n &Lambda(_, _) => {\n fail!(\"Can't compile a lambda\");\n }\n &Let(ref bindings, ref body) => {\n for &(ref name, ref expr) in bindings.iter() {\n self.variables.insert(name.clone(), StackVariable(self.stackSize));\n self.stackSize += 1;\n self.compile(*expr, instructions);\n }\n self.compile(*body, instructions);\n instructions.push(Slide(bindings.len() as int));\n self.stackSize -= bindings.len() as int;\n }\n }\n }\n}\n\nstruct TypeEnvironment<'self> {\n namedTypes : HashMap<~str, @mut Type>,\n types : ~[@mut Type],\n variableIndex : int\n}\n\nimpl TypeEnvironment {\n fn new() -> TypeEnvironment {\n TypeEnvironment { namedTypes : HashMap::new(), types : ~[] , variableIndex : 0 }\n }\n\n fn replace(old : &mut Type, subs : &HashMap<int, Type>) {\n match old {\n &Variable(id) => {\n match subs.find(&id) {\n Some(new) => { *old = new.clone() }\n None => ()\n }\n }\n &Operator(_, ref mut oldTypes) => {\n for t in oldTypes.mut_iter() {\n TypeEnvironment::replace(t, subs); \n }\n }\n }\n }\n\n fn typecheck(&mut self, expr : &mut TypedExpr) {\n *expr.typ = Variable(self.variableIndex);\n self.variableIndex += 1;\n self.types.push(expr.typ);\n match &mut expr.expr {\n &Number(_) => {\n expr.typ = @mut Operator(~\"Int\", ~[]);\n }\n &Identifier(ref name) => {\n match self.namedTypes.find(name) {\n Some(t) => { expr.typ = (*t).clone(); }\n None => { fail!(\"Undefined identifier \" + *name); }\n }\n }\n &Apply(ref mut func, ref mut arg) => {\n println!(\"Applying\");\n self.typecheck(*func);\n self.typecheck(*arg);\n let mut funcType = Operator(~\"->\", ~[(*arg.typ).clone(), Variable(self.variableIndex)]);\n self.variableIndex += 1;\n let subs = unify(self, func.typ, &funcType);\n self.substitute(&subs);\n TypeEnvironment::replace(&mut funcType, &subs);\n *expr.typ = match funcType {\n Operator(_, t) => t[1],\n _ => fail!(\"Can't happen\")\n };\n }\n _ => { () }\n };\n }\n\n fn substitute(&mut self, subs : &HashMap<int, Type>) {\n \/\/println!(\"Substituting {:?}\", subs);\n for t in self.types.iter() {\n println!(\"Type : {:?}\", *t);\n TypeEnvironment::replace(*t, subs);\n }\n }\n\n fn addName(&mut self, name : ~str, t : @mut Type) {\n self.namedTypes.insert(name, t);\n self.addType(t);\n }\n\n fn addType(&mut self, t : @mut Type) {\n self.types.push(t);\n }\n}\n\nfn unify(env : &mut TypeEnvironment, lhs : &Type, rhs : &Type) -> HashMap<int, Type> {\n let mut subs = HashMap::new();\n unify_(env, &mut subs, lhs, rhs);\n subs\n}\nfn unify_(env : &mut TypeEnvironment, subs : &mut HashMap<int, Type>, lhs : &Type, rhs : &Type) {\n \n \/\/println!(\"Unifying {:?} and {:?}\", lhs, rhs);\n match (lhs, rhs) {\n (&Variable(lid), &Variable(rid)) => {\n if lid != rid {\n subs.insert(lid, Variable(rid));\n }\n }\n (&Operator(ref lName, ref lTypes), &Operator(ref rName, ref rTypes)) => {\n if *lName != *rName || lTypes.len() != rTypes.len() {\n fail!(\"Could not unify Operators \" + *lName + \" and \" + *rName);\n }\n for i in range(0, lTypes.len()) {\n unify_(env, subs, &lTypes[i], &rTypes[i]);\n }\n }\n (&Variable(lid), &Operator(_, _)) => { subs.insert(lid, (*rhs).clone()); }\n _ => { unify_(env, subs, rhs, lhs); }\n }\n}\n\nfn function_type(func : &Type, arg 
: &Type) -> Type {\n Operator(~\"->\", ~[func.clone(), arg.clone()])\n}\n\n#[deriving(Clone)]\nenum Node {\n Application(@Node, @Node),\n Int(int),\n Combinator(@SuperCombinator)\n}\n\nstruct VM {\n globals : ~[@SuperCombinator],\n heap : ~[Node]\n}\n\nimpl VM {\n fn new() -> VM {\n VM { globals : ~[], heap : ~[] }\n }\n fn execute(&self, stack : &mut ~[@Node], code : &[Instruction]) {\n debug!(\"Entering frame\");\n let mut i = 0;\n while i < code.len() {\n println!(\"Executing instruction : {:?}\", code[i]);\n match &code[i] {\n &Add => {\n let l = stack.pop();\n let r = stack.pop();\n println!(\"{:?} + {:?}\", l, r);\n match (*l, *r) {\n (Int(lhs), Int(rhs)) => { stack.push(@Int(lhs + rhs)); }\n _ => fail!(\"Expected fully evaluted numbers in Add instruction\")\n }\n }\n &Sub => {\n let l = stack.pop();\n let r = stack.pop();\n match (*l, *r) {\n (Int(lhs), Int(rhs)) => { stack.push(@Int(lhs - rhs)); }\n _ => fail!(\"Expected fully evaluted numbers in Sub instruction\")\n }\n }\n &PushInt(value) => { stack.push(@Int(value)); }\n &Push(index) => {\n let x = stack[index].clone();\n stack.push(x);\n }\n &PushGlobal(index) => {\n stack.push(@Combinator(self.globals[index]));\n }\n &Mkap => {\n let func = stack.pop();\n let arg = stack.pop();\n stack.push(@Application(func, arg));\n }\n &Eval => {\n static unwindCode : &'static [Instruction] = &[Unwind];\n let mut newStack = ~[stack[stack.len() - 1]];\n self.execute(&mut newStack, unwindCode);\n assert!(newStack.len() == 1);\n stack[stack.len() - 1] = newStack[0];\n }\n &Unwind => {\n match *stack[stack.len() - 1] {\n Application(func, arg) => {\n stack.push(func);\n i -= 1;\/\/Redo the unwind instruction\n }\n Combinator(comb) => {\n for j in range(stack.len() - (comb.arity as uint) - 1, stack.len()) {\n stack[j] = match stack[j] {\n @Application(func, arg) => arg,\n _ => fail!(\"Expected Application\")\n };\n }\n let mut newStack = ~[stack[stack.len() - 1]];\n self.execute(&mut newStack, comb.instructions);\n assert!(newStack.len() == 0);\n for i in range(0, comb.arity) {\n stack.pop();\n }\n stack.push(newStack[0]);\n }\n Int(_) => ()\n }\n }\n &Slide(size) => {\n for _ in range(0, size) {\n stack.pop();\n }\n }\n }\n i += 1;\n }\n }\n}\n\nfn main() {\n let t1 = @mut Variable(1);\n let t2 = @mut Operator(~\"->\", ~[Variable(1), Operator(~\"Int\", ~[])]);\n let mut env = TypeEnvironment::new();\n env.addType(t1);\n env.addType(t2);\n let n = ~TypedExpr::new(Identifier(~\"add\"));\n let num = ~TypedExpr::new(Number(1));\n let mut expr = TypedExpr::new(Apply(n, num));\n let type_int = Operator(~\"Int\", ~[]);\n let add_type = @mut function_type(&type_int, &function_type(&type_int, &type_int));\n env.addName(~\"add\", add_type);\n env.typecheck(&mut expr);\n\n println!(\"Result {:?}\", expr.typ);\n\n\n let instr = [PushInt(2), PushInt(3), Add];\n let vm = VM::new();\n let mut stack = ~[];\n vm.execute(&mut stack, instr);\n\n println!(\"Add : {:?}\", stack);\n\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>result<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! NetBSD-specific raw type definitions\n\n#![stable(feature = \"raw_ext\", since = \"1.1.0\")]\n#![rustc_deprecated(since = \"1.8.0\",\n reason = \"these type aliases are no longer supported by \\\n the standard library, the `libc` crate on \\\n crates.io should be used instead for the correct \\\n definitions\")]\n#![allow(deprecated)]\n\nuse os::raw::c_long;\nuse os::unix::raw::{uid_t, gid_t};\n\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blkcnt_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blksize_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type dev_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type fflags_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type ino_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type mode_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type nlink_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type off_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type time_t = i64;\n\n#[unstable(feature = \"pthread_t\", issue = \"29791\")] pub type pthread_t = usize;\n\n#[repr(C)]\n#[derive(Clone)]\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")]\npub struct stat {\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_dev: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mode: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ino: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_nlink: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_uid: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gid: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_rdev: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_birthtime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_birthtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_size: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blocks: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blksize: i32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_flags: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gen: u32,\n st_spare: [u32; 2],\n}\n<commit_msg>fixes a small netbsd compile error<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
NetBSD-specific raw type definitions\n\n#![stable(feature = \"raw_ext\", since = \"1.1.0\")]\n#![rustc_deprecated(since = \"1.8.0\",\n reason = \"these type aliases are no longer supported by \\\n the standard library, the `libc` crate on \\\n crates.io should be used instead for the correct \\\n definitions\")]\n#![allow(deprecated)]\n\nuse os::raw::c_long;\nuse os::unix::raw::{uid_t, gid_t};\n\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blkcnt_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type blksize_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type dev_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type fflags_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type ino_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type mode_t = u32;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type nlink_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type off_t = u64;\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")] pub type time_t = i64;\n\n#[unstable(feature = \"pthread_t\", issue = \"29791\")] pub type pthread_t = usize;\n\n#[repr(C)]\n#[derive(Clone)]\n#[stable(feature = \"raw_ext\", since = \"1.1.0\")]\npub struct stat {\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_dev: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mode: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ino: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_nlink: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_uid: uid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gid: gid_t,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_rdev: u64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_atime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_mtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_ctime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_birthtime: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_birthtime_nsec: c_long,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_size: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blocks: i64,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_blksize: i32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_flags: u32,\n #[stable(feature = \"raw_ext\", since = \"1.1.0\")]\n pub st_gen: u32,\n st_spare: [u32; 2],\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Completed up to traits.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a typo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #13556 : michaelwoerister\/rust\/various-fixes, r=alexcrichton<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\n\/\/ compile-flags:-g\n\/\/ debugger:break issue12886.rs:29\n\/\/ debugger:run\n\/\/ debugger:next\n\/\/ check:[...]30[...]s\n\/\/ debugger:continue\n\n\/\/ IF YOU MODIFY THIS FILE, BE CAREFUL TO ADAPT THE LINE NUMBERS IN THE DEBUGGER COMMANDS\n\n\/\/ This test makes sure that gdb does not set unwanted breakpoints in inlined functions. If a\n\/\/ breakpoint existed in unwrap(), then calling `next` would (when stopped at line 27) would stop\n\/\/ in unwrap() instead of stepping over the function invocation. By making sure that `s` is\n\/\/ contained in the output, after calling `next` just once, we can be sure that we did not stop in\n\/\/ unwrap(). (The testing framework doesn't allow for checking that some text is *not* contained in\n\/\/ the output, which is why we have to make the test in this kind of roundabout way)\nfn bar() -> int {\n let s = Some(5).unwrap();\n s\n}\n\nfn main() {\n let _ = bar();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple link_section test to exercise it<commit_after>#[cfg(not(target_os = \"macos\"))]\n#[link_section=\".moretext\"]\nfn i_live_in_more_text() -> &'static str {\n \"knock knock\"\n}\n\n#[cfg(not(target_os = \"macos\"))]\n#[link_section=\".imm\"]\nstatic magic: uint = 42;\n\n#[cfg(not(target_os = \"macos\"))]\n#[link_section=\".mut\"]\nstatic mut frobulator: uint = 0xdeadbeef;\n\n#[cfg(target_os = \"macos\")]\n#[link_section=\"__TEXT,__moretext\"]\nfn i_live_in_more_text() -> &'static str {\n \"knock knock\"\n}\n\n#[cfg(target_os = \"macos\")]\n#[link_section=\"__RODATA,__imm\"]\nstatic magic: uint = 42;\n\n#[cfg(target_os = \"macos\")]\n#[link_section=\"__DATA,__mut\"]\nstatic mut frobulator: uint = 0xdeadbeef;\n\nfn main() {\n unsafe {\n frobulator = 0xcafebabe;\n printfln!(\"%? %? 
%?\", i_live_in_more_text(), magic, frobulator);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::convert::{From, Into};\nuse std::error::Error;\nuse std::str::FromStr;\n\n#[derive(Debug)]\n#[derive(Clone)]\n#[derive(PartialEq)]\n#[derive(Eq)]\n\/\/ #[derive(Display)]\npub enum FileIDType {\n UUID,\n}\n\npub enum FileIDTypeParseError {\n UnknownType\n}\n\nimpl FromStr for FileIDType {\n type Err = FileIDTypeParseError;\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n match s {\n \"UUID\" => Ok(FileIDType::UUID),\n _ => Err(FileIDTypeParseError::UnknownType)\n }\n }\n}\n\nimpl Into<String> for FileIDType {\n\n fn into(self) -> String {\n match self {\n FileIDType::UUID => String::from(\"UUID\"),\n }\n }\n}\n\n<commit_msg>Derive Hash for FileIDType<commit_after>use std::convert::{From, Into};\nuse std::error::Error;\nuse std::str::FromStr;\nuse std::hash::Hash;\n\n#[derive(Debug)]\n#[derive(Clone)]\n#[derive(PartialEq)]\n#[derive(Eq)]\n\/\/ #[derive(Display)]\n#[derive(Hash)]\npub enum FileIDType {\n UUID,\n}\n\npub enum FileIDTypeParseError {\n UnknownType\n}\n\nimpl FromStr for FileIDType {\n type Err = FileIDTypeParseError;\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n match s {\n \"UUID\" => Ok(FileIDType::UUID),\n _ => Err(FileIDTypeParseError::UnknownType)\n }\n }\n}\n\nimpl Into<String> for FileIDType {\n\n fn into(self) -> String {\n match self {\n FileIDType::UUID => String::from(\"UUID\"),\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a test for sending rope clones between threads.<commit_after>extern crate ropey;\n\nuse std::thread;\nuse std::sync::mpsc;\n\nuse std::iter::Iterator;\n\nuse ropey::Rope;\n\n#[test]\nfn clone_rope_to_thread() {\n let mut rope1 = Rope::from_str(TEXT);\n let rope2 = rope1.clone();\n\n \/\/ Spawn a thread for modifying the clone\n let (tx1, rx1) = mpsc::channel::<Rope>();\n let (tx2, rx2) = mpsc::channel::<Rope>();\n thread::spawn(move || {\n \/\/ Modify rope2\n let mut rope = rx1.recv().unwrap();\n rope.insert(432, \"Hello \");\n rope.insert(2345, \"world! \");\n rope.insert(5256, \"How are \");\n rope.insert(53, \"you \");\n rope.insert(768, \"doing?\\r\\n\");\n\n \/\/ Send it back\n tx2.send(rope).unwrap();\n\n \/\/ Modify it again\n let mut rope = rx1.recv().unwrap();\n rope.insert(3891, \"I'm doing fine, thanks!\");\n tx2.send(rope).unwrap();\n });\n\n \/\/ Send the clone to the other thread for modification\n tx1.send(rope2).unwrap();\n\n \/\/ Make identical modifications to rope1 as are being made\n \/\/ to rope2 in the other thread.\n rope1.insert(432, \"Hello \");\n rope1.insert(2345, \"world! \");\n rope1.insert(5256, \"How are \");\n rope1.insert(53, \"you \");\n rope1.insert(768, \"doing?\\r\\n\");\n\n \/\/ Get rope2 back and make sure they match\n let rope2 = rx2.recv().unwrap();\n let matches = Iterator::zip(rope1.chars(), rope2.chars())\n .map(|(a, b)| a == b)\n .fold(true, |acc, n| acc && n);\n assert_eq!(matches, true);\n\n \/\/ Send rope2 to the other thread again for more modifications.\n tx1.send(rope2).unwrap();\n\n \/\/ Get rope2 back again and make sure they don't match now.\n let rope2 = rx2.recv().unwrap();\n let matches = Iterator::zip(rope1.chars(), rope2.chars())\n .map(|(a, b)| a == b)\n .fold(true, |acc, n| acc && n);\n assert_eq!(matches, false);\n}\n\nconst TEXT: &str = \"\nLorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas sit\namet tellus nec turpis feugiat semper. Nam at nulla laoreet, finibus\neros sit amet, fringilla mauris. 
Fusce vestibulum nec ligula efficitur\nlaoreet. Nunc orci leo, varius eget ligula vulputate, consequat\neleifend nisi. Cras justo purus, imperdiet a augue malesuada, convallis\ncursus libero. Fusce pretium arcu in elementum laoreet. Duis mauris\nnulla, suscipit at est nec, malesuada pellentesque eros. Quisque semper\nporta malesuada. Nunc hendrerit est ac faucibus mollis. Nam fermentum\nid libero sed egestas. Duis a accumsan sapien. Nam neque diam, congue\nnon erat et, porta sagittis turpis. Vivamus vitae mauris sit amet massa\nmollis molestie. Morbi scelerisque, augue id congue imperdiet, felis\nlacus euismod dui, vitae facilisis massa dui quis sapien. Vivamus\nhendrerit a urna a lobortis.\n\nDonec ut suscipit risus. Vivamus dictum auctor vehicula. Sed lacinia\nligula sit amet urna tristique commodo. Sed sapien risus, egestas ac\ntempus vel, pellentesque sed velit. Duis pulvinar blandit suscipit.\nCurabitur viverra dignissim est quis ornare. Nam et lectus purus.\nInteger sed augue vehicula, volutpat est vel, convallis justo.\nSuspendisse a convallis nibh, pulvinar rutrum nisi. Fusce ultrices\naccumsan mauris vitae ornare. Cras elementum et ante at tincidunt. Sed\nluctus scelerisque lobortis. Sed vel dictum enim. Fusce quis arcu\neuismod, iaculis mi id, placerat nulla. Pellentesque porttitor felis\nelementum justo porttitor auctor.\n\nAliquam finibus metus commodo sem egestas, non mollis odio pretium.\nAenean ex lectus, rutrum nec laoreet at, posuere sit amet lacus. Nulla\neros augue, vehicula et molestie accumsan, dictum vel odio. In quis\nrisus finibus, pellentesque ipsum blandit, volutpat diam. Etiam\nsuscipit varius mollis. Proin vel luctus nisi, ac ornare justo. Integer\nporttitor quam magna. Donec vitae metus tempor, ultricies risus in,\ndictum erat. Integer porttitor faucibus vestibulum. Class aptent taciti\nsociosqu ad litora torquent per conubia nostra, per inceptos himenaeos.\nVestibulum ante ipsum primis in faucibus orci luctus et ultrices\nposuere cubilia Curae; Nam semper congue ante, a ultricies velit\nvenenatis vitae. Proin non neque sit amet ex commodo congue non nec\nelit. Nullam vel dignissim ipsum. Duis sed lobortis ante. Aenean\nfeugiat rutrum magna ac luctus.\n\nUt imperdiet non ante sit amet rutrum. Cras vel massa eget nisl gravida\nauctor. Nulla bibendum ut tellus ut rutrum. Quisque malesuada lacinia\nfelis, vitae semper elit. Praesent sit amet velit imperdiet, lobortis\nnunc at, faucibus tellus. Nullam porttitor augue mauris, a dapibus\ntellus ultricies et. Fusce aliquet nec velit in mattis. Sed mi ante,\nlacinia eget ornare vel, faucibus at metus.\n\nPellentesque nec viverra metus. Sed aliquet pellentesque scelerisque.\nDuis efficitur erat sit amet dui maximus egestas. Nullam blandit ante\ntortor. Suspendisse vitae consectetur sem, at sollicitudin neque.\nSuspendisse sodales faucibus eros vitae pellentesque. Cras non quam\ndictum, pellentesque urna in, ornare erat. Praesent leo est, aliquet et\neuismod non, hendrerit sed urna. Sed convallis porttitor est, vel\naliquet felis cursus ac. Vivamus feugiat eget nisi eu molestie.\nPhasellus tincidunt nisl eget molestie consectetur. Phasellus vitae ex\nut odio sollicitudin vulputate. Sed et nulla accumsan, eleifend arcu\neget, gravida neque. Donec sit amet tincidunt eros. Ut in volutpat\nante.\n\nLorem ipsum dolor sit amet, consectetur adipiscing elit. Maecenas sit\namet tellus nec turpis feugiat semper. Nam at nulla laoreet, finibus\neros sit amet, fringilla mauris. Fusce vestibulum nec ligula efficitur\nlaoreet. 
Nunc orci leo, varius eget ligula vulputate, consequat\neleifend nisi. Cras justo purus, imperdiet a augue malesuada, convallis\ncursus libero. Fusce pretium arcu in elementum laoreet. Duis mauris\nnulla, suscipit at est nec, malesuada pellentesque eros. Quisque semper\nporta malesuada. Nunc hendrerit est ac faucibus mollis. Nam fermentum\nid libero sed egestas. Duis a accumsan sapien. Nam neque diam, congue\nnon erat et, porta sagittis turpis. Vivamus vitae mauris sit amet massa\nmollis molestie. Morbi scelerisque, augue id congue imperdiet, felis\nlacus euismod dui, vitae facilisis massa dui quis sapien. Vivamus\nhendrerit a urna a lobortis.\n\nDonec ut suscipit risus. Vivamus dictum auctor vehicula. Sed lacinia\nligula sit amet urna tristique commodo. Sed sapien risus, egestas ac\ntempus vel, pellentesque sed velit. Duis pulvinar blandit suscipit.\nCurabitur viverra dignissim est quis ornare. Nam et lectus purus.\nInteger sed augue vehicula, volutpat est vel, convallis justo.\nSuspendisse a convallis nibh, pulvinar rutrum nisi. Fusce ultrices\naccumsan mauris vitae ornare. Cras elementum et ante at tincidunt. Sed\nluctus scelerisque lobortis. Sed vel dictum enim. Fusce quis arcu\neuismod, iaculis mi id, placerat nulla. Pellentesque porttitor felis\nelementum justo porttitor auctor.\n\nAliquam finibus metus commodo sem egestas, non mollis odio pretium.\nAenean ex lectus, rutrum nec laoreet at, posuere sit amet lacus. Nulla\neros augue, vehicula et molestie accumsan, dictum vel odio. In quis\nrisus finibus, pellentesque ipsum blandit, volutpat diam. Etiam\nsuscipit varius mollis. Proin vel luctus nisi, ac ornare justo. Integer\nporttitor quam magna. Donec vitae metus tempor, ultricies risus in,\ndictum erat. Integer porttitor faucibus vestibulum. Class aptent taciti\nsociosqu ad litora torquent per conubia nostra, per inceptos himenaeos.\nVestibulum ante ipsum primis in faucibus orci luctus et ultrices\nposuere cubilia Curae; Nam semper congue ante, a ultricies velit\nvenenatis vitae. Proin non neque sit amet ex commodo congue non nec\nelit. Nullam vel dignissim ipsum. Duis sed lobortis ante. Aenean\nfeugiat rutrum magna ac luctus.\n\nUt imperdiet non ante sit amet rutrum. Cras vel massa eget nisl gravida\nauctor. Nulla bibendum ut tellus ut rutrum. Quisque malesuada lacinia\nfelis, vitae semper elit. Praesent sit amet velit imperdiet, lobortis\nnunc at, faucibus tellus. Nullam porttitor augue mauris, a dapibus\ntellus ultricies et. Fusce aliquet nec velit in mattis. Sed mi ante,\nlacinia eget ornare vel, faucibus at metus.\n\nPellentesque nec viverra metus. Sed aliquet pellentesque scelerisque.\nDuis efficitur erat sit amet dui maximus egestas. Nullam blandit ante\ntortor. Suspendisse vitae consectetur sem, at sollicitudin neque.\nSuspendisse sodales faucibus eros vitae pellentesque. Cras non quam\ndictum, pellentesque urna in, ornare erat. Praesent leo est, aliquet et\neuismod non, hendrerit sed urna. Sed convallis porttitor est, vel\naliquet felis cursus ac. Vivamus feugiat eget nisi eu molestie.\nPhasellus tincidunt nisl eget molestie consectetur. Phasellus vitae ex\nut odio sollicitudin vulputate. Sed et nulla accumsan, eleifend arcu\neget, gravida neque. Donec sit amet tincidunt eros. 
Ut in volutpat\nante.\n\";\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ejonecho<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added basic map and \"uint128\" test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Some minor code modernization (try\\!, map)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a simple example<commit_after>extern crate rustty;\n\nuse rustty::crayon::Style;\nuse rustty::crayon::Color::*;\n\nfn main() {\n println!(\"{}\", Red.on(Black).blink().paint(\"Hello world!\"));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added simple example<commit_after>extern crate \"lmdb-rs\" as lmdb;\n\nuse std::io::USER_DIR;\nuse lmdb::{EnvBuilder, DbFlags};\n\nfn main() {\n let path = Path::new(\"test-lmdb\");\n let mut env = EnvBuilder::new().open(&path, USER_DIR).unwrap();\n\n let db_handle = env.get_default_db(DbFlags::empty()).unwrap();\n let txn = env.new_transaction().unwrap();\n {\n let db = txn.bind(&db_handle); \/\/ get a database bound to this transaction\n\n let pairs = vec![(\"Albert\", \"Einstein\",),\n (\"Joe\", \"Smith\",),\n (\"Jack\", \"Daniels\")];\n\n for &(name, surname) in pairs.iter() {\n db.set(&surname, &name).unwrap();\n }\n }\n\n \/\/ Note: `commit` is choosen to be explicit as\n \/\/ in case of failure it is responsibility of\n \/\/ the client to handle the error\n match txn.commit() {\n Err(_) => panic!(\"failed to commit!\"),\n Ok(_) => ()\n }\n\n let reader = env.get_reader().unwrap();\n let db = reader.bind(&db_handle);\n let name = db.get::<&str>(&\"Smith\").unwrap();\n println!(\"It's {} Smith\", name);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>match on enums added and used<commit_after><|endoftext|>"} {"text":"<commit_before>use runtime::Runtime;\nuse module::Module;\nuse module::ModuleResult;\nuse module::ModuleError;\nuse std::path::Path;\nuse std::result::Result;\nuse clap::ArgMatches;\nuse regex::Regex;\n\nmod header;\n\npub struct BMModule {\n path: Option<String>,\n}\n\nconst CALLNAMES : &'static [&'static str] = &[ \"bm\", \"bookmark\" ];\n\nimpl Module for BMModule {\n\n fn new(rt : &Runtime) -> BMModule {\n BMModule {\n path: None\n }\n }\n\n fn callnames() -> &'static [&'static str] {\n CALLNAMES\n }\n\n fn name(&self) -> &'static str{\n \"Bookmark\"\n }\n\n fn execute(&self, rt : &Runtime) -> ModuleResult {\n let cmd = rt.config.cli_matches.subcommand_matches(\"bm\").unwrap();\n match cmd.subcommand_name() {\n Some(\"add\") => {\n debug!(\"Calling 'add'...\");\n add(rt, cmd.subcommand_matches(\"add\").unwrap())\n }\n Some(\"list\") => {\n debug!(\"Calling 'list'...\");\n list(rt, cmd.subcommand_matches(\"list\").unwrap())\n }\n Some(\"remove\") => {\n debug!(\"Calling 'remove'...\");\n list(rt, cmd.subcommand_matches(\"remove\").unwrap())\n }\n _ => {\n info!(\"Not calling any of add, list, remove\");\n Ok(())\n }\n }\n }\n\n fn shutdown(&self, rt : &Runtime) -> ModuleResult {\n Ok(())\n }\n}\n\nfn add<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let url = sub.value_of(\"url\").unwrap();\n let tags = get_tags(rt, sub);\n info!(\"Adding url '{}' with tags '{:?}'\", url, tags.unwrap_or(vec!()));\n\n Ok(())\n}\n\nfn list<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let tags = get_tags(rt, sub);\n let matcher = get_matcher(rt, sub);\n\n match matcher {\n Some(reg) => {\n info!(\"Listing urls with matcher '{}' and with tags {:?}\",\n reg.as_str(),\n tags.unwrap_or(vec!()));\n }\n None => {\n info!(\"Listing urls with 
tags {:?}\", tags.unwrap_or(vec!()));\n }\n }\n\n Ok(())\n}\n\nfn remove<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let tags = get_tags(rt, sub);\n let matcher = get_matcher(rt, sub);\n let id = get_id(rt, sub);\n\n match id {\n Some(idstr) => {\n info!(\"Removing urls with id '{}'\", idstr);\n }\n None => {\n match matcher {\n Some(reg) => {\n info!(\"Removing urls with matcher '{}' and with tags {:?}\",\n reg.as_str(),\n tags.unwrap_or(vec!()));\n }\n None => {\n info!(\"Listing urls with tags {:?}\", tags.unwrap_or(vec!()));\n }\n }\n }\n }\n\n Ok(())\n}\n\nfn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Vec<String>> {\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags|\n Some(tags.split(\",\")\n .collect::<Vec<_>>()\n .iter()\n .filter(|e|\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n true\n } else {\n false\n })\n .map(|s| s.to_string())\n .collect()\n )\n )\n\n}\n\nfn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {\n debug!(\"Fetching matcher from commandline\");\n if let Some(s) = sub.value_of(\"match\") {\n if let Ok(r) = Regex::new(s) {\n return Some(r)\n } else {\n error!(\"Regex error, continuing without regex\");\n }\n }\n None\n\n}\n\nfn get_id<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<String> {\n debug!(\"Fetching id from commandline\");\n sub.value_of(\"id\").and_then(|s| Some(String::from(s)))\n}\n\n<commit_msg>Refactor get_tags to return no Option<commit_after>use runtime::Runtime;\nuse module::Module;\nuse module::ModuleResult;\nuse module::ModuleError;\nuse std::path::Path;\nuse std::result::Result;\nuse clap::ArgMatches;\nuse regex::Regex;\n\nmod header;\n\npub struct BMModule {\n path: Option<String>,\n}\n\nconst CALLNAMES : &'static [&'static str] = &[ \"bm\", \"bookmark\" ];\n\nimpl Module for BMModule {\n\n fn new(rt : &Runtime) -> BMModule {\n BMModule {\n path: None\n }\n }\n\n fn callnames() -> &'static [&'static str] {\n CALLNAMES\n }\n\n fn name(&self) -> &'static str{\n \"Bookmark\"\n }\n\n fn execute(&self, rt : &Runtime) -> ModuleResult {\n let cmd = rt.config.cli_matches.subcommand_matches(\"bm\").unwrap();\n match cmd.subcommand_name() {\n Some(\"add\") => {\n debug!(\"Calling 'add'...\");\n add(rt, cmd.subcommand_matches(\"add\").unwrap())\n }\n Some(\"list\") => {\n debug!(\"Calling 'list'...\");\n list(rt, cmd.subcommand_matches(\"list\").unwrap())\n }\n Some(\"remove\") => {\n debug!(\"Calling 'remove'...\");\n list(rt, cmd.subcommand_matches(\"remove\").unwrap())\n }\n _ => {\n info!(\"Not calling any of add, list, remove\");\n Ok(())\n }\n }\n }\n\n fn shutdown(&self, rt : &Runtime) -> ModuleResult {\n Ok(())\n }\n}\n\nfn add<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let url = sub.value_of(\"url\").unwrap();\n let tags = get_tags(rt, sub);\n info!(\"Adding url '{}' with tags '{:?}'\", url, tags);\n\n Ok(())\n}\n\nfn list<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let tags = get_tags(rt, sub);\n let matcher = get_matcher(rt, sub);\n\n match matcher {\n Some(reg) => {\n info!(\"Listing urls with matcher '{}' and with tags {:?}\",\n reg.as_str(),\n tags);\n }\n None => {\n info!(\"Listing urls with tags {:?}\", tags);\n }\n }\n\n Ok(())\n}\n\nfn remove<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> ModuleResult {\n let tags = get_tags(rt, sub);\n let matcher = get_matcher(rt, sub);\n let id = get_id(rt, sub);\n\n match id {\n Some(idstr) => {\n info!(\"Removing urls with id 
'{}'\", idstr);\n }\n None => {\n match matcher {\n Some(reg) => {\n info!(\"Removing urls with matcher '{}' and with tags {:?}\",\n reg.as_str(), tags);\n }\n None => {\n info!(\"Listing urls with tags {:?}\", tags);\n }\n }\n }\n }\n\n Ok(())\n}\n\nfn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags|\n Some(tags.split(\",\")\n .into_iter()\n .map(|s| s.to_string())\n .filter(|e|\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n true\n } else {\n false\n }).collect()\n )\n ).or(Some(vec![])).unwrap()\n\n}\n\nfn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {\n debug!(\"Fetching matcher from commandline\");\n if let Some(s) = sub.value_of(\"match\") {\n if let Ok(r) = Regex::new(s) {\n return Some(r)\n } else {\n error!(\"Regex error, continuing without regex\");\n }\n }\n None\n\n}\n\nfn get_id<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<String> {\n debug!(\"Fetching id from commandline\");\n sub.value_of(\"id\").and_then(|s| Some(String::from(s)))\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::str::FromStr;\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error_exit;\nuse libimagcounter::counter::Counter;\n\npub fn create(rt: &Runtime) {\n rt.cli()\n .subcommand_matches(\"create\")\n .map(|scmd| {\n debug!(\"Found 'create' subcommand...\");\n\n let name = scmd.value_of(\"name\").unwrap(); \/\/ safe because clap enforces\n let init : i64 = scmd\n .value_of(\"initval\")\n .and_then(|i| FromStr::from_str(i).ok())\n .unwrap_or(0);\n let unit = scmd\n .value_of(\"unit\")\n .unwrap_or(\"unit\");\n\n match Counter::new(rt.store(), String::from(name), init) {\n Err(e) => {\n warn!(\"Could not create Counter '{}' with initial value '{} {}'\", name, init, unit);\n trace_error_exit(&e, 1);\n },\n Ok(_) => info!(\"Created Counter '{}' with initial value '{} {}'\", name, init, unit),\n }\n });\n}\n<commit_msg>changed default behavior from unit to None<commit_after>use std::str::FromStr;\n\nuse libimagrt::runtime::Runtime;\nuse libimagerror::trace::trace_error_exit;\nuse libimagcounter::counter::Counter;\nuse libimagcounter::counter::CounterUnit;\n\npub fn create(rt: &Runtime) {\n rt.cli()\n .subcommand_matches(\"create\")\n .map(|scmd| {\n debug!(\"Found 'create' subcommand...\");\n\n let name = scmd.value_of(\"name\").unwrap(); \/\/ safe because clap enforces\n let init : i64 = scmd\n .value_of(\"initval\")\n .and_then(|i| FromStr::from_str(i).ok())\n .unwrap_or(0);\n\n let unit = scmd\n .value_of(\"unit\")\n .map(CounterUnit::new); \n\n Counter::new(rt.store(), String::from(name), init)\n .and_then(|c| c.with_unit(unit))\n .unwrap_or_else(|e| {\n warn!(\"Could not create Counter '{}' with initial value '{}'\", name, init);\n trace_error_exit(&e, 1);\n });\n });\n}\n<|endoftext|>"} {"text":"<commit_before>use gl;\nuse libc;\nuse std::collections::HashMap;\nuse std::c_vec::CVec;\nuse std::fmt;\nuse std::mem;\nuse std::sync::Arc;\n\n\/\/\/ A list of verices loaded in the graphics card's memory.\npub struct VertexBuffer<T> {\n display: Arc<super::DisplayImpl>,\n id: gl::types::GLuint,\n elements_size: uint,\n bindings: VertexBindings,\n elements_count: uint,\n}\n\n\/\/\/ This public function is accessible from within `glium` but not for the user.\npub fn get_clone<T>(vb: &VertexBuffer<T>) -> (gl::types::GLuint, uint, VertexBindings) {\n (vb.id.clone(), vb.elements_size.clone(), vb.bindings.clone())\n}\n\nimpl<T: 
VertexFormat + 'static + Send> VertexBuffer<T> {\n \/\/\/ Builds a new vertex buffer.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)]\n \/\/\/ # extern crate glium_macros;\n \/\/\/ # extern crate glium;\n \/\/\/ # fn main() {\n \/\/\/ #[vertex_format]\n \/\/\/ struct Vertex {\n \/\/\/ position: [f32, ..3],\n \/\/\/ texcoords: [f32, ..2],\n \/\/\/ }\n \/\/\/\n \/\/\/ # let display: glium::Display = unsafe { std::mem::uninitialized() };\n \/\/\/ let vertex_buffer = glium::VertexBuffer::new(&display, vec![\n \/\/\/ Vertex { position: [0.0, 0.0, 0.0], texcoords: [0.0, 1.0] },\n \/\/\/ Vertex { position: [5.0, -3.0, 2.0], texcoords: [1.0, 0.0] },\n \/\/\/ ]);\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/ \n pub fn new(display: &super::Display, data: Vec<T>) -> VertexBuffer<T> {\n VertexBuffer::new_impl(display, data, false)\n }\n\n \/\/\/ Builds a new vertex buffer.\n \/\/\/\n \/\/\/ This function will create a buffer that has better performances when the it is modified\n \/\/\/ frequently.\n pub fn new_dynamic(display: &super::Display, data: Vec<T>) -> VertexBuffer<T> {\n VertexBuffer::new_impl(display, data, true)\n }\n\n fn new_impl(display: &super::Display, data: Vec<T>, dynamic: bool) -> VertexBuffer<T> {\n let bindings = VertexFormat::build_bindings(None::<T>);\n\n let elements_size = { use std::mem; mem::size_of::<T>() };\n let elements_count = data.len();\n let buffer_size = elements_count * elements_size as uint;\n\n let usage = if dynamic { gl::DYNAMIC_DRAW } else { gl::STATIC_DRAW };\n\n let (tx, rx) = channel();\n\n display.context.context.exec(proc(gl, state) {\n unsafe {\n let mut id: gl::types::GLuint = mem::uninitialized();\n gl.GenBuffers(1, &mut id);\n\n if gl.NamedBufferData.is_loaded() {\n gl.NamedBufferData(id, buffer_size as gl::types::GLsizei,\n data.as_ptr() as *const libc::c_void, usage);\n \n } else if gl.NamedBufferDataEXT.is_loaded() {\n gl.NamedBufferDataEXT(id, buffer_size as gl::types::GLsizeiptr,\n data.as_ptr() as *const libc::c_void, usage);\n\n } else {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n gl.BufferData(gl::ARRAY_BUFFER, buffer_size as gl::types::GLsizeiptr,\n data.as_ptr() as *const libc::c_void, usage);\n }\n\n tx.send(id);\n }\n });\n\n VertexBuffer {\n display: display.context.clone(),\n id: rx.recv(),\n elements_size: elements_size,\n bindings: bindings,\n elements_count: elements_count,\n }\n }\n\n \/\/\/ Maps the buffer to allow write access to it.\n pub fn map<'a>(&'a mut self) -> Mapping<'a, T> {\n let (tx, rx) = channel();\n let id = self.id.clone();\n let elements_count = self.elements_count.clone();\n\n self.display.context.exec(proc(gl, state) {\n let ptr = {\n if gl.MapNamedBuffer.is_loaded() {\n gl.MapNamedBuffer(id, gl::READ_WRITE)\n } else {\n if state.array_buffer_binding != Some(id) {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n }\n\n gl.MapBuffer(gl::ARRAY_BUFFER, gl::READ_WRITE)\n }\n };\n\n tx.send(ptr as *mut T);\n });\n\n Mapping {\n buffer: self,\n data: unsafe { CVec::new(rx.recv(), elements_count) },\n }\n }\n}\n\nimpl<T> fmt::Show for VertexBuffer<T> {\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::FormatError> {\n (format!(\"VertexBuffer #{}\", self.id)).fmt(formatter)\n }\n}\n\n#[unsafe_destructor]\nimpl<T> Drop for VertexBuffer<T> {\n fn drop(&mut self) {\n let id = self.id.clone();\n self.display.context.exec(proc(gl, state) {\n if state.array_buffer_binding == Some(id) {\n 
state.array_buffer_binding = None;\n }\n\n if state.element_array_buffer_binding == Some(id) {\n state.element_array_buffer_binding = None;\n }\n\n unsafe { gl.DeleteBuffers(1, [ id ].as_ptr()); }\n });\n }\n}\n\n\/\/\/ For each binding, the data type, number of elements, and offset.\n\/\/\/ Includes the total size.\n#[doc(hidden)]\npub type VertexBindings = HashMap<String, (gl::types::GLenum, gl::types::GLint, uint)>;\n\n\/\/\/ Trait for structures that represent a vertex.\n#[doc(hidden)]\npub trait VertexFormat: Copy {\n fn build_bindings(Option<Self>) -> VertexBindings;\n}\n\n\/\/\/ A mapping of a buffer.\npub struct Mapping<'b, T> {\n buffer: &'b mut VertexBuffer<T>,\n data: CVec<T>,\n}\n\n#[unsafe_destructor]\nimpl<'a, T> Drop for Mapping<'a, T> {\n fn drop(&mut self) {\n let id = self.buffer.id.clone();\n self.buffer.display.context.exec(proc(gl, state) {\n if gl.UnmapNamedBuffer.is_loaded() {\n gl.UnmapNamedBuffer(id);\n\n } else {\n if state.array_buffer_binding != Some(id) {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n }\n\n gl.UnmapBuffer(gl::ARRAY_BUFFER);\n }\n });\n }\n}\n\nimpl<'a, T> Deref<[T]> for Mapping<'a, T> {\n fn deref<'b>(&'b self) -> &'b [T] {\n self.data.as_slice()\n }\n}\n\nimpl<'a, T> DerefMut<[T]> for Mapping<'a, T> {\n fn deref_mut<'b>(&'b mut self) -> &'b mut [T] {\n self.data.as_mut_slice()\n }\n}\n<commit_msg>Hide vertex_buffer::get_clone function<commit_after>use gl;\nuse libc;\nuse std::collections::HashMap;\nuse std::c_vec::CVec;\nuse std::fmt;\nuse std::mem;\nuse std::sync::Arc;\n\n\/\/\/ A list of verices loaded in the graphics card's memory.\npub struct VertexBuffer<T> {\n display: Arc<super::DisplayImpl>,\n id: gl::types::GLuint,\n elements_size: uint,\n bindings: VertexBindings,\n elements_count: uint,\n}\n\n\/\/\/ This public function is accessible from within `glium` but not for the user.\n#[doc(hidden)]\npub fn get_clone<T>(vb: &VertexBuffer<T>) -> (gl::types::GLuint, uint, VertexBindings) {\n (vb.id.clone(), vb.elements_size.clone(), vb.bindings.clone())\n}\n\nimpl<T: VertexFormat + 'static + Send> VertexBuffer<T> {\n \/\/\/ Builds a new vertex buffer.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)]\n \/\/\/ # extern crate glium_macros;\n \/\/\/ # extern crate glium;\n \/\/\/ # fn main() {\n \/\/\/ #[vertex_format]\n \/\/\/ struct Vertex {\n \/\/\/ position: [f32, ..3],\n \/\/\/ texcoords: [f32, ..2],\n \/\/\/ }\n \/\/\/\n \/\/\/ # let display: glium::Display = unsafe { std::mem::uninitialized() };\n \/\/\/ let vertex_buffer = glium::VertexBuffer::new(&display, vec![\n \/\/\/ Vertex { position: [0.0, 0.0, 0.0], texcoords: [0.0, 1.0] },\n \/\/\/ Vertex { position: [5.0, -3.0, 2.0], texcoords: [1.0, 0.0] },\n \/\/\/ ]);\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/ \n pub fn new(display: &super::Display, data: Vec<T>) -> VertexBuffer<T> {\n VertexBuffer::new_impl(display, data, false)\n }\n\n \/\/\/ Builds a new vertex buffer.\n \/\/\/\n \/\/\/ This function will create a buffer that has better performances when the it is modified\n \/\/\/ frequently.\n pub fn new_dynamic(display: &super::Display, data: Vec<T>) -> VertexBuffer<T> {\n VertexBuffer::new_impl(display, data, true)\n }\n\n fn new_impl(display: &super::Display, data: Vec<T>, dynamic: bool) -> VertexBuffer<T> {\n let bindings = VertexFormat::build_bindings(None::<T>);\n\n let elements_size = { use std::mem; mem::size_of::<T>() };\n let elements_count = data.len();\n let buffer_size = elements_count 
* elements_size as uint;\n\n let usage = if dynamic { gl::DYNAMIC_DRAW } else { gl::STATIC_DRAW };\n\n let (tx, rx) = channel();\n\n display.context.context.exec(proc(gl, state) {\n unsafe {\n let mut id: gl::types::GLuint = mem::uninitialized();\n gl.GenBuffers(1, &mut id);\n\n if gl.NamedBufferData.is_loaded() {\n gl.NamedBufferData(id, buffer_size as gl::types::GLsizei,\n data.as_ptr() as *const libc::c_void, usage);\n \n } else if gl.NamedBufferDataEXT.is_loaded() {\n gl.NamedBufferDataEXT(id, buffer_size as gl::types::GLsizeiptr,\n data.as_ptr() as *const libc::c_void, usage);\n\n } else {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n gl.BufferData(gl::ARRAY_BUFFER, buffer_size as gl::types::GLsizeiptr,\n data.as_ptr() as *const libc::c_void, usage);\n }\n\n tx.send(id);\n }\n });\n\n VertexBuffer {\n display: display.context.clone(),\n id: rx.recv(),\n elements_size: elements_size,\n bindings: bindings,\n elements_count: elements_count,\n }\n }\n\n \/\/\/ Maps the buffer to allow write access to it.\n pub fn map<'a>(&'a mut self) -> Mapping<'a, T> {\n let (tx, rx) = channel();\n let id = self.id.clone();\n let elements_count = self.elements_count.clone();\n\n self.display.context.exec(proc(gl, state) {\n let ptr = {\n if gl.MapNamedBuffer.is_loaded() {\n gl.MapNamedBuffer(id, gl::READ_WRITE)\n } else {\n if state.array_buffer_binding != Some(id) {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n }\n\n gl.MapBuffer(gl::ARRAY_BUFFER, gl::READ_WRITE)\n }\n };\n\n tx.send(ptr as *mut T);\n });\n\n Mapping {\n buffer: self,\n data: unsafe { CVec::new(rx.recv(), elements_count) },\n }\n }\n}\n\nimpl<T> fmt::Show for VertexBuffer<T> {\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::FormatError> {\n (format!(\"VertexBuffer #{}\", self.id)).fmt(formatter)\n }\n}\n\n#[unsafe_destructor]\nimpl<T> Drop for VertexBuffer<T> {\n fn drop(&mut self) {\n let id = self.id.clone();\n self.display.context.exec(proc(gl, state) {\n if state.array_buffer_binding == Some(id) {\n state.array_buffer_binding = None;\n }\n\n if state.element_array_buffer_binding == Some(id) {\n state.element_array_buffer_binding = None;\n }\n\n unsafe { gl.DeleteBuffers(1, [ id ].as_ptr()); }\n });\n }\n}\n\n\/\/\/ For each binding, the data type, number of elements, and offset.\n\/\/\/ Includes the total size.\n#[doc(hidden)]\npub type VertexBindings = HashMap<String, (gl::types::GLenum, gl::types::GLint, uint)>;\n\n\/\/\/ Trait for structures that represent a vertex.\n#[doc(hidden)]\npub trait VertexFormat: Copy {\n fn build_bindings(Option<Self>) -> VertexBindings;\n}\n\n\/\/\/ A mapping of a buffer.\npub struct Mapping<'b, T> {\n buffer: &'b mut VertexBuffer<T>,\n data: CVec<T>,\n}\n\n#[unsafe_destructor]\nimpl<'a, T> Drop for Mapping<'a, T> {\n fn drop(&mut self) {\n let id = self.buffer.id.clone();\n self.buffer.display.context.exec(proc(gl, state) {\n if gl.UnmapNamedBuffer.is_loaded() {\n gl.UnmapNamedBuffer(id);\n\n } else {\n if state.array_buffer_binding != Some(id) {\n gl.BindBuffer(gl::ARRAY_BUFFER, id);\n state.array_buffer_binding = Some(id);\n }\n\n gl.UnmapBuffer(gl::ARRAY_BUFFER);\n }\n });\n }\n}\n\nimpl<'a, T> Deref<[T]> for Mapping<'a, T> {\n fn deref<'b>(&'b self) -> &'b [T] {\n self.data.as_slice()\n }\n}\n\nimpl<'a, T> DerefMut<[T]> for Mapping<'a, T> {\n fn deref_mut<'b>(&'b mut self) -> &'b mut [T] {\n self.data.as_mut_slice()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project 
Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait Foo<A=Self> {\n fn foo();\n}\n\npub trait Bar<X=usize, A=Self> {\n fn foo();\n}\n\nfn main() {\n let a = Foo::lol();\n \/\/~^ ERROR no associated item named\n let b = Foo::<_>::lol();\n \/\/~^ ERROR no associated item named\n let c = Bar::lol();\n \/\/~^ ERROR no associated item named\n let d = Bar::<usize, _>::lol();\n \/\/~^ ERROR no associated item named\n}\n<commit_msg>Added test for partially supplied type params in which remaining reference non-existant self<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait Foo<A=Self> {\n fn foo();\n}\n\npub trait Bar<X=usize, A=Self> {\n fn foo();\n}\n\nfn main() {\n let a = Foo::lol();\n \/\/~^ ERROR no associated item named\n let b = Foo::<_>::lol();\n \/\/~^ ERROR no associated item named\n let c = Bar::lol();\n \/\/~^ ERROR no associated item named\n let d = Bar::<usize, _>::lol();\n \/\/~^ ERROR no associated item named\n let e = Bar::<usize>::lol();\n \/\/~^ ERROR must be explicitly specified\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More solutions<commit_after>\/\/ https:\/\/leetcode.cn\/problems\/encode-and-decode-strings\/\n\nstruct Codec {}\n\n\/**\n * `&self` means the method takes an immutable reference.\n * If you need a mutable reference, change it to `&mut self` instead.\n *\/\n\/\/ This solution aims at producing human-readable encoded strings.\n\/\/ A faster alternative is to encode the lengths of the original strings.\nimpl Codec {\n fn new() -> Self {\n Codec {}\n }\n\n fn encode(&self, strs: Vec<String>) -> String {\n let mut chars = Vec::new();\n for s in strs {\n for ch in s.chars() {\n match ch {\n '\\0' => {\n chars.push('\\\\');\n chars.push('0');\n }\n ';' => {\n chars.push('\\\\');\n chars.push(';');\n }\n '\\\\' => {\n chars.push('\\\\');\n chars.push('\\\\');\n }\n _ => {\n chars.push(ch);\n }\n }\n }\n chars.push(';');\n }\n\n chars.into_iter().collect()\n }\n\n fn decode(&self, s: String) -> Vec<String> {\n let mut result: Vec<String> = Vec::new();\n let mut escape = false;\n let mut chars = Vec::new();\n for ch in s.chars() {\n match ch {\n '\\\\' => {\n if escape {\n chars.push('\\\\');\n }\n escape = !escape;\n }\n ';' => {\n if escape {\n chars.push(';');\n escape = false;\n } else {\n result.push(chars.into_iter().collect());\n chars = Vec::new();\n }\n }\n '0' => {\n if escape {\n chars.push('\\0');\n escape = false;\n } else {\n chars.push('0');\n }\n }\n _ => {\n escape = false;\n chars.push(ch);\n }\n }\n }\n if !chars.is_empty() {\n result.push(chars.into_iter().collect());\n }\n return result;\n }\n}\n\n\/**\n * Your Codec object will be instantiated and 
called as such:\n * let obj = Codec::new();\n * let s: String = obj.encode(strs);\n * let ans: VecVec<String> = obj.decode(s);\n *\/\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_encode_1() {\n let strs = vec![\"hello;world\", \"foo;bar\\0\"]\n .into_iter()\n .map(|s| s.to_string())\n .collect();\n let codec = Codec::new();\n assert_eq!(\n codec.encode(strs),\n \"hello\\\\;world;foo\\\\;bar\\\\0;\".to_string()\n );\n }\n\n #[test]\n fn test_decode_1() {\n let strs: Vec<String> = vec![\"hello;world\", \"foo;bar\\0\"]\n .into_iter()\n .map(|s| s.to_string())\n .collect();\n let codec = Codec::new();\n assert_eq!(codec.decode(codec.encode(strs.clone())), strs);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #79514 - Julian-Wollersberger:order-dependent-bounds, r=Mark-Simulacrum<commit_after>\/\/ check-pass\n\n\/\/ From https:\/\/github.com\/rust-lang\/rust\/issues\/54121\/\n\/\/\n\/\/ Whether the code compiled depended on the order of the trait bounds in\n\/\/ `type T: Tr<u8, u8> + Tr<u16, u16>`\n\/\/ But both should compile as order shouldn't matter.\n\ntrait Tr<A, B> {\n fn exec(a: A, b: B);\n}\n\ntrait P {\n \/\/ This compiled successfully\n type T: Tr<u16, u16> + Tr<u8, u8>;\n}\n\ntrait Q {\n \/\/ This didn't compile\n type T: Tr<u8, u8> + Tr<u16, u16>;\n}\n\n#[allow(dead_code)]\nfn f<S: P>() {\n <S as P>::T::exec(0u8, 0u8)\n}\n\n#[allow(dead_code)]\nfn g<S: Q>() {\n \/\/ A mismatched types error was emitted on this line.\n <S as Q>::T::exec(0u8, 0u8)\n}\n\n\/\/ Another reproduction of the same issue\ntrait Trait {\n type Type: Into<Self::Type1> + Into<Self::Type2> + Copy;\n type Type1;\n type Type2;\n}\n\n#[allow(dead_code)]\nfn foo<T: Trait>(x: T::Type) {\n let _1: T::Type1 = x.into();\n let _2: T::Type2 = x.into();\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before>use core::slice;\nuse core::str;\nuse collections::range::RangeArgument;\nuse core::ops::Range;\nuse core::cmp::{max, min};\n\n\/\/\/ Bounded slice abstraction\n\/\/\/\n\/\/\/ # Code Migration\n\/\/\/\n\/\/\/ `foo[a..b]` => `foo.get_slice(a..b)`\n\/\/\/\n\/\/\/ `foo[a..]` => `foo.get_slice(a..)`\n\/\/\/\n\/\/\/ `foo[..b]` => `foo.get_slice(..b)`\n\/\/\/\npub trait GetSlice {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self;\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self;\n}\n\nfn bound<T: RangeArgument<usize>>(len: usize, a: T) -> Range<usize> {\n let start = min(a.start().map(|&x| x).unwrap_or(0), len);\n let end = min(a.end().map(|&x| x).unwrap_or(len), len);\n\n if start <= end {\n start..end\n } else {\n 0..0\n }\n}\n\nimpl GetSlice for str {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n\nimpl<T> GetSlice for [T] {\n fn get_slice<U: RangeArgument<usize>>(&self, a: U) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<U: RangeArgument<usize>>(&mut self, a: U) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n<commit_msg>Cleanup get_slice.rs module<commit_after>use collections::range::RangeArgument;\nuse core::ops::Range;\nuse core::cmp;\n\n\/\/\/ Bounded slice abstraction\n\/\/\/\n\/\/\/ # Code Migration\n\/\/\/\n\/\/\/ `foo[a..b]` => `foo.get_slice(a..b)`\n\/\/\/\n\/\/\/ `foo[a..]` => `foo.get_slice(a..)`\n\/\/\/\n\/\/\/ `foo[..b]` => `foo.get_slice(..b)`\n\/\/\/\npub trait GetSlice {\n fn 
get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self;\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self;\n}\n\nfn bound<T: RangeArgument<usize>>(len: usize, a: T) -> Range<usize> {\n let start = cmp::min(a.start().map(|&x| x).unwrap_or(0), len);\n let end = cmp::min(a.end().map(|&x| x).unwrap_or(len), len);\n\n if start <= end {\n start..end\n } else {\n 0..0\n }\n}\n\nimpl GetSlice for str {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n\nimpl<T> GetSlice for [T] {\n fn get_slice<U: RangeArgument<usize>>(&self, a: U) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<U: RangeArgument<usize>>(&mut self, a: U) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: import anyhow::Error where it is needed<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! An engine for handling edits (possibly from async sources) and undo. This\n\/\/! module actually implements a mini Conflict-free Replicated Data Type, but\n\/\/! is considerably simpler than the usual CRDT implementation techniques,\n\/\/! because all operations are serialized in this central engine.\n\nuse std::borrow::Cow;\nuse std::collections::BTreeSet;\nuse std;\n\nuse rope::{Rope, RopeInfo};\nuse subset::Subset;\nuse delta::Delta;\n\npub struct Engine {\n rev_id_counter: usize,\n union_str: Rope,\n revs: Vec<Revision>,\n}\n\nstruct Revision {\n rev_id: usize,\n from_union: Subset,\n union_str_len: usize,\n edit: Contents,\n}\n\nuse self::Contents::*;\n\nenum Contents {\n Edit {\n priority: usize,\n undo_group: usize,\n inserts: Subset,\n deletes: Subset,\n },\n Undo {\n groups: BTreeSet<usize>, \/\/ set of undo_group id's\n }\n}\n\nimpl Engine {\n pub fn new(initial_contents: Rope) -> Engine {\n let rev = Revision {\n rev_id: 0,\n from_union: Subset::default(),\n union_str_len: initial_contents.len(),\n edit: Undo { groups: BTreeSet::default() },\n };\n Engine {\n rev_id_counter: 1,\n union_str: initial_contents,\n revs: vec![rev],\n }\n }\n\n fn get_current_undo(&self) -> Option<&BTreeSet<usize>> {\n for rev in self.revs.iter().rev() {\n if let Undo { ref groups } = rev.edit {\n return Some(&groups);\n }\n }\n None\n }\n\n fn find_rev(&self, rev_id: usize) -> Option<usize> {\n for (i, rev) in self.revs.iter().enumerate().rev() {\n if rev.rev_id == rev_id {\n return Some(i)\n }\n }\n None\n }\n\n fn get_rev(&self, rev_index: usize) -> Rope {\n let mut from_union = Cow::Borrowed(&self.revs[rev_index].from_union);\n for rev in &self.revs[rev_index + 1..] {\n if let Edit { ref inserts, .. 
} = rev.edit {\n if !inserts.is_trivial() {\n from_union = Cow::Owned(from_union.transform_intersect(inserts));\n }\n }\n }\n from_union.apply(&self.union_str)\n }\n\n \/\/\/ Get revision id of head revision.\n pub fn get_head_rev_id(&self) -> usize {\n self.revs.last().unwrap().rev_id\n }\n\n \/\/\/ Get text of head revision.\n pub fn get_head(&self) -> Rope {\n self.get_rev(self.revs.len() - 1)\n }\n\n \/\/\/ A delta that, when applied to previous head, results in the current head. Panics\n \/\/\/ if there is not at least one edit.\n pub fn delta_head(&self) -> Delta<RopeInfo> {\n let mut prev_from_union = Cow::Borrowed(&self.revs[self.revs.len() - 2].from_union);\n let rev = &self.revs.last().unwrap();\n if let Edit { ref inserts, .. } = rev.edit {\n if !inserts.is_trivial() {\n prev_from_union = Cow::Owned(prev_from_union.transform_intersect(inserts));\n }\n }\n Delta::synthesize(&self.union_str, &prev_from_union, &rev.from_union)\n }\n\n fn mk_new_rev(&self, new_priority: usize, undo_group: usize,\n base_rev: usize, delta: Delta<RopeInfo>) -> (Revision, Rope) {\n let ix = self.find_rev(base_rev).expect(\"base revision not found\");\n let rev = &self.revs[ix];\n let (ins_delta, deletes) = delta.factor();\n let mut union_ins_delta = ins_delta.transform_expand(&rev.from_union, rev.union_str_len, true);\n let mut new_deletes = deletes.transform_expand(&rev.from_union);\n for r in &self.revs[ix + 1..] {\n if let Edit { priority, ref inserts, .. } = rev.edit {\n if !inserts.is_trivial() {\n let after = new_priority >= priority; \/\/ should never be ==\n union_ins_delta = union_ins_delta.transform_expand(&inserts, r.union_str_len, after);\n new_deletes = new_deletes.transform_expand(&inserts);\n }\n }\n }\n let new_inserts = union_ins_delta.invert_insert();\n let new_union_str = union_ins_delta.apply(&self.union_str);\n let undone = self.get_current_undo().map_or(false, |undos| undos.contains(&undo_group));\n let mut new_from_union = Cow::Borrowed(&rev.from_union);\n {\n let edit = if undone { &new_inserts } else { &new_deletes };\n if !edit.is_trivial() {\n new_from_union = Cow::Owned(new_from_union.intersect(edit));\n }\n }\n (Revision {\n rev_id: self.rev_id_counter,\n from_union: new_from_union.into_owned(),\n union_str_len: new_union_str.len(),\n edit: Edit {\n priority: new_priority,\n undo_group: undo_group,\n inserts: new_inserts,\n deletes: new_deletes,\n }\n }, new_union_str)\n }\n\n pub fn edit_rev(&mut self, priority: usize, undo_group: usize,\n base_rev: usize, delta: Delta<RopeInfo>) {\n let (new_rev, new_union_str) = self.mk_new_rev(priority, undo_group, base_rev, delta);\n self.rev_id_counter += 1;\n self.revs.push(new_rev);\n self.union_str = new_union_str;\n }\n\n \/\/ This computes undo all the way from the beginning. An optimization would be to not\n \/\/ recompute the prefix up to where the history diverges, but it's not clear that's\n \/\/ even worth the code complexity.\n fn compute_undo(&self, groups: BTreeSet<usize>) -> Revision {\n let mut from_union = Cow::Borrowed(&self.revs[0].from_union);\n for rev in &self.revs[1..] {\n if let Edit { ref undo_group, ref inserts, ref deletes, .. 
} = rev.edit {\n if groups.contains(undo_group) {\n if !inserts.is_trivial() {\n from_union = Cow::Owned(from_union.transform_intersect(inserts));\n }\n } else {\n if !inserts.is_trivial() {\n from_union = Cow::Owned(from_union.transform_expand(inserts));\n }\n if !deletes.is_trivial() {\n from_union = Cow::Owned(from_union.intersect(deletes));\n }\n }\n }\n }\n Revision {\n rev_id: self.rev_id_counter,\n from_union: from_union.into_owned(),\n union_str_len: self.union_str.len(),\n edit: Undo {\n groups: groups\n }\n }\n }\n\n pub fn undo(&mut self, groups: BTreeSet<usize>) {\n let new_rev = self.compute_undo(groups);\n self.revs.push(new_rev);\n self.rev_id_counter += 1;\n }\n\n \/\/ Note: this function would need some work to handle retaining arbitrary revisions,\n \/\/ partly because the reachability calculation would become more complicated (a\n \/\/ revision might hold content from an undo group that would otherwise be gc'ed),\n \/\/ and partly because you need to retain more undo history, to supply input to the\n \/\/ reachability calculation.\n \/\/\n \/\/ Thus, it's easiest to defer gc to when all plugins quiesce, but it's certainly\n \/\/ possible to fix it so that's not necessary.\n pub fn gc(&mut self, gc_groups: &BTreeSet<usize>) {\n let mut gc_dels = Subset::default();\n \/\/ TODO: want to let caller retain more rev_id's.\n let mut retain_revs = BTreeSet::new();\n if let Some(last) = self.revs.last() {\n retain_revs.insert(last.rev_id);\n }\n {\n let cur_undo = self.get_current_undo();\n for rev in &self.revs {\n if let Edit { ref undo_group, ref inserts, ref deletes, .. } = rev.edit {\n if !retain_revs.contains(&rev.rev_id) && gc_groups.contains(undo_group) {\n if cur_undo.map_or(false, |undos| undos.contains(undo_group)) {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_intersect(inserts);\n }\n } else {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_expand(inserts);\n }\n if !deletes.is_trivial() {\n gc_dels = gc_dels.intersect(deletes);\n }\n }\n } else {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_expand(inserts);\n }\n }\n }\n }\n }\n if !gc_dels.is_trivial() {\n self.union_str = gc_dels.apply(&self.union_str);\n }\n let old_revs = std::mem::replace(&mut self.revs, Vec::new());\n for rev in old_revs.into_iter().rev() {\n match rev.edit {\n Edit { priority, undo_group, inserts, deletes } => {\n let new_gc_dels = if inserts.is_trivial() {\n None\n } else {\n Some(inserts.transform_shrink(&gc_dels))\n };\n if retain_revs.contains(&rev.rev_id) || !gc_groups.contains(&undo_group) {\n let (inserts, deletes, from_union, len) = if gc_dels.is_trivial() {\n (inserts, deletes, rev.from_union, rev.union_str_len)\n } else {\n (gc_dels.transform_shrink(&inserts),\n gc_dels.transform_shrink(&deletes),\n gc_dels.transform_shrink(&rev.from_union),\n gc_dels.len(rev.union_str_len))\n };\n self.revs.push(Revision {\n rev_id: rev.rev_id,\n from_union: from_union,\n union_str_len: len,\n edit: Edit {\n priority: priority,\n undo_group: undo_group,\n inserts: inserts,\n deletes: deletes,\n }\n });\n }\n if let Some(new_gc_dels) = new_gc_dels {\n gc_dels = new_gc_dels;\n }\n }\n Undo { groups } => {\n \/\/ We're super-aggressive about dropping these; after gc, the history\n \/\/ of which undos were used to compute from_union in edits may be lost.\n if retain_revs.contains(&rev.rev_id) {\n let (from_union, len) = if gc_dels.is_trivial() {\n (rev.from_union, rev.union_str_len)\n } else {\n (gc_dels.transform_shrink(&rev.from_union),\n 
gc_dels.len(rev.union_str_len))\n };\n self.revs.push(Revision {\n rev_id: rev.rev_id,\n from_union: from_union,\n union_str_len: len,\n edit: Undo {\n groups: &groups - gc_groups,\n }\n })\n }\n }\n }\n }\n self.revs.reverse();\n }\n}\n<commit_msg>Fix small breakage caused by gc<commit_after>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! An engine for handling edits (possibly from async sources) and undo. This\n\/\/! module actually implements a mini Conflict-free Replicated Data Type, but\n\/\/! is considerably simpler than the usual CRDT implementation techniques,\n\/\/! because all operations are serialized in this central engine.\n\nuse std::borrow::Cow;\nuse std::collections::BTreeSet;\nuse std;\n\nuse rope::{Rope, RopeInfo};\nuse subset::Subset;\nuse delta::Delta;\n\npub struct Engine {\n rev_id_counter: usize,\n union_str: Rope,\n revs: Vec<Revision>,\n}\n\nstruct Revision {\n rev_id: usize,\n from_union: Subset,\n union_str_len: usize,\n edit: Contents,\n}\n\nuse self::Contents::*;\n\nenum Contents {\n Edit {\n priority: usize,\n undo_group: usize,\n inserts: Subset,\n deletes: Subset,\n },\n Undo {\n groups: BTreeSet<usize>, \/\/ set of undo_group id's\n }\n}\n\nimpl Engine {\n pub fn new(initial_contents: Rope) -> Engine {\n let rev = Revision {\n rev_id: 0,\n from_union: Subset::default(),\n union_str_len: initial_contents.len(),\n edit: Undo { groups: BTreeSet::default() },\n };\n Engine {\n rev_id_counter: 1,\n union_str: initial_contents,\n revs: vec![rev],\n }\n }\n\n fn get_current_undo(&self) -> Option<&BTreeSet<usize>> {\n for rev in self.revs.iter().rev() {\n if let Undo { ref groups } = rev.edit {\n return Some(&groups);\n }\n }\n None\n }\n\n fn find_rev(&self, rev_id: usize) -> Option<usize> {\n for (i, rev) in self.revs.iter().enumerate().rev() {\n if rev.rev_id == rev_id {\n return Some(i)\n }\n }\n None\n }\n\n fn get_rev(&self, rev_index: usize) -> Rope {\n let mut from_union = Cow::Borrowed(&self.revs[rev_index].from_union);\n for rev in &self.revs[rev_index + 1..] {\n if let Edit { ref inserts, .. } = rev.edit {\n if !inserts.is_trivial() {\n from_union = Cow::Owned(from_union.transform_intersect(inserts));\n }\n }\n }\n from_union.apply(&self.union_str)\n }\n\n \/\/\/ Get revision id of head revision.\n pub fn get_head_rev_id(&self) -> usize {\n self.revs.last().unwrap().rev_id\n }\n\n \/\/\/ Get text of head revision.\n pub fn get_head(&self) -> Rope {\n self.get_rev(self.revs.len() - 1)\n }\n\n \/\/\/ A delta that, when applied to previous head, results in the current head. Panics\n \/\/\/ if there is not at least one edit.\n pub fn delta_head(&self) -> Delta<RopeInfo> {\n let mut prev_from_union = Cow::Borrowed(&self.revs[self.revs.len() - 2].from_union);\n let rev = &self.revs.last().unwrap();\n if let Edit { ref inserts, .. 
} = rev.edit {\n if !inserts.is_trivial() {\n prev_from_union = Cow::Owned(prev_from_union.transform_intersect(inserts));\n }\n }\n Delta::synthesize(&self.union_str, &prev_from_union, &rev.from_union)\n }\n\n fn mk_new_rev(&self, new_priority: usize, undo_group: usize,\n base_rev: usize, delta: Delta<RopeInfo>) -> (Revision, Rope) {\n let ix = self.find_rev(base_rev).expect(\"base revision not found\");\n let rev = &self.revs[ix];\n let (ins_delta, deletes) = delta.factor();\n let mut union_ins_delta = ins_delta.transform_expand(&rev.from_union, rev.union_str_len, true);\n let mut new_deletes = deletes.transform_expand(&rev.from_union);\n for r in &self.revs[ix + 1..] {\n if let Edit { priority, ref inserts, .. } = rev.edit {\n if !inserts.is_trivial() {\n let after = new_priority >= priority; \/\/ should never be ==\n union_ins_delta = union_ins_delta.transform_expand(&inserts, r.union_str_len, after);\n new_deletes = new_deletes.transform_expand(&inserts);\n }\n }\n }\n let new_inserts = union_ins_delta.invert_insert();\n let new_union_str = union_ins_delta.apply(&self.union_str);\n let undone = self.get_current_undo().map_or(false, |undos| undos.contains(&undo_group));\n let mut new_from_union = Cow::Borrowed(&rev.from_union);\n {\n let edit = if undone { &new_inserts } else { &new_deletes };\n if !edit.is_trivial() {\n new_from_union = Cow::Owned(new_from_union.intersect(edit));\n }\n }\n (Revision {\n rev_id: self.rev_id_counter,\n from_union: new_from_union.into_owned(),\n union_str_len: new_union_str.len(),\n edit: Edit {\n priority: new_priority,\n undo_group: undo_group,\n inserts: new_inserts,\n deletes: new_deletes,\n }\n }, new_union_str)\n }\n\n pub fn edit_rev(&mut self, priority: usize, undo_group: usize,\n base_rev: usize, delta: Delta<RopeInfo>) {\n let (new_rev, new_union_str) = self.mk_new_rev(priority, undo_group, base_rev, delta);\n self.rev_id_counter += 1;\n self.revs.push(new_rev);\n self.union_str = new_union_str;\n }\n\n \/\/ This computes undo all the way from the beginning. An optimization would be to not\n \/\/ recompute the prefix up to where the history diverges, but it's not clear that's\n \/\/ even worth the code complexity.\n fn compute_undo(&self, groups: BTreeSet<usize>) -> Revision {\n let mut from_union = Subset::default();\n for rev in &self.revs {\n if let Edit { ref undo_group, ref inserts, ref deletes, .. 
} = rev.edit {\n if groups.contains(undo_group) {\n if !inserts.is_trivial() {\n from_union = from_union.transform_intersect(inserts);\n }\n } else {\n if !inserts.is_trivial() {\n from_union = from_union.transform_expand(inserts);\n }\n if !deletes.is_trivial() {\n from_union = from_union.intersect(deletes);\n }\n }\n }\n }\n Revision {\n rev_id: self.rev_id_counter,\n from_union: from_union,\n union_str_len: self.union_str.len(),\n edit: Undo {\n groups: groups\n }\n }\n }\n\n pub fn undo(&mut self, groups: BTreeSet<usize>) {\n let new_rev = self.compute_undo(groups);\n self.revs.push(new_rev);\n self.rev_id_counter += 1;\n }\n\n \/\/ Note: this function would need some work to handle retaining arbitrary revisions,\n \/\/ partly because the reachability calculation would become more complicated (a\n \/\/ revision might hold content from an undo group that would otherwise be gc'ed),\n \/\/ and partly because you need to retain more undo history, to supply input to the\n \/\/ reachability calculation.\n \/\/\n \/\/ Thus, it's easiest to defer gc to when all plugins quiesce, but it's certainly\n \/\/ possible to fix it so that's not necessary.\n pub fn gc(&mut self, gc_groups: &BTreeSet<usize>) {\n let mut gc_dels = Subset::default();\n \/\/ TODO: want to let caller retain more rev_id's.\n let mut retain_revs = BTreeSet::new();\n if let Some(last) = self.revs.last() {\n retain_revs.insert(last.rev_id);\n }\n {\n let cur_undo = self.get_current_undo();\n for rev in &self.revs {\n if let Edit { ref undo_group, ref inserts, ref deletes, .. } = rev.edit {\n if !retain_revs.contains(&rev.rev_id) && gc_groups.contains(undo_group) {\n if cur_undo.map_or(false, |undos| undos.contains(undo_group)) {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_intersect(inserts);\n }\n } else {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_expand(inserts);\n }\n if !deletes.is_trivial() {\n gc_dels = gc_dels.intersect(deletes);\n }\n }\n } else {\n if !inserts.is_trivial() {\n gc_dels = gc_dels.transform_expand(inserts);\n }\n }\n }\n }\n }\n if !gc_dels.is_trivial() {\n self.union_str = gc_dels.apply(&self.union_str);\n }\n let old_revs = std::mem::replace(&mut self.revs, Vec::new());\n for rev in old_revs.into_iter().rev() {\n match rev.edit {\n Edit { priority, undo_group, inserts, deletes } => {\n let new_gc_dels = if inserts.is_trivial() {\n None\n } else {\n Some(inserts.transform_shrink(&gc_dels))\n };\n if retain_revs.contains(&rev.rev_id) || !gc_groups.contains(&undo_group) {\n let (inserts, deletes, from_union, len) = if gc_dels.is_trivial() {\n (inserts, deletes, rev.from_union, rev.union_str_len)\n } else {\n (gc_dels.transform_shrink(&inserts),\n gc_dels.transform_shrink(&deletes),\n gc_dels.transform_shrink(&rev.from_union),\n gc_dels.len(rev.union_str_len))\n };\n self.revs.push(Revision {\n rev_id: rev.rev_id,\n from_union: from_union,\n union_str_len: len,\n edit: Edit {\n priority: priority,\n undo_group: undo_group,\n inserts: inserts,\n deletes: deletes,\n }\n });\n }\n if let Some(new_gc_dels) = new_gc_dels {\n gc_dels = new_gc_dels;\n }\n }\n Undo { groups } => {\n \/\/ We're super-aggressive about dropping these; after gc, the history\n \/\/ of which undos were used to compute from_union in edits may be lost.\n if retain_revs.contains(&rev.rev_id) {\n let (from_union, len) = if gc_dels.is_trivial() {\n (rev.from_union, rev.union_str_len)\n } else {\n (gc_dels.transform_shrink(&rev.from_union),\n gc_dels.len(rev.union_str_len))\n };\n self.revs.push(Revision {\n 
rev_id: rev.rev_id,\n from_union: from_union,\n union_str_len: len,\n edit: Undo {\n groups: &groups - gc_groups,\n }\n })\n }\n }\n }\n }\n self.revs.reverse();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>mark `f32` and `f64` as requiring libc for now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>feat(tmux\/window): Store the layout of the window<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! BookmarkCollection module\n\/\/!\n\/\/! A BookmarkCollection is nothing more than a simple store entry. One can simply call functions\n\/\/! from the libimagentrylink::external::ExternalLinker trait on this to generate external links.\n\/\/!\n\/\/! The BookmarkCollection type offers helper functions to get all links or such things.\nuse std::ops::Deref;\nuse std::ops::DerefMut;\n\nuse error::BookmarkError as BE;\nuse error::BookmarkErrorKind as BEK;\nuse error::MapErrInto;\nuse result::Result;\nuse module_path::ModuleEntryPath;\n\nuse libimagstore::store::Store;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::FileLockEntry;\nuse libimagentrylink::external::ExternalLinker;\nuse libimagentrylink::internal::InternalLinker;\nuse libimagentrylink::internal::Link;\nuse url::Url;\n\npub struct BookmarkCollection<'a> {\n fle: FileLockEntry<'a>,\n store: &'a Store,\n}\n\n\/\/\/ {Internal, External}Linker is implemented as Deref is implemented\nimpl<'a> Deref for BookmarkCollection<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.fle\n }\n\n}\n\nimpl<'a> DerefMut for BookmarkCollection<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.fle\n }\n\n}\n\nimpl<'a> BookmarkCollection<'a> {\n\n pub fn new(store: &'a Store, name: &str) -> Result<BookmarkCollection<'a>> {\n let id = ModuleEntryPath::new(name).into_storeid();\n store.create(id)\n .map(|fle| {\n BookmarkCollection {\n fle: fle,\n store: store,\n }\n })\n .map_err_into(BEK::StoreReadError)\n }\n\n pub fn open(store: &Store, name: &str) -> Result<BookmarkCollection<'a>> {\n unimplemented!()\n }\n\n pub fn delete(store: &Store, name: &str) -> Result<()> {\n unimplemented!()\n }\n\n pub fn links(&self) -> Result<Vec<Url>> {\n self.fle.get_external_links(&self.store).map_err_into(BEK::LinkError)\n }\n\n pub fn link_entries(&self) -> Result<Vec<Link>> {\n use libimagentrylink::external::is_external_link_storeid;\n\n self.fle\n .get_internal_links()\n .map(|v| v.into_iter().filter(|id| is_external_link_storeid(id)).collect())\n .map_err_into(BEK::StoreReadError)\n }\n\n}\n\n<commit_msg>Impl BookmarkCollection::get()<commit_after>\/\/! BookmarkCollection module\n\/\/!\n\/\/! A BookmarkCollection is nothing more than a simple store entry. One can simply call functions\n\/\/! from the libimagentrylink::external::ExternalLinker trait on this to generate external links.\n\/\/!\n\/\/! 
The BookmarkCollection type offers helper functions to get all links or such things.\nuse std::ops::Deref;\nuse std::ops::DerefMut;\n\nuse error::BookmarkError as BE;\nuse error::BookmarkErrorKind as BEK;\nuse error::MapErrInto;\nuse result::Result;\nuse module_path::ModuleEntryPath;\n\nuse libimagstore::store::Store;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagstore::store::FileLockEntry;\nuse libimagentrylink::external::ExternalLinker;\nuse libimagentrylink::internal::InternalLinker;\nuse libimagentrylink::internal::Link;\nuse libimagerror::into::IntoError;\nuse url::Url;\n\npub struct BookmarkCollection<'a> {\n fle: FileLockEntry<'a>,\n store: &'a Store,\n}\n\n\/\/\/ {Internal, External}Linker is implemented as Deref is implemented\nimpl<'a> Deref for BookmarkCollection<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.fle\n }\n\n}\n\nimpl<'a> DerefMut for BookmarkCollection<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.fle\n }\n\n}\n\nimpl<'a> BookmarkCollection<'a> {\n\n pub fn new(store: &'a Store, name: &str) -> Result<BookmarkCollection<'a>> {\n let id = ModuleEntryPath::new(name).into_storeid();\n store.create(id)\n .map(|fle| {\n BookmarkCollection {\n fle: fle,\n store: store,\n }\n })\n .map_err_into(BEK::StoreReadError)\n }\n\n pub fn get(store: &'a Store, name: &str) -> Result<BookmarkCollection<'a>> {\n let id = ModuleEntryPath::new(name).into_storeid();\n store.get(id)\n .map_err_into(BEK::StoreReadError)\n .and_then(|fle| {\n match fle {\n None => Err(BEK::CollectionNotFound.into_error()),\n Some(e) => Ok(BookmarkCollection {\n fle: e,\n store: store,\n }),\n }\n })\n }\n\n pub fn delete(store: &Store, name: &str) -> Result<()> {\n unimplemented!()\n }\n\n pub fn links(&self) -> Result<Vec<Url>> {\n self.fle.get_external_links(&self.store).map_err_into(BEK::LinkError)\n }\n\n pub fn link_entries(&self) -> Result<Vec<Link>> {\n use libimagentrylink::external::is_external_link_storeid;\n\n self.fle\n .get_internal_links()\n .map(|v| v.into_iter().filter(|id| is_external_link_storeid(id)).collect())\n .map_err_into(BEK::StoreReadError)\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Example of how to emulate Ruby or Python reference counting<commit_after>use std::rc::Rc;\n\nfn main() {\n let s: Rc<String> = Rc::new(\"shiratake\".to_string());\n let t: Rc<String> = s.clone();\n let u: Rc<String> = s.clone();\n\n \/\/ In this configuration strong ref count has 3\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This should fail a normal compile due to non_camel_case_types,\n\/\/ It should pass a doc-compile as it only needs to type-check and\n\/\/ therefore should not concern itself with the lints.\n#[deny(warnings)]\n\n\/\/ @has cap_lints\/struct.foo.html \/\/pre '#[must_use]'\n#[must_use]\npub struct foo {\n field: i32,\n}\n<commit_msg>Fix rustdoc test failure<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This should fail a normal compile due to non_camel_case_types,\n\/\/ It should pass a doc-compile as it only needs to type-check and\n\/\/ therefore should not concern itself with the lints.\n#[deny(warnings)]\n\n\/\/ @has cap_lints\/struct.Foo.html \/\/pre '#[must_use]'\n#[must_use]\npub struct Foo {\n field: i32,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Solutions<commit_after>\/\/ https:\/\/leetcode.com\/problems\/product-of-array-except-self\/\n\npub struct Solution;\n\nimpl Solution {\n \/\/ 以长度为6的数组,比如要计算index为2的值,其值为arr[0] * arr[1] * arr[3] * arr[4] * arr[5]\n \/\/ 亦即其前缀数组的积乘以后缀数组的积。\n \/\/ 想象我们可以创建两个额外数组,其中一个存储所有到当前位置的前缀之积,另一个存储后缀之积。\n \/\/ 创建前缀数组时,我们只需从前到后遍历一次;创建后缀数组时,只需反向遍历一次。\n \/\/ 此题的挑战项目为:使用O(1)空间(不包含返回结果)\n \/\/ 那么我们可以将前缀数组与后缀数组直接融合到结果数组上,即:\n \/\/ 首先从前到后遍历一次,使用前缀之积填充结果,再从后到前遍历一次,在之前结果的基础上乘以后缀之积,\n \/\/ 注意偏移即可。\n pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> {\n let mut result = vec![1; nums.len()];\n \/\/ 使用至当前位置的前缀之积填充结果数组\n let mut prefix_product = 1;\n for idx in 0..nums.len() - 1 {\n prefix_product *= nums[idx];\n result[idx + 1] *= prefix_product;\n }\n let mut postfix_product = 1;\n \/\/ 使用至当前位置的后缀之积更新结果数组\n for idx in (1..nums.len()).rev() {\n postfix_product *= nums[idx];\n result[idx - 1] *= postfix_product;\n }\n return result;\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_1() {\n assert_eq!(\n Solution::product_except_self(vec![1, 2, 3, 4]),\n vec![24, 12, 8, 6]\n );\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>skeleton check module level assembly<commit_after>\/\/ Test LVI load hardening on SGX module level assembly code\n\n\/\/ assembly-output: emit-asm\n\/\/ compile-flags: --crate-type staticlib\n\/\/ only-x86_64-fortanix-unknown-sgx\n\n#![feature(global_asm)]\n\nglobal_asm!(\".start_module_asm:\n movq (%rdi), %rax\n retq\n .end_module_asm:\" );\n\n\/\/ CHECK: .start_module_asm\n\/\/ TODO add check, when module-level pass is corrected\n\/\/ CHECK: .end_module_asm\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! 
practice that's likely not true!\n\nuse std::collections::HashMap;\nuse std::env;\nuse std::ffi::{OsString, OsStr};\nuse std::fs;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nuse build_helper::output;\n\nuse Build;\n\nstruct Finder {\n cache: HashMap<OsString, Option<PathBuf>>,\n path: OsString,\n}\n\nimpl Finder {\n fn new() -> Self {\n Self {\n cache: HashMap::new(),\n path: env::var_os(\"PATH\").unwrap_or_default()\n }\n }\n\n fn maybe_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> Option<PathBuf> {\n let cmd: OsString = cmd.as_ref().into();\n let path = self.path.clone();\n self.cache.entry(cmd.clone()).or_insert_with(|| {\n for path in env::split_paths(&path) {\n let target = path.join(&cmd);\n let mut cmd_alt = cmd.clone();\n cmd_alt.push(\".exe\");\n if target.is_file() || \/\/ some\/path\/git\n target.with_extension(\"exe\").exists() || \/\/ some\/path\/git.exe\n target.join(&cmd_alt).exists() { \/\/ some\/path\/git\/git.exe\n return Some(target);\n }\n }\n None\n }).clone()\n }\n\n fn must_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> PathBuf {\n self.maybe_have(&cmd).unwrap_or_else(|| {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd.as_ref());\n })\n }\n}\n\npub fn check(build: &mut Build) {\n let path = env::var_os(\"PATH\").unwrap_or_default();\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) && path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n\n let mut cmd_finder = Finder::new();\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if build.rust_info.is_git() {\n cmd_finder.must_have(\"git\");\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM or sanitizers.\n let building_llvm = build.hosts.iter()\n .filter_map(|host| build.config.target_config.get(host))\n .any(|config| config.llvm_config.is_none());\n if building_llvm || build.config.sanitizers {\n cmd_finder.must_have(\"cmake\");\n }\n\n \/\/ Ninja is currently only used for LLVM itself.\n \/\/ Some Linux distros rename `ninja` to `ninja-build`.\n \/\/ CMake can work with either binary name.\n if building_llvm && build.config.ninja && cmd_finder.maybe_have(\"ninja-build\").is_none() {\n cmd_finder.must_have(\"ninja\");\n }\n\n build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| env::var_os(\"BOOTSTRAP_PYTHON\").map(PathBuf::from)) \/\/ set by bootstrap.py\n .or_else(|| cmd_finder.maybe_have(\"python2.7\"))\n .or_else(|| cmd_finder.maybe_have(\"python2\"))\n .or_else(|| Some(cmd_finder.must_have(\"python\")));\n\n build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| cmd_finder.maybe_have(\"node\"))\n .or_else(|| cmd_finder.maybe_have(\"nodejs\"));\n\n build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| cmd_finder.maybe_have(\"gdb\"));\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in &build.targets {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. 
For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n cmd_finder.must_have(build.cc(*target));\n if let Some(ar) = build.ar(*target) {\n cmd_finder.must_have(ar);\n }\n }\n\n for host in &build.hosts {\n cmd_finder.must_have(build.cxx(*host).unwrap());\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(build.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in &build.targets {\n \/\/ Can't compile for iOS unless we're on macOS\n if target.contains(\"apple-ios\") &&\n !build.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on macOS\");\n }\n\n \/\/ Make sure musl-root is valid\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n \/\/ If this is a native target (host is also musl) and no musl-root is given,\n \/\/ fall back to the system toolchain in \/usr before giving up\n if build.musl_root(*target).is_none() && build.config.build == *target {\n let target = build.config.target_config.entry(target.clone())\n .or_insert(Default::default());\n target.musl_root = Some(\"\/usr\".into());\n }\n match build.musl_root(*target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n cmd_finder.must_have(s);\n }\n}\n<commit_msg>Auto merge of #44084 - alexcrichton:msvc-ninja, r=Mark-Simulacrum<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Sanity checking performed by rustbuild before actually executing anything.\n\/\/!\n\/\/! This module contains the implementation of ensuring that the build\n\/\/! environment looks reasonable before progressing. This will verify that\n\/\/! various programs like git and python exist, along with ensuring that all C\n\/\/! compilers for cross-compiling are found.\n\/\/!\n\/\/! In theory if we get past this phase it's a bug if a build fails, but in\n\/\/! practice that's likely not true!\n\nuse std::collections::HashMap;\nuse std::env;\nuse std::ffi::{OsString, OsStr};\nuse std::fs;\nuse std::process::Command;\nuse std::path::PathBuf;\n\nuse build_helper::output;\n\nuse Build;\n\nstruct Finder {\n cache: HashMap<OsString, Option<PathBuf>>,\n path: OsString,\n}\n\nimpl Finder {\n fn new() -> Self {\n Self {\n cache: HashMap::new(),\n path: env::var_os(\"PATH\").unwrap_or_default()\n }\n }\n\n fn maybe_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> Option<PathBuf> {\n let cmd: OsString = cmd.as_ref().into();\n let path = self.path.clone();\n self.cache.entry(cmd.clone()).or_insert_with(|| {\n for path in env::split_paths(&path) {\n let target = path.join(&cmd);\n let mut cmd_alt = cmd.clone();\n cmd_alt.push(\".exe\");\n if target.is_file() || \/\/ some\/path\/git\n target.with_extension(\"exe\").exists() || \/\/ some\/path\/git.exe\n target.join(&cmd_alt).exists() { \/\/ some\/path\/git\/git.exe\n return Some(target);\n }\n }\n None\n }).clone()\n }\n\n fn must_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> PathBuf {\n self.maybe_have(&cmd).unwrap_or_else(|| {\n panic!(\"\\n\\ncouldn't find required command: {:?}\\n\\n\", cmd.as_ref());\n })\n }\n}\n\npub fn check(build: &mut Build) {\n let path = env::var_os(\"PATH\").unwrap_or_default();\n \/\/ On Windows, quotes are invalid characters for filename paths, and if\n \/\/ one is present as part of the PATH then that can lead to the system\n \/\/ being unable to identify the files properly. 
See\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/34959 for more details.\n if cfg!(windows) && path.to_string_lossy().contains(\"\\\"\") {\n panic!(\"PATH contains invalid character '\\\"'\");\n }\n\n let mut cmd_finder = Finder::new();\n \/\/ If we've got a git directory we're gona need git to update\n \/\/ submodules and learn about various other aspects.\n if build.rust_info.is_git() {\n cmd_finder.must_have(\"git\");\n }\n\n \/\/ We need cmake, but only if we're actually building LLVM or sanitizers.\n let building_llvm = build.hosts.iter()\n .filter_map(|host| build.config.target_config.get(host))\n .any(|config| config.llvm_config.is_none());\n if building_llvm || build.config.sanitizers {\n cmd_finder.must_have(\"cmake\");\n }\n\n \/\/ Ninja is currently only used for LLVM itself.\n if building_llvm {\n if build.config.ninja {\n \/\/ Some Linux distros rename `ninja` to `ninja-build`.\n \/\/ CMake can work with either binary name.\n if cmd_finder.maybe_have(\"ninja-build\").is_none() {\n cmd_finder.must_have(\"ninja\");\n }\n }\n\n \/\/ If ninja isn't enabled but we're building for MSVC then we try\n \/\/ doubly hard to enable it. It was realized in #43767 that the msbuild\n \/\/ CMake generator for MSVC doesn't respect configuration options like\n \/\/ disabling LLVM assertions, which can often be quite important!\n \/\/\n \/\/ In these cases we automatically enable Ninja if we find it in the\n \/\/ environment.\n if !build.config.ninja && build.config.build.contains(\"msvc\") {\n if cmd_finder.maybe_have(\"ninja\").is_some() {\n build.config.ninja = true;\n }\n }\n }\n\n build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| env::var_os(\"BOOTSTRAP_PYTHON\").map(PathBuf::from)) \/\/ set by bootstrap.py\n .or_else(|| cmd_finder.maybe_have(\"python2.7\"))\n .or_else(|| cmd_finder.maybe_have(\"python2\"))\n .or_else(|| Some(cmd_finder.must_have(\"python\")));\n\n build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| cmd_finder.maybe_have(\"node\"))\n .or_else(|| cmd_finder.maybe_have(\"nodejs\"));\n\n build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p))\n .or_else(|| cmd_finder.maybe_have(\"gdb\"));\n\n \/\/ We're gonna build some custom C code here and there, host triples\n \/\/ also build some C++ shims for LLVM so we need a C++ compiler.\n for target in &build.targets {\n \/\/ On emscripten we don't actually need the C compiler to just\n \/\/ build the target artifacts, only for testing. 
For the sake\n \/\/ of easier bot configuration, just skip detection.\n if target.contains(\"emscripten\") {\n continue;\n }\n\n cmd_finder.must_have(build.cc(*target));\n if let Some(ar) = build.ar(*target) {\n cmd_finder.must_have(ar);\n }\n }\n\n for host in &build.hosts {\n cmd_finder.must_have(build.cxx(*host).unwrap());\n\n \/\/ The msvc hosts don't use jemalloc, turn it off globally to\n \/\/ avoid packaging the dummy liballoc_jemalloc on that platform.\n if host.contains(\"msvc\") {\n build.config.use_jemalloc = false;\n }\n }\n\n \/\/ Externally configured LLVM requires FileCheck to exist\n let filecheck = build.llvm_filecheck(build.build);\n if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {\n panic!(\"FileCheck executable {:?} does not exist\", filecheck);\n }\n\n for target in &build.targets {\n \/\/ Can't compile for iOS unless we're on macOS\n if target.contains(\"apple-ios\") &&\n !build.build.contains(\"apple-darwin\") {\n panic!(\"the iOS target is only supported on macOS\");\n }\n\n \/\/ Make sure musl-root is valid\n if target.contains(\"musl\") && !target.contains(\"mips\") {\n \/\/ If this is a native target (host is also musl) and no musl-root is given,\n \/\/ fall back to the system toolchain in \/usr before giving up\n if build.musl_root(*target).is_none() && build.config.build == *target {\n let target = build.config.target_config.entry(target.clone())\n .or_insert(Default::default());\n target.musl_root = Some(\"\/usr\".into());\n }\n match build.musl_root(*target) {\n Some(root) => {\n if fs::metadata(root.join(\"lib\/libc.a\")).is_err() {\n panic!(\"couldn't find libc.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n if fs::metadata(root.join(\"lib\/libunwind.a\")).is_err() {\n panic!(\"couldn't find libunwind.a in musl dir: {}\",\n root.join(\"lib\").display());\n }\n }\n None => {\n panic!(\"when targeting MUSL either the rust.musl-root \\\n option or the target.$TARGET.musl-root option must \\\n be specified in config.toml\")\n }\n }\n }\n\n if target.contains(\"msvc\") {\n \/\/ There are three builds of cmake on windows: MSVC, MinGW, and\n \/\/ Cygwin. 
The Cygwin build does not have generators for Visual\n \/\/ Studio, so detect that here and error.\n let out = output(Command::new(\"cmake\").arg(\"--help\"));\n if !out.contains(\"Visual Studio\") {\n panic!(\"\ncmake does not support Visual Studio generators.\n\nThis is likely due to it being an msys\/cygwin build of cmake,\nrather than the required windows version, built using MinGW\nor Visual Studio.\n\nIf you are building under msys2 try installing the mingw-w64-x86_64-cmake\npackage instead of cmake:\n\n$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake\n\");\n }\n }\n }\n\n let run = |cmd: &mut Command| {\n cmd.output().map(|output| {\n String::from_utf8_lossy(&output.stdout)\n .lines().next().unwrap()\n .to_string()\n })\n };\n build.lldb_version = run(Command::new(\"lldb\").arg(\"--version\")).ok();\n if build.lldb_version.is_some() {\n build.lldb_python_dir = run(Command::new(\"lldb\").arg(\"-P\")).ok();\n }\n\n if let Some(ref s) = build.config.ccache {\n cmd_finder.must_have(s);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use serde_json;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::{Path, PathBuf};\n\n#[derive(Debug, Clone)]\npub struct BookConfig {\n pub title: String,\n pub author: String,\n pub description: String,\n root: PathBuf,\n pub dest: PathBuf,\n pub src: PathBuf,\n pub indent_spaces: i32,\n multilingual: bool,\n}\n\n\nimpl BookConfig {\n pub fn new(root: &Path) -> Self {\n BookConfig {\n title: String::new(),\n author: String::new(),\n description: String::new(),\n root: root.to_owned(),\n dest: root.join(\"book\"),\n src: root.join(\"src\"),\n indent_spaces: 4, \/\/ indentation used for SUMMARY.md\n multilingual: false,\n }\n }\n\n pub fn read_config(&mut self, root: &Path) -> &mut Self {\n\n debug!(\"[fn]: read_config\");\n\n \/\/ If the file does not exist, return early\n let mut config_file = match File::open(root.join(\"book.json\")) {\n Ok(f) => f,\n Err(_) => {\n debug!(\"[*]: Failed to open {:?}\", root.join(\"book.json\"));\n return self;\n },\n };\n\n debug!(\"[*]: Reading config\");\n let mut data = String::new();\n\n \/\/ Just return if an error occured.\n \/\/ I would like to propagate the error, but I have to return `&self`\n if let Err(_) = config_file.read_to_string(&mut data) {\n return self;\n }\n\n \/\/ Convert to JSON\n if let Ok(config) = serde_json::from_str::<serde_json::Value>(&data) {\n \/\/ Extract data\n\n let config = config.as_object().unwrap();\n\n debug!(\"[*]: Extracting data from config\");\n \/\/ Title, author, description\n if let Some(a) = config.get(\"title\") {\n self.title = a.to_string().replace(\"\\\"\", \"\")\n }\n if let Some(a) = config.get(\"author\") {\n self.author = a.to_string().replace(\"\\\"\", \"\")\n }\n if let Some(a) = config.get(\"description\") {\n self.description = a.to_string().replace(\"\\\"\", \"\")\n }\n\n \/\/ Destination\n if let Some(a) = config.get(\"dest\") {\n let dest = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n\n \/\/ If path is relative make it absolute from the parent directory of src\n match dest.is_relative() {\n true => {\n let dest = self.get_root().join(&dest).to_owned();\n self.set_dest(&dest);\n },\n false => {\n self.set_dest(&dest);\n },\n }\n }\n }\n\n self\n }\n\n pub fn get_root(&self) -> &Path {\n &self.root\n }\n\n pub fn set_root(&mut self, root: &Path) -> &mut Self {\n self.root = root.to_owned();\n self\n }\n\n pub fn get_dest(&self) -> &Path {\n &self.dest\n }\n\n pub fn set_dest(&mut self, dest: &Path) -> &mut Self {\n self.dest = 
dest.to_owned();\n self\n }\n\n pub fn get_src(&self) -> &Path {\n &self.src\n }\n\n pub fn set_src(&mut self, src: &Path) -> &mut Self {\n self.src = src.to_owned();\n self\n }\n}\n<commit_msg>use src key in book.json when given<commit_after>use serde_json;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::{Path, PathBuf};\n\n#[derive(Debug, Clone)]\npub struct BookConfig {\n pub title: String,\n pub author: String,\n pub description: String,\n root: PathBuf,\n pub dest: PathBuf,\n pub src: PathBuf,\n pub indent_spaces: i32,\n multilingual: bool,\n}\n\n\nimpl BookConfig {\n pub fn new(root: &Path) -> Self {\n BookConfig {\n title: String::new(),\n author: String::new(),\n description: String::new(),\n root: root.to_owned(),\n dest: root.join(\"book\"),\n src: root.join(\"src\"),\n indent_spaces: 4, \/\/ indentation used for SUMMARY.md\n multilingual: false,\n }\n }\n\n pub fn read_config(&mut self, root: &Path) -> &mut Self {\n\n debug!(\"[fn]: read_config\");\n\n \/\/ If the file does not exist, return early\n let mut config_file = match File::open(root.join(\"book.json\")) {\n Ok(f) => f,\n Err(_) => {\n debug!(\"[*]: Failed to open {:?}\", root.join(\"book.json\"));\n return self;\n },\n };\n\n debug!(\"[*]: Reading config\");\n let mut data = String::new();\n\n \/\/ Just return if an error occured.\n \/\/ I would like to propagate the error, but I have to return `&self`\n if let Err(_) = config_file.read_to_string(&mut data) {\n return self;\n }\n\n \/\/ Convert to JSON\n if let Ok(config) = serde_json::from_str::<serde_json::Value>(&data) {\n \/\/ Extract data\n\n let config = config.as_object().unwrap();\n\n debug!(\"[*]: Extracting data from config\");\n \/\/ Title, author, description\n if let Some(a) = config.get(\"title\") {\n self.title = a.to_string().replace(\"\\\"\", \"\")\n }\n if let Some(a) = config.get(\"author\") {\n self.author = a.to_string().replace(\"\\\"\", \"\")\n }\n if let Some(a) = config.get(\"description\") {\n self.description = a.to_string().replace(\"\\\"\", \"\")\n }\n\n \/\/ Destination folder\n if let Some(a) = config.get(\"dest\") {\n let dest = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n\n \/\/ If path is relative make it absolute from the parent directory of src\n match dest.is_relative() {\n true => {\n let dest = self.get_root().join(&dest).to_owned();\n self.set_dest(&dest);\n },\n false => {\n self.set_dest(&dest);\n },\n }\n }\n\n \/\/ Source folder\n if let Some(a) = config.get(\"src\") {\n let src = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n match src.is_relative() {\n true => {\n let src = self.get_root().join(&src).to_owned();\n self.set_src(&src);\n },\n false => {\n self.set_src(&src);\n }\n }\n }\n }\n\n self\n }\n\n pub fn get_root(&self) -> &Path {\n &self.root\n }\n\n pub fn set_root(&mut self, root: &Path) -> &mut Self {\n self.root = root.to_owned();\n self\n }\n\n pub fn get_dest(&self) -> &Path {\n &self.dest\n }\n\n pub fn set_dest(&mut self, dest: &Path) -> &mut Self {\n self.dest = dest.to_owned();\n self\n }\n\n pub fn get_src(&self) -> &Path {\n &self.src\n }\n\n pub fn set_src(&mut self, src: &Path) -> &mut Self {\n self.src = src.to_owned();\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Command-line interface of the rustbuild build system.\n\/\/!\n\/\/! This module implements the command-line parsing of the build system which\n\/\/! has various flags to configure how it's run.\n\nuse std::env;\nuse std::fs;\nuse std::path::PathBuf;\nuse std::process;\n\nuse getopts::Options;\n\nuse Build;\nuse config::Config;\nuse metadata;\nuse step;\n\n\/\/\/ Deserialized version of all flags for this compile.\npub struct Flags {\n pub verbose: usize, \/\/ verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose\n pub on_fail: Option<String>,\n pub stage: Option<u32>,\n pub keep_stage: Option<u32>,\n pub build: String,\n pub host: Vec<String>,\n pub target: Vec<String>,\n pub config: Option<PathBuf>,\n pub src: Option<PathBuf>,\n pub jobs: Option<u32>,\n pub cmd: Subcommand,\n pub incremental: bool,\n}\n\nimpl Flags {\n pub fn verbose(&self) -> bool {\n self.verbose > 0\n }\n\n pub fn very_verbose(&self) -> bool {\n self.verbose > 1\n }\n}\n\npub enum Subcommand {\n Build {\n paths: Vec<PathBuf>,\n },\n Doc {\n paths: Vec<PathBuf>,\n },\n Test {\n paths: Vec<PathBuf>,\n test_args: Vec<String>,\n no_fail_fast: bool,\n },\n Bench {\n paths: Vec<PathBuf>,\n test_args: Vec<String>,\n },\n Clean,\n Dist {\n paths: Vec<PathBuf>,\n },\n Install {\n paths: Vec<PathBuf>,\n },\n}\n\nimpl Flags {\n pub fn parse(args: &[String]) -> Flags {\n let mut extra_help = String::new();\n let mut subcommand_help = format!(\"\\\nUsage: x.py <subcommand> [options] [<paths>...]\n\nSubcommands:\n build Compile either the compiler or libraries\n test Build and run some test suites\n bench Build and run some benchmarks\n doc Build documentation\n clean Clean out build directories\n dist Build distribution artifacts\n install Install distribution artifacts\n\nTo learn more about a subcommand, run `.\/x.py <subcommand> -h`\");\n\n let mut opts = Options::new();\n \/\/ Options common to all subcommands\n opts.optflagmulti(\"v\", \"verbose\", \"use verbose output (-vv for very verbose)\");\n opts.optflag(\"i\", \"incremental\", \"use incremental compilation\");\n opts.optopt(\"\", \"config\", \"TOML configuration file for build\", \"FILE\");\n opts.optopt(\"\", \"build\", \"build target of the stage0 compiler\", \"BUILD\");\n opts.optmulti(\"\", \"host\", \"host targets to build\", \"HOST\");\n opts.optmulti(\"\", \"target\", \"target targets to build\", \"TARGET\");\n opts.optopt(\"\", \"on-fail\", \"command to run on failure\", \"CMD\");\n opts.optopt(\"\", \"stage\", \"stage to build\", \"N\");\n opts.optopt(\"\", \"keep-stage\", \"stage to keep without recompiling\", \"N\");\n opts.optopt(\"\", \"src\", \"path to the root of the rust checkout\", \"DIR\");\n opts.optopt(\"j\", \"jobs\", \"number of jobs to run in parallel\", \"JOBS\");\n opts.optflag(\"h\", \"help\", \"print this help message\");\n\n \/\/ fn usage()\n let usage = |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! 
{\n println!(\"{}\", opts.usage(subcommand_help));\n if !extra_help.is_empty() {\n println!(\"{}\", extra_help);\n }\n process::exit(exit_code);\n };\n\n \/\/ We can't use getopt to parse the options until we have completed specifying which\n \/\/ options are valid, but under the current implementation, some options are conditional on\n \/\/ the subcommand. Therefore we must manually identify the subcommand first, so that we can\n \/\/ complete the definition of the options. Then we can use the getopt::Matches object from\n \/\/ there on out.\n let mut possible_subcommands = args.iter().collect::<Vec<_>>();\n possible_subcommands.retain(|&s|\n (s == \"build\")\n || (s == \"test\")\n || (s == \"bench\")\n || (s == \"doc\")\n || (s == \"clean\")\n || (s == \"dist\")\n || (s == \"install\"));\n let subcommand = match possible_subcommands.first() {\n Some(s) => s,\n None => {\n \/\/ No subcommand -- show the general usage and subcommand help\n println!(\"{}\\n\", subcommand_help);\n process::exit(0);\n }\n };\n\n \/\/ Some subcommands get extra options\n match subcommand.as_str() {\n \"test\" => {\n opts.optflag(\"\", \"no-fail-fast\", \"Run all tests regardless of failure\");\n opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\");\n },\n \"bench\" => { opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\"); },\n _ => { },\n };\n\n \/\/ Done specifying what options are possible, so do the getopts parsing\n let matches = opts.parse(&args[..]).unwrap_or_else(|e| {\n \/\/ Invalid argument\/option format\n println!(\"\\n{}\\n\", e);\n usage(1, &opts, &subcommand_help, &extra_help);\n });\n \/\/ Extra sanity check to make sure we didn't hit this crazy corner case:\n \/\/\n \/\/ .\/x.py --frobulate clean build\n \/\/ ^-- option ^ ^- actual subcommand\n \/\/ \\_ arg to option could be mistaken as subcommand\n let mut pass_sanity_check = true;\n match matches.free.get(0) {\n Some(check_subcommand) => {\n if &check_subcommand != subcommand {\n pass_sanity_check = false;\n }\n },\n None => {\n pass_sanity_check = false;\n }\n }\n if !pass_sanity_check {\n println!(\"{}\\n\", subcommand_help);\n println!(\"Sorry, I couldn't figure out which subcommand you were trying to specify.\\n\\\n You may need to move some options to after the subcommand.\\n\");\n process::exit(1);\n }\n \/\/ Extra help text for some commands\n match subcommand.as_str() {\n \"build\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to the crates\n and\/or artifacts to compile. For example:\n\n .\/x.py build src\/libcore\n .\/x.py build src\/libcore src\/libproc_macro\n .\/x.py build src\/libstd --stage 1\n\n If no arguments are passed then the complete artifacts for that stage are\n also compiled.\n\n .\/x.py build\n .\/x.py build --stage 1\n\n For a quick build with a usable compile, you can pass:\n\n .\/x.py build --stage 1 src\/libtest\");\n }\n \"test\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to tests that\n should be compiled and run. For example:\n\n .\/x.py test src\/test\/run-pass\n .\/x.py test src\/libstd --test-args hash_map\n .\/x.py test src\/libstd --stage 0\n\n If no arguments are passed then the complete artifacts for that stage are\n compiled and tested.\n\n .\/x.py test\n .\/x.py test --stage 1\");\n }\n \"doc\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories of documentation\n to build. 
For example:\n\n .\/x.py doc src\/doc\/book\n .\/x.py doc src\/doc\/nomicon\n .\/x.py doc src\/doc\/book src\/libstd\n\n If no arguments are passed then everything is documented:\n\n .\/x.py doc\n .\/x.py doc --stage 1\");\n }\n _ => { }\n };\n \/\/ Get any optional paths which occur after the subcommand\n let cwd = t!(env::current_dir());\n let paths = matches.free[1..].iter().map(|p| cwd.join(p)).collect::<Vec<_>>();\n\n\n \/\/ All subcommands can have an optional \"Available paths\" section\n if matches.opt_present(\"verbose\") {\n let flags = Flags::parse(&[\"build\".to_string()]);\n let mut config = Config::default();\n config.build = flags.build.clone();\n let mut build = Build::new(flags, config);\n metadata::build(&mut build);\n let maybe_rules_help = step::build_rules(&build).get_help(subcommand);\n if maybe_rules_help.is_some() {\n extra_help.push_str(maybe_rules_help.unwrap().as_str());\n }\n } else {\n extra_help.push_str(format!(\"Run `.\/x.py {} -h -v` to see a list of available paths.\",\n subcommand).as_str());\n }\n\n \/\/ User passed in -h\/--help?\n if matches.opt_present(\"help\") {\n usage(0, &opts, &subcommand_help, &extra_help);\n }\n\n let cmd = match subcommand.as_str() {\n \"build\" => {\n Subcommand::Build { paths: paths }\n }\n \"test\" => {\n Subcommand::Test {\n paths: paths,\n test_args: matches.opt_strs(\"test-args\"),\n no_fail_fast: matches.opt_present(\"no-fail-fast\"),\n }\n }\n \"bench\" => {\n Subcommand::Bench {\n paths: paths,\n test_args: matches.opt_strs(\"test-args\"),\n }\n }\n \"doc\" => {\n Subcommand::Doc { paths: paths }\n }\n \"clean\" => {\n if paths.len() > 0 {\n println!(\"\\nclean takes no arguments\\n\");\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n Subcommand::Clean\n }\n \"dist\" => {\n Subcommand::Dist {\n paths: paths,\n }\n }\n \"install\" => {\n Subcommand::Install {\n paths: paths,\n }\n }\n _ => {\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n };\n\n\n let cfg_file = matches.opt_str(\"config\").map(PathBuf::from).or_else(|| {\n if fs::metadata(\"config.toml\").is_ok() {\n Some(PathBuf::from(\"config.toml\"))\n } else {\n None\n }\n });\n\n let mut stage = matches.opt_str(\"stage\").map(|j| j.parse().unwrap());\n\n if matches.opt_present(\"incremental\") {\n if stage.is_none() {\n stage = Some(1);\n }\n }\n\n Flags {\n verbose: matches.opt_count(\"verbose\"),\n stage: stage,\n on_fail: matches.opt_str(\"on-fail\"),\n keep_stage: matches.opt_str(\"keep-stage\").map(|j| j.parse().unwrap()),\n build: matches.opt_str(\"build\").unwrap_or_else(|| {\n env::var(\"BUILD\").unwrap()\n }),\n host: split(matches.opt_strs(\"host\")),\n target: split(matches.opt_strs(\"target\")),\n config: cfg_file,\n src: matches.opt_str(\"src\").map(PathBuf::from),\n jobs: matches.opt_str(\"jobs\").map(|j| j.parse().unwrap()),\n cmd: cmd,\n incremental: matches.opt_present(\"incremental\"),\n }\n }\n}\n\nimpl Subcommand {\n pub fn test_args(&self) -> Vec<&str> {\n match *self {\n Subcommand::Test { ref test_args, .. } |\n Subcommand::Bench { ref test_args, .. } => {\n test_args.iter().flat_map(|s| s.split_whitespace()).collect()\n }\n _ => Vec::new(),\n }\n }\n\n pub fn no_fail_fast(&self) -> bool {\n match *self {\n Subcommand::Test { no_fail_fast, .. 
} => no_fail_fast,\n _ => false,\n }\n }\n}\n\nfn split(s: Vec<String>) -> Vec<String> {\n s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()\n}\n<commit_msg>Auto merge of #42491 - RalfJung:bootstrap-help, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Command-line interface of the rustbuild build system.\n\/\/!\n\/\/! This module implements the command-line parsing of the build system which\n\/\/! has various flags to configure how it's run.\n\nuse std::env;\nuse std::fs;\nuse std::path::PathBuf;\nuse std::process;\n\nuse getopts::Options;\n\nuse Build;\nuse config::Config;\nuse metadata;\nuse step;\n\n\/\/\/ Deserialized version of all flags for this compile.\npub struct Flags {\n pub verbose: usize, \/\/ verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose\n pub on_fail: Option<String>,\n pub stage: Option<u32>,\n pub keep_stage: Option<u32>,\n pub build: String,\n pub host: Vec<String>,\n pub target: Vec<String>,\n pub config: Option<PathBuf>,\n pub src: Option<PathBuf>,\n pub jobs: Option<u32>,\n pub cmd: Subcommand,\n pub incremental: bool,\n}\n\nimpl Flags {\n pub fn verbose(&self) -> bool {\n self.verbose > 0\n }\n\n pub fn very_verbose(&self) -> bool {\n self.verbose > 1\n }\n}\n\npub enum Subcommand {\n Build {\n paths: Vec<PathBuf>,\n },\n Doc {\n paths: Vec<PathBuf>,\n },\n Test {\n paths: Vec<PathBuf>,\n test_args: Vec<String>,\n no_fail_fast: bool,\n },\n Bench {\n paths: Vec<PathBuf>,\n test_args: Vec<String>,\n },\n Clean,\n Dist {\n paths: Vec<PathBuf>,\n },\n Install {\n paths: Vec<PathBuf>,\n },\n}\n\nimpl Flags {\n pub fn parse(args: &[String]) -> Flags {\n let mut extra_help = String::new();\n let mut subcommand_help = format!(\"\\\nUsage: x.py <subcommand> [options] [<paths>...]\n\nSubcommands:\n build Compile either the compiler or libraries\n test Build and run some test suites\n bench Build and run some benchmarks\n doc Build documentation\n clean Clean out build directories\n dist Build distribution artifacts\n install Install distribution artifacts\n\nTo learn more about a subcommand, run `.\/x.py <subcommand> -h`\");\n\n let mut opts = Options::new();\n \/\/ Options common to all subcommands\n opts.optflagmulti(\"v\", \"verbose\", \"use verbose output (-vv for very verbose)\");\n opts.optflag(\"i\", \"incremental\", \"use incremental compilation\");\n opts.optopt(\"\", \"config\", \"TOML configuration file for build\", \"FILE\");\n opts.optopt(\"\", \"build\", \"build target of the stage0 compiler\", \"BUILD\");\n opts.optmulti(\"\", \"host\", \"host targets to build\", \"HOST\");\n opts.optmulti(\"\", \"target\", \"target targets to build\", \"TARGET\");\n opts.optopt(\"\", \"on-fail\", \"command to run on failure\", \"CMD\");\n opts.optopt(\"\", \"stage\", \"stage to build\", \"N\");\n opts.optopt(\"\", \"keep-stage\", \"stage to keep without recompiling\", \"N\");\n opts.optopt(\"\", \"src\", \"path to the root of the rust checkout\", \"DIR\");\n opts.optopt(\"j\", \"jobs\", \"number of jobs to run in parallel\", \"JOBS\");\n 
opts.optflag(\"h\", \"help\", \"print this help message\");\n\n \/\/ fn usage()\n let usage = |exit_code: i32, opts: &Options, subcommand_help: &str, extra_help: &str| -> ! {\n println!(\"{}\", opts.usage(subcommand_help));\n if !extra_help.is_empty() {\n println!(\"{}\", extra_help);\n }\n process::exit(exit_code);\n };\n\n \/\/ We can't use getopt to parse the options until we have completed specifying which\n \/\/ options are valid, but under the current implementation, some options are conditional on\n \/\/ the subcommand. Therefore we must manually identify the subcommand first, so that we can\n \/\/ complete the definition of the options. Then we can use the getopt::Matches object from\n \/\/ there on out.\n let mut possible_subcommands = args.iter().collect::<Vec<_>>();\n possible_subcommands.retain(|&s|\n (s == \"build\")\n || (s == \"test\")\n || (s == \"bench\")\n || (s == \"doc\")\n || (s == \"clean\")\n || (s == \"dist\")\n || (s == \"install\"));\n let subcommand = match possible_subcommands.first() {\n Some(s) => s,\n None => {\n \/\/ No subcommand -- show the general usage and subcommand help\n println!(\"{}\\n\", subcommand_help);\n process::exit(0);\n }\n };\n\n \/\/ Some subcommands get extra options\n match subcommand.as_str() {\n \"test\" => {\n opts.optflag(\"\", \"no-fail-fast\", \"Run all tests regardless of failure\");\n opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\");\n },\n \"bench\" => { opts.optmulti(\"\", \"test-args\", \"extra arguments\", \"ARGS\"); },\n _ => { },\n };\n\n \/\/ Done specifying what options are possible, so do the getopts parsing\n let matches = opts.parse(&args[..]).unwrap_or_else(|e| {\n \/\/ Invalid argument\/option format\n println!(\"\\n{}\\n\", e);\n usage(1, &opts, &subcommand_help, &extra_help);\n });\n \/\/ Extra sanity check to make sure we didn't hit this crazy corner case:\n \/\/\n \/\/ .\/x.py --frobulate clean build\n \/\/ ^-- option ^ ^- actual subcommand\n \/\/ \\_ arg to option could be mistaken as subcommand\n let mut pass_sanity_check = true;\n match matches.free.get(0) {\n Some(check_subcommand) => {\n if &check_subcommand != subcommand {\n pass_sanity_check = false;\n }\n },\n None => {\n pass_sanity_check = false;\n }\n }\n if !pass_sanity_check {\n println!(\"{}\\n\", subcommand_help);\n println!(\"Sorry, I couldn't figure out which subcommand you were trying to specify.\\n\\\n You may need to move some options to after the subcommand.\\n\");\n process::exit(1);\n }\n \/\/ Extra help text for some commands\n match subcommand.as_str() {\n \"build\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to the crates\n and\/or artifacts to compile. For example:\n\n .\/x.py build src\/libcore\n .\/x.py build src\/libcore src\/libproc_macro\n .\/x.py build src\/libstd --stage 1\n\n If no arguments are passed then the complete artifacts for that stage are\n also compiled.\n\n .\/x.py build\n .\/x.py build --stage 1\n\n For a quick build of a usable compiler, you can pass:\n\n .\/x.py build --stage 1 src\/libtest\n\n This will first build everything once (like --stage 0 without further\n arguments would), and then use the compiler built in stage 0 to build\n src\/libtest and its dependencies.\n Once this is done, build\/$ARCH\/stage1 contains a usable compiler.\");\n }\n \"test\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories to tests that\n should be compiled and run. 
For example:\n\n .\/x.py test src\/test\/run-pass\n .\/x.py test src\/libstd --test-args hash_map\n .\/x.py test src\/libstd --stage 0\n\n If no arguments are passed then the complete artifacts for that stage are\n compiled and tested.\n\n .\/x.py test\n .\/x.py test --stage 1\");\n }\n \"doc\" => {\n subcommand_help.push_str(\"\\n\nArguments:\n This subcommand accepts a number of paths to directories of documentation\n to build. For example:\n\n .\/x.py doc src\/doc\/book\n .\/x.py doc src\/doc\/nomicon\n .\/x.py doc src\/doc\/book src\/libstd\n\n If no arguments are passed then everything is documented:\n\n .\/x.py doc\n .\/x.py doc --stage 1\");\n }\n _ => { }\n };\n \/\/ Get any optional paths which occur after the subcommand\n let cwd = t!(env::current_dir());\n let paths = matches.free[1..].iter().map(|p| cwd.join(p)).collect::<Vec<_>>();\n\n\n \/\/ All subcommands can have an optional \"Available paths\" section\n if matches.opt_present(\"verbose\") {\n let flags = Flags::parse(&[\"build\".to_string()]);\n let mut config = Config::default();\n config.build = flags.build.clone();\n let mut build = Build::new(flags, config);\n metadata::build(&mut build);\n let maybe_rules_help = step::build_rules(&build).get_help(subcommand);\n if maybe_rules_help.is_some() {\n extra_help.push_str(maybe_rules_help.unwrap().as_str());\n }\n } else {\n extra_help.push_str(format!(\"Run `.\/x.py {} -h -v` to see a list of available paths.\",\n subcommand).as_str());\n }\n\n \/\/ User passed in -h\/--help?\n if matches.opt_present(\"help\") {\n usage(0, &opts, &subcommand_help, &extra_help);\n }\n\n let cmd = match subcommand.as_str() {\n \"build\" => {\n Subcommand::Build { paths: paths }\n }\n \"test\" => {\n Subcommand::Test {\n paths: paths,\n test_args: matches.opt_strs(\"test-args\"),\n no_fail_fast: matches.opt_present(\"no-fail-fast\"),\n }\n }\n \"bench\" => {\n Subcommand::Bench {\n paths: paths,\n test_args: matches.opt_strs(\"test-args\"),\n }\n }\n \"doc\" => {\n Subcommand::Doc { paths: paths }\n }\n \"clean\" => {\n if paths.len() > 0 {\n println!(\"\\nclean takes no arguments\\n\");\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n Subcommand::Clean\n }\n \"dist\" => {\n Subcommand::Dist {\n paths: paths,\n }\n }\n \"install\" => {\n Subcommand::Install {\n paths: paths,\n }\n }\n _ => {\n usage(1, &opts, &subcommand_help, &extra_help);\n }\n };\n\n\n let cfg_file = matches.opt_str(\"config\").map(PathBuf::from).or_else(|| {\n if fs::metadata(\"config.toml\").is_ok() {\n Some(PathBuf::from(\"config.toml\"))\n } else {\n None\n }\n });\n\n let mut stage = matches.opt_str(\"stage\").map(|j| j.parse().unwrap());\n\n if matches.opt_present(\"incremental\") {\n if stage.is_none() {\n stage = Some(1);\n }\n }\n\n Flags {\n verbose: matches.opt_count(\"verbose\"),\n stage: stage,\n on_fail: matches.opt_str(\"on-fail\"),\n keep_stage: matches.opt_str(\"keep-stage\").map(|j| j.parse().unwrap()),\n build: matches.opt_str(\"build\").unwrap_or_else(|| {\n env::var(\"BUILD\").unwrap()\n }),\n host: split(matches.opt_strs(\"host\")),\n target: split(matches.opt_strs(\"target\")),\n config: cfg_file,\n src: matches.opt_str(\"src\").map(PathBuf::from),\n jobs: matches.opt_str(\"jobs\").map(|j| j.parse().unwrap()),\n cmd: cmd,\n incremental: matches.opt_present(\"incremental\"),\n }\n }\n}\n\nimpl Subcommand {\n pub fn test_args(&self) -> Vec<&str> {\n match *self {\n Subcommand::Test { ref test_args, .. } |\n Subcommand::Bench { ref test_args, .. 
} => {\n test_args.iter().flat_map(|s| s.split_whitespace()).collect()\n }\n _ => Vec::new(),\n }\n }\n\n pub fn no_fail_fast(&self) -> bool {\n match *self {\n Subcommand::Test { no_fail_fast, .. } => no_fail_fast,\n _ => false,\n }\n }\n}\n\nfn split(s: Vec<String>) -> Vec<String> {\n s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add collections::Vec::append_slice<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! # Elektra\n\/\/! Safe bindings for libelektra.\n\/\/! \n\/\/! See the [project's readme](https:\/\/master.libelektra.org\/src\/bindings\/rust) for an introduction and examples.\n\nextern crate bitflags;\nextern crate elektra_sys;\n\n\/\/\/ `StringKey` and `BinaryKey` are the essential structs that encapsulate name, value and metainfo.\npub mod key;\n\/\/\/ `KeyBuilder` can easily build keys with many meta values.\npub mod keybuilder;\n\/\/\/ Trait to read values from a key.\npub mod readable;\n\/\/\/ A wrapper Trait to make keys readonly.\npub mod readonly;\n\/\/\/ Trait to write values to a key.\npub mod writeable;\n\/\/\/ `KeySet` is a set of keys.\npub mod keyset;\n\/\/\/ General methods to access the Key database.\npub mod kdb;\n\npub use self::key::{BinaryKey, StringKey, MetaIter, NameIter, KeyNameInvalidError, KeyNameReadOnlyError, KeyNotFoundError};\npub use self::keybuilder::KeyBuilder;\npub use self::readable::ReadableKey;\npub use self::readonly::ReadOnly;\npub use self::writeable::WriteableKey;\npub use self::keyset::{KeySet, ReadOnlyStringKeyIter, StringKeyIter, Cursor, LookupOption};\npub use self::kdb::{KDB, KDBError};\n<commit_msg>rust: Improve the crate documentation<commit_after>\/\/! # Elektra\n\/\/! Safe bindings for [libelektra](https:\/\/www.libelektra.org).\n\/\/!\n\/\/! See the [project's readme](https:\/\/master.libelektra.org\/src\/bindings\/rust) for an introduction and examples.\n\/\/!\n\/\/! The crate consists of three major parts.\n\/\/!\n\/\/! - The [keys](key\/index.html) that encapsulate name, value and metainfo\n\/\/! - A [`KeySet`](keyset\/index.html) holds a set of `StringKey`s, since these are the most common type of key\n\/\/! - [`KDB`](kdb\/index.html) allows access to the persistent key database by reading or writing `KeySet`s\n\/\/!\n\/\/! 
Refer to the documentation of the modules to learn more about each.\n\nextern crate bitflags;\nextern crate elektra_sys;\n\npub mod key;\npub mod keybuilder;\n\/\/\/ Trait to read values from a key.\npub mod readable;\n\/\/\/ A wrapper Trait to make keys readonly.\npub mod readonly;\n\/\/\/ Trait to write values to a key.\npub mod writeable;\npub mod keyset;\npub mod kdb;\n\npub use self::key::{BinaryKey, StringKey, MetaIter, NameIter, KeyNameInvalidError, KeyNameReadOnlyError, KeyNotFoundError};\npub use self::keybuilder::KeyBuilder;\npub use self::readable::ReadableKey;\npub use self::readonly::ReadOnly;\npub use self::writeable::WriteableKey;\npub use self::keyset::{KeySet, ReadOnlyStringKeyIter, StringKeyIter, Cursor, LookupOption};\npub use self::kdb::{KDB, KDBError};\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test<commit_after>use super::{Boundary, EdgeKey, FaceKey, Mesh};\nuse crate::shapes::{Shape, StateOfShape};\nuse crate::StrError;\nuse russell_lab::Vector;\n\n\/\/\/ Holds data to evaluate normal vectors at edge or face\n#[derive(Clone, Debug)]\npub struct NormalVector {\n \/\/\/ Holds the Shape data for a given boundary edge or face\n pub shape: Shape,\n\n \/\/\/ Holds the state for the Shape data corresponding to an edge or face\n pub state: StateOfShape,\n\n \/\/\/ Holds the normal vector output from the evaluate function\n pub value: Vector,\n}\n\nimpl NormalVector {\n \/\/\/ Allocates data to compute normal vector at edge\n pub fn at_edge(mesh: &Mesh, boundary: &Boundary, edge_key: EdgeKey) -> Result<Self, StrError> {\n const GEO_NDIM: usize = 1;\n let edge = match boundary.edges.get(&edge_key) {\n Some(e) => e,\n None => return Err(\"edge_key is not present in boundary\"),\n };\n let shape = Shape::new(mesh.space_ndim, GEO_NDIM, edge.points.len())?;\n let state = StateOfShape::new(\n GEO_NDIM,\n &edge\n .points\n .iter()\n .map(|id| mesh.points[*id].coords.clone())\n .collect::<Vec<_>>(),\n )?;\n Ok(NormalVector {\n shape,\n state,\n value: Vector::new(mesh.space_ndim),\n })\n }\n\n \/\/\/ Allocates data to compute normal vector at face\n pub fn at_face(mesh: &Mesh, boundary: &Boundary, face_key: FaceKey) -> Result<Self, StrError> {\n const GEO_NDIM: usize = 2;\n let face = match boundary.faces.get(&face_key) {\n Some(e) => e,\n None => return Err(\"face_key is not present in boundary\"),\n };\n let shape = Shape::new(mesh.space_ndim, GEO_NDIM, face.points.len())?;\n let state = StateOfShape::new(\n GEO_NDIM,\n &face\n .points\n .iter()\n .map(|id| mesh.points[*id].coords.clone())\n .collect::<Vec<_>>(),\n )?;\n Ok(NormalVector {\n shape,\n state,\n value: Vector::new(mesh.space_ndim),\n })\n }\n\n \/\/\/ Evaluates boundary normal\n \/\/\/\n \/\/\/ # Input\n \/\/\/\n \/\/\/ * `ksi` -- ξ reference coordinate. The length of ξ must be equal to geo_ndim at least,\n \/\/\/ while lengths greater than geo_ndim are allowed (and ignored). 
In this way,\n \/\/\/ we can pass a slice with integration point data such as `[f64; 4]`.\n pub fn evaluate(&mut self, ksi: &[f64]) -> Result<(), StrError> {\n self.shape.calc_boundary_normal(&mut self.value, &mut self.state, ksi)\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n#[cfg(test)]\nmod tests {\n use super::NormalVector;\n use crate::mesh::{Boundary, Edge, Face, Samples, Shapes};\n use crate::StrError;\n use russell_chk::assert_vec_approx_eq;\n use std::collections::{HashMap, HashSet};\n\n #[test]\n fn capture_some_wrong_input() {\n let mesh = Samples::two_quads_horizontal();\n let boundary = Boundary {\n points: HashSet::new(),\n edges: HashMap::new(),\n faces: HashMap::new(),\n min: Vec::new(),\n max: Vec::new(),\n };\n assert_eq!(\n NormalVector::at_edge(&mesh, &boundary, (0, 1)).err(),\n Some(\"edge_key is not present in boundary\")\n );\n assert_eq!(\n NormalVector::at_face(&mesh, &boundary, (0, 1, 2, 3)).err(),\n Some(\"face_key is not present in boundary\")\n );\n\n let boundary = Boundary {\n points: HashSet::new(),\n edges: HashMap::from([((0, 1), Edge { points: Vec::new() })]),\n faces: HashMap::from([((0, 1, 2, 3), Face { points: Vec::new() })]),\n min: Vec::new(),\n max: Vec::new(),\n };\n assert_eq!(\n NormalVector::at_edge(&mesh, &boundary, (0, 1)).err(),\n Some(\"(geo_ndim,nnode) combination is invalid\")\n );\n assert_eq!(\n NormalVector::at_face(&mesh, &boundary, (0, 1, 2, 3)).err(),\n Some(\"(geo_ndim,nnode) combination is invalid\")\n );\n }\n\n #[test]\n fn at_edge_and_evaluate_work() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let mesh = Samples::two_quads_horizontal();\n let shapes = Shapes::new(&mesh)?;\n let boundary = Boundary::new(&mesh, &shapes)?;\n\n \/\/ the magnitude (l) of the normal vector should be equal to\n \/\/ 0.5 = edge_length \/ 2.0 where 2.0 corresponds to the edge_length in the reference system\n let l = 0.5; \/\/ magnitude of normal vector\n\n \/\/ edge keys and correct normal vectors (solutions)\n let edge_keys_and_solutions = [\n \/\/ bottom\n (vec![(0, 1), (1, 4)], [0.0, -l]),\n \/\/ right\n (vec![(4, 5)], [l, 0.0]),\n \/\/ top\n (vec![(2, 3), (2, 5)], [0.0, l]),\n \/\/ left\n (vec![(0, 3)], [-l, 0.0]),\n ];\n\n \/\/ check if the normal vectors at boundary are outward\n let ksi = &[0.0, 0.0];\n for (edge_keys, solution) in &edge_keys_and_solutions {\n for edge_key in edge_keys {\n let mut n = NormalVector::at_edge(&mesh, &boundary, *edge_key)?;\n n.evaluate(ksi)?;\n assert_vec_approx_eq!(n.value.as_data(), solution, 1e-15);\n }\n }\n\n Ok(())\n }\n\n #[test]\n fn at_face_and_evaluate_work() -> Result<(), StrError> {\n \/\/ 8-------------11\n \/\/ \/. \/|\n \/\/ \/ . \/ |\n \/\/ \/ . \/ |\n \/\/ \/ . \/ |\n \/\/ 9-------------10 |\n \/\/ | . | |\n \/\/ | 4---------|----7\n \/\/ | \/. | \/|\n \/\/ | \/ . | \/ |\n \/\/ | \/ . | \/ |\n \/\/ |\/ . |\/ |\n \/\/ 5--------------6 |\n \/\/ | . 
| |\n \/\/ | 0---------|----3\n \/\/ | \/ | \/\n \/\/ | \/ | \/\n \/\/ | \/ | \/\n \/\/ |\/ |\/\n \/\/ 1--------------2\n let mesh = Samples::two_cubes_vertical();\n let shapes = Shapes::new(&mesh)?;\n let boundary = Boundary::new(&mesh, &shapes)?;\n\n \/\/ the magnitude (l) of the normal vector should be equal to\n \/\/ 0.25 = face_area \/ 4.0 where 4.0 corresponds to the face_area in the reference system\n let l = 0.25; \/\/ magnitude of normal vector\n\n \/\/ face keys and correct normal vectors (solutions)\n let face_keys_and_solutions = [\n \/\/ behind\n (vec![(0, 3, 4, 7), (4, 7, 8, 11)], [-l, 0.0, 0.0]),\n \/\/ front\n (vec![(1, 2, 5, 6), (5, 6, 9, 10)], [l, 0.0, 0.0]),\n \/\/ left\n (vec![(0, 1, 4, 5), (4, 5, 8, 9)], [0.0, -l, 0.0]),\n \/\/ right\n (vec![(2, 3, 6, 7), (6, 7, 10, 11)], [0.0, l, 0.0]),\n \/\/ bottom\n (vec![(0, 1, 2, 3)], [0.0, 0.0, -l]),\n \/\/ top\n (vec![(8, 9, 10, 11)], [0.0, 0.0, l]),\n ];\n\n let ksi = &[0.0, 0.0, 0.0];\n for (face_keys, solution) in &face_keys_and_solutions {\n for face_key in face_keys {\n let mut n = NormalVector::at_face(&mesh, &boundary, *face_key)?;\n n.evaluate(ksi)?;\n assert_vec_approx_eq!(n.value.as_data(), solution, 1e-15);\n }\n }\n Ok(())\n }\n\n #[test]\n fn derive_works() {\n let mesh = Samples::two_quads_horizontal();\n let shapes = Shapes::new(&mesh).unwrap();\n let boundary = Boundary::new(&mesh, &shapes).unwrap();\n let n01 = NormalVector::at_edge(&mesh, &boundary, (0, 1)).unwrap();\n let n01_clone = n01.clone();\n assert_eq!(format!(\"{:?}\", n01), \"NormalVector { shape: Shape { class: Lin, kind: Lin2, space_ndim: 2, geo_ndim: 1, nnode: 2, nedge: 0, nface: 0, edge_nnode: 0, face_nnode: 0, face_nedge: 0, fn_interp: FnInterp, fn_deriv: FnDeriv }, state: StateOfShape { coords_transp: NumMatrix { nrow: 2, ncol: 2, data: [1.0, 0.0, 0.0, 0.0] }, coords_min: [0.0, 0.0], coords_max: [1.0, 0.0], interp: NumVector { data: [0.0, 0.0] }, deriv: NumMatrix { nrow: 2, ncol: 1, data: [0.0, 0.0] }, jacobian: NumMatrix { nrow: 2, ncol: 1, data: [0.0, 0.0] }, inv_jacobian: NumMatrix { nrow: 0, ncol: 0, data: [] }, gradient: NumMatrix { nrow: 0, ncol: 0, data: [] } }, value: NumVector { data: [0.0, 0.0] } }\");\n assert_eq!(n01_clone.value.dim(), n01.value.dim());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:art: Enable to show help when no args<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Parse arguments to get URL to shorten<commit_after>use std::env;\n\nfn usage() {\n println!(\"\\\nURL Shortener\n\nUsage:\n url_shortener <url>\");\n}\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n if args.len() != 2 {\n usage();\n return;\n }\n\n let _url = &args[1];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Complete API Response struct<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Functions for dealing with device mapper devices.\n\nuse std::fmt;\nuse std::fmt::Display;\n\nuse devicemapper::{DmNameBuf, ThinDevId};\n\nuse super::super::errors::EngineResult;\n\nuse super::super::super::engine::{FilesystemUuid, PoolUuid};\n\nconst FORMAT_VERSION: u16 = 1;\n\npub enum FlexRole {\n MetadataVolume,\n ThinData,\n ThinMeta,\n ThinMetaSpare,\n}\n\nimpl Display for FlexRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n FlexRole::MetadataVolume => write!(f, \"mdv\"),\n FlexRole::ThinData => write!(f, \"thindata\"),\n FlexRole::ThinMeta => write!(f, \"thinmeta\"),\n FlexRole::ThinMetaSpare => write!(f, \"thinmetaspare\"),\n }\n }\n}\n\npub enum ThinRole {\n Filesystem(FilesystemUuid),\n}\n\nimpl Display for ThinRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n ThinRole::Filesystem(uuid) => write!(f, \"fs-{}\", uuid.simple().to_string()),\n }\n }\n}\n\npub enum ThinPoolRole {\n Pool,\n}\n\nimpl Display for ThinPoolRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n ThinPoolRole::Pool => write!(f, \"pool\"),\n }\n }\n}\n\n\/\/\/ Format a name for the flex layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 72\npub fn format_flex_name(pool_uuid: &PoolUuid, role: FlexRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-flex-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display length < 72\")\n\n}\n\n\/\/\/ Format a name for the thin layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 50\npub fn format_thin_name(pool_uuid: &PoolUuid, role: ThinRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-thin-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display length < 50\")\n}\n\n\/\/\/ Format a name for the thin pool layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 81\npub fn format_thinpool_name(pool_uuid: &PoolUuid, role: ThinPoolRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-thinpool-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display_length < 81\")\n}\n\n\n#[derive(Debug)]\n\/\/\/ A pool of thindev ids, all unique.\npub struct ThinDevIdPool {\n next_id: u32,\n}\n\nimpl ThinDevIdPool {\n \/\/\/ Make a new pool from a possibly empty Vec of ids.\n \/\/\/ Does not verify the absence of duplicate ids.\n pub fn new_from_ids(ids: &[ThinDevId]) -> ThinDevIdPool {\n let max_id: Option<u32> = ids.into_iter().map(|x| (*x).into()).max();\n ThinDevIdPool { next_id: max_id.map(|x| x + 1).unwrap_or(0) }\n }\n\n \/\/\/ Get a new id for a thindev.\n \/\/\/ Returns an error if no thindev id can be constructed.\n \/\/ TODO: Improve this so that it is guaranteed only to fail if every 24 bit\n \/\/ number has been used.\n pub fn new_id(&mut self) -> EngineResult<ThinDevId> {\n let next_id = ThinDevId::new_u64((self.next_id) as u64)?;\n self.next_id += 1;\n Ok(next_id)\n }\n}\n<commit_msg>Fix cast-lossless clippy error<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Functions for dealing with device mapper devices.\n\nuse std::fmt;\nuse std::fmt::Display;\n\nuse devicemapper::{DmNameBuf, ThinDevId};\n\nuse super::super::errors::EngineResult;\n\nuse super::super::super::engine::{FilesystemUuid, PoolUuid};\n\nconst FORMAT_VERSION: u16 = 1;\n\npub enum FlexRole {\n MetadataVolume,\n ThinData,\n ThinMeta,\n ThinMetaSpare,\n}\n\nimpl Display for FlexRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n FlexRole::MetadataVolume => write!(f, \"mdv\"),\n FlexRole::ThinData => write!(f, \"thindata\"),\n FlexRole::ThinMeta => write!(f, \"thinmeta\"),\n FlexRole::ThinMetaSpare => write!(f, \"thinmetaspare\"),\n }\n }\n}\n\npub enum ThinRole {\n Filesystem(FilesystemUuid),\n}\n\nimpl Display for ThinRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n ThinRole::Filesystem(uuid) => write!(f, \"fs-{}\", uuid.simple().to_string()),\n }\n }\n}\n\npub enum ThinPoolRole {\n Pool,\n}\n\nimpl Display for ThinPoolRole {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n ThinPoolRole::Pool => write!(f, \"pool\"),\n }\n }\n}\n\n\/\/\/ Format a name for the flex layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 72\npub fn format_flex_name(pool_uuid: &PoolUuid, role: FlexRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-flex-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display length < 72\")\n\n}\n\n\/\/\/ Format a name for the thin layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 50\npub fn format_thin_name(pool_uuid: &PoolUuid, role: ThinRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-thin-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display length < 50\")\n}\n\n\/\/\/ Format a name for the thin pool layer.\n\/\/\/ Prerequisite: len(format!(\"{}\", FORMAT_VERSION)) < 81\npub fn format_thinpool_name(pool_uuid: &PoolUuid, role: ThinPoolRole) -> DmNameBuf {\n DmNameBuf::new(format!(\"stratis-{}-{}-thinpool-{}\",\n FORMAT_VERSION,\n pool_uuid.simple().to_string(),\n role))\n .expect(\"FORMAT_VERSION display_length < 81\")\n}\n\n\n#[derive(Debug)]\n\/\/\/ A pool of thindev ids, all unique.\npub struct ThinDevIdPool {\n next_id: u32,\n}\n\nimpl ThinDevIdPool {\n \/\/\/ Make a new pool from a possibly empty Vec of ids.\n \/\/\/ Does not verify the absence of duplicate ids.\n pub fn new_from_ids(ids: &[ThinDevId]) -> ThinDevIdPool {\n let max_id: Option<u32> = ids.into_iter().map(|x| (*x).into()).max();\n ThinDevIdPool { next_id: max_id.map(|x| x + 1).unwrap_or(0) }\n }\n\n \/\/\/ Get a new id for a thindev.\n \/\/\/ Returns an error if no thindev id can be constructed.\n \/\/ TODO: Improve this so that it is guaranteed only to fail if every 24 bit\n \/\/ number has been used.\n pub fn new_id(&mut self) -> EngineResult<ThinDevId> {\n let next_id = ThinDevId::new_u64(u64::from(self.next_id))?;\n self.next_id += 1;\n Ok(next_id)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>pub use alloc::boxed::*;\n\npub use core::cmp::max;\npub use core::cmp::min;\npub use core::clone::Clone;\npub use core::mem::size_of;\npub use core::mem::size_of_val;\npub use core::option::Option;\npub use core::ptr;\npub use core::sync::atomic::*;\n\npub use graphics::color::*;\npub use graphics::display::*;\npub use graphics::point::*;\npub use 
graphics::size::*;\npub use graphics::window::*;\n\npub use syscall::call::*;\n\nuse common::resource::{NoneResource, Resource, URL};\nuse common::string::String;\n\n#[allow(unused_variables)]\npub trait SessionItem {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> String {\n String::new()\n }\n\n fn open(&mut self, url: &URL) -> Box<Resource> {\n box NoneResource\n }\n}\n<commit_msg>Remove the other pub imports in programs::common while were at it<commit_after>pub use core::clone::Clone;\npub use core::option::Option;\npub use core::sync::atomic::*;\n\nuse alloc::boxed::*;\n\nuse common::resource::{NoneResource, Resource, URL};\nuse common::string::String;\n\n#[allow(unused_variables)]\npub trait SessionItem {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> String {\n String::new()\n }\n\n fn open(&mut self, url: &URL) -> Box<Resource> {\n box NoneResource\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Capture newline in line doc comment.<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A work queue for scheduling units of work across threads in a fork-join fashion.\n\/\/!\n\/\/! Data associated with queues is simply a pair of unsigned integers. It is expected that a\n\/\/! higher-level API on top of this could allow safe fork-join parallelism.\n\nuse task_state;\n\nuse native::task::NativeTaskBuilder;\nuse rand::{Rng, XorShiftRng};\nuse std::mem;\nuse std::rand::weak_rng;\nuse std::sync::atomics::{AtomicUint, SeqCst};\nuse std::sync::deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};\nuse std::task::TaskBuilder;\nuse libc::funcs::posix88::unistd::usleep;\n\n\/\/\/ A unit of work.\n\/\/\/\n\/\/\/ # Type parameters\n\/\/\/\n\/\/\/ - `QueueData`: global custom data for the entire work queue.\n\/\/\/ - `WorkData`: custom data specific to each unit of work.\npub struct WorkUnit<QueueData, WorkData> {\n \/\/\/ The function to execute.\n pub fun: extern \"Rust\" fn(WorkData, &mut WorkerProxy<QueueData, WorkData>),\n \/\/\/ Arbitrary data.\n pub data: WorkData,\n}\n\n\/\/\/ Messages from the supervisor to the worker.\nenum WorkerMsg<QueueData, WorkData> {\n \/\/\/ Tells the worker to start work.\n StartMsg(Worker<WorkUnit<QueueData, WorkData>>, *mut AtomicUint, *const QueueData),\n \/\/\/ Tells the worker to stop. 
It can be restarted again with a `StartMsg`.\n StopMsg,\n \/\/\/ Tells the worker thread to terminate.\n ExitMsg,\n}\n\n\/\/\/ Messages to the supervisor.\nenum SupervisorMsg<QueueData, WorkData> {\n FinishedMsg,\n ReturnDequeMsg(uint, Worker<WorkUnit<QueueData, WorkData>>),\n}\n\n\/\/\/ Information that the supervisor thread keeps about the worker threads.\nstruct WorkerInfo<QueueData, WorkData> {\n \/\/\/ The communication channel to the workers.\n chan: Sender<WorkerMsg<QueueData, WorkData>>,\n \/\/\/ The worker end of the deque, if we have it.\n deque: Option<Worker<WorkUnit<QueueData, WorkData>>>,\n \/\/\/ The thief end of the work-stealing deque.\n thief: Stealer<WorkUnit<QueueData, WorkData>>,\n}\n\n\/\/\/ Information specific to each worker thread that the thread keeps.\nstruct WorkerThread<QueueData, WorkData> {\n \/\/\/ The index of this worker.\n index: uint,\n \/\/\/ The communication port from the supervisor.\n port: Receiver<WorkerMsg<QueueData, WorkData>>,\n \/\/\/ The communication channel on which messages are sent to the supervisor.\n chan: Sender<SupervisorMsg<QueueData, WorkData>>,\n \/\/\/ The thief end of the work-stealing deque for all other workers.\n other_deques: Vec<Stealer<WorkUnit<QueueData, WorkData>>>,\n \/\/\/ The random number generator for this worker.\n rng: XorShiftRng,\n}\n\nstatic SPIN_COUNT: uint = 128;\n\nimpl<QueueData: Send, WorkData: Send> WorkerThread<QueueData, WorkData> {\n \/\/\/ The main logic. This function starts up the worker and listens for\n \/\/\/ messages.\n fn start(&mut self) {\n loop {\n \/\/ Wait for a start message.\n let (mut deque, ref_count, queue_data) = match self.port.recv() {\n StartMsg(deque, ref_count, queue_data) => (deque, ref_count, queue_data),\n StopMsg => fail!(\"unexpected stop message\"),\n ExitMsg => return,\n };\n\n \/\/ We're off!\n \/\/\n \/\/ FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.\n let mut back_off_sleep = 0 as u32;\n loop {\n \/\/ FIXME(pcwalton): Nasty workaround for the lack of labeled break\/continue\n \/\/ cross-crate.\n let mut work_unit = unsafe {\n mem::uninitialized()\n };\n match deque.pop() {\n Some(work) => work_unit = work,\n None => {\n \/\/ Become a thief.\n let mut i = 0;\n let mut should_continue = true;\n loop {\n let victim = (self.rng.next_u32() as uint) % self.other_deques.len();\n match self.other_deques.get_mut(victim).steal() {\n Empty | Abort => {\n \/\/ Continue.\n }\n Data(work) => {\n work_unit = work;\n back_off_sleep = 0 as u32;\n break\n }\n }\n\n if (i>100) {\n unsafe {usleep(back_off_sleep as u32)};\n back_off_sleep = back_off_sleep + 5;\n }\n if i == SPIN_COUNT {\n match self.port.try_recv() {\n Ok(StopMsg) => {\n should_continue = false;\n break\n }\n Ok(ExitMsg) => return,\n Ok(_) => fail!(\"unexpected message\"),\n _ => {}\n }\n\n i = 0\n } else {\n i += 1\n }\n }\n\n if !should_continue {\n break\n }\n }\n }\n\n \/\/ At this point, we have some work. Perform it.\n let mut proxy = WorkerProxy {\n worker: &mut deque,\n ref_count: ref_count,\n queue_data: queue_data,\n };\n (work_unit.fun)(work_unit.data, &mut proxy);\n\n \/\/ The work is done. Now decrement the count of outstanding work items. 
If this was\n \/\/ the last work unit in the queue, then send a message on the channel.\n unsafe {\n if (*ref_count).fetch_sub(1, SeqCst) == 1 {\n self.chan.send(FinishedMsg)\n }\n }\n }\n\n \/\/ Give the deque back to the supervisor.\n self.chan.send(ReturnDequeMsg(self.index, deque))\n }\n }\n}\n\n\/\/\/ A handle to the work queue that individual work units have.\npub struct WorkerProxy<'a, QueueData: 'a, WorkData: 'a> {\n worker: &'a mut Worker<WorkUnit<QueueData, WorkData>>,\n ref_count: *mut AtomicUint,\n queue_data: *const QueueData,\n}\n\nimpl<'a, QueueData: 'static, WorkData: Send> WorkerProxy<'a, QueueData, WorkData> {\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {\n unsafe {\n drop((*self.ref_count).fetch_add(1, SeqCst));\n }\n self.worker.push(work_unit);\n }\n\n \/\/\/ Retrieves the queue user data.\n #[inline]\n pub fn user_data<'a>(&'a self) -> &'a QueueData {\n unsafe {\n mem::transmute(self.queue_data)\n }\n }\n}\n\n\/\/\/ A work queue on which units of work can be submitted.\npub struct WorkQueue<QueueData, WorkData> {\n \/\/\/ Information about each of the workers.\n workers: Vec<WorkerInfo<QueueData, WorkData>>,\n \/\/\/ A port on which deques can be received from the workers.\n port: Receiver<SupervisorMsg<QueueData, WorkData>>,\n \/\/\/ The amount of work that has been enqueued.\n work_count: uint,\n \/\/\/ Arbitrary user data.\n pub data: QueueData,\n}\n\nimpl<QueueData: Send, WorkData: Send> WorkQueue<QueueData, WorkData> {\n \/\/\/ Creates a new work queue and spawns all the threads associated with\n \/\/\/ it.\n pub fn new(task_name: &'static str,\n state: task_state::TaskState,\n thread_count: uint,\n user_data: QueueData) -> WorkQueue<QueueData, WorkData> {\n \/\/ Set up data structures.\n let (supervisor_chan, supervisor_port) = channel();\n let (mut infos, mut threads) = (vec!(), vec!());\n for i in range(0, thread_count) {\n let (worker_chan, worker_port) = channel();\n let pool = BufferPool::new();\n let (worker, thief) = pool.deque();\n infos.push(WorkerInfo {\n chan: worker_chan,\n deque: Some(worker),\n thief: thief,\n });\n threads.push(WorkerThread {\n index: i,\n port: worker_port,\n chan: supervisor_chan.clone(),\n other_deques: vec!(),\n rng: weak_rng(),\n });\n }\n\n \/\/ Connect workers to one another.\n for i in range(0, thread_count) {\n for j in range(0, thread_count) {\n if i != j {\n threads.get_mut(i).other_deques.push(infos[j].thief.clone())\n }\n }\n assert!(threads[i].other_deques.len() == thread_count - 1)\n }\n\n \/\/ Spawn threads.\n for thread in threads.into_iter() {\n TaskBuilder::new().named(task_name).native().spawn(proc() {\n task_state::initialize(state | task_state::InWorker);\n let mut thread = thread;\n thread.start()\n })\n }\n\n WorkQueue {\n workers: infos,\n port: supervisor_port,\n work_count: 0,\n data: user_data,\n }\n }\n\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {\n match self.workers.get_mut(0).deque {\n None => {\n fail!(\"tried to push a block but we don't have the deque?!\")\n }\n Some(ref mut deque) => deque.push(work_unit),\n }\n self.work_count += 1\n }\n\n \/\/\/ Synchronously runs all the enqueued tasks and waits for them to complete.\n pub fn run(&mut self) {\n \/\/ Tell the workers to start.\n let mut work_count = AtomicUint::new(self.work_count);\n for worker in self.workers.iter_mut() {\n worker.chan.send(StartMsg(worker.deque.take().unwrap(), 
&mut work_count, &self.data))\n }\n\n \/\/ Wait for the work to finish.\n drop(self.port.recv());\n self.work_count = 0;\n\n \/\/ Tell everyone to stop.\n for worker in self.workers.iter() {\n worker.chan.send(StopMsg)\n }\n\n \/\/ Get our deques back.\n for _ in range(0, self.workers.len()) {\n match self.port.recv() {\n ReturnDequeMsg(index, deque) => self.workers.get_mut(index).deque = Some(deque),\n FinishedMsg => fail!(\"unexpected finished message!\"),\n }\n }\n }\n\n pub fn shutdown(&mut self) {\n for worker in self.workers.iter() {\n worker.chan.send(ExitMsg)\n }\n }\n}\n<commit_msg>Addressed code review comments.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A work queue for scheduling units of work across threads in a fork-join fashion.\n\/\/!\n\/\/! Data associated with queues is simply a pair of unsigned integers. It is expected that a\n\/\/! higher-level API on top of this could allow safe fork-join parallelism.\n\nuse task_state;\n\nuse native::task::NativeTaskBuilder;\nuse rand::{Rng, XorShiftRng};\nuse std::mem;\nuse std::rand::weak_rng;\nuse std::sync::atomics::{AtomicUint, SeqCst};\nuse std::sync::deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};\nuse std::task::TaskBuilder;\nuse libc::funcs::posix88::unistd::usleep;\n\n\/\/\/ A unit of work.\n\/\/\/\n\/\/\/ # Type parameters\n\/\/\/\n\/\/\/ - `QueueData`: global custom data for the entire work queue.\n\/\/\/ - `WorkData`: custom data specific to each unit of work.\npub struct WorkUnit<QueueData, WorkData> {\n \/\/\/ The function to execute.\n pub fun: extern \"Rust\" fn(WorkData, &mut WorkerProxy<QueueData, WorkData>),\n \/\/\/ Arbitrary data.\n pub data: WorkData,\n}\n\n\/\/\/ Messages from the supervisor to the worker.\nenum WorkerMsg<QueueData, WorkData> {\n \/\/\/ Tells the worker to start work.\n StartMsg(Worker<WorkUnit<QueueData, WorkData>>, *mut AtomicUint, *const QueueData),\n \/\/\/ Tells the worker to stop. 
It can be restarted again with a `StartMsg`.\n StopMsg,\n \/\/\/ Tells the worker thread to terminate.\n ExitMsg,\n}\n\n\/\/\/ Messages to the supervisor.\nenum SupervisorMsg<QueueData, WorkData> {\n FinishedMsg,\n ReturnDequeMsg(uint, Worker<WorkUnit<QueueData, WorkData>>),\n}\n\n\/\/\/ Information that the supervisor thread keeps about the worker threads.\nstruct WorkerInfo<QueueData, WorkData> {\n \/\/\/ The communication channel to the workers.\n chan: Sender<WorkerMsg<QueueData, WorkData>>,\n \/\/\/ The worker end of the deque, if we have it.\n deque: Option<Worker<WorkUnit<QueueData, WorkData>>>,\n \/\/\/ The thief end of the work-stealing deque.\n thief: Stealer<WorkUnit<QueueData, WorkData>>,\n}\n\n\/\/\/ Information specific to each worker thread that the thread keeps.\nstruct WorkerThread<QueueData, WorkData> {\n \/\/\/ The index of this worker.\n index: uint,\n \/\/\/ The communication port from the supervisor.\n port: Receiver<WorkerMsg<QueueData, WorkData>>,\n \/\/\/ The communication channel on which messages are sent to the supervisor.\n chan: Sender<SupervisorMsg<QueueData, WorkData>>,\n \/\/\/ The thief end of the work-stealing deque for all other workers.\n other_deques: Vec<Stealer<WorkUnit<QueueData, WorkData>>>,\n \/\/\/ The random number generator for this worker.\n rng: XorShiftRng,\n}\n\nstatic SPIN_COUNT: u32 = 128;\nstatic SPINS_UNTIL_BACKOFF: u32 = 100;\nstatic BACKOFF_INCREMENT_IN_US: u32 = 5;\n\nimpl<QueueData: Send, WorkData: Send> WorkerThread<QueueData, WorkData> {\n \/\/\/ The main logic. This function starts up the worker and listens for\n \/\/\/ messages.\n fn start(&mut self) {\n loop {\n \/\/ Wait for a start message.\n let (mut deque, ref_count, queue_data) = match self.port.recv() {\n StartMsg(deque, ref_count, queue_data) => (deque, ref_count, queue_data),\n StopMsg => fail!(\"unexpected stop message\"),\n ExitMsg => return,\n };\n\n let mut back_off_sleep = 0 as u32;\n\n \/\/ We're off!\n \/\/\n \/\/ FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.\n loop {\n \/\/ FIXME(pcwalton): Nasty workaround for the lack of labeled break\/continue\n \/\/ cross-crate.\n let mut work_unit = unsafe {\n mem::uninitialized()\n };\n match deque.pop() {\n Some(work) => work_unit = work,\n None => {\n \/\/ Become a thief.\n let mut i = 0;\n let mut should_continue = true;\n loop {\n let victim = (self.rng.next_u32() as uint) % self.other_deques.len();\n match self.other_deques.get_mut(victim).steal() {\n Empty | Abort => {\n \/\/ Continue.\n }\n Data(work) => {\n work_unit = work;\n back_off_sleep = 0 as u32;\n break\n }\n }\n\n if i > SPINS_UNTIL_BACKOFF {\n unsafe {\n usleep(back_off_sleep as u32);\n }\n back_off_sleep += BACKOFF_INCREMENT_IN_US;\n }\n\n if i == SPIN_COUNT {\n match self.port.try_recv() {\n Ok(StopMsg) => {\n should_continue = false;\n break\n }\n Ok(ExitMsg) => return,\n Ok(_) => fail!(\"unexpected message\"),\n _ => {}\n }\n\n i = 0\n } else {\n i += 1\n }\n }\n\n if !should_continue {\n break\n }\n }\n }\n\n \/\/ At this point, we have some work. Perform it.\n let mut proxy = WorkerProxy {\n worker: &mut deque,\n ref_count: ref_count,\n queue_data: queue_data,\n };\n (work_unit.fun)(work_unit.data, &mut proxy);\n\n \/\/ The work is done. Now decrement the count of outstanding work items. 
If this was\n \/\/ the last work unit in the queue, then send a message on the channel.\n unsafe {\n if (*ref_count).fetch_sub(1, SeqCst) == 1 {\n self.chan.send(FinishedMsg)\n }\n }\n }\n\n \/\/ Give the deque back to the supervisor.\n self.chan.send(ReturnDequeMsg(self.index, deque))\n }\n }\n}\n\n\/\/\/ A handle to the work queue that individual work units have.\npub struct WorkerProxy<'a, QueueData: 'a, WorkData: 'a> {\n worker: &'a mut Worker<WorkUnit<QueueData, WorkData>>,\n ref_count: *mut AtomicUint,\n queue_data: *const QueueData,\n}\n\nimpl<'a, QueueData: 'static, WorkData: Send> WorkerProxy<'a, QueueData, WorkData> {\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {\n unsafe {\n drop((*self.ref_count).fetch_add(1, SeqCst));\n }\n self.worker.push(work_unit);\n }\n\n \/\/\/ Retrieves the queue user data.\n #[inline]\n pub fn user_data<'a>(&'a self) -> &'a QueueData {\n unsafe {\n mem::transmute(self.queue_data)\n }\n }\n}\n\n\/\/\/ A work queue on which units of work can be submitted.\npub struct WorkQueue<QueueData, WorkData> {\n \/\/\/ Information about each of the workers.\n workers: Vec<WorkerInfo<QueueData, WorkData>>,\n \/\/\/ A port on which deques can be received from the workers.\n port: Receiver<SupervisorMsg<QueueData, WorkData>>,\n \/\/\/ The amount of work that has been enqueued.\n work_count: uint,\n \/\/\/ Arbitrary user data.\n pub data: QueueData,\n}\n\nimpl<QueueData: Send, WorkData: Send> WorkQueue<QueueData, WorkData> {\n \/\/\/ Creates a new work queue and spawns all the threads associated with\n \/\/\/ it.\n pub fn new(task_name: &'static str,\n state: task_state::TaskState,\n thread_count: uint,\n user_data: QueueData) -> WorkQueue<QueueData, WorkData> {\n \/\/ Set up data structures.\n let (supervisor_chan, supervisor_port) = channel();\n let (mut infos, mut threads) = (vec!(), vec!());\n for i in range(0, thread_count) {\n let (worker_chan, worker_port) = channel();\n let pool = BufferPool::new();\n let (worker, thief) = pool.deque();\n infos.push(WorkerInfo {\n chan: worker_chan,\n deque: Some(worker),\n thief: thief,\n });\n threads.push(WorkerThread {\n index: i,\n port: worker_port,\n chan: supervisor_chan.clone(),\n other_deques: vec!(),\n rng: weak_rng(),\n });\n }\n\n \/\/ Connect workers to one another.\n for i in range(0, thread_count) {\n for j in range(0, thread_count) {\n if i != j {\n threads.get_mut(i).other_deques.push(infos[j].thief.clone())\n }\n }\n assert!(threads[i].other_deques.len() == thread_count - 1)\n }\n\n \/\/ Spawn threads.\n for thread in threads.into_iter() {\n TaskBuilder::new().named(task_name).native().spawn(proc() {\n task_state::initialize(state | task_state::InWorker);\n let mut thread = thread;\n thread.start()\n })\n }\n\n WorkQueue {\n workers: infos,\n port: supervisor_port,\n work_count: 0,\n data: user_data,\n }\n }\n\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {\n match self.workers.get_mut(0).deque {\n None => {\n fail!(\"tried to push a block but we don't have the deque?!\")\n }\n Some(ref mut deque) => deque.push(work_unit),\n }\n self.work_count += 1\n }\n\n \/\/\/ Synchronously runs all the enqueued tasks and waits for them to complete.\n pub fn run(&mut self) {\n \/\/ Tell the workers to start.\n let mut work_count = AtomicUint::new(self.work_count);\n for worker in self.workers.iter_mut() {\n worker.chan.send(StartMsg(worker.deque.take().unwrap(), 
&mut work_count, &self.data))\n }\n\n \/\/ Wait for the work to finish.\n drop(self.port.recv());\n self.work_count = 0;\n\n \/\/ Tell everyone to stop.\n for worker in self.workers.iter() {\n worker.chan.send(StopMsg)\n }\n\n \/\/ Get our deques back.\n for _ in range(0, self.workers.len()) {\n match self.port.recv() {\n ReturnDequeMsg(index, deque) => self.workers.get_mut(index).deque = Some(deque),\n FinishedMsg => fail!(\"unexpected finished message!\"),\n }\n }\n }\n\n pub fn shutdown(&mut self) {\n for worker in self.workers.iter() {\n worker.chan.send(ExitMsg)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Parsing Issue (#319)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for simple case of #9197<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Foo {}\n\nfn foo<T: Foo + Foo>() {} \/\/~ ERROR `Foo` already appears in the list of bounds\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Do not use Store::retrieve_for_module() anymore here<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #70238 - petrochenkov:procmod, r=Centril<commit_after>\/\/ Out-of-line module is found on the filesystem if passed through a proc macro (issue #58818).\n\n\/\/ check-pass\n\/\/ aux-build:test-macros.rs\n\n#[macro_use]\nextern crate test_macros;\n\nmod outer {\n identity! { mod inner; }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a simple example to measuring performance of writing data<commit_after>extern crate da;\nextern crate env_logger;\nextern crate rand;\n\nuse std::path::Path;\n\nuse rand::{SeedableRng, XorShiftRng, Rng};\n\nuse da::storage::{LevelDB, LevelDBOptions};\nuse da::storage::{Map, MapExt};\n\n\/\/\/ usage \n\/\/\/ path - Directory where database is situated\n\/\/\/ count - Total amount of data items to write\n\/\/\/ data_len - Length of data chunk\n\nfn main() {\n ::std::env::set_var(\"RUST_LOG\", \"da=info\");\n\n let mut args = ::std::env::args();\n args.next();\n\n let path = args.next().unwrap();\n let count: usize = args.next().unwrap().parse().unwrap();\n let data_len: usize = args.next().unwrap().parse().unwrap();\n \/\/ TODO get them from command line\n let prefix = vec![1];\n let seed = [192, 168, 56, 1];\n\n let mut rng = XorShiftRng::from_seed(seed);\n let kv_generator = |_| {\n let mut v = vec![0; data_len];\n let mut k: Vec<u8> = vec![0; 32];\n\n rng.fill_bytes(&mut v);\n rng.fill_bytes(&mut k);\n (k, v)\n };\n\n let mut options = LevelDBOptions::new();\n options.create_if_missing = true;\n let mut db = LevelDB::new(&Path::new(&path), options).unwrap();\n\n let mut map = db.merkle_map(prefix); \n for item in (0..count).map(kv_generator) {\n map.put(&item.0, item.1.clone()).unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a test for #! 
comments<commit_after>#!\/usr\/bin\/env rustx\n\/\/ pp-exact\n\nimport io::println;\n\nfn main() { io::println(\"Hello World\"); }\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Configuration options for a single run of the servo application. Created\n\/\/! from command line arguments.\n\nuse azure::azure_hl::{BackendType, CairoBackend, CoreGraphicsBackend};\nuse azure::azure_hl::{CoreGraphicsAcceleratedBackend, Direct2DBackend, SkiaBackend};\nuse extra::getopts;\n\n\/\/\/ Global flags for Servo, currently set on the command line.\n#[deriving(Clone)]\npub struct Opts {\n \/\/\/ The initial URLs to load.\n urls: ~[~str],\n\n \/\/\/ The rendering backend to use (`-r`).\n render_backend: BackendType,\n\n \/\/\/ How many threads to use for CPU rendering (`-t`).\n \/\/\/\n \/\/\/ FIXME(pcwalton): This is not currently used. All rendering is sequential.\n n_render_threads: uint,\n\n \/\/\/ True to use CPU painting, false to use GPU painting via Skia-GL (`-c`). Note that\n \/\/\/ compositing is always done on the GPU.\n cpu_painting: bool,\n\n \/\/\/ The maximum size of each tile in pixels (`-s`).\n tile_size: uint,\n\n \/\/\/ `None` to disable the profiler or `Some` with an interval in seconds to enable it and cause\n \/\/\/ it to produce output on that interval (`-p`).\n profiler_period: Option<f64>,\n\n \/\/\/ True to exit after the page load (`-x`).\n exit_after_load: bool,\n\n output_file: Option<~str>,\n headless: bool,\n}\n\npub fn from_cmdline_args(args: &[~str]) -> Opts {\n let args = args.tail();\n\n let opts = ~[\n getopts::optflag(\"c\"), \/\/ CPU rendering\n getopts::optopt(\"o\"), \/\/ output file\n getopts::optopt(\"r\"), \/\/ rendering backend\n getopts::optopt(\"s\"), \/\/ size of tiles\n getopts::optopt(\"t\"), \/\/ threads to render with\n getopts::optflagopt(\"p\"), \/\/ profiler flag and output interval\n getopts::optflag(\"x\"), \/\/ exit after load flag\n getopts::optflag(\"z\"), \/\/ headless mode\n ];\n\n let opt_match = match getopts::getopts(args, opts) {\n Ok(m) => m,\n Err(f) => fail!(f.to_err_msg()),\n };\n\n let urls = if opt_match.free.is_empty() {\n fail!(~\"servo asks that you provide 1 or more URLs\")\n } else {\n opt_match.free.clone()\n };\n\n let render_backend = match opt_match.opt_str(\"r\") {\n Some(backend_str) => {\n if backend_str == ~\"direct2d\" {\n Direct2DBackend\n } else if backend_str == ~\"core-graphics\" {\n CoreGraphicsBackend\n } else if backend_str == ~\"core-graphics-accelerated\" {\n CoreGraphicsAcceleratedBackend\n } else if backend_str == ~\"cairo\" {\n CairoBackend\n } else if backend_str == ~\"skia\" {\n SkiaBackend\n } else {\n fail!(~\"unknown backend type\")\n }\n }\n None => SkiaBackend\n };\n\n let tile_size: uint = match opt_match.opt_str(\"s\") {\n Some(tile_size_str) => from_str(tile_size_str).unwrap(),\n None => 512,\n };\n\n let n_render_threads: uint = match opt_match.opt_str(\"t\") {\n Some(n_render_threads_str) => from_str(n_render_threads_str).unwrap(),\n None => 1, \/\/ FIXME: Number of cores.\n };\n\n \/\/ if only flag is present, default to 5 second period\n let profiler_period = do opt_match.opt_default(\"p\", \"5\").map |period| {\n from_str(period).unwrap()\n };\n\n let cpu_painting = opt_match.opt_present(\"c\");\n\n Opts {\n urls: urls,\n render_backend: render_backend,\n n_render_threads: 
n_render_threads,\n cpu_painting: cpu_painting,\n tile_size: tile_size,\n profiler_period: profiler_period,\n exit_after_load: opt_match.opt_present(\"x\"),\n output_file: opt_match.opt_str(\"o\"),\n headless: opt_match.opt_present(\"z\"),\n }\n}\n<commit_msg>Implemeting help message for servo browser app using groups::opts module.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Configuration options for a single run of the servo application. Created\n\/\/! from command line arguments.\n\nuse azure::azure_hl::{BackendType, CairoBackend, CoreGraphicsBackend};\nuse azure::azure_hl::{CoreGraphicsAcceleratedBackend, Direct2DBackend, SkiaBackend};\nuse extra::getopts::groups;\n\n\/\/\/ Global flags for Servo, currently set on the command line.\n#[deriving(Clone)]\npub struct Opts {\n \/\/\/ The initial URLs to load.\n urls: ~[~str],\n\n \/\/\/ The rendering backend to use (`-r`).\n render_backend: BackendType,\n\n \/\/\/ How many threads to use for CPU rendering (`-t`).\n \/\/\/\n \/\/\/ FIXME(pcwalton): This is not currently used. All rendering is sequential.\n n_render_threads: uint,\n\n \/\/\/ True to use CPU painting, false to use GPU painting via Skia-GL (`-c`). Note that\n \/\/\/ compositing is always done on the GPU.\n cpu_painting: bool,\n\n \/\/\/ The maximum size of each tile in pixels (`-s`).\n tile_size: uint,\n\n \/\/\/ `None` to disable the profiler or `Some` with an interval in seconds to enable it and cause\n \/\/\/ it to produce output on that interval (`-p`).\n profiler_period: Option<f64>,\n\n \/\/\/ True to exit after the page load (`-x`).\n exit_after_load: bool,\n\n output_file: Option<~str>,\n headless: bool,\n}\n\nfn print_usage(opts: &[groups::OptGroup]) {\n let message = format!(\"Usage: .\/servo [ options ... 
] [URL]\\n\\twhere options include\");\n println(groups::usage(message, opts));\n}\n\npub fn from_cmdline_args(args: &[~str]) -> Opts {\n let args = args.tail();\n\n let opts = ~[\n groups::optflag(\"c\", \"cpu\", \"CPU rendering\"),\n groups::optopt(\"o\", \"output\", \"Output file\", \"output.png\"),\n groups::optopt(\"r\", \"rendering\", \"Rendering backend\", \"direct2d|core-graphics|core-graphics-accelerated|cairo|skia.\"),\n groups::optopt(\"s\", \"size\", \"Size of tiles\", \"512\"),\n groups::optopt(\"t\", \"threads\", \"Number of render threads\", \"1\"),\n groups::optflagopt(\"p\", \"profile\", \"Profiler flag and output interval\", \"10\"),\n groups::optflag(\"x\", \"exit\", \"Exit after load flag\"),\n groups::optflag(\"z\", \"headless\", \"Headless mode\"),\n groups::optflag(\"h\", \"help\", \"Print this message\")\n ];\n\n let opt_match = match groups::getopts(args, opts) {\n Ok(m) => m,\n Err(f) => fail!(f.to_err_msg()),\n };\n\n if opt_match.opt_present(\"h\") || opt_match.opt_present(\"help\") {\n print_usage(opts);\n \/\/ TODO: how to return a null struct and let the caller know that\n \/\/ it should abort?\n fail!(\"\")\n };\n\n let urls = if opt_match.free.is_empty() {\n print_usage(opts);\n fail!(~\"servo asks that you provide 1 or more URLs\")\n } else {\n opt_match.free.clone()\n };\n\n let render_backend = match opt_match.opt_str(\"r\") {\n Some(backend_str) => {\n if backend_str == ~\"direct2d\" {\n Direct2DBackend\n } else if backend_str == ~\"core-graphics\" {\n CoreGraphicsBackend\n } else if backend_str == ~\"core-graphics-accelerated\" {\n CoreGraphicsAcceleratedBackend\n } else if backend_str == ~\"cairo\" {\n CairoBackend\n } else if backend_str == ~\"skia\" {\n SkiaBackend\n } else {\n fail!(~\"unknown backend type\")\n }\n }\n None => SkiaBackend\n };\n\n let tile_size: uint = match opt_match.opt_str(\"s\") {\n Some(tile_size_str) => from_str(tile_size_str).unwrap(),\n None => 512,\n };\n\n let n_render_threads: uint = match opt_match.opt_str(\"t\") {\n Some(n_render_threads_str) => from_str(n_render_threads_str).unwrap(),\n None => 1, \/\/ FIXME: Number of cores.\n };\n\n \/\/ if only flag is present, default to 5 second period\n let profiler_period = do opt_match.opt_default(\"p\", \"5\").map |period| {\n from_str(period).unwrap()\n };\n\n let cpu_painting = opt_match.opt_present(\"c\");\n\n Opts {\n urls: urls,\n render_backend: render_backend,\n n_render_threads: n_render_threads,\n cpu_painting: cpu_painting,\n tile_size: tile_size,\n profiler_period: profiler_period,\n exit_after_load: opt_match.opt_present(\"x\"),\n output_file: opt_match.opt_str(\"o\"),\n headless: opt_match.opt_present(\"z\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added closure example<commit_after>struct MyStruct {\n a: u32,\n b: u32,\n}\n \nfn main() {\n let mut vec : Vec<MyStruct> = Vec::new();\n \n vec.push(MyStruct{a: 1, b: 2});\n vec.push(MyStruct{a: 2, b: 2});\n \n for e in vec.into_iter().filter(|x| x.a > 1) {\n println!(\"{}\", e.a);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #2633. 
Xfailed.<commit_after>\/\/ Currently segfaults\n\/\/ xfail-test\nclass cat {\n let mut meow: fn@();\n new() { self.meow = fn@() { #error(\"meow\"); };}\n}\n\ntype kitty_info = {kitty: cat};\n\n\/\/ Code compiles and runs successfully if we add a + before the first arg\nfn nyan(kitty: cat, _kitty_info: kitty_info) {\n kitty.meow();\n}\n\nfn main() {\n let mut kitty = cat();\n nyan(kitty, {kitty: kitty});\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue #9446.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-test\n\nstruct Wrapper(~str);\n\nimpl Wrapper {\n pub fn new(wrapped: ~str) -> Wrapper {\n Wrapper(wrapped)\n }\n\n pub fn say_hi(&self) {\n println(fmt!(\"hello %s\", **self));\n }\n}\n\nimpl Drop for Wrapper {\n fn drop(&mut self) {}\n}\n\nfn main() {\n {\n \/\/ This runs without complaint.\n let x = Wrapper::new(~\"Bob\");\n x.say_hi();\n }\n {\n \/\/ This fails to compile, circa 0.8-89-gc635fba.\n \/\/ error: internal compiler error: drop_ty_immediate: non-box ty\n Wrapper::new(~\"Bob\").say_hi();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::cmp;\nuse std::default::Default;\nuse std::fmt;\nuse std::str;\n\n\/\/\/ Represents a quality used in quality values.\n\/\/\/\n\/\/\/ Can be created with the `q` function.\n\/\/\/\n\/\/\/ # Implementation notes\n\/\/\/\n\/\/\/ The quality value is defined as a number between 0 and 1 with three decimal places. This means\n\/\/\/ there are 1000 possible values. Since floating point numbers are not exact and the smallest\n\/\/\/ floating point data type (`f32`) consumes four bytes, hyper uses an `u16` value to store the\n\/\/\/ quality internally. For performance reasons you may set quality directly to a value between\n\/\/\/ 0 and 1000 e.g. 
`Quality(532)` matches the quality `q=0.532`.\n\/\/\/\n\/\/\/ [RFC7231 Section 5.3.1](https:\/\/tools.ietf.org\/html\/rfc7231#section-5.3.1)\n\/\/\/ gives more information on quality values in HTTP header fields.\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\npub struct Quality(pub u16);\n\nimpl fmt::Display for Quality {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self.0 {\n 1000 => Ok(()),\n 0 => f.write_str(\"; q=0\"),\n x => write!(f, \"; q=0.{}\", format!(\"{:03}\", x).trim_right_matches('0'))\n }\n }\n}\n\nimpl Default for Quality {\n fn default() -> Quality {\n Quality(1000)\n }\n}\n\n\/\/\/ Represents an item with a quality value as defined in\n\/\/\/ [RFC7231](https:\/\/tools.ietf.org\/html\/rfc7231#section-5.3.1).\n#[derive(Clone, PartialEq, Debug)]\npub struct QualityItem<T> {\n \/\/\/ The actual contents of the field.\n pub item: T,\n \/\/\/ The quality (client or server preference) for the value.\n pub quality: Quality,\n}\n\nimpl<T> QualityItem<T> {\n \/\/\/ Creates a new `QualityItem` from an item and a quality.\n \/\/\/ The item can be of any type.\n \/\/\/ The quality should be a value in the range [0, 1].\n pub fn new(item: T, quality: Quality) -> QualityItem<T> {\n QualityItem {\n item: item,\n quality: quality\n }\n }\n}\n\nimpl<T: PartialEq> cmp::PartialOrd for QualityItem<T> {\n fn partial_cmp(&self, other: &QualityItem<T>) -> Option<cmp::Ordering> {\n self.quality.partial_cmp(&other.quality)\n }\n}\n\nimpl<T: fmt::Display> fmt::Display for QualityItem<T> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}{}\", self.item, format!(\"{}\", self.quality))\n }\n}\n\nimpl<T: str::FromStr> str::FromStr for QualityItem<T> {\n type Err = ::Error;\n fn from_str(s: &str) -> ::Result<QualityItem<T>> {\n \/\/ Set defaults used if parsing fails.\n let mut raw_item = s;\n let mut quality = 1f32;\n\n let parts: Vec<&str> = s.rsplitn(2, ';').map(|x| x.trim()).collect();\n if parts.len() == 2 {\n let start = &parts[0][0..2];\n if start == \"q=\" || start == \"Q=\" {\n let q_part = &parts[0][2..parts[0].len()];\n if q_part.len() > 5 {\n return Err(::Error::Header);\n }\n match q_part.parse::<f32>() {\n Ok(q_value) => {\n if 0f32 <= q_value && q_value <= 1f32 {\n quality = q_value;\n raw_item = parts[1];\n } else {\n return Err(::Error::Header);\n }\n },\n Err(_) => return Err(::Error::Header),\n }\n }\n }\n match raw_item.parse::<T>() {\n \/\/ we already checked above that the quality is within range\n Ok(item) => Ok(QualityItem::new(item, from_f32(quality))),\n Err(_) => return Err(::Error::Header),\n }\n }\n}\n\nfn from_f32(f: f32) -> Quality {\n \/\/ this function is only used internally. A check that `f` is within range\n \/\/ should be done before calling this method. 
Just in case, this\n \/\/ debug_assert should catch if we were forgetful\n debug_assert!(f >= 0f32 && f <= 1f32, \"q value must be between 0.0 and 1.0\");\n Quality((f * 1000f32) as u16)\n}\n\n\/\/\/ Convinience function to wrap a value in a `QualityItem`\n\/\/\/ Sets `q` to the default 1.0\npub fn qitem<T>(item: T) -> QualityItem<T> {\n QualityItem::new(item, Default::default())\n}\n\n\/\/\/ Convenience function to create a `Quality` fromt a float.\npub fn q(f: f32) -> Quality {\n assert!(f >= 0f32 && f <= 1f32, \"q value must be between 0.0 and 1.0\");\n from_f32(f)\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::super::encoding::*;\n\n #[test]\n fn test_quality_item_show1() {\n let x = qitem(Chunked);\n assert_eq!(format!(\"{}\", x), \"chunked\");\n }\n #[test]\n fn test_quality_item_show2() {\n let x = QualityItem::new(Chunked, Quality(1));\n assert_eq!(format!(\"{}\", x), \"chunked; q=0.001\");\n }\n #[test]\n fn test_quality_item_show3() {\n \/\/ Custom value\n let x = QualityItem{\n item: EncodingExt(\"identity\".to_owned()),\n quality: Quality(500),\n };\n assert_eq!(format!(\"{}\", x), \"identity; q=0.5\");\n }\n\n #[test]\n fn test_quality_item_from_str1() {\n let x: ::Result<QualityItem<Encoding>> = \"chunked\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Chunked, quality: Quality(1000), });\n }\n #[test]\n fn test_quality_item_from_str2() {\n let x: ::Result<QualityItem<Encoding>> = \"chunked; q=1\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Chunked, quality: Quality(1000), });\n }\n #[test]\n fn test_quality_item_from_str3() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.5\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Gzip, quality: Quality(500), });\n }\n #[test]\n fn test_quality_item_from_str4() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.273\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Gzip, quality: Quality(273), });\n }\n #[test]\n fn test_quality_item_from_str5() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.2739999\".parse();\n assert!(x.is_err());\n }\n #[test]\n fn test_quality_item_from_str6() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=2\".parse();\n assert!(x.is_err());\n }\n #[test]\n fn test_quality_item_ordering() {\n let x: QualityItem<Encoding> = \"gzip; q=0.5\".parse().ok().unwrap();\n let y: QualityItem<Encoding> = \"gzip; q=0.273\".parse().ok().unwrap();\n let comparision_result: bool = x.gt(&y);\n assert!(comparision_result)\n }\n\n #[test]\n fn test_quality() {\n assert_eq!(q(0.5), Quality(500));\n }\n\n #[test]\n fn test_quality2() {\n assert_eq!(format!(\"{}\", q(0.0)), \"; q=0\");\n }\n\n #[test]\n #[should_panic]\n fn test_quality_invalid() {\n q(-1.0);\n }\n\n #[test]\n #[should_panic]\n fn test_quality_invalid2() {\n q(2.0);\n }\n}\n<commit_msg>test(windows): Ignore #[should_panic] tests on 32-bit msvc<commit_after>use std::cmp;\nuse std::default::Default;\nuse std::fmt;\nuse std::str;\n\n\/\/\/ Represents a quality used in quality values.\n\/\/\/\n\/\/\/ Can be created with the `q` function.\n\/\/\/\n\/\/\/ # Implementation notes\n\/\/\/\n\/\/\/ The quality value is defined as a number between 0 and 1 with three decimal places. This means\n\/\/\/ there are 1000 possible values. Since floating point numbers are not exact and the smallest\n\/\/\/ floating point data type (`f32`) consumes four bytes, hyper uses an `u16` value to store the\n\/\/\/ quality internally. 
For performance reasons you may set quality directly to a value between\n\/\/\/ 0 and 1000 e.g. `Quality(532)` matches the quality `q=0.532`.\n\/\/\/\n\/\/\/ [RFC7231 Section 5.3.1](https:\/\/tools.ietf.org\/html\/rfc7231#section-5.3.1)\n\/\/\/ gives more information on quality values in HTTP header fields.\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\npub struct Quality(pub u16);\n\nimpl fmt::Display for Quality {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self.0 {\n 1000 => Ok(()),\n 0 => f.write_str(\"; q=0\"),\n x => write!(f, \"; q=0.{}\", format!(\"{:03}\", x).trim_right_matches('0'))\n }\n }\n}\n\nimpl Default for Quality {\n fn default() -> Quality {\n Quality(1000)\n }\n}\n\n\/\/\/ Represents an item with a quality value as defined in\n\/\/\/ [RFC7231](https:\/\/tools.ietf.org\/html\/rfc7231#section-5.3.1).\n#[derive(Clone, PartialEq, Debug)]\npub struct QualityItem<T> {\n \/\/\/ The actual contents of the field.\n pub item: T,\n \/\/\/ The quality (client or server preference) for the value.\n pub quality: Quality,\n}\n\nimpl<T> QualityItem<T> {\n \/\/\/ Creates a new `QualityItem` from an item and a quality.\n \/\/\/ The item can be of any type.\n \/\/\/ The quality should be a value in the range [0, 1].\n pub fn new(item: T, quality: Quality) -> QualityItem<T> {\n QualityItem {\n item: item,\n quality: quality\n }\n }\n}\n\nimpl<T: PartialEq> cmp::PartialOrd for QualityItem<T> {\n fn partial_cmp(&self, other: &QualityItem<T>) -> Option<cmp::Ordering> {\n self.quality.partial_cmp(&other.quality)\n }\n}\n\nimpl<T: fmt::Display> fmt::Display for QualityItem<T> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}{}\", self.item, format!(\"{}\", self.quality))\n }\n}\n\nimpl<T: str::FromStr> str::FromStr for QualityItem<T> {\n type Err = ::Error;\n fn from_str(s: &str) -> ::Result<QualityItem<T>> {\n \/\/ Set defaults used if parsing fails.\n let mut raw_item = s;\n let mut quality = 1f32;\n\n let parts: Vec<&str> = s.rsplitn(2, ';').map(|x| x.trim()).collect();\n if parts.len() == 2 {\n let start = &parts[0][0..2];\n if start == \"q=\" || start == \"Q=\" {\n let q_part = &parts[0][2..parts[0].len()];\n if q_part.len() > 5 {\n return Err(::Error::Header);\n }\n match q_part.parse::<f32>() {\n Ok(q_value) => {\n if 0f32 <= q_value && q_value <= 1f32 {\n quality = q_value;\n raw_item = parts[1];\n } else {\n return Err(::Error::Header);\n }\n },\n Err(_) => return Err(::Error::Header),\n }\n }\n }\n match raw_item.parse::<T>() {\n \/\/ we already checked above that the quality is within range\n Ok(item) => Ok(QualityItem::new(item, from_f32(quality))),\n Err(_) => return Err(::Error::Header),\n }\n }\n}\n\nfn from_f32(f: f32) -> Quality {\n \/\/ this function is only used internally. A check that `f` is within range\n \/\/ should be done before calling this method. 
Just in case, this\n \/\/ debug_assert should catch if we were forgetful\n debug_assert!(f >= 0f32 && f <= 1f32, \"q value must be between 0.0 and 1.0\");\n Quality((f * 1000f32) as u16)\n}\n\n\/\/\/ Convinience function to wrap a value in a `QualityItem`\n\/\/\/ Sets `q` to the default 1.0\npub fn qitem<T>(item: T) -> QualityItem<T> {\n QualityItem::new(item, Default::default())\n}\n\n\/\/\/ Convenience function to create a `Quality` fromt a float.\npub fn q(f: f32) -> Quality {\n assert!(f >= 0f32 && f <= 1f32, \"q value must be between 0.0 and 1.0\");\n from_f32(f)\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::super::encoding::*;\n\n #[test]\n fn test_quality_item_show1() {\n let x = qitem(Chunked);\n assert_eq!(format!(\"{}\", x), \"chunked\");\n }\n #[test]\n fn test_quality_item_show2() {\n let x = QualityItem::new(Chunked, Quality(1));\n assert_eq!(format!(\"{}\", x), \"chunked; q=0.001\");\n }\n #[test]\n fn test_quality_item_show3() {\n \/\/ Custom value\n let x = QualityItem{\n item: EncodingExt(\"identity\".to_owned()),\n quality: Quality(500),\n };\n assert_eq!(format!(\"{}\", x), \"identity; q=0.5\");\n }\n\n #[test]\n fn test_quality_item_from_str1() {\n let x: ::Result<QualityItem<Encoding>> = \"chunked\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Chunked, quality: Quality(1000), });\n }\n #[test]\n fn test_quality_item_from_str2() {\n let x: ::Result<QualityItem<Encoding>> = \"chunked; q=1\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Chunked, quality: Quality(1000), });\n }\n #[test]\n fn test_quality_item_from_str3() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.5\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Gzip, quality: Quality(500), });\n }\n #[test]\n fn test_quality_item_from_str4() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.273\".parse();\n assert_eq!(x.unwrap(), QualityItem{ item: Gzip, quality: Quality(273), });\n }\n #[test]\n fn test_quality_item_from_str5() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=0.2739999\".parse();\n assert!(x.is_err());\n }\n #[test]\n fn test_quality_item_from_str6() {\n let x: ::Result<QualityItem<Encoding>> = \"gzip; q=2\".parse();\n assert!(x.is_err());\n }\n #[test]\n fn test_quality_item_ordering() {\n let x: QualityItem<Encoding> = \"gzip; q=0.5\".parse().ok().unwrap();\n let y: QualityItem<Encoding> = \"gzip; q=0.273\".parse().ok().unwrap();\n let comparision_result: bool = x.gt(&y);\n assert!(comparision_result)\n }\n\n #[test]\n fn test_quality() {\n assert_eq!(q(0.5), Quality(500));\n }\n\n #[test]\n fn test_quality2() {\n assert_eq!(format!(\"{}\", q(0.0)), \"; q=0\");\n }\n\n #[test]\n #[should_panic] \/\/ FIXME - 32-bit msvc unwinding broken\n #[cfg_attr(all(target_arch=\"x86\", target_env=\"msvc\"), ignore)]\n fn test_quality_invalid() {\n q(-1.0);\n }\n\n #[test]\n #[should_panic] \/\/ FIXME - 32-bit msvc unwinding broken\n #[cfg_attr(all(target_arch=\"x86\", target_env=\"msvc\"), ignore)]\n fn test_quality_invalid2() {\n q(2.0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse abi::{ArgAttribute, ArgType, CastTarget, FnType, LayoutExt, PassMode, Reg, RegKind, Uniform};\nuse context::CodegenCx;\nuse rustc::ty::layout::{self, Size};\n\nfn extend_integer_width_mips(arg: &mut ArgType, bits: u64) {\n \/\/ Always sign extend u32 values on 64-bit mips\n if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {\n if let layout::Int(i, signed) = scalar.value {\n if !signed && i.size().bits() == 32 {\n if let PassMode::Direct(ref mut attrs) = arg.mode {\n attrs.set(ArgAttribute::SExt);\n return;\n }\n }\n }\n }\n\n arg.extend_integer_width_to(bits);\n}\n\nfn bits_to_int_reg(bits: u64) -> Reg {\n if bits <= 8 {\n Reg::i8()\n } else if bits <= 16 {\n Reg::i16()\n } else if bits <= 32 {\n Reg::i32()\n } else {\n Reg::i64()\n }\n}\n\nfn float_reg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &ArgType<'tcx>, i: usize) -> Option<Reg> {\n match ret.layout.field(cx, i).abi {\n layout::Abi::Scalar(ref scalar) => match scalar.value {\n layout::F32 => Some(Reg::f32()),\n layout::F64 => Some(Reg::f64()),\n _ => None\n },\n _ => None\n }\n}\n\nfn classify_ret_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &mut ArgType<'tcx>) {\n if !ret.layout.is_aggregate() {\n extend_integer_width_mips(ret, 64);\n return;\n }\n\n let size = ret.layout.size;\n let bits = size.bits();\n if bits <= 128 {\n \/\/ Unlike other architectures which return aggregates in registers, MIPS n64 limits the\n \/\/ use of float registers to structures (not unions) containing exactly one or two\n \/\/ float fields.\n\n if let layout::FieldPlacement::Arbitrary { .. } = ret.layout.fields {\n if ret.layout.fields.count() == 1 {\n if let Some(reg) = float_reg(cx, ret, 0) {\n ret.cast_to(reg);\n return;\n }\n } else if ret.layout.fields.count() == 2 {\n if let Some(reg0) = float_reg(cx, ret, 0) {\n if let Some(reg1) = float_reg(cx, ret, 1) {\n ret.cast_to(CastTarget::pair(reg0, reg1));\n return;\n }\n }\n }\n }\n\n \/\/ Cast to a uniform int structure\n ret.cast_to(Uniform {\n unit: bits_to_int_reg(bits),\n total: size\n });\n } else {\n ret.make_indirect();\n }\n}\n\nfn classify_arg_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>) {\n if !arg.layout.is_aggregate() {\n extend_integer_width_mips(arg, 64);\n return;\n }\n\n let dl = &cx.tcx.data_layout;\n let size = arg.layout.size;\n let mut prefix = [None; 8];\n let mut prefix_index = 0;\n\n match arg.layout.fields {\n layout::FieldPlacement::Array { .. } => {\n \/\/ Arrays are passed indirectly\n arg.make_indirect();\n return;\n }\n layout::FieldPlacement::Union(_) => {\n \/\/ Unions and are always treated as a series of 64-bit integer chunks\n },\n layout::FieldPlacement::Arbitrary { .. 
} => {\n \/\/ Structures are split up into a series of 64-bit integer chunks, but any aligned\n \/\/ doubles not part of another aggregate are passed as floats.\n let mut last_offset = Size::from_bytes(0);\n\n for i in 0..arg.layout.fields.count() {\n let field = arg.layout.field(cx, i);\n let offset = arg.layout.fields.offset(i);\n\n \/\/ We only care about aligned doubles\n if let layout::Abi::Scalar(ref scalar) = field.abi {\n if let layout::F64 = scalar.value {\n if offset.is_abi_aligned(dl.f64_align) {\n \/\/ Insert enough integers to cover [last_offset, offset)\n assert!(last_offset.is_abi_aligned(dl.f64_align));\n for _ in 0..((offset - last_offset).bits() \/ 64)\n .min((prefix.len() - prefix_index) as u64) {\n\n prefix[prefix_index] = Some(RegKind::Integer);\n prefix_index += 1;\n }\n\n if prefix_index == prefix.len() {\n break;\n }\n\n prefix[prefix_index] = Some(RegKind::Float);\n prefix_index += 1;\n last_offset = offset + Reg::f64().size;\n }\n }\n }\n }\n }\n };\n\n \/\/ Extract first 8 chunks as the prefix\n let rest_size = size - Size::from_bytes(8) * prefix_index as u64;\n arg.cast_to(CastTarget {\n prefix: prefix,\n prefix_chunk: Size::from_bytes(8),\n rest: Uniform { unit: Reg::i64(), total: rest_size }\n });\n}\n\npub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tcx>) {\n if !fty.ret.is_ignore() {\n classify_ret_ty(cx, &mut fty.ret);\n }\n\n for arg in &mut fty.args {\n if arg.is_ignore() { continue; }\n classify_arg_ty(cx, arg);\n }\n}\n<commit_msg> rustc_trans: fix small aggregate returns for big-endian mips64 FFI<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse abi::{ArgAttribute, ArgType, CastTarget, FnType, LayoutExt, PassMode, Reg, RegKind, Uniform};\nuse context::CodegenCx;\nuse rustc::ty::layout::{self, Size};\n\nfn extend_integer_width_mips(arg: &mut ArgType, bits: u64) {\n \/\/ Always sign extend u32 values on 64-bit mips\n if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {\n if let layout::Int(i, signed) = scalar.value {\n if !signed && i.size().bits() == 32 {\n if let PassMode::Direct(ref mut attrs) = arg.mode {\n attrs.set(ArgAttribute::SExt);\n return;\n }\n }\n }\n }\n\n arg.extend_integer_width_to(bits);\n}\n\nfn float_reg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &ArgType<'tcx>, i: usize) -> Option<Reg> {\n match ret.layout.field(cx, i).abi {\n layout::Abi::Scalar(ref scalar) => match scalar.value {\n layout::F32 => Some(Reg::f32()),\n layout::F64 => Some(Reg::f64()),\n _ => None\n },\n _ => None\n }\n}\n\nfn classify_ret_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &mut ArgType<'tcx>) {\n if !ret.layout.is_aggregate() {\n extend_integer_width_mips(ret, 64);\n return;\n }\n\n let size = ret.layout.size;\n let bits = size.bits();\n if bits <= 128 {\n \/\/ Unlike other architectures which return aggregates in registers, MIPS n64 limits the\n \/\/ use of float registers to structures (not unions) containing exactly one or two\n \/\/ float fields.\n\n if let layout::FieldPlacement::Arbitrary { .. 
} = ret.layout.fields {\n if ret.layout.fields.count() == 1 {\n if let Some(reg) = float_reg(cx, ret, 0) {\n ret.cast_to(reg);\n return;\n }\n } else if ret.layout.fields.count() == 2 {\n if let Some(reg0) = float_reg(cx, ret, 0) {\n if let Some(reg1) = float_reg(cx, ret, 1) {\n ret.cast_to(CastTarget::pair(reg0, reg1));\n return;\n }\n }\n }\n }\n\n \/\/ Cast to a uniform int structure\n ret.cast_to(Uniform {\n unit: Reg::i64(),\n total: size\n });\n } else {\n ret.make_indirect();\n }\n}\n\nfn classify_arg_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>) {\n if !arg.layout.is_aggregate() {\n extend_integer_width_mips(arg, 64);\n return;\n }\n\n let dl = &cx.tcx.data_layout;\n let size = arg.layout.size;\n let mut prefix = [None; 8];\n let mut prefix_index = 0;\n\n match arg.layout.fields {\n layout::FieldPlacement::Array { .. } => {\n \/\/ Arrays are passed indirectly\n arg.make_indirect();\n return;\n }\n layout::FieldPlacement::Union(_) => {\n \/\/ Unions and are always treated as a series of 64-bit integer chunks\n },\n layout::FieldPlacement::Arbitrary { .. } => {\n \/\/ Structures are split up into a series of 64-bit integer chunks, but any aligned\n \/\/ doubles not part of another aggregate are passed as floats.\n let mut last_offset = Size::from_bytes(0);\n\n for i in 0..arg.layout.fields.count() {\n let field = arg.layout.field(cx, i);\n let offset = arg.layout.fields.offset(i);\n\n \/\/ We only care about aligned doubles\n if let layout::Abi::Scalar(ref scalar) = field.abi {\n if let layout::F64 = scalar.value {\n if offset.is_abi_aligned(dl.f64_align) {\n \/\/ Insert enough integers to cover [last_offset, offset)\n assert!(last_offset.is_abi_aligned(dl.f64_align));\n for _ in 0..((offset - last_offset).bits() \/ 64)\n .min((prefix.len() - prefix_index) as u64) {\n\n prefix[prefix_index] = Some(RegKind::Integer);\n prefix_index += 1;\n }\n\n if prefix_index == prefix.len() {\n break;\n }\n\n prefix[prefix_index] = Some(RegKind::Float);\n prefix_index += 1;\n last_offset = offset + Reg::f64().size;\n }\n }\n }\n }\n }\n };\n\n \/\/ Extract first 8 chunks as the prefix\n let rest_size = size - Size::from_bytes(8) * prefix_index as u64;\n arg.cast_to(CastTarget {\n prefix: prefix,\n prefix_chunk: Size::from_bytes(8),\n rest: Uniform { unit: Reg::i64(), total: rest_size }\n });\n}\n\npub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tcx>) {\n if !fty.ret.is_ignore() {\n classify_ret_ty(cx, &mut fty.ret);\n }\n\n for arg in &mut fty.args {\n if arg.is_ignore() { continue; }\n classify_arg_ty(cx, arg);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse ty;\n\nuse rustc_data_structures::indexed_vec::Idx;\nuse serialize::{self, Encoder, Decoder};\n\nuse std::fmt;\nuse std::u32;\n\nnewtype_index!(CrateNum\n {\n derive[Debug]\n ENCODABLE = custom\n\n \/\/\/ Item definitions in the currently-compiled crate would have the CrateNum\n \/\/\/ LOCAL_CRATE in their DefId.\n const LOCAL_CRATE = 0,\n\n \/\/\/ Virtual crate for builtin macros\n \/\/ FIXME(jseyfried): this is also used for custom derives until proc-macro crates get\n \/\/ `CrateNum`s.\n const BUILTIN_MACROS_CRATE = u32::MAX,\n\n \/\/\/ A CrateNum value that indicates that something is wrong.\n const INVALID_CRATE = u32::MAX - 1,\n });\n\nimpl CrateNum {\n pub fn new(x: usize) -> CrateNum {\n assert!(x < (u32::MAX as usize));\n CrateNum(x as u32)\n }\n\n pub fn from_u32(x: u32) -> CrateNum {\n CrateNum(x)\n }\n\n pub fn as_usize(&self) -> usize {\n self.0 as usize\n }\n\n pub fn as_u32(&self) -> u32 {\n self.0\n }\n\n pub fn as_def_id(&self) -> DefId { DefId { krate: *self, index: CRATE_DEF_INDEX } }\n}\n\nimpl fmt::Display for CrateNum {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.0, f)\n }\n}\n\nimpl serialize::UseSpecializedEncodable for CrateNum {\n fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_u32(self.0)\n }\n}\n\nimpl serialize::UseSpecializedDecodable for CrateNum {\n fn default_decode<D: Decoder>(d: &mut D) -> Result<CrateNum, D::Error> {\n d.read_u32().map(CrateNum)\n }\n}\n\n\/\/\/ A DefIndex is an index into the hir-map for a crate, identifying a\n\/\/\/ particular definition. It should really be considered an interned\n\/\/\/ shorthand for a particular DefPath.\n\/\/\/\n\/\/\/ At the moment we are allocating the numerical values of DefIndexes into two\n\/\/\/ ranges: the \"low\" range (starting at zero) and the \"high\" range (starting at\n\/\/\/ DEF_INDEX_HI_START). This allows us to allocate the DefIndexes of all\n\/\/\/ item-likes (Items, TraitItems, and ImplItems) into one of these ranges and\n\/\/\/ consequently use a simple array for lookup tables keyed by DefIndex and\n\/\/\/ known to be densely populated. 
This is especially important for the HIR map.\n\/\/\/\n\/\/\/ Since the DefIndex is mostly treated as an opaque ID, you probably\n\/\/\/ don't have to care about these ranges.\nnewtype_index!(DefIndex\n {\n DEBUG_FORMAT = custom,\n\n \/\/\/ The start of the \"high\" range of DefIndexes.\n const DEF_INDEX_HI_START = 1 << 31,\n\n \/\/\/ The crate root is always assigned index 0 by the AST Map code,\n \/\/\/ thanks to `NodeCollector::new`.\n const CRATE_DEF_INDEX = 0,\n });\n\nimpl fmt::Debug for DefIndex {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f,\n \"DefIndex({}:{})\",\n self.address_space().index(),\n self.as_array_index())\n }\n}\n\nimpl DefIndex {\n #[inline]\n pub fn from_u32(x: u32) -> DefIndex {\n DefIndex(x)\n }\n\n #[inline]\n pub fn as_usize(&self) -> usize {\n self.0 as usize\n }\n\n #[inline]\n pub fn as_u32(&self) -> u32 {\n self.0\n }\n\n #[inline]\n pub fn address_space(&self) -> DefIndexAddressSpace {\n if self.0 < DEF_INDEX_HI_START.0 {\n DefIndexAddressSpace::Low\n } else {\n DefIndexAddressSpace::High\n }\n }\n\n \/\/\/ Converts this DefIndex into a zero-based array index.\n \/\/\/ This index is the offset within the given \"range\" of the DefIndex,\n \/\/\/ that is, if the DefIndex is part of the \"high\" range, the resulting\n \/\/\/ index will be (DefIndex - DEF_INDEX_HI_START).\n #[inline]\n pub fn as_array_index(&self) -> usize {\n (self.0 & !DEF_INDEX_HI_START.0) as usize\n }\n\n pub fn from_array_index(i: usize, address_space: DefIndexAddressSpace) -> DefIndex {\n DefIndex::new(address_space.start() + i)\n }\n}\n\n#[derive(Copy, Clone, Eq, PartialEq, Hash)]\npub enum DefIndexAddressSpace {\n Low = 0,\n High = 1,\n}\n\nimpl DefIndexAddressSpace {\n #[inline]\n pub fn index(&self) -> usize {\n *self as usize\n }\n\n #[inline]\n pub fn start(&self) -> usize {\n self.index() * DEF_INDEX_HI_START.as_usize()\n }\n}\n\n\/\/\/ A DefId identifies a particular *definition*, by combining a crate\n\/\/\/ index and a def index.\n#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)]\npub struct DefId {\n pub krate: CrateNum,\n pub index: DefIndex,\n}\n\nimpl fmt::Debug for DefId {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"DefId {{ krate: {:?}, index: {:?}\",\n self.krate, self.index)?;\n\n ty::tls::with_opt(|opt_tcx| {\n if let Some(tcx) = opt_tcx {\n write!(f, \" => {}\", tcx.def_path_debug_str(*self))?;\n }\n Ok(())\n })?;\n\n write!(f, \" }}\")\n }\n}\n\n\nimpl DefId {\n \/\/\/ Make a local `DefId` with the given index.\n pub fn local(index: DefIndex) -> DefId {\n DefId { krate: LOCAL_CRATE, index: index }\n }\n\n pub fn is_local(&self) -> bool {\n self.krate == LOCAL_CRATE\n }\n}\n<commit_msg>incr.comp.: Implement UseSpecializedXXcodable for DefIndex and DefId.<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse ty;\n\nuse rustc_data_structures::indexed_vec::Idx;\nuse serialize::{self, Encoder, Decoder, Decodable, Encodable};\n\nuse std::fmt;\nuse std::u32;\n\nnewtype_index!(CrateNum\n {\n derive[Debug]\n ENCODABLE = custom\n\n \/\/\/ Item definitions in the currently-compiled crate would have the CrateNum\n \/\/\/ LOCAL_CRATE in their DefId.\n const LOCAL_CRATE = 0,\n\n \/\/\/ Virtual crate for builtin macros\n \/\/ FIXME(jseyfried): this is also used for custom derives until proc-macro crates get\n \/\/ `CrateNum`s.\n const BUILTIN_MACROS_CRATE = u32::MAX,\n\n \/\/\/ A CrateNum value that indicates that something is wrong.\n const INVALID_CRATE = u32::MAX - 1,\n });\n\nimpl CrateNum {\n pub fn new(x: usize) -> CrateNum {\n assert!(x < (u32::MAX as usize));\n CrateNum(x as u32)\n }\n\n pub fn from_u32(x: u32) -> CrateNum {\n CrateNum(x)\n }\n\n pub fn as_usize(&self) -> usize {\n self.0 as usize\n }\n\n pub fn as_u32(&self) -> u32 {\n self.0\n }\n\n pub fn as_def_id(&self) -> DefId { DefId { krate: *self, index: CRATE_DEF_INDEX } }\n}\n\nimpl fmt::Display for CrateNum {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.0, f)\n }\n}\n\nimpl serialize::UseSpecializedEncodable for CrateNum {\n fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_u32(self.0)\n }\n}\n\nimpl serialize::UseSpecializedDecodable for CrateNum {\n fn default_decode<D: Decoder>(d: &mut D) -> Result<CrateNum, D::Error> {\n d.read_u32().map(CrateNum)\n }\n}\n\n\/\/\/ A DefIndex is an index into the hir-map for a crate, identifying a\n\/\/\/ particular definition. It should really be considered an interned\n\/\/\/ shorthand for a particular DefPath.\n\/\/\/\n\/\/\/ At the moment we are allocating the numerical values of DefIndexes into two\n\/\/\/ ranges: the \"low\" range (starting at zero) and the \"high\" range (starting at\n\/\/\/ DEF_INDEX_HI_START). This allows us to allocate the DefIndexes of all\n\/\/\/ item-likes (Items, TraitItems, and ImplItems) into one of these ranges and\n\/\/\/ consequently use a simple array for lookup tables keyed by DefIndex and\n\/\/\/ known to be densely populated. 
This is especially important for the HIR map.\n\/\/\/\n\/\/\/ Since the DefIndex is mostly treated as an opaque ID, you probably\n\/\/\/ don't have to care about these ranges.\nnewtype_index!(DefIndex\n {\n DEBUG_FORMAT = custom,\n\n \/\/\/ The start of the \"high\" range of DefIndexes.\n const DEF_INDEX_HI_START = 1 << 31,\n\n \/\/\/ The crate root is always assigned index 0 by the AST Map code,\n \/\/\/ thanks to `NodeCollector::new`.\n const CRATE_DEF_INDEX = 0,\n });\n\nimpl fmt::Debug for DefIndex {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f,\n \"DefIndex({}:{})\",\n self.address_space().index(),\n self.as_array_index())\n }\n}\n\nimpl DefIndex {\n #[inline]\n pub fn from_u32(x: u32) -> DefIndex {\n DefIndex(x)\n }\n\n #[inline]\n pub fn as_usize(&self) -> usize {\n self.0 as usize\n }\n\n #[inline]\n pub fn as_u32(&self) -> u32 {\n self.0\n }\n\n #[inline]\n pub fn address_space(&self) -> DefIndexAddressSpace {\n if self.0 < DEF_INDEX_HI_START.0 {\n DefIndexAddressSpace::Low\n } else {\n DefIndexAddressSpace::High\n }\n }\n\n \/\/\/ Converts this DefIndex into a zero-based array index.\n \/\/\/ This index is the offset within the given \"range\" of the DefIndex,\n \/\/\/ that is, if the DefIndex is part of the \"high\" range, the resulting\n \/\/\/ index will be (DefIndex - DEF_INDEX_HI_START).\n #[inline]\n pub fn as_array_index(&self) -> usize {\n (self.0 & !DEF_INDEX_HI_START.0) as usize\n }\n\n pub fn from_array_index(i: usize, address_space: DefIndexAddressSpace) -> DefIndex {\n DefIndex::new(address_space.start() + i)\n }\n}\n\nimpl serialize::UseSpecializedEncodable for DefIndex {\n #[inline]\n fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_u32(self.0)\n }\n}\n\nimpl serialize::UseSpecializedDecodable for DefIndex {\n #[inline]\n fn default_decode<D: Decoder>(d: &mut D) -> Result<DefIndex, D::Error> {\n d.read_u32().map(DefIndex)\n }\n}\n\n#[derive(Copy, Clone, Eq, PartialEq, Hash)]\npub enum DefIndexAddressSpace {\n Low = 0,\n High = 1,\n}\n\nimpl DefIndexAddressSpace {\n #[inline]\n pub fn index(&self) -> usize {\n *self as usize\n }\n\n #[inline]\n pub fn start(&self) -> usize {\n self.index() * DEF_INDEX_HI_START.as_usize()\n }\n}\n\n\/\/\/ A DefId identifies a particular *definition*, by combining a crate\n\/\/\/ index and a def index.\n#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)]\npub struct DefId {\n pub krate: CrateNum,\n pub index: DefIndex,\n}\n\nimpl fmt::Debug for DefId {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"DefId {{ krate: {:?}, index: {:?}\",\n self.krate, self.index)?;\n\n ty::tls::with_opt(|opt_tcx| {\n if let Some(tcx) = opt_tcx {\n write!(f, \" => {}\", tcx.def_path_debug_str(*self))?;\n }\n Ok(())\n })?;\n\n write!(f, \" }}\")\n }\n}\n\nimpl DefId {\n \/\/\/ Make a local `DefId` with the given index.\n pub fn local(index: DefIndex) -> DefId {\n DefId { krate: LOCAL_CRATE, index: index }\n }\n\n pub fn is_local(&self) -> bool {\n self.krate == LOCAL_CRATE\n }\n}\n\nimpl serialize::UseSpecializedEncodable for DefId {\n #[inline]\n fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n let DefId {\n krate,\n index,\n } = *self;\n\n krate.encode(s)?;\n index.encode(s)\n }\n}\n\nimpl serialize::UseSpecializedDecodable for DefId {\n #[inline]\n fn default_decode<D: Decoder>(d: &mut D) -> Result<DefId, D::Error> {\n let krate = CrateNum::decode(d)?;\n let index = DefIndex::decode(d)?;\n\n Ok(DefId {\n 
krate,\n index\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Re-added ResponseTime example.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>testsuite: test for fixed issue. Closes #7580.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Issue #7580\n\n\/\/ error-pattern:fail works\n#[feature(globs)];\n\nuse std::*;\n\nfn main() {\n str::with_capacity(10); \/\/ avoid an unused import message\n\n fail!(\"fail works\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #62016 - JohnTitor:add-test-for-issue-27697, r=alexcrichton<commit_after>\/\/ run-pass\n\nuse std::ops::Deref;\n\ntrait MyTrait {\n fn do_something(&self);\n fn as_str(&self) -> &str;\n}\n\nimpl Deref for dyn MyTrait {\n type Target = str;\n fn deref(&self) -> &Self::Target {\n self.as_str()\n }\n}\n\nfn trait_object_does_something(t: &dyn MyTrait) {\n t.do_something()\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add regression test for #120<commit_after>#![feature(static_recursion)]\n\nstruct S(&'static S);\nstatic S1: S = S(&S2);\nstatic S2: S = S(&S1);\n\nfn main() {\n let p: *const S = S2.0;\n let q: *const S = &S1;\n assert_eq!(p, q);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>FileWatcher now started within run()<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagtag;\nextern crate libimagutil;\n\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagnotes::note::Note;\nuse libimagutil::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-notes\";\n let version = &version!()[..];\n let about = \"Note taking helper\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.err().unwrap());\n exit(1);\n }\n };\n\n debug!(\"Hello. 
Logging was just enabled\");\n debug!(\"I already set up the Runtime object and build the commandline interface parser.\");\n debug!(\"Lets get rollin' ...\");\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn create(rt: &Runtime) {\n let name = rt.cli()\n .subcommand_matches(\"create\")\n .unwrap() \/\/ we already know it is there\n .value_of(\"name\")\n .unwrap(); \/\/ enforced by clap\n\n Note::new(rt.store(), String::from(name), String::new())\n .map_err(|e| trace_error(&e))\n .map(|note| {\n \/\/ call editor now...\n });\n}\n\nfn delete(rt: &Runtime) {\n let name = rt.cli()\n .subcommand_matches(\"delete\")\n .unwrap() \/\/ we already know it is there\n .value_of(\"name\")\n .unwrap(); \/\/ enforced by clap\n\n Note::delete(rt.store(), String::from(name))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"));\n}\n\nfn edit(rt: &Runtime) {\n unimplemented!()\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.err().unwrap());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name().map(|name| println!(\"{}\", name));\n }\n}\n\n<commit_msg>Move cli-name-extracting into helper function<commit_after>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagtag;\nextern crate libimagutil;\n\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagnotes::note::Note;\nuse libimagutil::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-notes\";\n let version = &version!()[..];\n let about = \"Note taking helper\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.err().unwrap());\n exit(1);\n }\n };\n\n debug!(\"Hello. 
Logging was just enabled\");\n debug!(\"I already set up the Runtime object and build the commandline interface parser.\");\n debug!(\"Lets get rollin' ...\");\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn name_from_cli(rt: &Runtime, subcmd: &str) -> String {\n rt.cli().subcommand_matches(subcmd).unwrap().value_of(\"name\").map(String::from).unwrap()\n}\n\nfn create(rt: &Runtime) {\n Note::new(rt.store(), String::from(name_from_cli(rt, \"create\")), String::new())\n .map_err(|e| trace_error(&e))\n .map(|note| {\n \/\/ call editor now...\n });\n}\n\nfn delete(rt: &Runtime) {\n Note::delete(rt.store(), String::from(name_from_cli(rt, \"delete\")))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"));\n}\n\nfn edit(rt: &Runtime) {\n unimplemented!()\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.err().unwrap());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name().map(|name| println!(\"{}\", name));\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Revert the ordering of planet and atmosphere rendering as the green-screen atmosphere problem has gone away<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Do not explicitly create a closure in calc_table_size<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>bigpack: Add a 'kind' argument to compress_dir_to_big<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
Adapts the `solicit`-provided HTTP\/2 implementation into the `HttpMessage` API.\n\nuse std::io::{self, Write, Read, Cursor};\nuse std::net::Shutdown;\nuse std::ascii::AsciiExt;\nuse std::mem;\nuse std::time::Duration;\n\nuse http::{\n Protocol,\n HttpMessage,\n RequestHead,\n ResponseHead,\n RawStatus,\n};\nuse net::{NetworkStream, NetworkConnector};\nuse net::{HttpConnector, HttpStream};\nuse url::Position as UrlPosition;\nuse header::Headers;\n\nuse header;\nuse version;\n\nuse solicit::http::Header as Http2Header;\nuse solicit::http::HttpScheme;\nuse solicit::http::HttpError as Http2Error;\nuse solicit::http::transport::TransportStream;\nuse solicit::http::client::{ClientStream, HttpConnect, HttpConnectError, write_preface};\nuse solicit::client::SimpleClient;\n\nuse httparse;\n\n\/\/\/ A trait alias representing all types that are both `NetworkStream` and `Clone`.\npub trait CloneableStream: NetworkStream + Clone {}\nimpl<S: NetworkStream + Clone> CloneableStream for S {}\n\n\/\/\/ A newtype wrapping any `CloneableStream` in order to provide an implementation of a\n\/\/\/ `TransportSream` trait for all types that are a `CloneableStream`.\n#[derive(Clone)]\nstruct Http2Stream<S: CloneableStream>(S);\n\nimpl<S> Write for Http2Stream<S> where S: CloneableStream {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.0.write(buf)\n }\n #[inline]\n fn flush(&mut self) -> io::Result<()> {\n self.0.flush()\n }\n}\n\nimpl<S> Read for Http2Stream<S> where S: CloneableStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n}\n\nimpl<S> TransportStream for Http2Stream<S> where S: CloneableStream {\n fn try_split(&self) -> Result<Http2Stream<S>, io::Error> {\n Ok(self.clone())\n }\n\n fn close(&mut self) -> Result<(), io::Error> {\n self.0.close(Shutdown::Both)\n }\n}\n\n\/\/\/ A helper struct that implements the `HttpConnect` trait from the `solicit` crate.\n\/\/\/\n\/\/\/ This is used by the `Http2Protocol` when it needs to create a new `SimpleClient`.\nstruct Http2Connector<S> where S: CloneableStream {\n stream: S,\n scheme: HttpScheme,\n host: String,\n}\n\n#[derive(Debug)]\nstruct Http2ConnectError(io::Error);\n\nimpl ::std::fmt::Display for Http2ConnectError {\n fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n write!(fmt, \"HTTP\/2 connect error: {}\", (self as &::std::error::Error).description())\n }\n}\n\nimpl ::std::error::Error for Http2ConnectError {\n fn description(&self) -> &str {\n self.0.description()\n }\n\n fn cause(&self) -> Option<&::std::error::Error> {\n self.0.cause()\n }\n}\n\nimpl HttpConnectError for Http2ConnectError {}\n\nimpl From<io::Error> for Http2ConnectError {\n fn from(e: io::Error) -> Http2ConnectError { Http2ConnectError(e) }\n}\n\nimpl<S> HttpConnect for Http2Connector<S> where S: CloneableStream {\n \/\/\/ The type of the underlying transport stream that the `HttpConnection`s\n \/\/\/ produced by this `HttpConnect` implementation will be based on.\n type Stream = Http2Stream<S>;\n \/\/\/ The type of the error that can be produced by trying to establish the\n \/\/\/ connection (i.e. 
calling the `connect` method).\n type Err = Http2ConnectError;\n\n \/\/\/ Establishes a network connection that can be used by HTTP\/2 connections.\n fn connect(mut self) -> Result<ClientStream<Self::Stream>, Self::Err> {\n try!(write_preface(&mut self.stream));\n Ok(ClientStream(Http2Stream(self.stream), self.scheme, self.host))\n }\n}\n\n\/\/\/ The `Protocol` implementation that provides HTTP\/2 messages (i.e. `Http2Message`).\npub struct Http2Protocol<C, S> where C: NetworkConnector<Stream=S> + Send + 'static,\n S: NetworkStream + Send + Clone {\n connector: C,\n}\n\nimpl<C, S> Http2Protocol<C, S> where C: NetworkConnector<Stream=S> + Send + 'static,\n S: NetworkStream + Send + Clone {\n \/\/\/ Create a new `Http2Protocol` that will use the given `NetworkConnector` to establish TCP\n \/\/\/ connections to the server.\n pub fn with_connector(connector: C) -> Http2Protocol<C, S> {\n Http2Protocol {\n connector: connector,\n }\n }\n\n \/\/\/ A private helper method that creates a new `SimpleClient` that will use the given\n \/\/\/ `NetworkStream` to communicate to the remote host.\n fn new_client(&self, stream: S, host: String, scheme: HttpScheme)\n -> ::Result<SimpleClient<Http2Stream<S>>> {\n Ok(try!(SimpleClient::with_connector(Http2Connector {\n stream: stream,\n scheme: scheme,\n host: host,\n })))\n }\n}\n\nimpl<C, S> Protocol for Http2Protocol<C, S> where C: NetworkConnector<Stream=S> + Send + 'static,\n S: NetworkStream + Send + Clone {\n fn new_message(&self, host: &str, port: u16, scheme: &str) -> ::Result<Box<HttpMessage>> {\n let stream = try!(self.connector.connect(host, port, scheme)).into();\n\n let scheme = match scheme {\n \"http\" => HttpScheme::Http,\n \"https\" => HttpScheme::Https,\n _ => return Err(From::from(Http2Error::from(\n io::Error::new(io::ErrorKind::Other, \"Invalid scheme\")))),\n };\n let client = try!(self.new_client(stream, host.into(), scheme));\n\n Ok(Box::new(Http2Message::with_client(client)))\n }\n}\n\n\/\/\/ Represents an HTTP\/2 request, described by a `RequestHead` and the body of the request.\n\/\/\/ A convenience struct only in use by the `Http2Message`.\n#[derive(Clone, Debug)]\nstruct Http2Request {\n head: RequestHead,\n body: Vec<u8>,\n}\n\n\/\/\/ Represents an HTTP\/2 response.\n\/\/\/ A convenience struct only in use by the `Http2Message`.\n#[derive(Clone, Debug)]\nstruct Http2Response {\n body: Cursor<Vec<u8>>,\n}\n\n\/\/\/ The enum tracks the state of the `Http2Message`.\nenum MessageState {\n \/\/\/ State corresponding to no message being set to outgoing yet.\n Idle,\n \/\/\/ State corresponding to an outgoing message being written out.\n Writing(Http2Request),\n \/\/\/ State corresponding to an incoming message being read.\n Reading(Http2Response),\n}\n\nimpl MessageState {\n fn take_request(&mut self) -> Option<Http2Request> {\n match *self {\n MessageState::Idle | MessageState::Reading(_) => return None,\n MessageState::Writing(_) => {},\n }\n let old = mem::replace(self, MessageState::Idle);\n\n match old {\n \/\/ These states are effectively unreachable since we already know the state\n MessageState::Idle | MessageState::Reading(_) => None,\n MessageState::Writing(req) => Some(req),\n }\n }\n}\n\n\/\/\/ An implementation of the `HttpMessage` trait for HTTP\/2.\n\/\/\/\n\/\/\/ Relies on the `solicit::http::SimpleClient` for HTTP\/2 communication. 
Adapts both outgoing and\n\/\/\/ incoming messages to the API that `hyper` expects in order to be able to use the message in\n\/\/\/ the `hyper::client` module.\npub struct Http2Message<S> where S: CloneableStream {\n client: SimpleClient<Http2Stream<S>>,\n state: MessageState,\n}\n\nimpl<S> ::std::fmt::Debug for Http2Message<S> where S: CloneableStream {\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {\n write!(f, \"<Http2Message>\")\n }\n}\n\nimpl<S> Http2Message<S> where S: CloneableStream {\n \/\/\/ Helper method that creates a new completely fresh `Http2Message`, which will use the given\n \/\/\/ `SimpleClient` for its HTTP\/2 communication.\n fn with_client(client: SimpleClient<Http2Stream<S>>) -> Http2Message<S> {\n Http2Message {\n client: client,\n state: MessageState::Idle,\n }\n }\n}\n\nimpl<S> Write for Http2Message<S> where S: CloneableStream {\n #[inline]\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n if let MessageState::Writing(ref mut req) = self.state {\n req.body.write(buf)\n } else {\n Err(io::Error::new(io::ErrorKind::Other,\n \"Not in a writable state\"))\n }\n }\n #[inline]\n fn flush(&mut self) -> io::Result<()> {\n if let MessageState::Writing(ref mut req) = self.state {\n req.body.flush()\n } else {\n Err(io::Error::new(io::ErrorKind::Other,\n \"Not in a writable state\"))\n }\n }\n}\n\nimpl<S> Read for Http2Message<S> where S: CloneableStream {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n if let MessageState::Reading(ref mut res) = self.state {\n res.body.read(buf)\n } else {\n Err(io::Error::new(io::ErrorKind::Other,\n \"Not in a readable state\"))\n }\n }\n}\n\n\/\/\/ A helper function that prepares the headers that should be sent in an HTTP\/2 message.\n\/\/\/\n\/\/\/ Adapts the `Headers` into a list of octet string pairs.\nfn prepare_headers(mut headers: Headers) -> Vec<Http2Header> {\n if headers.remove::<header::Connection>() {\n warn!(\"The `Connection` header is not valid for an HTTP\/2 connection.\");\n }\n let mut http2_headers: Vec<_> = headers.iter().filter_map(|h| {\n if h.is::<header::SetCookie>() {\n None\n } else {\n \/\/ HTTP\/2 header names MUST be lowercase.\n Some((h.name().to_ascii_lowercase().into_bytes(), h.value_string().into_bytes()))\n }\n }).collect();\n\n \/\/ Now separately add the cookies, as `hyper` considers `Set-Cookie` to be only a single\n \/\/ header, even in the face of multiple cookies being set.\n if let Some(set_cookie) = headers.get::<header::SetCookie>() {\n for cookie in set_cookie.iter() {\n http2_headers.push((b\"set-cookie\".to_vec(), cookie.to_string().into_bytes()));\n }\n }\n\n http2_headers\n}\n\n\/\/\/ A helper function that prepares the body for sending in an HTTP\/2 request.\n#[inline]\nfn prepare_body(body: Vec<u8>) -> Option<Vec<u8>> {\n if body.is_empty() {\n None\n } else {\n Some(body)\n }\n}\n\n\/\/\/ Parses a set of HTTP\/2 headers into a `hyper::header::Headers` struct.\nfn parse_headers(http2_headers: Vec<Http2Header>) -> ::Result<Headers> {\n \/\/ Adapt the header name from `Vec<u8>` to `String`, without making any copies.\n let mut headers = Vec::new();\n for (name, value) in http2_headers.into_iter() {\n let name = match String::from_utf8(name) {\n Ok(name) => name,\n Err(_) => return Err(From::from(Http2Error::MalformedResponse)),\n };\n headers.push((name, value));\n }\n\n let mut raw_headers = Vec::new();\n for &(ref name, ref value) in &headers {\n raw_headers.push(httparse::Header { name: &name, value: &value });\n }\n\n 
Headers::from_raw(&raw_headers)\n}\n\n\/\/\/ Parses the response, as returned by `solicit`, into a `ResponseHead` and the full response\n\/\/\/ body.\n\/\/\/\n\/\/\/ Returns them as a two-tuple.\nfn parse_response(response: ::solicit::http::Response) -> ::Result<(ResponseHead, Vec<u8>)> {\n let status = try!(response.status_code());\n let headers = try!(parse_headers(response.headers));\n Ok((ResponseHead {\n headers: headers,\n raw_status: RawStatus(status, \"\".into()),\n version: version::HttpVersion::Http20,\n }, response.body))\n}\n\nimpl<S> HttpMessage for Http2Message<S> where S: CloneableStream {\n fn set_outgoing(&mut self, head: RequestHead) -> ::Result<RequestHead> {\n match self.state {\n MessageState::Writing(_) | MessageState::Reading(_) => {\n return Err(From::from(Http2Error::from(\n io::Error::new(io::ErrorKind::Other,\n \"An outoging has already been set\"))));\n },\n MessageState::Idle => {},\n };\n self.state = MessageState::Writing(Http2Request {\n head: head.clone(),\n body: Vec::new(),\n });\n\n Ok(head)\n }\n\n fn get_incoming(&mut self) -> ::Result<ResponseHead> {\n \/\/ Prepare the request so that it can be passed off to the HTTP\/2 client.\n let request = match self.state.take_request() {\n None => {\n return Err(From::from(Http2Error::from(\n io::Error::new(io::ErrorKind::Other,\n \"No request in progress\"))));\n },\n Some(req) => req,\n };\n let (RequestHead { headers, method, url }, body) = (request.head, request.body);\n\n let method = method.as_ref().as_bytes();\n let path = url[UrlPosition::BeforePath..UrlPosition::AfterQuery].as_bytes();\n let extra_headers = prepare_headers(headers);\n let body = prepare_body(body);\n\n \/\/ Finally, everything is ready and we issue the request.\n let stream_id = try!(self.client.request(method, &path, &extra_headers, body));\n\n \/\/ Wait for the response\n let resp = try!(self.client.get_response(stream_id));\n\n \/\/ Now that the response is back, adapt it to the structs that hyper expects\/provides.\n let (head, body) = try!(parse_response(resp));\n\n \/\/ For now, since `solicit` has already read the full response, we just wrap the body into\n \/\/ a `Cursor` to allow for the public interface to support `io::Read`.\n let body = Cursor::new(body);\n\n \/\/ The body is saved so that it can be read out from the message.\n self.state = MessageState::Reading(Http2Response {\n body: body,\n });\n\n Ok(head)\n }\n\n fn has_body(&self) -> bool {\n true\n }\n\n #[inline]\n fn set_read_timeout(&self, _dur: Option<Duration>) -> io::Result<()> {\n Ok(())\n }\n\n #[inline]\n fn set_write_timeout(&self, _dur: Option<Duration>) -> io::Result<()> {\n Ok(())\n }\n\n #[inline]\n fn close_connection(&mut self) -> ::Result<()> {\n Ok(())\n }\n}\n\n\/\/\/ A convenience method that creates a default `Http2Protocol` that uses a `net::HttpConnector`\n\/\/\/ (which produces an `HttpStream` for the underlying transport layer).\n#[inline]\npub fn new_protocol() -> Http2Protocol<HttpConnector, HttpStream> {\n Http2Protocol::with_connector(HttpConnector)\n}\n\n#[cfg(test)]\nmod tests {\n use super::{Http2Protocol, prepare_headers, parse_headers, parse_response};\n\n use std::io::{Read};\n\n use mock::{MockHttp2Connector, MockStream};\n use http::{RequestHead, ResponseHead, Protocol};\n\n use header::Headers;\n use header;\n use Url;\n use method;\n use cookie;\n use version;\n\n use solicit::http::connection::{HttpFrame, ReceiveFrame};\n\n \/\/\/ Tests that the `Http2Message` correctly reads a response with no body.\n #[test]\n fn 
test_http2_response_no_body() {\n let mut mock_connector = MockHttp2Connector::new();\n mock_connector.new_response_stream(b\"200\", &Headers::new(), None);\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let resp = message.get_incoming().unwrap();\n\n assert_eq!(resp.raw_status.0, 200);\n let mut body = Vec::new();\n message.read_to_end(&mut body).unwrap();\n assert_eq!(body.len(), 0);\n }\n\n \/\/\/ Tests that the `Http2Message` correctly reads a response with a body.\n #[test]\n fn test_http2_response_with_body() {\n let mut mock_connector = MockHttp2Connector::new();\n mock_connector.new_response_stream(b\"200\", &Headers::new(), Some(vec![1, 2, 3]));\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let resp = message.get_incoming().unwrap();\n\n assert_eq!(resp.raw_status.0, 200);\n let mut body = Vec::new();\n message.read_to_end(&mut body).unwrap();\n assert_eq!(vec![1, 2, 3], body);\n }\n\n \/\/\/ Tests that the `Http2Message` correctly reads a response with an empty body.\n #[test]\n fn test_http2_response_empty_body() {\n let mut mock_connector = MockHttp2Connector::new();\n mock_connector.new_response_stream(b\"200\", &Headers::new(), Some(vec![]));\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let resp = message.get_incoming().unwrap();\n\n assert_eq!(resp.raw_status.0, 200);\n let mut body = Vec::new();\n message.read_to_end(&mut body).unwrap();\n assert_eq!(Vec::<u8>::new(), body);\n }\n\n \/\/\/ Tests that the `Http2Message` correctly parses out the headers into the `ResponseHead`.\n #[test]\n fn test_http2_response_headers() {\n let mut mock_connector = MockHttp2Connector::new();\n let mut headers = Headers::new();\n headers.set(header::ContentLength(3));\n headers.set(header::ETag(header::EntityTag::new(true, \"tag\".into())));\n mock_connector.new_response_stream(b\"200\", &headers, Some(vec![1, 2, 3]));\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let resp = message.get_incoming().unwrap();\n\n assert_eq!(resp.raw_status.0, 200);\n assert!(resp.headers.has::<header::ContentLength>());\n let &header::ContentLength(len) = resp.headers.get::<header::ContentLength>().unwrap();\n assert_eq!(3, len);\n assert!(resp.headers.has::<header::ETag>());\n let &header::ETag(ref tag) = resp.headers.get::<header::ETag>().unwrap();\n assert_eq!(tag.tag(), \"tag\");\n }\n\n \/\/\/ Tests that an error is returned when the `Http2Message` is not in a readable state.\n #[test]\n fn test_http2_message_not_readable() {\n let 
mut mock_connector = MockHttp2Connector::new();\n mock_connector.new_response_stream(b\"200\", &Headers::new(), None);\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n\n \/\/ No outgoing set yet, so nothing can be read at this point.\n assert!(message.read(&mut [0; 5]).is_err());\n }\n\n \/\/\/ Tests that an error is returned when the `Http2Message` is not in a writable state.\n #[test]\n fn test_http2_message_not_writable() {\n let mut mock_connector = MockHttp2Connector::new();\n mock_connector.new_response_stream(b\"200\", &Headers::new(), None);\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let _ = message.get_incoming().unwrap();\n \/\/ Writes are invalid now\n assert!(message.write(&[1]).is_err());\n }\n\n \/\/\/ Asserts that the given stream contains the full expected client preface: the preface bytes,\n \/\/\/ settings frame, and settings ack frame.\n fn assert_client_preface(server_stream: &mut MockStream) {\n \/\/ Skip client preface\n server_stream.read(&mut [0; 24]).unwrap();\n \/\/ The first frame are the settings\n assert!(match server_stream.recv_frame().unwrap() {\n HttpFrame::SettingsFrame(_) => true,\n _ => false,\n });\n \/\/ Now the ACK to the server's settings.\n assert!(match server_stream.recv_frame().unwrap() {\n HttpFrame::SettingsFrame(_) => true,\n _ => false,\n });\n }\n\n \/\/\/ Tests that sending a request with no body works correctly.\n #[test]\n fn test_http2_request_no_body() {\n let mut mock_connector = MockHttp2Connector::new();\n let stream = mock_connector.new_response_stream(b\"200\", &Headers::new(), Some(vec![]));\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n let _ = message.get_incoming().unwrap();\n\n let stream = stream.inner.lock().unwrap();\n assert!(stream.write.len() > 0);\n \/\/ The output stream of the client side gets flipped so that we can read the stream from\n \/\/ the server's end.\n let mut server_stream = MockStream::with_input(&stream.write);\n assert_client_preface(&mut server_stream);\n let frame = server_stream.recv_frame().unwrap();\n assert!(match frame {\n HttpFrame::HeadersFrame(ref frame) => frame.is_end_of_stream(),\n _ => false,\n });\n }\n\n \/\/\/ Tests that sending a request with a body works correctly.\n #[test]\n fn test_http2_request_with_body() {\n let mut mock_connector = MockHttp2Connector::new();\n let stream = mock_connector.new_response_stream(b\"200\", &Headers::new(), None);\n let protocol = Http2Protocol::with_connector(mock_connector);\n\n let mut message = protocol.new_message(\"127.0.0.1\", 1337, \"http\").unwrap();\n message.set_outgoing(RequestHead {\n headers: Headers::new(),\n method: method::Method::Get,\n url: Url::parse(\"http:\/\/127.0.0.1\/hello\").unwrap(),\n }).unwrap();\n \/\/ Write a few things to the request in multiple writes.\n message.write(&[1]).unwrap();\n message.write(&[2, 3]).unwrap();\n let _ = message.get_incoming().unwrap();\n\n let stream = 
stream.inner.lock().unwrap();\n assert!(stream.write.len() > 0);\n \/\/ The output stream of the client side gets flipped so that we can read the stream from\n \/\/ the server's end.\n let mut server_stream = MockStream::with_input(&stream.write);\n assert_client_preface(&mut server_stream);\n let frame = server_stream.recv_frame().unwrap();\n assert!(match frame {\n HttpFrame::HeadersFrame(ref frame) => !frame.is_end_of_stream(),\n _ => false,\n });\n assert!(match server_stream.recv_frame().unwrap() {\n HttpFrame::DataFrame(ref frame) => frame.data == vec![1, 2, 3],\n _ => false,\n });\n }\n\n \/\/\/ Tests that headers are correctly prepared when they include a `Set-Cookie` header.\n #[test]\n fn test_http2_prepare_headers_with_set_cookie() {\n let cookies = header::SetCookie(vec![\n cookie::Cookie::new(\"foo\".to_owned(), \"bar\".to_owned()),\n cookie::Cookie::new(\"baz\".to_owned(), \"quux\".to_owned())\n ]);\n let mut headers = Headers::new();\n headers.set(cookies);\n\n let h2headers = prepare_headers(headers);\n\n assert_eq!(vec![\n (b\"set-cookie\".to_vec(), b\"foo=bar\".to_vec()),\n (b\"set-cookie\".to_vec(), b\"baz=quux\".to_vec()),\n ], h2headers);\n }\n\n \/\/\/ Tests that headers are correctly prepared when they include a `Cookie` header.\n #[test]\n fn test_http2_prepapre_headers_with_cookie() {\n let cookies = header::Cookie(vec![\n cookie::Cookie::new(\"foo\".to_owned(), \"bar\".to_owned()),\n cookie::Cookie::new(\"baz\".to_owned(), \"quux\".to_owned())\n ]);\n let mut headers = Headers::new();\n headers.set(cookies);\n\n let h2headers = prepare_headers(headers);\n\n assert_eq!(vec![\n (b\"cookie\".to_vec(), b\"foo=bar; baz=quux\".to_vec()),\n ], h2headers);\n }\n\n \/\/\/ Tests that HTTP\/2 headers are correctly prepared.\n #[test]\n fn test_http2_prepare_headers() {\n let mut headers = Headers::new();\n headers.set(header::ContentLength(3));\n let expected = vec![\n (b\"content-length\".to_vec(), b\"3\".to_vec()),\n ];\n\n assert_eq!(expected, prepare_headers(headers));\n }\n\n \/\/\/ Tests that the headers of a response are correctly parsed when they include a `Set-Cookie`\n \/\/\/ header.\n #[test]\n fn test_http2_parse_headers_with_set_cookie() {\n let h2headers = vec![\n (b\"set-cookie\".to_vec(), b\"foo=bar\".to_vec()),\n (b\"set-cookie\".to_vec(), b\"baz=quux\".to_vec()),\n ];\n let expected = header::SetCookie(vec![\n cookie::Cookie::new(\"foo\".to_owned(), \"bar\".to_owned()),\n cookie::Cookie::new(\"baz\".to_owned(), \"quux\".to_owned())\n ]);\n\n let headers = parse_headers(h2headers).unwrap();\n\n assert!(headers.has::<header::SetCookie>());\n let set_cookie = headers.get::<header::SetCookie>().unwrap();\n assert_eq!(expected, *set_cookie);\n }\n\n \/\/\/ Tests that parsing HTTP\/2 headers with `Cookie` headers works correctly.\n #[test]\n fn test_http2_parse_headers_with_cookie() {\n let expected = header::Cookie(vec![\n cookie::Cookie::new(\"foo\".to_owned(), \"bar\".to_owned()),\n cookie::Cookie::new(\"baz\".to_owned(), \"quux\".to_owned())\n ]);\n \/\/ HTTP\/2 allows the `Cookie` header to be split into multiple ones to facilitate better\n \/\/ compression.\n let h2headers = vec![\n (b\"cookie\".to_vec(), b\"foo=bar\".to_vec()),\n (b\"cookie\".to_vec(), b\"baz=quux\".to_vec()),\n ];\n\n let headers = parse_headers(h2headers).unwrap();\n\n assert!(headers.has::<header::Cookie>());\n assert_eq!(*headers.get::<header::Cookie>().unwrap(), expected);\n }\n\n \/\/\/ Tests that the headers of a response are correctly parsed.\n #[test]\n fn 
test_http2_parse_headers() {\n let h2headers = vec![\n (b\":status\".to_vec(), b\"200\".to_vec()),\n (b\"content-length\".to_vec(), b\"3\".to_vec()),\n ];\n\n let headers = parse_headers(h2headers).unwrap();\n\n assert!(headers.has::<header::ContentLength>());\n let &header::ContentLength(len) = headers.get::<header::ContentLength>().unwrap();\n assert_eq!(3, len);\n }\n\n \/\/\/ Tests that if a header name is not a valid utf8 byte sequence, an error is returned.\n #[test]\n fn test_http2_parse_headers_invalid_name() {\n let h2headers = vec![\n (vec![0xfe], vec![]),\n ];\n\n assert!(parse_headers(h2headers).is_err());\n }\n\n \/\/\/ Tests that a response with no pseudo-header for status is considered invalid.\n #[test]\n fn test_http2_parse_response_no_status_code() {\n let response = ::solicit::http::Response {\n body: Vec::new(),\n headers: vec![\n (b\"content-length\".to_vec(), b\"3\".to_vec()),\n ],\n stream_id: 1,\n };\n\n assert!(parse_response(response).is_err());\n }\n\n \/\/\/ Tests that an HTTP\/2 response gets correctly parsed into a body and response head, when\n \/\/\/ the body is empty.\n #[test]\n fn test_http2_parse_response_no_body() {\n let response = ::solicit::http::Response {\n body: Vec::new(),\n headers: vec![\n (b\":status\".to_vec(), b\"200\".to_vec()),\n (b\"content-length\".to_vec(), b\"0\".to_vec()),\n ],\n stream_id: 1,\n };\n\n let (head, body) = parse_response(response).unwrap();\n\n assert_eq!(body, vec![]);\n let ResponseHead { headers, raw_status, version } = head;\n assert_eq!(raw_status.0, 200);\n assert_eq!(raw_status.1, \"\");\n assert!(headers.has::<header::ContentLength>());\n assert_eq!(version, version::HttpVersion::Http20);\n }\n\n \/\/\/ Tests that an HTTP\/2 response gets correctly parsed into a body and response head, when\n \/\/\/ the body is not empty.\n #[test]\n fn test_http2_parse_response_with_body() {\n let expected_body = vec![1, 2, 3];\n let response = ::solicit::http::Response {\n body: expected_body.clone(),\n headers: vec![\n (b\":status\".to_vec(), b\"200\".to_vec()),\n (b\"content-length\".to_vec(), b\"3\".to_vec()),\n ],\n stream_id: 1,\n };\n\n let (head, body) = parse_response(response).unwrap();\n\n assert_eq!(body, expected_body);\n let ResponseHead { headers, raw_status, version } = head;\n assert_eq!(raw_status.0, 200);\n assert_eq!(raw_status.1, \"\");\n assert!(headers.has::<header::ContentLength>());\n assert_eq!(version, version::HttpVersion::Http20);\n }\n}\n<commit_msg>chore(h2): clear out unused h2 module<commit_after>\n<|endoftext|>"} {"text":"<commit_before>use std::default::Default;\nuse rustc_serialize::json;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, PartialEq, RustcDecodable, RustcEncodable)]\npub struct Header {\n pub typ: String,\n pub alg: Option<String>,\n}\n\nimpl Header {\n pub fn parse(raw: &str) -> Result<Header, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let header = try!(json::decode(&*s));\n\n Ok(header)\n }\n\n pub fn encode(&self) -> Result<String, Error> {\n let s = try!(json::encode(&self));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\nimpl Default for Header {\n fn default() -> Header {\n Header {\n typ: \"JWT\".into(),\n alg: Some(\"HS256\".into()),\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::Header;\n\n #[test]\n fn parse() {\n let enc = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let header = Header::parse(enc).unwrap();\n\n 
assert_eq!(header.typ, \"JWT\");\n assert_eq!(header.alg.unwrap(), \"HS256\");\n }\n\n #[test]\n fn roundtrip() {\n let header: Header = Default::default();\n let enc = header.encode().unwrap();\n assert_eq!(header, Header::parse(&*enc).unwrap());\n }\n}\n<commit_msg>Make header type into enum<commit_after>use std::default::Default;\nuse rustc_serialize::json;\nuse rustc_serialize::base64::{\n FromBase64,\n ToBase64,\n};\nuse error::Error;\nuse BASE_CONFIG;\n\n#[derive(Debug, PartialEq, RustcDecodable, RustcEncodable)]\npub struct Header {\n pub typ: HeaderType,\n pub alg: Option<String>,\n}\n\nimpl Header {\n pub fn parse(raw: &str) -> Result<Header, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let header = try!(json::decode(&*s));\n\n Ok(header)\n }\n\n pub fn encode(&self) -> Result<String, Error> {\n let s = try!(json::encode(&self));\n let enc = (&*s).as_bytes().to_base64(BASE_CONFIG);\n Ok(enc)\n }\n}\n\n#[derive(Debug, PartialEq, RustcDecodable, RustcEncodable)]\npub enum HeaderType {\n JWT,\n}\n\nimpl Default for Header {\n fn default() -> Header {\n Header {\n typ: HeaderType::JWT,\n alg: Some(\"HS256\".into()),\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::{\n Header,\n HeaderType,\n };\n\n #[test]\n fn parse() {\n let enc = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let header = Header::parse(enc).unwrap();\n\n assert_eq!(header.typ, HeaderType::JWT);\n assert_eq!(header.alg.unwrap(), \"HS256\");\n }\n\n #[test]\n fn roundtrip() {\n let header: Header = Default::default();\n let enc = header.encode().unwrap();\n assert_eq!(header, Header::parse(&*enc).unwrap());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Format list-like macro invocations. These are invocations whose token trees\n\/\/ can be interpreted as expressions and separated by commas.\n\/\/ Note that these token trees do not actually have to be interpreted as\n\/\/ expressions by the compiler. An example of an invocation we would reformat is\n\/\/ foo!( x, y, z ). 
The token x may represent an identifier in the code, but we\n\/\/ interpreted as an expression.\n\/\/ Macro uses which are not-list like, such as bar!(key => val), will not be\n\/\/ reformated.\n\/\/ List-like invocations with parentheses will be formatted as function calls,\n\/\/ and those with brackets will be formatted as array literals.\n\nuse syntax::ast;\nuse syntax::parse::token::{Eof, Comma, Token};\nuse syntax::parse::tts_to_parser;\nuse syntax::codemap::{mk_sp, BytePos};\n\nuse Indent;\nuse rewrite::RewriteContext;\nuse expr::{rewrite_call, rewrite_array};\nuse comment::FindUncommented;\nuse utils::{wrap_str, span_after};\n\nstatic FORCED_BRACKET_MACROS: &'static [&'static str] = &[\"vec!\"];\n\n\/\/ FIXME: use the enum from libsyntax?\n#[derive(Clone, Copy)]\nenum MacroStyle {\n Parens,\n Brackets,\n Braces,\n}\n\nimpl MacroStyle {\n fn opener(&self) -> &'static str {\n match *self {\n MacroStyle::Parens => \"(\",\n MacroStyle::Brackets => \"[\",\n MacroStyle::Braces => \"{\",\n }\n }\n}\n\npub fn rewrite_macro(mac: &ast::Mac,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let original_style = macro_style(mac, context);\n let macro_name = format!(\"{}!\", mac.node.path);\n let style = if FORCED_BRACKET_MACROS.contains(&¯o_name[..]) {\n MacroStyle::Brackets\n } else {\n original_style\n };\n\n if let MacroStyle::Braces = style {\n return None;\n } else if mac.node.tts.is_empty() {\n return if let MacroStyle::Parens = style {\n Some(format!(\"{}()\", macro_name))\n } else {\n Some(format!(\"{}[]\", macro_name))\n };\n }\n\n let mut parser = tts_to_parser(context.parse_session, mac.node.tts.clone(), Vec::new());\n let mut expr_vec = Vec::new();\n\n loop {\n expr_vec.push(match parser.parse_expr() {\n Ok(expr) => expr,\n Err(..) => return None,\n });\n\n match parser.token {\n Token::Eof => break,\n Token::Comma => (),\n _ => return None,\n }\n\n let _ = parser.bump();\n\n if parser.token == Token::Eof {\n return None;\n }\n }\n\n match style {\n MacroStyle::Parens => {\n \/\/ Format macro invocation as function call.\n rewrite_call(context, ¯o_name, &expr_vec, mac.span, width, offset)\n }\n MacroStyle::Brackets => {\n \/\/ Format macro invocation as array literal.\n let extra_offset = macro_name.len();\n let rewrite = try_opt!(rewrite_array(expr_vec.iter().map(|x| &**x),\n mk_sp(span_after(mac.span,\n original_style.opener(),\n context.codemap),\n mac.span.hi - BytePos(1)),\n context,\n try_opt!(width.checked_sub(extra_offset)),\n offset + extra_offset));\n\n Some(format!(\"{}{}\", macro_name, rewrite))\n }\n MacroStyle::Braces => {\n \/\/ Skip macro invocations with braces, for now.\n wrap_str(context.snippet(mac.span),\n context.config.max_width,\n width,\n offset)\n }\n }\n}\n\nfn macro_style(mac: &ast::Mac, context: &RewriteContext) -> MacroStyle {\n let snippet = context.snippet(mac.span);\n let paren_pos = snippet.find_uncommented(\"(\").unwrap_or(usize::max_value());\n let bracket_pos = snippet.find_uncommented(\"[\").unwrap_or(usize::max_value());\n let brace_pos = snippet.find_uncommented(\"{\").unwrap_or(usize::max_value());\n\n if paren_pos < bracket_pos && paren_pos < brace_pos {\n MacroStyle::Parens\n } else if bracket_pos < brace_pos {\n MacroStyle::Brackets\n } else {\n MacroStyle::Braces\n }\n}\n<commit_msg>Remove unused imports<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Format list-like macro invocations. These are invocations whose token trees\n\/\/ can be interpreted as expressions and separated by commas.\n\/\/ Note that these token trees do not actually have to be interpreted as\n\/\/ expressions by the compiler. An example of an invocation we would reformat is\n\/\/ foo!( x, y, z ). The token x may represent an identifier in the code, but we\n\/\/ interpreted as an expression.\n\/\/ Macro uses which are not-list like, such as bar!(key => val), will not be\n\/\/ reformated.\n\/\/ List-like invocations with parentheses will be formatted as function calls,\n\/\/ and those with brackets will be formatted as array literals.\n\nuse syntax::ast;\nuse syntax::parse::token::Token;\nuse syntax::parse::tts_to_parser;\nuse syntax::codemap::{mk_sp, BytePos};\n\nuse Indent;\nuse rewrite::RewriteContext;\nuse expr::{rewrite_call, rewrite_array};\nuse comment::FindUncommented;\nuse utils::{wrap_str, span_after};\n\nstatic FORCED_BRACKET_MACROS: &'static [&'static str] = &[\"vec!\"];\n\n\/\/ FIXME: use the enum from libsyntax?\n#[derive(Clone, Copy)]\nenum MacroStyle {\n Parens,\n Brackets,\n Braces,\n}\n\nimpl MacroStyle {\n fn opener(&self) -> &'static str {\n match *self {\n MacroStyle::Parens => \"(\",\n MacroStyle::Brackets => \"[\",\n MacroStyle::Braces => \"{\",\n }\n }\n}\n\npub fn rewrite_macro(mac: &ast::Mac,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let original_style = macro_style(mac, context);\n let macro_name = format!(\"{}!\", mac.node.path);\n let style = if FORCED_BRACKET_MACROS.contains(&¯o_name[..]) {\n MacroStyle::Brackets\n } else {\n original_style\n };\n\n if let MacroStyle::Braces = style {\n return None;\n } else if mac.node.tts.is_empty() {\n return if let MacroStyle::Parens = style {\n Some(format!(\"{}()\", macro_name))\n } else {\n Some(format!(\"{}[]\", macro_name))\n };\n }\n\n let mut parser = tts_to_parser(context.parse_session, mac.node.tts.clone(), Vec::new());\n let mut expr_vec = Vec::new();\n\n loop {\n expr_vec.push(match parser.parse_expr() {\n Ok(expr) => expr,\n Err(..) 
=> return None,\n });\n\n match parser.token {\n Token::Eof => break,\n Token::Comma => (),\n _ => return None,\n }\n\n let _ = parser.bump();\n\n if parser.token == Token::Eof {\n return None;\n }\n }\n\n match style {\n MacroStyle::Parens => {\n \/\/ Format macro invocation as function call.\n rewrite_call(context, ¯o_name, &expr_vec, mac.span, width, offset)\n }\n MacroStyle::Brackets => {\n \/\/ Format macro invocation as array literal.\n let extra_offset = macro_name.len();\n let rewrite = try_opt!(rewrite_array(expr_vec.iter().map(|x| &**x),\n mk_sp(span_after(mac.span,\n original_style.opener(),\n context.codemap),\n mac.span.hi - BytePos(1)),\n context,\n try_opt!(width.checked_sub(extra_offset)),\n offset + extra_offset));\n\n Some(format!(\"{}{}\", macro_name, rewrite))\n }\n MacroStyle::Braces => {\n \/\/ Skip macro invocations with braces, for now.\n wrap_str(context.snippet(mac.span),\n context.config.max_width,\n width,\n offset)\n }\n }\n}\n\nfn macro_style(mac: &ast::Mac, context: &RewriteContext) -> MacroStyle {\n let snippet = context.snippet(mac.span);\n let paren_pos = snippet.find_uncommented(\"(\").unwrap_or(usize::max_value());\n let bracket_pos = snippet.find_uncommented(\"[\").unwrap_or(usize::max_value());\n let brace_pos = snippet.find_uncommented(\"{\").unwrap_or(usize::max_value());\n\n if paren_pos < bracket_pos && paren_pos < brace_pos {\n MacroStyle::Parens\n } else if bracket_pos < brace_pos {\n MacroStyle::Brackets\n } else {\n MacroStyle::Braces\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a compare_dyldinfos integration test<commit_after>extern crate goblin;\nuse std::process;\n\nfn compare(args: Vec<&str>) {\n let apple = process::Command::new(\"\/Library\/Developer\/CommandLineTools\/usr\/bin\/dyldinfo\")\n .args(&args)\n .output()\n .expect(\"run Apple dyldinfo\");\n\n let goblin = process::Command::new(\"cargo\")\n .arg(\"run\")\n .arg(\"--quiet\")\n .arg(\"--example\")\n .arg(\"dyldinfo\")\n .arg(\"--\")\n .args(&args)\n .output()\n .expect(\"run cargo dyldinfo\");\n\n if apple.stdout.as_slice() != goblin.stdout.as_slice() {\n eprintln!(\"dyldinfo calls disagree!\");\n eprintln!(\"Apple dyldinfo {:?} output:\\n{}\", &args, String::from_utf8_lossy(&apple.stdout));\n eprintln!(\"---\");\n eprintln!(\"cargo dyldinfo {:?} output:\\n{}\", &args, String::from_utf8_lossy(&goblin.stdout));\n panic!(\"Apple dyldinfo and cargo dyldinfo differed (args: {:?})\", args);\n }\n}\n\n#[cfg(target_os=\"macos\")]\n#[test]\nfn compare_binds() {\n compare(vec![\"-bind\", \"\/Library\/Developer\/CommandLineTools\/usr\/bin\/dyldinfo\"]);\n compare(vec![\"-bind\", \"\/Library\/Developer\/CommandLineTools\/usr\/bin\/clang\"]);\n compare(vec![\"-bind\", \"\/usr\/bin\/tmutil\"]);\n}\n\n#[cfg(not(target_os=\"macos\"))]\n#[test]\nfn skipped_on_this_platform() {\n \/\/ this test does nothing on other platforms\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) 2015 Daniel Grunwald\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of this\n\/\/ software and associated documentation files (the \"Software\"), to deal in the Software\n\/\/ without restriction, including without limitation the rights to use, copy, modify, merge,\n\/\/ publish, distribute, sublicense, and\/or sell copies of the Software, and to permit persons\n\/\/ to whom the Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in all copies or\n\/\/ 
substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n\/\/ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n\/\/ PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE\n\/\/ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n\/\/ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nextern crate num;\n\nuse libc::{c_long, c_double};\nuse std;\nuse python::{Python, PythonObject, ToPythonPointer};\nuse err::{self, PyResult, PyErr};\nuse super::object::PyObject;\nuse super::exc;\nuse ffi::{self, Py_ssize_t};\nuse conversion::{ToPyObject, FromPyObject};\n\n#[cfg(feature=\"python27-sys\")]\npyobject_newtype!(PyInt, PyInt_Check, PyInt_Type);\n\npyobject_newtype!(PyLong, PyLong_Check, PyLong_Type);\npyobject_newtype!(PyFloat, PyFloat_Check, PyFloat_Type);\n\n#[cfg(feature=\"python27-sys\")]\nimpl <'p> PyInt<'p> {\n \/\/\/ Creates a new python `int` object.\n pub fn new(py: Python<'p>, val: c_long) -> PyInt<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyInt_FromLong(val))\n }\n }\n\n \/\/\/ Gets the value of this integer.\n pub fn value(&self) -> c_long {\n unsafe { ffi::PyInt_AS_LONG(self.as_ptr()) }\n }\n}\n\n\nimpl <'p> PyFloat<'p> {\n \/\/\/ Creates a new python `float` object.\n pub fn new(py: Python<'p>, val: c_double) -> PyFloat<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyFloat_FromDouble(val))\n }\n }\n\n \/\/\/ Gets the value of this float.\n pub fn value(&self) -> c_double {\n unsafe { ffi::PyFloat_AsDouble(self.as_ptr()) }\n }\n}\n\nmacro_rules! int_fits_c_long(\n ($rust_type:ty) => (\n #[cfg(feature=\"python27-sys\")]\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = PyInt<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyInt<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py,\n ffi::PyInt_FromLong(*self as c_long))\n }\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = PyLong<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyLong<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py,\n ffi::PyLong_FromLong(*self as c_long))\n }\n }\n }\n\n #[cfg(feature=\"python27-sys\")]\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = unsafe { ffi::PyInt_AsLong(s.as_ptr()) };\n if val == -1 && PyErr::occurred(py) {\n return Err(PyErr::fetch(py));\n }\n match num::traits::cast::<c_long, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n \n #[cfg(feature=\"python3-sys\")]\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = unsafe { ffi::PyLong_AsLong(s.as_ptr()) };\n if val == -1 && PyErr::occurred(py) {\n return Err(PyErr::fetch(py));\n }\n match num::traits::cast::<c_long, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n )\n);\n\n\nmacro_rules! 
int_fits_larger_int(\n ($rust_type:ty, $larger_type:ty) => (\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = <$larger_type as ToPyObject<'p>>::ObjectType;\n\n #[inline]\n fn to_py_object(&self, py: Python<'p>) -> <$larger_type as ToPyObject<'p>>::ObjectType {\n (*self as $larger_type).to_py_object(py)\n }\n }\n\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = try!(s.extract::<$larger_type>());\n match num::traits::cast::<$larger_type, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n )\n);\n\n\nint_fits_c_long!(i8);\nint_fits_c_long!(u8);\nint_fits_c_long!(i16);\nint_fits_c_long!(u16);\nint_fits_c_long!(i32);\n\n\/\/ If c_long is 64-bits, we can use more types with int_fits_c_long!:\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(u32);\n#[cfg(any(target_pointer_width=\"32\", target_os=\"windows\"))]\nint_fits_larger_int!(u32, u64);\n\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(i64);\n\/\/ TODO: manual implementation for i64 on systems with 32-bit long\n\n\/\/ u64 has a manual implementation as it never fits into signed long\n\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(isize);\n#[cfg(any(target_pointer_width=\"32\", target_os=\"windows\"))]\nint_fits_larger_int!(isize, i64);\n\nint_fits_larger_int!(usize, u64);\n\nimpl <'p> ToPyObject<'p> for u64 {\n #[cfg(feature=\"python27-sys\")]\n type ObjectType = PyObject<'p>;\n\n #[cfg(feature=\"python3-sys\")]\n type ObjectType = PyLong<'p>;\n\n #[cfg(feature=\"python27-sys\")]\n fn to_py_object(&self, py: Python<'p>) -> PyObject<'p> {\n unsafe {\n let ptr = match num::traits::cast::<u64, c_long>(*self) {\n Some(v) => ffi::PyInt_FromLong(v),\n None => ffi::PyLong_FromUnsignedLongLong(*self)\n };\n err::from_owned_ptr_or_panic(py, ptr)\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n fn to_py_object(&self, py: Python<'p>) -> PyLong<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyLong_FromUnsignedLongLong(*self))\n }\n }\n}\n\nfn pylong_as_u64<'p>(obj: &PyObject<'p>) -> PyResult<'p, u64> {\n let py = obj.python();\n let v = unsafe { ffi::PyLong_AsUnsignedLongLong(obj.as_ptr()) };\n if v == !0 && PyErr::occurred(py) {\n Err(PyErr::fetch(py))\n } else {\n Ok(v)\n }\n}\n\nimpl <'p> FromPyObject<'p> for u64 {\n #[cfg(feature=\"python27-sys\")]\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, u64> {\n let py = s.python();\n let ptr = s.as_ptr();\n unsafe {\n if ffi::PyLong_Check(ptr) != 0 {\n pylong_as_u64(s)\n } else if ffi::PyInt_Check(ptr) != 0 {\n match num::traits::cast::<c_long, u64>(ffi::PyInt_AS_LONG(ptr)) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n } else {\n let num = try!(err::result_from_owned_ptr(py, ffi::PyNumber_Long(ptr)));\n pylong_as_u64(&num)\n }\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, u64> {\n let py = s.python();\n let ptr = s.as_ptr();\n unsafe {\n if ffi::PyLong_Check(ptr) != 0 {\n pylong_as_u64(s)\n } else {\n let num = try!(err::result_from_owned_ptr(py, ffi::PyNumber_Long(ptr)));\n pylong_as_u64(&num)\n }\n }\n }\n}\n\nimpl <'p> ToPyObject<'p> for f64 {\n type ObjectType = PyFloat<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyFloat<'p> {\n PyFloat::new(py, *self)\n }\n}\n\nimpl <'p> FromPyObject<'p> for f64 {\n fn from_py_object(s: &PyObject<'p>) -> 
PyResult<'p, f64> {\n let py = s.python();\n let v = unsafe { ffi::PyFloat_AsDouble(s.as_ptr()) };\n if v == -1.0 && PyErr::occurred(py) {\n Err(PyErr::fetch(py))\n } else {\n Ok(v)\n }\n }\n}\n\nfn overflow_error(py: Python) -> PyErr {\n PyErr::new_lazy_init(py.get_type::<exc::OverflowError>(), None)\n}\n\nimpl <'p> ToPyObject<'p> for f32 {\n type ObjectType = PyFloat<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyFloat<'p> {\n PyFloat::new(py, *self as f64)\n }\n}\n\nimpl <'p, 's> FromPyObject<'p> for f32 {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, f32> {\n Ok(try!(s.extract::<f64>()) as f32)\n }\n}\n\n#[cfg(test)]\nmod test {\n use std;\n use python::{Python, PythonObject};\n use conversion::ToPyObject;\n\n macro_rules! num_to_py_object_and_back (\n ($func_name:ident, $t1:ty, $t2:ty) => (\n #[test]\n fn $func_name() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let val = 123 as $t1;\n let obj = val.to_py_object(py).into_object();\n assert_eq!(obj.extract::<$t2>().unwrap(), val as $t2);\n }\n )\n );\n\n num_to_py_object_and_back!(to_from_f64, f64, f64);\n num_to_py_object_and_back!(to_from_f32, f32, f32);\n num_to_py_object_and_back!(to_from_i8, i8, i8);\n num_to_py_object_and_back!(to_from_u8, u8, u8);\n num_to_py_object_and_back!(to_from_i16, i16, i16);\n num_to_py_object_and_back!(to_from_u16, u16, u16);\n num_to_py_object_and_back!(to_from_i32, i32, i32);\n num_to_py_object_and_back!(to_from_u32, u32, u32);\n num_to_py_object_and_back!(to_from_i64, i64, i64);\n num_to_py_object_and_back!(to_from_u64, u64, u64);\n num_to_py_object_and_back!(to_from_isize, isize, isize);\n num_to_py_object_and_back!(to_from_usize, usize, usize);\n num_to_py_object_and_back!(float_to_i32, f64, i32);\n num_to_py_object_and_back!(float_to_u32, f64, u32);\n num_to_py_object_and_back!(float_to_i64, f64, i64);\n num_to_py_object_and_back!(float_to_u64, f64, u64);\n num_to_py_object_and_back!(int_to_float, i32, f64);\n\n #[test]\n fn test_u32_max() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let v = std::u32::MAX;\n let obj = v.to_py_object(py).into_object();\n assert_eq!(v, obj.extract::<u32>().unwrap());\n assert_eq!(v as u64, obj.extract::<u64>().unwrap());\n assert!(obj.extract::<i32>().is_err());\n }\n\n #[test]\n fn test_u64_max() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let v = std::u64::MAX;\n let obj = v.to_py_object(py).into_object();\n println!(\"{:?}\", obj);\n assert_eq!(v, obj.extract::<u64>().unwrap());\n assert!(obj.extract::<i64>().is_err());\n }\n}\n\n<commit_msg>Generalize pylong_as_u64.<commit_after>\/\/ Copyright (c) 2015 Daniel Grunwald\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of this\n\/\/ software and associated documentation files (the \"Software\"), to deal in the Software\n\/\/ without restriction, including without limitation the rights to use, copy, modify, merge,\n\/\/ publish, distribute, sublicense, and\/or sell copies of the Software, and to permit persons\n\/\/ to whom the Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in all copies or\n\/\/ substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n\/\/ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n\/\/ PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE\n\/\/ FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n\/\/ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nextern crate num;\n\nuse libc::{c_long, c_double};\nuse std;\nuse python::{Python, PythonObject, ToPythonPointer};\nuse err::{self, PyResult, PyErr};\nuse super::object::PyObject;\nuse super::exc;\nuse ffi::{self, Py_ssize_t};\nuse conversion::{ToPyObject, FromPyObject};\n\n#[cfg(feature=\"python27-sys\")]\npyobject_newtype!(PyInt, PyInt_Check, PyInt_Type);\n\npyobject_newtype!(PyLong, PyLong_Check, PyLong_Type);\npyobject_newtype!(PyFloat, PyFloat_Check, PyFloat_Type);\n\n#[cfg(feature=\"python27-sys\")]\nimpl <'p> PyInt<'p> {\n \/\/\/ Creates a new python `int` object.\n pub fn new(py: Python<'p>, val: c_long) -> PyInt<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyInt_FromLong(val))\n }\n }\n\n \/\/\/ Gets the value of this integer.\n pub fn value(&self) -> c_long {\n unsafe { ffi::PyInt_AS_LONG(self.as_ptr()) }\n }\n}\n\n\nimpl <'p> PyFloat<'p> {\n \/\/\/ Creates a new python `float` object.\n pub fn new(py: Python<'p>, val: c_double) -> PyFloat<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyFloat_FromDouble(val))\n }\n }\n\n \/\/\/ Gets the value of this float.\n pub fn value(&self) -> c_double {\n unsafe { ffi::PyFloat_AsDouble(self.as_ptr()) }\n }\n}\n\nmacro_rules! int_fits_c_long(\n ($rust_type:ty) => (\n #[cfg(feature=\"python27-sys\")]\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = PyInt<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyInt<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py,\n ffi::PyInt_FromLong(*self as c_long))\n }\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = PyLong<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyLong<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py,\n ffi::PyLong_FromLong(*self as c_long))\n }\n }\n }\n\n #[cfg(feature=\"python27-sys\")]\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = unsafe { ffi::PyInt_AsLong(s.as_ptr()) };\n if val == -1 && PyErr::occurred(py) {\n return Err(PyErr::fetch(py));\n }\n match num::traits::cast::<c_long, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = unsafe { ffi::PyLong_AsLong(s.as_ptr()) };\n if val == -1 && PyErr::occurred(py) {\n return Err(PyErr::fetch(py));\n }\n match num::traits::cast::<c_long, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n )\n);\n\n\nmacro_rules! 
int_fits_larger_int(\n ($rust_type:ty, $larger_type:ty) => (\n impl <'p> ToPyObject<'p> for $rust_type {\n type ObjectType = <$larger_type as ToPyObject<'p>>::ObjectType;\n\n #[inline]\n fn to_py_object(&self, py: Python<'p>) -> <$larger_type as ToPyObject<'p>>::ObjectType {\n (*self as $larger_type).to_py_object(py)\n }\n }\n\n impl <'p> FromPyObject<'p> for $rust_type {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, $rust_type> {\n let py = s.python();\n let val = try!(s.extract::<$larger_type>());\n match num::traits::cast::<$larger_type, $rust_type>(val) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n }\n }\n )\n);\n\n\nint_fits_c_long!(i8);\nint_fits_c_long!(u8);\nint_fits_c_long!(i16);\nint_fits_c_long!(u16);\nint_fits_c_long!(i32);\n\n\/\/ If c_long is 64-bits, we can use more types with int_fits_c_long!:\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(u32);\n#[cfg(any(target_pointer_width=\"32\", target_os=\"windows\"))]\nint_fits_larger_int!(u32, u64);\n\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(i64);\n\/\/ TODO: manual implementation for i64 on systems with 32-bit long\n\n\/\/ u64 has a manual implementation as it never fits into signed long\n\n#[cfg(all(target_pointer_width=\"64\", not(target_os=\"windows\")))]\nint_fits_c_long!(isize);\n#[cfg(any(target_pointer_width=\"32\", target_os=\"windows\"))]\nint_fits_larger_int!(isize, i64);\n\nint_fits_larger_int!(usize, u64);\n\nfn err_if_invalid_value<'p, T: PartialEq, F: Fn() -> T>\n (obj: &PyObject<'p>, invalid_value: T, func: F) -> PyResult<'p, T> {\n let py = obj.python();\n let v = func();\n if v == invalid_value && PyErr::occurred(py) {\n Err(PyErr::fetch(py))\n } else {\n Ok(v)\n }\n}\n\nimpl <'p> ToPyObject<'p> for u64 {\n #[cfg(feature=\"python27-sys\")]\n type ObjectType = PyObject<'p>;\n\n #[cfg(feature=\"python3-sys\")]\n type ObjectType = PyLong<'p>;\n\n #[cfg(feature=\"python27-sys\")]\n fn to_py_object(&self, py: Python<'p>) -> PyObject<'p> {\n unsafe {\n let ptr = match num::traits::cast::<u64, c_long>(*self) {\n Some(v) => ffi::PyInt_FromLong(v),\n None => ffi::PyLong_FromUnsignedLongLong(*self)\n };\n err::from_owned_ptr_or_panic(py, ptr)\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n fn to_py_object(&self, py: Python<'p>) -> PyLong<'p> {\n unsafe {\n err::cast_from_owned_ptr_or_panic(py, ffi::PyLong_FromUnsignedLongLong(*self))\n }\n }\n}\n\nimpl <'p> FromPyObject<'p> for u64 {\n #[cfg(feature=\"python27-sys\")]\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, u64> {\n let py = s.python();\n let ptr = s.as_ptr();\n\n unsafe {\n if ffi::PyLong_Check(ptr) != 0 {\n err_if_invalid_value(s, !0, || ffi::PyLong_AsUnsignedLongLong(s.as_ptr()) )\n } else if ffi::PyInt_Check(ptr) != 0 {\n match num::traits::cast::<c_long, u64>(ffi::PyInt_AS_LONG(ptr)) {\n Some(v) => Ok(v),\n None => Err(overflow_error(py))\n }\n } else {\n let num = try!(err::result_from_owned_ptr(py, ffi::PyNumber_Long(ptr)));\n err_if_invalid_value(&num, !0, || ffi::PyLong_AsUnsignedLongLong(num.as_ptr()) )\n }\n }\n }\n\n #[cfg(feature=\"python3-sys\")]\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, u64> {\n let py = s.python();\n let ptr = s.as_ptr();\n unsafe {\n if ffi::PyLong_Check(ptr) != 0 {\n err_if_invalid_value(s, !0, || ffi::PyLong_AsUnsignedLongLong(s.as_ptr()) )\n } else {\n let num = try!(err::result_from_owned_ptr(py, ffi::PyNumber_Long(ptr)));\n err_if_invalid_value(&num, !0, || ffi::PyLong_AsUnsignedLongLong(num.as_ptr()) 
)\n }\n }\n }\n}\n\nimpl <'p> ToPyObject<'p> for f64 {\n type ObjectType = PyFloat<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyFloat<'p> {\n PyFloat::new(py, *self)\n }\n}\n\nimpl <'p> FromPyObject<'p> for f64 {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, f64> {\n let py = s.python();\n let v = unsafe { ffi::PyFloat_AsDouble(s.as_ptr()) };\n if v == -1.0 && PyErr::occurred(py) {\n Err(PyErr::fetch(py))\n } else {\n Ok(v)\n }\n }\n}\n\nfn overflow_error(py: Python) -> PyErr {\n PyErr::new_lazy_init(py.get_type::<exc::OverflowError>(), None)\n}\n\nimpl <'p> ToPyObject<'p> for f32 {\n type ObjectType = PyFloat<'p>;\n\n fn to_py_object(&self, py: Python<'p>) -> PyFloat<'p> {\n PyFloat::new(py, *self as f64)\n }\n}\n\nimpl <'p, 's> FromPyObject<'p> for f32 {\n fn from_py_object(s: &PyObject<'p>) -> PyResult<'p, f32> {\n Ok(try!(s.extract::<f64>()) as f32)\n }\n}\n\n#[cfg(test)]\nmod test {\n use std;\n use python::{Python, PythonObject};\n use conversion::ToPyObject;\n\n macro_rules! num_to_py_object_and_back (\n ($func_name:ident, $t1:ty, $t2:ty) => (\n #[test]\n fn $func_name() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let val = 123 as $t1;\n let obj = val.to_py_object(py).into_object();\n assert_eq!(obj.extract::<$t2>().unwrap(), val as $t2);\n }\n )\n );\n\n num_to_py_object_and_back!(to_from_f64, f64, f64);\n num_to_py_object_and_back!(to_from_f32, f32, f32);\n num_to_py_object_and_back!(to_from_i8, i8, i8);\n num_to_py_object_and_back!(to_from_u8, u8, u8);\n num_to_py_object_and_back!(to_from_i16, i16, i16);\n num_to_py_object_and_back!(to_from_u16, u16, u16);\n num_to_py_object_and_back!(to_from_i32, i32, i32);\n num_to_py_object_and_back!(to_from_u32, u32, u32);\n num_to_py_object_and_back!(to_from_i64, i64, i64);\n num_to_py_object_and_back!(to_from_u64, u64, u64);\n num_to_py_object_and_back!(to_from_isize, isize, isize);\n num_to_py_object_and_back!(to_from_usize, usize, usize);\n num_to_py_object_and_back!(float_to_i32, f64, i32);\n num_to_py_object_and_back!(float_to_u32, f64, u32);\n num_to_py_object_and_back!(float_to_i64, f64, i64);\n num_to_py_object_and_back!(float_to_u64, f64, u64);\n num_to_py_object_and_back!(int_to_float, i32, f64);\n\n #[test]\n fn test_u32_max() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let v = std::u32::MAX;\n let obj = v.to_py_object(py).into_object();\n assert_eq!(v, obj.extract::<u32>().unwrap());\n assert_eq!(v as u64, obj.extract::<u64>().unwrap());\n assert!(obj.extract::<i32>().is_err());\n }\n\n #[test]\n fn test_u64_max() {\n let gil = Python::acquire_gil();\n let py = gil.python();\n let v = std::u64::MAX;\n let obj = v.to_py_object(py).into_object();\n println!(\"{:?}\", obj);\n assert_eq!(v, obj.extract::<u64>().unwrap());\n assert!(obj.extract::<i64>().is_err());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>renaming TrackerRequest to QueryString, moving to submodule, adding methods<commit_after>use std::collections::HashMap;\nuse url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};\n\n\/\/generic query string builder, automatically escapes each parameter val if necessary\n\/\/KEYS ARE TAKEN AS IS\n\npub struct QueryString<'a> {\n params: HashMap<&'a str, String>\n}\n\nimpl <'a> QueryString<'a> {\n\n pub fn from (params: Vec<(&'a str, String)>) -> QueryString <'a> {\n let mut hm = QueryString {params: HashMap::new()};\n hm.add_params(params);\n hm\n }\n\n pub fn add_params (&mut self, params: Vec<(&'a str, String)>) {\n for (key, val) in params {\n 
self.params.insert(key, utf8_percent_encode(&val, DEFAULT_ENCODE_SET));\n }\n }\n\n pub fn to_param_string (&self) -> String {\n self.params.iter().map(|(k, v)| k.to_string() + \"=\" + v)\n .collect::<Vec<String>>()\n .join(\"&\")\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>simple thread<commit_after>use std::thread;\n\nstatic NTHREADS: i32 = 10;\n\nfn main() {\n\n let mut children = vec![];\n\n for i in 0..NTHREADS {\n children.push(\n thread::spawn(move || {\n println!(\"Thread no: {}\", i);\n })\n );\n }\n\n for c in children {\n let _ = c.join();\n }\n\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Change auth test to pass in config<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added empty test<commit_after>fn main() {\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/ Timing functions.\nuse extra::time::precise_time_ns;\nuse std::cell::Cell;\nuse std::comm::{Port, SharedChan};\nuse extra::sort::tim_sort;\nuse std::iterator::AdditiveIterator;\n\n\/\/ front-end representation of the profiler used to communicate with the profiler\n#[deriving(Clone)]\npub struct ProfilerChan {\n chan: SharedChan<ProfilerMsg>,\n}\n\nimpl ProfilerChan {\n pub fn new(chan: Chan<ProfilerMsg>) -> ProfilerChan {\n ProfilerChan {\n chan: SharedChan::new(chan),\n }\n }\n pub fn send(&self, msg: ProfilerMsg) {\n self.chan.send(msg);\n }\n}\n\n#[deriving(Eq, Clone)]\npub enum ProfilerCategory {\n CompositingCategory,\n LayoutQueryCategory,\n LayoutPerformCategory,\n LayoutAuxInitCategory,\n LayoutSelectorMatchCategory,\n LayoutTreeBuilderCategory,\n LayoutMainCategory,\n LayoutShapingCategory,\n LayoutDispListBuildCategory,\n GfxRegenAvailableFontsCategory,\n RenderingDrawingCategory,\n RenderingPrepBuffCategory,\n RenderingCategory,\n \/\/ hackish but helps prevent errors when adding new categories\n NUM_BUCKETS,\n}\n\/\/ FIXME(#5873) this should be initialized by a NUM_BUCKETS cast,\nstatic BUCKETS: uint = 13;\ntype ProfilerBuckets = [(ProfilerCategory, ~[float]), ..BUCKETS];\n\npub enum ProfilerMsg {\n \/\/ Normal message used for reporting time\n TimeMsg(ProfilerCategory, float),\n \/\/ Message used to force print the profiling metrics\n PrintMsg,\n}\n\n\/\/ back end of the profiler that handles data aggregation and performance metrics\npub struct Profiler {\n port: Port<ProfilerMsg>,\n buckets: ProfilerBuckets,\n last_msg: Option<ProfilerMsg>,\n}\n\nimpl ProfilerCategory {\n \/\/ convenience function to not have to cast every time\n pub fn num_buckets() -> uint {\n NUM_BUCKETS as uint\n }\n\n \/\/ enumeration of all ProfilerCategory types\n \/\/ TODO(tkuehn): is there a better way to ensure proper order of categories?\n fn empty_buckets() -> ProfilerBuckets {\n let buckets = [\n (CompositingCategory, ~[]),\n (LayoutQueryCategory, ~[]),\n (LayoutPerformCategory, ~[]),\n (LayoutAuxInitCategory, ~[]),\n (LayoutSelectorMatchCategory, ~[]),\n (LayoutTreeBuilderCategory, ~[]),\n (LayoutMainCategory, ~[]),\n (LayoutShapingCategory, ~[]),\n (LayoutDispListBuildCategory, ~[]),\n (GfxRegenAvailableFontsCategory, ~[]),\n (RenderingDrawingCategory, ~[]),\n (RenderingPrepBuffCategory, ~[]),\n (RenderingCategory, ~[]),\n ];\n\n ProfilerCategory::check_order(&buckets);\n buckets\n }\n\n \/\/ ensure that the order of the buckets matches the order of the enum categories\n 
fn check_order(vec: &ProfilerBuckets) {\n for &(category, _) in vec.iter() {\n if category != vec[category as uint].first() {\n fail!(\"Enum category does not match bucket index. This is a bug.\");\n }\n }\n }\n\n \/\/ some categories are subcategories of LayoutPerformCategory\n \/\/ and should be printed to indicate this\n pub fn format(self) -> ~str {\n let padding = match self {\n LayoutAuxInitCategory | LayoutSelectorMatchCategory | LayoutTreeBuilderCategory |\n LayoutMainCategory | LayoutDispListBuildCategory | LayoutShapingCategory=> \" - \",\n _ => \"\"\n };\n fmt!(\"%s%?\", padding, self)\n }\n}\n\nimpl Profiler {\n pub fn create(port: Port<ProfilerMsg>) {\n let port = Cell::new(port);\n do spawn {\n let mut profiler = Profiler::new(port.take());\n profiler.start();\n }\n }\n\n pub fn new(port: Port<ProfilerMsg>) -> Profiler {\n Profiler {\n port: port,\n buckets: ProfilerCategory::empty_buckets(),\n last_msg: None,\n }\n }\n\n pub fn start(&mut self) {\n loop {\n let msg = self.port.recv();\n self.handle_msg(msg);\n }\n }\n\n fn handle_msg(&mut self, msg: ProfilerMsg) {\n match msg {\n TimeMsg(category, t) => match self.buckets[category as uint] {\n \/\/TODO(tkuehn): would be nice to have tuple.second_mut()\n (_, ref mut data) => data.push(t),\n },\n PrintMsg => match self.last_msg {\n \/\/ only print if more data has arrived since the last printout\n Some(TimeMsg(*)) => self.print_buckets(),\n _ => {}\n },\n };\n self.last_msg = Some(msg);\n }\n\n fn print_buckets(&mut self) {\n println(fmt!(\"%31s %15s %15s %15s %15s %15s\",\n \"_category_\", \"_mean (ms)_\", \"_median (ms)_\",\n \"_min (ms)_\", \"_max (ms)_\", \"_bucket size_\"));\n for bucket in self.buckets.mut_iter() {\n let (category, data) = match *bucket {\n (category, ref mut data) => (category, data),\n };\n tim_sort(*data);\n let data_len = data.len();\n if data_len > 0 {\n let (mean, median, &min, &max) =\n (data.iter().map(|&x|x).sum() \/ (data_len as float),\n data[data_len \/ 2],\n data.iter().min().unwrap(),\n data.iter().max().unwrap());\n println(fmt!(\"%-30s: %15.4f %15.4f %15.4f %15.4f %15u\",\n category.format(), mean, median, min, max, data_len));\n }\n }\n println(\"\");\n }\n}\n\n\npub fn profile<T>(category: ProfilerCategory, \n profiler_chan: ProfilerChan,\n callback: &fn() -> T)\n -> T {\n let start_time = precise_time_ns();\n let val = callback();\n let end_time = precise_time_ns();\n let ms = ((end_time - start_time) as float \/ 1000000f);\n profiler_chan.send(TimeMsg(category, ms));\n return val;\n}\n\npub fn time<T>(msg: &str, callback: &fn() -> T) -> T{\n let start_time = precise_time_ns();\n let val = callback();\n let end_time = precise_time_ns();\n let ms = ((end_time - start_time) as float \/ 1000000f);\n if ms >= 5f {\n debug!(\"%s took %? ms\", msg, ms);\n }\n return val;\n}\n\n\n<commit_msg>auto merge of #772 : larsbergstrom\/servo\/profile-chan-fix, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n\/\/ Timing functions.\nuse extra::time::precise_time_ns;\nuse std::cell::Cell;\nuse std::comm::{Port, SharedChan};\nuse extra::sort::tim_sort;\nuse std::iterator::AdditiveIterator;\n\n\/\/ front-end representation of the profiler used to communicate with the profiler\n#[deriving(Clone)]\npub struct ProfilerChan {\n chan: SharedChan<ProfilerMsg>,\n}\n\nimpl ProfilerChan {\n pub fn new(chan: Chan<ProfilerMsg>) -> ProfilerChan {\n ProfilerChan {\n chan: SharedChan::new(chan),\n }\n }\n pub fn send(&self, msg: ProfilerMsg) {\n self.chan.send(msg);\n }\n}\n\n#[deriving(Eq, Clone)]\npub enum ProfilerCategory {\n CompositingCategory,\n LayoutQueryCategory,\n LayoutPerformCategory,\n LayoutAuxInitCategory,\n LayoutSelectorMatchCategory,\n LayoutTreeBuilderCategory,\n LayoutMainCategory,\n LayoutShapingCategory,\n LayoutDispListBuildCategory,\n GfxRegenAvailableFontsCategory,\n RenderingDrawingCategory,\n RenderingPrepBuffCategory,\n RenderingCategory,\n \/\/ hackish but helps prevent errors when adding new categories\n NUM_BUCKETS,\n}\n\/\/ FIXME(#5873) this should be initialized by a NUM_BUCKETS cast,\nstatic BUCKETS: uint = 13;\ntype ProfilerBuckets = [(ProfilerCategory, ~[float]), ..BUCKETS];\n\npub enum ProfilerMsg {\n \/\/ Normal message used for reporting time\n TimeMsg(ProfilerCategory, float),\n \/\/ Message used to force print the profiling metrics\n PrintMsg,\n}\n\n\/\/ back end of the profiler that handles data aggregation and performance metrics\npub struct Profiler {\n port: Port<ProfilerMsg>,\n buckets: ProfilerBuckets,\n last_msg: Option<ProfilerMsg>,\n}\n\nimpl ProfilerCategory {\n \/\/ convenience function to not have to cast every time\n pub fn num_buckets() -> uint {\n NUM_BUCKETS as uint\n }\n\n \/\/ enumeration of all ProfilerCategory types\n \/\/ TODO(tkuehn): is there a better way to ensure proper order of categories?\n fn empty_buckets() -> ProfilerBuckets {\n let buckets = [\n (CompositingCategory, ~[]),\n (LayoutQueryCategory, ~[]),\n (LayoutPerformCategory, ~[]),\n (LayoutAuxInitCategory, ~[]),\n (LayoutSelectorMatchCategory, ~[]),\n (LayoutTreeBuilderCategory, ~[]),\n (LayoutMainCategory, ~[]),\n (LayoutShapingCategory, ~[]),\n (LayoutDispListBuildCategory, ~[]),\n (GfxRegenAvailableFontsCategory, ~[]),\n (RenderingDrawingCategory, ~[]),\n (RenderingPrepBuffCategory, ~[]),\n (RenderingCategory, ~[]),\n ];\n\n ProfilerCategory::check_order(&buckets);\n buckets\n }\n\n \/\/ ensure that the order of the buckets matches the order of the enum categories\n fn check_order(vec: &ProfilerBuckets) {\n for &(category, _) in vec.iter() {\n if category != vec[category as uint].first() {\n fail!(\"Enum category does not match bucket index. 
This is a bug.\");\n }\n }\n }\n\n \/\/ some categories are subcategories of LayoutPerformCategory\n \/\/ and should be printed to indicate this\n pub fn format(self) -> ~str {\n let padding = match self {\n LayoutAuxInitCategory | LayoutSelectorMatchCategory | LayoutTreeBuilderCategory |\n LayoutMainCategory | LayoutDispListBuildCategory | LayoutShapingCategory=> \" - \",\n _ => \"\"\n };\n fmt!(\"%s%?\", padding, self)\n }\n}\n\nimpl Profiler {\n pub fn create(port: Port<ProfilerMsg>) {\n let port = Cell::new(port);\n do spawn {\n let mut profiler = Profiler::new(port.take());\n profiler.start();\n }\n }\n\n pub fn new(port: Port<ProfilerMsg>) -> Profiler {\n Profiler {\n port: port,\n buckets: ProfilerCategory::empty_buckets(),\n last_msg: None,\n }\n }\n\n pub fn start(&mut self) {\n loop {\n let msg = self.port.try_recv();\n match msg {\n Some (msg) => self.handle_msg(msg),\n None => break\n }\n }\n }\n\n fn handle_msg(&mut self, msg: ProfilerMsg) {\n match msg {\n TimeMsg(category, t) => match self.buckets[category as uint] {\n \/\/TODO(tkuehn): would be nice to have tuple.second_mut()\n (_, ref mut data) => data.push(t),\n },\n PrintMsg => match self.last_msg {\n \/\/ only print if more data has arrived since the last printout\n Some(TimeMsg(*)) => self.print_buckets(),\n _ => {}\n },\n };\n self.last_msg = Some(msg);\n }\n\n fn print_buckets(&mut self) {\n println(fmt!(\"%31s %15s %15s %15s %15s %15s\",\n \"_category_\", \"_mean (ms)_\", \"_median (ms)_\",\n \"_min (ms)_\", \"_max (ms)_\", \"_bucket size_\"));\n for bucket in self.buckets.mut_iter() {\n let (category, data) = match *bucket {\n (category, ref mut data) => (category, data),\n };\n tim_sort(*data);\n let data_len = data.len();\n if data_len > 0 {\n let (mean, median, &min, &max) =\n (data.iter().map(|&x|x).sum() \/ (data_len as float),\n data[data_len \/ 2],\n data.iter().min().unwrap(),\n data.iter().max().unwrap());\n println(fmt!(\"%-30s: %15.4f %15.4f %15.4f %15.4f %15u\",\n category.format(), mean, median, min, max, data_len));\n }\n }\n println(\"\");\n }\n}\n\n\npub fn profile<T>(category: ProfilerCategory, \n profiler_chan: ProfilerChan,\n callback: &fn() -> T)\n -> T {\n let start_time = precise_time_ns();\n let val = callback();\n let end_time = precise_time_ns();\n let ms = ((end_time - start_time) as float \/ 1000000f);\n profiler_chan.send(TimeMsg(category, ms));\n return val;\n}\n\npub fn time<T>(msg: &str, callback: &fn() -> T) -> T{\n let start_time = precise_time_ns();\n let val = callback();\n let end_time = precise_time_ns();\n let ms = ((end_time - start_time) as float \/ 1000000f);\n if ms >= 5f {\n debug!(\"%s took %? ms\", msg, ms);\n }\n return val;\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! An \"interner\" is a data structure that associates values with usize tags and\n\/\/! allows bidirectional lookup; i.e. given a value, one can easily find the\n\/\/! 
type, and vice versa.\n\nuse hygiene::SyntaxContext;\n\nuse serialize::{Decodable, Decoder, Encodable, Encoder};\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fmt;\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\npub struct Ident {\n pub name: Symbol,\n pub ctxt: SyntaxContext,\n}\n\nimpl Ident {\n pub const fn with_empty_ctxt(name: Symbol) -> Ident {\n Ident { name: name, ctxt: SyntaxContext::empty() }\n }\n\n \/\/\/ Maps a string to an identifier with an empty syntax context.\n pub fn from_str(string: &str) -> Ident {\n Ident::with_empty_ctxt(Symbol::intern(string))\n }\n\n pub fn modern(self) -> Ident {\n Ident { name: self.name, ctxt: self.ctxt.modern() }\n }\n}\n\nimpl fmt::Debug for Ident {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}{:?}\", self.name, self.ctxt)\n }\n}\n\nimpl fmt::Display for Ident {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.name, f)\n }\n}\n\nimpl Encodable for Ident {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n if self.ctxt.modern() == SyntaxContext::empty() {\n s.emit_str(&self.name.as_str())\n } else { \/\/ FIXME(jseyfried) intercrate hygiene\n let mut string = \"#\".to_owned();\n string.push_str(&self.name.as_str());\n s.emit_str(&string)\n }\n }\n}\n\nimpl Decodable for Ident {\n fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> {\n let string = d.read_str()?;\n Ok(if !string.starts_with('#') {\n Ident::from_str(&string)\n } else { \/\/ FIXME(jseyfried) intercrate hygiene\n Ident::with_empty_ctxt(Symbol::gensym(&string[1..]))\n })\n }\n}\n\n\/\/\/ A symbol is an interned or gensymed string.\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\npub struct Symbol(u32);\n\n\/\/ The interner in thread-local, so `Symbol` shouldn't move between threads.\nimpl !Send for Symbol { }\n\nimpl Symbol {\n \/\/\/ Maps a string to its interned representation.\n pub fn intern(string: &str) -> Self {\n with_interner(|interner| interner.intern(string))\n }\n\n pub fn interned(self) -> Self {\n with_interner(|interner| interner.interned(self))\n }\n\n \/\/\/ gensym's a new usize, using the current interner.\n pub fn gensym(string: &str) -> Self {\n with_interner(|interner| interner.gensym(string))\n }\n\n pub fn gensymed(self) -> Self {\n with_interner(|interner| interner.gensymed(self))\n }\n\n pub fn as_str(self) -> InternedString {\n with_interner(|interner| unsafe {\n InternedString {\n string: ::std::mem::transmute::<&str, &str>(interner.get(self))\n }\n })\n }\n\n pub fn as_u32(self) -> u32 {\n self.0\n }\n}\n\nimpl fmt::Debug for Symbol {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}({})\", self, self.0)\n }\n}\n\nimpl fmt::Display for Symbol {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.as_str(), f)\n }\n}\n\nimpl Encodable for Symbol {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_str(&self.as_str())\n }\n}\n\nimpl Decodable for Symbol {\n fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {\n Ok(Symbol::intern(&d.read_str()?))\n }\n}\n\nimpl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {\n fn eq(&self, other: &T) -> bool {\n self.as_str() == other.deref()\n }\n}\n\n#[derive(Default)]\npub struct Interner {\n names: HashMap<Box<str>, Symbol>,\n strings: Vec<Box<str>>,\n gensyms: Vec<Symbol>,\n}\n\nimpl Interner {\n pub fn new() -> Self {\n Interner::default()\n }\n\n fn prefill(init: &[&str]) -> Self {\n let mut this 
= Interner::new();\n for &string in init {\n this.intern(string);\n }\n this\n }\n\n pub fn intern(&mut self, string: &str) -> Symbol {\n if let Some(&name) = self.names.get(string) {\n return name;\n }\n\n let name = Symbol(self.strings.len() as u32);\n let string = string.to_string().into_boxed_str();\n self.strings.push(string.clone());\n self.names.insert(string, name);\n name\n }\n\n pub fn interned(&self, symbol: Symbol) -> Symbol {\n if (symbol.0 as usize) < self.strings.len() {\n symbol\n } else {\n self.interned(self.gensyms[(!0 - symbol.0) as usize])\n }\n }\n\n fn gensym(&mut self, string: &str) -> Symbol {\n let symbol = self.intern(string);\n self.gensymed(symbol)\n }\n\n fn gensymed(&mut self, symbol: Symbol) -> Symbol {\n self.gensyms.push(symbol);\n Symbol(!0 - self.gensyms.len() as u32 + 1)\n }\n\n pub fn get(&self, symbol: Symbol) -> &str {\n match self.strings.get(symbol.0 as usize) {\n Some(ref string) => string,\n None => self.get(self.gensyms[(!0 - symbol.0) as usize]),\n }\n }\n}\n\n\/\/ In this macro, there is the requirement that the name (the number) must be monotonically\n\/\/ increasing by one in the special identifiers, starting at 0; the same holds for the keywords,\n\/\/ except starting from the next number instead of zero.\nmacro_rules! declare_keywords {(\n $( ($index: expr, $konst: ident, $string: expr) )*\n) => {\n pub mod keywords {\n use super::{Symbol, Ident};\n #[derive(Clone, Copy, PartialEq, Eq)]\n pub struct Keyword {\n ident: Ident,\n }\n impl Keyword {\n #[inline] pub fn ident(self) -> Ident { self.ident }\n #[inline] pub fn name(self) -> Symbol { self.ident.name }\n }\n $(\n #[allow(non_upper_case_globals)]\n pub const $konst: Keyword = Keyword {\n ident: Ident::with_empty_ctxt(super::Symbol($index))\n };\n )*\n }\n\n impl Interner {\n fn fresh() -> Self {\n Interner::prefill(&[$($string,)*])\n }\n }\n}}\n\n\/\/ NB: leaving holes in the ident table is bad! a different ident will get\n\/\/ interned with the id from the hole, but it will be between the min and max\n\/\/ of the reserved words, and thus tagged as \"reserved\".\n\/\/ After modifying this list adjust `is_strict_keyword`\/`is_reserved_keyword`,\n\/\/ this should be rarely necessary though if the keywords are kept in alphabetic order.\ndeclare_keywords! 
{\n \/\/ Invalid identifier\n (0, Invalid, \"\")\n\n \/\/ Strict keywords used in the language.\n (1, As, \"as\")\n (2, Box, \"box\")\n (3, Break, \"break\")\n (4, Const, \"const\")\n (5, Continue, \"continue\")\n (6, Crate, \"crate\")\n (7, Else, \"else\")\n (8, Enum, \"enum\")\n (9, Extern, \"extern\")\n (10, False, \"false\")\n (11, Fn, \"fn\")\n (12, For, \"for\")\n (13, If, \"if\")\n (14, Impl, \"impl\")\n (15, In, \"in\")\n (16, Let, \"let\")\n (17, Loop, \"loop\")\n (18, Match, \"match\")\n (19, Mod, \"mod\")\n (20, Move, \"move\")\n (21, Mut, \"mut\")\n (22, Pub, \"pub\")\n (23, Ref, \"ref\")\n (24, Return, \"return\")\n (25, SelfValue, \"self\")\n (26, SelfType, \"Self\")\n (27, Static, \"static\")\n (28, Struct, \"struct\")\n (29, Super, \"super\")\n (30, Trait, \"trait\")\n (31, True, \"true\")\n (32, Type, \"type\")\n (33, Unsafe, \"unsafe\")\n (34, Use, \"use\")\n (35, Where, \"where\")\n (36, While, \"while\")\n\n \/\/ Keywords reserved for future use.\n (37, Abstract, \"abstract\")\n (38, Alignof, \"alignof\")\n (39, Become, \"become\")\n (40, Do, \"do\")\n (41, Final, \"final\")\n (42, Macro, \"macro\")\n (43, Offsetof, \"offsetof\")\n (44, Override, \"override\")\n (45, Priv, \"priv\")\n (46, Proc, \"proc\")\n (47, Pure, \"pure\")\n (48, Sizeof, \"sizeof\")\n (49, Typeof, \"typeof\")\n (50, Unsized, \"unsized\")\n (51, Virtual, \"virtual\")\n (52, Yield, \"yield\")\n\n \/\/ Weak keywords, have special meaning only in specific contexts.\n (53, Default, \"default\")\n (54, StaticLifetime, \"'static\")\n (55, Union, \"union\")\n (56, Catch, \"catch\")\n\n \/\/ A virtual keyword that resolves to the crate root when used in a lexical scope.\n (57, CrateRoot, \"{{root}}\")\n}\n\n\/\/ If an interner exists in TLS, return it. Otherwise, prepare a fresh one.\nfn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {\n thread_local!(static INTERNER: RefCell<Interner> = {\n RefCell::new(Interner::fresh())\n });\n INTERNER.with(|interner| f(&mut *interner.borrow_mut()))\n}\n\n\/\/\/ Represents a string stored in the thread-local interner. Because the\n\/\/\/ interner lives for the life of the thread, this can be safely treated as an\n\/\/\/ immortal string, as long as it never crosses between threads.\n\/\/\/\n\/\/\/ FIXME(pcwalton): You must be careful about what you do in the destructors\n\/\/\/ of objects stored in TLS, because they may run after the interner is\n\/\/\/ destroyed. In particular, they must not access string contents. 
This can\n\/\/\/ be fixed in the future by just leaking all strings until thread death\n\/\/\/ somehow.\n#[derive(Clone, Hash, PartialOrd, Eq, Ord)]\npub struct InternedString {\n string: &'static str,\n}\n\nimpl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {\n fn as_ref(&self) -> &U {\n self.string.as_ref()\n }\n}\n\nimpl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {\n fn eq(&self, other: &T) -> bool {\n self.string == other.deref()\n }\n}\n\nimpl ::std::cmp::PartialEq<InternedString> for str {\n fn eq(&self, other: &InternedString) -> bool {\n self == other.string\n }\n}\n\nimpl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {\n fn eq(&self, other: &InternedString) -> bool {\n *self == other.string\n }\n}\n\nimpl ::std::cmp::PartialEq<InternedString> for String {\n fn eq(&self, other: &InternedString) -> bool {\n self == other.string\n }\n}\n\nimpl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {\n fn eq(&self, other: &InternedString) -> bool {\n *self == other.string\n }\n}\n\nimpl !Send for InternedString { }\n\nimpl ::std::ops::Deref for InternedString {\n type Target = str;\n fn deref(&self) -> &str { self.string }\n}\n\nimpl fmt::Debug for InternedString {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(self.string, f)\n }\n}\n\nimpl fmt::Display for InternedString {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(self.string, f)\n }\n}\n\nimpl Decodable for InternedString {\n fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {\n Ok(Symbol::intern(&d.read_str()?).as_str())\n }\n}\n\nimpl Encodable for InternedString {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_str(self.string)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn interner_tests() {\n let mut i: Interner = Interner::new();\n \/\/ first one is zero:\n assert_eq!(i.intern(\"dog\"), Symbol(0));\n \/\/ re-use gets the same entry:\n assert_eq!(i.intern (\"dog\"), Symbol(0));\n \/\/ different string gets a different #:\n assert_eq!(i.intern(\"cat\"), Symbol(1));\n assert_eq!(i.intern(\"cat\"), Symbol(1));\n \/\/ dog is still at zero\n assert_eq!(i.intern(\"dog\"), Symbol(0));\n assert_eq!(i.gensym(\"zebra\"), Symbol(4294967295));\n \/\/ gensym of same string gets new number :\n assert_eq!(i.gensym(\"zebra\"), Symbol(4294967294));\n \/\/ gensym of *existing* string gets new number:\n assert_eq!(i.gensym(\"dog\"), Symbol(4294967293));\n }\n}\n<commit_msg>syntax_pos::Symbol should not implement Sync<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! An \"interner\" is a data structure that associates values with usize tags and\n\/\/! allows bidirectional lookup; i.e. given a value, one can easily find the\n\/\/! 
type, and vice versa.\n\nuse hygiene::SyntaxContext;\n\nuse serialize::{Decodable, Decoder, Encodable, Encoder};\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fmt;\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\npub struct Ident {\n pub name: Symbol,\n pub ctxt: SyntaxContext,\n}\n\nimpl Ident {\n pub const fn with_empty_ctxt(name: Symbol) -> Ident {\n Ident { name: name, ctxt: SyntaxContext::empty() }\n }\n\n \/\/\/ Maps a string to an identifier with an empty syntax context.\n pub fn from_str(string: &str) -> Ident {\n Ident::with_empty_ctxt(Symbol::intern(string))\n }\n\n pub fn modern(self) -> Ident {\n Ident { name: self.name, ctxt: self.ctxt.modern() }\n }\n}\n\nimpl fmt::Debug for Ident {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}{:?}\", self.name, self.ctxt)\n }\n}\n\nimpl fmt::Display for Ident {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.name, f)\n }\n}\n\nimpl Encodable for Ident {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n if self.ctxt.modern() == SyntaxContext::empty() {\n s.emit_str(&self.name.as_str())\n } else { \/\/ FIXME(jseyfried) intercrate hygiene\n let mut string = \"#\".to_owned();\n string.push_str(&self.name.as_str());\n s.emit_str(&string)\n }\n }\n}\n\nimpl Decodable for Ident {\n fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> {\n let string = d.read_str()?;\n Ok(if !string.starts_with('#') {\n Ident::from_str(&string)\n } else { \/\/ FIXME(jseyfried) intercrate hygiene\n Ident::with_empty_ctxt(Symbol::gensym(&string[1..]))\n })\n }\n}\n\n\/\/\/ A symbol is an interned or gensymed string.\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\npub struct Symbol(u32);\n\n\/\/ The interner in thread-local, so `Symbol` shouldn't move between threads.\nimpl !Send for Symbol { }\nimpl !Sync for Symbol { }\n\nimpl Symbol {\n \/\/\/ Maps a string to its interned representation.\n pub fn intern(string: &str) -> Self {\n with_interner(|interner| interner.intern(string))\n }\n\n pub fn interned(self) -> Self {\n with_interner(|interner| interner.interned(self))\n }\n\n \/\/\/ gensym's a new usize, using the current interner.\n pub fn gensym(string: &str) -> Self {\n with_interner(|interner| interner.gensym(string))\n }\n\n pub fn gensymed(self) -> Self {\n with_interner(|interner| interner.gensymed(self))\n }\n\n pub fn as_str(self) -> InternedString {\n with_interner(|interner| unsafe {\n InternedString {\n string: ::std::mem::transmute::<&str, &str>(interner.get(self))\n }\n })\n }\n\n pub fn as_u32(self) -> u32 {\n self.0\n }\n}\n\nimpl fmt::Debug for Symbol {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}({})\", self, self.0)\n }\n}\n\nimpl fmt::Display for Symbol {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(&self.as_str(), f)\n }\n}\n\nimpl Encodable for Symbol {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_str(&self.as_str())\n }\n}\n\nimpl Decodable for Symbol {\n fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {\n Ok(Symbol::intern(&d.read_str()?))\n }\n}\n\nimpl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {\n fn eq(&self, other: &T) -> bool {\n self.as_str() == other.deref()\n }\n}\n\n#[derive(Default)]\npub struct Interner {\n names: HashMap<Box<str>, Symbol>,\n strings: Vec<Box<str>>,\n gensyms: Vec<Symbol>,\n}\n\nimpl Interner {\n pub fn new() -> Self {\n Interner::default()\n }\n\n fn prefill(init: 
&[&str]) -> Self {\n let mut this = Interner::new();\n for &string in init {\n this.intern(string);\n }\n this\n }\n\n pub fn intern(&mut self, string: &str) -> Symbol {\n if let Some(&name) = self.names.get(string) {\n return name;\n }\n\n let name = Symbol(self.strings.len() as u32);\n let string = string.to_string().into_boxed_str();\n self.strings.push(string.clone());\n self.names.insert(string, name);\n name\n }\n\n pub fn interned(&self, symbol: Symbol) -> Symbol {\n if (symbol.0 as usize) < self.strings.len() {\n symbol\n } else {\n self.interned(self.gensyms[(!0 - symbol.0) as usize])\n }\n }\n\n fn gensym(&mut self, string: &str) -> Symbol {\n let symbol = self.intern(string);\n self.gensymed(symbol)\n }\n\n fn gensymed(&mut self, symbol: Symbol) -> Symbol {\n self.gensyms.push(symbol);\n Symbol(!0 - self.gensyms.len() as u32 + 1)\n }\n\n pub fn get(&self, symbol: Symbol) -> &str {\n match self.strings.get(symbol.0 as usize) {\n Some(ref string) => string,\n None => self.get(self.gensyms[(!0 - symbol.0) as usize]),\n }\n }\n}\n\n\/\/ In this macro, there is the requirement that the name (the number) must be monotonically\n\/\/ increasing by one in the special identifiers, starting at 0; the same holds for the keywords,\n\/\/ except starting from the next number instead of zero.\nmacro_rules! declare_keywords {(\n $( ($index: expr, $konst: ident, $string: expr) )*\n) => {\n pub mod keywords {\n use super::{Symbol, Ident};\n #[derive(Clone, Copy, PartialEq, Eq)]\n pub struct Keyword {\n ident: Ident,\n }\n impl Keyword {\n #[inline] pub fn ident(self) -> Ident { self.ident }\n #[inline] pub fn name(self) -> Symbol { self.ident.name }\n }\n $(\n #[allow(non_upper_case_globals)]\n pub const $konst: Keyword = Keyword {\n ident: Ident::with_empty_ctxt(super::Symbol($index))\n };\n )*\n }\n\n impl Interner {\n fn fresh() -> Self {\n Interner::prefill(&[$($string,)*])\n }\n }\n}}\n\n\/\/ NB: leaving holes in the ident table is bad! a different ident will get\n\/\/ interned with the id from the hole, but it will be between the min and max\n\/\/ of the reserved words, and thus tagged as \"reserved\".\n\/\/ After modifying this list adjust `is_strict_keyword`\/`is_reserved_keyword`,\n\/\/ this should be rarely necessary though if the keywords are kept in alphabetic order.\ndeclare_keywords! 
{\n \/\/ Invalid identifier\n (0, Invalid, \"\")\n\n \/\/ Strict keywords used in the language.\n (1, As, \"as\")\n (2, Box, \"box\")\n (3, Break, \"break\")\n (4, Const, \"const\")\n (5, Continue, \"continue\")\n (6, Crate, \"crate\")\n (7, Else, \"else\")\n (8, Enum, \"enum\")\n (9, Extern, \"extern\")\n (10, False, \"false\")\n (11, Fn, \"fn\")\n (12, For, \"for\")\n (13, If, \"if\")\n (14, Impl, \"impl\")\n (15, In, \"in\")\n (16, Let, \"let\")\n (17, Loop, \"loop\")\n (18, Match, \"match\")\n (19, Mod, \"mod\")\n (20, Move, \"move\")\n (21, Mut, \"mut\")\n (22, Pub, \"pub\")\n (23, Ref, \"ref\")\n (24, Return, \"return\")\n (25, SelfValue, \"self\")\n (26, SelfType, \"Self\")\n (27, Static, \"static\")\n (28, Struct, \"struct\")\n (29, Super, \"super\")\n (30, Trait, \"trait\")\n (31, True, \"true\")\n (32, Type, \"type\")\n (33, Unsafe, \"unsafe\")\n (34, Use, \"use\")\n (35, Where, \"where\")\n (36, While, \"while\")\n\n \/\/ Keywords reserved for future use.\n (37, Abstract, \"abstract\")\n (38, Alignof, \"alignof\")\n (39, Become, \"become\")\n (40, Do, \"do\")\n (41, Final, \"final\")\n (42, Macro, \"macro\")\n (43, Offsetof, \"offsetof\")\n (44, Override, \"override\")\n (45, Priv, \"priv\")\n (46, Proc, \"proc\")\n (47, Pure, \"pure\")\n (48, Sizeof, \"sizeof\")\n (49, Typeof, \"typeof\")\n (50, Unsized, \"unsized\")\n (51, Virtual, \"virtual\")\n (52, Yield, \"yield\")\n\n \/\/ Weak keywords, have special meaning only in specific contexts.\n (53, Default, \"default\")\n (54, StaticLifetime, \"'static\")\n (55, Union, \"union\")\n (56, Catch, \"catch\")\n\n \/\/ A virtual keyword that resolves to the crate root when used in a lexical scope.\n (57, CrateRoot, \"{{root}}\")\n}\n\n\/\/ If an interner exists in TLS, return it. Otherwise, prepare a fresh one.\nfn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {\n thread_local!(static INTERNER: RefCell<Interner> = {\n RefCell::new(Interner::fresh())\n });\n INTERNER.with(|interner| f(&mut *interner.borrow_mut()))\n}\n\n\/\/\/ Represents a string stored in the thread-local interner. Because the\n\/\/\/ interner lives for the life of the thread, this can be safely treated as an\n\/\/\/ immortal string, as long as it never crosses between threads.\n\/\/\/\n\/\/\/ FIXME(pcwalton): You must be careful about what you do in the destructors\n\/\/\/ of objects stored in TLS, because they may run after the interner is\n\/\/\/ destroyed. In particular, they must not access string contents. 
This can\n\/\/\/ be fixed in the future by just leaking all strings until thread death\n\/\/\/ somehow.\n#[derive(Clone, Hash, PartialOrd, Eq, Ord)]\npub struct InternedString {\n string: &'static str,\n}\n\nimpl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {\n fn as_ref(&self) -> &U {\n self.string.as_ref()\n }\n}\n\nimpl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {\n fn eq(&self, other: &T) -> bool {\n self.string == other.deref()\n }\n}\n\nimpl ::std::cmp::PartialEq<InternedString> for str {\n fn eq(&self, other: &InternedString) -> bool {\n self == other.string\n }\n}\n\nimpl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {\n fn eq(&self, other: &InternedString) -> bool {\n *self == other.string\n }\n}\n\nimpl ::std::cmp::PartialEq<InternedString> for String {\n fn eq(&self, other: &InternedString) -> bool {\n self == other.string\n }\n}\n\nimpl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {\n fn eq(&self, other: &InternedString) -> bool {\n *self == other.string\n }\n}\n\nimpl !Send for InternedString { }\n\nimpl ::std::ops::Deref for InternedString {\n type Target = str;\n fn deref(&self) -> &str { self.string }\n}\n\nimpl fmt::Debug for InternedString {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(self.string, f)\n }\n}\n\nimpl fmt::Display for InternedString {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(self.string, f)\n }\n}\n\nimpl Decodable for InternedString {\n fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {\n Ok(Symbol::intern(&d.read_str()?).as_str())\n }\n}\n\nimpl Encodable for InternedString {\n fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {\n s.emit_str(self.string)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn interner_tests() {\n let mut i: Interner = Interner::new();\n \/\/ first one is zero:\n assert_eq!(i.intern(\"dog\"), Symbol(0));\n \/\/ re-use gets the same entry:\n assert_eq!(i.intern (\"dog\"), Symbol(0));\n \/\/ different string gets a different #:\n assert_eq!(i.intern(\"cat\"), Symbol(1));\n assert_eq!(i.intern(\"cat\"), Symbol(1));\n \/\/ dog is still at zero\n assert_eq!(i.intern(\"dog\"), Symbol(0));\n assert_eq!(i.gensym(\"zebra\"), Symbol(4294967295));\n \/\/ gensym of same string gets new number :\n assert_eq!(i.gensym(\"zebra\"), Symbol(4294967294));\n \/\/ gensym of *existing* string gets new number:\n assert_eq!(i.gensym(\"dog\"), Symbol(4294967293));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Finishing up chapter 17.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update comm::protocol to rethinkdb\/rethinkdb@02d6796<commit_after><|endoftext|>"} {"text":"<commit_before>use lyon::path;\nuse lyon::path_builder::BaseBuilder;\nuse lyon::path_iterator::PathIterator;\nuse lyon::tessellation;\nuse lyon::tessellation::math;\nuse lyon::tessellation::path_stroke;\nuse lyon::tessellation::geometry_builder;\n\nuse super::{Point, Vertex};\nuse GameError;\nuse GameResult;\n\npub type Buffer = geometry_builder::VertexBuffers<Vertex>;\n\n\/\/ Not used anywhere? 
Not sure what vickenty was planning for this.\n\/\/ pub struct ConstantUV {\n\/\/ uv: [f32; 2],\n\/\/ }\n\n\/\/ impl geometry_builder::VertexConstructor<math::Point, Vertex> for ConstantUV {\n\/\/ fn new_vertex(&mut self, input: math::Point) -> Vertex {\n\/\/ Vertex {\n\/\/ pos: [input.x, input.y],\n\/\/ uv: self.uv.clone(),\n\/\/ }\n\/\/ }\n\/\/ }\n\npub struct ScreenUV;\n\nimpl geometry_builder::VertexConstructor<math::Point, Vertex> for ScreenUV {\n fn new_vertex(&mut self, input: math::Point) -> Vertex {\n Vertex {\n pos: [input.x, input.y],\n uv: [input.x, input.y],\n }\n }\n}\n\nfn build_path(points: &[Point], closed: bool) -> path::Path {\n let mut path_builder = path::Builder::with_capacity(points.len());\n path_builder.move_to(math::point(points[0].x, points[0].y));\n\n for p in &points[1..] {\n path_builder.line_to(math::point(p.x, p.y));\n }\n\n if closed {\n path_builder.close();\n }\n\n path_builder.build()\n}\n\ntype BuffersBuilder<'a> = geometry_builder::BuffersBuilder<'a, Vertex, math::Point, ScreenUV>;\n\nfn build_geometry<F>(f: F) -> GameResult<Buffer>\n where F: for<'a> FnOnce(&mut BuffersBuilder<'a>)\n -> Result<tessellation::geometry_builder::Count, ()>\n{\n let mut buffers = geometry_builder::VertexBuffers::new();\n {\n let mut builder = geometry_builder::BuffersBuilder::new(&mut buffers, ScreenUV);\n if let Err(()) = f(&mut builder) {\n return Err(GameError::RenderError(String::from(\"geometry tessellation failed\")));\n }\n }\n Ok(buffers)\n}\n\npub fn build_line(points: &[Point], line_width: f32) -> GameResult<Buffer> {\n let path = build_path(points, false);\n let opts = path_stroke::StrokeOptions::stroke_width(line_width);\n let mut tessellator = path_stroke::StrokeTessellator::new();\n build_geometry(|builder| {\n tessellator.tessellate(path.path_iter().flattened(0.5), &opts, builder)\n })\n}\n\n\/\/\/ Build a closed polygon. 
Identical to build_line but closes the path,\n\/\/\/ which makes sure the two endpoints actually line up.\npub fn build_polygon(points: &[Point], line_width: f32) -> GameResult<Buffer> {\n\n let path = build_path(points, true);\n let opts = path_stroke::StrokeOptions::stroke_width(line_width);\n let mut tessellator = path_stroke::StrokeTessellator::new();\n build_geometry(|builder| {\n tessellator.tessellate(path.path_iter().flattened(0.5), &opts, builder)\n })\n}\n\n\/\/ This would be ideal but really should be left until we can get around\n\/\/ to updating lyon.\n\/\/ pub fn build_polygon_fill(points: &[Point]) -> GameResult<Buffer> {\n\n\/\/ let path = build_path(points, true);\n\/\/ let opts = path_fill::FillOptions::default();\n\/\/ let mut tessellator = path_fill::FillTessellator::new();\n\/\/ build_geometry(|builder| {\n\/\/ tessellator.tessellate_events(path.path_iter()\n\/\/ .flattened(0.5),\n\/\/ &opts,\n\/\/ builder)\n\/\/ })\n\/\/ }\n\npub fn build_ellipse_fill(point: Point, r1: f32, r2: f32, segments: u32) -> GameResult<Buffer> {\n build_geometry(|builder| {\n Ok(tessellation::basic_shapes::tessellate_ellipsis(math::point(point.x,\n point.y),\n math::point(r1, r2),\n segments,\n builder))\n })\n}\n<commit_msg>Remove old unused code.<commit_after>use lyon::path;\nuse lyon::path_builder::BaseBuilder;\nuse lyon::path_iterator::PathIterator;\nuse lyon::tessellation;\nuse lyon::tessellation::math;\nuse lyon::tessellation::path_stroke;\nuse lyon::tessellation::geometry_builder;\n\nuse super::{Point, Vertex};\nuse GameError;\nuse GameResult;\n\npub type Buffer = geometry_builder::VertexBuffers<Vertex>;\n\npub struct ScreenUV;\n\nimpl geometry_builder::VertexConstructor<math::Point, Vertex> for ScreenUV {\n fn new_vertex(&mut self, input: math::Point) -> Vertex {\n Vertex {\n pos: [input.x, input.y],\n uv: [input.x, input.y],\n }\n }\n}\n\nfn build_path(points: &[Point], closed: bool) -> path::Path {\n let mut path_builder = path::Builder::with_capacity(points.len());\n path_builder.move_to(math::point(points[0].x, points[0].y));\n\n for p in &points[1..] {\n path_builder.line_to(math::point(p.x, p.y));\n }\n\n if closed {\n path_builder.close();\n }\n\n path_builder.build()\n}\n\ntype BuffersBuilder<'a> = geometry_builder::BuffersBuilder<'a, Vertex, math::Point, ScreenUV>;\n\nfn build_geometry<F>(f: F) -> GameResult<Buffer>\n where F: for<'a> FnOnce(&mut BuffersBuilder<'a>)\n -> Result<tessellation::geometry_builder::Count, ()>\n{\n let mut buffers = geometry_builder::VertexBuffers::new();\n {\n let mut builder = geometry_builder::BuffersBuilder::new(&mut buffers, ScreenUV);\n if let Err(()) = f(&mut builder) {\n return Err(GameError::RenderError(String::from(\"geometry tessellation failed\")));\n }\n }\n Ok(buffers)\n}\n\npub fn build_line(points: &[Point], line_width: f32) -> GameResult<Buffer> {\n let path = build_path(points, false);\n let opts = path_stroke::StrokeOptions::stroke_width(line_width);\n let mut tessellator = path_stroke::StrokeTessellator::new();\n build_geometry(|builder| {\n tessellator.tessellate(path.path_iter().flattened(0.5), &opts, builder)\n })\n}\n\n\/\/\/ Build a closed polygon. 
Identical to build_line but closes the path,\n\/\/\/ which makes sure the two endpoints actually line up.\npub fn build_polygon(points: &[Point], line_width: f32) -> GameResult<Buffer> {\n\n let path = build_path(points, true);\n let opts = path_stroke::StrokeOptions::stroke_width(line_width);\n let mut tessellator = path_stroke::StrokeTessellator::new();\n build_geometry(|builder| {\n tessellator.tessellate(path.path_iter().flattened(0.5), &opts, builder)\n })\n}\n\n\/\/ This would be ideal but really should be left until we can get around\n\/\/ to updating lyon.\n\/\/ pub fn build_polygon_fill(points: &[Point]) -> GameResult<Buffer> {\n\n\/\/ let path = build_path(points, true);\n\/\/ let opts = path_fill::FillOptions::default();\n\/\/ let mut tessellator = path_fill::FillTessellator::new();\n\/\/ build_geometry(|builder| {\n\/\/ tessellator.tessellate_events(path.path_iter()\n\/\/ .flattened(0.5),\n\/\/ &opts,\n\/\/ builder)\n\/\/ })\n\/\/ }\n\npub fn build_ellipse_fill(point: Point, r1: f32, r2: f32, segments: u32) -> GameResult<Buffer> {\n build_geometry(|builder| {\n Ok(tessellation::basic_shapes::tessellate_ellipsis(math::point(point.x,\n point.y),\n math::point(r1, r2),\n segments,\n builder))\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse super::{LinkArgs, LinkerFlavor, Target, TargetOptions};\n\npub fn target() -> Result<Target, String> {\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Em,\n vec![\"-s\".to_string(),\n \"ERROR_ON_UNDEFINED_SYMBOLS=1\".to_string(),\n \"-s\".to_string(),\n \"ABORTING_MALLOC=0\".to_string()]);\n\n let opts = TargetOptions {\n dynamic_linking: false,\n executables: true,\n exe_suffix: \".js\".to_string(),\n linker_is_gnu: true,\n allow_asm: false,\n obj_is_bitcode: true,\n is_like_emscripten: true,\n max_atomic_width: Some(32),\n post_link_args: args,\n target_family: Some(\"unix\".to_string()),\n codegen_backend: \"emscripten\".to_string(),\n .. Default::default()\n };\n Ok(Target {\n llvm_target: \"asmjs-unknown-emscripten\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_c_int_width: \"32\".to_string(),\n target_os: \"emscripten\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n data_layout: \"e-p:32:32-i64:64-v128:32:128-n32-S128\".to_string(),\n arch: \"asmjs\".to_string(),\n linker_flavor: LinkerFlavor::Em,\n options: opts,\n })\n}\n<commit_msg>Rollup merge of #51875 - badboy:emscripten-no-wasm, r=nikomatsakis<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse super::{LinkArgs, LinkerFlavor, Target, TargetOptions};\n\npub fn target() -> Result<Target, String> {\n let mut args = LinkArgs::new();\n args.insert(LinkerFlavor::Em,\n vec![\"-s\".to_string(),\n \"ERROR_ON_UNDEFINED_SYMBOLS=1\".to_string(),\n \"-s\".to_string(),\n \"ABORTING_MALLOC=0\".to_string(),\n \"-s\".to_string(),\n \"WASM=0\".to_string()]);\n\n let opts = TargetOptions {\n dynamic_linking: false,\n executables: true,\n exe_suffix: \".js\".to_string(),\n linker_is_gnu: true,\n allow_asm: false,\n obj_is_bitcode: true,\n is_like_emscripten: true,\n max_atomic_width: Some(32),\n post_link_args: args,\n target_family: Some(\"unix\".to_string()),\n codegen_backend: \"emscripten\".to_string(),\n .. Default::default()\n };\n Ok(Target {\n llvm_target: \"asmjs-unknown-emscripten\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_c_int_width: \"32\".to_string(),\n target_os: \"emscripten\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n data_layout: \"e-p:32:32-i64:64-v128:32:128-n32-S128\".to_string(),\n arch: \"asmjs\".to_string(),\n linker_flavor: LinkerFlavor::Em,\n options: opts,\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! A single-producer, single-consumer, futures-aware channel\n\nuse std::any::Any;\nuse std::error::Error;\nuse std::fmt;\n\nmod bounded;\nmod unbounded;\nmod queue;\n\npub use self::bounded::{channel, Sender, Receiver};\npub use self::unbounded::{unbounded, UnboundedSender, UnboundedReceiver};\n\n\/\/\/ Error type for sending, used when the receiving end of the channel is\n\/\/\/ dropped\npub struct SendError<T>(T);\n\nimpl<T> fmt::Debug for SendError<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.debug_tuple(\"SendError\")\n .field(&\"...\")\n .finish()\n }\n}\n\nimpl<T> fmt::Display for SendError<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"send failed because receiver is gone\")\n }\n}\n\nimpl<T> Error for SendError<T>\n where T: Any\n{\n fn description(&self) -> &str {\n \"send failed because receiver is gone\"\n }\n}\n\nimpl<T> SendError<T> {\n \/\/\/ Returns the message that was attempted to be sent but failed.\n pub fn into_inner(self) -> T {\n self.0\n }\n}\n\n<commit_msg>Add more docs to spsc<commit_after>\/\/! A single-producer, single-consumer, futures-aware channel\n\/\/!\n\/\/! A channel can be used as a communication primitive between tasks running on\n\/\/! `futures-rs` executors. Channel creation provides `Receiver` and `Sender`\n\/\/! handles. `Receiver` implements `Stream` and allows a task to read values\n\/\/! out of the channel. If there is no message to read from the channel, the\n\/\/! curernt task will be notified when a new value is sent. `Sender` implements\n\/\/! the `Sink` trait and allows a task to send messages into the channel. If\n\/\/! the channel is at capacity, then send will be rejected and the task will be\n\/\/! notified when additional capacity is available.\n\/\/!\n\/\/! # Disconnection\n\/\/!\n\/\/! When all `Sender` handles have been dropped, it is no longer possible to\n\/\/! send values into the channel. This is considered the termination event of\n\/\/! the stream. As such, `Sender::poll` will return `Ok(Ready(None))`.\n\/\/!\n\/\/! If the receiver handle is dropped, then messages can no longer be read out\n\/\/! of the channel. 
In this case, a `send` will result in an error.\n\/\/!\n\/\/! # Clean Shutdown\n\/\/!\n\/\/! If the `Receiver` is simply dropped, then it is possible for there to be\n\/\/! messages still in the channel that will not be processed. As such, it is\n\/\/! usually desirable to perform a \"clean\" shutdown. To do this, the receiver\n\/\/! will first call `close`, which will prevent any further messages to be sent\n\/\/! into the channel. Then, the receiver consumes the channel to completion, at\n\/\/! which point the receiver can be dropped.\n\nuse std::any::Any;\nuse std::error::Error;\nuse std::fmt;\n\nmod bounded;\nmod unbounded;\nmod queue;\n\npub use self::bounded::{channel, Sender, Receiver};\npub use self::unbounded::{unbounded, UnboundedSender, UnboundedReceiver};\n\n\/\/\/ Error type for sending, used when the receiving end of the channel is\n\/\/\/ dropped\npub struct SendError<T>(T);\n\nimpl<T> fmt::Debug for SendError<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.debug_tuple(\"SendError\")\n .field(&\"...\")\n .finish()\n }\n}\n\nimpl<T> fmt::Display for SendError<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"send failed because receiver is gone\")\n }\n}\n\nimpl<T> Error for SendError<T>\n where T: Any\n{\n fn description(&self) -> &str {\n \"send failed because receiver is gone\"\n }\n}\n\nimpl<T> SendError<T> {\n \/\/\/ Returns the message that was attempted to be sent but failed.\n pub fn into_inner(self) -> T {\n self.0\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test<commit_after>macro_rules! e( \/\/~ ERROR unknown macro variable `nonexistent`\n ($inp:ident) => (\n $nonexistent\n );\n)\n\nfn main() {\n e!(foo);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>FEAT(core)<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::UserRWX;\nuse std::io::fs;\nuse std::io::process::{Command,ProcessOutput};\nuse std::io::{File,Truncate,Write};\nuse std::os;\n\npub fn mkdir(path: &Path) {\n match fs::mkdir_recursive(path, UserRWX) {\n Err(_) => {},\n Ok(_) => {},\n }\n}\n\npub fn read(path: &Path) -> Result<String, String> {\n match File::open(path) {\n Err(_) => Err(format!(\"couldn't open {}\", path.display())),\n Ok(mut file) => match file.read_to_str() {\n Err(_) => Err(format!(\"couldn't read {}\", path.display())),\n Ok(string) => Ok(string),\n }\n }\n}\n\npub fn run(prefix: &str, id: &str, src: &str) -> Result<String, String> {\n let cwd = os::getcwd();\n let out_dir = cwd.join(format!(\"bin\/{}\/{}\", prefix, id));\n\n let mut cmd = Command::new(\"rustc\");\n cmd.cwd(&Path::new(format!(\"examples\/{}\/{}\", prefix, id)));\n cmd.arg(format!(\"{}.rs\", src));\n cmd.arg(\"--out-dir\");\n cmd.arg(out_dir);\n\n match cmd.output() {\n Err(_) => return Err(format!(\"couldn't find rustc\")),\n Ok(p) => if !p.status.success() {\n return Ok(String::from_utf8(p.error).unwrap());\n },\n }\n\n let executable = Path::new(format!(\".\/bin\/{}\/{}\/{}\", prefix, id, src));\n\n match Command::new(&executable).output() {\n Err(_) => Err(format!(\"couldn't find {}\", executable.display())),\n Ok(ProcessOutput { error: error, output: output, status: status }) => {\n let mut s = String::from_utf8(output).unwrap();\n if !status.success() {\n s.push_str(String::from_utf8(error).unwrap().as_slice());\n }\n\n Ok(s)\n }\n }\n}\n\npub fn write(path: &Path, string: &str) -> Result<(), String> {\n match File::open_mode(path, Truncate, Write) {\n Err(_) => Err(format!(\"couldn't open {}\", path.display())),\n 
Ok(mut file) => match file.write_str(string) {\n Err(_) => Err(format!(\"couldn't write {}\", path.display())),\n Ok(_) => Ok(()),\n }\n }\n}\n<commit_msg>fix method name: read_to_str should be read_to_string<commit_after>use std::io::UserRWX;\nuse std::io::fs;\nuse std::io::process::{Command,ProcessOutput};\nuse std::io::{File,Truncate,Write};\nuse std::os;\n\npub fn mkdir(path: &Path) {\n match fs::mkdir_recursive(path, UserRWX) {\n Err(_) => {},\n Ok(_) => {},\n }\n}\n\npub fn read(path: &Path) -> Result<String, String> {\n match File::open(path) {\n Err(_) => Err(format!(\"couldn't open {}\", path.display())),\n Ok(mut file) => match file.read_to_string() {\n Err(_) => Err(format!(\"couldn't read {}\", path.display())),\n Ok(string) => Ok(string),\n }\n }\n}\n\npub fn run(prefix: &str, id: &str, src: &str) -> Result<String, String> {\n let cwd = os::getcwd();\n let out_dir = cwd.join(format!(\"bin\/{}\/{}\", prefix, id));\n\n let mut cmd = Command::new(\"rustc\");\n cmd.cwd(&Path::new(format!(\"examples\/{}\/{}\", prefix, id)));\n cmd.arg(format!(\"{}.rs\", src));\n cmd.arg(\"--out-dir\");\n cmd.arg(out_dir);\n\n match cmd.output() {\n Err(_) => return Err(format!(\"couldn't find rustc\")),\n Ok(p) => if !p.status.success() {\n return Ok(String::from_utf8(p.error).unwrap());\n },\n }\n\n let executable = Path::new(format!(\".\/bin\/{}\/{}\/{}\", prefix, id, src));\n\n match Command::new(&executable).output() {\n Err(_) => Err(format!(\"couldn't find {}\", executable.display())),\n Ok(ProcessOutput { error: error, output: output, status: status }) => {\n let mut s = String::from_utf8(output).unwrap();\n if !status.success() {\n s.push_str(String::from_utf8(error).unwrap().as_slice());\n }\n\n Ok(s)\n }\n }\n}\n\npub fn write(path: &Path, string: &str) -> Result<(), String> {\n match File::open_mode(path, Truncate, Write) {\n Err(_) => Err(format!(\"couldn't open {}\", path.display())),\n Ok(mut file) => match file.write_str(string) {\n Err(_) => Err(format!(\"couldn't write {}\", path.display())),\n Ok(_) => Ok(()),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added corner-cases for polygons<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a HTTPResponse struct, with response codes enum<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Allowing acquisition thread to initiate stop.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify first dirty impl.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Enter key now toggles the OSD<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a use-after-free in the readiness queue<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a run-pass test for recursive copyable stack closures.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Ensures that it's legal to create a recursive stack closure as long as\n\/\/ its environment is copyable\n\nstruct R<'self> {\n \/\/ This struct is needed to create the\n \/\/ otherwise infinite type of a fn that\n \/\/ accepts itself as argument:\n c: &'self fn:Copy(&R, uint) -> uint\n}\n\nfn main() {\n \/\/ Stupid version of fibonacci.\n let fib: &fn:Copy(&R, uint) -> uint = |fib, x| {\n if x == 0 || x == 1 {\n x\n } else {\n (fib.c)(fib, x-1) + (fib.c)(fib, x-2)\n }\n };\n assert!(fib(&R { c: fib }, 7) == 13);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add more error handling functions<commit_after><|endoftext|>"} {"text":"<commit_before>#![cfg(feature = \"s3\")]\n\nextern crate env_logger;\n#[macro_use]\nextern crate log;\nextern crate time;\n\n#[macro_use]\nextern crate rusoto;\n\nuse std::io::Read;\nuse std::fs::File;\nuse std::env::var;\nuse rusoto::{DefaultCredentialsProvider, Region};\nuse rusoto::s3::{S3Helper, S3Client, ListObjectsRequest, HeadObjectRequest, CreateBucketRequest};\n\nfn test_bucket() -> String {\n match var(\"S3_TEST_BUCKET\") {\n Ok(val) => val.to_owned(),\n Err(_) => \"rusototester\".to_owned()\n }\n}\n\nfn test_bucket_region() -> Region {\n match var(\"S3_TEST_BUCKET_REGION\") {\n Ok(val) => val.parse().unwrap(),\n Err(_) => \"us-west-2\".parse().unwrap()\n }\n}\n\n#[test]\nfn list_buckets_tests() {\n let _ = env_logger::init();\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let response = s3.list_buckets().unwrap();\n info!(\"Got list of buckets: {:?}\", response);\n for q in response.buckets {\n info!(\"Existing bucket: {:?}\", q.name);\n }\n}\n\n#[test]\nfn object_lifecycle_test() {\n \/\/ PUT an object\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => {\n s3.put_object(&test_bucket(), \"no_credentials\", &contents).unwrap();\n }\n }\n\n let client = S3Client::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n \/\/ HEAD the object that was PUT\n let size_req = HeadObjectRequest{\n bucket: test_bucket(),\n key: \"no_credentials\".to_string(),\n ..Default::default()\n };\n\n println!(\"{:?}\", client.head_object(&size_req).unwrap());\n\n \/\/ GET the object\n s3.get_object(&test_bucket(), \"no_credentials\").unwrap();\n\n \/\/ DELETE the object\n s3.delete_object(&test_bucket(), \"no_credentials\").unwrap(); \n}\n\n#[test]\nfn create_bucket_in_useast1_and_use_immediately() {\n let s3 = S3Client::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let create_bucket_request = CreateBucketRequest{\n bucket: \"rusoto_foo_bucket\".to_string(),\n ..Default::default()\n };\n let bucket_creation_result = s3.create_bucket(&create_bucket_request).unwrap();\n println!(\"bucket created: {:?}\", bucket_creation_result);\n\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => {\n let s3_helper = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let put_response = 
s3_helper.put_object(\"rusoto_foo_bucket\", \"no_credentials\", &contents).unwrap();\n println!(\"put_response is {:?}\", put_response);\n }\n }\n\n}\n\n#[test]\nfn put_and_fetch_timestamp_named_object_test() {\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => {\n s3.put_object(&test_bucket(), \"2016-10-07T23:30:38Z\", &contents).unwrap();\n }\n }\n let get_response = s3.get_object(&test_bucket(), \"2016-10-07T23:30:38Z\").unwrap();\n println!(\"Got object back: {:?}\", get_response);\n}\n\n#[test]\nfn list_objects_test() {\n let _ = env_logger::init();\n let bare_s3 = S3Client::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let mut list_request = ListObjectsRequest::default(); \/\/ need to set bucket\n list_request.bucket = test_bucket();\n let result = bare_s3.list_objects(&list_request).unwrap();\n println!(\"result is {:?}\", result);\n}\n\n<commit_msg>Reworks us-east-1 s3 bucket creation test.<commit_after>#![cfg(feature = \"s3\")]\n\nextern crate env_logger;\n#[macro_use]\nextern crate log;\nextern crate time;\n\n#[macro_use]\nextern crate rusoto;\n\nuse std::io::Read;\nuse std::fs::File;\nuse std::env::var;\nuse rusoto::{DefaultCredentialsProvider, Region};\nuse rusoto::s3::{S3Helper, S3Client, ListObjectsRequest, HeadObjectRequest, CreateBucketRequest};\n\nfn test_bucket() -> String {\n match var(\"S3_TEST_BUCKET\") {\n Ok(val) => val.to_owned(),\n Err(_) => \"rusototester\".to_owned()\n }\n}\n\nfn test_bucket_region() -> Region {\n match var(\"S3_TEST_BUCKET_REGION\") {\n Ok(val) => val.parse().unwrap(),\n Err(_) => \"us-west-2\".parse().unwrap()\n }\n}\n\n#[test]\nfn list_buckets_tests() {\n let _ = env_logger::init();\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let response = s3.list_buckets().unwrap();\n info!(\"Got list of buckets: {:?}\", response);\n for q in response.buckets {\n info!(\"Existing bucket: {:?}\", q.name);\n }\n}\n\n#[test]\nfn object_lifecycle_test() {\n \/\/ PUT an object\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => {\n s3.put_object(&test_bucket(), \"no_credentials\", &contents).unwrap();\n }\n }\n\n let client = S3Client::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n \/\/ HEAD the object that was PUT\n let size_req = HeadObjectRequest{\n bucket: test_bucket(),\n key: \"no_credentials\".to_string(),\n ..Default::default()\n };\n\n println!(\"{:?}\", client.head_object(&size_req).unwrap());\n\n \/\/ GET the object\n s3.get_object(&test_bucket(), \"no_credentials\").unwrap();\n\n \/\/ DELETE the object\n s3.delete_object(&test_bucket(), \"no_credentials\").unwrap(); \n}\n\n#[test]\nfn create_bucket_in_useast1_and_use_immediately() {\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => (),\n }\n\n let bucket_name = 
format!(\"rusototestbucket\");\n let s3_helper = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);\n let s3 = S3Client::new(DefaultCredentialsProvider::new().unwrap(), Region::UsEast1);\n let create_bucket_request = CreateBucketRequest{\n bucket: bucket_name.clone(),\n ..Default::default()\n };\n let bucket_creation_result = s3.create_bucket(&create_bucket_request).unwrap();\n println!(\"bucket created: {:?}\", bucket_creation_result);\n \n let put_response = s3_helper.put_object(&bucket_name, \"no_credentials\", &contents).unwrap();\n println!(\"put_response is {:?}\", put_response);\n}\n\n#[test]\nfn put_and_fetch_timestamp_named_object_test() {\n let s3 = S3Helper::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let mut f = File::open(\"tests\/sample-data\/no_credentials\").unwrap();\n let mut contents : Vec<u8> = Vec::new();\n match f.read_to_end(&mut contents) {\n Err(why) => panic!(\"Error opening file to send to S3: {}\", why),\n Ok(_) => {\n s3.put_object(&test_bucket(), \"2016-10-07T23:30:38Z\", &contents).unwrap();\n }\n }\n let get_response = s3.get_object(&test_bucket(), \"2016-10-07T23:30:38Z\").unwrap();\n println!(\"Got object back: {:?}\", get_response);\n}\n\n#[test]\nfn list_objects_test() {\n let _ = env_logger::init();\n let bare_s3 = S3Client::new(DefaultCredentialsProvider::new().unwrap(), test_bucket_region());\n let mut list_request = ListObjectsRequest::default(); \/\/ need to set bucket\n list_request.bucket = test_bucket();\n let result = bare_s3.list_objects(&list_request).unwrap();\n println!(\"result is {:?}\", result);\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::WebSocketBinding;\nuse dom::bindings::codegen::Bindings::WebSocketBinding::WebSocketMethods;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::codegen::InheritTypes::EventCast;\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::error::Error::{InvalidAccess, Syntax};\nuse dom::bindings::global::{GlobalField, GlobalRef};\nuse dom::bindings::js::{Temporary, JSRef, Rootable};\nuse dom::bindings::refcounted::Trusted;\nuse dom::bindings::str::USVString;\nuse dom::bindings::trace::JSTraceable;\nuse dom::bindings::utils::reflect_dom_object;\nuse dom::closeevent::CloseEvent;\nuse dom::event::{Event, EventBubbles, EventCancelable, EventHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse script_task::Runnable;\nuse script_task::ScriptMsg;\nuse std::cell::{Cell, RefCell};\nuse std::borrow::ToOwned;\nuse util::str::DOMString;\n\nuse websocket::Message;\nuse websocket::ws::sender::Sender as Sender_Object;\nuse websocket::client::sender::Sender;\nuse websocket::client::receiver::Receiver;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\nuse websocket::Client;\n\n#[derive(PartialEq, Copy, Clone)]\n#[jstraceable]\nenum WebSocketRequestState {\n Connecting = 0,\n Open = 1,\n Closing = 2,\n Closed = 3,\n}\n\nno_jsmanaged_fields!(Sender<WebSocketStream>);\nno_jsmanaged_fields!(Receiver<WebSocketStream>);\n\n#[dom_struct]\npub struct WebSocket {\n eventtarget: EventTarget,\n url: DOMString,\n global: GlobalField,\n ready_state: Cell<WebSocketRequestState>,\n sender: RefCell<Option<Sender<WebSocketStream>>>,\n receiver: RefCell<Option<Receiver<WebSocketStream>>>,\n failed: Cell<bool>, \/\/Flag to tell if websocket was closed due to failure\n full: Cell<bool>, \/\/Flag to tell if websocket queue is full\n clean_close: Cell<bool>, \/\/Flag to tell if the websocket closed cleanly (not due to full or fail)\n code: Cell<u16>, \/\/Closing code\n reason: DOMRefCell<DOMString>, \/\/Closing reason\n data: DOMRefCell<DOMString>, \/\/Data from send - TODO: Remove after buffer is added.\n sendCloseFrame: Cell<bool>\n}\n\nfn parse_web_socket_url(url_str: &str) -> Fallible<(Url, String, u16, String, bool)> {\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/#parse-a-websocket-url's-components\n \/\/ Steps 1 and 2\n let parsed_url = Url::parse(url_str);\n let parsed_url = match parsed_url {\n Ok(parsed_url) => parsed_url,\n Err(_) => return Err(Error::Syntax),\n };\n\n \/\/ Step 4\n if parsed_url.fragment != None {\n return Err(Error::Syntax);\n }\n\n \/\/ Steps 3 and 5\n let secure = match parsed_url.scheme.as_ref() {\n \"ws\" => false,\n \"wss\" => true,\n _ => return Err(Error::Syntax), \/\/ step 3\n };\n\n let host = parsed_url.host().unwrap().serialize(); \/\/ Step 6\n let port = parsed_url.port_or_default().unwrap(); \/\/ Steps 7 and 8\n let mut resource = parsed_url.path().unwrap().connect(\"\/\"); \/\/ Step 9\n if resource.is_empty() {\n resource = \"\/\".to_owned(); \/\/ Step 10\n }\n\n \/\/ Step 11\n if let Some(ref query) = parsed_url.query {\n resource.push('?');\n resource.push_str(query);\n }\n\n \/\/ Step 12\n \/\/ FIXME remove parsed_url once it's no longer used in WebSocket::new\n Ok((parsed_url, host, port, resource, secure))\n}\n\nimpl WebSocket {\n pub fn new_inherited(global: GlobalRef, url: DOMString) -> WebSocket {\n WebSocket 
{\n eventtarget: EventTarget::new_inherited(EventTargetTypeId::WebSocket),\n url: url,\n global: GlobalField::from_rooted(&global),\n ready_state: Cell::new(WebSocketRequestState::Connecting),\n failed: Cell::new(false),\n sender: RefCell::new(None),\n receiver: RefCell::new(None),\n full: Cell::new(false),\n clean_close: Cell::new(true),\n code: Cell::new(0),\n reason: DOMRefCell::new(\"\".to_owned()),\n data: DOMRefCell::new(\"\".to_owned()),\n sendCloseFrame: Cell::new(false)\n }\n\n }\n\n pub fn new(global: GlobalRef, url: DOMString) -> Fallible<Temporary<WebSocket>> {\n \/*TODO: This constructor is only a prototype, it does not accomplish the specs\n defined here:\n http:\/\/html.spec.whatwg.org\n Item 1 is already satisfied.\n The remaining 8 items must be satisfied.\n TODO: This constructor should be responsible for spawning a thread for the\n receive loop after ws_root.r().Open() - See comment\n *\/\n let ws_root = reflect_dom_object(box WebSocket::new_inherited(global, url),\n global,\n WebSocketBinding::Wrap).root();\n let ws_root = ws_root.r();\n\n \/\/ FIXME extract the right variables once Client::connect implementation is\n \/\/ fixed to follow the RFC 6455 properly\n let (parsed_url, _, _, _, _) = try!(parse_web_socket_url(&ws_root.url));\n\n \/\/ TODO Client::connect does not conform to RFC 6455\n \/\/ see https:\/\/github.com\/cyderize\/rust-websocket\/issues\/38\n let request = Client::connect(parsed_url).unwrap();\n let response = request.send().unwrap();\n response.validate().unwrap();\n ws_root.ready_state.set(WebSocketRequestState::Open);\n \/\/Check to see if ready_state is Closing or Closed and failed = true - means we failed the websocket\n \/\/if so return without setting any states\n let ready_state = ws_root.ready_state.get();\n let failed = ws_root.failed.get();\n if failed && (ready_state == WebSocketRequestState::Closed || ready_state == WebSocketRequestState::Closing) {\n \/\/Do nothing else. Let the close finish.\n return Ok(Temporary::from_rooted(ws_root));\n }\n\n let (temp_sender, temp_receiver) = response.begin().split();\n let mut other_sender = ws_root.sender.borrow_mut();\n let mut other_receiver = ws_root.receiver.borrow_mut();\n *other_sender = Some(temp_sender);\n *other_receiver = Some(temp_receiver);\n\n \/\/Create everything necessary for starting the open asynchronous task, then begin the task.\n let global_root = ws_root.global.root();\n let addr: Trusted<WebSocket> = Trusted::new(global_root.r().get_cx(), ws_root, global_root.r().script_chan().clone());\n let open_task = box WebSocketTaskHandler::new(addr.clone(), WebSocketTask::Open);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(open_task)).unwrap();\n \/\/TODO: Spawn thread here for receive loop\n \/*TODO: Add receive loop here and make new thread run this\n Receive is an infinite loop \"similiar\" the one shown here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop however does need to follow the spec. These are outlined here\n under \"WebSocket message has been received\" items 1-5:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop also needs to dispatch an asynchronous event as stated here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: When the receive loop receives a close message from the server,\n it confirms the websocket is now closed. 
This requires the close event\n to be fired (dispatch_close fires the close event - see implementation below)\n *\/\n Ok(Temporary::from_rooted(ws_root))\n }\n\n pub fn Constructor(global: GlobalRef, url: DOMString) -> Fallible<Temporary<WebSocket>> {\n WebSocket::new(global, url)\n }\n}\n\nimpl<'a> WebSocketMethods for JSRef<'a, WebSocket> {\n event_handler!(open, GetOnopen, SetOnopen);\n event_handler!(close, GetOnclose, SetOnclose);\n event_handler!(error, GetOnerror, SetOnerror);\n\n fn Url(self) -> DOMString {\n self.url.clone()\n }\n\n fn ReadyState(self) -> u16 {\n self.ready_state.get() as u16\n }\n\n fn Send(self, data: Option<USVString>)-> Fallible<()>{\n \/*TODO: This is not up to spec see http:\/\/html.spec.whatwg.org\/multipage\/comms.html search for \"If argument is a string\"\n TODO: Need to buffer data\n TODO: bufferedAmount attribute returns the size of the buffer in bytes -\n this is a required attribute defined in the websocket.webidl file\n TODO: The send function needs to flag when full by using the following\n self.full.set(true). This needs to be done when the buffer is full\n *\/\n let mut other_sender = self.sender.borrow_mut();\n let my_sender = other_sender.as_mut().unwrap();\n if self.sendCloseFrame.get() { \/\/TODO: Also check if the buffer is full\n self.sendCloseFrame.set(false);\n let _ = my_sender.send_message(Message::Close(None));\n return Ok(());\n }\n let _ = my_sender.send_message(Message::Text(data.unwrap().0));\n return Ok(())\n }\n\n fn Close(self, code: Option<u16>, reason: Option<USVString>) -> Fallible<()>{\n if let Some(code) = code {\n \/\/Check code is NOT 1000 NOR in the range of 3000-4999 (inclusive)\n if code != 1000 && (code < 3000 || code > 4999) {\n return Err(Error::InvalidAccess);\n }\n }\n if let Some(ref reason) = reason {\n if reason.0.as_bytes().len() > 123 { \/\/reason cannot be larger than 123 bytes\n return Err(Error::Syntax);\n }\n }\n\n match self.ready_state.get() {\n WebSocketRequestState::Closing | WebSocketRequestState::Closed => {} \/\/Do nothing\n WebSocketRequestState::Connecting => { \/\/Connection is not yet established\n \/*By setting the state to closing, the open function\n will abort connecting the websocket*\/\n self.ready_state.set(WebSocketRequestState::Closing);\n self.failed.set(true);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n \/\/TODO: Sending here is just empty string, though no string is really needed. 
Another send, empty send, could be used.\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and must fire a close event\n }\n WebSocketRequestState::Open => {\n \/\/Closing handshake not started - still in open\n \/\/Start the closing by setting the code and reason if they exist\n if let Some(code) = code {\n self.code.set(code);\n }\n if let Some(reason) = reason {\n *self.reason.borrow_mut() = reason.0;\n }\n self.ready_state.set(WebSocketRequestState::Closing);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and must fire a close event\n }\n }\n Ok(()) \/\/Return Ok\n }\n}\n\n\npub enum WebSocketTask {\n Open,\n Close,\n}\n\npub struct WebSocketTaskHandler {\n addr: Trusted<WebSocket>,\n task: WebSocketTask,\n}\n\nimpl WebSocketTaskHandler {\n pub fn new(addr: Trusted<WebSocket>, task: WebSocketTask) -> WebSocketTaskHandler {\n WebSocketTaskHandler {\n addr: addr,\n task: task,\n }\n }\n\n fn dispatch_open(&self) {\n \/*TODO: Items 1, 3, 4, & 5 under \"WebSocket connection is established\" as specified here:\n https:\/\/html.spec.whatwg.org\/multipage\/#feedback-from-the-protocol\n *\/\n let ws = self.addr.to_temporary().root(); \/\/Get root\n let ws = ws.r(); \/\/Get websocket reference\n let global = ws.global.root();\n let event = Event::new(global.r(),\n \"open\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n\n fn dispatch_close(&self) {\n let ws = self.addr.to_temporary().root();\n let ws = ws.r();\n let global = ws.global.root();\n ws.ready_state.set(WebSocketRequestState::Closed);\n \/\/If failed or full, fire error event\n if ws.failed.get() || ws.full.get() {\n ws.failed.set(false);\n ws.full.set(false);\n \/\/A Bad close\n ws.clean_close.set(false);\n let event = Event::new(global.r(),\n \"error\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::Cancelable).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n let rsn = ws.reason.borrow();\n let rsn_clone = rsn.clone();\n \/*In addition, we also have to fire a close even if error event fired\n https:\/\/html.spec.whatwg.org\/multipage\/#closeWebSocket\n *\/\n let close_event = CloseEvent::new(global.r(),\n \"close\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::Cancelable,\n ws.clean_close.get(),\n ws.code.get(),\n rsn_clone).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n let event: JSRef<Event> = EventCast::from_ref(close_event.r());\n event.fire(target);\n }\n}\n\nimpl Runnable for WebSocketTaskHandler {\n fn handler(self: Box<WebSocketTaskHandler>) {\n match self.task {\n WebSocketTask::Open => {\n self.dispatch_open();\n }\n WebSocketTask::Close => {\n self.dispatch_close();\n }\n }\n }\n}\n\n<commit_msg>fixes 6111: WebSocket close event shouldn't be Cancelable<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::WebSocketBinding;\nuse dom::bindings::codegen::Bindings::WebSocketBinding::WebSocketMethods;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::codegen::InheritTypes::EventCast;\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::error::Error::{InvalidAccess, Syntax};\nuse dom::bindings::global::{GlobalField, GlobalRef};\nuse dom::bindings::js::{Temporary, JSRef, Rootable};\nuse dom::bindings::refcounted::Trusted;\nuse dom::bindings::str::USVString;\nuse dom::bindings::trace::JSTraceable;\nuse dom::bindings::utils::reflect_dom_object;\nuse dom::closeevent::CloseEvent;\nuse dom::event::{Event, EventBubbles, EventCancelable, EventHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse script_task::Runnable;\nuse script_task::ScriptMsg;\nuse std::cell::{Cell, RefCell};\nuse std::borrow::ToOwned;\nuse util::str::DOMString;\n\nuse websocket::Message;\nuse websocket::ws::sender::Sender as Sender_Object;\nuse websocket::client::sender::Sender;\nuse websocket::client::receiver::Receiver;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\nuse websocket::Client;\n\n#[derive(PartialEq, Copy, Clone)]\n#[jstraceable]\nenum WebSocketRequestState {\n Connecting = 0,\n Open = 1,\n Closing = 2,\n Closed = 3,\n}\n\nno_jsmanaged_fields!(Sender<WebSocketStream>);\nno_jsmanaged_fields!(Receiver<WebSocketStream>);\n\n#[dom_struct]\npub struct WebSocket {\n eventtarget: EventTarget,\n url: DOMString,\n global: GlobalField,\n ready_state: Cell<WebSocketRequestState>,\n sender: RefCell<Option<Sender<WebSocketStream>>>,\n receiver: RefCell<Option<Receiver<WebSocketStream>>>,\n failed: Cell<bool>, \/\/Flag to tell if websocket was closed due to failure\n full: Cell<bool>, \/\/Flag to tell if websocket queue is full\n clean_close: Cell<bool>, \/\/Flag to tell if the websocket closed cleanly (not due to full or fail)\n code: Cell<u16>, \/\/Closing code\n reason: DOMRefCell<DOMString>, \/\/Closing reason\n data: DOMRefCell<DOMString>, \/\/Data from send - TODO: Remove after buffer is added.\n sendCloseFrame: Cell<bool>\n}\n\nfn parse_web_socket_url(url_str: &str) -> Fallible<(Url, String, u16, String, bool)> {\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/#parse-a-websocket-url's-components\n \/\/ Steps 1 and 2\n let parsed_url = Url::parse(url_str);\n let parsed_url = match parsed_url {\n Ok(parsed_url) => parsed_url,\n Err(_) => return Err(Error::Syntax),\n };\n\n \/\/ Step 4\n if parsed_url.fragment != None {\n return Err(Error::Syntax);\n }\n\n \/\/ Steps 3 and 5\n let secure = match parsed_url.scheme.as_ref() {\n \"ws\" => false,\n \"wss\" => true,\n _ => return Err(Error::Syntax), \/\/ step 3\n };\n\n let host = parsed_url.host().unwrap().serialize(); \/\/ Step 6\n let port = parsed_url.port_or_default().unwrap(); \/\/ Steps 7 and 8\n let mut resource = parsed_url.path().unwrap().connect(\"\/\"); \/\/ Step 9\n if resource.is_empty() {\n resource = \"\/\".to_owned(); \/\/ Step 10\n }\n\n \/\/ Step 11\n if let Some(ref query) = parsed_url.query {\n resource.push('?');\n resource.push_str(query);\n }\n\n \/\/ Step 12\n \/\/ FIXME remove parsed_url once it's no longer used in WebSocket::new\n Ok((parsed_url, host, port, resource, secure))\n}\n\nimpl WebSocket {\n pub fn new_inherited(global: GlobalRef, url: DOMString) -> WebSocket {\n WebSocket 
{\n eventtarget: EventTarget::new_inherited(EventTargetTypeId::WebSocket),\n url: url,\n global: GlobalField::from_rooted(&global),\n ready_state: Cell::new(WebSocketRequestState::Connecting),\n failed: Cell::new(false),\n sender: RefCell::new(None),\n receiver: RefCell::new(None),\n full: Cell::new(false),\n clean_close: Cell::new(true),\n code: Cell::new(0),\n reason: DOMRefCell::new(\"\".to_owned()),\n data: DOMRefCell::new(\"\".to_owned()),\n sendCloseFrame: Cell::new(false)\n }\n\n }\n\n pub fn new(global: GlobalRef, url: DOMString) -> Fallible<Temporary<WebSocket>> {\n \/*TODO: This constructor is only a prototype, it does not accomplish the specs\n defined here:\n http:\/\/html.spec.whatwg.org\n Item 1 is already satisfied.\n The remaining 8 items must be satisfied.\n TODO: This constructor should be responsible for spawning a thread for the\n receive loop after ws_root.r().Open() - See comment\n *\/\n let ws_root = reflect_dom_object(box WebSocket::new_inherited(global, url),\n global,\n WebSocketBinding::Wrap).root();\n let ws_root = ws_root.r();\n\n \/\/ FIXME extract the right variables once Client::connect implementation is\n \/\/ fixed to follow the RFC 6455 properly\n let (parsed_url, _, _, _, _) = try!(parse_web_socket_url(&ws_root.url));\n\n \/\/ TODO Client::connect does not conform to RFC 6455\n \/\/ see https:\/\/github.com\/cyderize\/rust-websocket\/issues\/38\n let request = Client::connect(parsed_url).unwrap();\n let response = request.send().unwrap();\n response.validate().unwrap();\n ws_root.ready_state.set(WebSocketRequestState::Open);\n \/\/Check to see if ready_state is Closing or Closed and failed = true - means we failed the websocket\n \/\/if so return without setting any states\n let ready_state = ws_root.ready_state.get();\n let failed = ws_root.failed.get();\n if failed && (ready_state == WebSocketRequestState::Closed || ready_state == WebSocketRequestState::Closing) {\n \/\/Do nothing else. Let the close finish.\n return Ok(Temporary::from_rooted(ws_root));\n }\n\n let (temp_sender, temp_receiver) = response.begin().split();\n let mut other_sender = ws_root.sender.borrow_mut();\n let mut other_receiver = ws_root.receiver.borrow_mut();\n *other_sender = Some(temp_sender);\n *other_receiver = Some(temp_receiver);\n\n \/\/Create everything necessary for starting the open asynchronous task, then begin the task.\n let global_root = ws_root.global.root();\n let addr: Trusted<WebSocket> = Trusted::new(global_root.r().get_cx(), ws_root, global_root.r().script_chan().clone());\n let open_task = box WebSocketTaskHandler::new(addr.clone(), WebSocketTask::Open);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(open_task)).unwrap();\n \/\/TODO: Spawn thread here for receive loop\n \/*TODO: Add receive loop here and make new thread run this\n Receive is an infinite loop \"similiar\" the one shown here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop however does need to follow the spec. These are outlined here\n under \"WebSocket message has been received\" items 1-5:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop also needs to dispatch an asynchronous event as stated here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: When the receive loop receives a close message from the server,\n it confirms the websocket is now closed. 
This requires the close event\n to be fired (dispatch_close fires the close event - see implementation below)\n *\/\n Ok(Temporary::from_rooted(ws_root))\n }\n\n pub fn Constructor(global: GlobalRef, url: DOMString) -> Fallible<Temporary<WebSocket>> {\n WebSocket::new(global, url)\n }\n}\n\nimpl<'a> WebSocketMethods for JSRef<'a, WebSocket> {\n event_handler!(open, GetOnopen, SetOnopen);\n event_handler!(close, GetOnclose, SetOnclose);\n event_handler!(error, GetOnerror, SetOnerror);\n\n fn Url(self) -> DOMString {\n self.url.clone()\n }\n\n fn ReadyState(self) -> u16 {\n self.ready_state.get() as u16\n }\n\n fn Send(self, data: Option<USVString>)-> Fallible<()>{\n \/*TODO: This is not up to spec see http:\/\/html.spec.whatwg.org\/multipage\/comms.html search for \"If argument is a string\"\n TODO: Need to buffer data\n TODO: bufferedAmount attribute returns the size of the buffer in bytes -\n this is a required attribute defined in the websocket.webidl file\n TODO: The send function needs to flag when full by using the following\n self.full.set(true). This needs to be done when the buffer is full\n *\/\n let mut other_sender = self.sender.borrow_mut();\n let my_sender = other_sender.as_mut().unwrap();\n if self.sendCloseFrame.get() { \/\/TODO: Also check if the buffer is full\n self.sendCloseFrame.set(false);\n let _ = my_sender.send_message(Message::Close(None));\n return Ok(());\n }\n let _ = my_sender.send_message(Message::Text(data.unwrap().0));\n return Ok(())\n }\n\n fn Close(self, code: Option<u16>, reason: Option<USVString>) -> Fallible<()>{\n if let Some(code) = code {\n \/\/Check code is NOT 1000 NOR in the range of 3000-4999 (inclusive)\n if code != 1000 && (code < 3000 || code > 4999) {\n return Err(Error::InvalidAccess);\n }\n }\n if let Some(ref reason) = reason {\n if reason.0.as_bytes().len() > 123 { \/\/reason cannot be larger than 123 bytes\n return Err(Error::Syntax);\n }\n }\n\n match self.ready_state.get() {\n WebSocketRequestState::Closing | WebSocketRequestState::Closed => {} \/\/Do nothing\n WebSocketRequestState::Connecting => { \/\/Connection is not yet established\n \/*By setting the state to closing, the open function\n will abort connecting the websocket*\/\n self.ready_state.set(WebSocketRequestState::Closing);\n self.failed.set(true);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n \/\/TODO: Sending here is just empty string, though no string is really needed. 
Another send, empty send, could be used.\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and must fire a close event\n }\n WebSocketRequestState::Open => {\n \/\/Closing handshake not started - still in open\n \/\/Start the closing by setting the code and reason if they exist\n if let Some(code) = code {\n self.code.set(code);\n }\n if let Some(reason) = reason {\n *self.reason.borrow_mut() = reason.0;\n }\n self.ready_state.set(WebSocketRequestState::Closing);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and must fire a close event\n }\n }\n Ok(()) \/\/Return Ok\n }\n}\n\n\npub enum WebSocketTask {\n Open,\n Close,\n}\n\npub struct WebSocketTaskHandler {\n addr: Trusted<WebSocket>,\n task: WebSocketTask,\n}\n\nimpl WebSocketTaskHandler {\n pub fn new(addr: Trusted<WebSocket>, task: WebSocketTask) -> WebSocketTaskHandler {\n WebSocketTaskHandler {\n addr: addr,\n task: task,\n }\n }\n\n fn dispatch_open(&self) {\n \/*TODO: Items 1, 3, 4, & 5 under \"WebSocket connection is established\" as specified here:\n https:\/\/html.spec.whatwg.org\/multipage\/#feedback-from-the-protocol\n *\/\n let ws = self.addr.to_temporary().root(); \/\/Get root\n let ws = ws.r(); \/\/Get websocket reference\n let global = ws.global.root();\n let event = Event::new(global.r(),\n \"open\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n\n fn dispatch_close(&self) {\n let ws = self.addr.to_temporary().root();\n let ws = ws.r();\n let global = ws.global.root();\n ws.ready_state.set(WebSocketRequestState::Closed);\n \/\/If failed or full, fire error event\n if ws.failed.get() || ws.full.get() {\n ws.failed.set(false);\n ws.full.set(false);\n \/\/A Bad close\n ws.clean_close.set(false);\n let event = Event::new(global.r(),\n \"error\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::Cancelable).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n let rsn = ws.reason.borrow();\n let rsn_clone = rsn.clone();\n \/*In addition, we also have to fire a close even if error event fired\n https:\/\/html.spec.whatwg.org\/multipage\/#closeWebSocket\n *\/\n let close_event = CloseEvent::new(global.r(),\n \"close\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable,\n ws.clean_close.get(),\n ws.code.get(),\n rsn_clone).root();\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(ws);\n let event: JSRef<Event> = EventCast::from_ref(close_event.r());\n event.fire(target);\n }\n}\n\nimpl Runnable for WebSocketTaskHandler {\n fn handler(self: Box<WebSocketTaskHandler>) {\n match self.task {\n WebSocketTask::Open => {\n self.dispatch_open();\n }\n WebSocketTask::Close => {\n self.dispatch_close();\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Day 18 solution<commit_after>\/\/ advent18.rs\n\/\/ game of life\n\nuse std::io;\nuse std::cmp::min;\n\n#[cfg(not(test))]\nconst GRID_SIZE: usize = 100;\n\n#[cfg(test)]\nconst GRID_SIZE: usize = 6;\n\ntype LightGrid = [[bool; GRID_SIZE]; GRID_SIZE];\n\nfn main() {\n let mut grid = [[false; GRID_SIZE]; GRID_SIZE];\n let mut row = 0;\n\n loop {\n let mut input = String::new();\n let result = 
io::stdin().read_line(&mut input);\n match result {\n Ok(byte_count) => if byte_count == 0 { break; },\n Err(_) => {\n println!(\"error reading from stdin\");\n break;\n }\n }\n\n assert!(row < GRID_SIZE);\n for (i, c) in input.trim().char_indices() {\n grid[row][i] = match c {\n '.' => false,\n '#' => true,\n _ => panic!(\"Unexpected character!\")\n };\n }\n\n row += 1;\n }\n\n assert!(row == GRID_SIZE);\n\n let mut grid2 = grid;\n\n for _ in 0..100 {\n grid = calc_new_grid(&grid);\n print_grid(&grid);\n }\n\n println!(\"Lights: {}\", count_lights(&grid));\n \n set_corners(&mut grid2);\n for _ in 0..100 {\n grid2 = calc_new_grid2(&grid2);\n print_grid(&grid2);\n }\n\n println!(\"Lights part 2: {}\", count_lights(&grid2));\n}\n\n\/\/ just for fun\nfn print_grid(grid: &LightGrid) {\n for j in 0..GRID_SIZE {\n let mut row_string = String::new();\n for i in 0..GRID_SIZE {\n row_string.push(if grid[j][i] { '#' } else { '.' });\n }\n println!(\"{}\", row_string);\n }\n}\n\n\/\/ count how many neighbor lights are on around point (x,y)\nfn count_neighbors(grid: &LightGrid, x: usize, y: usize) -> usize {\n let x1 = if x == 0 { 0 } else { x - 1 };\n let x2 = min(x + 2, GRID_SIZE);\n let y1 = if y == 0 { 0 } else { y - 1 };\n let y2 = min(y + 2, GRID_SIZE);\n\n let mut total = 0;\n for i in x1..x2 {\n for j in y1..y2 {\n if (i != x || j != y) && grid[j][i] {\n total += 1;\n }\n }\n }\n total\n}\n\n\/\/ new state of the light at this location\nfn new_light(grid: &LightGrid, x: usize, y: usize) -> bool {\n let neighbors = count_neighbors(grid, x, y);\n\n if grid[y][x] {\n neighbors == 2 || neighbors == 3\n } else {\n neighbors == 3\n }\n}\n\nfn calc_new_grid(grid: &LightGrid) -> LightGrid {\n let mut new_grid = [[false; GRID_SIZE]; GRID_SIZE];\n for i in 0..GRID_SIZE {\n for j in 0..GRID_SIZE {\n new_grid[j][i] = new_light(grid, i, j);\n }\n }\n new_grid\n}\n\nfn count_lights(grid: &LightGrid) -> u32 {\n let mut total = 0;\n\n for row in grid.iter() {\n for light in row.iter() {\n if *light {\n total += 1;\n }\n }\n }\n\n total\n}\n\n#[test]\nfn test_count_neighbors() {\n let grid: LightGrid = [\n [false, true, false, true, false, true],\n [false, false, false, true, true, false],\n [true, false, false, false, false, true],\n [false, false, true, false, false, false],\n [true, false, true, false, false, true],\n [true, true, true, true, false, false]];\n\n assert_eq!(1, count_neighbors(&grid, 0, 0));\n assert_eq!(2, count_neighbors(&grid, 0, 5));\n assert_eq!(1, count_neighbors(&grid, 5, 0));\n assert_eq!(1, count_neighbors(&grid, 5, 5));\n assert_eq!(2, count_neighbors(&grid, 1, 1));\n}\n\n#[test]\nfn test_calc_new_grid() {\n const GRID_TEMPLATE: LightGrid = [\n [false, true, false, true, false, true],\n [false, false, false, true, true, false],\n [true, false, false, false, false, true],\n [false, false, true, false, false, false],\n [true, false, true, false, false, true],\n [true, true, true, true, false, false]];\n\n const GRID_SOLUTION: LightGrid = [\n [false, false, false, false, false, false],\n [false, false, false, false, false, false],\n [false, false, true, true, false, false],\n [false, false, true, true, false, false],\n [false, false, false, false, false, false],\n [false, false, false, false, false, false]];\n\n let mut grid = GRID_TEMPLATE;\n for _ in 0..4 {\n grid = calc_new_grid(&grid);\n }\n\n assert_eq!(GRID_SOLUTION, grid);\n assert_eq!(4, count_lights(&grid));\n}\n\n\/\/ part 2\n\nfn set_corners(grid: &mut LightGrid) {\n grid[0][0] = true; \n grid[0][GRID_SIZE - 1] = true; \n 
grid[GRID_SIZE - 1][0] = true; \n grid[GRID_SIZE - 1][GRID_SIZE - 1] = true; \n}\n\nfn calc_new_grid2(grid: &LightGrid) -> LightGrid {\n let mut new_grid = calc_new_grid(grid);\n set_corners(&mut new_grid);\n new_grid\n}\n\n#[test]\nfn test_calc_new_grid2() {\n const GRID_TEMPLATE: LightGrid = [\n [false, true, false, true, false, true],\n [false, false, false, true, true, false],\n [true, false, false, false, false, true],\n [false, false, true, false, false, false],\n [true, false, true, false, false, true],\n [true, true, true, true, false, false]];\n\n const GRID_SOLUTION: LightGrid = [\n [true, true, false, true, true, true],\n [false, true, true, false, false, true],\n [false, true, true, false, false, false],\n [false, true, true, false, false, false],\n [true, false, true, false, false, false],\n [true, true, false, false, false, true]];\n\n let mut grid = GRID_TEMPLATE;\n set_corners(&mut grid);\n\n for _ in 0..5 {\n grid = calc_new_grid2(&grid);\n }\n\n assert_eq!(GRID_SOLUTION, grid);\n assert_eq!(17, count_lights(&grid));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test binary<commit_after>extern crate chef_api;\n\n#[macro_use]\nextern crate log;\nextern crate env_logger;\n\nuse chef_api::api_client::ApiClient;\nuse chef_api::config::Config;\nuse chef_api::requests::node::NodesRequest;\n\nuse std::io::Read;\n\npub fn main() {\n env_logger::init().unwrap();\n let cfg = Config::from_json(\"\/Users\/thom\/.chef\/knife.json\");\n let client = ApiClient::new(cfg);\n let nodes = NodesRequest::new();\n\n let res = client.run(nodes);\n\n let mut output = String::new();\n res.unwrap().read_to_string(&mut output);\n info!(\"{:?}\", output)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>critters fight back<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse platform::font_list::get_available_families;\nuse platform::font_list::get_system_default_family;\nuse platform::font_list::get_variations_for_family;\nuse platform::font_list::get_last_resort_font_families;\nuse platform::font_context::FontContextHandle;\n\nuse std::collections::HashMap;\nuse sync::Arc;\nuse font_template::{FontTemplate, FontTemplateDescriptor};\nuse platform::font_template::FontTemplateData;\nuse servo_net::resource_task::{ResourceTask, load_whole_resource};\nuse url::Url;\n\n\/\/\/ A list of font templates that make up a given font family.\nstruct FontFamily {\n templates: Vec<FontTemplate>,\n}\n\nimpl FontFamily {\n fn new() -> FontFamily {\n FontFamily {\n templates: vec!(),\n }\n }\n\n \/\/\/ Find a font in this family that matches a given desriptor.\n fn find_font_for_style<'a>(&'a mut self, desc: &FontTemplateDescriptor, fctx: &FontContextHandle)\n -> Option<Arc<FontTemplateData>> {\n \/\/ TODO(Issue #189): optimize lookup for\n \/\/ regular\/bold\/italic\/bolditalic with fixed offsets and a\n \/\/ static decision table for fallback between these values.\n\n \/\/ TODO(Issue #190): if not in the fast path above, do\n \/\/ expensive matching of weights, etc.\n for template in self.templates.mut_iter() {\n let maybe_template = template.get_if_matches(fctx, desc);\n if maybe_template.is_some() {\n return maybe_template;\n }\n }\n\n \/\/ If a request is made for a font family that exists,\n \/\/ pick the first valid font in the family if we failed\n \/\/ to find an exact match for the descriptor.\n for template in self.templates.mut_iter() {\n let maybe_template = template.get();\n if maybe_template.is_some() {\n return maybe_template;\n }\n }\n\n None\n }\n\n fn add_template(&mut self, identifier: &str, maybe_data: Option<Vec<u8>>) {\n for template in self.templates.iter() {\n if template.identifier() == identifier {\n return;\n }\n }\n\n let template = FontTemplate::new(identifier, maybe_data);\n self.templates.push(template);\n }\n}\n\n\/\/\/ Commands that the FontContext sends to the font cache task.\npub enum Command {\n GetFontTemplate(String, FontTemplateDescriptor, Sender<Reply>),\n AddWebFont(String, Url, Sender<()>),\n Exit(Sender<()>),\n}\n\n\/\/\/ Reply messages sent from the font cache task to the FontContext caller.\npub enum Reply {\n GetFontTemplateReply(Arc<FontTemplateData>),\n}\n\n\/\/\/ The font cache task itself. 
It maintains a list of reference counted\n\/\/\/ font templates that are currently in use.\nstruct FontCache {\n port: Receiver<Command>,\n generic_fonts: HashMap<String, String>,\n local_families: HashMap<String, FontFamily>,\n web_families: HashMap<String, FontFamily>,\n font_context: FontContextHandle,\n resource_task: ResourceTask,\n}\n\nfn add_generic_font(generic_fonts: &mut HashMap<String, String>,\n generic_name: &str, mapped_name: &str) {\n let opt_system_default = get_system_default_family(generic_name);\n let family_name = match opt_system_default {\n Some(system_default) => system_default,\n None => mapped_name.to_string(),\n };\n generic_fonts.insert(generic_name.to_string(), family_name);\n}\n\nimpl FontCache {\n fn run(&mut self) {\n loop {\n let msg = self.port.recv();\n\n match msg {\n GetFontTemplate(family, descriptor, result) => {\n let maybe_font_template = self.get_font_template(&family, &descriptor);\n let font_template = match maybe_font_template {\n Some(font_template) => font_template,\n None => self.get_last_resort_template(&descriptor),\n };\n\n result.send(GetFontTemplateReply(font_template));\n }\n AddWebFont(family_name, url, result) => {\n let maybe_resource = load_whole_resource(&self.resource_task, url.clone());\n match maybe_resource {\n Ok((_, bytes)) => {\n if !self.web_families.contains_key(&family_name) {\n let family = FontFamily::new();\n self.web_families.insert(family_name.clone(), family);\n }\n let family = self.web_families.get_mut(&family_name);\n family.add_template(format!(\"{}\", url).as_slice(), Some(bytes));\n },\n Err(msg) => {\n fail!(\"{}: url={}\", msg, url);\n }\n }\n result.send(());\n }\n Exit(result) => {\n result.send(());\n break;\n }\n }\n }\n }\n\n fn refresh_local_families(&mut self) {\n self.local_families.clear();\n get_available_families(|family_name| {\n if !self.local_families.contains_key(&family_name) {\n let family = FontFamily::new();\n self.local_families.insert(family_name, family);\n }\n });\n }\n\n fn transform_family(&self, family: &String) -> String {\n match self.generic_fonts.find(family) {\n None => family.to_string(),\n Some(mapped_family) => (*mapped_family).clone()\n }\n }\n\n fn find_font_in_local_family<'a>(&'a mut self, family_name: &String, desc: &FontTemplateDescriptor)\n -> Option<Arc<FontTemplateData>> {\n \/\/ TODO(Issue #188): look up localized font family names if canonical name not found\n \/\/ look up canonical name\n if self.local_families.contains_key(family_name) {\n debug!(\"FontList: Found font family with name={:s}\", family_name.to_string());\n let s = self.local_families.get_mut(family_name);\n\n if s.templates.len() == 0 {\n get_variations_for_family(family_name.as_slice(), |path| {\n s.add_template(path.as_slice(), None);\n });\n }\n\n \/\/ TODO(Issue #192: handle generic font families, like 'serif' and 'sans-serif'.\n \/\/ if such family exists, try to match style to a font\n let result = s.find_font_for_style(desc, &self.font_context);\n if result.is_some() {\n return result;\n }\n\n None\n } else {\n debug!(\"FontList: Couldn't find font family with name={:s}\", family_name.to_string());\n None\n }\n }\n\n fn find_font_in_web_family<'a>(&'a mut self, family_name: &String, desc: &FontTemplateDescriptor)\n -> Option<Arc<FontTemplateData>> {\n if self.web_families.contains_key(family_name) {\n let family = self.web_families.get_mut(family_name);\n let maybe_font = family.find_font_for_style(desc, &self.font_context);\n maybe_font\n } else {\n None\n }\n }\n\n fn 
get_font_template(&mut self, family: &String, desc: &FontTemplateDescriptor) -> Option<Arc<FontTemplateData>> {\n let transformed_family_name = self.transform_family(family);\n let mut maybe_template = self.find_font_in_web_family(&transformed_family_name, desc);\n if maybe_template.is_none() {\n maybe_template = self.find_font_in_local_family(&transformed_family_name, desc);\n }\n maybe_template\n }\n\n fn get_last_resort_template(&mut self, desc: &FontTemplateDescriptor) -> Arc<FontTemplateData> {\n let last_resort = get_last_resort_font_families();\n\n for family in last_resort.iter() {\n let maybe_font_in_family = self.find_font_in_local_family(family, desc);\n if maybe_font_in_family.is_some() {\n return maybe_font_in_family.unwrap();\n }\n }\n\n fail!(\"Unable to find any fonts that match (do you have fallback fonts installed?)\");\n }\n}\n\n\/\/\/ The public interface to the font cache task, used exclusively by\n\/\/\/ the per-thread\/task FontContext structures.\n#[deriving(Clone)]\npub struct FontCacheTask {\n chan: Sender<Command>,\n}\n\nimpl FontCacheTask {\n pub fn new(resource_task: ResourceTask) -> FontCacheTask {\n let (chan, port) = channel();\n\n spawn(proc() {\n \/\/ TODO: Allow users to specify these.\n let mut generic_fonts = HashMap::with_capacity(5);\n add_generic_font(&mut generic_fonts, \"serif\", \"Times New Roman\");\n add_generic_font(&mut generic_fonts, \"sans-serif\", \"Arial\");\n add_generic_font(&mut generic_fonts, \"cursive\", \"Apple Chancery\");\n add_generic_font(&mut generic_fonts, \"fantasy\", \"Papyrus\");\n add_generic_font(&mut generic_fonts, \"monospace\", \"Menlo\");\n\n let mut cache = FontCache {\n port: port,\n generic_fonts: generic_fonts,\n local_families: HashMap::new(),\n web_families: HashMap::new(),\n font_context: FontContextHandle::new(),\n resource_task: resource_task,\n };\n\n cache.refresh_local_families();\n cache.run();\n });\n\n FontCacheTask {\n chan: chan,\n }\n }\n\n pub fn get_font_template(&self, family: String, desc: FontTemplateDescriptor)\n -> Arc<FontTemplateData> {\n\n let (response_chan, response_port) = channel();\n self.chan.send(GetFontTemplate(family, desc, response_chan));\n\n let reply = response_port.recv();\n\n match reply {\n GetFontTemplateReply(data) => {\n data\n }\n }\n }\n\n pub fn add_web_font(&self, family: String, url: Url) {\n let (response_chan, response_port) = channel();\n self.chan.send(AddWebFont(family, url, response_chan));\n response_port.recv();\n }\n\n pub fn exit(&self) {\n let (response_chan, response_port) = channel();\n self.chan.send(Exit(response_chan));\n response_port.recv();\n }\n}\n<commit_msg>Print debug message instead of fail when unable to load web font. Fixes #3301.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse platform::font_list::get_available_families;\nuse platform::font_list::get_system_default_family;\nuse platform::font_list::get_variations_for_family;\nuse platform::font_list::get_last_resort_font_families;\nuse platform::font_context::FontContextHandle;\n\nuse std::collections::HashMap;\nuse sync::Arc;\nuse font_template::{FontTemplate, FontTemplateDescriptor};\nuse platform::font_template::FontTemplateData;\nuse servo_net::resource_task::{ResourceTask, load_whole_resource};\nuse url::Url;\n\n\/\/\/ A list of font templates that make up a given font family.\nstruct FontFamily {\n templates: Vec<FontTemplate>,\n}\n\nimpl FontFamily {\n fn new() -> FontFamily {\n FontFamily {\n templates: vec!(),\n }\n }\n\n \/\/\/ Find a font in this family that matches a given desriptor.\n fn find_font_for_style<'a>(&'a mut self, desc: &FontTemplateDescriptor, fctx: &FontContextHandle)\n -> Option<Arc<FontTemplateData>> {\n \/\/ TODO(Issue #189): optimize lookup for\n \/\/ regular\/bold\/italic\/bolditalic with fixed offsets and a\n \/\/ static decision table for fallback between these values.\n\n \/\/ TODO(Issue #190): if not in the fast path above, do\n \/\/ expensive matching of weights, etc.\n for template in self.templates.mut_iter() {\n let maybe_template = template.get_if_matches(fctx, desc);\n if maybe_template.is_some() {\n return maybe_template;\n }\n }\n\n \/\/ If a request is made for a font family that exists,\n \/\/ pick the first valid font in the family if we failed\n \/\/ to find an exact match for the descriptor.\n for template in self.templates.mut_iter() {\n let maybe_template = template.get();\n if maybe_template.is_some() {\n return maybe_template;\n }\n }\n\n None\n }\n\n fn add_template(&mut self, identifier: &str, maybe_data: Option<Vec<u8>>) {\n for template in self.templates.iter() {\n if template.identifier() == identifier {\n return;\n }\n }\n\n let template = FontTemplate::new(identifier, maybe_data);\n self.templates.push(template);\n }\n}\n\n\/\/\/ Commands that the FontContext sends to the font cache task.\npub enum Command {\n GetFontTemplate(String, FontTemplateDescriptor, Sender<Reply>),\n AddWebFont(String, Url, Sender<()>),\n Exit(Sender<()>),\n}\n\n\/\/\/ Reply messages sent from the font cache task to the FontContext caller.\npub enum Reply {\n GetFontTemplateReply(Arc<FontTemplateData>),\n}\n\n\/\/\/ The font cache task itself. 
It maintains a list of reference counted\n\/\/\/ font templates that are currently in use.\nstruct FontCache {\n port: Receiver<Command>,\n generic_fonts: HashMap<String, String>,\n local_families: HashMap<String, FontFamily>,\n web_families: HashMap<String, FontFamily>,\n font_context: FontContextHandle,\n resource_task: ResourceTask,\n}\n\nfn add_generic_font(generic_fonts: &mut HashMap<String, String>,\n generic_name: &str, mapped_name: &str) {\n let opt_system_default = get_system_default_family(generic_name);\n let family_name = match opt_system_default {\n Some(system_default) => system_default,\n None => mapped_name.to_string(),\n };\n generic_fonts.insert(generic_name.to_string(), family_name);\n}\n\nimpl FontCache {\n fn run(&mut self) {\n loop {\n let msg = self.port.recv();\n\n match msg {\n GetFontTemplate(family, descriptor, result) => {\n let maybe_font_template = self.get_font_template(&family, &descriptor);\n let font_template = match maybe_font_template {\n Some(font_template) => font_template,\n None => self.get_last_resort_template(&descriptor),\n };\n\n result.send(GetFontTemplateReply(font_template));\n }\n AddWebFont(family_name, url, result) => {\n let maybe_resource = load_whole_resource(&self.resource_task, url.clone());\n match maybe_resource {\n Ok((_, bytes)) => {\n if !self.web_families.contains_key(&family_name) {\n let family = FontFamily::new();\n self.web_families.insert(family_name.clone(), family);\n }\n let family = self.web_families.get_mut(&family_name);\n family.add_template(format!(\"{}\", url).as_slice(), Some(bytes));\n },\n Err(msg) => {\n debug!(\"Failed to load web font: family={} url={}\", family_name, url);\n }\n }\n result.send(());\n }\n Exit(result) => {\n result.send(());\n break;\n }\n }\n }\n }\n\n fn refresh_local_families(&mut self) {\n self.local_families.clear();\n get_available_families(|family_name| {\n if !self.local_families.contains_key(&family_name) {\n let family = FontFamily::new();\n self.local_families.insert(family_name, family);\n }\n });\n }\n\n fn transform_family(&self, family: &String) -> String {\n match self.generic_fonts.find(family) {\n None => family.to_string(),\n Some(mapped_family) => (*mapped_family).clone()\n }\n }\n\n fn find_font_in_local_family<'a>(&'a mut self, family_name: &String, desc: &FontTemplateDescriptor)\n -> Option<Arc<FontTemplateData>> {\n \/\/ TODO(Issue #188): look up localized font family names if canonical name not found\n \/\/ look up canonical name\n if self.local_families.contains_key(family_name) {\n debug!(\"FontList: Found font family with name={:s}\", family_name.to_string());\n let s = self.local_families.get_mut(family_name);\n\n if s.templates.len() == 0 {\n get_variations_for_family(family_name.as_slice(), |path| {\n s.add_template(path.as_slice(), None);\n });\n }\n\n \/\/ TODO(Issue #192: handle generic font families, like 'serif' and 'sans-serif'.\n \/\/ if such family exists, try to match style to a font\n let result = s.find_font_for_style(desc, &self.font_context);\n if result.is_some() {\n return result;\n }\n\n None\n } else {\n debug!(\"FontList: Couldn't find font family with name={:s}\", family_name.to_string());\n None\n }\n }\n\n fn find_font_in_web_family<'a>(&'a mut self, family_name: &String, desc: &FontTemplateDescriptor)\n -> Option<Arc<FontTemplateData>> {\n if self.web_families.contains_key(family_name) {\n let family = self.web_families.get_mut(family_name);\n let maybe_font = family.find_font_for_style(desc, &self.font_context);\n maybe_font\n } else {\n 
None\n }\n }\n\n fn get_font_template(&mut self, family: &String, desc: &FontTemplateDescriptor) -> Option<Arc<FontTemplateData>> {\n let transformed_family_name = self.transform_family(family);\n let mut maybe_template = self.find_font_in_web_family(&transformed_family_name, desc);\n if maybe_template.is_none() {\n maybe_template = self.find_font_in_local_family(&transformed_family_name, desc);\n }\n maybe_template\n }\n\n fn get_last_resort_template(&mut self, desc: &FontTemplateDescriptor) -> Arc<FontTemplateData> {\n let last_resort = get_last_resort_font_families();\n\n for family in last_resort.iter() {\n let maybe_font_in_family = self.find_font_in_local_family(family, desc);\n if maybe_font_in_family.is_some() {\n return maybe_font_in_family.unwrap();\n }\n }\n\n fail!(\"Unable to find any fonts that match (do you have fallback fonts installed?)\");\n }\n}\n\n\/\/\/ The public interface to the font cache task, used exclusively by\n\/\/\/ the per-thread\/task FontContext structures.\n#[deriving(Clone)]\npub struct FontCacheTask {\n chan: Sender<Command>,\n}\n\nimpl FontCacheTask {\n pub fn new(resource_task: ResourceTask) -> FontCacheTask {\n let (chan, port) = channel();\n\n spawn(proc() {\n \/\/ TODO: Allow users to specify these.\n let mut generic_fonts = HashMap::with_capacity(5);\n add_generic_font(&mut generic_fonts, \"serif\", \"Times New Roman\");\n add_generic_font(&mut generic_fonts, \"sans-serif\", \"Arial\");\n add_generic_font(&mut generic_fonts, \"cursive\", \"Apple Chancery\");\n add_generic_font(&mut generic_fonts, \"fantasy\", \"Papyrus\");\n add_generic_font(&mut generic_fonts, \"monospace\", \"Menlo\");\n\n let mut cache = FontCache {\n port: port,\n generic_fonts: generic_fonts,\n local_families: HashMap::new(),\n web_families: HashMap::new(),\n font_context: FontContextHandle::new(),\n resource_task: resource_task,\n };\n\n cache.refresh_local_families();\n cache.run();\n });\n\n FontCacheTask {\n chan: chan,\n }\n }\n\n pub fn get_font_template(&self, family: String, desc: FontTemplateDescriptor)\n -> Arc<FontTemplateData> {\n\n let (response_chan, response_port) = channel();\n self.chan.send(GetFontTemplate(family, desc, response_chan));\n\n let reply = response_port.recv();\n\n match reply {\n GetFontTemplateReply(data) => {\n data\n }\n }\n }\n\n pub fn add_web_font(&self, family: String, url: Url) {\n let (response_chan, response_port) = channel();\n self.chan.send(AddWebFont(family, url, response_chan));\n response_port.recv();\n }\n\n pub fn exit(&self) {\n let (response_chan, response_port) = channel();\n self.chan.send(Exit(response_chan));\n response_port.recv();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>git merging master<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement youtube api client<commit_after>use std::io::Read;\nuse std::env;\nuse hyper::Client;\nuse hyper::header::Connection;\nuse std::collections::BTreeMap;\nuse rustc_serialize::json;\nuse rustc_serialize::json::{DecodeResult};\nuse std::fs::File;\n\nstatic BASE_URL: &'static str = \"https:\/\/www.googleapis.com\/youtube\/v3\";\nlazy_static! 
{\n static ref API_KEY: String = {\n let opt_key = env::var(\"YOUTUBE_API_KEY\");\n match opt_key {\n Ok(key) => key,\n Err(_) => {\n let mut f = File::open(\"youtube.txt\").unwrap();\n let mut s = String::new();\n let _ = f.read_to_string(&mut s);\n s\n }\n }\n };\n}\n\n\n#[derive(Debug, RustcDecodable, RustcEncodable)]\npub struct Thumbnail {\n pub url: String,\n pub width: i32,\n pub height: i32\n}\n\nimpl PartialEq for Thumbnail {\n fn eq(&self, t: &Thumbnail) -> bool {\n return self.url == t.url;\n }\n}\n\n#[allow(non_snake_case)]\n#[derive(Debug, RustcDecodable, RustcEncodable)]\npub struct PlaylistItemResponse {\n pub kind: String,\n pub etag: String,\n pub nextPageToken: String,\n pub pageInfo: BTreeMap<String, i32>,\n pub items: Vec<PlaylistItem>,\n}\n\n#[allow(non_snake_case)]\n#[derive(Debug, RustcDecodable, RustcEncodable)]\npub struct PlaylistItem {\n pub kind: String,\n pub etag: String,\n pub id: String,\n pub snippet: PlaylistItemSnippet,\n}\n\n#[allow(non_snake_case)]\n#[derive(Debug, RustcDecodable, RustcEncodable)]\npub struct PlaylistItemSnippet {\n pub title: String,\n pub description: String,\n pub publishedAt: String,\n pub channelId: String,\n pub channelTitle: String,\n pub thumbnails: BTreeMap<String, Thumbnail>,\n pub position: i32,\n pub playlistId: String,\n pub resourceId: BTreeMap<String, String>\n}\n\npub fn fetch_playlist(id: &str) -> json::DecodeResult<PlaylistItemResponse> {\n let params = format!(\"key={}&part=snippet&playlistId={}\", *API_KEY, id);\n let url = format!(\"{}\/{}?{}\",\n BASE_URL,\n \"playlistItems\",\n params);\n println!(\"{}\", url);\n let client = Client::new();\n let mut res = client.get(&url)\n .header(Connection::close())\n .send().unwrap();\n\n let mut body = String::new();\n res.read_to_string(&mut body).unwrap();\n\n return json::decode::<PlaylistItemResponse>(&body)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add code for diffie-hellman<commit_after>pub fn private_key(p: u64) -> u64 {\n p - 1\n}\n\nfn modpow(b: u64, e: u64, m: u64) -> u64 {\n if m == 1 {\n 0\n } else {\n let mut r = 1;\n let mut me = e;\n let mut mb = b % m;\n\n while me > 0 {\n if me % 2 == 1 {\n r = (r * mb) % m\n }\n me = me >> 1;\n mb = (mb * mb) % m\n }\n\n r\n }\n}\n\npub fn public_key(p: u64, g: u64, a: u64) -> u64 {\n modpow(g, a, p)\n}\n\npub fn secret(p: u64, b_pub: u64, a: u64) -> u64 {\n modpow(b_pub, a, p)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update rimemd160's use of Macros to align with new restrictions for tokens<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::VecDeque;\nuse std::rc::Rc;\n\nuse borrow_check::nll::region_infer::{Cause, RegionInferenceContext};\nuse borrow_check::nll::ToRegionVid;\nuse rustc::mir::visit::{MirVisitable, PlaceContext, Visitor};\nuse rustc::mir::{Local, Location, Mir};\nuse rustc::ty::{RegionVid, TyCtxt};\nuse rustc_data_structures::fx::FxHashSet;\nuse util::liveness::{self, DefUse, LivenessMode};\n\ncrate fn find<'cx, 'gcx: 'tcx, 'tcx: 'cx>(\n mir: &'cx Mir<'tcx>,\n regioncx: &'cx Rc<RegionInferenceContext<'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_vid: RegionVid,\n start_point: Location,\n) -> Option<Cause> {\n let mut uf = UseFinder {\n mir,\n regioncx,\n tcx,\n region_vid,\n start_point,\n liveness_mode: LivenessMode {\n include_regular_use: true,\n include_drops: true,\n },\n };\n\n uf.find()\n}\n\nstruct UseFinder<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n mir: &'cx Mir<'tcx>,\n regioncx: &'cx Rc<RegionInferenceContext<'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_vid: RegionVid,\n start_point: Location,\n liveness_mode: LivenessMode,\n}\n\nimpl<'cx, 'gcx, 'tcx> UseFinder<'cx, 'gcx, 'tcx> {\n fn find(&mut self) -> Option<Cause> {\n let mut queue = VecDeque::new();\n let mut visited = FxHashSet();\n\n queue.push_back(self.start_point);\n while let Some(p) = queue.pop_front() {\n if !self.regioncx.region_contains(self.region_vid, p) {\n continue;\n }\n\n if !visited.insert(p) {\n continue;\n }\n\n let block_data = &self.mir[p.block];\n\n match self.def_use(p, block_data.visitable(p.statement_index)) {\n Some(DefUseResult::Def) => {}\n\n Some(DefUseResult::UseLive { local }) => {\n return Some(Cause::LiveVar(local, p));\n }\n\n Some(DefUseResult::UseDrop { local }) => {\n return Some(Cause::DropVar(local, p));\n }\n\n None => {\n if p.statement_index < block_data.statements.len() {\n queue.push_back(Location {\n statement_index: p.statement_index + 1,\n ..p\n });\n } else {\n queue.extend(\n block_data\n .terminator()\n .successors()\n .filter(|&bb| Some(&Some(*bb)) != block_data.terminator().unwind())\n .map(|&bb| Location {\n statement_index: 0,\n block: bb,\n }),\n );\n }\n }\n }\n }\n\n None\n }\n\n fn def_use(&self, location: Location, thing: &dyn MirVisitable<'tcx>) -> Option<DefUseResult> {\n let mut visitor = DefUseVisitor {\n mir: self.mir,\n tcx: self.tcx,\n region_vid: self.region_vid,\n liveness_mode: self.liveness_mode,\n def_use_result: None,\n };\n\n thing.apply(location, &mut visitor);\n\n visitor.def_use_result\n }\n}\n\nstruct DefUseVisitor<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n mir: &'cx Mir<'tcx>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_vid: RegionVid,\n liveness_mode: LivenessMode,\n def_use_result: Option<DefUseResult>,\n}\n\nenum DefUseResult {\n Def,\n\n UseLive { local: Local },\n\n UseDrop { local: Local },\n}\n\nimpl<'cx, 'gcx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'gcx, 'tcx> {\n fn visit_local(&mut self, &local: &Local, context: PlaceContext<'tcx>, _: Location) {\n let local_ty = self.mir.local_decls[local].ty;\n\n let mut found_it = false;\n self.tcx.for_each_free_region(&local_ty, |r| {\n if r.to_region_vid() == self.region_vid {\n found_it = true;\n }\n });\n\n if found_it {\n match liveness::categorize(context, self.liveness_mode) {\n Some(DefUse::Def) => {\n self.def_use_result = Some(DefUseResult::Def);\n }\n\n Some(DefUse::Use) => {\n self.def_use_result = if context.is_drop() {\n Some(DefUseResult::UseDrop { local })\n } else {\n Some(DefUseResult::UseLive { local 
})\n };\n }\n\n None => {\n self.def_use_result = None;\n }\n }\n }\n }\n}\n<commit_msg>Let lifetimes on find be inferred<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::VecDeque;\nuse std::rc::Rc;\n\nuse borrow_check::nll::region_infer::{Cause, RegionInferenceContext};\nuse borrow_check::nll::ToRegionVid;\nuse rustc::mir::visit::{MirVisitable, PlaceContext, Visitor};\nuse rustc::mir::{Local, Location, Mir};\nuse rustc::ty::{RegionVid, TyCtxt};\nuse rustc_data_structures::fx::FxHashSet;\nuse util::liveness::{self, DefUse, LivenessMode};\n\ncrate fn find<'tcx>(\n mir: &Mir<'tcx>,\n regioncx: &Rc<RegionInferenceContext<'tcx>>,\n tcx: TyCtxt<'_, '_, 'tcx>,\n region_vid: RegionVid,\n start_point: Location,\n) -> Option<Cause> {\n let mut uf = UseFinder {\n mir,\n regioncx,\n tcx,\n region_vid,\n start_point,\n liveness_mode: LivenessMode {\n include_regular_use: true,\n include_drops: true,\n },\n };\n\n uf.find()\n}\n\nstruct UseFinder<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n mir: &'cx Mir<'tcx>,\n regioncx: &'cx Rc<RegionInferenceContext<'tcx>>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_vid: RegionVid,\n start_point: Location,\n liveness_mode: LivenessMode,\n}\n\nimpl<'cx, 'gcx, 'tcx> UseFinder<'cx, 'gcx, 'tcx> {\n fn find(&mut self) -> Option<Cause> {\n let mut queue = VecDeque::new();\n let mut visited = FxHashSet();\n\n queue.push_back(self.start_point);\n while let Some(p) = queue.pop_front() {\n if !self.regioncx.region_contains(self.region_vid, p) {\n continue;\n }\n\n if !visited.insert(p) {\n continue;\n }\n\n let block_data = &self.mir[p.block];\n\n match self.def_use(p, block_data.visitable(p.statement_index)) {\n Some(DefUseResult::Def) => {}\n\n Some(DefUseResult::UseLive { local }) => {\n return Some(Cause::LiveVar(local, p));\n }\n\n Some(DefUseResult::UseDrop { local }) => {\n return Some(Cause::DropVar(local, p));\n }\n\n None => {\n if p.statement_index < block_data.statements.len() {\n queue.push_back(Location {\n statement_index: p.statement_index + 1,\n ..p\n });\n } else {\n queue.extend(\n block_data\n .terminator()\n .successors()\n .filter(|&bb| Some(&Some(*bb)) != block_data.terminator().unwind())\n .map(|&bb| Location {\n statement_index: 0,\n block: bb,\n }),\n );\n }\n }\n }\n }\n\n None\n }\n\n fn def_use(&self, location: Location, thing: &dyn MirVisitable<'tcx>) -> Option<DefUseResult> {\n let mut visitor = DefUseVisitor {\n mir: self.mir,\n tcx: self.tcx,\n region_vid: self.region_vid,\n liveness_mode: self.liveness_mode,\n def_use_result: None,\n };\n\n thing.apply(location, &mut visitor);\n\n visitor.def_use_result\n }\n}\n\nstruct DefUseVisitor<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n mir: &'cx Mir<'tcx>,\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_vid: RegionVid,\n liveness_mode: LivenessMode,\n def_use_result: Option<DefUseResult>,\n}\n\nenum DefUseResult {\n Def,\n\n UseLive { local: Local },\n\n UseDrop { local: Local },\n}\n\nimpl<'cx, 'gcx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'gcx, 'tcx> {\n fn visit_local(&mut self, &local: &Local, context: PlaceContext<'tcx>, _: Location) {\n let 
local_ty = self.mir.local_decls[local].ty;\n\n let mut found_it = false;\n self.tcx.for_each_free_region(&local_ty, |r| {\n if r.to_region_vid() == self.region_vid {\n found_it = true;\n }\n });\n\n if found_it {\n match liveness::categorize(context, self.liveness_mode) {\n Some(DefUse::Def) => {\n self.def_use_result = Some(DefUseResult::Def);\n }\n\n Some(DefUse::Use) => {\n self.def_use_result = if context.is_drop() {\n Some(DefUseResult::UseDrop { local })\n } else {\n Some(DefUseResult::UseLive { local })\n };\n }\n\n None => {\n self.def_use_result = None;\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>move Random out of macros<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Follow rust changes: Rename uint to usize<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`](..\/result\/enum.Result.html) type for I\/O\n\/\/\/ operations.\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. 
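// The `UseFinder::find` method in the record above is a breadth-first walk
// over MIR locations: a VecDeque work queue plus a visited set, stopping at
// the first location whose def/use is interesting. Reduced to a
// self-contained sketch over an arbitrary successor function (names here are
// illustrative, not rustc APIs), the traversal skeleton looks like this:
use std::collections::{HashSet, VecDeque};

fn first_match<F, G>(start: u32, mut successors: F, mut is_goal: G) -> Option<u32>
where
    F: FnMut(u32) -> Vec<u32>,
    G: FnMut(u32) -> bool,
{
    let mut queue = VecDeque::new();
    let mut visited = HashSet::new();
    queue.push_back(start);
    while let Some(node) = queue.pop_front() {
        if !visited.insert(node) {
            continue; // already expanded this node
        }
        if is_goal(node) {
            return Some(node);
        }
        queue.extend(successors(node)); // enqueue forward successors
    }
    None
}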
`Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if 
the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[stable(feature = \"read_exact\", since = \"1.6.0\")]\n UnexpectedEof,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => match self.kind() {\n ErrorKind::NotFound => \"entity not found\",\n ErrorKind::PermissionDenied => \"permission denied\",\n ErrorKind::ConnectionRefused => \"connection refused\",\n ErrorKind::ConnectionReset => \"connection reset\",\n ErrorKind::ConnectionAborted => \"connection aborted\",\n ErrorKind::NotConnected => \"not connected\",\n ErrorKind::AddrInUse => \"address in use\",\n ErrorKind::AddrNotAvailable => \"address not available\",\n ErrorKind::BrokenPipe => \"broken pipe\",\n ErrorKind::AlreadyExists => \"entity already exists\",\n ErrorKind::WouldBlock => \"operation would block\",\n ErrorKind::InvalidInput => \"invalid input parameter\",\n ErrorKind::InvalidData => \"invalid data\",\n ErrorKind::TimedOut => \"timed out\",\n ErrorKind::WriteZero => \"write zero\",\n ErrorKind::Interrupted => \"operation interrupted\",\n ErrorKind::Other => \"other os error\",\n ErrorKind::UnexpectedEof => \"unexpected end of file\",\n ErrorKind::__Nonexhaustive => unreachable!()\n },\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<commit_msg>Add doc examples for `io::Error::from_raw_os_error`.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`](..\/result\/enum.Result.html) type for I\/O\n\/\/\/ operations.\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. 
`Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if 
the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[stable(feature = \"read_exact\", since = \"1.6.0\")]\n UnexpectedEof,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ On Linux:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # if cfg!(target_os = \"linux\") {\n \/\/\/ use std::io;\n \/\/\/\n \/\/\/ let error = io::Error::from_raw_os_error(98);\n \/\/\/ assert_eq!(error.kind(), io::ErrorKind::AddrInUse);\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ On Windows:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # if cfg!(windows) {\n \/\/\/ use std::io;\n \/\/\/\n \/\/\/ let error = io::Error::from_raw_os_error(10048);\n \/\/\/ assert_eq!(error.kind(), io::ErrorKind::AddrInUse);\n \/\/\/ # }\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => match self.kind() {\n ErrorKind::NotFound => \"entity not found\",\n ErrorKind::PermissionDenied => \"permission denied\",\n ErrorKind::ConnectionRefused => \"connection refused\",\n ErrorKind::ConnectionReset => \"connection reset\",\n ErrorKind::ConnectionAborted => \"connection aborted\",\n ErrorKind::NotConnected => \"not connected\",\n ErrorKind::AddrInUse => \"address in use\",\n ErrorKind::AddrNotAvailable => \"address not available\",\n ErrorKind::BrokenPipe => \"broken pipe\",\n ErrorKind::AlreadyExists => \"entity already exists\",\n ErrorKind::WouldBlock => \"operation would block\",\n ErrorKind::InvalidInput => \"invalid input parameter\",\n ErrorKind::InvalidData => \"invalid data\",\n ErrorKind::TimedOut => \"timed out\",\n ErrorKind::WriteZero => \"write zero\",\n ErrorKind::Interrupted => \"operation interrupted\",\n ErrorKind::Other => \"other os error\",\n ErrorKind::UnexpectedEof => \"unexpected end of file\",\n ErrorKind::__Nonexhaustive => unreachable!()\n },\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>reinstate test\/bench\/shootout-threadring.rs<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
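// A small user-side sketch of the std `io::Error` API shown in the record
// above (custom payloads via `Error::new`, OS codes via `from_raw_os_error`);
// illustration only, not part of that source file.
use std::error::Error as StdError; // brings `description()` into scope
use std::io::{Error, ErrorKind};

fn io_error_demo() {
    // A custom error wrapping an arbitrary Send + Sync payload.
    let custom = Error::new(ErrorKind::Other, "oh no!");
    assert_eq!(custom.kind(), ErrorKind::Other);
    assert!(custom.raw_os_error().is_none()); // not an OS error
    assert!(custom.get_ref().is_some());      // inner boxed error available

    // An error reconstructed from a raw OS error code.
    let os = Error::from_raw_os_error(1);
    assert_eq!(os.raw_os_error(), Some(1));
    let _summary = os.description();
}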
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Based on threadring.erlang by Jira Isa\n\nfn start(n_tasks: int, token: int) {\n let mut (p, ch1) = comm::stream();\n ch1.send(token);\n \/\/ XXX could not get this to work with a range closure\n let mut i = 2;\n while i <= n_tasks {\n let (next_p, ch) = comm::stream();\n let imm_i = i;\n let imm_p = p;\n do task::spawn {\n roundtrip(imm_i, n_tasks, &imm_p, &ch);\n };\n p = next_p;\n i += 1;\n }\n let imm_p = p;\n let imm_ch = ch1;\n do task::spawn {\n roundtrip(1, n_tasks, &imm_p, &imm_ch);\n }\n}\n\nfn roundtrip(id: int, n_tasks: int, p: &comm::Port<int>, ch: &comm::Chan<int>) {\n while (true) {\n match p.recv() {\n 1 => {\n io::println(fmt!(\"%d\\n\", id));\n return;\n }\n token => {\n debug!(\"thread: %d got token: %d\", id, token);\n ch.send(token - 1);\n if token <= n_tasks {\n return;\n }\n }\n }\n }\n}\n\nfn main() {\n let args = if os::getenv(~\"RUST_BENCH\").is_some() {\n ~[~\"\", ~\"2000000\", ~\"503\"]\n }\n else {\n os::args()\n };\n let token = if args.len() > 1u {\n int::from_str(args[1]).get()\n }\n else {\n 1000\n };\n let n_tasks = if args.len() > 2u {\n int::from_str(args[2]).get()\n }\n else {\n 503\n };\n start(n_tasks, token);\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add wrapper for resolving syscall names -> numbers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add benchmarks for measuring function call overhead<commit_after>#![feature(test)]\n\nextern crate test;\n\nextern crate gluon;\n\nuse gluon::{Compiler, new_vm};\nuse gluon::vm::api::FunctionRef;\n\n\/\/ Benchmarks function calls\n#[bench]\nfn factorial(b: &mut ::test::Bencher) {\n let vm = new_vm();\n let text = r#\"\n let factorial n =\n if n < 2\n then 1\n else n * factorial (n - 1)\n factorial\n \"#;\n Compiler::new()\n .load_script(&vm, \"factorial\", text)\n .unwrap();\n let mut factorial: FunctionRef<fn (i32) -> i32> = vm.get_global(\"factorial\").unwrap();\n b.iter(|| {\n let result = factorial.call(100).unwrap();\n ::test::black_box(result)\n })\n}\n\n#[bench]\nfn factorial_tail_call(b: &mut ::test::Bencher) {\n let vm = new_vm();\n let text = r#\"\n let factorial a n =\n if n < 2\n then a\n else factorial (a * n) (n - 1)\n factorial 1\n \"#;\n Compiler::new()\n .load_script(&vm, \"factorial\", text)\n .unwrap();\n let mut factorial: FunctionRef<fn (i32) -> i32> = vm.get_global(\"factorial\").unwrap();\n b.iter(|| {\n let result = factorial.call(100).unwrap();\n ::test::black_box(result)\n })\n}\n\n#[bench]\nfn gluon_rust_boundary_overhead(b: &mut ::test::Bencher) {\n let vm = new_vm();\n let text = r#\"\n let { trim } = import \"std\/string.glu\"\n let for n f =\n if n #Int== 0 then\n ()\n else\n f n\n for (n #Int- 10) f\n \\n -> for n \\_ -> trim \"\"\n \"#;\n Compiler::new()\n .load_script(&vm, \"test\", text)\n .unwrap();\n let mut factorial: FunctionRef<fn (i32) -> ()> = vm.get_global(\"test\").unwrap();\n b.iter(|| {\n let result = factorial.call(1000).unwrap();\n ::test::black_box(result)\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Graphite backend docs<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse 
glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(basepath: String) -> StorageBackend {\n StorageBackend {\n basepath: basepath,\n }\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n \/\/ TODO: Don't use \"\/store\" but value from configuration\n debug!(\"Building StorageBackend for {}\", path);\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n debug!(\"Getting files from {}\", self.basepath);\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
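// Note on the record above: `get_file_ids` and `iter_ids` pass literal,
// wildcard-free strings to `glob`, which only matches those exact paths.
// If the intent is to enumerate the store's files, the call presumably needs
// a wildcard pattern such as the one sketched below (hypothetical helper, not
// part of the original code; the ".imag" suffix matches the extension the
// backend appends when building file paths).
fn store_file_pattern(basepath: &str) -> String {
    format!("{}/*.imag", basepath)
}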
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id : '{}'\", id);\n self.prefix_of_files_for_module(owner) + \"-\" + &id[..] 
+ \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n dataDump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<commit_msg>Reimplement StorageBackend::new(), create directory if not existing, store storepath in extra variable<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::build(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n \/\/ TODO: Don't use \"\/store\" but value from configuration\n debug!(\"Building StorageBackend for {}\", path);\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n debug!(\"Getting files from {}\", self.basepath);\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let 
mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. 
Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id : '{}'\", id);\n self.prefix_of_files_for_module(owner) + \"-\" + &id[..] + \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n dataDump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
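// `StorageBackendError::build` in the storage backend record above initialises
// a field written as `dataDump`, while the struct (and `new`) declare it as
// `data_dump`, so `build` would not compile as written. A corrected body —
// sketch only, not applied to the record itself — would be:
fn build_fixed(action: &'static str,
               desc: &'static str,
               data: Option<String>) -> StorageBackendError {
    StorageBackendError {
        action: String::from(action),
        desc: String::from(desc),
        data_dump: data, // field name matching the struct definition
        caused_by: None,
    }
}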
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Traits for working with Errors.\n\/\/!\n\/\/! # The `Error` trait\n\/\/!\n\/\/! `Error` is a trait representing the basic expectations for error values,\n\/\/! i.e. values of type `E` in `Result<T, E>`. At a minimum, errors must provide\n\/\/! a description, but they may optionally provide additional detail (via\n\/\/! `Display`) and cause chain information:\n\/\/!\n\/\/! ```\n\/\/! use std::fmt::Display;\n\/\/!\n\/\/! trait Error: Display {\n\/\/! fn description(&self) -> &str;\n\/\/!\n\/\/! fn cause(&self) -> Option<&Error> { None }\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! The `cause` method is generally used when errors cross \"abstraction\n\/\/! boundaries\", i.e. when a one module must report an error that is \"caused\"\n\/\/! by an error from a lower-level module. This setup makes it possible for the\n\/\/! high-level module to provide its own errors that do not commit to any\n\/\/! particular implementation, but also reveal some of its implementation for\n\/\/! debugging via `cause` chains.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/ A note about crates and the facade:\n\/\/\n\/\/ Originally, the `Error` trait was defined in libcore, and the impls\n\/\/ were scattered about. However, coherence objected to this\n\/\/ arrangement, because to create the blanket impls for `Box` required\n\/\/ knowing that `&str: !Error`, and we have no means to deal with that\n\/\/ sort of conflict just now. Therefore, for the time being, we have\n\/\/ moved the `Error` trait into libstd. 
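// A minimal user-side example of the trait shape described in the module
// docs above — `description()` plus an optional `cause()` chain across an
// abstraction boundary. Sketch only; the `ConfigError` type is invented for
// illustration and is not part of this file.
use std::error::Error;
use std::fmt;
use std::io;

#[derive(Debug)]
struct ConfigError {
    source: io::Error, // the lower-level failure being wrapped
}

impl fmt::Display for ConfigError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "could not load configuration")
    }
}

impl Error for ConfigError {
    fn description(&self) -> &str {
        "could not load configuration"
    }

    // Reveal the underlying I/O error for debugging via the cause chain.
    fn cause(&self) -> Option<&Error> {
        Some(&self.source)
    }
}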
As we evolve a sol'n to the\n\/\/ coherence challenge (e.g., specialization, neg impls, etc) we can\n\/\/ reconsider what crate these items belong in.\n\nuse any::TypeId;\nuse boxed::Box;\nuse char;\nuse fmt::{self, Debug, Display};\nuse marker::{Send, Sync, Reflect};\nuse mem::transmute;\nuse num;\nuse raw::TraitObject;\nuse str;\nuse string::{self, String};\n\n\/\/\/ Base functionality for all errors in Rust.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Error: Debug + Display + Reflect {\n \/\/\/ A short description of the error.\n \/\/\/\n \/\/\/ The description should not contain newlines or sentence-ending\n \/\/\/ punctuation, to facilitate embedding in larger user-facing\n \/\/\/ strings.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn description(&self) -> &str;\n\n \/\/\/ The lower-level cause of this error, if any.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn cause(&self) -> Option<&Error> { None }\n\n \/\/\/ Get the `TypeId` of `self`\n #[doc(hidden)]\n #[unstable(feature = \"error_type_id\",\n reason = \"unclear whether to commit to this public implementation detail\",\n issue = \"27745\")]\n fn type_id(&self) -> TypeId where Self: 'static {\n TypeId::of::<Self>()\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, E: Error + 'a> From<E> for Box<Error + 'a> {\n fn from(err: E) -> Box<Error + 'a> {\n Box::new(err)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, E: Error + Send + Sync + 'a> From<E> for Box<Error + Send + Sync + 'a> {\n fn from(err: E) -> Box<Error + Send + Sync + 'a> {\n Box::new(err)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl From<String> for Box<Error + Send + Sync> {\n fn from(err: String) -> Box<Error + Send + Sync> {\n #[derive(Debug)]\n struct StringError(String);\n\n impl Error for StringError {\n fn description(&self) -> &str { &self.0 }\n }\n\n impl Display for StringError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Display::fmt(&self.0, f)\n }\n }\n\n Box::new(StringError(err))\n }\n}\n\n#[stable(feature = \"string_box_error\", since = \"1.7.0\")]\nimpl From<String> for Box<Error> {\n fn from(str_err: String) -> Box<Error> {\n let err1: Box<Error + Send + Sync> = From::from(str_err);\n let err2: Box<Error> = err1;\n err2\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, 'b> From<&'b str> for Box<Error + Send + Sync + 'a> {\n fn from(err: &'b str) -> Box<Error + Send + Sync + 'a> {\n From::from(String::from(err))\n }\n}\n\n#[stable(feature = \"string_box_error\", since = \"1.7.0\")]\nimpl<'a> From<&'a str> for Box<Error> {\n fn from(err: &'a str) -> Box<Error> {\n From::from(String::from(err))\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for str::ParseBoolError {\n fn description(&self) -> &str { \"failed to parse bool\" }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for str::Utf8Error {\n fn description(&self) -> &str {\n \"invalid utf-8: corrupt contents\"\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for num::ParseIntError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[unstable(feature = \"try_from\", issue = \"33417\")]\nimpl Error for num::TryFromIntError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for num::ParseFloatError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[stable(feature = \"rust1\", since = 
\"1.0.0\")]\nimpl Error for string::FromUtf8Error {\n fn description(&self) -> &str {\n \"invalid utf-8\"\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for string::FromUtf16Error {\n fn description(&self) -> &str {\n \"invalid utf-16\"\n }\n}\n\n#[stable(feature = \"str_parse_error2\", since = \"1.8.0\")]\nimpl Error for string::ParseError {\n fn description(&self) -> &str {\n match *self {}\n }\n}\n\n#[stable(feature = \"decode_utf16\", since = \"1.9.0\")]\nimpl Error for char::DecodeUtf16Error {\n fn description(&self) -> &str {\n \"unpaired surrogate found\"\n }\n}\n\n#[stable(feature = \"box_error\", since = \"1.7.0\")]\nimpl<T: Error> Error for Box<T> {\n fn description(&self) -> &str {\n Error::description(&**self)\n }\n\n fn cause(&self) -> Option<&Error> {\n Error::cause(&**self)\n }\n}\n\n\/\/ copied from any.rs\nimpl Error + 'static {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::<T>();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&*(to.data as *const T))\n }\n } else {\n None\n }\n }\n\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&mut *(to.data as *const T as *mut T))\n }\n } else {\n None\n }\n }\n}\n\nimpl Error + 'static + Send {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n <Error + 'static>::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n <Error + 'static>::downcast_ref::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n <Error + 'static>::downcast_mut::<T>(self)\n }\n}\n\nimpl Error + 'static + Send + Sync {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n <Error + 'static>::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n <Error + 'static>::downcast_ref::<T>(self)\n }\n\n \/\/\/ 
Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n <Error + 'static>::downcast_mut::<T>(self)\n }\n}\n\nimpl Error {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<Error>> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let raw = Box::into_raw(self);\n let to: TraitObject =\n transmute::<*mut Error, TraitObject>(raw);\n\n \/\/ Extract the data pointer\n Ok(Box::from_raw(to.data as *mut T))\n }\n } else {\n Err(self)\n }\n }\n}\n\nimpl Error + Send {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>)\n -> Result<Box<T>, Box<Error + Send>> {\n let err: Box<Error> = self;\n <Error>::downcast(err).map_err(|s| unsafe {\n \/\/ reapply the Send marker\n transmute::<Box<Error>, Box<Error + Send>>(s)\n })\n }\n}\n\nimpl Error + Send + Sync {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>)\n -> Result<Box<T>, Box<Self>> {\n let err: Box<Error> = self;\n <Error>::downcast(err).map_err(|s| unsafe {\n \/\/ reapply the Send+Sync marker\n transmute::<Box<Error>, Box<Error + Send + Sync>>(s)\n })\n }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n use super::Error;\n use fmt;\n\n #[derive(Debug, PartialEq)]\n struct A;\n #[derive(Debug, PartialEq)]\n struct B;\n\n impl fmt::Display for A {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"A\")\n }\n }\n impl fmt::Display for B {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"B\")\n }\n }\n\n impl Error for A {\n fn description(&self) -> &str { \"A-desc\" }\n }\n impl Error for B {\n fn description(&self) -> &str { \"A-desc\" }\n }\n\n #[test]\n fn downcasting() {\n let mut a = A;\n let mut a = &mut a as &mut (Error + 'static);\n assert_eq!(a.downcast_ref::<A>(), Some(&A));\n assert_eq!(a.downcast_ref::<B>(), None);\n assert_eq!(a.downcast_mut::<A>(), Some(&mut A));\n assert_eq!(a.downcast_mut::<B>(), None);\n\n let a: Box<Error> = Box::new(A);\n match a.downcast::<B>() {\n Ok(..) => panic!(\"expected error\"),\n Err(e) => assert_eq!(*e.downcast::<A>().unwrap(), A),\n }\n }\n}\n<commit_msg>Rollup merge of #33856 - GuillaumeGomez:fmt_error, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Traits for working with Errors.\n\/\/!\n\/\/! # The `Error` trait\n\/\/!\n\/\/! `Error` is a trait representing the basic expectations for error values,\n\/\/! i.e. values of type `E` in `Result<T, E>`. At a minimum, errors must provide\n\/\/! a description, but they may optionally provide additional detail (via\n\/\/! 
`Display`) and cause chain information:\n\/\/!\n\/\/! ```\n\/\/! use std::fmt::Display;\n\/\/!\n\/\/! trait Error: Display {\n\/\/! fn description(&self) -> &str;\n\/\/!\n\/\/! fn cause(&self) -> Option<&Error> { None }\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! The `cause` method is generally used when errors cross \"abstraction\n\/\/! boundaries\", i.e. when a one module must report an error that is \"caused\"\n\/\/! by an error from a lower-level module. This setup makes it possible for the\n\/\/! high-level module to provide its own errors that do not commit to any\n\/\/! particular implementation, but also reveal some of its implementation for\n\/\/! debugging via `cause` chains.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/ A note about crates and the facade:\n\/\/\n\/\/ Originally, the `Error` trait was defined in libcore, and the impls\n\/\/ were scattered about. However, coherence objected to this\n\/\/ arrangement, because to create the blanket impls for `Box` required\n\/\/ knowing that `&str: !Error`, and we have no means to deal with that\n\/\/ sort of conflict just now. Therefore, for the time being, we have\n\/\/ moved the `Error` trait into libstd. As we evolve a sol'n to the\n\/\/ coherence challenge (e.g., specialization, neg impls, etc) we can\n\/\/ reconsider what crate these items belong in.\n\nuse any::TypeId;\nuse boxed::Box;\nuse char;\nuse fmt::{self, Debug, Display};\nuse marker::{Send, Sync, Reflect};\nuse mem::transmute;\nuse num;\nuse raw::TraitObject;\nuse str;\nuse string::{self, String};\n\n\/\/\/ Base functionality for all errors in Rust.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Error: Debug + Display + Reflect {\n \/\/\/ A short description of the error.\n \/\/\/\n \/\/\/ The description should not contain newlines or sentence-ending\n \/\/\/ punctuation, to facilitate embedding in larger user-facing\n \/\/\/ strings.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn description(&self) -> &str;\n\n \/\/\/ The lower-level cause of this error, if any.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn cause(&self) -> Option<&Error> { None }\n\n \/\/\/ Get the `TypeId` of `self`\n #[doc(hidden)]\n #[unstable(feature = \"error_type_id\",\n reason = \"unclear whether to commit to this public implementation detail\",\n issue = \"27745\")]\n fn type_id(&self) -> TypeId where Self: 'static {\n TypeId::of::<Self>()\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, E: Error + 'a> From<E> for Box<Error + 'a> {\n fn from(err: E) -> Box<Error + 'a> {\n Box::new(err)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, E: Error + Send + Sync + 'a> From<E> for Box<Error + Send + Sync + 'a> {\n fn from(err: E) -> Box<Error + Send + Sync + 'a> {\n Box::new(err)\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl From<String> for Box<Error + Send + Sync> {\n fn from(err: String) -> Box<Error + Send + Sync> {\n #[derive(Debug)]\n struct StringError(String);\n\n impl Error for StringError {\n fn description(&self) -> &str { &self.0 }\n }\n\n impl Display for StringError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Display::fmt(&self.0, f)\n }\n }\n\n Box::new(StringError(err))\n }\n}\n\n#[stable(feature = \"string_box_error\", since = \"1.7.0\")]\nimpl From<String> for Box<Error> {\n fn from(str_err: String) -> Box<Error> {\n let err1: Box<Error + Send + Sync> = From::from(str_err);\n let err2: Box<Error> = err1;\n err2\n }\n}\n\n#[stable(feature = \"rust1\", since = 
\"1.0.0\")]\nimpl<'a, 'b> From<&'b str> for Box<Error + Send + Sync + 'a> {\n fn from(err: &'b str) -> Box<Error + Send + Sync + 'a> {\n From::from(String::from(err))\n }\n}\n\n#[stable(feature = \"string_box_error\", since = \"1.7.0\")]\nimpl<'a> From<&'a str> for Box<Error> {\n fn from(err: &'a str) -> Box<Error> {\n From::from(String::from(err))\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for str::ParseBoolError {\n fn description(&self) -> &str { \"failed to parse bool\" }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for str::Utf8Error {\n fn description(&self) -> &str {\n \"invalid utf-8: corrupt contents\"\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for num::ParseIntError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[unstable(feature = \"try_from\", issue = \"33417\")]\nimpl Error for num::TryFromIntError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for num::ParseFloatError {\n fn description(&self) -> &str {\n self.__description()\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for string::FromUtf8Error {\n fn description(&self) -> &str {\n \"invalid utf-8\"\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Error for string::FromUtf16Error {\n fn description(&self) -> &str {\n \"invalid utf-16\"\n }\n}\n\n#[stable(feature = \"str_parse_error2\", since = \"1.8.0\")]\nimpl Error for string::ParseError {\n fn description(&self) -> &str {\n match *self {}\n }\n}\n\n#[stable(feature = \"decode_utf16\", since = \"1.9.0\")]\nimpl Error for char::DecodeUtf16Error {\n fn description(&self) -> &str {\n \"unpaired surrogate found\"\n }\n}\n\n#[stable(feature = \"box_error\", since = \"1.7.0\")]\nimpl<T: Error> Error for Box<T> {\n fn description(&self) -> &str {\n Error::description(&**self)\n }\n\n fn cause(&self) -> Option<&Error> {\n Error::cause(&**self)\n }\n}\n\n#[stable(feature = \"fmt_error\", since = \"1.11.0\")]\nimpl Error for fmt::Error {\n fn description(&self) -> &str {\n \"an error occurred when formatting an argument\"\n }\n}\n\n\/\/ copied from any.rs\nimpl Error + 'static {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::<T>();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&*(to.data as *const T))\n }\n } else {\n None\n }\n }\n\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data 
pointer\n Some(&mut *(to.data as *const T as *mut T))\n }\n } else {\n None\n }\n }\n}\n\nimpl Error + 'static + Send {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n <Error + 'static>::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n <Error + 'static>::downcast_ref::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n <Error + 'static>::downcast_mut::<T>(self)\n }\n}\n\nimpl Error + 'static + Send + Sync {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn is<T: Error + 'static>(&self) -> bool {\n <Error + 'static>::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_ref<T: Error + 'static>(&self) -> Option<&T> {\n <Error + 'static>::downcast_ref::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n #[inline]\n pub fn downcast_mut<T: Error + 'static>(&mut self) -> Option<&mut T> {\n <Error + 'static>::downcast_mut::<T>(self)\n }\n}\n\nimpl Error {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<Error>> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let raw = Box::into_raw(self);\n let to: TraitObject =\n transmute::<*mut Error, TraitObject>(raw);\n\n \/\/ Extract the data pointer\n Ok(Box::from_raw(to.data as *mut T))\n }\n } else {\n Err(self)\n }\n }\n}\n\nimpl Error + Send {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>)\n -> Result<Box<T>, Box<Error + Send>> {\n let err: Box<Error> = self;\n <Error>::downcast(err).map_err(|s| unsafe {\n \/\/ reapply the Send marker\n transmute::<Box<Error>, Box<Error + Send>>(s)\n })\n }\n}\n\nimpl Error + Send + Sync {\n #[inline]\n #[stable(feature = \"error_downcast\", since = \"1.3.0\")]\n \/\/\/ Attempt to downcast the box to a concrete type.\n pub fn downcast<T: Error + 'static>(self: Box<Self>)\n -> Result<Box<T>, Box<Self>> {\n let err: Box<Error> = self;\n <Error>::downcast(err).map_err(|s| unsafe {\n \/\/ reapply the Send+Sync marker\n transmute::<Box<Error>, Box<Error + Send + Sync>>(s)\n })\n }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n use super::Error;\n use fmt;\n\n #[derive(Debug, PartialEq)]\n struct A;\n #[derive(Debug, PartialEq)]\n struct B;\n\n impl fmt::Display for A {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"A\")\n }\n }\n impl fmt::Display for B {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"B\")\n }\n }\n\n impl Error for A {\n fn description(&self) -> &str { \"A-desc\" }\n }\n impl Error for B {\n fn description(&self) -> &str { \"A-desc\" }\n }\n\n #[test]\n fn 
downcasting() {\n let mut a = A;\n let mut a = &mut a as &mut (Error + 'static);\n assert_eq!(a.downcast_ref::<A>(), Some(&A));\n assert_eq!(a.downcast_ref::<B>(), None);\n assert_eq!(a.downcast_mut::<A>(), Some(&mut A));\n assert_eq!(a.downcast_mut::<B>(), None);\n\n let a: Box<Error> = Box::new(A);\n match a.downcast::<B>() {\n Ok(..) => panic!(\"expected error\"),\n Err(e) => assert_eq!(*e.downcast::<A>().unwrap(), A),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use prolog::allocator::*;\nuse prolog::ast::*;\nuse prolog::targets::*;\n\nuse std::cell::Cell;\nuse std::collections::{BTreeSet, HashMap};\nuse std::rc::Rc;\n\npub struct DebrayAllocator {\n bindings: HashMap<Rc<Var>, VarData>,\n arg_c: usize,\n temp_lb: usize,\n arity: usize, \/\/ 0 if not at head.\n contents: HashMap<usize, Rc<Var>>,\n in_use: BTreeSet<usize>,\n}\n\nimpl DebrayAllocator {\n fn is_curr_arg_distinct_from(&self, var: &Var) -> bool {\n match self.contents.get(&self.arg_c) {\n Some(t_var) if **t_var != *var => true,\n _ => false\n }\n }\n\n fn occurs_shallowly_in_head(&self, var: &Var, r: usize) -> bool\n {\n match self.bindings.get(var).unwrap() {\n &VarData::Temp(_, _, ref tvd) =>\n tvd.use_set.contains(&(GenContext::Head, r)),\n _ => false\n }\n }\n\n fn is_in_use(&self, r: usize) -> bool {\n let in_use_range = r < self.arity && r >= self.arg_c;\n self.in_use.contains(&r) || in_use_range\n }\n\n fn alloc_with_cr(&self, var: &Var) -> usize\n {\n match self.bindings.get(var) {\n Some(&VarData::Temp(_, _, ref tvd)) => {\n for &(_, reg) in tvd.use_set.iter() {\n if !self.is_in_use(reg) {\n return reg;\n }\n }\n\n let mut result = 0;\n\n for reg in self.temp_lb .. {\n if !self.is_in_use(reg) {\n if !tvd.no_use_set.contains(&reg) {\n result = reg;\n break;\n }\n }\n }\n\n result\n },\n _ => 0\n }\n }\n\n fn alloc_with_ca(&self, var: &Var) -> usize\n {\n match self.bindings.get(var) {\n Some(&VarData::Temp(_, _, ref tvd)) => {\n for &(_, reg) in tvd.use_set.iter() {\n if !self.is_in_use(reg) {\n return reg;\n }\n }\n\n let mut result = 0;\n\n for reg in self.temp_lb .. {\n if !self.is_in_use(reg) {\n if !tvd.no_use_set.contains(&reg) {\n if !tvd.conflict_set.contains(&reg) {\n result = reg;\n break;\n }\n }\n }\n }\n\n result\n },\n _ => 0\n }\n }\n\n fn alloc_in_last_goal_hint(&self, chunk_num: usize) -> Option<(Rc<Var>, usize)>\n {\n \/\/ we want to allocate a register to the k^{th} parameter, par_k.\n \/\/ par_k may not be a temporary variable.\n let k = self.arg_c;\n\n match self.contents.get(&k) {\n Some(t_var) => {\n \/\/ suppose this branch fires. then t_var is a\n \/\/ temp. var. belonging to the current chunk.\n \/\/ consider its use set. 
T == par_k iff\n \/\/ (GenContext::Last(_), k) is in t_var.use_set.\n\n let tvd = self.bindings.get(t_var).unwrap();\n if let &VarData::Temp(_, _, ref tvd) = tvd {\n if !tvd.use_set.contains(&(GenContext::Last(chunk_num), k)) {\n return Some((t_var.clone(), self.alloc_with_ca(t_var)));\n }\n }\n\n None\n },\n _ => None\n }\n }\n\n fn evacuate_arg<'a, Target>(&mut self, chunk_num: usize, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n match self.alloc_in_last_goal_hint(chunk_num) {\n Some((var, r)) => {\n let k = self.arg_c;\n\n if r != k {\n let r = RegType::Temp(r);\n\n target.push(Target::move_to_register(r, k));\n\n self.contents.remove(&k);\n self.contents.insert(r.reg_num(), var.clone());\n\n self.record_register(var, r);\n self.in_use.insert(r.reg_num());\n }\n },\n _ => {}\n };\n }\n\n fn alloc_reg_to_var<'a, Target>(&mut self, var: &Var, lvl: Level, term_loc: GenContext,\n target: &mut Vec<Target>)\n -> usize\n where Target: CompilationTarget<'a>\n {\n match term_loc {\n GenContext::Head =>\n if let Level::Shallow = lvl {\n self.evacuate_arg(0, target);\n self.alloc_with_cr(var)\n } else {\n self.alloc_with_ca(var)\n },\n GenContext::Mid(_) =>\n self.alloc_with_ca(var),\n GenContext::Last(chunk_num) =>\n if let Level::Shallow = lvl {\n self.evacuate_arg(chunk_num, target);\n self.alloc_with_cr(var)\n } else {\n self.alloc_with_ca(var)\n }\n }\n }\n\n fn alloc_reg_to_non_var(&mut self) -> usize\n {\n let mut final_index = 0;\n\n for index in self.temp_lb .. {\n if !self.in_use.contains(&index) {\n final_index = index;\n break;\n }\n }\n\n self.temp_lb = final_index + 1;\n\n final_index\n }\n\n fn in_place(&self, var: &Var, term_loc: GenContext, r: RegType, k: usize) -> bool\n {\n match term_loc {\n GenContext::Head if !r.is_perm() => r.reg_num() == k,\n _ => match self.bindings().get(var).unwrap() {\n &VarData::Temp(_, o, _) if r.reg_num() == k => o == k,\n _ => false\n }\n }\n }\n}\n\nimpl<'a> Allocator<'a> for DebrayAllocator\n{\n fn new() -> DebrayAllocator {\n DebrayAllocator {\n arity: 0,\n arg_c: 1,\n temp_lb: 1,\n bindings: HashMap::new(),\n contents: HashMap::new(),\n in_use: BTreeSet::new()\n }\n }\n\n fn mark_anon_var<Target>(&mut self, lvl: Level, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let r = RegType::Temp(self.alloc_reg_to_non_var());\n\n match lvl {\n Level::Deep => target.push(Target::subterm_to_variable(r)),\n Level::Root | Level::Shallow => {\n let k = self.arg_c;\n self.arg_c += 1;\n\n target.push(Target::argument_to_variable(r, k));\n }\n };\n }\n\n fn mark_non_var<Target>(&mut self, lvl: Level, term_loc: GenContext,\n cell: &Cell<RegType>, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let r = cell.get();\n\n if r.reg_num() == 0 {\n let r = match lvl {\n Level::Shallow => {\n let k = self.arg_c;\n\n if let GenContext::Last(chunk_num) = term_loc {\n self.evacuate_arg(chunk_num, target);\n }\n\n self.arg_c += 1;\n RegType::Temp(k)\n },\n _ => RegType::Temp(self.alloc_reg_to_non_var())\n };\n\n cell.set(r);\n }\n }\n\n fn mark_var<Target>(&mut self, var: Rc<Var>, lvl: Level, cell: &Cell<VarReg>,\n term_loc: GenContext, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let (r, is_new_var) = match self.get(var.clone()) {\n RegType::Temp(0) => {\n \/\/ here, r is temporary *and* unassigned.\n let o = self.alloc_reg_to_var(&var, lvl, term_loc, target);\n\n cell.set(VarReg::Norm(RegType::Temp(o)));\n\n (RegType::Temp(o), true)\n },\n RegType::Perm(0) => {\n let pr = 
cell.get().norm();\n self.record_register(var.clone(), pr);\n\n (pr, true)\n },\n r => (r, false)\n };\n\n match lvl {\n Level::Root | Level::Shallow => {\n let k = self.arg_c;\n\n if self.is_curr_arg_distinct_from(&var) {\n self.evacuate_arg(term_loc.chunk_num(), target);\n }\n\n self.arg_c += 1;\n\n cell.set(VarReg::ArgAndNorm(r, k));\n\n if !self.in_place(&var, term_loc, r, k) {\n if is_new_var {\n target.push(Target::argument_to_variable(r, k));\n } else {\n target.push(Target::argument_to_value(r, k));\n }\n }\n },\n Level::Deep if is_new_var =>\n if let GenContext::Head = term_loc {\n if self.occurs_shallowly_in_head(&var, r.reg_num()) {\n target.push(Target::subterm_to_value(r));\n } else {\n target.push(Target::subterm_to_variable(r));\n }\n } else {\n target.push(Target::subterm_to_variable(r));\n },\n Level::Deep =>\n target.push(Target::subterm_to_value(r))\n };\n\n if !r.is_perm() {\n let o = r.reg_num();\n\n self.contents.insert(o, var.clone());\n self.record_register(var.clone(), r);\n self.in_use.insert(o);\n }\n }\n\n fn reset(&mut self) {\n self.bindings.clear();\n self.contents.clear();\n self.in_use.clear();\n }\n\n fn reset_contents(&mut self) {\n self.contents.clear();\n self.in_use.clear();\n }\n\n fn advance_arg(&mut self) {\n self.arg_c += 1;\n }\n\n fn bindings(&self) -> &AllocVarDict {\n &self.bindings\n }\n\n fn bindings_mut(&mut self) -> &mut AllocVarDict {\n &mut self.bindings\n }\n\n fn take_bindings(self) -> AllocVarDict {\n self.bindings\n }\n\n fn reset_at_head(&mut self, args: &Vec<Box<Term>>) {\n self.reset_arg(args.len());\n self.arity = args.len();\n\n for (idx, arg) in args.iter().enumerate() {\n if let &Term::Var(_, ref var) = arg.as_ref() {\n let r = self.get(var.clone());\n\n if !r.is_perm() && r.reg_num() == 0 {\n self.in_use.insert(idx + 1);\n self.contents.insert(idx + 1, var.clone());\n self.record_register(var.clone(), temp_v!(idx + 1));\n }\n }\n }\n }\n\n fn reset_arg(&mut self, arity: usize) {\n self.arity = 0;\n self.arg_c = 1;\n self.temp_lb = arity + 1;\n }\n}\n<commit_msg>further debray fix.<commit_after>use prolog::allocator::*;\nuse prolog::ast::*;\nuse prolog::targets::*;\n\nuse std::cell::Cell;\nuse std::collections::{BTreeSet, HashMap};\nuse std::rc::Rc;\n\npub struct DebrayAllocator {\n bindings: HashMap<Rc<Var>, VarData>,\n arg_c: usize,\n temp_lb: usize,\n arity: usize, \/\/ 0 if not at head.\n contents: HashMap<usize, Rc<Var>>,\n in_use: BTreeSet<usize>,\n}\n\nimpl DebrayAllocator {\n fn is_curr_arg_distinct_from(&self, var: &Var) -> bool {\n match self.contents.get(&self.arg_c) {\n Some(t_var) if **t_var != *var => true,\n _ => false\n }\n }\n\n fn occurs_shallowly_in_head(&self, var: &Var, r: usize) -> bool\n {\n match self.bindings.get(var).unwrap() {\n &VarData::Temp(_, _, ref tvd) =>\n tvd.use_set.contains(&(GenContext::Head, r)),\n _ => false\n }\n }\n\n fn is_in_use(&self, r: usize) -> bool {\n let in_use_range = r <= self.arity && r >= self.arg_c;\n self.in_use.contains(&r) || in_use_range\n }\n\n fn alloc_with_cr(&self, var: &Var) -> usize\n {\n match self.bindings.get(var) {\n Some(&VarData::Temp(_, _, ref tvd)) => {\n for &(_, reg) in tvd.use_set.iter() {\n if !self.is_in_use(reg) {\n return reg;\n }\n }\n\n let mut result = 0;\n\n for reg in self.temp_lb .. 
{\n if !self.is_in_use(reg) {\n if !tvd.no_use_set.contains(&reg) {\n result = reg;\n break;\n }\n }\n }\n\n result\n },\n _ => 0\n }\n }\n\n fn alloc_with_ca(&self, var: &Var) -> usize\n {\n match self.bindings.get(var) {\n Some(&VarData::Temp(_, _, ref tvd)) => {\n for &(_, reg) in tvd.use_set.iter() {\n if !self.is_in_use(reg) {\n return reg;\n }\n }\n\n let mut result = 0;\n\n for reg in self.temp_lb .. {\n if !self.is_in_use(reg) {\n if !tvd.no_use_set.contains(&reg) {\n if !tvd.conflict_set.contains(&reg) {\n result = reg;\n break;\n }\n }\n }\n }\n\n result\n },\n _ => 0\n }\n }\n\n fn alloc_in_last_goal_hint(&self, chunk_num: usize) -> Option<(Rc<Var>, usize)>\n {\n \/\/ we want to allocate a register to the k^{th} parameter, par_k.\n \/\/ par_k may not be a temporary variable.\n let k = self.arg_c;\n\n match self.contents.get(&k) {\n Some(t_var) => {\n \/\/ suppose this branch fires. then t_var is a\n \/\/ temp. var. belonging to the current chunk.\n \/\/ consider its use set. T == par_k iff\n \/\/ (GenContext::Last(_), k) is in t_var.use_set.\n\n let tvd = self.bindings.get(t_var).unwrap();\n if let &VarData::Temp(_, _, ref tvd) = tvd {\n if !tvd.use_set.contains(&(GenContext::Last(chunk_num), k)) {\n return Some((t_var.clone(), self.alloc_with_ca(t_var)));\n }\n }\n\n None\n },\n _ => None\n }\n }\n\n fn evacuate_arg<'a, Target>(&mut self, chunk_num: usize, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n match self.alloc_in_last_goal_hint(chunk_num) {\n Some((var, r)) => {\n let k = self.arg_c;\n\n if r != k {\n let r = RegType::Temp(r);\n\n target.push(Target::move_to_register(r, k));\n\n self.contents.remove(&k);\n self.contents.insert(r.reg_num(), var.clone());\n\n self.record_register(var, r);\n self.in_use.insert(r.reg_num());\n }\n },\n _ => {}\n };\n }\n\n fn alloc_reg_to_var<'a, Target>(&mut self, var: &Var, lvl: Level, term_loc: GenContext,\n target: &mut Vec<Target>)\n -> usize\n where Target: CompilationTarget<'a>\n {\n match term_loc {\n GenContext::Head =>\n if let Level::Shallow = lvl {\n self.evacuate_arg(0, target);\n self.alloc_with_cr(var)\n } else {\n self.alloc_with_ca(var)\n },\n GenContext::Mid(_) =>\n self.alloc_with_ca(var),\n GenContext::Last(chunk_num) =>\n if let Level::Shallow = lvl {\n self.evacuate_arg(chunk_num, target);\n self.alloc_with_cr(var)\n } else {\n self.alloc_with_ca(var)\n }\n }\n }\n\n fn alloc_reg_to_non_var(&mut self) -> usize\n {\n let mut final_index = 0;\n\n for index in self.temp_lb .. 
{\n if !self.in_use.contains(&index) {\n final_index = index;\n break;\n }\n }\n\n self.temp_lb = final_index + 1;\n\n final_index\n }\n\n fn in_place(&self, var: &Var, term_loc: GenContext, r: RegType, k: usize) -> bool\n {\n match term_loc {\n GenContext::Head if !r.is_perm() => r.reg_num() == k,\n _ => match self.bindings().get(var).unwrap() {\n &VarData::Temp(_, o, _) if r.reg_num() == k => o == k,\n _ => false\n }\n }\n }\n}\n\nimpl<'a> Allocator<'a> for DebrayAllocator\n{\n fn new() -> DebrayAllocator {\n DebrayAllocator {\n arity: 0,\n arg_c: 1,\n temp_lb: 1,\n bindings: HashMap::new(),\n contents: HashMap::new(),\n in_use: BTreeSet::new()\n }\n }\n\n fn mark_anon_var<Target>(&mut self, lvl: Level, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let r = RegType::Temp(self.alloc_reg_to_non_var());\n\n match lvl {\n Level::Deep => target.push(Target::subterm_to_variable(r)),\n Level::Root | Level::Shallow => {\n let k = self.arg_c;\n self.arg_c += 1;\n\n target.push(Target::argument_to_variable(r, k));\n }\n };\n }\n\n fn mark_non_var<Target>(&mut self, lvl: Level, term_loc: GenContext,\n cell: &Cell<RegType>, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let r = cell.get();\n\n if r.reg_num() == 0 {\n let r = match lvl {\n Level::Shallow => {\n let k = self.arg_c;\n\n if let GenContext::Last(chunk_num) = term_loc {\n self.evacuate_arg(chunk_num, target);\n }\n\n self.arg_c += 1;\n RegType::Temp(k)\n },\n _ => RegType::Temp(self.alloc_reg_to_non_var())\n };\n\n cell.set(r);\n }\n }\n\n fn mark_var<Target>(&mut self, var: Rc<Var>, lvl: Level, cell: &Cell<VarReg>,\n term_loc: GenContext, target: &mut Vec<Target>)\n where Target: CompilationTarget<'a>\n {\n let (r, is_new_var) = match self.get(var.clone()) {\n RegType::Temp(0) => {\n \/\/ here, r is temporary *and* unassigned.\n let o = self.alloc_reg_to_var(&var, lvl, term_loc, target);\n\n cell.set(VarReg::Norm(RegType::Temp(o)));\n\n (RegType::Temp(o), true)\n },\n RegType::Perm(0) => {\n let pr = cell.get().norm();\n self.record_register(var.clone(), pr);\n\n (pr, true)\n },\n r => (r, false)\n };\n\n match lvl {\n Level::Root | Level::Shallow => {\n let k = self.arg_c;\n\n if self.is_curr_arg_distinct_from(&var) {\n self.evacuate_arg(term_loc.chunk_num(), target);\n }\n\n self.arg_c += 1;\n\n cell.set(VarReg::ArgAndNorm(r, k));\n\n if !self.in_place(&var, term_loc, r, k) {\n if is_new_var {\n target.push(Target::argument_to_variable(r, k));\n } else {\n target.push(Target::argument_to_value(r, k));\n }\n }\n },\n Level::Deep if is_new_var =>\n if let GenContext::Head = term_loc {\n if self.occurs_shallowly_in_head(&var, r.reg_num()) {\n target.push(Target::subterm_to_value(r));\n } else {\n target.push(Target::subterm_to_variable(r));\n }\n } else {\n target.push(Target::subterm_to_variable(r));\n },\n Level::Deep =>\n target.push(Target::subterm_to_value(r))\n };\n\n if !r.is_perm() {\n let o = r.reg_num();\n\n self.contents.insert(o, var.clone());\n self.record_register(var.clone(), r);\n self.in_use.insert(o);\n }\n }\n\n fn reset(&mut self) {\n self.bindings.clear();\n self.contents.clear();\n self.in_use.clear();\n }\n\n fn reset_contents(&mut self) {\n self.contents.clear();\n self.in_use.clear();\n }\n\n fn advance_arg(&mut self) {\n self.arg_c += 1;\n }\n\n fn bindings(&self) -> &AllocVarDict {\n &self.bindings\n }\n\n fn bindings_mut(&mut self) -> &mut AllocVarDict {\n &mut self.bindings\n }\n\n fn take_bindings(self) -> AllocVarDict {\n self.bindings\n }\n\n fn 
reset_at_head(&mut self, args: &Vec<Box<Term>>) {\n self.reset_arg(args.len());\n self.arity = args.len();\n\n for (idx, arg) in args.iter().enumerate() {\n if let &Term::Var(_, ref var) = arg.as_ref() {\n let r = self.get(var.clone());\n\n if !r.is_perm() && r.reg_num() == 0 {\n self.in_use.insert(idx + 1);\n self.contents.insert(idx + 1, var.clone());\n self.record_register(var.clone(), temp_v!(idx + 1));\n }\n }\n }\n }\n\n fn reset_arg(&mut self, arity: usize) {\n self.arity = 0;\n self.arg_c = 1;\n self.temp_lb = arity + 1;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>genet-kernel: allow integer literal separators<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>enum MathExp {\n Value(i32),\n Add(Box<MathExp>, Box<MathExp>),\n Minus(Box<MathExp>, Box<MathExp>),\n Multiply(Box<MathExp>, Box<MathExp>),\n Divide(Box<MathExp>, Box<MathExp>),\n}\n\nimpl MathExp {\n fn evaluate(&mut self) -> i32 {\n match *self {\n MathExp::Value(a) => return a,\n MathExp::Add(a, b) => Box::into_raw(a).evaluate() + Box::into_raw(b).evaluate(),\n MathExp::Minus(a, b) => Box::into_raw(a).evaluate() - Box::into_raw(b).evaluate(),\n MathExp::Multiply(a, b) => Box::into_raw(a).evaluate() * Box::into_raw(b).evaluate(),\n MathExp::Divide(a, b) => Box::into_raw(a).evaluate() \/ Box::into_raw(b).evaluate(),\n }\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Start Porting Decompressor<commit_after>\nuse std::io;\nuse std::io::{BufWriter, ErrorKind, Write, Seek, SeekFrom, Result, Error};\nuse std::result;\nuse self::byteorder::{ByteOrder, WriteBytesExt, LittleEndian};\n\n\n\/\/ decompressed_len returns the length of the decoded block.\npub fn decompressed_len(src: &[u8]) -> Result<usize> {\n\tsrc.read_u64::<LittleEndian>()\n}\n\n\/\/ Decompress reads the decoded form of src into dst and returns the length\n\/\/ read.\n\/\/ Returns an error if dst was not large enough to hold the entire decoded\n\/\/ block.\nfn decompress(dst: &[u8], src: &[u8]) -> io::Result<usize> {\n\t\/\/ TODO: Handle Error\n\tlet dLen = decompressed_len(src).unwrap();\n\t\/\/ TODO FIX!!\n\t\/\/ For now, always assume 8 bytes used for src header length\n\tlet s: usize = 8;\n\n\tif dst.len() < dLen {\n return Err(Error::new(ErrorKind::InvalidInput, \"snappy: destination buffer is too short\"));\n }\n\n let (d, offset, length): (isize, isize, isize);\n\n while s < src.len() {\n \tmatch src[s] & 0x03 {\n \t\tTAG_LITERAL => {\n \t\t\tlet mut x = src[s] >> 2;\n\n \t\t\tmatch x {\n\t \t\t\t0..59 => s += 1,\n \t\t\t\t60 => {\n \t\t\t\t\ts += 2;\n \t\t\t\t\tif s > src.len() {\n \t\t\t\t\t\treturn Err(ErrorKind::InvalidData)\n \t\t\t\t\t};\n \t\t\t\t\tx = src[s-1] as usize;\n \t\t\t\t},\n \t\t\t\t61 => {\n \t\t\t\t\ts += 3;\n \t\t\t\t\tif s > src.len() {\n \t\t\t\t\t\treturn Err(ErrorKind::InvalidData)\n \t\t\t\t\t};\n \t\t\t\t\tx = (src[s-2] as usize) | ((src[s-1]<<8) as usize);\n \t\t\t\t},\n \t\t\t\t62 => {\n \t\t\t\t\ts += 4;\n \t\t\t\t\tif s > src.len() {\n \t\t\t\t\t\treturn Err(ErrorKind::InvalidData)\n \t\t\t\t\t};\n \t\t\t\t\tx = (src[s-3] as usize) | ((src[s-2]<<8) as usize) | ((src[s-1]<<16) as usize);\n \t\t\t\t},\n \t\t\t\t63 => {\n \t\t\t\t\ts += 5;\n \t\t\t\t\tif s > src.len() {\n \t\t\t\t\t\treturn Err(ErrorKind::InvalidData)\n \t\t\t\t\t};\n \t\t\t\t\tx = (src[s-4] as usize) | ((src[s-3]<<8) as usize) | ((src[s-2]<<16) as usize) | ((src[s-1]<<24) as usize);\n \t\t\t\t},\n \t\t\t\t_\n \t\t\t}\n \t\t\tlength = (x+1) as isize;\n\n \t\t\tif length <= 0 {\n\t\t\t 
return Err(Error::new(ErrorKind::InvalidInput, \"snappy: unsupported literal length\"));\n \t\t\t}\n \t\t\tif length > dst.len() - d || length > src.len() - s {\n \t\t\t\treturn Err(ErrorKind::InvalidData)\n \t\t\t}\n\n \t\t\t\/\/ Copy src[s: s+length] to dst[d:0]\n \t\t\tfor (id, is) in dst.split_at_mut(d).1.iter_mut().zip(src.split_at(s).1.split_at(s+length).0.iter()) {\n\t\t\t *id = *is;\n\t\t\t }\n\t\t\t d += length;\n\t\t\t s += length;\n\t\t\t continue;\n \t\t}\n \t}\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add check-pass test for `#[unwind(aborts)]` on a `const fn`<commit_after>\/\/ check-pass\n\n#![feature(unwind_attributes, const_panic)]\n\n\/\/ `#[unwind(aborts)]` is okay for a `const fn`. We don't unwind in const-eval anyways.\n#[unwind(aborts)]\nconst fn foo() {\n panic!()\n}\n\nconst fn bar() {\n foo();\n}\n\nfn main() {\n bar();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic setter test<commit_after>#[macro_use] extern crate custom_derive;\n#[macro_use] extern crate derive_builder;\n\ncustom_derive!{\n #[derive(Debug, PartialEq, Default, Builder)]\n struct Lorem {\n ipsum: String,\n dolor: String,\n sit: i32,\n amet: bool,\n }\n}\n\nimpl Lorem {\n pub fn new<T: Into<String>>(value: T) -> Self {\n Lorem {\n ipsum: value.into(),\n ..Default::default()\n }\n }\n}\n\n#[test]\nfn contructor_sanity_check() {\n let x = Lorem::new(\"lorem\");\n\n assert_eq!(x, Lorem { ipsum: \"lorem\".into(), dolor: \"\".into(), sit: 0, amet: false, });\n}\n\n#[test]\nfn setters() {\n let x = Lorem::new(\"lorem\")\n .dolor(\"dolor\")\n .sit(42)\n .amet(true);\n\n assert_eq!(x, Lorem { ipsum: \"lorem\".into(), dolor: \"dolor\".into(), sit: 42, amet: true, });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>fn main() {\n let test = vec![\n 30, 63, 83, 40, 15, 45, 46, 15, 56, 39, 82, 97, 59, 88, 3, 1, 40, 95, 83, 32, 38, 70, 4,\n 87, 54, 48, 19, 8, 52, 49, 64, 72, 46, 72, 59, 36, 21, 68, 81, 34, 23, 6, 70, 80, 80, 12,\n 32, 84, 17, 19, 28, 58, 68, 19, 65, 46, 43, 22, 12, 95, 89, 15, 39, 88, 64, 95, 99, 25, 2,\n 7, 86, 36, 73, 90, 30, 31, 0, 62, 73, 35, 4, 26, 0, 93, 91, 77, 34, 92, 31, 56, 34, 61, 23,\n 47, 78, 5, 5, 26, 36, 71, 50, 5, 59, 22, 21, 0, 72, 72, 72, 69, 5, 11, 95, 5, 0, 14, 34,\n 91, 4, 27, 46, 21, 94, 96, 48, 58, 79, 21, 65, 35, 17, 16, 57, 91, 36, 50, 16, 82, 92, 1,\n 29, 52, 74, 90, 48, 79, 81, 53, 46, 82, 36, 43, 64, 24, 55, 48, 27, 21, 69, 93, 49, 70, 58,\n 8, 50, 97, 30, 68, 1, 34, 15, 38, 52, 27, 50, 10, 22, 67, 25, 37, 84, 91, 13, 15, 0, 5, 31,\n 18, 5, 31, 49, 93, 95, 3, 86, 11, 37, 68, 43, 74,\n ];\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! 
Generic WebSocket protocol implementation\n\nmod frame;\nmod message;\n\npub use self::message::Message;\n\nuse std::collections::VecDeque;\nuse std::io::{Read, Write};\nuse std::mem::replace;\n\nuse error::{Error, Result};\nuse self::message::{IncompleteMessage, IncompleteMessageType};\nuse self::frame::{Frame, FrameSocket};\nuse self::frame::coding::{OpCode, Data as OpData, Control as OpCtl, CloseCode};\nuse util::NonBlockingResult;\n\n\/\/\/ Indicates a Client or Server role of the websocket\n#[derive(Debug, Clone, Copy)]\npub enum Role {\n \/\/\/ This socket is a server\n Server,\n \/\/\/ This socket is a client\n Client,\n}\n\n\/\/\/ WebSocket input-output stream\npub struct WebSocket<Stream> {\n \/\/\/ Server or client?\n role: Role,\n \/\/\/ The underlying socket.\n socket: FrameSocket<Stream>,\n \/\/\/ The state of processing, either \"active\" or \"closing\".\n state: WebSocketState,\n \/\/\/ Receive: an incomplete message being processed.\n incomplete: Option<IncompleteMessage>,\n \/\/\/ Send: a data send queue.\n send_queue: VecDeque<Frame>,\n \/\/\/ Send: an OOB pong message.\n pong: Option<Frame>,\n}\n\nimpl<Stream> WebSocket<Stream> {\n \/\/\/ Convert a raw socket into a WebSocket without performing a handshake.\n pub fn from_raw_socket(stream: Stream, role: Role) -> Self {\n WebSocket::from_frame_socket(FrameSocket::new(stream), role)\n }\n\n \/\/\/ Convert a raw socket into a WebSocket without performing a handshake.\n pub fn from_partially_read(stream: Stream, part: Vec<u8>, role: Role) -> Self {\n WebSocket::from_frame_socket(FrameSocket::from_partially_read(stream, part), role)\n }\n\n \/\/\/ Returns a shared reference to the inner stream.\n pub fn get_ref(&self) -> &Stream {\n self.socket.get_ref()\n }\n \/\/\/ Returns a mutable reference to the inner stream.\n pub fn get_mut(&mut self) -> &mut Stream {\n self.socket.get_mut()\n }\n\n \/\/\/ Convert a frame socket into a WebSocket.\n fn from_frame_socket(socket: FrameSocket<Stream>, role: Role) -> Self {\n WebSocket {\n role: role,\n socket: socket,\n state: WebSocketState::Active,\n incomplete: None,\n send_queue: VecDeque::new(),\n pong: None,\n }\n }\n}\n\nimpl<Stream: Read + Write> WebSocket<Stream> {\n \/\/\/ Read a message from stream, if possible.\n \/\/\/\n \/\/\/ This function sends pong and close responses automatically.\n \/\/\/ However, it never blocks on write.\n pub fn read_message(&mut self) -> Result<Message> {\n loop {\n \/\/ Since we may get ping or close, we need to reply to the messages even during read.\n \/\/ Thus we call write_pending() but ignore its blocking.\n self.write_pending().no_block()?;\n \/\/ If we get here, either write blocks or we have nothing to write.\n \/\/ Thus if read blocks, just let it return WouldBlock.\n if let Some(message) = self.read_message_frame()? {\n trace!(\"Received message {}\", message);\n return Ok(message)\n }\n }\n }\n\n \/\/\/ Send a message to stream, if possible.\n \/\/\/\n \/\/\/ This function guarantees that the frame is queued regardless of any errors.\n \/\/\/ There is no need to resend the frame. 
In order to handle WouldBlock or Incomplete,\n \/\/\/ call write_pending() afterwards.\n pub fn write_message(&mut self, message: Message) -> Result<()> {\n let frame = {\n let opcode = match message {\n Message::Text(_) => OpData::Text,\n Message::Binary(_) => OpData::Binary,\n };\n Frame::message(message.into_data(), OpCode::Data(opcode), true)\n };\n self.send_queue.push_back(frame);\n self.write_pending()\n }\n\n \/\/\/ Close the connection.\n \/\/\/\n \/\/\/ This function guarantees that the close frame will be queued.\n \/\/\/ There is no need to call it again, just like write_message().\n pub fn close(&mut self) -> Result<()> {\n match self.state {\n WebSocketState::Active => {\n self.state = WebSocketState::ClosedByUs;\n let frame = Frame::close(None);\n self.send_queue.push_back(frame);\n }\n _ => {\n \/\/ already closed, nothing to do\n }\n }\n self.write_pending()\n }\n\n \/\/\/ Flush the pending send queue.\n pub fn write_pending(&mut self) -> Result<()> {\n \/\/ First, make sure we have no pending frame sending.\n self.socket.write_pending()?;\n\n \/\/ Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in\n \/\/ response, unless it already received a Close frame. It SHOULD\n \/\/ respond with Pong frame as soon as is practical. (RFC 6455)\n if let Some(pong) = replace(&mut self.pong, None) {\n self.send_one_frame(pong)?;\n }\n \/\/ If we have any unsent frames, send them.\n while let Some(data) = self.send_queue.pop_front() {\n self.send_one_frame(data)?;\n }\n\n \/\/ If we're closing and there is nothing to send anymore, we should close the connection.\n match self.state {\n WebSocketState::ClosedByPeer if self.send_queue.is_empty() => {\n \/\/ The underlying TCP connection, in most normal cases, SHOULD be closed\n \/\/ first by the server, so that it holds the TIME_WAIT state and not the\n \/\/ client (as this would prevent it from re-opening the connection for 2\n \/\/ maximum segment lifetimes (2MSL), while there is no corresponding\n \/\/ server impact as a TIME_WAIT connection is immediately reopened upon\n \/\/ a new SYN with a higher seq number). (RFC 6455)\n match self.role {\n Role::Client => Ok(()),\n Role::Server => Err(Error::ConnectionClosed),\n }\n }\n _ => Ok(()),\n }\n }\n\n \/\/\/ Try to decode one message frame. May return None.\n fn read_message_frame(&mut self) -> Result<Option<Message>> {\n if let Some(mut frame) = self.socket.read_frame()? {\n\n \/\/ MUST be 0 unless an extension is negotiated that defines meanings\n \/\/ for non-zero values. If a nonzero value is received and none of\n \/\/ the negotiated extensions defines the meaning of such a nonzero\n \/\/ value, the receiving endpoint MUST _Fail the WebSocket\n \/\/ Connection_.\n if frame.has_rsv1() || frame.has_rsv2() || frame.has_rsv3() {\n return Err(Error::Protocol(\"Reserved bits are non-zero\".into()))\n }\n\n match self.role {\n Role::Server => {\n if frame.is_masked() {\n \/\/ A server MUST remove masking for data frames received from a client\n \/\/ as described in Section 5.3. (RFC 6455)\n frame.remove_mask()\n } else {\n \/\/ The server MUST close the connection upon receiving a\n \/\/ frame that is not masked. (RFC 6455)\n return Err(Error::Protocol(\"Received an unmasked frame from client\".into()))\n }\n }\n Role::Client => {\n if frame.is_masked() {\n \/\/ A client MUST close a connection if it detects a masked frame. 
(RFC 6455)\n return Err(Error::Protocol(\"Received a masked frame from server\".into()))\n }\n }\n }\n\n match frame.opcode() {\n\n OpCode::Control(ctl) => {\n (match ctl {\n \/\/ All control frames MUST have a payload length of 125 bytes or less\n \/\/ and MUST NOT be fragmented. (RFC 6455)\n _ if !frame.is_final() => {\n Err(Error::Protocol(\"Fragmented control frame\".into()))\n }\n _ if frame.payload().len() > 125 => {\n Err(Error::Protocol(\"Control frame too big\".into()))\n }\n OpCtl::Close => {\n self.do_close(frame.into_close()?)\n }\n OpCtl::Reserved(i) => {\n Err(Error::Protocol(format!(\"Unknown control frame type {}\", i).into()))\n }\n OpCtl::Ping | OpCtl::Pong if !self.state.is_active() => {\n \/\/ No ping processing while closing.\n Ok(())\n }\n OpCtl::Ping => {\n self.do_ping(frame.into_data())\n }\n OpCtl::Pong => {\n self.do_pong(frame.into_data())\n }\n }).map(|_| None)\n }\n\n OpCode::Data(_) if !self.state.is_active() => {\n \/\/ No data processing while closing.\n Ok(None)\n }\n\n OpCode::Data(data) => {\n let fin = frame.is_final();\n match data {\n OpData::Continue => {\n if let Some(ref mut msg) = self.incomplete {\n \/\/ TODO if msg too big\n msg.extend(frame.into_data())?;\n } else {\n return Err(Error::Protocol(\"Continue frame but nothing to continue\".into()))\n }\n if fin {\n Ok(Some(replace(&mut self.incomplete, None).unwrap().complete()?))\n } else {\n Ok(None)\n }\n }\n c if self.incomplete.is_some() => {\n Err(Error::Protocol(\n format!(\"Received {} while waiting for more fragments\", c).into()\n ))\n }\n OpData::Text | OpData::Binary => {\n let msg = {\n let message_type = match data {\n OpData::Text => IncompleteMessageType::Text,\n OpData::Binary => IncompleteMessageType::Binary,\n _ => panic!(\"Bug: message is not text nor binary\"),\n };\n let mut m = IncompleteMessage::new(message_type);\n m.extend(frame.into_data())?;\n m\n };\n if fin {\n Ok(Some(msg.complete()?))\n } else {\n self.incomplete = Some(msg);\n Ok(None)\n }\n }\n OpData::Reserved(i) => {\n Err(Error::Protocol(format!(\"Unknown data frame type {}\", i).into()))\n }\n }\n }\n\n } \/\/ match opcode\n\n } else {\n Err(Error::Protocol(\"Connection reset without closing handshake\".into()))\n }\n }\n\n \/\/\/ Received a close frame.\n fn do_close(&mut self, close: Option<(CloseCode, String)>) -> Result<()> {\n match self.state {\n WebSocketState::Active => {\n self.state = WebSocketState::ClosedByPeer;\n let reply = if let Some((code, _)) = close {\n if code.is_allowed() {\n Frame::close(Some((CloseCode::Normal, \"\")))\n } else {\n Frame::close(Some((CloseCode::Protocol, \"Protocol violation\")))\n }\n } else {\n Frame::close(None)\n };\n self.send_queue.push_back(reply);\n Ok(())\n }\n WebSocketState::ClosedByPeer => {\n \/\/ It is already closed, just ignore.\n Ok(())\n }\n WebSocketState::ClosedByUs => {\n \/\/ We received a reply.\n match self.role {\n Role::Client => {\n \/\/ Client waits for the server to close the connection.\n Ok(())\n }\n Role::Server => {\n \/\/ Server closes the connection.\n Err(Error::ConnectionClosed)\n }\n }\n }\n }\n }\n\n \/\/\/ Received a ping frame.\n fn do_ping(&mut self, ping: Vec<u8>) -> Result<()> {\n \/\/ If an endpoint receives a Ping frame and has not yet sent Pong\n \/\/ frame(s) in response to previous Ping frame(s), the endpoint MAY\n \/\/ elect to send a Pong frame for only the most recently processed Ping\n \/\/ frame. 
(RFC 6455)\n \/\/ We do exactly that, keeping a \"queue\" from one and only Pong frame.\n self.pong = Some(Frame::pong(ping));\n Ok(())\n }\n\n \/\/\/ Received a pong frame.\n fn do_pong(&mut self, _: Vec<u8>) -> Result<()> {\n \/\/ A Pong frame MAY be sent unsolicited. This serves as a\n \/\/ unidirectional heartbeat. A response to an unsolicited Pong frame is\n \/\/ not expected. (RFC 6455)\n \/\/ Due to this, we just don't check pongs right now.\n \/\/ TODO: check if there was a reply to our ping at all...\n Ok(())\n }\n\n \/\/\/ Send a single pending frame.\n fn send_one_frame(&mut self, mut frame: Frame) -> Result<()> {\n match self.role {\n Role::Server => {\n }\n Role::Client => {\n \/\/ 5. If the data is being sent by the client, the frame(s) MUST be\n \/\/ masked as defined in Section 5.3. (RFC 6455)\n frame.set_mask();\n }\n }\n self.socket.write_frame(frame)\n }\n\n}\n\n\/\/\/ The current connection state.\nenum WebSocketState {\n Active,\n ClosedByUs,\n ClosedByPeer,\n}\n\nimpl WebSocketState {\n \/\/\/ Tell if we're allowed to process normal messages.\n fn is_active(&self) -> bool {\n match *self {\n WebSocketState::Active => true,\n _ => false,\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{WebSocket, Role, Message};\n\n use std::io;\n use std::io::Cursor;\n\n struct WriteMoc<Stream>(Stream);\n\n impl<Stream> io::Write for WriteMoc<Stream> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n Ok(buf.len())\n }\n fn flush(&mut self) -> io::Result<()> {\n Ok(())\n }\n }\n\n impl<Stream: io::Read> io::Read for WriteMoc<Stream> {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n }\n\n\n #[test]\n fn receive_messages() {\n let incoming = Cursor::new(vec![\n 0x01, 0x07,\n 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20,\n 0x80, 0x06,\n 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21,\n 0x82, 0x03,\n 0x01, 0x02, 0x03,\n ]);\n let mut socket = WebSocket::from_raw_socket(WriteMoc(incoming), Role::Client);\n assert_eq!(socket.read_message().unwrap(), Message::Text(\"Hello, World!\".into()));\n assert_eq!(socket.read_message().unwrap(), Message::Binary(vec![0x01, 0x02, 0x03]));\n }\n\n}\n<commit_msg>Add possibility to send pings<commit_after>\/\/! 
Generic WebSocket protocol implementation\n\nmod frame;\nmod message;\n\npub use self::message::Message;\n\nuse std::collections::VecDeque;\nuse std::io::{Read, Write};\nuse std::mem::replace;\n\nuse error::{Error, Result};\nuse self::message::{IncompleteMessage, IncompleteMessageType};\nuse self::frame::{Frame, FrameSocket};\nuse self::frame::coding::{OpCode, Data as OpData, Control as OpCtl, CloseCode};\nuse util::NonBlockingResult;\n\n\/\/\/ Indicates a Client or Server role of the websocket\n#[derive(Debug, Clone, Copy)]\npub enum Role {\n \/\/\/ This socket is a server\n Server,\n \/\/\/ This socket is a client\n Client,\n}\n\n\/\/\/ WebSocket input-output stream\npub struct WebSocket<Stream> {\n \/\/\/ Server or client?\n role: Role,\n \/\/\/ The underlying socket.\n socket: FrameSocket<Stream>,\n \/\/\/ The state of processing, either \"active\" or \"closing\".\n state: WebSocketState,\n \/\/\/ Receive: an incomplete message being processed.\n incomplete: Option<IncompleteMessage>,\n \/\/\/ Send: a data send queue.\n send_queue: VecDeque<Frame>,\n \/\/\/ Send: an OOB pong message.\n pong: Option<Frame>,\n}\n\nimpl<Stream> WebSocket<Stream> {\n \/\/\/ Convert a raw socket into a WebSocket without performing a handshake.\n pub fn from_raw_socket(stream: Stream, role: Role) -> Self {\n WebSocket::from_frame_socket(FrameSocket::new(stream), role)\n }\n\n \/\/\/ Convert a raw socket into a WebSocket without performing a handshake.\n pub fn from_partially_read(stream: Stream, part: Vec<u8>, role: Role) -> Self {\n WebSocket::from_frame_socket(FrameSocket::from_partially_read(stream, part), role)\n }\n\n \/\/\/ Returns a shared reference to the inner stream.\n pub fn get_ref(&self) -> &Stream {\n self.socket.get_ref()\n }\n \/\/\/ Returns a mutable reference to the inner stream.\n pub fn get_mut(&mut self) -> &mut Stream {\n self.socket.get_mut()\n }\n\n \/\/\/ Convert a frame socket into a WebSocket.\n fn from_frame_socket(socket: FrameSocket<Stream>, role: Role) -> Self {\n WebSocket {\n role: role,\n socket: socket,\n state: WebSocketState::Active,\n incomplete: None,\n send_queue: VecDeque::new(),\n pong: None,\n }\n }\n}\n\nimpl<Stream: Read + Write> WebSocket<Stream> {\n \/\/\/ Read a message from stream, if possible.\n \/\/\/\n \/\/\/ This function sends pong and close responses automatically.\n \/\/\/ However, it never blocks on write.\n pub fn read_message(&mut self) -> Result<Message> {\n loop {\n \/\/ Since we may get ping or close, we need to reply to the messages even during read.\n \/\/ Thus we call write_pending() but ignore its blocking.\n self.write_pending().no_block()?;\n \/\/ If we get here, either write blocks or we have nothing to write.\n \/\/ Thus if read blocks, just let it return WouldBlock.\n if let Some(message) = self.read_message_frame()? {\n trace!(\"Received message {}\", message);\n return Ok(message)\n }\n }\n }\n\n \/\/\/ Send a message to stream, if possible.\n \/\/\/\n \/\/\/ This function guarantees that the frame is queued regardless of any errors.\n \/\/\/ There is no need to resend the frame. 
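For illustration only (not from the original source), a minimal usage sketch of the queue-then-flush pattern follows; `socket` is assumed to be an already-connected client-side `WebSocket<TcpStream>`:\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ \/\/ queue one text message, then flush the send queue\n \/\/\/ socket.write_message(Message::Text(\"Hello\".into())).unwrap();\n \/\/\/ socket.write_pending().unwrap();\n \/\/\/ ```\n \/\/\/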
In order to handle WouldBlock or Incomplete,\n \/\/\/ call write_pending() afterwards.\n pub fn write_message(&mut self, message: Message) -> Result<()> {\n let frame = {\n let opcode = match message {\n Message::Text(_) => OpData::Text,\n Message::Binary(_) => OpData::Binary,\n };\n Frame::message(message.into_data(), OpCode::Data(opcode), true)\n };\n self.send_queue.push_back(frame);\n self.write_pending()\n }\n\n \/\/\/ Send ping.\n pub fn send_ping(&mut self, payload: Vec<u8>) -> Result<()> {\n self.send_queue.push_back(Frame::ping(payload));\n self.write_pending()\n }\n\n \/\/\/ Close the connection.\n \/\/\/\n \/\/\/ This function guarantees that the close frame will be queued.\n \/\/\/ There is no need to call it again, just like write_message().\n pub fn close(&mut self) -> Result<()> {\n match self.state {\n WebSocketState::Active => {\n self.state = WebSocketState::ClosedByUs;\n let frame = Frame::close(None);\n self.send_queue.push_back(frame);\n }\n _ => {\n \/\/ already closed, nothing to do\n }\n }\n self.write_pending()\n }\n\n \/\/\/ Flush the pending send queue.\n pub fn write_pending(&mut self) -> Result<()> {\n \/\/ First, make sure we have no pending frame sending.\n self.socket.write_pending()?;\n\n \/\/ Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in\n \/\/ response, unless it already received a Close frame. It SHOULD\n \/\/ respond with Pong frame as soon as is practical. (RFC 6455)\n if let Some(pong) = replace(&mut self.pong, None) {\n self.send_one_frame(pong)?;\n }\n \/\/ If we have any unsent frames, send them.\n while let Some(data) = self.send_queue.pop_front() {\n self.send_one_frame(data)?;\n }\n\n \/\/ If we're closing and there is nothing to send anymore, we should close the connection.\n match self.state {\n WebSocketState::ClosedByPeer if self.send_queue.is_empty() => {\n \/\/ The underlying TCP connection, in most normal cases, SHOULD be closed\n \/\/ first by the server, so that it holds the TIME_WAIT state and not the\n \/\/ client (as this would prevent it from re-opening the connection for 2\n \/\/ maximum segment lifetimes (2MSL), while there is no corresponding\n \/\/ server impact as a TIME_WAIT connection is immediately reopened upon\n \/\/ a new SYN with a higher seq number). (RFC 6455)\n match self.role {\n Role::Client => Ok(()),\n Role::Server => Err(Error::ConnectionClosed),\n }\n }\n _ => Ok(()),\n }\n }\n\n \/\/\/ Try to decode one message frame. May return None.\n fn read_message_frame(&mut self) -> Result<Option<Message>> {\n if let Some(mut frame) = self.socket.read_frame()? {\n\n \/\/ MUST be 0 unless an extension is negotiated that defines meanings\n \/\/ for non-zero values. If a nonzero value is received and none of\n \/\/ the negotiated extensions defines the meaning of such a nonzero\n \/\/ value, the receiving endpoint MUST _Fail the WebSocket\n \/\/ Connection_.\n if frame.has_rsv1() || frame.has_rsv2() || frame.has_rsv3() {\n return Err(Error::Protocol(\"Reserved bits are non-zero\".into()))\n }\n\n match self.role {\n Role::Server => {\n if frame.is_masked() {\n \/\/ A server MUST remove masking for data frames received from a client\n \/\/ as described in Section 5.3. (RFC 6455)\n frame.remove_mask()\n } else {\n \/\/ The server MUST close the connection upon receiving a\n \/\/ frame that is not masked. 
(RFC 6455)\n return Err(Error::Protocol(\"Received an unmasked frame from client\".into()))\n }\n }\n Role::Client => {\n if frame.is_masked() {\n \/\/ A client MUST close a connection if it detects a masked frame. (RFC 6455)\n return Err(Error::Protocol(\"Received a masked frame from server\".into()))\n }\n }\n }\n\n match frame.opcode() {\n\n OpCode::Control(ctl) => {\n (match ctl {\n \/\/ All control frames MUST have a payload length of 125 bytes or less\n \/\/ and MUST NOT be fragmented. (RFC 6455)\n _ if !frame.is_final() => {\n Err(Error::Protocol(\"Fragmented control frame\".into()))\n }\n _ if frame.payload().len() > 125 => {\n Err(Error::Protocol(\"Control frame too big\".into()))\n }\n OpCtl::Close => {\n self.do_close(frame.into_close()?)\n }\n OpCtl::Reserved(i) => {\n Err(Error::Protocol(format!(\"Unknown control frame type {}\", i).into()))\n }\n OpCtl::Ping | OpCtl::Pong if !self.state.is_active() => {\n \/\/ No ping processing while closing.\n Ok(())\n }\n OpCtl::Ping => {\n self.do_ping(frame.into_data())\n }\n OpCtl::Pong => {\n self.do_pong(frame.into_data())\n }\n }).map(|_| None)\n }\n\n OpCode::Data(_) if !self.state.is_active() => {\n \/\/ No data processing while closing.\n Ok(None)\n }\n\n OpCode::Data(data) => {\n let fin = frame.is_final();\n match data {\n OpData::Continue => {\n if let Some(ref mut msg) = self.incomplete {\n \/\/ TODO if msg too big\n msg.extend(frame.into_data())?;\n } else {\n return Err(Error::Protocol(\"Continue frame but nothing to continue\".into()))\n }\n if fin {\n Ok(Some(replace(&mut self.incomplete, None).unwrap().complete()?))\n } else {\n Ok(None)\n }\n }\n c if self.incomplete.is_some() => {\n Err(Error::Protocol(\n format!(\"Received {} while waiting for more fragments\", c).into()\n ))\n }\n OpData::Text | OpData::Binary => {\n let msg = {\n let message_type = match data {\n OpData::Text => IncompleteMessageType::Text,\n OpData::Binary => IncompleteMessageType::Binary,\n _ => panic!(\"Bug: message is not text nor binary\"),\n };\n let mut m = IncompleteMessage::new(message_type);\n m.extend(frame.into_data())?;\n m\n };\n if fin {\n Ok(Some(msg.complete()?))\n } else {\n self.incomplete = Some(msg);\n Ok(None)\n }\n }\n OpData::Reserved(i) => {\n Err(Error::Protocol(format!(\"Unknown data frame type {}\", i).into()))\n }\n }\n }\n\n } \/\/ match opcode\n\n } else {\n Err(Error::Protocol(\"Connection reset without closing handshake\".into()))\n }\n }\n\n \/\/\/ Received a close frame.\n fn do_close(&mut self, close: Option<(CloseCode, String)>) -> Result<()> {\n match self.state {\n WebSocketState::Active => {\n self.state = WebSocketState::ClosedByPeer;\n let reply = if let Some((code, _)) = close {\n if code.is_allowed() {\n Frame::close(Some((CloseCode::Normal, \"\")))\n } else {\n Frame::close(Some((CloseCode::Protocol, \"Protocol violation\")))\n }\n } else {\n Frame::close(None)\n };\n self.send_queue.push_back(reply);\n Ok(())\n }\n WebSocketState::ClosedByPeer => {\n \/\/ It is already closed, just ignore.\n Ok(())\n }\n WebSocketState::ClosedByUs => {\n \/\/ We received a reply.\n match self.role {\n Role::Client => {\n \/\/ Client waits for the server to close the connection.\n Ok(())\n }\n Role::Server => {\n \/\/ Server closes the connection.\n Err(Error::ConnectionClosed)\n }\n }\n }\n }\n }\n\n \/\/\/ Received a ping frame.\n fn do_ping(&mut self, ping: Vec<u8>) -> Result<()> {\n \/\/ If an endpoint receives a Ping frame and has not yet sent Pong\n \/\/ frame(s) in response to previous Ping frame(s), the endpoint 
MAY\n \/\/ elect to send a Pong frame for only the most recently processed Ping\n \/\/ frame. (RFC 6455)\n \/\/ We do exactly that, keeping a \"queue\" from one and only Pong frame.\n self.pong = Some(Frame::pong(ping));\n Ok(())\n }\n\n \/\/\/ Received a pong frame.\n fn do_pong(&mut self, _: Vec<u8>) -> Result<()> {\n \/\/ A Pong frame MAY be sent unsolicited. This serves as a\n \/\/ unidirectional heartbeat. A response to an unsolicited Pong frame is\n \/\/ not expected. (RFC 6455)\n \/\/ Due to this, we just don't check pongs right now.\n \/\/ TODO: check if there was a reply to our ping at all...\n Ok(())\n }\n\n \/\/\/ Send a single pending frame.\n fn send_one_frame(&mut self, mut frame: Frame) -> Result<()> {\n match self.role {\n Role::Server => {\n }\n Role::Client => {\n \/\/ 5. If the data is being sent by the client, the frame(s) MUST be\n \/\/ masked as defined in Section 5.3. (RFC 6455)\n frame.set_mask();\n }\n }\n self.socket.write_frame(frame)\n }\n\n}\n\n\/\/\/ The current connection state.\nenum WebSocketState {\n Active,\n ClosedByUs,\n ClosedByPeer,\n}\n\nimpl WebSocketState {\n \/\/\/ Tell if we're allowed to process normal messages.\n fn is_active(&self) -> bool {\n match *self {\n WebSocketState::Active => true,\n _ => false,\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{WebSocket, Role, Message};\n\n use std::io;\n use std::io::Cursor;\n\n struct WriteMoc<Stream>(Stream);\n\n impl<Stream> io::Write for WriteMoc<Stream> {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n Ok(buf.len())\n }\n fn flush(&mut self) -> io::Result<()> {\n Ok(())\n }\n }\n\n impl<Stream: io::Read> io::Read for WriteMoc<Stream> {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.0.read(buf)\n }\n }\n\n\n #[test]\n fn receive_messages() {\n let incoming = Cursor::new(vec![\n 0x01, 0x07,\n 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x2c, 0x20,\n 0x80, 0x06,\n 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21,\n 0x82, 0x03,\n 0x01, 0x02, 0x03,\n ]);\n let mut socket = WebSocket::from_raw_socket(WriteMoc(incoming), Role::Client);\n assert_eq!(socket.read_message().unwrap(), Message::Text(\"Hello, World!\".into()));\n assert_eq!(socket.read_message().unwrap(), Message::Binary(vec![0x01, 0x02, 0x03]));\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Have find use iter_elements.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added src\/lib.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lib.rs for extra file and path handling features<commit_after>use std::path::Path;\nuse std::process::Command;\n\npub fn mkdir(path: &String) {\n if Path::new(path).exists(){\n println!(\"{:?} already exists\", path);\n } else {\n Command::new(\"mkdir\")\n .arg(path)\n .spawn()\n .expect(\"failed to mkdir\");\n }\n}\n\npub fn zopen(path: &String, mode: String) {\n \n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement flush_to_disk for macos<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated to check\/remove dirty file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added CXXFLAGS=-std=c++11 to make<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start integrating websocket for rewarder<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Allow more fine-granular control over IO behaviour in runtime<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove residual extern 
crates.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement basic ecr for ldsr\/stsr<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add code ror semi-structured-logs<commit_after>\/\/\/ various log levels\n#[derive(Clone, PartialEq, Debug)]\npub enum LogLevel {\n Debug,\n Info,\n Warning,\n Error,\n}\n\/\/\/ primary function for emitting logs\npub fn log(level: LogLevel, message: &str) -> String {\n match level {\n LogLevel::Debug => debug(message),\n LogLevel::Info => info(message),\n LogLevel::Warning => warn(message),\n LogLevel::Error => error(message),\n }\n}\n\npub fn debug(message: &str) -> String {\n format!(\"[DEBUG]: {}\", message)\n}\n\npub fn info(message: &str) -> String {\n format!(\"[INFO]: {}\", message)\n}\npub fn warn(message: &str) -> String {\n format!(\"[WARNING]: {}\", message)\n}\npub fn error(message: &str) -> String {\n format!(\"[ERROR]: {}\", message)\n}\n<|endoftext|>"} {"text":"<commit_before>use super::errors::*;\n\nstatic C_PRE: &'static str = \"const char data[] = {\";\nstatic C_SEPERATOR: &'static str = \",\";\nstatic C_POST: &'static str = \"};\";\n\nstatic CPP_PRE: &'static str = \"const char data[] = {\";\nstatic CPP_SEPERATOR: &'static str = \",\";\nstatic CPP_POST: &'static str = \"};\";\n\nstatic PYTHON_PRE: &'static str = \"data = [\";\nstatic PYTHON_SEPERATOR: &'static str = \",\";\nstatic PYTHON_POST: &'static str = \"]\";\n\nstatic RUST_PRE: &'static str = \"let data = [\";\nstatic RUST_SEPERATOR: &'static str = \",\";\nstatic RUST_POST: &'static str = \"];\";\n\ntrait Render {\n fn render(&self, data: &[u8]) -> String;\n}\n\npub enum Language {\n C,\n Cpp,\n Rust,\n Python,\n}\n\nstruct Template {\n prefix: String,\n separator: String,\n suffix: String,\n}\n\nimpl Template {\n fn new(lang: Language) -> Template {\n match lang {\n Language::C => {\n Template {\n prefix: C_PRE.to_string(),\n separator: C_SEPERATOR.to_string(),\n suffix: C_POST.to_string(),\n }\n }\n Language::Cpp => {\n Template {\n prefix: CPP_PRE.to_string(),\n separator: CPP_SEPERATOR.to_string(),\n suffix: CPP_POST.to_string(),\n }\n }\n Language::Python => {\n Template {\n prefix: PYTHON_PRE.to_string(),\n separator: PYTHON_SEPERATOR.to_string(),\n suffix: PYTHON_POST.to_string(),\n }\n }\n Language::Rust => {\n Template {\n prefix: RUST_PRE.to_string(),\n separator: RUST_SEPERATOR.to_string(),\n suffix: RUST_POST.to_string(),\n }\n }\n }\n }\n}\n\nimpl Render for Template {\n fn render(&self, data: &[u8]) -> String {\n let mut output = String::new();\n output = output + &self.prefix;\n for element in data.iter().enumerate() {\n let (index, byte) = element;\n if data.len() - 1 == index {\n output = output + &format!(\"{}\", byte);\n } else {\n output = output + &format!(\"{}{} \", byte, self.separator);\n }\n }\n output = output + &self.suffix;\n output\n }\n}\n\nmod test {\n use super::*;\n\n #[test]\n fn render_basic_c_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::C);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_cplusplus_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Cpp);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_python_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Python);\n let 
expected_result = \"data = [0, 1, 2, 3, 4, 5]\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_rust_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Rust);\n let expected_result = \"let data = [0, 1, 2, 3, 4, 5];\";\n assert_eq!(template.render(&data), expected_result);\n }\n}\n<commit_msg>Add bytes_per_line setting<commit_after>use super::errors::*;\n\nstatic C_PRE: &'static str = \"const char data[] = {\";\nstatic C_SEPERATOR: &'static str = \",\";\nstatic C_POST: &'static str = \"};\";\n\nstatic CPP_PRE: &'static str = \"const char data[] = {\";\nstatic CPP_SEPERATOR: &'static str = \",\";\nstatic CPP_POST: &'static str = \"};\";\n\nstatic PYTHON_PRE: &'static str = \"data = [\";\nstatic PYTHON_SEPERATOR: &'static str = \",\";\nstatic PYTHON_POST: &'static str = \"]\";\n\nstatic RUST_PRE: &'static str = \"let data = [\";\nstatic RUST_SEPERATOR: &'static str = \",\";\nstatic RUST_POST: &'static str = \"];\";\n\ntrait Render {\n fn render(&self, data: &[u8]) -> String;\n}\n\npub enum Language {\n C,\n Cpp,\n Rust,\n Python,\n}\n\nstruct Template {\n prefix: String,\n separator: String,\n suffix: String,\n bytes_per_line: usize,\n}\n\nimpl Template {\n fn new(lang: Language) -> Template {\n match lang {\n Language::C => {\n Template {\n prefix: C_PRE.to_string(),\n separator: C_SEPERATOR.to_string(),\n suffix: C_POST.to_string(),\n bytes_per_line: 16,\n }\n }\n Language::Cpp => {\n Template {\n prefix: CPP_PRE.to_string(),\n separator: CPP_SEPERATOR.to_string(),\n suffix: CPP_POST.to_string(),\n bytes_per_line: 16,\n }\n }\n Language::Python => {\n Template {\n prefix: PYTHON_PRE.to_string(),\n separator: PYTHON_SEPERATOR.to_string(),\n suffix: PYTHON_POST.to_string(),\n bytes_per_line: 16,\n }\n }\n Language::Rust => {\n Template {\n prefix: RUST_PRE.to_string(),\n separator: RUST_SEPERATOR.to_string(),\n suffix: RUST_POST.to_string(),\n bytes_per_line: 16,\n }\n }\n }\n }\n}\n\nimpl Render for Template {\n fn render(&self, data: &[u8]) -> String {\n let mut output = String::new();\n output = output + &self.prefix;\n for element in data.iter().enumerate() {\n let (index, byte) = element;\n if data.len() - 1 == index {\n output = output + &format!(\"{}\", byte);\n } else {\n output = output + &format!(\"{}{} \", byte, self.separator);\n if (index + 1) % self.bytes_per_line == 0 {\n output = output + &\"\\n\";\n }\n }\n }\n output = output + &self.suffix;\n output\n }\n}\n\nmod test {\n use super::*;\n\n #[test]\n fn render_basic_c_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::C);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_c_template_with_large_data() {\n let data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,\n 22, 23, 24, 25, 26];\n let template = Template::new(Language::C);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, \\n16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_cplusplus_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Cpp);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_cplusplus_template_with_large_data() {\n let data = [0, 1, 2, 3, 4, 
5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,\n 22, 23, 24, 25, 26];\n let template = Template::new(Language::Cpp);\n let expected_result = \"const char data[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, \\n16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26};\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_python_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Python);\n let expected_result = \"data = [0, 1, 2, 3, 4, 5]\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_python_template_with_large_data() {\n let data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,\n 22, 23, 24, 25, 26];\n let template = Template::new(Language::Python);\n let expected_result = \"data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, \\n16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26]\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_basic_rust_template() {\n let data = [0, 1, 2, 3, 4, 5];\n let template = Template::new(Language::Rust);\n let expected_result = \"let data = [0, 1, 2, 3, 4, 5];\";\n assert_eq!(template.render(&data), expected_result);\n }\n\n #[test]\n fn render_rust_template_with_large_data() {\n let data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,\n 22, 23, 24, 25, 26];\n let template = Template::new(Language::Rust);\n let expected_result = \"let data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, \\n16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26];\";\n assert_eq!(template.render(&data), expected_result);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fs::File as FSFile;\nuse std::ops::Deref;\nuse std::io::Write;\nuse std::io::Read;\n\npub mod path;\npub mod file;\npub mod parser;\npub mod json;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::file::File;\nuse storage::file::id::FileID;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file::header::data::FileHeaderData;\n\ntype Cache = HashMap<FileID, Rc<RefCell<File>>>;\n\npub struct Store {\n storepath: String,\n cache : RefCell<Cache>,\n}\n\nimpl Store {\n\n pub fn new(storepath: String) -> Store {\n Store {\n storepath: storepath,\n cache: RefCell::new(HashMap::new()),\n }\n }\n\n fn put_in_cache(&self, f: File) -> FileID {\n let res = f.id().clone();\n self.cache.borrow_mut().insert(f.id().clone(), Rc::new(RefCell::new(f)));\n res\n }\n\n pub fn load_in_cache<HP>(&self, m: &Module, parser: &Parser<HP>, id: FileID)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n let idstr : String = id.clone().into();\n let path = format!(\"{}\/{}-{}.imag\", self.storepath, m.name(), idstr);\n let mut string = String::new();\n\n FSFile::open(&path).map(|mut file| {\n file.read_to_string(&mut string)\n .map_err(|e| error!(\"Failed reading file: '{}'\", path));\n });\n\n parser.read(string).map(|(header, data)| {\n self.new_file_from_parser_result(m, id.clone(), header, data);\n });\n\n self.load(&id)\n }\n\n pub fn new_file(&self, module: &Module)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n\n debug!(\"Create new File object: {:?}\", &f);\n self.put_in_cache(f)\n }\n\n pub 
fn new_file_from_parser_result(&self,\n module: &Module,\n id: FileID,\n header: FileHeaderData,\n data: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: header,\n data: data,\n id: id,\n };\n debug!(\"Create new File object from parser result: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_header(&self,\n module: &Module,\n h: FileHeaderData)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with header: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_data(&self, module: &Module, d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with data: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_content(&self,\n module: &Module,\n h: FileHeaderData,\n d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with content: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn persist<HP>(&self,\n p: &Parser<HP>,\n f: Rc<RefCell<File>>) -> bool\n where HP: FileHeaderParser\n {\n let file = f.deref().borrow();\n let text = p.write(file.contents());\n if text.is_err() {\n error!(\"Error: {}\", text.err().unwrap());\n return false;\n }\n\n let path = {\n let ids : String = file.id().clone().into();\n format!(\"{}\/{}-{}.imag\", self.storepath, file.owning_module_name, ids)\n };\n\n self.ensure_store_path_exists();\n\n FSFile::create(&path).map(|mut fsfile| {\n fsfile.write_all(&text.unwrap().clone().into_bytes()[..])\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n }).and(Ok(true)).unwrap()\n }\n\n fn ensure_store_path_exists(&self) {\n use std::fs::create_dir_all;\n use std::process::exit;\n\n create_dir_all(&self.storepath).unwrap_or_else(|e| {\n error!(\"Could not create store: '{}'\", self.storepath);\n error!(\"Error : '{}'\", e);\n error!(\"Killing myself now\");\n exit(1);\n })\n }\n\n pub fn load(&self, id: &FileID) -> Option<Rc<RefCell<File>>> {\n debug!(\"Loading '{:?}'\", id);\n self.cache.borrow().get(id).cloned()\n }\n\n pub fn remove(&self, id: FileID) -> bool {\n use std::fs::remove_file;\n\n self.cache\n .borrow_mut()\n .remove(&id)\n .map(|file| {\n let idstr : String = id.into();\n let path = format!(\"{}\/{}-{}.imag\",\n self.storepath,\n file.deref().borrow().owner_name(),\n idstr);\n remove_file(path).is_ok()\n })\n .unwrap_or(false)\n }\n\n fn get_new_file_id(&self) -> FileID {\n use uuid::Uuid;\n let hash = FileHash::from(Uuid::new_v4().to_hyphenated_string());\n FileID::new(FileIDType::UUID, hash)\n }\n\n}\n<commit_msg>Add Store::load_for_module()<commit_after>use std::rc::Rc;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::fs::File as FSFile;\nuse std::ops::Deref;\nuse std::io::Write;\nuse std::io::Read;\n\npub mod path;\npub mod file;\npub mod parser;\npub mod json;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::file::File;\nuse storage::file::id::FileID;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file::header::data::FileHeaderData;\n\ntype Cache = HashMap<FileID, Rc<RefCell<File>>>;\n\npub struct Store {\n storepath: String,\n cache : 
RefCell<Cache>,\n}\n\nimpl Store {\n\n pub fn new(storepath: String) -> Store {\n Store {\n storepath: storepath,\n cache: RefCell::new(HashMap::new()),\n }\n }\n\n fn put_in_cache(&self, f: File) -> FileID {\n let res = f.id().clone();\n self.cache.borrow_mut().insert(f.id().clone(), Rc::new(RefCell::new(f)));\n res\n }\n\n pub fn load_in_cache<HP>(&self, m: &Module, parser: &Parser<HP>, id: FileID)\n -> Option<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n let idstr : String = id.clone().into();\n let path = format!(\"{}\/{}-{}.imag\", self.storepath, m.name(), idstr);\n let mut string = String::new();\n\n FSFile::open(&path).map(|mut file| {\n file.read_to_string(&mut string)\n .map_err(|e| error!(\"Failed reading file: '{}'\", path));\n });\n\n parser.read(string).map(|(header, data)| {\n self.new_file_from_parser_result(m, id.clone(), header, data);\n });\n\n self.load(&id)\n }\n\n pub fn new_file(&self, module: &Module)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n\n debug!(\"Create new File object: {:?}\", &f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_from_parser_result(&self,\n module: &Module,\n id: FileID,\n header: FileHeaderData,\n data: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: header,\n data: data,\n id: id,\n };\n debug!(\"Create new File object from parser result: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_header(&self,\n module: &Module,\n h: FileHeaderData)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: String::from(\"\"),\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with header: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_data(&self, module: &Module, d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: FileHeaderData::Null,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with data: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn new_file_with_content(&self,\n module: &Module,\n h: FileHeaderData,\n d: String)\n -> FileID\n {\n let f = File {\n owning_module_name: module.name(),\n header: h,\n data: d,\n id: self.get_new_file_id(),\n };\n debug!(\"Create new File object with content: {:?}\", f);\n self.put_in_cache(f)\n }\n\n pub fn persist<HP>(&self,\n p: &Parser<HP>,\n f: Rc<RefCell<File>>) -> bool\n where HP: FileHeaderParser\n {\n let file = f.deref().borrow();\n let text = p.write(file.contents());\n if text.is_err() {\n error!(\"Error: {}\", text.err().unwrap());\n return false;\n }\n\n let path = {\n let ids : String = file.id().clone().into();\n format!(\"{}\/{}-{}.imag\", self.storepath, file.owning_module_name, ids)\n };\n\n self.ensure_store_path_exists();\n\n FSFile::create(&path).map(|mut fsfile| {\n fsfile.write_all(&text.unwrap().clone().into_bytes()[..])\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n }).and(Ok(true)).unwrap()\n }\n\n fn ensure_store_path_exists(&self) {\n use std::fs::create_dir_all;\n use std::process::exit;\n\n create_dir_all(&self.storepath).unwrap_or_else(|e| {\n error!(\"Could not create store: '{}'\", self.storepath);\n error!(\"Error : '{}'\", e);\n error!(\"Killing myself now\");\n exit(1);\n })\n }\n\n pub fn load(&self, id: &FileID) -> Option<Rc<RefCell<File>>> {\n debug!(\"Loading '{:?}'\", id);\n self.cache.borrow().get(id).cloned()\n }\n\n 
pub fn remove(&self, id: FileID) -> bool {\n use std::fs::remove_file;\n\n self.cache\n .borrow_mut()\n .remove(&id)\n .map(|file| {\n let idstr : String = id.into();\n let path = format!(\"{}\/{}-{}.imag\",\n self.storepath,\n file.deref().borrow().owner_name(),\n idstr);\n remove_file(path).is_ok()\n })\n .unwrap_or(false)\n }\n\n pub fn load_for_module<HP>(&self, m: &Module, parser: &Parser<HP>)\n -> Vec<Rc<RefCell<File>>>\n where HP: FileHeaderParser\n {\n use glob::{glob, Paths, PatternError};\n\n let globstr = format!(\"{}\/{}-*.imag\", self.storepath, m.name());\n let mut res = vec![];\n\n glob(&globstr[..]).map(|paths| {\n for path in paths {\n if let Ok(pathbuf) = path {\n let fname = pathbuf.file_name().and_then(|s| s.to_str());\n fname.map(|s| {\n FileID::parse(&String::from(s)).map(|id| {\n self.load_in_cache(m, parser, id).map(|file| {\n res.push(file);\n })\n });\n });\n }\n }\n });\n res\n }\n\n fn get_new_file_id(&self) -> FileID {\n use uuid::Uuid;\n let hash = FileHash::from(Uuid::new_v4().to_hyphenated_string());\n FileID::new(FileIDType::UUID, hash)\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:sparkles: Make to possible create all project name by id<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Commiting wheel timer<commit_after>extern crate test;\n\nuse test::Bencher;\n\nuse std::fmt::{Show, Formatter, Result};\nuse std::iter::AdditiveIterator;\n\nenum Node<T> {\n Cons(T, Box<Node<T>>),\n Nil\n}\n\nimpl<T> Node<T> {\n fn new() -> Node<T> {\n Nil\n }\n\n fn prepend(self, elem: T) -> Node<T> {\n Cons(elem, box self)\n }\n\n fn len(&self) -> uint {\n match *self {\n Cons(_, ref tail) => tail.len() + 1,\n Nil => 0\n }\n }\n}\n\nimpl<T: Show> Show for Node<T> {\n fn fmt(&self, f: &mut Formatter) -> Result {\n match *self {\n Cons(ref head, ref tail) => {\n write!(f, \"{}, [ ] -> {}\", head, tail)\n },\n Nil => {\n write!(f, \"Nil\")\n },\n }\n }\n}\n\nimpl<T: PartialEq> PartialEq for Node<T> {\n fn eq(&self, ys: &Node<T>) -> bool {\n match (self, ys) {\n (&Nil, &Nil) => true,\n (&Cons(ref x, box ref next_xs), &Cons(ref y, box ref next_ys))\n if x == y => next_xs == next_ys,\n _ => false\n }\n }\n}\n\/\/ Simple hashed wheel timer with bounded interval\n\/\/ See http:\/\/www.cs.columbia.edu\/~nahum\/w6998\/papers\/sosp87-timing-wheels.pdf\nstruct WheelTimer<T> {\n maxInterval: uint,\n currentTick: uint,\n\n ring: Vec<Node<T>>\n}\n\nimpl<T> WheelTimer<T> {\n\n \/\/ Returns the number of items currently scheduled\n fn size(&self) -> uint {\n return self.ring.iter().map(|node| node.len()).sum()\n }\n\n \/\/ Creates a new timer with the specified max interval\n fn new(maxInterval: uint) -> WheelTimer<T> {\n \/\/ Initialize the ring with Nil values\n let mut ring = Vec::with_capacity(maxInterval);\n for _ in range(0u, maxInterval) {\n ring.push(Nil)\n }\n\n return WheelTimer{\n maxInterval: maxInterval,\n currentTick: 0,\n ring: ring\n }\n }\n\n \/\/ Schedules a new value, available after `ticks`\n fn schedule(&mut self, ticks: uint, value: T) {\n \/\/ Compute the scheduled position in the wheel\n let index = (self.currentTick + ticks) % self.maxInterval;\n\n \/\/ Get the current node at `index` in the wheel\n let node = std::mem::replace(self.ring.get_mut(index), Nil);\n\n \/\/ Set the position in the wheel with the appended node\n *self.ring.get_mut(index) = node.prepend(value);\n }\n\n \/\/ Tick the timer, returning the list of nodes at the spot\n fn tick(&mut self) -> Node<T> {\n \/\/ Get the node at the current tick in the wheel\n let node 
= std::mem::replace(self.ring.get_mut(self.currentTick), Nil);\n\n \/\/ Increment the timer\n self.currentTick = (self.currentTick + 1) % self.maxInterval;\n\n \/\/ Return the node that was in that spot\n return node\n }\n}\n\nfn main() {\n \/\/ LinkedList example\n \/\/ Create an empty linked list\n let mut list = Node::<uint>::new();\n\n \/\/ Append some elements\n list = list.prepend(1);\n list = list.prepend(2);\n list = list.prepend(3);\n\n \/\/ Show the final state of the list\n println!(\"linked list has length: {}\", list.len());\n println!(\"{}\", list);\n\n \/\/ WheelTimer example\n \/\/ Create a new timer\n let mut timer = WheelTimer::<uint>::new(3);\n\n \/\/ Schedule some things\n timer.schedule(1, 1);\n timer.schedule(2, 2);\n timer.schedule(3, 3);\n\n \/\/ Print the timer size\n println!(\"size: {}\", timer.size());\n\n \/\/ Tick! Tick! Tick!\n println!(\"{}\", timer.tick());\n println!(\"{}\", timer.tick());\n println!(\"{}\", timer.tick());\n}\n\n#[test]\nfn wheel_timer_new_test() {\n let timer = WheelTimer::<uint>::new(3);\n assert!(timer.maxInterval == 3);\n assert!(timer.ring.capacity() == 3);\n assert!(timer.ring.len() == 3);\n}\n\n#[test]\nfn wheel_timer_schedule_test() {\n let mut timer = WheelTimer::<&'static str>::new(10);\n timer.schedule(3, \"tick\");\n\n timer.tick();\n timer.tick();\n timer.tick();\n\n let node = timer.tick();\n assert!(node != Nil);\n\n let val = match node {\n Cons(val, _) => val,\n Nil => \"\"\n };\n assert!(val == \"tick\");\n}\n\n#[test]\nfn wheel_timer_tick_test() {\n let mut timer = WheelTimer::<uint>::new(10);\n\n for i in range(0, 10) {\n timer.schedule(i, i)\n }\n\n for i in range(0, 10) {\n let node = timer.tick();\n assert!(node != Nil);\n\n let val = match node {\n Cons(val, _) => val,\n Nil => -1\n };\n assert!(val == i);\n }\n}\n\n#[bench]\nfn bench_wheel_timer_drain(b: &mut Bencher) {\n let maxInterval = 20;\n let mut timer = WheelTimer::<uint>::new(maxInterval);\n\n b.iter(|| {\n \/\/ Fill\n for j in range(0u, 100u) {\n timer.schedule(j%maxInterval, j%maxInterval);\n }\n\n \/\/ Drain\n for _ in range(0u, 100u) {\n timer.tick();\n }\n });\n}\n\n#[bench]\nfn bench_wheel_timer_fill(b: &mut Bencher) {\n let maxInterval = 20;\n let mut timer = WheelTimer::<uint>::new(maxInterval);\n let mut i = 0;\n\n b.iter(|| {\n timer.schedule(i%maxInterval, i%maxInterval);\n i = i + 1;\n });\n}\n\n#[bench]\nfn bench_wheel_timer_fast(b: &mut Bencher) {\n let maxInterval = 2;\n let mut timer = WheelTimer::<uint>::new(maxInterval);\n let mut i = 0;\n\n b.iter(|| {\n timer.schedule(i%maxInterval, i%maxInterval);\n timer.tick();\n i = i + 1;\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Get started with importing ethernet scheme to userspace<commit_after>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::{str, String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n\/\/\/ Ethernet resource\npub struct Resource {\n \/\/\/ The network\n network: Box<Resource>,\n \/\/\/ The data\n data: Vec<u8>,\n \/\/\/ The MAC addresss\n peer_addr: MACAddr,\n \/\/\/ The ethernet type\n ethertype: u16,\n}\n\nimpl Resource {\n fn dup(&self) -> Option<Box<Self>> {\n match self.network.dup() {\n Some(network) => Some(box Resource {\n network: network,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n ethertype: self.ethertype,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> 
Option<usize> {\n let path = format!(\"ethernet:\/\/{}\/{}\", self.peer_addr.to_string(), String::from_num_radix(self.ethertype as usize, 16));\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == self.ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) &&\n (frame.header.src.equals(self.peer_addr) ||\n self.peer_addr.equals(BROADCAST_MAC_ADDR)) {\n vec.push_all(&frame.data);\n return Some(frame.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let data = Vec::from(buf);\n\n \/*\n match self.network.write(EthernetII {\n header: EthernetIIHeader {\n src: unsafe { MAC_ADDR },\n dst: self.peer_addr,\n ethertype: n16::new(self.ethertype),\n },\n data: data,\n }.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n *\/\n None\n }\n\n fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n }\n\n fn sync(&mut self) -> bool {\n self.network.sync()\n }\n}\n\npub struct Scheme;\n\nimpl Scheme {\n fn new() -> Box<Self> {\n box Scheme\n }\n\n fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n \/\/Check host and port vs path\n if not_scheme.starts_with(\"\/\") {\n if let Some(mut network) = File::open(\"network:\/\/\") {\n if url.path().len() > 0 {\n let ethertype = url.path().to_num_radix(16) as u16;\n\n if url.host().len() > 0 {\n return Some(box Resource {\n network: network,\n data: Vec::new(),\n peer_addr: MACAddr::from_string(&url.host()),\n ethertype: ethertype,\n });\n } else {\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) {\n return Some(box Resource {\n network: network,\n data: frame.data,\n peer_addr: frame.header.src,\n ethertype: ethertype,\n });\n }\n }\n }\n None => break,\n }\n }\n }\n } else {\n \/*\n debug::d(\"Ethernet: No ethertype provided\\n\");\n *\/\n }\n }\n }\n\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move two AbsoluteBoxOffsets fields into a Vec2<commit_after><|endoftext|>"} {"text":"<commit_before>use runtime::Runtime;\nuse std::error::Error;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::fmt::Display;\nuse std::result::Result;\nuse std::collections::HashMap;\n\nuse clap::{App, ArgMatches};\n\nuse storage::backend::{StorageBackend, StorageBackendError};\n\npub mod bm;\n\n#[derive(Debug)]\npub struct ModuleError {\n desc: String,\n caused_by: Option<Box<Error>>,\n}\n\nimpl ModuleError {\n pub fn new(desc: &'static str) -> 
ModuleError {\n ModuleError {\n desc: desc.to_owned().to_string(),\n caused_by: None,\n }\n }\n}\n\nimpl Error for ModuleError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for ModuleError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"ModuleError: {}\", self.description())\n }\n}\n\npub struct CommandEnv<'a> {\n pub rt: &'a Runtime<'a>,\n pub bk: &'a StorageBackend,\n pub matches: &'a ArgMatches<'a, 'a>,\n}\n\npub type ModuleResult = Result<(), ModuleError>;\npub type CommandResult = ModuleResult;\npub type CommandMap<'a> = HashMap<&'a str, fn(&Module, CommandEnv) -> CommandResult>;\n\npub trait Module {\n\n fn callnames(&self) -> &'static [&'static str];\n fn name(&self) -> &'static str;\n fn shutdown(&self, rt : &Runtime) -> ModuleResult;\n\n fn get_commands(&self, rt: &Runtime) -> CommandMap;\n\n}\n\n<commit_msg>Rewrite Debug for Module<commit_after>use runtime::Runtime;\nuse std::error::Error;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::fmt::Display;\nuse std::fmt::Debug;\nuse std::path::Path;\nuse std::result::Result;\nuse std::collections::HashMap;\n\nuse clap::{App, ArgMatches};\n\nuse storage::backend::{StorageBackend, StorageBackendError};\n\npub mod bm;\n\n#[derive(Debug)]\npub struct ModuleError {\n desc: String,\n caused_by: Option<Box<Error>>,\n}\n\nimpl ModuleError {\n pub fn new(desc: &'static str) -> ModuleError {\n ModuleError {\n desc: desc.to_owned().to_string(),\n caused_by: None,\n }\n }\n}\n\nimpl Error for ModuleError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for ModuleError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"ModuleError: {}\", self.description())\n }\n}\n\npub struct CommandEnv<'a> {\n pub rt: &'a Runtime<'a>,\n pub bk: &'a StorageBackend,\n pub matches: &'a ArgMatches<'a, 'a>,\n}\n\npub type ModuleResult = Result<(), ModuleError>;\npub type CommandResult = ModuleResult;\npub type CommandMap<'a> = HashMap<&'a str, fn(&Module, CommandEnv) -> CommandResult>;\n\npub trait Module : Debug {\n\n fn callnames(&self) -> &'static [&'static str];\n fn name(&self) -> &'static str;\n fn shutdown(&self, rt : &Runtime) -> ModuleResult;\n\n fn get_commands(&self, rt: &Runtime) -> CommandMap;\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove tests for ruma_common types, they now live in that crate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix the Vulkan instance extensions, which moves the renderer initialisation on further, on Windows at least<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[impl] Add a simple example showing the various options<commit_after>extern crate hexplay;\n\nuse hexplay::*;\n\nfn main() {\n let data: Vec<u8> = (0u16..256u16).map(|v| v as u8).collect();\n\n let default_view = HexViewBuilder::new(&data)\n .finish();\n println!(\"Default view of all data:\\n{}\\n\\n\", default_view);\n\n let ascii_view = HexViewBuilder::new(&data)\n .codepage(CODEPAGE_ASCII)\n .finish();\n println!(\"Ascii codepage view of all data:\\n{}\\n\\n\", ascii_view);\n\n let partial_view = HexViewBuilder::new(&data[10..80])\n .address_offset(10)\n .finish();\n println!(\"Default view of a subslice:\\n{}\\n\\n\", partial_view);\n\n let narrowed_view = HexViewBuilder::new(&data)\n .row_width(10)\n .finish();\n 
println!(\"Narrowed view of all data:\\n{}\\n\\n\", narrowed_view);\n\n let combined_view = HexViewBuilder::new(&data[10..180])\n .address_offset(10)\n .codepage(CODEPAGE_1252)\n .row_width(14)\n .finish();\n println!(\"Custom view: \\n{}\\n\\n\", combined_view);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Started on example<commit_after>extern crate dynamic_reload;\n\nfn main() {\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2379<commit_after>\/\/ https:\/\/leetcode.com\/problems\/minimum-recolors-to-get-k-consecutive-black-blocks\/\npub fn minimum_recolors(blocks: String, k: i32) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", minimum_recolors(\"WBBWWBBWBW\".to_string(), 7)); \/\/ 3\n println!(\"{}\", minimum_recolors(\"WBWBBBW\".to_string(), 2)); \/\/ 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Get started with importing the ip scheme to userspace<commit_after>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::str;\nuse redox::{String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n\/\/\/ IP resource\npub struct Resource {\n link: Box<Resource>,\n data: Vec<u8>,\n peer_addr: IPv4Addr,\n proto: u8,\n id: u16,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Self>> {\n match self.link.dup() {\n Some(link) => Some(box IPResource {\n link: link,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n proto: self.proto,\n id: self.id,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"ip:\/\/{}{}\/{}\", self.peer_addr.to_string(), String::from_num_radix(self.proto as usize, 16));\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n mem::swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.link.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(packet) = IPv4::from_bytes(bytes) {\n if packet.header.proto == self.proto && packet.header.dst.equals(IP_ADDR) &&\n packet.header.src.equals(self.peer_addr) {\n vec.push_all(&packet.data);\n return Some(packet.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let ip_data = Vec::from(buf);\n\n self.id += 1;\n let mut ip = IPv4 {\n header: IPv4Header {\n ver_hlen: 0x40 | (mem::size_of::<IPv4Header>()\/4 & 0xF) as u8, \/\/ No Options\n services: 0,\n len: n16::new((mem::size_of::<IPv4Header>() + ip_data.len()) as u16), \/\/ No Options\n id: n16::new(self.id),\n flags_fragment: n16::new(0),\n ttl: 128,\n proto: self.proto,\n checksum: Checksum { data: 0 },\n src: IP_ADDR,\n dst: self.peer_addr,\n },\n options: Vec::new(),\n data: ip_data,\n };\n\n unsafe {\n let header_ptr: *const IPv4Header = &ip.header;\n ip.header.checksum.data =\n Checksum::compile(Checksum::sum(header_ptr as usize, mem::size_of::<IPv4Header>()) +\n Checksum::sum(ip.options.as_ptr() as usize, ip.options.len()));\n }\n\n match self.link.write(ip.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n }\n\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n 
}\n\n pub fn sync(&mut self) -> bool {\n self.link.sync()\n }\n}\n\n\/\/\/ A ARP entry (MAC + IP)\npub struct ARPEntry {\n ip: IPv4Addr,\n mac: MACAddr,\n}\n\n\/\/\/ IP scheme\npub struct Scheme {\n pub arp: Vec<ARPEntry>,\n}\n\nimpl Scheme {\n pub fn new() -> Box<Self> {\n box Scheme\n }\n\n pub fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n if url.path().len() > 0 {\n let proto = url.path().to_num_radix(16) as u8;\n\n if url.host().len() > 0 {\n let peer_addr = IPv4Addr::from_string(&url.host());\n let mut peer_mac = BROADCAST_MAC_ADDR;\n\n for entry in self.arp.iter() {\n if entry.ip.equals(peer_addr) {\n peer_mac = entry.mac;\n break;\n }\n }\n\n if peer_mac.equals(BROADCAST_MAC_ADDR) {\n if let Some(mut link) = URL::from_string(&(\"ethernet:\/\/\".to_string() + peer_mac.to_string() + \"\/806\")).open() {\n let arp = ARP {\n header: ARPHeader {\n htype: n16::new(1),\n ptype: n16::new(0x800),\n hlen: 6,\n plen: 4,\n oper: n16::new(1),\n src_mac: unsafe { MAC_ADDR },\n src_ip: IP_ADDR,\n dst_mac: peer_mac,\n dst_ip: peer_addr,\n },\n data: Vec::new(),\n };\n\n match link.write(arp.to_bytes().as_slice()) {\n Some(_) => loop {\n let mut bytes: Vec<u8> = Vec::new();\n match link.read_to_end(&mut bytes) {\n Some(_) =>\n if let Some(packet) = ARP::from_bytes(bytes) {\n if packet.header.oper.get() == 2 &&\n packet.header.src_ip.equals(peer_addr) {\n peer_mac = packet.header.src_mac;\n self.arp.push(ARPEntry {\n ip: peer_addr,\n mac: peer_mac,\n });\n break;\n }\n },\n None => (),\n }\n },\n None => debug::d(\"IP: ARP Write Failed!\\n\"),\n }\n }\n }\n\n if let Some(link) = URL::from_string(&(\"ethernet:\/\/\".to_string() + peer_mac.to_string() + \"\/800\")).open() {\n return Some(box IPResource {\n link: link,\n data: Vec::new(),\n peer_addr: peer_addr,\n proto: proto,\n id: (rand() % 65536) as u16,\n });\n }\n } else {\n while let Some(mut link) = URL::from_str(\"ethernet:\/\/\/800\").open() {\n let mut bytes: Vec<u8> = Vec::new();\n match link.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(packet) = IPv4::from_bytes(bytes) {\n if packet.header.proto == proto &&\n packet.header.dst.equals(IP_ADDR) {\n return Some(box IPResource {\n link: link,\n data: packet.data,\n peer_addr: packet.header.src,\n proto: proto,\n id: (rand() % 65536) as u16,\n });\n }\n }\n }\n None => break,\n }\n }\n }\n } else {\n \/*\n debug::d(\"IP: No protocol provided\\n\");\n *\/\n }\n\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #24828 - jooert:fix23253, r=pnkfelix<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum Foo { Bar }\n\nfn main() {\n Foo::Bar.a;\n \/\/~^ ERROR: attempted access of field `a` on type `Foo`, but no field with that name was found\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change HabitInstance implementation to not contain comment in instance<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A windowing implementation using GLUT.\n\nuse windowing::{ApplicationMethods, WindowEvent, WindowMethods};\nuse windowing::{IdleWindowEvent, ResizeWindowEvent, LoadUrlWindowEvent, MouseWindowEventClass};\nuse windowing::{ScrollWindowEvent, ZoomWindowEvent, NavigationWindowEvent, FinishedWindowEvent};\nuse windowing::{MouseWindowClickEvent, MouseWindowMouseDownEvent, MouseWindowMouseUpEvent};\nuse windowing::{Forward, Back};\n\nuse alert::{Alert, AlertMethods};\nuse libc::{c_int, c_uchar};\nuse std::cell::{Cell, RefCell};\nuse std::local_data;\nuse std::rc::Rc;\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse servo_msg::compositor_msg::{IdleRenderState, RenderState, RenderingRenderState};\nuse servo_msg::compositor_msg::{FinishedLoading, Blank, ReadyState};\n\nuse glut::glut::{ACTIVE_SHIFT, DOUBLE, WindowHeight};\nuse glut::glut::WindowWidth;\nuse glut::glut;\n\n\/\/ static THROBBER: [char, ..8] = [ '⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷' ];\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n fn new() -> Application {\n glut::init();\n glut::init_display_mode(DOUBLE);\n Application\n }\n}\n\nimpl Drop for Application {\n fn drop(&mut self) {\n drop_local_window();\n }\n}\n\n\/\/\/ The type of a window.\npub struct Window {\n pub glut_window: glut::Window,\n\n pub event_queue: RefCell<~[WindowEvent]>,\n\n pub drag_origin: Point2D<c_int>,\n\n pub mouse_down_button: Cell<c_int>,\n pub mouse_down_point: Cell<Point2D<c_int>>,\n\n pub ready_state: Cell<ReadyState>,\n pub render_state: Cell<RenderState>,\n pub throbber_frame: Cell<u8>,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n fn new(_: &Application) -> Rc<Window> {\n \/\/ Create the GLUT window.\n glut::init_window_size(800, 600);\n let glut_window = glut::create_window(\"Servo\".to_owned());\n\n \/\/ Create our window object.\n let window = Window {\n glut_window: glut_window,\n\n event_queue: RefCell::new(~[]),\n\n drag_origin: Point2D(0 as c_int, 0),\n\n mouse_down_button: Cell::new(0),\n mouse_down_point: Cell::new(Point2D(0 as c_int, 0)),\n\n ready_state: Cell::new(Blank),\n render_state: Cell::new(IdleRenderState),\n throbber_frame: Cell::new(0),\n };\n\n \/\/ Register event handlers.\n\n \/\/Added dummy display callback to freeglut. 
According to freeglut ref, we should register some kind of display callback after freeglut 3.0.\n\n struct DisplayCallbackState;\n impl glut::DisplayCallback for DisplayCallbackState {\n fn call(&self) {\n debug!(\"GLUT display func registgered\");\n }\n }\n glut::display_func(~DisplayCallbackState);\n struct ReshapeCallbackState;\n impl glut::ReshapeCallback for ReshapeCallbackState {\n fn call(&self, width: c_int, height: c_int) {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ResizeWindowEvent(width as uint, height as uint))\n }\n }\n glut::reshape_func(glut_window, ~ReshapeCallbackState);\n struct KeyboardCallbackState;\n impl glut::KeyboardCallback for KeyboardCallbackState {\n fn call(&self, key: c_uchar, _x: c_int, _y: c_int) {\n let tmp = local_window();\n tmp.handle_key(key)\n }\n }\n glut::keyboard_func(~KeyboardCallbackState);\n struct MouseCallbackState;\n impl glut::MouseCallback for MouseCallbackState {\n fn call(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n if button < 3 {\n let tmp = local_window();\n tmp.handle_mouse(button, state, x, y);\n } else {\n match button {\n 3 => {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, 5.0 as f32), Point2D(0.0 as i32, 5.0 as i32)));\n },\n 4 => {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, -5.0 as f32), Point2D(0.0 as i32, -5.0 as i32)));\n },\n _ => {}\n }\n }\n }\n }\n glut::mouse_func(~MouseCallbackState);\n\n let wrapped_window = Rc::new(window);\n\n install_local_window(wrapped_window.clone());\n\n wrapped_window\n }\n\n \/\/\/ Returns the size of the window.\n fn size(&self) -> Size2D<f32> {\n Size2D(glut::get(WindowWidth) as f32, glut::get(WindowHeight) as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n fn present(&self) {\n glut::swap_buffers();\n }\n\n fn recv(&self) -> WindowEvent {\n if !self.event_queue.borrow_mut().is_empty() {\n return self.event_queue.borrow_mut().shift().unwrap();\n }\n\n glut::check_loop();\n\n self.event_queue.borrow_mut().shift().unwrap_or(IdleWindowEvent)\n }\n\n \/\/\/ Sets the ready state.\n fn set_ready_state(&self, ready_state: ReadyState) {\n self.ready_state.set(ready_state);\n \/\/FIXME: set_window_title causes crash with Android version of freeGLUT. Temporarily blocked.\n \/\/self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n fn set_render_state(&self, render_state: RenderState) {\n if self.ready_state.get() == FinishedLoading &&\n self.render_state.get() == RenderingRenderState &&\n render_state == IdleRenderState {\n \/\/ page loaded\n self.event_queue.borrow_mut().push(FinishedWindowEvent);\n }\n\n self.render_state.set(render_state);\n \/\/FIXME: set_window_title causes crash with Android version of freeGLUT. Temporarily blocked.\n \/\/self.update_window_title()\n }\n\n fn hidpi_factor(&self) -> f32 {\n \/\/FIXME: Do nothing in GLUT now.\n 0f32\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n \/\/ fn update_window_title(&self) {\n \/\/ let throbber = THROBBER[self.throbber_frame];\n \/\/ match self.ready_state {\n \/\/ Blank => {\n \/\/ glut::set_window_title(self.glut_window, \"Blank\")\n \/\/ }\n \/\/ Loading => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Loading . Servo\", throbber))\n \/\/ }\n \/\/ PerformingLayout => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Performing Layout . 
Servo\", throbber))\n \/\/ }\n \/\/ FinishedLoading => {\n \/\/ match self.render_state {\n \/\/ RenderingRenderState => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Rendering . Servo\", throbber))\n \/\/ }\n \/\/ IdleRenderState => glut::set_window_title(self.glut_window, \"Servo\"),\n \/\/ }\n \/\/ }\n \/\/ }\n \/\/ }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: u8) {\n debug!(\"got key: {}\", key);\n let modifiers = glut::get_modifiers();\n match key {\n 42 => self.load_url(),\n 43 => self.event_queue.borrow_mut().push(ZoomWindowEvent(1.1)),\n 45 => self.event_queue.borrow_mut().push(ZoomWindowEvent(0.909090909)),\n 56 => self.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, 5.0 as f32), Point2D(0.0 as i32, 5.0 as i32))),\n 50 => self.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, -5.0 as f32), Point2D(0.0 as i32, -5.0 as i32))),\n 127 => {\n if (modifiers & ACTIVE_SHIFT) != 0 {\n self.event_queue.borrow_mut().push(NavigationWindowEvent(Forward));\n }\n else {\n self.event_queue.borrow_mut().push(NavigationWindowEvent(Back));\n }\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a click\n fn handle_mouse(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f32;\n let event = match state {\n glut::MOUSE_DOWN => {\n self.mouse_down_point.set(Point2D(x, y));\n self.mouse_down_button.set(button);\n MouseWindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32))\n }\n glut::MOUSE_UP => {\n if self.mouse_down_button.get() == button {\n let pixel_dist = self.mouse_down_point.get() - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as f32).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = MouseWindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n self.event_queue.borrow_mut().push(MouseWindowEventClass(click_event));\n }\n }\n MouseWindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32))\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. :-(\")\n };\n self.event_queue.borrow_mut().push(MouseWindowEventClass(event));\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n self.event_queue.borrow_mut().push(LoadUrlWindowEvent(\"http:\/\/purple.com\/\".to_owned()))\n } else {\n self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))\n }\n }\n}\n\nstatic TLS_KEY: local_data::Key<Rc<Window>> = &local_data::Key;\n\nfn install_local_window(window: Rc<Window>) {\n local_data::set(TLS_KEY, window);\n}\n\nfn drop_local_window() {\n local_data::pop(TLS_KEY);\n}\n\nfn local_window() -> Rc<Window> {\n local_data::get(TLS_KEY, |v| v.unwrap().clone())\n}\n<commit_msg>~[] to Vec in glut_windowing<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
A windowing implementation using GLUT.\n\nuse windowing::{ApplicationMethods, WindowEvent, WindowMethods};\nuse windowing::{IdleWindowEvent, ResizeWindowEvent, LoadUrlWindowEvent, MouseWindowEventClass};\nuse windowing::{ScrollWindowEvent, ZoomWindowEvent, NavigationWindowEvent, FinishedWindowEvent};\nuse windowing::{MouseWindowClickEvent, MouseWindowMouseDownEvent, MouseWindowMouseUpEvent};\nuse windowing::{Forward, Back};\n\nuse alert::{Alert, AlertMethods};\nuse libc::{c_int, c_uchar};\nuse std::cell::{Cell, RefCell};\nuse std::local_data;\nuse std::rc::Rc;\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse servo_msg::compositor_msg::{IdleRenderState, RenderState, RenderingRenderState};\nuse servo_msg::compositor_msg::{FinishedLoading, Blank, ReadyState};\n\nuse glut::glut::{ACTIVE_SHIFT, DOUBLE, WindowHeight};\nuse glut::glut::WindowWidth;\nuse glut::glut;\n\n\/\/ static THROBBER: [char, ..8] = [ '⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷' ];\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n fn new() -> Application {\n glut::init();\n glut::init_display_mode(DOUBLE);\n Application\n }\n}\n\nimpl Drop for Application {\n fn drop(&mut self) {\n drop_local_window();\n }\n}\n\n\/\/\/ The type of a window.\npub struct Window {\n pub glut_window: glut::Window,\n\n pub event_queue: RefCell<Vec<WindowEvent>>,\n\n pub drag_origin: Point2D<c_int>,\n\n pub mouse_down_button: Cell<c_int>,\n pub mouse_down_point: Cell<Point2D<c_int>>,\n\n pub ready_state: Cell<ReadyState>,\n pub render_state: Cell<RenderState>,\n pub throbber_frame: Cell<u8>,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n fn new(_: &Application) -> Rc<Window> {\n \/\/ Create the GLUT window.\n glut::init_window_size(800, 600);\n let glut_window = glut::create_window(\"Servo\".to_owned());\n\n \/\/ Create our window object.\n let window = Window {\n glut_window: glut_window,\n\n event_queue: RefCell::new(Vec::new()),\n\n drag_origin: Point2D(0 as c_int, 0),\n\n mouse_down_button: Cell::new(0),\n mouse_down_point: Cell::new(Point2D(0 as c_int, 0)),\n\n ready_state: Cell::new(Blank),\n render_state: Cell::new(IdleRenderState),\n throbber_frame: Cell::new(0),\n };\n\n \/\/ Register event handlers.\n\n \/\/Added dummy display callback to freeglut. 
According to freeglut ref, we should register some kind of display callback after freeglut 3.0.\n\n struct DisplayCallbackState;\n impl glut::DisplayCallback for DisplayCallbackState {\n fn call(&self) {\n debug!(\"GLUT display func registgered\");\n }\n }\n glut::display_func(~DisplayCallbackState);\n struct ReshapeCallbackState;\n impl glut::ReshapeCallback for ReshapeCallbackState {\n fn call(&self, width: c_int, height: c_int) {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ResizeWindowEvent(width as uint, height as uint))\n }\n }\n glut::reshape_func(glut_window, ~ReshapeCallbackState);\n struct KeyboardCallbackState;\n impl glut::KeyboardCallback for KeyboardCallbackState {\n fn call(&self, key: c_uchar, _x: c_int, _y: c_int) {\n let tmp = local_window();\n tmp.handle_key(key)\n }\n }\n glut::keyboard_func(~KeyboardCallbackState);\n struct MouseCallbackState;\n impl glut::MouseCallback for MouseCallbackState {\n fn call(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n if button < 3 {\n let tmp = local_window();\n tmp.handle_mouse(button, state, x, y);\n } else {\n match button {\n 3 => {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, 5.0 as f32), Point2D(0.0 as i32, 5.0 as i32)));\n },\n 4 => {\n let tmp = local_window();\n tmp.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, -5.0 as f32), Point2D(0.0 as i32, -5.0 as i32)));\n },\n _ => {}\n }\n }\n }\n }\n glut::mouse_func(~MouseCallbackState);\n\n let wrapped_window = Rc::new(window);\n\n install_local_window(wrapped_window.clone());\n\n wrapped_window\n }\n\n \/\/\/ Returns the size of the window.\n fn size(&self) -> Size2D<f32> {\n Size2D(glut::get(WindowWidth) as f32, glut::get(WindowHeight) as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n fn present(&self) {\n glut::swap_buffers();\n }\n\n fn recv(&self) -> WindowEvent {\n if !self.event_queue.borrow_mut().is_empty() {\n return self.event_queue.borrow_mut().shift().unwrap();\n }\n\n glut::check_loop();\n\n self.event_queue.borrow_mut().shift().unwrap_or(IdleWindowEvent)\n }\n\n \/\/\/ Sets the ready state.\n fn set_ready_state(&self, ready_state: ReadyState) {\n self.ready_state.set(ready_state);\n \/\/FIXME: set_window_title causes crash with Android version of freeGLUT. Temporarily blocked.\n \/\/self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n fn set_render_state(&self, render_state: RenderState) {\n if self.ready_state.get() == FinishedLoading &&\n self.render_state.get() == RenderingRenderState &&\n render_state == IdleRenderState {\n \/\/ page loaded\n self.event_queue.borrow_mut().push(FinishedWindowEvent);\n }\n\n self.render_state.set(render_state);\n \/\/FIXME: set_window_title causes crash with Android version of freeGLUT. Temporarily blocked.\n \/\/self.update_window_title()\n }\n\n fn hidpi_factor(&self) -> f32 {\n \/\/FIXME: Do nothing in GLUT now.\n 0f32\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n \/\/ fn update_window_title(&self) {\n \/\/ let throbber = THROBBER[self.throbber_frame];\n \/\/ match self.ready_state {\n \/\/ Blank => {\n \/\/ glut::set_window_title(self.glut_window, \"Blank\")\n \/\/ }\n \/\/ Loading => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Loading . Servo\", throbber))\n \/\/ }\n \/\/ PerformingLayout => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Performing Layout . 
Servo\", throbber))\n \/\/ }\n \/\/ FinishedLoading => {\n \/\/ match self.render_state {\n \/\/ RenderingRenderState => {\n \/\/ glut::set_window_title(self.glut_window, format!(\"{:c} Rendering . Servo\", throbber))\n \/\/ }\n \/\/ IdleRenderState => glut::set_window_title(self.glut_window, \"Servo\"),\n \/\/ }\n \/\/ }\n \/\/ }\n \/\/ }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: u8) {\n debug!(\"got key: {}\", key);\n let modifiers = glut::get_modifiers();\n match key {\n 42 => self.load_url(),\n 43 => self.event_queue.borrow_mut().push(ZoomWindowEvent(1.1)),\n 45 => self.event_queue.borrow_mut().push(ZoomWindowEvent(0.909090909)),\n 56 => self.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, 5.0 as f32), Point2D(0.0 as i32, 5.0 as i32))),\n 50 => self.event_queue.borrow_mut().push(ScrollWindowEvent(Point2D(0.0, -5.0 as f32), Point2D(0.0 as i32, -5.0 as i32))),\n 127 => {\n if (modifiers & ACTIVE_SHIFT) != 0 {\n self.event_queue.borrow_mut().push(NavigationWindowEvent(Forward));\n }\n else {\n self.event_queue.borrow_mut().push(NavigationWindowEvent(Back));\n }\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a click\n fn handle_mouse(&self, button: c_int, state: c_int, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f32;\n let event = match state {\n glut::MOUSE_DOWN => {\n self.mouse_down_point.set(Point2D(x, y));\n self.mouse_down_button.set(button);\n MouseWindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32))\n }\n glut::MOUSE_UP => {\n if self.mouse_down_button.get() == button {\n let pixel_dist = self.mouse_down_point.get() - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as f32).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = MouseWindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n self.event_queue.borrow_mut().push(MouseWindowEventClass(click_event));\n }\n }\n MouseWindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32))\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. :-(\")\n };\n self.event_queue.borrow_mut().push(MouseWindowEventClass(event));\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n self.event_queue.borrow_mut().push(LoadUrlWindowEvent(\"http:\/\/purple.com\/\".to_owned()))\n } else {\n self.event_queue.borrow_mut().push(LoadUrlWindowEvent(value.clone()))\n }\n }\n}\n\nstatic TLS_KEY: local_data::Key<Rc<Window>> = &local_data::Key;\n\nfn install_local_window(window: Rc<Window>) {\n local_data::set(TLS_KEY, window);\n}\n\nfn drop_local_window() {\n local_data::pop(TLS_KEY);\n}\n\nfn local_window() -> Rc<Window> {\n local_data::get(TLS_KEY, |v| v.unwrap().clone())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #49781 - Robbepop:master, r=nikomatsakis<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #16223: without NLL the `if let` construct together with\n\/\/ the nested box-structure of `Root` causes an unwanted collateral move.\n\n\/\/ The exact error prevented here is:\n\/\/\n\/\/ error[E0382]: use of collaterally moved value: `(root.boxed.rhs as SomeVariant::B).0`\n\/\/ --> src\/main.rs:55:29\n\/\/ |\n\/\/ 56 | lhs: SomeVariant::A(a),\n\/\/ | - value moved here\n\/\/ 57 | rhs: SomeVariant::B(b),\n\/\/ | ^ value used here after move\n\/\/ |\n\/\/ = note: move occurs because the value has type `A`, which does not implement the `Copy` trait\n\n\/\/ must-compile-successfully\n\n#![feature(nll)]\n#![feature(box_patterns)]\n\nstruct Root {\n boxed: Box<SetOfVariants>,\n}\n\nstruct SetOfVariants {\n lhs: SomeVariant,\n rhs: SomeVariant,\n}\n\nenum SomeVariant {\n A(A),\n B(B),\n}\n\nstruct A(String);\nstruct B(String);\n\nfn main() {\n let root = Root {\n boxed: Box::new(SetOfVariants {\n lhs: SomeVariant::A(A(String::from(\"This is A\"))),\n rhs: SomeVariant::B(B(String::from(\"This is B\"))),\n }),\n };\n if let box SetOfVariants {\n lhs: SomeVariant::A(a),\n rhs: SomeVariant::B(b),\n } = root.boxed\n {\n println!(\"a = {}\", a.0);\n println!(\"b = {}\", b.0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! 
* Library features have at most one `since` value\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(PartialEq)]\nenum Status {\n Stable,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n\nstruct Feature {\n name: String,\n level: Status,\n since: String,\n}\n\nstruct LibFeature {\n level: Status,\n since: String,\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let features = collect_lang_features(&path.join(\"libsyntax\/feature_gate.rs\"));\n assert!(!features.is_empty());\n let mut lib_features = HashMap::<String, LibFeature>::new();\n\n let mut contents = String::new();\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(file)).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => {\n err(\"malformed stability attribute\");\n continue;\n }\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err(\"malformed stability attribute\");\n continue;\n }\n None => \"None\",\n };\n\n if features.iter().any(|f| f.name == feature_name) {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(feature_name) {\n if s.level != level {\n err(\"different stability level than before\");\n }\n if s.since != since {\n err(\"different `since` than before\");\n }\n continue;\n }\n lib_features.insert(feature_name.to_owned(),\n LibFeature {\n level: level,\n since: since.to_owned(),\n });\n }\n });\n\n if *bad {\n return;\n }\n\n let mut lines = Vec::new();\n for feature in features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n feature.name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn collect_lang_features(path: &Path) -> Vec<Feature> {\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Unstable,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = 
parts.next().unwrap().trim().trim_matches('\"');\n Some(Feature {\n name: name.to_owned(),\n level: level,\n since: since.to_owned(),\n })\n })\n .collect()\n}\n<commit_msg>tidy features: use 2-parameter form of internal try macro for open err<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(PartialEq)]\nenum Status {\n Stable,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n\nstruct Feature {\n name: String,\n level: Status,\n since: String,\n}\n\nstruct LibFeature {\n level: Status,\n since: String,\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let features = collect_lang_features(&path.join(\"libsyntax\/feature_gate.rs\"));\n assert!(!features.is_empty());\n let mut lib_features = HashMap::<String, LibFeature>::new();\n\n let mut contents = String::new();\n super::walk(path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n println!(\"{}:{}: {}\", file.display(), i + 1, msg);\n *bad = true;\n };\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => {\n err(\"malformed stability attribute\");\n continue;\n }\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err(\"malformed stability attribute\");\n continue;\n }\n None => \"None\",\n };\n\n if features.iter().any(|f| f.name == feature_name) {\n err(\"duplicating a lang feature\");\n }\n if let Some(ref s) = lib_features.get(feature_name) {\n if s.level != level {\n err(\"different stability level than before\");\n }\n if s.since != since {\n err(\"different `since` than before\");\n }\n continue;\n }\n lib_features.insert(feature_name.to_owned(),\n LibFeature {\n level: level,\n since: since.to_owned(),\n });\n }\n });\n\n if *bad {\n return;\n }\n\n let mut lines = Vec::new();\n for feature in features {\n 
lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n feature.name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn collect_lang_features(path: &Path) -> Vec<Feature> {\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Unstable,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n Some(Feature {\n name: name.to_owned(),\n level: level,\n since: since.to_owned(),\n })\n })\n .collect()\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(globs)]\n\nextern crate piston;\n\nextern crate hgl;\nextern crate gl;\nextern crate sdl2_game_window;\n\nuse sdl2_game_window::GameWindowSDL2;\nuse piston::{\n Game, \n GameIteratorSettings,\n GameWindowSettings, \n RenderArgs\n};\n\nuse std::mem::size_of;\nuse hgl::{Shader, Program, Triangles, Vbo, Vao};\n\n#[allow(dead_code)]\npub struct App {\n program: Program,\n vao: Vao,\n vbo: Vbo\n}\n\nstatic VERTEX_SHADER: &'static str = r\"\n attribute vec2 position;\n \n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n\";\n\nstatic FRAGMENT_SHADER: &'static str = r\"\n void main() {\n gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n }\n\";\n\nimpl App {\n \/\/\/ Creates a new application.\n pub fn new() -> App {\n let vao = Vao::new();\n vao.bind();\n\n let program = Program::link([Shader::compile(VERTEX_SHADER, hgl::VertexShader),\n Shader::compile(FRAGMENT_SHADER, hgl::FragmentShader)]).unwrap();\n program.bind();\n\n let vbo = Vbo::from_data([\n 0.0f32, 0.5, 1.0, 0.0, 0.0,\n 0.5, -0.5, 0.0, 1.0, 0.0,\n -0.5, -0.5, 0.0, 0.0, 1.0\n ], hgl::StaticDraw);\n\n vao.enable_attrib(&program, \"position\", gl::FLOAT, 2, 5*size_of::<f32>() as i32, 0);\n vao.enable_attrib(&program, \"color\", gl::FLOAT, 3, 5*size_of::<f32>() as i32, 2*size_of::<f32>());\n vbo.bind();\n\n App {\n program: program,\n vao: vao,\n vbo: vbo\n }\n }\n}\n\nimpl Game for App {\n fn render(&mut self, args: &mut RenderArgs) {\n gl::Viewport(0, 0, args.width as i32, args.height as i32);\n gl::ClearColor(0.0, 0.0, 0.0, 0.1);\n gl::Clear(gl::COLOR_BUFFER_BIT);\n self.vao.draw_array(Triangles, 0, 3);\n }\n}\n\nfn main() {\n let mut window = GameWindowSDL2::new(\n GameWindowSettings {\n title: \"Test\".to_string(),\n size: [800, 600],\n fullscreen: false,\n exit_on_esc: true\n }\n );\n\n let game_iter_settings = GameIteratorSettings {\n updates_per_second: 120,\n max_frames_per_second: 60,\n };\n App::new().run(&mut window, &game_iter_settings);\n}\n\n<commit_msg>Removed hgl-rs triangle example<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse font::{Font, FontDescriptor, FontGroup, FontHandleMethods, SelectorPlatformIdentifier};\nuse font::{SpecifiedFontStyle, UsedFontStyle};\nuse font_list::FontList;\nuse platform::font::FontHandle;\nuse platform::font_context::FontContextHandle;\n\nuse azure::azure_hl::BackendType;\nuse collections::hashmap::HashMap;\nuse servo_util::cache::{Cache, LRUCache};\nuse servo_util::time::ProfilerChan;\n\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\n\/\/\/ Information needed to create a font context.\n#[deriving(Clone)]\npub struct FontContextInfo {\n \/\/\/ The painting backend we're using.\n pub backend: BackendType,\n\n \/\/\/ Whether we need a font list.\n pub needs_font_list: bool,\n\n \/\/\/ A channel up to the profiler.\n pub profiler_chan: ProfilerChan,\n}\n\npub trait FontContextHandleMethods {\n fn create_font_from_identifier(&self, ~str, UsedFontStyle) -> Result<FontHandle, ()>;\n}\n\npub struct FontContext {\n pub instance_cache: LRUCache<FontDescriptor, Rc<RefCell<Font>>>,\n pub font_list: Option<FontList>, \/\/ only needed by layout\n pub group_cache: LRUCache<SpecifiedFontStyle, Rc<RefCell<FontGroup>>>,\n pub handle: FontContextHandle,\n pub backend: BackendType,\n pub generic_fonts: HashMap<~str,~str>,\n pub profiler_chan: ProfilerChan,\n}\n\nimpl FontContext {\n pub fn new(info: FontContextInfo) -> FontContext {\n let handle = FontContextHandle::new();\n let font_list = if info.needs_font_list {\n Some(FontList::new(&handle, info.profiler_chan.clone()))\n } else {\n None\n };\n\n \/\/ TODO: Allow users to specify these.\n let mut generic_fonts = HashMap::with_capacity(5);\n generic_fonts.insert(\"serif\".to_owned(), \"Times New Roman\".to_owned());\n generic_fonts.insert(\"sans-serif\".to_owned(), \"Arial\".to_owned());\n generic_fonts.insert(\"cursive\".to_owned(), \"Apple Chancery\".to_owned());\n generic_fonts.insert(\"fantasy\".to_owned(), \"Papyrus\".to_owned());\n generic_fonts.insert(\"monospace\".to_owned(), \"Menlo\".to_owned());\n\n FontContext {\n instance_cache: LRUCache::new(10),\n font_list: font_list,\n group_cache: LRUCache::new(10),\n handle: handle,\n backend: info.backend,\n generic_fonts: generic_fonts,\n profiler_chan: info.profiler_chan.clone(),\n }\n }\n\n pub fn get_resolved_font_for_style(&mut self, style: &SpecifiedFontStyle)\n -> Rc<RefCell<FontGroup>> {\n match self.group_cache.find(style) {\n Some(fg) => {\n debug!(\"font group cache hit\");\n fg\n },\n None => {\n debug!(\"font group cache miss\");\n let fg = self.create_font_group(style);\n self.group_cache.insert(style.clone(), fg.clone());\n fg\n }\n }\n }\n\n pub fn get_font_by_descriptor(&mut self, desc: &FontDescriptor)\n -> Result<Rc<RefCell<Font>>, ()> {\n match self.instance_cache.find(desc) {\n Some(f) => {\n debug!(\"font cache hit\");\n Ok(f)\n },\n None => {\n debug!(\"font cache miss\");\n let result = self.create_font_instance(desc);\n match result.clone() {\n Ok(ref font) => {\n self.instance_cache.insert(desc.clone(), font.clone());\n }, _ => {}\n };\n result\n }\n }\n }\n\n fn transform_family(&self, family: &~str) -> ~str {\n debug!(\"(transform family) searching for `{:s}`\", family.as_slice());\n match self.generic_fonts.find(family) {\n None => family.to_owned(),\n Some(mapped_family) => (*mapped_family).clone()\n }\n }\n\n fn create_font_group(&mut self, style: &SpecifiedFontStyle) -> Rc<RefCell<FontGroup>> {\n let mut fonts = vec!();\n\n debug!(\"(create 
font group) --- starting ---\");\n\n \/\/ TODO(Issue #193): make iteration over 'font-family' more robust.\n for family in style.families.iter() {\n let transformed_family_name = self.transform_family(family);\n debug!(\"(create font group) transformed family is `{:s}`\", transformed_family_name);\n let mut found = false;\n\n let result = match self.font_list {\n Some(ref mut fl) => {\n let font_in_family = fl.find_font_in_family(&transformed_family_name, style);\n match font_in_family {\n Some(font_entry) => {\n let font_id =\n SelectorPlatformIdentifier(font_entry.handle.face_identifier());\n let font_desc = FontDescriptor::new((*style).clone(), font_id);\n Some(font_desc)\n },\n None => {\n None\n }\n }\n }\n None => None,\n };\n\n match result {\n Some(ref result) => {\n found = true;\n let instance = self.get_font_by_descriptor(result);\n let _ = instance.map(|font| fonts.push(font.clone()));\n },\n _ => {}\n }\n\n if !found {\n debug!(\"(create font group) didn't find `{:s}`\", transformed_family_name);\n }\n }\n\n if fonts.len() == 0 {\n let last_resort = FontList::get_last_resort_font_families();\n for family in last_resort.iter() {\n let font_desc = match self.font_list {\n Some(ref mut font_list) => {\n let font_desc = {\n let font_entry = font_list.find_font_in_family(family, style);\n match font_entry {\n Some(v) => {\n let font_id =\n SelectorPlatformIdentifier(v.handle.face_identifier());\n Some(FontDescriptor::new((*style).clone(), font_id))\n },\n None => {\n None\n }\n }\n };\n font_desc\n },\n None => {\n None\n }\n };\n\n match font_desc {\n Some(ref fd) => {\n let instance = self.get_font_by_descriptor(fd);\n let _ = instance.map(|font| fonts.push(font.clone()));\n },\n None => { }\n };\n }\n }\n assert!(fonts.len() > 0);\n \/\/ TODO(Issue #179): Split FontStyle into specified and used styles\n let used_style = (*style).clone();\n\n debug!(\"(create font group) --- finished ---\");\n\n Rc::new(\n RefCell::new(\n FontGroup::new(style.families.clone(), &used_style, fonts)))\n }\n\n fn create_font_instance(&self, desc: &FontDescriptor) -> Result<Rc<RefCell<Font>>, ()> {\n return match &desc.selector {\n \/\/ TODO(Issue #174): implement by-platform-name font selectors.\n &SelectorPlatformIdentifier(ref identifier) => {\n let result_handle = self.handle.create_font_from_identifier((*identifier).clone(),\n desc.style.clone());\n result_handle.and_then(|handle| {\n Ok(\n Rc::new(\n RefCell::new(\n Font::new_from_adopted_handle(self,\n handle,\n &desc.style,\n self.backend))))\n })\n }\n };\n }\n}\n<commit_msg>auto merge of #2426 : bjwbell\/servo\/add-message-to-assert, r=pcwalton<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse font::{Font, FontDescriptor, FontGroup, FontHandleMethods, SelectorPlatformIdentifier};\nuse font::{SpecifiedFontStyle, UsedFontStyle};\nuse font_list::FontList;\nuse platform::font::FontHandle;\nuse platform::font_context::FontContextHandle;\n\nuse azure::azure_hl::BackendType;\nuse collections::hashmap::HashMap;\nuse servo_util::cache::{Cache, LRUCache};\nuse servo_util::time::ProfilerChan;\n\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\n\/\/\/ Information needed to create a font context.\n#[deriving(Clone)]\npub struct FontContextInfo {\n \/\/\/ The painting backend we're using.\n pub backend: BackendType,\n\n \/\/\/ Whether we need a font list.\n pub needs_font_list: bool,\n\n \/\/\/ A channel up to the profiler.\n pub profiler_chan: ProfilerChan,\n}\n\npub trait FontContextHandleMethods {\n fn create_font_from_identifier(&self, ~str, UsedFontStyle) -> Result<FontHandle, ()>;\n}\n\npub struct FontContext {\n pub instance_cache: LRUCache<FontDescriptor, Rc<RefCell<Font>>>,\n pub font_list: Option<FontList>, \/\/ only needed by layout\n pub group_cache: LRUCache<SpecifiedFontStyle, Rc<RefCell<FontGroup>>>,\n pub handle: FontContextHandle,\n pub backend: BackendType,\n pub generic_fonts: HashMap<~str,~str>,\n pub profiler_chan: ProfilerChan,\n}\n\nimpl FontContext {\n pub fn new(info: FontContextInfo) -> FontContext {\n let handle = FontContextHandle::new();\n let font_list = if info.needs_font_list {\n Some(FontList::new(&handle, info.profiler_chan.clone()))\n } else {\n None\n };\n\n \/\/ TODO: Allow users to specify these.\n let mut generic_fonts = HashMap::with_capacity(5);\n generic_fonts.insert(\"serif\".to_owned(), \"Times New Roman\".to_owned());\n generic_fonts.insert(\"sans-serif\".to_owned(), \"Arial\".to_owned());\n generic_fonts.insert(\"cursive\".to_owned(), \"Apple Chancery\".to_owned());\n generic_fonts.insert(\"fantasy\".to_owned(), \"Papyrus\".to_owned());\n generic_fonts.insert(\"monospace\".to_owned(), \"Menlo\".to_owned());\n\n FontContext {\n instance_cache: LRUCache::new(10),\n font_list: font_list,\n group_cache: LRUCache::new(10),\n handle: handle,\n backend: info.backend,\n generic_fonts: generic_fonts,\n profiler_chan: info.profiler_chan.clone(),\n }\n }\n\n pub fn get_resolved_font_for_style(&mut self, style: &SpecifiedFontStyle)\n -> Rc<RefCell<FontGroup>> {\n match self.group_cache.find(style) {\n Some(fg) => {\n debug!(\"font group cache hit\");\n fg\n },\n None => {\n debug!(\"font group cache miss\");\n let fg = self.create_font_group(style);\n self.group_cache.insert(style.clone(), fg.clone());\n fg\n }\n }\n }\n\n pub fn get_font_by_descriptor(&mut self, desc: &FontDescriptor)\n -> Result<Rc<RefCell<Font>>, ()> {\n match self.instance_cache.find(desc) {\n Some(f) => {\n debug!(\"font cache hit\");\n Ok(f)\n },\n None => {\n debug!(\"font cache miss\");\n let result = self.create_font_instance(desc);\n match result.clone() {\n Ok(ref font) => {\n self.instance_cache.insert(desc.clone(), font.clone());\n }, _ => {}\n };\n result\n }\n }\n }\n\n fn transform_family(&self, family: &~str) -> ~str {\n debug!(\"(transform family) searching for `{:s}`\", family.as_slice());\n match self.generic_fonts.find(family) {\n None => family.to_owned(),\n Some(mapped_family) => (*mapped_family).clone()\n }\n }\n\n fn create_font_group(&mut self, style: &SpecifiedFontStyle) -> Rc<RefCell<FontGroup>> {\n let mut fonts = vec!();\n\n debug!(\"(create font group) --- starting ---\");\n\n \/\/ TODO(Issue #193): make iteration over 'font-family' more robust.\n for 
family in style.families.iter() {\n let transformed_family_name = self.transform_family(family);\n debug!(\"(create font group) transformed family is `{:s}`\", transformed_family_name);\n let mut found = false;\n\n let result = match self.font_list {\n Some(ref mut fl) => {\n let font_in_family = fl.find_font_in_family(&transformed_family_name, style);\n match font_in_family {\n Some(font_entry) => {\n let font_id =\n SelectorPlatformIdentifier(font_entry.handle.face_identifier());\n let font_desc = FontDescriptor::new((*style).clone(), font_id);\n Some(font_desc)\n },\n None => {\n None\n }\n }\n }\n None => None,\n };\n\n match result {\n Some(ref result) => {\n found = true;\n let instance = self.get_font_by_descriptor(result);\n let _ = instance.map(|font| fonts.push(font.clone()));\n },\n _ => {}\n }\n\n if !found {\n debug!(\"(create font group) didn't find `{:s}`\", transformed_family_name);\n }\n }\n\n if fonts.len() == 0 {\n let last_resort = FontList::get_last_resort_font_families();\n for family in last_resort.iter() {\n let font_desc = match self.font_list {\n Some(ref mut font_list) => {\n let font_desc = {\n let font_entry = font_list.find_font_in_family(family, style);\n match font_entry {\n Some(v) => {\n let font_id =\n SelectorPlatformIdentifier(v.handle.face_identifier());\n Some(FontDescriptor::new((*style).clone(), font_id))\n },\n None => {\n None\n }\n }\n };\n font_desc\n },\n None => {\n None\n }\n };\n\n match font_desc {\n Some(ref fd) => {\n let instance = self.get_font_by_descriptor(fd);\n let _ = instance.map(|font| fonts.push(font.clone()));\n },\n None => { }\n };\n }\n }\n assert!(fonts.len() > 0, \"No matching font(s), are the appropriate fonts installed?\");\n \/\/ TODO(Issue #179): Split FontStyle into specified and used styles\n let used_style = (*style).clone();\n\n debug!(\"(create font group) --- finished ---\");\n\n Rc::new(\n RefCell::new(\n FontGroup::new(style.families.clone(), &used_style, fonts)))\n }\n\n fn create_font_instance(&self, desc: &FontDescriptor) -> Result<Rc<RefCell<Font>>, ()> {\n return match &desc.selector {\n \/\/ TODO(Issue #174): implement by-platform-name font selectors.\n &SelectorPlatformIdentifier(ref identifier) => {\n let result_handle = self.handle.create_font_from_identifier((*identifier).clone(),\n desc.style.clone());\n result_handle.and_then(|handle| {\n Ok(\n Rc::new(\n RefCell::new(\n Font::new_from_adopted_handle(self,\n handle,\n &desc.style,\n self.backend))))\n })\n }\n };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Cosmetic changes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Hotfix: Ignore code snippet here<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary call<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reimplement the hasher<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement $replace() and $replacen()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that `Clone` is correctly implemented for builtin types.\n\/\/ Also test that cloning an array or a tuple is done right, i.e.\n\/\/ each component is cloned.\n\nfn test_clone<T: Clone>(arg: T) {\n let _ = arg.clone();\n}\n\nfn foo() { }\n\n#[derive(Debug, PartialEq, Eq)]\nstruct S(i32);\n\nimpl Clone for S {\n fn clone(&self) -> Self {\n S(self.0 + 1)\n }\n}\n\nfn main() {\n test_clone(foo);\n test_clone([1; 56]);\n test_clone((1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1));\n\n let a = [S(0), S(1), S(2)];\n let b = [S(1), S(2), S(3)];\n assert_eq!(b, a.clone());\n\n let a = (\n (S(1), S(0)),\n (\n (S(0), S(0), S(1)),\n S(0)\n )\n );\n let b = (\n (S(2), S(1)),\n (\n (S(1), S(1), S(2)),\n S(1)\n )\n );\n assert_eq!(b, a.clone());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add handler for settled events to serial_16550<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start itermut draft<commit_after>#![allow(dead_code)]\n\n\/\/\/ # Mutable Iterators\n\/\/\/\n\/\/\/ We will implement a simple tree that could, say, represent a simple DOM\n\/\/\/ structure. You may notice that we don't actually allow for text in our\n\/\/\/ simple representation, but this isn't important here, what we really want\n\/\/\/ to investigate is how we would implement a mutable iterator using our custom\n\/\/\/ data structure. Our DOM structure will only allow for four different types\n\/\/\/ of nodes: Header, Paragraph, Text, and Images.\n\nenum NodeType {\n Header,\n Paragraph,\n Text,\n Image,\n}\n\n\/\/\/ Each node with have a list of `children` nodes and a tag for the respective\n\/\/\/ DOM element type.\n\nstruct DomNode {\n children: Vec<DomNode>,\n node_type: NodeType\n}\n\n\/\/\/ To implement an iterator, we essentially just need to tell Rust two things:\n\/\/\/ a. What kind of elements will we be iteratorating over? In this example\n\/\/\/ we will be iteratorating over the nodes of our Dom, so we will want\n\/\/\/ to iterate over `NodeType`s.\n\/\/\/ b. What is the next element?\n\/\/\/\n\/\/\/ We will therefore have the responsibility of keeping track of where we are at\n\/\/\/ while iteratorating over our DOM. We will do this by constructing what is\n\/\/\/ oftern called an \"iterator interface\". We make one like this:\n\nstruct DomMutIterator<'a> {\n data: &'a mut DomNode,\n cursor: usize,\n}\n\n\/\/\/ We provide answers for the questions posted above for Rust by implementing an\n\/\/\/ `Iterator` for our iterator interface.\n\nimpl<'a> Iterator for DomMutIterator<'a> {\n type Item = &'a mut NodeType;\n fn next(&mut self) -> Option<&'a mut NodeType> {\n \n \/\/ This part will require some thought...\n \n unimplemented!();\n }\n}\n\nfn main() { \n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Updates<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n let x: i32 = 8;\n {\n println!(\"{}\", x);\n let x = 12;\n println!(\"{}\", x);\n }\n println!(\"{}\", x);\n let x = 42;\n println!(\"{}\", x);\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse std::str;\n\nuse resource_task::{Done, Payload, Metadata, LoadData, LoadResponse, LoaderTask, start_sending};\n\nuse serialize::base64::FromBase64;\n\nuse http::headers::test_utils::from_stream_with_str;\nuse http::headers::content_type::MediaType;\nuse url::{percent_decode, OtherSchemeData};\n\n\npub fn factory() -> LoaderTask {\n proc(url, start_chan) {\n \/\/ NB: we don't spawn a new task.\n \/\/ Hypothesis: data URLs are too small for parallel base64 etc. to be worth it.\n \/\/ Should be tested at some point.\n load(url, start_chan)\n }\n}\n\nfn load(load_data: LoadData, start_chan: Sender<LoadResponse>) {\n let url = load_data.url;\n assert!(\"data\" == url.scheme.as_slice());\n\n let mut metadata = Metadata::default(url.clone());\n\n \/\/ Split out content type and data.\n let mut scheme_data = match url.scheme_data {\n OtherSchemeData(scheme_data) => scheme_data,\n _ => fail!(\"Expected a non-relative scheme URL.\")\n };\n match url.query {\n Some(query) => {\n scheme_data.push_str(\"?\");\n scheme_data.push_str(query.as_slice());\n },\n None => ()\n }\n let parts: Vec<&str> = scheme_data.as_slice().splitn(',', 1).collect();\n if parts.len() != 2 {\n start_sending(start_chan, metadata).send(Done(Err(\"invalid data uri\".to_string())));\n return;\n }\n\n \/\/ \";base64\" must come at the end of the content type, per RFC 2397.\n \/\/ rust-http will fail to parse it because there's no =value part.\n let mut is_base64 = false;\n let mut ct_str = *parts.get(0);\n if ct_str.ends_with(\";base64\") {\n is_base64 = true;\n ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);\n }\n\n \/\/ Parse the content type using rust-http.\n \/\/ FIXME: this can go into an infinite loop! (rust-http #25)\n let content_type: Option<MediaType> = from_stream_with_str(ct_str);\n metadata.set_content_type(&content_type);\n\n let progress_chan = start_sending(start_chan, metadata);\n let bytes = percent_decode(parts.get(1).as_bytes());\n\n if is_base64 {\n \/\/ See #1268\n let bytes = bytes.move_iter().filter(|&b| b != ' ' as u8).collect::<Vec<u8>>();\n \/\/ FIXME(#2877): use bytes.as_slice().from_base64() when we upgrade to a Rust version\n \/\/ that includes https:\/\/github.com\/rust-lang\/rust\/pull\/15810\n let fake_utf8 = unsafe { str::raw::from_utf8(bytes.as_slice()) };\n match fake_utf8.from_base64() {\n Err(..) 
=> {\n progress_chan.send(Done(Err(\"non-base64 data uri\".to_string())));\n }\n Ok(data) => {\n progress_chan.send(Payload(data));\n progress_chan.send(Done(Ok(())));\n }\n }\n } else {\n progress_chan.send(Payload(bytes));\n progress_chan.send(Done(Ok(())));\n }\n}\n\n#[cfg(test)]\nfn assert_parse(url: &'static str,\n content_type: Option<(String, String)>,\n charset: Option<String>,\n data: Option<Vec<u8>>) {\n use std::comm;\n use url::Url;\n\n let (start_chan, start_port) = comm::channel();\n load(LoadData::new(Url::parse(url).unwrap()), start_chan);\n\n let response = start_port.recv();\n assert_eq!(&response.metadata.content_type, &content_type);\n assert_eq!(&response.metadata.charset, &charset);\n\n let progress = response.progress_port.recv();\n\n match data {\n None => {\n assert_eq!(progress, Done(Err(\"invalid data uri\".to_string())));\n }\n Some(dat) => {\n assert_eq!(progress, Payload(dat));\n assert_eq!(response.progress_port.recv(), Done(Ok(())));\n }\n }\n}\n\n#[test]\nfn empty_invalid() {\n assert_parse(\"data:\", None, None, None);\n}\n\n#[test]\nfn plain() {\n assert_parse(\"data:,hello%20world\", None, None, Some(bytes!(\"hello world\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn plain_ct() {\n assert_parse(\"data:text\/plain,hello\",\n Some((\"text\".to_string(), \"plain\".to_string())), None, Some(bytes!(\"hello\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn plain_charset() {\n assert_parse(\"data:text\/plain;charset=latin1,hello\",\n Some((\"text\".to_string(), \"plain\".to_string())), Some(\"latin1\".to_string()), Some(bytes!(\"hello\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn base64() {\n assert_parse(\"data:;base64,C62+7w==\", None, None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));\n}\n\n#[test]\nfn base64_ct() {\n assert_parse(\"data:application\/octet-stream;base64,C62+7w==\",\n Some((\"application\".to_string(), \"octet-stream\".to_string())), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));\n}\n\n#[test]\nfn base64_charset() {\n assert_parse(\"data:text\/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==\",\n Some((\"text\".to_string(), \"plain\".to_string())), Some(\"koi8-r\".to_string()),\n Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4)));\n}\n<commit_msg>fixup! Ignore spaces in base64 data URLs. Fix #1268.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse std::str;\n\nuse resource_task::{Done, Payload, Metadata, LoadData, LoadResponse, LoaderTask, start_sending};\n\nuse serialize::base64::FromBase64;\n\nuse http::headers::test_utils::from_stream_with_str;\nuse http::headers::content_type::MediaType;\nuse url::{percent_decode, OtherSchemeData};\n\n\npub fn factory() -> LoaderTask {\n proc(url, start_chan) {\n \/\/ NB: we don't spawn a new task.\n \/\/ Hypothesis: data URLs are too small for parallel base64 etc. 
to be worth it.\n \/\/ Should be tested at some point.\n load(url, start_chan)\n }\n}\n\nfn load(load_data: LoadData, start_chan: Sender<LoadResponse>) {\n let url = load_data.url;\n assert!(\"data\" == url.scheme.as_slice());\n\n let mut metadata = Metadata::default(url.clone());\n\n \/\/ Split out content type and data.\n let mut scheme_data = match url.scheme_data {\n OtherSchemeData(scheme_data) => scheme_data,\n _ => fail!(\"Expected a non-relative scheme URL.\")\n };\n match url.query {\n Some(query) => {\n scheme_data.push_str(\"?\");\n scheme_data.push_str(query.as_slice());\n },\n None => ()\n }\n let parts: Vec<&str> = scheme_data.as_slice().splitn(',', 1).collect();\n if parts.len() != 2 {\n start_sending(start_chan, metadata).send(Done(Err(\"invalid data uri\".to_string())));\n return;\n }\n\n \/\/ \";base64\" must come at the end of the content type, per RFC 2397.\n \/\/ rust-http will fail to parse it because there's no =value part.\n let mut is_base64 = false;\n let mut ct_str = *parts.get(0);\n if ct_str.ends_with(\";base64\") {\n is_base64 = true;\n ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);\n }\n\n \/\/ Parse the content type using rust-http.\n \/\/ FIXME: this can go into an infinite loop! (rust-http #25)\n let content_type: Option<MediaType> = from_stream_with_str(ct_str);\n metadata.set_content_type(&content_type);\n\n let progress_chan = start_sending(start_chan, metadata);\n let bytes = percent_decode(parts.get(1).as_bytes());\n\n if is_base64 {\n \/\/ FIXME(#2909): It’s unclear what to do with non-alphabet characters,\n \/\/ but Acid 3 apparently depends on spaces being ignored.\n let bytes = bytes.move_iter().filter(|&b| b != ' ' as u8).collect::<Vec<u8>>();\n \/\/ FIXME(#2877): use bytes.as_slice().from_base64() when we upgrade to a Rust version\n \/\/ that includes https:\/\/github.com\/rust-lang\/rust\/pull\/15810\n let fake_utf8 = unsafe { str::raw::from_utf8(bytes.as_slice()) };\n match fake_utf8.from_base64() {\n Err(..) 
=> {\n progress_chan.send(Done(Err(\"non-base64 data uri\".to_string())));\n }\n Ok(data) => {\n progress_chan.send(Payload(data));\n progress_chan.send(Done(Ok(())));\n }\n }\n } else {\n progress_chan.send(Payload(bytes));\n progress_chan.send(Done(Ok(())));\n }\n}\n\n#[cfg(test)]\nfn assert_parse(url: &'static str,\n content_type: Option<(String, String)>,\n charset: Option<String>,\n data: Option<Vec<u8>>) {\n use std::comm;\n use url::Url;\n\n let (start_chan, start_port) = comm::channel();\n load(LoadData::new(Url::parse(url).unwrap()), start_chan);\n\n let response = start_port.recv();\n assert_eq!(&response.metadata.content_type, &content_type);\n assert_eq!(&response.metadata.charset, &charset);\n\n let progress = response.progress_port.recv();\n\n match data {\n None => {\n assert_eq!(progress, Done(Err(\"invalid data uri\".to_string())));\n }\n Some(dat) => {\n assert_eq!(progress, Payload(dat));\n assert_eq!(response.progress_port.recv(), Done(Ok(())));\n }\n }\n}\n\n#[test]\nfn empty_invalid() {\n assert_parse(\"data:\", None, None, None);\n}\n\n#[test]\nfn plain() {\n assert_parse(\"data:,hello%20world\", None, None, Some(bytes!(\"hello world\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn plain_ct() {\n assert_parse(\"data:text\/plain,hello\",\n Some((\"text\".to_string(), \"plain\".to_string())), None, Some(bytes!(\"hello\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn plain_charset() {\n assert_parse(\"data:text\/plain;charset=latin1,hello\",\n Some((\"text\".to_string(), \"plain\".to_string())), Some(\"latin1\".to_string()), Some(bytes!(\"hello\").iter().map(|&x| x).collect()));\n}\n\n#[test]\nfn base64() {\n assert_parse(\"data:;base64,C62+7w==\", None, None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));\n}\n\n#[test]\nfn base64_ct() {\n assert_parse(\"data:application\/octet-stream;base64,C62+7w==\",\n Some((\"application\".to_string(), \"octet-stream\".to_string())), None, Some(vec!(0x0B, 0xAD, 0xBE, 0xEF)));\n}\n\n#[test]\nfn base64_charset() {\n assert_parse(\"data:text\/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==\",\n Some((\"text\".to_string(), \"plain\".to_string())), Some(\"koi8-r\".to_string()),\n Some(vec!(0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4)));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Crate ruma_api contains core types used to define the requests and responses for each endpoint\n\/\/! in the various [Matrix](https:\/\/matrix.org) API specifications.\n\/\/! These types can be shared by client and server code for all Matrix APIs.\n\/\/!\n\/\/! When implementing a new Matrix API, each endpoint has a type that implements `Endpoint`, plus\n\/\/! the necessary associated types.\n\/\/! An implementation of `Endpoint` contains all the information about the HTTP method, the path and\n\/\/! input parameters for requests, and the structure of a successful response.\n\/\/! Such types can then be used by client code to make requests, and by server code to fulfill\n\/\/! 
those requests.\n#![deny(missing_debug_implementations)]\n#![deny(missing_docs)]\n#![feature(try_from)]\n\nextern crate futures;\nextern crate http;\n#[cfg(test)]\nextern crate ruma_identifiers;\n#[cfg(test)]\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\nextern crate serde_urlencoded;\n\nuse std::convert::TryInto;\nuse std::io;\n\nuse futures::future::FutureFrom;\nuse http::{Method, Request, Response, StatusCode};\n\n\/\/\/ A Matrix API endpoint.\npub trait Endpoint<T, U> {\n \/\/\/ Data needed to make a request to the endpoint.\n type Request: TryInto<Request<T>, Error = Error>;\n \/\/\/ Data returned from the endpoint.\n type Response: FutureFrom<Response<U>, Error = Error>;\n\n \/\/\/ Metadata about the endpoint.\n const METADATA: Metadata;\n}\n\n\/\/\/ An error when converting an `Endpoint::Request` to a `http::Request` or a `http::Response` to\n\/\/\/ an `Endpoint::Response`.\n#[derive(Debug)]\npub enum Error {\n \/\/\/ An HTTP error.\n Http(http::Error),\n \/\/\/ A I\/O error.\n Io(io::Error),\n \/\/\/ A Serde JSON error.\n SerdeJson(serde_json::Error),\n \/\/\/ A Serde URL encoding error.\n SerdeUrlEncoded(serde_urlencoded::ser::Error),\n \/\/\/ An HTTP status code indicating error.\n StatusCode(StatusCode),\n}\n\nimpl From<http::Error> for Error {\n fn from(error: http::Error) -> Self {\n Error::Http(error)\n }\n}\n\nimpl From<io::Error> for Error {\n fn from(error: io::Error) -> Self {\n Error::Io(error)\n }\n}\n\nimpl From<serde_json::Error> for Error {\n fn from(error: serde_json::Error) -> Self {\n Error::SerdeJson(error)\n }\n}\n\nimpl From<serde_urlencoded::ser::Error> for Error {\n fn from(error: serde_urlencoded::ser::Error) -> Self {\n Error::SerdeUrlEncoded(error)\n }\n}\n\n\/\/\/ Metadata about an API endpoint.\n#[derive(Clone, Debug)]\npub struct Metadata {\n \/\/\/ A human-readable description of the endpoint.\n pub description: &'static str,\n \/\/\/ The HTTP method used by this endpoint.\n pub method: Method,\n \/\/\/ A unique identifier for this endpoint.\n pub name: &'static str,\n \/\/\/ The path of this endpoint's URL, with variable names where path parameters should be filled\n \/\/\/ in during a request.\n pub path: &'static str,\n \/\/\/ Whether or not this endpoint is rate limited by the server.\n pub rate_limited: bool,\n \/\/\/ Whether or not the server requires an authenticated user for this endpoint.\n pub requires_authentication: bool,\n}\n\n#[cfg(test)]\nmod tests {\n \/\/\/ PUT \/_matrix\/client\/r0\/directory\/room\/:room_alias\n pub mod create {\n use std::convert::TryFrom;\n\n use futures::future::{err, ok, FutureFrom, FutureResult};\n use http::method::Method;\n use http::{Request as HttpRequest, Response as HttpResponse};\n use ruma_identifiers::{RoomAliasId, RoomId};\n use serde_json;\n\n use super::super::{Endpoint as ApiEndpoint, Error, Metadata};\n\n #[derive(Debug)]\n pub struct Endpoint;\n\n impl ApiEndpoint<Vec<u8>, Vec<u8>> for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: Metadata = Metadata {\n description: \"Add an alias to a room.\",\n method: Method::PUT,\n name: \"create_alias\",\n path: \"\/_matrix\/client\/r0\/directory\/room\/:room_alias\",\n rate_limited: false,\n requires_authentication: true,\n };\n }\n\n \/\/\/ A request to create a new room alias.\n #[derive(Debug)]\n pub struct Request {\n pub room_id: RoomId, \/\/ body\n pub room_alias: RoomAliasId, \/\/ path\n }\n\n #[derive(Debug, Serialize)]\n struct RequestBody {\n room_id: RoomId,\n }\n\n impl 
TryFrom<Request> for HttpRequest<Vec<u8>> {\n type Error = Error;\n\n fn try_from(request: Request) -> Result<HttpRequest<Vec<u8>>, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let path = metadata\n .path\n .to_string()\n .replace(\":room_alias\", &request.room_alias.to_string());\n\n let request_body = RequestBody {\n room_id: request.room_id,\n };\n\n let http_request = HttpRequest::builder()\n .method(metadata.method)\n .uri(path.as_ref())\n .body(serde_json::to_vec(&request_body).map_err(Error::from)?)?;\n\n Ok(http_request)\n }\n }\n\n \/\/\/ The response to a request to create a new room alias.\n pub struct Response;\n\n impl FutureFrom<HttpResponse<Vec<u8>>> for Response {\n type Future = FutureResult<Self, Self::Error>;\n type Error = Error;\n\n fn future_from(\n http_response: HttpResponse<Vec<u8>>,\n ) -> FutureResult<Self, Self::Error> {\n if http_response.status().is_success() {\n ok(Response)\n } else {\n err(Error::StatusCode(http_response.status().clone()))\n }\n }\n }\n }\n}\n<commit_msg>Add a nonexhaustive variant to Error.<commit_after>\/\/! Crate ruma_api contains core types used to define the requests and responses for each endpoint\n\/\/! in the various [Matrix](https:\/\/matrix.org) API specifications.\n\/\/! These types can be shared by client and server code for all Matrix APIs.\n\/\/!\n\/\/! When implementing a new Matrix API, each endpoint has a type that implements `Endpoint`, plus\n\/\/! the necessary associated types.\n\/\/! An implementation of `Endpoint` contains all the information about the HTTP method, the path and\n\/\/! input parameters for requests, and the structure of a successful response.\n\/\/! Such types can then be used by client code to make requests, and by server code to fulfill\n\/\/! those requests.\n#![deny(missing_debug_implementations)]\n#![deny(missing_docs)]\n#![feature(try_from)]\n\nextern crate futures;\nextern crate http;\n#[cfg(test)]\nextern crate ruma_identifiers;\n#[cfg(test)]\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_json;\nextern crate serde_urlencoded;\n\nuse std::convert::TryInto;\nuse std::io;\n\nuse futures::future::FutureFrom;\nuse http::{Method, Request, Response, StatusCode};\n\n\/\/\/ A Matrix API endpoint.\npub trait Endpoint<T, U> {\n \/\/\/ Data needed to make a request to the endpoint.\n type Request: TryInto<Request<T>, Error = Error>;\n \/\/\/ Data returned from the endpoint.\n type Response: FutureFrom<Response<U>, Error = Error>;\n\n \/\/\/ Metadata about the endpoint.\n const METADATA: Metadata;\n}\n\n\/\/\/ An error when converting an `Endpoint::Request` to a `http::Request` or a `http::Response` to\n\/\/\/ an `Endpoint::Response`.\n#[derive(Debug)]\npub enum Error {\n \/\/\/ An HTTP error.\n Http(http::Error),\n \/\/\/ A I\/O error.\n Io(io::Error),\n \/\/\/ A Serde JSON error.\n SerdeJson(serde_json::Error),\n \/\/\/ A Serde URL encoding error.\n SerdeUrlEncoded(serde_urlencoded::ser::Error),\n \/\/\/ An HTTP status code indicating error.\n StatusCode(StatusCode),\n \/\/\/ Standard hack to prevent exhaustive matching.\n \/\/\/ This will be replaced by the #[non_exhaustive] feature when available.\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl From<http::Error> for Error {\n fn from(error: http::Error) -> Self {\n Error::Http(error)\n }\n}\n\nimpl From<io::Error> for Error {\n fn from(error: io::Error) -> Self {\n Error::Io(error)\n }\n}\n\nimpl From<serde_json::Error> for Error {\n fn from(error: serde_json::Error) -> Self {\n Error::SerdeJson(error)\n }\n}\n\nimpl 
From<serde_urlencoded::ser::Error> for Error {\n fn from(error: serde_urlencoded::ser::Error) -> Self {\n Error::SerdeUrlEncoded(error)\n }\n}\n\n\/\/\/ Metadata about an API endpoint.\n#[derive(Clone, Debug)]\npub struct Metadata {\n \/\/\/ A human-readable description of the endpoint.\n pub description: &'static str,\n \/\/\/ The HTTP method used by this endpoint.\n pub method: Method,\n \/\/\/ A unique identifier for this endpoint.\n pub name: &'static str,\n \/\/\/ The path of this endpoint's URL, with variable names where path parameters should be filled\n \/\/\/ in during a request.\n pub path: &'static str,\n \/\/\/ Whether or not this endpoint is rate limited by the server.\n pub rate_limited: bool,\n \/\/\/ Whether or not the server requires an authenticated user for this endpoint.\n pub requires_authentication: bool,\n}\n\n#[cfg(test)]\nmod tests {\n \/\/\/ PUT \/_matrix\/client\/r0\/directory\/room\/:room_alias\n pub mod create {\n use std::convert::TryFrom;\n\n use futures::future::{err, ok, FutureFrom, FutureResult};\n use http::method::Method;\n use http::{Request as HttpRequest, Response as HttpResponse};\n use ruma_identifiers::{RoomAliasId, RoomId};\n use serde_json;\n\n use super::super::{Endpoint as ApiEndpoint, Error, Metadata};\n\n #[derive(Debug)]\n pub struct Endpoint;\n\n impl ApiEndpoint<Vec<u8>, Vec<u8>> for Endpoint {\n type Request = Request;\n type Response = Response;\n\n const METADATA: Metadata = Metadata {\n description: \"Add an alias to a room.\",\n method: Method::PUT,\n name: \"create_alias\",\n path: \"\/_matrix\/client\/r0\/directory\/room\/:room_alias\",\n rate_limited: false,\n requires_authentication: true,\n };\n }\n\n \/\/\/ A request to create a new room alias.\n #[derive(Debug)]\n pub struct Request {\n pub room_id: RoomId, \/\/ body\n pub room_alias: RoomAliasId, \/\/ path\n }\n\n #[derive(Debug, Serialize)]\n struct RequestBody {\n room_id: RoomId,\n }\n\n impl TryFrom<Request> for HttpRequest<Vec<u8>> {\n type Error = Error;\n\n fn try_from(request: Request) -> Result<HttpRequest<Vec<u8>>, Self::Error> {\n let metadata = Endpoint::METADATA;\n\n let path = metadata\n .path\n .to_string()\n .replace(\":room_alias\", &request.room_alias.to_string());\n\n let request_body = RequestBody {\n room_id: request.room_id,\n };\n\n let http_request = HttpRequest::builder()\n .method(metadata.method)\n .uri(path.as_ref())\n .body(serde_json::to_vec(&request_body).map_err(Error::from)?)?;\n\n Ok(http_request)\n }\n }\n\n \/\/\/ The response to a request to create a new room alias.\n pub struct Response;\n\n impl FutureFrom<HttpResponse<Vec<u8>>> for Response {\n type Future = FutureResult<Self, Self::Error>;\n type Error = Error;\n\n fn future_from(\n http_response: HttpResponse<Vec<u8>>,\n ) -> FutureResult<Self, Self::Error> {\n if http_response.status().is_success() {\n ok(Response)\n } else {\n err(Error::StatusCode(http_response.status().clone()))\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>os specific clean<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Adapter pattern in Rust<commit_after>\/*\n * Adapter Design Pattern\n * http:\/\/joshldavis.com\/design-patterns\/adapter\/\n *\/\n\n\/*\n * Core Trait that defines a basic Rocket Ship\n *\/\ntrait RocketShip {\n fn turn_on(&self);\n fn turn_off(&self);\n fn blast_off(&self);\n fn fly(&self);\n}\n\n\/*\n * Basic struct for a NASA Ship\n *\/\nstruct NASAShip;\n\n\/*\n * Implement RocketShip trait to add functionality to NASAShip\n *\/\nimpl RocketShip 
for NASAShip {\n    fn turn_on(&self) {\n        println!(\"NASA Ship is turning on.\")\n    }\n\n    fn turn_off(&self) {\n        println!(\"NASA Ship is turning off.\")\n    }\n\n    fn blast_off(&self) {\n        println!(\"NASA Ship is blasting off.\")\n    }\n\n    fn fly(&self) {\n        println!(\"NASA Ship is flying away.\")\n    }\n}\n\n\/*\n * Uh oh, here is our problem. It's the amazingly advanced SpaceX ship that our\n * astronaut doesn't know how to pilot.\n *\/\ntrait SpaceXShip {\n    fn ignition(&self);\n    fn on(&self);\n    fn off(&self);\n    fn launch(&self);\n    fn fly(&self);\n}\n\nstruct SpaceXDragon;\n\nimpl SpaceXShip for SpaceXDragon {\n    fn ignition(&self) {\n        println!(\"Turning Dragon's ignition.\")\n    }\n\n    fn on(&self) {\n        println!(\"Turning on the Dragon.\")\n    }\n\n    fn off(&self) {\n        println!(\"Turning off the Dragon.\")\n    }\n\n    fn launch(&self) {\n        println!(\"Launching the Dragon\")\n    }\n\n    fn fly(&self) {\n        println!(\"The Dragon is flying away.\")\n    }\n}\n\nstruct SpaceXAdapter {\n    ship: SpaceXDragon\n}\n\nimpl RocketShip for SpaceXAdapter {\n    fn turn_on(&self) {\n        self.ship.ignition();\n        self.ship.on();\n    }\n\n    fn turn_off(&self) {\n        self.ship.off();\n    }\n\n    fn blast_off(&self) {\n        self.ship.launch();\n    }\n\n    fn fly(&self) {\n        self.ship.fly();\n    }\n}\n\nfn pilot<S: RocketShip>(ship: &S) {\n    ship.turn_on();\n    ship.blast_off();\n    ship.fly();\n    ship.turn_off();\n    print!(\"\\n\");\n}\n\nfn main() {\n    let saturn5 = NASAShip;\n\n    \/\/ Let's fly our NASAShip\n    println!(\"Piloting the Saturn 5.\");\n    pilot(&saturn5);\n\n    let dragon = SpaceXDragon;\n\n    \/\/ Uh oh, our pilot function doesn't recognize this ship...\n    \/\/ println!(\"Piloting the Dragon.\");\n    \/\/ pilot(&dragon); <-- Gives a compile time error.\n\n    \/\/ Let's Adapt our SpaceXDragon ship\n    let dragon_adapter = SpaceXAdapter {\n        ship: dragon\n    };\n\n    \/\/ Now we can pilot the Dragon!\n    println!(\"Piloting the Dragon Adapter.\");\n    pilot(&dragon_adapter);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Got grid generation working for day 13, taking checkpoint before trying to solve pathfinding.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add skeleton for deck struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Creating 'safe' (native) rust wrappers around unsafe FFI<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Move a square in any direction on the screen.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Put a mirror transform in main pipeline.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Codeforces 764B<commit_after>macro_rules! read_line {\n    ($v:ident) => {\n        let mut temp = String::new();\n        std::io::stdin().read_line(&mut temp).unwrap();\n        let $v = temp;\n    };\n    (var, $t:ty, $($v:ident), *) => {\n        read_line!(input_line);\n        let mut iter = parse_token!($t, input_line);\n        $(\n            let $v = iter.next().unwrap();\n        )*\n    };\n    (vec, $t:ty, $v:ident) => {\n        read_line!(input_line);\n        let iter = parse_token!($t, input_line);\n        let $v: Vec<$t> = iter.collect();\n    };\n    ($($v:ident; $t:ty), *) => {\n        read_line!(input_line);\n        let mut iter = input_line.split_whitespace();\n        $(\n            let $v: $t = iter.next().unwrap().parse().unwrap();\n        )*\n    };\n}\n\nmacro_rules! 
parse_token {\n ($t:ty, $e:expr) => {\n $e.split_whitespace().map(|x| x.parse::<$t>().unwrap());\n };\n}\n\nfn main() {\n read_line!(cube_count;usize);\n read_line!(vec,i32,cubes);\n let mid = cube_count \/ 2;\n for i in 0..cube_count {\n let mut reversed = true;\n if i < mid {\n reversed = i % 2 == 0;\n } else {\n reversed = (cube_count - 1 - i) % 2 == 0;\n }\n if reversed {\n print!(\"{} \", cubes[cube_count - 1 - i]);\n } else {\n print!(\"{} \", cubes[i]);\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Make program print contents of file passed by command line<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Set ion as a process group<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>:art: Change the location of config file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented a simple Hello World example<commit_after>extern crate pancurses;\n\nuse pancurses::{initscr, printw, refresh, endwin};\n\nfn main() {\n let window = initscr();\n printw(\"Hello Rust\");\n refresh();\n window.getch();\n endwin();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Chore(trash): fix another import warning<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::cmp::Ordering;\n\nuse syntax::ast::{self, Visibility, Attribute, MetaItem, MetaItem_};\nuse syntax::codemap::{CodeMap, Span, BytePos};\nuse syntax::abi;\n\nuse Indent;\nuse comment::FindUncommented;\nuse rewrite::{Rewrite, RewriteContext};\n\nuse SKIP_ANNOTATION;\n\n\/\/ Computes the length of a string's last line, minus offset.\n#[inline]\npub fn extra_offset(text: &str, offset: Indent) -> usize {\n match text.rfind('\\n') {\n \/\/ 1 for newline character\n Some(idx) => text.len() - idx - 1 - offset.width(),\n None => text.len(),\n }\n}\n\n#[inline]\npub fn span_after(original: Span, needle: &str, codemap: &CodeMap) -> BytePos {\n let snippet = codemap.span_to_snippet(original).unwrap();\n let offset = snippet.find_uncommented(needle).unwrap() + needle.len();\n\n original.lo + BytePos(offset as u32)\n}\n\n#[inline]\npub fn span_after_last(original: Span, needle: &str, codemap: &CodeMap) -> BytePos {\n let snippet = codemap.span_to_snippet(original).unwrap();\n let mut offset = 0;\n\n while let Some(additional_offset) = snippet[offset..].find_uncommented(needle) {\n offset += additional_offset + needle.len();\n }\n\n original.lo + BytePos(offset as u32)\n}\n\n#[inline]\npub fn format_visibility(vis: Visibility) -> &'static str {\n match vis {\n Visibility::Public => \"pub \",\n Visibility::Inherited => \"\",\n }\n}\n\n#[inline]\npub fn format_unsafety(unsafety: ast::Unsafety) -> &'static str {\n match unsafety {\n ast::Unsafety::Unsafe => \"unsafe \",\n ast::Unsafety::Normal => \"\",\n }\n}\n\n#[inline]\npub fn format_mutability(mutability: ast::Mutability) -> &'static str {\n match mutability {\n ast::Mutability::MutMutable => \"mut \",\n ast::Mutability::MutImmutable => \"\",\n }\n}\n\n#[inline]\n\/\/ FIXME(#451): include \"C\"?\npub fn format_abi(abi: abi::Abi) -> 
String {\n format!(\"extern {} \", abi)\n}\n\n\/\/ The width of the first line in s.\n#[inline]\npub fn first_line_width(s: &str) -> usize {\n match s.find('\\n') {\n Some(n) => n,\n None => s.len(),\n }\n}\n\n\/\/ The width of the last line in s.\n#[inline]\npub fn last_line_width(s: &str) -> usize {\n match s.rfind('\\n') {\n Some(n) => s.len() - n - 1,\n None => s.len(),\n }\n}\n\n#[inline]\nfn is_skip(meta_item: &MetaItem) -> bool {\n match meta_item.node {\n MetaItem_::MetaWord(ref s) => *s == SKIP_ANNOTATION,\n MetaItem_::MetaList(ref s, ref l) => *s == \"cfg_attr\" && l.len() == 2 && is_skip(&l[1]),\n _ => false,\n }\n}\n\n#[inline]\npub fn contains_skip(attrs: &[Attribute]) -> bool {\n attrs.iter().any(|a| is_skip(&a.node.value))\n}\n\n\/\/ Find the end of a TyParam\n#[inline]\npub fn end_typaram(typaram: &ast::TyParam) -> BytePos {\n typaram.bounds\n .last()\n .map(|bound| {\n match *bound {\n ast::RegionTyParamBound(ref lt) => lt.span,\n ast::TraitTyParamBound(ref prt, _) => prt.span,\n }\n })\n .unwrap_or(typaram.span)\n .hi\n}\n\n#[inline]\npub fn semicolon_for_expr(expr: &ast::Expr) -> bool {\n match expr.node {\n ast::Expr_::ExprRet(..) |\n ast::Expr_::ExprAgain(..) |\n ast::Expr_::ExprBreak(..) => true,\n _ => false,\n }\n}\n\n#[inline]\npub fn semicolon_for_stmt(stmt: &ast::Stmt) -> bool {\n match stmt.node {\n ast::Stmt_::StmtSemi(ref expr, _) => {\n match expr.node {\n ast::Expr_::ExprWhile(..) |\n ast::Expr_::ExprWhileLet(..) |\n ast::Expr_::ExprLoop(..) |\n ast::Expr_::ExprForLoop(..) => false,\n _ => true,\n }\n }\n ast::Stmt_::StmtExpr(..) => false,\n _ => true,\n }\n}\n\n#[inline]\npub fn trim_newlines(input: &str) -> &str {\n match input.find(|c| c != '\\n' && c != '\\r') {\n Some(start) => {\n let end = input.rfind(|c| c != '\\n' && c != '\\r').unwrap_or(0) + 1;\n &input[start..end]\n }\n None => \"\",\n }\n}\n\n#[inline]\n#[cfg(target_pointer_width=\"64\")]\n\/\/ Based on the trick layed out at\n\/\/ http:\/\/graphics.stanford.edu\/~seander\/bithacks.html#RoundUpPowerOf2\npub fn round_up_to_power_of_two(mut x: usize) -> usize {\n x = x.wrapping_sub(1);\n x |= x >> 1;\n x |= x >> 2;\n x |= x >> 4;\n x |= x >> 8;\n x |= x >> 16;\n x |= x >> 32;\n x.wrapping_add(1)\n}\n\n#[inline]\n#[cfg(target_pointer_width=\"32\")]\npub fn round_up_to_power_of_two(mut x: usize) -> usize {\n x = x.wrapping_sub(1);\n x |= x >> 1;\n x |= x >> 2;\n x |= x >> 4;\n x |= x >> 8;\n x |= x >> 16;\n x.wrapping_add(1)\n}\n\n\/\/ Macro for deriving implementations of Decodable for enums\n#[macro_export]\nmacro_rules! impl_enum_decodable {\n ( $e:ident, $( $x:ident ),* ) => {\n impl ::rustc_serialize::Decodable for $e {\n fn decode<D: ::rustc_serialize::Decoder>(d: &mut D) -> Result<Self, D::Error> {\n let s = try!(d.read_str());\n match &*s {\n $(\n stringify!($x) => Ok($e::$x),\n )*\n _ => Err(d.error(\"Bad variant\")),\n }\n }\n }\n\n impl ::std::str::FromStr for $e {\n type Err = &'static str;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n match &*s {\n $(\n stringify!($x) => Ok($e::$x),\n )*\n _ => Err(\"Bad variant\"),\n }\n }\n }\n\n impl ::config::ConfigType for $e {\n fn get_variant_names() -> String {\n let mut variants = Vec::new();\n $(\n variants.push(stringify!($x));\n )*\n format!(\"[{}]\", variants.join(\"|\"))\n }\n }\n };\n}\n\n\/\/ Same as try!, but for Option\n#[macro_export]\nmacro_rules! try_opt {\n ($expr:expr) => (match $expr {\n Some(val) => val,\n None => { return None; }\n })\n}\n\n\/\/ Wraps string-like values in an Option. 
Returns Some when the string adheres\n\/\/ to the Rewrite constraints defined for the Rewrite trait and else otherwise.\npub fn wrap_str<S: AsRef<str>>(s: S, max_width: usize, width: usize, offset: Indent) -> Option<S> {\n {\n let snippet = s.as_ref();\n\n if !snippet.contains('\\n') && snippet.len() > width {\n return None;\n } else {\n let mut lines = snippet.lines();\n\n \/\/ The caller of this function has already placed `offset`\n \/\/ characters on the first line.\n let first_line_max_len = try_opt!(max_width.checked_sub(offset.width()));\n if lines.next().unwrap().len() > first_line_max_len {\n return None;\n }\n\n \/\/ The other lines must fit within the maximum width.\n if lines.find(|line| line.len() > max_width).is_some() {\n return None;\n }\n\n \/\/ `width` is the maximum length of the last line, excluding\n \/\/ indentation.\n \/\/ A special check for the last line, since the caller may\n \/\/ place trailing characters on this line.\n if snippet.lines().rev().next().unwrap().len() > offset.width() + width {\n return None;\n }\n }\n }\n\n Some(s)\n}\n\nimpl Rewrite for String {\n fn rewrite(&self, context: &RewriteContext, width: usize, offset: Indent) -> Option<String> {\n wrap_str(self, context.config.max_width, width, offset).map(ToOwned::to_owned)\n }\n}\n\n\/\/ Binary search in integer range. Returns the first Ok value returned by the\n\/\/ callback.\n\/\/ The callback takes an integer and returns either an Ok, or an Err indicating\n\/\/ whether the `guess' was too high (Ordering::Less), or too low.\n\/\/ This function is guaranteed to try to the hi value first.\npub fn binary_search<C, T>(mut lo: usize, mut hi: usize, callback: C) -> Option<T>\n where C: Fn(usize) -> Result<T, Ordering>\n{\n let mut middle = hi;\n\n while lo <= hi {\n match callback(middle) {\n Ok(val) => return Some(val),\n Err(Ordering::Less) => {\n hi = middle - 1;\n }\n Err(..) => {\n lo = middle + 1;\n }\n }\n middle = (hi + lo) \/ 2;\n }\n\n None\n}\n\n#[test]\nfn bin_search_test() {\n let closure = |i| {\n match i {\n 4 => Ok(()),\n j if j > 4 => Err(Ordering::Less),\n j if j < 4 => Err(Ordering::Greater),\n _ => unreachable!(),\n }\n };\n\n assert_eq!(Some(()), binary_search(1, 10, &closure));\n assert_eq!(None, binary_search(1, 3, &closure));\n assert_eq!(Some(()), binary_search(0, 44, &closure));\n assert_eq!(Some(()), binary_search(4, 125, &closure));\n assert_eq!(None, binary_search(6, 100, &closure));\n}\n\n#[test]\nfn power_rounding() {\n assert_eq!(0, round_up_to_power_of_two(0));\n assert_eq!(1, round_up_to_power_of_two(1));\n assert_eq!(64, round_up_to_power_of_two(33));\n assert_eq!(256, round_up_to_power_of_two(256));\n}\n<commit_msg>ignore case for config enums. Fixes #738<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::cmp::Ordering;\n\nuse syntax::ast::{self, Visibility, Attribute, MetaItem, MetaItem_};\nuse syntax::codemap::{CodeMap, Span, BytePos};\nuse syntax::abi;\n\nuse Indent;\nuse comment::FindUncommented;\nuse rewrite::{Rewrite, RewriteContext};\n\nuse SKIP_ANNOTATION;\n\n\/\/ Computes the length of a string's last line, minus offset.\n#[inline]\npub fn extra_offset(text: &str, offset: Indent) -> usize {\n match text.rfind('\\n') {\n \/\/ 1 for newline character\n Some(idx) => text.len() - idx - 1 - offset.width(),\n None => text.len(),\n }\n}\n\n#[inline]\npub fn span_after(original: Span, needle: &str, codemap: &CodeMap) -> BytePos {\n let snippet = codemap.span_to_snippet(original).unwrap();\n let offset = snippet.find_uncommented(needle).unwrap() + needle.len();\n\n original.lo + BytePos(offset as u32)\n}\n\n#[inline]\npub fn span_after_last(original: Span, needle: &str, codemap: &CodeMap) -> BytePos {\n let snippet = codemap.span_to_snippet(original).unwrap();\n let mut offset = 0;\n\n while let Some(additional_offset) = snippet[offset..].find_uncommented(needle) {\n offset += additional_offset + needle.len();\n }\n\n original.lo + BytePos(offset as u32)\n}\n\n#[inline]\npub fn format_visibility(vis: Visibility) -> &'static str {\n match vis {\n Visibility::Public => \"pub \",\n Visibility::Inherited => \"\",\n }\n}\n\n#[inline]\npub fn format_unsafety(unsafety: ast::Unsafety) -> &'static str {\n match unsafety {\n ast::Unsafety::Unsafe => \"unsafe \",\n ast::Unsafety::Normal => \"\",\n }\n}\n\n#[inline]\npub fn format_mutability(mutability: ast::Mutability) -> &'static str {\n match mutability {\n ast::Mutability::MutMutable => \"mut \",\n ast::Mutability::MutImmutable => \"\",\n }\n}\n\n#[inline]\n\/\/ FIXME(#451): include \"C\"?\npub fn format_abi(abi: abi::Abi) -> String {\n format!(\"extern {} \", abi)\n}\n\n\/\/ The width of the first line in s.\n#[inline]\npub fn first_line_width(s: &str) -> usize {\n match s.find('\\n') {\n Some(n) => n,\n None => s.len(),\n }\n}\n\n\/\/ The width of the last line in s.\n#[inline]\npub fn last_line_width(s: &str) -> usize {\n match s.rfind('\\n') {\n Some(n) => s.len() - n - 1,\n None => s.len(),\n }\n}\n\n#[inline]\nfn is_skip(meta_item: &MetaItem) -> bool {\n match meta_item.node {\n MetaItem_::MetaWord(ref s) => *s == SKIP_ANNOTATION,\n MetaItem_::MetaList(ref s, ref l) => *s == \"cfg_attr\" && l.len() == 2 && is_skip(&l[1]),\n _ => false,\n }\n}\n\n#[inline]\npub fn contains_skip(attrs: &[Attribute]) -> bool {\n attrs.iter().any(|a| is_skip(&a.node.value))\n}\n\n\/\/ Find the end of a TyParam\n#[inline]\npub fn end_typaram(typaram: &ast::TyParam) -> BytePos {\n typaram.bounds\n .last()\n .map(|bound| {\n match *bound {\n ast::RegionTyParamBound(ref lt) => lt.span,\n ast::TraitTyParamBound(ref prt, _) => prt.span,\n }\n })\n .unwrap_or(typaram.span)\n .hi\n}\n\n#[inline]\npub fn semicolon_for_expr(expr: &ast::Expr) -> bool {\n match expr.node {\n ast::Expr_::ExprRet(..) |\n ast::Expr_::ExprAgain(..) |\n ast::Expr_::ExprBreak(..) => true,\n _ => false,\n }\n}\n\n#[inline]\npub fn semicolon_for_stmt(stmt: &ast::Stmt) -> bool {\n match stmt.node {\n ast::Stmt_::StmtSemi(ref expr, _) => {\n match expr.node {\n ast::Expr_::ExprWhile(..) |\n ast::Expr_::ExprWhileLet(..) |\n ast::Expr_::ExprLoop(..) |\n ast::Expr_::ExprForLoop(..) => false,\n _ => true,\n }\n }\n ast::Stmt_::StmtExpr(..) 
=> false,\n _ => true,\n }\n}\n\n#[inline]\npub fn trim_newlines(input: &str) -> &str {\n match input.find(|c| c != '\\n' && c != '\\r') {\n Some(start) => {\n let end = input.rfind(|c| c != '\\n' && c != '\\r').unwrap_or(0) + 1;\n &input[start..end]\n }\n None => \"\",\n }\n}\n\n#[inline]\n#[cfg(target_pointer_width=\"64\")]\n\/\/ Based on the trick layed out at\n\/\/ http:\/\/graphics.stanford.edu\/~seander\/bithacks.html#RoundUpPowerOf2\npub fn round_up_to_power_of_two(mut x: usize) -> usize {\n x = x.wrapping_sub(1);\n x |= x >> 1;\n x |= x >> 2;\n x |= x >> 4;\n x |= x >> 8;\n x |= x >> 16;\n x |= x >> 32;\n x.wrapping_add(1)\n}\n\n#[inline]\n#[cfg(target_pointer_width=\"32\")]\npub fn round_up_to_power_of_two(mut x: usize) -> usize {\n x = x.wrapping_sub(1);\n x |= x >> 1;\n x |= x >> 2;\n x |= x >> 4;\n x |= x >> 8;\n x |= x >> 16;\n x.wrapping_add(1)\n}\n\n\/\/ Macro for deriving implementations of Decodable for enums\n#[macro_export]\nmacro_rules! impl_enum_decodable {\n ( $e:ident, $( $x:ident ),* ) => {\n impl ::rustc_serialize::Decodable for $e {\n fn decode<D: ::rustc_serialize::Decoder>(d: &mut D) -> Result<Self, D::Error> {\n use std::ascii::AsciiExt;\n let s = try!(d.read_str());\n $(\n if stringify!($x).eq_ignore_ascii_case(&s) {\n return Ok($e::$x);\n }\n )*\n Err(d.error(\"Bad variant\"))\n }\n }\n\n impl ::std::str::FromStr for $e {\n type Err = &'static str;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n use std::ascii::AsciiExt;\n $(\n if stringify!($x).eq_ignore_ascii_case(s) {\n return Ok($e::$x);\n }\n )*\n Err(\"Bad variant\")\n }\n }\n\n impl ::config::ConfigType for $e {\n fn get_variant_names() -> String {\n let mut variants = Vec::new();\n $(\n variants.push(stringify!($x));\n )*\n format!(\"[{}]\", variants.join(\"|\"))\n }\n }\n };\n}\n\n\/\/ Same as try!, but for Option\n#[macro_export]\nmacro_rules! try_opt {\n ($expr:expr) => (match $expr {\n Some(val) => val,\n None => { return None; }\n })\n}\n\n\/\/ Wraps string-like values in an Option. Returns Some when the string adheres\n\/\/ to the Rewrite constraints defined for the Rewrite trait and else otherwise.\npub fn wrap_str<S: AsRef<str>>(s: S, max_width: usize, width: usize, offset: Indent) -> Option<S> {\n {\n let snippet = s.as_ref();\n\n if !snippet.contains('\\n') && snippet.len() > width {\n return None;\n } else {\n let mut lines = snippet.lines();\n\n \/\/ The caller of this function has already placed `offset`\n \/\/ characters on the first line.\n let first_line_max_len = try_opt!(max_width.checked_sub(offset.width()));\n if lines.next().unwrap().len() > first_line_max_len {\n return None;\n }\n\n \/\/ The other lines must fit within the maximum width.\n if lines.find(|line| line.len() > max_width).is_some() {\n return None;\n }\n\n \/\/ `width` is the maximum length of the last line, excluding\n \/\/ indentation.\n \/\/ A special check for the last line, since the caller may\n \/\/ place trailing characters on this line.\n if snippet.lines().rev().next().unwrap().len() > offset.width() + width {\n return None;\n }\n }\n }\n\n Some(s)\n}\n\nimpl Rewrite for String {\n fn rewrite(&self, context: &RewriteContext, width: usize, offset: Indent) -> Option<String> {\n wrap_str(self, context.config.max_width, width, offset).map(ToOwned::to_owned)\n }\n}\n\n\/\/ Binary search in integer range. 
Returns the first Ok value returned by the\n\/\/ callback.\n\/\/ The callback takes an integer and returns either an Ok, or an Err indicating\n\/\/ whether the `guess' was too high (Ordering::Less), or too low.\n\/\/ This function is guaranteed to try to the hi value first.\npub fn binary_search<C, T>(mut lo: usize, mut hi: usize, callback: C) -> Option<T>\n where C: Fn(usize) -> Result<T, Ordering>\n{\n let mut middle = hi;\n\n while lo <= hi {\n match callback(middle) {\n Ok(val) => return Some(val),\n Err(Ordering::Less) => {\n hi = middle - 1;\n }\n Err(..) => {\n lo = middle + 1;\n }\n }\n middle = (hi + lo) \/ 2;\n }\n\n None\n}\n\n#[test]\nfn bin_search_test() {\n let closure = |i| {\n match i {\n 4 => Ok(()),\n j if j > 4 => Err(Ordering::Less),\n j if j < 4 => Err(Ordering::Greater),\n _ => unreachable!(),\n }\n };\n\n assert_eq!(Some(()), binary_search(1, 10, &closure));\n assert_eq!(None, binary_search(1, 3, &closure));\n assert_eq!(Some(()), binary_search(0, 44, &closure));\n assert_eq!(Some(()), binary_search(4, 125, &closure));\n assert_eq!(None, binary_search(6, 100, &closure));\n}\n\n#[test]\nfn power_rounding() {\n assert_eq!(0, round_up_to_power_of_two(0));\n assert_eq!(1, round_up_to_power_of_two(1));\n assert_eq!(64, round_up_to_power_of_two(33));\n assert_eq!(256, round_up_to_power_of_two(256));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem: server command not implemented<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add protoyping file<commit_after>use std::ops::Index;\n\ntrait Point\n{}\n\nstruct Point1D\n{\n x: usize,\n}\nmod abs\n{\n pub trait Signal\n { \n type valueType; \n type pointType;\n }\n}\n\ntrait Signal<T: abs::Signal> : Index<T::pointType>\n{}\n\nstruct Signal1D<T> \n{\n arr: Box<[T]>,\n}\n\nimpl <T: Copy> Signal1D<T> \n{\n fn new( init_value: T, size: usize) -> Signal1D<T>\n {\n let mut vec: Vec<T> = Vec::with_capacity(size);\n for _ in 0 .. size\n {\n vec.push(init_value);\n };\n Signal1D {arr: vec.into_boxed_slice()}\n }\n}\n\n\nimpl <T> abs::Signal for Signal1D<T>\n{\n type valueType = T;\n type pointType = Point1D;\n}\n\nimpl <T> Signal<Signal1D<T>> for Signal1D<T> {}\n\nimpl <T> Index<Point1D> for Signal1D<T>\n{\n type Output = T;\n fn index<'a>(&'a self, _index: Point1D) -> &'a T\n {\n &self.arr[_index.x]\n }\n}\n\nfn main()\n{\n let a :Signal1D<i32> = Signal1D::new(0, 10);\n let b = Point1D{x:0};\n\n println!(\"{}\", a[b]);\n \n}\n<|endoftext|>"} {"text":"<commit_before>use crate::core::compiler::RustcTargetData;\nuse crate::core::resolver::HasDevUnits;\nuse crate::core::{Shell, Workspace};\nuse crate::ops;\nuse crate::util::CargoResult;\nuse serde::Deserialize;\nuse std::collections::HashMap;\nuse std::ffi::OsString;\nuse std::path::Path;\nuse std::process::Command;\n\n\/\/\/ Strongly typed options for the `cargo doc` command.\n#[derive(Debug)]\npub struct DocOptions {\n \/\/\/ Whether to attempt to open the browser after compiling the docs\n pub open_result: bool,\n \/\/\/ Options to pass through to the compiler\n pub compile_opts: ops::CompileOptions,\n}\n\n#[derive(Deserialize)]\nstruct CargoDocConfig {\n \/\/\/ Browser to use to open docs. 
If this is unset, the value of the environment variable\n \/\/\/ `BROWSER` will be used.\n browser: Option<String>,\n}\n\n\/\/\/ Main method for `cargo doc`.\npub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> {\n let specs = options.compile_opts.spec.to_package_id_specs(ws)?;\n let target_data = RustcTargetData::new(ws, &options.compile_opts.build_config.requested_kinds)?;\n let ws_resolve = ops::resolve_ws_with_opts(\n ws,\n &target_data,\n &options.compile_opts.build_config.requested_kinds,\n &options.compile_opts.cli_features,\n &specs,\n HasDevUnits::No,\n crate::core::resolver::features::ForceAllTargets::No,\n )?;\n\n let ids = ws_resolve.targeted_resolve.specs_to_ids(&specs)?;\n let pkgs = ws_resolve.pkg_set.get_many(ids)?;\n\n let mut lib_names = HashMap::new();\n let mut bin_names = HashMap::new();\n let mut names = Vec::new();\n for package in &pkgs {\n for target in package.targets().iter().filter(|t| t.documented()) {\n if target.is_lib() {\n if let Some(prev) = lib_names.insert(target.crate_name(), package) {\n anyhow::bail!(\n \"The library `{}` is specified by packages `{}` and \\\n `{}` but can only be documented once. Consider renaming \\\n or marking one of the targets as `doc = false`.\",\n target.crate_name(),\n prev,\n package\n );\n }\n } else if let Some(prev) = bin_names.insert(target.crate_name(), package) {\n anyhow::bail!(\n \"The binary `{}` is specified by packages `{}` and \\\n `{}` but can be documented only once. Consider renaming \\\n or marking one of the targets as `doc = false`.\",\n target.crate_name(),\n prev,\n package\n );\n }\n names.push(target.crate_name());\n }\n }\n\n let open_kind = if options.open_result {\n Some(options.compile_opts.build_config.single_requested_kind()?)\n } else {\n None\n };\n\n let compilation = ops::compile(ws, &options.compile_opts)?;\n\n if let Some(kind) = open_kind {\n let name = match names.first() {\n Some(s) => s.to_string(),\n None => return Ok(()),\n };\n let path = compilation.root_output[&kind]\n .with_file_name(\"doc\")\n .join(&name)\n .join(\"index.html\");\n if path.exists() {\n let mut shell = ws.config().shell();\n shell.status(\"Opening\", path.display())?;\n let cfg = ws.config().get::<CargoDocConfig>(\"cargo-doc\")?;\n open_docs(&path, &mut shell, cfg.browser.map(|v| v.into()))?;\n }\n }\n\n Ok(())\n}\n\nfn open_docs(path: &Path, shell: &mut Shell, config_browser: Option<OsString>) -> CargoResult<()> {\n let browser = config_browser.or_else(|| std::env::var_os(\"BROWSER\"));\n match browser {\n Some(browser) => {\n if let Err(e) = Command::new(&browser).arg(path).status() {\n shell.warn(format!(\n \"Couldn't open docs with {}: {}\",\n browser.to_string_lossy(),\n e\n ))?;\n }\n }\n None => {\n if let Err(e) = opener::open(&path) {\n let e = e.into();\n crate::display_warning_with_error(\"couldn't open docs\", &e, shell);\n }\n }\n };\n\n Ok(())\n}\n<commit_msg>Move browser setting to [doc], use PathAndArgs<commit_after>use crate::core::resolver::HasDevUnits;\nuse crate::core::{Shell, Workspace};\nuse crate::ops;\nuse crate::util::CargoResult;\nuse crate::{core::compiler::RustcTargetData, util::config::PathAndArgs};\nuse serde::Deserialize;\nuse std::path::Path;\nuse std::process::Command;\nuse std::{collections::HashMap, path::PathBuf};\n\n\/\/\/ Strongly typed options for the `cargo doc` command.\n#[derive(Debug)]\npub struct DocOptions {\n \/\/\/ Whether to attempt to open the browser after compiling the docs\n pub open_result: bool,\n \/\/\/ Options to pass through to the 
compiler\n pub compile_opts: ops::CompileOptions,\n}\n\n#[derive(Deserialize)]\nstruct CargoDocConfig {\n \/\/\/ Browser to use to open docs. If this is unset, the value of the environment variable\n \/\/\/ `BROWSER` will be used.\n browser: Option<PathAndArgs>,\n}\n\n\/\/\/ Main method for `cargo doc`.\npub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> {\n let specs = options.compile_opts.spec.to_package_id_specs(ws)?;\n let target_data = RustcTargetData::new(ws, &options.compile_opts.build_config.requested_kinds)?;\n let ws_resolve = ops::resolve_ws_with_opts(\n ws,\n &target_data,\n &options.compile_opts.build_config.requested_kinds,\n &options.compile_opts.cli_features,\n &specs,\n HasDevUnits::No,\n crate::core::resolver::features::ForceAllTargets::No,\n )?;\n\n let ids = ws_resolve.targeted_resolve.specs_to_ids(&specs)?;\n let pkgs = ws_resolve.pkg_set.get_many(ids)?;\n\n let mut lib_names = HashMap::new();\n let mut bin_names = HashMap::new();\n let mut names = Vec::new();\n for package in &pkgs {\n for target in package.targets().iter().filter(|t| t.documented()) {\n if target.is_lib() {\n if let Some(prev) = lib_names.insert(target.crate_name(), package) {\n anyhow::bail!(\n \"The library `{}` is specified by packages `{}` and \\\n `{}` but can only be documented once. Consider renaming \\\n or marking one of the targets as `doc = false`.\",\n target.crate_name(),\n prev,\n package\n );\n }\n } else if let Some(prev) = bin_names.insert(target.crate_name(), package) {\n anyhow::bail!(\n \"The binary `{}` is specified by packages `{}` and \\\n `{}` but can be documented only once. Consider renaming \\\n or marking one of the targets as `doc = false`.\",\n target.crate_name(),\n prev,\n package\n );\n }\n names.push(target.crate_name());\n }\n }\n\n let open_kind = if options.open_result {\n Some(options.compile_opts.build_config.single_requested_kind()?)\n } else {\n None\n };\n\n let compilation = ops::compile(ws, &options.compile_opts)?;\n\n if let Some(kind) = open_kind {\n let name = match names.first() {\n Some(s) => s.to_string(),\n None => return Ok(()),\n };\n let path = compilation.root_output[&kind]\n .with_file_name(\"doc\")\n .join(&name)\n .join(\"index.html\");\n if path.exists() {\n let mut shell = ws.config().shell();\n shell.status(\"Opening\", path.display())?;\n let cfg = ws.config().get::<CargoDocConfig>(\"doc\")?;\n open_docs(\n &path,\n &mut shell,\n cfg.browser.map(|path_args| {\n (path_args.path.resolve_program(&ws.config()), path_args.args)\n }),\n )?;\n }\n }\n\n Ok(())\n}\n\nfn open_docs(\n path: &Path,\n shell: &mut Shell,\n config_browser: Option<(PathBuf, Vec<String>)>,\n) -> CargoResult<()> {\n let browser =\n config_browser.or_else(|| Some((PathBuf::from(std::env::var_os(\"BROWSER\")?), Vec::new())));\n\n match browser {\n Some((browser, initial_args)) => {\n if let Err(e) = Command::new(&browser).args(initial_args).arg(path).status() {\n shell.warn(format!(\n \"Couldn't open docs with {}: {}\",\n browser.to_string_lossy(),\n e\n ))?;\n }\n }\n None => {\n if let Err(e) = opener::open(&path) {\n let e = e.into();\n crate::display_warning_with_error(\"couldn't open docs\", &e, shell);\n }\n }\n };\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Table driven tests (read\/write, i32\/i64)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a fence method to combine wait and reset, which applies to all current uses<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Looking for a way 
to recognize EOF correctly.<commit_after>extern crate rand;\n\nuse rand::random;\nuse std::io::BufferedReader;\nuse std::io;\n\nfn main() {\n println!(\"I'm thinking of a number between 1 and 100. Can you guess it?\");\n let x = random();\n let mut reader = BufferedReader::new(io::stdin());\n while !reader.eof() {\n let line = reader.read_line().unwrap();\n let num = from_str::<int>(line.slice_to(line.len() - 1));\n match num {\n Some(number_string) => println!(\"Well, it was a number.\"),\n None => println!(\"Doesn't count.\")\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs(examples): add compaction_filter example code<commit_after>extern crate rocks;\n\nuse lazy_static::lazy_static;\nuse rocks::compaction_filter::{CompactionFilter, Decision, ValueType};\nuse rocks::merge_operator::{MergeOperationInput, MergeOperationOutput, MergeOperator};\nuse rocks::prelude::*;\n\npub struct MyMerge;\n\nimpl MergeOperator for MyMerge {\n fn full_merge(&self, merge_in: &MergeOperationInput, merge_out: &mut MergeOperationOutput) -> bool {\n if let Some(value) = merge_in.existing_value() {\n merge_out.assign(value);\n }\n for m in merge_in.operands() {\n eprintln!(\"Merge({:?})\", String::from_utf8_lossy(m));\n \/\/ the compaction filter filters out bad values\n assert!(m != b\"bad\");\n merge_out.assign(m);\n }\n true\n }\n\n fn name(&self) -> &str {\n \"MyMerge\\0\"\n }\n}\n\n#[derive(Debug, Default)]\npub struct MyFilter {\n count: usize,\n merge_count: usize,\n}\n\nimpl CompactionFilter for MyFilter {\n \/\/ rust-rocks only impls the `FilterV2` API.\n fn filter(&mut self, _level: i32, key: &[u8], value_type: ValueType, existing_value: &[u8]) -> Decision {\n match value_type {\n ValueType::Value => {\n eprintln!(\"Filter({:?})\", String::from_utf8_lossy(key));\n self.count += 1;\n Decision::Keep\n },\n ValueType::MergeOperand => {\n eprintln!(\"FilterMerge({:?})\", String::from_utf8_lossy(key));\n self.merge_count += 1;\n if existing_value == b\"bad\" {\n Decision::Remove\n } else {\n Decision::Keep\n }\n },\n }\n }\n\n fn name(&self) -> &str {\n \"MyFilterV2\\0\"\n }\n}\n\nlazy_static! {\n static ref MY_FILTER: MyFilter = MyFilter::default();\n}\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n const DB_PATH: &str = \"\/tmp\/rocksmergetest\";\n\n let options = Options::default()\n .map_db_options(|opt| opt.create_if_missing(true))\n .map_cf_options(|opt| opt.merge_operator(Box::new(MyMerge)).compaction_filter(&*MY_FILTER));\n\n let db = DB::open(&options, DB_PATH)?;\n\n let wopts = WriteOptions::default_instance();\n db.merge(wopts, b\"0\", b\"bad\")?;\n db.merge(wopts, b\"1\", b\"data1\")?;\n db.merge(wopts, b\"1\", b\"bad\")?;\n db.merge(wopts, b\"1\", b\"data2\")?;\n db.merge(wopts, b\"1\", b\"bad\")?;\n db.merge(wopts, b\"3\", b\"data3\")?;\n\n db.compact_range(&CompactRangeOptions::default(), ..)?;\n\n println!(\"{:?}\", &*MY_FILTER);\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #69312<commit_after>\/\/ build-pass\n\n\/\/ Verify that the compiler doesn't ICE during const prop while evaluating the index operation.\n\n#![allow(unconditional_panic)]\n\nfn main() {\n let cols = [0u32; 0];\n cols[0];\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse url::Url;\n\nuse selectors::bloom::BloomFilter;\nuse selectors::matching::{SelectorMap, Rule};\nuse selectors::matching::DeclarationBlock as GenericDeclarationBlock;\nuse selectors::parser::PseudoElement;\nuse selectors::tree::TNode;\nuse util::resource_files::read_resource_file;\nuse util::smallvec::VecLike;\n\nuse legacy::PresentationalHintSynthesis;\nuse media_queries::Device;\nuse node::TElementAttributes;\nuse properties::{PropertyDeclaration, PropertyDeclarationBlock};\nuse stylesheets::{Stylesheet, CSSRuleIteratorExt, Origin};\nuse viewport::{ViewportConstraints, ViewportRuleCascade};\n\n\npub type DeclarationBlock = GenericDeclarationBlock<Vec<PropertyDeclaration>>;\n\n\npub struct Stylist {\n \/\/ List of stylesheets (including all media rules)\n stylesheets: Vec<Stylesheet>,\n\n \/\/ Device that the stylist is currently evaluating against.\n pub device: Device,\n\n \/\/ If true, a stylesheet has been added or the device has\n \/\/ changed, and the stylist needs to be updated.\n is_dirty: bool,\n\n \/\/ The current selector maps, after evaluating media\n \/\/ rules against the current device.\n element_map: PerPseudoElementSelectorMap,\n before_map: PerPseudoElementSelectorMap,\n after_map: PerPseudoElementSelectorMap,\n rules_source_order: usize,\n}\n\nimpl Stylist {\n #[inline]\n pub fn new(device: Device) -> Stylist {\n let mut stylist = Stylist {\n stylesheets: vec!(),\n device: device,\n is_dirty: true,\n\n element_map: PerPseudoElementSelectorMap::new(),\n before_map: PerPseudoElementSelectorMap::new(),\n after_map: PerPseudoElementSelectorMap::new(),\n rules_source_order: 0,\n };\n \/\/ FIXME: Add iso-8859-9.css when the document’s encoding is ISO-8859-8.\n \/\/ FIXME: presentational-hints.css should be at author origin with zero specificity.\n \/\/ (Does it make a difference?)\n for &filename in [\"user-agent.css\", \"servo.css\", \"presentational-hints.css\"].iter() {\n let ua_stylesheet = Stylesheet::from_bytes(\n &read_resource_file(&[filename]).unwrap(),\n Url::parse(&format!(\"chrome:\/\/\/{:?}\", filename)).unwrap(),\n None,\n None,\n Origin::UserAgent);\n stylist.add_stylesheet(ua_stylesheet);\n }\n stylist\n }\n\n pub fn constrain_viewport(&self) -> Option<ViewportConstraints> {\n let cascaded_rule = self.stylesheets.iter()\n .flat_map(|s| s.effective_rules(&self.device).viewport())\n .cascade();\n\n ViewportConstraints::maybe_new(self.device.viewport_size, &cascaded_rule)\n }\n\n pub fn update(&mut self) -> bool {\n if self.is_dirty {\n self.element_map = PerPseudoElementSelectorMap::new();\n self.before_map = PerPseudoElementSelectorMap::new();\n self.after_map = PerPseudoElementSelectorMap::new();\n self.rules_source_order = 0;\n\n for stylesheet in self.stylesheets.iter() {\n let (mut element_map, mut before_map, mut after_map) = match stylesheet.origin {\n Origin::UserAgent => (\n &mut self.element_map.user_agent,\n &mut self.before_map.user_agent,\n &mut self.after_map.user_agent,\n ),\n Origin::Author => (\n &mut self.element_map.author,\n &mut self.before_map.author,\n &mut self.after_map.author,\n ),\n Origin::User => (\n &mut self.element_map.user,\n &mut self.before_map.user,\n &mut self.after_map.user,\n ),\n };\n let mut rules_source_order = self.rules_source_order;\n\n \/\/ Take apart the StyleRule into individual Rules and insert\n \/\/ them into the SelectorMap of that priority.\n macro_rules! 
append(\n ($style_rule: ident, $priority: ident) => {\n if $style_rule.declarations.$priority.len() > 0 {\n for selector in $style_rule.selectors.iter() {\n let map = match selector.pseudo_element {\n None => &mut element_map,\n Some(PseudoElement::Before) => &mut before_map,\n Some(PseudoElement::After) => &mut after_map,\n };\n map.$priority.insert(Rule {\n selector: selector.compound_selectors.clone(),\n declarations: DeclarationBlock {\n specificity: selector.specificity,\n declarations: $style_rule.declarations.$priority.clone(),\n source_order: rules_source_order,\n },\n });\n }\n }\n };\n );\n\n for style_rule in stylesheet.effective_rules(&self.device).style() {\n append!(style_rule, normal);\n append!(style_rule, important);\n rules_source_order += 1;\n }\n self.rules_source_order = rules_source_order;\n }\n\n self.is_dirty = false;\n return true;\n }\n\n false\n }\n\n pub fn set_device(&mut self, device: Device) {\n let is_dirty = self.is_dirty || self.stylesheets.iter()\n .flat_map(|stylesheet| stylesheet.rules().media())\n .any(|media_rule| media_rule.evaluate(&self.device) != media_rule.evaluate(&device));\n\n self.device = device;\n self.is_dirty |= is_dirty;\n }\n\n pub fn add_quirks_mode_stylesheet(&mut self) {\n self.add_stylesheet(Stylesheet::from_bytes(\n &read_resource_file(&[\"quirks-mode.css\"]).unwrap(),\n Url::parse(\"chrome:\/\/\/quirks-mode.css\").unwrap(),\n None,\n None,\n Origin::UserAgent))\n }\n\n pub fn add_stylesheet(&mut self, stylesheet: Stylesheet) {\n self.stylesheets.push(stylesheet);\n self.is_dirty = true;\n }\n\n \/\/\/ Returns the applicable CSS declarations for the given element. This corresponds to\n \/\/\/ `ElementRuleCollector` in WebKit.\n \/\/\/\n \/\/\/ The returned boolean indicates whether the style is *shareable*; that is, whether the\n \/\/\/ matched selectors are simple enough to allow the matching logic to be reduced to the logic\n \/\/\/ in `css::matching::PrivateMatchMethods::candidate_element_allows_for_style_sharing`.\n pub fn push_applicable_declarations<'a,N,V>(\n &self,\n element: &N,\n parent_bf: &Option<Box<BloomFilter>>,\n style_attribute: Option<&PropertyDeclarationBlock>,\n pseudo_element: Option<PseudoElement>,\n applicable_declarations: &mut V)\n -> bool\n where N: TNode<'a>,\n N::Element: TElementAttributes<'a>,\n V: VecLike<DeclarationBlock> {\n assert!(!self.is_dirty);\n assert!(element.is_element());\n assert!(style_attribute.is_none() || pseudo_element.is_none(),\n \"Style attributes do not apply to pseudo-elements\");\n\n let map = match pseudo_element {\n None => &self.element_map,\n Some(PseudoElement::Before) => &self.before_map,\n Some(PseudoElement::After) => &self.after_map,\n };\n\n let mut shareable = true;\n\n\n \/\/ Step 1: Normal user-agent rules.\n map.user_agent.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 2: Presentational hints.\n self.synthesize_presentational_hints_for_legacy_attributes(element,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 3: User and author normal rules.\n map.user.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n map.author.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 4: Normal style attributes.\n style_attribute.map(|sa| {\n shareable = false;\n applicable_declarations.push(\n GenericDeclarationBlock::from_declarations(sa.normal.clone()))\n });\n\n \/\/ Step 5: Author-supplied 
`!important` rules.\n map.author.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 6: `!important` style attributes.\n style_attribute.map(|sa| {\n shareable = false;\n applicable_declarations.push(\n GenericDeclarationBlock::from_declarations(sa.important.clone()))\n });\n\n \/\/ Step 7: User and UA `!important` rules.\n map.user.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n map.user_agent.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n shareable\n }\n\n pub fn is_dirty(&self) -> bool {\n self.is_dirty\n }\n}\n\nstruct PerOriginSelectorMap {\n normal: SelectorMap<Vec<PropertyDeclaration>>,\n important: SelectorMap<Vec<PropertyDeclaration>>,\n}\n\nimpl PerOriginSelectorMap {\n #[inline]\n fn new() -> PerOriginSelectorMap {\n PerOriginSelectorMap {\n normal: SelectorMap::new(),\n important: SelectorMap::new(),\n }\n }\n}\n\nstruct PerPseudoElementSelectorMap {\n user_agent: PerOriginSelectorMap,\n author: PerOriginSelectorMap,\n user: PerOriginSelectorMap,\n}\n\nimpl PerPseudoElementSelectorMap {\n #[inline]\n fn new() -> PerPseudoElementSelectorMap {\n PerPseudoElementSelectorMap {\n user_agent: PerOriginSelectorMap::new(),\n author: PerOriginSelectorMap::new(),\n user: PerOriginSelectorMap::new(),\n }\n }\n}\n<commit_msg>fix Stylist::new() to not panic when resources cannot be loaded<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse url::Url;\n\nuse selectors::bloom::BloomFilter;\nuse selectors::matching::{SelectorMap, Rule};\nuse selectors::matching::DeclarationBlock as GenericDeclarationBlock;\nuse selectors::parser::PseudoElement;\nuse selectors::tree::TNode;\nuse std::process;\nuse util::resource_files::read_resource_file;\nuse util::smallvec::VecLike;\n\nuse legacy::PresentationalHintSynthesis;\nuse media_queries::Device;\nuse node::TElementAttributes;\nuse properties::{PropertyDeclaration, PropertyDeclarationBlock};\nuse stylesheets::{Stylesheet, CSSRuleIteratorExt, Origin};\nuse viewport::{ViewportConstraints, ViewportRuleCascade};\n\n\npub type DeclarationBlock = GenericDeclarationBlock<Vec<PropertyDeclaration>>;\n\n\npub struct Stylist {\n \/\/ List of stylesheets (including all media rules)\n stylesheets: Vec<Stylesheet>,\n\n \/\/ Device that the stylist is currently evaluating against.\n pub device: Device,\n\n \/\/ If true, a stylesheet has been added or the device has\n \/\/ changed, and the stylist needs to be updated.\n is_dirty: bool,\n\n \/\/ The current selector maps, after evaluating media\n \/\/ rules against the current device.\n element_map: PerPseudoElementSelectorMap,\n before_map: PerPseudoElementSelectorMap,\n after_map: PerPseudoElementSelectorMap,\n rules_source_order: usize,\n}\n\nimpl Stylist {\n #[inline]\n pub fn new(device: Device) -> Stylist {\n let mut stylist = Stylist {\n stylesheets: vec!(),\n device: device,\n is_dirty: true,\n\n element_map: PerPseudoElementSelectorMap::new(),\n before_map: PerPseudoElementSelectorMap::new(),\n after_map: PerPseudoElementSelectorMap::new(),\n rules_source_order: 0,\n };\n \/\/ FIXME: Add iso-8859-9.css when the document’s encoding is ISO-8859-8.\n \/\/ FIXME: presentational-hints.css should be at author origin with zero specificity.\n \/\/ (Does it make a 
difference?)\n for &filename in [\"user-agent.css\", \"servo.css\", \"presentational-hints.css\"].iter() {\n match read_resource_file(&[filename]) {\n Ok(res) => {\n let ua_stylesheet = Stylesheet::from_bytes(\n &res,\n Url::parse(&format!(\"chrome:\/\/\/{:?}\", filename)).unwrap(),\n None,\n None,\n Origin::UserAgent);\n stylist.add_stylesheet(ua_stylesheet);\n }\n Err(..) => {\n error!(\"Stylist::new() failed at loading {}!\", filename);\n process::exit(1);\n }\n }\n }\n stylist\n }\n\n pub fn constrain_viewport(&self) -> Option<ViewportConstraints> {\n let cascaded_rule = self.stylesheets.iter()\n .flat_map(|s| s.effective_rules(&self.device).viewport())\n .cascade();\n\n ViewportConstraints::maybe_new(self.device.viewport_size, &cascaded_rule)\n }\n\n pub fn update(&mut self) -> bool {\n if self.is_dirty {\n self.element_map = PerPseudoElementSelectorMap::new();\n self.before_map = PerPseudoElementSelectorMap::new();\n self.after_map = PerPseudoElementSelectorMap::new();\n self.rules_source_order = 0;\n\n for stylesheet in self.stylesheets.iter() {\n let (mut element_map, mut before_map, mut after_map) = match stylesheet.origin {\n Origin::UserAgent => (\n &mut self.element_map.user_agent,\n &mut self.before_map.user_agent,\n &mut self.after_map.user_agent,\n ),\n Origin::Author => (\n &mut self.element_map.author,\n &mut self.before_map.author,\n &mut self.after_map.author,\n ),\n Origin::User => (\n &mut self.element_map.user,\n &mut self.before_map.user,\n &mut self.after_map.user,\n ),\n };\n let mut rules_source_order = self.rules_source_order;\n\n \/\/ Take apart the StyleRule into individual Rules and insert\n \/\/ them into the SelectorMap of that priority.\n macro_rules! append(\n ($style_rule: ident, $priority: ident) => {\n if $style_rule.declarations.$priority.len() > 0 {\n for selector in $style_rule.selectors.iter() {\n let map = match selector.pseudo_element {\n None => &mut element_map,\n Some(PseudoElement::Before) => &mut before_map,\n Some(PseudoElement::After) => &mut after_map,\n };\n map.$priority.insert(Rule {\n selector: selector.compound_selectors.clone(),\n declarations: DeclarationBlock {\n specificity: selector.specificity,\n declarations: $style_rule.declarations.$priority.clone(),\n source_order: rules_source_order,\n },\n });\n }\n }\n };\n );\n\n for style_rule in stylesheet.effective_rules(&self.device).style() {\n append!(style_rule, normal);\n append!(style_rule, important);\n rules_source_order += 1;\n }\n self.rules_source_order = rules_source_order;\n }\n\n self.is_dirty = false;\n return true;\n }\n\n false\n }\n\n pub fn set_device(&mut self, device: Device) {\n let is_dirty = self.is_dirty || self.stylesheets.iter()\n .flat_map(|stylesheet| stylesheet.rules().media())\n .any(|media_rule| media_rule.evaluate(&self.device) != media_rule.evaluate(&device));\n\n self.device = device;\n self.is_dirty |= is_dirty;\n }\n\n pub fn add_quirks_mode_stylesheet(&mut self) {\n self.add_stylesheet(Stylesheet::from_bytes(\n &read_resource_file(&[\"quirks-mode.css\"]).unwrap(),\n Url::parse(\"chrome:\/\/\/quirks-mode.css\").unwrap(),\n None,\n None,\n Origin::UserAgent))\n }\n\n pub fn add_stylesheet(&mut self, stylesheet: Stylesheet) {\n self.stylesheets.push(stylesheet);\n self.is_dirty = true;\n }\n\n \/\/\/ Returns the applicable CSS declarations for the given element. 
This corresponds to\n \/\/\/ `ElementRuleCollector` in WebKit.\n \/\/\/\n \/\/\/ The returned boolean indicates whether the style is *shareable*; that is, whether the\n \/\/\/ matched selectors are simple enough to allow the matching logic to be reduced to the logic\n \/\/\/ in `css::matching::PrivateMatchMethods::candidate_element_allows_for_style_sharing`.\n pub fn push_applicable_declarations<'a,N,V>(\n &self,\n element: &N,\n parent_bf: &Option<Box<BloomFilter>>,\n style_attribute: Option<&PropertyDeclarationBlock>,\n pseudo_element: Option<PseudoElement>,\n applicable_declarations: &mut V)\n -> bool\n where N: TNode<'a>,\n N::Element: TElementAttributes<'a>,\n V: VecLike<DeclarationBlock> {\n assert!(!self.is_dirty);\n assert!(element.is_element());\n assert!(style_attribute.is_none() || pseudo_element.is_none(),\n \"Style attributes do not apply to pseudo-elements\");\n\n let map = match pseudo_element {\n None => &self.element_map,\n Some(PseudoElement::Before) => &self.before_map,\n Some(PseudoElement::After) => &self.after_map,\n };\n\n let mut shareable = true;\n\n\n \/\/ Step 1: Normal user-agent rules.\n map.user_agent.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 2: Presentational hints.\n self.synthesize_presentational_hints_for_legacy_attributes(element,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 3: User and author normal rules.\n map.user.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n map.author.normal.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 4: Normal style attributes.\n style_attribute.map(|sa| {\n shareable = false;\n applicable_declarations.push(\n GenericDeclarationBlock::from_declarations(sa.normal.clone()))\n });\n\n \/\/ Step 5: Author-supplied `!important` rules.\n map.author.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n \/\/ Step 6: `!important` style attributes.\n style_attribute.map(|sa| {\n shareable = false;\n applicable_declarations.push(\n GenericDeclarationBlock::from_declarations(sa.important.clone()))\n });\n\n \/\/ Step 7: User and UA `!important` rules.\n map.user.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n map.user_agent.important.get_all_matching_rules(element,\n parent_bf,\n applicable_declarations,\n &mut shareable);\n\n shareable\n }\n\n pub fn is_dirty(&self) -> bool {\n self.is_dirty\n }\n}\n\nstruct PerOriginSelectorMap {\n normal: SelectorMap<Vec<PropertyDeclaration>>,\n important: SelectorMap<Vec<PropertyDeclaration>>,\n}\n\nimpl PerOriginSelectorMap {\n #[inline]\n fn new() -> PerOriginSelectorMap {\n PerOriginSelectorMap {\n normal: SelectorMap::new(),\n important: SelectorMap::new(),\n }\n }\n}\n\nstruct PerPseudoElementSelectorMap {\n user_agent: PerOriginSelectorMap,\n author: PerOriginSelectorMap,\n user: PerOriginSelectorMap,\n}\n\nimpl PerPseudoElementSelectorMap {\n #[inline]\n fn new() -> PerPseudoElementSelectorMap {\n PerPseudoElementSelectorMap {\n user_agent: PerOriginSelectorMap::new(),\n author: PerOriginSelectorMap::new(),\n user: PerOriginSelectorMap::new(),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse 
super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. 
times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 1 {\n editor.backspace(window);\n }\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset >= 1 {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Fix D command<commit_after>use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') 
=> times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset >= 1 {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match key_event.character {\n '0' if !no_mult => times *= 10,\n\n '1' if no_mult => times = 1,\n '1' => times = times * 10 + 1,\n\n '2' if no_mult => times = 2,\n '2' => times = times * 10 + 2,\n\n '3' if no_mult => times = 3,\n '3' => times = times * 10 + 3,\n\n '4' if no_mult => times = 4,\n '4' => times = times * 10 + 4,\n\n '5' if no_mult => times = 5,\n '5' => times = times * 10 + 5,\n\n '6' if no_mult => times = 6,\n '6' => times = times * 10 + 6,\n\n '7' if no_mult => times = 7,\n '7' => times = times * 10 + 7,\n\n '8' if no_mult => times = 8,\n '8' => times = times * 10 + 
8,\n\n '9' if no_mult => times = 9,\n '9' => times = times * 10 + 9,\n _ => {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'o') => {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '$',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: 'i',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '\\n',\n scancode: 0,\n pressed: true,\n }, window);\n },\n (Normal, '$') => {\n let mut new_offset = editor.string.len();\n for i in editor.offset..editor.string.len() {\n match editor.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i;\n break;\n }\n _ => (),\n }\n }\n editor.offset = new_offset;\n },\n (Normal, '0') => {\n\n let mut new_offset = 0;\n for i in 2..editor.offset {\n match editor.string.as_bytes()[editor.offset - i] {\n 0 => break,\n 10 => {\n new_offset = editor.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n editor.offset = new_offset;\n },\n (Insert, '\\0') => (),\n (Insert, _) => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &editor.url, \") Changed\"));\n editor.string = editor.string[0 .. editor.offset].to_string() +\n &key_event.character.to_string() +\n &editor.string[editor.offset .. 
editor.string.len()];\n editor.offset += 1;\n },\n _ => {},\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Add `O` command<commit_after>use redox::*;\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match key_event.character {\n '0' if !no_mult => times *= 10,\n\n '1' if no_mult => times = 1,\n '1' => times = times * 10 + 1,\n\n '2' if no_mult => times = 2,\n '2' => times = times * 10 + 2,\n\n '3' if no_mult => times = 3,\n '3' => times = times * 10 + 3,\n\n '4' if no_mult => times = 4,\n '4' => times = times * 10 + 4,\n\n '5' if no_mult => times = 5,\n '5' => times = times * 10 + 5,\n\n '6' if no_mult => times = 6,\n '6' => times = times * 10 + 6,\n\n '7' if no_mult => times = 7,\n '7' => times = times * 10 + 7,\n\n '8' if no_mult => times = 8,\n '8' => times = times * 10 + 8,\n\n '9' if no_mult => times = 9,\n '9' => times = times * 10 + 9,\n _ => {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') => editor.down(),\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'o') => {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '$',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: 'i',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '\\n',\n scancode: 0,\n pressed: true,\n }, window);\n },\n (Normal, 'O') => {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: 'k',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '$',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: 'i',\n scancode: 0,\n pressed: true,\n }, window);\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: '\\n',\n scancode: 0,\n pressed: true,\n }, window);\n },\n (Normal, '$') => {\n let mut new_offset = editor.string.len();\n for i in editor.offset..editor.string.len() {\n match editor.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i;\n break;\n }\n _ => (),\n }\n }\n editor.offset = 
new_offset;\n },\n (Normal, '0') => {\n\n let mut new_offset = 0;\n for i in 2..editor.offset {\n match editor.string.as_bytes()[editor.offset - i] {\n 0 => break,\n 10 => {\n new_offset = editor.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n editor.offset = new_offset;\n },\n (Insert, '\\0') => (),\n (Insert, _) => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &editor.url, \") Changed\"));\n editor.string = editor.string[0 .. editor.offset].to_string() +\n &key_event.character.to_string() +\n &editor.string[editor.offset .. editor.string.len()];\n editor.offset += 1;\n },\n _ => {},\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before> use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. 
times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') | (Normal, ' ') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') | (Normal, 'H') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') | (Normal, 'L') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Fix space alias<commit_after> use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') 
=> times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') | (Normal, ' ') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') | (Normal, 'H') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') | (Normal, 'L') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < 
editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add integration testing setup<commit_after>\/\/! 
The integration suite for inspecting sessions.\n\nextern crate libc;\nextern crate rand;\n\nuse std::process::Command;\nuse libc::system;\nuse std::ffi::CString;\nuse rand::random;\nuse std::fs::File;\nuse std::fs;\nuse std::path::{Path, PathBuf};\nuse std::io::prelude::*;\nuse std::env::home_dir;\n\nfn homedir() -> Result<PathBuf, String>{\n match home_dir() {\n Some(dir) => Ok(dir),\n None => Err(String::from(\"We couldn't find your home directory.\"))\n }\n}\n\n\/\/\/ List windows will give details about the active sessions in testing.\n\/\/\/ target: A string represented by the {named_session}:{named_window}\nfn list_windows(target: &String) -> String {\n let output = Command::new(\"tmux\")\n .arg(\"list-windows\")\n .arg(format!(\"-t {}\", target))\n .output()\n .unwrap_or_else(|e| { panic!(\"failed to execute process: {}\", e) });\n\n String::from_utf8_lossy(&output.stdout).into_owned()\n}\n\n#[test]\nfn list_3_windows() {\n let name = random::<u16>();\n let home = homedir().unwrap();\n let name1 = format!(\"{}\/.{}\/{}.yml\", home.display(), \"muxed\", name);\n let path = Path::new(&name1);\n let _ = fs::create_dir(Path::new(&format!(\"{}\/.muxed\/\", home.display())));\n let mut buffer = File::create(path).unwrap();\n let _ = buffer.write(b\"---\nwindows: ['cargo', 'vim', 'git']\n\");\n\n let line = format!(\".\/target\/debug\/muxed {}\", name);\n let system_call = CString::new(line.clone()).unwrap();\n \/\/unsafe { system(system_call.as_ptr()); };\n\n let _ = fs::remove_file(path);\n let result = list_windows(&name.to_string());\n \/\/assert_eq!(result, \"hi\")\n assert!(true)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add root-level integration tests<commit_after>#![feature(box_syntax,box_patterns)]\n#![feature(compile)]\n#![feature(scheme)]\n\n#[macro_use]\nextern crate seax_svm as svm;\nextern crate seax_scheme as scheme;\n\nuse svm::slist::Stack;\nuse svm::slist::List::{Cons,Nil};\nuse svm::cell::Atom::*;\nuse svm::cell::SVMCell::*;\n\n\/\/\/ Test for simple list construction through CONS.\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (cons 10 (cons 20 nil))\n\/\/\/ ==> (10 . 
20)\n\/\/\/ ```\n#[test]\nfn run_list_construction() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(cons 10 (cons 20 nil))\").unwrap()).peek(),\n Some(&ListCell( box list!(AtomCell(SInt(10)), AtomCell(SInt(20))) ))\n )\n}\n\n\n\/\/\/ Test for simple list construction and deconstruction\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (car (cons 20 (cons 10 nil)))\n\/\/\/ ==> 20\n\/\/\/ ```\n#[test]\nfn run_list_car() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(car (cons 20 (cons 10 nil)))\").unwrap()).peek(),\n Some(&AtomCell(SInt(20)))\n )\n}\n\n\/\/\/ Test for simple list construction and destructuring\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (cdr (cons 20 (cons 10 nil)))\n\/\/\/ ==> (10)\n\/\/\/ ```\n#[test]\nfn run_list_cdr() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(cdr (cons 20 (cons 10 nil)))\").unwrap()).peek(),\n Some(&ListCell(box list!(AtomCell(SInt(10)))))\n )\n}\n\n\/\/\/ Test for simple mathematics application\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (+ 10 10)\n\/\/\/ ==> 20\n\/\/\/ ```\n#[test]\nfn run_simple_add() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(+ 10 10)\").unwrap()).peek(),\n Some(&AtomCell(SInt(20)))\n )\n}\n\n\/\/\/ Test for nested arithmetic\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (- 20 (+ 5 5))\n\/\/\/ ==> 10\n\/\/\/ ```\n#[test]\nfn run_nested_arith() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(- 20 (+ 5 5))\").unwrap()).peek(),\n Some(&AtomCell(SInt(10)))\n )\n}\n\n\/\/\/ Test for basic branching with `if` expressions.\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ ((if (= 0 (- 1 1)) #t #f)\n\/\/\/ ==> #t\n\/\/\/ ```\n#[test]\nfn run_basic_branching_1() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(if (= 0 (- 1 1)) #t #f)\").unwrap()).peek(),\n Some(&AtomCell(SInt(1)))\n )\n}\n\n\/\/\/ Test for basic branching with `if` expressions.\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (+ 10 (if (nil? nil) 10 20))\n\/\/\/ ==> 20\n\/\/\/ ```\n#[test]\nfn run_basic_branching_2() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"(+ 10 (if (nil? 
nil) 10 20))\").unwrap()).peek(),\n Some(&AtomCell(SInt(20)))\n )\n}\n\n\/\/\/ Test for applying a lambda expression\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ ((lambda (x y) (+ x y)) 2 3)\n\/\/\/ ==> 5\n\/\/\/ ```\n#[test]\nfn run_lambda_ap() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"((lambda (x y) (+ x y)) 2 3)\").unwrap()).peek(),\n Some(&AtomCell(SInt(5)))\n )\n}\n\n\/\/\/ Test for applying an expression with nested lambdas\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ ((lambda (z) ((lambda (x y) (+ (- x y) z)) 3 5)) 6)\n\/\/\/ ==> 4\n\/\/\/ ```\n#[test]\nfn run_nested_lambda() {\n assert_eq!(\n svm::eval_program(scheme::compile(\"((lambda (z) ((lambda (x y) (+ (- x y) z)) 3 5)) 6)\").unwrap()).peek(),\n Some(&AtomCell(SInt(4)))\n )\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>more modifiers<commit_after><|endoftext|>"} {"text":"<commit_before>use core::cmp::Ordering;\nuse core::ops::{Add, Sub};\n\nuse scheduler;\n\npub const NANOS_PER_MICRO: i32 = 1000;\npub const NANOS_PER_MILLI: i32 = 1000000;\npub const NANOS_PER_SEC: i32 = 1000000000;\n\n\/\/\/ A duration\n#[derive(Copy, Clone)]\npub struct Duration {\n \/\/\/ The seconds\n pub secs: i64,\n \/\/\/ The nano seconds\n pub nanos: i32,\n}\n\nimpl Duration {\n \/\/\/ Create a new duration\n pub fn new(mut secs: i64, mut nanos: i32) -> Self {\n while nanos >= NANOS_PER_SEC || (nanos > 0 && secs < 0) {\n secs += 1;\n nanos -= NANOS_PER_SEC;\n }\n\n while nanos < 0 && secs > 0 {\n secs -= 1;\n nanos += NANOS_PER_SEC;\n }\n\n Duration {\n secs: secs,\n nanos: nanos,\n }\n }\n\n \/\/\/ Get the current duration\n pub fn monotonic() -> Self {\n let ret;\n unsafe {\n let reenable = scheduler::start_no_ints();\n ret = ::clock_monotonic;\n scheduler::end_no_ints(reenable);\n }\n ret\n }\n\n \/\/\/ Get the realtime\n pub fn realtime() -> Self {\n let ret;\n unsafe {\n let reenable = scheduler::start_no_ints();\n ret = ::clock_realtime;\n scheduler::end_no_ints(reenable);\n }\n ret\n }\n\n \/\/\/ Sleep the duration\n pub fn sleep(&self) {\n let start_time = Duration::monotonic();\n loop {\n let elapsed = Duration::monotonic() - start_time;\n if elapsed > *self {\n break;\n } else {\n unsafe {\n let disable = scheduler::start_ints();\n scheduler::end_ints(disable);\n }\n }\n }\n }\n}\n\nimpl Add for Duration {\n type Output = Duration;\n\n fn add(self, other: Self) -> Self {\n Duration::new(self.secs + other.secs, self.nanos + other.nanos)\n }\n}\n\nimpl Sub for Duration {\n type Output = Duration;\n\n fn sub(self, other: Self) -> Self {\n Duration::new(self.secs - other.secs, self.nanos - other.nanos)\n }\n}\n\nimpl PartialEq for Duration {\n fn eq(&self, other: &Self) -> bool {\n let dif = *self - *other;\n dif.secs == 0 && dif.nanos == 0\n }\n}\n\nimpl PartialOrd for Duration {\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n let dif = *self - *other;\n if dif.secs > 0 {\n Some(Ordering::Greater)\n } else if dif.secs < 0 {\n Some(Ordering::Less)\n } else if dif.nanos > 0 {\n Some(Ordering::Greater)\n } else if dif.nanos < 0 {\n Some(Ordering::Less)\n } else {\n Some(Ordering::Equal)\n }\n }\n}\n<commit_msg>Context switch in sleep loop<commit_after>use core::cmp::Ordering;\nuse core::ops::{Add, Sub};\n\nuse scheduler;\nuse scheduler::context;\n\npub const NANOS_PER_MICRO: i32 = 1000;\npub const NANOS_PER_MILLI: i32 = 1000000;\npub const NANOS_PER_SEC: i32 = 1000000000;\n\n\/\/\/ A duration\n#[derive(Copy, Clone)]\npub struct Duration {\n \/\/\/ The seconds\n pub secs: i64,\n \/\/\/ The nano seconds\n pub nanos: i32,\n}\n\nimpl Duration 
{\n \/\/\/ Create a new duration\n pub fn new(mut secs: i64, mut nanos: i32) -> Self {\n while nanos >= NANOS_PER_SEC || (nanos > 0 && secs < 0) {\n secs += 1;\n nanos -= NANOS_PER_SEC;\n }\n\n while nanos < 0 && secs > 0 {\n secs -= 1;\n nanos += NANOS_PER_SEC;\n }\n\n Duration {\n secs: secs,\n nanos: nanos,\n }\n }\n\n \/\/\/ Get the current duration\n pub fn monotonic() -> Self {\n let ret;\n unsafe {\n let reenable = scheduler::start_no_ints();\n ret = ::clock_monotonic;\n scheduler::end_no_ints(reenable);\n }\n ret\n }\n\n \/\/\/ Get the realtime\n pub fn realtime() -> Self {\n let ret;\n unsafe {\n let reenable = scheduler::start_no_ints();\n ret = ::clock_realtime;\n scheduler::end_no_ints(reenable);\n }\n ret\n }\n\n \/\/\/ Sleep the duration\n pub fn sleep(&self) {\n let start_time = Duration::monotonic();\n loop {\n let elapsed = Duration::monotonic() - start_time;\n if elapsed > *self {\n break;\n } else {\n unsafe { context::context_switch(false) };\n }\n }\n }\n}\n\nimpl Add for Duration {\n type Output = Duration;\n\n fn add(self, other: Self) -> Self {\n Duration::new(self.secs + other.secs, self.nanos + other.nanos)\n }\n}\n\nimpl Sub for Duration {\n type Output = Duration;\n\n fn sub(self, other: Self) -> Self {\n Duration::new(self.secs - other.secs, self.nanos - other.nanos)\n }\n}\n\nimpl PartialEq for Duration {\n fn eq(&self, other: &Self) -> bool {\n let dif = *self - *other;\n dif.secs == 0 && dif.nanos == 0\n }\n}\n\nimpl PartialOrd for Duration {\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n let dif = *self - *other;\n if dif.secs > 0 {\n Some(Ordering::Greater)\n } else if dif.secs < 0 {\n Some(Ordering::Less)\n } else if dif.nanos > 0 {\n Some(Ordering::Greater)\n } else if dif.nanos < 0 {\n Some(Ordering::Less)\n } else {\n Some(Ordering::Equal)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem 31<commit_after>fn is_prime(n: int) -> bool {\n if n <= 2 {\n return n == 2;\n }\n let mut primes = ~[2];\n let mut i = 1;\n while std::num::pow(i, 2) < n {\n let &p = primes.last().unwrap();\n if n % p == 0 {\n return false;\n }\n i += 2;\n if primes.iter().all(|&x| i%x != 0) {\n primes.push(i);\n }\n }\n true\n}\n\nfn main() {\n for n in range(0, 100) {\n if is_prime(n) {\n println!(\"{}\", n);\n }\n }\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Send message on firest connection on server<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test<commit_after>\/\/ only-linux: Uses Linux-only APIs\n\n#![feature(rustc_private)]\nextern crate libc;\n\nfn main() {\n let mut buf = [0u8; 5];\n unsafe {\n\t\tassert_eq!(libc::syscall(libc::SYS_getrandom, 0 as *mut libc::c_void, 0 as libc::size_t, 0 as libc::c_uint), 0);\n assert_eq!(libc::syscall(libc::SYS_getrandom, buf.as_mut_ptr() as *mut libc::c_void, 5 as libc::size_t, 0 as libc::c_uint), 5);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix breakage from nickel change<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[No-auto] bin\/core\/ref: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libc;\n\nuse self::libc::{c_char, c_uchar};\n\n\/\/\/ Signs and sends transaction message to validator pool.\n\/\/\/\n\/\/\/ Adds submitter information to passed transaction json, signs it with submitter\n\/\/\/ sign key (see wallet_sign_by_my_did), and sends signed transaction message\n\/\/\/ to validator pool (see ledger_write_txn).\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client 
instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ txn_json: Transaction data json.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn ledger_sign_and_send_txn(client_handle: i32, command_handle: i32,\n submitter_did: *const c_char, txn_json: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Sends transaction message to validator pool (no signing, unlike ledger_sign_and_write_txn).\n\/\/\/\n\/\/\/ The transaction is sent to the validator pool as is. It's assumed that it's already prepared.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ txn_json: Transaction data json.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn ledger_send_txn(client_handle: i32, command_handle: i32,\n txn_json: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\n\/\/\/ Creates and optionally signs a txn to get a DDO.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_get_ddo_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, target_did: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\n\n\/\/\/ Creates and optionally signs NYM transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_nym_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char,\n target_did: *const c_char,\n verkey: *const c_char, xref: *const c_char,\n data: *const c_char, role: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs ATTRIB transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_attrib_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, target_did: *const c_char,\n hash: *const c_char, raw: *const c_char, enc: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_ATTRIB transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\npub extern fn create_get_attrib_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, target_did: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_NYM transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern 
fn create_get_nym_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, target_did: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs SCHEMA transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_schema_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_SCHEMA transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_get_schema_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs ISSUER_KEY transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_issuer_key_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, xref: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_ISSUER_KEY transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\npub extern fn crate_get_issuer_key_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, xref: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs NODE transaction.\n\/\/\/ Call ledger_send_txn or ledger_sign_and_send_txn to send txn to the Validator Pool.\n#[no_mangle]\npub extern fn create_node_txn(client_handle: i32, command_handle: i32,\n sign: int,\n submitter_did: *const c_char, target_did: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}<commit_msg>added documentation for ledger api<commit_after>extern crate libc;\n\nuse self::libc::{c_char, c_uchar};\n\n\/\/\/ Signs and sends transaction message to validator pool.\n\/\/\/\n\/\/\/ Adds submitter information to passed transaction json, signs it with submitter\n\/\/\/ sign key (see wallet_sign_by_my_did), and sends signed transaction message\n\/\/\/ to validator pool (see ledger_write_txn).\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ txn_json: Transaction data json.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn ledger_sign_and_send_txn(client_handle: i32, command_handle: i32,\n submitter_did: *const c_char, txn_json: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Sends transaction message to validator 
pool (no signing, unlike ledger_sign_and_write_txn).\n\/\/\/\n\/\/\/ The transaction is sent to the validator pool as is. It's assumed that it's already prepared.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ txn_json: Transaction data json.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn ledger_send_txn(client_handle: i32, command_handle: i32,\n txn_json: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\n\/\/\/ Creates and optionally signs a txn to get a DDO.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_get_ddo_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, target_did: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\n\n\/\/\/ Creates and optionally signs NYM transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ verkey:\n\/\/\/ xref:\n\/\/\/ data:\n\/\/\/ role:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_nym_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char,\n target_did: *const c_char,\n verkey: *const c_char, xref: *const c_char,\n data: *const c_char, role: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs ATTRIB transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ hash:\n\/\/\/ raw:\n\/\/\/ enc:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_attrib_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, target_did: *const c_char,\n hash: *const c_char, raw: *const c_char, enc: *const c_char,\n cb: extern fn(xcommand_handle: i32, 
err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_ATTRIB transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ data:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\npub extern fn create_get_attrib_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, target_did: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_NYM transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_get_nym_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, target_did: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs SCHEMA transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ data:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_schema_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_SCHEMA transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ data:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_get_schema_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs ISSUER_KEY transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: 
command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ xref:\n\/\/\/ data:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_issuer_key_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, xref: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_result_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs GET_ISSUER_KEY transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ xref:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\npub extern fn crate_get_issuer_key_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, xref: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}\n\n\/\/\/ Creates and optionally signs NODE transaction.\n\/\/\/\n\/\/\/ #Params\n\/\/\/ client_handle: id of Ledger client instance.\n\/\/\/ command_handle: command id to map of callback to user context.\n\/\/\/ sign:\n\/\/\/ submitter_did: Id of Identity stored in secured Wallet.\n\/\/\/ target_did: Id of Identity stored in secured Wallet.\n\/\/\/ data:\n\/\/\/ cb: Callback that takes command result as parameter.\n\/\/\/\n\/\/\/ #Returns\n\/\/\/ Transaction result as json.\n\/\/\/\n\/\/\/ #Errors\n\/\/\/ No method specific errors.\n\/\/\/ See `LedgerError` docs for common errors description.\n#[no_mangle]\npub extern fn create_node_txn(client_handle: i32, command_handle: i32, sign: i32,\n submitter_did: *const c_char, target_did: *const c_char,\n data: *const c_char,\n cb: extern fn(xcommand_handle: i32, err: i32,\n txn_json: *const c_char)) {\n unimplemented!();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Negation error<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>imag-contact: Replace read with typed read<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add identify<commit_after>\/\/ (c) 2016 Productize SPRL <joost@productize.be>\n\nextern crate rustykicad;\n\nfn main() { \n let mut args = std::env::args();\n args.next();\n let name = args.next().unwrap();\n let f = rustykicad::read_kicad_file(&name, rustykicad::Expected::Any).unwrap();\n println!(\"found: {}\", f);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a codegen test for a recent optimization for overflow-checks=on<commit_after>\/\/ no-system-llvm\n\/\/ compile-flags: -O -C overflow-checks=on\n\n#![crate_type = \"lib\"]\n\n\npub struct S1<'a> {\n data: &'a [u8],\n position: usize,\n}\n\n\/\/ CHECK-LABEL: @slice_no_index_order\n#[no_mangle]\npub fn slice_no_index_order<'a>(s: &'a mut S1, n: usize) -> &'a [u8] {\n \/\/ CHECK-NOT: slice_index_order_fail\n let d = &s.data[s.position..s.position+n];\n s.position += n;\n return d;\n}\n\n\/\/ CHECK-LABEL: @test_check\n#[no_mangle]\npub fn test_check<'a>(s: &'a mut 
S1, x: usize, y: usize) -> &'a [u8] {\n \/\/ CHECK: slice_index_order_fail\n &s.data[x..y]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #17994<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Tr {}\ntype Huh<T> where T: Tr = isize; \/\/~ ERROR type parameter `T` is unused\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add models for Clue and Category<commit_after>use chrono::*;\r\n\r\npub struct Clue {\r\n pub id: u64,\r\n pub answer: String,\r\n pub question: String,\r\n pub value: i32,\r\n pub airdate: DateTime<UTC>,\r\n pub created_at: Option<DateTime<UTC>>,\r\n pub updated_at: Option<DateTime<UTC>>,\r\n pub category_id: u64,\r\n pub game_id: Option<u64>,\r\n pub invalid_count: Option<u32>,\r\n pub category: Option<Category>,\r\n}\r\n\r\npub struct Category {\r\n pub id: u64,\r\n pub title: String,\r\n pub created_at: Option<DateTime<UTC>>,\r\n pub updated_at: Option<DateTime<UTC>>,\r\n pub clues_count: u32,\r\n pub clues: Option<Vec<Clue>>,\r\n}\r\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add version\/build info to panic! output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added Condition struct for having, refactored filter to use condition<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update remove() and delete().<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! HTTP Server\nuse std::io::net::tcp::{TcpListener, TcpAcceptor};\nuse std::io::{Acceptor, Listener, IoResult, EndOfFile};\nuse std::io::net::ip::{IpAddr, Port, SocketAddr};\n\npub use self::request::Request;\npub use self::response::Response;\n\npub mod request;\npub mod response;\n\n\/\/\/ A server can listen on a TCP socket.\n\/\/\/\n\/\/\/ Once listening, it will create a `Request`\/`Response` pair for each\n\/\/\/ incoming connection, and hand them to the provided handler.\npub struct Server {\n ip: IpAddr,\n port: Port\n}\n\n\nimpl Server {\n\n \/\/\/ Creates a server to be used for `http` conenctions.\n pub fn http(ip: IpAddr, port: Port) -> Server {\n Server {\n ip: ip,\n port: port\n }\n }\n\n \/\/\/ Binds to a socket, and starts handling connections.\n pub fn listen<H: Handler + 'static>(&self, mut handler: H) -> IoResult<Listening> {\n let mut listener = try!(TcpListener::bind(self.ip.to_string().as_slice(), self.port));\n let socket = try!(listener.socket_name());\n let acceptor = try!(listener.listen());\n let worker = acceptor.clone();\n\n spawn(proc() {\n let mut acceptor = worker;\n for conn in acceptor.incoming() {\n match conn {\n Ok(stream) => {\n debug!(\"Incoming stream\");\n let clone = stream.clone();\n let req = match Request::new(stream) {\n Ok(r) => r,\n Err(err) => {\n error!(\"creating Request: {}\", err);\n continue;\n }\n };\n let mut res = Response::new(clone);\n res.version = req.version;\n match handler.handle(req, res) {\n Ok(..) 
=> debug!(\"Stream handled\"),\n Err(e) => {\n error!(\"Error from handler: {}\", e)\n \/\/TODO try to send a status code\n }\n }\n },\n Err(ref e) if e.kind == EndOfFile => break, \/\/ server closed\n Err(e) => {\n error!(\"Connection failed: {}\", e);\n }\n }\n }\n });\n\n Ok(Listening {\n acceptor: acceptor,\n socket_addr: socket,\n })\n }\n\n}\n\n\/\/\/ A listening server, which can later be closed.\npub struct Listening {\n acceptor: TcpAcceptor,\n \/\/\/ The socket address that the server is bound to.\n pub socket_addr: SocketAddr,\n}\n\nimpl Listening {\n \/\/\/ Stop the server from listening to it's socket address.\n pub fn close(mut self) -> IoResult<()> {\n debug!(\"closing server\");\n self.acceptor.close_accept()\n }\n}\n\n\/\/\/ A handler that can handle incoming requests for a server.\npub trait Handler: Send {\n \/\/\/ Receives a `Request`\/`Response` pair, and should perform some action on them.\n \/\/\/\n \/\/\/ This could reading from the request, and writing to the response.\n fn handle(&mut self, req: Request, res: Response) -> IoResult<()>;\n}\n\nimpl Handler for fn(Request, Response) -> IoResult<()> {\n fn handle(&mut self, req: Request, res: Response) -> IoResult<()> {\n (*self)(req, res)\n }\n}\n<commit_msg>consume Server upon listen<commit_after>\/\/! HTTP Server\nuse std::io::net::tcp::{TcpListener, TcpAcceptor};\nuse std::io::{Acceptor, Listener, IoResult, EndOfFile};\nuse std::io::net::ip::{IpAddr, Port, SocketAddr};\n\npub use self::request::Request;\npub use self::response::Response;\n\npub mod request;\npub mod response;\n\n\/\/\/ A server can listen on a TCP socket.\n\/\/\/\n\/\/\/ Once listening, it will create a `Request`\/`Response` pair for each\n\/\/\/ incoming connection, and hand them to the provided handler.\npub struct Server {\n ip: IpAddr,\n port: Port\n}\n\n\nimpl Server {\n\n \/\/\/ Creates a server to be used for `http` conenctions.\n pub fn http(ip: IpAddr, port: Port) -> Server {\n Server {\n ip: ip,\n port: port\n }\n }\n\n \/\/\/ Binds to a socket, and starts handling connections.\n pub fn listen<H: Handler + 'static>(self, mut handler: H) -> IoResult<Listening> {\n let mut listener = try!(TcpListener::bind(self.ip.to_string().as_slice(), self.port));\n let socket = try!(listener.socket_name());\n let acceptor = try!(listener.listen());\n let worker = acceptor.clone();\n\n spawn(proc() {\n let mut acceptor = worker;\n for conn in acceptor.incoming() {\n match conn {\n Ok(stream) => {\n debug!(\"Incoming stream\");\n let clone = stream.clone();\n let req = match Request::new(stream) {\n Ok(r) => r,\n Err(err) => {\n error!(\"creating Request: {}\", err);\n continue;\n }\n };\n let mut res = Response::new(clone);\n res.version = req.version;\n match handler.handle(req, res) {\n Ok(..) 
=> debug!(\"Stream handled\"),\n Err(e) => {\n error!(\"Error from handler: {}\", e)\n \/\/TODO try to send a status code\n }\n }\n },\n Err(ref e) if e.kind == EndOfFile => break, \/\/ server closed\n Err(e) => {\n error!(\"Connection failed: {}\", e);\n }\n }\n }\n });\n\n Ok(Listening {\n acceptor: acceptor,\n socket_addr: socket,\n })\n }\n\n}\n\n\/\/\/ A listening server, which can later be closed.\npub struct Listening {\n acceptor: TcpAcceptor,\n \/\/\/ The socket address that the server is bound to.\n pub socket_addr: SocketAddr,\n}\n\nimpl Listening {\n \/\/\/ Stop the server from listening to it's socket address.\n pub fn close(mut self) -> IoResult<()> {\n debug!(\"closing server\");\n self.acceptor.close_accept()\n }\n}\n\n\/\/\/ A handler that can handle incoming requests for a server.\npub trait Handler: Send {\n \/\/\/ Receives a `Request`\/`Response` pair, and should perform some action on them.\n \/\/\/\n \/\/\/ This could reading from the request, and writing to the response.\n fn handle(&mut self, req: Request, res: Response) -> IoResult<()>;\n}\n\nimpl Handler for fn(Request, Response) -> IoResult<()> {\n fn handle(&mut self, req: Request, res: Response) -> IoResult<()> {\n (*self)(req, res)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>debuginfo: Add test case for destructured for-loop variable.<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\/\/ min-lldb-version: 310\n\n\/\/ compile-flags:-g\n\n\/\/ === GDB TESTS ===================================================================================\n\n\/\/ gdb-command:run\n\n\/\/ DESTRUCTURED STRUCT\n\/\/ gdb-command:print x\n\/\/ gdb-check:$1 = 400\n\/\/ gdb-command:print y\n\/\/ gdb-check:$2 = 401.5\n\/\/ gdb-command:print z\n\/\/ gdb-check:$3 = true\n\/\/ gdb-command:continue\n\n\/\/ DESTRUCTURED TUPLE\n\/\/ gdb-command:print\/x _i8\n\/\/ gdb-check:$4 = 0x6f\n\/\/ gdb-command:print\/x _u8\n\/\/ gdb-check:$5 = 0x70\n\/\/ gdb-command:print _i16\n\/\/ gdb-check:$6 = -113\n\/\/ gdb-command:print _u16\n\/\/ gdb-check:$7 = 114\n\/\/ gdb-command:print _i32\n\/\/ gdb-check:$8 = -115\n\/\/ gdb-command:print _u32\n\/\/ gdb-check:$9 = 116\n\/\/ gdb-command:print _i64\n\/\/ gdb-check:$10 = -117\n\/\/ gdb-command:print _u64\n\/\/ gdb-check:$11 = 118\n\/\/ gdb-command:print _f32\n\/\/ gdb-check:$12 = 119.5\n\/\/ gdb-command:print _f64\n\/\/ gdb-check:$13 = 120.5\n\/\/ gdb-command:continue\n\n\/\/ MORE COMPLEX CASE\n\/\/ gdb-command:print v1\n\/\/ gdb-check:$14 = 80000\n\/\/ gdb-command:print x1\n\/\/ gdb-check:$15 = 8000\n\/\/ gdb-command:print *y1\n\/\/ gdb-check:$16 = 80001.5\n\/\/ gdb-command:print z1\n\/\/ gdb-check:$17 = false\n\/\/ gdb-command:print *x2\n\/\/ gdb-check:$18 = -30000\n\/\/ gdb-command:print y2\n\/\/ gdb-check:$19 = -300001.5\n\/\/ gdb-command:print *z2\n\/\/ gdb-check:$20 = true\n\/\/ gdb-command:print v2\n\/\/ gdb-check:$21 = 854237.5\n\/\/ gdb-command:continue\n\n\n\/\/ === LLDB TESTS ==================================================================================\n\n\/\/ lldb-command:type format add --format hex char\n\/\/ lldb-command:type format add --format hex 'unsigned char'\n\n\/\/ lldb-command:run\n\n\/\/ DESTRUCTURED STRUCT\n\/\/ lldb-command:print x\n\/\/ lldb-check:[...]$0 = 400\n\/\/ lldb-command:print y\n\/\/ lldb-check:[...]$1 = 401.5\n\/\/ lldb-command:print z\n\/\/ lldb-check:[...]$2 = true\n\/\/ lldb-command:continue\n\n\/\/ DESTRUCTURED TUPLE\n\/\/ lldb-command:print _i8\n\/\/ lldb-check:[...]$3 = 0x6f\n\/\/ lldb-command:print _u8\n\/\/ lldb-check:[...]$4 = 0x70\n\/\/ lldb-command:print _i16\n\/\/ lldb-check:[...]$5 = -113\n\/\/ lldb-command:print _u16\n\/\/ lldb-check:[...]$6 = 114\n\/\/ lldb-command:print _i32\n\/\/ lldb-check:[...]$7 = -115\n\/\/ lldb-command:print _u32\n\/\/ lldb-check:[...]$8 = 116\n\/\/ lldb-command:print _i64\n\/\/ lldb-check:[...]$9 = -117\n\/\/ lldb-command:print _u64\n\/\/ lldb-check:[...]$10 = 118\n\/\/ lldb-command:print _f32\n\/\/ lldb-check:[...]$11 = 119.5\n\/\/ lldb-command:print _f64\n\/\/ lldb-check:[...]$12 = 120.5\n\/\/ lldb-command:continue\n\n\/\/ MORE COMPLEX CASE\n\/\/ lldb-command:print v1\n\/\/ lldb-check:[...]$13 = 80000\n\/\/ lldb-command:print x1\n\/\/ lldb-check:[...]$14 = 8000\n\/\/ lldb-command:print *y1\n\/\/ lldb-check:[...]$15 = 80001.5\n\/\/ lldb-command:print z1\n\/\/ lldb-check:[...]$16 = false\n\/\/ lldb-command:print *x2\n\/\/ lldb-check:[...]$17 = -30000\n\/\/ lldb-command:print y2\n\/\/ lldb-check:[...]$18 = -300001.5\n\/\/ lldb-command:print *z2\n\/\/ lldb-check:[...]$19 = true\n\/\/ lldb-command:print v2\n\/\/ lldb-check:[...]$20 = 854237.5\n\/\/ lldb-command:continue\n\n\nstruct Struct {\n x: i16,\n y: f32,\n z: bool\n}\n\nfn main() {\n\n let s = Struct {\n x: 400,\n y: 401.5,\n z: true\n };\n\n for &Struct { x, y, z } in [s].iter() {\n zzz(); \/\/ #break\n 
}\n\n let tuple: (i8, u8, i16, u16, i32, u32, i64, u64, f32, f64) =\n (0x6f, 0x70, -113, 114, -115, 116, -117, 118, 119.5, 120.5);\n\n for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in [tuple].iter() {\n zzz(); \/\/ #break\n }\n\n let more_complex: (i32, &Struct, Struct, Box<f64>) =\n (80000,\n &Struct {\n x: 8000,\n y: 80001.5,\n z: false\n },\n Struct {\n x: -30000,\n y: -300001.5,\n z: true\n },\n box 854237.5);\n\n for &(v1,\n &Struct { x: x1, y: ref y1, z: z1 },\n Struct { x: ref x2, y: y2, z: ref z2 },\n box v2) in [more_complex].iter() {\n zzz(); \/\/ #break\n }\n}\n\nfn zzz() {()}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>autocommit 2015-06-02 01:56:46 CEST<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n#[prelude_import] use base::prelude::*;\nuse syscall::{getresuid, getresgid, setresuid, setresgid, setgroups, getgroups};\nuse base::error::{self};\nuse cty::alias::{UserId, GroupId};\nuse fmt::{Debug, Write};\n\n\/\/\/ User ids of a thread.\n#[derive(Pod, Eq)]\npub struct UserIds {\n \/\/\/ Real id\n pub real: UserId,\n \/\/\/ Effective id\n pub effective: UserId,\n \/\/\/ Saved id\n pub saved: UserId,\n}\n\nimpl UserIds {\n \/\/\/ Retrieves the user ids of this thread.\n pub fn get() -> UserIds {\n let mut ids = UserIds {\n real: 0,\n effective: 0,\n saved: 0,\n };\n getresuid(&mut ids.real, &mut ids.effective, &mut ids.saved);\n ids\n }\n\n \/\/\/ Sets the user ids of this thread.\n pub fn set(&self) -> Result {\n rv!(setresuid(self.real, self.effective, self.saved))\n }\n}\n\nimpl Debug for UserIds {\n fn fmt<W: Write>(&self, mut w: &mut W) -> Result {\n write!(w, \"UserIds {{ real: {}, effective: {}, saved {} }}\",\n self.real, self.effective, self.saved)\n }\n}\n\n\/\/\/ Group ids of a thread.\n#[derive(Pod, Eq)]\npub struct GroupIds {\n \/\/\/ Real id\n pub real: GroupId,\n \/\/\/ Effective id\n pub effective: GroupId,\n \/\/\/ Saved id\n pub saved: GroupId,\n}\n\nimpl GroupIds {\n \/\/\/ Retrieves the group ids of this thread.\n pub fn get() -> GroupIds {\n let mut ids = GroupIds {\n real: 0,\n effective: 0,\n saved: 0,\n };\n getresgid(&mut ids.real, &mut ids.effective, &mut ids.saved);\n ids\n }\n\n \/\/\/ Sets the group ids of this process.\n pub fn set(&self) -> Result {\n rv!(setresgid(self.real, self.effective, self.saved))\n }\n}\n\nimpl Debug for GroupIds {\n fn fmt<W: Write>(&self, mut w: &mut W) -> Result {\n write!(w, \"GroupIds {{ real: {}, effective: {}, saved {} }}\",\n self.real, self.effective, self.saved)\n }\n}\n\n\/\/\/ Sets all user ids of this thread to the real id.\npub fn drop_user_privileges() -> Result {\n let mut ids = UserIds::get();\n ids.effective = ids.real;\n ids.saved = ids.real;\n ids.set()\n}\n\n\/\/\/ Sets all group ids of this thread to the real id.\npub fn drop_group_privileges() -> Result {\n let mut ids = GroupIds::get();\n ids.effective = ids.real;\n ids.saved = ids.real;\n ids.set()\n}\n\n\/\/\/ Sets the effective user id of this thread.\n\/\/\/\n\/\/\/ [argument, id]\n\/\/\/ The new effective user id of the thread.\npub fn set_effective_user_id(id: UserId) -> Result {\n rv!(setresuid(-1, id, -1))\n}\n\n\/\/\/ Sets the effective group id of this thread.\n\/\/\/\n\/\/\/ [argument, id]\n\/\/\/ The new effective group id of the thread.\npub fn set_effective_group_id(id: GroupId) -> Result {\n rv!(setresgid(-1, id, 
-1))\n}\n\n\/\/\/ Returns the number of supplementary groups of this thread.\npub fn num_supplementary_groups() -> usize {\n getgroups(&mut []) as usize\n}\n\nconst MAX_SUP_GROUPS: usize = 65536;\n\n\/\/\/ Retrieves the supplementary groups of this thread.\n\/\/\/\n\/\/\/ [argument, buf]\n\/\/\/ The buffer in which the supplementary groups will be stored.\n\/\/\/\n\/\/\/ [return_value]\n\/\/\/ Returns the number of supplementary groups stored\npub fn supplementary_groups(buf: &mut [GroupId]) -> Result<usize> {\n if buf.len() > MAX_SUP_GROUPS {\n rv!(getgroups(&mut buf[..MAX_SUP_GROUPS]), -> usize)\n } else {\n rv!(getgroups(buf), -> usize)\n }\n}\n\n\/\/\/ Sets the supplementary groups of this thread.\n\/\/\/\n\/\/\/ [argument, buf]\n\/\/\/ The buffer which contains the new supplementary groups.\npub fn set_supplementary_groups(buf: &[GroupId]) -> Result {\n if buf.len() > MAX_SUP_GROUPS {\n return Err(error::InvalidArgument);\n }\n rv!(setgroups(buf))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add smol example<commit_after>use futures_util::future;\nuse lapin::{\n message::DeliveryResult, options::*, publisher_confirm::Confirmation, types::FieldTable,\n BasicProperties, Connection, ConnectionProperties, Result,\n};\nuse lapinou::*;\nuse log::info;\n\nfn main() -> Result<()> {\n env_logger::init();\n\n let addr = std::env::var(\"AMQP_ADDR\").unwrap_or_else(|_| \"amqp:\/\/127.0.0.1:5672\/%2f\".into());\n\n \/\/ spawn a thread pool\n for _ in 0..5 {\n std::thread::spawn(|| smol::run(future::pending::<()>()));\n }\n\n smol::run(async {\n let conn = Connection::connect(&addr, ConnectionProperties::default().with_smol()).await?;\n\n info!(\"CONNECTED\");\n\n let channel_a = conn.create_channel().await?;\n let channel_b = conn.create_channel().await?;\n\n let queue = channel_a\n .queue_declare(\n \"hello\",\n QueueDeclareOptions::default(),\n FieldTable::default(),\n )\n .await?;\n\n info!(\"Declared queue {:?}\", queue);\n\n let consumer = channel_b\n .clone()\n .basic_consume(\n \"hello\",\n \"my_consumer\",\n BasicConsumeOptions::default(),\n FieldTable::default(),\n )\n .await?;\n\n consumer.set_delegate(move |delivery: DeliveryResult| {\n let channel_b = channel_b.clone();\n async move {\n let delivery = delivery.expect(\"error caught in in consumer\");\n if let Some(delivery) = delivery {\n channel_b\n .basic_ack(delivery.delivery_tag, BasicAckOptions::default())\n .await\n .expect(\"failed to ack\");\n }\n }\n });\n\n let payload = b\"Hello world!\";\n\n loop {\n let confirm = channel_a\n .basic_publish(\n \"\",\n \"hello\",\n BasicPublishOptions::default(),\n payload.to_vec(),\n BasicProperties::default(),\n )\n .await?\n .await?;\n assert_eq!(confirm, Confirmation::NotRequested);\n }\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>new file: src\/event.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix ??? 
avatar loading<commit_after><|endoftext|>"} {"text":"<commit_before>#![allow(unused_variables)]\n\nextern crate sdl2;\n\n\/*struct SDL_handles<'a> {\n context: sdl2::sdl::Sdl,\n video: sdl2::sdl::VideoSubsystem,\n renderer: sdl2::render::Renderer<'a>,\n}*\/\n\npub fn bootstrap<'a>(win_width: i32, win_height: i32, win_name: &str) -> (sdl2::render::Renderer<'a>, sdl2::EventPump) {\n let sdl_context = sdl2::init().unwrap(); \/\/ context\n let sdl_video = sdl_context.video().unwrap(); \/\/ video\n let mut event_pump = sdl_context.event_pump().unwrap();\n\n \/\/ Make a new window\n let window = sdl_video.window(win_name, 1280, 720)\n .position_centered()\n .opengl()\n .build()\n .expect(\"Failed on creating a new window!\");\n \n \/\/ turn window into a renderer, cannot do anything with window from now on.\n let mut renderer = window.renderer().build().unwrap(); \n\n (renderer, event_pump)\n}<commit_msg>Added an Engine struct to store variables used to drawing.<commit_after>#![allow(unused_variables)]\n\nextern crate sdl2;\n\nuse super::vid;\n\n\/*struct SDL_handles<'a> {\n context: sdl2::sdl::Sdl,\n video: sdl2::sdl::VideoSubsystem,\n renderer: sdl2::render::Renderer<'a>,\n}*\/\n\npub struct Engine<'a> {\n pub screen_x: u32,\n pub screen_y: u32,\n\n pub camera_x: f32,\n pub camera_y: f32,\n pub camera_z: f32,\n\n pub camera_x_y: f32,\n pub camera_x_z: f32,\n pub camera_y_z: f32,\n\n pub renderer: sdl2::render::Renderer<'a>,\n\n pub event_pump: sdl2::EventPump,\n\n pub render_queue: Vec<vid::Triangle>,\n}\n\nimpl<'a> Engine<'a> {\n pub fn new(screen_x: u32, screen_y: u32, window_name: String) -> Engine<'a> {\n let sdl_ctx = sdl2::init().unwrap();\n let sdl_vid = sdl_ctx.video().unwrap();\n\n let sdl_win = sdl_vid.window(&window_name, screen_x, screen_y)\n .position_centered()\n .opengl()\n .build()\n .expect(\"Failed on creating a new window!\");\n Engine {\n screen_x: screen_x,\n screen_y: screen_y,\n\n camera_x: 0.0,\n camera_y: 0.0,\n camera_z: 0.0,\n\n camera_x_y: 0.0,\n camera_x_z: 0.0,\n camera_y_z: 0.0,\n\n renderer: sdl_win.renderer().build().unwrap(),\n\n event_pump: sdl_ctx.event_pump().unwrap(),\n\n render_queue: Vec::new(),\n }\n }\n\n pub fn render(&mut self) {\n for mut triangle in &mut self.render_queue {\n let flat_1 = triangle.p1.flat_point(self.screen_x, self.screen_y, \n triangle.x + self.camera_x, \n triangle.y + self.camera_y,\n triangle.z + self.camera_z).make_sdl();\n let flat_2 = triangle.p2.flat_point(self.screen_x, self.screen_y,\n triangle.x + self.camera_x,\n triangle.y + self.camera_y,\n triangle.z + self.camera_z).make_sdl();\n let flat_3 = triangle.p3.flat_point(self.screen_x, self.screen_y,\n triangle.x + self.camera_x,\n triangle.y + self.camera_y,\n triangle.z + self.camera_z).make_sdl();\n \n self.renderer.draw_lines(&[flat_1, flat_2, flat_3, flat_1]);\n }\n\n self.render_queue = Vec::new();\n }\n}\n\n\npub fn bootstrap<'a>(win_width: i32, win_height: i32, win_name: &str) -> (sdl2::render::Renderer<'a>, sdl2::EventPump) {\n let sdl_context = sdl2::init().unwrap(); \/\/ context\n let sdl_video = sdl_context.video().unwrap(); \/\/ video\n let mut event_pump = sdl_context.event_pump().unwrap();\n\n \/\/ Make a new window\n let window = sdl_video.window(win_name, 1280, 720)\n .position_centered()\n .opengl()\n .build()\n .expect(\"Failed on creating a new window!\");\n \n \/\/ turn window into a renderer, cannot do anything with window from now on.\n let mut renderer = window.renderer().build().unwrap(); \n\n (renderer, event_pump)\n}<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Fix struct paths<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Documentation of music<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>implement probabilistic choice<commit_after>extern crate rand;\n\nuse rand::distributions::{IndependentSample, Sample, Weighted, WeightedChoice};\nuse rand::{weak_rng, Rng};\n\nconst DATA: [(&str, f64); 8] = [\n (\"aleph\", 1.0 \/ 5.0),\n (\"beth\", 1.0 \/ 6.0),\n (\"gimel\", 1.0 \/ 7.0),\n (\"daleth\", 1.0 \/ 8.0),\n (\"he\", 1.0 \/ 9.0),\n (\"waw\", 1.0 \/ 10.0),\n (\"zayin\", 1.0 \/ 11.0),\n (\"heth\", 1759.0 \/ 27720.0),\n];\n\nconst SAMPLES: usize = 1_000_000;\n\n\/\/ Generate a mapping to be used by `WeightedChoice`\nfn gen_mapping() -> Vec<Weighted<usize>> {\n DATA.iter()\n .enumerate()\n .map(|(i, &(_, p))| Weighted {\n \/\/ `WeightedChoice` requires `u32` weights rather than raw probabilities. For each\n \/\/ probability, we convert it to a `u32` weight, and associate it with an index. We\n \/\/ multiply by a constant because small numbers such as 0.2 when casted to `u32`\n \/\/ become `0`. This conversion decreases the accuracy of the mapping, which is why we\n \/\/ provide an implementation which uses `f64`s for the best accuracy.\n weight: (p * 1_000_000_000.0) as u32,\n item: i,\n })\n .collect()\n}\n\n\/\/ Generate a mapping of the raw probabilities\nfn gen_mapping_float() -> Vec<f64> {\n \/\/ This does the work of `WeightedChoice::new`, splitting a number into various ranges. The\n \/\/ `item` of `Weighted` is represented here merely by the probability's position in the `Vec`.\n let mut running_total = 0.0;\n DATA.iter()\n .map(|&(_, p)| {\n running_total += p;\n running_total\n })\n .collect()\n}\n\n\/\/ An implementation of `WeightedChoice` which uses probabilities rather than weights. 
Refer to\n\/\/ the `WeightedChoice` source for serious usage.\nstruct WcFloat {\n mapping: Vec<f64>,\n}\n\nimpl WcFloat {\n fn new(mapping: &[f64]) -> Self {\n Self {\n mapping: mapping.to_vec(),\n }\n }\n\n \/\/ This is roughly the same logic as `WeightedChoice::ind_sample` (though is likely slower)\n fn search(&self, sample_prob: f64) -> usize {\n let idx = self.mapping\n .binary_search_by(|p| p.partial_cmp(&sample_prob).unwrap());\n match idx {\n Ok(i) | Err(i) => i,\n }\n }\n}\n\nimpl IndependentSample<usize> for WcFloat {\n fn ind_sample<R: Rng>(&self, rng: &mut R) -> usize {\n \/\/ Because we know the total is exactly 1.0, we can merely use a raw float value.\n \/\/ Otherwise caching `Range::new(0.0, running_total)` and sampling with\n \/\/ `range.ind_sample(&mut rng)` is recommended.\n let sample_prob = rng.next_f64();\n self.search(sample_prob)\n }\n}\n\nimpl Sample<usize> for WcFloat {\n fn sample<R: Rng>(&mut self, rng: &mut R) -> usize {\n self.ind_sample(rng)\n }\n}\n\nfn take_samples<R: Rng, T>(rng: &mut R, wc: &T) -> [usize; 8]\nwhere\n T: IndependentSample<usize>,\n{\n let mut counts = [0; 8];\n for _ in 0..SAMPLES {\n let sample = wc.ind_sample(rng);\n counts[sample] += 1;\n }\n counts\n}\n\nfn print_mapping(counts: &[usize]) {\n println!(\"Item | Expected | Actual \");\n println!(\"-------+----------+----------\");\n for (&(name, expected), &count) in DATA.iter().zip(counts.iter()) {\n let real = count as f64 \/ SAMPLES as f64;\n println!(\"{:06} | {:.6} | {:.6}\", name, expected, real);\n }\n}\n\nfn main() {\n let mut rng = weak_rng();\n\n println!(\" ~~~ U32 METHOD ~~~\");\n let mut mapping = gen_mapping();\n let wc = WeightedChoice::new(&mut mapping);\n\n let counts = take_samples(&mut rng, &wc);\n print_mapping(&counts);\n\n println!();\n\n println!(\" ~~~ FLOAT METHOD ~~~\");\n \/\/ initialize the float version of `WeightedChoice`\n let mapping = gen_mapping_float();\n let wc = WcFloat::new(&mapping);\n\n let counts = take_samples(&mut rng, &wc);\n print_mapping(&counts);\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n fn test_sample_logic(samples: usize) {\n let mut rng = weak_rng();\n\n let mapping = gen_mapping_float();\n let wc = WcFloat::new(&mapping);\n\n for _ in 0..samples {\n let prob = rng.next_f64();\n let i = wc.search(prob);\n\n assert!(prob <= mapping[i], \"p:{} m:{}\", prob, mapping[i]);\n if i != 0 {\n assert!(mapping[i - 1] <= prob, \"p:{} m:{}\", prob, mapping[i - 1]);\n }\n }\n }\n\n #[test]\n fn small_logic() {\n test_sample_logic(1000);\n }\n\n #[test]\n #[ignore]\n fn large_logic() {\n test_sample_logic(100_000_000);\n }\n\n fn test_deviation<T: IndependentSample<usize>>(wc: T) {\n let mut rng = weak_rng();\n\n let counts = take_samples(&mut rng, &wc);\n for (&(_, expected), &count) in DATA.iter().zip(counts.iter()) {\n let real = count as f64 \/ SAMPLES as f64;\n let dev = (1.0 - real \/ expected).abs();\n assert!(dev < 0.01, \"{}\", dev);\n }\n }\n\n #[test]\n fn wcf_deviation() {\n let mapping = gen_mapping_float();\n let wc = WcFloat::new(&mapping);\n\n test_deviation(wc);\n }\n\n #[test]\n fn wc_deviation() {\n let mut mapping = gen_mapping();\n let wc = WeightedChoice::new(&mut mapping);\n\n test_deviation(wc);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Program x86 segmentation hardware.\n\nuse core::fmt;\n\n\/\/\/ Specifies which element to load into a segment from\n\/\/\/ descriptor tables (i.e., is a index to LDT or GDT table\n\/\/\/ with some additional flags).\nbitflags! 
{\n flags SegmentSelector: u16 {\n \/\/\/ Requestor Privilege Level\n const RPL_0 = 0b00,\n const RPL_1 = 0b01,\n const RPL_2 = 0b10,\n const RPL_3 = 0b11,\n\n \/\/\/ Table Indicator (TI) 0 means GDT is used.\n const TI_GDT = 0 << 3,\n \/\/\/ Table Indicator (TI) 1 means LDT is used.\n const TI_LDT = 1 << 3,\n }\n}\n\nimpl SegmentSelector {\n \/\/\/ Create a new SegmentSelector\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/ * `index` index in GDT or LDT array.\n \/\/\/\n pub fn new(index: u16) -> SegmentSelector {\n SegmentSelector { bits: index << 3 }\n }\n\n pub fn from_raw(bits: u16) -> SegmentSelector {\n SegmentSelector { bits: bits }\n }\n}\n\nimpl fmt::Debug for SegmentSelector {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let r0 = match self.contains(RPL_0) {\n false => \"\",\n true => \"Ring 0 segment selector.\",\n };\n let r1 = match self.contains(RPL_1) {\n false => \"\",\n true => \"Ring 1 segment selector.\",\n };\n let r2 = match self.contains(RPL_2) {\n false => \"\",\n true => \"Ring 2 segment selector.\",\n };\n let r3 = match self.contains(RPL_3) {\n false => \"\",\n true => \"Ring 3 segment selector.\",\n };\n let tbl = match self.contains(TI_LDT) {\n false => \"GDT Table\",\n true => \"LDT Table\",\n };\n\n write!(f,\n \"Index {} in {}, {}{}{}{}\",\n self.bits >> 3,\n tbl,\n r0,\n r1,\n r2,\n r3)\n \/\/ write!(f, \"Index\")\n }\n}\n\n\n\/\/\/ Entry for GDT or LDT. Provides size and location of a segment.\nbitflags! {\n flags SegmentDescriptor: u64 {\n \/\/\/ Descriptor type (0 = system; 1 = code or data).\n const DESC_S = 1 << (32+12),\n \/\/\/ Descriptor privilege level 0.\n const DESC_DPL0 = 0b00 << (32+13),\n \/\/\/ Descriptor privilege level 1.\n const DESC_DPL1 = 0b01 << (32+13),\n \/\/\/ Descriptor privilege level 2.\n const DESC_DPL2 = 0b10 << (32+13),\n \/\/\/ Descriptor privilege level 3.\n const DESC_DPL3 = 0b11 << (32+13),\n \/\/\/ Descriptor is Present.\n const DESC_P = 1 << (32+15),\n \/\/\/ Available for use by system software.\n const DESC_AVL = 1 << (32+20),\n \/\/\/ 64-bit code segment (IA-32e mode only).\n const DESC_L = 1 << (32+21),\n \/\/\/ Default operation size (0 = 16-bit segment, 1 = 32-bit segment)\n const DESC_DB = 1 << (32+22),\n \/\/\/ Granularity.\n const DESC_G = 1 << (32+23),\n\n \/\/ System-Segment and Gate-Descriptor Types for IA32e mode.\n \/\/ When the S (descriptor type) flag in a segment descriptor is clear,\n \/\/ the descriptor type is a system descriptor.\n\n const TYPE_SYS_LDT = 0b0010 << (32+8),\n const TYPE_SYS_TSS_AVAILABLE = 0b1001 << (32+8),\n const TYPE_SYS_TSS_BUSY = 0b1011 << (32+8),\n const TYPE_SYS_CALL_GATE = 0b1100 << (32+8),\n const TYPE_SYS_INTERRUPT_GATE = 0b1110 << (32+8),\n const TYPE_SYS_TRAP_GATE = 0b1111 << (32+8),\n\n \/\/ Code- and Data-Segment Descriptor Types.\n \/\/ When the S (descriptor type) flag in a segment descriptor is set,\n \/\/ the descriptor is for either a code or a data segment.\n\n \/\/\/ Data Read-Only\n const TYPE_D_RO = 0b0000 << (32+8),\n \/\/\/ Data Read-Only, accessed\n const TYPE_D_ROA = 0b0001 << (32+8),\n \/\/\/ Data Read\/Write\n const TYPE_D_RW = 0b0010 << (32+8),\n \/\/\/ Data Read\/Write, accessed\n const TYPE_D_RWA = 0b0011 << (32+8),\n \/\/\/ Data Read-Only, expand-down\n const TYPE_D_ROEXD = 0b0100 << (32+8),\n \/\/\/ Data Read-Only, expand-down, accessed\n const TYPE_D_ROEXDA = 0b0101 << (32+8),\n \/\/\/ Data Read\/Write, expand-down\n const TYPE_D_RWEXD = 0b0110 << (32+8),\n \/\/\/ Data Read\/Write, expand-down, accessed\n const TYPE_D_RWEXDA = 0b0111 << 
(32+8),\n\n \/\/\/ Code Execute-Only\n const TYPE_C_EO = 0b1000 << (32+8),\n \/\/\/ Code Execute-Only, accessed\n const TYPE_C_EOA = 0b1001 << (32+8),\n \/\/\/ Code Execute\/Read\n const TYPE_C_ER = 0b1010 << (32+8),\n \/\/\/ Code Execute\/Read, accessed\n const TYPE_C_ERA = 0b1011 << (32+8),\n \/\/\/ Code Execute-Only, conforming\n const TYPE_C_EOC = 0b1100 << (32+8),\n \/\/\/ Code Execute-Only, conforming, accessed\n const TYPE_C_EOCA = 0b1101 << (32+8),\n \/\/\/ Code Execute\/Read, conforming\n const TYPE_C_ERC = 0b1110 << (32+8),\n \/\/\/ Code Execute\/Read, conforming, accessed\n const TYPE_C_ERCA = 0b1111 << (32+8),\n }\n}\n\n\/\/\/ This is data-structure is a ugly mess thing so we provide some\n\/\/\/ convenience function to program it.\nimpl SegmentDescriptor {\n pub fn new(base: u32, limit: u32) -> SegmentDescriptor {\n let base_low: u64 = base as u64 & 0xffffff;\n let base_high: u64 = (base as u64 >> 24) & 0xff;\n\n let limit_low: u64 = limit as u64 & 0xffff;\n let limit_high: u64 = (limit as u64 & (0b1111 << 16)) >> 16;\n\n SegmentDescriptor {\n bits: limit_low | base_low << 16 | limit_high << (32 + 16) | base_high << (32 + 24),\n }\n }\n}\n\nimpl fmt::Debug for SegmentDescriptor {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"SD: 0x{:x}\", self.bits)\n }\n}\n\n\/\/\/ Reload stack segment register.\npub unsafe fn load_ss(sel: SegmentSelector) {\n asm!(\"movw $0, %ss \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload data segment register.\npub unsafe fn load_ds(sel: SegmentSelector) {\n asm!(\"movw $0, %ds \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload fs segment register.\npub unsafe fn load_es(sel: SegmentSelector) {\n asm!(\"movw $0, %es \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload fs segment register.\npub unsafe fn load_fs(sel: SegmentSelector) {\n asm!(\"movw $0, %fs \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload gs segment register.\npub unsafe fn load_gs(sel: SegmentSelector) {\n asm!(\"movw $0, %gs \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload code segment register.\n\/\/\/ Note this is special since we can not directly move\n\/\/\/ to %cs. Instead we push the new segment selector\n\/\/\/ and return value on the stack and use lretq\n\/\/\/ to reload cs and continue at 1:.\npub unsafe fn load_cs(sel: SegmentSelector) {\n asm!(\"pushq $0\n lea 1f(%rip), %rax\n pushq %rax\n lretq\n 1:\" :: \"r\" (sel.bits() as u64) : \"{rax}\" \"memory\");\n}\n<commit_msg>Add method to retrieve current code segment selector<commit_after>\/\/! Program x86 segmentation hardware.\n\nuse core::fmt;\n\n\/\/\/ Specifies which element to load into a segment from\n\/\/\/ descriptor tables (i.e., is a index to LDT or GDT table\n\/\/\/ with some additional flags).\nbitflags! 
{\n flags SegmentSelector: u16 {\n \/\/\/ Requestor Privilege Level\n const RPL_0 = 0b00,\n const RPL_1 = 0b01,\n const RPL_2 = 0b10,\n const RPL_3 = 0b11,\n\n \/\/\/ Table Indicator (TI) 0 means GDT is used.\n const TI_GDT = 0 << 3,\n \/\/\/ Table Indicator (TI) 1 means LDT is used.\n const TI_LDT = 1 << 3,\n }\n}\n\nimpl SegmentSelector {\n \/\/\/ Create a new SegmentSelector\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/ * `index` index in GDT or LDT array.\n \/\/\/\n pub fn new(index: u16) -> SegmentSelector {\n SegmentSelector { bits: index << 3 }\n }\n\n pub fn from_raw(bits: u16) -> SegmentSelector {\n SegmentSelector { bits: bits }\n }\n}\n\nimpl fmt::Debug for SegmentSelector {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let r0 = match self.contains(RPL_0) {\n false => \"\",\n true => \"Ring 0 segment selector.\",\n };\n let r1 = match self.contains(RPL_1) {\n false => \"\",\n true => \"Ring 1 segment selector.\",\n };\n let r2 = match self.contains(RPL_2) {\n false => \"\",\n true => \"Ring 2 segment selector.\",\n };\n let r3 = match self.contains(RPL_3) {\n false => \"\",\n true => \"Ring 3 segment selector.\",\n };\n let tbl = match self.contains(TI_LDT) {\n false => \"GDT Table\",\n true => \"LDT Table\",\n };\n\n write!(f,\n \"Index {} in {}, {}{}{}{}\",\n self.bits >> 3,\n tbl,\n r0,\n r1,\n r2,\n r3)\n \/\/ write!(f, \"Index\")\n }\n}\n\n\n\/\/\/ Entry for GDT or LDT. Provides size and location of a segment.\nbitflags! {\n flags SegmentDescriptor: u64 {\n \/\/\/ Descriptor type (0 = system; 1 = code or data).\n const DESC_S = 1 << (32+12),\n \/\/\/ Descriptor privilege level 0.\n const DESC_DPL0 = 0b00 << (32+13),\n \/\/\/ Descriptor privilege level 1.\n const DESC_DPL1 = 0b01 << (32+13),\n \/\/\/ Descriptor privilege level 2.\n const DESC_DPL2 = 0b10 << (32+13),\n \/\/\/ Descriptor privilege level 3.\n const DESC_DPL3 = 0b11 << (32+13),\n \/\/\/ Descriptor is Present.\n const DESC_P = 1 << (32+15),\n \/\/\/ Available for use by system software.\n const DESC_AVL = 1 << (32+20),\n \/\/\/ 64-bit code segment (IA-32e mode only).\n const DESC_L = 1 << (32+21),\n \/\/\/ Default operation size (0 = 16-bit segment, 1 = 32-bit segment)\n const DESC_DB = 1 << (32+22),\n \/\/\/ Granularity.\n const DESC_G = 1 << (32+23),\n\n \/\/ System-Segment and Gate-Descriptor Types for IA32e mode.\n \/\/ When the S (descriptor type) flag in a segment descriptor is clear,\n \/\/ the descriptor type is a system descriptor.\n\n const TYPE_SYS_LDT = 0b0010 << (32+8),\n const TYPE_SYS_TSS_AVAILABLE = 0b1001 << (32+8),\n const TYPE_SYS_TSS_BUSY = 0b1011 << (32+8),\n const TYPE_SYS_CALL_GATE = 0b1100 << (32+8),\n const TYPE_SYS_INTERRUPT_GATE = 0b1110 << (32+8),\n const TYPE_SYS_TRAP_GATE = 0b1111 << (32+8),\n\n \/\/ Code- and Data-Segment Descriptor Types.\n \/\/ When the S (descriptor type) flag in a segment descriptor is set,\n \/\/ the descriptor is for either a code or a data segment.\n\n \/\/\/ Data Read-Only\n const TYPE_D_RO = 0b0000 << (32+8),\n \/\/\/ Data Read-Only, accessed\n const TYPE_D_ROA = 0b0001 << (32+8),\n \/\/\/ Data Read\/Write\n const TYPE_D_RW = 0b0010 << (32+8),\n \/\/\/ Data Read\/Write, accessed\n const TYPE_D_RWA = 0b0011 << (32+8),\n \/\/\/ Data Read-Only, expand-down\n const TYPE_D_ROEXD = 0b0100 << (32+8),\n \/\/\/ Data Read-Only, expand-down, accessed\n const TYPE_D_ROEXDA = 0b0101 << (32+8),\n \/\/\/ Data Read\/Write, expand-down\n const TYPE_D_RWEXD = 0b0110 << (32+8),\n \/\/\/ Data Read\/Write, expand-down, accessed\n const TYPE_D_RWEXDA = 0b0111 << 
(32+8),\n\n \/\/\/ Code Execute-Only\n const TYPE_C_EO = 0b1000 << (32+8),\n \/\/\/ Code Execute-Only, accessed\n const TYPE_C_EOA = 0b1001 << (32+8),\n \/\/\/ Code Execute\/Read\n const TYPE_C_ER = 0b1010 << (32+8),\n \/\/\/ Code Execute\/Read, accessed\n const TYPE_C_ERA = 0b1011 << (32+8),\n \/\/\/ Code Execute-Only, conforming\n const TYPE_C_EOC = 0b1100 << (32+8),\n \/\/\/ Code Execute-Only, conforming, accessed\n const TYPE_C_EOCA = 0b1101 << (32+8),\n \/\/\/ Code Execute\/Read, conforming\n const TYPE_C_ERC = 0b1110 << (32+8),\n \/\/\/ Code Execute\/Read, conforming, accessed\n const TYPE_C_ERCA = 0b1111 << (32+8),\n }\n}\n\n\/\/\/ This is data-structure is a ugly mess thing so we provide some\n\/\/\/ convenience function to program it.\nimpl SegmentDescriptor {\n pub fn new(base: u32, limit: u32) -> SegmentDescriptor {\n let base_low: u64 = base as u64 & 0xffffff;\n let base_high: u64 = (base as u64 >> 24) & 0xff;\n\n let limit_low: u64 = limit as u64 & 0xffff;\n let limit_high: u64 = (limit as u64 & (0b1111 << 16)) >> 16;\n\n SegmentDescriptor {\n bits: limit_low | base_low << 16 | limit_high << (32 + 16) | base_high << (32 + 24),\n }\n }\n}\n\nimpl fmt::Debug for SegmentDescriptor {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"SD: 0x{:x}\", self.bits)\n }\n}\n\n\/\/\/ Reload stack segment register.\npub unsafe fn load_ss(sel: SegmentSelector) {\n asm!(\"movw $0, %ss \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload data segment register.\npub unsafe fn load_ds(sel: SegmentSelector) {\n asm!(\"movw $0, %ds \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload fs segment register.\npub unsafe fn load_es(sel: SegmentSelector) {\n asm!(\"movw $0, %es \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload fs segment register.\npub unsafe fn load_fs(sel: SegmentSelector) {\n asm!(\"movw $0, %fs \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload gs segment register.\npub unsafe fn load_gs(sel: SegmentSelector) {\n asm!(\"movw $0, %gs \" :: \"r\" (sel) : \"memory\");\n}\n\n\/\/\/ Reload code segment register.\n\/\/\/ Note this is special since we can not directly move\n\/\/\/ to %cs. 
Instead we push the new segment selector\n\/\/\/ and return value on the stack and use lretq\n\/\/\/ to reload cs and continue at 1:.\npub unsafe fn load_cs(sel: SegmentSelector) {\n asm!(\"pushq $0\n lea 1f(%rip), %rax\n pushq %rax\n lretq\n 1:\" :: \"r\" (sel.bits() as u64) : \"{rax}\" \"memory\");\n}\n\n\/\/\/ Returns the current value of the code segment register.\npub fn cs() -> SegmentSelector {\n let segment: u16;\n unsafe { asm!(\"mov %cs, $0\" : \"=r\" (segment) ) };\n SegmentSelector::from_raw(segment)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>can borrow after closure goes out of scope<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>store_buffer: Add storing buffer implemantaion<commit_after>use std::fs::File;\nuse std::io::Write;\nuse std::fmt::Debug;\nuse retry_conf::RetryConf;\nuse retry::RetryError;\nuse rustc_serialize::Encodable;\nuse record::FluentError;\n\npub fn maybe_write_file<T>(conf: &RetryConf, record: T, err: RetryError) -> Result<(), FluentError>\n where T: Encodable + Debug\n{\n let store_needed = conf.clone().need_to_store();\n let store_path = conf.clone().store_path();\n if store_needed && store_path.is_some() {\n match File::create(store_path.unwrap()) {\n Ok(mut f) => {\n let mut w = Vec::new();\n write!(&mut w, \"{:?}\", record).unwrap();\n try!(f.write(&w));\n },\n Err(e) => return Err(From::from(e)),\n }\n Ok(())\n }\n else {\n Err(From::from(err))\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use {io};\nuse sys::unix::{nix, Io};\nuse std::net::SocketAddr;\nuse std::os::unix::io::{AsRawFd, RawFd};\npub use net::tcp::Shutdown;\n\npub fn socket(family: nix::AddressFamily, ty: nix::SockType, nonblock: bool) -> io::Result<RawFd> {\n let opts = if nonblock {\n nix::SOCK_NONBLOCK | nix::SOCK_CLOEXEC\n } else {\n nix::SOCK_CLOEXEC\n };\n\n nix::socket(family, ty, opts)\n .map_err(super::from_nix_error)\n}\n\npub fn connect(io: &Io, addr: &nix::SockAddr) -> io::Result<bool> {\n match nix::connect(io.as_raw_fd(), addr) {\n Ok(_) => Ok(true),\n Err(e) => {\n match e {\n nix::Error::Sys(nix::EINPROGRESS) => Ok(false),\n _ => Err(super::from_nix_error(e))\n }\n }\n }\n}\n\npub fn bind(io: &Io, addr: &nix::SockAddr) -> io::Result<()> {\n nix::bind(io.as_raw_fd(), addr)\n .map_err(super::from_nix_error)\n}\n\npub fn listen(io: &Io, backlog: usize) -> io::Result<()> {\n nix::listen(io.as_raw_fd(), backlog)\n .map_err(super::from_nix_error)\n}\n\npub fn accept(io: &Io, nonblock: bool) -> io::Result<RawFd> {\n let opts = if nonblock {\n nix::SOCK_NONBLOCK | nix::SOCK_CLOEXEC\n } else {\n nix::SOCK_CLOEXEC\n };\n\n nix::accept4(io.as_raw_fd(), opts)\n .map_err(super::from_nix_error)\n}\n\npub fn shutdown(io: &Io, how: Shutdown) -> io::Result<()> {\n let how: nix::Shutdown = match how {\n Shutdown::Read => nix::Shutdown::Read,\n Shutdown::Write => nix::Shutdown::Write,\n Shutdown::Both => nix::Shutdown::Both,\n };\n nix::shutdown(io.as_raw_fd(), &how)\n .map_err(super::from_nix_error)\n}\n\n\/\/ UDP & UDS\n#[inline]\npub fn recvfrom(io: &Io, buf: &mut [u8]) -> io::Result<(usize, nix::SockAddr)> {\n nix::recvfrom(io.as_raw_fd(), buf)\n .map_err(super::from_nix_error)\n}\n\n\/\/ UDP & UDS\n#[inline]\npub fn sendto(io: &Io, buf: &[u8], target: &nix::SockAddr) -> io::Result<usize> {\n nix::sendto(io.as_raw_fd(), buf, target, nix::MSG_DONTWAIT)\n .map_err(super::from_nix_error)\n}\n\npub fn getpeername(io: &Io) -> io::Result<nix::SockAddr> {\n nix::getpeername(io.as_raw_fd())\n .map_err(super::from_nix_error)\n}\n\npub fn getsockname(io: &Io) -> 
io::Result<nix::SockAddr> {\n nix::getsockname(io.as_raw_fd())\n .map_err(super::from_nix_error)\n}\n\n#[inline]\npub fn dup(io: &Io) -> io::Result<Io> {\n nix::dup(io.as_raw_fd())\n .map_err(super::from_nix_error)\n .map(|fd| Io::from_raw_fd(fd))\n}\n\n\/*\n *\n * ===== Helpers =====\n *\n *\/\n\npub fn to_nix_addr(addr: &SocketAddr) -> nix::SockAddr {\n nix::SockAddr::Inet(nix::InetAddr::from_std(addr))\n}\n\npub fn to_std_addr(addr: nix::SockAddr) -> SocketAddr {\n match addr {\n nix::SockAddr::Inet(ref addr) => addr.to_std(),\n _ => panic!(\"unexpected unix socket address\"),\n }\n}\n<commit_msg>Bugfix: sync with carllerche\/nix-rust<commit_after>use {io};\nuse sys::unix::{nix, Io};\nuse std::net::SocketAddr;\nuse std::os::unix::io::{AsRawFd, RawFd};\npub use net::tcp::Shutdown;\n\npub fn socket(family: nix::AddressFamily, ty: nix::SockType, nonblock: bool) -> io::Result<RawFd> {\n let opts = if nonblock {\n nix::SOCK_NONBLOCK | nix::SOCK_CLOEXEC\n } else {\n nix::SOCK_CLOEXEC\n };\n\n nix::socket(family, ty, opts)\n .map_err(super::from_nix_error)\n}\n\npub fn connect(io: &Io, addr: &nix::SockAddr) -> io::Result<bool> {\n match nix::connect(io.as_raw_fd(), addr) {\n Ok(_) => Ok(true),\n Err(e) => {\n match e {\n nix::Error::Sys(nix::EINPROGRESS) => Ok(false),\n _ => Err(super::from_nix_error(e))\n }\n }\n }\n}\n\npub fn bind(io: &Io, addr: &nix::SockAddr) -> io::Result<()> {\n nix::bind(io.as_raw_fd(), addr)\n .map_err(super::from_nix_error)\n}\n\npub fn listen(io: &Io, backlog: usize) -> io::Result<()> {\n nix::listen(io.as_raw_fd(), backlog)\n .map_err(super::from_nix_error)\n}\n\npub fn accept(io: &Io, nonblock: bool) -> io::Result<RawFd> {\n let opts = if nonblock {\n nix::SOCK_NONBLOCK | nix::SOCK_CLOEXEC\n } else {\n nix::SOCK_CLOEXEC\n };\n\n nix::accept4(io.as_raw_fd(), opts)\n .map_err(super::from_nix_error)\n}\n\npub fn shutdown(io: &Io, how: Shutdown) -> io::Result<()> {\n let how: nix::Shutdown = match how {\n Shutdown::Read => nix::Shutdown::Read,\n Shutdown::Write => nix::Shutdown::Write,\n Shutdown::Both => nix::Shutdown::Both,\n };\n nix::shutdown(io.as_raw_fd(), how)\n .map_err(super::from_nix_error)\n}\n\n\/\/ UDP & UDS\n#[inline]\npub fn recvfrom(io: &Io, buf: &mut [u8]) -> io::Result<(usize, nix::SockAddr)> {\n nix::recvfrom(io.as_raw_fd(), buf)\n .map_err(super::from_nix_error)\n}\n\n\/\/ UDP & UDS\n#[inline]\npub fn sendto(io: &Io, buf: &[u8], target: &nix::SockAddr) -> io::Result<usize> {\n nix::sendto(io.as_raw_fd(), buf, target, nix::MSG_DONTWAIT)\n .map_err(super::from_nix_error)\n}\n\npub fn getpeername(io: &Io) -> io::Result<nix::SockAddr> {\n nix::getpeername(io.as_raw_fd())\n .map_err(super::from_nix_error)\n}\n\npub fn getsockname(io: &Io) -> io::Result<nix::SockAddr> {\n nix::getsockname(io.as_raw_fd())\n .map_err(super::from_nix_error)\n}\n\n#[inline]\npub fn dup(io: &Io) -> io::Result<Io> {\n nix::dup(io.as_raw_fd())\n .map_err(super::from_nix_error)\n .map(|fd| Io::from_raw_fd(fd))\n}\n\n\/*\n *\n * ===== Helpers =====\n *\n *\/\n\npub fn to_nix_addr(addr: &SocketAddr) -> nix::SockAddr {\n nix::SockAddr::Inet(nix::InetAddr::from_std(addr))\n}\n\npub fn to_std_addr(addr: nix::SockAddr) -> SocketAddr {\n match addr {\n nix::SockAddr::Inet(ref addr) => addr.to_std(),\n _ => panic!(\"unexpected unix socket address\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add code for palindrome-products<commit_after>#[derive(Debug, PartialEq, Eq)]\npub struct Palindrome {\n v: Vec<(u64, u64)>,\n}\n\nimpl Palindrome {\n pub fn new(a: u64, b: u64) -> 
Palindrome {\n        Palindrome { v: vec![(a, b)] }\n    }\n\n    pub fn insert(&mut self, a: u64, b: u64) {\n        self.v.push((a, b))\n    }\n}\n\nfn is_palindrome(value: u64) -> bool {\n    let mut v = value;\n    let mut r = 0;\n\n    while v > 0 {\n        r = r * 10 + v % 10;\n        v \/= 10;\n    }\n\n    r == value\n}\n\npub fn palindrome_products(min: u64, max: u64) -> Option<(Palindrome, Palindrome)> {\n    if min > max {\n        return None;\n    }\n\n    let mut min_value = u64::MAX;\n    let mut max_value = u64::MIN;\n\n    let mut min_set = Palindrome::new(0, 0);\n    let mut max_set = Palindrome::new(0, 0);\n\n    for i in min..=max {\n        for j in i..=max {\n            let v = i * j;\n            if !is_palindrome(v) {\n                continue;\n            }\n\n            \/\/ Track the smallest and largest palindromes independently so that\n            \/\/ the first palindrome found also counts towards the maximum.\n            if v < min_value {\n                min_value = v;\n                min_set = Palindrome::new(i, j);\n            } else if v == min_value {\n                min_set.insert(i, j);\n            }\n\n            if v > max_value {\n                max_value = v;\n                max_set = Palindrome::new(i, j);\n            } else if v == max_value {\n                max_set.insert(i, j);\n            }\n        }\n    }\n\n    if min_value == u64::MAX {\n        None\n    } else {\n        Some((min_set, max_set))\n    }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::default::Default;\nuse ::internal::prelude::*;\n\nmacro_rules! colour {\n    ($(#[$attr:meta] $name:ident, $val:expr;)*) => {\n        impl Colour {\n            $(\n                #[$attr]\n                pub fn $name() -> Colour {\n                    Colour::new($val)\n                }\n            )*\n        }\n    }\n}\n\n\/\/\/ A utility struct to help with working with the basic representation of a\n\/\/\/ colour. This is particularly useful when working with a [`Role`]'s colour,\n\/\/\/ as the API works with an integer value instead of an RGB value.\n\/\/\/\n\/\/\/ Instances can be created by using the struct's associated functions. These\n\/\/\/ produce presets equivilant to those found in the official client's colour\n\/\/\/ picker.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Passing in a role's colour, and then retrieving its green component\n\/\/\/ via [`get_g`]:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ \/\/ assuming a `role` has already been bound\n\/\/\/\n\/\/\/ let colour = Colour::new(role.colour);\n\/\/\/ let green = colour.get_g();\n\/\/\/\n\/\/\/ println!(\"The green component is: {}\", green);\n\/\/\/ ```\n\/\/\/\n\/\/\/ Creating an instance with the [`dark_teal`] presets:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ let colour = Colour::dark_teal();\n\/\/\/\n\/\/\/ assert_eq!(colour.get_tuple(), (17, 128, 106));\n\/\/\/ ```\n\/\/\/\n\/\/\/ [`Role`]: ..\/model\/struct.Role.html\n\/\/\/ [`dark_teal`]: #method.dark_teal\n\/\/\/ [`get_g`]: #method.get_g\n#[derive(Clone, Copy, Debug)]\npub struct Colour {\n    \/\/\/ The raw inner 32-bit unsigned integer value of this Colour. 
This is\n \/\/\/ worked with to generate values such as the red component value.\n pub value: u32,\n}\n\nimpl Colour {\n \/\/\/ Generates a new Colour with the given integer value set.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Create a new Colour, and then ensure that its inner value is equivilant\n \/\/\/ to a specific RGB value, retrieved via [`get_tuple`]:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::new(6573123);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_tuple`]: #method.get_tuple\n pub fn new(value: u32) -> Colour {\n Colour {\n value: value,\n }\n }\n\n \/\/\/ Generates a new Colour from an RGB value, creating an inner u32\n \/\/\/ representation.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Creating a `Colour` via its RGB values will set its inner u32 correctly:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert!(Colour::from_rgb(255, 0, 0).value == 0xFF0000);\n \/\/\/ assert!(Colour::from_rgb(217, 23, 211).value == 0xD917D3);\n \/\/\/ ```\n \/\/\/\n \/\/\/ And you can then retrieve those same RGB values via its methods:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::from_rgb(217, 45, 215);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_r(), 217);\n \/\/\/ assert_eq!(colour.get_g(), 45);\n \/\/\/ assert_eq!(colour.get_b(), 215);\n \/\/\/ assert_eq!(colour.get_tuple(), (217, 45, 215));\n \/\/\/ ```\n pub fn from_rgb(r: u8, g: u8, b: u8) -> Colour {\n let mut uint = r as u32;\n uint = (uint << 8) | (g as u32);\n uint = (uint << 8) | (b as u32);\n\n Colour::new(uint)\n }\n\n #[doc(hidden)]\n pub fn decode(value: Value) -> Result<Colour> {\n match value {\n Value::U64(v) => Ok(Colour::new(v as u32)),\n Value::I64(v) => Ok(Colour::new(v as u32)),\n other => Err(Error::Decode(\"Expected valid colour\", other)),\n }\n }\n\n \/\/\/ Returns the red RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_r(), 100);\n \/\/\/ ```\n pub fn get_r(&self) -> u8 {\n ((self.value >> 16) & 255) as u8\n }\n\n \/\/\/ Returns the green RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_g(), 76);\n \/\/\/ ```\n pub fn get_g(&self) -> u8 {\n ((self.value >> 8) & 255) as u8\n }\n\n \/\/\/ Returns the blue RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_b(), 67);\n pub fn get_b(&self) -> u8 {\n (self.value & 255) as u8\n }\n\n \/\/\/ Returns a tuple of the red, green, and blue components of this Colour.\n \/\/\/\n \/\/\/ This is equivilant to creating a tuple with the return values of\n \/\/\/ [`get_r`], [`get_g`], and [`get_b`].\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_r`]: #method.get_r\n \/\/\/ [`get_g`]: #method.get_g\n \/\/\/ [`get_b`]: #method.get_b\n pub fn get_tuple(&self) -> (u8, u8, u8) {\n (self.get_r(), self.get_g(), self.get_b())\n }\n}\n\nimpl From<i32> for Colour {\n \/\/\/ Constructs a Colour from a i32.\n \/\/\/\n \/\/\/ This is used for functions 
that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ This is useful when providing hex values.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(0xDEA584).get_tuple(), (222, 165, 132));\n \/\/\/ ```\n fn from(value: i32) -> Colour {\n Colour::new(value as u32)\n }\n}\n\nimpl From<u32> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u32).get_r(), 100);\n \/\/\/ ```\n fn from(value: u32) -> Colour {\n Colour::new(value)\n }\n}\n\nimpl From<u64> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u64).get_r(), 100);\n \/\/\/ ```\n fn from(value: u64) -> Colour {\n Colour::new(value as u32)\n }\n}\n\ncolour! {\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(111, 198, 226)`.\n blitz_blue, 0x6FC6E2;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(52, 152, 219)`.\n blue, 0x3498DB;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(114, 137, 218)`.\n blurple, 0x7289DA;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(32, 102, 148)`.\n dark_blue, 0x206694;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(194, 124, 14)`.\n dark_gold, 0xC27C0E;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(31, 139, 76)`.\n dark_green, 0x1F8B4C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(96, 125, 139)`.\n dark_grey, 0x607D8B;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(173, 20, 87)`.\n dark_magenta, 0xAD1457;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(168, 67, 0)`.\n dark_orange, 0xA84300;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(113, 54, 138)`.\n dark_purple, 0x71368A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(153, 45, 34)`.\n dark_red, 0x992D22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(17, 128, 106)`.\n dark_teal, 0x11806A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(84, 110, 122)`.\n darker_grey, 0x546E7A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(250, 177, 237)`.\n fabled_pink, 0xFAB1ED\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(241, 196, 15)`.\n gold, 0xF1C40F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(186, 218, 85)`.\n kerbal, 0xBADA55;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(151, 156, 159)`.\n light_grey, 0x979C9F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(149, 165, 166)`.\n lighter_grey, 0x95A5A6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(233, 30, 99)`.\n magenta, 0xE91E63;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(230, 126, 34)`.\n orange, 0xE67E22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(155, 89, 182)`.\n purple, 0x9B59B6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(231, 76, 60)`.\n red, 0xE74C3C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(26, 188, 156)`.\n teal, 0x1ABC9C;\n}\n\nimpl Default for Colour {\n \/\/\/ Creates a default value for a `Colour`, setting the inner value to `0`.\n \/\/\/ This is equivilant to setting the RGB value to 
`(0, 0, 0)`.\n fn default() -> Colour {\n Colour {\n value: 0,\n }\n }\n}\n<commit_msg>Add 'fooyoo' to Colour struct<commit_after>use std::default::Default;\nuse ::internal::prelude::*;\n\nmacro_rules! colour {\n ($(#[$attr:meta] $name:ident, $val:expr;)*) => {\n impl Colour {\n $(\n #[$attr]\n pub fn $name() -> Colour {\n Colour::new($val)\n }\n )*\n }\n }\n}\n\n\/\/\/ A utility struct to help with working with the basic representation of a\n\/\/\/ colour. This is particularly useful when working with a [`Role`]'s colour,\n\/\/\/ as the API works with an integer value instead of an RGB value.\n\/\/\/\n\/\/\/ Instances can be created by using the struct's associated functions. These\n\/\/\/ produce presets equivilant to those found in the official client's colour\n\/\/\/ picker.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Passing in a role's colour, and then retrieving its green component\n\/\/\/ via [`get_g`]:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ \/\/ assuming a `role` has already been bound\n\/\/\/\n\/\/\/ let colour = Colour::new(role.colour);\n\/\/\/ let green = colour.get_g();\n\/\/\/\n\/\/\/ println!(\"The green component is: {}\", green);\n\/\/\/ ```\n\/\/\/\n\/\/\/ Creating an instance with the [`dark_teal`] presets:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ let colour = Colour::dark_teal();\n\/\/\/\n\/\/\/ assert_eq!(colour.get_tuple(), (17, 128, 106));\n\/\/\/ ```\n\/\/\/\n\/\/\/ [`Role`]: ..\/model\/struct.Role.html\n\/\/\/ [`dark_teal`]: #method.dark_teal\n\/\/\/ [`get_g`]: #method.get_g\n#[derive(Clone, Copy, Debug)]\npub struct Colour {\n \/\/\/ The raw inner 32-bit unsigned integer value of this Colour. This is\n \/\/\/ worked with to generate values such as the red component value.\n pub value: u32,\n}\n\nimpl Colour {\n \/\/\/ Generates a new Colour with the given integer value set.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Create a new Colour, and then ensure that its inner value is equivilant\n \/\/\/ to a specific RGB value, retrieved via [`get_tuple`]:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::new(6573123);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_tuple`]: #method.get_tuple\n pub fn new(value: u32) -> Colour {\n Colour {\n value: value,\n }\n }\n\n \/\/\/ Generates a new Colour from an RGB value, creating an inner u32\n \/\/\/ representation.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Creating a `Colour` via its RGB values will set its inner u32 correctly:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert!(Colour::from_rgb(255, 0, 0).value == 0xFF0000);\n \/\/\/ assert!(Colour::from_rgb(217, 23, 211).value == 0xD917D3);\n \/\/\/ ```\n \/\/\/\n \/\/\/ And you can then retrieve those same RGB values via its methods:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::from_rgb(217, 45, 215);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_r(), 217);\n \/\/\/ assert_eq!(colour.get_g(), 45);\n \/\/\/ assert_eq!(colour.get_b(), 215);\n \/\/\/ assert_eq!(colour.get_tuple(), (217, 45, 215));\n \/\/\/ ```\n pub fn from_rgb(r: u8, g: u8, b: u8) -> Colour {\n let mut uint = r as u32;\n uint = (uint << 8) | (g as u32);\n uint = (uint << 8) | (b as u32);\n\n Colour::new(uint)\n }\n\n #[doc(hidden)]\n pub fn decode(value: Value) -> Result<Colour> {\n match value {\n Value::U64(v) => 
Ok(Colour::new(v as u32)),\n Value::I64(v) => Ok(Colour::new(v as u32)),\n other => Err(Error::Decode(\"Expected valid colour\", other)),\n }\n }\n\n \/\/\/ Returns the red RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_r(), 100);\n \/\/\/ ```\n pub fn get_r(&self) -> u8 {\n ((self.value >> 16) & 255) as u8\n }\n\n \/\/\/ Returns the green RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_g(), 76);\n \/\/\/ ```\n pub fn get_g(&self) -> u8 {\n ((self.value >> 8) & 255) as u8\n }\n\n \/\/\/ Returns the blue RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_b(), 67);\n pub fn get_b(&self) -> u8 {\n (self.value & 255) as u8\n }\n\n \/\/\/ Returns a tuple of the red, green, and blue components of this Colour.\n \/\/\/\n \/\/\/ This is equivilant to creating a tuple with the return values of\n \/\/\/ [`get_r`], [`get_g`], and [`get_b`].\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_r`]: #method.get_r\n \/\/\/ [`get_g`]: #method.get_g\n \/\/\/ [`get_b`]: #method.get_b\n pub fn get_tuple(&self) -> (u8, u8, u8) {\n (self.get_r(), self.get_g(), self.get_b())\n }\n}\n\nimpl From<i32> for Colour {\n \/\/\/ Constructs a Colour from a i32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ This is useful when providing hex values.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(0xDEA584).get_tuple(), (222, 165, 132));\n \/\/\/ ```\n fn from(value: i32) -> Colour {\n Colour::new(value as u32)\n }\n}\n\nimpl From<u32> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u32).get_r(), 100);\n \/\/\/ ```\n fn from(value: u32) -> Colour {\n Colour::new(value)\n }\n}\n\nimpl From<u64> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u64).get_r(), 100);\n \/\/\/ ```\n fn from(value: u64) -> Colour {\n Colour::new(value as u32)\n }\n}\n\ncolour! 
{\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(111, 198, 226)`.\n blitz_blue, 0x6FC6E2;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(52, 152, 219)`.\n blue, 0x3498DB;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(114, 137, 218)`.\n blurple, 0x7289DA;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(32, 102, 148)`.\n dark_blue, 0x206694;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(194, 124, 14)`.\n dark_gold, 0xC27C0E;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(31, 139, 76)`.\n dark_green, 0x1F8B4C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(96, 125, 139)`.\n dark_grey, 0x607D8B;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(173, 20, 87)`.\n dark_magenta, 0xAD1457;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(168, 67, 0)`.\n dark_orange, 0xA84300;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(113, 54, 138)`.\n dark_purple, 0x71368A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(153, 45, 34)`.\n dark_red, 0x992D22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(17, 128, 106)`.\n dark_teal, 0x11806A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(84, 110, 122)`.\n darker_grey, 0x546E7A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(250, 177, 237)`.\n fabled_pink, 0xFAB1ED;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(17, 202, 128)`.\n fooyoo, 0x11CA80;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(241, 196, 15)`.\n gold, 0xF1C40F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(186, 218, 85)`.\n kerbal, 0xBADA55;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(151, 156, 159)`.\n light_grey, 0x979C9F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(149, 165, 166)`.\n lighter_grey, 0x95A5A6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(233, 30, 99)`.\n magenta, 0xE91E63;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(230, 126, 34)`.\n orange, 0xE67E22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(155, 89, 182)`.\n purple, 0x9B59B6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(231, 76, 60)`.\n red, 0xE74C3C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(26, 188, 156)`.\n teal, 0x1ABC9C;\n}\n\nimpl Default for Colour {\n \/\/\/ Creates a default value for a `Colour`, setting the inner value to `0`.\n \/\/\/ This is equivilant to setting the RGB value to `(0, 0, 0)`.\n fn default() -> Colour {\n Colour {\n value: 0,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use clippy_utils::diagnostics::span_lint_and_help;\nuse clippy_utils::is_ty_param_diagnostic_item;\nuse rustc_hir::{self as hir, def_id::DefId, QPath};\nuse rustc_lint::LateContext;\nuse rustc_span::symbol::sym;\n\nuse super::BOX_COLLECTION;\n\npub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {\n if_chain! {\n if Some(def_id) == cx.tcx.lang_items().owned_box();\n if let Some(item_type) = get_std_collection(cx, qpath);\n then {\n let generic = if item_type == \"String\" {\n \"\"\n } else {\n \"<..>\"\n };\n span_lint_and_help(\n cx,\n BOX_COLLECTION,\n hir_ty.span,\n &format!(\n \"you seem to be trying to use `Box<{outer}{generic}>`. 
Consider using just `{outer}{generic}`\",\n outer=item_type,\n generic = generic),\n None,\n &format!(\n \"`{outer}{generic}` is already on the heap, `Box<{outer}{generic}>` makes an extra allocation\",\n outer=item_type,\n generic = generic)\n );\n true\n } else {\n false\n }\n }\n}\n\nfn get_std_collection(cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<String> {\n if is_ty_param_diagnostic_item(cx, qpath, sym::vec_type).is_some() {\n Some(String::from(\"Vec\"))\n } else if is_ty_param_diagnostic_item(cx, qpath, sym::string_type).is_some() {\n Some(String::from(\"String\"))\n } else if is_ty_param_diagnostic_item(cx, qpath, sym::hashmap_type).is_some() {\n Some(String::from(\"HashMap\"))\n } else {\n None\n }\n}\n<commit_msg>Avoid needless heap allocation in box_collection<commit_after>use clippy_utils::diagnostics::span_lint_and_help;\nuse clippy_utils::is_ty_param_diagnostic_item;\nuse rustc_hir::{self as hir, def_id::DefId, QPath};\nuse rustc_lint::LateContext;\nuse rustc_span::symbol::sym;\n\nuse super::BOX_COLLECTION;\n\npub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {\n if_chain! {\n if Some(def_id) == cx.tcx.lang_items().owned_box();\n if let Some(item_type) = get_std_collection(cx, qpath);\n then {\n let generic = if item_type == \"String\" {\n \"\"\n } else {\n \"<..>\"\n };\n span_lint_and_help(\n cx,\n BOX_COLLECTION,\n hir_ty.span,\n &format!(\n \"you seem to be trying to use `Box<{outer}{generic}>`. Consider using just `{outer}{generic}`\",\n outer=item_type,\n generic = generic),\n None,\n &format!(\n \"`{outer}{generic}` is already on the heap, `Box<{outer}{generic}>` makes an extra allocation\",\n outer=item_type,\n generic = generic)\n );\n true\n } else {\n false\n }\n }\n}\n\nfn get_std_collection(cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<&'static str> {\n if is_ty_param_diagnostic_item(cx, qpath, sym::vec_type).is_some() {\n Some(\"Vec\")\n } else if is_ty_param_diagnostic_item(cx, qpath, sym::string_type).is_some() {\n Some(\"String\")\n } else if is_ty_param_diagnostic_item(cx, qpath, sym::hashmap_type).is_some() {\n Some(\"HashMap\")\n } else {\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust stack<commit_after>struct Stack<T> {\n data: Vec<T>,\n}\n\nimpl<T> Stack<T> {\n \/\/ Returns a new Stack\n fn new() -> Stack<T> {\n Stack { data: vec![] }\n }\n\n \/\/ Pushes the elemnt to top of stack\n fn push(&mut self, e: T) {\n self.data.push(e)\n }\n\n \/\/ Returns Option with top element of the stack\n fn pop(&mut self) -> Option<T> {\n self.data.pop()\n }\n\n \/\/ Returns true if stack is empty\n fn is_empty(&self) -> bool {\n self.data.is_empty()\n }\n\n \/\/ Returns length of Stack\n fn len(&self) -> usize {\n self.data.len()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update common error with more details<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
Gecko's restyle damage computation (aka change hints, aka `nsChangeHint`).\n\nuse gecko_bindings::bindings;\nuse gecko_bindings::structs;\nuse gecko_bindings::structs::{nsChangeHint, nsStyleContext, nsStyleStructID};\nuse matching::{StyleChange, StyleDifference};\nuse properties::ComputedValues;\nuse servo_arc::Arc;\nuse std::ops::{BitAnd, BitOr, BitOrAssign, Not};\n\n\/\/\/ The representation of Gecko's restyle damage is just a wrapper over\n\/\/\/ `nsChangeHint`.\n#[derive(Clone, Copy, Debug, PartialEq)]\npub struct GeckoRestyleDamage(nsChangeHint);\n\nimpl GeckoRestyleDamage {\n \/\/\/ Trivially construct a new `GeckoRestyleDamage`.\n pub fn new(raw: nsChangeHint) -> Self {\n GeckoRestyleDamage(raw)\n }\n\n \/\/\/ Get the inner change hint for this damage.\n pub fn as_change_hint(&self) -> nsChangeHint {\n self.0\n }\n\n \/\/\/ Get an empty change hint, that is (`nsChangeHint(0)`).\n pub fn empty() -> Self {\n GeckoRestyleDamage(nsChangeHint(0))\n }\n\n \/\/\/ Returns whether this restyle damage represents the empty damage.\n pub fn is_empty(&self) -> bool {\n self.0 == nsChangeHint(0)\n }\n\n \/\/\/ Computes the `StyleDifference` (including the appropriate change hint)\n \/\/\/ given an old style (in the form of a `nsStyleContext`, and a new style\n \/\/\/ (in the form of `ComputedValues`).\n \/\/\/\n \/\/\/ Note that we could in theory just get two `ComputedValues` here and diff\n \/\/\/ them, but Gecko has an interesting optimization when they mark accessed\n \/\/\/ structs, so they effectively only diff structs that have ever been\n \/\/\/ accessed from layout.\n pub fn compute_style_difference(\n source: &nsStyleContext,\n old_style: &ComputedValues,\n new_style: &Arc<ComputedValues>,\n ) -> StyleDifference {\n let mut any_style_changed: bool = false;\n let hint = unsafe {\n bindings::Gecko_CalcStyleDifference(old_style,\n new_style,\n source.mBits,\n &mut any_style_changed)\n };\n let change = if any_style_changed { StyleChange::Changed } else { StyleChange::Unchanged };\n StyleDifference::new(GeckoRestyleDamage(hint), change)\n }\n\n \/\/\/ Computes the `StyleDifference` between the two `ComputedValues` objects\n \/\/\/ for the case where the old and new style are both `display: none`.\n \/\/\/\n \/\/\/ In general we don't need to generate damage for such elements, but we\n \/\/\/ do need to generate a frame reconstruction for `-moz-binding` changes,\n \/\/\/ so that we can start loading the new binding.\n pub fn compute_undisplayed_style_difference(\n old_style: &ComputedValues,\n new_style: &ComputedValues,\n ) -> StyleDifference {\n let mut any_style_changed: bool = false;\n\n \/\/ Just compute the Display struct's difference.\n let display_struct_bit = 1 << (nsStyleStructID::eStyleStruct_Display as u32);\n let hint = unsafe {\n bindings::Gecko_CalcStyleDifference(old_style,\n new_style,\n display_struct_bit,\n &mut any_style_changed)\n };\n\n \/\/ Only pay attention to a reconstruct change hint.\n let damage = GeckoRestyleDamage(hint) & Self::reconstruct();\n\n let change = if damage.is_empty() { StyleChange::Changed } else { StyleChange::Unchanged };\n StyleDifference::new(damage, change)\n }\n\n \/\/\/ Returns true if this restyle damage contains all the damage of |other|.\n pub fn contains(self, other: Self) -> bool {\n self & other == other\n }\n\n \/\/\/ Gets restyle damage to reconstruct the entire frame, subsuming all\n \/\/\/ other damage.\n pub fn reconstruct() -> Self {\n GeckoRestyleDamage(structs::nsChangeHint_nsChangeHint_ReconstructFrame)\n }\n}\n\nimpl Default 
for GeckoRestyleDamage {\n fn default() -> Self {\n Self::empty()\n }\n}\n\nimpl BitOr for GeckoRestyleDamage {\n type Output = Self;\n fn bitor(self, other: Self) -> Self {\n GeckoRestyleDamage(self.0 | other.0)\n }\n}\n\nimpl BitOrAssign for GeckoRestyleDamage {\n fn bitor_assign(&mut self, other: Self) {\n *self = *self | other;\n }\n}\n\nimpl BitAnd for GeckoRestyleDamage {\n type Output = Self;\n fn bitand(self, other: Self) -> Self {\n GeckoRestyleDamage(nsChangeHint((self.0).0 & (other.0).0))\n }\n}\n\nimpl Not for GeckoRestyleDamage {\n type Output = Self;\n fn not(self) -> Self {\n GeckoRestyleDamage(nsChangeHint(!(self.0).0))\n }\n}\n<commit_msg>Fix up Gecko_CalcStyleDifference for Linux 32-bit ABI<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Gecko's restyle damage computation (aka change hints, aka `nsChangeHint`).\n\nuse gecko_bindings::bindings;\nuse gecko_bindings::structs;\nuse gecko_bindings::structs::{nsChangeHint, nsStyleContext, nsStyleStructID};\nuse matching::{StyleChange, StyleDifference};\nuse properties::ComputedValues;\nuse servo_arc::Arc;\nuse std::ops::{BitAnd, BitOr, BitOrAssign, Not};\n\n\/\/\/ The representation of Gecko's restyle damage is just a wrapper over\n\/\/\/ `nsChangeHint`.\n#[derive(Clone, Copy, Debug, PartialEq)]\npub struct GeckoRestyleDamage(nsChangeHint);\n\nimpl GeckoRestyleDamage {\n \/\/\/ Trivially construct a new `GeckoRestyleDamage`.\n pub fn new(raw: nsChangeHint) -> Self {\n GeckoRestyleDamage(raw)\n }\n\n \/\/\/ Get the inner change hint for this damage.\n pub fn as_change_hint(&self) -> nsChangeHint {\n self.0\n }\n\n \/\/\/ Get an empty change hint, that is (`nsChangeHint(0)`).\n pub fn empty() -> Self {\n GeckoRestyleDamage(nsChangeHint(0))\n }\n\n \/\/\/ Returns whether this restyle damage represents the empty damage.\n pub fn is_empty(&self) -> bool {\n self.0 == nsChangeHint(0)\n }\n\n \/\/\/ Computes the `StyleDifference` (including the appropriate change hint)\n \/\/\/ given an old style (in the form of a `nsStyleContext`, and a new style\n \/\/\/ (in the form of `ComputedValues`).\n \/\/\/\n \/\/\/ Note that we could in theory just get two `ComputedValues` here and diff\n \/\/\/ them, but Gecko has an interesting optimization when they mark accessed\n \/\/\/ structs, so they effectively only diff structs that have ever been\n \/\/\/ accessed from layout.\n pub fn compute_style_difference(\n source: &nsStyleContext,\n old_style: &ComputedValues,\n new_style: &Arc<ComputedValues>,\n ) -> StyleDifference {\n let mut any_style_changed: bool = false;\n let hint = unsafe {\n bindings::Gecko_CalcStyleDifference(old_style,\n new_style,\n source.mBits,\n &mut any_style_changed)\n };\n let change = if any_style_changed { StyleChange::Changed } else { StyleChange::Unchanged };\n StyleDifference::new(GeckoRestyleDamage(nsChangeHint(hint)), change)\n }\n\n \/\/\/ Computes the `StyleDifference` between the two `ComputedValues` objects\n \/\/\/ for the case where the old and new style are both `display: none`.\n \/\/\/\n \/\/\/ In general we don't need to generate damage for such elements, but we\n \/\/\/ do need to generate a frame reconstruction for `-moz-binding` changes,\n \/\/\/ so that we can start loading the new binding.\n pub fn compute_undisplayed_style_difference(\n old_style: &ComputedValues,\n new_style: &ComputedValues,\n ) -> 
StyleDifference {\n let mut any_style_changed: bool = false;\n\n \/\/ Just compute the Display struct's difference.\n let display_struct_bit = 1 << (nsStyleStructID::eStyleStruct_Display as u32);\n let hint = unsafe {\n bindings::Gecko_CalcStyleDifference(old_style,\n new_style,\n display_struct_bit,\n &mut any_style_changed)\n };\n\n \/\/ Only pay attention to a reconstruct change hint.\n let damage = GeckoRestyleDamage(nsChangeHint(hint)) & Self::reconstruct();\n\n let change = if damage.is_empty() { StyleChange::Changed } else { StyleChange::Unchanged };\n StyleDifference::new(damage, change)\n }\n\n \/\/\/ Returns true if this restyle damage contains all the damage of |other|.\n pub fn contains(self, other: Self) -> bool {\n self & other == other\n }\n\n \/\/\/ Gets restyle damage to reconstruct the entire frame, subsuming all\n \/\/\/ other damage.\n pub fn reconstruct() -> Self {\n GeckoRestyleDamage(structs::nsChangeHint_nsChangeHint_ReconstructFrame)\n }\n}\n\nimpl Default for GeckoRestyleDamage {\n fn default() -> Self {\n Self::empty()\n }\n}\n\nimpl BitOr for GeckoRestyleDamage {\n type Output = Self;\n fn bitor(self, other: Self) -> Self {\n GeckoRestyleDamage(self.0 | other.0)\n }\n}\n\nimpl BitOrAssign for GeckoRestyleDamage {\n fn bitor_assign(&mut self, other: Self) {\n *self = *self | other;\n }\n}\n\nimpl BitAnd for GeckoRestyleDamage {\n type Output = Self;\n fn bitand(self, other: Self) -> Self {\n GeckoRestyleDamage(nsChangeHint((self.0).0 & (other.0).0))\n }\n}\n\nimpl Not for GeckoRestyleDamage {\n type Output = Self;\n fn not(self) -> Self {\n GeckoRestyleDamage(nsChangeHint(!(self.0).0))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>style: Remove layout.css.xul-box-display-values.survive-blockification.enabled.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for all-your-base<commit_after>\/\/\/\n\/\/\/ Convert a number between two bases.\n\/\/\/\n\/\/\/ A number is any slice of digits.\n\/\/\/ A digit is any unsigned integer (e.g. u8, u16, u32, u64, or usize).\n\/\/\/ Bases are specified as unsigned integers.\n\/\/\/\n\/\/\/ Return an `Err(.)` if the conversion is impossible.\n\/\/\/ The tests do not test for specific values inside the `Err(.)`.\n\/\/\/\n\/\/\/\n\/\/\/ You are allowed to change the function signature as long as all test still pass.\n\/\/\/\n\/\/\/\n\/\/\/ Example:\n\/\/\/ Input\n\/\/\/ number: &[4, 2]\n\/\/\/ from_base: 10\n\/\/\/ to_base: 2\n\/\/\/ Result\n\/\/\/ Ok(vec![1, 0, 1, 0, 1, 0])\n\/\/\/\n\/\/\/ The example corresponds to converting the number 42 from decimal\n\/\/\/ which is equivalent to 101010 in binary.\n\/\/\/\n\/\/\/\n\/\/\/ Notes:\n\/\/\/ * The empty slice ( \"[]\" ) is equal to the number 0.\n\/\/\/ * Never output leading 0 digits. 
However, your function must be able to\n\/\/\/ process input with leading 0 digits.\n\/\/\/\n#[allow(unused_variables)]\npub fn convert(number: &[u32], from_base: u32, to_base: u32) -> Result<Vec<u32>, ()> {\n if number.into_iter().any(|&x| x >= from_base) {\n return Err(());\n }\n\n if from_base < 2 || to_base < 2 {\n return Err(());\n }\n\n let mut r = number.into_iter().fold(0, |acc, &x| acc * from_base + x);\n\n let mut result = (0..)\n .take_while(move |&x| r >= to_base.pow(x as u32))\n .map(|x| {\n let d = r % to_base;\n r \/= to_base;\n d\n })\n .collect::<Vec<_>>();\n\n result.reverse();\n\n Ok(result)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Switch to absolute offsets for extend comment word<commit_after><|endoftext|>"} {"text":"<commit_before>use crate::schema::*;\nuse diesel::*;\n\n#[test]\nfn insert_from_table() {\n use crate::schema::posts::dsl::*;\n let conn = connection_with_sean_and_tess_in_users_table();\n insert_into(posts)\n .values(users::table)\n .into_columns((user_id, title, body))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((user_id, title, body)).load(&conn);\n let expected = vec![\n (1, String::from(\"Sean\"), None::<String>),\n (2, String::from(\"Tess\"), None),\n ];\n assert_eq!(Ok(expected), data);\n}\n\n#[test]\nfn insert_from_table_reference() {\n use crate::schema::posts::dsl::*;\n let conn = connection_with_sean_and_tess_in_users_table();\n insert_into(posts)\n .values(&users::table)\n .into_columns((user_id, title, body))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((user_id, title, body)).load(&conn);\n let expected = vec![\n (1, String::from(\"Sean\"), None::<String>),\n (2, String::from(\"Tess\"), None),\n ];\n assert_eq!(Ok(expected), data);\n}\n\n#[test]\nfn insert_from_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n users\n .select((id, name.concat(\" says hi\")))\n .insert_into(posts)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_select_reference() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n let select = users.select((id, name.concat(\" says hi\")));\n insert_into(posts)\n .values(&select)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_boxed() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n users\n .select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_boxed_reference() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n let select = users.select((id, name.concat(\" says hi\"))).into_boxed();\n 
insert_into(posts)\n .values(&select)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn insert_or_ignore_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n sql_query(\"CREATE UNIQUE INDEX foo ON posts (user_id)\")\n .execute(&conn)\n .unwrap();\n\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says bye\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn insert_or_replace_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n sql_query(\"CREATE UNIQUE INDEX foo ON posts (user_id)\")\n .execute(&conn)\n .unwrap();\n\n replace_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n replace_into(posts)\n .values(users.select((id, name.concat(\" says bye\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says bye\", \"Tess says bye\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"mysql\")]\n\/\/ We can't share the test with SQLite because it modifies\n\/\/ schema, but we can at least make sure the query is *syntactically* valid.\nfn insert_or_ignore_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"mysql\")]\n\/\/ We can't share the test with SQLite because it modifies\n\/\/ schema, but we can at least make sure the query is *syntactically* valid.\nfn insert_or_replace_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n replace_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(all(feature = \"postgres\", feature = \"sqlite\"))]\nfn on_conflict_do_nothing_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX ON posts (title)\")\n .execute(&conn)\n .unwrap();\n let query = 
users\n .select((id, name.concat(\" says hi\")))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict_do_nothing();\n\n let inserted_rows = query.execute(&conn).unwrap();\n assert_eq!(2, inserted_rows);\n let inserted_rows = query.execute(&conn).unwrap();\n assert_eq!(0, inserted_rows);\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn on_conflict_do_nothing_with_select_for_sqlite() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n\n let inserted_rows = users\n .select((id, name.concat(\" says hi\")))\n .filter(diesel::dsl::sql(\" 1=1 \"))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict_do_nothing()\n .execute(&conn)\n .unwrap();\n assert_eq!(2, inserted_rows);\n let inserted_rows = users\n .select((id, name.concat(\" says hi\")))\n .filter(diesel::dsl::sql(\" 1=1 \"))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict_do_nothing()\n .execute(&conn)\n .unwrap();\n assert_eq!(0, inserted_rows);\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"postgres\")]\nfn on_conflict_do_update_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX ON posts (title)\")\n .execute(&conn)\n .unwrap();\n let query = users\n .select((id, name.concat(\" says hi\")))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"));\n\n query.execute(&conn).unwrap();\n\n insert_into(users)\n .values(name.eq(\"Ruby\"))\n .execute(&conn)\n .unwrap();\n\n query.execute(&conn).unwrap();\n\n let data = posts.select((title, body)).load(&conn).unwrap();\n let expected = vec![\n (String::from(\"Sean says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Tess says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Ruby says hi\"), None),\n ];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn on_conflict_do_update_with_select_for_sqlite() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .filter(diesel::dsl::sql(\"1=1\"))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n insert_into(users)\n .values(name.eq(\"Ruby\"))\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .filter(diesel::dsl::sql(\"1=1\"))\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((title, body)).load(&conn).unwrap();\n let expected = vec![\n (String::from(\"Sean says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Tess 
says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Ruby says hi\"), None),\n ];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(all(feature = \"postgres\", feature = \"sqlite\"))]\nfn on_conflict_do_update_with_boxed_select() {\n use schema::posts::dsl::*;\n use schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n insert_into(users)\n .values(name.eq(\"Ruby\"))\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((title, body)).load(&conn).unwrap();\n let expected = vec![\n (String::from(\"Sean says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Tess says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Ruby says hi\"), None),\n ];\n assert_eq!(expected, data);\n}\n<commit_msg>Unify test cases<commit_after>use crate::schema::*;\nuse diesel::*;\n\n#[test]\nfn insert_from_table() {\n use crate::schema::posts::dsl::*;\n let conn = connection_with_sean_and_tess_in_users_table();\n insert_into(posts)\n .values(users::table)\n .into_columns((user_id, title, body))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((user_id, title, body)).load(&conn);\n let expected = vec![\n (1, String::from(\"Sean\"), None::<String>),\n (2, String::from(\"Tess\"), None),\n ];\n assert_eq!(Ok(expected), data);\n}\n\n#[test]\nfn insert_from_table_reference() {\n use crate::schema::posts::dsl::*;\n let conn = connection_with_sean_and_tess_in_users_table();\n insert_into(posts)\n .values(&users::table)\n .into_columns((user_id, title, body))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((user_id, title, body)).load(&conn);\n let expected = vec![\n (1, String::from(\"Sean\"), None::<String>),\n (2, String::from(\"Tess\"), None),\n ];\n assert_eq!(Ok(expected), data);\n}\n\n#[test]\nfn insert_from_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n users\n .select((id, name.concat(\" says hi\")))\n .insert_into(posts)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_select_reference() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n let select = users.select((id, name.concat(\" says hi\")));\n insert_into(posts)\n .values(&select)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_boxed() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n users\n 
.select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\nfn insert_from_boxed_reference() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n let select = users.select((id, name.concat(\" says hi\"))).into_boxed();\n insert_into(posts)\n .values(&select)\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn insert_or_ignore_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n sql_query(\"CREATE UNIQUE INDEX foo ON posts (user_id)\")\n .execute(&conn)\n .unwrap();\n\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says bye\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"sqlite\")]\nfn insert_or_replace_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n sql_query(\"CREATE UNIQUE INDEX foo ON posts (user_id)\")\n .execute(&conn)\n .unwrap();\n\n replace_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n replace_into(posts)\n .values(users.select((id, name.concat(\" says bye\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says bye\", \"Tess says bye\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"mysql\")]\n\/\/ We can't share the test with SQLite because it modifies\n\/\/ schema, but we can at least make sure the query is *syntactically* valid.\nfn insert_or_ignore_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n insert_or_ignore_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n .into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(feature = \"mysql\")]\n\/\/ We can't share the test with SQLite because it modifies\n\/\/ schema, but we can at least make sure the query is *syntactically* valid.\nfn insert_or_replace_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n replace_into(posts)\n .values(users.select((id, name.concat(\" says hi\"))))\n 
.into_columns((user_id, title))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(any(feature = \"postgres\", feature = \"sqlite\"))]\nfn on_conflict_do_nothing_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n let query = users\n .select((id, name.concat(\" says hi\")))\n .filter(id.ge(0)) \/\/ Sqlite needs a where claues\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict_do_nothing();\n\n let inserted_rows = query.execute(&conn).unwrap();\n assert_eq!(2, inserted_rows);\n let inserted_rows = query.execute(&conn).unwrap();\n assert_eq!(0, inserted_rows);\n\n let data = posts.select(title).load::<String>(&conn).unwrap();\n let expected = vec![\"Sean says hi\", \"Tess says hi\"];\n assert_eq!(expected, data);\n}\n\n#[test]\n#[cfg(any(feature = \"postgres\", feature = \"sqlite\"))]\nfn on_conflict_do_update_with_select() {\n use crate::schema::posts::dsl::*;\n use crate::schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n let query = users\n .select((id, name.concat(\" says hi\")))\n .filter(id.ge(0)) \/\/ exists because sqlite needs a where clause\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"));\n\n query.execute(&conn).unwrap();\n\n insert_into(users)\n .values(name.eq(\"Ruby\"))\n .execute(&conn)\n .unwrap();\n\n query.execute(&conn).unwrap();\n\n let data = posts.select((title, body)).load(&conn).unwrap();\n let expected = vec![\n (String::from(\"Sean says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Tess says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Ruby says hi\"), None),\n ];\n assert_eq!(expected, data);\n}\n\n\n#[test]\n#[cfg(all(feature = \"postgres\", feature = \"sqlite\"))]\nfn on_conflict_do_update_with_boxed_select() {\n use schema::posts::dsl::*;\n use schema::users::dsl::{id, name, users};\n\n let conn = connection_with_sean_and_tess_in_users_table();\n\n sql_query(\"CREATE UNIQUE INDEX index_on_title ON posts (title)\")\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n insert_into(users)\n .values(name.eq(\"Ruby\"))\n .execute(&conn)\n .unwrap();\n\n users\n .select((id, name.concat(\" says hi\")))\n .into_boxed()\n .insert_into(posts)\n .into_columns((user_id, title))\n .on_conflict(title)\n .do_update()\n .set(body.eq(\"updated\"))\n .execute(&conn)\n .unwrap();\n\n let data = posts.select((title, body)).load(&conn).unwrap();\n let expected = vec![\n (String::from(\"Sean says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Tess says hi\"), Some(String::from(\"updated\"))),\n (String::from(\"Ruby says hi\"), None),\n ];\n assert_eq!(expected, data);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test<commit_after>#[macro_use]\nextern crate generator;\n\nuse generator::Generator;\n\nunsafe fn f0() {\n 
let mut i = 0;\n println!(\"{}\", i);\n\n _yield_!();\n\n i = 100;\n println!(\"{}\", i);\n\n _yield_!();\n\n i = 1000;\n println!(\"{}\", i);\n}\n\n\nunsafe fn f1() -> u32 {\n let mut j = 0;\n let mut i:i32;\n while j < 10 {\n i = _yield!(j);\n println!(\"get send: {:?}\", i);\n j+=1;\n }\n\n return 10000;\n}\n\nunsafe fn f2() -> (u64, u64, u64)\n{\n let mut i = 0u64;\n while i < 10 {\n _yield_!((i, i+1, i+2));\n i+=1;\n }\n\n \/\/ the last return is not deal with carefully\n (0, 0, 0)\n}\n\n#[test]\nfn test_main() {\n let mut g = generator!(f0());\n\n g.next();\n g.next();\n g.next();\n\n let mut g = generator!(f1(), <i32>);\n let mut i = 0;\n while !g.is_done() {\n println!(\"get yield: {:?}\", g.send(i));\n i += 1;\n }\n\n let g = generator!(f2());\n for x in g {\n println!(\"get{:?}\", x);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust: Problem 6<commit_after>\/\/\/ # Sum square difference\n\/\/\/ # Problem 6\n\/\/\/ The sum of the squares of the first ten natural numbers is\n\/\/\/ > 12 + 22 + ... + 102 = 385\n\/\/\/ The square of the sum of the first ten natural numbers is,\n\/\/\/ > (1 + 2 + ... + 10)2 = 552 = 3025\n\/\/\/ Hence the difference between the sum of the squares of the first ten natural numbers and the square of the sum is 3025 − 385 = 2640.\n\/\/\/ Find the difference between the sum of the squares of the first one hundred natural numbers and the square of the sum.\n\n\nuse std::num::pow;\nuse std::iter::AdditiveIterator;\n\nfn main() {\n let sum_squares = range(1, 101).map(|n| pow(n, 2)).sum();\n let squares_sum = pow(range(1, 101).sum(), 2);\n\n println!(\"{}\", squares_sum - sum_squares );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Add ID reporting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>newline spacing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Prioritize 'original' field in 'largest' methods<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update episode_count and episode_length definitions (#2)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add timer test for incrementing 1s<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More progress<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>hello world for rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added Stack module<commit_after>\/*!\n Heterogeneous Stack\n \n No need for any code - Rust can already do that.\n\n# Example\n```rust\n let s = ();\n let s = (1u, s); \/\/append\n let s = ('c', s);\n let (ch, s) = s; \/\/consume\n let (num, s) = s;\n println!(\"Stack: {} {} {}\", ch, num, s);\n```\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added soul count mechanic<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::HashSet;\n\nuse build::{Build, Compiler};\n\n#[derive(Hash, Eq, PartialEq, Clone, Debug)]\npub struct Step<'a> {\n pub src: Source<'a>,\n pub target: &'a str,\n}\n\nmacro_rules! 
targets {\n ($m:ident) => {\n $m! {\n \/\/ Step representing building the stageN compiler. This is just the\n \/\/ compiler executable itself, not any of the support libraries\n (rustc, Rustc { stage: u32 }),\n\n \/\/ Steps for the two main cargo builds, one for the standard library\n \/\/ and one for the compiler itself. These are parameterized over the\n \/\/ stage output they're going to be placed in along with the\n \/\/ compiler which is producing the copy of libstd or librustc\n (libstd, Libstd { stage: u32, compiler: Compiler<'a> }),\n (librustc, Librustc { stage: u32, compiler: Compiler<'a> }),\n\n \/\/ Links the standard library\/librustc produced by the compiler\n \/\/ provided into the host's directory also provided.\n (libstd_link, LibstdLink {\n stage: u32,\n compiler: Compiler<'a>,\n host: &'a str\n }),\n (librustc_link, LibrustcLink {\n stage: u32,\n compiler: Compiler<'a>,\n host: &'a str\n }),\n\n \/\/ Steps for long-running native builds. Ideally these wouldn't\n \/\/ actually exist and would be part of build scripts, but for now\n \/\/ these are here.\n \/\/\n \/\/ There aren't really any parameters to this, but empty structs\n \/\/ with braces are unstable so we just pick something that works.\n (llvm, Llvm { _dummy: () }),\n (compiler_rt, CompilerRt { _dummy: () }),\n\n \/\/ Steps for various pieces of documentation that we can generate,\n \/\/ the 'doc' step is just a pseudo target to depend on a bunch of\n \/\/ others.\n (doc, Doc { stage: u32 }),\n (doc_book, DocBook { stage: u32 }),\n (doc_nomicon, DocNomicon { stage: u32 }),\n (doc_style, DocStyle { stage: u32 }),\n (doc_standalone, DocStandalone { stage: u32 }),\n (doc_std, DocStd { stage: u32 }),\n (doc_rustc, DocRustc { stage: u32 }),\n\n \/\/ Steps for running tests. The 'check' target is just a pseudo\n \/\/ target to depend on a bunch of others.\n (check, Check { stage: u32, compiler: Compiler<'a> }),\n }\n }\n}\n\nmacro_rules! item { ($a:item) => ($a) }\n\nmacro_rules! define_source {\n ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {\n item! 
{\n #[derive(Hash, Eq, PartialEq, Clone, Debug)]\n pub enum Source<'a> {\n $($name { $($args)* }),*\n }\n }\n }\n}\n\ntargets!(define_source);\n\npub fn all(build: &Build) -> Vec<Step> {\n let mut ret = Vec::new();\n let mut all = HashSet::new();\n for target in top_level(build) {\n fill(build, &target, &mut ret, &mut all);\n }\n return ret;\n\n fn fill<'a>(build: &'a Build,\n target: &Step<'a>,\n ret: &mut Vec<Step<'a>>,\n set: &mut HashSet<Step<'a>>) {\n if set.insert(target.clone()) {\n for dep in target.deps(build) {\n fill(build, &dep, ret, set);\n }\n ret.push(target.clone());\n }\n }\n}\n\nfn top_level(build: &Build) -> Vec<Step> {\n let mut targets = Vec::new();\n let stage = build.flags.stage.unwrap_or(2);\n\n let host = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.host.iter().next()\n .unwrap_or(&build.config.build),\n };\n let target = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.target.iter().next().map(|x| &x[..])\n .unwrap_or(host.target)\n };\n\n add_steps(build, stage, &host, &target, &mut targets);\n\n if targets.len() == 0 {\n let t = Step {\n src: Source::Llvm { _dummy: () },\n target: &build.config.build,\n };\n targets.push(t.doc(stage));\n for host in build.config.host.iter() {\n if !build.flags.host.contains(host) {\n continue\n }\n let host = t.target(host);\n if host.target == build.config.build {\n targets.push(host.librustc(stage, host.compiler(stage)));\n } else {\n targets.push(host.librustc_link(stage, t.compiler(stage),\n host.target));\n }\n for target in build.config.target.iter() {\n if !build.flags.target.contains(target) {\n continue\n }\n\n if host.target == build.config.build {\n targets.push(host.target(target)\n .libstd(stage, host.compiler(stage)));\n } else {\n targets.push(host.target(target)\n .libstd_link(stage, t.compiler(stage),\n host.target));\n }\n }\n }\n }\n\n return targets\n\n}\n\nfn add_steps<'a>(build: &'a Build,\n stage: u32,\n host: &Step<'a>,\n target: &Step<'a>,\n targets: &mut Vec<Step<'a>>) {\n for step in build.flags.step.iter() {\n let compiler = host.target(&build.config.build).compiler(stage);\n match &step[..] {\n \"libstd\" => targets.push(target.libstd(stage, compiler)),\n \"librustc\" => targets.push(target.librustc(stage, compiler)),\n \"libstd-link\" => targets.push(target.libstd_link(stage, compiler,\n host.target)),\n \"librustc-link\" => targets.push(target.librustc_link(stage, compiler,\n host.target)),\n \"rustc\" => targets.push(host.rustc(stage)),\n \"llvm\" => targets.push(target.llvm(())),\n \"compiler-rt\" => targets.push(target.compiler_rt(())),\n \"doc-style\" => targets.push(host.doc_style(stage)),\n \"doc-standalone\" => targets.push(host.doc_standalone(stage)),\n \"doc-nomicon\" => targets.push(host.doc_nomicon(stage)),\n \"doc-book\" => targets.push(host.doc_book(stage)),\n \"doc-std\" => targets.push(host.doc_std(stage)),\n \"doc-rustc\" => targets.push(host.doc_rustc(stage)),\n \"doc\" => targets.push(host.doc(stage)),\n \"check\" => targets.push(host.check(stage, compiler)),\n _ => panic!(\"unknown build target: `{}`\", step),\n }\n }\n}\n\nmacro_rules! 
constructors {\n ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(\n fn $short(&self, $($arg: $t),*) -> Step<'a> {\n Step {\n src: Source::$name { $($arg: $arg),* },\n target: self.target,\n }\n }\n )*}\n}\n\nimpl<'a> Step<'a> {\n fn compiler(&self, stage: u32) -> Compiler<'a> {\n Compiler::new(stage, self.target)\n }\n\n fn target(&self, target: &'a str) -> Step<'a> {\n Step { target: target, src: self.src.clone() }\n }\n\n targets!(constructors);\n\n pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {\n match self.src {\n Source::Rustc { stage: 0 } => {\n Vec::new()\n }\n Source::Rustc { stage } => {\n let compiler = Compiler::new(stage - 1, &build.config.build);\n vec![self.librustc(stage - 1, compiler)]\n }\n Source::Librustc { stage, compiler } => {\n vec![self.libstd(stage, compiler), self.llvm(())]\n }\n Source::Libstd { stage: _, compiler } => {\n vec![self.compiler_rt(()),\n self.rustc(compiler.stage).target(compiler.host)]\n }\n Source::LibrustcLink { stage, compiler, host } => {\n vec![self.librustc(stage, compiler),\n self.libstd_link(stage, compiler, host)]\n }\n Source::LibstdLink { stage, compiler, host } => {\n vec![self.libstd(stage, compiler),\n self.target(host).rustc(stage)]\n }\n Source::CompilerRt { _dummy } => {\n vec![self.llvm(()).target(&build.config.build)]\n }\n Source::Llvm { _dummy } => Vec::new(),\n Source::DocStd { stage } => {\n vec![self.libstd(stage, self.compiler(stage))]\n }\n Source::DocBook { stage } |\n Source::DocNomicon { stage } |\n Source::DocStyle { stage } |\n Source::DocStandalone { stage } => {\n vec![self.rustc(stage)]\n }\n Source::DocRustc { stage } => {\n vec![self.doc_std(stage)]\n }\n Source::Doc { stage } => {\n vec![self.doc_book(stage), self.doc_nomicon(stage),\n self.doc_style(stage), self.doc_standalone(stage),\n self.doc_std(stage)]\n }\n Source::Check { stage, compiler: _ } => {\n vec![]\n }\n }\n }\n}\n<commit_msg>rustbuild: Refactor adding steps manually<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::HashSet;\n\nuse build::{Build, Compiler};\n\n#[derive(Hash, Eq, PartialEq, Clone, Debug)]\npub struct Step<'a> {\n pub src: Source<'a>,\n pub target: &'a str,\n}\n\nmacro_rules! targets {\n ($m:ident) => {\n $m! {\n \/\/ Step representing building the stageN compiler. This is just the\n \/\/ compiler executable itself, not any of the support libraries\n (rustc, Rustc { stage: u32 }),\n\n \/\/ Steps for the two main cargo builds, one for the standard library\n \/\/ and one for the compiler itself. 
These are parameterized over the\n \/\/ stage output they're going to be placed in along with the\n \/\/ compiler which is producing the copy of libstd or librustc\n (libstd, Libstd { stage: u32, compiler: Compiler<'a> }),\n (librustc, Librustc { stage: u32, compiler: Compiler<'a> }),\n\n \/\/ Links the standard library\/librustc produced by the compiler\n \/\/ provided into the host's directory also provided.\n (libstd_link, LibstdLink {\n stage: u32,\n compiler: Compiler<'a>,\n host: &'a str\n }),\n (librustc_link, LibrustcLink {\n stage: u32,\n compiler: Compiler<'a>,\n host: &'a str\n }),\n\n \/\/ Steps for long-running native builds. Ideally these wouldn't\n \/\/ actually exist and would be part of build scripts, but for now\n \/\/ these are here.\n \/\/\n \/\/ There aren't really any parameters to this, but empty structs\n \/\/ with braces are unstable so we just pick something that works.\n (llvm, Llvm { _dummy: () }),\n (compiler_rt, CompilerRt { _dummy: () }),\n\n \/\/ Steps for various pieces of documentation that we can generate,\n \/\/ the 'doc' step is just a pseudo target to depend on a bunch of\n \/\/ others.\n (doc, Doc { stage: u32 }),\n (doc_book, DocBook { stage: u32 }),\n (doc_nomicon, DocNomicon { stage: u32 }),\n (doc_style, DocStyle { stage: u32 }),\n (doc_standalone, DocStandalone { stage: u32 }),\n (doc_std, DocStd { stage: u32 }),\n (doc_rustc, DocRustc { stage: u32 }),\n\n \/\/ Steps for running tests. The 'check' target is just a pseudo\n \/\/ target to depend on a bunch of others.\n (check, Check { stage: u32, compiler: Compiler<'a> }),\n }\n }\n}\n\nmacro_rules! item { ($a:item) => ($a) }\n\nmacro_rules! define_source {\n ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {\n item! {\n #[derive(Hash, Eq, PartialEq, Clone, Debug)]\n pub enum Source<'a> {\n $($name { $($args)* }),*\n }\n }\n }\n}\n\ntargets!(define_source);\n\npub fn all(build: &Build) -> Vec<Step> {\n let mut ret = Vec::new();\n let mut all = HashSet::new();\n for target in top_level(build) {\n fill(build, &target, &mut ret, &mut all);\n }\n return ret;\n\n fn fill<'a>(build: &'a Build,\n target: &Step<'a>,\n ret: &mut Vec<Step<'a>>,\n set: &mut HashSet<Step<'a>>) {\n if set.insert(target.clone()) {\n for dep in target.deps(build) {\n fill(build, &dep, ret, set);\n }\n ret.push(target.clone());\n }\n }\n}\n\nfn top_level(build: &Build) -> Vec<Step> {\n let mut targets = Vec::new();\n let stage = build.flags.stage.unwrap_or(2);\n\n let host = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.host.iter().next()\n .unwrap_or(&build.config.build),\n };\n let target = Step {\n src: Source::Llvm { _dummy: () },\n target: build.flags.target.iter().next().map(|x| &x[..])\n .unwrap_or(host.target)\n };\n\n add_steps(build, stage, &host, &target, &mut targets);\n\n if targets.len() == 0 {\n let t = Step {\n src: Source::Llvm { _dummy: () },\n target: &build.config.build,\n };\n targets.push(t.doc(stage));\n for host in build.config.host.iter() {\n if !build.flags.host.contains(host) {\n continue\n }\n let host = t.target(host);\n if host.target == build.config.build {\n targets.push(host.librustc(stage, host.compiler(stage)));\n } else {\n targets.push(host.librustc_link(stage, t.compiler(stage),\n host.target));\n }\n for target in build.config.target.iter() {\n if !build.flags.target.contains(target) {\n continue\n }\n\n if host.target == build.config.build {\n targets.push(host.target(target)\n .libstd(stage, host.compiler(stage)));\n } else {\n targets.push(host.target(target)\n 
.libstd_link(stage, t.compiler(stage),\n host.target));\n }\n }\n }\n }\n\n return targets\n\n}\n\nfn add_steps<'a>(build: &'a Build,\n stage: u32,\n host: &Step<'a>,\n target: &Step<'a>,\n targets: &mut Vec<Step<'a>>) {\n struct Context<'a> {\n stage: u32,\n compiler: Compiler<'a>,\n _dummy: (),\n host: &'a str,\n }\n for step in build.flags.step.iter() {\n\n \/\/ The macro below insists on hygienic access to all local variables, so\n \/\/ we shove them all in a struct and subvert hygiene by accessing struct\n \/\/ fields instead,\n let cx = Context {\n stage: stage,\n compiler: host.target(&build.config.build).compiler(stage),\n _dummy: (),\n host: host.target,\n };\n macro_rules! add_step {\n ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(\n let name = stringify!($short).replace(\"_\", \"-\");\n if &step[..] == &name[..] {\n targets.push(target.$short($(cx.$arg),*));\n continue\n }\n drop(name);\n )*})\n }\n\n targets!(add_step);\n }\n}\n\nmacro_rules! constructors {\n ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(\n fn $short(&self, $($arg: $t),*) -> Step<'a> {\n Step {\n src: Source::$name { $($arg: $arg),* },\n target: self.target,\n }\n }\n )*}\n}\n\nimpl<'a> Step<'a> {\n fn compiler(&self, stage: u32) -> Compiler<'a> {\n Compiler::new(stage, self.target)\n }\n\n fn target(&self, target: &'a str) -> Step<'a> {\n Step { target: target, src: self.src.clone() }\n }\n\n targets!(constructors);\n\n pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {\n match self.src {\n Source::Rustc { stage: 0 } => {\n Vec::new()\n }\n Source::Rustc { stage } => {\n let compiler = Compiler::new(stage - 1, &build.config.build);\n vec![self.librustc(stage - 1, compiler)]\n }\n Source::Librustc { stage, compiler } => {\n vec![self.libstd(stage, compiler), self.llvm(())]\n }\n Source::Libstd { stage: _, compiler } => {\n vec![self.compiler_rt(()),\n self.rustc(compiler.stage).target(compiler.host)]\n }\n Source::LibrustcLink { stage, compiler, host } => {\n vec![self.librustc(stage, compiler),\n self.libstd_link(stage, compiler, host)]\n }\n Source::LibstdLink { stage, compiler, host } => {\n vec![self.libstd(stage, compiler),\n self.target(host).rustc(stage)]\n }\n Source::CompilerRt { _dummy } => {\n vec![self.llvm(()).target(&build.config.build)]\n }\n Source::Llvm { _dummy } => Vec::new(),\n Source::DocStd { stage } => {\n vec![self.libstd(stage, self.compiler(stage))]\n }\n Source::DocBook { stage } |\n Source::DocNomicon { stage } |\n Source::DocStyle { stage } |\n Source::DocStandalone { stage } => {\n vec![self.rustc(stage)]\n }\n Source::DocRustc { stage } => {\n vec![self.doc_std(stage)]\n }\n Source::Doc { stage } => {\n vec![self.doc_book(stage), self.doc_nomicon(stage),\n self.doc_style(stage), self.doc_standalone(stage),\n self.doc_std(stage)]\n }\n Source::Check { stage, compiler: _ } => {\n vec![]\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Day3 not implemented<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Updated all tests to use the new test! macro<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add decode tests for block transfer operations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update lib.rs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub use self::imp::OsRng;\n\nuse mem;\n\nfn next_u32(mut fill_buf: &mut FnMut(&mut [u8])) -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n fill_buf(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n}\n\nfn next_u64(mut fill_buf: &mut FnMut(&mut [u8])) -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n fill_buf(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n}\n\n#[cfg(all(unix,\n not(target_os = \"ios\"),\n not(target_os = \"openbsd\"),\n not(target_os = \"freebsd\"),\n not(target_os = \"fuchsia\")))]\nmod imp {\n use self::OsRngInner::*;\n use super::{next_u32, next_u64};\n\n use fs::File;\n use io;\n use libc;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(target_arch = \"arm\")]\n const NR_GETRANDOM: libc::c_long = 384;\n #[cfg(target_arch = \"s390x\")]\n const NR_GETRANDOM: libc::c_long = 349;\n #[cfg(any(target_arch = \"powerpc\", target_arch = \"powerpc64\"))]\n const NR_GETRANDOM: libc::c_long = 359;\n #[cfg(target_arch = \"aarch64\")]\n const NR_GETRANDOM: libc::c_long = 278;\n\n const GRND_NONBLOCK: libc::c_uint = 0x0001;\n\n unsafe {\n libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n while read < v.len() {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else if err == libc::EAGAIN {\n \/\/ if getrandom() returns EAGAIN it would have blocked\n \/\/ because the non-blocking pool (urandom) has not\n \/\/ initialized in the kernel yet due to a lack of entropy\n \/\/ the fallback we do here is to avoid blocking applications\n \/\/ which could depend on this call without ever knowing\n \/\/ they do and don't have a work around. 
The PRNG of\n \/\/ \/dev\/urandom will still be used but not over a completely\n \/\/ full entropy pool\n let reader = File::open(\"\/dev\/urandom\").expect(\"Unable to open \/dev\/urandom\");\n let mut reader_rng = ReaderRng::new(reader);\n reader_rng.fill_bytes(&mut v[read..]);\n read += v.len();\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, Ordering};\n use sync::Once;\n\n static CHECKER: Once = Once::new();\n static AVAILABLE: AtomicBool = AtomicBool::new(false);\n\n CHECKER.call_once(|| {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = io::Error::last_os_error().raw_os_error();\n err != Some(libc::ENOSYS)\n } else {\n true\n };\n AVAILABLE.store(available, Ordering::Relaxed);\n });\n\n AVAILABLE.load(Ordering::Relaxed)\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\"))))]\n fn is_getrandom_available() -> bool { false }\n\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng<File>),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = File::open(\"\/dev\/urandom\")?;\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => next_u32(&mut getrandom_fill_bytes),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => next_u64(&mut getrandom_fill_bytes),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"openbsd\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use libc;\n use sys::os::errno;\n use rand::Rng;\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n \/\/ getentropy(2) permits a maximum buffer size of 256 bytes\n for s in v.chunks_mut(256) {\n let ret = unsafe {\n libc::getentropy(s.as_mut_ptr() as *mut libc::c_void, s.len())\n };\n if ret == -1 {\n panic!(\"unexpected getentropy error: {}\", errno());\n }\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use ptr;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n pub struct OsRng {\n \/\/ dummy field to ensure that 
this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n enum SecRandom {}\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = ptr::null();\n\n #[link(name = \"Security\", kind = \"framework\")]\n #[cfg(not(cargobuild))]\n extern {}\n\n extern {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len(),\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(target_os = \"freebsd\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use libc;\n use rand::Rng;\n use ptr;\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let mib = [libc::CTL_KERN, libc::KERN_ARND];\n \/\/ kern.arandom permits a maximum buffer size of 256 bytes\n for s in v.chunks_mut(256) {\n let mut s_len = s.len();\n let ret = unsafe {\n libc::sysctl(mib.as_ptr(), mib.len() as libc::c_uint,\n s.as_mut_ptr() as *mut _, &mut s_len,\n ptr::null(), 0)\n };\n if ret == -1 || s_len != s.len() {\n panic!(\"kern.arandom sysctl failed! (returned {}, s.len() {}, oldlenp {})\",\n ret, s.len(), s_len);\n }\n }\n }\n }\n}\n\n#[cfg(target_os = \"fuchsia\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use rand::Rng;\n\n #[link(name = \"magenta\")]\n extern {\n fn mx_cprng_draw(buffer: *mut u8, len: usize) -> isize;\n }\n\n fn getrandom(buf: &mut [u8]) -> isize {\n unsafe { mx_cprng_draw(buf.as_mut_ptr(), buf.len()) }\n }\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let mut buf = v;\n while !buf.is_empty() {\n let ret = getrandom(buf);\n if ret < 0 {\n panic!(\"kernel mx_cprng_draw call failed! (returned {}, buf.len() {})\",\n ret, buf.len());\n }\n let move_buf = buf;\n buf = &mut move_buf[(ret as usize)..];\n }\n }\n }\n}\n<commit_msg>std: Track change to cprng syscall signature (Fuchsia)<commit_after>\/\/ Copyright 2013-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub use self::imp::OsRng;\n\nuse mem;\n\nfn next_u32(mut fill_buf: &mut FnMut(&mut [u8])) -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n fill_buf(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n}\n\nfn next_u64(mut fill_buf: &mut FnMut(&mut [u8])) -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n fill_buf(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n}\n\n#[cfg(all(unix,\n not(target_os = \"ios\"),\n not(target_os = \"openbsd\"),\n not(target_os = \"freebsd\"),\n not(target_os = \"fuchsia\")))]\nmod imp {\n use self::OsRngInner::*;\n use super::{next_u32, next_u64};\n\n use fs::File;\n use io;\n use libc;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(target_arch = \"arm\")]\n const NR_GETRANDOM: libc::c_long = 384;\n #[cfg(target_arch = \"s390x\")]\n const NR_GETRANDOM: libc::c_long = 349;\n #[cfg(any(target_arch = \"powerpc\", target_arch = \"powerpc64\"))]\n const NR_GETRANDOM: libc::c_long = 359;\n #[cfg(target_arch = \"aarch64\")]\n const NR_GETRANDOM: libc::c_long = 278;\n\n const GRND_NONBLOCK: libc::c_uint = 0x0001;\n\n unsafe {\n libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n while read < v.len() {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else if err == libc::EAGAIN {\n \/\/ if getrandom() returns EAGAIN it would have blocked\n \/\/ because the non-blocking pool (urandom) has not\n \/\/ initialized in the kernel yet due to a lack of entropy\n \/\/ the fallback we do here is to avoid blocking applications\n \/\/ which could depend on this call without ever knowing\n \/\/ they do and don't have a work around. 
The PRNG of\n \/\/ \/dev\/urandom will still be used but not over a completely\n \/\/ full entropy pool\n let reader = File::open(\"\/dev\/urandom\").expect(\"Unable to open \/dev\/urandom\");\n let mut reader_rng = ReaderRng::new(reader);\n reader_rng.fill_bytes(&mut v[read..]);\n read += v.len();\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, Ordering};\n use sync::Once;\n\n static CHECKER: Once = Once::new();\n static AVAILABLE: AtomicBool = AtomicBool::new(false);\n\n CHECKER.call_once(|| {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = io::Error::last_os_error().raw_os_error();\n err != Some(libc::ENOSYS)\n } else {\n true\n };\n AVAILABLE.store(available, Ordering::Relaxed);\n });\n\n AVAILABLE.load(Ordering::Relaxed)\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\",\n target_arch = \"powerpc64\",\n target_arch = \"s390x\"))))]\n fn is_getrandom_available() -> bool { false }\n\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng<File>),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = File::open(\"\/dev\/urandom\")?;\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => next_u32(&mut getrandom_fill_bytes),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => next_u64(&mut getrandom_fill_bytes),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"openbsd\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use libc;\n use sys::os::errno;\n use rand::Rng;\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n \/\/ getentropy(2) permits a maximum buffer size of 256 bytes\n for s in v.chunks_mut(256) {\n let ret = unsafe {\n libc::getentropy(s.as_mut_ptr() as *mut libc::c_void, s.len())\n };\n if ret == -1 {\n panic!(\"unexpected getentropy error: {}\", errno());\n }\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use ptr;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n pub struct OsRng {\n \/\/ dummy field to ensure that 
this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n enum SecRandom {}\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = ptr::null();\n\n #[link(name = \"Security\", kind = \"framework\")]\n #[cfg(not(cargobuild))]\n extern {}\n\n extern {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len(),\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(target_os = \"freebsd\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use libc;\n use rand::Rng;\n use ptr;\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let mib = [libc::CTL_KERN, libc::KERN_ARND];\n \/\/ kern.arandom permits a maximum buffer size of 256 bytes\n for s in v.chunks_mut(256) {\n let mut s_len = s.len();\n let ret = unsafe {\n libc::sysctl(mib.as_ptr(), mib.len() as libc::c_uint,\n s.as_mut_ptr() as *mut _, &mut s_len,\n ptr::null(), 0)\n };\n if ret == -1 || s_len != s.len() {\n panic!(\"kern.arandom sysctl failed! (returned {}, s.len() {}, oldlenp {})\",\n ret, s.len(), s_len);\n }\n }\n }\n }\n}\n\n#[cfg(target_os = \"fuchsia\")]\nmod imp {\n use super::{next_u32, next_u64};\n\n use io;\n use rand::Rng;\n\n #[link(name = \"magenta\")]\n extern {\n fn mx_cprng_draw(buffer: *mut u8, len: usize, actual: *mut usize) -> i32;\n }\n\n fn getrandom(buf: &mut [u8]) -> Result<usize, i32> {\n unsafe {\n let mut actual = 0;\n let status = mx_cprng_draw(buf.as_mut_ptr(), buf.len(), &mut actual);\n if status == 0 {\n Ok(actual)\n } else {\n Err(status)\n }\n }\n }\n\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n next_u32(&mut |v| self.fill_bytes(v))\n }\n fn next_u64(&mut self) -> u64 {\n next_u64(&mut |v| self.fill_bytes(v))\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let mut buf = v;\n while !buf.is_empty() {\n let ret = getrandom(buf);\n match ret {\n Err(err) => {\n panic!(\"kernel mx_cprng_draw call failed! 
(returned {}, buf.len() {})\",\n err, buf.len())\n }\n Ok(actual) => {\n let move_buf = buf;\n buf = &mut move_buf[(actual as usize)..];\n }\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adjust crate attributes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Allow a key per listener.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>The persister only needs to care about the bodies.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update codegen for latest nightly.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #33169 - swgillespie:issue32829, r=eddyb<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(const_fn)]\n\nconst bad : u32 = {\n {\n 5; \/\/~ ERROR: blocks in constants are limited to items and tail expressions\n 0\n }\n};\n\nconst bad_two : u32 = {\n {\n invalid();\n \/\/~^ ERROR: blocks in constants are limited to items and tail expressions\n \/\/~^^ ERROR: calls in constants are limited to constant functions, struct and enum\n 0\n }\n};\n\nconst bad_three : u32 = {\n {\n valid();\n \/\/~^ ERROR: blocks in constants are limited to items and tail expressions\n 0\n }\n};\n\nstatic bad_four : u32 = {\n {\n 5; \/\/~ ERROR: blocks in statics are limited to items and tail expressions\n 0\n }\n};\n\nstatic bad_five : u32 = {\n {\n invalid();\n \/\/~^ ERROR: blocks in statics are limited to items and tail expressions\n \/\/~^^ ERROR: calls in statics are limited to constant functions, struct and enum\n 0\n }\n};\n\nstatic bad_six : u32 = {\n {\n valid();\n \/\/~^ ERROR: blocks in statics are limited to items and tail expressions\n 0\n }\n};\n\nstatic mut bad_seven : u32 = {\n {\n 5; \/\/~ ERROR: blocks in statics are limited to items and tail expressions\n 0\n }\n};\n\nstatic mut bad_eight : u32 = {\n {\n invalid();\n \/\/~^ ERROR: blocks in statics are limited to items and tail expressions\n \/\/~^^ ERROR: calls in statics are limited to constant functions, struct and enum\n 0\n }\n};\n\nstatic mut bad_nine : u32 = {\n {\n valid();\n \/\/~^ ERROR: blocks in statics are limited to items and tail expressions\n 0\n }\n};\n\n\nfn invalid() {}\nconst fn valid() {}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>extern crate find_folder;\nextern crate gfx_device_gl;\n\nuse std::path::PathBuf;\nuse piston_window::{PistonWindow, Texture, Flip, TextureSettings};\n\npub struct Manager {\n assets_path: PathBuf,\n}\n\nimpl Manager {\n pub fn new() -> Self {\n Manager { assets_path: find_folder::Search::KidsThenParents(3, 5).for_folder(\"assets\").unwrap() }\n }\n\n pub fn get_asset(&self, name: &str) -> PathBuf {\n self.assets_path.join(name)\n }\n\n pub fn get_font(&self) -> PathBuf {\n self.get_asset(\"NotoSans-Regular.ttf\")\n }\n\n pub fn load_texture(&self, window: &PistonWindow) -> Texture<gfx_device_gl::Resources> {\n Texture::from_path(&mut *window.factory.borrow_mut(), self.get_asset(\"skeleton.png\"), Flip::None, &TextureSettings::new()).unwrap()\n }\n}\n\n#[test]\nfn new() {\n let manager = Manager::new();\n 
assert!(!manager.assets_path.to_string_lossy().is_empty());\n assert!(manager.assets_path.is_absolute());\n assert!(manager.assets_path.exists());\n assert!(manager.assets_path.is_dir());\n}\n\n#[test]\nfn get_asset() {\n let manager = Manager::new();\n let asset_path = manager.get_asset(\"test\");\n assert!(asset_path.to_string_lossy().len() > manager.assets_path.to_string_lossy().len());\n}\n\n#[test]\nfn get_font() {\n let manager = Manager::new();\n let font_path = manager.get_font();\n assert!(font_path.is_absolute());\n assert!(font_path.is_file());\n assert_eq!(\"ttf\", font_path.extension().unwrap());\n}\n<commit_msg>Format<commit_after>extern crate find_folder;\nextern crate gfx_device_gl;\n\nuse std::path::PathBuf;\nuse piston_window::{PistonWindow, Texture, Flip, TextureSettings};\n\npub struct Manager {\n assets_path: PathBuf,\n}\n\nimpl Manager {\n pub fn new() -> Self {\n Manager { assets_path: find_folder::Search::KidsThenParents(3, 5).for_folder(\"assets\").unwrap() }\n }\n\n pub fn get_asset(&self, name: &str) -> PathBuf {\n self.assets_path.join(name)\n }\n\n pub fn get_font(&self) -> PathBuf {\n self.get_asset(\"NotoSans-Regular.ttf\")\n }\n\n pub fn load_texture(&self, window: &PistonWindow) -> Texture<gfx_device_gl::Resources> {\n Texture::from_path(&mut *window.factory.borrow_mut(),\n self.get_asset(\"skeleton.png\"),\n Flip::None,\n &TextureSettings::new())\n .unwrap()\n }\n}\n\n#[test]\nfn new() {\n let manager = Manager::new();\n assert!(!manager.assets_path.to_string_lossy().is_empty());\n assert!(manager.assets_path.is_absolute());\n assert!(manager.assets_path.exists());\n assert!(manager.assets_path.is_dir());\n}\n\n#[test]\nfn get_asset() {\n let manager = Manager::new();\n let asset_path = manager.get_asset(\"test\");\n assert!(asset_path.to_string_lossy().len() > manager.assets_path.to_string_lossy().len());\n}\n\n#[test]\nfn get_font() {\n let manager = Manager::new();\n let font_path = manager.get_font();\n assert!(font_path.is_absolute());\n assert!(font_path.is_file());\n assert_eq!(\"ttf\", font_path.extension().unwrap());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixup! Factor out the \"naive\" implementation functionality.<commit_after>macro_rules! special_cases {\n ($($icount: expr, $in_: ident $iwidth: expr,\n $ocount: expr, $out: ident $owidth: expr,\n $instr: expr, $promote: ident);*;) => {{\n let mut map = ::std::collections::HashMap::new();\n $(\n map.insert((::ty::Type::new(stringify!($in_), $iwidth, $icount),\n ::ty::Type::new(stringify!($out), $owidth, $ocount)),\n ($instr, ::src::Promotion::$promote));\n )*\n map\n }}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Rust implememtation<commit_after>\/\/ https:\/\/www.hackerrank.com\/challenges\/angry-professor\/problem\n\n\/*\n * Implementation questions:\n *\n * 1. 
Why do I need to use `&t` in: `filter(|&t| t <= 0)`?\n *\/\n\nuse std::io;\nuse std::fmt;\nuse std::convert::TryFrom;\n\nstruct Case {\n min_students_on_time: i32,\n arrival_times: Vec<i32>,\n}\n\nenum Answer {\n YES,\n NO,\n}\n\nimpl fmt::Display for Answer {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Answer::YES => write!(f, \"YES\"),\n Answer::NO => write!(f, \"NO\"),\n }\n }\n}\n\nfn main() {\n let cases = to_int(&read_line());\n for _ in 0..cases {\n let case = read_case();\n println!(\"{}\", is_class_cancelled(case));\n }\n}\n\nfn read_line() -> String {\n let mut buffer = String::new();\n io::stdin()\n .read_line(&mut buffer)\n .expect(\"Failed to read line\");\n return buffer.trim().to_owned();\n}\n\nfn to_int(str: &str) -> i32 {\n return str.parse::<i32>().unwrap();\n}\n\nfn read_case_first_line() -> (i32, i32) {\n let line = &read_line();\n let list: Vec<&str> = line.split(\" \").collect();\n let students = to_int(list[0]);\n let min_students_on_time = to_int(list[1]);\n return (students, min_students_on_time);\n}\n\nfn read_case_second_line() -> Vec<i32> {\n let line = &read_line();\n let list: Vec<&str> = line.split(\" \").collect();\n return list.into_iter().map(|x| to_int(x)).collect();\n}\n\nfn read_case() -> Case {\n let (students, min_students_on_time) = read_case_first_line();\n let arrival_times = read_case_second_line();\n let arrivals_size = i32::try_from(arrival_times.len()).unwrap();\n assert_eq!(students, arrivals_size, \"arrival_times size is not correct\");\n return Case {\n min_students_on_time: min_students_on_time,\n arrival_times: arrival_times,\n };\n}\n\nfn is_class_cancelled(case: Case) -> Answer {\n let students_on_time = case.arrival_times.into_iter().filter(|&t| t <= 0).count();\n if i32::try_from(students_on_time).unwrap() >= case.min_students_on_time {\n return Answer::NO;\n }\n return Answer::YES;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for icompr!()<commit_after>#![feature(phase)]\n\n#[phase(link, plugin)]\n\nextern crate itertools;\n\nuse std::iter::AdditiveIterator;\n\n#[test]\nfn icompr() {\n let log = \"GET \/ 4096\\nGET \/home\/ 16301\\nPOST \/home\/ 49\\nGET \/ 4096\\n\";\n let lines = log.lines();\n let rows = icompr!(line.words() for line in lines);\n let ngets = icompr!(1i for mut row in rows if row.next() == Some(\"GET\")).sum();\n assert_eq!(ngets, 3i);\n}\n<|endoftext|>"} {"text":"<commit_before>generate_error_module!(\n generate_error_types!(GitHookError, GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n ConfigTypeError => \"Configuration value type wrong\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => \"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature 
object\"\n );\n);\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<commit_msg>Add GitHookError::inside_if()<commit_after>use libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\n\ngenerate_error_module!(\n generate_error_types!(GitHookError, GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n ConfigTypeError => \"Configuration value type wrong\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => \"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature object\"\n );\n);\n\nimpl GitHookError {\n\n pub fn inside_of<T>(self, h: HEK) -> HookResult<T> {\n Err(HE::new(h, Some(Box::new(self))))\n }\n\n}\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Apply clippy suggestion<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n}\n\nconst TEST_REPOS: &'static [Test] = &[\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"21c7dae29c3c214c08533c2a55ac649b418f2fe3\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n },\n Test {\n name: \"ripgrep\",\n repo: \"https:\/\/github.com\/BurntSushi\/ripgrep\",\n sha: \"b65bb37b14655e1a89c7cd19c8b011ef3e312791\",\n lock: None,\n },\n Test {\n name: \"tokei\",\n repo: \"https:\/\/github.com\/Aaronepower\/tokei\",\n sha: \"5e11c4852fe4aa086b0e4fe5885822fbe57ba928\",\n lock: None,\n },\n Test {\n name: \"treeify\",\n repo: \"https:\/\/github.com\/dzamlo\/treeify\",\n sha: \"999001b223152441198f117a68fb81f57bc086dd\",\n lock: None,\n },\n Test {\n name: \"xsv\",\n repo: \"https:\/\/github.com\/BurntSushi\/xsv\",\n sha: \"4b308adbe48ac81657fd124b90b44f7c3263f771\",\n lock: None,\n },\n];\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\"))\n .expect(\"\")\n .write_all(lockfile.as_bytes())\n .expect(\"\");\n }\n if !run_cargo_test(cargo, &dir) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path) -> bool {\n let status = Command::new(cargo_path)\n .arg(\"test\")\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<commit_msg>Extend cargotest to specify packages to test (within a Cargo workspace).<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n packages: &'static [&'static str],\n}\n\nconst TEST_REPOS: &'static [Test] = &[\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"21c7dae29c3c214c08533c2a55ac649b418f2fe3\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n packages: &[],\n },\n Test {\n name: \"ripgrep\",\n repo: \"https:\/\/github.com\/BurntSushi\/ripgrep\",\n sha: \"b65bb37b14655e1a89c7cd19c8b011ef3e312791\",\n lock: None,\n packages: &[],\n },\n Test {\n name: \"tokei\",\n repo: \"https:\/\/github.com\/Aaronepower\/tokei\",\n sha: \"5e11c4852fe4aa086b0e4fe5885822fbe57ba928\",\n lock: None,\n packages: &[],\n },\n Test {\n name: \"treeify\",\n repo: \"https:\/\/github.com\/dzamlo\/treeify\",\n sha: \"999001b223152441198f117a68fb81f57bc086dd\",\n lock: None,\n packages: &[],\n },\n Test {\n name: \"xsv\",\n repo: \"https:\/\/github.com\/BurntSushi\/xsv\",\n sha: \"4b308adbe48ac81657fd124b90b44f7c3263f771\",\n lock: None,\n packages: &[],\n },\n];\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\"))\n .expect(\"\")\n .write_all(lockfile.as_bytes())\n .expect(\"\");\n }\n if !run_cargo_test(cargo, &dir, test.packages) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path, packages: &[&str]) -> bool {\n let mut command = Command::new(cargo_path);\n command.arg(\"test\");\n 
for name in packages {\n command.arg(\"-p\").arg(name);\n }\n let status = command\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to enforce various stylistic guidelines on the Rust codebase.\n\/\/!\n\/\/! Example checks are:\n\/\/!\n\/\/! * No lines over 100 characters\n\/\/! * No tabs\n\/\/! * No trailing whitespace\n\/\/! * No CR characters\n\/\/! * No `TODO` or `XXX` directives\n\/\/! * A valid license header is at the top\n\/\/! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests\n\/\/!\n\/\/! A number of these checks can be opted-out of with various directives like\n\/\/! `\/\/ ignore-tidy-linelength`.\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nconst COLS: usize = 100;\nconst LICENSE: &'static str = \"\\\nCopyright <year> The Rust Project Developers. See the COPYRIGHT\nfile at the top-level directory of this distribution and at\nhttp:\/\/rust-lang.org\/COPYRIGHT.\n\nLicensed under the Apache License, Version 2.0 <LICENSE-APACHE or\nhttp:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n<LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\noption. 
This file may not be copied, modified, or distributed\nexcept according to those terms.\";\n\nconst UNEXPLAINED_IGNORE_DOCTEST_INFO: &str = r#\"unexplained \"```ignore\" doctest; try one:\n\n* make the test actually pass, by adding necessary imports and declarations, or\n* use \"```text\", if the code is not Rust code, or\n* use \"```compile_fail,Ennnn\", if the code is expected to fail at compile time, or\n* use \"```should_panic\", if the code is expected to fail at run time, or\n* use \"```no_run\", if the code should type-check but not necessary linkable\/runnable, or\n* explain it like \"```ignore (cannot-test-this-because-xxxx)\", if the annotation cannot be avoided.\n\n\"#;\n\nconst LLVM_UNREACHABLE_INFO: &str = r\"\\\nC++ code used llvm_unreachable, which triggers undefined behavior\nwhen executed when assertions are disabled.\nUse llvm::report_fatal_error for increased robustness.\";\n\n\/\/\/ Parser states for line_is_url.\n#[derive(PartialEq)]\n#[allow(non_camel_case_types)]\nenum LIUState { EXP_COMMENT_START,\n EXP_LINK_LABEL_OR_URL,\n EXP_URL,\n EXP_END }\n\n\/\/\/ True if LINE appears to be a line comment containing an URL,\n\/\/\/ possibly with a Markdown link label in front, and nothing else.\n\/\/\/ The Markdown link label, if present, may not contain whitespace.\n\/\/\/ Lines of this form are allowed to be overlength, because Markdown\n\/\/\/ offers no way to split a line in the middle of a URL, and the lengths\n\/\/\/ of URLs to external references are beyond our control.\nfn line_is_url(line: &str) -> bool {\n use self::LIUState::*;\n let mut state: LIUState = EXP_COMMENT_START;\n\n for tok in line.split_whitespace() {\n match (state, tok) {\n (EXP_COMMENT_START, \"\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/!\") => state = EXP_LINK_LABEL_OR_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.len() >= 4 && w.starts_with(\"[\") && w.ends_with(\"]:\")\n => state = EXP_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\")\n => state = EXP_END,\n\n (EXP_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\") || w.starts_with(\"..\/\")\n => state = EXP_END,\n\n (_, _) => return false,\n }\n }\n\n state == EXP_END\n}\n\n\/\/\/ True if LINE is allowed to be longer than the normal limit.\n\/\/\/ Currently there is only one exception, for long URLs, but more\n\/\/\/ may be added in the future.\nfn long_line_is_ok(line: &str) -> bool {\n if line_is_url(line) {\n return true;\n }\n\n false\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let mut contents = String::new();\n super::walk(path, &mut super::filter_dirs, &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".rs\", \".py\", \".js\", \".sh\", \".c\", \".cpp\", \".h\"];\n if extensions.iter().all(|e| !filename.ends_with(e)) ||\n filename.starts_with(\".#\") {\n return\n }\n\n contents.truncate(0);\n t!(t!(File::open(file), file).read_to_string(&mut contents));\n\n if contents.is_empty() {\n tidy_error!(bad, \"{}: empty file\", file.display());\n }\n\n let skip_cr = contents.contains(\"ignore-tidy-cr\");\n let skip_tab = contents.contains(\"ignore-tidy-tab\");\n let skip_length = contents.contains(\"ignore-tidy-linelength\");\n let skip_end_whitespace = contents.contains(\"ignore-tidy-end-whitespace\");\n for (i, line) in contents.split(\"\\n\").enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", 
file.display(), i + 1, msg);\n };\n if !skip_length && line.chars().count() > COLS\n && !long_line_is_ok(line) {\n err(&format!(\"line longer than {} chars\", COLS));\n }\n if line.contains(\"\\t\") && !skip_tab {\n err(\"tab character\");\n }\n if !skip_end_whitespace && (line.ends_with(\" \") || line.ends_with(\"\\t\")) {\n err(\"trailing whitespace\");\n }\n if line.contains(\"\\r\") && !skip_cr {\n err(\"CR character\");\n }\n if filename != \"style.rs\" {\n if line.contains(\"TODO\") {\n err(\"TODO is deprecated; use FIXME\")\n }\n if line.contains(\"\/\/\") && line.contains(\" XXX\") {\n err(\"XXX is deprecated; use FIXME\")\n }\n }\n if line.ends_with(\"```ignore\") || line.ends_with(\"```rust,ignore\") {\n err(UNEXPLAINED_IGNORE_DOCTEST_INFO);\n }\n if filename.ends_with(\".cpp\") && line.contains(\"llvm_unreachable\") {\n err(LLVM_UNREACHABLE_INFO);\n }\n }\n if !licenseck(file, &contents) {\n tidy_error!(bad, \"{}: incorrect license\", file.display());\n }\n })\n}\n\nfn licenseck(file: &Path, contents: &str) -> bool {\n if contents.contains(\"ignore-license\") {\n return true\n }\n let exceptions = [\n \"libstd\/sync\/mpsc\/mpsc_queue.rs\",\n \"libstd\/sync\/mpsc\/spsc_queue.rs\",\n ];\n if exceptions.iter().any(|f| file.ends_with(f)) {\n return true\n }\n\n \/\/ Skip the BOM if it's there\n let bom = \"\\u{feff}\";\n let contents = if contents.starts_with(bom) {&contents[3..]} else {contents};\n\n \/\/ See if the license shows up in the first 100 lines\n let lines = contents.lines().take(100).collect::<Vec<_>>();\n lines.windows(LICENSE.lines().count()).any(|window| {\n let offset = if window.iter().all(|w| w.starts_with(\"\/\/\")) {\n 2\n } else if window.iter().all(|w| w.starts_with('#')) {\n 1\n } else if window.iter().all(|w| w.starts_with(\" *\")) {\n 2\n } else {\n return false\n };\n window.iter().map(|a| a[offset..].trim())\n .zip(LICENSE.lines()).all(|(a, b)| {\n a == b || match b.find(\"<year>\") {\n Some(i) => a.starts_with(&b[..i]) && a.ends_with(&b[i+6..]),\n None => false,\n }\n })\n })\n\n}\n<commit_msg>Add a tidy check to ensure all files have 1 or 2 trailing newlines.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to enforce various stylistic guidelines on the Rust codebase.\n\/\/!\n\/\/! Example checks are:\n\/\/!\n\/\/! * No lines over 100 characters\n\/\/! * No tabs\n\/\/! * No trailing whitespace\n\/\/! * No CR characters\n\/\/! * No `TODO` or `XXX` directives\n\/\/! * A valid license header is at the top\n\/\/! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests\n\/\/!\n\/\/! A number of these checks can be opted-out of with various directives like\n\/\/! `\/\/ ignore-tidy-linelength`.\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nconst COLS: usize = 100;\nconst LICENSE: &'static str = \"\\\nCopyright <year> The Rust Project Developers. 
See the COPYRIGHT\nfile at the top-level directory of this distribution and at\nhttp:\/\/rust-lang.org\/COPYRIGHT.\n\nLicensed under the Apache License, Version 2.0 <LICENSE-APACHE or\nhttp:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n<LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\noption. This file may not be copied, modified, or distributed\nexcept according to those terms.\";\n\nconst UNEXPLAINED_IGNORE_DOCTEST_INFO: &str = r#\"unexplained \"```ignore\" doctest; try one:\n\n* make the test actually pass, by adding necessary imports and declarations, or\n* use \"```text\", if the code is not Rust code, or\n* use \"```compile_fail,Ennnn\", if the code is expected to fail at compile time, or\n* use \"```should_panic\", if the code is expected to fail at run time, or\n* use \"```no_run\", if the code should type-check but not necessary linkable\/runnable, or\n* explain it like \"```ignore (cannot-test-this-because-xxxx)\", if the annotation cannot be avoided.\n\n\"#;\n\nconst LLVM_UNREACHABLE_INFO: &str = r\"\\\nC++ code used llvm_unreachable, which triggers undefined behavior\nwhen executed when assertions are disabled.\nUse llvm::report_fatal_error for increased robustness.\";\n\n\/\/\/ Parser states for line_is_url.\n#[derive(PartialEq)]\n#[allow(non_camel_case_types)]\nenum LIUState { EXP_COMMENT_START,\n EXP_LINK_LABEL_OR_URL,\n EXP_URL,\n EXP_END }\n\n\/\/\/ True if LINE appears to be a line comment containing an URL,\n\/\/\/ possibly with a Markdown link label in front, and nothing else.\n\/\/\/ The Markdown link label, if present, may not contain whitespace.\n\/\/\/ Lines of this form are allowed to be overlength, because Markdown\n\/\/\/ offers no way to split a line in the middle of a URL, and the lengths\n\/\/\/ of URLs to external references are beyond our control.\nfn line_is_url(line: &str) -> bool {\n use self::LIUState::*;\n let mut state: LIUState = EXP_COMMENT_START;\n\n for tok in line.split_whitespace() {\n match (state, tok) {\n (EXP_COMMENT_START, \"\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/\/\") => state = EXP_LINK_LABEL_OR_URL,\n (EXP_COMMENT_START, \"\/\/!\") => state = EXP_LINK_LABEL_OR_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.len() >= 4 && w.starts_with(\"[\") && w.ends_with(\"]:\")\n => state = EXP_URL,\n\n (EXP_LINK_LABEL_OR_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\")\n => state = EXP_END,\n\n (EXP_URL, w)\n if w.starts_with(\"http:\/\/\") || w.starts_with(\"https:\/\/\") || w.starts_with(\"..\/\")\n => state = EXP_END,\n\n (_, _) => return false,\n }\n }\n\n state == EXP_END\n}\n\n\/\/\/ True if LINE is allowed to be longer than the normal limit.\n\/\/\/ Currently there is only one exception, for long URLs, but more\n\/\/\/ may be added in the future.\nfn long_line_is_ok(line: &str) -> bool {\n if line_is_url(line) {\n return true;\n }\n\n false\n}\n\npub fn check(path: &Path, bad: &mut bool) {\n let mut contents = String::new();\n super::walk(path, &mut super::filter_dirs, &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n let extensions = [\".rs\", \".py\", \".js\", \".sh\", \".c\", \".cpp\", \".h\"];\n if extensions.iter().all(|e| !filename.ends_with(e)) ||\n filename.starts_with(\".#\") {\n return\n }\n\n contents.truncate(0);\n t!(t!(File::open(file), file).read_to_string(&mut contents));\n\n if contents.is_empty() {\n tidy_error!(bad, \"{}: empty file\", file.display());\n }\n\n let skip_cr = contents.contains(\"ignore-tidy-cr\");\n 
let skip_tab = contents.contains(\"ignore-tidy-tab\");\n let skip_length = contents.contains(\"ignore-tidy-linelength\");\n let skip_end_whitespace = contents.contains(\"ignore-tidy-end-whitespace\");\n let mut trailing_new_lines = 0;\n for (i, line) in contents.split(\"\\n\").enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n if !skip_length && line.chars().count() > COLS\n && !long_line_is_ok(line) {\n err(&format!(\"line longer than {} chars\", COLS));\n }\n if line.contains(\"\\t\") && !skip_tab {\n err(\"tab character\");\n }\n if !skip_end_whitespace && (line.ends_with(\" \") || line.ends_with(\"\\t\")) {\n err(\"trailing whitespace\");\n }\n if line.contains(\"\\r\") && !skip_cr {\n err(\"CR character\");\n }\n if filename != \"style.rs\" {\n if line.contains(\"TODO\") {\n err(\"TODO is deprecated; use FIXME\")\n }\n if line.contains(\"\/\/\") && line.contains(\" XXX\") {\n err(\"XXX is deprecated; use FIXME\")\n }\n }\n if line.ends_with(\"```ignore\") || line.ends_with(\"```rust,ignore\") {\n err(UNEXPLAINED_IGNORE_DOCTEST_INFO);\n }\n if filename.ends_with(\".cpp\") && line.contains(\"llvm_unreachable\") {\n err(LLVM_UNREACHABLE_INFO);\n }\n if line.is_empty() {\n trailing_new_lines += 1;\n } else {\n trailing_new_lines = 0;\n }\n }\n if !licenseck(file, &contents) {\n tidy_error!(bad, \"{}: incorrect license\", file.display());\n }\n match trailing_new_lines {\n 0 => tidy_error!(bad, \"{}: missing trailing newline\", file.display()),\n 1 | 2 => {}\n n => tidy_error!(bad, \"{}: too many trailing newlines ({})\", file.display(), n),\n };\n })\n}\n\nfn licenseck(file: &Path, contents: &str) -> bool {\n if contents.contains(\"ignore-license\") {\n return true\n }\n let exceptions = [\n \"libstd\/sync\/mpsc\/mpsc_queue.rs\",\n \"libstd\/sync\/mpsc\/spsc_queue.rs\",\n ];\n if exceptions.iter().any(|f| file.ends_with(f)) {\n return true\n }\n\n \/\/ Skip the BOM if it's there\n let bom = \"\\u{feff}\";\n let contents = if contents.starts_with(bom) {&contents[3..]} else {contents};\n\n \/\/ See if the license shows up in the first 100 lines\n let lines = contents.lines().take(100).collect::<Vec<_>>();\n lines.windows(LICENSE.lines().count()).any(|window| {\n let offset = if window.iter().all(|w| w.starts_with(\"\/\/\")) {\n 2\n } else if window.iter().all(|w| w.starts_with('#')) {\n 1\n } else if window.iter().all(|w| w.starts_with(\" *\")) {\n 2\n } else {\n return false\n };\n window.iter().map(|a| a[offset..].trim())\n .zip(LICENSE.lines()).all(|(a, b)| {\n a == b || match b.find(\"<year>\") {\n Some(i) => a.starts_with(&b[..i]) && a.ends_with(&b[i+6..]),\n None => false,\n }\n })\n })\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add another mean test case<commit_after>#![feature(untagged_unions)]\n\/\/ A callee may not read the destination of our `&mut` without\n\/\/ us noticing.\n\/\/ Thise code got carefully checked to not introduce any reborrows\n\/\/ that are not explicit in the source. 
Let's hope the compiler does not break this later!\n\nuse std::mem;\n\nfn main() {\n let mut x: i32 = 15;\n let xref1 = &mut x;\n let xref1_sneaky: usize = unsafe { mem::transmute_copy(&xref1) };\n let xref2 = &mut *xref1; \/\/ derived from xref1, so using raw is still okay...\n callee(xref1_sneaky);\n let _val = *xref2; \/\/ ...but any use of it will invalidate our ref.\n \/\/~^ ERROR: does not exist on the stack\n}\n\nfn callee(xref1: usize) {\n \/\/ Transmuting through a union to avoid retagging\n union UsizeToRef {\n from: usize,\n to: &'static mut i32,\n }\n let xref1 = UsizeToRef { from: xref1 };\n \/\/ Doing the deref and the transmute (through the union) in the same place expression\n \/\/ should avoid retagging.\n let _val = unsafe { *xref1.to };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update GLFW copyright notice<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ensure command and args are provided correctly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use the '!' type for 'str::FromForm::Error'.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add file produces during recording<commit_after>fn main(){\n\tprintln!(\"Hello, World!\");\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #6195 : luqmana\/rust\/newtype-drop, r=catamorphism<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Make sure the destructor is run for newtype structs.\n\nstruct Foo(@mut int);\n\n#[unsafe_destructor]\nimpl Drop for Foo {\n fn finalize(&self) {\n ***self = 23;\n }\n}\n\nfn main() {\n let y = @mut 32;\n {\n let x = Foo(y);\n }\n assert_eq!(*y, 23);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Code refactoring.<commit_after><|endoftext|>"} {"text":"<commit_before>#[macro_export]\nmacro_rules! 
generate_result_logging_extension {\n {\n $name: ident,\n $map_name: ident,\n $map_str_name: ident,\n $map_err_name: ident,\n $map_err_str_name: ident,\n $closure: expr\n } => {\n pub trait InfoResult<T, E> : Sized {\n\n fn $map_name<F: FnOnce(&T) -> String>(self, f: F) -> Self;\n\n fn $map_str_name(self, s: &str) -> Self {\n self.$map_name(|_| format!(\"{}\", s))\n }\n\n fn $map_err_name<F: FnOnce(&E) -> String>(self, f: F) -> Self;\n\n fn $map_err_str_name(self, s: &str) -> Self {\n self.$map_err_name(|_| format!(\"{}\", s))\n }\n\n }\n\n impl<T, E> InfoResult<T, E> for Result<T, E> {\n\n fn $map_name<F: FnOnce(&T) -> String>(self, f: F) -> Self {\n self.map(|x| { $closure(f(&x)); x })\n }\n\n fn $map_err_name<F: FnOnce(&E) -> String>(self, f: F) -> Self {\n self.map_err(|e| { $closure(f(&e)); e })\n }\n\n }\n\n }\n}\n<commit_msg>Add documentation for the code-generator macro<commit_after>\/\/\/ This macro is used to generate extensions for the `Result<T, U>` type which only have\n\/\/\/ sideeffects.\n\/\/\/\n\/\/\/ This macro is then used to generate debug\/info\/log\/warning\/etc extensions.\n\/\/\/\n\/\/\/ It is exported, so other crates can use it to generate more specific extensions for\n\/\/\/ `Result<T, U>` types\n\/\/\/\n\/\/\/ # Parameters\n\/\/\/\n\/\/\/ The documentation for the parameters of the macro follow.\n\/\/\/\n\/\/\/ ## `$name`\n\/\/\/\n\/\/\/ name of the trait to generate\n\/\/\/\n\/\/\/ ## `$map_name`\n\/\/\/\n\/\/\/ Name of the function which is generated to call the closure with.\n\/\/\/\n\/\/\/ This function gets `&T` from `Result<T, E>` and can now build the argument for\n\/\/\/ `$closure`. So, this function can, for example, `|e| format!(\"Look here: {:?}\", e)`, the\n\/\/\/ result gets fed to `$closure`.\n\/\/\/\n\/\/\/ ## `$map_str_name`\n\/\/\/\n\/\/\/ Name of the function which is generated to call the closure with.\n\/\/\/\n\/\/\/ This function gets simply a `&str` which gets fed to the `$closure` later.\n\/\/\/ So it can be used to `foo().$map_str_name(\"Something happened\")`\n\/\/\/\n\/\/\/ ## `$map_err_name`\n\/\/\/\n\/\/\/ Same as `$map_name`, but gets `&E` from `Resul<T, E>`.\n\/\/\/\n\/\/\/ ## `$map_err_str_name`\n\/\/\/\n\/\/\/ Same as `$map_str_name`, but is called for error cases in `Result<T, E>` (though no\n\/\/\/ argument is passed.\n\/\/\/\n\/\/\/ ## `$closure`\n\/\/\/\n\/\/\/ The closure which should be called when mapping.\n\/\/\/\n\/\/\/ This closure can now do things, but the return value of the closure is discarded.\n\/\/\/ So, this closure can be used for its sideeffects (logging for example) only.\n\/\/\/\n\/\/\/ An example would be: `|element| debug!(\"Element: {:?}\", element)`.\n\/\/\/\n#[macro_export]\nmacro_rules! 
generate_result_logging_extension {\n {\n $name: ident,\n $map_name: ident,\n $map_str_name: ident,\n $map_err_name: ident,\n $map_err_str_name: ident,\n $closure: expr\n } => {\n pub trait $name<T, E> : Sized {\n\n fn $map_name<F: FnOnce(&T) -> String>(self, f: F) -> Self;\n\n fn $map_str_name(self, s: &str) -> Self {\n self.$map_name(|_| format!(\"{}\", s))\n }\n\n fn $map_err_name<F: FnOnce(&E) -> String>(self, f: F) -> Self;\n\n fn $map_err_str_name(self, s: &str) -> Self {\n self.$map_err_name(|_| format!(\"{}\", s))\n }\n\n }\n\n impl<T, E> $name<T, E> for Result<T, E> {\n\n fn $map_name<F: FnOnce(&T) -> String>(self, f: F) -> Self {\n self.map(|x| { $closure(f(&x)); x })\n }\n\n fn $map_err_name<F: FnOnce(&E) -> String>(self, f: F) -> Self {\n self.map_err(|e| { $closure(f(&e)); e })\n }\n\n }\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add hitable trait and hit record struct<commit_after>use vector::Vec3;\nuse ray::Ray;\n\npub struct hit_record {\n pub t : f64,\n pub p : Vec3,\n pub normal : Vec3,\n}\n\ntrait hitable {\n fn hit(r: Ray, t_min: f64, t_max: f64, rec: hit_record) -> bool\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>batch sync::mpsc<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests from old display_derive crate to ensure backward-compatibility<commit_after>\/\/ The following code is from https:\/\/github.com\/withoutboats\/display_derive\/blob\/232a32ee19e262aacbd2c93be5b4ce9e89a5fc30\/tests\/tests.rs\n\/\/ Written by without boats originally\n\n#[macro_use]\nextern crate derive_more;\n\n#[derive(Display)]\n#[display(fmt = \"An error has occurred.\")]\nstruct UnitError;\n\n#[test]\nfn unit_struct() {\n let s = format!(\"{}\", UnitError);\n assert_eq!(&s[..], \"An error has occurred.\");\n}\n\n#[derive(Display)]\n#[display(fmt = \"Error code: {}\", code)]\nstruct RecordError {\n code: u32,\n}\n\n#[test]\nfn record_struct() {\n let s = format!(\"{}\", RecordError { code: 0 });\n assert_eq!(&s[..], \"Error code: 0\");\n}\n\n#[derive(Display)]\n#[display(fmt = \"Error code: {}\", _0)]\nstruct TupleError(i32);\n\n#[test]\nfn tuple_struct() {\n let s = format!(\"{}\", TupleError(2));\n assert_eq!(&s[..], \"Error code: 2\");\n}\n\n#[derive(Display)]\nenum EnumError {\n #[display(fmt = \"Error code: {}\", code)]\n StructVariant { code: i32 },\n #[display(fmt = \"Error: {}\", _0)]\n TupleVariant(&'static str),\n #[display(fmt = \"An error has occurred.\")]\n UnitVariant,\n}\n\n#[test]\nfn enum_error() {\n let s = format!(\"{}\", EnumError::StructVariant { code: 2 });\n assert_eq!(&s[..], \"Error code: 2\");\n let s = format!(\"{}\", EnumError::TupleVariant(\"foobar\"));\n assert_eq!(&s[..], \"Error: foobar\");\n let s = format!(\"{}\", EnumError::UnitVariant);\n assert_eq!(&s[..], \"An error has occurred.\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a simple consumer as example<commit_after>#[macro_use]\nextern crate nom;\n\nuse nom::{Consumer,ConsumerState,MemProducer,IResult};\nuse nom::IResult::*;\n\n#[derive(PartialEq,Eq,Debug)]\nenum State {\n Beginning,\n Middle,\n End,\n Done\n}\n\nstruct TestConsumer {\n state: State,\n counter: usize,\n}\n\ntag!(om_parser \"om\".as_bytes());\ntag!(nom_parser \"nom\".as_bytes());\nmany1!(nomnom_parser<&[u8],&[u8]> nom_parser);\ntag!(end_parser \"kthxbye\".as_bytes());\n\nimpl Consumer for TestConsumer {\n fn consume(&mut self, input: &[u8]) -> ConsumerState {\n match self.state {\n State::Beginning => {\n match om_parser(input) {\n Error(a) => 
ConsumerState::ConsumerError(a),\n Incomplete(_) => ConsumerState::Await(0, 2),\n Done(_,_) => {\n self.state = State::Middle;\n ConsumerState::Await(2, 3)\n }\n }\n },\n State::Middle => {\n match nomnom_parser(input) {\n Error(a) => {\n self.state = State::End;\n ConsumerState::Await(0, 7)\n },\n Incomplete(_) => ConsumerState::Await(0, 3),\n Done(i,noms_vec) => {\n self.counter = self.counter + noms_vec.len();\n ConsumerState::Await(input.len() - i.len(), 3)\n }\n }\n },\n State::End => {\n match end_parser(input) {\n Error(a) => ConsumerState::ConsumerError(a),\n Incomplete(_) => ConsumerState::Await(0, 7),\n Done(_,_) => {\n self.state = State::Done;\n ConsumerState::ConsumerDone\n }\n }\n },\n State::Done => {\n \/\/ this should not be called\n ConsumerState::ConsumerError(42)\n }\n }\n }\n\n fn end(&mut self) {\n println!(\"counted {} noms\", self.counter);\n }\n}\n\n#[test]\nfn nom1() {\n let mut p = MemProducer::new(\"omnomkthxbye\".as_bytes(), 4);\n let mut c = TestConsumer{state: State::Beginning, counter: 0};\n c.run(&mut p);\n\n assert_eq!(c.counter, 1);\n assert_eq!(c.state, State::Done);\n}\n\n#[test]\nfn nomnomnom() {\n let mut p = MemProducer::new(\"omnomnomnomkthxbye\".as_bytes(), 4);\n let mut c = TestConsumer{state: State::Beginning, counter: 0};\n c.run(&mut p);\n\n assert_eq!(c.counter, 3);\n assert_eq!(c.state, State::Done);\n}\n\n#[test]\nfn no_nomnom() {\n let mut p = MemProducer::new(\"omkthxbye\".as_bytes(), 4);\n let mut c = TestConsumer{state: State::Beginning, counter: 0};\n c.run(&mut p);\n\n assert_eq!(c.counter, 0);\n assert_eq!(c.state, State::Done);\n}\n\n#[test]\nfn impolite() {\n let mut p = MemProducer::new(\"omnomnomnom\".as_bytes(), 4);\n let mut c = TestConsumer{state: State::Beginning, counter: 0};\n c.run(&mut p);\n\n assert_eq!(c.counter, 3);\n assert_eq!(c.state, State::Middle);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added skeleton implementation of a B-tree with a few bells and whistles. 
No major functionality added yet (such as insertion and removal).<commit_after>\/\/\n\/\/ btree.rs\n\/\/ Nif Ward\n\/\/ 10\/24\/13\n\/\/\n\/\/ starting implementation of a btree for rust\n\/\/ inspired by github user davidhalperin's gist\n\n\n\/\/What's in a BTree?\npub struct BTree<K, V>{\n root: Node<K, V>,\n len: uint,\n lower_bound: uint,\n upper_bound: uint\n}\n\n\nimpl<K: Clone + TotalOrd, V: Clone> BTree<K, V>{\n \n \/\/Returns new BTree with root node (leaf) and user-supplied lower bound\n fn new(k: K, v: V, lb: uint) -> BTree<K, V>{\n BTree{\n\t root: Node::new_leaf(~[LeafElt::new(k, v)]),\n\t len: 1,\n\t lower_bound: lb,\n\t upper_bound: 2 * lb\n }\n }\n\n \/\/Helper function for clone\n fn new_with_node_len(n: Node<K, V>, length: uint, lb: uint) -> BTree<K, V>{\n BTree{\n\t root: n,\n\t len: length,\n\t lower_bound: lb,\n\t upper_bound: 2 * lb\n\t}\n }\n\n\n fn clone(&self) -> BTree<K, V>{\n return BTree::new_with_node_len(self.root.clone(), self.len, self.lower_bound);\n }\n\n fn get(self, k: K) -> Option<V>{\n return self.root.get(k);\n }\n\n\n fn add(self, k: K, v: V) -> bool{\n let is_get = &self.clone().get(k.clone());\n\tif is_get.is_some(){ return false; }\n\telse{\n\t std::util::replace(&mut self.root.clone(),self.root.add(k.clone(), v));\n\t return true;\n\t}\n\n }\n\n\n\n}\n\nimpl<K: ToStr + TotalOrd, V: ToStr> ToStr for BTree<K, V>{\n \/\/Returns a string representation of the BTree\n fn to_str(&self) -> ~str{\n let ret=self.root.to_str();\n\treturn ret;\n }\n}\n\n\n\/\/Node types\nenum Node<K, V>{\n LeafNode(Leaf<K, V>),\n BranchNode(Branch<K, V>)\n}\n\n\n\/\/Node functions\/methods\nimpl<K: Clone + TotalOrd, V: Clone> Node<K, V>{\n \/\/differentiates between leaf and branch nodes\n fn is_leaf(&self) -> bool{\n match self{\n\t &LeafNode(*) => true,\n\t &BranchNode(*) => false\n }\n }\n \n \/\/Creates a new leaf or branch node\n fn new_leaf(vec: ~[LeafElt<K, V>]) -> Node<K,V>{\n LeafNode(Leaf::new(vec))\n }\n fn new_branch(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Node<K, V>{\n BranchNode(Branch::new(vec, right))\n }\n\n fn get(&self, k: K) -> Option<V>{\n match *self{\n\t LeafNode(ref leaf) => return leaf.get(k),\n\t BranchNode(ref branch) => return branch.get(k)\n }\n }\n\n \/\/A placeholder for add\n \/\/Currently returns a leaf node with a single value (the added one)\n fn add(self, k: K, v: V) -> Node<K, V>{\n return Node::new_leaf(~[LeafElt::new(k, v)]);\n }\n}\n\n\nimpl<K: Clone + TotalOrd, V: Clone> Clone for Node<K, V>{\n fn clone(&self) -> Node<K, V>{\n match *self{\n\t LeafNode(ref leaf) => return Node::new_leaf(leaf.elts.clone()),\n\t BranchNode(ref branch) => return Node::new_branch(branch.elts.clone(), branch.rightmost_child.clone())\n\t}\n }\n}\n\nimpl<K: Clone + TotalOrd, V: Clone> TotalOrd for Node<K, V>{\n #[allow(unused_variable)]\n fn cmp(&self, other: &Node<K, V>) -> Ordering{\n \/\/Requires a match statement--defer these procs to branch and leaf.\n \/* if self.elts[0].less_than(other.elts[0]) { return Less}\n\tif self.elts[0].greater_than(other.elts[0]) {return Greater}\n\telse {return Equal}\n\t*\/\n\treturn Equal;\n }\n}\n\nimpl<K: Clone + TotalOrd, V: Clone> TotalEq for Node<K, V>{\n \/\/Making sure Nodes have TotalEq\n #[allow(unused_variable)]\n fn equals(&self, other: &Node<K, V>) -> bool{\n \/* put in a match and defer this stuff to branch and leaf\n\n let mut shorter = 0;\n if self.elts.len() <= other.elts.len(){\n\t shorter = self.elts.len();\n\t}\n\telse{\n\t shorter = other.elts.len();\n\t}\n\tlet mut i = 0;\n\twhile i < 
shorter{\n\t if !self.elts[i].has_key(other.elts[i].key){\n\t return false;\n\t }\n\t i +=1;\n }\n\treturn true;\n *\/\n return true;\n }\n}\n\n\nimpl<K: ToStr + TotalOrd, V: ToStr> ToStr for Node<K, V>{\n fn to_str(&self) -> ~str{\n match *self{\n LeafNode(ref leaf) => leaf.to_str(),\n\t BranchNode(*) => ~\"\"\n }\n }\n}\n\n\n\/\/Array with no children\nstruct Leaf<K, V>{\n elts: ~[LeafElt<K, V>]\n}\n\n\/\/Array of values with children, plus a rightmost child (greater than all)\nstruct Branch<K, V>{\n elts: ~[BranchElt<K,V>],\n rightmost_child: ~Node<K, V>\n}\n\n\nimpl<K: Clone + TotalOrd, V: Clone> Leaf<K, V>{\n \/\/Constructor takes in a vector of leaves\n fn new(vec: ~[LeafElt<K, V>]) -> Leaf<K, V>{\n Leaf{\n elts: vec\n }\n }\n\n\n fn get(&self, k: K) -> Option<V>{\n for s in self.elts.iter(){\n\t let order=s.key.cmp(&k);\n\t match order{\n\t Equal => return Some(s.value.clone()),\n\t\t_ => {}\n\t }\n\t}\n\treturn None;\n }\n\n \/\/Add method in progress\n fn add(&self, k: K, v: V) -> Node<K, V>{\n return Node::new_leaf(~[LeafElt::new(k, v)]);\n }\n\n}\n\nimpl<K: ToStr + TotalOrd, V: ToStr> ToStr for Leaf<K, V>{\n fn to_str(&self) -> ~str{\n let mut ret=~\"\";\n for s in self.elts.iter(){\n ret = ret+\" \/\/ \"+ s.to_str();\n }\n return ret;\n }\n\n}\n\n\nimpl<K: Clone + TotalOrd, V: Clone> Branch<K, V>{\n \/\/constructor takes a branch vector and a rightmost child\n fn new(vec: ~[BranchElt<K, V>], right: ~Node<K, V>) -> Branch<K, V>{\n Branch{\n\t elts: vec,\n\t rightmost_child: right\n }\n }\n\n fn get(&self, k: K) -> Option<V>{\n for s in self.elts.iter(){\n\t let order = s.key.cmp(&k);\n\t match order{\n\t Less => return s.left.get(k),\n\t\tEqual => return Some(s.value.clone()),\n\t\t_ => {}\n\t }\n\t}\n\treturn self.rightmost_child.get(k);\n }\n\n\n \/\/Add method in progress\n fn add(&self, k: K, v: V) -> Node<K, V>{\n return Node::new_leaf(~[LeafElt::new(k, v)]);\n }\n}\n\n\/\/No left child\nstruct LeafElt<K, V>{\n key: K,\n value: V\n}\n\n\/\/Has a left child\nstruct BranchElt<K, V>{\n left: Node<K, V>,\n key: K,\n value: V\n}\n\nimpl<K: Clone + TotalOrd, V> LeafElt<K, V>{\n fn new(k: K, v: V) -> LeafElt<K, V>{\n LeafElt{\n key: k,\n\t value: v\n\t}\n }\n\n fn less_than(&self, other: LeafElt<K, V>) -> bool{\n let order = self.key.cmp(&other.key);\n\tmatch order{\n\t Less => true,\n\t _ => false\n\t}\n }\n\n fn greater_than(&self, other: LeafElt<K, V>) -> bool{\n let order = self.key.cmp(&other.key);\n\tmatch order{\n\t Greater => true,\n\t _ => false\n\t}\n }\n\n\n fn has_key(&self, other: K) -> bool{\n let order = self.key.cmp(&other);\n\tmatch order{\n\t Equal => true,\n\t _ => false\n\t}\n }\n\n}\n\nimpl<K: Clone + TotalOrd, V: Clone> Clone for LeafElt<K, V>{\n fn clone(&self) -> LeafElt<K, V>{\n return LeafElt::new(self.key.clone(), self.value.clone());\n }\n}\n\nimpl<K: ToStr + TotalOrd, V: ToStr> ToStr for LeafElt<K, V>{\n fn to_str(&self) -> ~str{\n return \"Key: \"+self.key.to_str()+\", value: \"+self.value.to_str()+\"; \";\n }\n\n}\n\nimpl<K: Clone + TotalOrd, V: Clone> BranchElt<K, V>{\n fn new(k: K, v: V, n: Node<K, V>) -> BranchElt<K, V>{\n BranchElt{\n left: n,\n key: k,\n value: v\n }\n }\n\n \/\/Add method in progress. Should it return a branch or a leaf elt? 
It will depend on implementation.\n fn add(&self, k: K, v: V) -> LeafElt<K, V>{\n return LeafElt::new(k, v);\n }\n}\n\nimpl<K: Clone + TotalOrd, V: Clone> Clone for BranchElt<K, V>{\n fn clone(&self) -> BranchElt<K, V>{\n return BranchElt::new(self.key.clone(), self.value.clone(), self.left.clone());\n }\n}\n\n#[test]\nfn add_test(){\n let b = BTree::new(1, ~\"abc\", 2);\n let is_add = b.add(2, ~\"xyz\");\n assert!(is_add);\n\n}\n\n#[test]\nfn get_test(){\n let b = BTree::new(1, ~\"abc\", 2);\n let val = b.get(1);\n assert_eq!(val, Some(~\"abc\"));\n}\n\n\/\/Testing LeafElt<K, V> functions (less_than, greater_than, and has_key)\n#[test]\nfn leaf_lt(){\n let l1 = LeafElt::new(1, ~\"abc\");\n let l2 = LeafElt::new(2, ~\"xyz\");\n assert!(l1.less_than(l2));\n}\n\n#[test]\nfn leaf_gt(){\n let l1 = LeafElt::new(1, ~\"abc\");\n let l2 = LeafElt::new(2, ~\"xyz\");\n assert!(l2.greater_than(l1));\n}\n\n#[test]\nfn leaf_hk(){\n let l1 = LeafElt::new(1, ~\"abc\");\n assert!(l1.has_key(1));\n}\n\nfn main(){\n\n\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(missing_copy_implementations)]\n\nuse io::{self, Read, Write, ErrorKind, BufRead};\n\n\/\/\/ Copies the entire contents of a reader into a writer.\n\/\/\/\n\/\/\/ This function will continuously read data from `reader` and then\n\/\/\/ write it into `writer` in a streaming fashion until `reader`\n\/\/\/ returns EOF.\n\/\/\/\n\/\/\/ On success, the total number of bytes that were copied from\n\/\/\/ `reader` to `writer` is returned.\n\/\/\/\n\/\/\/ # Errors\n\/\/\/\n\/\/\/ This function will return an error immediately if any call to `read` or\n\/\/\/ `write` returns an error. All instances of `ErrorKind::Interrupted` are\n\/\/\/ handled by this function and the underlying operation is retried.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ # fn foo() -> io::Result<()> {\n\/\/\/ let mut reader: &[u8] = b\"hello\";\n\/\/\/ let mut writer: Vec<u8> = vec![];\n\/\/\/\n\/\/\/ try!(io::copy(&mut reader, &mut writer));\n\/\/\/\n\/\/\/ assert_eq!(reader, &writer[..]);\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64>\n where R: Read, W: Write\n{\n let mut buf = [0; super::DEFAULT_BUF_SIZE];\n let mut written = 0;\n loop {\n let len = match reader.read(&mut buf) {\n Ok(0) => return Ok(written),\n Ok(len) => len,\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n Err(e) => return Err(e),\n };\n writer.write_all(&buf[..len])?;\n written += len as u64;\n }\n}\n\n\/\/\/ A reader which is always at EOF.\n\/\/\/\n\/\/\/ This struct is generally created by calling [`empty()`][empty]. 
Please see\n\/\/\/ the documentation of `empty()` for more details.\n\/\/\/\n\/\/\/ [empty]: fn.empty.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Empty { _priv: () }\n\n\/\/\/ Constructs a new handle to an empty reader.\n\/\/\/\n\/\/\/ All reads from the returned reader will return `Ok(0)`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A slightly sad example of not reading anything into a buffer:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::{self, Read};\n\/\/\/\n\/\/\/ let mut buffer = String::new();\n\/\/\/ io::empty().read_to_string(&mut buffer).unwrap();\n\/\/\/ assert!(buffer.is_empty());\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn empty() -> Empty { Empty { _priv: () } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Read for Empty {\n fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> { Ok(0) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl BufRead for Empty {\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(&[]) }\n fn consume(&mut self, _n: usize) {}\n}\n\n\/\/\/ A reader which yields one byte over and over and over and over and over and...\n\/\/\/\n\/\/\/ This struct is generally created by calling [`repeat()`][repeat]. Please\n\/\/\/ see the documentation of `repeat()` for more details.\n\/\/\/\n\/\/\/ [repeat]: fn.repeat.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Repeat { byte: u8 }\n\n\/\/\/ Creates an instance of a reader that infinitely repeats one byte.\n\/\/\/\n\/\/\/ All reads from this reader will succeed by filling the specified buffer with\n\/\/\/ the given byte.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn repeat(byte: u8) -> Repeat { Repeat { byte: byte } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Read for Repeat {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n for slot in &mut *buf {\n *slot = self.byte;\n }\n Ok(buf.len())\n }\n}\n\n\/\/\/ A writer which will move data into the void.\n\/\/\/\n\/\/\/ This struct is generally created by calling [`sink()`][sink]. 
Please\n\/\/\/ see the documentation of `sink()` for more details.\n\/\/\/\n\/\/\/ [sink]: fn.sink.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Sink { _priv: () }\n\n\/\/\/ Creates an instance of a writer which will successfully consume all data.\n\/\/\/\n\/\/\/ All calls to `write` on the returned instance will return `Ok(buf.len())`\n\/\/\/ and the contents of the buffer will not be inspected.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn sink() -> Sink { Sink { _priv: () } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Sink {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { Ok(buf.len()) }\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n\n use io::prelude::*;\n use io::{copy, sink, empty, repeat};\n\n #[test]\n fn copy_copies() {\n let mut r = repeat(0).take(4);\n let mut w = sink();\n assert_eq!(copy(&mut r, &mut w).unwrap(), 4);\n\n let mut r = repeat(0).take(1 << 17);\n assert_eq!(copy(&mut r as &mut Read, &mut w as &mut Write).unwrap(), 1 << 17);\n }\n\n #[test]\n fn sink_sinks() {\n let mut s = sink();\n assert_eq!(s.write(&[]).unwrap(), 0);\n assert_eq!(s.write(&[0]).unwrap(), 1);\n assert_eq!(s.write(&[0; 1024]).unwrap(), 1024);\n assert_eq!(s.by_ref().write(&[0; 1024]).unwrap(), 1024);\n }\n\n #[test]\n fn empty_reads() {\n let mut e = empty();\n assert_eq!(e.read(&mut []).unwrap(), 0);\n assert_eq!(e.read(&mut [0]).unwrap(), 0);\n assert_eq!(e.read(&mut [0; 1024]).unwrap(), 0);\n assert_eq!(e.by_ref().read(&mut [0; 1024]).unwrap(), 0);\n }\n\n #[test]\n fn repeat_repeats() {\n let mut r = repeat(4);\n let mut b = [0; 1024];\n assert_eq!(r.read(&mut b).unwrap(), 1024);\n assert!(b.iter().all(|b| *b == 4));\n }\n\n #[test]\n fn take_some_bytes() {\n assert_eq!(repeat(4).take(100).bytes().count(), 100);\n assert_eq!(repeat(4).take(100).bytes().next().unwrap().unwrap(), 4);\n assert_eq!(repeat(1).take(10).chain(repeat(2).take(10)).bytes().count(), 20);\n }\n}\n<commit_msg>Rollup merge of #34518 - frewsxcv:io-repeat, r=GuillaumeGomez<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(missing_copy_implementations)]\n\nuse io::{self, Read, Write, ErrorKind, BufRead};\n\n\/\/\/ Copies the entire contents of a reader into a writer.\n\/\/\/\n\/\/\/ This function will continuously read data from `reader` and then\n\/\/\/ write it into `writer` in a streaming fashion until `reader`\n\/\/\/ returns EOF.\n\/\/\/\n\/\/\/ On success, the total number of bytes that were copied from\n\/\/\/ `reader` to `writer` is returned.\n\/\/\/\n\/\/\/ # Errors\n\/\/\/\n\/\/\/ This function will return an error immediately if any call to `read` or\n\/\/\/ `write` returns an error. 
All instances of `ErrorKind::Interrupted` are\n\/\/\/ handled by this function and the underlying operation is retried.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ # fn foo() -> io::Result<()> {\n\/\/\/ let mut reader: &[u8] = b\"hello\";\n\/\/\/ let mut writer: Vec<u8> = vec![];\n\/\/\/\n\/\/\/ try!(io::copy(&mut reader, &mut writer));\n\/\/\/\n\/\/\/ assert_eq!(reader, &writer[..]);\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64>\n where R: Read, W: Write\n{\n let mut buf = [0; super::DEFAULT_BUF_SIZE];\n let mut written = 0;\n loop {\n let len = match reader.read(&mut buf) {\n Ok(0) => return Ok(written),\n Ok(len) => len,\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n Err(e) => return Err(e),\n };\n writer.write_all(&buf[..len])?;\n written += len as u64;\n }\n}\n\n\/\/\/ A reader which is always at EOF.\n\/\/\/\n\/\/\/ This struct is generally created by calling [`empty()`][empty]. Please see\n\/\/\/ the documentation of `empty()` for more details.\n\/\/\/\n\/\/\/ [empty]: fn.empty.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Empty { _priv: () }\n\n\/\/\/ Constructs a new handle to an empty reader.\n\/\/\/\n\/\/\/ All reads from the returned reader will return `Ok(0)`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A slightly sad example of not reading anything into a buffer:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::{self, Read};\n\/\/\/\n\/\/\/ let mut buffer = String::new();\n\/\/\/ io::empty().read_to_string(&mut buffer).unwrap();\n\/\/\/ assert!(buffer.is_empty());\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn empty() -> Empty { Empty { _priv: () } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Read for Empty {\n fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> { Ok(0) }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl BufRead for Empty {\n fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(&[]) }\n fn consume(&mut self, _n: usize) {}\n}\n\n\/\/\/ A reader which yields one byte over and over and over and over and over and...\n\/\/\/\n\/\/\/ This struct is generally created by calling [`repeat()`][repeat]. Please\n\/\/\/ see the documentation of `repeat()` for more details.\n\/\/\/\n\/\/\/ [repeat]: fn.repeat.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Repeat { byte: u8 }\n\n\/\/\/ Creates an instance of a reader that infinitely repeats one byte.\n\/\/\/\n\/\/\/ All reads from this reader will succeed by filling the specified buffer with\n\/\/\/ the given byte.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::{self, Read};\n\/\/\/\n\/\/\/ let mut buffer = [0; 3];\n\/\/\/ io::repeat(0b101).read_exact(&mut buffer).unwrap();\n\/\/\/ assert_eq!(buffer, [0b101, 0b101, 0b101]);\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn repeat(byte: u8) -> Repeat { Repeat { byte: byte } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Read for Repeat {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n for slot in &mut *buf {\n *slot = self.byte;\n }\n Ok(buf.len())\n }\n}\n\n\/\/\/ A writer which will move data into the void.\n\/\/\/\n\/\/\/ This struct is generally created by calling [`sink()`][sink]. 
Please\n\/\/\/ see the documentation of `sink()` for more details.\n\/\/\/\n\/\/\/ [sink]: fn.sink.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Sink { _priv: () }\n\n\/\/\/ Creates an instance of a writer which will successfully consume all data.\n\/\/\/\n\/\/\/ All calls to `write` on the returned instance will return `Ok(buf.len())`\n\/\/\/ and the contents of the buffer will not be inspected.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub fn sink() -> Sink { Sink { _priv: () } }\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl Write for Sink {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> { Ok(buf.len()) }\n fn flush(&mut self) -> io::Result<()> { Ok(()) }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n\n use io::prelude::*;\n use io::{copy, sink, empty, repeat};\n\n #[test]\n fn copy_copies() {\n let mut r = repeat(0).take(4);\n let mut w = sink();\n assert_eq!(copy(&mut r, &mut w).unwrap(), 4);\n\n let mut r = repeat(0).take(1 << 17);\n assert_eq!(copy(&mut r as &mut Read, &mut w as &mut Write).unwrap(), 1 << 17);\n }\n\n #[test]\n fn sink_sinks() {\n let mut s = sink();\n assert_eq!(s.write(&[]).unwrap(), 0);\n assert_eq!(s.write(&[0]).unwrap(), 1);\n assert_eq!(s.write(&[0; 1024]).unwrap(), 1024);\n assert_eq!(s.by_ref().write(&[0; 1024]).unwrap(), 1024);\n }\n\n #[test]\n fn empty_reads() {\n let mut e = empty();\n assert_eq!(e.read(&mut []).unwrap(), 0);\n assert_eq!(e.read(&mut [0]).unwrap(), 0);\n assert_eq!(e.read(&mut [0; 1024]).unwrap(), 0);\n assert_eq!(e.by_ref().read(&mut [0; 1024]).unwrap(), 0);\n }\n\n #[test]\n fn repeat_repeats() {\n let mut r = repeat(4);\n let mut b = [0; 1024];\n assert_eq!(r.read(&mut b).unwrap(), 1024);\n assert!(b.iter().all(|b| *b == 4));\n }\n\n #[test]\n fn take_some_bytes() {\n assert_eq!(repeat(4).take(100).bytes().count(), 100);\n assert_eq!(repeat(4).take(100).bytes().next().unwrap().unwrap(), 4);\n assert_eq!(repeat(1).take(10).chain(repeat(2).take(10)).bytes().count(), 20);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove dev-time leftovers.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>09 - while<commit_after>fn main() {\n \/\/ A counter variable\n let mut n = 1u;\n\n \/\/ Loop while `n` is less than 101\n while n < 101 {\n if n % 15 == 0 {\n println!(\"fizzbuzz\");\n } else if n % 3 == 0 {\n println!(\"fizz\");\n } else if n % 5 == 0 {\n println!(\"buzz\");\n } else {\n println!(\"{}\", n);\n }\n\n \/\/ Increment counter\n n += 1;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>watch should never rebuild on changes that are ignored<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added additional FFI from AgMD1Fundamental<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>108:24 warning: statement with no effect, #[warn(no_effect)] on by default src\/main.rs:108 Inhibit(false);<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>conditionals allow rendering to depend on arrow-state<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>creatures rendering<commit_after><|endoftext|>"} {"text":"<commit_before>use common::context::*;\nuse common::scheduler::*;\n\nuse graphics::bmp::*;\n\nuse programs::common::*;\nuse programs::editor::*;\nuse programs::executor::*;\nuse programs::filemanager::*;\nuse programs::viewer::*;\n\npub struct Session {\n pub display: Display,\n pub background: BMP,\n pub cursor: BMP,\n pub icon: BMP,\n pub mouse_point: Point,\n last_mouse_event: 
MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize\n}\n\nimpl Session {\n pub fn new() -> Session {\n unsafe {\n Session {\n display: Display::root(),\n background: BMP::new(),\n cursor: BMP::new(),\n icon: BMP::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n valid: false\n },\n items: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: REDRAW_ALL\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window){\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window){\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n }else{\n i += 1;\n },\n Option::None => break\n }\n\n if remove {\n self.windows.remove(i);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n }\n\n i = 0;\n while i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n }else{\n i += 1;\n },\n Option::None => break\n }\n\n if remove {\n self.windows_ordered.remove(i);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n }\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8){\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self){\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource>{\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n }else{\n list = scheme;\n }\n }\n }\n\n return box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8());\n }else{\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n return box NoneResource;\n }\n }\n\n fn item_main(&mut self, mut item: Box<SessionItem>, url: URL){\n Context::spawn(box move ||{\n item.main(url);\n });\n }\n\n fn on_key(&mut self, key_event: KeyEvent){\n if self.windows.len() > 0 {\n match self.windows.get(self.windows.len() - 1){\n Option::Some(window_ptr) => {\n unsafe{\n (**window_ptr).on_key(key_event);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n },\n Option::None => ()\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent){\n let mut catcher = -1;\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i){\n Option::Some(window_ptr) => unsafe{\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n },\n Option::None => ()\n }\n }\n\n \/\/Not caught, can be caught by task bar\n if catcher < 0 {\n if mouse_event.left_button && !self.last_mouse_event.left_button && mouse_event.y >= self.display.height as isize - 32 {\n if mouse_event.x <= 56 {\n self.item_main(box FileManager::new(), URL::from_string(&\"file:\/\/\/\".to_string()));\n 
}else{\n let mut chars = 32;\n while chars > 4 && (chars*8 + 3*4) * self.windows.len() > self.display.width {\n chars -= 1;\n }\n\n for i in 0..self.windows_ordered.len() {\n let x = (5*8 + 2*8 + (chars*8 + 3*4) * i) as isize;\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => unsafe {\n for j in 0..self.windows.len() {\n match self.windows.get(j){\n Option::Some(catcher_window_ptr) => if catcher_window_ptr == window_ptr {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n }else{\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n break;\n },\n Option::None => break\n }\n }\n self.redraw = max(self.redraw, REDRAW_ALL);\n },\n Option::None => ()\n }\n break;\n }\n }\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize){\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => ()\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self){\n if self.redraw > REDRAW_NONE {\n if self.redraw >= REDRAW_ALL {\n self.display.set(Color::new(64, 64, 64));\n if self.background.data > 0 {\n self.display.image(Point::new((self.display.width as isize - self.background.size.width as isize)\/2, (self.display.height as isize - self.background.size.height as isize)\/2), self.background.data, self.background.size);\n }\n\n for i in 0..self.windows.len(){\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n },\n Option::None => ()\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32), Size::new(self.display.width, 32), Color::new(0, 0, 0));\n if self.icon.data > 0 {\n self.display.image_alpha(Point::new(12, self.display.height as isize - 32), self.icon.data, self.icon.size);\n }else{\n self.display.text(Point::new(8, self.display.height as isize - 24), &String::from_str(\"Redox\"), Color::new(255, 255, 255));\n }\n\n let mut chars = 32;\n while chars > 4 && (chars*8 + 3*4) * self.windows.len() > self.display.width {\n chars -= 1;\n }\n\n for i in 0..self.windows_ordered.len() {\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => {\n let x = (5*8 + 2*8 + (chars*8 + 3*4) * i) as isize;\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32), Size::new(w, 32), (**window_ptr).border_color);\n self.display.text(Point::new(x + 4, self.display.height as isize - 24), &(**window_ptr).title.substr(0, chars as usize), (**window_ptr).title_color);\n },\n Option::None => ()\n }\n }\n }\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n if self.cursor.data > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data, self.cursor.size);\n }else{\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n\n self.redraw = REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event){\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) => self.redraw = max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n let url = 
URL::from_string(&url_string);\n\n if url_string.ends_with(\".md\".to_string()) || url_string.ends_with(\".rs\".to_string()) || url_string.ends_with(\".sh\".to_string()){\n self.item_main(box Editor::new(), url);\n }else if url_string.ends_with(\".bin\".to_string()){\n self.item_main(box Executor::new(), url);\n }else if url_string.ends_with(\".bmp\".to_string()){\n self.item_main(box Viewer::new(), url);\n }else{\n d(\"No program found: \");\n url.d();\n dl();\n }\n }\n _ => ()\n }\n }\n}\n<commit_msg>Bug fixes for windows<commit_after>use common::context::*;\nuse common::scheduler::*;\n\nuse graphics::bmp::*;\n\nuse programs::common::*;\nuse programs::editor::*;\nuse programs::executor::*;\nuse programs::filemanager::*;\nuse programs::viewer::*;\n\npub struct Session {\n pub display: Display,\n pub background: BMP,\n pub cursor: BMP,\n pub icon: BMP,\n pub mouse_point: Point,\n last_mouse_event: MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize\n}\n\nimpl Session {\n pub fn new() -> Session {\n unsafe {\n Session {\n display: Display::root(),\n background: BMP::new(),\n cursor: BMP::new(),\n icon: BMP::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n valid: false\n },\n items: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: REDRAW_ALL\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window){\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window){\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n }else{\n i += 1;\n },\n Option::None => break\n }\n\n if remove {\n self.windows.remove(i);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n }\n\n i = 0;\n while i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n }else{\n i += 1;\n },\n Option::None => break\n }\n\n if remove {\n self.windows_ordered.remove(i);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n }\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8){\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self){\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource>{\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n }else{\n list = scheme;\n }\n }\n }\n\n return box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8());\n }else{\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n return box NoneResource;\n }\n }\n\n fn item_main(&mut self, mut item: Box<SessionItem>, url: URL){\n Context::spawn(box move ||{\n item.main(url);\n });\n }\n\n fn on_key(&mut self, key_event: KeyEvent){\n if self.windows.len() > 0 {\n match 
self.windows.get(self.windows.len() - 1){\n Option::Some(window_ptr) => {\n unsafe{\n (**window_ptr).on_key(key_event);\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n },\n Option::None => ()\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent){\n let mut catcher = -1;\n\n if mouse_event.y >= self.display.height as isize - 32 {\n if mouse_event.left_button && !self.last_mouse_event.left_button {\n if mouse_event.x <= 56 {\n self.item_main(box FileManager::new(), URL::from_string(&\"file:\/\/\/\".to_string()));\n }else{\n let mut chars = 32;\n while chars > 4 && (chars*8 + 3*4) * self.windows.len() > self.display.width {\n chars -= 1;\n }\n\n for i in 0..self.windows_ordered.len() {\n let x = (5*8 + 2*8 + (chars*8 + 3*4) * i) as isize;\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => unsafe {\n for j in 0..self.windows.len() {\n match self.windows.get(j){\n Option::Some(catcher_window_ptr) => if catcher_window_ptr == window_ptr {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n }else{\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n break;\n },\n Option::None => break\n }\n }\n self.redraw = max(self.redraw, REDRAW_ALL);\n },\n Option::None => ()\n }\n break;\n }\n }\n }\n }\n }else{\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i){\n Option::Some(window_ptr) => unsafe{\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = max(self.redraw, REDRAW_ALL);\n }\n },\n Option::None => ()\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize){\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => ()\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self){\n if self.redraw > REDRAW_NONE {\n if self.redraw >= REDRAW_ALL {\n self.display.set(Color::new(64, 64, 64));\n if self.background.data > 0 {\n self.display.image(Point::new((self.display.width as isize - self.background.size.width as isize)\/2, (self.display.height as isize - self.background.size.height as isize)\/2), self.background.data, self.background.size);\n }\n\n for i in 0..self.windows.len(){\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n },\n Option::None => ()\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32), Size::new(self.display.width, 32), Color::new(0, 0, 0));\n if self.icon.data > 0 {\n self.display.image_alpha(Point::new(12, self.display.height as isize - 32), self.icon.data, self.icon.size);\n }else{\n self.display.text(Point::new(8, self.display.height as isize - 24), &String::from_str(\"Redox\"), Color::new(255, 255, 255));\n }\n\n let mut chars = 32;\n while chars > 4 && (chars*8 + 3*4) * self.windows.len() > self.display.width {\n chars -= 1;\n }\n\n for i in 0..self.windows_ordered.len() {\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => {\n let x = (5*8 + 2*8 + (chars*8 + 3*4) * i) as isize;\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32), Size::new(w, 32), (**window_ptr).border_color);\n self.display.text(Point::new(x + 4, self.display.height as isize - 24), 
&(**window_ptr).title.substr(0, chars as usize), (**window_ptr).title_color);\n },\n Option::None => ()\n }\n }\n }\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n if self.cursor.data > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data, self.cursor.size);\n }else{\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n\n self.redraw = REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event){\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) => self.redraw = max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n let url = URL::from_string(&url_string);\n\n if url_string.ends_with(\".md\".to_string()) || url_string.ends_with(\".rs\".to_string()) || url_string.ends_with(\".sh\".to_string()){\n self.item_main(box Editor::new(), url);\n }else if url_string.ends_with(\".bin\".to_string()){\n self.item_main(box Executor::new(), url);\n }else if url_string.ends_with(\".bmp\".to_string()){\n self.item_main(box Viewer::new(), url);\n }else{\n d(\"No program found: \");\n url.d();\n dl();\n }\n }\n _ => ()\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add benchmark<commit_after>\/\/ The MIT License (MIT)\n\n\/\/ Copyright (c) 2014 Y. T. CHUNG <zonyitoo@gmail.com>\n\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a copy of\n\/\/ this software and associated documentation files (the \"Software\"), to deal in\n\/\/ the Software without restriction, including without limitation the rights to\n\/\/ use, copy, modify, merge, publish, distribute, sublicense, and\/or sell copies of\n\/\/ the Software, and to permit persons to whom the Software is furnished to do so,\n\/\/ subject to the following conditions:\n\n\/\/ The above copyright notice and this permission notice shall be included in all\n\/\/ copies or substantial portions of the Software.\n\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\/\/ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\/\/ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\/\/ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\/\/ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n#![feature(phase)]\n\nextern crate memcached;\n#[phase(plugin, link)]\nextern crate log;\nextern crate time;\n\nuse std::rand::random;\n\nuse memcached::client::Client;\nuse memcached::proto::{Binary, Operation};\nuse memcached::proto::MemCachedError;\nuse memcached::proto::{KeyNotFound};\n\nconst SERVERS: &'static [(&'static str, uint)] = [\n (\"127.0.0.1:11211\", 1),\n \/\/ (\"127.0.0.1:11212\", 1),\n \/\/ (\"127.0.0.1:11213\", 1),\n];\n\nconst TESTS: &'static [(uint, uint, f64, uint)] = [\n (1, 10240, 0.2, 64),\n (1, 10240, 0.2, 512),\n (1, 10240, 0.2, 1024),\n (1, 10240, 0.2, 4096),\n (1, 10240, 0.2, 16384),\n\n (10, 8192, 0.2, 64),\n (10, 8192, 0.2, 512),\n (10, 8192, 0.2, 1024),\n (10, 8192, 0.2, 4096),\n (10, 8192, 0.2, 16384),\n\n (50, 4096, 0.2, 64),\n (50, 4096, 0.2, 512),\n (50, 4096, 0.2, 1024),\n (50, 4096, 0.2, 4096),\n (50, 4096, 0.2, 16384),\n\n (100, 2048, 0.2, 64),\n (100, 2048, 0.2, 512),\n (100, 2048, 0.2, 1024),\n (100, 2048, 0.2, 4096),\n (100, 2048, 0.2, 16384),\n\n (300, 1024, 0.2, 64),\n (300, 1024, 0.2, 512),\n (300, 1024, 0.2, 1024),\n (300, 1024, 0.2, 4096),\n (300, 1024, 0.2, 16384),\n];\n\nfn main() {\n println!(\"Concurrent,Repeat,WriteRate,ValueLength,Hit,Miss,Error,Time,TPS\");\n for &(concurrent, repeat, write_rate, value_size) in TESTS.iter() {\n let mut rxs = Vec::new();\n\n let begin_time = time::now().to_timespec();\n for offset in range(0, concurrent) {\n let (tx, rx) = channel();\n spawn(proc() {\n let mut client = Client::connect(SERVERS, Binary).unwrap_or_else(|err| {\n panic!(\"{} failed to connect server {}\", offset, err);\n });\n\n let (mut hit, mut miss, mut error) = (0u, 0u, 0u);\n for key in range(offset, offset + (write_rate * repeat as f64) as uint) {\n client.set(key.to_string().as_slice().as_bytes(), generate_data(value_size).as_slice(),\n 0xdeadbeef, 0).unwrap_or_else(|err| {\n panic!(\"Failed to set: {}\", err);\n })\n }\n for _ in range(0, ((1f64 - write_rate) \/ write_rate) as uint) {\n for key in range(offset, offset + (write_rate * repeat as f64) as uint) {\n match client.get(key.to_string().as_slice().as_bytes()) {\n Err(e) => {\n match e.kind {\n MemCachedError(err) => {\n if err == KeyNotFound {\n miss += 1;\n } else {\n error += 1;\n }\n },\n _ => {\n panic!(\"Failed to get: {}\", e);\n }\n }\n },\n Ok(..) => {\n hit += 1;\n }\n }\n }\n }\n\n tx.send((hit, miss, error));\n \/\/ drop(tx);\n drop(client);\n });\n rxs.push(rx);\n }\n\n let (mut tot_hit, mut tot_miss, mut tot_error) = (0u, 0u, 0u);\n for rx in rxs.into_iter() {\n let (h, m, e) = rx.recv();\n tot_hit += h;\n tot_miss += m;\n tot_error += e;\n }\n let end_time = time::now().to_timespec();\n\n let duration = end_time - begin_time;\n let tps = (((concurrent * repeat) * 1000) as f64) \/ (duration.num_milliseconds() as f64);\n println!(\"{},{},{},{},{},{},{},{},{}\",\n concurrent, repeat, write_rate, value_size,\n tot_hit, tot_miss, tot_error, duration.num_milliseconds(), tps);\n }\n}\n\nfn generate_data(len: uint) -> Vec<u8> {\n Vec::from_fn(len, |_| random())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse bar::Foo; \/\/~ ERROR There is no `Foo` in `bar` [E0432]\nmod bar {\n use Foo; \/\/~ ERROR There is no `Foo` in the crate root [E0432]\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Pick more appropriate int types for cryptoutil<commit_after><|endoftext|>"} {"text":"<commit_before>use {Handler, Evented, Poll, Token};\nuse os::event::{IoEvent, Interest, PollOpt};\nuse notify::Notify;\nuse timer::{Timer, Timeout, TimerResult};\nuse std::default::Default;\nuse std::time::duration::Duration;\nuse std::{io, fmt, usize};\n\n\/\/\/ Configure EventLoop runtime details\n#[derive(Copy, Clone, Debug)]\npub struct EventLoopConfig {\n pub io_poll_timeout_ms: usize,\n\n \/\/ == Notifications ==\n pub notify_capacity: usize,\n pub messages_per_tick: usize,\n\n \/\/ == Timer ==\n pub timer_tick_ms: u64,\n pub timer_wheel_size: usize,\n pub timer_capacity: usize,\n}\n\nimpl Default for EventLoopConfig {\n fn default() -> EventLoopConfig {\n EventLoopConfig {\n io_poll_timeout_ms: 1_000,\n notify_capacity: 1_024,\n messages_per_tick: 64,\n timer_tick_ms: 100,\n timer_wheel_size: 1_024,\n timer_capacity: 65_536,\n }\n }\n}\n\n\/\/\/ Single threaded IO event loop.\npub struct EventLoop<H: Handler> {\n run: bool,\n poll: Poll,\n timer: Timer<H::Timeout>,\n notify: Notify<H::Message>,\n config: EventLoopConfig,\n}\n\n\/\/ Token used to represent notifications\nconst NOTIFY: Token = Token(usize::MAX);\n\nimpl<H: Handler> EventLoop<H> {\n\n \/\/\/ Initializes a new event loop using default configuration settings. 
The\n \/\/\/ event loop will not be running yet.\n pub fn new() -> io::Result<EventLoop<H>> {\n EventLoop::configured(Default::default())\n }\n\n pub fn configured(config: EventLoopConfig) -> io::Result<EventLoop<H>> {\n \/\/ Create the IO poller\n let mut poll = try!(Poll::new());\n\n \/\/ Create the timer\n let mut timer = Timer::new(\n config.timer_tick_ms,\n config.timer_wheel_size,\n config.timer_capacity);\n\n \/\/ Create cross thread notification queue\n let notify = try!(Notify::with_capacity(config.notify_capacity));\n\n \/\/ Register the notification wakeup FD with the IO poller\n try!(poll.register(¬ify, NOTIFY, Interest::readable() | Interest::writable() , PollOpt::edge()));\n\n \/\/ Set the timer's starting time reference point\n timer.setup();\n\n Ok(EventLoop {\n run: true,\n poll: poll,\n timer: timer,\n notify: notify,\n config: config,\n })\n }\n\n \/\/\/ Returns a sender that allows sending messages to the event loop in a\n \/\/\/ thread-safe way, waking up the event loop if needed.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use std::thread::Thread;\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = ();\n \/\/\/ type Message = u32;\n \/\/\/\n \/\/\/ fn notify(&mut self, event_loop: &mut EventLoop<MyHandler>, msg: u32) {\n \/\/\/ assert_eq!(msg, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let sender = event_loop.channel();\n \/\/\/\n \/\/\/ \/\/ Send the notification from another thread\n \/\/\/ Thread::spawn(move || {\n \/\/\/ let _ = sender.send(123);\n \/\/\/ });\n \/\/\/\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Implementation Details\n \/\/\/\n \/\/\/ Each [EventLoop](#) contains a lock-free queue with a pre-allocated\n \/\/\/ buffer size. The size can be changed by modifying\n \/\/\/ [EventLoopConfig.notify_capacity](struct.EventLoopConfig.html#structfield.notify_capacity).\n \/\/\/ When a message is sent to the EventLoop, it is first pushed on to the\n \/\/\/ queue. Then, if the EventLoop is currently running, an atomic flag is\n \/\/\/ set to indicate that the next loop iteration should be started without\n \/\/\/ waiting.\n \/\/\/\n \/\/\/ If the loop is blocked waiting for IO events, then it is woken up. The\n \/\/\/ strategy for waking up the event loop is platform dependent. For\n \/\/\/ example, on a modern Linux OS, eventfd is used. On older OSes, a pipe\n \/\/\/ is used.\n \/\/\/\n \/\/\/ The strategy of setting an atomic flag if the event loop is not already\n \/\/\/ sleeping allows avoiding an expensive wakeup operation if at all possible.\n pub fn channel(&self) -> EventLoopSender<H::Message> {\n EventLoopSender::new(self.notify.clone())\n }\n\n \/\/\/ Schedules a timeout after the requested time interval. 
When the\n \/\/\/ duration has been reached,\n \/\/\/ [Handler::timeout](trait.Handler.html#method.timeout) will be invoked\n \/\/\/ passing in the supplied token.\n \/\/\/\n \/\/\/ Returns a handle to the timeout that can be used to cancel the timeout\n \/\/\/ using [#clear_timeout](#method.clear_timeout).\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/ use std::time::Duration;\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = u32;\n \/\/\/ type Message = ();\n \/\/\/\n \/\/\/ fn timeout(&mut self, event_loop: &mut EventLoop<MyHandler>, timeout: u32) {\n \/\/\/ assert_eq!(timeout, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let timeout = event_loop.timeout(123, Duration::milliseconds(300)).unwrap();\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n pub fn timeout(&mut self, token: H::Timeout, delay: Duration) -> TimerResult<Timeout> {\n self.timer.timeout(token, delay)\n }\n\n \/\/\/ If the supplied timeout has not been triggered, cancel it such that it\n \/\/\/ will not be triggered in the future.\n pub fn clear_timeout(&mut self, timeout: Timeout) -> bool {\n self.timer.clear(timeout)\n }\n\n \/\/\/ Tells the event loop to exit after it is done handling all events in the\n \/\/\/ current iteration.\n pub fn shutdown(&mut self) {\n self.run = false;\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register<E: Evented>(&mut self, io: &E, token: Token) -> io::Result<()> {\n self.poll.register(io, token, Interest::readable(), PollOpt::level())\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register_opt<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.register(io, token, interest, opt)\n }\n\n \/\/\/ Re-Registers an IO handle with the event loop.\n pub fn reregister<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.reregister(io, token, interest, opt)\n }\n\n \/\/\/ Keep spinning the event loop indefinitely, and notify the handler whenever\n \/\/\/ any of the registered handles are ready.\n pub fn run(&mut self, handler: &mut H) -> io::Result<()> {\n self.run = true;\n\n while self.run {\n \/\/ Execute ticks as long as the event loop is running\n try!(self.run_once(handler));\n }\n\n Ok(())\n }\n\n \/\/\/ Deregisters an IO handle with the event loop.\n pub fn deregister<E: Evented>(&mut self, io: &E) -> io::Result<()> {\n self.poll.deregister(io)\n }\n\n \/\/\/ Spin the event loop once, with a timeout of one second, and notify the\n \/\/\/ handler if any of the registered handles become ready during that\n \/\/\/ time.\n pub fn run_once(&mut self, handler: &mut H) -> io::Result<()> {\n let mut messages;\n let mut pending;\n\n debug!(\"event loop tick\");\n\n \/\/ Check the notify channel for any pending messages. If there are any,\n \/\/ avoid blocking when polling for IO events. Messages will be\n \/\/ processed after IO events.\n messages = self.notify.check(self.config.messages_per_tick, true);\n pending = messages > 0;\n\n \/\/ Check the registered IO handles for any new events. 
Each poll\n \/\/ is for one second, so a shutdown request can last as long as\n \/\/ one second before it takes effect.\n let events = match self.io_poll(pending) {\n Ok(e) => e,\n Err(err) => {\n if err.kind() == io::ErrorKind::Interrupted {\n handler.interrupted(self);\n 0\n } else {\n return Err(err);\n }\n }\n };\n\n if !pending {\n \/\/ Indicate that the sleep period is over, also grab any additional\n \/\/ messages\n let remaining = self.config.messages_per_tick - messages;\n messages += self.notify.check(remaining, false);\n }\n\n self.io_process(handler, events);\n self.notify(handler, messages);\n self.timer_process(handler);\n\n Ok(())\n }\n\n #[inline]\n fn io_poll(&mut self, immediate: bool) -> io::Result<usize> {\n if immediate {\n self.poll.poll(0)\n } else {\n let mut sleep = self.timer.next_tick_in_ms() as usize;\n\n if sleep > self.config.io_poll_timeout_ms {\n sleep = self.config.io_poll_timeout_ms;\n }\n\n self.poll.poll(sleep)\n }\n }\n\n \/\/ Process IO events that have been previously polled\n fn io_process(&mut self, handler: &mut H, cnt: usize) {\n let mut i = 0;\n\n \/\/ Iterate over the notifications. Each event provides the token\n \/\/ it was registered with (which usually represents, at least, the\n \/\/ handle that the event is about) as well as information about\n \/\/ what kind of event occurred (readable, writable, signal, etc.)\n while i < cnt {\n let evt = self.poll.event(i);\n\n debug!(\"event={:?}\", evt);\n\n match evt.token() {\n NOTIFY => self.notify.cleanup(),\n _ => self.io_event(handler, evt)\n }\n\n i += 1;\n }\n }\n\n fn io_event(&mut self, handler: &mut H, evt: IoEvent) {\n let tok = evt.token();\n\n if evt.is_readable() | evt.is_error() {\n handler.readable(self, tok, evt.read_hint());\n }\n\n if evt.is_writable() {\n handler.writable(self, tok);\n }\n }\n\n fn notify(&mut self, handler: &mut H, mut cnt: usize) {\n while cnt > 0 {\n let msg = self.notify.poll()\n .expect(\"[BUG] at this point there should always be a message\");\n\n handler.notify(self, msg);\n cnt -= 1;\n }\n }\n\n fn timer_process(&mut self, handler: &mut H) {\n let now = self.timer.now();\n\n loop {\n match self.timer.tick_to(now) {\n Some(t) => handler.timeout(self, t),\n _ => return\n }\n }\n }\n}\n\nunsafe impl<H: Handler> Sync for EventLoop<H> { }\n\n\/\/\/ Sends messages to the EventLoop from other threads.\npub struct EventLoopSender<M: Send> {\n notify: Notify<M>\n}\n\nimpl<M: Send> Clone for EventLoopSender<M> {\n fn clone(&self) -> EventLoopSender<M> {\n EventLoopSender { notify: self.notify.clone() }\n }\n}\n\nimpl<M: Send> fmt::Debug for EventLoopSender<M> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"EventLoopSender<?> {{ ... 
}}\")\n }\n}\n\nunsafe impl<M: Send> Sync for EventLoopSender<M> { }\n\nimpl<M: Send> EventLoopSender<M> {\n fn new(notify: Notify<M>) -> EventLoopSender<M> {\n EventLoopSender { notify: notify }\n }\n\n pub fn send(&self, msg: M) -> Result<(), M> {\n self.notify.notify(msg)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::str;\n use std::sync::Arc;\n use std::sync::atomic::AtomicIsize;\n use std::sync::atomic::Ordering::SeqCst;\n use super::EventLoop;\n use {io, buf, Buf, Handler, Token, TryRead, TryWrite};\n use os::event;\n\n #[test]\n pub fn test_event_loop_size() {\n use std::mem;\n assert!(512 >= mem::size_of::<EventLoop<Funtimes>>());\n }\n\n struct Funtimes {\n rcount: Arc<AtomicIsize>,\n wcount: Arc<AtomicIsize>\n }\n\n impl Funtimes {\n fn new(rcount: Arc<AtomicIsize>, wcount: Arc<AtomicIsize>) -> Funtimes {\n Funtimes {\n rcount: rcount,\n wcount: wcount\n }\n }\n }\n\n impl Handler for Funtimes {\n type Timeout = usize;\n type Message = ();\n\n fn readable(&mut self, _event_loop: &mut EventLoop<Funtimes>, token: Token, _hint: event::ReadHint) {\n (*self.rcount).fetch_add(1, SeqCst);\n assert_eq!(token, Token(10));\n }\n }\n\n #[test]\n pub fn test_readable() {\n let mut event_loop = EventLoop::new().ok().expect(\"Couldn't make event loop\");\n\n let (mut reader, mut writer) = io::pipe().unwrap();\n\n let rcount = Arc::new(AtomicIsize::new(0));\n let wcount = Arc::new(AtomicIsize::new(0));\n let mut handler = Funtimes::new(rcount.clone(), wcount.clone());\n\n writer.write(&mut buf::SliceBuf::wrap(\"hello\".as_bytes())).unwrap();\n event_loop.register(&reader, Token(10)).unwrap();\n\n let _ = event_loop.run_once(&mut handler);\n let mut b = buf::ByteBuf::mut_with_capacity(16);\n\n assert_eq!((*rcount).load(SeqCst), 1);\n\n reader.read(&mut b).unwrap();\n\n assert_eq!(str::from_utf8(b.flip().bytes()).unwrap(), \"hello\");\n }\n}\n<commit_msg>Remove some dead code from tests<commit_after>use {Handler, Evented, Poll, Token};\nuse os::event::{IoEvent, Interest, PollOpt};\nuse notify::Notify;\nuse timer::{Timer, Timeout, TimerResult};\nuse std::default::Default;\nuse std::time::duration::Duration;\nuse std::{io, fmt, usize};\n\n\/\/\/ Configure EventLoop runtime details\n#[derive(Copy, Clone, Debug)]\npub struct EventLoopConfig {\n pub io_poll_timeout_ms: usize,\n\n \/\/ == Notifications ==\n pub notify_capacity: usize,\n pub messages_per_tick: usize,\n\n \/\/ == Timer ==\n pub timer_tick_ms: u64,\n pub timer_wheel_size: usize,\n pub timer_capacity: usize,\n}\n\nimpl Default for EventLoopConfig {\n fn default() -> EventLoopConfig {\n EventLoopConfig {\n io_poll_timeout_ms: 1_000,\n notify_capacity: 1_024,\n messages_per_tick: 64,\n timer_tick_ms: 100,\n timer_wheel_size: 1_024,\n timer_capacity: 65_536,\n }\n }\n}\n\n\/\/\/ Single threaded IO event loop.\npub struct EventLoop<H: Handler> {\n run: bool,\n poll: Poll,\n timer: Timer<H::Timeout>,\n notify: Notify<H::Message>,\n config: EventLoopConfig,\n}\n\n\/\/ Token used to represent notifications\nconst NOTIFY: Token = Token(usize::MAX);\n\nimpl<H: Handler> EventLoop<H> {\n\n \/\/\/ Initializes a new event loop using default configuration settings. 
The\n \/\/\/ event loop will not be running yet.\n pub fn new() -> io::Result<EventLoop<H>> {\n EventLoop::configured(Default::default())\n }\n\n pub fn configured(config: EventLoopConfig) -> io::Result<EventLoop<H>> {\n \/\/ Create the IO poller\n let mut poll = try!(Poll::new());\n\n \/\/ Create the timer\n let mut timer = Timer::new(\n config.timer_tick_ms,\n config.timer_wheel_size,\n config.timer_capacity);\n\n \/\/ Create cross thread notification queue\n let notify = try!(Notify::with_capacity(config.notify_capacity));\n\n \/\/ Register the notification wakeup FD with the IO poller\n try!(poll.register(&notify, NOTIFY, Interest::readable() | Interest::writable() , PollOpt::edge()));\n\n \/\/ Set the timer's starting time reference point\n timer.setup();\n\n Ok(EventLoop {\n run: true,\n poll: poll,\n timer: timer,\n notify: notify,\n config: config,\n })\n }\n\n \/\/\/ Returns a sender that allows sending messages to the event loop in a\n \/\/\/ thread-safe way, waking up the event loop if needed.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use std::thread::Thread;\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = ();\n \/\/\/ type Message = u32;\n \/\/\/\n \/\/\/ fn notify(&mut self, event_loop: &mut EventLoop<MyHandler>, msg: u32) {\n \/\/\/ assert_eq!(msg, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let sender = event_loop.channel();\n \/\/\/\n \/\/\/ \/\/ Send the notification from another thread\n \/\/\/ Thread::spawn(move || {\n \/\/\/ let _ = sender.send(123);\n \/\/\/ });\n \/\/\/\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n \/\/\/\n \/\/\/ # Implementation Details\n \/\/\/\n \/\/\/ Each [EventLoop](#) contains a lock-free queue with a pre-allocated\n \/\/\/ buffer size. The size can be changed by modifying\n \/\/\/ [EventLoopConfig.notify_capacity](struct.EventLoopConfig.html#structfield.notify_capacity).\n \/\/\/ When a message is sent to the EventLoop, it is first pushed on to the\n \/\/\/ queue. Then, if the EventLoop is currently running, an atomic flag is\n \/\/\/ set to indicate that the next loop iteration should be started without\n \/\/\/ waiting.\n \/\/\/\n \/\/\/ If the loop is blocked waiting for IO events, then it is woken up. The\n \/\/\/ strategy for waking up the event loop is platform dependent. For\n \/\/\/ example, on a modern Linux OS, eventfd is used. On older OSes, a pipe\n \/\/\/ is used.\n \/\/\/\n \/\/\/ The strategy of setting an atomic flag if the event loop is not already\n \/\/\/ sleeping allows avoiding an expensive wakeup operation if at all possible.\n pub fn channel(&self) -> EventLoopSender<H::Message> {\n EventLoopSender::new(self.notify.clone())\n }\n\n \/\/\/ Schedules a timeout after the requested time interval. 
When the\n \/\/\/ duration has been reached,\n \/\/\/ [Handler::timeout](trait.Handler.html#method.timeout) will be invoked\n \/\/\/ passing in the supplied token.\n \/\/\/\n \/\/\/ Returns a handle to the timeout that can be used to cancel the timeout\n \/\/\/ using [#clear_timeout](#method.clear_timeout).\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ #![allow(unstable)]\n \/\/\/\n \/\/\/ use mio::{EventLoop, Handler};\n \/\/\/ use std::time::Duration;\n \/\/\/\n \/\/\/ struct MyHandler;\n \/\/\/\n \/\/\/ impl Handler for MyHandler {\n \/\/\/ type Timeout = u32;\n \/\/\/ type Message = ();\n \/\/\/\n \/\/\/ fn timeout(&mut self, event_loop: &mut EventLoop<MyHandler>, timeout: u32) {\n \/\/\/ assert_eq!(timeout, 123);\n \/\/\/ event_loop.shutdown();\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/\n \/\/\/ let mut event_loop = EventLoop::new().unwrap();\n \/\/\/ let timeout = event_loop.timeout(123, Duration::milliseconds(300)).unwrap();\n \/\/\/ let _ = event_loop.run(&mut MyHandler);\n \/\/\/ ```\n pub fn timeout(&mut self, token: H::Timeout, delay: Duration) -> TimerResult<Timeout> {\n self.timer.timeout(token, delay)\n }\n\n \/\/\/ If the supplied timeout has not been triggered, cancel it such that it\n \/\/\/ will not be triggered in the future.\n pub fn clear_timeout(&mut self, timeout: Timeout) -> bool {\n self.timer.clear(timeout)\n }\n\n \/\/\/ Tells the event loop to exit after it is done handling all events in the\n \/\/\/ current iteration.\n pub fn shutdown(&mut self) {\n self.run = false;\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register<E: Evented>(&mut self, io: &E, token: Token) -> io::Result<()> {\n self.poll.register(io, token, Interest::readable(), PollOpt::level())\n }\n\n \/\/\/ Registers an IO handle with the event loop.\n pub fn register_opt<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.register(io, token, interest, opt)\n }\n\n \/\/\/ Re-Registers an IO handle with the event loop.\n pub fn reregister<E: Evented>(&mut self, io: &E, token: Token, interest: Interest, opt: PollOpt) -> io::Result<()> {\n self.poll.reregister(io, token, interest, opt)\n }\n\n \/\/\/ Keep spinning the event loop indefinitely, and notify the handler whenever\n \/\/\/ any of the registered handles are ready.\n pub fn run(&mut self, handler: &mut H) -> io::Result<()> {\n self.run = true;\n\n while self.run {\n \/\/ Execute ticks as long as the event loop is running\n try!(self.run_once(handler));\n }\n\n Ok(())\n }\n\n \/\/\/ Deregisters an IO handle with the event loop.\n pub fn deregister<E: Evented>(&mut self, io: &E) -> io::Result<()> {\n self.poll.deregister(io)\n }\n\n \/\/\/ Spin the event loop once, with a timeout of one second, and notify the\n \/\/\/ handler if any of the registered handles become ready during that\n \/\/\/ time.\n pub fn run_once(&mut self, handler: &mut H) -> io::Result<()> {\n let mut messages;\n let mut pending;\n\n debug!(\"event loop tick\");\n\n \/\/ Check the notify channel for any pending messages. If there are any,\n \/\/ avoid blocking when polling for IO events. Messages will be\n \/\/ processed after IO events.\n messages = self.notify.check(self.config.messages_per_tick, true);\n pending = messages > 0;\n\n \/\/ Check the registered IO handles for any new events. 
Each poll\n \/\/ is for one second, so a shutdown request can last as long as\n \/\/ one second before it takes effect.\n let events = match self.io_poll(pending) {\n Ok(e) => e,\n Err(err) => {\n if err.kind() == io::ErrorKind::Interrupted {\n handler.interrupted(self);\n 0\n } else {\n return Err(err);\n }\n }\n };\n\n if !pending {\n \/\/ Indicate that the sleep period is over, also grab any additional\n \/\/ messages\n let remaining = self.config.messages_per_tick - messages;\n messages += self.notify.check(remaining, false);\n }\n\n self.io_process(handler, events);\n self.notify(handler, messages);\n self.timer_process(handler);\n\n Ok(())\n }\n\n #[inline]\n fn io_poll(&mut self, immediate: bool) -> io::Result<usize> {\n if immediate {\n self.poll.poll(0)\n } else {\n let mut sleep = self.timer.next_tick_in_ms() as usize;\n\n if sleep > self.config.io_poll_timeout_ms {\n sleep = self.config.io_poll_timeout_ms;\n }\n\n self.poll.poll(sleep)\n }\n }\n\n \/\/ Process IO events that have been previously polled\n fn io_process(&mut self, handler: &mut H, cnt: usize) {\n let mut i = 0;\n\n \/\/ Iterate over the notifications. Each event provides the token\n \/\/ it was registered with (which usually represents, at least, the\n \/\/ handle that the event is about) as well as information about\n \/\/ what kind of event occurred (readable, writable, signal, etc.)\n while i < cnt {\n let evt = self.poll.event(i);\n\n debug!(\"event={:?}\", evt);\n\n match evt.token() {\n NOTIFY => self.notify.cleanup(),\n _ => self.io_event(handler, evt)\n }\n\n i += 1;\n }\n }\n\n fn io_event(&mut self, handler: &mut H, evt: IoEvent) {\n let tok = evt.token();\n\n if evt.is_readable() | evt.is_error() {\n handler.readable(self, tok, evt.read_hint());\n }\n\n if evt.is_writable() {\n handler.writable(self, tok);\n }\n }\n\n fn notify(&mut self, handler: &mut H, mut cnt: usize) {\n while cnt > 0 {\n let msg = self.notify.poll()\n .expect(\"[BUG] at this point there should always be a message\");\n\n handler.notify(self, msg);\n cnt -= 1;\n }\n }\n\n fn timer_process(&mut self, handler: &mut H) {\n let now = self.timer.now();\n\n loop {\n match self.timer.tick_to(now) {\n Some(t) => handler.timeout(self, t),\n _ => return\n }\n }\n }\n}\n\nunsafe impl<H: Handler> Sync for EventLoop<H> { }\n\n\/\/\/ Sends messages to the EventLoop from other threads.\npub struct EventLoopSender<M: Send> {\n notify: Notify<M>\n}\n\nimpl<M: Send> Clone for EventLoopSender<M> {\n fn clone(&self) -> EventLoopSender<M> {\n EventLoopSender { notify: self.notify.clone() }\n }\n}\n\nimpl<M: Send> fmt::Debug for EventLoopSender<M> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"EventLoopSender<?> {{ ... 
}}\")\n }\n}\n\nunsafe impl<M: Send> Sync for EventLoopSender<M> { }\n\nimpl<M: Send> EventLoopSender<M> {\n fn new(notify: Notify<M>) -> EventLoopSender<M> {\n EventLoopSender { notify: notify }\n }\n\n pub fn send(&self, msg: M) -> Result<(), M> {\n self.notify.notify(msg)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::str;\n use std::sync::Arc;\n use std::sync::atomic::AtomicIsize;\n use std::sync::atomic::Ordering::SeqCst;\n use super::EventLoop;\n use {io, buf, Buf, Handler, Token, TryRead, TryWrite, ReadHint};\n\n #[test]\n pub fn test_event_loop_size() {\n use std::mem;\n assert!(512 >= mem::size_of::<EventLoop<Funtimes>>());\n }\n\n struct Funtimes {\n rcount: Arc<AtomicIsize>,\n wcount: Arc<AtomicIsize>\n }\n\n impl Funtimes {\n fn new(rcount: Arc<AtomicIsize>, wcount: Arc<AtomicIsize>) -> Funtimes {\n Funtimes {\n rcount: rcount,\n wcount: wcount\n }\n }\n }\n\n impl Handler for Funtimes {\n type Timeout = usize;\n type Message = ();\n\n fn readable(&mut self, _event_loop: &mut EventLoop<Funtimes>, token: Token, _: ReadHint) {\n (*self.rcount).fetch_add(1, SeqCst);\n assert_eq!(token, Token(10));\n }\n\n fn writable(&mut self, _event_loop: &mut EventLoop<Funtimes>, token: Token) {\n (*self.wcount).fetch_add(1, SeqCst);\n assert_eq!(token, Token(10));\n }\n }\n\n #[test]\n pub fn test_readable() {\n let mut event_loop = EventLoop::new().ok().expect(\"Couldn't make event loop\");\n\n let (mut reader, mut writer) = io::pipe().unwrap();\n\n let rcount = Arc::new(AtomicIsize::new(0));\n let wcount = Arc::new(AtomicIsize::new(0));\n let mut handler = Funtimes::new(rcount.clone(), wcount.clone());\n\n writer.write(&mut buf::SliceBuf::wrap(\"hello\".as_bytes())).unwrap();\n event_loop.register(&reader, Token(10)).unwrap();\n\n let _ = event_loop.run_once(&mut handler);\n let mut b = buf::ByteBuf::mut_with_capacity(16);\n\n assert_eq!((*rcount).load(SeqCst), 1);\n\n reader.read(&mut b).unwrap();\n\n assert_eq!(str::from_utf8(b.flip().bytes()).unwrap(), \"hello\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A private parser implementation of IPv4, IPv6, and socket addresses.\n\/\/!\n\/\/! This module is \"publicly exported\" through the `FromStr` implementations\n\/\/! 
below.\n\nuse prelude::v1::*;\n\nuse str::FromStr;\nuse net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6};\n\nstruct Parser<'a> {\n \/\/ parsing as ASCII, so can use byte array\n s: &'a [u8],\n pos: usize,\n}\n\nimpl<'a> Parser<'a> {\n fn new(s: &'a str) -> Parser<'a> {\n Parser {\n s: s.as_bytes(),\n pos: 0,\n }\n }\n\n fn is_eof(&self) -> bool {\n self.pos == self.s.len()\n }\n\n \/\/ Commit only if parser returns Some\n fn read_atomically<T, F>(&mut self, cb: F) -> Option<T> where\n F: FnOnce(&mut Parser) -> Option<T>,\n {\n let pos = self.pos;\n let r = cb(self);\n if r.is_none() {\n self.pos = pos;\n }\n r\n }\n\n \/\/ Commit only if parser read till EOF\n fn read_till_eof<T, F>(&mut self, cb: F) -> Option<T> where\n F: FnOnce(&mut Parser) -> Option<T>,\n {\n self.read_atomically(move |p| {\n match cb(p) {\n Some(x) => if p.is_eof() {Some(x)} else {None},\n None => None,\n }\n })\n }\n\n \/\/ Return result of first successful parser\n fn read_or<T>(&mut self, parsers: &mut [Box<FnMut(&mut Parser) -> Option<T> + 'static>])\n -> Option<T> {\n for pf in parsers {\n match self.read_atomically(|p: &mut Parser| pf(p)) {\n Some(r) => return Some(r),\n None => {}\n }\n }\n None\n }\n\n \/\/ Apply 3 parsers sequentially\n fn read_seq_3<A, B, C, PA, PB, PC>(&mut self,\n pa: PA,\n pb: PB,\n pc: PC)\n -> Option<(A, B, C)> where\n PA: FnOnce(&mut Parser) -> Option<A>,\n PB: FnOnce(&mut Parser) -> Option<B>,\n PC: FnOnce(&mut Parser) -> Option<C>,\n {\n self.read_atomically(move |p| {\n let a = pa(p);\n let b = if a.is_some() { pb(p) } else { None };\n let c = if b.is_some() { pc(p) } else { None };\n match (a, b, c) {\n (Some(a), Some(b), Some(c)) => Some((a, b, c)),\n _ => None\n }\n })\n }\n\n \/\/ Read next char\n fn read_char(&mut self) -> Option<char> {\n if self.is_eof() {\n None\n } else {\n let r = self.s[self.pos] as char;\n self.pos += 1;\n Some(r)\n }\n }\n\n \/\/ Return char and advance iff next char is equal to requested\n fn read_given_char(&mut self, c: char) -> Option<char> {\n self.read_atomically(|p| {\n match p.read_char() {\n Some(next) if next == c => Some(next),\n _ => None,\n }\n })\n }\n\n \/\/ Read digit\n fn read_digit(&mut self, radix: u8) -> Option<u8> {\n fn parse_digit(c: char, radix: u8) -> Option<u8> {\n let c = c as u8;\n \/\/ assuming radix is either 10 or 16\n if c >= b'0' && c <= b'9' {\n Some(c - b'0')\n } else if radix > 10 && c >= b'a' && c < b'a' + (radix - 10) {\n Some(c - b'a' + 10)\n } else if radix > 10 && c >= b'A' && c < b'A' + (radix - 10) {\n Some(c - b'A' + 10)\n } else {\n None\n }\n }\n\n self.read_atomically(|p| {\n p.read_char().and_then(|c| parse_digit(c, radix))\n })\n }\n\n fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {\n let mut r = 0;\n let mut digit_count = 0;\n loop {\n match self.read_digit(radix) {\n Some(d) => {\n r = r * (radix as u32) + (d as u32);\n digit_count += 1;\n if digit_count > max_digits || r >= upto {\n return None\n }\n }\n None => {\n if digit_count == 0 {\n return None\n } else {\n return Some(r)\n }\n }\n };\n }\n }\n\n \/\/ Read number, failing if max_digits of number value exceeded\n fn read_number(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {\n self.read_atomically(|p| p.read_number_impl(radix, max_digits, upto))\n }\n\n fn read_ipv4_addr_impl(&mut self) -> Option<Ipv4Addr> {\n let mut bs = [0; 4];\n let mut i = 0;\n while i < 4 {\n if i != 0 && self.read_given_char('.').is_none() {\n return None;\n }\n\n let octet = 
self.read_number(10, 3, 0x100).map(|n| n as u8);\n match octet {\n Some(d) => bs[i] = d,\n None => return None,\n };\n i += 1;\n }\n Some(Ipv4Addr::new(bs[0], bs[1], bs[2], bs[3]))\n }\n\n \/\/ Read IPv4 address\n fn read_ipv4_addr(&mut self) -> Option<Ipv4Addr> {\n self.read_atomically(|p| p.read_ipv4_addr_impl())\n }\n\n fn read_ipv6_addr_impl(&mut self) -> Option<Ipv6Addr> {\n fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> Ipv6Addr {\n assert!(head.len() + tail.len() <= 8);\n let mut gs = [0; 8];\n gs.clone_from_slice(head);\n gs[(8 - tail.len()) .. 8].clone_from_slice(tail);\n Ipv6Addr::new(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7])\n }\n\n fn read_groups(p: &mut Parser, groups: &mut [u16; 8], limit: usize)\n -> (usize, bool) {\n let mut i = 0;\n while i < limit {\n if i < limit - 1 {\n let ipv4 = p.read_atomically(|p| {\n if i == 0 || p.read_given_char(':').is_some() {\n p.read_ipv4_addr()\n } else {\n None\n }\n });\n if let Some(v4_addr) = ipv4 {\n let octets = v4_addr.octets();\n groups[i + 0] = ((octets[0] as u16) << 8) | (octets[1] as u16);\n groups[i + 1] = ((octets[2] as u16) << 8) | (octets[3] as u16);\n return (i + 2, true);\n }\n }\n\n let group = p.read_atomically(|p| {\n if i == 0 || p.read_given_char(':').is_some() {\n p.read_number(16, 4, 0x10000).map(|n| n as u16)\n } else {\n None\n }\n });\n match group {\n Some(g) => groups[i] = g,\n None => return (i, false)\n }\n i += 1;\n }\n (i, false)\n }\n\n let mut head = [0; 8];\n let (head_size, head_ipv4) = read_groups(self, &mut head, 8);\n\n if head_size == 8 {\n return Some(Ipv6Addr::new(\n head[0], head[1], head[2], head[3],\n head[4], head[5], head[6], head[7]))\n }\n\n \/\/ IPv4 part is not allowed before `::`\n if head_ipv4 {\n return None\n }\n\n \/\/ read `::` if previous code parsed less than 8 groups\n if !self.read_given_char(':').is_some() || !self.read_given_char(':').is_some() {\n return None;\n }\n\n let mut tail = [0; 8];\n let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size);\n Some(ipv6_addr_from_head_tail(&head[..head_size], &tail[..tail_size]))\n }\n\n fn read_ipv6_addr(&mut self) -> Option<Ipv6Addr> {\n self.read_atomically(|p| p.read_ipv6_addr_impl())\n }\n\n fn read_ip_addr(&mut self) -> Option<IpAddr> {\n let ipv4_addr = |p: &mut Parser| p.read_ipv4_addr().map(|v4| IpAddr::V4(v4));\n let ipv6_addr = |p: &mut Parser| p.read_ipv6_addr().map(|v6| IpAddr::V6(v6));\n self.read_or(&mut [Box::new(ipv4_addr), Box::new(ipv6_addr)])\n }\n\n fn read_socket_addr(&mut self) -> Option<SocketAddr> {\n let ip_addr = |p: &mut Parser| {\n let ipv4_p = |p: &mut Parser| p.read_ip_addr();\n let ipv6_p = |p: &mut Parser| {\n let open_br = |p: &mut Parser| p.read_given_char('[');\n let ip_addr = |p: &mut Parser| p.read_ipv6_addr();\n let clos_br = |p: &mut Parser| p.read_given_char(']');\n p.read_seq_3::<char, Ipv6Addr, char, _, _, _>(open_br, ip_addr, clos_br)\n .map(|t| match t { (_, ip, _) => IpAddr::V6(ip) })\n };\n p.read_or(&mut [Box::new(ipv4_p), Box::new(ipv6_p)])\n };\n let colon = |p: &mut Parser| p.read_given_char(':');\n let port = |p: &mut Parser| p.read_number(10, 5, 0x10000).map(|n| n as u16);\n\n \/\/ host, colon, port\n self.read_seq_3(ip_addr, colon, port).map(|t| {\n let (ip, _, port): (IpAddr, char, u16) = t;\n match ip {\n IpAddr::V4(ip) => SocketAddr::V4(SocketAddrV4::new(ip, port)),\n IpAddr::V6(ip) => SocketAddr::V6(SocketAddrV6::new(ip, port, 0, 0)),\n }\n })\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for IpAddr {\n type Err = 
AddrParseError;\n fn from_str(s: &str) -> Result<IpAddr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ip_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for Ipv4Addr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<Ipv4Addr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ipv4_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for Ipv6Addr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<Ipv6Addr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ipv6_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for SocketAddr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<SocketAddr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_socket_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(())),\n }\n }\n}\n\n\/\/\/ An error returned when parsing an IP address or a socket address.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[derive(Debug, Clone, PartialEq)]\npub struct AddrParseError(());\n<commit_msg>Auto merge of #27976 - sfackler:addrparseerror-error, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A private parser implementation of IPv4, IPv6, and socket addresses.\n\/\/!\n\/\/! This module is \"publicly exported\" through the `FromStr` implementations\n\/\/! 
below.\n\nuse prelude::v1::*;\n\nuse error::Error;\nuse fmt;\nuse net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6};\nuse str::FromStr;\n\nstruct Parser<'a> {\n \/\/ parsing as ASCII, so can use byte array\n s: &'a [u8],\n pos: usize,\n}\n\nimpl<'a> Parser<'a> {\n fn new(s: &'a str) -> Parser<'a> {\n Parser {\n s: s.as_bytes(),\n pos: 0,\n }\n }\n\n fn is_eof(&self) -> bool {\n self.pos == self.s.len()\n }\n\n \/\/ Commit only if parser returns Some\n fn read_atomically<T, F>(&mut self, cb: F) -> Option<T> where\n F: FnOnce(&mut Parser) -> Option<T>,\n {\n let pos = self.pos;\n let r = cb(self);\n if r.is_none() {\n self.pos = pos;\n }\n r\n }\n\n \/\/ Commit only if parser read till EOF\n fn read_till_eof<T, F>(&mut self, cb: F) -> Option<T> where\n F: FnOnce(&mut Parser) -> Option<T>,\n {\n self.read_atomically(move |p| {\n match cb(p) {\n Some(x) => if p.is_eof() {Some(x)} else {None},\n None => None,\n }\n })\n }\n\n \/\/ Return result of first successful parser\n fn read_or<T>(&mut self, parsers: &mut [Box<FnMut(&mut Parser) -> Option<T> + 'static>])\n -> Option<T> {\n for pf in parsers {\n match self.read_atomically(|p: &mut Parser| pf(p)) {\n Some(r) => return Some(r),\n None => {}\n }\n }\n None\n }\n\n \/\/ Apply 3 parsers sequentially\n fn read_seq_3<A, B, C, PA, PB, PC>(&mut self,\n pa: PA,\n pb: PB,\n pc: PC)\n -> Option<(A, B, C)> where\n PA: FnOnce(&mut Parser) -> Option<A>,\n PB: FnOnce(&mut Parser) -> Option<B>,\n PC: FnOnce(&mut Parser) -> Option<C>,\n {\n self.read_atomically(move |p| {\n let a = pa(p);\n let b = if a.is_some() { pb(p) } else { None };\n let c = if b.is_some() { pc(p) } else { None };\n match (a, b, c) {\n (Some(a), Some(b), Some(c)) => Some((a, b, c)),\n _ => None\n }\n })\n }\n\n \/\/ Read next char\n fn read_char(&mut self) -> Option<char> {\n if self.is_eof() {\n None\n } else {\n let r = self.s[self.pos] as char;\n self.pos += 1;\n Some(r)\n }\n }\n\n \/\/ Return char and advance iff next char is equal to requested\n fn read_given_char(&mut self, c: char) -> Option<char> {\n self.read_atomically(|p| {\n match p.read_char() {\n Some(next) if next == c => Some(next),\n _ => None,\n }\n })\n }\n\n \/\/ Read digit\n fn read_digit(&mut self, radix: u8) -> Option<u8> {\n fn parse_digit(c: char, radix: u8) -> Option<u8> {\n let c = c as u8;\n \/\/ assuming radix is either 10 or 16\n if c >= b'0' && c <= b'9' {\n Some(c - b'0')\n } else if radix > 10 && c >= b'a' && c < b'a' + (radix - 10) {\n Some(c - b'a' + 10)\n } else if radix > 10 && c >= b'A' && c < b'A' + (radix - 10) {\n Some(c - b'A' + 10)\n } else {\n None\n }\n }\n\n self.read_atomically(|p| {\n p.read_char().and_then(|c| parse_digit(c, radix))\n })\n }\n\n fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {\n let mut r = 0;\n let mut digit_count = 0;\n loop {\n match self.read_digit(radix) {\n Some(d) => {\n r = r * (radix as u32) + (d as u32);\n digit_count += 1;\n if digit_count > max_digits || r >= upto {\n return None\n }\n }\n None => {\n if digit_count == 0 {\n return None\n } else {\n return Some(r)\n }\n }\n };\n }\n }\n\n \/\/ Read number, failing if max_digits of number value exceeded\n fn read_number(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {\n self.read_atomically(|p| p.read_number_impl(radix, max_digits, upto))\n }\n\n fn read_ipv4_addr_impl(&mut self) -> Option<Ipv4Addr> {\n let mut bs = [0; 4];\n let mut i = 0;\n while i < 4 {\n if i != 0 && self.read_given_char('.').is_none() {\n return None;\n 
}\n\n let octet = self.read_number(10, 3, 0x100).map(|n| n as u8);\n match octet {\n Some(d) => bs[i] = d,\n None => return None,\n };\n i += 1;\n }\n Some(Ipv4Addr::new(bs[0], bs[1], bs[2], bs[3]))\n }\n\n \/\/ Read IPv4 address\n fn read_ipv4_addr(&mut self) -> Option<Ipv4Addr> {\n self.read_atomically(|p| p.read_ipv4_addr_impl())\n }\n\n fn read_ipv6_addr_impl(&mut self) -> Option<Ipv6Addr> {\n fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> Ipv6Addr {\n assert!(head.len() + tail.len() <= 8);\n let mut gs = [0; 8];\n gs.clone_from_slice(head);\n gs[(8 - tail.len()) .. 8].clone_from_slice(tail);\n Ipv6Addr::new(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7])\n }\n\n fn read_groups(p: &mut Parser, groups: &mut [u16; 8], limit: usize)\n -> (usize, bool) {\n let mut i = 0;\n while i < limit {\n if i < limit - 1 {\n let ipv4 = p.read_atomically(|p| {\n if i == 0 || p.read_given_char(':').is_some() {\n p.read_ipv4_addr()\n } else {\n None\n }\n });\n if let Some(v4_addr) = ipv4 {\n let octets = v4_addr.octets();\n groups[i + 0] = ((octets[0] as u16) << 8) | (octets[1] as u16);\n groups[i + 1] = ((octets[2] as u16) << 8) | (octets[3] as u16);\n return (i + 2, true);\n }\n }\n\n let group = p.read_atomically(|p| {\n if i == 0 || p.read_given_char(':').is_some() {\n p.read_number(16, 4, 0x10000).map(|n| n as u16)\n } else {\n None\n }\n });\n match group {\n Some(g) => groups[i] = g,\n None => return (i, false)\n }\n i += 1;\n }\n (i, false)\n }\n\n let mut head = [0; 8];\n let (head_size, head_ipv4) = read_groups(self, &mut head, 8);\n\n if head_size == 8 {\n return Some(Ipv6Addr::new(\n head[0], head[1], head[2], head[3],\n head[4], head[5], head[6], head[7]))\n }\n\n \/\/ IPv4 part is not allowed before `::`\n if head_ipv4 {\n return None\n }\n\n \/\/ read `::` if previous code parsed less than 8 groups\n if !self.read_given_char(':').is_some() || !self.read_given_char(':').is_some() {\n return None;\n }\n\n let mut tail = [0; 8];\n let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size);\n Some(ipv6_addr_from_head_tail(&head[..head_size], &tail[..tail_size]))\n }\n\n fn read_ipv6_addr(&mut self) -> Option<Ipv6Addr> {\n self.read_atomically(|p| p.read_ipv6_addr_impl())\n }\n\n fn read_ip_addr(&mut self) -> Option<IpAddr> {\n let ipv4_addr = |p: &mut Parser| p.read_ipv4_addr().map(|v4| IpAddr::V4(v4));\n let ipv6_addr = |p: &mut Parser| p.read_ipv6_addr().map(|v6| IpAddr::V6(v6));\n self.read_or(&mut [Box::new(ipv4_addr), Box::new(ipv6_addr)])\n }\n\n fn read_socket_addr(&mut self) -> Option<SocketAddr> {\n let ip_addr = |p: &mut Parser| {\n let ipv4_p = |p: &mut Parser| p.read_ip_addr();\n let ipv6_p = |p: &mut Parser| {\n let open_br = |p: &mut Parser| p.read_given_char('[');\n let ip_addr = |p: &mut Parser| p.read_ipv6_addr();\n let clos_br = |p: &mut Parser| p.read_given_char(']');\n p.read_seq_3::<char, Ipv6Addr, char, _, _, _>(open_br, ip_addr, clos_br)\n .map(|t| match t { (_, ip, _) => IpAddr::V6(ip) })\n };\n p.read_or(&mut [Box::new(ipv4_p), Box::new(ipv6_p)])\n };\n let colon = |p: &mut Parser| p.read_given_char(':');\n let port = |p: &mut Parser| p.read_number(10, 5, 0x10000).map(|n| n as u16);\n\n \/\/ host, colon, port\n self.read_seq_3(ip_addr, colon, port).map(|t| {\n let (ip, _, port): (IpAddr, char, u16) = t;\n match ip {\n IpAddr::V4(ip) => SocketAddr::V4(SocketAddrV4::new(ip, port)),\n IpAddr::V6(ip) => SocketAddr::V6(SocketAddrV6::new(ip, port, 0, 0)),\n }\n })\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for IpAddr 
{\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<IpAddr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ip_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for Ipv4Addr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<Ipv4Addr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ipv4_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for Ipv6Addr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<Ipv6Addr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_ipv6_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(()))\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl FromStr for SocketAddr {\n type Err = AddrParseError;\n fn from_str(s: &str) -> Result<SocketAddr, AddrParseError> {\n match Parser::new(s).read_till_eof(|p| p.read_socket_addr()) {\n Some(s) => Ok(s),\n None => Err(AddrParseError(())),\n }\n }\n}\n\n\/\/\/ An error returned when parsing an IP address or a socket address.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[derive(Debug, Clone, PartialEq)]\npub struct AddrParseError(());\n\n#[stable(feature = \"addr_parse_error_error\", since = \"1.4.0\")]\nimpl fmt::Display for AddrParseError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt.write_str(self.description())\n }\n}\n\n#[stable(feature = \"addr_parse_error_error\", since = \"1.4.0\")]\nimpl Error for AddrParseError {\n fn description(&self) -> &str {\n \"invalid IP address syntax\"\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>+ benches<commit_after>#![feature(test)]\n\nextern crate test;\nextern crate serde;\n#[macro_use]\nextern crate serde_derive;\nextern crate serde_bencode;\n\nuse test::Bencher;\nuse serde::Serialize;\nuse serde_bencode::ser::Serializer;\nuse serde_bencode::de::from_bytes;\n\n\n#[bench]\nfn ser_de_simple(b: &mut Bencher) {\n #[derive(Serialize, Deserialize)]\n struct Fake {\n a: i64,\n b: i64,\n }\n\n b.iter(|| {\n let a = Fake {a: 2, b: 7};\n let mut ser = Serializer::new();\n a.serialize(&mut ser).unwrap();\n let a_bytes: Vec<u8> = ser.into();\n let b: Fake = from_bytes(a_bytes.as_ref()).unwrap();\n b\n });\n}\n\n#[bench]\nfn ser_de_nested(b: &mut Bencher) {\n #[derive(Serialize, Deserialize)]\n struct FakeA {\n a: i64,\n b: i64,\n }\n\n #[derive(Serialize, Deserialize)]\n struct FakeB {\n a: i64,\n b: FakeA,\n }\n\n b.iter(|| {\n let a = FakeB {a: 2, b: FakeA {a: 7, b: 9}};\n let mut ser = Serializer::new();\n a.serialize(&mut ser).unwrap();\n let a_bytes: Vec<u8> = ser.into();\n let b: FakeB = from_bytes(a_bytes.as_ref()).unwrap();\n b\n });\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\nextern crate rusqlite;\n#[macro_use]\nextern crate log;\nextern crate chrono;\nextern crate fern;\nextern crate rustc_serialize;\nextern crate serde_yaml;\nextern crate uuid;\n#[cfg(test)]\n#[macro_use]\nextern crate proptest;\n\nmod actions_process;\nmod actions_process_list_provides;\nmod autoconf;\nmod cfg;\nmod clap_actions;\nmod clap_fern;\nmod cli_clap;\nmod db;\nmod db_enviroment;\nmod db_fs_dir;\nuse db::fs_dir_type;\nmod db_fs_file;\nuse db::db_job;\nmod db_job_depend;\nmod db_job_provide;\nmod db_job_require_variable;\nmod db_job_require_variable_pair;\nuse db::db_provider;\nuse db::db_session;\nmod db_variable_name;\nmod db_variable_pair;\nmod 
elephant;\nmod jobs_load;\nmod json_loader_elephant;\nmod loader;\nuse uuid::Uuid;\n\nfn main() {\n let mut runtime_cfg = cfg::Config::new().unwrap();\n let pointless_value = 10; \/\/ pointless variable due to not knowing about lifetimes\n let clap_matches = cli_clap::cli_clap(&pointless_value);\n clap_fern::log_setup(&clap_matches);\n clap_actions::cfg_actions_update_clap(&mut runtime_cfg, &clap_matches);\n let session_uuid = Uuid::new_v4();\n let session_uuid_string = session_uuid.hyphenated().to_string();\n trace!(\"session_uuid_string:{}\", session_uuid_string);\n let conn = actions_process::cfg_process_action_db_connect(&mut runtime_cfg);\n db::create_tables(&conn);\n loader::deligate(&conn, &mut runtime_cfg);\n let pk_session = elephant::elephant_session(&conn, &session_uuid_string);\n loader::enviroment(&conn, &mut runtime_cfg, pk_session);\n jobs_load::load(&conn);\n actions_process::process(&conn, &runtime_cfg)\n}\n<commit_msg>Use lifetime functions<commit_after>extern crate clap;\nextern crate rusqlite;\n#[macro_use]\nextern crate log;\nextern crate chrono;\nextern crate fern;\nextern crate rustc_serialize;\nextern crate serde_yaml;\nextern crate uuid;\n#[cfg(test)]\n#[macro_use]\nextern crate proptest;\n\nmod actions_process;\nmod actions_process_list_provides;\nmod autoconf;\nmod cfg;\nmod clap_actions;\nmod clap_fern;\nmod cli_clap;\nmod db;\nmod db_enviroment;\nmod db_fs_dir;\nuse db::fs_dir_type;\nmod db_fs_file;\nuse db::db_job;\nmod db_job_depend;\nmod db_job_provide;\nmod db_job_require_variable;\nmod db_job_require_variable_pair;\nuse db::db_provider;\nuse db::db_session;\nmod db_variable_name;\nmod db_variable_pair;\nmod elephant;\nmod jobs_load;\nmod json_loader_elephant;\nmod loader;\nuse uuid::Uuid;\n\nfn main() {\n let mut runtime_cfg = cfg::Config::new().unwrap();\n let clap_matches = cli_clap::cli_clap();\n clap_fern::log_setup(&clap_matches);\n clap_actions::cfg_actions_update_clap(&mut runtime_cfg, &clap_matches);\n let session_uuid = Uuid::new_v4();\n let session_uuid_string = session_uuid.hyphenated().to_string();\n trace!(\"session_uuid_string:{}\", session_uuid_string);\n let conn = actions_process::cfg_process_action_db_connect(&mut runtime_cfg);\n db::create_tables(&conn);\n loader::deligate(&conn, &mut runtime_cfg);\n let pk_session = elephant::elephant_session(&conn, &session_uuid_string);\n loader::enviroment(&conn, &mut runtime_cfg, pk_session);\n jobs_load::load(&conn);\n actions_process::process(&conn, &runtime_cfg)\n}\n<|endoftext|>"} {"text":"<commit_before>\/*\n * Cymbalum, Molecular Simulation in Rust\n * Copyright (C) 2015 Guillaume Fraux\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/\n*\/\n\nuse std::ops::{Add, Sub, Mul, Div};\nuse std::cmp::PartialEq;\nuse super::matrix::Matrix3;\n\n\/\/\/ 3 dimensional vector type, implementing all usual operations\n#[derive(Copy, Clone, Debug)]\npub struct Vector3D {\n \/\/\/ First component of the vector\n pub x: f64,\n \/\/\/ Second component of the vector\n pub y: f64,\n \/\/\/ Third component of the vector\n pub z: f64,\n}\n\nimpl Vector3D {\n \/\/\/ Create a new Vector3D with components `x`, `y`, `z`\n pub fn new(x: f64, y: f64, z: f64) -> Vector3D {\n Vector3D{x: x, y: y, z: z}\n }\n \/\/\/ Return the squared euclidean norm of a Vector3D\n #[inline]\n pub fn norm2(&self) -> f64 {\n (*self) * (*self)\n }\n \/\/\/ Return the euclidean norm of a Vector3D\n #[inline]\n pub fn norm(&self) -> f64 {\n f64::sqrt(self.norm2())\n }\n \/\/\/ Normalize a Vector3D\n #[inline]\n pub fn normalize(&self) -> Vector3D {\n *self \/ self.norm()\n }\n \/\/\/ Tensorial product between vectors\n pub fn tensorial(&self, other: &Vector3D) -> Matrix3 {\n Matrix3::new(self.x * other.x, self.x * other.y, self.x * other.z,\n self.y * other.x, self.y * other.y, self.y * other.z,\n self.z * other.x, self.z * other.y, self.z * other.z)\n }\n}\n\n\/\/\/ Add two vectors\nimpl Add for Vector3D {\n type Output = Vector3D;\n fn add(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self.x + other.x, self.y + other.y, self.z + other.z)\n }\n}\n\n\/\/\/ Substract two vectors\nimpl Sub for Vector3D {\n type Output = Vector3D;\n fn sub(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self.x - other.x, self.y - other.y, self.z - other.z)\n }\n}\n\n\/\/\/ Multiply by a scalar on the right hand side\nimpl Mul<f64> for Vector3D {\n type Output = Vector3D;\n fn mul(self, other: f64) -> Vector3D {\n Vector3D::new(self.x * other, self.y * other, self.z * other)\n }\n}\n\n\/\/\/ Multiply by a scalar on the left hand side\nimpl Mul<Vector3D> for f64 {\n type Output = Vector3D;\n fn mul(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self * other.x, self * other.y, self * other.z)\n }\n}\n\n\/\/\/ Scalar product between vectors\nimpl Mul<Vector3D> for Vector3D {\n type Output = f64;\n fn mul(self, other: Vector3D) -> f64 {\n self.x * other.x + self.y * other.y + self.z * other.z\n }\n}\n\n\/\/\/ Dividing a vector by a scalar\nimpl Div<f64> for Vector3D {\n type Output = Vector3D;\n fn div(self, other: f64) -> Vector3D {\n Vector3D::new(self.x \/ other, self.y \/ other, self.z \/ other)\n }\n}\n\n\/\/\/ Comparing two vectors\nimpl PartialEq for Vector3D {\n fn eq(&self, other: &Vector3D) -> bool {\n self.x == other.x && self.y == other.y && self.z == other.z\n }\n}\n\n\/******************************************************************************\/\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn add() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a + b;\n assert_eq!(c, Vector3D::new(8.1, -5.0, 12.1));\n }\n\n #[test]\n fn sub() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a - b;\n assert_eq!(c, Vector3D::new(-4.1, 12.0, -2.5));\n }\n\n #[test]\n fn mul() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = 2.0;\n\n let c = b * a;\n assert_eq!(c, Vector3D::new(4.0, 7.0, 9.6));\n\n let b = 1.5;\n let c = a * b;\n assert_eq!(c, Vector3D::new(3.0, 5.25, 7.199999999999999));\n }\n\n #[test]\n fn dot_product() {\n let a = Vector3D::new(2.1, 3.5, 
4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a * b;\n assert_eq!(c, 18.1);\n }\n}\n<commit_msg>Add cross product between vectors<commit_after>\/*\n * Cymbalum, Molecular Simulation in Rust\n * Copyright (C) 2015 Guillaume Fraux\n *\n * This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/\n*\/\n\nuse std::ops::{Add, Sub, Mul, Div, BitXor};\nuse std::cmp::PartialEq;\nuse super::matrix::Matrix3;\n\n\/\/\/ 3 dimensional vector type, implementing all usual operations\n#[derive(Copy, Clone, Debug)]\npub struct Vector3D {\n \/\/\/ First component of the vector\n pub x: f64,\n \/\/\/ Second component of the vector\n pub y: f64,\n \/\/\/ Third component of the vector\n pub z: f64,\n}\n\nimpl Vector3D {\n \/\/\/ Create a new Vector3D with components `x`, `y`, `z`\n pub fn new(x: f64, y: f64, z: f64) -> Vector3D {\n Vector3D{x: x, y: y, z: z}\n }\n \/\/\/ Return the squared euclidean norm of a Vector3D\n #[inline]\n pub fn norm2(&self) -> f64 {\n (*self) * (*self)\n }\n \/\/\/ Return the euclidean norm of a Vector3D\n #[inline]\n pub fn norm(&self) -> f64 {\n f64::sqrt(self.norm2())\n }\n \/\/\/ Normalize a Vector3D\n #[inline]\n pub fn normalize(&self) -> Vector3D {\n *self \/ self.norm()\n }\n \/\/\/ Tensorial product between vectors\n pub fn tensorial(&self, other: &Vector3D) -> Matrix3 {\n Matrix3::new(self.x * other.x, self.x * other.y, self.x * other.z,\n self.y * other.x, self.y * other.y, self.y * other.z,\n self.z * other.x, self.z * other.y, self.z * other.z)\n }\n}\n\n\/\/\/ Add two vectors\nimpl Add for Vector3D {\n type Output = Vector3D;\n fn add(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self.x + other.x, self.y + other.y, self.z + other.z)\n }\n}\n\n\/\/\/ Substract two vectors\nimpl Sub for Vector3D {\n type Output = Vector3D;\n fn sub(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self.x - other.x, self.y - other.y, self.z - other.z)\n }\n}\n\n\/\/\/ Multiply by a scalar on the right hand side\nimpl Mul<f64> for Vector3D {\n type Output = Vector3D;\n fn mul(self, other: f64) -> Vector3D {\n Vector3D::new(self.x * other, self.y * other, self.z * other)\n }\n}\n\n\/\/\/ Multiply by a scalar on the left hand side\nimpl Mul<Vector3D> for f64 {\n type Output = Vector3D;\n fn mul(self, other: Vector3D) -> Vector3D {\n Vector3D::new(self * other.x, self * other.y, self * other.z)\n }\n}\n\n\/\/\/ Scalar product between vectors\nimpl Mul<Vector3D> for Vector3D {\n type Output = f64;\n fn mul(self, other: Vector3D) -> f64 {\n self.x * other.x + self.y * other.y + self.z * other.z\n }\n}\n\n\/\/\/ Vectorial product will use the a^b notation.\nimpl BitXor<Vector3D> for Vector3D {\n type Output = Vector3D;\n fn bitxor(self, other: Vector3D) -> Vector3D {\n let x = self.y * other.z - self.z * other.y;\n let y = self.z * other.x - self.x * other.z;\n let z = self.x * other.y - self.y * other.x;\n Vector3D::new(x, y, z)\n }\n}\n\n\/\/\/ Dividing a vector by a scalar\nimpl Div<f64> for Vector3D {\n type Output = Vector3D;\n fn div(self, other: f64) -> Vector3D {\n Vector3D::new(self.x \/ other, self.y \/ other, self.z \/ other)\n }\n}\n\n\/\/\/ Comparing two vectors\nimpl PartialEq for Vector3D {\n fn eq(&self, other: &Vector3D) -> bool {\n self.x == other.x && self.y == other.y && self.z == other.z\n }\n}\n\n\/******************************************************************************\/\n\n#[cfg(test)]\nmod tests 
{\n use super::*;\n\n #[test]\n fn add() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a + b;\n assert_eq!(c, Vector3D::new(8.1, -5.0, 12.1));\n }\n\n #[test]\n fn sub() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a - b;\n assert_eq!(c, Vector3D::new(-4.1, 12.0, -2.5));\n }\n\n #[test]\n fn mul() {\n let a = Vector3D::new(2.0, 3.5, 4.8);\n let b = 2.0;\n\n let c = b * a;\n assert_eq!(c, Vector3D::new(4.0, 7.0, 9.6));\n\n let b = 1.5;\n let c = a * b;\n assert_eq!(c, Vector3D::new(3.0, 5.25, 7.199999999999999));\n }\n\n #[test]\n fn dot_product() {\n let a = Vector3D::new(2.1, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a * b;\n assert_eq!(c, 18.1);\n }\n\n #[test]\n fn cross_product() {\n let a = Vector3D::new(2.1, 3.5, 4.8);\n let b = Vector3D::new(6.1, -8.5, 7.3);\n\n let c = a ^ b;\n assert_eq!(c*a, 0.0);\n\n let a = Vector3D::new(1.0, 0.0, 0.0);\n let b = Vector3D::new(0.0, 1.0, 0.0);\n\n let c = a ^ b;\n assert_eq!(c, Vector3D::new(0.0, 0.0, 1.0));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate futures;\n\nuse std::cell::Cell;\nuse std::sync::Arc;\n\nuse futures::{Future, Task, Poll};\nuse futures::executor::{Executor, ExecuteCallback};\n\nthread_local!(static EXECUTOR_HIT: Cell<bool> = Cell::new(false));\n\nstruct MyFuture {\n executor: Arc<Executor>,\n}\n\nimpl Future for MyFuture {\n type Item = ();\n type Error = ();\n\n fn poll(&mut self, task: &mut Task) -> Poll<(), ()> {\n if EXECUTOR_HIT.with(|p| p.get()) {\n Poll::Ok(())\n } else {\n task.poll_on(self.executor.clone());\n Poll::NotReady\n }\n }\n\n fn schedule(&mut self, task: &mut Task) {\n panic!(\"can't schedule\");\n }\n}\n\nstruct MyExecutor;\n\nimpl Executor for MyExecutor {\n fn execute_boxed(&self, f: Box<ExecuteCallback>) {\n EXECUTOR_HIT.with(|p| p.set(true));\n f.call();\n }\n}\n\n#[test]\nfn simple() {\n let f = MyFuture { executor: Arc::new(MyExecutor) };\n\n f.forget();\n\n assert!(EXECUTOR_HIT.with(|p| p.get()));\n}\n<commit_msg>Fix an unused variable warning<commit_after>extern crate futures;\n\nuse std::cell::Cell;\nuse std::sync::Arc;\n\nuse futures::{Future, Task, Poll};\nuse futures::executor::{Executor, ExecuteCallback};\n\nthread_local!(static EXECUTOR_HIT: Cell<bool> = Cell::new(false));\n\nstruct MyFuture {\n executor: Arc<Executor>,\n}\n\nimpl Future for MyFuture {\n type Item = ();\n type Error = ();\n\n fn poll(&mut self, task: &mut Task) -> Poll<(), ()> {\n if EXECUTOR_HIT.with(|p| p.get()) {\n Poll::Ok(())\n } else {\n task.poll_on(self.executor.clone());\n Poll::NotReady\n }\n }\n\n fn schedule(&mut self, _task: &mut Task) {\n panic!(\"can't schedule\");\n }\n}\n\nstruct MyExecutor;\n\nimpl Executor for MyExecutor {\n fn execute_boxed(&self, f: Box<ExecuteCallback>) {\n EXECUTOR_HIT.with(|p| p.set(true));\n f.call();\n }\n}\n\n#[test]\nfn simple() {\n let f = MyFuture { executor: Arc::new(MyExecutor) };\n\n f.forget();\n\n assert!(EXECUTOR_HIT.with(|p| p.get()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>combinators map<commit_after>#[derive(Debug)]\nenum Food {Apple, Orange, Grape}\n\nstruct Peeled(Food);\nstruct Chopped(Food);\n\n#[derive(Debug)]\nstruct Cooked(Food);\n\n\/\/ peel the food\nfn peel(food: Option<Food>) -> Option<Peeled> {\n match food {\n Some(food) => Some(Peeled(food)),\n None => None\n }\n}\n\n\/\/ chop food\nfn chop(peeled: Option<Peeled>) -> Option<Chopped> {\n match peeled {\n Some(Peeled(food)) => Some(Chopped(food)),\n None 
=> None\n }\n}\n\n\/\/ cook food\nfn cook(chopped: Option<Chopped>) -> Option<Cooked> {\n match chopped {\n Some(Chopped(food)) => Some(Cooked(food)),\n None => None\n }\n}\n\nfn cook_food(food: Option<Food>) -> Option<Cooked> {\n food.map(|f| Peeled(f))\n .map(|Peeled(f)| Chopped(f))\n .map(|Chopped(f)| Cooked(f))\n}\n\n\nfn main() {\n\n let apple = Some(Food::Apple);\n\n println!(\"I like: {:?}\", cook_food(apple));\n\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for DerefMut methods<commit_after>#![crate_name = \"foo\"]\n\nuse std::ops;\n\npub struct Foo;\n\nimpl Foo {\n pub fn foo(&mut self) {}\n}\n\n\/\/ @has foo\/struct.Bar.html\n\/\/ @has - '\/\/div[@class=\"sidebar-links\"]\/a[@href=\"#method.foo\"]' 'foo'\npub struct Bar {\n foo: Foo,\n}\n\nimpl ops::Deref for Bar {\n type Target = Foo;\n\n fn deref(&self) -> &Foo {\n &self.foo\n }\n}\n\nimpl ops::DerefMut for Bar {\n fn deref_mut(&mut self) -> &mut Foo {\n &mut self.foo\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement wildcard_imports lint<commit_after>use crate::utils::{in_macro, snippet_with_applicability, span_lint_and_sugg};\nuse if_chain::if_chain;\nuse rustc::ty::DefIdTree;\nuse rustc_data_structures::fx::FxHashSet;\nuse rustc_errors::Applicability;\nuse rustc_hir::def_id::DefId;\nuse rustc_hir::intravisit::{walk_item, NestedVisitorMap, Visitor};\nuse rustc_hir::*;\nuse rustc_lint::{LateContext, LateLintPass};\nuse rustc_session::{declare_lint_pass, declare_tool_lint};\nuse rustc_span::{symbol::Symbol, BytePos};\n\ndeclare_clippy_lint! {\n \/\/\/ **What it does:** Checks for wildcard imports `use _::*`.\n \/\/\/\n \/\/\/ **Why is this bad?** wildcard imports can polute the namespace. This is especially bad if\n \/\/\/ you try to import something through a wildcard, that already has been imported by name from\n \/\/\/ a different source:\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ use crate1::foo; \/\/ Imports a function named foo\n \/\/\/ use crate2::*; \/\/ Has a function named foo\n \/\/\/\n \/\/\/ foo(); \/\/ Calls crate1::foo\n \/\/\/ ```\n \/\/\/\n \/\/\/ This can lead to confusing error messages at best and to unexpected behavior at worst.\n \/\/\/\n \/\/\/ **Known problems:** If macros are imported through the wildcard, this macro is not included\n \/\/\/ by the suggestion and has to be added by hand.\n \/\/\/\n \/\/\/ **Example:**\n \/\/\/\n \/\/\/ Bad:\n \/\/\/ ```rust,ignore\n \/\/\/ use crate1::*;\n \/\/\/\n \/\/\/ foo();\n \/\/\/ ```\n \/\/\/\n \/\/\/ Good:\n \/\/\/ ```rust,ignore\n \/\/\/ use crate1::foo;\n \/\/\/\n \/\/\/ foo();\n \/\/\/ ```\n pub WILDCARD_IMPORTS,\n pedantic,\n \"lint `use _::*` statements\"\n}\n\ndeclare_lint_pass!(WildcardImports => [WILDCARD_IMPORTS]);\n\nimpl LateLintPass<'_, '_> for WildcardImports {\n fn check_item(&mut self, cx: &LateContext<'_, '_>, item: &Item<'_>) {\n if item.vis.node.is_pub() || item.vis.node.is_pub_restricted() {\n return;\n }\n if_chain! 
{\n if !in_macro(item.span);\n if let ItemKind::Use(use_path, UseKind::Glob) = &item.kind;\n if let Some(def_id) = use_path.res.opt_def_id();\n then {\n let hir = cx.tcx.hir();\n let parent_id = hir.get_parent_item(item.hir_id);\n let (items, in_module) = if parent_id == CRATE_HIR_ID {\n let items = hir\n .krate()\n .module\n .item_ids\n .iter()\n .map(|item_id| hir.get(item_id.id))\n .filter_map(|node| {\n if let Node::Item(item) = node {\n Some(item)\n } else {\n None\n }\n })\n .collect();\n (items, true)\n } else if let Node::Item(item) = hir.get(parent_id) {\n (vec![item], false)\n } else {\n (vec![], false)\n };\n\n let mut import_used_visitor = ImportsUsedVisitor {\n cx,\n wildcard_def_id: def_id,\n in_module,\n used_imports: FxHashSet::default(),\n };\n for item in items {\n import_used_visitor.visit_item(item);\n }\n\n if !import_used_visitor.used_imports.is_empty() {\n let module_name = use_path\n .segments\n .iter()\n .last()\n .expect(\"path has at least one segment\")\n .ident\n .name;\n\n let mut applicability = Applicability::MachineApplicable;\n let import_source = snippet_with_applicability(cx, use_path.span, \"..\", &mut applicability);\n let (span, braced_glob) = if import_source.is_empty() {\n \/\/ This is a `_::{_, *}` import\n \/\/ Probably it's `_::{self, *}`, in that case we don't want to suggest to\n \/\/ import `self`.\n \/\/ If it is something else, we also don't want to include `self` in the\n \/\/ suggestion, since either it was imported through another use statement:\n \/\/ ```\n \/\/ use foo::bar;\n \/\/ use foo::bar::{baz, *};\n \/\/ ```\n \/\/ or it couldn't be used anywhere.\n (\n use_path.span.with_hi(use_path.span.hi() + BytePos(1)),\n true,\n )\n } else {\n (\n use_path.span.with_hi(use_path.span.hi() + BytePos(3)),\n false,\n )\n };\n\n let imports_string = if import_used_visitor.used_imports.len() == 1 {\n \/\/ We don't need to check for accidental suggesting the module name instead\n \/\/ of `self` here, since if `used_imports.len() == 1`, and the only usage\n \/\/ is `self`, then it's not through a `*` and if there is a `*`, it gets\n \/\/ already linted by `unused_imports` of rustc.\n import_used_visitor.used_imports.iter().next().unwrap().to_string()\n } else {\n let mut imports = import_used_visitor\n .used_imports\n .iter()\n .filter_map(|import_name| {\n if braced_glob && *import_name == module_name {\n None\n } else if *import_name == module_name {\n Some(\"self\".to_string())\n } else {\n Some(import_name.to_string())\n }\n })\n .collect::<Vec<_>>();\n imports.sort();\n if braced_glob {\n imports.join(\", \")\n } else {\n format!(\"{{{}}}\", imports.join(\", \"))\n }\n };\n\n let sugg = if import_source.is_empty() {\n imports_string\n } else {\n format!(\"{}::{}\", import_source, imports_string)\n };\n\n span_lint_and_sugg(\n cx,\n WILDCARD_IMPORTS,\n span,\n \"usage of wildcard import\",\n \"try\",\n sugg,\n applicability,\n );\n }\n }\n }\n }\n}\n\nstruct ImportsUsedVisitor<'a, 'tcx> {\n cx: &'a LateContext<'a, 'tcx>,\n wildcard_def_id: def_id::DefId,\n in_module: bool,\n used_imports: FxHashSet<Symbol>,\n}\n\nimpl<'a, 'tcx> Visitor<'tcx> for ImportsUsedVisitor<'a, 'tcx> {\n type Map = Map<'tcx>;\n\n fn visit_item(&mut self, item: &'tcx Item<'_>) {\n match item.kind {\n ItemKind::Use(..) => {},\n ItemKind::Mod(..) if self.in_module => {},\n ItemKind::Mod(..) 
=> self.in_module = true,\n _ => walk_item(self, item),\n }\n }\n\n fn visit_path(&mut self, path: &Path<'_>, _: HirId) {\n if let Some(def_id) = self.first_path_segment_def_id(path) {\n \/\/ Check if the function\/enum\/... was exported\n if let Some(exports) = self.cx.tcx.module_exports(self.wildcard_def_id) {\n for export in exports {\n if let Some(export_def_id) = export.res.opt_def_id() {\n if export_def_id == def_id {\n self.used_imports.insert(\n path.segments\n .iter()\n .next()\n .expect(\"path has at least one segment\")\n .ident\n .name,\n );\n return;\n }\n }\n }\n }\n\n \/\/ Check if it is directly in the module\n if let Some(parent_def_id) = self.cx.tcx.parent(def_id) {\n if self.wildcard_def_id == parent_def_id {\n self.used_imports.insert(\n path.segments\n .iter()\n .next()\n .expect(\"path has at least one segment\")\n .ident\n .name,\n );\n }\n }\n }\n }\n\n fn nested_visit_map(&mut self) -> NestedVisitorMap<'_, Self::Map> {\n NestedVisitorMap::All(&self.cx.tcx.hir())\n }\n}\n\nimpl ImportsUsedVisitor<'_, '_> {\n fn skip_def_id(&self, def_id: DefId) -> DefId {\n let def_key = self.cx.tcx.def_key(def_id);\n match def_key.disambiguated_data.data {\n DefPathData::Ctor => {\n if let Some(def_id) = self.cx.tcx.parent(def_id) {\n self.skip_def_id(def_id)\n } else {\n def_id\n }\n },\n _ => def_id,\n }\n }\n\n fn first_path_segment_def_id(&self, path: &Path<'_>) -> Option<DefId> {\n path.res.opt_def_id().and_then(|mut def_id| {\n def_id = self.skip_def_id(def_id);\n for _ in path.segments.iter().skip(1) {\n def_id = self.skip_def_id(def_id);\n if let Some(parent_def_id) = self.cx.tcx.parent(def_id) {\n def_id = parent_def_id;\n } else {\n return None;\n }\n }\n\n Some(def_id)\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>thread registry implementation<commit_after>\/\/! Parses the `status` files from the `\/proc` pseudo file system and collects information about\n\/\/! 
currently running processes.\n\nuse super::parsers::{parse_line, consume_until_line_ending, parse_u32_octal, parse_u64, parse_u32};\nuse super::cgroups::{CGroup, cgroups};\nuse nom::{IResult, line_ending, space};\nuse std::collections::HashMap;\nuse std::io::{Error, Result, ErrorKind, Read};\nuse std::fs::File;\nuse glob::glob;\nuse std::path::Path;\n\n#[derive(Debug, Serialize)]\npub enum ThreadState {\n Running,\n Sleeping,\n Waiting,\n Stopped,\n TraceStopped,\n Dead,\n Zombie\n}\n\npub struct ThreadRegistry {\n pub threads: HashMap<u64, ThreadInfo>,\n proc_root: String\n}\n\n#[derive(Serialize, Debug)]\npub struct ThreadInfo {\n \/\/\/ filename of the executable\n pub comm: String,\n \/\/\/ current state of the process\n pub state: ThreadState,\n \/\/\/ process id (i.e., thread group id)\n pub pid: u64,\n \/\/\/ thread id\n pub tid: u64,\n \/\/\/ process id of the parent process\n pub ppid: u64,\n \/\/\/ real user id\n pub uid: u32,\n \/\/\/ real group id\n pub gid: u32,\n \/\/\/ cgroups bounded to this thread\n pub cgroups: Option<Vec<CGroup>>\n}\n\nnamed!(parse_thread_state<ThreadState>,\n alt!(tag!(\"R (running)\") => { |_| ThreadState::Running }\n | tag!(\"S (sleeping)\") => { |_| ThreadState::Sleeping }\n | tag!(\"D (disk sleep)\") => { |_| ThreadState::Waiting }\n | tag!(\"T (stopped)\") => { |_| ThreadState::Stopped }\n | tag!(\"t (tracing stop)\") => { |_| ThreadState::TraceStopped }\n | tag!(\"X (dead)\") => { |_| ThreadState::Dead }\n | tag!(\"Z (zombie)\") => { |_| ThreadState::Zombie }));\n\n\nnamed!(parse_command<String>, delimited!(tag!(\"Name:\\t\"), parse_line, line_ending));\nnamed!(parse_umask<u32>, delimited!(tag!(\"Umask:\\t\"), parse_u32_octal, line_ending));\nnamed!(parse_state<ThreadState>, delimited!(tag!(\"State:\\t\"), parse_thread_state, line_ending));\nnamed!(parse_pid<u64>, delimited!(tag!(\"Tgid:\\t\"), parse_u64, line_ending));\nnamed!(parse_tid<u64>, delimited!(tag!(\"Pid:\\t\"), parse_u64, line_ending));\nnamed!(parse_ppid<u64>, delimited!(tag!(\"PPid:\\t\"), parse_u64, line_ending));\n\nnamed!(parse_uid<u32>, chain!(tag!(\"Uid:\\t\") ~ uid: parse_u32 ~ consume_until_line_ending,\n || {(uid)}));\nnamed!(parse_gid<u32>, chain!(tag!(\"Gid:\\t\") ~ gid: parse_u32 ~ consume_until_line_ending,\n || {(gid)}));\n\nnamed!(thread_info<ThreadInfo>,\n do_parse!(\n comm: parse_command >>\n opt!(parse_umask) >>\n state: parse_state >>\n pid: parse_pid >>\n consume_until_line_ending >>\n tid: parse_tid >>\n ppid: parse_ppid >>\n consume_until_line_ending >>\n uid: parse_uid >>\n gid: parse_gid >>\n (ThreadInfo {\n comm: comm,\n state: state,\n pid: pid,\n tid: tid,\n ppid: ppid,\n uid: uid,\n gid: gid,\n cgroups: None,\n })\n ));\n\nfn parse_thread(buf: &[u8]) -> IResult<&[u8], ThreadInfo> {\n map!(buf, thread_info, |t| {t})\n}\n\npub fn parse_thread_info(pid: u64, root: String) -> Result<ThreadInfo> {\n let mut buf = String::new();\n let mut f = try!(File::open(format!(\"{}\/{}\/status\", root, pid)));\n f.read_to_string(&mut buf);\n match parse_thread(buf.as_bytes()) {\n IResult::Done(i, o) => {\n Ok(o)\n },\n IResult::Error(e) => {\n Err(Error::new(ErrorKind::InvalidInput, \"unable to parse status file\"))\n },\n _ => Err(Error::new(ErrorKind::InvalidInput, \"unable to parse status file\")),\n }\n}\n\nimpl ThreadRegistry {\n\n pub fn new() -> ThreadRegistry {\n ThreadRegistry {\n threads: HashMap::new(),\n proc_root: \"\/proc\".to_string()\n }\n }\n\n pub fn collect(&mut self) {\n for e in glob(&format!(\"{}\/*[0-9]*\", self.proc_root.clone()))\n 
.expect(\"error\") {\n match e {\n Ok(path) => {\n let pid = path.file_name().unwrap()\n .to_str()\n .unwrap().parse::<u64>().unwrap();\n let mut ti = parse_thread_info(pid, self.proc_root.clone()).unwrap();\n ti.cgroups = Some(cgroups(pid, self.proc_root.clone()).unwrap());\n self.threads.insert(pid, ti);\n },\n Err(e) => {\n\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>todo's<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>docs(server): Add server example querying a web api<commit_after>#![deny(warnings)]\nextern crate futures;\nextern crate hyper;\nextern crate pretty_env_logger;\nextern crate tokio_core;\n\nuse futures::{Future, Stream};\n\nuse hyper::{Body, Chunk, Client, Get, Post, StatusCode};\nuse hyper::error::Error;\nuse hyper::header::ContentLength;\nuse hyper::server::{Http, Service, Request, Response};\n\n#[allow(unused)]\nuse std::ascii::AsciiExt;\n\nstatic NOTFOUND: &[u8] = b\"Not Found\";\nstatic URL: &str = \"http:\/\/127.0.0.1:1337\/web_api\";\nstatic INDEX: &[u8] = b\"<a href=\\\"test.html\\\">test.html<\/a>\";\nstatic LOWERCASE: &[u8] = b\"i am a lower case string\";\n\npub type ResponseStream = Box<Stream<Item=Chunk, Error=Error>>;\n\nstruct ResponseExamples(tokio_core::reactor::Handle);\n\nimpl Service for ResponseExamples {\n type Request = Request;\n type Response = Response<ResponseStream>;\n type Error = hyper::Error;\n type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;\n\n fn call(&self, req: Request) -> Self::Future {\n match (req.method(), req.path()) {\n (&Get, \"\/\") | (&Get, \"\/index.html\") => {\n let body: ResponseStream = Box::new(Body::from(INDEX));\n Box::new(futures::future::ok(Response::new()\n .with_header(ContentLength(INDEX.len() as u64))\n .with_body(body)))\n },\n (&Get, \"\/test.html\") => {\n \/\/ Run a web query against the web api below\n let client = Client::configure().build(&self.0);\n let mut req = Request::new(Post, URL.parse().unwrap());\n req.set_body(LOWERCASE);\n let web_res_future = client.request(req);\n\n Box::new(web_res_future.map(|web_res| {\n let body: ResponseStream = Box::new(web_res.body().map(|b| {\n Chunk::from(format!(\"before: '{:?}'<br>after: '{:?}'\",\n std::str::from_utf8(LOWERCASE).unwrap(),\n std::str::from_utf8(&b).unwrap()))\n }));\n Response::new().with_body(body)\n }))\n },\n (&Post, \"\/web_api\") => {\n \/\/ A web api to run against. 
Simple upcasing of the body.\n let body: ResponseStream = Box::new(req.body().map(|chunk| {\n let upper = chunk.iter().map(|byte| byte.to_ascii_uppercase())\n .collect::<Vec<u8>>();\n Chunk::from(upper)\n }));\n Box::new(futures::future::ok(Response::new().with_body(body)))\n },\n _ => {\n let body: ResponseStream = Box::new(Body::from(NOTFOUND));\n Box::new(futures::future::ok(Response::new()\n .with_status(StatusCode::NotFound)\n .with_header(ContentLength(NOTFOUND.len() as u64))\n .with_body(body)))\n }\n }\n }\n\n}\n\n\nfn main() {\n pretty_env_logger::init().unwrap();\n let addr = \"127.0.0.1:1337\".parse().unwrap();\n\n let mut core = tokio_core::reactor::Core::new().unwrap();\n let handle = core.handle();\n let client_handle = core.handle();\n\n let serve = Http::new().serve_addr_handle(&addr, &handle, move || Ok(ResponseExamples(client_handle.clone()))).unwrap();\n println!(\"Listening on http:\/\/{} with 1 thread.\", serve.incoming_ref().local_addr());\n\n let h2 = handle.clone();\n handle.spawn(serve.for_each(move |conn| {\n h2.spawn(conn.map(|_| ()).map_err(|err| println!(\"serve error: {:?}\", err)));\n Ok(())\n }).map_err(|_| ()));\n\n core.run(futures::future::empty::<(), ()>()).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut 
string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\".to_string(),\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + &c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.is_empty() {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Refactor terminal commands to be `str` instead of `String`<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as 
*mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\",\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\",\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\",\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\",\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"url\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n 
name: \"wget\",\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\",\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.is_empty() {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #79302 - est31:issue_73899_test, r=lcnr<commit_after>\/\/ run-pass\n#![feature(const_evaluatable_checked)]\n#![feature(const_generics)]\n#![allow(incomplete_features)]\n\ntrait Foo {}\n\nimpl<const N: usize> Foo for [(); N] where Self: FooImpl<{ N == 0 }> {}\n\ntrait FooImpl<const IS_ZERO: bool> {}\n\nimpl FooImpl<{ 0u8 == 0u8 }> for [(); 0] {}\n\nimpl<const N: usize> FooImpl<{ 0u8 != 0u8 }> for [(); N] {}\n\nfn foo<T: Foo>(_v: T) {}\n\nfn main() {\n foo([]);\n foo([()]);\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\",\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\",\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\",\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\",\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"test_ht\",\n main: box |args: &Vec<String>| {\n ::redox::hashmap::test();\n },\n });\n\n commands.push(Command {\n name: \"url\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed 
to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\",\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: box |args: &Vec<String>| {\n let mut err = false;\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n err = true;\n }\n } else {\n err = true;\n }\n if err {\n println!(\"Could not get the path\");\n }\n },\n });\n\n commands.push(Command {\n name: \"cd\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\",\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n let variables = self.variables.iter()\n .fold(String::new(),\n |string, variable| string + \"\\n\" + &variable.name + \"=\" + &variable.value);\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.is_empty() {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Single command to create a file, with your terminal<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut 
Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\",\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\",\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\",\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\",\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"test_ht\",\n main: box |args: &Vec<String>| {\n ::redox::hashmap::test();\n },\n });\n\n commands.push(Command {\n name: \"url\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n 
println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\",\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n \/\/\/ Simple command to create a file, in the current directory\n \/\/\/ The file has got the name given as the first argument of the command\n \/\/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"cfile\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => {\n File::create(file_name);\n }\n None => {\n println!(\"Could not create a file without a name\");\n }\n }\n }\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: box |args: &Vec<String>| {\n let mut err = false;\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n err = true;\n }\n } else {\n err = true;\n }\n if err {\n println!(\"Could not get the path\");\n }\n },\n });\n\n commands.push(Command {\n name: \"cd\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\",\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n let variables = self.variables.iter()\n .fold(String::new(),\n |string, variable| string + \"\\n\" + &variable.name + \"=\" + &variable.value);\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.is_empty() {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add end-to-end test<commit_after>extern crate zip;\n\nuse std::io::prelude::*;\nuse zip::write::FileOptions;\nuse std::io::Cursor;\n\n\/\/ This test asserts that after creating a zip file, then reading its contents back out,\n\/\/ the extracted data will *always* be exactly the same as the original data.\n#[test]\nfn main() -> 
zip::result::ZipResult<()> {\n let buf: &mut Vec<u8> = &mut Vec::new();\n let mut file = Cursor::new(buf);\n\n write_to_zip_file(&mut file)?;\n\n let file_contents: String = read_zip_file(file).unwrap();\n\n assert!(file_contents.as_bytes() == LOREM_IPSUM);\n\n Ok(())\n}\n\nfn write_to_zip_file(file: &mut Cursor<&mut Vec<u8>>) -> zip::result::ZipResult<()> {\n let mut zip = zip::ZipWriter::new(file);\n\n zip.add_directory(\"test\/\", FileOptions::default())?;\n\n let options = FileOptions::default()\n .compression_method(zip::CompressionMethod::Stored)\n .unix_permissions(0o755);\n zip.start_file(\"test\/☃.txt\", options)?;\n zip.write_all(b\"Hello, World!\\n\")?;\n\n zip.start_file(\"test\/lorem_ipsum.txt\", FileOptions::default())?;\n zip.write_all(LOREM_IPSUM)?;\n\n zip.finish()?;\n Ok(())\n}\n\nfn read_zip_file(zip_file: Cursor<&mut Vec<u8>>) -> zip::result::ZipResult<String> {\n let mut archive = zip::ZipArchive::new(zip_file).unwrap();\n\n let mut file = archive.by_name(\"test\/lorem_ipsum.txt\")?;\n\n let mut contents = String::new();\n file.read_to_string(&mut contents).unwrap();\n Ok(contents)\n}\n\nconst LOREM_IPSUM : &'static [u8] = b\"Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tellus elit, tristique vitae mattis egestas, ultricies vitae risus. Quisque sit amet quam ut urna aliquet\nmolestie. Proin blandit ornare dui, a tempor nisl accumsan in. Praesent a consequat felis. Morbi metus diam, auctor in auctor vel, feugiat id odio. Curabitur ex ex,\ndictum quis auctor quis, suscipit id lorem. Aliquam vestibulum dolor nec enim vehicula, porta tristique augue tincidunt. Vivamus ut gravida est. Sed pellentesque, dolor\nvitae tristique consectetur, neque lectus pulvinar dui, sed feugiat purus diam id lectus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per\ninceptos himenaeos. 
Maecenas feugiat velit in ex ultrices scelerisque id id neque.\n\";\n<|endoftext|>"} {"text":"<commit_before><commit_msg>commit bin folder;<commit_after>extern crate getopts;\nextern crate csv;\nextern crate veye_checker;\n\nuse getopts::Options;\nuse std::path::Path;\nuse std::fs::File;\nuse std::io::Write;\nuse std::process;\nuse std::env;\nuse std::error::Error;\nuse std::rc::Rc;\nuse std::borrow::Borrow;\n\nuse veye_checker::{product, api, checker};\nuse product::CSVSerializer;\n\nfn init_out_file(outfile_path: &Path) -> Result<bool, std::io::Error> {\n \/\/it creates a new file or truncates existing one\n let mut f = File::create( outfile_path ).ok().expect(\"Failed to create output file\");\n try!(f.write_all(b\"file_path,package_sha\\n\"));\n try!(f.sync_all());\n\n Ok(true)\n}\n\nfn show_usage(program_name: &str, opts: Options) -> Result<bool, String> {\n let brief = format!(r#\"\n usage:\n {} scan DIRECTORY_PATH -o OUTPUT_FILE\n {} lookup FILE_SHA -a API_TOKEN\n {} lookup_csv SHA_FILE_PATH -o OUTPUT_FILE -a API_TOKEN\n \"#,\n program_name, program_name, program_name\n );\n print!(\"{}\", opts.usage(&brief));\n Ok(true)\n}\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n let program_name = args[0].clone();\n let mut opts = Options::new();\n\n \/\/register options\n opts.optopt(\"o\", \"output\", \"specifies the name of output file\", \"FILENAME\");\n opts.optopt(\"a\", \"auth\", \"specifies api-key for API calls\", \"API_TOKEN\");\n opts.optflag(\"h\", \"help\", \"shows usage help\");\n\n \/\/parse command-line arguments\n let matches = match opts.parse(&args[1..]){\n Ok(m) => { m },\n Err(f) => { panic!(f.to_string()) }\n };\n\n \/\/did user asked to see help menu\n if matches.opt_present(\"h\") {\n show_usage(&program_name, opts);\n return;\n }\n\n if matches.free.len() < 1 {\n println!(\"Error: Subcommand is unspecified\");\n show_usage(&program_name, opts);\n return;\n }\n\n let command = matches.free[0].clone();\n let cmd_res = match command.as_ref() {\n \"scan\" => do_scan_task(&matches),\n \"lookup\" => do_lookup_task(&matches),\n \"lookup_csv\" => do_lookup_csv_task(&matches),\n _ => show_usage(&program_name, opts)\n };\n\n print_cmd_result(cmd_res);\n}\n\n\nfn do_scan_task(matches: &getopts::Matches) -> Result<bool, String> {\n \/\/extract input arguments\n let dir_txt = if matches.free.len() != 2 {\n panic!(\"scan command misses a path to folder\".to_string());\n } else {\n matches.free[1].clone()\n };\n let outpath = matches.opt_str(\"o\");\n\n println!(\"Scanning: {}\", dir_txt);\n let dir = Path::new(&dir_txt);\n\n if outpath.is_some() {\n println!(\"Will dump results into file...\");\n let path_str = outpath.unwrap();\n let out_dir = Path::new(&path_str);\n let res = init_out_file(&out_dir);\n checker::scan_dir(dir, Some(&out_dir));\n\n } else {\n println!(\"No output file were defined\");\n checker::scan_dir(dir, None);\n }\n Ok(true)\n}\n\nfn fetch_product_details(file_sha: &str, api_key: &str) -> Result<product::ProductMatch, std::io::Error> {\n println!(\"Going to checkup product by SHA: {}\", file_sha);\n let sha_res = api::fetch_product_by_sha(&file_sha, &api_key.clone());\n\n\n match sha_res {\n Ok(m) => {\n println!(\"Going to check product details by matched SHA result\");\n let sha = m.sha.expect(\"No product sha from SHA result\");\n let product = m.product.expect(\"No product info from SHA result\");\n match api::fetch_product( &product.language, &product.prod_key, &product.version, &api_key ) {\n Ok(mut m) => {\n m.sha = Some(sha);\n 
Ok(m)\n },\n Err(e) => Err(e)\n }\n\n },\n Err(e) => Err(e)\n }\n}\n\nfn do_lookup_task(matches: &getopts::Matches) -> Result<bool, String> {\n\n let file_sha = if matches.free.len() != 2 {\n panic!(\"lookup command misses SHA-code\");\n\n } else {\n matches.free[1].clone()\n };\n\n let api_key = matches.opt_str(\"a\").expect(\"Missing API_KEY!\");\n let out_filepath = matches.opt_str(\"o\");\n\n match fetch_product_details(&file_sha.clone(), &api_key) {\n Ok(m) => {\n if out_filepath.is_none() {\n println!(\"{}{}\", m.to_csv_header(), m.to_csv() )\n } else {\n let out_fp = out_filepath.unwrap();\n let mut wtr = File::create(out_fp).expect(\"Failed to open outout file\");\n wtr.write_all(& m.to_csv_header().into_bytes());\n wtr.write_all(& m.to_csv().into_bytes());\n wtr.sync_data();\n\n println!(\"Dumped result into specified file;\");\n }\n },\n Err(e) => println!(\"No product info for sha {}\", file_sha)\n }\n\n Ok(true)\n}\n\nfn do_lookup_csv_task(matches: &getopts::Matches) -> Result<bool, String> {\n let sha_results_filepath = if matches.free.len() != 2 {\n panic!(\"lookup_csv: no input file was specified\");\n } else {\n matches.free[1].clone()\n };\n\n let api_key = matches.opt_str(\"a\").expect(\"Missing API_KEY!\");\n let output_path = matches.opt_str(\"o\").expect(\"Missing output file\");\n\n let mut rdr = csv::Reader::from_file(\n sha_results_filepath.clone()\n ).expect(format!(\"Failed to read SHA file from {}\", sha_results_filepath).as_ref());\n\n let mut csv_rows = vec![];\n\n for row in rdr.decode() {\n\n let (file_path, file_sha): (String, String) = row.unwrap();\n match fetch_product_details(&file_sha.clone(), &api_key){\n Ok(mut m) => {\n m.filepath = Some(file_path.clone());\n if csv_rows.len() == 0 {\n csv_rows.push(m.to_csv_header());\n }\n\n csv_rows.push(m.to_csv());\n },\n Err(e) => {\n println!(\"Failed to get product details for {}, {}\", file_path.clone(), file_sha.clone());\n let empty_m = product::ProductMatch {\n sha: None,\n product: None,\n url: None,\n licenses: vec![],\n n_vulns: 0,\n filepath: Some(file_path.clone())\n };\n\n if csv_rows.len() == 0 {\n csv_rows.push(empty_m.to_csv_header());\n }\n csv_rows.push(empty_m.to_csv());\n }\n }\n\n };\n\n let mut wtr = File::create(output_path).expect(\"Failed to open output file\");\n for row in csv_rows {\n wtr.write_all(& row.into_bytes());\n };\n wtr.sync_data();\n\n Ok(true)\n}\n\n\nfn print_cmd_result(cmd_res: Result<bool, std::string::String>){\n match cmd_res {\n Ok(_) => println!(\"Done!\"),\n Err(e) => println!(\"Failed to finish the task: {}\", e)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fetch\/artifact: remove the \"extract\" property<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>testsuite: Add xfailed test case for #6128<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-test\n\nuse std::hashmap::HashMap;\n\ntrait Graph<Node, Edge> {\n fn f(&self, Edge);\n\n}\n\nimpl<E> Graph<int, E> for HashMap<int, int> {\n fn f(&self, _e: E) {\n fail!();\n }\n}\n\nfn main() {\n let g : ~HashMap<int, int> = ~HashMap::new();\n let _g2 : ~Graph<int,int> = g as ~Graph<int,int>;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added test case<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn decode() -> ~str {\n 'outer: loop {\n let mut ch_start: uint;\n break 'outer;\n }\n ~\"\"\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for #16403<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ error-pattern:already defined\n\n\n#![allow(warnings)]\n\nfn main() {\n {\n extern fn fail() {}\n }\n {\n extern fn fail() {}\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test to check that AtomicBool has the proper representation<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(atomic_access)]\nuse std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT};\nuse std::sync::atomic::Ordering::*;\n\nstatic mut ATOMIC: AtomicBool = ATOMIC_BOOL_INIT;\n\nfn main() {\n unsafe {\n assert_eq!(*ATOMIC.get_mut(), false);\n ATOMIC.store(true, SeqCst);\n assert_eq!(*ATOMIC.get_mut(), true);\n ATOMIC.fetch_or(false, SeqCst);\n assert_eq!(*ATOMIC.get_mut(), true);\n ATOMIC.fetch_and(false, SeqCst);\n assert_eq!(*ATOMIC.get_mut(), false);\n ATOMIC.fetch_nand(true, SeqCst);\n assert_eq!(*ATOMIC.get_mut(), true);\n ATOMIC.fetch_xor(true, SeqCst);\n assert_eq!(*ATOMIC.get_mut(), false);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n 
\/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<commit_msg>Implement common::memory::Memory<commit_after>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n let alloc = alloc(size);\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else 
{ None }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n let realloc = realloc(self.address, size);\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unalloc(self.address)\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn 
unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>First version of buffer traits. Not compilable.<commit_after>pub struct Position {\n line: u32,\n column: u32\n}\n\npub enum Cursor {\n Simple(Position),\n Range(Position, Position),\n Block(Position, Position)\n}\n\npub trait StrLike {\n type Bytes: Iterator;\n type Chars: Iterator;\n type Lines: Iterator;\n\n \/\/\/ Return number of bytes in this object\n fn len(&self) -> usize;\n \/\/\/ Return number of unicode characters in this object\n fn char_count(&self) -> usize;\n \/\/\/ Return number of lines in this object\n fn line_count(&self) -> usize;\n\n fn bytes(&self) -> Bytes;\n fn chars(&self) -> Chars;\n fn lines(&self) -> Lines;\n\n \/\/ TODO: strchr or some such. 
Bytes or chars based?\n \/\/ TODO: Storing styles\n}\n\npub trait Snapshot: StrLike {\n fn cursors(&self) -> &[Cursor];\n\n \/\/\/ Return a snapshot on which this one was based or None if it's not available\n fn previous(&self) -> Option<Self>;\n\n \/\/\/ Return all snapshots based on this one\n fn next(&self) -> &[Self];\n}\n\npub trait Buffer {\n type SnapshotType: Snapshot;\n\n fn current_snapshot(&self) -> Snapshot;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add field<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add src\/bin\/mittens.rs<commit_after>extern crate mittens;\n\nfn main() -> () {\n println!(\"test\");\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt::{Debug, Formatter};\nuse std::fmt::Result as FMTResult;\n\nuse clap::ArgMatches;\n\nmod header;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\n\npub struct Notes<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> Notes<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> Notes<'a> {\n Notes {\n rt: rt,\n }\n }\n\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use std::process::exit;\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let name = matches.value_of(\"name\")\n .map(String::from)\n .unwrap_or(String::from(\"\"));\n let tags = matches.value_of(\"tags\")\n .and_then(|s| Some(s.split(\",\").map(String::from).collect()))\n .unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" name = '{:?}'\", name);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(name, tags);\n\n let fileid = self.rt.store().new_file_with_header(self, header);\n self.rt\n .store()\n .load(self, &parser, &fileid)\n .and_then(|file| {\n info!(\"Created file in memory: {}\", fileid);\n Some(self.rt.store().persist(&parser, file))\n })\n .unwrap_or(false)\n }\n\n fn command_list(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_add_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_rm_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_set_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n}\n\nimpl<'a> Module<'a> for Notes<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(\"add_tags\") => {\n self.command_add_tags(matches.subcommand_matches(\"add_tags\").unwrap())\n },\n\n Some(\"rm_tags\") => {\n self.command_rm_tags(matches.subcommand_matches(\"rm_tags\").unwrap())\n },\n\n Some(\"set_tags\") => {\n self.command_set_tags(matches.subcommand_matches(\"set_tags\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"No command given, doing nothing\");\n false\n },\n }\n }\n\n fn name(&self) -> &'static str{\n \"notes\"\n }\n\n}\n\nimpl<'a> Debug for Notes<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> FMTResult {\n write!(fmt, \"[Module][Notes]\");\n Ok(())\n }\n\n}\n<commit_msg>Notes: Implement command_list()<commit_after>use std::fmt::{Debug, Formatter};\nuse std::fmt::Result as FMTResult;\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\nuse 
clap::ArgMatches;\nuse regex::Regex;\n\nmod header;\n\nuse module::Module;\nuse runtime::Runtime;\nuse storage::file::File;\nuse storage::parser::Parser;\nuse storage::json::parser::JsonHeaderParser;\nuse module::helpers::cli::create_tag_filter;\nuse module::helpers::cli::create_hash_filter;\nuse module::helpers::cli::create_text_header_field_grep_filter;\nuse module::helpers::cli::create_content_grep_filter;\nuse module::helpers::cli::CliFileFilter;\n\npub struct Notes<'a> {\n rt: &'a Runtime<'a>,\n}\n\nimpl<'a> Notes<'a> {\n\n pub fn new(rt: &'a Runtime<'a>) -> Notes<'a> {\n Notes {\n rt: rt,\n }\n }\n\n fn command_add(&self, matches: &ArgMatches) -> bool {\n use std::process::exit;\n use self::header::build_header;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let name = matches.value_of(\"name\")\n .map(String::from)\n .unwrap_or(String::from(\"\"));\n let tags = matches.value_of(\"tags\")\n .and_then(|s| Some(s.split(\",\").map(String::from).collect()))\n .unwrap_or(vec![]);\n\n debug!(\"Building header with\");\n debug!(\" name = '{:?}'\", name);\n debug!(\" tags = '{:?}'\", tags);\n let header = build_header(name, tags);\n\n let fileid = self.rt.store().new_file_with_header(self, header);\n self.rt\n .store()\n .load(self, &parser, &fileid)\n .and_then(|file| {\n info!(\"Created file in memory: {}\", fileid);\n Some(self.rt.store().persist(&parser, file))\n })\n .unwrap_or(false)\n }\n\n fn command_list(&self, matches: &ArgMatches) -> bool {\n use ui::file::{FilePrinter, TablePrinter};\n use std::ops::Deref;\n use self::header::get_name_from_header;\n use self::header::get_tags_from_header;\n use std::process::exit;\n use module::helpers::cli::CliFileFilter;\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n\n let filter = {\n let hash_filter = create_hash_filter(matches, \"id\", true);\n let head_filter = create_text_header_field_grep_filter(matches, \"match\", \"NAME\", true);\n let text_filter = create_content_grep_filter(matches, \"match\", true);\n let tags_filter = create_tag_filter(matches, \"tags\", true);\n hash_filter.or(Box::new(head_filter)).and(Box::new(text_filter)).and(Box::new(tags_filter))\n };\n\n let printer = TablePrinter::new(self.rt.is_verbose(), self.rt.is_debugging());\n\n printer.print_files_custom(\n self.rt.store()\n .load_for_module(self, &parser)\n .into_iter()\n .filter(|f| filter.filter_file(f)),\n &|file| {\n let fl = file.deref().borrow();\n let hdr = fl.header();\n let name = get_name_from_header(hdr);\n let tags = get_tags_from_header(hdr);\n\n debug!(\"Custom printer field: name = '{:?}'\", name);\n debug!(\"Custom printer field: tags = '{:?}'\", tags);\n\n vec![name, tags.join(\", \")]\n }\n );\n true\n }\n\n fn command_remove(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_add_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_rm_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n fn command_set_tags(&self, matches: &ArgMatches) -> bool {\n unimplemented!()\n }\n\n}\n\nimpl<'a> Module<'a> for Notes<'a> {\n\n fn exec(&self, matches: &ArgMatches) -> bool {\n match matches.subcommand_name() {\n Some(\"add\") => {\n self.command_add(matches.subcommand_matches(\"add\").unwrap())\n },\n\n Some(\"list\") => {\n self.command_list(matches.subcommand_matches(\"list\").unwrap())\n },\n\n Some(\"remove\") => {\n self.command_remove(matches.subcommand_matches(\"remove\").unwrap())\n },\n\n Some(\"add_tags\") => {\n 
self.command_add_tags(matches.subcommand_matches(\"add_tags\").unwrap())\n },\n\n Some(\"rm_tags\") => {\n self.command_rm_tags(matches.subcommand_matches(\"rm_tags\").unwrap())\n },\n\n Some(\"set_tags\") => {\n self.command_set_tags(matches.subcommand_matches(\"set_tags\").unwrap())\n },\n\n Some(_) | None => {\n info!(\"No command given, doing nothing\");\n false\n },\n }\n }\n\n fn name(&self) -> &'static str{\n \"notes\"\n }\n\n}\n\nimpl<'a> Debug for Notes<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> FMTResult {\n write!(fmt, \"[Module][Notes]\");\n Ok(())\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adding main file<commit_after>fn main() {\n\tprintln!(\"Hello, watermark!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Stopped printing events<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added some logging to token.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test case illustrating some variants of the issue pointed out by ariel.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ revisions: ast mir\n\/\/[mir]compile-flags: -Z emit-end-regions -Z borrowck-mir\n\nstruct S<X, Y> {\n x: X,\n y: Y,\n}\n\nfn main() {\n let x: &&Box<i32>;\n let _y = &**x; \/\/[ast]~ ERROR use of possibly uninitialized variable: `**x` [E0381]\n \/\/[mir]~^ (Ast) [E0381]\n \/\/[mir]~| (Mir) [E0381]\n\n let x: &&S<i32, i32>;\n let _y = &**x; \/\/[ast]~ ERROR use of possibly uninitialized variable: `**x` [E0381]\n \/\/[mir]~^ (Ast) [E0381]\n \/\/[mir]~| (Mir) [E0381]\n\n let x: &&i32;\n let _y = &**x; \/\/[ast]~ ERROR use of possibly uninitialized variable: `**x` [E0381]\n \/\/[mir]~^ (Ast) [E0381]\n \/\/[mir]~| (Mir) [E0381]\n\n\n let mut a: S<i32, i32>;\n a.x = 0;\n let _b = &a.x; \/\/[ast]~ ERROR use of possibly uninitialized variable: `a.x` [E0381]\n \/\/[mir]~^ ERROR (Ast) [E0381]\n \/\/ (deliberately *not* an error under MIR-borrowck)\n\n let mut a: S<&&i32, &&i32>;\n a.x = &&0;\n let _b = &**a.x; \/\/[ast]~ ERROR use of possibly uninitialized variable: `**a.x` [E0381]\n \/\/[mir]~^ ERROR (Ast) [E0381]\n \/\/ (deliberately *not* an error under MIR-borrowck)\n\n\n let mut a: S<i32, i32>;\n a.x = 0;\n let _b = &a.y; \/\/[ast]~ ERROR use of possibly uninitialized variable: `a.y` [E0381]\n \/\/[mir]~^ ERROR (Ast) [E0381]\n \/\/[mir]~| ERROR (Mir) [E0381]\n\n let mut a: S<&&i32, &&i32>;\n a.x = &&0;\n let _b = &**a.y; \/\/[ast]~ ERROR use of possibly uninitialized variable: `**a.y` [E0381]\n \/\/[mir]~^ ERROR (Ast) [E0381]\n \/\/[mir]~| ERROR (Mir) [E0381]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixed<commit_after>use std::thunk::Invoke;\n\nfn foo (f:Box<Invoke<(), int>>) {\n print!(\"{}\", f.invoke(()) );\n}\n\npub fn main () { \n let f : Box<Invoke<(),int>> = box move |:()| 1 ;\n foo (f);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the 
License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Slices\n\/\/!\n\/\/! See `Slice`-structure documentation for more information on this module.\n\nuse core::{handle, buffer};\nuse core::{Primitive, Resources, VertexCount};\nuse core::command::InstanceParams;\nuse core::factory::Factory;\nuse core::memory::Bind;\nuse format::Format;\nuse pso;\n\n\/\/\/ A `Slice` dictates in which and in what order vertices get processed. It is required for\n\/\/\/ processing a PSO.\n\/\/\/\n\/\/\/ # Overview\n\/\/\/ A `Slice` object in essence dictates in what order the vertices in a `VertexBuffer` get\n\/\/\/ processed. To do this, it contains an internal index-buffer. This `Buffer` is a list of\n\/\/\/ indices into this `VertexBuffer` (vertex-index). A vertex-index of 0 represents the first\n\/\/\/ vertex in the `VertexBuffer`, a vertex-index of 1 represents the second, 2 represents the\n\/\/\/ third, and so on. The vertex-indices in the index-buffer are read in order; every vertex-index\n\/\/\/ tells the pipeline which vertex to process next. \n\/\/\/\n\/\/\/ Because the same index can re-appear multiple times, duplicate-vertices can be avoided. For\n\/\/\/ instance, if you want to draw a square, you need two triangles, and thus six vertices. Because\n\/\/\/ the same index can reappear multiple times, this means we can instead use 4 vertices, and 6\n\/\/\/ vertex-indices.\n\/\/\/\n\/\/\/ This index-buffer has a few variants. See the `IndexBuffer` documentation for a detailed\n\/\/\/ description.\n\/\/\/\n\/\/\/ The `start` and `end` fields say where in the index-buffer to start and stop reading.\n\/\/\/ Setting `start` to 0, and `end` to the length of the index-buffer, will cause the entire\n\/\/\/ index-buffer to be processed. The `base_vertex` dictates the index of the first vertex\n\/\/\/ in the `VertexBuffer`. This essentially moves the the start of the `VertexBuffer`, to the\n\/\/\/ vertex with this index.\n\/\/\/\n\/\/\/ # Constuction & Handling\n\/\/\/ The `Slice` structure can be constructed automatically when using a `Factory` to create a\n\/\/\/ vertex buffer. If needed, it can also be created manually.\n\/\/\/\n\/\/\/ A `Slice` is required to process a PSO, as it contains the needed information on in what order\n\/\/\/ to draw which vertices. As such, every `draw` call on an `Encoder` requires a `Slice`.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub struct Slice<R: Resources> {\n \/\/\/ The start index of the index-buffer. Processing will start at this location in the\n \/\/\/ index-buffer. \n pub start: VertexCount,\n \/\/\/ The end index in the index-buffer. Processing will stop at this location (exclusive) in\n \/\/\/ the index buffer.\n pub end: VertexCount,\n \/\/\/ This is the index of the first vertex in the `VertexBuffer`. This value will be added to\n \/\/\/ every index in the index-buffer, effectively moving the start of the `VertexBuffer` to this\n \/\/\/ base-vertex.\n pub base_vertex: VertexCount,\n \/\/\/ Instancing configuration.\n pub instances: Option<InstanceParams>,\n \/\/\/ Represents the type of index-buffer used. 
\n pub buffer: IndexBuffer<R>,\n}\n\nimpl<R: Resources> Slice<R> {\n \/\/\/ Creates a new `Slice` to match the supplied vertex buffer, from start to end, in order.\n pub fn new_match_vertex_buffer<V>(vbuf: &handle::Buffer<R, V>) -> Self\n where V: pso::buffer::Structure<Format> {\n Slice {\n start: 0,\n end: vbuf.len() as u32,\n base_vertex: 0,\n instances: None,\n buffer: IndexBuffer::Auto,\n }\n }\n \n \/\/\/ Calculates the number of primitives of the specified type in this `Slice`.\n pub fn get_prim_count(&self, prim: Primitive) -> u32 {\n use core::Primitive as p;\n let nv = (self.end - self.start) as u32;\n match prim {\n p::PointList => nv,\n p::LineList => nv \/ 2,\n p::LineStrip => (nv-1),\n p::TriangleList => nv \/ 3,\n p::TriangleStrip => (nv-2) \/ 3,\n p::LineListAdjacency => nv \/ 4,\n p::LineStripAdjacency => (nv-3),\n p::TriangleListAdjacency => nv \/ 6,\n p::TriangleStripAdjacency => (nv-4) \/ 2,\n p::PatchList(num) => nv \/ (num as u32),\n }\n }\n\n \/\/\/ Divides one slice into two at an index.\n \/\/\/\n \/\/\/ The first will contain the range in the index-buffer [self.start, mid) (excluding the index mid itself) and the\n \/\/\/ second will contain the range [mid, self.end).\n pub fn split_at(&self, mid: VertexCount) -> (Self, Self) {\n let mut first = self.clone();\n let mut second = self.clone();\n first.end = mid;\n second.start = mid;\n\n (first, second)\n }\n}\n\n\/\/\/ Type of index-buffer used in a Slice.\n\/\/\/\n\/\/\/ The `Auto` variant represents a hypothetical index-buffer from 0 to infinity. In other words,\n\/\/\/ all vertices get processed in order. Do note that the `Slice`' `start` and `end` restrictions\n\/\/\/ still apply for this variant. To render every vertex in the `VertexBuffer`, you would set\n\/\/\/ `start` to 0, and `end` to the `VertexBuffer`'s length.\n\/\/\/\n\/\/\/ The `Index*` variants represent an actual `Buffer` with a list of vertex-indices. The numeric \n\/\/\/ suffix specifies the amount of bits to use per index. Each of these also contains a\n\/\/\/ base-vertex. This is the index of the first vertex in the `VertexBuffer`. This value will be\n\/\/\/ added to every index in the index-buffer, effectively moving the start of the `VertexBuffer` to\n\/\/\/ this base-vertex.\n\/\/\/\n\/\/\/ # Construction & Handling\n\/\/\/ A `IndexBuffer` can be constructed using the `IntoIndexBuffer` trait, from either a slice or a\n\/\/\/ `Buffer` of integers, using a factory.\n\/\/\/\n\/\/\/ An `IndexBuffer` is exclusively used to create `Slice`s.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub enum IndexBuffer<R: Resources> {\n \/\/\/ Represents a hypothetical index-buffer from 0 to infinity. 
In other words, all vertices\n \/\/\/ get processed in order.\n Auto,\n \/\/\/ An index-buffer with unsigned 16 bit indices.\n Index16(handle::Buffer<R, u16>),\n \/\/\/ An index-buffer with unsigned 32 bit indices.\n Index32(handle::Buffer<R, u32>),\n}\n\nimpl<R: Resources> Default for IndexBuffer<R> {\n fn default() -> Self {\n IndexBuffer::Auto\n }\n}\n\/\/\/ A helper trait to create `IndexBuffers` from different kinds of data.\npub trait IntoIndexBuffer<R: Resources> {\n \/\/\/ Turns self into an `IndexBuffer`.\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, factory: &mut F) -> IndexBuffer<R>;\n}\n\nimpl<R: Resources> IntoIndexBuffer<R> for IndexBuffer<R> {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n self\n }\n}\n\nimpl<R: Resources> IntoIndexBuffer<R> for () {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n IndexBuffer::Auto\n }\n}\n\nmacro_rules! impl_index_buffer {\n ($prim_ty:ty, $buf_ty:ident) => (\n impl<R: Resources> IntoIndexBuffer<R> for handle::Buffer<R, $prim_ty> {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n IndexBuffer::$buf_ty(self)\n }\n }\n \n impl<'s, R: Resources> IntoIndexBuffer<R> for &'s [$prim_ty] {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, factory: &mut F) -> IndexBuffer<R> {\n factory.create_buffer_immutable(self, buffer::Role::Index, Bind::empty())\n .unwrap()\n .into_index_buffer(factory)\n }\n }\n )\n}\n\nimpl_index_buffer!(u16, Index16);\nimpl_index_buffer!(u32, Index32);\n<commit_msg>Fix typo and remove trailing whitespace in slice.rs<commit_after>\/\/ Copyright 2014 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Slices\n\/\/!\n\/\/! See `Slice`-structure documentation for more information on this module.\n\nuse core::{handle, buffer};\nuse core::{Primitive, Resources, VertexCount};\nuse core::command::InstanceParams;\nuse core::factory::Factory;\nuse core::memory::Bind;\nuse format::Format;\nuse pso;\n\n\/\/\/ A `Slice` dictates in which and in what order vertices get processed. It is required for\n\/\/\/ processing a PSO.\n\/\/\/\n\/\/\/ # Overview\n\/\/\/ A `Slice` object in essence dictates in what order the vertices in a `VertexBuffer` get\n\/\/\/ processed. To do this, it contains an internal index-buffer. This `Buffer` is a list of\n\/\/\/ indices into this `VertexBuffer` (vertex-index). A vertex-index of 0 represents the first\n\/\/\/ vertex in the `VertexBuffer`, a vertex-index of 1 represents the second, 2 represents the\n\/\/\/ third, and so on. The vertex-indices in the index-buffer are read in order; every vertex-index\n\/\/\/ tells the pipeline which vertex to process next.\n\/\/\/\n\/\/\/ Because the same index can re-appear multiple times, duplicate-vertices can be avoided. For\n\/\/\/ instance, if you want to draw a square, you need two triangles, and thus six vertices. 
Because\n\/\/\/ the same index can reappear multiple times, this means we can instead use 4 vertices, and 6\n\/\/\/ vertex-indices.\n\/\/\/\n\/\/\/ This index-buffer has a few variants. See the `IndexBuffer` documentation for a detailed\n\/\/\/ description.\n\/\/\/\n\/\/\/ The `start` and `end` fields say where in the index-buffer to start and stop reading.\n\/\/\/ Setting `start` to 0, and `end` to the length of the index-buffer, will cause the entire\n\/\/\/ index-buffer to be processed. The `base_vertex` dictates the index of the first vertex\n\/\/\/ in the `VertexBuffer`. This essentially moves the the start of the `VertexBuffer`, to the\n\/\/\/ vertex with this index.\n\/\/\/\n\/\/\/ # Constuction & Handling\n\/\/\/ The `Slice` structure can be constructed automatically when using a `Factory` to create a\n\/\/\/ vertex buffer. If needed, it can also be created manually.\n\/\/\/\n\/\/\/ A `Slice` is required to process a PSO, as it contains the needed information on in what order\n\/\/\/ to draw which vertices. As such, every `draw` call on an `Encoder` requires a `Slice`.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub struct Slice<R: Resources> {\n \/\/\/ The start index of the index-buffer. Processing will start at this location in the\n \/\/\/ index-buffer.\n pub start: VertexCount,\n \/\/\/ The end index in the index-buffer. Processing will stop at this location (exclusive) in\n \/\/\/ the index buffer.\n pub end: VertexCount,\n \/\/\/ This is the index of the first vertex in the `VertexBuffer`. This value will be added to\n \/\/\/ every index in the index-buffer, effectively moving the start of the `VertexBuffer` to this\n \/\/\/ base-vertex.\n pub base_vertex: VertexCount,\n \/\/\/ Instancing configuration.\n pub instances: Option<InstanceParams>,\n \/\/\/ Represents the type of index-buffer used.\n pub buffer: IndexBuffer<R>,\n}\n\nimpl<R: Resources> Slice<R> {\n \/\/\/ Creates a new `Slice` to match the supplied vertex buffer, from start to end, in order.\n pub fn new_match_vertex_buffer<V>(vbuf: &handle::Buffer<R, V>) -> Self\n where V: pso::buffer::Structure<Format> {\n Slice {\n start: 0,\n end: vbuf.len() as u32,\n base_vertex: 0,\n instances: None,\n buffer: IndexBuffer::Auto,\n }\n }\n\n \/\/\/ Calculates the number of primitives of the specified type in this `Slice`.\n pub fn get_prim_count(&self, prim: Primitive) -> u32 {\n use core::Primitive as p;\n let nv = (self.end - self.start) as u32;\n match prim {\n p::PointList => nv,\n p::LineList => nv \/ 2,\n p::LineStrip => (nv-1),\n p::TriangleList => nv \/ 3,\n p::TriangleStrip => (nv-2) \/ 3,\n p::LineListAdjacency => nv \/ 4,\n p::LineStripAdjacency => (nv-3),\n p::TriangleListAdjacency => nv \/ 6,\n p::TriangleStripAdjacency => (nv-4) \/ 2,\n p::PatchList(num) => nv \/ (num as u32),\n }\n }\n\n \/\/\/ Divides one slice into two at an index.\n \/\/\/\n \/\/\/ The first will contain the range in the index-buffer [self.start, mid) (excluding the index mid itself) and the\n \/\/\/ second will contain the range [mid, self.end).\n pub fn split_at(&self, mid: VertexCount) -> (Self, Self) {\n let mut first = self.clone();\n let mut second = self.clone();\n first.end = mid;\n second.start = mid;\n\n (first, second)\n }\n}\n\n\/\/\/ Type of index-buffer used in a Slice.\n\/\/\/\n\/\/\/ The `Auto` variant represents a hypothetical index-buffer from 0 to infinity. In other words,\n\/\/\/ all vertices get processed in order. Do note that the `Slice`'s `start` and `end` restrictions\n\/\/\/ still apply for this variant. 
To render every vertex in the `VertexBuffer`, you would set\n\/\/\/ `start` to 0, and `end` to the `VertexBuffer`'s length.\n\/\/\/\n\/\/\/ The `Index*` variants represent an actual `Buffer` with a list of vertex-indices. The numeric\n\/\/\/ suffix specifies the amount of bits to use per index. Each of these also contains a\n\/\/\/ base-vertex. This is the index of the first vertex in the `VertexBuffer`. This value will be\n\/\/\/ added to every index in the index-buffer, effectively moving the start of the `VertexBuffer` to\n\/\/\/ this base-vertex.\n\/\/\/\n\/\/\/ # Construction & Handling\n\/\/\/ A `IndexBuffer` can be constructed using the `IntoIndexBuffer` trait, from either a slice or a\n\/\/\/ `Buffer` of integers, using a factory.\n\/\/\/\n\/\/\/ An `IndexBuffer` is exclusively used to create `Slice`s.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub enum IndexBuffer<R: Resources> {\n \/\/\/ Represents a hypothetical index-buffer from 0 to infinity. In other words, all vertices\n \/\/\/ get processed in order.\n Auto,\n \/\/\/ An index-buffer with unsigned 16 bit indices.\n Index16(handle::Buffer<R, u16>),\n \/\/\/ An index-buffer with unsigned 32 bit indices.\n Index32(handle::Buffer<R, u32>),\n}\n\nimpl<R: Resources> Default for IndexBuffer<R> {\n fn default() -> Self {\n IndexBuffer::Auto\n }\n}\n\/\/\/ A helper trait to create `IndexBuffers` from different kinds of data.\npub trait IntoIndexBuffer<R: Resources> {\n \/\/\/ Turns self into an `IndexBuffer`.\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, factory: &mut F) -> IndexBuffer<R>;\n}\n\nimpl<R: Resources> IntoIndexBuffer<R> for IndexBuffer<R> {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n self\n }\n}\n\nimpl<R: Resources> IntoIndexBuffer<R> for () {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n IndexBuffer::Auto\n }\n}\n\nmacro_rules! 
impl_index_buffer {\n ($prim_ty:ty, $buf_ty:ident) => (\n impl<R: Resources> IntoIndexBuffer<R> for handle::Buffer<R, $prim_ty> {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, _: &mut F) -> IndexBuffer<R> {\n IndexBuffer::$buf_ty(self)\n }\n }\n\n impl<'s, R: Resources> IntoIndexBuffer<R> for &'s [$prim_ty] {\n fn into_index_buffer<F: Factory<R> + ?Sized>(self, factory: &mut F) -> IndexBuffer<R> {\n factory.create_buffer_immutable(self, buffer::Role::Index, Bind::empty())\n .unwrap()\n .into_index_buffer(factory)\n }\n }\n )\n}\n\nimpl_index_buffer!(u16, Index16);\nimpl_index_buffer!(u32, Index32);\n<|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n pub fn exec(&mut self, Inst(n, cmd): Inst) {\n use super::Mode::*;\n use super::PrimitiveMode::*;\n use super::CommandMode::*;\n match self.cursor().mode {\n Command(Normal) => match cmd {\n 'i' => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n\n },\n 'h' => self.left(n as usize),\n 'j' => self.down(n as usize),\n 'k' => self.up(n as usize),\n 'l' => self.right(n as usize),\n 'J' => self.down(15),\n 'K' => self.up(15),\n 'x' => self.delete(),\n 'X' => {\n self.previous();\n self.delete();\n },\n '$' => self.cursor_mut().x = self.text[self.y()].len(),\n '0' => self.cursor_mut().x = 0,\n ' ' => self.next(),\n _ => {},\n },\n Primitive(Insert(_)) => {\n self.insert(cmd);\n },\n }\n }\n}\n<commit_msg>Add r command<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n pub fn exec(&mut self, Inst(n, cmd): Inst) {\n use super::Mode::*;\n use super::PrimitiveMode::*;\n use super::CommandMode::*;\n match self.cursor().mode {\n Command(Normal) => match cmd {\n 'i' => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n\n },\n 'h' => self.left(n as usize),\n 'j' => self.down(n as usize),\n 'k' => self.up(n as usize),\n 'l' => self.right(n as usize),\n 'J' => self.down(15),\n 'K' => self.up(15),\n 'x' => self.delete(),\n 'X' => {\n self.previous();\n self.delete();\n },\n '$' => self.cursor_mut().x = self.text[self.y()].len(),\n '0' => self.cursor_mut().x = 0,\n 'r' => {\n if let EventOption::Key(k) = self.window.poll()\n .unwrap_or(Event::new())\n .to_option() {\n let x = self.x();\n let y = self.y();\n self.text[y][x] = k.character;\n }\n },\n ' ' => self.next(),\n _ => {},\n },\n Primitive(Insert(_)) => {\n self.insert(cmd);\n },\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added mod super example<commit_after>fn function() {\n println!(\"called `function()`\");\n}\n\nmod my {\n pub fn indirect_call() {\n \/\/ Let's access all the functions named `function` from this scope\n print!(\"called `my::indirect_call()`, that\\n> \");\n\n \/\/ `my::function` can be called directly\n function();\n\n {\n \/\/ This will bind to the `cool::function` in the *crate* scope\n \/\/ In this case the crate scope is the outermost scope\n use cool::function as root_cool_function;\n\n print!(\"> \");\n root_cool_function();\n }\n\n {\n \/\/ `self` refers to the current module scope, in this case: `my`\n use self::cool::function as my_cool_function;\n\n print!(\"> \");\n my_cool_function();\n }\n\n {\n \/\/ `super` refers to the parent scope, i.e. 
outside of the `my`\n \/\/ module\n use super::function as root_function;\n\n print!(\"> \");\n root_function();\n }\n }\n\n fn function() {\n println!(\"called `my::function()`\");\n }\n\n mod cool {\n pub fn function() {\n println!(\"called `my::cool::function()`\");\n }\n }\n}\n\nmod cool {\n pub fn function() {\n println!(\"called `cool::function()`\");\n }\n}\n\nfn main() {\n my::indirect_call();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix serverbound chat message field type and uncomment that packet<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: 'start-time' cannot be None<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Packet::write method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add utility functions for determine the type of a dbpage<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use Vec as the container for database page, and use BufReader to read header<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove DeviceRef in favor of a pointer.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[Rust] HowMuch (CodeWars)<commit_after>fn how_much(mut m: i32, mut n: i32) -> Vec<(String, String, String)> {\n if m > n {\n std::mem::swap(&mut m, &mut n);\n }\n\n let mut result: Vec<(String, String, String)> = Vec::new();\n let mut c: f32;\n let mut b: f32;\n\n for i in m..n+1 {\n c = (i as f32-1f32)\/9f32;\n b = (i as f32-2f32)\/7f32;\n if c < 0f32 || b < 0f32 {\n continue;\n }\n if c.fract() == 0f32 && b.fract() == 0f32 {\n let r_tuple = (format!(\"M: {}\", i),\n format!(\"B: {}\", b as i32),\n format!(\"C: {}\", c as i32));\n\n result.push(r_tuple);\n }\n }\n\n return result;\n}\n\nfn testing(m: i32, n: i32, exp: Vec<(&str, &str, &str)>) -> () {\n let ans: String = format!(\"{:?}\", how_much(m, n));\n let sol: String = format!(\"{:?}\", exp);\n assert_eq!(ans, sol)\n}\nfn tests() {\n testing(1, 100, vec![(\"M: 37\", \"B: 5\", \"C: 4\"), (\"M: 100\", \"B: 14\", \"C: 11\")]);\n testing(1000, 1100, vec![(\"M: 1045\", \"B: 149\", \"C: 116\")]);\n testing(10000, 9950, vec![(\"M: 9991\", \"B: 1427\", \"C: 1110\")]);\n testing(0, 200, vec![(\"M: 37\", \"B: 5\", \"C: 4\"), (\"M: 100\", \"B: 14\", \"C: 11\"), (\"M: 163\", \"B: 23\", \"C: 18\")]);\n testing(2950, 2950, vec![]);\n}\n\nfn main() {\n tests();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a doctest and explain a particular weird quoting rule.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ TODO:\n\/\/ - Simplify using instruction iterators\n\/\/ - Make movement mode\n\/\/ - Record modifiers\n\nmod editor;\npub use self::editor::*;\n\nmod mode;\npub use self::mode::*;\n\nmod movement;\npub use self::movement::*;\n\nmod cursor;\npub use self::cursor::*;\n\nmod insert;\npub use self::insert::*;\n\nmod exec;\npub use self::exec::*;\n\nuse redox::*;\n\npub fn main() {\n let mut window = Window::new((rand() % 400 + 50) as isize, \n\t\t\t\t\t\t\t\t (rand() % 300 + 50) as isize, \n\t\t\t\t\t\t\t\t 576, \n\t\t\t\t\t\t\t\t 400, \n\t\t\t\t\t\t\t\t &\"Sodium\").unwrap(); \n\n let mut editor = Editor::new();\n\n let mut inp = window.event_iter().map(|x| {\n x.to_option()\n }).inst_iter(&editor.cursor().mode);\n\n for i in inp {\n editor.exec(i, &mut inp);\n }\n window.set([255, 255, 255, 255]);\n\n\n}\n\npub fn redraw() {\n \/*\n \/\/ Redraw window\n window.set([255, 255, 255, 255]);\n\n for (y, row) in editor.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n window.char(8 * 
(y - editor.scroll_y) as isize, 16 * (x - editor.scroll_x) as isize, *c, [128, 128, 128, 255]);\n if editor.cursor().x == x && editor.cursor().y == y {\n window.char(8 * (y - editor.scroll_y) as isize, 16 * (x - editor.scroll_x) as isize, '_', [128, 128, 128, 255]);\n }\n }\n }\n *\/\n}\n<commit_msg>Convert tabs to spaces and remove some tailing spaces<commit_after>\/\/ TODO:\n\/\/ - Simplify using instruction iterators\n\/\/ - Make movement mode\n\/\/ - Record modifiers\n\nmod editor;\npub use self::editor::*;\n\nmod mode;\npub use self::mode::*;\n\nmod movement;\npub use self::movement::*;\n\nmod cursor;\npub use self::cursor::*;\n\nmod insert;\npub use self::insert::*;\n\nmod exec;\npub use self::exec::*;\n\nuse redox::*;\n\npub fn main() {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &\"Sodium\").unwrap();\n\n let mut editor = Editor::new();\n\n let mut inp = window.event_iter().map(|x| {\n x.to_option()\n }).inst_iter(&editor.cursor().mode);\n\n for i in inp {\n editor.exec(i, &mut inp);\n }\n window.set([255, 255, 255, 255]);\n\n\n}\n\npub fn redraw() {\n \/*\n \/\/ Redraw window\n window.set([255, 255, 255, 255]);\n\n for (y, row) in editor.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n window.char(8 * (y - editor.scroll_y) as isize, 16 * (x - editor.scroll_x) as isize, *c, [128, 128, 128, 255]);\n if editor.cursor().x == x && editor.cursor().y == y {\n window.char(8 * (y - editor.scroll_y) as isize, 16 * (x - editor.scroll_x) as isize, '_', [128, 128, 128, 255]);\n }\n }\n }\n *\/\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Game window operations.\n\nuse input::InputEvent;\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32, ..2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\n\/\/\/ Implemented by window back-end.\npub trait Window {\n \/\/\/ Get the window's settings.\n fn get_settings<'a>(&'a self) -> &'a WindowSettings;\n\n \/\/\/ Returns true if the window should close.\n fn should_close(&self) -> bool;\n\n \/\/\/ Inform the window that it should close.\n fn close(&mut self);\n\n \/\/\/ Get the window's size\n fn get_size(&self) -> (u32, u32) {\n let settings = self.get_settings();\n (settings.size[0], settings.size[1])\n }\n\n \/\/\/ Get the size in drawing coordinates.\n fn get_draw_size(&self) -> (u32, u32) {\n self.get_size()\n }\n\n \/\/\/ Swap buffers.\n fn swap_buffers(&self) {}\n\n \/\/\/ When the cursor is captured,\n \/\/\/ it is hidden and the cursor position does not change.\n \/\/\/ Only relative mouse motion is registered.\n fn capture_cursor(&mut self, _enabled: bool) {}\n\n \/\/\/ Poll a event from window's event queue.\n fn poll_event(&mut self) -> Option<InputEvent> { None }\n}\n\n\/\/\/ An implementation of GameWindow that represents running without a window at all\npub struct NoWindow {\n settings: WindowSettings,\n should_close: bool\n}\n\nimpl 
NoWindow {\n \/\/\/ Create a new nonexistant game window\n pub fn new(settings: WindowSettings) -> NoWindow {\n NoWindow {\n settings: settings,\n should_close: false\n }\n }\n}\n\nimpl Window for NoWindow {\n fn get_settings<'a>(&'a self) -> &'a WindowSettings {\n &self.settings\n }\n\n fn should_close(&self) -> bool {\n self.should_close\n }\n\n fn close(&mut self) {\n self.should_close = true\n }\n\n fn get_size(&self) -> (u32, u32) {\n (0, 0)\n }\n}\n<commit_msg>Move Window.get_size into implementations<commit_after>\/\/! Game window operations.\n\nuse input::InputEvent;\n\n\/\/\/ Settings for window behavior.\npub struct WindowSettings {\n \/\/\/ Title of the window.\n pub title: String,\n \/\/\/ The size of the window.\n pub size: [u32, ..2],\n \/\/\/ Number samples per pixel (anti-aliasing).\n pub samples: u8,\n \/\/\/ If true, the window is fullscreen.\n pub fullscreen: bool,\n \/\/\/ If true, exit when pressing Esc.\n pub exit_on_esc: bool,\n}\n\nimpl WindowSettings {\n \/\/\/ Gets default settings.\n \/\/\/\n \/\/\/ This exits the window when pressing `Esc`.\n \/\/\/ The background color is set to black.\n pub fn default() -> WindowSettings {\n WindowSettings {\n title: \"Piston\".to_string(),\n size: [640, 480],\n samples: 0,\n fullscreen: false,\n exit_on_esc: true,\n }\n }\n}\n\n\n\/\/\/ Implemented by window back-end.\npub trait Window {\n \/\/\/ Get the window's settings.\n fn get_settings<'a>(&'a self) -> &'a WindowSettings;\n\n \/\/\/ Returns true if the window should close.\n fn should_close(&self) -> bool;\n\n \/\/\/ Inform the window that it should close.\n fn close(&mut self);\n\n \/\/\/ Get the window's size\n fn get_size(&self) -> (u32, u32);\n\n \/\/\/ Get the size in drawing coordinates.\n fn get_draw_size(&self) -> (u32, u32) {\n self.get_size()\n }\n\n \/\/\/ Swap buffers.\n fn swap_buffers(&self) {}\n\n \/\/\/ When the cursor is captured,\n \/\/\/ it is hidden and the cursor position does not change.\n \/\/\/ Only relative mouse motion is registered.\n fn capture_cursor(&mut self, _enabled: bool) {}\n\n \/\/\/ Poll a event from window's event queue.\n fn poll_event(&mut self) -> Option<InputEvent> { None }\n}\n\n\/\/\/ An implementation of GameWindow that represents running without a window at all\npub struct NoWindow {\n settings: WindowSettings,\n should_close: bool\n}\n\nimpl NoWindow {\n \/\/\/ Create a new nonexistant game window\n pub fn new(settings: WindowSettings) -> NoWindow {\n NoWindow {\n settings: settings,\n should_close: false\n }\n }\n}\n\nimpl Window for NoWindow {\n fn get_settings<'a>(&'a self) -> &'a WindowSettings {\n &self.settings\n }\n\n fn should_close(&self) -> bool {\n self.should_close\n }\n\n fn close(&mut self) {\n self.should_close = true\n }\n\n fn get_size(&self) -> (u32, u32) {\n (0, 0)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>begin mod worker<commit_after>\/\/\n\/\/ Copyright (c) 2016, Boris Popov <popov@whitekefir.ru>\n\/\/\n\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\/\/\n\nextern crate libc;\nuse self::libc::sleep;\n\npub fn main() {\n \/\/\n \/\/TODO work\n \/\/\n unsafe {\n sleep(15);\n }\n}\n\/\/mod program;\n\/\/use program::{NAME, VERSION};\n\n\/\/mod logger;\n\/\/use logger::{Logger};\n\n\/\/use forker::hello;\n\/\/use forker::hello;\n\/\/use libc;\n\/\/use libc::fork;\n\/\/use libc::exit;\n\n\n \/\/let logger = logger::Logger{name: NAME, version: VERSION};\n \/\/logger.start();\n \/\/let logger = Logger::new(NAME, VERSION);\n \/\/let logger = Logger::new(NAME, VERSION);\n\n\n \n \/\/println!(\"{}\", NAME);\n \/\/let pi = program::Info{name: NAME, version: VERSION};\n \/\/pi.init();\n \/\/println!(\"{}\", pi.name());\n \/\/println!(\"{}\", pi.version());\n \/*\n println!(\"Hello, world!\");\n hello();\n\n let x: i32;\n unsafe {\n x = libc::fork();\n }\n\n if x == 0 {\n unsafe {\n libc::sleep(10);\n libc::exit(2);\n }\n }\n \/\/\n unsafe {\n sleep(5);\n }\n \n \/\/pub unsafe extern fn fork() -> pid_t\n println!(\"Exit!\");\n *\/\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ensures that public symbols are not removed completely\n#![crate_type = \"lib\"]\n\/\/ we can compile to a variety of platforms, because we don't need\n\/\/ cross-compiled standard libraries.\n#![feature(no_core)]\n#![no_core]\n\n#![feature(repr_simd, simd_ffi, link_llvm_intrinsics, lang_items)]\n\n\n#[repr(C)]\n#[derive(Copy)]\n#[repr(simd)]\npub struct f32x4(f32, f32, f32, f32);\n\n\nextern {\n #[link_name = \"llvm.sqrt.v4f32\"]\n fn vsqrt(x: f32x4) -> f32x4;\n}\n\npub fn foo(x: f32x4) -> f32x4 {\n unsafe {vsqrt(x)}\n}\n\n#[repr(C)]\n#[derive(Copy)]\n#[repr(simd)]\npub struct i32x4(i32, i32, i32, i32);\n\n\nextern {\n \/\/ _mm_sll_epi32\n #[cfg(any(target_arch = \"x86\",\n target_arch = \"x86-64\"))]\n #[link_name = \"llvm.x86.sse2.psll.d\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n\n \/\/ vmaxq_s32\n #[cfg(any(target_arch = \"arm\"))]\n #[link_name = \"llvm.arm.neon.vmaxs.v4i32\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n \/\/ vmaxq_s32\n #[cfg(any(target_arch = \"aarch64\"))]\n #[link_name = \"llvm.aarch64.neon.maxs.v4i32\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n\n \/\/ just some substitute foreign symbol, not an LLVM intrinsic; so\n \/\/ we still get type checking, but not as detailed as (ab)using\n \/\/ LLVM.\n #[cfg(not(any(target_arch = \"x86\",\n target_arch = \"x86-64\",\n target_arch = \"arm\",\n target_arch = \"aarch64\")))]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n}\n\npub fn bar(a: i32x4, b: i32x4) -> i32x4 {\n unsafe {integer(a, b)}\n}\n\n#[lang = \"sized\"]\npub trait Sized { }\n\n#[lang = \"copy\"]\npub trait Copy { }\n\npub mod marker {\n pub use Copy;\n}\n<commit_msg>Auto merge of #30547 - mmcco:cfg, r=sanxiyn<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ensures that public symbols are not removed completely\n#![crate_type = \"lib\"]\n\/\/ we can compile to a variety of platforms, because we don't need\n\/\/ cross-compiled standard libraries.\n#![feature(no_core)]\n#![no_core]\n\n#![feature(repr_simd, simd_ffi, link_llvm_intrinsics, lang_items)]\n\n\n#[repr(C)]\n#[derive(Copy)]\n#[repr(simd)]\npub struct f32x4(f32, f32, f32, f32);\n\n\nextern {\n #[link_name = \"llvm.sqrt.v4f32\"]\n fn vsqrt(x: f32x4) -> f32x4;\n}\n\npub fn foo(x: f32x4) -> f32x4 {\n unsafe {vsqrt(x)}\n}\n\n#[repr(C)]\n#[derive(Copy)]\n#[repr(simd)]\npub struct i32x4(i32, i32, i32, i32);\n\n\nextern {\n \/\/ _mm_sll_epi32\n #[cfg(any(target_arch = \"x86\",\n target_arch = \"x86-64\"))]\n #[link_name = \"llvm.x86.sse2.psll.d\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n\n \/\/ vmaxq_s32\n #[cfg(target_arch = \"arm\")]\n #[link_name = \"llvm.arm.neon.vmaxs.v4i32\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n \/\/ vmaxq_s32\n #[cfg(target_arch = \"aarch64\")]\n #[link_name = \"llvm.aarch64.neon.maxs.v4i32\"]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n\n \/\/ just some substitute foreign symbol, not an LLVM intrinsic; so\n \/\/ we still get type checking, but not as detailed as (ab)using\n \/\/ LLVM.\n #[cfg(not(any(target_arch = \"x86\",\n target_arch = \"x86-64\",\n target_arch = \"arm\",\n target_arch = \"aarch64\")))]\n fn integer(a: i32x4, b: i32x4) -> i32x4;\n}\n\npub fn bar(a: i32x4, b: i32x4) -> i32x4 {\n unsafe {integer(a, b)}\n}\n\n#[lang = \"sized\"]\npub trait Sized { }\n\n#[lang = \"copy\"]\npub trait Copy { }\n\npub mod marker {\n pub use Copy;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test that native mods don't need to declare an ABI<commit_after>\/\/ ABI is cdecl by default\n\nnative mod rustrt {\n fn unsupervise();\n}\n\nfn main() {\n rustrt::unsupervise();\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Issue #52129: ICE when trying to document the `quote` proc-macro from proc_macro\n\n\/\/ As of this writing, we don't currently attempt to document proc-macros. However, we shouldn't\n\/\/ crash when we try.\n\nextern crate proc_macro;\n\npub use proc_macro::*;\n<commit_msg>ignore rustdoc\/doc-proc-macro on stage1<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-stage1\n\n\/\/ Issue #52129: ICE when trying to document the `quote` proc-macro from proc_macro\n\n\/\/ As of this writing, we don't currently attempt to document proc-macros. However, we shouldn't\n\/\/ crash when we try.\n\nextern crate proc_macro;\n\npub use proc_macro::*;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>More formatting tweaks<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chore: #[allow(unused_variables)]<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #17746<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #17746\n\nfn main() {}\n\nstruct A;\n\nimpl A {\n fn b(&mut self) {\n self.a()\n }\n}\n\ntrait Foo {\n fn dummy(&self) {}\n}\ntrait Bar {\n fn a(&self);\n}\n\nimpl Foo for A {}\nimpl<T> Bar for T where T: Foo {\n fn a(&self) {}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[derive(Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Array<T> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n\/\/ FIXME(#7622): merge with `Array` once `[T; N]: Clone` where `T: Clone`\n#[derive(Clone, Copy)]\nstruct CopyArray<T: Copy> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Fn<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: fn(),\n f01: fn(A),\n f02: fn(A, B),\n f03: fn(A, B, C),\n f04: fn(A, B, C, D),\n f05: fn(A, B, C, D, E),\n f06: fn(A, B, C, D, E, F),\n f07: fn(A, B, C, D, E, F, G),\n f08: fn(A, B, C, D, E, F, G, H),\n f09: fn(A, B, C, D, E, F, G, H, I),\n f10: fn(A, B, C, D, E, F, G, H, I, J),\n f11: fn(A, B, C, D, E, F, G, H, I, J, K),\n f12: fn(A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\n#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Tuple<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: (),\n f01: (A),\n f02: (A, B),\n f03: (A, B, C),\n f04: (A, B, C, D),\n f05: (A, B, C, D, E),\n f06: (A, B, C, D, E, F),\n f07: (A, B, C, D, E, F, G),\n f08: (A, B, C, D, E, F, G, H),\n f09: (A, B, C, D, E, F, G, H, I),\n f10: (A, B, C, D, E, F, G, H, I, J),\n f11: (A, B, C, D, E, F, G, H, I, J, K),\n f12: (A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\nfn main() {}\n<commit_msg>update FIXME(#7622) to point to issue 44580 (tracking issue for const generics)<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[derive(Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Array<T> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n\/\/ FIXME(#44580): merge with `Array` once `[T; N]: Clone` where `T: Clone`\n#[derive(Clone, Copy)]\nstruct CopyArray<T: Copy> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Fn<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: fn(),\n f01: fn(A),\n f02: fn(A, B),\n f03: fn(A, B, C),\n f04: fn(A, B, C, D),\n f05: fn(A, B, C, D, E),\n f06: fn(A, B, C, D, E, F),\n f07: fn(A, B, C, D, E, F, G),\n f08: fn(A, B, C, D, E, F, G, H),\n f09: fn(A, B, C, D, E, F, G, H, I),\n f10: fn(A, B, C, D, E, F, G, H, I, J),\n f11: fn(A, B, C, D, E, F, G, H, I, J, K),\n f12: fn(A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\n#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Tuple<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: (),\n f01: (A),\n f02: (A, B),\n f03: (A, B, C),\n f04: (A, B, C, D),\n f05: (A, B, C, D, E),\n f06: (A, B, C, D, E, F),\n f07: (A, B, C, D, E, F, G),\n f08: (A, B, C, D, E, F, G, H),\n f09: (A, B, C, D, E, F, G, H, I),\n f10: (A, B, C, D, E, F, G, H, I, J),\n f11: (A, B, C, D, E, F, G, H, I, J, K),\n f12: (A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test case for #30276<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Test([i32]);\nfn main() {\n let _x: fn(_) -> Test = Test;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix match same arms bug found by clippy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Day 21 solution<commit_after>\/\/ advent21.rs\n\/\/ RPG\n\n#[macro_use] extern crate scan_fmt;\n\nuse std::io;\nuse std::cmp::max;\n\nfn main() {\n let mut input = String::new();\n let mut boss = Stats {hit_points: 0, damage: 0, armor: 0};\n\n io::stdin().read_line(&mut input).ok().expect(\"Failed to read line\");\n let hit_points = scan_fmt!(&input, \"Hit Points: {}\", i32);\n boss.hit_points = hit_points.unwrap();\n\n input.clear();\n io::stdin().read_line(&mut input).ok().expect(\"Failed to read line\");\n let damage = scan_fmt!(&input, \"Damage: {}\", i32);\n boss.damage = damage.unwrap();\n\n input.clear();\n io::stdin().read_line(&mut input).ok().expect(\"Failed to read line\");\n let armor = scan_fmt!(&input, \"Armor: {}\", i32);\n boss.armor = armor.unwrap();\n\n\n let cost = find_least_gold(&boss);\n println!(\"Least gold needed: {}\", cost);\n\n let cost2 = find_most_gold(&boss);\n println!(\"Most gold needed: {}\", cost2);\n\n}\n\nstruct Stats {\n hit_points: i32,\n damage: i32,\n armor: i32,\n}\n\nfn does_player_win(player: &Stats, boss: &Stats) -> bool {\n let mut player_hp = player.hit_points;\n let mut boss_hp = boss.hit_points;\n\n let player_damage = max(1, player.damage - boss.armor);\n let boss_damage = max(1, boss.damage - player.armor);\n\n loop {\n if player_hp <= 0 {\n return false;\n }\n boss_hp -= player_damage;\n\n if boss_hp <= 0 {\n return true;\n }\n\n player_hp -= boss_damage;\n }\n}\n\n#[derive(Clone)]\nstruct Item {\n cost: i32,\n damage: i32,\n armor: i32,\n}\n\nfn add_items(item1: &Item, item2: &Item) -> Item {\n Item {\n cost: item1.cost + item2.cost,\n damage: item1.damage + item2.damage,\n armor: item1.armor + item2.armor,\n }\n}\n\nfn find_item_combinations() -> Vec<Item> {\n let weapons = vec![\n Item {cost: 8, damage: 4, armor: 0},\n Item {cost: 10, damage: 5, armor: 0},\n Item {cost: 25, damage: 6, armor: 0},\n Item {cost: 40, damage: 7, armor: 0},\n Item {cost: 74, damage: 8, armor: 0},\n ];\n let armors = vec![\n Item {cost: 13, damage: 0, armor: 1},\n Item {cost: 31, damage: 0, armor: 2},\n Item {cost: 53, damage: 0, armor: 3},\n Item {cost: 75, damage: 0, armor: 4},\n Item {cost: 102, damage: 0, armor: 5},\n ];\n let rings = vec![\n Item {cost: 25, damage: 1, armor: 0},\n Item {cost: 50, damage: 2, armor: 0},\n Item {cost: 100, damage: 3, armor: 0},\n Item {cost: 20, damage: 0, armor: 1},\n Item {cost: 40, damage: 0, armor: 2},\n Item {cost: 80, damage: 0, armor: 3},\n ];\n\n let mut combos = Vec::new();\n\n \/\/ exactly 1 weapon\n \/\/ 0 or 1 armor\n \/\/ 0 - 2 rings\n for w in &weapons {\n \/\/ 0 armor, 0 rings\n combos.push(w.clone());\n\n \/\/ 0 armor, 0 - 2 rings\n enumerate_rings(&mut combos, &rings, &w);\n\n \/\/ 1 armor, 0 - 2 rings\n for a in &armors {\n let base = add_items(&w, &a);\n enumerate_rings(&mut combos, &rings, &base);\n }\n }\n \n combos\n}\n\nfn stats_from_items(item_total: &Item) -> Stats {\n Stats {hit_points: 100, damage: item_total.damage, armor: item_total.armor}\n}\n\n\/\/ Find least gold needed to defeat a given boss\nfn find_least_gold(boss: &Stats) -> i32 {\n let combos = find_item_combinations();\n\n combos.iter()\n .filter(|combo| does_player_win(&stats_from_items(&combo), boss))\n 
.map(|combo| combo.cost)\n .min().unwrap()\n}\n\nfn enumerate_rings(combos: &mut Vec<Item>, rings: &[Item], base: &Item) {\n \/\/ 0 - 2 rings\n for bit_mask in 0..(1 << rings.len()) {\n if count_bits(bit_mask) <= 2 {\n let ring_sum = rings.iter().enumerate()\n .filter(|&(i, _)| bit_mask & (1 << i) != 0)\n .fold(base.clone(), |acc, (_, r)| add_items(&acc, r));\n combos.push(ring_sum);\n }\n }\n}\n\n\n\/\/ copied from Day 17\nfn count_bits(x: u32) -> u32 {\n let mut total = 0;\n let mut val = x;\n\n while val > 0 {\n if (val & 1) != 0 {\n total += 1;\n }\n val >>= 1;\n }\n\n total\n}\n\n#[test]\nfn test_does_player_win() {\n let player = Stats{hit_points: 8, damage: 5, armor: 5};\n let boss = Stats{hit_points: 12, damage: 7, armor: 2};\n let boss2 = Stats{hit_points: 12, damage: 7, armor: 3};\n\n\n assert!(does_player_win(&player, &boss));\n assert!(!does_player_win(&player, &boss2));\n}\n\n\/\/ part 2\n\n\/\/ Find most gold you can spend and still lose to a given boss\nfn find_most_gold(boss: &Stats) -> i32 {\n let combos = find_item_combinations();\n\n combos.iter()\n .filter(|combo| !does_player_win(&stats_from_items(&combo), boss))\n .map(|combo| combo.cost)\n .max().unwrap()\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove log<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add a self-test that actually inspects a mount<commit_after>\/\/! Test that this very testfile is accessible in our own mount.\n\n#![feature(path)]\n#![feature(path_ext)]\n\nextern crate gitfs;\n\nuse std::fs::PathExt;\nuse std::path::Path;\n\n#[test]\nfn mounted_test_exists() {\n let git_dir = Path::new(\".git\");\n let mount = git_dir.join(\"fs\");\n let file = mount.join(\"HEAD\").join(file!());\n\n \/\/ NB: If this isn't a git checkout, we'll fail here, sorry!\n let fs = gitfs::GitFS::new(&git_dir).unwrap();\n\n assert!(!file.exists(), \"{:?} shouldn't exist before mounting!\", file);\n\n let session = fs.spawn_mount(&mount).unwrap();\n\n assert!(file.exists(), \"{:?} should exist in the mount!\", file);\n\n drop(session);\n\n assert!(!file.exists(), \"{:?} shouldn't exist after unmounting!\", file);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate build_helper;\nextern crate gcc;\n\nuse std::env;\nuse std::process::Command;\nuse build_helper::{run, native_lib_boilerplate};\n\nfn main() {\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n if cfg!(feature = \"backtrace\") && !target.contains(\"apple\") && !target.contains(\"msvc\") &&\n !target.contains(\"emscripten\") && !target.contains(\"fuchsia\") && !target.contains(\"redox\") {\n let _ = build_libbacktrace(&host, &target);\n }\n\n if target.contains(\"linux\") {\n if target.contains(\"android\") {\n println!(\"cargo:rustc-link-lib=dl\");\n println!(\"cargo:rustc-link-lib=log\");\n println!(\"cargo:rustc-link-lib=gcc\");\n } else if !target.contains(\"musl\") || target.contains(\"mips\") {\n println!(\"cargo:rustc-link-lib=dl\");\n println!(\"cargo:rustc-link-lib=rt\");\n println!(\"cargo:rustc-link-lib=pthread\");\n }\n } else if target.contains(\"freebsd\") {\n println!(\"cargo:rustc-link-lib=execinfo\");\n println!(\"cargo:rustc-link-lib=pthread\");\n } else if target.contains(\"dragonfly\") || target.contains(\"bitrig\") ||\n target.contains(\"netbsd\") || target.contains(\"openbsd\") {\n println!(\"cargo:rustc-link-lib=pthread\");\n } else if target.contains(\"solaris\") {\n println!(\"cargo:rustc-link-lib=socket\");\n println!(\"cargo:rustc-link-lib=posix4\");\n println!(\"cargo:rustc-link-lib=pthread\");\n } else if target.contains(\"apple-darwin\") {\n println!(\"cargo:rustc-link-lib=System\");\n\n \/\/ res_init and friends require -lresolv on macOS\/iOS.\n \/\/ See #41582 and http:\/\/blog.achernya.com\/2013\/03\/os-x-has-silly-libsystem.html\n println!(\"cargo:rustc-link-lib=resolv\");\n } else if target.contains(\"apple-ios\") {\n println!(\"cargo:rustc-link-lib=System\");\n println!(\"cargo:rustc-link-lib=objc\");\n println!(\"cargo:rustc-link-lib=framework=Security\");\n println!(\"cargo:rustc-link-lib=framework=Foundation\");\n println!(\"cargo:rustc-link-lib=resolv\");\n } else if target.contains(\"windows\") {\n println!(\"cargo:rustc-link-lib=advapi32\");\n println!(\"cargo:rustc-link-lib=ws2_32\");\n println!(\"cargo:rustc-link-lib=userenv\");\n println!(\"cargo:rustc-link-lib=shell32\");\n } else if target.contains(\"fuchsia\") {\n \/\/ use system-provided libbacktrace\n if cfg!(feature = \"backtrace\") {\n println!(\"cargo:rustc-link-lib=backtrace\");\n }\n println!(\"cargo:rustc-link-lib=magenta\");\n println!(\"cargo:rustc-link-lib=mxio\");\n println!(\"cargo:rustc-link-lib=launchpad\"); \/\/ for std::process\n }\n}\n\nfn build_libbacktrace(host: &str, target: &str) -> Result<(), ()> {\n let native = native_lib_boilerplate(\"libbacktrace\", \"libbacktrace\", \"backtrace\", \".libs\")?;\n\n let compiler = gcc::Config::new().get_compiler();\n \/\/ only msvc returns None for ar so unwrap is okay\n let ar = build_helper::cc2ar(compiler.path(), target).unwrap();\n let mut cflags = compiler.args().iter().map(|s| s.to_str().unwrap())\n .collect::<Vec<_>>().join(\" \");\n cflags.push_str(\" -fvisibility=hidden\");\n run(Command::new(\"sh\")\n .current_dir(&native.out_dir)\n .arg(native.src_dir.join(\"configure\").to_str().unwrap()\n .replace(\"C:\\\\\", \"\/c\/\")\n .replace(\"\\\\\", \"\/\"))\n .arg(\"--with-pic\")\n .arg(\"--disable-multilib\")\n .arg(\"--disable-shared\")\n .arg(\"--disable-host-shared\")\n .arg(format!(\"--host={}\", 
build_helper::gnu_target(target)))\n .arg(format!(\"--build={}\", build_helper::gnu_target(host)))\n .env(\"CC\", compiler.path())\n .env(\"AR\", &ar)\n .env(\"RANLIB\", format!(\"{} s\", ar.display()))\n .env(\"CFLAGS\", cflags));\n\n run(Command::new(build_helper::make(host))\n .current_dir(&native.out_dir)\n .arg(format!(\"INCDIR={}\", native.src_dir.display()))\n .arg(\"-j\").arg(env::var(\"NUM_JOBS\").expect(\"NUM_JOBS was not set\")));\n Ok(())\n}\n<commit_msg>DNS functions are in libresolv on Solaris, just like on MacOS<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate build_helper;\nextern crate gcc;\n\nuse std::env;\nuse std::process::Command;\nuse build_helper::{run, native_lib_boilerplate};\n\nfn main() {\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let host = env::var(\"HOST\").expect(\"HOST was not set\");\n if cfg!(feature = \"backtrace\") && !target.contains(\"apple\") && !target.contains(\"msvc\") &&\n !target.contains(\"emscripten\") && !target.contains(\"fuchsia\") && !target.contains(\"redox\") {\n let _ = build_libbacktrace(&host, &target);\n }\n\n if target.contains(\"linux\") {\n if target.contains(\"android\") {\n println!(\"cargo:rustc-link-lib=dl\");\n println!(\"cargo:rustc-link-lib=log\");\n println!(\"cargo:rustc-link-lib=gcc\");\n } else if !target.contains(\"musl\") || target.contains(\"mips\") {\n println!(\"cargo:rustc-link-lib=dl\");\n println!(\"cargo:rustc-link-lib=rt\");\n println!(\"cargo:rustc-link-lib=pthread\");\n }\n } else if target.contains(\"freebsd\") {\n println!(\"cargo:rustc-link-lib=execinfo\");\n println!(\"cargo:rustc-link-lib=pthread\");\n } else if target.contains(\"dragonfly\") || target.contains(\"bitrig\") ||\n target.contains(\"netbsd\") || target.contains(\"openbsd\") {\n println!(\"cargo:rustc-link-lib=pthread\");\n } else if target.contains(\"solaris\") {\n println!(\"cargo:rustc-link-lib=socket\");\n println!(\"cargo:rustc-link-lib=posix4\");\n println!(\"cargo:rustc-link-lib=pthread\");\n println!(\"cargo:rustc-link-lib=resolv\");\n } else if target.contains(\"apple-darwin\") {\n println!(\"cargo:rustc-link-lib=System\");\n\n \/\/ res_init and friends require -lresolv on macOS\/iOS.\n \/\/ See #41582 and http:\/\/blog.achernya.com\/2013\/03\/os-x-has-silly-libsystem.html\n println!(\"cargo:rustc-link-lib=resolv\");\n } else if target.contains(\"apple-ios\") {\n println!(\"cargo:rustc-link-lib=System\");\n println!(\"cargo:rustc-link-lib=objc\");\n println!(\"cargo:rustc-link-lib=framework=Security\");\n println!(\"cargo:rustc-link-lib=framework=Foundation\");\n println!(\"cargo:rustc-link-lib=resolv\");\n } else if target.contains(\"windows\") {\n println!(\"cargo:rustc-link-lib=advapi32\");\n println!(\"cargo:rustc-link-lib=ws2_32\");\n println!(\"cargo:rustc-link-lib=userenv\");\n println!(\"cargo:rustc-link-lib=shell32\");\n } else if target.contains(\"fuchsia\") {\n \/\/ use system-provided libbacktrace\n if cfg!(feature = \"backtrace\") {\n println!(\"cargo:rustc-link-lib=backtrace\");\n }\n 
println!(\"cargo:rustc-link-lib=magenta\");\n println!(\"cargo:rustc-link-lib=mxio\");\n println!(\"cargo:rustc-link-lib=launchpad\"); \/\/ for std::process\n }\n}\n\nfn build_libbacktrace(host: &str, target: &str) -> Result<(), ()> {\n let native = native_lib_boilerplate(\"libbacktrace\", \"libbacktrace\", \"backtrace\", \".libs\")?;\n\n let compiler = gcc::Config::new().get_compiler();\n \/\/ only msvc returns None for ar so unwrap is okay\n let ar = build_helper::cc2ar(compiler.path(), target).unwrap();\n let mut cflags = compiler.args().iter().map(|s| s.to_str().unwrap())\n .collect::<Vec<_>>().join(\" \");\n cflags.push_str(\" -fvisibility=hidden\");\n run(Command::new(\"sh\")\n .current_dir(&native.out_dir)\n .arg(native.src_dir.join(\"configure\").to_str().unwrap()\n .replace(\"C:\\\\\", \"\/c\/\")\n .replace(\"\\\\\", \"\/\"))\n .arg(\"--with-pic\")\n .arg(\"--disable-multilib\")\n .arg(\"--disable-shared\")\n .arg(\"--disable-host-shared\")\n .arg(format!(\"--host={}\", build_helper::gnu_target(target)))\n .arg(format!(\"--build={}\", build_helper::gnu_target(host)))\n .env(\"CC\", compiler.path())\n .env(\"AR\", &ar)\n .env(\"RANLIB\", format!(\"{} s\", ar.display()))\n .env(\"CFLAGS\", cflags));\n\n run(Command::new(build_helper::make(host))\n .current_dir(&native.out_dir)\n .arg(format!(\"INCDIR={}\", native.src_dir.display()))\n .arg(\"-j\").arg(env::var(\"NUM_JOBS\").expect(\"NUM_JOBS was not set\")));\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>use std::cmp::Ordering;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::store::Entry;\nuse libimagstore::store::EntryHeader;\nuse libimagstore::store::Result as StoreResult;\n\nuse error::{LinkError, LinkErrorKind};\nuse result::Result;\n\nuse toml::Value;\nuse itertools::Itertools;\n\npub type Link = StoreId;\n\npub trait InternalLinker {\n\n \/\/\/ Get the internal links from the implementor object\n fn get_internal_links(&self) -> Result<Vec<Link>>;\n\n \/\/\/ Set the internal links for the implementor object\n fn set_internal_links(&mut self, links: Vec<&mut Entry>) -> Result<Vec<Link>>;\n\n \/\/\/ Add an internal link to the implementor object\n fn add_internal_link(&mut self, link: &mut Entry) -> Result<()>;\n\n \/\/\/ Remove an internal link from the implementor object\n fn remove_internal_link(&mut self, link: &mut Entry) -> Result<()>;\n\n}\n\nimpl InternalLinker for Entry {\n\n fn get_internal_links(&self) -> Result<Vec<Link>> {\n process_rw_result(self.get_header().read(\"imag.links\"))\n }\n\n \/\/\/ Set the links in a header and return the old links, if any.\n fn set_internal_links(&mut self, links: Vec<&mut Entry>) -> Result<Vec<Link>> {\n let self_location = self.get_location().clone();\n let mut new_links = vec![];\n\n for link in links {\n if let Err(e) = add_foreign_link(link, self_location.clone()) {\n return Err(e);\n }\n let link = link.get_location().clone();\n new_links.push(link);\n }\n\n let new_links = links_into_values(new_links);\n if new_links.iter().any(|o| o.is_none()) {\n return Err(LinkError::new(LinkErrorKind::InternalConversionError, None));\n }\n let new_links = new_links.into_iter().map(|o| o.unwrap()).collect();\n process_rw_result(self.get_header_mut().set(\"imag.links\", Value::Array(new_links)))\n }\n\n fn add_internal_link(&mut self, link: &mut Entry) -> Result<()> {\n let new_link = link.get_location().clone();\n\n add_foreign_link(link, self.get_location().clone())\n .and_then(|_| {\n self.get_internal_links()\n .and_then(|mut links| {\n links.push(new_link);\n 
rewrite_links(self.get_header_mut(), links)\n })\n })\n }\n\n fn remove_internal_link(&mut self, link: &mut Entry) -> Result<()> {\n let own_loc = link.get_location().clone();\n let other_loc = link.get_location().clone();\n\n link.get_internal_links()\n .and_then(|links| {\n let links = links.into_iter().filter(|l| l.clone() != own_loc).collect();\n rewrite_links(self.get_header_mut(), links)\n })\n .and_then(|_| {\n self.get_internal_links()\n .and_then(|links| {\n let links = links.into_iter().filter(|l| l.clone() != other_loc).collect();\n rewrite_links(link.get_header_mut(), links)\n })\n })\n }\n\n}\n\nfn links_into_values(links: Vec<StoreId>) -> Vec<Option<Value>> {\n links\n .into_iter()\n .map(|s| s.to_str().map(String::from))\n .unique()\n .map(|elem| elem.map(Value::String))\n .sorted_by(|a, b| {\n match (a, b) {\n (&Some(Value::String(ref a)), &Some(Value::String(ref b))) => Ord::cmp(a, b),\n (&None, _) | (_, &None) => Ordering::Equal,\n _ => unreachable!()\n }\n })\n}\n\nfn rewrite_links(header: &mut EntryHeader, links: Vec<StoreId>) -> Result<()> {\n let links = links_into_values(links);\n\n if links.iter().any(|o| o.is_none()) {\n \/\/ if any type convert failed we fail as well\n Err(LinkError::new(LinkErrorKind::InternalConversionError, None))\n } else {\n \/\/ I know it is ugly\n let links = links.into_iter().map(|opt| opt.unwrap()).collect();\n let process = header.set(\"imag.links\", Value::Array(links));\n process_rw_result(process).map(|_| ())\n }\n}\n\n\/\/\/ When Linking A -> B, the specification wants us to link back B -> A.\n\/\/\/ This is a helper function which does this.\nfn add_foreign_link(target: &mut Entry, from: StoreId) -> Result<()> {\n target.get_internal_links()\n .and_then(|mut links| {\n links.push(from);\n let links = links_into_values(links);\n if links.iter().any(|o| o.is_none()) {\n Err(LinkError::new(LinkErrorKind::InternalConversionError, None))\n } else {\n let links = links.into_iter().map(|opt| opt.unwrap()).collect();\n process_rw_result(target.get_header_mut().set(\"imag.links\", Value::Array(links)))\n .map(|_| ())\n }\n })\n}\n\nfn process_rw_result(links: StoreResult<Option<Value>>) -> Result<Vec<Link>> {\n if links.is_err() {\n debug!(\"RW action on store failed. 
Generating LinkError\");\n let lerr = LinkError::new(LinkErrorKind::EntryHeaderReadError,\n Some(Box::new(links.unwrap_err())));\n return Err(lerr);\n }\n let links = links.unwrap();\n\n if links.is_none() {\n debug!(\"We got no value from the header!\");\n return Ok(vec![])\n }\n let links = links.unwrap();\n\n let links = {\n match links {\n Value::Array(a) => a,\n _ => {\n debug!(\"We expected an Array for the links, but there was a non-Array!\");\n return Err(LinkError::new(LinkErrorKind::ExistingLinkTypeWrong, None));\n },\n }\n };\n\n if !links.iter().all(|l| is_match!(*l, Value::String(_))) {\n debug!(\"At least one of the Values which were expected in the Array of links is a non-String!\");\n debug!(\"Generating LinkError\");\n return Err(LinkError::new(LinkErrorKind::ExistingLinkTypeWrong, None));\n }\n\n let links : Vec<Link> = links.into_iter()\n .map(|link| {\n match link {\n Value::String(s) => StoreId::from(s),\n _ => unreachable!(),\n }\n })\n .collect();\n\n debug!(\"Ok, the RW action was successful, returning link vector now!\");\n Ok(links)\n}\n\n<commit_msg>Use IntoError to have less noise in the error construction code<commit_after>use std::cmp::Ordering;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::store::Entry;\nuse libimagstore::store::EntryHeader;\nuse libimagstore::store::Result as StoreResult;\nuse libimagerror::into::IntoError;\n\nuse error::LinkErrorKind as LEK;\nuse result::Result;\n\nuse toml::Value;\nuse itertools::Itertools;\n\npub type Link = StoreId;\n\npub trait InternalLinker {\n\n \/\/\/ Get the internal links from the implementor object\n fn get_internal_links(&self) -> Result<Vec<Link>>;\n\n \/\/\/ Set the internal links for the implementor object\n fn set_internal_links(&mut self, links: Vec<&mut Entry>) -> Result<Vec<Link>>;\n\n \/\/\/ Add an internal link to the implementor object\n fn add_internal_link(&mut self, link: &mut Entry) -> Result<()>;\n\n \/\/\/ Remove an internal link from the implementor object\n fn remove_internal_link(&mut self, link: &mut Entry) -> Result<()>;\n\n}\n\nimpl InternalLinker for Entry {\n\n fn get_internal_links(&self) -> Result<Vec<Link>> {\n process_rw_result(self.get_header().read(\"imag.links\"))\n }\n\n \/\/\/ Set the links in a header and return the old links, if any.\n fn set_internal_links(&mut self, links: Vec<&mut Entry>) -> Result<Vec<Link>> {\n let self_location = self.get_location().clone();\n let mut new_links = vec![];\n\n for link in links {\n if let Err(e) = add_foreign_link(link, self_location.clone()) {\n return Err(e);\n }\n let link = link.get_location().clone();\n new_links.push(link);\n }\n\n let new_links = links_into_values(new_links);\n if new_links.iter().any(|o| o.is_none()) {\n return Err(LEK::InternalConversionError.into());\n }\n let new_links = new_links.into_iter().map(|o| o.unwrap()).collect();\n process_rw_result(self.get_header_mut().set(\"imag.links\", Value::Array(new_links)))\n }\n\n fn add_internal_link(&mut self, link: &mut Entry) -> Result<()> {\n let new_link = link.get_location().clone();\n\n add_foreign_link(link, self.get_location().clone())\n .and_then(|_| {\n self.get_internal_links()\n .and_then(|mut links| {\n links.push(new_link);\n rewrite_links(self.get_header_mut(), links)\n })\n })\n }\n\n fn remove_internal_link(&mut self, link: &mut Entry) -> Result<()> {\n let own_loc = link.get_location().clone();\n let other_loc = link.get_location().clone();\n\n link.get_internal_links()\n .and_then(|links| {\n let links = links.into_iter().filter(|l| l.clone() 
!= own_loc).collect();\n rewrite_links(self.get_header_mut(), links)\n })\n .and_then(|_| {\n self.get_internal_links()\n .and_then(|links| {\n let links = links.into_iter().filter(|l| l.clone() != other_loc).collect();\n rewrite_links(link.get_header_mut(), links)\n })\n })\n }\n\n}\n\nfn links_into_values(links: Vec<StoreId>) -> Vec<Option<Value>> {\n links\n .into_iter()\n .map(|s| s.to_str().map(String::from))\n .unique()\n .map(|elem| elem.map(Value::String))\n .sorted_by(|a, b| {\n match (a, b) {\n (&Some(Value::String(ref a)), &Some(Value::String(ref b))) => Ord::cmp(a, b),\n (&None, _) | (_, &None) => Ordering::Equal,\n _ => unreachable!()\n }\n })\n}\n\nfn rewrite_links(header: &mut EntryHeader, links: Vec<StoreId>) -> Result<()> {\n let links = links_into_values(links);\n\n if links.iter().any(|o| o.is_none()) {\n \/\/ if any type convert failed we fail as well\n Err(LEK::InternalConversionError.into())\n } else {\n \/\/ I know it is ugly\n let links = links.into_iter().map(|opt| opt.unwrap()).collect();\n let process = header.set(\"imag.links\", Value::Array(links));\n process_rw_result(process).map(|_| ())\n }\n}\n\n\/\/\/ When Linking A -> B, the specification wants us to link back B -> A.\n\/\/\/ This is a helper function which does this.\nfn add_foreign_link(target: &mut Entry, from: StoreId) -> Result<()> {\n target.get_internal_links()\n .and_then(|mut links| {\n links.push(from);\n let links = links_into_values(links);\n if links.iter().any(|o| o.is_none()) {\n Err(LEK::InternalConversionError.into())\n } else {\n let links = links.into_iter().map(|opt| opt.unwrap()).collect();\n process_rw_result(target.get_header_mut().set(\"imag.links\", Value::Array(links)))\n .map(|_| ())\n }\n })\n}\n\nfn process_rw_result(links: StoreResult<Option<Value>>) -> Result<Vec<Link>> {\n if links.is_err() {\n debug!(\"RW action on store failed. Generating LinkError\");\n return Err(LEK::EntryHeaderReadError.into_error_with_cause(Box::new(links.unwrap_err())))\n }\n let links = links.unwrap();\n\n if links.is_none() {\n debug!(\"We got no value from the header!\");\n return Ok(vec![])\n }\n let links = links.unwrap();\n\n let links = {\n match links {\n Value::Array(a) => a,\n _ => {\n debug!(\"We expected an Array for the links, but there was a non-Array!\");\n return Err(LEK::ExistingLinkTypeWrong.into());\n },\n }\n };\n\n if !links.iter().all(|l| is_match!(*l, Value::String(_))) {\n debug!(\"At least one of the Values which were expected in the Array of links is a non-String!\");\n debug!(\"Generating LinkError\");\n return Err(LEK::ExistingLinkTypeWrong.into());\n }\n\n let links : Vec<Link> = links.into_iter()\n .map(|link| {\n match link {\n Value::String(s) => StoreId::from(s),\n _ => unreachable!(),\n }\n })\n .collect();\n\n debug!(\"Ok, the RW action was successful, returning link vector now!\");\n Ok(links)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add console example<commit_after>\/\/! An extremely simple libtock-rs example. Just prints out a message\n\/\/! using the Console capsule, then terminates.\n\n#![no_main]\n#![no_std]\nuse core::fmt::Write;\nuse libtock2::console::Console;\nuse libtock2::runtime::{set_main, stack_size};\n\nset_main! {main}\nstack_size! 
{0x100}\n\nfn main() {\n writeln!(Console::writer(), \"Hello world!\").unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2399<commit_after>\/\/ https:\/\/leetcode.com\/problems\/check-distances-between-same-letters\/\npub fn check_distances(s: String, distance: Vec<i32>) -> bool {\n todo!()\n}\n\nfn main() {\n println!(\n \"{}\",\n check_distances(\n \"abaccb\".to_string(),\n vec![1, 3, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n )\n ); \/\/ true\n println!(\n \"{}\",\n check_distances(\n \"aa\".to_string(),\n vec![1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n )\n ); \/\/ false\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Optimization: Get the Message-Id from the header, if it is there<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>config: remove commented code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Outsource hashing itself so that we can re-use it<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Format and organization<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove old unsafe device code.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ run-pass\n\/\/ ignore-wasm\n\n\/\/ Tests ensuring that `dbg!(expr)` has the expected run-time behavior.\n\/\/ as well as some compile time properties we expect.\n\n#![feature(dbg_macro)]\n\n#[derive(Copy, Clone, Debug)]\nstruct Unit;\n\n#[derive(Copy, Clone, Debug, PartialEq)]\nstruct Point<T> {\n x: T,\n y: T,\n}\n\n#[derive(Debug, PartialEq)]\nstruct NoCopy(usize);\n\nfn test() {\n let a: Unit = dbg!(Unit);\n let _: Unit = dbg!(a);\n \/\/ We can move `a` because it's Copy.\n drop(a);\n\n \/\/ `Point<T>` will be faithfully formatted according to `{:#?}`.\n let a = Point { x: 42, y: 24 };\n let b: Point<u8> = dbg!(Point { x: 42, y: 24 }); \/\/ test stringify!(..)\n let c: Point<u8> = dbg!(b);\n \/\/ Identity conversion:\n assert_eq!(a, b);\n assert_eq!(a, c);\n \/\/ We can move `b` because it's Copy.\n drop(b);\n\n \/\/ Test that we can borrow and that successive applications is still identity.\n let a = NoCopy(1337);\n let b: &NoCopy = dbg!(dbg!(&a));\n assert_eq!(&a, b);\n\n \/\/ Test involving lifetimes of temporaries:\n fn f<'a>(x: &'a u8) -> &'a u8 { x }\n let a: &u8 = dbg!(f(&42));\n assert_eq!(a, &42);\n\n \/\/ Test side effects:\n let mut foo = 41;\n assert_eq!(7331, dbg!({\n foo += 1;\n eprintln!(\"before\");\n 7331\n }));\n assert_eq!(foo, 42);\n}\n\nfn validate_stderr(stderr: Vec<String>) {\n assert_eq!(stderr, &[\n \":22] Unit = Unit\",\n\n \":23] a = Unit\",\n\n \":29] Point{x: 42, y: 24,} = Point {\",\n \" x: 42,\",\n \" y: 24\",\n \"}\",\n\n \":30] b = Point {\",\n \" x: 42,\",\n \" y: 24\",\n \"}\",\n\n \":39] &a = NoCopy(\",\n \" 1337\",\n \")\",\n\n \":39] dbg!(& a) = NoCopy(\",\n \" 1337\",\n \")\",\n \":44] f(&42) = 42\",\n\n \"before\",\n \":49] { foo += 1; eprintln!(\\\"before\\\"); 7331 } = 7331\",\n ]);\n}\n\nfn main() {\n \/\/ The following is a hack to deal with compiletest's inability\n \/\/ to check the output (to stdout) of run-pass tests.\n use std::env;\n use std::process::Command;\n\n let mut args = env::args();\n let prog = args.next().unwrap();\n let child = args.next();\n if let Some(\"child\") = child.as_ref().map(|s| &**s) {\n \/\/ Only run the test if we've been spawned as 'child'\n test()\n } else {\n \/\/ This essentially spawns as 'child' to run the tests\n \/\/ and then it collects output of stderr and checks the 
output\n \/\/ against what we expect.\n let out = Command::new(&prog).arg(\"child\").output().unwrap();\n assert!(out.status.success());\n assert!(out.stdout.is_empty());\n\n let stderr = String::from_utf8(out.stderr).unwrap();\n let stderr = stderr.lines().map(|mut s| {\n if s.starts_with(\"[\") {\n \/\/ Strip `[` and file path:\n s = s.trim_start_matches(\"[\");\n assert!(s.starts_with(file!()));\n s = s.trim_start_matches(file!());\n }\n s.to_owned()\n }).collect();\n\n validate_stderr(stderr);\n }\n}\n<commit_msg>dbg_macro: more things...<commit_after>\/\/ run-pass\n\/\/ ignore-wasm\n\/\/ ignore-wasm32\n\/\/ ignore-cloudabi no processes\n\/\/ ignore-emscripten no processes\n\n\/\/ Tests ensuring that `dbg!(expr)` has the expected run-time behavior.\n\/\/ as well as some compile time properties we expect.\n\n#![feature(dbg_macro)]\n\n#[derive(Copy, Clone, Debug)]\nstruct Unit;\n\n#[derive(Copy, Clone, Debug, PartialEq)]\nstruct Point<T> {\n x: T,\n y: T,\n}\n\n#[derive(Debug, PartialEq)]\nstruct NoCopy(usize);\n\nfn test() {\n let a: Unit = dbg!(Unit);\n let _: Unit = dbg!(a);\n \/\/ We can move `a` because it's Copy.\n drop(a);\n\n \/\/ `Point<T>` will be faithfully formatted according to `{:#?}`.\n let a = Point { x: 42, y: 24 };\n let b: Point<u8> = dbg!(Point { x: 42, y: 24 }); \/\/ test stringify!(..)\n let c: Point<u8> = dbg!(b);\n \/\/ Identity conversion:\n assert_eq!(a, b);\n assert_eq!(a, c);\n \/\/ We can move `b` because it's Copy.\n drop(b);\n\n \/\/ Test that we can borrow and that successive applications is still identity.\n let a = NoCopy(1337);\n let b: &NoCopy = dbg!(dbg!(&a));\n assert_eq!(&a, b);\n\n \/\/ Test involving lifetimes of temporaries:\n fn f<'a>(x: &'a u8) -> &'a u8 { x }\n let a: &u8 = dbg!(f(&42));\n assert_eq!(a, &42);\n\n \/\/ Test side effects:\n let mut foo = 41;\n assert_eq!(7331, dbg!({\n foo += 1;\n eprintln!(\"before\");\n 7331\n }));\n assert_eq!(foo, 42);\n}\n\nfn validate_stderr(stderr: Vec<String>) {\n assert_eq!(stderr, &[\n \":22] Unit = Unit\",\n\n \":23] a = Unit\",\n\n \":29] Point{x: 42, y: 24,} = Point {\",\n \" x: 42,\",\n \" y: 24\",\n \"}\",\n\n \":30] b = Point {\",\n \" x: 42,\",\n \" y: 24\",\n \"}\",\n\n \":39] &a = NoCopy(\",\n \" 1337\",\n \")\",\n\n \":39] dbg!(& a) = NoCopy(\",\n \" 1337\",\n \")\",\n \":44] f(&42) = 42\",\n\n \"before\",\n \":49] { foo += 1; eprintln!(\\\"before\\\"); 7331 } = 7331\",\n ]);\n}\n\nfn main() {\n \/\/ The following is a hack to deal with compiletest's inability\n \/\/ to check the output (to stdout) of run-pass tests.\n use std::env;\n use std::process::Command;\n\n let mut args = env::args();\n let prog = args.next().unwrap();\n let child = args.next();\n if let Some(\"child\") = child.as_ref().map(|s| &**s) {\n \/\/ Only run the test if we've been spawned as 'child'\n test()\n } else {\n \/\/ This essentially spawns as 'child' to run the tests\n \/\/ and then it collects output of stderr and checks the output\n \/\/ against what we expect.\n let out = Command::new(&prog).arg(\"child\").output().unwrap();\n assert!(out.status.success());\n assert!(out.stdout.is_empty());\n\n let stderr = String::from_utf8(out.stderr).unwrap();\n let stderr = stderr.lines().map(|mut s| {\n if s.starts_with(\"[\") {\n \/\/ Strip `[` and file path:\n s = s.trim_start_matches(\"[\");\n assert!(s.starts_with(file!()));\n s = s.trim_start_matches(file!());\n }\n s.to_owned()\n }).collect();\n\n validate_stderr(stderr);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move 
Frame<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ignore distance between stations for now.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More solutions<commit_after>\/\/ https:\/\/leetcode.com\/problems\/car-fleet\/\n\npub trait Solution {\n fn car_fleet(target: i32, position: Vec<i32>, speed: Vec<i32>) -> i32;\n}\npub struct Solution1;\npub struct Solution2;\n\nimpl Solution1 {\n #[inline]\n fn compute_duration(target: i32, pos: i32, speed: i32) -> f64 {\n (target as f64 - pos as f64) \/ speed as f64\n }\n}\n\nimpl Solution for Solution1 {\n \/\/ This solution is based on stack\n \/\/ 1. Calculate durations from the position and speed of each car.\n \/\/ 2. Sort positions. Also sort durations by their corresponding positions.\n \/\/ 3. Iterate through positions and durations. For each new duration, pop\n \/\/ anything in the stack that has a smaller duration (runs faster). Then\n \/\/ push the new duration into the stack.\n \/\/ 4. Return the size of the stack. Each element represents a fleet.\n fn car_fleet(target: i32, position: Vec<i32>, speed: Vec<i32>) -> i32 {\n if position.len() == 0 {\n return 0;\n }\n let durations: Vec<f64> = speed\n .into_iter()\n .enumerate()\n .map(|(idx, spd)| Self::compute_duration(target, position[idx], spd))\n .collect();\n let mut pos_dur_pairs: Vec<(i32, f64)> = position.into_iter().zip(durations).collect();\n pos_dur_pairs.sort_by(|a, b| a.0.cmp(&b.0));\n\n let mut stack: Vec<f64> = Vec::new();\n\n for (_, dur) in pos_dur_pairs {\n while !stack.is_empty() && stack.last().unwrap() <= &dur {\n stack.pop();\n }\n stack.push(dur);\n }\n\n return stack.len() as i32;\n }\n}\n\nimpl Solution for Solution2 {\n \/\/ This is the fastest submitted solution in leetcode so far. The idea is\n \/\/ instead of iterating forward, by iterating in the reverse order, we can\n \/\/ get rid of the stack because if a car behind me reaches the target later\n \/\/ than me, it definitely forms a new fleet. The same goes with all cars\n \/\/ behind it.\n fn car_fleet(target: i32, position: Vec<i32>, speed: Vec<i32>) -> i32 {\n let n = position.len();\n let mut pos_speed = vec![(0, 0); n];\n\n for i in 0..n {\n pos_speed[i] = (position[i], speed[i]);\n }\n\n pos_speed.sort_unstable_by(|a, b| b.cmp(&a));\n\n \/\/ println!(\"{:?}\", pos_speed);\n\n let mut result = 0;\n let mut current = 0f64;\n\n for i in 0..n {\n let (pos, speed) = pos_speed[i];\n let time = (target - pos) as f64 \/ speed as f64;\n\n if time > current {\n result += 1;\n current = time;\n }\n }\n return result as i32;\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n mod solution1 {\n use super::*;\n\n #[test]\n fn test_1() {\n assert_eq!(\n Solution1::car_fleet(12, vec![10, 8, 0, 5, 3], vec![2, 4, 1, 1, 3]),\n 3\n );\n }\n\n #[test]\n fn test_2() {\n assert_eq!(Solution1::car_fleet(10, vec![3], vec![3]), 1);\n }\n\n #[test]\n fn test_3() {\n assert_eq!(Solution1::car_fleet(100, vec![0, 2, 4], vec![4, 2, 1]), 1);\n }\n }\n\n mod solution2 {\n use super::*;\n\n #[test]\n fn test_1() {\n assert_eq!(\n Solution2::car_fleet(12, vec![10, 8, 0, 5, 3], vec![2, 4, 1, 1, 3]),\n 3\n );\n }\n\n #[test]\n fn test_2() {\n assert_eq!(Solution2::car_fleet(10, vec![3], vec![3]), 1);\n }\n\n #[test]\n fn test_3() {\n assert_eq!(Solution2::car_fleet(100, vec![0, 2, 4], vec![4, 2, 1]), 1);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for expanding doc strings in macros.<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-pretty - token trees can't pretty print\n\n#![feature(macro_rules)]\n\nmacro_rules! descriptions {\n ($name:ident is $desc:expr) => {\n \/\/ Check that we will correctly expand attributes\n #[doc = $desc]\n #[allow(dead_code)]\n const $name : &'static str = $desc;\n }\n}\n\n\/\/ item\ndescriptions!(DOG is \"an animal\")\ndescriptions!(RUST is \"a language\")\n\npub fn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>split LCG into library and binary<commit_after>extern crate rand;\n\npub use rand::{Rng, SeedableRng};\n\npub struct BsdLcg {\n state: u32,\n}\n\nimpl Rng for BsdLcg {\n fn next_u32(&mut self) -> u32 {\n self.state = self.state.wrapping_mul(1_103_515_245).wrapping_add(12_345);\n self.state %= 1 << 31;\n self.state\n }\n}\n\nimpl SeedableRng<u32> for BsdLcg {\n fn from_seed(seed: u32) -> Self {\n Self { state: seed }\n }\n fn reseed(&mut self, seed: u32) {\n self.state = seed;\n }\n}\n\npub struct MsLcg {\n state: u32,\n}\n\nimpl Rng for MsLcg {\n \/\/ Because this only uses the high 16 bits of the state (`>> 16`), this should technically use\n \/\/ `next_u16`, but the `rand` crate does not provide it. If serious usage is required,\n \/\/ implementing this function as a concatenation of two `next_u16`s (elsewhere defined) should\n \/\/ work.\n fn next_u32(&mut self) -> u32 {\n self.state = self.state.wrapping_mul(214_013).wrapping_add(2_531_011);\n self.state %= 1 << 31;\n self.state >> 16 \/\/ rand_n = state_n \/ 2^16\n }\n}\n\nimpl SeedableRng<u32> for MsLcg {\n fn from_seed(seed: u32) -> Self {\n Self { state: seed }\n }\n fn reseed(&mut self, seed: u32) {\n self.state = seed;\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_ms() {\n let mut rng = MsLcg::from_seed(1);\n\n for &num in &[41, 18467, 6334, 26500, 19169] {\n assert_eq!(rng.next_u32(), num);\n }\n }\n\n #[test]\n fn test_bsd() {\n let mut rng = BsdLcg::from_seed(1);\n\n for &num in &[1103527590, 377401575, 662824084, 1147902781, 2035015474] {\n assert_eq!(rng.next_u32(), num);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example with threads and a shared counter<commit_after>\/\/ same as threaded.rs, except that the counter is thread safe\n\/\/ and report the correct number of requests, parallel or not\n\nextern crate rotor;\nextern crate rotor_http;\n\n\nuse std::env;\nuse std::thread;\nuse std::time::Duration;\n\nuse rotor::{Scope, Time};\nuse rotor_http::server::{Fsm, RecvMode, Server, Head, Response};\nuse rotor::mio::tcp::TcpListener;\nuse std::sync::atomic::{AtomicUsize, Ordering};\nuse std::sync::Arc;\n\n\nstruct Context {\n counter: Arc<AtomicUsize>,\n}\n\ntrait Counter {\n fn increment(&mut self);\n fn get(&self) -> usize;\n}\n\nimpl Counter for Context {\n fn increment(&mut self) { self.counter.fetch_add(1, Ordering::SeqCst); }\n fn get(&self) -> usize { self.counter.load(Ordering::SeqCst) }\n}\n\n#[derive(Debug, Clone)]\nenum HelloWorld {\n Hello,\n GetNum,\n HelloName(String),\n PageNotFound,\n}\n\nfn send_string(res: &mut Response, data: &[u8]) {\n res.status(200, \"OK\");\n 
res.add_length(data.len() as u64).unwrap();\n res.done_headers().unwrap();\n res.write_body(data);\n res.done();\n}\n\nimpl Server for HelloWorld {\n type Seed = ();\n type Context = Context;\n fn headers_received(_seed: (), head: Head, _res: &mut Response,\n scope: &mut Scope<Context>)\n -> Option<(Self, RecvMode, Time)>\n {\n use self::HelloWorld::*;\n scope.increment();\n Some((match head.path {\n \"\/\" => Hello,\n \"\/num\" => GetNum,\n p if p.starts_with('\/') => HelloName(p[1..].to_string()),\n _ => PageNotFound\n }, RecvMode::Buffered(1024), scope.now() + Duration::new(10, 0)))\n }\n fn request_received(self, _data: &[u8], res: &mut Response,\n scope: &mut Scope<Context>)\n -> Option<Self>\n {\n use self::HelloWorld::*;\n match self {\n Hello => {\n send_string(res, b\"Hello World!\");\n }\n GetNum => {\n send_string(res,\n format!(\"This host has been visited {} times\",\n scope.get())\n .as_bytes());\n }\n HelloName(name) => {\n send_string(res, format!(\"Hello {}!\", name).as_bytes());\n }\n PageNotFound => {\n let data = b\"404 - Page not found\";\n res.status(404, \"Not Found\");\n res.add_length(data.len() as u64).unwrap();\n res.done_headers().unwrap();\n res.write_body(data);\n res.done();\n }\n }\n None\n }\n fn request_chunk(self, _chunk: &[u8], _response: &mut Response,\n _scope: &mut Scope<Context>)\n -> Option<Self>\n {\n unreachable!();\n }\n\n \/\/\/ End of request body, only for Progressive requests\n fn request_end(self, _response: &mut Response, _scope: &mut Scope<Context>)\n -> Option<Self>\n {\n unreachable!();\n }\n\n fn timeout(self, _response: &mut Response, _scope: &mut Scope<Context>)\n -> Option<(Self, Time)>\n {\n unimplemented!();\n }\n fn wakeup(self, _response: &mut Response, _scope: &mut Scope<Context>)\n -> Option<Self>\n {\n unimplemented!();\n }\n}\n\nfn main() {\n let lst = TcpListener::bind(&\"127.0.0.1:3000\".parse().unwrap()).unwrap();\n let threads = env::var(\"THREADS\").unwrap_or(\"2\".to_string())\n .parse().unwrap();\n let mut children = Vec::new();\n let counter = Arc::new(AtomicUsize::new(0));\n for _ in 0..threads {\n let counter_ref = counter.clone();\n let listener = lst.try_clone().unwrap();\n children.push(thread::spawn(move || {\n let event_loop = rotor::Loop::new(\n &rotor::Config::new()).unwrap();\n let mut loop_inst = event_loop.instantiate(Context {\n counter: counter_ref,\n });\n loop_inst.add_machine_with(|scope| {\n Fsm::<HelloWorld, _>::new(listener, (), scope)\n }).unwrap();\n loop_inst.run().unwrap();\n }));\n }\n for child in children {\n child.join().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix #7093<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style: Undo minor debugging change.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>implemented and tested Heaps<commit_after>use std::cmp::{Ord, Ordering};\nuse std::collections::BinaryHeap as RHeap;\nuse super::{ Cost, NodeId, INF, NEG_INF };\n\n\/\/\/ minimalistic heap trait restricted for `(NodeId, Cost)` tuples\npub trait Heap {\n \/\/\/ Find the min element in `O(1)` time.\n fn find_min(&self) -> Option<NodeId>;\n \/\/\/ Return the current number of elements in the heap.\n fn size(&self) -> usize;\n fn is_empty(&self) -> bool;\n fn insert(&mut self, node_id: NodeId, cost: Cost);\n \/\/\/ Remove the current minimal element.\n fn delete_min(&mut self);\n}\n\n\/\/\/ BinaryHeap, wraps the native Rust implementation.\n\/\/\/ Rust's BinaryHeap is a max heap, we need a min heap.\n\/\/\/ I solved that by adding the 
costs as negative in the `insert` function.\npub struct BinaryHeap {\n inner_heap: RHeap<HeapMember>\n}\nimpl BinaryHeap {\n fn new() -> Self {\n BinaryHeap {\n inner_heap: RHeap::new()\n }\n }\n fn with_capacity(capacity: usize) -> Self {\n BinaryHeap {\n inner_heap: RHeap::with_capacity(capacity)\n }\n }\n}\n\nimpl Heap for BinaryHeap {\n fn find_min(&self) -> Option<NodeId> {\n match self.inner_heap.peek() {\n Some(&member) => Some(member.key),\n None => None,\n }\n }\n fn size(&self) -> usize {\n self.inner_heap.len()\n }\n fn is_empty(&self) -> bool {\n self.inner_heap.is_empty()\n }\n fn insert(&mut self, node_id: NodeId, cost: Cost) {\n self.inner_heap.push(HeapMember { key: node_id, cost: -cost }) \/\/ rust heap is a max heap\n }\n fn delete_min(&mut self) {\n self.inner_heap.pop();\n }\n}\n\nstruct FibElement {\n heap_member: HeapMember,\n rank: usize,\n children: Vec<FibElement>,\n}\n\npub struct FibonacciHeap {\n min_elem: Option<FibElement>,\n root: Vec<FibElement>,\n size: usize,\n}\n\nimpl FibonacciHeap {\n fn new() -> Self {\n FibonacciHeap {\n min_elem: None,\n root: Vec::new(),\n size: 0,\n }\n }\n fn remove_min(&mut self) {\n let min_elem = self.min_elem.take().unwrap();\n for child in min_elem.children {\n self.root.push(child);\n }\n }\n fn merge_roots(&mut self) {\n let root_degrees = &mut (vec![self.size; self.root.len()])[..]; \/\/ self.size is an invalid id\n while self.not_all_different() {\n for i in 0..self.root.len() {\n let rank = self.root.get(i).unwrap().rank;\n if root_degrees[rank] != self.size {\n self.link(root_degrees[rank], i);\n root_degrees[rank] = self.size;\n break;\n } else {\n root_degrees[rank] = i;\n }\n }\n }\n }\n fn update_min(&mut self) {\n let mut min_id = 0;\n let mut min_cost = INF;\n for i in 0..self.root.len() {\n let current_cost = self.root.get(i).unwrap().heap_member.cost;\n if current_cost < min_cost {\n min_cost = current_cost;\n min_id = i;\n }\n }\n self.min_elem = Some(self.root.remove(min_id))\n }\n fn link(&mut self, i: usize, j: usize) {\n let mut elem_i = self.root.remove(i);\n let mut elem_j = self.root.remove(j);\n if elem_i.heap_member < elem_j.heap_member {\n elem_i.children.push(elem_j);\n elem_i.rank += 1;\n self.root.push(elem_i);\n } else {\n elem_j.children.push(elem_i);\n elem_j.rank += 1;\n self.root.push(elem_j);\n }\n }\n fn not_all_different(&self) -> bool {\n let root_degrees = &mut (vec![self.size; self.root.len()])[..]; \/\/ self.size is an invalid id\n for i in 0..self.root.len() {\n let rank = self.root.get(i).unwrap().rank;\n if root_degrees[rank] != self.size {\n return true;\n } else {\n root_degrees[rank] = i;\n }\n }\n false\n } \n}\n\nimpl Heap for FibonacciHeap {\n fn find_min(&self) -> Option<NodeId> {\n self.min_elem.as_ref().map(|elem| elem.heap_member.key)\n }\n fn size(&self) -> usize {\n self.size\n }\n fn is_empty(&self) -> bool {\n self.size == 0\n }\n fn insert(&mut self, node: NodeId, cost: Cost) {\n let new_elem = FibElement {\n heap_member: HeapMember{ key: node, cost: cost },\n rank: 0,\n children: Vec::new(),\n };\n\n if self.is_empty() {\n self.min_elem = Some(new_elem)\n } else if cost < self.min_elem.as_ref().unwrap().heap_member.cost {\n self.root.push(self.min_elem.take().unwrap());\n self.min_elem = Some(new_elem);\n } else {\n self.root.push(new_elem)\n }\n self.size += 1;\n }\n fn delete_min(&mut self) {\n if ! 
self.is_empty() {\n self.size -= 1;\n self.remove_min();\n self.merge_roots();\n self.update_min();\n }\n }\n}\n\n\/\/\/ Heap element, wraps a tuple of node id and respective costs\n#[derive(Copy, Clone, PartialEq)]\nstruct HeapMember {\n key: NodeId,\n cost: Cost,\n}\n\nimpl Eq for HeapMember {}\n\n\/\/\/ Implementation of `PartialOrd` based on the cost to reach a node\nimpl PartialOrd for HeapMember {\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n if self.cost.is_nan() || other.cost.is_nan() {\n return None;\n }\n if self.cost < other.cost {\n return Some(Ordering::Less);\n } else if self.cost > other.cost {\n return Some(Ordering::Greater);\n } else {\n return Some(Ordering::Equal);\n }\n }\n\n fn lt(&self, other: &Self) -> bool {\n self.cost < other.cost\n }\n fn le(&self, other: &Self) -> bool {\n self.cost <= other.cost\n }\n fn gt(&self, other: &Self) -> bool {\n self.cost > other.cost\n }\n fn ge(&self, other: &Self) -> bool {\n self.cost >= other.cost\n }\n}\n\n\/\/\/ Implement a total ordering on elements of a heap based on costs\nimpl Ord for HeapMember {\n fn cmp(&self, other: &Self) -> Ordering {\n if self.cost < other.cost {\n return Ordering::Less;\n } else if self.cost > other.cost {\n return Ordering::Greater;\n } else {\n return Ordering::Equal;\n }\n }\n}\n\n#[test]\nfn test_partial_ordering() {\n let mem1 = HeapMember{key: 0, cost: 0.0};\n let mem2 = HeapMember{key: 1, cost: 1.0};\n let mem3 = HeapMember{key: 2, cost: -1.0};\n\n assert!(mem1 < mem2);\n assert!(mem2 > mem1);\n assert!(mem3 < mem2);\n assert!(mem3 < mem1);\n assert!(mem1 == mem1);\n}\n\n#[test]\nfn test_ordering() {\n let mem1 = HeapMember{key: 0, cost: 0.0};\n let mem2 = HeapMember{key: 1, cost: 1.0};\n let mem3 = HeapMember{key: 2, cost: -1.0};\n\n assert_eq!(Ordering::Less, mem1.cmp(&mem2));\n assert_eq!(Ordering::Greater, mem2.cmp(&mem1));\n assert_eq!(Ordering::Equal, mem1.cmp(&mem1));\n assert_eq!(Ordering::Less, mem3.cmp(&mem1));\n}\n\n#[test]\nfn test_binary_heap() {\n let mut binary_heap = BinaryHeap::new();\n binary_heap.insert(0,0.0);\n assert_eq!(Some(0), binary_heap.find_min());\n binary_heap.insert(1,1.0);\n binary_heap.delete_min();\n binary_heap.insert(2,2.0);\n binary_heap.insert(3,3.0);\n assert_eq!(Some(1), binary_heap.find_min());\n assert_eq!(3, binary_heap.size());\n binary_heap.insert(4,4.0);\n binary_heap.insert(5,5.0);\n assert_eq!(5, binary_heap.size());\n assert_eq!(Some(1), binary_heap.find_min());\n binary_heap.insert(0,0.0);\n assert_eq!(Some(0), binary_heap.find_min());\n}\n\n#[test]\nfn test_fibonacci_heap() {\n let mut fibonacci_heap = FibonacciHeap::new();\n fibonacci_heap.insert(0,0.0);\n assert_eq!(Some(0), fibonacci_heap.find_min());\n fibonacci_heap.insert(1,1.0);\n fibonacci_heap.delete_min();\n fibonacci_heap.insert(2,2.0);\n fibonacci_heap.insert(3,3.0);\n assert_eq!(Some(1), fibonacci_heap.find_min());\n assert_eq!(3, fibonacci_heap.size());\n fibonacci_heap.insert(4,4.0);\n fibonacci_heap.insert(5,5.0);\n assert_eq!(5, fibonacci_heap.size());\n assert_eq!(Some(1), fibonacci_heap.find_min());\n fibonacci_heap.insert(0,0.0);\n assert_eq!(Some(0), fibonacci_heap.find_min());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>file open err<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Use module path when getting mail instead of using plain MessageId object in String representation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added Node and Graphyll data structure -- needs 
testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Replace 'static' keyword with 'const' to be compilable in rust 0.12<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #27147 - apasel422:issue-19404, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(reflect_marker)]\n\nuse std::any::TypeId;\nuse std::marker::Reflect;\nuse std::rc::Rc;\n\ntype Fp<T> = Rc<T>;\n\nstruct Engine;\n\ntrait Component: 'static + Reflect {}\nimpl Component for Engine {}\n\ntrait Env {\n fn get_component_type_id(&self, type_id: TypeId) -> Option<Fp<Component>>;\n}\n\nimpl<'a> Env+'a {\n fn get_component<T: Component>(&self) -> Option<Fp<T>> {\n let x = self.get_component_type_id(TypeId::of::<T>());\n None\n }\n}\n\ntrait Figment {\n fn init(&mut self, env: &Env);\n}\n\nstruct MyFigment;\n\nimpl Figment for MyFigment {\n fn init(&mut self, env: &Env) {\n let engine = env.get_component::<Engine>();\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add AWS Config integration tests<commit_after>#![cfg(feature = \"config\")]\n\nextern crate rusoto;\n\nuse rusoto::config::{ConfigServiceClient, DescribeConfigRulesRequest, DescribeDeliveryChannelsRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_describe_config_rules() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = ConfigServiceClient::new(credentials, Region::UsEast1);\n\n let request = DescribeConfigRulesRequest::default();\n\n match client.describe_config_rules(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true)\n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n\n#[test]\nfn should_describe_delivery_channels() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = ConfigServiceClient::new(credentials, Region::UsEast1);\n\n let request = DescribeDeliveryChannelsRequest::default();\n\n match client.describe_delivery_channels(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true)\n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>traits documentation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix broken pipe panics<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for broken pipe errors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix mod echo interface<commit_after><|endoftext|>"} {"text":"<commit_before>use std::string::FromUtf8Error;\nuse rustc_serialize::base64::FromBase64Error;\nuse rustc_serialize::json;\n\npub enum Error {\n Format,\n Base64,\n Decode,\n Json,\n Utf8,\n}\n\nmacro_rules! 
error_wrap {\n ($f: ty, $e: expr) => {\n impl From<$f> for Error {\n fn from(_: $f) -> Error { $e }\n }\n}\n}\n\nerror_wrap!(FromBase64Error, Error::Base64);\nerror_wrap!(json::DecoderError, Error::Decode);\nerror_wrap!(json::ErrorCode, Error::Json);\nerror_wrap!(FromUtf8Error, Error::Utf8);\n<commit_msg>Derive debug<commit_after>use std::string::FromUtf8Error;\nuse rustc_serialize::base64::FromBase64Error;\nuse rustc_serialize::json;\n\n#[derive(Debug)]\npub enum Error {\n Format,\n Base64,\n Decode,\n Json,\n Utf8,\n}\n\nmacro_rules! error_wrap {\n ($f: ty, $e: expr) => {\n impl From<$f> for Error {\n fn from(_: $f) -> Error { $e }\n }\n}\n}\n\nerror_wrap!(FromBase64Error, Error::Base64);\nerror_wrap!(json::DecoderError, Error::Decode);\nerror_wrap!(json::ErrorCode, Error::Json);\nerror_wrap!(FromUtf8Error, Error::Utf8);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify a match statement<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename parameter from X to This<commit_after><|endoftext|>"} {"text":"<commit_before>use std::uint;\nuse std::num;\nuse time::precise_time_ns;\nuse token::Token;\nuse util::Slab;\n\nconst EMPTY: Token = Token(uint::MAX);\nconst NS_PER_MS: u64 = 1_000_000;\n\n\/\/ Implements coarse-grained timeouts using an algorithm based on hashed timing\n\/\/ wheels by Varghese & Lauck.\n\/\/\n\/\/ TODO:\n\/\/ * Handle the case when the timer falls more than an entire wheel behind. There\n\/\/ is no point to loop multiple times around the wheel in one go.\n\/\/ * New type for tick, now() -> Tick\npub struct Timer<T> {\n \/\/ Size of each tick in milliseconds\n tick_ms: u64,\n \/\/ Slab of timeout entries\n entries: Slab<Entry<T>>,\n \/\/ Timeout wheel. Each tick, the timer will look at the next slot for\n \/\/ timeouts that match the current tick.\n wheel: Vec<Token>,\n \/\/ Tick 0's time in milliseconds\n start: u64,\n \/\/ The current tick\n tick: u64,\n \/\/ The next entry to possibly timeout\n next: Token,\n \/\/ Masks the target tick to get the slot\n mask: u64,\n}\n\npub struct Timeout {\n \/\/ Reference into the timer entry slab\n token: Token,\n \/\/ Tick that it should matchup with\n tick: u64,\n}\n\nimpl<T> Timer<T> {\n pub fn new(tick_ms: u64, mut slots: uint, mut capacity: uint) -> Timer<T> {\n slots = num::next_power_of_two(slots);\n capacity = num::next_power_of_two(capacity);\n\n Timer {\n tick_ms: tick_ms,\n entries: Slab::new(capacity),\n wheel: Vec::from_fn(slots, |_| EMPTY),\n start: 0,\n tick: 0,\n next: EMPTY,\n mask: (slots as u64) - 1\n }\n }\n\n pub fn count(&self) -> uint {\n self.entries.count()\n }\n\n \/\/ Number of ms remaining until the next tick\n pub fn next_tick_in_ms(&self) -> u64 {\n let now = self.now_ms();\n let nxt = self.start + (self.tick + 1) * self.tick_ms;\n\n if nxt <= now {\n return 0;\n }\n\n nxt - now\n }\n\n \/*\n *\n * ===== Initialization =====\n *\n *\/\n\n \/\/ Sets the starting time of the timer using the current system time\n pub fn setup(&mut self) {\n let now = self.now_ms();\n self.set_start_ms(now);\n }\n\n fn set_start_ms(&mut self, start: u64) {\n assert!(!self.is_initialized(), \"the timer has already started\");\n self.start = start;\n }\n\n \/*\n *\n * ===== Timeout create \/ cancel =====\n *\n *\/\n\n pub fn timeout_ms(&mut self, token: T, delay: u64) -> TimerResult<Timeout> {\n let at = self.now_ms() + delay;\n self.timeout_at_ms(token, at)\n }\n\n pub fn timeout_at_ms(&mut self, token: T, mut at: u64) -> TimerResult<Timeout> {\n \/\/ Make relative to start\n at -= self.start;\n 
\/\/ Calculate tick\n let mut tick = (at + self.tick_ms - 1) \/ self.tick_ms;\n\n \/\/ Always target at least 1 tick in the future\n if tick <= self.tick {\n tick = self.tick + 1;\n }\n\n self.insert(token, tick)\n }\n\n pub fn clear(&mut self, timeout: Timeout) -> bool {\n let links = match self.entries.get(timeout.token) {\n Some(e) => e.links,\n None => return false\n };\n\n \/\/ Sanity check\n if links.tick != timeout.tick {\n return false;\n }\n\n self.unlink(&links, timeout.token);\n self.entries.remove(timeout.token);\n true\n }\n\n fn insert(&mut self, token: T, tick: u64) -> TimerResult<Timeout> {\n \/\/ Get the slot for the requested tick\n let slot = (tick & self.mask) as uint;\n let curr = self.wheel[slot];\n\n \/\/ Insert the new entry\n let token = try!(\n self.entries.insert(Entry::new(token, tick, curr))\n .map_err(|_| TimerError::overflow()));\n\n if curr != EMPTY {\n \/\/ If there was a previous entry, set its prev pointer to the new\n \/\/ entry\n self.entries[curr].links.prev = token;\n }\n\n \/\/ Update the head slot\n *self.wheel.get_mut(slot) = token;\n\n debug!(\"inserted timout; slot={}; token={}\", slot, token);\n\n \/\/ Return the new timeout\n Ok(Timeout {\n token: token,\n tick: tick\n })\n }\n\n fn unlink(&mut self, links: &EntryLinks, token: Token) {\n debug!(\"unlinking timeout; slot={}; token={}\",\n self.slot_for(links.tick), token);\n\n if links.prev == EMPTY {\n let slot = self.slot_for(links.tick);\n *self.wheel.get_mut(slot) = links.next;\n } else {\n self.entries[links.prev].links.next = links.next;\n }\n\n if links.next != EMPTY {\n self.entries[links.next].links.prev = links.prev;\n\n if token == self.next {\n self.next = links.next;\n }\n } else if token == self.next {\n self.next = EMPTY;\n }\n }\n\n \/*\n *\n * ===== Advance time =====\n *\n *\/\n\n pub fn now(&self) -> u64 {\n self.ms_to_tick(self.now_ms())\n }\n\n pub fn tick_to(&mut self, now: u64) -> Option<T> {\n debug!(\"tick_to; now={}; tick={}\", now, self.tick);\n\n while self.tick <= now {\n let curr = self.next;\n\n debug!(\"ticking; curr={}\", curr);\n\n if curr == EMPTY {\n self.tick += 1;\n self.next = self.wheel[self.slot_for(self.tick)];\n } else {\n let links = self.entries[curr].links;\n\n if links.tick <= self.tick {\n debug!(\"triggering; token={}\", curr);\n\n \/\/ Unlink will also advance self.next\n self.unlink(&links, curr);\n\n \/\/ Remove and return the token\n return self.entries.remove(curr)\n .map(|e| e.token);\n } else {\n self.next = links.next;\n }\n }\n }\n\n None\n }\n\n \/*\n *\n * ===== Misc =====\n *\n *\/\n\n \/\/ Timers are initialized when either the current time has been advanced or a timeout has been set\n #[inline]\n fn is_initialized(&self) -> bool {\n self.tick > 0 || !self.entries.is_empty()\n }\n\n #[inline]\n fn slot_for(&self, tick: u64) -> uint {\n (self.mask & tick) as uint\n }\n\n \/\/ Convert a ms duration into a number of ticks, rounds up\n #[inline]\n fn ms_to_tick(&self, ms: u64) -> u64 {\n (ms - self.start) \/ self.tick_ms\n }\n\n #[inline]\n fn now_ms(&self) -> u64 {\n precise_time_ns() \/ NS_PER_MS\n }\n}\n\n\/\/ Doubly linked list of timer entries. 
Allows for efficient insertion \/\n\/\/ removal of timeouts.\nstruct Entry<T> {\n token: T,\n links: EntryLinks,\n}\n\nimpl<T> Entry<T> {\n fn new(token: T, tick: u64, next: Token) -> Entry<T> {\n Entry {\n token: token,\n links: EntryLinks {\n tick: tick,\n prev: EMPTY,\n next: next,\n },\n }\n }\n}\n\nstruct EntryLinks {\n tick: u64,\n prev: Token,\n next: Token\n}\n\npub type TimerResult<T> = Result<T, TimerError>;\n\n#[deriving(Show)]\npub struct TimerError {\n kind: TimerErrorKind,\n desc: &'static str,\n}\n\nimpl TimerError {\n fn overflow() -> TimerError {\n TimerError {\n kind: TimerOverflow,\n desc: \"too many timer entries\"\n }\n }\n}\n\n#[deriving(Show)]\npub enum TimerErrorKind {\n TimerOverflow,\n}\n\n#[cfg(test)]\nmod test {\n use super::Timer;\n\n #[test]\n pub fn test_timeout_next_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 100).unwrap();\n\n tick = t.ms_to_tick(50);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(150);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn test_clearing_timeout() {\n let mut t = timer();\n let mut tick;\n\n let to = t.timeout_at_ms(\"a\", 100).unwrap();\n assert!(t.clear(to));\n\n tick = t.ms_to_tick(100);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn test_multiple_timeouts_same_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 100).unwrap();\n t.timeout_at_ms(\"b\", 100).unwrap();\n\n let mut rcv = vec![];\n\n tick = t.ms_to_tick(100);\n rcv.push(t.tick_to(tick).unwrap());\n rcv.push(t.tick_to(tick).unwrap());\n\n assert_eq!(None, t.tick_to(tick));\n\n rcv.sort();\n assert!(rcv.as_slice() == [\"a\", \"b\"], \"actual={}\", rcv.as_slice());\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn test_multiple_timeouts_diff_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 110).unwrap();\n t.timeout_at_ms(\"b\", 220).unwrap();\n t.timeout_at_ms(\"c\", 230).unwrap();\n t.timeout_at_ms(\"d\", 440).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(300);\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(400);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(500);\n assert_eq!(Some(\"d\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(600);\n assert_eq!(None, t.tick_to(tick));\n }\n\n #[test]\n pub fn test_catching_up() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 110).unwrap();\n t.timeout_at_ms(\"b\", 220).unwrap();\n t.timeout_at_ms(\"c\", 230).unwrap();\n t.timeout_at_ms(\"d\", 440).unwrap();\n\n tick = t.ms_to_tick(600);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(Some(\"d\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n }\n\n #[test]\n pub fn test_timeout_hash_collision() {\n let mut t = timer();\n let mut tick;\n\n 
t.timeout_at_ms(\"a\", 100).unwrap();\n t.timeout_at_ms(\"b\", 100 + TICK * SLOTS as u64).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(1, t.count());\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n assert_eq!(1, t.count());\n\n tick = t.ms_to_tick(100 + TICK * SLOTS as u64);\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(0, t.count());\n }\n\n #[test]\n pub fn test_clearing_timeout_between_triggers() {\n let mut t = timer();\n let mut tick;\n\n let a = t.timeout_at_ms(\"a\", 100).unwrap();\n let _ = t.timeout_at_ms(\"b\", 100).unwrap();\n let _ = t.timeout_at_ms(\"c\", 200).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(2, t.count());\n\n t.clear(a);\n assert_eq!(1, t.count());\n\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(0, t.count());\n }\n\n const TICK: u64 = 100;\n const SLOTS: uint = 16;\n\n fn timer() -> Timer<&'static str> {\n Timer::new(TICK, SLOTS, 32)\n }\n}\n<commit_msg>Use Vec []-operator instead of deprecated get_mut<commit_after>use std::uint;\nuse std::num;\nuse time::precise_time_ns;\nuse token::Token;\nuse util::Slab;\n\nconst EMPTY: Token = Token(uint::MAX);\nconst NS_PER_MS: u64 = 1_000_000;\n\n\/\/ Implements coarse-grained timeouts using an algorithm based on hashed timing\n\/\/ wheels by Varghese & Lauck.\n\/\/\n\/\/ TODO:\n\/\/ * Handle the case when the timer falls more than an entire wheel behind. There\n\/\/ is no point to loop multiple times around the wheel in one go.\n\/\/ * New type for tick, now() -> Tick\npub struct Timer<T> {\n \/\/ Size of each tick in milliseconds\n tick_ms: u64,\n \/\/ Slab of timeout entries\n entries: Slab<Entry<T>>,\n \/\/ Timeout wheel. 
Each tick, the timer will look at the next slot for\n \/\/ timeouts that match the current tick.\n wheel: Vec<Token>,\n \/\/ Tick 0's time in milliseconds\n start: u64,\n \/\/ The current tick\n tick: u64,\n \/\/ The next entry to possibly timeout\n next: Token,\n \/\/ Masks the target tick to get the slot\n mask: u64,\n}\n\npub struct Timeout {\n \/\/ Reference into the timer entry slab\n token: Token,\n \/\/ Tick that it should matchup with\n tick: u64,\n}\n\nimpl<T> Timer<T> {\n pub fn new(tick_ms: u64, mut slots: uint, mut capacity: uint) -> Timer<T> {\n slots = num::next_power_of_two(slots);\n capacity = num::next_power_of_two(capacity);\n\n Timer {\n tick_ms: tick_ms,\n entries: Slab::new(capacity),\n wheel: Vec::from_fn(slots, |_| EMPTY),\n start: 0,\n tick: 0,\n next: EMPTY,\n mask: (slots as u64) - 1\n }\n }\n\n pub fn count(&self) -> uint {\n self.entries.count()\n }\n\n \/\/ Number of ms remaining until the next tick\n pub fn next_tick_in_ms(&self) -> u64 {\n let now = self.now_ms();\n let nxt = self.start + (self.tick + 1) * self.tick_ms;\n\n if nxt <= now {\n return 0;\n }\n\n nxt - now\n }\n\n \/*\n *\n * ===== Initialization =====\n *\n *\/\n\n \/\/ Sets the starting time of the timer using the current system time\n pub fn setup(&mut self) {\n let now = self.now_ms();\n self.set_start_ms(now);\n }\n\n fn set_start_ms(&mut self, start: u64) {\n assert!(!self.is_initialized(), \"the timer has already started\");\n self.start = start;\n }\n\n \/*\n *\n * ===== Timeout create \/ cancel =====\n *\n *\/\n\n pub fn timeout_ms(&mut self, token: T, delay: u64) -> TimerResult<Timeout> {\n let at = self.now_ms() + delay;\n self.timeout_at_ms(token, at)\n }\n\n pub fn timeout_at_ms(&mut self, token: T, mut at: u64) -> TimerResult<Timeout> {\n \/\/ Make relative to start\n at -= self.start;\n \/\/ Calculate tick\n let mut tick = (at + self.tick_ms - 1) \/ self.tick_ms;\n\n \/\/ Always target at least 1 tick in the future\n if tick <= self.tick {\n tick = self.tick + 1;\n }\n\n self.insert(token, tick)\n }\n\n pub fn clear(&mut self, timeout: Timeout) -> bool {\n let links = match self.entries.get(timeout.token) {\n Some(e) => e.links,\n None => return false\n };\n\n \/\/ Sanity check\n if links.tick != timeout.tick {\n return false;\n }\n\n self.unlink(&links, timeout.token);\n self.entries.remove(timeout.token);\n true\n }\n\n fn insert(&mut self, token: T, tick: u64) -> TimerResult<Timeout> {\n \/\/ Get the slot for the requested tick\n let slot = (tick & self.mask) as uint;\n let curr = self.wheel[slot];\n\n \/\/ Insert the new entry\n let token = try!(\n self.entries.insert(Entry::new(token, tick, curr))\n .map_err(|_| TimerError::overflow()));\n\n if curr != EMPTY {\n \/\/ If there was a previous entry, set its prev pointer to the new\n \/\/ entry\n self.entries[curr].links.prev = token;\n }\n\n \/\/ Update the head slot\n self.wheel[slot] = token;\n\n debug!(\"inserted timout; slot={}; token={}\", slot, token);\n\n \/\/ Return the new timeout\n Ok(Timeout {\n token: token,\n tick: tick\n })\n }\n\n fn unlink(&mut self, links: &EntryLinks, token: Token) {\n debug!(\"unlinking timeout; slot={}; token={}\",\n self.slot_for(links.tick), token);\n\n if links.prev == EMPTY {\n let slot = self.slot_for(links.tick);\n self.wheel[slot] = links.next;\n } else {\n self.entries[links.prev].links.next = links.next;\n }\n\n if links.next != EMPTY {\n self.entries[links.next].links.prev = links.prev;\n\n if token == self.next {\n self.next = links.next;\n }\n } else if token == self.next {\n 
self.next = EMPTY;\n }\n }\n\n \/*\n *\n * ===== Advance time =====\n *\n *\/\n\n pub fn now(&self) -> u64 {\n self.ms_to_tick(self.now_ms())\n }\n\n pub fn tick_to(&mut self, now: u64) -> Option<T> {\n debug!(\"tick_to; now={}; tick={}\", now, self.tick);\n\n while self.tick <= now {\n let curr = self.next;\n\n debug!(\"ticking; curr={}\", curr);\n\n if curr == EMPTY {\n self.tick += 1;\n self.next = self.wheel[self.slot_for(self.tick)];\n } else {\n let links = self.entries[curr].links;\n\n if links.tick <= self.tick {\n debug!(\"triggering; token={}\", curr);\n\n \/\/ Unlink will also advance self.next\n self.unlink(&links, curr);\n\n \/\/ Remove and return the token\n return self.entries.remove(curr)\n .map(|e| e.token);\n } else {\n self.next = links.next;\n }\n }\n }\n\n None\n }\n\n \/*\n *\n * ===== Misc =====\n *\n *\/\n\n \/\/ Timers are initialized when either the current time has been advanced or a timeout has been set\n #[inline]\n fn is_initialized(&self) -> bool {\n self.tick > 0 || !self.entries.is_empty()\n }\n\n #[inline]\n fn slot_for(&self, tick: u64) -> uint {\n (self.mask & tick) as uint\n }\n\n \/\/ Convert a ms duration into a number of ticks, rounds up\n #[inline]\n fn ms_to_tick(&self, ms: u64) -> u64 {\n (ms - self.start) \/ self.tick_ms\n }\n\n #[inline]\n fn now_ms(&self) -> u64 {\n precise_time_ns() \/ NS_PER_MS\n }\n}\n\n\/\/ Doubly linked list of timer entries. Allows for efficient insertion \/\n\/\/ removal of timeouts.\nstruct Entry<T> {\n token: T,\n links: EntryLinks,\n}\n\nimpl<T> Entry<T> {\n fn new(token: T, tick: u64, next: Token) -> Entry<T> {\n Entry {\n token: token,\n links: EntryLinks {\n tick: tick,\n prev: EMPTY,\n next: next,\n },\n }\n }\n}\n\nstruct EntryLinks {\n tick: u64,\n prev: Token,\n next: Token\n}\n\npub type TimerResult<T> = Result<T, TimerError>;\n\n#[deriving(Show)]\npub struct TimerError {\n kind: TimerErrorKind,\n desc: &'static str,\n}\n\nimpl TimerError {\n fn overflow() -> TimerError {\n TimerError {\n kind: TimerOverflow,\n desc: \"too many timer entries\"\n }\n }\n}\n\n#[deriving(Show)]\npub enum TimerErrorKind {\n TimerOverflow,\n}\n\n#[cfg(test)]\nmod test {\n use super::Timer;\n\n #[test]\n pub fn test_timeout_next_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 100).unwrap();\n\n tick = t.ms_to_tick(50);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(150);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn test_clearing_timeout() {\n let mut t = timer();\n let mut tick;\n\n let to = t.timeout_at_ms(\"a\", 100).unwrap();\n assert!(t.clear(to));\n\n tick = t.ms_to_tick(100);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn test_multiple_timeouts_same_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 100).unwrap();\n t.timeout_at_ms(\"b\", 100).unwrap();\n\n let mut rcv = vec![];\n\n tick = t.ms_to_tick(100);\n rcv.push(t.tick_to(tick).unwrap());\n rcv.push(t.tick_to(tick).unwrap());\n\n assert_eq!(None, t.tick_to(tick));\n\n rcv.sort();\n assert!(rcv.as_slice() == [\"a\", \"b\"], \"actual={}\", rcv.as_slice());\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n\n assert_eq!(t.count(), 0);\n }\n\n #[test]\n pub fn 
test_multiple_timeouts_diff_tick() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 110).unwrap();\n t.timeout_at_ms(\"b\", 220).unwrap();\n t.timeout_at_ms(\"c\", 230).unwrap();\n t.timeout_at_ms(\"d\", 440).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(300);\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(400);\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(500);\n assert_eq!(Some(\"d\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(600);\n assert_eq!(None, t.tick_to(tick));\n }\n\n #[test]\n pub fn test_catching_up() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 110).unwrap();\n t.timeout_at_ms(\"b\", 220).unwrap();\n t.timeout_at_ms(\"c\", 230).unwrap();\n t.timeout_at_ms(\"d\", 440).unwrap();\n\n tick = t.ms_to_tick(600);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(Some(\"d\"), t.tick_to(tick));\n assert_eq!(None, t.tick_to(tick));\n }\n\n #[test]\n pub fn test_timeout_hash_collision() {\n let mut t = timer();\n let mut tick;\n\n t.timeout_at_ms(\"a\", 100).unwrap();\n t.timeout_at_ms(\"b\", 100 + TICK * SLOTS as u64).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"a\"), t.tick_to(tick));\n assert_eq!(1, t.count());\n\n tick = t.ms_to_tick(200);\n assert_eq!(None, t.tick_to(tick));\n assert_eq!(1, t.count());\n\n tick = t.ms_to_tick(100 + TICK * SLOTS as u64);\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(0, t.count());\n }\n\n #[test]\n pub fn test_clearing_timeout_between_triggers() {\n let mut t = timer();\n let mut tick;\n\n let a = t.timeout_at_ms(\"a\", 100).unwrap();\n let _ = t.timeout_at_ms(\"b\", 100).unwrap();\n let _ = t.timeout_at_ms(\"c\", 200).unwrap();\n\n tick = t.ms_to_tick(100);\n assert_eq!(Some(\"b\"), t.tick_to(tick));\n assert_eq!(2, t.count());\n\n t.clear(a);\n assert_eq!(1, t.count());\n\n assert_eq!(None, t.tick_to(tick));\n\n tick = t.ms_to_tick(200);\n assert_eq!(Some(\"c\"), t.tick_to(tick));\n assert_eq!(0, t.count());\n }\n\n const TICK: u64 = 100;\n const SLOTS: uint = 16;\n\n fn timer() -> Timer<&'static str> {\n Timer::new(TICK, SLOTS, 32)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Get started with importing the context scheme to userspace<commit_after>use redox::Box;\nuse redox::cell::UnsafeCell;\nuse redox::console::ConsoleWindow;\nuse redox::fs::file::File;\nuse redox::rc::Rc;\nuse redox::str;\nuse redox::string::*;\nuse redox::io::SeekFrom;\n\npub struct Scheme;\n\nimpl Scheme {\n fn scheme(&self) -> Box<Self> {\n box Scheme\n }\n\n fn open(&mut self, url: &URL) -> Option<Box<Resource>> {\n let i;\n let len;\n unsafe {\n \/*\n let reenable = scheduler::start_no_ints();\n i = context_i;\n len = (*contexts_ptr).len();\n scheduler::end_no_ints(reenable);\n *\/\n }\n\n Some(box Resource::new(File::open(\"context:\/\/\"), (\"Current: \".to_string() + i + \"\\nTotal: \" + len).to_utf8()))\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Formatting of chained expressions, i.e. expressions which are chained by\n\/\/\/ dots: struct and enum field access and method calls.\n\/\/\/\n\/\/\/ Instead of walking these subexpressions one-by-one, as is our usual strategy\n\/\/\/ for expression formatting, we collect maximal sequences of these expressions\n\/\/\/ and handle them simultaneously.\n\/\/\/\n\/\/\/ Whenever possible, the entire chain is put on a single line. If that fails,\n\/\/\/ we put each subexpression on a separate, much like the (default) function\n\/\/\/ argument function argument strategy.\n\/\/\/\n\/\/\/ Depends on config options: `chain_base_indent` is the indent to use for\n\/\/\/ blocks in the parent\/root\/base of the chain.\n\/\/\/ E.g., `let foo = { aaaa; bbb; ccc }.bar.baz();`, we would layout for the\n\/\/\/ following values of `chain_base_indent`:\n\/\/\/ Visual:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/ Inherit:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/ Tabbed:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/\n\/\/\/ `chain_indent` dictates how the rest of the chain is aligned.\n\/\/\/ If the first item in the chain is a block expression, we align the dots with\n\/\/\/ the braces.\n\/\/\/ Visual:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ Inherit:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ Tabbed:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ `chains_overflow_last` applies only to chains where the last item is a\n\/\/\/ method call. Usually, any line break in a chain sub-expression causes the\n\/\/\/ whole chain to be split with newlines at each `.`. With `chains_overflow_last`\n\/\/\/ true, then we allow the last method call to spill over multiple lines without\n\/\/\/ forcing the rest of the chain to be split.\n\n\nuse Indent;\nuse rewrite::{Rewrite, RewriteContext};\nuse utils::{wrap_str, first_line_width};\nuse expr::rewrite_call;\nuse config::BlockIndentStyle;\n\nuse syntax::{ast, ptr};\nuse syntax::codemap::{mk_sp, Span};\n\n\npub fn rewrite_chain(expr: &ast::Expr,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let total_span = expr.span;\n let (parent, subexpr_list) = make_subexpr_list(expr);\n\n \/\/ Parent is the first item in the chain, e.g., `foo` in `foo.bar.baz()`.\n let parent_block_indent = chain_base_indent(context, offset);\n let parent_context = &RewriteContext { block_indent: parent_block_indent, ..*context };\n let parent_rewrite = try_opt!(parent.rewrite(parent_context, width, offset));\n\n \/\/ Decide how to layout the rest of the chain. 
`extend` is true if we can\n \/\/ put the first non-parent item on the same line as the parent.\n let (indent, extend) = if !parent_rewrite.contains('\\n') && is_continuable(parent) ||\n parent_rewrite.len() <= context.config.tab_spaces {\n \/\/ Try and put at least the first two items on the same line.\n (chain_indent(context, offset + Indent::new(0, parent_rewrite.len())), true)\n } else if is_block_expr(parent, &parent_rewrite) {\n \/\/ The parent is a block, so align the rest of the chain with the closing\n \/\/ brace.\n (parent_block_indent, false)\n } else if parent_rewrite.contains('\\n') {\n (chain_indent(context, parent_block_indent.block_indent(context.config)), false)\n } else {\n (chain_indent_newline(context, offset + Indent::new(0, parent_rewrite.len())), false)\n };\n\n let max_width = try_opt!((width + offset.width()).checked_sub(indent.width()));\n let mut rewrites = try_opt!(subexpr_list.iter()\n .rev()\n .map(|e| rewrite_chain_subexpr(e, total_span, context, max_width, indent))\n .collect::<Option<Vec<_>>>());\n\n \/\/ Total of all items excluding the last.\n let almost_total = rewrites[..rewrites.len() - 1]\n .iter()\n .fold(0, |a, b| a + first_line_width(b)) + parent_rewrite.len();\n let total_width = almost_total + first_line_width(rewrites.last().unwrap());\n\n let veto_single_line = if context.config.take_source_hints && subexpr_list.len() > 1 {\n \/\/ Look at the source code. Unless all chain elements start on the same\n \/\/ line, we won't consider putting them on a single line either.\n let last_span = context.snippet(mk_sp(subexpr_list[1].span.hi, total_span.hi));\n let first_span = context.snippet(subexpr_list[1].span);\n let last_iter = last_span.chars().take_while(|c| c.is_whitespace());\n\n first_span.chars().chain(last_iter).any(|c| c == '\\n')\n } else {\n false\n };\n\n let mut fits_single_line = !veto_single_line && total_width <= width;\n if fits_single_line {\n let len = rewrites.len();\n let (init, last) = rewrites.split_at_mut(len - 1);\n fits_single_line = init.iter().all(|s| !s.contains('\\n'));\n\n if fits_single_line {\n fits_single_line = match expr.node {\n ref e @ ast::ExprKind::MethodCall(..) if context.config.chains_overflow_last => {\n rewrite_method_call_with_overflow(e,\n &mut last[0],\n almost_total,\n width,\n total_span,\n context,\n offset)\n }\n _ => !last[0].contains('\\n'),\n }\n }\n }\n\n let connector = if fits_single_line && !parent_rewrite.contains('\\n') {\n \/\/ Yay, we can put everything on one line.\n String::new()\n } else {\n \/\/ Use new lines.\n format!(\"\\n{}\", indent.to_string(context.config))\n };\n\n let first_connector = if extend {\n \"\"\n } else {\n &connector\n };\n\n wrap_str(format!(\"{}{}{}\",\n parent_rewrite,\n first_connector,\n rewrites.join(&connector)),\n context.config.max_width,\n width,\n offset)\n}\n\n\/\/ States whether an expression's last line exclusively consists of closing\n\/\/ parens, braces, and brackets in its idiomatic formatting.\nfn is_block_expr(expr: &ast::Expr, repr: &str) -> bool {\n match expr.node {\n ast::ExprKind::Struct(..) |\n ast::ExprKind::While(..) |\n ast::ExprKind::WhileLet(..) |\n ast::ExprKind::If(..) |\n ast::ExprKind::IfLet(..) |\n ast::ExprKind::Block(..) |\n ast::ExprKind::Loop(..) |\n ast::ExprKind::ForLoop(..) |\n ast::ExprKind::Match(..) 
=> repr.contains('\\n'),\n ast::ExprKind::Paren(ref expr) |\n ast::ExprKind::Binary(_, _, ref expr) |\n ast::ExprKind::Index(_, ref expr) |\n ast::ExprKind::Unary(_, ref expr) => is_block_expr(expr, repr),\n _ => false,\n }\n}\n\n\/\/ Returns the root of the chain and a Vec of the prefixes of the rest of the chain.\n\/\/ E.g., for input `a.b.c` we return (`a`, [`a.b.c`, `a.b`])\nfn make_subexpr_list(mut expr: &ast::Expr) -> (&ast::Expr, Vec<&ast::Expr>) {\n fn pop_expr_chain(expr: &ast::Expr) -> Option<&ast::Expr> {\n match expr.node {\n ast::ExprKind::MethodCall(_, _, ref expressions) => Some(&expressions[0]),\n ast::ExprKind::TupField(ref subexpr, _) |\n ast::ExprKind::Field(ref subexpr, _) => Some(subexpr),\n _ => None,\n }\n }\n\n let mut subexpr_list = vec![expr];\n\n while let Some(subexpr) = pop_expr_chain(expr) {\n subexpr_list.push(subexpr);\n expr = subexpr;\n }\n\n let parent = subexpr_list.pop().unwrap();\n (parent, subexpr_list)\n}\n\nfn chain_base_indent(context: &RewriteContext, offset: Indent) -> Indent {\n match context.config.chain_base_indent {\n BlockIndentStyle::Visual => offset,\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Tabbed => context.block_indent.block_indent(context.config),\n }\n}\n\nfn chain_indent(context: &RewriteContext, offset: Indent) -> Indent {\n match context.config.chain_indent {\n BlockIndentStyle::Visual => offset,\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Tabbed => context.block_indent.block_indent(context.config),\n }\n}\n\n\/\/ Ignores visual indenting because this function should be called where it is\n\/\/ not possible to use visual indentation because we are starting on a newline.\nfn chain_indent_newline(context: &RewriteContext, _offset: Indent) -> Indent {\n match context.config.chain_indent {\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Visual | BlockIndentStyle::Tabbed => {\n context.block_indent.block_indent(context.config)\n }\n }\n}\n\nfn rewrite_method_call_with_overflow(expr_kind: &ast::ExprKind,\n last: &mut String,\n almost_total: usize,\n width: usize,\n total_span: Span,\n context: &RewriteContext,\n offset: Indent)\n -> bool {\n if let &ast::ExprKind::MethodCall(ref method_name, ref types, ref expressions) = expr_kind {\n let budget = match width.checked_sub(almost_total) {\n Some(b) => b,\n None => return false,\n };\n let mut last_rewrite = rewrite_method_call(method_name.node,\n types,\n expressions,\n total_span,\n context,\n budget,\n offset + almost_total);\n\n if let Some(ref mut s) = last_rewrite {\n ::std::mem::swap(s, last);\n true\n } else {\n false\n }\n } else {\n unreachable!();\n }\n}\n\n\/\/ Rewrite the last element in the chain `expr`. 
E.g., given `a.b.c` we rewrite\n\/\/ `.c`.\nfn rewrite_chain_subexpr(expr: &ast::Expr,\n span: Span,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n match expr.node {\n ast::ExprKind::MethodCall(ref method_name, ref types, ref expressions) => {\n let inner = &RewriteContext { block_indent: offset, ..*context };\n rewrite_method_call(method_name.node,\n types,\n expressions,\n span,\n inner,\n width,\n offset)\n }\n ast::ExprKind::Field(_, ref field) => {\n let s = format!(\".{}\", field.node);\n if s.len() <= width {\n Some(s)\n } else {\n None\n }\n }\n ast::ExprKind::TupField(_, ref field) => {\n let s = format!(\".{}\", field.node);\n if s.len() <= width {\n Some(s)\n } else {\n None\n }\n }\n _ => unreachable!(),\n }\n}\n\n\/\/ Determines if we can continue formatting a given expression on the same line.\nfn is_continuable(expr: &ast::Expr) -> bool {\n match expr.node {\n ast::ExprKind::Path(..) => true,\n _ => false,\n }\n}\n\nfn rewrite_method_call(method_name: ast::Ident,\n types: &[ptr::P<ast::Ty>],\n args: &[ptr::P<ast::Expr>],\n span: Span,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let (lo, type_str) = if types.is_empty() {\n (args[0].span.hi, String::new())\n } else {\n let type_list: Vec<_> = try_opt!(types.iter()\n .map(|ty| ty.rewrite(context, width, offset))\n .collect());\n\n (types.last().unwrap().span.hi, format!(\"::<{}>\", type_list.join(\", \")))\n };\n\n let callee_str = format!(\".{}{}\", method_name, type_str);\n let span = mk_sp(lo, span.hi);\n\n rewrite_call(context, &callee_str, &args[1..], span, width, offset)\n}\n<commit_msg>Fix for stable<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Formatting of chained expressions, i.e. expressions which are chained by\n\/\/\/ dots: struct and enum field access and method calls.\n\/\/\/\n\/\/\/ Instead of walking these subexpressions one-by-one, as is our usual strategy\n\/\/\/ for expression formatting, we collect maximal sequences of these expressions\n\/\/\/ and handle them simultaneously.\n\/\/\/\n\/\/\/ Whenever possible, the entire chain is put on a single line. 
If that fails,\n\/\/\/ we put each subexpression on a separate, much like the (default) function\n\/\/\/ argument function argument strategy.\n\/\/\/\n\/\/\/ Depends on config options: `chain_base_indent` is the indent to use for\n\/\/\/ blocks in the parent\/root\/base of the chain.\n\/\/\/ E.g., `let foo = { aaaa; bbb; ccc }.bar.baz();`, we would layout for the\n\/\/\/ following values of `chain_base_indent`:\n\/\/\/ Visual:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/ Inherit:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/ Tabbed:\n\/\/\/ ```\n\/\/\/ let foo = {\n\/\/\/ aaaa;\n\/\/\/ bbb;\n\/\/\/ ccc\n\/\/\/ }\n\/\/\/ .bar\n\/\/\/ .baz();\n\/\/\/ ```\n\/\/\/\n\/\/\/ `chain_indent` dictates how the rest of the chain is aligned.\n\/\/\/ If the first item in the chain is a block expression, we align the dots with\n\/\/\/ the braces.\n\/\/\/ Visual:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ Inherit:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ Tabbed:\n\/\/\/ ```\n\/\/\/ let a = foo.bar\n\/\/\/ .baz()\n\/\/\/ .qux\n\/\/\/ ```\n\/\/\/ `chains_overflow_last` applies only to chains where the last item is a\n\/\/\/ method call. Usually, any line break in a chain sub-expression causes the\n\/\/\/ whole chain to be split with newlines at each `.`. With `chains_overflow_last`\n\/\/\/ true, then we allow the last method call to spill over multiple lines without\n\/\/\/ forcing the rest of the chain to be split.\n\n\nuse Indent;\nuse rewrite::{Rewrite, RewriteContext};\nuse utils::{wrap_str, first_line_width};\nuse expr::rewrite_call;\nuse config::BlockIndentStyle;\n\nuse syntax::{ast, ptr};\nuse syntax::codemap::{mk_sp, Span};\n\n\npub fn rewrite_chain(expr: &ast::Expr,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let total_span = expr.span;\n let (parent, subexpr_list) = make_subexpr_list(expr);\n\n \/\/ Parent is the first item in the chain, e.g., `foo` in `foo.bar.baz()`.\n let parent_block_indent = chain_base_indent(context, offset);\n let parent_context = &RewriteContext { block_indent: parent_block_indent, ..*context };\n let parent_rewrite = try_opt!(parent.rewrite(parent_context, width, offset));\n\n \/\/ Decide how to layout the rest of the chain. 
`extend` is true if we can\n \/\/ put the first non-parent item on the same line as the parent.\n let (indent, extend) = if !parent_rewrite.contains('\\n') && is_continuable(parent) ||\n parent_rewrite.len() <= context.config.tab_spaces {\n \/\/ Try and put at least the first two items on the same line.\n (chain_indent(context, offset + Indent::new(0, parent_rewrite.len())), true)\n } else if is_block_expr(parent, &parent_rewrite) {\n \/\/ The parent is a block, so align the rest of the chain with the closing\n \/\/ brace.\n (parent_block_indent, false)\n } else if parent_rewrite.contains('\\n') {\n (chain_indent(context, parent_block_indent.block_indent(context.config)), false)\n } else {\n (chain_indent_newline(context, offset + Indent::new(0, parent_rewrite.len())), false)\n };\n\n let max_width = try_opt!((width + offset.width()).checked_sub(indent.width()));\n let mut rewrites = try_opt!(subexpr_list.iter()\n .rev()\n .map(|e| rewrite_chain_subexpr(e, total_span, context, max_width, indent))\n .collect::<Option<Vec<_>>>());\n\n \/\/ Total of all items excluding the last.\n let almost_total = rewrites[..rewrites.len() - 1]\n .iter()\n .fold(0, |a, b| a + first_line_width(b)) + parent_rewrite.len();\n let total_width = almost_total + first_line_width(rewrites.last().unwrap());\n\n let veto_single_line = if context.config.take_source_hints && subexpr_list.len() > 1 {\n \/\/ Look at the source code. Unless all chain elements start on the same\n \/\/ line, we won't consider putting them on a single line either.\n let last_span = context.snippet(mk_sp(subexpr_list[1].span.hi, total_span.hi));\n let first_span = context.snippet(subexpr_list[1].span);\n let last_iter = last_span.chars().take_while(|c| c.is_whitespace());\n\n first_span.chars().chain(last_iter).any(|c| c == '\\n')\n } else {\n false\n };\n\n let mut fits_single_line = !veto_single_line && total_width <= width;\n if fits_single_line {\n let len = rewrites.len();\n let (init, last) = rewrites.split_at_mut(len - 1);\n fits_single_line = init.iter().all(|s| !s.contains('\\n'));\n\n if fits_single_line {\n fits_single_line = match expr.node {\n ref e @ ast::ExprKind::MethodCall(..) if context.config.chains_overflow_last => {\n rewrite_method_call_with_overflow(e,\n &mut last[0],\n almost_total,\n width,\n total_span,\n context,\n offset)\n }\n _ => !last[0].contains('\\n'),\n }\n }\n }\n\n let connector = if fits_single_line && !parent_rewrite.contains('\\n') {\n \/\/ Yay, we can put everything on one line.\n String::new()\n } else {\n \/\/ Use new lines.\n format!(\"\\n{}\", indent.to_string(context.config))\n };\n\n let first_connector = if extend {\n \"\"\n } else {\n &*connector\n };\n\n wrap_str(format!(\"{}{}{}\",\n parent_rewrite,\n first_connector,\n rewrites.join(&connector)),\n context.config.max_width,\n width,\n offset)\n}\n\n\/\/ States whether an expression's last line exclusively consists of closing\n\/\/ parens, braces, and brackets in its idiomatic formatting.\nfn is_block_expr(expr: &ast::Expr, repr: &str) -> bool {\n match expr.node {\n ast::ExprKind::Struct(..) |\n ast::ExprKind::While(..) |\n ast::ExprKind::WhileLet(..) |\n ast::ExprKind::If(..) |\n ast::ExprKind::IfLet(..) |\n ast::ExprKind::Block(..) |\n ast::ExprKind::Loop(..) |\n ast::ExprKind::ForLoop(..) |\n ast::ExprKind::Match(..) 
=> repr.contains('\\n'),\n ast::ExprKind::Paren(ref expr) |\n ast::ExprKind::Binary(_, _, ref expr) |\n ast::ExprKind::Index(_, ref expr) |\n ast::ExprKind::Unary(_, ref expr) => is_block_expr(expr, repr),\n _ => false,\n }\n}\n\n\/\/ Returns the root of the chain and a Vec of the prefixes of the rest of the chain.\n\/\/ E.g., for input `a.b.c` we return (`a`, [`a.b.c`, `a.b`])\nfn make_subexpr_list(mut expr: &ast::Expr) -> (&ast::Expr, Vec<&ast::Expr>) {\n fn pop_expr_chain(expr: &ast::Expr) -> Option<&ast::Expr> {\n match expr.node {\n ast::ExprKind::MethodCall(_, _, ref expressions) => Some(&expressions[0]),\n ast::ExprKind::TupField(ref subexpr, _) |\n ast::ExprKind::Field(ref subexpr, _) => Some(subexpr),\n _ => None,\n }\n }\n\n let mut subexpr_list = vec![expr];\n\n while let Some(subexpr) = pop_expr_chain(expr) {\n subexpr_list.push(subexpr);\n expr = subexpr;\n }\n\n let parent = subexpr_list.pop().unwrap();\n (parent, subexpr_list)\n}\n\nfn chain_base_indent(context: &RewriteContext, offset: Indent) -> Indent {\n match context.config.chain_base_indent {\n BlockIndentStyle::Visual => offset,\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Tabbed => context.block_indent.block_indent(context.config),\n }\n}\n\nfn chain_indent(context: &RewriteContext, offset: Indent) -> Indent {\n match context.config.chain_indent {\n BlockIndentStyle::Visual => offset,\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Tabbed => context.block_indent.block_indent(context.config),\n }\n}\n\n\/\/ Ignores visual indenting because this function should be called where it is\n\/\/ not possible to use visual indentation because we are starting on a newline.\nfn chain_indent_newline(context: &RewriteContext, _offset: Indent) -> Indent {\n match context.config.chain_indent {\n BlockIndentStyle::Inherit => context.block_indent,\n BlockIndentStyle::Visual | BlockIndentStyle::Tabbed => {\n context.block_indent.block_indent(context.config)\n }\n }\n}\n\nfn rewrite_method_call_with_overflow(expr_kind: &ast::ExprKind,\n last: &mut String,\n almost_total: usize,\n width: usize,\n total_span: Span,\n context: &RewriteContext,\n offset: Indent)\n -> bool {\n if let &ast::ExprKind::MethodCall(ref method_name, ref types, ref expressions) = expr_kind {\n let budget = match width.checked_sub(almost_total) {\n Some(b) => b,\n None => return false,\n };\n let mut last_rewrite = rewrite_method_call(method_name.node,\n types,\n expressions,\n total_span,\n context,\n budget,\n offset + almost_total);\n\n if let Some(ref mut s) = last_rewrite {\n ::std::mem::swap(s, last);\n true\n } else {\n false\n }\n } else {\n unreachable!();\n }\n}\n\n\/\/ Rewrite the last element in the chain `expr`. 
E.g., given `a.b.c` we rewrite\n\/\/ `.c`.\nfn rewrite_chain_subexpr(expr: &ast::Expr,\n span: Span,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n match expr.node {\n ast::ExprKind::MethodCall(ref method_name, ref types, ref expressions) => {\n let inner = &RewriteContext { block_indent: offset, ..*context };\n rewrite_method_call(method_name.node,\n types,\n expressions,\n span,\n inner,\n width,\n offset)\n }\n ast::ExprKind::Field(_, ref field) => {\n let s = format!(\".{}\", field.node);\n if s.len() <= width {\n Some(s)\n } else {\n None\n }\n }\n ast::ExprKind::TupField(_, ref field) => {\n let s = format!(\".{}\", field.node);\n if s.len() <= width {\n Some(s)\n } else {\n None\n }\n }\n _ => unreachable!(),\n }\n}\n\n\/\/ Determines if we can continue formatting a given expression on the same line.\nfn is_continuable(expr: &ast::Expr) -> bool {\n match expr.node {\n ast::ExprKind::Path(..) => true,\n _ => false,\n }\n}\n\nfn rewrite_method_call(method_name: ast::Ident,\n types: &[ptr::P<ast::Ty>],\n args: &[ptr::P<ast::Expr>],\n span: Span,\n context: &RewriteContext,\n width: usize,\n offset: Indent)\n -> Option<String> {\n let (lo, type_str) = if types.is_empty() {\n (args[0].span.hi, String::new())\n } else {\n let type_list: Vec<_> = try_opt!(types.iter()\n .map(|ty| ty.rewrite(context, width, offset))\n .collect());\n\n (types.last().unwrap().span.hi, format!(\"::<{}>\", type_list.join(\", \")))\n };\n\n let callee_str = format!(\".{}{}\", method_name, type_str);\n let span = mk_sp(lo, span.hi);\n\n rewrite_call(context, &callee_str, &args[1..], span, width, offset)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add manual page to cp, -h and --help options<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented Lights::search_new_lights()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adapt to new libimagstore::iter::Entries API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement is_expired on config<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #30<commit_after>extern mod std;\n\nuse std::sort::{ quick_sort };\n\n\/\/ 9^5 = 59049\n\/\/ 9999 => 9^5 * 4 = 236196\n\/\/ 99999 => 9^5 * 5 = 295245\n\/\/ 999999 => 9^5 * 6 = 354294\n\/\/ 9999999 => 9^5 * 7 = 413343\n\n\/\/ 1-6 digits numbers meet conditions\nfn main() {\n let pows = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9].map(|n| int::pow(*n, 5) as uint);\n let nums = [mut 0, 0, 0, 0, 0, 0, 0];\n let mut sum = 0;\n\n for uint::range(0, 10) |d0| {\n let p0 = pows[d0];\n let s0 = p0;\n for uint::range(d0, 10) |d1| {\n let p1 = pows[d1];\n let s1 = s0 + p1;\n for uint::range(d1, 10) |d2| {\n let p2 = pows[d2];\n let s2 = s1 + p2;\n for uint::range(d2, 10) |d3| {\n let p3 = pows[d3];\n let s3 = s2 + p3;\n for uint::range(d3, 10) |d4| {\n let p4 = pows[d4];\n let s4 = s3 + p4;\n for uint::range(d4, 10) |d5| {\n let p5 = pows[d5];\n let s5 = s4 + p5;\n for uint::range(d5, 10) |d6| {\n let p6 = pows[d6];\n let s6 = s5 + p6;\n let mut itr = s6;\n for uint::range(0, nums.len()) |i| {\n nums[i] = itr % 10;\n itr \/= 10;\n }\n quick_sort(|a, b| a < b, nums);\n if nums[0] == d0 &&\n nums[1] == d1 &&\n nums[2] == d2 &&\n nums[3] == d3 &&\n nums[4] == d4 &&\n nums[5] == d5 &&\n nums[6] == d6 {\n io::println(fmt!(\"%u\", s6));\n sum += s6;\n }\n }\n }\n }\n }\n }\n }\n }\n\n io::println(fmt!(\"answer: %u\", sum - 1)); \/\/ remove 1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix StringExpander::ttyname(): 
Return the actual name, not the full path.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::net::ip::{Port, IpAddr};\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware, Action };\nuse into_middleware::IntoMiddleware;\nuse into_error_handler::IntoErrorHandler;\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\nuse server::Server;\n\nuse http::method::{ Method, Get, Post, Put, Delete };\nuse http::status::NotFound;\nuse request::Request;\nuse response::Response;\n\n\n\/\/pre defined middleware\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\nuse default_error_handler::DefaultErrorHandler;\n\n\/\/\/ Nickel is the application object. It's the surface that\n\/\/\/ holds all public APIs.\n\npub struct Nickel{\n middleware_stack: MiddlewareStack,\n server: Option<Server>,\n}\n\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let mut middleware_stack = MiddlewareStack::new();\n\n \/\/ Hook up the default error handler by default. Users are\n \/\/ free to cancel it out from their custom error handler if\n \/\/ they don't like the default behaviour.\n middleware_stack.add_error_handler(DefaultErrorHandler);\n\n Nickel {\n middleware_stack: middleware_stack,\n server: None\n }\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a return value of boolean. That is to indicate whether\n \/\/\/ other middleware handler (if any) further down the stack should continue or if the\n \/\/\/ middleware invocation should be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust,ignore}\n \/\/\/ fn logger (req: &Request, res: &mut Response) -> Result<Action, NickelError>{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Ok(Continue)\n \/\/\/ }\n \/\/\/ ```\n pub fn utilize<T: Middleware>(&mut self, handler: T){\n self.middleware_stack.add_middleware(handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards.\n \/\/\/\n \/\/\/ A handler added through this API will\n \/\/\/ be attached to the default router. 
Consider creating the router\n \/\/\/ middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ let mut server = Nickel::new();\n \/\/\/\n \/\/\/ \/\/ without variables or wildcards\n \/\/\/ fn bare_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", bare_handler);\n \/\/\/\n \/\/\/ \/\/ with variables\n \/\/\/ fn var_handler(request: &Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.param(\"userid\"));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", var_handler);\n \/\/\/\n \/\/\/ \/\/ with simple wildcard\n \/\/\/ fn wild_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", wild_handler);\n \/\/\/\n \/\/\/ \/\/ with double wildcard\n \/\/\/ fn very_wild_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", very_wild_handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Get, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Post, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Put, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ 
```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to \/a\/delete\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Delete, uri, handler);\n }\n\n fn register_route_with_new_router(&mut self, method: Method, uri: &str, handler: fn(request: &Request, response: &mut Response)) {\n let mut router = Router::new();\n router.add_route(method, String::from_str(uri), handler);\n self.utilize(router);\n }\n\n \/\/\/ Registers an error handler which will be invoked among other error handler\n \/\/\/ as soon as any regular handler returned an error\n \/\/\/\n \/\/\/ A error handler is nearly identical to a regular middleware handler with the only\n \/\/\/ difference that it takes an additional error parameter or type `NickelError.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ # extern crate http;\n \/\/\/ # extern crate nickel;\n \/\/\/ # fn main() {\n \/\/\/ use nickel::{Nickel, Request, Response, Action, Continue, Halt};\n \/\/\/ use nickel::{NickelError, ErrorWithStatusCode, get_media_type};\n \/\/\/ use http::status::NotFound;\n \/\/\/\n \/\/\/ fn error_handler(err: &NickelError, req: &Request, response: &mut Response)\n \/\/\/ -> Result<Action, NickelError>{\n \/\/\/ match err.kind {\n \/\/\/ ErrorWithStatusCode(NotFound) => {\n \/\/\/ response.origin.headers.content_type = get_media_type(\"html\");\n \/\/\/ response.origin.status = NotFound;\n \/\/\/ response.send(\"<h1>Call the police!<h1>\");\n \/\/\/ Ok(Halt)\n \/\/\/ },\n \/\/\/ _ => Ok(Continue)\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.handle_error(error_handler)\n \/\/\/ # }\n \/\/\/ ```\n pub fn handle_error(&mut self, handler: fn(err: &NickelError,\n req: &Request,\n res: &mut Response)\n -> Result<Action, NickelError>){\n let handler = IntoErrorHandler::from_fn(handler);\n self.middleware_stack.add_error_handler(handler);\n }\n\n \/\/\/ Create a new middleware to serve as a router.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ let mut router = Nickel::router();\n \/\/\/\n \/\/\/ fn foo_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"Hi from \/foo\");\n \/\/\/ };\n \/\/\/\n \/\/\/ router.get(\"\/foo\", foo_handler);\n \/\/\/ server.utilize(router);\n \/\/\/ ```\n pub fn router() -> Router {\n Router::new()\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate serialize;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::JsonBody;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ first_name: String,\n \/\/\/ last_name: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let router = router! 
{\n \/\/\/ post \"\/a\/post\/request\" => |request, response| {\n \/\/\/ let person = request.json_as::<Person>().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.first_name, person.last_name);\n \/\/\/ response.send(text);\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the json_body_parser middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::json_body_parser());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::QueryString;\n \/\/\/ # fn main() {\n \/\/\/ let router = router! {\n \/\/\/ get \"\/a\/get\/request\" => |request, response| {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo[0].as_slice());\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the query_string middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::query_string());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n }\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n fn not_found_handler(_request: &Request, _response: &mut Response) -> Result<Action, NickelError> {\n Err(NickelError::new(\"File Not Found\", ErrorWithStatusCode(NotFound)))\n }\n\n self.middleware_stack.add_middleware(IntoMiddleware::from_fn(not_found_handler));\n self.server = Some(Server::new(self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\n<commit_msg>docs(nickel): fix docs for utilize<commit_after>use std::io::net::ip::{Port, IpAddr};\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware, Action };\nuse into_middleware::IntoMiddleware;\nuse into_error_handler::IntoErrorHandler;\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\nuse server::Server;\n\nuse http::method::{ Method, Get, Post, Put, Delete };\nuse http::status::NotFound;\nuse request::Request;\nuse response::Response;\n\n\n\/\/pre defined middleware\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\nuse default_error_handler::DefaultErrorHandler;\n\n\/\/\/ Nickel is the application object. It's the surface that\n\/\/\/ holds all public APIs.\n\npub struct Nickel{\n middleware_stack: MiddlewareStack,\n server: Option<Server>,\n}\n\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let mut middleware_stack = MiddlewareStack::new();\n\n \/\/ Hook up the default error handler by default. 
Users are\n \/\/ free to cancel it out from their custom error handler if\n \/\/ they don't like the default behaviour.\n middleware_stack.add_error_handler(DefaultErrorHandler);\n\n Nickel {\n middleware_stack: middleware_stack,\n server: None\n }\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a result of either Action or NickelError.\n \/\/\/ That is to indicate whether other middleware handlers (if any) further\n \/\/\/ down the stack should continue or if the middleware invocation should\n \/\/\/ be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::IntoMiddleware; \/\/ This is *hopefully* temporary\n \/\/\/ use nickel::{Nickel, Request, Response, Action, Continue, NickelError};\n \/\/\/ fn logger(req: &Request, res: &mut Response) -> Result<Action, NickelError>{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Ok(Continue)\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(IntoMiddleware::from_fn(logger));\n \/\/\/ ```\n pub fn utilize<T: Middleware>(&mut self, handler: T){\n self.middleware_stack.add_middleware(handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards.\n \/\/\/\n \/\/\/ A handler added through this API will\n \/\/\/ be attached to the default router. Consider creating the router\n \/\/\/ middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ let mut server = Nickel::new();\n \/\/\/\n \/\/\/ \/\/ without variables or wildcards\n \/\/\/ fn bare_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", bare_handler);\n \/\/\/\n \/\/\/ \/\/ with variables\n \/\/\/ fn var_handler(request: &Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.param(\"userid\"));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", var_handler);\n \/\/\/\n \/\/\/ \/\/ with simple wildcard\n \/\/\/ fn wild_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", wild_handler);\n \/\/\/\n \/\/\/ \/\/ with double wildcard\n \/\/\/ fn very_wild_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", very_wild_handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Get, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more 
detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Post, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Put, uri, handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/ A handler added through this API will be attached to the default router.\n \/\/\/ Consider creating the router middleware manually for advanced functionality.\n \/\/\/\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/ fn handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to \/a\/delete\/request\");\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.register_route_with_new_router(Delete, uri, handler);\n }\n\n fn register_route_with_new_router(&mut self, method: Method, uri: &str, handler: fn(request: &Request, response: &mut Response)) {\n let mut router = Router::new();\n router.add_route(method, String::from_str(uri), handler);\n self.utilize(router);\n }\n\n \/\/\/ Registers an error handler which will be invoked among other error handler\n \/\/\/ as soon as any regular handler returned an error\n \/\/\/\n \/\/\/ A error handler is nearly identical to a regular middleware handler with the only\n \/\/\/ difference that it takes an additional error parameter or type `NickelError.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```{rust}\n \/\/\/ # extern crate http;\n \/\/\/ # extern crate nickel;\n \/\/\/ # fn main() {\n \/\/\/ use nickel::{Nickel, Request, Response, Action, Continue, Halt};\n \/\/\/ use nickel::{NickelError, ErrorWithStatusCode, get_media_type};\n \/\/\/ use http::status::NotFound;\n \/\/\/\n \/\/\/ fn error_handler(err: &NickelError, req: &Request, response: &mut Response)\n \/\/\/ -> Result<Action, NickelError>{\n \/\/\/ match err.kind {\n \/\/\/ ErrorWithStatusCode(NotFound) => {\n \/\/\/ response.origin.headers.content_type = get_media_type(\"html\");\n \/\/\/ response.origin.status = NotFound;\n \/\/\/ response.send(\"<h1>Call the police!<h1>\");\n \/\/\/ 
Ok(Halt)\n \/\/\/ },\n \/\/\/ _ => Ok(Continue)\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.handle_error(error_handler)\n \/\/\/ # }\n \/\/\/ ```\n pub fn handle_error(&mut self, handler: fn(err: &NickelError,\n req: &Request,\n res: &mut Response)\n -> Result<Action, NickelError>){\n let handler = IntoErrorHandler::from_fn(handler);\n self.middleware_stack.add_error_handler(handler);\n }\n\n \/\/\/ Create a new middleware to serve as a router.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ use nickel::{Nickel, Request, Response};\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ let mut router = Nickel::router();\n \/\/\/\n \/\/\/ fn foo_handler(request: &Request, response: &mut Response) {\n \/\/\/ response.send(\"Hi from \/foo\");\n \/\/\/ };\n \/\/\/\n \/\/\/ router.get(\"\/foo\", foo_handler);\n \/\/\/ server.utilize(router);\n \/\/\/ ```\n pub fn router() -> Router {\n Router::new()\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # extern crate serialize;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::JsonBody;\n \/\/\/\n \/\/\/ # fn main() {\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ first_name: String,\n \/\/\/ last_name: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let router = router! {\n \/\/\/ post \"\/a\/post\/request\" => |request, response| {\n \/\/\/ let person = request.json_as::<Person>().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.first_name, person.last_name);\n \/\/\/ response.send(text);\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the json_body_parser middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::json_body_parser());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust}\n \/\/\/ # #![feature(phase)]\n \/\/\/ # #[phase(plugin)] extern crate nickel_macros;\n \/\/\/ # extern crate nickel;\n \/\/\/ # use nickel::{Nickel, Request, Response};\n \/\/\/ use nickel::QueryString;\n \/\/\/ # fn main() {\n \/\/\/ let router = router! 
{\n \/\/\/ get \"\/a\/get\/request\" => |request, response| {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo[0].as_slice());\n \/\/\/ }\n \/\/\/ };\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ \/\/ It is currently a requirement that the query_string middleware\n \/\/\/ \/\/ is added before any routes that require it.\n \/\/\/ server.utilize(Nickel::query_string());\n \/\/\/ server.utilize(router);\n \/\/\/ # }\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n }\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n fn not_found_handler(_request: &Request, _response: &mut Response) -> Result<Action, NickelError> {\n Err(NickelError::new(\"File Not Found\", ErrorWithStatusCode(NotFound)))\n }\n\n self.middleware_stack.add_middleware(IntoMiddleware::from_fn(not_found_handler));\n self.server = Some(Server::new(self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Revert \"add inventory rendering\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Rust solution for array_pair_sum<commit_after>use std::collections::HashSet;\ntype Pair = (i64, i64);\n\nfn array_pair_sum(values: Vec<i64>, k: i64) -> HashSet<Pair> {\n let mut pairs: HashSet<Pair> = HashSet::new();\n let mut seen: HashSet<i64> = HashSet::new();\n\n for v1 in &values {\n let other = k - v1;\n if seen.contains(&other) {\n pairs.insert((*v1, other));\n }\n seen.insert(*v1);\n }\n pairs\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_array_pair_sum() {\n let mut expected1: HashSet<Pair> = HashSet::new();\n expected1.insert((6, 4));\n expected1.insert((7, 3));\n assert_eq!(array_pair_sum(vec![3, 4, 5, 6, 7], 10), expected1);\n\n let mut expected2: HashSet<Pair> = HashSet::new();\n expected2.insert((5, 3));\n expected2.insert((4, 4));\n assert_eq!(array_pair_sum(vec![3, 4, 5, 4, 4], 8), expected2);\n\n assert!(array_pair_sum(vec![4], 8).is_empty());\n\n let mut expected3: HashSet<Pair> = HashSet::new();\n expected3.insert((-4, 4));\n assert_eq!(array_pair_sum(vec![4, -4], 0), expected3);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>extern crate serialize;\n\nuse serialize::{Decodable,json};\nuse std::io::Process;\nuse std::io::fs::File;\nuse std::io::process::ProcessOutput;\nuse std::io::{fs,IoResult,Truncate,UserRWX,Write};\nuse std::str;\n\n#[deriving(Decodable,Show)]\nstruct Example {\n id: ~str,\n title: ~str,\n}\n\nfn read(file: &Path) -> IoResult<~str> {\n let mut file = try!(File::open(file));\n\n Ok(try!(file.read_to_str()))\n}\n\nfn write(file: &Path, contents: &str) -> IoResult<()> {\n let mut file = try!(File::open_mode(file, Truncate, Write));\n\n Ok(try!(file.write_str(contents)))\n}\n\nfn update(example: &Example) -> bool {\n print!(\"{}... 
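// --- Illustrative sketch (not part of the Nickel snippet above) ---
// The `get`/`post`/`put`/`delete` helpers documented above each register a brand-new
// single-route Router middleware via `register_route_with_new_router`. When several
// routes belong together, the documented `Nickel::router()` constructor lets them share
// one Router and one `utilize` call. This only assembles pieces shown in those doc
// comments (`Nickel::router()`, `router.get`, `request.param`, `response.send`).
use nickel::{Nickel, Request, Response};

fn users_handler(_request: &Request, response: &mut Response) {
    response.send("This matches /users");
}

fn user_handler(request: &Request, response: &mut Response) {
    let text = format!("This is user: {}", request.param("userid"));
    response.send(text.as_slice());
}

fn main() {
    let mut server = Nickel::new();
    let mut router = Nickel::router();

    // Both routes live on the same Router, registered with a single `utilize` call.
    router.get("/users", users_handler);
    router.get("/users/:userid", user_handler);
    server.utilize(router);
}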
\", example.id);\n\n let src_dir = Path::new(format!(\"src\/{}\", example.id));\n let out_dir = Path::new(format!(\"output\/examples\/{}\", example.id));\n\n let sources: Vec<Path> = match fs::readdir(&src_dir) {\n Err(_) => {\n println!(\"couldn't read {}\", src_dir.display());\n return false\n } Ok(contents) => contents.move_iter().filter(|path| match path.extension_str() {\n None => false,\n Some(extension) => extension == \"rs\",\n }).collect(),\n };\n\n let mut template = match read(&src_dir.join(\"input.md\")) {\n Err(_) => {\n println!(\"couldn't read input.md\");\n return false\n } Ok(contents) => format!(\"\\\\# {}\\n\\n{}\", example.title, contents),\n };\n\n \/\/ insert source code in markdown\n for source in sources.iter() {\n let filename = source.filename_str().unwrap();\n let code = match read(source) {\n Err(_) => {\n println!(\"couldn't read {}\", filename);\n return false;\n }, Ok(contents) => format!(\"``` rust\\n\/\/ {}\\n{}```\", filename, contents),\n };\n\n template = template.replace(format!(\"\\\\{{}\\\\}\", filename), code);\n }\n\n \/\/ insert program output in markdown\n for source in sources.iter() {\n let filename = source.filename_str().unwrap();\n let token = format!(\"\\\\{{}\\\\}\", source.with_extension(\"out\").filename_str().unwrap());\n\n if template.contains(token) {\n match compile_run(source) {\n Err(action) => {\n println!(\"couldn't {} {}\", action, filename);\n return false;\n }, Ok(output) => {\n let output = format!(\"```\\n$ rustc {} && .\/{}\\n{}```\",\n filename,\n filename.split('.').nth(0).unwrap(),\n output);\n\n template = template.replace(token, output);\n }\n }\n }\n }\n\n if !out_dir.exists() {\n fs::mkdir(&out_dir, UserRWX).unwrap();\n }\n\n match write(&out_dir.join(\"README.md\"), template) {\n Err(_) => {\n println!(\"couldn't write README.md\");\n false\n } Ok(_) => {\n println!(\"DONE\");\n true\n }\n }\n}\n\nfn compile_run(path: &Path) -> Result<~str, &'static str> {\n match Process::output(\"rustc\", [path.as_str().unwrap().to_owned(),\n \"-o\".to_owned(),\n \"executable\".to_owned()]) {\n Err(_) => return Err(\"compile\"),\n Ok(out) => {\n if !out.status.success() {\n return Ok(str::from_utf8_owned(out.error.as_slice().to_owned()).unwrap());\n }\n }\n }\n\n match Process::output(\".\/executable\", []) {\n Err(_) => Err(\"run\"),\n Ok(out) => {\n fs::unlink(&Path::new(\".\/executable\")).unwrap();\n let ProcessOutput { status: _, output: out, error: err } = out;\n let stdout = str::from_utf8_owned(out.as_slice().to_owned()).unwrap();\n let stderr = str::from_utf8_owned(err.as_slice().to_owned()).unwrap();\n\n Ok(vec!(stdout, stderr).concat())\n }\n }\n}\n\nfn main() {\n let src_dir = Path::new(\"src\");\n\n let examples: Vec<Example> = match read(&src_dir.join(\"order.json\")) {\n Err(err) => fail!(\"couldn't read order.json: {}\", err),\n Ok(string) => match json::from_str(string) {\n Err(err) => fail!(\"order.json is invalid json: {}\", err),\n Ok(json) => match Decodable::decode(&mut json::Decoder::new(json)) {\n Err(err) => fail!(\"error decoding order.json: {}\", err),\n Ok(data) => data,\n }\n }\n };\n\n let mut indent = false;\n let summary = examples.move_iter().map(|example| {\n if example.id.as_slice() == \"staging\" {\n indent = true;\n }\n\n let chapter = if update(&example) {\n format!(\"* [{}](examples\/{}\/README.md)\", example.title, example.id)\n } else {\n format!(\"* {}\", example.title)\n };\n\n if indent &&\n example.id.as_slice() != \"staging\" &&\n example.id.as_slice() != \"todo\" {\n format!(\" {}\", 
chapter)\n } else {\n chapter\n }\n }).collect::<Vec<~str>>().connect(\"\\n\");\n\n if write(&Path::new(\"output\/SUMMARY.md\"), summary).is_err() {\n fail!(\"failed to write SUMMARY.md\");\n }\n}\n<commit_msg>update.rs: use new Command API, look for source files also in directories<commit_after>extern crate serialize;\n\nuse serialize::{Decodable,json};\nuse std::io::fs::File;\nuse std::io::process::{Command,ProcessOutput};\nuse std::io::{fs,IoResult,Truncate,UserRWX,Write};\nuse std::str;\n\n#[deriving(Decodable,Show)]\nstruct Example {\n id: ~str,\n title: ~str,\n}\n\nfn read(file: &Path) -> IoResult<~str> {\n let mut file = try!(File::open(file));\n\n Ok(try!(file.read_to_str()))\n}\n\nfn write(file: &Path, contents: &str) -> IoResult<()> {\n let mut file = try!(File::open_mode(file, Truncate, Write));\n\n Ok(try!(file.write_str(contents)))\n}\n\nfn update(example: &Example) -> bool {\n print!(\"{}... \", example.id);\n\n let src_dir = Path::new(format!(\"src\/{}\", example.id));\n let out_dir = Path::new(format!(\"output\/examples\/{}\", example.id));\n\n let sources: Vec<Path> = match fs::walk_dir(&src_dir) {\n Err(_) => {\n println!(\"couldn't read {}\", src_dir.display());\n return false\n } Ok(contents) => contents.filter(|path| match path.extension_str() {\n None => false,\n Some(extension) => extension == \"rs\",\n }).collect(),\n };\n\n let mut template = match read(&src_dir.join(\"input.md\")) {\n Err(_) => {\n println!(\"couldn't read input.md\");\n return false\n } Ok(contents) => format!(\"\\\\# {}\\n\\n{}\", example.title, contents),\n };\n\n \/\/ insert source code in markdown\n for source in sources.iter() {\n let path = source.as_str().unwrap().split('\/').skip(2).collect::<Vec<&str>>().connect(\"\/\");\n let code = match read(source) {\n Err(_) => {\n println!(\"couldn't read {}\", path);\n return false;\n }, Ok(contents) => format!(\"``` rust\\n\/\/ {}\\n{}```\", path, contents),\n };\n\n template = template.replace(format!(\"\\\\{{}\\\\}\", path), code);\n }\n\n \/\/ insert program output in markdown\n for source in sources.iter() {\n let filename = source.filename_str().unwrap();\n let token = format!(\"\\\\{{}\\\\}\", source.with_extension(\"out\").filename_str().unwrap());\n\n if template.contains(token) {\n match compile_run(source) {\n Err(action) => {\n println!(\"couldn't {} {}\", action, filename);\n return false;\n }, Ok(output) => {\n let output = format!(\"```\\n$ rustc {} && .\/{}\\n{}```\",\n filename,\n filename.split('.').nth(0).unwrap(),\n output);\n\n template = template.replace(token, output);\n }\n }\n }\n }\n\n if !out_dir.exists() {\n fs::mkdir(&out_dir, UserRWX).unwrap();\n }\n\n match write(&out_dir.join(\"README.md\"), template) {\n Err(_) => {\n println!(\"couldn't write README.md\");\n false\n } Ok(_) => {\n println!(\"DONE\");\n true\n }\n }\n}\n\nfn compile_run(path: &Path) -> Result<~str, &'static str> {\n match Command::new(\"rustc\").args([path.as_str().unwrap(), \"-o\", \"executable\"]).output() {\n Err(_) => return Err(\"compile\"),\n Ok(out) => {\n if !out.status.success() {\n return Ok(str::from_utf8_owned(out.error.as_slice().to_owned()).unwrap());\n }\n }\n }\n\n match Command::new(\".\/executable\").output() {\n Err(_) => Err(\"run\"),\n Ok(out) => {\n fs::unlink(&Path::new(\".\/executable\")).unwrap();\n let ProcessOutput { status: _, output: out, error: err } = out;\n let stdout = str::from_utf8_owned(out.as_slice().to_owned()).unwrap();\n let stderr = str::from_utf8_owned(err.as_slice().to_owned()).unwrap();\n\n 
Ok(vec!(stdout, stderr).concat())\n }\n }\n}\n\nfn main() {\n let src_dir = Path::new(\"src\");\n\n let examples: Vec<Example> = match read(&src_dir.join(\"order.json\")) {\n Err(err) => fail!(\"couldn't read order.json: {}\", err),\n Ok(string) => match json::from_str(string) {\n Err(err) => fail!(\"order.json is invalid json: {}\", err),\n Ok(json) => match Decodable::decode(&mut json::Decoder::new(json)) {\n Err(err) => fail!(\"error decoding order.json: {}\", err),\n Ok(data) => data,\n }\n }\n };\n\n let mut indent = false;\n let summary = examples.move_iter().map(|example| {\n if example.id.as_slice() == \"staging\" {\n indent = true;\n }\n\n let chapter = if update(&example) {\n format!(\"* [{}](examples\/{}\/README.md)\", example.title, example.id)\n } else {\n format!(\"* {}\", example.title)\n };\n\n if indent &&\n example.id.as_slice() != \"staging\" &&\n example.id.as_slice() != \"todo\" {\n format!(\" {}\", chapter)\n } else {\n chapter\n }\n }).collect::<Vec<~str>>().connect(\"\\n\");\n\n if write(&Path::new(\"output\/SUMMARY.md\"), summary).is_err() {\n fail!(\"failed to write SUMMARY.md\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>create & overwriting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: We go to the next month, not to the current one<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ioreg::register: Fix span associated with struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>pass by-ref to val_eq<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Swap current and next HashSets + drain next instead of creating new allocations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Incorporated debugger into new arch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added tournament selection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Creating simple black image<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>day 10<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Function renaming.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed string offsets<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Render a grid on Products.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove ncollide as extern crate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Renamed status handler (hand -> statush)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>use a bit int parameter for lychrel test<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::{Vec, String, ToString};\nuse redox::collections::{BTreeMap, VecDeque};\n\nuse super::dvaddr::DVAddr;\nuse super::zio;\n\n\/\/ Our implementation of the Adaptive Replacement Cache (ARC) is set up to allocate\n\/\/ its buffer on the heap rather than in a private pool thing. 
This makes it much\n\/\/ simpler to implement, but defers the fragmentation problem to the heap allocator.\n\/\/ We named the type `ArCache` to avoid confusion with Rust's `Arc` reference type.\npub struct ArCache {\n mru_map: BTreeMap<DVAddr, Vec<u8>>, \/\/ Most recently used cache\n mru_queue: VecDeque<DVAddr>, \/\/ Oldest paths are at the end\n mru_size: usize, \/\/ Max mru cache size in bytes\n mru_used: usize, \/\/ Used bytes in mru cache\n}\n\nimpl ArCache {\n pub fn new() -> Self {\n ArCache {\n mru_map: BTreeMap::new(),\n mru_queue: VecDeque::new(),\n mru_size: 10,\n mru_used: 0,\n }\n }\n\n pub fn read(&mut self, reader: &mut zio::Reader, dva: &DVAddr) -> Result<Vec<u8>, String> {\n if let Some(block) = self.mru_map.get(dva) {\n \/\/ Block is cached\n return Ok(block.clone());\n }\n\n \/\/ Block isn't cached, have to read it from disk\n let block = reader.read(dva.sector() as usize, dva.asize() as usize);\n\n \/\/ If necessary, make room for the block in the cache\n while self.mru_used + block.len() > self.mru_size {\n let last_dva = match self.mru_queue.pop_back()\n {\n Some(dva) => dva,\n None => return Err(\"No more ARC MRU items to free\".to_string()),\n };\n self.mru_map.remove(&last_dva);\n self.mru_used -= last_dva.asize() as usize;\n }\n\n \/\/ Add the block to the cache\n self.mru_used += block.len();\n self.mru_map.insert(*dva, block);\n self.mru_queue.push_front(*dva);\n Ok(self.mru_map.get(dva).unwrap().clone())\n }\n}\n<commit_msg>Added TODOs in ARC for stratact<commit_after>use redox::{Vec, String, ToString};\nuse redox::collections::{BTreeMap, VecDeque};\n\nuse super::dvaddr::DVAddr;\nuse super::zio;\n\n\/\/ Our implementation of the Adaptive Replacement Cache (ARC) is set up to allocate\n\/\/ its buffer on the heap rather than in a private pool thing. This makes it much\n\/\/ simpler to implement, but defers the fragmentation problem to the heap allocator.\n\/\/ We named the type `ArCache` to avoid confusion with Rust's `Arc` reference type.\npub struct ArCache {\n \/\/ MRU\n \/\/ TODO: keep track of use counts. So mru_map becomes (use_count: u64, Vec<u8>)\n mru_map: BTreeMap<DVAddr, Vec<u8>>, \/\/ Most recently used cache\n mru_queue: VecDeque<DVAddr>, \/\/ Oldest DVAddrs are at the end\n mru_size: usize, \/\/ Max mru cache size in bytes\n mru_used: usize, \/\/ Used bytes in mru cache\n\n \/\/ MFU\n \/\/ TODO: Keep track of use counts. So mfu_map becomes (use_count: u64, Vec<u8>). Reset the use\n \/\/ count every once in a while. For instance, every 1000 reads. This will probably end up being\n \/\/ a knob for the user.\n mfu_map: BTreeMap<DVAddr, Vec<u8>>, \/\/ Most frequently used cache\n \/\/ TODO: Keep track of minimum frequency.\n mfu_min_freq: u64,\n mfu_size: usize, \/\/ Max mfu cache size in bytes\n mfu_used: usize, \/\/ Used bytes in mfu cache\n}\n\nimpl ArCache {\n pub fn new() -> Self {\n ArCache {\n mru_map: BTreeMap::new(),\n mru_queue: VecDeque::new(),\n mru_size: 10,\n mru_used: 0,\n\n mfu_map: BTreeMap::new(),\n mfu_size: 10,\n mfu_used: 0,\n }\n }\n\n pub fn read(&mut self, reader: &mut zio::Reader, dva: &DVAddr) -> Result<Vec<u8>, String> {\n if let Some(block) = self.mru_map.get(dva) {\n \/\/ TODO: Keep track of MRU DVA use count. 
If it gets used a second time, move the block into\n \/\/ the MFU cache.\n\n \/\/ Block is cached\n return Ok(block.clone());\n }\n if let Some(block) = self.mfu_map.get(dva) {\n \/\/ TODO: keep track of DVA use count\n \/\/ Block is cached\n return Ok(block.clone());\n }\n\n \/\/ Block isn't cached, have to read it from disk\n let block = reader.read(dva.sector() as usize, dva.asize() as usize);\n self.cache_block(block);\n }\n\n fn mru_cache_block(&mut self, block: Vec<u8>) {\n \/\/ If necessary, make room for the block in the cache\n while self.mru_used + block.len() > self.mru_size {\n let last_dva = match self.mru_queue.pop_back()\n {\n Some(dva) => dva,\n None => return Err(\"No more ARC MRU items to free\".to_string()),\n };\n self.mru_map.remove(&last_dva);\n self.mru_used -= last_dva.asize() as usize;\n }\n\n \/\/ Add the block to the cache\n self.mru_used += block.len();\n self.mru_map.insert(*dva, block);\n self.mru_queue.push_front(*dva);\n Ok(self.mru_map.get(dva).unwrap().clone())\n }\n\n \/\/ TODO: mfu_cache_block\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add cfail test for custom attribute gate<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[foo] \/\/~ ERROR The attribute `foo`\nfn main() {\n\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add lib to examples.<commit_after>extern crate lcm;\n\npub mod exlcm;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(examples): add an example for version negotiation<commit_after>\/\/ Copyright 2017 Dmitry Tantsur <divius.inside@gmail.com>\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate env_logger;\nextern crate hyper;\nextern crate openstack;\n\nconst KILO: openstack::ApiVersion = openstack::ApiVersion(2, 3);\nconst LIBERTY: openstack::ApiVersion = openstack::ApiVersion(2, 12);\nconst MITAKA: openstack::ApiVersion = openstack::ApiVersion(2, 25);\nconst NEWTON: openstack::ApiVersion = openstack::ApiVersion(2, 38);\nconst OCATA: openstack::ApiVersion = openstack::ApiVersion(2, 42);\n\n#[cfg(feature = \"compute\")]\nfn main() {\n env_logger::init().unwrap();\n\n let identity = openstack::auth::Identity::from_env()\n .expect(\"Failed to create an identity provider from the environment\");\n let mut session = openstack::Session::new(identity);\n\n let version_choice = vec![KILO, LIBERTY, MITAKA, NEWTON, OCATA];\n let version = session.negotiate_api_version::<openstack::compute::V2>(\n openstack::ApiVersionRequest::Choice(version_choice)\n ).expect(\"Unable to negotiation any Compute API 
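// --- Illustrative sketch only; std-only types, not the redox/ZFS structures above ---
// One way to realize the TODOs in the ARC snippet: keep a per-block use count on the
// recently-used (MRU) side and promote a block to the frequently-used (MFU) side on its
// second hit. Eviction here counts entries rather than bytes, purely to stay short.
use std::collections::{HashMap, VecDeque};

struct TinyArc {
    mru: HashMap<u64, (u64, Vec<u8>)>, // addr -> (use_count, data), recency side
    mru_queue: VecDeque<u64>,          // eviction order, oldest at the back
    mfu: HashMap<u64, (u64, Vec<u8>)>, // addr -> (use_count, data), frequency side
}

impl TinyArc {
    fn get(&mut self, addr: u64) -> Option<Vec<u8>> {
        if let Some((count, data)) = self.mfu.get_mut(&addr) {
            *count += 1;
            return Some(data.clone());
        }
        if let Some((count, data)) = self.mru.remove(&addr) {
            // Second hit: move the block from the MRU side to the MFU side.
            self.mru_queue.retain(|&a| a != addr);
            self.mfu.insert(addr, (count + 1, data.clone()));
            return Some(data);
        }
        None
    }

    fn put(&mut self, addr: u64, data: Vec<u8>, capacity: usize) {
        // Make room on the MRU side by dropping the oldest entries first.
        while self.mru.len() >= capacity {
            match self.mru_queue.pop_back() {
                Some(oldest) => { self.mru.remove(&oldest); }
                None => break,
            }
        }
        self.mru_queue.push_front(addr);
        self.mru.insert(addr, (1, data));
    }
}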
version\");\n\n match version {\n KILO => println!(\"Kilo API detected\"),\n LIBERTY => println!(\"Liberty API detected\"),\n MITAKA => println!(\"Mitaka API detected\"),\n NEWTON => println!(\"Newton API detected\"),\n OCATA => println!(\"Ocata API detected\"),\n _ => unreachable!()\n }\n openstack::compute::v2::servers(&session).list().fetch()\n .expect(&format!(\"Cannot list servers with API version {}\", version));\n}\n\n#[cfg(not(feature = \"compute\"))]\nfn main() {\n panic!(\"This example cannot run with 'compute' feature disabled\");\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>examples: add simple<commit_after>extern crate rand;\nextern crate pbr;\nuse rand::Rng;\nuse pbr::ProgressBar;\nuse std::thread;\nuse std::time::Duration;\n\nfn main() {\n let count = 500;\n let mut pb = ProgressBar::new(count);\n pb.format(\"╢▌▌░╟\");\n for _ in 0..count {\n pb.inc();\n let n = rand::thread_rng().gen_range(0, 100);\n thread::sleep(Duration::from_millis(n));\n }\n pb.finish_println(\"done!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to problem 33<commit_after>#![feature(zero_one)]\n#[macro_use] extern crate libeuler;\n\nuse std::collections::HashSet;\nuse std::num::One;\nuse std::ops::Mul;\n\/\/\/ The fraction 49\/98 is a curious fraction, as an inexperienced mathematician in attempting to\n\/\/\/ simplify it may incorrectly believe that 49\/98 = 4\/8, which is correct, is obtained by\n\/\/\/ cancelling the 9s.\n\/\/\/\n\/\/\/ We shall consider fractions like, 30\/50 = 3\/5, to be trivial examples.\n\/\/\/\n\/\/\/ There are exactly four non-trivial examples of this type of fraction, less than one in value,\n\/\/\/ and containing two digits in the numerator and denominator.\n\/\/\/\n\/\/\/ If the product of these four fractions is given in its lowest common terms, find the value of\n\/\/\/ the denominator.\nfn main() {\n solutions! 
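// --- Generic illustration, not the openstack crate's API ---
// Negotiation over a fixed list of candidate API versions, as in the example above,
// boils down to picking the highest version both the client and the server support.
fn negotiate(client: &[(u32, u32)], server: &[(u32, u32)]) -> Option<(u32, u32)> {
    client
        .iter()
        .copied()
        .filter(|v| server.contains(v))
        .max() // tuples compare lexicographically, so this is the highest shared version
}

fn main() {
    // Client candidates mirror the constants above (2.3, 2.12, 2.25, 2.38, 2.42);
    // the server-side list is a made-up example.
    let client = [(2, 3), (2, 12), (2, 25), (2, 38), (2, 42)];
    let server = [(2, 1), (2, 12), (2, 25)];
    assert_eq!(Some((2, 25)), negotiate(&client, &server));
}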
{\n sol naive {\n let mut prod = Fraction::one();\n\n for i in 10..99 {\n for j in (i+1)..99 {\n let fr = Fraction::new(i, j);\n if fr.is_curious() {\n prod = prod * fr;\n }\n }\n }\n\n prod\n }\n }\n}\n\n#[derive(Debug, Clone)]\nstruct Fraction {\n numer: i64,\n denom: i64\n}\n\nimpl Fraction {\n fn new(n: i64, d: i64) -> Fraction {\n Fraction {\n numer: n,\n denom: d\n }\n }\n\n fn gcd(a: i64, b: i64) -> i64 {\n if a == 0 || b == 0 {\n 0\n } else if a == b {\n a\n } else if a > b {\n Fraction::gcd(a - b, b)\n } else {\n Fraction::gcd(a, b - a)\n }\n }\n\n fn reduced(&self) -> Fraction {\n let gcd = Fraction::gcd(self.numer, self.denom);\n\n if gcd == 0 {\n Fraction::new(0, 0)\n } else {\n Fraction::new(\n self.numer \/ gcd,\n self.denom \/ gcd)\n }\n }\n\n fn digits(mut a: i64) -> HashSet<u8> {\n let mut r = HashSet::new();\n while a > 0 {\n r.insert((a % 10) as u8);\n a \/= 10;\n }\n\n r\n }\n\n fn common_digits(&self) -> Vec<u8> {\n Fraction::digits(self.numer).intersection(&Fraction::digits(self.denom)).map(|&a| a).collect()\n }\n\n fn is_curious(&self) -> bool {\n for &digit in self.common_digits().iter() {\n if digit != 0 {\n if &Fraction::new(self.numer.remove_digit(digit), self.denom.remove_digit(digit)) == self {\n return true;\n }\n }\n }\n\n false\n }\n}\n\nimpl PartialEq for Fraction {\n fn eq(&self, other: &Fraction) -> bool {\n let a = self.reduced();\n let b = other.reduced();\n\n a.numer == b.numer && a.denom == b.denom\n }\n}\n\nimpl One for Fraction {\n fn one() -> Fraction {\n Fraction::new(1, 1)\n }\n}\n\nimpl Mul for Fraction {\n type Output = Fraction;\n\n fn mul(self, rhs: Fraction) -> Fraction {\n Fraction::new(self.numer * rhs.numer, self.denom * rhs.denom).reduced()\n }\n}\n\ntrait RemoveDigit {\n fn remove_digit(&self, digit: u8) -> Self;\n}\n\nimpl RemoveDigit for i64 {\n fn remove_digit(&self, digit: u8) -> i64 {\n format!(\"{}\", self).chars()\n .filter(|c| format!(\"{}\", c) != format!(\"{}\", digit))\n .collect::<String>()\n .parse().ok().unwrap_or(0)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to problem 35<commit_after>#[macro_use] extern crate libeuler;\nextern crate num;\n\nuse libeuler::SieveOfAtkin;\nuse std::collections::HashSet;\nuse num::traits::PrimInt;\n\n\/\/\/ The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and\n\/\/\/ 719, are themselves prime.\n\/\/\/\n\/\/\/ There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.\n\/\/\/\n\/\/\/ How many circular primes are there below one million?\nfn main() {\n solutions! 
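// --- Quick numeric check of the problem statement above (not part of the solution) ---
// Cross-multiplication confirms the worked example: cancelling the 9s in 49/98 leaves
// 4/8 and the two fractions really are equal, while 30/50 = 3/5 only works through the
// trailing zeros, which is the trivial kind the problem excludes.
fn main() {
    assert_eq!(49 * 8, 98 * 4); // 49/98 == 4/8
    assert_eq!(30 * 5, 50 * 3); // 30/50 == 3/5, but only via the trailing zeros
    println!("49/98 equals 4/8: {}", 49 * 8 == 98 * 4);
}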
{\n sol naive {\n let sieve = SieveOfAtkin::new(10_000_000);\n let mut vals = HashSet::new();\n\n for i in 1..1_000_000 {\n if vals.contains(&i) {\n continue\n }\n\n let rots = rotations(i);\n let valid = rots.iter().all(|&v| sieve.is_prime(v as u64));\n if valid {\n for v in rots.iter() {\n vals.insert(v.clone());\n }\n }\n }\n\n vals.len()\n }\n }\n}\n\nfn rotations(i: i64) -> Vec<i64> {\n let digits = (i as f64).log10().floor() as u32 + 1;\n (0..digits).map(|d| (10.pow(d), 10.pow(digits - d))).map(|(d, dig)| {\n let chop = i % d;\n let bottom = i \/ d;\n bottom + chop * dig\n }).collect()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Shift opcodes - Probably wrong<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More documentation, simplify API<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added count_substr<commit_after>\/\/ Implements http:\/\/rosettacode.org\/wiki\/Count_occurrences_of_a_substring\n\/\/ Author : Rahul Sharma\n\/\/ Github : github.com\/creativcoder\n\nfn count_sub_string(src:&str,target:&str) -> usize {\n let mut buff = src.to_string();\n if buff.contains(target) {\n buff = buff.replace(target,\"\"); \n }\n\n (src.len() - buff.len())\/target.len()\n}\n\nfn main() {\n let text = \"this is three of the four\";\n let sub_str = \"th\";\n println!(\"{:?}\",count_sub_string(text,sub_str));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Heavily modified string parsing funcion<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>* just be treated as column * to simplify the query struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactored Endpoint objects away from relay..<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Mutate widgets inside a callback closure<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start defining integration tests for trait impls.<commit_after>#[macro_use(owned_tree)]\nextern crate entmut;\n\nuse ::entmut::Nav;\nuse ::entmut::owned::Tree;\nuse std::borrow::Borrow;\n\n\/\/ This will define macros for generalized tests of Nav and Editor impls.\n\/\/ #[cfg(test)]\n\/\/ mod template;\n\n#[test]\n#[allow(unused_variables)]\nfn nav_instantiation() {\n let t = owned_tree![\"a\"];\n let v = t.view();\n}\n\n#[test]\nfn nav_preserves_leaf_topology() {\n let t = owned_tree![\"a\"];\n let v = t.view();\n assert![v.at_leaf()];\n assert![v.at_root()];\n assert_eq![0, v.child_count()];\n}\n\n\/\/ #[test]\n\/\/ fn nav_preserves_leaf_data() {\n\/\/ let t = owned_tree![\"a\"];\n\/\/ let t_data_view = &t.data;\n\/\/ let v = t.view();\n\/\/ let data: &str = v.borrow();\n\/\/ assert_eq![\"a\", data];\n\/\/ assert_eq![t_data_view, data];\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added library tests.<commit_after>extern crate exempi;\n\nuse exempi::*;\nuse exempi::xmpstring::XmpString;\nuse exempi::xmp::Xmp;\n\n#[test]\nfn libary_tests() {\n assert!(exempi::init());\n\n assert!(exempi::get_error() == 0);\n\n \/\/ namespace registration tests.\n let mut prefix = XmpString::new();\n assert!(!prefix.is_null());\n\n assert!(exempi::register_namespace(\"http:\/\/rust.figuiere.net\/ns\/rust\/\",\n \"rust\", &mut prefix));\n assert!(exempi::get_error() == 0);\n assert!(prefix.to_str() != \"\");\n let mut prefix2 = XmpString::new();\n assert!(exempi::namespace_prefix(\"http:\/\/rust.figuiere.net\/ns\/rust\/\",\n &mut prefix2));\n assert!(exempi::get_error() == 0);\n assert!(prefix2 == prefix);\n\n let mut ns = XmpString::new();\n 
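// --- Hedged alternative to the count_substr snippet above, not a replacement for it ---
// The replace-and-measure approach works, but `str::matches` already yields the
// non-overlapping occurrences directly, so the count can be taken without allocating.
fn count_sub_string(src: &str, target: &str) -> usize {
    src.matches(target).count()
}

fn main() {
    assert_eq!(3, count_sub_string("the three truths", "th"));
    assert_eq!(2, count_sub_string("ababababab", "abab"));
    println!("{}", count_sub_string("this is three of the four", "th"));
}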
assert!(exempi::prefix_namespace(prefix.to_str(), &mut ns));\n assert!(exempi::get_error() == 0);\n assert!(ns.to_str() == \"http:\/\/rust.figuiere.net\/ns\/rust\/\");\n\n let mut xmpblock = Xmp::new();\n assert!(!xmpblock.is_null());\n\n assert!(!xmpblock.has_property(\"http:\/\/rust.figuiere.net\/ns\/rust\/\", \"test\"));\n assert!(xmpblock.set_property(\"http:\/\/rust.figuiere.net\/ns\/rust\/\", \"test\",\n \"foobar\", 0));\n assert!(xmpblock.has_property(\"http:\/\/rust.figuiere.net\/ns\/rust\/\", \"test\"));\n let mut value = XmpString::new();\n let mut optionbits: u32 = 0;\n assert!(xmpblock.get_property(\"http:\/\/rust.figuiere.net\/ns\/rust\/\", \"test\",\n &mut value, &mut optionbits));\n assert!(value.to_str() == \"foobar\");\n assert!(optionbits == 0);\n\n let mut buffer = XmpString::new();\n \/\/ XXX we should use the constants... that we need to define.\n assert!(xmpblock.serialize(&mut buffer, 0x50, 0));\n println!(\"{}\", buffer.to_str());\n\n exempi::terminate();\n}\n<|endoftext|>"} {"text":"<commit_before>\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Arcterus <arcterus@mail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n#![macro_escape]\n\nextern crate libc;\n\n#[macro_export]\nmacro_rules! show_error(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: error: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! show_warning(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: warning: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! show_info(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! eprint(\n ($($args:expr),+) => (pipe_write!(&mut ::std::io::stderr(), $($args),+))\n)\n\n#[macro_export]\nmacro_rules! eprintln(\n ($($args:expr),+) => (pipe_writeln!(&mut ::std::io::stderr(), $($args),+))\n)\n\n#[macro_export]\nmacro_rules! crash(\n ($exitcode:expr, $($args:expr),+) => ({\n show_error!($($args),+);\n unsafe { ::util::libc::exit($exitcode as ::util::libc::c_int); }\n })\n)\n\n#[macro_export]\nmacro_rules! exit(\n ($exitcode:expr) => ({\n unsafe { ::util::libc::exit($exitcode); }\n })\n)\n\n#[macro_export]\nmacro_rules! crash_if_err(\n ($exitcode:expr, $exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => crash!($exitcode, \"{}\", f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! return_if_err(\n ($exitcode:expr, $exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => {\n show_error!(\"{}\", f);\n return $exitcode;\n }\n }\n )\n)\n\n\/\/ XXX: should the pipe_* macros return an Err just to show the write failed?\n\n#[macro_export]\nmacro_rules! pipe_print(\n ($($args:expr),+) => (\n match write!(&mut ::std::io::stdout() as &mut Writer, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! pipe_println(\n ($($args:expr),+) => (\n match writeln!(&mut ::std::io::stdout() as &mut Writer, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! 
pipe_write(\n ($fd:expr, $($args:expr),+) => (\n match write!($fd, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! pipe_writeln(\n ($fd:expr, $($args:expr),+) => (\n match write!($fd, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_write(\n ($fd:expr, $($args:expr),+) => (\n match write!($fd, $($args),+) {\n Ok(_) => {}\n Err(f) => fail!(f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_writeln(\n ($fd:expr, $($args:expr),+) => (\n match writeln!($fd, $($args),+) {\n Ok(_) => {}\n Err(f) => fail!(f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_unwrap(\n ($exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => crash!(1, \"{}\", f.to_string())\n }\n )\n)\n<commit_msg>common\/util: make pipe_writeln! use writeln! instead of write!<commit_after>\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Arcterus <arcterus@mail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n#![macro_escape]\n\nextern crate libc;\n\n#[macro_export]\nmacro_rules! show_error(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: error: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! show_warning(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: warning: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! show_info(\n ($($args:expr),+) => ({\n pipe_write!(&mut ::std::io::stderr(), \"{}: \", ::NAME);\n pipe_writeln!(&mut ::std::io::stderr(), $($args),+);\n })\n)\n\n#[macro_export]\nmacro_rules! eprint(\n ($($args:expr),+) => (pipe_write!(&mut ::std::io::stderr(), $($args),+))\n)\n\n#[macro_export]\nmacro_rules! eprintln(\n ($($args:expr),+) => (pipe_writeln!(&mut ::std::io::stderr(), $($args),+))\n)\n\n#[macro_export]\nmacro_rules! crash(\n ($exitcode:expr, $($args:expr),+) => ({\n show_error!($($args),+);\n unsafe { ::util::libc::exit($exitcode as ::util::libc::c_int); }\n })\n)\n\n#[macro_export]\nmacro_rules! exit(\n ($exitcode:expr) => ({\n unsafe { ::util::libc::exit($exitcode); }\n })\n)\n\n#[macro_export]\nmacro_rules! crash_if_err(\n ($exitcode:expr, $exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => crash!($exitcode, \"{}\", f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! return_if_err(\n ($exitcode:expr, $exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => {\n show_error!(\"{}\", f);\n return $exitcode;\n }\n }\n )\n)\n\n\/\/ XXX: should the pipe_* macros return an Err just to show the write failed?\n\n#[macro_export]\nmacro_rules! pipe_print(\n ($($args:expr),+) => (\n match write!(&mut ::std::io::stdout() as &mut Writer, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! pipe_println(\n ($($args:expr),+) => (\n match writeln!(&mut ::std::io::stdout() as &mut Writer, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! 
pipe_write(\n ($fd:expr, $($args:expr),+) => (\n match write!($fd, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! pipe_writeln(\n ($fd:expr, $($args:expr),+) => (\n match writeln!($fd, $($args),+) {\n Ok(_) => true,\n Err(f) => {\n if f.kind == ::std::io::BrokenPipe {\n false\n } else {\n fail!(\"{}\", f)\n }\n }\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_write(\n ($fd:expr, $($args:expr),+) => (\n match write!($fd, $($args),+) {\n Ok(_) => {}\n Err(f) => fail!(f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_writeln(\n ($fd:expr, $($args:expr),+) => (\n match writeln!($fd, $($args),+) {\n Ok(_) => {}\n Err(f) => fail!(f.to_string())\n }\n )\n)\n\n#[macro_export]\nmacro_rules! safe_unwrap(\n ($exp:expr) => (\n match $exp {\n Ok(m) => m,\n Err(f) => crash!(1, \"{}\", f.to_string())\n }\n )\n)\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added dataflow example<commit_after>extern crate rand;\nextern crate time;\nextern crate timely;\nextern crate differential_dataflow;\n\nuse std::hash::Hash;\n\nuse timely::dataflow::*;\nuse timely::dataflow::operators::*;\nuse timely::progress::timestamp::RootTimestamp;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse differential_dataflow::Collection;\nuse differential_dataflow::operators::*;\nuse differential_dataflow::operators::join::JoinUnsigned;\nuse differential_dataflow::operators::group::GroupUnsigned;\nuse differential_dataflow::collection::LeastUpperBound;\n\ntype Node = usize;\ntype Edge = (Node, Node);\n\nfn main() {\n\n let users: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n let topics: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n let inspect: bool = std::env::args().nth(4).unwrap() == \"inspect\";\n\n \/\/ define a new computational scope, in which to run BFS\n timely::execute_from_args(std::env::args().skip(5), move |computation| {\n let start = time::precise_time_s();\n\n \/\/ define BFS dataflow; return handles to roots and edges inputs\n let (mut tweets, mut queries, probe) = computation.scoped(|scope| {\n\n \/\/ entries corresponding to (@username, @mention, #topic), but a (u32, u32, u32) instead.\n let (tweet_input, tweets) = scope.new_input(); \n let tweets = Collection::new(tweets);\n\n \/\/ determine connected components based on mentions.\n let labels = connected_components(&tweets.map(|(u,m,_)| (u,m)));\n\n \/\/ produce pairs (label, topic) for each topic.\n let label_topics = tweets.map(|(u,_,t)| (u,t))\n .join(&labels)\n .map(|(_,t,l)| (l,t));\n\n \/\/ group by (l,t) and emit a count for each.\n let counts = label_topics.map(|x| (x,()))\n .group(|_,s,t| {\n t.push((s.next().unwrap().1, 1))\n });\n\n let k = 5;\n \/\/ retain the k largest counts. 
negate first to exploit ordering.\n let topk = counts.map(|((l,t), c)| (l, (-c, t)))\n .group(move |_,s,t| {\n t.extend(s.take(k).map(|(&(_,t),_)| (t,1)));\n });\n\n \/\/ entries corresponding to a @username, but a u32 instead.\n let (query_input, queries) = scope.new_input(); \n let queries = Collection::new(queries);\n\n let label_query = queries.map(|q| (q,()))\n .join(&labels)\n .map(|(q, _, l)| (l,q));\n\n let mut query_topics = label_query.join(&topk);\n\n if !inspect {\n query_topics = query_topics.filter(|_| false);\n }\n\n let probe = query_topics.consolidate_by(|&(_,q,_)| q)\n .inspect(|&((l,q,t),w)| println!(\"\\t(query: {},\\tlabel: {},\\ttopic:{}\\t(weight: {})\", q, l, t, w))\n .probe();\n\n (tweet_input, query_input, probe.0)\n });\n\n let tweet_seed: &[_] = &[0, 1, 2, computation.index()];\n let mut tweet_rng1: StdRng = SeedableRng::from_seed(tweet_seed); \/\/ rng for edge additions\n let mut tweet_rng2: StdRng = SeedableRng::from_seed(tweet_seed); \/\/ rng for edge deletions\n\n let query_seed: &[_] = &[1, 2, 3, computation.index()];\n let mut query_rng1: StdRng = SeedableRng::from_seed(query_seed); \/\/ rng for edge additions\n let mut query_rng2: StdRng = SeedableRng::from_seed(query_seed); \/\/ rng for edge deletions\n\n println!(\"performing AppealingDataflow with {} users, {} topics:\", users, topics);\n\n for _ in 0 .. users\/computation.peers() {\n tweets.send(((tweet_rng1.gen_range(0, users), \n tweet_rng1.gen_range(0, users),\n tweet_rng1.gen_range(0, topics)),1));\n } \n\n if computation.index() == 0 {\n queries.send((query_rng1.gen_range(0, users),1));\n }\n\n println!(\"loaded; elapsed: {}s\", time::precise_time_s() - start);\n\n tweets.advance_to(1);\n queries.advance_to(1);\n while probe.le(&RootTimestamp::new(0)) { computation.step(); }\n\n println!(\"stable; elapsed: {}s\", time::precise_time_s() - start);\n\n if batch > 0 {\n let mut changes = Vec::new();\n for wave in 0.. {\n let mut batch = batch \/ computation.peers();\n if computation.index() < batch % computation.peers() { \n batch += 1; \n }\n\n for _ in 0..batch {\n changes.push(((tweet_rng1.gen_range(0, users), \n tweet_rng1.gen_range(0, users),\n tweet_rng1.gen_range(0, topics)), 1));\n changes.push(((tweet_rng2.gen_range(0, users), \n tweet_rng2.gen_range(0, users),\n tweet_rng2.gen_range(0, topics)),-1));\n }\n\n\n let start = time::precise_time_s();\n let round = *tweets.epoch();\n for change in changes.drain(..) 
{\n tweets.send(change);\n }\n if computation.index() == 0 {\n queries.send((query_rng1.gen_range(0, users), 1));\n queries.send((query_rng2.gen_range(0, users),-1));\n }\n\n tweets.advance_to(round + 1);\n queries.advance_to(round + 1);\n\n while probe.le(&RootTimestamp::new(round)) { computation.step(); }\n\n if computation.index() == 0 {\n println!(\"wave {}: avg {}\", wave, (time::precise_time_s() - start) \/ (batch as f64));\n }\n }\n }\n });\n}\n\nfn connected_components<G: Scope>(edges: &Collection<G, Edge>) -> Collection<G, (Node, Node)>\n where G::Timestamp: LeastUpperBound+Hash {\n\n \/\/ each edge (x,y) means that we need at least a label for the min of x and y.\n let nodes = edges.map_in_place(|pair| {\n let min = std::cmp::min(pair.0, pair.1);\n *pair = (min, min);\n })\n .consolidate_by(|x| x.0);\n\n \/\/ each edge should exist in both directions.\n let edges = edges.map_in_place(|x| ::std::mem::swap(&mut x.0, &mut x.1))\n .concat(&edges);\n\n \/\/ don't actually use these labels, just grab the type\n nodes.filter(|_| false)\n .iterate(|inner| {\n let edges = edges.enter(&inner.scope());\n let nodes = nodes.enter_at(&inner.scope(), |r| 256 * (64 - (r.0).0.leading_zeros() as u64));\n\n inner.join_map_u(&edges, |_k,l,d| (*d,*l))\n .concat(&nodes)\n .group_u(|_, s, t| { t.push((*s.peek().unwrap().0, 1)); } )\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add die-hard example.<commit_after>extern crate suppositions;\nuse std::cmp::min;\nuse suppositions::*;\nuse suppositions::generators::*;\n\n\n\/\/ Run this with:\n\/\/ ```\n\/\/ cargo run --example die-hard\n\/\/ ```\n\/\/ You should see output of the form:\n\/\/ ```\n\/\/ thread 'main' panicked at 'Predicate failed for argument\n\/\/ Ok([FillBigJug, BigToSmall, EmptySmallJug, BigToSmall, FillBigJug, BigToSmall]);\n\/\/ check returned Ok(Err(State { big: 4, small: 3 }))', src\/properties.rs:56:24\n\/\/ ```\n\n\n#[derive(Debug, Clone)]\npub enum Op {\n FillSmallJug,\n FillBigJug,\n EmptySmallJug,\n EmptyBigJug,\n SmallToBig,\n BigToSmall,\n}\n\n#[derive(Debug, Default, Clone)]\npub struct State {\n big: usize,\n small: usize,\n}\n\nimpl State {\n fn apply(&mut self, op: &Op) {\n match op {\n &Op::FillSmallJug => {\n self.small = 3;\n }\n &Op::FillBigJug => self.big = 5,\n &Op::EmptySmallJug => self.small = 0,\n &Op::EmptyBigJug => self.big = 0,\n &Op::SmallToBig => {\n let old = self.clone();\n self.big = min(old.big + self.small, 5);\n self.small -= self.big - old.big\n }\n\n &Op::BigToSmall => {\n let old = self.clone();\n self.small = min(old.big + self.small, 3);\n self.big -= self.small - old.small\n }\n }\n }\n\n fn assert_invariants(&self) {\n assert!(self.big <= 5);\n assert!(self.small <= 3);\n }\n fn finished(&self) -> bool {\n self.big == 4\n }\n}\n\nfn ops() -> Box<Generator<Item = Op>> {\n let g = one_of(consts(Op::FillSmallJug))\n .or(consts(Op::FillBigJug))\n .or(consts(Op::EmptySmallJug))\n .or(consts(Op::EmptyBigJug))\n .or(consts(Op::SmallToBig))\n .or(consts(Op::BigToSmall));\n Box::new(g)\n}\n\nfn main() {\n property(vecs(ops()).mean_length(1000)).check(|xs| {\n let mut sts = Vec::new();\n let mut st = State::default();\n for o in xs.iter() {\n st.apply(o);\n sts.push((o.clone(), st.clone()));\n st.assert_invariants();\n if st.finished() {\n return Err(st);\n }\n }\n return Ok(());\n });\n\n panic!(\"No solution found\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #34460 - dsprenkels:issue-33455, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project 
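// --- Sketch of a hand check for the die-hard example above ---
// It assumes the `State` and `Op` definitions from that example are in scope (e.g. as a
// unit test in the same file) and simply replays the operation sequence reported in its
// header comment, ending with the big jug holding 4 units, the property being searched for.
#[test]
fn reported_sequence_reaches_four() {
    let ops = [
        Op::FillBigJug,
        Op::BigToSmall,
        Op::EmptySmallJug,
        Op::BigToSmall,
        Op::FillBigJug,
        Op::BigToSmall,
    ];
    let mut st = State::default();
    for op in ops.iter() {
        st.apply(op);
        st.assert_invariants();
    }
    // (0,0) -> (5,0) -> (2,3) -> (2,0) -> (0,2) -> (5,2) -> (4,3)
    assert!(st.finished());
    assert_eq!(4, st.big);
}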
Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse foo.bar; \/\/~ ERROR expected one of `::`, `;`, or `as`, found `.`\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test with implemented bits of issue #570.<commit_after>\/\/ Issue #570\n\nconst lsl : int = 1 << 2;\nconst add : int = 1 + 2;\nconst addf : float = 1.0f + 2.0f;\nconst not : int = !0;\nconst notb : bool = !true;\nconst neg : int = -(1);\n\nfn main() {\n assert(lsl == 4);\n assert(add == 3);\n assert(addf == 3.0f);\n assert(not == -1);\n assert(notb == false);\n assert(neg == -1);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Metrics\n\nuse std::sync::Arc;\n\nmod counter;\nmod gauge;\nmod meter;\n\npub use self::counter::{Counter, CounterSnapshot, StdCounter};\npub use self::gauge::{Gauge, GaugeSnapshot, StdGauge};\npub use self::meter::{Meter, MeterSnapshot, StdMeter};\n\n\/\/\/ a Metric\nuse histogram::Histogram;\n\n#[allow(missing_docs)]\npub enum Metric {\n Counter(Arc<Counter>),\n Gauge(Arc<Gauge>),\n Meter(Arc<Meter>),\n Histogram(Histogram),\n}\n<commit_msg>Derive Clone for Metric<commit_after>\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Metrics\n\nuse std::sync::Arc;\n\nmod counter;\nmod gauge;\nmod meter;\n\npub use self::counter::{Counter, CounterSnapshot, StdCounter};\npub use self::gauge::{Gauge, GaugeSnapshot, StdGauge};\npub use self::meter::{Meter, MeterSnapshot, StdMeter};\n\n\/\/\/ a Metric\nuse histogram::Histogram;\n\n#[allow(missing_docs)]\n#[derive(Clone)]\npub enum Metric {\n Counter(Arc<Counter>),\n Gauge(Arc<Gauge>),\n Meter(Arc<Meter>),\n Histogram(Histogram),\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>generic float added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make generate_variants() more flexible<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fs;\nuse std::path::{PathBuf, Path};\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse build::{Build, Compiler};\n\npub fn linkcheck(build: &Build, stage: u32, host: &str) {\n println!(\"Linkcheck stage{} ({})\", stage, host);\n let compiler = Compiler::new(stage, host);\n build.run(build.tool_cmd(&compiler, \"linkchecker\")\n .arg(build.out.join(host).join(\"doc\")));\n}\n\npub fn cargotest(build: &Build, stage: u32, host: &str) {\n\n let ref compiler = Compiler::new(stage, host);\n\n \/\/ Configure PATH to find the right rustc. NB. we have to use PATH\n \/\/ and not RUSTC because the Cargo test suite has tests that will\n \/\/ fail if rustc is not spelled `rustc`.\n let path = build.sysroot(compiler).join(\"bin\");\n let old_path = ::std::env::var(\"PATH\").expect(\"\");\n let sep = if cfg!(windows) { \";\" } else {\":\" };\n let ref newpath = format!(\"{}{}{}\", path.display(), sep, old_path);\n\n \/\/ Note that this is a short, cryptic, and not scoped directory name. This\n \/\/ is currently to minimize the length of path on Windows where we otherwise\n \/\/ quickly run into path name limit constraints.\n let out_dir = build.out.join(\"ct\");\n t!(fs::create_dir_all(&out_dir));\n\n build.run(build.tool_cmd(compiler, \"cargotest\")\n .env(\"PATH\", newpath)\n .arg(&build.cargo)\n .arg(&out_dir));\n}\n\npub fn tidy(build: &Build, stage: u32, host: &str) {\n println!(\"tidy check stage{} ({})\", stage, host);\n let compiler = Compiler::new(stage, host);\n build.run(build.tool_cmd(&compiler, \"tidy\")\n .arg(build.src.join(\"src\")));\n}\n\nfn testdir(build: &Build, host: &str) -> PathBuf {\n build.out.join(host).join(\"test\")\n}\n\npub fn compiletest(build: &Build,\n compiler: &Compiler,\n target: &str,\n mode: &str,\n suite: &str) {\n let mut cmd = build.tool_cmd(compiler, \"compiletest\");\n\n cmd.arg(\"--compile-lib-path\").arg(build.rustc_libdir(compiler));\n cmd.arg(\"--run-lib-path\").arg(build.sysroot_libdir(compiler, target));\n cmd.arg(\"--rustc-path\").arg(build.compiler_path(compiler));\n cmd.arg(\"--rustdoc-path\").arg(build.rustdoc(compiler));\n cmd.arg(\"--src-base\").arg(build.src.join(\"src\/test\").join(suite));\n cmd.arg(\"--aux-base\").arg(build.src.join(\"src\/test\/auxiliary\"));\n cmd.arg(\"--build-base\").arg(testdir(build, compiler.host).join(suite));\n cmd.arg(\"--stage-id\").arg(format!(\"stage{}-{}\", compiler.stage, target));\n cmd.arg(\"--mode\").arg(mode);\n cmd.arg(\"--target\").arg(target);\n cmd.arg(\"--host\").arg(compiler.host);\n cmd.arg(\"--llvm-filecheck\").arg(build.llvm_filecheck(&build.config.build));\n\n let linkflag = format!(\"-Lnative={}\", build.test_helpers_out(target).display());\n cmd.arg(\"--host-rustcflags\").arg(\"-Crpath\");\n cmd.arg(\"--target-rustcflags\").arg(format!(\"-Crpath {}\", linkflag));\n\n \/\/ FIXME: needs android support\n cmd.arg(\"--android-cross-path\").arg(\"\");\n\n \/\/ FIXME: CFG_PYTHON should probably be detected more robustly elsewhere\n let python_default = \"python\";\n cmd.arg(\"--docck-python\").arg(python_default);\n\n if build.config.build.ends_with(\"apple-darwin\") {\n \/\/ Force \/usr\/bin\/python on OSX for LLDB tests because we're loading the\n \/\/ LLDB plugin's compiled module which only works with the system python\n \/\/ (namely not Homebrew-installed python)\n cmd.arg(\"--lldb-python\").arg(\"\/usr\/bin\/python\");\n } else {\n cmd.arg(\"--lldb-python\").arg(python_default);\n 
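// --- Illustrative alternative (not the rustbuild code itself) ---
// The cargotest step above builds the new PATH by hand, picking ";" or ":" per platform.
// std::env can do the separator handling instead; this sketch prepends a directory the
// same way the comment above describes, with a made-up path purely as an example.
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;

fn prepend_to_path(dir: PathBuf) -> OsString {
    let old = env::var_os("PATH").unwrap_or_default();
    let mut parts = vec![dir];
    parts.extend(env::split_paths(&old));
    env::join_paths(parts).expect("PATH entry contained an invalid character")
}

fn main() {
    let new_path = prepend_to_path(PathBuf::from("/tmp/sysroot/bin"));
    println!("{}", new_path.to_string_lossy());
}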
}\n\n if let Some(ref vers) = build.gdb_version {\n cmd.arg(\"--gdb-version\").arg(vers);\n }\n if let Some(ref vers) = build.lldb_version {\n cmd.arg(\"--lldb-version\").arg(vers);\n }\n if let Some(ref dir) = build.lldb_python_dir {\n cmd.arg(\"--lldb-python-dir\").arg(dir);\n }\n\n cmd.args(&build.flags.args);\n\n if build.config.verbose || build.flags.verbose {\n cmd.arg(\"--verbose\");\n }\n\n if suite == \"run-make\" {\n let llvm_config = build.llvm_config(target);\n let llvm_components = output(Command::new(&llvm_config).arg(\"--components\"));\n let llvm_cxxflags = output(Command::new(&llvm_config).arg(\"--cxxflags\"));\n cmd.arg(\"--cc\").arg(build.cc(target))\n .arg(\"--cxx\").arg(build.cxx(target))\n .arg(\"--cflags\").arg(build.cflags(target).join(\" \"))\n .arg(\"--llvm-components\").arg(llvm_components.trim())\n .arg(\"--llvm-cxxflags\").arg(llvm_cxxflags.trim());\n } else {\n cmd.arg(\"--cc\").arg(\"\")\n .arg(\"--cxx\").arg(\"\")\n .arg(\"--cflags\").arg(\"\")\n .arg(\"--llvm-components\").arg(\"\")\n .arg(\"--llvm-cxxflags\").arg(\"\");\n }\n\n \/\/ Running a C compiler on MSVC requires a few env vars to be set, to be\n \/\/ sure to set them here.\n if target.contains(\"msvc\") {\n for &(ref k, ref v) in build.cc[target].0.env() {\n if k != \"PATH\" {\n cmd.env(k, v);\n }\n }\n }\n\n build.run(&mut cmd);\n}\n\npub fn docs(build: &Build, compiler: &Compiler) {\n let mut stack = vec![build.src.join(\"src\/doc\")];\n\n while let Some(p) = stack.pop() {\n if p.is_dir() {\n stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));\n continue\n }\n\n if p.extension().and_then(|s| s.to_str()) != Some(\"md\") {\n continue\n }\n\n println!(\"doc tests for: {}\", p.display());\n markdown_test(build, compiler, &p);\n }\n}\n\npub fn error_index(build: &Build, compiler: &Compiler) {\n println!(\"Testing error-index stage{}\", compiler.stage);\n\n let output = testdir(build, compiler.host).join(\"error-index.md\");\n build.run(build.tool_cmd(compiler, \"error_index_generator\")\n .arg(\"markdown\")\n .arg(&output)\n .env(\"CFG_BUILD\", &build.config.build));\n\n markdown_test(build, compiler, &output);\n}\n\nfn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {\n let mut cmd = Command::new(build.rustdoc(compiler));\n build.add_rustc_lib_path(compiler, &mut cmd);\n cmd.arg(\"--test\");\n cmd.arg(markdown);\n cmd.arg(\"--test-args\").arg(build.flags.args.join(\" \"));\n build.run(&mut cmd);\n}\n<commit_msg>remove unused aux-base argument<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fs;\nuse std::path::{PathBuf, Path};\nuse std::process::Command;\n\nuse build_helper::output;\n\nuse build::{Build, Compiler};\n\npub fn linkcheck(build: &Build, stage: u32, host: &str) {\n println!(\"Linkcheck stage{} ({})\", stage, host);\n let compiler = Compiler::new(stage, host);\n build.run(build.tool_cmd(&compiler, \"linkchecker\")\n .arg(build.out.join(host).join(\"doc\")));\n}\n\npub fn cargotest(build: &Build, stage: u32, host: &str) {\n\n let ref compiler = Compiler::new(stage, host);\n\n \/\/ Configure PATH to find the right rustc. NB. we have to use PATH\n \/\/ and not RUSTC because the Cargo test suite has tests that will\n \/\/ fail if rustc is not spelled `rustc`.\n let path = build.sysroot(compiler).join(\"bin\");\n let old_path = ::std::env::var(\"PATH\").expect(\"\");\n let sep = if cfg!(windows) { \";\" } else {\":\" };\n let ref newpath = format!(\"{}{}{}\", path.display(), sep, old_path);\n\n \/\/ Note that this is a short, cryptic, and not scoped directory name. This\n \/\/ is currently to minimize the length of path on Windows where we otherwise\n \/\/ quickly run into path name limit constraints.\n let out_dir = build.out.join(\"ct\");\n t!(fs::create_dir_all(&out_dir));\n\n build.run(build.tool_cmd(compiler, \"cargotest\")\n .env(\"PATH\", newpath)\n .arg(&build.cargo)\n .arg(&out_dir));\n}\n\npub fn tidy(build: &Build, stage: u32, host: &str) {\n println!(\"tidy check stage{} ({})\", stage, host);\n let compiler = Compiler::new(stage, host);\n build.run(build.tool_cmd(&compiler, \"tidy\")\n .arg(build.src.join(\"src\")));\n}\n\nfn testdir(build: &Build, host: &str) -> PathBuf {\n build.out.join(host).join(\"test\")\n}\n\npub fn compiletest(build: &Build,\n compiler: &Compiler,\n target: &str,\n mode: &str,\n suite: &str) {\n let mut cmd = build.tool_cmd(compiler, \"compiletest\");\n\n cmd.arg(\"--compile-lib-path\").arg(build.rustc_libdir(compiler));\n cmd.arg(\"--run-lib-path\").arg(build.sysroot_libdir(compiler, target));\n cmd.arg(\"--rustc-path\").arg(build.compiler_path(compiler));\n cmd.arg(\"--rustdoc-path\").arg(build.rustdoc(compiler));\n cmd.arg(\"--src-base\").arg(build.src.join(\"src\/test\").join(suite));\n cmd.arg(\"--build-base\").arg(testdir(build, compiler.host).join(suite));\n cmd.arg(\"--stage-id\").arg(format!(\"stage{}-{}\", compiler.stage, target));\n cmd.arg(\"--mode\").arg(mode);\n cmd.arg(\"--target\").arg(target);\n cmd.arg(\"--host\").arg(compiler.host);\n cmd.arg(\"--llvm-filecheck\").arg(build.llvm_filecheck(&build.config.build));\n\n let linkflag = format!(\"-Lnative={}\", build.test_helpers_out(target).display());\n cmd.arg(\"--host-rustcflags\").arg(\"-Crpath\");\n cmd.arg(\"--target-rustcflags\").arg(format!(\"-Crpath {}\", linkflag));\n\n \/\/ FIXME: needs android support\n cmd.arg(\"--android-cross-path\").arg(\"\");\n\n \/\/ FIXME: CFG_PYTHON should probably be detected more robustly elsewhere\n let python_default = \"python\";\n cmd.arg(\"--docck-python\").arg(python_default);\n\n if build.config.build.ends_with(\"apple-darwin\") {\n \/\/ Force \/usr\/bin\/python on OSX for LLDB tests because we're loading the\n \/\/ LLDB plugin's compiled module which only works with the system python\n \/\/ (namely not Homebrew-installed python)\n cmd.arg(\"--lldb-python\").arg(\"\/usr\/bin\/python\");\n } else {\n cmd.arg(\"--lldb-python\").arg(python_default);\n }\n\n if let Some(ref vers) = build.gdb_version {\n 
cmd.arg(\"--gdb-version\").arg(vers);\n }\n if let Some(ref vers) = build.lldb_version {\n cmd.arg(\"--lldb-version\").arg(vers);\n }\n if let Some(ref dir) = build.lldb_python_dir {\n cmd.arg(\"--lldb-python-dir\").arg(dir);\n }\n\n cmd.args(&build.flags.args);\n\n if build.config.verbose || build.flags.verbose {\n cmd.arg(\"--verbose\");\n }\n\n if suite == \"run-make\" {\n let llvm_config = build.llvm_config(target);\n let llvm_components = output(Command::new(&llvm_config).arg(\"--components\"));\n let llvm_cxxflags = output(Command::new(&llvm_config).arg(\"--cxxflags\"));\n cmd.arg(\"--cc\").arg(build.cc(target))\n .arg(\"--cxx\").arg(build.cxx(target))\n .arg(\"--cflags\").arg(build.cflags(target).join(\" \"))\n .arg(\"--llvm-components\").arg(llvm_components.trim())\n .arg(\"--llvm-cxxflags\").arg(llvm_cxxflags.trim());\n } else {\n cmd.arg(\"--cc\").arg(\"\")\n .arg(\"--cxx\").arg(\"\")\n .arg(\"--cflags\").arg(\"\")\n .arg(\"--llvm-components\").arg(\"\")\n .arg(\"--llvm-cxxflags\").arg(\"\");\n }\n\n \/\/ Running a C compiler on MSVC requires a few env vars to be set, to be\n \/\/ sure to set them here.\n if target.contains(\"msvc\") {\n for &(ref k, ref v) in build.cc[target].0.env() {\n if k != \"PATH\" {\n cmd.env(k, v);\n }\n }\n }\n\n build.run(&mut cmd);\n}\n\npub fn docs(build: &Build, compiler: &Compiler) {\n let mut stack = vec![build.src.join(\"src\/doc\")];\n\n while let Some(p) = stack.pop() {\n if p.is_dir() {\n stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));\n continue\n }\n\n if p.extension().and_then(|s| s.to_str()) != Some(\"md\") {\n continue\n }\n\n println!(\"doc tests for: {}\", p.display());\n markdown_test(build, compiler, &p);\n }\n}\n\npub fn error_index(build: &Build, compiler: &Compiler) {\n println!(\"Testing error-index stage{}\", compiler.stage);\n\n let output = testdir(build, compiler.host).join(\"error-index.md\");\n build.run(build.tool_cmd(compiler, \"error_index_generator\")\n .arg(\"markdown\")\n .arg(&output)\n .env(\"CFG_BUILD\", &build.config.build));\n\n markdown_test(build, compiler, &output);\n}\n\nfn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {\n let mut cmd = Command::new(build.rustdoc(compiler));\n build.add_rustc_lib_path(compiler, &mut cmd);\n cmd.arg(\"--test\");\n cmd.arg(markdown);\n cmd.arg(\"--test-args\").arg(build.flags.args.join(\" \"));\n build.run(&mut cmd);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Scheduler API changed allowing to peek next action<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>:art: Refactor the return value for the method of fetch_completed_tasks<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
Graphics pipeline descriptor.\n\nuse {state as s};\nuse super::EntryPoint;\nuse super::input_assembler::{AttributeDesc, InputAssemblerDesc, VertexBufferDesc};\nuse super::output_merger::{ColorInfo, DepthStencilDesc};\n\n\/\/ Vulkan:\n\/\/ - SpecializationInfo not provided per shader\n\/\/ - TODO: infer rasterization discard from shaders?\n\/\/\n\/\/ D3D12:\n\/\/ - rootSignature specified outside\n\/\/ - logicOp can be set for each RTV\n\/\/ - streamOutput not included\n\/\/ - IA: semantic name and index extracted from shader reflection\n\n\/\/\/ A complete set of shaders to build a graphics pipeline.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub struct GraphicsShaderSet {\n \/\/\/\n pub vertex_shader: EntryPoint,\n \/\/\/\n pub hull_shader: Option<EntryPoint>,\n \/\/\/\n pub domain_shader: Option<EntryPoint>,\n \/\/\/\n pub geometry_shader: Option<EntryPoint>,\n \/\/\/\n pub pixel_shader: Option<EntryPoint>,\n}\n\n\/\/\/\npub struct GraphicsPipelineDesc {\n \/\/\/ Rasterizer setup\n pub rasterizer: Rasterizer,\n \/\/\/ Shader entry points\n pub shader_entries: GraphicsShaderSet,\n\n \/\/\/ Vertex buffers (IA)\n pub vertex_buffers: Vec<VertexBufferDesc>,\n \/\/\/ Vertex attributes (IA)\n pub attributes: Vec<AttributeDesc>,\n \/\/\/\n pub input_assembler: InputAssemblerDesc,\n\n \/\/\/\n pub blending: Vec<BlendDesc>,\n \/\/\/ Depth stencil (DSV)\n pub depth_stencil: Option<DepthStencilDesc>,\n}\n\n\/\/\/\n#[derive(Copy, Clone, Debug)]\n#[cfg_attr(feature=\"serialize\", derive(Serialize, Deserialize))]\npub struct DepthBias {\n \/\/\/\n pub const_factor: f32,\n \/\/\/\n pub clamp: f32,\n \/\/\/\n pub slope_factor: f32,\n}\n\n\/\/\/ Rasterization state.\n#[derive(Clone, Debug)]\n#[cfg_attr(feature=\"serialize\", derive(Serialize, Deserialize))]\npub struct Rasterizer {\n \/\/\/ How to rasterize this primitive.\n pub polgyon_mode: s::RasterMethod,\n \/\/\/ Which face should be culled.\n pub cull_mode: s::CullFace,\n \/\/\/ Which vertex winding is considered to be the front face for culling.\n pub front_face: s::FrontFace,\n \/\/\/\n pub depth_clamping: bool,\n \/\/\/\n pub depth_bias: Option<DepthBias>,\n \/\/\/\n pub conservative_rasterization: bool,\n}\n\n\/\/\/\npub enum BlendTargets {\n \/\/\/\n Single(ColorInfo),\n \/\/\/\n Independent(Vec<ColorInfo>),\n}\n\n\/\/\/\npub struct BlendDesc {\n \/\/\/\n pub alpha_coverage: bool,\n \/\/\/\n pub logic_op: Option<LogicOp>,\n \/\/\/\n pub blend_targets: BlendTargets,\n}\n\n\/\/\/\npub enum LogicOp {\n \/\/\/\n Clear,\n \/\/\/\n And,\n \/\/\/\n AndReverse,\n \/\/\/\n AndInverted,\n \/\/\/\n Copy,\n \/\/\/\n CopyInverted,\n \/\/\/\n NoOp,\n \/\/\/\n Xor,\n \/\/\/\n Nor,\n \/\/\/\n Or,\n \/\/\/\n OrReverse,\n \/\/\/\n OrInverted,\n \/\/\/\n Equivalent,\n \/\/\/\n Invert,\n \/\/\/\n Nand,\n \/\/\/\n Set,\n}\n<commit_msg>[ll] core: Add feature flag to discard primitives before the rasterizer stage (pso)<commit_after>\/\/ Copyright 2017 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the 
License.\n\n\/\/! Graphics pipeline descriptor.\n\nuse {state as s};\nuse super::EntryPoint;\nuse super::input_assembler::{AttributeDesc, InputAssemblerDesc, VertexBufferDesc};\nuse super::output_merger::{ColorInfo, DepthStencilDesc};\n\n\/\/ Vulkan:\n\/\/ - SpecializationInfo not provided per shader\n\/\/\n\/\/ D3D12:\n\/\/ - rootSignature specified outside\n\/\/ - logicOp can be set for each RTV\n\/\/ - streamOutput not included\n\/\/ - IA: semantic name and index extracted from shader reflection\n\n\/\/\/ A complete set of shaders to build a graphics pipeline.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\npub struct GraphicsShaderSet {\n \/\/\/\n pub vertex_shader: EntryPoint,\n \/\/\/\n pub hull_shader: Option<EntryPoint>,\n \/\/\/\n pub domain_shader: Option<EntryPoint>,\n \/\/\/\n pub geometry_shader: Option<EntryPoint>,\n \/\/\/\n pub pixel_shader: Option<EntryPoint>,\n}\n\n\/\/\/\npub struct GraphicsPipelineDesc {\n \/\/\/ Rasterizer setup\n pub rasterizer: Rasterizer,\n \/\/\/ Shader entry points\n pub shader_entries: GraphicsShaderSet,\n\n \/\/\/ Vertex buffers (IA)\n pub vertex_buffers: Vec<VertexBufferDesc>,\n \/\/\/ Vertex attributes (IA)\n pub attributes: Vec<AttributeDesc>,\n \/\/\/\n pub input_assembler: InputAssemblerDesc,\n\n \/\/\/\n pub blending: Vec<BlendDesc>,\n \/\/\/ Depth stencil (DSV)\n pub depth_stencil: Option<DepthStencilDesc>,\n}\n\n\/\/\/\n#[derive(Copy, Clone, Debug)]\n#[cfg_attr(feature=\"serialize\", derive(Serialize, Deserialize))]\npub struct DepthBias {\n \/\/\/\n pub const_factor: f32,\n \/\/\/\n pub clamp: f32,\n \/\/\/\n pub slope_factor: f32,\n}\n\n\/\/\/ Rasterization state.\n#[derive(Clone, Debug)]\n#[cfg_attr(feature=\"serialize\", derive(Serialize, Deserialize))]\npub struct Rasterizer {\n \/\/\/ How to rasterize this primitive.\n pub polgyon_mode: s::RasterMethod,\n \/\/\/ Which face should be culled.\n pub cull_mode: s::CullFace,\n \/\/\/ Which vertex winding is considered to be the front face for culling.\n pub front_face: s::FrontFace,\n \/\/\/\n pub depth_clamping: bool,\n \/\/\/\n pub depth_bias: Option<DepthBias>,\n \/\/\/\n pub conservative_rasterization: bool,\n \/\/\/ Discard primitives before the rasterizer.\n pub rasterizer_discard: bool,\n}\n\n\/\/\/\npub enum BlendTargets {\n \/\/\/ All attachments share the same blend state.\n Single(ColorInfo),\n \/\/\/ All attachments have independt blend states.\n Independent(Vec<ColorInfo>),\n}\n\n\/\/\/\npub struct BlendDesc {\n \/\/\/\n pub alpha_coverage: bool,\n \/\/\/\n pub logic_op: Option<LogicOp>,\n \/\/\/\n pub blend_targets: BlendTargets,\n}\n\n\/\/\/\npub enum LogicOp {\n \/\/\/\n Clear,\n \/\/\/\n And,\n \/\/\/\n AndReverse,\n \/\/\/\n AndInverted,\n \/\/\/\n Copy,\n \/\/\/\n CopyInverted,\n \/\/\/\n NoOp,\n \/\/\/\n Xor,\n \/\/\/\n Nor,\n \/\/\/\n Or,\n \/\/\/\n OrReverse,\n \/\/\/\n OrInverted,\n \/\/\/\n Equivalent,\n \/\/\/\n Invert,\n \/\/\/\n Nand,\n \/\/\/\n Set,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>start hacking together a backtrace hack module.<commit_after>use libc::{c_char, c_int, c_void};\nuse std::mem;\nuse std::ptr;\n\nextern {\n fn backtrace(buffer: *mut *mut c_void, size: c_int) -> c_int;\n fn backtrace_symbols(buffer: *mut *const c_void, size: c_int) -> *mut *mut c_char;\n fn backtrace_symbols_fd(buffer: *mut *const c_void, size: c_int, fd: c_int);\n}\n\npub fn stack_height() -> usize {\n let local: u32 = 0;\n let height = &local as *const _ as usize;\n super::main_height() - height\n}\n\npub fn backtrace_return_addresses() -> Vec<*const c_void> 
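The commit in the gfx-rs entry above only adds the `rasterizer_discard` field to `Rasterizer`. The sketch below shows how a caller might fill the struct with the new flag; the enum variant names from the `state as s` module (RasterMethod::Fill, CullFace::Back, FrontFace::CounterClockwise) and the concrete values are assumptions for illustration, not taken from this file.

// Illustrative only; the variant names from `state as s` are assumed.
let rasterizer = Rasterizer {
    polgyon_mode: s::RasterMethod::Fill, // field name spelled as in the source
    cull_mode: s::CullFace::Back,
    front_face: s::FrontFace::CounterClockwise,
    depth_clamping: false,
    depth_bias: None,
    conservative_rasterization: false,
    // New in this commit: skip rasterization entirely, e.g. for pipelines
    // that only exist for their vertex/geometry stage side effects.
    rasterizer_discard: true,
};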
{\n let height = stack_height();\n let word_size = mem::size_of::<usize>();\n let rounded_height = (height + (word_size - 1)) \/ word_size;\n assert!(rounded_height >= 0);\n assert!(rounded_height <= ::std::i32::MAX as usize);\n let mut buffer = vec![ptr::null(); rounded_height];\n let rounded_height = rounded_height as i32;\n unsafe {\n let filled_size = backtrace(buffer.as_mut_ptr() as *mut *mut c_void,\n rounded_height);\n assert!(filled_size >= 0);\n buffer.set_len(filled_size as usize);\n }\n buffer\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ FIXME - This test gives different results on different machines.\nfn invoke<'a, F>(x: &'a i32, f: F) -> &'a i32\nwhere F: FnOnce(&'a i32, &i32) -> &'a i32\n{\n let y = 22;\n f(x, &y)\n}\n\nfn foo<'a>(x: &'a i32) { \/\/~ ERROR E0495\n invoke(&x, |a, b| if a > b { a } else { b });\n}\n\nfn main() {}\n<commit_msg>tweak comments in E0495.rs<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that we give the generic E0495 when one of the free regions is\n\/\/ bound in a closure (rather than suggesting a change to the signature\n\/\/ of the closure, which is not specified in `foo` but rather in `invoke`).\n\n\/\/ FIXME - This might be better as a UI test, but the finer details\n\/\/ of the error seem to vary on different machines.\nfn invoke<'a, F>(x: &'a i32, f: F) -> &'a i32\nwhere F: FnOnce(&'a i32, &i32) -> &'a i32\n{\n let y = 22;\n f(x, &y)\n}\n\nfn foo<'a>(x: &'a i32) { \/\/~ ERROR E0495\n invoke(&x, |a, b| if a > b { a } else { b });\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! 
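backtrace_return_addresses in the entry above sizes its frame buffer with the `(height + (word_size - 1)) / word_size` idiom, i.e. an integer ceiling division of the stack height by the word size, so a partially filled word still gets a slot. A self-contained check of that arithmetic follows (div_round_up is a hypothetical name used only for illustration).

// Smallest n such that n * word_size >= height.
fn div_round_up(height: usize, word_size: usize) -> usize {
    (height + (word_size - 1)) / word_size
}

#[test]
fn rounds_up_to_whole_words() {
    assert_eq!(div_round_up(0, 8), 0);
    assert_eq!(div_round_up(1, 8), 1); // a partial word still needs a slot
    assert_eq!(div_round_up(8, 8), 1);
    assert_eq!(div_round_up(9, 8), 2);
}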
* All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\nimpl Feature {\n fn check_match(&self, other: &Feature)-> Result<(), Vec<&'static str>> {\n let mut mismatches = Vec::new();\n if self.level != other.level {\n mismatches.push(\"stability level\");\n }\n if self.level == Status::Stable || other.level == Status::Stable {\n \/\/ As long as a feature is unstable, the since field tracks\n \/\/ when the given part of the feature has been implemented.\n \/\/ Mismatches are tolerable as features evolve and functionality\n \/\/ gets added.\n \/\/ Once a feature is stable, the since field tracks the first version\n \/\/ it was part of the stable distribution, and mismatches are disallowed.\n if self.since != other.since {\n mismatches.push(\"since\");\n }\n }\n if self.tracking_issue != other.tracking_issue {\n mismatches.push(\"tracking issue\");\n }\n if mismatches.is_empty() {\n Ok(())\n } else {\n Err(mismatches)\n }\n }\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == 
Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n\n \/\/ This library feature is defined in the `compiler_builtins` crate, which\n \/\/ has been moved out-of-tree. Now it can no longer be auto-discovered by\n \/\/ `tidy`, because we need to filter out its (submodule) directory. 
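test_filen_gate above recognizes a gate test purely from its file name: dashes become underscores, the ".rs" suffix is stripped, and the result is compared against "feature_gate_<name>". Below is a hedged standalone sketch of that normalization (feature_from_filename is a hypothetical helper, not part of the tidy code); tests that keep a different name can still opt in with a `// gate-test-<name>` comment, as handled earlier in check().

fn feature_from_filename(filename: &str) -> Option<String> {
    let normalized = filename.replace("-", "_").replace(".rs", "");
    if normalized.starts_with("feature_gate_") {
        Some(normalized["feature_gate_".len()..].to_owned())
    } else {
        None
    }
}

#[test]
fn maps_test_filename_to_feature_name() {
    assert_eq!(feature_from_filename("feature-gate-abc_foo.rs"),
               Some("abc_foo".to_owned()));
    assert_eq!(feature_from_filename("some-other-test.rs"), None);
}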
Manually\n \/\/ add it to the set of known library features so we can still generate docs.\n lib_features.insert(\"compiler_builtins_lib\".to_owned(), Feature {\n level: Status::Unstable,\n since: \"\".to_owned(),\n has_gate_test: false,\n tracking_issue: None,\n });\n\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut check_features = |f: &Feature, list: &Features, display: &str| {\n if let Some(ref s) = list.get(name) {\n if let Err(m) = (&f).check_match(s) {\n tidy_error!(bad,\n \"{}:{}: mismatches to {} in: {:?}\",\n file.display(),\n line,\n display,\n &m);\n }\n }\n };\n check_features(&f, &lang_features, \"corresponding lang feature\");\n check_features(&f, &lib_features, \"previous\");\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! 
err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n if line.contains(\"rustc_const_unstable(\") && line.contains(\"#[\") {\n \/\/ const fn features are handled specially\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let feature = Feature {\n level: Status::Unstable,\n since: \"None\".to_owned(),\n has_gate_test: false,\n \/\/ Whether there is a common tracking issue\n \/\/ for these feature gates remains an open question\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/24111#issuecomment-340283184\n \/\/ But we take 24111 otherwise they will be shown as\n \/\/ \"internal to the compiler\" which they are not.\n tracking_issue: Some(24111),\n };\n mf(Ok((feature_name, feature)), file, i + 1);\n continue;\n }\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<commit_msg>Also support macro generated atomic types<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Tidy check to ensure that unstable features are all in order\n\/\/!\n\/\/! This check will ensure properties like:\n\/\/!\n\/\/! * All stability attributes look reasonably well formed\n\/\/! * The set of library features is disjoint from the set of language features\n\/\/! * Library features have at most one stability level\n\/\/! * Library features have at most one `since` value\n\/\/! 
* All unstable lang features have tests to ensure they are actually unstable\n\nuse std::collections::HashMap;\nuse std::fmt;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse std::path::Path;\n\n#[derive(Debug, PartialEq, Clone)]\npub enum Status {\n Stable,\n Removed,\n Unstable,\n}\n\nimpl fmt::Display for Status {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let as_str = match *self {\n Status::Stable => \"stable\",\n Status::Unstable => \"unstable\",\n Status::Removed => \"removed\",\n };\n fmt::Display::fmt(as_str, f)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Feature {\n pub level: Status,\n pub since: String,\n pub has_gate_test: bool,\n pub tracking_issue: Option<u32>,\n}\n\nimpl Feature {\n fn check_match(&self, other: &Feature)-> Result<(), Vec<&'static str>> {\n let mut mismatches = Vec::new();\n if self.level != other.level {\n mismatches.push(\"stability level\");\n }\n if self.level == Status::Stable || other.level == Status::Stable {\n \/\/ As long as a feature is unstable, the since field tracks\n \/\/ when the given part of the feature has been implemented.\n \/\/ Mismatches are tolerable as features evolve and functionality\n \/\/ gets added.\n \/\/ Once a feature is stable, the since field tracks the first version\n \/\/ it was part of the stable distribution, and mismatches are disallowed.\n if self.since != other.since {\n mismatches.push(\"since\");\n }\n }\n if self.tracking_issue != other.tracking_issue {\n mismatches.push(\"tracking issue\");\n }\n if mismatches.is_empty() {\n Ok(())\n } else {\n Err(mismatches)\n }\n }\n}\n\npub type Features = HashMap<String, Feature>;\n\npub fn check(path: &Path, bad: &mut bool, quiet: bool) {\n let mut features = collect_lang_features(path);\n assert!(!features.is_empty());\n\n let lib_features = get_and_check_lib_features(path, bad, &features);\n assert!(!lib_features.is_empty());\n\n let mut contents = String::new();\n\n super::walk_many(&[&path.join(\"test\/compile-fail\"),\n &path.join(\"test\/compile-fail-fulldeps\"),\n &path.join(\"test\/parse-fail\"),],\n &mut |path| super::filter_dirs(path),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n let filen_underscore = filename.replace(\"-\",\"_\").replace(\".rs\",\"\");\n let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n for (i, line) in contents.lines().enumerate() {\n let mut err = |msg: &str| {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), i + 1, msg);\n };\n\n let gate_test_str = \"gate-test-\";\n\n if !line.contains(gate_test_str) {\n continue;\n }\n\n let feature_name = match line.find(gate_test_str) {\n Some(i) => {\n &line[i+gate_test_str.len()..line[i+1..].find(' ').unwrap_or(line.len())]\n },\n None => continue,\n };\n match features.get_mut(feature_name) {\n Some(f) => {\n if filename_is_gate_test {\n err(&format!(\"The file is already marked as gate test \\\n through its name, no need for a \\\n 'gate-test-{}' comment\",\n feature_name));\n }\n f.has_gate_test = true;\n }\n None => {\n err(&format!(\"gate-test test found referencing a nonexistent feature '{}'\",\n feature_name));\n }\n }\n }\n });\n\n \/\/ Only check the number of lang features.\n \/\/ Obligatory testing for library features is dumb.\n let gate_untested = features.iter()\n .filter(|&(_, f)| f.level == 
Status::Unstable)\n .filter(|&(_, f)| !f.has_gate_test)\n .collect::<Vec<_>>();\n\n for &(name, _) in gate_untested.iter() {\n println!(\"Expected a gate test for the feature '{}'.\", name);\n println!(\"Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\\\n \\n test suite, with its failures due to missing usage of\\\n \\n #![feature({})].\", name, name);\n println!(\"Hint: If you already have such a test and don't want to rename it,\\\n \\n you can also add a \/\/ gate-test-{} line to the test file.\",\n name);\n }\n\n if gate_untested.len() > 0 {\n tidy_error!(bad, \"Found {} features without a gate test.\", gate_untested.len());\n }\n\n if *bad {\n return;\n }\n if quiet {\n println!(\"* {} features\", features.len());\n return;\n }\n\n let mut lines = Vec::new();\n for (name, feature) in features.iter() {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lang\",\n feature.level,\n feature.since));\n }\n for (name, feature) in lib_features {\n lines.push(format!(\"{:<32} {:<8} {:<12} {:<8}\",\n name,\n \"lib\",\n feature.level,\n feature.since));\n }\n\n lines.sort();\n for line in lines {\n println!(\"* {}\", line);\n }\n}\n\nfn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {\n line.find(attr)\n .and_then(|i| line[i..].find('\"').map(|j| i + j + 1))\n .and_then(|i| line[i..].find('\"').map(|j| (i, i + j)))\n .map(|(i, j)| &line[i..j])\n}\n\nfn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {\n if filen_underscore.starts_with(\"feature_gate\") {\n for (n, f) in features.iter_mut() {\n if filen_underscore == format!(\"feature_gate_{}\", n) {\n f.has_gate_test = true;\n return true;\n }\n }\n }\n return false;\n}\n\npub fn collect_lang_features(base_src_path: &Path) -> Features {\n let mut contents = String::new();\n let path = base_src_path.join(\"libsyntax\/feature_gate.rs\");\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n contents.lines()\n .filter_map(|line| {\n let mut parts = line.trim().split(\",\");\n let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {\n Some(\"active\") => Status::Unstable,\n Some(\"removed\") => Status::Removed,\n Some(\"accepted\") => Status::Stable,\n _ => return None,\n };\n let name = parts.next().unwrap().trim();\n let since = parts.next().unwrap().trim().trim_matches('\"');\n let issue_str = parts.next().unwrap().trim();\n let tracking_issue = if issue_str.starts_with(\"None\") {\n None\n } else {\n let s = issue_str.split(\"(\").nth(1).unwrap().split(\")\").nth(0).unwrap();\n Some(s.parse().unwrap())\n };\n Some((name.to_owned(),\n Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n }))\n })\n .collect()\n}\n\npub fn collect_lib_features(base_src_path: &Path) -> Features {\n let mut lib_features = Features::new();\n\n \/\/ This library feature is defined in the `compiler_builtins` crate, which\n \/\/ has been moved out-of-tree. Now it can no longer be auto-discovered by\n \/\/ `tidy`, because we need to filter out its (submodule) directory. 
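find_attr_val above pulls the quoted value that follows an attribute key out of a raw source line by locating the key and then the next pair of double quotes. A hedged usage sketch on an illustrative stability attribute follows; the feature name and issue number are made up.

let line = r#"#[unstable(feature = "foo_bar", issue = "12345")]"#;
assert_eq!(find_attr_val(line, "feature"), Some("foo_bar"));
assert_eq!(find_attr_val(line, "issue"), Some("12345"));
assert_eq!(find_attr_val(line, "since"), None); // key not present on this line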
Manually\n \/\/ add it to the set of known library features so we can still generate docs.\n lib_features.insert(\"compiler_builtins_lib\".to_owned(), Feature {\n level: Status::Unstable,\n since: \"\".to_owned(),\n has_gate_test: false,\n tracking_issue: None,\n });\n\n map_lib_features(base_src_path,\n &mut |res, _, _| {\n match res {\n Ok((name, feature)) => {\n if lib_features.get(name).is_some() {\n return;\n }\n lib_features.insert(name.to_owned(), feature);\n },\n Err(_) => (),\n }\n });\n lib_features\n}\n\nfn get_and_check_lib_features(base_src_path: &Path,\n bad: &mut bool,\n lang_features: &Features) -> Features {\n let mut lib_features = Features::new();\n map_lib_features(base_src_path,\n &mut |res, file, line| {\n match res {\n Ok((name, f)) => {\n let mut check_features = |f: &Feature, list: &Features, display: &str| {\n if let Some(ref s) = list.get(name) {\n if let Err(m) = (&f).check_match(s) {\n tidy_error!(bad,\n \"{}:{}: mismatches to {} in: {:?}\",\n file.display(),\n line,\n display,\n &m);\n }\n }\n };\n check_features(&f, &lang_features, \"corresponding lang feature\");\n check_features(&f, &lib_features, \"previous\");\n lib_features.insert(name.to_owned(), f);\n },\n Err(msg) => {\n tidy_error!(bad, \"{}:{}: {}\", file.display(), line, msg);\n },\n }\n\n });\n lib_features\n}\n\nfn map_lib_features(base_src_path: &Path,\n mf: &mut FnMut(Result<(&str, Feature), &str>, &Path, usize)) {\n let mut contents = String::new();\n super::walk(base_src_path,\n &mut |path| super::filter_dirs(path) || path.ends_with(\"src\/test\"),\n &mut |file| {\n let filename = file.file_name().unwrap().to_string_lossy();\n if !filename.ends_with(\".rs\") || filename == \"features.rs\" ||\n filename == \"diagnostic_list.rs\" {\n return;\n }\n\n contents.truncate(0);\n t!(t!(File::open(&file), &file).read_to_string(&mut contents));\n\n let mut becoming_feature: Option<(String, Feature)> = None;\n for (i, line) in contents.lines().enumerate() {\n macro_rules! 
err {\n ($msg:expr) => {{\n mf(Err($msg), file, i + 1);\n continue;\n }};\n };\n if let Some((ref name, ref mut f)) = becoming_feature {\n if f.tracking_issue.is_none() {\n f.tracking_issue = find_attr_val(line, \"issue\")\n .map(|s| s.parse().unwrap());\n }\n if line.ends_with(\"]\") {\n mf(Ok((name, f.clone())), file, i + 1);\n } else if !line.ends_with(\",\") && !line.ends_with(\"\\\\\") {\n \/\/ We need to bail here because we might have missed the\n \/\/ end of a stability attribute above because the \"]\"\n \/\/ might not have been at the end of the line.\n \/\/ We could then get into the very unfortunate situation that\n \/\/ we continue parsing the file assuming the current stability\n \/\/ attribute has not ended, and ignoring possible feature\n \/\/ attributes in the process.\n err!(\"malformed stability attribute\");\n } else {\n continue;\n }\n }\n becoming_feature = None;\n if line.contains(\"rustc_const_unstable(\") {\n \/\/ const fn features are handled specially\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let feature = Feature {\n level: Status::Unstable,\n since: \"None\".to_owned(),\n has_gate_test: false,\n \/\/ Whether there is a common tracking issue\n \/\/ for these feature gates remains an open question\n \/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/24111#issuecomment-340283184\n \/\/ But we take 24111 otherwise they will be shown as\n \/\/ \"internal to the compiler\" which they are not.\n tracking_issue: Some(24111),\n };\n mf(Ok((feature_name, feature)), file, i + 1);\n continue;\n }\n let level = if line.contains(\"[unstable(\") {\n Status::Unstable\n } else if line.contains(\"[stable(\") {\n Status::Stable\n } else {\n continue;\n };\n let feature_name = match find_attr_val(line, \"feature\") {\n Some(name) => name,\n None => err!(\"malformed stability attribute\"),\n };\n let since = match find_attr_val(line, \"since\") {\n Some(name) => name,\n None if level == Status::Stable => {\n err!(\"malformed stability attribute\");\n }\n None => \"None\",\n };\n let tracking_issue = find_attr_val(line, \"issue\").map(|s| s.parse().unwrap());\n\n let feature = Feature {\n level,\n since: since.to_owned(),\n has_gate_test: false,\n tracking_issue,\n };\n if line.contains(\"]\") {\n mf(Ok((feature_name, feature)), file, i + 1);\n } else {\n becoming_feature = Some((feature_name.to_owned(), feature));\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Removed a wrong 'Expect a semicolon'.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix test with negative numbers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added parts on traits implementation and default method implemenation in traits.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n use std::io::stdout;\n\n use clap::Shell;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagstorestdhook::vcs::git::store_unload::StoreUnloadHook as GitStoreUnloadHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = !matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n match matches.value_of(Runtime::arg_generate_compl()) {\n Some(shell) => {\n debug!(\"Generating shell completion script, writing to stdout\");\n let shell = shell.parse::<Shell>().unwrap(); \/\/ clap has our back here.\n cli_spec.gen_completions_to(\"fakename\", shell, &mut stdout());\n },\n _ => debug!(\"Not generating shell completion script\"),\n }\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n (Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PostDelete)), \"vcs\", HP::PostDelete),\n (Box::new(GitUpdateHook::new(sp.clone(), HP::PostUpdate)), \"vcs\", HP::PostUpdate),\n (Box::new(GitStoreUnloadHook::new(sp)), \"vcs\", HP::StoreUnload),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering git hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n 
*\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_generate_compl())\n .long(\"generate-commandline-completion\")\n .help(\"Generate the commandline completion for bash or zsh or fish\")\n .required(false)\n .takes_value(true)\n .value_name(\"SHELL\")\n .possible_values(&[\"bash\", \"fish\", \"zsh\"]))\n\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n pub fn arg_generate_compl() -> &'static str {\n \"generate-completion\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n 
};\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<commit_msg>Clone App here so we can consume it elsewhere<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
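get_override_specs in the runtime.rs entry above keeps only the `--override-config` values that contain a '=' and warns about the rest. A self-contained sketch of the same filter follows (filter_overrides is a hypothetical name; the flag values shown are illustrative).

fn filter_overrides<'a, I: IntoIterator<Item = &'a str>>(values: I) -> Vec<String> {
    values.into_iter()
        .filter(|s| s.contains('=')) // keep "key=value" pairs only
        .map(String::from)
        .collect()
}

#[test]
fn drops_values_without_an_equals_sign() {
    assert_eq!(filter_overrides(vec!["store.implicit-create=true", "verbose"]),
               vec!["store.implicit-create=true".to_string()]);
}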
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(mut cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n use std::io::stdout;\n\n use clap::Shell;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagstorestdhook::vcs::git::store_unload::StoreUnloadHook as GitStoreUnloadHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.clone().get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = !matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n match matches.value_of(Runtime::arg_generate_compl()) {\n Some(shell) => {\n debug!(\"Generating shell completion script, writing to stdout\");\n let shell = shell.parse::<Shell>().unwrap(); \/\/ clap has our back here.\n cli_spec.gen_completions_to(\"fakename\", shell, &mut stdout());\n },\n _ => debug!(\"Not generating shell completion script\"),\n }\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n (Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PostDelete)), \"vcs\", HP::PostDelete),\n (Box::new(GitUpdateHook::new(sp.clone(), HP::PostUpdate)), \"vcs\", HP::PostUpdate),\n (Box::new(GitStoreUnloadHook::new(sp)), \"vcs\", HP::StoreUnload),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering git hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n 
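The commit message for this entry ("Clone App here so we can consume it elsewhere") describes the change in Runtime::new(): App::get_matches takes the App by value, so after parsing, the spec is gone and can no longer feed gen_completions_to. A minimal sketch of the pattern, assuming the clap 2.x API used here; the function name and the "imag" binary name are illustrative, and the shell is hardcoded instead of parsed from the argument value.

use std::io::stdout;
use clap::{App, ArgMatches, Shell};

fn parse_and_maybe_complete<'a>(mut cli_spec: App<'a, 'a>) -> ArgMatches<'a> {
    // Parse a clone so the original builder stays usable afterwards.
    let matches = cli_spec.clone().get_matches();
    if matches.is_present("generate-completion") {
        cli_spec.gen_completions_to("imag", Shell::Bash, &mut stdout());
    }
    matches
}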
*\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_generate_compl())\n .long(\"generate-commandline-completion\")\n .help(\"Generate the commandline completion for bash or zsh or fish\")\n .required(false)\n .takes_value(true)\n .value_name(\"SHELL\")\n .possible_values(&[\"bash\", \"fish\", \"zsh\"]))\n\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n pub fn arg_generate_compl() -> &'static str {\n \"generate-completion\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n 
};\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>methods for mutability and ownership added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>variable bindings<commit_after>fn main() {\n let x = 5;\n println!(\"{}\", x);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2427<commit_after>\/\/ https:\/\/leetcode.com\/problems\/number-of-common-factors\/\npub fn common_factors(a: i32, b: i32) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", common_factors(12, 6)); \/\/ 4\n println!(\"{}\", common_factors(25, 30)); \/\/ 2\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Sort mod lines<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove uses of deprecated UInt::min_value<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after><|endoftext|>"} {"text":"<commit_before>use std::sync::Arc;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse std::cell::UnsafeCell;\nuse std::marker::Sync;\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem<T> {\n item: Arc<T>,\n}\n\nimpl<T> Clone for SharedItem<T> {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl<T> Deref for SharedItem<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError<E> {\n error: Arc<E>,\n}\n\nimpl<T> Clone for SharedError<T> {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl<E> Deref for SharedError<E> {\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n\nimpl<T> SharedItem<T> {\n fn 
new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl<E> SharedError<E> {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\n\/\/\/ The data that has to be synced to implement `Shared`, in order to satisfy the `Future` trait's constraints.\nstruct SyncedInner<F>\n where F: Future\n{\n original_future: F, \/\/ The original future\n}\n\nstruct Inner<F>\n where F: Future\n{\n synced_inner: Lock<SyncedInner<F>>,\n tasks_unpark_started: AtomicBool,\n tasks_receiver: Lock<Receiver<Task>>, \/\/ When original future is polled and ready, unparks all the tasks in that channel\n result: UnsafeCell<Option<Result<Async<SharedItem<F::Item>>, SharedError<F::Error>>>>, \/\/ The original future result wrapped with `SharedItem`\/`SharedError` \n}\n\nunsafe impl<F> Sync for Inner<F> where F: Future {}\n\n\/\/\/ TODO: doc\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared<F>\n where F: Future\n{\n inner: Arc<Inner<F>>,\n tasks_sender: Sender<Task>,\n}\n\npub fn new<F>(future: F) -> Shared<F>\n where F: Future\n{\n let (tasks_sender, tasks_receiver) = channel();\n Shared {\n inner: Arc::new(Inner {\n synced_inner: Lock::new(SyncedInner {\n original_future: future,\n }),\n tasks_unpark_started: AtomicBool::new(false),\n tasks_receiver: Lock::new(tasks_receiver), \n result: UnsafeCell::new(None),\n }),\n tasks_sender: tasks_sender,\n }\n}\n\nimpl<F> Future for Shared<F>\n where F: Future\n{\n type Item = SharedItem<F::Item>;\n type Error = SharedError<F::Error>;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n let mut polled_result: Option<Result<Async<SharedItem<F::Item>>, SharedError<F::Error>>> = None;\n\n \/\/ If the result is ready, just return it\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return result.clone();\n }\n }\n }\n\n \/\/ The result was not ready.\n match self.inner.synced_inner.try_lock() {\n Some(mut inner_guard) => {\n let ref mut inner = *inner_guard;\n\n \/\/ By the time that synced_inner was unlocked, other thread could poll the result,\n \/\/ so we check if result has a value\n unsafe {\n if (*self.inner.result.get()).is_some() {\n polled_result = (*self.inner.result.get()).clone();\n } else {\n match inner.original_future.poll() {\n Ok(Async::Ready(item)) => {\n *self.inner.result.get() = Some(Ok(Async::Ready(SharedItem::new(item))));\n polled_result = (*self.inner.result.get()).clone();\n }\n Err(error) => {\n *self.inner.result.get() = Some(Err(SharedError::new(error)));\n polled_result = (*self.inner.result.get()).clone();\n }\n Ok(Async::NotReady) => {} \/\/ Will be handled later\n }\n }\n }\n }\n None => {} \/\/ Will be handled later\n }\n\n if let Some(result) = polled_result {\n self.inner.tasks_unpark_started.store(true, Ordering::Relaxed);\n match self.inner.tasks_receiver.try_lock() {\n Some(tasks_receiver_guard) => {\n let ref tasks_receiver = *tasks_receiver_guard;\n loop {\n match tasks_receiver.try_recv() {\n Ok(task) => task.unpark(),\n _ => break,\n }\n }\n }\n None => {} \/\/ Other thread is unparking the tasks\n }\n\n return result.clone();\n }\n\n let t = task::park();\n let _ = self.tasks_sender.send(t);\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n \/\/ If the tasks unpark has started, self.inner.result has a value (not None).\n \/\/ The result must be read here because it is possible that the task,\n \/\/ t (see variable above), had not been unparked.\n unsafe {\n if let Some(ref 
result) = *self.inner.result.get() {\n return result.clone();\n }\n }\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl<F> Clone for Shared<F>\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared {\n inner: self.inner.clone(),\n tasks_sender: self.tasks_sender.clone(),\n }\n }\n}\n<commit_msg>Add doc for poll logic<commit_after>use std::sync::Arc;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::ops::Deref;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse std::cell::UnsafeCell;\nuse std::marker::Sync;\nuse {Future, Poll, Async};\nuse task::{self, Task};\nuse lock::Lock;\n\n\n\/\/\/ A wrapped item of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedItem<T> {\n item: Arc<T>,\n}\n\nimpl<T> Clone for SharedItem<T> {\n fn clone(&self) -> Self {\n SharedItem { item: self.item.clone() }\n }\n}\n\nimpl<T> Deref for SharedItem<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.item.as_ref()\n }\n}\n\n\/\/\/ A wrapped error of the original future.\n\/\/\/ It is clonable and implements Deref for ease of use.\n#[derive(Debug)]\npub struct SharedError<E> {\n error: Arc<E>,\n}\n\nimpl<T> Clone for SharedError<T> {\n fn clone(&self) -> Self {\n SharedError { error: self.error.clone() }\n }\n}\n\nimpl<E> Deref for SharedError<E> {\n type Target = E;\n\n fn deref(&self) -> &E {\n &self.error.as_ref()\n }\n}\n\nimpl<T> SharedItem<T> {\n fn new(item: T) -> Self {\n SharedItem { item: Arc::new(item) }\n }\n}\n\nimpl<E> SharedError<E> {\n fn new(error: E) -> Self {\n SharedError { error: Arc::new(error) }\n }\n}\n\n\/\/\/ The data that has to be synced to implement `Shared`,\n\/\/\/ in order to satisfy the `Future` trait's constraints.\nstruct SyncedInner<F>\n where F: Future\n{\n original_future: F, \/\/ The original future\n}\n\nstruct Inner<F>\n where F: Future\n{\n synced_inner: Lock<SyncedInner<F>>,\n tasks_unpark_started: AtomicBool,\n \/\/\/ When original future is polled and ready, unparks all the tasks in that channel\n tasks_receiver: Lock<Receiver<Task>>,\n \/\/\/ The original future result wrapped with `SharedItem`\/`SharedError`\n result: UnsafeCell<Option<Result<Async<SharedItem<F::Item>>, SharedError<F::Error>>>>,\n}\n\nunsafe impl<F> Sync for Inner<F> where F: Future {}\n\n\/\/\/ TODO: doc\n#[must_use = \"futures do nothing unless polled\"]\npub struct Shared<F>\n where F: Future\n{\n inner: Arc<Inner<F>>,\n tasks_sender: Sender<Task>,\n}\n\npub fn new<F>(future: F) -> Shared<F>\n where F: Future\n{\n let (tasks_sender, tasks_receiver) = channel();\n Shared {\n inner: Arc::new(Inner {\n synced_inner: Lock::new(SyncedInner { original_future: future }),\n tasks_unpark_started: AtomicBool::new(false),\n tasks_receiver: Lock::new(tasks_receiver),\n result: UnsafeCell::new(None),\n }),\n tasks_sender: tasks_sender,\n }\n}\n\nimpl<F> Future for Shared<F>\n where F: Future\n{\n type Item = SharedItem<F::Item>;\n type Error = SharedError<F::Error>;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n \/\/ The logic is as follows:\n \/\/ 1. Check if the result is ready (with tasks_unpark_started)\n \/\/ - If the result is ready, return it.\n \/\/ - Otherwise:\n \/\/ 2. Try lock the self.inner.synced_inner:\n \/\/ - If successfully locked, poll the original future.\n \/\/ If the future is ready, unpark the tasks from\n \/\/ self.inner.tasks_receiver and return the result.\n \/\/ - If the future is not ready:\n \/\/ 3. Create a task and send it through self.tasks_sender.\n \/\/ 4. 
Check again if the result is ready (with tasks_unpark_started)\n \/\/ 5. Return the result if it's ready. It is necessary because otherwise there could be\n \/\/ a race between the task sending and the thread receiving the tasks.\n\n let mut should_unpark_tasks: bool = false;\n\n \/\/ If the result is ready, just return it\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return result.clone();\n }\n }\n }\n\n \/\/ The result was not ready.\n match self.inner.synced_inner.try_lock() {\n Some(mut inner_guard) => {\n let ref mut inner = *inner_guard;\n unsafe {\n \/\/ Other thread could poll the result, so we check if result has a value\n if (*self.inner.result.get()).is_some() {\n should_unpark_tasks = true;\n } else {\n match inner.original_future.poll() {\n Ok(Async::Ready(item)) => {\n *self.inner.result.get() =\n Some(Ok(Async::Ready(SharedItem::new(item))));\n should_unpark_tasks = true;\n }\n Err(error) => {\n *self.inner.result.get() = Some(Err(SharedError::new(error)));\n should_unpark_tasks = true;\n }\n Ok(Async::NotReady) => {} \/\/ Will be handled later\n }\n }\n }\n }\n None => {} \/\/ Will be handled later\n }\n\n if should_unpark_tasks {\n self.inner.tasks_unpark_started.store(true, Ordering::Relaxed);\n match self.inner.tasks_receiver.try_lock() {\n Some(tasks_receiver_guard) => {\n let ref tasks_receiver = *tasks_receiver_guard;\n loop {\n match tasks_receiver.try_recv() {\n Ok(task) => task.unpark(),\n _ => break,\n }\n }\n }\n None => {} \/\/ Other thread is unparking the tasks\n }\n\n return result.clone();\n }\n\n let t = task::park();\n let _ = self.tasks_sender.send(t);\n if self.inner.tasks_unpark_started.load(Ordering::Relaxed) {\n \/\/ If the tasks unpark has started, self.inner.result has a value (not None).\n \/\/ The result must be read here because it is possible that the task,\n \/\/ t (see variable above), had not been unparked.\n unsafe {\n if let Some(ref result) = *self.inner.result.get() {\n return result.clone();\n } else {\n \/\/ How should I use unwrap here?\n \/\/ The compiler says cannot \"move out of borrowed content\"\n unreachable!();\n }\n }\n }\n\n Ok(Async::NotReady)\n }\n}\n\nimpl<F> Clone for Shared<F>\n where F: Future\n{\n fn clone(&self) -> Self {\n Shared {\n inner: self.inner.clone(),\n tasks_sender: self.tasks_sender.clone(),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for Issue #21837<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait Bound {}\npub struct Foo<T: Bound>(T);\n\npub trait Trait1 {}\nimpl<T: Bound> Trait1 for Foo<T> {}\n\npub trait Trait2 {}\nimpl<T> Trait2 for Foo<T> {} \/\/~ ERROR the trait bound `T: Bound` is not satisfied\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update to master<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>easage-list: Sort entries by name before printing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor code to be more idomatic<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove Interval and simplify Sphere code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adjust char imports<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Useful synchronization primitives.\n\/\/!\n\/\/! This module contains useful safe and unsafe synchronization primitives.\n\/\/! Most of the primitives in this module do not provide any sort of locking\n\/\/! and\/or blocking at all, but rather provide the necessary tools to build\n\/\/! other types of concurrent primitives.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use alloc_crate::sync::{Arc, Weak};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::sync::atomic;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::barrier::{Barrier, BarrierWaitResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::condvar::{Condvar, WaitTimeoutResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::mutex::{Mutex, MutexGuard};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::once::{Once, OnceState, ONCE_INIT};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\n\npub mod mpsc;\n\nmod barrier;\nmod condvar;\nmod mutex;\nmod once;\nmod rwlock;\n<commit_msg>Rollup merge of #54078 - GabrielMajeri:expand-sync-docs, r=steveklabnik<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Useful synchronization primitives.\n\/\/!\n\/\/! ## The need for synchronization\n\/\/!\n\/\/! Conceptually, a Rust program is a series of operations which will\n\/\/! 
be executed on a computer. The timeline of events happening in the\n\/\/! program is consistent with the order of the operations in the code.\n\/\/!\n\/\/! Consider the following code, operating on some global static variables:\n\/\/!\n\/\/! ```rust\n\/\/! static mut A: u32 = 0;\n\/\/! static mut B: u32 = 0;\n\/\/! static mut C: u32 = 0;\n\/\/!\n\/\/! fn main() {\n\/\/! unsafe {\n\/\/! A = 3;\n\/\/! B = 4;\n\/\/! A = A + B;\n\/\/! C = B;\n\/\/! println!(\"{} {} {}\", A, B, C);\n\/\/! C = A;\n\/\/! }\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! It appears as if some variables stored in memory are changed, an addition\n\/\/! is performed, result is stored in `A` and the variable `C` is\n\/\/! modified twice.\n\/\/!\n\/\/! When only a single thread is involved, the results are as expected:\n\/\/! the line `7 4 4` gets printed.\n\/\/!\n\/\/! As for what happens behind the scenes, when optimizations are enabled the\n\/\/! final generated machine code might look very different from the code:\n\/\/!\n\/\/! - The first store to `C` might be moved before the store to `A` or `B`,\n\/\/! _as if_ we had written `C = 4; A = 3; B = 4`.\n\/\/!\n\/\/! - Assignment of `A + B` to `A` might be removed, since the sum can be stored\n\/\/! in a temporary location until it gets printed, with the global variable\n\/\/! never getting updated.\n\/\/!\n\/\/! - The final result could be determined just by looking at the code\n\/\/! at compile time, so [constant folding] might turn the whole\n\/\/! block into a simple `println!(\"7 4 4\")`.\n\/\/!\n\/\/! The compiler is allowed to perform any combination of these\n\/\/! optimizations, as long as the final optimized code, when executed,\n\/\/! produces the same results as the one without optimizations.\n\/\/!\n\/\/! Due to the [concurrency] involved in modern computers, assumptions\n\/\/! about the program's execution order are often wrong. Access to\n\/\/! global variables can lead to nondeterministic results, **even if**\n\/\/! compiler optimizations are disabled, and it is **still possible**\n\/\/! to introduce synchronization bugs.\n\/\/!\n\/\/! Note that thanks to Rust's safety guarantees, accessing global (static)\n\/\/! variables requires `unsafe` code, assuming we don't use any of the\n\/\/! synchronization primitives in this module.\n\/\/!\n\/\/! [constant folding]: https:\/\/en.wikipedia.org\/wiki\/Constant_folding\n\/\/! [concurrency]: https:\/\/en.wikipedia.org\/wiki\/Concurrency_(computer_science)\n\/\/!\n\/\/! ## Out-of-order execution\n\/\/!\n\/\/! Instructions can execute in a different order from the one we define, due to\n\/\/! various reasons:\n\/\/!\n\/\/! - The **compiler** reordering instructions: If the compiler can issue an\n\/\/! instruction at an earlier point, it will try to do so. For example, it\n\/\/! might hoist memory loads at the top of a code block, so that the CPU can\n\/\/! start [prefetching] the values from memory.\n\/\/!\n\/\/! In single-threaded scenarios, this can cause issues when writing\n\/\/! signal handlers or certain kinds of low-level code.\n\/\/! Use [compiler fences] to prevent this reordering.\n\/\/!\n\/\/! - A **single processor** executing instructions [out-of-order]:\n\/\/! Modern CPUs are capable of [superscalar] execution,\n\/\/! i.e. multiple instructions might be executing at the same time,\n\/\/! even though the machine code describes a sequential process.\n\/\/!\n\/\/! This kind of reordering is handled transparently by the CPU.\n\/\/!\n\/\/! - A **multiprocessor** system executing multiple hardware threads\n\/\/! 
at the same time: In multi-threaded scenarios, you can use two\n\/\/! kinds of primitives to deal with synchronization:\n\/\/! - [memory fences] to ensure memory accesses are made visibile to\n\/\/! other CPUs in the right order.\n\/\/! - [atomic operations] to ensure simultaneous access to the same\n\/\/! memory location doesn't lead to undefined behavior.\n\/\/!\n\/\/! [prefetching]: https:\/\/en.wikipedia.org\/wiki\/Cache_prefetching\n\/\/! [compiler fences]: crate::sync::atomic::compiler_fence\n\/\/! [out-of-order]: https:\/\/en.wikipedia.org\/wiki\/Out-of-order_execution\n\/\/! [superscalar]: https:\/\/en.wikipedia.org\/wiki\/Superscalar_processor\n\/\/! [memory fences]: crate::sync::atomic::fence\n\/\/! [atomic operations]: crate::sync::atomic\n\/\/!\n\/\/! ## Higher-level synchronization objects\n\/\/!\n\/\/! Most of the low-level synchronization primitives are quite error-prone and\n\/\/! inconvenient to use, which is why the standard library also exposes some\n\/\/! higher-level synchronization objects.\n\/\/!\n\/\/! These abstractions can be built out of lower-level primitives.\n\/\/! For efficiency, the sync objects in the standard library are usually\n\/\/! implemented with help from the operating system's kernel, which is\n\/\/! able to reschedule the threads while they are blocked on acquiring\n\/\/! a lock.\n\/\/!\n\/\/! The following is an overview of the available synchronization\n\/\/! objects:\n\/\/!\n\/\/! - [`Arc`]: Atomically Reference-Counted pointer, which can be used\n\/\/! in multithreaded environments to prolong the lifetime of some\n\/\/! data until all the threads have finished using it.\n\/\/!\n\/\/! - [`Barrier`]: Ensures multiple threads will wait for each other\n\/\/! to reach a point in the program, before continuing execution all\n\/\/! together.\n\/\/!\n\/\/! - [`Condvar`]: Condition Variable, providing the ability to block\n\/\/! a thread while waiting for an event to occur.\n\/\/!\n\/\/! - [`mpsc`]: Multi-producer, single-consumer queues, used for\n\/\/! message-based communication. Can provide a lightweight\n\/\/! inter-thread synchronisation mechanism, at the cost of some\n\/\/! extra memory.\n\/\/!\n\/\/! - [`Mutex`]: Mutual Exclusion mechanism, which ensures that at\n\/\/! most one thread at a time is able to access some data.\n\/\/!\n\/\/! - [`Once`]: Used for thread-safe, one-time initialization of a\n\/\/! global variable.\n\/\/!\n\/\/! - [`RwLock`]: Provides a mutual exclusion mechanism which allows\n\/\/! multiple readers at the same time, while allowing only one\n\/\/! writer at a time. In some cases, this can be more efficient than\n\/\/! a mutex.\n\/\/!\n\/\/! [`Arc`]: crate::sync::Arc\n\/\/! [`Barrier`]: crate::sync::Barrier\n\/\/! [`Condvar`]: crate::sync::Condvar\n\/\/! [`mpsc`]: crate::sync::mpsc\n\/\/! [`Mutex`]: crate::sync::Mutex\n\/\/! [`Once`]: crate::sync::Once\n\/\/! 
[`RwLock`]: crate::sync::RwLock\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use alloc_crate::sync::{Arc, Weak};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use core::sync::atomic;\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::barrier::{Barrier, BarrierWaitResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::condvar::{Condvar, WaitTimeoutResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::mutex::{Mutex, MutexGuard};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::once::{Once, OnceState, ONCE_INIT};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult};\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};\n\npub mod mpsc;\n\nmod barrier;\nmod condvar;\nmod mutex;\nmod once;\nmod rwlock;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add hashing directory and mod file for later.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{io, fs};\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str) {\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n match &command[..]\n {\n \"panic\" => panic!(\"Test panic\"),\n \"ls\" => {\n \/\/ TODO: when libredox is completed\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n _ => println!(\"Commands: panic\"),\n }\n }\n }\n}\n<commit_msg>Use an array to label all commands and add `ptr_write` command<commit_after>#![feature(alloc)]\n#![feature(core)]\n\nextern crate alloc;\nextern crate core;\n\nuse alloc::boxed::Box;\nuse std::{io, fs, rand};\nuse core::ptr;\n\nmacro_rules! 
readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str) {\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n let console_commands = [\"panic\", \"ls\", \"ptr_write\"];\n\n match &command[..]\n {\n command if command == console_commands[0] => panic!(\"Test panic\"),\n command if command == console_commands[1] => {\n \/\/ TODO: import std::fs functions into libredox\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n command if command == console_commands[2] => {\n let a_ptr = rand() as *mut u8;\n \/\/ TODO: import Box::{from_raw, to_raw} methods in libredox\n \/\/let mut a_box = Box::new(rand() as u8);\n unsafe {\n ptr::write(a_ptr, rand() as u8);\n \/\/ptr::write(a_box.to_raw(), rand() as u8);\n }\n }\n _ => println!(\"Commands: {}\", console_commands.join(\" \")),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(alloc)]\n#![feature(core)]\n\nextern crate alloc;\nextern crate core;\n\nuse alloc::boxed::Box;\nuse std::{io, fs, rand};\nuse core::ptr;\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str){\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n let console_commands = [\"panic\", \"ls\", \"ptr_write\"];\n\n match &command[..]\n {\n command if command == console_commands[0] => panic!(\"Test panic\"),\n command if command == console_commands[1] => {\n \/\/ TODO: import std::fs functions into libredox\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n command if command == console_commands[2] => {\n let a_ptr = rand() as *mut u8;\n \/\/ TODO: import Box::{from_raw, to_raw} methods in libredox\n \/\/let mut a_box = Box::new(rand() as u8);\n unsafe {\n ptr::write(a_ptr, rand() as u8);\n \/\/ptr::write(a_box.to_raw(), rand() as u8);\n }\n }\n _ => println!(\"Commands: {}\", console_commands.join(\" \")),\n }\n }\n }\n}\n<commit_msg>Minor cleanup<commit_after>#![feature(alloc)]\n#![feature(core)]\n\nextern crate alloc;\nextern crate core;\n\nuse alloc::boxed::Box;\nuse std::{io, fs, rand};\nuse core::ptr;\n\nmacro_rules! readln {\n () => {\n {\n let mut line = String::new();\n match io::stdin().read_line(&mut line) {\n Ok(n) => Some(line.trim().to_string()),\n Err(e) => None\n }\n }\n };\n}\n\nfn console_title(title: &str){\n\n}\n\n#[no_mangle]\npub fn main() {\n console_title(\"Test\");\n\n println!(\"Type help for a command list\");\n while let Some(line) = readln!() {\n let args: Vec<String> = line.split(' ').map(|arg| arg.to_string()).collect();\n\n if let Some(command) = args.get(0) {\n println!(\"# {}\", line);\n let console_commands = [\"panic\", \"ls\", \"ptr_write\"];\n\n match &command[..] 
{\n command if command == console_commands[0] =>\n panic!(\"Test panic\"),\n command if command == console_commands[1] => {\n \/\/ TODO: import std::fs functions into libredox\n \/\/fs::read_dir(\"\/\").unwrap().map(|dir| println!(\"{}\", dir));\n }\n command if command == console_commands[2] => {\n let a_ptr = rand() as *mut u8;\n let mut a_box = Box::new(rand() as u8);\n unsafe {\n ptr::write(a_ptr, rand() as u8);\n \/\/ TODO: import Box::{from_raw, to_raw} methods in libredox\n \/\/ptr::write(a_box.to_raw(), rand() as u8);\n }\n }\n _ => println!(\"Commands: {}\", console_commands.join(\" \")),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>export Font, create_test_font, test_font_bin;\n\nimport glyph::GlyphIndex;\nimport vec_to_ptr = vec::unsafe::to_ptr;\nimport libc::{ c_int, c_double, c_ulong };\nimport ptr::{ null, addr_of };\nimport azure::cairo::{\n cairo_font_face_t,\n cairo_scaled_font_t,\n cairo_glyph_t,\n cairo_text_extents_t,\n CAIRO_STATUS_SUCCESS,\n};\nimport azure::cairo::bindgen::{\n cairo_font_face_destroy,\n cairo_scaled_font_destroy,\n cairo_scaled_font_status,\n cairo_scaled_font_text_to_glyphs,\n cairo_scaled_font_glyph_extents,\n cairo_glyph_free,\n cairo_status_to_string\n};\n\n#[doc = \"\nA font handle. Layout can use this to calculate glyph metrics\nand the renderer can use it to render text.\n\"]\n#[warn(no_non_implicitly_copyable_typarams)]\nclass Font\/& {\n let fontbuf: [u8];\n let cairo_font: *cairo_scaled_font_t;\n let font_dtor: fn@();\n\n new(-fontbuf: [u8]) {\n\n let (cairo_font, font_dtor) = get_cairo_font(© fontbuf);\n assert cairo_font.is_not_null();\n\n self.fontbuf <- fontbuf;\n self.cairo_font = cairo_font;\n self.font_dtor = font_dtor;\n }\n\n drop {\n self.font_dtor();\n }\n\n fn buf() -> &self.[u8] {\n &self.fontbuf\n }\n\n fn glyph_idx(codepoint: char) -> option<GlyphIndex> {\n #debug(\"getting glyph for codepoint %u\", codepoint as uint);\n let codepoint_str = str::from_char(codepoint);\n\n let mut glyphs: *cairo_glyph_t = null();\n let mut num_glyphs = 0 as c_int;\n\n let status = str::as_c_str(codepoint_str) { |codepoint_buf|\n cairo_scaled_font_text_to_glyphs(\n self.cairo_font,\n 0.0 as c_double, 0.0 as c_double,\n codepoint_buf, codepoint_str.len() as c_int,\n addr_of(glyphs), addr_of(num_glyphs),\n null(), null(), null()\n )\n };\n\n ret if status == CAIRO_STATUS_SUCCESS {\n\n \/\/ This might not be true, but at least we'll know if it isn't\n assert num_glyphs == 1 as c_int;\n\n let glyph_index = unsafe { *glyphs }.index as GlyphIndex;\n #debug(\"glyph index is %?\", glyph_index);\n cairo_glyph_free(glyphs);\n some(glyph_index)\n } else {\n #error(\"cairo did not give me a glyph for %u\", codepoint as uint);\n none\n }\n }\n\n fn glyph_h_advance(glyph: GlyphIndex) -> int {\n\n #debug(\"getting h advance for glyph %?\", glyph);\n\n let glyphs: [cairo_glyph_t] = [{\n index: glyph as c_ulong,\n x: 0 as c_double,\n y: 0 as c_double,\n }];\n let extents: cairo_text_extents_t = {\n x_bearing: 0 as c_double,\n y_bearing: 0 as c_double,\n width: 0 as c_double,\n height: 0 as c_double,\n x_advance: 0 as c_double,\n y_advance: 0 as c_double,\n };\n\n assert self.cairo_font.is_not_null();\n\n cairo_scaled_font_glyph_extents(\n self.cairo_font, unsafe { vec_to_ptr(glyphs) },\n 1 as c_int, addr_of(extents));\n\n alt cairo_scaled_font_status(self.cairo_font) {\n status if status == CAIRO_STATUS_SUCCESS {\n\n #debug(\"x_advance: %?\", extents.x_advance);\n #debug(\"y_advance: %?\", extents.y_advance);\n\n ret extents.x_advance as int;\n }\n 
status {\n import str::unsafe::from_c_str;\n\n let status_cstr = cairo_status_to_string(status);\n let status_str = unsafe { from_c_str(status_cstr) };\n\n #error(\"cairo_scaled_font_glyph_extents status: %s\", status_str);\n fail \"failed to get glyph extents from cairo\"\n }\n }\n }\n}\n\nfn get_cairo_font(buf: &[u8]) -> (*cairo_scaled_font_t, fn@()) {\n\n import libc::c_double;\n import azure::cairo;\n import cairo::{ cairo_matrix_t };\n import cairo::bindgen::{\n cairo_matrix_init_identity,\n cairo_matrix_scale,\n cairo_font_options_create,\n cairo_font_options_destroy,\n cairo_scaled_font_create,\n cairo_scaled_font_destroy\n };\n \n import cairo::bindgen::cairo_scaled_font_create;\n\n let mut (face, dtor) = get_cairo_face(buf);\n\n let idmatrix: cairo_matrix_t = {\n xx: 0 as c_double,\n yx: 0 as c_double,\n xy: 0 as c_double,\n yy: 0 as c_double,\n x0: 0 as c_double,\n y0: 0 as c_double\n };\n cairo_matrix_init_identity(addr_of(idmatrix));\n\n let fontmatrix = idmatrix;\n cairo_matrix_scale(addr_of(fontmatrix),\n 20f as c_double, 20f as c_double);\n\n let options = cairo_font_options_create();\n let cfont = cairo_scaled_font_create(face, addr_of(fontmatrix),\n addr_of(idmatrix), options);\n cairo_font_options_destroy(options);\n\n \/\/ FIXME: Need negative tests\n if cfont.is_null() {\n dtor();\n fail \"unable to create cairo scaled font\";\n }\n dtor = fn@(move dtor) { cairo_scaled_font_destroy(cfont); dtor() };\n\n (cfont, dtor)\n}\n\n#[cfg(target_os = \"linux\")]\nfn get_cairo_face(buf: &[u8]) -> (*cairo_font_face_t, fn@()) {\n import freetype = azure::freetype;\n import freetype::{ FT_Error, FT_Library, FT_Face, FT_Long };\n import freetype::bindgen::{\n FT_Init_FreeType,\n FT_Done_FreeType,\n FT_New_Memory_Face,\n FT_Done_Face\n };\n import azure::cairo_ft;\n import cairo_ft::bindgen::cairo_ft_font_face_create_for_ft_face;\n\n impl methods for FT_Error {\n fn for_sure() { assert !self.failed() }\n fn failed() -> bool { self != 0 as FT_Error }\n }\n\n let mut dtor = fn@() { };\n\n let library: FT_Library = null();\n \/\/ FIXME: Need tests for failure case\n FT_Init_FreeType(addr_of(library)).for_sure();\n dtor = fn@(move dtor) { FT_Done_FreeType(library).for_sure(); dtor() };\n\n let face: FT_Face = null();\n vec::as_buf(*buf) { |cbuf|\n if FT_New_Memory_Face(library, cbuf, (*buf).len() as FT_Long,\n 0 as FT_Long, addr_of(face)).failed() {\n dtor();\n fail \"unable to create FreeType face\";\n }\n }\n dtor = fn@(move dtor) { FT_Done_Face(face).for_sure(); dtor() };\n\n let cface = cairo_ft_font_face_create_for_ft_face(face, 0 as c_int);\n if cface.is_null() {\n \/\/ FIXME: Need tests for failure case\n dtor();\n fail \"unable to create cairo font face\";\n }\n dtor = fn@(move dtor) { cairo_font_face_destroy(cface); dtor() };\n\n (cface, dtor)\n}\n\n#[cfg(target_os = \"macos\")]\nmod cocoa {\n use cocoa;\n export cocoa;\n}\n\n#[cfg(target_os = \"macos\")]\nfn get_cairo_face(buf: &[u8]) -> (*cairo_font_face_t, fn@()) {\n import unsafe::reinterpret_cast;\n import libc::size_t;\n import cocoa::cocoa;\n import cocoa::cg::cg::{\n CGDataProviderCreateWithData,\n CGDataProviderRelease,\n CGFontCreateWithDataProvider,\n CGFontRelease\n };\n import azure::cairo_quartz::bindgen::cairo_quartz_font_face_create_for_cgfont;\n\n let mut dtor = fn@() { };\n\n let fontprov = vec::as_buf(*buf) { |cbuf|\n CGDataProviderCreateWithData(\n null(),\n unsafe { reinterpret_cast(cbuf) },\n (*buf).len() as size_t,\n null()\n )\n };\n dtor = fn@(move dtor) { CGDataProviderRelease(fontprov); dtor() };\n\n let 
cgfont = CGFontCreateWithDataProvider(fontprov);\n if cgfont.is_null() { fail \"could not create quartz font\" }\n dtor = fn@(move dtor) { CGFontRelease(cgfont); dtor() };\n\n let cface = cairo_quartz_font_face_create_for_cgfont(cgfont);\n assert cface.is_not_null(); \/\/ FIXME: error handling\n dtor = fn@(move dtor) { cairo_font_face_destroy(cface); dtor() };\n\n (cface, dtor)\n}\n\nfn create_test_font() -> Font {\n Font(test_font_bin())\n}\n\nfn test_font_bin() -> [u8] { #include_bin(\"JosefinSans-SemiBold.ttf\") }\n\nfn should_destruct_on_fail_without_leaking() {\n #[test];\n #[should_fail];\n #[ignore];\n\n let _font = create_test_font();\n fail;\n}\n\nfn should_get_glyph_indexes() {\n #[test];\n\n let font = create_test_font();\n let glyph_idx = font.glyph_idx('w');\n assert glyph_idx == some(40u);\n}\n\nfn should_get_glyph_advance() {\n #[test];\n #[ignore(reason = \"random failures\")];\n\n let font = create_test_font();\n let x = font.glyph_h_advance(40u);\n assert x == 15;\n}\n\nfn should_be_able_to_create_instances_in_multiple_threads() {\n #[test];\n\n iter::repeat(10u) {||\n task::spawn {||\n create_test_font();\n }\n }\n}\n\nfn get_cairo_face_should_fail_and_not_leak_if_font_cant_be_created() {\n #[test];\n #[should_fail];\n\n get_cairo_face(&[0u8, 1u8, 2u8, 3u8]);\n}\n\nfn get_cairo_face_should_return_a_new_face_and_dtor() {\n #[test];\n\n let buf = test_font_bin();\n let (face, dtor) = get_cairo_face(&buf);\n assert face.is_not_null();\n dtor();\n}\n\nfn get_cairo_font_should_return_a_new_font_and_dtor() {\n #[test];\n\n let buf = test_font_bin();\n let (font, dtor) = get_cairo_font(&buf);\n assert font.is_not_null();\n dtor();\n}\n<commit_msg>Convert Font from a class to a resource + impl to avoid bugs<commit_after>export Font, create_test_font, test_font_bin;\n\nimport glyph::GlyphIndex;\nimport vec_to_ptr = vec::unsafe::to_ptr;\nimport libc::{ c_int, c_double, c_ulong };\nimport ptr::{ null, addr_of };\nimport azure::cairo::{\n cairo_font_face_t,\n cairo_scaled_font_t,\n cairo_glyph_t,\n cairo_text_extents_t,\n CAIRO_STATUS_SUCCESS,\n};\nimport azure::cairo::bindgen::{\n cairo_font_face_destroy,\n cairo_scaled_font_destroy,\n cairo_scaled_font_status,\n cairo_scaled_font_text_to_glyphs,\n cairo_scaled_font_glyph_extents,\n cairo_glyph_free,\n cairo_status_to_string\n};\n\n\/\/ FIXME (rust 2708): convert this to a class\n\ntype Font = FontDtor;\n\n#[doc = \"\nA font handle. 
Layout can use this to calculate glyph metrics\nand the renderer can use it to render text.\n\"]\nresource FontDtor(state: FontState) {\n state.font_dtor();\n}\n\ntype FontState = {\n fontbuf: @[u8],\n cairo_font: *cairo_scaled_font_t,\n font_dtor: fn@()\n};\n\nfn Font(-fontbuf: [u8]) -> Font {\n let (cairo_font, font_dtor) = get_cairo_font(© fontbuf);\n assert cairo_font.is_not_null();\n\n ret FontDtor({\n fontbuf: @fontbuf,\n cairo_font: cairo_font,\n font_dtor: font_dtor\n });\n}\n\nimpl Font for Font {\n fn buf() -> @[u8] {\n self.fontbuf\n }\n\n fn glyph_idx(codepoint: char) -> option<GlyphIndex> {\n #debug(\"getting glyph for codepoint %u\", codepoint as uint);\n let codepoint_str = str::from_char(codepoint);\n\n let mut glyphs: *cairo_glyph_t = null();\n let mut num_glyphs = 0 as c_int;\n\n let status = str::as_c_str(codepoint_str) { |codepoint_buf|\n cairo_scaled_font_text_to_glyphs(\n self.cairo_font,\n 0.0 as c_double, 0.0 as c_double,\n codepoint_buf, codepoint_str.len() as c_int,\n addr_of(glyphs), addr_of(num_glyphs),\n null(), null(), null()\n )\n };\n\n ret if status == CAIRO_STATUS_SUCCESS {\n\n \/\/ This might not be true, but at least we'll know if it isn't\n assert num_glyphs == 1 as c_int;\n\n let glyph_index = unsafe { *glyphs }.index as GlyphIndex;\n #debug(\"glyph index is %?\", glyph_index);\n cairo_glyph_free(glyphs);\n some(glyph_index)\n } else {\n #error(\"cairo did not give me a glyph for %u\", codepoint as uint);\n none\n }\n }\n\n fn glyph_h_advance(glyph: GlyphIndex) -> int {\n\n #debug(\"getting h advance for glyph %?\", glyph);\n\n let glyphs: [cairo_glyph_t] = [{\n index: glyph as c_ulong,\n x: 0 as c_double,\n y: 0 as c_double,\n }];\n let extents: cairo_text_extents_t = {\n x_bearing: 0 as c_double,\n y_bearing: 0 as c_double,\n width: 0 as c_double,\n height: 0 as c_double,\n x_advance: 0 as c_double,\n y_advance: 0 as c_double,\n };\n\n assert self.cairo_font.is_not_null();\n\n cairo_scaled_font_glyph_extents(\n self.cairo_font, unsafe { vec_to_ptr(glyphs) },\n 1 as c_int, addr_of(extents));\n\n alt cairo_scaled_font_status(self.cairo_font) {\n status if status == CAIRO_STATUS_SUCCESS {\n\n #debug(\"x_advance: %?\", extents.x_advance);\n #debug(\"y_advance: %?\", extents.y_advance);\n\n ret extents.x_advance as int;\n }\n status {\n import str::unsafe::from_c_str;\n\n let status_cstr = cairo_status_to_string(status);\n let status_str = unsafe { from_c_str(status_cstr) };\n\n #error(\"cairo_scaled_font_glyph_extents status: %s\", status_str);\n fail \"failed to get glyph extents from cairo\"\n }\n }\n }\n}\n\nfn get_cairo_font(buf: &[u8]) -> (*cairo_scaled_font_t, fn@()) {\n\n import libc::c_double;\n import azure::cairo;\n import cairo::{ cairo_matrix_t };\n import cairo::bindgen::{\n cairo_matrix_init_identity,\n cairo_matrix_scale,\n cairo_font_options_create,\n cairo_font_options_destroy,\n cairo_scaled_font_create,\n cairo_scaled_font_destroy\n };\n \n import cairo::bindgen::cairo_scaled_font_create;\n\n let mut (face, dtor) = get_cairo_face(buf);\n\n let idmatrix: cairo_matrix_t = {\n xx: 0 as c_double,\n yx: 0 as c_double,\n xy: 0 as c_double,\n yy: 0 as c_double,\n x0: 0 as c_double,\n y0: 0 as c_double\n };\n cairo_matrix_init_identity(addr_of(idmatrix));\n\n let fontmatrix = idmatrix;\n cairo_matrix_scale(addr_of(fontmatrix),\n 20f as c_double, 20f as c_double);\n\n let options = cairo_font_options_create();\n let cfont = cairo_scaled_font_create(face, addr_of(fontmatrix),\n addr_of(idmatrix), options);\n 
cairo_font_options_destroy(options);\n\n \/\/ FIXME: Need negative tests\n if cfont.is_null() {\n dtor();\n fail \"unable to create cairo scaled font\";\n }\n dtor = fn@(move dtor) { cairo_scaled_font_destroy(cfont); dtor() };\n\n (cfont, dtor)\n}\n\n#[cfg(target_os = \"linux\")]\nfn get_cairo_face(buf: &[u8]) -> (*cairo_font_face_t, fn@()) {\n import freetype = azure::freetype;\n import freetype::{ FT_Error, FT_Library, FT_Face, FT_Long };\n import freetype::bindgen::{\n FT_Init_FreeType,\n FT_Done_FreeType,\n FT_New_Memory_Face,\n FT_Done_Face\n };\n import azure::cairo_ft;\n import cairo_ft::bindgen::cairo_ft_font_face_create_for_ft_face;\n\n impl methods for FT_Error {\n fn for_sure() { assert !self.failed() }\n fn failed() -> bool { self != 0 as FT_Error }\n }\n\n let mut dtor = fn@() { };\n\n let library: FT_Library = null();\n \/\/ FIXME: Need tests for failure case\n FT_Init_FreeType(addr_of(library)).for_sure();\n dtor = fn@(move dtor) { FT_Done_FreeType(library).for_sure(); dtor() };\n\n let face: FT_Face = null();\n vec::as_buf(*buf) { |cbuf|\n if FT_New_Memory_Face(library, cbuf, (*buf).len() as FT_Long,\n 0 as FT_Long, addr_of(face)).failed() {\n dtor();\n fail \"unable to create FreeType face\";\n }\n }\n dtor = fn@(move dtor) { FT_Done_Face(face).for_sure(); dtor() };\n\n let cface = cairo_ft_font_face_create_for_ft_face(face, 0 as c_int);\n if cface.is_null() {\n \/\/ FIXME: Need tests for failure case\n dtor();\n fail \"unable to create cairo font face\";\n }\n dtor = fn@(move dtor) { cairo_font_face_destroy(cface); dtor() };\n\n (cface, dtor)\n}\n\n#[cfg(target_os = \"macos\")]\nmod cocoa {\n use cocoa;\n export cocoa;\n}\n\n#[cfg(target_os = \"macos\")]\nfn get_cairo_face(buf: &[u8]) -> (*cairo_font_face_t, fn@()) {\n import unsafe::reinterpret_cast;\n import libc::size_t;\n import cocoa::cocoa;\n import cocoa::cg::cg::{\n CGDataProviderCreateWithData,\n CGDataProviderRelease,\n CGFontCreateWithDataProvider,\n CGFontRelease\n };\n import azure::cairo_quartz::bindgen::cairo_quartz_font_face_create_for_cgfont;\n\n let mut dtor = fn@() { };\n\n let fontprov = vec::as_buf(*buf) { |cbuf|\n CGDataProviderCreateWithData(\n null(),\n unsafe { reinterpret_cast(cbuf) },\n (*buf).len() as size_t,\n null()\n )\n };\n dtor = fn@(move dtor) { CGDataProviderRelease(fontprov); dtor() };\n\n let cgfont = CGFontCreateWithDataProvider(fontprov);\n if cgfont.is_null() { fail \"could not create quartz font\" }\n dtor = fn@(move dtor) { CGFontRelease(cgfont); dtor() };\n\n let cface = cairo_quartz_font_face_create_for_cgfont(cgfont);\n assert cface.is_not_null(); \/\/ FIXME: error handling\n dtor = fn@(move dtor) { cairo_font_face_destroy(cface); dtor() };\n\n (cface, dtor)\n}\n\nfn create_test_font() -> Font {\n Font(test_font_bin())\n}\n\nfn test_font_bin() -> [u8] { #include_bin(\"JosefinSans-SemiBold.ttf\") }\n\nfn should_destruct_on_fail_without_leaking() {\n #[test];\n #[should_fail];\n #[ignore];\n\n let _font = create_test_font();\n fail;\n}\n\nfn should_get_glyph_indexes() {\n #[test];\n\n let font = create_test_font();\n let glyph_idx = font.glyph_idx('w');\n assert glyph_idx == some(40u);\n}\n\nfn should_get_glyph_advance() {\n #[test];\n #[ignore(reason = \"random failures\")];\n\n let font = create_test_font();\n let x = font.glyph_h_advance(40u);\n assert x == 15;\n}\n\nfn should_be_able_to_create_instances_in_multiple_threads() {\n #[test];\n\n iter::repeat(10u) {||\n task::spawn {||\n create_test_font();\n }\n }\n}\n\nfn 
get_cairo_face_should_fail_and_not_leak_if_font_cant_be_created() {\n #[test];\n #[should_fail];\n\n get_cairo_face(&[0u8, 1u8, 2u8, 3u8]);\n}\n\nfn get_cairo_face_should_return_a_new_face_and_dtor() {\n #[test];\n\n let buf = test_font_bin();\n let (face, dtor) = get_cairo_face(&buf);\n assert face.is_not_null();\n dtor();\n}\n\nfn get_cairo_font_should_return_a_new_font_and_dtor() {\n #[test];\n\n let buf = test_font_bin();\n let (font, dtor) = get_cairo_font(&buf);\n assert font.is_not_null();\n dtor();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove extraneous uniform block type<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::io::Read;\nuse std::io::Write;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(basepath: String) -> StorageBackend {\n StorageBackend {\n basepath: basepath,\n }\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<'a, HP>(&self, f: File, p: &Parser<HP>) ->\n Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let written = p.write(f.contents());\n if let Ok(string) = written {\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n Ok(Ok(()))\n } else {\n Err(written.err().unwrap())\n }\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<'a, HP>(&self, f: File, p: &Parser<HP>)\n -> Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let contents = p.write(f.contents());\n\n if contents.is_err() {\n return Err(contents.err().unwrap());\n }\n\n let content = contents.unwrap();\n\n let path = self.build_filepath(&f);\n if let Err(_) = FSFile::open(path) {\n return Ok(Err(StorageBackendError::new(\n \"File::open()\",\n &format!(\"Tried to open '{}'\", path)[..],\n \"Tried to update contents of this file, though file doesn't exist\",\n None)))\n }\n\n if let Ok(mut file) = FSFile::create(path) {\n if let Err(writeerr) = file.write_all(&content.into_bytes()) {\n return Ok(Err(StorageBackendError::new(\n \"File::write()\",\n &format!(\"Tried to write '{}'\", path)[..],\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(content))))\n }\n }\n\n Ok(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. 
Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, id: FileID, p: &Parser<HP>) -> Option<File>\n where HP: FileHeaderParser<'a>\n {\n if let Ok(fs) = FSFile::open(self.build_filepath_with_id(id)) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(id, h, d))).ok()\n } else {\n None\n }\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.id())\n }\n\n fn build_filepath_with_id(&self, id: FileID) -> String {\n self.basepath + &id[..]\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: &'static str,\n desc : &'static str,\n explan: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n explanation: String::from(explan),\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<commit_msg>StorageBackendError: Use String instead of str in params<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::io::Read;\nuse std::io::Write;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(basepath: String) -> StorageBackend {\n StorageBackend {\n basepath: basepath,\n }\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<'a, HP>(&self, f: File, p: &Parser<HP>) ->\n Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let written = p.write(f.contents());\n if let Ok(string) = written {\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n Ok(Ok(()))\n } else {\n Err(written.err().unwrap())\n }\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<'a, HP>(&self, f: File, p: &Parser<HP>)\n -> Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let contents = p.write(f.contents());\n\n if contents.is_err() {\n return Err(contents.err().unwrap());\n }\n\n let content = contents.unwrap();\n\n let path = self.build_filepath(&f);\n if let Err(_) = FSFile::open(path) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::open()\"),\n format!(\"Tried to open '{}'\", path),\n String::from(\"Tried to update contents of this file, though file doesn't exist\"),\n None)))\n }\n\n if let Ok(mut file) = FSFile::create(path) {\n if let Err(writeerr) = file.write_all(&content.into_bytes()) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::write()\"),\n format!(\"Tried to write '{}'\", path),\n String::from(\"Tried to write contents of this file, though operation did not succeed\"),\n Some(content))))\n }\n }\n\n Ok(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, id: FileID, p: &Parser<HP>) -> Option<File>\n where HP: FileHeaderParser<'a>\n {\n if let Ok(fs) = FSFile::open(self.build_filepath_with_id(id)) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(id, h, d))).ok()\n } else {\n None\n }\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.id())\n }\n\n fn build_filepath_with_id(&self, id: FileID) -> String {\n self.basepath + &id[..]\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n explan: String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n explanation: explan,\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add - for range<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make test_smoke use Poll API (#651)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a simple to-device test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>delete duplicates<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! See docs in build\/expr\/mod.rs\n\nuse rustc_data_structures::fnv::FnvHashMap;\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse hair::*;\nuse rustc::mir::repr::*;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, yielding an rvalue.\n pub fn as_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>\n where M: Mirror<'tcx, Output = Expr<'tcx>>\n {\n let expr = self.hir.mirror(expr);\n self.expr_as_rvalue(block, expr)\n }\n\n fn expr_as_rvalue(&mut self,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<Rvalue<'tcx>> {\n debug!(\"expr_as_rvalue(block={:?}, expr={:?})\", block, expr);\n\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.as_rvalue(block, value))\n }\n ExprKind::InlineAsm { asm } => {\n block.and(Rvalue::InlineAsm(asm.clone()))\n }\n ExprKind::Repeat { value, count } => {\n let value_operand = unpack!(block = this.as_operand(block, value));\n block.and(Rvalue::Repeat(value_operand, count))\n }\n ExprKind::Borrow { region, borrow_kind, arg } => {\n let arg_lvalue = unpack!(block = this.as_lvalue(block, arg));\n block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue))\n }\n ExprKind::Binary { op, lhs, rhs } => {\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let rhs = unpack!(block = this.as_operand(block, rhs));\n block.and(Rvalue::BinaryOp(op, lhs, rhs))\n }\n ExprKind::Unary { op, arg } => {\n let arg = unpack!(block = this.as_operand(block, arg));\n block.and(Rvalue::UnaryOp(op, arg))\n }\n ExprKind::Box { value } => {\n let value = this.hir.mirror(value);\n let value_ty = value.ty.clone();\n let result = this.temp(value_ty.clone());\n\n \/\/ to start, malloc some memory of suitable type (thus far, uninitialized):\n let rvalue = Rvalue::Box(value.ty.clone());\n this.cfg.push_assign(block, expr_span, &result, rvalue);\n\n \/\/ schedule a shallow free of that memory, lest we unwind:\n let extent = this.extent_of_innermost_scope();\n this.schedule_drop(expr_span, extent, DropKind::Free, &result, value_ty);\n\n \/\/ initialize the box contents:\n let contents = result.clone().deref();\n unpack!(block = this.into(&contents, block, value));\n\n \/\/ now that the result is fully initialized, cancel the drop\n \/\/ by \"using\" the result (which is linear):\n block.and(Rvalue::Use(Operand::Consume(result)))\n }\n ExprKind::Cast { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))\n }\n ExprKind::ReifyFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty))\n }\n ExprKind::UnsafeFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty))\n }\n ExprKind::Unsize { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Unsize, source, expr.ty))\n }\n ExprKind::Vec { fields } => {\n \/\/ (*) We would (maybe) be closer to trans if we\n \/\/ handled this and other aggregate cases via\n \/\/ `into()`, not `as_rvalue` -- in that case, instead\n \/\/ of generating\n \/\/\n \/\/ let tmp1 = ...1;\n \/\/ let tmp2 = ...2;\n \/\/ dest = 
Rvalue::Aggregate(Foo, [tmp1, tmp2])\n \/\/\n \/\/ we could just generate\n \/\/\n \/\/ dest.f = ...1;\n \/\/ dest.g = ...2;\n \/\/\n \/\/ The problem is that then we would need to:\n \/\/\n \/\/ (a) have a more complex mechanism for handling\n \/\/ partial cleanup;\n \/\/ (b) distinguish the case where the type `Foo` has a\n \/\/ destructor, in which case creating an instance\n \/\/ as a whole \"arms\" the destructor, and you can't\n \/\/ write individual fields; and,\n \/\/ (c) handle the case where the type Foo has no\n \/\/ fields. We don't want `let x: ();` to compile\n \/\/ to the same MIR as `let x = ();`.\n\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Vec, fields))\n }\n ExprKind::Tuple { fields } => { \/\/ see (*) above\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))\n }\n ExprKind::Closure { closure_id, substs, upvars } => { \/\/ see (*) above\n let upvars =\n upvars.into_iter()\n .map(|upvar| unpack!(block = this.as_operand(block, upvar)))\n .collect();\n block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))\n }\n ExprKind::Adt { adt_def, variant_index, substs, fields, base } => { \/\/ see (*) above\n \/\/ first process the set of fields that were provided\n \/\/ (evaluating them in order given by user)\n let fields_map: FnvHashMap<_, _> =\n fields.into_iter()\n .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))\n .collect();\n\n \/\/ if base expression is given, evaluate it now\n let base = base.map(|base| unpack!(block = this.as_lvalue(block, base)));\n\n \/\/ get list of all fields that we will need\n let field_names = this.hir.all_fields(adt_def, variant_index);\n\n \/\/ for the actual values we use, take either the\n \/\/ expr the user specified or, if they didn't\n \/\/ specify something for this field name, create a\n \/\/ path relative to the base (which must have been\n \/\/ supplied, or the IR is internally\n \/\/ inconsistent).\n let fields: Vec<_> =\n field_names.into_iter()\n .map(|n| match fields_map.get(&n) {\n Some(v) => v.clone(),\n None => Operand::Consume(base.clone().unwrap().field(n)),\n })\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),\n fields))\n }\n ExprKind::Literal { .. } |\n ExprKind::Block { .. } |\n ExprKind::Match { .. } |\n ExprKind::If { .. } |\n ExprKind::Loop { .. } |\n ExprKind::LogicalOp { .. } |\n ExprKind::Call { .. } |\n ExprKind::Field { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Index { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::Assign { .. } |\n ExprKind::AssignOp { .. } |\n ExprKind::Break { .. } |\n ExprKind::Continue { .. } |\n ExprKind::Return { .. } |\n ExprKind::StaticRef { .. } => {\n \/\/ these do not have corresponding `Rvalue` variants,\n \/\/ so make an operand and then return that\n debug_assert!(match Category::of(&expr.kind) {\n Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false,\n _ => true,\n });\n let operand = unpack!(block = this.as_operand(block, expr));\n block.and(Rvalue::Use(operand))\n }\n }\n }\n}\n<commit_msg>[MIR] Fix type of temporary for `box EXPR`<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! See docs in build\/expr\/mod.rs\n\nuse rustc_data_structures::fnv::FnvHashMap;\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse hair::*;\nuse rustc::mir::repr::*;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, yielding an rvalue.\n pub fn as_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>\n where M: Mirror<'tcx, Output = Expr<'tcx>>\n {\n let expr = self.hir.mirror(expr);\n self.expr_as_rvalue(block, expr)\n }\n\n fn expr_as_rvalue(&mut self,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<Rvalue<'tcx>> {\n debug!(\"expr_as_rvalue(block={:?}, expr={:?})\", block, expr);\n\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.as_rvalue(block, value))\n }\n ExprKind::InlineAsm { asm } => {\n block.and(Rvalue::InlineAsm(asm.clone()))\n }\n ExprKind::Repeat { value, count } => {\n let value_operand = unpack!(block = this.as_operand(block, value));\n block.and(Rvalue::Repeat(value_operand, count))\n }\n ExprKind::Borrow { region, borrow_kind, arg } => {\n let arg_lvalue = unpack!(block = this.as_lvalue(block, arg));\n block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue))\n }\n ExprKind::Binary { op, lhs, rhs } => {\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let rhs = unpack!(block = this.as_operand(block, rhs));\n block.and(Rvalue::BinaryOp(op, lhs, rhs))\n }\n ExprKind::Unary { op, arg } => {\n let arg = unpack!(block = this.as_operand(block, arg));\n block.and(Rvalue::UnaryOp(op, arg))\n }\n ExprKind::Box { value } => {\n let value = this.hir.mirror(value);\n let result = this.temp(expr.ty);\n\n \/\/ to start, malloc some memory of suitable type (thus far, uninitialized):\n let rvalue = Rvalue::Box(value.ty);\n this.cfg.push_assign(block, expr_span, &result, rvalue);\n\n \/\/ schedule a shallow free of that memory, lest we unwind:\n let extent = this.extent_of_innermost_scope();\n this.schedule_drop(expr_span, extent, DropKind::Free, &result, value.ty);\n\n \/\/ initialize the box contents:\n let contents = result.clone().deref();\n unpack!(block = this.into(&contents, block, value));\n\n \/\/ now that the result is fully initialized, cancel the drop\n \/\/ by \"using\" the result (which is linear):\n block.and(Rvalue::Use(Operand::Consume(result)))\n }\n ExprKind::Cast { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))\n }\n ExprKind::ReifyFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty))\n }\n ExprKind::UnsafeFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty))\n }\n ExprKind::Unsize { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Unsize, 
source, expr.ty))\n }\n ExprKind::Vec { fields } => {\n \/\/ (*) We would (maybe) be closer to trans if we\n \/\/ handled this and other aggregate cases via\n \/\/ `into()`, not `as_rvalue` -- in that case, instead\n \/\/ of generating\n \/\/\n \/\/ let tmp1 = ...1;\n \/\/ let tmp2 = ...2;\n \/\/ dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])\n \/\/\n \/\/ we could just generate\n \/\/\n \/\/ dest.f = ...1;\n \/\/ dest.g = ...2;\n \/\/\n \/\/ The problem is that then we would need to:\n \/\/\n \/\/ (a) have a more complex mechanism for handling\n \/\/ partial cleanup;\n \/\/ (b) distinguish the case where the type `Foo` has a\n \/\/ destructor, in which case creating an instance\n \/\/ as a whole \"arms\" the destructor, and you can't\n \/\/ write individual fields; and,\n \/\/ (c) handle the case where the type Foo has no\n \/\/ fields. We don't want `let x: ();` to compile\n \/\/ to the same MIR as `let x = ();`.\n\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Vec, fields))\n }\n ExprKind::Tuple { fields } => { \/\/ see (*) above\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))\n }\n ExprKind::Closure { closure_id, substs, upvars } => { \/\/ see (*) above\n let upvars =\n upvars.into_iter()\n .map(|upvar| unpack!(block = this.as_operand(block, upvar)))\n .collect();\n block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))\n }\n ExprKind::Adt { adt_def, variant_index, substs, fields, base } => { \/\/ see (*) above\n \/\/ first process the set of fields that were provided\n \/\/ (evaluating them in order given by user)\n let fields_map: FnvHashMap<_, _> =\n fields.into_iter()\n .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))\n .collect();\n\n \/\/ if base expression is given, evaluate it now\n let base = base.map(|base| unpack!(block = this.as_lvalue(block, base)));\n\n \/\/ get list of all fields that we will need\n let field_names = this.hir.all_fields(adt_def, variant_index);\n\n \/\/ for the actual values we use, take either the\n \/\/ expr the user specified or, if they didn't\n \/\/ specify something for this field name, create a\n \/\/ path relative to the base (which must have been\n \/\/ supplied, or the IR is internally\n \/\/ inconsistent).\n let fields: Vec<_> =\n field_names.into_iter()\n .map(|n| match fields_map.get(&n) {\n Some(v) => v.clone(),\n None => Operand::Consume(base.clone().unwrap().field(n)),\n })\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),\n fields))\n }\n ExprKind::Literal { .. } |\n ExprKind::Block { .. } |\n ExprKind::Match { .. } |\n ExprKind::If { .. } |\n ExprKind::Loop { .. } |\n ExprKind::LogicalOp { .. } |\n ExprKind::Call { .. } |\n ExprKind::Field { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Index { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::Assign { .. } |\n ExprKind::AssignOp { .. } |\n ExprKind::Break { .. } |\n ExprKind::Continue { .. } |\n ExprKind::Return { .. } |\n ExprKind::StaticRef { .. 
} => {\n \/\/ these do not have corresponding `Rvalue` variants,\n \/\/ so make an operand and then return that\n debug_assert!(match Category::of(&expr.kind) {\n Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false,\n _ => true,\n });\n let operand = unpack!(block = this.as_operand(block, expr));\n block.and(Rvalue::Use(operand))\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! See docs in build\/expr\/mod.rs\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse build::scope::LoopScope;\nuse hair::*;\nuse rustc::middle::region::CodeExtent;\nuse rustc::middle::ty;\nuse rustc::mir::repr::*;\nuse syntax::codemap::Span;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, storing the result into `destination`, which\n \/\/\/ is assumed to be uninitialized.\n pub fn into_expr(&mut self,\n destination: &Lvalue<'tcx>,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<()>\n {\n debug!(\"into_expr(destination={:?}, block={:?}, expr={:?})\",\n destination, block, expr);\n\n \/\/ since we frequently have to reference `self` from within a\n \/\/ closure, where `self` would be shadowed, it's easier to\n \/\/ just use the name `this` uniformly\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.into(destination, block, value))\n }\n ExprKind::Block { body: ast_block } => {\n this.ast_block(destination, block, ast_block)\n }\n ExprKind::Match { discriminant, arms } => {\n this.match_expr(destination, expr_span, block, discriminant, arms)\n }\n ExprKind::If { condition: cond_expr, then: then_expr, otherwise: else_expr } => {\n let operand = unpack!(block = this.as_operand(block, cond_expr));\n\n let mut then_block = this.cfg.start_new_block();\n let mut else_block = this.cfg.start_new_block();\n this.cfg.terminate(block, Terminator::If {\n cond: operand,\n targets: (then_block, else_block)\n });\n\n unpack!(then_block = this.into(destination, then_block, then_expr));\n else_block = if let Some(else_expr) = else_expr {\n unpack!(this.into(destination, else_block, else_expr))\n } else {\n \/\/ Body of the `if` expression without an `else` clause must return `()`, thus\n \/\/ we implicitly generate a `else {}` if it is not specified.\n this.cfg.push_assign_unit(else_block, expr_span, &Lvalue::ReturnPointer);\n else_block\n };\n\n let join_block = this.cfg.start_new_block();\n this.cfg.terminate(then_block, Terminator::Goto { target: join_block });\n this.cfg.terminate(else_block, Terminator::Goto { target: join_block });\n\n join_block.unit()\n }\n ExprKind::LogicalOp { op, lhs, rhs } => {\n \/\/ And:\n \/\/\n \/\/ [block: If(lhs)] -true-> [else_block: If(rhs)] -true-> [true_block]\n \/\/ | | (false)\n \/\/ +----------false-----------+------------------> [false_block]\n \/\/\n \/\/ Or:\n \/\/\n \/\/ [block: If(lhs)] -false-> [else_block: If(rhs)] -true-> [true_block]\n \/\/ | | (false)\n \/\/ 
+----------true------------+-------------------> [false_block]\n\n let (true_block, false_block, mut else_block, join_block) =\n (this.cfg.start_new_block(), this.cfg.start_new_block(),\n this.cfg.start_new_block(), this.cfg.start_new_block());\n\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let blocks = match op {\n LogicalOp::And => (else_block, false_block),\n LogicalOp::Or => (true_block, else_block),\n };\n this.cfg.terminate(block, Terminator::If { cond: lhs, targets: blocks });\n\n let rhs = unpack!(else_block = this.as_operand(else_block, rhs));\n this.cfg.terminate(else_block, Terminator::If {\n cond: rhs,\n targets: (true_block, false_block)\n });\n\n this.cfg.push_assign_constant(\n true_block, expr_span, destination,\n Constant {\n span: expr_span,\n ty: this.hir.bool_ty(),\n literal: this.hir.true_literal(),\n });\n\n this.cfg.push_assign_constant(\n false_block, expr_span, destination,\n Constant {\n span: expr_span,\n ty: this.hir.bool_ty(),\n literal: this.hir.false_literal(),\n });\n\n this.cfg.terminate(true_block, Terminator::Goto { target: join_block });\n this.cfg.terminate(false_block, Terminator::Goto { target: join_block });\n\n join_block.unit()\n }\n ExprKind::Loop { condition: opt_cond_expr, body } => {\n \/\/ [block] --> [loop_block] ~~> [loop_block_end] -1-> [exit_block]\n \/\/ ^ |\n \/\/ | 0\n \/\/ | |\n \/\/ | v\n \/\/ [body_block_end] <~~~ [body_block]\n \/\/\n \/\/ If `opt_cond_expr` is `None`, then the graph is somewhat simplified:\n \/\/\n \/\/ [block] --> [loop_block \/ body_block ] ~~> [body_block_end] [exit_block]\n \/\/ ^ |\n \/\/ | |\n \/\/ +--------------------------+\n \/\/\n\n let loop_block = this.cfg.start_new_block();\n let exit_block = this.cfg.start_new_block();\n\n \/\/ start the loop\n this.cfg.terminate(block, Terminator::Goto { target: loop_block });\n\n let might_break = this.in_loop_scope(loop_block, exit_block, move |this| {\n \/\/ conduct the test, if necessary\n let body_block;\n if let Some(cond_expr) = opt_cond_expr {\n \/\/ This loop has a condition, ergo its exit_block is reachable.\n this.find_loop_scope(expr_span, None).might_break = true;\n\n let loop_block_end;\n let cond = unpack!(loop_block_end = this.as_operand(loop_block, cond_expr));\n body_block = this.cfg.start_new_block();\n this.cfg.terminate(loop_block_end,\n Terminator::If {\n cond: cond,\n targets: (body_block, exit_block)\n });\n } else {\n body_block = loop_block;\n }\n\n \/\/ The “return” value of the loop body must always be an unit, but we cannot\n \/\/ reuse that as a “return” value of the whole loop expressions, because some\n \/\/ loops are diverging (e.g. `loop {}`). Thus, we introduce a unit temporary as\n \/\/ the destination for the loop body and assign the loop’s own “return” value\n \/\/ immediately after the iteration is finished.\n let tmp = this.get_unit_temp();\n \/\/ Execute the body, branching back to the test.\n let body_block_end = unpack!(this.into(&tmp, body_block, body));\n this.cfg.terminate(body_block_end, Terminator::Goto { target: loop_block });\n });\n \/\/ If the loop may reach its exit_block, we assign an empty tuple to the\n \/\/ destination to keep the MIR well-formed.\n if might_break {\n this.cfg.push_assign_unit(exit_block, expr_span, destination);\n }\n exit_block.unit()\n }\n ExprKind::Assign { lhs, rhs } => {\n \/\/ Note: we evaluate assignments right-to-left. 
This\n \/\/ is better for borrowck interaction with overloaded\n \/\/ operators like x[j] = x[i].\n let rhs = unpack!(block = this.as_operand(block, rhs));\n let lhs = unpack!(block = this.as_lvalue(block, lhs));\n unpack!(block = this.build_drop(block, lhs.clone()));\n this.cfg.push_assign(block, expr_span, &lhs, Rvalue::Use(rhs));\n block.unit()\n }\n ExprKind::AssignOp { op, lhs, rhs } => {\n \/\/ FIXME(#28160) there is an interesting semantics\n \/\/ question raised here -- should we \"freeze\" the\n \/\/ value of the lhs here? I'm inclined to think not,\n \/\/ since it seems closer to the semantics of the\n \/\/ overloaded version, which takes `&mut self`. This\n \/\/ only affects weird things like `x += {x += 1; x}`\n \/\/ -- is that equal to `x + (x + 1)` or `2*(x+1)`?\n\n \/\/ As above, RTL.\n let rhs = unpack!(block = this.as_operand(block, rhs));\n let lhs = unpack!(block = this.as_lvalue(block, lhs));\n\n \/\/ we don't have to drop prior contents or anything\n \/\/ because AssignOp is only legal for Copy types\n \/\/ (overloaded ops should be desugared into a call).\n this.cfg.push_assign(block, expr_span, &lhs,\n Rvalue::BinaryOp(op,\n Operand::Consume(lhs.clone()),\n rhs));\n\n block.unit()\n }\n ExprKind::Continue { label } => {\n this.break_or_continue(expr_span, label, block,\n |loop_scope| loop_scope.continue_block)\n }\n ExprKind::Break { label } => {\n this.break_or_continue(expr_span, label, block, |loop_scope| {\n loop_scope.might_break = true;\n loop_scope.break_block\n })\n }\n ExprKind::Return { value } => {\n block = match value {\n Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)),\n None => {\n this.cfg.push_assign_unit(block, expr_span, &Lvalue::ReturnPointer);\n block\n }\n };\n let extent = this.extent_of_outermost_scope();\n this.exit_scope(expr_span, extent, block, END_BLOCK);\n this.cfg.start_new_block().unit()\n }\n ExprKind::Call { ty, fun, args } => {\n let diverges = match ty.sty {\n ty::TyBareFn(_, ref f) => f.sig.0.output.diverges(),\n _ => false\n };\n let fun = unpack!(block = this.as_operand(block, fun));\n let args: Vec<_> =\n args.into_iter()\n .map(|arg| unpack!(block = this.as_operand(block, arg)))\n .collect();\n\n let success = this.cfg.start_new_block();\n let cleanup = this.diverge_cleanup();\n this.cfg.terminate(block, Terminator::Call {\n func: fun,\n args: args,\n cleanup: cleanup,\n destination: if diverges {\n None\n } else {\n Some ((destination.clone(), success))\n }\n });\n success.unit()\n }\n\n \/\/ these are the cases that are more naturally handled by some other mode\n ExprKind::Unary { .. } |\n ExprKind::Binary { .. } |\n ExprKind::Box { .. } |\n ExprKind::Cast { .. } |\n ExprKind::ReifyFnPointer { .. } |\n ExprKind::UnsafeFnPointer { .. } |\n ExprKind::Unsize { .. } |\n ExprKind::Repeat { .. } |\n ExprKind::Borrow { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::StaticRef { .. } |\n ExprKind::Vec { .. } |\n ExprKind::Tuple { .. } |\n ExprKind::Adt { .. } |\n ExprKind::Closure { .. } |\n ExprKind::Index { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Literal { .. } |\n ExprKind::InlineAsm { .. } |\n ExprKind::Field { .. 
} => {\n debug_assert!(match Category::of(&expr.kind).unwrap() {\n Category::Rvalue(RvalueFunc::Into) => false,\n _ => true,\n });\n\n let rvalue = unpack!(block = this.as_rvalue(block, expr));\n this.cfg.push_assign(block, expr_span, destination, rvalue);\n block.unit()\n }\n }\n }\n\n fn break_or_continue<F>(&mut self,\n span: Span,\n label: Option<CodeExtent>,\n block: BasicBlock,\n exit_selector: F)\n -> BlockAnd<()>\n where F: FnOnce(&mut LoopScope) -> BasicBlock\n {\n let (exit_block, extent) = {\n let loop_scope = self.find_loop_scope(span, label);\n (exit_selector(loop_scope), loop_scope.extent)\n };\n self.exit_scope(span, extent, block, exit_block);\n self.cfg.start_new_block().unit()\n }\n}\n<commit_msg>Auto merge of #31469 - nagisa:mir-else-dest-fix, r=arielb1<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! See docs in build\/expr\/mod.rs\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse build::scope::LoopScope;\nuse hair::*;\nuse rustc::middle::region::CodeExtent;\nuse rustc::middle::ty;\nuse rustc::mir::repr::*;\nuse syntax::codemap::Span;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, storing the result into `destination`, which\n \/\/\/ is assumed to be uninitialized.\n pub fn into_expr(&mut self,\n destination: &Lvalue<'tcx>,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<()>\n {\n debug!(\"into_expr(destination={:?}, block={:?}, expr={:?})\",\n destination, block, expr);\n\n \/\/ since we frequently have to reference `self` from within a\n \/\/ closure, where `self` would be shadowed, it's easier to\n \/\/ just use the name `this` uniformly\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.into(destination, block, value))\n }\n ExprKind::Block { body: ast_block } => {\n this.ast_block(destination, block, ast_block)\n }\n ExprKind::Match { discriminant, arms } => {\n this.match_expr(destination, expr_span, block, discriminant, arms)\n }\n ExprKind::If { condition: cond_expr, then: then_expr, otherwise: else_expr } => {\n let operand = unpack!(block = this.as_operand(block, cond_expr));\n\n let mut then_block = this.cfg.start_new_block();\n let mut else_block = this.cfg.start_new_block();\n this.cfg.terminate(block, Terminator::If {\n cond: operand,\n targets: (then_block, else_block)\n });\n\n unpack!(then_block = this.into(destination, then_block, then_expr));\n else_block = if let Some(else_expr) = else_expr {\n unpack!(this.into(destination, else_block, else_expr))\n } else {\n \/\/ Body of the `if` expression without an `else` clause must return `()`, thus\n \/\/ we implicitly generate a `else {}` if it is not specified.\n this.cfg.push_assign_unit(else_block, expr_span, destination);\n else_block\n };\n\n let join_block = this.cfg.start_new_block();\n this.cfg.terminate(then_block, Terminator::Goto { target: join_block });\n this.cfg.terminate(else_block, Terminator::Goto { target: join_block });\n\n 
join_block.unit()\n }\n ExprKind::LogicalOp { op, lhs, rhs } => {\n \/\/ And:\n \/\/\n \/\/ [block: If(lhs)] -true-> [else_block: If(rhs)] -true-> [true_block]\n \/\/ | | (false)\n \/\/ +----------false-----------+------------------> [false_block]\n \/\/\n \/\/ Or:\n \/\/\n \/\/ [block: If(lhs)] -false-> [else_block: If(rhs)] -true-> [true_block]\n \/\/ | | (false)\n \/\/ +----------true------------+-------------------> [false_block]\n\n let (true_block, false_block, mut else_block, join_block) =\n (this.cfg.start_new_block(), this.cfg.start_new_block(),\n this.cfg.start_new_block(), this.cfg.start_new_block());\n\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let blocks = match op {\n LogicalOp::And => (else_block, false_block),\n LogicalOp::Or => (true_block, else_block),\n };\n this.cfg.terminate(block, Terminator::If { cond: lhs, targets: blocks });\n\n let rhs = unpack!(else_block = this.as_operand(else_block, rhs));\n this.cfg.terminate(else_block, Terminator::If {\n cond: rhs,\n targets: (true_block, false_block)\n });\n\n this.cfg.push_assign_constant(\n true_block, expr_span, destination,\n Constant {\n span: expr_span,\n ty: this.hir.bool_ty(),\n literal: this.hir.true_literal(),\n });\n\n this.cfg.push_assign_constant(\n false_block, expr_span, destination,\n Constant {\n span: expr_span,\n ty: this.hir.bool_ty(),\n literal: this.hir.false_literal(),\n });\n\n this.cfg.terminate(true_block, Terminator::Goto { target: join_block });\n this.cfg.terminate(false_block, Terminator::Goto { target: join_block });\n\n join_block.unit()\n }\n ExprKind::Loop { condition: opt_cond_expr, body } => {\n \/\/ [block] --> [loop_block] ~~> [loop_block_end] -1-> [exit_block]\n \/\/ ^ |\n \/\/ | 0\n \/\/ | |\n \/\/ | v\n \/\/ [body_block_end] <~~~ [body_block]\n \/\/\n \/\/ If `opt_cond_expr` is `None`, then the graph is somewhat simplified:\n \/\/\n \/\/ [block] --> [loop_block \/ body_block ] ~~> [body_block_end] [exit_block]\n \/\/ ^ |\n \/\/ | |\n \/\/ +--------------------------+\n \/\/\n\n let loop_block = this.cfg.start_new_block();\n let exit_block = this.cfg.start_new_block();\n\n \/\/ start the loop\n this.cfg.terminate(block, Terminator::Goto { target: loop_block });\n\n let might_break = this.in_loop_scope(loop_block, exit_block, move |this| {\n \/\/ conduct the test, if necessary\n let body_block;\n if let Some(cond_expr) = opt_cond_expr {\n \/\/ This loop has a condition, ergo its exit_block is reachable.\n this.find_loop_scope(expr_span, None).might_break = true;\n\n let loop_block_end;\n let cond = unpack!(loop_block_end = this.as_operand(loop_block, cond_expr));\n body_block = this.cfg.start_new_block();\n this.cfg.terminate(loop_block_end,\n Terminator::If {\n cond: cond,\n targets: (body_block, exit_block)\n });\n } else {\n body_block = loop_block;\n }\n\n \/\/ The “return” value of the loop body must always be an unit, but we cannot\n \/\/ reuse that as a “return” value of the whole loop expressions, because some\n \/\/ loops are diverging (e.g. `loop {}`). 
Thus, we introduce a unit temporary as\n \/\/ the destination for the loop body and assign the loop’s own “return” value\n \/\/ immediately after the iteration is finished.\n let tmp = this.get_unit_temp();\n \/\/ Execute the body, branching back to the test.\n let body_block_end = unpack!(this.into(&tmp, body_block, body));\n this.cfg.terminate(body_block_end, Terminator::Goto { target: loop_block });\n });\n \/\/ If the loop may reach its exit_block, we assign an empty tuple to the\n \/\/ destination to keep the MIR well-formed.\n if might_break {\n this.cfg.push_assign_unit(exit_block, expr_span, destination);\n }\n exit_block.unit()\n }\n ExprKind::Assign { lhs, rhs } => {\n \/\/ Note: we evaluate assignments right-to-left. This\n \/\/ is better for borrowck interaction with overloaded\n \/\/ operators like x[j] = x[i].\n let rhs = unpack!(block = this.as_operand(block, rhs));\n let lhs = unpack!(block = this.as_lvalue(block, lhs));\n unpack!(block = this.build_drop(block, lhs.clone()));\n this.cfg.push_assign(block, expr_span, &lhs, Rvalue::Use(rhs));\n block.unit()\n }\n ExprKind::AssignOp { op, lhs, rhs } => {\n \/\/ FIXME(#28160) there is an interesting semantics\n \/\/ question raised here -- should we \"freeze\" the\n \/\/ value of the lhs here? I'm inclined to think not,\n \/\/ since it seems closer to the semantics of the\n \/\/ overloaded version, which takes `&mut self`. This\n \/\/ only affects weird things like `x += {x += 1; x}`\n \/\/ -- is that equal to `x + (x + 1)` or `2*(x+1)`?\n\n \/\/ As above, RTL.\n let rhs = unpack!(block = this.as_operand(block, rhs));\n let lhs = unpack!(block = this.as_lvalue(block, lhs));\n\n \/\/ we don't have to drop prior contents or anything\n \/\/ because AssignOp is only legal for Copy types\n \/\/ (overloaded ops should be desugared into a call).\n this.cfg.push_assign(block, expr_span, &lhs,\n Rvalue::BinaryOp(op,\n Operand::Consume(lhs.clone()),\n rhs));\n\n block.unit()\n }\n ExprKind::Continue { label } => {\n this.break_or_continue(expr_span, label, block,\n |loop_scope| loop_scope.continue_block)\n }\n ExprKind::Break { label } => {\n this.break_or_continue(expr_span, label, block, |loop_scope| {\n loop_scope.might_break = true;\n loop_scope.break_block\n })\n }\n ExprKind::Return { value } => {\n block = match value {\n Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)),\n None => {\n this.cfg.push_assign_unit(block, expr_span, &Lvalue::ReturnPointer);\n block\n }\n };\n let extent = this.extent_of_outermost_scope();\n this.exit_scope(expr_span, extent, block, END_BLOCK);\n this.cfg.start_new_block().unit()\n }\n ExprKind::Call { ty, fun, args } => {\n let diverges = match ty.sty {\n ty::TyBareFn(_, ref f) => f.sig.0.output.diverges(),\n _ => false\n };\n let fun = unpack!(block = this.as_operand(block, fun));\n let args: Vec<_> =\n args.into_iter()\n .map(|arg| unpack!(block = this.as_operand(block, arg)))\n .collect();\n\n let success = this.cfg.start_new_block();\n let cleanup = this.diverge_cleanup();\n this.cfg.terminate(block, Terminator::Call {\n func: fun,\n args: args,\n cleanup: cleanup,\n destination: if diverges {\n None\n } else {\n Some ((destination.clone(), success))\n }\n });\n success.unit()\n }\n\n \/\/ these are the cases that are more naturally handled by some other mode\n ExprKind::Unary { .. } |\n ExprKind::Binary { .. } |\n ExprKind::Box { .. } |\n ExprKind::Cast { .. } |\n ExprKind::ReifyFnPointer { .. } |\n ExprKind::UnsafeFnPointer { .. } |\n ExprKind::Unsize { .. 
} |\n ExprKind::Repeat { .. } |\n ExprKind::Borrow { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::StaticRef { .. } |\n ExprKind::Vec { .. } |\n ExprKind::Tuple { .. } |\n ExprKind::Adt { .. } |\n ExprKind::Closure { .. } |\n ExprKind::Index { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Literal { .. } |\n ExprKind::InlineAsm { .. } |\n ExprKind::Field { .. } => {\n debug_assert!(match Category::of(&expr.kind).unwrap() {\n Category::Rvalue(RvalueFunc::Into) => false,\n _ => true,\n });\n\n let rvalue = unpack!(block = this.as_rvalue(block, expr));\n this.cfg.push_assign(block, expr_span, destination, rvalue);\n block.unit()\n }\n }\n }\n\n fn break_or_continue<F>(&mut self,\n span: Span,\n label: Option<CodeExtent>,\n block: BasicBlock,\n exit_selector: F)\n -> BlockAnd<()>\n where F: FnOnce(&mut LoopScope) -> BasicBlock\n {\n let (exit_block, extent) = {\n let loop_scope = self.find_loop_scope(span, label);\n (exit_selector(loop_scope), loop_scope.extent)\n };\n self.exit_scope(span, extent, block, exit_block);\n self.cfg.start_new_block().unit()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #46573 - jseyfried:add_decl_macro_test, r=nrc<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(decl_macro)]\n\npub macro create_struct($a:ident) {\n struct $a;\n impl Clone for $a {\n fn clone(&self) -> Self {\n $a\n }\n }\n}\n\nfn main() {\n create_struct!(Test);\n Test.clone();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test of placement-in syntax, analogous to `new-box-syntax.rs`<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code, unused_variables)]\n#![feature(box_heap)]\n#![feature(placement_in_syntax)]\n\n\/\/ Tests that the new `in` syntax works with unique pointers.\n\/\/\n\/\/ Compare with new-box-syntax.rs\n\nuse std::boxed::{Box, HEAP};\n\nstruct Structure {\n x: isize,\n y: isize,\n}\n\npub fn main() {\n let x: Box<isize> = in HEAP { 2 };\n let b: Box<isize> = in HEAP { 1 + 2 };\n let c = in HEAP { 3 + 4 };\n\n let s: Box<Structure> = in HEAP {\n Structure {\n x: 3,\n y: 4,\n }\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>niko example<commit_after>#[macro_use]\nextern crate abomonation_derive;\nextern crate abomonation;\n\nextern crate rand;\nextern crate timely;\nextern crate graph_map;\nextern crate differential_dataflow;\n\nuse timely::dataflow::*;\n\nuse differential_dataflow::input::Input;\nuse differential_dataflow::operators::*;\n\n\/\/ Types whose definitions I don't actually know.\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Abomonation,Debug,Hash)]\nstruct Region(usize);\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Abomonation,Debug,Hash)]\nstruct Borrow(usize);\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Abomonation,Debug,Hash)]\nstruct Point(usize);\n\/\/ apparently unused?\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Abomonation,Debug,Hash)]\nenum EdgeKind { Inter, Intra }\n\nfn main() {\n\n timely::execute_from_args(std::env::args().skip(1), move |worker| {\n\n let mut probe = ProbeHandle::new();\n\n let mut inputs = worker.dataflow::<(),_,_>(|scope| {\n\n \/\/ inputs to the computation\n let (input_1, borrow_region) = scope.new_collection::<(Region,Borrow,Point),isize>();\n let (input_2, next_statement) = scope.new_collection::<(Point,Point),isize>();\n let (input_3, goto) = scope.new_collection::<(Point,Point),isize>();\n let (input_4, rloets) = scope.new_collection::<(Region,Point),isize>();\n let (input_5, killed) = scope.new_collection::<(Borrow,Point),isize>();\n let (input_6, out_lives) = scope.new_collection::<(Region,Region,Point),isize>();\n\n \/\/ `cfg_edge` contains pairs (P, Q)\n let cfg_edge =\n next_statement\n .concat(&goto)\n .distinct()\n .probe_with(&mut probe);\n\n \/\/ `region_live_at` contains pairs (R, P)\n let region_live_at_1 = rloets.clone();\n let region_live_at_2 = goto.map(|(p,q)| (q,p))\n .join_map(&rloets.map(|(r,q)| (q,r)), |_q,&p,&r| (r,p));\n let region_live_at =\n region_live_at_1\n .concat(®ion_live_at_2)\n .distinct()\n .probe_with(&mut probe);\n\n \/\/ `points_to` contains triples (R, B, P)\n let points_to =\n borrow_region\n .iterate(|points_to| {\n\n \/\/ rule 1: base case.\n let mut result = borrow_region.enter(&points_to.scope());\n\n \/\/ rule 2: result + whatever from this rule.\n result =\n points_to\n .map(|(r2,b,p)| ((b,p),r2))\n .antijoin(&killed.enter(&points_to.scope()))\n .map(|((b,p),r2)| ((r2,p),b))\n .join(&out_lives.enter(&points_to.scope()).map(|(r2,r1,p)| ((r2,p),r1)))\n .map(|((_r2,p),b,r1)| (p,(b,r1)))\n .join(&next_statement.enter(&points_to.scope()))\n .map(|(_p,(b,r1),q)| (r1,b,q))\n .concat(&result);\n\n \/\/ rule 3: result + whatever from this rule.\n result =\n points_to\n .map(|(r1,b,p)| ((b,p),r1))\n .antijoin(&killed.enter(&points_to.scope()))\n .map(|((b,p),r1)| (p,(b,r1)))\n .join(&next_statement.enter(&points_to.scope()))\n .map(|(p,(b,r1),q)| ((r1,q),(b,p)))\n .semijoin(®ion_live_at.enter(&points_to.scope()))\n .map(|((r1,q),(b,_p))| (r1,b,q))\n .concat(&result);\n\n 
result.distinct()\n })\n .probe_with(&mut probe);\n\n let borrow_live_at =\n points_to\n .map(|(r,b,p)| ((r,p),b))\n .semijoin(®ion_live_at)\n .probe_with(&mut probe);\n\n (input_1, input_2, input_3, input_4, input_5, input_6)\n });\n\n\n }).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The compiler code necessary to implement the `#[derive]` extensions.\n\nuse syntax::ast::{self, MetaItem};\nuse syntax::ext::base::{Annotatable, ExtCtxt};\nuse syntax::ext::build::AstBuilder;\nuse syntax::feature_gate;\nuse syntax::codemap;\nuse syntax::parse::token::{intern, intern_and_get_ident};\nuse syntax::ptr::P;\nuse syntax_pos::Span;\n\nmacro_rules! pathvec {\n ($($x:ident)::+) => (\n vec![ $( stringify!($x) ),+ ]\n )\n}\n\nmacro_rules! path {\n ($($x:tt)*) => (\n ::ext::deriving::generic::ty::Path::new( pathvec!( $($x)* ) )\n )\n}\n\nmacro_rules! path_local {\n ($x:ident) => (\n ::deriving::generic::ty::Path::new_local(stringify!($x))\n )\n}\n\nmacro_rules! pathvec_std {\n ($cx:expr, $first:ident :: $($rest:ident)::+) => ({\n let mut v = pathvec!($($rest)::+);\n if let Some(s) = $cx.crate_root {\n v.insert(0, s);\n }\n v\n })\n}\n\nmacro_rules! path_std {\n ($($x:tt)*) => (\n ::deriving::generic::ty::Path::new( pathvec_std!( $($x)* ) )\n )\n}\n\npub mod bounds;\npub mod clone;\npub mod encodable;\npub mod decodable;\npub mod hash;\npub mod debug;\npub mod default;\npub mod custom;\n\n#[path=\"cmp\/partial_eq.rs\"]\npub mod partial_eq;\n#[path=\"cmp\/eq.rs\"]\npub mod eq;\n#[path=\"cmp\/partial_ord.rs\"]\npub mod partial_ord;\n#[path=\"cmp\/ord.rs\"]\npub mod ord;\n\n\npub mod generic;\n\nfn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {\n Span {\n expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(attr_name)),\n span: Some(span),\n allow_internal_unstable: true,\n },\n }),\n ..span\n }\n}\n\npub fn expand_derive(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n annotatable: Annotatable)\n -> Vec<Annotatable> {\n debug!(\"expand_derive: span = {:?}\", span);\n debug!(\"expand_derive: mitem = {:?}\", mitem);\n debug!(\"expand_derive: annotatable input = {:?}\", annotatable);\n let mut item = match annotatable {\n Annotatable::Item(item) => item,\n other => {\n cx.span_err(span, \"`derive` can only be applied to items\");\n return vec![other]\n }\n };\n\n if mitem.value_str().is_some() {\n cx.span_err(mitem.span, \"unexpected value in `derive`\");\n }\n\n let mut traits = mitem.meta_item_list().unwrap_or(&[]).to_owned();\n if traits.is_empty() {\n cx.span_warn(mitem.span, \"empty trait list in `derive`\");\n }\n\n \/\/ First, weed out malformed #[derive]\n traits.retain(|titem| {\n if titem.word().is_none() {\n cx.span_err(titem.span, \"malformed `derive` entry\");\n false\n } else {\n true\n }\n });\n\n \/\/ Next, check for old-style #[derive(Foo)]\n \/\/\n \/\/ These all get expanded to `#[derive_Foo]` and will get expanded first. 
If\n \/\/ we actually add any attributes here then we return to get those expanded\n \/\/ and then eventually we'll come back to finish off the other derive modes.\n let mut new_attributes = Vec::new();\n traits.retain(|titem| {\n let tword = titem.word().unwrap();\n let tname = tword.name();\n\n let derive_mode = ast::Ident::with_empty_ctxt(intern(&tname));\n let derive_mode = cx.resolver.resolve_derive_mode(derive_mode);\n if is_builtin_trait(&tname) || derive_mode.is_some() {\n return true\n }\n\n if !cx.ecfg.enable_custom_derive() {\n feature_gate::emit_feature_err(&cx.parse_sess,\n \"custom_derive\",\n titem.span,\n feature_gate::GateIssue::Language,\n feature_gate::EXPLAIN_CUSTOM_DERIVE);\n } else {\n let name = intern_and_get_ident(&format!(\"derive_{}\", tname));\n let mitem = cx.meta_word(titem.span, name);\n new_attributes.push(cx.attribute(mitem.span, mitem));\n }\n false\n });\n if new_attributes.len() > 0 {\n item = item.map(|mut i| {\n let list = cx.meta_list(mitem.span,\n intern_and_get_ident(\"derive\"),\n traits);\n i.attrs.extend(new_attributes);\n i.attrs.push(cx.attribute(mitem.span, list));\n i\n });\n return vec![Annotatable::Item(item)]\n }\n\n \/\/ Now check for macros-1.1 style custom #[derive].\n \/\/\n \/\/ Expand each of them in order given, but *before* we expand any built-in\n \/\/ derive modes. The logic here is to:\n \/\/\n \/\/ 1. Collect the remaining `#[derive]` annotations into a list. If\n \/\/ there are any left, attach a `#[derive]` attribute to the item\n \/\/ that we're currently expanding with the remaining derive modes.\n \/\/ 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander.\n \/\/ 3. Expand the current item we're expanding, getting back a list of\n \/\/ items that replace it.\n \/\/ 4. Extend the returned list with the current list of items we've\n \/\/ collected so far.\n \/\/ 5. Return everything!\n \/\/\n \/\/ If custom derive extensions end up threading through the `#[derive]`\n \/\/ attribute, we'll get called again later on to continue expanding\n \/\/ those modes.\n let macros_11_derive = traits.iter()\n .cloned()\n .enumerate()\n .filter(|&(_, ref name)| !is_builtin_trait(&name.name().unwrap()))\n .next();\n if let Some((i, titem)) = macros_11_derive {\n let tname = ast::Ident::with_empty_ctxt(intern(&titem.name().unwrap()));\n let ext = cx.resolver.resolve_derive_mode(tname).unwrap();\n traits.remove(i);\n if traits.len() > 0 {\n item = item.map(|mut i| {\n let list = cx.meta_list(mitem.span,\n intern_and_get_ident(\"derive\"),\n traits);\n i.attrs.push(cx.attribute(mitem.span, list));\n i\n });\n }\n let titem = cx.meta_list_item_word(titem.span, titem.name().unwrap());\n let mitem = cx.meta_list(titem.span,\n intern_and_get_ident(\"derive\"),\n vec![titem]);\n let item = Annotatable::Item(item);\n return ext.expand(cx, mitem.span, &mitem, item)\n }\n\n \/\/ Ok, at this point we know that there are no old-style `#[derive_Foo]` nor\n \/\/ any macros-1.1 style `#[derive(Foo)]`. Expand all built-in traits here.\n\n \/\/ RFC #1445. 
`#[derive(PartialEq, Eq)]` adds a (trusted)\n \/\/ `#[structural_match]` attribute.\n if traits.iter().filter_map(|t| t.name()).any(|t| t == \"PartialEq\") &&\n traits.iter().filter_map(|t| t.name()).any(|t| t == \"Eq\") {\n let structural_match = intern_and_get_ident(\"structural_match\");\n let span = allow_unstable(cx, span, \"derive(PartialEq, Eq)\");\n let meta = cx.meta_word(span, structural_match);\n item = item.map(|mut i| {\n i.attrs.push(cx.attribute(span, meta));\n i\n });\n }\n\n \/\/ RFC #1521. `Clone` can assume that `Copy` types' clone implementation is\n \/\/ the same as the copy implementation.\n \/\/\n \/\/ Add a marker attribute here picked up during #[derive(Clone)]\n if traits.iter().filter_map(|t| t.name()).any(|t| t == \"Clone\") &&\n traits.iter().filter_map(|t| t.name()).any(|t| t == \"Copy\") {\n let marker = intern_and_get_ident(\"rustc_copy_clone_marker\");\n let span = allow_unstable(cx, span, \"derive(Copy, Clone)\");\n let meta = cx.meta_word(span, marker);\n item = item.map(|mut i| {\n i.attrs.push(cx.attribute(span, meta));\n i\n });\n }\n\n let mut items = Vec::new();\n for titem in traits.iter() {\n let tname = titem.word().unwrap().name();\n let name = intern_and_get_ident(&format!(\"derive({})\", tname));\n let mitem = cx.meta_word(titem.span, name);\n\n let span = Span {\n expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: titem.span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(&format!(\"derive({})\", tname))),\n span: Some(titem.span),\n allow_internal_unstable: true,\n },\n }),\n ..titem.span\n };\n\n let my_item = Annotatable::Item(item);\n expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| {\n items.push(a);\n });\n item = my_item.expect_item();\n }\n\n items.insert(0, Annotatable::Item(item));\n return items\n}\n\nmacro_rules! derive_traits {\n ($( $name:expr => $func:path, )+) => {\n pub fn is_builtin_trait(name: &str) -> bool {\n match name {\n $( $name )|+ => true,\n _ => false,\n }\n }\n\n fn expand_builtin(name: &str,\n ecx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable)) {\n match name {\n $(\n $name => {\n warn_if_deprecated(ecx, span, $name);\n $func(ecx, span, mitem, item, push);\n }\n )*\n _ => panic!(\"not a builtin derive mode: {}\", name),\n }\n }\n }\n}\n\nderive_traits! 
{\n \"Clone\" => clone::expand_deriving_clone,\n\n \"Hash\" => hash::expand_deriving_hash,\n\n \"RustcEncodable\" => encodable::expand_deriving_rustc_encodable,\n\n \"RustcDecodable\" => decodable::expand_deriving_rustc_decodable,\n\n \"PartialEq\" => partial_eq::expand_deriving_partial_eq,\n \"Eq\" => eq::expand_deriving_eq,\n \"PartialOrd\" => partial_ord::expand_deriving_partial_ord,\n \"Ord\" => ord::expand_deriving_ord,\n\n \"Debug\" => debug::expand_deriving_debug,\n\n \"Default\" => default::expand_deriving_default,\n\n \"Send\" => bounds::expand_deriving_unsafe_bound,\n \"Sync\" => bounds::expand_deriving_unsafe_bound,\n \"Copy\" => bounds::expand_deriving_copy,\n\n \/\/ deprecated\n \"Encodable\" => encodable::expand_deriving_encodable,\n \"Decodable\" => decodable::expand_deriving_decodable,\n}\n\n#[inline] \/\/ because `name` is a compile-time constant\nfn warn_if_deprecated(ecx: &mut ExtCtxt, sp: Span, name: &str) {\n if let Some(replacement) = match name {\n \"Encodable\" => Some(\"RustcEncodable\"),\n \"Decodable\" => Some(\"RustcDecodable\"),\n _ => None,\n } {\n ecx.span_warn(sp,\n &format!(\"derive({}) is deprecated in favor of derive({})\",\n name,\n replacement));\n }\n}\n\n\/\/\/ Construct a name for the inner type parameter that can't collide with any type parameters of\n\/\/\/ the item. This is achieved by starting with a base and then concatenating the names of all\n\/\/\/ other type parameters.\n\/\/ FIXME(aburka): use real hygiene when that becomes possible\nfn hygienic_type_parameter(item: &Annotatable, base: &str) -> String {\n let mut typaram = String::from(base);\n if let Annotatable::Item(ref item) = *item {\n match item.node {\n ast::ItemKind::Struct(_, ast::Generics { ref ty_params, .. }) |\n ast::ItemKind::Enum(_, ast::Generics { ref ty_params, .. }) => {\n for ty in ty_params.iter() {\n typaram.push_str(&ty.ident.name.as_str());\n }\n }\n\n _ => {}\n }\n }\n\n typaram\n}\n\n\/\/\/ Constructs an expression that calls an intrinsic\nfn call_intrinsic(cx: &ExtCtxt,\n mut span: Span,\n intrinsic: &str,\n args: Vec<P<ast::Expr>>)\n -> P<ast::Expr> {\n span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(\"derive\")),\n span: Some(span),\n allow_internal_unstable: true,\n },\n });\n\n let path = cx.std_path(&[\"intrinsics\", intrinsic]);\n let call = cx.expr_call_global(span, path, args);\n\n cx.expr_block(P(ast::Block {\n stmts: vec![cx.stmt_expr(call)],\n id: ast::DUMMY_NODE_ID,\n rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),\n span: span,\n }))\n}\n<commit_msg>Do not add an empty #[derive()] list in expand_derive (fixes #37033)<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
The compiler code necessary to implement the `#[derive]` extensions.\n\nuse syntax::ast::{self, MetaItem};\nuse syntax::ext::base::{Annotatable, ExtCtxt};\nuse syntax::ext::build::AstBuilder;\nuse syntax::feature_gate;\nuse syntax::codemap;\nuse syntax::parse::token::{intern, intern_and_get_ident};\nuse syntax::ptr::P;\nuse syntax_pos::Span;\n\nmacro_rules! pathvec {\n ($($x:ident)::+) => (\n vec![ $( stringify!($x) ),+ ]\n )\n}\n\nmacro_rules! path {\n ($($x:tt)*) => (\n ::ext::deriving::generic::ty::Path::new( pathvec!( $($x)* ) )\n )\n}\n\nmacro_rules! path_local {\n ($x:ident) => (\n ::deriving::generic::ty::Path::new_local(stringify!($x))\n )\n}\n\nmacro_rules! pathvec_std {\n ($cx:expr, $first:ident :: $($rest:ident)::+) => ({\n let mut v = pathvec!($($rest)::+);\n if let Some(s) = $cx.crate_root {\n v.insert(0, s);\n }\n v\n })\n}\n\nmacro_rules! path_std {\n ($($x:tt)*) => (\n ::deriving::generic::ty::Path::new( pathvec_std!( $($x)* ) )\n )\n}\n\npub mod bounds;\npub mod clone;\npub mod encodable;\npub mod decodable;\npub mod hash;\npub mod debug;\npub mod default;\npub mod custom;\n\n#[path=\"cmp\/partial_eq.rs\"]\npub mod partial_eq;\n#[path=\"cmp\/eq.rs\"]\npub mod eq;\n#[path=\"cmp\/partial_ord.rs\"]\npub mod partial_ord;\n#[path=\"cmp\/ord.rs\"]\npub mod ord;\n\n\npub mod generic;\n\nfn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {\n Span {\n expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(attr_name)),\n span: Some(span),\n allow_internal_unstable: true,\n },\n }),\n ..span\n }\n}\n\npub fn expand_derive(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n annotatable: Annotatable)\n -> Vec<Annotatable> {\n debug!(\"expand_derive: span = {:?}\", span);\n debug!(\"expand_derive: mitem = {:?}\", mitem);\n debug!(\"expand_derive: annotatable input = {:?}\", annotatable);\n let mut item = match annotatable {\n Annotatable::Item(item) => item,\n other => {\n cx.span_err(span, \"`derive` can only be applied to items\");\n return vec![other]\n }\n };\n\n if mitem.value_str().is_some() {\n cx.span_err(mitem.span, \"unexpected value in `derive`\");\n }\n\n let mut traits = mitem.meta_item_list().unwrap_or(&[]).to_owned();\n if traits.is_empty() {\n cx.span_warn(mitem.span, \"empty trait list in `derive`\");\n }\n\n \/\/ First, weed out malformed #[derive]\n traits.retain(|titem| {\n if titem.word().is_none() {\n cx.span_err(titem.span, \"malformed `derive` entry\");\n false\n } else {\n true\n }\n });\n\n \/\/ Next, check for old-style #[derive(Foo)]\n \/\/\n \/\/ These all get expanded to `#[derive_Foo]` and will get expanded first. 
If\n \/\/ we actually add any attributes here then we return to get those expanded\n \/\/ and then eventually we'll come back to finish off the other derive modes.\n let mut new_attributes = Vec::new();\n traits.retain(|titem| {\n let tword = titem.word().unwrap();\n let tname = tword.name();\n\n let derive_mode = ast::Ident::with_empty_ctxt(intern(&tname));\n let derive_mode = cx.resolver.resolve_derive_mode(derive_mode);\n if is_builtin_trait(&tname) || derive_mode.is_some() {\n return true\n }\n\n if !cx.ecfg.enable_custom_derive() {\n feature_gate::emit_feature_err(&cx.parse_sess,\n \"custom_derive\",\n titem.span,\n feature_gate::GateIssue::Language,\n feature_gate::EXPLAIN_CUSTOM_DERIVE);\n } else {\n let name = intern_and_get_ident(&format!(\"derive_{}\", tname));\n let mitem = cx.meta_word(titem.span, name);\n new_attributes.push(cx.attribute(mitem.span, mitem));\n }\n false\n });\n if new_attributes.len() > 0 {\n item = item.map(|mut i| {\n i.attrs.extend(new_attributes);\n if traits.len() > 0 {\n let list = cx.meta_list(mitem.span,\n intern_and_get_ident(\"derive\"),\n traits);\n i.attrs.push(cx.attribute(mitem.span, list));\n }\n i\n });\n return vec![Annotatable::Item(item)]\n }\n\n \/\/ Now check for macros-1.1 style custom #[derive].\n \/\/\n \/\/ Expand each of them in order given, but *before* we expand any built-in\n \/\/ derive modes. The logic here is to:\n \/\/\n \/\/ 1. Collect the remaining `#[derive]` annotations into a list. If\n \/\/ there are any left, attach a `#[derive]` attribute to the item\n \/\/ that we're currently expanding with the remaining derive modes.\n \/\/ 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander.\n \/\/ 3. Expand the current item we're expanding, getting back a list of\n \/\/ items that replace it.\n \/\/ 4. Extend the returned list with the current list of items we've\n \/\/ collected so far.\n \/\/ 5. Return everything!\n \/\/\n \/\/ If custom derive extensions end up threading through the `#[derive]`\n \/\/ attribute, we'll get called again later on to continue expanding\n \/\/ those modes.\n let macros_11_derive = traits.iter()\n .cloned()\n .enumerate()\n .filter(|&(_, ref name)| !is_builtin_trait(&name.name().unwrap()))\n .next();\n if let Some((i, titem)) = macros_11_derive {\n let tname = ast::Ident::with_empty_ctxt(intern(&titem.name().unwrap()));\n let ext = cx.resolver.resolve_derive_mode(tname).unwrap();\n traits.remove(i);\n if traits.len() > 0 {\n item = item.map(|mut i| {\n let list = cx.meta_list(mitem.span,\n intern_and_get_ident(\"derive\"),\n traits);\n i.attrs.push(cx.attribute(mitem.span, list));\n i\n });\n }\n let titem = cx.meta_list_item_word(titem.span, titem.name().unwrap());\n let mitem = cx.meta_list(titem.span,\n intern_and_get_ident(\"derive\"),\n vec![titem]);\n let item = Annotatable::Item(item);\n return ext.expand(cx, mitem.span, &mitem, item)\n }\n\n \/\/ Ok, at this point we know that there are no old-style `#[derive_Foo]` nor\n \/\/ any macros-1.1 style `#[derive(Foo)]`. Expand all built-in traits here.\n\n \/\/ RFC #1445. 
`#[derive(PartialEq, Eq)]` adds a (trusted)\n \/\/ `#[structural_match]` attribute.\n if traits.iter().filter_map(|t| t.name()).any(|t| t == \"PartialEq\") &&\n traits.iter().filter_map(|t| t.name()).any(|t| t == \"Eq\") {\n let structural_match = intern_and_get_ident(\"structural_match\");\n let span = allow_unstable(cx, span, \"derive(PartialEq, Eq)\");\n let meta = cx.meta_word(span, structural_match);\n item = item.map(|mut i| {\n i.attrs.push(cx.attribute(span, meta));\n i\n });\n }\n\n \/\/ RFC #1521. `Clone` can assume that `Copy` types' clone implementation is\n \/\/ the same as the copy implementation.\n \/\/\n \/\/ Add a marker attribute here picked up during #[derive(Clone)]\n if traits.iter().filter_map(|t| t.name()).any(|t| t == \"Clone\") &&\n traits.iter().filter_map(|t| t.name()).any(|t| t == \"Copy\") {\n let marker = intern_and_get_ident(\"rustc_copy_clone_marker\");\n let span = allow_unstable(cx, span, \"derive(Copy, Clone)\");\n let meta = cx.meta_word(span, marker);\n item = item.map(|mut i| {\n i.attrs.push(cx.attribute(span, meta));\n i\n });\n }\n\n let mut items = Vec::new();\n for titem in traits.iter() {\n let tname = titem.word().unwrap().name();\n let name = intern_and_get_ident(&format!(\"derive({})\", tname));\n let mitem = cx.meta_word(titem.span, name);\n\n let span = Span {\n expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: titem.span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(&format!(\"derive({})\", tname))),\n span: Some(titem.span),\n allow_internal_unstable: true,\n },\n }),\n ..titem.span\n };\n\n let my_item = Annotatable::Item(item);\n expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| {\n items.push(a);\n });\n item = my_item.expect_item();\n }\n\n items.insert(0, Annotatable::Item(item));\n return items\n}\n\nmacro_rules! derive_traits {\n ($( $name:expr => $func:path, )+) => {\n pub fn is_builtin_trait(name: &str) -> bool {\n match name {\n $( $name )|+ => true,\n _ => false,\n }\n }\n\n fn expand_builtin(name: &str,\n ecx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable)) {\n match name {\n $(\n $name => {\n warn_if_deprecated(ecx, span, $name);\n $func(ecx, span, mitem, item, push);\n }\n )*\n _ => panic!(\"not a builtin derive mode: {}\", name),\n }\n }\n }\n}\n\nderive_traits! 
{\n \"Clone\" => clone::expand_deriving_clone,\n\n \"Hash\" => hash::expand_deriving_hash,\n\n \"RustcEncodable\" => encodable::expand_deriving_rustc_encodable,\n\n \"RustcDecodable\" => decodable::expand_deriving_rustc_decodable,\n\n \"PartialEq\" => partial_eq::expand_deriving_partial_eq,\n \"Eq\" => eq::expand_deriving_eq,\n \"PartialOrd\" => partial_ord::expand_deriving_partial_ord,\n \"Ord\" => ord::expand_deriving_ord,\n\n \"Debug\" => debug::expand_deriving_debug,\n\n \"Default\" => default::expand_deriving_default,\n\n \"Send\" => bounds::expand_deriving_unsafe_bound,\n \"Sync\" => bounds::expand_deriving_unsafe_bound,\n \"Copy\" => bounds::expand_deriving_copy,\n\n \/\/ deprecated\n \"Encodable\" => encodable::expand_deriving_encodable,\n \"Decodable\" => decodable::expand_deriving_decodable,\n}\n\n#[inline] \/\/ because `name` is a compile-time constant\nfn warn_if_deprecated(ecx: &mut ExtCtxt, sp: Span, name: &str) {\n if let Some(replacement) = match name {\n \"Encodable\" => Some(\"RustcEncodable\"),\n \"Decodable\" => Some(\"RustcDecodable\"),\n _ => None,\n } {\n ecx.span_warn(sp,\n &format!(\"derive({}) is deprecated in favor of derive({})\",\n name,\n replacement));\n }\n}\n\n\/\/\/ Construct a name for the inner type parameter that can't collide with any type parameters of\n\/\/\/ the item. This is achieved by starting with a base and then concatenating the names of all\n\/\/\/ other type parameters.\n\/\/ FIXME(aburka): use real hygiene when that becomes possible\nfn hygienic_type_parameter(item: &Annotatable, base: &str) -> String {\n let mut typaram = String::from(base);\n if let Annotatable::Item(ref item) = *item {\n match item.node {\n ast::ItemKind::Struct(_, ast::Generics { ref ty_params, .. }) |\n ast::ItemKind::Enum(_, ast::Generics { ref ty_params, .. }) => {\n for ty in ty_params.iter() {\n typaram.push_str(&ty.ident.name.as_str());\n }\n }\n\n _ => {}\n }\n }\n\n typaram\n}\n\n\/\/\/ Constructs an expression that calls an intrinsic\nfn call_intrinsic(cx: &ExtCtxt,\n mut span: Span,\n intrinsic: &str,\n args: Vec<P<ast::Expr>>)\n -> P<ast::Expr> {\n span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {\n call_site: span,\n callee: codemap::NameAndSpan {\n format: codemap::MacroAttribute(intern(\"derive\")),\n span: Some(span),\n allow_internal_unstable: true,\n },\n });\n\n let path = cx.std_path(&[\"intrinsics\", intrinsic]);\n let call = cx.expr_call_global(span, path, args);\n\n cx.expr_block(P(ast::Block {\n stmts: vec![cx.stmt_expr(call)],\n id: ast::DUMMY_NODE_ID,\n rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),\n span: span,\n }))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #45311 - goffrie:issue-40003, r=alexcrichton<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n if false { test(); }\n}\n\nfn test() {\n let rx = Err::<Vec<usize>, u32>(1).into_future();\n\n rx.map(|l: Vec<usize>| stream::iter(l.into_iter().map(|i| Ok(i))))\n .flatten_stream()\n .chunks(50)\n .buffer_unordered(5);\n}\n\nuse future::{Future, IntoFuture};\nmod future {\n use std::result;\n\n use {stream, Stream};\n\n pub trait Future {\n type Item;\n type Error;\n\n fn map<F, U>(self, _: F) -> Map<Self, F>\n where F: FnOnce(Self::Item) -> U,\n Self: Sized,\n {\n panic!()\n }\n\n fn flatten_stream(self) -> FlattenStream<Self>\n where <Self as Future>::Item: stream::Stream<Error=Self::Error>,\n Self: Sized\n {\n panic!()\n }\n }\n\n pub trait IntoFuture {\n type Future: Future<Item=Self::Item, Error=Self::Error>;\n type Item;\n type Error;\n fn into_future(self) -> Self::Future;\n }\n\n impl<F: Future> IntoFuture for F {\n type Future = F;\n type Item = F::Item;\n type Error = F::Error;\n\n fn into_future(self) -> F {\n panic!()\n }\n }\n\n impl<T, E> IntoFuture for result::Result<T, E> {\n type Future = FutureResult<T, E>;\n type Item = T;\n type Error = E;\n\n fn into_future(self) -> FutureResult<T, E> {\n panic!()\n }\n }\n\n pub struct Map<A, F> {\n _a: (A, F),\n }\n\n impl<U, A, F> Future for Map<A, F>\n where A: Future,\n F: FnOnce(A::Item) -> U,\n {\n type Item = U;\n type Error = A::Error;\n }\n\n pub struct FlattenStream<F> {\n _f: F,\n }\n\n impl<F> Stream for FlattenStream<F>\n where F: Future,\n <F as Future>::Item: Stream<Error=F::Error>,\n {\n type Item = <F::Item as Stream>::Item;\n type Error = <F::Item as Stream>::Error;\n }\n\n pub struct FutureResult<T, E> {\n _inner: (T, E),\n }\n\n impl<T, E> Future for FutureResult<T, E> {\n type Item = T;\n type Error = E;\n }\n}\n\nmod stream {\n use IntoFuture;\n\n pub trait Stream {\n type Item;\n type Error;\n\n fn buffer_unordered(self, amt: usize) -> BufferUnordered<Self>\n where Self::Item: IntoFuture<Error = <Self as Stream>::Error>,\n Self: Sized\n {\n new(self, amt)\n }\n\n fn chunks(self, _capacity: usize) -> Chunks<Self>\n where Self: Sized\n {\n panic!()\n }\n }\n\n pub struct IterStream<I> {\n _iter: I,\n }\n\n pub fn iter<J, T, E>(_: J) -> IterStream<J::IntoIter>\n where J: IntoIterator<Item=Result<T, E>>,\n {\n panic!()\n }\n\n impl<I, T, E> Stream for IterStream<I>\n where I: Iterator<Item=Result<T, E>>,\n {\n type Item = T;\n type Error = E;\n }\n\n pub struct Chunks<S> {\n _stream: S\n }\n\n impl<S> Stream for Chunks<S>\n where S: Stream\n {\n type Item = Result<Vec<<S as Stream>::Item>, u32>;\n type Error = <S as Stream>::Error;\n }\n\n pub struct BufferUnordered<S> {\n _stream: S,\n }\n\n enum Slot<T> {\n Next(usize),\n _Data { _a: T },\n }\n\n fn new<S>(_s: S, _amt: usize) -> BufferUnordered<S>\n where S: Stream,\n S::Item: IntoFuture<Error=<S as Stream>::Error>,\n {\n (0..0).map(|_| {\n Slot::Next::<<S::Item as IntoFuture>::Future>(1)\n }).collect::<Vec<_>>();\n panic!()\n }\n\n impl<S> Stream for BufferUnordered<S>\n where S: Stream,\n S::Item: IntoFuture<Error=<S as Stream>::Error>,\n {\n type Item = <S::Item as IntoFuture>::Item;\n type Error = <S as Stream>::Error;\n }\n}\nuse stream::Stream;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ThreadSanitizer test case<commit_after>\/\/ Verifies that ThreadSanitizer is able to detect a data race in heap allocated\n\/\/ memory block.\n\/\/\n\/\/ Test case minimizes the use of the standard library to avoid its ambiguous\n\/\/ status with 
respect to instrumentation (it could vary depending on whatever\n\/\/ a function call is inlined or not).\n\/\/\n\/\/ The conflicting data access is de-facto synchronized with a special TSAN\n\/\/ barrier, which does not introduce synchronization from TSAN perspective, but\n\/\/ is necessary to make the test robust. Without the barrier data race detection\n\/\/ would occasionally fail, making test flaky.\n\/\/\n\/\/ needs-sanitizer-support\n\/\/ only-x86_64\n\/\/\n\/\/ compile-flags: -Z sanitizer=thread -O\n\/\/\n\/\/ run-fail\n\/\/ error-pattern: WARNING: ThreadSanitizer: data race\n\/\/ error-pattern: Location is heap block of size 4\n\/\/ error-pattern: allocated by main thread\n\n#![feature(raw_ref_op)]\n#![feature(rustc_private)]\nextern crate libc;\n\nuse std::mem;\nuse std::ptr;\n\nstatic mut BARRIER: u64 = 0;\n\nextern \"C\" {\n fn __tsan_testonly_barrier_init(barrier: *mut u64, count: u32);\n fn __tsan_testonly_barrier_wait(barrier: *mut u64);\n}\n\nextern \"C\" fn start(c: *mut libc::c_void) -> *mut libc::c_void {\n unsafe {\n let c: *mut u32 = c.cast();\n *c += 1;\n __tsan_testonly_barrier_wait(&raw mut BARRIER);\n ptr::null_mut()\n }\n}\n\nfn main() {\n unsafe {\n __tsan_testonly_barrier_init(&raw mut BARRIER, 2);\n let c: *mut u32 = Box::into_raw(Box::new(1));\n let mut t: libc::pthread_t = mem::zeroed();\n libc::pthread_create(&mut t, ptr::null(), start, c.cast());\n __tsan_testonly_barrier_wait(&raw mut BARRIER);\n *c += 1;\n libc::pthread_join(t, ptr::null_mut());\n Box::from_raw(c);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use toml::Value;\nuse hook::position::HookPosition;\n\n\/\/\/ Check whether the configuration is valid for the store\n\/\/\/\n\/\/\/ The passed `Value` _must be_ the `[store]` sub-tree of the configuration. Otherwise this will\n\/\/\/ fail.\n\/\/\/\n\/\/\/ It checks whether the configuration looks like the store wants it to be:\n\/\/\/\n\/\/\/ ```toml\n\/\/\/ [store]\n\/\/\/ pre-create-hook-aspects = [ \"misc\", \"encryption\", \"version-control\"]\n\/\/\/\n\/\/\/ [[aspects.misc]]\n\/\/\/ parallel = true\n\/\/\/ [[aspects.encryption]]\n\/\/\/ parallel = false\n\/\/\/ [[aspects.version-control]]\n\/\/\/ parallel = false\n\/\/\/\n\/\/\/ [[hooks.gnupg]]\n\/\/\/ aspect = \"encryption\"\n\/\/\/ key = \"0x123456789\"\n\/\/\/\n\/\/\/ [[hooks.git]]\n\/\/\/ aspect = \"version-control\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ It checks:\n\/\/\/ * Whether all the maps are there (whether store, store.aspects, store.aspects.example are all\n\/\/\/ maps)\n\/\/\/ * Whether each aspect configuration has a \"parallel = <Boolean>\" setting\n\/\/\/ * Whether each hook congfiguration has a \"aspect = <String>\" setting\n\/\/\/\n\/\/\/ It does NOT check:\n\/\/\/ * Whether all aspects which are used in the hook configuration are also configured\n\/\/\/\n\/\/\/ No configuration is a valid configuration, as the store will use the most conservative settings\n\/\/\/ automatically. 
This has also performance impact, as all hooks run in no-parallel mode then.\n\/\/\/ You have been warned!\n\/\/\/\n\/\/\/\npub fn config_is_valid(config: &Option<Value>) -> bool {\n use std::collections::BTreeMap;\n\n if config.is_none() {\n return true;\n }\n\n fn has_key_with_map(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key).map(|t| match t { &Value::Table(_) => true, _ => false }).unwrap_or(false)\n }\n\n fn has_key_with_string_ary(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key)\n .map(|t| match t {\n &Value::Array(ref a) => a.iter().all(|elem| {\n match elem {\n &Value::String(_) => true,\n _ => false,\n }\n }),\n _ => false\n }).unwrap_or(false)\n }\n\n \/\/\/ Check that\n \/\/\/ * the top-level configuration\n \/\/\/ * is a table\n \/\/\/ * where all entries of a key `section` (eg. \"hooks\" or \"aspects\")\n \/\/\/ * Are maps\n \/\/\/ * where each has a key `key` (eg. \"aspect\" or \"parallel\")\n \/\/\/ * which fullfills constraint `f` (typecheck)\n fn check_all_inner_maps_have_key_with<F>(store_config: &BTreeMap<String, Value>,\n section: &str,\n key: &str,\n f: F)\n -> bool\n where F: Fn(&Value) -> bool\n {\n store_config.get(section) \/\/ The store config has the section `section`\n .map(|section_table| {\n match section_table { \/\/ which is\n &Value::Table(ref section_table) => \/\/ a table\n section_table\n .values() \/\/ which has values,\n .all(|cfg| { \/\/ and all of these values\n match cfg {\n &Value::Table(ref hook_config) => { \/\/ are tables\n hook_config.get(key) \/\/ with a key\n \/\/ fullfilling this constraint\n .map(|hook_aspect| f(&hook_aspect))\n .unwrap_or(false)\n },\n _ => false,\n }\n }),\n _ => false,\n }\n })\n .unwrap_or(false)\n }\n\n match config {\n &Some(Value::Table(ref t)) => {\n has_key_with_string_ary(t, \"pre-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-delete-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-delete-hook-aspects\") &&\n\n \/\/ The section \"hooks\" has maps which have a key \"aspect\" which has a value of type\n \/\/ String\n check_all_inner_maps_have_key_with(t, \"hooks\", \"aspect\", |asp| {\n match asp { &Value::String(_) => true, _ => false }\n }) &&\n\n \/\/ The section \"aspects\" has maps which have a key \"parllel\" which has a value of type\n \/\/ Boolean\n check_all_inner_maps_have_key_with(t, \"aspects\", \"parallel\", |asp| {\n match asp { &Value::Boolean(_) => true, _ => false, }\n })\n }\n _ => false,\n }\n}\n\npub fn get_pre_read_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-read-hook-aspects\", value)\n}\n\npub fn get_post_read_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-read-hook-aspects\", value)\n}\n\npub fn get_pre_create_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-create-hook-aspects\", value)\n}\n\npub fn get_post_create_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-create-hook-aspects\", value)\n}\n\npub fn 
get_pre_retrieve_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-retrieve-hook-aspects\", value)\n}\n\npub fn get_post_retrieve_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-retrieve-hook-aspects\", value)\n}\n\npub fn get_pre_update_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-update-hook-aspects\", value)\n}\n\npub fn get_post_update_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-update-hook-aspects\", value)\n}\n\npub fn get_pre_delete_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-delete-hook-aspects\", value)\n}\n\npub fn get_post_delete_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-delete-hook-aspects\", value)\n}\n\n#[derive(Debug)]\npub struct AspectConfig {\n parallel: bool,\n config: Value,\n}\n\nimpl AspectConfig {\n\n pub fn new(init: Value) -> AspectConfig {\n let parallel = AspectConfig::is_parallel(&init);\n AspectConfig {\n config: init,\n parallel: parallel,\n }\n }\n\n pub fn config(&self) -> &Value {\n &self.config\n }\n\n fn is_parallel(init: &Value) -> bool {\n match init {\n &Value::Table(ref t) =>\n t.get(\"parallel\")\n .map(|value| {\n match value {\n &Value::Boolean(b) => b,\n _ => false,\n }\n })\n .unwrap_or(false),\n _ => false,\n }\n }\n\n \/\/\/ Get the aspect configuration for an aspect.\n \/\/\/\n \/\/\/ Pass the store configuration object, this searches in `[aspects][<aspect_name>]`.\n \/\/\/\n \/\/\/ Returns `None` if one of the keys in the chain is not available\n fn get_for(v: Value, aspect_name: &str) -> Option<AspectConfig> {\n unimplemented!()\n }\n\n}\n\nfn get_aspect_names_for_aspect_position(config_name: &'static str, value: &Option<Value>) -> Vec<String> {\n let mut v = vec![];\n\n match value {\n &Some(Value::Table(ref t)) => {\n match t.get(config_name) {\n Some(&Value::Array(ref a)) => {\n for elem in a {\n match elem {\n &Value::String(ref s) => v.push(s.clone()),\n _ => warn!(\"Non-String in configuration, inside '{}'\", config_name),\n }\n }\n },\n _ => warn!(\"'{}' configuration key should contain Array, does not\", config_name),\n };\n },\n &None => warn!(\"No store configuration\"),\n _ => warn!(\"Configuration is not a table\"),\n }\n v\n}\n\n\n<commit_msg>Implement AspectConfig::get_for()<commit_after>use toml::Value;\nuse hook::position::HookPosition;\n\n\/\/\/ Check whether the configuration is valid for the store\n\/\/\/\n\/\/\/ The passed `Value` _must be_ the `[store]` sub-tree of the configuration. 
Otherwise this will\n\/\/\/ fail.\n\/\/\/\n\/\/\/ It checks whether the configuration looks like the store wants it to be:\n\/\/\/\n\/\/\/ ```toml\n\/\/\/ [store]\n\/\/\/ pre-create-hook-aspects = [ \"misc\", \"encryption\", \"version-control\"]\n\/\/\/\n\/\/\/ [[aspects.misc]]\n\/\/\/ parallel = true\n\/\/\/ [[aspects.encryption]]\n\/\/\/ parallel = false\n\/\/\/ [[aspects.version-control]]\n\/\/\/ parallel = false\n\/\/\/\n\/\/\/ [[hooks.gnupg]]\n\/\/\/ aspect = \"encryption\"\n\/\/\/ key = \"0x123456789\"\n\/\/\/\n\/\/\/ [[hooks.git]]\n\/\/\/ aspect = \"version-control\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ It checks:\n\/\/\/ * Whether all the maps are there (whether store, store.aspects, store.aspects.example are all\n\/\/\/ maps)\n\/\/\/ * Whether each aspect configuration has a \"parallel = <Boolean>\" setting\n\/\/\/ * Whether each hook congfiguration has a \"aspect = <String>\" setting\n\/\/\/\n\/\/\/ It does NOT check:\n\/\/\/ * Whether all aspects which are used in the hook configuration are also configured\n\/\/\/\n\/\/\/ No configuration is a valid configuration, as the store will use the most conservative settings\n\/\/\/ automatically. This has also performance impact, as all hooks run in no-parallel mode then.\n\/\/\/ You have been warned!\n\/\/\/\n\/\/\/\npub fn config_is_valid(config: &Option<Value>) -> bool {\n use std::collections::BTreeMap;\n\n if config.is_none() {\n return true;\n }\n\n fn has_key_with_map(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key).map(|t| match t { &Value::Table(_) => true, _ => false }).unwrap_or(false)\n }\n\n fn has_key_with_string_ary(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key)\n .map(|t| match t {\n &Value::Array(ref a) => a.iter().all(|elem| {\n match elem {\n &Value::String(_) => true,\n _ => false,\n }\n }),\n _ => false\n }).unwrap_or(false)\n }\n\n \/\/\/ Check that\n \/\/\/ * the top-level configuration\n \/\/\/ * is a table\n \/\/\/ * where all entries of a key `section` (eg. \"hooks\" or \"aspects\")\n \/\/\/ * Are maps\n \/\/\/ * where each has a key `key` (eg. 
\"aspect\" or \"parallel\")\n \/\/\/ * which fullfills constraint `f` (typecheck)\n fn check_all_inner_maps_have_key_with<F>(store_config: &BTreeMap<String, Value>,\n section: &str,\n key: &str,\n f: F)\n -> bool\n where F: Fn(&Value) -> bool\n {\n store_config.get(section) \/\/ The store config has the section `section`\n .map(|section_table| {\n match section_table { \/\/ which is\n &Value::Table(ref section_table) => \/\/ a table\n section_table\n .values() \/\/ which has values,\n .all(|cfg| { \/\/ and all of these values\n match cfg {\n &Value::Table(ref hook_config) => { \/\/ are tables\n hook_config.get(key) \/\/ with a key\n \/\/ fullfilling this constraint\n .map(|hook_aspect| f(&hook_aspect))\n .unwrap_or(false)\n },\n _ => false,\n }\n }),\n _ => false,\n }\n })\n .unwrap_or(false)\n }\n\n match config {\n &Some(Value::Table(ref t)) => {\n has_key_with_string_ary(t, \"pre-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-delete-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-delete-hook-aspects\") &&\n\n \/\/ The section \"hooks\" has maps which have a key \"aspect\" which has a value of type\n \/\/ String\n check_all_inner_maps_have_key_with(t, \"hooks\", \"aspect\", |asp| {\n match asp { &Value::String(_) => true, _ => false }\n }) &&\n\n \/\/ The section \"aspects\" has maps which have a key \"parllel\" which has a value of type\n \/\/ Boolean\n check_all_inner_maps_have_key_with(t, \"aspects\", \"parallel\", |asp| {\n match asp { &Value::Boolean(_) => true, _ => false, }\n })\n }\n _ => false,\n }\n}\n\npub fn get_pre_read_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-read-hook-aspects\", value)\n}\n\npub fn get_post_read_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-read-hook-aspects\", value)\n}\n\npub fn get_pre_create_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-create-hook-aspects\", value)\n}\n\npub fn get_post_create_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-create-hook-aspects\", value)\n}\n\npub fn get_pre_retrieve_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-retrieve-hook-aspects\", value)\n}\n\npub fn get_post_retrieve_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-retrieve-hook-aspects\", value)\n}\n\npub fn get_pre_update_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-update-hook-aspects\", value)\n}\n\npub fn get_post_update_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-update-hook-aspects\", value)\n}\n\npub fn get_pre_delete_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-delete-hook-aspects\", value)\n}\n\npub fn get_post_delete_aspect_names(value: &Option<Value>) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-delete-hook-aspects\", 
value)\n}\n\n#[derive(Debug)]\npub struct AspectConfig {\n parallel: bool,\n config: Value,\n}\n\nimpl AspectConfig {\n\n pub fn new(init: Value) -> AspectConfig {\n let parallel = AspectConfig::is_parallel(&init);\n AspectConfig {\n config: init,\n parallel: parallel,\n }\n }\n\n pub fn config(&self) -> &Value {\n &self.config\n }\n\n fn is_parallel(init: &Value) -> bool {\n match init {\n &Value::Table(ref t) =>\n t.get(\"parallel\")\n .map(|value| {\n match value {\n &Value::Boolean(b) => b,\n _ => false,\n }\n })\n .unwrap_or(false),\n _ => false,\n }\n }\n\n \/\/\/ Get the aspect configuration for an aspect.\n \/\/\/\n \/\/\/ Pass the store configuration object, this searches in `[aspects][<aspect_name>]`.\n \/\/\/\n \/\/\/ Returns `None` if one of the keys in the chain is not available\n pub fn get_for(v: &Option<Value>, a_name: String) -> Option<AspectConfig> {\n match v {\n &Some(Value::Table(ref tabl)) => tabl.get(&a_name[..])\n .map(|asp| AspectConfig::new(asp.clone())),\n _ => None,\n }\n }\n\n}\n\nfn get_aspect_names_for_aspect_position(config_name: &'static str, value: &Option<Value>) -> Vec<String> {\n let mut v = vec![];\n\n match value {\n &Some(Value::Table(ref t)) => {\n match t.get(config_name) {\n Some(&Value::Array(ref a)) => {\n for elem in a {\n match elem {\n &Value::String(ref s) => v.push(s.clone()),\n _ => warn!(\"Non-String in configuration, inside '{}'\", config_name),\n }\n }\n },\n _ => warn!(\"'{}' configuration key should contain Array, does not\", config_name),\n };\n },\n &None => warn!(\"No store configuration\"),\n _ => warn!(\"Configuration is not a table\"),\n }\n v\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Partial commit<commit_after>use nitro::app::App;\nuse nitro::component::Component;\nuse nitro::component::Message;\nuse nitro::game_object::GameObject;\n\npub struct Spinny {\n\n}\n\nimpl Component for Spinny {\n fn receive_message(&mut self, app : &mut App, game_object : &mut GameObject, message : &Message) {\n match *message {\n Message::Update{delta_time} => {\n \/\/game_object.transform.add_rotation(1.0 * delta_time);\n if let Some(horizontal) = app.get_axis_value(\"horizontal\") {\n game_object.transform.add_x(10.0 * delta_time * horizontal);\n }\n }\n _ => {}\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-test will be fixed later\n\/\/ revisions: ast mir\n\/\/[mir]compile-flags: -Z borrowck=mir\n\n#![feature(thread_local)]\n\n#[thread_local]\nstatic FOO: u8 = 3;\n\nfn assert_static(_t: &'static u8) {}\nfn main() {\n assert_static(&FOO); \/\/[ast]~ ERROR [E0597]\n \/\/[mir]~^ ERROR [E0597]\n}\n<commit_msg>Rollup merge of #49547 - Phlosioneer:44831-borrowck-remove-ignore, r=arielb1<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ revisions: ast mir\n\/\/[mir]compile-flags: -Z borrowck=mir\n\n#![feature(thread_local)]\n\n#[thread_local]\nstatic FOO: u8 = 3;\n\nfn assert_static(_t: &'static u8) {}\nfn main() {\n assert_static(&FOO); \/\/[ast]~ ERROR [E0597]\n \/\/[mir]~^ ERROR [E0597]\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Crs type.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>missing file: \/etc\/passwd parsing<commit_after>use std::io::{BufferedReader,File};\nuse std::vec::Vec;\n\n#[deriving(Clone)]\npub struct GECOS {\n pub name : String,\n pub location : String,\n pub telephone : String,\n pub other : Vec<String>,\n}\n\n#[deriving(Clone)]\npub struct Entry {\n pub username : String,\n pub gecos : GECOS,\n pub home : String,\n pub shell : String,\n}\n\nfn parse_gecos(s:&str) -> GECOS {\n let mut split = s.split(',').map(|x| String::from_str(x));\n let name = split.next().unwrap_or(String::from_str(\"\"));\n let loc = split.next().unwrap_or(String::from_str(\"\"));\n let tel = split.next().unwrap_or(String::from_str(\"\"));\n let mut other = Vec::new();\n for o in split {\n other.push(o);\n }\n GECOS{ name:name,location:loc,telephone:tel,other:other }\n}\n\nfn parse_line(s:&str) -> Entry {\n let mut splits = s.split(':').map(|x| String::from_str(x));\n let un = splits.next().unwrap();\n \/\/ skip next three\n splits.next(); splits.next(); splits.next();\n let gecos = parse_gecos(splits.next().unwrap().as_slice());\n let home = splits.next().unwrap();\n let shell = splits.next().unwrap();\n Entry{ username:un, gecos:gecos, home:home, shell:shell }\n}\n\nfn get_all_entries() -> Vec<Entry> {\n let mut v = Vec::new();\n let pw_path = Path::new(\"\/etc\/passwd\");\n let mut f = BufferedReader::new(File::open(&pw_path));\n for line in f.lines() {\n v.push(parse_line(line.unwrap().as_slice()));\n }\n return v;\n}\n\npub fn get_entry(username : String) -> Option<Entry> {\n let passwds = get_all_entries();\n for entry in passwds.iter() {\n if entry.username == username { return Some(entry.clone()); }\n }\n None\n\/\/ Some(Entry{ username: username, gecos : g, home: home, shell:shell })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add first example<commit_after>\/\/! Example from spec chapter 2.13.1\n\nextern crate gerber_types;\n\nuse gerber_types::{Command};\nuse gerber_types::{ExtendedCode, Unit, FileAttribute, Part, Polarity,\n ApertureDefinition, Aperture, Circle};\nuse gerber_types::{FunctionCode};\nuse gerber_types::{DCode, Operation, Coordinates, CoordinateOffset};\nuse gerber_types::{GCode, InterpolationMode};\nuse gerber_types::{MCode};\nuse gerber_types::GerberCode;\n\nfn main() {\n let commands: Vec<Command> = vec![\n Command::FunctionCode(\n FunctionCode::GCode(\n GCode::Comment(\"Ucamco ex. 
1: Two square boxes\".to_string())\n )\n ),\n Command::ExtendedCode(\n ExtendedCode::CoordinateFormat(2, 5)\n ),\n Command::ExtendedCode(\n ExtendedCode::Unit(Unit::Millimeters)\n ),\n Command::ExtendedCode(\n ExtendedCode::FileAttribute(\n FileAttribute::Part(Part::Other(\"Only an example\".to_string()))\n )\n ),\n Command::ExtendedCode(\n ExtendedCode::LoadPolarity(Polarity::Dark)\n ),\n Command::ExtendedCode(\n ExtendedCode::ApertureDefinition(\n ApertureDefinition {\n code: 10,\n aperture: Aperture::Circle(Circle { diameter: 0.01, hole_diameter: None }),\n }\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::SelectAperture(10)\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Move(Coordinates::new(0, 0))\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::GCode(\n GCode::InterpolationMode(InterpolationMode::Linear)\n )\n ),\n \/\/ TODO: The interpolate representation needs to take the coordinate\n \/\/ format into account!\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::new(5, 0), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_y(5), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_x(0), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_y(0), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Move(Coordinates::at_x(6))\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_x(11), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_y(5), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_x(6), None)\n )\n )\n ),\n Command::FunctionCode(\n FunctionCode::DCode(\n DCode::Operation(\n Operation::Interpolate(Coordinates::at_y(0), None)\n )\n )\n ),\n Command::FunctionCode(FunctionCode::MCode(MCode::EndOfFile)),\n ];\n println!(\"{}\", commands.to_code());\n}\n<|endoftext|>"} {"text":"<commit_before>#[macro_use] extern crate log;\nextern crate clap;\n#[macro_use] extern crate semver;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate version;\n\nextern crate libimaglink;\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\n\nuse std::process::exit;\nuse std::ops::Deref;\nuse std::error::Error;\n\nuse libimagrt::runtime::Runtime;\nuse libimagstore::error::StoreError;\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagutil::trace::trace_error;\nuse libimaglink::external::ExternalLinker;\nuse clap::ArgMatches;\nuse url::Url;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-link\";\n let version = &version!()[..];\n let about = \"Link entries\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.err().unwrap());\n exit(1);\n }\n };\n\n debug!(\"Hello. 
Logging was just enabled\");\n debug!(\"I already set up the Runtime object and build the commandline interface parser.\");\n debug!(\"Lets get rollin' ...\");\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n match name {\n \"internal\" => handle_internal_linking(&rt),\n \"external\" => handle_external_linking(&rt),\n _ => {\n warn!(\"No commandline call\");\n exit(1);\n },\n }\n });\n}\n\nfn handle_internal_linking(rt: &Runtime) {\n use libimaglink::internal::InternalLinker;\n use libimagutil::trace::trace_error;\n\n debug!(\"Handle internal linking call\");\n let cmd = rt.cli().subcommand_matches(\"internal\").unwrap();\n\n if cmd.is_present(\"list\") {\n debug!(\"List...\");\n for entry in cmd.value_of(\"list\").unwrap().split(\",\") {\n debug!(\"Listing for '{}'\", entry);\n match get_entry_by_name(rt, entry) {\n Ok(e) => {\n e.get_internal_links()\n .map(|links| {\n let mut i = 0;\n for link in links.iter().map(|l| l.to_str()).filter_map(|x| x) {\n println!(\"{: <3}: {}\", i, link);\n i += 1;\n }\n });\n },\n\n Err(e) => {\n trace_error(&e);\n break;\n },\n }\n }\n debug!(\"Listing ready!\");\n } else {\n let mut from = {\n let mut from = get_from_entry(&rt);\n if from.is_none() {\n warn!(\"No 'from' entry\");\n exit(1);\n }\n from.unwrap()\n };\n debug!(\"Link from = {:?}\", from.deref());\n\n let mut to = {\n let mut to = get_to_entries(&rt);\n if to.is_none() {\n warn!(\"No 'to' entry\");\n exit(1);\n }\n to.unwrap()\n };\n debug!(\"Link to = {:?}\", to.iter().map(|f| f.deref()).collect::<Vec<&Entry>>());\n\n match cmd.subcommand_name() {\n Some(\"add\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.add_internal_link(&mut from) {\n trace_error(&e);\n exit(1);\n }\n }\n },\n\n Some(\"remove\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.remove_internal_link(&mut from) {\n trace_error(&e);\n exit(1);\n }\n }\n },\n\n _ => unreachable!(),\n };\n }\n}\n\nfn get_from_entry<'a>(rt: &'a Runtime) -> Option<FileLockEntry<'a>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .value_of(\"from\")\n .and_then(|from_name| {\n match get_entry_by_name(rt, from_name) {\n Err(e) => {\n debug!(\"We couldn't get the entry from name: '{:?}'\", from_name);\n trace_error(&e); None\n },\n Ok(e) => Some(e),\n }\n\n })\n}\n\nfn get_to_entries<'a>(rt: &'a Runtime) -> Option<Vec<FileLockEntry<'a>>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .values_of(\"to\")\n .map(|values| {\n let mut v = vec![];\n for entry in values.map(|v| get_entry_by_name(rt, v)) {\n match entry {\n Err(e) => trace_error(&e),\n Ok(e) => v.push(e),\n }\n }\n v\n })\n}\n\nfn get_entry_by_name<'a>(rt: &'a Runtime, name: &str) -> Result<FileLockEntry<'a>, StoreError> {\n use libimagstore::storeid::build_entry_path;\n build_entry_path(rt.store(), name)\n .and_then(|path| rt.store().retrieve(path))\n}\n\nfn handle_external_linking(rt: &Runtime) {\n use libimagutil::trace::trace_error;\n\n let scmd = rt.cli().subcommand_matches(\"external\").unwrap();\n let entry_name = scmd.value_of(\"id\").unwrap(); \/\/ enforced by clap\n let entry = get_entry_by_name(rt, entry_name);\n if entry.is_err() {\n trace_error(&entry.err().unwrap());\n exit(1);\n }\n let mut entry = entry.unwrap();\n\n if 
scmd.is_present(\"add\") {\n debug!(\"Adding link to entry!\");\n add_link_to_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"remove\") {\n debug!(\"Removing link from entry!\");\n remove_link_from_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"set\") {\n debug!(\"Setting links in entry!\");\n set_links_for_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"list\") {\n debug!(\"Listing links in entry!\");\n list_links_for_entry(rt.store(), &mut entry);\n return;\n }\n\n panic!(\"Clap failed to enforce one of 'add', 'remove', 'set' or 'list'\");\n}\n\nfn add_link_to_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let link = matches.value_of(\"add\").unwrap();\n\n let link = Url::parse(link);\n if link.is_err() {\n debug!(\"URL parsing error...\");\n trace_error(&link.err().unwrap());\n debug!(\"Exiting\");\n exit(1);\n }\n let link = link.unwrap();\n\n if let Err(e) = entry.add_external_link(store, link) {\n debug!(\"Error while adding external link...\");\n trace_error(&e);\n } else {\n debug!(\"Everything worked well\");\n info!(\"Ok\");\n }\n}\n\nfn remove_link_from_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let link = matches.value_of(\"remove\").unwrap();\n\n let link = Url::parse(link);\n if link.is_err() {\n trace_error(&link.err().unwrap());\n exit(1);\n }\n let link = link.unwrap();\n\n if let Err(e) = entry.remove_external_link(store, link) {\n trace_error(&e);\n } else {\n info!(\"Ok\");\n }\n}\n\nfn set_links_for_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let links = matches\n .value_of(\"links\")\n .map(String::from)\n .unwrap()\n .split(\",\")\n .map(|uri| {\n match Url::parse(uri) {\n Err(e) => {\n warn!(\"Could not parse '{}' as URL, ignoring\", uri);\n trace_error(&e);\n None\n },\n Ok(u) => Some(u),\n }\n })\n .filter_map(|x| x)\n .collect();\n\n if let Err(e) = entry.set_external_links(store, links) {\n trace_error(&e);\n } else {\n info!(\"Ok\");\n }\n}\n\nfn list_links_for_entry(store: &Store, entry: &mut FileLockEntry) {\n let res = entry.get_external_links(store)\n .and_then(|links| {\n let mut i = 0;\n for link in links {\n println!(\"{: <3}: {}\", i, link);\n i += 1;\n }\n Ok(())\n });\n\n match res {\n Err(e) => {\n trace_error(&e);\n },\n Ok(_) => {\n info!(\"Ok\");\n },\n }\n}\n\n<commit_msg>Remove unused imports<commit_after>#[macro_use] extern crate log;\nextern crate clap;\n#[macro_use] extern crate semver;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate version;\n\nextern crate libimaglink;\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\n\nuse std::process::exit;\nuse std::ops::Deref;\n\nuse libimagrt::runtime::Runtime;\nuse libimagstore::error::StoreError;\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagutil::trace::trace_error;\nuse libimaglink::external::ExternalLinker;\nuse clap::ArgMatches;\nuse url::Url;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-link\";\n let version = &version!()[..];\n let about = \"Link entries\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.err().unwrap());\n exit(1);\n }\n };\n\n debug!(\"Hello. 
Logging was just enabled\");\n debug!(\"I already set up the Runtime object and build the commandline interface parser.\");\n debug!(\"Lets get rollin' ...\");\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n match name {\n \"internal\" => handle_internal_linking(&rt),\n \"external\" => handle_external_linking(&rt),\n _ => {\n warn!(\"No commandline call\");\n exit(1);\n },\n }\n });\n}\n\nfn handle_internal_linking(rt: &Runtime) {\n use libimaglink::internal::InternalLinker;\n use libimagutil::trace::trace_error;\n\n debug!(\"Handle internal linking call\");\n let cmd = rt.cli().subcommand_matches(\"internal\").unwrap();\n\n if cmd.is_present(\"list\") {\n debug!(\"List...\");\n for entry in cmd.value_of(\"list\").unwrap().split(\",\") {\n debug!(\"Listing for '{}'\", entry);\n match get_entry_by_name(rt, entry) {\n Ok(e) => {\n e.get_internal_links()\n .map(|links| {\n let mut i = 0;\n for link in links.iter().map(|l| l.to_str()).filter_map(|x| x) {\n println!(\"{: <3}: {}\", i, link);\n i += 1;\n }\n });\n },\n\n Err(e) => {\n trace_error(&e);\n break;\n },\n }\n }\n debug!(\"Listing ready!\");\n } else {\n let mut from = {\n let mut from = get_from_entry(&rt);\n if from.is_none() {\n warn!(\"No 'from' entry\");\n exit(1);\n }\n from.unwrap()\n };\n debug!(\"Link from = {:?}\", from.deref());\n\n let mut to = {\n let mut to = get_to_entries(&rt);\n if to.is_none() {\n warn!(\"No 'to' entry\");\n exit(1);\n }\n to.unwrap()\n };\n debug!(\"Link to = {:?}\", to.iter().map(|f| f.deref()).collect::<Vec<&Entry>>());\n\n match cmd.subcommand_name() {\n Some(\"add\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.add_internal_link(&mut from) {\n trace_error(&e);\n exit(1);\n }\n }\n },\n\n Some(\"remove\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.remove_internal_link(&mut from) {\n trace_error(&e);\n exit(1);\n }\n }\n },\n\n _ => unreachable!(),\n };\n }\n}\n\nfn get_from_entry<'a>(rt: &'a Runtime) -> Option<FileLockEntry<'a>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .value_of(\"from\")\n .and_then(|from_name| {\n match get_entry_by_name(rt, from_name) {\n Err(e) => {\n debug!(\"We couldn't get the entry from name: '{:?}'\", from_name);\n trace_error(&e); None\n },\n Ok(e) => Some(e),\n }\n\n })\n}\n\nfn get_to_entries<'a>(rt: &'a Runtime) -> Option<Vec<FileLockEntry<'a>>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .values_of(\"to\")\n .map(|values| {\n let mut v = vec![];\n for entry in values.map(|v| get_entry_by_name(rt, v)) {\n match entry {\n Err(e) => trace_error(&e),\n Ok(e) => v.push(e),\n }\n }\n v\n })\n}\n\nfn get_entry_by_name<'a>(rt: &'a Runtime, name: &str) -> Result<FileLockEntry<'a>, StoreError> {\n use libimagstore::storeid::build_entry_path;\n build_entry_path(rt.store(), name)\n .and_then(|path| rt.store().retrieve(path))\n}\n\nfn handle_external_linking(rt: &Runtime) {\n use libimagutil::trace::trace_error;\n\n let scmd = rt.cli().subcommand_matches(\"external\").unwrap();\n let entry_name = scmd.value_of(\"id\").unwrap(); \/\/ enforced by clap\n let entry = get_entry_by_name(rt, entry_name);\n if entry.is_err() {\n trace_error(&entry.err().unwrap());\n exit(1);\n }\n let mut entry = entry.unwrap();\n\n if 
scmd.is_present(\"add\") {\n debug!(\"Adding link to entry!\");\n add_link_to_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"remove\") {\n debug!(\"Removing link from entry!\");\n remove_link_from_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"set\") {\n debug!(\"Setting links in entry!\");\n set_links_for_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"list\") {\n debug!(\"Listing links in entry!\");\n list_links_for_entry(rt.store(), &mut entry);\n return;\n }\n\n panic!(\"Clap failed to enforce one of 'add', 'remove', 'set' or 'list'\");\n}\n\nfn add_link_to_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let link = matches.value_of(\"add\").unwrap();\n\n let link = Url::parse(link);\n if link.is_err() {\n debug!(\"URL parsing error...\");\n trace_error(&link.err().unwrap());\n debug!(\"Exiting\");\n exit(1);\n }\n let link = link.unwrap();\n\n if let Err(e) = entry.add_external_link(store, link) {\n debug!(\"Error while adding external link...\");\n trace_error(&e);\n } else {\n debug!(\"Everything worked well\");\n info!(\"Ok\");\n }\n}\n\nfn remove_link_from_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let link = matches.value_of(\"remove\").unwrap();\n\n let link = Url::parse(link);\n if link.is_err() {\n trace_error(&link.err().unwrap());\n exit(1);\n }\n let link = link.unwrap();\n\n if let Err(e) = entry.remove_external_link(store, link) {\n trace_error(&e);\n } else {\n info!(\"Ok\");\n }\n}\n\nfn set_links_for_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let links = matches\n .value_of(\"links\")\n .map(String::from)\n .unwrap()\n .split(\",\")\n .map(|uri| {\n match Url::parse(uri) {\n Err(e) => {\n warn!(\"Could not parse '{}' as URL, ignoring\", uri);\n trace_error(&e);\n None\n },\n Ok(u) => Some(u),\n }\n })\n .filter_map(|x| x)\n .collect();\n\n if let Err(e) = entry.set_external_links(store, links) {\n trace_error(&e);\n } else {\n info!(\"Ok\");\n }\n}\n\nfn list_links_for_entry(store: &Store, entry: &mut FileLockEntry) {\n let res = entry.get_external_links(store)\n .and_then(|links| {\n let mut i = 0;\n for link in links {\n println!(\"{: <3}: {}\", i, link);\n i += 1;\n }\n Ok(())\n });\n\n match res {\n Err(e) => {\n trace_error(&e);\n },\n Ok(_) => {\n info!(\"Ok\");\n },\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") 
=> list(&rt),\n _ => unimplemented!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let mut stdin = stdin.lock();\n let mut line = String::new();\n\n if let Err(e) = stdin.read_line(&mut line) {\n trace_error(&e);\n exit(1);\n };\n\n if let Ok(ttask) = import_task(&line.as_str()) {\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n let uuid = *ttask.uuid();\n match ttask.into_task(rt.store()) {\n Ok(val) => {\n println!(\"Task {} stored in imag\", uuid);\n val\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n };\n } else {\n error!(\"No usable input\");\n exit(1);\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n let stdin = stdin.lock();\n\n match import_tasks(stdin) {\n Ok(ttasks) => for ttask in ttasks {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match libimagtodo::delete::delete(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n _ => {\n }\n } \/\/ end match ttask.status()\n } \/\/ end if c % 2\n counter += 1;\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match libimagtodo::read::get_todo_iterator(rt.store()) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n Ok(val) => val,\n };\n\n for task in iter {\n match task {\n Ok(val) => {\n let uuid = match val.flentry.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n trace_error(&e);\n continue;\n }\n };\n\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{} information\", uuid));\n\n let tw_process = Command::new(\"task\")\n .stdin(Stdio::null())\n .args(&args)\n .spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"failed\");\n });\n let output = tw_process\n .wait_with_output()\n .unwrap_or_else(|e| panic!(\"failed to unwrap output: {}\", e));\n let outstring = String::from_utf8(output.stdout)\n .unwrap_or_else(|e| panic!(\"failed to execute: {}\", e));\n\n println!(\"{}\", outstring);\n } else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n trace_error(&e);\n continue;\n }\n } \/\/ end match task\n } \/\/ end for\n}\n\n<commit_msg>Adapt for new interface of libimagtodo<commit_after>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern 
crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagtodo::task::Task;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") => list(&rt),\n _ => unimplemented!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let mut stdin = stdin.lock();\n let mut line = String::new();\n\n if let Err(e) = stdin.read_line(&mut line) {\n trace_error(&e);\n exit(1);\n };\n\n if let Ok(ttask) = import_task(&line.as_str()) {\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n let uuid = *ttask.uuid();\n match ttask.into_task(rt.store()) {\n Ok(val) => {\n println!(\"Task {} stored in imag\", uuid);\n val\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n };\n } else {\n error!(\"No usable input\");\n exit(1);\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n let stdin = stdin.lock();\n\n match import_tasks(stdin) {\n Ok(ttasks) => for ttask in ttasks {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match Task::delete_by_uuid(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n _ => {\n }\n } \/\/ end match ttask.status()\n } \/\/ end if c % 2\n counter += 1;\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match Task::all(rt.store()) {\n Ok(iter) => iter,\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n };\n\n for task in iter {\n match task {\n Ok(val) => {\n let uuid = match val.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n trace_error(&e);\n continue;\n }\n };\n\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{} information\", uuid));\n\n let tw_process = Command::new(\"task\")\n .stdin(Stdio::null())\n .args(&args)\n .spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"failed\");\n });\n let output = tw_process\n .wait_with_output()\n .unwrap_or_else(|e| panic!(\"failed to unwrap output: {}\", e));\n let outstring = String::from_utf8(output.stdout)\n .unwrap_or_else(|e| panic!(\"failed to execute: {}\", e));\n\n println!(\"{}\", outstring);\n } else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n trace_error(&e);\n continue;\n }\n } \/\/ end match task\n } \/\/ end for\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>account.setSilenceMode method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>decimal has new funtion and all data needed for its calculations<commit_after>const N_DIGITS: usize = 128;\nconst N_BYTES: usize = N_DIGITS \/ 8;\n\n#[derive(Debug, Clone, Copy)]\nstruct Decimal {\n digits: [u8; N_BYTES],\n exp: u8,\n neg: bool,\n}\n\nimpl Decimal {\n pub fn new() -> Decimal {\n Decimal {\n digits: [0; N_BYTES],\n exp: 0,\n neg: false,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update for latest Rust master.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>git work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add texture class<commit_after>use std;\nuse utils;\nuse image;\nuse glium::texture::texture2d::Texture2d;\nuse glium::backend::glutin_backend::GlutinFacade;\n\npub struct Texture {\n pub data: Texture2d,\n}\n\nimpl Texture {\n pub fn new(facade: &GlutinFacade, filename: &std::path::Path) -> Result<Texture, String> {\n let texture_path = try!(utils::get_base_dir()).join(filename);\n println!(\"load texture: \\\"{}\\\"\", texture_path.display());\n\n let image = match image::open(texture_path.as_path()) {\n Ok(v) => v,\n Err(e) => return Err(format!(\"error load texture \\\"{}\\\": {}\", texture_path.display(), e))\n };\n\n let texture = match Texture2d::new(facade, image) {\n Ok(v) => v,\n Err(e) => return Err(format!(\"error create texture \\\"{}\\\": {:?}\", texture_path.display(), e))\n };\n\n Ok(Texture {\n data: texture,\n })\n 
}\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Crudely implement BG X\/BG Y window attrib's<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement proper background dimensions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Approaching a pattern I'm starting to like<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add a test for the memory leak error<commit_after>\/\/error-pattern: the evaluated program leaked memory\n\nfn main() {\n std::mem::forget(Box::new(42));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 Johannes Köster.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Various alignment algorithms.\n\npub mod pairwise;\n\n\n\/\/\/ Alignment operations (Match, Subst, Del and Ins).\n#[derive(PartialEq, Debug, Copy, Clone)]\npub enum AlignmentOperation {\n Match,\n Subst,\n Del,\n Ins\n}\n\n\n\/\/\/ An alignment, consisting of a score, a start in sequence y, a start in sequence x, a length\n\/\/\/ and its edit operations (see alignment::pairwise for meaning of x and y).\n#[derive(Debug)]\npub struct Alignment {\n pub score: i32,\n pub ystart: usize,\n pub xstart: usize,\n pub xlen: usize,\n pub operations: Vec<AlignmentOperation>\n}\n\n\nimpl Alignment {\n \/\/\/ Calculate the cigar string.\n pub fn cigar(&self, hard_clip: bool) -> String {\n let add_op = |op, k, cigar: &mut String| {\n cigar.push_str(&format!(\"{}{}\", k, match op {\n AlignmentOperation::Match => \"=\",\n AlignmentOperation::Subst => \"X\",\n AlignmentOperation::Del => \"D\",\n AlignmentOperation::Ins => \"I\",\n }));\n };\n\n let op_len = |op: AlignmentOperation| {\n (op == AlignmentOperation::Match || op == AlignmentOperation::Subst || op == AlignmentOperation::Ins) as usize\n };\n\n let clip_str = if hard_clip {\"H\"} else {\"S\"};\n\n let mut cigar = String::new();\n\n if !self.operations.is_empty() {\n if self.xstart > 0 {\n cigar.push_str(&format!(\"{}{}\", self.xstart, clip_str));\n }\n\n let mut last = self.operations[0];\n let mut k = 1;\n let mut alen = op_len(last);\n for &op in self.operations[1..].iter() {\n if op == last {\n k += 1;\n } else {\n add_op(last, k, &mut cigar);\n k = 1;\n }\n last = op;\n alen += op_len(op);\n }\n add_op(last, k, &mut cigar);\n\n let clip = self.xlen - alen;\n if clip > 0 {\n cigar.push_str(&format!(\"{}{}\", clip, clip_str));\n }\n }\n else {\n cigar.push_str(&format!(\"{}{}\", self.xlen, clip_str));\n }\n\n cigar\n }\n\n \/\/\/ Return the pretty formatted alignment as a String.\n pub fn pretty(&self, x: &[u8], y: &[u8]) -> String {\n let mut x_pretty = String::new();\n let mut y_pretty = String::new();\n let mut inb_pretty = String::new();\n\n if !self.operations.is_empty() {\n let mut x_i : usize = self.xstart;\n let mut y_i : usize = self.ystart;\n\n \/\/ Add '-' before aligned subsequences and un-aligned 5' substrings of sequences.\n if x_i > y_i {\n let diff = x_i - y_i;\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..x_i])));\n for _ in 0..diff {\n y_pretty.push('-');\n }\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..y_i])));\n } else if x_i < y_i {\n let diff = y_i - x_i;\n for _ in 0..diff {\n x_pretty.push('-');\n }\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..x_i])));\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&y[0..y_i])));\n } else {\n for i in 0..x_i {\n 
x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[i]])));\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[i]])));\n }\n }\n for _ in 0..x_pretty.len() {\n inb_pretty.push(' ');\n }\n\n \/\/ Process the alignment.\n for i in 0..self.operations.len() {\n match self.operations[i] {\n AlignmentOperation::Match => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push_str(\"|\");\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n AlignmentOperation::Subst => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push(' ');\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n AlignmentOperation::Del => {\n x_pretty.push('-');\n\n inb_pretty.push(' ');\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n AlignmentOperation::Ins => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push(' ');\n\n y_pretty.push('-');\n },\n }\n }\n\n \/\/ Add un-aligned 3' substrings of sequences.\n for i in x_i..x.len() {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[i]])));\n }\n for i in y_i..y.len() {\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[i]])));\n }\n\n \/\/ Add trailing '-'.\n if x_pretty.len() > y_pretty.len() {\n for _ in y_pretty.len()..x_pretty.len() {\n y_pretty.push('-');\n }\n } else {\n for _ in x_pretty.len()..y_pretty.len() {\n x_pretty.push('-');\n }\n }\n }\n\n format!(\"{}\\n{}\\n{}\", x_pretty, inb_pretty, y_pretty)\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::AlignmentOperation::*;\n\n #[test]\n fn test_cigar() {\n let alignment = Alignment { score: 5, xstart: 3, ystart: 0, xlen: 10, operations: vec![Match, Match, Match, Subst, Ins, Ins, Del, Del] };\n assert_eq!(alignment.cigar(false), \"3S3=1X2I2D4S\");\n }\n}\n<commit_msg>update copyright; move the pretty alignment test here<commit_after>\/\/ Copyright 2014-2015 Johannes Köster, Vadim Nazarov.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Various alignment algorithms.\n\npub mod pairwise;\n\n\n\/\/\/ Alignment operations (Match, Subst, Del and Ins).\n#[derive(PartialEq, Debug, Copy, Clone)]\npub enum AlignmentOperation {\n Match,\n Subst,\n Del,\n Ins\n}\n\n\n\/\/\/ An alignment, consisting of a score, a start in sequence y, a start in sequence x, a length\n\/\/\/ and its edit operations (see alignment::pairwise for meaning of x and y).\n#[derive(Debug)]\npub struct Alignment {\n pub score: i32,\n pub ystart: usize,\n pub xstart: usize,\n pub xlen: usize,\n pub operations: Vec<AlignmentOperation>\n}\n\n\nimpl Alignment {\n \/\/\/ Calculate the cigar string.\n pub fn cigar(&self, hard_clip: bool) -> String {\n let add_op = |op, k, cigar: &mut String| {\n cigar.push_str(&format!(\"{}{}\", k, match op {\n AlignmentOperation::Match => \"=\",\n AlignmentOperation::Subst => \"X\",\n AlignmentOperation::Del => \"D\",\n AlignmentOperation::Ins => \"I\",\n }));\n };\n\n let op_len = |op: AlignmentOperation| {\n (op == AlignmentOperation::Match || op == AlignmentOperation::Subst || op == AlignmentOperation::Ins) as usize\n };\n\n let clip_str = if hard_clip {\"H\"} else {\"S\"};\n\n let mut cigar = String::new();\n\n if !self.operations.is_empty() {\n if self.xstart > 0 {\n cigar.push_str(&format!(\"{}{}\", self.xstart, clip_str));\n }\n\n let mut last = self.operations[0];\n let mut k = 1;\n let mut alen = op_len(last);\n for &op in self.operations[1..].iter() {\n if op == last {\n k += 1;\n } else {\n add_op(last, k, &mut cigar);\n k = 1;\n }\n last = op;\n alen += op_len(op);\n }\n add_op(last, k, &mut cigar);\n\n let clip = self.xlen - alen;\n if clip > 0 {\n cigar.push_str(&format!(\"{}{}\", clip, clip_str));\n }\n }\n else {\n cigar.push_str(&format!(\"{}{}\", self.xlen, clip_str));\n }\n\n cigar\n }\n\n \/\/\/ Return the pretty formatted alignment as a String.\n pub fn pretty(&self, x: &[u8], y: &[u8]) -> String {\n let mut x_pretty = String::new();\n let mut y_pretty = String::new();\n let mut inb_pretty = String::new();\n\n if !self.operations.is_empty() {\n let mut x_i : usize = self.xstart;\n let mut y_i : usize = self.ystart;\n\n \/\/ Add '-' before aligned subsequences and un-aligned 5' substrings of sequences.\n if x_i > y_i {\n let diff = x_i - y_i;\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..x_i])));\n for _ in 0..diff {\n y_pretty.push('-');\n }\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..y_i])));\n } else if x_i < y_i {\n let diff = y_i - x_i;\n for _ in 0..diff {\n x_pretty.push('-');\n }\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&x[0..x_i])));\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&y[0..y_i])));\n } else {\n for i in 0..x_i {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[i]])));\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[i]])));\n }\n }\n for _ in 0..x_pretty.len() {\n inb_pretty.push(' ');\n }\n\n \/\/ Process the alignment.\n for i in 0..self.operations.len() {\n match self.operations[i] {\n AlignmentOperation::Match => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push_str(\"|\");\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n AlignmentOperation::Subst => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push(' ');\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n 
AlignmentOperation::Del => {\n x_pretty.push('-');\n\n inb_pretty.push(' ');\n\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[y_i]])));\n y_i += 1;\n },\n AlignmentOperation::Ins => {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[x_i]])));\n x_i += 1;\n\n inb_pretty.push(' ');\n\n y_pretty.push('-');\n },\n }\n }\n\n \/\/ Add un-aligned 3' substrings of sequences.\n for i in x_i..x.len() {\n x_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[x[i]])));\n }\n for i in y_i..y.len() {\n y_pretty.push_str(&format!(\"{}\", String::from_utf8_lossy(&[y[i]])));\n }\n\n \/\/ Add trailing '-'.\n if x_pretty.len() > y_pretty.len() {\n for _ in y_pretty.len()..x_pretty.len() {\n y_pretty.push('-');\n }\n } else {\n for _ in x_pretty.len()..y_pretty.len() {\n x_pretty.push('-');\n }\n }\n }\n\n format!(\"{}\\n{}\\n{}\", x_pretty, inb_pretty, y_pretty)\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::AlignmentOperation::*;\n use super::pairwise::*;\n\n #[test]\n fn test_cigar() {\n let alignment = Alignment { score: 5, xstart: 3, ystart: 0, xlen: 10, operations: vec![Match, Match, Match, Subst, Ins, Ins, Del, Del] };\n assert_eq!(alignment.cigar(false), \"3S3=1X2I2D4S\");\n }\n\n #[test]\n fn test_pretty_alignment() {\n let y = b\"TACCGTGGAC\";\n let x = b\"AAAAACCGTTGACGCAA\";\n let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};\n \/\/ alignment.operations: [Match, Match, Match, Match, Match, Subst, Match, Match, Match]\n let mut aligner = Aligner::with_capacity(x.len(), y.len(), -5, -1, score);\n let alignment = aligner.semiglobal(x, y);\n \/\/ let mut alignment = align_semiglobal!(x, y, -5, -1, score);\n \/\/ assert_eq!(alignment.pretty(x, y), align_semiglobal_pretty!(x, y, -5, -1, score));\n println!(\"semiglobal:\\t{:?}\", alignment);\n println!(\"SEMIGLOBAL: \\n{}\\n\", alignment.pretty(x, y));\n \/\/ alignment.operations: [Match, Match, Match, Match, Match, Subst, Match, Match, Match]\n \/\/ alignment = align_local!(x, y, -5, -1, score);\n let alignment = aligner.local(x, y);\n println!(\"local:\\t{:?}\", alignment);\n println!(\"LOCAL: \\n{}\\n\", alignment.pretty(x, y));\n \/\/ assert_eq!(alignment.pretty(x, y), align_local_pretty!(x, y, -5, -1, score));\n \/\/ alignment.operations: [Del, Del, Del, Del, Match, Match, Match, Match, Match, Subst, Match, Match, Match]\n \/\/ alignment = align_global!(x, y, -5, -1, score);\n let alignment = aligner.global(x, y);\n println!(\"global:\\t{:?}\", alignment);\n println!(\"GLOBAL: \\n{}\\n\", alignment.pretty(x, y));\n\n \/\/ let x = b\"AAAAAAA\";\n \/\/ let y = b\"TTTTTTTTTT\";\n \/\/ let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};\n \/\/ let mut aligner = Aligner::with_capacity(x.len(), y.len(), -5, -1, score);\n \/\/ let alignment = aligner.semiglobal(x, y);\n \/\/ println!(\"SEMIGLOBAL: \\n{}\\n\", alignment.pretty(x, y));\n \/\/ let alignment = aligner.local(x, y);\n \/\/ println!(\"LOCAL: \\n{}\\n\", alignment.pretty(x, y));\n \/\/ let alignment = aligner.global(x, y);\n \/\/ println!(\"GLOBAL: \\n{}\\n\", alignment.pretty(x, y));\n \/\/ assert_eq!(alignment.pretty(x, y), align_global_pretty!(x, y, -5, -1, score));\n }\n\n \/\/ fn test_pretty_alignment() {\n \/\/ let x = b\"TACCGTGGAC\";\n \/\/ let y = b\"AAAAACCGTTGACGCAA\";\n \/\/ let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};\n \/\/ let mut aligner = Aligner::with_capacity(x.len(), y.len(), -5, -1, score);\n \/\/ let alignment = aligner.semiglobal(x, y);\n \/\/ let alignment = aligner.local(x, y);\n \/\/ 
let alignment = aligner.global(x, y);\n \/\/ }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add the Alioth chameneos-redux benchmark<commit_after>\/\/ chameneos\n\nimport io::reader_util;\n\nuse std;\nimport std::map;\nimport std::map::hashmap;\nimport std::sort;\n\nfn print_complements() {\n let all = ~[Blue, Red, Yellow];\n for vec::each(all) |aa| {\n for vec::each(all) |bb| {\n io::println(show_color(aa) + \" + \" + show_color(bb) +\n \" -> \" + show_color(transform(aa,bb)));\n }\n }\n}\n\n\/\/ can I combine these two lines?\nenum color_e { Red, Yellow, Blue }\ntype color = color_e;\n\ntype creature_info = { name: uint, color: color };\n\nfn show_color(cc: color) -> str {\n alt (cc) {\n Red {\"red\"}\n Yellow {\"yellow\"}\n Blue {\"blue\"}\n }\n}\n\nfn show_color_list(set: ~[color]) -> str {\n let mut out = \"\";\n for vec::eachi(set) |_ii, col| {\n out += \" \";\n out += show_color(col);\n }\n ret out;\n}\n\nfn show_digit(nn: uint) -> str {\n alt (nn) {\n 0 {\"zero\"}\n 1 {\"one\"}\n 2 {\"two\"}\n 3 {\"three\"}\n 4 {\"four\"}\n 5 {\"five\"}\n 6 {\"six\"}\n 7 {\"seven\"}\n 8 {\"eight\"}\n 9 {\"nine\"}\n _ {fail \"expected digits from 0 to 9...\"}\n }\n}\n\nfn show_number(nn: uint) -> str {\n let mut out = \"\";\n let mut num = nn;\n let mut dig;\n\n if num == 0 { out = show_digit(0) };\n\n while num != 0 {\n dig = num % 10;\n num = num \/ 10;\n out = show_digit(dig) + \" \" + out;\n }\n\n ret out;\n}\n\nfn transform(aa: color, bb: color) -> color {\n alt (aa, bb) {\n (Red, Red ) { Red }\n (Red, Yellow) { Blue }\n (Red, Blue ) { Yellow }\n (Yellow, Red ) { Blue }\n (Yellow, Yellow) { Yellow }\n (Yellow, Blue ) { Red }\n (Blue, Red ) { Yellow }\n (Blue, Yellow) { Red }\n (Blue, Blue ) { Blue }\n }\n}\n\nfn creature(\n name: uint,\n color: color,\n from_rendezvous: comm::port<option<creature_info>>,\n to_rendezvous: comm::chan<creature_info>,\n to_rendezvous_log: comm::chan<str>\n) {\n let mut color = color;\n let mut creatures_met = 0;\n let mut evil_clones_met = 0;\n\n loop {\n \/\/ ask for a pairing\n comm::send(to_rendezvous, {name: name, color: color});\n let resp = comm::recv(from_rendezvous);\n\n \/\/ log and change, or print and quit\n alt resp {\n option::some(other_creature) {\n color = transform(color, other_creature.color);\n\n \/\/ track some statistics\n creatures_met += 1;\n if other_creature.name == name {\n evil_clones_met += 1;\n }\n }\n option::none {\n \/\/ log creatures met and evil clones of self\n let report = #fmt(\"%u\", creatures_met) + \" \" +\n show_number(evil_clones_met);\n comm::send(to_rendezvous_log, report);\n break;\n }\n }\n }\n}\n\nfn rendezvous(nn: uint, set: ~[color]) {\n let from_creatures: comm::port<creature_info> = comm::port();\n let from_creatures_log: comm::port<str> = comm::port();\n let to_rendezvous = comm::chan(from_creatures);\n let to_rendezvous_log = comm::chan(from_creatures_log);\n let to_creature: ~[comm::chan<option<creature_info>>] =\n vec::mapi(set,\n fn@(ii: uint, col: color) -> comm::chan<option<creature_info>> {\n ret do task::spawn_listener |from_rendezvous| {\n creature(ii, col, from_rendezvous, to_rendezvous,\n to_rendezvous_log);\n };\n }\n );\n\n let mut meetings = 0;\n let mut creatures_met = 0;\n let mut creatures_present = 0;\n\n \/\/ use option type instead of initializing to junk?\n let mut first_creature = { name: 0, color: Red };\n let mut second_creature = { name: 0, color: Red };\n\n \/\/ set up meetings...\n while meetings < nn {\n let creature_req: creature_info = comm::recv(from_creatures);\n 
creatures_met += 1;\n\n alt creatures_present {\n 0 {\n first_creature = creature_req;\n creatures_present = 1;\n }\n 1 {\n second_creature = creature_req;\n comm::send(to_creature[first_creature.name],\n some(second_creature));\n comm::send(to_creature[second_creature.name],\n some(first_creature));\n creatures_present = 0;\n meetings += 1;\n }\n _ { fail \"too many creatures are here!\" }\n }\n }\n\n \/\/ tell each creature to stop\n for vec::eachi(to_creature) |_ii, to_one| {\n comm::send(to_one, none);\n }\n\n \/\/ save each creature's meeting stats\n let mut report = ~[];\n for vec::each(to_creature) |_to_one| {\n vec::push(report, comm::recv(from_creatures_log));\n }\n\n \/\/ print each color in the set\n io::println(show_color_list(set));\n\n \/\/ print each creature's stats\n for vec::each(report) |rep| {\n io::println(rep);\n }\n\n \/\/ print the total number of creatures met\n io::println(show_number(creatures_met));\n}\n\nfn main(args: ~[str]) {\n let args = if os::getenv(\"RUST_BENCH\").is_some() || args.len() <= 1u {\n ~[\"\", \"600\"]\n } else {\n args\n };\n\n let nn = uint::from_str(args[1]).get();\n\n print_complements();\n io::println(\"\");\n\n rendezvous(nn, ~[Blue, Red, Yellow]);\n io::println(\"\");\n\n rendezvous(nn,\n ~[Blue, Red, Yellow, Red, Yellow, Blue, Red, Yellow, Red, Blue]);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change typestate to use visit instead of walk<commit_after>\/\/ error-pattern:Unsatisfied precondition constraint (for example, init(bar\n\/\/ xfail-stage0\nfn main() {\n auto bar;\n fn baz(int x) { }\n bind baz(bar);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix dashing skeletons bug<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add hello_keepalive_srv example<commit_after>extern crate clap;\nextern crate fibers;\nextern crate futures;\nextern crate handy_async;\nextern crate miasht;\n\nuse fibers::{Executor, Spawn, ThreadPoolExecutor};\nuse fibers::net::TcpStream;\nuse futures::{Async, Future, Poll};\nuse handy_async::future::Phase;\nuse miasht::{Server, Status};\nuse miasht::server::{ReadRequest, Response};\nuse miasht::builtin::servers::{RawConnection, SimpleHttpServer};\nuse miasht::builtin::futures::WriteAllBytes;\nuse miasht::builtin::headers::ContentLength;\nuse miasht::builtin::FutureExt;\n\nfn main() {\n let mut executor = ThreadPoolExecutor::new().unwrap();\n let addr = \"0.0.0.0:3000\".parse().unwrap();\n let server = SimpleHttpServer::new((), hello);\n let server = server.start(addr, executor.handle());\n let monitor = executor.spawn_monitor(server.join());\n let result = executor.run_fiber(monitor).unwrap();\n println!(\"HTTP Server shutdown: {:?}\", result);\n}\n\nfn hello(_: (), connection: RawConnection) -> Box<Future<Item = (), Error = ()> + Send + 'static> {\n let phase = Phase::A(connection.read_request());\n Box::new(Hello { phase })\n}\n\nstruct Hello {\n phase: Phase<\n ReadRequest<TcpStream>,\n WriteAllBytes<Response<TcpStream>, &'static [u8; 12]>,\n Response<TcpStream>,\n >,\n}\nimpl Future for Hello {\n type Item = ();\n type Error = ();\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n while let Async::Ready(phase) = self.phase.poll().map_err(|_| ())? 
{\n let next = match phase {\n Phase::A(request) => {\n let bytes = b\"Hello, World\";\n let connection = request.finish();\n let mut response = connection.build_response(Status::Ok);\n response.add_header(&ContentLength(bytes.len() as u64));\n Phase::B(response.finish().write_all_bytes(bytes))\n }\n Phase::B(response) => Phase::C(response),\n Phase::C(connection) => Phase::A(connection.read_request()),\n _ => unreachable!(),\n };\n self.phase = next;\n }\n Ok(Async::NotReady)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n let alloc = unsafe { alloc(size) };\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else { None }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n let realloc = unsafe { realloc(self.address, size) };\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unsafe { unalloc(self.address) }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = 
cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<commit_msg>Implement Memory::size<commit_after>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K 
chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n let alloc = unsafe { alloc(size) };\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else { None }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n let realloc = unsafe { realloc(self.address, size) };\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None }\n }\n\n pub fn size(self) -> usize {\n unsafe { alloc_size(self.address) }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unsafe { unalloc(self.address) }\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as 
*mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>a Rust solution to problem #5<commit_after>\/\/\/ Smallest Multiple - https:\/\/projecteuler.net\/problem=5\n\/\/\/\n\/\/\/ 2520 is the smallest number that can be divided by each of the numbers from\n\/\/\/ 1 to 10 without any remainder.\n\/\/\/\n\/\/\/What is the smallest positive number that is evenly divisible by all of the\n\/\/\/numbers from 1 to 20?\n\nuse std::collections::HashMap;\n\/\/use std::collections::Vec;\n\n\n\/\/\/ Return a dictionary with prime keys and exponent values\n\/\/\/\n\/\/\/ The product of all the keys raised to their exponent is the number\n\/\/\/\n\/\/\/ # Example:\n\/\/\/\n\/\/\/ 20 = 2 ** 2 * 5 -> {2: 2, 5:1}\n\/\/\/\nfn get_prime_product(n: u64) -> HashMap<u64, u64> {\n let mut result: HashMap<u64, u64> = HashMap::new();\n let primes_to_20 = [2, 3, 5, 7, 11, 13, 17, 19];\n\n \/\/ Handle quickly case where the number is already prime\n let found = primes_to_20.iter().find(|&x| *x == n);\n match found {\n Some(_) => {\n result.insert(n, 1);\n return result;\n }\n None => {\n }\n }\n\n \/\/ Initialize all multiples to primes to 0\n for p in primes_to_20.iter() {\n result.insert(*p, 0);\n }\n\n let mut number: u64 = n;\n for p in primes_to_20.iter() {\n while number % *p == 0 {\n let x = result[p].clone();\n result.insert(*p, x + 1);\n number \/= *p;\n }\n }\n\n result = result.iter().filter(|&(_, v)| *v != 0)\n .map(|(&k, &v)| (k, v))\n .collect();\n return result;\n}\n\nfn main() {\n let mut result: HashMap<u64, 
u64> = HashMap::new();\n\n for n in 0..18 {\n let prime_product = get_prime_product(20 - n);\n for (&k, &v) in prime_product.iter() {\n if !result.contains_key(&k) || v > result[&k] {\n result.insert(k, v);\n }\n }\n }\n\n let mut final_number = 1;\n for (&k, &v) in result.iter() {\n final_number *= k.pow(v as u32);\n }\n \/\/assert 264792560 == final_number\n println!(\"{}\", final_number);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example for debug info<commit_after>extern crate spin;\n\nfn main() {\n let mutex = spin::Mutex::new(42);\n println!(\"{:?}\", mutex);\n {\n let x = mutex.lock();\n println!(\"{:?}, {:?}\", mutex, *x);\n }\n\n let rwlock = spin::RwLock::new(42);\n println!(\"{:?}\", rwlock);\n {\n let x = rwlock.read();\n println!(\"{:?}, {:?}\", rwlock, *x);\n }\n {\n let x = rwlock.write();\n println!(\"{:?}, {:?}\", rwlock, *x);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse ash::vk;\nuse ash::version::DeviceV1_0;\nuse core::{command, pso, shade, state, target, texture as tex};\nuse core::{IndexType, VertexCount};\nuse {Backend, RawDevice, Resources};\nuse std::sync::Arc;\n\npub struct SubmitInfo {\n pub command_buffer: vk::CommandBuffer,\n}\n\npub struct CommandBuffer {\n pub raw: vk::CommandBuffer,\n pub device: Arc<RawDevice>,\n}\n\nimpl CommandBuffer {\n fn end(&mut self) -> SubmitInfo {\n unsafe {\n self.device.0.end_command_buffer(self.raw); \/\/ TODO: error handling\n }\n\n SubmitInfo {\n command_buffer: self.raw,\n }\n }\n}\n\n\/\/ CommandBuffer trait implementation\nmacro_rules! 
impl_cmd_buffer {\n ($buffer:ident) => (\n impl command::CommandBuffer<Backend> for $buffer {\n unsafe fn end(&mut self) -> SubmitInfo {\n self.end()\n }\n }\n\n \/\/ TEMPORARY!\n impl command::Buffer<Resources> for $buffer {\n fn reset(&mut self) {\n unimplemented!()\n }\n\n fn bind_pipeline_state(&mut self, _: ()) {\n unimplemented!()\n }\n\n fn bind_vertex_buffers(&mut self, _: pso::VertexBufferSet<Resources>) {\n unimplemented!()\n }\n\n fn bind_constant_buffers(&mut self, _: &[pso::ConstantBufferParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {\n unimplemented!()\n }\n\n fn bind_resource_views(&mut self, _: &[pso::ResourceViewParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_unordered_views(&mut self, _: &[pso::UnorderedViewParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_samplers(&mut self, _: &[pso::SamplerParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_pixel_targets(&mut self, _: pso::PixelTargetSet<Resources>) {\n unimplemented!()\n }\n\n fn bind_index(&mut self, _: (), _: IndexType) {\n unimplemented!()\n }\n\n fn set_scissor(&mut self, _: target::Rect) {\n unimplemented!()\n }\n\n fn set_ref_values(&mut self, _: state::RefValues) {\n unimplemented!()\n }\n\n fn copy_buffer(&mut self, src: (), dst: (),\n src_offset_bytes: usize, dst_offset_bytes: usize,\n size_bytes: usize) {\n unimplemented!()\n }\n\n fn copy_buffer_to_texture(&mut self, src: (), src_offset_bytes: usize,\n dst: (),\n kind: tex::Kind,\n face: Option<tex::CubeFace>,\n img: tex::RawImageInfo) {\n unimplemented!()\n }\n\n fn copy_texture_to_buffer(&mut self,\n src: (),\n kind: tex::Kind,\n face: Option<tex::CubeFace>,\n img: tex::RawImageInfo,\n dst: (), dst_offset_bytes: usize) {\n unimplemented!()\n }\n\n fn update_buffer(&mut self, buf: (), data: &[u8], offset: usize) {\n unimplemented!()\n }\n\n fn update_texture(&mut self, tex: (), kind: tex::Kind, face: Option<tex::CubeFace>,\n data: &[u8], image: tex::RawImageInfo) {\n unimplemented!()\n }\n\n fn generate_mipmap(&mut self, srv: ()) {\n unimplemented!()\n }\n\n fn clear_color(&mut self, target: (), value: command::ClearColor) {\n unimplemented!()\n }\n\n fn clear_depth_stencil(&mut self, target: (), depth: Option<target::Depth>,\n stencil: Option<target::Stencil>) {\n unimplemented!()\n }\n\n fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {\n unimplemented!();\n }\n\n fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,\n base: VertexCount, instances: Option<command::InstanceParams>) {\n unimplemented!()\n }\n\n }\n )\n}\n\nimpl_cmd_buffer!(CommandBuffer);\n\npub struct SubpassCommandBuffer(pub CommandBuffer);\n\nimpl command::CommandBuffer<Backend> for SubpassCommandBuffer {\n unsafe fn end(&mut self) -> SubmitInfo {\n self.0.end()\n }\n}\n<commit_msg>[ll] vk: Remove outdated command buffer trait impl macro<commit_after>\/\/ Copyright 2017 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific 
language governing permissions and\n\/\/ limitations under the License.\n\nuse ash::vk;\nuse ash::version::DeviceV1_0;\nuse core::{command, pso, shade, state, target, texture as tex};\nuse core::{IndexType, VertexCount};\nuse {Backend, RawDevice, Resources};\nuse std::sync::Arc;\n\npub struct SubmitInfo {\n pub command_buffer: vk::CommandBuffer,\n}\n\npub struct CommandBuffer {\n pub raw: vk::CommandBuffer,\n pub device: Arc<RawDevice>,\n}\n\nimpl CommandBuffer {\n fn end(&mut self) -> SubmitInfo {\n unsafe {\n self.device.0.end_command_buffer(self.raw); \/\/ TODO: error handling\n }\n\n SubmitInfo {\n command_buffer: self.raw,\n }\n }\n}\n\n\/\/ CommandBuffer trait implementations\nimpl command::CommandBuffer<Backend> for CommandBuffer {\n unsafe fn end(&mut self) -> SubmitInfo {\n self.end()\n }\n}\n\n\/\/ TEMPORARY!\nimpl command::Buffer<Resources> for CommandBuffer {\n fn reset(&mut self) {\n unimplemented!()\n }\n\n fn bind_pipeline_state(&mut self, _: ()) {\n unimplemented!()\n }\n\n fn bind_vertex_buffers(&mut self, _: pso::VertexBufferSet<Resources>) {\n unimplemented!()\n }\n\n fn bind_constant_buffers(&mut self, _: &[pso::ConstantBufferParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {\n unimplemented!()\n }\n\n fn bind_resource_views(&mut self, _: &[pso::ResourceViewParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_unordered_views(&mut self, _: &[pso::UnorderedViewParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_samplers(&mut self, _: &[pso::SamplerParam<Resources>]) {\n unimplemented!()\n }\n\n fn bind_pixel_targets(&mut self, _: pso::PixelTargetSet<Resources>) {\n unimplemented!()\n }\n\n fn bind_index(&mut self, _: (), _: IndexType) {\n unimplemented!()\n }\n\n fn set_scissor(&mut self, _: target::Rect) {\n unimplemented!()\n }\n\n fn set_ref_values(&mut self, _: state::RefValues) {\n unimplemented!()\n }\n\n fn copy_buffer(&mut self, src: (), dst: (),\n src_offset_bytes: usize, dst_offset_bytes: usize,\n size_bytes: usize) {\n unimplemented!()\n }\n\n fn copy_buffer_to_texture(&mut self, src: (), src_offset_bytes: usize,\n dst: (),\n kind: tex::Kind,\n face: Option<tex::CubeFace>,\n img: tex::RawImageInfo) {\n unimplemented!()\n }\n\n fn copy_texture_to_buffer(&mut self,\n src: (),\n kind: tex::Kind,\n face: Option<tex::CubeFace>,\n img: tex::RawImageInfo,\n dst: (), dst_offset_bytes: usize) {\n unimplemented!()\n }\n\n fn update_buffer(&mut self, buf: (), data: &[u8], offset: usize) {\n unimplemented!()\n }\n\n fn update_texture(&mut self, tex: (), kind: tex::Kind, face: Option<tex::CubeFace>,\n data: &[u8], image: tex::RawImageInfo) {\n unimplemented!()\n }\n\n fn generate_mipmap(&mut self, srv: ()) {\n unimplemented!()\n }\n\n fn clear_color(&mut self, target: (), value: command::ClearColor) {\n unimplemented!()\n }\n\n fn clear_depth_stencil(&mut self, target: (), depth: Option<target::Depth>,\n stencil: Option<target::Stencil>) {\n unimplemented!()\n }\n\n fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {\n unimplemented!();\n }\n\n fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,\n base: VertexCount, instances: Option<command::InstanceParams>) {\n unimplemented!()\n }\n}\n\npub struct SubpassCommandBuffer(pub CommandBuffer);\n\nimpl command::CommandBuffer<Backend> for SubpassCommandBuffer {\n unsafe fn end(&mut self) -> SubmitInfo {\n self.0.end()\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Fix typo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lol i'm not using the gpl anymore<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>starting out with Rust basics, and Nominal Adapton basics<commit_after>mod name {\n #[deriving(Clone,Show,Hash)]\n enum Name { \n Symbol (Box<String>),\n ForkL (Box<Name>),\n ForkR (Box<Name>),\n Pair (Box<Name>,Box<Name>),\n }\n #[allow(dead_code)]\n pub fn symbol (s:String) -> Name {\n Name::Symbol(box s)\n }\n #[allow(dead_code)]\n pub fn fork (n:Name) -> (Name,Name) {\n let m = n.clone () ;\n (Name::ForkL(box n), Name::ForkR(box m))\n } \n #[allow(dead_code)]\n pub fn pair (n:Name, m:Name) -> Name {\n Name::Pair(box n, box m)\n }\n\n #[test]\n fn printstuff () {\n let n = symbol (format!(\"one\")) ;\n let m = symbol (format!(\"two\")) ;\n println!(\"Adapton: {}, {}\", n, m);\n println!(\"Adapton: {}, {}\", fork(n), m);\n }\n}\n\n\nfn main () {\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Tuple version of rect<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::attr::Attr;\nuse dom::attr::AttrHelpers;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::Bindings::HTMLElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLElementBinding::HTMLElementMethods;\nuse dom::bindings::codegen::Bindings::HTMLInputElementBinding::HTMLInputElementMethods;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::{ElementCast, HTMLFrameSetElementDerived};\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, HTMLInputElementCast};\nuse dom::bindings::codegen::InheritTypes::{HTMLElementDerived, HTMLBodyElementDerived};\nuse dom::bindings::js::{JSRef, Temporary, MutNullableJS};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::cssstyledeclaration::CSSStyleDeclaration;\nuse dom::document::Document;\nuse dom::element::{Element, ElementTypeId, ActivationElementHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse dom::node::{Node, NodeTypeId, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\n\nuse servo_util::str::DOMString;\n\nuse string_cache::Atom;\n\nuse std::default::Default;\n\n#[dom_struct]\npub struct HTMLElement {\n element: Element,\n style_decl: MutNullableJS<CSSStyleDeclaration>,\n}\n\nimpl HTMLElementDerived for EventTarget {\n fn is_htmlelement(&self) -> bool {\n match *self.type_id() {\n EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::Element)) => false,\n EventTargetTypeId::Node(NodeTypeId::Element(_)) => true,\n _ => false\n }\n }\n}\n\nimpl HTMLElement {\n pub fn new_inherited(type_id: ElementTypeId, tag_name: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLElement {\n HTMLElement {\n element: Element::new_inherited(type_id, tag_name, ns!(HTML), prefix, document),\n style_decl: Default::default(),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLElement> {\n let element = HTMLElement::new_inherited(ElementTypeId::HTMLElement, localName, prefix, document);\n Node::reflect_node(box element, document, HTMLElementBinding::Wrap)\n 
}\n}\n\ntrait PrivateHTMLElementHelpers {\n fn is_body_or_frameset(self) -> bool;\n}\n\nimpl<'a> PrivateHTMLElementHelpers for JSRef<'a, HTMLElement> {\n fn is_body_or_frameset(self) -> bool {\n let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);\n eventtarget.is_htmlbodyelement() || eventtarget.is_htmlframesetelement()\n }\n}\n\nimpl<'a> HTMLElementMethods for JSRef<'a, HTMLElement> {\n fn Style(self) -> Temporary<CSSStyleDeclaration> {\n self.style_decl.or_init(|| {\n let global = window_from_node(self).root();\n CSSStyleDeclaration::new(*global, self)\n })\n }\n\n make_getter!(Title)\n make_setter!(SetTitle, \"title\")\n\n make_getter!(Lang)\n make_setter!(SetLang, \"lang\")\n\n \/\/ http:\/\/html.spec.whatwg.org\/multipage\/#dom-hidden\n make_bool_getter!(Hidden)\n make_bool_setter!(SetHidden, \"hidden\")\n\n global_event_handlers!(NoOnload)\n\n fn GetOnload(self) -> Option<EventHandlerNonNull> {\n if self.is_body_or_frameset() {\n let win = window_from_node(self).root();\n win.GetOnload()\n } else {\n None\n }\n }\n\n fn SetOnload(self, listener: Option<EventHandlerNonNull>) {\n if self.is_body_or_frameset() {\n let win = window_from_node(self).root();\n win.SetOnload(listener)\n }\n }\n\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/interaction.html#dom-click\n fn Click(self) {\n let maybe_input = HTMLInputElementCast::to_ref(self);\n match maybe_input {\n Some(i) if i.Disabled() => return,\n _ => ()\n }\n let element: JSRef<Element> = ElementCast::from_ref(self);\n \/\/ https:\/\/www.w3.org\/Bugs\/Public\/show_bug.cgi?id=27430 ?\n element.as_maybe_activatable().map(|a| a.synthetic_click_activation(false, false, false, false));\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {\n let element: &JSRef<Element> = ElementCast::from_borrowed_ref(self);\n Some(element as &VirtualMethods)\n }\n\n fn after_set_attr(&self, attr: JSRef<Attr>) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(attr),\n _ => ()\n }\n\n let name = attr.local_name().as_slice();\n if name.starts_with(\"on\") {\n let window = window_from_node(*self).root();\n let (cx, url, reflector) = (window.get_cx(),\n window.get_url(),\n window.reflector().get_jsobject());\n let evtarget: JSRef<EventTarget> = EventTargetCast::from_ref(*self);\n evtarget.set_event_handler_uncompiled(cx, url, reflector,\n name.slice_from(2),\n attr.value().as_slice().to_string());\n }\n }\n}\n\nimpl Reflectable for HTMLElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.element.reflector()\n }\n}\n<commit_msg>Handle getting\/setting onload for any element.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::attr::Attr;\nuse dom::attr::AttrHelpers;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::Bindings::HTMLElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLElementBinding::HTMLElementMethods;\nuse dom::bindings::codegen::Bindings::HTMLInputElementBinding::HTMLInputElementMethods;\nuse dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;\nuse dom::bindings::codegen::InheritTypes::{ElementCast, HTMLFrameSetElementDerived};\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, HTMLInputElementCast};\nuse dom::bindings::codegen::InheritTypes::{HTMLElementDerived, HTMLBodyElementDerived};\nuse dom::bindings::js::{JSRef, Temporary, MutNullableJS};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::cssstyledeclaration::CSSStyleDeclaration;\nuse dom::document::Document;\nuse dom::element::{Element, ElementTypeId, ActivationElementHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse dom::node::{Node, NodeTypeId, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\n\nuse servo_util::str::DOMString;\n\nuse string_cache::Atom;\n\nuse std::default::Default;\n\n#[dom_struct]\npub struct HTMLElement {\n element: Element,\n style_decl: MutNullableJS<CSSStyleDeclaration>,\n}\n\nimpl HTMLElementDerived for EventTarget {\n fn is_htmlelement(&self) -> bool {\n match *self.type_id() {\n EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::Element)) => false,\n EventTargetTypeId::Node(NodeTypeId::Element(_)) => true,\n _ => false\n }\n }\n}\n\nimpl HTMLElement {\n pub fn new_inherited(type_id: ElementTypeId, tag_name: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLElement {\n HTMLElement {\n element: Element::new_inherited(type_id, tag_name, ns!(HTML), prefix, document),\n style_decl: Default::default(),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLElement> {\n let element = HTMLElement::new_inherited(ElementTypeId::HTMLElement, localName, prefix, document);\n Node::reflect_node(box element, document, HTMLElementBinding::Wrap)\n }\n}\n\ntrait PrivateHTMLElementHelpers {\n fn is_body_or_frameset(self) -> bool;\n}\n\nimpl<'a> PrivateHTMLElementHelpers for JSRef<'a, HTMLElement> {\n fn is_body_or_frameset(self) -> bool {\n let eventtarget: JSRef<EventTarget> = EventTargetCast::from_ref(self);\n eventtarget.is_htmlbodyelement() || eventtarget.is_htmlframesetelement()\n }\n}\n\nimpl<'a> HTMLElementMethods for JSRef<'a, HTMLElement> {\n fn Style(self) -> Temporary<CSSStyleDeclaration> {\n self.style_decl.or_init(|| {\n let global = window_from_node(self).root();\n CSSStyleDeclaration::new(*global, self)\n })\n }\n\n make_getter!(Title)\n make_setter!(SetTitle, \"title\")\n\n make_getter!(Lang)\n make_setter!(SetLang, \"lang\")\n\n \/\/ http:\/\/html.spec.whatwg.org\/multipage\/#dom-hidden\n make_bool_getter!(Hidden)\n make_bool_setter!(SetHidden, \"hidden\")\n\n global_event_handlers!(NoOnload)\n\n fn GetOnload(self) -> Option<EventHandlerNonNull> {\n if self.is_body_or_frameset() {\n let win = window_from_node(self).root();\n win.GetOnload()\n } else {\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(self);\n target.get_event_handler_common(\"load\")\n }\n }\n\n fn SetOnload(self, listener: Option<EventHandlerNonNull>) {\n if self.is_body_or_frameset() {\n let win = window_from_node(self).root();\n 
win.SetOnload(listener)\n } else {\n let target: JSRef<EventTarget> = EventTargetCast::from_ref(self);\n target.set_event_handler_common(\"load\", listener)\n }\n }\n\n \/\/ https:\/\/html.spec.whatwg.org\/multipage\/interaction.html#dom-click\n fn Click(self) {\n let maybe_input = HTMLInputElementCast::to_ref(self);\n match maybe_input {\n Some(i) if i.Disabled() => return,\n _ => ()\n }\n let element: JSRef<Element> = ElementCast::from_ref(self);\n \/\/ https:\/\/www.w3.org\/Bugs\/Public\/show_bug.cgi?id=27430 ?\n element.as_maybe_activatable().map(|a| a.synthetic_click_activation(false, false, false, false));\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {\n let element: &JSRef<Element> = ElementCast::from_borrowed_ref(self);\n Some(element as &VirtualMethods)\n }\n\n fn after_set_attr(&self, attr: JSRef<Attr>) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(attr),\n _ => ()\n }\n\n let name = attr.local_name().as_slice();\n if name.starts_with(\"on\") {\n let window = window_from_node(*self).root();\n let (cx, url, reflector) = (window.get_cx(),\n window.get_url(),\n window.reflector().get_jsobject());\n let evtarget: JSRef<EventTarget> = EventTargetCast::from_ref(*self);\n evtarget.set_event_handler_uncompiled(cx, url, reflector,\n name.slice_from(2),\n attr.value().as_slice().to_string());\n }\n }\n}\n\nimpl Reflectable for HTMLElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.element.reflector()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Run N child pipe processes in parallel.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a HTTP benchmark<commit_after>#![feature(test)]\nextern crate test;\n\n#[macro_use]\nextern crate nom;\n\nuse nom::IResult;\nuse std::env;\nuse std::fs::File;\n\n#[derive(Debug)]\nstruct Request<'a> {\n method: &'a [u8],\n uri: &'a [u8],\n version: &'a [u8],\n}\n\n#[derive(Debug)]\nstruct Header<'a> {\n name: &'a [u8],\n value: Vec<&'a [u8]>,\n}\n\nfn is_token(c: u8) -> bool {\n match c {\n 128...255 => false,\n 0...31 => false,\n b'(' => false,\n b')' => false,\n b'<' => false,\n b'>' => false,\n b'@' => false,\n b',' => false,\n b';' => false,\n b':' => false,\n b'\\\\' => false,\n b'\"' => false,\n b'\/' => false,\n b'[' => false,\n b']' => false,\n b'?' 
=> false,\n b'=' => false,\n b'{' => false,\n b'}' => false,\n b' ' => false,\n _ => true,\n }\n}\n\nfn not_line_ending(c: u8) -> bool {\n c != b'\\r' && c != b'\\n'\n}\n\nfn is_space(c: u8) -> bool {\n c == b' '\n}\n\nfn is_not_space(c: u8) -> bool { c != b' ' }\nfn is_horizontal_space(c: u8) -> bool { c == b' ' || c == b'\\t' }\n\nfn is_version(c: u8) -> bool {\n c >= b'0' && c <= b'9' || c == b'.'\n}\n\nnamed!(line_ending, alt!(tag!(\"\\r\\n\") | tag!(\"\\n\")));\n\nfn request_line<'a>(input: &'a [u8]) -> IResult<&'a[u8], Request<'a>> {\n chain!(input,\n method: take_while1!(is_token) ~\n take_while1!(is_space) ~\n url: take_while1!(is_not_space) ~\n take_while1!(is_space) ~\n version: http_version ~\n line_ending,\n\n || Request {\n method: method,\n uri: url,\n version: version,\n })\n}\n\nnamed!(http_version, chain!(\n tag!(\"HTTP\/\") ~\n version: take_while1!(is_version),\n \n || version));\n\nnamed!(message_header_value, chain!(\n take_while1!(is_horizontal_space) ~\n data: take_while1!(not_line_ending) ~\n line_ending,\n \n || data));\n\nfn message_header<'a>(input: &'a [u8]) -> IResult<&'a[u8], Header<'a>> {\n chain!(input,\n name: take_while1!(is_token) ~\n char!(':') ~\n values: many1!(message_header_value),\n\n || Header {\n name: name,\n value: values,\n })\n}\n\nfn request<'a>(input: &'a [u8]) -> IResult<&'a[u8], (Request<'a>, Vec<Header<'a>>)> {\n chain!(input,\n req: request_line ~\n h: many1!(message_header) ~\n line_ending,\n\n || (req, h))\n}\n\n\nfn parse(data:&[u8]) -> Option<Vec<(Request, Vec<Header>)>> {\n let mut buf = &data[..];\n let mut v = Vec::new();\n loop {\n match request(buf) {\n IResult::Done(b, r) => {\n buf = b;\n v.push(r);\n\n if b.is_empty() {\n\n \/\/println!(\"{}\", i);\n break;\n }\n },\n IResult::Error(e) => return None\/*panic!(\"{:?}\", e)*\/,\n IResult::Incomplete(_) => return None\/*panic!(\"Incomplete!\")*\/,\n }\n }\n\n Some(v)\n}\n\nuse test::Bencher;\n\n\/*\n#[bench]\nfn small_test(b: &mut Bencher) {\n let data = include_bytes!(\"..\/..\/http-requests.txt\");\n b.iter(||{\n parse(data)\n });\n}\n\n#[bench]\nfn bigger_test(b: &mut Bencher) {\n let data = include_bytes!(\"..\/..\/bigger.txt\");\n b.iter(||{\n parse(data)\n });\n}\n*\/\n\n#[bench]\nfn one_test(b: &mut Bencher) {\n let data = &b\"GET \/ HTTP\/1.1\nHost: www.reddit.com\nUser-Agent: Mozilla\/5.0 (Macintosh; Intel Mac OS X 10.8; rv:15.0) Gecko\/20100101 Firefox\/15.0.1\nAccept: text\/html,application\/xhtml+xml,application\/xml;q=0.9,*\/*;q=0.8\nAccept-Language: en-us,en;q=0.5\nAccept-Encoding: gzip, deflate\nConnection: keep-alive\"[..];\n b.iter(||{\n parse(data)\n });\n}\n\nfn main() {\n let mut contents: Vec<u8> = Vec::new();\n\n {\n use std::io::Read;\n\n let mut file = File::open(env::args().nth(1).expect(\"File to read\")).ok().expect(\"Failed to open file\");\n\n let _ = file.read_to_end(&mut contents).unwrap();\n }\n \n let mut buf = &contents[..];\n loop { parse(buf); }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adding a loop to the guessing game (rust)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start rust version and fighting with pointer<commit_after>#[derive(Debug)]\nstruct ListNode {\n Val: i32,\n Next: Option<Box<ListNode>>,\n}\n\nimpl ListNode {\n fn new(i: i32) -> Self {\n ListNode { Val: i, Next: None }\n }\n\n fn addL(&mut self, l: &Vec<i32>) {\n let org: *mut ListNode = self as *mut ListNode;\n let mut p: *mut ListNode = org;\n for i in l {\n unsafe {\n println!(\"{:?}\", *p);\n (*p).Next = 
Some(Box::new(ListNode::new(*i)));\n let temp_b = Box::from_raw(p);\n p = Box::into_raw(temp_b.Next.unwrap());\n };\n }\n }\n fn removeNthFromEnd(&self, n: i32) {}\n}\nfn main() {\n let mut a = ListNode::new(1);\n a.addL(&vec![2, 3, 4, 5]);\n println!(\"{:?}\", a);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>...that, but for real this time<commit_after>use chrono::{DateTime, FixedOffset};\nuse serde_derive::Deserialize;\n\n#[derive(Deserialize)]\npub(super) struct Metadata {\n title: Option<String>,\n subtitle: Option<String>,\n summary: Option<String>,\n qualifiers: Option<Qualifiers>,\n date: Option<DateTime<FixedOffset>>,\n updated: Option<DateTime<FixedOffset>>,\n permalink: Option<String>,\n thanks: Option<String>,\n tags: Vec<String>,\n featured: bool,\n layout: Option<String>,\n series: Option<Series>,\n}\n\n#[derive(Deserialize)]\nstruct Qualifiers {\n audience: Option<String>,\n epistemic: Option<String>,\n}\n\n#[derive(Deserialize)]\nstruct Book {\n title: String,\n author: String,\n editors: Vec<String>,\n translators: Vec<String>,\n cover: Option<String>,\n link: Option<String>,\n year: u16,\n review: Review,\n}\n\n#[derive(Deserialize)]\nstruct Review {\n rating: Rating,\n summary: String,\n}\n\n#[derive(Deserialize)]\nenum Rating {\n NotRecommended,\n WithQualifications,\n Recommended,\n Required,\n}\n\nimpl std::fmt::Display for Rating {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n write!(\n f,\n \"{}\",\n match self {\n Rating::NotRecommended => \"Not recommended\",\n Rating::WithQualifications => \"Recommended with qualifications\",\n Rating::Recommended => \"Recommended\",\n Rating::Required => \"Required\",\n }\n )\n }\n}\n\n#[derive(Deserialize)]\nstruct Series {\n name: String,\n part: u8,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Leave it up to the application to handle Vulkan snapshots<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Get GLSL shader recompilation working again in the OpenGL renderer<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\nuse std::marker::PhantomData;\n\nstruct Foo<'x> { bar: isize, marker: PhantomData<&'x ()> }\nfn foo1<'a>(x: &Foo) -> &'a isize {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo1<'a>(x: &'a Foo) -> &'a isize\n &x.bar \/\/~ ERROR: cannot infer\n}\n\nfn foo2<'a, 'b>(x: &'a Foo) -> &'b isize {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo2<'a>(x: &'a Foo) -> &'a isize\n &x.bar \/\/~ ERROR: cannot infer\n}\n\nfn foo3<'a>(x: &Foo) -> (&'a isize, &'a isize) {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo3<'a>(x: &'a Foo) -> (&'a isize, &'a isize)\n (&x.bar, &x.bar) \/\/~ ERROR: cannot infer\n \/\/~^ ERROR: cannot infer\n}\n\nfn foo4<'a, 'b>(x: &'a Foo) -> (&'b isize, &'a isize, &'b isize) {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo4<'a>(x: &'a Foo) -> (&'a isize, &'a isize, &'a isize)\n (&x.bar, &x.bar, &x.bar) \/\/~ ERROR: cannot infer\n \/\/~^ ERROR: cannot infer\n}\n\nstruct Cat<'x, T> { cat: &'x isize, t: T }\nstruct Dog<'y> { dog: &'y isize }\n\nfn cat2<'x, 'y>(x: Cat<'x, Dog<'y>>) -> &'x isize {\n \/\/~^ HELP consider using an explicit lifetime parameter as shown: fn cat2<'x>(x: Cat<'x, Dog<'x>>) -> &'x isize\n x.t.dog \/\/~ ERROR E0312\n}\n\nstruct Baz<'x> {\n bar: &'x isize\n}\n\nimpl<'a> Baz<'a> {\n fn baz2<'b>(&self, x: &isize) -> (&'b isize, &'b isize) {\n \/\/~^ HELP consider using an explicit lifetime parameter as shown: fn baz2<'b>(&self, x: &'a isize) -> (&'a isize, &'a isize)\n (self.bar, x) \/\/~ ERROR E0312\n \/\/~^ ERROR E0312\n }\n}\n\nfn main() {}\n<commit_msg>Weaken test `compile-fail\/lifetime-inference-give-expl-lifetime-param`.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\nuse std::marker::PhantomData;\n\nstruct Foo<'x> { bar: isize, marker: PhantomData<&'x ()> }\nfn foo1<'a>(x: &Foo) -> &'a isize {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo1<'a>(x: &'a Foo) -> &'a isize\n &x.bar \/\/~ ERROR: cannot infer\n}\n\nfn foo2<'a, 'b>(x: &'a Foo) -> &'b isize {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo2<'a>(x: &'a Foo) -> &'a isize\n &x.bar \/\/~ ERROR: cannot infer\n}\n\nfn foo3<'a>(x: &Foo) -> (&'a isize, &'a isize) {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo3<'a>(x: &'a Foo) -> (&'a isize, &'a isize)\n (&x.bar, &x.bar) \/\/~ ERROR: cannot infer\n \/\/~^ ERROR: cannot infer\n}\n\nfn foo4<'a, 'b>(x: &'a Foo) -> (&'b isize, &'a isize, &'b isize) {\n\/\/~^ HELP: consider using an explicit lifetime parameter as shown: fn foo4<'a>(x: &'a Foo) -> (&'a isize, &'a isize, &'a isize)\n (&x.bar, &x.bar, &x.bar) \/\/~ ERROR: cannot infer\n \/\/~^ ERROR: cannot infer\n}\n\nstruct Cat<'x, T> { cat: &'x isize, t: T }\nstruct Dog<'y> { dog: &'y isize }\n\nfn cat2<'x, 'y>(x: Cat<'x, Dog<'y>>) -> &'x isize {\n \/\/~^ HELP consider using an explicit lifetime parameter as shown: fn cat2<'x>(x: Cat<'x, Dog<'x>>) -> &'x isize\n x.t.dog \/\/~ ERROR E0312\n}\n\nstruct Baz<'x> {\n bar: &'x isize\n}\n\nimpl<'a> Baz<'a> {\n fn baz2<'b>(&self, x: &isize) -> (&'b isize, &'b isize) {\n \/\/~^ HELP consider using an explicit lifetime parameter as shown: fn baz2<'b>(&self, x: &'\n \/\/ FIXME #35038: The above suggestion is different on Linux and Mac.\n (self.bar, x) \/\/~ ERROR E0312\n \/\/~^ ERROR E0312\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>We now check if statement conditions and contained statements.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Replace StrAllocating with String type<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>elf32: add program_header.rs<commit_after>pub use super::super::program_header::*;\n\n#[repr(C)]\n#[derive(Clone, PartialEq, Default)]\n#[derive(Debug)]\npub struct ProgramHeader {\n pub p_type: u32,\n pub p_offset: u32,\n pub p_vaddr: u32,\n pub p_paddr: u32,\n pub p_filesz: u32,\n pub p_memsz: u32,\n pub p_flags: u32,\n pub p_align: u32,\n}\n\npub const SIZEOF_PHDR: usize = 32;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add boilerplate for update hook<commit_after>use toml::Value;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::hook::Hook;\nuse libimagstore::hook::result::HookResult;\nuse libimagstore::hook::position::HookPosition;\nuse libimagstore::hook::accessor::{HookDataAccessor, HookDataAccessorProvider};\nuse libimagstore::hook::accessor::StoreIdAccessor;\n\n#[derive(Debug)]\npub struct UpdateHook {\n position: HookPosition,\n config: Option<Value>,\n}\n\nimpl UpdateHook {\n\n pub fn new(p: HookPosition) -> UpdateHook {\n UpdateHook {\n position: p,\n config: None,\n }\n }\n\n}\n\nimpl Hook for UpdateHook {\n\n fn name(&self) -> &'static str {\n \"stdhook_git_update\"\n }\n\n fn set_config(&mut self, config: &Value) {\n self.config = Some(config.clone());\n }\n\n}\n\nimpl HookDataAccessorProvider for UpdateHook {\n\n fn accessor(&self) -> HookDataAccessor {\n HookDataAccessor::StoreIdAccess(self)\n }\n}\n\nimpl StoreIdAccessor for UpdateHook {\n\n fn access(&self, id: &StoreId) -> HookResult<()> {\n debug!(\"[GIT UPDATE HOOK]: {:?}\", 
id);\n Ok(())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>#[doc=\"Fundamental layout structures and algorithms.\"]\n\nimport dom::base::{Element, ElementKind, HTMLDivElement, HTMLImageElement, Node, NodeData};\nimport dom::base::{NodeKind};\nimport dom::rcu;\nimport dom::style::Unit;\nimport gfx::geometry;\nimport gfx::geometry::{au, zero_size_au};\nimport geom::point::Point2D;\nimport geom::rect::Rect;\nimport geom::size::Size2D;\nimport image::base::Image;\nimport util::tree;\nimport util::color::Color;\nimport text::TextBox;\nimport traverse::extended_full_traversal;\nimport style::style::{SpecifiedStyle};\nimport vec::{push, push_all};\nimport std::net::url::Url;\nimport resource::image_cache_task;\nimport image_cache_task::ImageCacheTask;\nimport core::to_str::ToStr;\nimport std::arc::{ARC, clone};\nimport task::spawn;\n\nenum BoxKind {\n BlockBox,\n InlineBox,\n IntrinsicBox(@Size2D<au>),\n TextBoxKind(@TextBox)\n}\n\nimpl BoxKind : cmp::Eq {\n pure fn eq(&&other: BoxKind) -> bool {\n match (self, other) {\n (BlockBox, BlockBox) => true,\n _ => fail ~\"unimplemented case in BoxKind.eq\"\n }\n }\n}\n\nstruct Appearance {\n let mut background_image: Option<ImageHolder>;\n let mut background_color: Color;\n let mut width: Unit;\n let mut height: Unit;\n\n new(kind: NodeKind) {\n self.background_image = None;\n self.background_color = kind.default_color();\n self.width = kind.default_width();\n self.height = kind.default_height();\n }\n\n \/\/ This will be very unhappy if it is getting run in parallel with\n \/\/ anything trying to read the background image\n fn get_image() -> Option<~ARC<~Image>> {\n let mut image = None;\n\n \/\/ Do a dance where we swap the ImageHolder out before we can\n \/\/ get the image out of it because we can't match against it\n \/\/ because holder.get_image() is not pure.\n if (self.background_image).is_some() {\n let mut temp = None;\n temp <-> self.background_image;\n let holder <- option::unwrap(temp);\n image = holder.get_image();\n self.background_image = Some(holder);\n }\n\n return image;\n }\n}\n\nstruct Box {\n let tree: tree::Tree<@Box>;\n let node: Node;\n let kind: BoxKind;\n let mut bounds: Rect<au>;\n let appearance: Appearance;\n\n new(node: Node, kind: BoxKind) {\n self.appearance = node.read(|n| Appearance(*n.kind));\n self.tree = tree::empty();\n self.node = node;\n self.kind = kind;\n self.bounds = geometry::zero_rect_au();\n }\n}\n\n#[doc=\"A struct to store image data. The image will be loaded once,\n the first time it is requested, and an arc will be stored. Clones of\n this arc are given out on demand.\"]\nstruct ImageHolder {\n \/\/ Invariant: at least one of url and image is not none, except\n \/\/ occasionally while get_image is being called\n let mut url : Option<Url>;\n let mut image : Option<ARC<~Image>>;\n let image_cache_task: ImageCacheTask;\n let reflow: fn~();\n\n new(-url : Url, image_cache_task: ImageCacheTask, reflow: fn~()) {\n self.url = Some(copy url);\n self.image = None;\n self.image_cache_task = image_cache_task;\n self.reflow = copy reflow;\n\n \/\/ Tell the image cache we're going to be interested in this url\n \/\/ FIXME: These two messages must be sent to prep an image for use\n \/\/ but they are intended to be spread out in time. 
Ideally prefetch\n \/\/ should be done as early as possible and decode only once we\n \/\/ are sure that the image will be used.\n image_cache_task.send(image_cache_task::Prefetch(copy url));\n image_cache_task.send(image_cache_task::Decode(move url));\n }\n\n \/\/ This function should not be called by two tasks at the same time\n fn get_image() -> Option<~ARC<~Image>> {\n \/\/ If this is the first time we've called this function, load\n \/\/ the image and store it for the future\n if self.image.is_none() {\n assert self.url.is_some();\n\n let mut temp = None;\n temp <-> self.url;\n let url = option::unwrap(temp);\n\n let response_port = Port();\n self.image_cache_task.send(image_cache_task::GetImage(copy url, response_port.chan()));\n self.image = match response_port.recv() {\n image_cache_task::ImageReady(image) => Some(clone(&image)),\n image_cache_task::ImageNotReady => {\n \/\/ Need to reflow when the image is available\n let image_cache_task = self.image_cache_task;\n let reflow = copy self.reflow;\n do spawn |copy url, move reflow| {\n let response_port = Port();\n image_cache_task.send(image_cache_task::WaitForImage(copy url, response_port.chan()));\n match response_port.recv() {\n image_cache_task::ImageReady(*) => reflow(),\n image_cache_task::ImageNotReady => fail \/*not possible*\/,\n image_cache_task::ImageFailed => ()\n }\n }\n None\n }\n image_cache_task::ImageFailed => {\n #info(\"image was not ready for %s\", url.to_str());\n \/\/ FIXME: Need to schedule another layout when the image is ready\n None\n }\n };\n }\n\n if self.image.is_some() {\n \/\/ Temporarily swap out the arc of the image so we can clone\n \/\/ it without breaking purity, then put it back and return the\n \/\/ clone. This is not threadsafe.\n let mut temp = None;\n temp <-> self.image;\n let im_arc = option::unwrap(temp);\n self.image = Some(clone(&im_arc));\n\n return Some(~im_arc);\n } else {\n return None;\n }\n }\n}\n\nenum LayoutData = {\n mut specified_style: ~SpecifiedStyle,\n mut box: Option<@Box>\n};\n\n\/\/ FIXME: This is way too complex! Why do these have to have dummy receivers? 
--pcw\n\nenum NTree { NTree }\nimpl NTree : tree::ReadMethods<Node> {\n fn each_child(node: Node, f: fn(Node) -> bool) {\n tree::each_child(self, node, f)\n }\n\n fn with_tree_fields<R>(&&n: Node, f: fn(tree::Tree<Node>) -> R) -> R {\n n.read(|n| f(n.tree))\n }\n}\n\nenum BTree { BTree }\n\nimpl BTree : tree::ReadMethods<@Box> {\n fn each_child(node: @Box, f: fn(&&@Box) -> bool) {\n tree::each_child(self, node, f)\n }\n\n fn with_tree_fields<R>(&&b: @Box, f: fn(tree::Tree<@Box>) -> R) -> R {\n f(b.tree)\n }\n}\n\nimpl BTree : tree::WriteMethods<@Box> {\n fn add_child(node: @Box, child: @Box) {\n tree::add_child(self, node, child)\n }\n\n fn with_tree_fields<R>(&&b: @Box, f: fn(tree::Tree<@Box>) -> R) -> R {\n f(b.tree)\n }\n}\n\nimpl @Box {\n #[doc=\"The main reflow routine.\"]\n fn reflow() {\n match self.kind {\n BlockBox => self.reflow_block(),\n InlineBox => self.reflow_inline(),\n IntrinsicBox(size) => self.reflow_intrinsic(*size),\n TextBoxKind(subbox) => self.reflow_text(subbox)\n }\n }\n\n #[doc=\"Dumps the box tree, for debugging, with indentation.\"]\n fn dump_indent(indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += #fmt(\"%?\", self.kind);\n #debug[\"%s\", s];\n\n for BTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n}\n\n#[doc = \"\n Set your width to the maximum available width and return the\n maximum available width any children can use. Currently children\n are just given the same available width.\n\"]\nfn give_kids_width(+available_width : au, box : @Box) -> au {\n \/\/ TODO: give smaller available widths if the width of the\n \/\/ containing box is constrained\n match box.kind {\n BlockBox => box.bounds.size.width = available_width,\n InlineBox | IntrinsicBox(*) | TextBoxKind(*) => { }\n }\n\n available_width\n}\n\n#[doc=\"Wrapper around reflow so it can be passed to traverse\"]\nfn reflow_wrapper(b : @Box) {\n b.reflow();\n}\n\nimpl @Box {\n #[doc=\"\n Run a parallel traversal over the layout tree rooted at\n this box. 
On the top-down traversal give each box the\n available width determined by their parent and on the\n bottom-up traversal reflow each box based on their\n attributes and their children's sizes.\n \"]\n fn reflow_subtree(available_width : au) {\n extended_full_traversal(self, available_width, give_kids_width, reflow_wrapper);\n }\n\n #[doc=\"The trivial reflow routine for instrinsically-sized frames.\"]\n fn reflow_intrinsic(size: Size2D<au>) {\n self.bounds.size = copy size;\n\n #debug[\"reflow_intrinsic size=%?\", copy self.bounds];\n }\n\n #[doc=\"Dumps the box tree, for debugging.\"]\n fn dump() {\n self.dump_indent(0u);\n }\n}\n\n\/\/ Debugging\n\ntrait PrivateNodeMethods{\n fn dump_indent(ident: uint);\n}\n\nimpl Node : PrivateNodeMethods {\n #[doc=\"Dumps the node tree, for debugging, with indentation.\"]\n fn dump_indent(indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += #fmt(\"%?\", self.read(|n| copy n.kind ));\n #debug[\"%s\", s];\n\n for NTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n}\n\ntrait NodeMethods {\n fn dump();\n}\n\nimpl Node : NodeMethods {\n #[doc=\"Dumps the subtree rooted at this node, for debugging.\"]\n fn dump() {\n self.dump_indent(0u);\n }\n}\n\n#[cfg(test)]\nmod test {\n import dom::base::{Element, ElementData, HTMLDivElement, HTMLImageElement, Node, NodeKind};\n import dom::base::{NodeScope};\n import dom::rcu::Scope;\n\n \/*\n use sdl;\n import sdl::video;\n\n fn with_screen(f: fn(*sdl::surface)) {\n let screen = video::set_video_mode(\n 320, 200, 32,\n ~[video::hwsurface], ~[video::doublebuf]);\n assert screen != ptr::null();\n\n f(screen);\n\n video::free_surface(screen);\n }\n *\/\n\n fn flat_bounds(root: @Box) -> ~[Rect<au>] {\n let mut r = ~[];\n for tree::each_child(BTree, root) |c| {\n push_all(r, flat_bounds(c));\n }\n\n push(r, copy root.bounds);\n\n return r;\n }\n\n #[test]\n #[ignore(reason = \"busted\")]\n fn do_layout() {\n let s = Scope();\n\n fn mk_img(size: Size2D<au>) -> ~ElementKind {\n ~HTMLImageElement({mut size: size})\n }\n\n let n0 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(10))))));\n let n1 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(10))))));\n let n2 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(20))))));\n let n3 = s.new_node(Element(ElementData(~\"div\", ~HTMLDivElement)));\n\n tree::add_child(s, n3, n0);\n tree::add_child(s, n3, n1);\n tree::add_child(s, n3, n2);\n\n let b0 = n0.construct_boxes();\n let b1 = n1.construct_boxes();\n let b2 = n2.construct_boxes();\n let b3 = n3.construct_boxes();\n\n tree::add_child(BTree, b3, b0);\n tree::add_child(BTree, b3, b1);\n tree::add_child(BTree, b3, b2);\n\n b3.reflow_subtree(au(100));\n let fb = flat_bounds(b3);\n #debug[\"fb=%?\", fb];\n assert fb == ~[geometry::box(au(0), au(0), au(10), au(10)), \/\/ n0\n geometry::box(au(0), au(10), au(10), au(15)), \/\/ n1\n geometry::box(au(0), au(25), au(10), au(20)), \/\/ n2\n geometry::box(au(0), au(0), au(100), au(45))]; \/\/ n3\n }\n}\n\n<commit_msg>Add missing case to BoxKind : Eq<commit_after>#[doc=\"Fundamental layout structures and algorithms.\"]\n\nimport dom::base::{Element, ElementKind, HTMLDivElement, HTMLImageElement, Node, NodeData};\nimport dom::base::{NodeKind};\nimport dom::rcu;\nimport dom::style::Unit;\nimport gfx::geometry;\nimport gfx::geometry::{au, zero_size_au};\nimport geom::point::Point2D;\nimport geom::rect::Rect;\nimport geom::size::Size2D;\nimport 
image::base::Image;\nimport util::tree;\nimport util::color::Color;\nimport text::TextBox;\nimport traverse::extended_full_traversal;\nimport style::style::{SpecifiedStyle};\nimport vec::{push, push_all};\nimport std::net::url::Url;\nimport resource::image_cache_task;\nimport image_cache_task::ImageCacheTask;\nimport core::to_str::ToStr;\nimport std::arc::{ARC, clone};\nimport task::spawn;\n\nenum BoxKind {\n BlockBox,\n InlineBox,\n IntrinsicBox(@Size2D<au>),\n TextBoxKind(@TextBox)\n}\n\nimpl BoxKind : cmp::Eq {\n pure fn eq(&&other: BoxKind) -> bool {\n match (self, other) {\n (BlockBox, BlockBox) => true,\n (InlineBox, InlineBox) => true,\n _ => fail ~\"unimplemented case in BoxKind.eq\"\n }\n }\n}\n\nstruct Appearance {\n let mut background_image: Option<ImageHolder>;\n let mut background_color: Color;\n let mut width: Unit;\n let mut height: Unit;\n\n new(kind: NodeKind) {\n self.background_image = None;\n self.background_color = kind.default_color();\n self.width = kind.default_width();\n self.height = kind.default_height();\n }\n\n \/\/ This will be very unhappy if it is getting run in parallel with\n \/\/ anything trying to read the background image\n fn get_image() -> Option<~ARC<~Image>> {\n let mut image = None;\n\n \/\/ Do a dance where we swap the ImageHolder out before we can\n \/\/ get the image out of it because we can't match against it\n \/\/ because holder.get_image() is not pure.\n if (self.background_image).is_some() {\n let mut temp = None;\n temp <-> self.background_image;\n let holder <- option::unwrap(temp);\n image = holder.get_image();\n self.background_image = Some(holder);\n }\n\n return image;\n }\n}\n\nstruct Box {\n let tree: tree::Tree<@Box>;\n let node: Node;\n let kind: BoxKind;\n let mut bounds: Rect<au>;\n let appearance: Appearance;\n\n new(node: Node, kind: BoxKind) {\n self.appearance = node.read(|n| Appearance(*n.kind));\n self.tree = tree::empty();\n self.node = node;\n self.kind = kind;\n self.bounds = geometry::zero_rect_au();\n }\n}\n\n#[doc=\"A struct to store image data. The image will be loaded once,\n the first time it is requested, and an arc will be stored. Clones of\n this arc are given out on demand.\"]\nstruct ImageHolder {\n \/\/ Invariant: at least one of url and image is not none, except\n \/\/ occasionally while get_image is being called\n let mut url : Option<Url>;\n let mut image : Option<ARC<~Image>>;\n let image_cache_task: ImageCacheTask;\n let reflow: fn~();\n\n new(-url : Url, image_cache_task: ImageCacheTask, reflow: fn~()) {\n self.url = Some(copy url);\n self.image = None;\n self.image_cache_task = image_cache_task;\n self.reflow = copy reflow;\n\n \/\/ Tell the image cache we're going to be interested in this url\n \/\/ FIXME: These two messages must be sent to prep an image for use\n \/\/ but they are intended to be spread out in time. 
Ideally prefetch\n \/\/ should be done as early as possible and decode only once we\n \/\/ are sure that the image will be used.\n image_cache_task.send(image_cache_task::Prefetch(copy url));\n image_cache_task.send(image_cache_task::Decode(move url));\n }\n\n \/\/ This function should not be called by two tasks at the same time\n fn get_image() -> Option<~ARC<~Image>> {\n \/\/ If this is the first time we've called this function, load\n \/\/ the image and store it for the future\n if self.image.is_none() {\n assert self.url.is_some();\n\n let mut temp = None;\n temp <-> self.url;\n let url = option::unwrap(temp);\n\n let response_port = Port();\n self.image_cache_task.send(image_cache_task::GetImage(copy url, response_port.chan()));\n self.image = match response_port.recv() {\n image_cache_task::ImageReady(image) => Some(clone(&image)),\n image_cache_task::ImageNotReady => {\n \/\/ Need to reflow when the image is available\n let image_cache_task = self.image_cache_task;\n let reflow = copy self.reflow;\n do spawn |copy url, move reflow| {\n let response_port = Port();\n image_cache_task.send(image_cache_task::WaitForImage(copy url, response_port.chan()));\n match response_port.recv() {\n image_cache_task::ImageReady(*) => reflow(),\n image_cache_task::ImageNotReady => fail \/*not possible*\/,\n image_cache_task::ImageFailed => ()\n }\n }\n None\n }\n image_cache_task::ImageFailed => {\n #info(\"image was not ready for %s\", url.to_str());\n \/\/ FIXME: Need to schedule another layout when the image is ready\n None\n }\n };\n }\n\n if self.image.is_some() {\n \/\/ Temporarily swap out the arc of the image so we can clone\n \/\/ it without breaking purity, then put it back and return the\n \/\/ clone. This is not threadsafe.\n let mut temp = None;\n temp <-> self.image;\n let im_arc = option::unwrap(temp);\n self.image = Some(clone(&im_arc));\n\n return Some(~im_arc);\n } else {\n return None;\n }\n }\n}\n\nenum LayoutData = {\n mut specified_style: ~SpecifiedStyle,\n mut box: Option<@Box>\n};\n\n\/\/ FIXME: This is way too complex! Why do these have to have dummy receivers? 
--pcw\n\nenum NTree { NTree }\nimpl NTree : tree::ReadMethods<Node> {\n fn each_child(node: Node, f: fn(Node) -> bool) {\n tree::each_child(self, node, f)\n }\n\n fn with_tree_fields<R>(&&n: Node, f: fn(tree::Tree<Node>) -> R) -> R {\n n.read(|n| f(n.tree))\n }\n}\n\nenum BTree { BTree }\n\nimpl BTree : tree::ReadMethods<@Box> {\n fn each_child(node: @Box, f: fn(&&@Box) -> bool) {\n tree::each_child(self, node, f)\n }\n\n fn with_tree_fields<R>(&&b: @Box, f: fn(tree::Tree<@Box>) -> R) -> R {\n f(b.tree)\n }\n}\n\nimpl BTree : tree::WriteMethods<@Box> {\n fn add_child(node: @Box, child: @Box) {\n tree::add_child(self, node, child)\n }\n\n fn with_tree_fields<R>(&&b: @Box, f: fn(tree::Tree<@Box>) -> R) -> R {\n f(b.tree)\n }\n}\n\nimpl @Box {\n #[doc=\"The main reflow routine.\"]\n fn reflow() {\n match self.kind {\n BlockBox => self.reflow_block(),\n InlineBox => self.reflow_inline(),\n IntrinsicBox(size) => self.reflow_intrinsic(*size),\n TextBoxKind(subbox) => self.reflow_text(subbox)\n }\n }\n\n #[doc=\"Dumps the box tree, for debugging, with indentation.\"]\n fn dump_indent(indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += #fmt(\"%?\", self.kind);\n #debug[\"%s\", s];\n\n for BTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n}\n\n#[doc = \"\n Set your width to the maximum available width and return the\n maximum available width any children can use. Currently children\n are just given the same available width.\n\"]\nfn give_kids_width(+available_width : au, box : @Box) -> au {\n \/\/ TODO: give smaller available widths if the width of the\n \/\/ containing box is constrained\n match box.kind {\n BlockBox => box.bounds.size.width = available_width,\n InlineBox | IntrinsicBox(*) | TextBoxKind(*) => { }\n }\n\n available_width\n}\n\n#[doc=\"Wrapper around reflow so it can be passed to traverse\"]\nfn reflow_wrapper(b : @Box) {\n b.reflow();\n}\n\nimpl @Box {\n #[doc=\"\n Run a parallel traversal over the layout tree rooted at\n this box. 
On the top-down traversal give each box the\n available width determined by their parent and on the\n bottom-up traversal reflow each box based on their\n attributes and their children's sizes.\n \"]\n fn reflow_subtree(available_width : au) {\n extended_full_traversal(self, available_width, give_kids_width, reflow_wrapper);\n }\n\n #[doc=\"The trivial reflow routine for instrinsically-sized frames.\"]\n fn reflow_intrinsic(size: Size2D<au>) {\n self.bounds.size = copy size;\n\n #debug[\"reflow_intrinsic size=%?\", copy self.bounds];\n }\n\n #[doc=\"Dumps the box tree, for debugging.\"]\n fn dump() {\n self.dump_indent(0u);\n }\n}\n\n\/\/ Debugging\n\ntrait PrivateNodeMethods{\n fn dump_indent(ident: uint);\n}\n\nimpl Node : PrivateNodeMethods {\n #[doc=\"Dumps the node tree, for debugging, with indentation.\"]\n fn dump_indent(indent: uint) {\n let mut s = ~\"\";\n for uint::range(0u, indent) |_i| {\n s += ~\" \";\n }\n\n s += #fmt(\"%?\", self.read(|n| copy n.kind ));\n #debug[\"%s\", s];\n\n for NTree.each_child(self) |kid| {\n kid.dump_indent(indent + 1u) \n }\n }\n}\n\ntrait NodeMethods {\n fn dump();\n}\n\nimpl Node : NodeMethods {\n #[doc=\"Dumps the subtree rooted at this node, for debugging.\"]\n fn dump() {\n self.dump_indent(0u);\n }\n}\n\n#[cfg(test)]\nmod test {\n import dom::base::{Element, ElementData, HTMLDivElement, HTMLImageElement, Node, NodeKind};\n import dom::base::{NodeScope};\n import dom::rcu::Scope;\n\n \/*\n use sdl;\n import sdl::video;\n\n fn with_screen(f: fn(*sdl::surface)) {\n let screen = video::set_video_mode(\n 320, 200, 32,\n ~[video::hwsurface], ~[video::doublebuf]);\n assert screen != ptr::null();\n\n f(screen);\n\n video::free_surface(screen);\n }\n *\/\n\n fn flat_bounds(root: @Box) -> ~[Rect<au>] {\n let mut r = ~[];\n for tree::each_child(BTree, root) |c| {\n push_all(r, flat_bounds(c));\n }\n\n push(r, copy root.bounds);\n\n return r;\n }\n\n #[test]\n #[ignore(reason = \"busted\")]\n fn do_layout() {\n let s = Scope();\n\n fn mk_img(size: Size2D<au>) -> ~ElementKind {\n ~HTMLImageElement({mut size: size})\n }\n\n let n0 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(10))))));\n let n1 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(10))))));\n let n2 = s.new_node(Element(ElementData(~\"img\", mk_img(Size2D(au(10),au(20))))));\n let n3 = s.new_node(Element(ElementData(~\"div\", ~HTMLDivElement)));\n\n tree::add_child(s, n3, n0);\n tree::add_child(s, n3, n1);\n tree::add_child(s, n3, n2);\n\n let b0 = n0.construct_boxes();\n let b1 = n1.construct_boxes();\n let b2 = n2.construct_boxes();\n let b3 = n3.construct_boxes();\n\n tree::add_child(BTree, b3, b0);\n tree::add_child(BTree, b3, b1);\n tree::add_child(BTree, b3, b2);\n\n b3.reflow_subtree(au(100));\n let fb = flat_bounds(b3);\n #debug[\"fb=%?\", fb];\n assert fb == ~[geometry::box(au(0), au(0), au(10), au(10)), \/\/ n0\n geometry::box(au(0), au(10), au(10), au(15)), \/\/ n1\n geometry::box(au(0), au(25), au(10), au(20)), \/\/ n2\n geometry::box(au(0), au(0), au(100), au(45))]; \/\/ n3\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add opcodes module<commit_after>pub static INT: u8 = b'I';\npub static BININT: u8 = b'J';\npub static BININT1: u8 = b'K';\npub static BININT2: u8 = b'M';\npub static LONG: u8 = b'L';\npub static LONG1: u8 = b'\\x8a';\npub static LONG4: u8 = b'\\x8b';\npub static MARK: u8 = b'(';\npub static STOP: u8 = b'.';\npub static POP: u8 = b'0';\npub static POP_MARK: u8 = b'1';\npub static DUP: u8 = b'2';\npub static FLOAT: 
u8 = b'F';\npub static BINFLOAT: u8 = b'G';\npub static NONE: u8 = b'N';\npub static PERSID: u8 = b'P';\npub static BINPERSID: u8 = b'Q';\npub static REDUCE: u8 = b'R';\npub static STRING: u8 = b'S';\npub static BINSTRING: u8 = b'T';\npub static SHORT_BINSTRING: u8 = b'U';\npub static UNICODE: u8 = b'V';\npub static BINUNICODE: u8 = b'X';\npub static APPEND: u8 = b'a';\npub static BUILD: u8 = b'b';\npub static GLOBAL: u8 = b'c';\npub static DICT: u8 = b'd';\npub static EMPTY_DICT: u8 = b'}';\npub static APPENDS: u8 = b'e';\npub static GET: u8 = b'g';\npub static BINGET: u8 = b'h';\npub static INST: u8 = b'i';\npub static LONG_BINGET: u8 = b'j';\npub static LIST: u8 = b'l';\npub static EMPTY_LIST: u8 = b']';\npub static OBJ: u8 = b'o';\npub static PUT: u8 = b'p';\npub static BINPUT: u8 = b'q';\npub static LONG_BINPUT: u8 = b'r';\npub static SETITEM: u8 = b's';\npub static TUPLE: u8 = b't';\npub static EMPTY_TUPLE: u8 = b')';\npub static SETITEMS: u8 = b'u';\npub static PROTO: u8 = b'\\x80';\npub static NEWOBJ: u8 = b'\\x81';\npub static EXT1: u8 = b'\\x82';\npub static EXT2: u8 = b'\\x83';\npub static EXT4: u8 = b'\\x84';\npub static TUPLE1: u8 = b'\\x85';\npub static TUPLE2: u8 = b'\\x86';\npub static TUPLE3: u8 = b'\\x87';\npub static NEWTRUE: u8 = b'\\x88';\npub static NEWFALSE: u8 = b'\\x89';\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix the command-line MSAA validation so that it permits only 2, 4, or 8<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix indexing error in collision response<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rename is_power_of_two<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(Usage Strings): fixes a bug ordering of elements in usage strings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #375 - messense:messense-patch-1, r=Vinatorul<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactores function with new structure<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(libc)]\n#![feature(std_misc)]\n#![feature(optin_builtin_traits)]\n\nextern crate gag;\nextern crate libc;\nextern crate num;\n#[macro_use] extern crate enum_primitive;\nextern crate termbox_sys as termbox;\n#[macro_use] extern crate bitflags;\n\npub use self::style::{Style, RB_BOLD, RB_UNDERLINE, RB_REVERSE, RB_NORMAL};\n\nuse std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::char;\nuse std::time::duration::Duration;\nuse std::default::Default;\n\nuse num::FromPrimitive;\nuse termbox::RawEvent;\nuse libc::c_int;\nuse gag::Hold;\n\npub mod keyboard;\npub mod mouse;\n\npub use keyboard::Key;\npub use mouse::Mouse;\n\n#[derive(Clone, Copy)]\npub enum Event {\n KeyEventRaw(u8, u16, u32),\n KeyEvent(Option<Key>),\n ResizeEvent(i32, i32),\n MouseEvent(Mouse, i32, i32),\n NoEvent\n}\n\nbitflags! 
{\n #[derive(Debug)]\n flags InputMode: u16 {\n const RB_INPUT_CURRENT = 0x00,\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ ESC sequence => ESC means TB_KEY_ESC\n const RB_INPUT_ESC = 0x01,\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ sequence => ESC enables TB_MOD_ALT modifier for the next keyboard event.\n const RB_INPUT_ALT = 0x02,\n \/\/\/ Enables mouse input\n const RB_INPUT_MOUSE = 0x04\n }\n}\n\n#[derive(Clone, Copy, PartialEq)]\n#[repr(C,u16)]\npub enum Color {\n Default = 0x00,\n Black = 0x01,\n Red = 0x02,\n Green = 0x03,\n Yellow = 0x04,\n Blue = 0x05,\n Magenta = 0x06,\n Cyan = 0x07,\n White = 0x08,\n}\n\nmod style {\n bitflags! {\n #[repr(C)]\n flags Style: u16 {\n const TB_NORMAL_COLOR = 0x000F,\n const RB_BOLD = 0x0100,\n const RB_UNDERLINE = 0x0200,\n const RB_REVERSE = 0x0400,\n const RB_NORMAL = 0x0000,\n const TB_ATTRIB = RB_BOLD.bits | RB_UNDERLINE.bits | RB_REVERSE.bits,\n }\n }\n\n impl Style {\n pub fn from_color(color: super::Color) -> Style {\n Style { bits: color as u16 & TB_NORMAL_COLOR.bits }\n }\n }\n}\n\nconst NIL_RAW_EVENT: RawEvent = RawEvent { etype: 0, emod: 0, key: 0, ch: 0, w: 0, h: 0, x: 0, y: 0 };\n\n\/\/ FIXME: Rust doesn't support this enum representation.\n\/\/ #[derive(Copy,FromPrimitive,Debug)]\n\/\/ #[repr(C,int)]\n\/\/ pub enum EventErrorKind {\n\/\/ Error = -1,\n\/\/ }\n\/\/ pub type EventError = Option<EventErrorKind>;\n#[allow(non_snake_case)]\npub mod EventErrorKind {\n #[derive(Clone, Copy,Debug)]\n pub struct Error;\n}\n\npub type EventError = Option<EventErrorKind::Error>;\n\npub type EventResult<T> = Result<T, EventError>;\n\n\/\/\/ Unpack a RawEvent to an Event\n\/\/\/\n\/\/\/ if the `raw` parameter is true, then the Event variant will be the raw\n\/\/\/ representation of the event.\n\/\/\/ for instance KeyEventRaw instead of KeyEvent\n\/\/\/\n\/\/\/ This is useful if you want to interpret the raw event data yourself, rather\n\/\/\/ than having rustbox translate it to its own representation.\nfn unpack_event(ev_type: c_int, ev: &RawEvent, raw: bool) -> EventResult<Event> {\n match ev_type {\n 0 => Ok(Event::NoEvent),\n 1 => Ok(\n if raw {\n Event::KeyEventRaw(ev.emod, ev.key, ev.ch)\n } else {\n let k = match ev.key {\n 0 => char::from_u32(ev.ch).map(|c| Key::Char(c)),\n a => Key::from_code(a),\n };\n Event::KeyEvent(k)\n }),\n 2 => Ok(Event::ResizeEvent(ev.w, ev.h)),\n 3 => {\n let mouse = Mouse::from_code(ev.key).unwrap_or(Mouse::Left);\n Ok(Event::MouseEvent(mouse, ev.x, ev.y))\n }\n \/\/ FIXME: Rust doesn't support this error representation\n \/\/ res => FromPrimitive::from_int(res as isize),\n -1 => Err(Some(EventErrorKind::Error)),\n _ => Err(None)\n }\n}\n\nenum_from_primitive! 
{\n#[derive(Clone, Copy, Debug)]\n#[repr(C,isize)]\npub enum InitErrorKind {\n UnsupportedTerminal = -1,\n FailedToOpenTty = -2,\n PipeTrapError = -3,\n}\n}\n\n#[derive(Debug)]\npub enum InitError {\n BufferStderrFailed(io::Error),\n AlreadyOpen,\n TermBox(Option<InitErrorKind>),\n}\n\nimpl fmt::Display for InitError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.description())\n }\n}\n\nimpl Error for InitError {\n fn description(&self) -> &str {\n match *self {\n InitError::BufferStderrFailed(_) => \"Could not redirect stderr\",\n InitError::AlreadyOpen => \"RustBox is already open\",\n InitError::TermBox(e) => e.map_or(\"Unexpected TermBox return code\", |e| match e {\n InitErrorKind::UnsupportedTerminal => \"Unsupported terminal\",\n InitErrorKind::FailedToOpenTty => \"Failed to open TTY\",\n InitErrorKind::PipeTrapError => \"Pipe trap error\",\n }),\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n match *self {\n InitError::BufferStderrFailed(ref e) => Some(e),\n _ => None\n }\n }\n}\n\n#[allow(missing_copy_implementations)]\npub struct RustBox {\n \/\/ We only bother to redirect stderr for the moment, since it's used for panic!\n _stderr: Option<Hold>,\n}\n\n\/\/ Termbox is not thread safe\nimpl !Send for RustBox {}\n\n#[derive(Clone, Copy,Debug)]\npub struct InitOptions {\n \/\/\/ Use this option to initialize with a specific input mode\n \/\/\/\n \/\/\/ See InputMode enum for details on the variants.\n pub input_mode: InputMode,\n\n \/\/\/ Use this option to automatically buffer stderr while RustBox is running. It will be\n \/\/\/ written when RustBox exits.\n \/\/\/\n \/\/\/ This option uses a nonblocking OS pipe to buffer stderr output. This means that if the\n \/\/\/ pipe fills up, subsequent writes will fail until RustBox exits. 
If this is a concern for\n \/\/\/ your program, don't use RustBox's default pipe-based redirection; instead, redirect stderr\n \/\/\/ to a log file or another process that is capable of handling it better.\n pub buffer_stderr: bool,\n}\n\nimpl Default for InitOptions {\n fn default() -> Self {\n InitOptions {\n input_mode: RB_INPUT_CURRENT,\n buffer_stderr: false,\n }\n }\n}\n\nimpl RustBox {\n \/\/\/ Initialize rustbox.\n \/\/\/\n \/\/\/ For the default options, you can use:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::RustBox;\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(Default::default());\n \/\/\/ ```\n \/\/\/\n \/\/\/ Otherwise, you can specify:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::{RustBox, InitOptions};\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(InitOptions { input_mode: rustbox::InputMode::Esc, ..Default::default() });\n \/\/\/ ```\n pub fn init(opts: InitOptions) -> Result<RustBox, InitError> {\n let stderr = if opts.buffer_stderr {\n Some(try!(Hold::stderr().map_err(|e| InitError::BufferStderrFailed(e))))\n } else {\n None\n };\n\n \/\/ Create the RustBox.\n let rb = unsafe { match termbox::tb_init() {\n 0 => RustBox {\n _stderr: stderr,\n },\n res => {\n return Err(InitError::TermBox(FromPrimitive::from_isize(res as isize)))\n }\n }};\n match opts.input_mode {\n RB_INPUT_CURRENT => (),\n _ => rb.set_input_mode(opts.input_mode),\n }\n Ok(rb)\n }\n\n pub fn width(&self) -> usize {\n unsafe { termbox::tb_width() as usize }\n }\n\n pub fn height(&self) -> usize {\n unsafe { termbox::tb_height() as usize }\n }\n\n pub fn clear(&self) {\n unsafe { termbox::tb_clear() }\n }\n\n pub fn present(&self) {\n unsafe { termbox::tb_present() }\n }\n\n pub fn set_cursor(&self, x: isize, y: isize) {\n unsafe { termbox::tb_set_cursor(x as c_int, y as c_int) }\n }\n\n pub unsafe fn change_cell(&self, x: usize, y: usize, ch: u32, fg: u16, bg: u16) {\n termbox::tb_change_cell(x as c_int, y as c_int, ch, fg, bg)\n }\n\n pub fn print(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, s: &str) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n for (i, ch) in s.chars().enumerate() {\n unsafe {\n self.change_cell(x+i, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n }\n\n pub fn print_char(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, ch: char) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n unsafe {\n self.change_cell(x, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n\n pub fn poll_event(&self, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_poll_event(&ev as *const RawEvent)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn peek_event(&self, timeout: Duration, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_peek_event(&ev as *const RawEvent, timeout.num_milliseconds() as c_int)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn set_input_mode(&self, mode: InputMode) {\n unsafe {\n termbox::tb_select_input_mode(mode.bits() as c_int);\n }\n }\n}\n\nimpl Drop for RustBox {\n fn drop(&mut self) {\n \/\/ Since only one instance of the RustBox is ever accessible, we should not\n \/\/ need to do this atomically.\n \/\/ Note: we should definitely have RUSTBOX_RUNNING = true here.\n unsafe {\n termbox::tb_shutdown();\n }\n }\n}\n<commit_msg>Revert \"Make InputMode a bitflags 
type\"<commit_after>#![feature(libc)]\n#![feature(std_misc)]\n#![feature(optin_builtin_traits)]\n\nextern crate gag;\nextern crate libc;\nextern crate num;\n#[macro_use] extern crate enum_primitive;\nextern crate termbox_sys as termbox;\n#[macro_use] extern crate bitflags;\n\npub use self::style::{Style, RB_BOLD, RB_UNDERLINE, RB_REVERSE, RB_NORMAL};\n\nuse std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::char;\nuse std::time::duration::Duration;\nuse std::default::Default;\n\nuse num::FromPrimitive;\nuse termbox::RawEvent;\nuse libc::c_int;\nuse gag::Hold;\n\npub mod keyboard;\npub mod mouse;\n\npub use keyboard::Key;\npub use mouse::Mouse;\n\n#[derive(Clone, Copy)]\npub enum Event {\n KeyEventRaw(u8, u16, u32),\n KeyEvent(Option<Key>),\n ResizeEvent(i32, i32),\n MouseEvent(Mouse, i32, i32),\n NoEvent\n}\n\n#[derive(Clone, Copy, Debug)]\npub enum InputMode {\n Current = 0x00,\n\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ ESC sequence => ESC means TB_KEY_ESC\n Esc = 0x01,\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ sequence => ESC enables TB_MOD_ALT modifier for the next keyboard event.\n Alt = 0x02,\n}\n\n#[derive(Clone, Copy, PartialEq)]\n#[repr(C,u16)]\npub enum Color {\n Default = 0x00,\n Black = 0x01,\n Red = 0x02,\n Green = 0x03,\n Yellow = 0x04,\n Blue = 0x05,\n Magenta = 0x06,\n Cyan = 0x07,\n White = 0x08,\n}\n\nmod style {\n bitflags! {\n #[repr(C)]\n flags Style: u16 {\n const TB_NORMAL_COLOR = 0x000F,\n const RB_BOLD = 0x0100,\n const RB_UNDERLINE = 0x0200,\n const RB_REVERSE = 0x0400,\n const RB_NORMAL = 0x0000,\n const TB_ATTRIB = RB_BOLD.bits | RB_UNDERLINE.bits | RB_REVERSE.bits,\n }\n }\n\n impl Style {\n pub fn from_color(color: super::Color) -> Style {\n Style { bits: color as u16 & TB_NORMAL_COLOR.bits }\n }\n }\n}\n\nconst NIL_RAW_EVENT: RawEvent = RawEvent { etype: 0, emod: 0, key: 0, ch: 0, w: 0, h: 0, x: 0, y: 0 };\n\n\/\/ FIXME: Rust doesn't support this enum representation.\n\/\/ #[derive(Copy,FromPrimitive,Debug)]\n\/\/ #[repr(C,int)]\n\/\/ pub enum EventErrorKind {\n\/\/ Error = -1,\n\/\/ }\n\/\/ pub type EventError = Option<EventErrorKind>;\n#[allow(non_snake_case)]\npub mod EventErrorKind {\n #[derive(Clone, Copy,Debug)]\n pub struct Error;\n}\n\npub type EventError = Option<EventErrorKind::Error>;\n\npub type EventResult<T> = Result<T, EventError>;\n\n\/\/\/ Unpack a RawEvent to an Event\n\/\/\/\n\/\/\/ if the `raw` parameter is true, then the Event variant will be the raw\n\/\/\/ representation of the event.\n\/\/\/ for instance KeyEventRaw instead of KeyEvent\n\/\/\/\n\/\/\/ This is useful if you want to interpret the raw event data yourself, rather\n\/\/\/ than having rustbox translate it to its own representation.\nfn unpack_event(ev_type: c_int, ev: &RawEvent, raw: bool) -> EventResult<Event> {\n match ev_type {\n 0 => Ok(Event::NoEvent),\n 1 => Ok(\n if raw {\n Event::KeyEventRaw(ev.emod, ev.key, ev.ch)\n } else {\n let k = match ev.key {\n 0 => char::from_u32(ev.ch).map(|c| Key::Char(c)),\n a => Key::from_code(a),\n };\n Event::KeyEvent(k)\n }),\n 2 => Ok(Event::ResizeEvent(ev.w, ev.h)),\n 3 => {\n let mouse = Mouse::from_code(ev.key).unwrap_or(Mouse::Left);\n Ok(Event::MouseEvent(mouse, ev.x, ev.y))\n }\n \/\/ FIXME: Rust doesn't support this error representation\n \/\/ res => FromPrimitive::from_int(res as isize),\n -1 => Err(Some(EventErrorKind::Error)),\n _ => Err(None)\n }\n}\n\nenum_from_primitive! 
{\n#[derive(Clone, Copy, Debug)]\n#[repr(C,isize)]\npub enum InitErrorKind {\n UnsupportedTerminal = -1,\n FailedToOpenTty = -2,\n PipeTrapError = -3,\n}\n}\n\n#[derive(Debug)]\npub enum InitError {\n BufferStderrFailed(io::Error),\n AlreadyOpen,\n TermBox(Option<InitErrorKind>),\n}\n\nimpl fmt::Display for InitError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.description())\n }\n}\n\nimpl Error for InitError {\n fn description(&self) -> &str {\n match *self {\n InitError::BufferStderrFailed(_) => \"Could not redirect stderr\",\n InitError::AlreadyOpen => \"RustBox is already open\",\n InitError::TermBox(e) => e.map_or(\"Unexpected TermBox return code\", |e| match e {\n InitErrorKind::UnsupportedTerminal => \"Unsupported terminal\",\n InitErrorKind::FailedToOpenTty => \"Failed to open TTY\",\n InitErrorKind::PipeTrapError => \"Pipe trap error\",\n }),\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n match *self {\n InitError::BufferStderrFailed(ref e) => Some(e),\n _ => None\n }\n }\n}\n\n#[allow(missing_copy_implementations)]\npub struct RustBox {\n \/\/ We only bother to redirect stderr for the moment, since it's used for panic!\n _stderr: Option<Hold>,\n}\n\n\/\/ Termbox is not thread safe\nimpl !Send for RustBox {}\n\n#[derive(Clone, Copy,Debug)]\npub struct InitOptions {\n \/\/\/ Use this option to initialize with a specific input mode\n \/\/\/\n \/\/\/ See InputMode enum for details on the variants.\n pub input_mode: InputMode,\n\n \/\/\/ Use this option to automatically buffer stderr while RustBox is running. It will be\n \/\/\/ written when RustBox exits.\n \/\/\/\n \/\/\/ This option uses a nonblocking OS pipe to buffer stderr output. This means that if the\n \/\/\/ pipe fills up, subsequent writes will fail until RustBox exits. 
If this is a concern for\n \/\/\/ your program, don't use RustBox's default pipe-based redirection; instead, redirect stderr\n \/\/\/ to a log file or another process that is capable of handling it better.\n pub buffer_stderr: bool,\n}\n\nimpl Default for InitOptions {\n fn default() -> Self {\n InitOptions {\n input_mode: InputMode::Current,\n buffer_stderr: false,\n }\n }\n}\n\nimpl RustBox {\n \/\/\/ Initialize rustbox.\n \/\/\/\n \/\/\/ For the default options, you can use:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::RustBox;\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(Default::default());\n \/\/\/ ```\n \/\/\/\n \/\/\/ Otherwise, you can specify:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::{RustBox, InitOptions};\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(InitOptions { input_mode: rustbox::InputMode::Esc, ..Default::default() });\n \/\/\/ ```\n pub fn init(opts: InitOptions) -> Result<RustBox, InitError> {\n let stderr = if opts.buffer_stderr {\n Some(try!(Hold::stderr().map_err(|e| InitError::BufferStderrFailed(e))))\n } else {\n None\n };\n\n \/\/ Create the RustBox.\n let rb = unsafe { match termbox::tb_init() {\n 0 => RustBox {\n _stderr: stderr,\n },\n res => {\n return Err(InitError::TermBox(FromPrimitive::from_isize(res as isize)))\n }\n }};\n match opts.input_mode {\n InputMode::Current => (),\n _ => rb.set_input_mode(opts.input_mode),\n }\n Ok(rb)\n }\n\n pub fn width(&self) -> usize {\n unsafe { termbox::tb_width() as usize }\n }\n\n pub fn height(&self) -> usize {\n unsafe { termbox::tb_height() as usize }\n }\n\n pub fn clear(&self) {\n unsafe { termbox::tb_clear() }\n }\n\n pub fn present(&self) {\n unsafe { termbox::tb_present() }\n }\n\n pub fn set_cursor(&self, x: isize, y: isize) {\n unsafe { termbox::tb_set_cursor(x as c_int, y as c_int) }\n }\n\n pub unsafe fn change_cell(&self, x: usize, y: usize, ch: u32, fg: u16, bg: u16) {\n termbox::tb_change_cell(x as c_int, y as c_int, ch, fg, bg)\n }\n\n pub fn print(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, s: &str) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n for (i, ch) in s.chars().enumerate() {\n unsafe {\n self.change_cell(x+i, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n }\n\n pub fn print_char(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, ch: char) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n unsafe {\n self.change_cell(x, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n\n pub fn poll_event(&self, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_poll_event(&ev as *const RawEvent)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn peek_event(&self, timeout: Duration, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_peek_event(&ev as *const RawEvent, timeout.num_milliseconds() as c_int)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn set_input_mode(&self, mode: InputMode) {\n unsafe {\n termbox::tb_select_input_mode(mode as c_int);\n }\n }\n}\n\nimpl Drop for RustBox {\n fn drop(&mut self) {\n \/\/ Since only one instance of the RustBox is ever accessible, we should not\n \/\/ need to do this atomically.\n \/\/ Note: we should definitely have RUSTBOX_RUNNING = true here.\n unsafe {\n termbox::tb_shutdown();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>coap work<commit_after><|endoftext|>"} {"text":"<commit_before>use 
redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => {\n if first {\n first = false\n } else {\n echo = echo + \" \";\n }\n echo = echo + arg;\n }\n None => (),\n }\n }\n println!(\"{}\", echo);\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n File::exec(arg);\n },\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n let path = arg.clone();\n println!(\"URL: {}\", path);\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n for i in 2..args.len() {\n if let Some(arg) = args.get(i) {\n if i >= 3 {\n string.push_str(\" \");\n }\n string.push_str(arg);\n }\n }\n string.push_str(\"\\r\\n\\r\\n\");\n\n match file.write(&string.as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n 
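\/\/ \"wget\": open a raw TCP connection to the given host, send a bare GET request, and write the response to a file at the requested path\n        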
commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n\t\tlet mut command_list = String::new();\n\t\tcommand_list = commands.iter().fold(command_list, |l , n| l + \" \" + &n.name);\n\n commands.push(Command {\n\t\t\tname: \"help\".to_string(),\n\t\t\tmain: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n\t\t\t},\n\t\t});\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Some(cmd) => {\n if cmd == \"if\" {\n let mut value = false;\n\n match args.get(1) {\n Some(left) => match args.get(2) {\n Some(cmp) => match args.get(3) {\n Some(right) => {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n None => (),\n },\n None => (),\n },\n None => (),\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find('=') {\n Some(i) => {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => value = value + \" \" + &arg,\n None => (),\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n None => (),\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n\n }\n None => (),\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(&\"Terminal\".to_string());\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>formatting<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as 
*mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => {\n if first {\n first = false\n } else {\n echo = echo + \" \";\n }\n echo = echo + arg;\n }\n None => (),\n }\n }\n println!(\"{}\", echo);\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n File::exec(arg);\n },\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n let path = arg.clone();\n println!(\"URL: {}\", path);\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n for i in 2..args.len() {\n if let Some(arg) = args.get(i) {\n if i >= 3 {\n string.push_str(\" \");\n }\n string.push_str(arg);\n }\n }\n string.push_str(\"\\r\\n\\r\\n\");\n\n match file.write(&string.as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" 
HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let mut command_list = String::new();\n command_list = commands.iter().fold(command_list, |l , c| l + \" \" + &c.name);\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Some(cmd) => {\n if cmd == \"if\" {\n let mut value = false;\n\n match args.get(1) {\n Some(left) => match args.get(2) {\n Some(cmp) => match args.get(3) {\n Some(right) => {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n None => (),\n },\n None => (),\n },\n None => (),\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find('=') {\n Some(i) => {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => value = value + \" \" + &arg,\n None => (),\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n None => (),\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n\n }\n None => (),\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(&\"Terminal\".to_string());\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2090<commit_after>\/\/ https:\/\/leetcode.com\/problems\/k-radius-subarray-averages\/\npub fn get_averages(nums: Vec<i32>, k: i32) -> Vec<i32> {\n todo!()\n}\n\nfn main() {\n println!(\"{:?}\", get_averages(vec![7, 4, 3, 9, 1, 8, 5, 2, 6], 3)); \/\/ [-1,-1,-1,5,4,4,-1,-1,-1]\n println!(\"{:?}\", get_averages(vec![100000], 0)); \/\/ [100000]\n println!(\"{:?}\", get_averages(vec![8], 100000)); \/\/ [-1]\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate glutin;\n#[macro_use]\nextern crate glium;\n#[cfg(feature = \"cgmath\")]\nextern crate cgmath;\n#[cfg(feature = \"image\")]\nextern crate image;\n\nuse glium::Surface;\nuse glium::DisplayBuild;\n#[cfg(feature = \"cgmath\")]\nuse cgmath::FixedArray;\nuse std::old_io::BufReader;\n\nmod support;\n\n#[cfg(not(all(feature = \"cgmath\", feature = \"image\")))]\nfn main() {\n println!(\"This example requires the `cgmath` and `image` features to be enabled\");\n}\n\n#[cfg(all(feature = \"cgmath\", feature = \"image\"))]\nfn main() {\n use glium::DisplayBuild;\n\n \/\/ building the display, ie. 
the main object\n let display = glutin::WindowBuilder::new()\n .with_dimensions(800, 500)\n .with_title(format!(\"Glium Deferred Example\"))\n .build_glium()\n .unwrap();\n\n let image = image::load(BufReader::new(include_bytes!(\"..\/tests\/fixture\/opengl.png\")), image::PNG).unwrap();\n let opengl_texture = glium::texture::Texture2d::new(&display, image);\n\n let floor_vertex_buffer = {\n #[derive(Copy)]\n struct Vertex {\n position: [f32; 4],\n normal: [f32; 4],\n texcoord: [f32; 2]\n }\n\n implement_vertex!(Vertex, position, normal, texcoord);\n \n glium::VertexBuffer::new(&display,\n vec![\n Vertex { position: [-1.0, 0.0, -1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [0.0, 0.0] },\n Vertex { position: [1.0, 0.0, -1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [1.0, 0.0] },\n Vertex { position: [1.0, 0.0, 1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [1.0, 1.0] },\n Vertex { position: [-1.0, 0.0, 1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [0.0, 1.0] },\n ]\n )\n };\n\n let floor_index_buffer = glium::IndexBuffer::new(&display,\n glium::index::TrianglesList(vec![0u16, 1, 2, 0, 2, 3]));\n\n let quad_vertex_buffer = {\n #[derive(Copy)]\n struct Vertex {\n position: [f32; 4],\n texcoord: [f32; 2]\n }\n\n implement_vertex!(Vertex, position, texcoord);\n \n glium::VertexBuffer::new(&display,\n vec![\n Vertex { position: [0.0, 0.0, 0.0, 1.0], texcoord: [0.0, 0.0] },\n Vertex { position: [800.0, 0.0, 0.0, 1.0], texcoord: [1.0, 0.0] },\n Vertex { position: [800.0, 500.0, 0.0, 1.0], texcoord: [1.0, 1.0] },\n Vertex { position: [0.0, 500.0, 0.0, 1.0], texcoord: [0.0, 1.0] },\n ]\n )\n };\n\n let quad_index_buffer = glium::IndexBuffer::new(&display,\n glium::index::TrianglesList(vec![0u16, 1, 2, 0, 2, 3]));\n\n \/\/ compiling shaders and linking them together\n let prepass_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 perspective_matrix;\n uniform mat4 view_matrix;\n uniform mat4 model_matrix;\n\n in vec4 position;\n in vec4 normal;\n in vec2 texcoord;\n\n smooth out vec4 frag_position;\n smooth out vec4 frag_normal;\n smooth out vec2 frag_texcoord;\n\n void main() {\n frag_position = model_matrix * position;\n frag_normal = model_matrix * normal;\n frag_texcoord = texcoord;\n gl_Position = perspective_matrix * view_matrix * frag_position;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n \n uniform sampler2D texture;\n\n smooth in vec4 frag_position;\n smooth in vec4 frag_normal;\n smooth in vec2 frag_texcoord;\n\n out vec4 output1;\n out vec4 output2;\n out vec4 output3;\n out vec4 output4;\n\n void main() {\n output1 = vec4(frag_position);\n output2 = vec4(frag_normal);\n output3 = texture2D(texture, frag_texcoord);\n output4 = vec4(1.0, 0.0, 1.0, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n let lighting_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 matrix;\n\n in vec4 position;\n in vec2 texcoord;\n\n smooth out vec2 frag_texcoord;\n\n void main() {\n gl_Position = matrix * position;\n frag_texcoord = texcoord;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n \n uniform sampler2D position_texture;\n uniform sampler2D normal_texture;\n uniform vec4 light_position;\n uniform vec3 light_color;\n uniform vec3 light_attenuation;\n uniform float light_radius;\n\n smooth in vec2 frag_texcoord;\n\n out vec4 frag_output;\n\n void main() {\n vec4 position = texture2D(position_texture, frag_texcoord);\n vec4 normal = 
texture2D(normal_texture, frag_texcoord);\n vec3 light_vector = light_position.xyz - position.xyz;\n float light_distance = abs(length(light_vector));\n vec3 normal_vector = normalize(normal.xyz);\n float diffuse = max(dot(normal_vector, light_vector), 0.0);\n if (diffuse > 0.0) {\n float attenuation_factor = 1.0 \/ (\n light_attenuation.x +\n (light_attenuation.y * light_distance) +\n (light_attenuation.z * light_distance * light_distance)\n );\n attenuation_factor *= (1.0 - pow((light_distance \/ light_radius), 2.0));\n diffuse *= attenuation_factor;\n \n }\n frag_output = vec4(light_color * diffuse, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/\/ compiling shaders and linking them together\n let composition_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 matrix;\n\n in vec4 position;\n in vec2 texcoord;\n\n smooth out vec2 frag_texcoord;\n\n void main() {\n frag_texcoord = texcoord;\n gl_Position = matrix * position;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n\n uniform sampler2D decal_texture;\n uniform sampler2D lighting_texture;\n\n smooth in vec2 frag_texcoord;\n\n out vec4 frag_output;\n\n void main() {\n vec4 lighting_value = texture2D(lighting_texture, frag_texcoord);\n frag_output = vec4(texture2D(decal_texture, frag_texcoord).rgb * lighting_value.rgb, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/*\/\/ creating the uniforms structure\n #[uniforms]\n #[derive(Copy)]\n struct PrepassUniforms<'a> {\n perspective_matrix: [[f32; 4]; 4],\n view_matrix: [[f32; 4]; 4],\n model_matrix: [[f32; 4]; 4],\n texture: &'a glium::texture::Texture2d\n }\n\n #[uniforms]\n #[derive(Copy)]\n struct LightingUniforms<'a> {\n matrix: [[f32; 4]; 4],\n position_texture: &'a glium::texture::Texture2d,\n normal_texture: &'a glium::texture::Texture2d,\n light_position: [f32; 4],\n light_color: [f32; 3],\n light_attenuation: [f32; 3],\n light_radius: f32\n }\n\n #[uniforms]\n #[derive(Copy)]\n struct CompositionUniforms<'a> {\n matrix: [[f32; 4]; 4],\n decal_texture: &'a glium::texture::Texture2d,\n lighting_texture: &'a glium::texture::Texture2d\n }*\/\n\n struct Light {\n position: [f32; 4],\n color: [f32; 3],\n attenuation: [f32; 3],\n radius: f32\n }\n\n let texture1 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture2 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture3 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture4 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let depthtexture = glium::texture::DepthTexture2d::new_empty(&display, glium::texture::DepthFormat::F32, 800, 500);\n let output = &[(\"output1\", &texture1), (\"output2\", &texture2), (\"output3\", &texture3), (\"output4\", &texture4)];\n let mut framebuffer = glium::framebuffer::MultiOutputFrameBuffer::with_depth_buffer(&display, output, &depthtexture);\n\n let light_texture = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let mut light_buffer = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer(&display, &light_texture, &depthtexture);\n\n let ortho_matrix: cgmath::Matrix4<f32> = cgmath::ortho(0.0, 800.0, 0.0, 500.0, -1.0, 1.0);\n let fixed_ortho_matrix 
= ortho_matrix.as_fixed();\n\n let perspective_matrix: cgmath::Matrix4<f32> = cgmath::perspective(cgmath::deg(45.0), 1.333, 0.0001, 100.0);\n let fixed_perspective_matrix = perspective_matrix.as_fixed();\n let view_eye: cgmath::Point3<f32> = cgmath::Point3::new(0.0, 2.0, -2.0);\n let view_center: cgmath::Point3<f32> = cgmath::Point3::new(0.0, 0.0, 0.0);\n let view_up: cgmath::Vector3<f32> = cgmath::Vector3::new(0.0, 1.0, 0.0);\n let view_matrix: cgmath::Matrix4<f32> = cgmath::Matrix4::look_at(&view_eye, &view_center, &view_up);\n let fixed_view_matrix = view_matrix.as_fixed();\n let model_matrix: cgmath::Matrix4<f32> = cgmath::Matrix4::identity();\n let fixed_model_matrix = model_matrix.as_fixed();\n\n let lights = [\n Light {\n position: [1.0, 1.0, 1.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [1.0, 0.0, 0.0],\n radius: 1.5\n },\n Light {\n position: [0.0, 1.0, 0.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [0.0, 1.0, 0.0],\n radius: 1.5\n },\n Light {\n position: [0.0, 1.0, 1.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [0.0, 0.0, 1.0],\n radius: 1.5\n },\n Light {\n position: [1.0, 1.0, 0.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [1.0, 1.0, 0.0],\n radius: 1.5\n }\n ];\n\n \/\/ the main loop\n support::start_loop(|| {\n \/\/ prepass\n let uniforms = uniform! {\n perspective_matrix: *fixed_perspective_matrix,\n view_matrix: *fixed_view_matrix,\n model_matrix: *fixed_model_matrix,\n texture: &opengl_texture\n };\n framebuffer.clear_color(0.0, 0.0, 0.0, 0.0);\n framebuffer.draw(&floor_vertex_buffer, &floor_index_buffer, &prepass_program, &uniforms, &std::default::Default::default()).unwrap();\n\n \/\/ lighting\n let draw_params = glium::DrawParameters {\n \/\/depth_function: glium::DepthFunction::IfLessOrEqual,\n blending_function: Some(glium::BlendingFunction::Addition{\n source: glium::LinearBlendingFactor::One,\n destination: glium::LinearBlendingFactor::One\n }),\n .. std::default::Default::default()\n };\n light_buffer.clear_color(0.0, 0.0, 0.0, 0.0);\n for light in lights.iter() {\n let uniforms = uniform! {\n matrix: *fixed_ortho_matrix,\n position_texture: &texture1,\n normal_texture: &texture2,\n light_position: light.position,\n light_attenuation: light.attenuation,\n light_color: light.color,\n light_radius: light.radius\n };\n light_buffer.draw(&quad_vertex_buffer, &quad_index_buffer, &lighting_program, &uniforms, &draw_params).unwrap();\n }\n\n \/\/ composition\n let uniforms = uniform! 
{\n matrix: *fixed_ortho_matrix,\n decal_texture: &texture3,\n lighting_texture: &light_texture\n };\n let mut target = display.draw();\n target.clear_color(0.0, 0.0, 0.0, 0.0);\n target.draw(&quad_vertex_buffer, &quad_index_buffer, &composition_program, &uniforms, &std::default::Default::default()).unwrap();\n target.finish();\n\n \/\/ polling and handling the events received by the window\n for event in display.poll_events() {\n match event {\n glutin::Event::Closed => return support::Action::Stop,\n _ => ()\n }\n }\n\n support::Action::Continue\n });\n}\n<commit_msg>Fix the OpenGL logo being in the wrong direction in the deferred example<commit_after>extern crate glutin;\n#[macro_use]\nextern crate glium;\n#[cfg(feature = \"cgmath\")]\nextern crate cgmath;\n#[cfg(feature = \"image\")]\nextern crate image;\n\nuse glium::Surface;\nuse glium::DisplayBuild;\n#[cfg(feature = \"cgmath\")]\nuse cgmath::FixedArray;\nuse std::old_io::BufReader;\n\nmod support;\n\n#[cfg(not(all(feature = \"cgmath\", feature = \"image\")))]\nfn main() {\n println!(\"This example requires the `cgmath` and `image` features to be enabled\");\n}\n\n#[cfg(all(feature = \"cgmath\", feature = \"image\"))]\nfn main() {\n use glium::DisplayBuild;\n\n \/\/ building the display, ie. the main object\n let display = glutin::WindowBuilder::new()\n .with_dimensions(800, 500)\n .with_title(format!(\"Glium Deferred Example\"))\n .build_glium()\n .unwrap();\n\n let image = image::load(BufReader::new(include_bytes!(\"..\/tests\/fixture\/opengl.png\")), image::PNG).unwrap();\n let opengl_texture = glium::texture::Texture2d::new(&display, image);\n\n let floor_vertex_buffer = {\n #[derive(Copy)]\n struct Vertex {\n position: [f32; 4],\n normal: [f32; 4],\n texcoord: [f32; 2]\n }\n\n implement_vertex!(Vertex, position, normal, texcoord);\n \n glium::VertexBuffer::new(&display,\n vec![\n Vertex { position: [-1.0, 0.0, -1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [1.0, 0.0] },\n Vertex { position: [1.0, 0.0, -1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [0.0, 0.0] },\n Vertex { position: [1.0, 0.0, 1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [0.0, 1.0] },\n Vertex { position: [-1.0, 0.0, 1.0, 1.0], normal: [0.0, 1.0, 0.0, 1.0], texcoord: [1.0, 1.0] },\n ]\n )\n };\n\n let floor_index_buffer = glium::IndexBuffer::new(&display,\n glium::index::TrianglesList(vec![0u16, 1, 2, 0, 2, 3]));\n\n let quad_vertex_buffer = {\n #[derive(Copy)]\n struct Vertex {\n position: [f32; 4],\n texcoord: [f32; 2]\n }\n\n implement_vertex!(Vertex, position, texcoord);\n \n glium::VertexBuffer::new(&display,\n vec![\n Vertex { position: [0.0, 0.0, 0.0, 1.0], texcoord: [0.0, 0.0] },\n Vertex { position: [800.0, 0.0, 0.0, 1.0], texcoord: [1.0, 0.0] },\n Vertex { position: [800.0, 500.0, 0.0, 1.0], texcoord: [1.0, 1.0] },\n Vertex { position: [0.0, 500.0, 0.0, 1.0], texcoord: [0.0, 1.0] },\n ]\n )\n };\n\n let quad_index_buffer = glium::IndexBuffer::new(&display,\n glium::index::TrianglesList(vec![0u16, 1, 2, 0, 2, 3]));\n\n \/\/ compiling shaders and linking them together\n let prepass_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 perspective_matrix;\n uniform mat4 view_matrix;\n uniform mat4 model_matrix;\n\n in vec4 position;\n in vec4 normal;\n in vec2 texcoord;\n\n smooth out vec4 frag_position;\n smooth out vec4 frag_normal;\n smooth out vec2 frag_texcoord;\n\n void main() {\n frag_position = model_matrix * position;\n frag_normal = model_matrix * normal;\n frag_texcoord = 
texcoord;\n gl_Position = perspective_matrix * view_matrix * frag_position;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n \n uniform sampler2D texture;\n\n smooth in vec4 frag_position;\n smooth in vec4 frag_normal;\n smooth in vec2 frag_texcoord;\n\n out vec4 output1;\n out vec4 output2;\n out vec4 output3;\n out vec4 output4;\n\n void main() {\n output1 = vec4(frag_position);\n output2 = vec4(frag_normal);\n output3 = texture2D(texture, frag_texcoord);\n output4 = vec4(1.0, 0.0, 1.0, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n let lighting_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 matrix;\n\n in vec4 position;\n in vec2 texcoord;\n\n smooth out vec2 frag_texcoord;\n\n void main() {\n gl_Position = matrix * position;\n frag_texcoord = texcoord;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n \n uniform sampler2D position_texture;\n uniform sampler2D normal_texture;\n uniform vec4 light_position;\n uniform vec3 light_color;\n uniform vec3 light_attenuation;\n uniform float light_radius;\n\n smooth in vec2 frag_texcoord;\n\n out vec4 frag_output;\n\n void main() {\n vec4 position = texture2D(position_texture, frag_texcoord);\n vec4 normal = texture2D(normal_texture, frag_texcoord);\n vec3 light_vector = light_position.xyz - position.xyz;\n float light_distance = abs(length(light_vector));\n vec3 normal_vector = normalize(normal.xyz);\n float diffuse = max(dot(normal_vector, light_vector), 0.0);\n if (diffuse > 0.0) {\n float attenuation_factor = 1.0 \/ (\n light_attenuation.x +\n (light_attenuation.y * light_distance) +\n (light_attenuation.z * light_distance * light_distance)\n );\n attenuation_factor *= (1.0 - pow((light_distance \/ light_radius), 2.0));\n diffuse *= attenuation_factor;\n \n }\n frag_output = vec4(light_color * diffuse, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/\/ compiling shaders and linking them together\n let composition_program = glium::Program::from_source(&display,\n \/\/ vertex shader\n \"\n #version 130\n\n uniform mat4 matrix;\n\n in vec4 position;\n in vec2 texcoord;\n\n smooth out vec2 frag_texcoord;\n\n void main() {\n frag_texcoord = texcoord;\n gl_Position = matrix * position;\n }\n \",\n\n \/\/ fragment shader\n \"\n #version 130\n\n uniform sampler2D decal_texture;\n uniform sampler2D lighting_texture;\n\n smooth in vec2 frag_texcoord;\n\n out vec4 frag_output;\n\n void main() {\n vec4 lighting_value = texture2D(lighting_texture, frag_texcoord);\n frag_output = vec4(texture2D(decal_texture, frag_texcoord).rgb * lighting_value.rgb, 1.0);\n }\n \",\n\n \/\/ geometry shader\n None)\n .unwrap();\n\n \/*\/\/ creating the uniforms structure\n #[uniforms]\n #[derive(Copy)]\n struct PrepassUniforms<'a> {\n perspective_matrix: [[f32; 4]; 4],\n view_matrix: [[f32; 4]; 4],\n model_matrix: [[f32; 4]; 4],\n texture: &'a glium::texture::Texture2d\n }\n\n #[uniforms]\n #[derive(Copy)]\n struct LightingUniforms<'a> {\n matrix: [[f32; 4]; 4],\n position_texture: &'a glium::texture::Texture2d,\n normal_texture: &'a glium::texture::Texture2d,\n light_position: [f32; 4],\n light_color: [f32; 3],\n light_attenuation: [f32; 3],\n light_radius: f32\n }\n\n #[uniforms]\n #[derive(Copy)]\n struct CompositionUniforms<'a> {\n matrix: [[f32; 4]; 4],\n decal_texture: &'a glium::texture::Texture2d,\n lighting_texture: &'a glium::texture::Texture2d\n }*\/\n\n struct Light {\n position: [f32; 4],\n color: [f32; 3],\n attenuation: [f32; 3],\n radius: f32\n 
}\n\n let texture1 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture2 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture3 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let texture4 = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let depthtexture = glium::texture::DepthTexture2d::new_empty(&display, glium::texture::DepthFormat::F32, 800, 500);\n let output = &[(\"output1\", &texture1), (\"output2\", &texture2), (\"output3\", &texture3), (\"output4\", &texture4)];\n let mut framebuffer = glium::framebuffer::MultiOutputFrameBuffer::with_depth_buffer(&display, output, &depthtexture);\n\n let light_texture = glium::texture::Texture2d::new_empty(&display, glium::texture::UncompressedFloatFormat::F32F32F32F32, 800, 500);\n let mut light_buffer = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer(&display, &light_texture, &depthtexture);\n\n let ortho_matrix: cgmath::Matrix4<f32> = cgmath::ortho(0.0, 800.0, 0.0, 500.0, -1.0, 1.0);\n let fixed_ortho_matrix = ortho_matrix.as_fixed();\n\n let perspective_matrix: cgmath::Matrix4<f32> = cgmath::perspective(cgmath::deg(45.0), 1.333, 0.0001, 100.0);\n let fixed_perspective_matrix = perspective_matrix.as_fixed();\n let view_eye: cgmath::Point3<f32> = cgmath::Point3::new(0.0, 2.0, -2.0);\n let view_center: cgmath::Point3<f32> = cgmath::Point3::new(0.0, 0.0, 0.0);\n let view_up: cgmath::Vector3<f32> = cgmath::Vector3::new(0.0, 1.0, 0.0);\n let view_matrix: cgmath::Matrix4<f32> = cgmath::Matrix4::look_at(&view_eye, &view_center, &view_up);\n let fixed_view_matrix = view_matrix.as_fixed();\n let model_matrix: cgmath::Matrix4<f32> = cgmath::Matrix4::identity();\n let fixed_model_matrix = model_matrix.as_fixed();\n\n let lights = [\n Light {\n position: [1.0, 1.0, 1.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [1.0, 0.0, 0.0],\n radius: 1.5\n },\n Light {\n position: [0.0, 1.0, 0.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [0.0, 1.0, 0.0],\n radius: 1.5\n },\n Light {\n position: [0.0, 1.0, 1.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [0.0, 0.0, 1.0],\n radius: 1.5\n },\n Light {\n position: [1.0, 1.0, 0.0, 1.0],\n attenuation: [0.8, 0.00125, 0.0000001],\n color: [1.0, 1.0, 0.0],\n radius: 1.5\n }\n ];\n\n \/\/ the main loop\n support::start_loop(|| {\n \/\/ prepass\n let uniforms = uniform! {\n perspective_matrix: *fixed_perspective_matrix,\n view_matrix: *fixed_view_matrix,\n model_matrix: *fixed_model_matrix,\n texture: &opengl_texture\n };\n framebuffer.clear_color(0.0, 0.0, 0.0, 0.0);\n framebuffer.draw(&floor_vertex_buffer, &floor_index_buffer, &prepass_program, &uniforms, &std::default::Default::default()).unwrap();\n\n \/\/ lighting\n let draw_params = glium::DrawParameters {\n \/\/depth_function: glium::DepthFunction::IfLessOrEqual,\n blending_function: Some(glium::BlendingFunction::Addition{\n source: glium::LinearBlendingFactor::One,\n destination: glium::LinearBlendingFactor::One\n }),\n .. std::default::Default::default()\n };\n light_buffer.clear_color(0.0, 0.0, 0.0, 0.0);\n for light in lights.iter() {\n let uniforms = uniform! 
{\n matrix: *fixed_ortho_matrix,\n position_texture: &texture1,\n normal_texture: &texture2,\n light_position: light.position,\n light_attenuation: light.attenuation,\n light_color: light.color,\n light_radius: light.radius\n };\n light_buffer.draw(&quad_vertex_buffer, &quad_index_buffer, &lighting_program, &uniforms, &draw_params).unwrap();\n }\n\n \/\/ composition\n let uniforms = uniform! {\n matrix: *fixed_ortho_matrix,\n decal_texture: &texture3,\n lighting_texture: &light_texture\n };\n let mut target = display.draw();\n target.clear_color(0.0, 0.0, 0.0, 0.0);\n target.draw(&quad_vertex_buffer, &quad_index_buffer, &composition_program, &uniforms, &std::default::Default::default()).unwrap();\n target.finish();\n\n \/\/ polling and handling the events received by the window\n for event in display.poll_events() {\n match event {\n glutin::Event::Closed => return support::Action::Stop,\n _ => ()\n }\n }\n\n support::Action::Continue\n });\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\n\n#[derive(Debug, Deserialize)]\npub struct Service {\n pub documentation: Option<String>,\n pub examples: Option<BTreeMap<String, String>>,\n pub metadata: Metadata,\n pub operations: BTreeMap<String, Operation>,\n pub shapes: BTreeMap<String, Shape>,\n pub version: String,\n}\n\nimpl Service {\n pub fn client_type_name(&self) -> String {\n format!(\"{}Client\", self.service_type_name())\n }\n\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.service_type_name())\n }\n\n fn service_type_name(&self) -> String {\n self.metadata.service_abbreviation.replace(\"Amazon \", \"\").replace(\" \", \"\")\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct HttpRequest {\n pub method: String,\n #[serde(rename=\"requestUri\")]\n pub request_uri: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Input {\n pub documentation: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Output {\n pub documentation: Option<String>,\n #[serde(rename=\"resultWrapper\")]\n pub result_wrapper: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Error {\n pub documentation: Option<String>,\n pub error: Option<HttpError>,\n pub exception: Option<bool>,\n pub fault: Option<bool>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct HttpError {\n pub code: String,\n #[serde(rename=\"httpStatusCode\")]\n pub http_status_code: i32,\n #[serde(rename=\"senderFault\")]\n pub sender_fault: Option<bool>,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Member {\n pub deprecated: Option<bool>,\n pub documentation: Option<String>,\n pub flattened: Option<bool>,\n pub location: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub shape: String,\n pub streaming: Option<bool>,\n #[serde(rename=\"xmlAttribute\")]\n pub xml_attribute: Option<bool>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<XmlNamespace>,\n}\n\n#[derive(Debug, Deserialize)]\npub struct XmlNamespace {\n pub prefix: Option<String>,\n pub uri: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Key {\n pub documentation: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub required: Option<bool>,\n pub shape: String,\n}\n\n\n#[derive(Debug, Deserialize)]\npub struct Value {\n pub documentation: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub 
struct Shape {\n #[serde(rename=\"box\")]\n pub aws_box: Option<bool>,\n pub documentation: Option<String>,\n pub error: Option<HttpError>,\n pub exception: Option<bool>,\n pub fault: Option<bool>,\n pub flattened: Option<bool>,\n pub key: Option<Key>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub max: Option<i32>,\n pub member: Option<Member>,\n pub members: Option<BTreeMap<String, Member>>,\n pub min: Option<i32>,\n pub pattern: Option<String>,\n pub payload: Option<String>,\n pub required: Option<Vec<String>>,\n #[serde(rename=\"enum\")]\n pub shape_enum: Option<Vec<String>>,\n #[serde(rename=\"type\")]\n pub shape_type: String,\n pub sensitive: Option<bool>,\n #[serde(rename=\"timestampFormat\")]\n pub timestamp_format: Option<String>,\n pub value: Option<Value>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<XmlNamespace>,\n}\n\nimpl<'a> Shape {\n pub fn key(&'a self) -> &'a str {\n &self.key.as_ref().expect(\"Key shape undefined\").shape\n }\n\n pub fn value(&'a self) -> &'a str {\n &self.value.as_ref().expect(\"Value shape undefined\").shape\n }\n\n pub fn member(&'a self) -> &'a str {\n &self.member.as_ref().expect(\"Member shape undefined\").shape\n }\n\n pub fn required(&self, field: &'a str) -> bool {\n self.required.is_some() && self.required.as_ref().unwrap().contains(&String::from(field))\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct Operation {\n pub alias: Option<String>,\n pub deprecated: Option<bool>,\n pub documentation: Option<String>,\n #[serde(rename=\"documentationUrl\")]\n pub documentation_url: Option<String>,\n pub errors: Vec<Error>,\n pub http: HttpRequest,\n pub input: Option<Input>,\n pub name: String,\n pub output: Option<Output>,\n}\n\nimpl<'a> Operation {\n pub fn input_shape(&'a self) -> &'a str {\n &self.input.as_ref().expect(\"Operation input undefined\").shape\n }\n\n pub fn output_shape_or(&'a self, default: &'a str) -> &'a str {\n match self.output.as_ref() {\n Some(output) => &output.shape,\n None => default\n }\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct Metadata {\n #[serde(rename=\"apiVersion\")]\n pub api_version: String,\n #[serde(rename=\"checksumFormat\")]\n pub checksum_format: Option<String>,\n #[serde(rename=\"endpointPrefix\")]\n pub endpoint_prefix: String,\n #[serde(rename=\"globalEndpoint\")]\n pub global_endpoint: Option<String>,\n #[serde(rename=\"jsonVersion\")]\n pub json_version: Option<String>,\n pub protocol: String,\n #[serde(rename=\"serviceAbbreviation\")]\n pub service_abbreviation: String,\n #[serde(rename=\"serviceFullName\")]\n pub service_full_name: String,\n #[serde(rename=\"signatureVersion\")]\n pub signature_version: String,\n #[serde(rename=\"targetPrefix\")]\n pub target_prefix: Option<String>,\n #[serde(rename=\"timestampFormat\")]\n pub timestamp_format: Option<String>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<String>\n}\n<commit_msg>Not all operations have errors.<commit_after>use std::collections::BTreeMap;\n\n#[derive(Debug, Deserialize)]\npub struct Service {\n pub documentation: Option<String>,\n pub examples: Option<BTreeMap<String, String>>,\n pub metadata: Metadata,\n pub operations: BTreeMap<String, Operation>,\n pub shapes: BTreeMap<String, Shape>,\n pub version: String,\n}\n\nimpl Service {\n pub fn client_type_name(&self) -> String {\n format!(\"{}Client\", self.service_type_name())\n }\n\n pub fn error_type_name(&self) -> String {\n format!(\"{}Error\", self.service_type_name())\n }\n\n fn service_type_name(&self) 
-> String {\n self.metadata.service_abbreviation.replace(\"Amazon \", \"\").replace(\" \", \"\")\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct HttpRequest {\n pub method: String,\n #[serde(rename=\"requestUri\")]\n pub request_uri: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Input {\n pub documentation: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Output {\n pub documentation: Option<String>,\n #[serde(rename=\"resultWrapper\")]\n pub result_wrapper: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Error {\n pub documentation: Option<String>,\n pub error: Option<HttpError>,\n pub exception: Option<bool>,\n pub fault: Option<bool>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct HttpError {\n pub code: String,\n #[serde(rename=\"httpStatusCode\")]\n pub http_status_code: i32,\n #[serde(rename=\"senderFault\")]\n pub sender_fault: Option<bool>,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Member {\n pub deprecated: Option<bool>,\n pub documentation: Option<String>,\n pub flattened: Option<bool>,\n pub location: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub shape: String,\n pub streaming: Option<bool>,\n #[serde(rename=\"xmlAttribute\")]\n pub xml_attribute: Option<bool>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<XmlNamespace>,\n}\n\n#[derive(Debug, Deserialize)]\npub struct XmlNamespace {\n pub prefix: Option<String>,\n pub uri: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Key {\n pub documentation: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub required: Option<bool>,\n pub shape: String,\n}\n\n\n#[derive(Debug, Deserialize)]\npub struct Value {\n pub documentation: Option<String>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub shape: String,\n}\n\n#[derive(Debug, Deserialize)]\npub struct Shape {\n #[serde(rename=\"box\")]\n pub aws_box: Option<bool>,\n pub documentation: Option<String>,\n pub error: Option<HttpError>,\n pub exception: Option<bool>,\n pub fault: Option<bool>,\n pub flattened: Option<bool>,\n pub key: Option<Key>,\n #[serde(rename=\"locationName\")]\n pub location_name: Option<String>,\n pub max: Option<i32>,\n pub member: Option<Member>,\n pub members: Option<BTreeMap<String, Member>>,\n pub min: Option<i32>,\n pub pattern: Option<String>,\n pub payload: Option<String>,\n pub required: Option<Vec<String>>,\n #[serde(rename=\"enum\")]\n pub shape_enum: Option<Vec<String>>,\n #[serde(rename=\"type\")]\n pub shape_type: String,\n pub sensitive: Option<bool>,\n #[serde(rename=\"timestampFormat\")]\n pub timestamp_format: Option<String>,\n pub value: Option<Value>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<XmlNamespace>,\n}\n\nimpl<'a> Shape {\n pub fn key(&'a self) -> &'a str {\n &self.key.as_ref().expect(\"Key shape undefined\").shape\n }\n\n pub fn value(&'a self) -> &'a str {\n &self.value.as_ref().expect(\"Value shape undefined\").shape\n }\n\n pub fn member(&'a self) -> &'a str {\n &self.member.as_ref().expect(\"Member shape undefined\").shape\n }\n\n pub fn required(&self, field: &'a str) -> bool {\n self.required.is_some() && self.required.as_ref().unwrap().contains(&String::from(field))\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct Operation {\n pub alias: Option<String>,\n pub deprecated: Option<bool>,\n pub documentation: Option<String>,\n 
#[serde(rename=\"documentationUrl\")]\n pub documentation_url: Option<String>,\n pub errors: Option<Vec<Error>>,\n pub http: HttpRequest,\n pub input: Option<Input>,\n pub name: String,\n pub output: Option<Output>,\n}\n\nimpl<'a> Operation {\n pub fn input_shape(&'a self) -> &'a str {\n &self.input.as_ref().expect(\"Operation input undefined\").shape\n }\n\n pub fn output_shape_or(&'a self, default: &'a str) -> &'a str {\n match self.output.as_ref() {\n Some(output) => &output.shape,\n None => default\n }\n }\n}\n\n#[derive(Debug, Deserialize)]\npub struct Metadata {\n #[serde(rename=\"apiVersion\")]\n pub api_version: String,\n #[serde(rename=\"checksumFormat\")]\n pub checksum_format: Option<String>,\n #[serde(rename=\"endpointPrefix\")]\n pub endpoint_prefix: String,\n #[serde(rename=\"globalEndpoint\")]\n pub global_endpoint: Option<String>,\n #[serde(rename=\"jsonVersion\")]\n pub json_version: Option<String>,\n pub protocol: String,\n #[serde(rename=\"serviceAbbreviation\")]\n pub service_abbreviation: String,\n #[serde(rename=\"serviceFullName\")]\n pub service_full_name: String,\n #[serde(rename=\"signatureVersion\")]\n pub signature_version: String,\n #[serde(rename=\"targetPrefix\")]\n pub target_prefix: Option<String>,\n #[serde(rename=\"timestampFormat\")]\n pub timestamp_format: Option<String>,\n #[serde(rename=\"xmlNamespace\")]\n pub xml_namespace: Option<String>\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for const<commit_after>\/\/ run-pass\n\n#![feature(cow_is_borrowed)]\n\nuse std::borrow::Cow;\n\nfn main() {\n const COW: Cow<str> = Cow::Borrowed(\"moo\");\n\n const IS_BORROWED: bool = COW.is_borrowed();\n assert!(IS_BORROWED);\n\n const IS_OWNED: bool = COW.is_owned();\n assert!(!IS_OWNED);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Started on integration testing<commit_after>\n#![feature(box_syntax,box_patterns)]\nextern crate seax_svm;\n\nuse seax_svm::svm::slist::Stack;\nuse seax_svm::svm::slist::List::{Cons,Nil};\nuse seax_svm::svm::State;\nuse seax_svm::svm::cell::Atom::*;\nuse seax_svm::svm::cell::SVMCell::*;\nuse seax_svm::svm::Inst::*;\n\nmacro_rules! list(\n ( $e:expr, $($rest:expr),+ ) => ( Cons($e, Box::new(list!( $( $rest ),+ )) ));\n ( $e:expr ) => ( Cons($e, Box::new(Nil)) );\n () => ( Box::new(Nil) );\n);\n\n\/\/\/ SVM integration tests.\n\/\/\/\n\/\/\/ These are based on the sample programs in Zach Allaun's Clojure SECD\n\/\/\/ [implementation](https:\/\/github.com\/zachallaun\/secd). 
Each example also\n\/\/\/ provides the source code for the equivalent Lisp program.\n\n\/\/\/ Test for simple list construction through CONS.\n\/\/\/\n\/\/\/ ```lisp\n\/\/\/ (cons 10 (cons 20 nil))\n\/\/\/ ```\n#[test]\nfn test_list_creation() {\n let state = seax_svm::svm::eval_program(list!(\n InstCell(NIL),\n InstCell(LDC), AtomCell(SInt(10)), InstCell(CONS),\n InstCell(LDC), AtomCell(SInt(20)), InstCell(CONS)\n ));\n assert_eq!(state,\n list!(ListCell(box list!(AtomCell(SInt(20)), AtomCell(SInt(10)))))\n );\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add calloc test<commit_after>\/\/ignore-windows: Uses POSIX APIs\n\n#![feature(rustc_private)]\n\nuse core::slice;\n\nextern crate libc;\n\nfn main() {\n unsafe {\n let p1 = libc::calloc(0, 0);\n assert!(p1.is_null());\n\n let p2 = libc::calloc(20, 0);\n assert!(p2.is_null());\n\n let p3 = libc::calloc(0, 20);\n assert!(p3.is_null());\n\n let p4 = libc::calloc(4, 8) as *const u8;\n assert!(!p4.is_null());\n\n let slice = slice::from_raw_parts(p4, 4 * 8);\n assert_eq!(&slice, &[0_u8; 4 * 8]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #25368. Fixes #25368.<commit_after>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::sync::mpsc::channel;\nuse std::thread::spawn;\nuse std::marker::PhantomData;\n\nstruct Foo<T> {foo: PhantomData<T>}\n\nfn main() {\n let (tx, rx) = channel();\n\n spawn(move || {\n tx.send(Foo{ foo: PhantomData }); \/\/~ ERROR E0282\n });\n}\n<|endoftext|>"} {"text":"<commit_before>#[allow(dead_code)]\nextern crate rand;\nextern crate orbclient;\n\nuse super::vid;\nuse super::start;\n\npub fn wireframe(id: u16) -> vid::Shader {\n let wireframe_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let flat_1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y, \n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n \n window.window.line(flat_1.x, flat_1.y, flat_2.x, flat_2.y, triangle.color.orb_color());\n window.window.line(flat_3.x, flat_3.y, flat_2.x, flat_2.y, triangle.color.orb_color());\n window.window.line(flat_1.x, flat_1.y, flat_3.x, flat_3.y, triangle.color.orb_color());\n };\n\n vid::Shader::new(id, Box::new(wireframe_shader))\n}\n\npub fn disco_wireframe(id: u16) -> vid::Shader {\n let noise_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let flat_1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y, \n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + 
window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n \n window.window.line(flat_1.x, flat_1.y, flat_2.x, flat_2.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n window.window.line(flat_3.x, flat_3.y, flat_2.x, flat_2.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n window.window.line(flat_1.x, flat_1.y, flat_3.x, flat_3.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n };\n\n vid::Shader::new(id, Box::new(noise_shader))\n}\n\npub fn filled_triangle_color(id: u16) -> vid::Shader {\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, triangle.color.orb_color());\n }\n }\n }\n };\n\n vid::Shader::new(id, Box::new(rasterize_shader))\n}\n\npub fn filled_b_w_noise(id: u16) -> vid::Shader {\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = 
points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n let random = rand::random::<u8>();\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, vid::Color::new(random, random, random).orb_color());\n }\n }\n }\n };\n\n vid::Shader::new(id, Box::new(rasterize_shader))\n}\n\npub fn filled_texture_naive(id: u16, texture_path: &str) -> vid::Shader {\n let img = orbclient::BmpFile::from_path(texture_path);\n\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n use std::ops::Deref;\n\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let img_w = wrapper.image_data.width();\n let img_h = wrapper.image_data.height();\n\n let img_slice = wrapper.image_data.deref();\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n let random = rand::random::<u8>();\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, img_slice[(px + py*(img_w as i32)) as usize]);\n }\n }\n }\n };\n\n let mut shader = vid::Shader::new(id, Box::new(rasterize_shader));\n\n shader.image_data = img;\n\n shader\n}\n<commit_msg>Added gradient shader<commit_after>extern crate rand;\nextern crate orbclient;\n\nuse super::vid;\nuse super::start;\n\npub fn wireframe(id: u16) -> vid::Shader {\n let wireframe_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let flat_1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y, \n triangle.x + window.camera_x, \n 
triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n \n window.window.line(flat_1.x, flat_1.y, flat_2.x, flat_2.y, triangle.color.orb_color());\n window.window.line(flat_3.x, flat_3.y, flat_2.x, flat_2.y, triangle.color.orb_color());\n window.window.line(flat_1.x, flat_1.y, flat_3.x, flat_3.y, triangle.color.orb_color());\n };\n\n vid::Shader::new(id, Box::new(wireframe_shader))\n}\n\npub fn disco_wireframe(id: u16) -> vid::Shader {\n let noise_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let flat_1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y, \n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n let flat_3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n \n window.window.line(flat_1.x, flat_1.y, flat_2.x, flat_2.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n window.window.line(flat_3.x, flat_3.y, flat_2.x, flat_2.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n window.window.line(flat_1.x, flat_1.y, flat_3.x, flat_3.y, vid::Color::new(rand::random::<u8>(),\n rand::random::<u8>(),\n rand::random::<u8>()).orb_color());\n };\n\n vid::Shader::new(id, Box::new(noise_shader))\n}\n\npub fn filled_triangle_color(id: u16) -> vid::Shader {\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - 
p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, triangle.color.orb_color());\n }\n }\n }\n };\n\n vid::Shader::new(id, Box::new(rasterize_shader))\n}\n\npub fn filled_b_w_noise(id: u16) -> vid::Shader {\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n let random = rand::random::<u8>();\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, vid::Color::new(random, random, random).orb_color());\n }\n }\n }\n };\n\n vid::Shader::new(id, Box::new(rasterize_shader))\n}\n\npub fn filled_texture_naive(id: u16, texture_path: &str) -> vid::Shader {\n let img = orbclient::BmpFile::from_path(texture_path);\n\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n use std::ops::Deref;\n\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let img_w = wrapper.image_data.width();\n let img_h = wrapper.image_data.height();\n\n let img_slice = wrapper.image_data.deref();\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n 
let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n let random = rand::random::<u8>();\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.window.pixel(px, py, img_slice[(px - leftmost.x + (py - upmost.y)*(img_w as i32)) as usize]);\n }\n }\n }\n };\n\n let mut shader = vid::Shader::new(id, Box::new(rasterize_shader));\n\n shader.image_data = img;\n\n shader\n}\n\npub fn filled_triangle_gradient(id: u16) -> vid::Shader {\n let rasterize_shader = |triangle: &vid::Triangle, window: &mut start::Window, wrapper: &vid::Shader| {\n let p1 = triangle.p1.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x, \n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p2 = triangle.p2.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n let p3 = triangle.p3.clone().flat_point(window.screen_x, window.screen_y,\n triangle.x + window.camera_x,\n triangle.y + window.camera_y,\n triangle.z + window.camera_z);\n\n struct FloatPoint {\n x: f32,\n y: f32\n }\n\n let points = [p1, p2, p3];\n\n let upmost = points.iter().max_by_key(|p| -p.y).unwrap().clone();\n let leftmost = points.iter().max_by_key(|p| -p.x).unwrap().clone();\n let rightmost = points.iter().max_by_key(|p| p.x).unwrap().clone();\n let lowmost = points.iter().max_by_key(|p| p.y).unwrap().clone();\n\n for px in leftmost.x..rightmost.x {\n for py in upmost.y..lowmost.y {\n let p1 = FloatPoint {x: p1.x as f32, y: p1.y as f32};\n let p2 = FloatPoint {x: p2.x as f32, y: p2.y as f32};\n let p3 = FloatPoint {x: p3.x as f32, y: p3.y as f32};\n\n let p = FloatPoint {x: px as f32, y: py as f32};\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n let p1_dist = ((px as f32 - p1.x)*(px as f32 - p1.x) + (py as f32 - p1.y)*(py as f32 - p1.y)).sqrt();\n let p2_dist = ((px as f32 - p2.x)*(px as f32 - p2.x) + (py as f32 - p2.y)*(py as f32 - p2.y)).sqrt();\n let p3_dist = ((px as f32 - p3.x)*(px as f32 - p3.x) + (py as f32 - p3.y)*(py as f32 - p3.y)).sqrt();\n\n window.window.pixel(px, py, vid::Color::new(p1_dist as u8, p2_dist as u8, p3_dist as u8).orb_color());\n }\n }\n }\n };\n\n vid::Shader::new(id, Box::new(rasterize_shader))\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (C) 2020 O.S. 
Systems Sofware LTDA\n\/\/\n\/\/ SPDX-License-Identifier: Apache-2.0\n\nuse crate::{api, Error, Result};\nuse async_std::{fs, io};\nuse futures_util::future::BoxFuture;\nuse slog_scope::{debug, error};\nuse std::{\n convert::{TryFrom, TryInto},\n path::Path,\n};\nuse surf::{\n http::headers,\n middleware::{self, Middleware},\n StatusCode,\n};\n\nstruct API;\n\nimpl Middleware for API {\n fn handle<'a>(\n &'a self,\n mut req: middleware::Request,\n client: std::sync::Arc<dyn middleware::HttpClient>,\n next: middleware::Next<'a>,\n ) -> BoxFuture<'a, std::result::Result<middleware::Response, surf::Error>> {\n Box::pin(async move {\n req.insert_header(headers::USER_AGENT, \"updatehub\/next\");\n req.insert_header(headers::CONTENT_TYPE, \"application\/json\");\n req.insert_header(\"api-content-type\", \"application\/vnd.updatehub-v1+json\");\n Ok(next.run(req, client).await?)\n })\n }\n}\n\npub struct Client<'a> {\n client: surf::Client,\n server: &'a str,\n}\n\npub async fn get<W>(url: &str, handle: &mut W) -> Result<()>\nwhere\n W: io::Write + Unpin,\n{\n let req = surf::get(url);\n save_body_to(req, handle).await\n}\n\nasync fn save_body_to<W>(req: surf::Request, handle: &mut W) -> Result<()>\nwhere\n W: io::Write + Unpin,\n{\n use async_std::prelude::StreamExt;\n use io::prelude::{ReadExt, WriteExt};\n use std::str::FromStr;\n\n let rep = req.await?;\n if !rep.status().is_success() {\n return Err(Error::InvalidStatusResponse(rep.status()));\n }\n\n let mut written: f32 = 0.;\n let mut threshold = 10;\n let length = match rep.header(headers::CONTENT_LENGTH) {\n Some(v) => usize::from_str(v.as_str())?,\n None => 0,\n };\n\n let mut stream = rep.bytes();\n while let Some(byte) = stream.next().await {\n let byte = byte?;\n handle.write_all(&[byte]).await?;\n if length > 0 {\n written += 1.0 \/ (length \/ 100) as f32;\n if written as usize >= threshold {\n threshold += 20;\n debug!(\"{}% of the file has been downloaded\", std::cmp::max(written as usize, 100));\n }\n }\n }\n debug!(\"100% of the file has been downloaded\");\n\n Ok(())\n}\n\nimpl<'a> Client<'a> {\n pub fn new(server: &'a str) -> Self {\n Self { server, client: surf::Client::new() }\n }\n\n pub async fn probe(\n &self,\n num_retries: u64,\n firmware: api::FirmwareMetadata<'_>,\n ) -> Result<api::ProbeResponse> {\n let mut response = self\n .client\n .post(&format!(\"{}\/upgrades\", &self.server))\n .middleware(API)\n .set_header(\"api-retries\", num_retries.to_string())\n .body_json(&firmware)?\n .await?;\n\n match response.status() {\n StatusCode::NotFound => Ok(api::ProbeResponse::NoUpdate),\n StatusCode::Ok => {\n match response\n .header(\"add-extra-poll\")\n .map(|extra_poll| extra_poll.as_str())\n .and_then(|extra_poll| extra_poll.parse().ok())\n {\n Some(extra_poll) => Ok(api::ProbeResponse::ExtraPoll(extra_poll)),\n None => {\n let signature =\n response.header(\"UH-Signature\").map(TryInto::try_into).transpose()?;\n Ok(api::ProbeResponse::Update(\n api::UpdatePackage::parse(&response.body_bytes().await?)?,\n signature,\n ))\n }\n }\n }\n s => Err(Error::InvalidStatusResponse(s)),\n }\n }\n\n pub async fn download_object(\n &self,\n product_uid: &str,\n package_uid: &str,\n download_dir: &Path,\n object: &str,\n ) -> Result<()> {\n \/\/ FIXME: Discuss the need of packages inside the route\n let mut request = self\n .client\n .get(&format!(\n \"{}\/products\/{}\/packages\/{}\/objects\/{}\",\n &self.server, product_uid, package_uid, object\n ))\n .middleware(API);\n\n if !download_dir.exists() {\n 
fs::create_dir_all(download_dir).await.map_err(|e| {\n error!(\"fail to create {:?} directory, error: {}\", download_dir, e);\n e\n })?;\n }\n\n let file = download_dir.join(object);\n if file.exists() {\n request = request.set_header(\n \"RANGE\",\n format!(\"bytes={}-\", file.metadata()?.len().saturating_sub(1)),\n );\n }\n\n let mut file = fs::OpenOptions::new().create(true).append(true).open(&file).await?;\n\n save_body_to(request, &mut file).await\n }\n\n pub async fn report(\n &self,\n state: &str,\n firmware: api::FirmwareMetadata<'_>,\n package_uid: &str,\n previous_state: Option<&str>,\n error_message: Option<String>,\n current_log: Option<String>,\n ) -> Result<()> {\n #[derive(serde::Serialize)]\n #[serde(rename_all = \"kebab-case\")]\n struct Payload<'a> {\n #[serde(rename = \"status\")]\n state: &'a str,\n #[serde(flatten)]\n firmware: api::FirmwareMetadata<'a>,\n package_uid: &'a str,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n previous_state: Option<&'a str>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n error_message: Option<String>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n current_log: Option<String>,\n }\n\n let payload =\n Payload { state, firmware, package_uid, previous_state, error_message, current_log };\n\n self.client\n .post(&format!(\"{}\/report\", &self.server))\n .middleware(API)\n .body_json(&payload)?\n .await?;\n Ok(())\n }\n}\n\nimpl TryFrom<&headers::HeaderValues> for api::Signature {\n type Error = Error;\n\n fn try_from(value: &headers::HeaderValues) -> Result<Self> {\n Ok(Self::from_base64_str(value.as_str())?)\n }\n}\n<commit_msg>updatehub-cloud-sdk: workaround for empty base64 decode bug<commit_after>\/\/ Copyright (C) 2020 O.S. Systems Sofware LTDA\n\/\/\n\/\/ SPDX-License-Identifier: Apache-2.0\n\nuse crate::{api, Error, Result};\nuse async_std::{fs, io};\nuse futures_util::future::BoxFuture;\nuse slog_scope::{debug, error};\nuse std::{\n convert::{TryFrom, TryInto},\n path::Path,\n};\nuse surf::{\n http::headers,\n middleware::{self, Middleware},\n StatusCode,\n};\n\nstruct API;\n\nimpl Middleware for API {\n fn handle<'a>(\n &'a self,\n mut req: middleware::Request,\n client: std::sync::Arc<dyn middleware::HttpClient>,\n next: middleware::Next<'a>,\n ) -> BoxFuture<'a, std::result::Result<middleware::Response, surf::Error>> {\n Box::pin(async move {\n req.insert_header(headers::USER_AGENT, \"updatehub\/next\");\n req.insert_header(headers::CONTENT_TYPE, \"application\/json\");\n req.insert_header(\"api-content-type\", \"application\/vnd.updatehub-v1+json\");\n Ok(next.run(req, client).await?)\n })\n }\n}\n\npub struct Client<'a> {\n client: surf::Client,\n server: &'a str,\n}\n\npub async fn get<W>(url: &str, handle: &mut W) -> Result<()>\nwhere\n W: io::Write + Unpin,\n{\n let req = surf::get(url);\n save_body_to(req, handle).await\n}\n\nasync fn save_body_to<W>(req: surf::Request, handle: &mut W) -> Result<()>\nwhere\n W: io::Write + Unpin,\n{\n use async_std::prelude::StreamExt;\n use io::prelude::{ReadExt, WriteExt};\n use std::str::FromStr;\n\n let rep = req.await?;\n if !rep.status().is_success() {\n return Err(Error::InvalidStatusResponse(rep.status()));\n }\n\n let mut written: f32 = 0.;\n let mut threshold = 10;\n let length = match rep.header(headers::CONTENT_LENGTH) {\n Some(v) => usize::from_str(v.as_str())?,\n None => 0,\n };\n\n let mut stream = rep.bytes();\n while let Some(byte) = stream.next().await {\n let byte = byte?;\n handle.write_all(&[byte]).await?;\n if length > 0 {\n written += 1.0 \/ 
(length \/ 100) as f32;\n if written as usize >= threshold {\n threshold += 20;\n debug!(\"{}% of the file has been downloaded\", std::cmp::max(written as usize, 100));\n }\n }\n }\n debug!(\"100% of the file has been downloaded\");\n\n Ok(())\n}\n\nimpl<'a> Client<'a> {\n pub fn new(server: &'a str) -> Self {\n Self { server, client: surf::Client::new() }\n }\n\n pub async fn probe(\n &self,\n num_retries: u64,\n firmware: api::FirmwareMetadata<'_>,\n ) -> Result<api::ProbeResponse> {\n let mut response = self\n .client\n .post(&format!(\"{}\/upgrades\", &self.server))\n .middleware(API)\n .set_header(\"api-retries\", num_retries.to_string())\n .body_json(&firmware)?\n .await?;\n\n match response.status() {\n StatusCode::NotFound => Ok(api::ProbeResponse::NoUpdate),\n StatusCode::Ok => {\n match response\n .header(\"add-extra-poll\")\n .map(|extra_poll| extra_poll.as_str())\n .and_then(|extra_poll| extra_poll.parse().ok())\n {\n Some(extra_poll) => Ok(api::ProbeResponse::ExtraPoll(extra_poll)),\n None => {\n let signature =\n response.header(\"UH-Signature\").map(TryInto::try_into).transpose()?;\n Ok(api::ProbeResponse::Update(\n api::UpdatePackage::parse(&response.body_bytes().await?)?,\n signature,\n ))\n }\n }\n }\n s => Err(Error::InvalidStatusResponse(s)),\n }\n }\n\n pub async fn download_object(\n &self,\n product_uid: &str,\n package_uid: &str,\n download_dir: &Path,\n object: &str,\n ) -> Result<()> {\n \/\/ FIXME: Discuss the need of packages inside the route\n let mut request = self\n .client\n .get(&format!(\n \"{}\/products\/{}\/packages\/{}\/objects\/{}\",\n &self.server, product_uid, package_uid, object\n ))\n .middleware(API);\n\n if !download_dir.exists() {\n fs::create_dir_all(download_dir).await.map_err(|e| {\n error!(\"fail to create {:?} directory, error: {}\", download_dir, e);\n e\n })?;\n }\n\n let file = download_dir.join(object);\n if file.exists() {\n request = request.set_header(\n \"RANGE\",\n format!(\"bytes={}-\", file.metadata()?.len().saturating_sub(1)),\n );\n }\n\n let mut file = fs::OpenOptions::new().create(true).append(true).open(&file).await?;\n\n save_body_to(request, &mut file).await\n }\n\n pub async fn report(\n &self,\n state: &str,\n firmware: api::FirmwareMetadata<'_>,\n package_uid: &str,\n previous_state: Option<&str>,\n error_message: Option<String>,\n current_log: Option<String>,\n ) -> Result<()> {\n #[derive(serde::Serialize)]\n #[serde(rename_all = \"kebab-case\")]\n struct Payload<'a> {\n #[serde(rename = \"status\")]\n state: &'a str,\n #[serde(flatten)]\n firmware: api::FirmwareMetadata<'a>,\n package_uid: &'a str,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n previous_state: Option<&'a str>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n error_message: Option<String>,\n #[serde(skip_serializing_if = \"Option::is_none\")]\n current_log: Option<String>,\n }\n\n let payload =\n Payload { state, firmware, package_uid, previous_state, error_message, current_log };\n\n self.client\n .post(&format!(\"{}\/report\", &self.server))\n .middleware(API)\n .body_json(&payload)?\n .await?;\n Ok(())\n }\n}\n\nimpl TryFrom<&headers::HeaderValues> for api::Signature {\n type Error = Error;\n\n fn try_from(value: &headers::HeaderValues) -> Result<Self> {\n let value = value.as_str();\n\n \/\/ Workarround for https:\/\/github.com\/sfackler\/rust-openssl\/issues\/1325\n if value.is_empty() {\n return Ok(Self::from_base64_str(\"\")?);\n }\n\n Ok(Self::from_base64_str(value)?)\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Remove request body from get handlers.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an empty test file which just calls `speculate!`.<commit_after>#![feature(phase)]\n\n#[phase(plugin)]\nextern crate speculate;\n\nspeculate! {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for variable-length-quantity case<commit_after>\/\/\/ Convert a list of numbers to a stream of bytes encoded with variable length encoding.\npub fn to_bytes(values: &[u32]) -> Vec<u8> {\n let mut result:Vec<u8> = Vec::with_capacity(values.len() * 5);\n\n for w in values {\n let mut zeroable = false;\n\n let mut b = *w >> 28 & 0x0000_007F;\n if b != 0 {\n zeroable = true;\n result.push(0x80u8 + b as u8);\n }\n\n b = *w >> 21 & 0x0000_007F;\n if b != 0 || zeroable {\n zeroable = true;\n result.push(0x80u8 + b as u8);\n }\n\n b = *w >> 14 & 0x0000_007F;\n if b != 0 || zeroable {\n zeroable = true;\n result.push(0x80u8 + b as u8);\n }\n\n b = *w >> 7 & 0x0000_007F;\n if b != 0 || zeroable {\n result.push(0x80u8 + b as u8);\n }\n\n b = *w & 0x0000_007F;\n result.push(b as u8);\n }\n\n result\n}\n\n\/\/\/ Given a stream of bytes, extract all numbers which are encoded in there.\npub fn from_bytes(bytes: &[u8]) -> Result<Vec<u32>, &'static str> {\n let mut length = 0;\n let mut value: u32 = 0;\n let mut result:Vec<u32> = Vec::new();\n\n for b in bytes {\n length += 1;\n\n if length > 5 {\n return Err(\"Invalid VLQ code\");\n }\n\n if let Some(v) = value.checked_mul(128) {\n value = v;\n } else {\n return Err(\"Overflowed VLQ code\");\n }\n\n if *b & 0x80 != 0 {\n value += (*b - 0x80u8) as u32;\n } else {\n value += *b as u32;\n\n result.push(value);\n\n value = 0;\n length = 0;\n }\n }\n\n if length != 0 {\n return Err(\"Invalid VLQ code\");\n }\n\n Ok(result)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Also use sbsps from other scenario tags<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(libc)]\n#![feature(std_misc)]\n#![feature(optin_builtin_traits)]\n\nextern crate gag;\nextern crate libc;\nextern crate num;\n#[macro_use] extern crate enum_primitive;\nextern crate termbox_sys as termbox;\n#[macro_use] extern crate bitflags;\n\npub use self::style::{Style, RB_BOLD, RB_UNDERLINE, RB_REVERSE, RB_NORMAL};\n\nuse std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::char;\nuse std::time::duration::Duration;\nuse std::default::Default;\n\nuse num::FromPrimitive;\nuse termbox::RawEvent;\nuse libc::c_int;\nuse gag::Hold;\n\npub mod keyboard;\npub mod mouse;\n\npub use keyboard::Key;\npub use mouse::Mouse;\n\n#[derive(Clone, Copy)]\npub enum Event {\n KeyEventRaw(u8, u16, u32),\n KeyEvent(Option<Key>),\n ResizeEvent(i32, i32),\n MouseEvent(Mouse, i32, i32),\n NoEvent\n}\n\n#[derive(Clone, Copy, Debug)]\npub enum InputMode {\n Current = 0x00,\n\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ ESC sequence => ESC means TB_KEY_ESC\n Esc = 0x01,\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ sequence => ESC enables TB_MOD_ALT modifier for the next keyboard event.\n Alt = 0x02,\n}\n\n#[derive(Clone, Copy, PartialEq)]\n#[repr(C,u16)]\npub enum Color {\n Default = 0x00,\n Black = 0x01,\n Red = 0x02,\n Green = 0x03,\n Yellow = 0x04,\n Blue = 0x05,\n Magenta = 0x06,\n Cyan = 0x07,\n White = 0x08,\n}\n\nmod style {\n bitflags! 
{\n #[repr(C)]\n flags Style: u16 {\n const TB_NORMAL_COLOR = 0x000F,\n const RB_BOLD = 0x0100,\n const RB_UNDERLINE = 0x0200,\n const RB_REVERSE = 0x0400,\n const RB_NORMAL = 0x0000,\n const TB_ATTRIB = RB_BOLD.bits | RB_UNDERLINE.bits | RB_REVERSE.bits,\n }\n }\n\n impl Style {\n pub fn from_color(color: super::Color) -> Style {\n Style { bits: color as u16 & TB_NORMAL_COLOR.bits }\n }\n }\n}\n\nconst NIL_RAW_EVENT: RawEvent = RawEvent { etype: 0, emod: 0, key: 0, ch: 0, w: 0, h: 0, x: 0, y: 0 };\n\n\/\/ FIXME: Rust doesn't support this enum representation.\n\/\/ #[derive(Copy,FromPrimitive,Debug)]\n\/\/ #[repr(C,int)]\n\/\/ pub enum EventErrorKind {\n\/\/ Error = -1,\n\/\/ }\n\/\/ pub type EventError = Option<EventErrorKind>;\n#[allow(non_snake_case)]\npub mod EventErrorKind {\n #[derive(Clone, Copy,Debug)]\n pub struct Error;\n}\n\npub type EventError = Option<EventErrorKind::Error>;\n\npub type EventResult<T> = Result<T, EventError>;\n\n\/\/\/ Unpack a RawEvent to an Event\n\/\/\/\n\/\/\/ if the `raw` parameter is true, then the Event variant will be the raw\n\/\/\/ representation of the event.\n\/\/\/ for instance KeyEventRaw instead of KeyEvent\n\/\/\/\n\/\/\/ This is useful if you want to interpret the raw event data yourself, rather\n\/\/\/ than having rustbox translate it to its own representation.\nfn unpack_event(ev_type: c_int, ev: &RawEvent, raw: bool) -> EventResult<Event> {\n match ev_type {\n 0 => Ok(Event::NoEvent),\n 1 => Ok(\n if raw {\n Event::KeyEventRaw(ev.emod, ev.key, ev.ch)\n } else {\n let k = match ev.key {\n 0 => char::from_u32(ev.ch).map(|c| Key::Char(c)),\n a => Key::from_code(a),\n };\n Event::KeyEvent(k)\n }),\n 2 => Ok(Event::ResizeEvent(ev.w, ev.h)),\n 3 => {\n let mouse = Mouse::from_code(ev.key).unwrap_or(Mouse::Left);\n Ok(Event::MouseEvent(mouse, ev.x, ev.y))\n }\n \/\/ FIXME: Rust doesn't support this error representation\n \/\/ res => FromPrimitive::from_int(res as isize),\n -1 => Err(Some(EventErrorKind::Error)),\n _ => Err(None)\n }\n}\n\nenum_from_primitive! 
{\n#[derive(Clone, Copy, Debug)]\n#[repr(C,isize)]\npub enum InitErrorKind {\n UnsupportedTerminal = -1,\n FailedToOpenTty = -2,\n PipeTrapError = -3,\n}\n}\n\n#[derive(Debug)]\npub enum InitError {\n BufferStderrFailed(io::Error),\n AlreadyOpen,\n TermBox(Option<InitErrorKind>),\n}\n\nimpl fmt::Display for InitError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.description())\n }\n}\n\nimpl Error for InitError {\n fn description(&self) -> &str {\n match *self {\n InitError::BufferStderrFailed(_) => \"Could not redirect stderr\",\n InitError::AlreadyOpen => \"RustBox is already open\",\n InitError::TermBox(e) => e.map_or(\"Unexpected TermBox return code\", |e| match e {\n InitErrorKind::UnsupportedTerminal => \"Unsupported terminal\",\n InitErrorKind::FailedToOpenTty => \"Failed to open TTY\",\n InitErrorKind::PipeTrapError => \"Pipe trap error\",\n }),\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n match *self {\n InitError::BufferStderrFailed(ref e) => Some(e),\n _ => None\n }\n }\n}\n\n#[allow(missing_copy_implementations)]\npub struct RustBox {\n \/\/ We only bother to redirect stderr for the moment, since it's used for panic!\n _stderr: Option<Hold>,\n}\n\n\/\/ Termbox is not thread safe\nimpl !Send for RustBox {}\n\n#[derive(Clone, Copy,Debug)]\npub struct InitOptions {\n \/\/\/ Use this option to initialize with a specific input mode\n \/\/\/\n \/\/\/ See InputMode enum for details on the variants.\n pub input_mode: InputMode,\n\n \/\/\/ Use this option to automatically buffer stderr while RustBox is running. It will be\n \/\/\/ written when RustBox exits.\n \/\/\/\n \/\/\/ This option uses a nonblocking OS pipe to buffer stderr output. This means that if the\n \/\/\/ pipe fills up, subsequent writes will fail until RustBox exits. 
If this is a concern for\n \/\/\/ your program, don't use RustBox's default pipe-based redirection; instead, redirect stderr\n \/\/\/ to a log file or another process that is capable of handling it better.\n pub buffer_stderr: bool,\n}\n\nimpl Default for InitOptions {\n fn default() -> Self {\n InitOptions {\n input_mode: InputMode::Current,\n buffer_stderr: false,\n }\n }\n}\n\nimpl RustBox {\n \/\/\/ Initialize rustbox.\n \/\/\/\n \/\/\/ For the default options, you can use:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::RustBox;\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(Default::default());\n \/\/\/ ```\n \/\/\/\n \/\/\/ Otherwise, you can specify:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::{RustBox, InitOptions};\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(InitOptions { input_mode: rustbox::InputMode::Esc, ..Default::default() });\n \/\/\/ ```\n pub fn init(opts: InitOptions) -> Result<RustBox, InitError> {\n let stderr = if opts.buffer_stderr {\n Some(try!(Hold::stderr().map_err(|e| InitError::BufferStderrFailed(e))))\n } else {\n None\n };\n\n \/\/ Create the RustBox.\n let rb = unsafe { match termbox::tb_init() {\n 0 => RustBox {\n _stderr: stderr,\n },\n res => {\n return Err(InitError::TermBox(FromPrimitive::from_isize(res as isize)))\n }\n }};\n match opts.input_mode {\n InputMode::Current => (),\n _ => rb.set_input_mode(opts.input_mode),\n }\n Ok(rb)\n }\n\n pub fn width(&self) -> usize {\n unsafe { termbox::tb_width() as usize }\n }\n\n pub fn height(&self) -> usize {\n unsafe { termbox::tb_height() as usize }\n }\n\n pub fn clear(&self) {\n unsafe { termbox::tb_clear() }\n }\n\n pub fn present(&self) {\n unsafe { termbox::tb_present() }\n }\n\n pub fn set_cursor(&self, x: isize, y: isize) {\n unsafe { termbox::tb_set_cursor(x as c_int, y as c_int) }\n }\n\n pub unsafe fn change_cell(&self, x: usize, y: usize, ch: u32, fg: u16, bg: u16) {\n termbox::tb_change_cell(x as c_int, y as c_int, ch, fg, bg)\n }\n\n pub fn print(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, s: &str) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n for (i, ch) in s.chars().enumerate() {\n unsafe {\n self.change_cell(x+i, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n }\n\n pub fn print_char(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, ch: char) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n unsafe {\n self.change_cell(x, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n\n pub fn poll_event(&self, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_poll_event(&ev as *const RawEvent)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn peek_event(&self, timeout: Duration, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_peek_event(&ev as *const RawEvent, timeout.num_milliseconds() as c_int)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn set_input_mode(&self, mode: InputMode) {\n unsafe {\n termbox::tb_select_input_mode(mode as c_int);\n }\n }\n}\n\nimpl Drop for RustBox {\n fn drop(&mut self) {\n \/\/ Since only one instance of the RustBox is ever accessible, we should not\n \/\/ need to do this atomically.\n \/\/ Note: we should definitely have RUSTBOX_RUNNING = true here.\n unsafe {\n termbox::tb_shutdown();\n }\n }\n}\n<commit_msg>Add mouse input 
modes<commit_after>#![feature(libc)]\n#![feature(std_misc)]\n#![feature(optin_builtin_traits)]\n\nextern crate gag;\nextern crate libc;\nextern crate num;\n#[macro_use] extern crate enum_primitive;\nextern crate termbox_sys as termbox;\n#[macro_use] extern crate bitflags;\n\npub use self::style::{Style, RB_BOLD, RB_UNDERLINE, RB_REVERSE, RB_NORMAL};\n\nuse std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::char;\nuse std::time::duration::Duration;\nuse std::default::Default;\n\nuse num::FromPrimitive;\nuse termbox::RawEvent;\nuse libc::c_int;\nuse gag::Hold;\n\npub mod keyboard;\npub mod mouse;\n\npub use keyboard::Key;\npub use mouse::Mouse;\n\n#[derive(Clone, Copy)]\npub enum Event {\n KeyEventRaw(u8, u16, u32),\n KeyEvent(Option<Key>),\n ResizeEvent(i32, i32),\n MouseEvent(Mouse, i32, i32),\n NoEvent\n}\n\n#[derive(Clone, Copy, Debug)]\npub enum InputMode {\n Current = 0x00,\n\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ ESC sequence => ESC means TB_KEY_ESC\n Esc = 0x01,\n \/\/\/ When ESC sequence is in the buffer and it doesn't match any known\n \/\/\/ sequence => ESC enables TB_MOD_ALT modifier for the next keyboard event.\n Alt = 0x02,\n \/\/\/ Same as `Esc` but enables mouse events\n EscMouse = 0x05,\n \/\/\/ Same as `Alt` but enables mouse events\n AltMouse = 0x06\n}\n\n#[derive(Clone, Copy, PartialEq)]\n#[repr(C,u16)]\npub enum Color {\n Default = 0x00,\n Black = 0x01,\n Red = 0x02,\n Green = 0x03,\n Yellow = 0x04,\n Blue = 0x05,\n Magenta = 0x06,\n Cyan = 0x07,\n White = 0x08,\n}\n\nmod style {\n bitflags! {\n #[repr(C)]\n flags Style: u16 {\n const TB_NORMAL_COLOR = 0x000F,\n const RB_BOLD = 0x0100,\n const RB_UNDERLINE = 0x0200,\n const RB_REVERSE = 0x0400,\n const RB_NORMAL = 0x0000,\n const TB_ATTRIB = RB_BOLD.bits | RB_UNDERLINE.bits | RB_REVERSE.bits,\n }\n }\n\n impl Style {\n pub fn from_color(color: super::Color) -> Style {\n Style { bits: color as u16 & TB_NORMAL_COLOR.bits }\n }\n }\n}\n\nconst NIL_RAW_EVENT: RawEvent = RawEvent { etype: 0, emod: 0, key: 0, ch: 0, w: 0, h: 0, x: 0, y: 0 };\n\n\/\/ FIXME: Rust doesn't support this enum representation.\n\/\/ #[derive(Copy,FromPrimitive,Debug)]\n\/\/ #[repr(C,int)]\n\/\/ pub enum EventErrorKind {\n\/\/ Error = -1,\n\/\/ }\n\/\/ pub type EventError = Option<EventErrorKind>;\n#[allow(non_snake_case)]\npub mod EventErrorKind {\n #[derive(Clone, Copy,Debug)]\n pub struct Error;\n}\n\npub type EventError = Option<EventErrorKind::Error>;\n\npub type EventResult<T> = Result<T, EventError>;\n\n\/\/\/ Unpack a RawEvent to an Event\n\/\/\/\n\/\/\/ if the `raw` parameter is true, then the Event variant will be the raw\n\/\/\/ representation of the event.\n\/\/\/ for instance KeyEventRaw instead of KeyEvent\n\/\/\/\n\/\/\/ This is useful if you want to interpret the raw event data yourself, rather\n\/\/\/ than having rustbox translate it to its own representation.\nfn unpack_event(ev_type: c_int, ev: &RawEvent, raw: bool) -> EventResult<Event> {\n match ev_type {\n 0 => Ok(Event::NoEvent),\n 1 => Ok(\n if raw {\n Event::KeyEventRaw(ev.emod, ev.key, ev.ch)\n } else {\n let k = match ev.key {\n 0 => char::from_u32(ev.ch).map(|c| Key::Char(c)),\n a => Key::from_code(a),\n };\n Event::KeyEvent(k)\n }),\n 2 => Ok(Event::ResizeEvent(ev.w, ev.h)),\n 3 => {\n let mouse = Mouse::from_code(ev.key).unwrap_or(Mouse::Left);\n Ok(Event::MouseEvent(mouse, ev.x, ev.y))\n }\n \/\/ FIXME: Rust doesn't support this error representation\n \/\/ res => FromPrimitive::from_int(res as isize),\n -1 => 
Err(Some(EventErrorKind::Error)),\n _ => Err(None)\n }\n}\n\nenum_from_primitive! {\n#[derive(Clone, Copy, Debug)]\n#[repr(C,isize)]\npub enum InitErrorKind {\n UnsupportedTerminal = -1,\n FailedToOpenTty = -2,\n PipeTrapError = -3,\n}\n}\n\n#[derive(Debug)]\npub enum InitError {\n BufferStderrFailed(io::Error),\n AlreadyOpen,\n TermBox(Option<InitErrorKind>),\n}\n\nimpl fmt::Display for InitError {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.description())\n }\n}\n\nimpl Error for InitError {\n fn description(&self) -> &str {\n match *self {\n InitError::BufferStderrFailed(_) => \"Could not redirect stderr\",\n InitError::AlreadyOpen => \"RustBox is already open\",\n InitError::TermBox(e) => e.map_or(\"Unexpected TermBox return code\", |e| match e {\n InitErrorKind::UnsupportedTerminal => \"Unsupported terminal\",\n InitErrorKind::FailedToOpenTty => \"Failed to open TTY\",\n InitErrorKind::PipeTrapError => \"Pipe trap error\",\n }),\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n match *self {\n InitError::BufferStderrFailed(ref e) => Some(e),\n _ => None\n }\n }\n}\n\n#[allow(missing_copy_implementations)]\npub struct RustBox {\n \/\/ We only bother to redirect stderr for the moment, since it's used for panic!\n _stderr: Option<Hold>,\n}\n\n\/\/ Termbox is not thread safe\nimpl !Send for RustBox {}\n\n#[derive(Clone, Copy,Debug)]\npub struct InitOptions {\n \/\/\/ Use this option to initialize with a specific input mode\n \/\/\/\n \/\/\/ See InputMode enum for details on the variants.\n pub input_mode: InputMode,\n\n \/\/\/ Use this option to automatically buffer stderr while RustBox is running. It will be\n \/\/\/ written when RustBox exits.\n \/\/\/\n \/\/\/ This option uses a nonblocking OS pipe to buffer stderr output. This means that if the\n \/\/\/ pipe fills up, subsequent writes will fail until RustBox exits. 
If this is a concern for\n \/\/\/ your program, don't use RustBox's default pipe-based redirection; instead, redirect stderr\n \/\/\/ to a log file or another process that is capable of handling it better.\n pub buffer_stderr: bool,\n}\n\nimpl Default for InitOptions {\n fn default() -> Self {\n InitOptions {\n input_mode: InputMode::Current,\n buffer_stderr: false,\n }\n }\n}\n\nimpl RustBox {\n \/\/\/ Initialize rustbox.\n \/\/\/\n \/\/\/ For the default options, you can use:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::RustBox;\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(Default::default());\n \/\/\/ ```\n \/\/\/\n \/\/\/ Otherwise, you can specify:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use rustbox::{RustBox, InitOptions};\n \/\/\/ use std::default::Default;\n \/\/\/ let rb = RustBox::init(InitOptions { input_mode: rustbox::InputMode::Esc, ..Default::default() });\n \/\/\/ ```\n pub fn init(opts: InitOptions) -> Result<RustBox, InitError> {\n let stderr = if opts.buffer_stderr {\n Some(try!(Hold::stderr().map_err(|e| InitError::BufferStderrFailed(e))))\n } else {\n None\n };\n\n \/\/ Create the RustBox.\n let rb = unsafe { match termbox::tb_init() {\n 0 => RustBox {\n _stderr: stderr,\n },\n res => {\n return Err(InitError::TermBox(FromPrimitive::from_isize(res as isize)))\n }\n }};\n match opts.input_mode {\n InputMode::Current => (),\n _ => rb.set_input_mode(opts.input_mode),\n }\n Ok(rb)\n }\n\n pub fn width(&self) -> usize {\n unsafe { termbox::tb_width() as usize }\n }\n\n pub fn height(&self) -> usize {\n unsafe { termbox::tb_height() as usize }\n }\n\n pub fn clear(&self) {\n unsafe { termbox::tb_clear() }\n }\n\n pub fn present(&self) {\n unsafe { termbox::tb_present() }\n }\n\n pub fn set_cursor(&self, x: isize, y: isize) {\n unsafe { termbox::tb_set_cursor(x as c_int, y as c_int) }\n }\n\n pub unsafe fn change_cell(&self, x: usize, y: usize, ch: u32, fg: u16, bg: u16) {\n termbox::tb_change_cell(x as c_int, y as c_int, ch, fg, bg)\n }\n\n pub fn print(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, s: &str) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n for (i, ch) in s.chars().enumerate() {\n unsafe {\n self.change_cell(x+i, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n }\n\n pub fn print_char(&self, x: usize, y: usize, sty: Style, fg: Color, bg: Color, ch: char) {\n let fg = Style::from_color(fg) | (sty & style::TB_ATTRIB);\n let bg = Style::from_color(bg);\n unsafe {\n self.change_cell(x, y, ch as u32, fg.bits(), bg.bits());\n }\n }\n\n pub fn poll_event(&self, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_poll_event(&ev as *const RawEvent)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn peek_event(&self, timeout: Duration, raw: bool) -> EventResult<Event> {\n let ev = NIL_RAW_EVENT;\n let rc = unsafe {\n termbox::tb_peek_event(&ev as *const RawEvent, timeout.num_milliseconds() as c_int)\n };\n unpack_event(rc, &ev, raw)\n }\n\n pub fn set_input_mode(&self, mode: InputMode) {\n unsafe {\n termbox::tb_select_input_mode(mode as c_int);\n }\n }\n}\n\nimpl Drop for RustBox {\n fn drop(&mut self) {\n \/\/ Since only one instance of the RustBox is ever accessible, we should not\n \/\/ need to do this atomically.\n \/\/ Note: we should definitely have RUSTBOX_RUNNING = true here.\n unsafe {\n termbox::tb_shutdown();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Respect sigterm<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>Add Day 17 solution<commit_after>\/\/ advent17.rs\n\/\/ eggnog perfect bin packing\n\nuse std::io;\n\nfn main() {\n let mut containers: Vec<u32> = Vec::new();\n loop {\n let mut input = String::new();\n let result = io::stdin().read_line(&mut input);\n match result {\n Ok(byte_count) => if byte_count == 0 { break; },\n Err(_) => {\n println!(\"error reading from stdin\");\n break;\n }\n }\n \n \/\/ Parse container sizes from input\n if let Ok(x) = input.trim().parse::<u32>() {\n containers.push(x);\n }\n }\n\n println!(\"exact fits: {}\", count_exact_fits(&containers, 150));\n println!(\"min exact fits: {}\", count_min_exact_fits(&containers, 150));\n\n}\n\n\/\/ check if we can exactly get target size from combination of bins\nfn is_exact_fit(bins: &[u32], bit_mask: u32, target: u32) -> bool {\n let total = bins.iter().enumerate()\n .fold(0, |sum, (i, bin)| \n if bit_mask & (1 << i) == 0 { \n sum \n } else { \n sum + bin \n });\n total == target\n}\n\nfn count_exact_fits(bins: &[u32], target: u32) -> usize {\n \/\/ because I'm storing a bit mask in a u32\n assert!(bins.len() <= 32);\n\n (1u32..(1 << bins.len())).filter(|x| is_exact_fit(bins, *x, target)).count()\n}\n\n\n#[test]\nfn test_is_exact_fit() {\n let bins = [20, 15, 10, 5, 5];\n assert!(is_exact_fit(&bins, 0b0_0110, 25));\n assert!(!is_exact_fit(&bins, 0b1_0110, 25));\n}\n\n#[test]\nfn test_count_exact_fits() {\n let bins = [20, 15, 10, 5, 5];\n\n assert_eq!(4, count_exact_fits(&bins, 25));\n}\n\n\/\/ part 2\nfn find_exact_fits(bins: &[u32], target: u32) -> Vec<u32> {\n \/\/ because I'm storing a bit mask in a u32\n assert!(bins.len() <= 32);\n\n (1u32..(1 << bins.len())).filter(|x| is_exact_fit(bins, *x, target)).collect()\n}\n\nfn count_bits(x: u32) -> u32 {\n let mut total = 0;\n let mut val = x;\n\n while val > 0 {\n if (val & 1) != 0 {\n total += 1;\n }\n val >>= 1;\n }\n\n total\n}\n\nfn count_min_exact_fits(bins: &[u32], target: u32) -> usize {\n let fit_bits: Vec<u32> = find_exact_fits(bins, target).iter().map(|x| count_bits(*x)).collect();\n let min_bits = fit_bits.iter().min().unwrap();\n \n fit_bits.iter().filter(|x| *x == min_bits).count()\n}\n\n#[test]\nfn test_find_exact_fits() {\n let bins = [20, 15, 10, 5, 5];\n let expected_result: Vec<u32> = vec![0b0_0110, 0b0_1001, 0b1_0001, 0b1_1010];\n\n assert_eq!(expected_result, find_exact_fits(&bins, 25));\n}\n\n#[test]\nfn test_count_min_exact_fits() {\n let bins = [20, 15, 10, 5, 5];\n\n assert_eq!(3, count_min_exact_fits(&bins, 25));\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\nextern crate devicemapper;\nextern crate clap;\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\nextern crate libc;\n\nextern crate custom_derive;\nextern crate enum_derive;\n\n#[cfg(test)]\nextern crate quickcheck;\n\nuse std::io::Write;\nuse std::env;\nuse std::error::Error;\nuse std::rc::Rc;\nuse std::cell::RefCell;\nuse std::process::exit;\n\nuse clap::{App, Arg};\nuse log::LogLevelFilter;\nuse env_logger::LogBuilder;\nuse dbus::WatchEvent;\n\nuse libstratis::engine::Engine;\nuse libstratis::engine::sim_engine::SimEngine;\nuse libstratis::engine::strat_engine::StratEngine;\nuse libstratis::stratis::VERSION;\nuse libstratis::types::{StratisResult, StratisError};\n\nfn write_err(err: StratisError) -> StratisResult<()> {\n let mut out = term::stderr().expect(\"could not get stderr\");\n\n try!(out.fg(term::color::RED));\n try!(writeln!(out, \"{}\", err.description()));\n try!(out.reset());\n Ok(())\n}\n\n\/\/\/ If writing a program error to stderr fails, panic.\nfn write_or_panic(err: StratisError) -> () {\n if let Err(e) = write_err(err) {\n panic!(\"Unable to write to stderr: {}\", e)\n }\n}\n\nfn run() -> StratisResult<()> {\n\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"))\n .arg(Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"))\n .get_matches();\n\n let mut builder = LogBuilder::new();\n if matches.is_present(\"debug\") {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else {\n if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n }\n };\n\n builder\n .init()\n .expect(\"This is the first and only initialization of the logger; it must succeed.\");\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::new()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::new()))\n }\n };\n\n let (dbus_conn, mut tree, dbus_context) = libstratis::dbus_api::connect(engine.clone())\n .expect(\"Could not connect to D-Bus\");\n\n \/\/ Get a list of fds to poll for\n let mut fds: Vec<_> = dbus_conn\n .watch_fds()\n .iter()\n .map(|w| w.to_pollfd())\n .collect();\n\n loop {\n \/\/ Poll them with a 10 s timeout\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, 10000) };\n assert!(r >= 0);\n\n \/\/ And handle incoming events\n for pfd in fds.iter().filter(|pfd| pfd.revents != 0) {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) = libstratis::dbus_api::handle(&dbus_conn,\n item,\n &mut tree,\n &dbus_context) {\n write_or_panic(r);\n }\n }\n }\n\n \/\/ Ask the engine to check its pools\n engine.borrow_mut().check()\n }\n}\n\nfn main() {\n let error_code = match run() {\n Ok(_) => 0,\n Err(err) => {\n write_or_panic(err);\n 1\n }\n };\n exit(error_code);\n}\n<commit_msg>Do not expect() the uncertain<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\nextern crate devicemapper;\nextern crate clap;\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\nextern crate libc;\n\nextern crate custom_derive;\nextern crate enum_derive;\n\n#[cfg(test)]\nextern crate quickcheck;\n\nuse std::io::Write;\nuse std::env;\nuse std::error::Error;\nuse std::rc::Rc;\nuse std::cell::RefCell;\nuse std::process::exit;\n\nuse clap::{App, Arg};\nuse log::LogLevelFilter;\nuse env_logger::LogBuilder;\nuse dbus::WatchEvent;\n\nuse libstratis::engine::Engine;\nuse libstratis::engine::sim_engine::SimEngine;\nuse libstratis::engine::strat_engine::StratEngine;\nuse libstratis::stratis::VERSION;\nuse libstratis::types::{StratisResult, StratisError};\n\nfn write_err(err: StratisError) -> StratisResult<()> {\n let mut out = term::stderr().expect(\"could not get stderr\");\n\n try!(out.fg(term::color::RED));\n try!(writeln!(out, \"{}\", err.description()));\n try!(out.reset());\n Ok(())\n}\n\n\/\/\/ If writing a program error to stderr fails, panic.\nfn write_or_panic(err: StratisError) -> () {\n if let Err(e) = write_err(err) {\n panic!(\"Unable to write to stderr: {}\", e)\n }\n}\n\nfn run() -> StratisResult<()> {\n\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"))\n .arg(Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"))\n .get_matches();\n\n let mut builder = LogBuilder::new();\n if matches.is_present(\"debug\") {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else {\n if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n }\n };\n\n builder\n .init()\n .expect(\"This is the first and only initialization of the logger; it must succeed.\");\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::new()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::new()))\n }\n };\n\n let (dbus_conn, mut tree, dbus_context) = try!(libstratis::dbus_api::connect(engine.clone()));\n\n \/\/ Get a list of fds to poll for\n let mut fds: Vec<_> = dbus_conn\n .watch_fds()\n .iter()\n .map(|w| w.to_pollfd())\n .collect();\n\n loop {\n \/\/ Poll them with a 10 s timeout\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, 10000) };\n assert!(r >= 0);\n\n \/\/ And handle incoming events\n for pfd in fds.iter().filter(|pfd| pfd.revents != 0) {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) = libstratis::dbus_api::handle(&dbus_conn,\n item,\n &mut tree,\n &dbus_context) {\n write_or_panic(r);\n }\n }\n }\n\n \/\/ Ask the engine to check its pools\n engine.borrow_mut().check()\n }\n}\n\nfn main() {\n let error_code = match run() {\n Ok(_) => 0,\n Err(err) => {\n write_or_panic(err);\n 1\n }\n };\n exit(error_code);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\nextern crate devicemapper;\nextern crate clap;\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\nextern crate libc;\n\nextern crate custom_derive;\nextern crate enum_derive;\n\n#[cfg(test)]\nextern crate quickcheck;\n\nuse std::io::Write;\nuse std::env;\nuse std::error::Error;\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\nuse clap::{App, Arg};\nuse log::LogLevelFilter;\nuse env_logger::LogBuilder;\nuse dbus::WatchEvent;\n\nuse libstratis::engine::Engine;\nuse libstratis::engine::sim_engine::SimEngine;\nuse libstratis::engine::strat_engine::StratEngine;\nuse libstratis::stratis::VERSION;\nuse libstratis::types::{StratisResult, StratisError};\n\nfn write_err(err: StratisError) -> StratisResult<()> {\n let mut out = term::stderr().expect(\"could not get stderr\");\n\n try!(out.fg(term::color::RED));\n try!(writeln!(out, \"{}\", err.description()));\n try!(out.reset());\n Ok(())\n}\n\nfn main() {\n\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"))\n .arg(Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"))\n .get_matches();\n\n let mut builder = LogBuilder::new();\n if matches.is_present(\"debug\") {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else {\n if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n }\n };\n\n builder\n .init()\n .expect(\"This is the first and only initialization of the logger; it must succeed.\");\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::new()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::new()))\n }\n };\n\n let (dbus_conn, mut tree, dbus_context) = libstratis::dbus_api::connect(engine.clone())\n .expect(\"Could not connect to D-Bus\");\n\n \/\/ Get a list of fds to poll for\n let mut fds: Vec<_> = dbus_conn\n .watch_fds()\n .iter()\n .map(|w| w.to_pollfd())\n .collect();\n\n loop {\n \/\/ Poll them with a 10 s timeout\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, 10000) };\n assert!(r >= 0);\n\n \/\/ And handle incoming events\n for pfd in fds.iter().filter(|pfd| pfd.revents != 0) {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) = libstratis::dbus_api::handle(&dbus_conn,\n item,\n &mut tree,\n &dbus_context) {\n if let Err(e) = write_err(r) {\n panic!(\"Unable to write to stderr: {}\", e)\n }\n }\n }\n }\n\n \/\/ Ask the engine to check its pools\n engine.borrow_mut().check()\n }\n}\n<commit_msg>Add a write_or_panic() method<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nextern crate libstratis;\n#[macro_use]\nextern crate log;\nextern crate env_logger;\nextern crate devicemapper;\nextern crate clap;\nextern crate nix;\nextern crate crc;\nextern crate byteorder;\nextern crate uuid;\nextern crate time;\nextern crate dbus;\nextern crate term;\nextern crate rand;\nextern crate serde;\nextern crate libc;\n\nextern crate custom_derive;\nextern crate enum_derive;\n\n#[cfg(test)]\nextern crate quickcheck;\n\nuse std::io::Write;\nuse std::env;\nuse std::error::Error;\nuse std::rc::Rc;\nuse std::cell::RefCell;\n\nuse clap::{App, Arg};\nuse log::LogLevelFilter;\nuse env_logger::LogBuilder;\nuse dbus::WatchEvent;\n\nuse libstratis::engine::Engine;\nuse libstratis::engine::sim_engine::SimEngine;\nuse libstratis::engine::strat_engine::StratEngine;\nuse libstratis::stratis::VERSION;\nuse libstratis::types::{StratisResult, StratisError};\n\nfn write_err(err: StratisError) -> StratisResult<()> {\n let mut out = term::stderr().expect(\"could not get stderr\");\n\n try!(out.fg(term::color::RED));\n try!(writeln!(out, \"{}\", err.description()));\n try!(out.reset());\n Ok(())\n}\n\n\/\/\/ If writing a program error to stderr fails, panic.\nfn write_or_panic(err: StratisError) -> () {\n if let Err(e) = write_err(err) {\n panic!(\"Unable to write to stderr: {}\", e)\n }\n}\n\nfn main() {\n\n let matches = App::new(\"stratis\")\n .version(VERSION)\n .about(\"Stratis storage management\")\n .arg(Arg::with_name(\"debug\")\n .long(\"debug\")\n .help(\"Print additional output for debugging\"))\n .arg(Arg::with_name(\"sim\")\n .long(\"sim\")\n .help(\"Use simulator engine\"))\n .get_matches();\n\n let mut builder = LogBuilder::new();\n if matches.is_present(\"debug\") {\n builder.filter(Some(\"stratisd\"), LogLevelFilter::Debug);\n builder.filter(Some(\"libstratis\"), LogLevelFilter::Debug);\n } else {\n if let Ok(s) = env::var(\"RUST_LOG\") {\n builder.parse(&s);\n }\n };\n\n builder\n .init()\n .expect(\"This is the first and only initialization of the logger; it must succeed.\");\n\n let engine: Rc<RefCell<Engine>> = {\n if matches.is_present(\"sim\") {\n info!(\"Using SimEngine\");\n Rc::new(RefCell::new(SimEngine::new()))\n } else {\n info!(\"Using StratEngine\");\n Rc::new(RefCell::new(StratEngine::new()))\n }\n };\n\n let (dbus_conn, mut tree, dbus_context) = libstratis::dbus_api::connect(engine.clone())\n .expect(\"Could not connect to D-Bus\");\n\n \/\/ Get a list of fds to poll for\n let mut fds: Vec<_> = dbus_conn\n .watch_fds()\n .iter()\n .map(|w| w.to_pollfd())\n .collect();\n\n loop {\n \/\/ Poll them with a 10 s timeout\n let r = unsafe { libc::poll(fds.as_mut_ptr(), fds.len() as libc::c_ulong, 10000) };\n assert!(r >= 0);\n\n \/\/ And handle incoming events\n for pfd in fds.iter().filter(|pfd| pfd.revents != 0) {\n for item in dbus_conn.watch_handle(pfd.fd, WatchEvent::from_revents(pfd.revents)) {\n if let Err(r) = libstratis::dbus_api::handle(&dbus_conn,\n item,\n &mut tree,\n &dbus_context) {\n write_or_panic(r);\n }\n }\n }\n\n \/\/ Ask the engine to check its pools\n engine.borrow_mut().check()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>taskbar<commit_after>extern crate x11;\n\nuse x11::xlib;\nuse std::mem;\nuse std::slice::Iter;\nuse super::super::libx;\npub struct TaskBar {\n context: libx::Context,\n id: xlib::Window,\n height: u32,\n workspaces: Vec<char>,\n}\n\nimpl TaskBar {\n pub fn new(context: libx::Context, height: 
u32, position: i32) -> TaskBar {\n let pid = context.root;\n let attrs = libx::get_window_attributes(context, pid);\n let y: i32 = if position > 0 {\n attrs.y\n }else{\n attrs.y + attrs.height - height as i32\n };\n let id = libx::create_window(context, pid, attrs.x, y,\n attrs.width as u32,\n height);\n\n \/\/ attributes\n let mut attrs: xlib::XSetWindowAttributes = unsafe { mem::zeroed() };\n attrs.override_redirect = 1;\n libx::set_window_attributes(context, id, xlib::CWOverrideRedirect, attrs);\n\n \/\/ inputs\n let mask = xlib::ButtonPressMask | xlib::ExposureMask;\n libx::select_input(context, id, mask);\n libx::map_window(context, id);\n TaskBar {\n context: context,\n id: id,\n height: height,\n workspaces: Vec::new()\n }\n }\n\n pub fn load(&mut self, keys: Vec<char>) {\n self.workspaces = keys;\n }\n\n pub fn draw_workspaces(&mut self) {\n let context = self.context;\n let gc = context.gc;\n let display = context.display;\n unsafe{\n xlib::XSetBackground(display, gc,\n context.focus_bg);\n xlib::XSetForeground(display, gc,\n context.focus_fg);\n\n }\n for (i, v) in self.workspaces.iter().enumerate() {\n let x = (i as u32 * (self.height + 1)) as i32 + 1;\n let y = 0;\n let width = self.height - 2;\n let height = self.height - 2;\n unsafe{\n xlib::XFillRectangle(display, self.id, gc,\n x, y,\n width, height);\n }\n }\n }\n\n pub fn handle(&mut self, e: &xlib::XEvent) {\n let t = e.get_type();\n match t {\n xlib::Expose => {\n let event: xlib::XExposeEvent = From::from(*e);\n if event.window == self.id {\n self.draw_workspaces();\n }\n }\n _ => {}\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Exercise 7.5<commit_after>\/\/\/ Exercise 7.5: Use the typedef facility of C to define a new data type Exitfunc for an exit\n\/\/\/ handler. Redo the prototype for atexit using this data type.\n\nextern crate libc;\n#[macro_use(cstr)]\nextern crate apue;\nextern crate errno;\n\nuse apue::LibcResult;\nuse libc::{atexit, printf};\n\ntype Exitfunc = extern fn();\n\nfn my_atexit(f:Exitfunc) -> Option<i32> {\n unsafe {\n atexit(f).to_option()\n }\n}\n\nextern \"C\" fn my_exit1() {\n unsafe { printf(cstr!(\"first exit handler\\n\")) };\n}\n\nextern \"C\" fn my_exit2() {\n unsafe { printf(cstr!(\"second exit handler\\n\")) };\n}\n\nfn main() {\n my_atexit(my_exit2).expect(&format!(\"can't register my_exit2: {}\", errno::errno()));\n my_atexit(my_exit1).expect(&format!(\"can't register my_exit1: {}\", errno::errno()));\n my_atexit(my_exit1).expect(&format!(\"can't register my_exit1: {}\", errno::errno()));\n println!(\"main is done\");\n}<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::document::{AbstractDocument, Document, WrappableDocument, HTML};\nuse dom::bindings::codegen::HTMLDocumentBinding;\nuse dom::bindings::utils::{DOMString, ErrorResult, null_string};\nuse dom::bindings::utils::{CacheableWrapper, BindingObject, WrapperCache};\nuse dom::htmlcollection::HTMLCollection;\nuse dom::node::{AbstractNode, ScriptView};\nuse dom::window::Window;\n\nuse js::jsapi::{JSObject, JSContext};\n\nuse servo_util::tree::TreeUtils;\n\nuse std::libc;\nuse std::ptr;\n\npub struct HTMLDocument {\n parent: Document\n}\n\nimpl HTMLDocument {\n pub fn new(root: AbstractNode<ScriptView>, window: Option<@mut Window>) -> AbstractDocument {\n let doc = @mut HTMLDocument {\n parent: Document::new(root, window, HTML)\n };\n\n let compartment = unsafe { (*window.get_ref().page).js_info.get_ref().js_compartment };\n AbstractDocument::as_abstract(compartment.cx.ptr, doc)\n }\n\n fn get_scope_and_cx(&self) -> (*JSObject, *JSContext) {\n let win = self.parent.window.get_ref();\n let cx = unsafe {(*win.page).js_info.get_ref().js_compartment.cx.ptr};\n let cache = win.get_wrappercache();\n let scope = cache.get_wrapper();\n (scope, cx)\n }\n}\n\nimpl WrappableDocument for HTMLDocument {\n pub fn init_wrapper(@mut self, cx: *JSContext) {\n self.wrap_object_shared(cx, ptr::null()); \/\/XXXjdm a proper scope would be nice\n }\n}\n\nimpl HTMLDocument {\n pub fn GetDomain(&self, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn SetDomain(&self, _domain: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn GetCookie(&self, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn SetCookie(&self, _cookie: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn GetHead(&self) -> Option<AbstractNode<ScriptView>> {\n None\n }\n\n pub fn Images(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"img\")\n }\n\n pub fn Embeds(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"embed\")\n }\n\n pub fn Plugins(&self) -> @mut HTMLCollection {\n self.Embeds()\n }\n\n pub fn Links(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"link\")\n }\n\n pub fn Forms(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"form\")\n }\n\n pub fn Scripts(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"script\")\n }\n\n pub fn Close(&self, _rv: &mut ErrorResult) {\n }\n\n pub fn DesignMode(&self) -> DOMString {\n null_string\n }\n\n pub fn SetDesignMode(&self, _mode: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn ExecCommand(&self, _command_id: &DOMString, _show_ui: bool, _value: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandEnabled(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandIndeterm(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandState(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandSupported(&self, _command_id: &DOMString) -> bool {\n false\n }\n\n pub fn QueryCommandValue(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn FgColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetFgColor(&self, _color: &DOMString) {\n }\n\n pub fn LinkColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetLinkColor(&self, _color: &DOMString) {\n }\n\n pub fn VlinkColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetVlinkColor(&self, _color: &DOMString) {\n }\n\n pub fn AlinkColor(&self) -> 
DOMString {\n null_string\n }\n\n pub fn SetAlinkColor(&self, _color: &DOMString) {\n }\n\n pub fn BgColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetBgColor(&self, _color: &DOMString) {\n }\n\n pub fn Anchors(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"a\")\n }\n\n pub fn Applets(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(~\"applet\")\n }\n\n pub fn Clear(&self) {\n }\n\n pub fn GetAll(&self, _cx: *JSContext, _rv: &mut ErrorResult) -> *libc::c_void {\n ptr::null()\n }\n\n fn createHTMLCollection(&self, elem_name: ~str) -> @mut HTMLCollection {\n let (scope, cx) = self.get_scope_and_cx();\n let mut elements = ~[];\n let _ = for self.parent.root.traverse_preorder |child| {\n if child.is_element() {\n do child.with_imm_element |elem| {\n match elem_name {\n ~\"link\" => {\n if elem.tag_name == ~\"a\" || elem.tag_name == ~\"area\" {\n match elem.get_attr(\"href\") {\n Some(_val) => elements.push(child),\n None() => ()\n }\n } \n }\n _ => {\n if elem.tag_name == elem_name {\n elements.push(child);\n }\n }\n }\n }\n }\n };\n HTMLCollection::new(elements, cx, scope)\n }\n}\n\nimpl CacheableWrapper for HTMLDocument {\n fn get_wrappercache(&mut self) -> &mut WrapperCache {\n self.parent.get_wrappercache()\n }\n\n fn wrap_object_shared(@mut self, cx: *JSContext, scope: *JSObject) -> *JSObject {\n let mut unused = false;\n HTMLDocumentBinding::Wrap(cx, scope, self, &mut unused)\n }\n}\n\nimpl BindingObject for HTMLDocument {\n fn GetParentObject(&self, cx: *JSContext) -> Option<@mut CacheableWrapper> {\n self.parent.GetParentObject(cx)\n }\n}\n\n<commit_msg>Fix some implementations for DOM tree accessors<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::HTMLDocumentBinding;\nuse dom::bindings::utils::{DOMString, ErrorResult, null_string};\nuse dom::bindings::utils::{CacheableWrapper, BindingObject, WrapperCache};\nuse dom::document::{AbstractDocument, Document, WrappableDocument, HTML};\nuse dom::element::Element;\nuse dom::htmlcollection::HTMLCollection;\nuse dom::node::{AbstractNode, ScriptView};\nuse dom::window::Window;\n\nuse js::jsapi::{JSObject, JSContext};\n\nuse servo_util::tree::TreeUtils;\n\nuse std::libc;\nuse std::ptr;\nuse std::str::eq_slice;\n\npub struct HTMLDocument {\n parent: Document\n}\n\nimpl HTMLDocument {\n pub fn new(root: AbstractNode<ScriptView>, window: Option<@mut Window>) -> AbstractDocument {\n let doc = @mut HTMLDocument {\n parent: Document::new(root, window, HTML)\n };\n\n let compartment = unsafe { (*window.get_ref().page).js_info.get_ref().js_compartment };\n AbstractDocument::as_abstract(compartment.cx.ptr, doc)\n }\n\n fn get_scope_and_cx(&self) -> (*JSObject, *JSContext) {\n let win = self.parent.window.get_ref();\n let cx = unsafe {(*win.page).js_info.get_ref().js_compartment.cx.ptr};\n let cache = win.get_wrappercache();\n let scope = cache.get_wrapper();\n (scope, cx)\n }\n}\n\nimpl WrappableDocument for HTMLDocument {\n pub fn init_wrapper(@mut self, cx: *JSContext) {\n self.wrap_object_shared(cx, ptr::null()); \/\/XXXjdm a proper scope would be nice\n }\n}\n\nimpl HTMLDocument {\n pub fn GetDomain(&self, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn SetDomain(&self, _domain: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn GetCookie(&self, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn SetCookie(&self, _cookie: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn GetHead(&self) -> Option<AbstractNode<ScriptView>> {\n None\n }\n\n pub fn Images(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| eq_slice(elem.tag_name, \"img\"))\n }\n\n pub fn Embeds(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| eq_slice(elem.tag_name, \"embed\"))\n }\n\n pub fn Plugins(&self) -> @mut HTMLCollection {\n self.Embeds()\n }\n\n pub fn Links(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| {\n if eq_slice(elem.tag_name, \"a\") || eq_slice(elem.tag_name, \"area\") {\n match elem.get_attr(\"href\") {\n Some(_val) => true,\n None() => false\n }\n }\n else { false }\n })\n }\n\n pub fn Forms(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| eq_slice(elem.tag_name, \"form\"))\n }\n\n pub fn Scripts(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| eq_slice(elem.tag_name, \"script\"))\n }\n\n pub fn Close(&self, _rv: &mut ErrorResult) {\n }\n\n pub fn DesignMode(&self) -> DOMString {\n null_string\n }\n\n pub fn SetDesignMode(&self, _mode: &DOMString, _rv: &mut ErrorResult) {\n }\n\n pub fn ExecCommand(&self, _command_id: &DOMString, _show_ui: bool, _value: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandEnabled(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandIndeterm(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandState(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> bool {\n false\n }\n\n pub fn QueryCommandSupported(&self, _command_id: &DOMString) -> bool {\n false\n }\n\n pub fn QueryCommandValue(&self, _command_id: &DOMString, _rv: &mut ErrorResult) -> DOMString {\n null_string\n }\n\n pub fn FgColor(&self) -> 
DOMString {\n null_string\n }\n\n pub fn SetFgColor(&self, _color: &DOMString) {\n }\n\n pub fn LinkColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetLinkColor(&self, _color: &DOMString) {\n }\n\n pub fn VlinkColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetVlinkColor(&self, _color: &DOMString) {\n }\n\n pub fn AlinkColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetAlinkColor(&self, _color: &DOMString) {\n }\n\n pub fn BgColor(&self) -> DOMString {\n null_string\n }\n\n pub fn SetBgColor(&self, _color: &DOMString) {\n }\n\n pub fn Anchors(&self) -> @mut HTMLCollection {\n self.createHTMLCollection(|elem| {\n if eq_slice(elem.tag_name, \"a\") {\n match elem.get_attr(\"name\") {\n Some(_val) => true,\n None() => false\n }\n }\n else { false }\n })\n }\n\n pub fn Applets(&self) -> @mut HTMLCollection {\n \/\/ FIXME: This should be return OBJECT elements containing applets.\n self.createHTMLCollection(|elem| eq_slice(elem.tag_name, \"applet\"))\n }\n\n pub fn Clear(&self) {\n }\n\n pub fn GetAll(&self, _cx: *JSContext, _rv: &mut ErrorResult) -> *libc::c_void {\n ptr::null()\n }\n\n fn createHTMLCollection(&self, callback: &fn(elem: &Element) -> bool) -> @mut HTMLCollection {\n let (scope, cx) = self.get_scope_and_cx();\n let mut elements = ~[];\n let _ = for self.parent.root.traverse_preorder |child| {\n if child.is_element() {\n do child.with_imm_element |elem| {\n if callback(elem) {\n elements.push(child);\n }\n }\n }\n };\n HTMLCollection::new(elements, cx, scope)\n }\n}\n\nimpl CacheableWrapper for HTMLDocument {\n fn get_wrappercache(&mut self) -> &mut WrapperCache {\n self.parent.get_wrappercache()\n }\n\n fn wrap_object_shared(@mut self, cx: *JSContext, scope: *JSObject) -> *JSObject {\n let mut unused = false;\n HTMLDocumentBinding::Wrap(cx, scope, self, &mut unused)\n }\n}\n\nimpl BindingObject for HTMLDocument {\n fn GetParentObject(&self, cx: *JSContext) -> Option<@mut CacheableWrapper> {\n self.parent.GetParentObject(cx)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for thread locals.<commit_after>#![feature(thread_local)]\n\nuse std::thread;\n\n#[thread_local]\nstatic mut A: u8 = 0;\n#[thread_local]\nstatic mut B: u8 = 0;\nstatic mut C: u8 = 0;\n\nunsafe fn get_a_ref() -> *mut u8 {\n &mut A\n}\n\nfn main() {\n\n unsafe {\n let x = get_a_ref();\n *x = 5;\n assert_eq!(A, 5);\n B = 15;\n C = 25;\n }\n \n thread::spawn(|| {\n unsafe {\n assert_eq!(A, 0);\n assert_eq!(B, 0);\n assert_eq!(C, 25);\n B = 14;\n C = 24;\n let y = get_a_ref();\n assert_eq!(*y, 0);\n *y = 4;\n assert_eq!(A, 4);\n assert_eq!(*get_a_ref(), 4);\n \n }\n }).join().unwrap();\n\n unsafe {\n assert_eq!(*get_a_ref(), 5);\n assert_eq!(A, 5);\n assert_eq!(B, 15);\n assert_eq!(C, 24);\n }\n \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement insert operation.<commit_after>use super::Operation;\nuse buffer::gap_buffer::GapBuffer;\nuse buffer::{Position, Range};\n\npub struct Insert {\n content: String,\n position: Position,\n}\n\nimpl Operation for Insert {\n fn run(&mut self, buffer: &mut GapBuffer) {\n buffer.insert(&self.content, &self.position);\n }\n\n \/\/ We need to calculate the range of the inserted content.\n \/\/ The start of the range corresponds to the cursor position at the time of the insert,\n \/\/ which we've stored. 
Finding the end of the range requires that we dig into the content.\n fn reverse(&mut self, buffer: &mut GapBuffer) {\n \/\/ The line count of the content tells us the line number for the end of the\n \/\/ range (just add the number of new lines to the starting line).\n let line_count = self.content.lines().count();\n let end_line = self.position.line + line_count - 1;\n\n let end_offset = if line_count == 1 {\n \/\/ If there's only one line, the range starts and ends on the same line, and so its\n \/\/ offset needs to take the original insertion location into consideration.\n self.position.offset + self.content.len()\n } else {\n \/\/ If there are multiple lines, the end of the range doesn't\n \/\/ need to consider the original insertion location.\n match self.content.lines().last() {\n Some(line) => line.len(),\n None => return,\n }\n };\n\n \/\/ Now that we have the required info,\n \/\/ build the end position and total range.\n let end_position = Position{\n line: end_line,\n offset: end_offset,\n };\n let range = Range{\n start: self.position.clone(),\n end: end_position,\n };\n\n \/\/ Remove the content we'd previously inserted.\n buffer.delete(&range);\n }\n}\n\npub fn new(content: String, position: Position) -> Insert {\n Insert{ content: content, position: position }\n}\n\n#[cfg(test)]\nmod tests {\n use super::new;\n use buffer::position::Position;\n use buffer::operations::Operation;\n\n #[test]\n fn run_and_reverse_add_and_remove_content_without_newlines_at_cursor_position() {\n \/\/ Set up a buffer with some data.\n let mut buffer = ::buffer::gap_buffer::new(String::new());\n let start_position = Position{ line: 0, offset: 0 };\n buffer.insert(&\"something\", &start_position);\n\n \/\/ Set up a position pointing to the end of the buffer's contents.\n let insert_position = Position{ line: 0, offset: 9 };\n\n \/\/ Create the insert operation and run it.\n let mut insert_operation = super::new(\" else\".to_string(), insert_position);\n insert_operation.run(&mut buffer);\n\n assert_eq!(buffer.to_string(), \"something else\");\n\n insert_operation.reverse(&mut buffer);\n\n assert_eq!(buffer.to_string(), \"something\");\n }\n\n #[test]\n fn run_and_reverse_add_and_remove_content_with_newlines_at_cursor_position() {\n \/\/ Set up a buffer with some data.\n let mut buffer = ::buffer::gap_buffer::new(String::new());\n let start_position = Position{ line: 0, offset: 0 };\n buffer.insert(&\"\\n something\", &start_position);\n\n \/\/ Set up a position pointing to the end of the buffer's contents.\n let insert_position = Position{ line: 1, offset: 10 };\n\n \/\/ Create the insert operation and run it.\n \/\/\n \/\/ NOTE: The newline character ensures that the operation doesn't use a naive\n \/\/ algorithm based purely on the content length.\n let mut insert_operation = super::new(\"\\n else\\n entirely\".to_string(), insert_position);\n insert_operation.run(&mut buffer);\n\n assert_eq!(buffer.to_string(), \"\\n something\\n else\\n entirely\");\n\n insert_operation.reverse(&mut buffer);\n\n assert_eq!(buffer.to_string(), \"\\n something\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change in phone_metadata<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>update<commit_after>fn main(){\n let day = 5;\n match day {\n 6 | 7 | 8 => println!(\"weekend\"),\n 1...5 => println!(\"weekday\"),\n _ => println!(\"FK\"),\n }\n \/\/使用','分开每句\n \/\/ \"|\"用于匹配多个值,如: 5|4 匹配5和4.\n \/\/ \"...\"用于匹配一个范围,如:1...5 匹配1,2,3,4,5.\n \/\/ \"_\"用于匹配其他值,因为match必须覆盖所有可能值.\n\n let x = 
1;\n match x {\n e @ 1...5 => println!(\"x = {}\", e),\n f @ 7 |f @ 8 | f @ 9 => println!(\"x = {}\", f),\n _ => println!(\"Balabala\"),\n }\n \/\/使用@获得|或者...匹配到的值.\n\n let y = 5;\n let mut z = 5;\n match y {\n ref r => println!(\"r = {}\", r),\n }\n match z {\n ref mut mutr => {\n *mutr = 10;\n println!(\"z = {}\", mutr);\n },\n }\n \/\/用ref获得一个引用,可以是mut可以是immut\n\n let a = (0, 2);\n match a {\n \/\/(0, y) => println!(\"x = 0, y = {}\", y),\n (x, 0) => println!(\"x = {}, y = 0\", x),\n (0, 2) => println!(\"x = 0, y = 2\"),\n _ => println!(\"Balabala\"),\n }\n \/\/可以用match匹配元组\n\n \/\/wait more...\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test case for previous commit<commit_after>fn main() {\n let x = 10, y = 20;\n let z = x + y;\n assert (z == 30);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add manual page to env, -h and --help options.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add problem 3 in rust<commit_after>use std::time::SystemTime;\n\nfn is_prime(number: i64) -> bool {\n let limit = (number as f64).sqrt() as i64 + 1;\n let mut factor_exists = false;\n \n for i in 2..limit {\n if number % i == 0 {\n factor_exists = true;\n break;\n }\n }\n\n !factor_exists\n}\n\nfn main() {\n let large_number = 600851475143;\n let limit = (large_number as f64).sqrt() as i64 + 1; \n\n let mut result = 0;\n\n let time_start = SystemTime::now();\n\n for i in (3..limit).rev() {\n if large_number % i == 0 && is_prime(i){\n result = i;\n break;\n }\n }\n\n match time_start.elapsed() {\n Ok(elapsed) => {\n println!(\"Calc duration: {}\", elapsed.as_secs());\n println!(\"Subsec nanos: {}\", elapsed.subsec_nanos());\n }\n Err(e) => {\n println!(\"Error: {:?}\", e);\n }\n }\n\n println!(\"Result: {}\", result);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(environ): Using nix's pread to read header<commit_after><|endoftext|>"} {"text":"<commit_before>use clap::{Arg, ArgGroup, App, SubCommand};\n\nuse libimagentrytag::ui::tag_argument;\nuse libimagentrytag::ui::tag_argument_name;\n\npub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {\n app\n .subcommand(SubCommand::with_name(\"create\")\n .about(\"Create a note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n .takes_value(true)\n .required(true)\n .help(\"Create Note with this name\")\n .value_name(\"NAME\"))\n .arg(Arg::with_name(\"edit\")\n .long(\"edit\")\n .short(\"e\")\n .takes_value(false)\n .required(false)\n .help(\"Edit after creating\"))\n )\n\n .subcommand(SubCommand::with_name(\"delete\")\n .about(\"Delete a Note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n .takes_value(true)\n .required(true)\n .help(\"Delete Note with this name\")\n .value_name(\"NAME\")))\n\n .subcommand(SubCommand::with_name(\"edit\")\n .about(\"Edit a Note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n .takes_value(true)\n .required(true)\n .help(\"Edit Note with this name\")\n .value_name(\"NAME\"))\n\n .arg(tag_argument())\n )\n\n .subcommand(SubCommand::with_name(\"list\")\n .about(\"List Notes\")\n .version(\"0.1\"))\n\n}\n<commit_msg>removed warnings<commit_after>use clap::{Arg, App, SubCommand};\n\nuse libimagentrytag::ui::tag_argument;\n\npub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {\n app\n .subcommand(SubCommand::with_name(\"create\")\n .about(\"Create a note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n 
.takes_value(true)\n .required(true)\n .help(\"Create Note with this name\")\n .value_name(\"NAME\"))\n .arg(Arg::with_name(\"edit\")\n .long(\"edit\")\n .short(\"e\")\n .takes_value(false)\n .required(false)\n .help(\"Edit after creating\"))\n )\n\n .subcommand(SubCommand::with_name(\"delete\")\n .about(\"Delete a Note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n .takes_value(true)\n .required(true)\n .help(\"Delete Note with this name\")\n .value_name(\"NAME\")))\n\n .subcommand(SubCommand::with_name(\"edit\")\n .about(\"Edit a Note\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"name\")\n .long(\"name\")\n .short(\"n\")\n .takes_value(true)\n .required(true)\n .help(\"Edit Note with this name\")\n .value_name(\"NAME\"))\n\n .arg(tag_argument())\n )\n\n .subcommand(SubCommand::with_name(\"list\")\n .about(\"List Notes\")\n .version(\"0.1\"))\n\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagstore;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagerror;\n\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagentrytag::tagable::Tagable;\nuse libimagstore::storeid::build_entry_path;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-store\";\n let version = &version!()[..];\n let about = \"Direct interface to the store. Use with great care!\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n let id = rt.cli().value_of(\"id\").unwrap(); \/\/ enforced by clap\n rt.cli()\n .subcommand_name()\n .map_or_else(\n || {\n let add = rt.cli().values_of(\"add\").map(|o| o.map(String::from));\n let rem = rt.cli().values_of(\"remove\").map(|o| o.map(String::from));\n\n alter(&rt, id, add, rem);\n },\n |name| {\n debug!(\"Call: {}\", name);\n match name {\n \"list\" => list(id, &rt),\n _ => {\n warn!(\"Unknown command\");\n \/\/ More error handling\n },\n };\n });\n}\n\nfn alter<SI: Iterator<Item = String>>(rt: &Runtime, id: &str, add: Option<SI>, rem: Option<SI>) {\n let path = {\n match build_entry_path(rt.store(), id) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n Ok(s) => s,\n }\n };\n debug!(\"path = {:?}\", path);\n\n rt.store()\n \/\/ \"id\" must be present, enforced via clap spec\n .retrieve(path)\n .map(|mut e| {\n add.map(|tags| {\n for tag in tags {\n debug!(\"Adding tag '{:?}'\", tag);\n if let Err(e) = e.add_tag(tag) {\n trace_error(&e);\n }\n }\n }); \/\/ it is okay to ignore a None here\n\n rem.map(|tags| {\n for tag in tags {\n debug!(\"Removing tag '{:?}'\", tag);\n if let Err(e) = e.remove_tag(tag) {\n trace_error(&e);\n }\n }\n }); \/\/ it is okay to ignore a None here\n })\n .map_err(|e| {\n info!(\"No entry.\");\n trace_error(&e);\n })\n .ok();\n}\n\nfn list(id: &str, rt: &Runtime) {\n let path = {\n match build_entry_path(rt.store(), id) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n Ok(s) => s,\n }\n };\n debug!(\"path = {:?}\", path);\n\n let entry = rt.store().retrieve(path.clone());\n if entry.is_err() {\n debug!(\"Could not retrieve '{:?}' => {:?}\", id, path);\n warn!(\"Could not retrieve entry '{}'\", id);\n trace_error(&entry.unwrap_err());\n exit(1);\n }\n let 
entry = entry.unwrap();\n\n let scmd = rt.cli().subcommand_matches(\"list\").unwrap(); \/\/ safe, we checked in main()\n\n let json_out = scmd.is_present(\"json\");\n let line_out = scmd.is_present(\"linewise\");\n let sepp_out = scmd.is_present(\"sep\");\n let mut comm_out = scmd.is_present(\"commasep\");\n\n if !vec![json_out, line_out, comm_out, sepp_out].iter().any(|v| *v) {\n \/\/ None of the flags passed, go to default\n comm_out = true;\n }\n\n let tags = entry.get_tags();\n if tags.is_err() {\n trace_error(&tags.unwrap_err());\n exit(1);\n }\n let tags = tags.unwrap();\n\n if json_out {\n unimplemented!()\n }\n\n if line_out {\n for tag in &tags {\n println!(\"{}\", tag);\n }\n }\n\n if sepp_out {\n let sepp = scmd.value_of(\"sep\").unwrap(); \/\/ we checked before\n println!(\"{}\", tags.join(sepp));\n }\n\n if comm_out {\n println!(\"{}\", tags.join(\", \"));\n }\n}\n\n<commit_msg>imag-tag: Fix commandline extracting by using helpers from libimagentrytag<commit_after>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagstore;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagerror;\n\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagentrytag::tagable::Tagable;\nuse libimagentrytag::tag::Tag;\nuse libimagstore::storeid::build_entry_path;\nuse libimagerror::trace::trace_error;\nuse libimagentrytag::ui::{get_add_tags, get_remove_tags};\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-store\";\n let version = &version!()[..];\n let about = \"Direct interface to the store. Use with great care!\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n let id = rt.cli().value_of(\"id\").unwrap(); \/\/ enforced by clap\n rt.cli()\n .subcommand_name()\n .map_or_else(\n || {\n let add = get_add_tags(rt.cli());\n let rem = get_remove_tags(rt.cli());\n alter(&rt, id, add, rem);\n },\n |name| {\n debug!(\"Call: {}\", name);\n match name {\n \"list\" => list(id, &rt),\n _ => {\n warn!(\"Unknown command\");\n \/\/ More error handling\n },\n };\n });\n}\n\nfn alter(rt: &Runtime, id: &str, add: Option<Vec<Tag>>, rem: Option<Vec<Tag>>) {\n let path = {\n match build_entry_path(rt.store(), id) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n Ok(s) => s,\n }\n };\n debug!(\"path = {:?}\", path);\n\n rt.store()\n \/\/ \"id\" must be present, enforced via clap spec\n .retrieve(path)\n .map(|mut e| {\n add.map(|tags| {\n for tag in tags {\n debug!(\"Adding tag '{:?}'\", tag);\n if let Err(e) = e.add_tag(tag) {\n trace_error(&e);\n }\n }\n }); \/\/ it is okay to ignore a None here\n\n rem.map(|tags| {\n for tag in tags {\n debug!(\"Removing tag '{:?}'\", tag);\n if let Err(e) = e.remove_tag(tag) {\n trace_error(&e);\n }\n }\n }); \/\/ it is okay to ignore a None here\n })\n .map_err(|e| {\n info!(\"No entry.\");\n trace_error(&e);\n })\n .ok();\n}\n\nfn list(id: &str, rt: &Runtime) {\n let path = {\n match build_entry_path(rt.store(), id) {\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n Ok(s) => s,\n }\n };\n debug!(\"path = {:?}\", path);\n\n let entry = rt.store().retrieve(path.clone());\n if entry.is_err() {\n debug!(\"Could not retrieve '{:?}' => {:?}\", id, path);\n warn!(\"Could not retrieve entry '{}'\", id);\n 
trace_error(&entry.unwrap_err());\n exit(1);\n }\n let entry = entry.unwrap();\n\n let scmd = rt.cli().subcommand_matches(\"list\").unwrap(); \/\/ safe, we checked in main()\n\n let json_out = scmd.is_present(\"json\");\n let line_out = scmd.is_present(\"linewise\");\n let sepp_out = scmd.is_present(\"sep\");\n let mut comm_out = scmd.is_present(\"commasep\");\n\n if !vec![json_out, line_out, comm_out, sepp_out].iter().any(|v| *v) {\n \/\/ None of the flags passed, go to default\n comm_out = true;\n }\n\n let tags = entry.get_tags();\n if tags.is_err() {\n trace_error(&tags.unwrap_err());\n exit(1);\n }\n let tags = tags.unwrap();\n\n if json_out {\n unimplemented!()\n }\n\n if line_out {\n for tag in &tags {\n println!(\"{}\", tag);\n }\n }\n\n if sepp_out {\n let sepp = scmd.value_of(\"sep\").unwrap(); \/\/ we checked before\n println!(\"{}\", tags.join(sepp));\n }\n\n if comm_out {\n println!(\"{}\", tags.join(\", \"));\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update PacketHandler and add PacketSide enum.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix read_client_packet missing 2 bytes and add test.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add basic get\/put bench<commit_after>#![feature(test)]\n\nextern crate cask;\nextern crate rand;\nextern crate test;\n\nuse std::fs;\n\nuse cask::CaskOptions;\nuse rand::Rng;\nuse test::Bencher;\n\n#[bench]\nfn get_latency(b: &mut Bencher) {\n let id: String = rand::thread_rng().gen_ascii_chars().take(16).collect();\n let path = format!(\"bench-{}.db\", id);\n\n let cask = CaskOptions::default()\n .compaction_check_frequency(1)\n .max_file_size(50 * 1024 * 1024)\n .sync(false)\n .open(&path)\n .unwrap();\n\n let key = vec![1u8; 512];\n let vec = vec![1u8; 4096];\n\n cask.put(key.clone(), &vec).unwrap();\n\n b.bytes = vec.len() as u64;\n b.iter(|| cask.get(&key).unwrap());\n\n fs::remove_dir_all(path).unwrap();\n}\n\n#[bench]\nfn put_latency(b: &mut Bencher) {\n let id: String = rand::thread_rng().gen_ascii_chars().take(16).collect();\n let path = format!(\"bench-{}.db\", id);\n\n let cask = CaskOptions::default()\n .compaction_check_frequency(1)\n .max_file_size(50 * 1024 * 1024)\n .sync(false)\n .open(&path)\n .unwrap();\n\n let key = vec![1u8; 512];\n let vec = vec![1u8; 4096];\n\n b.bytes = vec.len() as u64;\n b.iter(|| cask.put(key.clone(), &vec).unwrap());\n\n fs::remove_dir_all(path).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix bench<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue 6157<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait OpInt<'a> { fn call<'a>(&'a self, int, int) -> int; }\n\nimpl<'a> OpInt<'a> for 'a |int, int| -> int {\n fn call(&self, a:int, b:int) -> int {\n (*self)(a, b)\n }\n}\n\nfn squarei<'a>(x: int, op: &'a OpInt) -> int { op.call(x, x) }\n\nfn muli(x:int, y:int) -> int { x * y }\n\npub fn main() {\n let f = |x,y| muli(x,y);\n {\n let g = &f;\n let h = g as &OpInt;\n squarei(3, h);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add wireframe Example<commit_after>extern crate bootstrap_rs as bootstrap;\nextern crate gl_util as gl;\n\nuse bootstrap::window::*;\nuse gl::*;\n\nstatic VERTEX_POSITIONS: [f32; 9] = [\n -1.0, -1.0, 0.0,\n 1.0, -1.0, 0.0,\n 0.0, 1.0, 0.0,\n];\n\nstatic VERTEX_INDICES: [u32; 6] = [\n 0, 1,\n 1, 2,\n 2, 0,\n];\n\nfn main() {\n let mut window = Window::new(\"gl-util - wireframe example\");\n\n gl::init();\n let mut vertex_buffer = VertexBuffer::new();\n vertex_buffer.set_data_f32(&VERTEX_POSITIONS[..]);\n vertex_buffer.set_attrib_f32(AttributeLocation::from_index(0), 3, 0, 0);\n\n let mut index_buffer = IndexBuffer::new();\n index_buffer.set_data_u32(&VERTEX_INDICES[..]);\n\n 'outer: loop {\n while let Some(message) = window.next_message() {\n match message {\n Message::Close => break 'outer,\n _ => {},\n }\n\n gl::clear();\n vertex_buffer.draw_elements(DrawMode::Lines, &index_buffer);\n gl::swap_buffers();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adapt cp.rs to ArgParser and fix behavior to POSIX specification<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::error::Fallible;\nuse dom::bindings::error::Error::DataClone;\nuse dom::bindings::global::GlobalRef;\n\nuse js::glue::JS_STRUCTURED_CLONE_VERSION;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_WriteStructuredClone, JS_ClearPendingException};\nuse js::jsapi::JS_ReadStructuredClone;\nuse js::jsval::{JSVal, UndefinedValue};\n\nuse libc::size_t;\nuse std::ptr;\n\npub struct StructuredCloneData {\n data: *mut u64,\n nbytes: size_t,\n}\n\nimpl StructuredCloneData {\n pub fn write(cx: *mut JSContext, message: JSVal)\n -> Fallible<StructuredCloneData> {\n let mut data = ptr::null_mut();\n let mut nbytes = 0;\n let result = unsafe {\n JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,\n ptr::null(), ptr::null_mut())\n };\n if result == 0 {\n unsafe { JS_ClearPendingException(cx); }\n return Err(DataClone);\n }\n Ok(StructuredCloneData {\n data: data,\n nbytes: nbytes,\n })\n }\n\n pub fn read(self, global: GlobalRef) -> JSVal {\n let mut message = UndefinedValue();\n unsafe {\n assert!(JS_ReadStructuredClone(\n global.get_cx(), self.data as *const u64, self.nbytes,\n JS_STRUCTURED_CLONE_VERSION, &mut message,\n ptr::null(), ptr::null_mut()) != 0);\n }\n message\n }\n}\n<commit_msg>auto merge of #4678 : Ms2ger\/servo\/doc-clone, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! This module implements structured cloning, as defined by [HTML]\n\/\/! 
(https:\/\/html.spec.whatwg.org\/multipage\/#safe-passing-of-structured-data).\n\n#![deny(missing_docs)]\n\nuse dom::bindings::error::Fallible;\nuse dom::bindings::error::Error::DataClone;\nuse dom::bindings::global::GlobalRef;\n\nuse js::glue::JS_STRUCTURED_CLONE_VERSION;\nuse js::jsapi::JSContext;\nuse js::jsapi::{JS_WriteStructuredClone, JS_ClearPendingException};\nuse js::jsapi::JS_ReadStructuredClone;\nuse js::jsval::{JSVal, UndefinedValue};\n\nuse libc::size_t;\nuse std::ptr;\n\n\/\/\/ A buffer for a structured clone.\npub struct StructuredCloneData {\n data: *mut u64,\n nbytes: size_t,\n}\n\nimpl StructuredCloneData {\n \/\/\/ Writes a structured clone. Returns a `DataClone` error if that fails.\n pub fn write(cx: *mut JSContext, message: JSVal)\n -> Fallible<StructuredCloneData> {\n let mut data = ptr::null_mut();\n let mut nbytes = 0;\n let result = unsafe {\n JS_WriteStructuredClone(cx, message, &mut data, &mut nbytes,\n ptr::null(), ptr::null_mut())\n };\n if result == 0 {\n unsafe { JS_ClearPendingException(cx); }\n return Err(DataClone);\n }\n Ok(StructuredCloneData {\n data: data,\n nbytes: nbytes,\n })\n }\n\n \/\/\/ Reads a structured clone.\n \/\/\/\n \/\/\/ Panics if `JS_ReadStructuredClone` fails.\n pub fn read(self, global: GlobalRef) -> JSVal {\n let mut message = UndefinedValue();\n unsafe {\n assert!(JS_ReadStructuredClone(\n global.get_cx(), self.data as *const u64, self.nbytes,\n JS_STRUCTURED_CLONE_VERSION, &mut message,\n ptr::null(), ptr::null_mut()) != 0);\n }\n message\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>acc config fix<commit_after>extern crate rustc_serialize;\nextern crate getopts;\nextern crate byteorder;\nextern crate flv_toolbox_rs;\n\nuse std::path::Path;\nuse std::fs::File;\nuse std::io::{ Seek, SeekFrom, Read, Write };\n\nuse self::byteorder::{BigEndian, WriteBytesExt};\nuse rustc_serialize::json::Json;\nuse getopts::Options;\n\nuse flv_toolbox_rs::lib::{ FLVTagRead, FLVHeader, FLVTagType, FLVTag };\n\nfn print_usage(program: &str, opts: Options) {\n let brief = format!(\"Usage: {} FILE [options]\", program);\n eprintln!(\"{}\", opts.usage(&brief));\n}\n\nfn main() {\n let args: Vec<String> = std::env::args().collect();\n let program = args[0].clone();\n\n let mut opts = Options::new();\n opts.optflagopt(\"o\", \"output\", \"output flv file\", \"OUTPUT\");\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n opts.optflag(\"t\", \"check-only\", \"test, check only\");\n\n let usage_str = {\n let brief = format!(\"Usage: {} FILE [options]\", program);\n format!(\"{}\", opts.usage(&brief))\n };\n\n let exit_with_usage = || {\n eprintln!(\"{}\", usage_str);\n std::process::exit(-1);\n };\n\n let matches: getopts::Matches = match opts.parse(&args[1..]) {\n Ok(m) => m,\n Err(f) => {\n eprintln!(\"{}\", f.to_string());\n return exit_with_usage();\n }\n };\n\n if matches.opt_present(\"h\") {\n return exit_with_usage();\n }\n\n let test = matches.opt_present(\"t\");\n\n let input: String = if !matches.free.is_empty() {\n matches.free[0].clone()\n } else {\n eprintln!(\"no input file.\");\n return exit_with_usage();\n };\n\n let input_path: &Path = Path::new(&input);\n if !input_path.exists() {\n eprintln!(\"input file does not exist.\");\n return exit_with_usage();\n }\n\n let output = match matches.opt_default(\"o\", \"\") {\n Some(c) => c,\n _ => {\n let file = input_path.file_stem().unwrap().to_string_lossy().to_string();\n let mut output = input_path.with_file_name(format!(\"{}-fixed.flv\", &file));\n let mut i: i32 = 0;\n 
while output.exists() {\n i += 1;\n output = input_path.with_file_name(format!(\"{}-fixed({}).flv\", &file, i));\n }\n if !test {\n eprintln!(\"no output file, use {}\", output.to_str().unwrap());\n }\n output.to_string_lossy().to_string()\n }\n };\n\n let need_fix = detect_flv_acc(input_path);\n if need_fix {\n match fix_flv_acc(input_path, &output, test) {\n Ok(_) => {\n if test {\n eprintln!(\"test complete.\");\n } else {\n eprintln!(\"fixed.\");\n }\n std::process::exit(0);\n }\n Err(e) => {\n eprintln!(\"fix err: {}\", e);\n std::process::exit(-1);\n }\n }\n } else {\n eprintln!(\"pass\");\n }\n}\n\nfn next_tag_of_type<'a, R: Read>(parser: &mut FLVTagRead<'a, R>, tag_type: FLVTagType) -> Option<FLVTag> {\n loop {\n if let Some(tag) = parser.next() {\n if tag.get_tag_type() == tag_type {\n break Some(tag);\n } else {\n continue;\n }\n } else {\n break None;\n }\n }\n}\n\nfn detect_flv_acc(flv_path: &Path) -> bool {\n let mut file = std::fs::File::open(flv_path).unwrap();\n let mut parser = FLVTagRead::new(&mut file);\n {\n let header: &FLVHeader = &parser.header;\n if !header.hasAudioTags {\n eprintln!(\"no audio stream\");\n return false;\n }\n }\n\n let audio_tag = next_tag_of_type(&mut parser, FLVTagType::TAG_TYPE_AUDIO);\n if audio_tag.is_none() {\n return false;\n }\n let audio_tag: FLVTag = audio_tag.unwrap();\n if !audio_tag.is_acc_sequence_header() {\n eprintln!(\"first audio tag is not acc_sequence_header, exit\");\n return false;\n }\n let data_size = audio_tag.get_data_size();\n \/\/ println!(\"{:?}\", audio_tag.get_sound_audio_specific_config());\n return data_size == 2;\n}\n\nfn fix_flv_acc(flv_path: &Path, output_path: &str, test: bool) -> Result<(), String> {\n let new_tag = {\n let mut file = std::fs::File::open(flv_path).map_err(|_| \"cannot open output file.\".to_owned())?;\n let mut parser = FLVTagRead::new(&mut file);\n \n let meta_tag = next_tag_of_type(&mut parser, FLVTagType::TAG_TYPE_SCRIPTDATAOBJECT).ok_or(\"no meta tag\".to_string())?;\n let mut acc_tag = next_tag_of_type(&mut parser, FLVTagType::TAG_TYPE_AUDIO).ok_or::<String>(\"no acc_sequence_header\".into())?;\n assert!(acc_tag.is_acc_sequence_header());\n assert_eq!(acc_tag.get_data_size(), 2);\n let a_tag = next_tag_of_type(&mut parser, FLVTagType::TAG_TYPE_AUDIO).ok_or::<String>(\"only one acc_sequence_header\".into())?;\n let meta_obj = &meta_tag.get_objects()[1];\n println!(\"{:?}\", meta_obj);\n let sample: i64 = meta_obj.find(\"audiosamplerate\").ok_or(\"no audiosamplerate in meta, can't fix.\".to_owned())?.as_f64().ok_or(\"audiosamplerate is not f64, can't fix.\".to_string())? as _;\n let stereo = meta_obj.find(\"stereo\").ok_or(\"no stereo in meta, can't fix.\".to_owned())?.as_boolean().ok_or(\"no stereo in meta or stereo is not boolean, can't fix.\".to_owned())?;\n if meta_obj.find(\"keyframes\").is_some() {\n eprintln!(\"warning: flv has keyframes table. 
filepositions should adjust, but not.\");\n }\n let original_audio_object_type = 2;\n let sample_index = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350].iter().position(|i: &i64| *i == sample).ok_or(\"sample not in sample list.\".to_owned())?;\n let channel_config = if stereo { 2 } else { 1 };\n\n eprintln!(\"use config: original_audio_object_type {} sample_index {} channel_config {}\", original_audio_object_type, sample_index, channel_config);\n let mut data: Vec<u8> = Vec::with_capacity(11 + 4 + 4);\n acc_tag.write(&mut data);\n eprintln!(\"{:?}\", data);\n \/\/ set data size\n data[1] = ((4 >> 16) & 0xff) as u8;\n data[2] = ((4 >> 8) & 0xff) as u8;\n data[3] = ((4 ) & 0xff) as u8;\n data[13] = ((original_audio_object_type as u8 & 0x1f) << 3) | ((sample_index as u8 & 0xf) >> 1);\n data[14] = ((sample_index as u8 & 1) << 7) | ((channel_config & 0xf) << 3);\n data.pop();\n data.pop();\n data.write_u32::<BigEndian>(15).unwrap();\n eprintln!(\"{:?}\", data);\n let new_tag = FLVTag::read(&mut &*data).unwrap();\n eprintln!(\"{:?}\", (new_tag.get_tag_type(), new_tag.get_data_size(), new_tag.get_sound_audio_specific_config()));\n new_tag\n };\n if test {\n return Ok(());\n }\n let mut new_tag = Some(new_tag);\n \/\/ reopen\n let mut file = std::fs::File::open(flv_path).map_err(|_| \"cannot open output file.\".to_owned())?;\n let mut parser = FLVTagRead::new(&mut file);\n let mut ofile = std::fs::File::create(output_path).map_err(|_| \"cannot open output file.\".to_owned())?;\n parser.header.write(&mut ofile);\n while let Some(mut tag) = parser.next() {\n if new_tag.is_some() && tag.get_tag_type() == FLVTagType::TAG_TYPE_AUDIO && tag.is_acc_sequence_header() {\n tag = new_tag.take().unwrap();\/\/ only switch once\n }\n tag.write(&mut ofile);\n }\n Ok(())\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix imag-mail for new error interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Module dedicated to the handling of kraken requests<commit_after>\/*\nTristen Horton\ntristen@tristenhorton.com\n2017-04-03 23:37\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove stride member from LayerBuffer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Format macros.rs test to pass rustfmt<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::net::ip::{Port, IpAddr};\n\nuse http::method;\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware };\nuse server::Server;\nuse request::Request;\nuse response::Response;\n\n\/\/pre defined middleware\nuse static_files_handler::StaticFilesHandler;\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\n\n\/\/\/Nickel is the application object. 
It's the surface that\n\/\/\/holds all public APIs.\n\n#[deriving(Clone)]\npub struct Nickel{\n router: Router,\n middleware_stack: MiddlewareStack,\n server: Option<Server>\n}\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let router = Router::new();\n let middleware_stack = MiddlewareStack::new();\n Nickel {\n router: router,\n middleware_stack: middleware_stack,\n server: None,\n }\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards.\n \/\/\/\n \/\/\/ # Example without variables and wildcards\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", handler);\n \/\/\/ ```\n \/\/\/ # Example with variables\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.params.get(&\"userid\".to_string()));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with simple wildcard\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with double wildcard\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Get, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get()` for a more detailed description.\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Post, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get(..)` for a more detailed description.\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Put, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to 
\/a\/delete\/request\");\n \/\/\/ };\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Put, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a return value of boolean. That is to indicate whether\n \/\/\/ other middleware handler (if any) further down the stack should continue or if the\n \/\/\/ middleware invocation should be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn logger (req: &Request, res: &mut Response) -> Action{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Continue\n \/\/\/ }\n \/\/\/ ```\n pub fn utilize<T: Middleware>(&mut self, handler: T){\n self.middleware_stack.add(handler);\n }\n\n \/\/\/ Create a new middleware to serve files from within a given root directory.\n \/\/\/ The file to serve will be determined by combining the requested Url with\n \/\/\/ the provided root directory.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::static_files(\"\/path\/to\/serve\/\"));\n \/\/\/ ```\n pub fn static_files(root_path: &str) -> StaticFilesHandler {\n StaticFilesHandler::new(root_path)\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ firstname: String,\n \/\/\/ lastname: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::json_body_parser();\n \/\/\/\n \/\/\/ fn post_handler (request: &Request, response: &mut Response) {\n \/\/\/\n \/\/\/ let person = request.json_as::<Person>().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.firstname, person.lastname);\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/\n \/\/\/ server.post(\"\/a\/post\/request\", post_handler);\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::query_string();\n \/\/\/\n \/\/\/ fn get_handler (request: &Request, response: &mut Response) {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo.as_slice());\n \/\/\/ };\n \/\/\/\n \/\/\/ server.get(\"\/a\/get\/request\", get_handler);\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n }\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n self.server = Some(Server::new(self.router, self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\n<commit_msg>=BG= fn delete should 
use http::method::Delete<commit_after>use std::io::net::ip::{Port, IpAddr};\n\nuse http::method;\n\nuse router::Router;\nuse middleware::{ MiddlewareStack, Middleware };\nuse server::Server;\nuse request::Request;\nuse response::Response;\n\n\/\/pre defined middleware\nuse static_files_handler::StaticFilesHandler;\nuse json_body_parser::JsonBodyParser;\nuse query_string::QueryStringParser;\n\n\/\/\/Nickel is the application object. It's the surface that\n\/\/\/holds all public APIs.\n\n#[deriving(Clone)]\npub struct Nickel{\n router: Router,\n middleware_stack: MiddlewareStack,\n server: Option<Server>\n}\nimpl Nickel {\n\n \/\/\/ In order to use Nickels API one first has to create an instance.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ ```\n pub fn new() -> Nickel {\n let router = Router::new();\n let middleware_stack = MiddlewareStack::new();\n Nickel {\n router: router,\n middleware_stack: middleware_stack,\n server: None,\n }\n }\n\n \/\/\/ Registers a handler to be used for a specific GET request.\n \/\/\/ Handlers are assigned to paths and paths are allowed to contain\n \/\/\/ variables and wildcards.\n \/\/\/\n \/\/\/ # Example without variables and wildcards\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\", handler);\n \/\/\/ ```\n \/\/\/ # Example with variables\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ let text = format!(\"This is user: {}\", request.params.get(&\"userid\".to_string()));\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with simple wildcard\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 but not \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/*\/:userid\", handler);\n \/\/\/ ```\n \/\/\/ # Example with double wildcard\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches \/user\/list\/4711 and also \/user\/extended\/list\/4711\");\n \/\/\/ };\n \/\/\/ server.get(\"\/user\/**\/:userid\", handler);\n \/\/\/ ```\n pub fn get(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Get, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific POST request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/post\/request\");\n \/\/\/ };\n \/\/\/ server.post(\"\/a\/post\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get()` for a more detailed description.\n pub fn post(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Post, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific PUT request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a POST request to \/a\/put\/request\");\n \/\/\/ };\n \/\/\/ server.put(\"\/a\/put\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get(..)` for a more detailed 
description.\n pub fn put(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Put, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a handler to be used for a specific DELETE request.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn handler (request: Request, response: &mut Response) {\n \/\/\/ response.send(\"This matches a DELETE request to \/a\/delete\/request\");\n \/\/\/ };\n \/\/\/ server.delete(\"\/a\/delete\/request\", handler);\n \/\/\/ ```\n \/\/\/ Take a look at `get(...)` for a more detailed description.\n pub fn delete(&mut self, uri: &str, handler: fn(request: &Request, response: &mut Response)){\n self.router.add_route(method::Delete, String::from_str(uri), handler);\n }\n\n \/\/\/ Registers a middleware handler which will be invoked among other middleware\n \/\/\/ handlers before each request. Middleware can be stacked and is invoked in the\n \/\/\/ same order it was registered.\n \/\/\/\n \/\/\/ A middleware handler is nearly identical to a regular route handler with the only\n \/\/\/ difference that it expects a return value of boolean. That is to indicate whether\n \/\/\/ other middleware handler (if any) further down the stack should continue or if the\n \/\/\/ middleware invocation should be stopped after the current handler.\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ fn logger (req: &Request, res: &mut Response) -> Action{\n \/\/\/ println!(\"logging request: {}\", req.origin.request_uri);\n \/\/\/ Continue\n \/\/\/ }\n \/\/\/ ```\n pub fn utilize<T: Middleware>(&mut self, handler: T){\n self.middleware_stack.add(handler);\n }\n\n \/\/\/ Create a new middleware to serve files from within a given root directory.\n \/\/\/ The file to serve will be determined by combining the requested Url with\n \/\/\/ the provided root directory.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::static_files(\"\/path\/to\/serve\/\"));\n \/\/\/ ```\n pub fn static_files(root_path: &str) -> StaticFilesHandler {\n StaticFilesHandler::new(root_path)\n }\n\n \/\/\/ Create a new middleware to parse JSON bodies.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/\n \/\/\/ #[deriving(Decodable, Encodable)]\n \/\/\/ struct Person {\n \/\/\/ firstname: String,\n \/\/\/ lastname: String,\n \/\/\/ }\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::json_body_parser();\n \/\/\/\n \/\/\/ fn post_handler (request: &Request, response: &mut Response) {\n \/\/\/\n \/\/\/ let person = request.json_as::<Person>().unwrap();\n \/\/\/ let text = format!(\"Hello {} {}\", person.firstname, person.lastname);\n \/\/\/ response.send(text.as_slice());\n \/\/\/ };\n \/\/\/\n \/\/\/ server.post(\"\/a\/post\/request\", post_handler);\n \/\/\/ ```\n pub fn json_body_parser() -> JsonBodyParser {\n JsonBodyParser\n }\n\n \/\/\/ Create a new middleware to parse the query string.\n \/\/\/\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.utilize(Nickel::query_string();\n \/\/\/\n \/\/\/ fn get_handler (request: &Request, response: &mut Response) {\n \/\/\/ let foo = request.query(\"foo\", \"this is the default value, if foo is not present!\");\n \/\/\/ response.send(foo.as_slice());\n \/\/\/ };\n \/\/\/\n \/\/\/ server.get(\"\/a\/get\/request\", get_handler);\n \/\/\/ ```\n pub fn query_string() -> QueryStringParser {\n QueryStringParser\n 
}\n\n \/\/\/ Bind and listen for connections on the given host and port\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```rust\n \/\/\/ let mut server = Nickel::new();\n \/\/\/ server.listen(Ipv4Addr(127, 0, 0, 1), 6767);\n \/\/\/ ```\n pub fn listen(mut self, ip: IpAddr, port: Port) {\n self.server = Some(Server::new(self.router, self.middleware_stack, ip, port));\n self.server.unwrap().serve();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add function to get flags<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse db::{ Db, FindFilter, Record };\nuse iron::headers::ContentType;\nuse iron::prelude::*;\nuse iron::status::Status;\nuse params::{ Map, Params, Value };\nuse router::Router;\nuse std::error::Error;\nuse std::fmt::{self, Debug};\nuse std::time::{ SystemTime, UNIX_EPOCH };\n\n#[derive(Debug)]\nstruct StringError(String);\n\nimpl fmt::Display for StringError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Debug::fmt(self, f)\n }\n}\n\nimpl Error for StringError {\n fn description(&self) -> &str { &*self.0 }\n}\n\nfn seconds_from_epoch() -> i64 {\n let now = SystemTime::now();\n now.duration_from_earlier(UNIX_EPOCH).unwrap().as_secs() as i64\n}\n\npub fn create() -> Router {\n let mut router = Router::new();\n router.get(\"register\", move |req: &mut Request| -> IronResult<Response> {\n \/\/ Get the local ip from the query, and the public one from the socket.\n let public_ip = format!(\"{}\", req.remote_addr.ip());\n\n let map: &Map = req.get_ref::<Params>().unwrap();\n let ip_param = match map.find(&[\"ip\"]) {\n Some(&Value::String(ref name)) => Some(name),\n _ => None\n };\n\n if ip_param == None {\n return Err(IronError::new(StringError(\"Error\".to_string()), Status::BadRequest));\n }\n let local_ip = ip_param.unwrap().to_string();\n\n \/\/ Get the current number of seconds since epoch.\n let now = seconds_from_epoch();\n\n info!(\"GET \/register public_ip={} local_ip={} time is {}\",\n public_ip, local_ip, now);\n\n \/\/ Save this registration in the database.\n \/\/ If we already have the same (local, public) tuple, update it, if not\n \/\/ create a new tuple.\n let db = Db::new();\n match db.find(FindFilter::PublicAndLocalIp(public_ip.clone(), local_ip.clone())) {\n Ok(rvect) => {\n \/\/ If the vector is empty, create a new record, if not update\n \/\/ the existing one with the new timestamp.\n let record = Record {\n public_ip: public_ip,\n local_ip: local_ip,\n timestamp: now,\n };\n\n if rvect.is_empty() {\n db.add(record).unwrap();\n } else {\n db.update(record).unwrap();\n }\n },\n Err(_) => {\n let record = Record {\n public_ip: public_ip,\n local_ip: local_ip,\n timestamp: now,\n };\n db.add(record).unwrap();\n }\n }\n\n let mut response = Response::with(\"{\\\"status\\\" : \\\"registered\\\"}\");\n response.status = Some(Status::Ok);\n response.headers.set(ContentType::json());\n\n Ok(response)\n });\n\n router.get(\"ping\", move |req: &mut Request| -> IronResult<Response> {\n info!(\"GET \/ping\");\n let public_ip = format!(\"{}\", req.remote_addr.ip());\n\n let mut serialized = String::from(\"[\");\n\n let db = Db::new();\n match db.find(FindFilter::PublicIp(public_ip)) {\n Ok(rvect) => {\n \/\/ Serialize the vector.\n for record in rvect {\n serialized.push_str(&record.to_json());\n }\n },\n Err(_) => { }\n }\n\n 
serialized.push_str(\"]\");\n let mut response = Response::with(serialized);\n \/\/let mut response = Response::with(\"ok\");\n response.status = Some(Status::Ok);\n response.headers.set(ContentType::json());\n\n Ok(response)\n });\n\n router\n}<commit_msg>Set CORS header<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse db::{ Db, FindFilter, Record };\nuse iron::headers::{ AccessControlAllowOrigin, ContentType };\nuse iron::prelude::*;\nuse iron::status::Status;\nuse params::{ Map, Params, Value };\nuse router::Router;\nuse std::error::Error;\nuse std::fmt::{self, Debug};\nuse std::time::{ SystemTime, UNIX_EPOCH };\n\n#[derive(Debug)]\nstruct StringError(String);\n\nimpl fmt::Display for StringError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n Debug::fmt(self, f)\n }\n}\n\nimpl Error for StringError {\n fn description(&self) -> &str { &*self.0 }\n}\n\nfn seconds_from_epoch() -> i64 {\n let now = SystemTime::now();\n now.duration_from_earlier(UNIX_EPOCH).unwrap().as_secs() as i64\n}\n\npub fn create() -> Router {\n let mut router = Router::new();\n router.get(\"register\", move |req: &mut Request| -> IronResult<Response> {\n \/\/ Get the local ip from the query, and the public one from the socket.\n let public_ip = format!(\"{}\", req.remote_addr.ip());\n\n let map: &Map = req.get_ref::<Params>().unwrap();\n let ip_param = match map.find(&[\"ip\"]) {\n Some(&Value::String(ref name)) => Some(name),\n _ => None\n };\n\n if ip_param == None {\n return Err(IronError::new(StringError(\"Error\".to_string()), Status::BadRequest));\n }\n let local_ip = ip_param.unwrap().to_string();\n\n \/\/ Get the current number of seconds since epoch.\n let now = seconds_from_epoch();\n\n info!(\"GET \/register public_ip={} local_ip={} time is {}\",\n public_ip, local_ip, now);\n\n \/\/ Save this registration in the database.\n \/\/ If we already have the same (local, public) tuple, update it, if not\n \/\/ create a new tuple.\n let db = Db::new();\n match db.find(FindFilter::PublicAndLocalIp(public_ip.clone(), local_ip.clone())) {\n Ok(rvect) => {\n \/\/ If the vector is empty, create a new record, if not update\n \/\/ the existing one with the new timestamp.\n let record = Record {\n public_ip: public_ip,\n local_ip: local_ip,\n timestamp: now,\n };\n\n if rvect.is_empty() {\n db.add(record).unwrap();\n } else {\n db.update(record).unwrap();\n }\n },\n Err(_) => {\n let record = Record {\n public_ip: public_ip,\n local_ip: local_ip,\n timestamp: now,\n };\n db.add(record).unwrap();\n }\n }\n\n let mut response = Response::with(\"{\\\"status\\\" : \\\"registered\\\"}\");\n response.status = Some(Status::Ok);\n response.headers.set(AccessControlAllowOrigin::Any);\n response.headers.set(ContentType::json());\n\n Ok(response)\n });\n\n router.get(\"ping\", move |req: &mut Request| -> IronResult<Response> {\n info!(\"GET \/ping\");\n let public_ip = format!(\"{}\", req.remote_addr.ip());\n\n let mut serialized = String::from(\"[\");\n\n let db = Db::new();\n match db.find(FindFilter::PublicIp(public_ip)) {\n Ok(rvect) => {\n \/\/ Serialize the vector.\n for record in rvect {\n serialized.push_str(&record.to_json());\n }\n },\n Err(_) => { }\n }\n\n serialized.push_str(\"]\");\n let mut response = Response::with(serialized);\n \/\/let mut response = Response::with(\"ok\");\n response.status = Some(Status::Ok);\n 
response.headers.set(AccessControlAllowOrigin::Any);\n response.headers.set(ContentType::json());\n\n Ok(response)\n });\n\n router\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>fix: check vector length<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>docs: document associated types<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Derive standard traits for nat enums<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>color and mutable values in hashtables<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix becnhmarks<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>new test which features conflicting variance requirements<commit_after>fn main() {\n\n \/\/ Note: here we do not have any type annotations\n \/\/ but we do express conflicting requirements:\n\n let v = [mut [0]];\n let w = [mut [mut 0]];\n let x = [mut [mut 0]];\n\n fn f(&&v: [mut [int]]) {\n v[0] = [3]\n }\n\n fn g(&&v: [const [const int]]) {\n }\n\n fn h(&&v: [mut [mut int]]) {\n v[0] = [mut 3]\n }\n\n fn i(&&v: [mut [const int]]) {\n v[0] = [mut 3]\n }\n\n fn j(&&v: [[const int]]) {\n }\n\n f(v);\n g(v);\n h(v); \/\/! ERROR (values differ in mutability)\n i(v); \/\/! ERROR (values differ in mutability)\n j(v); \/\/! ERROR (values differ in mutability)\n\n f(w); \/\/! ERROR (values differ in mutability)\n g(w);\n h(w);\n i(w); \/\/! ERROR (values differ in mutability)\n j(w); \/\/! ERROR (values differ in mutability)\n\n \/\/ Note that without adding f() or h() to the mix, it is valid for\n \/\/ x to have the type [mut [const int]], and thus we can safely\n \/\/ call g() and i() but not j():\n g(x);\n i(x);\n j(x); \/\/! ERROR (values differ in mutability)\n}\n<|endoftext|>"} {"text":"<commit_before>#![warn(rust_2018_idioms)] \/\/ while we're getting used to 2018\n#![allow(clippy::all)]\n\nuse cargo::util::toml::StringOrVec;\nuse cargo::util::CliError;\nuse cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config};\nuse cargo_util::{ProcessBuilder, ProcessError};\nuse std::collections::BTreeMap;\nuse std::env;\nuse std::ffi::OsStr;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nmod cli;\nmod commands;\n\nuse crate::command_prelude::*;\n\nfn main() {\n #[cfg(feature = \"pretty-env-logger\")]\n pretty_env_logger::init_custom_env(\"CARGO_LOG\");\n #[cfg(not(feature = \"pretty-env-logger\"))]\n env_logger::init_from_env(\"CARGO_LOG\");\n\n let mut config = cli::LazyConfig::new();\n\n let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() {\n cargo::ops::fix_exec_rustc(config.get(), &lock_addr).map_err(|e| CliError::from(e))\n } else {\n let _token = cargo::util::job::setup();\n cli::main(&mut config)\n };\n\n match result {\n Err(e) => cargo::exit_with_error(e, &mut config.get_mut().shell()),\n Ok(()) => {}\n }\n}\n\n\/\/\/ Table for defining the aliases which come builtin in `Cargo`.\n\/\/\/ The contents are structured as: `(alias, aliased_command, description)`.\nconst BUILTIN_ALIASES: [(&str, &str, &str); 5] = [\n (\"b\", \"build\", \"alias: build\"),\n (\"c\", \"check\", \"alias: check\"),\n (\"d\", \"doc\", \"alias: doc\"),\n (\"r\", \"run\", \"alias: run\"),\n (\"t\", \"test\", \"alias: test\"),\n];\n\n\/\/\/ Function which contains the list of all of the builtin aliases and it's\n\/\/\/ corresponding execs represented as &str.\nfn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> {\n BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd)\n}\n\nfn aliased_command(config: &Config, 
command: &str) -> CargoResult<Option<Vec<String>>> {\n let alias_name = format!(\"alias.{}\", command);\n let user_alias = match config.get_string(&alias_name) {\n Ok(Some(record)) => Some(\n record\n .val\n .split_whitespace()\n .map(|s| s.to_string())\n .collect(),\n ),\n Ok(None) => None,\n Err(_) => config.get::<Option<Vec<String>>>(&alias_name)?,\n };\n\n let result = user_alias.or_else(|| {\n builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()])\n });\n Ok(result)\n}\n\n\/\/\/ List all runnable commands\nfn list_commands(config: &Config) -> BTreeMap<String, CommandInfo> {\n let prefix = \"cargo-\";\n let suffix = env::consts::EXE_SUFFIX;\n let mut commands = BTreeMap::new();\n for dir in search_directories(config) {\n let entries = match fs::read_dir(dir) {\n Ok(entries) => entries,\n _ => continue,\n };\n for entry in entries.filter_map(|e| e.ok()) {\n let path = entry.path();\n let filename = match path.file_name().and_then(|s| s.to_str()) {\n Some(filename) => filename,\n _ => continue,\n };\n if !filename.starts_with(prefix) || !filename.ends_with(suffix) {\n continue;\n }\n if is_executable(entry.path()) {\n let end = filename.len() - suffix.len();\n commands.insert(\n filename[prefix.len()..end].to_string(),\n CommandInfo::External { path: path.clone() },\n );\n }\n }\n }\n\n for cmd in commands::builtin() {\n commands.insert(\n cmd.get_name().to_string(),\n CommandInfo::BuiltIn {\n about: cmd.get_about().map(|s| s.to_string()),\n },\n );\n }\n\n \/\/ Add the builtin_aliases and them descriptions to the\n \/\/ `commands` `BTreeMap`.\n for command in &BUILTIN_ALIASES {\n commands.insert(\n command.0.to_string(),\n CommandInfo::BuiltIn {\n about: Some(command.2.to_string()),\n },\n );\n }\n\n \/\/ Add the user-defined aliases\n if let Ok(aliases) = config.get::<BTreeMap<String, StringOrVec>>(\"alias\") {\n for (name, target) in aliases.iter() {\n commands.insert(\n name.to_string(),\n CommandInfo::Alias {\n target: target.clone(),\n },\n );\n }\n }\n\n \/\/ `help` is special, so it needs to be inserted separately.\n commands.insert(\n \"help\".to_string(),\n CommandInfo::BuiltIn {\n about: Some(\"Displays help for a cargo subcommand\".to_string()),\n },\n );\n\n commands\n}\n\nfn find_external_subcommand(config: &Config, cmd: &str) -> Option<PathBuf> {\n let command_exe = format!(\"cargo-{}{}\", cmd, env::consts::EXE_SUFFIX);\n search_directories(config)\n .iter()\n .map(|dir| dir.join(&command_exe))\n .find(|file| is_executable(file))\n}\n\nfn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> CliResult {\n let path = find_external_subcommand(config, cmd);\n let command = match path {\n Some(command) => command,\n None => {\n let err = if cmd.starts_with('+') {\n anyhow::format_err!(\n \"no such subcommand: `{}`\\n\\n\\t\\\n Cargo does not handle `+toolchain` directives.\\n\\t\\\n Did you mean to invoke `cargo` through `rustup` instead?\",\n cmd\n )\n } else {\n let suggestions = list_commands(config);\n let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c);\n\n anyhow::format_err!(\n \"no such subcommand: `{}`{}\\n\\n\\t\\\n View all installed commands with `cargo --list`\",\n cmd,\n did_you_mean\n )\n };\n\n return Err(CliError::new(err, 101));\n }\n };\n\n let cargo_exe = config.cargo_exe()?;\n let mut cmd = ProcessBuilder::new(&command);\n cmd.env(cargo::CARGO_ENV, cargo_exe).args(args);\n if let Some(client) = config.jobserver_from_env() {\n cmd.inherit_jobserver(client);\n }\n let err = match cmd.exec_replace() 
{\n Ok(()) => return Ok(()),\n Err(e) => e,\n };\n\n if let Some(perr) = err.downcast_ref::<ProcessError>() {\n if let Some(code) = perr.code {\n return Err(CliError::code(code));\n }\n }\n Err(CliError::new(err, 101))\n}\n\n#[cfg(unix)]\nfn is_executable<P: AsRef<Path>>(path: P) -> bool {\n use std::os::unix::prelude::*;\n fs::metadata(path)\n .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)\n .unwrap_or(false)\n}\n#[cfg(windows)]\nfn is_executable<P: AsRef<Path>>(path: P) -> bool {\n path.as_ref().is_file()\n}\n\nfn search_directories(config: &Config) -> Vec<PathBuf> {\n let mut path_dirs = if let Some(val) = env::var_os(\"PATH\") {\n env::split_paths(&val).collect()\n } else {\n vec![]\n };\n\n let home_bin = config.home().clone().into_path_unlocked().join(\"bin\");\n\n \/\/ If any of that PATH elements contains `home_bin`, do not\n \/\/ add it again. This is so that the users can control priority\n \/\/ of it using PATH, while preserving the historical\n \/\/ behavior of preferring it over system global directories even\n \/\/ when not in PATH at all.\n \/\/ See https:\/\/github.com\/rust-lang\/cargo\/issues\/11020 for details.\n \/\/\n \/\/ Note: `p == home_bin` will ignore trailing slash, but we don't\n \/\/ `canonicalize` the paths.\n if !path_dirs.iter().any(|p| p == &home_bin) {\n path_dirs.insert(0, home_bin);\n };\n\n path_dirs\n}\n\nfn init_git_transports(config: &Config) {\n \/\/ Only use a custom transport if any HTTP options are specified,\n \/\/ such as proxies or custom certificate authorities. The custom\n \/\/ transport, however, is not as well battle-tested.\n\n match cargo::ops::needs_custom_http_transport(config) {\n Ok(true) => {}\n _ => return,\n }\n\n let handle = match cargo::ops::http_handle(config) {\n Ok(handle) => handle,\n Err(..) => return,\n };\n\n \/\/ The unsafety of the registration function derives from two aspects:\n \/\/\n \/\/ 1. This call must be synchronized with all other registration calls as\n \/\/ well as construction of new transports.\n \/\/ 2. The argument is leaked.\n \/\/\n \/\/ We're clear on point (1) because this is only called at the start of this\n \/\/ binary (we know what the state of the world looks like) and we're mostly\n \/\/ clear on point (2) because we'd only free it after everything is done\n \/\/ anyway\n unsafe {\n git2_curl::register(handle);\n }\n\n \/\/ Disabling the owner validation in git can, in theory, lead to code execution\n \/\/ vulnerabilities. However, libgit2 does not launch executables, which is the foundation of\n \/\/ the original security issue. Meanwhile, issues with refusing to load git repos in\n \/\/ `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the\n \/\/ validation.\n \/\/\n \/\/ For further discussion of Cargo's current interactions with git, see\n \/\/\n \/\/ https:\/\/github.com\/rust-lang\/rfcs\/pull\/3279\n \/\/\n \/\/ and in particular the subsection on \"Git support\".\n \/\/\n \/\/ Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library,\n \/\/ this code won't be invoked. Instead, developers will need to explicitly disable the\n \/\/ validation in their code. 
This is inconvenient, but won't accidentally open consuming\n \/\/ applications up to security issues if they use git2 to open repositories elsewhere in their\n \/\/ code.\n unsafe {\n if git2::opts::set_verify_owner_validation(false).is_err() {\n return;\n }\n }\n}\n<commit_msg>doc: rustdoc for `aliased_command`<commit_after>#![warn(rust_2018_idioms)] \/\/ while we're getting used to 2018\n#![allow(clippy::all)]\n\nuse cargo::util::toml::StringOrVec;\nuse cargo::util::CliError;\nuse cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config};\nuse cargo_util::{ProcessBuilder, ProcessError};\nuse std::collections::BTreeMap;\nuse std::env;\nuse std::ffi::OsStr;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nmod cli;\nmod commands;\n\nuse crate::command_prelude::*;\n\nfn main() {\n #[cfg(feature = \"pretty-env-logger\")]\n pretty_env_logger::init_custom_env(\"CARGO_LOG\");\n #[cfg(not(feature = \"pretty-env-logger\"))]\n env_logger::init_from_env(\"CARGO_LOG\");\n\n let mut config = cli::LazyConfig::new();\n\n let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() {\n cargo::ops::fix_exec_rustc(config.get(), &lock_addr).map_err(|e| CliError::from(e))\n } else {\n let _token = cargo::util::job::setup();\n cli::main(&mut config)\n };\n\n match result {\n Err(e) => cargo::exit_with_error(e, &mut config.get_mut().shell()),\n Ok(()) => {}\n }\n}\n\n\/\/\/ Table for defining the aliases which come builtin in `Cargo`.\n\/\/\/ The contents are structured as: `(alias, aliased_command, description)`.\nconst BUILTIN_ALIASES: [(&str, &str, &str); 5] = [\n (\"b\", \"build\", \"alias: build\"),\n (\"c\", \"check\", \"alias: check\"),\n (\"d\", \"doc\", \"alias: doc\"),\n (\"r\", \"run\", \"alias: run\"),\n (\"t\", \"test\", \"alias: test\"),\n];\n\n\/\/\/ Function which contains the list of all of the builtin aliases and it's\n\/\/\/ corresponding execs represented as &str.\nfn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> {\n BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd)\n}\n\n\/\/\/ Resolve the aliased command from the [`Config`] with a given command string.\n\/\/\/\n\/\/\/ The search fallback chain is:\n\/\/\/\n\/\/\/ 1. Get the aliased command as a string.\n\/\/\/ 2. If an `Err` occurs (missing key, type mismatch, or any possible error),\n\/\/\/ try to get it as an array again.\n\/\/\/ 3. 
If still cannot find any, finds one insides [`BUILTIN_ALIASES`].\nfn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {\n let alias_name = format!(\"alias.{}\", command);\n let user_alias = match config.get_string(&alias_name) {\n Ok(Some(record)) => Some(\n record\n .val\n .split_whitespace()\n .map(|s| s.to_string())\n .collect(),\n ),\n Ok(None) => None,\n Err(_) => config.get::<Option<Vec<String>>>(&alias_name)?,\n };\n\n let result = user_alias.or_else(|| {\n builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()])\n });\n Ok(result)\n}\n\n\/\/\/ List all runnable commands\nfn list_commands(config: &Config) -> BTreeMap<String, CommandInfo> {\n let prefix = \"cargo-\";\n let suffix = env::consts::EXE_SUFFIX;\n let mut commands = BTreeMap::new();\n for dir in search_directories(config) {\n let entries = match fs::read_dir(dir) {\n Ok(entries) => entries,\n _ => continue,\n };\n for entry in entries.filter_map(|e| e.ok()) {\n let path = entry.path();\n let filename = match path.file_name().and_then(|s| s.to_str()) {\n Some(filename) => filename,\n _ => continue,\n };\n if !filename.starts_with(prefix) || !filename.ends_with(suffix) {\n continue;\n }\n if is_executable(entry.path()) {\n let end = filename.len() - suffix.len();\n commands.insert(\n filename[prefix.len()..end].to_string(),\n CommandInfo::External { path: path.clone() },\n );\n }\n }\n }\n\n for cmd in commands::builtin() {\n commands.insert(\n cmd.get_name().to_string(),\n CommandInfo::BuiltIn {\n about: cmd.get_about().map(|s| s.to_string()),\n },\n );\n }\n\n \/\/ Add the builtin_aliases and them descriptions to the\n \/\/ `commands` `BTreeMap`.\n for command in &BUILTIN_ALIASES {\n commands.insert(\n command.0.to_string(),\n CommandInfo::BuiltIn {\n about: Some(command.2.to_string()),\n },\n );\n }\n\n \/\/ Add the user-defined aliases\n if let Ok(aliases) = config.get::<BTreeMap<String, StringOrVec>>(\"alias\") {\n for (name, target) in aliases.iter() {\n commands.insert(\n name.to_string(),\n CommandInfo::Alias {\n target: target.clone(),\n },\n );\n }\n }\n\n \/\/ `help` is special, so it needs to be inserted separately.\n commands.insert(\n \"help\".to_string(),\n CommandInfo::BuiltIn {\n about: Some(\"Displays help for a cargo subcommand\".to_string()),\n },\n );\n\n commands\n}\n\nfn find_external_subcommand(config: &Config, cmd: &str) -> Option<PathBuf> {\n let command_exe = format!(\"cargo-{}{}\", cmd, env::consts::EXE_SUFFIX);\n search_directories(config)\n .iter()\n .map(|dir| dir.join(&command_exe))\n .find(|file| is_executable(file))\n}\n\nfn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> CliResult {\n let path = find_external_subcommand(config, cmd);\n let command = match path {\n Some(command) => command,\n None => {\n let err = if cmd.starts_with('+') {\n anyhow::format_err!(\n \"no such subcommand: `{}`\\n\\n\\t\\\n Cargo does not handle `+toolchain` directives.\\n\\t\\\n Did you mean to invoke `cargo` through `rustup` instead?\",\n cmd\n )\n } else {\n let suggestions = list_commands(config);\n let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c);\n\n anyhow::format_err!(\n \"no such subcommand: `{}`{}\\n\\n\\t\\\n View all installed commands with `cargo --list`\",\n cmd,\n did_you_mean\n )\n };\n\n return Err(CliError::new(err, 101));\n }\n };\n\n let cargo_exe = config.cargo_exe()?;\n let mut cmd = ProcessBuilder::new(&command);\n cmd.env(cargo::CARGO_ENV, cargo_exe).args(args);\n if let Some(client) = 
config.jobserver_from_env() {\n cmd.inherit_jobserver(client);\n }\n let err = match cmd.exec_replace() {\n Ok(()) => return Ok(()),\n Err(e) => e,\n };\n\n if let Some(perr) = err.downcast_ref::<ProcessError>() {\n if let Some(code) = perr.code {\n return Err(CliError::code(code));\n }\n }\n Err(CliError::new(err, 101))\n}\n\n#[cfg(unix)]\nfn is_executable<P: AsRef<Path>>(path: P) -> bool {\n use std::os::unix::prelude::*;\n fs::metadata(path)\n .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)\n .unwrap_or(false)\n}\n#[cfg(windows)]\nfn is_executable<P: AsRef<Path>>(path: P) -> bool {\n path.as_ref().is_file()\n}\n\nfn search_directories(config: &Config) -> Vec<PathBuf> {\n let mut path_dirs = if let Some(val) = env::var_os(\"PATH\") {\n env::split_paths(&val).collect()\n } else {\n vec![]\n };\n\n let home_bin = config.home().clone().into_path_unlocked().join(\"bin\");\n\n \/\/ If any of that PATH elements contains `home_bin`, do not\n \/\/ add it again. This is so that the users can control priority\n \/\/ of it using PATH, while preserving the historical\n \/\/ behavior of preferring it over system global directories even\n \/\/ when not in PATH at all.\n \/\/ See https:\/\/github.com\/rust-lang\/cargo\/issues\/11020 for details.\n \/\/\n \/\/ Note: `p == home_bin` will ignore trailing slash, but we don't\n \/\/ `canonicalize` the paths.\n if !path_dirs.iter().any(|p| p == &home_bin) {\n path_dirs.insert(0, home_bin);\n };\n\n path_dirs\n}\n\nfn init_git_transports(config: &Config) {\n \/\/ Only use a custom transport if any HTTP options are specified,\n \/\/ such as proxies or custom certificate authorities. The custom\n \/\/ transport, however, is not as well battle-tested.\n\n match cargo::ops::needs_custom_http_transport(config) {\n Ok(true) => {}\n _ => return,\n }\n\n let handle = match cargo::ops::http_handle(config) {\n Ok(handle) => handle,\n Err(..) => return,\n };\n\n \/\/ The unsafety of the registration function derives from two aspects:\n \/\/\n \/\/ 1. This call must be synchronized with all other registration calls as\n \/\/ well as construction of new transports.\n \/\/ 2. The argument is leaked.\n \/\/\n \/\/ We're clear on point (1) because this is only called at the start of this\n \/\/ binary (we know what the state of the world looks like) and we're mostly\n \/\/ clear on point (2) because we'd only free it after everything is done\n \/\/ anyway\n unsafe {\n git2_curl::register(handle);\n }\n\n \/\/ Disabling the owner validation in git can, in theory, lead to code execution\n \/\/ vulnerabilities. However, libgit2 does not launch executables, which is the foundation of\n \/\/ the original security issue. Meanwhile, issues with refusing to load git repos in\n \/\/ `CARGO_HOME` for example will likely be very frustrating for users. So, we disable the\n \/\/ validation.\n \/\/\n \/\/ For further discussion of Cargo's current interactions with git, see\n \/\/\n \/\/ https:\/\/github.com\/rust-lang\/rfcs\/pull\/3279\n \/\/\n \/\/ and in particular the subsection on \"Git support\".\n \/\/\n \/\/ Note that we only disable this when Cargo is run as a binary. If Cargo is used as a library,\n \/\/ this code won't be invoked. Instead, developers will need to explicitly disable the\n \/\/ validation in their code. 
This is inconvenient, but won't accidentally open consuming\n \/\/ applications up to security issues if they use git2 to open repositories elsewhere in their\n \/\/ code.\n unsafe {\n if git2::opts::set_verify_owner_validation(false).is_err() {\n return;\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use common::memory;\n\nuse core::mem::size_of;\nuse core::u32;\n\nuse drivers::mmio::Mmio;\n\nuse super::fis::{FIS_TYPE_REG_H2D, FisRegH2D};\n\nconst ATA_CMD_READ_DMA_EXT: u8 = 0x25;\nconst ATA_DEV_BUSY: u8 = 0x80;\nconst ATA_DEV_DRQ: u8 = 0x08;\n\nconst HBA_PxCMD_CR: u32 = 1 << 15;\nconst HBA_PxCMD_FR: u32 = 1 << 14;\nconst HBA_PxCMD_FRE: u32 = 1 << 4;\nconst HBA_PxCMD_ST: u32 = 1;\nconst HBA_PxIS_TFES: u32 = 1 << 30;\nconst HBA_SSTS_PRESENT: u32 = 0x13;\nconst HBA_SIG_ATA: u32 = 0x00000101;\nconst HBA_SIG_ATAPI: u32 = 0xEB140101;\nconst HBA_SIG_PM: u32 = 0x96690101;\nconst HBA_SIG_SEMB: u32 = 0xC33C0101;\n\n#[derive(Debug)]\npub enum HbaPortType {\n None,\n Unknown(u32),\n SATA,\n SATAPI,\n PM,\n SEMB,\n}\n\n#[repr(packed)]\npub struct HbaPort {\n pub clb: Mmio<u64>, \/\/ 0x00, command list base address, 1K-byte aligned\n pub fb: Mmio<u64>, \/\/ 0x08, FIS base address, 256-byte aligned\n pub is: Mmio<u32>, \/\/ 0x10, interrupt status\n pub ie: Mmio<u32>, \/\/ 0x14, interrupt enable\n pub cmd: Mmio<u32>, \/\/ 0x18, command and status\n pub rsv0: Mmio<u32>, \/\/ 0x1C, Reserved\n pub tfd: Mmio<u32>, \/\/ 0x20, task file data\n pub sig: Mmio<u32>, \/\/ 0x24, signature\n pub ssts: Mmio<u32>, \/\/ 0x28, SATA status (SCR0:SStatus)\n pub sctl: Mmio<u32>, \/\/ 0x2C, SATA control (SCR2:SControl)\n pub serr: Mmio<u32>, \/\/ 0x30, SATA error (SCR1:SError)\n pub sact: Mmio<u32>, \/\/ 0x34, SATA active (SCR3:SActive)\n pub ci: Mmio<u32>, \/\/ 0x38, command issue\n pub sntf: Mmio<u32>, \/\/ 0x3C, SATA notification (SCR4:SNotification)\n pub fbs: Mmio<u32>, \/\/ 0x40, FIS-based switch control\n pub rsv1: [Mmio<u32>; 11], \/\/ 0x44 ~ 0x6F, Reserved\n pub vendor: [Mmio<u32>; 4] \/\/ 0x70 ~ 0x7F, vendor specific\n}\n\nimpl HbaPort {\n pub fn probe(&self) -> HbaPortType {\n if self.ssts.readf(HBA_SSTS_PRESENT) {\n let sig = self.sig.read();\n match sig {\n HBA_SIG_ATA => HbaPortType::SATA,\n HBA_SIG_ATAPI => HbaPortType::SATAPI,\n HBA_SIG_PM => HbaPortType::PM,\n HBA_SIG_SEMB => HbaPortType::SEMB,\n _ => HbaPortType::Unknown(sig)\n }\n } else {\n HbaPortType::None\n }\n }\n\n pub fn init(&mut self) {\n self.stop();\n\n debugln!(\"Port Command List\");\n let clb = unsafe { memory::alloc_aligned(size_of::<HbaCmdHeader>(), 1024) };\n self.clb.write(clb as u64);\n\n debugln!(\"Port FIS\");\n let fb = unsafe { memory::alloc_aligned(256, 256) };\n self.fb.write(fb as u64);\n\n for i in 0..32 {\n debugln!(\"Port Command Table {}\", i);\n let cmdheader = unsafe { &mut * (clb as *mut HbaCmdHeader).offset(i) };\n let ctba = unsafe { memory::alloc_aligned(256, 256) };\n cmdheader.ctba.write(ctba as u64);\n cmdheader.prdtl.write(8);\n }\n\n self.start();\n }\n\n pub fn start(&mut self) {\n debugln!(\"Starting port\");\n\n while self.cmd.readf(HBA_PxCMD_CR) {}\n\n self.cmd.writef(HBA_PxCMD_FRE, true);\n self.cmd.writef(HBA_PxCMD_ST, true);\n }\n\n pub fn stop(&mut self) {\n debugln!(\"Stopping port\");\n\n \tself.cmd.writef(HBA_PxCMD_ST, false);\n\n \twhile self.cmd.readf(HBA_PxCMD_FR | HBA_PxCMD_CR) {}\n\n \tself.cmd.writef(HBA_PxCMD_FRE, false);\n }\n\n pub fn slot(&self) -> Option<u32> {\n let slots = self.sact.read() | self.ci.read();\n for i in 0..32 {\n if slots & 1 << i == 0 {\n return Some(i);\n }\n }\n None\n 
}\n\n pub fn read(&mut self, lba: u64, buf: usize, len: usize) -> bool {\n let sectors = len\/512;\n let entries = 1;\n\n debugln!(\"LBA: {:X} BUF: {:X}, SECTORS: {}\", lba, buf, sectors);\n\n self.is.write(u32::MAX);\n\n if let Some(slot) = self.slot() {\n debugln!(\"Slot {}\", slot);\n\n let clb = self.clb.read() as usize;\n let cmdheader = unsafe { &mut * (clb as *mut HbaCmdHeader).offset(slot as isize) };\n\n cmdheader.cfl.write(((size_of::<FisRegH2D>()\/size_of::<u32>()) as u8) << 3);\n cmdheader.prdtl.write(entries);\n\n let ctba = cmdheader.ctba.read() as usize;\n unsafe { ::memset(ctba as *mut u8, 0, size_of::<HbaCmdTable>()) };\n let cmdtbl = unsafe { &mut * (ctba as *mut HbaCmdTable) };\n\n let prdt_entry = &mut cmdtbl.prdt_entry[0];\n prdt_entry.dba.write(buf as u64);\n prdt_entry.dbc.write(((sectors * 512) as u32) << 10 | 1);\n\n let cmdfis = unsafe { &mut * (cmdtbl.cfis.as_ptr() as *mut FisRegH2D) };\n\n cmdfis.fis_type.write(FIS_TYPE_REG_H2D);\n cmdfis.pm.writef(1, true);\n cmdfis.command.write(ATA_CMD_READ_DMA_EXT);\n\n cmdfis.lba0.write(lba as u8);\n cmdfis.lba1.write((lba >> 8) as u8);\n cmdfis.lba2.write((lba >> 16) as u8);\n\n cmdfis.device.write(1 << 6);\n\n cmdfis.lba3.write((lba >> 24) as u8);\n cmdfis.lba4.write((lba >> 32) as u8);\n cmdfis.lba5.write((lba >> 40) as u8);\n\n cmdfis.countl.write(sectors as u8);\n cmdfis.counth.write((sectors >> 8) as u8);\n\n debugln!(\"Busy Wait\");\n while self.tfd.readf((ATA_DEV_BUSY | ATA_DEV_DRQ) as u32) {}\n\n self.ci.write(1 << slot);\n\n debugln!(\"Completion Wait\");\n while self.ci.readf(1 << slot) {\n if self.is.readf(HBA_PxIS_TFES) {\n \t\t\tdebugln!(\"Read disk error\");\n \t\t\treturn false;\n \t\t}\n }\n\n debugln!(\"Return\");\n\n if self.is.readf(HBA_PxIS_TFES) {\n \t\t\tdebugln!(\"Read disk error\");\n \t\t\treturn false;\n \t\t}\n\n return true;\n }\n\n false\n }\n}\n\n#[repr(packed)]\npub struct HbaMem {\n pub cap: Mmio<u32>, \/\/ 0x00, Host capability\n pub ghc: Mmio<u32>, \/\/ 0x04, Global host control\n pub is: Mmio<u32>, \/\/ 0x08, Interrupt status\n pub pi: Mmio<u32>, \/\/ 0x0C, Port implemented\n pub vs: Mmio<u32>, \/\/ 0x10, Version\n pub ccc_ctl: Mmio<u32>, \/\/ 0x14, Command completion coalescing control\n pub ccc_pts: Mmio<u32>, \/\/ 0x18, Command completion coalescing ports\n pub em_loc: Mmio<u32>, \/\/ 0x1C, Enclosure management location\n pub em_ctl: Mmio<u32>, \/\/ 0x20, Enclosure management control\n pub cap2: Mmio<u32>, \/\/ 0x24, Host capabilities extended\n pub bohc: Mmio<u32>, \/\/ 0x28, BIOS\/OS handoff control and status\n pub rsv: [Mmio<u8>; 116], \/\/ 0x2C - 0x9F, Reserved\n pub vendor: [Mmio<u8>; 96], \/\/ 0xA0 - 0xFF, Vendor specific registers\n pub ports: [HbaPort; 32] \/\/ 0x100 - 0x10FF, Port control registers\n}\n\n#[repr(packed)]\nstruct HbaPrdtEntry {\n dba: Mmio<u64>,\t\t\/\/ Data base address\n rsv0: Mmio<u32>,\t\t\/\/ Reserved\n dbc: Mmio<u32>,\t\t\/\/ Byte count, 4M max, interrupt = 1\n}\n\n#[repr(packed)]\nstruct HbaCmdTable {\n\t\/\/ 0x00\n\tcfis: [Mmio<u8>; 64],\t\/\/ Command FIS\n\n\t\/\/ 0x40\n\tacmd: [Mmio<u8>; 16],\t\/\/ ATAPI command, 12 or 16 bytes\n\n\t\/\/ 0x50\n\trsv: [Mmio<u8>; 48],\t\/\/ Reserved\n\n\t\/\/ 0x80\n\tprdt_entry: [HbaPrdtEntry; 65536],\t\/\/ Physical region descriptor table entries, 0 ~ 65535\n}\n\n#[repr(packed)]\nstruct HbaCmdHeader {\n\t\/\/ DW0\n\tcfl: Mmio<u8>,\t\t \/\/ Command FIS length in DWORDS, 2 ~ 16, atapi: 4, write - host to device: 2, prefetchable: 1\n\tpm: Mmio<u8>,\t\t \/\/ Reset - 0x80, bist: 0x40, clear busy on ok: 0x20, port 
multiplier\n\n\tprdtl: Mmio<u16>,\t\t\/\/ Physical region descriptor table length in entries\n\n\t\/\/ DW1\n\tprdbc: Mmio<u32>,\t\t\/\/ Physical region descriptor byte count transferred\n\n\t\/\/ DW2, 3\n\tctba: Mmio<u64>,\t\t\/\/ Command table descriptor base address\n\n\t\/\/ DW4 - 7\n\trsv1: [Mmio<u32>; 4],\t\/\/ Reserved\n}\n<commit_msg>Hooray data!<commit_after>use common::memory;\n\nuse core::mem::size_of;\nuse core::u32;\n\nuse drivers::mmio::Mmio;\n\nuse super::fis::{FIS_TYPE_REG_H2D, FisRegH2D};\n\nconst ATA_CMD_READ_DMA_EXT: u8 = 0x25;\nconst ATA_DEV_BUSY: u8 = 0x80;\nconst ATA_DEV_DRQ: u8 = 0x08;\n\nconst HBA_PxCMD_CR: u32 = 1 << 15;\nconst HBA_PxCMD_FR: u32 = 1 << 14;\nconst HBA_PxCMD_FRE: u32 = 1 << 4;\nconst HBA_PxCMD_ST: u32 = 1;\nconst HBA_PxIS_TFES: u32 = 1 << 30;\nconst HBA_SSTS_PRESENT: u32 = 0x13;\nconst HBA_SIG_ATA: u32 = 0x00000101;\nconst HBA_SIG_ATAPI: u32 = 0xEB140101;\nconst HBA_SIG_PM: u32 = 0x96690101;\nconst HBA_SIG_SEMB: u32 = 0xC33C0101;\n\n#[derive(Debug)]\npub enum HbaPortType {\n None,\n Unknown(u32),\n SATA,\n SATAPI,\n PM,\n SEMB,\n}\n\n#[repr(packed)]\npub struct HbaPort {\n pub clb: Mmio<u64>, \/\/ 0x00, command list base address, 1K-byte aligned\n pub fb: Mmio<u64>, \/\/ 0x08, FIS base address, 256-byte aligned\n pub is: Mmio<u32>, \/\/ 0x10, interrupt status\n pub ie: Mmio<u32>, \/\/ 0x14, interrupt enable\n pub cmd: Mmio<u32>, \/\/ 0x18, command and status\n pub rsv0: Mmio<u32>, \/\/ 0x1C, Reserved\n pub tfd: Mmio<u32>, \/\/ 0x20, task file data\n pub sig: Mmio<u32>, \/\/ 0x24, signature\n pub ssts: Mmio<u32>, \/\/ 0x28, SATA status (SCR0:SStatus)\n pub sctl: Mmio<u32>, \/\/ 0x2C, SATA control (SCR2:SControl)\n pub serr: Mmio<u32>, \/\/ 0x30, SATA error (SCR1:SError)\n pub sact: Mmio<u32>, \/\/ 0x34, SATA active (SCR3:SActive)\n pub ci: Mmio<u32>, \/\/ 0x38, command issue\n pub sntf: Mmio<u32>, \/\/ 0x3C, SATA notification (SCR4:SNotification)\n pub fbs: Mmio<u32>, \/\/ 0x40, FIS-based switch control\n pub rsv1: [Mmio<u32>; 11], \/\/ 0x44 ~ 0x6F, Reserved\n pub vendor: [Mmio<u32>; 4] \/\/ 0x70 ~ 0x7F, vendor specific\n}\n\nimpl HbaPort {\n pub fn probe(&self) -> HbaPortType {\n if self.ssts.readf(HBA_SSTS_PRESENT) {\n let sig = self.sig.read();\n match sig {\n HBA_SIG_ATA => HbaPortType::SATA,\n HBA_SIG_ATAPI => HbaPortType::SATAPI,\n HBA_SIG_PM => HbaPortType::PM,\n HBA_SIG_SEMB => HbaPortType::SEMB,\n _ => HbaPortType::Unknown(sig)\n }\n } else {\n HbaPortType::None\n }\n }\n\n pub fn init(&mut self) {\n self.stop();\n\n debugln!(\"Port Command List\");\n let clb = unsafe { memory::alloc_aligned(size_of::<HbaCmdHeader>(), 1024) };\n self.clb.write(clb as u64);\n\n debugln!(\"Port FIS\");\n let fb = unsafe { memory::alloc_aligned(256, 256) };\n self.fb.write(fb as u64);\n\n for i in 0..32 {\n debugln!(\"Port Command Table {}\", i);\n let cmdheader = unsafe { &mut * (clb as *mut HbaCmdHeader).offset(i) };\n let ctba = unsafe { memory::alloc_aligned(256, 256) };\n cmdheader.ctba.write(ctba as u64);\n cmdheader.prdtl.write(8);\n }\n\n self.start();\n }\n\n pub fn start(&mut self) {\n debugln!(\"Starting port\");\n\n while self.cmd.readf(HBA_PxCMD_CR) {}\n\n self.cmd.writef(HBA_PxCMD_FRE, true);\n self.cmd.writef(HBA_PxCMD_ST, true);\n }\n\n pub fn stop(&mut self) {\n debugln!(\"Stopping port\");\n\n \tself.cmd.writef(HBA_PxCMD_ST, false);\n\n \twhile self.cmd.readf(HBA_PxCMD_FR | HBA_PxCMD_CR) {}\n\n \tself.cmd.writef(HBA_PxCMD_FRE, false);\n }\n\n pub fn slot(&self) -> Option<u32> {\n let slots = self.sact.read() | self.ci.read();\n for i in 0..32 
{\n if slots & 1 << i == 0 {\n return Some(i);\n }\n }\n None\n }\n\n pub fn read(&mut self, lba: u64, buf: usize, len: usize) -> bool {\n let sectors = len\/512;\n let entries = 1;\n\n debugln!(\"LBA: {:X} BUF: {:X} SECTORS: {}\", lba, buf, sectors);\n\n self.is.write(u32::MAX);\n\n if let Some(slot) = self.slot() {\n debugln!(\"Slot {}\", slot);\n\n let clb = self.clb.read() as usize;\n let cmdheader = unsafe { &mut * (clb as *mut HbaCmdHeader).offset(slot as isize) };\n\n cmdheader.cfl.write(((size_of::<FisRegH2D>()\/size_of::<u32>()) as u8));\n cmdheader.prdtl.write(entries);\n\n let ctba = cmdheader.ctba.read() as usize;\n unsafe { ::memset(ctba as *mut u8, 0, size_of::<HbaCmdTable>()) };\n let cmdtbl = unsafe { &mut * (ctba as *mut HbaCmdTable) };\n\n let prdt_entry = &mut cmdtbl.prdt_entry[0];\n prdt_entry.dba.write(buf as u64);\n prdt_entry.dbc.write(((sectors * 512) as u32) | 1);\n\n let cmdfis = unsafe { &mut * (cmdtbl.cfis.as_ptr() as *mut FisRegH2D) };\n\n cmdfis.fis_type.write(FIS_TYPE_REG_H2D);\n cmdfis.pm.write(1 << 7);\n cmdfis.command.write(ATA_CMD_READ_DMA_EXT);\n\n cmdfis.lba0.write(lba as u8);\n cmdfis.lba1.write((lba >> 8) as u8);\n cmdfis.lba2.write((lba >> 16) as u8);\n\n cmdfis.device.write(1 << 6);\n\n cmdfis.lba3.write((lba >> 24) as u8);\n cmdfis.lba4.write((lba >> 32) as u8);\n cmdfis.lba5.write((lba >> 40) as u8);\n\n cmdfis.countl.write(sectors as u8);\n cmdfis.counth.write((sectors >> 8) as u8);\n\n debugln!(\"Busy Wait\");\n while self.tfd.readf((ATA_DEV_BUSY | ATA_DEV_DRQ) as u32) {}\n\n self.ci.write(1 << slot);\n\n debugln!(\"Completion Wait\");\n while self.ci.readf(1 << slot) {\n if self.is.readf(HBA_PxIS_TFES) {\n \t\t\tdebugln!(\"Read disk error\");\n \t\t\treturn false;\n \t\t}\n }\n\n debugln!(\"Return\");\n\n if self.is.readf(HBA_PxIS_TFES) {\n \t\t\tdebugln!(\"Read disk error\");\n \t\t\treturn false;\n \t\t}\n\n return true;\n }\n\n false\n }\n}\n\n#[repr(packed)]\npub struct HbaMem {\n pub cap: Mmio<u32>, \/\/ 0x00, Host capability\n pub ghc: Mmio<u32>, \/\/ 0x04, Global host control\n pub is: Mmio<u32>, \/\/ 0x08, Interrupt status\n pub pi: Mmio<u32>, \/\/ 0x0C, Port implemented\n pub vs: Mmio<u32>, \/\/ 0x10, Version\n pub ccc_ctl: Mmio<u32>, \/\/ 0x14, Command completion coalescing control\n pub ccc_pts: Mmio<u32>, \/\/ 0x18, Command completion coalescing ports\n pub em_loc: Mmio<u32>, \/\/ 0x1C, Enclosure management location\n pub em_ctl: Mmio<u32>, \/\/ 0x20, Enclosure management control\n pub cap2: Mmio<u32>, \/\/ 0x24, Host capabilities extended\n pub bohc: Mmio<u32>, \/\/ 0x28, BIOS\/OS handoff control and status\n pub rsv: [Mmio<u8>; 116], \/\/ 0x2C - 0x9F, Reserved\n pub vendor: [Mmio<u8>; 96], \/\/ 0xA0 - 0xFF, Vendor specific registers\n pub ports: [HbaPort; 32] \/\/ 0x100 - 0x10FF, Port control registers\n}\n\n#[repr(packed)]\nstruct HbaPrdtEntry {\n dba: Mmio<u64>,\t\t\/\/ Data base address\n rsv0: Mmio<u32>,\t\t\/\/ Reserved\n dbc: Mmio<u32>,\t\t\/\/ Byte count, 4M max, interrupt = 1\n}\n\n#[repr(packed)]\nstruct HbaCmdTable {\n\t\/\/ 0x00\n\tcfis: [Mmio<u8>; 64],\t\/\/ Command FIS\n\n\t\/\/ 0x40\n\tacmd: [Mmio<u8>; 16],\t\/\/ ATAPI command, 12 or 16 bytes\n\n\t\/\/ 0x50\n\trsv: [Mmio<u8>; 48],\t\/\/ Reserved\n\n\t\/\/ 0x80\n\tprdt_entry: [HbaPrdtEntry; 65536],\t\/\/ Physical region descriptor table entries, 0 ~ 65535\n}\n\n#[repr(packed)]\nstruct HbaCmdHeader {\n\t\/\/ DW0\n\tcfl: Mmio<u8>,\t\t \/\/ Command FIS length in DWORDS, 2 ~ 16, atapi: 4, write - host to device: 2, prefetchable: 1\n\tpm: Mmio<u8>,\t\t \/\/ Reset - 0x80, 
bist: 0x40, clear busy on ok: 0x20, port multiplier\n\n\tprdtl: Mmio<u16>,\t\t\/\/ Physical region descriptor table length in entries\n\n\t\/\/ DW1\n\tprdbc: Mmio<u32>,\t\t\/\/ Physical region descriptor byte count transferred\n\n\t\/\/ DW2, 3\n\tctba: Mmio<u64>,\t\t\/\/ Command table descriptor base address\n\n\t\/\/ DW4 - 7\n\trsv1: [Mmio<u32>; 4],\t\/\/ Reserved\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n unimplemented!()\n }\n\n \/\/\/ Get a task from a String. The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n unimplemented!()\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n unimplemented!()\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n unimplemented!()\n }\n\n \/\/\/ Retrieve a task from a String. 
The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n unimplemented!()\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type 
Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<commit_msg>Impl Task::get_from_import()<commit_after>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n unimplemented!()\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n unimplemented!()\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n unimplemented!()\n }\n\n \/\/\/ Retrieve a task from a String. 
The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n unimplemented!()\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type 
Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create calc.rs<commit_after>enum OpType{\n Add, \/\/ +\n Sub, \/\/ -\n Mul, \/\/ *\n Div, \/\/ \/\n Modulus, \/\/ %\n Pow, \/\/ ^\n}\n\nenum Element{\n Operator(OpType),\n Value(f32),\n Variable(String)\n}\n\nuse stack::List;\nuse calc::OpType::*;\nuse calc::Element::*;\nuse std::str;\n\nfn get_op_prioroty(op_ch: &char) -> i32{\n match *op_ch{\n '(' => 0,\n ')' => 1,\n '+' | '-' => 2,\n '*' | '\/' | '%' => 3,\n '^' => 4,\n _ => -1\n }\n}\n\nfn read_char(option: Option<char>) -> char{\n match option{\n Some(x) => x,\n None => panic!(\"Ohh, we got end of stack!\")\n }\n}\n\nfn get_polen_notation(expression: &String) -> Result<String, String>{\n let mut result = String::new();\n let mut stack = String::new();\n\n let mut operand = false;\n let mut prev_space = false;\n\n 'process: for c in expression.chars(){\n\n if prev_space && c != ' ' {\n result.push(' ');\n prev_space = false;\n }\n\n match c{\n '0'...'9' | '.' =>{\n result.push(c);\n operand = true;\n },\n '(' => {\n stack.push(c);},\n ')' => {\n loop{\n if(stack.is_empty()){\n return Err(\"Can't find closing parenthesis or unexpected delimiter.\".to_string());\n }\n\n let stack_char = read_char(stack.pop());\n\n if(stack_char != '('){\n result.push(stack_char);\n }else{\n break;\n }\n }\n },\n ' ' => {\n prev_space = true;\n },\n _ => {\n\n if !operand && c == '-' {\n operand = true;\n result.push(c);\n\n continue 'process;\n }\n\n let prior = get_op_prioroty(&c);\n if(prior != -1){\n operand = false;\n loop{\n if !stack.is_empty() {\n \/\/get last operator priority\n let stack_operator = &(stack.as_bytes()[stack.len() - 1] as char);\n if prior <= get_op_prioroty(stack_operator) {\n stack.pop();\n result.push(*stack_operator);\n }else{break;}\n }else{ \/\/return Err(\"Stack is empty\".to_string());\n break;\n }\n }\n stack.push(c);\n prev_space = true;\n }else{\n return Err(format!(\"Unexpected char: <{}>, expected operator\", c));\n }\n }\n }\n }\n\n while !stack.is_empty(){\n result.push(read_char(stack.pop()));\n }\n\n return Ok(result);\n}\n\nfn read_f32(option: Option<f32>) -> f32{\n match option{\n Some(x) => x,\n None => panic!(\"Ohh, we got end of stack!\")\n }\n}\n\nfn calc_expression(expr_stack: List<Element>) -> f32{\n let mut val_stack: List<f32> = List::new();\n\n for x in expr_stack.iter(){\n match x{\n &Operator(ref op) => {\n let val2 = read_f32(val_stack.pop());\n let val1 = read_f32(val_stack.pop());\n\n val_stack.push( match *op {\n Add => val1 + val2,\n Sub => val1 - val2,\n Mul => val1 * val2,\n Div => val1 \/ val2,\n Modulus => val1 % val2,\n Pow => val1.powf(val2)\n });\n },\n\n &Value(ref val) => {\n val_stack.push(*val);\n },\n\n &Variable(ref var) => {\n unimplemented!()\n }\n }\n }\n\n return read_f32(val_stack.pop());\n}\n\nfn get_f32_from_string(val: &String) -> f32{\n match val.parse::<f32>(){\n Ok(x) => x,\n Err(_) => 0.0\n }\n}\n\nfn get_element_by_string(token: &String) -> Element{\n match (token.as_bytes()[0] as char){\n '+' => Operator(Add),\n '-' => if token.len() > 1 {\n Value(get_f32_from_string(token))\n } else {\n Operator(Sub)\n },\n '*' => Operator(Mul),\n '\/' => Operator(Div),\n '%' => Operator(Modulus),\n '^' => Operator(Pow),\n _ => {\n Value(get_f32_from_string(token))\n }\n }\n}\n\n\nfn print_stack(stack: &List<Element>){\n for x in stack.iter(){\n print!(\"{} \",\n match *x{\n 
Operator(Add) => \"+\".to_string(),\n Operator(Sub) => \"-\".to_string(),\n Operator(Mul) => \"*\".to_string(),\n Operator(Div) => \"\/\".to_string(),\n Operator(Modulus) => \"%\".to_string(),\n Operator(Pow) => \"^\".to_string(),\n Value(val) => val.to_string(),\n Variable(_) => \"ITS VAR\".to_string()\n });\n }\n print!(\"\/n\");\n}\n\nfn string_to_list(str_expr: &String) -> List<Element>{\n let mut tmp: List<Element> = List::new();\n let mut token: String = String::new();\n\n let mut operand = false;\n\n 'tokenize: for ch in str_expr.chars(){\n match ch{\n '0'...'9' | '.' => {\n token.push(ch);\n operand = true;\n },\n '+'|'-'|'*'|'\/'|'%'|'^' => {\n\n if !operand && ch == '-'{\n token.push(ch);\n operand = true;\n\n continue 'tokenize;\n }\n\n if !token.is_empty(){\n tmp.push(get_element_by_string( &(token) ));\n token.clear();\n }\n\n token.push(ch);\n\n tmp.push(get_element_by_string( &(token) ));\n token.clear();\n\n operand = false;\n },\n ' ' => if !token.is_empty(){\n operand = false;\n tmp.push(get_element_by_string( &(token) ));\n token.clear();\n },\n _ => {println!(\"unexpected situation\")}\n }\n }\n\n print_stack(&tmp);\n\n let mut stack: List<Element> = List::new();\n\n while let Some(el) = tmp.pop(){\n stack.push(el);\n }\n\n return stack;\n}\n\npub fn calc(infix_str: &String) -> Result<f32, String>{\n let stack2;\n match get_polen_notation(infix_str) {\n Ok(x) => {\n stack2 = string_to_list(&x);\n Ok(calc_expression(stack2))\n },\n Err(x) => Err(x)\n }\n}\n\n#[cfg(test)]\nmod calc_test {\n use super::calc;\n\n #[test]\n fn calc_basics() {\n\n let tests_result_4 = vec![\n \"2+2\",\n \"2 +2\",\n \"2 + 2\",\n \" 2 + 2 \",\n \"(2+2)\",\n \"( 2+2)\",\n \" ( 2 +2)\",\n \"(2+ 2)\",\n \"(2+ 2 )\",\n \"(2+ 2 ) \",\n \"(((2+2)))\",\n \"(((2)) + 2)\",\n \"4-0\",\n \"8\/2\",\n \"8*0.5\",\n \"9.5 - 5.5\",\n \"9 % 5\",\n \"-4 + 8\",\n \"8 + -4\",\n \"-2 + 6\"\n ];\n\n for test in tests_result_4{\n match calc(&test.to_string()) {\n Ok(x) => assert_eq!(x, 4.0),\n Err(x) => {\n println!(\"FAILED TEST: {}, error: {} \\n\", test, x);\n assert!(false);\n }\n }\n }\n\n }\n\n #[test]\n fn calc_long_expression(){\n let tests_result_2648 = vec![\n \"55 * (3552 \/ 74) + 2^3\",\n \"55 * (3552 \/ (37 * 2)) + 2^3\",\n \"(54 + 1) * (3552 \/ (37 * 2)) + 2^3\",\n \"55 * (3552 \/ (147 % 74 + 1)) + 2^3\",\n \"55 * (3552 \/ 74) + 2^(2+1)\",\n \"55 * (3552 \/ 74) + 2^(4-1)\",\n \"55 * (3552 \/ 74) + 2^(0.5 * 6)\",\n \"55 * (3552 \/ 74) + 2^(6 * 0.5)\",\n \"2^3 + 55 \/ (1\/(3552 * (1\/74)))\",\n ];\n\n for test in tests_result_2648{\n match calc(&test.to_string()) {\n Ok(x) => assert_eq!(x, 2648.0),\n Err(x) => {\n println!(\"FAILED TEST: {}, error: {} \\n\", test, x);\n assert!(false);\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix comment for rustc 1.26<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove ununsed sourcefile<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement state block parsing.<commit_after>enum Error {\n WrongPassword,\n UnknownChecksumAlgorithm,\n UnknownCompressionAlgorithm,\n InvalidChecksum,\n Disk(disk::Error),\n}\n\nenum ChecksumAlgorithm {\n Constant,\n SeaHash,\n}\n\nenum CompressionAlgorithm {\n Identity,\n Lz4,\n}\n\nstruct StateBlock {\n checksum_algorithm: ChecksumAlgorithm,\n compression_algorithm: CompressionAlgorithm,\n freelist_head: clusters::Pointer,\n super_page: clusters::Pointer,\n}\n\nimpl StateBlock {\n fn new(buf: &[u8]) -> Result<(), Error> {\n if &buf[..4] != &[0, 0, 0, 0] {\n return 
Err(Error::WrongPassword);\n }\n\n let checksum_algorithm = match LittleEndian::read(buf[16..18]) {\n 0 => ChecksumAlgorithm::Constant,\n 1 => ChecksumAlgorithm::SeaHash,\n _ => return Err(Error::UnknownChecksumAlgorithm),\n };\n\n if checksum_algorithm.hash(&buf[..40]) != LittleEndian::read(&buf[40..44]) {\n return Err(Error::InvalidChecksum);\n }\n\n StateBlock {\n checksum_algorithm: checksum_algorithm,\n compression_algorithm: match LittleEndian::read(buf[18..20]) {\n 0 => CompressionAlgorithm::Identity,\n 1 => CompressionAlgorithm::Lz4,\n _ => return Err(Error::UnknownCompressionAlgorithm),\n },\n freelist_head: LittleEndian::read(buf[32..40]),\n super_page: LittleEndian::read(buf[40..48]),\n }\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix function types in ast.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unused functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Handle if command has already config arg<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added test for name mapping<commit_after>extern crate sourcemap;\n\nuse sourcemap::SourceMap;\n\n\n#[test]\nfn test_basic_name_mapping() {\n let input = r#\"{\"version\":3,\"file\":\"test.min.js\",\"sources\":[\"test.js\"],\"names\":[\"makeAFailure\",\"testingStuff\",\"Error\",\"onSuccess\",\"data\",\"onFailure\",\"invoke\",\"cb\",\"failed\",\"test\",\"value\"],\"mappings\":\"AAAA,GAAIA,cAAe,WACjB,QAASC,KACP,GAAIA,GAAe,EACnB,MAAM,IAAIC,OAAMD,GAGlB,QAASE,GAAUC,GACjBH,IAGF,QAASI,GAAUD,GACjB,KAAM,IAAIF,OAAM,WAGlB,QAASI,GAAOF,GACd,GAAIG,GAAK,IACT,IAAIH,EAAKI,OAAQ,CACfD,EAAKF,MACA,CACLE,EAAKJ,EAEPI,EAAGH,GAGL,QAASK,KACP,GAAIL,IAAQI,OAAQ,KAAME,MAAO,GACjCJ,GAAOF,GAGT,MAAOK\",\"sourcesContent\":[\"var makeAFailure = (function() {\\n function testingStuff() {\\n var testingStuff = 42;\\n throw new Error(testingStuff);\\n }\\n\\n function onSuccess(data) {\\n testingStuff();\\n }\\n\\n function onFailure(data) {\\n throw new Error('failed!');\\n }\\n\\n function invoke(data) {\\n var cb = null;\\n if (data.failed) {\\n cb = onFailure;\\n } else {\\n cb = onSuccess;\\n }\\n cb(data);\\n }\\n\\n function test() {\\n var data = {failed: true, value: 42};\\n invoke(data);\\n }\\n\\n return test;\\n})();\\n\"]}\"#.as_bytes();\n let minified_file = r#\"var makeAFailure=function(){function n(){var n=42;throw new Error(n)}function r(r){n()}function e(n){throw new Error(\"failed!\")}function i(n){var i=null;if(n.failed){i=e}else{i=r}i(n)}function u(){var n={failed:true,value:42};i(n)}return u}();\"#;\n let sm = SourceMap::from_reader(input).unwrap();\n\n let tok = sm.lookup_token(0, 45).unwrap();\n assert_eq!(tok.get_name(), Some(\"testingStuff\"));\n assert_eq!(tok.get_minified_name(minified_file), Some(\"n\"));\n\n let tok = sm.lookup_token(0, 66).unwrap();\n assert_eq!(tok.get_name(), Some(\"testingStuff\"));\n assert_eq!(tok.get_minified_name(minified_file), Some(\"n\"));\n\n let tok = sm.lookup_token(0, 96).unwrap();\n assert_eq!(tok.get_name(), Some(\"onFailure\"));\n assert_eq!(tok.get_minified_name(minified_file), Some(\"e\"));\n\n let name = sm.map_minified_name(0, 107, \"e\", minified_file);\n assert_eq!(name, Some(\"onFailure\"));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>examples: add npm_bar example<commit_after>extern crate pbr;\nuse pbr::ProgressBar;\nuse std::thread;\nuse std::time::Duration;\n\nfn main() {\n let count = 30;\n let mut pb = ProgressBar::new(count * 10);\n pb.tick_format(\"\\\\|\/-\");\n pb.format(\"|#--|\");\n pb.show_tick = 
true;\n pb.show_speed = false;\n pb.show_percent = false;\n pb.show_counter = false;\n pb.show_time_left = false;\n pb.inc();\n for _ in 0..count {\n for _ in 0..10 {\n pb.message(\"normalize -> thing \");\n thread::sleep(Duration::from_millis(80));\n pb.tick();\n }\n for _ in 0..10 {\n pb.message(\"fuzz -> tree \");\n thread::sleep(Duration::from_millis(80));\n pb.inc();\n }\n }\n pb.finish_println(\"done!\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if 
build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries here, e.g. we just want a few components and a few\n \/\/ tools. Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") ||\n target.contains(\"freebsd\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let arch = if arch.starts_with(\"arm\") && target.contains(\"eabihf\") {\n \"armhf\"\n } else {\n arch\n };\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n \/\/ inform about c\/c++ compilers, the c++ compiler isn't actually used but\n \/\/ it's needed to get the initial configure to work on all platforms.\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\n<commit_msg>rustbuild: Fix cross to netbsd from Linux<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries 
here, e.g. we just want a few components and a few\n \/\/ tools. Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") ||\n target.contains(\"freebsd\") ||\n target.contains(\"netbsd\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let arch = if arch.starts_with(\"arm\") && target.contains(\"eabihf\") {\n \"armhf\"\n } else {\n arch\n };\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n \/\/ inform about c\/c++ compilers, the c++ compiler isn't actually used but\n \/\/ it's needed to get the initial configure to work on all platforms.\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2391<commit_after>\/\/ https:\/\/leetcode.com\/problems\/minimum-amount-of-time-to-collect-garbage\/\npub fn garbage_collection(garbage: Vec<String>, travel: Vec<i32>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\n \"{}\",\n garbage_collection(\n vec![\n \"G\".to_string(),\n \"P\".to_string(),\n \"GP\".to_string(),\n \"GG\".to_string()\n ],\n vec![2, 4, 3]\n )\n ); \/\/ 21\n println!(\n \"{}\",\n garbage_collection(\n vec![\"MMM\".to_string(), \"PGM\".to_string(), \"GP\".to_string()],\n vec![3, 10]\n )\n ); \/\/ 37\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Skeleton for problem 2404<commit_after>\/\/ https:\/\/leetcode.com\/problems\/most-frequent-even-element\/\npub fn most_frequent_even(nums: Vec<i32>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", most_frequent_even(vec![0, 1, 2, 2, 4, 4, 1])); \/\/ 2\n println!(\"{}\", most_frequent_even(vec![4, 4, 4, 9, 2, 4])); \/\/ 4\n println!(\n \"{}\",\n most_frequent_even(vec![29, 47, 21, 41, 13, 37, 25, 7])\n ); \/\/ -1\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Unix-specific extensions to primitives in the `std::fs` module.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs::{self, Permissions, OpenOptions};\nuse io;\nuse libc;\nuse path::Path;\nuse sys;\nuse sys_common::{FromInner, AsInner, AsInnerMut};\nuse sys::platform::fs::MetadataExt as UnixMetadataExt;\n\n\/\/\/ Unix-specific extensions to `Permissions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait PermissionsExt {\n \/\/\/ Returns the underlying raw `mode_t` bits that are the standard Unix\n \/\/\/ permissions for this file.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n\n \/\/\/ Sets the underlying raw bits for this set of permissions.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn set_mode(&mut self, mode: u32);\n\n \/\/\/ Creates a new instance of `Permissions` from the given set of Unix\n \/\/\/ permission bits.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn from_mode(mode: u32) -> Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl PermissionsExt for Permissions {\n fn mode(&self) -> u32 {\n self.as_inner().mode()\n }\n\n fn set_mode(&mut self, mode: u32) {\n *self = Permissions::from_inner(FromInner::from_inner(mode));\n }\n\n fn from_mode(mode: u32) -> Permissions {\n Permissions::from_inner(FromInner::from_inner(mode))\n }\n}\n\n\/\/\/ Unix-specific extensions to `OpenOptions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait OpenOptionsExt {\n \/\/\/ Sets the mode bits that a new file will be created with.\n \/\/\/\n \/\/\/ If a new file is created as part of a `File::open_opts` call then this\n \/\/\/ specified `mode` will be used as the permission bits for the new file.\n \/\/\/ If no `mode` is set, the default of `0o666` will be used.\n \/\/\/ The operating system masks out bits with the systems `umask`, to produce\n \/\/\/ the final permissions.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n\n \/\/\/ Pass custom flags to the `flags` agument of `open`.\n \/\/\/\n \/\/\/ The bits that define the access mode are masked out with `O_ACCMODE`, to\n \/\/\/ ensure they do not interfere with the access mode set by Rusts options.\n \/\/\/\n \/\/\/ Custom flags can only set flags, not remove flags set by Rusts options.\n \/\/\/ This options overwrites any previously set custom flags.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use 
std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.write(true);\n \/\/\/ if cfg!(unix) {\n \/\/\/ options.custom_flags(libc::O_NOFOLLOW);\n \/\/\/ }\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"open_options_ext\", since = \"1.10.0\")]\n fn custom_flags(&mut self, flags: i32) -> &mut Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl OpenOptionsExt for OpenOptions {\n fn mode(&mut self, mode: u32) -> &mut OpenOptions {\n self.as_inner_mut().mode(mode); self\n }\n\n fn custom_flags(&mut self, flags: i32) -> &mut OpenOptions {\n self.as_inner_mut().custom_flags(flags); self\n }\n}\n\n\/\/ Hm, why are there casts here to the returned type, shouldn't the types always\n\/\/ be the same? Right you are! Turns out, however, on android at least the types\n\/\/ in the raw `stat` structure are not the same as the types being returned. Who\n\/\/ knew!\n\/\/\n\/\/ As a result to make sure this compiles for all platforms we do the manual\n\/\/ casts and rely on manual lowering to `stat` if the raw type is desired.\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\npub trait MetadataExt {\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn dev(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ino(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn nlink(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn uid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn gid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn rdev(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn size(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn blksize(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn blocks(&self) -> u64;\n}\n\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\nimpl MetadataExt for fs::Metadata {\n fn dev(&self) -> u64 { self.st_dev() }\n fn ino(&self) -> u64 { self.st_ino() }\n fn mode(&self) -> u32 { self.st_mode() }\n fn nlink(&self) -> u64 { self.st_nlink() }\n fn uid(&self) -> u32 { self.st_uid() }\n fn gid(&self) -> u32 { self.st_gid() }\n fn rdev(&self) -> u64 { self.st_rdev() }\n fn size(&self) -> u64 { self.st_size() }\n fn atime(&self) -> i64 { self.st_atime() }\n fn atime_nsec(&self) -> i64 { self.st_atime_nsec() }\n fn mtime(&self) -> i64 { self.st_mtime() }\n fn mtime_nsec(&self) -> i64 { self.st_mtime_nsec() }\n fn ctime(&self) -> i64 { self.st_ctime() }\n fn ctime_nsec(&self) -> i64 { self.st_ctime_nsec() }\n fn blksize(&self) -> u64 { self.st_blksize() }\n fn blocks(&self) -> u64 { self.st_blocks() }\n}\n\n\/\/\/ Add special unix types (block\/char device, fifo and socket)\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\npub 
trait FileTypeExt {\n \/\/\/ Returns whether this file type is a block device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_block_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a char device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_char_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a fifo.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_fifo(&self) -> bool;\n \/\/\/ Returns whether this file type is a socket.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_socket(&self) -> bool;\n}\n\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\nimpl FileTypeExt for fs::FileType {\n fn is_block_device(&self) -> bool { self.as_inner().is(libc::S_IFBLK) }\n fn is_char_device(&self) -> bool { self.as_inner().is(libc::S_IFCHR) }\n fn is_fifo(&self) -> bool { self.as_inner().is(libc::S_IFIFO) }\n fn is_socket(&self) -> bool { self.as_inner().is(libc::S_IFSOCK) }\n}\n\n\/\/\/ Unix-specific extension methods for `fs::DirEntry`\n#[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\npub trait DirEntryExt {\n \/\/\/ Returns the underlying `d_ino` field in the contained `dirent`\n \/\/\/ structure.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::fs;\n \/\/\/ use std::os::unix::fs::DirEntryExt;\n \/\/\/\n \/\/\/ if let Ok(entries) = fs::read_dir(\".\") {\n \/\/\/ for entry in entries {\n \/\/\/ if let Ok(entry) = entry {\n \/\/\/ \/\/ Here, `entry` is a `DirEntry`.\n \/\/\/ println!(\"{:?}: {}\", entry.file_name(), entry.ino());\n \/\/\/ }\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n #[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\n fn ino(&self) -> u64;\n}\n\n#[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\nimpl DirEntryExt for fs::DirEntry {\n fn ino(&self) -> u64 { self.as_inner().ino() }\n}\n\n\/\/\/ Creates a new symbolic link on the filesystem.\n\/\/\/\n\/\/\/ The `dst` path will be a symbolic link pointing to the `src` path.\n\/\/\/\n\/\/\/ # Note\n\/\/\/\n\/\/\/ On Windows, you must specify whether a symbolic link points to a file\n\/\/\/ or directory. Use `os::windows::fs::symlink_file` to create a\n\/\/\/ symbolic link to a file, or `os::windows::fs::symlink_dir` to create a\n\/\/\/ symbolic link to a directory. Additionally, the process must have\n\/\/\/ `SeCreateSymbolicLinkPrivilege` in order to be able to create a\n\/\/\/ symbolic link.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::os::unix::fs;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ try!(fs::symlink(\"a.txt\", \"b.txt\"));\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"symlink\", since = \"1.1.0\")]\npub fn symlink<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()>\n{\n sys::fs::symlink(src.as_ref(), dst.as_ref())\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n\/\/\/ An extension trait for `fs::DirBuilder` for unix-specific options.\npub trait DirBuilderExt {\n \/\/\/ Sets the mode to create new directories with. 
This option defaults to\n \/\/\/ 0o777.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ use std::fs::DirBuilder;\n \/\/\/ use std::os::unix::fs::DirBuilderExt;\n \/\/\/\n \/\/\/ let mut builder = DirBuilder::new();\n \/\/\/ builder.mode(0o755);\n \/\/\/ ```\n #[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\nimpl DirBuilderExt for fs::DirBuilder {\n fn mode(&mut self, mode: u32) -> &mut fs::DirBuilder {\n self.as_inner_mut().set_mode(mode);\n self\n }\n}\n<commit_msg>Add doc examples for std::fs::unix::OpenOptionsExt<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Unix-specific extensions to primitives in the `std::fs` module.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs::{self, Permissions, OpenOptions};\nuse io;\nuse libc;\nuse path::Path;\nuse sys;\nuse sys_common::{FromInner, AsInner, AsInnerMut};\nuse sys::platform::fs::MetadataExt as UnixMetadataExt;\n\n\/\/\/ Unix-specific extensions to `Permissions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait PermissionsExt {\n \/\/\/ Returns the underlying raw `mode_t` bits that are the standard Unix\n \/\/\/ permissions for this file.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n\n \/\/\/ Sets the underlying raw bits for this set of permissions.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn set_mode(&mut self, mode: u32);\n\n \/\/\/ Creates a new instance of `Permissions` from the given set of Unix\n \/\/\/ permission bits.\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn from_mode(mode: u32) -> Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl PermissionsExt for Permissions {\n fn mode(&self) -> u32 {\n self.as_inner().mode()\n }\n\n fn set_mode(&mut self, mode: u32) {\n *self = Permissions::from_inner(FromInner::from_inner(mode));\n }\n\n fn from_mode(mode: u32) -> Permissions {\n Permissions::from_inner(FromInner::from_inner(mode))\n }\n}\n\n\/\/\/ Unix-specific extensions to `OpenOptions`\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\npub trait OpenOptionsExt {\n \/\/\/ Sets the mode bits that a new file will be created with.\n \/\/\/\n \/\/\/ If a new file is created as part of a `File::open_opts` call then this\n \/\/\/ specified `mode` will be used as the permission bits for the new file.\n \/\/\/ If no `mode` is set, the default of `0o666` will be used.\n \/\/\/ The operating system masks out bits with the systems `umask`, to produce\n \/\/\/ the final permissions.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.mode(0o644); \/\/ Give read\/write for owner and read for others.\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"fs_ext\", since = \"1.1.0\")]\n fn mode(&mut self, mode: u32) -> &mut 
Self;\n\n \/\/\/ Pass custom flags to the `flags` agument of `open`.\n \/\/\/\n \/\/\/ The bits that define the access mode are masked out with `O_ACCMODE`, to\n \/\/\/ ensure they do not interfere with the access mode set by Rusts options.\n \/\/\/\n \/\/\/ Custom flags can only set flags, not remove flags set by Rusts options.\n \/\/\/ This options overwrites any previously set custom flags.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust,ignore\n \/\/\/ extern crate libc;\n \/\/\/ use std::fs::OpenOptions;\n \/\/\/ use std::os::unix::fs::OpenOptionsExt;\n \/\/\/\n \/\/\/ let mut options = OpenOptions::new();\n \/\/\/ options.write(true);\n \/\/\/ if cfg!(unix) {\n \/\/\/ options.custom_flags(libc::O_NOFOLLOW);\n \/\/\/ }\n \/\/\/ let file = options.open(\"foo.txt\");\n \/\/\/ ```\n #[stable(feature = \"open_options_ext\", since = \"1.10.0\")]\n fn custom_flags(&mut self, flags: i32) -> &mut Self;\n}\n\n#[stable(feature = \"fs_ext\", since = \"1.1.0\")]\nimpl OpenOptionsExt for OpenOptions {\n fn mode(&mut self, mode: u32) -> &mut OpenOptions {\n self.as_inner_mut().mode(mode); self\n }\n\n fn custom_flags(&mut self, flags: i32) -> &mut OpenOptions {\n self.as_inner_mut().custom_flags(flags); self\n }\n}\n\n\/\/ Hm, why are there casts here to the returned type, shouldn't the types always\n\/\/ be the same? Right you are! Turns out, however, on android at least the types\n\/\/ in the raw `stat` structure are not the same as the types being returned. Who\n\/\/ knew!\n\/\/\n\/\/ As a result to make sure this compiles for all platforms we do the manual\n\/\/ casts and rely on manual lowering to `stat` if the raw type is desired.\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\npub trait MetadataExt {\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn dev(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ino(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mode(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn nlink(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn uid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn gid(&self) -> u32;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn rdev(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn size(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn atime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn mtime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn ctime_nsec(&self) -> i64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn blksize(&self) -> u64;\n #[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\n fn blocks(&self) -> u64;\n}\n\n#[stable(feature = \"metadata_ext\", since = \"1.1.0\")]\nimpl MetadataExt for fs::Metadata {\n fn dev(&self) -> u64 { self.st_dev() }\n fn ino(&self) -> u64 { self.st_ino() }\n fn mode(&self) -> u32 { self.st_mode() }\n fn nlink(&self) -> u64 { self.st_nlink() }\n fn uid(&self) -> u32 { self.st_uid() }\n fn gid(&self) -> u32 { self.st_gid() }\n fn rdev(&self) -> u64 { self.st_rdev() }\n fn size(&self) -> u64 { self.st_size() }\n 
fn atime(&self) -> i64 { self.st_atime() }\n fn atime_nsec(&self) -> i64 { self.st_atime_nsec() }\n fn mtime(&self) -> i64 { self.st_mtime() }\n fn mtime_nsec(&self) -> i64 { self.st_mtime_nsec() }\n fn ctime(&self) -> i64 { self.st_ctime() }\n fn ctime_nsec(&self) -> i64 { self.st_ctime_nsec() }\n fn blksize(&self) -> u64 { self.st_blksize() }\n fn blocks(&self) -> u64 { self.st_blocks() }\n}\n\n\/\/\/ Add special unix types (block\/char device, fifo and socket)\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\npub trait FileTypeExt {\n \/\/\/ Returns whether this file type is a block device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_block_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a char device.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_char_device(&self) -> bool;\n \/\/\/ Returns whether this file type is a fifo.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_fifo(&self) -> bool;\n \/\/\/ Returns whether this file type is a socket.\n #[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\n fn is_socket(&self) -> bool;\n}\n\n#[stable(feature = \"file_type_ext\", since = \"1.5.0\")]\nimpl FileTypeExt for fs::FileType {\n fn is_block_device(&self) -> bool { self.as_inner().is(libc::S_IFBLK) }\n fn is_char_device(&self) -> bool { self.as_inner().is(libc::S_IFCHR) }\n fn is_fifo(&self) -> bool { self.as_inner().is(libc::S_IFIFO) }\n fn is_socket(&self) -> bool { self.as_inner().is(libc::S_IFSOCK) }\n}\n\n\/\/\/ Unix-specific extension methods for `fs::DirEntry`\n#[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\npub trait DirEntryExt {\n \/\/\/ Returns the underlying `d_ino` field in the contained `dirent`\n \/\/\/ structure.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::fs;\n \/\/\/ use std::os::unix::fs::DirEntryExt;\n \/\/\/\n \/\/\/ if let Ok(entries) = fs::read_dir(\".\") {\n \/\/\/ for entry in entries {\n \/\/\/ if let Ok(entry) = entry {\n \/\/\/ \/\/ Here, `entry` is a `DirEntry`.\n \/\/\/ println!(\"{:?}: {}\", entry.file_name(), entry.ino());\n \/\/\/ }\n \/\/\/ }\n \/\/\/ }\n \/\/\/ ```\n #[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\n fn ino(&self) -> u64;\n}\n\n#[stable(feature = \"dir_entry_ext\", since = \"1.1.0\")]\nimpl DirEntryExt for fs::DirEntry {\n fn ino(&self) -> u64 { self.as_inner().ino() }\n}\n\n\/\/\/ Creates a new symbolic link on the filesystem.\n\/\/\/\n\/\/\/ The `dst` path will be a symbolic link pointing to the `src` path.\n\/\/\/\n\/\/\/ # Note\n\/\/\/\n\/\/\/ On Windows, you must specify whether a symbolic link points to a file\n\/\/\/ or directory. Use `os::windows::fs::symlink_file` to create a\n\/\/\/ symbolic link to a file, or `os::windows::fs::symlink_dir` to create a\n\/\/\/ symbolic link to a directory. 
Additionally, the process must have\n\/\/\/ `SeCreateSymbolicLinkPrivilege` in order to be able to create a\n\/\/\/ symbolic link.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::os::unix::fs;\n\/\/\/\n\/\/\/ # fn foo() -> std::io::Result<()> {\n\/\/\/ try!(fs::symlink(\"a.txt\", \"b.txt\"));\n\/\/\/ # Ok(())\n\/\/\/ # }\n\/\/\/ ```\n#[stable(feature = \"symlink\", since = \"1.1.0\")]\npub fn symlink<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<()>\n{\n sys::fs::symlink(src.as_ref(), dst.as_ref())\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n\/\/\/ An extension trait for `fs::DirBuilder` for unix-specific options.\npub trait DirBuilderExt {\n \/\/\/ Sets the mode to create new directories with. This option defaults to\n \/\/\/ 0o777.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```ignore\n \/\/\/ use std::fs::DirBuilder;\n \/\/\/ use std::os::unix::fs::DirBuilderExt;\n \/\/\/\n \/\/\/ let mut builder = DirBuilder::new();\n \/\/\/ builder.mode(0o755);\n \/\/\/ ```\n #[stable(feature = \"dir_builder\", since = \"1.6.0\")]\n fn mode(&mut self, mode: u32) -> &mut Self;\n}\n\n#[stable(feature = \"dir_builder\", since = \"1.6.0\")]\nimpl DirBuilderExt for fs::DirBuilder {\n fn mode(&mut self, mode: u32) -> &mut fs::DirBuilder {\n self.as_inner_mut().set_mode(mode);\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #47<commit_after>extern mod euler;\nuse euler::prime::{ Prime, factors };\n\nfn num_factors(n: uint, ps: &Prime) -> uint {\n let mut cnt = 0;\n for factors(n, ps) |_f| { cnt += 1; }\n return cnt;\n}\n\nfn main() {\n let ps = Prime();\n let mut cnt = 0;\n let len = 4;\n let num_factor = 4;\n let mut n = 1;\n loop {\n if num_factors(n, &ps) == num_factor {\n cnt += 1;\n } else {\n cnt = 0;\n }\n if cnt == len {\n io::println(fmt!(\"answer: %u\", n + 1 - len));\n break;\n }\n n += 1;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Integration test for Budgets.<commit_after>#![cfg(feature = \"budgets\")]\n\nextern crate rusoto_core;\nextern crate rusoto_budgets;\n\nuse rusoto_budgets::{Budgets, BudgetsClient, DescribeBudgetsRequest};\nuse rusoto_core::{DefaultCredentialsProvider, Region, default_tls_client};\n\n\/\/ Switch to DescribeReportDefinitions when botocore is updated?\n#[test]\nfn should_describe_budgets() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = BudgetsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);\n \/\/ This request needs the accountId set:\n let request = DescribeBudgetsRequest::default();\n\n let response = client.describe_budgets(&request).unwrap();\n println!(\"response: {:?}\", response);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example that calculate Lyapunov exponents of Lorenz 63 model<commit_after>\nextern crate ndarray;\nextern crate ndarray_odeint;\nextern crate ndarray_linalg;\nextern crate itertools;\n\nuse ndarray::*;\nuse ndarray_linalg::prelude::*;\nuse ndarray_odeint::prelude::*;\nuse ndarray_odeint::lyapunov::*;\nuse itertools::iterate;\n\nfn main() {\n let dt = 0.01;\n let eom = Lorenz63::default();\n let teo = explicit::rk4(eom, dt);\n let ts = iterate(rcarr1(&[1.0, 0.0, 0.0]), |y| teo.iterate(y.clone()));\n let qr_series = ts.scan(Array::eye(3), |st, x| {\n let j = teo.jacobian(x.clone(), 1e-7);\n let (q_next, r) = j.dot(st).qr().unwrap();\n let q = std::mem::replace(st, q_next);\n Some((q, r))\n });\n let duration = 100000;\n let mut l = arr1(&[0.0, 0.0, 0.0]);\n for (_, r) in 
qr_series.skip(duration \/ 10).take(duration) {\n l = l + r.diag().map(|x| x.abs().ln());\n }\n println!(\"{:?}\", l \/ (dt * duration as f64));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #23281<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\npub struct Struct;\n\nimpl Struct {\n pub fn function(funs: Vec<Fn() -> ()>) {}\n \/\/~^ ERROR the trait bound `std::ops::Fn() + 'static: std::marker::Sized` is not satisfied\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #86796 - JohnTitor:test-70703, r=jonas-schievink<commit_after>\/\/ check-pass\n\ntrait Factory {\n type Product;\n}\n\nimpl Factory for () {\n type Product = ();\n}\n\ntrait ProductConsumer<P> {\n fn consume(self, product: P);\n}\n\nimpl<P> ProductConsumer<P> for () {\n fn consume(self, _: P) {}\n}\n\nfn make_product_consumer<F: Factory>(_: F) -> impl ProductConsumer<F::Product> {\n ()\n}\n\nfn main() {\n let consumer = make_product_consumer(());\n consumer.consume(());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Documentation of command<commit_after><|endoftext|>"} {"text":"<commit_before>\/*\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *\/\n\nuse common::{Location, WithLocation};\nuse graphql_ir::{\n FragmentSpread, InlineFragment, LinkedField, Program, ScalarField, Selection, Transformed,\n TransformedValue, Transformer,\n};\n\nuse interner::{Intern, StringKey};\nuse schema::{FieldID, InterfaceID, ObjectID, Type};\nuse std::collections::HashMap;\nuse std::sync::Arc;\n\/\/\/\n\/\/\/ A transform that adds an `id` field on any type that has an id field but\n\/\/\/ where there is no unaliased `id` selection.\n\/\/\/\npub fn generate_id_field<'s>(program: &Program<'s>) -> Program<'s> {\n let mut transform = GenerateIDFieldTransform::new(program);\n transform\n .transform_program(program)\n .replace_or_else(|| program.clone())\n}\n\nstruct GenerateIDFieldTransform<'s> {\n program: &'s Program<'s>,\n id_name: StringKey,\n node_interface_id: InterfaceID,\n cache: HashMap<Type, Option<FieldID>>,\n node_id_field_id: FieldID,\n}\n\nimpl<'s> Transformer for GenerateIDFieldTransform<'s> {\n const NAME: &'static str = \"GenerateIDFieldTransform\";\n const VISIT_ARGUMENTS: bool = false;\n const VISIT_DIRECTIVES: bool = false;\n\n fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed<Arc<LinkedField>> {\n let selections = self.transform_selections(&field.selections);\n\n let next_selections = if self.has_unaliased_id_field(&field.selections) {\n selections\n } else {\n let schema = self.program.schema();\n let type_ = self\n .program\n .schema()\n .field(field.definition.item)\n .type_\n .inner();\n match type_ {\n Type::Object(id) => {\n let object = schema.object(id);\n if let Some(id_field_id) = self.get_id_field_id(type_, &object.fields) {\n let mut next_selections =\n selections.replace_or_else(|| 
field.selections.clone());\n next_selections.push(Selection::ScalarField(\n self.create_id_field(field.definition.location, id_field_id),\n ));\n TransformedValue::Replace(next_selections)\n } else {\n selections\n }\n }\n Type::Interface(id) => {\n let interface = schema.interface(id);\n if let Some(id_field_id) = self.get_id_field_id(type_, &interface.fields) {\n let mut next_selections =\n selections.replace_or_else(|| field.selections.clone());\n next_selections.push(Selection::ScalarField(\n self.create_id_field(field.definition.location, id_field_id),\n ));\n TransformedValue::Replace(next_selections)\n } else {\n let mut inline_fragments = self.create_id_inline_fragments(\n field.definition.location,\n &interface.implementors,\n );\n if inline_fragments.is_empty() {\n selections\n } else {\n if let TransformedValue::Replace(selections) = selections {\n inline_fragments.extend(selections.into_iter())\n } else {\n inline_fragments.extend(field.selections.iter().cloned());\n }\n TransformedValue::Replace(inline_fragments)\n }\n }\n }\n Type::Union(id) => {\n let union = schema.union(id);\n let mut inline_fragments =\n self.create_id_inline_fragments(field.definition.location, &union.members);\n\n if inline_fragments.is_empty() {\n selections\n } else {\n if let TransformedValue::Replace(selections) = selections {\n inline_fragments.extend(selections.into_iter())\n } else {\n inline_fragments.extend(field.selections.iter().cloned());\n }\n TransformedValue::Replace(inline_fragments)\n }\n }\n _ => selections,\n }\n };\n match next_selections {\n TransformedValue::Keep => Transformed::Keep,\n TransformedValue::Replace(selections) => Transformed::Replace(Arc::new(LinkedField {\n alias: field.alias,\n definition: field.definition,\n arguments: field.arguments.clone(),\n directives: field.directives.clone(),\n selections,\n })),\n }\n }\n\n fn transform_fragment_spread(\n &mut self,\n _spread: &FragmentSpread,\n ) -> Transformed<Arc<FragmentSpread>> {\n Transformed::Keep\n }\n\n fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Arc<ScalarField>> {\n Transformed::Keep\n }\n}\n\nimpl<'s> GenerateIDFieldTransform<'s> {\n fn new(program: &'s Program<'s>) -> Self {\n let schema = program.schema();\n let node_interface_id = match schema.get_type(\"Node\".intern()) {\n Some(Type::Interface(id)) => id,\n _ => panic!(\"Expected the schema to contain an interface named `Node`.\"),\n };\n let node_interface = schema.interface(node_interface_id);\n let id_key = \"id\".intern();\n let node_id_field_id = *node_interface\n .fields\n .iter()\n .find(|&&id| schema.field(id).name == id_key)\n .expect(\"Expected `Node` to contain a field named `id`.\");\n Self {\n program,\n id_name: schema.field(node_id_field_id).name,\n node_interface_id,\n cache: Default::default(),\n node_id_field_id,\n }\n }\n\n fn has_unaliased_id_field(&self, selections: &[Selection]) -> bool {\n selections.iter().any(|x| match x {\n Selection::ScalarField(child) => {\n child.alias.is_none()\n && self.program.schema().field(child.definition.item).name == self.id_name\n }\n _ => false,\n })\n }\n\n fn get_id_field_id(&mut self, type_: Type, fields: &[FieldID]) -> Option<FieldID> {\n match self.cache.get(&type_) {\n Some(result) => *result,\n None => {\n for id in fields {\n let field = self.program.schema().field(*id);\n if field.name == self.id_name\n && self.program.schema().is_id(field.type_.inner())\n {\n let result = Some(*id);\n self.cache.insert(type_, result);\n return result;\n }\n }\n 
self.cache.insert(type_, None);\n None\n }\n }\n }\n\n \/\/\/ For interfaces and unions: generate a `... on Node { id }`\n \/\/\/ fragment if *any* concrete type implements Node. Then generate a\n \/\/\/ `... on PossibleType { id }` for every concrete type that does *not*\n \/\/\/ implement `Node`\n fn create_id_inline_fragments(\n &mut self,\n location: Location,\n concrete_ids: &[ObjectID],\n ) -> Vec<Selection> {\n let mut next_selections = vec![];\n let mut should_generate_node = false;\n\n for object_id in concrete_ids {\n let object = self.program.schema().object(*object_id);\n let implements_node = object\n .interfaces\n .iter()\n .any(|interface_id| interface_id == &self.node_interface_id);\n if implements_node {\n should_generate_node = true;\n } else if let Some(id_field_id) =\n self.get_id_field_id(Type::Object(*object_id), &object.fields)\n {\n next_selections.push(Selection::InlineFragment(self.create_inline_id_fragment(\n location,\n Type::Object(*object_id),\n id_field_id,\n )));\n }\n }\n\n if should_generate_node {\n next_selections.push(Selection::InlineFragment(self.create_inline_id_fragment(\n location,\n Type::Interface(self.node_interface_id),\n self.node_id_field_id,\n )));\n }\n\n next_selections\n }\n\n fn create_id_field(&self, location: Location, id_field_id: FieldID) -> Arc<ScalarField> {\n Arc::new(ScalarField {\n alias: None,\n definition: WithLocation::new(location, id_field_id),\n arguments: Default::default(),\n directives: Default::default(),\n })\n }\n\n fn create_inline_id_fragment(\n &self,\n location: Location,\n type_: Type,\n id_field_id: FieldID,\n ) -> Arc<InlineFragment> {\n Arc::new(InlineFragment {\n type_condition: Some(type_),\n directives: Default::default(),\n selections: vec![Selection::ScalarField(\n self.create_id_field(location, id_field_id),\n )],\n })\n }\n}\n<commit_msg>small refactor in generate_id_transform<commit_after>\/*\n * Copyright (c) Facebook, Inc. 
and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *\/\n\nuse common::{Location, WithLocation};\nuse graphql_ir::{\n FragmentSpread, InlineFragment, LinkedField, Program, ScalarField, Selection, Transformed,\n TransformedValue, Transformer,\n};\n\nuse interner::{Intern, StringKey};\nuse schema::{FieldID, InterfaceID, ObjectID, Type};\nuse std::collections::HashMap;\nuse std::sync::Arc;\n\/\/\/\n\/\/\/ A transform that adds an `id` field on any type that has an id field but\n\/\/\/ where there is no unaliased `id` selection.\n\/\/\/\npub fn generate_id_field<'s>(program: &Program<'s>) -> Program<'s> {\n let mut transform = GenerateIDFieldTransform::new(program);\n transform\n .transform_program(program)\n .replace_or_else(|| program.clone())\n}\n\nstruct GenerateIDFieldTransform<'s> {\n program: &'s Program<'s>,\n id_name: StringKey,\n node_interface_id: InterfaceID,\n cache: HashMap<Type, Option<FieldID>>,\n node_id_field_id: FieldID,\n}\n\nimpl<'s> Transformer for GenerateIDFieldTransform<'s> {\n const NAME: &'static str = \"GenerateIDFieldTransform\";\n const VISIT_ARGUMENTS: bool = false;\n const VISIT_DIRECTIVES: bool = false;\n\n fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed<Arc<LinkedField>> {\n let selections = self.transform_selections(&field.selections);\n\n let next_selections = if self.has_unaliased_id_field(&field.selections) {\n selections\n } else {\n let schema = self.program.schema();\n let type_ = self\n .program\n .schema()\n .field(field.definition.item)\n .type_\n .inner();\n match type_ {\n Type::Object(id) => {\n let object = schema.object(id);\n if let Some(id_field_id) = self.get_id_field_id(type_, &object.fields) {\n let mut next_selections =\n selections.replace_or_else(|| field.selections.clone());\n next_selections\n .push(self.create_id_selection(field.definition.location, id_field_id));\n TransformedValue::Replace(next_selections)\n } else {\n selections\n }\n }\n Type::Interface(id) => {\n let interface = schema.interface(id);\n if let Some(id_field_id) = self.get_id_field_id(type_, &interface.fields) {\n let mut next_selections =\n selections.replace_or_else(|| field.selections.clone());\n next_selections\n .push(self.create_id_selection(field.definition.location, id_field_id));\n TransformedValue::Replace(next_selections)\n } else {\n let mut inline_fragments = self.create_id_inline_fragments(\n field.definition.location,\n &interface.implementors,\n );\n if inline_fragments.is_empty() {\n selections\n } else {\n if let TransformedValue::Replace(selections) = selections {\n inline_fragments.extend(selections.into_iter())\n } else {\n inline_fragments.extend(field.selections.iter().cloned());\n }\n TransformedValue::Replace(inline_fragments)\n }\n }\n }\n Type::Union(id) => {\n let union = schema.union(id);\n let mut inline_fragments =\n self.create_id_inline_fragments(field.definition.location, &union.members);\n\n if inline_fragments.is_empty() {\n selections\n } else {\n if let TransformedValue::Replace(selections) = selections {\n inline_fragments.extend(selections.into_iter())\n } else {\n inline_fragments.extend(field.selections.iter().cloned());\n }\n TransformedValue::Replace(inline_fragments)\n }\n }\n _ => selections,\n }\n };\n match next_selections {\n TransformedValue::Keep => Transformed::Keep,\n TransformedValue::Replace(selections) => Transformed::Replace(Arc::new(LinkedField {\n alias: field.alias,\n definition: 
field.definition,\n arguments: field.arguments.clone(),\n directives: field.directives.clone(),\n selections,\n })),\n }\n }\n\n fn transform_fragment_spread(\n &mut self,\n _spread: &FragmentSpread,\n ) -> Transformed<Arc<FragmentSpread>> {\n Transformed::Keep\n }\n\n fn transform_scalar_field(&mut self, _field: &ScalarField) -> Transformed<Arc<ScalarField>> {\n Transformed::Keep\n }\n}\n\nimpl<'s> GenerateIDFieldTransform<'s> {\n fn new(program: &'s Program<'s>) -> Self {\n let schema = program.schema();\n let node_interface_id = match schema.get_type(\"Node\".intern()) {\n Some(Type::Interface(id)) => id,\n _ => panic!(\"Expected the schema to contain an interface named `Node`.\"),\n };\n let node_interface = schema.interface(node_interface_id);\n let id_key = \"id\".intern();\n let node_id_field_id = *node_interface\n .fields\n .iter()\n .find(|&&id| schema.field(id).name == id_key)\n .expect(\"Expected `Node` to contain a field named `id`.\");\n Self {\n program,\n id_name: schema.field(node_id_field_id).name,\n node_interface_id,\n cache: Default::default(),\n node_id_field_id,\n }\n }\n\n fn has_unaliased_id_field(&self, selections: &[Selection]) -> bool {\n selections.iter().any(|x| match x {\n Selection::ScalarField(child) => {\n child.alias.is_none()\n && self.program.schema().field(child.definition.item).name == self.id_name\n }\n _ => false,\n })\n }\n\n fn get_id_field_id(&mut self, type_: Type, fields: &[FieldID]) -> Option<FieldID> {\n match self.cache.get(&type_) {\n Some(result) => *result,\n None => {\n for id in fields {\n let field = self.program.schema().field(*id);\n if field.name == self.id_name\n && self.program.schema().is_id(field.type_.inner())\n {\n let result = Some(*id);\n self.cache.insert(type_, result);\n return result;\n }\n }\n self.cache.insert(type_, None);\n None\n }\n }\n }\n\n \/\/\/ For interfaces and unions: generate a `... on Node { id }`\n \/\/\/ fragment if *any* concrete type implements Node. Then generate a\n \/\/\/ `... 
on PossibleType { id }` for every concrete type that does *not*\n \/\/\/ implement `Node`\n fn create_id_inline_fragments(\n &mut self,\n location: Location,\n concrete_ids: &[ObjectID],\n ) -> Vec<Selection> {\n let mut next_selections = vec![];\n let mut should_generate_node = false;\n\n for object_id in concrete_ids {\n let object = self.program.schema().object(*object_id);\n let implements_node = object\n .interfaces\n .iter()\n .any(|interface_id| interface_id == &self.node_interface_id);\n if implements_node {\n should_generate_node = true;\n } else if let Some(id_field_id) =\n self.get_id_field_id(Type::Object(*object_id), &object.fields)\n {\n next_selections.push(Selection::InlineFragment(self.create_inline_id_fragment(\n location,\n Type::Object(*object_id),\n id_field_id,\n )));\n }\n }\n\n if should_generate_node {\n next_selections.push(Selection::InlineFragment(self.create_inline_id_fragment(\n location,\n Type::Interface(self.node_interface_id),\n self.node_id_field_id,\n )));\n }\n\n next_selections\n }\n\n fn create_id_selection(&self, location: Location, id_field_id: FieldID) -> Selection {\n Selection::ScalarField(Arc::new(ScalarField {\n alias: None,\n definition: WithLocation::new(location, id_field_id),\n arguments: Default::default(),\n directives: Default::default(),\n }))\n }\n\n fn create_inline_id_fragment(\n &self,\n location: Location,\n type_: Type,\n id_field_id: FieldID,\n ) -> Arc<InlineFragment> {\n Arc::new(InlineFragment {\n type_condition: Some(type_),\n directives: Default::default(),\n selections: vec![self.create_id_selection(location, id_field_id)],\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add DataPacket packets<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test cases for database<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Improve gamma<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>More safely handle Danbooru results with missing file_url.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate rgtk;\n\nuse rgtk::*;\n\npub fn create_pane() -> gtk::Box {\n let new_project_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n new_project_button.connect(gtk::signals::ButtonReleaseEvent::new(|_| {\n println!(\"new project\");\n true\n }));\n\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n import_button.connect(gtk::signals::ButtonReleaseEvent::new(|_| {\n println!(\"import\");\n true\n }));\n\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n rename_button.connect(gtk::signals::ButtonReleaseEvent::new(|_| {\n println!(\"rename\");\n true\n }));\n\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n remove_button.connect(gtk::signals::ButtonReleaseEvent::new(|_| {\n println!(\"remove\");\n true\n }));\n\n let mut project_buttons = gtk::Box::new(gtk::orientation::Horizontal, 0).unwrap();\n project_buttons.set_size_request(-1, -1);\n project_buttons.add(&new_project_button);\n project_buttons.add(&import_button);\n project_buttons.add(&rename_button);\n project_buttons.add(&remove_button);\n\n let mut project_pane = gtk::Box::new(gtk::orientation::Vertical, 0).unwrap();\n let project_tree = gtk::TreeView::new().unwrap();\n project_pane.set_size_request(-1, -1);\n project_pane.pack_start(&project_buttons, false, true, 0);\n project_pane.pack_start(&project_tree, true, true, 0);\n\n project_pane\n}\n<commit_msg>Show dialogs<commit_after>extern crate rgtk;\n\nuse rgtk::*;\n\nfn 
new_project() {\n let chooser = gtk::FileChooserDialog::new(\n \"New Project\",\n None,\n gtk::enums::file_chooser_action::CreateFolder).unwrap();\n chooser.run();\n let filename = chooser.get_filename();\n if filename.is_some() {\n println!(\"{}\", filename.unwrap());\n }\n chooser.destroy();\n}\n\nfn import_project() {\n let chooser = gtk::FileChooserDialog::new(\n \"Import\",\n None,\n gtk::enums::file_chooser_action::SelectFolder).unwrap();\n chooser.run();\n let filename = chooser.get_filename();\n if filename.is_some() {\n println!(\"{}\", filename.unwrap());\n }\n chooser.destroy();\n}\n\nfn rename_project() {\n \n}\n\nfn remove_project() {\n \n}\n\npub fn create_pane() -> gtk::Box {\n let new_project_button = gtk::Button::new_with_label(\"New Project\").unwrap();\n new_project_button.connect(gtk::signals::Clicked::new(new_project));\n\n let import_button = gtk::Button::new_with_label(\"Import\").unwrap();\n import_button.connect(gtk::signals::Clicked::new(import_project));\n\n let rename_button = gtk::Button::new_with_label(\"Rename\").unwrap();\n rename_button.connect(gtk::signals::Clicked::new(rename_project));\n\n let remove_button = gtk::Button::new_with_label(\"Remove\").unwrap();\n remove_button.connect(gtk::signals::Clicked::new(remove_project));\n\n let mut project_buttons = gtk::Box::new(gtk::orientation::Horizontal, 0).unwrap();\n project_buttons.set_size_request(-1, -1);\n project_buttons.add(&new_project_button);\n project_buttons.add(&import_button);\n project_buttons.add(&rename_button);\n project_buttons.add(&remove_button);\n\n let mut project_pane = gtk::Box::new(gtk::orientation::Vertical, 0).unwrap();\n let project_tree = gtk::TreeView::new().unwrap();\n project_pane.set_size_request(-1, -1);\n project_pane.pack_start(&project_buttons, false, true, 0);\n project_pane.pack_start(&project_tree, true, true, 0);\n\n project_pane\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(syncfile): use write_all instead of write<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rename: find -> find_by_id<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Created update-licenses to update previous versions of crates to have their corresponding licenses.<commit_after>\/\/! Updates all of the licenses from the existing crates into each of their\n\/\/! 
already existing versions.\n\n\/\/\n\/\/ Usage:\n\/\/ cargo run --bin update-licenses\n\nextern crate cargo_registry;\nextern crate postgres;\n\nuse std::io::prelude::*;\n\nfn main() {\n let conn = cargo_registry::db::connect_now();\n {\n let tx = conn.transaction().unwrap();\n transfer(&tx);\n tx.set_commit();\n tx.finish().unwrap();\n }\n}\n\nfn transfer(tx: &postgres::transaction::Transaction) {\n let stmt = tx.prepare(\"SELECT id, name, license FROM crates\").unwrap();\n let rows = stmt.query(&[]).unwrap();\n\n for row in rows.iter() {\n let id: i32 = row.get(\"id\");\n let name: String = row.get(\"name\");\n let license: String = row.get(\"license\");\n\n println!(\"Setting the license for all versions of {} to {}.\", name, license);\n\n let num_updated = tx.execute(\"UPDATE versions SET license = $1 WHERE crate_id = $2\", &[&license, &id]).unwrap();\n assert!(num_updated > 0);\n }\n\n get_confirm(\"Finish committing?\");\n}\n\nfn get_confirm(msg: &str) {\n print!(\"{} [y\/N]: \", msg);\n std::io::stdout().flush().unwrap();\n \n let mut line = String::new();\n std::io::stdin().read_line(&mut line).unwrap();\n\n if !line.starts_with(\"y\") {\n std::process::exit(0);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Vulkan depth-buffering support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Handle a not-ready status from vkAcquireNextImageKHR, which happens on AMD with vsync<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove cleardepth_command_buffers, and use command_buffers instead<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => {\n if first {\n first = false\n } else {\n echo = echo + \" \";\n }\n echo = echo + arg;\n }\n None => (),\n }\n }\n println!(\"{}\", echo);\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n File::exec(arg);\n },\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n let path = arg.clone();\n println!(\"URL: {}\", path);\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n for i in 2..args.len() {\n if let Some(arg) = args.get(i) {\n if i >= 3 {\n string.push_str(\" \");\n }\n string.push_str(arg);\n }\n }\n string.push_str(\"\\r\\n\\r\\n\");\n\n match file.write(&string.as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n 
con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Some(cmd) => {\n if cmd == \"if\" {\n let mut value = false;\n\n match args.get(1) {\n Some(left) => match args.get(2) {\n Some(cmp) => match args.get(3) {\n Some(right) => {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n None => (),\n },\n None => (),\n },\n None => (),\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find('=') {\n Some(i) => {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => value = value + \" \" + &arg,\n None => (),\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n None => (),\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n\n let mut help = \"Commands:\".to_string();\n for command in self.commands.iter() {\n help = help + \" \" + &command.name;\n }\n println!(\"{}\", help);\n }\n None => (),\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(&\"Terminal\".to_string());\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Add help command to terminal<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => {\n if first {\n first = false\n } else {\n echo = echo + \" \";\n }\n echo = echo + arg;\n }\n None => (),\n }\n }\n println!(\"{}\", echo);\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n File::exec(arg);\n },\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(arg) => {\n let path = arg.clone();\n println!(\"URL: {}\", path);\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n None => (),\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n for i in 2..args.len() {\n if let Some(arg) = args.get(i) {\n if i >= 3 {\n string.push_str(\" \");\n }\n string.push_str(arg);\n }\n }\n string.push_str(\"\\r\\n\\r\\n\");\n\n match file.write(&string.as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n println!(\"URL: {}\", path);\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n 
con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n\t\tlet mut command_list = String::new();\n\t\tcommand_list = commands.iter().fold(command_list, |l , n| l + \" \" + &n.name);\n\n commands.push(Command {\n\t\t\tname: \"help\".to_string(),\n\t\t\tmain: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n\t\t\t},\n\t\t});\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Some(cmd) => {\n if cmd == \"if\" {\n let mut value = false;\n\n match args.get(1) {\n Some(left) => match args.get(2) {\n Some(cmp) => match args.get(3) {\n Some(right) => {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n None => (),\n },\n None => (),\n },\n None => (),\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find('=') {\n Some(i) => {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Some(arg) => value = value + \" \" + &arg,\n None => (),\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n None => (),\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n\n }\n None => (),\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(&\"Terminal\".to_string());\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![staged_api]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(no_std)]\n#![feature(staged_api)]\n\nextern crate libc;\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\n#[no_mangle]\npub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n unsafe { imp::allocate(size, align) }\n}\n\n#[no_mangle]\npub extern fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n unsafe { imp::deallocate(ptr, old_size, align) }\n}\n\n#[no_mangle]\npub extern fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,\n align: usize) -> *mut u8 {\n unsafe { imp::reallocate(ptr, old_size, size, align) }\n}\n\n#[no_mangle]\npub extern fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize,\n size: usize, align: usize) -> usize {\n unsafe { imp::reallocate_inplace(ptr, old_size, size, align) }\n}\n\n#[no_mangle]\npub extern fn __rust_usable_size(size: usize, align: usize) -> usize {\n imp::usable_size(size, align)\n}\n\n#[cfg(unix)]\nmod imp {\n use core::cmp;\n use core::ptr;\n use libc;\n use MIN_ALIGN;\n\n extern {\n \/\/ Apparently android doesn't have posix_memalign\n #[cfg(target_os = \"android\")]\n fn memalign(align: libc::size_t, size: libc::size_t) -> *mut libc::c_void;\n\n #[cfg(not(target_os = \"android\"))]\n fn posix_memalign(memptr: *mut *mut libc::c_void,\n align: libc::size_t,\n size: libc::size_t) -> libc::c_int;\n }\n\n pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n libc::malloc(size as libc::size_t) as *mut u8\n } else {\n #[cfg(target_os = \"android\")]\n unsafe fn more_aligned_malloc(size: usize, align: usize) -> *mut u8 {\n memalign(align as libc::size_t, size as libc::size_t) as *mut u8\n }\n #[cfg(not(target_os = \"android\"))]\n unsafe fn more_aligned_malloc(size: usize, align: usize) -> *mut u8 {\n let mut out = ptr::null_mut();\n let ret = posix_memalign(&mut out,\n align as libc::size_t,\n size as libc::size_t);\n if ret != 0 {\n ptr::null_mut()\n } else {\n out as *mut u8\n }\n }\n more_aligned_malloc(size, align)\n }\n }\n\n pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize,\n align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8\n } else {\n let new_ptr = allocate(size, align);\n ptr::copy(ptr, new_ptr, cmp::min(size, old_size));\n deallocate(ptr, old_size, align);\n new_ptr\n }\n }\n\n pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,\n _align: usize) -> usize {\n old_size\n }\n\n pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {\n libc::free(ptr as *mut libc::c_void)\n }\n\n pub fn usable_size(size: usize, _align: usize) -> usize {\n size\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use libc::{BOOL, DWORD, HANDLE, LPVOID, SIZE_T};\n use MIN_ALIGN;\n\n extern \"system\" {\n fn GetProcessHeap() -> HANDLE;\n fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID;\n fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID,\n dwBytes: SIZE_T) -> LPVOID;\n fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;\n }\n\n #[repr(C)]\n struct Header(*mut u8);\n\n const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010;\n\n unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {\n &mut *(ptr as *mut Header).offset(-1)\n }\n\n 
unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 {\n let aligned = ptr.offset((align - (ptr as usize & (align - 1))) as isize);\n *get_header(aligned) = Header(ptr);\n aligned\n }\n\n pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8\n } else {\n let ptr = HeapAlloc(GetProcessHeap(), 0,\n (size + align) as SIZE_T) as *mut u8;\n if ptr.is_null() { return ptr }\n align_ptr(ptr, align)\n }\n }\n\n pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize,\n align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8\n } else {\n let header = get_header(ptr);\n let new = HeapReAlloc(GetProcessHeap(), 0, header.0 as LPVOID,\n (size + align) as SIZE_T) as *mut u8;\n if new.is_null() { return new }\n align_ptr(new, align)\n }\n }\n\n pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,\n align: usize) -> usize {\n if align <= MIN_ALIGN {\n let new = HeapReAlloc(GetProcessHeap(), HEAP_REALLOC_IN_PLACE_ONLY,\n ptr as LPVOID, size as SIZE_T) as *mut u8;\n if new.is_null() { old_size } else { size }\n } else {\n old_size\n }\n }\n\n pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) {\n if align <= MIN_ALIGN {\n let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);\n debug_assert!(err != 0);\n } else {\n let header = get_header(ptr);\n let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID);\n debug_assert!(err != 0);\n }\n }\n\n pub fn usable_size(size: usize, _align: usize) -> usize {\n size\n }\n}\n<commit_msg>Run rustfmt on liballoc_system.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![staged_api]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(libc)]\n#![feature(no_std)]\n#![feature(staged_api)]\n\nextern crate libc;\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\n#[no_mangle]\npub extern \"C\" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {\n unsafe { imp::allocate(size, align) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {\n unsafe { imp::deallocate(ptr, old_size, align) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate(ptr: *mut u8,\n old_size: usize,\n size: usize,\n align: usize)\n -> *mut u8 {\n unsafe { imp::reallocate(ptr, old_size, size, align) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_reallocate_inplace(ptr: *mut u8,\n old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n unsafe { imp::reallocate_inplace(ptr, old_size, size, align) }\n}\n\n#[no_mangle]\npub extern \"C\" fn __rust_usable_size(size: usize, align: usize) -> usize {\n imp::usable_size(size, align)\n}\n\n#[cfg(unix)]\nmod imp {\n use core::cmp;\n use core::ptr;\n use libc;\n use MIN_ALIGN;\n\n extern {\n \/\/ Apparently android doesn't have posix_memalign\n #[cfg(target_os = \"android\")]\n fn memalign(align: libc::size_t, size: libc::size_t) -> *mut libc::c_void;\n\n #[cfg(not(target_os = \"android\"))]\n fn posix_memalign(memptr: *mut *mut libc::c_void,\n align: libc::size_t,\n size: libc::size_t)\n -> libc::c_int;\n }\n\n pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n libc::malloc(size as libc::size_t) as *mut u8\n } else {\n #[cfg(target_os = \"android\")]\n unsafe fn more_aligned_malloc(size: usize, align: usize) -> *mut u8 {\n memalign(align as libc::size_t, size as libc::size_t) as *mut u8\n }\n #[cfg(not(target_os = \"android\"))]\n unsafe fn more_aligned_malloc(size: usize, align: usize) -> *mut u8 {\n let mut out = ptr::null_mut();\n let ret = posix_memalign(&mut out, align as libc::size_t, size as libc::size_t);\n if ret != 0 {\n ptr::null_mut()\n } else {\n out as *mut u8\n }\n }\n more_aligned_malloc(size, align)\n }\n }\n\n pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8\n } else {\n let new_ptr = allocate(size, align);\n ptr::copy(ptr, new_ptr, cmp::min(size, old_size));\n deallocate(ptr, old_size, align);\n new_ptr\n }\n }\n\n pub unsafe fn reallocate_inplace(_ptr: *mut u8,\n old_size: usize,\n _size: usize,\n _align: usize)\n -> usize {\n old_size\n }\n\n pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {\n libc::free(ptr as *mut libc::c_void)\n }\n\n pub fn usable_size(size: usize, _align: usize) -> usize {\n size\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use libc::{BOOL, DWORD, HANDLE, LPVOID, SIZE_T};\n use MIN_ALIGN;\n\n extern \"system\" {\n fn GetProcessHeap() -> HANDLE;\n fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID;\n fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID;\n fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;\n }\n\n #[repr(C)]\n struct Header(*mut u8);\n\n const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010;\n\n unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {\n &mut 
*(ptr as *mut Header).offset(-1)\n }\n\n unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 {\n let aligned = ptr.offset((align - (ptr as usize & (align - 1))) as isize);\n *get_header(aligned) = Header(ptr);\n aligned\n }\n\n pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8\n } else {\n let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8;\n if ptr.is_null() {\n return ptr\n }\n align_ptr(ptr, align)\n }\n }\n\n pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {\n if align <= MIN_ALIGN {\n HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8\n } else {\n let header = get_header(ptr);\n let new = HeapReAlloc(GetProcessHeap(),\n 0,\n header.0 as LPVOID,\n (size + align) as SIZE_T) as *mut u8;\n if new.is_null() {\n return new\n }\n align_ptr(new, align)\n }\n }\n\n pub unsafe fn reallocate_inplace(ptr: *mut u8,\n old_size: usize,\n size: usize,\n align: usize)\n -> usize {\n if align <= MIN_ALIGN {\n let new = HeapReAlloc(GetProcessHeap(),\n HEAP_REALLOC_IN_PLACE_ONLY,\n ptr as LPVOID,\n size as SIZE_T) as *mut u8;\n if new.is_null() {\n old_size\n } else {\n size\n }\n } else {\n old_size\n }\n }\n\n pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) {\n if align <= MIN_ALIGN {\n let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);\n debug_assert!(err != 0);\n } else {\n let header = get_header(ptr);\n let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID);\n debug_assert!(err != 0);\n }\n }\n\n pub fn usable_size(size: usize, _align: usize) -> usize {\n size\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test 'extern crate lua_rs;'.<commit_after>\/\/ Copyright (C) 2015 Ahmed Charles - acharles@outlook.com\n\/\/ Distributed under the MIT License.\n\/\/ (See accompanying file LICENSE.txt or copy at\n\/\/ http:\/\/opensource.org\/licenses\/MIT)\n\nextern crate lua_rs;\n\n#[test]\nfn it_works() {\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Compiles the profiler part of the `compiler-rt` library.\n\/\/!\n\/\/! 
See the build.rs for libcompiler_builtins crate for details.\n\nextern crate gcc;\n\nuse std::env;\nuse std::path::Path;\n\nfn main() {\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let cfg = &mut gcc::Config::new();\n\n let mut profile_sources = vec![\"GCDAProfiling.c\",\n \"InstrProfiling.c\",\n \"InstrProfilingBuffer.c\",\n \"InstrProfilingFile.c\",\n \"InstrProfilingMerge.c\",\n \"InstrProfilingMergeFile.c\",\n \"InstrProfilingPlatformDarwin.c\",\n \"InstrProfilingPlatformLinux.c\",\n \"InstrProfilingPlatformOther.c\",\n \"InstrProfilingRuntime.cc\",\n \"InstrProfilingUtil.c\",\n \"InstrProfilingValue.c\",\n \"InstrProfilingWriter.c\"];\n\n if target.contains(\"msvc\") {\n \/\/ Don't pull in extra libraries on MSVC\n cfg.flag(\"\/Zl\");\n profile_sources.push(\"WindowsMMap.c\");\n } else {\n \/\/ Turn off various features of gcc and such, mostly copying\n \/\/ compiler-rt's build system already\n cfg.flag(\"-fno-builtin\");\n cfg.flag(\"-fvisibility=hidden\");\n cfg.flag(\"-fomit-frame-pointer\");\n cfg.flag(\"-ffreestanding\");\n cfg.define(\"VISIBILITY_HIDDEN\", None);\n }\n\n for src in profile_sources {\n cfg.file(Path::new(\"..\/compiler-rt\/lib\/profile\").join(src));\n }\n\n cfg.compile(\"libprofiler-rt.a\");\n}\n<commit_msg>Redefine strdup, open, fdopen as _strdup, _open, _fdopen to avoid linking errors on MSVC<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Compiles the profiler part of the `compiler-rt` library.\n\/\/!\n\/\/! See the build.rs for libcompiler_builtins crate for details.\n\nextern crate gcc;\n\nuse std::env;\nuse std::path::Path;\n\nfn main() {\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n let cfg = &mut gcc::Config::new();\n\n let mut profile_sources = vec![\"GCDAProfiling.c\",\n \"InstrProfiling.c\",\n \"InstrProfilingBuffer.c\",\n \"InstrProfilingFile.c\",\n \"InstrProfilingMerge.c\",\n \"InstrProfilingMergeFile.c\",\n \"InstrProfilingPlatformDarwin.c\",\n \"InstrProfilingPlatformLinux.c\",\n \"InstrProfilingPlatformOther.c\",\n \"InstrProfilingRuntime.cc\",\n \"InstrProfilingUtil.c\",\n \"InstrProfilingValue.c\",\n \"InstrProfilingWriter.c\"];\n\n if target.contains(\"msvc\") {\n \/\/ Don't pull in extra libraries on MSVC\n cfg.flag(\"\/Zl\");\n profile_sources.push(\"WindowsMMap.c\");\n cfg.define(\"strdup\", Some(\"_strdup\"));\n cfg.define(\"open\", Some(\"_open\"));\n cfg.define(\"fdopen\", Some(\"_fdopen\"));\n } else {\n \/\/ Turn off various features of gcc and such, mostly copying\n \/\/ compiler-rt's build system already\n cfg.flag(\"-fno-builtin\");\n cfg.flag(\"-fvisibility=hidden\");\n cfg.flag(\"-fomit-frame-pointer\");\n cfg.flag(\"-ffreestanding\");\n cfg.define(\"VISIBILITY_HIDDEN\", None);\n }\n\n for src in profile_sources {\n cfg.file(Path::new(\"..\/compiler-rt\/lib\/profile\").join(src));\n }\n\n cfg.compile(\"libprofiler-rt.a\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added test for module_path! fix<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nmod foo {\n pub mod bar {\n pub mod baz {\n pub fn name() -> &'static str {\n module_path!()\n }\n }\n }\n}\n\nfn main() {\n assert_eq!(module_path!(), \"issue-18859\");\n assert_eq!(foo::bar::baz::name(), \"issue-18859::foo::bar::baz\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #29281 - skeleten:issue-28189, r=steveklabnik<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct S<T>(T) where [T; (||{}, 1).1]: Copy;\n\nfn main() {\n\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::ty::TyCtxt;\nuse rustc::mir::*;\nuse rustc_data_structures::indexed_vec::Idx;\nuse transform::{MirPass, MirSource};\n\npub struct Deaggregator;\n\nimpl MirPass for Deaggregator {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n source: MirSource,\n mir: &mut Mir<'tcx>) {\n let node_path = tcx.item_path_str(source.def_id);\n debug!(\"running on: {:?}\", node_path);\n \/\/ we only run when mir_opt_level > 2\n if tcx.sess.opts.debugging_opts.mir_opt_level <= 2 {\n return;\n }\n\n \/\/ Don't run on constant MIR, because trans might not be able to\n \/\/ evaluate the modified MIR.\n \/\/ FIXME(eddyb) Remove check after miri is merged.\n let id = tcx.hir.as_local_node_id(source.def_id).unwrap();\n match (tcx.hir.body_owner_kind(id), source.promoted) {\n (hir::BodyOwnerKind::Fn, None) => {},\n _ => return\n }\n \/\/ In fact, we might not want to trigger in other cases.\n \/\/ Ex: when we could use SROA. 
See issue #35259\n\n for bb in mir.basic_blocks_mut() {\n let mut curr: usize = 0;\n while let Some(idx) = get_aggregate_statement_index(curr, &bb.statements) {\n \/\/ do the replacement\n debug!(\"removing statement {:?}\", idx);\n let src_info = bb.statements[idx].source_info;\n let suffix_stmts = bb.statements.split_off(idx+1);\n let orig_stmt = bb.statements.pop().unwrap();\n let (lhs, rhs) = match orig_stmt.kind {\n StatementKind::Assign(ref lhs, ref rhs) => (lhs, rhs),\n _ => span_bug!(src_info.span, \"expected assign, not {:?}\", orig_stmt),\n };\n let (agg_kind, operands) = match rhs {\n &Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),\n _ => span_bug!(src_info.span, \"expected aggregate, not {:?}\", rhs),\n };\n let (adt_def, variant, substs) = match **agg_kind {\n AggregateKind::Adt(adt_def, variant, substs, None)\n => (adt_def, variant, substs),\n _ => span_bug!(src_info.span, \"expected struct, not {:?}\", rhs),\n };\n let n = bb.statements.len();\n bb.statements.reserve(n + operands.len() + suffix_stmts.len());\n for (i, op) in operands.iter().enumerate() {\n let ref variant_def = adt_def.variants[variant];\n let ty = variant_def.fields[i].ty(tcx, substs);\n let rhs = Rvalue::Use(op.clone());\n\n let lhs_cast = if adt_def.is_enum() {\n Place::Projection(Box::new(PlaceProjection {\n base: lhs.clone(),\n elem: ProjectionElem::Downcast(adt_def, variant),\n }))\n } else {\n lhs.clone()\n };\n\n let lhs_proj = Place::Projection(Box::new(PlaceProjection {\n base: lhs_cast,\n elem: ProjectionElem::Field(Field::new(i), ty),\n }));\n let new_statement = Statement {\n source_info: src_info,\n kind: StatementKind::Assign(lhs_proj, rhs),\n };\n debug!(\"inserting: {:?} @ {:?}\", new_statement, idx + i);\n bb.statements.push(new_statement);\n }\n\n \/\/ if the aggregate was an enum, we need to set the discriminant\n if adt_def.is_enum() {\n let set_discriminant = Statement {\n kind: StatementKind::SetDiscriminant {\n place: lhs.clone(),\n variant_index: variant,\n },\n source_info: src_info,\n };\n bb.statements.push(set_discriminant);\n };\n\n curr = bb.statements.len();\n bb.statements.extend(suffix_stmts);\n }\n }\n }\n}\n\nfn get_aggregate_statement_index<'a, 'tcx, 'b>(start: usize,\n statements: &Vec<Statement<'tcx>>)\n -> Option<usize> {\n for i in start..statements.len() {\n let ref statement = statements[i];\n let rhs = match statement.kind {\n StatementKind::Assign(_, ref rhs) => rhs,\n _ => continue,\n };\n let (kind, operands) = match rhs {\n &Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),\n _ => continue,\n };\n let (adt_def, variant) = match **kind {\n AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),\n _ => continue,\n };\n if operands.len() == 0 {\n \/\/ don't deaggregate ()\n continue;\n }\n debug!(\"getting variant {:?}\", variant);\n debug!(\"for adt_def {:?}\", adt_def);\n return Some(i);\n };\n None\n}\n<commit_msg>rustc_mir: use the \"idiomatic\" optimization gating in the deaggregator.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::ty::TyCtxt;\nuse rustc::mir::*;\nuse rustc_data_structures::indexed_vec::Idx;\nuse transform::{MirPass, MirSource};\n\npub struct Deaggregator;\n\nimpl MirPass for Deaggregator {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n source: MirSource,\n mir: &mut Mir<'tcx>) {\n \/\/ Don't run on constant MIR, because trans might not be able to\n \/\/ evaluate the modified MIR.\n \/\/ FIXME(eddyb) Remove check after miri is merged.\n let id = tcx.hir.as_local_node_id(source.def_id).unwrap();\n match (tcx.hir.body_owner_kind(id), source.promoted) {\n (_, Some(_)) |\n (hir::BodyOwnerKind::Const, _) |\n (hir::BodyOwnerKind::Static(_), _) => return,\n\n (hir::BodyOwnerKind::Fn, _) => {\n if tcx.is_const_fn(source.def_id) {\n \/\/ Don't run on const functions, as, again, trans might not be able to evaluate\n \/\/ the optimized IR.\n return\n }\n }\n }\n\n \/\/ We only run when the MIR optimization level is > 2.\n if tcx.sess.opts.debugging_opts.mir_opt_level <= 2 {\n return;\n }\n\n for bb in mir.basic_blocks_mut() {\n let mut curr: usize = 0;\n while let Some(idx) = get_aggregate_statement_index(curr, &bb.statements) {\n \/\/ do the replacement\n debug!(\"removing statement {:?}\", idx);\n let src_info = bb.statements[idx].source_info;\n let suffix_stmts = bb.statements.split_off(idx+1);\n let orig_stmt = bb.statements.pop().unwrap();\n let (lhs, rhs) = match orig_stmt.kind {\n StatementKind::Assign(ref lhs, ref rhs) => (lhs, rhs),\n _ => span_bug!(src_info.span, \"expected assign, not {:?}\", orig_stmt),\n };\n let (agg_kind, operands) = match rhs {\n &Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),\n _ => span_bug!(src_info.span, \"expected aggregate, not {:?}\", rhs),\n };\n let (adt_def, variant, substs) = match **agg_kind {\n AggregateKind::Adt(adt_def, variant, substs, None)\n => (adt_def, variant, substs),\n _ => span_bug!(src_info.span, \"expected struct, not {:?}\", rhs),\n };\n let n = bb.statements.len();\n bb.statements.reserve(n + operands.len() + suffix_stmts.len());\n for (i, op) in operands.iter().enumerate() {\n let ref variant_def = adt_def.variants[variant];\n let ty = variant_def.fields[i].ty(tcx, substs);\n let rhs = Rvalue::Use(op.clone());\n\n let lhs_cast = if adt_def.is_enum() {\n Place::Projection(Box::new(PlaceProjection {\n base: lhs.clone(),\n elem: ProjectionElem::Downcast(adt_def, variant),\n }))\n } else {\n lhs.clone()\n };\n\n let lhs_proj = Place::Projection(Box::new(PlaceProjection {\n base: lhs_cast,\n elem: ProjectionElem::Field(Field::new(i), ty),\n }));\n let new_statement = Statement {\n source_info: src_info,\n kind: StatementKind::Assign(lhs_proj, rhs),\n };\n debug!(\"inserting: {:?} @ {:?}\", new_statement, idx + i);\n bb.statements.push(new_statement);\n }\n\n \/\/ if the aggregate was an enum, we need to set the discriminant\n if adt_def.is_enum() {\n let set_discriminant = Statement {\n kind: StatementKind::SetDiscriminant {\n place: lhs.clone(),\n variant_index: variant,\n },\n source_info: src_info,\n };\n bb.statements.push(set_discriminant);\n };\n\n curr = bb.statements.len();\n bb.statements.extend(suffix_stmts);\n }\n }\n }\n}\n\nfn get_aggregate_statement_index<'a, 'tcx, 'b>(start: usize,\n statements: &Vec<Statement<'tcx>>)\n -> Option<usize> {\n for i in start..statements.len() {\n let ref statement = statements[i];\n let rhs = match statement.kind {\n StatementKind::Assign(_, ref 
rhs) => rhs,\n _ => continue,\n };\n let (kind, operands) = match rhs {\n &Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),\n _ => continue,\n };\n let (adt_def, variant) = match **kind {\n AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),\n _ => continue,\n };\n if operands.len() == 0 {\n \/\/ don't deaggregate ()\n continue;\n }\n debug!(\"getting variant {:?}\", variant);\n debug!(\"for adt_def {:?}\", adt_def);\n return Some(i);\n };\n None\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Better wording<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`IXAudio2Voice::SetEffectChain()`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test<commit_after>\/\/ compile-flags: --crate-type lib\n\/\/ check-pass\n\/\/\n\/\/ Regression test for issue #84399\n\/\/ Tests that we keep the full `ParamEnv` when\n\/\/ caching predicates with freshened types in the global cache\n\nuse std::marker::PhantomData;\npub trait Allocator<R> {\n type Buffer;\n}\npub struct DefaultAllocator;\nimpl <R> Allocator<R> for DefaultAllocator {\n type Buffer = ();\n}\npub type Owned<R> = <DefaultAllocator as Allocator<R>>::Buffer;\npub type MatrixMN<R> = Matrix<R, Owned<R>>;\npub type Matrix4<N> = Matrix<N, ()>;\npub struct Matrix<R, S> {\n pub data: S,\n _phantoms: PhantomData<R>,\n}\npub fn set_object_transform(matrix: &Matrix4<()>) {\n matrix.js_buffer_view();\n}\npub trait Storable {\n type Cell;\n fn slice_to_items(_buffer: &()) -> &[Self::Cell] {\n unimplemented!()\n }\n}\npub type Cell<T> = <T as Storable>::Cell;\nimpl<R> Storable for MatrixMN<R>\nwhere\n DefaultAllocator: Allocator<R>,\n{\n type Cell = ();\n}\npub trait JsBufferView {\n fn js_buffer_view(&self) -> usize {\n unimplemented!()\n }\n}\nimpl<R> JsBufferView for [MatrixMN<R>]\nwhere\n DefaultAllocator: Allocator<R>,\n MatrixMN<R>: Storable,\n [Cell<MatrixMN<R>>]: JsBufferView,\n{\n fn js_buffer_view(&self) -> usize {\n <MatrixMN<R> as Storable>::slice_to_items(&()).js_buffer_view()\n }\n}\nimpl JsBufferView for [()] {}\nimpl<R> JsBufferView for MatrixMN<R> where DefaultAllocator: Allocator<R> {}\n<|endoftext|>"} {"text":"<commit_before>#[macro_use] extern crate nickel;\nextern crate regex;\nextern crate rustc_serialize;\n\nuse std::io::Write;\nuse nickel::status::StatusCode::{self, NotFound};\nuse nickel::{\n Nickel, NickelError, Continue, Halt, Request, Response, MediaType,\n QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action\n};\nuse regex::Regex;\n\n#[derive(RustcDecodable, RustcEncodable)]\nstruct Person {\n firstname: String,\n lastname: String,\n}\n\n\/\/this is an example middleware function that just logs each request\nfn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> {\n println!(\"logging request: {:?}\", request.origin.uri);\n Ok(Continue(response))\n}\n\n\/\/this is how to overwrite the default error handler to handle 404 cases with a custom view\nfn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action {\n if let Some(ref mut res) = err.stream {\n if res.status() == NotFound {\n let _ = res.write_all(b\"<h1>Call the police!<\/h1>\");\n return Halt(())\n }\n }\n\n Continue(())\n}\n\nfn main() {\n let mut server = Nickel::new();\n\n \/\/ middleware is optional and can be registered with `utilize`\n server.utilize(logger);\n\n \/\/ go to http:\/\/localhost:6767\/thoughtram_logo_brain.png to see static file serving in action\n 
server.utilize(StaticFilesHandler::new(\"examples\/assets\/\"));\n\n let hello_regex = Regex::new(\"\/hello\/(?P<name>[a-zA-Z]+)\").unwrap();\n\n \/\/ The return type for a route can be anything that implements `Responder`\n server.utilize(router!(\n \/\/ go to http:\/\/localhost:6767\/user\/4711 to see this route in action\n get \"\/user\/:userid\" => |request| {\n \/\/ returning a String\n format!(\"This is user: {}\", request.param(\"userid\"))\n }\n\n \/\/ go to http:\/\/localhost:6767\/no_alloc\/4711 to see this route in action\n get \"\/no_alloc\/:userid\" => |request, response| {\n \/\/ returning a slice of T where T: Display\n &[\"This is user: \", request.param(\"userid\")][..]\n }\n\n \/\/ go to http:\/\/localhost:6767\/bar to see this route in action\n get \"\/bar\" => {\n \/\/ returning a http status code and a static string\n (200u16, \"This is the \/bar handler\")\n }\n\n \/\/ go to http:\/\/localhost:6767\/content-type to see this route in action\n get \"\/content-type\" => |_, mut response| {\n response.set(MediaType::Json);\n \"{'foo':'bar'}\"\n }\n\n \/\/ go to http:\/\/localhost:6767\/hello\/moomah to see this route in action\n get hello_regex => |request| {\n format!(\"Hello {}\", request.param(\"name\"))\n }\n\n \/\/ FIXME\n \/\/ \/\/ go to http:\/\/localhost:6767\/redirect to see this route in action\n \/\/ get \"\/redirect\" => |request, response| {\n \/\/ use http::headers::response::Header::Location;\n \/\/ let root = url::Url::parse(\"http:\/\/www.rust-lang.org\/\").unwrap();\n \/\/ \/\/ returning a typed http status, a response body and some additional headers\n \/\/ (StatusCode::TemporaryRedirect, \"Redirecting you to 'rust-lang.org'\", vec![Location(root)])\n \/\/ }\n\n \/\/ go to http:\/\/localhost:6767\/private to see this route in action\n get \"\/private\" => {\n \/\/ returning a typed http status and a response body\n (StatusCode::Unauthorized, \"This is a private place\")\n }\n\n \/\/ go to http:\/\/localhost:6767\/some\/crazy\/route to see this route in action\n get \"\/some\/*\/route\" => {\n \/\/ returning a static string\n \"This matches \/some\/crazy\/route but not \/some\/super\/crazy\/route\"\n }\n\n \/\/ go to http:\/\/localhost:6767\/some\/crazy\/route to see this route in action\n get \"\/a\/**\/route\" => {\n \"This matches \/a\/crazy\/route and also \/a\/super\/crazy\/route\"\n }\n\n \/\/ try it with curl\n \/\/ curl 'http:\/\/localhost:6767\/a\/post\/request' -H 'Content-Type: application\/json;charset=UTF-8' --data-binary $'{ \"firstname\": \"John\",\"lastname\": \"Connor\" }'\n post \"\/a\/post\/request\" => |request| {\n let person = request.json_as::<Person>().unwrap();\n format!(\"Hello {} {}\", person.firstname, person.lastname)\n }\n\n \/\/ try calling http:\/\/localhost:6767\/query?foo=bar\n get \"\/query\" => |request| {\n let query = request.query();\n let foo = query.get(\"foo\").unwrap_or(\"This is only a default value\");\n let bar = query.get(\"bar\").unwrap_or(\"This is only a default value\");\n let text = format!(\"<p>Your foo values in the query string are: {:?}\\\n <p>Your bar values are: {:?}\",\n foo, bar);\n text\n }\n ));\n\n \/\/ issue #20178\n let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;\n\n server.handle_error(custom_handler);\n\n println!(\"Running server!\");\n server.listen(\"127.0.0.1:6767\");\n}\n<commit_msg>doc(example): reintroduce redirection example<commit_after>#[macro_use] extern crate nickel;\nextern crate regex;\nextern crate rustc_serialize;\nextern crate hyper;\n\nuse 
std::io::Write;\nuse nickel::status::StatusCode::{self, NotFound};\nuse nickel::{\n Nickel, NickelError, Continue, Halt, Request, Response, MediaType,\n QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action\n};\nuse regex::Regex;\nuse hyper::header::Location;\n\n#[derive(RustcDecodable, RustcEncodable)]\nstruct Person {\n firstname: String,\n lastname: String,\n}\n\n\/\/this is an example middleware function that just logs each request\nfn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> {\n println!(\"logging request: {:?}\", request.origin.uri);\n Ok(Continue(response))\n}\n\n\/\/this is how to overwrite the default error handler to handle 404 cases with a custom view\nfn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action {\n if let Some(ref mut res) = err.stream {\n if res.status() == NotFound {\n let _ = res.write_all(b\"<h1>Call the police!<\/h1>\");\n return Halt(())\n }\n }\n\n Continue(())\n}\n\nfn main() {\n let mut server = Nickel::new();\n\n \/\/ middleware is optional and can be registered with `utilize`\n server.utilize(logger);\n\n \/\/ go to http:\/\/localhost:6767\/thoughtram_logo_brain.png to see static file serving in action\n server.utilize(StaticFilesHandler::new(\"examples\/assets\/\"));\n\n let hello_regex = Regex::new(\"\/hello\/(?P<name>[a-zA-Z]+)\").unwrap();\n\n \/\/ The return type for a route can be anything that implements `Responder`\n server.utilize(router!(\n \/\/ go to http:\/\/localhost:6767\/user\/4711 to see this route in action\n get \"\/user\/:userid\" => |request| {\n \/\/ returning a String\n format!(\"This is user: {}\", request.param(\"userid\"))\n }\n\n \/\/ go to http:\/\/localhost:6767\/no_alloc\/4711 to see this route in action\n get \"\/no_alloc\/:userid\" => |request, response| {\n \/\/ returning a slice of T where T: Display\n &[\"This is user: \", request.param(\"userid\")][..]\n }\n\n \/\/ go to http:\/\/localhost:6767\/bar to see this route in action\n get \"\/bar\" => {\n \/\/ returning a http status code and a static string\n (200u16, \"This is the \/bar handler\")\n }\n\n \/\/ go to http:\/\/localhost:6767\/content-type to see this route in action\n get \"\/content-type\" => |_, mut response| {\n response.set(MediaType::Json);\n \"{'foo':'bar'}\"\n }\n\n \/\/ go to http:\/\/localhost:6767\/hello\/moomah to see this route in action\n get hello_regex => |request| {\n format!(\"Hello {}\", request.param(\"name\"))\n }\n\n \/\/ go to http:\/\/localhost:6767\/redirect to see this route in action\n get \"\/redirect\" => |_, mut response| {\n response.set(Location(\"http:\/\/nickel.rs\".into()));\n\n (StatusCode::PermanentRedirect, \"\")\n }\n\n \/\/ go to http:\/\/localhost:6767\/private to see this route in action\n get \"\/private\" => {\n \/\/ returning a typed http status and a response body\n (StatusCode::Unauthorized, \"This is a private place\")\n }\n\n \/\/ go to http:\/\/localhost:6767\/some\/crazy\/route to see this route in action\n get \"\/some\/*\/route\" => {\n \/\/ returning a static string\n \"This matches \/some\/crazy\/route but not \/some\/super\/crazy\/route\"\n }\n\n \/\/ go to http:\/\/localhost:6767\/some\/crazy\/route to see this route in action\n get \"\/a\/**\/route\" => {\n \"This matches \/a\/crazy\/route and also \/a\/super\/crazy\/route\"\n }\n\n \/\/ try it with curl\n \/\/ curl 'http:\/\/localhost:6767\/a\/post\/request' -H 'Content-Type: application\/json;charset=UTF-8' --data-binary $'{ \"firstname\": \"John\",\"lastname\": 
\"Connor\" }'\n post \"\/a\/post\/request\" => |request| {\n let person = request.json_as::<Person>().unwrap();\n format!(\"Hello {} {}\", person.firstname, person.lastname)\n }\n\n \/\/ try calling http:\/\/localhost:6767\/query?foo=bar\n get \"\/query\" => |request| {\n let query = request.query();\n let foo = query.get(\"foo\").unwrap_or(\"This is only a default value\");\n let bar = query.get(\"bar\").unwrap_or(\"This is only a default value\");\n let text = format!(\"<p>Your foo values in the query string are: {:?}\\\n <p>Your bar values are: {:?}\",\n foo, bar);\n text\n }\n ));\n\n \/\/ issue #20178\n let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404;\n\n server.handle_error(custom_handler);\n\n println!(\"Running server!\");\n server.listen(\"127.0.0.1:6767\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example mtlinfo: prints information about available metal devices<commit_after>\/\/! A simple example which prints out information about available Metal devices\n\nextern crate mtl;\n\n#[cfg(not(any(target_os = \"macos\", target_os = \"ios\")))]\nfn main() {\n println!(\"Metal is not supported on this platform\");\n}\n\n#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\nfn main() {\n let device = match mtl::Device::system_default_device() {\n Ok(device) => device,\n Err(e) => {\n use std::error::Error;\n println!(\"{}\", e.description());\n return;\n }\n };\n\n println!(\"Device: {}\", device.get_name());\n println!(\"Is low power: {}\", device.is_low_power());\n println!(\"Is headless: {}\", device.is_headless());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ICH: Add test case for extern mods.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for `extern` modules.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![feature(unboxed_closures)]\n#![feature(link_args)]\n#![crate_type=\"rlib\"]\n\n\n\/\/ Change function name --------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn change_function_name1(c: i64) -> i32;\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn change_function_name2(c: i64) -> i32;\n}\n\n\n\n\/\/ Change parameter name -------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn change_parameter_name(c: i64) -> i32;\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn change_parameter_name(d: i64) -> i32;\n}\n\n\n\n\/\/ Change parameter type -------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn change_parameter_type(c: i64) -> i32;\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn change_parameter_type(c: i32) -> i32;\n}\n\n\n\n\/\/ Change return type ----------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn change_return_type(c: i32) -> i32;\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn change_return_type(c: i32) -> i8;\n}\n\n\n\n\/\/ Add parameter ---------------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn add_parameter(c: i32) -> i32;\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn add_parameter(c: i32, d: i32) -> i32;\n}\n\n\n\n\/\/ Add return type -------------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn add_return_type(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn add_return_type(c: i32) -> i32;\n}\n\n\n\n\/\/ Make function variadic ------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn make_function_variadic(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern 
{\n pub fn make_function_variadic(c: i32, ...);\n}\n\n\n\n\/\/ Change calling convention ---------------------------------------------------\n#[cfg(cfail1)]\nextern \"C\" {\n pub fn change_calling_convention(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern \"rust-call\" {\n pub fn change_calling_convention(c: i32);\n}\n\n\n\n\/\/ Make function public --------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n fn make_function_public(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn make_function_public(c: i32);\n}\n\n\n\n\/\/ Add function ----------------------------------------------------------------\n#[cfg(cfail1)]\nextern {\n pub fn add_function1(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nextern {\n pub fn add_function1(c: i32);\n pub fn add_function2();\n}\n\n\n\n\/\/ Change link-args ------------------------------------------------------------\n#[cfg(cfail1)]\n#[link_args = \"-foo -bar\"]\nextern {\n pub fn change_link_args(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\n#[link_args = \"-foo -bar -baz\"]\nextern {\n pub fn change_link_args(c: i32);\n}\n\n\n\n\/\/ Change link-name ------------------------------------------------------------\n#[cfg(cfail1)]\n#[link(name = \"foo\")]\nextern {\n pub fn change_link_name(c: i32);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\n#[link(name = \"bar\")]\nextern {\n pub fn change_link_name(c: i32);\n}\n\ntype c_i32 = i32;\ntype c_i64 = i64;\n\n\/\/ Indirectly change parameter type --------------------------------------------\nmod indirectly_change_parameter_type {\n #[cfg(cfail1)]\n use super::c_i32 as c_int;\n #[cfg(not(cfail1))]\n use super::c_i64 as c_int;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n extern {\n pub fn indirectly_change_parameter_type(c: c_int);\n }\n}\n\n\n\n\/\/ Indirectly change return type --------------------------------------------\nmod indirectly_change_return_type {\n #[cfg(cfail1)]\n use super::c_i32 as c_int;\n #[cfg(not(cfail1))]\n use super::c_i64 as c_int;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n extern {\n pub fn indirectly_change_return_type() -> c_int;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Rust solution for reversing a string<commit_after>fn reverse<T>(s: T) -> String where T: Into<String> {\n s.into().chars().rev().collect()\n}\n\nfn main() {\n println!(\"{}\", reverse(\"hello élle 今日は\"));\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n\/\/ 
TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. 
times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') | (Normal, ' ') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset <= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset >= 1 {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Make ^ command better<commit_after>use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 
3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') | (Normal, ' ') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() 
{\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n if editor.offset < editor.string.len() {\n break;\n }\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n if editor.offset >= 1 {\n break;\n }\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>genet-kernel: emit on_filtered_frames_updated every 10000 frames<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #3617 - matthiaskrgr:3462_test, r=phansch<commit_after>\/\/ Copyright 2014-2019 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![warn(clippy::all)]\n#![allow(clippy::blacklisted_name)]\n#![allow(unused)]\n\nenum Foo {\n Bar,\n Baz,\n}\n\nfn bar(foo: Foo) {\n macro_rules! baz {\n () => {\n if let Foo::Bar = foo {}\n };\n }\n\n baz!();\n baz!();\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ All the comments here should not disappear.\n\nfn a() {\n match x {\n X |\n \/\/ A comment\n Y => {}\n };\n}\n\nfn b() {\n match x {\n X =>\n \/\/ A comment\n y\n }\n}\n\nfn c() {\n a() \/* ... *\/;\n}\n\nfn foo() -> Vec<i32> {\n (0..11)\n .map(|x|\n \/\/ This comment disappears.\n if x % 2 == 0 { x } else { x * 2 })\n .collect()\n}\n\nfn d() {\n if true \/* and ... *\/ {\n a();\n }\n}\n\nfn calc_page_len(prefix_len: usize, sofar: usize) -> usize {\n 2 \/\/ page type and flags\n + 1 \/\/ stored depth\n + 2 \/\/ stored count\n + prefix_len + sofar \/\/ sum of size of all the actual items\n}\n<commit_msg>Removed the failing test part since it will work when #754 is accepted<commit_after>\/\/ All the comments here should not disappear.\n\nfn a() {\n match x {\n X |\n \/\/ A comment\n Y => {}\n };\n}\n\nfn b() {\n match x {\n X =>\n \/\/ A comment\n y\n }\n}\n\nfn c() {\n a() \/* ... *\/;\n}\n\nfn foo() -> Vec<i32> {\n (0..11)\n .map(|x|\n \/\/ This comment disappears.\n if x % 2 == 0 { x } else { x * 2 })\n .collect()\n}\n\nfn calc_page_len(prefix_len: usize, sofar: usize) -> usize {\n 2 \/\/ page type and flags\n + 1 \/\/ stored depth\n + 2 \/\/ stored count\n + prefix_len + sofar \/\/ sum of size of all the actual items\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>debuginfo: Add test case for limited debuginfo<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android: FIXME(#10381)\n\n\/\/ compile-flags:-g1\n\n\/\/ Make sure functions have proper names\n\/\/ debugger:info functions\n\/\/ check:static void limited-debuginfo::main();\n\/\/ check:static void limited-debuginfo::some_function();\n\/\/ check:static void limited-debuginfo::some_other_function();\n\/\/ check:static void limited-debuginfo::zzz();\n\n\/\/ debugger:rbreak zzz\n\/\/ debugger:run\n\n\/\/ Make sure there is no information about locals\n\/\/ debugger:finish\n\/\/ debugger:info locals\n\/\/ check:No locals.\n\/\/ debugger:continue\n\n\n#[allow(unused_variable)];\n\nstruct Struct {\n a: i64,\n b: i32\n}\n\nfn main() {\n some_function(101, 202);\n}\n\n\nfn zzz() {()}\n\nfn some_function(a: int, b: int) {\n let some_variable = Struct { a: 11, b: 22 };\n let some_other_variable = 23;\n zzz();\n}\n\nfn some_other_function(a: int, b: int) -> bool { true }<|endoftext|>"} {"text":"<commit_before><commit_msg>mutable variable added<commit_after><|endoftext|>"} {"text":"<commit_before>use registry::{Registry, StdRegistry};\nuse std::time::Duration;\nuse std::thread;\nuse std::sync::Arc;\n\npub trait Reporter: Send + Sync {\n fn get_unique_reporter_name(&self) -> &'static str;\n}\n\npub struct ConsoleReporter {\n registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str,\n}\n\nimpl Reporter for ConsoleReporter {\n fn get_unique_reporter_name(&self) -> &'static str {\n self.reporter_name\n }\n}\n\nimpl ConsoleReporter {\n pub fn new(registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str)\n -> ConsoleReporter {\n ConsoleReporter {\n registry: registry,\n reporter_name: reporter_name,\n }\n }\n pub fn start(&self, delay_ms: u32) {\n use metrics::metric::MetricValue::{Counter, Gauge, Histogram, Meter};\n let registry = self.registry.clone();\n thread::spawn(move || {\n loop {\n for metric_name in ®istry.get_metrics_names() {\n let metric = registry.get(metric_name);\n match metric.export_metric() {\n Meter(x) => {\n println!(\"{:?}\", x);\n }\n Gauge(x) => {\n println!(\"{:?}\", x);\n }\n Counter(x) => {\n println!(\"{:?}\", x);\n }\n Histogram(x) => {\n println!(\"histogram{:?}\", x);\n }\n }\n }\n thread::sleep(Duration::from_millis(delay_ms as u64));\n }\n });\n }\n}\n\n#[cfg(test)]\nmod test {\n\n use metrics::meter::{Meter, StdMeter};\n use metrics::counter::{Counter, StdCounter};\n use metrics::gauge::{Gauge, StdGauge};\n use registry::{Registry, StdRegistry};\n use reporter::base::ConsoleReporter;\n use std::sync::Arc;\n use std::time::Duration;\n use std::thread;\n use histogram::*;\n\n #[test]\n fn meter() {\n let m = StdMeter::new();\n m.mark(100);\n\n let mut c: StdCounter = StdCounter::new();\n c.inc();\n\n let mut g: StdGauge = StdGauge { value: 0f64 };\n g.set(1.2);\n\n let mut hc = HistogramConfig::new();\n hc.max_value(100).precision(1);\n let mut h = Histogram::configured(hc).unwrap();\n h.record(1, 1);\n\n let mut r = StdRegistry::new();\n r.insert(\"meter1\", m);\n r.insert(\"counter1\", c);\n r.insert(\"gauge1\", g);\n r.insert(\"histogram\", h);\n\n let arc_registry = Arc::new(r);\n let reporter = ConsoleReporter::new(arc_registry.clone(), \"test\");\n reporter.start(1);\n g.set(1.4);\n thread::sleep(Duration::from_millis(200 as u64));\n println!(\"poplopit\");\n\n }\n}\n<commit_msg>Add purpose of class<commit_after>\/\/ Common traits and functionality to reporting. 
\n\/\/ Also contains the ConsoleReporter\nuse registry::{Registry, StdRegistry};\nuse std::time::Duration;\nuse std::thread;\nuse std::sync::Arc;\n\npub trait Reporter: Send + Sync {\n fn get_unique_reporter_name(&self) -> &'static str;\n}\n\npub struct ConsoleReporter {\n registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str,\n}\n\nimpl Reporter for ConsoleReporter {\n fn get_unique_reporter_name(&self) -> &'static str {\n self.reporter_name\n }\n}\n\nimpl ConsoleReporter {\n pub fn new(registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str)\n -> ConsoleReporter {\n ConsoleReporter {\n registry: registry,\n reporter_name: reporter_name,\n }\n }\n pub fn start(&self, delay_ms: u32) {\n use metrics::metric::MetricValue::{Counter, Gauge, Histogram, Meter};\n let registry = self.registry.clone();\n thread::spawn(move || {\n loop {\n for metric_name in ®istry.get_metrics_names() {\n let metric = registry.get(metric_name);\n match metric.export_metric() {\n Meter(x) => {\n println!(\"{:?}\", x);\n }\n Gauge(x) => {\n println!(\"{:?}\", x);\n }\n Counter(x) => {\n println!(\"{:?}\", x);\n }\n Histogram(x) => {\n println!(\"histogram{:?}\", x);\n }\n }\n }\n thread::sleep(Duration::from_millis(delay_ms as u64));\n }\n });\n }\n}\n\n#[cfg(test)]\nmod test {\n\n use metrics::meter::{Meter, StdMeter};\n use metrics::counter::{Counter, StdCounter};\n use metrics::gauge::{Gauge, StdGauge};\n use registry::{Registry, StdRegistry};\n use reporter::base::ConsoleReporter;\n use std::sync::Arc;\n use std::time::Duration;\n use std::thread;\n use histogram::*;\n\n #[test]\n fn meter() {\n let m = StdMeter::new();\n m.mark(100);\n\n let mut c: StdCounter = StdCounter::new();\n c.inc();\n\n let mut g: StdGauge = StdGauge { value: 0f64 };\n g.set(1.2);\n\n let mut hc = HistogramConfig::new();\n hc.max_value(100).precision(1);\n let mut h = Histogram::configured(hc).unwrap();\n h.record(1, 1);\n\n let mut r = StdRegistry::new();\n r.insert(\"meter1\", m);\n r.insert(\"counter1\", c);\n r.insert(\"gauge1\", g);\n r.insert(\"histogram\", h);\n\n let arc_registry = Arc::new(r);\n let reporter = ConsoleReporter::new(arc_registry.clone(), \"test\");\n reporter.start(1);\n g.set(1.4);\n thread::sleep(Duration::from_millis(200 as u64));\n println!(\"poplopit\");\n\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/*!\n * SMTP commands library\n *\n * RFC 5321 : http:\/\/tools.ietf.org\/html\/rfc5321#section-4.1\n *\/\n\nuse std::fmt;\nuse common::CRLF;\n\n\/*\n * HELO <SP> <domain> <CRLF>\n * MAIL <SP> FROM:<reverse-path> <CRLF>\n * RCPT <SP> TO:<forward-path> <CRLF>\n * DATA <CRLF>\n * RSET <CRLF>\n * SEND <SP> FROM:<reverse-path> <CRLF>\n * SOML <SP> FROM:<reverse-path> <CRLF>\n * SAML <SP> FROM:<reverse-path> <CRLF>\n * VRFY <SP> <string> <CRLF>\n * EXPN <SP> <string> <CRLF>\n * HELP [<SP> <string>] <CRLF>\n * NOOP <CRLF>\n * QUIT <CRLF>\n * TURN <CRLF>\n *\/\n\n\/\/\/ List of SMTP commands\n#[deriving(Eq,Clone)]\npub enum Command {\n Hello,\n Ehello,\n Mail,\n Recipient,\n Data,\n Reset,\n SendMail,\n SendOrMail,\n SendAndMail,\n Verify,\n Expand,\n Help,\n Noop,\n Quit,\n \/\/\/ Deprecated in RFC 5321\n Turn,\n}\n\nimpl Command {\n \/\/\/ Tell if the command accetps an string argument.\n pub fn takes_argument(&self) -> bool{\n match *self {\n Ehello => true,\n Hello => true,\n Mail => true,\n Recipient => true,\n Data => false,\n Reset => false,\n SendMail => true,\n SendOrMail => true,\n SendAndMail => true,\n Verify => true,\n Expand => true,\n Help => true,\n Noop => false,\n Quit => 
false,\n Turn => false,\n }\n }\n\n \/\/\/ Tell if an argument is needed by the command.\n pub fn needs_argument(&self) -> bool {\n match *self {\n Ehello => true,\n Hello => true,\n Mail => true,\n Recipient => true,\n Data => false,\n Reset => false,\n SendMail => true,\n SendOrMail => true,\n SendAndMail => true,\n Verify => true,\n Expand => true,\n Help => false,\n Noop => false,\n Quit => false,\n Turn => false,\n }\n }\n}\n\nimpl ToStr for Command {\n \/\/\/ Get the name of a command.\n fn to_str(&self) -> ~str {\n match *self {\n Hello => ~\"HELO\",\n Ehello => ~\"EHLO\",\n Mail => ~\"MAIL\",\n Recipient => ~\"RCPT\",\n Data => ~\"DATA\",\n Reset => ~\"RSET\",\n SendMail => ~\"SEND\",\n SendOrMail => ~\"SOML\",\n SendAndMail => ~\"SAML\",\n Verify => ~\"VRFY\",\n Expand => ~\"EXPN\",\n Help => ~\"HELP\",\n Noop => ~\"NOOP\",\n Quit => ~\"QUIT\",\n Turn => ~\"TURN\",\n }\n }\n}\n\nimpl FromStr for Command {\n \/\/\/ Get the Command from its name.\n fn from_str(command: &str) -> Option<Command> {\n if !command.is_ascii() {\n return None;\n }\n match command {\n \"HELO\" => Some(Hello),\n \"EHLO\" => Some(Ehello),\n \"MAIL\" => Some(Mail),\n \"RCPT\" => Some(Recipient),\n \"DATA\" => Some(Data),\n \"RSET\" => Some(Reset),\n \"SEND\" => Some(SendMail),\n \"SOML\" => Some(SendOrMail),\n \"SAML\" => Some(SendAndMail),\n \"VRFY\" => Some(Verify),\n \"EXPN\" => Some(Expand),\n \"HELP\" => Some(Help),\n \"NOOP\" => Some(Noop),\n \"QUIT\" => Some(Quit),\n \"TURN\" => Some(Turn),\n _ => None,\n }\n }\n}\n\nimpl fmt::Show for Command {\n \/\/\/ Format SMTP command display\n fn fmt(s: &Command, f: &mut fmt::Formatter) {\n f.buf.write(match *s {\n Ehello => \"EHLO\".as_bytes(),\n Hello => \"HELO\".as_bytes(),\n Mail => \"MAIL FROM:\".as_bytes(),\n Recipient => \"RCPT TO:\".as_bytes(),\n Data => \"DATA\".as_bytes(),\n Reset => \"RSET\".as_bytes(),\n SendMail => \"SEND TO:\".as_bytes(),\n SendOrMail => \"SOML TO:\".as_bytes(),\n SendAndMail => \"SAML TO:\".as_bytes(),\n Verify => \"VRFY\".as_bytes(),\n Expand => \"EXPN\".as_bytes(),\n Help => \"HELP\".as_bytes(),\n Noop => \"NOOP\".as_bytes(),\n Quit => \"QUIT\".as_bytes(),\n Turn => \"TURN\".as_bytes()\n })\n }\n}\n\n\/\/\/ Structure for a complete SMTP command, containing an optionnal string argument.\npub struct SmtpCommand {\n command: Command,\n argument: Option<~str>\n}\n\nimpl SmtpCommand {\n \/\/\/ Return a new structure from the name of the command and an optionnal argument.\n pub fn new(command_str: ~str, argument: Option<~str>) -> SmtpCommand {\n let command = match from_str::<Command>(command_str) {\n Some(x) => x,\n None => fail!(\"Unrecognized SMTP command\")\n };\n\n match (command.takes_argument(), command.needs_argument(), argument.clone()) {\n (true, true, None) => fail!(\"Wrong SMTP syntax : argument needed\"),\n (false, false, Some(x)) => fail!(\"Wrong SMTP syntax : {:s} not accepted\", x),\n _ => SmtpCommand {command: command, argument: argument}\n }\n }\n\n \/\/\/ Return the formatted command, ready to be used in an SMTP session.\n pub fn get_formatted_command(&self) -> ~str {\n match (self.command.takes_argument(), self.command.needs_argument(), self.argument.clone()) {\n (true, _, Some(argument)) => format!(\"{} {}{}\", self.command, argument, CRLF),\n (_, false, None) => format!(\"{}{}\", self.command, CRLF),\n _ => fail!(\"Wrong SMTP syntax\")\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::SmtpCommand;\n\n #[test]\n fn test_command_parameters() {\n assert!((super::Help).takes_argument() == true);\n 
assert!((super::Reset).takes_argument() == false);\n assert!((super::Hello).needs_argument() == true);\n }\n\n #[test]\n fn test_get_simple_command() {\n assert!(SmtpCommand::new(~\"TURN\", None).get_formatted_command() == format!(\"TURN{}\", ::common::CRLF));\n }\n\n #[test]\n fn test_get_argument_command() {\n assert!(SmtpCommand::new(~\"EHLO\", Some(~\"example.example\")).get_formatted_command() == format!(\"EHLO example.example{}\", ::common::CRLF));\n }\n}<commit_msg>Make compilation work with yersterday's rust version<commit_after>\/*!\n * SMTP commands library\n *\n * RFC 5321 : http:\/\/tools.ietf.org\/html\/rfc5321#section-4.1\n *\/\n\nuse std::fmt;\nuse common::CRLF;\n\n\/*\n * HELO <SP> <domain> <CRLF>\n * MAIL <SP> FROM:<reverse-path> <CRLF>\n * RCPT <SP> TO:<forward-path> <CRLF>\n * DATA <CRLF>\n * RSET <CRLF>\n * SEND <SP> FROM:<reverse-path> <CRLF>\n * SOML <SP> FROM:<reverse-path> <CRLF>\n * SAML <SP> FROM:<reverse-path> <CRLF>\n * VRFY <SP> <string> <CRLF>\n * EXPN <SP> <string> <CRLF>\n * HELP [<SP> <string>] <CRLF>\n * NOOP <CRLF>\n * QUIT <CRLF>\n * TURN <CRLF>\n *\/\n\n\/\/\/ List of SMTP commands\n#[deriving(Eq,Clone)]\npub enum Command {\n Hello,\n Ehello,\n Mail,\n Recipient,\n Data,\n Reset,\n SendMail,\n SendOrMail,\n SendAndMail,\n Verify,\n Expand,\n Help,\n Noop,\n Quit,\n \/\/\/ Deprecated in RFC 5321\n Turn,\n}\n\nimpl Command {\n \/\/\/ Tell if the command accetps an string argument.\n pub fn takes_argument(&self) -> bool{\n match *self {\n Ehello => true,\n Hello => true,\n Mail => true,\n Recipient => true,\n Data => false,\n Reset => false,\n SendMail => true,\n SendOrMail => true,\n SendAndMail => true,\n Verify => true,\n Expand => true,\n Help => true,\n Noop => false,\n Quit => false,\n Turn => false,\n }\n }\n\n \/\/\/ Tell if an argument is needed by the command.\n pub fn needs_argument(&self) -> bool {\n match *self {\n Ehello => true,\n Hello => true,\n Mail => true,\n Recipient => true,\n Data => false,\n Reset => false,\n SendMail => true,\n SendOrMail => true,\n SendAndMail => true,\n Verify => true,\n Expand => true,\n Help => false,\n Noop => false,\n Quit => false,\n Turn => false,\n }\n }\n}\n\nimpl ToStr for Command {\n \/\/\/ Get the name of a command.\n fn to_str(&self) -> ~str {\n match *self {\n Hello => ~\"HELO\",\n Ehello => ~\"EHLO\",\n Mail => ~\"MAIL\",\n Recipient => ~\"RCPT\",\n Data => ~\"DATA\",\n Reset => ~\"RSET\",\n SendMail => ~\"SEND\",\n SendOrMail => ~\"SOML\",\n SendAndMail => ~\"SAML\",\n Verify => ~\"VRFY\",\n Expand => ~\"EXPN\",\n Help => ~\"HELP\",\n Noop => ~\"NOOP\",\n Quit => ~\"QUIT\",\n Turn => ~\"TURN\",\n }\n }\n}\n\nimpl FromStr for Command {\n \/\/\/ Get the Command from its name.\n fn from_str(command: &str) -> Option<Command> {\n if !command.is_ascii() {\n return None;\n }\n match command {\n \"HELO\" => Some(Hello),\n \"EHLO\" => Some(Ehello),\n \"MAIL\" => Some(Mail),\n \"RCPT\" => Some(Recipient),\n \"DATA\" => Some(Data),\n \"RSET\" => Some(Reset),\n \"SEND\" => Some(SendMail),\n \"SOML\" => Some(SendOrMail),\n \"SAML\" => Some(SendAndMail),\n \"VRFY\" => Some(Verify),\n \"EXPN\" => Some(Expand),\n \"HELP\" => Some(Help),\n \"NOOP\" => Some(Noop),\n \"QUIT\" => Some(Quit),\n \"TURN\" => Some(Turn),\n _ => None,\n }\n }\n}\n\nimpl fmt::Default for Command {\n \/\/\/ Format SMTP command display\n fn fmt(s: &Command, f: &mut fmt::Formatter) {\n f.buf.write(match *s {\n Ehello => \"EHLO\".as_bytes(),\n Hello => \"HELO\".as_bytes(),\n Mail => \"MAIL FROM:\".as_bytes(),\n Recipient => \"RCPT 
TO:\".as_bytes(),\n Data => \"DATA\".as_bytes(),\n Reset => \"RSET\".as_bytes(),\n SendMail => \"SEND TO:\".as_bytes(),\n SendOrMail => \"SOML TO:\".as_bytes(),\n SendAndMail => \"SAML TO:\".as_bytes(),\n Verify => \"VRFY\".as_bytes(),\n Expand => \"EXPN\".as_bytes(),\n Help => \"HELP\".as_bytes(),\n Noop => \"NOOP\".as_bytes(),\n Quit => \"QUIT\".as_bytes(),\n Turn => \"TURN\".as_bytes()\n })\n }\n}\n\n\/\/\/ Structure for a complete SMTP command, containing an optionnal string argument.\npub struct SmtpCommand {\n command: Command,\n argument: Option<~str>\n}\n\nimpl SmtpCommand {\n \/\/\/ Return a new structure from the name of the command and an optionnal argument.\n pub fn new(command_str: ~str, argument: Option<~str>) -> SmtpCommand {\n let command = match from_str::<Command>(command_str) {\n Some(x) => x,\n None => fail!(\"Unrecognized SMTP command\")\n };\n\n match (command.takes_argument(), command.needs_argument(), argument.clone()) {\n (true, true, None) => fail!(\"Wrong SMTP syntax : argument needed\"),\n (false, false, Some(x)) => fail!(\"Wrong SMTP syntax : {:s} not accepted\", x),\n _ => SmtpCommand {command: command, argument: argument}\n }\n }\n\n \/\/\/ Return the formatted command, ready to be used in an SMTP session.\n pub fn get_formatted_command(&self) -> ~str {\n match (self.command.takes_argument(), self.command.needs_argument(), self.argument.clone()) {\n (true, _, Some(argument)) => format!(\"{} {}{}\", self.command, argument, CRLF),\n (_, false, None) => format!(\"{}{}\", self.command, CRLF),\n _ => fail!(\"Wrong SMTP syntax\")\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::SmtpCommand;\n\n #[test]\n fn test_command_parameters() {\n assert!((super::Help).takes_argument() == true);\n assert!((super::Reset).takes_argument() == false);\n assert!((super::Hello).needs_argument() == true);\n }\n\n #[test]\n fn test_get_simple_command() {\n assert!(SmtpCommand::new(~\"TURN\", None).get_formatted_command() == format!(\"TURN{}\", ::common::CRLF));\n }\n\n #[test]\n fn test_get_argument_command() {\n assert!(SmtpCommand::new(~\"EHLO\", Some(~\"example.example\")).get_formatted_command() == format!(\"EHLO example.example{}\", ::common::CRLF));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![crate_name = \"uptime\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Jordi Boggiano <j.boggiano@seld.be>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n\/* last synced with: cat (GNU coreutils) 8.13 *\/\n\n#![allow(non_camel_case_types)]\n#![feature(macro_rules, globs)]\n\nextern crate getopts;\nextern crate libc;\nextern crate \"time\" as rtime;\n\nuse std::mem::transmute;\nuse std::io::{print, File};\nuse std::ptr::null;\nuse std::str::from_str;\nuse libc::{time_t, c_double, c_int, c_char};\nuse utmpx::*;\n\n#[path = \"..\/common\/util.rs\"] mod util;\n\n#[path = \"..\/common\/c_types.rs\"] mod c_types;\n\n#[path = \"..\/common\/utmpx.rs\"] mod utmpx;\n\nstatic NAME: &'static str = \"uptime\";\n\n#[cfg(unix)]\nextern {\n fn getloadavg(loadavg: *mut c_double, nelem: c_int) -> c_int;\n\n fn getutxent() -> *const c_utmp;\n fn setutxent();\n fn endutxent();\n\n #[cfg(any(target_os = \"macos\", target_os = \"linux\"))]\n fn utmpxname(file: *const c_char) -> c_int;\n}\n\n#[cfg(windows)]\nextern {\n fn GetTickCount() -> libc::uint32_t;\n}\n\n#[cfg(target_os = \"freebsd\")]\nunsafe extern fn utmpxname(_file: *const c_char) -> c_int {\n 0\n}\n\npub fn uumain(args: 
Vec<String>) -> int {\n let program = args[0].clone();\n let opts = [\n getopts::optflag(\"v\", \"version\", \"output version information and exit\"),\n getopts::optflag(\"h\", \"help\", \"display this help and exit\"),\n ];\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n if matches.opt_present(\"version\") {\n println!(\"uptime 1.0.0\");\n return 0;\n }\n if matches.opt_present(\"help\") || matches.free.len() > 0 {\n println!(\"Usage:\");\n println!(\" {0} [OPTION]\", program);\n println!(\"\");\n print(getopts::usage(\"Print the current time, the length of time the system has been up,\\n\\\n the number of users on the system, and the average number of jobs\\n\\\n in the run queue over the last 1, 5 and 15 minutes.\", &opts).as_slice());\n return 0;\n }\n\n print_time();\n let (boot_time, user_count) = process_utmpx();\n let upsecs = get_uptime(boot_time) \/ 100;\n print_uptime(upsecs);\n print_nusers(user_count);\n print_loadavg();\n\n 0\n}\n\nfn print_loadavg() {\n let mut avg: [c_double, ..3] = [0.0, ..3];\n let loads: i32 = unsafe { transmute(getloadavg(avg.as_mut_ptr(), 3)) };\n\n if loads == -1 {\n print!(\"\\n\");\n }\n else {\n print!(\"load average: \")\n for n in range(0, loads) {\n print!(\"{:.2}{}\", avg[n as uint], if n == loads - 1 { \"\\n\" }\n else { \", \" } );\n }\n }\n}\n\n#[cfg(unix)]\nfn process_utmpx() -> (Option<time_t>, uint) {\n DEFAULT_FILE.with_c_str(|filename| {\n unsafe {\n utmpxname(filename);\n }\n });\n\n let mut nusers = 0;\n let mut boot_time = None;\n\n unsafe {\n setutxent();\n\n loop {\n let line = getutxent();\n\n if line == null() {\n break;\n }\n\n match (*line).ut_type {\n USER_PROCESS => nusers += 1,\n BOOT_TIME => {\n let t = (*line).ut_tv;\n if t.tv_sec > 0 {\n boot_time = Some(t.tv_sec);\n }\n },\n _ => continue\n }\n }\n\n endutxent();\n }\n\n (boot_time, nusers)\n}\n\n#[cfg(windows)]\nfn process_utmpx() -> (Option<time_t>, uint) {\n (None, 0) \/\/ TODO: change 0 to number of users\n}\n\nfn print_nusers(nusers: uint) {\n if nusers == 1 {\n print!(\"1 user, \");\n } else if nusers > 1 {\n print!(\"{} users, \", nusers);\n }\n}\n\nfn print_time() {\n let local_time = rtime::now();\n\n print!(\" {:02}:{:02}:{:02} \", local_time.tm_hour,\n local_time.tm_min, local_time.tm_sec);\n}\n\n#[cfg(unix)]\nfn get_uptime(boot_time: Option<time_t>) -> i64 {\n let proc_uptime = File::open(&Path::new(\"\/proc\/uptime\"))\n .read_to_string();\n\n let uptime_text = match proc_uptime {\n Ok(s) => s,\n _ => return match boot_time {\n Some(t) => {\n let now = rtime::get_time().sec;\n ((now - t) * 100) as i64 \/\/ Return in ms\n },\n _ => -1\n }\n };\n\n match uptime_text.as_slice().words().next() {\n Some(s) => match from_str(s.replace(\".\", \"\").as_slice()) {\n Some(n) => n,\n None => -1\n },\n None => -1\n }\n}\n\n#[cfg(windows)]\nfn get_uptime(boot_time: Option<time_t>) -> i64 {\n unsafe { GetTickCount() as i64 }\n}\n\nfn print_uptime(upsecs: i64) {\n let updays = upsecs \/ 86400;\n let uphours = (upsecs - (updays * 86400)) \/ 3600;\n let upmins = (upsecs - (updays * 86400) - (uphours * 3600)) \/ 60;\n if updays == 1 {\n print!(\"up {:1} day, {:2}:{:02}, \", updays, uphours, upmins);\n }\n else if updays > 1 {\n print!(\"up {:1} days, {:2}:{:02}, \", updays, uphours, upmins);\n }\n else {\n print!(\"up {:2}:{:02}, \", uphours, upmins);\n }\n}\n<commit_msg>fix 32-bit kernel uptime error<commit_after>#![crate_name = \"uptime\"]\n\n\/*\n * This file is part of the uutils 
coreutils package.\n *\n * (c) Jordi Boggiano <j.boggiano@seld.be>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n\/* last synced with: cat (GNU coreutils) 8.13 *\/\n\n#![allow(non_camel_case_types)]\n#![feature(macro_rules, globs)]\n\nextern crate getopts;\nextern crate libc;\nextern crate \"time\" as rtime;\n\nuse std::mem::transmute;\nuse std::io::{print, File};\nuse std::ptr::null;\nuse std::str::from_str;\nuse libc::{time_t, c_double, c_int, c_char};\nuse utmpx::*;\n\n#[path = \"..\/common\/util.rs\"] mod util;\n\n#[path = \"..\/common\/c_types.rs\"] mod c_types;\n\n#[path = \"..\/common\/utmpx.rs\"] mod utmpx;\n\nstatic NAME: &'static str = \"uptime\";\n\n#[cfg(unix)]\nextern {\n fn getloadavg(loadavg: *mut c_double, nelem: c_int) -> c_int;\n\n fn getutxent() -> *const c_utmp;\n fn setutxent();\n fn endutxent();\n\n #[cfg(any(target_os = \"macos\", target_os = \"linux\"))]\n fn utmpxname(file: *const c_char) -> c_int;\n}\n\n#[cfg(windows)]\nextern {\n fn GetTickCount() -> libc::uint32_t;\n}\n\n#[cfg(target_os = \"freebsd\")]\nunsafe extern fn utmpxname(_file: *const c_char) -> c_int {\n 0\n}\n\npub fn uumain(args: Vec<String>) -> int {\n let program = args[0].clone();\n let opts = [\n getopts::optflag(\"v\", \"version\", \"output version information and exit\"),\n getopts::optflag(\"h\", \"help\", \"display this help and exit\"),\n ];\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"Invalid options\\n{}\", f)\n };\n if matches.opt_present(\"version\") {\n println!(\"uptime 1.0.0\");\n return 0;\n }\n if matches.opt_present(\"help\") || matches.free.len() > 0 {\n println!(\"Usage:\");\n println!(\" {0} [OPTION]\", program);\n println!(\"\");\n print(getopts::usage(\"Print the current time, the length of time the system has been up,\\n\\\n the number of users on the system, and the average number of jobs\\n\\\n in the run queue over the last 1, 5 and 15 minutes.\", &opts).as_slice());\n return 0;\n }\n\n print_time();\n let (boot_time, user_count) = process_utmpx();\n let upsecs = get_uptime(boot_time) \/ 100;\n print_uptime(upsecs);\n print_nusers(user_count);\n print_loadavg();\n\n 0\n}\n\nfn print_loadavg() {\n let mut avg: [c_double, ..3] = [0.0, ..3];\n let loads: i32 = unsafe { transmute(getloadavg(avg.as_mut_ptr(), 3)) };\n\n if loads == -1 {\n print!(\"\\n\");\n }\n else {\n print!(\"load average: \")\n for n in range(0, loads) {\n print!(\"{:.2}{}\", avg[n as uint], if n == loads - 1 { \"\\n\" }\n else { \", \" } );\n }\n }\n}\n\n#[cfg(unix)]\nfn process_utmpx() -> (Option<time_t>, uint) {\n DEFAULT_FILE.with_c_str(|filename| {\n unsafe {\n utmpxname(filename);\n }\n });\n\n let mut nusers = 0;\n let mut boot_time = None;\n\n unsafe {\n setutxent();\n\n loop {\n let line = getutxent();\n\n if line == null() {\n break;\n }\n\n match (*line).ut_type {\n USER_PROCESS => nusers += 1,\n BOOT_TIME => {\n let t = (*line).ut_tv;\n if t.tv_sec > 0 {\n boot_time = Some(t.tv_sec);\n }\n },\n _ => continue\n }\n }\n\n endutxent();\n }\n\n (boot_time, nusers)\n}\n\n#[cfg(windows)]\nfn process_utmpx() -> (Option<time_t>, uint) {\n (None, 0) \/\/ TODO: change 0 to number of users\n}\n\nfn print_nusers(nusers: uint) {\n if nusers == 1 {\n print!(\"1 user, \");\n } else if nusers > 1 {\n print!(\"{} users, \", nusers);\n }\n}\n\nfn print_time() {\n let local_time = rtime::now();\n\n print!(\" {:02}:{:02}:{:02} \", local_time.tm_hour,\n 
local_time.tm_min, local_time.tm_sec);\n}\n\n#[cfg(unix)]\nfn get_uptime(boot_time: Option<time_t>) -> i64 {\n let proc_uptime = File::open(&Path::new(\"\/proc\/uptime\"))\n .read_to_string();\n\n let uptime_text = match proc_uptime {\n Ok(s) => s,\n _ => return match boot_time {\n Some(t) => {\n let now = rtime::get_time().sec;\n let time = t.to_i64().unwrap();\n ((now - time) * 100) as i64 \/\/ Return in ms\n },\n _ => -1\n }\n };\n\n match uptime_text.as_slice().words().next() {\n Some(s) => match from_str(s.replace(\".\", \"\").as_slice()) {\n Some(n) => n,\n None => -1\n },\n None => -1\n }\n}\n\n#[cfg(windows)]\nfn get_uptime(boot_time: Option<time_t>) -> i64 {\n unsafe { GetTickCount() as i64 }\n}\n\nfn print_uptime(upsecs: i64) {\n let updays = upsecs \/ 86400;\n let uphours = (upsecs - (updays * 86400)) \/ 3600;\n let upmins = (upsecs - (updays * 86400) - (uphours * 3600)) \/ 60;\n if updays == 1 {\n print!(\"up {:1} day, {:2}:{:02}, \", updays, uphours, upmins);\n }\n else if updays > 1 {\n print!(\"up {:1} days, {:2}:{:02}, \", updays, uphours, upmins);\n }\n else {\n print!(\"up {:2}:{:02}, \", uphours, upmins);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[parser] add dataset writer<commit_after>use crate::dataset::*;\nuse crate::error::{Error, Result};\nuse dicom_core::{DataElementHeader, Length, VR};\nuse dicom_encoding::encode::{Encode, EncodeTo};\nuse dicom_encoding::text::{SpecificCharacterSet, TextCodec};\nuse dicom_encoding::TransferSyntax;\nuse std::io::Write;\n\n\/\/\/ A token representing a sequence or item start.\n#[derive(Debug)]\nstruct SeqToken {\n \/\/\/ Whether it is the start of a sequence or the start of an item.\n typ: SeqTokenType,\n \/\/\/ The length of the value, as indicated by the starting element,\n \/\/\/ can be unknown.\n len: Length,\n}\n\n\/\/\/ A stateful device for printing a DICOM data set in sequential order.\n\/\/\/ This is analogous to the `DatasetReader` type for converting data\n\/\/\/ set tokens to bytes.\n#[derive(Debug)]\npub struct DataSetWriter<W, E, T> {\n to: W,\n encoder: E,\n text: T,\n seq_tokens: Vec<SeqToken>,\n}\n\nimpl<W> DataSetWriter<W, Box<dyn EncodeTo<W>>, Box<dyn TextCodec>>\nwhere\n W: Write,\n{\n pub fn with_ts_cs(to: W, ts: TransferSyntax, cs: SpecificCharacterSet) -> Result<Self> {\n let encoder = ts\n .encoder_for()\n .ok_or_else(|| Error::UnsupportedTransferSyntax)?;\n let text = cs.codec().ok_or_else(|| Error::UnsupportedCharacterSet)?;\n Ok(DataSetWriter::new(to, encoder, text))\n }\n}\n\nimpl<W, E, T> DataSetWriter<W, E, T> {\n pub fn new(to: W, encoder: E, text: T) -> Self {\n DataSetWriter {\n to,\n encoder,\n text,\n seq_tokens: Vec::new(),\n }\n }\n}\n\nimpl<W, E, T> DataSetWriter<W, E, T>\nwhere\n W: Write,\n E: Encode,\n T: TextCodec,\n{\n \/\/\/ Feed the given sequence of tokens which are part of the same data set.\n pub fn write_sequence<I>(&mut self, tokens: I) -> Result<()>\n where\n I: IntoIterator<Item = DataToken>,\n {\n for token in tokens {\n self.write(token)?;\n }\n\n Ok(())\n }\n\n \/\/\/ Feed the given data set token for writing the data set.\n #[inline]\n pub fn write(&mut self, token: DataToken) -> Result<()> {\n \/\/ TODO adjust the logic of sequence printing:\n \/\/ explicit length sequences or items should not print\n \/\/ the respective delimiter\n\n match token {\n DataToken::SequenceStart { tag: _, len } => {\n self.seq_tokens.push(SeqToken {\n typ: SeqTokenType::Sequence,\n len,\n });\n self.write_stateless(token)?;\n Ok(())\n }\n DataToken::ItemStart { len } => {\n 
self.seq_tokens.push(SeqToken {\n typ: SeqTokenType::Item,\n len,\n });\n self.write_stateless(token)?;\n Ok(())\n }\n DataToken::ItemEnd => {\n \/\/ only write if it's an unknown length item\n if let Some(seq_start) = self.seq_tokens.pop() {\n if seq_start.typ == SeqTokenType::Item && seq_start.len.is_undefined() {\n self.write_stateless(token)?;\n }\n }\n Ok(())\n }\n DataToken::SequenceEnd => {\n \/\/ only write if it's an unknown length sequence\n if let Some(seq_start) = self.seq_tokens.pop() {\n if seq_start.typ == SeqTokenType::Sequence && seq_start.len.is_undefined() {\n self.write_stateless(token)?;\n }\n }\n Ok(())\n }\n _ => self.write_stateless(token),\n }\n }\n\n fn write_stateless(&mut self, token: DataToken) -> Result<()> {\n use DataToken::*;\n match token {\n ElementHeader(header) => {\n self.encoder.encode_element_header(&mut self.to, header)?;\n }\n SequenceStart { tag, len } => {\n self.encoder.encode_element_header(\n &mut self.to,\n DataElementHeader::new(tag, VR::SQ, len),\n )?;\n }\n SequenceEnd => {\n self.encoder.encode_sequence_delimiter(&mut self.to)?;\n }\n ItemStart { len } => {\n self.encoder.encode_item_header(&mut self.to, len.0)?;\n }\n ItemEnd => {\n self.encoder.encode_item_delimiter(&mut self.to)?;\n }\n PrimitiveValue(value) => {\n \/\/ TODO handle strings properly\n self.encoder.encode_primitive(&mut self.to, &value)?;\n }\n }\n Ok(())\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::super::DataToken;\n use super::DataSetWriter;\n use crate::printer::Printer;\n use dicom_core::header::{DataElementHeader, Length};\n use dicom_core::value::PrimitiveValue;\n use dicom_encoding::text::DefaultCharacterSetCodec;\n use dicom_encoding::transfer_syntax::explicit_le::ExplicitVRLittleEndianEncoder;\n\n fn validate_dataset_writer<I>(tokens: I, ground_truth: &[u8])\n where\n I: IntoIterator<Item = DataToken>,\n {\n let mut raw_out: Vec<u8> = vec![];\n let encoder = ExplicitVRLittleEndianEncoder::default();\n let text = DefaultCharacterSetCodec::default();\n let mut dset_writer = DataSetWriter::new(&mut raw_out, encoder, text);\n\n \/\/let mut iter = Iterator::zip(dset_writer.by_ref(), ground_truth);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use regex::Regex;\nuse regex::Captures;\nuse rustc_demangle::demangle;\n\npub fn remove_assembler_directives(block: &str) -> String {\n lazy_static! {\n static ref ASM_DIR_REGEX: Regex = Regex::new(r\"(?m)^\\s*\\..*$\").expect(\"Failed to create ASM_DIR_REGEX\");\n }\n\n let mut filtered_asm = String::new();\n for line in block.lines() {\n if !ASM_DIR_REGEX.is_match(line) {\n filtered_asm.push_str(line);\n filtered_asm.push('\\n');\n }\n }\n\n filtered_asm\n}\n\npub fn demangle_asm(block: &str) -> String {\n lazy_static! 
{\n static ref DEMANGLE_REGEX: Regex = Regex::new(r\"_[a-zA-Z0-9._$]*\").expect(\"Failed to create DEMANGLE_REGEX\");\n }\n\n DEMANGLE_REGEX.replace_all(block, |caps: &Captures| {\n format!(\"{:#}\", demangle(caps.get(0)\n .expect(\"Failed to find symbols to demangle\")\n .as_str()))\n }).to_string()\n}\n\n#[cfg(test)]\nmod test {\n #[test]\n fn directives_pass_through() {\n assert_eq!(\n super::remove_assembler_directives(\"core::fmt::Arguments::new_v1:\\n push rbp\\n mov rbp, rsp\"),\n \"core::fmt::Arguments::new_v1:\\n push rbp\\n mov rbp, rsp\\n\");\n }\n\n #[test]\n fn one_directive_removed() {\n assert_eq!(\n super::remove_assembler_directives(\" .filesystem1 \\\"<println macros>\\\"\\n movq%rsp, %rbp\\n\"),\n \" movq%rsp, %rbp\\n\");\n }\n\n #[test]\n fn many_directives_removed() {\n assert_eq!(\n super::remove_assembler_directives(\" .cfi_def_cfa_register %rbp\\n subq$80, %rsp\\n .Ltmp2:\"),\n \" subq$80, %rsp\\n\");\n }\n\n #[test]\n fn demangle_pass_through() {\n assert_eq!(\n super::demangle_asm(\"push rbp\\n mov rbp, rsp\"),\n \"push rbp\\n mov rbp, rsp\");\n }\n\n #[test]\n fn demangles() {\n assert_eq!(\n super::demangle_asm(\"_ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE\"),\n \"core::fmt::Arguments::new_v1\");\n }\n\n #[test]\n fn many_demangles() {\n assert_eq!(\n super::demangle_asm(\".section.text._ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE,\\\"ax\\\",@progbits\\n .p2align4, 0x90\\n .type_ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE,@function\"),\n \".section.text.core::fmt::Arguments::new_v1,\\\"ax\\\",@progbits\\n .p2align4, 0x90\\n .typecore::fmt::Arguments::new_v1,@function\");\n }\n \n}\n<commit_msg>Fixed labels being removed from asm<commit_after>use regex::Regex;\nuse regex::Captures;\nuse rustc_demangle::demangle;\n\npub fn remove_assembler_directives(block: &str) -> String {\n lazy_static! {\n static ref ASM_DIR_REGEX: Regex = Regex::new(r\"(?m)^\\s*\\..*[^:]$\").expect(\"Failed to create ASM_DIR_REGEX\");\n }\n\n let mut filtered_asm = String::new();\n for line in block.lines() {\n if !ASM_DIR_REGEX.is_match(line) {\n filtered_asm.push_str(line);\n filtered_asm.push('\\n');\n }\n }\n\n filtered_asm\n}\n\npub fn demangle_asm(block: &str) -> String {\n lazy_static! 
{\n static ref DEMANGLE_REGEX: Regex = Regex::new(r\"_[a-zA-Z0-9._$]*\").expect(\"Failed to create DEMANGLE_REGEX\");\n }\n\n DEMANGLE_REGEX.replace_all(block, |caps: &Captures| {\n format!(\"{:#}\", demangle(caps.get(0)\n .expect(\"Failed to find symbols to demangle\")\n .as_str()))\n }).to_string()\n}\n\n#[cfg(test)]\nmod test {\n #[test]\n fn directives_pass_through() {\n assert_eq!(\n super::remove_assembler_directives(\"core::fmt::Arguments::new_v1:\\n push rbp\\n mov rbp, rsp\"),\n \"core::fmt::Arguments::new_v1:\\n push rbp\\n mov rbp, rsp\\n\");\n }\n\n #[test]\n fn one_directive_removed() {\n assert_eq!(\n super::remove_assembler_directives(\" .filesystem1 \\\"<println macros>\\\"\\n movq%rsp, %rbp\\n\"),\n \" movq%rsp, %rbp\\n\");\n }\n\n #[test]\n fn many_directives_removed() {\n assert_eq!(\n super::remove_assembler_directives(\" .cfi_def_cfa_register %rbp\\n subq$80, %rsp\\n .text\\n\"),\n \" subq$80, %rsp\\n\");\n }\n\n #[test]\n fn labels_not_removed() {\n assert_eq!(\n super::remove_assembler_directives(\n \".type core::fmt::Arguments::new_v1,@function\\n core::fmt::Arguments::new_v1:\\n .Lfunc_begin0:\\n\"),\n \" core::fmt::Arguments::new_v1:\\n .Lfunc_begin0:\\n\");\n }\n\n #[test]\n fn demangle_pass_through() {\n assert_eq!(\n super::demangle_asm(\"push rbp\\n mov rbp, rsp\"),\n \"push rbp\\n mov rbp, rsp\");\n }\n\n #[test]\n fn demangles() {\n assert_eq!(\n super::demangle_asm(\"_ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE\"),\n \"core::fmt::Arguments::new_v1\");\n }\n\n #[test]\n fn many_demangles() {\n assert_eq!(\n super::demangle_asm(\".section.text._ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE,\\\"ax\\\",@progbits\\n .p2align4, 0x90\\n .type_ZN4core3fmt9Arguments6new_v117h3c6f806acbe1ddabE,@function\"),\n \".section.text.core::fmt::Arguments::new_v1,\\\"ax\\\",@progbits\\n .p2align4, 0x90\\n .typecore::fmt::Arguments::new_v1,@function\");\n }\n \n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>short result<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rollup merge of #23921: aturon\/issue-17746<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #17746\n\nfn main() {}\n\nstruct A;\n\nimpl A {\n fn b(&mut self) {\n self.a()\n }\n}\n\ntrait Foo {\n fn dummy(&self) {}\n}\ntrait Bar {\n fn a(&self);\n}\n\nimpl Foo for A {}\nimpl<T> Bar for T where T: Foo {\n fn a(&self) {}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test case for borrowk ICE #25579<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum Sexpression {\n Num(()),\n Cons(&'static mut Sexpression)\n}\n\nfn causes_ice(mut l: &mut Sexpression)\n{\n loop { match l {\n &mut Sexpression::Num(ref mut n) => {},\n &mut Sexpression::Cons(ref mut expr) => {\n l = &mut **expr;\n }\n }}\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #55963 - stepancheg:mpsc-take-2, r=alexcrichton<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags:--test\n\/\/ ignore-emscripten\n\nuse std::sync::mpsc::channel;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::mpsc::RecvError;\nuse std::sync::mpsc::RecvTimeoutError;\nuse std::sync::Arc;\nuse std::sync::atomic::AtomicUsize;\nuse std::sync::atomic::Ordering;\n\nuse std::thread;\nuse std::time::Duration;\n\n\n\/\/\/ Simple thread synchronization utility\nstruct Barrier {\n \/\/ Not using mutex\/condvar for precision\n shared: Arc<AtomicUsize>,\n count: usize,\n}\n\nimpl Barrier {\n fn new(count: usize) -> Vec<Barrier> {\n let shared = Arc::new(AtomicUsize::new(0));\n (0..count).map(|_| Barrier { shared: shared.clone(), count: count }).collect()\n }\n\n fn new2() -> (Barrier, Barrier) {\n let mut v = Barrier::new(2);\n (v.pop().unwrap(), v.pop().unwrap())\n }\n\n \/\/\/ Returns when `count` threads enter `wait`\n fn wait(self) {\n self.shared.fetch_add(1, Ordering::SeqCst);\n while self.shared.load(Ordering::SeqCst) != self.count {\n }\n }\n}\n\n\nfn shared_close_sender_does_not_lose_messages_iter() {\n let (tb, rb) = Barrier::new2();\n\n let (tx, rx) = channel();\n let _ = tx.clone(); \/\/ convert to shared\n\n thread::spawn(move || {\n tb.wait();\n thread::sleep(Duration::from_micros(1));\n tx.send(17).expect(\"send\");\n drop(tx);\n });\n\n let i = rx.into_iter();\n rb.wait();\n \/\/ Make sure it doesn't return disconnected before returning an element\n assert_eq!(vec![17], i.collect::<Vec<_>>());\n}\n\n#[test]\nfn shared_close_sender_does_not_lose_messages() {\n for _ in 0..10000 {\n shared_close_sender_does_not_lose_messages_iter();\n }\n}\n\n\n\/\/ https:\/\/github.com\/rust-lang\/rust\/issues\/39364\nfn concurrent_recv_timeout_and_upgrade_iter() {\n \/\/ 1 us\n let sleep = Duration::new(0, 1_000);\n\n let (a, b) = Barrier::new2();\n let (tx, rx) = channel();\n let th = thread::spawn(move || {\n a.wait();\n loop {\n match rx.recv_timeout(sleep) {\n Ok(_) => {\n break;\n },\n Err(_) => {},\n }\n }\n });\n b.wait();\n thread::sleep(sleep);\n tx.clone().send(()).expect(\"send\");\n th.join().unwrap();\n}\n\n#[test]\nfn concurrent_recv_timeout_and_upgrade() {\n \/\/ FIXME: fix and enable\n if true { return }\n\n \/\/ at the moment of writing this test fails like this:\n \/\/ thread '<unnamed>' panicked at 'assertion failed: `(left == right)`\n \/\/ left: `4561387584`,\n \/\/ right: `0`', libstd\/sync\/mpsc\/shared.rs:253:13\n\n for _ in 0..10000 {\n concurrent_recv_timeout_and_upgrade_iter();\n }\n}\n\n\nfn concurrent_writes_iter() {\n const THREADS: usize = 4;\n const PER_THR: usize = 
100;\n\n let mut bs = Barrier::new(THREADS + 1);\n let (tx, rx) = channel();\n\n let mut threads = Vec::new();\n for j in 0..THREADS {\n let tx = tx.clone();\n let b = bs.pop().unwrap();\n threads.push(thread::spawn(move || {\n b.wait();\n for i in 0..PER_THR {\n tx.send(j * 1000 + i).expect(\"send\");\n }\n }));\n }\n\n let b = bs.pop().unwrap();\n b.wait();\n\n let mut v: Vec<_> = rx.iter().take(THREADS * PER_THR).collect();\n v.sort();\n\n for j in 0..THREADS {\n for i in 0..PER_THR {\n assert_eq!(j * 1000 + i, v[j * PER_THR + i]);\n }\n }\n\n for t in threads {\n t.join().unwrap();\n }\n\n let one_us = Duration::new(0, 1000);\n\n assert_eq!(TryRecvError::Empty, rx.try_recv().unwrap_err());\n assert_eq!(RecvTimeoutError::Timeout, rx.recv_timeout(one_us).unwrap_err());\n\n drop(tx);\n\n assert_eq!(RecvError, rx.recv().unwrap_err());\n assert_eq!(RecvTimeoutError::Disconnected, rx.recv_timeout(one_us).unwrap_err());\n assert_eq!(TryRecvError::Disconnected, rx.try_recv().unwrap_err());\n}\n\n#[test]\nfn concurrent_writes() {\n for _ in 0..100 {\n concurrent_writes_iter();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse arch::memory;\n\nuse collections::slice;\nuse collections::string::ToString;\nuse collections::vec::Vec;\nuse collections::vec_deque::VecDeque;\n\nuse core::ptr;\n\nuse common::debug;\n\nuse drivers::pci::config::PciConfig;\nuse drivers::io::{Io, Pio};\n\nuse network::common::*;\nuse network::scheme::*;\n\nuse fs::{KScheme, Resource, Url};\n\nuse syscall::Result;\n\nuse sync::Intex;\n\nconst RTL8139_TSR_OWN: u32 = 1 << 13;\n\nconst RTL8139_CR_RST: u8 = 1 << 4;\nconst RTL8139_CR_RE: u8 = 1 << 3;\nconst RTL8139_CR_TE: u8 = 1 << 2;\nconst RTL8139_CR_BUFE: u8 = 1 << 0;\n\nconst RTL8139_ISR_SERR: u16 = 1 << 15;\nconst RTL8139_ISR_TIMEOUT: u16 = 1 << 14;\nconst RTL8139_ISR_LENCHG: u16 = 1 << 13;\nconst RTL8139_ISR_FOVW: u16 = 1 << 6;\nconst RTL8139_ISR_PUN_LINKCHG: u16 = 1 << 5;\nconst RTL8139_ISR_RXOVW: u16 = 1 << 4;\nconst RTL8139_ISR_TER: u16 = 1 << 3;\nconst RTL8139_ISR_TOK: u16 = 1 << 2;\nconst RTL8139_ISR_RER: u16 = 1 << 1;\nconst RTL8139_ISR_ROK: u16 = 1 << 0;\n\nconst RTL8139_TCR_IFG: u32 = 0b11 << 24;\n\nconst RTL8139_RCR_WRAP: u32 = 1 << 7;\nconst RTL8139_RCR_AR: u32 = 1 << 4;\nconst RTL8139_RCR_AB: u32 = 1 << 3;\nconst RTL8139_RCR_AM: u32 = 1 << 2;\nconst RTL8139_RCR_APM: u32 = 1 << 1;\n\n#[repr(packed)]\nstruct Txd {\n pub address_port: Pio<u32>,\n pub status_port: Pio<u32>,\n pub buffer: usize,\n}\n\npub struct Rtl8139Port {\n pub idr: [Pio<u8>; 6],\n pub rbstart: Pio<u32>,\n pub cr: Pio<u8>,\n pub capr: Pio<u16>,\n pub cbr: Pio<u16>,\n pub imr: Pio<u16>,\n pub isr: Pio<u16>,\n pub tcr: Pio<u32>,\n pub rcr: Pio<u32>,\n pub config1: Pio<u8>,\n}\n\nimpl Rtl8139Port {\n pub fn new(base: u16) -> Self {\n return Rtl8139Port {\n idr: [Pio::<u8>::new(base + 0x00),\n Pio::<u8>::new(base + 0x01),\n Pio::<u8>::new(base + 0x02),\n Pio::<u8>::new(base + 0x03),\n Pio::<u8>::new(base + 0x04),\n Pio::<u8>::new(base + 0x05)],\n rbstart: Pio::<u32>::new(base + 0x30),\n cr: Pio::<u8>::new(base + 0x37),\n capr: Pio::<u16>::new(base + 0x38),\n cbr: Pio::<u16>::new(base + 0x3A),\n imr: Pio::<u16>::new(base + 0x3C),\n isr: Pio::<u16>::new(base + 0x3E),\n tcr: Pio::<u32>::new(base + 0x40),\n rcr: Pio::<u32>::new(base + 0x44),\n config1: Pio::<u8>::new(base + 0x52),\n };\n }\n}\n\npub struct Rtl8139 {\n pci: PciConfig,\n base: usize,\n memory_mapped: bool,\n irq: u8,\n resources: Intex<Vec<*mut NetworkResource>>,\n inbound: VecDeque<Vec<u8>>,\n outbound: 
VecDeque<Vec<u8>>,\n txds: Vec<Txd>,\n txd_i: usize,\n port: Rtl8139Port,\n}\n\nimpl Rtl8139 {\n pub fn new(mut pci: PciConfig) -> Box<Self> {\n let pci_id = unsafe { pci.read(0x00) };\n let revision = (unsafe { pci.read(0x08) } & 0xFF) as u8;\n if pci_id == 0x813910EC && revision < 0x20 {\n debug::d(\"Not an 8139C+ compatible chip\")\n }\n\n let base = unsafe { pci.read(0x10) as usize };\n let irq = unsafe { pci.read(0x3C) as u8 & 0xF };\n\n let mut module = box Rtl8139 {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: irq,\n resources: Intex::new(Vec::new()),\n inbound: VecDeque::new(),\n outbound: VecDeque::new(),\n txds: Vec::new(),\n txd_i: 0,\n port: Rtl8139Port::new((base & 0xFFFFFFF0) as u16),\n };\n\n unsafe { module.init() };\n\n module\n }\n\n unsafe fn init(&mut self) {\n debug::d(\"RTL8139 on: \");\n debug::dh(self.base);\n if self.memory_mapped {\n debug::d(\" memory mapped\");\n } else {\n debug::d(\" port mapped\");\n }\n debug::d(\" IRQ: \");\n debug::dbh(self.irq);\n\n self.pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = self.base as u16;\n\n self.port.config1.write(0);\n self.port.cr.write(RTL8139_CR_RST);\n while self.port.cr.read() & RTL8139_CR_RST != 0 {}\n\n debug::d(\" MAC: \");\n MAC_ADDR = MacAddr {\n bytes: [self.port.idr[0].read(),\n self.port.idr[1].read(),\n self.port.idr[2].read(),\n self.port.idr[3].read(),\n self.port.idr[4].read(),\n self.port.idr[5].read()],\n };\n debug::d(&MAC_ADDR.to_string());\n\n let receive_buffer = memory::alloc(10240);\n self.port.rbstart.write(receive_buffer as u32);\n\n for i in 0..4 {\n self.txds.push(Txd {\n address_port: Pio::<u32>::new(base + 0x20 + (i as u16) * 4),\n status_port: Pio::<u32>::new(base + 0x10 + (i as u16) * 4),\n buffer: memory::alloc(4096),\n });\n }\n\n self.port.imr.write(RTL8139_ISR_TOK | RTL8139_ISR_ROK);\n debug::d(\" IMR: \");\n debug::dh(self.port.imr.read() as usize);\n\n self.port.cr.write(RTL8139_CR_RE | RTL8139_CR_TE);\n debug::d(\" CMD: \");\n debug::dbh(self.port.cr.read());\n\n self.port.rcr.write(RTL8139_RCR_WRAP | RTL8139_RCR_AR | RTL8139_RCR_AB | RTL8139_RCR_AM |\n RTL8139_RCR_APM);\n debug::d(\" RCR: \");\n debug::dh(self.port.rcr.read() as usize);\n\n self.port.tcr.writef(RTL8139_TCR_IFG, true);\n debug::d(\" TCR: \");\n debug::dh(self.port.tcr.read() as usize);\n\n debug::dl();\n }\n\n unsafe fn receive_inbound(&mut self) {\n let receive_buffer = self.port.rbstart.read() as usize;\n let mut capr = (self.port.capr.read() + 16) as usize;\n let cbr = self.port.cbr.read() as usize;\n\n while capr != cbr {\n let frame_addr = receive_buffer + capr + 4;\n let frame_status = ptr::read((receive_buffer + capr) as *const u16) as usize;\n let frame_len = ptr::read((receive_buffer + capr + 2) as *const u16) as usize;\n\n debug::d(\"Recv \");\n debug::dh(capr as usize);\n debug::d(\" \");\n debug::dh(frame_status);\n debug::d(\" \");\n debug::dh(frame_addr);\n debug::d(\" \");\n debug::dh(frame_len);\n debug::dl();\n\n self.inbound.push_back(Vec::from(slice::from_raw_parts(frame_addr as *const u8, frame_len - 4)));\n\n capr = capr + frame_len + 4;\n capr = (capr + 3) & (0xFFFFFFFF - 3);\n if capr >= 8192 {\n capr -= 8192\n }\n\n self.port.capr.write((capr as u16) - 16);\n }\n }\n\n unsafe fn send_outbound(&mut self) {\n while let Some(bytes) = self.outbound.pop_front() {\n if let Some(ref mut txd) = self.txds.get_mut(self.txd_i) {\n if bytes.len() < 4096 {\n while !txd.status_port.readf(RTL8139_TSR_OWN) {}\n\n debug::d(\"Send \");\n debug::dh(self.txd_i as 
usize);\n debug::d(\" \");\n debug::dh(txd.status_port.read() as usize);\n debug::d(\" \");\n debug::dh(txd.buffer);\n debug::d(\" \");\n debug::dh(bytes.len() & 0xFFF);\n debug::dl();\n\n ::memcpy(txd.buffer as *mut u8, bytes.as_ptr(), bytes.len());\n\n txd.address_port.write(txd.buffer as u32);\n txd.status_port.write(bytes.len() as u32 & 0xFFF);\n\n self.txd_i = (self.txd_i + 1) % 4;\n } else {\n debug::dl();\n debug::d(\"RTL8139: Frame too long for transmit: \");\n debug::dd(bytes.len());\n debug::dl();\n }\n } else {\n debug::d(\"RTL8139: TXD Overflow!\\n\");\n self.txd_i = 0;\n }\n }\n }\n}\n\nimpl KScheme for Rtl8139 {\n fn scheme(&self) -> &str {\n \"network\"\n }\n\n fn open(&mut self, _: &Url, _: usize) -> Result<Box<Resource>> {\n Ok(NetworkResource::new(self))\n }\n\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n let isr = self.port.isr.read();\n self.port.isr.write(isr);\n\n \/\/ dh(isr as usize);\n \/\/ dl();\n\n self.sync();\n }\n }\n\n fn on_poll(&mut self) {\n self.sync();\n }\n}\n\nimpl NetworkScheme for Rtl8139 {\n fn add(&mut self, resource: *mut NetworkResource) {\n self.resources.lock().push(resource);\n }\n\n fn remove(&mut self, resource: *mut NetworkResource) {\n let mut resources = self.resources.lock();\n\n let mut i = 0;\n while i < resources.len() {\n let mut remove = false;\n\n match resources.get(i) {\n Some(ptr) => if *ptr == resource {\n remove = true;\n } else {\n i += 1;\n },\n None => break,\n }\n\n if remove {\n resources.remove(i);\n }\n }\n }\n\n fn sync(&mut self) {\n unsafe {\n {\n let resources = self.resources.lock();\n\n for resource in resources.iter() {\n while let Some(bytes) = (**resource).outbound.lock().pop_front() {\n self.outbound.push_back(bytes);\n }\n }\n }\n\n self.send_outbound();\n\n self.receive_inbound();\n\n {\n let resources = self.resources.lock();\n\n while let Some(bytes) = self.inbound.pop_front() {\n for resource in resources.iter() {\n (**resource).inbound.lock().push_back(bytes.clone());\n }\n }\n }\n }\n }\n}\n<commit_msg>Newline in rtl8139 driver<commit_after>use alloc::boxed::Box;\n\nuse arch::memory;\n\nuse collections::slice;\nuse collections::string::ToString;\nuse collections::vec::Vec;\nuse collections::vec_deque::VecDeque;\n\nuse core::ptr;\n\nuse common::debug;\n\nuse drivers::pci::config::PciConfig;\nuse drivers::io::{Io, Pio};\n\nuse network::common::*;\nuse network::scheme::*;\n\nuse fs::{KScheme, Resource, Url};\n\nuse syscall::Result;\n\nuse sync::Intex;\n\nconst RTL8139_TSR_OWN: u32 = 1 << 13;\n\nconst RTL8139_CR_RST: u8 = 1 << 4;\nconst RTL8139_CR_RE: u8 = 1 << 3;\nconst RTL8139_CR_TE: u8 = 1 << 2;\nconst RTL8139_CR_BUFE: u8 = 1 << 0;\n\nconst RTL8139_ISR_SERR: u16 = 1 << 15;\nconst RTL8139_ISR_TIMEOUT: u16 = 1 << 14;\nconst RTL8139_ISR_LENCHG: u16 = 1 << 13;\nconst RTL8139_ISR_FOVW: u16 = 1 << 6;\nconst RTL8139_ISR_PUN_LINKCHG: u16 = 1 << 5;\nconst RTL8139_ISR_RXOVW: u16 = 1 << 4;\nconst RTL8139_ISR_TER: u16 = 1 << 3;\nconst RTL8139_ISR_TOK: u16 = 1 << 2;\nconst RTL8139_ISR_RER: u16 = 1 << 1;\nconst RTL8139_ISR_ROK: u16 = 1 << 0;\n\nconst RTL8139_TCR_IFG: u32 = 0b11 << 24;\n\nconst RTL8139_RCR_WRAP: u32 = 1 << 7;\nconst RTL8139_RCR_AR: u32 = 1 << 4;\nconst RTL8139_RCR_AB: u32 = 1 << 3;\nconst RTL8139_RCR_AM: u32 = 1 << 2;\nconst RTL8139_RCR_APM: u32 = 1 << 1;\n\n#[repr(packed)]\nstruct Txd {\n pub address_port: Pio<u32>,\n pub status_port: Pio<u32>,\n pub buffer: usize,\n}\n\npub struct Rtl8139Port {\n pub idr: [Pio<u8>; 6],\n pub rbstart: Pio<u32>,\n pub cr: Pio<u8>,\n pub capr: Pio<u16>,\n 
pub cbr: Pio<u16>,\n pub imr: Pio<u16>,\n pub isr: Pio<u16>,\n pub tcr: Pio<u32>,\n pub rcr: Pio<u32>,\n pub config1: Pio<u8>,\n}\n\nimpl Rtl8139Port {\n pub fn new(base: u16) -> Self {\n return Rtl8139Port {\n idr: [Pio::<u8>::new(base + 0x00),\n Pio::<u8>::new(base + 0x01),\n Pio::<u8>::new(base + 0x02),\n Pio::<u8>::new(base + 0x03),\n Pio::<u8>::new(base + 0x04),\n Pio::<u8>::new(base + 0x05)],\n rbstart: Pio::<u32>::new(base + 0x30),\n cr: Pio::<u8>::new(base + 0x37),\n capr: Pio::<u16>::new(base + 0x38),\n cbr: Pio::<u16>::new(base + 0x3A),\n imr: Pio::<u16>::new(base + 0x3C),\n isr: Pio::<u16>::new(base + 0x3E),\n tcr: Pio::<u32>::new(base + 0x40),\n rcr: Pio::<u32>::new(base + 0x44),\n config1: Pio::<u8>::new(base + 0x52),\n };\n }\n}\n\npub struct Rtl8139 {\n pci: PciConfig,\n base: usize,\n memory_mapped: bool,\n irq: u8,\n resources: Intex<Vec<*mut NetworkResource>>,\n inbound: VecDeque<Vec<u8>>,\n outbound: VecDeque<Vec<u8>>,\n txds: Vec<Txd>,\n txd_i: usize,\n port: Rtl8139Port,\n}\n\nimpl Rtl8139 {\n pub fn new(mut pci: PciConfig) -> Box<Self> {\n let pci_id = unsafe { pci.read(0x00) };\n let revision = (unsafe { pci.read(0x08) } & 0xFF) as u8;\n if pci_id == 0x813910EC && revision < 0x20 {\n debugln!(\"Not an 8139C+ compatible chip\")\n }\n\n let base = unsafe { pci.read(0x10) as usize };\n let irq = unsafe { pci.read(0x3C) as u8 & 0xF };\n\n let mut module = box Rtl8139 {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: irq,\n resources: Intex::new(Vec::new()),\n inbound: VecDeque::new(),\n outbound: VecDeque::new(),\n txds: Vec::new(),\n txd_i: 0,\n port: Rtl8139Port::new((base & 0xFFFFFFF0) as u16),\n };\n\n unsafe { module.init() };\n\n module\n }\n\n unsafe fn init(&mut self) {\n debug::d(\"RTL8139 on: \");\n debug::dh(self.base);\n if self.memory_mapped {\n debug::d(\" memory mapped\");\n } else {\n debug::d(\" port mapped\");\n }\n debug::d(\" IRQ: \");\n debug::dbh(self.irq);\n\n self.pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = self.base as u16;\n\n self.port.config1.write(0);\n self.port.cr.write(RTL8139_CR_RST);\n while self.port.cr.read() & RTL8139_CR_RST != 0 {}\n\n debug::d(\" MAC: \");\n MAC_ADDR = MacAddr {\n bytes: [self.port.idr[0].read(),\n self.port.idr[1].read(),\n self.port.idr[2].read(),\n self.port.idr[3].read(),\n self.port.idr[4].read(),\n self.port.idr[5].read()],\n };\n debug::d(&MAC_ADDR.to_string());\n\n let receive_buffer = memory::alloc(10240);\n self.port.rbstart.write(receive_buffer as u32);\n\n for i in 0..4 {\n self.txds.push(Txd {\n address_port: Pio::<u32>::new(base + 0x20 + (i as u16) * 4),\n status_port: Pio::<u32>::new(base + 0x10 + (i as u16) * 4),\n buffer: memory::alloc(4096),\n });\n }\n\n self.port.imr.write(RTL8139_ISR_TOK | RTL8139_ISR_ROK);\n debug::d(\" IMR: \");\n debug::dh(self.port.imr.read() as usize);\n\n self.port.cr.write(RTL8139_CR_RE | RTL8139_CR_TE);\n debug::d(\" CMD: \");\n debug::dbh(self.port.cr.read());\n\n self.port.rcr.write(RTL8139_RCR_WRAP | RTL8139_RCR_AR | RTL8139_RCR_AB | RTL8139_RCR_AM |\n RTL8139_RCR_APM);\n debug::d(\" RCR: \");\n debug::dh(self.port.rcr.read() as usize);\n\n self.port.tcr.writef(RTL8139_TCR_IFG, true);\n debug::d(\" TCR: \");\n debug::dh(self.port.tcr.read() as usize);\n\n debug::dl();\n }\n\n unsafe fn receive_inbound(&mut self) {\n let receive_buffer = self.port.rbstart.read() as usize;\n let mut capr = (self.port.capr.read() + 16) as usize;\n let cbr = self.port.cbr.read() as usize;\n\n while capr != cbr {\n let frame_addr = receive_buffer + 
capr + 4;\n let frame_status = ptr::read((receive_buffer + capr) as *const u16) as usize;\n let frame_len = ptr::read((receive_buffer + capr + 2) as *const u16) as usize;\n\n debug::d(\"Recv \");\n debug::dh(capr as usize);\n debug::d(\" \");\n debug::dh(frame_status);\n debug::d(\" \");\n debug::dh(frame_addr);\n debug::d(\" \");\n debug::dh(frame_len);\n debug::dl();\n\n self.inbound.push_back(Vec::from(slice::from_raw_parts(frame_addr as *const u8, frame_len - 4)));\n\n capr = capr + frame_len + 4;\n capr = (capr + 3) & (0xFFFFFFFF - 3);\n if capr >= 8192 {\n capr -= 8192\n }\n\n self.port.capr.write((capr as u16) - 16);\n }\n }\n\n unsafe fn send_outbound(&mut self) {\n while let Some(bytes) = self.outbound.pop_front() {\n if let Some(ref mut txd) = self.txds.get_mut(self.txd_i) {\n if bytes.len() < 4096 {\n while !txd.status_port.readf(RTL8139_TSR_OWN) {}\n\n debug::d(\"Send \");\n debug::dh(self.txd_i as usize);\n debug::d(\" \");\n debug::dh(txd.status_port.read() as usize);\n debug::d(\" \");\n debug::dh(txd.buffer);\n debug::d(\" \");\n debug::dh(bytes.len() & 0xFFF);\n debug::dl();\n\n ::memcpy(txd.buffer as *mut u8, bytes.as_ptr(), bytes.len());\n\n txd.address_port.write(txd.buffer as u32);\n txd.status_port.write(bytes.len() as u32 & 0xFFF);\n\n self.txd_i = (self.txd_i + 1) % 4;\n } else {\n debug::dl();\n debug::d(\"RTL8139: Frame too long for transmit: \");\n debug::dd(bytes.len());\n debug::dl();\n }\n } else {\n debug::d(\"RTL8139: TXD Overflow!\\n\");\n self.txd_i = 0;\n }\n }\n }\n}\n\nimpl KScheme for Rtl8139 {\n fn scheme(&self) -> &str {\n \"network\"\n }\n\n fn open(&mut self, _: &Url, _: usize) -> Result<Box<Resource>> {\n Ok(NetworkResource::new(self))\n }\n\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n let isr = self.port.isr.read();\n self.port.isr.write(isr);\n\n \/\/ dh(isr as usize);\n \/\/ dl();\n\n self.sync();\n }\n }\n\n fn on_poll(&mut self) {\n self.sync();\n }\n}\n\nimpl NetworkScheme for Rtl8139 {\n fn add(&mut self, resource: *mut NetworkResource) {\n self.resources.lock().push(resource);\n }\n\n fn remove(&mut self, resource: *mut NetworkResource) {\n let mut resources = self.resources.lock();\n\n let mut i = 0;\n while i < resources.len() {\n let mut remove = false;\n\n match resources.get(i) {\n Some(ptr) => if *ptr == resource {\n remove = true;\n } else {\n i += 1;\n },\n None => break,\n }\n\n if remove {\n resources.remove(i);\n }\n }\n }\n\n fn sync(&mut self) {\n unsafe {\n {\n let resources = self.resources.lock();\n\n for resource in resources.iter() {\n while let Some(bytes) = (**resource).outbound.lock().pop_front() {\n self.outbound.push_back(bytes);\n }\n }\n }\n\n self.send_outbound();\n\n self.receive_inbound();\n\n {\n let resources = self.resources.lock();\n\n while let Some(bytes) = self.inbound.pop_front() {\n for resource in resources.iter() {\n (**resource).inbound.lock().push_back(bytes.clone());\n }\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix copy error<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove DXGIGetDebugInterface, there is no .lib for this, it has to be loaded dynamically.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reorder config overrides (cli \/ file)<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Panic support in the standard library\n\n#![unstable(feature = \"std_panic\", reason = \"awaiting feedback\",\n issue = \"27719\")]\n\nuse any::Any;\nuse boxed::Box;\nuse cell::UnsafeCell;\nuse ops::{Deref, DerefMut};\nuse ptr::{Unique, Shared};\nuse rc::Rc;\nuse sync::{Arc, Mutex, RwLock};\nuse sys_common::unwind;\nuse thread::Result;\n\npub use panicking::{take_handler, set_handler, PanicInfo, Location};\n\n\/\/\/ A marker trait which represents \"panic safe\" types in Rust.\n\/\/\/\n\/\/\/ This trait is implemented by default for many types and behaves similarly in\n\/\/\/ terms of inference of implementation to the `Send` and `Sync` traits. The\n\/\/\/ purpose of this trait is to encode what types are safe to cross a `recover`\n\/\/\/ boundary with no fear of panic safety.\n\/\/\/\n\/\/\/ ## What is panic safety?\n\/\/\/\n\/\/\/ In Rust a function can \"return\" early if it either panics or calls a\n\/\/\/ function which transitively panics. This sort of control flow is not always\n\/\/\/ anticipated, and has the possibility of causing subtle bugs through a\n\/\/\/ combination of two cricial components:\n\/\/\/\n\/\/\/ 1. A data structure is in a temporarily invalid state when the thread\n\/\/\/ panics.\n\/\/\/ 2. This broken invariant is then later observed.\n\/\/\/\n\/\/\/ Typically in Rust, it is difficult to perform step (2) because catching a\n\/\/\/ panic involves either spawning a thread (which in turns makes it difficult\n\/\/\/ to later witness broken invariants) or using the `recover` function in this\n\/\/\/ module. Additionally, even if an invariant is witnessed, it typically isn't a\n\/\/\/ problem in Rust because there's no uninitialized values (like in C or C++).\n\/\/\/\n\/\/\/ It is possible, however, for **logical** invariants to be broken in Rust,\n\/\/\/ which can end up causing behavioral bugs. Another key aspect of panic safety\n\/\/\/ in Rust is that, in the absence of `unsafe` code, a panic cannot lead to\n\/\/\/ memory unsafety.\n\/\/\/\n\/\/\/ That was a bit of a whirlwind tour of panic safety, but for more information\n\/\/\/ about panic safety and how it applies to Rust, see an [associated RFC][rfc].\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ ## What is `RecoverSafe`?\n\/\/\/\n\/\/\/ Now that we've got an idea of what panic safety is in Rust, it's also\n\/\/\/ important to understand what this trait represents. As mentioned above, one\n\/\/\/ way to witness broken invariants is through the `recover` function in this\n\/\/\/ module as it allows catching a panic and then re-using the environment of\n\/\/\/ the closure.\n\/\/\/\n\/\/\/ Simply put, a type `T` implements `RecoverSafe` if it cannot easily allow\n\/\/\/ witnessing a broken invariant through the use of `recover` (catching a\n\/\/\/ panic). This trait is a marker trait, so it is automatically implemented for\n\/\/\/ many types, and it is also structurally composed (e.g. 
a struct is recover\n\/\/\/ safe if all of its components are recover safe).\n\/\/\/\n\/\/\/ Note, however, that this is not an unsafe trait, so there is not a succinct\n\/\/\/ contract that this trait is providing. Instead it is intended as more of a\n\/\/\/ \"speed bump\" to alert users of `recover` that broken invariants may be\n\/\/\/ witnessed and may need to be accounted for.\n\/\/\/\n\/\/\/ ## Who implements `RecoverSafe`?\n\/\/\/\n\/\/\/ Types such as `&mut T` and `&RefCell<T>` are examples which are **not**\n\/\/\/ recover safe. The general idea is that any mutable state which can be shared\n\/\/\/ across `recover` is not recover safe by default. This is because it is very\n\/\/\/ easy to witness a broken invariant outside of `recover` as the data is\n\/\/\/ simply accessed as usual.\n\/\/\/\n\/\/\/ Types like `&Mutex<T>`, however, are recover safe because they implement\n\/\/\/ poisoning by default. They still allow witnessing a broken invariant, but\n\/\/\/ they already provide their own \"speed bumps\" to do so.\n\/\/\/\n\/\/\/ ## When should `RecoverSafe` be used?\n\/\/\/\n\/\/\/ Is not intended that most types or functions need to worry about this trait.\n\/\/\/ It is only used as a bound on the `recover` function and as mentioned above,\n\/\/\/ the lack of `unsafe` means it is mostly an advisory. The `AssertRecoverSafe`\n\/\/\/ wrapper struct in this module can be used to force this trait to be\n\/\/\/ implemented for any closed over variables passed to the `recover` function\n\/\/\/ (more on this below).\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} may not be safely transferred \\\n across a recover boundary\"]\npub trait RecoverSafe {}\n\n\/\/\/ A marker trait representing types where a shared reference is considered\n\/\/\/ recover safe.\n\/\/\/\n\/\/\/ This trait is namely not implemented by `UnsafeCell`, the root of all\n\/\/\/ interior mutability.\n\/\/\/\n\/\/\/ This is a \"helper marker trait\" used to provide impl blocks for the\n\/\/\/ `RecoverSafe` trait, for more information see that documentation.\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} contains interior mutability \\\n and a reference may not be safely transferrable \\\n across a recover boundary\"]\npub trait RefRecoverSafe {}\n\n\/\/\/ A simple wrapper around a type to assert that it is panic safe.\n\/\/\/\n\/\/\/ When using `recover` it may be the case that some of the closed over\n\/\/\/ variables are not panic safe. For example if `&mut T` is captured the\n\/\/\/ compiler will generate a warning indicating that it is not panic safe. It\n\/\/\/ may not be the case, however, that this is actually a problem due to the\n\/\/\/ specific usage of `recover` if panic safety is specifically taken into\n\/\/\/ account. 
This wrapper struct is useful for a quick and lightweight\n\/\/\/ annotation that a variable is indeed panic safe.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/\n\/\/\/ \/\/ This code will not compile because the closure captures `&mut variable`\n\/\/\/ \/\/ which is not considered panic safe by default.\n\/\/\/\n\/\/\/ \/\/ panic::recover(|| {\n\/\/\/ \/\/ variable += 3;\n\/\/\/ \/\/ });\n\/\/\/\n\/\/\/ \/\/ This, however, will compile due to the `AssertRecoverSafe` wrapper\n\/\/\/ let result = {\n\/\/\/ let mut wrapper = AssertRecoverSafe::new(&mut variable);\n\/\/\/ panic::recover(move || {\n\/\/\/ **wrapper += 3;\n\/\/\/ })\n\/\/\/ };\n\/\/\/ \/\/ ...\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub struct AssertRecoverSafe<T>(T);\n\n\/\/ Implementations of the `RecoverSafe` trait:\n\/\/\n\/\/ * By default everything is recover safe\n\/\/ * pointers T contains mutability of some form are not recover safe\n\/\/ * Unique, an owning pointer, lifts an implementation\n\/\/ * Types like Mutex\/RwLock which are explicilty poisoned are recover safe\n\/\/ * Our custom AssertRecoverSafe wrapper is indeed recover safe\nimpl RecoverSafe for .. {}\nimpl<'a, T: ?Sized> !RecoverSafe for &'a mut T {}\nimpl<'a, T: RefRecoverSafe + ?Sized> RecoverSafe for &'a T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *const T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *mut T {}\nimpl<T: RecoverSafe> RecoverSafe for Unique<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Shared<T> {}\nimpl<T: ?Sized> RecoverSafe for Mutex<T> {}\nimpl<T: ?Sized> RecoverSafe for RwLock<T> {}\nimpl<T> RecoverSafe for AssertRecoverSafe<T> {}\n\n\/\/ not covered via the Shared impl above b\/c the inner contents use\n\/\/ Cell\/AtomicUsize, but the usage here is recover safe so we can lift the\n\/\/ impl up one level to Arc\/Rc itself\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Rc<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Arc<T> {}\n\n\/\/ Pretty simple implementations for the `RefRecoverSafe` marker trait,\n\/\/ basically just saying that this is a marker trait and `UnsafeCell` is the\n\/\/ only thing which doesn't implement it (which then transitively applies to\n\/\/ everything else).\nimpl RefRecoverSafe for .. {}\nimpl<T: ?Sized> !RefRecoverSafe for UnsafeCell<T> {}\nimpl<T> RefRecoverSafe for AssertRecoverSafe<T> {}\n\nimpl<T> AssertRecoverSafe<T> {\n \/\/\/ Creates a new `AssertRecoverSafe` wrapper around the provided type.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n pub fn new(t: T) -> AssertRecoverSafe<T> {\n AssertRecoverSafe(t)\n }\n}\n\nimpl<T> Deref for AssertRecoverSafe<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.0\n }\n}\n\nimpl<T> DerefMut for AssertRecoverSafe<T> {\n fn deref_mut(&mut self) -> &mut T {\n &mut self.0\n }\n}\n\n\/\/\/ Invokes a closure, capturing the cause of panic if one occurs.\n\/\/\/\n\/\/\/ This function will return `Ok` with the closure's result if the closure\n\/\/\/ does not panic, and will return `Err(cause)` if the closure panics. 
The\n\/\/\/ `cause` returned is the object with which panic was originally invoked.\n\/\/\/\n\/\/\/ It is currently undefined behavior to unwind from Rust code into foreign\n\/\/\/ code, so this function is particularly useful when Rust is called from\n\/\/\/ another language (normally C). This can run arbitrary Rust code, capturing a\n\/\/\/ panic and allowing a graceful handling of the error.\n\/\/\/\n\/\/\/ It is **not** recommended to use this function for a general try\/catch\n\/\/\/ mechanism. The `Result` type is more appropriate to use for functions that\n\/\/\/ can fail on a regular basis.\n\/\/\/\n\/\/\/ The closure provided is required to adhere to the `RecoverSafe` to ensure\n\/\/\/ that all captured variables are safe to cross this recover boundary. The\n\/\/\/ purpose of this bound is to encode the concept of [exception safety][rfc] in\n\/\/\/ the type system. Most usage of this function should not need to worry about\n\/\/\/ this bound as programs are naturally panic safe without `unsafe` code. If it\n\/\/\/ becomes a problem the associated `AssertRecoverSafe` wrapper type in this\n\/\/\/ module can be used to quickly assert that the usage here is indeed exception\n\/\/\/ safe.\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ println!(\"hello!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_ok());\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_err());\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub fn recover<F: FnOnce() -> R + RecoverSafe, R>(f: F) -> Result<R> {\n let mut result = None;\n unsafe {\n let result = &mut result;\n try!(unwind::try(move || *result = Some(f())))\n }\n Ok(result.unwrap())\n}\n\n\/\/\/ Triggers a panic without invoking the panic handler.\n\/\/\/\n\/\/\/ This is designed to be used in conjunction with `recover` to, for example,\n\/\/\/ carry a panic across a layer of C code.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```should_panic\n\/\/\/ #![feature(std_panic, recover, panic_propagate)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/\n\/\/\/ if let Err(err) = result {\n\/\/\/ panic::propagate(err);\n\/\/\/ }\n\/\/\/ ```\n#[unstable(feature = \"panic_propagate\", reason = \"awaiting feedback\", issue = \"30752\")]\npub fn propagate(payload: Box<Any + Send>) -> ! {\n unwind::rust_panic(payload)\n}\n<commit_msg>Add into_inner to AssertRecoverSafe<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Panic support in the standard library\n\n#![unstable(feature = \"std_panic\", reason = \"awaiting feedback\",\n issue = \"27719\")]\n\nuse any::Any;\nuse boxed::Box;\nuse cell::UnsafeCell;\nuse ops::{Deref, DerefMut};\nuse ptr::{Unique, Shared};\nuse rc::Rc;\nuse sync::{Arc, Mutex, RwLock};\nuse sys_common::unwind;\nuse thread::Result;\n\npub use panicking::{take_handler, set_handler, PanicInfo, Location};\n\n\/\/\/ A marker trait which represents \"panic safe\" types in Rust.\n\/\/\/\n\/\/\/ This trait is implemented by default for many types and behaves similarly in\n\/\/\/ terms of inference of implementation to the `Send` and `Sync` traits. The\n\/\/\/ purpose of this trait is to encode what types are safe to cross a `recover`\n\/\/\/ boundary with no fear of panic safety.\n\/\/\/\n\/\/\/ ## What is panic safety?\n\/\/\/\n\/\/\/ In Rust a function can \"return\" early if it either panics or calls a\n\/\/\/ function which transitively panics. This sort of control flow is not always\n\/\/\/ anticipated, and has the possibility of causing subtle bugs through a\n\/\/\/ combination of two cricial components:\n\/\/\/\n\/\/\/ 1. A data structure is in a temporarily invalid state when the thread\n\/\/\/ panics.\n\/\/\/ 2. This broken invariant is then later observed.\n\/\/\/\n\/\/\/ Typically in Rust, it is difficult to perform step (2) because catching a\n\/\/\/ panic involves either spawning a thread (which in turns makes it difficult\n\/\/\/ to later witness broken invariants) or using the `recover` function in this\n\/\/\/ module. Additionally, even if an invariant is witnessed, it typically isn't a\n\/\/\/ problem in Rust because there's no uninitialized values (like in C or C++).\n\/\/\/\n\/\/\/ It is possible, however, for **logical** invariants to be broken in Rust,\n\/\/\/ which can end up causing behavioral bugs. Another key aspect of panic safety\n\/\/\/ in Rust is that, in the absence of `unsafe` code, a panic cannot lead to\n\/\/\/ memory unsafety.\n\/\/\/\n\/\/\/ That was a bit of a whirlwind tour of panic safety, but for more information\n\/\/\/ about panic safety and how it applies to Rust, see an [associated RFC][rfc].\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ ## What is `RecoverSafe`?\n\/\/\/\n\/\/\/ Now that we've got an idea of what panic safety is in Rust, it's also\n\/\/\/ important to understand what this trait represents. As mentioned above, one\n\/\/\/ way to witness broken invariants is through the `recover` function in this\n\/\/\/ module as it allows catching a panic and then re-using the environment of\n\/\/\/ the closure.\n\/\/\/\n\/\/\/ Simply put, a type `T` implements `RecoverSafe` if it cannot easily allow\n\/\/\/ witnessing a broken invariant through the use of `recover` (catching a\n\/\/\/ panic). This trait is a marker trait, so it is automatically implemented for\n\/\/\/ many types, and it is also structurally composed (e.g. a struct is recover\n\/\/\/ safe if all of its components are recover safe).\n\/\/\/\n\/\/\/ Note, however, that this is not an unsafe trait, so there is not a succinct\n\/\/\/ contract that this trait is providing. Instead it is intended as more of a\n\/\/\/ \"speed bump\" to alert users of `recover` that broken invariants may be\n\/\/\/ witnessed and may need to be accounted for.\n\/\/\/\n\/\/\/ ## Who implements `RecoverSafe`?\n\/\/\/\n\/\/\/ Types such as `&mut T` and `&RefCell<T>` are examples which are **not**\n\/\/\/ recover safe. 
The general idea is that any mutable state which can be shared\n\/\/\/ across `recover` is not recover safe by default. This is because it is very\n\/\/\/ easy to witness a broken invariant outside of `recover` as the data is\n\/\/\/ simply accessed as usual.\n\/\/\/\n\/\/\/ Types like `&Mutex<T>`, however, are recover safe because they implement\n\/\/\/ poisoning by default. They still allow witnessing a broken invariant, but\n\/\/\/ they already provide their own \"speed bumps\" to do so.\n\/\/\/\n\/\/\/ ## When should `RecoverSafe` be used?\n\/\/\/\n\/\/\/ Is not intended that most types or functions need to worry about this trait.\n\/\/\/ It is only used as a bound on the `recover` function and as mentioned above,\n\/\/\/ the lack of `unsafe` means it is mostly an advisory. The `AssertRecoverSafe`\n\/\/\/ wrapper struct in this module can be used to force this trait to be\n\/\/\/ implemented for any closed over variables passed to the `recover` function\n\/\/\/ (more on this below).\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} may not be safely transferred \\\n across a recover boundary\"]\npub trait RecoverSafe {}\n\n\/\/\/ A marker trait representing types where a shared reference is considered\n\/\/\/ recover safe.\n\/\/\/\n\/\/\/ This trait is namely not implemented by `UnsafeCell`, the root of all\n\/\/\/ interior mutability.\n\/\/\/\n\/\/\/ This is a \"helper marker trait\" used to provide impl blocks for the\n\/\/\/ `RecoverSafe` trait, for more information see that documentation.\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n#[rustc_on_unimplemented = \"the type {Self} contains interior mutability \\\n and a reference may not be safely transferrable \\\n across a recover boundary\"]\npub trait RefRecoverSafe {}\n\n\/\/\/ A simple wrapper around a type to assert that it is panic safe.\n\/\/\/\n\/\/\/ When using `recover` it may be the case that some of the closed over\n\/\/\/ variables are not panic safe. For example if `&mut T` is captured the\n\/\/\/ compiler will generate a warning indicating that it is not panic safe. It\n\/\/\/ may not be the case, however, that this is actually a problem due to the\n\/\/\/ specific usage of `recover` if panic safety is specifically taken into\n\/\/\/ account. 
This wrapper struct is useful for a quick and lightweight\n\/\/\/ annotation that a variable is indeed panic safe.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic::{self, AssertRecoverSafe};\n\/\/\/\n\/\/\/ let mut variable = 4;\n\/\/\/\n\/\/\/ \/\/ This code will not compile because the closure captures `&mut variable`\n\/\/\/ \/\/ which is not considered panic safe by default.\n\/\/\/\n\/\/\/ \/\/ panic::recover(|| {\n\/\/\/ \/\/ variable += 3;\n\/\/\/ \/\/ });\n\/\/\/\n\/\/\/ \/\/ This, however, will compile due to the `AssertRecoverSafe` wrapper\n\/\/\/ let result = {\n\/\/\/ let mut wrapper = AssertRecoverSafe::new(&mut variable);\n\/\/\/ panic::recover(move || {\n\/\/\/ **wrapper += 3;\n\/\/\/ })\n\/\/\/ };\n\/\/\/ \/\/ ...\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub struct AssertRecoverSafe<T>(T);\n\n\/\/ Implementations of the `RecoverSafe` trait:\n\/\/\n\/\/ * By default everything is recover safe\n\/\/ * pointers T contains mutability of some form are not recover safe\n\/\/ * Unique, an owning pointer, lifts an implementation\n\/\/ * Types like Mutex\/RwLock which are explicilty poisoned are recover safe\n\/\/ * Our custom AssertRecoverSafe wrapper is indeed recover safe\nimpl RecoverSafe for .. {}\nimpl<'a, T: ?Sized> !RecoverSafe for &'a mut T {}\nimpl<'a, T: RefRecoverSafe + ?Sized> RecoverSafe for &'a T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *const T {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for *mut T {}\nimpl<T: RecoverSafe> RecoverSafe for Unique<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Shared<T> {}\nimpl<T: ?Sized> RecoverSafe for Mutex<T> {}\nimpl<T: ?Sized> RecoverSafe for RwLock<T> {}\nimpl<T> RecoverSafe for AssertRecoverSafe<T> {}\n\n\/\/ not covered via the Shared impl above b\/c the inner contents use\n\/\/ Cell\/AtomicUsize, but the usage here is recover safe so we can lift the\n\/\/ impl up one level to Arc\/Rc itself\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Rc<T> {}\nimpl<T: RefRecoverSafe + ?Sized> RecoverSafe for Arc<T> {}\n\n\/\/ Pretty simple implementations for the `RefRecoverSafe` marker trait,\n\/\/ basically just saying that this is a marker trait and `UnsafeCell` is the\n\/\/ only thing which doesn't implement it (which then transitively applies to\n\/\/ everything else).\nimpl RefRecoverSafe for .. {}\nimpl<T: ?Sized> !RefRecoverSafe for UnsafeCell<T> {}\nimpl<T> RefRecoverSafe for AssertRecoverSafe<T> {}\n\nimpl<T> AssertRecoverSafe<T> {\n \/\/\/ Creates a new `AssertRecoverSafe` wrapper around the provided type.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n pub fn new(t: T) -> AssertRecoverSafe<T> {\n AssertRecoverSafe(t)\n }\n\n \/\/\/ Consumes the `AssertRecoverSafe`, returning the wrapped value.\n #[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\n pub fn into_inner(self) -> T {\n self.0\n }\n}\n\nimpl<T> Deref for AssertRecoverSafe<T> {\n type Target = T;\n\n fn deref(&self) -> &T {\n &self.0\n }\n}\n\nimpl<T> DerefMut for AssertRecoverSafe<T> {\n fn deref_mut(&mut self) -> &mut T {\n &mut self.0\n }\n}\n\n\/\/\/ Invokes a closure, capturing the cause of panic if one occurs.\n\/\/\/\n\/\/\/ This function will return `Ok` with the closure's result if the closure\n\/\/\/ does not panic, and will return `Err(cause)` if the closure panics. 
The\n\/\/\/ `cause` returned is the object with which panic was originally invoked.\n\/\/\/\n\/\/\/ It is currently undefined behavior to unwind from Rust code into foreign\n\/\/\/ code, so this function is particularly useful when Rust is called from\n\/\/\/ another language (normally C). This can run arbitrary Rust code, capturing a\n\/\/\/ panic and allowing a graceful handling of the error.\n\/\/\/\n\/\/\/ It is **not** recommended to use this function for a general try\/catch\n\/\/\/ mechanism. The `Result` type is more appropriate to use for functions that\n\/\/\/ can fail on a regular basis.\n\/\/\/\n\/\/\/ The closure provided is required to adhere to the `RecoverSafe` to ensure\n\/\/\/ that all captured variables are safe to cross this recover boundary. The\n\/\/\/ purpose of this bound is to encode the concept of [exception safety][rfc] in\n\/\/\/ the type system. Most usage of this function should not need to worry about\n\/\/\/ this bound as programs are naturally panic safe without `unsafe` code. If it\n\/\/\/ becomes a problem the associated `AssertRecoverSafe` wrapper type in this\n\/\/\/ module can be used to quickly assert that the usage here is indeed exception\n\/\/\/ safe.\n\/\/\/\n\/\/\/ [rfc]: https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1236-stabilize-catch-panic.md\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ #![feature(recover, std_panic)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ println!(\"hello!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_ok());\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/ assert!(result.is_err());\n\/\/\/ ```\n#[unstable(feature = \"recover\", reason = \"awaiting feedback\", issue = \"27719\")]\npub fn recover<F: FnOnce() -> R + RecoverSafe, R>(f: F) -> Result<R> {\n let mut result = None;\n unsafe {\n let result = &mut result;\n try!(unwind::try(move || *result = Some(f())))\n }\n Ok(result.unwrap())\n}\n\n\/\/\/ Triggers a panic without invoking the panic handler.\n\/\/\/\n\/\/\/ This is designed to be used in conjunction with `recover` to, for example,\n\/\/\/ carry a panic across a layer of C code.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```should_panic\n\/\/\/ #![feature(std_panic, recover, panic_propagate)]\n\/\/\/\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ let result = panic::recover(|| {\n\/\/\/ panic!(\"oh no!\");\n\/\/\/ });\n\/\/\/\n\/\/\/ if let Err(err) = result {\n\/\/\/ panic::propagate(err);\n\/\/\/ }\n\/\/\/ ```\n#[unstable(feature = \"panic_propagate\", reason = \"awaiting feedback\", issue = \"30752\")]\npub fn propagate(payload: Box<Any + Send>) -> ! {\n unwind::rust_panic(payload)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary let binding<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #36087 - apasel422:issue-28324, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern {\n static error_message_count: u32;\n}\n\npub static BAZ: u32 = *&error_message_count;\n\/\/~^ ERROR cannot refer to other statics by value\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update cryptoutil to use wrapping arithmetic functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #76961 - bugadani:test-34634, r=Mark-Simulacrum<commit_after>\/\/ Test that `wrapping_div` only checks divisor once.\n\/\/ This test checks that there is only a single compare agains -1 and -1 is not present as a\n\/\/ switch case (the second check present until rustc 1.12).\n\/\/ This test also verifies that a single panic call is generated (for the division by zero case).\n\n\/\/ compile-flags: -O\n#![crate_type = \"lib\"]\n\n\/\/ CHECK-LABEL: @f\n#[no_mangle]\npub fn f(x: i32, y: i32) -> i32 {\n \/\/ CHECK-COUNT-1: icmp eq i32 %y, -1\n \/\/ CHECK-COUNT-1: panic\n \/\/ CHECK-NOT: i32 -1, label\n x.wrapping_div(y)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #5783<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Issue #5783\n\/\/ Nondeterministic behavior when referencing a closure more than once\n\nfn main() {\n let a: &fn(int) -> @fn(int) -> int = |x:int| |y:int| -> int x + y;\n let b = a(2);\n assert!(a(2)(3) == 5);\n assert!(b(6) == 8);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #11606 : alexcrichton\/rust\/issue-9259, r=brson<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct A<'a> {\n a: &'a [~str],\n b: Option<&'a [~str]>,\n}\n\npub fn main() {\n let b = &[~\"foo\"];\n let a = A {\n a: &[~\"test\"],\n b: Some(b),\n };\n assert_eq!(a.b.get_ref()[0].as_slice(), \"foo\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(json_schema): less rigid date-parsing<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse canvas_msg::{CanvasMsg, CanvasWebGLMsg, CanvasCommonMsg};\nuse geom::size::Size2D;\n\nuse gleam::gl;\nuse gleam::gl::types::{GLsizei};\n\nuse util::task::spawn_named;\n\nuse std::borrow::ToOwned;\nuse std::slice::bytes::copy_memory;\nuse std::sync::mpsc::{channel, Sender};\nuse util::vec::byte_swap;\nuse offscreen_gl_context::{GLContext, GLContextAttributes};\n\npub struct WebGLPaintTask {\n size: Size2D<i32>,\n original_context_size: Size2D<i32>,\n gl_context: GLContext,\n}\n\n\/\/ This allows trying to create the PaintTask\n\/\/ before creating the thread\nunsafe impl Send for WebGLPaintTask {}\n\nimpl WebGLPaintTask {\n fn new(size: Size2D<i32>) -> Result<WebGLPaintTask, &'static str> {\n \/\/ TODO(ecoal95): Get the GLContextAttributes from the `GetContext` call\n let context = try!(GLContext::create_offscreen(size, GLContextAttributes::default()));\n Ok(WebGLPaintTask {\n size: size,\n original_context_size: size,\n gl_context: context\n })\n }\n\n pub fn start(size: Size2D<i32>) -> Result<Sender<CanvasMsg>, &'static str> {\n let (chan, port) = channel::<CanvasMsg>();\n let mut painter = try!(WebGLPaintTask::new(size));\n spawn_named(\"WebGLTask\".to_owned(), move || {\n painter.init();\n loop {\n match port.recv().unwrap() {\n CanvasMsg::WebGL(message) => {\n match message {\n CanvasWebGLMsg::AttachShader(program_id, shader_id) => painter.attach_shader(program_id, shader_id),\n CanvasWebGLMsg::BindBuffer(buffer_type, buffer_id) => painter.bind_buffer(buffer_type, buffer_id),\n CanvasWebGLMsg::BufferData(buffer_type, data, usage) => painter.buffer_data(buffer_type, data, usage),\n CanvasWebGLMsg::Clear(mask) => painter.clear(mask),\n CanvasWebGLMsg::ClearColor(r, g, b, a) => painter.clear_color(r, g, b, a),\n CanvasWebGLMsg::CreateBuffer(chan) => painter.create_buffer(chan),\n CanvasWebGLMsg::DrawArrays(mode, first, count) => painter.draw_arrays(mode, first, count),\n CanvasWebGLMsg::EnableVertexAttribArray(attrib_id) => painter.enable_vertex_attrib_array(attrib_id),\n CanvasWebGLMsg::GetAttribLocation(program_id, name, chan) => painter.get_attrib_location(program_id, name, chan),\n CanvasWebGLMsg::GetShaderInfoLog(shader_id, chan) => painter.get_shader_info_log(shader_id, chan),\n CanvasWebGLMsg::GetShaderParameter(shader_id, param_id, chan) => painter.get_shader_parameter(shader_id, param_id, chan),\n CanvasWebGLMsg::GetUniformLocation(program_id, name, chan) => painter.get_uniform_location(program_id, name, chan),\n CanvasWebGLMsg::CompileShader(shader_id) => painter.compile_shader(shader_id),\n CanvasWebGLMsg::CreateProgram(chan) => painter.create_program(chan),\n CanvasWebGLMsg::CreateShader(shader_type, chan) => painter.create_shader(shader_type, chan),\n CanvasWebGLMsg::LinkProgram(program_id) => painter.link_program(program_id),\n CanvasWebGLMsg::ShaderSource(shader_id, source) => painter.shader_source(shader_id, source),\n CanvasWebGLMsg::Uniform4fv(uniform_id, data) => painter.uniform_4fv(uniform_id, data),\n CanvasWebGLMsg::UseProgram(program_id) => painter.use_program(program_id),\n CanvasWebGLMsg::VertexAttribPointer2f(attrib_id, size, normalized, stride, offset) => {\n painter.vertex_attrib_pointer_f32(attrib_id, size, normalized, stride, offset);\n },\n CanvasWebGLMsg::Viewport(x, y, width, height) => painter.viewport(x, y, width, height),\n }\n },\n CanvasMsg::Common(message) => {\n match message {\n CanvasCommonMsg::Close => break,\n CanvasCommonMsg::SendPixelContents(chan) => painter.send_pixel_contents(chan),\n \/\/ TODO(ecoal95): handle error nicely\n 
CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),\n }\n },\n CanvasMsg::Canvas2d(_) => panic!(\"Wrong message sent to WebGLTask\"),\n }\n }\n });\n\n Ok(chan)\n }\n\n fn attach_shader(&self, program_id: u32, shader_id: u32) {\n gl::attach_shader(program_id, shader_id);\n }\n\n fn bind_buffer(&self, buffer_type: u32, buffer_id: u32) {\n gl::bind_buffer(buffer_type, buffer_id);\n }\n\n fn buffer_data(&self, buffer_type: u32, data: Vec<f32>, usage: u32) {\n gl::buffer_data(buffer_type, &data, usage);\n }\n\n fn clear(&self, mask: u32) {\n gl::clear(mask);\n }\n\n fn clear_color(&self, r: f32, g: f32, b: f32, a: f32) {\n gl::clear_color(r, g, b, a);\n }\n\n fn create_buffer(&self, chan: Sender<u32>) {\n let buffers = gl::gen_buffers(1);\n chan.send(buffers[0]).unwrap();\n }\n\n fn compile_shader(&self, shader_id: u32) {\n gl::compile_shader(shader_id);\n }\n\n fn create_program(&self, chan: Sender<u32>) {\n let program = gl::create_program();\n chan.send(program).unwrap();\n }\n\n fn create_shader(&self, shader_type: u32, chan: Sender<u32>) {\n let shader = gl::create_shader(shader_type);\n chan.send(shader).unwrap();\n }\n\n fn draw_arrays(&self, mode: u32, first: i32, count: i32) {\n gl::draw_arrays(mode, first, count);\n }\n\n fn enable_vertex_attrib_array(&self, attrib_id: u32) {\n gl::enable_vertex_attrib_array(attrib_id);\n }\n\n fn get_attrib_location(&self, program_id: u32, name: String, chan: Sender<i32> ) {\n let attrib_location = gl::get_attrib_location(program_id, &name);\n chan.send(attrib_location).unwrap();\n }\n\n fn get_shader_info_log(&self, shader_id: u32, chan: Sender<String>) {\n let info = gl::get_shader_info_log(shader_id);\n chan.send(info).unwrap();\n }\n\n fn get_shader_parameter(&self, shader_id: u32, param_id: u32, chan: Sender<i32>) {\n let parameter = gl::get_shader_iv(shader_id, param_id);\n chan.send(parameter as i32).unwrap();\n }\n\n fn get_uniform_location(&self, program_id: u32, name: String, chan: Sender<u32>) {\n let uniform_location = gl::get_uniform_location(program_id, &name);\n chan.send(uniform_location as u32).unwrap();\n }\n\n fn link_program(&self, program_id: u32) {\n gl::link_program(program_id);\n }\n\n fn send_pixel_contents(&mut self, chan: Sender<Vec<u8>>) {\n \/\/ FIXME(#5652, dmarcos) Instead of a readback strategy we have\n \/\/ to layerize the canvas\n let width = self.size.width as usize;\n let height = self.size.height as usize;\n let mut pixels = gl::read_pixels(0, 0,\n self.size.width as gl::GLsizei,\n self.size.height as gl::GLsizei,\n gl::RGBA, gl::UNSIGNED_BYTE);\n \/\/ flip image vertically (texture is upside down)\n let orig_pixels = pixels.clone();\n let stride = width * 4;\n for y in 0..height {\n let dst_start = y * stride;\n let src_start = (height - y - 1) * stride;\n let src_slice = &orig_pixels[src_start .. src_start + stride];\n copy_memory(&src_slice[..stride], &mut pixels[dst_start .. 
dst_start + stride]);\n }\n\n \/\/ rgba -> bgra\n byte_swap(&mut pixels);\n chan.send(pixels).unwrap();\n }\n\n fn shader_source(&self, shader_id: u32, source_lines: Vec<String>) {\n let mut lines: Vec<&[u8]> = source_lines.iter().map(|line| line.as_bytes()).collect();\n gl::shader_source(shader_id, &mut lines);\n }\n\n fn uniform_4fv(&self, uniform_id: u32, data: Vec<f32>) {\n gl::uniform_4f(uniform_id as i32, data[0], data[1], data[2], data[3]);\n }\n\n fn use_program(&self, program_id: u32) {\n gl::use_program(program_id);\n }\n\n fn vertex_attrib_pointer_f32(&self, attrib_id: u32, size: i32,\n normalized: bool, stride: i32, offset: i64) {\n gl::vertex_attrib_pointer_f32(attrib_id, size, normalized, stride, offset as u32);\n }\n\n fn viewport(&self, x: i32, y: i32, width: i32, height: i32) {\n gl::viewport(x, y, width, height);\n }\n\n fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {\n if size.width > self.original_context_size.width ||\n size.height > self.original_context_size.height {\n try!(self.gl_context.resize(size));\n self.size = size;\n } else {\n self.size = size;\n unsafe { gl::Scissor(0, 0, size.width, size.height); }\n }\n Ok(())\n }\n\n fn init(&mut self) {\n self.gl_context.make_current().unwrap();\n }\n}\n<commit_msg>Move handling of CanvasWebGLMsgs into WebGLPaintTask::handle_webgl_message.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse canvas_msg::{CanvasMsg, CanvasWebGLMsg, CanvasCommonMsg};\nuse geom::size::Size2D;\n\nuse gleam::gl;\nuse gleam::gl::types::{GLsizei};\n\nuse util::task::spawn_named;\n\nuse std::borrow::ToOwned;\nuse std::slice::bytes::copy_memory;\nuse std::sync::mpsc::{channel, Sender};\nuse util::vec::byte_swap;\nuse offscreen_gl_context::{GLContext, GLContextAttributes};\n\npub struct WebGLPaintTask {\n size: Size2D<i32>,\n original_context_size: Size2D<i32>,\n gl_context: GLContext,\n}\n\n\/\/ This allows trying to create the PaintTask\n\/\/ before creating the thread\nunsafe impl Send for WebGLPaintTask {}\n\nimpl WebGLPaintTask {\n fn new(size: Size2D<i32>) -> Result<WebGLPaintTask, &'static str> {\n \/\/ TODO(ecoal95): Get the GLContextAttributes from the `GetContext` call\n let context = try!(GLContext::create_offscreen(size, GLContextAttributes::default()));\n Ok(WebGLPaintTask {\n size: size,\n original_context_size: size,\n gl_context: context\n })\n }\n\n pub fn handle_webgl_message(&self, message: CanvasWebGLMsg) {\n match message {\n CanvasWebGLMsg::AttachShader(program_id, shader_id) => self.attach_shader(program_id, shader_id),\n CanvasWebGLMsg::BindBuffer(buffer_type, buffer_id) => self.bind_buffer(buffer_type, buffer_id),\n CanvasWebGLMsg::BufferData(buffer_type, data, usage) => self.buffer_data(buffer_type, data, usage),\n CanvasWebGLMsg::Clear(mask) => self.clear(mask),\n CanvasWebGLMsg::ClearColor(r, g, b, a) => self.clear_color(r, g, b, a),\n CanvasWebGLMsg::CreateBuffer(chan) => self.create_buffer(chan),\n CanvasWebGLMsg::DrawArrays(mode, first, count) => self.draw_arrays(mode, first, count),\n CanvasWebGLMsg::EnableVertexAttribArray(attrib_id) => self.enable_vertex_attrib_array(attrib_id),\n CanvasWebGLMsg::GetAttribLocation(program_id, name, chan) => self.get_attrib_location(program_id, name, chan),\n CanvasWebGLMsg::GetShaderInfoLog(shader_id, chan) => self.get_shader_info_log(shader_id, chan),\n 
CanvasWebGLMsg::GetShaderParameter(shader_id, param_id, chan) => self.get_shader_parameter(shader_id, param_id, chan),\n CanvasWebGLMsg::GetUniformLocation(program_id, name, chan) => self.get_uniform_location(program_id, name, chan),\n CanvasWebGLMsg::CompileShader(shader_id) => self.compile_shader(shader_id),\n CanvasWebGLMsg::CreateProgram(chan) => self.create_program(chan),\n CanvasWebGLMsg::CreateShader(shader_type, chan) => self.create_shader(shader_type, chan),\n CanvasWebGLMsg::LinkProgram(program_id) => self.link_program(program_id),\n CanvasWebGLMsg::ShaderSource(shader_id, source) => self.shader_source(shader_id, source),\n CanvasWebGLMsg::Uniform4fv(uniform_id, data) => self.uniform_4fv(uniform_id, data),\n CanvasWebGLMsg::UseProgram(program_id) => self.use_program(program_id),\n CanvasWebGLMsg::VertexAttribPointer2f(attrib_id, size, normalized, stride, offset) => {\n self.vertex_attrib_pointer_f32(attrib_id, size, normalized, stride, offset);\n },\n CanvasWebGLMsg::Viewport(x, y, width, height) => self.viewport(x, y, width, height),\n }\n }\n\n pub fn start(size: Size2D<i32>) -> Result<Sender<CanvasMsg>, &'static str> {\n let (chan, port) = channel::<CanvasMsg>();\n let mut painter = try!(WebGLPaintTask::new(size));\n spawn_named(\"WebGLTask\".to_owned(), move || {\n painter.init();\n loop {\n match port.recv().unwrap() {\n CanvasMsg::WebGL(message) => painter.handle_webgl_message(message),\n CanvasMsg::Common(message) => {\n match message {\n CanvasCommonMsg::Close => break,\n CanvasCommonMsg::SendPixelContents(chan) => painter.send_pixel_contents(chan),\n \/\/ TODO(ecoal95): handle error nicely\n CanvasCommonMsg::Recreate(size) => painter.recreate(size).unwrap(),\n }\n },\n CanvasMsg::Canvas2d(_) => panic!(\"Wrong message sent to WebGLTask\"),\n }\n }\n });\n\n Ok(chan)\n }\n\n fn attach_shader(&self, program_id: u32, shader_id: u32) {\n gl::attach_shader(program_id, shader_id);\n }\n\n fn bind_buffer(&self, buffer_type: u32, buffer_id: u32) {\n gl::bind_buffer(buffer_type, buffer_id);\n }\n\n fn buffer_data(&self, buffer_type: u32, data: Vec<f32>, usage: u32) {\n gl::buffer_data(buffer_type, &data, usage);\n }\n\n fn clear(&self, mask: u32) {\n gl::clear(mask);\n }\n\n fn clear_color(&self, r: f32, g: f32, b: f32, a: f32) {\n gl::clear_color(r, g, b, a);\n }\n\n fn create_buffer(&self, chan: Sender<u32>) {\n let buffers = gl::gen_buffers(1);\n chan.send(buffers[0]).unwrap();\n }\n\n fn compile_shader(&self, shader_id: u32) {\n gl::compile_shader(shader_id);\n }\n\n fn create_program(&self, chan: Sender<u32>) {\n let program = gl::create_program();\n chan.send(program).unwrap();\n }\n\n fn create_shader(&self, shader_type: u32, chan: Sender<u32>) {\n let shader = gl::create_shader(shader_type);\n chan.send(shader).unwrap();\n }\n\n fn draw_arrays(&self, mode: u32, first: i32, count: i32) {\n gl::draw_arrays(mode, first, count);\n }\n\n fn enable_vertex_attrib_array(&self, attrib_id: u32) {\n gl::enable_vertex_attrib_array(attrib_id);\n }\n\n fn get_attrib_location(&self, program_id: u32, name: String, chan: Sender<i32> ) {\n let attrib_location = gl::get_attrib_location(program_id, &name);\n chan.send(attrib_location).unwrap();\n }\n\n fn get_shader_info_log(&self, shader_id: u32, chan: Sender<String>) {\n let info = gl::get_shader_info_log(shader_id);\n chan.send(info).unwrap();\n }\n\n fn get_shader_parameter(&self, shader_id: u32, param_id: u32, chan: Sender<i32>) {\n let parameter = gl::get_shader_iv(shader_id, param_id);\n chan.send(parameter as i32).unwrap();\n }\n\n fn 
get_uniform_location(&self, program_id: u32, name: String, chan: Sender<u32>) {\n let uniform_location = gl::get_uniform_location(program_id, &name);\n chan.send(uniform_location as u32).unwrap();\n }\n\n fn link_program(&self, program_id: u32) {\n gl::link_program(program_id);\n }\n\n fn send_pixel_contents(&mut self, chan: Sender<Vec<u8>>) {\n \/\/ FIXME(#5652, dmarcos) Instead of a readback strategy we have\n \/\/ to layerize the canvas\n let width = self.size.width as usize;\n let height = self.size.height as usize;\n let mut pixels = gl::read_pixels(0, 0,\n self.size.width as gl::GLsizei,\n self.size.height as gl::GLsizei,\n gl::RGBA, gl::UNSIGNED_BYTE);\n \/\/ flip image vertically (texture is upside down)\n let orig_pixels = pixels.clone();\n let stride = width * 4;\n for y in 0..height {\n let dst_start = y * stride;\n let src_start = (height - y - 1) * stride;\n let src_slice = &orig_pixels[src_start .. src_start + stride];\n copy_memory(&src_slice[..stride], &mut pixels[dst_start .. dst_start + stride]);\n }\n\n \/\/ rgba -> bgra\n byte_swap(&mut pixels);\n chan.send(pixels).unwrap();\n }\n\n fn shader_source(&self, shader_id: u32, source_lines: Vec<String>) {\n let mut lines: Vec<&[u8]> = source_lines.iter().map(|line| line.as_bytes()).collect();\n gl::shader_source(shader_id, &mut lines);\n }\n\n fn uniform_4fv(&self, uniform_id: u32, data: Vec<f32>) {\n gl::uniform_4f(uniform_id as i32, data[0], data[1], data[2], data[3]);\n }\n\n fn use_program(&self, program_id: u32) {\n gl::use_program(program_id);\n }\n\n fn vertex_attrib_pointer_f32(&self, attrib_id: u32, size: i32,\n normalized: bool, stride: i32, offset: i64) {\n gl::vertex_attrib_pointer_f32(attrib_id, size, normalized, stride, offset as u32);\n }\n\n fn viewport(&self, x: i32, y: i32, width: i32, height: i32) {\n gl::viewport(x, y, width, height);\n }\n\n fn recreate(&mut self, size: Size2D<i32>) -> Result<(), &'static str> {\n if size.width > self.original_context_size.width ||\n size.height > self.original_context_size.height {\n try!(self.gl_context.resize(size));\n self.size = size;\n } else {\n self.size = size;\n unsafe { gl::Scissor(0, 0, size.width, size.height); }\n }\n Ok(())\n }\n\n fn init(&mut self) {\n self.gl_context.make_current().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added solution for problem 2.<commit_after>\/\/ Each new term in the Fibonacci sequence is generated by adding the previous two terms.\n\/\/ By starting with 1 and 2, the first 10 terms will be:\n\/\/\n\/\/ 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...\n\/\/\n\/\/ By considering the terms in the Fibonacci sequence whose values do not exceed four million,\n\/\/ find the sum of the even-valued terms.\n\nfn main() {\n\n let mut sum = 2; \/\/ First even value\n let mut prev = 1;\n let mut curr = 2;\n \n while curr < 4_000_000 {\n let term = curr + prev;\n prev = curr;\n curr = term;\n if term % 2 == 0 {\n sum += curr;\n }\n }\n println!(\"Sum of even fibs under {} is: {}\", 4_000_000, sum);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reimplement SignatureSet as a map so signatures can be efficiently looked up by key ID.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update for latest nightly<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix typos in documentation for Poll (#734)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #20396<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\ntrait Foo<T> {\n fn noop(&self, _: T);\n}\n\nenum Bar<T> { Bla(T) }\n\nstruct Baz<'a> {\n inner: for<'b> Foo<Bar<&'b ()>> + 'a,\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #68<commit_after>use common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 68,\n answer: \"6531031914842725\",\n solver: solve\n};\n\nfn solve() -> ~str {\n \/\/ solve by hand...\n \/\/\n \/\/ outside: 6, 10, 9, 8, 7\n \/\/ (1 + 2 + 3 + 4 + 5) * 2 +\n \/\/ 6 + 7 + 8 + 9 + 10\n \/\/ = 15 + 55 = 70\n \/\/ 70 \/ 5 = 14\n \/\/\n \/\/ 6, 5, X is max => 6, 5, 3\n \/\/\n \/\/ 6, 5, 3\n \/\/ 10, 3, 1\n \/\/ 9, 1, 4\n \/\/ 8, 4, 2\n \/\/ 7, 2, 5\n\n ~\"6531031914842725\"\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test that uses a hypothetical bare-fn spawn signature<commit_after>\/\/ This is what the signature to spawn should look like with bare functions\n\nfn spawn<~T>(val: T, f: fn#(T)) {\n f(val);\n}\n\nfn# f(&&i: int) {\n assert i == 100;\n}\n\nfn main() {\n spawn(100, f);\n spawn(100, fn#(&&i: int) {\n assert i == 100;\n });\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: closure with move<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>am 19744204: Merge \"fix build break\" into jb-mr1-dev<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for library::handle_submissions.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test(sync): fix it<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(syncfile): add new interface that allows saving data from a buffer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove test_usermode<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>static const<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>genet-abi: use NonNull for Fixed and MutFixed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Correct F1E opcode<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>example of recursive structure in Rust, looks like idiomatic SML<commit_after>\/\/ Recursive structure\/implementation of Json\n\nuse std::collections::HashMap;\n\nenum Json {\n Null,\n Boolean(bool),\n Number(f64),\n String(String),\n Array(Vec<Json>),\n Object(Box<HashMap<String, Json>>)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added test to check the Accept-Encoding is not changed if already set<commit_after><|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::str::Split;\n\nuse clap::ArgMatches;\nuse toml::Value;\n\nuse libimagstore::store::EntryHeader;\nuse libimagutil::key_value_split::IntoKeyValue;\n\npub fn build_toml_header(matches: &ArgMatches, mut header: EntryHeader) -> EntryHeader {\n debug!(\"Building header from cli spec\");\n if let Some(headerspecs) = matches.values_of(\"header\") {\n let mut main = header.into();\n let kvs = headerspecs.into_iter()\n .filter_map(|hs| {\n debug!(\"- Processing: '{}'\", hs);\n let kv = String::from(hs).into_kv();\n 
debug!(\"- got: '{:?}'\", kv);\n kv\n });\n for tpl in kvs {\n let (key, value) = tpl.into();\n debug!(\"Splitting: {:?}\", key);\n let mut split = key.split(\".\");\n let current = split.next();\n if current.is_some() {\n insert_key_into(String::from(current.unwrap()), &mut split, value, &mut main);\n }\n }\n\n debug!(\"Header = {:?}\", main);\n EntryHeader::from(main)\n } else {\n debug!(\"Header = {:?}\", header);\n header\n }\n}\n\nfn insert_key_into(current: String,\n rest_path: &mut Split<&str>,\n value: String,\n map: &mut BTreeMap<String, Value>) {\n let next = rest_path.next();\n\n if next.is_none() {\n debug!(\"Inserting into {:?} = {:?}\", current, value);\n map.insert(current, parse_value(value));\n } else {\n debug!(\"Inserting into {:?} ... = {:?}\", current, value);\n if map.contains_key(¤t) {\n match map.get_mut(¤t).unwrap() {\n &mut Value::Table(ref mut t) => {\n insert_key_into(String::from(next.unwrap()), rest_path, value, t);\n },\n _ => unreachable!(),\n }\n } else {\n let mut submap = BTreeMap::new();\n insert_key_into(String::from(next.unwrap()), rest_path, value, &mut submap);\n debug!(\"Inserting submap = {:?}\", submap);\n map.insert(current, Value::Table(submap));\n }\n }\n}\n\nfn parse_value(value: String) -> Value {\n use std::str::FromStr;\n\n fn is_ary(v: &String) -> bool {\n v.chars().next() == Some('[') && v.chars().last() == Some(']') && v.len() >= 3\n }\n\n if value == \"true\" {\n debug!(\"Building Boolean out of: {:?}...\", value);\n Value::Boolean(true)\n } else if value == \"false\" {\n debug!(\"Building Boolean out of: {:?}...\", value);\n Value::Boolean(false)\n } else if is_ary(&value) {\n debug!(\"Building Array out of: {:?}...\", value);\n let sub = &value[1..(value.len()-1)];\n Value::Array(sub.split(\",\").map(|v| parse_value(String::from(v))).collect())\n } else {\n FromStr::from_str(&value[..])\n .map(|i: i64| {\n debug!(\"Building Integer out of: {:?}...\", value);\n Value::Integer(i)\n })\n .unwrap_or_else(|_| {\n FromStr::from_str(&value[..])\n .map(|f: f64| {\n debug!(\"Building Float out of: {:?}...\", value);\n Value::Float(f)\n })\n .unwrap_or_else(|_| {\n debug!(\"Building String out of: {:?}...\", value);\n Value::String(value)\n })\n })\n }\n}\n\n<commit_msg>Remove unneeded keyword \"mut\"<commit_after>use std::collections::BTreeMap;\nuse std::str::Split;\n\nuse clap::ArgMatches;\nuse toml::Value;\n\nuse libimagstore::store::EntryHeader;\nuse libimagutil::key_value_split::IntoKeyValue;\n\npub fn build_toml_header(matches: &ArgMatches, header: EntryHeader) -> EntryHeader {\n debug!(\"Building header from cli spec\");\n if let Some(headerspecs) = matches.values_of(\"header\") {\n let mut main = header.into();\n let kvs = headerspecs.into_iter()\n .filter_map(|hs| {\n debug!(\"- Processing: '{}'\", hs);\n let kv = String::from(hs).into_kv();\n debug!(\"- got: '{:?}'\", kv);\n kv\n });\n for tpl in kvs {\n let (key, value) = tpl.into();\n debug!(\"Splitting: {:?}\", key);\n let mut split = key.split(\".\");\n let current = split.next();\n if current.is_some() {\n insert_key_into(String::from(current.unwrap()), &mut split, value, &mut main);\n }\n }\n\n debug!(\"Header = {:?}\", main);\n EntryHeader::from(main)\n } else {\n debug!(\"Header = {:?}\", header);\n header\n }\n}\n\nfn insert_key_into(current: String,\n rest_path: &mut Split<&str>,\n value: String,\n map: &mut BTreeMap<String, Value>) {\n let next = rest_path.next();\n\n if next.is_none() {\n debug!(\"Inserting into {:?} = {:?}\", current, value);\n map.insert(current, 
parse_value(value));\n } else {\n debug!(\"Inserting into {:?} ... = {:?}\", current, value);\n if map.contains_key(¤t) {\n match map.get_mut(¤t).unwrap() {\n &mut Value::Table(ref mut t) => {\n insert_key_into(String::from(next.unwrap()), rest_path, value, t);\n },\n _ => unreachable!(),\n }\n } else {\n let mut submap = BTreeMap::new();\n insert_key_into(String::from(next.unwrap()), rest_path, value, &mut submap);\n debug!(\"Inserting submap = {:?}\", submap);\n map.insert(current, Value::Table(submap));\n }\n }\n}\n\nfn parse_value(value: String) -> Value {\n use std::str::FromStr;\n\n fn is_ary(v: &String) -> bool {\n v.chars().next() == Some('[') && v.chars().last() == Some(']') && v.len() >= 3\n }\n\n if value == \"true\" {\n debug!(\"Building Boolean out of: {:?}...\", value);\n Value::Boolean(true)\n } else if value == \"false\" {\n debug!(\"Building Boolean out of: {:?}...\", value);\n Value::Boolean(false)\n } else if is_ary(&value) {\n debug!(\"Building Array out of: {:?}...\", value);\n let sub = &value[1..(value.len()-1)];\n Value::Array(sub.split(\",\").map(|v| parse_value(String::from(v))).collect())\n } else {\n FromStr::from_str(&value[..])\n .map(|i: i64| {\n debug!(\"Building Integer out of: {:?}...\", value);\n Value::Integer(i)\n })\n .unwrap_or_else(|_| {\n FromStr::from_str(&value[..])\n .map(|f: f64| {\n debug!(\"Building Float out of: {:?}...\", value);\n Value::Float(f)\n })\n .unwrap_or_else(|_| {\n debug!(\"Building String out of: {:?}...\", value);\n Value::String(value)\n })\n })\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Choose better temporaries when building SPIR-V, which prevents my editor having conniptions<commit_after><|endoftext|>"} {"text":"<commit_before>use inflector::Inflector;\nuse regex::{Captures, Regex};\n\nuse botocore::{Member, Operation, Service, Shape};\nuse super::GenerateProtocol;\n\npub struct RestJsonGenerator;\n\nimpl GenerateProtocol for RestJsonGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n let input_type = operation.input_shape();\n let output_type = operation.output_shape_or(\"()\");\n\n \/\/ Retrieve the `Shape` for the input for this operation.\n let input_shape = service.shapes.get(input_type).unwrap();\n\n \/\/ Construct a list of format strings which will be used to format\n \/\/ the request URI, mapping the input struct to the URI arguments.\n let member_uri_strings = generate_shape_member_uri_strings(input_shape);\n\n \/\/ A boolean controlling whether or not the payload should be loaded\n \/\/ into the request.\n \/\/ According to the AWS SDK documentation, requests should only have\n \/\/ a request body for operations with ANY non-URI or non-query\n \/\/ parameters.\n let load_payload = input_shape.members\n .as_ref()\n .unwrap()\n .iter()\n .any(|(_, member)| member.location.is_none());\n\n \/\/ Construct a list of strings which will be used to load request\n \/\/ parameters from the input struct into a `Params` vec, which will\n \/\/ then be added to the request.\n let member_param_strings = generate_shape_member_param_strings(input_shape);\n\n format!(\"\n {documentation}\n pub fn {method_name}(&self, input: &{input_type}) -> AwsResult<{output_type}> {{\n {encode_input}\n\n {request_uri_formatter}\n\n let mut request = SignedRequest::new(\\\"{http_method}\\\", \\\"{endpoint_prefix}\\\", self.region, &request_uri);\n request.set_content_type(\\\"application\/x-amz-json-1.1\\\".to_owned());\n {load_payload}\n {load_params}\n\n 
let mut result = request.sign_and_execute(try!(self.credentials_provider.credentials()));\n let status = result.status.to_u16();\n let mut body = String::new();\n result.read_to_string(&mut body).unwrap();\n\n \/\/ `serde-json` serializes field-less structs as \\\"null\\\", but AWS returns\n \/\/ \\\"{{}}\\\" for a field-less response, so we must check for this result\n \/\/ and convert it if necessary.\n if body == \\\"{{}}\\\" {{\n body = \\\"null\\\".to_owned();\n }}\n\n debug!(\\\"Response body: {{}}\\\", body);\n debug!(\\\"Response status: {{}}\\\", status);\n\n match status {{\n {status_code} => {{\n {ok_response}\n }}\n _ => Err(parse_json_protocol_error(&body)),\n }}\n }}\n \",\n documentation = generate_documentation(operation).unwrap_or(\"\".to_owned()),\n endpoint_prefix = service.metadata.endpoint_prefix,\n http_method = operation.http.method,\n input_type = input_type,\n method_name = operation.name.to_snake_case(),\n status_code = operation.http.response_code.unwrap_or(200),\n ok_response = generate_ok_response(operation, output_type),\n output_type = output_type,\n request_uri_formatter = generate_uri_formatter(\n &generate_snake_case_uri(&operation.http.request_uri),\n &member_uri_strings\n ),\n load_payload = generate_payload_loading_string(load_payload),\n load_params = generate_params_loading_string(&member_param_strings),\n encode_input = generate_encoding_string(load_payload),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self, _: &Service) -> String {\n \"use std::io::Read;\n\n use serde_json;\n\n use credential::ProvideAwsCredentials;\n use error::{AwsResult, parse_json_protocol_error};\n use param::{Params, ServiceParams};\n use region;\n use signature::SignedRequest;\n \".to_owned()\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default, Deserialize, Serialize)]\".to_owned()\n }\n\n fn timestamp_type(&self) -> &'static str {\n \"f64\"\n }\n}\n\nfn generate_encoding_string(load_payload: bool) -> String {\n if load_payload {\n \"let encoded = serde_json::to_string(input).unwrap();\".to_owned()\n } else {\n \"\".to_owned()\n }\n }\n\nfn generate_payload_loading_string(load_payload: bool) -> String {\n if load_payload {\n \"request.set_payload(Some(encoded.as_bytes()));\".to_owned()\n } else {\n \"\".to_owned()\n }\n}\n\nfn generate_snake_case_uri(request_uri: &str) -> String {\n lazy_static! 
{\n static ref URI_ARGS_REGEX: Regex = Regex::new(r\"\\{([\\w\\d]+)\\}\").unwrap();\n }\n\n URI_ARGS_REGEX.replace_all(request_uri, |caps: &Captures| {\n format!(\"{{{}}}\", caps.at(1).map(Inflector::to_snake_case).unwrap())\n })\n}\n\nfn generate_params_loading_string(param_strings: &[String]) -> String {\n match param_strings.len() {\n 0 => \"\".to_owned(),\n _ => {\n format!(\n \"let mut params = Params::new();\n {param_strings}\n request.set_params(params);\",\n param_strings = param_strings.join(\"\\n\")\n )\n },\n }\n}\n\nfn generate_shape_member_param_strings(shape: &Shape) -> Vec<String> {\n shape.members.as_ref().unwrap().iter()\n .filter_map(|(member_name, member)| generate_param_load_string(&member_name, member))\n .collect::<Vec<String>>()\n}\n\nfn generate_param_load_string(member_name: &str, member: &Member) -> Option<String> {\n match member.location {\n Some(ref x) if x == \"querystring\" => {\n Some(format!(\n \"match input.{field_name} {{\n Some(ref x) => params.put(\\\"{member_name}\\\", x),\n None => {{}},\n }}\",\n member_name = member_name,\n field_name = member_name.to_snake_case(),\n ))\n },\n Some(_) => None,\n None => None,\n }\n}\n\nfn generate_uri_formatter(request_uri: &str, uri_strings: &[String]) -> String {\n match uri_strings.len() {\n 0 => {\n format!(\n \"let request_uri = \\\"{request_uri}\\\";\",\n request_uri = request_uri,\n )\n },\n _ => {\n format!(\n \"let request_uri = format!(\\\"{request_uri}\\\", {uri_strings});\",\n request_uri = request_uri,\n uri_strings = uri_strings.join(\", \"))\n },\n }\n}\n\nfn generate_shape_member_uri_strings(shape: &Shape) -> Vec<String> {\n shape.members.as_ref().unwrap().iter()\n .filter_map(|(member_name, member)| generate_member_format_string(&member_name.to_snake_case(), member))\n .collect::<Vec<String>>()\n}\n\nfn generate_member_format_string(member_name: &str, member: &Member) -> Option<String> {\n match member.location {\n Some(ref x) if x == \"uri\" => {\n Some(format!(\n \"{member_name} = input.{field_name}\",\n field_name = member_name,\n member_name = member_name,\n ))\n },\n Some(_) => None,\n None => None,\n }\n}\n\nfn generate_documentation(operation: &Operation) -> Option<String> {\n operation.documentation.as_ref().map(|docs| {\n format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\"))\n })\n}\n\nfn generate_ok_response(operation: &Operation, output_type: &str) -> String {\n if operation.output.is_some() {\n format!(\"Ok(serde_json::from_str::<{}>(&body).unwrap())\", output_type)\n } else {\n \"Ok(())\".to_owned()\n }\n}\n<commit_msg>update rest_json protocol codegen to use shared codegen<commit_after>use inflector::Inflector;\nuse regex::{Captures, Regex};\n\nuse botocore::{Member, Operation, Service, Shape};\nuse super::GenerateProtocol;\n\npub struct RestJsonGenerator;\n\nimpl GenerateProtocol for RestJsonGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n let input_type = operation.input_shape();\n let output_type = operation.output_shape_or(\"()\");\n\n \/\/ Retrieve the `Shape` for the input for this operation.\n let input_shape = service.shapes.get(input_type).unwrap();\n\n \/\/ Construct a list of format strings which will be used to format\n \/\/ the request URI, mapping the input struct to the URI arguments.\n let member_uri_strings = generate_shape_member_uri_strings(input_shape);\n\n \/\/ A boolean controlling whether or not the payload should be loaded\n \/\/ into the request.\n \/\/ According to the AWS 
SDK documentation, requests should only have\n \/\/ a request body for operations with ANY non-URI or non-query\n \/\/ parameters.\n let load_payload = input_shape.members\n .as_ref()\n .unwrap()\n .iter()\n .any(|(_, member)| member.location.is_none());\n\n \/\/ Construct a list of strings which will be used to load request\n \/\/ parameters from the input struct into a `Params` vec, which will\n \/\/ then be added to the request.\n let member_param_strings = generate_shape_member_param_strings(input_shape);\n\n format!(\"\n {documentation}\n pub fn {method_name}(&self, input: &{input_type}) -> AwsResult<{output_type}> {{\n {encode_input}\n\n {request_uri_formatter}\n\n let mut request = SignedRequest::new(\\\"{http_method}\\\", \\\"{endpoint_prefix}\\\", self.region, &request_uri);\n request.set_content_type(\\\"application\/x-amz-json-1.1\\\".to_owned());\n {load_payload}\n {load_params}\n\n request.sign(&try!(self.credentials_provider.credentials()));\n\n let dispatch_result = self.dispatcher.dispatch(&request);\n if dispatch_result.is_err() {{\n return Err(AwsError::new(format!(\\\"Error dispatching HTTP request\\\")));\n }}\n\n let result = dispatch_result.unwrap(); \n let mut body = result.body;\n\n \/\/ `serde-json` serializes field-less structs as \\\"null\\\", but AWS returns\n \/\/ \\\"{{}}\\\" for a field-less response, so we must check for this result\n \/\/ and convert it if necessary.\n if body == \\\"{{}}\\\" {{\n body = \\\"null\\\".to_owned();\n }}\n\n debug!(\\\"Response body: {{}}\\\", body);\n debug!(\\\"Response status: {{}}\\\", result.status);\n\n match result.status {{\n {status_code} => {{\n {ok_response}\n }}\n _ => Err(parse_json_protocol_error(&body)),\n }}\n }}\n \",\n documentation = generate_documentation(operation).unwrap_or(\"\".to_owned()),\n endpoint_prefix = service.metadata.endpoint_prefix,\n http_method = operation.http.method,\n input_type = input_type,\n method_name = operation.name.to_snake_case(),\n status_code = operation.http.response_code.unwrap_or(200),\n ok_response = generate_ok_response(operation, output_type),\n output_type = output_type,\n request_uri_formatter = generate_uri_formatter(\n &generate_snake_case_uri(&operation.http.request_uri),\n &member_uri_strings\n ),\n load_payload = generate_payload_loading_string(load_payload),\n load_params = generate_params_loading_string(&member_param_strings),\n encode_input = generate_encoding_string(load_payload),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self, _: &Service) -> String {\n \"use serde_json;\n\n use credential::ProvideAwsCredentials;\n use error::{AwsResult, parse_json_protocol_error, AwsError};\n use param::{Params, ServiceParams};\n\n use signature::SignedRequest;\n \".to_owned()\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default, Deserialize, Serialize)]\".to_owned()\n }\n\n fn timestamp_type(&self) -> &'static str {\n \"f64\"\n }\n}\n\nfn generate_encoding_string(load_payload: bool) -> String {\n if load_payload {\n \"let encoded = serde_json::to_string(input).unwrap();\".to_owned()\n } else {\n \"\".to_owned()\n }\n }\n\nfn generate_payload_loading_string(load_payload: bool) -> String {\n if load_payload {\n \"request.set_payload(Some(encoded.as_bytes()));\".to_owned()\n } else {\n \"\".to_owned()\n }\n}\n\nfn generate_snake_case_uri(request_uri: &str) -> String {\n lazy_static! 
{\n static ref URI_ARGS_REGEX: Regex = Regex::new(r\"\\{([\\w\\d]+)\\}\").unwrap();\n }\n\n URI_ARGS_REGEX.replace_all(request_uri, |caps: &Captures| {\n format!(\"{{{}}}\", caps.at(1).map(Inflector::to_snake_case).unwrap())\n })\n}\n\nfn generate_params_loading_string(param_strings: &[String]) -> String {\n match param_strings.len() {\n 0 => \"\".to_owned(),\n _ => {\n format!(\n \"let mut params = Params::new();\n {param_strings}\n request.set_params(params);\",\n param_strings = param_strings.join(\"\\n\")\n )\n },\n }\n}\n\nfn generate_shape_member_param_strings(shape: &Shape) -> Vec<String> {\n shape.members.as_ref().unwrap().iter()\n .filter_map(|(member_name, member)| generate_param_load_string(&member_name, member))\n .collect::<Vec<String>>()\n}\n\nfn generate_param_load_string(member_name: &str, member: &Member) -> Option<String> {\n match member.location {\n Some(ref x) if x == \"querystring\" => {\n Some(format!(\n \"match input.{field_name} {{\n Some(ref x) => params.put(\\\"{member_name}\\\", x),\n None => {{}},\n }}\",\n member_name = member_name,\n field_name = member_name.to_snake_case(),\n ))\n },\n Some(_) => None,\n None => None,\n }\n}\n\nfn generate_uri_formatter(request_uri: &str, uri_strings: &[String]) -> String {\n match uri_strings.len() {\n 0 => {\n format!(\n \"let request_uri = \\\"{request_uri}\\\";\",\n request_uri = request_uri,\n )\n },\n _ => {\n format!(\n \"let request_uri = format!(\\\"{request_uri}\\\", {uri_strings});\",\n request_uri = request_uri,\n uri_strings = uri_strings.join(\", \"))\n },\n }\n}\n\nfn generate_shape_member_uri_strings(shape: &Shape) -> Vec<String> {\n shape.members.as_ref().unwrap().iter()\n .filter_map(|(member_name, member)| generate_member_format_string(&member_name.to_snake_case(), member))\n .collect::<Vec<String>>()\n}\n\nfn generate_member_format_string(member_name: &str, member: &Member) -> Option<String> {\n match member.location {\n Some(ref x) if x == \"uri\" => {\n Some(format!(\n \"{member_name} = input.{field_name}\",\n field_name = member_name,\n member_name = member_name,\n ))\n },\n Some(_) => None,\n None => None,\n }\n}\n\nfn generate_documentation(operation: &Operation) -> Option<String> {\n operation.documentation.as_ref().map(|docs| {\n format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\"))\n })\n}\n\nfn generate_ok_response(operation: &Operation, output_type: &str) -> String {\n if operation.output.is_some() {\n format!(\"Ok(serde_json::from_str::<{}>(&body).unwrap())\", output_type)\n } else {\n \"Ok(())\".to_owned()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix whitespace issue, add semicolon support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lidar working and positioned to drive<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary annotations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Relay socket stuff initial commit<commit_after>use std::net::TcpStream;\n\nstruct WeechatRelay{\n host: String,\n port: i32,\n password: String,\n reconnect_on_error: bool,\n stream: TcpStream,\n}\n\nimpl WeechatRelay {\n fn new(host: String, port: i32, password: String) -> WeechatRelay {\n let addr = format!(\"{}:{}\", host, port);\n let stream = TcpStream::connect(&*addr).unwrap();\n WeechatRelay {\n host: host,\n port: port,\n password: password,\n reconnect_on_error: false,\n stream: stream\n }\n }\n}\n\nfn main() {\n \/\/ TODO move these into a conf file somewhere\n let host = 
String::from(\"weechat.vimalloc.com\");\n let port = 8001;\n let password = String::from(\"porter2pears\");\n let relay = WeechatRelay::new(host, port, password);\n println!(\"Connecting to {}:{}\", relay.host, relay.port);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>wrap the world<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Interactions with the file system.\n\nuse std::collections::HashMap;\nuse std::io::{self, Read, Write};\nuse std::fmt;\nuse std::fs::File;\nuse std::path::{Path, PathBuf};\nuse std::str;\nuse std::time::SystemTime;\n\nuse xi_rpc::RemoteError;\nuse xi_rope::Rope;\n\nuse tabs::BufferId;\n\n#[cfg(feature = \"notify\")]\nuse tabs::OPEN_FILE_EVENT_TOKEN;\n#[cfg(feature = \"notify\")]\nuse watcher::FileWatcher;\n\nconst UTF8_BOM: &str = \"\\u{feff}\";\n\n\/\/\/ Tracks all state related to open files.\npub struct FileManager {\n open_files: HashMap<PathBuf, BufferId>,\n file_info: HashMap<BufferId, FileInfo>,\n \/\/\/ A monitor of filesystem events, for things like reloading changed files.\n #[cfg(feature = \"notify\")]\n watcher: FileWatcher,\n}\n\n#[derive(Debug)]\npub struct FileInfo {\n pub encoding: CharacterEncoding,\n pub path: PathBuf,\n pub mod_time: Option<SystemTime>,\n pub has_changed: bool,\n}\n\npub enum FileError {\n Io(io::Error),\n UnknownEncoding,\n HasChanged,\n}\n\n#[derive(Debug, Clone, Copy)]\npub enum CharacterEncoding {\n Utf8,\n Utf8WithBom\n}\n\nimpl FileManager {\n #[cfg(feature = \"notify\")]\n pub fn new(watcher: FileWatcher) -> Self {\n FileManager {\n open_files: HashMap::new(),\n file_info: HashMap::new(),\n watcher,\n }\n }\n\n #[cfg(not(feature = \"notify\"))]\n pub fn new() -> Self {\n FileManager {\n open_files: HashMap::new(),\n file_info: HashMap::new(),\n }\n }\n\n #[cfg(feature = \"notify\")]\n pub fn watcher(&mut self) -> &mut FileWatcher {\n &mut self.watcher\n }\n\n pub fn get_info(&self, id: BufferId) -> Option<&FileInfo> {\n self.file_info.get(&id)\n }\n\n pub fn get_editor(&self, path: &Path) -> Option<BufferId> {\n self.open_files.get(path).map(|id| *id)\n }\n\n \/\/\/ Returns `true` if this file is open and has changed on disk.\n \/\/\/ This state is stashed.\n pub fn check_file(&mut self, path: &Path, id: BufferId) -> bool {\n if let Some(info) = self.file_info.get_mut(&id) {\n let mod_t = get_mod_time(path);\n if mod_t != info.mod_time {\n info.has_changed = true\n }\n return info.has_changed;\n }\n false\n }\n\n pub fn open(&mut self, path: &Path, id: BufferId)\n -> Result<Rope, FileError>\n {\n let (rope, info) = if path.exists() {\n try_load_file(path)?\n } else {\n new_for_path(path)\n };\n\n self.open_files.insert(path.to_owned(), id);\n if self.file_info.insert(id, info).is_none() {\n #[cfg(feature = \"notify\")]\n self.watcher.watch(path, false, OPEN_FILE_EVENT_TOKEN);\n }\n Ok(rope)\n }\n\n pub fn close(&mut self, id: BufferId) {\n if let Some(info) = 
self.file_info.remove(&id) {\n self.open_files.remove(&info.path);\n #[cfg(feature = \"notify\")]\n self.watcher.unwatch(&info.path, OPEN_FILE_EVENT_TOKEN);\n }\n }\n\n pub fn save(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n let is_existing = self.file_info.contains_key(&id);\n if is_existing {\n self.save_existing(path, text, id)\n } else {\n self.save_new(path, text, id)\n }\n }\n\n fn save_new(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n try_save(path, text, CharacterEncoding::Utf8)?;\n let info = FileInfo {\n encoding: CharacterEncoding::Utf8,\n path: path.to_owned(),\n mod_time: get_mod_time(path),\n has_changed: false,\n };\n self.open_files.insert(path.to_owned(), id);\n self.file_info.insert(id, info);\n #[cfg(feature = \"notify\")]\n self.watcher.watch(path, false, OPEN_FILE_EVENT_TOKEN);\n Ok(())\n }\n\n fn save_existing(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n let prev_path = self.file_info.get(&id).unwrap().path.clone();\n if prev_path != path {\n self.save_new(path, text, id)?;\n self.open_files.remove(&prev_path);\n #[cfg(feature = \"notify\")]\n self.watcher.unwatch(&prev_path, OPEN_FILE_EVENT_TOKEN);\n } else if self.file_info.get(&id).unwrap().has_changed {\n return Err(FileError::HasChanged);\n } else {\n let encoding = self.file_info.get(&id).unwrap().encoding;\n try_save(path, text, encoding)?;\n self.file_info.get_mut(&id).unwrap()\n .mod_time = get_mod_time(path);\n }\n Ok(())\n }\n}\n\n\/\/\/ We allow 'opening' paths that don't exist\nfn new_for_path<P: AsRef<Path>>(path: P) -> (Rope, FileInfo) {\n let info = FileInfo {\n encoding: CharacterEncoding::Utf8,\n mod_time: None,\n path: path.as_ref().to_owned(),\n has_changed: false,\n };\n\n (\"\".into(), info)\n}\n\nfn try_load_file<P>(path: P) -> Result<(Rope, FileInfo), FileError>\nwhere P: AsRef<Path>\n{\n \/\/ TODO: support for non-utf8\n \/\/ it's arguable that the rope crate should have file loading functionality\n let mut f = File::open(path.as_ref())?;\n let mod_time = f.metadata()?.modified().ok();\n let mut bytes = Vec::new();\n f.read_to_end(&mut bytes)?;\n\n let encoding = CharacterEncoding::guess(&bytes);\n let rope = try_decode(bytes, encoding)?;\n let info = FileInfo {\n encoding,\n mod_time,\n path: path.as_ref().to_owned(),\n has_changed: false,\n };\n Ok((rope, info))\n}\n\nfn try_save(path: &Path, text: &Rope, encoding: CharacterEncoding)\n -> io::Result<()>\n{\n let mut f = File::create(path)?;\n match encoding {\n CharacterEncoding::Utf8WithBom => f.write_all(UTF8_BOM.as_bytes())?,\n CharacterEncoding::Utf8 => (),\n }\n\n for chunk in text.iter_chunks(0, text.len()) {\n f.write_all(chunk.as_bytes())?;\n }\n Ok(())\n}\n\nfn try_decode(bytes: Vec<u8>,\n encoding: CharacterEncoding) -> Result<Rope, FileError> {\n match encoding {\n CharacterEncoding::Utf8 =>\n Ok(Rope::from(str::from_utf8(&bytes)?)),\n CharacterEncoding::Utf8WithBom => {\n let s = String::from_utf8(bytes).map_err(|e| e.utf8_error())?;\n Ok(Rope::from(&s[UTF8_BOM.len()..]))\n }\n }\n}\n\nimpl CharacterEncoding {\n fn guess(s: &[u8]) -> Self {\n if s.starts_with(UTF8_BOM.as_bytes()) {\n CharacterEncoding::Utf8WithBom\n } else {\n CharacterEncoding::Utf8\n }\n }\n}\n\n\/\/\/ Returns the modification timestamp for the file at a given path,\n\/\/\/ if present.\nfn get_mod_time<P>(path: P) -> Option<SystemTime>\nwhere P: AsRef<Path>\n{\n File::open(path)\n .and_then(|f| f.metadata())\n .and_then(|meta| meta.modified())\n 
.ok()\n}\n\nimpl From<io::Error> for FileError {\n fn from(src: io::Error) -> FileError {\n FileError::Io(src)\n }\n}\n\nimpl From<str::Utf8Error> for FileError {\n fn from(_: str::Utf8Error) -> FileError {\n FileError::UnknownEncoding\n }\n}\n\nimpl From<FileError> for RemoteError {\n fn from(_src: FileError) -> RemoteError {\n \/\/TODO: when we migrate to using the failure crate for error handling,\n \/\/ this should return a better message\n RemoteError::custom(5, \"failed to load file\", None)\n }\n}\n\nimpl fmt::Display for FileError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n &FileError::Io(ref e) => write!(f, \"{}\", e),\n &FileError::UnknownEncoding => write!(f, \"Error decoding file\"),\n &FileError::HasChanged => write!(f, \"File has changed on disk. \\\n Please save elsewhere and reload the file.\"),\n }\n }\n}\n\n<commit_msg>Create file on open if necessary<commit_after>\/\/ Copyright 2018 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Interactions with the file system.\n\nuse std::collections::HashMap;\nuse std::io::{self, Read, Write};\nuse std::fmt;\nuse std::fs::File;\nuse std::path::{Path, PathBuf};\nuse std::str;\nuse std::time::SystemTime;\n\nuse xi_rpc::RemoteError;\nuse xi_rope::Rope;\n\nuse tabs::BufferId;\n\n#[cfg(feature = \"notify\")]\nuse tabs::OPEN_FILE_EVENT_TOKEN;\n#[cfg(feature = \"notify\")]\nuse watcher::FileWatcher;\n\nconst UTF8_BOM: &str = \"\\u{feff}\";\n\n\/\/\/ Tracks all state related to open files.\npub struct FileManager {\n open_files: HashMap<PathBuf, BufferId>,\n file_info: HashMap<BufferId, FileInfo>,\n \/\/\/ A monitor of filesystem events, for things like reloading changed files.\n #[cfg(feature = \"notify\")]\n watcher: FileWatcher,\n}\n\n#[derive(Debug)]\npub struct FileInfo {\n pub encoding: CharacterEncoding,\n pub path: PathBuf,\n pub mod_time: Option<SystemTime>,\n pub has_changed: bool,\n}\n\npub enum FileError {\n Io(io::Error),\n UnknownEncoding,\n HasChanged,\n}\n\n#[derive(Debug, Clone, Copy)]\npub enum CharacterEncoding {\n Utf8,\n Utf8WithBom\n}\n\nimpl FileManager {\n #[cfg(feature = \"notify\")]\n pub fn new(watcher: FileWatcher) -> Self {\n FileManager {\n open_files: HashMap::new(),\n file_info: HashMap::new(),\n watcher,\n }\n }\n\n #[cfg(not(feature = \"notify\"))]\n pub fn new() -> Self {\n FileManager {\n open_files: HashMap::new(),\n file_info: HashMap::new(),\n }\n }\n\n #[cfg(feature = \"notify\")]\n pub fn watcher(&mut self) -> &mut FileWatcher {\n &mut self.watcher\n }\n\n pub fn get_info(&self, id: BufferId) -> Option<&FileInfo> {\n self.file_info.get(&id)\n }\n\n pub fn get_editor(&self, path: &Path) -> Option<BufferId> {\n self.open_files.get(path).map(|id| *id)\n }\n\n \/\/\/ Returns `true` if this file is open and has changed on disk.\n \/\/\/ This state is stashed.\n pub fn check_file(&mut self, path: &Path, id: BufferId) -> bool {\n if let Some(info) = self.file_info.get_mut(&id) {\n let mod_t = 
get_mod_time(path);\n if mod_t != info.mod_time {\n info.has_changed = true\n }\n return info.has_changed;\n }\n false\n }\n\n pub fn open(&mut self, path: &Path, id: BufferId)\n -> Result<Rope, FileError>\n {\n if !path.exists() {\n let _ = File::create(path)?;\n }\n\n let (rope, info) = try_load_file(path)?;\n\n self.open_files.insert(path.to_owned(), id);\n if self.file_info.insert(id, info).is_none() {\n #[cfg(feature = \"notify\")]\n self.watcher.watch(path, false, OPEN_FILE_EVENT_TOKEN);\n }\n Ok(rope)\n }\n\n pub fn close(&mut self, id: BufferId) {\n if let Some(info) = self.file_info.remove(&id) {\n self.open_files.remove(&info.path);\n #[cfg(feature = \"notify\")]\n self.watcher.unwatch(&info.path, OPEN_FILE_EVENT_TOKEN);\n }\n }\n\n pub fn save(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n let is_existing = self.file_info.contains_key(&id);\n if is_existing {\n self.save_existing(path, text, id)\n } else {\n self.save_new(path, text, id)\n }\n }\n\n fn save_new(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n try_save(path, text, CharacterEncoding::Utf8)?;\n let info = FileInfo {\n encoding: CharacterEncoding::Utf8,\n path: path.to_owned(),\n mod_time: get_mod_time(path),\n has_changed: false,\n };\n self.open_files.insert(path.to_owned(), id);\n self.file_info.insert(id, info);\n #[cfg(feature = \"notify\")]\n self.watcher.watch(path, false, OPEN_FILE_EVENT_TOKEN);\n Ok(())\n }\n\n fn save_existing(&mut self, path: &Path, text: &Rope, id: BufferId)\n -> Result<(), FileError>\n {\n let prev_path = self.file_info.get(&id).unwrap().path.clone();\n if prev_path != path {\n self.save_new(path, text, id)?;\n self.open_files.remove(&prev_path);\n #[cfg(feature = \"notify\")]\n self.watcher.unwatch(&prev_path, OPEN_FILE_EVENT_TOKEN);\n } else if self.file_info.get(&id).unwrap().has_changed {\n return Err(FileError::HasChanged);\n } else {\n let encoding = self.file_info.get(&id).unwrap().encoding;\n try_save(path, text, encoding)?;\n self.file_info.get_mut(&id).unwrap()\n .mod_time = get_mod_time(path);\n }\n Ok(())\n }\n}\n\nfn try_load_file<P>(path: P) -> Result<(Rope, FileInfo), FileError>\nwhere P: AsRef<Path>\n{\n \/\/ TODO: support for non-utf8\n \/\/ it's arguable that the rope crate should have file loading functionality\n let mut f = File::open(path.as_ref())?;\n let mod_time = f.metadata()?.modified().ok();\n let mut bytes = Vec::new();\n f.read_to_end(&mut bytes)?;\n\n let encoding = CharacterEncoding::guess(&bytes);\n let rope = try_decode(bytes, encoding)?;\n let info = FileInfo {\n encoding,\n mod_time,\n path: path.as_ref().to_owned(),\n has_changed: false,\n };\n Ok((rope, info))\n}\n\nfn try_save(path: &Path, text: &Rope, encoding: CharacterEncoding)\n -> io::Result<()>\n{\n let mut f = File::create(path)?;\n match encoding {\n CharacterEncoding::Utf8WithBom => f.write_all(UTF8_BOM.as_bytes())?,\n CharacterEncoding::Utf8 => (),\n }\n\n for chunk in text.iter_chunks(0, text.len()) {\n f.write_all(chunk.as_bytes())?;\n }\n Ok(())\n}\n\nfn try_decode(bytes: Vec<u8>,\n encoding: CharacterEncoding) -> Result<Rope, FileError> {\n match encoding {\n CharacterEncoding::Utf8 =>\n Ok(Rope::from(str::from_utf8(&bytes)?)),\n CharacterEncoding::Utf8WithBom => {\n let s = String::from_utf8(bytes).map_err(|e| e.utf8_error())?;\n Ok(Rope::from(&s[UTF8_BOM.len()..]))\n }\n }\n}\n\nimpl CharacterEncoding {\n fn guess(s: &[u8]) -> Self {\n if s.starts_with(UTF8_BOM.as_bytes()) {\n CharacterEncoding::Utf8WithBom\n } 
else {\n CharacterEncoding::Utf8\n }\n }\n}\n\n\/\/\/ Returns the modification timestamp for the file at a given path,\n\/\/\/ if present.\nfn get_mod_time<P>(path: P) -> Option<SystemTime>\nwhere P: AsRef<Path>\n{\n File::open(path)\n .and_then(|f| f.metadata())\n .and_then(|meta| meta.modified())\n .ok()\n}\n\nimpl From<io::Error> for FileError {\n fn from(src: io::Error) -> FileError {\n FileError::Io(src)\n }\n}\n\nimpl From<str::Utf8Error> for FileError {\n fn from(_: str::Utf8Error) -> FileError {\n FileError::UnknownEncoding\n }\n}\n\nimpl From<FileError> for RemoteError {\n fn from(_src: FileError) -> RemoteError {\n \/\/TODO: when we migrate to using the failure crate for error handling,\n \/\/ this should return a better message\n RemoteError::custom(5, \"failed to load file\", None)\n }\n}\n\nimpl fmt::Display for FileError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self {\n &FileError::Io(ref e) => write!(f, \"{}\", e),\n &FileError::UnknownEncoding => write!(f, \"Error decoding file\"),\n &FileError::HasChanged => write!(f, \"File has changed on disk. \\\n Please save elsewhere and reload the file.\"),\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Support inheriting jobserver fd for external subcommands<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nextern crate semver;\nextern crate clap;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate log;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagmail;\nextern crate libimagerror;\nextern crate libimagutil;\nextern crate libimagref;\n\nuse libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};\nuse libimagmail::mail::Mail;\nuse libimagref::reference::Ref;\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagutil::debug_result::*;\nuse libimagutil::info_result::*;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-mail\",\n &version!()[..],\n \"Mail collection tool\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call {}\", name);\n match name {\n \"import-mail\" => import_mail(&rt),\n \"list\" => list(&rt),\n \"mail-store\" => mail_store(&rt),\n _ => debug!(\"Unknown command\") \/\/ More error handling\n }\n });\n}\n\nfn import_mail(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"import-mail\").unwrap();\n let path = scmd.value_of(\"path\").unwrap(); \/\/ enforced by clap\n\n Mail::import_from_path(rt.store(), path)\n .map_err_trace()\n .map_info_str(\"Ok\");\n}\n\nfn list(rt: &Runtime) {\n use libimagmail::error::MailErrorKind as MEK;\n use libimagmail::error::MapErrInto;\n\n let scmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let do_check_dead = scmd.is_present(\"check-dead\");\n let do_check_changed = scmd.is_present(\"check-changed\");\n let do_check_changed_content = scmd.is_present(\"check-changed-content\");\n let do_check_changed_permiss = scmd.is_present(\"check-changed-permissions\");\n let store = rt.store();\n\n let iter = match store.retrieve_for_module(\"ref\") {\n Ok(iter) => iter.filter_map(|id| {\n Ref::get(store, id)\n .map_err_into(MEK::RefHandlingError)\n .and_then(|rf| Mail::from_ref(rf))\n .map_err_trace()\n .ok()\n }),\n Err(e) => trace_error_exit(&e, 1),\n };\n\n fn list_mail(m: Mail) {\n unimplemented!()\n }\n\n \/\/ TODO: Implement lister type in libimagmail for this\n for mail in iter {\n list_mail(mail)\n }\n}\n\nfn mail_store(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"mail-store\").unwrap();\n error!(\"This feature is currently not implemented.\");\n unimplemented!()\n}\n\n<commit_msg>Implement list_mail() helper fn<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nextern crate semver;\nextern crate clap;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate log;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagmail;\nextern crate libimagerror;\nextern crate libimagutil;\nextern crate libimagref;\n\nuse libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};\nuse libimagmail::mail::Mail;\nuse libimagref::reference::Ref;\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagutil::debug_result::*;\nuse libimagutil::info_result::*;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-mail\",\n &version!()[..],\n \"Mail collection tool\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call {}\", name);\n match name {\n \"import-mail\" => import_mail(&rt),\n \"list\" => list(&rt),\n \"mail-store\" => mail_store(&rt),\n _ => debug!(\"Unknown command\") \/\/ More error handling\n }\n });\n}\n\nfn import_mail(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"import-mail\").unwrap();\n let path = scmd.value_of(\"path\").unwrap(); \/\/ enforced by clap\n\n Mail::import_from_path(rt.store(), path)\n .map_err_trace()\n .map_info_str(\"Ok\");\n}\n\nfn list(rt: &Runtime) {\n use libimagmail::error::MailErrorKind as MEK;\n use libimagmail::error::MapErrInto;\n\n let scmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let do_check_dead = scmd.is_present(\"check-dead\");\n let do_check_changed = scmd.is_present(\"check-changed\");\n let do_check_changed_content = scmd.is_present(\"check-changed-content\");\n let do_check_changed_permiss = scmd.is_present(\"check-changed-permissions\");\n let store = rt.store();\n\n let iter = match store.retrieve_for_module(\"ref\") {\n Ok(iter) => iter.filter_map(|id| {\n Ref::get(store, id)\n .map_err_into(MEK::RefHandlingError)\n .and_then(|rf| Mail::from_ref(rf))\n .map_err_trace()\n .ok()\n }),\n Err(e) => trace_error_exit(&e, 1),\n };\n\n fn list_mail(m: Mail) {\n let id = match m.get_message_id() {\n Ok(Some(f)) => f,\n Ok(None) => \"<no id>\".to_owned(),\n Err(e) => {\n trace_error(&e);\n \"<error>\".to_owned()\n },\n };\n\n let from = match m.get_from() {\n Ok(Some(f)) => f,\n Ok(None) => \"<no from>\".to_owned(),\n Err(e) => {\n trace_error(&e);\n \"<error>\".to_owned()\n },\n };\n\n let to = match m.get_to() {\n Ok(Some(f)) => f,\n Ok(None) => \"<no to>\".to_owned(),\n Err(e) => {\n trace_error(&e);\n \"<error>\".to_owned()\n },\n };\n\n let subject = match m.get_subject() {\n Ok(Some(f)) => f,\n Ok(None) => \"<no subject>\".to_owned(),\n Err(e) => {\n trace_error(&e);\n \"<error>\".to_owned()\n },\n };\n\n println!(\"Mail: {id}\\n\\tFrom: {from}\\n\\tTo: {to}\\n\\t{subj}\\n\",\n from = from,\n id = id,\n subj = subject,\n to = to\n );\n }\n\n \/\/ TODO: Implement lister type in libimagmail for this\n for mail in iter {\n list_mail(mail)\n }\n}\n\nfn mail_store(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"mail-store\").unwrap();\n error!(\"This feature is currently not implemented.\");\n unimplemented!()\n}\n\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate 
serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::import;\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagutil::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n\n\n let scmd = rt.cli().subcommand_name();\n match scmd {\n Some(\"tw-hook\") => {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n if let Ok(ttasks) = import(stdin()) {\n for ttask in ttasks {\n println!(\"{}\", match serde_json::ser::to_string(&ttask) {\n Ok(val) => val,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n });\n match ttask.into_filelockentry(rt.store()) {\n Ok(val) => val,\n Err(e) => {\n trace_error(&e);\n error!(\"{}\", e);\n return;\n }\n };\n }\n }\n else {\n error!(\"No usable input\");\n return;\n }\n }\n else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n for rline in stdin.lock().lines() {\n let mut line = match rline {\n Ok(l) => l,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n };\n line.insert(0, '[');\n line.push(']');\n if counter % 2 == 1 {\n if let Ok(ttasks) = import(line.as_bytes()) {\n for ttask in ttasks {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n println!(\"{}\", match serde_json::ser::to_string(&ttask) {\n Ok(val) => val,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n });\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match libimagtodo::delete::delete(rt.store(), *ttask.uuid()) {\n Ok(_) => { }\n Err(e) => {\n trace_error(&e);\n error!(\"{}\", e);\n return;\n }\n }\n }\n _ => {\n }\n }\n }\n }\n else {\n error!(\"No usable input\");\n }\n }\n counter += 1;\n }\n }\n else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n },\n Some(\"exec\") => {\n let subcmd = rt.cli().subcommand_matches(\"exec\").unwrap();\n let mut args = Vec::new();\n if let Some(exec_string) = subcmd.values_of(\"command\") {\n for e in exec_string {\n args.push(e);\n }\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn().unwrap_or_else(|e| {\n panic!(\"failed to execute taskwarrior: {}\", e);\n });\n\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to ececute: {}\", e);\n });\n println!(\"{}\", outstring);\n } else {\n panic!(\"faild to execute: You need to exec --command\");\n }\n }\n Some(\"list\") => {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match libimagtodo::read::get_todo_iterator(rt.store()) {\n \/\/let iter = match rt.store().retrieve_for_module(\"todo\/taskwarrior\") {\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n Ok(val) => val,\n };\n for task in iter {\n match task {\n Ok(val) => {\n \/\/let val = libimagtodo::task::Task::new(fle);\n \/\/println!(\"{:#?}\", val.flentry);\n let uuid = match val.flentry.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n error!(\"{}\", e);\n continue;\n }\n };\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{}\", uuid));\n args.push(format!(\"{}\", \"information\"));\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn()\n .unwrap_or_else(|e| {\n error!(\"{}\", e);\n panic!(\"failed\");\n });\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to ececute: {}\", e);\n });\n println!(\"{}\", outstring);\n }\n else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n error!(\"{}\", e);\n continue;\n }\n }\n }\n }\n _ => unimplemented!(),\n }\n }\n\n<commit_msg>add-hook working<commit_after>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\nuse std::io::BufRead;\n\nuse task_hookrs::import::{import, import_task, import_tasks};\n\nuse libimagrt::runtime::Runtime;\nuse libimagtodo::task::IntoTask;\nuse libimagutil::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn 
main() {\n\n let name = \"imag-todo\";\n let version = &version!()[..];\n let about = \"Interface with taskwarrior\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n\n\n let scmd = rt.cli().subcommand_name();\n match scmd {\n Some(\"tw-hook\") => {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let mut stdin = stdin.lock();\n let mut line = String::new();\n match stdin.read_line(&mut line) {\n Ok(_) => { }\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n };\n if let Ok(ttask) = import_task(&line.as_str()) {\n print!(\"{}\", match serde_json::ser::to_string(&ttask) {\n Ok(val) => val,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n });\n match ttask.into_filelockentry(rt.store()) {\n Ok(val) => val,\n Err(e) => {\n trace_error(&e);\n error!(\"{}\", e);\n return;\n }\n };\n }\n else {\n error!(\"No usable input\");\n return;\n }\n }\n else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let mut counter = 0;\n let stdin = stdin();\n for rline in stdin.lock().lines() {\n let mut line = match rline {\n Ok(l) => l,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n };\n line.insert(0, '[');\n line.push(']');\n if counter % 2 == 1 {\n if let Ok(ttasks) = import(line.as_bytes()) {\n for ttask in ttasks {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n println!(\"{}\", match serde_json::ser::to_string(&ttask) {\n Ok(val) => val,\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n });\n match ttask.status() {\n &task_hookrs::status::TaskStatus::Deleted => {\n match libimagtodo::delete::delete(rt.store(), *ttask.uuid()) {\n Ok(_) => { }\n Err(e) => {\n trace_error(&e);\n error!(\"{}\", e);\n return;\n }\n }\n }\n _ => {\n }\n } \/\/ end match ttask.status()\n } \/\/ end for\n }\n else {\n error!(\"No usable input\");\n }\n }\n counter += 1;\n }\n }\n else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n },\n Some(\"exec\") => {\n let subcmd = rt.cli().subcommand_matches(\"exec\").unwrap();\n let mut args = Vec::new();\n if let Some(exec_string) = subcmd.values_of(\"command\") {\n for e in exec_string {\n args.push(e);\n }\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn().unwrap_or_else(|e| {\n panic!(\"failed to execute taskwarrior: {}\", e);\n });\n\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to ececute: {}\", e);\n });\n println!(\"{}\", outstring);\n } else {\n panic!(\"faild to execute: You need to exec --command\");\n }\n }\n Some(\"list\") => {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let mut args = Vec::new();\n let verbose = subcmd.is_present(\"verbose\");\n let iter = match libimagtodo::read::get_todo_iterator(rt.store()) {\n \/\/let iter = match rt.store().retrieve_for_module(\"todo\/taskwarrior\") {\n Err(e) => {\n error!(\"{}\", e);\n return;\n }\n 
Ok(val) => val,\n };\n for task in iter {\n match task {\n Ok(val) => {\n \/\/let val = libimagtodo::task::Task::new(fle);\n \/\/println!(\"{:#?}\", val.flentry);\n let uuid = match val.flentry.get_header().read(\"todo.uuid\") {\n Ok(Some(u)) => u,\n Ok(None) => continue,\n Err(e) => {\n error!(\"{}\", e);\n continue;\n }\n };\n if verbose {\n args.clear();\n args.push(format!(\"uuid:{}\", uuid));\n args.push(format!(\"{}\", \"information\"));\n let tw_process = Command::new(\"task\").stdin(Stdio::null()).args(&args).spawn()\n .unwrap_or_else(|e| {\n error!(\"{}\", e);\n panic!(\"failed\");\n });\n let output = tw_process.wait_with_output().unwrap_or_else(|e| {\n panic!(\"failed to unwrap output: {}\", e);\n });\n let outstring = String::from_utf8(output.stdout).unwrap_or_else(|e| {\n panic!(\"failed to ececute: {}\", e);\n });\n println!(\"{}\", outstring);\n }\n else {\n println!(\"{}\", match uuid {\n toml::Value::String(s) => s,\n _ => {\n error!(\"Unexpected type for todo.uuid: {}\", uuid);\n continue;\n },\n });\n }\n }\n Err(e) => {\n error!(\"{}\", e);\n continue;\n }\n } \/\/ end match task\n } \/\/ end for\n }\n _ => unimplemented!(),\n } \/\/ end match scmd\n} \/\/ end main\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>docs(examples): add iterator example<commit_after>use std::str;\nuse rocks::prelude::*;\n\nfn main() {\n let opt = Options::default().map_db_options(|db_opt| db_opt.create_if_missing(true));\n let db = DB::open(opt, \".\/data\").unwrap();\n\n let mut wb = WriteBatch::new();\n\n for i in 0..1000 {\n wb.put(format!(\"{:03}-key\", i).as_bytes(), format!(\"value-{:03}\", i).as_bytes());\n }\n\n println!(\"wb => {:?}\", wb);\n\n let _ = db.write(WriteOptions::default_instance(), wb).unwrap();\n\n println!(\"got => {:?}\", db.get(ReadOptions::default_instance(), b\"key-042\"));\n\n for (key, value) in db.new_iterator(ReadOptions::default_instance()) {\n unsafe {\n println!(\"{:?} => {:?}\", str::from_utf8_unchecked(key), str::from_utf8_unchecked(value));\n }\n }\n println!(\"done!\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Format alu...() test to pass rustfmt (part 3: code removal)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added basic deserialization tests for complex types except union<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Check in initial rough heap sketch<commit_after>\/\/! Baphomet is a persistent slab allocator.\n\/\/! 
It doesn't know about\n\nuse std::{\n convert::TryFrom,\n fs::File,\n io::Result,\n mem::{transmute, MaybeUninit},\n path::Path,\n sync::{\n atomic::{AtomicU32, Ordering::Acquire},\n Arc,\n },\n};\n\nuse crossbeam_epoch::pin;\n\nuse crate::stack::Stack;\n\npub type SlabId = u8;\npub type SlabIdx = u32;\n\n#[derive(Debug, Clone, Copy)]\npub struct HeapId(u64);\n\nimpl HeapId {\n pub fn decompose(&self) -> (SlabId, SlabIdx) {\n const IDX_MASK: u64 = (1 << 32) - 1;\n let slab_id = u8::try_from((self.0 >> 32).trailing_zeros()).unwrap();\n let slab_idx = u32::try_from(self.0 & IDX_MASK).unwrap();\n (slab_id, slab_idx)\n }\n\n pub fn compose(slab_id: SlabId, slab_idx: SlabIdx) -> HeapId {\n let slab = 1 << (32 + slab_id as u64);\n let heap_id = slab | slab_idx as u64;\n HeapId(heap_id)\n }\n}\n\nfn slab_id_to_size(slab_id: u8) -> u64 {\n 1 << (16 + slab_id as u64)\n}\n\nfn size_to_slab_id(size: u64) -> SlabId {\n \/\/ find the power of 2 that is at least 64k\n let normalized_size = std::cmp::max(64 * 1024, size.next_power_of_two());\n\n \/\/ drop the lowest 16 bits\n let rebased_size = normalized_size >> 16;\n\n u8::try_from(rebased_size.trailing_zeros()).unwrap()\n}\n\npub struct Reservation {\n slab_free: Arc<Stack<u32>>,\n completed: bool,\n file: File,\n idx: u32,\n offset: u64,\n size: u64,\n \/\/ a callback that is executed\n \/\/ when the reservation is filled\n \/\/ and stabilized\n stability_cb: Option<Box<dyn FnOnce(SlabId)>>,\n}\n\nimpl Drop for Reservation {\n fn drop(&mut self) {\n if !self.completed {\n self.slab_free.push(self.idx, &pin());\n }\n }\n}\n\nimpl Reservation {\n pub fn complete(mut self, data: &[u8]) -> Result<HeapId> {\n assert_eq!(data.len() as u64, self.size);\n\n use std::os::unix::fs::FileExt;\n self.file.write_at(data, self.offset)?;\n self.file.sync_all()?;\n\n \/\/ if this is not reached due to an IO error,\n \/\/ the offset will be returned to the Slab in Drop\n self.completed = true;\n\n let slab_id = size_to_slab_id(self.size);\n\n if let Some(stability_cb) = self.stability_cb.take() {\n (stability_cb)(slab_id);\n } else {\n unreachable!();\n }\n\n Ok(HeapId::compose(slab_id, self.idx))\n }\n\n pub fn abort(self) {\n \/\/ actual logic in Drop\n }\n}\n\npub struct Heap {\n \/\/ each slab stores\n \/\/ items that are double\n \/\/ the size of the previous,\n \/\/ ranging from 64k in the\n \/\/ smallest slab to 2^48 in\n \/\/ the last.\n slabs: [Slab; 32],\n}\n\nimpl Heap {\n pub fn start<P: AsRef<Path>>(p: P) -> Result<Heap> {\n let mut slabs: [MaybeUninit<Slab>; 32] = unsafe { std::mem::zeroed() };\n\n for slab_id in 0..32 {\n let slab = Slab::start(&p, slab_id)?;\n slabs[slab_id as usize] = MaybeUninit::new(slab);\n }\n\n Ok(Heap { slabs: unsafe { transmute(slabs) } })\n }\n\n pub fn read(&self, heap_id: HeapId) -> Result<Vec<u8>> {\n let (slab_id, slab_idx) = heap_id.decompose();\n self.slabs[slab_id as usize].read(slab_idx)\n }\n\n pub fn free(&self, heap_id: HeapId) -> Result<()> {\n let (slab_id, slab_idx) = heap_id.decompose();\n self.slabs[slab_id as usize].free(slab_idx)\n }\n\n pub fn reserve(\n &self,\n size: u64,\n stability_cb: Box<dyn FnOnce(SlabId)>,\n ) -> Reservation {\n assert!(size < 1 << 48);\n let slab_id = size_to_slab_id(size);\n self.slabs[slab_id as usize].reserve(size, stability_cb)\n }\n}\n\nstruct Slab {\n file: File,\n bs: u64,\n tip: AtomicU32,\n free: Arc<Stack<u32>>,\n}\n\nimpl Slab {\n pub fn start<P: AsRef<Path>>(directory: P, slab_id: u8) -> Result<Slab> {\n let bs = slab_id_to_size(slab_id);\n let free = 
Arc::new(Stack::default());\n let file = File::open(directory.as_ref().join(format!(\"{}\", slab_id)))?;\n let tip =\n AtomicU32::new(u32::try_from(file.metadata()?.len() \/ bs).unwrap());\n\n Ok(Slab { file, bs, tip, free })\n }\n\n fn read(&self, slab_idx: SlabIdx) -> Result<Vec<u8>> {\n let mut ret = vec![0; usize::try_from(self.bs).unwrap()];\n\n let offset = slab_idx as u64 * self.bs;\n\n use std::os::unix::fs::FileExt;\n self.file.read_exact_at(&mut ret, offset)?;\n\n Ok(ret)\n }\n\n fn reserve(\n &self,\n size: u64,\n stability_cb: Box<dyn FnOnce(SlabId)>,\n ) -> Reservation {\n let idx = if let Some(idx) = self.free.pop(&pin()) {\n idx\n } else {\n self.tip.fetch_add(1, Acquire)\n };\n\n let offset = idx as u64 * self.bs;\n\n Reservation {\n slab_free: self.free.clone(),\n completed: false,\n file: self.file.try_clone().unwrap(),\n idx,\n offset,\n size,\n stability_cb: Some(stability_cb),\n }\n }\n\n fn free(&self, idx: u32) -> Result<()> {\n self.punch_hole(idx)?;\n self.free.push(idx, &pin());\n Ok(())\n }\n\n fn punch_hole(&self, idx: u32) -> Result<()> {\n let offset = idx as u64 * self.bs;\n\n #[cfg(target_os = \"linux\")]\n {\n use libc::{fallocate, FALLOC_FL_KEEP_SIZE, FALLOC_FL_PUNCH_HOLE};\n use std::os::unix::io::AsRawFd;\n\n let mode = FALLOC_FL_KEEP_SIZE | FALLOC_FL_PUNCH_HOLE;\n\n let fd = self.file.as_raw_fd();\n\n let ret =\n unsafe { fallocate(fd, mode, offset as i64, self.bs as i64) };\n\n if ret != 0 {\n return Err(std::io::Error::last_os_error());\n }\n }\n Ok(())\n }\n}\n\nfn main() {\n println!(\"Hello, world!\");\n}\n<|endoftext|>"} {"text":"<commit_before>use super::package::*;\nuse super::executor::*;\n\nuse alloc::boxed::Box;\n\nuse core::{cmp, ptr, mem};\n\nuse common::event::{self, Event, EventOption, KeyEvent, MouseEvent};\nuse common::resource::{NoneResource, Resource, ResourceType, URL, VecResource};\nuse common::scheduler::*;\nuse common::string::{String, ToString};\nuse common::vec::Vec;\n\nuse graphics::bmp::*;\nuse graphics::color::Color;\nuse graphics::display::Display;\nuse graphics::point::Point;\nuse graphics::size::Size;\nuse graphics::window::Window;\n\nuse programs::common::SessionItem;\n\npub struct Session {\n pub display: Display,\n pub background: BMPFile,\n pub cursor: BMPFile,\n pub mouse_point: Point,\n last_mouse_event: MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub packages: Vec<Box<Package>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize,\n}\n\nimpl Session {\n pub fn new() -> Self {\n unsafe {\n Session {\n display: Display::root(),\n background: BMPFile::new(),\n cursor: BMPFile::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n items: Vec::new(),\n packages: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: event::REDRAW_ALL,\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window) {\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window) {\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows.remove(i);\n }\n }\n\n i = 0;\n while 
i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows_ordered.remove(i);\n }\n }\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource> {\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n } else {\n list = scheme;\n }\n }\n }\n\n box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8())\n } else {\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n box NoneResource\n }\n }\n\n fn on_key(&mut self, key_event: KeyEvent) {\n if self.windows.len() > 0 {\n match self.windows.get(self.windows.len() - 1) {\n Option::Some(window_ptr) => {\n unsafe {\n (**window_ptr).on_key(key_event);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n }\n Option::None => (),\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent) {\n let mut catcher = -1;\n\n if mouse_event.y >= self.display.height as isize - 32 {\n if mouse_event.left_button && !self.last_mouse_event.left_button {\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n if mouse_event.x >= x &&\n mouse_event.x < x + package.icon.size.width as isize {\n execute(&package.binary, &package.url, &Vec::new());\n }\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n for j in 0..self.windows.len() {\n match self.windows.get(j) {\n Option::Some(catcher_window_ptr) =>\n if catcher_window_ptr == window_ptr {\n unsafe {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n } else {\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n }\n break;\n },\n Option::None => break,\n }\n }\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n break;\n }\n x += w as isize;\n }\n }\n } else {\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i) {\n Option::Some(window_ptr) => unsafe {\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n },\n Option::None => (),\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize) {\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => (),\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self) {\n if self.redraw > event::REDRAW_NONE {\n \/\/if self.redraw >= REDRAW_ALL {\n self.display.set(Color::new(64, 64, 64));\n if 
self.background.data.len() > 0 {\n self.background.draw(&self.display,\n Point::new((self.display.width as isize -\n self.background.size.width as isize) \/\n 2,\n (self.display.height as isize -\n self.background.size.height as isize) \/\n 2));\n }\n\n for i in 0..self.windows.len() {\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n }\n Option::None => (),\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32),\n Size::new(self.display.width, 32),\n Color::alpha(0, 0, 0, 128));\n\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n let y = self.display.height as isize - package.icon.size.height as isize;\n if self.mouse_point.y >= y && self.mouse_point.x >= x &&\n self.mouse_point.x < x + package.icon.size.width as isize {\n self.display.rect(Point::new(x, y),\n package.icon.size,\n Color::alpha(128, 128, 128, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n self.display\n .char(Point::new(c_x, y - 16), c, Color::new(255, 255, 255));\n c_x += 8;\n }\n }\n package.icon.draw(&self.display, Point::new(x, y));\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32),\n Size::new(w, 32),\n (**window_ptr).border_color);\n x += 4;\n\n for i in 0..chars {\n let c = (**window_ptr).title[i];\n if c != '\\0' {\n self.display.char(Point::new(x, self.display.height as isize - 24),\n c,\n (**window_ptr).title_color);\n }\n x += 8;\n }\n x += 8;\n }\n\n if self.cursor.data.len() > 0 {\n self.display.image_alpha(self.mouse_point,\n self.cursor.data.as_ptr(),\n self.cursor.size);\n } else {\n self.display.char(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9),\n 'X',\n Color::new(255, 255, 255));\n }\n \/\/}\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n \/*\n if self.cursor.data.len() > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data.as_ptr(), self.cursor.size);\n } else {\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n *\/\n\n self.redraw = event::REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event) {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) =>\n self.redraw = cmp::max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n\n if url_string.ends_with(\".bin\".to_string()) {\n execute(&URL::from_string(&url_string),\n &URL::new(),\n &Vec::new());\n } else {\n for package in self.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if url_string.ends_with(accept.substr(1, accept.len() - 1)) {\n accepted = true;\n break;\n }\n }\n if accepted {\n let mut args: Vec<String> = Vec::new();\n args.push(url_string.clone());\n execute(&package.binary, &package.url, &args);\n break;\n }\n }\n }\n }\n _ => (),\n }\n }\n}\n<commit_msg>Use default color to make loading time less noticeable<commit_after>use 
super::package::*;\nuse super::executor::*;\n\nuse alloc::boxed::Box;\n\nuse core::{cmp, ptr, mem};\n\nuse common::event::{self, Event, EventOption, KeyEvent, MouseEvent};\nuse common::resource::{NoneResource, Resource, ResourceType, URL, VecResource};\nuse common::scheduler::*;\nuse common::string::{String, ToString};\nuse common::vec::Vec;\n\nuse graphics::bmp::*;\nuse graphics::color::Color;\nuse graphics::display::Display;\nuse graphics::point::Point;\nuse graphics::size::Size;\nuse graphics::window::Window;\n\nuse programs::common::SessionItem;\n\npub struct Session {\n pub display: Display,\n pub background: BMPFile,\n pub cursor: BMPFile,\n pub mouse_point: Point,\n last_mouse_event: MouseEvent,\n pub items: Vec<Box<SessionItem>>,\n pub packages: Vec<Box<Package>>,\n pub windows: Vec<*mut Window>,\n pub windows_ordered: Vec<*mut Window>,\n pub redraw: usize,\n}\n\nimpl Session {\n pub fn new() -> Self {\n unsafe {\n Session {\n display: Display::root(),\n background: BMPFile::new(),\n cursor: BMPFile::new(),\n mouse_point: Point::new(0, 0),\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n items: Vec::new(),\n packages: Vec::new(),\n windows: Vec::new(),\n windows_ordered: Vec::new(),\n redraw: event::REDRAW_ALL,\n }\n }\n }\n\n pub unsafe fn add_window(&mut self, add_window_ptr: *mut Window) {\n self.windows.push(add_window_ptr);\n self.windows_ordered.push(add_window_ptr);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn remove_window(&mut self, remove_window_ptr: *mut Window) {\n let mut i = 0;\n while i < self.windows.len() {\n let mut remove = false;\n\n match self.windows.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows.remove(i);\n }\n }\n\n i = 0;\n while i < self.windows_ordered.len() {\n let mut remove = false;\n\n match self.windows_ordered.get(i) {\n Option::Some(window_ptr) => if *window_ptr == remove_window_ptr {\n remove = true;\n } else {\n i += 1;\n },\n Option::None => break,\n }\n\n if remove {\n self.windows_ordered.remove(i);\n }\n }\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n\n pub unsafe fn on_irq(&mut self, irq: u8) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_irq(irq);\n end_no_ints(reenable);\n }\n }\n\n pub unsafe fn on_poll(&mut self) {\n for item in self.items.iter() {\n let reenable = start_no_ints();\n item.on_poll();\n end_no_ints(reenable);\n }\n }\n\n pub fn open(&self, url: &URL) -> Box<Resource> {\n if url.scheme().len() == 0 {\n let mut list = String::new();\n\n for item in self.items.iter() {\n let scheme = item.scheme();\n if scheme.len() > 0 {\n if list.len() > 0 {\n list = list + \"\\n\" + scheme;\n } else {\n list = scheme;\n }\n }\n }\n\n box VecResource::new(URL::new(), ResourceType::Dir, list.to_utf8())\n } else {\n for item in self.items.iter() {\n if item.scheme() == url.scheme() {\n return item.open(url);\n }\n }\n box NoneResource\n }\n }\n\n fn on_key(&mut self, key_event: KeyEvent) {\n if self.windows.len() > 0 {\n match self.windows.get(self.windows.len() - 1) {\n Option::Some(window_ptr) => {\n unsafe {\n (**window_ptr).on_key(key_event);\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n }\n Option::None => (),\n }\n }\n }\n\n fn on_mouse(&mut self, mouse_event: MouseEvent) {\n let mut catcher = -1;\n\n if mouse_event.y >= 
self.display.height as isize - 32 {\n if mouse_event.left_button && !self.last_mouse_event.left_button {\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n if mouse_event.x >= x &&\n mouse_event.x < x + package.icon.size.width as isize {\n execute(&package.binary, &package.url, &Vec::new());\n }\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n if mouse_event.x >= x && mouse_event.x < x + w as isize {\n for j in 0..self.windows.len() {\n match self.windows.get(j) {\n Option::Some(catcher_window_ptr) =>\n if catcher_window_ptr == window_ptr {\n unsafe {\n if j == self.windows.len() - 1 {\n (**window_ptr).minimized = !(**window_ptr).minimized;\n } else {\n catcher = j as isize;\n (**window_ptr).minimized = false;\n }\n }\n break;\n },\n Option::None => break,\n }\n }\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n break;\n }\n x += w as isize;\n }\n }\n } else {\n for reverse_i in 0..self.windows.len() {\n let i = self.windows.len() - 1 - reverse_i;\n match self.windows.get(i) {\n Option::Some(window_ptr) => unsafe {\n if (**window_ptr).on_mouse(mouse_event, catcher < 0) {\n catcher = i as isize;\n\n self.redraw = cmp::max(self.redraw, event::REDRAW_ALL);\n }\n },\n Option::None => (),\n }\n }\n }\n\n if catcher >= 0 && catcher < self.windows.len() as isize - 1 {\n match self.windows.remove(catcher as usize) {\n Option::Some(window_ptr) => self.windows.push(window_ptr),\n Option::None => (),\n }\n }\n\n self.last_mouse_event = mouse_event;\n }\n\n pub unsafe fn redraw(&mut self) {\n if self.redraw > event::REDRAW_NONE {\n \/\/if self.redraw >= REDRAW_ALL {\n self.display.set(Color::new(75, 163, 253));\n if self.background.data.len() > 0 {\n self.background.draw(&self.display,\n Point::new((self.display.width as isize -\n self.background.size.width as isize) \/\n 2,\n (self.display.height as isize -\n self.background.size.height as isize) \/\n 2));\n }\n\n for i in 0..self.windows.len() {\n match self.windows.get(i) {\n Option::Some(window_ptr) => {\n (**window_ptr).focused = i == self.windows.len() - 1;\n (**window_ptr).draw(&self.display);\n }\n Option::None => (),\n }\n }\n\n self.display.rect(Point::new(0, self.display.height as isize - 32),\n Size::new(self.display.width, 32),\n Color::alpha(0, 0, 0, 128));\n\n let mut x = 0;\n for package in self.packages.iter() {\n if package.icon.data.len() > 0 {\n let y = self.display.height as isize - package.icon.size.height as isize;\n if self.mouse_point.y >= y && self.mouse_point.x >= x &&\n self.mouse_point.x < x + package.icon.size.width as isize {\n self.display.rect(Point::new(x, y),\n package.icon.size,\n Color::alpha(128, 128, 128, 128));\n\n let mut c_x = x;\n for c in package.name.chars() {\n self.display\n .char(Point::new(c_x, y - 16), c, Color::new(255, 255, 255));\n c_x += 8;\n }\n }\n package.icon.draw(&self.display, Point::new(x, y));\n x += package.icon.size.width as isize;\n }\n }\n\n let mut chars = 32;\n while chars > 4 &&\n (x as usize + (chars * 8 + 3 * 4) * self.windows.len()) >\n self.display.width + 32 {\n chars -= 1;\n }\n\n x += 4;\n for window_ptr in self.windows_ordered.iter() {\n let w = (chars*8 + 2*4) as usize;\n self.display.rect(Point::new(x, self.display.height as isize - 32),\n Size::new(w, 32),\n 
(**window_ptr).border_color);\n x += 4;\n\n for i in 0..chars {\n let c = (**window_ptr).title[i];\n if c != '\\0' {\n self.display.char(Point::new(x, self.display.height as isize - 24),\n c,\n (**window_ptr).title_color);\n }\n x += 8;\n }\n x += 8;\n }\n\n if self.cursor.data.len() > 0 {\n self.display.image_alpha(self.mouse_point,\n self.cursor.data.as_ptr(),\n self.cursor.size);\n } else {\n self.display.char(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9),\n 'X',\n Color::new(255, 255, 255));\n }\n \/\/}\n\n let reenable = start_no_ints();\n\n self.display.flip();\n\n \/*\n if self.cursor.data.len() > 0 {\n self.display.image_alpha_onscreen(self.mouse_point, self.cursor.data.as_ptr(), self.cursor.size);\n } else {\n self.display.char_onscreen(Point::new(self.mouse_point.x - 3, self.mouse_point.y - 9), 'X', Color::new(255, 255, 255));\n }\n *\/\n\n self.redraw = event::REDRAW_NONE;\n\n end_no_ints(reenable);\n }\n }\n\n pub fn event(&mut self, event: Event) {\n match event.to_option() {\n EventOption::Mouse(mouse_event) => self.on_mouse(mouse_event),\n EventOption::Key(key_event) => self.on_key(key_event),\n EventOption::Redraw(redraw_event) =>\n self.redraw = cmp::max(self.redraw, redraw_event.redraw),\n EventOption::Open(open_event) => {\n let url_string = open_event.url_string;\n\n if url_string.ends_with(\".bin\".to_string()) {\n execute(&URL::from_string(&url_string),\n &URL::new(),\n &Vec::new());\n } else {\n for package in self.packages.iter() {\n let mut accepted = false;\n for accept in package.accepts.iter() {\n if url_string.ends_with(accept.substr(1, accept.len() - 1)) {\n accepted = true;\n break;\n }\n }\n if accepted {\n let mut args: Vec<String> = Vec::new();\n args.push(url_string.clone());\n execute(&package.binary, &package.url, &args);\n break;\n }\n }\n }\n }\n _ => (),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for Duration API changes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove DepFn stuff<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added lib.rs<commit_after>#![crate_type = \"lib\"]\n#![crate_name = \"fsnotify\"]\n\n\/\/ Required by operations.rs\n#![feature(hash)]\n#[macro_use] extern crate bitflags;\n\nmod fsnotify;\npub use self::fsnotify::*;<|endoftext|>"} {"text":"<commit_before><commit_msg>Id trait returns<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>master: Print out kernel section flags in binary rather than hex.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Assert that empty strings don't trip the line iterator<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>lifecycle work<commit_after><|endoftext|>"} {"text":"<commit_before>use std::default::Default;\nuse ::internal::prelude::*;\n\nmacro_rules! colour {\n ($struct_:ident; $(#[$attr:meta] $name:ident, $val:expr;)*) => {\n impl $struct_ {\n $(\n #[$attr]\n pub fn $name() -> Colour {\n Colour::new($val)\n }\n )*\n }\n }\n}\n\n\/\/\/ A utility struct to help with working with the basic representation of a\n\/\/\/ colour. This is particularly useful when working with a [`Role`]'s colour,\n\/\/\/ as the API works with an integer value instead of an RGB value.\n\/\/\/\n\/\/\/ Instances can be created by using the struct's associated functions. 
These\n\/\/\/ produce presets equivilant to those found in the official client's colour\n\/\/\/ picker.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Passing in a role's colour, and then retrieving its green component\n\/\/\/ via [`get_g`]:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ \/\/ assuming a `role` has already been bound\n\/\/\/\n\/\/\/ let colour = Colour::new(role.colour);\n\/\/\/ let green = colour.get_g();\n\/\/\/\n\/\/\/ println!(\"The green component is: {}\", green);\n\/\/\/ ```\n\/\/\/\n\/\/\/ Creating an instance with the [`dark_teal`] presets:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ let colour = Colour::dark_teal();\n\/\/\/\n\/\/\/ assert_eq!(colour.get_tuple(), (17, 128, 106));\n\/\/\/ ```\n\/\/\/\n\/\/\/ [`Role`]: ..\/model\/struct.Role.html\n\/\/\/ [`dark_teal`]: #method.dark_teal\n\/\/\/ [`get_g`]: #method.get_g\n#[derive(Clone, Copy, Debug)]\npub struct Colour {\n \/\/\/ The raw inner 32-bit unsigned integer value of this Colour. This is\n \/\/\/ worked with to generate values such as the red component value.\n pub value: u32,\n}\n\nimpl Colour {\n \/\/\/ Generates a new Colour with the given integer value set.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Create a new Colour, and then ensure that its inner value is equivilant\n \/\/\/ to a specific RGB value, retrieved via [`get_tuple`]:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::new(6573123);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_tuple`]: #method.get_tuple\n pub fn new(value: u32) -> Colour {\n Colour {\n value: value,\n }\n }\n\n \/\/\/ Generates a new Colour from an RGB value, creating an inner u32\n \/\/\/ representation.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Creating a `Colour` via its RGB values will set its inner u32 correctly:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert!(Colour::from_rgb(255, 0, 0).value == 0xFF0000);\n \/\/\/ assert!(Colour::from_rgb(217, 23, 211).value == 0xD917D3);\n \/\/\/ ```\n \/\/\/\n \/\/\/ And you can then retrieve those same RGB values via its methods:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::from_rgb(217, 45, 215);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_r(), 217);\n \/\/\/ assert_eq!(colour.get_g(), 45);\n \/\/\/ assert_eq!(colour.get_b(), 215);\n \/\/\/ assert_eq!(colour.get_tuple(), (217, 45, 215));\n \/\/\/ ```\n pub fn from_rgb(r: u8, g: u8, b: u8) -> Colour {\n let mut uint = r as u32;\n uint = (uint << 8) | (g as u32);\n uint = (uint << 8) | (b as u32);\n\n Colour::new(uint)\n }\n\n #[doc(hidden)]\n pub fn decode(value: Value) -> Result<Colour> {\n match value {\n Value::U64(v) => Ok(Colour::new(v as u32)),\n Value::I64(v) => Ok(Colour::new(v as u32)),\n other => Err(Error::Decode(\"Expected valid colour\", other)),\n }\n }\n\n \/\/\/ Returns the red RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_r(), 100);\n \/\/\/ ```\n pub fn get_r(&self) -> u8 {\n ((self.value >> 16) & 255) as u8\n }\n\n \/\/\/ Returns the green RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_g(), 76);\n \/\/\/ ```\n pub fn get_g(&self) -> 
u8 {\n ((self.value >> 8) & 255) as u8\n }\n\n \/\/\/ Returns the blue RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_b(), 67);\n pub fn get_b(&self) -> u8 {\n (self.value & 255) as u8\n }\n\n \/\/\/ Returns a tuple of the red, green, and blue components of this Colour.\n \/\/\/\n \/\/\/ This is equivilant to creating a tuple with the return values of\n \/\/\/ [`get_r`], [`get_g`], and [`get_b`].\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_r`]: #method.get_r\n \/\/\/ [`get_g`]: #method.get_g\n \/\/\/ [`get_b`]: #method.get_b\n pub fn get_tuple(&self) -> (u8, u8, u8) {\n (self.get_r(), self.get_g(), self.get_b())\n }\n}\n\nimpl From<i32> for Colour {\n \/\/\/ Constructs a Colour from a i32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ This is useful when providing hex values.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(0xDEA584).get_tuple(), (222, 165, 132));\n \/\/\/ ```\n fn from(value: i32) -> Colour {\n Colour::new(value as u32)\n }\n}\n\nimpl From<u32> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u32).get_r(), 100);\n \/\/\/ ```\n fn from(value: u32) -> Colour {\n Colour::new(value)\n }\n}\n\nimpl From<u64> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u64).get_r(), 100);\n \/\/\/ ```\n fn from(value: u64) -> Colour {\n Colour::new(value as u32)\n }\n}\n\ncolour! 
{\n Colour;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(111, 198, 226)`.\n blitz_blue, 0x6fc6e2;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(52, 152, 219)`.\n blue, 0x3498DB;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(114, 137, 218)`.\n blurple, 0x7289DA;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(32, 102, 148)`.\n dark_blue, 0x206694;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(31, 139, 76)`.\n dark_green, 0x1F8B4C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(194, 124, 14)`.\n dark_gold, 0xC27C0E;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(96, 125, 139)`.\n dark_grey, 0x607D8B;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(173, 20, 87)`.\n dark_magenta, 0xAD1457;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(168, 67, 0)`.\n dark_orange, 0xA84300;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(113, 54, 138)`.\n dark_purple, 0x71368A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(153, 45, 34)`.\n dark_red, 0x992D22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(17, 128, 106)`.\n dark_teal, 0x11806A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(84, 110, 122)`.\n darker_grey, 0x546E7A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(241, 196, 15)`.\n gold, 0xF1C40F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(186, 218, 85)`.\n kerbal, 0xBADA55;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(151, 156, 159)`.\n light_grey, 0x979C9F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(149, 165, 166)`.\n lighter_grey, 0x95A5A6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(233, 30, 99)`.\n magenta, 0xE91E63;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(230, 126, 34)`.\n orange, 0xE67E22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(155, 89, 182)`.\n purple, 0x9B59B6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(231, 76, 60)`.\n red, 0xE74C3C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(26, 188, 156)`.\n teal, 0x1ABC9C;\n}\n\nimpl Default for Colour {\n \/\/\/ Creates a default value for a `Colour`, setting the inner value to `0`.\n \/\/\/ This is equivilant to setting the RGB value to `(0, 0, 0)`.\n fn default() -> Colour {\n Colour {\n value: 0,\n }\n }\n}\n<commit_msg>Capitalize the hex value for blitz_blue<commit_after>use std::default::Default;\nuse ::internal::prelude::*;\n\nmacro_rules! colour {\n ($struct_:ident; $(#[$attr:meta] $name:ident, $val:expr;)*) => {\n impl $struct_ {\n $(\n #[$attr]\n pub fn $name() -> Colour {\n Colour::new($val)\n }\n )*\n }\n }\n}\n\n\/\/\/ A utility struct to help with working with the basic representation of a\n\/\/\/ colour. This is particularly useful when working with a [`Role`]'s colour,\n\/\/\/ as the API works with an integer value instead of an RGB value.\n\/\/\/\n\/\/\/ Instances can be created by using the struct's associated functions. 
These\n\/\/\/ produce presets equivilant to those found in the official client's colour\n\/\/\/ picker.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Passing in a role's colour, and then retrieving its green component\n\/\/\/ via [`get_g`]:\n\/\/\/\n\/\/\/ ```rust,ignore\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ \/\/ assuming a `role` has already been bound\n\/\/\/\n\/\/\/ let colour = Colour::new(role.colour);\n\/\/\/ let green = colour.get_g();\n\/\/\/\n\/\/\/ println!(\"The green component is: {}\", green);\n\/\/\/ ```\n\/\/\/\n\/\/\/ Creating an instance with the [`dark_teal`] presets:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ use serenity::utils::Colour;\n\/\/\/\n\/\/\/ let colour = Colour::dark_teal();\n\/\/\/\n\/\/\/ assert_eq!(colour.get_tuple(), (17, 128, 106));\n\/\/\/ ```\n\/\/\/\n\/\/\/ [`Role`]: ..\/model\/struct.Role.html\n\/\/\/ [`dark_teal`]: #method.dark_teal\n\/\/\/ [`get_g`]: #method.get_g\n#[derive(Clone, Copy, Debug)]\npub struct Colour {\n \/\/\/ The raw inner 32-bit unsigned integer value of this Colour. This is\n \/\/\/ worked with to generate values such as the red component value.\n pub value: u32,\n}\n\nimpl Colour {\n \/\/\/ Generates a new Colour with the given integer value set.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Create a new Colour, and then ensure that its inner value is equivilant\n \/\/\/ to a specific RGB value, retrieved via [`get_tuple`]:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::new(6573123);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_tuple`]: #method.get_tuple\n pub fn new(value: u32) -> Colour {\n Colour {\n value: value,\n }\n }\n\n \/\/\/ Generates a new Colour from an RGB value, creating an inner u32\n \/\/\/ representation.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ Creating a `Colour` via its RGB values will set its inner u32 correctly:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert!(Colour::from_rgb(255, 0, 0).value == 0xFF0000);\n \/\/\/ assert!(Colour::from_rgb(217, 23, 211).value == 0xD917D3);\n \/\/\/ ```\n \/\/\/\n \/\/\/ And you can then retrieve those same RGB values via its methods:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ let colour = Colour::from_rgb(217, 45, 215);\n \/\/\/\n \/\/\/ assert_eq!(colour.get_r(), 217);\n \/\/\/ assert_eq!(colour.get_g(), 45);\n \/\/\/ assert_eq!(colour.get_b(), 215);\n \/\/\/ assert_eq!(colour.get_tuple(), (217, 45, 215));\n \/\/\/ ```\n pub fn from_rgb(r: u8, g: u8, b: u8) -> Colour {\n let mut uint = r as u32;\n uint = (uint << 8) | (g as u32);\n uint = (uint << 8) | (b as u32);\n\n Colour::new(uint)\n }\n\n #[doc(hidden)]\n pub fn decode(value: Value) -> Result<Colour> {\n match value {\n Value::U64(v) => Ok(Colour::new(v as u32)),\n Value::I64(v) => Ok(Colour::new(v as u32)),\n other => Err(Error::Decode(\"Expected valid colour\", other)),\n }\n }\n\n \/\/\/ Returns the red RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_r(), 100);\n \/\/\/ ```\n pub fn get_r(&self) -> u8 {\n ((self.value >> 16) & 255) as u8\n }\n\n \/\/\/ Returns the green RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_g(), 76);\n \/\/\/ ```\n pub fn get_g(&self) -> 
u8 {\n ((self.value >> 8) & 255) as u8\n }\n\n \/\/\/ Returns the blue RGB component of this Colour.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_b(), 67);\n pub fn get_b(&self) -> u8 {\n (self.value & 255) as u8\n }\n\n \/\/\/ Returns a tuple of the red, green, and blue components of this Colour.\n \/\/\/\n \/\/\/ This is equivilant to creating a tuple with the return values of\n \/\/\/ [`get_r`], [`get_g`], and [`get_b`].\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::new(6573123).get_tuple(), (100, 76, 67));\n \/\/\/ ```\n \/\/\/\n \/\/\/ [`get_r`]: #method.get_r\n \/\/\/ [`get_g`]: #method.get_g\n \/\/\/ [`get_b`]: #method.get_b\n pub fn get_tuple(&self) -> (u8, u8, u8) {\n (self.get_r(), self.get_g(), self.get_b())\n }\n}\n\nimpl From<i32> for Colour {\n \/\/\/ Constructs a Colour from a i32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ This is useful when providing hex values.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(0xDEA584).get_tuple(), (222, 165, 132));\n \/\/\/ ```\n fn from(value: i32) -> Colour {\n Colour::new(value as u32)\n }\n}\n\nimpl From<u32> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u32).get_r(), 100);\n \/\/\/ ```\n fn from(value: u32) -> Colour {\n Colour::new(value)\n }\n}\n\nimpl From<u64> for Colour {\n \/\/\/ Constructs a Colour from a u32.\n \/\/\/\n \/\/\/ This is used for functions that accept `Into<Colour>`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use serenity::utils::Colour;\n \/\/\/\n \/\/\/ assert_eq!(Colour::from(6573123u64).get_r(), 100);\n \/\/\/ ```\n fn from(value: u64) -> Colour {\n Colour::new(value as u32)\n }\n}\n\ncolour! 
{\n Colour;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(111, 198, 226)`.\n blitz_blue, 0x6FC6E2;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(52, 152, 219)`.\n blue, 0x3498DB;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(114, 137, 218)`.\n blurple, 0x7289DA;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(32, 102, 148)`.\n dark_blue, 0x206694;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(31, 139, 76)`.\n dark_green, 0x1F8B4C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(194, 124, 14)`.\n dark_gold, 0xC27C0E;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(96, 125, 139)`.\n dark_grey, 0x607D8B;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(173, 20, 87)`.\n dark_magenta, 0xAD1457;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(168, 67, 0)`.\n dark_orange, 0xA84300;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(113, 54, 138)`.\n dark_purple, 0x71368A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(153, 45, 34)`.\n dark_red, 0x992D22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(17, 128, 106)`.\n dark_teal, 0x11806A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(84, 110, 122)`.\n darker_grey, 0x546E7A;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(241, 196, 15)`.\n gold, 0xF1C40F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(186, 218, 85)`.\n kerbal, 0xBADA55;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(151, 156, 159)`.\n light_grey, 0x979C9F;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(149, 165, 166)`.\n lighter_grey, 0x95A5A6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(233, 30, 99)`.\n magenta, 0xE91E63;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(230, 126, 34)`.\n orange, 0xE67E22;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(155, 89, 182)`.\n purple, 0x9B59B6;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(231, 76, 60)`.\n red, 0xE74C3C;\n \/\/\/ Creates a new `Colour`, setting its RGB value to `(26, 188, 156)`.\n teal, 0x1ABC9C;\n}\n\nimpl Default for Colour {\n \/\/\/ Creates a default value for a `Colour`, setting the inner value to `0`.\n \/\/\/ This is equivilant to setting the RGB value to `(0, 0, 0)`.\n fn default() -> Colour {\n Colour {\n value: 0,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added test to show motivation for modified TryFrom impl<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This test relies on `TryFrom` being auto impl for all `T: Into`\n\/\/ and `TryInto` being auto impl for all `U: TryFrom`\n\n\/\/ This test was added to show the motivation for doing this\n\/\/ over `TryFrom` being auto impl for all `T: From`\n\n#![feature(try_from, never_type)]\n\nuse std::convert::TryInto;\n\nstruct Foo<T> {\n t: T\n}\n\n\/*\n\/\/ This fails to compile due to coherence restrictions\n\/\/ as of rust version 1.32.x\nimpl<T> From<Foo<T>> for Box<T> {\n fn from(foo: Foo<T>) -> Box<T> {\n Box::new(foo.t)\n }\n}\n*\/\n\nimpl<T> Into<Box<T>> for Foo<T> {\n fn into(self) -> Box<T> {\n Box::new(self.t)\n }\n}\n\npub fn main() {\n let _: Result<Box<i32>, !> = Foo { t: 10 }.try_into();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add from_bytes test; unfinished<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Tiny clarification<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>use seconds to measure duration<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style: removes trailing whitespace<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added clone to git<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::num::{ParseIntError, ParseFloatError};\nuse std::str::Utf8Error;\n\n#[derive(Debug)]\npub enum OperationError {\n OverflowError,\n ValueError,\n UnknownKeyError,\n WrongTypeError,\n OutOfBoundsError,\n IOError(io::Error),\n}\n\nimpl fmt::Display for OperationError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.description().fmt(f)\n }\n}\n\nimpl Error for OperationError {\n fn description(&self) -> &str {\n return \"oops\";\n }\n}\n\nimpl From<Utf8Error> for OperationError {\n fn from(_: Utf8Error) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<ParseIntError> for OperationError {\n fn from(_: ParseIntError) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<ParseFloatError> for OperationError {\n fn from(_: ParseFloatError) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<io::Error> for OperationError {\n fn from(e: io::Error) -> OperationError { OperationError::IOError(e) }\n}\n<commit_msg>Update error for wrong key type<commit_after>use std::error::Error;\nuse std::fmt;\nuse std::io;\nuse std::num::{ParseIntError, ParseFloatError};\nuse std::str::Utf8Error;\n\n#[derive(Debug)]\npub enum OperationError {\n OverflowError,\n ValueError,\n UnknownKeyError,\n WrongTypeError,\n OutOfBoundsError,\n IOError(io::Error),\n}\n\nimpl fmt::Display for OperationError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.description().fmt(f)\n }\n}\n\nimpl Error for OperationError {\n fn description(&self) -> &str {\n match *self {\n OperationError::WrongTypeError => \"WRONGTYPE Operation against a key holding the wrong kind of value\",\n _ => \"ERR\",\n }\n }\n}\n\nimpl From<Utf8Error> for OperationError {\n fn from(_: Utf8Error) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<ParseIntError> for OperationError {\n fn from(_: ParseIntError) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<ParseFloatError> for OperationError {\n fn from(_: ParseFloatError) -> OperationError { OperationError::ValueError }\n}\n\nimpl From<io::Error> for OperationError {\n fn from(e: io::Error) -> OperationError { OperationError::IOError(e) }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Comment type fixed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Run `reach_max_unpack_size` test only on debug build<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse hir::def_id::DefId;\nuse infer::outlives::env::RegionBoundPairs;\nuse infer::{GenericKind, VerifyBound};\nuse traits;\nuse ty::subst::{Subst, Substs};\nuse ty::{self, Ty, TyCtxt};\n\n\/\/\/ The `TypeOutlives` struct has the job of \"lowering\" a `T: 'a`\n\/\/\/ obligation into a series of `'a: 'b` constraints and \"verifys\", as\n\/\/\/ described on the module comment. The final constraints are emitted\n\/\/\/ via a \"delegate\" of type `D` -- this is usually the `infcx`, which\n\/\/\/ accrues them into the `region_obligations` code, but for NLL we\n\/\/\/ use something else.\npub struct VerifyBoundCx<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_bound_pairs: &'cx RegionBoundPairs<'tcx>,\n implicit_region_bound: Option<ty::Region<'tcx>>,\n param_env: ty::ParamEnv<'tcx>,\n}\n\nimpl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {\n pub fn new(\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_bound_pairs: &'cx RegionBoundPairs<'tcx>,\n implicit_region_bound: Option<ty::Region<'tcx>>,\n param_env: ty::ParamEnv<'tcx>,\n ) -> Self {\n Self {\n tcx,\n region_bound_pairs,\n implicit_region_bound,\n param_env,\n }\n }\n\n \/\/\/ Returns a \"verify bound\" that encodes what we know about\n \/\/\/ `generic` and the regions it outlives.\n pub fn generic_bound(&self, generic: GenericKind<'tcx>) -> VerifyBound<'tcx> {\n match generic {\n GenericKind::Param(param_ty) => self.param_bound(param_ty),\n GenericKind::Projection(projection_ty) => self.projection_bound(projection_ty),\n }\n }\n\n fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {\n match ty.sty {\n ty::Param(p) => self.param_bound(p),\n ty::Projection(data) => self.projection_bound(data),\n _ => self.recursive_type_bound(ty),\n }\n }\n\n fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> {\n debug!(\"param_bound(param_ty={:?})\", param_ty);\n\n let mut param_bounds = self.declared_generic_bounds_from_env(GenericKind::Param(param_ty));\n\n \/\/ Add in the default bound of fn body that applies to all in\n \/\/ scope type parameters:\n param_bounds.extend(self.implicit_region_bound);\n\n VerifyBound::AnyRegion(param_bounds)\n }\n\n \/\/\/ Given a projection like `T::Item`, searches the environment\n \/\/\/ for where-clauses like `T::Item: 'a`. Returns the set of\n \/\/\/ regions `'a` that it finds.\n \/\/\/\n \/\/\/ This is an \"approximate\" check -- it may not find all\n \/\/\/ applicable bounds, and not all the bounds it returns can be\n \/\/\/ relied upon. In particular, this check ignores region\n \/\/\/ identity. 
So, for example, if we have `<T as\n \/\/\/ Trait<'0>>::Item` where `'0` is a region variable, and the\n \/\/\/ user has `<T as Trait<'a>>::Item: 'b` in the environment, then\n \/\/\/ the clause from the environment only applies if `'0 = 'a`,\n \/\/\/ which we don't know yet. But we would still include `'b` in\n \/\/\/ this list.\n pub fn projection_approx_declared_bounds_from_env(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n let projection_ty = GenericKind::Projection(projection_ty).to_ty(self.tcx);\n let erased_projection_ty = self.tcx.erase_regions(&projection_ty);\n self.declared_generic_bounds_from_env_with_compare_fn(\n |ty| if let ty::Projection(..) = ty.sty {\n let erased_ty = self.tcx.erase_regions(&ty);\n erased_ty == erased_projection_ty\n } else {\n false\n },\n )\n }\n\n \/\/\/ Searches the where clauses in scope for regions that\n \/\/\/ `projection_ty` is known to outlive. Currently requires an\n \/\/\/ exact match.\n pub fn projection_declared_bounds_from_trait(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n self.declared_projection_bounds_from_trait(projection_ty)\n }\n\n pub fn projection_bound(&self, projection_ty: ty::ProjectionTy<'tcx>) -> VerifyBound<'tcx> {\n debug!(\"projection_bound(projection_ty={:?})\", projection_ty);\n\n \/\/ Search the env for where clauses like `P: 'a`.\n let mut declared_bounds =\n self.declared_generic_bounds_from_env(GenericKind::Projection(projection_ty));\n\n \/\/ Extend with bounds that we can find from the trait.\n declared_bounds.extend(self.projection_declared_bounds_from_trait(projection_ty));\n\n debug!(\"projection_bound: declared_bounds = {:?}\", declared_bounds);\n\n \/\/ see the extensive comment in projection_must_outlive\n let ty = self.tcx\n .mk_projection(projection_ty.item_def_id, projection_ty.substs);\n let recursive_bound = self.recursive_type_bound(ty);\n\n VerifyBound::AnyRegion(declared_bounds).or(recursive_bound)\n }\n\n fn recursive_type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {\n let mut bounds = ty.walk_shallow()\n .map(|subty| self.type_bound(subty))\n .collect::<Vec<_>>();\n\n let mut regions = ty.regions();\n regions.retain(|r| !r.is_late_bound()); \/\/ ignore late-bound regions\n bounds.push(VerifyBound::AllRegions(regions));\n\n \/\/ remove bounds that must hold, since they are not interesting\n bounds.retain(|b| !b.must_hold());\n\n if bounds.len() == 1 {\n bounds.pop().unwrap()\n } else {\n VerifyBound::AllBounds(bounds)\n }\n }\n\n \/\/\/ Searches the environment for where-clauses like `G: 'a` where\n \/\/\/ `G` is either some type parameter `T` or a projection like\n \/\/\/ `T::Item`. Returns a vector of the `'a` bounds it can find.\n \/\/\/\n \/\/\/ This is a conservative check -- it may not find all applicable\n \/\/\/ bounds, but all the bounds it returns can be relied upon.\n fn declared_generic_bounds_from_env(\n &self,\n generic: GenericKind<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n let generic_ty = generic.to_ty(self.tcx);\n self.declared_generic_bounds_from_env_with_compare_fn(|ty| ty == generic_ty)\n }\n\n fn declared_generic_bounds_from_env_with_compare_fn(\n &self,\n compare_ty: impl Fn(Ty<'tcx>) -> bool,\n ) -> Vec<ty::Region<'tcx>> {\n let tcx = self.tcx;\n\n \/\/ To start, collect bounds from user environment. Note that\n \/\/ parameter environments are already elaborated, so we don't\n \/\/ have to worry about that. 
Comparing using `==` is a bit\n \/\/ dubious for projections, but it will work for simple cases\n \/\/ like `T` and `T::Item`. It may not work as well for things\n \/\/ like `<T as Foo<'a>>::Item`.\n let c_b = self.param_env.caller_bounds;\n let mut param_bounds = self.collect_outlives_from_predicate_list(&compare_ty, c_b);\n\n \/\/ Next, collect regions we scraped from the well-formedness\n \/\/ constraints in the fn signature. To do that, we walk the list\n \/\/ of known relations from the fn ctxt.\n \/\/\n \/\/ This is crucial because otherwise code like this fails:\n \/\/\n \/\/ fn foo<'a, A>(x: &'a A) { x.bar() }\n \/\/\n \/\/ The problem is that the type of `x` is `&'a A`. To be\n \/\/ well-formed, then, A must be lower-generic by `'a`, but we\n \/\/ don't know that this holds from first principles.\n for &(r, p) in self.region_bound_pairs {\n debug!(\n \"declared_generic_bounds_from_env_with_compare_fn: region_bound_pair = {:?}\",\n (r, p)\n );\n if compare_ty(p.to_ty(tcx)) {\n param_bounds.push(r);\n }\n }\n\n param_bounds\n }\n\n \/\/\/ Given a projection like `<T as Foo<'x>>::Bar`, returns any bounds\n \/\/\/ declared in the trait definition. For example, if the trait were\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a> {\n \/\/\/ type Bar: 'a;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ then this function would return `'x`. This is subject to the\n \/\/\/ limitations around higher-ranked bounds described in\n \/\/\/ `region_bounds_declared_on_associated_item`.\n fn declared_projection_bounds_from_trait(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n debug!(\"projection_bounds(projection_ty={:?})\", projection_ty);\n let mut bounds = self.region_bounds_declared_on_associated_item(projection_ty.item_def_id);\n for r in &mut bounds {\n *r = r.subst(self.tcx, projection_ty.substs);\n }\n bounds\n }\n\n \/\/\/ Given the def-id of an associated item, returns any region\n \/\/\/ bounds attached to that associated item from the trait definition.\n \/\/\/\n \/\/\/ For example:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a> {\n \/\/\/ type Bar: 'a;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ If we were given the def-id of `Foo::Bar`, we would return\n \/\/\/ `'a`. You could then apply the substitutions from the\n \/\/\/ projection to convert this into your namespace. This also\n \/\/\/ works if the user writes `where <Self as Foo<'a>>::Bar: 'a` on\n \/\/\/ the trait. 
In fact, it works by searching for just such a\n \/\/\/ where-clause.\n \/\/\/\n \/\/\/ It will not, however, work for higher-ranked bounds like:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a, 'b>\n \/\/\/ where for<'x> <Self as Foo<'x, 'b>>::Bar: 'x\n \/\/\/ {\n \/\/\/ type Bar;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ This is for simplicity, and because we are not really smart\n \/\/\/ enough to cope with such bounds anywhere.\n fn region_bounds_declared_on_associated_item(\n &self,\n assoc_item_def_id: DefId,\n ) -> Vec<ty::Region<'tcx>> {\n let tcx = self.tcx;\n let assoc_item = tcx.associated_item(assoc_item_def_id);\n let trait_def_id = assoc_item.container.assert_trait();\n let trait_predicates = tcx.predicates_of(trait_def_id);\n let identity_substs = Substs::identity_for_item(tcx, assoc_item_def_id);\n let identity_proj = tcx.mk_projection(assoc_item_def_id, identity_substs);\n self.collect_outlives_from_predicate_list(\n |ty| ty == identity_proj,\n traits::elaborate_predicates(tcx, trait_predicates.predicates),\n )\n }\n\n \/\/\/ Searches through a predicate list for a predicate `T: 'a`.\n \/\/\/\n \/\/\/ Careful: does not elaborate predicates, and just uses `==`\n \/\/\/ when comparing `ty` for equality, so `ty` must be something\n \/\/\/ that does not involve inference variables and where you\n \/\/\/ otherwise want a precise match.\n fn collect_outlives_from_predicate_list(\n &self,\n compare_ty: impl Fn(Ty<'tcx>) -> bool,\n predicates: impl IntoIterator<Item = impl AsRef<ty::Predicate<'tcx>>>,\n ) -> Vec<ty::Region<'tcx>> {\n predicates\n .into_iter()\n .filter_map(|p| p.as_ref().to_opt_type_outlives())\n .filter_map(|p| p.no_late_bound_regions())\n .filter(|p| compare_ty(p.0))\n .map(|p| p.1)\n .collect()\n }\n}\n<commit_msg>change to use impl Trait a bit<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse hir::def_id::DefId;\nuse infer::outlives::env::RegionBoundPairs;\nuse infer::{GenericKind, VerifyBound};\nuse traits;\nuse ty::subst::{Subst, Substs};\nuse ty::{self, Ty, TyCtxt};\n\n\/\/\/ The `TypeOutlives` struct has the job of \"lowering\" a `T: 'a`\n\/\/\/ obligation into a series of `'a: 'b` constraints and \"verifys\", as\n\/\/\/ described on the module comment. 
The final constraints are emitted\n\/\/\/ via a \"delegate\" of type `D` -- this is usually the `infcx`, which\n\/\/\/ accrues them into the `region_obligations` code, but for NLL we\n\/\/\/ use something else.\npub struct VerifyBoundCx<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_bound_pairs: &'cx RegionBoundPairs<'tcx>,\n implicit_region_bound: Option<ty::Region<'tcx>>,\n param_env: ty::ParamEnv<'tcx>,\n}\n\nimpl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {\n pub fn new(\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n region_bound_pairs: &'cx RegionBoundPairs<'tcx>,\n implicit_region_bound: Option<ty::Region<'tcx>>,\n param_env: ty::ParamEnv<'tcx>,\n ) -> Self {\n Self {\n tcx,\n region_bound_pairs,\n implicit_region_bound,\n param_env,\n }\n }\n\n \/\/\/ Returns a \"verify bound\" that encodes what we know about\n \/\/\/ `generic` and the regions it outlives.\n pub fn generic_bound(&self, generic: GenericKind<'tcx>) -> VerifyBound<'tcx> {\n match generic {\n GenericKind::Param(param_ty) => self.param_bound(param_ty),\n GenericKind::Projection(projection_ty) => self.projection_bound(projection_ty),\n }\n }\n\n fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {\n match ty.sty {\n ty::Param(p) => self.param_bound(p),\n ty::Projection(data) => self.projection_bound(data),\n _ => self.recursive_type_bound(ty),\n }\n }\n\n fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> {\n debug!(\"param_bound(param_ty={:?})\", param_ty);\n\n let mut param_bounds = self.declared_generic_bounds_from_env(GenericKind::Param(param_ty));\n\n \/\/ Add in the default bound of fn body that applies to all in\n \/\/ scope type parameters:\n param_bounds.extend(self.implicit_region_bound);\n\n VerifyBound::AnyRegion(param_bounds)\n }\n\n \/\/\/ Given a projection like `T::Item`, searches the environment\n \/\/\/ for where-clauses like `T::Item: 'a`. Returns the set of\n \/\/\/ regions `'a` that it finds.\n \/\/\/\n \/\/\/ This is an \"approximate\" check -- it may not find all\n \/\/\/ applicable bounds, and not all the bounds it returns can be\n \/\/\/ relied upon. In particular, this check ignores region\n \/\/\/ identity. So, for example, if we have `<T as\n \/\/\/ Trait<'0>>::Item` where `'0` is a region variable, and the\n \/\/\/ user has `<T as Trait<'a>>::Item: 'b` in the environment, then\n \/\/\/ the clause from the environment only applies if `'0 = 'a`,\n \/\/\/ which we don't know yet. But we would still include `'b` in\n \/\/\/ this list.\n pub fn projection_approx_declared_bounds_from_env(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n let projection_ty = GenericKind::Projection(projection_ty).to_ty(self.tcx);\n let erased_projection_ty = self.tcx.erase_regions(&projection_ty);\n self.declared_generic_bounds_from_env_with_compare_fn(|ty| {\n if let ty::Projection(..) = ty.sty {\n let erased_ty = self.tcx.erase_regions(&ty);\n erased_ty == erased_projection_ty\n } else {\n false\n }\n })\n }\n\n \/\/\/ Searches the where clauses in scope for regions that\n \/\/\/ `projection_ty` is known to outlive. 
Currently requires an\n \/\/\/ exact match.\n pub fn projection_declared_bounds_from_trait(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n self.declared_projection_bounds_from_trait(projection_ty)\n }\n\n pub fn projection_bound(&self, projection_ty: ty::ProjectionTy<'tcx>) -> VerifyBound<'tcx> {\n debug!(\"projection_bound(projection_ty={:?})\", projection_ty);\n\n \/\/ Search the env for where clauses like `P: 'a`.\n let mut declared_bounds =\n self.declared_generic_bounds_from_env(GenericKind::Projection(projection_ty));\n\n \/\/ Extend with bounds that we can find from the trait.\n declared_bounds.extend(self.projection_declared_bounds_from_trait(projection_ty));\n\n debug!(\"projection_bound: declared_bounds = {:?}\", declared_bounds);\n\n \/\/ see the extensive comment in projection_must_outlive\n let ty = self.tcx\n .mk_projection(projection_ty.item_def_id, projection_ty.substs);\n let recursive_bound = self.recursive_type_bound(ty);\n\n VerifyBound::AnyRegion(declared_bounds).or(recursive_bound)\n }\n\n fn recursive_type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {\n let mut bounds = ty.walk_shallow()\n .map(|subty| self.type_bound(subty))\n .collect::<Vec<_>>();\n\n let mut regions = ty.regions();\n regions.retain(|r| !r.is_late_bound()); \/\/ ignore late-bound regions\n bounds.push(VerifyBound::AllRegions(regions));\n\n \/\/ remove bounds that must hold, since they are not interesting\n bounds.retain(|b| !b.must_hold());\n\n if bounds.len() == 1 {\n bounds.pop().unwrap()\n } else {\n VerifyBound::AllBounds(bounds)\n }\n }\n\n \/\/\/ Searches the environment for where-clauses like `G: 'a` where\n \/\/\/ `G` is either some type parameter `T` or a projection like\n \/\/\/ `T::Item`. Returns a vector of the `'a` bounds it can find.\n \/\/\/\n \/\/\/ This is a conservative check -- it may not find all applicable\n \/\/\/ bounds, but all the bounds it returns can be relied upon.\n fn declared_generic_bounds_from_env(\n &self,\n generic: GenericKind<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n let generic_ty = generic.to_ty(self.tcx);\n self.declared_generic_bounds_from_env_with_compare_fn(|ty| ty == generic_ty)\n }\n\n fn declared_generic_bounds_from_env_with_compare_fn(\n &self,\n compare_ty: impl Fn(Ty<'tcx>) -> bool,\n ) -> Vec<ty::Region<'tcx>> {\n let tcx = self.tcx;\n\n \/\/ To start, collect bounds from user environment. Note that\n \/\/ parameter environments are already elaborated, so we don't\n \/\/ have to worry about that. Comparing using `==` is a bit\n \/\/ dubious for projections, but it will work for simple cases\n \/\/ like `T` and `T::Item`. It may not work as well for things\n \/\/ like `<T as Foo<'a>>::Item`.\n let c_b = self.param_env.caller_bounds;\n let param_bounds = self.collect_outlives_from_predicate_list(&compare_ty, c_b);\n\n \/\/ Next, collect regions we scraped from the well-formedness\n \/\/ constraints in the fn signature. To do that, we walk the list\n \/\/ of known relations from the fn ctxt.\n \/\/\n \/\/ This is crucial because otherwise code like this fails:\n \/\/\n \/\/ fn foo<'a, A>(x: &'a A) { x.bar() }\n \/\/\n \/\/ The problem is that the type of `x` is `&'a A`. 
To be\n \/\/ well-formed, then, A must be lower-generic by `'a`, but we\n \/\/ don't know that this holds from first principles.\n let from_region_bound_pairs = self.region_bound_pairs.iter().filter_map(|&(r, p)| {\n debug!(\n \"declared_generic_bounds_from_env_with_compare_fn: region_bound_pair = {:?}\",\n (r, p)\n );\n if compare_ty(p.to_ty(tcx)) {\n Some(r)\n } else {\n None\n }\n });\n\n param_bounds.chain(from_region_bound_pairs).collect()\n }\n\n \/\/\/ Given a projection like `<T as Foo<'x>>::Bar`, returns any bounds\n \/\/\/ declared in the trait definition. For example, if the trait were\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a> {\n \/\/\/ type Bar: 'a;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ then this function would return `'x`. This is subject to the\n \/\/\/ limitations around higher-ranked bounds described in\n \/\/\/ `region_bounds_declared_on_associated_item`.\n fn declared_projection_bounds_from_trait(\n &self,\n projection_ty: ty::ProjectionTy<'tcx>,\n ) -> Vec<ty::Region<'tcx>> {\n debug!(\"projection_bounds(projection_ty={:?})\", projection_ty);\n let mut bounds = self.region_bounds_declared_on_associated_item(projection_ty.item_def_id);\n for r in &mut bounds {\n *r = r.subst(self.tcx, projection_ty.substs);\n }\n bounds\n }\n\n \/\/\/ Given the def-id of an associated item, returns any region\n \/\/\/ bounds attached to that associated item from the trait definition.\n \/\/\/\n \/\/\/ For example:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a> {\n \/\/\/ type Bar: 'a;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ If we were given the def-id of `Foo::Bar`, we would return\n \/\/\/ `'a`. You could then apply the substitutions from the\n \/\/\/ projection to convert this into your namespace. This also\n \/\/\/ works if the user writes `where <Self as Foo<'a>>::Bar: 'a` on\n \/\/\/ the trait. 
In fact, it works by searching for just such a\n \/\/\/ where-clause.\n \/\/\/\n \/\/\/ It will not, however, work for higher-ranked bounds like:\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ trait Foo<'a, 'b>\n \/\/\/ where for<'x> <Self as Foo<'x, 'b>>::Bar: 'x\n \/\/\/ {\n \/\/\/ type Bar;\n \/\/\/ }\n \/\/\/ ```\n \/\/\/\n \/\/\/ This is for simplicity, and because we are not really smart\n \/\/\/ enough to cope with such bounds anywhere.\n fn region_bounds_declared_on_associated_item(\n &self,\n assoc_item_def_id: DefId,\n ) -> Vec<ty::Region<'tcx>> {\n let tcx = self.tcx;\n let assoc_item = tcx.associated_item(assoc_item_def_id);\n let trait_def_id = assoc_item.container.assert_trait();\n let trait_predicates = tcx.predicates_of(trait_def_id);\n let identity_substs = Substs::identity_for_item(tcx, assoc_item_def_id);\n let identity_proj = tcx.mk_projection(assoc_item_def_id, identity_substs);\n self.collect_outlives_from_predicate_list(\n move |ty| ty == identity_proj,\n traits::elaborate_predicates(tcx, trait_predicates.predicates),\n ).collect()\n }\n\n \/\/\/ Searches through a predicate list for a predicate `T: 'a`.\n \/\/\/\n \/\/\/ Careful: does not elaborate predicates, and just uses `==`\n \/\/\/ when comparing `ty` for equality, so `ty` must be something\n \/\/\/ that does not involve inference variables and where you\n \/\/\/ otherwise want a precise match.\n fn collect_outlives_from_predicate_list(\n &self,\n compare_ty: impl Fn(Ty<'tcx>) -> bool,\n predicates: impl IntoIterator<Item = impl AsRef<ty::Predicate<'tcx>>>,\n ) -> impl Iterator<Item = ty::Region<'tcx>> {\n predicates\n .into_iter()\n .filter_map(|p| p.as_ref().to_opt_type_outlives())\n .filter_map(|p| p.no_late_bound_regions())\n .filter(move |p| compare_ty(p.0))\n .map(|p| p.1)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add implementation of Leonid Volnitsky's fast substring search<commit_after>extern crate core;\n\nuse core::mem;\nuse std::io;\nuse std::io::{BufReader, SeekSet};\nuse std::iter::range_step;\n\nstatic hash_size:uint = 64*1024;\n\nfn volnitsky(data: &[u8], substring: &[u8]) -> Option<uint> {\n let mut dataReader = BufReader::new(data);\n let mut substringReader = BufReader::new(substring);\n\n let w_size = mem::size_of::<u16>();\n let step = substring.len() - w_size + 1;\n\n let mut hash = [0u8, ..hash_size];\n\n for i in range(0, substring.len() - w_size).rev() {\n substringReader.seek(i as i64, SeekSet).unwrap();\n let mut hash_index = (substringReader.read_le_u16().unwrap() as uint) % hash_size;\n while hash[hash_index] != 0 {\n hash_index = (hash_index + 1) % hash_size;\n }\n hash[hash_index] = i as u8 + 1;\n }\n\n for offset in range_step(substring.len() - w_size, data.len() - substring.len() + 1, step) {\n dataReader.seek(offset as i64, SeekSet).unwrap();\n let mut hash_index = (dataReader.read_le_u16().unwrap() as uint) % hash_size;\n 'hash_check: loop {\n if hash[hash_index] == 0 {\n break;\n }\n let subOffset = offset - (hash[hash_index] as uint - 1);\n for i in range(0,substring.len()) {\n if data[subOffset + i] != substring[i] {\n hash_index = (hash_index + 1) % hash_size;\n continue 'hash_check;\n }\n }\n return Some(subOffset);\n }\n }\n\n return None;\n \/\/ should have:\n \/\/return std::search(P-step+1,Se,SS,SSe);\n}\n\n\nfn main() {\n let mut stdin = io::stdin();\n let dataVec = stdin.read_to_end().unwrap();\n let data = dataVec.slice(0,dataVec.len());\n let substring = bytes!(\"Anything whatsoever related to the Rust programming language: an open-source systems 
programming language from Mozilla, emphasizing safety, concurrency, and speed.\");\n \/\/ let substring = bytes!(\"This eBook is for the use \");\n println!(\"voln: {}\", volnitsky(data, substring));\n}\n<|endoftext|>"} {"text":"<commit_before>use std::sync::{Arc, RWLock};\nuse std::collections::HashMap;\nuse std::io::{IoResult, File};\nuse std::io::util::copy;\nuse std::path::BytesContainer;\nuse serialize::Encodable;\nuse http;\nuse http::server::ResponseWriter;\nuse time;\nuse mimes::get_media_type;\nuse mustache;\nuse mustache::{Template, Encoder, Error};\n\n\/\/\/A container for the response\npub struct Response<'a, 'b: 'a> {\n \/\/\/the original `http::server::ResponseWriter`\n pub origin: &'a mut ResponseWriter<'b>,\n templates: Arc<RWLock<HashMap<&'static str, Template>>>\n}\n\nimpl<'a, 'b> Response<'a, 'b> {\n pub fn from_internal<'c, 'd>(response: &'c mut ResponseWriter<'d>,\n templates: Arc<RWLock<HashMap<&'static str, Template>>>)\n -> Response<'c, 'd> {\n Response {\n origin: response,\n templates: templates\n }\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.send(\"hello world\");\n \/\/\/ ```\n pub fn send<T: BytesContainer> (&mut self, text: T) {\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n Response::set_headers(self.origin);\n let _ = self.origin.write(text.container_as_bytes());\n }\n\n \/\/\/ Sets the content type by it's short form. \n \/\/\/ Returns the response for chaining.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.content_type(\"html\");\n \/\/\/ ```\n pub fn content_type(&mut self, text: &str) -> &mut Response<'a,'b> {\n self.origin.headers.content_type = get_media_type(text);\n self\n }\n\n \/\/\/ Sets the status code and returns the response for chaining\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.status_code(http::status::NotFound);\n \/\/\/ ```\n pub fn status_code(&mut self, status: http::status::Status) -> &mut Response<'a,'b> {\n self.origin.status = status;\n self\n }\n\n fn set_headers(response_writer: &mut http::server::ResponseWriter) {\n let ref mut headers = response_writer.headers;\n headers.date = Some(time::now_utc());\n\n \/\/ we don't need to set this https:\/\/github.com\/Ogeon\/rustful\/issues\/3#issuecomment-44787613\n headers.content_length = None;\n if headers.content_type.is_none() {\n headers.content_type = get_media_type(\"txt\");\n }\n\n headers.server = Some(String::from_str(\"Nickel\"));\n }\n\n pub fn send_file(&mut self, path: &Path) -> IoResult<()> {\n let mut file = try!(File::open(path));\n self.origin.headers.content_length = None;\n\n self.origin.headers.content_type = path.extension_str().and_then(get_media_type);\n self.origin.headers.server = Some(String::from_str(\"Nickel\"));\n copy(&mut file, self.origin)\n }\n\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut data = HashMap::<&'static str, &'static str>::new();\n \/\/\/ data.insert(\"name\", \"user\");\n \/\/\/ response.render(\"examples\/assets\/template.tpl\", &data);\n \/\/\/ ```\n pub fn render<'a, T: Encodable<Encoder<'a>, Error>>\n (&mut self, path: &'static str, data: &T) {\n \/\/ Fast path doesn't need writer lock\n let _ = match self.templates.read().find(&path) {\n Some(template) => template.render(self.origin, data),\n None => {\n \/\/ Search again incase there was a race to compile the 
template\n let mut templates = self.templates.write();\n let template = templates.find_or_insert_with(path, |_| {\n let mut file = File::open(&Path::new(path));\n let raw_template = file.read_to_string()\n .ok()\n .expect(format!(\"Couldn't open the template file: {}\",\n path).as_slice());\n mustache::compile_str(raw_template.as_slice())\n });\n\n template.render(self.origin, data)\n }\n };\n }\n}\n\n#[test]\nfn matches_content_type () {\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension_str().and_then(get_media_type).unwrap();\n\n assert_eq!(content_type.type_.as_slice(), \"text\");\n assert_eq!(content_type.subtype.as_slice(), \"plain\");\n}\n<commit_msg>chore(response): moving chaining APIs to the top<commit_after>use std::sync::{Arc, RWLock};\nuse std::collections::HashMap;\nuse std::io::{IoResult, File};\nuse std::io::util::copy;\nuse std::path::BytesContainer;\nuse serialize::Encodable;\nuse http;\nuse http::server::ResponseWriter;\nuse time;\nuse mimes::get_media_type;\nuse mustache;\nuse mustache::{Template, Encoder, Error};\n\n\/\/\/A container for the response\npub struct Response<'a, 'b: 'a> {\n \/\/\/the original `http::server::ResponseWriter`\n pub origin: &'a mut ResponseWriter<'b>,\n templates: Arc<RWLock<HashMap<&'static str, Template>>>\n}\n\nimpl<'a, 'b> Response<'a, 'b> {\n pub fn from_internal<'c, 'd>(response: &'c mut ResponseWriter<'d>,\n templates: Arc<RWLock<HashMap<&'static str, Template>>>)\n -> Response<'c, 'd> {\n Response {\n origin: response,\n templates: templates\n }\n }\n\n \/\/\/ Sets the content type by it's short form. \n \/\/\/ Returns the response for chaining.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.content_type(\"html\");\n \/\/\/ ```\n pub fn content_type(&mut self, text: &str) -> &mut Response<'a,'b> {\n self.origin.headers.content_type = get_media_type(text);\n self\n }\n\n \/\/\/ Sets the status code and returns the response for chaining\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.status_code(http::status::NotFound);\n \/\/\/ ```\n pub fn status_code(&mut self, status: http::status::Status) -> &mut Response<'a,'b> {\n self.origin.status = status;\n self\n }\n\n \/\/\/ Writes a response\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ response.send(\"hello world\");\n \/\/\/ ```\n pub fn send<T: BytesContainer> (&mut self, text: T) {\n \/\/ TODO: This needs to be more sophisticated to return the correct headers\n \/\/ not just \"some headers\" :)\n Response::set_headers(self.origin);\n let _ = self.origin.write(text.container_as_bytes());\n }\n\n fn set_headers(response_writer: &mut http::server::ResponseWriter) {\n let ref mut headers = response_writer.headers;\n headers.date = Some(time::now_utc());\n\n \/\/ we don't need to set this https:\/\/github.com\/Ogeon\/rustful\/issues\/3#issuecomment-44787613\n headers.content_length = None;\n if headers.content_type.is_none() {\n headers.content_type = get_media_type(\"txt\");\n }\n\n headers.server = Some(String::from_str(\"Nickel\"));\n }\n\n pub fn send_file(&mut self, path: &Path) -> IoResult<()> {\n let mut file = try!(File::open(path));\n self.origin.headers.content_length = None;\n\n self.origin.headers.content_type = path.extension_str().and_then(get_media_type);\n self.origin.headers.server = Some(String::from_str(\"Nickel\"));\n copy(&mut file, self.origin)\n }\n\n \/\/\/ Renders the given template bound with the given data.\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```{rust,ignore}\n \/\/\/ let mut 
data = HashMap::<&'static str, &'static str>::new();\n \/\/\/ data.insert(\"name\", \"user\");\n \/\/\/ response.render(\"examples\/assets\/template.tpl\", &data);\n \/\/\/ ```\n pub fn render<'a, T: Encodable<Encoder<'a>, Error>>\n (&mut self, path: &'static str, data: &T) {\n \/\/ Fast path doesn't need writer lock\n let _ = match self.templates.read().find(&path) {\n Some(template) => template.render(self.origin, data),\n None => {\n \/\/ Search again incase there was a race to compile the template\n let mut templates = self.templates.write();\n let template = templates.find_or_insert_with(path, |_| {\n let mut file = File::open(&Path::new(path));\n let raw_template = file.read_to_string()\n .ok()\n .expect(format!(\"Couldn't open the template file: {}\",\n path).as_slice());\n mustache::compile_str(raw_template.as_slice())\n });\n\n template.render(self.origin, data)\n }\n };\n }\n}\n\n#[test]\nfn matches_content_type () {\n let path = &Path::new(\"test.txt\");\n let content_type = path.extension_str().and_then(get_media_type).unwrap();\n\n assert_eq!(content_type.type_.as_slice(), \"text\");\n assert_eq!(content_type.subtype.as_slice(), \"plain\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example of using the stopwords filter (#377)<commit_after>\/\/ # Stop Words Example\n\/\/\n\/\/ This example covers the basic usage of stop words\n\/\/ with tantivy\n\/\/\n\/\/ We will :\n\/\/ - define our schema\n\/\/ - create an index in a directory\n\/\/ - add a few stop words\n\/\/ - index few documents in our index\n\nextern crate tempdir;\n\n\/\/ ---\n\/\/ Importing tantivy...\n#[macro_use]\nextern crate tantivy;\nuse tantivy::collector::TopCollector;\nuse tantivy::query::QueryParser;\nuse tantivy::schema::*;\nuse tantivy::tokenizer::*;\nuse tantivy::Index;\n\nfn main() -> tantivy::Result<()> {\n \/\/ this example assumes you understand the content in `basic_search`\n let index_path = TempDir::new(\"tantivy_stopwords_example_dir\")?;\n let mut schema_builder = SchemaBuilder::default();\n\n \/\/ This configures your custom options for how tantivy will\n \/\/ store and process your content in the index; The key\n \/\/ to note is that we are setting the tokenizer to `stoppy`\n \/\/ which will be defined and registered below.\n let text_field_indexing = TextFieldIndexing::default()\n .set_tokenizer(\"stoppy\")\n .set_index_option(IndexRecordOption::WithFreqsAndPositions);\n let text_options = TextOptions::default()\n .set_indexing_options(text_field_indexing)\n .set_stored();\n\n \/\/ Our first field is title.\n schema_builder.add_text_field(\"title\", text_options);\n\n \/\/ Our second field is body.\n let text_field_indexing = TextFieldIndexing::default()\n .set_tokenizer(\"stoppy\")\n .set_index_option(IndexRecordOption::WithFreqsAndPositions);\n let text_options = TextOptions::default()\n .set_indexing_options(text_field_indexing)\n .set_stored();\n schema_builder.add_text_field(\"body\", text_options);\n\n let schema = schema_builder.build();\n\n let index = Index::create_in_dir(&index_path, schema.clone())?;\n\n \/\/ This tokenizer lowers all of the text (to help with stop word matching)\n \/\/ then removes all instances of `the` and `and` from the corpus\n let tokenizer = SimpleTokenizer\n .filter(LowerCaser)\n .filter(StopWordFilter::remove(vec![\n \"the\".to_string(),\n \"and\".to_string(),\n ]));\n\n index.tokenizers().register(\"stoppy\", tokenizer);\n\n let mut index_writer = index.writer(50_000_000)?;\n\n let title = schema.get_field(\"title\").unwrap();\n let 
body = schema.get_field(\"body\").unwrap();\n\n index_writer.add_document(doc!(\n title => \"The Old Man and the Sea\",\n body => \"He was an old man who fished alone in a skiff in the Gulf Stream and \\\n he had gone eighty-four days now without taking a fish.\"\n ));\n\n index_writer.add_document(doc!(\n title => \"Of Mice and Men\",\n body => \"A few miles south of Soledad, the Salinas River drops in close to the hillside \\\n bank and runs deep and green. The water is warm too, for it has slipped twinkling \\\n over the yellow sands in the sunlight before reaching the narrow pool. On one \\\n side of the river the golden foothill slopes curve up to the strong and rocky \\\n Gabilan Mountains, but on the valley side the water is lined with trees—willows \\\n fresh and green with every spring, carrying in their lower leaf junctures the \\\n debris of the winter’s flooding; and sycamores with mottled, white, recumbent \\\n limbs and branches that arch over the pool\"\n ));\n\n index_writer.add_document(doc!(\n title => \"Frankenstein\",\n body => \"You will rejoice to hear that no disaster has accompanied the commencement of an \\\n enterprise which you have regarded with such evil forebodings. I arrived here \\\n yesterday, and my first task is to assure my dear sister of my welfare and \\\n increasing confidence in the success of my undertaking.\"\n ));\n\n index_writer.commit()?;\n\n index.load_searchers()?;\n\n let searcher = index.searcher();\n\n let query_parser = QueryParser::for_index(&index, vec![title, body]);\n\n \/\/ this will have NO hits because it was filtered out\n \/\/ because the query is run through the analyzer you\n \/\/ actually will get an error here because the query becomes\n \/\/ empty\n assert!(query_parser.parse_query(\"the\").is_err());\n\n \/\/ this will have hits\n let query = query_parser.parse_query(\"is\")?;\n\n let mut top_collector = TopCollector::with_limit(10);\n\n searcher.search(&*query, &mut top_collector)?;\n\n let doc_addresses = top_collector.docs();\n\n for doc_address in doc_addresses {\n let retrieved_doc = searcher.doc(&doc_address)?;\n println!(\"{}\", schema.to_json(&retrieved_doc));\n }\n\n Ok(())\n}\n\nuse tempdir::TempDir;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Actually handle end of file.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add trivial assembler example w\/SRecord output<commit_after>extern crate r68k_tools;\n\nuse r68k_tools::assembler::Assembler;\nuse std::io;\nuse std::io::BufReader;\nuse r68k_tools::srecords::write_s68;\n\nfn main() {\n let r68k = Assembler::new();\n\n let asm = r#\"ADD.B #$3,D0\nADD.B D0,D1\"#;\n\n println!(\"{}\", asm);\n let mut reader = BufReader::new(asm.as_bytes());\n let (bytes, mem) = r68k.assemble(&mut reader).unwrap();\n println!(\"wrote {} bytes\", bytes);\n let mut stdout = io::stdout();\n write_s68(&mut stdout, vec![&mem], 0).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Documentation for config<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>thread panic<commit_after>use std::thread;\n\nfn main() {\n let result = thread::spawn(move || {\n panic!(\"Panic!\");\n }).join();\n if result.is_err() {\n println!(\"子线程发生了panic!\");\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Gym struct constructor implementation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start device api<commit_after>use std::ffi::{CStr, CString};\nuse std::marker::PhantomData;\nuse std::mem;\nuse std::ptr;\n\nuse libc;\n\nuse 
::{ffi, FromRaw, AsRaw, Userdata, LibinputContext, LibinputSeat};\n\npub struct LibinputDevice<C: 'static, D: 'static, S: 'static>\n{\n device: *mut ffi::libinput_device,\n _context_userdata_type: PhantomData<C>,\n _device_userdata_type: PhantomData<D>,\n _seat_userdata_type: PhantomData<S>,\n}\n\nimpl<C: 'static, D: 'static, S: 'static> FromRaw<ffi::libinput_device> for LibinputDevice<C, D, S>\n{\n unsafe fn from_raw(raw: *mut ffi::libinput_device) -> LibinputDevice<C, D, S>\n {\n LibinputDevice {\n device: ffi::libinput_device_ref(raw),\n _context_userdata_type: PhantomData,\n _device_userdata_type: PhantomData,\n _seat_userdata_type: PhantomData,\n }\n }\n}\n\nimpl<C: 'static, D: 'static, S: 'static> AsRaw<ffi::libinput_device> for LibinputDevice<C, D, S>\n{\n unsafe fn as_raw(&self) -> *const ffi::libinput_device {\n self.device as *const _\n }\n\n unsafe fn as_raw_mut(&mut self) -> *mut ffi::libinput_device {\n self.device as *mut _\n }\n}\n\nimpl<C: 'static, D: 'static, S: 'static> Userdata<D> for LibinputDevice<C, D, S>\n{\n fn userdata(&self) -> Option<&D> {\n unsafe {\n (ffi::libinput_device_get_user_data(self.device) as *const D).as_ref()\n }\n }\n\n fn userdata_mut(&mut self) -> Option<&mut D> {\n unsafe {\n (ffi::libinput_device_get_user_data(self.device) as *mut D).as_mut()\n }\n }\n\n fn set_userdata(&mut self, userdata: Option<D>) -> Option<D> {\n let old = unsafe {\n let ptr = ffi::libinput_device_get_user_data(self.device);\n if !ptr.is_null() {\n Some(Box::from_raw(ptr as *mut D))\n } else {\n None\n }\n };\n let mut boxed = Box::new(userdata);\n unsafe {\n ffi::libinput_device_set_user_data(self.device, match (*boxed).as_mut() {\n Some(value) => value as *mut D as *mut libc::c_void,\n None => ptr::null_mut(),\n });\n }\n mem::forget(boxed);\n old.map(|x| *x)\n }\n}\n\nimpl<C: 'static, D: 'static, S: 'static> Clone for LibinputDevice<C, D, S>\n{\n fn clone(&self) -> LibinputDevice<C, D, S>\n {\n LibinputDevice {\n device: unsafe { ffi::libinput_device_ref(self.device) },\n _context_userdata_type: PhantomData,\n _device_userdata_type: PhantomData,\n _seat_userdata_type: PhantomData,\n }\n }\n}\n\nimpl<C: 'static, D: 'static, S: 'static> Drop for LibinputDevice<C, D, S>\n{\n fn drop(&mut self) {\n unsafe {\n let userdata_ref = ffi::libinput_device_get_user_data(self.device);\n if ffi::libinput_device_unref(self.device).is_null() {\n Box::from_raw(userdata_ref);\n }\n }\n }\n}\n\nimpl<C: 'static, D: 'static, S: 'static> LibinputDevice<C, D, S>\n{\n pub fn context(&self) -> LibinputContext<C, D, S>\n {\n unsafe {\n LibinputContext::from_raw(ffi::libinput_device_get_context(self.device))\n }\n }\n\n \/*\n pub fn device_group(&self) -> LibinputDeviceGroup\n {\n unsafe {\n LibinputDeviceGroup::from_raw(ffi::libinput_device_get_display_group(self.device))\n }\n }\n *\/\n\n pub fn sysname(&self) -> &str\n {\n unsafe {\n CStr::from_ptr(ffi::libinput_device_get_sysname(self.device) ).to_str().expect(\"Device sysname is no valid utf8\")\n }\n }\n\n pub fn name(&self) -> &str\n {\n unsafe {\n CStr::from_ptr(ffi::libinput_device_get_name(self.device) ).to_str().expect(\"Device name is no valid utf8\")\n }\n }\n\n pub fn output_name(&self) -> Option<&str>\n {\n unsafe {\n let ptr = ffi::libinput_device_get_output_name(self.device);\n if !ptr.is_null() {\n Some(CStr::from_ptr(ptr).to_str().expect(\"Device output_name is no valid utf8\"))\n } else {\n None\n }\n }\n }\n\n pub fn id_product(&self) -> u32\n {\n unsafe {\n ffi::libinput_device_get_id_product(self.device)\n }\n }\n\n pub fn 
id_vendor(&self) -> u32\n {\n unsafe {\n ffi::libinput_device_get_id_vendor(self.device)\n }\n }\n\n pub fn seat(&self) -> LibinputSeat<C, D, S>\n {\n unsafe {\n LibinputSeat::from_raw(ffi::libinput_device_get_seat(self.device))\n }\n }\n\n pub fn set_seat_logical_name(&mut self, name: &str) -> Result<(), ()>\n {\n let name = CString::new(name).expect(\"New logical_seat name contained a null-byte\");\n unsafe {\n if ffi::libinput_device_set_seat_logical_name(self.device, name.as_ptr()) == 0 {\n Ok(())\n } else {\n Err(())\n }\n }\n }\n\n pub unsafe fn udev_device(&self) -> *mut libc::c_void\n {\n ffi::libinput_device_get_udev_device(self.device) as *mut _\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added an example to the doc based on the integration test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>autocommit 2015-03-31 23:16:21 CEST<commit_after>extern crate linux;\n\nuse linux::file::{Mode};\n\nfn main() {\n let mode: Mode = \"rwxrwxrwx\".parse().unwrap();\n assert_eq!(mode.to_string(), \"rwxrwxrwx\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Main rename<commit_after>#![cfg_attr(test, allow(dead_code))]\n\nmod byte_conversion;\nmod byte_manipulation;\nmod byte_utilities;\nmod english_scoring;\nmod general_utilities;\nmod key;\n\nfn guess_single_xor_char_decode(bytes: &Vec<u8>) -> (u16, Vec<u8>, u8) {\n\tlet mut top_score = 0;\n\tlet mut top_decode = vec![];\n\tlet mut top_x = 0;\n\tfor x in 0..255 {\n\t\tlet mut spin: Vec<u8> = vec![];\n\t\twhile spin.len() < bytes.len() {\n\t\t\tspin.push(x);\n\t\t}\n\n\t\tlet decoded = byte_manipulation::xor_byte_streams(&bytes, &spin);\n\t\tlet score = english_scoring::score_on_letter_frequency(&decoded);\n\t\tif score > top_score {\n\t\t\ttop_score = score;\n\t\t\ttop_decode = decoded;\n\t\t\ttop_x = x;\n\t\t}\n\t}\n\n\t(top_score, top_decode, top_x)\n}\n\n\nfn problem_3() {\n\t\/\/ http:\/\/cryptopals.com\/sets\/1\/challenges\/3\/\n\tuse byte_conversion::*;\n\tlet encoded = hex_to_bytes(\"1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736\");\n\n\tlet (top_score, top_decode, _) = guess_single_xor_char_decode(&encoded);\n\n\tprintln!(\"Best guess: `{}`, with a score of {}.\", bytes_to_readable_text(&top_decode), top_score);\n}\n\nfn problem_4() {\n\t\/\/ http:\/\/cryptopals.com\/sets\/1\/challenges\/4\/\n\n\tuse byte_conversion::*;\n\n\tlet s = general_utilities::read_file(\"C:\\\\Users\\\\Topher\\\\Dropbox\\\\Public\\\\Programming\\\\Matasano\\\\matasano-crypto-challenges\\\\res\\\\4.txt\");\n\n\tlet mut top_score = 0;\n\tlet mut top_decode = vec![];\t\n for line in s.split(\"\\n\") {\n \tlet (score, decoded, x) = guess_single_xor_char_decode(&hex_to_bytes(line));\n\t\t\n\t\tif score > top_score {\n\t\t\ttop_score = score;\n\t\t\ttop_decode = decoded.clone();\n\t\t}\n\n }\n\n println!(\"\\n\\n\\n{} - `{}`\", top_score, bytes_to_readable_text(&top_decode));\n}\n\nfn problem_6() {\n\t\/\/ http:\/\/cryptopals.com\/sets\/1\/challenges\/6\/\n\n\tuse byte_conversion::*;\n\tuse byte_manipulation::*;\n\tuse byte_utilities::*;\n\tuse english_scoring::*;\n\tuse general_utilities::*;\n\tuse key::*;\n\n\tlet file_data = general_utilities::read_file(\"C:\\\\Users\\\\Topher\\\\Dropbox\\\\Public\\\\Programming\\\\Matasano\\\\matasano-crypto-challenges\\\\res\\\\6.txt\");\n\tlet data_bytes = readable_text_to_bytes(&file_data);\n\n\t\/\/ For each KEYSIZE, take the first KEYSIZE worth of bytes, and the second KEYSIZE worth of bytes,\n\t\/\/ and find the edit distance between them. 
Normalize this result by dividing by KEYSIZE.\n\t\/\/ The KEYSIZE with the smallest normalized edit distance is probably the key.\n\t\/\/ You could proceed perhaps with the smallest 2-3 KEYSIZE values. Or take 4 KEYSIZE blocks instead of 2 and average the distances.\n\tlet mut keysize_points = Vec::new();\n\tfor keysize in 2..41 {\n\t\tif data_bytes.len() < keysize * 4 {\n\t\t\tcontinue;\n\t\t}\n\n\t\tlet chunks: Vec<_> = data_bytes.chunks(keysize).map(|x| slice_to_vec(x)).collect();\n\t\tassert!(chunks.len() >= 4);\n\n\t\tlet distance1 = hamming_distance(&chunks[0], &chunks[1]);\n\t\tlet distance2 = hamming_distance(&chunks[1], &chunks[2]);\n\t\tlet distance3 = hamming_distance(&chunks[2], &chunks[3]);\n\t\tlet distance4 = hamming_distance(&chunks[0], &chunks[3]);\n\t\tlet avg_distance: f32 = (distance1 + distance2 + distance3 + distance4) as f32 \/ 4.0;\n\t\tlet normalized_distance = avg_distance \/ keysize as f32;\n\n\t\tlet pair = (keysize, normalized_distance);\n\t\tkeysize_points.push(pair);\n\t}\n\n\t\/\/ Find the 4 best keysize values\n\tkeysize_points.sort_by(|a, b| {\n\t\tuse std::cmp::Ordering::*;\n\n\t\tlet (_, x) = *a;\n\t\tlet (_, y) = *b;\n\n\t\tx.partial_cmp(&y).unwrap_or(Equal)\n\t});\n\n\tprintln!(\"{:?}\", keysize_points);\n\n\tlet (best_keysize, _) = keysize_points[0];\n\tprintln!(\"{:?}\", best_keysize);\n\n\t\/\/ Now that you probably know the KEYSIZE: break the ciphertext into blocks of KEYSIZE length.\n\tlet blocks: Vec<Vec<u8>> = data_bytes\n\t\t.chunks(best_keysize)\n\t\t.map(|x| slice_to_vec(x))\n\t\t.collect();\n\n\t\/\/ Now transpose the blocks: make a block that is the first byte of every block, and a block that is the second byte of every block, and so on.\n\tlet transposed_blocks: Vec<Vec<u8>> = transpose_chunks(&blocks);\n\n\t\/\/ Solve each block as if it was single-character XOR. 
You already have code to do this.\n\tlet solved_key: Vec<(u16, String, u8)> = transposed_blocks.iter()\n\t\t.map(|x| {\n\t\t\tlet (a, b, c) = guess_single_xor_char_decode(x);\n\t\t\t(a, bytes_to_readable_text(&b), c)\n\t\t})\n\t\t.collect();\n\n\t\/\/ For each block, the single-byte XOR key that produces the best looking histogram is the repeating-key XOR key byte for that block.\n\t\/\/ Put them together and you have the key.\n\t\/\/let final_blocks: Vec<Vec<u8>> = transpose_chunks(&solved_blocks);\n\n\t\/\/let r: Vec<String> = final_blocks.iter().map(|x| bytes_to_readable_text(x)).collect();\n\t\/\/println!(\"{:?}\", transposed_blocks.iter().map(|x| bytes_to_readable_text(x)).collect());\n\tprintln!(\"{:?}\", solved_key);\n\n\tlet mut rk = RepeatingKey::new(&\"\\u{0}\\u{3}\\u{0}\\u{3}\\u{0}\\u{0}\\u{3}\\u{3}\\u{3}\\u{0}\\u{3}\\u{0}\\u{3}\\u{0}\\u{2}\\u{3}\\u{3}\\u{2}\\u{3}\\u{0}\");\n\n\n\t\/\/println!(\"{:?}\", bytes_to_readable_text(&rk.encrypt_bytes(&transposed_blocks)));\n}\n\nfn main() {\n\tprintln!(\"Running.\");\n\n\tproblem_6();\n}<|endoftext|>"} {"text":"<commit_before>use libimagstore::store::Entry;\nuse libimagstore::store::Store;\nuse libimagerror::into::IntoError;\n\nuse viewer::Viewer;\nuse result::Result;\nuse error::ViewErrorKind as VEK;\n\npub struct VersionsViewer<'a> {\n store: &'a Store,\n}\n\nimpl<'a> VersionsViewer<'a> {\n\n pub fn new(store: &'a Store) -> VersionsViewer<'a> {\n VersionsViewer {\n store: store,\n }\n }\n\n}\n\nimpl<'a> Viewer for VersionsViewer<'a> {\n\n fn view_entry(&self, entr: &Entry) -> Result<()> {\n use glob::glob;\n\n entr.get_location()\n .clone()\n .storified(self.store)\n .to_str()\n .and_then(|s| s.split(\"~\").next())\n .map(|component| {\n glob(&format!(\"{}~*\", component)[..])\n .map_err(|_| VEK::PatternError.into_error())\n .and_then(|paths| {\n for entry in paths {\n let p = match entry {\n Err(_) => return Err(VEK::GlobError.into_error()),\n Ok(p) => p,\n };\n let p = p.file_name()\n .and_then(|s| s.to_str())\n .unwrap(); \/\/ TODO\n println!(\"{}\", p);\n };\n Ok(())\n })\n })\n .unwrap_or(Err(VEK::PatternBuildingError.into_error()))\n }\n\n}\n\n<commit_msg>Fix libimagentryview::builtin::versions::* for new StoreId interface<commit_after>use libimagstore::store::Entry;\nuse libimagstore::store::Store;\nuse libimagerror::into::IntoError;\n\nuse viewer::Viewer;\nuse result::Result;\nuse error::ViewErrorKind as VEK;\nuse error::MapErrInto;\n\npub struct VersionsViewer<'a> {\n store: &'a Store,\n}\n\nimpl<'a> VersionsViewer<'a> {\n\n pub fn new(store: &'a Store) -> VersionsViewer<'a> {\n VersionsViewer {\n store: store,\n }\n }\n\n}\n\nimpl<'a> Viewer for VersionsViewer<'a> {\n\n fn view_entry(&self, entr: &Entry) -> Result<()> {\n use glob::glob;\n\n entr.get_location()\n .clone()\n .with_base(self.store.path().clone())\n .to_str()\n .map_err_into(VEK::ViewError)\n .and_then(|s| {\n s.split(\"~\")\n .next()\n .ok_or(VEK::PatternError.into_error())\n .map(|s| format!(\"{}~*\", s))\n .and_then(|pat| glob(&pat[..]).map_err(|_| VEK::PatternError.into_error()))\n .and_then(|paths| {\n for entry in paths {\n println!(\"{}\",\n try!(entry\n .map_err(|_| VEK::GlobError.into_error()))\n .file_name()\n .and_then(|s| s.to_str())\n .unwrap() \/\/ TODO\n );\n };\n Ok(())\n })\n })\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>use libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\n\ngenerate_error_module!(\n generate_error_types!(GitHookError, 
GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n NoConfigError => \"No Configuration\",\n ConfigTypeError => \"Configuration value type wrong\",\n RuntimeInformationSetupError => \"Couldn't setup runtime information for git hook\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => \"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature object\"\n );\n);\n\nimpl GitHookError {\n\n pub fn inside_of<T>(self, h: HEK) -> HookResult<T> {\n Err(HE::new(h, Some(Box::new(self))))\n }\n\n}\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<commit_msg>Impl From<GitHookError> for HookError<commit_after>use libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\n\ngenerate_error_module!(\n generate_error_types!(GitHookError, GitHookErrorKind,\n ConfigError => \"Configuration Error\",\n NoConfigError => \"No Configuration\",\n ConfigTypeError => \"Configuration value type wrong\",\n RuntimeInformationSetupError => \"Couldn't setup runtime information for git hook\",\n RepositoryError => \"Error while interacting with git repository\",\n RepositoryBranchError => \"Error while interacting with git branch(es)\",\n RepositoryBranchNameFetchingError => \"Error while fetching branch name\",\n RepositorySignatureFetchingError => \"Error while fetching Authors\/Committers signature\",\n RepositoryIndexFetchingError => \"Error while fetching Repository Index\",\n RepositoryPathAddingError => \"Error while adding Path to Index\",\n RepositoryTreeWritingError => \"Error while writing repository tree\",\n RepositoryTreeFindingError => \"Error while finding repository tree\",\n RepositoryCommitFindingError => \"Error while finding commit\",\n RepositoryCommittingError => \"Error while committing\",\n HeadFetchError => \"Error while getting HEAD\",\n NotOnBranch => \"No Branch is checked out\",\n MkRepo => \"Repository creation error\",\n MkSignature => \"Error while building Signature object\"\n );\n);\n\nimpl GitHookError {\n\n pub fn inside_of<T>(self, h: HEK) -> HookResult<T> {\n Err(HE::new(h, Some(Box::new(self))))\n }\n\n}\n\nimpl From<GitHookError> for HE {\n\n fn from(he: GitHookError) -> HE {\n HE::new(HEK::HookExecutionError, Some(Box::new(he)))\n }\n\n}\n\npub use self::error::GitHookError;\npub use self::error::GitHookErrorKind;\npub use self::error::MapErrInto;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove some boxing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add some XML character predicates<commit_after>use super::{Document,Root,RootChild};\n\ntrait XmlChar {\n fn is_name_start_char(&self) -> 
bool;\n fn is_name_char(&self) -> bool;\n}\n\nimpl XmlChar for char {\n fn is_name_start_char(&self) -> bool {\n match *self {\n ':' |\n 'A'..'Z' |\n '_' |\n 'a'..'z' |\n '\\U000000C0'..'\\U000000D6' |\n '\\U000000D8'..'\\U000000F6' |\n '\\U000000F8'..'\\U000002FF' |\n '\\U00000370'..'\\U0000037D' |\n '\\U0000037F'..'\\U00001FFF' |\n '\\U0000200C'..'\\U0000200D' |\n '\\U00002070'..'\\U0000218F' |\n '\\U00002C00'..'\\U00002FEF' |\n '\\U00003001'..'\\U0000D7FF' |\n '\\U0000F900'..'\\U0000FDCF' |\n '\\U0000FDF0'..'\\U0000FFFD' |\n '\\U00010000'..'\\U000EFFFF' => true,\n _ => false,\n }\n }\n\n fn is_name_char(&self) -> bool {\n if self.is_name_start_char() { return true; }\n match *self {\n '-' |\n '.' |\n '0'..'9' |\n '\\u00B7' |\n '\\u0300'..'\\u036F' |\n '\\u203F'..'\\u2040' => true,\n _ => false\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Print all errors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ aux-build:two_macros.rs\n\nmacro_rules! foo { () => {} }\nmacro_rules! macro_one { () => {} }\n\nmacro_rules! m1 { () => {\n macro_rules! foo { () => {} } \/\/~ ERROR `foo` is already in scope\n \/\/~^ NOTE macro-expanded `macro_rules!`s and `#[macro_use]`s may not shadow existing macros\n\n #[macro_use] \/\/~ ERROR `macro_one` is already in scope\n \/\/~^ NOTE macro-expanded `macro_rules!`s and `#[macro_use]`s may not shadow existing macros\n extern crate two_macros;\n}}\nm1!(); \/\/~ NOTE in this expansion\n \/\/~| NOTE in this expansion\n \/\/~| NOTE in this expansion\n \/\/~| NOTE in this expansion\n\nfn f() { macro_one!(); }\nfoo!();\n\nmacro_rules! m2 { () => {\n macro_rules! foo { () => {} }\n #[macro_use] extern crate two_macros as __;\n\n fn g() { macro_one!(); }\n foo!();\n}}\nm2!();\n\/\/^ Since `foo` and `macro_one` are not used outside this expansion, they are not shadowing errors.\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n#![feature(int_uint)]\n#![feature(unboxed_closures)]\n#![feature(box_syntax)]\n\n#![deny(unused_imports)]\n#![deny(unused_variables)]\n#![allow(missing_copy_implementations)]\n#![allow(unstable)]\n\nextern crate collections;\nextern crate geom;\nextern crate hyper;\nextern crate png;\n#[macro_use]\nextern crate log;\nextern crate serialize;\nextern crate \"util\" as servo_util;\nextern crate stb_image;\nextern crate time;\nextern crate url;\n\n\/\/\/ Image handling.\n\/\/\/\n\/\/\/ It may be surprising that this goes in the network crate as opposed to the graphics crate.\n\/\/\/ However, image handling is generally very integrated with the network stack (especially where\n\/\/\/ caching is involved) and as a result it must live in here.\npub mod image {\n pub mod base;\n pub mod holder;\n}\n\npub mod about_loader;\npub mod file_loader;\npub mod http_loader;\npub mod data_loader;\npub mod image_cache_task;\npub mod local_image_cache;\npub mod resource_task;\npub mod storage_task;\nmod sniffer_task;\n\n\/\/\/ An implementation of the [Fetch spec](http:\/\/fetch.spec.whatwg.org\/)\npub mod fetch {\n #![allow(dead_code)] \/\/ XXXManishearth this is only temporary until the Fetch mod starts being used\n pub mod request;\n pub mod response;\n pub mod cors_cache;\n}\n<commit_msg>silence fetch warnings<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n#![feature(int_uint)]\n#![feature(unboxed_closures)]\n#![feature(box_syntax)]\n\n#![deny(unused_imports)]\n#![deny(unused_variables)]\n#![allow(missing_copy_implementations)]\n#![allow(unstable)]\n\nextern crate collections;\nextern crate geom;\nextern crate hyper;\nextern crate png;\n#[macro_use]\nextern crate log;\nextern crate serialize;\nextern crate \"util\" as servo_util;\nextern crate stb_image;\nextern crate time;\nextern crate url;\n\n\/\/\/ Image handling.\n\/\/\/\n\/\/\/ It may be surprising that this goes in the network crate as opposed to the graphics crate.\n\/\/\/ However, image handling is generally very integrated with the network stack (especially where\n\/\/\/ caching is involved) and as a result it must live in here.\npub mod image {\n pub mod base;\n pub mod holder;\n}\n\npub mod about_loader;\npub mod file_loader;\npub mod http_loader;\npub mod data_loader;\npub mod image_cache_task;\npub mod local_image_cache;\npub mod resource_task;\npub mod storage_task;\nmod sniffer_task;\n\n\/\/\/ An implementation of the [Fetch spec](http:\/\/fetch.spec.whatwg.org\/)\npub mod fetch {\n #![allow(dead_code, unused)] \/\/ XXXManishearth this is only temporary until the Fetch mod starts being used\n pub mod request;\n pub mod response;\n pub mod cors_cache;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/! 
Set and unset common attributes on LLVM values.\n\nuse libc::{c_uint, c_ulonglong};\nuse llvm::{self, ValueRef, AttrHelper};\nuse middle::ty::{self, ClosureTyper};\nuse session::config::NoDebugInfo;\nuse syntax::abi;\nuse syntax::ast;\npub use syntax::attr::InlineAttr;\nuse trans::base;\nuse trans::common;\nuse trans::context::CrateContext;\nuse trans::machine;\nuse trans::type_of;\n\n\/\/\/ Mark LLVM function to use split stack.\n#[inline]\npub fn split_stack(val: ValueRef, set: bool) {\n unsafe {\n let attr = \"split-stack\\0\".as_ptr() as *const _;\n if set {\n llvm::LLVMAddFunctionAttrString(val, llvm::FunctionIndex as c_uint, attr);\n } else {\n llvm::LLVMRemoveFunctionAttrString(val, llvm::FunctionIndex as c_uint, attr);\n }\n }\n}\n\n\/\/\/ Mark LLVM function to use provided inline heuristic.\n#[inline]\npub fn inline(val: ValueRef, inline: InlineAttr) {\n use self::InlineAttr::*;\n match inline {\n Hint => llvm::SetFunctionAttribute(val, llvm::Attribute::InlineHint),\n Always => llvm::SetFunctionAttribute(val, llvm::Attribute::AlwaysInline),\n Never => llvm::SetFunctionAttribute(val, llvm::Attribute::NoInline),\n None => {\n let attr = llvm::Attribute::InlineHint |\n llvm::Attribute::AlwaysInline |\n llvm::Attribute::NoInline;\n unsafe {\n llvm::LLVMRemoveFunctionAttr(val, attr.bits() as c_ulonglong)\n }\n },\n };\n}\n\n\/\/\/ Tell LLVM to emit or not emit the information necessary to unwind the stack for the function.\n#[inline]\npub fn emit_uwtable(val: ValueRef, emit: bool) {\n if emit {\n llvm::SetFunctionAttribute(val, llvm::Attribute::UWTable);\n } else {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::UWTable.bits() as c_ulonglong,\n );\n }\n }\n}\n\n\/\/\/ Tell LLVM whether the function can or cannot unwind.\n#[inline]\n#[allow(dead_code)] \/\/ possibly useful function\npub fn unwind(val: ValueRef, can_unwind: bool) {\n if can_unwind {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::NoUnwind.bits() as c_ulonglong,\n );\n }\n } else {\n llvm::SetFunctionAttribute(val, llvm::Attribute::NoUnwind);\n }\n}\n\n\/\/\/ Tell LLVM whether it should optimise function for size.\n#[inline]\n#[allow(dead_code)] \/\/ possibly useful function\npub fn set_optimize_for_size(val: ValueRef, optimize: bool) {\n if optimize {\n llvm::SetFunctionAttribute(val, llvm::Attribute::OptimizeForSize);\n } else {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::OptimizeForSize.bits() as c_ulonglong,\n );\n }\n }\n}\n\n\/\/\/ Composite function which sets LLVM attributes for function depending on its AST (#[attribute])\n\/\/\/ attributes.\npub fn from_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: ValueRef) {\n use syntax::attr::*;\n inline(llfn, find_inline_attr(Some(ccx.sess().diagnostic()), attrs));\n\n \/\/ FIXME: #11906: Omitting frame pointers breaks retrieving the value of a\n \/\/ parameter.\n let no_fp_elim = (ccx.sess().opts.debuginfo != NoDebugInfo) ||\n !ccx.sess().target.target.options.eliminate_frame_pointer;\n if no_fp_elim {\n unsafe {\n let attr = \"no-frame-pointer-elim\\0\".as_ptr() as *const _;\n let val = \"true\\0\".as_ptr() as *const _;\n llvm::LLVMAddFunctionAttrStringValue(llfn,\n llvm::FunctionIndex as c_uint,\n attr, val);\n }\n }\n\n for attr in attrs {\n if attr.check_name(\"no_stack_check\") {\n split_stack(llfn, false);\n } else if attr.check_name(\"cold\") {\n unsafe {\n llvm::LLVMAddFunctionAttribute(llfn,\n llvm::FunctionIndex as c_uint,\n llvm::ColdAttribute as u64)\n }\n } else if 
attr.check_name(\"allocator\") {\n llvm::Attribute::NoAlias.apply_llfn(llvm::ReturnIndex as c_uint, llfn);\n }\n }\n}\n\n\/\/\/ Composite function which converts function type into LLVM attributes for the function.\npub fn from_fn_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_type: ty::Ty<'tcx>)\n -> llvm::AttrBuilder {\n use middle::ty::{BrAnon, ReLateBound};\n\n let function_type;\n let (fn_sig, abi, env_ty) = match fn_type.sty {\n ty::TyBareFn(_, ref f) => (&f.sig, f.abi, None),\n ty::TyClosure(closure_did, substs) => {\n let typer = common::NormalizingClosureTyper::new(ccx.tcx());\n function_type = typer.closure_type(closure_did, substs);\n let self_type = base::self_type_for_closure(ccx, closure_did, fn_type);\n (&function_type.sig, abi::RustCall, Some(self_type))\n }\n _ => ccx.sess().bug(\"expected closure or function.\")\n };\n\n let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);\n\n let mut attrs = llvm::AttrBuilder::new();\n let ret_ty = fn_sig.output;\n\n \/\/ These have an odd calling convention, so we need to manually\n \/\/ unpack the input ty's\n let input_tys = match fn_type.sty {\n ty::TyClosure(..) => {\n assert!(abi == abi::RustCall);\n\n match fn_sig.inputs[0].sty {\n ty::TyTuple(ref inputs) => {\n let mut full_inputs = vec![env_ty.expect(\"Missing closure environment\")];\n full_inputs.push_all(inputs);\n full_inputs\n }\n _ => ccx.sess().bug(\"expected tuple'd inputs\")\n }\n },\n ty::TyBareFn(..) if abi == abi::RustCall => {\n let mut inputs = vec![fn_sig.inputs[0]];\n\n match fn_sig.inputs[1].sty {\n ty::TyTuple(ref t_in) => {\n inputs.push_all(&t_in[..]);\n inputs\n }\n _ => ccx.sess().bug(\"expected tuple'd inputs\")\n }\n }\n _ => fn_sig.inputs.clone()\n };\n\n \/\/ Index 0 is the return value of the llvm func, so we start at 1\n let mut first_arg_offset = 1;\n if let ty::FnConverging(ret_ty) = ret_ty {\n \/\/ A function pointer is called without the declaration\n \/\/ available, so we have to apply any attributes with ABI\n \/\/ implications directly to the call instruction. Right now,\n \/\/ the only attribute we need to worry about is `sret`.\n if type_of::return_uses_outptr(ccx, ret_ty) {\n let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, ret_ty));\n\n \/\/ The outptr can be noalias and nocapture because it's entirely\n \/\/ invisible to the program. We also know it's nonnull as well\n \/\/ as how many bytes we can dereference\n attrs.arg(1, llvm::Attribute::StructRet)\n .arg(1, llvm::Attribute::NoAlias)\n .arg(1, llvm::Attribute::NoCapture)\n .arg(1, llvm::DereferenceableAttribute(llret_sz));\n\n \/\/ Add one more since there's an outptr\n first_arg_offset += 1;\n } else {\n \/\/ The `noalias` attribute on the return value is useful to a\n \/\/ function ptr caller.\n match ret_ty.sty {\n \/\/ `Box` pointer return values never alias because ownership\n \/\/ is transferred\n ty::TyBox(it) if common::type_is_sized(ccx.tcx(), it) => {\n attrs.ret(llvm::Attribute::NoAlias);\n }\n _ => {}\n }\n\n \/\/ We can also mark the return value as `dereferenceable` in certain cases\n match ret_ty.sty {\n \/\/ These are not really pointers but pairs, (pointer, len)\n ty::TyRef(_, ty::mt { ty: inner, .. 
})\n | ty::TyBox(inner) if common::type_is_sized(ccx.tcx(), inner) => {\n let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));\n attrs.ret(llvm::DereferenceableAttribute(llret_sz));\n }\n _ => {}\n }\n\n if let ty::TyBool = ret_ty.sty {\n attrs.ret(llvm::Attribute::ZExt);\n }\n }\n }\n\n for (idx, &t) in input_tys.iter().enumerate().map(|(i, v)| (i + first_arg_offset, v)) {\n match t.sty {\n \/\/ this needs to be first to prevent fat pointers from falling through\n _ if !common::type_is_immediate(ccx, t) => {\n let llarg_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, t));\n\n \/\/ For non-immediate arguments the callee gets its own copy of\n \/\/ the value on the stack, so there are no aliases. It's also\n \/\/ program-invisible so can't possibly capture\n attrs.arg(idx, llvm::Attribute::NoAlias)\n .arg(idx, llvm::Attribute::NoCapture)\n .arg(idx, llvm::DereferenceableAttribute(llarg_sz));\n }\n\n ty::TyBool => {\n attrs.arg(idx, llvm::Attribute::ZExt);\n }\n\n \/\/ `Box` pointer parameters never alias because ownership is transferred\n ty::TyBox(inner) => {\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));\n\n attrs.arg(idx, llvm::Attribute::NoAlias)\n .arg(idx, llvm::DereferenceableAttribute(llsz));\n }\n\n \/\/ `&mut` pointer parameters never alias other parameters, or mutable global data\n \/\/\n \/\/ `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as both\n \/\/ `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely on\n \/\/ memory dependencies rather than pointer equality\n ty::TyRef(b, mt) if mt.mutbl == ast::MutMutable ||\n !ty::type_contents(ccx.tcx(), mt.ty).interior_unsafe() => {\n\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));\n attrs.arg(idx, llvm::Attribute::NoAlias)\n .arg(idx, llvm::DereferenceableAttribute(llsz));\n\n if mt.mutbl == ast::MutImmutable {\n attrs.arg(idx, llvm::Attribute::ReadOnly);\n }\n\n if let ReLateBound(_, BrAnon(_)) = *b {\n attrs.arg(idx, llvm::Attribute::NoCapture);\n }\n }\n\n \/\/ When a reference in an argument has no named lifetime, it's impossible for that\n \/\/ reference to escape this function (returned or stored beyond the call by a closure).\n ty::TyRef(&ReLateBound(_, BrAnon(_)), mt) => {\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));\n attrs.arg(idx, llvm::Attribute::NoCapture)\n .arg(idx, llvm::DereferenceableAttribute(llsz));\n }\n\n \/\/ & pointer parameters are also never null and we know exactly how\n \/\/ many bytes we can dereference\n ty::TyRef(_, mt) => {\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));\n attrs.arg(idx, llvm::DereferenceableAttribute(llsz));\n }\n _ => ()\n }\n }\n\n attrs\n}\n<commit_msg>Use a single match arm for all TyRef variants when deducing function argument attributes<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/! 
Set and unset common attributes on LLVM values.\n\nuse libc::{c_uint, c_ulonglong};\nuse llvm::{self, ValueRef, AttrHelper};\nuse middle::ty::{self, ClosureTyper};\nuse session::config::NoDebugInfo;\nuse syntax::abi;\nuse syntax::ast;\npub use syntax::attr::InlineAttr;\nuse trans::base;\nuse trans::common;\nuse trans::context::CrateContext;\nuse trans::machine;\nuse trans::type_of;\n\n\/\/\/ Mark LLVM function to use split stack.\n#[inline]\npub fn split_stack(val: ValueRef, set: bool) {\n unsafe {\n let attr = \"split-stack\\0\".as_ptr() as *const _;\n if set {\n llvm::LLVMAddFunctionAttrString(val, llvm::FunctionIndex as c_uint, attr);\n } else {\n llvm::LLVMRemoveFunctionAttrString(val, llvm::FunctionIndex as c_uint, attr);\n }\n }\n}\n\n\/\/\/ Mark LLVM function to use provided inline heuristic.\n#[inline]\npub fn inline(val: ValueRef, inline: InlineAttr) {\n use self::InlineAttr::*;\n match inline {\n Hint => llvm::SetFunctionAttribute(val, llvm::Attribute::InlineHint),\n Always => llvm::SetFunctionAttribute(val, llvm::Attribute::AlwaysInline),\n Never => llvm::SetFunctionAttribute(val, llvm::Attribute::NoInline),\n None => {\n let attr = llvm::Attribute::InlineHint |\n llvm::Attribute::AlwaysInline |\n llvm::Attribute::NoInline;\n unsafe {\n llvm::LLVMRemoveFunctionAttr(val, attr.bits() as c_ulonglong)\n }\n },\n };\n}\n\n\/\/\/ Tell LLVM to emit or not emit the information necessary to unwind the stack for the function.\n#[inline]\npub fn emit_uwtable(val: ValueRef, emit: bool) {\n if emit {\n llvm::SetFunctionAttribute(val, llvm::Attribute::UWTable);\n } else {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::UWTable.bits() as c_ulonglong,\n );\n }\n }\n}\n\n\/\/\/ Tell LLVM whether the function can or cannot unwind.\n#[inline]\n#[allow(dead_code)] \/\/ possibly useful function\npub fn unwind(val: ValueRef, can_unwind: bool) {\n if can_unwind {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::NoUnwind.bits() as c_ulonglong,\n );\n }\n } else {\n llvm::SetFunctionAttribute(val, llvm::Attribute::NoUnwind);\n }\n}\n\n\/\/\/ Tell LLVM whether it should optimise function for size.\n#[inline]\n#[allow(dead_code)] \/\/ possibly useful function\npub fn set_optimize_for_size(val: ValueRef, optimize: bool) {\n if optimize {\n llvm::SetFunctionAttribute(val, llvm::Attribute::OptimizeForSize);\n } else {\n unsafe {\n llvm::LLVMRemoveFunctionAttr(\n val,\n llvm::Attribute::OptimizeForSize.bits() as c_ulonglong,\n );\n }\n }\n}\n\n\/\/\/ Composite function which sets LLVM attributes for function depending on its AST (#[attribute])\n\/\/\/ attributes.\npub fn from_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: ValueRef) {\n use syntax::attr::*;\n inline(llfn, find_inline_attr(Some(ccx.sess().diagnostic()), attrs));\n\n \/\/ FIXME: #11906: Omitting frame pointers breaks retrieving the value of a\n \/\/ parameter.\n let no_fp_elim = (ccx.sess().opts.debuginfo != NoDebugInfo) ||\n !ccx.sess().target.target.options.eliminate_frame_pointer;\n if no_fp_elim {\n unsafe {\n let attr = \"no-frame-pointer-elim\\0\".as_ptr() as *const _;\n let val = \"true\\0\".as_ptr() as *const _;\n llvm::LLVMAddFunctionAttrStringValue(llfn,\n llvm::FunctionIndex as c_uint,\n attr, val);\n }\n }\n\n for attr in attrs {\n if attr.check_name(\"no_stack_check\") {\n split_stack(llfn, false);\n } else if attr.check_name(\"cold\") {\n unsafe {\n llvm::LLVMAddFunctionAttribute(llfn,\n llvm::FunctionIndex as c_uint,\n llvm::ColdAttribute as u64)\n }\n } else if 
attr.check_name(\"allocator\") {\n llvm::Attribute::NoAlias.apply_llfn(llvm::ReturnIndex as c_uint, llfn);\n }\n }\n}\n\n\/\/\/ Composite function which converts function type into LLVM attributes for the function.\npub fn from_fn_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_type: ty::Ty<'tcx>)\n -> llvm::AttrBuilder {\n use middle::ty::{BrAnon, ReLateBound};\n\n let function_type;\n let (fn_sig, abi, env_ty) = match fn_type.sty {\n ty::TyBareFn(_, ref f) => (&f.sig, f.abi, None),\n ty::TyClosure(closure_did, substs) => {\n let typer = common::NormalizingClosureTyper::new(ccx.tcx());\n function_type = typer.closure_type(closure_did, substs);\n let self_type = base::self_type_for_closure(ccx, closure_did, fn_type);\n (&function_type.sig, abi::RustCall, Some(self_type))\n }\n _ => ccx.sess().bug(\"expected closure or function.\")\n };\n\n let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);\n\n let mut attrs = llvm::AttrBuilder::new();\n let ret_ty = fn_sig.output;\n\n \/\/ These have an odd calling convention, so we need to manually\n \/\/ unpack the input ty's\n let input_tys = match fn_type.sty {\n ty::TyClosure(..) => {\n assert!(abi == abi::RustCall);\n\n match fn_sig.inputs[0].sty {\n ty::TyTuple(ref inputs) => {\n let mut full_inputs = vec![env_ty.expect(\"Missing closure environment\")];\n full_inputs.push_all(inputs);\n full_inputs\n }\n _ => ccx.sess().bug(\"expected tuple'd inputs\")\n }\n },\n ty::TyBareFn(..) if abi == abi::RustCall => {\n let mut inputs = vec![fn_sig.inputs[0]];\n\n match fn_sig.inputs[1].sty {\n ty::TyTuple(ref t_in) => {\n inputs.push_all(&t_in[..]);\n inputs\n }\n _ => ccx.sess().bug(\"expected tuple'd inputs\")\n }\n }\n _ => fn_sig.inputs.clone()\n };\n\n \/\/ Index 0 is the return value of the llvm func, so we start at 1\n let mut first_arg_offset = 1;\n if let ty::FnConverging(ret_ty) = ret_ty {\n \/\/ A function pointer is called without the declaration\n \/\/ available, so we have to apply any attributes with ABI\n \/\/ implications directly to the call instruction. Right now,\n \/\/ the only attribute we need to worry about is `sret`.\n if type_of::return_uses_outptr(ccx, ret_ty) {\n let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, ret_ty));\n\n \/\/ The outptr can be noalias and nocapture because it's entirely\n \/\/ invisible to the program. We also know it's nonnull as well\n \/\/ as how many bytes we can dereference\n attrs.arg(1, llvm::Attribute::StructRet)\n .arg(1, llvm::Attribute::NoAlias)\n .arg(1, llvm::Attribute::NoCapture)\n .arg(1, llvm::DereferenceableAttribute(llret_sz));\n\n \/\/ Add one more since there's an outptr\n first_arg_offset += 1;\n } else {\n \/\/ The `noalias` attribute on the return value is useful to a\n \/\/ function ptr caller.\n match ret_ty.sty {\n \/\/ `Box` pointer return values never alias because ownership\n \/\/ is transferred\n ty::TyBox(it) if common::type_is_sized(ccx.tcx(), it) => {\n attrs.ret(llvm::Attribute::NoAlias);\n }\n _ => {}\n }\n\n \/\/ We can also mark the return value as `dereferenceable` in certain cases\n match ret_ty.sty {\n \/\/ These are not really pointers but pairs, (pointer, len)\n ty::TyRef(_, ty::mt { ty: inner, .. 
})\n | ty::TyBox(inner) if common::type_is_sized(ccx.tcx(), inner) => {\n let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));\n attrs.ret(llvm::DereferenceableAttribute(llret_sz));\n }\n _ => {}\n }\n\n if let ty::TyBool = ret_ty.sty {\n attrs.ret(llvm::Attribute::ZExt);\n }\n }\n }\n\n for (idx, &t) in input_tys.iter().enumerate().map(|(i, v)| (i + first_arg_offset, v)) {\n match t.sty {\n \/\/ this needs to be first to prevent fat pointers from falling through\n _ if !common::type_is_immediate(ccx, t) => {\n let llarg_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, t));\n\n \/\/ For non-immediate arguments the callee gets its own copy of\n \/\/ the value on the stack, so there are no aliases. It's also\n \/\/ program-invisible so can't possibly capture\n attrs.arg(idx, llvm::Attribute::NoAlias)\n .arg(idx, llvm::Attribute::NoCapture)\n .arg(idx, llvm::DereferenceableAttribute(llarg_sz));\n }\n\n ty::TyBool => {\n attrs.arg(idx, llvm::Attribute::ZExt);\n }\n\n \/\/ `Box` pointer parameters never alias because ownership is transferred\n ty::TyBox(inner) => {\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));\n\n attrs.arg(idx, llvm::Attribute::NoAlias)\n .arg(idx, llvm::DereferenceableAttribute(llsz));\n }\n\n ty::TyRef(b, mt) => {\n \/\/ `&mut` pointer parameters never alias other parameters, or mutable global data\n \/\/\n \/\/ `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as\n \/\/ both `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely\n \/\/ on memory dependencies rather than pointer equality\n let interior_unsafe = ty::type_contents(ccx.tcx(), mt.ty).interior_unsafe();\n\n if mt.mutbl == ast::MutMutable || !interior_unsafe {\n attrs.arg(idx, llvm::Attribute::NoAlias);\n }\n\n if mt.mutbl == ast::MutImmutable && !interior_unsafe {\n attrs.arg(idx, llvm::Attribute::ReadOnly);\n }\n\n \/\/ & pointer parameters are also never null and we know exactly\n \/\/ how many bytes we can dereference\n let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));\n attrs.arg(idx, llvm::DereferenceableAttribute(llsz));\n\n \/\/ When a reference in an argument has no named lifetime, it's\n \/\/ impossible for that reference to escape this function\n \/\/ (returned or stored beyond the call by a closure).\n if let ReLateBound(_, BrAnon(_)) = *b {\n attrs.arg(idx, llvm::Attribute::NoCapture);\n }\n }\n\n _ => ()\n }\n }\n\n attrs\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix clip warning about short idents<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>decrease bench reps<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>some functional tests for mpsc<commit_after>extern crate mioco;\n\nuse mioco::sync::mpsc;\nuse std::thread;\n\n#[test]\nfn tx_rx_outside_mioco() {\n let (tx, rx) = mpsc::channel::<i32>();\n\n thread::spawn(move || for i in 0..10 {\n let _ = tx.send(i);\n });\n\n thread::sleep_ms(1000);\n\n let mut sum = 0;\n for _ in 0..50 {\n if let Ok(i) = rx.try_recv() {\n sum += i;\n }\n }\n\n assert_eq!(sum, 45);\n}\n\n#[test]\nfn tx_outside_rx_inside_mioco() {\n let (tx, rx) = mpsc::channel::<i32>();\n for i in 0..10 {\n let _ = tx.send(i);\n }\n\n mioco::spawn(move || {\n let mut sum = 0;\n for _ in 0..50 {\n if let Ok(i) = rx.try_recv() {\n sum += i;\n }\n }\n assert_eq!(sum, 45);\n });\n\n thread::sleep_ms(1000);\n}\n\n#[test]\nfn tx_inside_rx_inside_mioco() {\n let (tx, rx) = mpsc::channel::<i32>();\n \n mioco::spawn(move ||{\n for 
i in 0..10 {\n let _ = tx.send(i);\n }\n });\n\n mioco::spawn(move || {\n let mut sum = 0;\n for _ in 0..50 {\n if let Ok(i) = rx.try_recv() {\n sum += i;\n }\n }\n assert_eq!(sum, 45);\n });\n\n thread::sleep_ms(1000);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Stop using deprecated `SipHasher`.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(asm)]\n#![feature(staged_api)]\n\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\nextern {\n fn memmove(dst: *mut u8, src: *const u8, size: usize);\n fn __rust_allocate(size: usize, align: usize) -> *mut u8;\n fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);\n fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;\n fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize;\n fn __rust_usable_size(size: usize, align: usize) -> usize;\n }\n<commit_msg>run rustfmt on liballoc_system<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"alloc_system\"]\n#![crate_type = \"rlib\"]\n#![no_std]\n#![cfg_attr(not(stage0), allocator)]\n#![unstable(feature = \"alloc_system\",\n reason = \"this library is unlikely to be stabilized in its current \\\n form or name\",\n issue = \"27783\")]\n#![feature(allocator)]\n#![feature(asm)]\n#![feature(staged_api)]\n\n\n\/\/ The minimum alignment guaranteed by the architecture. This value is used to\n\/\/ add fast paths for low alignment values. 
In practice, the alignment is a\n\/\/ constant at the call site and the branch will be optimized out.\n#[cfg(all(any(target_arch = \"arm\",\n target_arch = \"mips\",\n target_arch = \"mipsel\",\n target_arch = \"powerpc\")))]\nconst MIN_ALIGN: usize = 8;\n#[cfg(all(any(target_arch = \"x86\",\n target_arch = \"x86_64\",\n target_arch = \"aarch64\")))]\nconst MIN_ALIGN: usize = 16;\n\nextern \"C\" {\n fn memmove(dst: *mut u8, src: *const u8, size: usize);\n fn __rust_allocate(size: usize, align: usize) -> *mut u8;\n fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);\n fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;\n fn __rust_reallocate_inplace(ptr: *mut u8,\n old_size: usize,\n size: usize,\n align: usize)\n -> usize;\n fn __rust_usable_size(size: usize, align: usize) -> usize;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Write tailed data to stdout, and cleanup a tad.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Style adjustments<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix config override mechanism<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Matrix room version identifiers.\n\nuse std::{\n convert::TryFrom,\n fmt::{Display, Formatter, Result as FmtResult},\n};\n\n#[cfg(feature = \"diesel\")]\nuse diesel::sql_types::Text;\nuse serde::{\n de::{Error as SerdeError, Unexpected, Visitor},\n Deserialize, Deserializer, Serialize, Serializer,\n};\n\nuse crate::error::Error;\n\n\/\/\/ Room version identifiers cannot be more than 32 code points.\nconst MAX_CODE_POINTS: usize = 32;\n\n\/\/\/ A Matrix room version ID.\n\/\/\/\n\/\/\/ A `RoomVersionId` can be or converted or deserialized from a string slice, and can be converted\n\/\/\/ or serialized back into a string as needed.\n\/\/\/\n\/\/\/ ```\n\/\/\/ # use std::convert::TryFrom;\n\/\/\/ # use ruma_identifiers::RoomVersionId;\n\/\/\/ assert_eq!(RoomVersionId::try_from(\"1\").unwrap().to_string(), \"1\");\n\/\/\/ ```\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n#[cfg_attr(feature = \"diesel\", derive(FromSqlRow, QueryId, AsExpression, SqlType))]\n#[cfg_attr(feature = \"diesel\", sql_type = \"Text\")]\npub struct RoomVersionId(InnerRoomVersionId);\n\n\/\/\/ Possibile values for room version, distinguishing between official Matrix versions and custom\n\/\/\/ versions.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\nenum InnerRoomVersionId {\n \/\/\/ A version 1 room.\n Version1,\n \/\/\/ A version 2 room.\n Version2,\n \/\/\/ A version 3 room.\n Version3,\n \/\/\/ A version 4 room.\n Version4,\n \/\/\/ A custom room version.\n Custom(String),\n}\n\n\/\/\/ A serde visitor for `RoomVersionId`.\nstruct RoomVersionIdVisitor;\n\nimpl RoomVersionId {\n \/\/\/ Creates a version 1 room ID.\n pub fn version_1() -> Self {\n Self(InnerRoomVersionId::Version1)\n }\n\n \/\/\/ Creates a version 2 room ID.\n pub fn version_2() -> Self {\n Self(InnerRoomVersionId::Version2)\n }\n\n \/\/\/ Creates a version 3 room ID.\n pub fn version_3() -> Self {\n Self(InnerRoomVersionId::Version3)\n }\n\n \/\/\/ Creates a version 4 room ID.\n pub fn version_4() -> Self {\n Self(InnerRoomVersionId::Version4)\n }\n\n \/\/\/ Creates a custom room version ID from the given string slice.\n pub fn custom(id: &str) -> Self {\n Self(InnerRoomVersionId::Custom(id.to_string()))\n }\n\n \/\/\/ Whether or not this room version is an official one specified by the Matrix protocol.\n 
pub fn is_official(&self) -> bool {\n !self.is_custom()\n }\n\n \/\/\/ Whether or not this is a custom room version.\n pub fn is_custom(&self) -> bool {\n match self.0 {\n InnerRoomVersionId::Custom(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Whether or not this is a version 1 room.\n pub fn is_version_1(&self) -> bool {\n self.0 == InnerRoomVersionId::Version1\n }\n\n \/\/\/ Whether or not this is a version 2 room.\n pub fn is_version_2(&self) -> bool {\n self.0 == InnerRoomVersionId::Version2\n }\n\n \/\/\/ Whether or not this is a version 3 room.\n pub fn is_version_3(&self) -> bool {\n self.0 == InnerRoomVersionId::Version3\n }\n\n \/\/\/ Whether or not this is a version 4 room.\n pub fn is_version_4(&self) -> bool {\n self.0 == InnerRoomVersionId::Version4\n }\n}\n\nimpl Display for RoomVersionId {\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n let message = match self.0 {\n InnerRoomVersionId::Version1 => \"1\",\n InnerRoomVersionId::Version2 => \"2\",\n InnerRoomVersionId::Version3 => \"3\",\n InnerRoomVersionId::Version4 => \"4\",\n InnerRoomVersionId::Custom(ref version) => version,\n };\n\n write!(f, \"{}\", message)\n }\n}\n\nimpl Serialize for RoomVersionId {\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n where\n S: Serializer,\n {\n serializer.serialize_str(&self.to_string())\n }\n}\n\nimpl<'de> Deserialize<'de> for RoomVersionId {\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n where\n D: Deserializer<'de>,\n {\n deserializer.deserialize_any(RoomVersionIdVisitor)\n }\n}\n\nimpl<'a> TryFrom<&'a str> for RoomVersionId {\n type Error = Error;\n\n \/\/\/ Attempts to create a new Matrix room version ID from a string representation.\n fn try_from(room_version_id: &'a str) -> Result<Self, Error> {\n let version = match room_version_id {\n \"1\" => Self(InnerRoomVersionId::Version1),\n \"2\" => Self(InnerRoomVersionId::Version2),\n \"3\" => Self(InnerRoomVersionId::Version3),\n \"4\" => Self(InnerRoomVersionId::Version4),\n custom => {\n if custom.is_empty() {\n return Err(Error::MinimumLengthNotSatisfied);\n } else if custom.chars().count() > MAX_CODE_POINTS {\n return Err(Error::MaximumLengthExceeded);\n } else {\n Self(InnerRoomVersionId::Custom(custom.to_string()))\n }\n }\n };\n\n Ok(version)\n }\n}\n\nimpl<'de> Visitor<'de> for RoomVersionIdVisitor {\n type Value = RoomVersionId;\n\n fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {\n write!(formatter, \"a Matrix room version ID as a string\")\n }\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n where\n E: SerdeError,\n {\n match RoomVersionId::try_from(v) {\n Ok(room_id) => Ok(room_id),\n Err(_) => Err(SerdeError::invalid_value(Unexpected::Str(v), &self)),\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::convert::TryFrom;\n\n use serde_json::{from_str, to_string};\n\n use super::RoomVersionId;\n use crate::error::Error;\n\n #[test]\n fn valid_version_1_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"1\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"1\"\n );\n }\n #[test]\n fn valid_version_2_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"2\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"2\"\n );\n }\n #[test]\n fn valid_version_3_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"3\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"3\"\n );\n }\n #[test]\n fn valid_version_4_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"4\")\n 
.expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"4\"\n );\n }\n\n #[test]\n fn valid_custom_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"io.ruma.1\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"io.ruma.1\"\n );\n }\n\n #[test]\n fn empty_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"\"),\n Err(Error::MinimumLengthNotSatisfied)\n );\n }\n\n #[test]\n fn over_max_code_point_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"0123456789012345678901234567890123456789\"),\n Err(Error::MaximumLengthExceeded)\n );\n }\n\n #[test]\n fn serialize_official_room_id() {\n assert_eq!(\n to_string(&RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\"))\n .expect(\"Failed to convert RoomVersionId to JSON.\"),\n r#\"\"1\"\"#\n );\n }\n\n #[test]\n fn deserialize_official_room_id() {\n let deserialized =\n from_str::<RoomVersionId>(r#\"\"1\"\"#).expect(\"Failed to convert RoomVersionId to JSON.\");\n\n assert!(deserialized.is_version_1());\n assert!(deserialized.is_official());\n\n assert_eq!(\n deserialized,\n RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\")\n );\n }\n\n #[test]\n fn serialize_custom_room_id() {\n assert_eq!(\n to_string(\n &RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\")\n )\n .expect(\"Failed to convert RoomVersionId to JSON.\"),\n r#\"\"io.ruma.1\"\"#\n );\n }\n\n #[test]\n fn deserialize_custom_room_id() {\n let deserialized = from_str::<RoomVersionId>(r#\"\"io.ruma.1\"\"#)\n .expect(\"Failed to convert RoomVersionId to JSON.\");\n\n assert!(deserialized.is_custom());\n\n assert_eq!(\n deserialized,\n RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\")\n );\n }\n\n #[test]\n fn constructors() {\n assert!(RoomVersionId::version_1().is_version_1());\n assert!(RoomVersionId::version_2().is_version_2());\n assert!(RoomVersionId::version_3().is_version_3());\n assert!(RoomVersionId::version_4().is_version_4());\n assert!(RoomVersionId::custom(\"foo\").is_custom());\n }\n\n #[test]\n fn predicate_methods() {\n let version_1 = RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\");\n let version_2 = RoomVersionId::try_from(\"2\").expect(\"Failed to create RoomVersionId.\");\n let version_3 = RoomVersionId::try_from(\"3\").expect(\"Failed to create RoomVersionId.\");\n let version_4 = RoomVersionId::try_from(\"4\").expect(\"Failed to create RoomVersionId.\");\n let custom = RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\");\n\n assert!(version_1.is_version_1());\n assert!(version_2.is_version_2());\n assert!(version_3.is_version_3());\n assert!(version_4.is_version_4());\n\n assert!(!version_1.is_version_2());\n assert!(!version_1.is_version_3());\n assert!(!version_1.is_version_4());\n\n assert!(version_1.is_official());\n assert!(version_2.is_official());\n assert!(version_3.is_official());\n assert!(version_4.is_official());\n\n assert!(!version_1.is_custom());\n assert!(!version_2.is_custom());\n assert!(!version_3.is_custom());\n assert!(!version_4.is_custom());\n\n assert!(custom.is_custom());\n assert!(!custom.is_official());\n assert!(!custom.is_version_1());\n assert!(!custom.is_version_2());\n assert!(!custom.is_version_3());\n assert!(!custom.is_version_4());\n }\n}\n<commit_msg>Add support for room version 5.<commit_after>\/\/! 
Matrix room version identifiers.\n\nuse std::{\n convert::TryFrom,\n fmt::{Display, Formatter, Result as FmtResult},\n};\n\n#[cfg(feature = \"diesel\")]\nuse diesel::sql_types::Text;\nuse serde::{\n de::{Error as SerdeError, Unexpected, Visitor},\n Deserialize, Deserializer, Serialize, Serializer,\n};\n\nuse crate::error::Error;\n\n\/\/\/ Room version identifiers cannot be more than 32 code points.\nconst MAX_CODE_POINTS: usize = 32;\n\n\/\/\/ A Matrix room version ID.\n\/\/\/\n\/\/\/ A `RoomVersionId` can be or converted or deserialized from a string slice, and can be converted\n\/\/\/ or serialized back into a string as needed.\n\/\/\/\n\/\/\/ ```\n\/\/\/ # use std::convert::TryFrom;\n\/\/\/ # use ruma_identifiers::RoomVersionId;\n\/\/\/ assert_eq!(RoomVersionId::try_from(\"1\").unwrap().to_string(), \"1\");\n\/\/\/ ```\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n#[cfg_attr(feature = \"diesel\", derive(FromSqlRow, QueryId, AsExpression, SqlType))]\n#[cfg_attr(feature = \"diesel\", sql_type = \"Text\")]\npub struct RoomVersionId(InnerRoomVersionId);\n\n\/\/\/ Possibile values for room version, distinguishing between official Matrix versions and custom\n\/\/\/ versions.\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\nenum InnerRoomVersionId {\n \/\/\/ A version 1 room.\n Version1,\n\n \/\/\/ A version 2 room.\n Version2,\n\n \/\/\/ A version 3 room.\n Version3,\n\n \/\/\/ A version 4 room.\n Version4,\n\n \/\/\/ A version 5 room.\n Version5,\n\n \/\/\/ A custom room version.\n Custom(String),\n}\n\n\/\/\/ A serde visitor for `RoomVersionId`.\nstruct RoomVersionIdVisitor;\n\nimpl RoomVersionId {\n \/\/\/ Creates a version 1 room ID.\n pub fn version_1() -> Self {\n Self(InnerRoomVersionId::Version1)\n }\n\n \/\/\/ Creates a version 2 room ID.\n pub fn version_2() -> Self {\n Self(InnerRoomVersionId::Version2)\n }\n\n \/\/\/ Creates a version 3 room ID.\n pub fn version_3() -> Self {\n Self(InnerRoomVersionId::Version3)\n }\n\n \/\/\/ Creates a version 4 room ID.\n pub fn version_4() -> Self {\n Self(InnerRoomVersionId::Version4)\n }\n\n \/\/\/ Creates a version 5 room ID.\n pub fn version_5() -> Self {\n Self(InnerRoomVersionId::Version5)\n }\n\n \/\/\/ Creates a custom room version ID from the given string slice.\n pub fn custom(id: &str) -> Self {\n Self(InnerRoomVersionId::Custom(id.to_string()))\n }\n\n \/\/\/ Whether or not this room version is an official one specified by the Matrix protocol.\n pub fn is_official(&self) -> bool {\n !self.is_custom()\n }\n\n \/\/\/ Whether or not this is a custom room version.\n pub fn is_custom(&self) -> bool {\n match self.0 {\n InnerRoomVersionId::Custom(_) => true,\n _ => false,\n }\n }\n\n \/\/\/ Whether or not this is a version 1 room.\n pub fn is_version_1(&self) -> bool {\n self.0 == InnerRoomVersionId::Version1\n }\n\n \/\/\/ Whether or not this is a version 2 room.\n pub fn is_version_2(&self) -> bool {\n self.0 == InnerRoomVersionId::Version2\n }\n\n \/\/\/ Whether or not this is a version 3 room.\n pub fn is_version_3(&self) -> bool {\n self.0 == InnerRoomVersionId::Version3\n }\n\n \/\/\/ Whether or not this is a version 4 room.\n pub fn is_version_4(&self) -> bool {\n self.0 == InnerRoomVersionId::Version4\n }\n\n \/\/\/ Whether or not this is a version 5 room.\n pub fn is_version_5(&self) -> bool {\n self.0 == InnerRoomVersionId::Version5\n }\n}\n\nimpl Display for RoomVersionId {\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n let message = match self.0 {\n InnerRoomVersionId::Version1 => \"1\",\n InnerRoomVersionId::Version2 
=> \"2\",\n InnerRoomVersionId::Version3 => \"3\",\n InnerRoomVersionId::Version4 => \"4\",\n InnerRoomVersionId::Version5 => \"5\",\n InnerRoomVersionId::Custom(ref version) => version,\n };\n\n write!(f, \"{}\", message)\n }\n}\n\nimpl Serialize for RoomVersionId {\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n where\n S: Serializer,\n {\n serializer.serialize_str(&self.to_string())\n }\n}\n\nimpl<'de> Deserialize<'de> for RoomVersionId {\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n where\n D: Deserializer<'de>,\n {\n deserializer.deserialize_any(RoomVersionIdVisitor)\n }\n}\n\nimpl<'a> TryFrom<&'a str> for RoomVersionId {\n type Error = Error;\n\n \/\/\/ Attempts to create a new Matrix room version ID from a string representation.\n fn try_from(room_version_id: &'a str) -> Result<Self, Error> {\n let version = match room_version_id {\n \"1\" => Self(InnerRoomVersionId::Version1),\n \"2\" => Self(InnerRoomVersionId::Version2),\n \"3\" => Self(InnerRoomVersionId::Version3),\n \"4\" => Self(InnerRoomVersionId::Version4),\n \"5\" => Self(InnerRoomVersionId::Version5),\n custom => {\n if custom.is_empty() {\n return Err(Error::MinimumLengthNotSatisfied);\n } else if custom.chars().count() > MAX_CODE_POINTS {\n return Err(Error::MaximumLengthExceeded);\n } else {\n Self(InnerRoomVersionId::Custom(custom.to_string()))\n }\n }\n };\n\n Ok(version)\n }\n}\n\nimpl<'de> Visitor<'de> for RoomVersionIdVisitor {\n type Value = RoomVersionId;\n\n fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {\n write!(formatter, \"a Matrix room version ID as a string\")\n }\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n where\n E: SerdeError,\n {\n match RoomVersionId::try_from(v) {\n Ok(room_id) => Ok(room_id),\n Err(_) => Err(SerdeError::invalid_value(Unexpected::Str(v), &self)),\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::convert::TryFrom;\n\n use serde_json::{from_str, to_string};\n\n use super::RoomVersionId;\n use crate::error::Error;\n\n #[test]\n fn valid_version_1_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"1\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"1\"\n );\n }\n\n #[test]\n fn valid_version_2_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"2\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"2\"\n );\n }\n\n #[test]\n fn valid_version_3_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"3\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"3\"\n );\n }\n\n #[test]\n fn valid_version_4_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"4\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"4\"\n );\n }\n\n #[test]\n fn valid_version_5_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"5\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"5\"\n );\n }\n\n #[test]\n fn valid_custom_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"io.ruma.1\")\n .expect(\"Failed to create RoomVersionId.\")\n .to_string(),\n \"io.ruma.1\"\n );\n }\n\n #[test]\n fn empty_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"\"),\n Err(Error::MinimumLengthNotSatisfied)\n );\n }\n\n #[test]\n fn over_max_code_point_room_version_id() {\n assert_eq!(\n RoomVersionId::try_from(\"0123456789012345678901234567890123456789\"),\n Err(Error::MaximumLengthExceeded)\n );\n }\n\n #[test]\n fn serialize_official_room_id() {\n assert_eq!(\n 
to_string(&RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\"))\n .expect(\"Failed to convert RoomVersionId to JSON.\"),\n r#\"\"1\"\"#\n );\n }\n\n #[test]\n fn deserialize_official_room_id() {\n let deserialized =\n from_str::<RoomVersionId>(r#\"\"1\"\"#).expect(\"Failed to convert RoomVersionId to JSON.\");\n\n assert!(deserialized.is_version_1());\n assert!(deserialized.is_official());\n\n assert_eq!(\n deserialized,\n RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\")\n );\n }\n\n #[test]\n fn serialize_custom_room_id() {\n assert_eq!(\n to_string(\n &RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\")\n )\n .expect(\"Failed to convert RoomVersionId to JSON.\"),\n r#\"\"io.ruma.1\"\"#\n );\n }\n\n #[test]\n fn deserialize_custom_room_id() {\n let deserialized = from_str::<RoomVersionId>(r#\"\"io.ruma.1\"\"#)\n .expect(\"Failed to convert RoomVersionId to JSON.\");\n\n assert!(deserialized.is_custom());\n\n assert_eq!(\n deserialized,\n RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\")\n );\n }\n\n #[test]\n fn constructors() {\n assert!(RoomVersionId::version_1().is_version_1());\n assert!(RoomVersionId::version_2().is_version_2());\n assert!(RoomVersionId::version_3().is_version_3());\n assert!(RoomVersionId::version_4().is_version_4());\n assert!(RoomVersionId::version_5().is_version_5());\n assert!(RoomVersionId::custom(\"foo\").is_custom());\n }\n\n #[test]\n #[allow(clippy::cognitive_complexity)]\n fn predicate_methods() {\n let version_1 = RoomVersionId::try_from(\"1\").expect(\"Failed to create RoomVersionId.\");\n let version_2 = RoomVersionId::try_from(\"2\").expect(\"Failed to create RoomVersionId.\");\n let version_3 = RoomVersionId::try_from(\"3\").expect(\"Failed to create RoomVersionId.\");\n let version_4 = RoomVersionId::try_from(\"4\").expect(\"Failed to create RoomVersionId.\");\n let version_5 = RoomVersionId::try_from(\"5\").expect(\"Failed to create RoomVersionId.\");\n let custom = RoomVersionId::try_from(\"io.ruma.1\").expect(\"Failed to create RoomVersionId.\");\n\n assert!(version_1.is_version_1());\n assert!(version_2.is_version_2());\n assert!(version_3.is_version_3());\n assert!(version_4.is_version_4());\n assert!(version_5.is_version_5());\n\n assert!(!version_1.is_version_2());\n assert!(!version_1.is_version_3());\n assert!(!version_1.is_version_4());\n assert!(!version_1.is_version_5());\n\n assert!(version_1.is_official());\n assert!(version_2.is_official());\n assert!(version_3.is_official());\n assert!(version_4.is_official());\n assert!(version_5.is_official());\n\n assert!(!version_1.is_custom());\n assert!(!version_2.is_custom());\n assert!(!version_3.is_custom());\n assert!(!version_4.is_custom());\n assert!(!version_5.is_custom());\n\n assert!(custom.is_custom());\n assert!(!custom.is_official());\n assert!(!custom.is_version_1());\n assert!(!custom.is_version_2());\n assert!(!custom.is_version_3());\n assert!(!custom.is_version_4());\n assert!(!custom.is_version_5());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add RedactionEvent.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse 
glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::build(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n fn get_file_ids(&self, m: &Module) -> Option<Vec<FileID>> {\n let list = glob(&self.prefix_of_files_for_module(m)[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id : '{}'\", id);\n self.prefix_of_files_for_module(owner) + \"-\" + &id[..] 
+ \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<commit_msg>Refactor uses of file-id-parsing functions to ensure we honnor the Result type<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::build(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n fn get_file_ids(&self, m: &Module) -> Option<Vec<FileID>> {\n let list = glob(&self.prefix_of_files_for_module(m)[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n if let Ok(id) = from_pathbuf(&path) {\n v.push(id);\n } else {\n error!(\"Cannot parse ID from path: {:?}\", path);\n }\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n 
None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .filter_map(Result::ok)\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. 
Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id : '{}'\", id);\n self.prefix_of_files_for_module(owner) + \"-\" + &id[..] + \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reexport Tags<commit_after>use structure;\n\npub mod integer;\npub mod sequence;\npub mod octetstring;\npub mod boolean;\npub mod null;\n\n\/\/ Reexport 
everything\npub use integer::Integer;\npub use sequence::{Sequence, SequenceOf, SetOf};\npub use octetstring::OctetString;\npub use boolean::Boolean;\npub use null::Null;\n\npub trait ASNTag {\n \/\/\/ Encode yourself into a generic Tag format.\n \/\/\/ \n \/\/\/ The only thing that changes between types is how to encode the value they wrap into bytes,\n \/\/\/ however the encoding of the class and id does not change. By first converting the tag into\n \/\/\/ a more generic tag (with already encoded payload), we don't have to reimplement the\n \/\/\/ encoding step for class & id every time.\n fn into_structure(self) -> structure::StructureTag;\n}\n\n#[derive(Clone, Debug)]\n\/\/\/ This enum does not cover all ASN.1 Types but only the types needed for LDAPv3.\npub enum Tag {\n Integer(integer::Integer),\n Sequence(sequence::Sequence),\n OctetString(octetstring::OctetString),\n Boolean(boolean::Boolean),\n Null(null::Null),\n StructureTag(structure::StructureTag),\n}\n\nimpl ASNTag for Tag {\n fn into_structure(self) -> structure::StructureTag {\n match self {\n Tag::Integer(i) => i.into_structure(),\n Tag::Sequence(i) => i.into_structure(),\n Tag::OctetString(i) => i.into_structure(),\n Tag::Boolean(i) => i.into_structure(),\n Tag::Null(i) => i.into_structure(),\n Tag::StructureTag(s) => s\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![deny(\n dead_code,\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\n#[macro_use] extern crate log;\nextern crate toml;\nextern crate fs2;\n\nextern crate libimagstore;\nextern crate libimagentrylink;\nextern crate libimagerror;\n\npub mod debug;\npub mod flock;\npub mod linkverify;\n\n<commit_msg>Import libimagerror with macros, so we can have own error types internally<commit_after>#![deny(\n dead_code,\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\n#[macro_use] extern crate log;\nextern crate toml;\nextern crate fs2;\n\nextern crate libimagstore;\nextern crate libimagentrylink;\n#[macro_use] extern crate libimagerror;\n\npub mod debug;\npub mod flock;\npub mod linkverify;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add build.rs for codegen module.<commit_after>#![feature(process, path, env)]\nuse std::process::Command;\nuse std::env::current_dir;\n\nfn main() {\n\t\n assert!(\n \tCommand::new(\"clang++\")\n \t\t.args(&[\"tests\/test-ir.cc\", \"-S\", \"-emit-llvm\", \"-O2\", \"-o\", \"tests\/test-ir.ll\"])\n \t.status()\n \t.unwrap()\n \t.success()\n \t);\n\n}<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ zhttpto.rs\n\/\/\n\/\/ Starting code for PS1\n\/\/ Running on Rust 0.9\n\/\/\n\/\/ Note that this code has serious security risks! 
You should not run it \n\/\/ on any system with access to sensitive files.\n\/\/ \n\/\/ University of Virginia - cs4414 Spring 2014\n\/\/ Weilin Xu and David Evans\n\/\/ Version 0.3\n\n#[feature(globs)];\nuse std::io::*;\nuse std::io::net::ip::{SocketAddr};\nuse std::{str};\n\nstatic IP: &'static str = \"127.0.0.1\";\nstatic PORT: int = 4414;\n\nfn main() {\n let addr = from_str::<SocketAddr>(format!(\"{:s}:{:d}\", IP, PORT)).unwrap();\n let mut acceptor = net::tcp::TcpListener::bind(addr).listen();\n \n println(format!(\"Listening on [{:s}] ...\", addr.to_str()));\n \n for stream in acceptor.incoming() {\n \/\/ Spawn a task to handle the connection\n do spawn {\n let mut stream = stream;\n \n match stream {\n Some(ref mut s) => {\n match s.peer_name() {\n Some(pn) => {println(format!(\"Received connection from: [{:s}]\", pn.to_str()));},\n None => ()\n }\n },\n None => ()\n }\n \n let mut buf = [0, ..500];\n stream.read(buf);\n let request_str = str::from_utf8(buf);\n println(format!(\"Received request :\\n{:s}\", request_str));\n \n let response: ~str = \n ~\"HTTP\/1.1 200 OK\\r\\nContent-Type: text\/html; charset=UTF-8\\r\\n\\r\\n\n <doctype !html><html><head><title>Hello, Rust!<\/title>\n <style>body { background-color: #111; color: #FFEEAA }\n h1 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm red}\n h2 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm green}\n <\/style><\/head>\n <body>\n <h1>Greetings, Krusty!<\/h1>\n <\/body><\/html>\\r\\n\";\n stream.write(response.as_bytes());\n println!(\"Connection terminates.\");\n }\n }\n}\n<commit_msg>Count works, needs to be displayed in HTML for web page<commit_after>\/\/\n\/\/ zhttpto.rs\n\/\/\n\/\/ Starting code for PS1\n\/\/ Running on Rust 0.9\n\/\/\n\/\/ Note that this code has serious security risks! 
You should not run it \n\/\/ on any system with access to sensitive files.\n\/\/ \n\/\/ University of Virginia - cs4414 Spring 2014\n\/\/ Weilin Xu and David Evans\n\/\/ Version 0.3\n\n#[feature(globs)];\nuse std::io::*;\nuse std::io::net::ip::{SocketAddr};\nuse std::{str};\n\nstatic IP: &'static str = \"127.0.0.1\";\nstatic PORT: int = 4414;\n\nfn main() {\n let addr = from_str::<SocketAddr>(format!(\"{:s}:{:d}\", IP, PORT)).unwrap();\n let mut acceptor = net::tcp::TcpListener::bind(addr).listen();\n let mut visitor_count = 0;\n \n println(format!(\"Listening on [{:s}] ...\", addr.to_str()));\n \n for stream in acceptor.incoming() {\n \/\/ Spawn a task to handle the connection\n\n\tvisitor_count += 1;\n\tlet count = visitor_count;\n println!(\"Visitor count: {}\", visitor_count);\n\n\tdo spawn {\n let mut stream = stream;\n \n match stream {\n Some(ref mut s) => {\n match s.peer_name() {\n Some(pn) => {println(format!(\"Received connection from: [{:s}]\", pn.to_str()));},\n None => ()\n }\n },\n None => ()\n }\n \n let mut buf = [0, ..500];\n stream.read(buf);\n let request_str = str::from_utf8(buf);\n println(format!(\"Received request :\\n{:s}\", request_str));\n \n let response: ~str = \n ~\"HTTP\/1.1 200 OK\\r\\nContent-Type: text\/html; charset=UTF-8\\r\\n\\r\\n\n <doctype !html><html><head><title>Hello, Rust!<\/title>\n <style>body { background-color: #111; color: #FFEEAA }\n h1 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm red}\n h2 { font-size:2cm; text-align: center; color: black; text-shadow: 0 0 4mm green}\n <\/style><\/head>\n <body>\n <h1>Greetings, Krusty!<\/h1>\n <\/body><\/html>\\r\\n\";\n stream.write(response.as_bytes());\n println!(\"Connection terminates.\");\n\t println!(\"Count: {}\", count);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n#![deny(\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\n#[macro_use] extern crate log;\nextern crate clap;\n#[macro_use] extern crate semver;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate version;\n\nextern crate libimagentrylink;\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagutil;\n\nuse std::ops::Deref;\n\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagstore::error::StoreError;\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};\nuse libimagentrylink::external::ExternalLinker;\nuse libimagutil::warn_result::*;\nuse libimagutil::warn_exit::warn_exit;\nuse libimagutil::info_result::*;\nuse clap::ArgMatches;\nuse url::Url;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-link\",\n &version!()[..],\n \"Link entries\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n match name {\n \"internal\" => handle_internal_linking(&rt),\n \"external\" => handle_external_linking(&rt),\n _ => warn_exit(\"No commandline call\", 1)\n }\n });\n}\n\nfn handle_internal_linking(rt: &Runtime) {\n use libimagentrylink::internal::InternalLinker;\n\n debug!(\"Handle internal linking call\");\n let cmd = rt.cli().subcommand_matches(\"internal\").unwrap();\n\n if cmd.is_present(\"list\") {\n debug!(\"List...\");\n for entry in cmd.value_of(\"list\").unwrap().split(',') {\n debug!(\"Listing for '{}'\", entry);\n match get_entry_by_name(rt, entry) {\n Ok(Some(e)) => {\n e.get_internal_links()\n .map(|links| {\n let i = links\n .filter_map(|l| {\n l.to_str()\n .map_warn_err(|e| format!(\"Failed to convert StoreId to string: {:?}\", e))\n .ok()\n })\n .enumerate();\n\n for (i, link) in i {\n println!(\"{: <3}: {}\", i, link);\n }\n })\n .map_err_trace()\n .ok();\n },\n\n Ok(None) => {\n warn!(\"Entry not found: {:?}\", entry);\n break;\n }\n\n Err(e) => {\n trace_error(&e);\n break;\n },\n }\n }\n debug!(\"Listing ready!\");\n } else {\n let mut from = {\n let from = get_from_entry(&rt);\n if from.is_none() {\n warn_exit(\"No 'from' entry\", 1);\n }\n from.unwrap()\n };\n debug!(\"Link from = {:?}\", from.deref());\n\n let to = {\n let to = get_to_entries(&rt);\n if to.is_none() {\n warn_exit(\"No 'to' entry\", 1);\n }\n to.unwrap()\n };\n debug!(\"Link to = {:?}\", to.iter().map(|f| f.deref()).collect::<Vec<&Entry>>());\n\n match cmd.subcommand_name() {\n Some(\"add\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.add_internal_link(&mut from) {\n trace_error_exit(&e, 1);\n }\n }\n },\n\n Some(\"remove\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.remove_internal_link(&mut from) {\n trace_error_exit(&e, 1);\n }\n }\n },\n\n _ => unreachable!(),\n };\n }\n}\n\nfn get_from_entry<'a>(rt: &'a Runtime) -> Option<FileLockEntry<'a>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ 
safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .value_of(\"from\")\n .and_then(|from_name| {\n match get_entry_by_name(rt, from_name) {\n Err(e) => {\n debug!(\"We couldn't get the entry from name: '{:?}'\", from_name);\n trace_error(&e); None\n },\n Ok(Some(e)) => Some(e),\n Ok(None) => None,\n }\n\n })\n}\n\nfn get_to_entries<'a>(rt: &'a Runtime) -> Option<Vec<FileLockEntry<'a>>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .values_of(\"to\")\n .map(|values| {\n let mut v = vec![];\n for entry in values.map(|v| get_entry_by_name(rt, v)) {\n match entry {\n Err(e) => trace_error(&e),\n Ok(Some(e)) => v.push(e),\n Ok(None) => warn!(\"Entry not found: {:?}\", v),\n }\n }\n v\n })\n}\n\nfn get_entry_by_name<'a>(rt: &'a Runtime, name: &str) -> Result<Option<FileLockEntry<'a>>, StoreError> {\n use std::path::PathBuf;\n use libimagstore::storeid::StoreId;\n\n StoreId::new(Some(rt.store().path().clone()), PathBuf::from(name))\n .and_then(|id| rt.store().get(id))\n}\n\nfn handle_external_linking(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"external\").unwrap();\n let entry_name = scmd.value_of(\"id\").unwrap(); \/\/ enforced by clap\n let entry = get_entry_by_name(rt, entry_name);\n if entry.is_err() {\n trace_error_exit(&entry.unwrap_err(), 1);\n }\n let entry = entry.unwrap();\n if entry.is_none() {\n warn!(\"Entry not found: {:?}\", entry_name);\n return;\n }\n let mut entry = entry.unwrap();\n\n if scmd.is_present(\"add\") {\n debug!(\"Adding link to entry!\");\n add_link_to_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"remove\") {\n debug!(\"Removing link from entry!\");\n remove_link_from_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"set\") {\n debug!(\"Setting links in entry!\");\n set_links_for_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"list\") {\n debug!(\"Listing links in entry!\");\n list_links_for_entry(rt.store(), &mut entry);\n return;\n }\n\n panic!(\"Clap failed to enforce one of 'add', 'remove', 'set' or 'list'\");\n}\n\nfn add_link_to_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n Url::parse(matches.value_of(\"add\").unwrap())\n .map_err_trace_exit(1)\n .map(|link| entry.add_external_link(store, link).map_err_trace().map_info_str(\"Ok\"))\n .ok();\n}\n\nfn remove_link_from_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n Url::parse(matches.value_of(\"remove\").unwrap())\n .map_err_trace_exit(1)\n .map(|link| entry.remove_external_link(store, link).map_err_trace().map_info_str(\"Ok\"))\n .ok();\n}\n\nfn set_links_for_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let links = matches\n .value_of(\"links\")\n .map(String::from)\n .unwrap()\n .split(',')\n .map(|uri| {\n match Url::parse(uri) {\n Err(e) => {\n warn!(\"Could not parse '{}' as URL, ignoring\", uri);\n trace_error(&e);\n None\n },\n Ok(u) => Some(u),\n }\n })\n .filter_map(|x| x)\n .collect();\n\n entry.set_external_links(store, links)\n .map_err_trace()\n .map_info_str(\"Ok\")\n .ok();\n}\n\nfn list_links_for_entry(store: &Store, entry: &mut FileLockEntry) {\n entry.get_external_links(store)\n .and_then(|links| {\n for (i, link) in links.enumerate() {\n match link {\n 
Ok(link) => println!(\"{: <3}: {}\", i, link),\n Err(e) => trace_error(&e),\n }\n }\n Ok(())\n })\n .map_err_trace()\n .map_info_str(\"Ok\")\n .ok();\n}\n\n<commit_msg>Rewrite internal linking handling to use less unwrap()<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n#![deny(\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\n#[macro_use] extern crate log;\nextern crate clap;\n#[macro_use] extern crate semver;\nextern crate toml;\nextern crate url;\n#[macro_use] extern crate version;\n\nextern crate libimagentrylink;\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagutil;\n\nuse std::ops::Deref;\n\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagstore::error::StoreError;\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagerror::trace::{MapErrTrace, trace_error, trace_error_exit};\nuse libimagentrylink::external::ExternalLinker;\nuse libimagutil::warn_result::*;\nuse libimagutil::warn_exit::warn_exit;\nuse libimagutil::info_result::*;\nuse clap::ArgMatches;\nuse url::Url;\n\nmod ui;\n\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-link\",\n &version!()[..],\n \"Link entries\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n match name {\n \"internal\" => handle_internal_linking(&rt),\n \"external\" => handle_external_linking(&rt),\n _ => warn_exit(\"No commandline call\", 1)\n }\n });\n}\n\nfn handle_internal_linking(rt: &Runtime) {\n use libimagentrylink::internal::InternalLinker;\n\n debug!(\"Handle internal linking call\");\n let cmd = rt.cli().subcommand_matches(\"internal\").unwrap();\n\n match cmd.value_of(\"list\") {\n Some(list) => {\n debug!(\"List...\");\n for entry in list.split(',') {\n debug!(\"Listing for '{}'\", entry);\n match get_entry_by_name(rt, entry) {\n Ok(Some(e)) => {\n e.get_internal_links()\n .map(|links| {\n let i = links\n .filter_map(|l| {\n l.to_str()\n .map_warn_err(|e| format!(\"Failed to convert StoreId to string: {:?}\", e))\n .ok()\n })\n .enumerate();\n\n for (i, link) in i {\n println!(\"{: <3}: {}\", i, link);\n }\n })\n .map_err_trace()\n .ok();\n },\n\n Ok(None) => {\n warn!(\"Entry not found: {:?}\", entry);\n break;\n }\n\n Err(e) => {\n trace_error(&e);\n break;\n },\n }\n }\n debug!(\"Listing ready!\");\n },\n None => {\n let mut from = match get_from_entry(&rt) {\n None => 
warn_exit(\"No 'from' entry\", 1),\n Some(s) => s,\n };\n debug!(\"Link from = {:?}\", from.deref());\n\n let to = match get_to_entries(&rt) {\n None => warn_exit(\"No 'to' entry\", 1),\n Some(to) => to,\n };\n debug!(\"Link to = {:?}\", to.iter().map(|f| f.deref()).collect::<Vec<&Entry>>());\n\n match cmd.subcommand_name() {\n Some(\"add\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.add_internal_link(&mut from) {\n trace_error_exit(&e, 1);\n }\n }\n },\n\n Some(\"remove\") => {\n for mut to_entry in to {\n if let Err(e) = to_entry.remove_internal_link(&mut from) {\n trace_error_exit(&e, 1);\n }\n }\n },\n\n _ => unreachable!(),\n };\n }\n }\n}\n\nfn get_from_entry<'a>(rt: &'a Runtime) -> Option<FileLockEntry<'a>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .value_of(\"from\")\n .and_then(|from_name| {\n match get_entry_by_name(rt, from_name) {\n Err(e) => {\n debug!(\"We couldn't get the entry from name: '{:?}'\", from_name);\n trace_error(&e); None\n },\n Ok(Some(e)) => Some(e),\n Ok(None) => None,\n }\n\n })\n}\n\nfn get_to_entries<'a>(rt: &'a Runtime) -> Option<Vec<FileLockEntry<'a>>> {\n rt.cli()\n .subcommand_matches(\"internal\")\n .unwrap() \/\/ safe, we know there is an \"internal\" subcommand\"\n .subcommand_matches(\"add\")\n .unwrap() \/\/ safe, we know there is an \"add\" subcommand\n .values_of(\"to\")\n .map(|values| {\n let mut v = vec![];\n for entry in values.map(|v| get_entry_by_name(rt, v)) {\n match entry {\n Err(e) => trace_error(&e),\n Ok(Some(e)) => v.push(e),\n Ok(None) => warn!(\"Entry not found: {:?}\", v),\n }\n }\n v\n })\n}\n\nfn get_entry_by_name<'a>(rt: &'a Runtime, name: &str) -> Result<Option<FileLockEntry<'a>>, StoreError> {\n use std::path::PathBuf;\n use libimagstore::storeid::StoreId;\n\n StoreId::new(Some(rt.store().path().clone()), PathBuf::from(name))\n .and_then(|id| rt.store().get(id))\n}\n\nfn handle_external_linking(rt: &Runtime) {\n let scmd = rt.cli().subcommand_matches(\"external\").unwrap();\n let entry_name = scmd.value_of(\"id\").unwrap(); \/\/ enforced by clap\n let entry = get_entry_by_name(rt, entry_name);\n if entry.is_err() {\n trace_error_exit(&entry.unwrap_err(), 1);\n }\n let entry = entry.unwrap();\n if entry.is_none() {\n warn!(\"Entry not found: {:?}\", entry_name);\n return;\n }\n let mut entry = entry.unwrap();\n\n if scmd.is_present(\"add\") {\n debug!(\"Adding link to entry!\");\n add_link_to_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"remove\") {\n debug!(\"Removing link from entry!\");\n remove_link_from_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"set\") {\n debug!(\"Setting links in entry!\");\n set_links_for_entry(rt.store(), scmd, &mut entry);\n return;\n }\n\n if scmd.is_present(\"list\") {\n debug!(\"Listing links in entry!\");\n list_links_for_entry(rt.store(), &mut entry);\n return;\n }\n\n panic!(\"Clap failed to enforce one of 'add', 'remove', 'set' or 'list'\");\n}\n\nfn add_link_to_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n Url::parse(matches.value_of(\"add\").unwrap())\n .map_err_trace_exit(1)\n .map(|link| entry.add_external_link(store, link).map_err_trace().map_info_str(\"Ok\"))\n .ok();\n}\n\nfn remove_link_from_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n 
Url::parse(matches.value_of(\"remove\").unwrap())\n .map_err_trace_exit(1)\n .map(|link| entry.remove_external_link(store, link).map_err_trace().map_info_str(\"Ok\"))\n .ok();\n}\n\nfn set_links_for_entry(store: &Store, matches: &ArgMatches, entry: &mut FileLockEntry) {\n let links = matches\n .value_of(\"links\")\n .map(String::from)\n .unwrap()\n .split(',')\n .map(|uri| {\n match Url::parse(uri) {\n Err(e) => {\n warn!(\"Could not parse '{}' as URL, ignoring\", uri);\n trace_error(&e);\n None\n },\n Ok(u) => Some(u),\n }\n })\n .filter_map(|x| x)\n .collect();\n\n entry.set_external_links(store, links)\n .map_err_trace()\n .map_info_str(\"Ok\")\n .ok();\n}\n\nfn list_links_for_entry(store: &Store, entry: &mut FileLockEntry) {\n entry.get_external_links(store)\n .and_then(|links| {\n for (i, link) in links.enumerate() {\n match link {\n Ok(link) => println!(\"{: <3}: {}\", i, link),\n Err(e) => trace_error(&e),\n }\n }\n Ok(())\n })\n .map_err_trace()\n .map_info_str(\"Ok\")\n .ok();\n}\n\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cmp::{min, max};\n\n\/\/\/ ARP scheme\npub mod arp;\n\/\/\/ Context scheme\npub mod context;\n\/\/\/ Debug scheme\npub mod debug;\n\/\/\/ Display Scheme\npub mod display;\n\/\/\/ Ethernet scheme\npub mod ethernet;\n\/\/\/ File scheme\npub mod file;\n\/\/\/ IP scheme\npub mod ip;\n\/\/\/ Memory scheme\npub mod memory;\n\/\/\/ Pseudo random generation scheme\npub mod random;\n\/\/\/ Time scheme\npub mod time;\n\/\/\/ Window scheme\npub mod window;\n\n#[allow(unused_variables)]\npub trait KScheme {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> &str {\n \"\"\n }\n\n fn open(&mut self, url: &URL) -> Option<Box<Resource>> {\n None\n }\n}\n\n\/\/\/ Resource seek\npub enum ResourceSeek {\n \/\/\/ Start point\n Start(usize),\n \/\/\/ Current point\n Current(isize),\n \/\/\/ End point\n End(isize),\n}\n\n\/\/\/ A system resource\n#[allow(unused_variables)]\npub trait Resource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Option<Box<Resource>> {\n None\n }\n \/\/\/ Return the url of this resource\n fn url(&self) -> URL;\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n None\n }\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n None\n }\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n None\n }\n \/\/\/ Sync the resource\n fn sync(&mut self) -> bool {\n false\n }\n\n \/\/Helper functions\n fn read_to_end(&mut self, vec: &mut Vec<u8>) -> Option<usize> {\n let mut read = 0;\n loop {\n let mut bytes = [0; 1024];\n match self.read(&mut bytes) {\n Some(0) => return Some(read),\n None => return None,\n Some(count) => {\n for i in 0..count {\n vec.push(bytes[i]);\n }\n read += count;\n }\n }\n }\n }\n}\n\n\/\/\/ An URL, see wiki\npub struct URL {\n pub string: String,\n}\n\nimpl URL {\n \/\/\/ Create a new empty URL\n pub fn new() -> Self {\n URL { string: String::new() }\n }\n\n \/\/\/ Create an URL from a string literal\n pub fn from_str(url_str: &'static str) -> Self {\n return URL::from_string(&url_str.to_string());\n }\n\n \/\/\/ Create an URL from `String`\n pub fn from_string(url_string: &String) -> Self {\n URL { string: url_string.clone() }\n }\n\n \/\/\/ Convert to string\n pub fn to_string(&self) -> String {\n self.string.clone()\n }\n\n \/\/\/ Get the length of 
this URL\n pub fn len(&self) -> usize {\n self.string.len()\n }\n\n \/\/\/ Open this URL (returns a resource)\n pub fn open(&self) -> Option<Box<Resource>> {\n unsafe {\n return (*::session_ptr).open(&self);\n }\n }\n\n \/\/\/ Return the scheme of this url\n pub fn scheme(&self) -> &str {\n let mut l = 0;\n\n for c in self.string.chars() {\n\n if c == ':' {\n break;\n }\n\n l += 1;\n\n }\n\n &self.string[..l]\n }\n\n \/\/\/ Get the reference (after the ':') of the url\n pub fn reference(&self) -> &str {\n let mut l = 1;\n\n for c in self.string.chars() {\n l += 1;\n\n if c == ':' {\n break;\n }\n\n }\n\n &self.string[l..]\n }\n\n}\n\nimpl Clone for URL {\n fn clone(&self) -> Self {\n URL { string: self.string.clone() }\n }\n}\n\n\/\/\/ A vector resource\npub struct VecResource {\n url: URL,\n vec: Vec<u8>,\n seek: usize,\n}\n\nimpl VecResource {\n pub fn new(url: URL, vec: Vec<u8>) -> Self {\n VecResource {\n url: url,\n vec: vec,\n seek: 0,\n }\n }\n\n pub fn inner(&self) -> &Vec<u8> {\n return &self.vec;\n }\n}\n\nimpl Resource for VecResource {\n fn dup(&self) -> Option<Box<Resource>> {\n Some(box VecResource {\n url: self.url.clone(),\n vec: self.vec.clone(),\n seek: self.seek,\n })\n }\n\n fn url(&self) -> URL {\n return self.url.clone();\n }\n\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n let mut i = 0;\n while i < buf.len() && self.seek < self.vec.len() {\n match self.vec.get(self.seek) {\n Some(b) => buf[i] = *b,\n None => (),\n }\n self.seek += 1;\n i += 1;\n }\n return Some(i);\n }\n\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let mut i = 0;\n while i < buf.len() && self.seek < self.vec.len() {\n self.vec[self.seek] = buf[i];\n self.seek += 1;\n i += 1;\n }\n while i < buf.len() {\n self.vec.push(buf[i]);\n self.seek += 1;\n i += 1;\n }\n return Some(i);\n }\n\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n match pos {\n ResourceSeek::Start(offset) => self.seek = min(self.vec.len(), offset),\n ResourceSeek::Current(offset) =>\n self.seek = max(0, min(self.seek as isize, self.seek as isize + offset)) as usize,\n ResourceSeek::End(offset) =>\n self.seek =\n max(0, min(self.seek as isize, self.vec.len() as isize + offset)) as usize,\n }\n return Some(self.seek);\n }\n\n fn sync(&mut self) -> bool {\n return true;\n }\n}\n<commit_msg>Minor refactor and prevention of possible system panic<commit_after>use alloc::boxed::Box;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cmp::{min, max};\n\n\/\/\/ ARP scheme\npub mod arp;\n\/\/\/ Context scheme\npub mod context;\n\/\/\/ Debug scheme\npub mod debug;\n\/\/\/ Display Scheme\npub mod display;\n\/\/\/ Ethernet scheme\npub mod ethernet;\n\/\/\/ File scheme\npub mod file;\n\/\/\/ IP scheme\npub mod ip;\n\/\/\/ Memory scheme\npub mod memory;\n\/\/\/ Pseudo random generation scheme\npub mod random;\n\/\/\/ Time scheme\npub mod time;\n\/\/\/ Window scheme\npub mod window;\n\n#[allow(unused_variables)]\npub trait KScheme {\n fn on_irq(&mut self, irq: u8) {\n\n }\n\n fn on_poll(&mut self) {\n\n }\n\n fn scheme(&self) -> &str {\n \"\"\n }\n\n fn open(&mut self, url: &URL) -> Option<Box<Resource>> {\n None\n }\n}\n\n\/\/\/ Resource seek\npub enum ResourceSeek {\n \/\/\/ Start point\n Start(usize),\n \/\/\/ Current point\n Current(isize),\n \/\/\/ End point\n End(isize),\n}\n\n\/\/\/ A system resource\n#[allow(unused_variables)]\npub trait Resource {\n \/\/\/ Duplicate the resource\n fn dup(&self) -> Option<Box<Resource>> {\n None\n }\n \/\/\/ Return the url of this 
resource\n fn url(&self) -> URL;\n \/\/ TODO: Make use of Write and Read trait\n \/\/\/ Read data to buffer\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n None\n }\n \/\/\/ Write to resource\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n None\n }\n \/\/\/ Seek\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n None\n }\n \/\/\/ Sync the resource\n fn sync(&mut self) -> bool {\n false\n }\n\n \/\/Helper functions\n fn read_to_end(&mut self, vec: &mut Vec<u8>) -> Option<usize> {\n let mut read = 0;\n loop {\n let mut bytes = [0; 1024];\n match self.read(&mut bytes) {\n Some(0) => return Some(read),\n None => return None,\n Some(count) => {\n vec.push_all(&bytes[0..count]);\n read += count;\n }\n }\n }\n }\n}\n\n\/\/\/ An URL, see wiki\npub struct URL {\n pub string: String,\n}\n\nimpl URL {\n \/\/\/ Create a new empty URL\n pub fn new() -> Self {\n URL { string: String::new() }\n }\n\n \/\/\/ Create an URL from a string literal\n pub fn from_str(url_str: &'static str) -> Self {\n return URL::from_string(&url_str.to_string());\n }\n\n \/\/\/ Create an URL from `String`\n pub fn from_string(url_string: &String) -> Self {\n URL { string: url_string.clone() }\n }\n\n \/\/\/ Convert to string\n pub fn to_string(&self) -> String {\n self.string.clone()\n }\n\n \/\/\/ Get the length of this URL\n pub fn len(&self) -> usize {\n self.string.len()\n }\n\n \/\/\/ Open this URL (returns a resource)\n pub fn open(&self) -> Option<Box<Resource>> {\n unsafe {\n return (*::session_ptr).open(&self);\n }\n }\n\n \/\/\/ Return the scheme of this url\n pub fn scheme(&self) -> &str {\n &self.string[..self.string.find(':').unwrap_or(self.string.len())]\n }\n\n \/\/\/ Get the reference (after the ':') of the url\n pub fn reference(&self) -> &str {\n &self.string[(1 + self.string.find(':').unwrap_or(self.string.len()))..]\n }\n\n}\n\nimpl Clone for URL {\n fn clone(&self) -> Self {\n URL { string: self.string.clone() }\n }\n}\n\n\/\/\/ A vector resource\npub struct VecResource {\n url: URL,\n vec: Vec<u8>,\n seek: usize,\n}\n\nimpl VecResource {\n pub fn new(url: URL, vec: Vec<u8>) -> Self {\n VecResource {\n url: url,\n vec: vec,\n seek: 0,\n }\n }\n\n pub fn inner(&self) -> &Vec<u8> {\n return &self.vec;\n }\n}\n\nimpl Resource for VecResource {\n fn dup(&self) -> Option<Box<Resource>> {\n Some(box VecResource {\n url: self.url.clone(),\n vec: self.vec.clone(),\n seek: self.seek,\n })\n }\n\n fn url(&self) -> URL {\n return self.url.clone();\n }\n\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n let mut i = 0;\n while i < buf.len() && self.seek < self.vec.len() {\n match self.vec.get(self.seek) {\n Some(b) => buf[i] = *b,\n None => (),\n }\n self.seek += 1;\n i += 1;\n }\n return Some(i);\n }\n\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let mut i = 0;\n while i < buf.len() && self.seek < self.vec.len() {\n self.vec[self.seek] = buf[i];\n self.seek += 1;\n i += 1;\n }\n while i < buf.len() {\n self.vec.push(buf[i]);\n self.seek += 1;\n i += 1;\n }\n return Some(i);\n }\n\n fn seek(&mut self, pos: ResourceSeek) -> Option<usize> {\n match pos {\n ResourceSeek::Start(offset) => self.seek = min(self.vec.len(), offset),\n ResourceSeek::Current(offset) =>\n self.seek = max(0, min(self.seek as isize, self.seek as isize + offset)) as usize,\n ResourceSeek::End(offset) =>\n self.seek =\n max(0, min(self.seek as isize, self.vec.len() as isize + offset)) as usize,\n }\n return Some(self.seek);\n }\n\n fn sync(&mut self) -> bool {\n return true;\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Add from_bytes for decoding all routing commands<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement angular impulse response<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add src\/output\/stream_writers\/output_stream_writer_html.rs<commit_after>\nuse std::io;\n\nuse parser::ast::*;\nuse processing::structs::*;\nuse scope::context::*;\nuse scope::bindings::*;\nuse output::stream_writers::output_writer::*;\nuse output::stream_writers::output_stream_writer::*;\n\n\n#[derive(Debug, Clone, Default)]\npub struct ElementOpsStreamWriterHtml {}\n\nimpl ElementOpsStreamWriter for ElementOpsStreamWriterHtml {\n fn write_op_element_open<PropIter, EventIter, BindingIter>(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, element_tag: &str, element_key: &str, is_void: bool, props: PropIter, events: EventIter, binding: BindingIter) -> Result\n where PropIter : IntoIterator<Item = Prop>, EventIter: IntoIterator<Item = EventHandler>, BindingIter: IntoIterator<Item = ElementValueBinding>\n {\n Ok(())\n }\n\n fn write_op_element_close(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, element_tag: &str, element_key: &str) -> Result {\n Ok(())\n }\n\n fn write_op_element_start_block<PropIter: IntoIterator<Item = Prop>>(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, block_id: &str, props: PropIter) -> Result {\n Ok(())\n }\n\n fn write_op_element_end_block(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, block_id: &str) -> Result {\n Ok(())\n }\n\n fn write_op_element_map_collection_to_block(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, coll_expr: &ExprValue, block_id: &str) -> Result {\n Ok(())\n }\n\n fn write_op_element_instance_component<PropIter, EventIter, BindingIter>(&mut self, w: &mut io::Write, ctx: &mut Context, bindings: &BindingContext, element_tag: &str, element_key: &str, is_void: bool, props: PropIter, events: EventIter, binding: BindingIter) -> Result\n where PropIter : IntoIterator<Item = Prop>, EventIter: IntoIterator<Item = EventHandler>, BindingIter: IntoIterator<Item = ElementValueBinding>\n {\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add benchmarks<commit_after>#![feature(plugin, test)]\n#![plugin(phf_macros)]\n\nextern crate test;\nextern crate phf;\n\nmod map {\n use std::collections::{BTreeMap, HashMap};\n use test::Bencher;\n\n use phf;\n\n static MAP: phf::Map<&'static str, isize> = phf_map!(\n \"apple\" => 0,\n \"banana\" => 1,\n \"carrot\" => 2,\n \"doughnut\" => 3,\n \"eggplant\" => 4,\n \"frankincene\" => 5,\n \"grapes\" => 6,\n \"haggis\" => 7,\n \"ice cream\" => 8,\n \"jelly beans\" => 9,\n \"kaffir lime leaves\" => 10,\n \"lemonade\" => 11,\n \"mashmallows\" => 12,\n \"nectarines\" => 13,\n \"oranges\" => 14,\n \"pineapples\" => 15,\n \"quinoa\" => 16,\n \"rosemary\" => 17,\n \"sourdough\" => 18,\n \"tomatoes\" => 19,\n \"unleavened bread\" => 20,\n \"vanilla\" => 21,\n \"watermelon\" => 22,\n \"xinomavro grapes\" => 23,\n \"yogurt\" => 24,\n \"zucchini\" => 25,\n );\n\n fn match_get(key: &str) -> Option<usize> {\n match key {\n \"apple\" => Some(0),\n \"banana\" => Some(1),\n \"carrot\" => Some(2),\n \"doughnut\" => Some(3),\n \"eggplant\" => Some(4),\n \"frankincene\" => Some(5),\n \"grapes\" => Some(6),\n \"haggis\" => Some(7),\n \"ice cream\" => Some(8),\n \"jelly beans\" => Some(9),\n \"kaffir lime leaves\" => Some(10),\n 
\"lemonade\" => Some(11),\n \"mashmallows\" => Some(12),\n \"nectarines\" => Some(13),\n \"oranges\" => Some(14),\n \"pineapples\" => Some(15),\n \"quinoa\" => Some(16),\n \"rosemary\" => Some(17),\n \"sourdough\" => Some(18),\n \"tomatoes\" => Some(19),\n \"unleavened bread\" => Some(20),\n \"vanilla\" => Some(21),\n \"watermelon\" => Some(22),\n \"xinomavro grapes\" => Some(23),\n \"yogurt\" => Some(24),\n \"zucchini\" => Some(25),\n _ => None\n }\n }\n\n #[bench]\n fn bench_match_some(b: &mut Bencher) {\n b.iter(|| {\n assert_eq!(match_get(\"zucchini\").unwrap(), 25);\n })\n }\n\n #[bench]\n fn bench_match_none(b: &mut Bencher) {\n b.iter(|| {\n assert_eq!(match_get(\"potato\"), None);\n })\n }\n\n #[bench]\n fn bench_btreemap_some(b: &mut Bencher) {\n let mut map = BTreeMap::new();\n for (key, value) in MAP.entries() {\n map.insert(*key, *value);\n }\n\n b.iter(|| {\n assert_eq!(map.get(\"zucchini\").unwrap(), &25);\n })\n }\n\n #[bench]\n fn bench_hashmap_some(b: &mut Bencher) {\n let mut map = HashMap::new();\n for (key, value) in MAP.entries() {\n map.insert(*key, *value);\n }\n\n b.iter(|| {\n assert_eq!(map.get(\"zucchini\").unwrap(), &25);\n })\n }\n\n #[bench]\n fn bench_phf_some(b: &mut Bencher) {\n b.iter(|| {\n assert_eq!(MAP.get(\"zucchini\").unwrap(), &25);\n })\n }\n\n #[bench]\n fn bench_btreemap_none(b: &mut Bencher) {\n let mut map = BTreeMap::new();\n for (key, value) in MAP.entries() {\n map.insert(*key, *value);\n }\n\n b.iter(|| {\n assert_eq!(map.get(\"potato\"), None);\n })\n }\n\n\n #[bench]\n fn bench_hashmap_none(b: &mut Bencher) {\n let mut map = BTreeMap::new();\n for (key, value) in MAP.entries() {\n map.insert(*key, *value);\n }\n\n b.iter(|| {\n assert_eq!(map.get(\"potato\"), None);\n })\n }\n\n #[bench]\n fn bench_phf_none(b: &mut Bencher) {\n b.iter(|| {\n assert_eq!(MAP.get(\"potato\"), None);\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before>export DisplayListBuilder;\n\nuse au = gfx::geometry;\nuse au::Au;\nuse newcss::values::{BgColor, BgColorTransparent, Specified};\nuse dom::node::{Text, NodeScope};\nuse dom::cow::Scope;\nuse dvec::DVec;\nuse either::{Left, Right};\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::display_list::DisplayList;\nuse layout::box::{RenderBox, TextBox};\nuse layout::context::LayoutContext;\nuse layout::flow::FlowContext;\nuse layout::text::TextBoxData;\nuse servo_text::text_run::TextRun;\nuse util::tree;\nuse vec::push;\n\n\/** A builder object that manages display list builder should mainly\n hold information about the initial request and desired result---for\n example, whether the DisplayList to be used for painting or hit\n testing. This can affect which boxes are created.\n\n Right now, the builder isn't used for much, but it establishes the\n pattern we'll need once we support DL-based hit testing &c. 
*\/\npub struct DisplayListBuilder {\n ctx: &LayoutContext,\n}\n\n\ntrait FlowDisplayListBuilderMethods {\n fn build_display_list(@self, a: &DisplayListBuilder, b: &Rect<Au>, c: &mut DisplayList);\n\n fn build_display_list_for_child(@self, a: &DisplayListBuilder, b: @FlowContext,\n c: &Rect<Au>, d: &Point2D<Au>, e: &mut DisplayList);\n}\n\nimpl FlowContext: FlowDisplayListBuilderMethods {\n fn build_display_list(@self, builder: &DisplayListBuilder, dirty: &Rect<Au>,\n list: &mut DisplayList) {\n let zero = au::zero_point();\n self.build_display_list_recurse(builder, dirty, &zero, list);\n }\n\n fn build_display_list_for_child(@self,\n builder: &DisplayListBuilder,\n child_flow: @FlowContext,\n dirty: &Rect<Au>, offset: &Point2D<Au>,\n list: &mut DisplayList) {\n\n \/\/ adjust the dirty rect to child flow context coordinates\n let abs_flow_bounds = child_flow.d().position.translate(offset);\n let adj_offset = offset.add(&child_flow.d().position.origin);\n\n debug!(\"build_display_list_for_child: rel=%?, abs=%?\",\n child_flow.d().position, abs_flow_bounds);\n debug!(\"build_display_list_for_child: dirty=%?, offset=%?\",\n dirty, offset);\n\n if dirty.intersects(&abs_flow_bounds) {\n debug!(\"build_display_list_for_child: intersected. recursing into child flow...\");\n child_flow.build_display_list_recurse(builder, dirty, &adj_offset, list);\n } else {\n debug!(\"build_display_list_for_child: Did not intersect...\");\n }\n }\n}\n\n\/* TODO: redo unit tests, if possible?gn\n\nfn should_convert_text_boxes_to_solid_color_background_items() {\n #[test];\n\n use layout::box_builder::LayoutTreeBuilder;\n\n let s = Scope();\n let n = s.new_node(Text(~\"firecracker\"));\n let builder = LayoutTreeBuilder();\n let b = builder.construct_trees(n).get();\n\n b.reflow_text();\n let list = DVec();\n box_to_display_items(list, b, Point2D(au::from_px(0), au::from_px(0)));\n\n do list.borrow |l| {\n match l[0].data {\n dl::SolidColorData(*) => { }\n _ => { fail }\n }\n } \n}\n\nfn should_convert_text_boxes_to_text_items() {\n #[test];\n use layout::box_builder::LayoutTreeBuilder;\n\n let s = Scope();\n let n = s.new_node(Text(~\"firecracker\"));\n let builder = LayoutTreeBuilder();\n let b = builder.construct_trees(n).get();\n\n b.reflow_text();\n let list = DVec();\n box_to_display_items(list, b, Point2D(au::from_px(0), au::from_px(0)));\n\n do list.borrow |l| {\n match l[1].data {\n dl::GlyphData(_) => { }\n _ => { fail }\n }\n }\n}\n\nfn should_calculate_the_bounds_of_the_text_box_background_color() {\n #[test];\n #[ignore(cfg(target_os = \"macos\"))];\n use layout::box_builder::LayoutTreeBuilder;\n\n let s = Scope();\n let n = s.new_node(Text(~\"firecracker\"));\n let builder = LayoutTreeBuilder();\n let b = builder.construct_trees(n).get();\n\n b.reflow_text();\n let list = DVec();\n box_to_display_items(list, b, Point2D(au::from_px(0), au::from_px(0)));\n\n let expected = Rect(\n Point2D(au::from_px(0), au::from_px(0)),\n Size2D(au::from_px(84), au::from_px(20))\n );\n\n do list.borrow |l| { assert l[0].bounds == expected }\n}\n\nfn should_calculate_the_bounds_of_the_text_items() {\n #[test];\n #[ignore(reason = \"busted\")];\n use layout::box_builder::LayoutTreeBuilder;\n\n let s = Scope();\n let n = s.new_node(Text(~\"firecracker\"));\n let builder = LayoutTreeBuilder();\n let b = builder.construct_trees(n).get();\n\n b.reflow_text();\n let list = DVec();\n box_to_display_items(list, b, Point2D(au::from_px(0), au::from_px(0)));\n\n let expected = Rect(\n Point2D(au::from_px(0), au::from_px(0)),\n 
Size2D(au::from_px(84), au::from_px(20))\n );\n\n do list.borrow |l| { assert l[1].bounds == expected; }\n}\n*\/\n<commit_msg>Remove dead code<commit_after>export DisplayListBuilder;\n\nuse au = gfx::geometry;\nuse au::Au;\nuse newcss::values::{BgColor, BgColorTransparent, Specified};\nuse dom::node::{Text, NodeScope};\nuse dom::cow::Scope;\nuse dvec::DVec;\nuse either::{Left, Right};\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::display_list::DisplayList;\nuse layout::box::{RenderBox, TextBox};\nuse layout::context::LayoutContext;\nuse layout::flow::FlowContext;\nuse layout::text::TextBoxData;\nuse servo_text::text_run::TextRun;\nuse util::tree;\nuse vec::push;\n\n\/** A builder object that manages display list builder should mainly\n hold information about the initial request and desired result---for\n example, whether the DisplayList to be used for painting or hit\n testing. This can affect which boxes are created.\n\n Right now, the builder isn't used for much, but it establishes the\n pattern we'll need once we support DL-based hit testing &c. *\/\npub struct DisplayListBuilder {\n ctx: &LayoutContext,\n}\n\n\ntrait FlowDisplayListBuilderMethods {\n fn build_display_list(@self, a: &DisplayListBuilder, b: &Rect<Au>, c: &mut DisplayList);\n\n fn build_display_list_for_child(@self, a: &DisplayListBuilder, b: @FlowContext,\n c: &Rect<Au>, d: &Point2D<Au>, e: &mut DisplayList);\n}\n\nimpl FlowContext: FlowDisplayListBuilderMethods {\n fn build_display_list(@self, builder: &DisplayListBuilder, dirty: &Rect<Au>,\n list: &mut DisplayList) {\n let zero = au::zero_point();\n self.build_display_list_recurse(builder, dirty, &zero, list);\n }\n\n fn build_display_list_for_child(@self,\n builder: &DisplayListBuilder,\n child_flow: @FlowContext,\n dirty: &Rect<Au>, offset: &Point2D<Au>,\n list: &mut DisplayList) {\n\n \/\/ adjust the dirty rect to child flow context coordinates\n let abs_flow_bounds = child_flow.d().position.translate(offset);\n let adj_offset = offset.add(&child_flow.d().position.origin);\n\n debug!(\"build_display_list_for_child: rel=%?, abs=%?\",\n child_flow.d().position, abs_flow_bounds);\n debug!(\"build_display_list_for_child: dirty=%?, offset=%?\",\n dirty, offset);\n\n if dirty.intersects(&abs_flow_bounds) {\n debug!(\"build_display_list_for_child: intersected. recursing into child flow...\");\n child_flow.build_display_list_recurse(builder, dirty, &adj_offset, list);\n } else {\n debug!(\"build_display_list_for_child: Did not intersect...\");\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n let x = 5 + \/* 90 + *\/ 5;\n println!(\"Is `x` 10 or 100? x = {}\", x);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 Alexander Reece\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(feature=\"clippy\", feature(plugin))]\n#![cfg_attr(feature=\"clippy\", plugin(clippy))]\n#![cfg_attr(not(feature=\"clippy\"), allow(unknown_lints))]\n\n#[macro_use]\nmod macros;\n\n#[cfg(not(feature = \"amqp0-build-primitives\"))]\ninclude!(concat!(\"..\/pregen\/mod.rs\"));\n#[cfg(feature = \"amqp0-build-primitives\")]\ninclude!(concat!(env!(\"OUT_DIR\"), \"\/mod.rs\"));\n\npub mod field;\n\npub trait Protocol<'a> {\n type Frame;\n\n fn protocol_header() -> &'static [u8];\n}\n\npub trait ProtocolFramePayload<'a> {\n type Method: ProtocolMethod<'a>;\n\n fn as_method(&self) -> Option<&Self::Method>;\n}\n\npub trait ProtocolMethod<'a> {\n type Start: ProtocolMethodPayload + 'a;\n fn as_start(&self) -> Option<&Self::Start>;\n}\n\npub trait ProtocolMethodPayload {\n fn class_id(&self) -> u16;\n fn method_id(&self) -> u16;\n fn payload_size(&self) -> usize;\n}\n\n<commit_msg>[primitives] Require the associated type `Protocol::Frame` to live as long as 'a<commit_after>\/\/ Copyright 2016 Alexander Reece\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![cfg_attr(feature=\"clippy\", feature(plugin))]\n#![cfg_attr(feature=\"clippy\", plugin(clippy))]\n#![cfg_attr(not(feature=\"clippy\"), allow(unknown_lints))]\n\n#[macro_use]\nmod macros;\n\n#[cfg(not(feature = \"amqp0-build-primitives\"))]\ninclude!(concat!(\"..\/pregen\/mod.rs\"));\n#[cfg(feature = \"amqp0-build-primitives\")]\ninclude!(concat!(env!(\"OUT_DIR\"), \"\/mod.rs\"));\n\npub mod field;\n\npub trait Protocol<'a> {\n type Frame: 'a;\n\n fn protocol_header() -> &'static [u8];\n}\n\npub trait ProtocolFramePayload<'a> {\n type Method: ProtocolMethod<'a>;\n\n fn as_method(&self) -> Option<&Self::Method>;\n}\n\npub trait ProtocolMethod<'a> {\n type Start: ProtocolMethodPayload + 'a;\n fn as_start(&self) -> Option<&Self::Start>;\n}\n\npub trait ProtocolMethodPayload {\n fn class_id(&self) -> u16;\n fn method_id(&self) -> u16;\n fn payload_size(&self) -> usize;\n}\n\n<|endoftext|>"} {"text":"<commit_before>use core::slice;\nuse core::str;\nuse collections::range::RangeArgument;\nuse core::ops::Range;\nuse core::cmp::{max, min};\n\n\/\/\/ Bounded slice abstraction\n\/\/\/\n\/\/\/ # Code Migration\n\/\/\/\n\/\/\/ `foo[a..b]` => `foo.get_slice(a..b)`\n\/\/\/\n\/\/\/ `foo[a..]` => `foo.get_slice(a..)`\n\/\/\/\n\/\/\/ `foo[..b]` => `foo.get_slice(..b)`\n\/\/\/\npub trait GetSlice {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self;\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self;\n}\n\nfn bound<T: RangeArgument<usize>>(len: usize, a: T) -> Range<usize> {\n let start = min(a.start().map(|&x| x).unwrap_or(0), len);\n let end = min(a.end().map(|&x| x).unwrap_or(len), len);\n\n if start <= end {\n start..end\n } else {\n 0..0\n }\n}\n\nimpl GetSlice for str {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n\nimpl<T> GetSlice for [T] {\n fn get_slice<U: RangeArgument<usize>>(&self, a: U) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn 
get_slice_mut<U: RangeArgument<usize>>(&mut self, a: U) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n<commit_msg>Cleanup get_slice.rs module<commit_after>use collections::range::RangeArgument;\nuse core::ops::Range;\nuse core::cmp;\n\n\/\/\/ Bounded slice abstraction\n\/\/\/\n\/\/\/ # Code Migration\n\/\/\/\n\/\/\/ `foo[a..b]` => `foo.get_slice(a..b)`\n\/\/\/\n\/\/\/ `foo[a..]` => `foo.get_slice(a..)`\n\/\/\/\n\/\/\/ `foo[..b]` => `foo.get_slice(..b)`\n\/\/\/\npub trait GetSlice {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self;\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self;\n}\n\nfn bound<T: RangeArgument<usize>>(len: usize, a: T) -> Range<usize> {\n let start = cmp::min(a.start().map(|&x| x).unwrap_or(0), len);\n let end = cmp::min(a.end().map(|&x| x).unwrap_or(len), len);\n\n if start <= end {\n start..end\n } else {\n 0..0\n }\n}\n\nimpl GetSlice for str {\n fn get_slice<T: RangeArgument<usize>>(&self, a: T) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<T: RangeArgument<usize>>(&mut self, a: T) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n\nimpl<T> GetSlice for [T] {\n fn get_slice<U: RangeArgument<usize>>(&self, a: U) -> &Self {\n &self[bound(self.len(), a)]\n }\n\n fn get_slice_mut<U: RangeArgument<usize>>(&mut self, a: U) -> &mut Self {\n let len = self.len();\n &mut self[bound(len, a)]\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n for n in 1..100 {\n if n % 15 == 0 {\n println!(\"fizzbuzz\");\n } else if n % 3 == 0 {\n println!(\"fizz\");\n }else if n % 5 == 0 {\n println!(\"buzz\");\n }else {\n println!(\"{}\", n);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>LeetCode: 937. 
Reorder Data in Log Files<commit_after>use std::cmp::Ordering;\n\n#[derive(Debug)]\nstruct Solution {}\n\nimpl Solution {\n pub fn reorder_log_files(logs: Vec<String>) -> Vec<String> {\n let mut cloned = logs.clone();\n cloned.sort_by(|a, b|\n {\n let (ai, ad) = a.split_at(a.find(' ').unwrap()+1);\n let (bi, bd) = b.split_at(b.find(' ').unwrap()+1);\n let alpha: (bool, bool) = (ad.chars().any(char::is_alphabetic), bd.chars().any(char::is_alphabetic));\n\n let result = match alpha {\n (true, true) => {\n if ad.cmp(bd) == Ordering::Equal {\n ai.cmp(bi)\n } else {\n ad.cmp(bd)\n }\n },\n (true, false) => Ordering::Less,\n (false, true) => Ordering::Greater,\n (false, false) => Ordering::Equal,\n };\n result\n }\n );\n return cloned;\n }\n}\n\n\/\/ [\"8 fj dzz k\", \"5r 446 6 3\", \"63 gu psub\", \"5 ba iedrz\", \"6s 87979 5\", \"3r 587 01\", \"jc 3480612\", \"bb wsrd kp\", \"b aq cojj\", \"r5 6316 71\"]\nfn main() {\n println!(\"{:?}\", \"wsrd kp\".cmp(\"ba iedrz\"));\n println!(\"{:?}\", Solution::reorder_log_files(vec!{\n String::from(\"8 fj dzz k\"),\n String::from(\"5r 446 6 3\"),\n String::from(\"63 gu psub\"),\n String::from(\"5 ba iedrz\"),\n String::from(\"6s 87979 5\"),\n String::from(\"3r 587 01\"),\n String::from(\"jc 3480612\"),\n String::from(\"bb wsrd kp\"),\n String::from(\"b aq cojj\"),\n String::from(\"r5 6316 71\"),\n }));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for Condition decoding<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename InternalOrLeafFull<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactored code to pop elements from the addition code; Added tests; Improved error handling<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate xml;\n\nuse std::ascii::AsciiExt;\nuse std::default::Default;\nuse std::io;\nuse xml::{Element, ElementBuilder, Parser, Xml};\n\n\ntrait ElementUtils {\n fn tag_with_text(&mut self, child_name: &'static str, child_body: &str);\n fn tag_with_text_opt(&mut self, child_name: &'static str, child_body: &Option<String>);\n}\n\n\nimpl ElementUtils for Element {\n fn tag_with_text(&mut self, child_name: &'static str, child_body: &str) {\n self.tag(elem_with_text(child_name, child_body));\n }\n\n fn tag_with_text_opt(&mut self, child_name: &'static str, child_body: &Option<String>) {\n if let Some(ref c) = *child_body {\n self.tag_with_text(child_name, &c);\n }\n }\n}\n\n\nfn elem_with_text(tag_name: &'static str, chars: &str) -> Element {\n let mut elem = Element::new(tag_name.to_string(), None, vec![]);\n elem.text(chars.to_string());\n elem\n}\n\n\ntrait ViaXml {\n fn to_xml(&self) -> Element;\n fn from_xml(element: Element) -> Result<Self, &'static str>;\n}\n\n\n\/\/\/ RSS\n\/\/\/\n\/\/\/ \"At the top level, a RSS document is a \\<rss\\> element, with a mandatory attribute called\n\/\/\/ version, that specifies the version of RSS that the document conforms to. 
If it conforms to\n\/\/\/ this specification, the version attribute must be 2.0.\"\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § RSS]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#whatIsRss)\n#[derive(Default)]\npub struct Rss(pub Channel);\n\nimpl ViaXml for Rss {\n fn to_xml(&self) -> Element {\n let mut rss = Element::new(\"rss\".to_string(), None, vec![(\"version\".to_string(), None, \"2.0\".to_string())]);\n\n let &Rss(ref channel) = self;\n rss.tag(channel.to_xml());\n\n rss\n }\n\n fn from_xml(rss_elem: Element) -> Result<Self, &'static str> {\n if rss_elem.name.to_ascii_lowercase() != \"rss\" {\n panic!(\"Expected <rss>, found <{}>\", rss_elem.name);\n }\n\n let channel_elem = match rss_elem.get_child(\"channel\", None) {\n Some(elem) => elem,\n None => return Err(\"No <channel> element found in <rss>\"),\n };\n\n let channel = try!(ViaXml::from_xml(channel_elem.clone()));\n\n Ok(Rss(channel))\n }\n}\n\nimpl Rss {\n pub fn to_string(&self) -> String {\n let mut ret = format!(\"{}\", Xml::PINode(\"xml version='1.0' encoding='UTF-8'\".to_string()));\n ret.push_str(&format!(\"{}\", self.to_xml()));\n ret\n }\n\n pub fn from_read(reader: &mut io::Read) -> Result<Self, &'static str> {\n let mut rss_string = String::new();\n\n if let Err(..) = reader.read_to_string(&mut rss_string) {\n return Err(\"Error reading string from reader\");\n }\n\n let mut parser = Parser::new();\n parser.feed_str(&rss_string);\n\n let mut builder = ElementBuilder::new();\n\n for event in parser {\n if let Some(Ok(element)) = builder.handle_event(event) {\n return ViaXml::from_xml(element);\n }\n }\n\n Err(\"RSS read error\")\n }\n}\n\n\n\/\/\/ Channel\n\/\/\/\n\/\/\/ \"Subordinate to the \\<rss\\> element is a single \\<channel\\> element, which contains information\n\/\/\/ about the channel (metadata) and its contents.\"\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Channel]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#requiredChannelElements)\n\/\/\/\n\/\/\/ ## Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use rss::Channel;\n\/\/\/ use std::default::Default;\n\/\/\/\n\/\/\/ let channel = Channel {\n\/\/\/ title: \"My Blog\".to_string(),\n\/\/\/ link: \"http:\/\/myblog.com\".to_string(),\n\/\/\/ description: \"Where I write stuff\".to_string(),\n\/\/\/ items: vec![],\n\/\/\/ ..Default::default()\n\/\/\/ };\n\/\/\/ ```\n#[derive(Default)]\npub struct Channel {\n pub title: String,\n pub link: String,\n pub description: String,\n pub items: Vec<Item>,\n pub language: Option<String>,\n pub copyright: Option<String>,\n pub managing_editor: Option<String>,\n pub web_master: Option<String>,\n pub pub_date: Option<String>,\n pub last_build_date: Option<String>,\n pub categories: Vec<Category>,\n pub generator: Option<String>,\n pub docs: Option<String>,\n \/\/ pub cloud:\n \/\/ pub ttl:\n pub image: Option<String>,\n pub rating: Option<String>,\n \/\/ pub text_input:\n pub skip_hours: Option<String>,\n pub skip_days: Option<String>,\n}\n\nimpl ViaXml for Channel {\n fn to_xml(&self) -> Element {\n let mut channel = Element::new(\"channel\".to_string(), None, vec![]);\n\n channel.tag_with_text(\"title\", &self.title);\n channel.tag_with_text(\"link\", &self.link);\n channel.tag_with_text(\"description\", &self.description);\n\n for item in &self.items {\n channel.tag(item.to_xml());\n }\n\n channel.tag_with_text_opt(\"language\", &self.language);\n channel.tag_with_text_opt(\"copyright\", &self.copyright);\n channel.tag_with_text_opt(\"managingEditor\", &self.managing_editor);\n 
channel.tag_with_text_opt(\"webMaster\", &self.web_master);\n channel.tag_with_text_opt(\"pubDate\", &self.pub_date);\n channel.tag_with_text_opt(\"lastBuildDate\", &self.last_build_date);\n channel.tag_with_text_opt(\"generator\", &self.generator);\n channel.tag_with_text_opt(\"docs\", &self.docs);\n channel.tag_with_text_opt(\"image\", &self.image);\n channel.tag_with_text_opt(\"rating\", &self.rating);\n channel.tag_with_text_opt(\"skipHours\", &self.skip_hours);\n channel.tag_with_text_opt(\"skipDays\", &self.skip_days);\n\n for category in &self.categories {\n channel.tag(category.to_xml());\n }\n\n channel\n }\n\n fn from_xml(element: Element) -> Result<Self, &'static str> {\n let title = match element.get_child(\"title\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <title> element\"),\n };\n\n let link = match element.get_child(\"link\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <link> element\"),\n };\n\n let description = match element.get_child(\"description\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <description> element\"),\n };\n\n let items = element.get_children(\"item\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n let categories = element.get_children(\"category\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n Ok(Channel {\n title: title,\n link: link,\n description: description,\n items: items,\n categories: categories,\n ..Default::default() \/\/ TODO\n })\n }\n}\n\n\n\/\/\/ Item\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Item]\n\/\/\/ http:\/\/cyber.law.harvard.edu\/rss\/rss.html#hrelementsOfLtitemgt\n#[derive(Default)]\npub struct Item {\n pub title: Option<String>,\n pub link: Option<String>,\n pub description: Option<String>,\n pub author: Option<String>,\n pub categories: Vec<Category>,\n pub comments: Option<String>,\n \/\/ pub enclosure\n \/\/ pub guid\n \/\/ pub pubDate\n \/\/ pub source\n}\n\n\nimpl ViaXml for Item {\n fn to_xml(&self) -> Element {\n let mut item = Element::new(\"item\".to_string(), None, vec![]);\n\n item.tag_with_text_opt(\"title\", &self.title);\n item.tag_with_text_opt(\"link\", &self.link);\n item.tag_with_text_opt(\"description\", &self.description);\n item.tag_with_text_opt(\"author\", &self.author);\n item.tag_with_text_opt(\"comments\", &self.comments);\n\n for category in &self.categories {\n item.tag(category.to_xml());\n }\n\n item\n }\n\n fn from_xml(element: Element) -> Result<Self, &'static str> {\n let title = element.get_child(\"title\", None).map(Element::content_str);\n let link = element.get_child(\"link\", None).map(Element::content_str);\n let description = element.get_child(\"description\", None).map(Element::content_str);\n let author = element.get_child(\"author\", None).map(Element::content_str);\n let comments = element.get_child(\"comments\", None).map(Element::content_str);\n\n let categories = element.get_children(\"category\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n Ok(Item {\n title: title,\n link: link,\n description: description,\n categories: categories,\n author: author,\n comments: comments,\n })\n }\n}\n\n\n\/\/\/ Category\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Category]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#ltcategorygtSubelementOfLtitemgt)\n#[derive(Default)]\npub struct Category {\n pub domain: Option<String>,\n pub value: 
String,\n}\n\nimpl ViaXml for Category {\n fn to_xml(&self) -> Element {\n let mut category = match self.domain {\n Some(ref d) => Element::new(\"category\".to_string(), None, vec![(\"domain\".to_string(), None, d.clone())]),\n None => Element::new(\"category\".to_string(), None, vec![]),\n };\n category.text(self.value.clone());\n category\n }\n\n fn from_xml(elem: Element) -> Result<Self, &'static str> {\n let domain = elem.get_attribute(\"domain\", None).map(|s| s.to_string());\n let value = elem.content_str();\n\n Ok(Category {\n domain: domain,\n value: value,\n })\n }\n}\n\n\n\n#[cfg(test)]\nmod test {\n use std::default::Default;\n use std::fs::File;\n use super::{Rss, Item, Channel};\n\n #[test]\n fn test_basic_to_string() {\n let item = Item {\n title: Some(\"My first post!\".to_string()),\n link: Some(\"http:\/\/myblog.com\/post1\".to_string()),\n description: Some(\"This is my first post\".to_string()),\n ..Default::default()\n };\n\n let channel = Channel {\n title: \"My Blog\".to_string(),\n link: \"http:\/\/myblog.com\".to_string(),\n description: \"Where I write stuff\".to_string(),\n items: vec![item],\n ..Default::default()\n };\n\n let rss = Rss(channel);\n assert_eq!(rss.to_string(), \"<?xml version=\\'1.0\\' encoding=\\'UTF-8\\'?><rss version=\\'2.0\\'><channel><title>My Blog<\/title><link>http:\/\/myblog.com<\/link><description>Where I write stuff<\/description><item><title>My first post!<\/title><link>http:\/\/myblog.com\/post1<\/link><description>This is my first post<\/description><\/item><\/channel><\/rss>\");\n }\n\n #[test]\n fn test_from_file() {\n let mut file = File::open(\"test-data\/pinboard.xml\").unwrap();\n let rss = Rss::from_read(&mut file).unwrap();\n assert!(rss.to_string().len() > 0);\n }\n\n #[test]\n #[should_panic]\n fn test_from_read_no_channels() {\n let mut rss_bytes = \"<rss><\/rss>\".as_bytes();\n let Rss(_) = Rss::from_read(&mut rss_bytes).unwrap();\n }\n\n #[test]\n #[should_panic]\n fn test_from_read_one_channel_no_properties() {\n let mut rss_bytes = \"<rss><channel><\/channel><\/rss>\".as_bytes();\n let Rss(_) = Rss::from_read(&mut rss_bytes).unwrap();\n }\n\n #[test]\n fn test_from_read_one_channel() {\n let mut rss_bytes = \"<rss><channel><title>Hello world!<\/title><description><\/description><link><\/link><\/channel><\/rss>\".as_bytes();\n let Rss(channel) = Rss::from_read(&mut rss_bytes).unwrap();\n assert_eq!(\"Hello world!\", channel.title);\n }\n}\n<commit_msg>Fix markdown link<commit_after>extern crate xml;\n\nuse std::ascii::AsciiExt;\nuse std::default::Default;\nuse std::io;\nuse xml::{Element, ElementBuilder, Parser, Xml};\n\n\ntrait ElementUtils {\n fn tag_with_text(&mut self, child_name: &'static str, child_body: &str);\n fn tag_with_text_opt(&mut self, child_name: &'static str, child_body: &Option<String>);\n}\n\n\nimpl ElementUtils for Element {\n fn tag_with_text(&mut self, child_name: &'static str, child_body: &str) {\n self.tag(elem_with_text(child_name, child_body));\n }\n\n fn tag_with_text_opt(&mut self, child_name: &'static str, child_body: &Option<String>) {\n if let Some(ref c) = *child_body {\n self.tag_with_text(child_name, &c);\n }\n }\n}\n\n\nfn elem_with_text(tag_name: &'static str, chars: &str) -> Element {\n let mut elem = Element::new(tag_name.to_string(), None, vec![]);\n elem.text(chars.to_string());\n elem\n}\n\n\ntrait ViaXml {\n fn to_xml(&self) -> Element;\n fn from_xml(element: Element) -> Result<Self, &'static str>;\n}\n\n\n\/\/\/ RSS\n\/\/\/\n\/\/\/ \"At the top level, a RSS document is a 
\\<rss\\> element, with a mandatory attribute called\n\/\/\/ version, that specifies the version of RSS that the document conforms to. If it conforms to\n\/\/\/ this specification, the version attribute must be 2.0.\"\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § RSS]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#whatIsRss)\n#[derive(Default)]\npub struct Rss(pub Channel);\n\nimpl ViaXml for Rss {\n fn to_xml(&self) -> Element {\n let mut rss = Element::new(\"rss\".to_string(), None, vec![(\"version\".to_string(), None, \"2.0\".to_string())]);\n\n let &Rss(ref channel) = self;\n rss.tag(channel.to_xml());\n\n rss\n }\n\n fn from_xml(rss_elem: Element) -> Result<Self, &'static str> {\n if rss_elem.name.to_ascii_lowercase() != \"rss\" {\n panic!(\"Expected <rss>, found <{}>\", rss_elem.name);\n }\n\n let channel_elem = match rss_elem.get_child(\"channel\", None) {\n Some(elem) => elem,\n None => return Err(\"No <channel> element found in <rss>\"),\n };\n\n let channel = try!(ViaXml::from_xml(channel_elem.clone()));\n\n Ok(Rss(channel))\n }\n}\n\nimpl Rss {\n pub fn to_string(&self) -> String {\n let mut ret = format!(\"{}\", Xml::PINode(\"xml version='1.0' encoding='UTF-8'\".to_string()));\n ret.push_str(&format!(\"{}\", self.to_xml()));\n ret\n }\n\n pub fn from_read(reader: &mut io::Read) -> Result<Self, &'static str> {\n let mut rss_string = String::new();\n\n if let Err(..) = reader.read_to_string(&mut rss_string) {\n return Err(\"Error reading string from reader\");\n }\n\n let mut parser = Parser::new();\n parser.feed_str(&rss_string);\n\n let mut builder = ElementBuilder::new();\n\n for event in parser {\n if let Some(Ok(element)) = builder.handle_event(event) {\n return ViaXml::from_xml(element);\n }\n }\n\n Err(\"RSS read error\")\n }\n}\n\n\n\/\/\/ Channel\n\/\/\/\n\/\/\/ \"Subordinate to the \\<rss\\> element is a single \\<channel\\> element, which contains information\n\/\/\/ about the channel (metadata) and its contents.\"\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Channel]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#requiredChannelElements)\n\/\/\/\n\/\/\/ ## Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use rss::Channel;\n\/\/\/ use std::default::Default;\n\/\/\/\n\/\/\/ let channel = Channel {\n\/\/\/ title: \"My Blog\".to_string(),\n\/\/\/ link: \"http:\/\/myblog.com\".to_string(),\n\/\/\/ description: \"Where I write stuff\".to_string(),\n\/\/\/ items: vec![],\n\/\/\/ ..Default::default()\n\/\/\/ };\n\/\/\/ ```\n#[derive(Default)]\npub struct Channel {\n pub title: String,\n pub link: String,\n pub description: String,\n pub items: Vec<Item>,\n pub language: Option<String>,\n pub copyright: Option<String>,\n pub managing_editor: Option<String>,\n pub web_master: Option<String>,\n pub pub_date: Option<String>,\n pub last_build_date: Option<String>,\n pub categories: Vec<Category>,\n pub generator: Option<String>,\n pub docs: Option<String>,\n \/\/ pub cloud:\n \/\/ pub ttl:\n pub image: Option<String>,\n pub rating: Option<String>,\n \/\/ pub text_input:\n pub skip_hours: Option<String>,\n pub skip_days: Option<String>,\n}\n\nimpl ViaXml for Channel {\n fn to_xml(&self) -> Element {\n let mut channel = Element::new(\"channel\".to_string(), None, vec![]);\n\n channel.tag_with_text(\"title\", &self.title);\n channel.tag_with_text(\"link\", &self.link);\n channel.tag_with_text(\"description\", &self.description);\n\n for item in &self.items {\n channel.tag(item.to_xml());\n }\n\n channel.tag_with_text_opt(\"language\", &self.language);\n 
channel.tag_with_text_opt(\"copyright\", &self.copyright);\n channel.tag_with_text_opt(\"managingEditor\", &self.managing_editor);\n channel.tag_with_text_opt(\"webMaster\", &self.web_master);\n channel.tag_with_text_opt(\"pubDate\", &self.pub_date);\n channel.tag_with_text_opt(\"lastBuildDate\", &self.last_build_date);\n channel.tag_with_text_opt(\"generator\", &self.generator);\n channel.tag_with_text_opt(\"docs\", &self.docs);\n channel.tag_with_text_opt(\"image\", &self.image);\n channel.tag_with_text_opt(\"rating\", &self.rating);\n channel.tag_with_text_opt(\"skipHours\", &self.skip_hours);\n channel.tag_with_text_opt(\"skipDays\", &self.skip_days);\n\n for category in &self.categories {\n channel.tag(category.to_xml());\n }\n\n channel\n }\n\n fn from_xml(element: Element) -> Result<Self, &'static str> {\n let title = match element.get_child(\"title\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <title> element\"),\n };\n\n let link = match element.get_child(\"link\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <link> element\"),\n };\n\n let description = match element.get_child(\"description\", None) {\n Some(element) => element.content_str(),\n None => return Err(\"<channel> is missing required <description> element\"),\n };\n\n let items = element.get_children(\"item\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n let categories = element.get_children(\"category\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n Ok(Channel {\n title: title,\n link: link,\n description: description,\n items: items,\n categories: categories,\n ..Default::default() \/\/ TODO\n })\n }\n}\n\n\n\/\/\/ Item\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Item]\n\/\/\/ (http:\/\/cyber.law.harvard.edu\/rss\/rss.html#hrelementsOfLtitemgt)\n#[derive(Default)]\npub struct Item {\n pub title: Option<String>,\n pub link: Option<String>,\n pub description: Option<String>,\n pub author: Option<String>,\n pub categories: Vec<Category>,\n pub comments: Option<String>,\n \/\/ pub enclosure\n \/\/ pub guid\n \/\/ pub pubDate\n \/\/ pub source\n}\n\n\nimpl ViaXml for Item {\n fn to_xml(&self) -> Element {\n let mut item = Element::new(\"item\".to_string(), None, vec![]);\n\n item.tag_with_text_opt(\"title\", &self.title);\n item.tag_with_text_opt(\"link\", &self.link);\n item.tag_with_text_opt(\"description\", &self.description);\n item.tag_with_text_opt(\"author\", &self.author);\n item.tag_with_text_opt(\"comments\", &self.comments);\n\n for category in &self.categories {\n item.tag(category.to_xml());\n }\n\n item\n }\n\n fn from_xml(element: Element) -> Result<Self, &'static str> {\n let title = element.get_child(\"title\", None).map(Element::content_str);\n let link = element.get_child(\"link\", None).map(Element::content_str);\n let description = element.get_child(\"description\", None).map(Element::content_str);\n let author = element.get_child(\"author\", None).map(Element::content_str);\n let comments = element.get_child(\"comments\", None).map(Element::content_str);\n\n let categories = element.get_children(\"category\", None)\n .map(|e| ViaXml::from_xml(e.clone()).unwrap())\n .collect();\n\n Ok(Item {\n title: title,\n link: link,\n description: description,\n categories: categories,\n author: author,\n comments: comments,\n })\n }\n}\n\n\n\/\/\/ Category\n\/\/\/\n\/\/\/ [RSS 2.0 Specification § Category]\n\/\/\/ 
(http:\/\/cyber.law.harvard.edu\/rss\/rss.html#ltcategorygtSubelementOfLtitemgt)\n#[derive(Default)]\npub struct Category {\n pub domain: Option<String>,\n pub value: String,\n}\n\nimpl ViaXml for Category {\n fn to_xml(&self) -> Element {\n let mut category = match self.domain {\n Some(ref d) => Element::new(\"category\".to_string(), None, vec![(\"domain\".to_string(), None, d.clone())]),\n None => Element::new(\"category\".to_string(), None, vec![]),\n };\n category.text(self.value.clone());\n category\n }\n\n fn from_xml(elem: Element) -> Result<Self, &'static str> {\n let domain = elem.get_attribute(\"domain\", None).map(|s| s.to_string());\n let value = elem.content_str();\n\n Ok(Category {\n domain: domain,\n value: value,\n })\n }\n}\n\n\n\n#[cfg(test)]\nmod test {\n use std::default::Default;\n use std::fs::File;\n use super::{Rss, Item, Channel};\n\n #[test]\n fn test_basic_to_string() {\n let item = Item {\n title: Some(\"My first post!\".to_string()),\n link: Some(\"http:\/\/myblog.com\/post1\".to_string()),\n description: Some(\"This is my first post\".to_string()),\n ..Default::default()\n };\n\n let channel = Channel {\n title: \"My Blog\".to_string(),\n link: \"http:\/\/myblog.com\".to_string(),\n description: \"Where I write stuff\".to_string(),\n items: vec![item],\n ..Default::default()\n };\n\n let rss = Rss(channel);\n assert_eq!(rss.to_string(), \"<?xml version=\\'1.0\\' encoding=\\'UTF-8\\'?><rss version=\\'2.0\\'><channel><title>My Blog<\/title><link>http:\/\/myblog.com<\/link><description>Where I write stuff<\/description><item><title>My first post!<\/title><link>http:\/\/myblog.com\/post1<\/link><description>This is my first post<\/description><\/item><\/channel><\/rss>\");\n }\n\n #[test]\n fn test_from_file() {\n let mut file = File::open(\"test-data\/pinboard.xml\").unwrap();\n let rss = Rss::from_read(&mut file).unwrap();\n assert!(rss.to_string().len() > 0);\n }\n\n #[test]\n #[should_panic]\n fn test_from_read_no_channels() {\n let mut rss_bytes = \"<rss><\/rss>\".as_bytes();\n let Rss(_) = Rss::from_read(&mut rss_bytes).unwrap();\n }\n\n #[test]\n #[should_panic]\n fn test_from_read_one_channel_no_properties() {\n let mut rss_bytes = \"<rss><channel><\/channel><\/rss>\".as_bytes();\n let Rss(_) = Rss::from_read(&mut rss_bytes).unwrap();\n }\n\n #[test]\n fn test_from_read_one_channel() {\n let mut rss_bytes = \"<rss><channel><title>Hello world!<\/title><description><\/description><link><\/link><\/channel><\/rss>\".as_bytes();\n let Rss(channel) = Rss::from_read(&mut rss_bytes).unwrap();\n assert_eq!(\"Hello world!\", channel.title);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Syncfile: decrypts metadata, but doesn't do anything with it yet<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(syncfile): add get_hmac function<commit_after><|endoftext|>"} {"text":"<commit_before>use std::io::{ Write, Result as IoResult };\n\nuse exec::{Step, Message};\nuse eval::Expr;\nuse data::DataMode;\nuse session::ValueID;\n\npub type StateId = usize;\npub type CounterId = usize;\n\n#[derive(Clone, Debug)]\npub struct Nfa {\n states: Vec<State>,\n initial: StateId,\n success: StateId,\n}\n\nimpl Nfa {\n pub fn new() -> Nfa {\n let mut nfa = Nfa { states: vec![], initial: 0, success: 0 };\n nfa.initial = nfa.add_state();\n nfa.success = nfa.add_state();\n nfa\n }\n\n pub fn add_state(&mut self) -> StateId {\n let id = self.states.len() as StateId;\n self.states.push(State::new());\n id\n }\n\n pub fn add_transition(&mut self, from: 
StateId, to: StateId, action: Action) {\n self.states[from as usize].transitions.push(Transition { target: to, action: action});\n }\n\n pub fn to_graphviz(&self, f: &mut Write) -> IoResult<()> {\n try!(writeln!(f, \"digraph G {{\"));\n for (id, state) in self.states.iter().enumerate() {\n for transition in &state.transitions {\n let colorstr = match &transition.action {\n &Action::Epsilon => r#\"fontcolor=\"gray\"\"#,\n &Action::Lower(..) => r#\"fontcolor=\"blue\"\"#,\n &Action::UpperBegin(..) | &Action::UpperEnd(..) => r#\"fontcolor=\"green\"\"#,\n _ => \"\"\n };\n try!(writeln!(f, \"{} -> {} [ label=\\\"{:?}\\\" {}];\", id, transition.target, transition.action, colorstr));\n }\n }\n try!(writeln!(f, \"}}\"));\n Ok(())\n }\n}\n\n#[derive(Clone, Debug)]\npub struct State {\n transitions: Vec<Transition>,\n}\n\nimpl State {\n fn new() -> State {\n State { transitions: vec![] }\n }\n}\n\n#[derive(Clone, Debug)]\npub struct Transition {\n target: StateId,\n action: Action,\n}\n\n#[derive(Clone, Debug)]\npub enum Action {\n Epsilon,\n Lower(Message),\n UpperBegin(Message),\n UpperEnd(Message),\n\n RepeatDnInit(CounterId, Expr), \/\/ Down-evaluate count, initialize counter to zero\n RepeatDnBack(CounterId), \/\/ Increment counter, guard on counter < count\n RepeatDnExit(CounterId), \/\/ Guard on counter == count\n\n RepeatUpInit(CounterId), \/\/ initialize counter to zero\n RepeatUpBack(CounterId), \/\/ increment counter\n RepeatUpExit(CounterId, Expr), \/\/ Guard on up-evaluate count\n\n \/\/ Down-evaluate outer variables, set counter to zero\n ForInit(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ guard on counter < size, pop next inner variables down,\n ForEntry(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ push next outer variables up, increment\n ForBack(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ guard on counter == size, Up-evaluate outer variables\n ForExit(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n}\n\npub fn from_step_tree(s: &Step) -> Nfa {\n let mut nfa = Nfa::new();\n let initial = nfa.initial;\n let success = nfa.success;\n from_step_tree_inner(s, &mut nfa, initial, success);\n\n fn from_step_tree_inner(s: &Step, nfa: &mut Nfa, from: StateId, to: StateId) {\n match *s {\n Step::Nop => nfa.add_transition(from, to, Action::Epsilon),\n Step::Token(ref message) => {\n let m: Message = message.clone();\n nfa.add_transition(from, to, Action::Lower(m))\n }\n Step::TokenTop(ref message, box ref body) => {\n let is = nfa.add_state();\n let ie = nfa.add_state();\n nfa.add_transition(from, is, Action::UpperBegin(message.clone()));\n from_step_tree_inner(body, nfa, is, ie);\n nfa.add_transition(ie, to, Action::UpperEnd(message.clone()));\n }\n Step::Seq(ref steps) => {\n if let Some((last, rest)) = steps.split_last() {\n let mut conn = from;\n for c in rest.iter() {\n let next = nfa.add_state();\n from_step_tree_inner(c, nfa, conn, next);\n conn = next;\n }\n from_step_tree_inner(last, nfa, conn, to);\n } else {\n nfa.add_transition(from, to, Action::Epsilon);\n }\n }\n Step::Repeat(ref count, box ref inner, up) => {\n let start = nfa.add_state();\n let end = nfa.add_state();\n let counter = start;\n\n if up {\n if count.ignored() {\n nfa.add_transition(from, start, Action::Epsilon);\n nfa.add_transition(start, end, Action::Epsilon);\n nfa.add_transition(start, to, Action::Epsilon);\n } else {\n nfa.add_transition(from, start, Action::RepeatUpInit(counter));\n nfa.add_transition(start, end, Action::RepeatUpBack(counter));\n nfa.add_transition(start, to, 
Action::RepeatUpExit(counter, count.clone()));\n }\n } else {\n nfa.add_transition(from, start, Action::RepeatDnInit(counter, count.clone()));\n nfa.add_transition(start, end, Action::RepeatDnBack(counter));\n nfa.add_transition(start, to, Action::RepeatDnExit(counter));\n }\n from_step_tree_inner(inner, nfa, end, start);\n }\n Step::Foreach(width, ref vars, box ref inner) => {\n let entry = nfa.add_state();\n let start = nfa.add_state();\n let end = nfa.add_state();\n let counter = entry;\n\n nfa.add_transition(from, entry, Action::ForInit(counter, width, vars.clone()));\n nfa.add_transition(entry, start, Action::ForEntry(counter, width, vars.clone()));\n nfa.add_transition(end, entry, Action::ForBack(counter, width, vars.clone()));\n nfa.add_transition(entry, to, Action::ForExit(counter, width, vars.clone()));\n\n from_step_tree_inner(inner, nfa, start, end);\n }\n }\n }\n\n nfa\n}\n<commit_msg>nfa: change representation of initial \/ accepting states<commit_after>use std::io::{ Write, Result as IoResult };\nuse std::collections::HashSet;\n\nuse exec::{Step, Message};\nuse eval::Expr;\nuse data::DataMode;\nuse session::ValueID;\n\npub type StateId = usize;\npub type CounterId = usize;\n\n#[derive(Clone, Debug)]\npub struct Nfa {\n states: Vec<State>,\n initial: HashSet<StateId>,\n accepting: HashSet<StateId>,\n}\n\nimpl Nfa {\n pub fn new() -> Nfa {\n Nfa { states: vec![], initial: HashSet::new(), accepting: HashSet::new() }\n }\n\n pub fn add_state(&mut self) -> StateId {\n let id = self.states.len() as StateId;\n self.states.push(State::new());\n id\n }\n\n pub fn add_transition(&mut self, from: StateId, to: StateId, action: Action) {\n self.states[from as usize].transitions.push(Transition { target: to, action: action});\n }\n\n pub fn to_graphviz(&self, f: &mut Write) -> IoResult<()> {\n try!(writeln!(f, \"digraph G {{\"));\n for (id, state) in self.states.iter().enumerate() {\n for transition in &state.transitions {\n let colorstr = match &transition.action {\n &Action::Epsilon => r#\"fontcolor=\"gray\"\"#,\n &Action::Lower(..) => r#\"fontcolor=\"blue\"\"#,\n &Action::UpperBegin(..) | &Action::UpperEnd(..) 
=> r#\"fontcolor=\"green\"\"#,\n _ => \"\"\n };\n try!(writeln!(f, \"{} -> {} [ label=\\\"{:?}\\\" {}];\", id, transition.target, transition.action, colorstr));\n }\n }\n\n for initial in &self.initial {\n try!(writeln!(f, \"start -> {};\", initial));\n }\n\n for accepting in &self.accepting {\n try!(writeln!(f, \"{} -> end;\", accepting));\n }\n\n try!(writeln!(f, \"}}\"));\n Ok(())\n }\n}\n\n#[derive(Clone, Debug)]\npub struct State {\n transitions: Vec<Transition>,\n}\n\nimpl State {\n fn new() -> State {\n State { transitions: vec![] }\n }\n}\n\n#[derive(Clone, Debug)]\npub struct Transition {\n target: StateId,\n action: Action,\n}\n\n#[derive(Clone, Debug)]\npub enum Action {\n Epsilon,\n Lower(Message),\n UpperBegin(Message),\n UpperEnd(Message),\n\n RepeatDnInit(CounterId, Expr), \/\/ Down-evaluate count, initialize counter to zero\n RepeatDnBack(CounterId), \/\/ Increment counter, guard on counter < count\n RepeatDnExit(CounterId), \/\/ Guard on counter == count\n\n RepeatUpInit(CounterId), \/\/ initialize counter to zero\n RepeatUpBack(CounterId), \/\/ increment counter\n RepeatUpExit(CounterId, Expr), \/\/ Guard on up-evaluate count\n\n \/\/ Down-evaluate outer variables, set counter to zero\n ForInit(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ guard on counter < size, pop next inner variables down,\n ForEntry(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ push next outer variables up, increment\n ForBack(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n \/\/ guard on counter == size, Up-evaluate outer variables\n ForExit(CounterId, u32, Vec<(ValueID, Expr, DataMode)>),\n}\n\npub fn from_step_tree(s: &Step) -> Nfa {\n let mut nfa = Nfa::new();\n let initial = nfa.add_state();\n let success = nfa.add_state();\n nfa.initial.insert(initial);\n nfa.accepting.insert(success);\n from_step_tree_inner(s, &mut nfa, initial, success);\n\n fn from_step_tree_inner(s: &Step, nfa: &mut Nfa, from: StateId, to: StateId) {\n match *s {\n Step::Nop => nfa.add_transition(from, to, Action::Epsilon),\n Step::Token(ref message) => {\n let m: Message = message.clone();\n nfa.add_transition(from, to, Action::Lower(m))\n }\n Step::TokenTop(ref message, box ref body) => {\n let is = nfa.add_state();\n let ie = nfa.add_state();\n nfa.add_transition(from, is, Action::UpperBegin(message.clone()));\n from_step_tree_inner(body, nfa, is, ie);\n nfa.add_transition(ie, to, Action::UpperEnd(message.clone()));\n }\n Step::Seq(ref steps) => {\n if let Some((last, rest)) = steps.split_last() {\n let mut conn = from;\n for c in rest.iter() {\n let next = nfa.add_state();\n from_step_tree_inner(c, nfa, conn, next);\n conn = next;\n }\n from_step_tree_inner(last, nfa, conn, to);\n } else {\n nfa.add_transition(from, to, Action::Epsilon);\n }\n }\n Step::Repeat(ref count, box ref inner, up) => {\n let start = nfa.add_state();\n let end = nfa.add_state();\n let counter = start;\n\n if up {\n if count.ignored() {\n nfa.add_transition(from, start, Action::Epsilon);\n nfa.add_transition(start, end, Action::Epsilon);\n nfa.add_transition(start, to, Action::Epsilon);\n } else {\n nfa.add_transition(from, start, Action::RepeatUpInit(counter));\n nfa.add_transition(start, end, Action::RepeatUpBack(counter));\n nfa.add_transition(start, to, Action::RepeatUpExit(counter, count.clone()));\n }\n } else {\n nfa.add_transition(from, start, Action::RepeatDnInit(counter, count.clone()));\n nfa.add_transition(start, end, Action::RepeatDnBack(counter));\n nfa.add_transition(start, to, 
Action::RepeatDnExit(counter));\n }\n from_step_tree_inner(inner, nfa, end, start);\n }\n Step::Foreach(width, ref vars, box ref inner) => {\n let entry = nfa.add_state();\n let start = nfa.add_state();\n let end = nfa.add_state();\n let counter = entry;\n\n nfa.add_transition(from, entry, Action::ForInit(counter, width, vars.clone()));\n nfa.add_transition(entry, start, Action::ForEntry(counter, width, vars.clone()));\n nfa.add_transition(end, entry, Action::ForBack(counter, width, vars.clone()));\n nfa.add_transition(entry, to, Action::ForExit(counter, width, vars.clone()));\n\n from_step_tree_inner(inner, nfa, start, end);\n }\n }\n }\n\n nfa\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix rustc warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>how do namespaces work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Revert workaround for Rust issue #24258<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add PartialOrd and Ord to default derive_enum value<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove useless code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ran rustfmt<commit_after><|endoftext|>"} {"text":"<commit_before>fn main() {\n let decimal = 65.4321_f32;\n\n \/\/ Error! No implicit conversion\n let integer: u8 = decimal;\n \/\/ FIXME ^ Comment out this line\n\n \/\/ Explicit conversion\n let integer = decimal as u8;\n let character = integer as char;\n\n println!(\"Casting: {} -> {} -> {}\", decimal, integer, character);\n\n \/\/ when casting any value to an unsigned type, T, \n \/\/ std::T::MAX + 1 is added or subtracted until the value\n \/\/ fits into the new type\n\n \/\/ 1000 already fits in a u16\n println!(\"1000 as a u16 is: {}\", 1000 as u16);\n\n \/\/ 1000 - 256 - 256 - 256 = 232\n println!(\"1000 as a u8 is : {}\", 1000 as u8);\n \/\/ -1 + 256 = 255\n println!(\" -1 as a u8 is : {}\", (-1i8) as u8);\n\n \/\/ For positive numbers, this is the same as the modulus\n println!(\"1000 mod 256 is : {}\", 1000 % 256);\n\n \/\/ When casting to a signed type, the result is the same as \n \/\/ first casting to the corresponding unsigned type then \n \/\/ taking the two's complement.\n\n \/\/ Unless it already fits, of course.\n println!(\" 128 as a i16 is: {}\", 128 as i16);\n \/\/ 128 as u8 -> 128, whose two's complement in eight bits is:\n println!(\" 128 as a i8 is : {}\", 128 as i8);\n\n \/\/ repeating the example above\n \/\/ 1000 as u8 -> 232\n println!(\"1000 as a i8 is : {}\", 1000 as i8);\n \/\/ and the two's complement of 232 is -24\n println!(\" 232 as a i8 is : {}\", 232 as i8);\n\n\n}\n<commit_msg>Suppress warnings on cast by default<commit_after>\/\/ Suppress all warnings from casts which overflow.\n#![allow(overflowing_literals)]\n\nfn main() {\n let decimal = 65.4321_f32;\n\n \/\/ Error! 
No implicit conversion\n let integer: u8 = decimal;\n \/\/ FIXME ^ Comment out this line\n\n \/\/ Explicit conversion\n let integer = decimal as u8;\n let character = integer as char;\n\n println!(\"Casting: {} -> {} -> {}\", decimal, integer, character);\n\n \/\/ when casting any value to an unsigned type, T, \n \/\/ std::T::MAX + 1 is added or subtracted until the value\n \/\/ fits into the new type\n\n \/\/ 1000 already fits in a u16\n println!(\"1000 as a u16 is: {}\", 1000 as u16);\n\n \/\/ 1000 - 256 - 256 - 256 = 232\n println!(\"1000 as a u8 is : {}\", 1000 as u8);\n \/\/ -1 + 256 = 255\n println!(\" -1 as a u8 is : {}\", (-1i8) as u8);\n\n \/\/ For positive numbers, this is the same as the modulus\n println!(\"1000 mod 256 is : {}\", 1000 % 256);\n\n \/\/ When casting to a signed type, the result is the same as \n \/\/ first casting to the corresponding unsigned type then \n \/\/ taking the two's complement.\n\n \/\/ Unless it already fits, of course.\n println!(\" 128 as a i16 is: {}\", 128 as i16);\n \/\/ 128 as u8 -> 128, whose two's complement in eight bits is:\n println!(\" 128 as a i8 is : {}\", 128 as i8);\n\n \/\/ repeating the example above\n \/\/ 1000 as u8 -> 232\n println!(\"1000 as a i8 is : {}\", 1000 as i8);\n \/\/ and the two's complement of 232 is -24\n println!(\" 232 as a i8 is : {}\", 232 as i8);\n\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>read maps from test_output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(test) Added regression test for nested describe! blocks.<commit_after>#![feature(phase)]\n#[phase(plugin, link)]\nextern crate stainless;\n\ndescribe!(\"top level\" {\n it \"less specific\" {\n assert_eq!(1u, 1u);\n }\n\n describe!(\"nested\" {\n it \"more specific\" {\n assert_eq!(2u, 2u);\n }\n })\n})\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix example.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>New TcpStream::connect() interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial import<commit_after>\/\/ (C)opyleft 2013 Frank Denis\n\n\/*!\n * Bloom filter for Rust\n *\n * This is a simple but fast Bloom filter implementation, that requires only\n * 2 hash functions, generated with SipHash-2-4 using randomized keys.\n *\/\n\n#[link(name = \"bloomfilter\", vers = \"0.1\")];\n#[crate_type = \"lib\"];\n#[license = \"BSD\"];\n#[warn(non_camel_case_types, non_uppercase_statics, non_uppercase_statics, unnecessary_qualification, managed_heap_memory)]\n\nextern mod extra;\n\nuse std::hash;\nuse std::num;\nuse std::rand;\nuse std::rand::Rng;\nuse extra::bitv;\n\nstruct SipHashKey {\n k1: u64,\n k2: u64\n}\n\nimpl SipHashKey {\n fn new(k1: u64, k2: u64) -> SipHashKey {\n SipHashKey {\n k1: k1,\n k2: k2\n }\n }\n\n fn new_random() -> SipHashKey {\n let mut rng = rand::task_rng();\n SipHashKey {\n k1: rand::Rand::rand(& mut rng),\n k2: rand::Rand::rand(& mut rng)\n }\n }\n}\n\n\/\/\/ Bloom filter structure\npub struct Bloom {\n priv bitmap: bitv::Bitv,\n priv bitmap_bits: u64,\n priv k_num: uint,\n priv skeys: [SipHashKey, ..2]\n}\n\nimpl Bloom {\n\n\/\/\/ Create a new bloom filter structure.\n\/\/\/ bitmap_size is the size in bytes (not bits) that will be allocated in memory\n\/\/\/ items_count is an estimation of the maximum number of items to store\n pub fn new(bitmap_size: uint, items_count: uint) -> Bloom {\n assert!(bitmap_size > 0u && items_count > 0u);\n let bitmap_bits = (bitmap_size as u64) * 8u64;\n let k_num = Bloom::optimal_k_num(bitmap_bits, items_count);\n 
let bitmap = bitv::Bitv::new(bitmap_bits as uint, false);\n let skeys = [ SipHashKey::new_random(), SipHashKey::new_random() ];\n Bloom {\n bitmap: bitmap,\n bitmap_bits: bitmap_bits,\n k_num: k_num,\n skeys: skeys\n }\n }\n\n\/\/\/ Record the presence of an item.\n pub fn set<T: hash::Hash>(& mut self, item: T) {\n let mut hashes = [ 0u64, 0u64 ];\n for k_i in range(0u, self.k_num) {\n let bit_offset = (self.bloom_hash(& mut hashes, &item, k_i)\n % self.bitmap_bits) as uint;\n self.bitmap.set(bit_offset, true);\n }\n }\n\n\/\/\/ Check if an item is present in the set.\n\/\/\/ There can be false positives, but no false negatives.\n pub fn check<T: hash::Hash>(&self, item: T) -> bool {\n let mut hashes = [ 0u64, 0u64 ];\n for k_i in range(0u, self.k_num) {\n let bit_offset = (self.bloom_hash(& mut hashes, &item, k_i)\n % self.bitmap_bits) as uint;\n if self.bitmap.get(bit_offset) == false {\n return false;\n }\n }\n true\n }\n\n\/\/\/ Record the presence of an item in the set,\n\/\/\/ and return the previous state of this item.\n pub fn check_and_set<T: hash::Hash>(&mut self, item: T) -> bool {\n let mut hashes = [ 0u64, 0u64 ];\n let mut found = true;\n for k_i in range(0u, self.k_num) {\n let bit_offset = (self.bloom_hash(& mut hashes, &item, k_i)\n % self.bitmap_bits) as uint;\n if self.bitmap.get(bit_offset) == false {\n found = false;\n self.bitmap.set(bit_offset, true);\n }\n }\n found\n }\n\n fn optimal_k_num(bitmap_bits: u64, items_count: uint) -> uint {\n let m = bitmap_bits as f64;\n let n = items_count as f64;\n let k_num = (m \/ n * num::ln(2.0f64).ceil()) as uint;\n num::max(k_num, 1)\n }\n\n fn bloom_hash<T: hash::Hash>(&self, hashes: & mut [u64, ..2],\n item: &T, k_i: uint) -> u64 {\n if k_i < 2 {\n let skey = self.skeys[k_i];\n let hash = (*item).hash_keyed(skey.k1, skey.k2);\n hashes[k_i] = hash;\n hash\n } else {\n hashes[0] + (((k_i as u64) * hashes[1]) % 0xffffffffffffffc5)\n }\n }\n}\n\n#[test]\nfn bloom_test_set() {\n let mut bloom = Bloom::new(10, 80);\n let key = &rand::task_rng().gen_ascii_str(16u);\n assert!(bloom.check(key) == false);\n bloom.set(&key);\n assert!(bloom.check(key.clone()) == true);\n}\n\n#[test]\nfn bloom_test_check_and_set() {\n let mut bloom = Bloom::new(10, 80);\n let key = &rand::task_rng().gen_ascii_str(16u);\n assert!(bloom.check_and_set(key) == false);\n assert!(bloom.check_and_set(key.clone()) == true);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Increase hashmap capacity to match the amount of settings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>notes<commit_after><|endoftext|>"} {"text":"<commit_before>#![deny(\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\nextern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagstore;\n#[macro_use] extern crate libimagerror;\n\nuse std::result::Result as RResult;\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagstore::store::FileLockEntry;\nuse libimagerror::trace::trace_error;\n\nmod error;\nmod ui;\nmod viewer;\n\nuse error::{ViewError, ViewErrorKind};\nuse ui::build_ui;\nuse viewer::Viewer;\nuse viewer::ViewInformation;\nuse viewer::stdout::StdoutViewer;\n\ntype Result<T> = RResult<T, ViewError>;\n\nfn main() 
{\n let name = \"imag-view\";\n let version = &version!()[..];\n let about = \"View entries (readonly)\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1); \/\/ we can afford not-executing destructors here\n }\n };\n\n let entry_id = rt.cli().value_of(\"id\").unwrap(); \/\/ enforced by clap\n\n if rt.cli().is_present(\"versions\") {\n if let Err(e) = view_versions_of(entry_id, &rt) {\n trace_error(&e);\n exit(1); \/\/ we can afford not-executing destructors here\n }\n } else {\n let entry_version = rt.cli().value_of(\"version\");\n let view_header = rt.cli().is_present(\"view-header\");\n let view_content = rt.cli().is_present(\"view-content\");\n let view_copy = rt.cli().is_present(\"view-copy\");\n let keep_copy = rt.cli().is_present(\"keep-copy\");\n\n let scmd = rt.cli().subcommand_matches(\"view-in\");\n if scmd.is_none() {\n debug!(\"No commandline call\");\n exit(1); \/\/ we can afford not-executing destructors here\n }\n let scmd = scmd.unwrap();\n\n let viewer = {\n if scmd.is_present(\"view-in-stdout\") {\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-ui\") {\n warn!(\"Viewing in UI is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-browser\") {\n warn!(\"Viewing in browser is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-texteditor\") {\n warn!(\"Viewing in texteditor is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-custom\") {\n warn!(\"Viewing in custom is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else {\n Box::new(StdoutViewer::new())\n }\n };\n\n let entry = load_entry(entry_id, entry_version, &rt);\n if entry.is_err() {\n trace_error(&entry.unwrap_err());\n exit(1); \/\/ we can afford not-executing destructors here\n }\n let entry = entry.unwrap();\n\n let view_info = ViewInformation {\n entry: entry,\n view_header: view_header,\n view_content: view_content,\n view_copy: view_copy,\n keep_copy: keep_copy,\n };\n\n viewer.view(view_info);\n }\n}\n\n\/\/ TODO: This is a shameless adaption of imag-store\/src\/util.rs\nfn load_entry<'a>(id: &str,\n version: Option<&str>,\n rt: &'a Runtime)\n -> Result<FileLockEntry<'a>>\n{\n debug!(\"Checking path element for version\");\n\n let version = {\n if version.is_none() {\n let r = id.split('~').last();\n if r.is_none() {\n warn!(\"No version\");\n return Err(ViewError::new(ViewErrorKind::NoVersion, None));\n } else {\n r.unwrap()\n }\n } else {\n version.unwrap()\n }\n };\n\n debug!(\"Building path from {:?} and {:?}\", id, version);\n let mut path = rt.store().path().clone();\n\n if id.starts_with('\/') {\n path.push(format!(\"{}~{}\", &id[1..id.len()], version));\n } else {\n path.push(format!(\"{}~{}\", id, version));\n }\n\n \/\/ the above is the adaption...\n\n rt.store().retrieve(path)\n .map_err(|e| ViewError::new(ViewErrorKind::StoreError, Some(Box::new(e))))\n}\n\nfn view_versions_of(id: &str, rt: &Runtime) -> Result<()> {\n use glob::glob;\n\n let mut path = rt.store().path().clone();\n\n if id.starts_with('\/') {\n path.push(format!(\"{}~*\", &id[1..id.len()]));\n } else {\n path.push(format!(\"{}~*\", id));\n }\n\n if let Some(path) = path.to_str() {\n 
match glob(path) {\n Ok(paths) => {\n for entry in paths {\n match entry {\n Ok(path) => println!(\"{}\", path.file_name().and_then(|s| s.to_str()).unwrap()),\n Err(e) => trace_error(e.error()),\n }\n }\n Ok(())\n },\n Err(e) => {\n debug!(\"Error in pattern\");\n Err(ViewError::new(ViewErrorKind::PatternError, Some(Box::new(e))))\n },\n }\n } else {\n warn!(\"Could not build glob() argument!\");\n Err(ViewError::new(ViewErrorKind::GlobBuildError, None))\n }\n}\n\n<commit_msg>imag-view: Remove setup code and use helper<commit_after>#![deny(\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\nextern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagstore;\n#[macro_use] extern crate libimagerror;\n\nuse std::result::Result as RResult;\nuse std::process::exit;\n\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagstore::store::FileLockEntry;\nuse libimagerror::trace::trace_error;\n\nmod error;\nmod ui;\nmod viewer;\n\nuse error::{ViewError, ViewErrorKind};\nuse ui::build_ui;\nuse viewer::Viewer;\nuse viewer::ViewInformation;\nuse viewer::stdout::StdoutViewer;\n\ntype Result<T> = RResult<T, ViewError>;\n\nfn main() {\n let rt = generate_runtime_setup( \"imag-view\",\n &version!()[..],\n \"View entries (readonly)\",\n build_ui);\n\n let entry_id = rt.cli().value_of(\"id\").unwrap(); \/\/ enforced by clap\n\n if rt.cli().is_present(\"versions\") {\n if let Err(e) = view_versions_of(entry_id, &rt) {\n trace_error(&e);\n exit(1); \/\/ we can afford not-executing destructors here\n }\n } else {\n let entry_version = rt.cli().value_of(\"version\");\n let view_header = rt.cli().is_present(\"view-header\");\n let view_content = rt.cli().is_present(\"view-content\");\n let view_copy = rt.cli().is_present(\"view-copy\");\n let keep_copy = rt.cli().is_present(\"keep-copy\");\n\n let scmd = rt.cli().subcommand_matches(\"view-in\");\n if scmd.is_none() {\n debug!(\"No commandline call\");\n exit(1); \/\/ we can afford not-executing destructors here\n }\n let scmd = scmd.unwrap();\n\n let viewer = {\n if scmd.is_present(\"view-in-stdout\") {\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-ui\") {\n warn!(\"Viewing in UI is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-browser\") {\n warn!(\"Viewing in browser is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-texteditor\") {\n warn!(\"Viewing in texteditor is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else if scmd.is_present(\"view-in-custom\") {\n warn!(\"Viewing in custom is currently not supported, switch to stdout\");\n Box::new(StdoutViewer::new())\n } else {\n Box::new(StdoutViewer::new())\n }\n };\n\n let entry = load_entry(entry_id, entry_version, &rt);\n if entry.is_err() {\n trace_error(&entry.unwrap_err());\n exit(1); \/\/ we can afford not-executing destructors here\n }\n let entry = entry.unwrap();\n\n let view_info = ViewInformation {\n entry: entry,\n view_header: view_header,\n view_content: view_content,\n view_copy: view_copy,\n keep_copy: keep_copy,\n };\n\n 
viewer.view(view_info);\n }\n}\n\n\/\/ TODO: This is a shameless adaption of imag-store\/src\/util.rs\nfn load_entry<'a>(id: &str,\n version: Option<&str>,\n rt: &'a Runtime)\n -> Result<FileLockEntry<'a>>\n{\n debug!(\"Checking path element for version\");\n\n let version = {\n if version.is_none() {\n let r = id.split('~').last();\n if r.is_none() {\n warn!(\"No version\");\n return Err(ViewError::new(ViewErrorKind::NoVersion, None));\n } else {\n r.unwrap()\n }\n } else {\n version.unwrap()\n }\n };\n\n debug!(\"Building path from {:?} and {:?}\", id, version);\n let mut path = rt.store().path().clone();\n\n if id.starts_with('\/') {\n path.push(format!(\"{}~{}\", &id[1..id.len()], version));\n } else {\n path.push(format!(\"{}~{}\", id, version));\n }\n\n \/\/ the above is the adaption...\n\n rt.store().retrieve(path)\n .map_err(|e| ViewError::new(ViewErrorKind::StoreError, Some(Box::new(e))))\n}\n\nfn view_versions_of(id: &str, rt: &Runtime) -> Result<()> {\n use glob::glob;\n\n let mut path = rt.store().path().clone();\n\n if id.starts_with('\/') {\n path.push(format!(\"{}~*\", &id[1..id.len()]));\n } else {\n path.push(format!(\"{}~*\", id));\n }\n\n if let Some(path) = path.to_str() {\n match glob(path) {\n Ok(paths) => {\n for entry in paths {\n match entry {\n Ok(path) => println!(\"{}\", path.file_name().and_then(|s| s.to_str()).unwrap()),\n Err(e) => trace_error(e.error()),\n }\n }\n Ok(())\n },\n Err(e) => {\n debug!(\"Error in pattern\");\n Err(ViewError::new(ViewErrorKind::PatternError, Some(Box::new(e))))\n },\n }\n } else {\n warn!(\"Could not build glob() argument!\");\n Err(ViewError::new(ViewErrorKind::GlobBuildError, None))\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix coordinate in proximity method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Document the primitive types<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a bug introduced by 11d2c2f4569322887de59d3f88a62617dab061e5<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add encoding tests.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added CLI runner<commit_after>use std::path::PathBuf;\n\nuse log::trace;\nuse ra_ap_hir::{self as hir, Crate};\nuse ra_ap_ide_db::RootDatabase;\nuse ra_ap_paths::AbsPathBuf;\nuse ra_ap_project_model::{\n CargoConfig, CargoWorkspace, Package, ProjectManifest, ProjectWorkspace, Target, TargetKind,\n};\nuse ra_ap_vfs::Vfs;\n\nuse crate::ProjectOptions as Options;\n\npub struct Runner<'a> {\n project_dir: PathBuf,\n options: Options,\n db: &'a RootDatabase,\n vfs: &'a Vfs,\n}\n\nimpl<'a> Runner<'a> {\n pub fn new(project_dir: PathBuf, options: Options, db: &'a RootDatabase, vfs: &'a Vfs) -> Self {\n Self {\n project_dir,\n options,\n db,\n vfs,\n }\n }\n\n pub fn run<F>(&self, f: F) -> anyhow::Result<()>\n where\n F: FnOnce(Crate) -> anyhow::Result<()>,\n {\n let project_dir = std::env::current_dir()?.join(&self.project_dir);\n let abs_project_dir = AbsPathBuf::assert(project_dir);\n let manifest = ProjectManifest::discover_single(abs_project_dir.as_path())?;\n\n let config = CargoConfig {\n \/\/ Do not activate the `default` feature.\n no_default_features: false,\n\n \/\/ Activate all available features\n all_features: false,\n\n \/\/ List of features to activate.\n \/\/ This will be ignored if `cargo_all_features` is true.\n features: vec![],\n\n \/\/ Runs cargo check on launch to figure out the correct values of OUT_DIR\n load_out_dirs_from_check: false,\n\n \/\/ rustc target\n target: 
None,\n\n \/\/ Don't load sysroot crates (`std`, `core` & friends). Might be useful\n \/\/ when debugging isolated issues.\n no_sysroot: true,\n\n \/\/ rustc private crate source\n rustc_source: None,\n };\n\n let project_workspace = ProjectWorkspace::load(manifest, &config)?;\n\n let workspace = match project_workspace {\n ProjectWorkspace::Cargo { cargo, .. } => cargo,\n ProjectWorkspace::Json { .. } => {\n unreachable!();\n }\n };\n\n let package_idx = self.package(&workspace)?;\n let package = &workspace[package_idx];\n trace!(\"Selected package: {:#?}\", package.name);\n\n let target_idx = self.target(&workspace, package_idx)?;\n let target = &workspace[target_idx];\n trace!(\"Selected target: {:#?}\", target.name);\n\n let target_root_path = target.root.as_path();\n\n let crates = hir::Crate::all(self.db);\n\n let krate = crates.into_iter().find(|krate| {\n let vfs_path = self.vfs.file_path(krate.root_file(self.db));\n let crate_root_path = vfs_path.as_path().unwrap();\n\n crate_root_path == target_root_path\n });\n\n let krate = match krate {\n Some(krate) => krate,\n None => panic!(\"Crate not found\"),\n };\n\n let crate_name = krate.display_name(self.db).unwrap().to_string();\n trace!(\"Selected crate: {:#?}\", crate_name);\n\n f(krate)\n }\n\n fn package(&self, workspace: &CargoWorkspace) -> anyhow::Result<Package> {\n let packages: Vec<_> = workspace\n .packages()\n .filter(|idx| workspace[*idx].is_member)\n .collect();\n\n let package_count = packages.len();\n\n \/\/ If project contains no packages, bail out:\n\n if package_count < 1 {\n anyhow::bail!(\"No packages found\");\n }\n\n \/\/ If project contains a single packages, just pick it:\n\n if package_count == 1 {\n return Ok(packages[0]);\n }\n\n \/\/ If project contains multiple packages, select the one provided via options:\n\n if let Some(package_name) = &self.options.package {\n let package_idx = packages\n .into_iter()\n .find(|package_idx| {\n let package = &workspace[*package_idx];\n package.name == *package_name\n })\n .expect(&format!(\"No package with name {:?}\", package_name));\n return Ok(package_idx);\n }\n\n \/\/ If no package was provided via options bail out:\n\n let package_list_items: Vec<_> = packages\n .into_iter()\n .map(|package_idx| {\n let package = &workspace[package_idx];\n format!(\"- {}\", package.name)\n })\n .collect();\n\n let package_list = package_list_items.join(\"\\n\");\n\n Err(anyhow::anyhow!(\n indoc::indoc! 
{\n \"Multiple packages present in workspace,\n please explicitly select one via --package flag.\n\n Packages present in workspace:\n {}\n \"\n },\n package_list\n ))\n }\n\n fn target(&self, workspace: &CargoWorkspace, package_idx: Package) -> anyhow::Result<Target> {\n let package = &workspace[package_idx];\n\n \/\/ Retrieve list of indices for bin\/lib targets:\n\n let targets: Vec<_> = package\n .targets\n .iter()\n .cloned()\n .filter(|target_idx| {\n let target = &workspace[*target_idx];\n match target.kind {\n TargetKind::Bin => true,\n TargetKind::Lib => true,\n TargetKind::Example => false,\n TargetKind::Test => false,\n TargetKind::Bench => false,\n TargetKind::Other => false,\n }\n })\n .collect();\n\n let target_count = targets.len();\n\n \/\/ If package contains no targets, bail out:\n\n if target_count < 1 {\n anyhow::bail!(\"No targets found\");\n }\n\n \/\/ If project contains a single target, just pick it:\n\n if target_count == 1 {\n return Ok(targets[0]);\n }\n\n \/\/ If package contains multiple targets, select the one provided via options:\n\n if self.options.lib {\n let target = targets.into_iter().find(|target_idx| {\n let target = &workspace[*target_idx];\n target.kind == TargetKind::Lib\n });\n\n return target.ok_or_else(|| anyhow::anyhow!(\"No library target found\"));\n }\n\n if let Some(bin_name) = &self.options.bin {\n let target = targets.into_iter().find(|target_idx| {\n let target = &workspace[*target_idx];\n (target.kind == TargetKind::Bin) && (target.name == &bin_name[..])\n });\n\n return target\n .ok_or_else(|| anyhow::anyhow!(\"No binary target found with name {:?}\", bin_name));\n }\n\n \/\/ If no target was provided via options bail out:\n\n let target_list_items: Vec<_> = targets\n .into_iter()\n .map(|target_idx| {\n let target = &workspace[target_idx];\n match target.kind {\n TargetKind::Bin => format!(\"- {} (--bin {})\", target.name, target.name),\n TargetKind::Lib => format!(\"- {} (--lib)\", target.name),\n TargetKind::Example => unreachable!(),\n TargetKind::Test => unreachable!(),\n TargetKind::Bench => unreachable!(),\n TargetKind::Other => unreachable!(),\n }\n })\n .collect();\n\n let target_list = target_list_items.join(\"\\n\");\n\n Err(anyhow::anyhow!(\n indoc::indoc! 
{\n \"Multiple targets present in package,\n please explicitly select one via --lib or --bin flag.\n\n Targets present in package:\n {}\n \"\n },\n target_list\n ))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: Remove unused response<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed byte type<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before>use audio::ac97::AC97;\nuse audio::intelhda::IntelHDA;\n\nuse collections::vec::Vec;\n\nuse common::debug;\nuse common::queue::Queue;\n\nuse drivers::pciconfig::PciConfig;\n\nuse network::intel8254x::Intel8254x;\nuse network::rtl8139::Rtl8139;\n\nuse programs::session::Session;\n\nuse schemes::file::FileScheme;\n\nuse usb::ehci::Ehci;\nuse usb::uhci::Uhci;\nuse usb::xhci::Xhci;\n\n\/\/\/ PCI device\npub unsafe fn pci_device(session: &mut Session,\n mut pci: PciConfig,\n class_id: u32,\n subclass_id: u32,\n interface_id: u32,\n vendor_code: u32,\n device_code: u32) {\n if class_id == 0x01 && subclass_id == 0x01 {\n if let Some(module) = FileScheme::new(pci) {\n session.items.push(module);\n }\n } else if class_id == 0x0C && subclass_id == 0x03 {\n if interface_id == 0x30 {\n let base = pci.read(0x10) as usize;\n\n let module = box Xhci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n } else if interface_id == 0x20 {\n let base = pci.read(0x10) as usize;\n\n let mut module = box Ehci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n } else if interface_id == 0x10 {\n let base = pci.read(0x10) as usize;\n\n debug::d(\"OHCI Controller on \");\n debug::dh(base & 0xFFFFFFF0);\n debug::dl();\n } else if interface_id == 0x00 {\n session.items.push(Uhci::new(pci));\n } else {\n debug::d(\"Unknown USB interface version\\n\");\n }\n } else {\n match vendor_code {\n 0x10EC => match device_code { \/\/ REALTEK\n 0x8139 => {\n session.items.push(Rtl8139::new(pci));\n }\n _ => (),\n },\n 0x8086 => match device_code { \/\/ INTEL\n 0x100E => {\n let base = pci.read(0x10) as usize;\n let mut module = box Intel8254x {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n resources: Vec::new(),\n inbound: Queue::new(),\n outbound: Queue::new(),\n };\n module.init();\n session.items.push(module);\n }\n 0x2415 => session.items.push(AC97::new(pci)),\n 0x24C5 => session.items.push(AC97::new(pci)),\n 0x2668 => {\n let base = pci.read(0x10) as usize;\n let mut module = box IntelHDA {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n }\n _ => (),\n },\n _ => (),\n }\n }\n}\n\n\/\/\/ Initialize PCI session\npub unsafe fn pci_init(session: &mut Session) {\n for bus in 0..256 {\n for slot in 0..32 {\n for func in 0..8 {\n let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);\n let id = pci.read(0);\n\n if (id & 0xFFFF) != 0xFFFF {\n let class_id = pci.read(8);\n\n debug::d(\" * PCI \");\n debug::dd(bus);\n debug::d(\", \");\n debug::dd(slot);\n debug::d(\", \");\n debug::dd(func);\n debug::d(\": ID \");\n debug::dh(id as usize);\n debug::d(\" CL \");\n debug::dh(class_id as usize);\n\n for i in 0..6 {\n let bar = pci.read(i * 4 + 0x10);\n if bar > 0 {\n 
debug::d(\" BAR\");\n debug::dd(i as usize);\n debug::d(\": \");\n debug::dh(bar as usize);\n\n pci.write(i * 4 + 0x10, 0xFFFFFFFF);\n let size = (0xFFFFFFFF - (pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;\n pci.write(i * 4 + 0x10, bar);\n\n if size > 0 {\n debug::d(\" \");\n debug::dd(size as usize);\n }\n }\n }\n\n debug::dl();\n\n pci_device(session,\n pci,\n (class_id >> 24) & 0xFF,\n (class_id >> 16) & 0xFF,\n (class_id >> 8) & 0xFF,\n id & 0xFFFF,\n (id >> 16) & 0xFFFF);\n }\n }\n }\n }\n}\n<commit_msg>Cleanup of debug calls<commit_after>use audio::ac97::AC97;\nuse audio::intelhda::IntelHDA;\n\nuse collections::vec::Vec;\n\nuse common::debug;\nuse common::queue::Queue;\n\nuse drivers::pciconfig::PciConfig;\n\nuse network::intel8254x::Intel8254x;\nuse network::rtl8139::Rtl8139;\n\nuse programs::session::Session;\n\nuse schemes::file::FileScheme;\n\nuse usb::ehci::Ehci;\nuse usb::uhci::Uhci;\nuse usb::xhci::Xhci;\n\n\/\/\/ PCI device\npub unsafe fn pci_device(session: &mut Session,\n mut pci: PciConfig,\n class_id: u32,\n subclass_id: u32,\n interface_id: u32,\n vendor_code: u32,\n device_code: u32) {\n if class_id == 0x01 && subclass_id == 0x01 {\n if let Some(module) = FileScheme::new(pci) {\n session.items.push(module);\n }\n } else if class_id == 0x0C && subclass_id == 0x03 {\n if interface_id == 0x30 {\n let base = pci.read(0x10) as usize;\n\n let module = box Xhci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n } else if interface_id == 0x20 {\n let base = pci.read(0x10) as usize;\n\n let mut module = box Ehci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n } else if interface_id == 0x10 {\n let base = pci.read(0x10) as usize;\n\n debug!(\"OHCI Controller on {}\\n\", base & 0xFFFFFFF0);\n } else if interface_id == 0x00 {\n session.items.push(Uhci::new(pci));\n } else {\n debug!(\"Unknown USB interface version\\n\");\n }\n } else {\n match vendor_code {\n 0x10EC => match device_code { \/\/ REALTEK\n 0x8139 => {\n session.items.push(Rtl8139::new(pci));\n }\n _ => (),\n },\n 0x8086 => match device_code { \/\/ INTEL\n 0x100E => {\n let base = pci.read(0x10) as usize;\n let mut module = box Intel8254x {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n resources: Vec::new(),\n inbound: Queue::new(),\n outbound: Queue::new(),\n };\n module.init();\n session.items.push(module);\n }\n 0x2415 => session.items.push(AC97::new(pci)),\n 0x24C5 => session.items.push(AC97::new(pci)),\n 0x2668 => {\n let base = pci.read(0x10) as usize;\n let mut module = box IntelHDA {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n session.items.push(module);\n }\n _ => (),\n },\n _ => (),\n }\n }\n}\n\n\/\/\/ Initialize PCI session\npub unsafe fn pci_init(session: &mut Session) {\n for bus in 0..256 {\n for slot in 0..32 {\n for func in 0..8 {\n let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);\n let id = pci.read(0);\n\n if (id & 0xFFFF) != 0xFFFF {\n let class_id = pci.read(8);\n\n debug!(\" * PCI {}, {}, {}: ID {:X} CL {:X}\", bus, slot, func, id, class_id);\n\n for i in 0..6 {\n let bar = pci.read(i * 4 + 0x10);\n if bar > 0 {\n debug!(\" BAR{}: {:X}\", i, bar);\n\n pci.write(i * 4 + 0x10, 0xFFFFFFFF);\n let size = (0xFFFFFFFF - 
(pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;\n pci.write(i * 4 + 0x10, bar);\n\n if size > 0 {\n debug!(\" {}\", size);\n }\n }\n }\n\n debug::dl();\n\n pci_device(session,\n pci,\n (class_id >> 24) & 0xFF,\n (class_id >> 16) & 0xFF,\n (class_id >> 8) & 0xFF,\n id & 0xFFFF,\n (id >> 16) & 0xFFFF);\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\nuse drivers::kb_layouts::layouts;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n \/\/\/ Default: English\n layout: layouts::Layout,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: layouts::Layout::FRENCH,\n };\n\n unsafe {\n module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) == 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: char_for_scancode(scancode & 0x7F, shift, self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn 
mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout =\n match layout {\n 0 => layouts::Layout::ENGLISH,\n 1 => layouts::Layout::FRENCH,\n _ => layouts::Layout::ENGLISH\n }\n }\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n<commit_msg>This function is now include in layouts module<commit_after>use alloc::boxed::Box;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::pio::*;\n\nuse schemes::KScheme;\n\nuse drivers::kb_layouts::layouts;\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data\n data: Pio8,\n \/\/\/ The command\n cmd: Pio8,\n \/\/\/ Left shift?\n lshift: bool,\n \/\/\/ Right shift?\n rshift: bool,\n \/\/\/ Caps lock?\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: isize,\n \/\/\/ Mouse point y\n mouse_y: isize,\n \/\/\/ Layout for keyboard\n \/\/\/ Default: English\n layout: layouts::Layout,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio8::new(0x60),\n cmd: Pio8::new(0x64),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: layouts::Layout::FRENCH,\n };\n\n unsafe {\n 
module.keyboard_init();\n module.mouse_init();\n }\n\n module\n }\n\n unsafe fn wait0(&self) {\n while (self.cmd.read() & 1) == 0 {}\n }\n\n unsafe fn wait1(&self) {\n while (self.cmd.read() & 2) == 2 {}\n }\n\n unsafe fn keyboard_init(&mut self) {\n while (self.cmd.read() & 0x1) == 1 {\n self.data.read();\n }\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let flags = (self.data.read() & 0b00110111) | 1 | 0b10000;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(flags);\n\n \/\/ Set Defaults\n self.wait1();\n self.data.write(0xF6);\n self.wait0();\n self.data.read();\n\n \/\/ Set LEDS\n self.wait1();\n self.data.write(0xED);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(0);\n self.wait0();\n self.data.read();\n\n \/\/ Set Scancode Map:\n self.wait1();\n self.data.write(0xF0);\n self.wait0();\n self.data.read();\n\n self.wait1();\n self.data.write(1);\n self.wait0();\n self.data.read();\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self) -> Option<KeyEvent> {\n let scancode = unsafe { self.data.read() };\n\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n }\n\n let shift;\n if self.caps_lock {\n shift = !(self.lshift || self.rshift);\n } else {\n shift = self.lshift || self.rshift;\n }\n\n return Some(KeyEvent {\n character: layouts::char_for_scancode(scancode & 0x7F, shift, &self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n });\n }\n\n unsafe fn mouse_cmd(&mut self, byte: u8) -> u8 {\n self.wait1();\n self.cmd.write(0xD4);\n self.wait1();\n self.data.write(byte);\n\n self.wait0();\n self.data.read()\n }\n\n \/\/\/ Initialize mouse\n pub unsafe fn mouse_init(&mut self) {\n \/\/ The Init Dance\n self.wait1();\n self.cmd.write(0xA8);\n\n self.wait1();\n self.cmd.write(0x20);\n self.wait0();\n let status = self.data.read() | 2;\n self.wait1();\n self.cmd.write(0x60);\n self.wait1();\n self.data.write(status);\n\n \/\/ Set defaults\n self.mouse_cmd(0xF6);\n\n \/\/ Enable Streaming\n self.mouse_cmd(0xF4);\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self) -> Option<MouseEvent> {\n let byte = unsafe { self.data.read() };\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100);\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = (((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize;\n } else {\n y = 0;\n }\n\n unsafe {\n self.mouse_x = cmp::max(0,\n cmp::min((*::console).display.width as isize,\n self.mouse_x + x));\n 
self.mouse_y = cmp::max(0,\n cmp::min((*::console).display.height as isize,\n self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout =\n match layout {\n 0 => layouts::Layout::ENGLISH,\n 1 => layouts::Layout::FRENCH,\n _ => layouts::Layout::ENGLISH\n }\n }\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0x1 || irq == 0xC {\n self.on_poll();\n }\n }\n\n fn on_poll(&mut self) {\n loop {\n let status = unsafe { self.cmd.read() };\n if status & 0x21 == 1 {\n if let Some(key_event) = self.keyboard_interrupt() {\n key_event.trigger();\n }\n } else if status & 0x21 == 0x21 {\n if let Some(mouse_event) = self.mouse_interrupt() {\n mouse_event.trigger();\n }\n } else {\n break;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor `Connection` to actually use `Query`<commit_after><|endoftext|>"} {"text":"<commit_before>#[derive(PartialEq)]\n#[derive(Debug)]\npub enum Token {\n Word(String),\n End,\n}\n\nenum TokenizerState {\n Default,\n DoubleQuoted,\n SingleQuoted,\n Commented,\n}\n\nfn process_character_double_quoted(tokens: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n match chr {\n '\"' => TokenizerState::Default,\n _ => {\n current_token.push(chr);\n TokenizerState::DoubleQuoted\n },\n }\n}\n\nfn process_character_single_quoted(tokens: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n match chr {\n '\\'' => TokenizerState::Default,\n _ => {\n current_token.push(chr);\n TokenizerState::SingleQuoted\n },\n }\n}\n\nfn process_character_default(tokens: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n let mut next_state = TokenizerState::Default;\n match chr {\n ' ' | '\\t' => {\n if !current_token.is_empty() {\n tokens.push(Token::Word(current_token.clone()));\n current_token.clear();\n }\n },\n '#' => {\n next_state = TokenizerState::Commented;\n },\n '\\n' | '\\r' | ';' => {\n if !current_token.is_empty() {\n tokens.push(Token::Word(current_token.clone()));\n current_token.clear();\n }\n tokens.push(Token::End);\n }\n '\"' => {\n next_state = TokenizerState::DoubleQuoted;\n },\n '\\'' => {\n next_state = TokenizerState::SingleQuoted;\n },\n _ => {\n current_token.push(chr);\n },\n }\n next_state\n}\n\npub fn tokenize(input: &str) -> Vec<Token> {\n let mut state = TokenizerState::Default;\n let mut tokens: Vec<Token> = vec![];\n let mut current_token: String = String::new();\n for chr in input.chars() {\n state = match state {\n TokenizerState::DoubleQuoted => \n process_character_double_quoted(&mut tokens, &mut current_token, chr),\n TokenizerState::SingleQuoted => \n process_character_single_quoted(&mut tokens, &mut current_token, chr),\n TokenizerState::Commented => TokenizerState::Commented,\n _ => process_character_default(&mut tokens, &mut current_token, chr),\n }\n }\n if !current_token.is_empty() {\n tokens.push(Token::Word(current_token.clone()));\n }\n tokens\n}\n\n#[cfg(test)]\nmod tests {\n\n use super::*;\n\n #[test]\n fn tokenize_empty_string() {\n assert!(tokenize(\"\").is_empty());\n }\n\n #[test]\n fn tokenize_single_word() {\n let expected = vec![Token::Word(\"word\".to_string())];\n assert_eq!(expected, tokenize(\"word\"));\n }\n\n 
#[test]\n fn tokenize_whitespace() {\n assert!(tokenize(\" \\t \").is_empty());\n }\n\n #[test]\n fn tokenize_multiple_words() {\n let expected = vec![\n Token::Word(\"one\".to_string()),\n Token::Word(\"two\".to_string()),\n Token::Word(\"three\".to_string())];\n assert_eq!(expected, tokenize(\"one two three\"));\n }\n\n #[test]\n fn tokenize_comment() {\n assert!(tokenize(\"# some text\").is_empty());\n }\n\n #[test]\n fn tokenize_end_of_line_comment() {\n let expected = vec![Token::Word(\"word\".to_string())];\n assert_eq!(expected, tokenize(\"word # more stuff\"));\n }\n\n #[test]\n fn tokenize_newline_produces_end_token() {\n let expected = vec![\n Token::Word(\"word\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"word\\n\"));\n }\n\n #[test]\n fn double_quotes_escape_space() {\n let expected = vec![Token::Word(\"escaped space\".to_string())];\n assert_eq!(expected, tokenize(\"\\\"escaped space\\\"\"));\n }\n\n #[test]\n fn mixed_quoted_and_unquoted() {\n let expected = vec![\n Token::Word(\"one\".to_string()),\n Token::Word(\"two# three\".to_string()),\n Token::Word(\"four\".to_string())];\n assert_eq!(expected, tokenize(\"one \\\"two# three\\\" four\"));\n }\n\n #[test]\n fn mixed_double_and_single_quotes() {\n let expected = vec![\n Token::Word(\"''\".to_string()),\n Token::Word(\"\\\"\\\"\".to_string())];\n assert_eq!(expected, tokenize(\"\\\"''\\\" '\\\"\\\"'\"));\n }\n}\n<commit_msg>Make tokenizer always put an End token at the end<commit_after>#[derive(PartialEq)]\n#[derive(Debug)]\npub enum Token {\n Word(String),\n End,\n}\n\nenum TokenizerState {\n Default,\n DoubleQuoted,\n SingleQuoted,\n Commented,\n}\n\nfn process_character_double_quoted(_: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n match chr {\n '\"' => TokenizerState::Default,\n _ => {\n current_token.push(chr);\n TokenizerState::DoubleQuoted\n },\n }\n}\n\nfn process_character_single_quoted(_: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n match chr {\n '\\'' => TokenizerState::Default,\n _ => {\n current_token.push(chr);\n TokenizerState::SingleQuoted\n },\n }\n}\n\nfn process_character_default(tokens: &mut Vec<Token>,\n current_token: &mut String,\n chr: char) -> TokenizerState {\n let mut next_state = TokenizerState::Default;\n match chr {\n ' ' | '\\t' => {\n if !current_token.is_empty() {\n tokens.push(Token::Word(current_token.clone()));\n current_token.clear();\n }\n },\n '#' => {\n next_state = TokenizerState::Commented;\n },\n '\\n' | '\\r' | ';' => {\n if !current_token.is_empty() {\n tokens.push(Token::Word(current_token.clone()));\n current_token.clear();\n }\n tokens.push(Token::End);\n }\n '\"' => {\n next_state = TokenizerState::DoubleQuoted;\n },\n '\\'' => {\n next_state = TokenizerState::SingleQuoted;\n },\n _ => {\n current_token.push(chr);\n },\n }\n next_state\n}\n\npub fn tokenize(input: &str) -> Vec<Token> {\n let mut state = TokenizerState::Default;\n let mut tokens: Vec<Token> = vec![];\n let mut current_token: String = String::new();\n for chr in input.chars() {\n state = match state {\n TokenizerState::DoubleQuoted => \n process_character_double_quoted(&mut tokens, &mut current_token, chr),\n TokenizerState::SingleQuoted => \n process_character_single_quoted(&mut tokens, &mut current_token, chr),\n TokenizerState::Commented => TokenizerState::Commented,\n _ => process_character_default(&mut tokens, &mut current_token, chr),\n }\n }\n if !current_token.is_empty() {\n 
tokens.push(Token::Word(current_token.clone()));\n }\n tokens.push(Token::End);\n tokens\n}\n\n#[cfg(test)]\nmod tests {\n\n use super::*;\n\n #[test]\n fn tokenize_empty_string() {\n let expected = vec![Token::End];\n assert_eq!(expected, tokenize(\"\"));\n }\n\n #[test]\n fn tokenize_single_word() {\n let expected = vec![\n Token::Word(\"word\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"word\"));\n }\n\n #[test]\n fn tokenize_whitespace() {\n let expected = vec![Token::End];\n assert_eq!(expected, tokenize(\" \\t \"));\n }\n\n #[test]\n fn tokenize_multiple_words() {\n let expected = vec![\n Token::Word(\"one\".to_string()),\n Token::Word(\"two\".to_string()),\n Token::Word(\"three\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"one two three\"));\n }\n\n #[test]\n fn tokenize_comment() {\n let expected = vec![Token::End];\n assert_eq!(expected, tokenize(\"# some text\"));\n }\n\n #[test]\n fn tokenize_end_of_line_comment() {\n let expected = vec![\n Token::Word(\"word\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"word # more stuff\"));\n }\n\n #[test]\n fn tokenize_newline_produces_end_token() {\n let expected = vec![\n Token::Word(\"word\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"word\"));\n }\n\n #[test]\n fn double_quotes_escape_space() {\n let expected = vec![\n Token::Word(\"escaped space\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"\\\"escaped space\\\"\"));\n }\n\n #[test]\n fn mixed_quoted_and_unquoted() {\n let expected = vec![\n Token::Word(\"one\".to_string()),\n Token::Word(\"two# three\".to_string()),\n Token::Word(\"four\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"one \\\"two# three\\\" four\"));\n }\n\n #[test]\n fn mixed_double_and_single_quotes() {\n let expected = vec![\n Token::Word(\"''\".to_string()),\n Token::Word(\"\\\"\\\"\".to_string()),\n Token::End];\n assert_eq!(expected, tokenize(\"\\\"''\\\" '\\\"\\\"'\"));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Work around connect being deprecated in Nightly and join not being in Stable yet.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update cfg attribute syntax in aes tests<commit_after><|endoftext|>"} {"text":"<commit_before>use toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagstore::storeid::StoreId;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive_editor' must be a Boolean.\",\n action);\n 
warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive_editor'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value, action: &'a StoreAction) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.message' must be a String.\",\n action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.message'\", action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template(action: &StoreAction, id: &StoreId) -> String {\n format!(r#\"\n# Please commit your changes and remove these lines.\n#\n# You're about to commit changes via the {action} Hook\n#\n# Altered file: {id}\n#\n \"#,\n action = action,\n id = id.local().display())\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the `commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction, id: &StoreId) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config, &action) {\n if commit_with_editor(config, &action) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = commit_template(&action, id);\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config, &action)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean value 
from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n<commit_msg>Change abort_on_repo_init_err() to take Value instead of Option<Value><commit_after>use toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagstore::storeid::StoreId;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive_editor' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive_editor'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value, action: &'a StoreAction) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.message' must be a String.\",\n action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.message'\", action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template(action: &StoreAction, id: &StoreId) -> String {\n format!(r#\"\n# Please commit your changes and remove these lines.\n#\n# You're about to commit changes via the {action} Hook\n#\n# Altered file: {id}\n#\n \"#,\n action = action,\n id = id.local().display())\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the `commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction, id: &StoreId) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use 
libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config, &action) {\n if commit_with_editor(config, &action) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = commit_template(&action, id);\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config, &action)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: &Value) -> bool {\n get_bool_cfg(Some(cfg), \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean value from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fs;\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\n\nuse Mode;\nuse Compiler;\nuse builder::{Step, RunConfig, ShouldRun, Builder};\nuse util::{copy, exe, add_lib_path};\nuse compile::{self, libtest_stamp, libstd_stamp, librustc_stamp};\nuse native;\nuse channel::GitInfo;\nuse cache::Interned;\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct CleanTools {\n pub compiler: Compiler,\n pub target: Interned<String>,\n pub mode: Mode,\n}\n\nimpl Step for CleanTools {\n type Output = ();\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.never()\n }\n\n \/\/\/ Build a tool in `src\/tools`\n \/\/\/\n \/\/\/ This will build the specified tool with the specified `host` compiler in\n \/\/\/ `stage` into the normal cargo output directory.\n fn run(self, builder: &Builder) {\n let build = builder.build;\n let compiler = self.compiler;\n let target = self.target;\n let mode = self.mode;\n\n let stamp = match mode {\n Mode::Libstd => libstd_stamp(build, compiler, target),\n Mode::Libtest => libtest_stamp(build, compiler, target),\n Mode::Librustc => librustc_stamp(build, compiler, target),\n _ => panic!(),\n };\n let out_dir = build.cargo_out(compiler, Mode::Tool, target);\n build.clear_if_dirty(&out_dir, &stamp);\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\nstruct ToolBuild {\n compiler: Compiler,\n target: Interned<String>,\n tool: &'static str,\n mode: Mode,\n}\n\nimpl Step for ToolBuild {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.never()\n }\n\n \/\/\/ Build a tool in `src\/tools`\n \/\/\/\n \/\/\/ This will build the specified tool with the specified `host` compiler in\n \/\/\/ `stage` into the normal cargo output directory.\n fn run(self, builder: &Builder) -> PathBuf {\n let build = builder.build;\n let compiler = self.compiler;\n let target = self.target;\n let tool = self.tool;\n\n match self.mode {\n Mode::Libstd => builder.ensure(compile::Std { compiler, target }),\n Mode::Libtest => builder.ensure(compile::Test { compiler, target }),\n Mode::Librustc => builder.ensure(compile::Rustc { compiler, target }),\n Mode::Tool => panic!(\"unexpected Mode::Tool for tool build\")\n }\n\n let _folder = build.fold_output(|| format!(\"stage{}-{}\", compiler.stage, tool));\n println!(\"Building stage{} tool {} ({})\", compiler.stage, tool, target);\n\n let mut cargo = prepare_tool_cargo(builder, compiler, target, tool);\n build.run(&mut cargo);\n build.cargo_out(compiler, Mode::Tool, target).join(exe(tool, &compiler.host))\n }\n}\n\nfn prepare_tool_cargo(\n builder: &Builder,\n compiler: Compiler,\n target: Interned<String>,\n tool: &'static str,\n) -> Command {\n let build = builder.build;\n let mut cargo = builder.cargo(compiler, Mode::Tool, target, \"build\");\n let dir = build.src.join(\"src\/tools\").join(tool);\n cargo.arg(\"--manifest-path\").arg(dir.join(\"Cargo.toml\"));\n\n \/\/ We don't want to build tools dynamically as they'll be running across\n \/\/ stages and such and it's just easier if they're not dynamically linked.\n cargo.env(\"RUSTC_NO_PREFER_DYNAMIC\", \"1\");\n\n if let Some(dir) = build.openssl_install_dir(target) {\n cargo.env(\"OPENSSL_STATIC\", \"1\");\n cargo.env(\"OPENSSL_DIR\", dir);\n cargo.env(\"LIBZ_SYS_STATIC\", \"1\");\n }\n\n cargo.env(\"CFG_RELEASE_CHANNEL\", &build.config.channel);\n\n let info = GitInfo::new(&build.config, &dir);\n if let Some(sha) = info.sha() {\n 
cargo.env(\"CFG_COMMIT_HASH\", sha);\n }\n if let Some(sha_short) = info.sha_short() {\n cargo.env(\"CFG_SHORT_COMMIT_HASH\", sha_short);\n }\n if let Some(date) = info.commit_date() {\n cargo.env(\"CFG_COMMIT_DATE\", date);\n }\n cargo\n}\n\nmacro_rules! tool {\n ($($name:ident, $path:expr, $tool_name:expr, $mode:expr;)+) => {\n #[derive(Copy, Clone)]\n pub enum Tool {\n $(\n $name,\n )+\n }\n\n impl<'a> Builder<'a> {\n pub fn tool_exe(&self, tool: Tool) -> PathBuf {\n match tool {\n $(Tool::$name =>\n self.ensure($name {\n compiler: self.compiler(0, self.build.build),\n target: self.build.build,\n }),\n )+\n }\n }\n }\n\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub compiler: Compiler,\n pub target: Interned<String>,\n }\n\n impl Step for $name {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path($path)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: $tool_name,\n mode: $mode,\n })\n }\n }\n )+\n }\n}\n\ntool!(\n Rustbook, \"src\/tools\/rustbook\", \"rustbook\", Mode::Librustc;\n ErrorIndex, \"src\/tools\/error_index_generator\", \"error_index_generator\", Mode::Librustc;\n UnstableBookGen, \"src\/tools\/unstable-book-gen\", \"unstable-book-gen\", Mode::Libstd;\n Tidy, \"src\/tools\/tidy\", \"tidy\", Mode::Libstd;\n Linkchecker, \"src\/tools\/linkchecker\", \"linkchecker\", Mode::Libstd;\n CargoTest, \"src\/tools\/cargotest\", \"cargotest\", Mode::Libstd;\n Compiletest, \"src\/tools\/compiletest\", \"compiletest\", Mode::Libtest;\n BuildManifest, \"src\/tools\/build-manifest\", \"build-manifest\", Mode::Librustc;\n RemoteTestClient, \"src\/tools\/remote-test-client\", \"remote-test-client\", Mode::Libstd;\n RustInstaller, \"src\/tools\/rust-installer\", \"rust-installer\", Mode::Libstd;\n);\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct RemoteTestServer {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for RemoteTestServer {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path(\"src\/tools\/remote-test-server\")\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(RemoteTestServer {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"remote-test-server\",\n mode: Mode::Libstd,\n })\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Rustdoc {\n pub host: Interned<String>,\n}\n\nimpl Step for Rustdoc {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path(\"src\/tools\/rustdoc\")\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Rustdoc {\n host: run.host,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n let build = builder.build;\n let target_compiler = builder.compiler(builder.top_stage, self.host);\n let target = target_compiler.host;\n let build_compiler = if target_compiler.stage == 0 {\n builder.compiler(0, builder.build.build)\n } else if target_compiler.stage >= 2 {\n \/\/ Past stage 2, we consider the compiler to be ABI-compatible and 
hence capable of\n \/\/ building rustdoc itself.\n builder.compiler(target_compiler.stage, builder.build.build)\n } else {\n \/\/ Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise\n \/\/ we'd have stageN\/bin\/rustc and stageN\/bin\/rustdoc be effectively different stage\n \/\/ compilers, which isn't what we want.\n builder.compiler(target_compiler.stage - 1, builder.build.build)\n };\n\n builder.ensure(compile::Rustc { compiler: build_compiler, target });\n\n let _folder = build.fold_output(|| format!(\"stage{}-rustdoc\", target_compiler.stage));\n println!(\"Building rustdoc for stage{} ({})\", target_compiler.stage, target_compiler.host);\n\n let mut cargo = prepare_tool_cargo(builder, build_compiler, target, \"rustdoc\");\n build.run(&mut cargo);\n \/\/ Cargo adds a number of paths to the dylib search path on windows, which results in\n \/\/ the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the \"tool\"\n \/\/ rustdoc a different name.\n let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target)\n .join(exe(\"rustdoc-tool-binary\", &target_compiler.host));\n\n \/\/ don't create a stage0-sysroot\/bin directory.\n if target_compiler.stage > 0 {\n let sysroot = builder.sysroot(target_compiler);\n let bindir = sysroot.join(\"bin\");\n t!(fs::create_dir_all(&bindir));\n let bin_rustdoc = bindir.join(exe(\"rustdoc\", &*target_compiler.host));\n let _ = fs::remove_file(&bin_rustdoc);\n copy(&tool_rustdoc, &bin_rustdoc);\n bin_rustdoc\n } else {\n tool_rustdoc\n }\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Cargo {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for Cargo {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let builder = run.builder;\n run.path(\"src\/tools\/cargo\").default_condition(builder.build.config.extended)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Cargo {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(native::Openssl {\n target: self.target,\n });\n \/\/ Cargo depends on procedural macros, which requires a full host\n \/\/ compiler to be available, so we need to depend on that.\n builder.ensure(compile::Rustc {\n compiler: self.compiler,\n target: builder.build.build,\n });\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"cargo\",\n mode: Mode::Librustc,\n })\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Rls {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for Rls {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let builder = run.builder;\n run.path(\"src\/tools\/rls\").default_condition(builder.build.config.extended)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Rls {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(native::Openssl {\n target: self.target,\n });\n \/\/ RLS depends on procedural macros, which requires a full host\n \/\/ compiler to be available, so we need to depend on that.\n builder.ensure(compile::Rustc {\n compiler: self.compiler,\n target: builder.build.build,\n 
});\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"rls\",\n mode: Mode::Librustc,\n })\n }\n}\n\nimpl<'a> Builder<'a> {\n \/\/\/ Get a `Command` which is ready to run `tool` in `stage` built for\n \/\/\/ `host`.\n pub fn tool_cmd(&self, tool: Tool) -> Command {\n let mut cmd = Command::new(self.tool_exe(tool));\n let compiler = self.compiler(0, self.build.build);\n self.prepare_tool_cmd(compiler, &mut cmd);\n cmd\n }\n\n \/\/\/ Prepares the `cmd` provided to be able to run the `compiler` provided.\n \/\/\/\n \/\/\/ Notably this munges the dynamic library lookup path to point to the\n \/\/\/ right location to run `compiler`.\n fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) {\n let host = &compiler.host;\n let mut paths: Vec<PathBuf> = vec![\n PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)),\n self.cargo_out(compiler, Mode::Tool, *host).join(\"deps\"),\n ];\n\n \/\/ On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make\n \/\/ mode) and that C compiler may need some extra PATH modification. Do\n \/\/ so here.\n if compiler.host.contains(\"msvc\") {\n let curpaths = env::var_os(\"PATH\").unwrap_or_default();\n let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();\n for &(ref k, ref v) in self.cc[&compiler.host].0.env() {\n if k != \"PATH\" {\n continue\n }\n for path in env::split_paths(v) {\n if !curpaths.contains(&path) {\n paths.push(path);\n }\n }\n }\n }\n add_lib_path(paths, cmd);\n }\n}\n<commit_msg>rustbuild: Fix dependencies of build-manifest<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::fs;\nuse std::env;\nuse std::path::PathBuf;\nuse std::process::Command;\n\nuse Mode;\nuse Compiler;\nuse builder::{Step, RunConfig, ShouldRun, Builder};\nuse util::{copy, exe, add_lib_path};\nuse compile::{self, libtest_stamp, libstd_stamp, librustc_stamp};\nuse native;\nuse channel::GitInfo;\nuse cache::Interned;\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct CleanTools {\n pub compiler: Compiler,\n pub target: Interned<String>,\n pub mode: Mode,\n}\n\nimpl Step for CleanTools {\n type Output = ();\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.never()\n }\n\n \/\/\/ Build a tool in `src\/tools`\n \/\/\/\n \/\/\/ This will build the specified tool with the specified `host` compiler in\n \/\/\/ `stage` into the normal cargo output directory.\n fn run(self, builder: &Builder) {\n let build = builder.build;\n let compiler = self.compiler;\n let target = self.target;\n let mode = self.mode;\n\n let stamp = match mode {\n Mode::Libstd => libstd_stamp(build, compiler, target),\n Mode::Libtest => libtest_stamp(build, compiler, target),\n Mode::Librustc => librustc_stamp(build, compiler, target),\n _ => panic!(),\n };\n let out_dir = build.cargo_out(compiler, Mode::Tool, target);\n build.clear_if_dirty(&out_dir, &stamp);\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\nstruct ToolBuild {\n compiler: Compiler,\n target: Interned<String>,\n tool: &'static str,\n mode: Mode,\n}\n\nimpl Step for ToolBuild {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.never()\n }\n\n \/\/\/ Build a tool in `src\/tools`\n \/\/\/\n \/\/\/ This will build the specified tool with the specified `host` compiler in\n \/\/\/ `stage` into the normal cargo output directory.\n fn run(self, builder: &Builder) -> PathBuf {\n let build = builder.build;\n let compiler = self.compiler;\n let target = self.target;\n let tool = self.tool;\n\n match self.mode {\n Mode::Libstd => builder.ensure(compile::Std { compiler, target }),\n Mode::Libtest => builder.ensure(compile::Test { compiler, target }),\n Mode::Librustc => builder.ensure(compile::Rustc { compiler, target }),\n Mode::Tool => panic!(\"unexpected Mode::Tool for tool build\")\n }\n\n let _folder = build.fold_output(|| format!(\"stage{}-{}\", compiler.stage, tool));\n println!(\"Building stage{} tool {} ({})\", compiler.stage, tool, target);\n\n let mut cargo = prepare_tool_cargo(builder, compiler, target, tool);\n build.run(&mut cargo);\n build.cargo_out(compiler, Mode::Tool, target).join(exe(tool, &compiler.host))\n }\n}\n\nfn prepare_tool_cargo(\n builder: &Builder,\n compiler: Compiler,\n target: Interned<String>,\n tool: &'static str,\n) -> Command {\n let build = builder.build;\n let mut cargo = builder.cargo(compiler, Mode::Tool, target, \"build\");\n let dir = build.src.join(\"src\/tools\").join(tool);\n cargo.arg(\"--manifest-path\").arg(dir.join(\"Cargo.toml\"));\n\n \/\/ We don't want to build tools dynamically as they'll be running across\n \/\/ stages and such and it's just easier if they're not dynamically linked.\n cargo.env(\"RUSTC_NO_PREFER_DYNAMIC\", \"1\");\n\n if let Some(dir) = build.openssl_install_dir(target) {\n cargo.env(\"OPENSSL_STATIC\", \"1\");\n cargo.env(\"OPENSSL_DIR\", dir);\n cargo.env(\"LIBZ_SYS_STATIC\", \"1\");\n }\n\n cargo.env(\"CFG_RELEASE_CHANNEL\", &build.config.channel);\n\n let info = GitInfo::new(&build.config, &dir);\n if let Some(sha) = info.sha() {\n 
cargo.env(\"CFG_COMMIT_HASH\", sha);\n }\n if let Some(sha_short) = info.sha_short() {\n cargo.env(\"CFG_SHORT_COMMIT_HASH\", sha_short);\n }\n if let Some(date) = info.commit_date() {\n cargo.env(\"CFG_COMMIT_DATE\", date);\n }\n cargo\n}\n\nmacro_rules! tool {\n ($($name:ident, $path:expr, $tool_name:expr, $mode:expr;)+) => {\n #[derive(Copy, Clone)]\n pub enum Tool {\n $(\n $name,\n )+\n }\n\n impl<'a> Builder<'a> {\n pub fn tool_exe(&self, tool: Tool) -> PathBuf {\n match tool {\n $(Tool::$name =>\n self.ensure($name {\n compiler: self.compiler(0, self.build.build),\n target: self.build.build,\n }),\n )+\n }\n }\n }\n\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub compiler: Compiler,\n pub target: Interned<String>,\n }\n\n impl Step for $name {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path($path)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: $tool_name,\n mode: $mode,\n })\n }\n }\n )+\n }\n}\n\ntool!(\n Rustbook, \"src\/tools\/rustbook\", \"rustbook\", Mode::Librustc;\n ErrorIndex, \"src\/tools\/error_index_generator\", \"error_index_generator\", Mode::Librustc;\n UnstableBookGen, \"src\/tools\/unstable-book-gen\", \"unstable-book-gen\", Mode::Libstd;\n Tidy, \"src\/tools\/tidy\", \"tidy\", Mode::Libstd;\n Linkchecker, \"src\/tools\/linkchecker\", \"linkchecker\", Mode::Libstd;\n CargoTest, \"src\/tools\/cargotest\", \"cargotest\", Mode::Libstd;\n Compiletest, \"src\/tools\/compiletest\", \"compiletest\", Mode::Libtest;\n BuildManifest, \"src\/tools\/build-manifest\", \"build-manifest\", Mode::Libstd;\n RemoteTestClient, \"src\/tools\/remote-test-client\", \"remote-test-client\", Mode::Libstd;\n RustInstaller, \"src\/tools\/rust-installer\", \"rust-installer\", Mode::Libstd;\n);\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct RemoteTestServer {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for RemoteTestServer {\n type Output = PathBuf;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path(\"src\/tools\/remote-test-server\")\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(RemoteTestServer {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"remote-test-server\",\n mode: Mode::Libstd,\n })\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Rustdoc {\n pub host: Interned<String>,\n}\n\nimpl Step for Rustdoc {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n run.path(\"src\/tools\/rustdoc\")\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Rustdoc {\n host: run.host,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n let build = builder.build;\n let target_compiler = builder.compiler(builder.top_stage, self.host);\n let target = target_compiler.host;\n let build_compiler = if target_compiler.stage == 0 {\n builder.compiler(0, builder.build.build)\n } else if target_compiler.stage >= 2 {\n \/\/ Past stage 2, we consider the compiler to be ABI-compatible and 
hence capable of\n \/\/ building rustdoc itself.\n builder.compiler(target_compiler.stage, builder.build.build)\n } else {\n \/\/ Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise\n \/\/ we'd have stageN\/bin\/rustc and stageN\/bin\/rustdoc be effectively different stage\n \/\/ compilers, which isn't what we want.\n builder.compiler(target_compiler.stage - 1, builder.build.build)\n };\n\n builder.ensure(compile::Rustc { compiler: build_compiler, target });\n\n let _folder = build.fold_output(|| format!(\"stage{}-rustdoc\", target_compiler.stage));\n println!(\"Building rustdoc for stage{} ({})\", target_compiler.stage, target_compiler.host);\n\n let mut cargo = prepare_tool_cargo(builder, build_compiler, target, \"rustdoc\");\n build.run(&mut cargo);\n \/\/ Cargo adds a number of paths to the dylib search path on windows, which results in\n \/\/ the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the \"tool\"\n \/\/ rustdoc a different name.\n let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target)\n .join(exe(\"rustdoc-tool-binary\", &target_compiler.host));\n\n \/\/ don't create a stage0-sysroot\/bin directory.\n if target_compiler.stage > 0 {\n let sysroot = builder.sysroot(target_compiler);\n let bindir = sysroot.join(\"bin\");\n t!(fs::create_dir_all(&bindir));\n let bin_rustdoc = bindir.join(exe(\"rustdoc\", &*target_compiler.host));\n let _ = fs::remove_file(&bin_rustdoc);\n copy(&tool_rustdoc, &bin_rustdoc);\n bin_rustdoc\n } else {\n tool_rustdoc\n }\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Cargo {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for Cargo {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let builder = run.builder;\n run.path(\"src\/tools\/cargo\").default_condition(builder.build.config.extended)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Cargo {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(native::Openssl {\n target: self.target,\n });\n \/\/ Cargo depends on procedural macros, which requires a full host\n \/\/ compiler to be available, so we need to depend on that.\n builder.ensure(compile::Rustc {\n compiler: self.compiler,\n target: builder.build.build,\n });\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"cargo\",\n mode: Mode::Librustc,\n })\n }\n}\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\npub struct Rls {\n pub compiler: Compiler,\n pub target: Interned<String>,\n}\n\nimpl Step for Rls {\n type Output = PathBuf;\n const DEFAULT: bool = true;\n const ONLY_HOSTS: bool = true;\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let builder = run.builder;\n run.path(\"src\/tools\/rls\").default_condition(builder.build.config.extended)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure(Rls {\n compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),\n target: run.target,\n });\n }\n\n fn run(self, builder: &Builder) -> PathBuf {\n builder.ensure(native::Openssl {\n target: self.target,\n });\n \/\/ RLS depends on procedural macros, which requires a full host\n \/\/ compiler to be available, so we need to depend on that.\n builder.ensure(compile::Rustc {\n compiler: self.compiler,\n target: builder.build.build,\n 
});\n builder.ensure(ToolBuild {\n compiler: self.compiler,\n target: self.target,\n tool: \"rls\",\n mode: Mode::Librustc,\n })\n }\n}\n\nimpl<'a> Builder<'a> {\n \/\/\/ Get a `Command` which is ready to run `tool` in `stage` built for\n \/\/\/ `host`.\n pub fn tool_cmd(&self, tool: Tool) -> Command {\n let mut cmd = Command::new(self.tool_exe(tool));\n let compiler = self.compiler(0, self.build.build);\n self.prepare_tool_cmd(compiler, &mut cmd);\n cmd\n }\n\n \/\/\/ Prepares the `cmd` provided to be able to run the `compiler` provided.\n \/\/\/\n \/\/\/ Notably this munges the dynamic library lookup path to point to the\n \/\/\/ right location to run `compiler`.\n fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) {\n let host = &compiler.host;\n let mut paths: Vec<PathBuf> = vec![\n PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)),\n self.cargo_out(compiler, Mode::Tool, *host).join(\"deps\"),\n ];\n\n \/\/ On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make\n \/\/ mode) and that C compiler may need some extra PATH modification. Do\n \/\/ so here.\n if compiler.host.contains(\"msvc\") {\n let curpaths = env::var_os(\"PATH\").unwrap_or_default();\n let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();\n for &(ref k, ref v) in self.cc[&compiler.host].0.env() {\n if k != \"PATH\" {\n continue\n }\n for path in env::split_paths(v) {\n if !curpaths.contains(&path) {\n paths.push(path);\n }\n }\n }\n }\n add_lib_path(paths, cmd);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#[allow(dead_code)]\nstruct Book {\n \/\/ `&'static str` is a reference to a string allocated in read only memory\n author: &'static str,\n title: &'static str,\n year: uint,\n}\n\n\/\/ This function takes a reference to a book\nfn borrow_book(book: &Book) {\n println!(\"I borrowed {} {} edition\", book.title, book.year);\n}\n\n\/\/ This function takes a reference to a mutable book\nfn new_edition(book: &mut Book) {\n \/\/ the fields of the book can be modified\n book.year = 2014;\n}\n\nfn main() {\n \/\/ An immutable Book\n let geb = Book {\n \/\/ string literals have type `&'static str`\n author: \"Douglas Hofstadter\",\n title: \"Gödel, Escher, Bach\",\n year: 1979,\n };\n\n \/\/ Immutably borrow `geb`\n borrow_book(&geb);\n\n \/\/ Error! Can't borrow an immutable object as mutable\n new_edition(&mut geb);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `mutable_geb` is a mutable copy of `geb`\n let mut mutable_geb = geb;\n\n \/\/ Borrow a mutable object as mutable\n new_edition(&mut mutable_geb);\n\n \/\/ Mutable objects can be immutably borrowed\n borrow_book(&mutable_geb);\n}\n<commit_msg>Fix #[derive(Copy)] on borrow\/mut\/mut.rs<commit_after>#[allow(dead_code)]\n#[derive(Copy)]\nstruct Book {\n \/\/ `&'static str` is a reference to a string allocated in read only memory\n author: &'static str,\n title: &'static str,\n year: uint,\n}\n\n\/\/ This function takes a reference to a book\nfn borrow_book(book: &Book) {\n println!(\"I borrowed {} {} edition\", book.title, book.year);\n}\n\n\/\/ This function takes a reference to a mutable book\nfn new_edition(book: &mut Book) {\n \/\/ the fields of the book can be modified\n book.year = 2014;\n}\n\nfn main() {\n \/\/ An immutable Book\n let geb = Book {\n \/\/ string literals have type `&'static str`\n author: \"Douglas Hofstadter\",\n title: \"Gödel, Escher, Bach\",\n year: 1979,\n };\n\n \/\/ Immutably borrow `geb`\n borrow_book(&geb);\n\n \/\/ Error! 
Can't borrow an immutable object as mutable\n new_edition(&mut geb);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `mutable_geb` is a mutable copy of `geb`\n let mut mutable_geb = geb;\n\n \/\/ Borrow a mutable object as mutable\n new_edition(&mut mutable_geb);\n\n \/\/ Mutable objects can be immutably borrowed\n borrow_book(&mutable_geb);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests(SQS): add simple SQS test<commit_after>#![cfg(feature = \"sqs\")]\n\nextern crate rusoto;\n\nuse rusoto::sqs::SqsClient;\nuse rusoto::sqs::ListQueuesRequest;\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn list_queues() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n\n let sqs = SqsClient::new(credentials, Region::EuWest1);\n\n \/\/ http:\/\/docs.aws.amazon.com\/AWSSimpleQueueService\/latest\/APIReference\/Welcome.html\n let request = ListQueuesRequest {\n ..Default::default()\n };\n sqs.list_queues(&request).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust: Problem 8<commit_after>\/\/\/ # Largest product in a series\n\/\/\/ ## Problem 8\n\/\/\/ The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832.\n\/\/\/ Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product?\n\nuse std::iter::MultiplicativeIterator;\nuse std::cmp::max;\n\nfn main() {\n let input = \"73167176531330624919225119674426574742355349194934\\\n 96983520312774506326239578318016984801869478851843\\\n 85861560789112949495459501737958331952853208805511\\\n 12540698747158523863050715693290963295227443043557\\\n 66896648950445244523161731856403098711121722383113\\\n 62229893423380308135336276614282806444486645238749\\\n 30358907296290491560440772390713810515859307960866\\\n 70172427121883998797908792274921901699720888093776\\\n 65727333001053367881220235421809751254540594752243\\\n 52584907711670556013604839586446706324415722155397\\\n 53697817977846174064955149290862569321978468622482\\\n 83972241375657056057490261407972968652414535100474\\\n 82166370484403199890008895243450658541227588666881\\\n 16427171479924442928230863465674813919123162824586\\\n 17866458359124566529476545682848912883142607690042\\\n 24219022671055626321111109370544217506941658960408\\\n 07198403850962455444362981230987879927244284909188\\\n 84580156166097919133875499200524063689912560717606\\\n 05886116467109405077541002256983155200055935729725\\\n 71636269561882670428252483600823257530420752963450\".chars();\n\n let mut nums = input.map(|c| c.to_digit(10).unwrap()).collect::<Vec<uint>>();\n let mut biggest = 0;\n\n for _ in range(0, nums.len() - 13) {\n biggest = max(biggest, nums.iter().take(13).map(|&n| n).product());\n nums.shift();\n }\n\n println!(\"{}\", biggest);\n\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\nuse super::FromBytes;\n\nconst MZAP_ENT_LEN: usize = 64;\nconst MZAP_NAME_LEN: usize = MZAP_ENT_LEN - 8 - 4 - 2;\n\n#[repr(u64)]\n#[derive(Copy, Clone, Debug)]\npub enum ZapObjectType {\n Micro = (1 << 63) + 3,\n Header = (1 << 63) + 1,\n Leaf = 1 << 63,\n}\n\n\/\/\/ Microzap\n#[repr(packed)]\npub struct MZapPhys {\n block_type: ZapObjectType, \/\/ ZapObjectType::Micro\n salt: u64,\n norm_flags: u64,\n pad: [u64; 5],\n chunk: [MZapEntPhys; 1],\n \/\/ actually variable size depending on block size\n}\n\nimpl FromBytes for MZapPhys {\n fn from_bytes(data: &[u8]) -> Option<Self> {\n if data.len() >= mem::size_of::<MZapPhys>() {\n let mzap_phys = unsafe { 
ptr::read(data.as_ptr() as *const MZapPhys) };\n Some(mzap_phys)\n } else {\n Option::None\n }\n }\n}\n\nimpl fmt::Debug for MZapPhys {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"MZapPhys {{\\nblock_type: {:?},\\nsalt: {:X},\\nnorm_flags: {:X},\\nchunk: [\\n\",\n self.block_type, self.salt, self.norm_flags));\n for chunk in &self.chunk {\n try!(write!(f, \"{:?}\", chunk));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n#[repr(packed)]\npub struct MZapEntPhys{\n pub value: u64,\n pub cd: u32,\n pub pad: u16,\n pub name: [u8; MZAP_NAME_LEN],\n}\n\nimpl fmt::Debug for MZapEntPhys {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"MZapEntPhys {{\\nvalue: {:X},\\ncd: {:X},\\n\",\n self.value, self.cd));\n try!(write!(f, \"}}\\n\"));\n Ok(())\n }\n}\n\n\/\/\/ Fatzap\n#[repr(packed)]\npub struct ZapPhys {\n pub block_type: ZapObjectType, \/\/ ZapObjectType::Header\n pub magic: u64,\n pub ptr_table: ZapTablePhys,\n pub free_block: u64,\n pub num_leafs: u64,\n pub num_entries: u64,\n pub salt: u64,\n pub pad: [u64; 8181],\n pub leafs: [u64; 8192],\n}\n\n#[repr(packed)]\npub struct ZapTablePhys {\n pub block: u64,\n pub num_blocks: u64,\n pub shift: u64,\n pub next_block: u64,\n pub block_copied: u64,\n}\n\nconst ZAP_LEAF_MAGIC: u32 = 0x2AB1EAF;\nconst ZAP_LEAF_CHUNKSIZE: usize = 24;\n\n\/\/ The amount of space within the chunk available for the array is:\n\/\/ chunk size - space for type (1) - space for next pointer (2)\nconst ZAP_LEAF_ARRAY_BYTES: usize = ZAP_LEAF_CHUNKSIZE - 3;\n\n\/*pub struct ZapLeafPhys {\n pub header: ZapLeafHeader,\n hash: [u16; ZAP_LEAF_HASH_NUMENTRIES],\n union zap_leaf_chunk {\n entry,\n array,\n free,\n } chunks[ZapLeafChunk; ZAP_LEAF_NUMCHUNKS],\n}*\/\n\n#[repr(packed)]\npub struct ZapLeafHeader {\n pub block_type: ZapObjectType, \/\/ ZapObjectType::Leaf\n pub next: u64,\n pub prefix: u64,\n pub magic: u32,\n pub n_free: u16,\n pub n_entries: u16,\n pub prefix_len: u16,\n pub free_list: u16,\n pad2: [u8; 12],\n}\n\n#[repr(packed)]\nstruct ZapLeafEntry {\n leaf_type: u8,\n int_size: u8,\n next: u16,\n name_chunk: u16,\n name_length: u16,\n value_chunk: u16,\n value_length: u16,\n cd: u16,\n pad: [u8; 2],\n hash: u64,\n}\n\n#[repr(packed)]\nstruct ZapLeafArray {\n leaf_type: u8,\n array: [u8; ZAP_LEAF_ARRAY_BYTES],\n next: u16,\n}\n\n#[repr(packed)]\nstruct ZapLeafFree{\n free_type: u8,\n pad: [u8; ZAP_LEAF_ARRAY_BYTES],\n next: u16,\n}\n<commit_msg>Show MZapEntPhys::name<commit_after>use redox::*;\n\nuse super::FromBytes;\n\nconst MZAP_ENT_LEN: usize = 64;\nconst MZAP_NAME_LEN: usize = MZAP_ENT_LEN - 8 - 4 - 2;\n\n#[repr(u64)]\n#[derive(Copy, Clone, Debug)]\npub enum ZapObjectType {\n Micro = (1 << 63) + 3,\n Header = (1 << 63) + 1,\n Leaf = 1 << 63,\n}\n\n\/\/\/ Microzap\n#[repr(packed)]\npub struct MZapPhys {\n block_type: ZapObjectType, \/\/ ZapObjectType::Micro\n salt: u64,\n norm_flags: u64,\n pad: [u64; 5],\n chunk: [MZapEntPhys; 1],\n \/\/ actually variable size depending on block size\n}\n\nimpl FromBytes for MZapPhys {\n fn from_bytes(data: &[u8]) -> Option<Self> {\n if data.len() >= mem::size_of::<MZapPhys>() {\n let mzap_phys = unsafe { ptr::read(data.as_ptr() as *const MZapPhys) };\n Some(mzap_phys)\n } else {\n Option::None\n }\n }\n}\n\nimpl fmt::Debug for MZapPhys {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"MZapPhys {{\\nblock_type: {:?},\\nsalt: {:X},\\nnorm_flags: {:X},\\nchunk: [\\n\",\n self.block_type, self.salt, self.norm_flags));\n for chunk in 
&self.chunk {\n try!(write!(f, \"{:?}\", chunk));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n#[repr(packed)]\npub struct MZapEntPhys{\n pub value: u64,\n pub cd: u32,\n pub pad: u16,\n pub name: [u8; MZAP_NAME_LEN],\n}\n\nimpl fmt::Debug for MZapEntPhys {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"MZapEntPhys {{\\nvalue: {:X},\\ncd: {:X},\\nname: \",\n self.value, self.cd));\n for i in 0..MZAP_NAME_LEN {\n if self.name[i] == 0 {\n break;\n }\n try!(write!(f, \"{}\", self.name[i] as char));\n }\n try!(write!(f, \"\\n}}\\n\"));\n Ok(())\n }\n}\n\n\/\/\/ Fatzap\n#[repr(packed)]\npub struct ZapPhys {\n pub block_type: ZapObjectType, \/\/ ZapObjectType::Header\n pub magic: u64,\n pub ptr_table: ZapTablePhys,\n pub free_block: u64,\n pub num_leafs: u64,\n pub num_entries: u64,\n pub salt: u64,\n pub pad: [u64; 8181],\n pub leafs: [u64; 8192],\n}\n\n#[repr(packed)]\npub struct ZapTablePhys {\n pub block: u64,\n pub num_blocks: u64,\n pub shift: u64,\n pub next_block: u64,\n pub block_copied: u64,\n}\n\nconst ZAP_LEAF_MAGIC: u32 = 0x2AB1EAF;\nconst ZAP_LEAF_CHUNKSIZE: usize = 24;\n\n\/\/ The amount of space within the chunk available for the array is:\n\/\/ chunk size - space for type (1) - space for next pointer (2)\nconst ZAP_LEAF_ARRAY_BYTES: usize = ZAP_LEAF_CHUNKSIZE - 3;\n\n\/*pub struct ZapLeafPhys {\n pub header: ZapLeafHeader,\n hash: [u16; ZAP_LEAF_HASH_NUMENTRIES],\n union zap_leaf_chunk {\n entry,\n array,\n free,\n } chunks[ZapLeafChunk; ZAP_LEAF_NUMCHUNKS],\n}*\/\n\n#[repr(packed)]\npub struct ZapLeafHeader {\n pub block_type: ZapObjectType, \/\/ ZapObjectType::Leaf\n pub next: u64,\n pub prefix: u64,\n pub magic: u32,\n pub n_free: u16,\n pub n_entries: u16,\n pub prefix_len: u16,\n pub free_list: u16,\n pad2: [u8; 12],\n}\n\n#[repr(packed)]\nstruct ZapLeafEntry {\n leaf_type: u8,\n int_size: u8,\n next: u16,\n name_chunk: u16,\n name_length: u16,\n value_chunk: u16,\n value_length: u16,\n cd: u16,\n pad: [u8; 2],\n hash: u64,\n}\n\n#[repr(packed)]\nstruct ZapLeafArray {\n leaf_type: u8,\n array: [u8; ZAP_LEAF_ARRAY_BYTES],\n next: u16,\n}\n\n#[repr(packed)]\nstruct ZapLeafFree{\n free_type: u8,\n pad: [u8; ZAP_LEAF_ARRAY_BYTES],\n next: u16,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create Spa struct. One field down, lots more to go<commit_after>\/\/ Storage pool allocator\npub struct Spa {\n name: String, \/\/ Pool name\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Entry point of thread panic, for details, see std::macros\n#[macro_export]\n#[allow_internal_unstable]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! panic {\n () => (\n panic!(\"explicit panic\")\n );\n ($msg:expr) => ({\n static _MSG_FILE_LINE: (&'static str, &'static str, u32) = ($msg, file!(), line!());\n $crate::panicking::panic(&_MSG_FILE_LINE)\n });\n ($fmt:expr, $($arg:tt)*) => ({\n \/\/ The leading _'s are to avoid dead code warnings if this is\n \/\/ used inside a dead function. 
Just `#[allow(dead_code)]` is\n \/\/ insufficient, since the user may have\n \/\/ `#[forbid(dead_code)]` and which cannot be overridden.\n static _FILE_LINE: (&'static str, u32) = (file!(), line!());\n $crate::panicking::panic_fmt(format_args!($fmt, $($arg)*), &_FILE_LINE)\n });\n}\n\n\/\/\/ Ensure that a boolean expression is `true` at runtime.\n\/\/\/\n\/\/\/ This will invoke the `panic!` macro if the provided expression cannot be\n\/\/\/ evaluated to `true` at runtime.\n\/\/\/\n\/\/\/ Assertions are always checked in both debug and release builds, and cannot\n\/\/\/ be disabled. See `debug_assert!` for assertions that are not enabled in\n\/\/\/ release builds by default.\n\/\/\/\n\/\/\/ Unsafe code relies on `assert!` to enforce run-time invariants that, if\n\/\/\/ violated could lead to unsafety.\n\/\/\/\n\/\/\/ Other use-cases of `assert!` include\n\/\/\/ [testing](https:\/\/doc.rust-lang.org\/book\/testing.html) and enforcing\n\/\/\/ run-time invariants in safe code (whose violation cannot result in unsafety).\n\/\/\/\n\/\/\/ This macro has a second version, where a custom panic message can be provided.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ \/\/ the panic message for these assertions is the stringified value of the\n\/\/\/ \/\/ expression given.\n\/\/\/ assert!(true);\n\/\/\/\n\/\/\/ fn some_computation() -> bool { true } \/\/ a very simple function\n\/\/\/\n\/\/\/ assert!(some_computation());\n\/\/\/\n\/\/\/ \/\/ assert with a custom message\n\/\/\/ let x = true;\n\/\/\/ assert!(x, \"x wasn't true!\");\n\/\/\/\n\/\/\/ let a = 3; let b = 27;\n\/\/\/ assert!(a + b == 30, \"a = {}, b = {}\", a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! assert {\n ($cond:expr) => (\n if !$cond {\n panic!(concat!(\"assertion failed: \", stringify!($cond)))\n }\n );\n ($cond:expr, $($arg:tt)+) => (\n if !$cond {\n panic!($($arg)+)\n }\n );\n}\n\n\/\/\/ Asserts that two expressions are equal to each other.\n\/\/\/\n\/\/\/ On panic, this macro will print the values of the expressions with their\n\/\/\/ debug representations.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ let a = 3;\n\/\/\/ let b = 1 + 2;\n\/\/\/ assert_eq!(a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! assert_eq {\n ($left:expr , $right:expr) => ({\n match (&$left, &$right) {\n (left_val, right_val) => {\n if !(*left_val == *right_val) {\n panic!(\"assertion failed: `(left == right)` \\\n (left: `{:?}`, right: `{:?}`)\", left_val, right_val)\n }\n }\n }\n });\n ($left:expr , $right:expr, $($arg:tt)*) => ({\n match (&($left), &($right)) {\n (left_val, right_val) => {\n if !(*left_val == *right_val) {\n panic!(\"assertion failed: `(left == right)` \\\n (left: `{:?}`, right: `{:?}`): {}\", left_val, right_val,\n format_args!($($arg)*))\n }\n }\n }\n });\n}\n\n\/\/\/ Ensure that a boolean expression is `true` at runtime.\n\/\/\/\n\/\/\/ This will invoke the `panic!` macro if the provided expression cannot be\n\/\/\/ evaluated to `true` at runtime.\n\/\/\/\n\/\/\/ Like `assert!`, this macro also has a second version, where a custom panic\n\/\/\/ message can be provided.\n\/\/\/\n\/\/\/ Unlike `assert!`, `debug_assert!` statements are only enabled in non\n\/\/\/ optimized builds by default. An optimized build will omit all\n\/\/\/ `debug_assert!` statements unless `-C debug-assertions` is passed to the\n\/\/\/ compiler. 
This makes `debug_assert!` useful for checks that are too\n\/\/\/ expensive to be present in a release build but may be helpful during\n\/\/\/ development.\n\/\/\/\n\/\/\/ An unchecked assertion allows a program in an inconsistent state to keep\n\/\/\/ running, which might have unexpected consequences but does not introduce\n\/\/\/ unsafety as long as this only happens in safe code. The performance cost\n\/\/\/ of assertions, is however, not measurable in general. Replacing `assert!`\n\/\/\/ with `debug_assert!` is thus only encouraged after thorough profiling, and\n\/\/\/ more importantly, only in safe code!\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ \/\/ the panic message for these assertions is the stringified value of the\n\/\/\/ \/\/ expression given.\n\/\/\/ debug_assert!(true);\n\/\/\/\n\/\/\/ fn some_expensive_computation() -> bool { true } \/\/ a very simple function\n\/\/\/ debug_assert!(some_expensive_computation());\n\/\/\/\n\/\/\/ \/\/ assert with a custom message\n\/\/\/ let x = true;\n\/\/\/ debug_assert!(x, \"x wasn't true!\");\n\/\/\/\n\/\/\/ let a = 3; let b = 27;\n\/\/\/ debug_assert!(a + b == 30, \"a = {}, b = {}\", a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! debug_assert {\n ($($arg:tt)*) => (if cfg!(debug_assertions) { assert!($($arg)*); })\n}\n\n\/\/\/ Asserts that two expressions are equal to each other.\n\/\/\/\n\/\/\/ On panic, this macro will print the values of the expressions with their\n\/\/\/ debug representations.\n\/\/\/\n\/\/\/ Unlike `assert_eq!`, `debug_assert_eq!` statements are only enabled in non\n\/\/\/ optimized builds by default. An optimized build will omit all\n\/\/\/ `debug_assert_eq!` statements unless `-C debug-assertions` is passed to the\n\/\/\/ compiler. This makes `debug_assert_eq!` useful for checks that are too\n\/\/\/ expensive to be present in a release build but may be helpful during\n\/\/\/ development.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ let a = 3;\n\/\/\/ let b = 1 + 2;\n\/\/\/ debug_assert_eq!(a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! debug_assert_eq {\n ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })\n}\n\n\/\/\/ Helper macro for unwrapping `Result` values while returning early with an\n\/\/\/ error if the value of the expression is `Err`. Can only be used in\n\/\/\/ functions that return `Result` because of the early return of `Err` that\n\/\/\/ it provides.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/ use std::fs::File;\n\/\/\/ use std::io::prelude::*;\n\/\/\/\n\/\/\/ fn write_to_file_using_try() -> Result<(), io::Error> {\n\/\/\/ let mut file = try!(File::create(\"my_best_friends.txt\"));\n\/\/\/ try!(file.write_all(b\"This is a list of my best friends.\"));\n\/\/\/ println!(\"I wrote to the file\");\n\/\/\/ Ok(())\n\/\/\/ }\n\/\/\/ \/\/ This is equivalent to:\n\/\/\/ fn write_to_file_using_match() -> Result<(), io::Error> {\n\/\/\/ let mut file = try!(File::create(\"my_best_friends.txt\"));\n\/\/\/ match file.write_all(b\"This is a list of my best friends.\") {\n\/\/\/ Ok(v) => v,\n\/\/\/ Err(e) => return Err(e),\n\/\/\/ }\n\/\/\/ println!(\"I wrote to the file\");\n\/\/\/ Ok(())\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! 
try {\n ($expr:expr) => (match $expr {\n $crate::result::Result::Ok(val) => val,\n $crate::result::Result::Err(err) => {\n return $crate::result::Result::Err($crate::convert::From::from(err))\n }\n })\n}\n\n\/\/\/ Use the `format!` syntax to write data into a buffer.\n\/\/\/\n\/\/\/ This macro is typically used with a buffer of `&mut `[`Write`][write].\n\/\/\/\n\/\/\/ See [`std::fmt`][fmt] for more information on format syntax.\n\/\/\/\n\/\/\/ [fmt]: ..\/std\/fmt\/index.html\n\/\/\/ [write]: ..\/std\/io\/trait.Write.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::Write;\n\/\/\/\n\/\/\/ let mut w = Vec::new();\n\/\/\/ write!(&mut w, \"test\").unwrap();\n\/\/\/ write!(&mut w, \"formatted {}\", \"arguments\").unwrap();\n\/\/\/\n\/\/\/ assert_eq!(w, b\"testformatted arguments\");\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! write {\n ($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)))\n}\n\n\/\/\/ Use the `format!` syntax to write data into a buffer, appending a newline.\n\/\/\/ On all platforms, the newline is the LINE FEED character (`\\n`\/`U+000A`)\n\/\/\/ alone (no additional CARRIAGE RETURN (`\\r`\/`U+000D`).\n\/\/\/\n\/\/\/ This macro is typically used with a buffer of `&mut `[`Write`][write].\n\/\/\/\n\/\/\/ See [`std::fmt`][fmt] for more information on format syntax.\n\/\/\/\n\/\/\/ [fmt]: ..\/std\/fmt\/index.html\n\/\/\/ [write]: ..\/std\/io\/trait.Write.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::Write;\n\/\/\/\n\/\/\/ let mut w = Vec::new();\n\/\/\/ writeln!(&mut w, \"test\").unwrap();\n\/\/\/ writeln!(&mut w, \"formatted {}\", \"arguments\").unwrap();\n\/\/\/\n\/\/\/ assert_eq!(&w[..], \"test\\nformatted arguments\\n\".as_bytes());\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! writeln {\n ($dst:expr, $fmt:expr) => (\n write!($dst, concat!($fmt, \"\\n\"))\n );\n ($dst:expr, $fmt:expr, $($arg:tt)*) => (\n write!($dst, concat!($fmt, \"\\n\"), $($arg)*)\n );\n}\n\n\/\/\/ A utility macro for indicating unreachable code.\n\/\/\/\n\/\/\/ This is useful any time that the compiler can't determine that some code is unreachable. For\n\/\/\/ example:\n\/\/\/\n\/\/\/ * Match arms with guard conditions.\n\/\/\/ * Loops that dynamically terminate.\n\/\/\/ * Iterators that dynamically terminate.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ This will always panic.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Match arms:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # #[allow(dead_code)]\n\/\/\/ fn foo(x: Option<i32>) {\n\/\/\/ match x {\n\/\/\/ Some(n) if n >= 0 => println!(\"Some(Non-negative)\"),\n\/\/\/ Some(n) if n < 0 => println!(\"Some(Negative)\"),\n\/\/\/ Some(_) => unreachable!(), \/\/ compile error if commented out\n\/\/\/ None => println!(\"None\")\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Iterators:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # #[allow(dead_code)]\n\/\/\/ fn divide_by_three(x: u32) -> u32 { \/\/ one of the poorest implementations of x\/3\n\/\/\/ for i in 0.. {\n\/\/\/ if 3*i < i { panic!(\"u32 overflow\"); }\n\/\/\/ if x < 3*i { return i-1; }\n\/\/\/ }\n\/\/\/ unreachable!();\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! 
unreachable {\n () => ({\n panic!(\"internal error: entered unreachable code\")\n });\n ($msg:expr) => ({\n unreachable!(\"{}\", $msg)\n });\n ($fmt:expr, $($arg:tt)*) => ({\n panic!(concat!(\"internal error: entered unreachable code: \", $fmt), $($arg)*)\n });\n}\n\n\/\/\/ A standardized placeholder for marking unfinished code. It panics with the\n\/\/\/ message `\"not yet implemented\"` when executed.\n\/\/\/\n\/\/\/ This can be useful if you are prototyping and are just looking to have your\n\/\/\/ code typecheck, or if you're implementing a trait that requires multiple\n\/\/\/ methods, and you're only planning on using one of them.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Here's an example of some in-progress code. We have a trait `Foo`:\n\/\/\/\n\/\/\/ ```\n\/\/\/ trait Foo {\n\/\/\/ fn bar(&self);\n\/\/\/ fn baz(&self);\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ We want to implement `Foo` on one of our types, but we also want to work on\n\/\/\/ just `bar()` first. In order for our code to compile, we need to implement\n\/\/\/ `baz()`, so we can use `unimplemented!`:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # trait Foo {\n\/\/\/ # fn bar(&self);\n\/\/\/ # fn baz(&self);\n\/\/\/ # }\n\/\/\/ struct MyStruct;\n\/\/\/\n\/\/\/ impl Foo for MyStruct {\n\/\/\/ fn bar(&self) {\n\/\/\/ \/\/ implementation goes here\n\/\/\/ }\n\/\/\/\n\/\/\/ fn baz(&self) {\n\/\/\/ \/\/ let's not worry about implementing baz() for now\n\/\/\/ unimplemented!();\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let s = MyStruct;\n\/\/\/ s.bar();\n\/\/\/\n\/\/\/ \/\/ we aren't even using baz() yet, so this is fine.\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! unimplemented {\n () => (panic!(\"not yet implemented\"))\n}\n<commit_msg>Rollup merge of #35279 - cengizIO:master, r=brson<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/\/ Entry point of thread panic, for details, see std::macros\n#[macro_export]\n#[allow_internal_unstable]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! panic {\n () => (\n panic!(\"explicit panic\")\n );\n ($msg:expr) => ({\n static _MSG_FILE_LINE: (&'static str, &'static str, u32) = ($msg, file!(), line!());\n $crate::panicking::panic(&_MSG_FILE_LINE)\n });\n ($fmt:expr, $($arg:tt)*) => ({\n \/\/ The leading _'s are to avoid dead code warnings if this is\n \/\/ used inside a dead function. Just `#[allow(dead_code)]` is\n \/\/ insufficient, since the user may have\n \/\/ `#[forbid(dead_code)]` and which cannot be overridden.\n static _FILE_LINE: (&'static str, u32) = (file!(), line!());\n $crate::panicking::panic_fmt(format_args!($fmt, $($arg)*), &_FILE_LINE)\n });\n}\n\n\/\/\/ Ensure that a boolean expression is `true` at runtime.\n\/\/\/\n\/\/\/ This will invoke the `panic!` macro if the provided expression cannot be\n\/\/\/ evaluated to `true` at runtime.\n\/\/\/\n\/\/\/ Assertions are always checked in both debug and release builds, and cannot\n\/\/\/ be disabled. 
See `debug_assert!` for assertions that are not enabled in\n\/\/\/ release builds by default.\n\/\/\/\n\/\/\/ Unsafe code relies on `assert!` to enforce run-time invariants that, if\n\/\/\/ violated could lead to unsafety.\n\/\/\/\n\/\/\/ Other use-cases of `assert!` include\n\/\/\/ [testing](https:\/\/doc.rust-lang.org\/book\/testing.html) and enforcing\n\/\/\/ run-time invariants in safe code (whose violation cannot result in unsafety).\n\/\/\/\n\/\/\/ This macro has a second version, where a custom panic message can be provided.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ \/\/ the panic message for these assertions is the stringified value of the\n\/\/\/ \/\/ expression given.\n\/\/\/ assert!(true);\n\/\/\/\n\/\/\/ fn some_computation() -> bool { true } \/\/ a very simple function\n\/\/\/\n\/\/\/ assert!(some_computation());\n\/\/\/\n\/\/\/ \/\/ assert with a custom message\n\/\/\/ let x = true;\n\/\/\/ assert!(x, \"x wasn't true!\");\n\/\/\/\n\/\/\/ let a = 3; let b = 27;\n\/\/\/ assert!(a + b == 30, \"a = {}, b = {}\", a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! assert {\n ($cond:expr) => (\n if !$cond {\n panic!(concat!(\"assertion failed: \", stringify!($cond)))\n }\n );\n ($cond:expr, $($arg:tt)+) => (\n if !$cond {\n panic!($($arg)+)\n }\n );\n}\n\n\/\/\/ Asserts that two expressions are equal to each other.\n\/\/\/\n\/\/\/ On panic, this macro will print the values of the expressions with their\n\/\/\/ debug representations.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ let a = 3;\n\/\/\/ let b = 1 + 2;\n\/\/\/ assert_eq!(a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! assert_eq {\n ($left:expr , $right:expr) => ({\n match (&$left, &$right) {\n (left_val, right_val) => {\n if !(*left_val == *right_val) {\n panic!(\"assertion failed: `(left == right)` \\\n (left: `{:?}`, right: `{:?}`)\", left_val, right_val)\n }\n }\n }\n });\n ($left:expr , $right:expr, $($arg:tt)*) => ({\n match (&($left), &($right)) {\n (left_val, right_val) => {\n if !(*left_val == *right_val) {\n panic!(\"assertion failed: `(left == right)` \\\n (left: `{:?}`, right: `{:?}`): {}\", left_val, right_val,\n format_args!($($arg)*))\n }\n }\n }\n });\n}\n\n\/\/\/ Ensure that a boolean expression is `true` at runtime.\n\/\/\/\n\/\/\/ This will invoke the `panic!` macro if the provided expression cannot be\n\/\/\/ evaluated to `true` at runtime.\n\/\/\/\n\/\/\/ Like `assert!`, this macro also has a second version, where a custom panic\n\/\/\/ message can be provided.\n\/\/\/\n\/\/\/ Unlike `assert!`, `debug_assert!` statements are only enabled in non\n\/\/\/ optimized builds by default. An optimized build will omit all\n\/\/\/ `debug_assert!` statements unless `-C debug-assertions` is passed to the\n\/\/\/ compiler. This makes `debug_assert!` useful for checks that are too\n\/\/\/ expensive to be present in a release build but may be helpful during\n\/\/\/ development.\n\/\/\/\n\/\/\/ An unchecked assertion allows a program in an inconsistent state to keep\n\/\/\/ running, which might have unexpected consequences but does not introduce\n\/\/\/ unsafety as long as this only happens in safe code. The performance cost\n\/\/\/ of assertions, is however, not measurable in general. 
Replacing `assert!`\n\/\/\/ with `debug_assert!` is thus only encouraged after thorough profiling, and\n\/\/\/ more importantly, only in safe code!\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ \/\/ the panic message for these assertions is the stringified value of the\n\/\/\/ \/\/ expression given.\n\/\/\/ debug_assert!(true);\n\/\/\/\n\/\/\/ fn some_expensive_computation() -> bool { true } \/\/ a very simple function\n\/\/\/ debug_assert!(some_expensive_computation());\n\/\/\/\n\/\/\/ \/\/ assert with a custom message\n\/\/\/ let x = true;\n\/\/\/ debug_assert!(x, \"x wasn't true!\");\n\/\/\/\n\/\/\/ let a = 3; let b = 27;\n\/\/\/ debug_assert!(a + b == 30, \"a = {}, b = {}\", a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! debug_assert {\n ($($arg:tt)*) => (if cfg!(debug_assertions) { assert!($($arg)*); })\n}\n\n\/\/\/ Asserts that two expressions are equal to each other.\n\/\/\/\n\/\/\/ On panic, this macro will print the values of the expressions with their\n\/\/\/ debug representations.\n\/\/\/\n\/\/\/ Unlike `assert_eq!`, `debug_assert_eq!` statements are only enabled in non\n\/\/\/ optimized builds by default. An optimized build will omit all\n\/\/\/ `debug_assert_eq!` statements unless `-C debug-assertions` is passed to the\n\/\/\/ compiler. This makes `debug_assert_eq!` useful for checks that are too\n\/\/\/ expensive to be present in a release build but may be helpful during\n\/\/\/ development.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ let a = 3;\n\/\/\/ let b = 1 + 2;\n\/\/\/ debug_assert_eq!(a, b);\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! debug_assert_eq {\n ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })\n}\n\n\/\/\/ Helper macro for unwrapping `Result` values while returning early with an\n\/\/\/ error if the value of the expression is `Err`. Can only be used in\n\/\/\/ functions that return `Result` because of the early return of `Err` that\n\/\/\/ it provides.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/ use std::fs::File;\n\/\/\/ use std::io::prelude::*;\n\/\/\/\n\/\/\/ fn write_to_file_using_try() -> Result<(), io::Error> {\n\/\/\/ let mut file = try!(File::create(\"my_best_friends.txt\"));\n\/\/\/ try!(file.write_all(b\"This is a list of my best friends.\"));\n\/\/\/ println!(\"I wrote to the file\");\n\/\/\/ Ok(())\n\/\/\/ }\n\/\/\/ \/\/ This is equivalent to:\n\/\/\/ fn write_to_file_using_match() -> Result<(), io::Error> {\n\/\/\/ let mut file = try!(File::create(\"my_best_friends.txt\"));\n\/\/\/ match file.write_all(b\"This is a list of my best friends.\") {\n\/\/\/ Ok(v) => v,\n\/\/\/ Err(e) => return Err(e),\n\/\/\/ }\n\/\/\/ println!(\"I wrote to the file\");\n\/\/\/ Ok(())\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! try {\n ($expr:expr) => (match $expr {\n $crate::result::Result::Ok(val) => val,\n $crate::result::Result::Err(err) => {\n return $crate::result::Result::Err($crate::convert::From::from(err))\n }\n })\n}\n\n\/\/\/ Write formatted data into a buffer\n\/\/\/\n\/\/\/ This macro accepts any value with `write_fmt` method as a writer, a format string, and a list\n\/\/\/ of arguments to format.\n\/\/\/\n\/\/\/ `write_fmt` method usually comes from an implementation of [`std::fmt::Write`][fmt_write] or\n\/\/\/ [`std::io::Write`][io_write] traits. 
These are sometimes called 'writers'.\n\/\/\/\n\/\/\/ Passed arguments will be formatted according to the specified format string and the resulting\n\/\/\/ string will be passed to the writer.\n\/\/\/\n\/\/\/ See [`std::fmt`][fmt] for more information on format syntax.\n\/\/\/\n\/\/\/ Return value is completely dependent on the 'write_fmt' method.\n\/\/\/\n\/\/\/ Common return values are: [`Result`][enum_result], [`io::Result`][type_result]\n\/\/\/\n\/\/\/ [fmt]: ..\/std\/fmt\/index.html\n\/\/\/ [fmt_write]: ..\/std\/fmt\/trait.Write.html\n\/\/\/ [io_write]: ..\/std\/io\/trait.Write.html\n\/\/\/ [enum_result]: ..\/std\/result\/enum.Result.html\n\/\/\/ [type_result]: ..\/std\/io\/type.Result.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::Write;\n\/\/\/\n\/\/\/ let mut w = Vec::new();\n\/\/\/ write!(&mut w, \"test\").unwrap();\n\/\/\/ write!(&mut w, \"formatted {}\", \"arguments\").unwrap();\n\/\/\/\n\/\/\/ assert_eq!(w, b\"testformatted arguments\");\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! write {\n ($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)))\n}\n\n\/\/\/ Write formatted data into a buffer, with appending a newline.\n\/\/\/\n\/\/\/ On all platforms, the newline is the LINE FEED character (`\\n`\/`U+000A`) alone\n\/\/\/ (no additional CARRIAGE RETURN (`\\r`\/`U+000D`).\n\/\/\/\n\/\/\/ This macro accepts any value with `write_fmt` method as a writer, a format string, and a list\n\/\/\/ of arguments to format.\n\/\/\/\n\/\/\/ `write_fmt` method usually comes from an implementation of [`std::fmt::Write`][fmt_write] or\n\/\/\/ [`std::io::Write`][io_write] traits. These are sometimes called 'writers'.\n\/\/\/\n\/\/\/ Passed arguments will be formatted according to the specified format string and the resulting\n\/\/\/ string will be passed to the writer.\n\/\/\/\n\/\/\/ See [`std::fmt`][fmt] for more information on format syntax.\n\/\/\/\n\/\/\/ Return value is completely dependent on the 'write_fmt' method.\n\/\/\/\n\/\/\/ Common return values are: [`Result`][enum_result], [`io::Result`][type_result]\n\/\/\/\n\/\/\/ [fmt]: ..\/std\/fmt\/index.html\n\/\/\/ [fmt_write]: ..\/std\/fmt\/trait.Write.html\n\/\/\/ [io_write]: ..\/std\/io\/trait.Write.html\n\/\/\/ [enum_result]: ..\/std\/result\/enum.Result.html\n\/\/\/ [type_result]: ..\/std\/io\/type.Result.html\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io::Write;\n\/\/\/\n\/\/\/ let mut w = Vec::new();\n\/\/\/ writeln!(&mut w, \"test\").unwrap();\n\/\/\/ writeln!(&mut w, \"formatted {}\", \"arguments\").unwrap();\n\/\/\/\n\/\/\/ assert_eq!(&w[..], \"test\\nformatted arguments\\n\".as_bytes());\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nmacro_rules! writeln {\n ($dst:expr, $fmt:expr) => (\n write!($dst, concat!($fmt, \"\\n\"))\n );\n ($dst:expr, $fmt:expr, $($arg:tt)*) => (\n write!($dst, concat!($fmt, \"\\n\"), $($arg)*)\n );\n}\n\n\/\/\/ A utility macro for indicating unreachable code.\n\/\/\/\n\/\/\/ This is useful any time that the compiler can't determine that some code is unreachable. 
For\n\/\/\/ example:\n\/\/\/\n\/\/\/ * Match arms with guard conditions.\n\/\/\/ * Loops that dynamically terminate.\n\/\/\/ * Iterators that dynamically terminate.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ This will always panic.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Match arms:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # #[allow(dead_code)]\n\/\/\/ fn foo(x: Option<i32>) {\n\/\/\/ match x {\n\/\/\/ Some(n) if n >= 0 => println!(\"Some(Non-negative)\"),\n\/\/\/ Some(n) if n < 0 => println!(\"Some(Negative)\"),\n\/\/\/ Some(_) => unreachable!(), \/\/ compile error if commented out\n\/\/\/ None => println!(\"None\")\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Iterators:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # #[allow(dead_code)]\n\/\/\/ fn divide_by_three(x: u32) -> u32 { \/\/ one of the poorest implementations of x\/3\n\/\/\/ for i in 0.. {\n\/\/\/ if 3*i < i { panic!(\"u32 overflow\"); }\n\/\/\/ if x < 3*i { return i-1; }\n\/\/\/ }\n\/\/\/ unreachable!();\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! unreachable {\n () => ({\n panic!(\"internal error: entered unreachable code\")\n });\n ($msg:expr) => ({\n unreachable!(\"{}\", $msg)\n });\n ($fmt:expr, $($arg:tt)*) => ({\n panic!(concat!(\"internal error: entered unreachable code: \", $fmt), $($arg)*)\n });\n}\n\n\/\/\/ A standardized placeholder for marking unfinished code. It panics with the\n\/\/\/ message `\"not yet implemented\"` when executed.\n\/\/\/\n\/\/\/ This can be useful if you are prototyping and are just looking to have your\n\/\/\/ code typecheck, or if you're implementing a trait that requires multiple\n\/\/\/ methods, and you're only planning on using one of them.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ Here's an example of some in-progress code. We have a trait `Foo`:\n\/\/\/\n\/\/\/ ```\n\/\/\/ trait Foo {\n\/\/\/ fn bar(&self);\n\/\/\/ fn baz(&self);\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ We want to implement `Foo` on one of our types, but we also want to work on\n\/\/\/ just `bar()` first. In order for our code to compile, we need to implement\n\/\/\/ `baz()`, so we can use `unimplemented!`:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # trait Foo {\n\/\/\/ # fn bar(&self);\n\/\/\/ # fn baz(&self);\n\/\/\/ # }\n\/\/\/ struct MyStruct;\n\/\/\/\n\/\/\/ impl Foo for MyStruct {\n\/\/\/ fn bar(&self) {\n\/\/\/ \/\/ implementation goes here\n\/\/\/ }\n\/\/\/\n\/\/\/ fn baz(&self) {\n\/\/\/ \/\/ let's not worry about implementing baz() for now\n\/\/\/ unimplemented!();\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ fn main() {\n\/\/\/ let s = MyStruct;\n\/\/\/ s.bar();\n\/\/\/\n\/\/\/ \/\/ we aren't even using baz() yet, so this is fine.\n\/\/\/ }\n\/\/\/ ```\n#[macro_export]\n#[stable(feature = \"core\", since = \"1.6.0\")]\nmacro_rules! unimplemented {\n () => (panic!(\"not yet implemented\"))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Try to read status byte<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>generic structs added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #12450 : FlaPer87\/rust\/issue-10682, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for issue #10682\n\/\/ Nested `proc` usage can't use outer owned data\n\nfn work(_: ~int) {}\nfn foo(_: proc()) {}\n\npub fn main() {\n let a = ~1;\n foo(proc() { foo(proc() { work(a) }) })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add a regression test for #27583. Fixes #27583.<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for issue #27583. Unclear how useful this will be\n\/\/ going forward, since the issue in question was EXTREMELY sensitive\n\/\/ to compiler internals (like the precise numbering of nodes), but\n\/\/ what the hey.\n\n#![allow(warnings)]\n\nuse std::cell::Cell;\nuse std::marker::PhantomData;\n\npub trait Delegate<'tcx> { }\n\npub struct InferCtxt<'a, 'tcx: 'a> {\n x: PhantomData<&'a Cell<&'tcx ()>>\n}\n\npub struct MemCategorizationContext<'t, 'a: 't, 'tcx : 'a> {\n x: &'t InferCtxt<'a, 'tcx>,\n}\n\npub struct ExprUseVisitor<'d, 't, 'a: 't, 'tcx:'a+'d> {\n typer: &'t InferCtxt<'a, 'tcx>,\n mc: MemCategorizationContext<'t, 'a, 'tcx>,\n delegate: &'d mut (Delegate<'tcx>+'d),\n}\n\nimpl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {\n pub fn new(delegate: &'d mut Delegate<'tcx>,\n typer: &'t InferCtxt<'a, 'tcx>)\n -> ExprUseVisitor<'d,'t,'a,'tcx>\n {\n ExprUseVisitor {\n typer: typer,\n mc: MemCategorizationContext::new(typer),\n delegate: delegate,\n }\n }\n}\n\nimpl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> {\n pub fn new(typer: &'t InferCtxt<'a, 'tcx>) -> MemCategorizationContext<'t, 'a, 'tcx> {\n MemCategorizationContext { x: typer }\n }\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse std::cell::Cell;\nuse std::mem::swap;\n\n\/\/ Just a grab bag of stuff that you wouldn't want to actually write.\n\nfn strange() -> bool { let _x: bool = return true; }\n\nfn funny() {\n fn f(_x: ()) { }\n f(return);\n}\n\nfn what() {\n fn the(x: &Cell<bool>) {\n return while !x.get() { x.set(true); };\n }\n let i = &Cell::new(false);\n let dont = {||the(i)};\n dont();\n assert!((i.get()));\n}\n\nfn zombiejesus() {\n loop {\n while (return) {\n if (return) {\n match (return) {\n 1 => {\n if (return) {\n return\n } else {\n return\n }\n }\n _ => { return }\n };\n } else if (return) {\n return;\n }\n }\n if (return) { break; }\n }\n}\n\nfn notsure() {\n let mut _x: isize;\n let mut _y = (_x = 0) == (_x = 0);\n let mut _z = (_x = 0) < (_x = 0);\n let _a = (_x += 0) == (_x = 0);\n let _b = swap(&mut _y, &mut _z) == swap(&mut _y, &mut _z);\n}\n\nfn canttouchthis() -> usize {\n fn p() -> bool { true }\n let _a = (assert!((true)) == (assert!(p())));\n let _c = (assert!((p())) == ());\n let _b: bool = (println!(\"{}\", 0) == (return 0));\n}\n\nfn angrydome() {\n loop { if break { } }\n let mut i = 0;\n loop { i += 1; if i == 1 { match (continue) { 1 => { }, _ => panic!(\"wat\") } }\n break; }\n}\n\nfn evil_lincoln() { let _evil = println!(\"lincoln\"); }\n\nfn dots() {\n assert_eq!(String::from(\"..................................................\"),\n format!(\"{:?}\", .. .. .. .. .. .. .. .. .. .. .. .. ..\n .. .. .. .. .. .. .. .. .. .. .. ..));\n}\n\nfn you_eight() {\n assert_eq!(8, {\n macro_rules! u8 {\n (u8) => {\n mod u8 {\n pub fn u8<'u8>(u8: &'u8 u8) -> &'u8 u8 {\n \"u8\";\n u8\n }\n }\n };\n }\n\n u8!(u8);\n let &u8: &u8 = u8::u8(&8u8);\n u8\n });\n}\n\nfn fishy() {\n assert_eq!(String::from(\"><>\"),\n String::<>::from::<>(\"><>\").chars::<>().rev::<>().collect::<String>());\n}\n\npub fn main() {\n strange();\n funny();\n what();\n zombiejesus();\n notsure();\n canttouchthis();\n angrydome();\n evil_lincoln();\n dots();\n you_eight();\n fishy();\n}\n<commit_msg>add test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nuse std::cell::Cell;\nuse std::mem::swap;\n\n\/\/ Just a grab bag of stuff that you wouldn't want to actually write.\n\nfn strange() -> bool { let _x: bool = return true; }\n\nfn funny() {\n fn f(_x: ()) { }\n f(return);\n}\n\nfn what() {\n fn the(x: &Cell<bool>) {\n return while !x.get() { x.set(true); };\n }\n let i = &Cell::new(false);\n let dont = {||the(i)};\n dont();\n assert!((i.get()));\n}\n\nfn zombiejesus() {\n loop {\n while (return) {\n if (return) {\n match (return) {\n 1 => {\n if (return) {\n return\n } else {\n return\n }\n }\n _ => { return }\n };\n } else if (return) {\n return;\n }\n }\n if (return) { break; }\n }\n}\n\nfn notsure() {\n let mut _x: isize;\n let mut _y = (_x = 0) == (_x = 0);\n let mut _z = (_x = 0) < (_x = 0);\n let _a = (_x += 0) == (_x = 0);\n let _b = swap(&mut _y, &mut _z) == swap(&mut _y, &mut _z);\n}\n\nfn canttouchthis() -> usize {\n fn p() -> bool { true }\n let _a = (assert!((true)) == (assert!(p())));\n let _c = (assert!((p())) == ());\n let _b: bool = (println!(\"{}\", 0) == (return 0));\n}\n\nfn angrydome() {\n loop { if break { } }\n let mut i = 0;\n loop { i += 1; if i == 1 { match (continue) { 1 => { }, _ => panic!(\"wat\") } }\n break; }\n}\n\nfn evil_lincoln() { let _evil = println!(\"lincoln\"); }\n\nfn dots() {\n assert_eq!(String::from(\"..................................................\"),\n format!(\"{:?}\", .. .. .. .. .. .. .. .. .. .. .. .. ..\n .. .. .. .. .. .. .. .. .. .. .. ..));\n}\n\nfn you_eight() {\n assert_eq!(8, {\n macro_rules! u8 {\n (u8) => {\n mod u8 {\n pub fn u8<'u8>(u8: &'u8 u8) -> &'u8 u8 {\n \"u8\";\n u8\n }\n }\n };\n }\n\n u8!(u8);\n let &u8: &u8 = u8::u8(&8u8);\n u8\n });\n}\n\nfn fishy() {\n assert_eq!(String::from(\"><>\"),\n String::<>::from::<>(\"><>\").chars::<>().rev::<>().collect::<String>());\n}\n\nfn union() {\n union union<'union> { union: &'union union<'union>, }\n}\n\npub fn main() {\n strange();\n funny();\n what();\n zombiejesus();\n notsure();\n canttouchthis();\n angrydome();\n evil_lincoln();\n dots();\n you_eight();\n fishy();\n union();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>De-duplicate by journey rid.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test: Check native main() signature<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Checks the signature of the implicitly generated native main()\n\/\/ entry point. It must match C's `int main(int, char **)`.\n\nfn main() {\n}\n\n\/\/ CHECK: define i32 @main(i32, i8**)\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #58725 - jamwt:fix-27949, r=Centril<commit_after>\/\/ run-pass\n\/\/\n\/\/ At one time, the `==` operator (and other binary operators) did not\n\/\/ support subtyping during type checking, and would therefore require\n\/\/ LHS and RHS to be exactly identical--i.e. 
to have the same lifetimes.\n\/\/\n\/\/ This was fixed in 1a7fb7dc78439a704f024609ce3dc0beb1386552.\n\n#[derive(Copy, Clone)]\nstruct Input<'a> {\n foo: &'a u32\n}\n\nimpl <'a> std::cmp::PartialEq<Input<'a>> for Input<'a> {\n fn eq(&self, other: &Input<'a>) -> bool {\n self.foo == other.foo\n }\n\n fn ne(&self, other: &Input<'a>) -> bool {\n self.foo != other.foo\n }\n}\n\n\nfn check_equal<'a, 'b>(x: Input<'a>, y: Input<'b>) -> bool {\n \/\/ Type checking error due to 'a != 'b prior to 1a7fb7dc78\n x == y\n}\n\nfn main() {\n let i = 1u32;\n let j = 1u32;\n let k = 2u32;\n\n let input_i = Input { foo: &i };\n let input_j = Input { foo: &j };\n let input_k = Input { foo: &k };\n assert!(check_equal(input_i, input_i));\n assert!(check_equal(input_i, input_j));\n assert!(!check_equal(input_i, input_k));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add user interface definition<commit_after>use clap::{Arg, App, SubCommand};\n\npub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {\n app\n .arg(Arg::with_name(\"diaryname\")\n .long(\"diary\")\n .short(\"d\")\n .takes_value(true)\n .required(false)\n .help(\"Use other than default diary\"))\n\n .subcommand(SubCommand::with_name(\"create\")\n .about(\"Create a diary entry\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"no-edit\")\n .long(\"no-edit\")\n .short(\"e\")\n .takes_value(false)\n .required(false)\n .help(\"Do not edit after creating\"))\n )\n\n .subcommand(SubCommand::with_name(\"edit\")\n .about(\"Edit a diary entry\")\n .version(\"0.1\")\n .arg(Arg::with_name(\"datetime\")\n .long(\"datetime\")\n .short(\"d\")\n .takes_value(true)\n .required(false)\n .help(\"Specify the date and time which entry should be edited. If none is\n specified, the last entry is edited. If the diary entry does not exist for\n this time, this fails. 
Format: YYYY-MM-DDT[HH[:mm[:ss]]]\"))\n )\n\n .subcommand(SubCommand::with_name(\"list\")\n .about(\"List diary entries\")\n .version(\"0.1\"))\n\n \/\/ TODO: Support deleting diary entries\n \/\/ .subcommand(SubCommand::with_name(\"delete\")\n \/\/ .about(\"Delete a diary entry\")\n \/\/ .version(\"0.1\")\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::result::Result as RResult;\n\nuse toml::{Table, Value};\n\npub mod error {\n use std::fmt::{Debug, Display, Formatter};\n use std::fmt;\n use std::error::Error;\n use toml;\n\n #[derive(Clone)]\n pub enum ParserErrorKind {\n TOMLParserErrors,\n MissingMainSection,\n MissingVersionInfo,\n }\n\n pub struct ParserError {\n kind: ParserErrorKind,\n cause: Option<Box<Error>>,\n }\n\n impl ParserError {\n\n pub fn new(k: ParserErrorKind, cause: Option<Box<Error>>) -> ParserError {\n ParserError {\n kind: k,\n cause: cause,\n }\n }\n\n }\n\n impl Debug for ParserError {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{:?}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Display for ParserError {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Error for ParserError {\n\n fn description(&self) -> &str {\n match self.kind {\n ParserErrorKind::TOMLParserErrors => \"Several TOML-Parser-Errors\",\n ParserErrorKind::MissingMainSection => \"Missing main section\",\n ParserErrorKind::MissingVersionInfo => \"Missing version information in main section\",\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n}\n\n\nuse self::error::ParserErrorKind;\nuse self::error::ParserError;\n\n\/**\n * EntryHeader\n *\n * This is basically a wrapper around toml::Table which provides convenience to the user of the\n * librray.\n *\/\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\npub type Result<V> = RResult<V, error::ParserError>;\n\n\/**\n * Wrapper type around file header (TOML) object\n *\/\nimpl EntryHeader {\n\n \/**\n * Get a new header object with a already-filled toml table\n *\/\n pub fn new(toml: Table) -> EntryHeader {\n EntryHeader {\n toml: toml,\n }\n }\n\n \/**\n * Get the table which lives in the background\n *\/\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n pub fn parse(s: &str) -> Result<EntryHeader> {\n use toml::Parser;\n\n let mut parser = Parser::new(s);\n parser.parse()\n .ok_or(ParserError::new(ParserErrorKind::TOMLParserErrors, None))\n .and_then(|t| verify_header_consistency(t))\n .map(|t| EntryHeader::new(t))\n }\n\n}\n\nfn verify_header_consistency(t: Table) -> Result<Table> {\n if !has_main_section(&t) {\n Err(ParserError::new(ParserErrorKind::MissingMainSection, None))\n } else if !has_imag_version_in_main_section(&t) {\n Err(ParserError::new(ParserErrorKind::MissingVersionInfo, None))\n } else {\n Ok(t)\n }\n}\n\nfn has_main_section(t: &Table) -> bool {\n t.contains_key(\"imag\") &&\n match t.get(\"imag\") {\n Some(&Value::Table(_)) => true,\n Some(_) => false,\n None => false,\n }\n}\n\nfn has_imag_version_in_main_section(t: &Table) -> bool {\n use semver::Version;\n\n match t.get(\"imag\").unwrap() {\n &Value::Table(ref sec) => {\n sec.get(\"version\")\n .and_then(|v| {\n match v {\n &Value::String(ref s) => {\n Some(Version::parse(&s[..]).is_ok())\n },\n _ => Some(false),\n }\n })\n .unwrap_or(false)\n }\n _ => false,\n }\n}\n\n\n#[cfg(test)]\nmod test {\n use std::collections::BTreeMap;\n\n use 
toml::Value;\n\n #[test]\n fn test_imag_section() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Table(BTreeMap::new()));\n\n assert!(has_main_section(&map));\n }\n\n #[test]\n fn test_imag_invalid_section_type() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Boolean(false));\n\n assert!(!has_main_section(&map));\n }\n\n #[test]\n fn test_imag_abscent_main_section() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"not_imag\".into(), Value::Boolean(false));\n\n assert!(!has_main_section(&map));\n }\n\n #[test]\n fn test_main_section_without_version() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Table(BTreeMap::new()));\n\n assert!(!has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn test_main_section_with_version() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(\"0.0.0\".into()));\n map.insert(\"imag\".into(), Value::Table(sub));\n\n assert!(has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn test_main_section_with_version_in_wrong_type() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::Boolean(false));\n map.insert(\"imag\".into(), Value::Table(sub));\n\n assert!(!has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn test_verification_good() {\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(String::from(\"0.0.0\")));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(verify_header_consistency(header).is_ok());\n }\n\n #[test]\n fn test_verification_invalid_versionstring() {\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(String::from(\"000\")));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(!verify_header_consistency(header).is_ok());\n }\n\n\n #[test]\n fn test_verification_current_version() {\n use version;\n\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(version!()));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(verify_header_consistency(header).is_ok());\n }\n}\n\n<commit_msg>Dont pass the table to the header object, but generate it in ::new()<commit_after>use std::collections::BTreeMap;\nuse std::error::Error;\nuse std::result::Result as RResult;\n\nuse toml::{Table, Value};\n\nuse self::error::ParserErrorKind;\nuse self::error::ParserError;\n\npub mod error {\n use std::fmt::{Debug, Display, Formatter};\n use std::fmt;\n use std::error::Error;\n use toml;\n\n #[derive(Clone)]\n pub enum ParserErrorKind {\n TOMLParserErrors,\n MissingMainSection,\n MissingVersionInfo,\n }\n\n pub struct ParserError {\n kind: ParserErrorKind,\n cause: Option<Box<Error>>,\n }\n\n impl ParserError {\n\n pub fn new(k: ParserErrorKind, cause: Option<Box<Error>>) -> ParserError {\n ParserError {\n kind: k,\n cause: cause,\n }\n }\n\n }\n\n impl Debug for ParserError {\n\n fn 
fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{:?}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Display for ParserError {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n try!(write!(f, \"{}\", self.description()));\n Ok(())\n }\n\n }\n\n impl Error for ParserError {\n\n fn description(&self) -> &str {\n match self.kind {\n ParserErrorKind::TOMLParserErrors => \"Several TOML-Parser-Errors\",\n ParserErrorKind::MissingMainSection => \"Missing main section\",\n ParserErrorKind::MissingVersionInfo => \"Missing version information in main section\",\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n}\n\n\/**\n * EntryHeader\n *\n * This is basically a wrapper around toml::Table which provides convenience to the user of the\n * librray.\n *\/\n#[derive(Debug, Clone)]\npub struct EntryHeader {\n toml: Table,\n}\n\npub type Result<V> = RResult<V, error::ParserError>;\n\n\/**\n * Wrapper type around file header (TOML) object\n *\/\nimpl EntryHeader {\n\n \/**\n * Get a new header object with a already-filled toml table\n *\n * Default header values are inserted into the header object by default.\n *\/\n pub fn new() -> EntryHeader {\n EntryHeader {\n toml: BTreeMap::new(),\n }\n }\n\n \/**\n * Get the table which lives in the background\n *\/\n pub fn toml(&self) -> &Table {\n &self.toml\n }\n\n pub fn parse(s: &str) -> Result<EntryHeader> {\n use toml::Parser;\n\n let mut parser = Parser::new(s);\n parser.parse()\n .ok_or(ParserError::new(ParserErrorKind::TOMLParserErrors, None))\n .and_then(|t| verify_header_consistency(t))\n .map(|t| {\n EntryHeader {\n toml: t\n }\n })\n }\n\n}\n\nfn verify_header_consistency(t: Table) -> Result<Table> {\n if !has_main_section(&t) {\n Err(ParserError::new(ParserErrorKind::MissingMainSection, None))\n } else if !has_imag_version_in_main_section(&t) {\n Err(ParserError::new(ParserErrorKind::MissingVersionInfo, None))\n } else {\n Ok(t)\n }\n}\n\nfn has_main_section(t: &Table) -> bool {\n t.contains_key(\"imag\") &&\n match t.get(\"imag\") {\n Some(&Value::Table(_)) => true,\n Some(_) => false,\n None => false,\n }\n}\n\nfn has_imag_version_in_main_section(t: &Table) -> bool {\n use semver::Version;\n\n match t.get(\"imag\").unwrap() {\n &Value::Table(ref sec) => {\n sec.get(\"version\")\n .and_then(|v| {\n match v {\n &Value::String(ref s) => {\n Some(Version::parse(&s[..]).is_ok())\n },\n _ => Some(false),\n }\n })\n .unwrap_or(false)\n }\n _ => false,\n }\n}\n\n\n#[cfg(test)]\nmod test {\n use std::collections::BTreeMap;\n\n use toml::Value;\n\n #[test]\n fn test_imag_section() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Table(BTreeMap::new()));\n\n assert!(has_main_section(&map));\n }\n\n #[test]\n fn test_imag_invalid_section_type() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Boolean(false));\n\n assert!(!has_main_section(&map));\n }\n\n #[test]\n fn test_imag_abscent_main_section() {\n use super::has_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"not_imag\".into(), Value::Boolean(false));\n\n assert!(!has_main_section(&map));\n }\n\n #[test]\n fn test_main_section_without_version() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n map.insert(\"imag\".into(), Value::Table(BTreeMap::new()));\n\n assert!(!has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn 
test_main_section_with_version() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(\"0.0.0\".into()));\n map.insert(\"imag\".into(), Value::Table(sub));\n\n assert!(has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn test_main_section_with_version_in_wrong_type() {\n use super::has_imag_version_in_main_section;\n\n let mut map = BTreeMap::new();\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::Boolean(false));\n map.insert(\"imag\".into(), Value::Table(sub));\n\n assert!(!has_imag_version_in_main_section(&map));\n }\n\n #[test]\n fn test_verification_good() {\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(String::from(\"0.0.0\")));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(verify_header_consistency(header).is_ok());\n }\n\n #[test]\n fn test_verification_invalid_versionstring() {\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(String::from(\"000\")));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(!verify_header_consistency(header).is_ok());\n }\n\n\n #[test]\n fn test_verification_current_version() {\n use version;\n\n use super::verify_header_consistency;\n\n let mut header = BTreeMap::new();\n let sub = {\n let mut sub = BTreeMap::new();\n sub.insert(\"version\".into(), Value::String(version!()));\n\n Value::Table(sub)\n };\n\n header.insert(\"imag\".into(), sub);\n\n assert!(verify_header_consistency(header).is_ok());\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>resolve test case<commit_after>mod x {\n pub fn g() -> uint {14}\n}\n\nfn main(){\n \/\/ should *not* shadow the module x:\n let x = 9;\n \/\/ use it to avoid warnings:\n x+3;\n assert_eq!(x::g(),14);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for assert_eq! with none Sized types<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub fn main() {\n assert_eq!([1, 2, 3][..], vec![1, 2, 3][..]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to problem 20<commit_after>#[macro_use] extern crate libeuler;\n\nextern crate num;\nuse num::bigint::{BigInt, ToBigInt};\nuse num::traits::{One, Zero};\n\/\/\/ n! means n × (n − 1) × ... × 3 × 2 × 1\n\/\/\/\n\/\/\/ For example, 10! = 10 × 9 × ... × 3 × 2 × 1 = 3628800,\n\/\/\/ and the sum of the digits in the number 10! is 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27.\n\/\/\/\n\/\/\/ Find the sum of the digits in the number 100!\nfn main() {\n solutions! 
{\n sol naive {\n let mut sum = 0;\n for c in format!(\"{}\", kinda_fact(100)).chars() {\n let v: i64 = c as i64 - '0' as i64;\n sum += v;\n }\n\n sum\n }\n }\n}\n\nfn kinda_fact(n: i64) -> BigInt {\n let ten = 10.to_bigint().unwrap();\n let nb = n.to_bigint().unwrap();\n\n match n {\n 1 => BigInt::one(),\n n => match nb * kinda_fact(n - 1) {\n ref ret if ret % &ten == BigInt::zero() => ret \/ &ten,\n ret => ret\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Only print a backtrace the first time the signal handler is called.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added Rust example<commit_after>\n\/\/ compile with\n\/\/ \trustc --crate-type dylib rustplugin.rs\n\/\/ on windows:\n\/\/\trustc --crate-type cdylib -C opt-level=3 -C link-args=-s -C prefer-dynamic rustplugin.rs\n\nuse std::os::raw::{c_void,c_char,c_uchar,c_int,c_uint,c_double};\n\n\nconst VOO_PLUGIN_API_VERSION: i32 = 2;\n\n\n\/\/ display pixel data type\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[cfg(target_os = \"macos\")] \n#[repr(C)]\npub struct voo_target_space_t\n{\n x: c_uchar,\n r: c_uchar,\n g: c_uchar,\n b: c_uchar,\n}\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[cfg(not(target_os = \"macos\"))] \n#[repr(C)]\npub struct voo_target_space_t\n{\n b: c_uchar,\n g: c_uchar,\n r: c_uchar,\n x: c_uchar,\n}\n\n\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \npub enum voo_colorSpace_t {\n vooColorSpace_Unknown = -1,\n vooCS_YUV,\n vooCS_XYZ,\n vooCS_YIQ,\n vooCS_RGB,\n vooCS_Gray,\n vooCS_HSV,\n vooCS_YCgCo,\n vooCS_ICtCp\n}\n\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[allow(non_snake_case)] \npub enum voo_dataArrangement_t {\n vooDataArrangement_Unknown = -1,\n vooDA_planar_420,\n vooDA_planar_422,\n vooDA_planar_444,\n vooDA_planar_410,\n vooDA_planar_411,\n vooDA_uyvy,\n vooDA_yuyv,\n vooDA_yuy2,\n vooDA_nv12,\n vooDA_v210,\n vooDA_interleaved_410,\n vooDA_interleaved_411,\n vooDA_reserved0,\n vooDA_interleaved_422,\n vooDA_interleaved_444,\n vooDA_single,\n vooDA_singleDouble,\n vooDA_singleFloat,\n vooDA_planar_420double,\n vooDA_planar_422double,\n vooDA_planar_444double,\n vooDA_planar_410double,\n vooDA_planar_411double,\n vooDA_planar_420float,\n vooDA_planar_422float,\n vooDA_planar_444float,\n vooDA_planar_410float,\n vooDA_planar_411float,\n vooDA_rgb565,\n vooDA_rgb555,\n vooDA_r210,\n vooDA_v410,\n vooDA_yuv10,\n vooNumDataArrangements\n}\n\n\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[allow(non_snake_case)] \npub enum voo_channelOrder_t\n{\n vooChannelOrder_Unknown = -1,\n\n vooCO_c123,\n vooCO_c231,\n vooCO_c312,\n\n vooCO_c213,\n vooCO_c321,\n vooCO_c132,\n\n vooCO_c123x,\n vooCO_c231x,\n vooCO_c312x,\n\n vooCO_c213x,\n vooCO_c321x,\n vooCO_c132x,\n\n vooNumChannelOrders\n}\n\n\n\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[allow(non_snake_case)] \n#[repr(C)]\npub struct voo_sequence_t {\n\n pub filename: *const c_char,\n\n \/\/ video resolution\n pub width: c_int,\n pub height: c_int,\n\n \/\/ frames per seconds\n pub fps: c_double,\n\n \/\/ Color space, such as YUV, RGB etc.\n pub colorSpace: voo_colorSpace_t,\n\n \/\/ How the channels are packed or interleaved\n arrangement: voo_dataArrangement_t,\n\n \/\/ The order in which color channels are written\n channel_order: voo_channelOrder_t,\n\n \/\/ size in bytes of a single video frame in native format\n framesize: c_uint,\n\n \/\/ Bits per channel is normally 8 or 10-16 (valid bit depths are 1-16) (if integer)\n bitsPerChannel: c_int,\n\n \/\/ 
Whether the video shall be played upside down\n b_flipped: c_int,\n \/\/ Whether 16bit words shall be byte-swapped\n b_toggle_endian: c_int,\n \/\/ Whether the values (if integer) shall be treated as signed integers\n b_signed: c_int,\n\n\n reserved: [c_char; 32],\n\n}\n\n\n\/\/ structure vooya gives you in on_load_video( ... ).\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[allow(non_snake_case)] \n#[repr(C)]\npub struct voo_app_info_t {\n\n \/\/ a handle to vooya's current window (what it is, is platform dependent)\n p_handle: *const c_void,\n\n \/\/ to trigger vooya to a reload a frame, use these like:\n \/\/ p_app_info.pf_trigger_reload( app_info.p_reload_cargo )\n \/\/ note that this should happen not too often.\n p_reload_cargo: *const c_void,\n\tpf_trigger_reload: extern fn(p_reload_cargo: *const c_void) -> c_int,\n \n\treserved: [c_char; 32],\n}\n\n\n\n\n\/\/ Structure you get in per-frame callback functions.\n#[allow(dead_code)]\n#[allow(non_camel_case_types)] \n#[allow(non_snake_case)] \n#[repr(C)]\npub struct voo_video_frame_metadata_t {\n\n \/\/ user data you might have provided in voo_describe( ... ) as voo_plugin_t::p_user\n p_user: *const c_void,\n\n \/\/ per-sequence user data you might have provided in voo_plugin_t::on_load_video( ... )\n p_user_video: *const c_void,\n\n \/\/ per-frame user data you might have provided in input_plugin_t::load( ... )\n p_user_frame: *const c_void,\n\n p_info: *const voo_sequence_t, \/\/ info about the current sequence\n\n \/\/ frame number, beginning at zero\n frame_idx: c_uint,\n\n flags: c_int,\n\n reserved: [c_char; 32],\n\n}\n\n#[allow(dead_code)]\n#[allow(non_upper_case_globals)]\nconst VOOPerFrameFlag_YouAlreadyProcessed: i32 = 0x01; \/\/ this frame has already been processed by you\n#[allow(dead_code)]\n#[allow(non_upper_case_globals)]\nconst VOOPerFrameFlag_IsFromCache: i32 = 0x02; \/\/ this one comes from RGB-display cache\n#[allow(dead_code)]\n#[allow(non_upper_case_globals)]\nconst VOOPerFrameFlag_IsDifference: i32 = 0x04; \/\/ this frame is a difference frame\n\n\/\/ PLUGIN CALLBACK FUNCTION STRUCT\n\/\/\n\/\/ This struct shall contain user-defined callback functions along with some metadata.\n\/\/ First the callback types:\n#[allow(dead_code)] \n#[allow(non_camel_case_types)] \nenum vooya_callback_type_t {\n vooCallback_Native,\n vooCallback_RGBOut,\n vooCallback_EOTF,\n vooCallback_Histogram\n}\n\n\n#[allow(dead_code)] \n#[allow(non_camel_case_types)] \n#[repr(C)]\npub struct vooya_callback_t\n{\n\t\/\/ The following strings must be set and be persistent throughout plugin's linkage.\n\t\/\/ uid must not be empty or NULL.\n\tuid: *const c_char, \/\/ a unique string, e.g. 
\"myplugin.rgb_invert.1\",\n\t \/\/ at most 63 chars in length, ANSI without any whitespace\n\tname: *const c_char, \/\/ a user-friendly, descriptive name\n\tdescription: *const c_char, \/\/ a more in-depth description\n\n \/\/ Functions vooya will call upon user's (de)selection of this callback (optional)\n on_select: unsafe extern fn( p_info: *const voo_sequence_t, p_app_info: *const voo_app_info_t, p_user: *const c_void, pp_user_video: *const *mut c_void ) -> (),\n on_deselect: unsafe extern fn( p_user: *const c_void, p_user_video: *const c_void ) -> (),\n\n \/\/ Flags to signal something to vooya (for future use)\n flags: i32,\n\n \/\/ type determines which callback signature will be called\n e_type: vooya_callback_type_t,\n\n\t\/\/ actual callback function (required, see below)\n\tmethod: *const c_void,\n\n\t\/\/ For type == vooCallback_RGBOut:\n\t\/\/ Called by vooya for each video frame with rgb data ready to be rendered,\n\t\/\/ i.e. color-converted, range-shifted to 8bit and with EOTF and image\n\t\/\/ adjustments applied. Can be used to feed the data outside of vooya as\n\t\/\/ well as to alter the data right before display.\n\t\/\/ Stride in bytes is equal to width*sizeof(voo_target_space_t).\n\t\/\/ method shall be:\n\t\/\/ unsafe extern fn( p_data: *mut voo_target_space_t, p_metadata: *const voo_video_frame_metadata_t ) -> (),\n\n\t\/\/ For type == vooCallback_Native:\n\t\/\/ Called by vooya for each video frame with native data before color\n\t\/\/ conversion to RGB 8bit, and without image adjustments. Can be used to\n\t\/\/ feed the data outside of vooya. Properties like resolution\n\t\/\/ and data format are given beforehand in on_load_video( ... ); you can\n\t\/\/ save them in p_metadata->p_user_video. \"p_data\" is the image data.\n\t\/\/ method shall be\n\t\/\/ unsafe extern fn( p_data: *mut c_uchar, p_metadata: *const voo_video_frame_metadata_t ) -> (),\n\n\t\/\/ For type == vooCallback_EOTF:\n\t\/\/ Called by vooya when a lookup-table for the transfer function is being made.\n\t\/\/ \"value\" is in the range of 0-1, representing an RGB channel value of input bit\n\t\/\/ depth (\"bits\"). \"p_user\" might be provided by you from within voo_describe(...)\n\t\/\/ and can be NULL or any custom data. The call of this function happens before\n\t\/\/ application of brightness, contrast, gamma and exposure user settings.\n\t\/\/ method shall be:\n\t\/\/ unsafe extern fn( value: c_double, bits: c_int, p_user: *const c_void ) -> c_double,\n\n\t\/\/ For type == vooCallback_Histogram:\n\t\/\/ Called by vooya for each frame if histogram calculation (and display) is enabled.\n\t\/\/ The three pointers contain the histograms for each channel respectively. Their\n\t\/\/ length is (1<<bit_depth)-1 (floating point data is put into 12bits).\n\t\/\/ method shall be:\n\t\/\/ unsafe extern fn( p_h1: *const c_uint, p_h2: *const c_uint, p_h3: *const c_uint,\n\t\/\/ p_metadata: *const voo_video_frame_metadata_t ) -> (),\n}\n\n\n\n\n\n\n\n\/\/ INPUT DESCRIPTION STRUCT\n\/\/\n\/\/ Container to provide custom input to vooya from file or from \"nowhere\".\n#[allow(dead_code)] \n#[allow(non_camel_case_types)]\n#[allow(non_snake_case)] \n#[repr(C)]\nstruct input_plugin_t {\n\n\tuid: *const c_char, \/\/ a unique string, e.g. 
\"myplugin.text.input\",\n\t \/\/ at most 63 chars in length, ANSI without any whitespace\n\tname: *const c_char, \/\/ a user-friendly, descriptive name (mandatory)\n\tdescription: *const c_char, \/\/ a more in-depth description\n\t\n\t\/\/ If b_fileBased is TRUE, vooya will ask for file suffixes supported by this input,\n\t\/\/ call file_suffixes( ... ), responsible( ... ) and open( ... ), and will include\n\t\/\/ this input in the file open dialog. If b_fileBased is FALSE, an entry for this input\n\t\/\/ will be displayed in the plugins-menu that the user can select as current input.\n\t\/\/ In that case, vooya will call open_nowhere( ... ).\n\tb_fileBased: i32,\n\n\t\/\/ Flags to signal something to vooya (for future use)\n\tflags: i32,\n\n reserved1: [c_char; 8],\n\n\t\/\/ If the input is file-based, responsible will be called with the file name and the\n\t\/\/ first sixteen bytes of data, which e.g. might contain magic data. p_user is\n\t\/\/ voo_plugin_t::p_user. If responsible returns TRUE, open will be called.\n\t\/\/ Only if input comes from stdin and \"--container [your input UID]\" is specified,\n\t\/\/ responsible will not be called, but open( ... ) directly.\n\t\/\/ For stdin, the filename is simply \"-\".\n\t\/\/ FIXME: filename not a c_char in Windows\n\tresponsible: unsafe extern fn( filename: *const c_char, sixteen_bytes: *const c_char, p_user: *const c_void ) -> c_int,\n\t\/\/ The global p_user pointer you may have set in voo_describe( ... )\n\t\/\/ is given here as *pp_user_seq, but you can alter it. In that case, subsequent\n\t\/\/ calls to methods of this struct will have the new, per-sequence value. This is\n\t\/\/ important on macOS, where multiple instances of this input may exist.\n\topen: unsafe extern fn( filename: *const c_char, p_app_info: *const voo_app_info_t, pp_user_seq: *const *mut c_void ) -> c_int,\n\n\t\/\/ If the input is not based on file input (b_fileBased is FALSE),\n\t\/\/ open_nowhere will be called. The global p_user pointer you may have set in\n\t\/\/ voo_describe( ... ) is given here as *pp_user_seq, but you can alter it.\n\t\/\/ In that case, subsequent calls to methods of this struct will have the new,\n\t\/\/ per-sequence value. This is important on macOS, where multiple instances\n\t\/\/ of this input may exist.\n\topen_nowhere: unsafe extern fn( p_app_info: *const voo_app_info_t, pp_user_seq: *const *mut c_void ) -> c_int,\n\n\t\/\/ Called by vooya to get information about the video you provide.\n\t\/\/ You should fill p_info with correct information to make vooya play.\n\tget_properties: unsafe extern fn( p_info: *const voo_sequence_t, p_user_seq: *const c_void ) -> c_int,\n\n\t\/\/ Client shall return the number of frames available, or ~0U if no\n\t\/\/ framecount can be given (e.g. stdin).\n\tframecount: unsafe extern fn( p_user_seq: *const c_void ) -> c_uint,\n\n\t\/\/ Shall issue a seek by the client plugin to frame number \"frame\"\n\tseek: unsafe extern fn( frame: c_uint, p_user_seq: *const c_void ) -> c_int,\n\n\t\/\/ Load contents of frame number \"frame\" into p_buffer. p_buffer has a size\n\t\/\/ appropriate to the format given by the client in get_properties( ... ).\n\t\/\/ \"pb_skipped\" shall be set by the client to FALSE if the p_buffer has been filled\n\t\/\/ with data, or to TRUE if client decided to no reload the frame if e.g. \"frame\" is\n\t\/\/ repeated. 
\"pp_user_frame\" can hold custom data and is later available\n\t\/\/ in voo_video_frame_metadata_t::p_user_frame.\n\tload: unsafe extern fn( frame: c_uint, p_buffer: *const c_char, pb_skipped: *const c_int, pp_user_frame: *const *mut c_void, p_user_seq: *const c_void ) -> c_int,\n\n\teof: unsafe extern fn( p_user_seq: *const c_void ) -> c_uint,\n\tgood: unsafe extern fn( p_user_seq: *const c_void ) -> c_uint,\n\treload: unsafe extern fn( p_user_seq: *const c_void ) -> c_uint,\n\tclose: unsafe extern fn( p_user_seq: *const c_void ) -> (),\n\n\t\/\/ After open( ... ) or open_nowhere( ... ), this is called.\n\t\/\/ Set pp_err to an appropriate, persistent error message or to NULL.\n\terror_msg: unsafe extern fn( pp_err: *const *mut c_char, p_user_seq: *const c_void ) -> (),\n\n\t\/\/ Called by vooya to get supported file extensions. Those are then displayed in\n\t\/\/ the \"Open file\" dialog. vooya will start with idx=0, then increment idx and\n\t\/\/ call this again as long as you return TRUE. (only called when b_fileBased is true)\n\tfile_suffixes: unsafe extern fn( idx: c_int, pp_suffix: *const *mut c_char, p_user_seq: *const c_void ) -> c_int,\n\n\t\/\/ Called by vooya to enumerate meta information tags about the video you provide.\n\t\/\/ idx is counting up for each call as long as TRUE is return. Return FALSE to finish the \n\t\/\/ enumeration. \"buffer_k\" char[64] and shall take a key, \"buffer_v\" char[1024] and\n\t\/\/ shall take a corresponding value.\n\tget_meta: unsafe extern fn( idx: c_int, buffer_k: *const c_char, buffer_v: *const c_char, p_user_seq: *const c_void ) -> c_int,\n\n\t\/\/ vooya gives you a callback that you might call whenever the sequence's number of frames\n\t\/\/ will change. Note that p_vooya_ctx must not be altered and is valid only as long as this input is bound.\n\/\/\tvoid (*cb_seq_len_changed)( void (*seq_len_callback)( void *p_vooya_ctx, unsigned int new_len ), void *p_vooya_ctx );\n\tcb_seq_len_changed: unsafe extern fn( seq_len_callback: unsafe extern fn( p_vooya_ctx: *const c_void, new_len: c_uint ) -> (), p_vooya_ctx: *const c_void ) -> (),\n\n reserved2: [c_char; 32],\n} \n\n\n\n\/\/ Most important structure, this describes the plugin\n#[allow(dead_code)] \n#[allow(non_camel_case_types)]\n#[repr(C)]\npub struct voo_plugin_t\n{\n\tvoo_version: c_int, \/\/ set this always to VOO_PLUGIN_API_VERSION\n\n\t\/\/ plugin's main name, user friendly description, copyright notice and version info\n\tname: *const c_char,\n\tdescription: *const c_char,\n\tcopyright: *const c_char,\n\tversion: *const c_char,\n\n\t\/\/ Flags to signal something to vooya (for future use)\n\tflags: c_int,\n\n\t\/\/ any user data that shall be forwarded by vooya into other callback\n\t\/\/ functions (\"void *p_user\" argument)\n\tp_user: *const c_void,\n\n\t\/\/ called by vooya before the plugin is unloaded\n\ton_unload_plugin: extern fn( p_user: *const c_void ) -> (),\n\n\treserved: [c_char; 48],\n\n\t\/\/ the plugin's callback functions\n\tcallbacks: [vooya_callback_t; 10],\n\n\t\/\/ plugin's input capabilities. 
See input_plugin_t above.\n\tinput: input_plugin_t\n} \n\n\n\n\n\n\/*\n\n\t------- actual plugin below -------\n\n*\/\n\n\n\n\n\n\n\n\nconst NAME: &'static [u8] = b\"vooya Plugin Written in Rust\\0\";\nconst DESCR: &'static [u8] = b\"Adds funny RGB callback to show Rust binding, hehe.\\0\";\nconst COPYRIGHT: &'static [u8] = b\"(C) Arion Neddens 2016.\\0\";\nconst VERSION: &'static [u8] = b\"ver 1.0\\0\";\n\nconst CB_UID: &'static [u8] = b\"rust.callback.0\\0\";\nconst CB_NAME: &'static [u8] = b\"Convert to gray (Rust)\\0\";\nconst CB_DESCR: &'static [u8] = b\"Fun Function to show Rust bindings.\\0\";\n\n\n\/\/ Main entry function that every plugin must implement to describe itself on startup.\n\/\/ The \"p_plugin\"-structure is provided by vooya and to be filled in the implementation.\n\/\/ This is the first function to be called and must be implemented.\n#[no_mangle]\npub unsafe extern fn voo_describe( p_plugin: *mut voo_plugin_t )\n{\n\tlet ref mut p = *p_plugin;\n\t\n\tp.voo_version = VOO_PLUGIN_API_VERSION;\n\tp.name = NAME.as_ptr() as *const c_char;\n\tp.description = DESCR.as_ptr() as *const c_char;\n\tp.copyright = COPYRIGHT.as_ptr() as *const c_char;\n\tp.version = VERSION.as_ptr() as *const c_char;\n\n\tp.callbacks[0].uid = CB_UID.as_ptr() as *const c_char;\n\tp.callbacks[0].name = CB_NAME.as_ptr() as *const c_char;\n\tp.callbacks[0].description = CB_DESCR.as_ptr() as *const c_char;\n\tp.callbacks[0].e_type = vooya_callback_type_t::vooCallback_RGBOut;\n\tp.callbacks[0].method = twizzle as *const c_void;\n\t\n}\n\n\n\n\/\/ our function which does \"something\" with an rgb buffer.\n#[no_mangle]\npub unsafe extern fn twizzle( p_data: *mut voo_target_space_t, p_metadata: *const voo_video_frame_metadata_t )\n{\n\tlet ref p_meta = *p_metadata;\n\tlet ref p_seq_info = *(p_meta.p_info);\n \n\tif 0 != (p_meta.flags & VOOPerFrameFlag_IsFromCache) { \n return;\n\t}\n\n for y in 0..p_seq_info.height {\n\n for x in 0..p_seq_info.width {\n \n \t\t\tlet ref mut p: voo_target_space_t = *p_data.offset( (x + p_seq_info.width * y) as isize );\n let luma : i32 = (130 * p.r as i32 + 256 * p.g as i32 + 50 * p.b as i32) >> 8;\n\t\t\tp.r = std::cmp::min( 255, luma ) as u8;\n\t\t\tp.g = std::cmp::min( 255, luma ) as u8;\n\t\t\tp.b = std::cmp::min( 255, luma ) as u8;\n }\n }\n}\n\n\n\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>checkpoint<commit_after>use core::raw::Slice;\n\nuse self::syscalls::wait;\n\n#[derive(Clone,Copy)]\nstruct Chunk {\n inuse: bool,\n slice: Slice<u8>\n}\n\nstruct App {\n mem: Slice<u8>,\n offset: usize,\n chunks: [Option<Chunk>; 100]\n}\n\nstatic mut app : *mut App = 0 as *mut App;\n\npub fn _start(mem_start: *mut u8, mem_size: usize) {\n let myapp = unsafe {\n app = mem_start as *mut App;\n &mut *app\n };\n myapp.mem = Slice { data: mem_start, len: mem_size };\n myapp.offset = 0;\n myapp.chunks = [None; 100];\n\n init();\n\n loop {\n wait();\n }\n}\n\nfn init(){\n}\n\nstruct Box<T: ?Sized>{ pointer: *mut T }\n\nimpl<T> Box<T> {\n\n fn new(x: T) -> Box<T> {\n use core::mem;\n let myapp = unsafe { &mut *app };\n let size = mem::size_of::<T>();\n\n \/\/ First, see if there is an available chunk of the right size\n for chunk in myapp.chunks.iter_mut() {\n match *chunk {\n Some(mut chunk) => {\n if !chunk.inuse && chunk.slice.len >= size {\n chunk.inuse = true;\n return Box { pointer: chunk.slice.data as *mut T };\n }\n },\n None => { }\n }\n }\n\n \/\/ No existing chunks match, so allocate a new one\n match myapp.chunks.iter_mut().filter(|c| c.is_none()).next() {\n 
Some(slot) => {\n let chunk = Chunk {\n slice: Slice {\n data: unsafe {\n myapp.mem.data.offset(myapp.offset as isize)\n },\n len: size\n },\n inuse: true\n };\n myapp.offset += size;\n *slot = Some(chunk);\n Box{ pointer: chunk.slice.data as *mut T }\n },\n None => {\n panic!(\"OOM\")\n }\n }\n }\n}\n\nmod syscalls {\n\n #[allow(improper_ctypes)]\n extern {\n fn __allow(driver_num: usize, allownum: usize, ptr: *mut (), len: usize) -> isize;\n fn __subscribe(driver_num: usize, subnum: usize, cb: usize) -> isize;\n fn __command(driver_num: usize, cmdnum: usize, arg1: usize) -> isize;\n fn __wait() -> isize;\n }\n\n\n pub fn allow(driver_num: usize, allownum: usize, ptr: *mut (), len: usize) -> isize {\n unsafe {\n __allow(driver_num, allownum, ptr, len)\n }\n }\n\n pub fn command(driver_num: usize, cmdnum: usize, arg1: usize) -> isize {\n unsafe {\n __command(driver_num, cmdnum, arg1)\n }\n }\n\n pub fn subscribe(driver_num: usize, cmdnum: usize, callback: usize) -> isize {\n unsafe {\n __subscribe(driver_num, cmdnum, callback)\n }\n }\n\n pub fn wait() -> isize {\n unsafe {\n __wait()\n }\n }\n}\n\nmod tmp006 {\n use super::syscalls::{command, subscribe};\n\n pub fn enable_tmp006() {\n command(2, 0, 0);\n }\n\n pub fn subscribe_temperature(f: fn(i16)) {\n subscribe(2, 0, f as usize);\n }\n}\n\nmod console {\n use super::syscalls::{allow, command, subscribe};\n\n pub fn putc(c: char) {\n command(0, 0, c as usize);\n }\n\n pub fn subscribe_read_line(buf: *mut u8, len: usize,\n f: fn(usize, *mut u8)) -> isize {\n let res = allow(0, 0, buf as *mut (), len);\n if res < 0 {\n res\n } else {\n subscribe(0, 0, f as usize)\n }\n }\n\n pub fn subscribe_write_done(f: fn()) -> isize {\n subscribe(0, 1, f as usize)\n }\n}\n\nmod gpio {\n use super::syscalls::command;\n\n pub fn enable_pin(pin: usize) {\n command(1, 0, pin);\n }\n\n pub fn set_pin(pin: usize) {\n command(1, 2, pin);\n }\n\n pub fn clear_pin(pin: usize) {\n command(1, 3, pin);\n }\n\n pub fn toggle_pin(pin: usize) {\n command(1, 4, pin);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Don't omit semicolon after assertion<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue 36804<commit_after>\/\/ check-pass\n#![feature(specialization)]\n\npub struct Cloned<I>(I);\n\nimpl<'a, I, T: 'a> Iterator for Cloned<I>\nwhere\n I: Iterator<Item = &'a T>,\n T: Clone,\n{\n type Item = T;\n\n fn next(&mut self) -> Option<T> {\n unimplemented!()\n }\n}\n\nimpl<'a, I, T: 'a> Iterator for Cloned<I>\nwhere\n I: Iterator<Item = &'a T>,\n T: Copy,\n{\n fn count(self) -> usize {\n unimplemented!()\n }\n}\n\nfn main() {\n let a = [1,2,3,4];\n Cloned(a.iter()).count();\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagutil;\n\nuse std::process::exit;\n\nuse libimagrt::edit::Edit;\nuse libimagrt::runtime::Runtime;\nuse libimagnotes::note::Note;\nuse libimagutil::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-notes\";\n let version = &version!()[..];\n let about = \"Note taking helper\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.err().unwrap());\n exit(1);\n }\n };\n\n rt.cli()\n 
.subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn name_from_cli(rt: &Runtime, subcmd: &str) -> String {\n rt.cli().subcommand_matches(subcmd).unwrap().value_of(\"name\").map(String::from).unwrap()\n}\n\nfn create(rt: &Runtime) {\n let name = name_from_cli(rt, \"create\");\n Note::new(rt.store(), name.clone(), String::new())\n .map_err(|e| trace_error(&e))\n .ok();\n\n if rt.cli().subcommand_matches(\"create\").unwrap().is_present(\"edit\") {\n if !edit_entry(rt, name) {\n exit(1);\n }\n }\n}\n\nfn delete(rt: &Runtime) {\n Note::delete(rt.store(), String::from(name_from_cli(rt, \"delete\")))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"))\n .ok();\n}\n\nfn edit(rt: &Runtime) {\n edit_entry(rt, name_from_cli(rt, \"edit\"));\n}\n\nfn edit_entry(rt: &Runtime, name: String) -> bool {\n let note = Note::retrieve(rt.store(), name);\n if note.is_err() {\n trace_error(¬e.err().unwrap());\n warn!(\"Cannot edit nonexistent Note\");\n return false\n }\n\n let mut note = note.unwrap();\n if let Err(e) = note.edit_content(rt) {\n trace_error(&e);\n warn!(\"Editing failed\");\n return false\n }\n true\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.err().unwrap());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name()\n .map(|name| println!(\"{}\", name))\n .map_err(|e| trace_error(&e))\n .ok();\n }\n}\n\n<commit_msg>imag-notes: Replace .err().unwrap() with .unwrap_err()<commit_after>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagutil;\n\nuse std::process::exit;\n\nuse libimagrt::edit::Edit;\nuse libimagrt::runtime::Runtime;\nuse libimagnotes::note::Note;\nuse libimagutil::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let name = \"imag-notes\";\n let version = &version!()[..];\n let about = \"Note taking helper\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn name_from_cli(rt: &Runtime, subcmd: &str) -> String {\n rt.cli().subcommand_matches(subcmd).unwrap().value_of(\"name\").map(String::from).unwrap()\n}\n\nfn create(rt: &Runtime) {\n let name = name_from_cli(rt, \"create\");\n Note::new(rt.store(), name.clone(), String::new())\n .map_err(|e| trace_error(&e))\n .ok();\n\n if rt.cli().subcommand_matches(\"create\").unwrap().is_present(\"edit\") {\n if 
!edit_entry(rt, name) {\n exit(1);\n }\n }\n}\n\nfn delete(rt: &Runtime) {\n Note::delete(rt.store(), String::from(name_from_cli(rt, \"delete\")))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"))\n .ok();\n}\n\nfn edit(rt: &Runtime) {\n edit_entry(rt, name_from_cli(rt, \"edit\"));\n}\n\nfn edit_entry(rt: &Runtime, name: String) -> bool {\n let note = Note::retrieve(rt.store(), name);\n if note.is_err() {\n trace_error(¬e.unwrap_err());\n warn!(\"Cannot edit nonexistent Note\");\n return false\n }\n\n let mut note = note.unwrap();\n if let Err(e) = note.edit_content(rt) {\n trace_error(&e);\n warn!(\"Editing failed\");\n return false\n }\n true\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.unwrap_err());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name()\n .map(|name| println!(\"{}\", name))\n .map_err(|e| trace_error(&e))\n .ok();\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Only need to wait for a single thread to timeout<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>more work done on tableau<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Read the headers at index creation.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added multicast<commit_after>use multiqueue::{InnerSend, InnerRecv, BCast, Multicast, MultiQueue};\nuse countedindex::Index;\n\nuse std::sync::mpsc::{TrySendError, TryRecvError, RecvError};\n\n\n\/\/\/ This class is the sending half of the multicast Queue. 
It supports both\n\/\/\/ single and multi consumer modes with competitive performance in each case.\n\/\/\/ It only supports nonblocking writes (the futures sender being an exception)\n\/\/\/ as well as being the conduit for adding new writers.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::thread;\n\/\/\/\n\/\/\/ let (send, recv) = multiqueue::multiqueue(4);\n\/\/\/\n\/\/\/ let mut handles = vec![];\n\/\/\/\n\/\/\/ for i in 0..2 { \/\/ or n\n\/\/\/ let cur_recv = recv.add_stream();\n\/\/\/ for j in 0..2 {\n\/\/\/ let stream_consumer = cur_recv.clone();\n\/\/\/ handles.push(thread::spawn(move || {\n\/\/\/ for val in stream_consumer {\n\/\/\/ println!(\"Stream {} consumer {} got {}\", i, j, val);\n\/\/\/ }\n\/\/\/ }));\n\/\/\/ }\n\/\/\/ \/\/ cur_recv is dropped here\n\/\/\/ }\n\/\/\/\n\/\/\/ \/\/ Take notice that I drop the reader - this removes it from\n\/\/\/ \/\/ the queue, meaning that the readers in the new threads\n\/\/\/ \/\/ won't get starved by the lack of progress from recv\n\/\/\/ recv.unsubscribe();\n\/\/\/\n\/\/\/ for i in 0..10 {\n\/\/\/ \/\/ Don't do this busy loop in real stuff unless you're really sure\n\/\/\/ loop {\n\/\/\/ if send.try_send(i).is_ok() {\n\/\/\/ break;\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ drop(send);\n\/\/\/\n\/\/\/ for t in handles {\n\/\/\/ t.join();\n\/\/\/ }\n\/\/\/ \/\/ prints along the lines of\n\/\/\/ \/\/ Stream 0 consumer 1 got 2\n\/\/\/ \/\/ Stream 0 consumer 0 got 0\n\/\/\/ \/\/ Stream 1 consumer 0 got 0\n\/\/\/ \/\/ Stream 0 consumer 1 got 1\n\/\/\/ \/\/ Stream 1 consumer 1 got 1\n\/\/\/ \/\/ Stream 1 consumer 0 got 2\n\/\/\/ \/\/ etc\n\/\/\/ ```\n#[derive(Clone)]\npub struct MulticastSender<T: Clone> {\n sender: InnerSend<BCast<T>, T>,\n}\n\n\/\/\/ This class is the receiving half of the MultiQueue.\n\/\/\/ Within each stream, it supports both single and multi consumer modes\n\/\/\/ with competitive performance in each case. 
It supports blocking and\n\/\/\/ nonblocking read modes as well as being the conduit for adding\n\/\/\/ new streams.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::thread;\n\/\/\/\n\/\/\/ let (send, recv) = multiqueue::multiqueue(4);\n\/\/\/\n\/\/\/ let mut handles = vec![];\n\/\/\/\n\/\/\/ for i in 0..2 { \/\/ or n\n\/\/\/ let cur_recv = recv.add_stream();\n\/\/\/ for j in 0..2 {\n\/\/\/ let stream_consumer = cur_recv.clone();\n\/\/\/ handles.push(thread::spawn(move || {\n\/\/\/ for val in stream_consumer {\n\/\/\/ println!(\"Stream {} consumer {} got {}\", i, j, val);\n\/\/\/ }\n\/\/\/ }));\n\/\/\/ }\n\/\/\/ \/\/ cur_recv is dropped here\n\/\/\/ }\n\/\/\/\n\/\/\/ \/\/ Take notice that I drop the reader - this removes it from\n\/\/\/ \/\/ the queue, meaning that the readers in the new threads\n\/\/\/ \/\/ won't get starved by the lack of progress from recv\n\/\/\/ recv.unsubscribe();\n\/\/\/\n\/\/\/ for i in 0..10 {\n\/\/\/ \/\/ Don't do this busy loop in real stuff unless you're really sure\n\/\/\/ loop {\n\/\/\/ if send.try_send(i).is_ok() {\n\/\/\/ break;\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ drop(send);\n\/\/\/\n\/\/\/ for t in handles {\n\/\/\/ t.join();\n\/\/\/ }\n\/\/\/ \/\/ prints along the lines of\n\/\/\/ \/\/ Stream 0 consumer 1 got 2\n\/\/\/ \/\/ Stream 0 consumer 0 got 0\n\/\/\/ \/\/ Stream 1 consumer 0 got 0\n\/\/\/ \/\/ Stream 0 consumer 1 got 1\n\/\/\/ \/\/ Stream 1 consumer 1 got 1\n\/\/\/ \/\/ Stream 1 consumer 0 got 2\n\/\/\/ \/\/ etc\n\/\/\/ ```\n#[derive(Clone)]\npub struct MulticastReceiver<T: Clone> {\n reader: InnerRecv<BCast<T>, T>,\n}\n\n\n\/\/\/ This class is similar to the receiver, except it ensures that there\n\/\/\/ is only one consumer for the stream it owns. This means that\n\/\/\/ one can safely view the data in-place with the recv_view method family\n\/\/\/ and avoid the cost of copying it. 
If there's only one receiver on a stream,\n\/\/\/ it can be converted into a UniInnerRecv\n\/\/\/\n\/\/\/ # Example:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use multiqueue::multiqueue;\n\/\/\/\n\/\/\/ let (w, r) = multiqueue(10);\n\/\/\/ w.try_send(1).unwrap();\n\/\/\/ let r2 = r.clone();\n\/\/\/ \/\/ Fails since there's two receivers on the stream\n\/\/\/ assert!(r2.into_single().is_err());\n\/\/\/ let single_r = r.into_single().unwrap();\n\/\/\/ let val = match single_r.try_recv_view(|x| 2 * *x) {\n\/\/\/ Ok(val) => val,\n\/\/\/ Err(_) => panic!(\"Queue should have an element\"),\n\/\/\/ };\n\/\/\/ assert_eq!(2, val);\n\/\/\/ ```\npub struct MulticastUniReceiver<T: Clone + Sync> {\n reader: InnerRecv<BCast<T>, T>,\n}\n\nimpl<T: Clone> MulticastReceiver<T> {\n\n \/\/\/ Tries to receive a value from the queue without blocking.\n \/\/\/\n \/\/\/ # Examples:\n \/\/\/\n \/\/\/ ```\n \/\/\/ use multiqueue::multicast_queue;\n \/\/\/ let (w, r) = multicast_queue(10);\n \/\/\/ w.try_send(1).unwrap();\n \/\/\/ assert_eq!(1, r.try_recv().unwrap());\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```\n \/\/\/ use multiqueue::multiqueue;\n \/\/\/ use std::thread;\n \/\/\/\n \/\/\/ let (send, recv) = multiqueue(10);\n \/\/\/\n \/\/\/ let handle = thread::spawn(move || {\n \/\/\/ for val in recv {\n \/\/\/ println!(\"Got {}\", val);\n \/\/\/ }\n \/\/\/ });\n \/\/\/\n \/\/\/ for i in 0..10 {\n \/\/\/ send.try_send(i).unwrap();\n \/\/\/ }\n \/\/\/\n \/\/\/ \/\/ Drop the sender to close the queue\n \/\/\/ drop(send);\n \/\/\/\n \/\/\/ handle.join();\n \/\/\/ ```\n pub fn try_recv(&self) -> Result<T, TryRecvError> {\n self.reader.try_recv()\n }\n\n pub fn recv(&self) -> Result<T, RecvError> {\n self.reader.recv()\n }\n\n \/\/\/ Adds a new data stream to the queue, starting at the same position\n \/\/\/ as the InnerRecv this is being called on.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use multiqueue::multiqueue;\n \/\/\/ let (w, r) = multiqueue(10);\n \/\/\/ w.try_send(1).unwrap();\n \/\/\/ assert_eq!(r.recv().unwrap(), 1);\n \/\/\/ w.try_send(1).unwrap();\n \/\/\/ let r2 = r.add_stream();\n \/\/\/ assert_eq!(r.recv().unwrap(), 1);\n \/\/\/ assert_eq!(r2.recv().unwrap(), 1);\n \/\/\/ assert!(r.try_recv().is_err());\n \/\/\/ assert!(r2.try_recv().is_err());\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```\n \/\/\/ use multiqueue::multiqueue;\n \/\/\/\n \/\/\/ use std::thread;\n \/\/\/\n \/\/\/ let (send, recv) = multiqueue(4);\n \/\/\/ let mut handles = vec![];\n \/\/\/ for i in 0..2 { \/\/ or n\n \/\/\/ let cur_recv = recv.add_stream();\n \/\/\/ handles.push(thread::spawn(move || {\n \/\/\/ for val in cur_recv {\n \/\/\/ println!(\"Stream {} got {}\", i, val);\n \/\/\/ }\n \/\/\/ }));\n \/\/\/ }\n \/\/\/\n \/\/\/ \/\/ Take notice that I drop the reader - this removes it from\n \/\/\/ \/\/ the queue, meaning that the readers in the new threads\n \/\/\/ \/\/ won't get starved by the lack of progress from recv\n \/\/\/ recv.unsubscribe();\n \/\/\/\n \/\/\/ for i in 0..10 {\n \/\/\/ \/\/ Don't do this busy loop in real stuff unless you're really sure\n \/\/\/ loop {\n \/\/\/ if send.try_send(i).is_ok() {\n \/\/\/ break;\n \/\/\/ }\n \/\/\/ }\n \/\/\/ }\n \/\/\/\n \/\/\/ \/\/ Drop the sender to close the queue\n \/\/\/ drop(send);\n \/\/\/\n \/\/\/ for t in handles {\n \/\/\/ t.join();\n \/\/\/ }\n \/\/\/\n \/\/\/ ```\n\n pub fn add_stream(&self) -> MulticastReceiver<T> {\n MulticastReceiver { reader: self.reader.add_stream() }\n }\n\n \/\/\/ Removes the given reader from the queue subscription lib\n \/\/\/ Returns true if this is the 
last reader in a given broadcast unit\n    \/\/\/\n    \/\/\/ # Examples\n    \/\/\/\n    \/\/\/ ```\n    \/\/\/ use multiqueue::multiqueue;\n    \/\/\/ let (writer, reader) = multiqueue(1);\n    \/\/\/ let reader_2_1 = reader.add_stream();\n    \/\/\/ let reader_2_2 = reader_2_1.clone();\n    \/\/\/ writer.try_send(1).expect(\"This will succeed since queue is empty\");\n    \/\/\/ reader.try_recv().expect(\"This reader can read\");\n    \/\/\/ assert!(writer.try_send(1).is_err(), \"This fails since the reader2 group hasn't advanced\");\n    \/\/\/ assert!(!reader_2_2.unsubscribe(), \"This returns false since reader_2_1 is still alive\");\n    \/\/\/ assert!(reader_2_1.unsubscribe(),\n    \/\/\/         \"This returns true since there are no readers alive in the reader_2_x group\");\n    \/\/\/ writer.try_send(1).expect(\"This succeeds since the reader_2 group is not blocking\");\n    \/\/\/ ```\n    pub fn unsubscribe(self) -> bool {\n        self.reader.unsubscribe()\n    }\n}\n\nimpl<T: Clone + Sync> MulticastUniReceiver<T> {\n    \/\/\/ Identical to MulticastReceiver::try_recv\n    pub fn try_recv(&self) -> Result<T, TryRecvError> {\n        self.reader.try_recv()\n    }\n\n    \/\/\/ Identical to MulticastReceiver::recv\n    pub fn recv(&self) -> Result<T, RecvError> {\n        self.reader.recv()\n    }\n\n    \/\/\/ Applies the passed function to the value in the queue without copying it out\n    \/\/\/ If there is no data in the queue or the writers have disconnected,\n    \/\/\/ returns an Err((F, TryRecvError))\n    \/\/\/\n    \/\/\/ # Example\n    \/\/\/ ```\n    \/\/\/ use multiqueue::multicast_queue;\n    \/\/\/\n    \/\/\/ let (w, r) = multicast_queue(10);\n    \/\/\/ let single_r = r.into_single().unwrap();\n    \/\/\/ for i in 0..5 {\n    \/\/\/     w.try_send(i).unwrap();\n    \/\/\/ }\n    \/\/\/\n    \/\/\/ for i in 0..5 {\n    \/\/\/     let val = match single_r.try_recv_view(|x| 1 + *x) {\n    \/\/\/         Ok(val) => val,\n    \/\/\/         Err(_) => panic!(\"Queue shouldn't be disconnected or empty\"),\n    \/\/\/     };\n    \/\/\/     assert_eq!(i + 1, val);\n    \/\/\/ }\n    \/\/\/ assert!(single_r.try_recv_view(|x| *x).is_err());\n    \/\/\/ drop(w);\n    \/\/\/ assert!(single_r.try_recv_view(|x| *x).is_err());\n    \/\/\/ ```\n    pub fn try_recv_view<R, F: FnOnce(&T) -> R>(&self, op: F) -> Result<R, (F, TryRecvError)> {\n        let mut_w = |v: &mut T| op(v);\n        self.reader.try_recv_view(mut_w)\n    }\n\n    \/\/\/ Applies the passed function to the value in the queue without copying it out\n    \/\/\/ If there is no data in the queue, blocks until an item is pushed into the queue\n    \/\/\/ or all writers disconnect\n    \/\/\/\n    \/\/\/ # Example\n    \/\/\/ ```\n    \/\/\/ use multiqueue::multiqueue;\n    \/\/\/\n    \/\/\/ let (w, r) = multiqueue(10);\n    \/\/\/ let single_r = r.into_single().unwrap();\n    \/\/\/ for i in 0..5 {\n    \/\/\/     w.try_send(i).unwrap();\n    \/\/\/ }\n    \/\/\/\n    \/\/\/ for i in 0..5 {\n    \/\/\/     let val = match single_r.recv_view(|x| 1 + *x) {\n    \/\/\/         Ok(val) => val,\n    \/\/\/         Err(_) => panic!(\"Queue shouldn't be disconnected or empty\"),\n    \/\/\/     };\n    \/\/\/     assert_eq!(i + 1, val);\n    \/\/\/ }\n    \/\/\/ drop(w);\n    \/\/\/ assert!(single_r.recv_view(|x| *x).is_err());\n    \/\/\/ ```\n    pub fn recv_view<R, F: FnOnce(&T) -> R>(&self, op: F) -> Result<R, (F, RecvError)> {\n        let mut_w = |v: &mut T| op(v);\n        self.reader.recv_view(mut_w)\n    }\n\n    \/\/\/ Almost identical to MulticastReceiver::unsubscribe, except it doesn't\n    \/\/\/ return a boolean of whether this was the last receiver on the stream\n    \/\/\/ because a receiver of this type must be the last one on the stream\n    pub fn unsubscribe(self) {\n        self.reader.unsubscribe();\n    }\n}\n\npub fn multicast_queue<T: Clone>(capacity: Index) -> 
(MulticastSender<T>, MulticastReceiver<T>) {\n let (send, recv) = MultiQueue::<BCast<T>, T>::new(capacity);\n (MulticastSender { sender: send }, MulticastReceiver { reader: recv })\n}\n\nunsafe impl<T: Send + Clone> Send for MulticastSender<T> {}\nunsafe impl<T: Send + Clone> Send for MulticastReceiver<T> {}\nunsafe impl<T: Send + Clone + Sync> Send for MulticastUniReceiver<T> {}\n\npub struct MulticastIter<T: Clone> {\n recv: MulticastReceiver<T>,\n}\n\nimpl<T: Clone> Iterator for MulticastIter<T> {\n type Item = T;\n\n fn next(&mut self) -> Option<T> {\n match self.recv.recv() {\n Ok(val) => Some(val),\n Err(_) => None,\n }\n }\n}\n\nimpl<T: Clone> IntoIterator for MulticastReceiver<T> {\n type Item = T;\n\n type IntoIter = MulticastIter<T>;\n\n fn into_iter(self) -> MulticastIter<T> {\n MulticastIter { recv: self }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fail with pg-to-tar.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add start_prototype, a tool used for writing entry point assembly.<commit_after>\/\/ The `start` symbol must be written purely in assembly, because it has an ABI\n\/\/ that the Rust compiler doesn't know (e.g. it does not expect the stack to be\n\/\/ set up). One way to write a correct `start` implementation is to write it in\n\/\/ Rust using the C ABI, compile that implementation, then tweak the assembly by\n\/\/ hand. This is a Rust version of `start` for developers who are working on\n\/\/ `start`.\n\/\/\n\/\/ This file is not compiled by default; it must be included manually by adding\n\/\/ `mod start_prototype;` to `lib.rs`.\n\n#[repr(C)]\nstruct RtHeader {\n start: usize,\n initial_break: usize,\n stack_top: usize,\n data_size: usize,\n data_flash_start: *const u32,\n data_ram_start: *mut u32,\n bss_size: usize,\n bss_start: *mut u8,\n}\n\n#[link_section = \".start\"]\n#[no_mangle]\nextern fn start_prototype(\n rt_header: &RtHeader,\n _memory_start: usize,\n _memory_len: usize,\n _app_break: usize,\n) -> ! {\n use crate::TockSyscalls;\n use libtock_platform::{OneArgMemop, RawSyscalls, YieldType};\n\n let pc: usize;\n #[cfg(target_arch = \"riscv32\")]\n unsafe {\n asm!(\"auipc {}, 0\", lateout(reg) pc, options(nomem, nostack, preserves_flags));\n }\n if pc != rt_header.start {\n \/\/ Binary is in an incorrect location: report an error via\n \/\/ LowLevelDebug.\n unsafe {\n TockSyscalls::four_arg_syscall(8, 1, 2, 0, 2);\n }\n \/\/ TODO: Replace with an Exit call when exit is implemented.\n loop {\n TockSyscalls::raw_yield(YieldType::Wait);\n }\n }\n\n \/\/ Set the app break.\n \/\/ TODO: Replace with Syscalls::memop_brk() when that is implemented.\n TockSyscalls::one_arg_memop(OneArgMemop::Brk, rt_header.initial_break);\n\n \/\/ Set the stack pointer.\n #[cfg(target_arch = \"riscv32\")]\n unsafe {\n asm!(\"mv sp, {}\", in(reg) rt_header.stack_top, options(nomem, preserves_flags));\n }\n\n \/\/ Copy .data into place. Uses a manual loop rather than\n \/\/ `core::ptr::copy*()` to avoid relying on `memcopy` or `memmove`.\n let mut remaining = rt_header.data_size;\n let mut src = rt_header.data_flash_start;\n let mut dest = rt_header.data_ram_start;\n while remaining > 0 {\n unsafe {\n core::ptr::write(dest, *(src));\n src = src.add(1);\n dest = dest.add(1);\n }\n remaining -= 4;\n }\n\n \/\/ Zero .bss. 
Uses a manual loop and volatile write to avoid relying on\n \/\/ `memset`.\n let mut remaining = rt_header.bss_size;\n let mut dest = rt_header.bss_start;\n while remaining > 0 {\n unsafe {\n core::ptr::write_volatile(dest, 0);\n dest = dest.add(1);\n }\n remaining -= 1;\n }\n\n extern {\n fn rust_start() -> !;\n }\n\n unsafe { rust_start(); }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove Clippy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added bot.rs<commit_after>use std::io;\nuse std::io::prelude::*;\nuse std::str::FromStr;\n\n#[derive(Copy,Clone)]\nstruct Bot {\n x:i32,\n y:i32,\n}\n\nimpl Bot {\n fn new() -> Self {\n Bot {x:0,y:0}\n }\n}\n\nfn main() {\n let mut myBot = Bot::new();\n let mut moves:[u8;1] = [0;1];\n let mut moves_vec = Vec::<i32>::new();\n while moves[0] as char != '\\n' {\n io::stdin().read(&mut moves).ok().expect(\"read error\");\n let num = 1 + i32::from_str(&moves[0].to_string()[..]).unwrap() % 49;\n moves_vec.push(num);\n }\n\n let mut not_working:[u8;1] = [0;1];\n io::stdin().read(&mut not_working).ok().expect(\"read_error\");\n let not_working = 1 + i32::from_str(¬_working[0].to_string()[..]).unwrap() % 49;\n moves_vec.pop();\n for i in moves_vec {\n match i {\n 1 => { if i == not_working {} else {myBot.x +=1}},\n 2 => { if i == not_working {} else {myBot.x +=-1}}, \n 3 => { if i == not_working {} else {myBot.y +=1}},\n 4 => { if i == not_working {} else {myBot.y +=-1}},\n _ => {},\n }\n\n }\n\n println!(\"{:?}\\n{:?}\\n\",myBot.x,myBot.y);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Removes panic! calls from the Nbt implementation.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixup the indexing in mv.rs to get the appropriate args<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don't use to_string to get number of u64 digits<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Use fill storeid when listing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix build.rs whitespace<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added AsMilliseconds trait for Duration<commit_after><|endoftext|>"} {"text":"<commit_before>struct Point { x: i32, y: i32, z: i32 }\n\nfn main() {\n let mut point = Point { x: 0, y: 0, z: 0 };\n\n {\n let borrowed_point = &point;\n let another_borrow = &point;\n\n \/\/ Data can be accessed via the references and the original owner\n println!(\"Point has coordinates: ({}, {}, {})\",\n borrowed_point.x, another_borrow.y, point.z);\n\n \/\/ Error! Can't borrow point as mutable because it's currently\n \/\/ borrowed as immutable.\n \/\/let mutable_borrow = &mut point;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Immutable references go out of scope\n }\n\n {\n let mutable_borrow = &mut point;\n\n \/\/ Change data via mutable reference\n mutable_borrow.x = 5;\n mutable_borrow.y = 2;\n mutable_borrow.z = 1;\n\n \/\/ Error! Can't borrow `point` as immutable because it's currently\n \/\/ borrowed as mutable.\n \/\/let y = &point.y;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Error! 
Can't print because `println!` takes an immutable reference.\n \/\/println!(\"Point Z coordinate is {}\", point.z);\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Mutable reference goes out of scope\n }\n\n \/\/ Immutable references to point are allowed again\n println!(\"Point now has coordinates: ({}, {}, {})\",\n point.x, point.y, point.z);\n}\n<commit_msg>Clarify single mutable reference vs mutliple references<commit_after>struct Point { x: i32, y: i32, z: i32 }\n\nfn main() {\n let mut point = Point { x: 0, y: 0, z: 0 };\n\n {\n let borrowed_point = &point;\n let another_borrow = &point;\n\n \/\/ Data can be accessed via the references and the original owner\n println!(\"Point has coordinates: ({}, {}, {})\",\n borrowed_point.x, another_borrow.y, point.z);\n\n \/\/ Error! Can't borrow point as mutable because it's currently\n \/\/ borrowed as immutable.\n \/\/let mutable_borrow = &mut point;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Immutable references go out of scope\n }\n\n {\n let mutable_borrow = &mut point;\n\n \/\/ Change data via mutable reference\n mutable_borrow.x = 5;\n mutable_borrow.y = 2;\n mutable_borrow.z = 1;\n\n \/\/ Error! Can't borrow `point` as immutable because it's currently\n \/\/ borrowed as mutable.\n \/\/let y = &point.y;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Error! Can't print because `println!` takes an immutable reference.\n \/\/println!(\"Point Z coordinate is {}\", point.z);\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Ok! Mutable references can be passed as immutable to `println!`\n println!(\"Point has coordinates: ({}, {}, {})\",\n mutable_borrow.x, mutable_borrow.y, mutable_borrow.z);\n\n \/\/ Mutable reference goes out of scope\n }\n\n \/\/ Immutable references to point are allowed again\n let borrowed_point = &point;\n println!(\"Point now has coordinates: ({}, {}, {})\",\n borrowed_point.x, borrowed_point.y, borrowed_point.z);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>minor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Created variables for DRY purposes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>removed example with ncurses menu<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate piston;\nextern crate graphics;\nextern crate piston_window;\nextern crate time;\nextern crate rand;\nextern crate ai_behavior;\nextern crate cgmath;\nextern crate opengl_graphics;\n\nmod app;\nmod entity;\nmod player;\nmod config;\nmod person;\n\nuse player::Player;\nuse entity::Entity;\n\nuse piston_window::{ PistonWindow, WindowSettings };\nuse piston::input::*;\nuse piston::event_loop::*;\nuse opengl_graphics::*;\nuse graphics::{ Image, clear, default_draw_state };\nuse graphics::rectangle::square;\nuse std::path::Path;\nuse rand::{Rng, SeedableRng, XorShiftRng};\n\nuse cgmath::rad;\nuse cgmath::{ Vector2, Vector4 };\nuse cgmath::{ Rotation2, Basis2 };\n\nfn transform_camera_coords(player : &Player, x : u32, y: u32, width : u32, height : u32) -> (i32, i32) {\n return (\n x as i32 - player.get_position().x as i32 + (width as f32 \/ 2f32) as i32 ,\n y as i32 - player.get_position().y as i32 + (height as f32 \/ 2f32) as i32\n );\n}\n\nfn draw_background(x: u32, y: u32, context: graphics::context::Context, gl_graphics: &mut GlGraphics, textures: &Vec<Texture>, seed: [u32;4], player : &mut Player) {\n let mut rng1: XorShiftRng = SeedableRng::from_seed(seed);\n let txt: &Texture = textures.get(0).unwrap();\n let (width, height) = txt.get_size();\n for i in 0..(x\/width) + 1 {\n 
for j in 0..(y\/height) + 1 {\n let (k, l) = transform_camera_coords(player, i, j, x, y);\n let rand = rng1.gen::<u32>() % textures.len() as u32;\n let txt: &Texture = textures.get(rand as usize).unwrap();\n let (width, height) = txt.get_size();\n let image = Image::new().rect(square((i * width) as f64, (j * height) as f64, width as f64));\n image.draw(txt, default_draw_state(), context.transform, gl_graphics);\n }\n }\n}\n\nfn main() {\n let mut rng = rand::thread_rng();\n let seed: [u32;4] = [rng.gen::<u32>(), rng.gen::<u32>(), rng.gen::<u32>(), rng.gen::<u32>()];\n let opengl = OpenGL::V3_2;\n let mut window: PistonWindow = WindowSettings::new(\"GGJ2016\", [800, 600])\n .exit_on_esc(true)\n .opengl(opengl)\n .build()\n .unwrap_or_else(|e| { panic!(\"Failed to build PistonWindow: {}\", e) });\n window.set_ups(60);\n let mut gl = GlGraphics::new(opengl);\n\n let mut app = app::App::new();\n\n let emoji = Texture::from_path(Path::new(\"assets\/img\/emoji\/77.png\")).unwrap();\n app.add_entity(Box::new(person::Person::new(emoji, Vector2::new(50.0, 50.0))));\n\n \/\/ Add player to entities (player instanciated in app)\n \/\/app.add_entity(Box::new(player::Player::new()));\n\n let mut textures :Vec<Texture>= Vec::new();\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_01.jpg\")).unwrap());\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_02.jpg\")).unwrap());\n\n for e in window {\n if let Some(args) = e.press_args() {\n app.key_press(args);\n println!(\"asda\");\n }\n\n if let Some(args) = e.update_args() {\n app.update(args);\n }\n\n if let Some(args) = e.render_args() {\n gl.draw(args.viewport(), |c, gl| {\n clear([0.5, 0.2, 0.9, 1.0], gl);\n draw_background(args.width, args.height, c, gl, &textures, seed, app.get_player());\n\n app.render(c, gl, args);\n });\n }\n }\n}\n<commit_msg>camera now working properly.<commit_after>extern crate piston;\nextern crate graphics;\nextern crate piston_window;\nextern crate time;\nextern crate rand;\nextern crate ai_behavior;\nextern crate cgmath;\nextern crate opengl_graphics;\n\nmod app;\nmod entity;\nmod player;\nmod config;\nmod person;\n\nuse player::Player;\nuse entity::Entity;\n\nuse piston_window::{ PistonWindow, WindowSettings };\nuse piston::input::*;\nuse piston::event_loop::*;\nuse opengl_graphics::*;\nuse graphics::{ Image, clear, default_draw_state };\nuse graphics::rectangle::square;\nuse std::path::Path;\nuse rand::{Rng, SeedableRng, XorShiftRng};\n\nuse cgmath::rad;\nuse cgmath::{ Vector2, Vector4 };\nuse cgmath::{ Rotation2, Basis2 };\n\nfn transform_camera_coords(player : &Player, x : u32, y: u32, width : u32, height : u32) -> (f64, f64) {\n return (\n x as f64 - player.get_position().x as f64 + (width as f64 \/ 2f64) as f64 ,\n y as f64 - player.get_position().y as f64 + (height as f64 \/ 2f64) as f64\n );\n}\n\nfn draw_background(x: u32, y: u32, context: graphics::context::Context, gl_graphics: &mut GlGraphics, textures: &Vec<Texture>, seed: [u32;4], player : &mut Player) {\n let mut rng1: XorShiftRng = SeedableRng::from_seed(seed);\n let txt: &Texture = textures.get(0).unwrap();\n let (width, height) = txt.get_size();\n for i in 0..(x\/width) + 1 {\n for j in 0..(y\/height) + 1 {\n let (k, l) = transform_camera_coords(player, i, j, x, y);\n let rand = rng1.gen::<u32>() % textures.len() as u32;\n let txt: &Texture = textures.get(rand as usize).unwrap();\n let (width, height) = txt.get_size();\n let image = Image::new().rect(square((i as f64 * width as f64) as f64 + k, (j 
as f64 * height as f64) as f64 + l, width as f64));\n println!(\"({}, {}) = ({}, {})\", (k * width as f64) as f64, (l * height as f64) as f64, k, l);\n image.draw(txt, default_draw_state(), context.transform, gl_graphics);\n }\n }\n}\n\nfn main() {\n let mut rng = rand::thread_rng();\n let seed: [u32;4] = [rng.gen::<u32>(), rng.gen::<u32>(), rng.gen::<u32>(), rng.gen::<u32>()];\n let opengl = OpenGL::V3_2;\n let mut window: PistonWindow = WindowSettings::new(\"GGJ2016\", [800, 600])\n .exit_on_esc(true)\n .opengl(opengl)\n .build()\n .unwrap_or_else(|e| { panic!(\"Failed to build PistonWindow: {}\", e) });\n window.set_ups(60);\n let mut gl = GlGraphics::new(opengl);\n\n let mut app = app::App::new();\n\n let emoji = Texture::from_path(Path::new(\"assets\/img\/emoji\/77.png\")).unwrap();\n app.add_entity(Box::new(person::Person::new(emoji, Vector2::new(50.0, 50.0))));\n\n \/\/ Add player to entities (player instanciated in app)\n \/\/app.add_entity(Box::new(player::Player::new()));\n\n let mut textures :Vec<Texture>= Vec::new();\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_01.jpg\")).unwrap());\n textures.push(Texture::from_path(Path::new(\"assets\/img\/ground\/placeholder_02.jpg\")).unwrap());\n\n for e in window {\n if let Some(args) = e.press_args() {\n app.key_press(args);\n println!(\"asda\");\n }\n\n if let Some(args) = e.update_args() {\n app.update(args);\n }\n\n if let Some(args) = e.render_args() {\n gl.draw(args.viewport(), |c, gl| {\n clear([0.5, 0.2, 0.9, 1.0], gl);\n draw_background(args.width, args.height, c, gl, &textures, seed, app.get_player());\n\n app.render(c, gl, args);\n });\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Export nat literal helpers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add time module<commit_after>\/\/! Information about frame timing.\n\/\/!\n\/\/! By default the engine will run at 60 fps (giving a delta of 16.667 ms), but it will change\n\/\/! its fixed framerate if necessary. For example, if the game fails to meet 60 fps, the engine\n\/\/! will throttle down to 30 fps (with a delta of 33.333 ms) until it can return to 60 fps. The\n\/\/! time delta doesn't represent the exact amount of time it took to complete the last frame,\n\/\/! rather it gives the current locked framerate for the game. Therefore, game code can be\n\/\/! written with the assumption of a fixed time step (i.e. the delta will be the same\n\/\/! 
frame-to-frame) even if the exact time step may occaisonally change in practice.\n\nuse std::time::Duration;\n\n\/\/\/ Returns the exact time between frames.\n\/\/\/\n\/\/\/ See module documentation for more information about frame timing.\npub fn delta() -> Duration {\n Duration::new(1, 0) \/ 60\n}\n\n\/\/\/ Returns the current time between frames in seconds.\n\/\/\/\n\/\/\/ See module documentation for more information about frame timing.\npub fn delta_f32() -> f32 {\n 1.0 \/ 60.0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>using most of space on character screen now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Smaller MAX_OBJECTS in test cfg<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>first house in the village<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Used to store the old file just in case<commit_after>extern crate sdl2;\n\n#[derive(Debug)]\npub struct DepthPoint {\n pub x: f64,\n pub y: f64,\n pub z: f64,\n pub x_z: f64,\n pub angle_view: f64,\n}\n\nimpl DepthPoint {\n pub fn new(x: f64, y: f64, z: f64) -> DepthPoint {\n DepthPoint {\n x: x, \n y: y,\n z: z,\n x_z: 0.0,\n angle_view: 0.0,\n }\n }\n\n pub fn sdl_point(&self) -> sdl2::rect::Point {\n sdl2::rect::Point::new((self.x as f64\/self.z) as i32, (self.y as f64\/self.z) as i32)\n }\n\n pub fn perspect_point(&mut self, w: i32, h: i32) -> sdl2::rect::Point { \n if self.z > -0.01 && self.z < 0.0 {\n self.z = 0.001\n }\n\n else if self.z < 0.1 { \/\/ Prevents division by nearly 0, that cause integer overflow\/underflow\n self.z = 0.11;\n }\n\n sdl2::rect::Point::new(\n ((w as f64 * self.x as f64\/self.z) + w as f64) as i32, \n ((w as f64 * self.y as f64\/self.z) + h as f64) as i32)\n }\n\n pub fn set_x (&mut self, x: f64) {\n self.x = x;\n }\n\n pub fn set_y (&mut self, y: f64) {\n self.y = y;\n }\n\n pub fn set_z (&mut self, z: f64) {\n self.z = z;\n }\n\n pub fn rotate_x_y(&mut self, cx: f64, cy: f64, angle: f64) {\n use std::f64;\n let s = f64::sin(angle);\n let c = f64::cos(angle);\n\n self.x -= cx;\n self.y -= cy;\n\n let new_x = self.x * c - self.y * s;\n let new_y = self.x * s + self.y * c;\n\n self.x = new_x + cx;\n self.y = new_y + cy;\n }\n\n pub fn rotate_x_z(&mut self, cx: f64, cz: f64, angle: f64) {\n use std::f64;\n let s = f64::sin(angle);\n let c = f64::cos(angle);\n\n self.x -= cx;\n self.z -= cz;\n\n let new_x = self.x * c - self.z * s;\n let new_z = self.x * s + self.z * c;\n\n self.x = new_x + cx;\n self.z = new_z + cz;\n self.x_z += angle;\n\n let angle_view = (f64::atan2(cx - self.x, cz - self.z) * 180.0 \/ f64::consts::PI);\n\n self.angle_view = angle_view;\n\n }\n\n pub fn rotate_y_z(&mut self, cy: f64, cz: f64, angle: f64) {\n use std::f64;\n let s = f64::sin(angle);\n let c = f64::cos(angle);\n\n self.y -= cy;\n self.z -= cz;\n\n let new_y = self.y * c - self.z * s;\n let new_z = self.y * s + self.z * c;\n\n self.y = new_y + cy;\n self.z = new_z + cz;\n }\n\n pub fn clone(&self) -> DepthPoint {\n DepthPoint {\n x: self.x, \n y: self.y,\n z: self.z,\n x_z: self.x_z,\n angle_view: self.angle_view,\n }\n }\n}\n#[derive(Debug)]\npub struct Square {\n pub points: Vec<DepthPoint>,\n pub x: f64,\n pub y: f64,\n pub z: f64,\n pub angle_view: f64,\n}\n\nimpl Square {\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: DepthPoint, p4: DepthPoint) -> Square {\n Square {\n points: vec![p1, p2, p3, p4],\n x: 0.0,\n y: 0.0,\n z: 0.0,\n angle_view: 0.0,\n }\n }\n\n pub fn flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer, cx: 
f64, cy: f64, cz: f64) {\n let mut return_buffer = Vec::<sdl2::rect::Point>::new();\n for point in &mut self.points { \n use std::f64;\n let point_x = point.x;\n let point_y = point.y;\n let point_z = point.z;\n\n point.set_x(point_x+self.x);\n point.set_y(point_y+self.y);\n point.set_z(point_z+self.z);\n\n let pers_point = point.perspect_point(w, h); \n\n\n if !(point.angle_view < 100.0 && point.angle_view > 0.0\n || point.angle_view < 0.0 && point.angle_view > -100.0) {\n return_buffer.push(pers_point);\n }\n \/* CAUSES A COOL BUG WHEN UNCOMMENTED! I RECOMMEND TRYING IT OUT.\n * Bug itself happens because the points position doesn't get reset when i just jump out to the next point in the loop.\n else {\n continue;\n }\n *\/\n \n \n point.set_x(point_x);\n point.set_y(point_y);\n point.set_z(point_z);\n }\n \n let point_x = self.points[0].x;\n let point_y = self.points[0].y; \n let point_z = self.points[0].z;\n\n self.points[0].set_x(point_x+self.x);\n self.points[0].set_y(point_y+self.y);\n self.points[0].set_z(point_z+self.z);\n \n return_buffer.push(self.points[0].perspect_point(w, h));\n renderer.draw_lines(&return_buffer);\n\n self.points[0].set_x(point_x);\n self.points[0].set_y(point_y);\n self.points[0].set_z(point_z);\n }\n\n pub fn set_x (&mut self, x: f64) {\n self.x = x;\n }\n\n pub fn set_y (&mut self, y: f64) {\n self.y = y;\n }\n\n pub fn set_z (&mut self, z: f64) {\n self.z = z;\n }\n}\n\n#[derive(Debug)]\npub struct Cube {\n pub faces: Vec<Square>,\n pub x: f64,\n pub y: f64,\n pub z: f64,\n}\n\nimpl Cube {\n pub fn new(p1: Square, p2: Square, p3: Square, p4: Square) -> Cube {\n Cube {\n faces: vec![p1, p2, p3, p4],\n x: 0.0,\n y: 0.0,\n z: 0.0,\n }\n }\n\n pub fn gen_new(x: f64, y: f64, z:f64, x_s: f64, y_s: f64, z_s:f64) -> Cube {\n let face1 = Square::new(DepthPoint::new(x_s, y_s, z_s), \n DepthPoint::new(-x_s, y_s, z_s), \n DepthPoint::new(-x_s, -y_s, z_s),\n DepthPoint::new(x_s, -y_s, z_s));\n\n let face2 = Square::new(DepthPoint::new(x_s, y_s, -z_s), \n DepthPoint::new(x_s, -y_s, -z_s), \n DepthPoint::new(x_s, -y_s, z_s),\n DepthPoint::new(x_s, y_s, z_s));\n\n let face3 = Square::new(DepthPoint::new(x_s, y_s, -z_s), \n DepthPoint::new(-x_s, y_s, -z_s), \n DepthPoint::new(-x_s, -y_s, -z_s),\n DepthPoint::new(x_s, -y_s, -z_s));\n\n let face4 = Square::new(DepthPoint::new(-x_s, -y_s, -z_s), \n DepthPoint::new(-x_s, y_s, -z_s), \n DepthPoint::new(-x_s, y_s, z_s),\n DepthPoint::new(-x_s, -y_s, z_s));\n\n Cube {\n faces: vec![face1, face2, face3, face4],\n x: x,\n y: y,\n z: z,\n }\n }\n\n pub fn flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer, cx: f64, cy: f64, cz: f64, cxy: f64, cxz: f64, cyz: f64) {\n for face in &mut self.faces {\n let self_x = self.x;\n let self_y = self.y;\n let self_z = self.z;\n\n self.x += cx;\n self.y += cy;\n self.z += cz;\n\n for point in &mut face.points {\n \/\/let point_x = point.x;\n \/\/let point_y = point.y;\n \/\/let point_z = point.z;\n point.rotate_y_z((-cy + -self_y), (-cz + -self_z), cyz);\n point.rotate_x_z((-cx + -self_x), (-cz + -self_z), cxz);\n point.rotate_x_y((-cx + -self_x), (-cy + -self_y), cxy);\n }\n\n let face_x = face.x;\n let face_y = face.y;\n let face_z = face.z;\n\n face.set_x(face_x+self.x);\n face.set_y(face_y+self.y);\n face.set_z(face_z+self.z);\n\n let flat = face.flat(w, h, renderer, cx, cy, cz);\n\n face.set_x(face_x);\n face.set_y(face_y);\n face.set_z(face_z);\n\n self.x = self_x;\n self.y = self_y;\n self.z = self_z;\n }\n }\n}\n\npub struct Lines {\n pub lines: Vec<[DepthPoint; 
2]>,\n pub x: f64,\n pub y: f64,\n pub z: f64,\n}\n\nimpl Lines {\n pub fn new(line_vec: Vec<[DepthPoint; 2]>) -> Lines {\n Lines {\n lines: line_vec,\n x: 0.0,\n y: 0.0,\n z: 0.0,\n }\n }\n\n pub fn flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer,\n cx: f64, cy: f64, cz: f64, \n cxy: f64, cxz: f64, cyz: f64,\n draw: bool) {\n for line in &mut self.lines {\n \/\/ Apply rotations\n let self_x = self.x;\n let self_y = self.y;\n let self_z = self.z;\n\n self.x += cx;\n self.y += cy;\n self.z += cz;\n\n line[0].rotate_y_z((-cy + -self_y), (-cz + -self_z), cyz);\n line[0].rotate_x_z((-cx + -self_x), (-cz + -self_z), cxz);\n line[0].rotate_x_y((-cx + -self_x), (-cy + -self_y), cxy);\n\n line[1].rotate_y_z((-cy + -self_y), (-cz + -self_z), cyz);\n line[1].rotate_x_z((-cx + -self_x), (-cz + -self_z), cxz);\n line[1].rotate_x_y((-cx + -self_x), (-cy + -self_y), cxy);\n\n if !(line[0].angle_view < 100.0 && line[0].angle_view > 0.0\n || line[0].angle_view < 0.0 && line[0].angle_view > -100.0) {\n \/\/ Grab all the positions because Rust doesn't allow for a method \n \/\/ to take its instances class variable as an argument\n let mut line_begin_x = line[0].x;\n let mut line_begin_y = line[0].y;\n let mut line_begin_z = line[0].z;\n\n let mut line_end_x = line[1].x;\n let mut line_end_y = line[1].y;\n let mut line_end_z = line[1].z;\n\n line[0].x = line_begin_x + self.x;\n line[0].y = line_begin_y + self.y;\n line[0].z = line_begin_z + self.z;\n\n line[1].x = line_end_x + self.x;\n line[1].y = line_end_y + self.y;\n line[1].z = line_end_z + self.z;\n\n \/\/ Generate 2d lines. \n let line_begin = line[0].perspect_point(w, h); \n \/\/println!(\"{:?}\", line_begin);\n let line_end = line[1].perspect_point(w, h);\n if draw {renderer.draw_lines(&[line_begin, line_end]);};\n\n line[0].x = line_begin_x;\n line[0].y = line_begin_y;\n line[0].z = line_begin_z;\n\n line[1].x = line_end_x;\n line[1].y = line_end_y;\n line[1].z = line_end_z; \n\n self.x = self_x;\n self.y = self_y;\n self.z = self_z;\n }\n \n \/\/ Set points' positions back to the ones they had before.\n self.x = self_x;\n self.y = self_y;\n self.z = self_z;\n }\n }\n}\n\npub struct Triangle {\n pub points: [DepthPoint; 3],\n pub x: f64,\n pub y: f64,\n pub z: f64,\n}\n\nimpl Triangle {\n pub fn new(p1: DepthPoint, p2: DepthPoint, p3: DepthPoint) -> Triangle {\n Triangle {\n points: [p1, p2, p3],\n x: 0.0,\n y: 0.0,\n z: 0.0,\n }\n }\n\n pub fn flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer,\n cx: f64, cy: f64, cz: f64, \n cxy: f64, cxz: f64, cyz: f64,\n draw: bool) \n {\n\n \/\/println!(\"{:?}\", self.points[0].perspect_point(w, h).x());\n \/\/MOST PROBABLY USELESS MEMORY REALLOCATIONS, BUT SUCH IS LIFE.\n let mut lines = Lines::new(vec![[self.points[0].clone(), self.points[1].clone()],\n [self.points[1].clone(), self.points[2].clone()],\n [self.points[2].clone(), self.points[0].clone()],]);\n\n lines.flat(w, h, renderer,\n cx, cy, cz,\n cxy, cxz, cyz, \n draw);\n\n self.points = [lines.lines[0][0].clone(), lines.lines[1][0].clone(), lines.lines[2][0].clone()];\n }\n\n pub fn fill_bottom_flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer,\n cx: f64, cy: f64, cz: f64, \n cxy: f64, cxz: f64, cyz: f64) \n {\n let mut largest_y = -2147483648; \/\/ used value from someone gave on irc because std::i32::MIN wasnt feeling like working at the moment \n let mut largest_index = 0 as usize;\n\n let p1 = self.points[0].clone();\n let p2 = self.points[1].clone();\n let p3 = 
self.points[2].clone();\n\n let mut iterator = 0;\n let mut sdl_points: [sdl2::rect::Point; 3] = [sdl2::rect::Point::new(0, 0); 3];\n\n for point in &mut [p1, p2, p3] {\n \/\/ Apply rotations\n let self_x = self.x;\n let self_y = self.y;\n let self_z = self.z;\n\n self.x += cx;\n self.y += cy;\n self.z += cz;\n\n point.rotate_y_z((-cy + -self_y), (-cz + -self_z), cyz);\n point.rotate_x_z((-cx + -self_x), (-cz + -self_z), cxz);\n point.rotate_x_y((-cx + -self_x), (-cy + -self_y), cxy);\n\n if !(point.angle_view < 100.0 && point.angle_view > 0.0\n || point.angle_view < 0.0 && point.angle_view > -100.0) {\n \/\/ Grab all the positions because Rust doesn't allow for a method \n \/\/ to take its instances class variable as an argument\n let mut point_x = point.x;\n let mut point_y = point.y;\n let mut point_z = point.z;\n\n point.x = point_x + self.x;\n point.y = point_y + self.y;\n point.z = point_z + self.z;\n\n \/\/ Generate 2d lines. \n let perspect_point = point.perspect_point(w, h);\n \/\/println!(\"{:?}\", perspect_point);\n sdl_points[iterator] = perspect_point;\n\n point.x = point_x;\n point.y = point_y;\n point.z = point_z;\n self.x = self_x;\n self.y = self_y;\n self.z = self_z; \n }\n \n \/\/ Set points' positions back to the ones they had before.\n self.x = self_x;\n self.y = self_y;\n self.z = self_z;\n\n iterator += 1;\n }\n\n \/\/println!(\"{:?}\", sdl_points);\n\n let flat_p1 = sdl_points[0];\n let flat_p2 = sdl_points[1];\n let flat_p3 = sdl_points[2];\n\n \/\/println!(\"{:?}\", self.points[0].perspect_point(w, h).x());\n \n let mut top: sdl2::rect::Point;\n let mut left: sdl2::rect::Point;\n let mut right: sdl2::rect::Point;\n\n \/\/ find top, left, and right.\n\n let points = [flat_p1, flat_p2, flat_p3];\n let top = points.iter().max_by_key(|p| -p.y()).unwrap().clone();\n let left = points.iter().max_by_key(|p| -p.x()).unwrap().clone();\n let right = points.iter().max_by_key(|p| p.x()).unwrap().clone();\n\n if (left.y() - top.y()) != 0 && (right.y() - top.y()) != 0 {\n let left_slope = -(left.x() - top.x()) as f64 \/ (left.y() - top.y()) as f64;\n let right_slope = -(right.x() - top.x()) as f64 \/ (right.y() - top.y()) as f64;\n\n for i in 0..left.y() - top.y() {\n renderer.draw_line(sdl2::rect::Point::new(right.x() + (right_slope * i as f64) as i32, right.y() - i),\n sdl2::rect::Point::new(left.x() + (left_slope * i as f64) as i32, left.y() - i));\n }\n println!(\"{:?} {:?}\", left_slope, right_slope);\n }\n }\n\n pub fn fill_top_flat(&mut self, w: i32, h: i32, renderer: &mut sdl2::render::Renderer,\n cx: f64, cy: f64, cz: f64, \n cxy: f64, cxz: f64, cyz: f64) \n {\n let mut largest_y = -2147483648; \/\/ used value from someone gave on irc because std::i32::MIN wasnt feeling like working at the moment \n let mut largest_index = 0 as usize;\n\n let p1 = self.points[0].clone();\n let p2 = self.points[1].clone();\n let p3 = self.points[2].clone();\n\n let mut iterator = 0;\n let mut sdl_points: [sdl2::rect::Point; 3] = [sdl2::rect::Point::new(0, 0); 3];\n\n for point in &mut [p1, p2, p3] {\n \/\/ Apply rotations\n let self_x = self.x;\n let self_y = self.y;\n let self_z = self.z;\n\n self.x += cx;\n self.y += cy;\n self.z += cz;\n\n point.rotate_y_z((-cy + -self_y), (-cz + -self_z), cyz);\n point.rotate_x_z((-cx + -self_x), (-cz + -self_z), cxz);\n point.rotate_x_y((-cx + -self_x), (-cy + -self_y), cxy);\n\n if !(point.angle_view < 100.0 && point.angle_view > 0.0\n || point.angle_view < 0.0 && point.angle_view > -100.0) {\n \/\/ Grab all the positions because Rust doesn't 
allow for a method \n \/\/ to take its instances class variable as an argument\n let mut point_x = point.x;\n let mut point_y = point.y;\n let mut point_z = point.z;\n\n point.x = point_x + self.x;\n point.y = point_y + self.y;\n point.z = point_z + self.z;\n\n \/\/ Generate 2d lines. \n let perspect_point = point.perspect_point(w, h);\n \/\/println!(\"{:?}\", perspect_point);\n sdl_points[iterator] = perspect_point;\n\n point.x = point_x;\n point.y = point_y;\n point.z = point_z;\n self.x = self_x;\n self.y = self_y;\n self.z = self_z; \n }\n \n \/\/ Set points' positions back to the ones they had before.\n self.x = self_x;\n self.y = self_y;\n self.z = self_z;\n\n iterator += 1;\n }\n\n \/\/println!(\"{:?}\", sdl_points);\n\n let flat_p1 = sdl_points[0];\n let flat_p2 = sdl_points[1];\n let flat_p3 = sdl_points[2];\n\n \/\/println!(\"{:?}\", self.points[0].perspect_point(w, h).x());\n \n let mut top: sdl2::rect::Point;\n let mut left: sdl2::rect::Point;\n let mut right: sdl2::rect::Point;\n\n \/\/ find top, left, and right.\n\n let points = [flat_p1, flat_p2, flat_p3];\n let top = points.iter().max_by_key(|p| p.y()).unwrap().clone();\n let left = points.iter().max_by_key(|p| -p.x()).unwrap().clone();\n let right = points.iter().max_by_key(|p| p.x()).unwrap().clone();\n\n if (left.y() - top.y()) != 0 && (right.y() - top.y()) != 0 {\n let left_slope = -(left.x() - top.x()) as f64 \/ (left.y() - top.y()) as f64;\n let right_slope = -(right.x() - top.x()) as f64 \/ (right.y() - top.y()) as f64;\n\n for i in 0..top.y() - left.y() {\n renderer.draw_line(sdl2::rect::Point::new(right.x() + (right_slope * -i as f64) as i32, right.y() + i),\n sdl2::rect::Point::new(left.x() + (left_slope * -i as f64) as i32, left.y() + i));\n }\n println!(\"{:?} {:?}\", left_slope, right_slope);\n }\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax::ast;\nuse syntax::codemap::{self, CodeMap, Span, BytePos};\nuse syntax::visit;\nuse syntax::parse::{token, parser};\nuse std::path::PathBuf;\n\nuse utils;\nuse config::Config;\n\nuse changes::ChangeSet;\nuse rewrite::{Rewrite, RewriteContext};\n\npub struct FmtVisitor<'a> {\n pub codemap: &'a CodeMap,\n pub changes: ChangeSet<'a>,\n pub last_pos: BytePos,\n \/\/ TODO RAII util for indenting\n pub block_indent: usize,\n pub config: &'a Config,\n}\n\nimpl<'a, 'v> visit::Visitor<'v> for FmtVisitor<'a> {\n fn visit_expr(&mut self, ex: &'v ast::Expr) {\n debug!(\"visit_expr: {:?} {:?}\",\n self.codemap.lookup_char_pos(ex.span.lo),\n self.codemap.lookup_char_pos(ex.span.hi));\n self.format_missing(ex.span.lo);\n let offset = self.changes.cur_offset_span(ex.span);\n let context = RewriteContext {\n codemap: self.codemap,\n config: self.config,\n block_indent: self.block_indent,\n };\n let rewrite = ex.rewrite(&context, self.config.max_width - offset, offset);\n\n if let Some(new_str) = rewrite {\n self.changes.push_str_span(ex.span, &new_str);\n self.last_pos = ex.span.hi;\n }\n }\n\n fn visit_stmt(&mut self, stmt: &'v ast::Stmt) {\n \/\/ If the stmt is actually an item, then we'll handle any missing spans\n \/\/ there. This is important because of annotations.\n \/\/ Although it might make more sense for the statement span to include\n \/\/ any annotations on the item.\n let skip_missing = match stmt.node {\n ast::Stmt_::StmtDecl(ref decl, _) => {\n match decl.node {\n ast::Decl_::DeclItem(_) => true,\n _ => false,\n }\n }\n _ => false,\n };\n if !skip_missing {\n self.format_missing_with_indent(stmt.span.lo);\n }\n visit::walk_stmt(self, stmt);\n }\n\n fn visit_block(&mut self, b: &'v ast::Block) {\n debug!(\"visit_block: {:?} {:?}\",\n self.codemap.lookup_char_pos(b.span.lo),\n self.codemap.lookup_char_pos(b.span.hi));\n self.format_missing(b.span.lo);\n\n self.changes.push_str_span(b.span, \"{\");\n self.last_pos = self.last_pos + BytePos(1);\n self.block_indent += self.config.tab_spaces;\n\n for stmt in &b.stmts {\n self.visit_stmt(&stmt)\n }\n match b.expr {\n Some(ref e) => {\n self.format_missing_with_indent(e.span.lo);\n self.visit_expr(e);\n }\n None => {}\n }\n\n self.block_indent -= self.config.tab_spaces;\n \/\/ TODO we should compress any newlines here to just one\n self.format_missing_with_indent(b.span.hi - BytePos(1));\n self.changes.push_str_span(b.span, \"}\");\n self.last_pos = b.span.hi;\n }\n\n \/\/ Note that this only gets called for function definitions. Required methods\n \/\/ on traits do not get handled here.\n fn visit_fn(&mut self,\n fk: visit::FnKind<'v>,\n fd: &'v ast::FnDecl,\n b: &'v ast::Block,\n s: Span,\n _: ast::NodeId) {\n self.format_missing_with_indent(s.lo);\n self.last_pos = s.lo;\n\n let indent = self.block_indent;\n match fk {\n visit::FkItemFn(ident,\n ref generics,\n ref unsafety,\n ref constness,\n ref abi,\n vis) => {\n let new_fn = self.rewrite_fn(indent,\n ident,\n fd,\n None,\n generics,\n unsafety,\n constness,\n abi,\n vis,\n codemap::mk_sp(s.lo, b.span.lo));\n self.changes.push_str_span(s, &new_fn);\n }\n visit::FkMethod(ident, ref sig, vis) => {\n let new_fn = self.rewrite_fn(indent,\n ident,\n fd,\n Some(&sig.explicit_self),\n &sig.generics,\n &sig.unsafety,\n &sig.constness,\n &sig.abi,\n vis.unwrap_or(ast::Visibility::Inherited),\n codemap::mk_sp(s.lo, b.span.lo));\n self.changes.push_str_span(s, &new_fn);\n }\n visit::FkFnBlock(..) 
=> {}\n }\n\n self.last_pos = b.span.lo;\n self.visit_block(b)\n }\n\n fn visit_item(&mut self, item: &'v ast::Item) {\n \/\/ Don't look at attributes for modules.\n \/\/ We want to avoid looking at attributes in another file, which the AST\n \/\/ doesn't distinguish. FIXME This is overly conservative and means we miss\n \/\/ attributes on inline modules.\n match item.node {\n ast::Item_::ItemMod(_) => {}\n _ => {\n if self.visit_attrs(&item.attrs) {\n return;\n }\n }\n }\n\n match item.node {\n ast::Item_::ItemUse(ref vp) => {\n let vis = utils::format_visibility(item.vis);\n let offset = self.block_indent + vis.len() + \"use \".len();\n let context = RewriteContext {\n codemap: self.codemap, config: self.config, block_indent: self.block_indent };\n \/\/ 1 = \";\"\n match vp.rewrite(&context, self.config.max_width - offset - 1, offset) {\n Some(ref s) if s.len() == 0 => {\n \/\/ Format up to last newline\n let span = codemap::mk_sp(self.last_pos, item.span.lo);\n let span_end = match self.snippet(span).rfind('\\n') {\n Some(offset) => self.last_pos + BytePos(offset as u32),\n None => item.span.lo\n };\n self.format_missing(span_end);\n self.last_pos = item.span.hi;\n }\n Some(ref s) => {\n let s = format!(\"{}use {};\", vis, s);\n self.format_missing_with_indent(item.span.lo);\n self.changes.push_str_span(item.span, &s);\n self.last_pos = item.span.hi;\n }\n None => {\n self.format_missing_with_indent(item.span.lo);\n }\n }\n visit::walk_item(self, item);\n }\n ast::Item_::ItemImpl(..) |\n ast::Item_::ItemTrait(..) => {\n self.block_indent += self.config.tab_spaces;\n visit::walk_item(self, item);\n self.block_indent -= self.config.tab_spaces;\n }\n ast::Item_::ItemExternCrate(_) => {\n self.format_missing_with_indent(item.span.lo);\n let new_str = self.snippet(item.span);\n self.changes.push_str_span(item.span, &new_str);\n self.last_pos = item.span.hi;\n }\n ast::Item_::ItemStruct(ref def, ref generics) => {\n self.format_missing_with_indent(item.span.lo);\n self.visit_struct(item.ident,\n item.vis,\n def,\n generics,\n item.span);\n self.last_pos = item.span.hi;\n }\n ast::Item_::ItemEnum(ref def, ref generics) => {\n self.format_missing_with_indent(item.span.lo);\n self.visit_enum(item.ident,\n item.vis,\n def,\n generics,\n item.span);\n self.last_pos = item.span.hi;\n }\n ast::Item_::ItemMod(ref module) => {\n self.format_missing_with_indent(item.span.lo);\n self.format_mod(module, item.span, item.ident, &item.attrs);\n }\n _ => {\n visit::walk_item(self, item);\n }\n }\n }\n\n fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) {\n if self.visit_attrs(&ti.attrs) {\n return;\n }\n\n if let ast::TraitItem_::MethodTraitItem(ref sig, None) = ti.node {\n self.format_missing_with_indent(ti.span.lo);\n\n let indent = self.block_indent;\n let new_fn = self.rewrite_required_fn(indent,\n ti.ident,\n sig,\n ti.span);\n\n self.changes.push_str_span(ti.span, &new_fn);\n self.last_pos = ti.span.hi;\n }\n \/\/ TODO format trait types\n\n visit::walk_trait_item(self, ti)\n }\n\n fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) {\n if self.visit_attrs(&ii.attrs) {\n return;\n }\n visit::walk_impl_item(self, ii)\n }\n\n fn visit_mac(&mut self, mac: &'v ast::Mac) {\n visit::walk_mac(self, mac)\n }\n\n fn visit_mod(&mut self, m: &'v ast::Mod, s: Span, _: ast::NodeId) {\n \/\/ This is only called for the root module\n let filename = self.codemap.span_to_filename(s);\n self.format_separate_mod(m, &filename);\n }\n}\n\nimpl<'a> FmtVisitor<'a> {\n pub fn from_codemap<'b>(codemap: &'b CodeMap, 
config: &'b Config) -> FmtVisitor<'b> {\n FmtVisitor {\n codemap: codemap,\n changes: ChangeSet::from_codemap(codemap),\n last_pos: BytePos(0),\n block_indent: 0,\n config: config,\n }\n }\n\n pub fn snippet(&self, span: Span) -> String {\n match self.codemap.span_to_snippet(span) {\n Ok(s) => s,\n Err(_) => {\n println!(\"Couldn't make snippet for span {:?}->{:?}\",\n self.codemap.lookup_char_pos(span.lo),\n self.codemap.lookup_char_pos(span.hi));\n \"\".to_owned()\n }\n }\n }\n\n \/\/ Returns true if we should skip the following item.\n pub fn visit_attrs(&mut self, attrs: &[ast::Attribute]) -> bool {\n if attrs.len() == 0 {\n return false;\n }\n\n let first = &attrs[0];\n self.format_missing_with_indent(first.span.lo);\n\n if utils::contains_skip(attrs) {\n true\n } else {\n let rewrite = self.rewrite_attrs(attrs, self.block_indent);\n self.changes.push_str_span(first.span, &rewrite);\n let last = attrs.last().unwrap();\n self.last_pos = last.span.hi;\n false\n }\n }\n\n pub fn rewrite_attrs(&self, attrs: &[ast::Attribute], indent: usize) -> String {\n let mut result = String::new();\n let indent = utils::make_indent(indent);\n\n for (i, a) in attrs.iter().enumerate() {\n let a_str = self.snippet(a.span);\n\n if i > 0 {\n let comment = self.snippet(codemap::mk_sp(attrs[i-1].span.hi, a.span.lo));\n \/\/ This particular horror show is to preserve line breaks in between doc\n \/\/ comments. An alternative would be to force such line breaks to start\n \/\/ with the usual doc comment token.\n let multi_line = a_str.starts_with(\"\/\/\") && comment.matches('\\n').count() > 1;\n let comment = comment.trim();\n if comment.len() > 0 {\n result.push_str(&indent);\n result.push_str(comment);\n result.push('\\n');\n } else if multi_line {\n result.push('\\n');\n }\n result.push_str(&indent);\n }\n\n result.push_str(&a_str);\n\n if i < attrs.len() - 1 {\n result.push('\\n');\n }\n }\n\n result\n }\n\n fn format_mod(&mut self, m: &ast::Mod, s: Span, ident: ast::Ident, attrs: &[ast::Attribute]) {\n debug!(\"FmtVisitor::format_mod: ident: {:?}, span: {:?}\", ident, s);\n\n \/\/ Decide whether this is an inline mod or an external mod.\n let local_file_name = self.codemap.span_to_filename(s);\n let is_internal = local_file_name == self.codemap.span_to_filename(m.inner);\n\n \/\/ TODO Should rewrite properly `mod X;`\n\n if is_internal {\n debug!(\"FmtVisitor::format_mod: internal mod\");\n self.block_indent += self.config.tab_spaces;\n visit::walk_mod(self, m);\n debug!(\"... last_pos after: {:?}\", self.last_pos);\n self.block_indent -= self.config.tab_spaces;\n } else {\n debug!(\"FmtVisitor::format_mod: external mod\");\n let file_path = self.module_file(ident, attrs, local_file_name);\n let filename = file_path.to_str().unwrap();\n if self.changes.is_changed(filename) {\n \/\/ The file has already been reformatted, do nothing\n } else {\n self.format_separate_mod(m, filename);\n }\n }\n\n debug!(\"FmtVisitor::format_mod: exit\");\n }\n\n \/\/\/ Find the file corresponding to an external mod\n fn module_file(&self, id: ast::Ident, attrs: &[ast::Attribute], filename: String) -> PathBuf {\n let dir_path = {\n let mut path = PathBuf::from(&filename);\n path.pop();\n path\n };\n\n if let Some(path) = parser::Parser::submod_path_from_attr(attrs, &dir_path) {\n return path;\n }\n\n match parser::Parser::default_submod_path(id, &dir_path, &self.codemap).result {\n Ok(parser::ModulePathSuccess { path, .. 
}) => path,\n _ => panic!(\"Couldn't find module {}\", token::get_ident(id))\n }\n }\n\n \/\/\/ Format the content of a module into a separate file\n fn format_separate_mod(&mut self, m: &ast::Mod, filename: &str) {\n let last_pos = self.last_pos;\n let block_indent = self.block_indent;\n let filemap = self.codemap.get_filemap(filename);\n self.last_pos = filemap.start_pos;\n self.block_indent = 0;\n visit::walk_mod(self, m);\n self.format_missing(filemap.end_pos);\n self.last_pos = last_pos;\n self.block_indent = block_indent;\n }\n}\n<commit_msg>Add a helper method to format imports<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax::ast;\nuse syntax::codemap::{self, CodeMap, Span, BytePos};\nuse syntax::visit;\nuse syntax::parse::{token, parser};\nuse std::path::PathBuf;\n\nuse utils;\nuse config::Config;\n\nuse changes::ChangeSet;\nuse rewrite::{Rewrite, RewriteContext};\n\npub struct FmtVisitor<'a> {\n pub codemap: &'a CodeMap,\n pub changes: ChangeSet<'a>,\n pub last_pos: BytePos,\n \/\/ TODO RAII util for indenting\n pub block_indent: usize,\n pub config: &'a Config,\n}\n\nimpl<'a, 'v> visit::Visitor<'v> for FmtVisitor<'a> {\n fn visit_expr(&mut self, ex: &'v ast::Expr) {\n debug!(\"visit_expr: {:?} {:?}\",\n self.codemap.lookup_char_pos(ex.span.lo),\n self.codemap.lookup_char_pos(ex.span.hi));\n self.format_missing(ex.span.lo);\n let offset = self.changes.cur_offset_span(ex.span);\n let context = RewriteContext {\n codemap: self.codemap,\n config: self.config,\n block_indent: self.block_indent,\n };\n let rewrite = ex.rewrite(&context, self.config.max_width - offset, offset);\n\n if let Some(new_str) = rewrite {\n self.changes.push_str_span(ex.span, &new_str);\n self.last_pos = ex.span.hi;\n }\n }\n\n fn visit_stmt(&mut self, stmt: &'v ast::Stmt) {\n \/\/ If the stmt is actually an item, then we'll handle any missing spans\n \/\/ there. 
This is important because of annotations.\n \/\/ Although it might make more sense for the statement span to include\n \/\/ any annotations on the item.\n let skip_missing = match stmt.node {\n ast::Stmt_::StmtDecl(ref decl, _) => {\n match decl.node {\n ast::Decl_::DeclItem(_) => true,\n _ => false,\n }\n }\n _ => false,\n };\n if !skip_missing {\n self.format_missing_with_indent(stmt.span.lo);\n }\n visit::walk_stmt(self, stmt);\n }\n\n fn visit_block(&mut self, b: &'v ast::Block) {\n debug!(\"visit_block: {:?} {:?}\",\n self.codemap.lookup_char_pos(b.span.lo),\n self.codemap.lookup_char_pos(b.span.hi));\n self.format_missing(b.span.lo);\n\n self.changes.push_str_span(b.span, \"{\");\n self.last_pos = self.last_pos + BytePos(1);\n self.block_indent += self.config.tab_spaces;\n\n for stmt in &b.stmts {\n self.visit_stmt(&stmt)\n }\n match b.expr {\n Some(ref e) => {\n self.format_missing_with_indent(e.span.lo);\n self.visit_expr(e);\n }\n None => {}\n }\n\n self.block_indent -= self.config.tab_spaces;\n \/\/ TODO we should compress any newlines here to just one\n self.format_missing_with_indent(b.span.hi - BytePos(1));\n self.changes.push_str_span(b.span, \"}\");\n self.last_pos = b.span.hi;\n }\n\n \/\/ Note that this only gets called for function definitions. Required methods\n \/\/ on traits do not get handled here.\n fn visit_fn(&mut self,\n fk: visit::FnKind<'v>,\n fd: &'v ast::FnDecl,\n b: &'v ast::Block,\n s: Span,\n _: ast::NodeId) {\n self.format_missing_with_indent(s.lo);\n self.last_pos = s.lo;\n\n let indent = self.block_indent;\n match fk {\n visit::FkItemFn(ident,\n ref generics,\n ref unsafety,\n ref constness,\n ref abi,\n vis) => {\n let new_fn = self.rewrite_fn(indent,\n ident,\n fd,\n None,\n generics,\n unsafety,\n constness,\n abi,\n vis,\n codemap::mk_sp(s.lo, b.span.lo));\n self.changes.push_str_span(s, &new_fn);\n }\n visit::FkMethod(ident, ref sig, vis) => {\n let new_fn = self.rewrite_fn(indent,\n ident,\n fd,\n Some(&sig.explicit_self),\n &sig.generics,\n &sig.unsafety,\n &sig.constness,\n &sig.abi,\n vis.unwrap_or(ast::Visibility::Inherited),\n codemap::mk_sp(s.lo, b.span.lo));\n self.changes.push_str_span(s, &new_fn);\n }\n visit::FkFnBlock(..) => {}\n }\n\n self.last_pos = b.span.lo;\n self.visit_block(b)\n }\n\n fn visit_item(&mut self, item: &'v ast::Item) {\n \/\/ Don't look at attributes for modules.\n \/\/ We want to avoid looking at attributes in another file, which the AST\n \/\/ doesn't distinguish. FIXME This is overly conservative and means we miss\n \/\/ attributes on inline modules.\n match item.node {\n ast::Item_::ItemMod(_) => {}\n _ => {\n if self.visit_attrs(&item.attrs) {\n return;\n }\n }\n }\n\n match item.node {\n ast::Item_::ItemUse(ref vp) => {\n self.format_import(item.vis, vp, item.span);\n }\n ast::Item_::ItemImpl(..) |\n ast::Item_::ItemTrait(..) 
=> {\n self.block_indent += self.config.tab_spaces;\n visit::walk_item(self, item);\n self.block_indent -= self.config.tab_spaces;\n }\n ast::Item_::ItemExternCrate(_) => {\n self.format_missing_with_indent(item.span.lo);\n let new_str = self.snippet(item.span);\n self.changes.push_str_span(item.span, &new_str);\n self.last_pos = item.span.hi;\n }\n ast::Item_::ItemStruct(ref def, ref generics) => {\n self.format_missing_with_indent(item.span.lo);\n self.visit_struct(item.ident,\n item.vis,\n def,\n generics,\n item.span);\n }\n ast::Item_::ItemEnum(ref def, ref generics) => {\n self.format_missing_with_indent(item.span.lo);\n self.visit_enum(item.ident,\n item.vis,\n def,\n generics,\n item.span);\n self.last_pos = item.span.hi;\n }\n ast::Item_::ItemMod(ref module) => {\n self.format_missing_with_indent(item.span.lo);\n self.format_mod(module, item.span, item.ident, &item.attrs);\n }\n _ => {\n visit::walk_item(self, item);\n }\n }\n }\n\n fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) {\n if self.visit_attrs(&ti.attrs) {\n return;\n }\n\n if let ast::TraitItem_::MethodTraitItem(ref sig, None) = ti.node {\n self.format_missing_with_indent(ti.span.lo);\n\n let indent = self.block_indent;\n let new_fn = self.rewrite_required_fn(indent,\n ti.ident,\n sig,\n ti.span);\n\n self.changes.push_str_span(ti.span, &new_fn);\n self.last_pos = ti.span.hi;\n }\n \/\/ TODO format trait types\n\n visit::walk_trait_item(self, ti)\n }\n\n fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) {\n if self.visit_attrs(&ii.attrs) {\n return;\n }\n visit::walk_impl_item(self, ii)\n }\n\n fn visit_mac(&mut self, mac: &'v ast::Mac) {\n visit::walk_mac(self, mac)\n }\n\n fn visit_mod(&mut self, m: &'v ast::Mod, s: Span, _: ast::NodeId) {\n \/\/ This is only called for the root module\n let filename = self.codemap.span_to_filename(s);\n self.format_separate_mod(m, &filename);\n }\n}\n\nimpl<'a> FmtVisitor<'a> {\n pub fn from_codemap<'b>(codemap: &'b CodeMap, config: &'b Config) -> FmtVisitor<'b> {\n FmtVisitor {\n codemap: codemap,\n changes: ChangeSet::from_codemap(codemap),\n last_pos: BytePos(0),\n block_indent: 0,\n config: config,\n }\n }\n\n pub fn snippet(&self, span: Span) -> String {\n match self.codemap.span_to_snippet(span) {\n Ok(s) => s,\n Err(_) => {\n println!(\"Couldn't make snippet for span {:?}->{:?}\",\n self.codemap.lookup_char_pos(span.lo),\n self.codemap.lookup_char_pos(span.hi));\n \"\".to_owned()\n }\n }\n }\n\n \/\/ Returns true if we should skip the following item.\n pub fn visit_attrs(&mut self, attrs: &[ast::Attribute]) -> bool {\n if attrs.len() == 0 {\n return false;\n }\n\n let first = &attrs[0];\n self.format_missing_with_indent(first.span.lo);\n\n if utils::contains_skip(attrs) {\n true\n } else {\n let rewrite = self.rewrite_attrs(attrs, self.block_indent);\n self.changes.push_str_span(first.span, &rewrite);\n let last = attrs.last().unwrap();\n self.last_pos = last.span.hi;\n false\n }\n }\n\n pub fn rewrite_attrs(&self, attrs: &[ast::Attribute], indent: usize) -> String {\n let mut result = String::new();\n let indent = utils::make_indent(indent);\n\n for (i, a) in attrs.iter().enumerate() {\n let a_str = self.snippet(a.span);\n\n if i > 0 {\n let comment = self.snippet(codemap::mk_sp(attrs[i-1].span.hi, a.span.lo));\n \/\/ This particular horror show is to preserve line breaks in between doc\n \/\/ comments. 
An alternative would be to force such line breaks to start\n \/\/ with the usual doc comment token.\n let multi_line = a_str.starts_with(\"\/\/\") && comment.matches('\\n').count() > 1;\n let comment = comment.trim();\n if comment.len() > 0 {\n result.push_str(&indent);\n result.push_str(comment);\n result.push('\\n');\n } else if multi_line {\n result.push('\\n');\n }\n result.push_str(&indent);\n }\n\n result.push_str(&a_str);\n\n if i < attrs.len() - 1 {\n result.push('\\n');\n }\n }\n\n result\n }\n\n fn format_mod(&mut self, m: &ast::Mod, s: Span, ident: ast::Ident, attrs: &[ast::Attribute]) {\n debug!(\"FmtVisitor::format_mod: ident: {:?}, span: {:?}\", ident, s);\n\n \/\/ Decide whether this is an inline mod or an external mod.\n let local_file_name = self.codemap.span_to_filename(s);\n let is_internal = local_file_name == self.codemap.span_to_filename(m.inner);\n\n \/\/ TODO Should rewrite properly `mod X;`\n\n if is_internal {\n debug!(\"FmtVisitor::format_mod: internal mod\");\n self.block_indent += self.config.tab_spaces;\n visit::walk_mod(self, m);\n debug!(\"... last_pos after: {:?}\", self.last_pos);\n self.block_indent -= self.config.tab_spaces;\n } else {\n debug!(\"FmtVisitor::format_mod: external mod\");\n let file_path = self.module_file(ident, attrs, local_file_name);\n let filename = file_path.to_str().unwrap();\n if self.changes.is_changed(filename) {\n \/\/ The file has already been reformatted, do nothing\n } else {\n self.format_separate_mod(m, filename);\n }\n }\n\n debug!(\"FmtVisitor::format_mod: exit\");\n }\n\n \/\/\/ Find the file corresponding to an external mod\n fn module_file(&self, id: ast::Ident, attrs: &[ast::Attribute], filename: String) -> PathBuf {\n let dir_path = {\n let mut path = PathBuf::from(&filename);\n path.pop();\n path\n };\n\n if let Some(path) = parser::Parser::submod_path_from_attr(attrs, &dir_path) {\n return path;\n }\n\n match parser::Parser::default_submod_path(id, &dir_path, &self.codemap).result {\n Ok(parser::ModulePathSuccess { path, .. 
}) => path,\n _ => panic!(\"Couldn't find module {}\", token::get_ident(id))\n }\n }\n\n \/\/\/ Format the content of a module into a separate file\n fn format_separate_mod(&mut self, m: &ast::Mod, filename: &str) {\n let last_pos = self.last_pos;\n let block_indent = self.block_indent;\n let filemap = self.codemap.get_filemap(filename);\n self.last_pos = filemap.start_pos;\n self.block_indent = 0;\n visit::walk_mod(self, m);\n self.format_missing(filemap.end_pos);\n self.last_pos = last_pos;\n self.block_indent = block_indent;\n }\n\n fn format_import(&mut self, vis: ast::Visibility, vp: &ast::ViewPath, span: Span) {\n let vis = utils::format_visibility(vis);\n let offset = self.block_indent + vis.len() + \"use \".len();\n let context = RewriteContext {\n codemap: self.codemap,\n config: self.config,\n block_indent: self.block_indent,\n };\n \/\/ 1 = \";\"\n match vp.rewrite(&context, self.config.max_width - offset - 1, offset) {\n Some(ref s) if s.len() == 0 => {\n \/\/ Format up to last newline\n let prev_span = codemap::mk_sp(self.last_pos, span.lo);\n let span_end = match self.snippet(prev_span).rfind('\\n') {\n Some(offset) => self.last_pos + BytePos(offset as u32),\n None => span.lo\n };\n self.format_missing(span_end);\n self.last_pos = span.hi;\n }\n Some(ref s) => {\n let s = format!(\"{}use {};\", vis, s);\n self.format_missing_with_indent(span.lo);\n self.changes.push_str_span(span, &s);\n self.last_pos = span.hi;\n }\n None => {\n self.format_missing_with_indent(span.lo);\n self.format_missing(span.hi);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for the new write function<commit_after>extern crate geo;\nextern crate geo_types;\nextern crate gpx;\n\n#[cfg(test)]\nmod tests {\n use std::fs::File;\n use std::io::BufReader;\n\n use gpx::read;\n use gpx::write;\n use gpx::Gpx;\n use gpx::Link;\n use gpx::Waypoint;\n\n #[test]\n fn gpx_writer_write_unknown_gpx_version() {\n let gpx: Gpx = Default::default();\n let mut writer: Vec<u8> = Vec::new();\n \/\/ Should fail with unknown version.\n let result = write(&gpx, &mut writer);\n\n assert!(result.is_err());\n }\n\n #[test]\n fn gpx_writer_write_test_wikipedia() {\n check_write_for_example_file(\"tests\/fixtures\/wikipedia_example.gpx\");\n }\n\n #[test]\n fn gpx_writer_write_test_garmin_activity() {\n check_write_for_example_file(\"tests\/fixtures\/garmin-activity.gpx\");\n }\n\n #[test]\n fn gpx_writer_write_test_with_accuracy() {\n check_write_for_example_file(\"tests\/fixtures\/with_accuracy.gpx\");\n }\n\n fn check_write_for_example_file(filename: &str) {\n let reference_gpx = read_test_gpx_file(filename);\n let written_gpx = write_and_reread_gpx(&reference_gpx);\n\n check_metadata_equal(&reference_gpx, &written_gpx);\n check_points_equal(&reference_gpx, &written_gpx);\n }\n\n fn read_test_gpx_file(filename: &str) -> Gpx {\n let file = File::open(filename).unwrap();\n let reader = BufReader::new(file);\n\n let result = read(reader);\n assert!(result.is_ok());\n\n result.unwrap()\n }\n\n fn write_and_reread_gpx(reference_gpx: &Gpx) -> Gpx {\n let mut buffer: Vec<u8> = Vec::new();\n let result = write(&reference_gpx, &mut buffer);\n assert!(result.is_ok());\n\n let written_gpx = read(buffer.as_slice()).unwrap();\n written_gpx\n }\n\n fn check_metadata_equal(reference_gpx: &Gpx, written_gpx: &Gpx) {\n let reference = &reference_gpx.metadata;\n let written = &written_gpx.metadata;\n if reference.is_some() {\n assert!(written.is_some());\n } else {\n assert!(written.is_none());\n return;\n }\n let 
reference = reference.as_ref().unwrap();\n let written = written.as_ref().unwrap();\n assert_eq!(reference.name, written.name);\n assert_eq!(reference.time, written.time);\n check_links_equal(&reference.links, &written.links);\n }\n\n fn check_links_equal(reference: &Vec<Link>, written: &Vec<Link>) {\n assert_eq!(reference.len(), written.len());\n for (r, w) in reference.iter().zip(written) {\n assert_eq!(r.href, w.href);\n assert_eq!(r.text, w.text);\n }\n }\n\n fn check_points_equal(reference: &Gpx, written: &Gpx) {\n check_waypoints_equal(&reference.waypoints, &written.waypoints);\n assert_eq!(reference.tracks.len(), written.tracks.len());\n for (r_track, w_track) in reference.tracks.iter().zip(written.tracks.iter()) {\n assert_eq!(r_track.name, w_track.name);\n assert_eq!(r_track.segments.len(), w_track.segments.len());\n for (r_seg, w_seg) in r_track.segments.iter().zip(w_track.segments.iter()) {\n check_waypoints_equal(&r_seg.points, &w_seg.points);\n }\n }\n }\n\n fn check_waypoints_equal(reference: &Vec<Waypoint>, written: &Vec<Waypoint>) {\n assert_eq!(reference.len(), written.len());\n for (r_wp, w_wp) in reference.iter().zip(written) {\n assert_eq!(r_wp.point(), w_wp.point());\n assert_eq!(r_wp.elevation, w_wp.elevation);\n assert_eq!(r_wp.speed, w_wp.speed);\n assert_eq!(r_wp.time, w_wp.time);\n assert_eq!(r_wp.geoidheight, w_wp.geoidheight);\n assert_eq!(r_wp.name, w_wp.name);\n assert_eq!(r_wp.comment, w_wp.comment);\n assert_eq!(r_wp.description, w_wp.description);\n assert_eq!(r_wp.source, w_wp.source);\n check_links_equal(&r_wp.links, &w_wp.links);\n assert_eq!(r_wp.symbol, w_wp.symbol);\n assert_eq!(r_wp._type, w_wp._type);\n assert_eq!(r_wp.fix, w_wp.fix);\n assert_eq!(r_wp.sat, w_wp.sat);\n assert_eq!(r_wp.hdop, w_wp.hdop);\n assert_eq!(r_wp.vdop, w_wp.vdop);\n assert_eq!(r_wp.pdop, w_wp.pdop);\n assert_eq!(r_wp.age, w_wp.age);\n assert_eq!(r_wp.dgpsid, w_wp.dgpsid);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::num::{Zero, One, atan2};\nuse glfw;\nuse glfw::consts::*;\nuse nalgebra::vec::{Vec2, Vec3, AlgebraicVec, Cross};\nuse nalgebra::mat::{Mat4, Inv, ToHomogeneous};\nuse nalgebra::types::Iso3f64;\nuse camera::Camera;\nuse event;\n\n\/\/\/ Arc-ball camera mode. An arc-ball camera is a camera rotating around a fixed point (the focus\n\/\/\/ point) and always looking at it. The following inputs are handled:\n\/\/\/\n\/\/\/ * Left button press + drag - rotates the camera around the focus point\n\/\/\/ * Right button press + drag - translates the focus point on the plane orthogonal to the view\n\/\/\/ direction\n\/\/\/ * Scroll in\/out - zoom in\/out\n\/\/\/ * Enter key - set the focus point to the origin\n#[deriving(Clone, ToStr)]\npub struct ArcBall {\n \/\/\/ The focus point.\n priv at: Vec3<f64>,\n \/\/\/ Yaw of the camera (rotation along the y axis).\n priv yaw: f64,\n \/\/\/ Pitch of the camera (rotation along the x axis).\n priv pitch: f64,\n \/\/\/ Distance from the camera to the `at` focus point.\n priv dist: f64,\n\n \/\/\/ Increment of the yaw per unit mouse movement. The default value is 0.005.\n priv yaw_step: f64,\n \/\/\/ Increment of the pitch per unit mouse movement. The default value is 0.005.\n priv pitch_step: f64,\n \/\/\/ Increment of the distance per unit scrolling. 
The default value is 40.0.\n priv dist_step: f64,\n\n priv fov: f64,\n priv znear: f64,\n priv zfar: f64,\n priv projection: Mat4<f64>,\n priv proj_view: Mat4<f64>,\n priv inv_proj_view: Mat4<f64>,\n priv last_cursor_pos: Vec2<f64>\n}\n\nimpl ArcBall {\n \/\/\/ Create a new arc-ball camera.\n pub fn new(eye: Vec3<f64>, at: Vec3<f64>) -> ArcBall {\n ArcBall::new_with_frustrum(45.0f64.to_radians(), 0.1, 1024.0, eye, at)\n }\n\n \/\/\/ Creates a new arc ball camera with default sensitivity values.\n pub fn new_with_frustrum(fov: f64,\n znear: f64,\n zfar: f64,\n eye: Vec3<f64>,\n at: Vec3<f64>) -> ArcBall {\n let mut res = ArcBall {\n at: Vec3::new(0.0, 0.0, 0.0),\n yaw: 0.0,\n pitch: 0.0,\n dist: 0.0,\n yaw_step: 0.005,\n pitch_step: 0.005,\n dist_step: 40.0,\n fov: fov,\n znear: znear,\n zfar: zfar,\n projection: Mat4::new_perspective(800.0, 600.0, fov, znear, zfar),\n proj_view: Zero::zero(),\n inv_proj_view: Zero::zero(),\n last_cursor_pos: Zero::zero()\n };\n\n res.look_at_z(eye, at);\n\n res\n }\n\n \/\/\/ The arc-ball camera `yaw`.\n pub fn yaw(&self) -> f64 {\n self.yaw\n }\n\n \/\/\/ Sets the camera `yaw`. Change this to modify the rotation along the `up` axis.\n pub fn set_yaw(&mut self, yaw: f64) {\n self.yaw = yaw;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ The arc-ball camera `pitch`.\n pub fn pitch(&self) -> f64 {\n self.pitch\n }\n\n \/\/\/ Sets the camera `pitch`.\n pub fn set_pitch(&mut self, pitch: f64) {\n self.pitch = pitch;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ The distance from the camera position to its view point.\n pub fn dist(&self) -> f64 {\n self.dist\n }\n\n \/\/\/ Move the camera such that it is at a given distance from the view point.\n pub fn set_dist(&mut self, dist: f64) {\n self.dist = dist;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ Move and orient the camera such that it looks at a specific point.\n pub fn look_at_z(&mut self, eye: Vec3<f64>, at: Vec3<f64>) {\n let dist = (eye - at).norm();\n let pitch = ((eye.y - at.y) \/ dist).acos();\n let yaw = atan2(eye.z - at.z, eye.x - at.x);\n\n self.at = at;\n self.dist = dist;\n self.yaw = yaw;\n self.pitch = pitch;\n self.update_projviews();\n }\n\n \/\/\/ Transformation applied by the camera without perspective.\n fn update_restrictions(&mut self) {\n if (self.dist < 0.00001) {\n self.dist = 0.00001\n }\n\n if (self.pitch <= 0.0001) {\n self.pitch = 0.0001\n }\n\n let _pi: f64 = Real::pi();\n if (self.pitch > _pi - 0.0001) {\n self.pitch = _pi - 0.0001\n }\n }\n\n fn handle_left_button_displacement(&mut self, dpos: &Vec2<f64>) {\n self.yaw = self.yaw + dpos.x * self.yaw_step;\n self.pitch = self.pitch - dpos.y * self.pitch_step;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n fn handle_right_button_displacement(&mut self, dpos: &Vec2<f64>) {\n let eye = self.eye();\n let dir = (self.at - eye).normalized();\n let tangent = Vec3::y().cross(&dir).normalized();\n let bitangent = dir.cross(&tangent);\n let mult = self.dist \/ 1000.0;\n\n self.at = self.at + tangent * (dpos.x * mult) + bitangent * (dpos.y * mult);\n self.update_projviews();\n }\n\n fn handle_scroll(&mut self, off: float) {\n self.dist = self.dist + self.dist_step * (off as f64) \/ 120.0;\n self.update_restrictions();\n self.update_projviews();\n }\n\n fn update_projviews(&mut self) {\n self.proj_view = self.projection * self.view_transform().inverse().unwrap().to_homogeneous();\n self.inv_proj_view = 
self.proj_view.inverse().unwrap();\n }\n}\n\nimpl Camera for ArcBall {\n fn clip_planes(&self) -> (f64, f64) {\n (self.znear, self.zfar)\n }\n\n fn view_transform(&self) -> Iso3f64 {\n let mut id: Iso3f64 = One::one();\n id.look_at_z(&self.eye(), &self.at, &Vec3::y());\n\n id\n }\n\n fn eye(&self) -> Vec3<f64> {\n let px = self.at.x + self.dist * self.yaw.cos() * self.pitch.sin();\n let py = self.at.y + self.dist * self.pitch.cos();\n let pz = self.at.z + self.dist * self.yaw.sin() * self.pitch.sin();\n\n Vec3::new(px, py, pz)\n }\n\n fn handle_mouse(&mut self, window: &glfw::Window, event: &event::MouseEvent) {\n match *event {\n event::CursorPos(x, y) => {\n let curr_pos = Vec2::new(x as f64, y as f64);\n\n if window.get_mouse_button(MOUSE_BUTTON_1) == PRESS {\n let dpos = curr_pos - self.last_cursor_pos;\n self.handle_left_button_displacement(&dpos)\n }\n\n if window.get_mouse_button(MOUSE_BUTTON_2) == PRESS {\n let dpos = curr_pos - self.last_cursor_pos;\n self.handle_right_button_displacement(&dpos)\n }\n\n self.last_cursor_pos = curr_pos;\n },\n event::Scroll(_, off) => self.handle_scroll(off),\n _ => { }\n }\n }\n\n fn handle_keyboard(&mut self, _: &glfw::Window, event: &event::KeyboardEvent) {\n match *event {\n event::KeyReleased(button) => if button == KEY_ENTER {\n self.at = Zero::zero();\n self.update_projviews();\n },\n _ => { }\n }\n }\n\n fn handle_framebuffer_size_change(&mut self, w: f64, h: f64) {\n self.projection = Mat4::new_perspective(w, h, self.fov, self.znear, self.zfar);\n self.update_projviews();\n }\n\n fn transformation(&self) -> Mat4<f64> {\n self.proj_view\n }\n\n fn inv_transformation(&self) -> Mat4<f64> {\n self.inv_proj_view\n }\n\n fn update(&mut self, _: &glfw::Window) { }\n}\n<commit_msg>Git access to the arc-ball look at point.<commit_after>use std::num::{Zero, One, atan2};\nuse glfw;\nuse glfw::consts::*;\nuse nalgebra::vec::{Vec2, Vec3, AlgebraicVec, Cross};\nuse nalgebra::mat::{Mat4, Inv, ToHomogeneous};\nuse nalgebra::types::Iso3f64;\nuse camera::Camera;\nuse event;\n\n\/\/\/ Arc-ball camera mode. An arc-ball camera is a camera rotating around a fixed point (the focus\n\/\/\/ point) and always looking at it. The following inputs are handled:\n\/\/\/\n\/\/\/ * Left button press + drag - rotates the camera around the focus point\n\/\/\/ * Right button press + drag - translates the focus point on the plane orthogonal to the view\n\/\/\/ direction\n\/\/\/ * Scroll in\/out - zoom in\/out\n\/\/\/ * Enter key - set the focus point to the origin\n#[deriving(Clone, ToStr)]\npub struct ArcBall {\n \/\/\/ The focus point.\n priv at: Vec3<f64>,\n \/\/\/ Yaw of the camera (rotation along the y axis).\n priv yaw: f64,\n \/\/\/ Pitch of the camera (rotation along the x axis).\n priv pitch: f64,\n \/\/\/ Distance from the camera to the `at` focus point.\n priv dist: f64,\n\n \/\/\/ Increment of the yaw per unit mouse movement. The default value is 0.005.\n priv yaw_step: f64,\n \/\/\/ Increment of the pitch per unit mouse movement. The default value is 0.005.\n priv pitch_step: f64,\n \/\/\/ Increment of the distance per unit scrolling. 
The default value is 40.0.\n priv dist_step: f64,\n\n priv fov: f64,\n priv znear: f64,\n priv zfar: f64,\n priv projection: Mat4<f64>,\n priv proj_view: Mat4<f64>,\n priv inv_proj_view: Mat4<f64>,\n priv last_cursor_pos: Vec2<f64>\n}\n\nimpl ArcBall {\n \/\/\/ Create a new arc-ball camera.\n pub fn new(eye: Vec3<f64>, at: Vec3<f64>) -> ArcBall {\n ArcBall::new_with_frustrum(45.0f64.to_radians(), 0.1, 1024.0, eye, at)\n }\n\n \/\/\/ Creates a new arc ball camera with default sensitivity values.\n pub fn new_with_frustrum(fov: f64,\n znear: f64,\n zfar: f64,\n eye: Vec3<f64>,\n at: Vec3<f64>) -> ArcBall {\n let mut res = ArcBall {\n at: Vec3::new(0.0, 0.0, 0.0),\n yaw: 0.0,\n pitch: 0.0,\n dist: 0.0,\n yaw_step: 0.005,\n pitch_step: 0.005,\n dist_step: 40.0,\n fov: fov,\n znear: znear,\n zfar: zfar,\n projection: Mat4::new_perspective(800.0, 600.0, fov, znear, zfar),\n proj_view: Zero::zero(),\n inv_proj_view: Zero::zero(),\n last_cursor_pos: Zero::zero()\n };\n\n res.look_at_z(eye, at);\n\n res\n }\n\n \/\/\/ The point the arc-ball is looking at.\n pub fn at(&self) -> Vec3<f64> {\n self.at\n }\n\n \/\/\/ The arc-ball camera `yaw`.\n pub fn yaw(&self) -> f64 {\n self.yaw\n }\n\n \/\/\/ Sets the camera `yaw`. Change this to modify the rotation along the `up` axis.\n pub fn set_yaw(&mut self, yaw: f64) {\n self.yaw = yaw;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ The arc-ball camera `pitch`.\n pub fn pitch(&self) -> f64 {\n self.pitch\n }\n\n \/\/\/ Sets the camera `pitch`.\n pub fn set_pitch(&mut self, pitch: f64) {\n self.pitch = pitch;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ The distance from the camera position to its view point.\n pub fn dist(&self) -> f64 {\n self.dist\n }\n\n \/\/\/ Move the camera such that it is at a given distance from the view point.\n pub fn set_dist(&mut self, dist: f64) {\n self.dist = dist;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n \/\/\/ Move and orient the camera such that it looks at a specific point.\n pub fn look_at_z(&mut self, eye: Vec3<f64>, at: Vec3<f64>) {\n let dist = (eye - at).norm();\n let pitch = ((eye.y - at.y) \/ dist).acos();\n let yaw = atan2(eye.z - at.z, eye.x - at.x);\n\n self.at = at;\n self.dist = dist;\n self.yaw = yaw;\n self.pitch = pitch;\n self.update_projviews();\n }\n\n \/\/\/ Transformation applied by the camera without perspective.\n fn update_restrictions(&mut self) {\n if (self.dist < 0.00001) {\n self.dist = 0.00001\n }\n\n if (self.pitch <= 0.0001) {\n self.pitch = 0.0001\n }\n\n let _pi: f64 = Real::pi();\n if (self.pitch > _pi - 0.0001) {\n self.pitch = _pi - 0.0001\n }\n }\n\n fn handle_left_button_displacement(&mut self, dpos: &Vec2<f64>) {\n self.yaw = self.yaw + dpos.x * self.yaw_step;\n self.pitch = self.pitch - dpos.y * self.pitch_step;\n\n self.update_restrictions();\n self.update_projviews();\n }\n\n fn handle_right_button_displacement(&mut self, dpos: &Vec2<f64>) {\n let eye = self.eye();\n let dir = (self.at - eye).normalized();\n let tangent = Vec3::y().cross(&dir).normalized();\n let bitangent = dir.cross(&tangent);\n let mult = self.dist \/ 1000.0;\n\n self.at = self.at + tangent * (dpos.x * mult) + bitangent * (dpos.y * mult);\n self.update_projviews();\n }\n\n fn handle_scroll(&mut self, off: float) {\n self.dist = self.dist + self.dist_step * (off as f64) \/ 120.0;\n self.update_restrictions();\n self.update_projviews();\n }\n\n fn update_projviews(&mut self) {\n self.proj_view = self.projection * 
self.view_transform().inverse().unwrap().to_homogeneous();\n self.inv_proj_view = self.proj_view.inverse().unwrap();\n }\n}\n\nimpl Camera for ArcBall {\n fn clip_planes(&self) -> (f64, f64) {\n (self.znear, self.zfar)\n }\n\n fn view_transform(&self) -> Iso3f64 {\n let mut id: Iso3f64 = One::one();\n id.look_at_z(&self.eye(), &self.at, &Vec3::y());\n\n id\n }\n\n fn eye(&self) -> Vec3<f64> {\n let px = self.at.x + self.dist * self.yaw.cos() * self.pitch.sin();\n let py = self.at.y + self.dist * self.pitch.cos();\n let pz = self.at.z + self.dist * self.yaw.sin() * self.pitch.sin();\n\n Vec3::new(px, py, pz)\n }\n\n fn handle_mouse(&mut self, window: &glfw::Window, event: &event::MouseEvent) {\n match *event {\n event::CursorPos(x, y) => {\n let curr_pos = Vec2::new(x as f64, y as f64);\n\n if window.get_mouse_button(MOUSE_BUTTON_1) == PRESS {\n let dpos = curr_pos - self.last_cursor_pos;\n self.handle_left_button_displacement(&dpos)\n }\n\n if window.get_mouse_button(MOUSE_BUTTON_2) == PRESS {\n let dpos = curr_pos - self.last_cursor_pos;\n self.handle_right_button_displacement(&dpos)\n }\n\n self.last_cursor_pos = curr_pos;\n },\n event::Scroll(_, off) => self.handle_scroll(off),\n _ => { }\n }\n }\n\n fn handle_keyboard(&mut self, _: &glfw::Window, event: &event::KeyboardEvent) {\n match *event {\n event::KeyReleased(button) => if button == KEY_ENTER {\n self.at = Zero::zero();\n self.update_projviews();\n },\n _ => { }\n }\n }\n\n fn handle_framebuffer_size_change(&mut self, w: f64, h: f64) {\n self.projection = Mat4::new_perspective(w, h, self.fov, self.znear, self.zfar);\n self.update_projviews();\n }\n\n fn transformation(&self) -> Mat4<f64> {\n self.proj_view\n }\n\n fn inv_transformation(&self) -> Mat4<f64> {\n self.inv_proj_view\n }\n\n fn update(&mut self, _: &glfw::Window) { }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Missing file.<commit_after>\nextern crate rust_bio_tools;\nextern crate itertools;\n#[macro_use]\nextern crate log;\nextern crate fern;\n#[macro_use]\nextern crate clap;\n\nuse clap::{App,AppSettings};\nuse itertools::Itertools;\n\nuse rust_bio_tools::bam;\nuse rust_bio_tools::fastq;\n\nfn main() {\n let yaml = load_yaml!(\"..\/cli.yaml\");\n let matches = App::from_yaml(yaml)\n .version(env!(\"CARGO_PKG_VERSION\"))\n .global_settings(&[AppSettings::SubcommandRequired,\n AppSettings::ColoredHelp])\n .get_matches();\n\n fern::init_global_logger(fern::DispatchConfig {\n format: Box::new(|msg, _, _| msg.to_owned()),\n output: vec![fern::OutputConfig::stderr()],\n level: log::LogLevelFilter::Info\n }, log::LogLevelFilter::Trace).unwrap();\n\n if let Some(matches) = matches.subcommand_matches(\"fastq-split\") {\n fastq::split::split(&matches.values_of(\"chunks\").unwrap().collect_vec())\n }\n else if let Some(matches) = matches.subcommand_matches(\"bam-depth\") {\n bam::depth::depth(&matches.value_of(\"bam-path\").unwrap(), value_t!(matches, \"max-read-length\", u32).unwrap_or(1000))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Generated and Transient Unit States<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chase and\/or attack choice is ready<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Euclids gcd in Rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add ec2 module<commit_after>\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Utilities for tracing JS-managed values.\n\/\/!\n\/\/! The lifetime of DOM objects is managed by the SpiderMonkey Garbage\n\/\/! Collector. A rooted DOM object implementing the interface `Foo` is traced\n\/\/! as follows:\n\/\/!\n\/\/! 1. The GC calls `_trace` defined in `FooBinding` during the marking\n\/\/! phase. (This happens through `JSClass.trace` for non-proxy bindings, and\n\/\/! through `ProxyTraps.trace` otherwise.)\n\/\/! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`).\n\/\/! This is typically derived via a #[jstraceable] annotation\n\/\/! 3. For all fields, `Foo::trace()`\n\/\/! calls `trace()` on the field.\n\/\/! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls\n\/\/! `trace_reflector()`.\n\/\/! 4. `trace_reflector()` calls `trace_object()` with the `JSObject` for the\n\/\/! reflector.\n\/\/! 5. `trace_object()` calls `JS_CallTracer()` to notify the GC, which will\n\/\/! add the object to the graph, and will trace that object as well.\n\/\/!\n\/\/! The no_jsmanaged_fields!() macro adds an empty implementation of JSTraceable to\n\/\/! a datatype.\n\nuse dom::bindings::js::JS;\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse js::jsapi::{JSObject, JSTracer, JS_CallTracer, JSTRACE_OBJECT};\nuse js::jsval::JSVal;\n\nuse libc;\nuse std::rc::Rc;\nuse std::cell::{Cell, RefCell};\n\nuse url::Url;\nuse msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData};\nuse net::image_cache_task::ImageCacheTask;\nuse script_traits::ScriptControlChan;\nuse std::collections::hashmap::HashMap;\nuse collections::hash::{Hash, Hasher};\nuse style::PropertyDeclarationBlock;\nuse std::comm::{Receiver, Sender};\nuse string_cache::{Atom, Namespace};\nuse js::rust::Cx;\nuse http::headers::response::HeaderCollection as ResponseHeaderCollection;\nuse http::headers::request::HeaderCollection as RequestHeaderCollection;\nuse http::method::Method;\nuse std::io::timer::Timer;\nuse script_traits::UntrustedNodeAddress;\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::ConstellationChan;\nuse servo_util::smallvec::{SmallVec1, SmallVec};\nuse servo_util::str::LengthOrPercentageOrAuto;\nuse layout_interface::{LayoutRPC, LayoutChan};\nuse dom::node::{Node, TrustedNodeAddress};\nuse dom::bindings::utils::WindowProxyHandler;\nuse html5ever::tree_builder::QuirksMode;\n\nimpl<T: Reflectable> JSTraceable for JS<T> {\n fn trace(&self, trc: *mut JSTracer) {\n trace_reflector(trc, \"\", self.reflector());\n }\n}\n\nno_jsmanaged_fields!(Reflector)\n\n\/\/\/ A trait to allow tracing (only) DOM objects.\npub trait JSTraceable {\n fn trace(&self, trc: *mut JSTracer);\n}\n\n\/\/\/ Trace a `JSVal`.\npub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: JSVal) {\n if !val.is_markable() {\n return;\n }\n\n unsafe {\n let name = description.to_c_str();\n (*tracer).debugPrinter = None;\n (*tracer).debugPrintIndex = -1;\n (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;\n debug!(\"tracing value {:s}\", description);\n JS_CallTracer(tracer, val.to_gcthing(), val.trace_kind());\n }\n}\n\n\/\/\/ Trace the `JSObject` held by `reflector`.\n#[allow(unrooted_must_root)]\npub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) {\n trace_object(tracer, description, reflector.get_jsobject())\n}\n\n\/\/\/ Trace a `JSObject`.\npub fn trace_object(tracer: *mut JSTracer, 
description: &str, obj: *mut JSObject) {\n unsafe {\n let name = description.to_c_str();\n (*tracer).debugPrinter = None;\n (*tracer).debugPrintIndex = -1;\n (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;\n debug!(\"tracing {:s}\", description);\n JS_CallTracer(tracer, obj as *mut libc::c_void, JSTRACE_OBJECT);\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for RefCell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n self.borrow().trace(trc)\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Rc<T> {\n fn trace(&self, trc: *mut JSTracer) {\n (**self).trace(trc)\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Box<T> {\n fn trace(&self, trc: *mut JSTracer) {\n (**self).trace(trc)\n }\n}\n\nimpl<T: JSTraceable+Copy> JSTraceable for Cell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n self.get().trace(trc)\n }\n}\n\nimpl JSTraceable for *mut JSObject {\n fn trace(&self, trc: *mut JSTracer) {\n trace_object(trc, \"object\", *self);\n }\n}\n\nimpl JSTraceable for JSVal {\n fn trace(&self, trc: *mut JSTracer) {\n trace_jsval(trc, \"val\", *self);\n }\n}\n\n\/\/ XXXManishearth Check if the following three are optimized to no-ops\n\/\/ if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type)\nimpl<T: JSTraceable> JSTraceable for Vec<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.trace(trc);\n }\n }\n}\n\n\/\/ XXXManishearth Check if the following three are optimized to no-ops\n\/\/ if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type)\nimpl<T: JSTraceable + 'static> JSTraceable for SmallVec1<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.trace(trc);\n }\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Option<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n self.as_ref().map(|e| e.trace(trc));\n }\n}\n\nimpl<K,V,S,H> JSTraceable for HashMap<K, V, H> where K: Eq + Hash<S> + JSTraceable,\n V: JSTraceable,\n H: Hasher<S> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.val0().trace(trc);\n e.val1().trace(trc);\n }\n }\n}\n\nimpl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n let (ref a, ref b) = *self;\n a.trace(trc);\n b.trace(trc);\n }\n}\n\n\nno_jsmanaged_fields!(bool, f32, f64, String, Url)\nno_jsmanaged_fields!(uint, u8, u16, u32, u64)\nno_jsmanaged_fields!(int, i8, i16, i32, i64)\nno_jsmanaged_fields!(Sender<T>)\nno_jsmanaged_fields!(Receiver<T>)\nno_jsmanaged_fields!(ImageCacheTask, ScriptControlChan)\nno_jsmanaged_fields!(Atom, Namespace, Timer)\nno_jsmanaged_fields!(PropertyDeclarationBlock)\n\/\/ These three are interdependent, if you plan to put jsmanaged data\n\/\/ in one of these make sure it is propagated properly to containing structs\nno_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId)\nno_jsmanaged_fields!(QuirksMode)\nno_jsmanaged_fields!(Cx)\nno_jsmanaged_fields!(ResponseHeaderCollection, RequestHeaderCollection, Method)\nno_jsmanaged_fields!(ConstellationChan)\nno_jsmanaged_fields!(LayoutChan)\nno_jsmanaged_fields!(WindowProxyHandler)\nno_jsmanaged_fields!(UntrustedNodeAddress)\nno_jsmanaged_fields!(LengthOrPercentageOrAuto)\n\nimpl<'a> JSTraceable for &'a str {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl<A,B> JSTraceable for fn(A) -> B {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl JSTraceable for Box<ScriptListener+'static> {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n 
}\n}\n\nimpl JSTraceable for Box<LayoutRPC+'static> {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl JSTraceable for TrustedNodeAddress {\n fn trace(&self, s: *mut JSTracer) {\n let TrustedNodeAddress(addr) = *self;\n let node = addr as *const Node;\n unsafe {\n JS::from_raw(node).trace(s)\n }\n }\n}\n<commit_msg>Document bindings\/trace.rs.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n#![deny(missing_doc)]\n\n\/\/! Utilities for tracing JS-managed values.\n\/\/!\n\/\/! The lifetime of DOM objects is managed by the SpiderMonkey Garbage\n\/\/! Collector. A rooted DOM object implementing the interface `Foo` is traced\n\/\/! as follows:\n\/\/!\n\/\/! 1. The GC calls `_trace` defined in `FooBinding` during the marking\n\/\/! phase. (This happens through `JSClass.trace` for non-proxy bindings, and\n\/\/! through `ProxyTraps.trace` otherwise.)\n\/\/! 2. `_trace` calls `Foo::trace()` (an implementation of `JSTraceable`).\n\/\/! This is typically derived via a #[jstraceable] annotation\n\/\/! 3. For all fields, `Foo::trace()`\n\/\/! calls `trace()` on the field.\n\/\/! For example, for fields of type `JS<T>`, `JS<T>::trace()` calls\n\/\/! `trace_reflector()`.\n\/\/! 4. `trace_reflector()` calls `trace_object()` with the `JSObject` for the\n\/\/! reflector.\n\/\/! 5. `trace_object()` calls `JS_CallTracer()` to notify the GC, which will\n\/\/! add the object to the graph, and will trace that object as well.\n\/\/!\n\/\/! The no_jsmanaged_fields!() macro adds an empty implementation of JSTraceable to\n\/\/! a datatype.\n\nuse dom::bindings::js::JS;\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse js::jsapi::{JSObject, JSTracer, JS_CallTracer, JSTRACE_OBJECT};\nuse js::jsval::JSVal;\n\nuse libc;\nuse std::rc::Rc;\nuse std::cell::{Cell, RefCell};\n\nuse url::Url;\nuse msg::constellation_msg::{PipelineId, SubpageId, WindowSizeData};\nuse net::image_cache_task::ImageCacheTask;\nuse script_traits::ScriptControlChan;\nuse std::collections::hashmap::HashMap;\nuse collections::hash::{Hash, Hasher};\nuse style::PropertyDeclarationBlock;\nuse std::comm::{Receiver, Sender};\nuse string_cache::{Atom, Namespace};\nuse js::rust::Cx;\nuse http::headers::response::HeaderCollection as ResponseHeaderCollection;\nuse http::headers::request::HeaderCollection as RequestHeaderCollection;\nuse http::method::Method;\nuse std::io::timer::Timer;\nuse script_traits::UntrustedNodeAddress;\nuse servo_msg::compositor_msg::ScriptListener;\nuse servo_msg::constellation_msg::ConstellationChan;\nuse servo_util::smallvec::{SmallVec1, SmallVec};\nuse servo_util::str::LengthOrPercentageOrAuto;\nuse layout_interface::{LayoutRPC, LayoutChan};\nuse dom::node::{Node, TrustedNodeAddress};\nuse dom::bindings::utils::WindowProxyHandler;\nuse html5ever::tree_builder::QuirksMode;\n\n\/\/\/ A trait to allow tracing (only) DOM objects.\npub trait JSTraceable {\n \/\/\/ Trace `self`.\n fn trace(&self, trc: *mut JSTracer);\n}\n\nimpl<T: Reflectable> JSTraceable for JS<T> {\n fn trace(&self, trc: *mut JSTracer) {\n trace_reflector(trc, \"\", self.reflector());\n }\n}\n\nno_jsmanaged_fields!(Reflector)\n\n\/\/\/ Trace a `JSVal`.\npub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: JSVal) {\n if !val.is_markable() {\n return;\n }\n\n unsafe {\n let name = description.to_c_str();\n (*tracer).debugPrinter = None;\n 
(*tracer).debugPrintIndex = -1;\n (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;\n debug!(\"tracing value {:s}\", description);\n JS_CallTracer(tracer, val.to_gcthing(), val.trace_kind());\n }\n}\n\n\/\/\/ Trace the `JSObject` held by `reflector`.\n#[allow(unrooted_must_root)]\npub fn trace_reflector(tracer: *mut JSTracer, description: &str, reflector: &Reflector) {\n trace_object(tracer, description, reflector.get_jsobject())\n}\n\n\/\/\/ Trace a `JSObject`.\npub fn trace_object(tracer: *mut JSTracer, description: &str, obj: *mut JSObject) {\n unsafe {\n let name = description.to_c_str();\n (*tracer).debugPrinter = None;\n (*tracer).debugPrintIndex = -1;\n (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;\n debug!(\"tracing {:s}\", description);\n JS_CallTracer(tracer, obj as *mut libc::c_void, JSTRACE_OBJECT);\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for RefCell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n self.borrow().trace(trc)\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Rc<T> {\n fn trace(&self, trc: *mut JSTracer) {\n (**self).trace(trc)\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Box<T> {\n fn trace(&self, trc: *mut JSTracer) {\n (**self).trace(trc)\n }\n}\n\nimpl<T: JSTraceable+Copy> JSTraceable for Cell<T> {\n fn trace(&self, trc: *mut JSTracer) {\n self.get().trace(trc)\n }\n}\n\nimpl JSTraceable for *mut JSObject {\n fn trace(&self, trc: *mut JSTracer) {\n trace_object(trc, \"object\", *self);\n }\n}\n\nimpl JSTraceable for JSVal {\n fn trace(&self, trc: *mut JSTracer) {\n trace_jsval(trc, \"val\", *self);\n }\n}\n\n\/\/ XXXManishearth Check if the following three are optimized to no-ops\n\/\/ if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type)\nimpl<T: JSTraceable> JSTraceable for Vec<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.trace(trc);\n }\n }\n}\n\n\/\/ XXXManishearth Check if the following three are optimized to no-ops\n\/\/ if e.trace() is a no-op (e.g it is an no_jsmanaged_fields type)\nimpl<T: JSTraceable + 'static> JSTraceable for SmallVec1<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.trace(trc);\n }\n }\n}\n\nimpl<T: JSTraceable> JSTraceable for Option<T> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n self.as_ref().map(|e| e.trace(trc));\n }\n}\n\nimpl<K,V,S,H> JSTraceable for HashMap<K, V, H> where K: Eq + Hash<S> + JSTraceable,\n V: JSTraceable,\n H: Hasher<S> {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n for e in self.iter() {\n e.val0().trace(trc);\n e.val1().trace(trc);\n }\n }\n}\n\nimpl<A: JSTraceable, B: JSTraceable> JSTraceable for (A, B) {\n #[inline]\n fn trace(&self, trc: *mut JSTracer) {\n let (ref a, ref b) = *self;\n a.trace(trc);\n b.trace(trc);\n }\n}\n\n\nno_jsmanaged_fields!(bool, f32, f64, String, Url)\nno_jsmanaged_fields!(uint, u8, u16, u32, u64)\nno_jsmanaged_fields!(int, i8, i16, i32, i64)\nno_jsmanaged_fields!(Sender<T>)\nno_jsmanaged_fields!(Receiver<T>)\nno_jsmanaged_fields!(ImageCacheTask, ScriptControlChan)\nno_jsmanaged_fields!(Atom, Namespace, Timer)\nno_jsmanaged_fields!(PropertyDeclarationBlock)\n\/\/ These three are interdependent, if you plan to put jsmanaged data\n\/\/ in one of these make sure it is propagated properly to containing structs\nno_jsmanaged_fields!(SubpageId, WindowSizeData, PipelineId)\nno_jsmanaged_fields!(QuirksMode)\nno_jsmanaged_fields!(Cx)\nno_jsmanaged_fields!(ResponseHeaderCollection, RequestHeaderCollection, 
Method)\nno_jsmanaged_fields!(ConstellationChan)\nno_jsmanaged_fields!(LayoutChan)\nno_jsmanaged_fields!(WindowProxyHandler)\nno_jsmanaged_fields!(UntrustedNodeAddress)\nno_jsmanaged_fields!(LengthOrPercentageOrAuto)\n\nimpl<'a> JSTraceable for &'a str {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl<A,B> JSTraceable for fn(A) -> B {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl JSTraceable for Box<ScriptListener+'static> {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl JSTraceable for Box<LayoutRPC+'static> {\n #[inline]\n fn trace(&self, _: *mut JSTracer) {\n \/\/ Do nothing\n }\n}\n\nimpl JSTraceable for TrustedNodeAddress {\n fn trace(&self, s: *mut JSTracer) {\n let TrustedNodeAddress(addr) = *self;\n let node = addr as *const Node;\n unsafe {\n JS::from_raw(node).trace(s)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make slice::split_at_mut example demonstrate mutability<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse Indent;\nuse utils;\nuse syntax::codemap::{self, BytePos, Span};\nuse codemap::SpanUtils;\nuse lists::{write_list, itemize_list, ListItem, ListFormatting, SeparatorTactic, definitive_tactic};\nuse types::rewrite_path;\nuse rewrite::{Rewrite, RewriteContext};\nuse visitor::FmtVisitor;\nuse std::cmp::Ordering;\n\nuse syntax::{ast, ptr};\n\nfn path_of(a: &ast::ViewPath_) -> &ast::Path {\n match a {\n &ast::ViewPath_::ViewPathSimple(_, ref p) => p,\n &ast::ViewPath_::ViewPathGlob(ref p) => p,\n &ast::ViewPath_::ViewPathList(ref p, _) => p,\n }\n}\n\nfn compare_path_segments(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {\n a.identifier.name.as_str().cmp(&b.identifier.name.as_str())\n}\n\nfn compare_paths(a: &ast::Path, b: &ast::Path) -> Ordering {\n for segment in a.segments.iter().zip(b.segments.iter()) {\n let ord = compare_path_segments(segment.0, segment.1);\n if ord != Ordering::Equal {\n return ord;\n }\n }\n a.segments.len().cmp(&b.segments.len())\n}\n\nfn compare_path_list_items(a: &ast::PathListItem, b: &ast::PathListItem) -> Ordering {\n let name_ordering = match a.node.name() {\n Some(a_name) => {\n match b.node.name() {\n Some(b_name) => a_name.name.as_str().cmp(&b_name.name.as_str()),\n None => Ordering::Greater,\n }\n }\n None => {\n match b.node.name() {\n Some(_) => Ordering::Less,\n None => Ordering::Equal,\n }\n }\n };\n if name_ordering == Ordering::Equal {\n match a.node.rename() {\n Some(a_rename) => {\n match b.node.rename() {\n Some(b_rename) => a_rename.name.as_str().cmp(&b_rename.name.as_str()),\n None => Ordering::Greater,\n }\n }\n None => {\n match b.node.name() {\n Some(_) => Ordering::Less,\n None => Ordering::Equal,\n }\n }\n }\n } else {\n name_ordering\n }\n}\n\nfn compare_path_list_item_lists(a_items: &Vec<ast::PathListItem>,\n b_items: &Vec<ast::PathListItem>)\n -> Ordering {\n let mut a = a_items.clone();\n let mut b = b_items.clone();\n a.sort_by(|a, b| compare_path_list_items(a, b));\n 
b.sort_by(|a, b| compare_path_list_items(a, b));\n for comparison_pair in a.iter().zip(b.iter()) {\n let ord = compare_path_list_items(comparison_pair.0, comparison_pair.1);\n if ord != Ordering::Equal {\n return ord;\n }\n }\n a.len().cmp(&b.len())\n}\n\nfn compare_view_path_types(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {\n use syntax::ast::ViewPath_::*;\n match (a, b) {\n (&ViewPathSimple(..), &ViewPathSimple(..)) => Ordering::Equal,\n (&ViewPathSimple(..), _) => Ordering::Less,\n (&ViewPathGlob(_), &ViewPathSimple(..)) => Ordering::Greater,\n (&ViewPathGlob(_), &ViewPathGlob(_)) => Ordering::Equal,\n (&ViewPathGlob(_), &ViewPathList(..)) => Ordering::Less,\n (&ViewPathList(_, ref a_items), &ViewPathList(_, ref b_items)) => {\n compare_path_list_item_lists(a_items, b_items)\n }\n (&ViewPathList(..), _) => Ordering::Greater,\n }\n}\n\nfn compare_view_paths(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {\n match compare_paths(path_of(a), path_of(b)) {\n Ordering::Equal => compare_view_path_types(a, b),\n cmp => cmp,\n }\n}\n\nfn compare_use_items(a: &ast::Item, b: &ast::Item) -> Option<Ordering> {\n match (&a.node, &b.node) {\n (&ast::ItemKind::Use(ref a_vp), &ast::ItemKind::Use(ref b_vp)) => {\n Some(compare_view_paths(&a_vp.node, &b_vp.node))\n }\n _ => None,\n }\n}\n\n\/\/ TODO (some day) remove unused imports, expand globs, compress many single\n\/\/ imports into a list import.\n\nimpl Rewrite for ast::ViewPath {\n \/\/ Returns an empty string when the ViewPath is empty (like foo::bar::{})\n fn rewrite(&self, context: &RewriteContext, width: usize, offset: Indent) -> Option<String> {\n match self.node {\n ast::ViewPath_::ViewPathList(_, ref path_list) if path_list.is_empty() => {\n Some(String::new())\n }\n ast::ViewPath_::ViewPathList(ref path, ref path_list) => {\n rewrite_use_list(width, offset, path, path_list, self.span, context)\n }\n ast::ViewPath_::ViewPathGlob(_) => {\n \/\/ FIXME convert to list?\n None\n }\n ast::ViewPath_::ViewPathSimple(ident, ref path) => {\n let ident_str = ident.to_string();\n \/\/ 4 = \" as \".len()\n let budget = try_opt!(width.checked_sub(ident_str.len() + 4));\n let path_str = try_opt!(rewrite_path(context, false, None, path, budget, offset));\n\n Some(if path.segments.last().unwrap().identifier == ident {\n path_str\n } else {\n format!(\"{} as {}\", path_str, ident_str)\n })\n }\n }\n }\n}\n\nimpl<'a> FmtVisitor<'a> {\n pub fn format_imports(&mut self, use_items: &[ptr::P<ast::Item>]) {\n let mut last_pos =\n use_items.first().map(|p_i| p_i.span.lo - BytePos(1)).unwrap_or(self.last_pos);\n let prefix = codemap::mk_sp(self.last_pos, last_pos);\n let mut ordered_use_items = use_items.iter()\n .map(|p_i| {\n let new_item = (&*p_i, last_pos);\n last_pos = p_i.span.hi;\n new_item\n })\n .collect::<Vec<_>>();\n \/\/ Order the imports by view-path & other import path properties\n ordered_use_items.sort_by(|a, b| compare_use_items(a.0, b.0).unwrap());\n \/\/ First, output the span before the first import\n self.format_missing(prefix.hi);\n for ordered in ordered_use_items {\n \/\/ Fake out the formatter by setting `self.last_pos` to the appropriate location before\n \/\/ each item before visiting it.\n self.last_pos = ordered.1;\n self.visit_item(&ordered.0);\n }\n self.last_pos = last_pos;\n }\n\n pub fn format_import(&mut self, vis: &ast::Visibility, vp: &ast::ViewPath, span: Span) {\n let vis = utils::format_visibility(vis);\n let mut offset = self.block_indent;\n offset.alignment += vis.len() + \"use \".len();\n \/\/ 1 = \";\"\n 
match vp.rewrite(&self.get_context(),\n self.config.max_width - offset.width() - 1,\n offset) {\n Some(ref s) if s.is_empty() => {\n \/\/ Format up to last newline\n let prev_span = codemap::mk_sp(self.last_pos, source!(self, span).lo);\n let span_end = match self.snippet(prev_span).rfind('\\n') {\n Some(offset) => self.last_pos + BytePos(offset as u32),\n None => source!(self, span).lo,\n };\n self.format_missing(span_end);\n self.last_pos = source!(self, span).hi;\n }\n Some(ref s) => {\n let s = format!(\"{}use {};\", vis, s);\n self.format_missing_with_indent(source!(self, span).lo);\n self.buffer.push_str(&s);\n self.last_pos = source!(self, span).hi;\n }\n None => {\n self.format_missing_with_indent(source!(self, span).lo);\n self.format_missing(source!(self, span).hi);\n }\n }\n }\n}\n\nfn rewrite_single_use_list(path_str: Option<String>, vpi: &ast::PathListItem) -> String {\n let path_item_str = if let ast::PathListItemKind::Ident { name, .. } = vpi.node {\n \/\/ A name.\n match path_str {\n Some(path_str) => format!(\"{}::{}\", path_str, name),\n None => name.to_string(),\n }\n } else {\n \/\/ `self`.\n match path_str {\n Some(path_str) => path_str,\n \/\/ This catches the import: use {self}, which is a compiler error, so we just\n \/\/ leave it alone.\n None => \"{self}\".to_owned(),\n }\n };\n\n append_alias(path_item_str, vpi)\n}\n\nfn rewrite_path_item(vpi: &&ast::PathListItem) -> Option<String> {\n let path_item_str = match vpi.node {\n ast::PathListItemKind::Ident { name, .. } => name.to_string(),\n ast::PathListItemKind::Mod { .. } => \"self\".to_owned(),\n };\n\n Some(append_alias(path_item_str, vpi))\n}\n\nfn append_alias(path_item_str: String, vpi: &ast::PathListItem) -> String {\n match vpi.node {\n ast::PathListItemKind::Ident { rename: Some(rename), .. } |\n ast::PathListItemKind::Mod { rename: Some(rename), .. 
} => {\n format!(\"{} as {}\", path_item_str, rename)\n }\n _ => path_item_str,\n }\n}\n\n\/\/ Pretty prints a multi-item import.\n\/\/ Assumes that path_list.len() > 0.\npub fn rewrite_use_list(width: usize,\n offset: Indent,\n path: &ast::Path,\n path_list: &[ast::PathListItem],\n span: Span,\n context: &RewriteContext)\n -> Option<String> {\n \/\/ Returns a different option to distinguish `::foo` and `foo`\n let opt_path_str = if !path.to_string().is_empty() {\n Some(path.to_string())\n } else if path.global {\n \/\/ path is absolute, we return an empty String to avoid a double `::`\n Some(String::new())\n } else {\n None\n };\n\n match path_list.len() {\n 0 => unreachable!(),\n 1 => return Some(rewrite_single_use_list(opt_path_str, &path_list[0])),\n _ => (),\n }\n\n \/\/ 2 = ::\n let path_separation_w = if opt_path_str.is_some() { 2 } else { 0 };\n \/\/ 1 = {\n let supp_indent = path.to_string().len() + path_separation_w + 1;\n \/\/ 1 = }\n let remaining_width = width.checked_sub(supp_indent + 1).unwrap_or(0);\n\n let mut items = {\n \/\/ Dummy value, see explanation below.\n let mut items = vec![ListItem::from_str(\"\")];\n let iter = itemize_list(context.codemap,\n path_list.iter(),\n \"}\",\n |vpi| vpi.span.lo,\n |vpi| vpi.span.hi,\n rewrite_path_item,\n context.codemap.span_after(span, \"{\"),\n span.hi);\n items.extend(iter);\n items\n };\n\n \/\/ We prefixed the item list with a dummy value so that we can\n \/\/ potentially move \"self\" to the front of the vector without touching\n \/\/ the rest of the items.\n let has_self = move_self_to_front(&mut items);\n let first_index = if has_self { 0 } else { 1 };\n\n if context.config.reorder_imported_names {\n items[1..].sort_by(|a, b| a.item.cmp(&b.item));\n }\n\n let tactic = definitive_tactic(&items[first_index..],\n ::lists::ListTactic::Mixed,\n remaining_width);\n let fmt = ListFormatting {\n tactic: tactic,\n separator: \",\",\n trailing_separator: SeparatorTactic::Never,\n indent: offset + supp_indent,\n \/\/ FIXME This is too conservative, and will not use all width\n \/\/ available\n \/\/ (loose 1 column (\";\"))\n width: remaining_width,\n ends_with_newline: false,\n config: context.config,\n };\n let list_str = try_opt!(write_list(&items[first_index..], &fmt));\n\n Some(match opt_path_str {\n Some(opt_path_str) => format!(\"{}::{{{}}}\", opt_path_str, list_str),\n None => format!(\"{{{}}}\", list_str),\n })\n}\n\n\/\/ Returns true when self item was found.\nfn move_self_to_front(items: &mut Vec<ListItem>) -> bool {\n match items.iter().position(|item| item.item.as_ref().map(|x| &x[..]) == Some(\"self\")) {\n Some(pos) => {\n items[0] = items.remove(pos);\n true\n }\n None => false,\n }\n}\n<commit_msg>Fix issue-1116<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse Indent;\nuse utils;\nuse syntax::codemap::{self, BytePos, Span};\nuse codemap::SpanUtils;\nuse lists::{write_list, itemize_list, ListItem, ListFormatting, SeparatorTactic, definitive_tactic};\nuse types::rewrite_path;\nuse rewrite::{Rewrite, RewriteContext};\nuse visitor::FmtVisitor;\nuse std::cmp::Ordering;\n\nuse syntax::{ast, ptr};\n\nfn path_of(a: &ast::ViewPath_) -> &ast::Path {\n match a {\n &ast::ViewPath_::ViewPathSimple(_, ref p) => p,\n &ast::ViewPath_::ViewPathGlob(ref p) => p,\n &ast::ViewPath_::ViewPathList(ref p, _) => p,\n }\n}\n\nfn compare_path_segments(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {\n a.identifier.name.as_str().cmp(&b.identifier.name.as_str())\n}\n\nfn compare_paths(a: &ast::Path, b: &ast::Path) -> Ordering {\n for segment in a.segments.iter().zip(b.segments.iter()) {\n let ord = compare_path_segments(segment.0, segment.1);\n if ord != Ordering::Equal {\n return ord;\n }\n }\n a.segments.len().cmp(&b.segments.len())\n}\n\nfn compare_path_list_items(a: &ast::PathListItem, b: &ast::PathListItem) -> Ordering {\n let name_ordering = match a.node.name() {\n Some(a_name) => {\n match b.node.name() {\n Some(b_name) => a_name.name.as_str().cmp(&b_name.name.as_str()),\n None => Ordering::Greater,\n }\n }\n None => {\n match b.node.name() {\n Some(_) => Ordering::Less,\n None => Ordering::Equal,\n }\n }\n };\n if name_ordering == Ordering::Equal {\n match a.node.rename() {\n Some(a_rename) => {\n match b.node.rename() {\n Some(b_rename) => a_rename.name.as_str().cmp(&b_rename.name.as_str()),\n None => Ordering::Greater,\n }\n }\n None => {\n match b.node.name() {\n Some(_) => Ordering::Less,\n None => Ordering::Equal,\n }\n }\n }\n } else {\n name_ordering\n }\n}\n\nfn compare_path_list_item_lists(a_items: &Vec<ast::PathListItem>,\n b_items: &Vec<ast::PathListItem>)\n -> Ordering {\n let mut a = a_items.clone();\n let mut b = b_items.clone();\n a.sort_by(|a, b| compare_path_list_items(a, b));\n b.sort_by(|a, b| compare_path_list_items(a, b));\n for comparison_pair in a.iter().zip(b.iter()) {\n let ord = compare_path_list_items(comparison_pair.0, comparison_pair.1);\n if ord != Ordering::Equal {\n return ord;\n }\n }\n a.len().cmp(&b.len())\n}\n\nfn compare_view_path_types(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {\n use syntax::ast::ViewPath_::*;\n match (a, b) {\n (&ViewPathSimple(..), &ViewPathSimple(..)) => Ordering::Equal,\n (&ViewPathSimple(..), _) => Ordering::Less,\n (&ViewPathGlob(_), &ViewPathSimple(..)) => Ordering::Greater,\n (&ViewPathGlob(_), &ViewPathGlob(_)) => Ordering::Equal,\n (&ViewPathGlob(_), &ViewPathList(..)) => Ordering::Less,\n (&ViewPathList(_, ref a_items), &ViewPathList(_, ref b_items)) => {\n compare_path_list_item_lists(a_items, b_items)\n }\n (&ViewPathList(..), _) => Ordering::Greater,\n }\n}\n\nfn compare_view_paths(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {\n match compare_paths(path_of(a), path_of(b)) {\n Ordering::Equal => compare_view_path_types(a, b),\n cmp => cmp,\n }\n}\n\nfn compare_use_items(a: &ast::Item, b: &ast::Item) -> Option<Ordering> {\n match (&a.node, &b.node) {\n (&ast::ItemKind::Use(ref a_vp), &ast::ItemKind::Use(ref b_vp)) => {\n Some(compare_view_paths(&a_vp.node, &b_vp.node))\n }\n _ => None,\n }\n}\n\n\/\/ TODO (some day) remove unused imports, expand globs, compress many single\n\/\/ imports into a list import.\n\nimpl Rewrite for ast::ViewPath {\n \/\/ Returns an empty string 
when the ViewPath is empty (like foo::bar::{})\n fn rewrite(&self, context: &RewriteContext, width: usize, offset: Indent) -> Option<String> {\n match self.node {\n ast::ViewPath_::ViewPathList(_, ref path_list) if path_list.is_empty() => {\n Some(String::new())\n }\n ast::ViewPath_::ViewPathList(ref path, ref path_list) => {\n rewrite_use_list(width, offset, path, path_list, self.span, context)\n }\n ast::ViewPath_::ViewPathGlob(_) => {\n \/\/ FIXME convert to list?\n None\n }\n ast::ViewPath_::ViewPathSimple(ident, ref path) => {\n let ident_str = ident.to_string();\n \/\/ 4 = \" as \".len()\n let budget = try_opt!(width.checked_sub(ident_str.len() + 4));\n let path_str = try_opt!(rewrite_path(context, false, None, path, budget, offset));\n\n Some(if path.segments.last().unwrap().identifier == ident {\n path_str\n } else {\n format!(\"{} as {}\", path_str, ident_str)\n })\n }\n }\n }\n}\n\nimpl<'a> FmtVisitor<'a> {\n pub fn format_imports(&mut self, use_items: &[ptr::P<ast::Item>]) {\n let mut last_pos = use_items.first()\n .and_then(|p_i| p_i.span.lo.0.checked_sub(1))\n .map(|span_lo| BytePos(span_lo))\n .unwrap_or(self.last_pos);\n let prefix = codemap::mk_sp(self.last_pos, last_pos);\n let mut ordered_use_items = use_items.iter()\n .map(|p_i| {\n let new_item = (&*p_i, last_pos);\n last_pos = p_i.span.hi;\n new_item\n })\n .collect::<Vec<_>>();\n \/\/ Order the imports by view-path & other import path properties\n ordered_use_items.sort_by(|a, b| compare_use_items(a.0, b.0).unwrap());\n \/\/ First, output the span before the first import\n self.format_missing(prefix.hi);\n for ordered in ordered_use_items {\n \/\/ Fake out the formatter by setting `self.last_pos` to the appropriate location before\n \/\/ each item before visiting it.\n self.last_pos = ordered.1;\n self.visit_item(&ordered.0);\n }\n self.last_pos = last_pos;\n }\n\n pub fn format_import(&mut self, vis: &ast::Visibility, vp: &ast::ViewPath, span: Span) {\n let vis = utils::format_visibility(vis);\n let mut offset = self.block_indent;\n offset.alignment += vis.len() + \"use \".len();\n \/\/ 1 = \";\"\n match vp.rewrite(&self.get_context(),\n self.config.max_width - offset.width() - 1,\n offset) {\n Some(ref s) if s.is_empty() => {\n \/\/ Format up to last newline\n let prev_span = codemap::mk_sp(self.last_pos, source!(self, span).lo);\n let span_end = match self.snippet(prev_span).rfind('\\n') {\n Some(offset) => self.last_pos + BytePos(offset as u32),\n None => source!(self, span).lo,\n };\n self.format_missing(span_end);\n self.last_pos = source!(self, span).hi;\n }\n Some(ref s) => {\n let s = format!(\"{}use {};\", vis, s);\n self.format_missing_with_indent(source!(self, span).lo);\n self.buffer.push_str(&s);\n self.last_pos = source!(self, span).hi;\n }\n None => {\n self.format_missing_with_indent(source!(self, span).lo);\n self.format_missing(source!(self, span).hi);\n }\n }\n }\n}\n\nfn rewrite_single_use_list(path_str: Option<String>, vpi: &ast::PathListItem) -> String {\n let path_item_str = if let ast::PathListItemKind::Ident { name, .. 
} = vpi.node {\n \/\/ A name.\n match path_str {\n Some(path_str) => format!(\"{}::{}\", path_str, name),\n None => name.to_string(),\n }\n } else {\n \/\/ `self`.\n match path_str {\n Some(path_str) => path_str,\n \/\/ This catches the import: use {self}, which is a compiler error, so we just\n \/\/ leave it alone.\n None => \"{self}\".to_owned(),\n }\n };\n\n append_alias(path_item_str, vpi)\n}\n\nfn rewrite_path_item(vpi: &&ast::PathListItem) -> Option<String> {\n let path_item_str = match vpi.node {\n ast::PathListItemKind::Ident { name, .. } => name.to_string(),\n ast::PathListItemKind::Mod { .. } => \"self\".to_owned(),\n };\n\n Some(append_alias(path_item_str, vpi))\n}\n\nfn append_alias(path_item_str: String, vpi: &ast::PathListItem) -> String {\n match vpi.node {\n ast::PathListItemKind::Ident { rename: Some(rename), .. } |\n ast::PathListItemKind::Mod { rename: Some(rename), .. } => {\n format!(\"{} as {}\", path_item_str, rename)\n }\n _ => path_item_str,\n }\n}\n\n\/\/ Pretty prints a multi-item import.\n\/\/ Assumes that path_list.len() > 0.\npub fn rewrite_use_list(width: usize,\n offset: Indent,\n path: &ast::Path,\n path_list: &[ast::PathListItem],\n span: Span,\n context: &RewriteContext)\n -> Option<String> {\n \/\/ Returns a different option to distinguish `::foo` and `foo`\n let opt_path_str = if !path.to_string().is_empty() {\n Some(path.to_string())\n } else if path.global {\n \/\/ path is absolute, we return an empty String to avoid a double `::`\n Some(String::new())\n } else {\n None\n };\n\n match path_list.len() {\n 0 => unreachable!(),\n 1 => return Some(rewrite_single_use_list(opt_path_str, &path_list[0])),\n _ => (),\n }\n\n \/\/ 2 = ::\n let path_separation_w = if opt_path_str.is_some() { 2 } else { 0 };\n \/\/ 1 = {\n let supp_indent = path.to_string().len() + path_separation_w + 1;\n \/\/ 1 = }\n let remaining_width = width.checked_sub(supp_indent + 1).unwrap_or(0);\n\n let mut items = {\n \/\/ Dummy value, see explanation below.\n let mut items = vec![ListItem::from_str(\"\")];\n let iter = itemize_list(context.codemap,\n path_list.iter(),\n \"}\",\n |vpi| vpi.span.lo,\n |vpi| vpi.span.hi,\n rewrite_path_item,\n context.codemap.span_after(span, \"{\"),\n span.hi);\n items.extend(iter);\n items\n };\n\n \/\/ We prefixed the item list with a dummy value so that we can\n \/\/ potentially move \"self\" to the front of the vector without touching\n \/\/ the rest of the items.\n let has_self = move_self_to_front(&mut items);\n let first_index = if has_self { 0 } else { 1 };\n\n if context.config.reorder_imported_names {\n items[1..].sort_by(|a, b| a.item.cmp(&b.item));\n }\n\n let tactic = definitive_tactic(&items[first_index..],\n ::lists::ListTactic::Mixed,\n remaining_width);\n let fmt = ListFormatting {\n tactic: tactic,\n separator: \",\",\n trailing_separator: SeparatorTactic::Never,\n indent: offset + supp_indent,\n \/\/ FIXME This is too conservative, and will not use all width\n \/\/ available\n \/\/ (loose 1 column (\";\"))\n width: remaining_width,\n ends_with_newline: false,\n config: context.config,\n };\n let list_str = try_opt!(write_list(&items[first_index..], &fmt));\n\n Some(match opt_path_str {\n Some(opt_path_str) => format!(\"{}::{{{}}}\", opt_path_str, list_str),\n None => format!(\"{{{}}}\", list_str),\n })\n}\n\n\/\/ Returns true when self item was found.\nfn move_self_to_front(items: &mut Vec<ListItem>) -> bool {\n match items.iter().position(|item| item.item.as_ref().map(|x| &x[..]) == Some(\"self\")) {\n Some(pos) => {\n items[0] = 
items.remove(pos);\n true\n }\n None => false,\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added src<commit_after>extern crate rustc_serialize;\nuse rustc_serialize::json;\n\n#[derive(RustcDecodable, RustcEncodable)]\nstruct Command {\n\tcid: u64,\n\tuid: u32,\n\ttarget: String,\n}\n\nfn get_string(cmd: &Command) -> String {\n\treturn json::encode(&cmd).unwrap();\n}\n\nfn main(){\n\tlet bs = Command{\n\t\tcid: 0,\n\t\tuid: 1,\n\t\ttarget: \"something\".to_string()\n\t};\n\tprintln!(\"So guess what?: {}\", get_string(&bs));\n}\n\n#[test]\nfn some_test() {\n\tassert_eq!(\n\t\tget_string(\n\t\t\t&Command{cid: 0, uid: 1, target: \"cat\".to_string()}\n\t\t), \"{\\\"cid\\\":0,\\\"uid\\\":1,\\\"target\\\":\\\"cat\\\"}\");\n}\n\n#[test]\nfn some_fail() {\n\tassert!(false);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix a reachable unreachable.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Build failure due invalid formating. Fixed.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>random spawn levels start in a big room<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>matrix: start implementing some event structures<commit_after>use rustc_serialize::json::Json;\nuse matrix::json as mjson;\n\n#[derive(Clone, Debug)]\npub struct UserID {\n pub nickname: String,\n pub homeserver: String \n}\n\nimpl UserID {\n pub fn from_str(s: &str) -> Self {\n let parts: Vec<&str> = s.split(\":\").collect();\n UserID {\n nickname: parts[0][1..].to_string(),\n homeserver: parts[1].to_string()\n }\n }\n}\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq)]\npub struct RoomID {\n pub id: String,\n pub homeserver: String\n}\n\nimpl RoomID {\n pub fn from_str(s: &str) -> Self {\n let parts: Vec<&str> = s.split(\":\").collect();\n RoomID {\n id: parts[0][1..].to_string(),\n homeserver: parts[1].to_string()\n }\n }\n}\n\npub enum MembershipAction {\n Join\n}\n\nimpl MembershipAction {\n pub fn from_str(s: &str) -> Self {\n match s {\n \"join\" => MembershipAction::Join,\n _ => panic!(\"unknown membership action {:?}\", s)\n }\n }\n}\n\npub enum RoomEvent {\n CanonicalAlias(String),\n JoinRules(String),\n Membership(UserID, MembershipAction),\n HistoryVisibility(String),\n Create,\n Aliases,\n PowerLevels\n}\n\npub enum TextEvent {\n RoomMessage(UserID, RoomID, String)\n}\n\npub enum EventData {\n Room(RoomID, RoomEvent),\n Text(TextEvent),\n}\n\npub struct Event {\n pub id: String,\n pub data: EventData\n}\n\nimpl Event {\n pub fn from_json(json: &Json) -> Self {\n let tokens: Vec<&str> = mjson::string(json, \"type\").trim().split(\".\").collect();\n assert!(tokens[0] == \"m\");\n Event {\n id: mjson::string(json, \"event_id\").to_string(),\n data: match tokens[1] {\n \"room\" =>\n Self::from_room_json(tokens[2], json),\n e => panic!(\"Unknown event {:?}!\", e)\n }\n }\n }\n\n fn from_room_json(event_type: &str, json: &Json) -> EventData {\n EventData::Room(\n RoomID::from_str(mjson::string(json, \"room_id\")),\n match event_type {\n \"canonical_alias\" =>\n RoomEvent::CanonicalAlias(mjson::string(json, \"content.alias\").to_string()),\n \"join_rules\" =>\n RoomEvent::JoinRules(mjson::string(json, \"content.join_rule\").to_string()),\n \"member\" =>\n RoomEvent::Membership(UserID::from_str(mjson::string(json, \"user_id\")), MembershipAction::from_str(mjson::string(json, \"content.membership\"))),\n \"history_visibility\" =>\n RoomEvent::HistoryVisibility(mjson::string(json, \"content.history_visibility\").to_string()),\n \"create\" =>\n 
RoomEvent::Create,\n \"aliases\" =>\n RoomEvent::Aliases,\n \"power_levels\" =>\n RoomEvent::PowerLevels,\n e => panic!(\"Unknown room event {:?}: {:?}\", e, json)\n }\n )\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>src\/rpc\/serialize.rs: cargo fmt<commit_after><|endoftext|>"} {"text":"<commit_before>use regex::Regex;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse super::file::*;\n\npub struct ParserError {\n summary: String,\n parsertext: String,\n index: i32,\n explanation: Option<String>,\n}\n\nimpl ParserError {\n fn new(sum: &'static str, text: String, idx: i32, expl: &'static str) -> ParserError {\n ParserError {\n summary: String::from(sum),\n parsertext: text,\n index: idx,\n explanation: Some(String::from(expl)),\n }\n }\n\n fn short(sum: &'static str, text: String, idx: i32) -> ParserError {\n ParserError {\n summary: String::from(sum),\n parsertext: text,\n index: idx,\n explanation: None\n }\n }\n}\n\nimpl Error for ParserError {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Debug for ParserError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"ParserError: {}\\n\\n\", self.summary);\n\n if let Some(ref e) = self.explanation {\n write!(fmt, \"{}\\n\\n\", e);\n }\n\n write!(fmt, \"On position {}\\nin\\n{}\", self.index, self.parsertext);\n Ok(())\n }\n\n}\n\nimpl Display for ParserError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"ParserError: {}\", self.summary);\n\n if let Some(ref e) = self.explanation {\n write!(fmt, \"\\n\\n{}\", e);\n }\n\n Ok(())\n }\n\n}\n\n\npub trait FileHeaderParser : Sized {\n fn new(spec: &FileHeaderSpec) -> Self;\n fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError>;\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError>;\n}\n\npub trait FileDataParser<FD: FileData + Sized> : Sized {\n fn new() -> Self;\n fn read(&self, string: Option<String>) -> Result<FD, ParserError>;\n fn write(&self, data: &FD) -> Result<String, ParserError>;\n}\n\ntype TextTpl = (Option<String>, Option<String>);\n\npub struct Parser<HP, DP>\n{\n headerp : HP,\n datap : DP,\n}\n\nimpl<HP, DP> Parser<HP, DP> where\n HP: FileHeaderParser,\n{\n\n fn new(headerp: HP, datap: DP) -> Parser<HP, DP> {\n Parser {\n headerp: headerp,\n datap: datap,\n }\n }\n\n fn read<FD>(&self, s: String) -> Result<(FileHeaderData, FD), ParserError>\n where FD: FileData + Sized,\n DP: FileDataParser<FD>\n {\n let divided = divide_text(&s);\n\n if divided.is_err() {\n return Err(divided.err().unwrap());\n }\n\n let (header, data) = divided.ok().unwrap();\n\n let h_parseres = self.headerp.read(header);\n let d_parseres = self.datap.read(data);\n\n if h_parseres.is_err() {\n return Err(h_parseres.err().unwrap());\n }\n\n if d_parseres.is_err() {\n return Err(d_parseres.err().unwrap());\n }\n\n Ok((h_parseres.ok().unwrap(), d_parseres.ok().unwrap()))\n }\n\n fn write<FD>(&self, tpl : (FileHeaderData, FD)) -> Result<String, ParserError>\n where FD: FileData + Sized,\n DP: FileDataParser<FD>\n {\n let (header, data) = tpl;\n let h_text = self.headerp.write(&header);\n let d_text = self.datap.write(&data);\n\n if h_text.is_err() {\n return Err(h_text.err().unwrap());\n }\n\n if d_text.is_err() {\n return Err(d_text.err().unwrap());\n }\n\n Ok(h_text.ok().unwrap() + &d_text.ok().unwrap()[..])\n }\n}\n\nfn divide_text(text: &String) -> Result<TextTpl, ParserError> {\n let re = 
Regex::new(r\"(?m)^\\-\\-\\-$\\n(.*)^\\-\\-\\-$\\n(.*)\").unwrap();\n\n let captures = re.captures(&text[..]).unwrap_or(\n return Err(ParserError::new(\"Cannot run regex on text\",\n text.clone(), 0,\n \"Cannot run regex on text to divide it into header and content.\"))\n );\n\n if captures.len() != 2 {\n return Err(ParserError::new(\"Unexpected Regex output\",\n text.clone(), 0,\n \"The regex to divide text into header and content had an unexpected output.\"))\n }\n\n let header = captures.at(0).map(|s| String::from(s));\n let content = captures.at(1).map(|s| String::from(s));\n Ok((header, content))\n}\n\n<commit_msg>Move divide_text() into scope of impl Parser<commit_after>use regex::Regex;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse super::file::*;\n\npub struct ParserError {\n summary: String,\n parsertext: String,\n index: i32,\n explanation: Option<String>,\n}\n\nimpl ParserError {\n fn new(sum: &'static str, text: String, idx: i32, expl: &'static str) -> ParserError {\n ParserError {\n summary: String::from(sum),\n parsertext: text,\n index: idx,\n explanation: Some(String::from(expl)),\n }\n }\n\n fn short(sum: &'static str, text: String, idx: i32) -> ParserError {\n ParserError {\n summary: String::from(sum),\n parsertext: text,\n index: idx,\n explanation: None\n }\n }\n}\n\nimpl Error for ParserError {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Debug for ParserError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"ParserError: {}\\n\\n\", self.summary);\n\n if let Some(ref e) = self.explanation {\n write!(fmt, \"{}\\n\\n\", e);\n }\n\n write!(fmt, \"On position {}\\nin\\n{}\", self.index, self.parsertext);\n Ok(())\n }\n\n}\n\nimpl Display for ParserError {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"ParserError: {}\", self.summary);\n\n if let Some(ref e) = self.explanation {\n write!(fmt, \"\\n\\n{}\", e);\n }\n\n Ok(())\n }\n\n}\n\n\npub trait FileHeaderParser : Sized {\n fn new(spec: &FileHeaderSpec) -> Self;\n fn read(&self, string: Option<String>) -> Result<FileHeaderData, ParserError>;\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError>;\n}\n\npub trait FileDataParser<FD: FileData + Sized> : Sized {\n fn new() -> Self;\n fn read(&self, string: Option<String>) -> Result<FD, ParserError>;\n fn write(&self, data: &FD) -> Result<String, ParserError>;\n}\n\ntype TextTpl = (Option<String>, Option<String>);\n\npub struct Parser<HP, DP>\n{\n headerp : HP,\n datap : DP,\n}\n\nimpl<HP, DP> Parser<HP, DP> where\n HP: FileHeaderParser,\n{\n\n fn new(headerp: HP, datap: DP) -> Parser<HP, DP> {\n Parser {\n headerp: headerp,\n datap: datap,\n }\n }\n\n fn read<FD>(&self, s: String) -> Result<(FileHeaderData, FD), ParserError>\n where FD: FileData + Sized,\n DP: FileDataParser<FD>\n {\n let divided = self.divide_text(&s);\n\n if divided.is_err() {\n return Err(divided.err().unwrap());\n }\n\n let (header, data) = divided.ok().unwrap();\n\n let h_parseres = self.headerp.read(header);\n let d_parseres = self.datap.read(data);\n\n if h_parseres.is_err() {\n return Err(h_parseres.err().unwrap());\n }\n\n if d_parseres.is_err() {\n return Err(d_parseres.err().unwrap());\n }\n\n Ok((h_parseres.ok().unwrap(), d_parseres.ok().unwrap()))\n }\n\n fn write<FD>(&self, tpl : (FileHeaderData, FD)) -> Result<String, ParserError>\n where FD: FileData + Sized,\n DP: FileDataParser<FD>\n {\n let (header, 
data) = tpl;\n let h_text = self.headerp.write(&header);\n let d_text = self.datap.write(&data);\n\n if h_text.is_err() {\n return Err(h_text.err().unwrap());\n }\n\n if d_text.is_err() {\n return Err(d_text.err().unwrap());\n }\n\n Ok(h_text.ok().unwrap() + &d_text.ok().unwrap()[..])\n }\n\n fn divide_text(&self, text: &String) -> Result<TextTpl, ParserError> {\n let re = Regex::new(r\"(?m)^\\-\\-\\-$\\n(.*)^\\-\\-\\-$\\n(.*)\").unwrap();\n\n let captures = re.captures(&text[..]).unwrap_or(\n return Err(ParserError::new(\"Cannot run regex on text\",\n text.clone(), 0,\n \"Cannot run regex on text to divide it into header and content.\"))\n );\n\n if captures.len() != 2 {\n return Err(ParserError::new(\"Unexpected Regex output\",\n text.clone(), 0,\n \"The regex to divide text into header and content had an unexpected output.\"))\n }\n\n let header = captures.at(0).map(|s| String::from(s));\n let content = captures.at(1).map(|s| String::from(s));\n Ok((header, content))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add more tests<commit_after>extern crate base64;\nuse std::str;\n\n#[test]\nfn test_encode_decode_valid() {\n let examples = [\n \/\/ empty\n (\"\", \"\"),\n \/\/ no padding chars\n (\"qwe\", \"cXdl\"),\n \/\/ one padding char\n (\"qw\", \"cXc=\"),\n \/\/ two apdding chars\n (\"q\", \"cQ==\"),\n \/\/ all bytes from 0 to 127\n (\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f\\x10\\x11\\\n \\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f !\\\"#$%&\\'\\\n ()*+,-.\/0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]^_`abcdefgh\\\n ijklmnopqrstuvwxyz{|}~\\x7f\",\n \"AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEy\\\n MzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2Rl\\\n ZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8=\"\n ),\n ];\n\n for &(data, encoded) in examples.iter() {\n assert_eq!(\n str::from_utf8(&base64::encode(data.as_bytes())),\n str::from_utf8(&encoded.as_bytes())\n );\n assert_eq!(\n str::from_utf8(&base64::decode(encoded.as_bytes()).unwrap()),\n str::from_utf8(&data.as_bytes())\n )\n }\n}\n\n#[test]\nfn test_decode() {\n let examples = [\n (Ok(\"qw\"), \"\\n\\n\\nc\\nX\\nc\\n=\"),\n (Err(\"Invalid padding\"), \"cXc\"),\n (Err(\"Invalid character\"), \"cXc&\"),\n ];\n\n for &(data, encoded) in examples.iter() {\n let res = base64::decode(encoded.as_bytes());\n match (res, data) {\n (Ok(res), Ok(data)) => assert_eq!(\n str::from_utf8(&res).unwrap(),\n data\n ),\n (Ok(_), Err(msg)) => panic!(\n \"base64::decode return Ok when Err({}) expected\",\n msg\n ),\n (Err(msg), Ok(_)) => panic!(\n \"base64::decode return Err({}) when it not expected\",\n msg\n ),\n (Err(msg), Err(expected)) => assert_eq!(\n msg, expected\n )\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:sparkles: add command trait for executor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add JIT example<commit_after>extern crate llvm;\nuse llvm::*;\nuse std::mem;\n\nfn main() {\n\n let context = Context::new();\n let mut module = context.module_create_with_name(\"sum\");\n let mut builder = context.create_builder();\n\n let function_type = llvm::function_type(\n context.int64_type(),\n vec![context.int64_type(), context.int64_type(), context.int64_type()],\n false);\n let mut func = module.add_function(function_type, \"fname\");\n let bb = context.append_basic_block(&mut func, \"fname\");\n builder.position_at_end(bb);\n\n \/\/ get the function's arguments\n let x = 
func.get_param(0).unwrap();\n    let y = func.get_param(1).unwrap();\n    let z = func.get_param(2).unwrap();\n\n    let sum = builder.build_add(x, y, \"sum\");\n    let sum = builder.build_add(sum, z, \"sum\");\n    builder.build_ret(sum);\n\n    module.dump();\n\n    llvm::link_in_mcjit();\n    llvm::initialize_native_target();\n    llvm::initialize_native_asm_printer();\n\n    let ee = llvm::ExecutionEngine::create_for_module(module).unwrap();\n    let addr = ee.get_function_address(\"fname\").unwrap();\n\n    unsafe {\n        let f: extern \"C\" fn(u64, u64, u64) -> u64 = mem::transmute(addr);\n\n        let x: u64 = 1;\n        let y: u64 = 1;\n        let z: u64 = 1;\n        let res = f(x, y, z);\n\n        println!(\"{} + {} + {} = {}\", x, y, z, res);\n    }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Pick more appropriate int types for ripemd160<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added hang.rs file<commit_after>\n\n\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cleaned up geometric generators<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse collections::HashMap;\nuse std::os;\nuse std_url;\nuse std_url::Url;\n\n\/**\nCreate a URL object from a string. Does various helpful browsery things like\n\n* If there's no current url and the path looks like a file then it will\n create a file url based of the current working directory\n* If there's a current url and the new path is relative then the new url\n is based off the current url\n\n*\/\n\/\/ TODO: about:failure->\npub fn try_parse_url(str_url: &str, base_url: Option<std_url::Url>) -> Result<std_url::Url, ~str> {\n    let str_url = str_url.trim_chars(& &[' ', '\\t', '\\n', '\\r', '\\x0C']).to_owned();\n    let schm = std_url::get_scheme(str_url);\n    let str_url = match schm {\n        Err(_) => {\n            if base_url.is_none() {\n                \/\/ Assume we've been given a file path. 
If it's absolute just return\n \/\/ it, otherwise make it absolute with the cwd.\n if str_url.starts_with(\"\/\") {\n \"file:\/\/\".to_owned() + str_url\n } else {\n let mut path = os::getcwd();\n path.push(str_url);\n \/\/ FIXME (#1094): not the right way to transform a path\n \"file:\/\/\".to_owned() + path.display().to_str()\n }\n } else {\n let base_url = base_url.unwrap();\n debug!(\"parse_url: base_url: {:?}\", base_url);\n\n let mut new_url = base_url.clone();\n new_url.query = vec!();\n new_url.fragment = None;\n\n if str_url.starts_with(\"\/\/\") {\n new_url.scheme + \":\" + str_url\n } else if base_url.path.is_empty() || str_url.starts_with(\"\/\") {\n new_url.path = \"\/\".to_owned();\n new_url.to_str() + str_url.trim_left_chars(&'\/')\n } else if str_url.starts_with(\"#\") {\n new_url.to_str() + str_url\n } else { \/\/ relative path\n let base_path = base_url.path.trim_right_chars(&|c: char| c != '\/');\n new_url.path = base_path.to_owned();\n new_url.to_str() + str_url\n }\n }\n },\n Ok((scheme, page)) => {\n match scheme.as_slice() {\n \"about\" => {\n match page.as_slice() {\n \"crash\" => {\n fail!(\"about:crash\");\n }\n \"failure\" => {\n let mut path = os::self_exe_path().expect(\"can't get exe path\");\n path.push(\"..\/src\/test\/html\/failure.html\");\n \/\/ FIXME (#1094): not the right way to transform a path\n \"file:\/\/\".to_owned() + path.display().to_str()\n }\n \/\/ TODO: handle the rest of the about: pages\n _ => str_url\n }\n },\n \"data\" => {\n \/\/ Drop whitespace within data: URLs, e.g. newlines within a base64\n \/\/ src=\"...\" block. Whitespace intended as content should be\n \/\/ %-encoded or base64'd.\n str_url.chars().filter(|&c| !c.is_whitespace()).collect()\n },\n _ => str_url\n }\n }\n };\n\n std_url::from_str(str_url)\n}\n\npub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url {\n \/\/ FIXME: Need to handle errors\n try_parse_url(str_url, base_url).ok().expect(\"URL parsing failed\")\n}\n\n\n#[cfg(test)]\nmod parse_url_tests {\n use super::parse_url;\n use std::os;\n\n #[test]\n fn should_create_absolute_file_url_if_base_url_is_none_and_str_url_looks_filey() {\n let file = \"local.html\";\n let url = parse_url(file, None);\n debug!(\"url: {:?}\", url);\n assert!(\"file\" == url.scheme);\n let path = os::getcwd();\n \/\/ FIXME (#1094): not the right way to transform a path\n assert!(url.path.contains(path.display().to_str()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_1() {\n let old_str = \"http:\/\/example.com\";\n let old_url = parse_url(old_str, None);\n let new_str = \"index.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_2() {\n let old_str = \"http:\/\/example.com\/\";\n let old_url = parse_url(old_str, None);\n let new_str = \"index.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_3() {\n let old_str = \"http:\/\/example.com\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/crumpet.html\" == new_url.path);\n }\n\n #[test]\n fn 
should_create_url_based_on_old_url_4() {\n let old_str = \"http:\/\/example.com\/snarf\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/snarf\/crumpet.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_5() {\n let old_str = \"http:\/\/example.com\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"#top\";\n let new_url = parse_url(new_str, Some(old_url));\n\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n assert!(new_url.fragment == Some(\"top\".to_owned()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_6() {\n use std_url::UserInfo;\n\n let old_str = \"http:\/\/foo:bar@example.com:8080\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"#top\";\n let new_url = parse_url(new_str, Some(old_url));\n\n assert!(\"http\" == new_url.scheme);\n assert!(new_url.user == Some(UserInfo { user: \"foo\".to_owned(), pass: Some(\"bar\".to_owned()) }));\n assert!(\"example.com\" == new_url.host);\n assert!(new_url.port == Some(\"8080\".to_owned()));\n assert!(\"\/index.html\" == new_url.path);\n assert!(new_url.fragment == Some(\"top\".to_owned()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_7() {\n let old_str = \"https:\/\/example.com\/snarf\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"\/\/example.com\/crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"https\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/crumpet.html\" == new_url.path);\n }\n\n}\n\npub type UrlMap<T> = HashMap<std_url::Url, T>;\n\npub fn url_map<T: Clone + 'static>() -> UrlMap<T> {\n HashMap::new()\n}\n\n\npub fn is_image_data(uri: &str) -> bool {\n static types: &'static [&'static str] = &[&\"data:image\/png\", &\"data:image\/gif\", &\"data:image\/jpeg\"];\n types.iter().any(|&type_| uri.starts_with(type_))\n}\n\n\n<commit_msg>auto merge of #2407 : Ms2ger\/servo\/try_parse_url-owned, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse collections::HashMap;\nuse std::os;\nuse std_url;\nuse std_url::Url;\n\n\/**\nCreate a URL object from a string. Does various helpful browsery things like\n\n* If there's no current url and the path looks like a file then it will\n create a file url based of the current working directory\n* If there's a current url and the new path is relative then the new url\n is based off the current url\n\n*\/\n\/\/ TODO: about:failure->\npub fn try_parse_url(str_url: &str, base_url: Option<std_url::Url>) -> Result<std_url::Url, ~str> {\n let str_url = str_url.trim_chars(& &[' ', '\\t', '\\n', '\\r', '\\x0C']);\n let schm = std_url::get_scheme(str_url);\n let str_url = match schm {\n Err(_) => {\n if base_url.is_none() {\n \/\/ Assume we've been given a file path. 
If it's absolute just return\n \/\/ it, otherwise make it absolute with the cwd.\n if str_url.starts_with(\"\/\") {\n \"file:\/\/\".to_owned() + str_url\n } else {\n let mut path = os::getcwd();\n path.push(str_url);\n \/\/ FIXME (#1094): not the right way to transform a path\n \"file:\/\/\".to_owned() + path.display().to_str()\n }\n } else {\n let base_url = base_url.unwrap();\n debug!(\"parse_url: base_url: {:?}\", base_url);\n\n let mut new_url = base_url.clone();\n new_url.query = vec!();\n new_url.fragment = None;\n\n if str_url.starts_with(\"\/\/\") {\n new_url.scheme + \":\" + str_url\n } else if base_url.path.is_empty() || str_url.starts_with(\"\/\") {\n new_url.path = \"\/\".to_owned();\n new_url.to_str() + str_url.trim_left_chars(&'\/')\n } else if str_url.starts_with(\"#\") {\n new_url.to_str() + str_url\n } else { \/\/ relative path\n let base_path = base_url.path.trim_right_chars(&|c: char| c != '\/');\n new_url.path = base_path.to_owned();\n new_url.to_str() + str_url\n }\n }\n },\n Ok((scheme, page)) => {\n match scheme.as_slice() {\n \"about\" => {\n match page.as_slice() {\n \"crash\" => {\n fail!(\"about:crash\");\n }\n \"failure\" => {\n let mut path = os::self_exe_path().expect(\"can't get exe path\");\n path.push(\"..\/src\/test\/html\/failure.html\");\n \/\/ FIXME (#1094): not the right way to transform a path\n \"file:\/\/\".to_owned() + path.display().to_str()\n }\n \/\/ TODO: handle the rest of the about: pages\n _ => str_url.to_owned()\n }\n },\n \"data\" => {\n \/\/ Drop whitespace within data: URLs, e.g. newlines within a base64\n \/\/ src=\"...\" block. Whitespace intended as content should be\n \/\/ %-encoded or base64'd.\n str_url.chars().filter(|&c| !c.is_whitespace()).collect()\n },\n _ => str_url.to_owned()\n }\n }\n };\n\n std_url::from_str(str_url)\n}\n\npub fn parse_url(str_url: &str, base_url: Option<std_url::Url>) -> std_url::Url {\n \/\/ FIXME: Need to handle errors\n try_parse_url(str_url, base_url).ok().expect(\"URL parsing failed\")\n}\n\n\n#[cfg(test)]\nmod parse_url_tests {\n use super::parse_url;\n use std::os;\n\n #[test]\n fn should_create_absolute_file_url_if_base_url_is_none_and_str_url_looks_filey() {\n let file = \"local.html\";\n let url = parse_url(file, None);\n debug!(\"url: {:?}\", url);\n assert!(\"file\" == url.scheme);\n let path = os::getcwd();\n \/\/ FIXME (#1094): not the right way to transform a path\n assert!(url.path.contains(path.display().to_str()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_1() {\n let old_str = \"http:\/\/example.com\";\n let old_url = parse_url(old_str, None);\n let new_str = \"index.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_2() {\n let old_str = \"http:\/\/example.com\/\";\n let old_url = parse_url(old_str, None);\n let new_str = \"index.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_3() {\n let old_str = \"http:\/\/example.com\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/crumpet.html\" == new_url.path);\n 
}\n\n #[test]\n fn should_create_url_based_on_old_url_4() {\n let old_str = \"http:\/\/example.com\/snarf\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/snarf\/crumpet.html\" == new_url.path);\n }\n\n #[test]\n fn should_create_url_based_on_old_url_5() {\n let old_str = \"http:\/\/example.com\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"#top\";\n let new_url = parse_url(new_str, Some(old_url));\n\n assert!(\"http\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/index.html\" == new_url.path);\n assert!(new_url.fragment == Some(\"top\".to_owned()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_6() {\n use std_url::UserInfo;\n\n let old_str = \"http:\/\/foo:bar@example.com:8080\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"#top\";\n let new_url = parse_url(new_str, Some(old_url));\n\n assert!(\"http\" == new_url.scheme);\n assert!(new_url.user == Some(UserInfo { user: \"foo\".to_owned(), pass: Some(\"bar\".to_owned()) }));\n assert!(\"example.com\" == new_url.host);\n assert!(new_url.port == Some(\"8080\".to_owned()));\n assert!(\"\/index.html\" == new_url.path);\n assert!(new_url.fragment == Some(\"top\".to_owned()));\n }\n\n #[test]\n fn should_create_url_based_on_old_url_7() {\n let old_str = \"https:\/\/example.com\/snarf\/index.html\";\n let old_url = parse_url(old_str, None);\n let new_str = \"\/\/example.com\/crumpet.html\";\n let new_url = parse_url(new_str, Some(old_url));\n assert!(\"https\" == new_url.scheme);\n assert!(\"example.com\" == new_url.host);\n assert!(\"\/crumpet.html\" == new_url.path);\n }\n\n}\n\npub type UrlMap<T> = HashMap<std_url::Url, T>;\n\npub fn url_map<T: Clone + 'static>() -> UrlMap<T> {\n HashMap::new()\n}\n\n\npub fn is_image_data(uri: &str) -> bool {\n static types: &'static [&'static str] = &[&\"data:image\/png\", &\"data:image\/gif\", &\"data:image\/jpeg\"];\n types.iter().any(|&type_| uri.starts_with(type_))\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>NLL test for mutating &mut references<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(nll)]\n\nstruct List<T> {\n value: T,\n next: Option<Box<List<T>>>,\n}\n\nfn to_refs<T>(mut list: &mut List<T>) -> Vec<&mut T> {\n let mut result = vec![];\n loop {\n result.push(&mut list.value);\n if let Some(n) = list.next.as_mut() {\n list = n;\n } else {\n return result;\n }\n }\n}\n\nfn main() {\n let mut list = List { value: 1, next: None };\n let vec = to_refs(&mut list);\n assert_eq!(vec![&mut 1], vec);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::HashMap;\nuse std::slice::IterMut;\n\nuse uuid::Uuid;\n\nuse super::engine::{HasName, HasUuid};\n\n\n\/\/\/ Map UUID and name to T items.\n#[derive(Debug)]\npub struct Table<T: HasName + HasUuid> {\n items: Vec<T>,\n name_map: HashMap<String, usize>,\n uuid_map: HashMap<Uuid, usize>,\n}\n\n\/\/\/ All operations are O(1).\n\/\/\/ The implementation does not priviledge the name key over the UUID key\n\/\/\/ in any way. They are both treated as constants once the item has been\n\/\/\/ inserted. In order to rename a T item, it must be removed, renamed, and\n\/\/\/ reinserted under the new name.\nimpl<T: HasName + HasUuid> Table<T> {\n pub fn new() -> Self {\n Table {\n items: Vec::new(),\n name_map: HashMap::new(),\n uuid_map: HashMap::new(),\n }\n\n }\n\n pub fn is_empty(&self) -> bool {\n self.items.is_empty()\n }\n\n pub fn len(&self) -> usize {\n self.items.len()\n }\n\n \/\/\/ Returns true if map has an item corresponding to this name, else false.\n pub fn contains_name(&self, name: &str) -> bool {\n self.name_map.contains_key(name)\n }\n\n \/\/\/ Returns true if map has an item corresponding to this uuid, else false.\n #[allow(dead_code)]\n pub fn contains_uuid(&self, uuid: &Uuid) -> bool {\n self.uuid_map.contains_key(uuid)\n }\n\n \/\/\/ Get item by name.\n pub fn get_by_name(&self, name: &str) -> Option<&T> {\n self.name_map.get(name).map(|index| &self.items[*index])\n }\n\n \/\/\/ Get item by uuid.\n pub fn get_by_uuid(&self, uuid: &Uuid) -> Option<&T> {\n self.uuid_map.get(uuid).map(|index| &self.items[*index])\n }\n\n \/\/\/ Get mutable item by name.\n pub fn get_mut_by_name(&mut self, name: &str) -> Option<&mut T> {\n if let Some(index) = self.name_map.get(name) {\n Some(&mut self.items[*index])\n } else {\n None\n }\n }\n\n \/\/\/ Get mutable item by uuid.\n pub fn get_mut_by_uuid(&mut self, uuid: &Uuid) -> Option<&mut T> {\n if let Some(index) = self.uuid_map.get(uuid) {\n Some(&mut self.items[*index])\n } else {\n None\n }\n }\n\n \/\/\/ A mutable iterator through Pools.\n #[allow(dead_code)]\n pub fn iter_mut(&mut self) -> IterMut<T> {\n self.items.iter_mut()\n }\n\n \/\/\/ Removes the Pool corresponding to name if there is one.\n pub fn remove_by_name(&mut self, name: &str) -> Option<T> {\n if let Some(index) = self.name_map.remove(name) {\n \/\/ There is guaranteed to be a last because there is at least\n \/\/ one index into the items.\n\n \/\/ Insert mappings for the about-to-be swapped element\n {\n let last_item = &self.items.last().unwrap();\n self.name_map.insert(last_item.name().into(), index);\n self.uuid_map.insert(last_item.uuid().clone(), index);\n }\n\n \/\/ Remove the item we want to remove and also the uuid mapping\n let item = self.items.swap_remove(index);\n self.uuid_map.remove(item.uuid());\n\n \/\/ Remove the name again, in case there is only one item.\n self.name_map.remove(name);\n\n Some(item)\n } else {\n None\n }\n }\n\n \/\/\/ Removes the Pool corresponding to the uuid if there is one.\n pub fn remove_by_uuid(&mut self, uuid: &Uuid) -> Option<T> {\n if let Some(index) = self.uuid_map.remove(uuid) {\n \/\/ There is guaranteed to be a last because there is at least\n \/\/ one index into the items.\n\n \/\/ Insert mappings for the about-to-be swapped element\n {\n let last_item = &self.items.last().unwrap();\n self.name_map.insert(last_item.name().into(), index);\n self.uuid_map.insert(last_item.uuid().clone(), 
index);\n }\n\n \/\/ Remove the item we want to remove and also the uuid mapping\n let item = self.items.swap_remove(index);\n self.name_map.remove(item.name());\n\n \/\/ Remove the uuid again, in case there is only one item.\n self.uuid_map.remove(uuid);\n\n Some(item)\n } else {\n None\n }\n }\n\n \/\/\/ Inserts an item for given uuid and name.\n \/\/\/ Returns a list of the items displaced, which may be empty if no items\n \/\/\/ are displaced, have one entry if the uuid and the name map to the same\n \/\/\/ item, and may have two entries if the uuid and the name map to\n \/\/\/ different items.\n pub fn insert(&mut self, item: T) -> Vec<T> {\n let name_item = self.remove_by_name(item.name());\n let uuid_item = self.remove_by_uuid(item.uuid());\n\n let future_last_index = self.items.len();\n self.name_map.insert(item.name().into(), future_last_index);\n self.uuid_map.insert(item.uuid().clone(), future_last_index);\n\n self.items.push(item);\n\n match (name_item, uuid_item) {\n (None, None) => vec![],\n (None, Some(item)) => vec![item],\n (Some(item), None) => vec![item],\n (Some(p1), Some(p2)) => vec![p1, p2],\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n use rand;\n use uuid::Uuid;\n\n use super::super::engine::{HasName, HasUuid};\n\n use super::Table;\n\n #[derive(Debug)]\n struct TestThing {\n name: String,\n uuid: Uuid,\n stuff: u32,\n }\n\n impl TestThing {\n pub fn new(name: &str, uuid: &Uuid) -> TestThing {\n TestThing {\n name: name.to_owned(),\n uuid: uuid.clone(),\n stuff: rand::random::<u32>(),\n }\n }\n }\n\n impl HasUuid for TestThing {\n fn uuid(&self) -> &Uuid {\n &self.uuid\n }\n }\n\n impl HasName for TestThing {\n fn name(&self) -> &str {\n &self.name\n }\n }\n\n #[test]\n \/\/\/ Remove a test object by its uuid.\n \/\/\/ Mutate the removed test object.\n \/\/\/ Verify that the table is now empty and that removing by name yields\n \/\/\/ no result.\n fn remove_existing_item() {\n let mut t: Table<TestThing> = Table::new();\n let uuid = Uuid::new_v4();\n let name = \"name\";\n t.insert(TestThing::new(&name, &uuid));\n assert!(t.get_by_name(&name).is_some());\n assert!(t.get_by_uuid(&uuid).is_some());\n let thing = t.remove_by_uuid(&uuid);\n assert!(thing.is_some());\n let mut thing = thing.unwrap();\n thing.stuff = 0;\n assert!(t.is_empty());\n assert!(t.remove_by_name(&name).is_none());\n assert!(t.get_by_name(&name).is_none());\n assert!(t.get_by_uuid(&uuid).is_none());\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with same keys.\n \/\/\/ The previously inserted thing should be returned.\n \/\/\/ You can't insert the identical thing, because that would be a move.\n \/\/\/ This is good, because then you can't have a thing that is both in\n \/\/\/ the table and not in the table.\n fn insert_same_keys() {\n let mut t: Table<TestThing> = Table::new();\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, &uuid);\n let thing_key = thing.stuff;\n let displaced = t.insert(thing);\n\n \/\/ There was nothing previously, so displaced must be empty.\n assert!(displaced.is_empty());\n\n \/\/ t now contains the inserted thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid).unwrap().stuff == thing_key);\n\n \/\/ Add another thing with the same keys.\n let thing2 = TestThing::new(&name, &uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(thing2);\n\n \/\/ It has displaced the old thing.\n assert!(displaced.len() == 1);\n let ref displaced_item = 
displaced[0];\n assert!(displaced_item.name() == name);\n assert!(displaced_item.uuid() == &uuid);\n\n \/\/ But it contains a thing with the same keys.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid).unwrap().stuff == thing_key2);\n assert!(t.len() == 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same name.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_name() {\n let mut t: Table<TestThing> = Table::new();\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, &uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(thing);\n assert!(displaced.is_empty());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n\n \/\/ Insert new item with different UUID.\n let uuid2 = Uuid::new_v4();\n let thing2 = TestThing::new(&name, &uuid2);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(thing2);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.len() == 1);\n let ref displaced_item = displaced[0];\n assert!(displaced_item.name() == name);\n assert!(displaced_item.uuid() == &uuid);\n assert!(displaced_item.stuff == thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid2));\n assert!(!t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid2).unwrap().stuff == thing_key2);\n assert!(t.get_by_name(&name).unwrap().stuff == thing_key2);\n assert!(t.len() == 1);\n }\n}\n<commit_msg>Specify a global invariant for the table<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::collections::HashMap;\nuse std::slice::IterMut;\n\nuse uuid::Uuid;\n\nuse super::engine::{HasName, HasUuid};\n\n\n\/\/\/ Map UUID and name to T items.\n#[derive(Debug)]\npub struct Table<T: HasName + HasUuid> {\n items: Vec<T>,\n name_map: HashMap<String, usize>,\n uuid_map: HashMap<Uuid, usize>,\n}\n\n\/\/\/ All operations are O(1).\n\/\/\/ The implementation does not priviledge the name key over the UUID key\n\/\/\/ in any way. They are both treated as constants once the item has been\n\/\/\/ inserted. 
In order to rename a T item, it must be removed, renamed, and\n\/\/\/ reinserted under the new name.\nimpl<T: HasName + HasUuid> Table<T> {\n pub fn new() -> Self {\n Table {\n items: Vec::new(),\n name_map: HashMap::new(),\n uuid_map: HashMap::new(),\n }\n\n }\n\n pub fn is_empty(&self) -> bool {\n self.items.is_empty()\n }\n\n pub fn len(&self) -> usize {\n self.items.len()\n }\n\n \/\/\/ Returns true if map has an item corresponding to this name, else false.\n pub fn contains_name(&self, name: &str) -> bool {\n self.name_map.contains_key(name)\n }\n\n \/\/\/ Returns true if map has an item corresponding to this uuid, else false.\n #[allow(dead_code)]\n pub fn contains_uuid(&self, uuid: &Uuid) -> bool {\n self.uuid_map.contains_key(uuid)\n }\n\n \/\/\/ Get item by name.\n pub fn get_by_name(&self, name: &str) -> Option<&T> {\n self.name_map.get(name).map(|index| &self.items[*index])\n }\n\n \/\/\/ Get item by uuid.\n pub fn get_by_uuid(&self, uuid: &Uuid) -> Option<&T> {\n self.uuid_map.get(uuid).map(|index| &self.items[*index])\n }\n\n \/\/\/ Get mutable item by name.\n pub fn get_mut_by_name(&mut self, name: &str) -> Option<&mut T> {\n if let Some(index) = self.name_map.get(name) {\n Some(&mut self.items[*index])\n } else {\n None\n }\n }\n\n \/\/\/ Get mutable item by uuid.\n pub fn get_mut_by_uuid(&mut self, uuid: &Uuid) -> Option<&mut T> {\n if let Some(index) = self.uuid_map.get(uuid) {\n Some(&mut self.items[*index])\n } else {\n None\n }\n }\n\n \/\/\/ A mutable iterator through Pools.\n #[allow(dead_code)]\n pub fn iter_mut(&mut self) -> IterMut<T> {\n self.items.iter_mut()\n }\n\n \/\/\/ Removes the Pool corresponding to name if there is one.\n pub fn remove_by_name(&mut self, name: &str) -> Option<T> {\n if let Some(index) = self.name_map.remove(name) {\n \/\/ There is guaranteed to be a last because there is at least\n \/\/ one index into the items.\n\n \/\/ Insert mappings for the about-to-be swapped element\n {\n let last_item = &self.items.last().unwrap();\n self.name_map.insert(last_item.name().into(), index);\n self.uuid_map.insert(last_item.uuid().clone(), index);\n }\n\n \/\/ Remove the item we want to remove and also the uuid mapping\n let item = self.items.swap_remove(index);\n self.uuid_map.remove(item.uuid());\n\n \/\/ Remove the name again, in case there is only one item.\n self.name_map.remove(name);\n\n Some(item)\n } else {\n None\n }\n }\n\n \/\/\/ Removes the Pool corresponding to the uuid if there is one.\n pub fn remove_by_uuid(&mut self, uuid: &Uuid) -> Option<T> {\n if let Some(index) = self.uuid_map.remove(uuid) {\n \/\/ There is guaranteed to be a last because there is at least\n \/\/ one index into the items.\n\n \/\/ Insert mappings for the about-to-be swapped element\n {\n let last_item = &self.items.last().unwrap();\n self.name_map.insert(last_item.name().into(), index);\n self.uuid_map.insert(last_item.uuid().clone(), index);\n }\n\n \/\/ Remove the item we want to remove and also the uuid mapping\n let item = self.items.swap_remove(index);\n self.name_map.remove(item.name());\n\n \/\/ Remove the uuid again, in case there is only one item.\n self.uuid_map.remove(uuid);\n\n Some(item)\n } else {\n None\n }\n }\n\n \/\/\/ Inserts an item for given uuid and name.\n \/\/\/ Returns a list of the items displaced, which may be empty if no items\n \/\/\/ are displaced, have one entry if the uuid and the name map to the same\n \/\/\/ item, and may have two entries if the uuid and the name map to\n \/\/\/ different items.\n pub fn insert(&mut self, item: T) 
-> Vec<T> {\n let name_item = self.remove_by_name(item.name());\n let uuid_item = self.remove_by_uuid(item.uuid());\n\n let future_last_index = self.items.len();\n self.name_map.insert(item.name().into(), future_last_index);\n self.uuid_map.insert(item.uuid().clone(), future_last_index);\n\n self.items.push(item);\n\n match (name_item, uuid_item) {\n (None, None) => vec![],\n (None, Some(item)) => vec![item],\n (Some(item), None) => vec![item],\n (Some(p1), Some(p2)) => vec![p1, p2],\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n\n use rand;\n use uuid::Uuid;\n\n use super::super::engine::{HasName, HasUuid};\n\n use super::Table;\n\n #[derive(Debug)]\n struct TestThing {\n name: String,\n uuid: Uuid,\n stuff: u32,\n }\n\n \/\/ A global invariant checker for the table.\n \/\/ Verifies proper relationship between internal data structures.\n fn table_invariant<T>(table: &Table<T>) -> ()\n where T: HasName + HasUuid\n {\n let ref items = table.items;\n let ref name_map = table.name_map;\n let ref uuid_map = table.uuid_map;\n for i in 0..items.len() {\n let name = items[i].name();\n let uuid = items[i].uuid();\n assert!(name_map.get(name).unwrap() == &i);\n assert!(uuid_map.get(uuid).unwrap() == &i);\n }\n\n for name in name_map.keys() {\n let index = name_map.get(name).unwrap();\n assert!(items[*index].name() == name);\n }\n\n for uuid in uuid_map.keys() {\n let index = uuid_map.get(uuid).unwrap();\n assert!(items[*index].uuid() == uuid);\n }\n\n }\n\n impl TestThing {\n pub fn new(name: &str, uuid: &Uuid) -> TestThing {\n TestThing {\n name: name.to_owned(),\n uuid: uuid.clone(),\n stuff: rand::random::<u32>(),\n }\n }\n }\n\n impl HasUuid for TestThing {\n fn uuid(&self) -> &Uuid {\n &self.uuid\n }\n }\n\n impl HasName for TestThing {\n fn name(&self) -> &str {\n &self.name\n }\n }\n\n #[test]\n \/\/\/ Remove a test object by its uuid.\n \/\/\/ Mutate the removed test object.\n \/\/\/ Verify that the table is now empty and that removing by name yields\n \/\/\/ no result.\n fn remove_existing_item() {\n let mut t: Table<TestThing> = Table::new();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n t.insert(TestThing::new(&name, &uuid));\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_some());\n assert!(t.get_by_uuid(&uuid).is_some());\n let thing = t.remove_by_uuid(&uuid);\n table_invariant(&t);\n assert!(thing.is_some());\n let mut thing = thing.unwrap();\n thing.stuff = 0;\n assert!(t.is_empty());\n assert!(t.remove_by_name(&name).is_none());\n table_invariant(&t);\n\n assert!(t.get_by_name(&name).is_none());\n assert!(t.get_by_uuid(&uuid).is_none());\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with same keys.\n \/\/\/ The previously inserted thing should be returned.\n \/\/\/ You can't insert the identical thing, because that would be a move.\n \/\/\/ This is good, because then you can't have a thing that is both in\n \/\/\/ the table and not in the table.\n fn insert_same_keys() {\n let mut t: Table<TestThing> = Table::new();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, &uuid);\n let thing_key = thing.stuff;\n let displaced = t.insert(thing);\n table_invariant(&t);\n\n \/\/ There was nothing previously, so displaced must be empty.\n assert!(displaced.is_empty());\n\n \/\/ t now contains the inserted thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid).unwrap().stuff == thing_key);\n\n \/\/ Add another thing 
with the same keys.\n let thing2 = TestThing::new(&name, &uuid);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(thing2);\n table_invariant(&t);\n\n \/\/ It has displaced the old thing.\n assert!(displaced.len() == 1);\n let ref displaced_item = displaced[0];\n assert!(displaced_item.name() == name);\n assert!(displaced_item.uuid() == &uuid);\n\n \/\/ But it contains a thing with the same keys.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid).unwrap().stuff == thing_key2);\n assert!(t.len() == 1);\n }\n\n #[test]\n \/\/\/ Insert a thing and then insert another thing with the same name.\n \/\/\/ The previously inserted thing should be returned.\n fn insert_same_name() {\n let mut t: Table<TestThing> = Table::new();\n table_invariant(&t);\n\n let uuid = Uuid::new_v4();\n let name = \"name\";\n let thing = TestThing::new(&name, &uuid);\n let thing_key = thing.stuff;\n\n \/\/ There was nothing in the table before, so displaced is empty.\n let displaced = t.insert(thing);\n table_invariant(&t);\n assert!(displaced.is_empty());\n\n \/\/ t now contains thing.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid));\n\n \/\/ Insert new item with different UUID.\n let uuid2 = Uuid::new_v4();\n let thing2 = TestThing::new(&name, &uuid2);\n let thing_key2 = thing2.stuff;\n let displaced = t.insert(thing2);\n table_invariant(&t);\n\n \/\/ The items displaced consist exactly of the first item.\n assert!(displaced.len() == 1);\n let ref displaced_item = displaced[0];\n assert!(displaced_item.name() == name);\n assert!(displaced_item.uuid() == &uuid);\n assert!(displaced_item.stuff == thing_key);\n\n \/\/ The table contains the new item and has no memory of the old.\n assert!(t.contains_name(&name));\n assert!(t.contains_uuid(&uuid2));\n assert!(!t.contains_uuid(&uuid));\n assert!(t.get_by_uuid(&uuid2).unwrap().stuff == thing_key2);\n assert!(t.get_by_name(&name).unwrap().stuff == thing_key2);\n assert!(t.len() == 1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor(scanner): remove TextBuffer<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add build.rs for building with git hashes embedded<commit_after>use std::process::Command;\n\nfn main() {\n let output = Command::new(\"git\")\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n .output()\n .expect(\"Git command ran unsuccessfully.\");\n\n let git_hash = String::from_utf8(output.stdout).unwrap();\n\n println!(\"cargo:rustc-env=GIT_HASH={}\", git_hash);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added script to print performance event stats.<commit_after>extern crate perfcnt;\nextern crate x86;\nextern crate phf;\n\nuse x86::perfcnt::{core_counters, uncore_counters};\nuse x86::perfcnt::intel::description::{IntelPerformanceCounterDescription};\nuse x86::perfcnt::intel::counters;\n\nfn print_stats(year: &'static str, name: &'static str, size: usize) {\n println!(\"{}, {}, {}\", year, name, size);\n}\n\nfn main() {\n \/\/ 2008, 4\n let cc = (\"Bonnell core\", counters::BONNELL_CORE);\n print_stats(\"2008\", cc.0, cc.1.len());\n\n \/\/ 2008, 4\n let cc = (\"Nehalem EP core\", counters::NEHALEMEP_CORE);\n print_stats(\"2008\", cc.0, cc.1.len());\n\n let cc = (\"Nehalem EX core\", counters::NEHALEMEX_CORE);\n print_stats(\"2008\", cc.0, cc.1.len());\n\n \/\/ 2010, 4\n let cc = (\"Westmere EP DP core\", counters::WESTMEREEP_DP_CORE);\n print_stats(\"2010\", cc.0, cc.1.len());\n\n let cc = (\"Westmere EP SP core\", 
counters::WESTMEREEP_SP_CORE);\n print_stats(\"2010\", cc.0, cc.1.len());\n\n let cc = (\"Westmere EX\", counters::WESTMEREEX_CORE);\n print_stats(\"2010\", cc.0, cc.1.len());\n\n \/\/ 2011, 8\n let cc_uncore = (\"SandyBridge uncore\", counters::SANDYBRIDGE_UNCORE);\n let cc_core = (\"SandyBridge core\", counters::SANDYBRIDGE_CORE);\n print_stats(\"2011\", \"SandyBridge\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2011, 8\n let cc_core = (\"Jaketown core\", counters::JAKETOWN_CORE);\n let cc_uncore = (\"Jaketown uncore\", counters::JAKETOWN_UNCORE);\n print_stats(\"2011\", \"Jaketown\", cc_core.1.len()+cc_uncore.1.len());\n\n\n \/\/ 2012, 8\n let cc_uncore = (\"IvyBridge uncore\", counters::IVYBRIDGE_UNCORE);\n let cc_core = (\"IvyBridge core\", counters::IVYBRIDGE_CORE);\n print_stats(\"2012\", \"IvyBridge\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2013, 8\n let cc_core = (\"IvyTown core\", counters::IVYTOWN_CORE);\n let cc_uncore = (\"Ivytown uncore\", counters::IVYTOWN_UNCORE);\n print_stats(\"2012\", \"IvyTown\", cc_core.1.len()+cc_uncore.1.len());\n\n\n \/\/ 2013, 8\n let cc = (\"Silvermont core\", counters::SILVERMONT_CORE);\n print_stats(\"2013\", \"Silvermont\", cc.1.len());\n\n\n \/\/ 2013, 8\n let cc_uncore = (\"Haswell uncore\", counters::HASWELL_UNCORE);\n let cc_core = (\"Haswell core\", counters::HASWELL_CORE);\n print_stats(\"2013\", \"Haswell\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2013, 8\n let cc_core = (\"HaswellX core\", counters::HASWELLX_CORE);\n let cc_uncore = (\"HaswellX uncore\", counters::HASWELLX_UNCORE);\n print_stats(\"2013\", \"HaswellX\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2015, 8\n let cc_core = (\"Broadwell core\", counters::BROADWELL_CORE);\n let cc_uncore = (\"Broadwell uncore\", counters::BROADWELL_UNCORE);\n print_stats(\"2015\", \"Broadwell\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2015, 8\n let cc_uncore = (\"Broadwell DE uncore\", counters::BROADWELLDE_UNCORE);\n let cc_core = (\"Broadwell DE core\", counters::BROADWELLDE_CORE);\n print_stats(\"2015\", \"Broadwell DE\", cc_core.1.len()+cc_uncore.1.len());\n\n \/\/ 2015, 8\n let cc = (\"Skylake core\", counters::SKYLAKE_CORE);\n print_stats(\"2015\", \"Skylake\", cc.1.len());\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary to string from touch<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>pwm: add initial example application (WIP)<commit_after>\/\/ Copyright 2015, Paul Osborne <osbpau@gmail.com>\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/license\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(plugin, no_std, core, start)]\n#![crate_type=\"staticlib\"]\n#![no_std]\n#![plugin(macro_platformtree)]\n\nextern crate core;\nextern crate zinc;\n#[macro_use] #[no_link] extern crate macro_platformtree;\n\nuse zinc::hal::timer::Timer;\nuse zinc::hal::lpc17xx::pwm;\nuse zinc::hal::pwm::PWMOutput;\n\n\/\/ This example shows use of the RGB LED that is availble on the MBED\n\/\/ Application Board. The LED is connected to 3 pins coming\n\/\/ from the MBED LPC1768. 
Here's the mapping:\n\/\/\n\/\/ - RGB_RED => p23 => p2.3 (PWM1.4)\n\/\/ - RGB_GREEN => p24 => p2.2 (PWM1.3)\n\/\/ - RGB_BLUE => p25 => p2.1 (PWM1.2)\n\nplatformtree!(\n lpc17xx@mcu {\n clock {\n source = \"main-oscillator\";\n source_frequency = 12_000_000;\n pll {\n m = 50;\n n = 3;\n divisor = 4;\n }\n }\n\n timer {\n timer@1 {\n counter = 25;\n divisor = 4;\n }\n }\n\n gpio {\n 2 {\n rgb_blue@3 {\n direction = \"out\";\n function = \"pwm1_4\";\n }\n rgb_green@2 {\n direction = \"out\";\n function = \"pwm1_3\";\n }\n rgb_red@1 {\n direction = \"out\";\n function = \"pwm1_2\";\n }\n }\n }\n }\n\n os {\n single_task {\n loop = \"run\";\n args {\n timer = &timer;\n rgb_red = &rgb_red;\n rgb_green = &rgb_green;\n rgb_blue = &rgb_blue;\n }\n }\n }\n);\n\nfn do_color(timer: &Timer, pwm: &mut pwm::PWM) {\n for i in 0..100 {\n pwm.write(i as f32 \/ 100.0f32);\n timer.wait_ms(10);\n }\n}\n\nfn run(args: &pt::run_args) {\n let mut pwm_red = pwm::PWM::new(pwm::PWMChannel::CHANNEL3);\n let mut pwm_green = pwm::PWM::new(pwm::PWMChannel::CHANNEL2);\n let mut pwm_blue = pwm::PWM::new(pwm::PWMChannel::CHANNEL1);\n\n pwm_red.set_period_us(10_000);\n pwm_green.set_period_us(10_000);\n pwm_blue.set_period_us(10_000);\n\n loop {\n pwm_red.write(1.0);\n pwm_green.write(1.0);\n pwm_blue.write(1.0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(type_macros, concat_idents, rustc_attrs)]\n#![allow(unused)]\n\n#[derive(Debug)] struct FooBar;\n#[derive(Debug)] struct Baz<T>(T, concat_idents!(Foo, Bar));\n\n#[rustc_error]\nfn main() {} \/\/~ ERROR compilation successful\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test of conversion to box dyn error<commit_after>mod drop;\n\nuse self::drop::DetectDrop;\nuse anyhow::Error;\nuse std::error::Error as StdError;\nuse std::sync::atomic::AtomicBool;\nuse std::sync::atomic::Ordering::SeqCst;\nuse std::sync::Arc;\n\n#[test]\nfn test_convert() {\n let has_dropped = Arc::new(AtomicBool::new(false));\n let error = Error::new(DetectDrop::new(&has_dropped));\n let box_dyn = Box::<dyn StdError + Send + Sync>::from(error);\n assert_eq!(\"oh no!\", box_dyn.to_string());\n drop(box_dyn);\n assert!(has_dropped.load(SeqCst));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #27154 - apasel422:issue-10436, r=Gankro<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn works<T>(x: T) -> Vec<T> { vec![x] }\n\nfn also_works<T: Clone>(x: T) -> Vec<T> { vec![x] }\n\nfn main() {\n let _: Vec<usize> = works(0);\n let _: Vec<usize> = also_works(0);\n let _ = works(0);\n let _ = also_works(0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for drop order in async functions.<commit_after>\/\/ aux-build:arc_wake.rs\n\/\/ edition:2018\n\/\/ run-pass\n\n#![allow(unused_variables)]\n#![feature(async_await, await_macro, futures_api)]\n\nextern crate arc_wake;\n\nuse arc_wake::ArcWake;\nuse std::cell::RefCell;\nuse std::future::Future;\nuse std::sync::Arc;\nuse std::task::Context;\n\nstruct EmptyWaker;\n\nimpl ArcWake for EmptyWaker {\n fn wake(self: Arc<Self>) {}\n}\n\n#[derive(Debug, Eq, PartialEq)]\nenum DropOrder {\n Function,\n Val(&'static str),\n}\n\nstruct D(&'static str, Arc<RefCell<Vec<DropOrder>>>);\n\nimpl Drop for D {\n fn drop(&mut self) {\n self.1.borrow_mut().push(DropOrder::Val(self.0));\n }\n}\n\nasync fn foo(x: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\nasync fn bar(x: D, _: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\nasync fn baz((x, _): (D, D)) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\nasync fn foobar(x: D, (a, _, _c): (D, D, D), _: D, _y: D) {\n x.1.borrow_mut().push(DropOrder::Function);\n}\n\nfn main() {\n let empty = Arc::new(EmptyWaker);\n let waker = ArcWake::into_waker(empty);\n let mut cx = Context::from_waker(&waker);\n\n use DropOrder::*;\n\n \/\/ Currently, the `bar` and `foobar` tests do not output the same order as the equivalent\n \/\/ non-async functions. This is because the drop order of captured variables doesn't match the\n \/\/ drop order of arguments in a function.\n\n let af = Arc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(foo(D(\"x\", af.clone()), D(\"_y\", af.clone())));\n let _ = fut.as_mut().poll(&mut cx);\n assert_eq!(*af.borrow(), &[Function, Val(\"_y\"), Val(\"x\")]);\n\n let af = Arc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(bar(D(\"x\", af.clone()), D(\"_\", af.clone())));\n let _ = fut.as_mut().poll(&mut cx);\n assert_eq!(*af.borrow(), &[Function, Val(\"x\"), Val(\"_\")]);\n\n let af = Arc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(baz((D(\"x\", af.clone()), D(\"_\", af.clone()))));\n let _ = fut.as_mut().poll(&mut cx);\n assert_eq!(*af.borrow(), &[Function, Val(\"x\"), Val(\"_\")]);\n\n let af = Arc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(foobar(\n D(\"x\", af.clone()),\n (D(\"a\", af.clone()), D(\"_\", af.clone()), D(\"_c\", af.clone())),\n D(\"_\", af.clone()),\n D(\"_y\", af.clone()),\n ));\n let _ = fut.as_mut().poll(&mut cx);\n assert_eq!(*af.borrow(), &[\n Function, Val(\"_y\"), Val(\"_c\"), Val(\"a\"), Val(\"x\"), Val(\"_\"), Val(\"_\"),\n ]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add aliases attribute check<commit_after>#![feature(optin_builtin_traits)]\n\n\/\/ @has auto_aliases\/trait.Bar.html '\/\/h3[@aliases=\"auto_aliases::Foo\"]' 'impl Bar for Foo'\npub struct Foo;\n\npub auto trait Bar {}\n<|endoftext|>"} {"text":"<commit_before>extern crate rocksdb;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\nuse std::path::{PathBuf, Path};\n\nuse rocksdb::{DB, Options};\n\n\/\/ Ensures that DB::Destroy is called for this database when DBPath is dropped.\npub struct DBPath {\n path: PathBuf\n}\n\nimpl DBPath {\n \/\/ Suffixes the given `prefix` with a timestamp to ensure that 
subsequent test runs don't reuse\n \/\/ an old database in case of panics prior to Drop being called.\n pub fn new(prefix: &str) -> DBPath {\n let current_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n let path = format!(\n \"{}.{}.{}\",\n prefix,\n current_time.as_secs(),\n current_time.subsec_nanos()\n );\n\n DBPath { path: PathBuf::from(path) }\n }\n}\n\nimpl Drop for DBPath {\n fn drop(&mut self) {\n let opts = Options::default();\n DB::destroy(&opts, &self.path).unwrap();\n }\n}\n\nimpl AsRef<Path> for DBPath {\n fn as_ref(&self) -> &Path {\n &self.path\n }\n}\n\n<commit_msg>Changing comments to doc-comments [skip ci]<commit_after>extern crate rocksdb;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\nuse std::path::{PathBuf, Path};\n\nuse rocksdb::{DB, Options};\n\n\/\/\/ Ensures that DB::Destroy is called for this database when DBPath is dropped.\npub struct DBPath {\n path: PathBuf\n}\n\nimpl DBPath {\n \/\/\/ Suffixes the given `prefix` with a timestamp to ensure that subsequent test runs don't reuse\n \/\/\/ an old database in case of panics prior to Drop being called.\n pub fn new(prefix: &str) -> DBPath {\n let current_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n let path = format!(\n \"{}.{}.{}\",\n prefix,\n current_time.as_secs(),\n current_time.subsec_nanos()\n );\n\n DBPath { path: PathBuf::from(path) }\n }\n}\n\nimpl Drop for DBPath {\n fn drop(&mut self) {\n let opts = Options::default();\n DB::destroy(&opts, &self.path).unwrap();\n }\n}\n\nimpl AsRef<Path> for DBPath {\n fn as_ref(&self) -> &Path {\n &self.path\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. 
It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0 rls\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [Crate] = &[Crate(\"rustc\"), Crate(\"rustc_trans\")];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"ar\"),\n Crate(\"arena\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"build_helper\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fmt_macros\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"graphviz\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"miniz-sys\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"rustc\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc_allocator\"),\n Crate(\"rustc_apfloat\"),\n Crate(\"rustc_back\"),\n Crate(\"rustc_binaryen\"),\n Crate(\"rustc_const_eval\"),\n Crate(\"rustc_const_math\"),\n Crate(\"rustc_cratesio_shim\"),\n Crate(\"rustc_data_structures\"),\n Crate(\"rustc_errors\"),\n Crate(\"rustc_incremental\"),\n Crate(\"rustc_llvm\"),\n Crate(\"rustc_mir\"),\n Crate(\"rustc_platform_intrinsics\"),\n Crate(\"rustc_trans\"),\n Crate(\"rustc_trans_utils\"),\n Crate(\"serialize\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"syntax\"),\n Crate(\"syntax_pos\"),\n Crate(\"tempdir\"),\n Crate(\"unicode-width\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"proc_macro\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\nimpl<'a> Crate<'a> {\n pub fn from_str(s: &'a str) -> Self {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n\n Crate(name)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. 
Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<Crate<'a>>,\n krate: Crate<'a>,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate) {\n unapproved.insert(krate);\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let krate = Crate::from_str(dep);\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate);\n\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<commit_msg>different versions may have different deps<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0 rls\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_trans\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. 
Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n\/\/ Crate(\"ar\"),\n\/\/ Crate(\"arena\"),\n\/\/ Crate(\"backtrace\"),\n\/\/ Crate(\"backtrace-sys\"),\n\/\/ Crate(\"bitflags\"),\n\/\/ Crate(\"build_helper\"),\n\/\/ Crate(\"byteorder\"),\n\/\/ Crate(\"cc\"),\n\/\/ Crate(\"cfg-if\"),\n\/\/ Crate(\"cmake\"),\n\/\/ Crate(\"filetime\"),\n\/\/ Crate(\"flate2\"),\n\/\/ Crate(\"fmt_macros\"),\n\/\/ Crate(\"fuchsia-zircon\"),\n\/\/ Crate(\"fuchsia-zircon-sys\"),\n\/\/ Crate(\"graphviz\"),\n\/\/ Crate(\"jobserver\"),\n\/\/ Crate(\"kernel32-sys\"),\n\/\/ Crate(\"lazy_static\"),\n\/\/ Crate(\"libc\"),\n\/\/ Crate(\"log\"),\n\/\/ Crate(\"log_settings\"),\n\/\/ Crate(\"miniz-sys\"),\n\/\/ Crate(\"num_cpus\"),\n\/\/ Crate(\"owning_ref\"),\n\/\/ Crate(\"parking_lot\"),\n\/\/ Crate(\"parking_lot_core\"),\n\/\/ Crate(\"rand\"),\n\/\/ Crate(\"redox_syscall\"),\n\/\/ Crate(\"rustc\"),\n\/\/ Crate(\"rustc-demangle\"),\n\/\/ Crate(\"rustc_allocator\"),\n\/\/ Crate(\"rustc_apfloat\"),\n\/\/ Crate(\"rustc_back\"),\n\/\/ Crate(\"rustc_binaryen\"),\n\/\/ Crate(\"rustc_const_eval\"),\n\/\/ Crate(\"rustc_const_math\"),\n\/\/ Crate(\"rustc_cratesio_shim\"),\n\/\/ Crate(\"rustc_data_structures\"),\n\/\/ Crate(\"rustc_errors\"),\n\/\/ Crate(\"rustc_incremental\"),\n\/\/ Crate(\"rustc_llvm\"),\n\/\/ Crate(\"rustc_mir\"),\n\/\/ Crate(\"rustc_platform_intrinsics\"),\n\/\/ Crate(\"rustc_trans\"),\n\/\/ Crate(\"rustc_trans_utils\"),\n\/\/ Crate(\"serialize\"),\n\/\/ Crate(\"smallvec\"),\n\/\/ Crate(\"stable_deref_trait\"),\n\/\/ Crate(\"syntax\"),\n\/\/ Crate(\"syntax_pos\"),\n\/\/ Crate(\"tempdir\"),\n\/\/ Crate(\"unicode-width\"),\n\/\/ Crate(\"winapi\"),\n\/\/ Crate(\"winapi-build\"),\n\/\/ Crate(\"winapi-i686-pc-windows-gnu\"),\n\/\/ Crate(\"winapi-x86_64-pc-windows-gnu\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n pub fn from_str(s: &'a str) -> Self {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n\n CrateVersion(name, version)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. 
Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let krate = CrateVersion::from_str(dep);\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate);\n\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added test file<commit_after>extern crate inkwell;\n\nuse self::inkwell::AddressSpace;\nuse self::inkwell::context::Context;\nuse self::inkwell::values::{BasicValue, InstructionOpcode};\n\n#[test]\nfn test_operands() {\n let context = Context::create();\n let module = context.create_module(\"ivs\");\n let builder = context.create_builder();\n let void_type = context.void_type();\n let i64_type = context.i64_type();\n let f32_type = context.f32_type();\n let f32_ptr_type = f32_type.ptr_type(AddressSpace::Generic);\n let fn_type = void_type.fn_type(&[f32_ptr_type.into()], false);\n\n let function = module.add_function(\"take_f32_ptr\", fn_type, None);\n let basic_block = context.append_basic_block(&function, \"entry\");\n\n builder.position_at_end(&basic_block);\n\n let arg1 = function.get_first_param().unwrap().into_pointer_value();\n let f32_val = f32_type.const_float(::std::f64::consts::PI);\n let store_instruction = builder.build_store(arg1, f32_val);\n let free_instruction = builder.build_free(arg1);\n let return_instruction = builder.build_return(None);\n\n assert!(arg1.as_instruction_value().is_none());\n\n \/\/ Test operands\n assert_eq!(store_instruction.get_num_operands(), 2);\n assert_eq!(free_instruction.get_num_operands(), 2);\n\n let store_operand0 = store_instruction.get_operand(0).unwrap();\n let store_operand1 = store_instruction.get_operand(1).unwrap();\n\n assert_eq!(store_operand0, f32_val); \/\/ f32 const\n assert_eq!(store_operand1, arg1); \/\/ f32* arg1\n assert!(store_instruction.get_operand(2).is_none());\n assert!(store_instruction.get_operand(3).is_none());\n assert!(store_instruction.get_operand(4).is_none());\n\n let free_operand0 = free_instruction.get_operand(0).unwrap();\n let free_operand1 = free_instruction.get_operand(1).unwrap();\n let free_operand0_instruction = free_operand0.as_instruction_value().unwrap();\n\n assert!(free_operand0.is_pointer_value()); \/\/ (implictly casted) i8* arg1\n assert!(free_operand1.is_pointer_value()); \/\/ Free function ptr\n assert_eq!(free_operand0_instruction.get_opcode(), InstructionOpcode::BitCast);\n assert_eq!(free_operand0_instruction.get_operand(0).unwrap(), arg1);\n \/\/ assert_eq!(free_operand0_instruction.get_operand(1).unwrap(), arg1);\n assert!(free_instruction.get_operand(2).is_none());\n assert!(free_instruction.get_operand(3).is_none());\n assert!(free_instruction.get_operand(4).is_none());\n\n assert!(module.verify().is_ok());\n\n 
free_instruction.set_operand(0, arg1);\n\n \/\/ Module is no longer valid because free takes an i8* not f32*\n assert!(module.verify().is_err());\n\n free_instruction.set_operand(0, free_operand0);\n\n assert!(module.verify().is_ok());\n\n \/\/ No-op, free only has two operands\n free_instruction.set_operand(2, free_operand0);\n\n assert!(module.verify().is_ok());\n\n assert_eq!(return_instruction.get_num_operands(), 0);\n assert!(return_instruction.get_operand(0).is_none());\n assert!(return_instruction.get_operand(1).is_none());\n assert!(return_instruction.get_operand(2).is_none());\n\n \/\/ Test Uses\n \/\/ These instructions\/calls don't return any ir value so they aren't used anywhere\n \/\/ TODO: Test on instruction that is used\n assert!(store_instruction.get_first_use().is_none());\n assert!(free_instruction.get_first_use().is_none());\n assert!(return_instruction.get_first_use().is_none());\n\n \/\/ However their operands are used\n let store_operand_use0 = store_instruction.get_operand_use(0).unwrap();\n let store_operand_use1 = store_instruction.get_operand_use(1).unwrap();\n\n \/\/ in \"store float 0x400921FB60000000, float* %0\"\n \/\/ The const float is only used once, so it has no subsequent use\n \/\/ However the 2nd operand %0 is used in the subsequent (implicit) bitcast\n \/\/ TODO: Test with successful next use\n assert!(store_operand_use0.get_next_use().is_none());\n assert!(store_operand_use1.get_next_use().is_none()); \/\/ REVIEW: Why is this none?\n\n assert_eq!(store_operand_use0.get_user(), store_instruction);\n assert_eq!(store_operand_use1.get_user(), store_instruction);\n assert_eq!(store_operand_use0.get_used_value(), f32_val);\n assert_eq!(store_operand_use1.get_used_value(), arg1);\n\n assert!(store_instruction.get_operand_use(2).is_none());\n assert!(store_instruction.get_operand_use(3).is_none());\n assert!(store_instruction.get_operand_use(4).is_none());\n assert!(store_instruction.get_operand_use(5).is_none());\n assert!(store_instruction.get_operand_use(6).is_none());\n\n let free_operand_use0 = free_instruction.get_operand_use(0).unwrap();\n let free_operand_use1 = free_instruction.get_operand_use(1).unwrap();\n\n assert!(free_operand_use0.get_next_use().is_none());\n assert!(free_operand_use1.get_next_use().is_none());\n\n\n\n\n\n\n assert!(free_instruction.get_operand_use(2).is_none());\n assert!(free_instruction.get_operand_use(3).is_none());\n assert!(free_instruction.get_operand_use(4).is_none());\n assert!(free_instruction.get_operand_use(5).is_none());\n assert!(free_instruction.get_operand_use(6).is_none());\n\n\n assert!(false, \"\\n{}\", module.print_to_string().to_string());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #95.<commit_after>struct Foo {\n _inner: i32,\n}\n\nfn main() {\n unsafe {\n let foo = Foo {\n _inner: std::mem::uninitialized(),\n };\n let _bar = foo;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"ar\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempdir\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct 
Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. 
Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<commit_msg>extend the crate whitelist to include rustc-hash, chalk<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Check license of third-party deps by inspecting src\/vendor\n\nuse std::collections::{BTreeSet, HashSet};\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::process::Command;\n\nuse serde_json;\n\nstatic LICENSES: &'static [&'static str] = &[\n \"MIT\/Apache-2.0\",\n \"MIT \/ Apache-2.0\",\n \"Apache-2.0\/MIT\",\n \"Apache-2.0 \/ MIT\",\n \"MIT OR Apache-2.0\",\n \"MIT\",\n \"Unlicense\/MIT\",\n];\n\n\/\/\/ These are exceptions to Rust's permissive licensing policy, and\n\/\/\/ should be considered bugs. Exceptions are only allowed in Rust\n\/\/\/ tooling. It is _crucial_ that no exception crates be dependencies\n\/\/\/ of the Rust runtime (std \/ test).\nstatic EXCEPTIONS: &'static [&'static str] = &[\n \"mdbook\", \/\/ MPL2, mdbook\n \"openssl\", \/\/ BSD+advertising clause, cargo, mdbook\n \"pest\", \/\/ MPL2, mdbook via handlebars\n \"thread-id\", \/\/ Apache-2.0, mdbook\n \"toml-query\", \/\/ MPL-2.0, mdbook\n \"is-match\", \/\/ MPL-2.0, mdbook\n \"cssparser\", \/\/ MPL-2.0, rustdoc\n \"smallvec\", \/\/ MPL-2.0, rustdoc\n \"fuchsia-zircon-sys\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo\n \"fuchsia-zircon\", \/\/ BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)\n \"cssparser-macros\", \/\/ MPL-2.0, rustdoc\n \"selectors\", \/\/ MPL-2.0, rustdoc\n \"clippy_lints\", \/\/ MPL-2.0, rls\n \"colored\", \/\/ MPL-2.0, rustfmt\n];\n\n\/\/\/ Which crates to check against the whitelist?\nstatic WHITELIST_CRATES: &'static [CrateVersion] = &[\n CrateVersion(\"rustc\", \"0.0.0\"),\n CrateVersion(\"rustc_codegen_llvm\", \"0.0.0\"),\n];\n\n\/\/\/ Whitelist of crates rustc is allowed to depend on. 
Avoid adding to the list if possible.\nstatic WHITELIST: &'static [Crate] = &[\n Crate(\"aho-corasick\"),\n Crate(\"ar\"),\n Crate(\"arrayvec\"),\n Crate(\"atty\"),\n Crate(\"backtrace\"),\n Crate(\"backtrace-sys\"),\n Crate(\"bitflags\"),\n Crate(\"byteorder\"),\n Crate(\"cc\"),\n Crate(\"chalk-engine\"),\n Crate(\"chalk-macros\"),\n Crate(\"cfg-if\"),\n Crate(\"cmake\"),\n Crate(\"crossbeam-deque\"),\n Crate(\"crossbeam-epoch\"),\n Crate(\"crossbeam-utils\"),\n Crate(\"either\"),\n Crate(\"ena\"),\n Crate(\"env_logger\"),\n Crate(\"filetime\"),\n Crate(\"flate2\"),\n Crate(\"fuchsia-zircon\"),\n Crate(\"fuchsia-zircon-sys\"),\n Crate(\"getopts\"),\n Crate(\"humantime\"),\n Crate(\"jobserver\"),\n Crate(\"kernel32-sys\"),\n Crate(\"lazy_static\"),\n Crate(\"libc\"),\n Crate(\"log\"),\n Crate(\"log_settings\"),\n Crate(\"memchr\"),\n Crate(\"memoffset\"),\n Crate(\"miniz-sys\"),\n Crate(\"nodrop\"),\n Crate(\"num_cpus\"),\n Crate(\"owning_ref\"),\n Crate(\"parking_lot\"),\n Crate(\"parking_lot_core\"),\n Crate(\"quick-error\"),\n Crate(\"rand\"),\n Crate(\"redox_syscall\"),\n Crate(\"redox_termios\"),\n Crate(\"regex\"),\n Crate(\"regex-syntax\"),\n Crate(\"remove_dir_all\"),\n Crate(\"rustc-demangle\"),\n Crate(\"rustc-hash\"),\n Crate(\"rustc-rayon\"),\n Crate(\"rustc-rayon-core\"),\n Crate(\"scoped-tls\"),\n Crate(\"scopeguard\"),\n Crate(\"smallvec\"),\n Crate(\"stable_deref_trait\"),\n Crate(\"tempdir\"),\n Crate(\"termcolor\"),\n Crate(\"terminon\"),\n Crate(\"termion\"),\n Crate(\"thread_local\"),\n Crate(\"ucd-util\"),\n Crate(\"unicode-width\"),\n Crate(\"unreachable\"),\n Crate(\"utf8-ranges\"),\n Crate(\"void\"),\n Crate(\"winapi\"),\n Crate(\"winapi-build\"),\n Crate(\"winapi-i686-pc-windows-gnu\"),\n Crate(\"winapi-x86_64-pc-windows-gnu\"),\n Crate(\"wincolor\"),\n];\n\n\/\/ Some types for Serde to deserialize the output of `cargo metadata` to...\n\n#[derive(Deserialize)]\nstruct Output {\n resolve: Resolve,\n}\n\n#[derive(Deserialize)]\nstruct Resolve {\n nodes: Vec<ResolveNode>,\n}\n\n#[derive(Deserialize)]\nstruct ResolveNode {\n id: String,\n dependencies: Vec<String>,\n}\n\n\/\/\/ A unique identifier for a crate\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct Crate<'a>(&'a str); \/\/ (name,)\n\n#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]\nstruct CrateVersion<'a>(&'a str, &'a str); \/\/ (name, version)\n\nimpl<'a> Crate<'a> {\n pub fn id_str(&self) -> String {\n format!(\"{} \", self.0)\n }\n}\n\nimpl<'a> CrateVersion<'a> {\n \/\/\/ Returns the struct and whether or not the dep is in-tree\n pub fn from_str(s: &'a str) -> (Self, bool) {\n let mut parts = s.split(\" \");\n let name = parts.next().unwrap();\n let version = parts.next().unwrap();\n let path = parts.next().unwrap();\n\n let is_path_dep = path.starts_with(\"(path+\");\n\n (CrateVersion(name, version), is_path_dep)\n }\n\n pub fn id_str(&self) -> String {\n format!(\"{} {}\", self.0, self.1)\n }\n}\n\nimpl<'a> From<CrateVersion<'a>> for Crate<'a> {\n fn from(cv: CrateVersion<'a>) -> Crate<'a> {\n Crate(cv.0)\n }\n}\n\n\/\/\/ Checks the dependency at the given path. 
Changes `bad` to `true` if a check failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the license is correct.\npub fn check(path: &Path, bad: &mut bool) {\n \/\/ Check licences\n let path = path.join(\"vendor\");\n assert!(path.exists(), \"vendor directory missing\");\n let mut saw_dir = false;\n for dir in t!(path.read_dir()) {\n saw_dir = true;\n let dir = t!(dir);\n\n \/\/ skip our exceptions\n if EXCEPTIONS.iter().any(|exception| {\n dir.path()\n .to_str()\n .unwrap()\n .contains(&format!(\"src\/vendor\/{}\", exception))\n }) {\n continue;\n }\n\n let toml = dir.path().join(\"Cargo.toml\");\n *bad = *bad || !check_license(&toml);\n }\n assert!(saw_dir, \"no vendored source\");\n}\n\n\/\/\/ Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check\n\/\/\/ failed.\n\/\/\/\n\/\/\/ Specifically, this checks that the dependencies are on the WHITELIST.\npub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {\n \/\/ Get dependencies from cargo metadata\n let resolve = get_deps(path, cargo);\n\n \/\/ Get the whitelist into a convenient form\n let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();\n\n \/\/ Check dependencies\n let mut visited = BTreeSet::new();\n let mut unapproved = BTreeSet::new();\n for &krate in WHITELIST_CRATES.iter() {\n let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);\n unapproved.append(&mut bad);\n }\n\n if unapproved.len() > 0 {\n println!(\"Dependencies not on the whitelist:\");\n for dep in unapproved {\n println!(\"* {}\", dep.id_str());\n }\n *bad = true;\n }\n}\n\nfn check_license(path: &Path) -> bool {\n if !path.exists() {\n panic!(\"{} does not exist\", path.display());\n }\n let mut contents = String::new();\n t!(t!(File::open(path)).read_to_string(&mut contents));\n\n let mut found_license = false;\n for line in contents.lines() {\n if !line.starts_with(\"license\") {\n continue;\n }\n let license = extract_license(line);\n if !LICENSES.contains(&&*license) {\n println!(\"invalid license {} in {}\", license, path.display());\n return false;\n }\n found_license = true;\n break;\n }\n if !found_license {\n println!(\"no license in {}\", path.display());\n return false;\n }\n\n true\n}\n\nfn extract_license(line: &str) -> String {\n let first_quote = line.find('\"');\n let last_quote = line.rfind('\"');\n if let (Some(f), Some(l)) = (first_quote, last_quote) {\n let license = &line[f + 1..l];\n license.into()\n } else {\n \"bad-license-parse\".into()\n }\n}\n\n\/\/\/ Get the dependencies of the crate at the given path using `cargo metadata`.\nfn get_deps(path: &Path, cargo: &Path) -> Resolve {\n \/\/ Run `cargo metadata` to get the set of dependencies\n let output = Command::new(cargo)\n .arg(\"metadata\")\n .arg(\"--format-version\")\n .arg(\"1\")\n .arg(\"--manifest-path\")\n .arg(path.join(\"Cargo.toml\"))\n .output()\n .expect(\"Unable to run `cargo metadata`\")\n .stdout;\n let output = String::from_utf8_lossy(&output);\n let output: Output = serde_json::from_str(&output).unwrap();\n\n output.resolve\n}\n\n\/\/\/ Checks the dependencies of the given crate from the given cargo metadata to see if they are on\n\/\/\/ the whitelist. 
Returns a list of illegal dependencies.\nfn check_crate_whitelist<'a, 'b>(\n whitelist: &'a HashSet<Crate>,\n resolve: &'a Resolve,\n visited: &'b mut BTreeSet<CrateVersion<'a>>,\n krate: CrateVersion<'a>,\n must_be_on_whitelist: bool,\n) -> BTreeSet<Crate<'a>> {\n \/\/ Will contain bad deps\n let mut unapproved = BTreeSet::new();\n\n \/\/ Check if we have already visited this crate\n if visited.contains(&krate) {\n return unapproved;\n }\n\n visited.insert(krate);\n\n \/\/ If this path is in-tree, we don't require it to be on the whitelist\n if must_be_on_whitelist {\n \/\/ If this dependency is not on the WHITELIST, add to bad set\n if !whitelist.contains(&krate.into()) {\n unapproved.insert(krate.into());\n }\n }\n\n \/\/ Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)\n let to_check = resolve\n .nodes\n .iter()\n .find(|n| n.id.starts_with(&krate.id_str()))\n .expect(\"crate does not exist\");\n\n for dep in to_check.dependencies.iter() {\n let (krate, is_path_dep) = CrateVersion::from_str(dep);\n\n let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);\n unapproved.append(&mut bad);\n }\n\n unapproved\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add graph_tree file<commit_after>\/\/! A tree represented via a petgraph graph, used for way-cooler's\n\/\/! layout.\n\nuse petgraph::EdgeDirection;\nuse petgraph::graph::{Graph, Node as GraphNode, NodeIndex, EdgeIndex};\n\nuse rustwlc::handle::WlcView;\n\nuse layout::{Container, ContainerType};\n\n\/\/\/ Node used in the layout tree\npub type Node = GraphNode<Container>;\n\n\/\/\/ Layout tree implemented with petgraph.\n#[derive(Debug)]\npub struct Tree {\n graph: Graph<Container, ()>, \/\/ Directed graph\n root: NodeIndex\n}\n\nimpl Tree {\n \/\/\/ Creates a new layout tree with a root node.\n pub fn new() -> Tree {\n let mut graph = Graph::new();\n let root_ix = graph.add_node(Container::Root);\n Tree { graph: graph, root: root_ix }\n }\n\n \/\/\/ Adds a new child to a node at the index, returning the edge index\n \/\/\/ of their connection and the index of the new node.\n \/\/ TODO should this return a result like the old API?\n pub fn add_child(&mut self, parent_ix: NodeIndex, val: Container)\n -> (EdgeIndex, NodeIndex) {\n let parent_type = self.graph.node_weight(parent_ix)\n .expect(\"add_child: parent not found\").get_type();\n if !parent_type.can_have_child(val.get_type()) {\n panic!(\"Attempted to give a {:?} a {:?} child!\",\n parent_type, val.get_type())\n }\n let child_ix = self.graph.add_node(val);\n let edge_ix = self.graph.update_edge(parent_ix, child_ix, ());\n (edge_ix, child_ix)\n }\n\n \/\/\/ Add an existing node (detached in the graph) to the tree.\n \/\/\/ Note that floating nodes shouldn't exist for too long.\n pub fn attach_child(&mut self, parent_ix: NodeIndex, child_ix: NodeIndex)\n -> EdgeIndex {\n \/\/ Make sure the child doesn't have a parent\n if cfg!(debug_assertions) && self.has_parent(child_ix) {\n panic!(\"attach_child: child had a parent!\")\n }\n\n let parent_type = self.graph.node_weight(parent_ix)\n .expect(\"attach_child: parent not found\").get_type();\n let child_type = self.graph.node_weight(child_ix)\n .expect(\"attach_child: child not found\").get_type();\n\n if !parent_type.can_have_child(child_type) {\n panic!(\"Attempted to give a {:?} a {:?} child!\",\n parent_type, child_type);\n }\n\n return self.graph.update_edge(parent_ix, child_ix, ())\n }\n\n \/\/\/ Detaches a node from 
the tree (causing there to be two trees).\n \/\/\/ This should only be done temporarily.\n pub fn detach(&mut self, node_ix: NodeIndex) {\n \/\/ Find the parent (if any) and remove the edge connecting it to this node\n if let Some(parent_ix) = self.parent_of(node_ix) {\n let edge_ix = self.graph.find_edge(parent_ix, node_ix)\n .expect(\"detach: no edge between parent and child\");\n self.graph.remove_edge(edge_ix);\n }\n }\n\n \/\/\/ Moves a node between two indices\n pub fn move_node(&mut self, node_ix: NodeIndex, new_parent: NodeIndex) {\n self.detach(node_ix);\n self.attach_child(new_parent, node_ix);\n }\n\n \/\/\/ Whether a node has a parent\n #[allow(dead_code)]\n pub fn has_parent(&self, node_ix: NodeIndex) -> bool {\n let neighbors = self.graph\n .neighbors_directed(node_ix, EdgeDirection::Incoming);\n match neighbors.count() {\n 0 => false,\n 1 => true,\n _ => panic!(\"Node has more than one parent!\")\n }\n }\n\n \/\/\/ Gets the parent of a node, if the node exists\n pub fn parent_of(&self, node_ix: NodeIndex) -> Option<NodeIndex> {\n let mut neighbors = self.graph\n .neighbors_directed(node_ix, EdgeDirection::Incoming);\n let parent = neighbors.next();\n \/\/ A node should never have more than one parent\n if cfg!(debug_assertions) && neighbors.next().is_some() {\n panic!(\"parent_of: node has multiple parents!\")\n }\n parent\n }\n\n \/\/\/ Collects the children of a node.\n \/\/\/\n \/\/\/ Will return an empty list if the node has no children or\n \/\/\/ if the node does not exist.\n pub fn children_of(&self, node_ix: NodeIndex) -> Vec<NodeIndex> {\n self.graph.neighbors_directed(node_ix, EdgeDirection::Outgoing).collect()\n }\n\n \/\/\/ Gets the container of the given node.\n pub fn get(&self, node_ix: NodeIndex) -> &Container {\n self.graph.node_weight(node_ix)\n .expect(\"get: node not found\")\n }\n\n \/\/\/ Gets a mutable reference to a given node\n pub fn get_mut(&mut self, node_ix: NodeIndex) -> &mut Container {\n self.graph.node_weight_mut(node_ix)\n .expect(\"get_mut: node not found\")\n }\n\n \/\/\/ Gets the ContainerType of the selected node\n pub fn node_type(&self, node_ix: NodeIndex) -> ContainerType {\n let node = self.graph.node_weight(node_ix)\n .expect(\"node_type: node not found\");\n node.get_type()\n }\n\n \/\/\/ Attempts to get an ancestor of the matching type\n pub fn ancestor_of_type(&self, node_ix: NodeIndex,\n container_type: ContainerType) -> Option<NodeIndex> {\n let mut curr_ix = node_ix;\n while let Some(parent_ix) = self.parent_of(curr_ix) {\n let parent = self.graph.node_weight(parent_ix)\n .expect(\"ancestor_of_type: parent_of invalid\");\n if parent.get_type() == container_type {\n return Some(parent_ix)\n }\n curr_ix = parent_ix;\n }\n return None;\n }\n\n \/\/\/ Attempts to get a descendant of the matching type\n pub fn descendant_of_type(&self, node_ix: NodeIndex,\n container_type: ContainerType) -> Option<NodeIndex> {\n \/\/ TODO if self == type?\n for child in self.children_of(node_ix) {\n if self.node_type(child) == container_type {\n return Some(child)\n }\n if let Some(desc) = self.descendant_of_type(child, container_type) {\n return Some(desc)\n }\n }\n return None\n }\n\n \/\/\/ Finds a node by the view handle.\n pub fn descendant_with_handle(&self, node: NodeIndex, handle: &WlcView)\n -> Option<NodeIndex> {\n match self.get(node) {\n &Container::View { ref node_handle, .. 
} => {\n if node_handle == handle {\n Some(node)\n }\n else {\n None\n }\n },\n _ => {\n for child in self.children_of(node) {\n if let Some(view) = self.descendant_with_handle(handle) {\n return Some(view)\n }\n }\n return None\n }\n }\n }\n\n \/\/\/ Sets the node and its children's visibility\n pub fn set_family_visible(&mut self, node_ix: NodeIndex, visible: bool) {\n self.get_mut(node_ix).set_visibility(visible);\n for child in self.children_of(node_ix) {\n self.get_mut(child).set_visibility(visible);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>easage-pack: Bump from 0.0.1 to 0.0.2<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Allow PFADD with no value<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Example taken from RFC 1238 text\n\n\/\/ https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1238-nonparametric-dropck.md\n\/\/ #example-of-the-unguarded-escape-hatch\n\n#![feature(dropck_parametricity)]\nuse std::cell::Cell;\n\nstruct Concrete<'a>(u32, Cell<Option<&'a Concrete<'a>>>);\n\nstruct Foo<T> { data: Vec<T> }\n\nimpl<T> Drop for Foo<T> {\n #[unsafe_destructor_blind_to_params] \/\/ This is the UGEH attribute\n fn drop(&mut self) { }\n}\n\nfn main() {\n let mut foo = Foo { data: Vec::new() };\n foo.data.push(Concrete(0, Cell::new(None)));\n foo.data.push(Concrete(0, Cell::new(None)));\n\n foo.data[0].1.set(Some(&foo.data[1]));\n foo.data[1].1.set(Some(&foo.data[0]));\n}\n\n<commit_msg>placate check-pretty via comment rearrangement.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Example taken from RFC 1238 text\n\n\/\/ https:\/\/github.com\/rust-lang\/rfcs\/blob\/master\/text\/1238-nonparametric-dropck.md\n\/\/ #example-of-the-unguarded-escape-hatch\n\n#![feature(dropck_parametricity)]\nuse std::cell::Cell;\n\nstruct Concrete<'a>(u32, Cell<Option<&'a Concrete<'a>>>);\n\nstruct Foo<T> { data: Vec<T> }\n\nimpl<T> Drop for Foo<T> {\n \/\/ Below is the UGEH attribute\n #[unsafe_destructor_blind_to_params]\n fn drop(&mut self) { }\n}\n\nfn main() {\n let mut foo = Foo { data: Vec::new() };\n foo.data.push(Concrete(0, Cell::new(None)));\n foo.data.push(Concrete(0, Cell::new(None)));\n\n foo.data[0].1.set(Some(&foo.data[1]));\n foo.data[1].1.set(Some(&foo.data[0]));\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ascii::AsciiExt;\nuse std::cmp;\n\n#[derive(Debug, Clone)]\npub struct LogDirective {\n pub name: Option<String>,\n pub level: u32,\n}\n\npub const LOG_LEVEL_NAMES: [&'static str; 5] = [\"ERROR\", \"WARN\", \"INFO\", \"DEBUG\", \"TRACE\"];\n\n\/\/\/ Parse an individual log level that is either a number or a symbolic log level\nfn parse_log_level(level: &str) -> Option<u32> {\n level.parse::<u32>()\n .ok()\n .or_else(|| {\n let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level));\n pos.map(|p| p as u32 + 1)\n })\n .map(|p| cmp::min(p, ::MAX_LOG_LEVEL))\n}\n\n\/\/\/ Parse a logging specification string (e.g: \"crate1,crate2::mod3,crate3::x=1\/foo\")\n\/\/\/ and return a vector with log directives.\n\/\/\/\n\/\/\/ Valid log levels are 0-255, with the most likely ones being 1-4 (defined in\n\/\/\/ std::). Also supports string log levels of error, warn, info, and debug\npub fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {\n let mut dirs = Vec::new();\n\n let mut parts = spec.split('\/');\n let mods = parts.next();\n let filter = parts.next();\n if parts.next().is_some() {\n println!(\"warning: invalid logging spec '{}', ignoring it (too many '\/'s)\",\n spec);\n return (dirs, None);\n }\n if let Some(m) = mods {\n for s in m.split(',') {\n if s.is_empty() {\n continue;\n }\n let mut parts = s.split('=');\n let (log_level, name) = match (parts.next(),\n parts.next().map(|s| s.trim()),\n parts.next()) {\n (Some(part0), None, None) => {\n \/\/ if the single argument is a log-level string or number,\n \/\/ treat that as a global fallback\n match parse_log_level(part0) {\n Some(num) => (num, None),\n None => (::MAX_LOG_LEVEL, Some(part0)),\n }\n }\n (Some(part0), Some(\"\"), None) => (::MAX_LOG_LEVEL, Some(part0)),\n (Some(part0), Some(part1), None) => {\n match parse_log_level(part1) {\n Some(num) => (num, Some(part0)),\n _ => {\n println!(\"warning: invalid logging spec '{}', ignoring it\", part1);\n continue;\n }\n }\n }\n _ => {\n println!(\"warning: invalid logging spec '{}', ignoring it\", s);\n continue;\n }\n };\n dirs.push(LogDirective {\n name: name.map(str::to_owned),\n level: log_level,\n });\n }\n }\n\n (dirs, filter.map(str::to_owned))\n}\n\n#[cfg(test)]\nmod tests {\n use super::parse_logging_spec;\n\n #[test]\n fn parse_logging_spec_valid() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1,crate1::mod2,crate2=4\");\n assert_eq!(dirs.len(), 3);\n assert_eq!(dirs[0].name, Some(\"crate1::mod1\".to_owned()));\n assert_eq!(dirs[0].level, 1);\n\n assert_eq!(dirs[1].name, Some(\"crate1::mod2\".to_owned()));\n assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);\n\n assert_eq!(dirs[2].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[2].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_invalid_crate() {\n \/\/ test parse_logging_spec with multiple = in specification\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1=2,crate2=4\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n 
assert_eq!(dirs[0].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_invalid_log_level() {\n \/\/ test parse_logging_spec with 'noNumber' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=noNumber,crate2=4\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_string_log_level() {\n \/\/ test parse_logging_spec with 'warn' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=wrong,crate2=warn\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, ::WARN);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_empty_log_level() {\n \/\/ test parse_logging_spec with '' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=wrong,crate2=\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_global() {\n \/\/ test parse_logging_spec with no crate\n let (dirs, filter) = parse_logging_spec(\"warn,crate2=4\");\n assert_eq!(dirs.len(), 2);\n assert_eq!(dirs[0].name, None);\n assert_eq!(dirs[0].level, 2);\n assert_eq!(dirs[1].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[1].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_valid_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1,crate1::mod2,crate2=4\/abc\");\n assert_eq!(dirs.len(), 3);\n assert_eq!(dirs[0].name, Some(\"crate1::mod1\".to_owned()));\n assert_eq!(dirs[0].level, 1);\n\n assert_eq!(dirs[1].name, Some(\"crate1::mod2\".to_owned()));\n assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);\n\n assert_eq!(dirs[2].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[2].level, 4);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"abc\");\n }\n\n #[test]\n fn parse_logging_spec_invalid_crate_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1=2,crate2=4\/a.c\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, 4);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"a.c\");\n }\n\n #[test]\n fn parse_logging_spec_empty_with_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1\/a*c\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate1\".to_owned()));\n assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"a*c\");\n }\n}\n<commit_msg>run rustfmt on liblog<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ascii::AsciiExt;\nuse std::cmp;\n\n#[derive(Debug, Clone)]\npub struct LogDirective {\n pub name: Option<String>,\n pub level: u32,\n}\n\npub const LOG_LEVEL_NAMES: [&'static str; 5] = [\"ERROR\", \"WARN\", \"INFO\", \"DEBUG\", \"TRACE\"];\n\n\/\/\/ Parse an individual log level that is either a number or a symbolic log level\nfn parse_log_level(level: &str) -> Option<u32> {\n level.parse::<u32>()\n .ok()\n .or_else(|| {\n let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level));\n pos.map(|p| p as u32 + 1)\n })\n .map(|p| cmp::min(p, ::MAX_LOG_LEVEL))\n}\n\n\/\/\/ Parse a logging specification string (e.g: \"crate1,crate2::mod3,crate3::x=1\/foo\")\n\/\/\/ and return a vector with log directives.\n\/\/\/\n\/\/\/ Valid log levels are 0-255, with the most likely ones being 1-4 (defined in\n\/\/\/ std::). Also supports string log levels of error, warn, info, and debug\npub fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {\n let mut dirs = Vec::new();\n\n let mut parts = spec.split('\/');\n let mods = parts.next();\n let filter = parts.next();\n if parts.next().is_some() {\n println!(\"warning: invalid logging spec '{}', ignoring it (too many '\/'s)\",\n spec);\n return (dirs, None);\n }\n if let Some(m) = mods {\n for s in m.split(',') {\n if s.is_empty() {\n continue;\n }\n let mut parts = s.split('=');\n let (log_level, name) =\n match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {\n (Some(part0), None, None) => {\n \/\/ if the single argument is a log-level string or number,\n \/\/ treat that as a global fallback\n match parse_log_level(part0) {\n Some(num) => (num, None),\n None => (::MAX_LOG_LEVEL, Some(part0)),\n }\n }\n (Some(part0), Some(\"\"), None) => (::MAX_LOG_LEVEL, Some(part0)),\n (Some(part0), Some(part1), None) => {\n match parse_log_level(part1) {\n Some(num) => (num, Some(part0)),\n _ => {\n println!(\"warning: invalid logging spec '{}', ignoring it\", part1);\n continue;\n }\n }\n }\n _ => {\n println!(\"warning: invalid logging spec '{}', ignoring it\", s);\n continue;\n }\n };\n dirs.push(LogDirective {\n name: name.map(str::to_owned),\n level: log_level,\n });\n }\n }\n\n (dirs, filter.map(str::to_owned))\n}\n\n#[cfg(test)]\nmod tests {\n use super::parse_logging_spec;\n\n #[test]\n fn parse_logging_spec_valid() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1,crate1::mod2,crate2=4\");\n assert_eq!(dirs.len(), 3);\n assert_eq!(dirs[0].name, Some(\"crate1::mod1\".to_owned()));\n assert_eq!(dirs[0].level, 1);\n\n assert_eq!(dirs[1].name, Some(\"crate1::mod2\".to_owned()));\n assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);\n\n assert_eq!(dirs[2].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[2].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_invalid_crate() {\n \/\/ test parse_logging_spec with multiple = in specification\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1=2,crate2=4\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_invalid_log_level() {\n \/\/ test parse_logging_spec with 'noNumber' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=noNumber,crate2=4\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, 4);\n 
assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_string_log_level() {\n \/\/ test parse_logging_spec with 'warn' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=wrong,crate2=warn\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, ::WARN);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_empty_log_level() {\n \/\/ test parse_logging_spec with '' as log level\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=wrong,crate2=\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_global() {\n \/\/ test parse_logging_spec with no crate\n let (dirs, filter) = parse_logging_spec(\"warn,crate2=4\");\n assert_eq!(dirs.len(), 2);\n assert_eq!(dirs[0].name, None);\n assert_eq!(dirs[0].level, 2);\n assert_eq!(dirs[1].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[1].level, 4);\n assert!(filter.is_none());\n }\n\n #[test]\n fn parse_logging_spec_valid_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1,crate1::mod2,crate2=4\/abc\");\n assert_eq!(dirs.len(), 3);\n assert_eq!(dirs[0].name, Some(\"crate1::mod1\".to_owned()));\n assert_eq!(dirs[0].level, 1);\n\n assert_eq!(dirs[1].name, Some(\"crate1::mod2\".to_owned()));\n assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);\n\n assert_eq!(dirs[2].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[2].level, 4);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"abc\");\n }\n\n #[test]\n fn parse_logging_spec_invalid_crate_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1::mod1=1=2,crate2=4\/a.c\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate2\".to_owned()));\n assert_eq!(dirs[0].level, 4);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"a.c\");\n }\n\n #[test]\n fn parse_logging_spec_empty_with_filter() {\n let (dirs, filter) = parse_logging_spec(\"crate1\/a*c\");\n assert_eq!(dirs.len(), 1);\n assert_eq!(dirs[0].name, Some(\"crate1\".to_owned()));\n assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);\n assert!(filter.is_some() && filter.unwrap().to_owned() == \"a*c\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse attr::{AttrMetaMethods, HasAttrs};\nuse errors::Handler;\nuse feature_gate::GatedCfgAttr;\nuse fold::Folder;\nuse {ast, fold, attr};\nuse visit;\nuse codemap::{Spanned, respan};\nuse ptr::P;\n\nuse util::small_vector::SmallVector;\n\npub trait CfgFolder: fold::Folder {\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T>;\n fn visit_unconfigurable_expr(&mut self, _expr: &ast::Expr) {}\n}\n\n\/\/\/ A folder that strips out items that do not belong in the current\n\/\/\/ configuration.\npub struct StripUnconfigured<'a> {\n diag: CfgDiagReal<'a, 'a>,\n config: &'a ast::CrateConfig,\n}\n\nimpl<'a> CfgFolder for StripUnconfigured<'a> {\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {\n if in_cfg(self.config, node.attrs(), &mut self.diag) {\n Some(node)\n } else {\n None\n }\n }\n\n fn visit_unconfigurable_expr(&mut self, expr: &ast::Expr) {\n if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) {\n let msg = \"removing an expression is not supported in this position\";\n self.diag.diag.span_err(attr.span, msg);\n }\n }\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(diagnostic: &Handler, krate: ast::Crate,\n feature_gated_cfgs: &mut Vec<GatedCfgAttr>)\n -> ast::Crate\n{\n \/\/ Need to do this check here because cfg runs before feature_gates\n check_for_gated_stmt_expr_attributes(&krate, feature_gated_cfgs);\n\n let krate = process_cfg_attr(diagnostic, krate, feature_gated_cfgs);\n\n StripUnconfigured {\n config: &krate.config.clone(),\n diag: CfgDiagReal {\n diag: diagnostic,\n feature_gated_cfgs: feature_gated_cfgs,\n },\n }.fold_crate(krate)\n}\n\nimpl<T: CfgFolder> fold::Folder for T {\n fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {\n ast::ForeignMod {\n abi: foreign_mod.abi,\n items: foreign_mod.items.into_iter().filter_map(|item| self.configure(item)).collect(),\n }\n }\n\n fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {\n let fold_struct = |this: &mut Self, vdata| match vdata {\n ast::VariantData::Struct(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Struct(fields.collect(), id)\n }\n ast::VariantData::Tuple(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Tuple(fields.collect(), id)\n }\n ast::VariantData::Unit(id) => ast::VariantData::Unit(id)\n };\n\n let item = match item {\n ast::ItemKind::Impl(u, o, a, b, c, items) => {\n let items = items.into_iter().filter_map(|item| self.configure(item)).collect();\n ast::ItemKind::Impl(u, o, a, b, c, items)\n }\n ast::ItemKind::Trait(u, a, b, items) => {\n let items = items.into_iter().filter_map(|item| self.configure(item)).collect();\n ast::ItemKind::Trait(u, a, b, items)\n }\n ast::ItemKind::Struct(def, generics) => {\n ast::ItemKind::Struct(fold_struct(self, def), generics)\n }\n ast::ItemKind::Enum(def, generics) => {\n let variants = def.variants.into_iter().filter_map(|v| {\n self.configure(v).map(|v| {\n Spanned {\n node: ast::Variant_ {\n name: v.node.name,\n attrs: v.node.attrs,\n data: fold_struct(self, v.node.data),\n disr_expr: v.node.disr_expr,\n },\n span: v.span\n }\n })\n });\n ast::ItemKind::Enum(ast::EnumDef {\n variants: variants.collect(),\n }, generics)\n }\n item => item,\n };\n\n 
fold::noop_fold_item_kind(item, self)\n }\n\n fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {\n \/\/ If an expr is valid to cfg away it will have been removed by the\n \/\/ outer stmt or expression folder before descending in here.\n \/\/ Anything else is always required, and thus has to error out\n \/\/ in case of a cfg attr.\n \/\/\n \/\/ NB: This is intentionally not part of the fold_expr() function\n \/\/ in order for fold_opt_expr() to be able to avoid this check\n self.visit_unconfigurable_expr(&expr);\n fold_expr(self, expr)\n }\n\n fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {\n self.configure(expr).map(|expr| fold_expr(self, expr))\n }\n\n fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector<ast::Stmt> {\n self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {\n fold::noop_fold_mac(mac, self)\n }\n\n fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {\n self.configure(item).map(|item| SmallVector::one(item.map(|i| self.fold_item_simple(i))))\n .unwrap_or(SmallVector::zero())\n }\n}\n\nfn fold_expr<F: CfgFolder>(folder: &mut F, expr: P<ast::Expr>) -> P<ast::Expr> {\n expr.map(|ast::Expr {id, span, node, attrs}| {\n fold::noop_fold_expr(ast::Expr {\n id: id,\n node: match node {\n ast::ExprKind::Match(m, arms) => {\n ast::ExprKind::Match(m, arms.into_iter()\n .filter_map(|a| folder.configure(a))\n .collect())\n }\n _ => node\n },\n span: span,\n attrs: attrs,\n }, folder)\n })\n}\n\nfn is_cfg(attr: &ast::Attribute) -> bool {\n attr.check_name(\"cfg\")\n}\n\n\/\/ Determine if an item should be translated in the current crate\n\/\/ configuration based on the item's attributes\nfn in_cfg<T: CfgDiag>(cfg: &[P<ast::MetaItem>],\n attrs: &[ast::Attribute],\n diag: &mut T) -> bool {\n attrs.iter().all(|attr| {\n let mis = match attr.node.value.node {\n ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis,\n _ => return true\n };\n\n if mis.len() != 1 {\n diag.emit_error(|diagnostic| {\n diagnostic.span_err(attr.span, \"expected 1 cfg-pattern\");\n });\n return true;\n }\n\n attr::cfg_matches(cfg, &mis[0], diag)\n })\n}\n\nstruct CfgAttrFolder<'a, T> {\n diag: T,\n config: &'a ast::CrateConfig,\n}\n\n\/\/ Process `#[cfg_attr]`.\nfn process_cfg_attr(diagnostic: &Handler, krate: ast::Crate,\n feature_gated_cfgs: &mut Vec<GatedCfgAttr>) -> ast::Crate {\n let mut fld = CfgAttrFolder {\n diag: CfgDiagReal {\n diag: diagnostic,\n feature_gated_cfgs: feature_gated_cfgs,\n },\n config: &krate.config.clone(),\n };\n fld.fold_crate(krate)\n}\n\nimpl<'a, T: CfgDiag> fold::Folder for CfgAttrFolder<'a, T> {\n fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {\n if !attr.check_name(\"cfg_attr\") {\n return fold::noop_fold_attribute(attr, self);\n }\n\n let attr_list = match attr.meta_item_list() {\n Some(attr_list) => attr_list,\n None => {\n self.diag.emit_error(|diag| {\n diag.span_err(attr.span,\n \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\");\n });\n return None;\n }\n };\n let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {\n (2, Some(cfg), Some(mi)) => (cfg, mi),\n _ => {\n self.diag.emit_error(|diag| {\n diag.span_err(attr.span,\n \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\");\n });\n return None;\n }\n };\n\n if attr::cfg_matches(&self.config[..], &cfg, &mut self.diag) {\n Some(respan(mi.span, ast::Attribute_ {\n id: attr::mk_attr_id(),\n style: 
attr.node.style,\n value: mi.clone(),\n is_sugared_doc: false,\n }))\n } else {\n None\n }\n }\n\n \/\/ Need the ability to run pre-expansion.\n fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {\n fold::noop_fold_mac(mac, self)\n }\n}\n\nfn check_for_gated_stmt_expr_attributes(krate: &ast::Crate,\n discovered: &mut Vec<GatedCfgAttr>) {\n let mut v = StmtExprAttrFeatureVisitor {\n config: &krate.config,\n discovered: discovered,\n };\n visit::walk_crate(&mut v, krate);\n}\n\n\/\/\/ To cover this feature, we need to discover all attributes\n\/\/\/ so we need to run before cfg.\nstruct StmtExprAttrFeatureVisitor<'a, 'b> {\n config: &'a ast::CrateConfig,\n discovered: &'b mut Vec<GatedCfgAttr>,\n}\n\n\/\/ Runs the cfg_attr and cfg folders locally in \"silent\" mode\n\/\/ to discover attribute use on stmts or expressions ahead of time\nimpl<'v, 'a, 'b> visit::Visitor<'v> for StmtExprAttrFeatureVisitor<'a, 'b> {\n fn visit_stmt(&mut self, s: &'v ast::Stmt) {\n \/\/ check if there even are any attributes on this node\n let stmt_attrs = s.node.attrs();\n if stmt_attrs.len() > 0 {\n \/\/ attributes on items are fine\n if let ast::StmtKind::Decl(ref decl, _) = s.node {\n if let ast::DeclKind::Item(_) = decl.node {\n visit::walk_stmt(self, s);\n return;\n }\n }\n\n \/\/ flag the offending attributes\n for attr in stmt_attrs {\n self.discovered.push(GatedCfgAttr::GatedAttr(attr.span));\n }\n\n \/\/ if the node does not end up being cfg-d away, walk down\n if node_survives_cfg(stmt_attrs, self.config) {\n visit::walk_stmt(self, s);\n }\n } else {\n visit::walk_stmt(self, s);\n }\n }\n\n fn visit_expr(&mut self, ex: &'v ast::Expr) {\n \/\/ check if there even are any attributes on this node\n let expr_attrs = ex.attrs();\n if expr_attrs.len() > 0 {\n\n \/\/ flag the offending attributes\n for attr in expr_attrs {\n self.discovered.push(GatedCfgAttr::GatedAttr(attr.span));\n }\n\n \/\/ if the node does not end up being cfg-d away, walk down\n if node_survives_cfg(expr_attrs, self.config) {\n visit::walk_expr(self, ex);\n }\n } else {\n visit::walk_expr(self, ex);\n }\n }\n\n fn visit_foreign_item(&mut self, i: &'v ast::ForeignItem) {\n if node_survives_cfg(&i.attrs, self.config) {\n visit::walk_foreign_item(self, i);\n }\n }\n\n fn visit_item(&mut self, i: &'v ast::Item) {\n if node_survives_cfg(&i.attrs, self.config) {\n visit::walk_item(self, i);\n }\n }\n\n fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) {\n if node_survives_cfg(&ii.attrs, self.config) {\n visit::walk_impl_item(self, ii);\n }\n }\n\n fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) {\n if node_survives_cfg(&ti.attrs, self.config) {\n visit::walk_trait_item(self, ti);\n }\n }\n\n fn visit_struct_field(&mut self, s: &'v ast::StructField) {\n if node_survives_cfg(&s.attrs, self.config) {\n visit::walk_struct_field(self, s);\n }\n }\n\n fn visit_variant(&mut self, v: &'v ast::Variant,\n g: &'v ast::Generics, item_id: ast::NodeId) {\n if node_survives_cfg(&v.node.attrs, self.config) {\n visit::walk_variant(self, v, g, item_id);\n }\n }\n\n fn visit_arm(&mut self, a: &'v ast::Arm) {\n if node_survives_cfg(&a.attrs, self.config) {\n visit::walk_arm(self, a);\n }\n }\n\n \/\/ This visitor runs pre expansion, so we need to prevent\n \/\/ the default panic here\n fn visit_mac(&mut self, mac: &'v ast::Mac) {\n visit::walk_mac(self, mac)\n }\n}\n\npub trait CfgDiag {\n fn emit_error<F>(&mut self, f: F) where F: FnMut(&Handler);\n fn flag_gated<F>(&mut self, f: F) where F: FnMut(&mut Vec<GatedCfgAttr>);\n}\n\npub struct 
CfgDiagReal<'a, 'b> {\n pub diag: &'a Handler,\n pub feature_gated_cfgs: &'b mut Vec<GatedCfgAttr>,\n}\n\nimpl<'a, 'b> CfgDiag for CfgDiagReal<'a, 'b> {\n fn emit_error<F>(&mut self, mut f: F) where F: FnMut(&Handler) {\n f(self.diag)\n }\n fn flag_gated<F>(&mut self, mut f: F) where F: FnMut(&mut Vec<GatedCfgAttr>) {\n f(self.feature_gated_cfgs)\n }\n}\n\nstruct CfgDiagSilent {\n error: bool,\n}\n\nimpl CfgDiag for CfgDiagSilent {\n fn emit_error<F>(&mut self, _: F) where F: FnMut(&Handler) {\n self.error = true;\n }\n fn flag_gated<F>(&mut self, _: F) where F: FnMut(&mut Vec<GatedCfgAttr>) {}\n}\n\nfn node_survives_cfg(attrs: &[ast::Attribute],\n config: &ast::CrateConfig) -> bool {\n let mut survives_cfg = true;\n\n for attr in attrs {\n let mut fld = CfgAttrFolder {\n diag: CfgDiagSilent { error: false },\n config: config,\n };\n let attr = fld.fold_attribute(attr.clone());\n\n \/\/ In case of error we can just return true,\n \/\/ since the actual cfg folders will end compilation anyway.\n\n if fld.diag.error { return true; }\n\n survives_cfg &= attr.map(|attr| {\n let mut diag = CfgDiagSilent { error: false };\n let r = in_cfg(config, &[attr], &mut diag);\n if diag.error { return true; }\n r\n }).unwrap_or(true)\n }\n\n survives_cfg\n}\n<commit_msg>Move cfg_attr processing and stmt\/expr attribute gated feature checking into `StripUnconfigured`<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse attr::{AttrMetaMethods, HasAttrs};\nuse errors::Handler;\nuse feature_gate::GatedCfgAttr;\nuse fold::Folder;\nuse {ast, fold, attr};\nuse codemap::{Spanned, respan};\nuse ptr::P;\n\nuse util::small_vector::SmallVector;\n\npub trait CfgFolder: fold::Folder {\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T>;\n fn visit_stmt_or_expr_attrs(&mut self, _attrs: &[ast::Attribute]) {}\n fn visit_unconfigurable_expr(&mut self, _expr: &ast::Expr) {}\n}\n\n\/\/\/ A folder that strips out items that do not belong in the current\n\/\/\/ configuration.\npub struct StripUnconfigured<'a> {\n diag: CfgDiagReal<'a, 'a>,\n config: &'a ast::CrateConfig,\n}\n\nimpl<'a> StripUnconfigured<'a> {\n \/\/ Determine if an item should be translated in the current crate\n \/\/ configuration based on the item's attributes\n fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {\n attrs.iter().all(|attr| {\n let mis = match attr.node.value.node {\n ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis,\n _ => return true\n };\n\n if mis.len() != 1 {\n self.diag.emit_error(|diagnostic| {\n diagnostic.span_err(attr.span, \"expected 1 cfg-pattern\");\n });\n return true;\n }\n\n attr::cfg_matches(self.config, &mis[0], &mut self.diag)\n })\n }\n\n fn process_cfg_attrs(&mut self, attrs: Vec<ast::Attribute>) -> Vec<ast::Attribute> {\n attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect()\n }\n\n fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {\n if !attr.check_name(\"cfg_attr\") {\n return Some(attr);\n }\n\n let attr_list = match attr.meta_item_list() {\n Some(attr_list) => attr_list,\n None => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {\n (2, Some(cfg), Some(mi)) => (cfg, mi),\n _ => {\n let msg = \"expected `#[cfg_attr(<cfg pattern>, <attr>)]`\";\n self.diag.diag.span_err(attr.span, msg);\n return None;\n }\n };\n\n if attr::cfg_matches(self.config, &cfg, &mut self.diag) {\n Some(respan(mi.span, ast::Attribute_ {\n id: attr::mk_attr_id(),\n style: attr.node.style,\n value: mi.clone(),\n is_sugared_doc: false,\n }))\n } else {\n None\n }\n }\n}\n\nimpl<'a> CfgFolder for StripUnconfigured<'a> {\n fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {\n let node = node.map_attrs(|attrs| self.process_cfg_attrs(attrs));\n if self.in_cfg(node.attrs()) { Some(node) } else { None }\n }\n\n fn visit_stmt_or_expr_attrs(&mut self, attrs: &[ast::Attribute]) {\n \/\/ flag the offending attributes\n for attr in attrs.iter() {\n self.diag.feature_gated_cfgs.push(GatedCfgAttr::GatedAttr(attr.span));\n }\n }\n\n fn visit_unconfigurable_expr(&mut self, expr: &ast::Expr) {\n if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) {\n let msg = \"removing an expression is not supported in this position\";\n self.diag.diag.span_err(attr.span, msg);\n }\n }\n}\n\n\/\/ Support conditional compilation by transforming the AST, stripping out\n\/\/ any items that do not belong in the current configuration\npub fn strip_unconfigured_items(diagnostic: &Handler, krate: ast::Crate,\n feature_gated_cfgs: &mut Vec<GatedCfgAttr>)\n -> ast::Crate\n{\n StripUnconfigured {\n config: &krate.config.clone(),\n diag: CfgDiagReal {\n diag: diagnostic,\n feature_gated_cfgs: feature_gated_cfgs,\n },\n 
}.fold_crate(krate)\n}\n\nimpl<T: CfgFolder> fold::Folder for T {\n fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod {\n ast::ForeignMod {\n abi: foreign_mod.abi,\n items: foreign_mod.items.into_iter().filter_map(|item| {\n self.configure(item).map(|item| fold::noop_fold_foreign_item(item, self))\n }).collect(),\n }\n }\n\n fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {\n let fold_struct = |this: &mut Self, vdata| match vdata {\n ast::VariantData::Struct(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Struct(fields.collect(), id)\n }\n ast::VariantData::Tuple(fields, id) => {\n let fields = fields.into_iter().filter_map(|field| this.configure(field));\n ast::VariantData::Tuple(fields.collect(), id)\n }\n ast::VariantData::Unit(id) => ast::VariantData::Unit(id)\n };\n\n let item = match item {\n ast::ItemKind::Impl(u, o, a, b, c, items) => {\n let items = items.into_iter().filter_map(|item| self.configure(item)).collect();\n ast::ItemKind::Impl(u, o, a, b, c, items)\n }\n ast::ItemKind::Trait(u, a, b, items) => {\n let items = items.into_iter().filter_map(|item| self.configure(item)).collect();\n ast::ItemKind::Trait(u, a, b, items)\n }\n ast::ItemKind::Struct(def, generics) => {\n ast::ItemKind::Struct(fold_struct(self, def), generics)\n }\n ast::ItemKind::Enum(def, generics) => {\n let variants = def.variants.into_iter().filter_map(|v| {\n self.configure(v).map(|v| {\n Spanned {\n node: ast::Variant_ {\n name: v.node.name,\n attrs: v.node.attrs,\n data: fold_struct(self, v.node.data),\n disr_expr: v.node.disr_expr,\n },\n span: v.span\n }\n })\n });\n ast::ItemKind::Enum(ast::EnumDef {\n variants: variants.collect(),\n }, generics)\n }\n item => item,\n };\n\n fold::noop_fold_item_kind(item, self)\n }\n\n fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {\n self.visit_stmt_or_expr_attrs(expr.attrs());\n \/\/ If an expr is valid to cfg away it will have been removed by the\n \/\/ outer stmt or expression folder before descending in here.\n \/\/ Anything else is always required, and thus has to error out\n \/\/ in case of a cfg attr.\n \/\/\n \/\/ NB: This is intentionally not part of the fold_expr() function\n \/\/ in order for fold_opt_expr() to be able to avoid this check\n self.visit_unconfigurable_expr(&expr);\n fold_expr(self, expr)\n }\n\n fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {\n self.configure(expr).map(|expr| fold_expr(self, expr))\n }\n\n fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector<ast::Stmt> {\n let is_item = match stmt.node {\n ast::StmtKind::Decl(ref decl, _) => match decl.node {\n ast::DeclKind::Item(_) => true,\n _ => false,\n },\n _ => false,\n };\n\n \/\/ avoid calling `visit_stmt_or_expr_attrs` on items\n if !is_item {\n self.visit_stmt_or_expr_attrs(stmt.attrs());\n }\n\n self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self))\n .unwrap_or(SmallVector::zero())\n }\n\n fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {\n fold::noop_fold_mac(mac, self)\n }\n\n fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {\n self.configure(item).map(|item| SmallVector::one(item.map(|i| self.fold_item_simple(i))))\n .unwrap_or(SmallVector::zero())\n }\n}\n\nfn fold_expr<F: CfgFolder>(folder: &mut F, expr: P<ast::Expr>) -> P<ast::Expr> {\n expr.map(|ast::Expr {id, span, node, attrs}| {\n fold::noop_fold_expr(ast::Expr {\n id: id,\n node: match node {\n ast::ExprKind::Match(m, 
arms) => {\n ast::ExprKind::Match(m, arms.into_iter()\n .filter_map(|a| folder.configure(a))\n .collect())\n }\n _ => node\n },\n span: span,\n attrs: attrs,\n }, folder)\n })\n}\n\nfn is_cfg(attr: &ast::Attribute) -> bool {\n attr.check_name(\"cfg\")\n}\n\npub trait CfgDiag {\n fn emit_error<F>(&mut self, f: F) where F: FnMut(&Handler);\n fn flag_gated<F>(&mut self, f: F) where F: FnMut(&mut Vec<GatedCfgAttr>);\n}\n\npub struct CfgDiagReal<'a, 'b> {\n pub diag: &'a Handler,\n pub feature_gated_cfgs: &'b mut Vec<GatedCfgAttr>,\n}\n\nimpl<'a, 'b> CfgDiag for CfgDiagReal<'a, 'b> {\n fn emit_error<F>(&mut self, mut f: F) where F: FnMut(&Handler) {\n f(self.diag)\n }\n fn flag_gated<F>(&mut self, mut f: F) where F: FnMut(&mut Vec<GatedCfgAttr>) {\n f(self.feature_gated_cfgs)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>k20::uart: Port to ioregs!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add idempotency test for issue 5399<commit_after>\/\/ rustfmt-max_width: 140\n\nimpl NotificationRepository {\n fn set_status_changed(\n &self,\n repo_tx_conn: &RepoTxConn,\n rid: &RoutableId,\n changed_at: NaiveDateTime,\n ) -> NukeResult<Option<NotificationStatus>> {\n repo_tx_conn.run(move |conn| {\n let res = diesel::update(client_notification::table)\n .filter(\n client_notification::routable_id.eq(DieselRoutableId(rid.clone())).and(\n client_notification::changed_at\n .lt(changed_at)\n .or(client_notification::changed_at.is_null()),\n ),\n )\n .set(client_notification::changed_at.eq(changed_at))\n .returning((\n client_notification::id,\n client_notification::changed_at,\n client_notification::polled_at,\n client_notification::notified_at,\n ))\n .get_result::<(Uuid, Option<NaiveDateTime>, Option<NaiveDateTime>, Option<NaiveDateTime>)>(conn)\n .optional()?;\n\n match res {\n Some(row) => {\n let client_id = client_contract::table\n .inner_join(client_notification::table)\n .filter(client_notification::id.eq(row.0))\n .select(client_contract::client_id)\n .get_result::<Uuid>(conn)?;\n\n Ok(Some(NotificationStatus {\n client_id: client_id.into(),\n changed_at: row.1,\n polled_at: row.2,\n notified_at: row.3,\n }))\n }\n None => Ok(None),\n }\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make ToLua require a sized type.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Event handling (mouse, keyboard, controller, touch screen, etc.)\n\/\/!\n\/\/! See [`Event`](enum.Event.html) for more information.\n\/\/!\n\/\/! # Unstable\n\/\/!\n\/\/! There are still many unanswered questions about the design of the events API in the turtle\n\/\/! crate. This module may change or be completely removed in the future. There will definitely\n\/\/! be *some* events API in the future, but it may end up looking different than it does today.\n\nuse serde::{Serialize, Deserialize};\nuse glutin::{\n dpi::{LogicalSize, PhysicalPosition},\n event::{self as glutin_event, WindowEvent, KeyboardInput},\n};\n\nuse crate::Point;\n\n\/\/\/ Possible events returned from [`Drawing::poll_event()`](..\/struct.Drawing.html#method.poll_event).\n\/\/\/\n\/\/\/ Events are used to make programs more interactive. 
See that method's documentation for more\n\/\/\/ information about how to use events.\n#[non_exhaustive]\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\npub enum Event {\n \/\/\/ Sent when a keyboard key is pressed or released\n Key(Key, PressedState),\n\n \/\/\/ Sent when a mouse button is pressed or released\n MouseButton(MouseButton, PressedState),\n\n \/\/\/ Sent when the mouse is moving. Only sent when the mouse is over the window.\n \/\/\/ `x` and `y` represent the new coordinates of where the mouse is currently.\n \/\/\/\n \/\/\/ Coordinates are relative to the center of the window.\n MouseMove(Point),\n\n \/\/\/ Sent when the mouse is scrolled. Only sent when the mouse is over the window.\n \/\/\/ `x` and `y` are in scroll ticks.\n MouseScroll { x: f64, y: f64 },\n\n \/\/\/ Sent when the window gets resized\n WindowResized { width: u32, height: u32 },\n\n \/\/\/ Sent when the window focus changes\n \/\/\/\n \/\/\/ The boolean value is true if the window is in focus.\n WindowFocused(bool),\n \/\/\/ Sent when the cursor enters or leaves the window\n \/\/\/\n \/\/\/ The boolean value is true if the cursor entered the window, and false if it left.\n WindowCursor(bool),\n\n \/\/\/ Sent when the window is closed\n WindowClosed,\n}\n\nimpl Event {\n \/\/\/ Returns `None` if the input event is not a supported variant of `Event`\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n pub(crate) fn from_window_event(\n event: WindowEvent,\n scale_factor: f64,\n to_logical: impl FnOnce(PhysicalPosition<f64>) -> Point,\n ) -> Option<Self> {\n match event {\n WindowEvent::Resized(size) => {\n let LogicalSize {width, height} = size.to_logical(scale_factor);\n Some(Event::WindowResized {width, height})\n },\n\n WindowEvent::KeyboardInput {input: KeyboardInput {state, virtual_keycode, ..}, ..} => {\n Some(Event::Key(\n Key::from_keycode(virtual_keycode?)?,\n PressedState::from_state(state),\n ))\n },\n WindowEvent::CursorEntered {..} => Some(Event::WindowCursor(true)),\n WindowEvent::CursorLeft {..} => Some(Event::WindowCursor(false)),\n WindowEvent::CursorMoved {position, ..} => {\n Some(Event::MouseMove(to_logical(position)))\n },\n WindowEvent::MouseInput {state, button, ..} => Some(Event::MouseButton(\n MouseButton::from_button(button)?,\n PressedState::from_state(state),\n )),\n WindowEvent::Focused(focused) => Some(Event::WindowFocused(focused)),\n WindowEvent::Destroyed => Some(Event::WindowClosed),\n\n WindowEvent::Moved(_) |\n WindowEvent::CloseRequested |\n WindowEvent::DroppedFile(_) |\n WindowEvent::HoveredFile(_) |\n WindowEvent::HoveredFileCancelled |\n WindowEvent::ReceivedCharacter(_) |\n WindowEvent::ModifiersChanged(_) |\n WindowEvent::MouseWheel {..} |\n WindowEvent::TouchpadPressure {..} |\n WindowEvent::AxisMotion {..} |\n WindowEvent::Touch(_) |\n WindowEvent::ScaleFactorChanged {..} |\n WindowEvent::ThemeChanged(_) => None, \/\/ Not supported\n }\n }\n}\n\n\/\/TODO: Documentation\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum PressedState {\n Pressed,\n Released,\n}\n\nimpl PressedState {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_state(state: glutin_event::ElementState) -> PressedState {\n match state {\n glutin_event::ElementState::Pressed => PressedState::Pressed,\n glutin_event::ElementState::Released => PressedState::Released,\n }\n }\n}\n\n\/\/TODO: Documentation\n#[non_exhaustive]\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum Key {\n \/\/\/ The '1' key over 
the letters.\n Num1,\n \/\/\/ The '2' key over the letters.\n Num2,\n \/\/\/ The '3' key over the letters.\n Num3,\n \/\/\/ The '4' key over the letters.\n Num4,\n \/\/\/ The '5' key over the letters.\n Num5,\n \/\/\/ The '6' key over the letters.\n Num6,\n \/\/\/ The '7' key over the letters.\n Num7,\n \/\/\/ The '8' key over the letters.\n Num8,\n \/\/\/ The '9' key over the letters.\n Num9,\n \/\/\/ The '0' key over the letters.\n Num0,\n\n A,\n B,\n C,\n D,\n E,\n F,\n G,\n H,\n I,\n J,\n K,\n L,\n M,\n N,\n O,\n P,\n Q,\n R,\n S,\n T,\n U,\n V,\n W,\n X,\n Y,\n Z,\n\n \/\/\/ The Escape key, next to F1\n Esc,\n\n F1,\n F2,\n F3,\n F4,\n F5,\n F6,\n F7,\n F8,\n F9,\n F10,\n F11,\n F12,\n F13,\n F14,\n F15,\n F16,\n F17,\n F18,\n F19,\n F20,\n F21,\n F22,\n F23,\n F24,\n\n Home,\n Delete,\n End,\n \/\/\/ The PageDown (PgDn) key\n PageDown,\n \/\/\/ The PageUp (PgUp) key\n PageUp,\n \/\/\/ The backspace key, right over Enter\/Return\n Backspace,\n \/\/\/ The Enter\/Return key, under Backspace\n Return,\n \/\/\/ The spacebar key\n Space,\n\n \/\/\/ The up arrow key\n UpArrow,\n \/\/\/ The left arrow key\n LeftArrow,\n \/\/\/ The right arrow key\n RightArrow,\n \/\/\/ The down arrow key\n DownArrow,\n\n Numpad0,\n Numpad1,\n Numpad2,\n Numpad3,\n Numpad4,\n Numpad5,\n Numpad6,\n Numpad7,\n Numpad8,\n Numpad9,\n NumpadComma,\n NumpadEnter,\n NumpadEquals,\n\n Apostrophe,\n At,\n Backslash,\n Backtick,\n Colon,\n Comma,\n Decimal,\n Divide,\n Equals,\n Minus,\n Multiply,\n Period,\n Plus,\n \/\/\/ The left bracket `[` key\n LeftBracket,\n \/\/\/ The left bracket `]` key\n RightBracket,\n Semicolon,\n Slash,\n Tab,\n}\n\nimpl Key {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_keycode(key: glutin_event::VirtualKeyCode) -> Option<Self> {\n use glutin_event::VirtualKeyCode::*;\n #[deny(unreachable_patterns, unused_variables)]\n Some(match key {\n Key1 => Key::Num1,\n Key2 => Key::Num2,\n Key3 => Key::Num3,\n Key4 => Key::Num4,\n Key5 => Key::Num5,\n Key6 => Key::Num6,\n Key7 => Key::Num7,\n Key8 => Key::Num8,\n Key9 => Key::Num9,\n Key0 => Key::Num0,\n\n A => Key::A,\n B => Key::B,\n C => Key::C,\n D => Key::D,\n E => Key::E,\n F => Key::F,\n G => Key::G,\n H => Key::H,\n I => Key::I,\n J => Key::J,\n K => Key::K,\n L => Key::L,\n M => Key::M,\n N => Key::N,\n O => Key::O,\n P => Key::P,\n Q => Key::Q,\n R => Key::R,\n S => Key::S,\n T => Key::T,\n U => Key::U,\n V => Key::V,\n W => Key::W,\n X => Key::X,\n Y => Key::Y,\n Z => Key::Z,\n\n Escape => Key::Esc,\n\n F1 => Key::F1,\n F2 => Key::F2,\n F3 => Key::F3,\n F4 => Key::F4,\n F5 => Key::F5,\n F6 => Key::F6,\n F7 => Key::F7,\n F8 => Key::F8,\n F9 => Key::F9,\n F10 => Key::F10,\n F11 => Key::F11,\n F12 => Key::F12,\n F13 => Key::F13,\n F14 => Key::F14,\n F15 => Key::F15,\n F16 => Key::F16,\n F17 => Key::F17,\n F18 => Key::F18,\n F19 => Key::F19,\n F20 => Key::F20,\n F21 => Key::F21,\n F22 => Key::F22,\n F23 => Key::F23,\n F24 => Key::F24,\n\n Home => Key::Home,\n Delete => Key::Delete,\n End => Key::End,\n PageDown => Key::PageDown,\n PageUp => Key::PageUp,\n Back => Key::Backspace,\n Return => Key::Return,\n Space => Key::Space,\n\n Left => Key::LeftArrow,\n Up => Key::UpArrow,\n Right => Key::RightArrow,\n Down => Key::DownArrow,\n\n Numpad0 => Key::Numpad0,\n Numpad1 => Key::Numpad1,\n Numpad2 => Key::Numpad2,\n Numpad3 => Key::Numpad3,\n Numpad4 => Key::Numpad4,\n Numpad5 => Key::Numpad5,\n Numpad6 => Key::Numpad6,\n Numpad7 => Key::Numpad7,\n Numpad8 => Key::Numpad8,\n Numpad9 => Key::Numpad9,\n\n Apostrophe => 
Key::Apostrophe,\n At => Key::At,\n Backslash => Key::Backslash,\n Colon => Key::Colon,\n Comma => Key::Comma,\n Equals => Key::Equals,\n Grave => Key::Backtick,\n LBracket => Key::LeftBracket,\n NumpadAdd | Plus => Key::Plus,\n NumpadComma => Key::NumpadComma,\n NumpadDecimal => Key::Decimal,\n NumpadDivide => Key::Divide,\n NumpadEnter => Key::NumpadEnter,\n NumpadEquals => Key::NumpadEquals,\n NumpadMultiply | Asterisk => Key::Multiply,\n NumpadSubtract | Minus => Key::Minus,\n Period => Key::Period,\n RBracket => Key::RightBracket,\n Semicolon => Key::Semicolon,\n Slash => Key::Slash,\n Tab => Key::Tab,\n\n \/\/ Unsupported keys (could be changed in the future)\n Snapshot |\n Scroll |\n Pause |\n Insert |\n Compose |\n Caret |\n Numlock |\n AbntC1 |\n AbntC2 |\n Apps |\n Ax |\n Calculator |\n Capital |\n Convert |\n Kana |\n Kanji |\n LAlt |\n LControl |\n LShift |\n LWin |\n Mail |\n MediaSelect |\n MediaStop |\n Mute |\n MyComputer |\n NavigateForward |\n NavigateBackward |\n NextTrack |\n NoConvert |\n OEM102 |\n PlayPause |\n Power |\n PrevTrack |\n RAlt |\n RControl |\n RShift |\n RWin |\n Sleep |\n Stop |\n Sysrq |\n Underline |\n Unlabeled |\n VolumeDown |\n VolumeUp |\n Wake |\n WebBack |\n WebFavorites |\n WebForward |\n WebHome |\n WebRefresh |\n WebSearch |\n WebStop |\n Yen |\n Copy |\n Paste |\n Cut => return None,\n })\n }\n}\n\n\/\/TODO: Documentation\n#[non_exhaustive]\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum MouseButton {\n \/\/\/ The left mouse button\n LeftButton,\n \/\/\/ The middle mouse button\n MiddleButton,\n \/\/\/ The right mouse button\n RightButton,\n}\n\nimpl MouseButton {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_button(button: glutin_event::MouseButton) -> Option<Self> {\n use glutin_event::MouseButton::*;\n #[deny(unreachable_patterns, unused_variables)]\n match button {\n Left => Some(MouseButton::LeftButton),\n Middle => Some(MouseButton::MiddleButton),\n Right => Some(MouseButton::RightButton),\n Other(_) => None,\n }\n }\n}\n<commit_msg>Fix over->above, under->below in doc comments<commit_after>\/\/! Event handling (mouse, keyboard, controller, touch screen, etc.)\n\/\/!\n\/\/! See [`Event`](enum.Event.html) for more information.\n\/\/!\n\/\/! # Unstable\n\/\/!\n\/\/! There are still many unanswered questions about the design of the events API in the turtle\n\/\/! crate. This module may change or be completely removed in the future. There will definitely\n\/\/! be *some* events API in the future, but it may end up looking different than it does today.\n\nuse serde::{Serialize, Deserialize};\nuse glutin::{\n dpi::{LogicalSize, PhysicalPosition},\n event::{self as glutin_event, WindowEvent, KeyboardInput},\n};\n\nuse crate::Point;\n\n\/\/\/ Possible events returned from [`Drawing::poll_event()`](..\/struct.Drawing.html#method.poll_event).\n\/\/\/\n\/\/\/ Events are used to make programs more interactive. See that method's documentation for more\n\/\/\/ information about how to use events.\n#[non_exhaustive]\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\npub enum Event {\n \/\/\/ Sent when a keyboard key is pressed or released\n Key(Key, PressedState),\n\n \/\/\/ Sent when a mouse button is pressed or released\n MouseButton(MouseButton, PressedState),\n\n \/\/\/ Sent when the mouse is moving. 
Only sent when the mouse is over the window.\n \/\/\/ `x` and `y` represent the new coordinates of where the mouse is currently.\n \/\/\/\n \/\/\/ Coordinates are relative to the center of the window.\n MouseMove(Point),\n\n \/\/\/ Sent when the mouse is scrolled. Only sent when the mouse is over the window.\n \/\/\/ `x` and `y` are in scroll ticks.\n MouseScroll { x: f64, y: f64 },\n\n \/\/\/ Sent when the window gets resized\n WindowResized { width: u32, height: u32 },\n\n \/\/\/ Sent when the window focus changes\n \/\/\/\n \/\/\/ The boolean value is true if the window is in focus.\n WindowFocused(bool),\n \/\/\/ Sent when the cursor enters or leaves the window\n \/\/\/\n \/\/\/ The boolean value is true if the cursor entered the window, and false if it left.\n WindowCursor(bool),\n\n \/\/\/ Sent when the window is closed\n WindowClosed,\n}\n\nimpl Event {\n \/\/\/ Returns `None` if the input event is not a supported variant of `Event`\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n pub(crate) fn from_window_event(\n event: WindowEvent,\n scale_factor: f64,\n to_logical: impl FnOnce(PhysicalPosition<f64>) -> Point,\n ) -> Option<Self> {\n match event {\n WindowEvent::Resized(size) => {\n let LogicalSize {width, height} = size.to_logical(scale_factor);\n Some(Event::WindowResized {width, height})\n },\n\n WindowEvent::KeyboardInput {input: KeyboardInput {state, virtual_keycode, ..}, ..} => {\n Some(Event::Key(\n Key::from_keycode(virtual_keycode?)?,\n PressedState::from_state(state),\n ))\n },\n WindowEvent::CursorEntered {..} => Some(Event::WindowCursor(true)),\n WindowEvent::CursorLeft {..} => Some(Event::WindowCursor(false)),\n WindowEvent::CursorMoved {position, ..} => {\n Some(Event::MouseMove(to_logical(position)))\n },\n WindowEvent::MouseInput {state, button, ..} => Some(Event::MouseButton(\n MouseButton::from_button(button)?,\n PressedState::from_state(state),\n )),\n WindowEvent::Focused(focused) => Some(Event::WindowFocused(focused)),\n WindowEvent::Destroyed => Some(Event::WindowClosed),\n\n WindowEvent::Moved(_) |\n WindowEvent::CloseRequested |\n WindowEvent::DroppedFile(_) |\n WindowEvent::HoveredFile(_) |\n WindowEvent::HoveredFileCancelled |\n WindowEvent::ReceivedCharacter(_) |\n WindowEvent::ModifiersChanged(_) |\n WindowEvent::MouseWheel {..} |\n WindowEvent::TouchpadPressure {..} |\n WindowEvent::AxisMotion {..} |\n WindowEvent::Touch(_) |\n WindowEvent::ScaleFactorChanged {..} |\n WindowEvent::ThemeChanged(_) => None, \/\/ Not supported\n }\n }\n}\n\n\/\/TODO: Documentation\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum PressedState {\n Pressed,\n Released,\n}\n\nimpl PressedState {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_state(state: glutin_event::ElementState) -> PressedState {\n match state {\n glutin_event::ElementState::Pressed => PressedState::Pressed,\n glutin_event::ElementState::Released => PressedState::Released,\n }\n }\n}\n\n\/\/TODO: Documentation\n#[non_exhaustive]\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum Key {\n \/\/\/ The '1' key above the letters.\n Num1,\n \/\/\/ The '2' key above the letters.\n Num2,\n \/\/\/ The '3' key above the letters.\n Num3,\n \/\/\/ The '4' key above the letters.\n Num4,\n \/\/\/ The '5' key above the letters.\n Num5,\n \/\/\/ The '6' key above the letters.\n Num6,\n \/\/\/ The '7' key above the letters.\n Num7,\n \/\/\/ The '8' key above the letters.\n Num8,\n \/\/\/ The '9' key above the letters.\n Num9,\n 
\/\/\/ The '0' key above the letters.\n Num0,\n\n A,\n B,\n C,\n D,\n E,\n F,\n G,\n H,\n I,\n J,\n K,\n L,\n M,\n N,\n O,\n P,\n Q,\n R,\n S,\n T,\n U,\n V,\n W,\n X,\n Y,\n Z,\n\n \/\/\/ The Escape key, next to F1\n Esc,\n\n F1,\n F2,\n F3,\n F4,\n F5,\n F6,\n F7,\n F8,\n F9,\n F10,\n F11,\n F12,\n F13,\n F14,\n F15,\n F16,\n F17,\n F18,\n F19,\n F20,\n F21,\n F22,\n F23,\n F24,\n\n Home,\n Delete,\n End,\n \/\/\/ The PageDown (PgDn) key\n PageDown,\n \/\/\/ The PageUp (PgUp) key\n PageUp,\n \/\/\/ The backspace key, right above Enter\/Return\n Backspace,\n \/\/\/ The Enter\/Return key, below Backspace\n Return,\n \/\/\/ The spacebar key\n Space,\n\n \/\/\/ The up arrow key\n UpArrow,\n \/\/\/ The left arrow key\n LeftArrow,\n \/\/\/ The right arrow key\n RightArrow,\n \/\/\/ The down arrow key\n DownArrow,\n\n Numpad0,\n Numpad1,\n Numpad2,\n Numpad3,\n Numpad4,\n Numpad5,\n Numpad6,\n Numpad7,\n Numpad8,\n Numpad9,\n NumpadComma,\n NumpadEnter,\n NumpadEquals,\n\n Apostrophe,\n At,\n Backslash,\n Backtick,\n Colon,\n Comma,\n Decimal,\n Divide,\n Equals,\n Minus,\n Multiply,\n Period,\n Plus,\n \/\/\/ The left bracket `[` key\n LeftBracket,\n \/\/\/ The left bracket `]` key\n RightBracket,\n Semicolon,\n Slash,\n Tab,\n}\n\nimpl Key {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_keycode(key: glutin_event::VirtualKeyCode) -> Option<Self> {\n use glutin_event::VirtualKeyCode::*;\n #[deny(unreachable_patterns, unused_variables)]\n Some(match key {\n Key1 => Key::Num1,\n Key2 => Key::Num2,\n Key3 => Key::Num3,\n Key4 => Key::Num4,\n Key5 => Key::Num5,\n Key6 => Key::Num6,\n Key7 => Key::Num7,\n Key8 => Key::Num8,\n Key9 => Key::Num9,\n Key0 => Key::Num0,\n\n A => Key::A,\n B => Key::B,\n C => Key::C,\n D => Key::D,\n E => Key::E,\n F => Key::F,\n G => Key::G,\n H => Key::H,\n I => Key::I,\n J => Key::J,\n K => Key::K,\n L => Key::L,\n M => Key::M,\n N => Key::N,\n O => Key::O,\n P => Key::P,\n Q => Key::Q,\n R => Key::R,\n S => Key::S,\n T => Key::T,\n U => Key::U,\n V => Key::V,\n W => Key::W,\n X => Key::X,\n Y => Key::Y,\n Z => Key::Z,\n\n Escape => Key::Esc,\n\n F1 => Key::F1,\n F2 => Key::F2,\n F3 => Key::F3,\n F4 => Key::F4,\n F5 => Key::F5,\n F6 => Key::F6,\n F7 => Key::F7,\n F8 => Key::F8,\n F9 => Key::F9,\n F10 => Key::F10,\n F11 => Key::F11,\n F12 => Key::F12,\n F13 => Key::F13,\n F14 => Key::F14,\n F15 => Key::F15,\n F16 => Key::F16,\n F17 => Key::F17,\n F18 => Key::F18,\n F19 => Key::F19,\n F20 => Key::F20,\n F21 => Key::F21,\n F22 => Key::F22,\n F23 => Key::F23,\n F24 => Key::F24,\n\n Home => Key::Home,\n Delete => Key::Delete,\n End => Key::End,\n PageDown => Key::PageDown,\n PageUp => Key::PageUp,\n Back => Key::Backspace,\n Return => Key::Return,\n Space => Key::Space,\n\n Left => Key::LeftArrow,\n Up => Key::UpArrow,\n Right => Key::RightArrow,\n Down => Key::DownArrow,\n\n Numpad0 => Key::Numpad0,\n Numpad1 => Key::Numpad1,\n Numpad2 => Key::Numpad2,\n Numpad3 => Key::Numpad3,\n Numpad4 => Key::Numpad4,\n Numpad5 => Key::Numpad5,\n Numpad6 => Key::Numpad6,\n Numpad7 => Key::Numpad7,\n Numpad8 => Key::Numpad8,\n Numpad9 => Key::Numpad9,\n\n Apostrophe => Key::Apostrophe,\n At => Key::At,\n Backslash => Key::Backslash,\n Colon => Key::Colon,\n Comma => Key::Comma,\n Equals => Key::Equals,\n Grave => Key::Backtick,\n LBracket => Key::LeftBracket,\n NumpadAdd | Plus => Key::Plus,\n NumpadComma => Key::NumpadComma,\n NumpadDecimal => Key::Decimal,\n NumpadDivide => Key::Divide,\n NumpadEnter => Key::NumpadEnter,\n NumpadEquals => 
Key::NumpadEquals,\n NumpadMultiply | Asterisk => Key::Multiply,\n NumpadSubtract | Minus => Key::Minus,\n Period => Key::Period,\n RBracket => Key::RightBracket,\n Semicolon => Key::Semicolon,\n Slash => Key::Slash,\n Tab => Key::Tab,\n\n \/\/ Unsupported keys (could be changed in the future)\n Snapshot |\n Scroll |\n Pause |\n Insert |\n Compose |\n Caret |\n Numlock |\n AbntC1 |\n AbntC2 |\n Apps |\n Ax |\n Calculator |\n Capital |\n Convert |\n Kana |\n Kanji |\n LAlt |\n LControl |\n LShift |\n LWin |\n Mail |\n MediaSelect |\n MediaStop |\n Mute |\n MyComputer |\n NavigateForward |\n NavigateBackward |\n NextTrack |\n NoConvert |\n OEM102 |\n PlayPause |\n Power |\n PrevTrack |\n RAlt |\n RControl |\n RShift |\n RWin |\n Sleep |\n Stop |\n Sysrq |\n Underline |\n Unlabeled |\n VolumeDown |\n VolumeUp |\n Wake |\n WebBack |\n WebFavorites |\n WebForward |\n WebHome |\n WebRefresh |\n WebSearch |\n WebStop |\n Yen |\n Copy |\n Paste |\n Cut => return None,\n })\n }\n}\n\n\/\/TODO: Documentation\n#[non_exhaustive]\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\npub enum MouseButton {\n \/\/\/ The left mouse button\n LeftButton,\n \/\/\/ The middle mouse button\n MiddleButton,\n \/\/\/ The right mouse button\n RightButton,\n}\n\nimpl MouseButton {\n #[cfg_attr(any(feature = \"test\", test), allow(dead_code))]\n fn from_button(button: glutin_event::MouseButton) -> Option<Self> {\n use glutin_event::MouseButton::*;\n #[deny(unreachable_patterns, unused_variables)]\n match button {\n Left => Some(MouseButton::LeftButton),\n Middle => Some(MouseButton::MiddleButton),\n Right => Some(MouseButton::RightButton),\n Other(_) => None,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse regex::Regex;\n\npub mod id;\npub mod id_type;\npub mod header;\npub mod hash;\n\n\nuse module::Module;\nuse storage::file::id::*;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse super::parser::{FileHeaderParser, Parser, ParserError};\n\nuse self::header::spec::*;\nuse self::header::data::*;\n\n\/*\n * Internal abstract view on a file. 
Does not exist on the FS and is just kept\n * internally until it is written to disk.\n *\/\npub struct File {\n pub owning_module_name : &'static str,\n pub header : FileHeaderData,\n pub data : String,\n pub id : FileID,\n}\n\nimpl File {\n\n pub fn owner_name(&self) -> &'static str {\n self.owning_module_name\n }\n\n pub fn header(&self) -> &FileHeaderData {\n &self.header\n }\n\n pub fn data(&self) -> &String {\n &self.data\n }\n\n pub fn contents(&self) -> (&FileHeaderData, &String) {\n (self.header(), self.data())\n }\n\n pub fn id(&self) -> &FileID {\n &self.id\n }\n\n pub fn matches_with(&self, r: &Regex) -> bool {\n r.is_match(&self.data[..]) || self.header.matches_with(r)\n }\n\n}\n\nimpl Display for File {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt,\n\"[File] Owner : '{:?}'\n FileID: '{:?}'\n Header: '{:?}'\n Data : '{:?}'\",\n self.owning_module_name,\n self.header,\n self.data,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Debug for File {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt,\n\"[File] Owner : '{:?}'\n FileID: '{:?}'\n Header: '{:?}'\n Data : '{:?}'\",\n self.owning_module_name,\n self.header,\n self.data,\n self.id);\n Ok(())\n }\n\n}\n\n#[cfg(test)]\nmod test {\n \/\/ we use the JSON parser here, so we can generate FileHeaderData\n use storage::json::parser::JsonHeaderParser;\n use super::match_header_spec;\n use storage::parser::{FileHeaderParser, ParserError};\n use storage::file::FileHeaderData as FHD;\n use storage::file::FileHeaderSpec as FHS;\n\n #[test]\n fn test_spec_matching() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec.clone()));\n let datares = parser.read(Some(text.clone()));\n assert!(datares.is_ok(), \"Text could not be parsed: '{}'\", text);\n let data = datares.unwrap();\n\n let matchres = match_header_spec(&spec, &data);\n assert!(matchres.is_none(), \"Matching returns error: {:?}\", matchres);\n }\n}\n\n<commit_msg>Fix: Debug for File implementation<commit_after>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse regex::Regex;\n\npub mod id;\npub mod id_type;\npub mod header;\npub mod hash;\n\n\nuse module::Module;\nuse storage::file::id::*;\nuse storage::file::id_type::FileIDType;\nuse storage::file::hash::FileHash;\nuse super::parser::{FileHeaderParser, Parser, ParserError};\n\nuse self::header::spec::*;\nuse self::header::data::*;\n\n\/*\n * Internal abstract view on a file. 
Does not exist on the FS and is just kept\n * internally until it is written to disk.\n *\/\npub struct File {\n pub owning_module_name : &'static str,\n pub header : FileHeaderData,\n pub data : String,\n pub id : FileID,\n}\n\nimpl File {\n\n pub fn owner_name(&self) -> &'static str {\n self.owning_module_name\n }\n\n pub fn header(&self) -> &FileHeaderData {\n &self.header\n }\n\n pub fn data(&self) -> &String {\n &self.data\n }\n\n pub fn contents(&self) -> (&FileHeaderData, &String) {\n (self.header(), self.data())\n }\n\n pub fn id(&self) -> &FileID {\n &self.id\n }\n\n pub fn matches_with(&self, r: &Regex) -> bool {\n r.is_match(&self.data[..]) || self.header.matches_with(r)\n }\n\n}\n\nimpl Display for File {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt,\n\"[File] Owner : '{:?}'\n FileID: '{:?}'\n Header: '{:?}'\n Data : '{:?}'\",\n self.owning_module_name,\n self.header,\n self.data,\n self.id);\n Ok(())\n }\n\n}\n\nimpl Debug for File {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt,\n\"[File] Owner : '{:?}'\n FileID: '{:?}'\n Header: '{:?}'\n Data : '{:?}'\",\n self.owning_module_name,\n self.id,\n self.header,\n self.data);\n Ok(())\n }\n\n}\n\n#[cfg(test)]\nmod test {\n \/\/ we use the JSON parser here, so we can generate FileHeaderData\n use storage::json::parser::JsonHeaderParser;\n use super::match_header_spec;\n use storage::parser::{FileHeaderParser, ParserError};\n use storage::file::FileHeaderData as FHD;\n use storage::file::FileHeaderSpec as FHS;\n\n #[test]\n fn test_spec_matching() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec.clone()));\n let datares = parser.read(Some(text.clone()));\n assert!(datares.is_ok(), \"Text could not be parsed: '{}'\", text);\n let data = datares.unwrap();\n\n let matchres = match_header_spec(&spec, &data);\n assert!(matchres.is_none(), \"Matching returns error: {:?}\", matchres);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n }\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n 
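\/\/ Draw each entry: highlight the selected row, pick an icon based on the file extension, then render the name text.\n 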
let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n 
i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n let click_time = Duration::realtime();\n if self.selected == i {\n if click_time - self.click_time < Duration::new(0, 500 * NANOS_PER_MILLI) {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n self.click_time = Duration::new(0, 0);\n }\n } else {\n self.selected = i;\n self.click_time = click_time;\n }\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n\n if mouse_event.left_button \n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<commit_msg>Double click<commit_after>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n 
self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n self.selected = i;\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n\n \/\/Check for double click\n if mouse_event.left_button {\n let click_time = Duration::realtime();\n\n if 
click_time - self.click_time < Duration::new(0, 500 * NANOS_PER_MILLI)\n && (self.last_mouse_event.x - mouse_event.x).abs() <= 4\n && (self.last_mouse_event.y - mouse_event.y).abs() <= 4 {\n if self.selected >= 0 && self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n self.click_time = Duration::new(0, 0);\n } else {\n self.click_time = click_time;\n }\n }\n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>version work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixing buffer issues.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add straggling code sample.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add card struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(sync): if a native file already exists, but there is no syncdb, check to see if syncdb needs update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add ! NEGATE<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> #23 Added error checking to window creation<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Feat(command): add option to use custom config file<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\n\nE0373: r##\"\nThis error occurs when an attempt is made to use data captured by a closure,\nwhen that data may no longer exist. It's most commonly seen when attempting to\nreturn a closure:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n let x = 0u32;\n Box::new(|y| x + y)\n}\n```\n\nNotice that `x` is stack-allocated by `foo()`. By default, Rust captures\nclosed-over data by reference. This means that once `foo()` returns, `x` no\nlonger exists. An attempt to access `x` within the closure would thus be unsafe.\n\nAnother situation where this might be encountered is when spawning threads:\n\n```\nfn foo() {\n let x = 0u32;\n let y = 1u32;\n\n let thr = std::thread::spawn(|| {\n x + y\n });\n}\n```\n\nSince our new thread runs in parallel, the stack frame containing `x` and `y`\nmay well have disappeared by the time we try to use them. Even if we call\n`thr.join()` within foo (which blocks until `thr` has completed, ensuring the\nstack frame won't disappear), we will not succeed: the compiler cannot prove\nthat this behaviour is safe, and so won't let us do it.\n\nThe solution to this problem is usually to switch to using a `move` closure.\nThis approach moves (or copies, where possible) data into the closure, rather\nthan taking references to it. 
For example:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n let x = 0u32;\n Box::new(move |y| x + y)\n}\n```\n\nNow that the closure has its own copy of the data, there's no need to worry\nabout safety.\n\"##,\n\nE0381: r##\"\nIt is not allowed to use or capture an uninitialized variable. For example:\n\n```\nfn main() {\n let x: i32;\n let y = x; \/\/ error, use of possibly uninitialized variable\n```\n\nTo fix this, ensure that any declared variables are initialized before being\nused.\n\"##,\n\nE0382: r##\"\nThis error occurs when an attempt is made to use a variable after its contents\nhave been moved elsewhere. For example:\n\n```\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = MyStruct{ s: 5u32 };\n let y = x;\n x.s = 6;\n println!(\"{}\", x.s);\n}\n```\n\nSince `MyStruct` is a type that is not marked `Copy`, the data gets moved out\nof `x` when we set `y`. This is fundamental to Rust's ownership system: outside\nof workarounds like `Rc`, a value cannot be owned by more than one variable.\n\nIf we own the type, the easiest way to address this problem is to implement\n`Copy` and `Clone` on it, as shown below. This allows `y` to copy the\ninformation in `x`, while leaving the original version owned by `x`. Subsequent\nchanges to `x` will not be reflected when accessing `y`.\n\n```\n#[derive(Copy, Clone)]\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = MyStruct{ s: 5u32 };\n let y = x;\n x.s = 6;\n println!(\"{}\", x.s);\n}\n```\n\nAlternatively, if we don't control the struct's definition, or mutable shared\nownership is truly required, we can use `Rc` and `RefCell`:\n\n```\nuse std::cell::RefCell;\nuse std::rc::Rc;\n\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = Rc::new(RefCell::new(MyStruct{ s: 5u32 }));\n let y = x.clone();\n x.borrow_mut().s = 6;\n println!(\"{}\", x.borrow.s);\n}\n```\n\nWith this approach, x and y share ownership of the data via the `Rc` (reference\ncount type). `RefCell` essentially performs runtime borrow checking: ensuring\nthat at most one writer or multiple readers can access the data at any one time.\n\nIf you wish to learn more about ownership in Rust, start with the chapter in the\nBook:\n\nhttps:\/\/doc.rust-lang.org\/book\/ownership.html\n\"##,\n\nE0384: r##\"\nThis error occurs when an attempt is made to reassign an immutable variable.\nFor example:\n\n```\nfn main(){\n let x = 3;\n x = 5; \/\/ error, reassignment of immutable variable\n}\n```\n\nBy default, variables in Rust are immutable. To fix this error, add the keyword\n`mut` after the keyword `let` when declaring the variable. For example:\n\n```\nfn main(){\n let mut x = 3;\n x = 5;\n}\n```\n\"##\n\n}\n\nregister_diagnostics! {\n E0383, \/\/ partial reinitialization of uninitialized structure\n E0385, \/\/ {} in an aliasable location\n E0386, \/\/ {} in an immutable container\n E0387, \/\/ {} in a captured outer variable in an `Fn` closure\n E0388, \/\/ {} in a static location\n E0389 \/\/ {} in a `&` reference\n}\n<commit_msg>rollup merge of #27605: AlisdairO\/diagnostics387<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\n\nE0373: r##\"\nThis error occurs when an attempt is made to use data captured by a closure,\nwhen that data may no longer exist. It's most commonly seen when attempting to\nreturn a closure:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n let x = 0u32;\n Box::new(|y| x + y)\n}\n```\n\nNotice that `x` is stack-allocated by `foo()`. By default, Rust captures\nclosed-over data by reference. This means that once `foo()` returns, `x` no\nlonger exists. An attempt to access `x` within the closure would thus be unsafe.\n\nAnother situation where this might be encountered is when spawning threads:\n\n```\nfn foo() {\n let x = 0u32;\n let y = 1u32;\n\n let thr = std::thread::spawn(|| {\n x + y\n });\n}\n```\n\nSince our new thread runs in parallel, the stack frame containing `x` and `y`\nmay well have disappeared by the time we try to use them. Even if we call\n`thr.join()` within foo (which blocks until `thr` has completed, ensuring the\nstack frame won't disappear), we will not succeed: the compiler cannot prove\nthat this behaviour is safe, and so won't let us do it.\n\nThe solution to this problem is usually to switch to using a `move` closure.\nThis approach moves (or copies, where possible) data into the closure, rather\nthan taking references to it. For example:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n let x = 0u32;\n Box::new(move |y| x + y)\n}\n```\n\nNow that the closure has its own copy of the data, there's no need to worry\nabout safety.\n\"##,\n\nE0381: r##\"\nIt is not allowed to use or capture an uninitialized variable. For example:\n\n```\nfn main() {\n let x: i32;\n let y = x; \/\/ error, use of possibly uninitialized variable\n```\n\nTo fix this, ensure that any declared variables are initialized before being\nused.\n\"##,\n\nE0382: r##\"\nThis error occurs when an attempt is made to use a variable after its contents\nhave been moved elsewhere. For example:\n\n```\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = MyStruct{ s: 5u32 };\n let y = x;\n x.s = 6;\n println!(\"{}\", x.s);\n}\n```\n\nSince `MyStruct` is a type that is not marked `Copy`, the data gets moved out\nof `x` when we set `y`. This is fundamental to Rust's ownership system: outside\nof workarounds like `Rc`, a value cannot be owned by more than one variable.\n\nIf we own the type, the easiest way to address this problem is to implement\n`Copy` and `Clone` on it, as shown below. This allows `y` to copy the\ninformation in `x`, while leaving the original version owned by `x`. Subsequent\nchanges to `x` will not be reflected when accessing `y`.\n\n```\n#[derive(Copy, Clone)]\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = MyStruct{ s: 5u32 };\n let y = x;\n x.s = 6;\n println!(\"{}\", x.s);\n}\n```\n\nAlternatively, if we don't control the struct's definition, or mutable shared\nownership is truly required, we can use `Rc` and `RefCell`:\n\n```\nuse std::cell::RefCell;\nuse std::rc::Rc;\n\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = Rc::new(RefCell::new(MyStruct{ s: 5u32 }));\n let y = x.clone();\n x.borrow_mut().s = 6;\n println!(\"{}\", x.borrow.s);\n}\n```\n\nWith this approach, x and y share ownership of the data via the `Rc` (reference\ncount type). 
`RefCell` essentially performs runtime borrow checking: ensuring\nthat at most one writer or multiple readers can access the data at any one time.\n\nIf you wish to learn more about ownership in Rust, start with the chapter in the\nBook:\n\nhttps:\/\/doc.rust-lang.org\/book\/ownership.html\n\"##,\n\nE0384: r##\"\nThis error occurs when an attempt is made to reassign an immutable variable.\nFor example:\n\n```\nfn main(){\n let x = 3;\n x = 5; \/\/ error, reassignment of immutable variable\n}\n```\n\nBy default, variables in Rust are immutable. To fix this error, add the keyword\n`mut` after the keyword `let` when declaring the variable. For example:\n\n```\nfn main(){\n let mut x = 3;\n x = 5;\n}\n```\n\"##,\n\nE0387: r##\"\nThis error occurs when an attempt is made to mutate or mutably reference data\nthat a closure has captured immutably. Examples of this error are shown below:\n\n```\n\/\/ Accepts a function or a closure that captures its environment immutably.\n\/\/ Closures passed to foo will not be able to mutate their closed-over state.\nfn foo<F: Fn()>(f: F) { }\n\n\/\/ Attempts to mutate closed-over data. Error message reads:\n\/\/ `cannot assign to data in a captured outer variable...`\nfn mutable() {\n let mut x = 0u32;\n foo(|| x = 2);\n}\n\n\/\/ Attempts to take a mutable reference to closed-over data. Error message\n\/\/ reads: `cannot borrow data mutably in a captured outer variable...`\nfn mut_addr() {\n let mut x = 0u32;\n foo(|| { let y = &mut x; });\n}\n```\n\nThe problem here is that foo is defined as accepting a parameter of type `Fn`.\nClosures passed into foo will thus be inferred to be of type `Fn`, meaning that\nthey capture their context immutably.\n\nIf the definition of `foo` is under your control, the simplest solution is to\ncapture the data mutably. This can be done by defining `foo` to take FnMut\nrather than Fn:\n\n```\nfn foo<F: FnMut()>(f: F) { }\n```\n\nAlternatively, we can consider using the `Cell` and `RefCell` types to achieve\ninterior mutability through a shared reference. Our example's `mutable` function\ncould be redefined as below:\n\n```\nuse std::cell::Cell;\n\nfn mutable() {\n let x = Cell::new(0u32);\n foo(|| x.set(2));\n}\n```\n\nYou can read more about cell types in the API documentation:\n\nhttps:\/\/doc.rust-lang.org\/std\/cell\/\n\"##\n\n}\n\nregister_diagnostics! {\n E0383, \/\/ partial reinitialization of uninitialized structure\n E0385, \/\/ {} in an aliasable location\n E0386, \/\/ {} in an immutable container\n E0388, \/\/ {} in a static location\n E0389 \/\/ {} in a `&` reference\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)] \/\/ not used on all platforms\n\nuse collections::BTreeMap;\nuse ptr;\nuse sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};\n\npub type Key = usize;\n\ntype Dtor = unsafe extern fn(*mut u8);\n\nstatic NEXT_KEY: AtomicUsize = ATOMIC_USIZE_INIT;\n\nstatic mut KEYS: *mut BTreeMap<Key, Option<Dtor>> = ptr::null_mut();\n\n#[thread_local]\nstatic mut LOCALS: *mut BTreeMap<Key, *mut u8> = ptr::null_mut();\n\nunsafe fn keys() -> &'static mut BTreeMap<Key, Option<Dtor>> {\n if KEYS == ptr::null_mut() {\n KEYS = Box::into_raw(Box::new(BTreeMap::new()));\n }\n &mut *KEYS\n}\n\nunsafe fn locals() -> &'static mut BTreeMap<Key, *mut u8> {\n if LOCALS == ptr::null_mut() {\n LOCALS = Box::into_raw(Box::new(BTreeMap::new()));\n }\n &mut *LOCALS\n}\n\n#[inline]\npub unsafe fn create(dtor: Option<Dtor>) -> Key {\n let key = NEXT_KEY.fetch_add(1, Ordering::SeqCst);\n keys().insert(key, dtor);\n key\n}\n\n#[inline]\npub unsafe fn get(key: Key) -> *mut u8 {\n if let Some(&entry) = locals().get(&key) {\n entry\n } else {\n ptr::null_mut()\n }\n}\n\n#[inline]\npub unsafe fn set(key: Key, value: *mut u8) {\n locals().insert(key, value);\n}\n\n#[inline]\npub unsafe fn destroy(key: Key) {\n keys().remove(&key);\n}\n<commit_msg>Implement requires_synchronized_create() for Redox<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)] \/\/ not used on all platforms\n\nuse collections::BTreeMap;\nuse ptr;\nuse sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};\n\npub type Key = usize;\n\ntype Dtor = unsafe extern fn(*mut u8);\n\nstatic NEXT_KEY: AtomicUsize = ATOMIC_USIZE_INIT;\n\nstatic mut KEYS: *mut BTreeMap<Key, Option<Dtor>> = ptr::null_mut();\n\n#[thread_local]\nstatic mut LOCALS: *mut BTreeMap<Key, *mut u8> = ptr::null_mut();\n\nunsafe fn keys() -> &'static mut BTreeMap<Key, Option<Dtor>> {\n if KEYS == ptr::null_mut() {\n KEYS = Box::into_raw(Box::new(BTreeMap::new()));\n }\n &mut *KEYS\n}\n\nunsafe fn locals() -> &'static mut BTreeMap<Key, *mut u8> {\n if LOCALS == ptr::null_mut() {\n LOCALS = Box::into_raw(Box::new(BTreeMap::new()));\n }\n &mut *LOCALS\n}\n\n#[inline]\npub unsafe fn create(dtor: Option<Dtor>) -> Key {\n let key = NEXT_KEY.fetch_add(1, Ordering::SeqCst);\n keys().insert(key, dtor);\n key\n}\n\n#[inline]\npub unsafe fn get(key: Key) -> *mut u8 {\n if let Some(&entry) = locals().get(&key) {\n entry\n } else {\n ptr::null_mut()\n }\n}\n\n#[inline]\npub unsafe fn set(key: Key, value: *mut u8) {\n locals().insert(key, value);\n}\n\n#[inline]\npub unsafe fn destroy(key: Key) {\n keys().remove(&key);\n}\n\n#[inline]\npub fn requires_synchronized_create() -> bool {\n false\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>adding more cmds to docopt parsing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update main.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>testing: add missing mock file<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Match prefix free decoding result<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>on hover effect for tile<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>window lying about width\/height is a piston problem<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Checks that we correctly codegen extern \"C\" functions returning structs.\n\/\/ See issue #52638.\n\n\/\/ compile-flags: -O --target=sparc64-unknown-linux-gnu --crate-type=rlib\n#![feature(no_core, lang_items)]\n#![no_core]\n\n#[lang=\"sized\"]\ntrait Sized { }\n#[lang=\"freeze\"]\ntrait Freeze { }\n#[lang=\"copy\"]\ntrait Copy { }\n\n#[repr(C)]\npub struct Bool {\n b: bool,\n}\n\n\/\/ CHECK: define i64 @structbool()\n\/\/ CHECK-NEXT: start:\n\/\/ CHECK-NEXT: ret i64 72057594037927936\n#[no_mangle]\npub extern \"C\" fn structbool() -> Bool {\n Bool { b: true }\n}\n<commit_msg>Only run the sparc-abi test on sparc<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Checks that we correctly codegen extern \"C\" functions returning structs.\n\/\/ See issue #52638.\n\n\/\/ only-sparc64\n\/\/ compile-flags: -O --target=sparc64-unknown-linux-gnu --crate-type=rlib\n#![feature(no_core, lang_items)]\n#![no_core]\n\n#[lang=\"sized\"]\ntrait Sized { }\n#[lang=\"freeze\"]\ntrait Freeze { }\n#[lang=\"copy\"]\ntrait Copy { }\n\n#[repr(C)]\npub struct Bool {\n b: bool,\n}\n\n\/\/ CHECK: define i64 @structbool()\n\/\/ CHECK-NEXT: start:\n\/\/ CHECK-NEXT: ret i64 72057594037927936\n#[no_mangle]\npub extern \"C\" fn structbool() -> Bool {\n Bool { b: true }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct ret;\nstruct obj;\n\nimpl obj {\n fn func() -> ret {\n ret\n }\n}\n\nfn func() -> ret {\n ret\n}\n\nfn main() {\n obj::func.x();\n \/\/~^ ERROR no method named `x` found for type `fn() -> ret {obj::func}` in the current scope\n \/\/~^^ NOTE obj::func is a function, perhaps you wish to call it\n func.x();\n \/\/~^ ERROR no method named `x` found for type `fn() -> ret {func}` in the current scope\n \/\/~^^ NOTE func is a function, perhaps you wish to call it\n}\n<commit_msg>Add Help and Suggestion to issue-29124 tests<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct ret;\nstruct obj;\n\nimpl obj {\n fn func() -> ret {\n ret\n }\n}\n\nfn func() -> ret {\n ret\n}\n\nfn main() {\n obj::func.x();\n \/\/~^ ERROR no method named `x` found for type `fn() -> ret {obj::func}` in the current scope\n \/\/~^^ NOTE obj::func is a function, perhaps you wish to call it\n \/\/~^^^ HELP try calling the base function:\n \/\/~| SUGGESTION obj::func().x();\n func.x();\n \/\/~^ ERROR no method named `x` found for type `fn() -> ret {func}` in the current scope\n \/\/~^^ NOTE func is a function, perhaps you wish to call it\n \/\/~^^^ HELP try calling the base function:\n \/\/~| SUGGESTION func().x();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #31212<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This checks that a path that cannot be resolved because of an indeterminate import\n\/\/ does not trigger an ICE.\n\nmod foo {\n pub use self::*; \/\/~ ERROR unresolved\n}\n\nfn main() {\n foo::f(); \/\/~ ERROR unresolved\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>use neg_assign<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Use VcardBuilder instead of Vcard itself<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libc;\n\nuse std::io;\nuse std::io::fs;\nuse std::io::File;\nuse std::path::Path;\nuse std::collections::HashMap;\n\nuse document::Document;\n\npub struct Runner;\n\nimpl Runner {\n pub fn run(path_string: &str) {\n let base_path = Path::new(path_string);\n let documents_path = base_path.join(\"_posts\");\n let layout_path = base_path.join(\"_layouts\");\n let index_path = base_path.join(\"index.tpl\");\n let build_path = base_path.join(\"build\");\n\n println!(\"Generating site in {}\\n\", path_string);\n\n let index = Runner::parse_document(&index_path);\n let posts = Runner::parse_documents(&documents_path);\n let post_path = Runner::create_dirs(&build_path);\n\n Runner::create_files(&build_path, &post_path, &layout_path, index, posts);\n }\n\n fn parse_documents(path: &Path) -> Vec<Document> {\n match fs::readdir(path) {\n Ok(paths) => paths.iter().filter_map( |path|\n if path.extension_str().unwrap() == \"tpl\" {\n Some(Runner::parse_document(path))\n }else{\n None\n }\n ).collect(),\n \/\/ TODO panic!\n Err(e) => fail!(\"Path {} doesn't exist\\n\", path.display())\n }\n }\n\n fn parse_document(path: &Path) -> Document {\n let attributes = Runner::extract_attributes(path);\n let content = Runner::extract_content(path);\n\n Document::new(\n attributes,\n content,\n path.filestem_str().unwrap().to_string() + \".html\",\n )\n }\n\n fn parse_file(path: &Path) -> String {\n match File::open(path) {\n \/\/ TODO handle IOResult\n Ok(mut x) => x.read_to_string().unwrap(),\n \/\/ TODO panic!\n Err(e) => fail!(\"File {} doesn't exist\\n\", path.display())\n }\n }\n\n fn create_dirs(build_path: &Path) -> Path {\n let postpath = build_path.join(\"posts\");\n\n fs::mkdir(build_path, io::USER_RWX);\n fs::mkdir(&postpath, io::USER_RWX);\n\n \/\/ TODO: copy non cobalt relevant folders into \/build folder (assets, stylesheets, etc...)\n\n println!(\"Directory {} created\\n\", build_path.display());\n\n return postpath;\n }\n\n fn create_files(index_path: &Path, document_path: &Path, layout_path: &Path, index: Document, documents: Vec<Document>) {\n index.create_file(index_path, layout_path);\n\n for document in documents.iter() {\n document.create_file(document_path, layout_path);\n }\n }\n\n\n fn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.filestem_str().unwrap().to_string());\n\n let content = Runner::parse_file(path);\n\n if content.as_slice().contains(\"---\") {\n let mut content_splits = content.as_slice().split_str(\"---\");\n\n let attribute_string = 
content_splits.nth(0u).unwrap();\n\n for attribute_line in attribute_string.split_str(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let mut attribute_split = attribute_line.split(':');\n\n \/\/ TODO: Refactor, find a better way for doing this\n \/\/ .nth() method is consuming the iterator and therefore the 0th index on the second method\n \/\/ is in real index 1\n let key = attribute_split.nth(0u).unwrap().trim_chars(' ').to_string().clone();\n let value = attribute_split.nth(0u).unwrap().trim_chars(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n }\n\n fn extract_content(path: &Path) -> String {\n let content = Runner::parse_file(path);\n\n if content.as_slice().contains(\"---\") {\n let mut content_splits = content.as_slice().split_str(\"---\");\n\n return content_splits.nth(1u).unwrap().to_string();\n }\n\n return content;\n }\n}\n<commit_msg>use _ if variable isn't used in matched block<commit_after>extern crate libc;\n\nuse std::io;\nuse std::io::fs;\nuse std::io::File;\nuse std::path::Path;\nuse std::collections::HashMap;\n\nuse document::Document;\n\npub struct Runner;\n\nimpl Runner {\n pub fn run(path_string: &str) {\n let base_path = Path::new(path_string);\n let documents_path = base_path.join(\"_posts\");\n let layout_path = base_path.join(\"_layouts\");\n let index_path = base_path.join(\"index.tpl\");\n let build_path = base_path.join(\"build\");\n\n println!(\"Generating site in {}\\n\", path_string);\n\n let index = Runner::parse_document(&index_path);\n let posts = Runner::parse_documents(&documents_path);\n let post_path = Runner::create_dirs(&build_path);\n\n Runner::create_files(&build_path, &post_path, &layout_path, index, posts);\n }\n\n fn parse_documents(path: &Path) -> Vec<Document> {\n match fs::readdir(path) {\n Ok(paths) => paths.iter().filter_map( |path|\n if path.extension_str().unwrap() == \"tpl\" {\n Some(Runner::parse_document(path))\n }else{\n None\n }\n ).collect(),\n \/\/ TODO panic!\n Err(_) => fail!(\"Path {} doesn't exist\\n\", path.display())\n }\n }\n\n fn parse_document(path: &Path) -> Document {\n let attributes = Runner::extract_attributes(path);\n let content = Runner::extract_content(path);\n\n Document::new(\n attributes,\n content,\n path.filestem_str().unwrap().to_string() + \".html\",\n )\n }\n\n fn parse_file(path: &Path) -> String {\n match File::open(path) {\n \/\/ TODO handle IOResult\n Ok(mut x) => x.read_to_string().unwrap(),\n \/\/ TODO panic!\n Err(_) => fail!(\"File {} doesn't exist\\n\", path.display())\n }\n }\n\n fn create_dirs(build_path: &Path) -> Path {\n let postpath = build_path.join(\"posts\");\n\n fs::mkdir(build_path, io::USER_RWX);\n fs::mkdir(&postpath, io::USER_RWX);\n\n \/\/ TODO: copy non cobalt relevant folders into \/build folder (assets, stylesheets, etc...)\n\n println!(\"Directory {} created\\n\", build_path.display());\n\n return postpath;\n }\n\n fn create_files(index_path: &Path, document_path: &Path, layout_path: &Path, index: Document, documents: Vec<Document>) {\n index.create_file(index_path, layout_path);\n\n for document in documents.iter() {\n document.create_file(document_path, layout_path);\n }\n }\n\n\n fn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.filestem_str().unwrap().to_string());\n\n let content = Runner::parse_file(path);\n\n if content.as_slice().contains(\"---\") {\n let mut content_splits = 
content.as_slice().split_str(\"---\");\n\n let attribute_string = content_splits.nth(0u).unwrap();\n\n for attribute_line in attribute_string.split_str(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let mut attribute_split = attribute_line.split(':');\n\n \/\/ TODO: Refactor, find a better way for doing this\n \/\/ .nth() method is consuming the iterator and therefore the 0th index on the second method\n \/\/ is in real index 1\n let key = attribute_split.nth(0u).unwrap().trim_chars(' ').to_string().clone();\n let value = attribute_split.nth(0u).unwrap().trim_chars(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n }\n\n fn extract_content(path: &Path) -> String {\n let content = Runner::parse_file(path);\n\n if content.as_slice().contains(\"---\") {\n let mut content_splits = content.as_slice().split_str(\"---\");\n\n return content_splits.nth(1u).unwrap().to_string();\n }\n\n return content;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate core;\n\nuse std::io;\nuse std::fs;\nuse std::fs::PathExt;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::collections::HashMap;\nuse self::core::str::StrExt;\nuse std::ffi::OsStr;\nuse liquid::Value;\n\nuse document::Document;\nuse util;\n\npub fn build(source: &Path, dest: &Path, layout_str: &str, posts_str: &str) -> io::Result<()>{\n \/\/ TODO make configurable\n let template_extensions = [OsStr::new(\"tpl\") \/*, OsStr::new(\"md\")*\/];\n\n let layouts_path = source.join(layout_str);\n let posts_path = source.join(posts_str);\n\n let mut layouts : HashMap<String, String> = HashMap::new();\n\n \/\/ go through the layout directory and add\n \/\/ filename -> text content to the layout map\n match fs::walk_dir(&layouts_path) {\n Ok(files) => for layout in files {\n let layout = try!(layout).path();\n if layout.is_file() {\n let mut text = String::new();\n try!(try!(File::open(&layout)).read_to_string(&mut text));\n layouts.insert(layout.as_path().file_name().unwrap().to_str().unwrap().to_string(), text);\n }\n },\n Err(_) => println!(\"Warning: No layout path found ({})\\n\", source.display())\n };\n\n let mut documents = vec![];\n let mut post_data = vec![];\n\n \/\/ walk source directory and find files that are written in\n \/\/ a template file extension\n for p in try!(fs::walk_dir(source)) {\n let p = p.unwrap().path();\n let path = p.as_path();\n \/\/ check for file extensions\n if template_extensions.contains(&path.extension().unwrap_or(OsStr::new(\"\")))\n \/\/ check that file is not in the layouts folder\n && path.parent() != Some(layouts_path.as_path()) {\n let doc = parse_document(&path, source);\n if path.parent() == Some(posts_path.as_path()){\n post_data.push(Value::Object(doc.get_attributes()));\n }\n documents.push(doc);\n }\n }\n\n for doc in documents.iter() {\n try!(doc.create_file(dest, &layouts, &post_data));\n }\n\n \/\/ copy everything\n if source != dest {\n try!(util::copy_recursive_filter(source, dest, &|p| -> bool {\n !p.file_name().unwrap().to_str().unwrap_or(\"\").starts_with(\".\")\n && !template_extensions.contains(&p.extension().unwrap_or(OsStr::new(\"\")))\n && p != dest\n && p != layouts_path.as_path()\n }));\n }\n\n Ok(())\n}\n\nfn parse_document(path: &Path, source: &Path) -> Document {\n let attributes = extract_attributes(path);\n let content = extract_content(path).unwrap();\n let new_path = path.relative_from(source).unwrap();\n \/\/ let markdown = path.extension().unwrap_or(OsStr::new(\"\")) == 
OsStr::new(\"md\");\n\n Document::new(\n new_path.to_str().unwrap().to_string(),\n attributes,\n content,\n \/\/ markdown\n )\n}\n\nfn parse_file(path: &Path) -> io::Result<String> {\n let mut file = try!(File::open(path));\n let mut text = String::new();\n try!(file.read_to_string(&mut text));\n Ok(text)\n}\n\nfn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.file_stem().unwrap().to_str().unwrap().to_string());\n\n let content = parse_file(path).unwrap();\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n let attribute_string = content_splits.nth(0).unwrap();\n\n for attribute_line in attribute_string.split(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let mut attribute_split = attribute_line.split(':');\n\n \/\/ TODO: Refactor, find a better way for doing this\n \/\/ .nth() method is consuming the iterator and therefore the 0th index on the second method\n \/\/ is in real index 1\n let key = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n let value = attribute_split.nth(0).unwrap().trim_matches(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n}\n\nfn extract_content(path: &Path) -> io::Result<String> {\n let content = try!(parse_file(path));\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n return Ok(content_splits.nth(1).unwrap().to_string());\n }\n\n return Ok(content);\n}\n<commit_msg>readability<commit_after>extern crate core;\n\nuse std::io;\nuse std::fs;\nuse std::fs::PathExt;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\nuse std::collections::HashMap;\nuse self::core::str::StrExt;\nuse std::ffi::OsStr;\nuse liquid::Value;\n\nuse document::Document;\nuse util;\n\npub fn build(source: &Path, dest: &Path, layout_str: &str, posts_str: &str) -> io::Result<()>{\n \/\/ TODO make configurable\n let template_extensions = [OsStr::new(\"tpl\") \/*, OsStr::new(\"md\")*\/];\n\n let layouts_path = source.join(layout_str);\n let posts_path = source.join(posts_str);\n\n let mut layouts : HashMap<String, String> = HashMap::new();\n\n \/\/ go through the layout directory and add\n \/\/ filename -> text content to the layout map\n match fs::walk_dir(&layouts_path) {\n Ok(files) => for layout in files {\n let layout = try!(layout).path();\n if layout.is_file() {\n let mut text = String::new();\n try!(try!(File::open(&layout)).read_to_string(&mut text));\n layouts.insert(layout.as_path().file_name().unwrap().to_str().unwrap().to_string(), text);\n }\n },\n Err(_) => println!(\"Warning: No layout path found ({})\\n\", source.display())\n };\n\n let mut documents = vec![];\n let mut post_data = vec![];\n\n \/\/ walk source directory and find files that are written in\n \/\/ a template file extension\n for p in try!(fs::walk_dir(source)) {\n let p = p.unwrap().path();\n let path = p.as_path();\n \/\/ check for file extensions\n if template_extensions.contains(&path.extension().unwrap_or(OsStr::new(\"\")))\n \/\/ check that file is not in the layouts folder\n && path.parent() != Some(layouts_path.as_path()) {\n let doc = parse_document(&path, source);\n if path.parent() == Some(posts_path.as_path()){\n post_data.push(Value::Object(doc.get_attributes()));\n }\n documents.push(doc);\n }\n }\n\n for doc in documents.iter() {\n try!(doc.create_file(dest, &layouts, &post_data));\n }\n\n \/\/ copy everything\n if source != 
dest {\n try!(util::copy_recursive_filter(source, dest, &|p| -> bool {\n !p.file_name().unwrap().to_str().unwrap_or(\"\").starts_with(\".\")\n && !template_extensions.contains(&p.extension().unwrap_or(OsStr::new(\"\")))\n && p != dest\n && p != layouts_path.as_path()\n }));\n }\n\n Ok(())\n}\n\nfn parse_document(path: &Path, source: &Path) -> Document {\n let attributes = extract_attributes(path);\n let content = extract_content(path).unwrap();\n let new_path = path.relative_from(source).unwrap();\n \/\/ let markdown = path.extension().unwrap_or(OsStr::new(\"\")) == OsStr::new(\"md\");\n\n Document::new(\n new_path.to_str().unwrap().to_string(),\n attributes,\n content,\n \/\/ markdown\n )\n}\n\nfn parse_file(path: &Path) -> io::Result<String> {\n let mut file = try!(File::open(path));\n let mut text = String::new();\n try!(file.read_to_string(&mut text));\n Ok(text)\n}\n\nfn extract_attributes(path: &Path) -> HashMap<String, String> {\n let mut attributes = HashMap::new();\n attributes.insert(\"name\".to_string(), path.file_stem().unwrap().to_str().unwrap().to_string());\n\n let content = parse_file(path).unwrap();\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n let attribute_string = content_splits.nth(0).unwrap();\n\n for attribute_line in attribute_string.split(\"\\n\") {\n if !attribute_line.contains_char(':') {\n continue;\n }\n\n let attribute_split: Vec<&str> = attribute_line.split(':').collect();\n\n let key = attribute_split[0].trim_matches(' ').to_string().clone();\n let value = attribute_split[1].trim_matches(' ').to_string().clone();\n\n attributes.insert(key, value);\n }\n }\n\n return attributes;\n}\n\nfn extract_content(path: &Path) -> io::Result<String> {\n let content = try!(parse_file(path));\n\n if content.contains(\"---\") {\n let mut content_splits = content.split(\"---\");\n\n return Ok(content_splits.nth(1).unwrap().to_string());\n }\n\n return Ok(content);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>stat rust version<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added random weights and biases. Need to control with function. 
Added pretty print.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>extend view in direction of target<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added file that will have the sound primitives.<commit_after>\/*\n * Sound\n * Sound is handled by libao.\n * When there's a instruction regarding the usage of sound, the interpreter\n * will call a function in this file.\n *\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added Audio object<commit_after>\/\/\/ Represents a audio file\n#[derive(Clone, Serialize, Deserialize, Debug)]\npub struct Audio {\n pub file_id: String,\n pub duration: i64,\n pub performer: Option<String>,\n pub title: Option<String>,\n pub mime_type: Option<String>,\n pub file_size: Option<i64>,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>addding basic graph implementation in rust<commit_after>enum State {\n WHITE,\n GRAY,\n BLACK,\n}\n\npub trait Traversal {\n fn bfs(&self) {\n\n }\n fn dfs(&self) {\n \n }\n}\n\nimpl Traversal for Graph {\n fn bfs(&self) {\n\n }\n fn dfs(&self) {\n\n }\n}\n\nuse State::*;\n\npub struct Vertex {\n v_id:u32,\n data:String,\n state:State,\n in_degree:u64,\n out_degree:u64,\n}\n\nimpl Vertex {\n fn new(v_id:u32,data:&str) -> Self {\n let data = data.to_string();\n Vertex {v_id:v_id,data:data,state:WHITE,in_degree:0,out_degree:0}\n }\n}\n \npub struct Graph {\n v_size:u64,\n v_vec:Vec<Vertex>,\n e_vec:Vec<(Vertex,Vertex)>,\n}\n\nimpl Graph {\n pub fn new(nodes:u64) -> Self {\n Graph {v_size:0,v_vec:vec![],e_vec:vec![]}\n }\n\n pub fn add_vertex(&mut self,v:Vertex) {\n self.v_vec.push(v);\n }\n}\n#[test]\nfn graph_init() {\n let mut graph = Graph::new(12);\n let v1 = Vertex::new(0,\"Bob\");\n let v2 = Vertex::new(1,\"Alice\");\n let v3 = Vertex::new(2,\"Sam\");\n let v4 = Vertex::new(3,\"Derek\");\n graph.add_vertex(v1);\n graph.add_vertex(v2);\n graph.add_vertex(v3);\n graph.add_vertex(v4);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fs::{File, OpenOptions};\nuse std::io;\nuse std::io::{Read, Seek, SeekFrom, Write};\nuse std::path::{Display, Path, PathBuf};\n\nuse crate::util::errors::CargoResult;\nuse crate::util::Config;\nuse anyhow::Context as _;\nuse cargo_util::paths;\nuse sys::*;\nuse termcolor::Color::Cyan;\n\n#[derive(Debug)]\npub struct FileLock {\n f: Option<File>,\n path: PathBuf,\n state: State,\n}\n\n#[derive(PartialEq, Debug)]\nenum State {\n Unlocked,\n Shared,\n Exclusive,\n}\n\nimpl FileLock {\n \/\/\/ Returns the underlying file handle of this lock.\n pub fn file(&self) -> &File {\n self.f.as_ref().unwrap()\n }\n\n \/\/\/ Returns the underlying path that this lock points to.\n \/\/\/\n \/\/\/ Note that special care must be taken to ensure that the path is not\n \/\/\/ referenced outside the lifetime of this lock.\n pub fn path(&self) -> &Path {\n assert_ne!(self.state, State::Unlocked);\n &self.path\n }\n\n \/\/\/ Returns the parent path containing this file\n pub fn parent(&self) -> &Path {\n assert_ne!(self.state, State::Unlocked);\n self.path.parent().unwrap()\n }\n\n \/\/\/ Removes all sibling files to this locked file.\n \/\/\/\n \/\/\/ This can be useful if a directory is locked with a sentinel file but it\n \/\/\/ needs to be cleared out as it may be corrupt.\n pub fn remove_siblings(&self) -> CargoResult<()> {\n let path = self.path();\n for entry in path.parent().unwrap().read_dir()? 
{\n let entry = entry?;\n if Some(&entry.file_name()[..]) == path.file_name() {\n continue;\n }\n let kind = entry.file_type()?;\n if kind.is_dir() {\n paths::remove_dir_all(entry.path())?;\n } else {\n paths::remove_file(entry.path())?;\n }\n }\n Ok(())\n }\n}\n\nimpl Read for FileLock {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.file().read(buf)\n }\n}\n\nimpl Seek for FileLock {\n fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {\n self.file().seek(to)\n }\n}\n\nimpl Write for FileLock {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.file().write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.file().flush()\n }\n}\n\nimpl Drop for FileLock {\n fn drop(&mut self) {\n if self.state != State::Unlocked {\n if let Some(f) = self.f.take() {\n let _ = unlock(&f);\n }\n }\n }\n}\n\n\/\/\/ A \"filesystem\" is intended to be a globally shared, hence locked, resource\n\/\/\/ in Cargo.\n\/\/\/\n\/\/\/ The `Path` of a filesystem cannot be learned unless it's done in a locked\n\/\/\/ fashion, and otherwise functions on this structure are prepared to handle\n\/\/\/ concurrent invocations across multiple instances of Cargo.\n#[derive(Clone, Debug)]\npub struct Filesystem {\n root: PathBuf,\n}\n\nimpl Filesystem {\n \/\/\/ Creates a new filesystem to be rooted at the given path.\n pub fn new(path: PathBuf) -> Filesystem {\n Filesystem { root: path }\n }\n\n \/\/\/ Like `Path::join`, creates a new filesystem rooted at this filesystem\n \/\/\/ joined with the given path.\n pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {\n Filesystem::new(self.root.join(other))\n }\n\n \/\/\/ Like `Path::push`, pushes a new path component onto this filesystem.\n pub fn push<T: AsRef<Path>>(&mut self, other: T) {\n self.root.push(other);\n }\n\n \/\/\/ Consumes this filesystem and returns the underlying `PathBuf`.\n \/\/\/\n \/\/\/ Note that this is a relatively dangerous operation and should be used\n \/\/\/ with great caution!.\n pub fn into_path_unlocked(self) -> PathBuf {\n self.root\n }\n\n \/\/\/ Returns the underlying `Path`.\n \/\/\/\n \/\/\/ Note that this is a relatively dangerous operation and should be used\n \/\/\/ with great caution!.\n pub fn as_path_unlocked(&self) -> &Path {\n &self.root\n }\n\n \/\/\/ Creates the directory pointed to by this filesystem.\n \/\/\/\n \/\/\/ Handles errors where other Cargo processes are also attempting to\n \/\/\/ concurrently create this directory.\n pub fn create_dir(&self) -> CargoResult<()> {\n paths::create_dir_all(&self.root)\n }\n\n \/\/\/ Returns an adaptor that can be used to print the path of this\n \/\/\/ filesystem.\n pub fn display(&self) -> Display<'_> {\n self.root.display()\n }\n\n \/\/\/ Opens exclusive access to a file, returning the locked version of a\n \/\/\/ file.\n \/\/\/\n \/\/\/ This function will create a file at `path` if it doesn't already exist\n \/\/\/ (including intermediate directories), and then it will acquire an\n \/\/\/ exclusive lock on `path`. 
If the process must block waiting for the\n \/\/\/ lock, the `msg` is printed to `config`.\n \/\/\/\n \/\/\/ The returned file can be accessed to look at the path and also has\n \/\/\/ read\/write access to the underlying file.\n pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>\n where\n P: AsRef<Path>,\n {\n self.open(\n path.as_ref(),\n OpenOptions::new().read(true).write(true).create(true),\n State::Exclusive,\n config,\n msg,\n )\n }\n\n \/\/\/ Opens shared access to a file, returning the locked version of a file.\n \/\/\/\n \/\/\/ This function will fail if `path` doesn't already exist, but if it does\n \/\/\/ then it will acquire a shared lock on `path`. If the process must block\n \/\/\/ waiting for the lock, the `msg` is printed to `config`.\n \/\/\/\n \/\/\/ The returned file can be accessed to look at the path and also has read\n \/\/\/ access to the underlying file. Any writes to the file will return an\n \/\/\/ error.\n pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>\n where\n P: AsRef<Path>,\n {\n self.open(\n path.as_ref(),\n OpenOptions::new().read(true),\n State::Shared,\n config,\n msg,\n )\n }\n\n fn open(\n &self,\n path: &Path,\n opts: &OpenOptions,\n state: State,\n config: &Config,\n msg: &str,\n ) -> CargoResult<FileLock> {\n let path = self.root.join(path);\n\n \/\/ If we want an exclusive lock then if we fail because of NotFound it's\n \/\/ likely because an intermediate directory didn't exist, so try to\n \/\/ create the directory and then continue.\n let f = opts\n .open(&path)\n .or_else(|e| {\n if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {\n paths::create_dir_all(path.parent().unwrap())?;\n Ok(opts.open(&path)?)\n } else {\n Err(anyhow::Error::from(e))\n }\n })\n .with_context(|| format!(\"failed to open: {}\", path.display()))?;\n match state {\n State::Exclusive => {\n acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| {\n lock_exclusive(&f)\n })?;\n }\n State::Shared => {\n acquire(config, msg, &path, &|| try_lock_shared(&f), &|| {\n lock_shared(&f)\n })?;\n }\n State::Unlocked => {}\n }\n Ok(FileLock {\n f: Some(f),\n path,\n state,\n })\n }\n}\n\nimpl PartialEq<Path> for Filesystem {\n fn eq(&self, other: &Path) -> bool {\n self.root == other\n }\n}\n\nimpl PartialEq<Filesystem> for Path {\n fn eq(&self, other: &Filesystem) -> bool {\n self == other.root\n }\n}\n\n\/\/\/ Acquires a lock on a file in a \"nice\" manner.\n\/\/\/\n\/\/\/ Almost all long-running blocking actions in Cargo have a status message\n\/\/\/ associated with them as we're not sure how long they'll take. Whenever a\n\/\/\/ conflicted file lock happens, this is the case (we're not sure when the lock\n\/\/\/ will be released).\n\/\/\/\n\/\/\/ This function will acquire the lock on a `path`, printing out a nice message\n\/\/\/ to the console if we have to wait for it. 
It will first attempt to use `try`\n\/\/\/ to acquire a lock on the crate, and in the case of contention it will emit a\n\/\/\/ status message based on `msg` to `config`'s shell, and then use `block` to\n\/\/\/ block waiting to acquire a lock.\n\/\/\/\n\/\/\/ Returns an error if the lock could not be acquired or if any error other\n\/\/\/ than a contention error happens.\nfn acquire(\n config: &Config,\n msg: &str,\n path: &Path,\n lock_try: &dyn Fn() -> io::Result<()>,\n lock_block: &dyn Fn() -> io::Result<()>,\n) -> CargoResult<()> {\n \/\/ File locking on Unix is currently implemented via `flock`, which is known\n \/\/ to be broken on NFS. We could in theory just ignore errors that happen on\n \/\/ NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking\n \/\/ forever**, even if the \"non-blocking\" flag is passed!\n \/\/\n \/\/ As a result, we just skip all file locks entirely on NFS mounts. That\n \/\/ should avoid calling any `flock` functions at all, and it wouldn't work\n \/\/ there anyway.\n \/\/\n \/\/ [1]: https:\/\/github.com\/rust-lang\/cargo\/issues\/2615\n if is_on_nfs_mount(path) {\n return Ok(());\n }\n\n match lock_try() {\n Ok(()) => return Ok(()),\n\n \/\/ In addition to ignoring NFS which is commonly not working we also\n \/\/ just ignore locking on filesystems that look like they don't\n \/\/ implement file locking.\n Err(e) if error_unsupported(&e) => return Ok(()),\n\n Err(e) => {\n if !error_contended(&e) {\n let e = anyhow::Error::from(e);\n let cx = format!(\"failed to lock file: {}\", path.display());\n return Err(e.context(cx));\n }\n }\n }\n let msg = format!(\"waiting for file lock on {}\", msg);\n config.shell().status_with_color(\"Blocking\", &msg, Cyan)?;\n\n lock_block().with_context(|| format!(\"failed to lock file: {}\", path.display()))?;\n return Ok(());\n\n #[cfg(all(target_os = \"linux\", not(target_env = \"musl\")))]\n fn is_on_nfs_mount(path: &Path) -> bool {\n use std::ffi::CString;\n use std::mem;\n use std::os::unix::prelude::*;\n\n let path = match CString::new(path.as_os_str().as_bytes()) {\n Ok(path) => path,\n Err(_) => return false,\n };\n\n unsafe {\n let mut buf: libc::statfs = mem::zeroed();\n let r = libc::statfs(path.as_ptr(), &mut buf);\n\n r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32\n }\n }\n\n #[cfg(any(not(target_os = \"linux\"), target_env = \"musl\"))]\n fn is_on_nfs_mount(_path: &Path) -> bool {\n false\n }\n}\n\n#[cfg(unix)]\nmod sys {\n use std::fs::File;\n use std::io::{Error, Result};\n use std::os::unix::io::AsRawFd;\n\n pub(super) fn lock_shared(file: &File) -> Result<()> {\n flock(file, libc::LOCK_SH)\n }\n\n pub(super) fn lock_exclusive(file: &File) -> Result<()> {\n flock(file, libc::LOCK_EX)\n }\n\n pub(super) fn try_lock_shared(file: &File) -> Result<()> {\n flock(file, libc::LOCK_SH | libc::LOCK_NB)\n }\n\n pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {\n flock(file, libc::LOCK_EX | libc::LOCK_NB)\n }\n\n pub(super) fn unlock(file: &File) -> Result<()> {\n flock(file, libc::LOCK_UN)\n }\n\n pub(super) fn error_contended(err: &Error) -> bool {\n err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK)\n }\n\n pub(super) fn error_unsupported(err: &Error) -> bool {\n match err.raw_os_error() {\n \/\/ Unfortunately, depending on the target, these may or may not be the same.\n \/\/ For targets in which they are the same, the duplicate pattern causes a warning.\n #[allow(unreachable_patterns)]\n Some(libc::ENOTSUP | libc::EOPNOTSUPP) => true,\n #[cfg(any(target_os = 
\"linux\", target_os = \"android\"))]\n Some(libc::ENOSYS) => true,\n _ => false,\n }\n }\n\n #[cfg(not(target_os = \"solaris\"))]\n fn flock(file: &File, flag: libc::c_int) -> Result<()> {\n let ret = unsafe { libc::flock(file.as_raw_fd(), flag) };\n if ret < 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n\n #[cfg(target_os = \"solaris\")]\n fn flock(file: &File, flag: libc::c_int) -> Result<()> {\n \/\/ Solaris lacks flock(), so simply succeed with a no-op\n Ok(())\n }\n}\n\n#[cfg(windows)]\nmod sys {\n use std::fs::File;\n use std::io::{Error, Result};\n use std::mem;\n use std::os::windows::io::AsRawHandle;\n\n use winapi::shared::minwindef::DWORD;\n use winapi::shared::winerror::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION};\n use winapi::um::fileapi::{LockFileEx, UnlockFile};\n use winapi::um::minwinbase::{LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY};\n\n pub(super) fn lock_shared(file: &File) -> Result<()> {\n lock_file(file, 0)\n }\n\n pub(super) fn lock_exclusive(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_EXCLUSIVE_LOCK)\n }\n\n pub(super) fn try_lock_shared(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_FAIL_IMMEDIATELY)\n }\n\n pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY)\n }\n\n pub(super) fn error_contended(err: &Error) -> bool {\n err.raw_os_error()\n .map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32)\n }\n\n pub(super) fn error_unsupported(err: &Error) -> bool {\n err.raw_os_error()\n .map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32)\n }\n\n pub(super) fn unlock(file: &File) -> Result<()> {\n unsafe {\n let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0);\n if ret == 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n }\n\n fn lock_file(file: &File, flags: DWORD) -> Result<()> {\n unsafe {\n let mut overlapped = mem::zeroed();\n let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped);\n if ret == 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n }\n}\n<commit_msg>No longer gate unsupported file locking behind Linux as OS<commit_after>use std::fs::{File, OpenOptions};\nuse std::io;\nuse std::io::{Read, Seek, SeekFrom, Write};\nuse std::path::{Display, Path, PathBuf};\n\nuse crate::util::errors::CargoResult;\nuse crate::util::Config;\nuse anyhow::Context as _;\nuse cargo_util::paths;\nuse sys::*;\nuse termcolor::Color::Cyan;\n\n#[derive(Debug)]\npub struct FileLock {\n f: Option<File>,\n path: PathBuf,\n state: State,\n}\n\n#[derive(PartialEq, Debug)]\nenum State {\n Unlocked,\n Shared,\n Exclusive,\n}\n\nimpl FileLock {\n \/\/\/ Returns the underlying file handle of this lock.\n pub fn file(&self) -> &File {\n self.f.as_ref().unwrap()\n }\n\n \/\/\/ Returns the underlying path that this lock points to.\n \/\/\/\n \/\/\/ Note that special care must be taken to ensure that the path is not\n \/\/\/ referenced outside the lifetime of this lock.\n pub fn path(&self) -> &Path {\n assert_ne!(self.state, State::Unlocked);\n &self.path\n }\n\n \/\/\/ Returns the parent path containing this file\n pub fn parent(&self) -> &Path {\n assert_ne!(self.state, State::Unlocked);\n self.path.parent().unwrap()\n }\n\n \/\/\/ Removes all sibling files to this locked file.\n \/\/\/\n \/\/\/ This can be useful if a directory is locked with a sentinel file but it\n \/\/\/ needs to be cleared out as it may be corrupt.\n pub fn remove_siblings(&self) -> CargoResult<()> {\n let path = self.path();\n for entry in 
path.parent().unwrap().read_dir()? {\n let entry = entry?;\n if Some(&entry.file_name()[..]) == path.file_name() {\n continue;\n }\n let kind = entry.file_type()?;\n if kind.is_dir() {\n paths::remove_dir_all(entry.path())?;\n } else {\n paths::remove_file(entry.path())?;\n }\n }\n Ok(())\n }\n}\n\nimpl Read for FileLock {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.file().read(buf)\n }\n}\n\nimpl Seek for FileLock {\n fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {\n self.file().seek(to)\n }\n}\n\nimpl Write for FileLock {\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n self.file().write(buf)\n }\n\n fn flush(&mut self) -> io::Result<()> {\n self.file().flush()\n }\n}\n\nimpl Drop for FileLock {\n fn drop(&mut self) {\n if self.state != State::Unlocked {\n if let Some(f) = self.f.take() {\n let _ = unlock(&f);\n }\n }\n }\n}\n\n\/\/\/ A \"filesystem\" is intended to be a globally shared, hence locked, resource\n\/\/\/ in Cargo.\n\/\/\/\n\/\/\/ The `Path` of a filesystem cannot be learned unless it's done in a locked\n\/\/\/ fashion, and otherwise functions on this structure are prepared to handle\n\/\/\/ concurrent invocations across multiple instances of Cargo.\n#[derive(Clone, Debug)]\npub struct Filesystem {\n root: PathBuf,\n}\n\nimpl Filesystem {\n \/\/\/ Creates a new filesystem to be rooted at the given path.\n pub fn new(path: PathBuf) -> Filesystem {\n Filesystem { root: path }\n }\n\n \/\/\/ Like `Path::join`, creates a new filesystem rooted at this filesystem\n \/\/\/ joined with the given path.\n pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {\n Filesystem::new(self.root.join(other))\n }\n\n \/\/\/ Like `Path::push`, pushes a new path component onto this filesystem.\n pub fn push<T: AsRef<Path>>(&mut self, other: T) {\n self.root.push(other);\n }\n\n \/\/\/ Consumes this filesystem and returns the underlying `PathBuf`.\n \/\/\/\n \/\/\/ Note that this is a relatively dangerous operation and should be used\n \/\/\/ with great caution!.\n pub fn into_path_unlocked(self) -> PathBuf {\n self.root\n }\n\n \/\/\/ Returns the underlying `Path`.\n \/\/\/\n \/\/\/ Note that this is a relatively dangerous operation and should be used\n \/\/\/ with great caution!.\n pub fn as_path_unlocked(&self) -> &Path {\n &self.root\n }\n\n \/\/\/ Creates the directory pointed to by this filesystem.\n \/\/\/\n \/\/\/ Handles errors where other Cargo processes are also attempting to\n \/\/\/ concurrently create this directory.\n pub fn create_dir(&self) -> CargoResult<()> {\n paths::create_dir_all(&self.root)\n }\n\n \/\/\/ Returns an adaptor that can be used to print the path of this\n \/\/\/ filesystem.\n pub fn display(&self) -> Display<'_> {\n self.root.display()\n }\n\n \/\/\/ Opens exclusive access to a file, returning the locked version of a\n \/\/\/ file.\n \/\/\/\n \/\/\/ This function will create a file at `path` if it doesn't already exist\n \/\/\/ (including intermediate directories), and then it will acquire an\n \/\/\/ exclusive lock on `path`. 
If the process must block waiting for the\n \/\/\/ lock, the `msg` is printed to `config`.\n \/\/\/\n \/\/\/ The returned file can be accessed to look at the path and also has\n \/\/\/ read\/write access to the underlying file.\n pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>\n where\n P: AsRef<Path>,\n {\n self.open(\n path.as_ref(),\n OpenOptions::new().read(true).write(true).create(true),\n State::Exclusive,\n config,\n msg,\n )\n }\n\n \/\/\/ Opens shared access to a file, returning the locked version of a file.\n \/\/\/\n \/\/\/ This function will fail if `path` doesn't already exist, but if it does\n \/\/\/ then it will acquire a shared lock on `path`. If the process must block\n \/\/\/ waiting for the lock, the `msg` is printed to `config`.\n \/\/\/\n \/\/\/ The returned file can be accessed to look at the path and also has read\n \/\/\/ access to the underlying file. Any writes to the file will return an\n \/\/\/ error.\n pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>\n where\n P: AsRef<Path>,\n {\n self.open(\n path.as_ref(),\n OpenOptions::new().read(true),\n State::Shared,\n config,\n msg,\n )\n }\n\n fn open(\n &self,\n path: &Path,\n opts: &OpenOptions,\n state: State,\n config: &Config,\n msg: &str,\n ) -> CargoResult<FileLock> {\n let path = self.root.join(path);\n\n \/\/ If we want an exclusive lock then if we fail because of NotFound it's\n \/\/ likely because an intermediate directory didn't exist, so try to\n \/\/ create the directory and then continue.\n let f = opts\n .open(&path)\n .or_else(|e| {\n if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {\n paths::create_dir_all(path.parent().unwrap())?;\n Ok(opts.open(&path)?)\n } else {\n Err(anyhow::Error::from(e))\n }\n })\n .with_context(|| format!(\"failed to open: {}\", path.display()))?;\n match state {\n State::Exclusive => {\n acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| {\n lock_exclusive(&f)\n })?;\n }\n State::Shared => {\n acquire(config, msg, &path, &|| try_lock_shared(&f), &|| {\n lock_shared(&f)\n })?;\n }\n State::Unlocked => {}\n }\n Ok(FileLock {\n f: Some(f),\n path,\n state,\n })\n }\n}\n\nimpl PartialEq<Path> for Filesystem {\n fn eq(&self, other: &Path) -> bool {\n self.root == other\n }\n}\n\nimpl PartialEq<Filesystem> for Path {\n fn eq(&self, other: &Filesystem) -> bool {\n self == other.root\n }\n}\n\n\/\/\/ Acquires a lock on a file in a \"nice\" manner.\n\/\/\/\n\/\/\/ Almost all long-running blocking actions in Cargo have a status message\n\/\/\/ associated with them as we're not sure how long they'll take. Whenever a\n\/\/\/ conflicted file lock happens, this is the case (we're not sure when the lock\n\/\/\/ will be released).\n\/\/\/\n\/\/\/ This function will acquire the lock on a `path`, printing out a nice message\n\/\/\/ to the console if we have to wait for it. 
It will first attempt to use `try`\n\/\/\/ to acquire a lock on the crate, and in the case of contention it will emit a\n\/\/\/ status message based on `msg` to `config`'s shell, and then use `block` to\n\/\/\/ block waiting to acquire a lock.\n\/\/\/\n\/\/\/ Returns an error if the lock could not be acquired or if any error other\n\/\/\/ than a contention error happens.\nfn acquire(\n config: &Config,\n msg: &str,\n path: &Path,\n lock_try: &dyn Fn() -> io::Result<()>,\n lock_block: &dyn Fn() -> io::Result<()>,\n) -> CargoResult<()> {\n \/\/ File locking on Unix is currently implemented via `flock`, which is known\n \/\/ to be broken on NFS. We could in theory just ignore errors that happen on\n \/\/ NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking\n \/\/ forever**, even if the \"non-blocking\" flag is passed!\n \/\/\n \/\/ As a result, we just skip all file locks entirely on NFS mounts. That\n \/\/ should avoid calling any `flock` functions at all, and it wouldn't work\n \/\/ there anyway.\n \/\/\n \/\/ [1]: https:\/\/github.com\/rust-lang\/cargo\/issues\/2615\n if is_on_nfs_mount(path) {\n return Ok(());\n }\n\n match lock_try() {\n Ok(()) => return Ok(()),\n\n \/\/ In addition to ignoring NFS which is commonly not working we also\n \/\/ just ignore locking on filesystems that look like they don't\n \/\/ implement file locking.\n Err(e) if error_unsupported(&e) => return Ok(()),\n\n Err(e) => {\n if !error_contended(&e) {\n let e = anyhow::Error::from(e);\n let cx = format!(\"failed to lock file: {}\", path.display());\n return Err(e.context(cx));\n }\n }\n }\n let msg = format!(\"waiting for file lock on {}\", msg);\n config.shell().status_with_color(\"Blocking\", &msg, Cyan)?;\n\n lock_block().with_context(|| format!(\"failed to lock file: {}\", path.display()))?;\n return Ok(());\n\n #[cfg(all(target_os = \"linux\", not(target_env = \"musl\")))]\n fn is_on_nfs_mount(path: &Path) -> bool {\n use std::ffi::CString;\n use std::mem;\n use std::os::unix::prelude::*;\n\n let path = match CString::new(path.as_os_str().as_bytes()) {\n Ok(path) => path,\n Err(_) => return false,\n };\n\n unsafe {\n let mut buf: libc::statfs = mem::zeroed();\n let r = libc::statfs(path.as_ptr(), &mut buf);\n\n r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32\n }\n }\n\n #[cfg(any(not(target_os = \"linux\"), target_env = \"musl\"))]\n fn is_on_nfs_mount(_path: &Path) -> bool {\n false\n }\n}\n\n#[cfg(unix)]\nmod sys {\n use std::fs::File;\n use std::io::{Error, Result};\n use std::os::unix::io::AsRawFd;\n\n pub(super) fn lock_shared(file: &File) -> Result<()> {\n flock(file, libc::LOCK_SH)\n }\n\n pub(super) fn lock_exclusive(file: &File) -> Result<()> {\n flock(file, libc::LOCK_EX)\n }\n\n pub(super) fn try_lock_shared(file: &File) -> Result<()> {\n flock(file, libc::LOCK_SH | libc::LOCK_NB)\n }\n\n pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {\n flock(file, libc::LOCK_EX | libc::LOCK_NB)\n }\n\n pub(super) fn unlock(file: &File) -> Result<()> {\n flock(file, libc::LOCK_UN)\n }\n\n pub(super) fn error_contended(err: &Error) -> bool {\n err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK)\n }\n\n pub(super) fn error_unsupported(err: &Error) -> bool {\n match err.raw_os_error() {\n \/\/ Unfortunately, depending on the target, these may or may not be the same.\n \/\/ For targets in which they are the same, the duplicate pattern causes a warning.\n #[allow(unreachable_patterns)]\n Some(libc::ENOTSUP | libc::EOPNOTSUPP) => true,\n Some(libc::ENOSYS) => 
true,\n _ => false,\n }\n }\n\n #[cfg(not(target_os = \"solaris\"))]\n fn flock(file: &File, flag: libc::c_int) -> Result<()> {\n let ret = unsafe { libc::flock(file.as_raw_fd(), flag) };\n if ret < 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n\n #[cfg(target_os = \"solaris\")]\n fn flock(file: &File, flag: libc::c_int) -> Result<()> {\n \/\/ Solaris lacks flock(), so simply succeed with a no-op\n Ok(())\n }\n}\n\n#[cfg(windows)]\nmod sys {\n use std::fs::File;\n use std::io::{Error, Result};\n use std::mem;\n use std::os::windows::io::AsRawHandle;\n\n use winapi::shared::minwindef::DWORD;\n use winapi::shared::winerror::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION};\n use winapi::um::fileapi::{LockFileEx, UnlockFile};\n use winapi::um::minwinbase::{LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY};\n\n pub(super) fn lock_shared(file: &File) -> Result<()> {\n lock_file(file, 0)\n }\n\n pub(super) fn lock_exclusive(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_EXCLUSIVE_LOCK)\n }\n\n pub(super) fn try_lock_shared(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_FAIL_IMMEDIATELY)\n }\n\n pub(super) fn try_lock_exclusive(file: &File) -> Result<()> {\n lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY)\n }\n\n pub(super) fn error_contended(err: &Error) -> bool {\n err.raw_os_error()\n .map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32)\n }\n\n pub(super) fn error_unsupported(err: &Error) -> bool {\n err.raw_os_error()\n .map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32)\n }\n\n pub(super) fn unlock(file: &File) -> Result<()> {\n unsafe {\n let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0);\n if ret == 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n }\n\n fn lock_file(file: &File, flags: DWORD) -> Result<()> {\n unsafe {\n let mut overlapped = mem::zeroed();\n let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped);\n if ret == 0 {\n Err(Error::last_os_error())\n } else {\n Ok(())\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change target directory of completions to target\/<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>no ; after match expression<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>build: write pkginfo to build dir, fix builddate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>NamedGetter completed and fixed issues<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::URLSearchParamsBinding;\nuse dom::bindings::codegen::Bindings::URLSearchParamsBinding::URLSearchParamsMethods;\nuse dom::bindings::codegen::UnionTypes::StringOrURLSearchParams;\nuse dom::bindings::codegen::UnionTypes::StringOrURLSearchParams::{eURLSearchParams, eString};\nuse dom::bindings::error::{Fallible};\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::utils::{Reflector, reflect_dom_object};\n\nuse util::str::DOMString;\n\nuse encoding::all::UTF_8;\nuse encoding::types::{EncodingRef, EncoderTrap};\n\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\nuse std::fmt::radix;\nuse std::ascii::OwnedAsciiExt;\n\n#[dom_struct]\npub struct URLSearchParams {\n reflector_: Reflector,\n data: DOMRefCell<HashMap<DOMString, Vec<DOMString>>>,\n}\n\nimpl URLSearchParams {\n fn new_inherited() -> URLSearchParams {\n URLSearchParams {\n reflector_: Reflector::new(),\n data: DOMRefCell::new(HashMap::new()),\n }\n }\n\n pub fn new(global: GlobalRef) -> Temporary<URLSearchParams> {\n reflect_dom_object(box URLSearchParams::new_inherited(), global, URLSearchParamsBinding::Wrap)\n }\n\n pub fn Constructor(global: GlobalRef, init: Option<StringOrURLSearchParams>) -> Fallible<Temporary<URLSearchParams>> {\n let usp = URLSearchParams::new(global).root();\n match init {\n Some(eString(_s)) => {\n \/\/ XXXManishearth we need to parse the input here\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-parser\n \/\/ We can use rust-url's implementation here:\n \/\/ https:\/\/github.com\/SimonSapin\/rust-url\/blob\/master\/form_urlencoded.rs#L29\n },\n Some(eURLSearchParams(u)) => {\n let u = u.root();\n let usp = usp.r();\n let mut map = usp.data.borrow_mut();\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let r = u.r();\n let data = r.data.borrow();\n *map = data.clone();\n },\n None => {}\n }\n Ok(Temporary::from_rooted(usp.r()))\n }\n}\n\nimpl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {\n fn Append(self, name: DOMString, value: DOMString) {\n let mut data = self.data.borrow_mut();\n\n match data.entry(name) {\n Occupied(entry) => entry.into_mut().push(value),\n Vacant(entry) => {\n entry.insert(vec!(value));\n }\n }\n\n self.update_steps();\n }\n\n fn Delete(self, name: DOMString) {\n self.data.borrow_mut().remove(&name);\n self.update_steps();\n }\n\n fn Get(self, name: DOMString) -> Option<DOMString> {\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let data = self.data.borrow();\n data.get(&name).map(|v| v[0].clone())\n }\n\n fn Has(self, name: DOMString) -> bool {\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let data = self.data.borrow();\n data.contains_key(&name)\n }\n\n fn Set(self, name: DOMString, value: DOMString) {\n self.data.borrow_mut().insert(name, vec!(value));\n self.update_steps();\n }\n}\n\npub trait URLSearchParamsHelpers {\n fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8>;\n fn update_steps(&self);\n}\n\nimpl URLSearchParamsHelpers for URLSearchParams {\n fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8> {\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-serializer\n fn serialize_string(value: &DOMString, encoding: EncodingRef) -> Vec<u8> {\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-byte-serializer\n\n let value = value.as_slice();\n \/\/ XXXManishearth should this be a strict 
encoding? Can unwrap()ing the result fail?\n let value = encoding.encode(value, EncoderTrap::Replace).unwrap();\n let mut buf = vec!();\n for i in value.iter() {\n let append = match *i {\n 0x20 => vec!(0x2B),\n 0x2A | 0x2D | 0x2E |\n 0x30 ... 0x39 | 0x41 ... 0x5A |\n 0x5F | 0x61...0x7A => vec!(*i),\n a => {\n \/\/ http:\/\/url.spec.whatwg.org\/#percent-encode\n let mut encoded = vec!(0x25); \/\/ %\n let s = format!(\"{}\", radix(a, 16)).into_ascii_uppercase();\n let bytes = s.as_bytes();\n encoded.push_all(bytes);\n encoded\n }\n };\n buf.push_all(append.as_slice());\n }\n buf\n }\n let encoding = encoding.unwrap_or(UTF_8 as EncodingRef);\n let mut buf = vec!();\n let mut first_pair = true;\n for (k, v) in self.data.borrow().iter() {\n let name = serialize_string(k, encoding);\n for val in v.iter() {\n let value = serialize_string(val, encoding);\n if first_pair {\n first_pair = false;\n } else {\n buf.push(0x26); \/\/ &\n }\n buf.push_all(name.as_slice());\n buf.push(0x3D); \/\/ =\n buf.push_all(value.as_slice())\n }\n }\n buf\n }\n\n fn update_steps(&self) {\n \/\/ XXXManishearth Implement this when the URL interface is implemented\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-uq-update\n }\n}\n<commit_msg>auto merge of #5319 : frewsxcv\/servo\/urlsearchparams, r=Ms2ger<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::URLSearchParamsBinding;\nuse dom::bindings::codegen::Bindings::URLSearchParamsBinding::URLSearchParamsMethods;\nuse dom::bindings::codegen::UnionTypes::StringOrURLSearchParams;\nuse dom::bindings::codegen::UnionTypes::StringOrURLSearchParams::{eURLSearchParams, eString};\nuse dom::bindings::error::{Fallible};\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::utils::{Reflector, reflect_dom_object};\n\nuse util::str::DOMString;\n\nuse encoding::all::UTF_8;\nuse encoding::types::{EncodingRef, EncoderTrap};\n\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\nuse std::fmt::radix;\nuse std::ascii::OwnedAsciiExt;\n\n#[dom_struct]\npub struct URLSearchParams {\n reflector_: Reflector,\n data: DOMRefCell<HashMap<DOMString, Vec<DOMString>>>,\n}\n\nimpl URLSearchParams {\n fn new_inherited() -> URLSearchParams {\n URLSearchParams {\n reflector_: Reflector::new(),\n data: DOMRefCell::new(HashMap::new()),\n }\n }\n\n pub fn new(global: GlobalRef) -> Temporary<URLSearchParams> {\n reflect_dom_object(box URLSearchParams::new_inherited(), global,\n URLSearchParamsBinding::Wrap)\n }\n\n pub fn Constructor(global: GlobalRef, init: Option<StringOrURLSearchParams>) ->\n Fallible<Temporary<URLSearchParams>> {\n let usp = URLSearchParams::new(global).root();\n match init {\n Some(eString(_s)) => {\n \/\/ XXXManishearth we need to parse the input here\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-parser\n \/\/ We can use rust-url's implementation here:\n \/\/ https:\/\/github.com\/SimonSapin\/rust-url\/blob\/master\/form_urlencoded.rs#L29\n },\n Some(eURLSearchParams(u)) => {\n let u = u.root();\n let usp = usp.r();\n let mut map = usp.data.borrow_mut();\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let r = u.r();\n let data = r.data.borrow();\n *map = data.clone();\n },\n None => {}\n }\n 
Ok(Temporary::from_rooted(usp.r()))\n }\n}\n\nimpl<'a> URLSearchParamsMethods for JSRef<'a, URLSearchParams> {\n fn Append(self, name: DOMString, value: DOMString) {\n let mut data = self.data.borrow_mut();\n\n match data.entry(name) {\n Occupied(entry) => entry.into_mut().push(value),\n Vacant(entry) => {\n entry.insert(vec!(value));\n }\n }\n\n self.update_steps();\n }\n\n fn Delete(self, name: DOMString) {\n self.data.borrow_mut().remove(&name);\n self.update_steps();\n }\n\n fn Get(self, name: DOMString) -> Option<DOMString> {\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let data = self.data.borrow();\n data.get(&name).map(|v| v[0].clone())\n }\n\n fn Has(self, name: DOMString) -> bool {\n \/\/ FIXME(https:\/\/github.com\/rust-lang\/rust\/issues\/23338)\n let data = self.data.borrow();\n data.contains_key(&name)\n }\n\n fn Set(self, name: DOMString, value: DOMString) {\n self.data.borrow_mut().insert(name, vec!(value));\n self.update_steps();\n }\n}\n\npub trait URLSearchParamsHelpers {\n fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8>;\n fn update_steps(&self);\n}\n\nimpl URLSearchParamsHelpers for URLSearchParams {\n fn serialize(&self, encoding: Option<EncodingRef>) -> Vec<u8> {\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-serializer\n fn serialize_string(value: &DOMString, encoding: EncodingRef) -> Vec<u8> {\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-urlencoded-byte-serializer\n\n let value = value.as_slice();\n \/\/ XXXManishearth should this be a strict encoding? Can unwrap()ing the result fail?\n let value = encoding.encode(value, EncoderTrap::Replace).unwrap();\n\n \/\/ Step 1.\n let mut buf = vec!();\n\n \/\/ Step 2.\n for i in &value {\n let append = match *i {\n \/\/ Convert spaces:\n \/\/ ' ' => '+'\n 0x20 => vec!(0x2B),\n\n \/\/ Retain the following characters:\n \/\/ '*', '-', '.', '0'...'9', 'A'...'Z', '_', 'a'...'z'\n 0x2A | 0x2D | 0x2E | 0x30...0x39 |\n 0x41...0x5A | 0x5F | 0x61...0x7A => vec!(*i),\n\n \/\/ Encode everything else using 'percented-encoded bytes'\n \/\/ http:\/\/url.spec.whatwg.org\/#percent-encode\n a => {\n let mut encoded = vec!(0x25); \/\/ %\n let s = format!(\"{}\", radix(a, 16)).into_ascii_uppercase();\n let bytes = s.as_bytes();\n encoded.push_all(bytes);\n encoded\n }\n };\n buf.push_all(&append);\n }\n\n \/\/ Step 3.\n buf\n }\n let encoding = encoding.unwrap_or(UTF_8 as EncodingRef);\n let mut buf = vec!();\n let mut first_pair = true;\n for (k, v) in self.data.borrow().iter() {\n let name = serialize_string(k, encoding);\n for val in v {\n let value = serialize_string(val, encoding);\n if first_pair {\n first_pair = false;\n } else {\n buf.push(0x26); \/\/ &\n }\n buf.push_all(&name);\n buf.push(0x3D); \/\/ =\n buf.push_all(&value)\n }\n }\n buf\n }\n\n fn update_steps(&self) {\n \/\/ XXXManishearth Implement this when the URL interface is implemented\n \/\/ http:\/\/url.spec.whatwg.org\/#concept-uq-update\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>core: derive additional traits for exposed types<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>retag<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix one clippy warning.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> change to make all tests run<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(doctest): work around lack of `Copy`ability in stdlib structs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Iter & 
IterMut<commit_after><|endoftext|>"} {"text":"<commit_before>use rustc_serialize::json;\nuse rustc_serialize::base64::FromBase64;\nuse error::Error;\n\n#[derive(RustcDecodable, RustcEncodable)]\npub struct Header {\n pub typ: String,\n pub alg: Option<String>,\n}\n\nimpl Header {\n pub fn parse(raw: &str) -> Result<Header, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let header = try!(json::decode(&*s));\n\n Ok(header)\n }\n}\n<commit_msg>Add test for header<commit_after>use rustc_serialize::json;\nuse rustc_serialize::base64::FromBase64;\nuse error::Error;\n\n#[derive(RustcDecodable, RustcEncodable)]\npub struct Header {\n pub typ: String,\n pub alg: Option<String>,\n}\n\nimpl Header {\n pub fn parse(raw: &str) -> Result<Header, Error> {\n let data = try!(raw.from_base64());\n let s = try!(String::from_utf8(data));\n let header = try!(json::decode(&*s));\n\n Ok(header)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use header::Header;\n\n #[test]\n fn parse() {\n let enc = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let header = Header::parse(enc).unwrap();\n\n assert_eq!(header.typ, \"JWT\");\n assert_eq!(header.alg.unwrap(), \"HS256\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor Chunk debug to be procedural<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adds Rust Problem 8<commit_after>\/\/\/ Problem 8\n\/\/\/ The four adjacent digits in the 1000-digit number that have the greatest \n\/\/\/ product are 9 × 9 × 8 × 9 = 5832.\n\/\/\/ \n\/\/\/ 73167176531330624919225119674426574742355349194934\n\/\/\/ 96983520312774506326239578318016984801869478851843\n\/\/\/ 85861560789112949495459501737958331952853208805511\n\/\/\/ 12540698747158523863050715693290963295227443043557\n\/\/\/ 66896648950445244523161731856403098711121722383113\n\/\/\/ 62229893423380308135336276614282806444486645238749\n\/\/\/ 30358907296290491560440772390713810515859307960866\n\/\/\/ 70172427121883998797908792274921901699720888093776\n\/\/\/ 65727333001053367881220235421809751254540594752243\n\/\/\/ 52584907711670556013604839586446706324415722155397\n\/\/\/ 53697817977846174064955149290862569321978468622482\n\/\/\/ 83972241375657056057490261407972968652414535100474\n\/\/\/ 82166370484403199890008895243450658541227588666881\n\/\/\/ 16427171479924442928230863465674813919123162824586\n\/\/\/ 17866458359124566529476545682848912883142607690042\n\/\/\/ 24219022671055626321111109370544217506941658960408\n\/\/\/ 07198403850962455444362981230987879927244284909188\n\/\/\/ 84580156166097919133875499200524063689912560717606\n\/\/\/ 05886116467109405077541002256983155200055935729725\n\/\/\/ 71636269561882670428252483600823257530420752963450\n\/\/\/ \n\/\/\/ Find the thirteen adjacent digits in the 1000-digit number that have the \n\/\/\/ greatest product. 
What is the value of this product?\nfn main() {\n \/\/ How many adjacent digits to check?\n let adj: usize = 13;\n let num: String = \"73167176531330624919225119674426574742355349194934\\\n 96983520312774506326239578318016984801869478851843\\\n 85861560789112949495459501737958331952853208805511\\\n 12540698747158523863050715693290963295227443043557\\\n 66896648950445244523161731856403098711121722383113\\\n 62229893423380308135336276614282806444486645238749\\\n 30358907296290491560440772390713810515859307960866\\\n 70172427121883998797908792274921901699720888093776\\\n 65727333001053367881220235421809751254540594752243\\\n 52584907711670556013604839586446706324415722155397\\\n 53697817977846174064955149290862569321978468622482\\\n 83972241375657056057490261407972968652414535100474\\\n 82166370484403199890008895243450658541227588666881\\\n 16427171479924442928230863465674813919123162824586\\\n 17866458359124566529476545682848912883142607690042\\\n 24219022671055626321111109370544217506941658960408\\\n 07198403850962455444362981230987879927244284909188\\\n 84580156166097919133875499200524063689912560717606\\\n 05886116467109405077541002256983155200055935729725\\\n 71636269561882670428252483600823257530420752963450\".to_string();\n \/\/ let mut digits: Vec<u8> = Vec::new();\n\n \/\/ for c in num.chars() {\n \/\/ digits.push(try!(c.to_digit(10)))\n \/\/ }\n \n let digits: Vec<u8> = num.chars()\n .map(|c| c.to_digit(10).unwrap() as u8)\n .collect();\n\n let mut window: Vec<u8> = Vec::new();\n let mut max: u64 = 0;\n let mut max_window: Vec<u8> = vec![0u8; adj];\n\n for d in digits {\n if window.len() < adj {\n window.push(d);\n } else {\n let mut mul: u64 = 1;\n for v in &window {\n mul *= *v as u64;\n }\n if mul > max {\n max = mul;\n max_window.clone_from_slice(&window);\n }\n\n window.push(d);\n window = window.split_off(1);\n }\n }\n\n print!(\"Answer: {} [digits: \", max);\n for d in max_window {\n print!(\"{}\", d);\n }\n print!(\"]\\n\");\n}<|endoftext|>"} {"text":"<commit_before>use orbclient;\nuse super::super::geometry;\nuse std;\n\n\/\/\/ Draw a triangle, from 9 floats representing vertice coordinates\n#[allow(dead_code)]\npub fn triangle_p(x1: f32, y1: f32, z1: f32,\n x2: f32, y2: f32, z2: f32,\n x3: f32, y3: f32, z3: f32, \n color: orbclient::Color, window: &mut orbclient::window::Window) {\n use super::{perpective, screen};\n use std::cmp::{min, max};\n \n \/\/ Calculate perspective for points.\n let (x1, y1) = perpective(x1, y1, z1);\n let (x2, y2) = perpective(x2, y2, z2);\n let (x3, y3) = perpective(x3, y3, z3);\n \n let scr_width = window.width() as i32;\n let scr_height = window.height() as i32;\n \n \/\/ Change f32 points into drawable i32, based on screen width, \n let (x1, y1) = screen(x1, y1, scr_width, scr_height);\n let (x2, y2) = screen(x2, y2, scr_width, scr_height);\n let (x3, y3) = screen(x3, y3, scr_width, scr_height);\n \n let x_low = min(x1, min(x2, x3));\n let x_max = max(x1, max(x2, x3));\n let y_low = min(y1, min(y2, y3));\n let y_max = max(y1, max(y2, y3));\n \n {\n let x1 = x1 as f32;\n let y1 = y1 as f32;\n let x2 = x2 as f32;\n let y2 = y2 as f32;\n let x3 = x3 as f32;\n let y3 = y3 as f32;\n\n for x in x_low..x_max {\n for y in y_low..y_max {\n let mut alpha: f32;\n let mut beta: f32;\n let mut gamma: f32;\n \n let x = x as f32;\n let y = y as f32;\n \n alpha = ((y2 - y3)*(x - x3) + (x3 - x2)*(y - y3)) \/ ((y2 - y3)*(x1 - x3) + (x3 - x2)*(y1 - y3));\n beta = ((y3 - y1)*(x - x3) + (x1 - x3)*(y - y3)) \/ ((y2 - y3)*(x1 - x3) + (x3 - x2)*(y1 - y3));\n gamma = 
1.0 - alpha - beta;\n \n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.pixel(x as i32, y as i32, color);\n }\n }\n }\n }\n \n \n window.line(x1, y1, x2, y2, color);\n window.line(x2, y2, x3, y3, color);\n window.line(x3, y3, x1, y1, color);\n}\n \npub fn triangle_s(triangle: geometry::Triangle,\n color: orbclient::Color, window: &mut orbclient::window::Window) {\n triangle_p(triangle.p1.x, triangle.p1.y, triangle.p1.z,\n triangle.p2.x, triangle.p2.y, triangle.p2.z,\n triangle.p3.x, triangle.p3.y, triangle.p3.z,\n color, window); \n}<commit_msg>Fix lack of reference<commit_after>use orbclient;\nuse super::super::geometry;\nuse std;\n\n\/\/\/ Draw a triangle, from 9 floats representing vertice coordinates\n#[allow(dead_code)]\npub fn triangle_p(x1: f32, y1: f32, z1: f32,\n x2: f32, y2: f32, z2: f32,\n x3: f32, y3: f32, z3: f32, \n color: orbclient::Color, window: &mut orbclient::window::Window) {\n use super::{perpective, screen};\n use std::cmp::{min, max};\n \n \/\/ Calculate perspective for points.\n let (x1, y1) = perpective(x1, y1, z1);\n let (x2, y2) = perpective(x2, y2, z2);\n let (x3, y3) = perpective(x3, y3, z3);\n \n let scr_width = window.width() as i32;\n let scr_height = window.height() as i32;\n \n \/\/ Change f32 points into drawable i32, based on screen width, \n let (x1, y1) = screen(x1, y1, scr_width, scr_height);\n let (x2, y2) = screen(x2, y2, scr_width, scr_height);\n let (x3, y3) = screen(x3, y3, scr_width, scr_height);\n \n let x_low = min(x1, min(x2, x3));\n let x_max = max(x1, max(x2, x3));\n let y_low = min(y1, min(y2, y3));\n let y_max = max(y1, max(y2, y3));\n \n {\n let x1 = x1 as f32;\n let y1 = y1 as f32;\n let x2 = x2 as f32;\n let y2 = y2 as f32;\n let x3 = x3 as f32;\n let y3 = y3 as f32;\n\n for x in x_low..x_max {\n for y in y_low..y_max {\n let mut alpha: f32;\n let mut beta: f32;\n let mut gamma: f32;\n \n let x = x as f32;\n let y = y as f32;\n \n alpha = ((y2 - y3)*(x - x3) + (x3 - x2)*(y - y3)) \/ ((y2 - y3)*(x1 - x3) + (x3 - x2)*(y1 - y3));\n beta = ((y3 - y1)*(x - x3) + (x1 - x3)*(y - y3)) \/ ((y2 - y3)*(x1 - x3) + (x3 - x2)*(y1 - y3));\n gamma = 1.0 - alpha - beta;\n \n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n window.pixel(x as i32, y as i32, color);\n }\n }\n }\n }\n \n \n window.line(x1, y1, x2, y2, color);\n window.line(x2, y2, x3, y3, color);\n window.line(x3, y3, x1, y1, color);\n}\n \npub fn triangle_s(triangle: &geometry::Triangle,\n color: orbclient::Color, window: &mut orbclient::window::Window) {\n triangle_p(triangle.p1.x, triangle.p1.y, triangle.p1.z,\n triangle.p2.x, triangle.p2.y, triangle.p2.z,\n triangle.p3.x, triangle.p3.y, triangle.p3.z,\n color, window); \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>else conditional added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #74671 - rust-lang:const-generics-coerce-unsized, r=nikomatsakis<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Functions for dealing with devices.\n\nuse std::fs::File;\nuse std::io::{Seek, Write, SeekFrom};\nuse std::fs::OpenOptions;\nuse std::os::unix::prelude::AsRawFd;\nuse std::path::Path;\n\nuse devicemapper::consts::SECTOR_SIZE;\nuse devicemapper::{Bytes, Sectors};\n\nuse super::super::errors::{EngineResult, EngineError};\n\nioctl!(read blkgetsize64 with 0x12, 114; u64);\n\npub fn blkdev_size(file: &File) -> EngineResult<Bytes> {\n let mut val: u64 = 0;\n\n match unsafe { blkgetsize64(file.as_raw_fd(), &mut val) } {\n Err(x) => Err(EngineError::Nix(x)),\n Ok(_) => Ok(Bytes(val)),\n }\n}\n\n\/\/\/ Write buf at offset length times.\npub fn write_sectors(path: &Path,\n offset: Sectors,\n length: Sectors,\n buf: &[u8; SECTOR_SIZE])\n -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(path));\n\n try!(f.seek(SeekFrom::Start(*offset)));\n for _ in 0..*length {\n try!(f.write_all(buf));\n }\n\n try!(f.flush());\n Ok(())\n}\n\n\/\/\/ Zero sectors at the given offset for length sectors.\npub fn wipe_sectors(path: &Path, offset: Sectors, length: Sectors) -> EngineResult<()> {\n write_sectors(path, offset, length, &[0u8; SECTOR_SIZE])\n}\n<commit_msg>Broaden argument type for wipe_sectors and write_sectors<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Functions for dealing with devices.\n\nuse std::fs::File;\nuse std::io::{Seek, Write, SeekFrom};\nuse std::fs::OpenOptions;\nuse std::os::unix::prelude::AsRawFd;\nuse std::path::Path;\n\nuse devicemapper::consts::SECTOR_SIZE;\nuse devicemapper::{Bytes, Sectors};\n\nuse super::super::errors::{EngineResult, EngineError};\n\nioctl!(read blkgetsize64 with 0x12, 114; u64);\n\npub fn blkdev_size(file: &File) -> EngineResult<Bytes> {\n let mut val: u64 = 0;\n\n match unsafe { blkgetsize64(file.as_raw_fd(), &mut val) } {\n Err(x) => Err(EngineError::Nix(x)),\n Ok(_) => Ok(Bytes(val)),\n }\n}\n\n\/\/\/ Write buf at offset length times.\npub fn write_sectors<P: AsRef<Path>>(path: P,\n offset: Sectors,\n length: Sectors,\n buf: &[u8; SECTOR_SIZE])\n -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(path));\n\n try!(f.seek(SeekFrom::Start(*offset)));\n for _ in 0..*length {\n try!(f.write_all(buf));\n }\n\n try!(f.flush());\n Ok(())\n}\n\n\/\/\/ Zero sectors at the given offset for length sectors.\npub fn wipe_sectors<P: AsRef<Path>>(path: P, offset: Sectors, length: Sectors) -> EngineResult<()> {\n write_sectors(path, offset, length, &[0u8; SECTOR_SIZE])\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>command refactor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary clone() call<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
Liveness analysis which computes liveness of MIR local variables at the boundary of basic blocks\n\/\/!\n\/\/! This analysis considers references as being used only at the point of the\n\/\/! borrow. This means that this does not track uses because of references that\n\/\/! already exist:\n\/\/!\n\/\/! ```Rust\n\/\/! fn foo() {\n\/\/! x = 0;\n\/\/! \/\/ `x` is live here\n\/\/! GLOBAL = &x: *const u32;\n\/\/! \/\/ but not here, even while it can be accessed through `GLOBAL`.\n\/\/! foo();\n\/\/! x = 1;\n\/\/! \/\/ `x` is live again here, because it is assigned to `OTHER_GLOBAL`\n\/\/! OTHER_GLOBAL = &x: *const u32;\n\/\/! \/\/ ...\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! This means that users of this analysis still have to check whether\n\/\/! pre-existing references can be used to access the value (e.g. at movable\n\/\/! generator yield points, all pre-existing references are invalidated, so this\n\/\/! doesn't matter).\n\nuse rustc::mir::*;\nuse rustc::mir::visit::{LvalueContext, Visitor};\nuse rustc_data_structures::indexed_vec::{IndexVec, Idx};\nuse rustc_data_structures::indexed_set::IdxSetBuf;\nuse util::pretty::{write_basic_block, dump_enabled, write_mir_intro};\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::item_path;\nuse std::path::{PathBuf, Path};\nuse std::fs;\nuse rustc::ty::TyCtxt;\nuse std::io::{self, Write};\n\npub type LocalSet = IdxSetBuf<Local>;\n\n#[derive(Eq, PartialEq, Clone)]\nstruct DefsUses {\n defs: LocalSet,\n uses: LocalSet,\n}\n\nimpl DefsUses {\n fn apply(&self, bits: &mut LocalSet) -> bool {\n bits.subtract(&self.defs) | bits.union(&self.uses)\n }\n\n fn add_def(&mut self, index: Local) {\n \/\/ If it was used already in the block, remove that use\n \/\/ now that we found a definition.\n \/\/\n \/\/ Example:\n \/\/\n \/\/ \/\/ Defs = {X}, Uses = {}\n \/\/ X = 5\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n self.uses.remove(&index);\n self.defs.add(&index);\n }\n\n fn add_use(&mut self, index: Local) {\n \/\/ Inverse of above.\n \/\/\n \/\/ Example:\n \/\/\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n \/\/ \/\/ Defs = {X}, Uses = {}\n \/\/ X = 5\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n self.defs.remove(&index);\n self.uses.add(&index);\n }\n}\n\nimpl<'tcx> Visitor<'tcx> for DefsUses {\n fn visit_local(&mut self,\n &local: &Local,\n context: LvalueContext<'tcx>,\n _: Location) {\n match context {\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ DEFS\n\n LvalueContext::Store |\n\n \/\/ We let Call defined the result in both the success and\n \/\/ unwind cases. This is not really correct, however it\n \/\/ does not seem to be observable due to the way that we\n \/\/ generate MIR. See the test case\n \/\/ `mir-opt\/nll\/liveness-call-subtlety.rs`. To do things\n \/\/ properly, we would apply the def in call only to the\n \/\/ input from the success path and not the unwind\n \/\/ path. -nmatsakis\n LvalueContext::Call |\n\n \/\/ Storage live and storage dead aren't proper defines, but we can ignore\n \/\/ values that come before them.\n LvalueContext::StorageLive |\n LvalueContext::StorageDead => {\n self.add_def(local);\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ USES\n\n LvalueContext::Projection(..) 
|\n\n \/\/ Borrows only consider their local used at the point of the borrow.\n \/\/ This won't affect the results since we use this analysis for generators\n \/\/ and we only care about the result at suspension points. Borrows cannot\n \/\/ cross suspension points so this behavior is unproblematic.\n LvalueContext::Borrow { .. } |\n\n LvalueContext::Inspect |\n LvalueContext::Consume |\n LvalueContext::Validate |\n\n \/\/ We consider drops to always be uses of locals.\n \/\/ Drop eloboration should be run before this analysis otherwise\n \/\/ the results might be too pessimistic.\n LvalueContext::Drop => {\n self.add_use(local);\n }\n }\n }\n}\n\nfn block<'tcx>(b: &BasicBlockData<'tcx>, locals: usize) -> DefsUses {\n let mut visitor = DefsUses {\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n let dummy_location = Location { block: BasicBlock::new(0), statement_index: 0 };\n\n \/\/ Visit the various parts of the basic block in reverse. If we go\n \/\/ forward, the logic in `add_def` and `add_use` would be wrong.\n visitor.visit_terminator(BasicBlock::new(0), b.terminator(), dummy_location);\n for statement in b.statements.iter().rev() {\n visitor.visit_statement(BasicBlock::new(0), statement, dummy_location);\n }\n\n visitor\n}\n\n\/\/ This gives the result of the liveness analysis at the boundary of basic blocks\npub struct LivenessResult {\n pub ins: IndexVec<BasicBlock, LocalSet>,\n pub outs: IndexVec<BasicBlock, LocalSet>,\n}\n\npub fn liveness_of_locals<'tcx>(mir: &Mir<'tcx>) -> LivenessResult {\n let locals = mir.local_decls.len();\n let def_use: IndexVec<_, _> = mir.basic_blocks().iter().map(|b| {\n block(b, locals)\n }).collect();\n\n let copy = |from: &IndexVec<BasicBlock, LocalSet>, to: &mut IndexVec<BasicBlock, LocalSet>| {\n for (i, set) in to.iter_enumerated_mut() {\n set.clone_from(&from[i]);\n }\n };\n\n let mut ins: IndexVec<_, _> = mir.basic_blocks()\n .indices()\n .map(|_| LocalSet::new_empty(locals)).collect();\n let mut outs = ins.clone();\n\n let mut ins_ = ins.clone();\n let mut outs_ = outs.clone();\n\n loop {\n copy(&ins, &mut ins_);\n copy(&outs, &mut outs_);\n\n for b in mir.basic_blocks().indices().rev() {\n \/\/ out = ∪ {ins of successors}\n outs[b].clear();\n for &successor in mir.basic_blocks()[b].terminator().successors().into_iter() {\n outs[b].union(&ins[successor]);\n }\n\n \/\/ in = use ∪ (out - def)\n ins[b].clone_from(&outs[b]);\n\n \/\/ FIXME use the return value to detect if we have changed things\n def_use[b].apply(&mut ins[b]);\n }\n\n if ins_ == ins && outs_ == outs {\n break;\n }\n }\n\n LivenessResult {\n ins,\n outs,\n }\n}\n\nimpl LivenessResult {\n \/\/\/ Walks backwards through the statements\/terminator in the given\n \/\/\/ basic block `block`. 
At each point within `block`, invokes\n \/\/\/ the callback `op` with the current location and the set of\n \/\/\/ variables that are live on entry to that location.\n pub fn simulate_block<'tcx, OP>(&self,\n mir: &Mir<'tcx>,\n block: BasicBlock,\n mut callback: OP)\n where OP: FnMut(Location, &LocalSet)\n {\n let data = &mir[block];\n\n \/\/ Get a copy of the bits on exit from the block.\n let mut bits = self.outs[block].clone();\n\n \/\/ Start with the maximal statement index -- i.e., right before\n \/\/ the terminator executes.\n let mut statement_index = data.statements.len();\n\n \/\/ Compute liveness right before terminator and invoke callback.\n let terminator_location = Location { block, statement_index };\n let terminator_defs_uses = self.defs_uses(mir, terminator_location, &data.terminator);\n terminator_defs_uses.apply(&mut bits);\n callback(terminator_location, &bits);\n\n \/\/ Compute liveness before each statement (in rev order) and invoke callback.\n for statement in data.statements.iter().rev() {\n statement_index -= 1;\n let statement_location = Location { block, statement_index };\n let statement_defs_uses = self.defs_uses(mir, statement_location, statement);\n statement_defs_uses.apply(&mut bits);\n callback(statement_location, &bits);\n }\n\n assert_eq!(bits, self.ins[block]);\n }\n\n fn defs_uses<'tcx, V>(&self,\n mir: &Mir<'tcx>,\n location: Location,\n thing: &V)\n -> DefsUses\n where V: MirVisitable<'tcx>,\n {\n let locals = mir.local_decls.len();\n let mut visitor = DefsUses {\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n \/\/ Visit the various parts of the basic block in reverse. If we go\n \/\/ forward, the logic in `add_def` and `add_use` would be wrong.\n thing.apply(location, &mut visitor);\n\n visitor\n }\n}\n\ntrait MirVisitable<'tcx> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>;\n}\n\nimpl<'tcx> MirVisitable<'tcx> for Statement<'tcx> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>\n {\n visitor.visit_statement(location.block,\n self,\n location)\n }\n}\n\nimpl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>\n {\n visitor.visit_terminator(location.block,\n self.as_ref().unwrap(),\n location)\n }\n}\n\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_name, &node_path,\n source, mir, result);\n}\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n node_path: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n let p = Path::new(file_dir);\n file_path.push(p);\n };\n let file_name = format!(\"rustc.node{}{}-liveness.mir\",\n source.item_id(), pass_name);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR local liveness analysis for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\")?;\n 
write_mir_fn(tcx, source, mir, &mut file, result)?;\n Ok(())\n });\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n result: &LivenessResult)\n -> io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n let print = |w: &mut Write, prefix, result: &IndexVec<BasicBlock, LocalSet>| {\n let live: Vec<String> = mir.local_decls.indices()\n .filter(|i| result[block].contains(i))\n .map(|i| format!(\"{:?}\", i))\n .collect();\n writeln!(w, \"{} {{{}}}\", prefix, live.join(\", \"))\n };\n print(w, \" \", &result.ins)?;\n write_basic_block(tcx, block, mir, &mut |_, _| Ok(()), w)?;\n print(w, \" \", &result.outs)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n<commit_msg>avoid unnecessary copies in liveness computation<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Liveness analysis which computes liveness of MIR local variables at the boundary of basic blocks\n\/\/!\n\/\/! This analysis considers references as being used only at the point of the\n\/\/! borrow. This means that this does not track uses because of references that\n\/\/! already exist:\n\/\/!\n\/\/! ```Rust\n\/\/! fn foo() {\n\/\/! x = 0;\n\/\/! \/\/ `x` is live here\n\/\/! GLOBAL = &x: *const u32;\n\/\/! \/\/ but not here, even while it can be accessed through `GLOBAL`.\n\/\/! foo();\n\/\/! x = 1;\n\/\/! \/\/ `x` is live again here, because it is assigned to `OTHER_GLOBAL`\n\/\/! OTHER_GLOBAL = &x: *const u32;\n\/\/! \/\/ ...\n\/\/! }\n\/\/! ```\n\/\/!\n\/\/! This means that users of this analysis still have to check whether\n\/\/! pre-existing references can be used to access the value (e.g. at movable\n\/\/! generator yield points, all pre-existing references are invalidated, so this\n\/\/! 
doesn't matter).\n\nuse rustc::mir::*;\nuse rustc::mir::visit::{LvalueContext, Visitor};\nuse rustc_data_structures::indexed_vec::{IndexVec, Idx};\nuse rustc_data_structures::indexed_set::IdxSetBuf;\nuse util::pretty::{write_basic_block, dump_enabled, write_mir_intro};\nuse rustc::mir::transform::MirSource;\nuse rustc::ty::item_path;\nuse std::path::{PathBuf, Path};\nuse std::fs;\nuse rustc::ty::TyCtxt;\nuse std::io::{self, Write};\n\npub type LocalSet = IdxSetBuf<Local>;\n\n#[derive(Eq, PartialEq, Clone)]\nstruct DefsUses {\n defs: LocalSet,\n uses: LocalSet,\n}\n\nimpl DefsUses {\n fn apply(&self, bits: &mut LocalSet) -> bool {\n bits.subtract(&self.defs) | bits.union(&self.uses)\n }\n\n fn add_def(&mut self, index: Local) {\n \/\/ If it was used already in the block, remove that use\n \/\/ now that we found a definition.\n \/\/\n \/\/ Example:\n \/\/\n \/\/ \/\/ Defs = {X}, Uses = {}\n \/\/ X = 5\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n self.uses.remove(&index);\n self.defs.add(&index);\n }\n\n fn add_use(&mut self, index: Local) {\n \/\/ Inverse of above.\n \/\/\n \/\/ Example:\n \/\/\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n \/\/ \/\/ Defs = {X}, Uses = {}\n \/\/ X = 5\n \/\/ \/\/ Defs = {}, Uses = {X}\n \/\/ use(X)\n self.defs.remove(&index);\n self.uses.add(&index);\n }\n}\n\nimpl<'tcx> Visitor<'tcx> for DefsUses {\n fn visit_local(&mut self,\n &local: &Local,\n context: LvalueContext<'tcx>,\n _: Location) {\n match context {\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ DEFS\n\n LvalueContext::Store |\n\n \/\/ We let Call defined the result in both the success and\n \/\/ unwind cases. This is not really correct, however it\n \/\/ does not seem to be observable due to the way that we\n \/\/ generate MIR. See the test case\n \/\/ `mir-opt\/nll\/liveness-call-subtlety.rs`. To do things\n \/\/ properly, we would apply the def in call only to the\n \/\/ input from the success path and not the unwind\n \/\/ path. -nmatsakis\n LvalueContext::Call |\n\n \/\/ Storage live and storage dead aren't proper defines, but we can ignore\n \/\/ values that come before them.\n LvalueContext::StorageLive |\n LvalueContext::StorageDead => {\n self.add_def(local);\n }\n\n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \/\/ USES\n\n LvalueContext::Projection(..) |\n\n \/\/ Borrows only consider their local used at the point of the borrow.\n \/\/ This won't affect the results since we use this analysis for generators\n \/\/ and we only care about the result at suspension points. Borrows cannot\n \/\/ cross suspension points so this behavior is unproblematic.\n LvalueContext::Borrow { .. } |\n\n LvalueContext::Inspect |\n LvalueContext::Consume |\n LvalueContext::Validate |\n\n \/\/ We consider drops to always be uses of locals.\n \/\/ Drop eloboration should be run before this analysis otherwise\n \/\/ the results might be too pessimistic.\n LvalueContext::Drop => {\n self.add_use(local);\n }\n }\n }\n}\n\nfn block<'tcx>(b: &BasicBlockData<'tcx>, locals: usize) -> DefsUses {\n let mut visitor = DefsUses {\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n let dummy_location = Location { block: BasicBlock::new(0), statement_index: 0 };\n\n \/\/ Visit the various parts of the basic block in reverse. 
If we go\n \/\/ forward, the logic in `add_def` and `add_use` would be wrong.\n visitor.visit_terminator(BasicBlock::new(0), b.terminator(), dummy_location);\n for statement in b.statements.iter().rev() {\n visitor.visit_statement(BasicBlock::new(0), statement, dummy_location);\n }\n\n visitor\n}\n\n\/\/ This gives the result of the liveness analysis at the boundary of basic blocks\npub struct LivenessResult {\n pub ins: IndexVec<BasicBlock, LocalSet>,\n pub outs: IndexVec<BasicBlock, LocalSet>,\n}\n\npub fn liveness_of_locals<'tcx>(mir: &Mir<'tcx>) -> LivenessResult {\n let locals = mir.local_decls.len();\n let def_use: IndexVec<_, _> = mir.basic_blocks().iter().map(|b| {\n block(b, locals)\n }).collect();\n\n let mut ins: IndexVec<_, _> = mir.basic_blocks()\n .indices()\n .map(|_| LocalSet::new_empty(locals))\n .collect();\n let mut outs = ins.clone();\n\n let mut changed = true;\n let mut bits = LocalSet::new_empty(locals);\n while changed {\n changed = false;\n\n for b in mir.basic_blocks().indices().rev() {\n \/\/ outs[b] = ∪ {ins of successors}\n bits.clear();\n for &successor in mir.basic_blocks()[b].terminator().successors().into_iter() {\n bits.union(&ins[successor]);\n }\n outs[b].clone_from(&bits);\n\n \/\/ bits = use ∪ (bits - def)\n def_use[b].apply(&mut bits);\n\n \/\/ update bits on entry and flag if they have changed\n if ins[b] != bits {\n ins[b].clone_from(&bits);\n changed = true;\n }\n }\n }\n\n LivenessResult {\n ins,\n outs,\n }\n}\n\nimpl LivenessResult {\n \/\/\/ Walks backwards through the statements\/terminator in the given\n \/\/\/ basic block `block`. At each point within `block`, invokes\n \/\/\/ the callback `op` with the current location and the set of\n \/\/\/ variables that are live on entry to that location.\n pub fn simulate_block<'tcx, OP>(&self,\n mir: &Mir<'tcx>,\n block: BasicBlock,\n mut callback: OP)\n where OP: FnMut(Location, &LocalSet)\n {\n let data = &mir[block];\n\n \/\/ Get a copy of the bits on exit from the block.\n let mut bits = self.outs[block].clone();\n\n \/\/ Start with the maximal statement index -- i.e., right before\n \/\/ the terminator executes.\n let mut statement_index = data.statements.len();\n\n \/\/ Compute liveness right before terminator and invoke callback.\n let terminator_location = Location { block, statement_index };\n let terminator_defs_uses = self.defs_uses(mir, terminator_location, &data.terminator);\n terminator_defs_uses.apply(&mut bits);\n callback(terminator_location, &bits);\n\n \/\/ Compute liveness before each statement (in rev order) and invoke callback.\n for statement in data.statements.iter().rev() {\n statement_index -= 1;\n let statement_location = Location { block, statement_index };\n let statement_defs_uses = self.defs_uses(mir, statement_location, statement);\n statement_defs_uses.apply(&mut bits);\n callback(statement_location, &bits);\n }\n\n assert_eq!(bits, self.ins[block]);\n }\n\n fn defs_uses<'tcx, V>(&self,\n mir: &Mir<'tcx>,\n location: Location,\n thing: &V)\n -> DefsUses\n where V: MirVisitable<'tcx>,\n {\n let locals = mir.local_decls.len();\n let mut visitor = DefsUses {\n defs: LocalSet::new_empty(locals),\n uses: LocalSet::new_empty(locals),\n };\n\n \/\/ Visit the various parts of the basic block in reverse. 
If we go\n \/\/ forward, the logic in `add_def` and `add_use` would be wrong.\n thing.apply(location, &mut visitor);\n\n visitor\n }\n}\n\ntrait MirVisitable<'tcx> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>;\n}\n\nimpl<'tcx> MirVisitable<'tcx> for Statement<'tcx> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>\n {\n visitor.visit_statement(location.block,\n self,\n location)\n }\n}\n\nimpl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> {\n fn apply<V>(&self, location: Location, visitor: &mut V)\n where V: Visitor<'tcx>\n {\n visitor.visit_terminator(location.block,\n self.as_ref().unwrap(),\n location)\n }\n}\n\npub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n if !dump_enabled(tcx, pass_name, source) {\n return;\n }\n let node_path = item_path::with_forced_impl_filename_line(|| { \/\/ see notes on #41697 below\n tcx.item_path_str(tcx.hir.local_def_id(source.item_id()))\n });\n dump_matched_mir_node(tcx, pass_name, &node_path,\n source, mir, result);\n}\n\nfn dump_matched_mir_node<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n pass_name: &str,\n node_path: &str,\n source: MirSource,\n mir: &Mir<'tcx>,\n result: &LivenessResult) {\n let mut file_path = PathBuf::new();\n if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {\n let p = Path::new(file_dir);\n file_path.push(p);\n };\n let file_name = format!(\"rustc.node{}{}-liveness.mir\",\n source.item_id(), pass_name);\n file_path.push(&file_name);\n let _ = fs::File::create(&file_path).and_then(|mut file| {\n writeln!(file, \"\/\/ MIR local liveness analysis for `{}`\", node_path)?;\n writeln!(file, \"\/\/ source = {:?}\", source)?;\n writeln!(file, \"\/\/ pass_name = {}\", pass_name)?;\n writeln!(file, \"\")?;\n write_mir_fn(tcx, source, mir, &mut file, result)?;\n Ok(())\n });\n}\n\npub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &Mir<'tcx>,\n w: &mut Write,\n result: &LivenessResult)\n -> io::Result<()> {\n write_mir_intro(tcx, src, mir, w)?;\n for block in mir.basic_blocks().indices() {\n let print = |w: &mut Write, prefix, result: &IndexVec<BasicBlock, LocalSet>| {\n let live: Vec<String> = mir.local_decls.indices()\n .filter(|i| result[block].contains(i))\n .map(|i| format!(\"{:?}\", i))\n .collect();\n writeln!(w, \"{} {{{}}}\", prefix, live.join(\", \"))\n };\n print(w, \" \", &result.ins)?;\n write_basic_block(tcx, block, mir, &mut |_, _| Ok(()), w)?;\n print(w, \" \", &result.outs)?;\n if block.index() + 1 != mir.basic_blocks().len() {\n writeln!(w, \"\")?;\n }\n }\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement obj group reg's<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse text::glyph::CharIndex;\n\n#[deriving(PartialEq)]\npub enum CompressionMode {\n CompressNone,\n CompressWhitespace,\n CompressWhitespaceNewline,\n DiscardNewline\n}\n\n\/\/ ported from Gecko's nsTextFrameUtils::TransformText.\n\/\/\n\/\/ High level TODOs:\n\/\/\n\/\/ * Issue #113: consider incoming text state (arabic, etc)\n\/\/ and propagate outgoing text state (dual of above)\n\/\/\n\/\/ * Issue #114: record skipped and kept chars for mapping original to new text\n\/\/\n\/\/ * Untracked: various edge cases for bidi, CJK, etc.\npub fn transform_text(text: &str,\n mode: CompressionMode,\n incoming_whitespace: bool,\n output_text: &mut String,\n new_line_pos: &mut Vec<CharIndex>)\n -> bool {\n let out_whitespace = match mode {\n CompressionMode::CompressNone | CompressionMode::DiscardNewline => {\n let mut new_line_index = CharIndex(0);\n for ch in text.chars() {\n if is_discardable_char(ch, mode) {\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n if ch == '\\t' {\n \/\/ TODO: set \"has tab\" flag\n } else if ch == '\\n' {\n \/\/ Save new-line's position for line-break\n \/\/ This value is relative(not absolute)\n new_line_pos.push(new_line_index);\n new_line_index = CharIndex(0);\n }\n\n if ch != '\\n' {\n new_line_index = new_line_index + CharIndex(1);\n }\n output_text.push(ch);\n }\n }\n text.len() > 0 && is_in_whitespace(text.char_at_reverse(0), mode)\n },\n\n CompressionMode::CompressWhitespace | CompressionMode::CompressWhitespaceNewline => {\n let mut in_whitespace: bool = incoming_whitespace;\n for ch in text.chars() {\n \/\/ TODO: discard newlines between CJK chars\n let mut next_in_whitespace: bool = is_in_whitespace(ch, mode);\n\n if !next_in_whitespace {\n if is_always_discardable_char(ch) {\n \/\/ revert whitespace setting, since this char was discarded\n next_in_whitespace = in_whitespace;\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n output_text.push(ch);\n }\n } else { \/* next_in_whitespace; possibly add a space char *\/\n if in_whitespace {\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n output_text.push(' ');\n }\n }\n \/\/ save whitespace context for next char\n in_whitespace = next_in_whitespace;\n } \/* \/for str::each_char *\/\n in_whitespace\n }\n };\n\n return out_whitespace;\n\n fn is_in_whitespace(ch: char, mode: CompressionMode) -> bool {\n match (ch, mode) {\n (' ', _) => true,\n ('\\t', _) => true,\n ('\\n', CompressionMode::CompressWhitespaceNewline) => true,\n (_, _) => false\n }\n }\n\n fn is_discardable_char(ch: char, mode: CompressionMode) -> bool {\n if is_always_discardable_char(ch) {\n return true;\n }\n match mode {\n CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch == '\\n',\n _ => false\n }\n }\n\n fn is_always_discardable_char(_ch: char) -> bool {\n \/\/ TODO: check for bidi control chars, soft hyphens.\n false\n }\n}\n\npub fn float_to_fixed(before: int, f: f64) -> i32 {\n ((1i32 << before as uint) as f64 * f) as i32\n}\n\npub fn fixed_to_float(before: int, f: i32) -> f64 {\n f as f64 * 1.0f64 \/ ((1i32 << before as uint) as f64)\n}\n\npub fn fixed_to_rounded_int(before: int, f: i32) -> int {\n let half = 1i32 << (before-1) as uint;\n if f > 0i32 {\n ((half + f) >> before as uint) as int\n } else {\n -((half - f) >> before as uint) as int\n }\n}\n\n\/* Generate a 32-bit TrueType tag from its 4 characters *\/\npub fn true_type_tag(a: char, b: char, c: char, d: char) -> u32 {\n let a = a as u32;\n let b = b as u32;\n let 
c = c as u32;\n let d = d as u32;\n (a << 24 | b << 16 | c << 8 | d) as u32\n}\n\n#[test]\nfn test_true_type_tag() {\n assert_eq!(true_type_tag('c', 'm', 'a', 'p'), 0x_63_6D_61_70_u32);\n}\n\n#[test]\nfn test_transform_compress_none() {\n let test_strs = [\n \" foo bar\",\n \"foo bar \",\n \"foo\\n bar\",\n \"foo \\nbar\",\n \" foo bar \\nbaz\",\n \"foo bar baz\",\n \"foobarbaz\\n\\n\",\n ];\n\n let mode = CompressionMode::CompressNone;\n for &test in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), test)\n }\n}\n\n#[test]\nfn test_transform_discard_newline() {\n let test_strs = [\n (\" foo bar\", \" foo bar\"),\n (\"foo bar \", \"foo bar \"),\n (\"foo\\n bar\", \"foo bar\"),\n (\"foo \\nbar\", \"foo bar\"),\n (\" foo bar \\nbaz\", \" foo bar baz\"),\n (\"foo bar baz\", \"foo bar baz\"),\n (\"foobarbaz\\n\\n\", \"foobarbaz\"),\n ];\n\n let mode = CompressionMode::DiscardNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace() {\n let test_strs = [\n (\" foo bar\", \"foo bar\"),\n (\"foo bar \", \"foo bar \"),\n (\"foo\\n bar\", \"foo\\n bar\"),\n (\"foo \\nbar\", \"foo \\nbar\"),\n (\" foo bar \\nbaz\", \"foo bar \\nbaz\"),\n (\"foo bar baz\", \"foo bar baz\"),\n (\"foobarbaz\\n\\n\", \"foobarbaz\\n\\n\"),\n ];\n\n let mode = CompressionMode::CompressWhitespace;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(&*trimmed_str, oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace_newline() {\n let test_strs = vec![\n (\" foo bar\", \"foo bar\"),\n (\"foo bar \", \"foo bar \"),\n (\"foo\\n bar\", \"foo bar\"),\n (\"foo \\nbar\", \"foo bar\"),\n (\" foo bar \\nbaz\", \"foo bar baz\"),\n (\"foo bar baz\", \"foo bar baz\"),\n (\"foobarbaz\\n\\n\", \"foobarbaz \"),\n ];\n\n let mode = CompressionMode::CompressWhitespaceNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(&*trimmed_str, oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace_newline_no_incoming() {\n let test_strs = [\n (\" foo bar\", \" foo bar\"),\n (\"\\nfoo bar\", \" foo bar\"),\n (\"foo bar \", \"foo bar \"),\n (\"foo\\n bar\", \"foo bar\"),\n (\"foo \\nbar\", \"foo bar\"),\n (\" foo bar \\nbaz\", \" foo bar baz\"),\n (\"foo bar baz\", \"foo bar baz\"),\n (\"foobarbaz\\n\\n\", \"foobarbaz \"),\n ];\n\n let mode = CompressionMode::CompressWhitespaceNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, false, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), oracle)\n }\n}\n<commit_msg>Reformat the transform_text tests.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse text::glyph::CharIndex;\n\n#[deriving(PartialEq)]\npub enum CompressionMode {\n CompressNone,\n CompressWhitespace,\n CompressWhitespaceNewline,\n DiscardNewline\n}\n\n\/\/ ported from Gecko's nsTextFrameUtils::TransformText.\n\/\/\n\/\/ High level TODOs:\n\/\/\n\/\/ * Issue #113: consider incoming text state (arabic, etc)\n\/\/ and propagate outgoing text state (dual of above)\n\/\/\n\/\/ * Issue #114: record skipped and kept chars for mapping original to new text\n\/\/\n\/\/ * Untracked: various edge cases for bidi, CJK, etc.\npub fn transform_text(text: &str,\n mode: CompressionMode,\n incoming_whitespace: bool,\n output_text: &mut String,\n new_line_pos: &mut Vec<CharIndex>)\n -> bool {\n let out_whitespace = match mode {\n CompressionMode::CompressNone | CompressionMode::DiscardNewline => {\n let mut new_line_index = CharIndex(0);\n for ch in text.chars() {\n if is_discardable_char(ch, mode) {\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n if ch == '\\t' {\n \/\/ TODO: set \"has tab\" flag\n } else if ch == '\\n' {\n \/\/ Save new-line's position for line-break\n \/\/ This value is relative(not absolute)\n new_line_pos.push(new_line_index);\n new_line_index = CharIndex(0);\n }\n\n if ch != '\\n' {\n new_line_index = new_line_index + CharIndex(1);\n }\n output_text.push(ch);\n }\n }\n text.len() > 0 && is_in_whitespace(text.char_at_reverse(0), mode)\n },\n\n CompressionMode::CompressWhitespace | CompressionMode::CompressWhitespaceNewline => {\n let mut in_whitespace: bool = incoming_whitespace;\n for ch in text.chars() {\n \/\/ TODO: discard newlines between CJK chars\n let mut next_in_whitespace: bool = is_in_whitespace(ch, mode);\n\n if !next_in_whitespace {\n if is_always_discardable_char(ch) {\n \/\/ revert whitespace setting, since this char was discarded\n next_in_whitespace = in_whitespace;\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n output_text.push(ch);\n }\n } else { \/* next_in_whitespace; possibly add a space char *\/\n if in_whitespace {\n \/\/ TODO: record skipped char\n } else {\n \/\/ TODO: record kept char\n output_text.push(' ');\n }\n }\n \/\/ save whitespace context for next char\n in_whitespace = next_in_whitespace;\n } \/* \/for str::each_char *\/\n in_whitespace\n }\n };\n\n return out_whitespace;\n\n fn is_in_whitespace(ch: char, mode: CompressionMode) -> bool {\n match (ch, mode) {\n (' ', _) => true,\n ('\\t', _) => true,\n ('\\n', CompressionMode::CompressWhitespaceNewline) => true,\n (_, _) => false\n }\n }\n\n fn is_discardable_char(ch: char, mode: CompressionMode) -> bool {\n if is_always_discardable_char(ch) {\n return true;\n }\n match mode {\n CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch == '\\n',\n _ => false\n }\n }\n\n fn is_always_discardable_char(_ch: char) -> bool {\n \/\/ TODO: check for bidi control chars, soft hyphens.\n false\n }\n}\n\npub fn float_to_fixed(before: int, f: f64) -> i32 {\n ((1i32 << before as uint) as f64 * f) as i32\n}\n\npub fn fixed_to_float(before: int, f: i32) -> f64 {\n f as f64 * 1.0f64 \/ ((1i32 << before as uint) as f64)\n}\n\npub fn fixed_to_rounded_int(before: int, f: i32) -> int {\n let half = 1i32 << (before-1) as uint;\n if f > 0i32 {\n ((half + f) >> before as uint) as int\n } else {\n -((half - f) >> before as uint) as int\n }\n}\n\n\/* Generate a 32-bit TrueType tag from its 4 characters *\/\npub fn true_type_tag(a: char, b: char, c: char, d: char) -> u32 {\n let a = a as u32;\n let b = b as u32;\n let 
c = c as u32;\n let d = d as u32;\n (a << 24 | b << 16 | c << 8 | d) as u32\n}\n\n#[test]\nfn test_true_type_tag() {\n assert_eq!(true_type_tag('c', 'm', 'a', 'p'), 0x_63_6D_61_70_u32);\n}\n\n#[test]\nfn test_transform_compress_none() {\n let test_strs = [\n \" foo bar\",\n \"foo bar \",\n \"foo\\n bar\",\n \"foo \\nbar\",\n \" foo bar \\nbaz\",\n \"foo bar baz\",\n \"foobarbaz\\n\\n\",\n ];\n\n let mode = CompressionMode::CompressNone;\n for &test in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), test)\n }\n}\n\n#[test]\nfn test_transform_discard_newline() {\n let test_strs = [\n (\" foo bar\",\n \" foo bar\"),\n\n (\"foo bar \",\n \"foo bar \"),\n\n (\"foo\\n bar\",\n \"foo bar\"),\n\n (\"foo \\nbar\",\n \"foo bar\"),\n\n (\" foo bar \\nbaz\",\n \" foo bar baz\"),\n\n (\"foo bar baz\",\n \"foo bar baz\"),\n\n (\"foobarbaz\\n\\n\",\n \"foobarbaz\"),\n ];\n\n let mode = CompressionMode::DiscardNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace() {\n let test_strs = [\n (\" foo bar\",\n \"foo bar\"),\n\n (\"foo bar \",\n \"foo bar \"),\n\n (\"foo\\n bar\",\n \"foo\\n bar\"),\n\n (\"foo \\nbar\",\n \"foo \\nbar\"),\n\n (\" foo bar \\nbaz\",\n \"foo bar \\nbaz\"),\n\n (\"foo bar baz\",\n \"foo bar baz\"),\n\n (\"foobarbaz\\n\\n\",\n \"foobarbaz\\n\\n\"),\n ];\n\n let mode = CompressionMode::CompressWhitespace;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(&*trimmed_str, oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace_newline() {\n let test_strs = vec![\n (\" foo bar\",\n \"foo bar\"),\n\n (\"foo bar \",\n \"foo bar \"),\n\n (\"foo\\n bar\",\n \"foo bar\"),\n\n (\"foo \\nbar\",\n \"foo bar\"),\n\n (\" foo bar \\nbaz\",\n \"foo bar baz\"),\n\n (\"foo bar baz\",\n \"foo bar baz\"),\n\n (\"foobarbaz\\n\\n\",\n \"foobarbaz \"),\n ];\n\n let mode = CompressionMode::CompressWhitespaceNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, true, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(&*trimmed_str, oracle)\n }\n}\n\n#[test]\nfn test_transform_compress_whitespace_newline_no_incoming() {\n let test_strs = [\n (\" foo bar\",\n \" foo bar\"),\n\n (\"\\nfoo bar\",\n \" foo bar\"),\n\n (\"foo bar \",\n \"foo bar \"),\n\n (\"foo\\n bar\",\n \"foo bar\"),\n\n (\"foo \\nbar\",\n \"foo bar\"),\n\n (\" foo bar \\nbaz\",\n \" foo bar baz\"),\n\n (\"foo bar baz\",\n \"foo bar baz\"),\n\n (\"foobarbaz\\n\\n\",\n \"foobarbaz \"),\n ];\n\n let mode = CompressionMode::CompressWhitespaceNewline;\n for &(test, oracle) in test_strs.iter() {\n let mut new_line_pos = vec![];\n let mut trimmed_str = String::new();\n transform_text(test, mode, false, &mut trimmed_str, &mut new_line_pos);\n assert_eq!(trimmed_str.as_slice(), oracle)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>clean up util<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>turn several chacha functions into macros<commit_after><|endoftext|>"} 
{"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::Bindings::MessageEventBinding;\nuse dom::bindings::codegen::InheritTypes::{EventCast, MessageEventDerived};\nuse dom::bindings::error::Fallible;\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::trace::Traceable;\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::event::{Event, EventMethods, MessageEventTypeId};\n\nuse servo_util::str::DOMString;\n\nuse js::jsapi::JSContext;\nuse js::jsval::JSVal;\n\n#[deriving(Encodable)]\npub struct MessageEvent {\n event: Event,\n data: Traceable<JSVal>,\n origin: DOMString,\n lastEventId: DOMString,\n}\n\nimpl MessageEventDerived for Event {\n fn is_messageevent(&self) -> bool {\n self.type_id == MessageEventTypeId\n }\n}\n\nimpl MessageEvent {\n pub fn new_inherited(data: JSVal, origin: DOMString, lastEventId: DOMString)\n -> MessageEvent {\n MessageEvent {\n event: Event::new_inherited(MessageEventTypeId),\n data: Traceable::new(data),\n origin: origin,\n lastEventId: lastEventId,\n }\n }\n\n pub fn new(global: &GlobalRef, type_: DOMString,\n bubbles: bool, cancelable: bool,\n data: JSVal, origin: DOMString, lastEventId: DOMString)\n -> Temporary<MessageEvent> {\n let ev = reflect_dom_object(box MessageEvent::new_inherited(data, origin, lastEventId),\n global,\n MessageEventBinding::Wrap).root();\n let event: &JSRef<Event> = EventCast::from_ref(&*ev);\n event.InitEvent(type_, bubbles, cancelable);\n Temporary::from_rooted(&*ev)\n }\n\n pub fn Constructor(global: &GlobalRef,\n type_: DOMString,\n init: &MessageEventBinding::MessageEventInit)\n -> Fallible<Temporary<MessageEvent>> {\n let ev = MessageEvent::new(global, type_, init.parent.bubbles, init.parent.cancelable,\n init.data, init.origin.clone(), init.lastEventId.clone());\n Ok(ev)\n }\n}\n\npub trait MessageEventMethods {\n fn Data(&self, cx: *mut JSContext) -> JSVal;\n fn Origin(&self) -> DOMString;\n fn LastEventId(&self) -> DOMString;\n}\n\nimpl<'a> MessageEventMethods for JSRef<'a, MessageEvent> {\n fn Data(&self, _cx: *mut JSContext) -> JSVal {\n *self.data\n }\n\n fn Origin(&self) -> DOMString {\n self.origin.clone()\n }\n\n fn LastEventId(&self) -> DOMString {\n self.lastEventId.clone()\n }\n}\n\nimpl Reflectable for MessageEvent {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.event.reflector()\n }\n}\n<commit_msg>Implement MessageEvent::dispatch.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::Bindings::MessageEventBinding;\nuse dom::bindings::codegen::InheritTypes::{EventCast, MessageEventDerived};\nuse dom::bindings::conversions::ToJSValConvertible;\nuse dom::bindings::error::Fallible;\nuse dom::bindings::global::GlobalRef;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::trace::Traceable;\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::event::{Event, EventMethods, MessageEventTypeId};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers};\n\nuse servo_util::str::DOMString;\n\nuse js::jsapi::JSContext;\nuse js::jsval::JSVal;\n\n#[deriving(Encodable)]\npub struct MessageEvent {\n event: Event,\n data: Traceable<JSVal>,\n origin: DOMString,\n lastEventId: DOMString,\n}\n\nimpl MessageEventDerived for Event {\n fn is_messageevent(&self) -> bool {\n self.type_id == MessageEventTypeId\n }\n}\n\nimpl MessageEvent {\n pub fn new_inherited(data: JSVal, origin: DOMString, lastEventId: DOMString)\n -> MessageEvent {\n MessageEvent {\n event: Event::new_inherited(MessageEventTypeId),\n data: Traceable::new(data),\n origin: origin,\n lastEventId: lastEventId,\n }\n }\n\n pub fn new(global: &GlobalRef, type_: DOMString,\n bubbles: bool, cancelable: bool,\n data: JSVal, origin: DOMString, lastEventId: DOMString)\n -> Temporary<MessageEvent> {\n let ev = reflect_dom_object(box MessageEvent::new_inherited(data, origin, lastEventId),\n global,\n MessageEventBinding::Wrap).root();\n let event: &JSRef<Event> = EventCast::from_ref(&*ev);\n event.InitEvent(type_, bubbles, cancelable);\n Temporary::from_rooted(&*ev)\n }\n\n pub fn Constructor(global: &GlobalRef,\n type_: DOMString,\n init: &MessageEventBinding::MessageEventInit)\n -> Fallible<Temporary<MessageEvent>> {\n let ev = MessageEvent::new(global, type_, init.parent.bubbles, init.parent.cancelable,\n init.data, init.origin.clone(), init.lastEventId.clone());\n Ok(ev)\n }\n}\n\nimpl MessageEvent {\n pub fn dispatch(target: &JSRef<EventTarget>,\n scope: &GlobalRef,\n message: DOMString) {\n let messageevent = MessageEvent::new(\n scope, \"message\".to_string(), false, false,\n message.to_jsval(scope.get_cx()),\n \"\".to_string(), \"\".to_string()).root();\n let event: &JSRef<Event> = EventCast::from_ref(&*messageevent);\n target.dispatch_event_with_target(None, &*event).unwrap();\n }\n}\n\npub trait MessageEventMethods {\n fn Data(&self, cx: *mut JSContext) -> JSVal;\n fn Origin(&self) -> DOMString;\n fn LastEventId(&self) -> DOMString;\n}\n\nimpl<'a> MessageEventMethods for JSRef<'a, MessageEvent> {\n fn Data(&self, _cx: *mut JSContext) -> JSVal {\n *self.data\n }\n\n fn Origin(&self) -> DOMString {\n self.origin.clone()\n }\n\n fn LastEventId(&self) -> DOMString {\n self.lastEventId.clone()\n }\n}\n\nimpl Reflectable for MessageEvent {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.event.reflector()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #782 - RalfJung:generator, r=RalfJung<commit_after>#![feature(generators, generator_trait)]\n\nuse std::{\n ops::{Generator, GeneratorState},\n pin::Pin,\n};\n\nfn firstn() -> impl Generator<Yield = u64, Return = ()> {\n static move || {\n let mut num = 0;\n let num = &mut num;\n\n yield *num;\n *num += 1; \/\/~ ERROR dangling pointer was dereferenced\n }\n}\n\nstruct GeneratorIteratorAdapter<G>(G);\n\nimpl<G> Iterator for GeneratorIteratorAdapter<G>\nwhere\n G: Generator<Return = ()>,\n{\n type Item = G::Yield;\n\n fn next(&mut self) -> Option<Self::Item> {\n let me = unsafe { 
Pin::new_unchecked(&mut self.0) };\n match me.resume() {\n GeneratorState::Yielded(x) => Some(x),\n GeneratorState::Complete(_) => None,\n }\n }\n}\n\nfn main() {\n let mut generator_iterator_2 = {\n let mut generator_iterator = GeneratorIteratorAdapter(firstn());\n generator_iterator.next(); \/\/ pin it\n\n generator_iterator \/\/ move it\n }; \/\/ *deallocate* generator_iterator\n\n generator_iterator_2.next(); \/\/ and use moved value\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added test for reference fields in structs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for inline asm.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![feature(asm)]\n#![crate_type=\"rlib\"]\n\n\n\n\/\/ Change template -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_template(a: i32) -> i32 {\n let c: i32;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(c)\n : \"0\"(a)\n :\n :\n );\n }\n c\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_template(a: i32) -> i32 {\n let c: i32;\n unsafe {\n asm!(\"add 2, $0\"\n : \"=r\"(c)\n : \"0\"(a)\n :\n :\n );\n }\n c\n}\n\n\n\n\/\/ Change output -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_output(a: i32) -> i32 {\n let mut _out1: i32 = 0;\n let mut _out2: i32 = 0;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out1)\n : \"0\"(a)\n :\n :\n );\n }\n _out1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_output(a: i32) -> i32 {\n let mut _out1: i32 = 0;\n let mut _out2: i32 = 0;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out2)\n : \"0\"(a)\n :\n :\n );\n }\n _out1\n}\n\n\n\n\/\/ Change input -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_input(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", 
cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_input(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_b)\n :\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change input constraint -----------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_input_constraint(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a), \"r\"(_b)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_input_constraint(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"r\"(_a), \"0\"(_b)\n :\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change clobber --------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_clobber(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_clobber(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n : \"eax\"\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change options --------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_options(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_dirty(label=\"HirBody\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"HirBody\", cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\nfn change_options(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n : \"volatile\"\n );\n }\n _out\n}\n\n\n\n<commit_msg>Update inline asm fingerprint hash tests<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for inline asm.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![feature(asm)]\n#![crate_type=\"rlib\"]\n\n\n\n\/\/ Change template -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_template(a: i32) -> i32 {\n let c: i32;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(c)\n : \"0\"(a)\n :\n :\n );\n }\n c\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_template(a: i32) -> i32 {\n let c: i32;\n unsafe {\n asm!(\"add 2, $0\"\n : \"=r\"(c)\n : \"0\"(a)\n :\n :\n );\n }\n c\n}\n\n\n\n\/\/ Change output -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_output(a: i32) -> i32 {\n let mut _out1: i32 = 0;\n let mut _out2: i32 = 0;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out1)\n : \"0\"(a)\n :\n :\n );\n }\n _out1\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_output(a: i32) -> i32 {\n let mut _out1: i32 = 0;\n let mut _out2: i32 = 0;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out2)\n : \"0\"(a)\n :\n :\n );\n }\n _out1\n}\n\n\n\n\/\/ Change input -------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_input(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_input(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_b)\n :\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change input constraint -----------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_input_constraint(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a), \"r\"(_b)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_input_constraint(_a: i32, _b: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"r\"(_a), \"0\"(_b)\n :\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change clobber --------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn 
change_clobber(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_clobber(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n : \"eax\"\n :\n );\n }\n _out\n}\n\n\n\n\/\/ Change options --------------------------------------------------------------\n#[cfg(cfail1)]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_options(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n :\n );\n }\n _out\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(cfg=\"cfail2\", except=\"HirBody, MirValidated, MirOptimized\")]\n#[rustc_clean(cfg=\"cfail3\")]\n#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\npub fn change_options(_a: i32) -> i32 {\n let _out;\n unsafe {\n asm!(\"add 1, $0\"\n : \"=r\"(_out)\n : \"0\"(_a)\n :\n : \"volatile\"\n );\n }\n _out\n}\n\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #63394 - jackh726:issue-36804, r=jonas-schievink<commit_after>\/\/ check-pass\n#![feature(specialization)]\n\npub struct Cloned<I>(I);\n\nimpl<'a, I, T: 'a> Iterator for Cloned<I>\nwhere\n I: Iterator<Item = &'a T>,\n T: Clone,\n{\n type Item = T;\n\n fn next(&mut self) -> Option<T> {\n unimplemented!()\n }\n}\n\nimpl<'a, I, T: 'a> Iterator for Cloned<I>\nwhere\n I: Iterator<Item = &'a T>,\n T: Copy,\n{\n fn count(self) -> usize {\n unimplemented!()\n }\n}\n\nfn main() {\n let a = [1,2,3,4];\n Cloned(a.iter()).count();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added some impl macros<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Second-order benchmark<commit_after>\/\/! FRP benchmarks from https:\/\/github.com\/tsurucapital\/frp-benchmarks\n#![feature(test)]\n\nextern crate test;\nextern crate rand;\nextern crate carboxyl;\n\nuse test::Bencher;\nuse rand::{XorShiftRng, sample};\nuse carboxyl::Sink;\n\n\n\/\/\/ Second-order benchmark.\n\/\/\/\n\/\/\/ Generate `n_sinks` `Stream<()>`, then for each stream create a `Cell<i32>`\n\/\/\/ that counts the number of firings. Create a `Stream<Cell<i32>>` that every\n\/\/\/ 10 network steps sequentially moves to the next cell. Create a `Cell<i32>`\n\/\/\/ from this stream. 
At each network step, fire 10 `Stream<()>` at random,\n\/\/\/ then print the current value of the `Cell<i32>`.\n\/\/\/\n\/\/\/ Benchmark the time required for `n_steps` steps.\nfn second_order(n_sinks: usize, n_steps: usize, b: &mut Bencher) {\n \/\/ Setup network\n let stepper = Sink::<usize>::new();\n let sinks = (0..n_sinks)\n .map(|_| Sink::<()>::new())\n .collect::<Vec<_>>();\n let counters = sinks.iter()\n .map(|sink| sink.stream().scan(0, |n, _| n + 1))\n .collect::<Vec<_>>();\n let walker = {\n let counters = counters.clone();\n stepper.stream().map(move |k| counters[k \/ 10].clone())\n };\n let cell = walker.hold(counters[0].clone()).switch();\n\n \/\/ Feed events\n let mut rng = XorShiftRng::new_unseeded();\n b.iter(|| for i in 0..n_steps {\n stepper.send(i);\n for sink in sample(&mut rng, sinks.iter(), 10) {\n sink.send(());\n }\n format!(\"{}\", cell.sample());\n });\n}\n\n#[bench]\nfn second_order_100(b: &mut Bencher) {\n second_order(1_000, 100, b);\n}\n\n#[bench]\nfn second_order_1k(b: &mut Bencher) {\n second_order(1_000, 1_000, b);\n}\n\n#[bench]\nfn second_order_10k(b: &mut Bencher) {\n second_order(1_000, 10_000, b);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix: add missing swap<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z identify_regions -Z span_free_formats\n\/\/ ignore-tidy-linelength\n\n\/\/ Regression test for #43457: an `EndRegion` was missing from output\n\/\/ because compiler was using a faulty means for region map lookup.\n\nuse std::cell::RefCell;\n\nfn rc_refcell_test(r: RefCell<i32>) {\n r.borrow_mut();\n}\n\nfn main() { }\n\n\/\/ END RUST SOURCE\n\/\/ START rustc.node5.SimplifyCfg-qualify-consts.after.mir\n\/\/\n\/\/ fn rc_refcell_test(_1: std::cell::RefCell<i32>) -> () {\n\/\/ let mut _0: ();\n\/\/ scope 1 {\n\/\/ let _2: std::cell::RefCell<i32>;\n\/\/ }\n\/\/ let mut _3: std::cell::RefMut<'17ds, i32>;\n\/\/ let mut _4: &'17ds std::cell::RefCell<i32>;\n\/\/\n\/\/ bb0: {\n\/\/ StorageLive(_2);\n\/\/ _2 = _1;\n\/\/ StorageLive(_4);\n\/\/ _4 = &'17ds _2;\n\/\/ _3 = const <std::cell::RefCell<T>>::borrow_mut(_4) -> bb1;\n\/\/ }\n\/\/\n\/\/ bb1: {\n\/\/ drop(_3) -> bb2;\n\/\/ }\n\/\/\n\/\/ bb2: {\n\/\/ StorageDead(_4);\n\/\/ EndRegion('17ds);\n\/\/ _0 = ();\n\/\/ StorageDead(_2);\n\/\/ return;\n\/\/ }\n\/\/ }\n<commit_msg>Update regression test to explicit enable `EndRegion` emission.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z identify_regions -Z span_free_formats -Z emit-end-regions\n\/\/ ignore-tidy-linelength\n\n\/\/ Regression test for #43457: an `EndRegion` was missing from output\n\/\/ because compiler was using a faulty means for region map lookup.\n\nuse std::cell::RefCell;\n\nfn rc_refcell_test(r: RefCell<i32>) {\n r.borrow_mut();\n}\n\nfn main() { }\n\n\/\/ END RUST SOURCE\n\/\/ START rustc.node5.SimplifyCfg-qualify-consts.after.mir\n\/\/\n\/\/ fn rc_refcell_test(_1: std::cell::RefCell<i32>) -> () {\n\/\/ let mut _0: ();\n\/\/ scope 1 {\n\/\/ let _2: std::cell::RefCell<i32>;\n\/\/ }\n\/\/ let mut _3: std::cell::RefMut<'17ds, i32>;\n\/\/ let mut _4: &'17ds std::cell::RefCell<i32>;\n\/\/\n\/\/ bb0: {\n\/\/ StorageLive(_2);\n\/\/ _2 = _1;\n\/\/ StorageLive(_4);\n\/\/ _4 = &'17ds _2;\n\/\/ _3 = const <std::cell::RefCell<T>>::borrow_mut(_4) -> bb1;\n\/\/ }\n\/\/\n\/\/ bb1: {\n\/\/ drop(_3) -> bb2;\n\/\/ }\n\/\/\n\/\/ bb2: {\n\/\/ StorageDead(_4);\n\/\/ EndRegion('17ds);\n\/\/ _0 = ();\n\/\/ StorageDead(_2);\n\/\/ return;\n\/\/ }\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use window ratio when constructing projection matrix.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add udp echo server example<commit_after>use mio::net::UdpSocket;\nuse mio::{Events, Interests, Poll, Token};\nuse std::io;\n\nconst IN: Token = Token(0);\n\nfn main() {\n \/\/ Set up a new socket on port 9000 to listen on.\n let socket = UdpSocket::bind(\"0.0.0.0:9000\".parse().unwrap()).unwrap();\n \/\/ Initialize poller.\n let mut poll = Poll::new().unwrap();\n \/\/ Register our socket with the token IN (defined above) and an interest\n \/\/ in being `READABLE`.\n poll.registry()\n .register(&socket, IN, Interests::READABLE)\n .unwrap();\n\n \/\/ Prepare a buffer for the number of events we can handle at a time.\n \/\/ Someone might wat to echo really fast so lets give it some size.\n let mut events = Events::with_capacity(1024);\n \/\/ Initialize a buffer for the UDP datagram\n let mut buf = [0; 65535];\n \/\/ Main loop\n loop {\n \/\/ Poll if we have events waiting for us on the socket.\n poll.poll(&mut events, None).unwrap();\n \/\/ If we do iterate throuigh them\n for event in events.iter() {\n \/\/ Validate the token we registered our socket with,\n \/\/ in this example it will only ever be one but we\n \/\/ make sure it's valid non the less.\n match event.token() {\n IN => loop {\n \/\/ In this loop we receive from the socket as long as we\n \/\/ can read data\n match socket.recv_from(&mut buf) {\n Ok((n, from_addr)) => {\n \/\/ Send the data right back from where it came from.\n let _r = socket.send_to(&buf[..n], from_addr);\n }\n Err(e) => {\n \/\/ If we failed to receive data we have two cases\n if e.kind() == io::ErrorKind::WouldBlock {\n \/\/ If the reason was `WouldBlock` we know\n \/\/ our socket has no more data to give so\n \/\/ we can return to the poll to wait politely.\n break;\n } else {\n \/\/ If it was any other kind of error, something\n \/\/ went wrong and we panic.\n panic!(\"error: {:?}\", e)\n }\n }\n }\n },\n \/\/ We only have IN as a token, so this should never ever be hit\n _ => unreachable!(),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use regex::Regex;\nuse toml::Value;\n\nuse libimagstore::store::Entry;\n\nuse builtin::header::field_path::FieldPath;\nuse filter::Filter;\n\n\/\/\/ Check whether certain header field in a entry is equal to a 
value\npub struct FieldGrep {\n header_field_path: FieldPath,\n grep: Regex,\n}\n\nimpl FieldGrep {\n\n pub fn new(path: FieldPath, grep: Regex) -> FieldGrep {\n FieldGrep {\n header_field_path: path,\n grep: grep,\n }\n }\n\n}\n\nimpl Filter for FieldGrep {\n\n fn filter(&self, e: &Entry) -> bool {\n e.get_header()\n .read(&self.header_field_path[..])\n .map(|v| {\n match v {\n Some(Value::String(s)) => self.grep.captures(&s[..]).is_some(),\n _ => false,\n }\n })\n .unwrap_or(false)\n }\n\n}\n\n\n<commit_msg>Add field_grep filter<commit_after>use regex::Regex;\nuse toml::Value;\n\nuse libimagstore::store::Entry;\n\nuse builtin::header::field_path::FieldPath;\nuse builtin::header::field_predicate::FieldPredicate;\nuse builtin::header::field_predicate::Predicate;\nuse filter::Filter;\n\nstruct EqGrep{\n regex: Regex\n}\n\nimpl Predicate for EqGrep {\n\n fn evaluate(&self, v: Value) -> bool {\n match v {\n Value::String(s) => self.regex.captures(&s[..]).is_some(),\n _ => false,\n }\n }\n\n}\n\n\/\/\/ Check whether certain header field in a entry is equal to a value\npub struct FieldGrep {\n filter: FieldPredicate<EqGrep>,\n}\n\nimpl FieldGrep {\n\n pub fn new(path: FieldPath, grep: Regex) -> FieldGrep {\n FieldGrep {\n filter: FieldPredicate::new(path, Box::new(EqGrep { regex: grep})),\n }\n }\n\n}\n\nimpl Filter for FieldGrep {\n\n fn filter(&self, e: &Entry) -> bool {\n self.filter.filter(e)\n }\n\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Initial commit<commit_after>use std::env;\nuse std::error::Error;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\n\n\/\/ Lookup table\n\/\/ We can generate this but it's small enough to hardcode\nconst TABLE : [u32; 256] = [ \n 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F,\n 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988,\n 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, 0x1DB71064, 0x6AB020F2,\n 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7,\n 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9,\n 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172,\n 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, 0x35B5A8FA, 0x42B2986C,\n 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59,\n 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423,\n 0xCFBA9599, 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924,\n 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, 0x01DB7106,\n 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433,\n 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D,\n 0x91646C97, 0xE6635C01, 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E,\n 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950,\n 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65,\n 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, 0x4ADFA541, 0x3DD895D7,\n 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0,\n 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA,\n 0xBE0B1010, 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F,\n 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, 0x2EB40D81,\n 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A,\n 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, 0xE3630B12, 0x94643B84,\n 0x0D6D6A3E, 0x7A6A5AA8, 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1,\n 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 
0xF762575D, 0x806567CB,\n 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC,\n 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, 0xD6D6A3E8, 0xA1D1937E,\n 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B,\n 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55,\n 0x316E8EEF, 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236,\n 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, 0xB2BD0B28,\n 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D,\n 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, 0x9C0906A9, 0xEB0E363F,\n 0x72076785, 0x05005713, 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38,\n 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242,\n 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777,\n 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, 0x8F659EFF, 0xF862AE69,\n 0x616BFFD3, 0x166CCF45, 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2,\n 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC,\n 0x40DF0B66, 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9,\n 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, 0xCDD70693,\n 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94,\n 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D\n ];\n\nfn main() {\n\t\n\tlet args : Vec<_> = env::args().collect();\n\n\tif args.len() < 2 {\n\t\tprint_usage();\n\t\treturn;\n\t}\n\n\t\/\/ Get the file name from the commandline\n\t\/\/ arguments\n\tlet filename = &args[1];\n\n\t\/\/ Open the file\n\tlet mut file = match File::open(filename) {\n\t Ok(file) => file,\n\t Err(why) => panic!(\"Couldn't open {}: {}\", filename,\n Error::description(&why)),\n\t};\n \n \t\/\/ Read the contents of the file into a byte vector\n\tlet mut contents : Vec<u8> = Vec::new();\n\tfile.read_to_end(&mut contents).unwrap();\n\n\t\/\/ Compute the check sum\n\tlet crc = compute_checksum(&contents);\n\n\t\/\/ Print it\n\tprintln!(\"{0:x}\", crc);\n}\n\nfn compute_checksum(input : &[u8]) -> u32 {\n\n\t\/\/ Start an initial value of 0xFFFFFFFF - all bits are set\n input.iter().fold(0xFFFFFFFFu32, \n \t| acc, &item | {\n \t\t\/\/ Generate an index based on the low bits of our current \n \t\t\/\/ CRC XOR'd with current item in our array\n\t\t\tlet index = (acc & 0xFF) ^ (item as u32);\n\n\t\t\t\/\/ Right shift the crc by 8 bits and XOR it with the value\n\t\t\t\/\/ in the look up table\n\t\t\t(acc >> 8) ^ (TABLE[index as usize] as u32)\n\t}) ^ 0xFFFFFFFFu32 \/\/ XOR the final CRC with -1\n\n}\n\nfn print_usage() {\n\tlet app_path = env::args().next().unwrap().clone();\n\n\tlet path = Path::new(&app_path);\n\tprintln!(\"usage: {} <filename>\", \n\t\t\/\/ Display the file name only, not the full path\n\t\tpath.file_name().unwrap().to_str().unwrap());\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Added triangle gradient example<commit_after>extern crate tetrahedrane;\n\nuse tetrahedrane::vid::*;\nuse tetrahedrane::start;\nuse tetrahedrane::shaders;\n\nfn main() {\n \/\/ Make a new window\n let mut window = start::Window::new(640, 480, \"Hello World!\", 1 as usize);\n\n let mut shaders: Vec<Shader> = Vec::new();\n\n \/\/ Create a triangle\n let mut triangle = Triangle::new(DepthPoint::new(0.0, -0.5, 3.0), \n DepthPoint::new(0.5, 0.5, 3.0), \n DepthPoint::new(-0.5, 0.5, 3.0), \n 0.0, 0.0, 0.0,\n Color::new(200, 200, 200));\n\n \/\/ Add a shader\n shaders.push(shaders::filled_triangle_gradient(1));\n\n \/\/ Apply shader to the triangle\n 
triangle.shader_ids[0] = 1;\n\n loop {\n window.window.set(Color::new(20, 40, 60).orb_color());\n window.window.set(Color::new(20, 40, 60).orb_color());\n\n triangle.coord_rotate_x_y(0.0, 0.0, 0.01);\n triangle.coord_rotate_x_z(0.0, 3.0, 0.02);\n triangle.coord_rotate_y_z(0.0, 3.0, 0.05);\n\n \/\/ Render the triangle with shaders.\n window.render(triangle, &shaders); \n\n window.window.sync();\n\n std::thread::sleep(std::time::Duration::from_millis(16));\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement CacheMiddleware<commit_after>use moka::future::Cache;\nuse std::time::Duration;\nuse tide::{\n http::{\n content::{AcceptEncoding, ContentEncoding, Encoding},\n headers::{HeaderName, HeaderValues},\n Method, Mime,\n },\n Body, Middleware, Next, Request, Response, StatusCode,\n};\nuse wildmatch::WildMatch;\n\n#[derive(Clone)]\npub struct CacheMiddleware {\n cache: Cache<String, (StatusCode, Mime, Vec<u8>, Vec<(HeaderName, HeaderValues)>)>,\n skip_cache_wildmatch: Option<Vec<WildMatch>>,\n}\n\nimpl CacheMiddleware {\n pub fn new(\n prefix: String,\n cache_ttl_sec: u64,\n cache_tti_sec: u64,\n skip_cache_path: Option<Vec<String>>,\n ) -> Self {\n log::info!(\n \"Setting up response cache for path {} with time-to-live: {}s and time-to-idle: {}s\",\n &prefix,\n &cache_ttl_sec,\n &cache_tti_sec,\n );\n let cache = Cache::builder()\n .time_to_live(Duration::from_secs(cache_ttl_sec))\n .time_to_idle(Duration::from_secs(cache_tti_sec))\n .build();\n let skip_cache_wildmatch = match skip_cache_path {\n Some(skip_cache_path) => Some(\n skip_cache_path\n .iter()\n .map(|path| WildMatch::new(path))\n .collect(),\n ),\n None => None,\n };\n\n Self {\n cache,\n skip_cache_wildmatch,\n }\n }\n}\n\n#[tide::utils::async_trait]\nimpl<State: Clone + Send + Sync + 'static> Middleware<State> for CacheMiddleware {\n async fn handle(&self, req: Request<State>, next: Next<'_, State>) -> tide::Result {\n let url_path: String = req.url().path().to_string();\n let skip_cache: bool = {\n if req.method() != Method::Get {\n true\n } else {\n match &self.skip_cache_wildmatch {\n Some(skip_cache_wildmatch) => skip_cache_wildmatch\n .iter()\n .find(|wildmatch_path| wildmatch_path.matches(&url_path))\n .is_some(),\n None => false,\n }\n }\n };\n\n let cache_key: Option<String> = if skip_cache {\n None\n } else {\n let accepts = AcceptEncoding::from_headers(&req)?;\n let encoding: String = if let Some(mut accepts) = accepts {\n let encoding =\n accepts.negotiate(&[Encoding::Brotli, Encoding::Gzip, Encoding::Deflate])?;\n encoding.to_string()\n } else {\n \"\".to_string()\n };\n\n Some(get_cache_key(&url_path, &encoding))\n };\n\n if let Some(cache_key) = &cache_key {\n \/\/ Try to find this request from the cache\n let cached_response = &self.cache.get(&cache_key.clone());\n if let Some((status, mime, body_as_bytes, headers)) = cached_response {\n log::debug!(\"Cache hit: {}\", cache_key);\n let mut body = Body::from_bytes(body_as_bytes.to_vec());\n body.set_mime(mime.clone());\n let mut res_builder = Response::builder(status.clone()).body(body);\n for (header_name, header_values) in headers {\n res_builder = res_builder.header(header_name, header_values);\n }\n return Ok(res_builder.build());\n }\n }\n\n let mut res: Response = next.run(req).await;\n if let Some(cache_key) = cache_key {\n let status = res.status();\n if !status.is_success() {\n return Ok(res);\n }\n let response_encoding =\n if let Some(response_encoding) = ContentEncoding::from_headers(&res).unwrap() {\n response_encoding.to_string()\n } 
else {\n \"\".to_string()\n };\n let response_cache_key = get_cache_key(&url_path, &response_encoding);\n if response_cache_key != cache_key {\n \/\/ For some reason tide_compress didn't encode the way we thought it would\n log::error!(\n \"Encoding mismatch with cache, expected: '{}' but was '{}'\",\n &cache_key,\n response_cache_key\n );\n return Ok(res);\n }\n\n log::debug!(\"Cache_miss: {}\", cache_key);\n let cache_headers: Vec<(HeaderName, HeaderValues)> = res\n .iter()\n .map(|(header_name, header_values)| (header_name.clone(), header_values.clone()))\n .collect();\n let body = res.take_body();\n let mime = body.mime().clone();\n let body_as_bytes = body.into_bytes().await.unwrap();\n\n self.cache\n .insert(\n cache_key,\n (status, mime.clone(), body_as_bytes.clone(), cache_headers),\n )\n .await;\n let mut body = Body::from_bytes(body_as_bytes);\n body.set_mime(mime.clone());\n res.set_body(body);\n Ok(res)\n } else {\n \/\/ There is no caching for this request, just return the response we got\n log::debug!(\"Skip cache: {}\", url_path);\n Ok(res)\n }\n }\n}\n\nfn get_cache_key(url_path: &str, encoding: &str) -> String {\n format!(\"{} {}\", url_path, encoding)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix cursor movement<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\n\nuse toml::Value;\n\nuse task_hookrs::import::import_tasks;\nuse task_hookrs::status::TaskStatus;\n\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagtodo::task::Task;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let rt = generate_runtime_setup(\"imag-todo\",\n &version!()[..],\n \"Interface with taskwarrior\",\n build_ui);\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") => list(&rt),\n _ => unreachable!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let stdin = stdin.lock(); \/\/ implements BufRead which is required for `Task::import()`\n\n match Task::import(rt.store(), stdin) {\n Ok((_, line, uuid)) => info!(\"{}\\nTask {} stored in imag\", line, uuid),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let stdin = stdin();\n let stdin = stdin.lock();\n\n match import_tasks(stdin) {\n Ok(ttasks) => for (counter, ttask) in ttasks.iter().enumerate() {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n \/\/ Taskwarrior does not have the concept of deleted tasks, but only modified\n \/\/ ones.\n \/\/\n \/\/ Here we check if the status of a task is deleted and if yes, we delete it\n \/\/ from the store.\n if *ttask.status() == TaskStatus::Deleted {\n match Task::delete_by_uuid(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n } \/\/ end if c % 2\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let verbose = subcmd.is_present(\"verbose\");\n\n let res = Task::all(rt.store()) \/\/ get all tasks\n .map(|iter| { \/\/ and if this succeeded\n \/\/ filter out the ones were we can read the uuid\n let uuids : Vec<_> = iter.filter_map(|t| match t {\n Ok(v) => match v.get_header().read(\"todo.uuid\") {\n Ok(Some(Value::String(ref u))) => Some(u.clone()),\n Ok(Some(_)) => {\n warn!(\"Header type error\");\n None\n },\n Ok(None) => None,\n Err(e) => {\n trace_error(&e);\n None\n }\n },\n Err(e) => {\n trace_error(&e);\n None\n }\n })\n .collect();\n\n \/\/ compose a `task` call with them, ...\n let outstring = if verbose { \/\/ ... if verbose\n let output = Command::new(\"task\")\n .stdin(Stdio::null())\n .args(&uuids)\n .spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"Failed to execute `task` on the commandline. I'm dying now.\");\n })\n .wait_with_output()\n .unwrap_or_else(|e| panic!(\"failed to unwrap output: {}\", e));\n\n String::from_utf8(output.stdout)\n .unwrap_or_else(|e| panic!(\"failed to execute: {}\", e))\n } else { \/\/ ... 
else just join them\n uuids.join(\"\\n\")\n };\n\n \/\/ and then print that\n println!(\"{}\", outstring);\n });\n\n if let Err(e) = res {\n trace_error(&e);\n }\n}\n\n<commit_msg>We must actually use println!() here<commit_after>extern crate clap;\nextern crate glob;\n#[macro_use] extern crate log;\nextern crate serde_json;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate task_hookrs;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagerror;\nextern crate libimagtodo;\n\nuse std::process::exit;\nuse std::process::{Command, Stdio};\nuse std::io::stdin;\n\nuse toml::Value;\n\nuse task_hookrs::import::import_tasks;\nuse task_hookrs::status::TaskStatus;\n\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagtodo::task::Task;\nuse libimagerror::trace::trace_error;\n\nmod ui;\n\nuse ui::build_ui;\nfn main() {\n let rt = generate_runtime_setup(\"imag-todo\",\n &version!()[..],\n \"Interface with taskwarrior\",\n build_ui);\n\n match rt.cli().subcommand_name() {\n Some(\"tw-hook\") => tw_hook(&rt),\n Some(\"list\") => list(&rt),\n _ => unreachable!(),\n } \/\/ end match scmd\n} \/\/ end main\n\nfn tw_hook(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"tw-hook\").unwrap();\n if subcmd.is_present(\"add\") {\n let stdin = stdin();\n let stdin = stdin.lock(); \/\/ implements BufRead which is required for `Task::import()`\n\n match Task::import(rt.store(), stdin) {\n Ok((_, line, uuid)) => println!(\"{}\\nTask {} stored in imag\", line, uuid),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n } else if subcmd.is_present(\"delete\") {\n \/\/ The used hook is \"on-modify\". This hook gives two json-objects\n \/\/ per usage und wants one (the second one) back.\n let stdin = stdin();\n let stdin = stdin.lock();\n\n match import_tasks(stdin) {\n Ok(ttasks) => for (counter, ttask) in ttasks.iter().enumerate() {\n if counter % 2 == 1 {\n \/\/ Only every second task is needed, the first one is the\n \/\/ task before the change, and the second one after\n \/\/ the change. 
The (maybe modified) second one is\n \/\/ expected by taskwarrior.\n match serde_json::ser::to_string(&ttask) {\n Ok(val) => println!(\"{}\", val),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n\n \/\/ Taskwarrior does not have the concept of deleted tasks, but only modified\n \/\/ ones.\n \/\/\n \/\/ Here we check if the status of a task is deleted and if yes, we delete it\n \/\/ from the store.\n if *ttask.status() == TaskStatus::Deleted {\n match Task::delete_by_uuid(rt.store(), *ttask.uuid()) {\n Ok(_) => println!(\"Deleted task {}\", *ttask.uuid()),\n Err(e) => {\n trace_error(&e);\n exit(1);\n }\n }\n }\n } \/\/ end if c % 2\n },\n Err(e) => {\n trace_error(&e);\n exit(1);\n },\n }\n } else {\n \/\/ Should not be possible, as one argument is required via\n \/\/ ArgGroup\n unreachable!();\n }\n}\n\nfn list(rt: &Runtime) {\n let subcmd = rt.cli().subcommand_matches(\"list\").unwrap();\n let verbose = subcmd.is_present(\"verbose\");\n\n let res = Task::all(rt.store()) \/\/ get all tasks\n .map(|iter| { \/\/ and if this succeeded\n \/\/ filter out the ones were we can read the uuid\n let uuids : Vec<_> = iter.filter_map(|t| match t {\n Ok(v) => match v.get_header().read(\"todo.uuid\") {\n Ok(Some(Value::String(ref u))) => Some(u.clone()),\n Ok(Some(_)) => {\n warn!(\"Header type error\");\n None\n },\n Ok(None) => None,\n Err(e) => {\n trace_error(&e);\n None\n }\n },\n Err(e) => {\n trace_error(&e);\n None\n }\n })\n .collect();\n\n \/\/ compose a `task` call with them, ...\n let outstring = if verbose { \/\/ ... if verbose\n let output = Command::new(\"task\")\n .stdin(Stdio::null())\n .args(&uuids)\n .spawn()\n .unwrap_or_else(|e| {\n trace_error(&e);\n panic!(\"Failed to execute `task` on the commandline. I'm dying now.\");\n })\n .wait_with_output()\n .unwrap_or_else(|e| panic!(\"failed to unwrap output: {}\", e));\n\n String::from_utf8(output.stdout)\n .unwrap_or_else(|e| panic!(\"failed to execute: {}\", e))\n } else { \/\/ ... 
else just join them\n uuids.join(\"\\n\")\n };\n\n \/\/ and then print that\n println!(\"{}\", outstring);\n });\n\n if let Err(e) = res {\n trace_error(&e);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary mut on variable.<commit_after><|endoftext|>"} {"text":"<commit_before>use super::geometry::*;\n\nfn rotate(x: f32, y: f32, around_x: f32, around_y: f32, angle: f32) -> (f32, f32) {\n use std::f32;\n \n let s = f32::sin(angle);\n let c = f32::cos(angle);\n \n let x = x - around_x;\n let y = y - around_y;\n \n let x = x * c - y * s;\n let y = x * s + y * c;\n \n (x + around_x, y + around_y)\n}\n\npub trait Rotation {\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32);\n fn rotate_x_z(&mut self, x: f32, y: f32, angle: f32);\n fn rotate_y_z(&mut self, x: f32, y: f32, angle: f32);\n}\n\nimpl Rotation for Point {\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n let (x, y) = rotate(self.x, self.y, x, y, angle);\n self.x = x;\n self.y = y;\n }\n \n fn rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n let (x, z) = rotate(self.x, self.z, x, z, angle);\n self.x = x;\n self.z = z;\n }\n \n fn rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n let (y, z) = rotate(self.y, self.z, y, z, angle);\n self.y = y;\n self.z = z;\n }\n}\n\nimpl Rotation for Triangle {\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.rotate_x_y(x, y, angle);\n self.p2.rotate_x_y(x, y, angle);\n self.p3.rotate_x_y(x, y, angle);\n }\n \n fn rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.rotate_x_z(x, z, angle);\n self.p2.rotate_x_z(x, z, angle);\n self.p3.rotate_x_z(x, z, angle);\n }\n \n fn rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.rotate_y_z(y, z, angle);\n self.p2.rotate_y_z(y, z, angle);\n self.p3.rotate_y_z(y, z, angle);\n }\n}<commit_msg>Add some docstrings<commit_after>use super::geometry::*;\n\nfn rotate(x: f32, y: f32, around_x: f32, around_y: f32, angle: f32) -> (f32, f32) {\n use std::f32;\n \n let s = f32::sin(angle);\n let c = f32::cos(angle);\n \n let x = x - around_x;\n let y = y - around_y;\n \n let x = x * c - y * s;\n let y = x * s + y * c;\n \n (x + around_x, y + around_y)\n}\n\npub trait Rotation {\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32);\n fn rotate_x_z(&mut self, x: f32, y: f32, angle: f32);\n fn rotate_y_z(&mut self, x: f32, y: f32, angle: f32);\n}\n\nimpl Rotation for Point {\n \/\/\/ Rotates on x and y axes. You shouldn't rotate a point you rotated\n \/\/\/ earlier, instead keep rotating the initial one.\n \/\/\/ For some reason, it breaks for some reason if you keep rotating too much\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32) -> Point {\n let (x, y) = rotate(self.x, self.y, x, y, angle);\n Point {\n x: x,\n y: y,\n z: self.z,\n }\n }\n \n \/\/\/ Rotate on x and z axes. Same error as with rotate_x_y\n fn rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n let (x, z) = rotate(self.x, self.z, x, z, angle);\n self.x = x;\n self.z = z;\n }\n \n \/\/\/ Rotate on y and z axes. 
Same error as with rotate_x_y\n fn rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n let (y, z) = rotate(self.y, self.z, y, z, angle);\n self.y = y;\n self.z = z;\n }\n}\n\nimpl Rotation for Triangle {\n fn rotate_x_y(&mut self, x: f32, y: f32, angle: f32) {\n self.p1.rotate_x_y(x, y, angle);\n self.p2.rotate_x_y(x, y, angle);\n self.p3.rotate_x_y(x, y, angle);\n }\n \n fn rotate_x_z(&mut self, x: f32, z: f32, angle: f32) {\n self.p1.rotate_x_z(x, z, angle);\n self.p2.rotate_x_z(x, z, angle);\n self.p3.rotate_x_z(x, z, angle);\n }\n \n fn rotate_y_z(&mut self, y: f32, z: f32, angle: f32) {\n self.p1.rotate_y_z(y, z, angle);\n self.p2.rotate_y_z(y, z, angle);\n self.p3.rotate_y_z(y, z, angle);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some code output test for python.<commit_after>use json_typegen_shared::{codegen, ImportStyle, Options, OutputMode};\n\n\/\/\/ Function to test AST equality, not string equality\nfn code_output_test(name: &str, input: &str, expected: &str) {\n let mut options = Options::default();\n options.import_style = ImportStyle::AssumeExisting;\n options.output_mode = OutputMode::PythonPydantic;\n let res = codegen(name, input, options);\n let output = res.unwrap();\n let expected = expected.trim();\n let output = output.trim();\n assert_eq!(\n output,\n expected,\n \"\\n\\nUnexpected output code:\\n input: {}\\n output:\\n{}\\n expected: {}\",\n input,\n output,\n expected\n );\n}\n\n#[test]\nfn empty_object() {\n code_output_test(\n \"Root\",\n r##\"\n {}\n \"##,\n r##\"\nclass Root(BaseModel):\n pass\n\"##,\n );\n}\n\n#[test]\nfn list_of_numbers() {\n code_output_test(\n \"Numbers\",\n r##\"\n [1, 2, 3]\n \"##,\n \"Numbers = list[int]\"\n );\n}\n\n#[test]\nfn point() {\n code_output_test(\n \"Point\",\n r##\"\n {\n \"x\": 2,\n \"y\": 3\n }\n \"##,\n r##\"\nclass Point(BaseModel):\n x: int\n y: int\n\"##,\n );\n}\n\n#[test]\nfn optionals() {\n code_output_test(\n \"Opts\",\n r##\"\n [\n {\n \"in_both\": 5,\n \"missing\": 5,\n \"has_null\": 5\n },\n {\n \"in_both\": 5,\n \"has_null\": null,\n \"added\": 5\n }\n ]\n \"##,\n r##\"\nclass Opt(BaseModel):\n in_both: int\n missing: Optional[int]\n has_null: Optional[int]\n added: Optional[int]\n\n\nOpts = list[Opt]\n\"##,\n );\n}\n\n#[test]\nfn fallback() {\n code_output_test(\n \"FallbackExamples\",\n r##\"\n [\n {\n \"only_null\": null,\n \"conflicting\": 5,\n \"empty_array\": []\n },\n {\n \"only_null\": null,\n \"conflicting\": \"five\",\n \"empty_array\": []\n }\n ]\n \"##,\n r##\"\nclass FallbackExample(BaseModel):\n only_null: Any\n conflicting: Any\n empty_array: list[Any]\n\n\nFallbackExamples = list[FallbackExample]\n\"##,\n );\n}\n\n#[test]\nfn nesting() {\n code_output_test(\n \"NestedTypes\",\n r##\"\n [\n {\n \"nested\": {\n \"a\": 5,\n \"doubly_nested\": { \"c\": 10 }\n },\n \"in_array\": [{ \"b\": 5 }]\n }\n ]\n \"##,\n r##\"\nclass DoublyNested(BaseModel):\n c: int\n\n\nclass Nested(BaseModel):\n a: int\n doubly_nested: DoublyNested\n\n\nclass InArray(BaseModel):\n b: int\n\n\nclass NestedType(BaseModel):\n nested: Nested\n in_array: list[InArray]\n\n\nNestedTypes = list[NestedType]\n\"##,\n );\n}\n\n#[test]\nfn tuple() {\n code_output_test(\n \"Pagination\",\n r##\"\n [\n {\n \"pages\": 1,\n \"items\": 3\n },\n [\n {\n \"name\": \"John\"\n },\n {\n \"name\": \"James\"\n },\n {\n \"name\": \"Jake\"\n }\n ]\n ]\n \"##,\n r##\"\nclass Pagination2(BaseModel):\n pages: int\n items: int\n\n\nclass Pagination3(BaseModel):\n name: str\n\n\nPagination = tuple[Pagination2, 
list[Pagination3]]\n\"##,\n );\n}\n\n#[test]\nfn rename() {\n code_output_test(\n \"Renamed\",\n r##\"\n {\n \"class\": 5\n }\n \"##,\n r##\"\nclass Renamed(BaseModel):\n class_field: int = Field(alias=\"class\")\n\"##,\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>languages\/rust\/learn-rust\/3-chapter\/13-code.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>ok, done with 5, and almost 7<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an unit test for condition branch.<commit_after>#![feature(libc)]\n\nextern crate libc;\nextern crate llvm;\n\nuse llvm::*;\nuse llvm::Attribute::*;\n\n#[test]\npub fn test() {\n let ctx = Context::new();\n let module = Module::new(\"simple\", &ctx);\n let func = module.add_function(\"fib\", Type::get::<fn(u64) -> u64>(&ctx));\n func.add_attributes(&[NoUnwind, ReadNone]);\n let value = &func[0];\n \n let entry = func.append(\"entry\");\n let then_bb = func.append(\"then_block\");\n let else_bb = func.append(\"else_block\");\n let merge_bb = func.append(\"merge_bb\");\n \n let builder = Builder::new(&ctx);\n builder.position_at_end(entry);\n \n let local = builder.create_alloca(Type::get::<u64>(&ctx));\n \n let cond = builder.create_cmp(value, 5u64.compile(&ctx), Predicate::LessThan);\n builder.create_cond_br(cond, then_bb, Some(else_bb));\n \n builder.position_at_end(then_bb);\n builder.create_store(8u64.compile(&ctx), local);\n builder.create_br(merge_bb);\n \n builder.position_at_end(else_bb);\n builder.create_store(16u64.compile(&ctx), local);\n builder.create_br(merge_bb);\n \n builder.position_at_end(merge_bb);\n let ret_val = builder.create_load(local);\n builder.create_ret(ret_val);\n \n module.verify().unwrap();\n let ee = JitEngine::new(&module, JitOptions {opt_level: 0}).unwrap();\n ee.with_function(func, |fib: extern fn(u64) -> u64| {\n for i in 0..10 {\n if i < 5 {\n assert_eq!(8, fib(i));\n } else {\n assert_eq!(16, fib(i));\n }\n }\n });\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a task spawning benchmark<commit_after>use std;\nimport std::vec;\nimport std::task;\nimport std::uint;\nimport std::str;\n\nfn f(n: uint) {\n let i = 0u;\n while i < n {\n task::join(spawn g());\n i += 1u;\n }\n}\n\nfn g() {}\n\nfn main(args: vec[str]) {\n\n let n = if vec::len(args) < 2u {\n 10u\n } else {\n uint::parse_buf(str::bytes(args.(1)), 10u)\n };\n let i = 0u;\n while i < n {\n spawn f(n);\n i += 1u;\n }\n}<|endoftext|>"} {"text":"<commit_before>extern crate bufstream;\nextern crate git2;\nextern crate cargotest;\nextern crate hamcrest;\n\nuse std::collections::HashSet;\nuse std::io::prelude::*;\nuse std::net::TcpListener;\nuse std::thread;\n\nuse bufstream::BufStream;\nuse cargotest::support::paths;\nuse cargotest::support::{project, execs};\nuse hamcrest::assert_that;\n\n\/\/ Test that HTTP auth is offered from `credential.helper`\n#[test]\nfn http_auth_offered() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n\n fn headers(rdr: &mut BufRead) -> HashSet<String> {\n let valid = [\"GET\", \"Authorization\", \"Accept\", \"User-Agent\"];\n rdr.lines().map(|s| s.unwrap())\n .take_while(|s| s.len() > 2)\n .map(|s| s.trim().to_string())\n .filter(|s| {\n valid.iter().any(|prefix| s.starts_with(*prefix))\n })\n .collect()\n }\n\n let t = thread::spawn(move|| {\n let mut conn = BufStream::new(server.accept().unwrap().0);\n let req = headers(&mut conn);\n let user_agent = if cfg!(windows) {\n \"User-Agent: git\/1.0 (libgit2 0.25.1)\"\n } 
else {\n \"User-Agent: git\/2.0 (libgit2 0.25.1)\"\n };\n conn.write_all(b\"\\\n HTTP\/1.1 401 Unauthorized\\r\\n\\\n WWW-Authenticate: Basic realm=\\\"wheee\\\"\\r\\n\n \\r\\n\\\n \").unwrap();\n assert_eq!(req, vec![\n \"GET \/foo\/bar\/info\/refs?service=git-upload-pack HTTP\/1.1\",\n \"Accept: *\/*\",\n user_agent,\n ].into_iter().map(|s| s.to_string()).collect());\n drop(conn);\n\n let mut conn = BufStream::new(server.accept().unwrap().0);\n let req = headers(&mut conn);\n conn.write_all(b\"\\\n HTTP\/1.1 401 Unauthorized\\r\\n\\\n WWW-Authenticate: Basic realm=\\\"wheee\\\"\\r\\n\n \\r\\n\\\n \").unwrap();\n assert_eq!(req, vec![\n \"GET \/foo\/bar\/info\/refs?service=git-upload-pack HTTP\/1.1\",\n \"Authorization: Basic Zm9vOmJhcg==\",\n \"Accept: *\/*\",\n user_agent,\n ].into_iter().map(|s| s.to_string()).collect());\n });\n\n let script = project(\"script\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"script\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {\n println!(\"username=foo\");\n println!(\"password=bar\");\n }\n \"#);\n\n assert_that(script.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(0));\n let script = script.bin(\"script\");\n\n let config = paths::home().join(\".gitconfig\");\n let mut config = git2::Config::open(&config).unwrap();\n config.set_str(\"credential.helper\",\n &script.display().to_string()).unwrap();\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"http:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\")\n .file(\".cargo\/config\",\"\\\n [net]\n retry = 0\n \");\n\n assert_that(p.cargo_process(\"build\"),\n execs().with_status(101).with_stderr(&format!(\"\\\n[UPDATING] git repository `http:\/\/{addr}\/foo\/bar`\n[ERROR] failed to load source for a dependency on `bar`\n\nCaused by:\n Unable to update http:\/\/{addr}\/foo\/bar\n\nCaused by:\n failed to clone into: [..]\n\nCaused by:\n failed to authenticate when downloading repository\nattempted to find username\/password via `credential.helper`, but [..]\n\nTo learn more, run the command again with --verbose.\n\",\n addr = addr)));\n\n t.join().ok().unwrap();\n}\n\n\/\/ Boy, sure would be nice to have a TLS implementation in rust!\n#[test]\nfn https_something_happens() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let t = thread::spawn(move|| {\n let mut conn = server.accept().unwrap().0;\n drop(conn.write(b\"1234\"));\n drop(conn.shutdown(std::net::Shutdown::Write));\n drop(conn.read(&mut [0; 16]));\n });\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"https:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\")\n .file(\".cargo\/config\",\"\\\n [net]\n retry = 0\n \");\n\n assert_that(p.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(101).with_stderr_contains(&format!(\"\\\n[UPDATING] git repository `https:\/\/{addr}\/foo\/bar`\n\", addr = addr))\n .with_stderr_contains(&format!(\"\\\nCaused by:\n {errmsg}\n\",\n errmsg = if cfg!(windows) {\n \"[[..]] failed to send request: [..]\\n\"\n } else if cfg!(target_os = \"macos\") {\n \/\/ OSX is difficult to tests as some builds may use\n \/\/ Security.framework and others may use OpenSSL. 
In that case let's\n \/\/ just not verify the error message here.\n \"[..]\"\n } else {\n \"[..] SSL error: [..]\"\n })));\n\n t.join().ok().unwrap();\n}\n\n\/\/ Boy, sure would be nice to have an SSH implementation in rust!\n#[test]\nfn ssh_something_happens() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let t = thread::spawn(move|| {\n drop(server.accept().unwrap());\n });\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"ssh:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\");\n\n assert_that(p.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(101).with_stderr_contains(&format!(\"\\\n[UPDATING] git repository `ssh:\/\/{addr}\/foo\/bar`\n\", addr = addr))\n .with_stderr_contains(\"\\\nCaused by:\n [[..]] Failed to start SSH session: Failed getting banner\n\"));\n t.join().ok().unwrap();\n}\n<commit_msg>fix build-auth test<commit_after>extern crate bufstream;\nextern crate git2;\nextern crate cargotest;\nextern crate hamcrest;\n\nuse std::collections::HashSet;\nuse std::io::prelude::*;\nuse std::net::TcpListener;\nuse std::thread;\n\nuse bufstream::BufStream;\nuse cargotest::support::paths;\nuse cargotest::support::{project, execs};\nuse hamcrest::assert_that;\n\n\/\/ Test that HTTP auth is offered from `credential.helper`\n#[test]\nfn http_auth_offered() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n\n fn headers(rdr: &mut BufRead) -> HashSet<String> {\n let valid = [\"GET\", \"Authorization\", \"Accept\", \"User-Agent\"];\n rdr.lines().map(|s| s.unwrap())\n .take_while(|s| s.len() > 2)\n .map(|s| s.trim().to_string())\n .filter(|s| {\n valid.iter().any(|prefix| s.starts_with(*prefix))\n })\n .collect()\n }\n\n let t = thread::spawn(move|| {\n let mut conn = BufStream::new(server.accept().unwrap().0);\n let req = headers(&mut conn);\n let user_agent = if cfg!(windows) {\n \"User-Agent: git\/1.0 (libgit2 0.25.0)\"\n } else {\n \"User-Agent: git\/2.0 (libgit2 0.25.0)\"\n };\n conn.write_all(b\"\\\n HTTP\/1.1 401 Unauthorized\\r\\n\\\n WWW-Authenticate: Basic realm=\\\"wheee\\\"\\r\\n\n \\r\\n\\\n \").unwrap();\n assert_eq!(req, vec![\n \"GET \/foo\/bar\/info\/refs?service=git-upload-pack HTTP\/1.1\",\n \"Accept: *\/*\",\n user_agent,\n ].into_iter().map(|s| s.to_string()).collect());\n drop(conn);\n\n let mut conn = BufStream::new(server.accept().unwrap().0);\n let req = headers(&mut conn);\n conn.write_all(b\"\\\n HTTP\/1.1 401 Unauthorized\\r\\n\\\n WWW-Authenticate: Basic realm=\\\"wheee\\\"\\r\\n\n \\r\\n\\\n \").unwrap();\n assert_eq!(req, vec![\n \"GET \/foo\/bar\/info\/refs?service=git-upload-pack HTTP\/1.1\",\n \"Authorization: Basic Zm9vOmJhcg==\",\n \"Accept: *\/*\",\n user_agent,\n ].into_iter().map(|s| s.to_string()).collect());\n });\n\n let script = project(\"script\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"script\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {\n println!(\"username=foo\");\n println!(\"password=bar\");\n }\n \"#);\n\n assert_that(script.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(0));\n let script = script.bin(\"script\");\n\n let config = paths::home().join(\".gitconfig\");\n let mut config = git2::Config::open(&config).unwrap();\n config.set_str(\"credential.helper\",\n 
&script.display().to_string()).unwrap();\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"http:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\")\n .file(\".cargo\/config\",\"\\\n [net]\n retry = 0\n \");\n\n assert_that(p.cargo_process(\"build\"),\n execs().with_status(101).with_stderr(&format!(\"\\\n[UPDATING] git repository `http:\/\/{addr}\/foo\/bar`\n[ERROR] failed to load source for a dependency on `bar`\n\nCaused by:\n Unable to update http:\/\/{addr}\/foo\/bar\n\nCaused by:\n failed to clone into: [..]\n\nCaused by:\n failed to authenticate when downloading repository\nattempted to find username\/password via `credential.helper`, but [..]\n\nTo learn more, run the command again with --verbose.\n\",\n addr = addr)));\n\n t.join().ok().unwrap();\n}\n\n\/\/ Boy, sure would be nice to have a TLS implementation in rust!\n#[test]\nfn https_something_happens() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let t = thread::spawn(move|| {\n let mut conn = server.accept().unwrap().0;\n drop(conn.write(b\"1234\"));\n drop(conn.shutdown(std::net::Shutdown::Write));\n drop(conn.read(&mut [0; 16]));\n });\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"https:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\")\n .file(\".cargo\/config\",\"\\\n [net]\n retry = 0\n \");\n\n assert_that(p.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(101).with_stderr_contains(&format!(\"\\\n[UPDATING] git repository `https:\/\/{addr}\/foo\/bar`\n\", addr = addr))\n .with_stderr_contains(&format!(\"\\\nCaused by:\n {errmsg}\n\",\n errmsg = if cfg!(windows) {\n \"[[..]] failed to send request: [..]\\n\"\n } else if cfg!(target_os = \"macos\") {\n \/\/ OSX is difficult to tests as some builds may use\n \/\/ Security.framework and others may use OpenSSL. In that case let's\n \/\/ just not verify the error message here.\n \"[..]\"\n } else {\n \"[..] 
SSL error: [..]\"\n })));\n\n t.join().ok().unwrap();\n}\n\n\/\/ Boy, sure would be nice to have an SSH implementation in rust!\n#[test]\nfn ssh_something_happens() {\n let server = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = server.local_addr().unwrap();\n let t = thread::spawn(move|| {\n drop(server.accept().unwrap());\n });\n\n let p = project(\"foo\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n\n [dependencies.bar]\n git = \"ssh:\/\/127.0.0.1:{}\/foo\/bar\"\n \"#, addr.port()))\n .file(\"src\/main.rs\", \"\");\n\n assert_that(p.cargo_process(\"build\").arg(\"-v\"),\n execs().with_status(101).with_stderr_contains(&format!(\"\\\n[UPDATING] git repository `ssh:\/\/{addr}\/foo\/bar`\n\", addr = addr))\n .with_stderr_contains(\"\\\nCaused by:\n [[..]] Failed to start SSH session: Failed getting banner\n\"));\n t.join().ok().unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>a quick method to compute modulo n with rounding towards -infinity.<commit_after>\/\/\/ This module provides useful discrete mathematics functions\n\nuse std::num::abs;\n\n#[inline]\npub fn mod_n (x : int, n : int) -> uint {\n let x = x % n;\n if x < 0{\n (x + abs(n)) as uint\n }else{\n x as uint\n }\n}\n\n\/******************************************************\n *\n * Unit tests follow.\n *\n *******************************************************\/\n\n\n#[cfg(test)]\nmod test{\n #[test]\n fn test_modulo_n(){\n assert_eq!(super::mod_n(4, 5), 4);\n assert_eq!(super::mod_n(6, 5), 1);\n assert_eq!(super::mod_n(-3, 5), 2);\n assert_eq!(super::mod_n(-5, -5), 0);\n assert_eq!(super::mod_n(-6, -5), 4);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagerror;\n\nuse std::process::exit;\n\nuse libimagrt::edit::Edit;\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagnotes::note::Note;\nuse libimagerror::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-notes\",\n &version!()[..],\n \"Note taking helper\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn name_from_cli(rt: &Runtime, subcmd: &str) -> String {\n rt.cli().subcommand_matches(subcmd).unwrap().value_of(\"name\").map(String::from).unwrap()\n}\n\nfn create(rt: &Runtime) {\n let name = name_from_cli(rt, \"create\");\n Note::new(rt.store(), name.clone(), String::new())\n .map_err(|e| trace_error(&e))\n .ok();\n\n if rt.cli().subcommand_matches(\"create\").unwrap().is_present(\"edit\") &&\n !edit_entry(rt, name) {\n exit(1);\n }\n}\n\nfn delete(rt: &Runtime) {\n Note::delete(rt.store(), String::from(name_from_cli(rt, \"delete\")))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"))\n .ok();\n}\n\nfn edit(rt: &Runtime) {\n edit_entry(rt, name_from_cli(rt, \"edit\"));\n}\n\nfn edit_entry(rt: &Runtime, name: String) -> bool {\n let note = Note::retrieve(rt.store(), name);\n if note.is_err() {\n trace_error(¬e.unwrap_err());\n warn!(\"Cannot edit nonexistent Note\");\n return false\n }\n\n let mut note = note.unwrap();\n 
if let Err(e) = note.edit_content(rt) {\n trace_error(&e);\n warn!(\"Editing failed\");\n return false\n }\n true\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.unwrap_err());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name()\n .map(|name| println!(\"{}\", name))\n .map_err(|e| trace_error(&e))\n .ok();\n }\n}\n\n<commit_msg>Rewrite edit_entry() to use Store::get()<commit_after>extern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\n#[macro_use] extern crate version;\n\nextern crate libimagnotes;\nextern crate libimagrt;\nextern crate libimagentrytag;\nextern crate libimagerror;\n\nuse std::process::exit;\n\nuse libimagrt::edit::Edit;\nuse libimagrt::runtime::Runtime;\nuse libimagrt::setup::generate_runtime_setup;\nuse libimagnotes::note::Note;\nuse libimagerror::trace::trace_error;\n\nmod ui;\nuse ui::build_ui;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-notes\",\n &version!()[..],\n \"Note taking helper\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map(|name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"delete\" => delete(&rt),\n \"edit\" => edit(&rt),\n \"list\" => list(&rt),\n _ => {\n debug!(\"Unknown command\"); \/\/ More error handling\n },\n };\n });\n}\n\nfn name_from_cli(rt: &Runtime, subcmd: &str) -> String {\n rt.cli().subcommand_matches(subcmd).unwrap().value_of(\"name\").map(String::from).unwrap()\n}\n\nfn create(rt: &Runtime) {\n let name = name_from_cli(rt, \"create\");\n Note::new(rt.store(), name.clone(), String::new())\n .map_err(|e| trace_error(&e))\n .ok();\n\n if rt.cli().subcommand_matches(\"create\").unwrap().is_present(\"edit\") &&\n !edit_entry(rt, name) {\n exit(1);\n }\n}\n\nfn delete(rt: &Runtime) {\n Note::delete(rt.store(), String::from(name_from_cli(rt, \"delete\")))\n .map_err(|e| trace_error(&e))\n .map(|_| println!(\"Ok\"))\n .ok();\n}\n\nfn edit(rt: &Runtime) {\n edit_entry(rt, name_from_cli(rt, \"edit\"));\n}\n\nfn edit_entry(rt: &Runtime, name: String) -> bool {\n let mut note = match Note::get(rt.store(), name) {\n Ok(Some(note)) => note,\n Ok(None) => {\n warn!(\"Cannot edit nonexistent Note\");\n return false\n },\n Err(e) => {\n trace_error(&e);\n warn!(\"Cannot edit nonexistent Note\");\n return false\n },\n };\n\n if let Err(e) = note.edit_content(rt) {\n trace_error(&e);\n warn!(\"Editing failed\");\n return false\n }\n true\n}\n\nfn list(rt: &Runtime) {\n use std::cmp::Ordering;\n\n let iter = Note::all_notes(rt.store());\n if iter.is_err() {\n trace_error(&iter.unwrap_err());\n exit(1);\n }\n\n let mut iter = iter.unwrap()\n .filter_map(|note| {\n match note {\n Err(e) => {\n trace_error(&e);\n None\n },\n Ok(e) => Some(e)\n }\n })\n .collect::<Vec<Note>>();\n\n iter.sort_by(|note_a, note_b| {\n if let (Ok(a), Ok(b)) = (note_a.get_name(), note_b.get_name()) {\n return a.cmp(&b)\n } else {\n return Ordering::Greater;\n }\n });\n\n for note in iter {\n note.get_name()\n .map(|name| println!(\"{}\", name))\n .map_err(|e| trace_error(&e))\n .ok();\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>#![deny(\n non_camel_case_types,\n 
non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\nextern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\n#[macro_use] extern crate libimagerror;\n\nuse libimagrt::runtime::Runtime;\nuse std::process::exit;\n\nmod error;\nmod ui;\nmod create;\nmod retrieve;\nmod update;\nmod delete;\nmod util;\n\nuse ui::build_ui;\nuse create::create;\nuse retrieve::retrieve;\nuse update::update;\nuse delete::delete;\n\nfn main() {\n let name = \"imag-store\";\n let version = &version!()[..];\n let about = \"Direct interface to the store. Use with great care!\";\n let ui = build_ui(Runtime::get_default_cli_builder(name, version, about));\n let rt = {\n let rt = Runtime::new(ui);\n if rt.is_ok() {\n rt.unwrap()\n } else {\n println!(\"Could not set up Runtime\");\n println!(\"{:?}\", rt.unwrap_err());\n exit(1);\n }\n };\n\n rt.cli()\n .subcommand_name()\n .map_or_else(\n || {\n debug!(\"No command\");\n \/\/ More error handling\n },\n |name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"retrieve\" => retrieve(&rt),\n \"update\" => update(&rt),\n \"delete\" => delete(&rt),\n _ => {\n debug!(\"Unknown command\");\n \/\/ More error handling\n },\n };\n }\n )\n}\n\n<commit_msg>imag-store: Remove setup code and use helper<commit_after>#![deny(\n non_camel_case_types,\n non_snake_case,\n path_statements,\n trivial_numeric_casts,\n unstable_features,\n unused_allocation,\n unused_import_braces,\n unused_imports,\n unused_must_use,\n unused_mut,\n unused_qualifications,\n while_true,\n)]\n\nextern crate clap;\n#[macro_use] extern crate log;\nextern crate semver;\nextern crate toml;\n#[macro_use] extern crate version;\n\nextern crate libimagrt;\nextern crate libimagstore;\nextern crate libimagutil;\n#[macro_use] extern crate libimagerror;\n\nuse libimagrt::setup::generate_runtime_setup;\n\nmod error;\nmod ui;\nmod create;\nmod retrieve;\nmod update;\nmod delete;\nmod util;\n\nuse ui::build_ui;\nuse create::create;\nuse retrieve::retrieve;\nuse update::update;\nuse delete::delete;\n\nfn main() {\n let rt = generate_runtime_setup(\"imag-store\",\n &version!()[..],\n \"Direct interface to the store. 
Use with great care!\",\n build_ui);\n\n rt.cli()\n .subcommand_name()\n .map_or_else(\n || {\n debug!(\"No command\");\n \/\/ More error handling\n },\n |name| {\n debug!(\"Call: {}\", name);\n match name {\n \"create\" => create(&rt),\n \"retrieve\" => retrieve(&rt),\n \"update\" => update(&rt),\n \"delete\" => delete(&rt),\n _ => {\n debug!(\"Unknown command\");\n \/\/ More error handling\n },\n };\n }\n )\n}\n\n<|endoftext|>"} {"text":"<commit_before>pub struct Point {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n}\n\nimpl Point {\n pub fn new(x: f32, y: f32, z: f32) -> Point {\n Point {\n x: x,\n y: y,\n z: z,\n }\n }\n\n pub fn make_2d(&self) -> FlatPoint {\n FlatPoint {\n x: self.x \/ self.z,\n y: self.y \/ self.z,\n }\n }\n}\n\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: f32,\n pub y: f32,\n}\n\nimpl FlatPoint {\n pub fn screen_point(&self, screen_h: i32) -> (i32, i32) {\n (screen_h\/2 + (self.x * (screen_h\/2) as f32) as i32, screen_h\/2 + (self.y * (screen_h\/2) as f32) as i32)\n }\n}\n\npub struct Triangle<'a> {\n pub p1: Point,\n pub p2: Point,\n pub p3: Point,\n pub uv_p1: FlatPoint,\n pub uv_p2: FlatPoint,\n pub uv_p3: FlatPoint,\n texture: &'a super::render::Texture,\n}\n\nimpl<'a> Triangle<'a> {\n pub fn new(p1: Point, p2: Point, p3: Point, texture: &'a super::render::Texture) -> Triangle<'a> {\n Triangle {\n p1: p1,\n p2: p2,\n p3: p3,\n uv_p1: FlatPoint {x: 0.0, y: 0.0},\n uv_p2: FlatPoint {x: 1.0, y: 0.0},\n uv_p3: FlatPoint {x: 0.0, y: 1.0},\n texture: texture,\n }\n }\n\n pub fn make_2d(&self) -> FlatTriangle<'a> {\n FlatTriangle {\n p1: self.p1.make_2d(),\n p2: self.p2.make_2d(),\n p3: self.p3.make_2d(),\n uv_p1: self.uv_p1.clone(),\n uv_p2: self.uv_p2.clone(),\n uv_p3: self.uv_p3.clone(),\n texture: self.texture,\n }\n }\n}\n\n#[derive(Debug)]\npub struct FlatTriangle<'a> {\n pub p1: FlatPoint,\n pub p2: FlatPoint,\n pub p3: FlatPoint,\n pub uv_p1: FlatPoint,\n pub uv_p2: FlatPoint,\n pub uv_p3: FlatPoint,\n texture: &'a super::render::Texture,\n}\n\nimpl<'a> FlatTriangle<'a> {\n pub fn get_barycentric(&self, x: u32, y: u32, screen_h: i32) -> (f32, f32, f32) {\n #[derive(Debug)]\n struct ScreenPoint {\n x: f32,\n y: f32,\n }\n\n let (p1x, p1y) = self.p1.screen_point(screen_h);\n let (p2x, p2y) = self.p2.screen_point(screen_h);\n let (p3x, p3y) = self.p3.screen_point(screen_h);\n\n let p1 = ScreenPoint {x: p1x as f32, y: p1y as f32};\n let p2 = ScreenPoint {x: p2x as f32, y: p2y as f32};\n let p3 = ScreenPoint {x: p3x as f32, y: p3y as f32};\n\n \/\/println!(\"{:?} {:?} {:?}\", p1, p2, p3);\n\n let p = ScreenPoint {x: x as f32, y: y as f32};\n\n \/\/println!(\"{:?}\", ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y)));\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n (alpha, beta, gamma)\n }\n\n pub fn inside(&self, x: u32, y: u32, screen_h: i32) -> bool {\n let (alpha, beta, gamma) = self.get_barycentric(x, y, screen_h);\n \/\/println!(\"{:?} {:?} {:?}\", alpha, beta, gamma);\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n true\n } else {\n false\n }\n }\n}\n<commit_msg>Added get_z_from_barycentric and better float to pixel function<commit_after>pub struct Point {\n pub x: f32,\n pub y: f32,\n pub z: f32,\n}\n\nimpl Point {\n pub fn new(x: f32, y: f32, z: f32) -> Point {\n Point 
{\n x: x,\n y: y,\n z: z,\n }\n }\n\n pub fn make_2d(&self) -> FlatPoint {\n FlatPoint {\n x: self.x \/ self.z,\n y: self.y \/ self.z,\n }\n }\n}\n\n#[derive(Clone, Copy, Debug)]\npub struct FlatPoint {\n pub x: f32,\n pub y: f32,\n}\n\nimpl FlatPoint {\n pub fn screen_point(&self, screen_h: i32, screen_w: i32) -> (i32, i32) {\n \/\/(screen_h\/2 + (self.x * (screen_h\/2) as f32) as i32, screen_h\/2 + (self.y * (screen_h\/2) as f32) as i32)\n (((self.x) * screen_h as f32) as i32 + screen_w \/ 2,\n ((self.y) * screen_h as f32) as i32 + screen_h \/ 2)\n }\n}\n\npub struct Triangle<'a> {\n pub p1: Point,\n pub p2: Point,\n pub p3: Point,\n pub uv_p1: FlatPoint,\n pub uv_p2: FlatPoint,\n pub uv_p3: FlatPoint,\n texture: &'a super::render::Texture,\n}\n\nimpl<'a> Triangle<'a> {\n pub fn new(p1: Point, p2: Point, p3: Point, texture: &'a super::render::Texture) -> Triangle<'a> {\n Triangle {\n p1: p1,\n p2: p2,\n p3: p3,\n uv_p1: FlatPoint {x: 0.0, y: 0.0},\n uv_p2: FlatPoint {x: 1.0, y: 0.0},\n uv_p3: FlatPoint {x: 0.0, y: 1.0},\n texture: texture,\n }\n }\n\n pub fn make_2d(&self) -> FlatTriangle<'a> {\n FlatTriangle {\n p1: self.p1.make_2d(),\n p2: self.p2.make_2d(),\n p3: self.p3.make_2d(),\n uv_p1: self.uv_p1.clone(),\n uv_p2: self.uv_p2.clone(),\n uv_p3: self.uv_p3.clone(),\n texture: self.texture,\n }\n }\n\n pub fn z_from_barycentric(&self, alpha: f32, beta: f32, gamma: f32) -> f32 {\n self.p1.z * alpha + self.p2.z * beta + self.p3.z * gamma\n }\n}\n\n#[derive(Debug)]\npub struct FlatTriangle<'a> {\n pub p1: FlatPoint,\n pub p2: FlatPoint,\n pub p3: FlatPoint,\n pub uv_p1: FlatPoint,\n pub uv_p2: FlatPoint,\n pub uv_p3: FlatPoint,\n texture: &'a super::render::Texture,\n}\n\nimpl<'a> FlatTriangle<'a> {\n pub fn get_barycentric(&self, x: u32, y: u32, screen_h: i32, screen_w: i32) -> (f32, f32, f32) {\n #[derive(Debug)]\n struct ScreenPoint {\n x: f32,\n y: f32,\n }\n\n let (p1x, p1y) = self.p1.screen_point(screen_h, screen_w);\n let (p2x, p2y) = self.p2.screen_point(screen_h, screen_w);\n let (p3x, p3y) = self.p3.screen_point(screen_h, screen_w);\n\n let p1 = ScreenPoint {x: p1x as f32, y: p1y as f32};\n let p2 = ScreenPoint {x: p2x as f32, y: p2y as f32};\n let p3 = ScreenPoint {x: p3x as f32, y: p3y as f32};\n\n \/\/println!(\"{:?} {:?} {:?}\", p1, p2, p3);\n\n let p = ScreenPoint {x: x as f32, y: y as f32};\n\n \/\/println!(\"{:?}\", ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y)));\n\n let alpha = ((p2.y - p3.y)*(p.x - p3.x) + (p3.x - p2.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let beta = ((p3.y - p1.y)*(p.x - p3.x) + (p1.x - p3.x)*(p.y - p3.y)) \/ ((p2.y - p3.y)*(p1.x - p3.x) + (p3.x - p2.x)*(p1.y - p3.y));\n let gamma = 1.0 - alpha - beta;\n\n (alpha, beta, gamma)\n }\n\n pub fn inside(&self, x: u32, y: u32, screen_h: i32, screen_w: i32) -> bool {\n let (alpha, beta, gamma) = self.get_barycentric(x, y, screen_h, screen_w);\n \/\/println!(\"{:?} {:?} {:?}\", alpha, beta, gamma);\n if alpha > 0.0 && beta > 0.0 && gamma > 0.0 {\n true\n } else {\n false\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Runtime services\n\/\/!\n\/\/! The `rt` module provides a narrow set of runtime services,\n\/\/! including the global heap (exported in `heap`) and unwinding and\n\/\/! backtrace support. The APIs in this module are highly unstable,\n\/\/! and should be considered as private implementation details for the\n\/\/! time being.\n\n#![unstable(feature = \"rt\",\n reason = \"this public module should not exist and is highly likely \\\n to disappear\",\n issue = \"0\")]\n#![doc(hidden)]\n\n\n\/\/ Reexport some of our utilities which are expected by other crates.\npub use panicking::{begin_panic, begin_panic_fmt, update_panic_count};\n\n#[cfg(not(any(test, stage0)))]\n#[lang = \"start\"]\nfn lang_start<T: ::termination::Termination + 'static>\n (main: fn() -> T, argc: isize, argv: *const *const u8) -> !\n{\n use panic;\n use sys;\n use sys_common;\n use sys_common::thread_info;\n use thread::Thread;\n use process;\n #[cfg(not(feature = \"backtrace\"))]\n use mem;\n\n sys::init();\n\n process::exit(unsafe {\n let main_guard = sys::thread::guard::init();\n sys::stack_overflow::init();\n\n \/\/ Next, set up the current Thread with the guard information we just\n \/\/ created. Note that this isn't necessary in general for new threads,\n \/\/ but we just do this to name the main thread and to give it correct\n \/\/ info about the stack bounds.\n let thread = Thread::new(Some(\"main\".to_owned()));\n thread_info::set(main_guard, thread);\n\n \/\/ Store our args if necessary in a squirreled away location\n sys::args::init(argc, argv);\n\n \/\/ Let's run some code!\n #[cfg(feature = \"backtrace\")]\n let exit_code = panic::catch_unwind(|| {\n ::sys_common::backtrace::__rust_begin_short_backtrace(move || main().report())\n });\n #[cfg(not(feature = \"backtrace\"))]\n let exit_code = panic::catch_unwind(|| main().report());\n\n sys_common::cleanup();\n exit_code.unwrap_or(101)\n });\n}\n\n#[cfg(all(not(test), stage0))]\n#[lang = \"start\"]\nfn lang_start(main: fn(), argc: isize, argv: *const *const u8) -> isize {\n use panic;\n use sys;\n use sys_common;\n use sys_common::thread_info;\n use thread::Thread;\n #[cfg(not(feature = \"backtrace\"))]\n use mem;\n\n sys::init();\n\n let failed = unsafe {\n let main_guard = sys::thread::guard::init();\n sys::stack_overflow::init();\n\n \/\/ Next, set up the current Thread with the guard information we just\n \/\/ created. Note that this isn't necessary in general for new threads,\n \/\/ but we just do this to name the main thread and to give it correct\n \/\/ info about the stack bounds.\n let thread = Thread::new(Some(\"main\".to_owned()));\n thread_info::set(main_guard, thread);\n\n \/\/ Store our args if necessary in a squirreled away location\n sys::args::init(argc, argv);\n\n \/\/ Let's run some code!\n #[cfg(feature = \"backtrace\")]\n let res = panic::catch_unwind(|| {\n ::sys_common::backtrace::__rust_begin_short_backtrace(main)\n });\n #[cfg(not(feature = \"backtrace\"))]\n let res = panic::catch_unwind(mem::transmute::<_, fn()>(main));\n sys_common::cleanup();\n res.is_err()\n };\n\n if failed {\n 101\n } else {\n 0\n }\n}\n<commit_msg>Use move for optimization purposes<commit_after>\/\/ Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Runtime services\n\/\/!\n\/\/! The `rt` module provides a narrow set of runtime services,\n\/\/! including the global heap (exported in `heap`) and unwinding and\n\/\/! backtrace support. The APIs in this module are highly unstable,\n\/\/! and should be considered as private implementation details for the\n\/\/! time being.\n\n#![unstable(feature = \"rt\",\n reason = \"this public module should not exist and is highly likely \\\n to disappear\",\n issue = \"0\")]\n#![doc(hidden)]\n\n\n\/\/ Reexport some of our utilities which are expected by other crates.\npub use panicking::{begin_panic, begin_panic_fmt, update_panic_count};\n\n#[cfg(not(any(test, stage0)))]\n#[lang = \"start\"]\nfn lang_start<T: ::termination::Termination + 'static>\n (main: fn() -> T, argc: isize, argv: *const *const u8) -> !\n{\n use panic;\n use sys;\n use sys_common;\n use sys_common::thread_info;\n use thread::Thread;\n use process;\n #[cfg(not(feature = \"backtrace\"))]\n use mem;\n\n sys::init();\n\n process::exit(unsafe {\n let main_guard = sys::thread::guard::init();\n sys::stack_overflow::init();\n\n \/\/ Next, set up the current Thread with the guard information we just\n \/\/ created. Note that this isn't necessary in general for new threads,\n \/\/ but we just do this to name the main thread and to give it correct\n \/\/ info about the stack bounds.\n let thread = Thread::new(Some(\"main\".to_owned()));\n thread_info::set(main_guard, thread);\n\n \/\/ Store our args if necessary in a squirreled away location\n sys::args::init(argc, argv);\n\n \/\/ Let's run some code!\n #[cfg(feature = \"backtrace\")]\n let exit_code = panic::catch_unwind(|| {\n ::sys_common::backtrace::__rust_begin_short_backtrace(move || main().report())\n });\n #[cfg(not(feature = \"backtrace\"))]\n let exit_code = panic::catch_unwind(move || main().report());\n\n sys_common::cleanup();\n exit_code.unwrap_or(101)\n });\n}\n\n#[cfg(all(not(test), stage0))]\n#[lang = \"start\"]\nfn lang_start(main: fn(), argc: isize, argv: *const *const u8) -> isize {\n use panic;\n use sys;\n use sys_common;\n use sys_common::thread_info;\n use thread::Thread;\n #[cfg(not(feature = \"backtrace\"))]\n use mem;\n\n sys::init();\n\n let failed = unsafe {\n let main_guard = sys::thread::guard::init();\n sys::stack_overflow::init();\n\n \/\/ Next, set up the current Thread with the guard information we just\n \/\/ created. 
Note that this isn't necessary in general for new threads,\n \/\/ but we just do this to name the main thread and to give it correct\n \/\/ info about the stack bounds.\n let thread = Thread::new(Some(\"main\".to_owned()));\n thread_info::set(main_guard, thread);\n\n \/\/ Store our args if necessary in a squirreled away location\n sys::args::init(argc, argv);\n\n \/\/ Let's run some code!\n #[cfg(feature = \"backtrace\")]\n let res = panic::catch_unwind(|| {\n ::sys_common::backtrace::__rust_begin_short_backtrace(main)\n });\n #[cfg(not(feature = \"backtrace\"))]\n let res = panic::catch_unwind(mem::transmute::<_, fn()>(main));\n sys_common::cleanup();\n res.is_err()\n };\n\n if failed {\n 101\n } else {\n 0\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>WIP: utf8<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make stdin always available<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add child_test<commit_after>extern crate yoga;\n\nuse yoga::{Direction, Node, StyleUnit, Undefined};\n\n#[test]\nfn test_reset_layout_when_child_removed() {\n\tlet mut root = Node::new();\n\n\tlet mut root_child0 = Node::new();\n\troot_child0.set_width(StyleUnit::Point(100.0.into()));\n\troot_child0.set_height(StyleUnit::Point(100.0.into()));\n\troot.insert_child(&mut root_child0, 0);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tassert_eq!(0, root_child0.get_layout_left() as i32);\n\tassert_eq!(0, root_child0.get_layout_top() as i32);\n\tassert_eq!(100, root_child0.get_layout_width() as i32);\n\tassert_eq!(100, root_child0.get_layout_height() as i32);\n\n\troot.remove_child(&mut root_child0);\n\n\tassert_eq!(0, root_child0.get_layout_left() as i32);\n\tassert_eq!(0, root_child0.get_layout_top() as i32);\n\tassert!(root_child0.get_layout_width().is_nan());\n\tassert!(root_child0.get_layout_height().is_nan());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Increase integration test allowed epsilon to twice standard error<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added solution for problem 1.<commit_after>\/\/\/ If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. 
The sum of these multiples is 23.\n\/\/\/ Find the sum of all the multiples of 3 or 5 below 1000.\n\nfn main() {\n println!(\"Finding the sum of all the multiples of 3 or 5 below 1000\");\n let limit = 1000;\n iterative_solution(limit);\n hof_solution(limit);\n}\n\n\/\/\/ Functional solution taking advantage of Rust's higher order functions.\nfn hof_solution(limit: i32) {\n let sum: i32 = (0..limit).filter(|n| n % 3 == 0 || n % 5 == 0).fold(0, |sum, i| sum + i);\n println!(\"Functional solution: {}\", sum);\n}\n\n\/\/\/ Standard iterative solution.\nfn iterative_solution(limit: i32) {\n let mut sum = 0;\n for n in 0..limit {\n if n % 3 == 0 || n % 5 == 0 {\n sum += n;\n }\n }\n println!(\"Iterative Solution: {}\", sum);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>bi-directional communication spike<commit_after>extern crate gtk;\n\nuse gtk::Builder;\n\n\npub struct GladeObjectFactory {\n builder: Builder\n}\n\nimpl GladeObjectFactory {\n pub fn new() -> GladeObjectFactory {\n \/\/ Load glade file \n let glade_str = include_str!(\"ui.glade\");\n let builder = Builder::new_from_string(glade_str);\n GladeObjectFactory {\n builder: builder\n }\n }\n\n pub fn get<T: gtk::IsA<gtk::Object>>(&self, name: &'static str) -> T {\n if let Some(gtk_obj) = self.builder.get_object(name) {\n return gtk_obj;\n }\n panic!(format!(\"UI file corrupted. Unknown element of this type '{}'\", name));\n } \n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ FIXME: merge with `bitvec`\n\nuse std::mem;\n\npub type Word = usize;\n\n\/\/\/ `BitSlice` provides helper methods for treating a `[Word]`\n\/\/\/ as a bitvector.\npub trait BitSlice {\n fn clear_bit(&mut self, idx: usize) -> bool;\n fn set_bit(&mut self, idx: usize) -> bool;\n fn get_bit(&self, idx: usize) -> bool;\n}\n\nimpl BitSlice for [Word] {\n \/\/\/ Clears bit at `idx` to 0; returns true iff this changed `self.`\n #[inline]\n fn clear_bit(&mut self, idx: usize) -> bool {\n let words = self;\n debug!(\"clear_bit: words={} idx={}\",\n bits_to_string(words, words.len() * mem::size_of::<Word>()), bit_str(idx));\n let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx);\n debug!(\"word={} bit_in_word={} bit_mask={}\", word, bit_in_word, bit_mask);\n let oldv = words[word];\n let newv = oldv & !bit_mask;\n words[word] = newv;\n oldv != newv\n }\n\n \/\/\/ Sets bit at `idx` to 1; returns true iff this changed `self.`\n #[inline]\n fn set_bit(&mut self, idx: usize) -> bool {\n let words = self;\n debug!(\"set_bit: words={} idx={}\",\n bits_to_string(words, words.len() * mem::size_of::<Word>()), bit_str(idx));\n let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx);\n debug!(\"word={} bit_in_word={} bit_mask={}\", word, bit_in_word, bit_mask);\n let oldv = words[word];\n let newv = oldv | bit_mask;\n words[word] = newv;\n oldv != newv\n }\n\n \/\/\/ Extracts value of bit at `idx` in `self`.\n #[inline]\n fn get_bit(&self, idx: usize) -> bool {\n let words = self;\n let BitLookup { word, bit_mask, .. 
} = bit_lookup(idx);\n (words[word] & bit_mask) != 0\n }\n}\n\nstruct BitLookup {\n \/\/\/ An index of the word holding the bit in original `[Word]` of query.\n word: usize,\n \/\/\/ Index of the particular bit within the word holding the bit.\n bit_in_word: usize,\n \/\/\/ Word with single 1-bit set corresponding to where the bit is located.\n bit_mask: Word,\n}\n\n#[inline]\nfn bit_lookup(bit: usize) -> BitLookup {\n let word_bits = mem::size_of::<Word>() * 8;\n let word = bit \/ word_bits;\n let bit_in_word = bit % word_bits;\n let bit_mask = 1 << bit_in_word;\n BitLookup { word: word, bit_in_word: bit_in_word, bit_mask: bit_mask }\n}\n\n\nfn bit_str(bit: usize) -> String {\n let byte = bit >> 3;\n let lobits = 1 << (bit & 0b111);\n format!(\"[{}:{}-{:02x}]\", bit, byte, lobits)\n}\n\npub fn bits_to_string(words: &[Word], bits: usize) -> String {\n let mut result = String::new();\n let mut sep = '[';\n\n \/\/ Note: this is a little endian printout of bytes.\n\n \/\/ i tracks how many bits we have printed so far.\n let mut i = 0;\n for &word in words.iter() {\n let mut v = word;\n loop { \/\/ for each byte in `v`:\n let remain = bits - i;\n \/\/ If less than a byte remains, then mask just that many bits.\n let mask = if remain <= 8 { (1 << remain) - 1 } else { 0xFF };\n assert!(mask <= 0xFF);\n let byte = v & mask;\n\n result.push(sep);\n result.push_str(&format!(\"{:02x}\", byte));\n\n if remain <= 8 { break; }\n v >>= 8;\n i += 8;\n sep = '-';\n }\n }\n result.push(']');\n return result\n}\n\n#[inline]\npub fn bitwise<Op:BitwiseOperator>(out_vec: &mut [Word],\n in_vec: &[Word],\n op: &Op) -> bool {\n assert_eq!(out_vec.len(), in_vec.len());\n let mut changed = false;\n for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {\n let old_val = *out_elt;\n let new_val = op.join(old_val, *in_elt);\n *out_elt = new_val;\n changed |= old_val != new_val;\n }\n changed\n}\n\npub trait BitwiseOperator {\n \/\/\/ Applies some bit-operation pointwise to each of the bits in the two inputs.\n fn join(&self, pred1: Word, pred2: Word) -> Word;\n}\n\npub struct Intersect;\nimpl BitwiseOperator for Intersect {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a & b }\n}\npub struct Union;\nimpl BitwiseOperator for Union {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a | b }\n}\npub struct Subtract;\nimpl BitwiseOperator for Subtract {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a & !b }\n}\n<commit_msg>Fix bitslice printing.<commit_after>\/\/ Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ FIXME: merge with `bitvec`\n\nuse std::mem;\n\npub type Word = usize;\n\n\/\/\/ `BitSlice` provides helper methods for treating a `[Word]`\n\/\/\/ as a bitvector.\npub trait BitSlice {\n fn clear_bit(&mut self, idx: usize) -> bool;\n fn set_bit(&mut self, idx: usize) -> bool;\n fn get_bit(&self, idx: usize) -> bool;\n}\n\nimpl BitSlice for [Word] {\n \/\/\/ Clears bit at `idx` to 0; returns true iff this changed `self.`\n #[inline]\n fn clear_bit(&mut self, idx: usize) -> bool {\n let words = self;\n debug!(\"clear_bit: words={} idx={}\",\n bits_to_string(words, words.len() * mem::size_of::<Word>() * 8), idx);\n let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx);\n debug!(\"word={} bit_in_word={} bit_mask=0x{:x}\", word, bit_in_word, bit_mask);\n let oldv = words[word];\n let newv = oldv & !bit_mask;\n words[word] = newv;\n oldv != newv\n }\n\n \/\/\/ Sets bit at `idx` to 1; returns true iff this changed `self.`\n #[inline]\n fn set_bit(&mut self, idx: usize) -> bool {\n let words = self;\n debug!(\"set_bit: words={} idx={}\",\n bits_to_string(words, words.len() * mem::size_of::<Word>() * 8), idx);\n let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx);\n debug!(\"word={} bit_in_word={} bit_mask={}\", word, bit_in_word, bit_mask);\n let oldv = words[word];\n let newv = oldv | bit_mask;\n words[word] = newv;\n oldv != newv\n }\n\n \/\/\/ Extracts value of bit at `idx` in `self`.\n #[inline]\n fn get_bit(&self, idx: usize) -> bool {\n let words = self;\n let BitLookup { word, bit_mask, .. } = bit_lookup(idx);\n (words[word] & bit_mask) != 0\n }\n}\n\nstruct BitLookup {\n \/\/\/ An index of the word holding the bit in original `[Word]` of query.\n word: usize,\n \/\/\/ Index of the particular bit within the word holding the bit.\n bit_in_word: usize,\n \/\/\/ Word with single 1-bit set corresponding to where the bit is located.\n bit_mask: Word,\n}\n\n#[inline]\nfn bit_lookup(bit: usize) -> BitLookup {\n let word_bits = mem::size_of::<Word>() * 8;\n let word = bit \/ word_bits;\n let bit_in_word = bit % word_bits;\n let bit_mask = 1 << bit_in_word;\n BitLookup { word: word, bit_in_word: bit_in_word, bit_mask: bit_mask }\n}\n\npub fn bits_to_string(words: &[Word], bits: usize) -> String {\n let mut result = String::new();\n let mut sep = '[';\n\n \/\/ Note: this is a little endian printout of bytes.\n\n \/\/ i tracks how many bits we have printed so far.\n let mut i = 0;\n for &word in words.iter() {\n let mut v = word;\n for _ in 0..mem::size_of::<Word>() { \/\/ for each byte in `v`:\n let remain = bits - i;\n \/\/ If less than a byte remains, then mask just that many bits.\n let mask = if remain <= 8 { (1 << remain) - 1 } else { 0xFF };\n assert!(mask <= 0xFF);\n let byte = v & mask;\n\n result.push(sep);\n result.push_str(&format!(\"{:02x}\", byte));\n\n if remain <= 8 { break; }\n v >>= 8;\n i += 8;\n sep = '-';\n }\n sep = '|';\n }\n result.push(']');\n return result\n}\n\n#[inline]\npub fn bitwise<Op:BitwiseOperator>(out_vec: &mut [Word],\n in_vec: &[Word],\n op: &Op) -> bool {\n assert_eq!(out_vec.len(), in_vec.len());\n let mut changed = false;\n for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {\n let old_val = *out_elt;\n let new_val = op.join(old_val, *in_elt);\n *out_elt = new_val;\n changed |= old_val != new_val;\n }\n changed\n}\n\npub trait BitwiseOperator {\n \/\/\/ Applies some bit-operation pointwise to each of the bits in the two inputs.\n fn 
join(&self, pred1: Word, pred2: Word) -> Word;\n}\n\npub struct Intersect;\nimpl BitwiseOperator for Intersect {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a & b }\n}\npub struct Union;\nimpl BitwiseOperator for Union {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a | b }\n}\npub struct Subtract;\nimpl BitwiseOperator for Subtract {\n #[inline]\n fn join(&self, a: Word, b: Word) -> Word { a & !b }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ \"Tifflin\" Kernel\n\/\/ - By John Hodge (thePowersGang)\n\/\/\n\/\/ Core\/lib\/vec.rs\n\/\/! Dynamically growable vector type\nuse core::iter::{FromIterator};\nuse core::{ops,fmt};\nuse heap::ArrayAlloc;\n\n\/\/\/ Growable array of items\npub struct Vec<T>\n{\n\tdata: ArrayAlloc<T>,\n\tsize: usize,\n}\n\n\/\/\/ Owning iterator\npub struct MoveItems<T>\n{\n\tdata: ArrayAlloc<T>,\n\tcount: usize,\n\tofs: usize,\n}\n\nimpl<T> Vec<T>\n{\n\t\/\/pub static EMPTY: Vec<T> = Vec { data: Unique(::memory::heap::ZERO_ALLOC), size: 0, capacity: 0 };\n\t\n\t\/\/\/ Create a new, empty vector\n\tpub fn new() -> Vec<T>\n\t{\n\t\tVec::with_capacity(0)\n\t}\n\t\/\/\/ Create a vector with an initialised capacity\n\tpub fn with_capacity(size: usize) -> Vec<T>\n\t{\n\t\tVec {\n\t\t\tdata: ArrayAlloc::new(size),\n\t\t\tsize: 0,\n\t\t}\n\t}\n\t\/\/\/ Populate vector using a provided callback\n\tpub fn from_fn<Fcn>(length: usize, op: Fcn) -> Vec<T>\n\twhere\n\t\tFcn: Fn(usize) -> T\n\t{\n\t\tlet mut ret = Vec::with_capacity(length);\n\t\tfor i in (0 .. length) {\n\t\t\tret.push( op(i) );\n\t\t}\n\t\tret\n\t}\n\n\t\/\/\/ Obtain a mutable pointer to an item within the vector\n\tfn get_mut_ptr(&mut self, index: usize) -> *mut T\n\t{\n\t\tassert!(index < self.size, \"Vec<{}>::get_mut_ptr(): Index out of range, {} >= {}\", type_name!(T), index, self.size);\n\t\tself.data.get_ptr_mut(index)\n\t}\n\t\n\t\/\/\/ Move contents into an iterator (consuming self)\n\tpub fn into_iter(mut self) -> MoveItems<T>\n\t{\n\t\tlet dataptr = ::core::mem::replace(&mut self.data, ArrayAlloc::new(0));\n\t\tlet count = self.size;\n\t\t::core::mem::forget(self);\n\t\tMoveItems {\n\t\t\tdata: dataptr,\n\t\t\tofs: 0,\n\t\t\tcount: count,\n\t\t}\n\t}\n\n\t\/\/\/ Clear the vector\n\tpub fn clear(&mut self)\n\t{\n\t\tself.truncate(0);\n\t}\n\t\n\tfn reserve_cap(&mut self, size: usize)\n\t{\n\t\t\/\/let newcap = ::lib::num::round_up(size, 1 << (64-size.leading_zeros()));\n\t\tlet newcap = size.next_power_of_two();\n\t\tif size > self.data.count()\n\t\t{\n\t\t\tif self.data.resize(newcap)\n\t\t\t{\n\t\t\t\t\/\/ All good\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tlet mut newdata = ArrayAlloc::new(newcap);\n\t\t\t\t\/\/ SAFE: Moves only items within the valid region\n\t\t\t\tunsafe {\n\t\t\t\t\tfor i in (0 .. 
self.size) {\n\t\t\t\t\t\tlet val = self.move_ent(i as usize);\n\t\t\t\t\t\t::core::ptr::write(newdata.get_ptr_mut(i), val);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\/\/log_debug!(\"Vec<{}>::reserve_cap({}): newdata={:?}\", type_name!(T), size, newdata);\n\t\t\t\tself.data = newdata;\n\t\t\t}\n\t\t}\n\t}\n\t\n\t\/\/\/ Reserve space in the vector for `extras` new elements\n\tpub fn reserve(&mut self, extras: usize) {\n\t\tlet newcap = self.size + extras;\n\t\tself.reserve_cap(newcap);\n\t}\n\t\n\t\/\/\/ Move out of a slot in the vector, leaving unitialise memory in its place\n\tunsafe fn move_ent(&mut self, pos: usize) -> T\n\t{\n\t\t::core::ptr::read(self.data.get_ptr(pos))\n\t}\n\n\t\/\/\/ Insert an item at the specified index (moving subsequent items up)\t\n\tpub fn insert(&mut self, pos: usize, value: T)\n\t{\n\t\t\/\/ Expand by one element\n\t\tlet ns = self.size + 1;\n\t\tself.reserve_cap(ns);\n\t\tself.size = ns;\n\t\tunsafe\n\t\t{\n\t\t\t\/\/ Move elements (pos .. len) to (pos+1 .. len+1)\n\t\t\tfor i in (pos .. self.size).rev()\n\t\t\t{\n\t\t\t\tlet src = self.data.get_ptr( i );\n\t\t\t\tlet dst = self.data.get_ptr_mut( i+1 );\n\t\t\t\t::core::ptr::write(dst, ::core::ptr::read(src));\n\t\t\t}\n\t\t\t\/\/ Store new element\n\t\t\t::core::ptr::write( self.data.get_ptr_mut(pos), value );\n\t\t}\n\t}\n\tpub fn remove(&mut self, pos: usize) -> T {\n\t\tassert!(pos < self.size);\n\t\tunsafe\n\t\t{\n\t\t\tlet rv = ::core::ptr::read( self.data.get_ptr_mut(pos) );\n\t\t\t\/\/ Move elements (pos+1 .. len) to (pos .. len-1)\n\t\t\tfor i in (pos+1 .. self.size)\n\t\t\t{\n\t\t\t\tlet src = self.data.get_ptr( i );\n\t\t\t\tlet dst = self.data.get_ptr_mut( i-1 );\n\t\t\t\t::core::ptr::write(dst, ::core::ptr::read(src));\n\t\t\t}\n\t\t\tself.size -= 1;\n\t\t\trv\n\t\t}\n\t}\n\t\n\t\/\/\/ Truncate a vector to the given size\n\tpub fn truncate(&mut self, newsize: usize)\n\t{\n\t\tif newsize < self.size\n\t\t{\n\t\t\t\/\/ SAFE: Drops items from new length to old length and invalidates them\n\t\t\tunsafe\n\t\t\t{\n\t\t\t\tfor i in (newsize .. self.size) {\n\t\t\t\t\t::core::mem::drop( ::core::ptr::read(self.get_mut_ptr(i) as *const T) );\n\t\t\t\t}\n\t\t\t\tself.size = newsize;\n\t\t\t}\n\t\t}\n\t}\n\t\n\tfn slice_mut(&mut self) -> &mut [T]\n\t{\n\t\t\/\/ SAFE: Slices only valid region\n\t\tunsafe { ::core::slice::from_raw_parts_mut(self.data.get_base_mut(), self.size) }\n\t}\n\tfn as_slice(&self) -> &[T]\n\t{\n\t\t\/\/ SAFE: Slices only valid region\n\t\tunsafe { ::core::slice::from_raw_parts(self.data.get_base() as *const T, self.size) }\n\t}\n\t\n\tpub fn push(&mut self, t: T)\n\t{\n\t\tlet pos = self.size;\n\t\tself.reserve(1);\n\t\tself.size += 1;\n\t\tlet ptr = self.get_mut_ptr(pos);\n\t\t\/\/ SAFE: Writes to newly validated position\n\t\tunsafe { ::core::ptr::write(ptr, t); }\n\t}\n\tpub fn pop(&mut self) -> Option<T>\n\t{\n\t\tif self.size == 0\n\t\t{\n\t\t\tNone\n\t\t}\n\t\telse\n\t\t{\n\t\t\tself.size -= 1;\n\t\t\tlet pos = self.size;\n\t\t\t\/\/ SAFE: Moves from newly invalidated position\n\t\t\tSome( unsafe { self.move_ent(pos) } )\n\t\t}\n\t}\n}\n\n#[macro_export]\n\/\/\/ Create a new vector in a manner similar array literals\n\/\/\/\n\/\/\/ ```\n\/\/\/ vec![1u32, 2, 3, 4]\n\/\/\/ ```\nmacro_rules! 
vec\n{\n\t($( $v:expr ),*) => ({\n\t\tlet mut v = $crate::lib::Vec::new();\n\t\tv.reserve( _count!( $($v),* ) );\n\t\t$( v.push($v); )*\n\t\tv\n\t\t});\n\t($v:expr; $c:expr) => ({\n\t\t$crate::lib::Vec::from_elem($c, $v)\n\t\t});\n}\npub fn from_elem<T: Clone>(e: T, count: usize) -> Vec<T> {\n\tVec::from_elem(count, e)\n}\n\nimpl<T: Clone> Vec<T>\n{\n\tpub fn resize(&mut self, new_len: usize, value: T)\n\t{\n\t\tif self.len() > new_len {\n\t\t\tself.truncate(new_len);\n\t\t}\n\t\telse {\n\t\t\tself.reserve_cap(new_len);\n\t\t\tfor _ in self.size .. new_len {\n\t\t\t\tself.push(value.clone());\n\t\t\t}\n\t\t}\n\t}\n\t\n\tpub fn from_elem(size: usize, elem: T) -> Vec<T>\n\t{\n\t\tlet mut ret = Vec::with_capacity(size);\n\t\tfor _ in 0 .. size-1 {\n\t\t\tret.push(elem.clone());\n\t\t}\n\t\tret.push(elem);\n\t\tret\n\t}\n\t\n\tpub fn push_all(&mut self, other: &[T])\n\t{\n\t\tself.reserve(other.len());\n\t\tfor v in other.iter() {\n\t\t\tself.push(v.clone());\n\t\t}\n\t}\n}\n\nimpl<T> ops::Deref for Vec<T> {\n\ttype Target = [T];\n\tfn deref(&self) -> &[T] {\n\t\tself.as_slice()\n\t}\n}\nimpl<T> AsRef<[T]> for Vec<T> {\n\tfn as_ref(&self) -> &[T] {\n\t\tself.as_slice()\n\t}\n}\nimpl<T> ops::DerefMut for Vec<T>\n{\n\tfn deref_mut(&mut self) -> &mut [T] {\n\t\tself.slice_mut()\n\t}\n}\nimpl<T> AsMut<[T]> for Vec<T> {\n\tfn as_mut(&mut self) -> &mut [T] {\n\t\tself.slice_mut()\n\t}\n}\nimpl<T> ops::Drop for Vec<T>\n{\n\tfn drop(&mut self)\n\t{\n\t\t\/\/ SAFE: Drops only items within the valid region\n\t\tunsafe {\n\t\t\twhile self.size > 0 {\n\t\t\t\tself.size -= 1;\n\t\t\t\tlet idx = self.size;\n\t\t\t\tlet ptr = self.data.get_ptr(idx) as *const T;\n\t\t\t\t::core::mem::drop( ::core::ptr::read(ptr) );\n\t\t\t}\n\t\t}\n\t}\n}\n\nimpl<T: Clone> Clone for Vec<T> {\n\tfn clone(&self) -> Vec<T> {\n\t\tself.iter().cloned().collect()\n\t}\n}\nimpl<'a, T: Clone> From<&'a [T]> for Vec<T> {\n\tfn from(v: &[T]) -> Vec<T> {\n\t\tv.iter().cloned().collect()\n\t}\n}\n\nimpl<T> ::core::default::Default for Vec<T>\n{\n\tfn default() -> Vec<T> { Vec::new() }\n}\n\n\/\/ ---\n\/\/ Delegating implementations\n\/\/ ---\nimpl<T: PartialEq> PartialEq for Vec<T> {\n\tfn eq(&self, other: &Self) -> bool {\n\t\tPartialEq::eq(self.as_slice(), other.as_slice())\n\t}\n}\nimpl<T: Eq> Eq for Vec<T> {\n}\nimpl<T: PartialOrd> PartialOrd for Vec<T> {\n\tfn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> {\n\t\tPartialOrd::partial_cmp(self.as_slice(), other.as_slice())\n\t}\n}\nimpl<T: Ord> Ord for Vec<T> {\n\tfn cmp(&self, other: &Self) -> ::core::cmp::Ordering {\n\t\tOrd::cmp(self.as_slice(), other.as_slice())\n\t}\n}\n\nimpl<T: fmt::Debug> fmt::Debug for Vec<T> {\n\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\t\tfmt::Debug::fmt(&**self, f)\n\t}\n}\n\n\/\/ ---\n\/\/ Indexing implementations\n\/\/ ---\nmacro_rules! 
vec_index {\n\t($T:ident -> $rv:ty : $($idx:ty)*) => { $(\n\t\timpl<$T> ops::Index<$idx> for Vec<$T>\n\t\t{\n\t\t\ttype Output = $rv;\n\t\t\tfn index<'a>(&'a self, index: $idx) -> &'a $rv\n\t\t\t{\n\t\t\t\t&self.as_slice()[index]\n\t\t\t}\n\t\t}\n\t\timpl<$T> ops::IndexMut<$idx> for Vec<$T>\n\t\t{\n\t\t\tfn index_mut<'a>(&'a mut self, index: $idx) -> &'a mut $rv\n\t\t\t{\n\t\t\t\t&mut self.slice_mut()[index]\n\t\t\t}\n\t\t}\n\t\t)* }\n\t}\nvec_index!{ T -> T : usize }\nvec_index!{ T -> [T] : ops::Range<usize> ops::RangeTo<usize> ops::RangeFrom<usize> ops::RangeFull }\n\nimpl<T> FromIterator<T> for Vec<T>\n{\n\tfn from_iter<IT>(src: IT) -> Vec<T>\n\twhere\n\t\tIT: ::core::iter::IntoIterator<Item=T>\n\t{\n\t\tlet iterator = src.into_iter();\n\t\tlet mut ret = Vec::new();\n\t\tif let (_, Some(size)) = iterator.size_hint()\n\t\t{\n\t\t\tret.reserve_cap(size);\n\t\t}\n\t\tfor val in iterator\n\t\t{\n\t\t\tret.push(val);\n\t\t}\n\t\tret\n\t}\n}\n\nimpl<T> ::core::iter::Extend<T> for Vec<T>\n{\n\tfn extend<I: IntoIterator<Item=T>>(&mut self, src: I)\n\t{\n\t\tlet iter = src.into_iter();\n\t\t\n\t\tif let (_, Some(size)) = iter.size_hint()\n\t\t{\n\t\t\tself.reserve(size);\n\t\t}\n\t\tfor val in iter {\n\t\t\tself.push(val);\n\t\t}\n\t}\n}\n\nimpl<T> IntoIterator for Vec<T>\n{\n\ttype IntoIter = MoveItems<T>;\n\ttype Item = T;\n\tfn into_iter(self) -> MoveItems<T> {\n\t\tself.into_iter()\n\t}\n}\nimpl<'a, T> IntoIterator for &'a Vec<T>\n{\n\ttype IntoIter = ::core::slice::Iter<'a,T>;\n\ttype Item = &'a T;\n\t\n\tfn into_iter(self) -> ::core::slice::Iter<'a, T> {\n\t\tself.iter()\n\t}\n}\n\nimpl<'a, T> IntoIterator for &'a mut Vec<T>\n{\n\ttype IntoIter = ::core::slice::IterMut<'a,T>;\n\ttype Item = &'a mut T;\n\t\n\tfn into_iter(self) -> ::core::slice::IterMut<'a, T> {\n\t\tself.iter_mut()\n\t}\n}\n\nimpl<T> MoveItems<T>\n{\n\t\/\/\/ Pop an item from the iterator\n\tfn pop_item(&mut self) -> T\n\t{\n\t\tassert!(self.ofs < self.count);\n\t\t\/\/ SAFE: Bounds checked above\n\t\tlet v: T = unsafe {\n\t\t\tlet ptr = self.data.get_ptr(self.ofs);\n\t\t\t::core::ptr::read(ptr as *const _)\n\t\t\t};\n\t\tself.ofs += 1;\n\t\tv\n\t}\n}\n\nimpl<T> Iterator for MoveItems<T>\n{\n\ttype Item = T;\n\tfn next(&mut self) -> Option<T>\n\t{\n\t\tif self.ofs == self.count\n\t\t{\n\t\t\tNone\n\t\t}\n\t\telse\n\t\t{\n\t\t\tSome( self.pop_item() )\n\t\t}\n\t}\n}\n\nimpl<T> ops::Drop for MoveItems<T>\n{\n\tfn drop(&mut self)\n\t{\n\t\tfor _ in (self.ofs .. self.count) {\n\t\t\tself.pop_item();\n\t\t}\n\t}\n}\n\n\/\/ vim: ft=rust\n<commit_msg>Usermode\/libstd - Fix bug in Vec::insert<commit_after>\/\/ \"Tifflin\" Kernel\n\/\/ - By John Hodge (thePowersGang)\n\/\/\n\/\/ Core\/lib\/vec.rs\n\/\/! 
Dynamically growable vector type\nuse core::iter::{FromIterator};\nuse core::{ops,fmt};\nuse heap::ArrayAlloc;\n\n\/\/\/ Growable array of items\npub struct Vec<T>\n{\n\tdata: ArrayAlloc<T>,\n\tsize: usize,\n}\n\n\/\/\/ Owning iterator\npub struct MoveItems<T>\n{\n\tdata: ArrayAlloc<T>,\n\tcount: usize,\n\tofs: usize,\n}\n\nimpl<T> Vec<T>\n{\n\t\/\/pub static EMPTY: Vec<T> = Vec { data: Unique(::memory::heap::ZERO_ALLOC), size: 0, capacity: 0 };\n\t\n\t\/\/\/ Create a new, empty vector\n\tpub fn new() -> Vec<T>\n\t{\n\t\tVec::with_capacity(0)\n\t}\n\t\/\/\/ Create a vector with an initialised capacity\n\tpub fn with_capacity(size: usize) -> Vec<T>\n\t{\n\t\tVec {\n\t\t\tdata: ArrayAlloc::new(size),\n\t\t\tsize: 0,\n\t\t}\n\t}\n\t\/\/\/ Populate vector using a provided callback\n\tpub fn from_fn<Fcn>(length: usize, op: Fcn) -> Vec<T>\n\twhere\n\t\tFcn: Fn(usize) -> T\n\t{\n\t\tlet mut ret = Vec::with_capacity(length);\n\t\tfor i in (0 .. length) {\n\t\t\tret.push( op(i) );\n\t\t}\n\t\tret\n\t}\n\n\t\/\/\/ Obtain a mutable pointer to an item within the vector\n\tfn get_mut_ptr(&mut self, index: usize) -> *mut T\n\t{\n\t\tassert!(index < self.size, \"Vec<{}>::get_mut_ptr(): Index out of range, {} >= {}\", type_name!(T), index, self.size);\n\t\tself.data.get_ptr_mut(index)\n\t}\n\t\n\t\/\/\/ Move contents into an iterator (consuming self)\n\tpub fn into_iter(mut self) -> MoveItems<T>\n\t{\n\t\tlet dataptr = ::core::mem::replace(&mut self.data, ArrayAlloc::new(0));\n\t\tlet count = self.size;\n\t\t::core::mem::forget(self);\n\t\tMoveItems {\n\t\t\tdata: dataptr,\n\t\t\tofs: 0,\n\t\t\tcount: count,\n\t\t}\n\t}\n\n\t\/\/\/ Clear the vector\n\tpub fn clear(&mut self)\n\t{\n\t\tself.truncate(0);\n\t}\n\t\n\tfn reserve_cap(&mut self, size: usize)\n\t{\n\t\t\/\/let newcap = ::lib::num::round_up(size, 1 << (64-size.leading_zeros()));\n\t\tlet newcap = size.next_power_of_two();\n\t\tif size > self.data.count()\n\t\t{\n\t\t\tif self.data.resize(newcap)\n\t\t\t{\n\t\t\t\t\/\/ All good\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tlet mut newdata = ArrayAlloc::new(newcap);\n\t\t\t\t\/\/ SAFE: Moves only items within the valid region\n\t\t\t\tunsafe {\n\t\t\t\t\tfor i in (0 .. self.size) {\n\t\t\t\t\t\tlet val = self.move_ent(i as usize);\n\t\t\t\t\t\t::core::ptr::write(newdata.get_ptr_mut(i), val);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t\/\/log_debug!(\"Vec<{}>::reserve_cap({}): newdata={:?}\", type_name!(T), size, newdata);\n\t\t\t\tself.data = newdata;\n\t\t\t}\n\t\t}\n\t}\n\t\n\t\/\/\/ Reserve space in the vector for `extras` new elements\n\tpub fn reserve(&mut self, extras: usize) {\n\t\tlet newcap = self.size + extras;\n\t\tself.reserve_cap(newcap);\n\t}\n\t\n\t\/\/\/ Move out of a slot in the vector, leaving unitialise memory in its place\n\tunsafe fn move_ent(&mut self, pos: usize) -> T\n\t{\n\t\t::core::ptr::read(self.data.get_ptr(pos))\n\t}\n\n\t\/\/\/ Insert an item at the specified index (moving subsequent items up)\t\n\tpub fn insert(&mut self, pos: usize, value: T)\n\t{\n\t\t\/\/ Expand by one element\n\t\tlet ns = self.size + 1;\n\t\tself.reserve_cap(ns);\n\t\tunsafe\n\t\t{\n\t\t\t\/\/ Move elements (pos .. len) to (pos+1 .. len+1)\n\t\t\tfor i in (pos .. 
self.size).rev()\n\t\t\t{\n\t\t\t\tlet src = self.data.get_ptr( i );\n\t\t\t\tlet dst = self.data.get_ptr_mut( i+1 );\n\t\t\t\t::core::ptr::write(dst, ::core::ptr::read(src));\n\t\t\t}\n\t\t\t\/\/ Store new element\n\t\t\t::core::ptr::write( self.data.get_ptr_mut(pos), value );\n\t\t}\n\t\tself.size = ns;\n\t}\n\tpub fn remove(&mut self, pos: usize) -> T {\n\t\tassert!(pos < self.size);\n\t\tunsafe\n\t\t{\n\t\t\tlet rv = ::core::ptr::read( self.data.get_ptr_mut(pos) );\n\t\t\t\/\/ Move elements (pos+1 .. len) to (pos .. len-1)\n\t\t\tfor i in (pos+1 .. self.size)\n\t\t\t{\n\t\t\t\tlet src = self.data.get_ptr( i );\n\t\t\t\tlet dst = self.data.get_ptr_mut( i-1 );\n\t\t\t\t::core::ptr::write(dst, ::core::ptr::read(src));\n\t\t\t}\n\t\t\tself.size -= 1;\n\t\t\trv\n\t\t}\n\t}\n\t\n\t\/\/\/ Truncate a vector to the given size\n\tpub fn truncate(&mut self, newsize: usize)\n\t{\n\t\tif newsize < self.size\n\t\t{\n\t\t\t\/\/ SAFE: Drops items from new length to old length and invalidates them\n\t\t\tunsafe\n\t\t\t{\n\t\t\t\tfor i in (newsize .. self.size) {\n\t\t\t\t\t::core::mem::drop( ::core::ptr::read(self.get_mut_ptr(i) as *const T) );\n\t\t\t\t}\n\t\t\t\tself.size = newsize;\n\t\t\t}\n\t\t}\n\t}\n\t\n\tfn slice_mut(&mut self) -> &mut [T]\n\t{\n\t\t\/\/ SAFE: Slices only valid region\n\t\tunsafe { ::core::slice::from_raw_parts_mut(self.data.get_base_mut(), self.size) }\n\t}\n\tfn as_slice(&self) -> &[T]\n\t{\n\t\t\/\/ SAFE: Slices only valid region\n\t\tunsafe { ::core::slice::from_raw_parts(self.data.get_base() as *const T, self.size) }\n\t}\n\t\n\tpub fn push(&mut self, t: T)\n\t{\n\t\tlet pos = self.size;\n\t\tself.reserve(1);\n\t\tself.size += 1;\n\t\tlet ptr = self.get_mut_ptr(pos);\n\t\t\/\/ SAFE: Writes to newly validated position\n\t\tunsafe { ::core::ptr::write(ptr, t); }\n\t}\n\tpub fn pop(&mut self) -> Option<T>\n\t{\n\t\tif self.size == 0\n\t\t{\n\t\t\tNone\n\t\t}\n\t\telse\n\t\t{\n\t\t\tself.size -= 1;\n\t\t\tlet pos = self.size;\n\t\t\t\/\/ SAFE: Moves from newly invalidated position\n\t\t\tSome( unsafe { self.move_ent(pos) } )\n\t\t}\n\t}\n}\n\n#[macro_export]\n\/\/\/ Create a new vector in a manner similar array literals\n\/\/\/\n\/\/\/ ```\n\/\/\/ vec![1u32, 2, 3, 4]\n\/\/\/ ```\nmacro_rules! vec\n{\n\t($( $v:expr ),*) => ({\n\t\tlet mut v = $crate::lib::Vec::new();\n\t\tv.reserve( _count!( $($v),* ) );\n\t\t$( v.push($v); )*\n\t\tv\n\t\t});\n\t($v:expr; $c:expr) => ({\n\t\t$crate::lib::Vec::from_elem($c, $v)\n\t\t});\n}\npub fn from_elem<T: Clone>(e: T, count: usize) -> Vec<T> {\n\tVec::from_elem(count, e)\n}\n\nimpl<T: Clone> Vec<T>\n{\n\tpub fn resize(&mut self, new_len: usize, value: T)\n\t{\n\t\tif self.len() > new_len {\n\t\t\tself.truncate(new_len);\n\t\t}\n\t\telse {\n\t\t\tself.reserve_cap(new_len);\n\t\t\tfor _ in self.size .. new_len {\n\t\t\t\tself.push(value.clone());\n\t\t\t}\n\t\t}\n\t}\n\t\n\tpub fn from_elem(size: usize, elem: T) -> Vec<T>\n\t{\n\t\tlet mut ret = Vec::with_capacity(size);\n\t\tfor _ in 0 .. 
size-1 {\n\t\t\tret.push(elem.clone());\n\t\t}\n\t\tret.push(elem);\n\t\tret\n\t}\n\t\n\tpub fn push_all(&mut self, other: &[T])\n\t{\n\t\tself.reserve(other.len());\n\t\tfor v in other.iter() {\n\t\t\tself.push(v.clone());\n\t\t}\n\t}\n}\n\nimpl<T> ops::Deref for Vec<T> {\n\ttype Target = [T];\n\tfn deref(&self) -> &[T] {\n\t\tself.as_slice()\n\t}\n}\nimpl<T> AsRef<[T]> for Vec<T> {\n\tfn as_ref(&self) -> &[T] {\n\t\tself.as_slice()\n\t}\n}\nimpl<T> ops::DerefMut for Vec<T>\n{\n\tfn deref_mut(&mut self) -> &mut [T] {\n\t\tself.slice_mut()\n\t}\n}\nimpl<T> AsMut<[T]> for Vec<T> {\n\tfn as_mut(&mut self) -> &mut [T] {\n\t\tself.slice_mut()\n\t}\n}\nimpl<T> ops::Drop for Vec<T>\n{\n\tfn drop(&mut self)\n\t{\n\t\t\/\/ SAFE: Drops only items within the valid region\n\t\tunsafe {\n\t\t\twhile self.size > 0 {\n\t\t\t\tself.size -= 1;\n\t\t\t\tlet idx = self.size;\n\t\t\t\tlet ptr = self.data.get_ptr(idx) as *const T;\n\t\t\t\t::core::mem::drop( ::core::ptr::read(ptr) );\n\t\t\t}\n\t\t}\n\t}\n}\n\nimpl<T: Clone> Clone for Vec<T> {\n\tfn clone(&self) -> Vec<T> {\n\t\tself.iter().cloned().collect()\n\t}\n}\nimpl<'a, T: Clone> From<&'a [T]> for Vec<T> {\n\tfn from(v: &[T]) -> Vec<T> {\n\t\tv.iter().cloned().collect()\n\t}\n}\n\nimpl<T> ::core::default::Default for Vec<T>\n{\n\tfn default() -> Vec<T> { Vec::new() }\n}\n\n\/\/ ---\n\/\/ Delegating implementations\n\/\/ ---\nimpl<T: PartialEq> PartialEq for Vec<T> {\n\tfn eq(&self, other: &Self) -> bool {\n\t\tPartialEq::eq(self.as_slice(), other.as_slice())\n\t}\n}\nimpl<T: Eq> Eq for Vec<T> {\n}\nimpl<T: PartialOrd> PartialOrd for Vec<T> {\n\tfn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> {\n\t\tPartialOrd::partial_cmp(self.as_slice(), other.as_slice())\n\t}\n}\nimpl<T: Ord> Ord for Vec<T> {\n\tfn cmp(&self, other: &Self) -> ::core::cmp::Ordering {\n\t\tOrd::cmp(self.as_slice(), other.as_slice())\n\t}\n}\n\nimpl<T: fmt::Debug> fmt::Debug for Vec<T> {\n\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\t\tfmt::Debug::fmt(&**self, f)\n\t}\n}\n\n\/\/ ---\n\/\/ Indexing implementations\n\/\/ ---\nmacro_rules! 
vec_index {\n\t($T:ident -> $rv:ty : $($idx:ty)*) => { $(\n\t\timpl<$T> ops::Index<$idx> for Vec<$T>\n\t\t{\n\t\t\ttype Output = $rv;\n\t\t\tfn index<'a>(&'a self, index: $idx) -> &'a $rv\n\t\t\t{\n\t\t\t\t&self.as_slice()[index]\n\t\t\t}\n\t\t}\n\t\timpl<$T> ops::IndexMut<$idx> for Vec<$T>\n\t\t{\n\t\t\tfn index_mut<'a>(&'a mut self, index: $idx) -> &'a mut $rv\n\t\t\t{\n\t\t\t\t&mut self.slice_mut()[index]\n\t\t\t}\n\t\t}\n\t\t)* }\n\t}\nvec_index!{ T -> T : usize }\nvec_index!{ T -> [T] : ops::Range<usize> ops::RangeTo<usize> ops::RangeFrom<usize> ops::RangeFull }\n\nimpl<T> FromIterator<T> for Vec<T>\n{\n\tfn from_iter<IT>(src: IT) -> Vec<T>\n\twhere\n\t\tIT: ::core::iter::IntoIterator<Item=T>\n\t{\n\t\tlet iterator = src.into_iter();\n\t\tlet mut ret = Vec::new();\n\t\tif let (_, Some(size)) = iterator.size_hint()\n\t\t{\n\t\t\tret.reserve_cap(size);\n\t\t}\n\t\tfor val in iterator\n\t\t{\n\t\t\tret.push(val);\n\t\t}\n\t\tret\n\t}\n}\n\nimpl<T> ::core::iter::Extend<T> for Vec<T>\n{\n\tfn extend<I: IntoIterator<Item=T>>(&mut self, src: I)\n\t{\n\t\tlet iter = src.into_iter();\n\t\t\n\t\tif let (_, Some(size)) = iter.size_hint()\n\t\t{\n\t\t\tself.reserve(size);\n\t\t}\n\t\tfor val in iter {\n\t\t\tself.push(val);\n\t\t}\n\t}\n}\n\nimpl<T> IntoIterator for Vec<T>\n{\n\ttype IntoIter = MoveItems<T>;\n\ttype Item = T;\n\tfn into_iter(self) -> MoveItems<T> {\n\t\tself.into_iter()\n\t}\n}\nimpl<'a, T> IntoIterator for &'a Vec<T>\n{\n\ttype IntoIter = ::core::slice::Iter<'a,T>;\n\ttype Item = &'a T;\n\t\n\tfn into_iter(self) -> ::core::slice::Iter<'a, T> {\n\t\tself.iter()\n\t}\n}\n\nimpl<'a, T> IntoIterator for &'a mut Vec<T>\n{\n\ttype IntoIter = ::core::slice::IterMut<'a,T>;\n\ttype Item = &'a mut T;\n\t\n\tfn into_iter(self) -> ::core::slice::IterMut<'a, T> {\n\t\tself.iter_mut()\n\t}\n}\n\nimpl<T> MoveItems<T>\n{\n\t\/\/\/ Pop an item from the iterator\n\tfn pop_item(&mut self) -> T\n\t{\n\t\tassert!(self.ofs < self.count);\n\t\t\/\/ SAFE: Bounds checked above\n\t\tlet v: T = unsafe {\n\t\t\tlet ptr = self.data.get_ptr(self.ofs);\n\t\t\t::core::ptr::read(ptr as *const _)\n\t\t\t};\n\t\tself.ofs += 1;\n\t\tv\n\t}\n}\n\nimpl<T> Iterator for MoveItems<T>\n{\n\ttype Item = T;\n\tfn next(&mut self) -> Option<T>\n\t{\n\t\tif self.ofs == self.count\n\t\t{\n\t\t\tNone\n\t\t}\n\t\telse\n\t\t{\n\t\t\tSome( self.pop_item() )\n\t\t}\n\t}\n}\n\nimpl<T> ops::Drop for MoveItems<T>\n{\n\tfn drop(&mut self)\n\t{\n\t\tfor _ in (self.ofs .. self.count) {\n\t\t\tself.pop_item();\n\t\t}\n\t}\n}\n\n\/\/ vim: ft=rust\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add caller_location tests to Miri<commit_after>#![feature(track_caller, core_intrinsics)]\n\nuse std::panic::Location;\n\n#[track_caller]\nfn tracked() -> &'static Location<'static> {\n Location::caller() \/\/ most importantly, we never get line 7\n}\n\nfn nested_intrinsic() -> &'static Location<'static> {\n Location::caller()\n}\n\nfn nested_tracked() -> &'static Location<'static> {\n tracked()\n}\n\nmacro_rules! 
caller_location_from_macro {\n () => (core::panic::Location::caller());\n}\n\nfn main() {\n let location = Location::caller();\n assert_eq!(location.file(), file!());\n assert_eq!(location.line(), 23);\n assert_eq!(location.column(), 20);\n\n let tracked = tracked();\n assert_eq!(tracked.file(), file!());\n assert_eq!(tracked.line(), 28);\n assert_eq!(tracked.column(), 19);\n\n let nested = nested_intrinsic();\n assert_eq!(nested.file(), file!());\n assert_eq!(nested.line(), 11);\n assert_eq!(nested.column(), 5);\n\n let contained = nested_tracked();\n assert_eq!(contained.file(), file!());\n assert_eq!(contained.line(), 15);\n assert_eq!(contained.column(), 5);\n\n \/\/ `Location::caller()` in a macro should behave similarly to `file!` and `line!`,\n \/\/ i.e. point to where the macro was invoked, instead of the macro itself.\n let inmacro = caller_location_from_macro!();\n assert_eq!(inmacro.file(), file!());\n assert_eq!(inmacro.line(), 45);\n assert_eq!(inmacro.column(), 19);\n\n let intrinsic = core::intrinsics::caller_location();\n assert_eq!(intrinsic.file(), file!());\n assert_eq!(intrinsic.line(), 50);\n assert_eq!(intrinsic.column(), 21);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added derive_clone example to parse-macros.<commit_after>#![feature(plugin)]\n#![plugin(parse_generics_poc)]\n\n#[macro_use] extern crate custom_derive;\n#[macro_use] extern crate parse_macros;\n\nmacro_rules! Clone_mac {\n (\n () $($tail:tt)*\n ) => {\n parse_item! {\n then Clone_mac! { @item },\n $($tail)*\n }\n };\n\n (\n @item\n enum {\n attrs: $_attrs:tt,\n vis: $_vis:tt,\n name: $name:ident,\n generics: {\n constr: [$($constr:tt)*],\n params: [$($params:tt)*],\n ltimes: $_ltimes:tt,\n tnames: [$($tnames:ident,)*],\n },\n where: {\n preds: [$($preds:tt)*],\n },\n variants: [$($vars:tt,)*],\n $($_enum_tail:tt)*\n }\n ) => {\n Clone_mac! 
{\n @inject_where\n (impl<$($constr)*> Clone for $name<$($params)*>),\n where ($($tnames: Clone,)* $($preds)*)\n ({\n fn clone(&self) -> Self {\n match *self {\n $(\n Clone_mac!(@var_match_pat $name, $vars)\n => Clone_mac!(@var_match_body $name, $vars),\n )*\n }\n }\n })\n }\n };\n\n (\n @var_match_pat\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: unitary,\n name: $vname:ident,\n fields: (),\n num_fields: 0,\n }\n ) => {\n $name::$vname\n };\n\n (\n @var_match_body\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: unitary,\n name: $vname:ident,\n fields: (),\n num_fields: 0,\n }\n ) => {\n $name::$vname\n };\n\n (\n @var_match_pat\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: tuple,\n name: $vname:ident,\n fields: [\n $(\n {\n ord: ($_ford:tt, $ford_ident:ident),\n attrs: $_fattrs:tt,\n vis: $_fvis:tt,\n ty: $_fty:ty,\n },\n )+\n ],\n num_fields: $_num_fields:tt,\n }\n ) => {\n $name::$vname($(ref $ford_ident,)+)\n };\n\n (\n @var_match_body\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: tuple,\n name: $vname:ident,\n fields: [\n $(\n {\n ord: ($_ford:tt, $ford_ident:ident),\n attrs: $_fattrs:tt,\n vis: $_fvis:tt,\n ty: $_fty:ty,\n },\n )+\n ],\n num_fields: $_num_fields:tt,\n }\n ) => {\n $name::$vname($(Clone::clone($ford_ident),)+)\n };\n\n (\n @var_match_pat\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: record,\n name: $vname:ident,\n fields: [\n $(\n {\n ord: $_ford:tt,\n attrs: $_fattrs:tt,\n vis: $_fvis:tt,\n ty: $_fty:ty,\n name: $fname:ident,\n },\n )+\n ],\n num_fields: $_num_fields:tt,\n }\n ) => {\n $name::$vname { $(ref $fname,)+ }\n };\n\n (\n @var_match_body\n $name:ident,\n {\n ord: $_ord:tt,\n attrs: $_attrs:tt,\n kind: record,\n name: $vname:ident,\n fields: [\n $(\n {\n ord: $_ford:tt,\n attrs: $_fattrs:tt,\n vis: $_fvis:tt,\n ty: $_fty:ty,\n name: $fname:ident,\n },\n )+\n ],\n num_fields: $_num_fields:tt,\n }\n ) => {\n $name::$vname { $($fname: Clone::clone($fname),)+ }\n };\n\n (\n @inject_where\n ($($before:tt)*),\n where ($(,)*)\n ($($after:tt)*)\n ) => {\n Clone_mac! {\n @as_item\n $($before)* $($after)*\n }\n };\n\n (\n @inject_where\n ($($before:tt)*),\n where ($($preds:tt)+)\n ($($after:tt)*)\n ) => {\n Clone_mac! {\n @as_item\n $($before)* where $($preds)* $($after)*\n }\n };\n\n (@as_item $i:item) => { $i };\n}\n\ncustom_derive! {\n #[derive(Copy, Clone_mac)]\n enum EnumA {}\n}\n\ncustom_derive! {\n #[derive(Copy, Clone_mac)]\n enum EnumB { A }\n}\n\ncustom_derive! {\n #[derive(Copy, Clone_mac)]\n enum EnumC { A, B, C }\n}\n\ncustom_derive! {\n #[derive(Copy, Clone_mac)]\n enum EnumD { A, B(i32), C(u8, u8, u8) }\n}\n\ncustom_derive! 
{\n #[derive(Copy, Clone_mac)]\n enum EnumE { A { r: u8, g: u8, b: u8, } }\n}\n\nfn main() {\n if false { let _: EnumA = panic!(); }\n let _ = EnumB::A.clone();\n let _ = EnumC::A.clone();\n let _ = EnumC::B.clone();\n let _ = EnumC::C.clone();\n let _ = EnumD::A.clone();\n let _ = EnumD::B(42).clone();\n let _ = EnumD::C(1, 2, 3).clone();\n let _ = (EnumE::A { r: 1, g: 2, b: 3 }).clone();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add DiaryId type<commit_after>use std::convert::Into;\nuse std::str::FromStr;\n\nuse chrono::naive::datetime::NaiveDateTime;\nuse chrono::naive::time::NaiveTime;\nuse chrono::naive::date::NaiveDate;\nuse chrono::Datelike;\nuse chrono::Timelike;\nuse regex::Regex;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse module_path::ModuleEntryPath;\n\n#[derive(Debug, Clone)]\npub struct DiaryId {\n name: String,\n year: i32,\n month: u32,\n day: u32,\n hour: u32,\n minute: u32,\n}\n\nimpl DiaryId {\n\n pub fn new(name: String, y: i32, m: u32, d: u32, h: u32, min: u32) -> DiaryId {\n DiaryId {\n name: name,\n year: y,\n month: m,\n day: d,\n hour: h,\n minute: min,\n }\n }\n\n pub fn from_datetime<DT: Datelike + Timelike>(diary_name: String, dt: DT) -> DiaryId {\n DiaryId::new(diary_name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n pub fn diary_name(&self) -> &String {\n &self.name\n }\n\n pub fn year(&self) -> i32 {\n self.year\n }\n\n pub fn month(&self) -> u32 {\n self.month\n }\n\n pub fn day(&self) -> u32 {\n self.day\n }\n\n pub fn hour(&self) -> u32 {\n self.hour\n }\n\n pub fn minute(&self) -> u32 {\n self.minute\n }\n\n}\n\nimpl IntoStoreId for DiaryId {\n\n fn into_storeid(self) -> StoreId {\n let s : String = self.into();\n ModuleEntryPath::new(s).into_storeid()\n }\n\n}\n\nimpl Into<String> for DiaryId {\n\n fn into(self) -> String {\n format!(\"{}\/{}\/{}-{}-{}:{}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Into<NaiveDateTime> for DiaryId {\n\n fn into(self) -> NaiveDateTime {\n let d = NaiveDate::from_ymd(self.year, self.month, self.day);\n let t = NaiveTime::from_hms(self.hour, self.minute, 0);\n NaiveDateTime::new(d, t)\n }\n\n}\n\npub trait FromStoreId : Sized {\n\n fn from_storeid(&StoreId) -> Option<Self>;\n\n}\n\nimpl FromStoreId for DiaryId {\n\n fn from_storeid(s: &StoreId) -> Option<DiaryId> {\n lazy_static! {\n static ref RE: Regex = Regex::new(r\"(?x)\n (.*)\n \/(?P<name>(.*))\n \/(?P<year>\\d{4})\n \/(?P<month>\\d{2})\n -(?P<day>\\d{2})\n -(?P<hour>\\d{2})\n :(?P<minute>\\d{2})\n \"\n ).unwrap();\n }\n\n s.to_str()\n .map(|s| { debug!(\"StoreId = {:?}\", s); s })\n .and_then(|s| RE.captures(s))\n .and_then(|caps| {\n let name = caps.at(0);\n let year = caps.at(1);\n let month = caps.at(2);\n let day = caps.at(3);\n let hour = caps.at(4);\n let minute = caps.at(5);\n\n debug!(\"some? name = {:?}\", name.is_some());\n debug!(\"some? year = {:?}\", year.is_some());\n debug!(\"some? month = {:?}\", month.is_some());\n debug!(\"some? day = {:?}\", day.is_some());\n debug!(\"some? hour = {:?}\", hour.is_some());\n debug!(\"some? 
minute = {:?}\", minute.is_some());\n\n if [name, year, month, day, hour, minute].iter().all(|x| x.is_some()) {\n let year = {\n match i32::from_str(year.unwrap()) {\n Ok(x) => x,\n Err(_) => return None,\n }\n };\n\n let month = {\n match u32::from_str(month.unwrap()) {\n Ok(x) => x,\n Err(_) => return None,\n }\n };\n\n let day = {\n match u32::from_str(day.unwrap()) {\n Ok(x) => x,\n Err(_) => return None,\n }\n };\n\n let hour = {\n match u32::from_str(hour.unwrap()) {\n Ok(x) => x,\n Err(_) => return None,\n }\n };\n\n let minute = {\n match u32::from_str(minute.unwrap()) {\n Ok(x) => x,\n Err(_) => return None,\n }\n };\n\n Some(DiaryId {\n name : String::from(name.unwrap()),\n year : year,\n month : month,\n day : day,\n hour : hour,\n minute : minute,\n })\n } else {\n None\n }\n })\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`](..\/result\/enum.Result.html) type for I\/O\n\/\/\/ operations.\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. 
`Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if 
the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[stable(feature = \"read_exact\", since = \"1.6.0\")]\n UnexpectedEof,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => match self.kind() {\n ErrorKind::NotFound => \"entity not found\",\n ErrorKind::PermissionDenied => \"permission denied\",\n ErrorKind::ConnectionRefused => \"connection refused\",\n ErrorKind::ConnectionReset => \"connection reset\",\n ErrorKind::ConnectionAborted => \"connection aborted\",\n ErrorKind::NotConnected => \"not connected\",\n ErrorKind::AddrInUse => \"address in use\",\n ErrorKind::AddrNotAvailable => \"address not available\",\n ErrorKind::BrokenPipe => \"broken pipe\",\n ErrorKind::AlreadyExists => \"entity already exists\",\n ErrorKind::WouldBlock => \"operation would block\",\n ErrorKind::InvalidInput => \"invalid input parameter\",\n ErrorKind::InvalidData => \"invalid data\",\n ErrorKind::TimedOut => \"timed out\",\n ErrorKind::WriteZero => \"write zero\",\n ErrorKind::Interrupted => \"operation interrupted\",\n ErrorKind::Other => \"other os error\",\n ErrorKind::UnexpectedEof => \"unexpected end of file\",\n ErrorKind::__Nonexhaustive => unreachable!()\n },\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<commit_msg>Rollup merge of #34612 - frewsxcv:io-error-from_raw_os_error, r=steveklabnik<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse boxed::Box;\nuse convert::Into;\nuse error;\nuse fmt;\nuse marker::{Send, Sync};\nuse option::Option::{self, Some, None};\nuse result;\nuse sys;\n\n\/\/\/ A specialized [`Result`](..\/result\/enum.Result.html) type for I\/O\n\/\/\/ operations.\n\/\/\/\n\/\/\/ This type is broadly used across `std::io` for any operation which may\n\/\/\/ produce an error.\n\/\/\/\n\/\/\/ This typedef is generally used to avoid writing out `io::Error` directly and\n\/\/\/ is otherwise a direct mapping to `Result`.\n\/\/\/\n\/\/\/ While usual Rust style is to import types directly, aliases of `Result`\n\/\/\/ often are not, to make it easier to distinguish between them. 
`Result` is\n\/\/\/ generally assumed to be `std::result::Result`, and so users of this alias\n\/\/\/ will generally use `io::Result` instead of shadowing the prelude's import\n\/\/\/ of `std::result::Result`.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ A convenience function that bubbles an `io::Result` to its caller:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::io;\n\/\/\/\n\/\/\/ fn get_string() -> io::Result<String> {\n\/\/\/ let mut buffer = String::new();\n\/\/\/\n\/\/\/ try!(io::stdin().read_line(&mut buffer));\n\/\/\/\n\/\/\/ Ok(buffer)\n\/\/\/ }\n\/\/\/ ```\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type Result<T> = result::Result<T, Error>;\n\n\/\/\/ The error type for I\/O operations of the `Read`, `Write`, `Seek`, and\n\/\/\/ associated traits.\n\/\/\/\n\/\/\/ Errors mostly originate from the underlying OS, but custom instances of\n\/\/\/ `Error` can be created with crafted error messages and a particular value of\n\/\/\/ `ErrorKind`.\n#[derive(Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct Error {\n repr: Repr,\n}\n\nenum Repr {\n Os(i32),\n Custom(Box<Custom>),\n}\n\n#[derive(Debug)]\nstruct Custom {\n kind: ErrorKind,\n error: Box<error::Error+Send+Sync>,\n}\n\n\/\/\/ A list specifying general categories of I\/O error.\n\/\/\/\n\/\/\/ This list is intended to grow over time and it is not recommended to\n\/\/\/ exhaustively match against it.\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\n#[allow(deprecated)]\npub enum ErrorKind {\n \/\/\/ An entity was not found, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotFound,\n \/\/\/ The operation lacked the necessary privileges to complete.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n PermissionDenied,\n \/\/\/ The connection was refused by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionRefused,\n \/\/\/ The connection was reset by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionReset,\n \/\/\/ The connection was aborted (terminated) by the remote server.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n ConnectionAborted,\n \/\/\/ The network operation failed because it was not connected yet.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n NotConnected,\n \/\/\/ A socket address could not be bound because the address is already in\n \/\/\/ use elsewhere.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrInUse,\n \/\/\/ A nonexistent interface was requested or the requested address was not\n \/\/\/ local.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AddrNotAvailable,\n \/\/\/ The operation failed because a pipe was closed.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n BrokenPipe,\n \/\/\/ An entity already exists, often a file.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n AlreadyExists,\n \/\/\/ The operation needs to block to complete, but the blocking operation was\n \/\/\/ requested to not occur.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WouldBlock,\n \/\/\/ A parameter was incorrect.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n InvalidInput,\n \/\/\/ Data not valid for the operation were encountered.\n \/\/\/\n \/\/\/ Unlike `InvalidInput`, this typically means that the operation\n \/\/\/ parameters were valid, however the error was caused by malformed\n \/\/\/ input data.\n \/\/\/\n \/\/\/ For example, a function that reads a file into a string will error with\n \/\/\/ `InvalidData` if 
the file's contents are not valid UTF-8.\n #[stable(feature = \"io_invalid_data\", since = \"1.2.0\")]\n InvalidData,\n \/\/\/ The I\/O operation's timeout expired, causing it to be canceled.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n TimedOut,\n \/\/\/ An error returned when an operation could not be completed because a\n \/\/\/ call to `write` returned `Ok(0)`.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it wrote a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ written.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n WriteZero,\n \/\/\/ This operation was interrupted.\n \/\/\/\n \/\/\/ Interrupted operations can typically be retried.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Interrupted,\n \/\/\/ Any I\/O error not part of this list.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n Other,\n\n \/\/\/ An error returned when an operation could not be completed because an\n \/\/\/ \"end of file\" was reached prematurely.\n \/\/\/\n \/\/\/ This typically means that an operation could only succeed if it read a\n \/\/\/ particular number of bytes but only a smaller number of bytes could be\n \/\/\/ read.\n #[stable(feature = \"read_exact\", since = \"1.6.0\")]\n UnexpectedEof,\n\n \/\/\/ Any I\/O error not part of this list.\n #[unstable(feature = \"io_error_internals\",\n reason = \"better expressed through extensible enums that this \\\n enum cannot be exhaustively matched against\",\n issue = \"0\")]\n #[doc(hidden)]\n __Nonexhaustive,\n}\n\nimpl Error {\n \/\/\/ Creates a new I\/O error from a known kind of error as well as an\n \/\/\/ arbitrary error payload.\n \/\/\/\n \/\/\/ This function is used to generically create I\/O errors which do not\n \/\/\/ originate from the OS itself. 
The `error` argument is an arbitrary\n \/\/\/ payload which will be contained in this `Error`.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::io::{Error, ErrorKind};\n \/\/\/\n \/\/\/ \/\/ errors can be created from strings\n \/\/\/ let custom_error = Error::new(ErrorKind::Other, \"oh no!\");\n \/\/\/\n \/\/\/ \/\/ errors can also be created from other errors\n \/\/\/ let custom_error2 = Error::new(ErrorKind::Interrupted, custom_error);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn new<E>(kind: ErrorKind, error: E) -> Error\n where E: Into<Box<error::Error+Send+Sync>>\n {\n Self::_new(kind, error.into())\n }\n\n fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {\n Error {\n repr: Repr::Custom(Box::new(Custom {\n kind: kind,\n error: error,\n }))\n }\n }\n\n \/\/\/ Returns an error representing the last OS error which occurred.\n \/\/\/\n \/\/\/ This function reads the value of `errno` for the target platform (e.g.\n \/\/\/ `GetLastError` on Windows) and will return a corresponding instance of\n \/\/\/ `Error` for the error code.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn last_os_error() -> Error {\n Error::from_raw_os_error(sys::os::errno() as i32)\n }\n\n \/\/\/ Creates a new instance of an `Error` from a particular OS error code.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ On Linux:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # if cfg!(target_os = \"linux\") {\n \/\/\/ use std::io;\n \/\/\/\n \/\/\/ let error = io::Error::from_raw_os_error(98);\n \/\/\/ assert_eq!(error.kind(), io::ErrorKind::AddrInUse);\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ On Windows:\n \/\/\/\n \/\/\/ ```\n \/\/\/ # if cfg!(windows) {\n \/\/\/ use std::io;\n \/\/\/\n \/\/\/ let error = io::Error::from_raw_os_error(10048);\n \/\/\/ assert_eq!(error.kind(), io::ErrorKind::AddrInUse);\n \/\/\/ # }\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn from_raw_os_error(code: i32) -> Error {\n Error { repr: Repr::Os(code) }\n }\n\n \/\/\/ Returns the OS error that this error represents (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `last_os_error` or\n \/\/\/ `from_raw_os_error`, then this function will return `Some`, otherwise\n \/\/\/ it will return `None`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn raw_os_error(&self) -> Option<i32> {\n match self.repr {\n Repr::Os(i) => Some(i),\n Repr::Custom(..) => None,\n }\n }\n\n \/\/\/ Returns a reference to the inner error wrapped by this error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_ref(&self) -> Option<&(error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(ref c) => Some(&*c.error),\n }\n }\n\n \/\/\/ Returns a mutable reference to the inner error wrapped by this error\n \/\/\/ (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn get_mut(&mut self) -> Option<&mut (error::Error+Send+Sync+'static)> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref mut c) => Some(&mut *c.error),\n }\n }\n\n \/\/\/ Consumes the `Error`, returning its inner error (if any).\n \/\/\/\n \/\/\/ If this `Error` was constructed via `new` then this function will\n \/\/\/ return `Some`, otherwise it will return `None`.\n #[stable(feature = \"io_error_inner\", since = \"1.3.0\")]\n pub fn into_inner(self) -> Option<Box<error::Error+Send+Sync>> {\n match self.repr {\n Repr::Os(..) => None,\n Repr::Custom(c) => Some(c.error)\n }\n }\n\n \/\/\/ Returns the corresponding `ErrorKind` for this error.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn kind(&self) -> ErrorKind {\n match self.repr {\n Repr::Os(code) => sys::decode_error_kind(code),\n Repr::Custom(ref c) => c.kind,\n }\n }\n}\n\nimpl fmt::Debug for Repr {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Repr::Os(ref code) =>\n fmt.debug_struct(\"Os\").field(\"code\", code)\n .field(\"message\", &sys::os::error_string(*code)).finish(),\n Repr::Custom(ref c) => fmt.debug_tuple(\"Custom\").field(c).finish(),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Display for Error {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n match self.repr {\n Repr::Os(code) => {\n let detail = sys::os::error_string(code);\n write!(fmt, \"{} (os error {})\", detail, code)\n }\n Repr::Custom(ref c) => c.error.fmt(fmt),\n }\n }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl error::Error for Error {\n fn description(&self) -> &str {\n match self.repr {\n Repr::Os(..) => match self.kind() {\n ErrorKind::NotFound => \"entity not found\",\n ErrorKind::PermissionDenied => \"permission denied\",\n ErrorKind::ConnectionRefused => \"connection refused\",\n ErrorKind::ConnectionReset => \"connection reset\",\n ErrorKind::ConnectionAborted => \"connection aborted\",\n ErrorKind::NotConnected => \"not connected\",\n ErrorKind::AddrInUse => \"address in use\",\n ErrorKind::AddrNotAvailable => \"address not available\",\n ErrorKind::BrokenPipe => \"broken pipe\",\n ErrorKind::AlreadyExists => \"entity already exists\",\n ErrorKind::WouldBlock => \"operation would block\",\n ErrorKind::InvalidInput => \"invalid input parameter\",\n ErrorKind::InvalidData => \"invalid data\",\n ErrorKind::TimedOut => \"timed out\",\n ErrorKind::WriteZero => \"write zero\",\n ErrorKind::Interrupted => \"operation interrupted\",\n ErrorKind::Other => \"other os error\",\n ErrorKind::UnexpectedEof => \"unexpected end of file\",\n ErrorKind::__Nonexhaustive => unreachable!()\n },\n Repr::Custom(ref c) => c.error.description(),\n }\n }\n\n fn cause(&self) -> Option<&error::Error> {\n match self.repr {\n Repr::Os(..) 
=> None,\n Repr::Custom(ref c) => c.error.cause(),\n }\n }\n}\n\nfn _assert_error_is_sync_send() {\n fn _is_sync_send<T: Sync+Send>() {}\n _is_sync_send::<Error>();\n}\n\n#[cfg(test)]\nmod test {\n use prelude::v1::*;\n use super::{Error, ErrorKind};\n use error;\n use fmt;\n use sys::os::error_string;\n\n #[test]\n fn test_debug_error() {\n let code = 6;\n let msg = error_string(code);\n let err = Error { repr: super::Repr::Os(code) };\n let expected = format!(\"Error {{ repr: Os {{ code: {:?}, message: {:?} }} }}\", code, msg);\n assert_eq!(format!(\"{:?}\", err), expected);\n }\n\n #[test]\n fn test_downcasting() {\n #[derive(Debug)]\n struct TestError;\n\n impl fmt::Display for TestError {\n fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {\n Ok(())\n }\n }\n\n impl error::Error for TestError {\n fn description(&self) -> &str {\n \"asdf\"\n }\n }\n\n \/\/ we have to call all of these UFCS style right now since method\n \/\/ resolution won't implicitly drop the Send+Sync bounds\n let mut err = Error::new(ErrorKind::Other, TestError);\n assert!(err.get_ref().unwrap().is::<TestError>());\n assert_eq!(\"asdf\", err.get_ref().unwrap().description());\n assert!(err.get_mut().unwrap().is::<TestError>());\n let extracted = err.into_inner().unwrap();\n extracted.downcast::<TestError>().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>fn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ A reference to a string allocated in read only memory\n let pangram: &'static str = \"the quick brown fox jumps over the lazy dog\";\n println!(\"Pangram: {}\", pangram);\n\n \/\/ Iterate over words in reverse, no new string is allocated\n println!(\"Words in reverse\");\n for word in pangram.words().rev() {\n println!(\"> {}\", word);\n }\n\n \/\/ Copy chars into a vector, sort and remove duplicates\n let mut chars: Vec<char> = pangram.chars().collect();\n chars.sort();\n chars.dedup();\n\n \/\/ Create an empty and growable `String`\n let mut string = String::new();\n for c in chars.into_iter() {\n \/\/ Insert a char at the end of string\n string.push_char(c);\n \/\/ Insert a string at the end of string\n string.push_str(\", \");\n }\n\n \/\/ The trimmed string is a slice to the original string, hence no new\n \/\/ allocation is performed\n let trimmed_str: &str = string.as_slice()\n .trim_chars([' ', ',']\n .as_slice());\n println!(\"Used characters: {}\", trimmed_str);\n\n \/\/ Heap allocate a string\n let alice = String::from_str(\"I like dogs\");\n \/\/ Allocate new memory and store the modified string there\n let bob: String = alice.replace(\"dog\", \"cat\");\n\n println!(\"Alice says: {}\", alice);\n println!(\"Bob says: {}\", bob);\n}\n<commit_msg>string.push_char() renamed to .push()<commit_after>fn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ A reference to a string allocated in read only memory\n let pangram: &'static str = \"the quick brown fox jumps over the lazy dog\";\n println!(\"Pangram: {}\", pangram);\n\n \/\/ Iterate over words in reverse, no new string is allocated\n println!(\"Words in reverse\");\n for word in pangram.words().rev() {\n println!(\"> {}\", word);\n }\n\n \/\/ Copy chars into a vector, sort and remove duplicates\n let mut chars: Vec<char> = pangram.chars().collect();\n chars.sort();\n chars.dedup();\n\n \/\/ Create an empty and growable `String`\n let mut string = String::new();\n for c in chars.into_iter() {\n \/\/ Insert a char at the end of string\n string.push(c);\n \/\/ Insert a string at the end of string\n 
string.push_str(\", \");\n }\n\n \/\/ The trimmed string is a slice to the original string, hence no new\n \/\/ allocation is performed\n let trimmed_str: &str = string.as_slice()\n .trim_chars([' ', ',']\n .as_slice());\n println!(\"Used characters: {}\", trimmed_str);\n\n \/\/ Heap allocate a string\n let alice = String::from_str(\"I like dogs\");\n \/\/ Allocate new memory and store the modified string there\n let bob: String = alice.replace(\"dog\", \"cat\");\n\n println!(\"Alice says: {}\", alice);\n println!(\"Bob says: {}\", bob);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>WIP start_match_3 instruction<commit_after>use crate::term::{boxed, lterm::LTerm};\n\n\/\/\/ Match buffer is a part of `BinaryMatchState`\npub struct BinaryMatchBuffer {\n pub orig: LTerm,\n pub base: *const u8,\n pub offset: usize,\n pub bit_size: usize,\n}\n\n\/\/\/ Matchstate is stored on heap as a heap object. Followed by 1 or more save\n\/\/\/ offset `LTerm`s.\npub struct BinaryMatchState {\n pub bin_header: boxed::binary::Binary,\n pub mb: BinaryMatchBuffer,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>slow version<commit_after>pub fn get_skyline(buildings: Vec<Vec<i32>>) -> Vec<Vec<i32>> {\n \/\/ PROBLEM here\n let mut bucket: [(i32, i32); 2147483647] = [(0, 0); 2147483647]; \/\/ [(start(0)|end(1), height), length]\n for building in buildings {\n bucket[building[0] as usize] = (0, building[2]);\n bucket[building[1] as usize] = (1, building[2]);\n }\n\n let mut result: Vec<Vec<i32>> = vec![];\n let mut last: (i32, i32) = (1, 0);\n for (i, d) in bucket.iter().enumerate() {\n if *d == (0, 0) {\n continue;\n }\n\n match (d.0, last.0) {\n \/\/ this is start, last is end\n (0, 1) => result.push(vec![i as i32, d.1]),\n\n \/\/ this is end, last is start\n (1, 0) => {\n if d.1 > last.1 {\n result.push(vec![i as i32, last.1]);\n } else if d.1 == last.1 {\n result.push(vec![i as i32, 0])\n }\n }\n\n \/\/ this is start, last is start\n (0, 0) => {\n if d.1 > last.1 {\n result.push(vec![i as i32, d.1]);\n }\n }\n\n \/\/ this is end, last is end\n (1, 1) => {\n if let Some(e) = result.last() {\n if e[1] == 0 {\n result.pop();\n }\n }\n result.push(vec![i as i32, 0])\n }\n\n _ => (),\n }\n last = d.clone();\n }\n\n result\n}\n\nfn main() {\n dbg!(get_skyline(vec![\n vec![2, 9, 10],\n vec![3, 7, 15],\n vec![5, 12, 12],\n vec![15, 20, 10],\n vec![19, 24, 8]\n ]));\n\n assert_eq!(\n vec![vec![0, 3], vec![1, 0]],\n get_skyline(vec![vec![0, 1, 3]])\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for ICE. Fix #5153.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern: type `@Foo:'static` does not implement any method in scope named `foo`\n\ntrait Foo {\n fn foo(~self);\n}\n\nimpl Foo for int {\n fn foo(~self) { }\n}\n\nfn main() {\n (@5 as @Foo).foo();\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse libimagerror::into::IntoError;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n base: Option<PathBuf>,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn new(base: Option<PathBuf>, id: PathBuf) -> Result<StoreId> {\n StoreId::new_baseless(id).map(|mut sid| { sid.base = base; sid })\n }\n\n pub fn new_baseless(id: PathBuf) -> Result<StoreId> {\n if id.is_absolute() {\n Err(SEK::StoreIdLocalPartAbsoluteError.into_error())\n } else {\n Ok(StoreId {\n base: None,\n id: id\n })\n }\n }\n\n pub fn without_base(mut self) -> StoreId {\n self.base = None;\n self\n }\n\n pub fn storified(self, store: &Store) -> StoreId {\n StoreId {\n base: Some(store.path().clone()),\n id: self.id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n pub fn to_str(&self) -> Result<String> {\n if self.base.is_some() {\n let mut base = self.base.as_ref().cloned().unwrap();\n base.push(self.id.clone());\n base\n } else {\n self.id.clone()\n }\n .to_str()\n .map(String::from)\n .ok_or(SEK::StoreIdHandlingError.into_error())\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.base.unwrap_or(PathBuf::from(\"\/\"));\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> Result<StoreId>;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> Result<StoreId> {\n Ok(self)\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n use $crate::store::Result;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> Result<$crate::storeid::StoreId> {\n StoreId::new(None, self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<commit_msg>Add StoreId::with_base()<commit_after>use std::path::PathBuf;\n\nuse semver::Version;\nuse std::fmt::{Display, Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse libimagerror::into::IntoError;\n\nuse error::StoreErrorKind as SEK;\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId {\n base: Option<PathBuf>,\n id: PathBuf,\n}\n\nimpl StoreId {\n\n pub fn new(base: Option<PathBuf>, id: PathBuf) -> Result<StoreId> {\n StoreId::new_baseless(id).map(|mut sid| { sid.base = base; sid })\n }\n\n pub fn new_baseless(id: PathBuf) -> Result<StoreId> {\n if id.is_absolute() {\n Err(SEK::StoreIdLocalPartAbsoluteError.into_error())\n } else {\n Ok(StoreId {\n base: None,\n id: id\n })\n }\n }\n\n pub fn without_base(mut self) -> StoreId {\n self.base = None;\n self\n }\n\n pub fn with_base(mut self, base: PathBuf) -> Self {\n self.base = Some(base);\n self\n }\n\n pub fn storified(self, store: &Store) -> StoreId {\n StoreId {\n base: Some(store.path().clone()),\n id: self.id\n }\n }\n\n pub fn exists(&self) -> bool {\n let pb : PathBuf = self.clone().into();\n pb.exists()\n }\n\n pub fn is_file(&self) -> bool {\n true\n }\n\n pub fn is_dir(&self) -> bool {\n false\n }\n\n pub fn to_str(&self) -> Result<String> {\n if self.base.is_some() {\n let mut 
base = self.base.as_ref().cloned().unwrap();\n base.push(self.id.clone());\n base\n } else {\n self.id.clone()\n }\n .to_str()\n .map(String::from)\n .ok_or(SEK::StoreIdHandlingError.into_error())\n }\n\n}\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n let mut base = self.base.unwrap_or(PathBuf::from(\"\/\"));\n base.push(self.id);\n base\n }\n\n}\n\nimpl Display for StoreId {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n match self.id.to_str() {\n Some(s) => write!(fmt, \"{}\", s),\n None => write!(fmt, \"{}\", self.id.to_string_lossy()),\n }\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> Result<StoreId>;\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> Result<StoreId> {\n Ok(self)\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SEK::StorePathLacksVersion.into());\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n use $crate::store::Result;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> Result<$crate::storeid::StoreId> {\n StoreId::new(None, self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = 
module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>improve a bit the graphite reporter (and give it an actual test)<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! This reader composes frames of bytes started with a 4 byte frame header indicating the size of\n\/\/! the buffer. An exact size buffer will be allocated once the 4 byte frame header is received.\n\nuse std::io::{self, Read, Error, ErrorKind};\nuse std::collections::VecDeque;\nuse std::mem;\n\n#[derive(Debug)]\npub struct FrameReader {\n frames: Frames\n}\n\nimpl FrameReader {\n pub fn new(max_frame_size: u32) -> FrameReader {\n FrameReader {\n frames: Frames::new(max_frame_size)\n }\n }\n\n pub fn read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n self.frames.read(reader)\n }\n\n pub fn iter_mut(&mut self) -> Iter {\n Iter {\n frames: &mut self.frames\n }\n }\n}\n\npub struct Iter<'a> {\n frames: &'a mut Frames\n}\n\nimpl<'a> Iterator for Iter<'a> {\n type Item = Vec<u8>;\n\n fn next(&mut self) -> Option<Vec<u8>> {\n self.frames.completed_frames.pop_front()\n }\n}\n\n#[derive(Debug)]\nstruct Frames {\n max_frame_size: u32,\n bytes_read: usize,\n header: [u8; 4],\n reading_header: bool,\n current: Vec<u8>,\n completed_frames: VecDeque<Vec<u8>>\n}\n\nimpl Frames {\n pub fn new(max_frame_size: u32) -> Frames {\n Frames {\n max_frame_size: max_frame_size,\n bytes_read: 0,\n header: [0; 4],\n reading_header: true,\n current: Vec::with_capacity(0),\n completed_frames: VecDeque::new()\n }\n }\n\n \/\/\/ Will read as much data as possible and build up frames to be retrieved from the iterator.\n \/\/\/\n \/\/\/ Will stop reading when 0 bytes are retrieved from the latest call to `do_read` or the error\n \/\/\/ kind is io::ErrorKind::WouldBlock.\n \/\/\/\n \/\/\/ Returns an error or the total amount of bytes read.\n fn read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let mut total_bytes_read = 0;\n loop {\n match self.do_read(reader) {\n Ok(0) => return Ok(total_bytes_read),\n Ok(bytes_read) => {\n total_bytes_read += bytes_read;\n },\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => return Ok(total_bytes_read),\n Err(e) => return Err(e)\n }\n }\n }\n\n fn do_read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n if self.reading_header {\n self.read_header(reader)\n } else {\n self.read_value(reader)\n }\n }\n\n \/\/ TODO: Return an error if size is greater than max_frame_size\n fn read_header<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let bytes_read = try!(reader.read(&mut self.header[self.bytes_read..]));\n self.bytes_read += bytes_read;\n if self.bytes_read == 4 {\n let len = unsafe { u32::from_be(mem::transmute(self.header)) };\n self.bytes_read = 0;\n self.reading_header = false;\n self.current = Vec::with_capacity(len as usize);\n unsafe { self.current.set_len(len as usize); }\n }\n Ok(bytes_read)\n }\n\n fn read_value<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let bytes_read = try!(reader.read(&mut self.current[self.bytes_read..]));\n self.bytes_read += bytes_read;\n if self.bytes_read == self.current.len() {\n self.completed_frames.push_back(mem::replace(&mut self.current, Vec::new()));\n self.bytes_read = 0;\n self.reading_header = true;\n }\n Ok(bytes_read)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::{mem, thread};\n use std::io::Cursor;\n use std::io::Write;\n use std::net::{TcpListener, 
TcpStream};\n use super::FrameReader;\n\n #[test]\n fn partial_and_complete_reads() {\n let buf1 = String::from(\"Hello World\").into_bytes();\n let buf2 = String::from(\"Hi.\").into_bytes();\n let header1: [u8; 4] = unsafe { mem::transmute((buf1.len() as u32).to_be()) };\n let header2: [u8; 4] = unsafe { mem::transmute((buf2.len() as u32).to_be()) };\n\n let mut reader = FrameReader::new(1024);\n\n \/\/ Write a partial header\n let mut header = Cursor::new(&header1[0..2]);\n let bytes_read = reader.read(&mut header).unwrap();\n assert_eq!(2, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Complete writing just the header\n let mut header = Cursor::new(&header1[2..]);\n let bytes_read = reader.read(&mut header).unwrap();\n assert_eq!(2, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Write a partial value\n let mut data = Cursor::new(&buf1[0..5]);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(5, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Complete writing the first value\n let mut data = Cursor::new(&buf1[5..]);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(6, bytes_read);\n let val = reader.iter_mut().next().unwrap();\n assert_eq!(buf1, val);\n\n \/\/ Write an entire header and value\n let mut data = Cursor::new(Vec::with_capacity(7));\n assert_eq!(4, data.write(&header2).unwrap());\n assert_eq!(3, data.write(&buf2).unwrap());\n data.set_position(0);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(7, bytes_read);\n assert_eq!(buf2, reader.iter_mut().next().unwrap());\n }\n\n const IP: &'static str = \"127.0.0.1:5003\";\n \/\/\/ Test that we never get an io error, but instead get Ok(0) when the call to read would block\n #[test]\n fn would_block() {\n let listener = TcpListener::bind(IP).unwrap();\n let h = thread::spawn(move || {\n for stream in listener.incoming() {\n if let Ok(mut conn) = stream {\n conn.set_nonblocking(true).unwrap();\n let mut reader = FrameReader::new(1024);\n let result = reader.read(&mut conn);\n assert_matches!(result, Ok(0));\n return;\n }\n }\n });\n\n let _ = TcpStream::connect(IP).unwrap();\n h.join().unwrap();\n }\n}\n<commit_msg>Fix warnings<commit_after>\/\/! This reader composes frames of bytes started with a 4 byte frame header indicating the size of\n\/\/! the buffer. 
An exact size buffer will be allocated once the 4 byte frame header is received.\n\nuse std::io::{self, Read};\nuse std::collections::VecDeque;\nuse std::mem;\n\n#[derive(Debug)]\npub struct FrameReader {\n frames: Frames\n}\n\nimpl FrameReader {\n pub fn new(max_frame_size: u32) -> FrameReader {\n FrameReader {\n frames: Frames::new(max_frame_size)\n }\n }\n\n pub fn read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n self.frames.read(reader)\n }\n\n pub fn iter_mut(&mut self) -> Iter {\n Iter {\n frames: &mut self.frames\n }\n }\n}\n\npub struct Iter<'a> {\n frames: &'a mut Frames\n}\n\nimpl<'a> Iterator for Iter<'a> {\n type Item = Vec<u8>;\n\n fn next(&mut self) -> Option<Vec<u8>> {\n self.frames.completed_frames.pop_front()\n }\n}\n\n#[derive(Debug)]\nstruct Frames {\n max_frame_size: u32,\n bytes_read: usize,\n header: [u8; 4],\n reading_header: bool,\n current: Vec<u8>,\n completed_frames: VecDeque<Vec<u8>>\n}\n\nimpl Frames {\n pub fn new(max_frame_size: u32) -> Frames {\n Frames {\n max_frame_size: max_frame_size,\n bytes_read: 0,\n header: [0; 4],\n reading_header: true,\n current: Vec::with_capacity(0),\n completed_frames: VecDeque::new()\n }\n }\n\n \/\/\/ Will read as much data as possible and build up frames to be retrieved from the iterator.\n \/\/\/\n \/\/\/ Will stop reading when 0 bytes are retrieved from the latest call to `do_read` or the error\n \/\/\/ kind is io::ErrorKind::WouldBlock.\n \/\/\/\n \/\/\/ Returns an error or the total amount of bytes read.\n fn read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let mut total_bytes_read = 0;\n loop {\n match self.do_read(reader) {\n Ok(0) => return Ok(total_bytes_read),\n Ok(bytes_read) => {\n total_bytes_read += bytes_read;\n },\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => return Ok(total_bytes_read),\n Err(e) => return Err(e)\n }\n }\n }\n\n fn do_read<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n if self.reading_header {\n self.read_header(reader)\n } else {\n self.read_value(reader)\n }\n }\n\n \/\/ TODO: Return an error if size is greater than max_frame_size\n fn read_header<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let bytes_read = try!(reader.read(&mut self.header[self.bytes_read..]));\n self.bytes_read += bytes_read;\n if self.bytes_read == 4 {\n let len = unsafe { u32::from_be(mem::transmute(self.header)) };\n self.bytes_read = 0;\n self.reading_header = false;\n self.current = Vec::with_capacity(len as usize);\n unsafe { self.current.set_len(len as usize); }\n }\n Ok(bytes_read)\n }\n\n fn read_value<T: Read>(&mut self, reader: &mut T) -> io::Result<usize> {\n let bytes_read = try!(reader.read(&mut self.current[self.bytes_read..]));\n self.bytes_read += bytes_read;\n if self.bytes_read == self.current.len() {\n self.completed_frames.push_back(mem::replace(&mut self.current, Vec::new()));\n self.bytes_read = 0;\n self.reading_header = true;\n }\n Ok(bytes_read)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use std::{mem, thread};\n use std::io::Cursor;\n use std::io::Write;\n use std::net::{TcpListener, TcpStream};\n use super::FrameReader;\n\n #[test]\n fn partial_and_complete_reads() {\n let buf1 = String::from(\"Hello World\").into_bytes();\n let buf2 = String::from(\"Hi.\").into_bytes();\n let header1: [u8; 4] = unsafe { mem::transmute((buf1.len() as u32).to_be()) };\n let header2: [u8; 4] = unsafe { mem::transmute((buf2.len() as u32).to_be()) };\n\n let mut reader = FrameReader::new(1024);\n\n \/\/ Write a partial header\n let mut header = 
Cursor::new(&header1[0..2]);\n let bytes_read = reader.read(&mut header).unwrap();\n assert_eq!(2, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Complete writing just the header\n let mut header = Cursor::new(&header1[2..]);\n let bytes_read = reader.read(&mut header).unwrap();\n assert_eq!(2, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Write a partial value\n let mut data = Cursor::new(&buf1[0..5]);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(5, bytes_read);\n assert_eq!(None, reader.iter_mut().next());\n\n \/\/ Complete writing the first value\n let mut data = Cursor::new(&buf1[5..]);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(6, bytes_read);\n let val = reader.iter_mut().next().unwrap();\n assert_eq!(buf1, val);\n\n \/\/ Write an entire header and value\n let mut data = Cursor::new(Vec::with_capacity(7));\n assert_eq!(4, data.write(&header2).unwrap());\n assert_eq!(3, data.write(&buf2).unwrap());\n data.set_position(0);\n let bytes_read = reader.read(&mut data).unwrap();\n assert_eq!(7, bytes_read);\n assert_eq!(buf2, reader.iter_mut().next().unwrap());\n }\n\n const IP: &'static str = \"127.0.0.1:5003\";\n \/\/\/ Test that we never get an io error, but instead get Ok(0) when the call to read would block\n #[test]\n fn would_block() {\n let listener = TcpListener::bind(IP).unwrap();\n let h = thread::spawn(move || {\n for stream in listener.incoming() {\n if let Ok(mut conn) = stream {\n conn.set_nonblocking(true).unwrap();\n let mut reader = FrameReader::new(1024);\n let result = reader.read(&mut conn);\n assert_matches!(result, Ok(0));\n return;\n }\n }\n });\n\n let _ = TcpStream::connect(IP).unwrap();\n h.join().unwrap();\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::io::Read;\nuse std::io::Write;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(basepath: String) -> StorageBackend {\n StorageBackend {\n basepath: basepath,\n }\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<'a, HP>(&self, f: File, p: &Parser<HP>) ->\n Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let written = p.write(f.contents());\n if let Ok(string) = written {\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n Ok(Ok(()))\n } else {\n Err(written.err().unwrap())\n }\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<'a, HP>(&self, f: File, p: &Parser<HP>)\n -> Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let contents = p.write(f.contents());\n\n if contents.is_err() {\n return Err(contents.err().unwrap());\n }\n\n let content = contents.unwrap();\n\n let path = self.build_filepath(&f);\n if let Err(_) = FSFile::open(&path) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::open()\"),\n format!(\"Tried to open '{}'\", path),\n String::from(\"Tried to update contents of this file, though file doesn't exist\"),\n None)))\n }\n\n if let Ok(mut file) = FSFile::create(&path) {\n if let Err(writeerr) = file.write_all(&content.clone().into_bytes()) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::write()\"),\n format!(\"Tried to write '{}'\", path),\n String::from(\"Tried to write contents of this file, though operation did not succeed\"),\n Some(content))))\n }\n }\n\n Ok(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, id: FileID, p: &Parser<HP>) -> Option<File>\n where HP: FileHeaderParser<'a>\n {\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(id, h, d))).ok()\n } else {\n None\n }\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.id())\n }\n\n fn build_filepath_with_id(&self, id: FileID) -> String {\n self.basepath.clone() + &id[..]\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n explan: String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n explanation: explan,\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<commit_msg>Fix: structure field `dataDump` should have a snake case name such as `data_dump`<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::io::Read;\nuse std::io::Write;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(basepath: String) -> StorageBackend {\n StorageBackend {\n 
basepath: basepath,\n }\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self) -> Option<Vec<FileID>> {\n let list = glob(&self.basepath[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<'a, HP>(&self, f: File, p: &Parser<HP>) ->\n Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let written = p.write(f.contents());\n if let Ok(string) = written {\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n Ok(Ok(()))\n } else {\n Err(written.err().unwrap())\n }\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<'a, HP>(&self, f: File, p: &Parser<HP>)\n -> Result<BackendOperationResult, ParserError>\n where HP: FileHeaderParser<'a>\n {\n let contents = p.write(f.contents());\n\n if contents.is_err() {\n return Err(contents.err().unwrap());\n }\n\n let content = contents.unwrap();\n\n let path = self.build_filepath(&f);\n if let Err(_) = FSFile::open(&path) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::open()\"),\n format!(\"Tried to open '{}'\", path),\n String::from(\"Tried to update contents of this file, though file doesn't exist\"),\n None)))\n }\n\n if let Ok(mut file) = FSFile::create(&path) {\n if let Err(writeerr) = file.write_all(&content.clone().into_bytes()) {\n return Ok(Err(StorageBackendError::new(\n String::from(\"File::write()\"),\n format!(\"Tried to write '{}'\", path),\n String::from(\"Tried to write contents of this file, though operation did not succeed\"),\n Some(content))))\n }\n }\n\n Ok(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. 
Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, id: FileID, p: &Parser<HP>) -> Option<File>\n where HP: FileHeaderParser<'a>\n {\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(id, h, d))).ok()\n } else {\n None\n }\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.id())\n }\n\n fn build_filepath_with_id(&self, id: FileID) -> String {\n self.basepath.clone() + &id[..]\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub data_dump: Option<String> \/\/ Data dump, if any\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n explan: String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n explanation: explan,\n data_dump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use super::column::Column;\nuse super::table::Table;\nuse std::io::{Write, Read, Seek};\n\n\/\/\/ The Rows struct represents the file level implementation\n\/\/\/ of the table\npub struct Rows<B: Write + Read + Seek> {\n buf: B,\n \/\/ TODO:add column_size, header_offset \n}\n\n\/\/ TODO: implement insert_row and select_row\nimpl<B: Write + Read + Seek> Rows<B> {\n pub fn new(buf: B) -> Self {\n Rows {\n buf: buf\n }\n }\n}\n<commit_msg>small format fixes<commit_after>use super::column::Column;\nuse super::table::Table;\nuse std::io::{Write, Read, Seek};\n\n\/\/\/ The Rows struct represents the file level implementation\n\/\/\/ of the table\npub struct Rows<B: Write + Read + Seek> {\n buf: B,\n \/\/ TODO:add column_size, header_offset\n}\n\n\/\/ TODO: implement insert_row and select_row\nimpl<B: Write + Read + Seek> Rows<B> {\n pub fn new(buf: B) -> Self {\n Rows {\n buf: buf\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move default config values to Default impl<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adds missing output units<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>start new<commit_after>fn complex_number_multiply<'a>(x: &'a str, y: &'a str) -> String {}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(examples) add poly_interp example<commit_after>\/\/! # `poly_interp` example\n\/\/!\n\/\/! In this example, we generate a random `n` degree polynomial,\n\/\/! evaluate it on `n+1` points, and use those points to reconstruct\n\/\/! 
the polynomial by solving a linear equation.\n\nextern crate linxal;\nextern crate ndarray;\nextern crate rand;\n\nuse rand::thread_rng;\nuse rand::distributions::{Normal, Range, Sample, IndependentSample};\nuse linxal::solve_linear::SolveLinear;\nuse ndarray::{Array, arr1, Ix};\n\n\/\/\/ Evalutate a polynomial f with coefficients `coefs` at `x`.\n\/\/\/\n\/\/\/ Input coefficients are ordered for lowest order to highest order.\nfn eval_poly(x: f32, coefs: &[f32]) -> f32 {\n \/\/ horner's rule\n coefs.iter().rev().fold(0.0, |acc, c| acc * x + *c)\n}\n\n\/\/\/ Returns the row (1.0, x, x^2, ..., x^n)\nfn vandermonde_row(x: f32, n: usize) -> Array<f32, Ix> {\n let mut v: Vec<f32> = Vec::with_capacity(n + 1);\n let mut r = 1.0;\n v.push(1.0);\n for _ in 1..(n + 1) {\n r *= x;\n v.push(r);\n }\n Array::from_vec(v)\n}\n\nfn main() {\n let mut rng = thread_rng();\n let mut coef_gen = Range::new(-1.0, 1.0);\n\n let n = 10;\n let coefs = Array::from_iter((0..n+1).map(|_| coef_gen.ind_sample(&mut rng)));\n\n let samples = Array::linspace(-1.0, 1.0, n+1);\n\n \/\/ Create the van der monde matrix.\n let mut a = Array::default((n+1, n+1));\n for (x, mut row) in samples.iter().zip(a.outer_iter_mut()) {\n row.assign(&vandermonde_row(*x, n));\n }\n\n \/\/ Create the solution matrix.\n let mut b = Array::default(n+1);\n for (i, x) in samples.iter().enumerate() {\n b[i] = eval_poly(*x, coefs.as_slice().unwrap());\n }\n\n \/\/ Use least squares to fit the matrix.\n let fitted_coefs = SolveLinear::compute(&a, &b);\n assert!(fitted_coefs.is_ok());\n\n println!(\"Fitted Coefficients:\\n{:?}\", fitted_coefs.unwrap());\n println!(\"\");\n println!(\"Actual Coefficients:\\n{:?}\", coefs);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a randomization example<commit_after>#[cfg(not(feature = \"random\"))]\nfn main() {\n println!(\"You can't use the `rand` integration without the \\\"random\\\" feature\");\n}\n\n#[cfg(feature = \"random\")]\nfn main() {\n use palette::{FromColor, Hsl, Hsv, Hwb, Pixel, RgbHue, Srgb};\n\n use image::{GenericImage, GenericImageView, RgbImage};\n use rand::Rng;\n\n let mut image = RgbImage::new(512, 256);\n let mut rng = rand_mt::Mt::default();\n\n \/\/ RGB\n {\n let mut sub_image = image.sub_image(0, 0, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color = Srgb::<f32>::new(rng.gen(), rng.gen(), rng.gen());\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n {\n let mut sub_image = image.sub_image(0, 128, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color = rng.gen::<Srgb>();\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n \/\/ HSV\n {\n let mut sub_image = image.sub_image(128, 0, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color =\n Srgb::from_color(Hsv::new(rng.gen::<RgbHue>(), rng.gen(), rng.gen()));\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n {\n let mut sub_image = image.sub_image(128, 128, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color = Srgb::from_color(rng.gen::<Hsv>());\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n \/\/ HSL\n {\n let mut sub_image = image.sub_image(256, 0, 128, 128);\n let 
(width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color =\n Srgb::from_color(Hsl::new(rng.gen::<RgbHue>(), rng.gen(), rng.gen()));\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n {\n let mut sub_image = image.sub_image(256, 128, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color = Srgb::from_color(rng.gen::<Hsl>());\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n \/\/ HWB\n {\n let mut sub_image = image.sub_image(384, 0, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color =\n Srgb::from_color(Hwb::new(rng.gen::<RgbHue>(), rng.gen(), rng.gen()));\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n {\n let mut sub_image = image.sub_image(384, 128, 128, 128);\n let (width, height) = sub_image.dimensions();\n for x in 0..width {\n for y in 0..height {\n let random_color = Srgb::from_color(rng.gen::<Hwb>());\n sub_image.put_pixel(x, y, image::Rgb(random_color.into_format().into_raw()));\n }\n }\n }\n\n let _ = std::fs::create_dir(\"example-data\/output\");\n match image.save(\"example-data\/output\/random.png\") {\n Ok(()) => println!(\"see 'example-data\/output\/random.png' for the result\"),\n Err(e) => println!(\"failed to write 'example-data\/output\/random.png': {}\", e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::sync::Mutex;\n\nmacro_rules! cargo_script {\n ($($args:expr),* $(,)*) => {\n {\n extern crate tempdir;\n use std::process::Command;\n\n let cargo_lock = ::util::CARGO_MUTEX.lock().expect(\"could not acquire Cargo mutext\");\n\n let temp_dir = tempdir::TempDir::new(\"cargo-script-test\").unwrap();\n let cmd_str;\n let out = {\n let mut cmd = Command::new(\"target\/debug\/cargo-script\");\n cmd.arg(\"script\");\n cmd.arg(\"--pkg-path\").arg(temp_dir.path());\n $(\n cmd.arg($args);\n )*\n\n cmd_str = format!(\"{:?}\", cmd);\n\n cmd.output()\n .map(::util::Output::from)\n };\n\n if let Ok(out) = out.as_ref() {\n println!(\"cargo-script cmd: {}\", cmd_str);\n println!(\"cargo-script stdout:\");\n println!(\"-----\");\n println!(\"{}\", out.stdout);\n println!(\"-----\");\n println!(\"cargo-script stderr:\");\n println!(\"-----\");\n println!(\"{}\", out.stderr);\n println!(\"-----\");\n }\n\n drop(temp_dir);\n drop(cargo_lock);\n\n out\n }\n };\n}\n\nmacro_rules! with_output_marker {\n (prelude $p:expr; $e:expr) => {\n format!(concat!($p, \"{}\", $e), ::util::OUTPUT_MARKER_CODE)\n };\n\n ($e:expr) => {\n format!(concat!(\"{}\", $e), ::util::OUTPUT_MARKER_CODE)\n };\n}\n\nlazy_static! 
{\n #[doc(hidden)]\n pub static ref CARGO_MUTEX: Mutex<()> = Mutex::new(());\n}\n\npub const OUTPUT_MARKER: &'static str = \"--output--\";\npub const OUTPUT_MARKER_CODE: &'static str = \"println!(\\\"--output--\\\");\";\n\npub struct Output {\n pub status: ::std::process::ExitStatus,\n pub stdout: String,\n pub stderr: String,\n}\n\nimpl Output {\n pub fn stdout_output(&self) -> &str {\n assert!(self.success());\n for (i, _) in self.stdout.match_indices(OUTPUT_MARKER) {\n let before_cp = self.stdout[..i].chars().rev().next().unwrap();\n if !(before_cp == '\\r' || before_cp == '\\n') { continue; }\n let after = &self.stdout[i+OUTPUT_MARKER.len()..];\n let after_cp = after.chars().next().unwrap();\n if !(after_cp == '\\r' || after_cp == '\\n') { continue; }\n return after;\n }\n panic!(\"could not find `{}` in script output\", OUTPUT_MARKER);\n }\n\n pub fn success(&self) -> bool {\n self.status.success()\n }\n}\n\nimpl From<::std::process::Output> for Output {\n fn from(v: ::std::process::Output) -> Self {\n Output {\n status: v.status,\n stdout: String::from_utf8(v.stdout).unwrap(),\n stderr: String::from_utf8(v.stderr).unwrap(),\n }\n }\n}\n<commit_msg>Fix tests on Rust 1.4.<commit_after>use std::sync::Mutex;\n\nmacro_rules! cargo_script {\n ($($args:expr),* $(,)*) => {\n {\n extern crate tempdir;\n use std::process::Command;\n\n let cargo_lock = ::util::CARGO_MUTEX.lock().expect(\"could not acquire Cargo mutext\");\n\n let temp_dir = tempdir::TempDir::new(\"cargo-script-test\").unwrap();\n let cmd_str;\n let out = {\n let mut cmd = Command::new(\"target\/debug\/cargo-script\");\n cmd.arg(\"script\");\n cmd.arg(\"--pkg-path\").arg(temp_dir.path());\n $(\n cmd.arg($args);\n )*\n\n cmd_str = format!(\"{:?}\", cmd);\n\n cmd.output()\n .map(::util::Output::from)\n };\n\n if let Ok(out) = out.as_ref() {\n println!(\"cargo-script cmd: {}\", cmd_str);\n println!(\"cargo-script stdout:\");\n println!(\"-----\");\n println!(\"{}\", out.stdout);\n println!(\"-----\");\n println!(\"cargo-script stderr:\");\n println!(\"-----\");\n println!(\"{}\", out.stderr);\n println!(\"-----\");\n }\n\n drop(temp_dir);\n drop(cargo_lock);\n\n out\n }\n };\n}\n\nmacro_rules! with_output_marker {\n (prelude $p:expr; $e:expr) => {\n format!(concat!($p, \"{}\", $e), ::util::OUTPUT_MARKER_CODE)\n };\n\n ($e:expr) => {\n format!(concat!(\"{}\", $e), ::util::OUTPUT_MARKER_CODE)\n };\n}\n\nlazy_static! 
{\n #[doc(hidden)]\n pub static ref CARGO_MUTEX: Mutex<()> = Mutex::new(());\n}\n\npub const OUTPUT_MARKER: &'static str = \"--output--\";\npub const OUTPUT_MARKER_CODE: &'static str = \"println!(\\\"--output--\\\");\";\n\npub struct Output {\n pub status: ::std::process::ExitStatus,\n pub stdout: String,\n pub stderr: String,\n}\n\nimpl Output {\n pub fn stdout_output(&self) -> &str {\n assert!(self.success());\n for marker in self.stdout.matches(OUTPUT_MARKER) {\n let i = subslice_offset(&self.stdout, marker).unwrap();\n let before_cp = self.stdout[..i].chars().rev().next().unwrap();\n if !(before_cp == '\\r' || before_cp == '\\n') { continue; }\n let after = &self.stdout[i+OUTPUT_MARKER.len()..];\n let after_cp = after.chars().next().unwrap();\n if !(after_cp == '\\r' || after_cp == '\\n') { continue; }\n return after;\n }\n panic!(\"could not find `{}` in script output\", OUTPUT_MARKER);\n }\n\n pub fn success(&self) -> bool {\n self.status.success()\n }\n}\n\nimpl From<::std::process::Output> for Output {\n fn from(v: ::std::process::Output) -> Self {\n Output {\n status: v.status,\n stdout: String::from_utf8(v.stdout).unwrap(),\n stderr: String::from_utf8(v.stderr).unwrap(),\n }\n }\n}\n\nfn subslice_offset(outer: &str, inner: &str) -> Option<usize> {\n let outer_beg = outer.as_ptr() as usize;\n let inner = inner.as_ptr() as usize;\n if inner < outer_beg || inner > outer_beg.wrapping_add(outer.len()) {\n None\n } else {\n Some(inner.wrapping_sub(outer_beg))\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n }\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n 
self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_BKSP => if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n },\n K_DEL => if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len() - 1];\n },\n K_F5 => self.reload(&mut window),\n K_F6 => self.save(&mut window),\n K_HOME => self.offset = 0,\n K_UP => {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n K_LEFT => if self.offset > 0 {\n self.offset -= 1;\n },\n K_RIGHT => if self.offset < self.string.len() {\n self.offset += 1;\n },\n K_END => self.offset = self.string.len(),\n K_DOWN => {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n _ => match key_event.character {\n '\\0' => (),\n _ => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset += 1;\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Make editor behave more Vi-like<commit_after>use redox::*;\n\n#[derive(Copy, Clone)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 
255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n use self::Mode::*;\n match (mode, key_event.scancode) {\n (Insert, K_ESC) => mode = Normal,\n (Insert, K_BKSP) => if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n },\n (Insert, K_DEL) => if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len() - 1];\n },\n (_, K_F5) => self.reload(&mut window),\n (_, K_F6) => self.save(&mut window),\n (_, K_HOME) => self.offset = 0,\n (_, K_UP) => self.up(),\n (_, K_LEFT) => self.left(),\n (_, K_RIGHT) => self.right(),\n (_, K_END) => self.offset = self.string.len(),\n (_, K_DOWN) => self.down(),\n (m, _) => match (m, key_event.character) {\n (Normal, 'i') => mode = Insert,\n (Normal, 'h') => self.left(),\n (Normal, 'l') => self.right(),\n (Normal, 'k') => self.up(),\n (Normal, 'j') => self.down(),\n (Normal, 'G') => self.offset = self.string.len(),\n (Normal, 'a') => {\n self.right();\n mode = Insert;\n },\n (Insert, '\\0') => (),\n (Insert, _) => {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &key_event.character.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n },\n _ => {},\n }\n }\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. 
self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', 
[128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<commit_msg>Fix indentation<commit_after>\/\/ TODO: Refactor using a matrix for performance\n\nuse redox::*;\n\nmod cmd;\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub enum Mode {\n Insert,\n Normal,\n}\n\npub struct Editor {\n url: String,\n file: Option<File>,\n string: String,\n offset: usize,\n scroll_x: isize,\n scroll_y: isize,\n}\n\nimpl Editor {\n #[inline(never)]\n pub fn new() -> Self {\n Editor {\n url: String::new(),\n file: Option::None,\n string: String::new(),\n offset: 0,\n scroll_x: 0,\n scroll_y: 0,\n }\n }\n\n fn backspace(&mut self, window: &mut Window) {\n if self.offset > 0 {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset - 1].to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset -= 1;\n }\n }\n\n fn delete(&mut self, window: &mut Window) {\n if self.offset < self.string.len() {\n window.set_title(&format!(\"{}{}{}\",\"Editor (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &self.string[self.offset + 1 .. 
self.string.len()];\n }\n }\n\n \/\/ TODO: Add methods for multiple movements\n fn up(&mut self) {\n let mut new_offset = 0;\n for i in 2..self.offset {\n match self.string.as_bytes()[self.offset - i] {\n 0 => break,\n 10 => {\n new_offset = self.offset - i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn left(&mut self) {\n if self.offset > 0 {\n self.offset -= 1;\n }\n }\n\n fn right(&mut self) {\n if self.offset < self.string.len() {\n self.offset += 1;\n }\n }\n\n fn down(&mut self) {\n let mut new_offset = self.string.len();\n for i in self.offset..self.string.len() {\n match self.string.as_bytes()[i] {\n 0 => break,\n 10 => {\n new_offset = i + 1;\n break;\n }\n _ => (),\n }\n }\n self.offset = new_offset;\n }\n\n fn cur(&self) -> char {\n self.string.chars().nth(self.offset).unwrap_or('\\0')\n }\n\n fn insert(&mut self, c: char, window: &mut Window) {\n window.set_title(&format!(\"{}{}{}\",\"self (\", &self.url, \") Changed\"));\n self.string = self.string[0 .. self.offset].to_string() +\n &c.to_string() +\n &self.string[self.offset .. self.string.len()];\n self.offset += 1;\n }\n\n fn reload(&mut self, window: &mut Window) {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \")\"));\n self.offset = 0;\n self.scroll_x = 0;\n self.scroll_y = 0;\n\n match self.file {\n Option::Some(ref mut file) => {\n file.seek(Seek::Start(0));\n let mut vec: Vec<u8> = Vec::new();\n file.read_to_end(&mut vec);\n self.string = unsafe { String::from_utf8_unchecked(vec) };\n },\n Option::None => self.string = String::new(),\n }\n }\n\n fn save(&mut self, window: &mut Window) {\n match self.file {\n Option::Some(ref mut file) => {\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") Saved\"));\n file.seek(Seek::Start(0));\n file.write(&self.string.as_bytes());\n file.sync();\n }\n Option::None => {\n \/\/TODO: Ask for file to save to\n window.set_title(&(\"Editor (\".to_string() + &self.url + \") No Open File\"));\n }\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n let mut redraw = false;\n\n {\n window.set([255, 255, 255, 255]);\n\n let scroll_x = self.scroll_x;\n let scroll_y = self.scroll_y;\n\n let mut offset = 0;\n\n let mut col = -scroll_x;\n let cols = window.width() as isize \/ 8;\n\n let mut row = -scroll_y;\n let rows = window.height() as isize \/ 16;\n\n for c in self.string.chars() {\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - row + 1;\n }\n\n redraw = true;\n }\n }\n\n if c == '\\n' {\n col = -scroll_x;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, c, [0, 0, 0, 255]);\n }\n col += 1;\n }\n\n offset += 1;\n }\n\n if offset == self.offset {\n if col >= 0 && col < cols && row >= 0 && row < rows {\n window.char(8 * col, 16 * row, '_', [128, 128, 128, 255]);\n } else {\n if col < 0 { \/\/Too far to the left\n self.scroll_x += col;\n } else if col >= cols { \/\/Too far to the right\n self.scroll_x += cols - col + 1;\n }\n if row < 0 { \/\/Too far up\n self.scroll_y += row;\n } else if row >= rows { \/\/Too far down\n self.scroll_y += rows - 
row + 1;\n }\n\n redraw = true;\n }\n }\n\n window.sync();\n }\n\n if redraw {\n self.draw_content(window);\n }\n }\n\n fn main(&mut self, url: &str) {\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n 576,\n 400,\n &(\"Editor (\".to_string() + url + \")\"));\n\n self.url = url.to_string();\n self.file = Option::Some(File::open(&self.url));\n\n self.reload(&mut window);\n self.draw_content(&mut window);\n\n let mut mode = Mode::Normal;\n\n let mut last_change = String::new();\n let mut multiplier: Option<u32> = None;\n let mut swap = 0;\n let mut period = String::new();\n let mut is_recording = false;\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n cmd::exec(self, &mut mode, &mut multiplier, &mut last_change, key_event, &mut window, &mut swap, &mut period, &mut is_recording);\n\n self.draw_content(&mut window);\n }\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => Editor::new().main(&arg),\n Option::None => Editor::new().main(\"none:\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added merge sort<commit_after>pub fn sort<T: PartialOrd + Copy>(arr: &mut [T]) {\n let len = arr.len();\n if len > 1 {\n sort(&mut arr[..(len \/ 2)]);\n sort(&mut arr[(len \/ 2)..]);\n\n merge(arr);\n }\n}\n\nfn merge<T: PartialOrd + Copy>(arr: &mut [T]) {\n let len = arr.len();\n let left_len = len \/ 2;\n let right_len = len - left_len;\n\n let mut left = Vec::<T>::with_capacity(left_len);\n let mut right = Vec::<T>::with_capacity(right_len);\n\n left.extend_from_slice(&arr[..left_len]);\n right.extend_from_slice(&arr[left_len..]);\n\n let mut i = 0;\n let mut j = 0;\n\n for k in 0..len {\n if i < left_len && (j == right_len || left[i] < right[j]) {\n arr[k] = left[i];\n i = i + 1;\n } else {\n arr[k] = right[j];\n j = j + 1;\n }\n }\n}\n\n#[test]\nfn test_sort() {\n let mut arr = [-5, 4, 1, -3, 2];\n\n sort(&mut arr);\n\n assert!(arr == [-5, -3, 1, 2, 4]);\n}\n\n#[bench]\nfn bench_sort(b: &mut ::test::Bencher) {\n b.iter(|| {\n let mut arr: Vec<u32> = (0..1000).rev().collect();\n\n sort(&mut arr);\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for ICE #4579<commit_after>#![allow(clippy::single_match)]\n\nuse std::ptr;\n\nfn main() {\n match Some(0_usize) {\n Some(_) => {\n let s = \"012345\";\n unsafe { ptr::read(s.as_ptr().offset(1) as *const [u8; 5]) };\n },\n _ => (),\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>unconditionally trim path seperators<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::attr::AttrValue;\nuse dom::bindings::codegen::Bindings::HTMLImageElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;\nuse dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, HTMLElementCast, HTMLImageElementDerived};\nuse dom::bindings::js::{JS, JSRef, Temporary};\nuse dom::bindings::trace::Untraceable;\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::document::Document;\nuse dom::element::{Element, HTMLImageElementTypeId};\nuse dom::element::AttributeHandlers;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId, NodeHelpers, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse servo_net::image_cache_task;\nuse servo_util::geometry::to_px;\nuse servo_util::str::DOMString;\nuse string_cache::Atom;\n\nuse url::{Url, UrlParser};\n\nuse std::cell::RefCell;\n\n#[jstraceable]\n#[must_root]\npub struct HTMLImageElement {\n pub htmlelement: HTMLElement,\n image: Untraceable<RefCell<Option<Url>>>,\n}\n\nimpl HTMLImageElementDerived for EventTarget {\n fn is_htmlimageelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLImageElementTypeId))\n }\n}\n\ntrait PrivateHTMLImageElementHelpers {\n fn update_image(self, value: Option<(DOMString, &Url)>);\n}\n\nimpl<'a> PrivateHTMLImageElementHelpers for JSRef<'a, HTMLImageElement> {\n \/\/\/ Makes the local `image` member match the status of the `src` attribute and starts\n \/\/\/ prefetching the image. This method must be called after `src` is changed.\n fn update_image(self, value: Option<(DOMString, &Url)>) {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let document = node.owner_doc().root();\n let window = document.deref().window.root();\n let image_cache = &window.image_cache_task;\n match value {\n None => {\n *self.image.deref().borrow_mut() = None;\n }\n Some((src, base_url)) => {\n let img_url = UrlParser::new().base_url(base_url).parse(src.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let img_url = img_url.unwrap();\n *self.image.deref().borrow_mut() = Some(img_url.clone());\n\n \/\/ inform the image cache to load this, but don't store a\n \/\/ handle.\n \/\/\n \/\/ TODO (Issue #84): don't prefetch if we are within a\n \/\/ <noscript> tag.\n image_cache.send(image_cache_task::Prefetch(img_url));\n }\n }\n }\n}\n\nimpl HTMLImageElement {\n fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLImageElement {\n HTMLImageElement {\n htmlelement: HTMLElement::new_inherited(HTMLImageElementTypeId, localName, document),\n image: Untraceable::new(RefCell::new(None)),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLImageElement> {\n let element = HTMLImageElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLImageElementBinding::Wrap)\n }\n}\n\npub trait LayoutHTMLImageElementHelpers {\n unsafe fn image(&self) -> Option<Url>;\n}\n\nimpl LayoutHTMLImageElementHelpers for JS<HTMLImageElement> {\n unsafe fn image(&self) -> Option<Url> {\n (*self.unsafe_get()).image.borrow().clone()\n }\n}\n\nimpl<'a> HTMLImageElementMethods for JSRef<'a, HTMLImageElement> {\n make_getter!(Alt)\n\n fn SetAlt(self, alt: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"alt\", alt)\n }\n\n make_getter!(Src)\n\n fn SetSrc(self, src: DOMString) {\n let 
element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_url_attribute(\"src\", src)\n }\n\n make_getter!(UseMap)\n\n fn SetUseMap(self, use_map: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"usemap\", use_map)\n }\n\n make_bool_getter!(IsMap)\n\n fn SetIsMap(self, is_map: bool) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"ismap\", is_map.to_string())\n }\n\n fn Width(self) -> u32 {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let rect = node.get_bounding_content_box();\n to_px(rect.size.width) as u32\n }\n\n fn SetWidth(self, width: u32) {\n let elem: JSRef<Element> = ElementCast::from_ref(self);\n elem.set_uint_attribute(\"width\", width)\n }\n\n fn Height(self) -> u32 {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let rect = node.get_bounding_content_box();\n to_px(rect.size.height) as u32\n }\n\n fn SetHeight(self, height: u32) {\n let elem: JSRef<Element> = ElementCast::from_ref(self);\n elem.set_uint_attribute(\"height\", height)\n }\n\n make_getter!(Name)\n\n fn SetName(self, name: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"name\", name)\n }\n\n make_getter!(Align)\n\n fn SetAlign(self, align: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"align\", align)\n }\n\n make_uint_getter!(Hspace)\n\n fn SetHspace(self, hspace: u32) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_uint_attribute(\"hspace\", hspace)\n }\n\n make_uint_getter!(Vspace)\n\n fn SetVspace(self, vspace: u32) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_uint_attribute(\"vspace\", vspace)\n }\n\n make_getter!(LongDesc)\n\n fn SetLongDesc(self, longdesc: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"longdesc\", longdesc)\n }\n\n make_getter!(Border)\n\n fn SetBorder(self, border: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"border\", border)\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLImageElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn after_set_attr(&self, name: &Atom, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(name, value.clone()),\n _ => (),\n }\n\n if \"src\" == name.as_slice() {\n let window = window_from_node(*self).root();\n let url = window.deref().get_url();\n self.update_image(Some((value, &url)));\n }\n }\n\n fn before_remove_attr(&self, name: &Atom, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.before_remove_attr(name, value.clone()),\n _ => (),\n }\n\n if \"src\" == name.as_slice() {\n self.update_image(None);\n }\n }\n\n fn parse_plain_attribute(&self, name: &str, value: DOMString) -> AttrValue {\n match name {\n \"width\" | \"height\" | \"hspace\" | \"vspace\" => AttrValue::from_u32(value, 0),\n _ => self.super_type().unwrap().parse_plain_attribute(name, value),\n }\n }\n}\n\nimpl Reflectable for HTMLImageElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.htmlelement.reflector()\n }\n}\n<commit_msg>Remove Untraceable from htmlimageelement.rs<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n 
* License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::attr::AttrValue;\nuse dom::bindings::codegen::Bindings::HTMLImageElementBinding;\nuse dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;\nuse dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, HTMLElementCast, HTMLImageElementDerived};\nuse dom::bindings::js::{JS, JSRef, Temporary};\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::document::Document;\nuse dom::element::{Element, HTMLImageElementTypeId};\nuse dom::element::AttributeHandlers;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId, NodeHelpers, window_from_node};\nuse dom::virtualmethods::VirtualMethods;\nuse servo_net::image_cache_task;\nuse servo_util::geometry::to_px;\nuse servo_util::str::DOMString;\nuse string_cache::Atom;\n\nuse url::{Url, UrlParser};\n\nuse std::cell::RefCell;\n\n#[jstraceable]\n#[must_root]\npub struct HTMLImageElement {\n pub htmlelement: HTMLElement,\n image: RefCell<Option<Url>>,\n}\n\nimpl HTMLImageElementDerived for EventTarget {\n fn is_htmlimageelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLImageElementTypeId))\n }\n}\n\ntrait PrivateHTMLImageElementHelpers {\n fn update_image(self, value: Option<(DOMString, &Url)>);\n}\n\nimpl<'a> PrivateHTMLImageElementHelpers for JSRef<'a, HTMLImageElement> {\n \/\/\/ Makes the local `image` member match the status of the `src` attribute and starts\n \/\/\/ prefetching the image. This method must be called after `src` is changed.\n fn update_image(self, value: Option<(DOMString, &Url)>) {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let document = node.owner_doc().root();\n let window = document.deref().window.root();\n let image_cache = &window.image_cache_task;\n match value {\n None => {\n *self.image.borrow_mut() = None;\n }\n Some((src, base_url)) => {\n let img_url = UrlParser::new().base_url(base_url).parse(src.as_slice());\n \/\/ FIXME: handle URL parse errors more gracefully.\n let img_url = img_url.unwrap();\n *self.image.borrow_mut() = Some(img_url.clone());\n\n \/\/ inform the image cache to load this, but don't store a\n \/\/ handle.\n \/\/\n \/\/ TODO (Issue #84): don't prefetch if we are within a\n \/\/ <noscript> tag.\n image_cache.send(image_cache_task::Prefetch(img_url));\n }\n }\n }\n}\n\nimpl HTMLImageElement {\n fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLImageElement {\n HTMLImageElement {\n htmlelement: HTMLElement::new_inherited(HTMLImageElementTypeId, localName, document),\n image: RefCell::new(None),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLImageElement> {\n let element = HTMLImageElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLImageElementBinding::Wrap)\n }\n}\n\npub trait LayoutHTMLImageElementHelpers {\n unsafe fn image(&self) -> Option<Url>;\n}\n\nimpl LayoutHTMLImageElementHelpers for JS<HTMLImageElement> {\n unsafe fn image(&self) -> Option<Url> {\n (*self.unsafe_get()).image.borrow().clone()\n }\n}\n\nimpl<'a> HTMLImageElementMethods for JSRef<'a, HTMLImageElement> {\n make_getter!(Alt)\n\n fn SetAlt(self, alt: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"alt\", alt)\n }\n\n 
make_getter!(Src)\n\n fn SetSrc(self, src: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_url_attribute(\"src\", src)\n }\n\n make_getter!(UseMap)\n\n fn SetUseMap(self, use_map: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"usemap\", use_map)\n }\n\n make_bool_getter!(IsMap)\n\n fn SetIsMap(self, is_map: bool) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"ismap\", is_map.to_string())\n }\n\n fn Width(self) -> u32 {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let rect = node.get_bounding_content_box();\n to_px(rect.size.width) as u32\n }\n\n fn SetWidth(self, width: u32) {\n let elem: JSRef<Element> = ElementCast::from_ref(self);\n elem.set_uint_attribute(\"width\", width)\n }\n\n fn Height(self) -> u32 {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let rect = node.get_bounding_content_box();\n to_px(rect.size.height) as u32\n }\n\n fn SetHeight(self, height: u32) {\n let elem: JSRef<Element> = ElementCast::from_ref(self);\n elem.set_uint_attribute(\"height\", height)\n }\n\n make_getter!(Name)\n\n fn SetName(self, name: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"name\", name)\n }\n\n make_getter!(Align)\n\n fn SetAlign(self, align: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"align\", align)\n }\n\n make_uint_getter!(Hspace)\n\n fn SetHspace(self, hspace: u32) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_uint_attribute(\"hspace\", hspace)\n }\n\n make_uint_getter!(Vspace)\n\n fn SetVspace(self, vspace: u32) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_uint_attribute(\"vspace\", vspace)\n }\n\n make_getter!(LongDesc)\n\n fn SetLongDesc(self, longdesc: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"longdesc\", longdesc)\n }\n\n make_getter!(Border)\n\n fn SetBorder(self, border: DOMString) {\n let element: JSRef<Element> = ElementCast::from_ref(self);\n element.set_string_attribute(\"border\", border)\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLImageElement> {\n fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn after_set_attr(&self, name: &Atom, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.after_set_attr(name, value.clone()),\n _ => (),\n }\n\n if \"src\" == name.as_slice() {\n let window = window_from_node(*self).root();\n let url = window.deref().get_url();\n self.update_image(Some((value, &url)));\n }\n }\n\n fn before_remove_attr(&self, name: &Atom, value: DOMString) {\n match self.super_type() {\n Some(ref s) => s.before_remove_attr(name, value.clone()),\n _ => (),\n }\n\n if \"src\" == name.as_slice() {\n self.update_image(None);\n }\n }\n\n fn parse_plain_attribute(&self, name: &str, value: DOMString) -> AttrValue {\n match name {\n \"width\" | \"height\" | \"hspace\" | \"vspace\" => AttrValue::from_u32(value, 0),\n _ => self.super_type().unwrap().parse_plain_attribute(name, value),\n }\n }\n}\n\nimpl Reflectable for HTMLImageElement {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n self.htmlelement.reflector()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 
2401<commit_after>\/\/ https:\/\/leetcode.com\/problems\/longest-nice-subarray\/\npub fn longest_nice_subarray(nums: Vec<i32>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", longest_nice_subarray(vec![1, 3, 8, 48, 10])); \/\/ 3\n println!(\"{}\", longest_nice_subarray(vec![3, 1, 5, 11, 13])); \/\/ 1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix broken pipe errors<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for sparc64 ABI issue<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Checks that we correctly codegen extern \"C\" functions returning structs.\n\/\/ See issue #52638.\n\n\/\/ compile-flags: -O --target=sparc64-unknown-linux-gnu --crate-type=rlib\n#![feature(no_core, lang_items)]\n#![no_core]\n\n#[lang=\"sized\"]\ntrait Sized { }\n#[lang=\"freeze\"]\ntrait Freeze { }\n#[lang=\"copy\"]\ntrait Copy { }\n\n#[repr(C)]\npub struct Bool {\n b: bool,\n}\n\n\/\/ CHECK: define i64 @structbool()\n\/\/ CHECK-NEXT: start:\n\/\/ CHECK-NEXT: ret i64 72057594037927936\n#[no_mangle]\npub extern \"C\" fn structbool() -> Bool {\n Bool { b: true }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>regression test for #39362<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum Foo {\n Bar { bar: Bar, id: usize }\n}\n\nenum Bar {\n A, B, C, D, E, F\n}\n\nfn test(f: Foo) {\n match f {\n \/\/~^ ERROR non-exhaustive patterns\n \/\/~| patterns\n Foo::Bar { bar: Bar::A, .. } => (),\n Foo::Bar { bar: Bar::B, .. } => (),\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>:art: Improve the expect message<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! 
Cookie headers, as defined in RFC 6265\n\nuse time::Tm;\nuse std::collections::TreeMap;\n\n\/\/\/ The headers used to set a cookie.\n\/\/\/\n\/\/\/ These headers are defined by [RFC 6265](http:\/\/tools.ietf.org\/html\/rfc6265)\npub struct HeaderCollection {\n \/\/\/ An absolute date\/time at which this cookie should expire.\n pub expires: Option<Tm>,\n \/\/\/ A relative time (in seconds) at which this cookie should expire.\n pub max_age: Option<u32>,\n \/\/\/ The scope of the cookie.\n \/\/\/\n \/\/\/ If set, the browser will send this cookie to the set domain and all subdomains.\n \/\/\/ If not set, the browser will only send this cookie to the originating domain.\n \/\/\/\n \/\/\/ This may only be set to the sending domain and its subdomains.\n pub domain: Option<String>,\n \/\/\/ The scope of the cookie.\n pub path: Option<String>,\n \/\/\/ A cookie with this flag should only be sent over secured\/encrypted connections.\n \/\/\/\n \/\/\/ This will be respected by the browser.\n pub secure: bool,\n \/\/\/ A cookie with this flag is only accessible through HTTP and HTTPS.\n \/\/\/\n \/\/\/ This helps to prevent Javascript and, specifically, XSS attacks.\n pub http_only: bool,\n \/\/\/ Any additional headers.\n \/\/\/\n \/\/\/ This may be any sequence of valid characters.\n \/\/\/\n \/\/\/ Extensions will be separated with `;`.\n \/\/\/ If a value is specified in the `Map`, the extension will be\n \/\/\/ written as `[key]=[value]`.\n pub extensions: Option<TreeMap<String, Option<String>>>\n}\n\nimpl HeaderCollection {\n #[doc(hidden)]\n pub fn to_cookie_av(self) -> String {\n let mut options = String::new()\n .append(head(\"Expires\", self.expires, |v| v.rfc822()).as_slice())\n .append(head(\"Max-Age\", self.max_age, |v| v.to_str()).as_slice())\n .append(head(\"Domain\", self.domain, |v| v).as_slice())\n .append(head(\"Path\", self.path, |v| v).as_slice());\n if self.secure { options.push_str(\"; Secure\"); }\n if self.http_only { options.push_str(\"; Http-Only\"); }\n match self.extensions {\n Some(map) => {\n for (header, value) in map.iter() {\n options.push_str(extension(header, value.clone()).as_slice());\n }\n },\n None => ()\n }\n options\n }\n}\n\nimpl HeaderCollection {\n \/\/\/ Convenience function for a set of empty cookie headers\n pub fn empty() -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: None,\n domain: None,\n path: None, \n secure: false,\n http_only: false,\n extensions: None\n }\n }\n\n \/\/\/ Convenience function for a set of cookie headers\n \/\/\/ that will expire the cookie in `seconds` seconds\n pub fn aged(seconds: u32) -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: Some(seconds),\n domain: None,\n path: None, \n secure: false,\n http_only: false,\n extensions: None\n }\n }\n\n \/\/\/ Convenience function for a set of cookie headers\n \/\/\/ declaring the cookie `Secure` and `HttpOnly`\n pub fn secured() -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: None,\n domain: None,\n path: None, \n secure: true,\n http_only: true,\n extensions: None\n }\n }\n}\n\nfn head<V>(header: &str, value: Option<V>, mutator: |V| -> String) -> String {\n match value {\n Some(val) => {\n \/\/ Delimit from previous cookie\/options\n \"; \".to_string()\n \/\/ Add the header\n .append(header).append(\"=\")\n \/\/ Add the mutated value\n .append(mutator(val).as_slice())\n },\n None => String::new()\n }\n}\n\nfn extension(header: &String, value: Option<String>) -> String {\n match value {\n Some(val) => 
head(header.as_slice(), Some(val), |v| v),\n None => header.clone()\n }\n}\n<commit_msg>(fix) Fixed extension headers in response.<commit_after>\/\/! Cookie headers, as defined in RFC 6265\n\nuse time::Tm;\nuse std::collections::TreeMap;\n\n\/\/\/ The headers used to set a cookie.\n\/\/\/\n\/\/\/ These headers are defined by [RFC 6265](http:\/\/tools.ietf.org\/html\/rfc6265)\npub struct HeaderCollection {\n \/\/\/ An absolute date\/time at which this cookie should expire.\n pub expires: Option<Tm>,\n \/\/\/ A relative time (in seconds) at which this cookie should expire.\n pub max_age: Option<u32>,\n \/\/\/ The scope of the cookie.\n \/\/\/\n \/\/\/ If set, the browser will send this cookie to the set domain and all subdomains.\n \/\/\/ If not set, the browser will only send this cookie to the originating domain.\n \/\/\/\n \/\/\/ This may only be set to the sending domain and its subdomains.\n pub domain: Option<String>,\n \/\/\/ The scope of the cookie.\n pub path: Option<String>,\n \/\/\/ A cookie with this flag should only be sent over secured\/encrypted connections.\n \/\/\/\n \/\/\/ This will be respected by the browser.\n pub secure: bool,\n \/\/\/ A cookie with this flag is only accessible through HTTP and HTTPS.\n \/\/\/\n \/\/\/ This helps to prevent Javascript and, specifically, XSS attacks.\n pub http_only: bool,\n \/\/\/ Any additional headers.\n \/\/\/\n \/\/\/ This may be any sequence of valid characters.\n \/\/\/\n \/\/\/ Extensions will be separated with `;`.\n \/\/\/ If a value is specified in the `Map`, the extension will be\n \/\/\/ written as `[key]=[value]`.\n pub extensions: Option<TreeMap<String, Option<String>>>\n}\n\nimpl HeaderCollection {\n #[doc(hidden)]\n pub fn to_cookie_av(self) -> String {\n let mut options = String::new()\n .append(head(\"Expires\", self.expires, |v| v.rfc822()).as_slice())\n .append(head(\"Max-Age\", self.max_age, |v| v.to_str()).as_slice())\n .append(head(\"Domain\", self.domain, |v| v).as_slice())\n .append(head(\"Path\", self.path, |v| v).as_slice());\n if self.secure { options.push_str(\"; Secure\"); }\n if self.http_only { options.push_str(\"; Http-Only\"); }\n match self.extensions {\n Some(map) => {\n for (header, value) in map.iter() {\n options.push_str(extension(header, value.clone()).as_slice());\n }\n },\n None => ()\n }\n options\n }\n}\n\nimpl HeaderCollection {\n \/\/\/ Convenience function for a set of empty cookie headers\n pub fn empty() -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: None,\n domain: None,\n path: None, \n secure: false,\n http_only: false,\n extensions: None\n }\n }\n\n \/\/\/ Convenience function for a set of cookie headers\n \/\/\/ that will expire the cookie in `seconds` seconds\n pub fn aged(seconds: u32) -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: Some(seconds),\n domain: None,\n path: None, \n secure: false,\n http_only: false,\n extensions: None\n }\n }\n\n \/\/\/ Convenience function for a set of cookie headers\n \/\/\/ declaring the cookie `Secure` and `HttpOnly`\n pub fn secured() -> HeaderCollection {\n HeaderCollection {\n expires: None,\n max_age: None,\n domain: None,\n path: None, \n secure: true,\n http_only: true,\n extensions: None\n }\n }\n}\n\nfn head<V>(header: &str, value: Option<V>, mutator: |V| -> String) -> String {\n match value {\n Some(val) => {\n \/\/ Delimit from previous cookie\/options\n \"; \".to_string()\n \/\/ Add the header\n .append(header).append(\"=\")\n \/\/ Add the mutated value\n 
.append(mutator(val).as_slice())\n },\n None => String::new()\n }\n}\n\nfn extension(header: &String, value: Option<String>) -> String {\n match value {\n Some(val) => head(header.as_slice(), Some(val), |v| v),\n None => \"; \".to_string().append(header.as_slice())\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLDivElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLDivElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLDivElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId};\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLDivElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLDivElementDerived for EventTarget {\n fn is_htmldivelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLDivElementTypeId))\n }\n}\n\nimpl HTMLDivElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLDivElement {\n HTMLDivElement {\n htmlelement: HTMLElement::new_inherited(HTMLDivElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLDivElement> {\n let element = HTMLDivElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLDivElementBinding::Wrap)\n }\n}\n\npub trait HTMLDivElementMethods {\n fn Align(&self) -> DOMString;\n fn SetAlign(&mut self, _align: DOMString) -> ErrorResult;\n}\n\nimpl<'a> HTMLDivElementMethods for JSRef<'a, HTMLDivElement> {\n fn Align(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetAlign(&mut self, _align: DOMString) -> ErrorResult {\n Ok(())\n }\n}\n<commit_msg>Remove needless '&mut self' from HTMLDivElementMethods.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLDivElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLDivElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLDivElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId};\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLDivElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLDivElementDerived for EventTarget {\n fn is_htmldivelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLDivElementTypeId))\n }\n}\n\nimpl HTMLDivElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLDivElement {\n HTMLDivElement {\n htmlelement: HTMLElement::new_inherited(HTMLDivElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLDivElement> {\n let element = HTMLDivElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLDivElementBinding::Wrap)\n }\n}\n\npub trait HTMLDivElementMethods {\n fn Align(&self) -> DOMString;\n fn SetAlign(&self, _align: DOMString) -> ErrorResult;\n}\n\nimpl<'a> HTMLDivElementMethods for JSRef<'a, HTMLDivElement> {\n fn Align(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetAlign(&self, _align: DOMString) -> ErrorResult {\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>immutable reference added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added code for s02c12: breaking aes ecb. Needs work on main loop. Won't compile.<commit_after>\nuse common::{err, ascii, base64};\nuse common::cipher::{aes, oracle};\n\n\nstruct CipherBox {\n key: Vec<u8>,\n data: Vec<u8>,\n mode: aes::Mode\n}\n\n\nimpl CipherBox {\n fn new(&self, data: &str, mode: &aes::Mode) -> Result<Self, err::Error> {\n Ok(CipherBox {\n key: try!(key::random(mode.blocksize)),\n data: try!(ascii::str_to_raw(&data)),\n mode: mode\n })\n }\n\n fn gen(&self, prefix: &str) -> Result<Vec<u8>, err::Error> {\n let mut final_input = try!(ascii::str_to_raw(&prefix));\n final_input.extend(&self.data);\n\n aes::encrypt(&final_input, &self.key, &self.mode)\n }\n}\n\n\npub fn break_aes_ecb(cipherbox: &CipherBox) -> Result<String, err::Error> {\n let blocksize = try!(detect_block_size(&cipherbox, max_blocksize));\n ctry!(!try!(oracle::detect_aes_ecb(&try!(cipherbox.gen(&String::from(&[65, 2*blocksize]))), blocksize)),\n \"cipher is not aes ecb, can't break with this module\");\n\n let range: Range<u8> = Range{ start: 1, end: 127 };\n\n let mut plaintext = String::new();\n let mut block_no: usize = 0;\n\n for i in ... 
{\n let prefix = String::from(&[65, 15]);\n\n let cipher = try!(cipherbox.gen(&prefix));\n\n let dict = try!(make_dict(&prefix, &cipherbox, &range));\n\n let cipher_block = cipher.chunks().nth(block_no);\n\n let m = dict.chunks().filter(|&ch| ch == cipher_block).collect();\n ctry!(m.len() == 0 || m.len() > 1, format!(\"no match or multiple matches for character at pos: {}\", i));\n\n let dec_char = m\n \n plaintext.push(dec_char);\n\n }\n Ok(plaintext)\n}\n\n\npub fn make_dict(prefix: &str, cipherbox: &CipherBox, range: &Range<u8>) -> Result<Vec<Vec<u8>>, err::Error> {\n let mut dict = Vec::<Vec<u8>>::new();\n let mut plain = String::from(&prefix);\n\n for i in range {\n plain.push(i as char);\n let cipher = try!(cipherbox.gen(&plain));\n dict.push(cipher);\n plain.pop();\n }\n Ok(dict)\n}\n\n\npub fn detect_block_size(cipherbox: &CipherBox, max: usize) -> Result<usize, err::Error> {\n let len1 = try!(cipherbox.gen(\"\")).len();\n\n let mut prefix = String::from(\"A\");\n for i in 0 .. max {\n let len2 = try!(cipherbox.gen(&prefix)).len();\n if len2 > len1 {\n return Ok(len2 - len1);\n }\n prefix.push('A');\n }\n mkerr!(\"failed to detect cipher block size\")\n}\n\n\npub fn init_cipherbox(filepath: &str) -> Result<CipherBox, err::Error> {\n let plainbase64 = try!(util::read_file_to_str(&filepath));\n let plainraw = try!(base64::base64_to_raw(&try!(ascii::filter_whitespace(&plainbase64))));\n let plaintext = try!(ascii::raw_to_str(&plainraw));\n\n CipherBox::new(&plaintext, &aes::ecb_128_pkcs7)\n}\n\n\npub fn interactive() -> u32 {\n let input_filepath = match env::args().nth(2) {\n Some(v) => v,\n None => { println!(\"please specify input data (base64 encoded) filepath\"); return 1; }\n };\n\n let cipherbox = rtry!(init_cipherbox(&input_filepath), 1);\n let plaintext = rtry!(break_aes_ecb(&cipherbox), 1);\n\n println!(\"{}\", plaintext);\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #18<commit_after>const triangle: &[&[uint]] = &[\n &[75],\n &[95, 64],\n &[17, 47, 82],\n &[18, 35, 87, 10],\n &[20, 04, 82, 47, 65],\n &[19, 01, 23, 75, 03, 34],\n &[88, 02, 77, 73, 07, 63, 67],\n &[99, 65, 04, 28, 06, 16, 70, 92],\n &[41, 41, 26, 56, 83, 40, 80, 70, 33],\n &[41, 48, 72, 33, 47, 32, 37, 16, 94, 29],\n &[53, 71, 44, 65, 25, 43, 91, 52, 97, 51, 14],\n &[70, 11, 33, 28, 77, 73, 17, 78, 39, 68, 17, 57],\n &[91, 71, 52, 38, 17, 14, 91, 43, 58, 50, 27, 29, 48],\n &[63, 66, 04, 68, 89, 53, 67, 30, 73, 16, 69, 87, 40, 31],\n &[04, 62, 98, 27, 23, 09, 70, 98, 73, 93, 38, 53, 60, 04, 23]\n];\n\nfn main() {\n let init = vec::view(triangle, 0, triangle.len() - 1);\n let last = vec::last(triangle);\n let answer = do init.foldr(vec::from_slice(last)) |elm, prev| {\n do vec::from_fn(elm.len()) |i| {\n elm[i] + uint::max(prev[i], prev[i + 1])\n }\n };\n io::println(fmt!(\"%u\", answer[0]));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Build script needs an entry point on non-Windows<commit_after><|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n \/\/ TODO: Only draw when relevant for the window\n let x = self.x();\n let y = self.y();\n \/\/ Redraw window\n self.window.set(Color::BLACK);\n\n self.window.rect(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n 8,\n 16,\n Color::WHITE);\n\n for (y, row) in self.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n if self.x() == x && self.y() == y {\n self.window.char(8 * (x - self.scroll_y) as 
isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n Color::BLACK);\n } else {\n self.window.char(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n Color::WHITE);\n }\n }\n }\n let h = self.window.height();\n let w = self.window.width();\n self.window.rect(0, h as isize - 18, w, 18, Color::rgba(74, 74, 74, 255));\n\n for (n, c) in (if self.status_bar.mode.len() > w \/ (8 * 4) {\n self.status_bar.mode.chars().take(w \/ (8 * 4) - 5).chain(vec!['.', '.', '.']).collect::<Vec<_>>()\n } else {\n self.status_bar.mode.chars().collect()\n }).into_iter().enumerate() {\n self.window.char(n as isize * 8, h as isize - 16 - 1, if c == '\\t' { ' ' } else { c }, [255, 255, 255, 255]);\n }\n\n self.window.sync();\n }\n}\n\n\/\/\/ The statubar (showing various info about the current state of the editor)\npub struct StatusBar {\n \/\/\/ The current mode\n pub mode: String,\n \/\/\/ The cureent char\n pub file: String,\n \/\/\/ The current command\n pub cmd: String,\n \/\/\/ A message (such as an error or other info to the user)\n pub msg: String,\n}\n\nimpl StatusBar {\n \/\/\/ Create new status bar\n pub fn new() -> Self {\n StatusBar {\n mode: \"Normal\".to_string(),\n file: String::new(),\n cmd: String::new(),\n msg: String::new(),\n }\n }\n}\n<commit_msg>Make it up-to-date with libredox<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n \/\/ TODO: Only draw when relevant for the window\n let x = self.x();\n let y = self.y();\n \/\/ Redraw window\n self.window.set(Color::BLACK);\n\n self.window.rect(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n 8,\n 16,\n Color::WHITE);\n\n for (y, row) in self.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n if self.x() == x && self.y() == y {\n self.window.char(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n Color::BLACK);\n } else {\n self.window.char(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n Color::WHITE);\n }\n }\n }\n let h = self.window.height();\n let w = self.window.width();\n self.window.rect(0, h as isize - 18, w, 18, Color::rgba(74, 74, 74, 255));\n\n for (n, c) in (if self.status_bar.mode.len() > w \/ (8 * 4) {\n self.status_bar.mode.chars().take(w \/ (8 * 4) - 5).chain(vec!['.', '.', '.']).collect::<Vec<_>>()\n } else {\n self.status_bar.mode.chars().collect()\n }).into_iter().enumerate() {\n self.window.char(n as isize * 8, h as isize - 16 - 1, if c == '\\t' { ' ' } else { c }, Color::WHITE);\n }\n\n self.window.sync();\n }\n}\n\n\/\/\/ The statubar (showing various info about the current state of the editor)\npub struct StatusBar {\n \/\/\/ The current mode\n pub mode: String,\n \/\/\/ The cureent char\n pub file: String,\n \/\/\/ The current command\n pub cmd: String,\n \/\/\/ A message (such as an error or other info to the user)\n pub msg: String,\n}\n\nimpl StatusBar {\n \/\/\/ Create new status bar\n pub fn new() -> Self {\n StatusBar {\n mode: \"Normal\".to_string(),\n file: String::new(),\n cmd: String::new(),\n msg: String::new(),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Removed inline assembly in favor of calling x86 library.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::path::Path;\nuse std::fmt::{Debug, Formatter, Error as FmtError};\nuse std::result::Result as RResult;\n\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse 
libimagstore::hook::Hook;\nuse libimagstore::hook::accessor::StoreIdAccessor;\nuse libimagstore::hook::accessor::{HookDataAccessor, HookDataAccessorProvider};\nuse libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::position::HookPosition;\nuse libimagstore::hook::result::HookResult;\nuse libimagstore::storeid::StoreId;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::error::GitHookError as GHE;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::error::MapIntoHookError;\nuse vcs::git::result::Result;\nuse vcs::git::runtime::Runtime as GRuntime;\n\npub struct UpdateHook {\n storepath: PathBuf,\n\n runtime: GRuntime,\n\n position: HookPosition,\n}\n\nimpl UpdateHook {\n\n pub fn new(storepath: PathBuf, p: HookPosition) -> UpdateHook {\n UpdateHook {\n runtime: GRuntime::new(&storepath),\n storepath: storepath,\n position: p,\n }\n }\n\n}\n\nimpl Debug for UpdateHook {\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"UpdateHook(storepath={:?}, repository={}, pos={:?}, cfg={:?}\",\n self.storepath,\n (if self.runtime.has_repository() { \"Some(_)\" } else { \"None\" }),\n self.position,\n self.runtime.has_config())\n }\n}\n\nimpl Hook for UpdateHook {\n\n fn name(&self) -> &'static str {\n \"stdhook_git_update\"\n }\n\n fn set_config(&mut self, config: &Value) {\n if let Err(e) = self.runtime.set_config(config) {\n trace_error(&e);\n }\n }\n\n}\n\nimpl HookDataAccessorProvider for UpdateHook {\n\n fn accessor(&self) -> HookDataAccessor {\n HookDataAccessor::StoreIdAccess(self)\n }\n}\n\nimpl StoreIdAccessor for UpdateHook {\n\n \/\/\/ The implementation of the UpdateHook\n \/\/\/\n \/\/\/ # Scope\n \/\/\/\n \/\/\/ This hook takes the git index and commits it either interactively or with a default message,\n \/\/\/ if there is no configuration for an interactive commit.\n \/\/\/\n fn access(&self, id: &StoreId) -> HookResult<()> {\n use vcs::git::action::StoreAction;\n use vcs::git::config::commit_message;\n use vcs::git::error::MapIntoHookError;\n use vcs::git::util::fetch_index;\n use git2::{Reference as GitReference, Repository, Error as Git2Error};\n use git2::{ADD_DEFAULT, STATUS_WT_NEW, STATUS_WT_MODIFIED, IndexMatchedPath};\n\n debug!(\"[GIT UPDATE HOOK]: {:?}\", id);\n\n let action = StoreAction::Update;\n let cfg = try!(self.runtime.config_value_or_err(&action));\n let repo = try!(self.runtime.repository(&action));\n let mut index = try!(fetch_index(repo, &action));\n\n let signature = try!(\n repo.signature()\n .map_err_into(GHEK::MkSignature)\n .map_dbg_err_str(\"Failed to fetch signature\")\n .map_into_hook_error()\n );\n\n let head = try!(\n repo.head()\n .map_err_into(GHEK::HeadFetchError)\n .map_dbg_err_str(\"Failed to fetch HEAD\")\n .map_into_hook_error()\n );\n\n let file_status = try!(\n repo\n .status_file(id.local())\n .map_dbg_err_str(\"Failed to fetch file status\")\n .map_dbg_err(|e| format!(\"\\t-> {:?}\", e))\n .map_err_into(GHEK::RepositoryFileStatusError)\n .map_into_hook_error()\n );\n\n let cb = &mut |path: &Path, _matched_spec: &[u8]| -> i32 {\n if file_status.contains(STATUS_WT_NEW) || file_status.contains(STATUS_WT_MODIFIED) {\n debug!(\"[GIT CREATE HOOK]: File is modified\/new: {}\", path.display());\n 0\n } else {\n debug!(\"[GIT CREATE HOOK]: Ignoring file: {}\", path.display());\n 1\n }\n };\n\n try!(\n index.add_all(&[id.local()], ADD_DEFAULT, Some(cb as &mut IndexMatchedPath))\n 
.map_err_into(GHEK::RepositoryPathAddingError)\n .map_dbg_err_str(\"Failed to add to index\")\n .map_into_hook_error()\n );\n\n let mut parents = Vec::new();\n {\n let commit = try!(\n repo.find_commit(head.target().unwrap())\n .map_err_into(GHEK::RepositoryParentFetchingError)\n .map_dbg_err_str(\"Failed to find commit HEAD\")\n .map_into_hook_error()\n );\n parents.push(commit);\n }\n\n \/\/ for converting from Vec<Commit> to Vec<&Commit>\n let parents = parents.iter().collect::<Vec<_>>();\n\n let tree_id = try!(\n index.write_tree()\n .map_err_into(GHEK::RepositoryIndexWritingError)\n .map_dbg_err_str(\"Failed to write tree\")\n .map_into_hook_error()\n );\n\n\n let tree = try!(\n repo.find_tree(tree_id)\n .map_err_into(GHEK::RepositoryParentFetchingError)\n .map_dbg_err_str(\"Failed to find tree\")\n .map_into_hook_error()\n );\n\n let message = try!(commit_message(cfg, StoreAction::Update)\n .map_dbg_err_str(\"Failed to get commit message\"));\n\n repo.commit(Some(\"HEAD\"), &signature, &signature, &message, &tree, &parents)\n .map_dbg_str(\"Committed\")\n .map_dbg_err_str(\"Failed to commit\")\n .map_err_into(GHEK::RepositoryCommittingError)\n .map_into_hook_error()\n .map(|_| ())\n }\n\n}\n\n<commit_msg>Write the tree before finding the parent commits<commit_after>use std::path::PathBuf;\nuse std::path::Path;\nuse std::fmt::{Debug, Formatter, Error as FmtError};\nuse std::result::Result as RResult;\n\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse libimagstore::hook::Hook;\nuse libimagstore::hook::accessor::StoreIdAccessor;\nuse libimagstore::hook::accessor::{HookDataAccessor, HookDataAccessorProvider};\nuse libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::position::HookPosition;\nuse libimagstore::hook::result::HookResult;\nuse libimagstore::storeid::StoreId;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::error::GitHookError as GHE;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::error::MapIntoHookError;\nuse vcs::git::result::Result;\nuse vcs::git::runtime::Runtime as GRuntime;\n\npub struct UpdateHook {\n storepath: PathBuf,\n\n runtime: GRuntime,\n\n position: HookPosition,\n}\n\nimpl UpdateHook {\n\n pub fn new(storepath: PathBuf, p: HookPosition) -> UpdateHook {\n UpdateHook {\n runtime: GRuntime::new(&storepath),\n storepath: storepath,\n position: p,\n }\n }\n\n}\n\nimpl Debug for UpdateHook {\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"UpdateHook(storepath={:?}, repository={}, pos={:?}, cfg={:?}\",\n self.storepath,\n (if self.runtime.has_repository() { \"Some(_)\" } else { \"None\" }),\n self.position,\n self.runtime.has_config())\n }\n}\n\nimpl Hook for UpdateHook {\n\n fn name(&self) -> &'static str {\n \"stdhook_git_update\"\n }\n\n fn set_config(&mut self, config: &Value) {\n if let Err(e) = self.runtime.set_config(config) {\n trace_error(&e);\n }\n }\n\n}\n\nimpl HookDataAccessorProvider for UpdateHook {\n\n fn accessor(&self) -> HookDataAccessor {\n HookDataAccessor::StoreIdAccess(self)\n }\n}\n\nimpl StoreIdAccessor for UpdateHook {\n\n \/\/\/ The implementation of the UpdateHook\n \/\/\/\n \/\/\/ # Scope\n \/\/\/\n \/\/\/ This hook takes the git index and commits it either interactively or with a default message,\n \/\/\/ if there is no configuration for an interactive commit.\n \/\/\/\n fn access(&self, id: &StoreId) -> HookResult<()> {\n use 
vcs::git::action::StoreAction;\n use vcs::git::config::commit_message;\n use vcs::git::error::MapIntoHookError;\n use vcs::git::util::fetch_index;\n use git2::{Reference as GitReference, Repository, Error as Git2Error};\n use git2::{ADD_DEFAULT, STATUS_WT_NEW, STATUS_WT_MODIFIED, IndexMatchedPath};\n\n debug!(\"[GIT UPDATE HOOK]: {:?}\", id);\n\n let action = StoreAction::Update;\n let cfg = try!(self.runtime.config_value_or_err(&action));\n let repo = try!(self.runtime.repository(&action));\n let mut index = try!(fetch_index(repo, &action));\n\n let signature = try!(\n repo.signature()\n .map_err_into(GHEK::MkSignature)\n .map_dbg_err_str(\"Failed to fetch signature\")\n .map_into_hook_error()\n );\n\n let head = try!(\n repo.head()\n .map_err_into(GHEK::HeadFetchError)\n .map_dbg_err_str(\"Failed to fetch HEAD\")\n .map_into_hook_error()\n );\n\n let file_status = try!(\n repo\n .status_file(id.local())\n .map_dbg_err_str(\"Failed to fetch file status\")\n .map_dbg_err(|e| format!(\"\\t-> {:?}\", e))\n .map_err_into(GHEK::RepositoryFileStatusError)\n .map_into_hook_error()\n );\n\n let cb = &mut |path: &Path, _matched_spec: &[u8]| -> i32 {\n if file_status.contains(STATUS_WT_NEW) || file_status.contains(STATUS_WT_MODIFIED) {\n debug!(\"[GIT CREATE HOOK]: File is modified\/new: {}\", path.display());\n 0\n } else {\n debug!(\"[GIT CREATE HOOK]: Ignoring file: {}\", path.display());\n 1\n }\n };\n\n try!(\n index.add_all(&[id.local()], ADD_DEFAULT, Some(cb as &mut IndexMatchedPath))\n .map_err_into(GHEK::RepositoryPathAddingError)\n .map_dbg_err_str(\"Failed to add to index\")\n .map_into_hook_error()\n );\n\n let tree_id = try!(\n index.write_tree()\n .map_err_into(GHEK::RepositoryIndexWritingError)\n .map_dbg_err_str(\"Failed to write tree\")\n .map_into_hook_error()\n );\n\n let mut parents = Vec::new();\n {\n let commit = try!(\n repo.find_commit(head.target().unwrap())\n .map_err_into(GHEK::RepositoryParentFetchingError)\n .map_dbg_err_str(\"Failed to find commit HEAD\")\n .map_into_hook_error()\n );\n parents.push(commit);\n }\n\n \/\/ for converting from Vec<Commit> to Vec<&Commit>\n let parents = parents.iter().collect::<Vec<_>>();\n\n let tree = try!(\n repo.find_tree(tree_id)\n .map_err_into(GHEK::RepositoryParentFetchingError)\n .map_dbg_err_str(\"Failed to find tree\")\n .map_into_hook_error()\n );\n\n let message = try!(commit_message(cfg, StoreAction::Update)\n .map_dbg_err_str(\"Failed to get commit message\"));\n\n repo.commit(Some(\"HEAD\"), &signature, &signature, &message, &tree, &parents)\n .map_dbg_str(\"Committed\")\n .map_dbg_err_str(\"Failed to commit\")\n .map_err_into(GHEK::RepositoryCommittingError)\n .map_into_hook_error()\n .map(|_| ())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Jump search in Rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add new function PacketType::id<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for helpful error messages<commit_after>use std::process;\n\nfn assert_output_contains(output: &process::Output, needle: &str) {\n let stdout = String::from_utf8_lossy(&output.stdout);\n let stderr = String::from_utf8_lossy(&output.stderr);\n if stderr.to_lowercase().find(needle).is_none() && stdout.to_lowercase().find(needle).is_none() {\n panic!(\"Match was unsuccessful.\\nSTDOUT: {:?}\\nSTDERR: {:?}\\nNEEDLE: {:?}\",\n stdout, stderr, needle);\n }\n}\n\n\nmod cargo_add {\n use super::assert_output_contains;\n use std::process;\n\n #[test]\n fn invalid_arguments() {\n \/\/ - 
should state that the argument is invalid\n \/\/ - should provide usage information\n let call = process::Command::new(\"target\/debug\/cargo-add\")\n .output().unwrap();\n\n assert!(!call.status.success());\n assert_output_contains(&call, \"invalid argument\");\n assert_output_contains(&call, \"usage:\");\n\n\n let call = process::Command::new(\"target\/debug\/cargo-add\")\n .arg(\"invalid\").arg(\"arguments\").arg(\"here\")\n .output().unwrap();\n\n assert!(!call.status.success());\n assert_output_contains(&call, \"invalid argument\");\n assert_output_contains(&call, \"usage:\");\n }\n\n #[test]\n fn nonexistant_files() {\n \/\/ - should state that file could not be found\n\n let call = process::Command::new(\"target\/debug\/cargo-add\")\n .arg(\"add\") \/* always necessary *\/ .arg(\"pkg\")\n .arg(\"--manifest-path\").arg(\"this-file-doesnt-exist.txt\")\n .output().unwrap();\n\n assert!(!call.status.success());\n assert_output_contains(&call, \"no such file or directory\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. 
The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n store.get(store_id)\n .map(|o| o.map(Task::new))\n .map_err_into(TodoErrorKind::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n unimplemented!()\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", 
uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<commit_msg>Impl Task::retrieve_from_import()<commit_after>use std::collections::BTreeMap;\nuse std::ops::{Deref, DerefMut};\nuse std::io::BufRead;\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse uuid::Uuid;\n\nuse task_hookrs::task::Task as TTask;\nuse task_hookrs::import::{import_task, import_tasks};\n\nuse libimagstore::store::{FileLockEntry, Store};\nuse libimagstore::storeid::{IntoStoreId, StoreIdIterator, StoreId};\nuse module_path::ModuleEntryPath;\n\nuse error::{TodoError, TodoErrorKind, MapErrInto};\nuse result::Result;\n\n\/\/\/ Task struct containing a `FileLockEntry`\n#[derive(Debug)]\npub struct Task<'a>(FileLockEntry<'a>);\n\nimpl<'a> Task<'a> {\n\n \/\/\/ Concstructs a new `Task` with a `FileLockEntry`\n pub fn new(fle: FileLockEntry<'a>) -> Task<'a> {\n Task(fle)\n }\n\n pub fn import<R: BufRead>(store: &'a Store, mut r: R) -> Result<(Task<'a>, Uuid)> {\n let mut line = String::new();\n r.read_line(&mut line);\n import_task(&line.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| {\n let uuid = t.uuid().clone();\n t.into_task(store).map(|t| (t, uuid))\n })\n }\n\n \/\/\/ Get a task from an import string. That is: read the imported string, get the UUID from it\n \/\/\/ and try to load this UUID from store.\n \/\/\/\n \/\/\/ Possible return values are:\n \/\/\/\n \/\/\/ * Ok(Ok(Task))\n \/\/\/ * Ok(Err(String)) - where the String is the String read from the `r` parameter\n \/\/\/ * Err(_) - where the error is an error that happened during evaluation\n \/\/\/\n pub fn get_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<RResult<Task<'a>, String>>\n {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::get_from_string(store, line)\n }\n\n \/\/\/ Get a task from a String. 
The String is expected to contain the JSON-representation of the\n \/\/\/ Task to get from the store (only the UUID really matters in this case)\n \/\/\/\n \/\/\/ For an explanation on the return values see `Task::get_from_import()`.\n pub fn get_from_string(store: &'a Store, s: String) -> Result<RResult<Task<'a>, String>> {\n import_task(s.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .map(|t| t.uuid().clone())\n .and_then(|uuid| Task::get_from_uuid(store, uuid))\n .and_then(|o| match o {\n None => Ok(Err(s)),\n Some(t) => Ok(Ok(t)),\n })\n }\n\n \/\/\/ Get a task from an UUID.\n \/\/\/\n \/\/\/ If there is no task with this UUID, this returns `Ok(None)`.\n pub fn get_from_uuid(store: &'a Store, uuid: Uuid) -> Result<Option<Task<'a>>> {\n let store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n store.get(store_id)\n .map(|o| o.map(Task::new))\n .map_err_into(TodoErrorKind::StoreError)\n }\n\n \/\/\/ Same as Task::get_from_import() but uses Store::retrieve() rather than Store::get(), to\n \/\/\/ implicitely create the task if it does not exist.\n pub fn retrieve_from_import<R: BufRead>(store: &'a Store, mut r: R) -> Result<Task<'a>> {\n let mut line = String::new();\n r.read_line(&mut line);\n Task::retrieve_from_string(store, line)\n }\n\n \/\/\/ Retrieve a task from a String. The String is expected to contain the JSON-representation of\n \/\/\/ the Task to retrieve from the store (only the UUID really matters in this case)\n pub fn retrieve_from_string(store: &'a Store, s: String) -> Result<Task<'a>> {\n Task::get_from_string(store, s)\n .and_then(|opt| match opt {\n Ok(task) => Ok(task),\n Err(string) => import_task(string.as_str())\n .map_err_into(TodoErrorKind::ImportError)\n .and_then(|t| t.into_task(store)),\n })\n }\n\n pub fn delete_by_uuid(store: &Store, uuid: Uuid) -> Result<()> {\n store.delete(ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid())\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all_as_ids(store: &Store) -> Result<StoreIdIterator> {\n store.retrieve_for_module(\"todo\/taskwarrior\")\n .map_err(|e| TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n\n pub fn all(store: &Store) -> Result<TaskIterator> {\n Task::all_as_ids(store)\n .map(|iter| TaskIterator::new(store, iter))\n }\n\n}\n\nimpl<'a> Deref for Task<'a> {\n type Target = FileLockEntry<'a>;\n\n fn deref(&self) -> &FileLockEntry<'a> {\n &self.0\n }\n\n}\n\nimpl<'a> DerefMut for Task<'a> {\n\n fn deref_mut(&mut self) -> &mut FileLockEntry<'a> {\n &mut self.0\n }\n\n}\n\n\/\/\/ A trait to get a `libimagtodo::task::Task` out of the implementing object.\n\/\/\/ This Task struct is merely a wrapper for a `FileLockEntry`, therefore the function name\n\/\/\/ `into_filelockentry`.\npub trait IntoTask<'a> {\n\n \/\/\/ # Usage\n \/\/\/ ```ignore\n \/\/\/ use std::io::stdin;\n \/\/\/\n \/\/\/ use task_hookrs::task::Task;\n \/\/\/ use task_hookrs::import::import;\n \/\/\/ use libimagstore::store::{Store, FileLockEntry};\n \/\/\/\n \/\/\/ if let Ok(task_hookrs_task) = import(stdin()) {\n \/\/\/ \/\/ Store is given at runtime\n \/\/\/ let task = task_hookrs_task.into_filelockentry(store);\n \/\/\/ println!(\"Task with uuid: {}\", task.flentry.get_header().get(\"todo.uuid\"));\n \/\/\/ }\n \/\/\/ ```\n fn into_task(self, store : &'a Store) -> Result<Task<'a>>;\n\n}\n\nimpl<'a> IntoTask<'a> for TTask {\n\n fn into_task(self, store : &'a Store) -> Result<Task<'a>> {\n let uuid = self.uuid();\n let 
store_id = ModuleEntryPath::new(format!(\"taskwarrior\/{}\", uuid)).into_storeid();\n\n match store.retrieve(store_id) {\n Err(e) => return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(mut fle) => {\n {\n let mut header = fle.get_header_mut();\n match header.read(\"todo\") {\n Ok(None) => {\n if let Err(e) = header.set(\"todo\", Value::Table(BTreeMap::new())) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n Ok(Some(_)) => { }\n Err(e) => {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n if let Err(e) = header.set(\"todo.uuid\", Value::String(format!(\"{}\",uuid))) {\n return Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e))))\n }\n }\n\n \/\/ If none of the errors above have returned the function, everything is fine\n Ok(Task::new(fle))\n }\n }\n }\n\n}\n\ntrait FromStoreId {\n fn from_storeid<'a>(&'a Store, StoreId) -> Result<Task<'a>>;\n}\n\nimpl<'a> FromStoreId for Task<'a> {\n\n fn from_storeid<'b>(store: &'b Store, id: StoreId) -> Result<Task<'b>> {\n match store.retrieve(id) {\n Err(e) => Err(TodoError::new(TodoErrorKind::StoreError, Some(Box::new(e)))),\n Ok(c) => Ok(Task::new( c )),\n }\n }\n}\n\npub struct TaskIterator<'a> {\n store: &'a Store,\n iditer: StoreIdIterator,\n}\n\nimpl<'a> TaskIterator<'a> {\n\n pub fn new(store: &'a Store, iditer: StoreIdIterator) -> TaskIterator<'a> {\n TaskIterator {\n store: store,\n iditer: iditer,\n }\n }\n\n}\n\nimpl<'a> Iterator for TaskIterator<'a> {\n type Item = Result<Task<'a>>;\n\n fn next(&mut self) -> Option<Result<Task<'a>>> {\n self.iditer.next().map(|id| Task::from_storeid(self.store, id))\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse super::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file_id::*;\n\n#[derive(Debug)]\npub enum FileHeaderSpec {\n Null,\n Bool,\n Integer,\n UInteger,\n Float,\n Text,\n Key { name: &'static str, value_type: Box<FileHeaderSpec> },\n Map { keys: Vec<FileHeaderSpec> },\n Array { allowed_types: Vec<FileHeaderSpec> },\n}\n\n#[derive(Debug)]\npub enum FileHeaderData {\n Null,\n Bool(bool),\n Integer(i64),\n UInteger(u64),\n Float(f64),\n Text(String),\n Key { name: String, value: Box<FileHeaderData> },\n Map { keys: Vec<FileHeaderData> },\n Array { values: Box<Vec<FileHeaderData>> },\n}\n\nimpl Display for FileHeaderSpec {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n match self {\n &FileHeaderSpec::Null => write!(fmt, \"NULL\"),\n &FileHeaderSpec::Bool => write!(fmt, \"Bool\"),\n &FileHeaderSpec::Integer => write!(fmt, \"Integer\"),\n &FileHeaderSpec::UInteger => write!(fmt, \"UInteger\"),\n &FileHeaderSpec::Float => write!(fmt, \"Float\"),\n &FileHeaderSpec::Text => write!(fmt, \"Text\"),\n &FileHeaderSpec::Key{name: ref n, value_type: ref vt} => {\n write!(fmt, \"Key({:?}) -> {:?}\", n, vt)\n }\n &FileHeaderSpec::Map{keys: ref ks} => {\n write!(fmt, \"Map -> {:?}\", ks)\n }\n &FileHeaderSpec::Array{allowed_types: ref at} => {\n write!(fmt, \"Array({:?})\", at)\n }\n }\n }\n\n}\n\npub struct MatchError<'a> {\n summary: String,\n expected: &'a FileHeaderSpec,\n found: &'a FileHeaderData\n}\n\nimpl<'a> MatchError<'a> {\n\n pub fn new(s: String,\n ex: &'a FileHeaderSpec,\n found: &'a FileHeaderData) -> MatchError<'a> {\n MatchError {\n summary: s,\n expected: ex,\n found: found,\n }\n }\n\n pub fn format(&self) -> String {\n format!(\"MatchError: {:?}\\nExpected: 
{:?}\\nFound: {:?}\\n\",\n self.summary, self.expected, self.found)\n }\n}\n\nimpl<'a> Error for MatchError<'a> {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl<'a> Debug for MatchError<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.format());\n Ok(())\n }\n\n}\n\nimpl<'a> Display for MatchError<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.format());\n Ok(())\n }\n\n}\n\npub fn match_header_spec<'a>(spec: &'a FileHeaderSpec, data: &'a FileHeaderData)\n -> Option<MatchError<'a>>\n{\n match (spec, data) {\n (&FileHeaderSpec::Null, &FileHeaderData::Null) => { }\n (&FileHeaderSpec::Bool, &FileHeaderData::Bool(_)) => { }\n (&FileHeaderSpec::Integer, &FileHeaderData::Integer(_)) => { }\n (&FileHeaderSpec::UInteger, &FileHeaderData::UInteger(_)) => { }\n (&FileHeaderSpec::Float, &FileHeaderData::Float(_)) => { }\n (&FileHeaderSpec::Text, &FileHeaderData::Text(_)) => { }\n\n (\n &FileHeaderSpec::Key{name: ref kname, value_type: ref vtype},\n &FileHeaderData::Key{name: ref n, value: ref val}\n ) => {\n if kname != n {\n unimplemented!();\n }\n return match_header_spec(&*vtype, &*val);\n }\n\n (\n &FileHeaderSpec::Map{keys: ref sks},\n &FileHeaderData::Map{keys: ref dks}\n ) => {\n for (s, d) in sks.iter().zip(dks.iter()) {\n let res = match_header_spec(s, d);\n if res.is_some() {\n return res;\n }\n }\n }\n\n (\n &FileHeaderSpec::Array{allowed_types: ref vtypes},\n &FileHeaderData::Array{values: ref vs}\n ) => {\n for (t, v) in vtypes.iter().zip(vs.iter()) {\n let res = match_header_spec(t, v);\n if res.is_some() {\n return res;\n }\n }\n }\n\n (k, v) => {\n return Some(MatchError::new(String::from(\"Expected type does not match found type\"),\n k, v\n ))\n }\n }\n None\n}\n\npub struct File {\n header : FileHeaderData,\n data : String,\n id : FileID,\n}\n\nimpl<'a> File {\n\n fn new<HP>(prs: &Parser<HP>, path: &String) -> Result<File, ParserError>\n where HP: FileHeaderParser<'a>\n {\n File::read_file(path).and_then(|p| prs.read(p))\n .and_then(|(h, d)|\n Ok(File {\n header: h,\n data: d,\n id: from_path_string(path),\n }))\n }\n\n fn getID(&self) -> FileID {\n self.id.clone()\n }\n\n fn read_file(p: &String) -> Result<String, ParserError> {\n Ok(String::from(\"\"))\n }\n\n}\n\n<commit_msg>Store the file handle in our internal File type<commit_after>use std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse super::parser::{FileHeaderParser, Parser, ParserError};\nuse storage::file_id::*;\n\nuse std::fs::File as FSFile;\n\n#[derive(Debug)]\npub enum FileHeaderSpec {\n Null,\n Bool,\n Integer,\n UInteger,\n Float,\n Text,\n Key { name: &'static str, value_type: Box<FileHeaderSpec> },\n Map { keys: Vec<FileHeaderSpec> },\n Array { allowed_types: Vec<FileHeaderSpec> },\n}\n\n#[derive(Debug)]\npub enum FileHeaderData {\n Null,\n Bool(bool),\n Integer(i64),\n UInteger(u64),\n Float(f64),\n Text(String),\n Key { name: String, value: Box<FileHeaderData> },\n Map { keys: Vec<FileHeaderData> },\n Array { values: Box<Vec<FileHeaderData>> },\n}\n\nimpl Display for FileHeaderSpec {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n match self {\n &FileHeaderSpec::Null => write!(fmt, \"NULL\"),\n &FileHeaderSpec::Bool => write!(fmt, \"Bool\"),\n &FileHeaderSpec::Integer => write!(fmt, \"Integer\"),\n &FileHeaderSpec::UInteger => write!(fmt, \"UInteger\"),\n &FileHeaderSpec::Float => write!(fmt, \"Float\"),\n 
&FileHeaderSpec::Text => write!(fmt, \"Text\"),\n &FileHeaderSpec::Key{name: ref n, value_type: ref vt} => {\n write!(fmt, \"Key({:?}) -> {:?}\", n, vt)\n }\n &FileHeaderSpec::Map{keys: ref ks} => {\n write!(fmt, \"Map -> {:?}\", ks)\n }\n &FileHeaderSpec::Array{allowed_types: ref at} => {\n write!(fmt, \"Array({:?})\", at)\n }\n }\n }\n\n}\n\npub struct MatchError<'a> {\n summary: String,\n expected: &'a FileHeaderSpec,\n found: &'a FileHeaderData\n}\n\nimpl<'a> MatchError<'a> {\n\n pub fn new(s: String,\n ex: &'a FileHeaderSpec,\n found: &'a FileHeaderData) -> MatchError<'a> {\n MatchError {\n summary: s,\n expected: ex,\n found: found,\n }\n }\n\n pub fn format(&self) -> String {\n format!(\"MatchError: {:?}\\nExpected: {:?}\\nFound: {:?}\\n\",\n self.summary, self.expected, self.found)\n }\n}\n\nimpl<'a> Error for MatchError<'a> {\n\n fn description(&self) -> &str {\n &self.summary[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl<'a> Debug for MatchError<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.format());\n Ok(())\n }\n\n}\n\nimpl<'a> Display for MatchError<'a> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{}\", self.format());\n Ok(())\n }\n\n}\n\npub fn match_header_spec<'a>(spec: &'a FileHeaderSpec, data: &'a FileHeaderData)\n -> Option<MatchError<'a>>\n{\n match (spec, data) {\n (&FileHeaderSpec::Null, &FileHeaderData::Null) => { }\n (&FileHeaderSpec::Bool, &FileHeaderData::Bool(_)) => { }\n (&FileHeaderSpec::Integer, &FileHeaderData::Integer(_)) => { }\n (&FileHeaderSpec::UInteger, &FileHeaderData::UInteger(_)) => { }\n (&FileHeaderSpec::Float, &FileHeaderData::Float(_)) => { }\n (&FileHeaderSpec::Text, &FileHeaderData::Text(_)) => { }\n\n (\n &FileHeaderSpec::Key{name: ref kname, value_type: ref vtype},\n &FileHeaderData::Key{name: ref n, value: ref val}\n ) => {\n if kname != n {\n unimplemented!();\n }\n return match_header_spec(&*vtype, &*val);\n }\n\n (\n &FileHeaderSpec::Map{keys: ref sks},\n &FileHeaderData::Map{keys: ref dks}\n ) => {\n for (s, d) in sks.iter().zip(dks.iter()) {\n let res = match_header_spec(s, d);\n if res.is_some() {\n return res;\n }\n }\n }\n\n (\n &FileHeaderSpec::Array{allowed_types: ref vtypes},\n &FileHeaderData::Array{values: ref vs}\n ) => {\n for (t, v) in vtypes.iter().zip(vs.iter()) {\n let res = match_header_spec(t, v);\n if res.is_some() {\n return res;\n }\n }\n }\n\n (k, v) => {\n return Some(MatchError::new(String::from(\"Expected type does not match found type\"),\n k, v\n ))\n }\n }\n None\n}\n\npub struct File {\n header : FileHeaderData,\n data : String,\n id : FileID,\n handle : Option<FSFile>,\n}\n\nimpl<'a> File {\n\n fn new<HP>(prs: &Parser<HP>, path: &String) -> Result<File, ParserError>\n where HP: FileHeaderParser<'a>\n {\n File::read_file(path).and_then(|p| prs.read(p))\n .and_then(|(h, d)|\n Ok(File {\n header: h,\n data: d,\n id: from_path_string(path),\n handle: None,\n }))\n }\n\n pub fn from_handle(id: FileID, f: FSFile) -> File {\n use std::io::Read;\n\n let mut contents = String::new();\n f.read_to_string(&mut contents);\n\n File {\n header: FileHeaderData::Null,\n data: contents,\n id: id,\n handle: Some(f)\n }\n }\n\n fn getID(&self) -> FileID {\n self.id.clone()\n }\n\n fn read_file(p: &String) -> Result<String, ParserError> {\n Ok(String::from(\"\"))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>finished \"syntetic\" backend for cyrillic<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>Allow various empty reflexives to work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test(mac): disable mapping tests, they are specific to windows right now<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tuple exercise from @ruipserra! 😻<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> start work on net:link<commit_after>\/*\n This Source Code Form is subject to the terms of the Mozilla Public\n License, v. 2.0. If a copy of the MPL was not distributed with this\n file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n*\/\n\n\/\/! The link is the layer which emit and receive UDP datagrams using the `net::packet::Packet`\n\/\/! It uses `net::connection::Connection` to track state in an UDP exchange between two hosts\n\nuse net::connection::*;\nuse std::net::*;\n\nquick_error! {\n #[derive(Debug)]\n pub enum LinkError {\n BadAddress(descr: String) {\n description(descr)\n }\n }\n}\n\n#[derive(Debug, PartialEq, Eq)]\npub struct Link {\n remote_addr: SocketAddr\n}\n\nfn check_addr_v4(addr: &Ipv4Addr) -> bool {\n match addr.octets() {\n [127,0,0,0] => false,\n [0,0,0,0] => true,\n [127,_,_,_] => true,\n _ => false\n }\n}\n\nfn check_addr_v6(addr: &Ipv6Addr) -> bool {\n addr.is_loopback()\n}\n\nimpl Link {\n fn new_server(remote: &SocketAddr) -> Result<Link, LinkError> {\n let check = match remote {\n &SocketAddr::V4(addr_v4) => check_addr_v4(addr_v4.ip()),\n &SocketAddr::V6(addr_v6) => check_addr_v6(addr_v6.ip())\n };\n if check {\n Ok(Link{\n remote_addr: remote.clone()\n })\n } else {\n Err(LinkError::BadAddress(\n format!(\"server must have local address. {} is a remote address\", remote)\n ))\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n\n use super::*;\n use test_utils::*;\n use test_utils::ResType::*;\n use std::net::SocketAddr;\n use std::str::FromStr;\n\n #[test]\n fn server_only_on_localhost() {\n \/\/ server on remote is forbidden\n\n let addrs = [\n \/\/ ipv4\n Bad(\"192.168.1.1:0\"), Bad(\"192.168.1.2:0\"), Bad(\"10.10.1.1:0\"), Bad(\"1.1.1.1:0\"), Bad(\"8.8.8.8:0\"),\n Good(\"127.0.0.1:0\"), Good(\"0.0.0.0:0\"), Good(\"127.1.1.1:0\"), Good(\"127.127.127.127:0\"),\n \/\/ ipv4 - special case\n Bad(\"127.0.0.0:0\"),\n \/\/ ipv6\n Bad(\"[2001:db8::211:22ff:fe33:4455]:0\"),\n Bad(\"[2021:db8::211:22ff:fe33:4455]:0\"),\n Bad(\"[2001:db8::211:22ff:fe33:5555]:0\"),\n Bad(\"[2001:470:26:307:0503:c039:de6b:18d4]:0\"),\n Bad(\"[2001:470:26:307:6484:691f:1e4f:9ef6]:0\"),\n Bad(\"[2001:470:26:307:d90e:988a:779b:af51]:0\"),\n Good(\"[::1]:0\")\n ];\n\n let addrs = addrs.iter()\n .map(|str| match str {\n &Bad(addr) => Bad(SocketAddr::from_str(addr)\n .expect(format!(\"BadAddr: {}\", addr).as_str())),\n &Good(addr) => Good(SocketAddr::from_str(addr)\n .expect(format!(\"BadAddr: {}\", addr).as_str()))\n });\n test_loop_with_result(addrs, &Link::new_server)\n }\n\n \/*\n #[test]\n fn init_link() {\n let serv_addr : std::net::SocketAddr = \"127.0.0.1:4242\".parse().unwrap();\n\n let server = Link::server(serv_addr);\n let client = Link::client(serv_addr)\n }\n *\/\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fixed echo of text<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(globs)]\n\nextern crate piston;\n\nextern crate hgl;\nextern crate gl;\n\nuse piston::{\n AssetStore, \n Game, \n GameIteratorSettings,\n GameWindowSDL2, \n GameWindowSettings, \n RenderArgs\n};\n\nuse std::mem::size_of;\nuse hgl::{Shader, Program, Triangles, Vbo, Vao};\n\n#[allow(dead_code)]\npub struct 
App {\n program: Program,\n vao: Vao,\n vbo: Vbo\n}\n\nstatic VERTEX_SHADER: &'static str = r\"\n attribute vec2 position;\n \n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n\";\n\nstatic FRAGMENT_SHADER: &'static str = r\"\n void main() {\n gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n }\n\";\n\nimpl App {\n \/\/\/ Creates a new application.\n pub fn new() -> App {\n let vao = Vao::new();\n vao.bind();\n\n let program = Program::link([Shader::compile(VERTEX_SHADER, hgl::VertexShader).unwrap(),\n Shader::compile(FRAGMENT_SHADER, hgl::FragmentShader).unwrap()]).unwrap();\n program.bind_frag(0, \"out_color\");\n program.bind();\n\n let vbo = Vbo::from_data([\n 0.0f32, 0.5, 1.0, 0.0, 0.0,\n 0.5, -0.5, 0.0, 1.0, 0.0,\n -0.5, -0.5, 0.0, 0.0, 1.0\n ], hgl::StaticDraw);\n\n vao.enable_attrib(&program, \"position\", gl::FLOAT, 2, 5*size_of::<f32>() as i32, 0);\n vao.enable_attrib(&program, \"color\", gl::FLOAT, 3, 5*size_of::<f32>() as i32, 2*size_of::<f32>());\n vbo.bind();\n\n App {\n program: program,\n vao: vao,\n vbo: vbo\n }\n }\n}\n\nimpl Game for App {\n fn render(&mut self, args: &mut RenderArgs) {\n gl::Viewport(0, 0, args.width as i32, args.height as i32);\n gl::ClearColor(0.0, 0.0, 0.0, 0.1);\n gl::Clear(gl::COLOR_BUFFER_BIT);\n self.vao.draw_array(Triangles, 0, 3);\n }\n}\n\nfn main() {\n let mut window = GameWindowSDL2::new(\n GameWindowSettings {\n title: \"Test\".to_string(),\n size: [800, 600],\n fullscreen: false,\n exit_on_esc: true\n }\n );\n\n let game_iter_settings = GameIteratorSettings {\n updates_per_second: 120,\n max_frames_per_second: 60,\n };\n App::new().run(&mut window, &mut AssetStore::empty(), &game_iter_settings);\n}\n\n<commit_msg>Removed use of 'out_color'<commit_after>#![feature(globs)]\n\nextern crate piston;\n\nextern crate hgl;\nextern crate gl;\n\nuse piston::{\n AssetStore, \n Game, \n GameIteratorSettings,\n GameWindowSDL2, \n GameWindowSettings, \n RenderArgs\n};\n\nuse std::mem::size_of;\nuse hgl::{Shader, Program, Triangles, Vbo, Vao};\n\n#[allow(dead_code)]\npub struct App {\n program: Program,\n vao: Vao,\n vbo: Vbo\n}\n\nstatic VERTEX_SHADER: &'static str = r\"\n attribute vec2 position;\n \n void main() {\n gl_Position = vec4(position, 0.0, 1.0);\n }\n\";\n\nstatic FRAGMENT_SHADER: &'static str = r\"\n void main() {\n gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);\n }\n\";\n\nimpl App {\n \/\/\/ Creates a new application.\n pub fn new() -> App {\n let vao = Vao::new();\n vao.bind();\n\n let program = Program::link([Shader::compile(VERTEX_SHADER, hgl::VertexShader).unwrap(),\n Shader::compile(FRAGMENT_SHADER, hgl::FragmentShader).unwrap()]).unwrap();\n program.bind();\n\n let vbo = Vbo::from_data([\n 0.0f32, 0.5, 1.0, 0.0, 0.0,\n 0.5, -0.5, 0.0, 1.0, 0.0,\n -0.5, -0.5, 0.0, 0.0, 1.0\n ], hgl::StaticDraw);\n\n vao.enable_attrib(&program, \"position\", gl::FLOAT, 2, 5*size_of::<f32>() as i32, 0);\n vao.enable_attrib(&program, \"color\", gl::FLOAT, 3, 5*size_of::<f32>() as i32, 2*size_of::<f32>());\n vbo.bind();\n\n App {\n program: program,\n vao: vao,\n vbo: vbo\n }\n }\n}\n\nimpl Game for App {\n fn render(&mut self, args: &mut RenderArgs) {\n gl::Viewport(0, 0, args.width as i32, args.height as i32);\n gl::ClearColor(0.0, 0.0, 0.0, 0.1);\n gl::Clear(gl::COLOR_BUFFER_BIT);\n self.vao.draw_array(Triangles, 0, 3);\n }\n}\n\nfn main() {\n let mut window = GameWindowSDL2::new(\n GameWindowSettings {\n title: \"Test\".to_string(),\n size: [800, 600],\n fullscreen: false,\n exit_on_esc: true\n }\n );\n\n let game_iter_settings = GameIteratorSettings 
{\n updates_per_second: 120,\n max_frames_per_second: 60,\n };\n App::new().run(&mut window, &mut AssetStore::empty(), &game_iter_settings);\n}\n\n<|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n let path = arg.clone();\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n for i in 2..args.len() {\n if let Some(arg) = args.get(i) {\n if i >= 3 {\n string.push_str(\" \");\n }\n string.push_str(arg);\n }\n }\n string.push_str(\"\\r\\n\\r\\n\");\n\n match file.write(&string.as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => 
println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + &c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Refactor send command<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::file::*;\nuse redox::io::*;\nuse redox::console::*;\nuse redox::env::*;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl Command {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n let path = arg.clone();\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>| {\n let path;\n match args.get(1) {\n Some(arg) => path = arg.clone(),\n None => path = String::new(),\n }\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\".to_string(),\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } 
else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + &c.name) + \" exit\";\n\n commands.push(Command {\n name: \"help\".to_string(),\n main: box move |args: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl Application {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &String) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\" {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + \"\\n\" + &variable.name + \"=\" + &variable.value;\n }\n println!(\"{}\", variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if arg.len() > 0 {\n if arg.starts_with('$') {\n let name = arg[1 .. arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n }\n }\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if self.modes.len() > 0 {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].to_string();\n let mut value = cmd[i + 1 .. 
cmd.len()].to_string();\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name,\n value: value,\n });\n }\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n console_title(\"Terminal\");\n\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# {}\", command);\n self.on_command(&command);\n }\n\n while let Some(command) = readln!() {\n println!(\"# {}\", command);\n if command == \"exit\" {\n break;\n } else if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implements quick sort<commit_after>\/\/Implements http:\/\/rosettacode.org\/wiki\/Sorting_algorithms\/Quicksort\nfn quick_sort<T: Ord + Clone>(v: &mut[T]) {\n let vLen = v.len();\n if vLen < 2 {\n return;\n }\n let mut leftIndex = 0;\n let mut rightIndex = vLen-1;\n let pivot: T = v[vLen\/2].clone();\n while leftIndex <= rightIndex {\n if v[leftIndex] < pivot {\n leftIndex = leftIndex+1;\n continue;\n }\n if v[rightIndex] > pivot {\n rightIndex = rightIndex-1;\n continue;\n }\n v.swap(leftIndex, rightIndex);\n leftIndex = leftIndex+1;\n rightIndex = rightIndex-1;\n }\n \n {\n let leftArray = v.mut_slice(0, rightIndex+1);\n quick_sort(leftArray);\n }\n {\n let rightArray = v.mut_slice(leftIndex, vLen);\n quick_sort(rightArray);\n }\n}\n\nfn main() {\n let mut numbers = [4, 65, 2, -31, 0, 99, 2, 83, 782, 1];\n quick_sort(numbers);\n}\n\n#[cfg(test)]\nfn check_sort<T: Ord>(v: &[T]) {\n if v.len() > 1 {\n for i in range(0, v.len()-1) {\n assert!(v[i] <= v[i+1]);\n }\n }\n}\n\n#[test]\nfn test_rosetta_vector() {\n let mut numbers = [4, 65, 2, -31, 0, 99, 2, 83, 782, 1];\n quick_sort(numbers);\n check_sort(numbers);\n}\n\n#[test]\nfn test_empty_vector() {\n let mut numbers: ~[int] = ~[];\n quick_sort(numbers);\n check_sort(numbers);\n}\n\n#[test]\nfn test_one_element_vector() {\n let mut numbers = [0];\n quick_sort(numbers);\n check_sort(numbers);\n}\n\n#[test]\nfn test_repeat_vector() {\n let mut numbers = [1, 1, 1, 1, 1];\n quick_sort(numbers);\n check_sort(numbers);\n}\n\n#[test]\nfn test_worst_case_vector() {\n let mut numbers = [20, 10, 0, -1, -5];\n quick_sort(numbers);\n check_sort(numbers);\n}\n\n#[test]\nfn test_already_sorted_vector() {\n let mut numbers = [-1, 0, 3, 6, 99];\n quick_sort(numbers);\n check_sort(numbers);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ 
modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse ruru::AnyObject;\ntrait Wrap {\n fn wrap(self) -> AnyObject;\n}\n\n#[allow(unused_variables)]\npub mod storeid {\n use std::path::PathBuf;\n\n use ruru::{Class, Object, AnyObject, Boolean, RString, NilClass};\n\n use libimagstore::storeid::StoreId;\n\n wrappable_struct!(StoreId, StoreIdWrapper, STOREID_WRAPPER);\n class!(RStoreId);\n\n use store::Wrap;\n impl Wrap for StoreId {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"RStoreId\").wrap_data(self, &*STOREID_WRAPPER)\n }\n }\n\n methods!(\n RStoreId,\n itself,\n\n fn r_storeid_new(base: RString, id: RString) -> AnyObject {\n let base = match base.map(|b| b.to_string()).map(PathBuf::from) {\n Ok(base) => base,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n let id = match id.map(|id| id.to_string()).map(PathBuf::from) {\n Ok(id) => id,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n match StoreId::new(Some(base), id) {\n Ok(sid) => Class::from_existing(\"RStoreId\").wrap_data(sid, &*STOREID_WRAPPER),\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n }\n }\n\n fn r_storeid_new_baseless(id: RString) -> AnyObject {\n let id = match id.map(|id| id.to_string()).map(PathBuf::from) {\n Ok(id) => id,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n match StoreId::new(None, id) {\n Ok(sid) => Class::from_existing(\"RStoreId\").wrap_data(sid, &*STOREID_WRAPPER),\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n }\n }\n\n fn r_storeid_without_base() -> RStoreId {\n let withoutbase : StoreId = itself.get_data(&*STOREID_WRAPPER).clone().without_base();\n Class::from_existing(\"RStoreId\").wrap_data(withoutbase, &*STOREID_WRAPPER)\n }\n\n fn r_storeid_with_base(base: RString) -> AnyObject {\n let base : PathBuf = match base.map(|b| b.to_string()).map(PathBuf::from) {\n Ok(pb) => pb,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Error: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n let withoutbase : StoreId = itself.get_data(&*STOREID_WRAPPER).clone().with_base(base);\n Class::from_existing(\"RStoreId\").wrap_data(withoutbase, &*STOREID_WRAPPER)\n }\n\n fn r_storeid_into_pathbuf() -> AnyObject {\n itself.get_data(&*STOREID_WRAPPER)\n .clone()\n .into_pathbuf()\n \/\/ TODO: No unwraps\n .map(|pb| pb.to_str().map(String::from).unwrap())\n .as_ref()\n .map(|s| AnyObject::from(RString::new(s).value()))\n \/\/ TODO: Exception!\n 
.unwrap_or(AnyObject::from(NilClass::new().value()))\n }\n\n fn r_storeid_exists() -> Boolean {\n Boolean::new(itself.get_data(&*STOREID_WRAPPER).exists())\n }\n\n fn r_storeid_to_str() -> AnyObject {\n itself.get_data(&*STOREID_WRAPPER)\n .to_str()\n .as_ref()\n .map(|s| AnyObject::from(RString::new(s).value()))\n \/\/ TODO: Exception!\n .unwrap_or(AnyObject::from(NilClass::new().value()))\n }\n\n fn r_storeid_local() -> RString {\n let local = itself.get_data(&*STOREID_WRAPPER).local();\n let local = local.to_str().unwrap(); \/\/ TODO: No unwraps\n RString::new(local)\n }\n\n );\n\n pub fn setup() -> Class {\n let mut class = Class::new(\"RStoreId\", None);\n class.define(|itself| {\n itself.def_self(\"new\" , r_storeid_new);\n itself.def_self(\"new_baseless\" , r_storeid_new_baseless);\n\n itself.def(\"without_base\" , r_storeid_without_base);\n itself.def(\"with_base\" , r_storeid_with_base);\n itself.def(\"into_pathbuf\" , r_storeid_into_pathbuf);\n itself.def(\"exists\" , r_storeid_exists);\n itself.def(\"to_str\" , r_storeid_to_str);\n itself.def(\"local\" , r_storeid_local);\n });\n class\n }\n\n}\n\n#[allow(unused_variables)]\npub mod store {\n pub mod entry {\n use std::collections::BTreeMap;\n use std::error::Error;\n use std::ops::Deref;\n use std::ops::DerefMut;\n\n use ruru::{Class, Object, AnyObject, Boolean, RString, VM, Hash, NilClass};\n\n use libimagstore::store::FileLockEntry as FLE;\n use libimagstore::store::EntryHeader;\n use libimagstore::store::EntryContent;\n use libimagstore::store::Entry;\n\n use ruby_utils::IntoToml;\n use toml_utils::IntoRuby;\n use store::Wrap;\n\n pub struct FLECustomWrapper(Box<FLE<'static>>);\n\n impl Deref for FLECustomWrapper {\n type Target = Box<FLE<'static>>;\n\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n }\n\n impl DerefMut for FLECustomWrapper {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n }\n\n wrappable_struct!(FLECustomWrapper, FileLockEntryWrapper, FLE_WRAPPER);\n class!(RFileLockEntry);\n\n methods!(\n RFileLockEntry,\n itself,\n\n fn r_get_location() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_location().clone().wrap()\n }\n\n fn r_get_header() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_header().clone().wrap()\n }\n\n fn r_set_header(hdr: Hash) -> NilClass {\n use ruby_utils::IntoToml;\n use toml_utils::IntoRuby;\n use toml::Value;\n\n let mut header = itself.get_data(&*FLE_WRAPPER).get_header_mut();\n\n if let Err(ref error) = hdr { \/\/ raise exception if \"hdr\" is not a Hash\n VM::raise(error.to_exception(), error.description());\n return NilClass::new();\n }\n\n let hdr = match hdr.unwrap().into_toml() {\n Value::Table(t) => *header = EntryHeader::from(t),\n _ => {\n let ec = Class::from_existing(\"RuntimeError\");\n VM::raise(ec, \"Something weird happened. 
Hash seems to be not a Hash\");\n return NilClass::new();\n },\n };\n\n NilClass::new()\n }\n\n fn r_get_content() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_content().clone().wrap()\n }\n\n );\n\n wrappable_struct!(EntryHeader, EntryHeaderWrapper, ENTRY_HEADER_WRAPPER);\n class!(REntryHeader);\n\n impl Wrap for EntryHeader {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"REntryHeader\").wrap_data(self, &*ENTRY_HEADER_WRAPPER)\n }\n }\n\n methods!(\n REntryHeader,\n itself,\n\n fn r_entry_header_new() -> AnyObject {\n EntryHeader::new().wrap()\n }\n\n fn r_entry_header_insert(spec: RString, obj: AnyObject) -> Boolean {\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false);\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n let obj = obj.unwrap(); \/\/ possibly not safe... TODO\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).insert(&spec, obj.into_toml()) {\n Ok(b) => Boolean::new(b),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false);\n }\n }\n }\n\n fn r_entry_header_set(spec: RString, obj: AnyObject) -> AnyObject {\n use ruru::NilClass;\n\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false).to_any_object();\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n let obj = obj.unwrap(); \/\/ possibly not safe... TODO\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).set(&spec, obj.into_toml()) {\n Ok(Some(v)) => v.into_ruby(),\n Ok(None) => NilClass::new().to_any_object(),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false).to_any_object();\n }\n }\n }\n\n fn r_entry_header_get(spec: RString) -> AnyObject {\n use ruru::NilClass;\n\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false).to_any_object();\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).read(&spec) {\n Ok(Some(v)) => v.into_ruby(),\n Ok(None) => NilClass::new().to_any_object(),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false).to_any_object();\n }\n }\n }\n\n );\n\n wrappable_struct!(EntryContent, EntryContentWrapper, ENTRY_CONTENT_WRAPPER);\n class!(REntryContent);\n\n impl Wrap for EntryContent {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"REntryContent\").wrap_data(self, &*ENTRY_CONTENT_WRAPPER)\n }\n }\n\n wrappable_struct!(Entry, EntryWrapper, ENTRY_WRAPPER);\n class!(REntry);\n\n pub fn setup_filelockentry() -> Class {\n let mut class = Class::new(\"RFileLockEntry\", None);\n class\n }\n\n pub fn setup_entryheader() -> Class {\n let mut class = Class::new(\"REntryHeader\", None);\n class.define(|itself| {\n itself.def(\"insert\", r_entry_header_insert);\n itself.def(\"set\" , r_entry_header_set);\n itself.def(\"read\" , r_entry_header_get);\n });\n class\n }\n\n pub fn setup_entrycontent() -> Class {\n let string = Class::from_existing(\"String\");\n let mut class = Class::new(\"REntryContent\", Some(&string));\n class\n }\n }\n\n}\n\n<commit_msg>Add r_set_content() for FileLockEntry<commit_after>\/\/\n\/\/ imag - the personal information 
management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\nuse ruru::AnyObject;\ntrait Wrap {\n fn wrap(self) -> AnyObject;\n}\n\n#[allow(unused_variables)]\npub mod storeid {\n use std::path::PathBuf;\n\n use ruru::{Class, Object, AnyObject, Boolean, RString, NilClass};\n\n use libimagstore::storeid::StoreId;\n\n wrappable_struct!(StoreId, StoreIdWrapper, STOREID_WRAPPER);\n class!(RStoreId);\n\n use store::Wrap;\n impl Wrap for StoreId {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"RStoreId\").wrap_data(self, &*STOREID_WRAPPER)\n }\n }\n\n methods!(\n RStoreId,\n itself,\n\n fn r_storeid_new(base: RString, id: RString) -> AnyObject {\n let base = match base.map(|b| b.to_string()).map(PathBuf::from) {\n Ok(base) => base,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n let id = match id.map(|id| id.to_string()).map(PathBuf::from) {\n Ok(id) => id,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n match StoreId::new(Some(base), id) {\n Ok(sid) => Class::from_existing(\"RStoreId\").wrap_data(sid, &*STOREID_WRAPPER),\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n }\n }\n\n fn r_storeid_new_baseless(id: RString) -> AnyObject {\n let id = match id.map(|id| id.to_string()).map(PathBuf::from) {\n Ok(id) => id,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n\n match StoreId::new(None, id) {\n Ok(sid) => Class::from_existing(\"RStoreId\").wrap_data(sid, &*STOREID_WRAPPER),\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Building StoreId object failed: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n }\n }\n\n fn r_storeid_without_base() -> RStoreId {\n let withoutbase : StoreId = itself.get_data(&*STOREID_WRAPPER).clone().without_base();\n Class::from_existing(\"RStoreId\").wrap_data(withoutbase, &*STOREID_WRAPPER)\n }\n\n fn r_storeid_with_base(base: RString) -> AnyObject {\n let base : PathBuf = match base.map(|b| b.to_string()).map(PathBuf::from) {\n Ok(pb) => pb,\n Err(e) => {\n \/\/ TODO: Exception!\n error!(\"Error: {:?}\", e);\n return AnyObject::from(NilClass::new().value());\n },\n };\n let withoutbase : StoreId = itself.get_data(&*STOREID_WRAPPER).clone().with_base(base);\n Class::from_existing(\"RStoreId\").wrap_data(withoutbase, &*STOREID_WRAPPER)\n }\n\n fn r_storeid_into_pathbuf() -> AnyObject {\n itself.get_data(&*STOREID_WRAPPER)\n 
.clone()\n .into_pathbuf()\n \/\/ TODO: No unwraps\n .map(|pb| pb.to_str().map(String::from).unwrap())\n .as_ref()\n .map(|s| AnyObject::from(RString::new(s).value()))\n \/\/ TODO: Exception!\n .unwrap_or(AnyObject::from(NilClass::new().value()))\n }\n\n fn r_storeid_exists() -> Boolean {\n Boolean::new(itself.get_data(&*STOREID_WRAPPER).exists())\n }\n\n fn r_storeid_to_str() -> AnyObject {\n itself.get_data(&*STOREID_WRAPPER)\n .to_str()\n .as_ref()\n .map(|s| AnyObject::from(RString::new(s).value()))\n \/\/ TODO: Exception!\n .unwrap_or(AnyObject::from(NilClass::new().value()))\n }\n\n fn r_storeid_local() -> RString {\n let local = itself.get_data(&*STOREID_WRAPPER).local();\n let local = local.to_str().unwrap(); \/\/ TODO: No unwraps\n RString::new(local)\n }\n\n );\n\n pub fn setup() -> Class {\n let mut class = Class::new(\"RStoreId\", None);\n class.define(|itself| {\n itself.def_self(\"new\" , r_storeid_new);\n itself.def_self(\"new_baseless\" , r_storeid_new_baseless);\n\n itself.def(\"without_base\" , r_storeid_without_base);\n itself.def(\"with_base\" , r_storeid_with_base);\n itself.def(\"into_pathbuf\" , r_storeid_into_pathbuf);\n itself.def(\"exists\" , r_storeid_exists);\n itself.def(\"to_str\" , r_storeid_to_str);\n itself.def(\"local\" , r_storeid_local);\n });\n class\n }\n\n}\n\n#[allow(unused_variables)]\npub mod store {\n pub mod entry {\n use std::collections::BTreeMap;\n use std::error::Error;\n use std::ops::Deref;\n use std::ops::DerefMut;\n\n use ruru::{Class, Object, AnyObject, Boolean, RString, VM, Hash, NilClass};\n\n use libimagstore::store::FileLockEntry as FLE;\n use libimagstore::store::EntryHeader;\n use libimagstore::store::EntryContent;\n use libimagstore::store::Entry;\n\n use ruby_utils::IntoToml;\n use toml_utils::IntoRuby;\n use store::Wrap;\n\n pub struct FLECustomWrapper(Box<FLE<'static>>);\n\n impl Deref for FLECustomWrapper {\n type Target = Box<FLE<'static>>;\n\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n }\n\n impl DerefMut for FLECustomWrapper {\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n }\n\n wrappable_struct!(FLECustomWrapper, FileLockEntryWrapper, FLE_WRAPPER);\n class!(RFileLockEntry);\n\n methods!(\n RFileLockEntry,\n itself,\n\n fn r_get_location() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_location().clone().wrap()\n }\n\n fn r_get_header() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_header().clone().wrap()\n }\n\n fn r_set_header(hdr: Hash) -> NilClass {\n use ruby_utils::IntoToml;\n use toml_utils::IntoRuby;\n use toml::Value;\n\n let mut header = itself.get_data(&*FLE_WRAPPER).get_header_mut();\n\n if let Err(ref error) = hdr { \/\/ raise exception if \"hdr\" is not a Hash\n VM::raise(error.to_exception(), error.description());\n return NilClass::new();\n }\n\n let hdr = match hdr.unwrap().into_toml() {\n Value::Table(t) => *header = EntryHeader::from(t),\n _ => {\n let ec = Class::from_existing(\"RuntimeError\");\n VM::raise(ec, \"Something weird happened. 
Hash seems to be not a Hash\");\n return NilClass::new();\n },\n };\n\n NilClass::new()\n }\n\n fn r_get_content() -> AnyObject {\n itself.get_data(&*FLE_WRAPPER).get_content().clone().wrap()\n }\n\n fn r_set_content(ctt: RString) -> NilClass {\n use ruby_utils::IntoToml;\n use toml_utils::IntoRuby;\n use toml::Value;\n\n let mut content = itself.get_data(&*FLE_WRAPPER).get_content_mut();\n\n if let Err(ref error) = ctt { \/\/ raise exception if \"ctt\" is not a String\n VM::raise(error.to_exception(), error.description());\n return NilClass::new();\n }\n\n let hdr = match ctt.unwrap().into_toml() {\n Value::String(s) => *content = s,\n _ => {\n let ec = Class::from_existing(\"RuntimeError\");\n VM::raise(ec, \"Something weird happened. String seems to be not a String\");\n return NilClass::new();\n },\n };\n\n NilClass::new()\n }\n\n );\n\n wrappable_struct!(EntryHeader, EntryHeaderWrapper, ENTRY_HEADER_WRAPPER);\n class!(REntryHeader);\n\n impl Wrap for EntryHeader {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"REntryHeader\").wrap_data(self, &*ENTRY_HEADER_WRAPPER)\n }\n }\n\n methods!(\n REntryHeader,\n itself,\n\n fn r_entry_header_new() -> AnyObject {\n EntryHeader::new().wrap()\n }\n\n fn r_entry_header_insert(spec: RString, obj: AnyObject) -> Boolean {\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false);\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n let obj = obj.unwrap(); \/\/ possibly not safe... TODO\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).insert(&spec, obj.into_toml()) {\n Ok(b) => Boolean::new(b),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false);\n }\n }\n }\n\n fn r_entry_header_set(spec: RString, obj: AnyObject) -> AnyObject {\n use ruru::NilClass;\n\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false).to_any_object();\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n let obj = obj.unwrap(); \/\/ possibly not safe... 
TODO\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).set(&spec, obj.into_toml()) {\n Ok(Some(v)) => v.into_ruby(),\n Ok(None) => NilClass::new().to_any_object(),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false).to_any_object();\n }\n }\n }\n\n fn r_entry_header_get(spec: RString) -> AnyObject {\n use ruru::NilClass;\n\n if let Err(ref error) = spec { \/\/ raise exception if \"spec\" is not a String\n VM::raise(error.to_exception(), error.description());\n return Boolean::new(false).to_any_object();\n }\n\n let spec = spec.unwrap().to_string(); \/\/ safe because of check above.\n\n match itself.get_data(&*ENTRY_HEADER_WRAPPER).read(&spec) {\n Ok(Some(v)) => v.into_ruby(),\n Ok(None) => NilClass::new().to_any_object(),\n Err(e) => {\n VM::raise(Class::from_existing(\"RuntimeError\"), e.description());\n return Boolean::new(false).to_any_object();\n }\n }\n }\n\n );\n\n wrappable_struct!(EntryContent, EntryContentWrapper, ENTRY_CONTENT_WRAPPER);\n class!(REntryContent);\n\n impl Wrap for EntryContent {\n fn wrap(self) -> AnyObject {\n Class::from_existing(\"REntryContent\").wrap_data(self, &*ENTRY_CONTENT_WRAPPER)\n }\n }\n\n wrappable_struct!(Entry, EntryWrapper, ENTRY_WRAPPER);\n class!(REntry);\n\n pub fn setup_filelockentry() -> Class {\n let mut class = Class::new(\"RFileLockEntry\", None);\n class\n }\n\n pub fn setup_entryheader() -> Class {\n let mut class = Class::new(\"REntryHeader\", None);\n class.define(|itself| {\n itself.def(\"insert\", r_entry_header_insert);\n itself.def(\"set\" , r_entry_header_set);\n itself.def(\"read\" , r_entry_header_get);\n });\n class\n }\n\n pub fn setup_entrycontent() -> Class {\n let string = Class::from_existing(\"String\");\n let mut class = Class::new(\"REntryContent\", Some(&string));\n class\n }\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Solve problem 2455<commit_after>\/\/ https:\/\/leetcode.com\/problems\/average-value-of-even-numbers-that-are-divisible-by-three\/\npub fn average_value(nums: Vec<i32>) -> i32 {\n let mut sum = 0;\n let mut count = 0;\n for num in nums.into_iter() {\n if num % 2 == 0 && num % 3 == 0 {\n sum += num;\n count += 1;\n }\n }\n if count == 0 {\n 0\n } else {\n sum \/ count\n }\n}\n\nfn main() {\n println!(\"{}\", average_value(vec![1, 3, 6, 10, 12, 15])); \/\/ 9\n println!(\"{}\", average_value(vec![1, 2, 4, 7, 10])); \/\/ 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust cycle adapter<commit_after>use std::iter::{once, repeat};\n\nlet fizzes = repeat(\"\").take(2).chain(once(\"fizz\")).cycle();\nlet buzzes = repeat(\"\").take(4).chain(once(\"buzz\")).cycle();\n\nlet fizzes_buzzes = fizzes.zip(buzzes);\n\nlet fizz_buzz = (1..100).zip(fizzes_buzzes)\n .map(|tuple|\n match tuple {\n (i, (\"\", \"\")) => i.to_string(),\n (_, (fizz, buzz)) => format!(\"{}{}\", fizz, buzz)\n }\n );\n\n for line in fizz_buzz {\n println!(\"{}\", line);\n }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rename PatitionDescription to PartitionEntry<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add draft for bench tests<commit_after>#![feature(phase)]\n\nextern crate httpd = \"tiny-http\";\n\nuse std::io::Command;\n\n#[test]\n#[ignore]\n\/\/ TODO: obtain time\nfn curl_bench() {\n let (server, port) = httpd::Server::new_with_random_port().unwrap();\n let num_requests = 10u;\n\n match Command::new(\"curl\")\n .arg(\"-s\")\n .arg(format!(\"http:\/\/localhost:{}\/?[1-{}]\", port, num_requests).as_slice())\n .output()\n {\n 
Ok(p) => p,\n Err(err) => return, \/\/ ignoring test\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for #18119.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nconst X: u8 = 1;\nstatic Y: u8 = 1;\nfn foo() {}\n\nimpl X {}\n\/\/~^ ERROR use of undeclared type name `X`\nimpl Y {}\n\/\/~^ ERROR use of undeclared type name `Y`\nimpl foo {}\n\/\/~^ ERROR use of undeclared type name `foo`\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub struct Foo { a: u32 }\npub struct Pass<'a, 'tcx: 'a>(&'a mut &'a (), &'a &'tcx ());\n\nimpl<'a, 'tcx> Pass<'a, 'tcx>\n{\n pub fn tcx(&self) -> &'a &'tcx () { self.1 }\n fn lol(&mut self, b: &Foo)\n {\n b.c; \/\/~ ERROR no field with that name was found\n self.tcx();\n }\n}\n\nfn main() {}\n<commit_msg>Add comment explaining purpose of test<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that we do not see uninformative region-related errors\n\/\/ when we get some basic type-checking failure. See #30580.\n\npub struct Foo { a: u32 }\npub struct Pass<'a, 'tcx: 'a>(&'a mut &'a (), &'a &'tcx ());\n\nimpl<'a, 'tcx> Pass<'a, 'tcx>\n{\n pub fn tcx(&self) -> &'a &'tcx () { self.1 }\n fn lol(&mut self, b: &Foo)\n {\n b.c; \/\/~ ERROR no field with that name was found\n self.tcx();\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n\nmacro_rules! null { ($i:tt) => {} }\nmacro_rules! apply_null {\n ($i:item) => { null! 
{ $i } }\n}\n\n#[rustc_error]\nfn main() { \/\/~ ERROR compilation successful\n apply_null!(#[cfg(all())] fn f() {});\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate toml;\nextern crate rustc_serialize;\n\nuse std::collections::HashMap;\nuse std::env;\nuse std::fs::File;\nuse std::io::{self, Read, Write};\nuse std::path::{PathBuf, Path};\nuse std::process::{Command, Stdio};\n\nstatic HOSTS: &'static [&'static str] = &[\n \"aarch64-unknown-linux-gnu\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-gnueabihf\",\n \"i686-apple-darwin\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-linux-gnu\",\n \"mips-unknown-linux-gnu\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"x86_64-apple-darwin\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic TARGETS: &'static [&'static str] = &[\n \"aarch64-apple-ios\",\n \"aarch64-linux-android\",\n \"aarch64-unknown-linux-gnu\",\n \"arm-linux-androideabi\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"arm-unknown-linux-musleabi\",\n \"arm-unknown-linux-musleabihf\",\n \"armv7-apple-ios\",\n \"armv7-linux-androideabi\",\n \"armv7-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-musleabihf\",\n \"armv7s-apple-ios\",\n \"asmjs-unknown-emscripten\",\n \"i386-apple-ios\",\n \"i586-pc-windows-msvc\",\n \"i586-unknown-linux-gnu\",\n \"i686-apple-darwin\",\n \"i686-linux-android\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-freebsd\",\n \"i686-unknown-linux-gnu\",\n \"i686-unknown-linux-musl\",\n \"mips-unknown-linux-gnu\",\n \"mips-unknown-linux-musl\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"mipsel-unknown-linux-musl\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"wasm32-unknown-emscripten\",\n \"x86_64-apple-darwin\",\n \"x86_64-apple-ios\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-rumprun-netbsd\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-linux-musl\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic MINGW: &'static [&'static str] = &[\n \"i686-pc-windows-gnu\",\n \"x86_64-pc-windows-gnu\",\n];\n\n#[derive(RustcEncodable)]\nstruct Manifest {\n manifest_version: String,\n date: String,\n pkg: HashMap<String, Package>,\n}\n\n#[derive(RustcEncodable)]\nstruct Package {\n version: String,\n target: HashMap<String, Target>,\n}\n\n#[derive(RustcEncodable)]\nstruct Target {\n available: bool,\n url: Option<String>,\n hash: Option<String>,\n components: Option<Vec<Component>>,\n extensions: 
Option<Vec<Component>>,\n}\n\n#[derive(RustcEncodable)]\nstruct Component {\n pkg: String,\n target: String,\n}\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nstruct Builder {\n channel: String,\n input: PathBuf,\n output: PathBuf,\n gpg_passphrase: String,\n digests: HashMap<String, String>,\n s3_address: String,\n date: String,\n rust_version: String,\n cargo_version: String,\n}\n\nfn main() {\n let mut args = env::args().skip(1);\n let input = PathBuf::from(args.next().unwrap());\n let output = PathBuf::from(args.next().unwrap());\n let date = args.next().unwrap();\n let channel = args.next().unwrap();\n let s3_address = args.next().unwrap();\n let mut passphrase = String::new();\n t!(io::stdin().read_to_string(&mut passphrase));\n\n Builder {\n channel: channel,\n input: input,\n output: output,\n gpg_passphrase: passphrase,\n digests: HashMap::new(),\n s3_address: s3_address,\n date: date,\n rust_version: String::new(),\n cargo_version: String::new(),\n }.build();\n}\n\nimpl Builder {\n fn build(&mut self) {\n self.rust_version = self.version(\"rust\", \"x86_64-unknown-linux-gnu\");\n self.cargo_version = self.version(\"cargo\", \"x86_64-unknown-linux-gnu\");\n\n self.digest_and_sign();\n let manifest = self.build_manifest();\n let manifest = toml::encode(&manifest).to_string();\n\n let filename = format!(\"channel-rust-{}.toml\", self.channel);\n self.write_manifest(&manifest, &filename);\n\n if self.channel != \"beta\" && self.channel != \"nightly\" {\n self.write_manifest(&manifest, \"channel-rust-stable.toml\");\n }\n }\n\n fn digest_and_sign(&mut self) {\n for file in t!(self.input.read_dir()).map(|e| t!(e).path()) {\n let filename = file.file_name().unwrap().to_str().unwrap();\n let digest = self.hash(&file);\n self.sign(&file);\n assert!(self.digests.insert(filename.to_string(), digest).is_none());\n }\n }\n\n fn build_manifest(&mut self) -> Manifest {\n let mut manifest = Manifest {\n manifest_version: \"2\".to_string(),\n date: self.date.to_string(),\n pkg: HashMap::new(),\n };\n\n self.package(\"rustc\", &mut manifest.pkg, HOSTS);\n self.package(\"cargo\", &mut manifest.pkg, HOSTS);\n self.package(\"rust-mingw\", &mut manifest.pkg, MINGW);\n self.package(\"rust-std\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-docs\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-src\", &mut manifest.pkg, &[\"*\"]);\n\n let mut pkg = Package {\n version: self.cached_version(\"rust\").to_string(),\n target: HashMap::new(),\n };\n for host in HOSTS {\n let filename = self.filename(\"rust\", host);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n pkg.target.insert(host.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n });\n continue\n }\n };\n let mut components = Vec::new();\n let mut extensions = Vec::new();\n\n \/\/ rustc\/rust-std\/cargo are all required, and so is rust-mingw if it's\n \/\/ available for the target.\n components.extend(vec![\n Component { pkg: \"rustc\".to_string(), target: host.to_string() },\n Component { pkg: \"rust-std\".to_string(), target: host.to_string() },\n Component { pkg: \"cargo\".to_string(), target: host.to_string() },\n ]);\n if host.contains(\"pc-windows-gnu\") {\n components.push(Component {\n pkg: \"rust-mingw\".to_string(),\n target: host.to_string(),\n });\n }\n\n \/\/ Docs, other standard libraries, and the source package are all\n \/\/ optional.\n 
extensions.push(Component {\n pkg: \"rust-docs\".to_string(),\n target: host.to_string(),\n });\n for target in TARGETS {\n if target != host {\n extensions.push(Component {\n pkg: \"rust-std\".to_string(),\n target: target.to_string(),\n });\n }\n }\n extensions.push(Component {\n pkg: \"rust-src\".to_string(),\n target: \"*\".to_string(),\n });\n\n pkg.target.insert(host.to_string(), Target {\n available: true,\n url: Some(self.url(\"rust\", host)),\n hash: Some(to_hex(digest.as_ref())),\n components: Some(components),\n extensions: Some(extensions),\n });\n }\n manifest.pkg.insert(\"rust\".to_string(), pkg);\n\n return manifest\n }\n\n fn package(&mut self,\n pkgname: &str,\n dst: &mut HashMap<String, Package>,\n targets: &[&str]) {\n let targets = targets.iter().map(|name| {\n let filename = self.filename(pkgname, name);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n return (name.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n })\n }\n };\n\n (name.to_string(), Target {\n available: true,\n url: Some(self.url(pkgname, name)),\n hash: Some(digest),\n components: None,\n extensions: None,\n })\n }).collect();\n\n dst.insert(pkgname.to_string(), Package {\n version: self.cached_version(pkgname).to_string(),\n target: targets,\n });\n }\n\n fn url(&self, component: &str, target: &str) -> String {\n format!(\"{}\/{}\/{}\",\n self.s3_address,\n self.date,\n self.filename(component, target))\n }\n\n fn filename(&self, component: &str, target: &str) -> String {\n if component == \"rust-src\" {\n format!(\"rust-src-{}.tar.gz\", self.channel)\n } else {\n format!(\"{}-{}-{}.tar.gz\", component, self.channel, target)\n }\n }\n\n fn cached_version(&self, component: &str) -> &str {\n if component == \"cargo\" {\n &self.cargo_version\n } else {\n &self.rust_version\n }\n }\n\n fn version(&self, component: &str, target: &str) -> String {\n let mut cmd = Command::new(\"tar\");\n let filename = self.filename(component, target);\n cmd.arg(\"xf\")\n .arg(self.input.join(&filename))\n .arg(format!(\"{}\/version\", filename.replace(\".tar.gz\", \"\")))\n .arg(\"-O\");\n let version = t!(cmd.output());\n if !version.status.success() {\n panic!(\"failed to learn version:\\n\\n{:?}\\n\\n{}\\n\\n{}\",\n cmd,\n String::from_utf8_lossy(&version.stdout),\n String::from_utf8_lossy(&version.stderr));\n }\n String::from_utf8_lossy(&version.stdout).trim().to_string()\n }\n\n fn hash(&self, path: &Path) -> String {\n let sha = t!(Command::new(\"shasum\")\n .arg(\"-a\").arg(\"256\")\n .arg(path)\n .output());\n assert!(sha.status.success());\n\n let filename = path.file_name().unwrap().to_str().unwrap();\n let sha256 = self.output.join(format!(\"{}.sha256\", filename));\n t!(t!(File::create(&sha256)).write_all(&sha.stdout));\n\n let stdout = String::from_utf8_lossy(&sha.stdout);\n stdout.split_whitespace().next().unwrap().to_string()\n }\n\n fn sign(&self, path: &Path) {\n let filename = path.file_name().unwrap().to_str().unwrap();\n let asc = self.output.join(format!(\"{}.asc\", filename));\n println!(\"signing: {:?}\", path);\n let mut cmd = Command::new(\"gpg\");\n cmd.arg(\"--no-tty\")\n .arg(\"--yes\")\n .arg(\"--passphrase-fd\").arg(\"0\")\n .arg(\"--armor\")\n .arg(\"--output\").arg(&asc)\n .arg(\"--detach-sign\").arg(path)\n .stdin(Stdio::piped());\n let mut child = t!(cmd.spawn());\n t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes()));\n assert!(t!(child.wait()).success());\n 
}\n\n fn write_manifest(&self, manifest: &str, name: &str) {\n let dst = self.output.join(name);\n t!(t!(File::create(&dst)).write_all(manifest.as_bytes()));\n self.hash(&dst);\n self.sign(&dst);\n }\n}\n\nfn to_hex(digest: &[u8]) -> String {\n let mut ret = String::new();\n for byte in digest {\n ret.push(hex((byte & 0xf0) >> 4));\n ret.push(hex(byte & 0xf));\n }\n return ret;\n\n fn hex(b: u8) -> char {\n match b {\n 0...9 => (b'0' + b) as char,\n _ => (b'a' + b - 10) as char,\n }\n }\n}\n<commit_msg>Fix a manifest-generation bug on beta<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate toml;\nextern crate rustc_serialize;\n\nuse std::collections::HashMap;\nuse std::env;\nuse std::fs::File;\nuse std::io::{self, Read, Write};\nuse std::path::{PathBuf, Path};\nuse std::process::{Command, Stdio};\n\nstatic HOSTS: &'static [&'static str] = &[\n \"aarch64-unknown-linux-gnu\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-gnueabihf\",\n \"i686-apple-darwin\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-linux-gnu\",\n \"mips-unknown-linux-gnu\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"x86_64-apple-darwin\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic TARGETS: &'static [&'static str] = &[\n \"aarch64-apple-ios\",\n \"aarch64-linux-android\",\n \"aarch64-unknown-linux-gnu\",\n \"arm-linux-androideabi\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"arm-unknown-linux-musleabi\",\n \"arm-unknown-linux-musleabihf\",\n \"armv7-apple-ios\",\n \"armv7-linux-androideabi\",\n \"armv7-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-musleabihf\",\n \"armv7s-apple-ios\",\n \"asmjs-unknown-emscripten\",\n \"i386-apple-ios\",\n \"i586-pc-windows-msvc\",\n \"i586-unknown-linux-gnu\",\n \"i686-apple-darwin\",\n \"i686-linux-android\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-freebsd\",\n \"i686-unknown-linux-gnu\",\n \"i686-unknown-linux-musl\",\n \"mips-unknown-linux-gnu\",\n \"mips-unknown-linux-musl\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"mipsel-unknown-linux-musl\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"wasm32-unknown-emscripten\",\n \"x86_64-apple-darwin\",\n \"x86_64-apple-ios\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-rumprun-netbsd\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-linux-musl\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic MINGW: &'static [&'static str] = &[\n \"i686-pc-windows-gnu\",\n 
\"x86_64-pc-windows-gnu\",\n];\n\n#[derive(RustcEncodable)]\nstruct Manifest {\n manifest_version: String,\n date: String,\n pkg: HashMap<String, Package>,\n}\n\n#[derive(RustcEncodable)]\nstruct Package {\n version: String,\n target: HashMap<String, Target>,\n}\n\n#[derive(RustcEncodable)]\nstruct Target {\n available: bool,\n url: Option<String>,\n hash: Option<String>,\n components: Option<Vec<Component>>,\n extensions: Option<Vec<Component>>,\n}\n\n#[derive(RustcEncodable)]\nstruct Component {\n pkg: String,\n target: String,\n}\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nstruct Builder {\n channel: String,\n input: PathBuf,\n output: PathBuf,\n gpg_passphrase: String,\n digests: HashMap<String, String>,\n s3_address: String,\n date: String,\n rust_version: String,\n cargo_version: String,\n}\n\nfn main() {\n let mut args = env::args().skip(1);\n let input = PathBuf::from(args.next().unwrap());\n let output = PathBuf::from(args.next().unwrap());\n let date = args.next().unwrap();\n let channel = args.next().unwrap();\n let s3_address = args.next().unwrap();\n let mut passphrase = String::new();\n t!(io::stdin().read_to_string(&mut passphrase));\n\n Builder {\n channel: channel,\n input: input,\n output: output,\n gpg_passphrase: passphrase,\n digests: HashMap::new(),\n s3_address: s3_address,\n date: date,\n rust_version: String::new(),\n cargo_version: String::new(),\n }.build();\n}\n\nimpl Builder {\n fn build(&mut self) {\n self.rust_version = self.version(\"rust\", \"x86_64-unknown-linux-gnu\");\n self.cargo_version = self.version(\"cargo\", \"x86_64-unknown-linux-gnu\");\n\n self.digest_and_sign();\n let manifest = self.build_manifest();\n let manifest = toml::encode(&manifest).to_string();\n\n let filename = format!(\"channel-rust-{}.toml\", self.channel);\n self.write_manifest(&manifest, &filename);\n\n if self.channel != \"beta\" && self.channel != \"nightly\" {\n self.write_manifest(&manifest, \"channel-rust-stable.toml\");\n }\n }\n\n fn digest_and_sign(&mut self) {\n for file in t!(self.input.read_dir()).map(|e| t!(e).path()) {\n let filename = file.file_name().unwrap().to_str().unwrap();\n let digest = self.hash(&file);\n self.sign(&file);\n assert!(self.digests.insert(filename.to_string(), digest).is_none());\n }\n }\n\n fn build_manifest(&mut self) -> Manifest {\n let mut manifest = Manifest {\n manifest_version: \"2\".to_string(),\n date: self.date.to_string(),\n pkg: HashMap::new(),\n };\n\n self.package(\"rustc\", &mut manifest.pkg, HOSTS);\n self.package(\"cargo\", &mut manifest.pkg, HOSTS);\n self.package(\"rust-mingw\", &mut manifest.pkg, MINGW);\n self.package(\"rust-std\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-docs\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-src\", &mut manifest.pkg, &[\"*\"]);\n\n let mut pkg = Package {\n version: self.cached_version(\"rust\").to_string(),\n target: HashMap::new(),\n };\n for host in HOSTS {\n let filename = self.filename(\"rust\", host);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n pkg.target.insert(host.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n });\n continue\n }\n };\n let mut components = Vec::new();\n let mut extensions = Vec::new();\n\n \/\/ rustc\/rust-std\/cargo are all required, and so is rust-mingw if it's\n \/\/ available for the target.\n components.extend(vec![\n Component { pkg: 
\"rustc\".to_string(), target: host.to_string() },\n Component { pkg: \"rust-std\".to_string(), target: host.to_string() },\n Component { pkg: \"cargo\".to_string(), target: host.to_string() },\n ]);\n if host.contains(\"pc-windows-gnu\") {\n components.push(Component {\n pkg: \"rust-mingw\".to_string(),\n target: host.to_string(),\n });\n }\n\n \/\/ Docs, other standard libraries, and the source package are all\n \/\/ optional.\n extensions.push(Component {\n pkg: \"rust-docs\".to_string(),\n target: host.to_string(),\n });\n for target in TARGETS {\n if target != host {\n extensions.push(Component {\n pkg: \"rust-std\".to_string(),\n target: target.to_string(),\n });\n }\n }\n extensions.push(Component {\n pkg: \"rust-src\".to_string(),\n target: \"*\".to_string(),\n });\n\n pkg.target.insert(host.to_string(), Target {\n available: true,\n url: Some(self.url(\"rust\", host)),\n hash: Some(to_hex(digest.as_ref())),\n components: Some(components),\n extensions: Some(extensions),\n });\n }\n manifest.pkg.insert(\"rust\".to_string(), pkg);\n\n return manifest\n }\n\n fn package(&mut self,\n pkgname: &str,\n dst: &mut HashMap<String, Package>,\n targets: &[&str]) {\n let targets = targets.iter().map(|name| {\n let filename = self.filename(pkgname, name);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n return (name.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n })\n }\n };\n\n (name.to_string(), Target {\n available: true,\n url: Some(self.url(pkgname, name)),\n hash: Some(digest),\n components: None,\n extensions: None,\n })\n }).collect();\n\n dst.insert(pkgname.to_string(), Package {\n version: self.cached_version(pkgname).to_string(),\n target: targets,\n });\n }\n\n fn url(&self, component: &str, target: &str) -> String {\n format!(\"{}\/{}\/{}\",\n self.s3_address,\n self.date,\n self.filename(component, target))\n }\n\n fn filename(&self, component: &str, target: &str) -> String {\n if component == \"rust-src\" {\n format!(\"rust-src-{}.tar.gz\", self.channel)\n } else if component == \"cargo\" {\n format!(\"cargo-nightly-{}.tar.gz\", target)\n } else {\n format!(\"{}-{}-{}.tar.gz\", component, self.channel, target)\n }\n }\n\n fn cached_version(&self, component: &str) -> &str {\n if component == \"cargo\" {\n &self.cargo_version\n } else {\n &self.rust_version\n }\n }\n\n fn version(&self, component: &str, target: &str) -> String {\n let mut cmd = Command::new(\"tar\");\n let filename = self.filename(component, target);\n cmd.arg(\"xf\")\n .arg(self.input.join(&filename))\n .arg(format!(\"{}\/version\", filename.replace(\".tar.gz\", \"\")))\n .arg(\"-O\");\n let version = t!(cmd.output());\n if !version.status.success() {\n panic!(\"failed to learn version:\\n\\n{:?}\\n\\n{}\\n\\n{}\",\n cmd,\n String::from_utf8_lossy(&version.stdout),\n String::from_utf8_lossy(&version.stderr));\n }\n String::from_utf8_lossy(&version.stdout).trim().to_string()\n }\n\n fn hash(&self, path: &Path) -> String {\n let sha = t!(Command::new(\"shasum\")\n .arg(\"-a\").arg(\"256\")\n .arg(path)\n .output());\n assert!(sha.status.success());\n\n let filename = path.file_name().unwrap().to_str().unwrap();\n let sha256 = self.output.join(format!(\"{}.sha256\", filename));\n t!(t!(File::create(&sha256)).write_all(&sha.stdout));\n\n let stdout = String::from_utf8_lossy(&sha.stdout);\n stdout.split_whitespace().next().unwrap().to_string()\n }\n\n fn sign(&self, path: &Path) {\n let filename = 
path.file_name().unwrap().to_str().unwrap();\n let asc = self.output.join(format!(\"{}.asc\", filename));\n println!(\"signing: {:?}\", path);\n let mut cmd = Command::new(\"gpg\");\n cmd.arg(\"--no-tty\")\n .arg(\"--yes\")\n .arg(\"--passphrase-fd\").arg(\"0\")\n .arg(\"--armor\")\n .arg(\"--output\").arg(&asc)\n .arg(\"--detach-sign\").arg(path)\n .stdin(Stdio::piped());\n let mut child = t!(cmd.spawn());\n t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes()));\n assert!(t!(child.wait()).success());\n }\n\n fn write_manifest(&self, manifest: &str, name: &str) {\n let dst = self.output.join(name);\n t!(t!(File::create(&dst)).write_all(manifest.as_bytes()));\n self.hash(&dst);\n self.sign(&dst);\n }\n}\n\nfn to_hex(digest: &[u8]) -> String {\n let mut ret = String::new();\n for byte in digest {\n ret.push(hex((byte & 0xf0) >> 4));\n ret.push(hex(byte & 0xf));\n }\n return ret;\n\n fn hex(b: u8) -> char {\n match b {\n 0...9 => (b'0' + b) as char,\n _ => (b'a' + b - 10) as char,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add BotMessage support<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Super'ing to a mod<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>One way flow between processes. SEDA lite.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>REMEMBER TO TRACK THE FILES, TICKI<commit_after>\/\/! This is a temporary implementation of rsa until liboctavo is ported to redox\n\/\/! Note that this is not secure. It's trivial to crack. This implementation is just\n\/\/! for testing.\n\nfn mod_pow(b: u64, e: u64, m: u64) -> u64 {\n let mut c = 1;\n let mut e_prime = 0;\n\n loop {\n e_prime += 1;\n c = (b * c) % m;\n\n if e_prime >= e {\n break;\n }\n }\n\n c\n}\n\nfn encrypt(msg: u64, key: (u64, u64)) -> u64 {\n mod_pow(msg, key.1, key.0)\n}\nfn decrypt(enc_msg: u64, key: (u64, u64)) -> u64 {\n mod_pow(enc_msg, key.1, key.0)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Start logging stats on ingest.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example for child properties<commit_after>\/\/! # Child Properties\n\/\/!\n\/\/! This sample demonstrates how to set child properties.\n\n#![crate_type = \"bin\"]\n\nextern crate gtk;\n\nuse gtk::{BoxExt, Button, ContainerExt, Inhibit, Label, PackType, WidgetExt, Window, WindowType};\nuse gtk::Orientation::Vertical;\n\nfn main() {\n gtk::init().unwrap();\n\n let vbox = gtk::Box::new(Vertical, 0);\n\n let plus_button = Button::new_with_label(\"+\");\n vbox.add(&plus_button);\n \/\/ Set some child properties.\n \/\/ These calls need to be added after the Widget is added to the Box.\n vbox.set_child_expand(&plus_button, true);\n vbox.set_child_fill(&plus_button, true);\n vbox.set_child_padding(&plus_button, 50);\n vbox.set_child_pack_type(&plus_button, PackType::End);\n\n let counter_label = Label::new(\"0\");\n vbox.add(&counter_label);\n\n let minus_button = Button::new_with_label(\"-\");\n vbox.add(&minus_button);\n\n let window = Window::new(WindowType::Toplevel);\n\n window.add(&vbox);\n\n window.connect_delete_event(|_, _| {\n gtk::main_quit();\n Inhibit(false)\n });\n\n window.show_all();\n\n gtk::main();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #54367 - spastorino:add-thread-local-static-borrow-test, r=pnkfelix<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ run-pass\n\/\/\n\/\/ FIXME(#54366) - We probably shouldn't allow #[thread_local] static mut to get a 'static lifetime.\n\n#![feature(nll)]\n#![feature(thread_local)]\n\n#[thread_local]\nstatic mut X1: u64 = 0;\n\nstruct S1 {\n a: &'static mut u64,\n}\n\nimpl S1 {\n fn new(_x: u64) -> S1 {\n S1 {\n a: unsafe { &mut X1 },\n }\n }\n}\n\nfn main() {\n S1::new(0).a;\n}\n<|endoftext|>"} {"text":"<commit_before>use geom::matrix::{Matrix4, identity};\nuse geom::size::Size2D;\nuse opengles::gl2::{GLuint, delete_textures};\n\nuse std::cmp::FuzzyEq;\nuse dvec::DVec;\n\npub enum Format {\n ARGB32Format,\n RGB24Format\n}\n\npub enum Layer {\n ContainerLayerKind(@ContainerLayer),\n ImageLayerKind(@ImageLayer),\n TiledImageLayerKind(@TiledImageLayer)\n}\n\nimpl Layer {\n pure fn with_common<T>(&self, f: &fn(&mut CommonLayer) -> T) -> T {\n match *self {\n ContainerLayerKind(container_layer) => f(&mut container_layer.common),\n ImageLayerKind(image_layer) => f(&mut image_layer.common),\n TiledImageLayerKind(tiled_image_layer) => f(&mut tiled_image_layer.common)\n }\n }\n}\n\npub struct CommonLayer {\n mut parent: Option<Layer>,\n mut prev_sibling: Option<Layer>,\n mut next_sibling: Option<Layer>,\n\n mut transform: Matrix4<f32>,\n}\n\nimpl CommonLayer {\n \/\/ FIXME: Workaround for cross-crate bug regarding mutability of class fields\n fn set_transform(new_transform: Matrix4<f32>) {\n self.transform = new_transform;\n }\n}\n\npub fn CommonLayer() -> CommonLayer {\n CommonLayer {\n parent: None,\n prev_sibling: None,\n next_sibling: None,\n transform: identity(0.0f32),\n }\n}\n\n\npub struct ContainerLayer {\n mut common: CommonLayer,\n mut first_child: Option<Layer>,\n mut last_child: Option<Layer>,\n}\n\n\npub fn ContainerLayer() -> ContainerLayer {\n ContainerLayer {\n common: CommonLayer(),\n first_child: None,\n last_child: None,\n }\n}\n\nimpl ContainerLayer {\n fn each_child(&const self, f: &fn(Layer) -> bool) {\n let mut child_opt = self.first_child;\n while !child_opt.is_none() {\n let child = child_opt.get();\n if !f(child) { break; }\n child_opt = child.with_common(|x| x.next_sibling);\n }\n }\n\n \/\/\/ Only works when the child is disconnected from the layer tree.\n fn add_child(&const self, new_child: Layer) {\n do new_child.with_common |new_child_common| {\n assert new_child_common.parent.is_none();\n assert new_child_common.prev_sibling.is_none();\n assert new_child_common.next_sibling.is_none();\n\n match self.first_child {\n None => {}\n Some(copy first_child) => {\n do first_child.with_common |first_child_common| {\n assert first_child_common.prev_sibling.is_none();\n first_child_common.prev_sibling = Some(new_child);\n new_child_common.next_sibling = Some(first_child);\n }\n }\n }\n\n self.first_child = Some(new_child);\n\n match self.last_child {\n None => self.last_child = Some(new_child),\n Some(_) => {}\n }\n }\n }\n}\n\npub type WithDataFn = &fn(&[u8]);\n\npub trait ImageData {\n fn size() -> Size2D<uint>;\n\n \/\/ NB: stride is in pixels, like OpenGL GL_UNPACK_ROW_LENGTH.\n fn stride() -> uint;\n\n fn format() -> Format;\n 
fn with_data(WithDataFn);\n}\n\npub struct Image {\n data: @ImageData,\n mut texture: Option<GLuint>,\n\n drop {\n match copy self.texture {\n None => {\n \/\/ Nothing to do.\n }\n Some(texture) => {\n delete_textures(&[texture]);\n }\n }\n }\n}\n\npub impl Image {\n static fn new(data: @ImageData) -> Image {\n Image { data: data, texture: None }\n }\n}\n\n\/\/\/ Basic image data is a simple image data store that just owns the pixel data in memory.\npub struct BasicImageData {\n size: Size2D<uint>,\n stride: uint,\n format: Format,\n data: ~[u8]\n}\n\npub impl BasicImageData {\n static fn new(size: Size2D<uint>, stride: uint, format: Format, data: ~[u8]) ->\n BasicImageData {\n BasicImageData {\n size: size,\n stride: stride,\n format: format,\n data: move data\n }\n }\n}\n\npub impl BasicImageData : ImageData {\n fn size() -> Size2D<uint> { self.size }\n fn stride() -> uint { self.stride }\n fn format() -> Format { self.format }\n fn with_data(f: WithDataFn) { f(self.data) }\n}\n\npub struct ImageLayer {\n mut common: CommonLayer,\n mut image: @layers::Image,\n}\n\nimpl ImageLayer {\n \/\/ FIXME: Workaround for cross-crate bug\n fn set_image(new_image: @layers::Image) {\n self.image = new_image;\n }\n}\n\npub fn ImageLayer(image: @layers::Image) -> ImageLayer {\n ImageLayer {\n common : CommonLayer(),\n image : image,\n }\n}\n\npub struct TiledImageLayer {\n mut common: CommonLayer,\n tiles: DVec<@layers::Image>,\n mut tiles_across: uint,\n}\n\npub fn TiledImageLayer(in_tiles: &[@layers::Image], tiles_across: uint) -> TiledImageLayer {\n let tiles = DVec();\n for in_tiles.each |tile| {\n tiles.push(*tile);\n }\n\n TiledImageLayer {\n common: CommonLayer(),\n tiles: tiles,\n tiles_across: tiles_across\n }\n}\n\n<commit_msg>Don't copy the tiles DVec<commit_after>use geom::matrix::{Matrix4, identity};\nuse geom::size::Size2D;\nuse opengles::gl2::{GLuint, delete_textures};\n\nuse std::cmp::FuzzyEq;\nuse dvec::DVec;\n\npub enum Format {\n ARGB32Format,\n RGB24Format\n}\n\npub enum Layer {\n ContainerLayerKind(@ContainerLayer),\n ImageLayerKind(@ImageLayer),\n TiledImageLayerKind(@TiledImageLayer)\n}\n\nimpl Layer {\n pure fn with_common<T>(&self, f: &fn(&mut CommonLayer) -> T) -> T {\n match *self {\n ContainerLayerKind(container_layer) => f(&mut container_layer.common),\n ImageLayerKind(image_layer) => f(&mut image_layer.common),\n TiledImageLayerKind(tiled_image_layer) => f(&mut tiled_image_layer.common)\n }\n }\n}\n\npub struct CommonLayer {\n mut parent: Option<Layer>,\n mut prev_sibling: Option<Layer>,\n mut next_sibling: Option<Layer>,\n\n mut transform: Matrix4<f32>,\n}\n\nimpl CommonLayer {\n \/\/ FIXME: Workaround for cross-crate bug regarding mutability of class fields\n fn set_transform(new_transform: Matrix4<f32>) {\n self.transform = new_transform;\n }\n}\n\npub fn CommonLayer() -> CommonLayer {\n CommonLayer {\n parent: None,\n prev_sibling: None,\n next_sibling: None,\n transform: identity(0.0f32),\n }\n}\n\n\npub struct ContainerLayer {\n mut common: CommonLayer,\n mut first_child: Option<Layer>,\n mut last_child: Option<Layer>,\n}\n\n\npub fn ContainerLayer() -> ContainerLayer {\n ContainerLayer {\n common: CommonLayer(),\n first_child: None,\n last_child: None,\n }\n}\n\nimpl ContainerLayer {\n fn each_child(&const self, f: &fn(Layer) -> bool) {\n let mut child_opt = self.first_child;\n while !child_opt.is_none() {\n let child = child_opt.get();\n if !f(child) { break; }\n child_opt = child.with_common(|x| x.next_sibling);\n }\n }\n\n \/\/\/ Only works when the child is 
disconnected from the layer tree.\n fn add_child(&const self, new_child: Layer) {\n do new_child.with_common |new_child_common| {\n assert new_child_common.parent.is_none();\n assert new_child_common.prev_sibling.is_none();\n assert new_child_common.next_sibling.is_none();\n\n match self.first_child {\n None => {}\n Some(copy first_child) => {\n do first_child.with_common |first_child_common| {\n assert first_child_common.prev_sibling.is_none();\n first_child_common.prev_sibling = Some(new_child);\n new_child_common.next_sibling = Some(first_child);\n }\n }\n }\n\n self.first_child = Some(new_child);\n\n match self.last_child {\n None => self.last_child = Some(new_child),\n Some(_) => {}\n }\n }\n }\n}\n\npub type WithDataFn = &fn(&[u8]);\n\npub trait ImageData {\n fn size() -> Size2D<uint>;\n\n \/\/ NB: stride is in pixels, like OpenGL GL_UNPACK_ROW_LENGTH.\n fn stride() -> uint;\n\n fn format() -> Format;\n fn with_data(WithDataFn);\n}\n\npub struct Image {\n data: @ImageData,\n mut texture: Option<GLuint>,\n\n drop {\n match copy self.texture {\n None => {\n \/\/ Nothing to do.\n }\n Some(texture) => {\n delete_textures(&[texture]);\n }\n }\n }\n}\n\npub impl Image {\n static fn new(data: @ImageData) -> Image {\n Image { data: data, texture: None }\n }\n}\n\n\/\/\/ Basic image data is a simple image data store that just owns the pixel data in memory.\npub struct BasicImageData {\n size: Size2D<uint>,\n stride: uint,\n format: Format,\n data: ~[u8]\n}\n\npub impl BasicImageData {\n static fn new(size: Size2D<uint>, stride: uint, format: Format, data: ~[u8]) ->\n BasicImageData {\n BasicImageData {\n size: size,\n stride: stride,\n format: format,\n data: move data\n }\n }\n}\n\npub impl BasicImageData : ImageData {\n fn size() -> Size2D<uint> { self.size }\n fn stride() -> uint { self.stride }\n fn format() -> Format { self.format }\n fn with_data(f: WithDataFn) { f(self.data) }\n}\n\npub struct ImageLayer {\n mut common: CommonLayer,\n mut image: @layers::Image,\n}\n\nimpl ImageLayer {\n \/\/ FIXME: Workaround for cross-crate bug\n fn set_image(new_image: @layers::Image) {\n self.image = new_image;\n }\n}\n\npub fn ImageLayer(image: @layers::Image) -> ImageLayer {\n ImageLayer {\n common : CommonLayer(),\n image : image,\n }\n}\n\npub struct TiledImageLayer {\n mut common: CommonLayer,\n tiles: DVec<@layers::Image>,\n mut tiles_across: uint,\n}\n\npub fn TiledImageLayer(in_tiles: &[@layers::Image], tiles_across: uint) -> TiledImageLayer {\n let tiles = DVec();\n for in_tiles.each |tile| {\n tiles.push(*tile);\n }\n\n TiledImageLayer {\n common: CommonLayer(),\n tiles: move tiles,\n tiles_across: tiles_across\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update join_channels method<commit_after><|endoftext|>"} {"text":"<commit_before>use crate::io::ErrorKind;\nuse std::io::{self, BufWriter, Write};\nuse std::process::exit;\nuse std::sync::mpsc::{sync_channel, Receiver, SyncSender};\nuse std::thread::{self, JoinHandle};\n\nuse crate::{constants::*, refiner};\nuse regex::Regex;\n\nconst HUNK_HEADER: &str = \"\\x1b[36m\"; \/\/ Cyan\n\nlazy_static! 
{\n static ref STATIC_HEADER_PREFIXES: Vec<(&'static str, &'static str)> = vec![\n (\"diff \", BOLD),\n (\"index \", BOLD),\n (\"--- \", BOLD),\n (\"+++ \", BOLD),\n (\"@@ \", HUNK_HEADER),\n ];\n static ref ANSI_COLOR_REGEX: Regex = Regex::new(\"\\x1b[^m]*m\").unwrap();\n}\n\n#[must_use]\nfn get_fixed_highlight(line: &str) -> &str {\n for static_header_prefix in STATIC_HEADER_PREFIXES.iter() {\n let prefix = static_header_prefix.0;\n if line.starts_with(prefix) {\n return static_header_prefix.1;\n }\n }\n\n return \"\";\n}\n\nfn print<W: io::Write + Send>(stream: &mut BufWriter<W>, text: &str) {\n if let Err(error) = stream.write_all(text.as_bytes()) {\n if error.kind() == ErrorKind::BrokenPipe {\n \/\/ This is fine, somebody probably just quit their pager before it\n \/\/ was done reading our output.\n exit(0);\n }\n\n panic!(\"Error writing diff to pager: {:?}\", error);\n }\n}\n\n\/\/ FIXME: Don't call this \"Stringinator\"!!\nstruct Stringinator {\n \/\/ FIXME: This should be an Option<String>, which only contains the result\n \/\/ if the computation is done\n result: String,\n}\n\nimpl Stringinator {\n pub fn from_string(result: String) -> Stringinator {\n return Stringinator { result };\n }\n\n pub fn is_empty(&self) -> bool {\n return self.result.is_empty();\n }\n}\n\npub struct LineCollector {\n old_text: String,\n new_text: String,\n plain_text: String,\n consumer_thread: Option<JoinHandle<()>>,\n\n \/\/ FIXME: I'd rather have had a SyncSender of some trait here. That would\n \/\/ enable us to have two separate result implementations, one which just\n \/\/ returns a string and another that does a background computation first.\n \/\/ But I failed to figure out how when I tried, more Googling needed!\n queue_putter: SyncSender<Stringinator>,\n}\n\nimpl Drop for LineCollector {\n fn drop(&mut self) {\n \/\/ Flush any outstanding lines. This can be done in any order, at most\n \/\/ one of them is going to do anything anyway.\n self.drain_oldnew();\n self.drain_plain();\n\n \/\/ Tell the consumer thread to drain and quit. 
Sending an empty string\n \/\/ like this is the secret handshake for requesting a shutdown.\n self.queue_putter\n .send(Stringinator::from_string(\"\".to_string()))\n .unwrap();\n\n \/\/ Wait for the consumer thread to finish\n \/\/ https:\/\/stackoverflow.com\/q\/57670145\/473672\n self.consumer_thread.take().map(JoinHandle::join);\n }\n}\n\nimpl LineCollector {\n pub fn new<W: io::Write + Send + 'static>(output: W) -> LineCollector {\n \/\/ Allocate a queue where we can push our futures to the consumer thread\n \/\/\n \/\/ FIXME: The queue should be bounded to 2x the number of logical CPUs.\n \/\/ 1x for the entries that need CPU time for diffing, and another 1x\n \/\/ that just contain text to print and won't need any processing time.\n let (queue_putter, queue_getter): (SyncSender<Stringinator>, Receiver<Stringinator>) =\n sync_channel(16);\n\n \/\/ This thread takes futures and prints their results\n let consumer = thread::spawn(move || {\n let mut output = BufWriter::new(output);\n\n loop {\n if let Ok(print_me) = queue_getter.recv() {\n if print_me.is_empty() {\n \/\/ Secret handshake received, done!\n break;\n }\n print(&mut output, &print_me.result);\n }\n }\n });\n\n return LineCollector {\n old_text: String::from(\"\"),\n new_text: String::from(\"\"),\n plain_text: String::from(\"\"),\n consumer_thread: Some(consumer),\n queue_putter,\n };\n }\n\n fn drain_oldnew(&mut self) {\n if self.old_text.is_empty() && self.new_text.is_empty() {\n return;\n }\n\n \/\/ FIXME: This should be enqueued as a future containing the refiner::format() call\n let mut output = String::new();\n for line in refiner::format(&self.old_text, &self.new_text) {\n output.push_str(&line);\n output.push('\\n');\n }\n self.queue_putter\n .send(Stringinator::from_string(output))\n .unwrap();\n\n self.old_text.clear();\n self.new_text.clear();\n }\n\n fn drain_plain(&mut self) {\n if self.plain_text.is_empty() {\n return;\n }\n\n \/\/ FIXME: Create an already-resolved future returning this text, then\n \/\/ store that future in our queue.\n self.queue_putter\n .send(Stringinator::from_string(String::from(&self.plain_text)))\n .unwrap();\n\n self.plain_text.clear();\n }\n\n fn consume_plain_line(&mut self, line: &str) {\n self.drain_oldnew();\n self.plain_text.push_str(line);\n self.plain_text.push('\\n');\n }\n\n fn consume_old_line(&mut self, line: &str) {\n self.drain_plain();\n self.old_text.push_str(&line[1..]);\n self.old_text.push('\\n');\n }\n\n fn consume_new_line(&mut self, line: &str) {\n self.drain_plain();\n self.new_text.push_str(&line[1..]);\n self.new_text.push('\\n');\n }\n\n fn consume_no_eof_newline_marker(&mut self) {\n if !self.new_text.is_empty() {\n \/\/ New section comes after old, so if we get in here it's a new\n \/\/ section that doesn't end in a newline. Remove its trailing\n \/\/ newline.\n assert!(self.new_text.pop().unwrap() == '\\n');\n return;\n }\n\n if !self.old_text.is_empty() {\n \/\/ Old text doesn't end in a newline, remove its trailing newline\n assert!(self.old_text.pop().unwrap() == '\\n');\n return;\n }\n\n \/\/ It's a piece of unchanged text that doesn't end in a newline, just\n \/\/ consume the colorized marker as plain text\n self.consume_plain_line(&format!(\n \"{}{}{}\",\n NO_EOF_NEWLINE_COLOR, &NO_EOF_NEWLINE_MARKER, NORMAL\n ))\n }\n\n pub fn consume_line(&mut self, line: String) {\n \/\/ Strip out incoming ANSI formatting. 
This enables us to highlight\n \/\/ already-colored input.\n let line = ANSI_COLOR_REGEX.replace_all(&line, \"\");\n\n let fixed_highlight = get_fixed_highlight(&line);\n if !fixed_highlight.is_empty() {\n self.consume_plain_line(&format!(\"{}{}{}\", fixed_highlight, line, NORMAL));\n return;\n }\n\n if line.is_empty() {\n self.consume_plain_line(\"\");\n return;\n }\n\n if line.starts_with('-') {\n self.consume_old_line(&line);\n return;\n }\n\n if line.starts_with('+') {\n self.consume_new_line(&line);\n return;\n }\n\n if line == NO_EOF_NEWLINE_MARKER {\n self.consume_no_eof_newline_marker();\n return;\n }\n\n self.consume_plain_line(&line);\n }\n}\n<commit_msg>Add a Stringinator getter<commit_after>use crate::io::ErrorKind;\nuse std::io::{self, BufWriter, Write};\nuse std::process::exit;\nuse std::sync::mpsc::{sync_channel, Receiver, SyncSender};\nuse std::thread::{self, JoinHandle};\n\nuse crate::{constants::*, refiner};\nuse regex::Regex;\n\nconst HUNK_HEADER: &str = \"\\x1b[36m\"; \/\/ Cyan\n\nlazy_static! {\n static ref STATIC_HEADER_PREFIXES: Vec<(&'static str, &'static str)> = vec![\n (\"diff \", BOLD),\n (\"index \", BOLD),\n (\"--- \", BOLD),\n (\"+++ \", BOLD),\n (\"@@ \", HUNK_HEADER),\n ];\n static ref ANSI_COLOR_REGEX: Regex = Regex::new(\"\\x1b[^m]*m\").unwrap();\n}\n\n#[must_use]\nfn get_fixed_highlight(line: &str) -> &str {\n for static_header_prefix in STATIC_HEADER_PREFIXES.iter() {\n let prefix = static_header_prefix.0;\n if line.starts_with(prefix) {\n return static_header_prefix.1;\n }\n }\n\n return \"\";\n}\n\nfn print<W: io::Write + Send>(stream: &mut BufWriter<W>, text: &str) {\n if let Err(error) = stream.write_all(text.as_bytes()) {\n if error.kind() == ErrorKind::BrokenPipe {\n \/\/ This is fine, somebody probably just quit their pager before it\n \/\/ was done reading our output.\n exit(0);\n }\n\n panic!(\"Error writing diff to pager: {:?}\", error);\n }\n}\n\n\/\/ FIXME: Don't call this \"Stringinator\"!!\nstruct Stringinator {\n \/\/ FIXME: This should be an Option<String>, which only contains the result\n \/\/ if the computation is done\n result: String,\n}\n\nimpl Stringinator {\n pub fn from_string(result: String) -> Stringinator {\n return Stringinator { result };\n }\n\n pub fn is_empty(&self) -> bool {\n return self.result.is_empty();\n }\n\n pub fn get(&self) -> &str {\n return &self.result;\n }\n}\n\npub struct LineCollector {\n old_text: String,\n new_text: String,\n plain_text: String,\n consumer_thread: Option<JoinHandle<()>>,\n\n \/\/ FIXME: I'd rather have had a SyncSender of some trait here. That would\n \/\/ enable us to have two separate result implementations, one which just\n \/\/ returns a string and another that does a background computation first.\n \/\/ But I failed to figure out how when I tried, more Googling needed!\n queue_putter: SyncSender<Stringinator>,\n}\n\nimpl Drop for LineCollector {\n fn drop(&mut self) {\n \/\/ Flush any outstanding lines. This can be done in any order, at most\n \/\/ one of them is going to do anything anyway.\n self.drain_oldnew();\n self.drain_plain();\n\n \/\/ Tell the consumer thread to drain and quit. 
Sending an empty string\n \/\/ like this is the secret handshake for requesting a shutdown.\n self.queue_putter\n .send(Stringinator::from_string(\"\".to_string()))\n .unwrap();\n\n \/\/ Wait for the consumer thread to finish\n \/\/ https:\/\/stackoverflow.com\/q\/57670145\/473672\n self.consumer_thread.take().map(JoinHandle::join);\n }\n}\n\nimpl LineCollector {\n pub fn new<W: io::Write + Send + 'static>(output: W) -> LineCollector {\n \/\/ Allocate a queue where we can push our futures to the consumer thread\n \/\/\n \/\/ FIXME: The queue should be bounded to 2x the number of logical CPUs.\n \/\/ 1x for the entries that need CPU time for diffing, and another 1x\n \/\/ that just contain text to print and won't need any processing time.\n let (queue_putter, queue_getter): (SyncSender<Stringinator>, Receiver<Stringinator>) =\n sync_channel(16);\n\n \/\/ This thread takes futures and prints their results\n let consumer = thread::spawn(move || {\n let mut output = BufWriter::new(output);\n\n loop {\n if let Ok(print_me) = queue_getter.recv() {\n if print_me.is_empty() {\n \/\/ Secret handshake received, done!\n break;\n }\n print(&mut output, print_me.get());\n }\n }\n });\n\n return LineCollector {\n old_text: String::from(\"\"),\n new_text: String::from(\"\"),\n plain_text: String::from(\"\"),\n consumer_thread: Some(consumer),\n queue_putter,\n };\n }\n\n fn drain_oldnew(&mut self) {\n if self.old_text.is_empty() && self.new_text.is_empty() {\n return;\n }\n\n \/\/ FIXME: This should be enqueued as a future containing the refiner::format() call\n let mut output = String::new();\n for line in refiner::format(&self.old_text, &self.new_text) {\n output.push_str(&line);\n output.push('\\n');\n }\n self.queue_putter\n .send(Stringinator::from_string(output))\n .unwrap();\n\n self.old_text.clear();\n self.new_text.clear();\n }\n\n fn drain_plain(&mut self) {\n if self.plain_text.is_empty() {\n return;\n }\n\n \/\/ FIXME: Create an already-resolved future returning this text, then\n \/\/ store that future in our queue.\n self.queue_putter\n .send(Stringinator::from_string(String::from(&self.plain_text)))\n .unwrap();\n\n self.plain_text.clear();\n }\n\n fn consume_plain_line(&mut self, line: &str) {\n self.drain_oldnew();\n self.plain_text.push_str(line);\n self.plain_text.push('\\n');\n }\n\n fn consume_old_line(&mut self, line: &str) {\n self.drain_plain();\n self.old_text.push_str(&line[1..]);\n self.old_text.push('\\n');\n }\n\n fn consume_new_line(&mut self, line: &str) {\n self.drain_plain();\n self.new_text.push_str(&line[1..]);\n self.new_text.push('\\n');\n }\n\n fn consume_no_eof_newline_marker(&mut self) {\n if !self.new_text.is_empty() {\n \/\/ New section comes after old, so if we get in here it's a new\n \/\/ section that doesn't end in a newline. Remove its trailing\n \/\/ newline.\n assert!(self.new_text.pop().unwrap() == '\\n');\n return;\n }\n\n if !self.old_text.is_empty() {\n \/\/ Old text doesn't end in a newline, remove its trailing newline\n assert!(self.old_text.pop().unwrap() == '\\n');\n return;\n }\n\n \/\/ It's a piece of unchanged text that doesn't end in a newline, just\n \/\/ consume the colorized marker as plain text\n self.consume_plain_line(&format!(\n \"{}{}{}\",\n NO_EOF_NEWLINE_COLOR, &NO_EOF_NEWLINE_MARKER, NORMAL\n ))\n }\n\n pub fn consume_line(&mut self, line: String) {\n \/\/ Strip out incoming ANSI formatting. 
This enables us to highlight\n \/\/ already-colored input.\n let line = ANSI_COLOR_REGEX.replace_all(&line, \"\");\n\n let fixed_highlight = get_fixed_highlight(&line);\n if !fixed_highlight.is_empty() {\n self.consume_plain_line(&format!(\"{}{}{}\", fixed_highlight, line, NORMAL));\n return;\n }\n\n if line.is_empty() {\n self.consume_plain_line(\"\");\n return;\n }\n\n if line.starts_with('-') {\n self.consume_old_line(&line);\n return;\n }\n\n if line.starts_with('+') {\n self.consume_new_line(&line);\n return;\n }\n\n if line == NO_EOF_NEWLINE_MARKER {\n self.consume_no_eof_newline_marker();\n return;\n }\n\n self.consume_plain_line(&line);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add AWS Workspaces integration tests<commit_after>#![cfg(feature = \"workspaces\")]\n\nextern crate rusoto;\n\nuse rusoto::workspaces::{WorkspacesClient, DescribeWorkspacesRequest};\nuse rusoto::{DefaultCredentialsProvider, Region};\n\n#[test]\nfn should_describe_workspaces() {\n let credentials = DefaultCredentialsProvider::new().unwrap();\n let client = WorkspacesClient::new(credentials, Region::UsEast1);\n\n let request = DescribeWorkspacesRequest::default();\n\n match client.describe_workspaces(&request) {\n Ok(response) => {\n println!(\"{:#?}\", response); \n assert!(true) \n },\n Err(err) => panic!(\"Expected OK response, got {:#?}\", err)\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added more benchmark functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>04 - variables<commit_after>fn main() {\n let an_integer = 1u;\n let a_boolean = true;\n let unit = ();\n\n \/\/ copy `an_integer` into `copied_integer`\n let copied_integer = an_integer;\n\n println!(\"An integer: {}\", copied_integer);\n println!(\"A boolean: {}\", a_boolean);\n println!(\"Meet the unit value: {}\", unit);\n\n \/\/ The compiler warns about unused variables; these warnings can be\n \/\/ silenced by prefixing the variable name with an underscore\n let _unused_variable = 3u;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added benchmarks comparing ImplicitTree to BTreeMap<commit_after>#![feature(test)]\nextern crate test;\nextern crate rand;\nextern crate implicit_tree;\nuse std::time;\n\nuse std::collections::BTreeMap;\nuse std::time::Duration;\nuse rand::{XorShiftRng, SeedableRng, Rng};\n\nuse implicit_tree::{ImplicitTree, ImplicitTreeRefill, DriverFromTo};\n\ntype Tree = ImplicitTree<usize>;\n\nfn btree_single_delete_n(n: usize, rm_items: usize, iters: u64) {\n let mut rng = XorShiftRng::from_seed([1,2,3,4]);\n let mut elapsed_nanos = 0;\n for _ in 0..iters {\n let mut btmap = BTreeMap::new();\n for i in 0..n {\n btmap.insert(i, i);\n }\n\n let keys = {\n let mut keys = vec![];\n let mut pool: Vec<_> = (0..n).collect();\n\n for i in 0..rm_items {\n let n = rng.gen_range(0, n - i);\n let next = pool.swap_remove(n);\n keys.push(next);\n }\n\n keys\n };\n\n let start = time::SystemTime::now();\n for i in 0..rm_items {\n let x = btmap.remove(&keys[i]);\n test::black_box(x);\n }\n let elapsed = start.elapsed().unwrap();\n elapsed_nanos += nanos(elapsed);\n }\n\n println!(\"average time to delete {} elements from BTreeMap of {} elements: {}ns\", rm_items, n, elapsed_nanos\/iters)\n}\n\nfn imptree_single_delete_n(n: usize, rm_items: usize, iters: u64) {\n let mut rng = XorShiftRng::from_seed([1,2,3,4]);\n let mut 
elapsed_nanos = 0;\n\n let elems: Vec<_> = (1..n+1).collect();\n\n let tree = Tree::new(elems);\n let mut copy = tree.clone();\n let mut output = Vec::with_capacity(tree.size());\n\n for _ in 0..iters {\n let keys = {\n let mut pool: Vec<_> = (1..n+1).collect();\n let mut keys = vec![];\n\n for i in 0..rm_items {\n let r = rng.gen_range(0, n-i);\n let next = pool.swap_remove(r);\n keys.push(next);\n }\n\n keys\n };\n\n copy.refill(&tree);\n\n\n let start = time::SystemTime::now();\n for i in 0..rm_items {\n output.truncate(0);\n let x = copy.delete_bulk(&mut DriverFromTo::new(keys[i], keys[i]), &mut output);\n test::black_box(x);\n }\n let elapsed = start.elapsed().unwrap();\n elapsed_nanos += nanos(elapsed);\n }\n\n println!(\"average time to delete {} elements from implicit_tree of {} elements: {}ns\", rm_items, n, elapsed_nanos\/iters)\n}\n\n\n\nfn btree_bulk_delete_n(n: usize, rm_items: usize, iters: u64) {\n let mut rng = XorShiftRng::from_seed([1,2,3,4]);\n let mut elapsed_nanos = 0;\n for _ in 0..iters {\n let mut btmap = BTreeMap::new();\n for i in 0..n {\n btmap.insert(i, i);\n }\n\n let from =\n if n > rm_items { rng.gen_range(0, n - rm_items) }\n else { 0 };\n let keys: Vec<_> = (from..n).collect();\n\n let start = time::SystemTime::now();\n for i in 0..rm_items {\n let x = btmap.remove(&keys[i]);\n test::black_box(x);\n }\n let elapsed = start.elapsed().unwrap();\n elapsed_nanos += nanos(elapsed);\n }\n\n println!(\"average time to bulk delete {} elements from BTreeMap of {} elements: {}ns\", rm_items, n, elapsed_nanos\/iters)\n}\n\nfn imptree_bulk_delete_n(n: usize, rm_items: usize, iters: u64) {\n let mut rng = XorShiftRng::from_seed([1,2,3,4]);\n let mut elapsed_nanos = 0;\n\n let elems: Vec<_> = (1..n+1).collect();\n let tree = Tree::new(elems);\n let mut copy = tree.clone();\n let mut output = Vec::with_capacity(tree.size());\n\n\n for _ in 0..iters {\n let from =\n if n > rm_items { rng.gen_range(0, n - rm_items) }\n else { 0 };\n output.truncate(0);\n copy.refill(&tree);\n\n let start = time::SystemTime::now();\n let x = copy.delete_bulk(&mut DriverFromTo::new(from, from+rm_items), &mut output);\n test::black_box(x);\n let elapsed = start.elapsed().unwrap();\n elapsed_nanos += nanos(elapsed);\n }\n\n println!(\"average time to bulk delete {} elements from implicit_tree of {} elements: {}ns\", rm_items, n, elapsed_nanos\/iters)\n}\n\n#[inline]\nfn nanos(d: Duration) -> u64 {\n d.as_secs()*1000000000 + d.subsec_nanos() as u64\n}\n\n\nfn main() {\n imptree_bulk_delete_n(100, 100, 10000000);\n\n\n imptree_bulk_delete_n(100, 100, 5000000);\n imptree_bulk_delete_n(1000, 100, 1200000);\n imptree_bulk_delete_n(10000, 100, 500000);\n imptree_bulk_delete_n(100000, 100, 30000);\n\n btree_bulk_delete_n(100, 100, 200000);\n btree_bulk_delete_n(1000, 100, 200000);\n btree_bulk_delete_n(10000, 100, 20000);\n btree_bulk_delete_n(100000, 100, 5000);\n\n imptree_single_delete_n(100, 100, 100000);\n imptree_single_delete_n(1000, 100, 30000);\n imptree_single_delete_n(10000, 100, 10000);\n imptree_single_delete_n(100000, 100, 800);\n\n btree_single_delete_n(100, 100, 100000);\n btree_single_delete_n(1000, 100, 30000);\n btree_single_delete_n(10000, 100, 10000);\n btree_single_delete_n(100000, 100, 800);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Original implementation taken from rust-memchr\n\/\/ Copyright 2015 Andrew Gallant, bluss and Nicolas Koch\n\nuse cmp;\nuse mem;\n\nconst LO_U64: u64 = 0x0101010101010101;\nconst HI_U64: u64 = 0x8080808080808080;\n\n\/\/ use truncation\nconst LO_USIZE: usize = LO_U64 as usize;\nconst HI_USIZE: usize = HI_U64 as usize;\n\n\/\/\/ Return `true` if `x` contains any zero byte.\n\/\/\/\n\/\/\/ From *Matters Computational*, J. Arndt\n\/\/\/\n\/\/\/ \"The idea is to subtract one from each of the bytes and then look for\n\/\/\/ bytes where the borrow propagated all the way to the most significant\n\/\/\/ bit.\"\n#[inline]\nfn contains_zero_byte(x: usize) -> bool {\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n}\n\n#[cfg(target_pointer_width = \"16\")]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) << 8 | b as usize\n}\n\n#[cfg(not(target_pointer_width = \"16\"))]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) * (::usize::MAX \/ 255)\n}\n\n\/\/\/ Return the first index matching the byte `x` in `text`.\npub fn memchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned initial part, before the first word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the last remaining part, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ search up to an aligned boundary\n let mut offset = ptr.align_offset(usize_bytes);\n if offset > 0 {\n offset = cmp::min(offset, len);\n if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {\n return Some(index);\n }\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n if len >= 2 * usize_bytes {\n while offset <= len - 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize) as *const usize);\n let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset += usize_bytes * 2;\n }\n }\n\n \/\/ find the byte after the point the body loop stopped\n text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)\n}\n\n\/\/\/ Return the last index matching the byte `x` in `text`.\npub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned tail, after the last word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the first remaining bytes, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ search to an aligned boundary\n let end_align = (ptr as usize + len) & (usize_bytes - 1);\n let mut offset;\n if end_align > 0 {\n offset = if end_align >= len { 0 } else { len - end_align };\n if let Some(index) = 
text[offset..].iter().rposition(|elt| *elt == x) {\n return Some(offset + index);\n }\n } else {\n offset = len;\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n while offset >= 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize - 2 * usize_bytes as isize) as *const usize);\n let v = *(ptr.offset(offset as isize - usize_bytes as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset -= 2 * usize_bytes;\n }\n\n \/\/ find the byte before the point the body loop stopped\n text[..offset].iter().rposition(|elt| *elt == x)\n}\n<commit_msg>make memrchr use align_offset<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ Original implementation taken from rust-memchr\n\/\/ Copyright 2015 Andrew Gallant, bluss and Nicolas Koch\n\nuse cmp;\nuse mem;\n\nconst LO_U64: u64 = 0x0101010101010101;\nconst HI_U64: u64 = 0x8080808080808080;\n\n\/\/ use truncation\nconst LO_USIZE: usize = LO_U64 as usize;\nconst HI_USIZE: usize = HI_U64 as usize;\n\n\/\/\/ Return `true` if `x` contains any zero byte.\n\/\/\/\n\/\/\/ From *Matters Computational*, J. Arndt\n\/\/\/\n\/\/\/ \"The idea is to subtract one from each of the bytes and then look for\n\/\/\/ bytes where the borrow propagated all the way to the most significant\n\/\/\/ bit.\"\n#[inline]\nfn contains_zero_byte(x: usize) -> bool {\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n}\n\n#[cfg(target_pointer_width = \"16\")]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) << 8 | b as usize\n}\n\n#[cfg(not(target_pointer_width = \"16\"))]\n#[inline]\nfn repeat_byte(b: u8) -> usize {\n (b as usize) * (::usize::MAX \/ 255)\n}\n\n\/\/\/ Return the first index matching the byte `x` in `text`.\npub fn memchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned initial part, before the first word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the last remaining part, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ search up to an aligned boundary\n let mut offset = ptr.align_offset(usize_bytes);\n if offset > 0 {\n offset = cmp::min(offset, len);\n if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {\n return Some(index);\n }\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n if len >= 2 * usize_bytes {\n while offset <= len - 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize) as *const usize);\n let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset += usize_bytes * 2;\n }\n }\n\n \/\/ find the byte after the point the 
body loop stopped\n text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)\n}\n\n\/\/\/ Return the last index matching the byte `x` in `text`.\npub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {\n \/\/ Scan for a single byte value by reading two `usize` words at a time.\n \/\/\n \/\/ Split `text` in three parts\n \/\/ - unaligned tail, after the last word aligned address in text\n \/\/ - body, scan by 2 words at a time\n \/\/ - the first remaining bytes, < 2 word size\n let len = text.len();\n let ptr = text.as_ptr();\n let usize_bytes = mem::size_of::<usize>();\n\n \/\/ a version of align_offset that says how much must be *subtracted*\n \/\/ from a pointer to be aligned.\n #[inline(always)]\n fn align_offset_down(ptr: *const u8, align: usize) -> usize {\n let align_offset = ptr.align_offset(align);\n if align_offset > align {\n \/\/ Not possible to align\n usize::max_value()\n } else if align_offset == 0 {\n 0\n } else {\n \/\/ E.g. if align=8 and we have to add 1, then we can also subtract 7.\n align - align_offset\n }\n }\n\n \/\/ search to an aligned boundary\n let end_align = align_offset_down(unsafe { ptr.offset(len as isize) }, usize_bytes);\n let mut offset;\n if end_align > 0 {\n offset = if end_align >= len { 0 } else { len - end_align };\n if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) {\n return Some(offset + index);\n }\n } else {\n offset = len;\n }\n\n \/\/ search the body of the text\n let repeated_x = repeat_byte(x);\n\n while offset >= 2 * usize_bytes {\n unsafe {\n let u = *(ptr.offset(offset as isize - 2 * usize_bytes as isize) as *const usize);\n let v = *(ptr.offset(offset as isize - usize_bytes as isize) as *const usize);\n\n \/\/ break if there is a matching byte\n let zu = contains_zero_byte(u ^ repeated_x);\n let zv = contains_zero_byte(v ^ repeated_x);\n if zu || zv {\n break;\n }\n }\n offset -= 2 * usize_bytes;\n }\n\n \/\/ find the byte before the point the body loop stopped\n text[..offset].iter().rposition(|elt| *elt == x)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Problem: rustfmt fails on a line that overshoots 100 chars<commit_after><|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n \/\/ Redraw window\n self.window.set([0, 0, 0, 255]);\n\n for (y, row) in self.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n if self.cursor().x == x && self.cursor().y == y {\n self.window.rect(8 * (y - self.scroll_y) as isize,\n 16 * (x - self.scroll_x) as isize,\n 8,\n 16,\n [255, 255, 255, 255]);\n self.window.char(8 * (y - self.scroll_y) as isize,\n 16 * (x - self.scroll_x) as isize,\n *c,\n [0, 0, 0, 255]);\n } else {\n self.window.char(8 * (y - self.scroll_y) as isize,\n 16 * (x - self.scroll_x) as isize,\n *c,\n [255, 255, 255, 255]);\n }\n }\n }\n self.window.sync();\n }\n}\n<commit_msg>Swap x and y<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Redraw the window\n pub fn redraw(&mut self) {\n \/\/ Redraw window\n self.window.set([0, 0, 0, 255]);\n\n for (y, row) in self.text.iter().enumerate() {\n for (x, c) in row.iter().enumerate() {\n if self.cursor().x == x && self.cursor().y == y {\n self.window.rect(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n 8,\n 16,\n [255, 255, 255, 255]);\n self.window.char(8 * (x - self.scroll_y) as isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n [0, 0, 0, 255]);\n } else {\n self.window.char(8 * (x - self.scroll_y) as 
isize,\n 16 * (y - self.scroll_x) as isize,\n *c,\n [255, 255, 255, 255]);\n }\n }\n }\n self.window.sync();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>CLI interface setup<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(feature = \"allocator_api\",\n reason = \"the precise API and guarantees it provides may be tweaked \\\n slightly, especially to possibly take into account the \\\n types being stored to make room for a future \\\n tracing garbage collector\",\n issue = \"32838\")]\n\nuse core::intrinsics::{min_align_of_val, size_of_val};\nuse core::mem::{self, ManuallyDrop};\nuse core::usize;\n\n#[doc(inline)]\npub use core::alloc::*;\n\n#[doc(hidden)]\npub mod __core {\n pub use core::*;\n}\n\nextern \"Rust\" {\n #[allocator]\n #[rustc_allocator_nounwind]\n fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;\n #[cold]\n #[rustc_allocator_nounwind]\n fn __rust_oom(err: *const u8) -> !;\n #[rustc_allocator_nounwind]\n fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);\n #[rustc_allocator_nounwind]\n fn __rust_usable_size(layout: *const u8,\n min: *mut usize,\n max: *mut usize);\n #[rustc_allocator_nounwind]\n fn __rust_realloc(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_alloc_excess(size: usize,\n align: usize,\n excess: *mut usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_realloc_excess(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize,\n excess: *mut usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_grow_in_place(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize) -> u8;\n #[rustc_allocator_nounwind]\n fn __rust_shrink_in_place(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize) -> u8;\n}\n\n#[derive(Copy, Clone, Default, Debug)]\npub struct Heap;\n\nunsafe impl Alloc for Heap {\n #[inline]\n unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_alloc(layout.size(),\n layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(ptr)\n }\n }\n\n #[inline]\n #[cold]\n fn oom(&mut self, err: AllocErr) -> ! 
{\n unsafe {\n __rust_oom(&err as *const AllocErr as *const u8)\n }\n }\n\n #[inline]\n unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {\n __rust_dealloc(ptr, layout.size(), layout.align())\n }\n\n #[inline]\n fn usable_size(&self, layout: &Layout) -> (usize, usize) {\n let mut min = 0;\n let mut max = 0;\n unsafe {\n __rust_usable_size(layout as *const Layout as *const u8,\n &mut min,\n &mut max);\n }\n (min, max)\n }\n\n #[inline]\n unsafe fn realloc(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout)\n -> Result<*mut u8, AllocErr>\n {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_realloc(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n mem::forget(err);\n Ok(ptr)\n }\n }\n\n #[inline]\n unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_alloc_zeroed(layout.size(),\n layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(ptr)\n }\n }\n\n #[inline]\n unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let mut size = 0;\n let ptr = __rust_alloc_excess(layout.size(),\n layout.align(),\n &mut size,\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(Excess(ptr, size))\n }\n }\n\n #[inline]\n unsafe fn realloc_excess(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout) -> Result<Excess, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let mut size = 0;\n let ptr = __rust_realloc_excess(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align(),\n &mut size,\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(Excess(ptr, size))\n }\n }\n\n #[inline]\n unsafe fn grow_in_place(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout)\n -> Result<(), CannotReallocInPlace>\n {\n debug_assert!(new_layout.size() >= layout.size());\n debug_assert!(new_layout.align() == layout.align());\n let ret = __rust_grow_in_place(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align());\n if ret != 0 {\n Ok(())\n } else {\n Err(CannotReallocInPlace)\n }\n }\n\n #[inline]\n unsafe fn shrink_in_place(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout) -> Result<(), CannotReallocInPlace> {\n debug_assert!(new_layout.size() <= layout.size());\n debug_assert!(new_layout.align() == layout.align());\n let ret = __rust_shrink_in_place(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align());\n if ret != 0 {\n Ok(())\n } else {\n Err(CannotReallocInPlace)\n }\n }\n}\n\n\/\/\/ The allocator for unique pointers.\n\/\/ This function must not unwind. 
If it does, MIR trans will fail.\n#[cfg(not(test))]\n#[lang = \"exchange_malloc\"]\n#[inline]\nunsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {\n if size == 0 {\n align as *mut u8\n } else {\n let layout = Layout::from_size_align_unchecked(size, align);\n Heap.alloc(layout).unwrap_or_else(|err| {\n Heap.oom(err)\n })\n }\n}\n\n#[cfg_attr(not(test), lang = \"box_free\")]\n#[inline]\npub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {\n let size = size_of_val(&*ptr);\n let align = min_align_of_val(&*ptr);\n \/\/ We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.\n if size != 0 {\n let layout = Layout::from_size_align_unchecked(size, align);\n Heap.dealloc(ptr as *mut u8, layout);\n }\n}\n\n#[cfg(test)]\nmod tests {\n extern crate test;\n use self::test::Bencher;\n use boxed::Box;\n use heap::{Heap, Alloc, Layout};\n\n #[test]\n fn allocate_zeroed() {\n unsafe {\n let layout = Layout::from_size_align(1024, 1).unwrap();\n let ptr = Heap.alloc_zeroed(layout.clone())\n .unwrap_or_else(|e| Heap.oom(e));\n\n let end = ptr.offset(layout.size() as isize);\n let mut i = ptr;\n while i < end {\n assert_eq!(*i, 0);\n i = i.offset(1);\n }\n Heap.dealloc(ptr, layout);\n }\n }\n\n #[bench]\n fn alloc_owned_small(b: &mut Bencher) {\n b.iter(|| {\n let _: Box<_> = box 10;\n })\n }\n}\n<commit_msg>Rename the Heap type to Global<commit_after>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(feature = \"allocator_api\",\n reason = \"the precise API and guarantees it provides may be tweaked \\\n slightly, especially to possibly take into account the \\\n types being stored to make room for a future \\\n tracing garbage collector\",\n issue = \"32838\")]\n\nuse core::intrinsics::{min_align_of_val, size_of_val};\nuse core::mem::{self, ManuallyDrop};\nuse core::usize;\n\n#[doc(inline)]\npub use core::alloc::*;\n\n#[doc(hidden)]\npub mod __core {\n pub use core::*;\n}\n\nextern \"Rust\" {\n #[allocator]\n #[rustc_allocator_nounwind]\n fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;\n #[cold]\n #[rustc_allocator_nounwind]\n fn __rust_oom(err: *const u8) -> !;\n #[rustc_allocator_nounwind]\n fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);\n #[rustc_allocator_nounwind]\n fn __rust_usable_size(layout: *const u8,\n min: *mut usize,\n max: *mut usize);\n #[rustc_allocator_nounwind]\n fn __rust_realloc(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_alloc_excess(size: usize,\n align: usize,\n excess: *mut usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_realloc_excess(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize,\n excess: *mut usize,\n err: *mut u8) -> *mut u8;\n #[rustc_allocator_nounwind]\n fn __rust_grow_in_place(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize) -> u8;\n #[rustc_allocator_nounwind]\n fn __rust_shrink_in_place(ptr: *mut u8,\n old_size: usize,\n old_align: usize,\n new_size: usize,\n new_align: usize) -> u8;\n}\n\n#[derive(Copy, Clone, Default, Debug)]\npub struct Global;\n\n#[unstable(feature = \"allocator_api\", issue = \"32838\")]\n#[rustc_deprecated(since = \"1.27.0\", reason = \"type renamed to `Global`\")]\npub use self::Global as Heap;\n\n\nunsafe impl Alloc for Global {\n #[inline]\n unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_alloc(layout.size(),\n layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(ptr)\n }\n }\n\n #[inline]\n #[cold]\n fn oom(&mut self, err: AllocErr) -> ! 
{\n unsafe {\n __rust_oom(&err as *const AllocErr as *const u8)\n }\n }\n\n #[inline]\n unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {\n __rust_dealloc(ptr, layout.size(), layout.align())\n }\n\n #[inline]\n fn usable_size(&self, layout: &Layout) -> (usize, usize) {\n let mut min = 0;\n let mut max = 0;\n unsafe {\n __rust_usable_size(layout as *const Layout as *const u8,\n &mut min,\n &mut max);\n }\n (min, max)\n }\n\n #[inline]\n unsafe fn realloc(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout)\n -> Result<*mut u8, AllocErr>\n {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_realloc(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n mem::forget(err);\n Ok(ptr)\n }\n }\n\n #[inline]\n unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let ptr = __rust_alloc_zeroed(layout.size(),\n layout.align(),\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(ptr)\n }\n }\n\n #[inline]\n unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let mut size = 0;\n let ptr = __rust_alloc_excess(layout.size(),\n layout.align(),\n &mut size,\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(Excess(ptr, size))\n }\n }\n\n #[inline]\n unsafe fn realloc_excess(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout) -> Result<Excess, AllocErr> {\n let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());\n let mut size = 0;\n let ptr = __rust_realloc_excess(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align(),\n &mut size,\n &mut *err as *mut AllocErr as *mut u8);\n if ptr.is_null() {\n Err(ManuallyDrop::into_inner(err))\n } else {\n Ok(Excess(ptr, size))\n }\n }\n\n #[inline]\n unsafe fn grow_in_place(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout)\n -> Result<(), CannotReallocInPlace>\n {\n debug_assert!(new_layout.size() >= layout.size());\n debug_assert!(new_layout.align() == layout.align());\n let ret = __rust_grow_in_place(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align());\n if ret != 0 {\n Ok(())\n } else {\n Err(CannotReallocInPlace)\n }\n }\n\n #[inline]\n unsafe fn shrink_in_place(&mut self,\n ptr: *mut u8,\n layout: Layout,\n new_layout: Layout) -> Result<(), CannotReallocInPlace> {\n debug_assert!(new_layout.size() <= layout.size());\n debug_assert!(new_layout.align() == layout.align());\n let ret = __rust_shrink_in_place(ptr,\n layout.size(),\n layout.align(),\n new_layout.size(),\n new_layout.align());\n if ret != 0 {\n Ok(())\n } else {\n Err(CannotReallocInPlace)\n }\n }\n}\n\n\/\/\/ The allocator for unique pointers.\n\/\/ This function must not unwind. 
If it does, MIR trans will fail.\n#[cfg(not(test))]\n#[lang = \"exchange_malloc\"]\n#[inline]\nunsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {\n if size == 0 {\n align as *mut u8\n } else {\n let layout = Layout::from_size_align_unchecked(size, align);\n Global.alloc(layout).unwrap_or_else(|err| {\n Global.oom(err)\n })\n }\n}\n\n#[cfg_attr(not(test), lang = \"box_free\")]\n#[inline]\npub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {\n let size = size_of_val(&*ptr);\n let align = min_align_of_val(&*ptr);\n \/\/ We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.\n if size != 0 {\n let layout = Layout::from_size_align_unchecked(size, align);\n Global.dealloc(ptr as *mut u8, layout);\n }\n}\n\n#[cfg(test)]\nmod tests {\n extern crate test;\n use self::test::Bencher;\n use boxed::Box;\n use heap::{Global, Alloc, Layout};\n\n #[test]\n fn allocate_zeroed() {\n unsafe {\n let layout = Layout::from_size_align(1024, 1).unwrap();\n let ptr = Global.alloc_zeroed(layout.clone())\n .unwrap_or_else(|e| Global.oom(e));\n\n let end = ptr.offset(layout.size() as isize);\n let mut i = ptr;\n while i < end {\n assert_eq!(*i, 0);\n i = i.offset(1);\n }\n Global.dealloc(ptr, layout);\n }\n }\n\n #[bench]\n fn alloc_owned_small(b: &mut Bencher) {\n b.iter(|| {\n let _: Box<_> = box 10;\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A module for working with borrowed data.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/\/ A trait for borrowing data.\n\/\/\/\n\/\/\/ In Rust, it is common to provide different representations of a type for\n\/\/\/ different use cases. For instance, storage location and management for a\n\/\/\/ value can be specifically chosen as appropriate for a particular use via\n\/\/\/ pointer types such as [`Box<T>`] or [`Rc<T>`]. Beyond these generic\n\/\/\/ wrappers that can be used with any type, some types provide optional\n\/\/\/ facets providing potentially costly functionality. An example for such a\n\/\/\/ type is [`String`] which adds the ability to extend a string to the basic\n\/\/\/ [`str`]. This requires keeping additional information unnecessary for a\n\/\/\/ simple, immutable string.\n\/\/\/\n\/\/\/ These types provide access to the underlying data through references\n\/\/\/ to the type of that data. They are said to be ‘borrowed as’ that type.\n\/\/\/ For instance, a [`Box<T>`] can be borrowed as `T` while a [`String`]\n\/\/\/ can be borrowed as `str`.\n\/\/\/\n\/\/\/ Types express that they can be borrowed as some type `T` by implementing\n\/\/\/ `Borrow<T>`, providing a reference to a `T` in the trait’s\n\/\/\/ [`borrow`] method. 
A type is free to borrow as several different types.\n\/\/\/ If it wishes to mutably borrow as the type – allowing the underlying data\n\/\/\/ to be modified, it can additionally implement [`BorrowMut<T>`].\n\/\/\/\n\/\/\/ Further, when providing implementations for additional traits, it needs\n\/\/\/ to be considered whether they should behave identical to those of the\n\/\/\/ underlying type as a consequence of acting as a representation of that\n\/\/\/ underlying type. Generic code typically uses `Borrow<T>` when it relies\n\/\/\/ on the identical behavior of these additional trait implementations.\n\/\/\/ These traits will likely appear as additional trait bounds.\n\/\/\/\n\/\/\/ If generic code merely needs to work for all types that can\n\/\/\/ provide a reference to related type `T`, it is often better to use\n\/\/\/ [`AsRef<T>`] as more types can safely implement it.\n\/\/\/\n\/\/\/ [`AsRef<T>`]: ..\/..\/std\/convert\/trait.AsRef.html\n\/\/\/ [`BorrowMut<T>`]: trait.BorrowMut.html\n\/\/\/ [`Box<T>`]: ..\/..\/std\/boxed\/struct.Box.html\n\/\/\/ [`Mutex<T>`]: ..\/..\/std\/sync\/struct.Mutex.html\n\/\/\/ [`Rc<T>`]: ..\/..\/std\/rc\/struct.Rc.html\n\/\/\/ [`str`]: ..\/..\/std\/primitive.str.html\n\/\/\/ [`String`]: ..\/..\/std\/string\/struct.String.html\n\/\/\/ [`borrow`]: #tymethod.borrow\n\/\/\/\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ As a data collection, [`HashMap<K, V>`] owns both keys and values. If\n\/\/\/ the key’s actual data is wrapped in a managing type of some kind, it\n\/\/\/ should, however, still be possible to search for a value using a\n\/\/\/ reference to the key’s data. For instance, if the key is a string, then\n\/\/\/ it is likely stored with the hash map as a [`String`], while it should\n\/\/\/ be possible to search using a [`&str`][`str`]. Thus, `insert` needs to\n\/\/\/ operate on a `String` while `get` needs to be able to use a `&str`.\n\/\/\/\n\/\/\/ Slightly simplified, the relevant parts of `HashMap<K, V>` look like\n\/\/\/ this:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::borrow::Borrow;\n\/\/\/ use std::hash::Hash;\n\/\/\/\n\/\/\/ pub struct HashMap<K, V> {\n\/\/\/ # marker: ::std::marker::PhantomData<(K, V)>,\n\/\/\/ \/\/ fields omitted\n\/\/\/ }\n\/\/\/\n\/\/\/ impl<K, V> HashMap<K, V> {\n\/\/\/ pub fn insert(&self, key: K, value: V) -> Option<V>\n\/\/\/ where K: Hash + Eq\n\/\/\/ {\n\/\/\/ # unimplemented!()\n\/\/\/ \/\/ ...\n\/\/\/ }\n\/\/\/\n\/\/\/ pub fn get<Q>(&self, k: &Q) -> Option<&V>\n\/\/\/ where\n\/\/\/ K: Borrow<Q>,\n\/\/\/ Q: Hash + Eq + ?Sized\n\/\/\/ {\n\/\/\/ # unimplemented!()\n\/\/\/ \/\/ ...\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ The entire hash map is generic over a key type `K`. Because these keys\n\/\/\/ are stored with the hash map, this type has to own the key’s data.\n\/\/\/ When inserting a key-value pair, the map is given such a `K` and needs\n\/\/\/ to find the correct hash bucket and check if the key is already present\n\/\/\/ based on that `K`. It therefore requires `K: Hash + Eq`.\n\/\/\/\n\/\/\/ When searching for a value in the map, however, having to provide a\n\/\/\/ reference to a `K` as the key to search for would require to always\n\/\/\/ create such an owned value. For string keys, this would mean a `String`\n\/\/\/ value needs to be created just for the search for cases where only a\n\/\/\/ `str` is available.\n\/\/\/\n\/\/\/ Instead, the `get` method is generic over the type of the underlying key\n\/\/\/ data, called `Q` in the method signature above. 
It states that `K`\n\/\/\/ borrows as a `Q` by requiring that `K: Borrow<Q>`. By additionally\n\/\/\/ requiring `Q: Hash + Eq`, it signals the requirement that `K` and `Q`\n\/\/\/ have implementations of the `Hash` and `Eq` traits that produce identical\n\/\/\/ results.\n\/\/\/\n\/\/\/ The implementation of `get` relies in particular on identical\n\/\/\/ implementations of `Hash` by determining the key’s hash bucket by calling\n\/\/\/ `Hash::hash` on the `Q` value even though it inserted the key based on\n\/\/\/ the hash value calculated from the `K` value.\n\/\/\/\n\/\/\/ As a consequence, the hash map breaks if a `K` wrapping a `Q` value\n\/\/\/ produces a different hash than `Q`. For instance, imagine you have a\n\/\/\/ type that wraps a string but compares ASCII letters ignoring their case:\n\/\/\/\n\/\/\/ ```\n\/\/\/ pub struct CaseInsensitiveString(String);\n\/\/\/\n\/\/\/ impl PartialEq for CaseInsensitiveString {\n\/\/\/ fn eq(&self, other: &Self) -> bool {\n\/\/\/ self.0.eq_ignore_ascii_case(&other.0)\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ impl Eq for CaseInsensitiveString { }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Because two equal values need to produce the same hash value, the\n\/\/\/ implementation of `Hash` needs to ignore ASCII case, too:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # use std::hash::{Hash, Hasher};\n\/\/\/ # pub struct CaseInsensitiveString(String);\n\/\/\/ impl Hash for CaseInsensitiveString {\n\/\/\/ fn hash<H: Hasher>(&self, state: &mut H) {\n\/\/\/ for c in self.0.as_bytes() {\n\/\/\/ c.to_ascii_lowercase().hash(state)\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Can `CaseInsensitiveString` implement `Borrow<str>`? It certainly can\n\/\/\/ provide a reference to a string slice via its contained owned string.\n\/\/\/ But because its `Hash` implementation differs, it behaves differently\n\/\/\/ from `str` and therefore must not, in fact, implement `Borrow<str>`.\n\/\/\/ If it wants to allow others access to the underlying `str`, it can do\n\/\/\/ that via `AsRef<str>` which doesn’t carry any extra requirements.\n\/\/\/\n\/\/\/ [`Hash`]: ..\/..\/std\/hash\/trait.Hash.html\n\/\/\/ [`HashMap<K, V>`]: ..\/..\/std\/collections\/struct.HashMap.html\n\/\/\/ [`String`]: ..\/..\/std\/string\/struct.String.html\n\/\/\/ [`str`]: ..\/..\/std\/primitive.str.html\n\/\/\/\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Borrow<Borrowed: ?Sized> {\n \/\/\/ Immutably borrows from an owned value.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Borrow;\n \/\/\/\n \/\/\/ fn check<T: Borrow<str>>(s: T) {\n \/\/\/ assert_eq!(\"Hello\", s.borrow());\n \/\/\/ }\n \/\/\/\n \/\/\/ let s = \"Hello\".to_string();\n \/\/\/\n \/\/\/ check(s);\n \/\/\/\n \/\/\/ let s = \"Hello\";\n \/\/\/\n \/\/\/ check(s);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn borrow(&self) -> &Borrowed;\n}\n\n\/\/\/ A trait for mutably borrowing data.\n\/\/\/\n\/\/\/ As a companion to [`Borrow<T>`] this trait allows a type to borrow as\n\/\/\/ an underlying type by providing a mutable reference. 
See [`Borrow<T>`]\n\/\/\/ for more information on borrowing as another type.\n\/\/\/\n\/\/\/ [`Borrow<T>`]: trait.Borrow.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait BorrowMut<Borrowed: ?Sized> : Borrow<Borrowed> {\n \/\/\/ Mutably borrows from an owned value.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::BorrowMut;\n \/\/\/\n \/\/\/ fn check<T: BorrowMut<[i32]>>(mut v: T) {\n \/\/\/ assert_eq!(&mut [1, 2, 3], v.borrow_mut());\n \/\/\/ }\n \/\/\/\n \/\/\/ let v = vec![1, 2, 3];\n \/\/\/\n \/\/\/ check(v);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn borrow_mut(&mut self) -> &mut Borrowed;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T: ?Sized> Borrow<T> for T {\n fn borrow(&self) -> &T { self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T: ?Sized> BorrowMut<T> for T {\n fn borrow_mut(&mut self) -> &mut T { self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> Borrow<T> for &'a T {\n fn borrow(&self) -> &T { &**self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> Borrow<T> for &'a mut T {\n fn borrow(&self) -> &T { &**self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> BorrowMut<T> for &'a mut T {\n fn borrow_mut(&mut self) -> &mut T { &mut **self }\n}\n<commit_msg>Fix formatting.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! A module for working with borrowed data.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\n\/\/\/ A trait for borrowing data.\n\/\/\/\n\/\/\/ In Rust, it is common to provide different representations of a type for\n\/\/\/ different use cases. For instance, storage location and management for a\n\/\/\/ value can be specifically chosen as appropriate for a particular use via\n\/\/\/ pointer types such as [`Box<T>`] or [`Rc<T>`]. Beyond these generic\n\/\/\/ wrappers that can be used with any type, some types provide optional\n\/\/\/ facets providing potentially costly functionality. An example for such a\n\/\/\/ type is [`String`] which adds the ability to extend a string to the basic\n\/\/\/ [`str`]. This requires keeping additional information unnecessary for a\n\/\/\/ simple, immutable string.\n\/\/\/\n\/\/\/ These types provide access to the underlying data through references\n\/\/\/ to the type of that data. They are said to be ‘borrowed as’ that type.\n\/\/\/ For instance, a [`Box<T>`] can be borrowed as `T` while a [`String`]\n\/\/\/ can be borrowed as `str`.\n\/\/\/\n\/\/\/ Types express that they can be borrowed as some type `T` by implementing\n\/\/\/ `Borrow<T>`, providing a reference to a `T` in the trait’s\n\/\/\/ [`borrow`] method. 
A type is free to borrow as several different types.\n\/\/\/ If it wishes to mutably borrow as the type – allowing the underlying data\n\/\/\/ to be modified, it can additionally implement [`BorrowMut<T>`].\n\/\/\/\n\/\/\/ Further, when providing implementations for additional traits, it needs\n\/\/\/ to be considered whether they should behave identical to those of the\n\/\/\/ underlying type as a consequence of acting as a representation of that\n\/\/\/ underlying type. Generic code typically uses `Borrow<T>` when it relies\n\/\/\/ on the identical behavior of these additional trait implementations.\n\/\/\/ These traits will likely appear as additional trait bounds.\n\/\/\/\n\/\/\/ If generic code merely needs to work for all types that can\n\/\/\/ provide a reference to related type `T`, it is often better to use\n\/\/\/ [`AsRef<T>`] as more types can safely implement it.\n\/\/\/\n\/\/\/ [`AsRef<T>`]: ..\/..\/std\/convert\/trait.AsRef.html\n\/\/\/ [`BorrowMut<T>`]: trait.BorrowMut.html\n\/\/\/ [`Box<T>`]: ..\/..\/std\/boxed\/struct.Box.html\n\/\/\/ [`Mutex<T>`]: ..\/..\/std\/sync\/struct.Mutex.html\n\/\/\/ [`Rc<T>`]: ..\/..\/std\/rc\/struct.Rc.html\n\/\/\/ [`str`]: ..\/..\/std\/primitive.str.html\n\/\/\/ [`String`]: ..\/..\/std\/string\/struct.String.html\n\/\/\/ [`borrow`]: #tymethod.borrow\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ As a data collection, [`HashMap<K, V>`] owns both keys and values. If\n\/\/\/ the key’s actual data is wrapped in a managing type of some kind, it\n\/\/\/ should, however, still be possible to search for a value using a\n\/\/\/ reference to the key’s data. For instance, if the key is a string, then\n\/\/\/ it is likely stored with the hash map as a [`String`], while it should\n\/\/\/ be possible to search using a [`&str`][`str`]. Thus, `insert` needs to\n\/\/\/ operate on a `String` while `get` needs to be able to use a `&str`.\n\/\/\/\n\/\/\/ Slightly simplified, the relevant parts of `HashMap<K, V>` look like\n\/\/\/ this:\n\/\/\/\n\/\/\/ ```\n\/\/\/ use std::borrow::Borrow;\n\/\/\/ use std::hash::Hash;\n\/\/\/\n\/\/\/ pub struct HashMap<K, V> {\n\/\/\/ # marker: ::std::marker::PhantomData<(K, V)>,\n\/\/\/ \/\/ fields omitted\n\/\/\/ }\n\/\/\/\n\/\/\/ impl<K, V> HashMap<K, V> {\n\/\/\/ pub fn insert(&self, key: K, value: V) -> Option<V>\n\/\/\/ where K: Hash + Eq\n\/\/\/ {\n\/\/\/ # unimplemented!()\n\/\/\/ \/\/ ...\n\/\/\/ }\n\/\/\/\n\/\/\/ pub fn get<Q>(&self, k: &Q) -> Option<&V>\n\/\/\/ where\n\/\/\/ K: Borrow<Q>,\n\/\/\/ Q: Hash + Eq + ?Sized\n\/\/\/ {\n\/\/\/ # unimplemented!()\n\/\/\/ \/\/ ...\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ The entire hash map is generic over a key type `K`. Because these keys\n\/\/\/ are stored with the hash map, this type has to own the key’s data.\n\/\/\/ When inserting a key-value pair, the map is given such a `K` and needs\n\/\/\/ to find the correct hash bucket and check if the key is already present\n\/\/\/ based on that `K`. It therefore requires `K: Hash + Eq`.\n\/\/\/\n\/\/\/ When searching for a value in the map, however, having to provide a\n\/\/\/ reference to a `K` as the key to search for would require to always\n\/\/\/ create such an owned value. For string keys, this would mean a `String`\n\/\/\/ value needs to be created just for the search for cases where only a\n\/\/\/ `str` is available.\n\/\/\/\n\/\/\/ Instead, the `get` method is generic over the type of the underlying key\n\/\/\/ data, called `Q` in the method signature above. 
It states that `K`\n\/\/\/ borrows as a `Q` by requiring that `K: Borrow<Q>`. By additionally\n\/\/\/ requiring `Q: Hash + Eq`, it signals the requirement that `K` and `Q`\n\/\/\/ have implementations of the `Hash` and `Eq` traits that produce identical\n\/\/\/ results.\n\/\/\/\n\/\/\/ The implementation of `get` relies in particular on identical\n\/\/\/ implementations of `Hash` by determining the key’s hash bucket by calling\n\/\/\/ `Hash::hash` on the `Q` value even though it inserted the key based on\n\/\/\/ the hash value calculated from the `K` value.\n\/\/\/\n\/\/\/ As a consequence, the hash map breaks if a `K` wrapping a `Q` value\n\/\/\/ produces a different hash than `Q`. For instance, imagine you have a\n\/\/\/ type that wraps a string but compares ASCII letters ignoring their case:\n\/\/\/\n\/\/\/ ```\n\/\/\/ pub struct CaseInsensitiveString(String);\n\/\/\/\n\/\/\/ impl PartialEq for CaseInsensitiveString {\n\/\/\/ fn eq(&self, other: &Self) -> bool {\n\/\/\/ self.0.eq_ignore_ascii_case(&other.0)\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ impl Eq for CaseInsensitiveString { }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Because two equal values need to produce the same hash value, the\n\/\/\/ implementation of `Hash` needs to ignore ASCII case, too:\n\/\/\/\n\/\/\/ ```\n\/\/\/ # use std::hash::{Hash, Hasher};\n\/\/\/ # pub struct CaseInsensitiveString(String);\n\/\/\/ impl Hash for CaseInsensitiveString {\n\/\/\/ fn hash<H: Hasher>(&self, state: &mut H) {\n\/\/\/ for c in self.0.as_bytes() {\n\/\/\/ c.to_ascii_lowercase().hash(state)\n\/\/\/ }\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\n\/\/\/\n\/\/\/ Can `CaseInsensitiveString` implement `Borrow<str>`? It certainly can\n\/\/\/ provide a reference to a string slice via its contained owned string.\n\/\/\/ But because its `Hash` implementation differs, it behaves differently\n\/\/\/ from `str` and therefore must not, in fact, implement `Borrow<str>`.\n\/\/\/ If it wants to allow others access to the underlying `str`, it can do\n\/\/\/ that via `AsRef<str>` which doesn’t carry any extra requirements.\n\/\/\/\n\/\/\/ [`Hash`]: ..\/..\/std\/hash\/trait.Hash.html\n\/\/\/ [`HashMap<K, V>`]: ..\/..\/std\/collections\/struct.HashMap.html\n\/\/\/ [`String`]: ..\/..\/std\/string\/struct.String.html\n\/\/\/ [`str`]: ..\/..\/std\/primitive.str.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Borrow<Borrowed: ?Sized> {\n \/\/\/ Immutably borrows from an owned value.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::Borrow;\n \/\/\/\n \/\/\/ fn check<T: Borrow<str>>(s: T) {\n \/\/\/ assert_eq!(\"Hello\", s.borrow());\n \/\/\/ }\n \/\/\/\n \/\/\/ let s = \"Hello\".to_string();\n \/\/\/\n \/\/\/ check(s);\n \/\/\/\n \/\/\/ let s = \"Hello\";\n \/\/\/\n \/\/\/ check(s);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn borrow(&self) -> &Borrowed;\n}\n\n\/\/\/ A trait for mutably borrowing data.\n\/\/\/\n\/\/\/ As a companion to [`Borrow<T>`] this trait allows a type to borrow as\n\/\/\/ an underlying type by providing a mutable reference. 
See [`Borrow<T>`]\n\/\/\/ for more information on borrowing as another type.\n\/\/\/\n\/\/\/ [`Borrow<T>`]: trait.Borrow.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait BorrowMut<Borrowed: ?Sized> : Borrow<Borrowed> {\n \/\/\/ Mutably borrows from an owned value.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use std::borrow::BorrowMut;\n \/\/\/\n \/\/\/ fn check<T: BorrowMut<[i32]>>(mut v: T) {\n \/\/\/ assert_eq!(&mut [1, 2, 3], v.borrow_mut());\n \/\/\/ }\n \/\/\/\n \/\/\/ let v = vec![1, 2, 3];\n \/\/\/\n \/\/\/ check(v);\n \/\/\/ ```\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn borrow_mut(&mut self) -> &mut Borrowed;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T: ?Sized> Borrow<T> for T {\n fn borrow(&self) -> &T { self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<T: ?Sized> BorrowMut<T> for T {\n fn borrow_mut(&mut self) -> &mut T { self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> Borrow<T> for &'a T {\n fn borrow(&self) -> &T { &**self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> Borrow<T> for &'a mut T {\n fn borrow(&self) -> &T { &**self }\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl<'a, T: ?Sized> BorrowMut<T> for &'a mut T {\n fn borrow_mut(&mut self) -> &mut T { &mut **self }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add attribute struct in AST. Add some omitted grammar in AST structs.<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::BytesContainer;\nuse std::str::from_utf8;\nuse std::io::{IoError, IoResult, FileNotFound};\n\nuse http::server::request::AbsolutePath;\nuse http::method::{Get, Head};\nuse http::status::{ InternalServerError };\n\nuse request;\nuse response;\nuse middleware::{Action, Halt, Continue, Middleware};\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\n\n\/\/ this should be much simpler after unboxed closures land in Rust.\n\n#[deriving(Clone)]\npub struct StaticFilesHandler {\n root_path: Path\n}\n\nimpl Middleware for StaticFilesHandler {\n fn invoke (&self, req: &mut request::Request, res: &mut response::Response) -> Result<Action, NickelError> {\n match req.origin.method {\n Get | Head => {\n match self.with_file(self.extract_path(req), res) {\n Ok(()) => Ok(Halt),\n Err(err) => match err.kind {\n \/\/ We shouldn't assume the StaticFileHandler to be the last middleware in the stack.\n \/\/ Therefore it's important to continue in case of FileNotFound errors.\n FileNotFound => Ok(Continue),\n _ => Err(NickelError::new(\"Unknown Error\", ErrorWithStatusCode(InternalServerError)))\n }\n }\n },\n _ => Ok(Continue)\n }\n }\n}\n\nimpl StaticFilesHandler {\n pub fn new (root_path: &str) -> StaticFilesHandler {\n let checked_path = Path::new(root_path);\n StaticFilesHandler {\n root_path: checked_path\n }\n }\n\n fn extract_path(&self, req: &mut request::Request) -> Option<String> {\n match req.origin.request_uri {\n AbsolutePath(ref path) => {\n println!(\"{} {}{}\",req.origin.method, from_utf8(self.root_path.container_as_bytes()).unwrap(), path);\n let mut relative_path = path.clone();\n if relative_path.eq(&\"\/\".to_string()) {\n relative_path = \"index.html\".to_string();\n } else {\n relative_path.shift_char();\n }\n Some(relative_path)\n }\n _ => None\n }\n }\n\n fn with_file(&self, relative_path: Option<String>, res: &mut response::Response) -> IoResult<()> {\n match relative_path {\n Some(path) => 
res.send_file(&self.root_path.join(Path::new(path))),\n None => Err(IoError::last_error())\n }\n }\n}\n<commit_msg>static_files: Simplify + less allocation.<commit_after>use std::path::BytesContainer;\nuse std::io::{IoError, IoResult, FileNotFound};\n\nuse http::server::request::AbsolutePath;\nuse http::method::{Get, Head};\nuse http::status::{ InternalServerError };\n\nuse request;\nuse response;\nuse middleware::{Action, Halt, Continue, Middleware};\nuse nickel_error::{ NickelError, ErrorWithStatusCode };\n\n\/\/ this should be much simpler after unboxed closures land in Rust.\n\n#[deriving(Clone)]\npub struct StaticFilesHandler {\n root_path: Path\n}\n\nimpl Middleware for StaticFilesHandler {\n fn invoke (&self, req: &mut request::Request, res: &mut response::Response)\n -> Result<Action, NickelError> {\n match req.origin.method {\n Get | Head => {\n match self.with_file(self.extract_path(req), res) {\n Ok(()) => Ok(Halt),\n Err(err) => match err.kind {\n \/\/ We shouldn't assume the StaticFileHandler to be the last middleware in the stack.\n \/\/ Therefore it's important to continue in case of FileNotFound errors.\n FileNotFound => Ok(Continue),\n _ => Err(NickelError::new(format!(\"Unknown Error ({})\", err),\n ErrorWithStatusCode(InternalServerError)))\n }\n }\n },\n _ => Ok(Continue)\n }\n }\n}\n\nimpl StaticFilesHandler {\n pub fn new (root_path: &str) -> StaticFilesHandler {\n StaticFilesHandler {\n root_path: Path::new(root_path)\n }\n }\n\n fn extract_path<'a>(&self, req: &'a mut request::Request) -> Option<&'a str> {\n match req.origin.request_uri {\n AbsolutePath(ref path) => {\n println!(\"{} {}{}\", req.origin.method, self.root_path.display(), path);\n\n match path.as_slice() {\n \"\/\" => Some(\"index.html\"),\n path => Some(path.slice_from(1)),\n }\n }\n _ => None\n }\n }\n\n fn with_file<T: BytesContainer>(&self, relative_path: Option<T>, res: &mut response::Response)\n -> IoResult<()> {\n match relative_path {\n Some(path) => res.send_file(&self.root_path.join(path)),\n None => Err(IoError::last_error())\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::cell::{Cell, RefCell};\nuse std::collections::vec_deque::{Drain, VecDeque};\nuse std::convert::From;\nuse std::rc::Rc;\n\nuse dbus::Path;\nuse dbus::tree::{DataType, MTFn, ObjectPath};\n\nuse uuid::Uuid;\n\nuse engine::Engine;\n\nmacro_attr! 
{\n #[derive(Clone, Copy)]\n #[allow(non_camel_case_types)]\n pub enum DbusErrorEnum {\n OK,\n ERROR,\n\n ALREADY_EXISTS,\n BUSY,\n IO_ERROR,\n INTERNAL_ERROR,\n NIX_ERROR,\n NOTFOUND,\n }\n}\n\n\/\/\/ Get the u16 value of this ErrorEnum constructor.\nimpl From<DbusErrorEnum> for u16 {\n fn from(e: DbusErrorEnum) -> u16 {\n e as u16\n }\n}\n\nimpl DbusErrorEnum {\n pub fn get_error_string(&self) -> &str {\n match *self {\n DbusErrorEnum::OK => \"Ok\",\n DbusErrorEnum::ERROR => \"A general error happened\",\n DbusErrorEnum::ALREADY_EXISTS => \"Already exists\",\n DbusErrorEnum::BUSY => \"Operation can not be performed at this time\",\n DbusErrorEnum::INTERNAL_ERROR => \"Internal error\",\n DbusErrorEnum::IO_ERROR => \"IO error during operation\",\n DbusErrorEnum::NIX_ERROR => \"System error during operation\",\n DbusErrorEnum::NOTFOUND => \"Not found\",\n }\n }\n}\n\n#[derive(Debug)]\npub enum DeferredAction {\n Add(ObjectPath<MTFn<TData>, TData>),\n Remove(Path<'static>),\n}\n\n\/\/\/ Context for an object path.\n\/\/\/ Contains the object path of the parent as a Path and the UUID of the\n\/\/\/ object itself.\n#[derive(Debug)]\npub struct OPContext {\n pub parent: Path<'static>,\n pub uuid: Uuid,\n}\n\nimpl OPContext {\n pub fn new(parent: Path<'static>, uuid: Uuid) -> OPContext {\n OPContext {\n parent: parent,\n uuid: uuid,\n }\n }\n}\n\n#[derive(Debug, Clone)]\npub struct DbusContext {\n pub next_index: Rc<Cell<u64>>,\n pub engine: Rc<RefCell<Engine>>,\n pub actions: Rc<RefCell<ActionQueue>>,\n}\n\nimpl DbusContext {\n pub fn new(engine: Rc<RefCell<Engine>>) -> DbusContext {\n DbusContext {\n actions: Rc::new(RefCell::new(ActionQueue::default())),\n engine: engine,\n next_index: Rc::new(Cell::new(0)),\n }\n }\n\n \/\/\/ Generates a new id for object paths.\n \/\/\/ It is assumed that, while Stratisd is running, it will never generate\n \/\/\/ more than 2^64 object paths. If it turns out that this is a bad\n \/\/\/ assumption, the solution is to use unbounded integers.\n pub fn get_next_id(&self) -> u64 {\n self.next_index.set(self.next_index.get() + 1);\n self.next_index.get()\n }\n}\n\n#[derive(Default, Debug)]\npub struct TData;\nimpl DataType for TData {\n type ObjectPath = Option<OPContext>;\n type Property = ();\n type Interface = ();\n type Method = ();\n type Signal = ();\n type Tree = DbusContext;\n}\n\n\/\/\/ An action queue.\n\/\/\/ Add and remove actions are pushed onto the queue.\n\/\/\/ The queue can also be drained.\n#[derive(Debug, Default)]\npub struct ActionQueue {\n queue: VecDeque<DeferredAction>,\n}\n\nimpl ActionQueue {\n \/\/\/ Push an Add action onto the back of the queue.\n pub fn push_add(&mut self, object_path: ObjectPath<MTFn<TData>, TData>) {\n self.queue.push_back(DeferredAction::Add(object_path))\n }\n\n \/\/\/ Push a Remove action onto the back of the queue.\n pub fn push_remove(&mut self, object_path: Path<'static>) {\n self.queue.push_back(DeferredAction::Remove(object_path))\n }\n\n \/\/\/ Drain the queue.\n pub fn drain(&mut self) -> Drain<DeferredAction> {\n self.queue.drain(..)\n }\n}\n<commit_msg>Have DbusErrorEnum derive Debug<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::cell::{Cell, RefCell};\nuse std::collections::vec_deque::{Drain, VecDeque};\nuse std::convert::From;\nuse std::rc::Rc;\n\nuse dbus::Path;\nuse dbus::tree::{DataType, MTFn, ObjectPath};\n\nuse uuid::Uuid;\n\nuse engine::Engine;\n\nmacro_attr! {\n #[derive(Clone, Copy, Debug)]\n #[allow(non_camel_case_types)]\n pub enum DbusErrorEnum {\n OK,\n ERROR,\n\n ALREADY_EXISTS,\n BUSY,\n IO_ERROR,\n INTERNAL_ERROR,\n NIX_ERROR,\n NOTFOUND,\n }\n}\n\n\/\/\/ Get the u16 value of this ErrorEnum constructor.\nimpl From<DbusErrorEnum> for u16 {\n fn from(e: DbusErrorEnum) -> u16 {\n e as u16\n }\n}\n\nimpl DbusErrorEnum {\n pub fn get_error_string(&self) -> &str {\n match *self {\n DbusErrorEnum::OK => \"Ok\",\n DbusErrorEnum::ERROR => \"A general error happened\",\n DbusErrorEnum::ALREADY_EXISTS => \"Already exists\",\n DbusErrorEnum::BUSY => \"Operation can not be performed at this time\",\n DbusErrorEnum::INTERNAL_ERROR => \"Internal error\",\n DbusErrorEnum::IO_ERROR => \"IO error during operation\",\n DbusErrorEnum::NIX_ERROR => \"System error during operation\",\n DbusErrorEnum::NOTFOUND => \"Not found\",\n }\n }\n}\n\n#[derive(Debug)]\npub enum DeferredAction {\n Add(ObjectPath<MTFn<TData>, TData>),\n Remove(Path<'static>),\n}\n\n\/\/\/ Context for an object path.\n\/\/\/ Contains the object path of the parent as a Path and the UUID of the\n\/\/\/ object itself.\n#[derive(Debug)]\npub struct OPContext {\n pub parent: Path<'static>,\n pub uuid: Uuid,\n}\n\nimpl OPContext {\n pub fn new(parent: Path<'static>, uuid: Uuid) -> OPContext {\n OPContext {\n parent: parent,\n uuid: uuid,\n }\n }\n}\n\n#[derive(Debug, Clone)]\npub struct DbusContext {\n pub next_index: Rc<Cell<u64>>,\n pub engine: Rc<RefCell<Engine>>,\n pub actions: Rc<RefCell<ActionQueue>>,\n}\n\nimpl DbusContext {\n pub fn new(engine: Rc<RefCell<Engine>>) -> DbusContext {\n DbusContext {\n actions: Rc::new(RefCell::new(ActionQueue::default())),\n engine: engine,\n next_index: Rc::new(Cell::new(0)),\n }\n }\n\n \/\/\/ Generates a new id for object paths.\n \/\/\/ It is assumed that, while Stratisd is running, it will never generate\n \/\/\/ more than 2^64 object paths. 
If it turns out that this is a bad\n \/\/\/ assumption, the solution is to use unbounded integers.\n pub fn get_next_id(&self) -> u64 {\n self.next_index.set(self.next_index.get() + 1);\n self.next_index.get()\n }\n}\n\n#[derive(Default, Debug)]\npub struct TData;\nimpl DataType for TData {\n type ObjectPath = Option<OPContext>;\n type Property = ();\n type Interface = ();\n type Method = ();\n type Signal = ();\n type Tree = DbusContext;\n}\n\n\/\/\/ An action queue.\n\/\/\/ Add and remove actions are pushed onto the queue.\n\/\/\/ The queue can also be drained.\n#[derive(Debug, Default)]\npub struct ActionQueue {\n queue: VecDeque<DeferredAction>,\n}\n\nimpl ActionQueue {\n \/\/\/ Push an Add action onto the back of the queue.\n pub fn push_add(&mut self, object_path: ObjectPath<MTFn<TData>, TData>) {\n self.queue.push_back(DeferredAction::Add(object_path))\n }\n\n \/\/\/ Push a Remove action onto the back of the queue.\n pub fn push_remove(&mut self, object_path: Path<'static>) {\n self.queue.push_back(DeferredAction::Remove(object_path))\n }\n\n \/\/\/ Drain the queue.\n pub fn drain(&mut self) -> Drain<DeferredAction> {\n self.queue.drain(..)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reduce references to glslangValidator<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test that exercises these cases in bind.<commit_after>\/\/ xfail-stage0\n\nfn fix_help[A,B](@fn (@fn (&A) -> B, &A) -> B f, &A x) -> B {\n ret f(@bind fix_help(f, _), x);\n}\n\nfn fix[A,B](@fn (@fn (&A) -> B, &A) -> B f) -> (@fn(&A) -> B) {\n ret @bind fix_help(f, _);\n}\n\nfn fact_(@fn (&int) -> int f, &int n) -> int {\n \/\/ fun fact 0 = 1\n ret if (n == 0) { 1 } else { n*f(n-1) };\n}\n\nfn main() {\n auto fact = fix(@fact_);\n assert(fact(5) == 120);\n assert(fact(2) == 2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>change controller url<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unused imports and debug code and switch to logging macros<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Platform-dependent platform abstraction\n\/\/!\n\/\/! The `std::sys` module is the abstracted interface through which\n\/\/! `std` talks to the underlying operating system. It has different\n\/\/! implementations for different operating system families, today\n\/\/! just Unix and Windows, and initial support for Redox.\n\/\/!\n\/\/! The centralization of platform-specific code in this module is\n\/\/! enforced by the \"platform abstraction layer\" tidy script in\n\/\/! `tools\/tidy\/src\/pal.rs`.\n\/\/!\n\/\/! This module is closely related to the platform-independent system\n\/\/! integration code in `std::sys_common`. See that module's\n\/\/! documentation for details.\n\/\/!\n\/\/! In the future it would be desirable for the independent\n\/\/! implementations of this module to be extracted to their own crates\n\/\/! that `std` can link to, thus enabling their implementation\n\/\/! out-of-tree via crate replacement. 
Though due to the complex\n\/\/! inter-dependencies within `std` that will be a challenging goal to\n\/\/! achieve.\n\n#![allow(missing_debug_implementations)]\n\ncfg_if! {\n if #[cfg(unix)] {\n mod unix;\n pub use self::unix::*;\n } else if #[cfg(windows)] {\n mod windows;\n pub use self::windows::*;\n } else if #[cfg(target_os = \"cloudabi\")] {\n mod cloudabi;\n pub use self::cloudabi::*;\n } else if #[cfg(target_os = \"redox\")] {\n mod redox;\n pub use self::redox::*;\n } else if #[cfg(target_arch = \"wasm32\")] {\n mod wasm;\n pub use self::wasm::*;\n } else {\n compile_error!(\"libstd doesn't compile for this platform yet\");\n }\n}\n\n\/\/ Import essential modules from both platforms when documenting. These are\n\/\/ then later used in the `std::os` module when documenting, for example,\n\/\/ Windows when we're compiling for Linux.\n\n#[cfg(dox)]\ncfg_if! {\n if #[cfg(any(unix, target_os = \"redox\"))] {\n \/\/ On unix we'll document what's already available\n pub use self::ext as unix_ext;\n } else if #[cfg(target_arch = \"wasm32\")] {\n \/\/ On wasm right now the module below doesn't compile (missing things\n \/\/ in `libc` which is empty) so just omit everything with an empty module\n #[unstable(issue = \"0\", feature = \"std_internals\")]\n pub mod unix_ext {}\n } else {\n \/\/ On other platforms like Windows document the bare bones of unix\n use os::linux as platform;\n #[path = \"unix\/ext\/mod.rs\"]\n pub mod unix_ext;\n }\n}\n\n#[cfg(dox)]\ncfg_if! {\n if #[cfg(windows)] {\n \/\/ On windows we'll just be documenting what's already available\n pub use self::ext as windows_ext;\n } else if #[cfg(target_arch = \"wasm32\")] {\n \/\/ On wasm right now the shim below doesn't compile, so just omit it\n #[unstable(issue = \"0\", feature = \"std_internals\")]\n pub mod windows_ext {}\n } else {\n \/\/ On all other platforms (aka linux\/osx\/etc) then pull in a \"minimal\"\n \/\/ amount of windows goop which ends up compiling\n #[macro_use]\n #[path = \"windows\/compat.rs\"]\n mod compat;\n\n #[path = \"windows\/c.rs\"]\n mod c;\n\n #[path = \"windows\/ext\/mod.rs\"]\n pub mod windows_ext;\n }\n}\n<commit_msg>Make the documentation build work on CloudABI.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Platform-dependent platform abstraction\n\/\/!\n\/\/! The `std::sys` module is the abstracted interface through which\n\/\/! `std` talks to the underlying operating system. It has different\n\/\/! implementations for different operating system families, today\n\/\/! just Unix and Windows, and initial support for Redox.\n\/\/!\n\/\/! The centralization of platform-specific code in this module is\n\/\/! enforced by the \"platform abstraction layer\" tidy script in\n\/\/! `tools\/tidy\/src\/pal.rs`.\n\/\/!\n\/\/! This module is closely related to the platform-independent system\n\/\/! integration code in `std::sys_common`. See that module's\n\/\/! documentation for details.\n\/\/!\n\/\/! In the future it would be desirable for the independent\n\/\/! 
implementations of this module to be extracted to their own crates\n\/\/! that `std` can link to, thus enabling their implementation\n\/\/! out-of-tree via crate replacement. Though due to the complex\n\/\/! inter-dependencies within `std` that will be a challenging goal to\n\/\/! achieve.\n\n#![allow(missing_debug_implementations)]\n\ncfg_if! {\n if #[cfg(unix)] {\n mod unix;\n pub use self::unix::*;\n } else if #[cfg(windows)] {\n mod windows;\n pub use self::windows::*;\n } else if #[cfg(target_os = \"cloudabi\")] {\n mod cloudabi;\n pub use self::cloudabi::*;\n } else if #[cfg(target_os = \"redox\")] {\n mod redox;\n pub use self::redox::*;\n } else if #[cfg(target_arch = \"wasm32\")] {\n mod wasm;\n pub use self::wasm::*;\n } else {\n compile_error!(\"libstd doesn't compile for this platform yet\");\n }\n}\n\n\/\/ Import essential modules from both platforms when documenting. These are\n\/\/ then later used in the `std::os` module when documenting, for example,\n\/\/ Windows when we're compiling for Linux.\n\n#[cfg(dox)]\ncfg_if! {\n if #[cfg(any(unix, target_os = \"redox\"))] {\n \/\/ On unix we'll document what's already available\n pub use self::ext as unix_ext;\n } else if #[cfg(any(target_os = \"cloudabi\", target_arch = \"wasm32\"))] {\n \/\/ On CloudABI and wasm right now the module below doesn't compile\n \/\/ (missing things in `libc` which is empty) so just omit everything\n \/\/ with an empty module\n #[unstable(issue = \"0\", feature = \"std_internals\")]\n pub mod unix_ext {}\n } else {\n \/\/ On other platforms like Windows document the bare bones of unix\n use os::linux as platform;\n #[path = \"unix\/ext\/mod.rs\"]\n pub mod unix_ext;\n }\n}\n\n#[cfg(dox)]\ncfg_if! {\n if #[cfg(windows)] {\n \/\/ On windows we'll just be documenting what's already available\n pub use self::ext as windows_ext;\n } else if #[cfg(any(target_os = \"cloudabi\", target_arch = \"wasm32\"))] {\n \/\/ On CloudABI and wasm right now the shim below doesn't compile, so\n \/\/ just omit it\n #[unstable(issue = \"0\", feature = \"std_internals\")]\n pub mod windows_ext {}\n } else {\n \/\/ On all other platforms (aka linux\/osx\/etc) then pull in a \"minimal\"\n \/\/ amount of windows goop which ends up compiling\n #[macro_use]\n #[path = \"windows\/compat.rs\"]\n mod compat;\n\n #[path = \"windows\/c.rs\"]\n mod c;\n\n #[path = \"windows\/ext\/mod.rs\"]\n pub mod windows_ext;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add all metadata block data types<commit_after>pub enum BlockData<'a> {\n StreamInfo(StreamInfo<'a>),\n Padding(u32),\n Application(Application<'a>),\n SeekTable(Vec<SeekPoint>),\n VorbisComment(VorbisComment<'a>),\n CueSheet(CueSheet<'a>),\n Picture(Picture<'a>),\n Unknown(&'a [u8]),\n}\n\npub struct StreamInfo<'a> {\n pub min_block_size: u16,\n pub max_block_size: u16,\n pub min_frame_size: u32,\n pub max_frame_size: u32,\n pub sample_rate: u32,\n pub channels: u8,\n pub bits_per_sample: u8,\n pub total_samples: u64,\n pub md5_sum: &'a str,\n}\n\npub struct Application<'a> {\n pub id: &'a str,\n pub data: &'a [u8],\n}\n\npub struct SeekPoint {\n pub sample_number: u64,\n pub stream_offset: u64,\n pub frame_samples: u16,\n}\n\npub struct VorbisComment<'a> {\n pub vendor_string: &'a str,\n pub comments: Vec<&'a str>,\n}\n\npub struct CueSheet<'a> {\n pub media_catalog_number: &'a str,\n pub lead_in: u64,\n pub is_cd: bool,\n pub tracks: Vec<CueSheetTrack<'a>>,\n}\n\npub struct CueSheetTrack<'a> {\n pub offset: u64,\n pub number: u8,\n pub isrc: &'a str,\n pub 
isnt_audio: bool,\n pub is_pre_emphasis: bool,\n pub indices: Vec<CueSheetTrackIndex>,\n}\n\npub struct CueSheetTrackIndex {\n pub offset: u64,\n pub number: u8,\n}\n\npub struct Picture<'a> {\n pub picture_type: PictureType,\n pub mime_type: &'a str,\n pub description: &'a str,\n pub width: u32,\n pub height: u32,\n pub depth: u32,\n pub colors: u32,\n pub data: &'a [u8],\n}\n\npub enum PictureType {\n Other,\n FileIconStandard,\n FileIcon,\n FrontCover,\n BackCover,\n LeafletPage,\n Media,\n LeadArtist,\n Artist,\n Conductor,\n Band,\n Composer,\n Lyricist,\n RecordingLocation,\n DuringRecording,\n DuringPerformace,\n VideoScreenCapture,\n Fish,\n Illustration,\n BandLogoType,\n PublisherLogoType,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create a stdout logging example<commit_after>\/\/\/ An example of sending data to a Prometheus server with a local webserver\nextern crate iron;\nextern crate metrics;\nextern crate histogram;\n\nuse iron::prelude::*;\nuse iron::status;\nuse metrics::metrics::{Counter, Gauge, Meter, Metric, StdCounter, StdGauge, StdMeter};\nuse metrics::registry::{Registry, StdRegistry};\nuse metrics::reporter::ConsoleReporter;\nuse std::sync::Arc;\nuse histogram::*;\nuse std::thread;\n\nfn main() {\n println!(\"WebServer Starting\");\n extern crate hyper;\n thread::spawn(|| {\n let m = StdMeter::new();\n m.mark(100);\n\n let c = StdCounter::new();\n c.inc();\n\n let g = StdGauge::new();\n g.set(1);\n let mut h = Histogram::configure().max_value(100).precision(1).build().unwrap();\n h.increment_by(1, 1).unwrap();\n\n\n let mut r = StdRegistry::new();\n r.insert(\"meter1\", Metric::Meter(m.clone()));\n r.insert(\"counter1\", Metric::Counter(c.clone()));\n r.insert(\"gauge1\", Metric::Gauge(g.clone()));\n r.insert(\"histogram\", Metric::Histogram(h));\n\n let arc_registry = Arc::new(r);\n let reporter = ConsoleReporter::new(arc_registry, \"test\");\n reporter.start(500);\n loop {\n c.inc()\n }\n });\n Iron::new(|_: &mut Request| Ok(Response::with(status::NotFound)))\n .http(\"0.0.0.0:3000\")\n .unwrap();\n println!(\"WebServer Running\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(sync): cut down on dup logging a bit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix pointer deref<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ FIXME: This needs an audit for correctness and completeness.\n\nuse abi::{FnType, ArgType, LayoutExt, Reg, RegKind, Uniform};\nuse context::CodegenCx;\n\nfn is_homogeneous_aggregate<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>)\n -> Option<Uniform> {\n arg.layout.homogeneous_aggregate(cx).and_then(|unit| {\n \/\/ Ensure we have at most eight uniquely addressable members.\n if arg.layout.size > unit.size.checked_mul(8, cx).unwrap() {\n return None;\n }\n\n let valid_unit = match unit.kind {\n RegKind::Integer => false,\n RegKind::Float => true,\n RegKind::Vector => arg.layout.size.bits() == 128\n };\n\n if valid_unit {\n Some(Uniform {\n unit,\n total: arg.layout.size\n })\n } else {\n None\n }\n })\n}\n\nfn classify_ret_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &mut ArgType<'tcx>) {\n if !ret.layout.is_aggregate() {\n ret.extend_integer_width_to(64);\n return;\n }\n\n if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {\n ret.cast_to(uniform);\n return;\n }\n let size = ret.layout.size;\n let bits = size.bits();\n if bits <= 256 {\n let unit = if bits <= 8 {\n Reg::i8()\n } else if bits <= 16 {\n Reg::i16()\n } else if bits <= 32 {\n Reg::i32()\n } else {\n Reg::i64()\n };\n\n ret.cast_to(Uniform {\n unit,\n total: size\n });\n return;\n }\n\n \/\/ don't return aggregates in registers\n ret.make_indirect();\n}\n\nfn classify_arg_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>) {\n if !arg.layout.is_aggregate() {\n arg.extend_integer_width_to(64);\n return;\n }\n\n if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {\n arg.cast_to(uniform);\n return;\n }\n\n let total = arg.layout.size;\n if total.bits() > 128 {\n arg.make_indirect(cx);\n return;\n }\n\n arg.cast_to(Uniform {\n unit: Reg::i64(),\n total\n });\n}\n\npub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tcx>) {\n if !fty.ret.is_ignore() {\n classify_ret_ty(cx, &mut fty.ret);\n }\n\n for arg in &mut fty.args {\n if arg.is_ignore() { continue; }\n classify_arg_ty(cx, arg);\n }\n}\n<commit_msg>Removed uneeded argument to make_indirect.<commit_after>\/\/ Copyright 2014-2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ FIXME: This needs an audit for correctness and completeness.\n\nuse abi::{FnType, ArgType, LayoutExt, Reg, RegKind, Uniform};\nuse context::CodegenCx;\n\nfn is_homogeneous_aggregate<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>)\n -> Option<Uniform> {\n arg.layout.homogeneous_aggregate(cx).and_then(|unit| {\n \/\/ Ensure we have at most eight uniquely addressable members.\n if arg.layout.size > unit.size.checked_mul(8, cx).unwrap() {\n return None;\n }\n\n let valid_unit = match unit.kind {\n RegKind::Integer => false,\n RegKind::Float => true,\n RegKind::Vector => arg.layout.size.bits() == 128\n };\n\n if valid_unit {\n Some(Uniform {\n unit,\n total: arg.layout.size\n })\n } else {\n None\n }\n })\n}\n\nfn classify_ret_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, ret: &mut ArgType<'tcx>) {\n if !ret.layout.is_aggregate() {\n ret.extend_integer_width_to(64);\n return;\n }\n\n if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {\n ret.cast_to(uniform);\n return;\n }\n let size = ret.layout.size;\n let bits = size.bits();\n if bits <= 256 {\n let unit = if bits <= 8 {\n Reg::i8()\n } else if bits <= 16 {\n Reg::i16()\n } else if bits <= 32 {\n Reg::i32()\n } else {\n Reg::i64()\n };\n\n ret.cast_to(Uniform {\n unit,\n total: size\n });\n return;\n }\n\n \/\/ don't return aggregates in registers\n ret.make_indirect();\n}\n\nfn classify_arg_ty<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &mut ArgType<'tcx>) {\n if !arg.layout.is_aggregate() {\n arg.extend_integer_width_to(64);\n return;\n }\n\n if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {\n arg.cast_to(uniform);\n return;\n }\n\n let total = arg.layout.size;\n if total.bits() > 128 {\n arg.make_indirect();\n return;\n }\n\n arg.cast_to(Uniform {\n unit: Reg::i64(),\n total\n });\n}\n\npub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tcx>) {\n if !fty.ret.is_ignore() {\n classify_ret_ty(cx, &mut fty.ret);\n }\n\n for arg in &mut fty.args {\n if arg.is_ignore() { continue; }\n classify_arg_ty(cx, arg);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>simple echo server.<commit_after>#[macro_use]\nextern crate log;\nextern crate env_logger;\n\nextern crate actix_http;\nextern crate actix_net;\nextern crate futures;\nextern crate http;\nextern crate bytes;\n\nuse actix_http::{h1, Response, Request};\nuse bytes::Bytes;\nuse actix_net::server::Server;\nuse actix_net::service::NewServiceExt;\nuse futures::Future;\nuse http::header::{HeaderValue};\nuse actix_http::HttpMessage;\nuse std::env;\n\nfn main() {\n env::set_var(\"RUST_LOG\", \"echo=info\");\n env_logger::init();\n\n Server::new().bind(\"echo\", \"127.0.0.1:8080\", || {\n h1::H1Service::build()\n .client_timeout(1000)\n .client_disconnect(1000)\n .server_hostname(\"localhost\")\n .finish(|_req: Request| {\n _req.body()\n .limit(512)\n .and_then(|bytes: Bytes| {\n info!(\"request body: {:?}\", bytes);\n let mut res = Response::Ok();\n res.header(\"x-head\", HeaderValue::from_static(\"dummy value!\"));\n Ok(res.body(bytes))\n })\n })\n .map(|_| ())\n }).unwrap().run();\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add stop_when method to Iterator implementations in solution for Day4<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse git2::{Index, Repository};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::{MapErrTrace, trace_error};\nuse 
libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\n\/\/\/ Runtime object for git hook implementations.\n\/\/\/\n\/\/\/ Contains some utility functionality to hold the repository and the configuration for the hooks.\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n \/\/\/ Build a `Runtime` object, pass the store path to build the `Repository` instance the\n \/\/\/ `Runtime` has to contain.\n \/\/\/\n \/\/\/ If the building of the `Repository` fails, this function `trace_error()`s the error and\n \/\/\/ returns a `Runtime` object that does _not_ contain a `Repository`.\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: Repository::open(storepath).map_err_trace().ok(),\n config: None,\n }\n }\n\n \/\/\/ Set the configuration for the `Runtime`. Always returns `Ok(())`.\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n \/\/\/ Check whether the `Runtime` has a `Repository`\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n \/\/\/ Check whether the `Runtime` has a configuration\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n \/\/\/ Get the the config value by reference or get an `Err()` which can be returned to the callee\n \/\/\/ of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n \/\/\/ Get the `Repository` object from the `Runtime` or an `Err()` that can be returned to the\n \/\/\/ callee of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n \/\/\/ Ensure that the branch that is put in the configuration file is checked out, if any.\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n use vcs::git::config::do_checkout_ensure_branch;\n\n if !do_checkout_ensure_branch(self.config.as_ref()) {\n return Ok(())\n }\n\n debug!(\"[GIT {} HOOK]: Ensuring branch checkout\", action.uppercase());\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"[GIT {} HOOK]: Repository fetched, getting head\", action.uppercase());\n r.head()\n 
.map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n }));\n debug!(\"[GIT {} HOOK]: HEAD fetched\", action.uppercase());\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if !head.is_branch() {\n debug!(\"[GIT {} HOOK]: HEAD is not a branch\", action.uppercase());\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"[GIT {} HOOK]: HEAD is a branch\", action.uppercase());\n\n \/\/ Check out appropriate branch ... or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"[GIT {} HOOK]: We have to ensure branch: {}\", action.uppercase(), s);\n match head.name().map(|name| {\n debug!(\"[GIT {} HOOK]: {} == {}\", action.uppercase(), name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"[GIT {} HOOK]: Branch already checked out.\", action.uppercase());\n Ok(())\n } else {\n debug!(\"[GIT {} HOOK]: Branch not checked out.\", action.uppercase());\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"[GIT {} HOOK]: No branch to checkout\", action.uppercase());\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg_str(\"[GIT CREATE HOOK]: Branch checked out\")\n }\n\n \/\/\/ Check whether the WD is \"dirty\" - whether there is a diff to the repository\n \/\/\/ This function returns false if there is no `Repository` object in the `Runtime`\n pub fn repo_is_dirty(&self, index: &Index) -> bool {\n match self.repository.as_ref() {\n Some(repo) => {\n repo.diff_index_to_workdir(Some(index), None)\n .map_dbg_str(\"Fetched diff: Index <-> WD\")\n .map_dbg_err_str(\"Failed to fetch diff: Index <-> WD\")\n .map(|diff| diff.deltas().count() != 0)\n .unwrap_or(false)\n },\n\n None => {\n debug!(\"No repository: Cannot fetch diff: Index <-> WD\");\n false\n }\n }\n\n }\n\n}\n\n<commit_msg>Remove unused import<commit_after>use std::path::PathBuf;\n\nuse git2::{Index, Repository};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::MapErrTrace;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\n\/\/\/ Runtime object for git hook implementations.\n\/\/\/\n\/\/\/ Contains some utility functionality to hold the repository and the configuration for the hooks.\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n \/\/\/ Build a `Runtime` object, pass the store path to build the `Repository` instance the\n \/\/\/ `Runtime` has to contain.\n \/\/\/\n \/\/\/ If the building of the `Repository` fails, this function `trace_error()`s the error and\n \/\/\/ returns a `Runtime` object that does _not_ contain a `Repository`.\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: Repository::open(storepath).map_err_trace().ok(),\n config: None,\n }\n }\n\n \/\/\/ Set the configuration for the `Runtime`. 
Always returns `Ok(())`.\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n \/\/\/ Check whether the `Runtime` has a `Repository`\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n \/\/\/ Check whether the `Runtime` has a configuration\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n \/\/\/ Get the the config value by reference or get an `Err()` which can be returned to the callee\n \/\/\/ of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n \/\/\/ Get the `Repository` object from the `Runtime` or an `Err()` that can be returned to the\n \/\/\/ callee of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n \/\/\/ Ensure that the branch that is put in the configuration file is checked out, if any.\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n use vcs::git::config::do_checkout_ensure_branch;\n\n if !do_checkout_ensure_branch(self.config.as_ref()) {\n return Ok(())\n }\n\n debug!(\"[GIT {} HOOK]: Ensuring branch checkout\", action.uppercase());\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"[GIT {} HOOK]: Repository fetched, getting head\", action.uppercase());\n r.head()\n .map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n }));\n debug!(\"[GIT {} HOOK]: HEAD fetched\", action.uppercase());\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if !head.is_branch() {\n debug!(\"[GIT {} HOOK]: HEAD is not a branch\", action.uppercase());\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"[GIT {} HOOK]: HEAD is a branch\", action.uppercase());\n\n \/\/ Check out appropriate branch ... 
or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"[GIT {} HOOK]: We have to ensure branch: {}\", action.uppercase(), s);\n match head.name().map(|name| {\n debug!(\"[GIT {} HOOK]: {} == {}\", action.uppercase(), name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"[GIT {} HOOK]: Branch already checked out.\", action.uppercase());\n Ok(())\n } else {\n debug!(\"[GIT {} HOOK]: Branch not checked out.\", action.uppercase());\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"[GIT {} HOOK]: No branch to checkout\", action.uppercase());\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg_str(\"[GIT CREATE HOOK]: Branch checked out\")\n }\n\n \/\/\/ Check whether the WD is \"dirty\" - whether there is a diff to the repository\n \/\/\/ This function returns false if there is no `Repository` object in the `Runtime`\n pub fn repo_is_dirty(&self, index: &Index) -> bool {\n match self.repository.as_ref() {\n Some(repo) => {\n repo.diff_index_to_workdir(Some(index), None)\n .map_dbg_str(\"Fetched diff: Index <-> WD\")\n .map_dbg_err_str(\"Failed to fetch diff: Index <-> WD\")\n .map(|diff| diff.deltas().count() != 0)\n .unwrap_or(false)\n },\n\n None => {\n debug!(\"No repository: Cannot fetch diff: Index <-> WD\");\n false\n }\n }\n\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/! Create window.\n\n\/\/ External crates.\nuse collections::deque::Deque;\nuse collections::ringbuf::RingBuf;\nuse glfw;\nuse gl;\n\n\/\/ Local crate.\nuse event;\nuse keyboard;\nuse mouse;\nuse game_window::{\n GameWindow,\n};\nuse game_window_settings::GameWindowSettings;\n\n\/\/\/ Contains stuff for game window.\npub struct GameWindowGLFW {\n \/\/\/ The window.\n window: glfw::Window,\n \/\/\/ Receives events from window.\n events: Receiver<(f64, glfw::WindowEvent)>,\n \/\/\/ GLFW context.\n glfw: glfw::Glfw,\n \/\/\/ Game window settings.\n settings: GameWindowSettings,\n event_queue: RingBuf<event::Event>,\n}\n\nimpl GameWindowGLFW {\n fn flush_messages(&mut self) {\n if self.event_queue.len() != 0 {\n return;\n }\n\n self.glfw.poll_events();\n for (_, event) in glfw::flush_messages(&self.events) {\n match event {\n glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _)\n if self.settings.exit_on_esc => {\n self.window.set_should_close(true);\n },\n glfw::KeyEvent(key, _, glfw::Press, _) => {\n self.event_queue.push_back(\n event::KeyPressed(glfw_map_key(key)));\n },\n glfw::KeyEvent(key, _, glfw::Release, _) => {\n self.event_queue.push_back(\n event::KeyReleased(glfw_map_key(key)));\n },\n glfw::MouseButtonEvent(button, glfw::Press, _) => {\n self.event_queue.push_back(\n event::MouseButtonPressed(glfw_map_mouse(button)));\n },\n glfw::MouseButtonEvent(button, glfw::Release, _) => {\n self.event_queue.push_back(\n event::MouseButtonReleased(glfw_map_mouse(button)));\n },\n glfw::CursorPosEvent(x, y) => {\n self.event_queue.push_back(\n event::MouseMoved(x, y, None));\n },\n _ => {},\n }\n }\n }\n}\n\nimpl GameWindow for GameWindowGLFW {\n fn new(settings: GameWindowSettings) -> GameWindowGLFW {\n use glfw::Context;\n\n \/\/ Create GLFW window.\n let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n let (window, events) = glfw.create_window(\n settings.size[0],\n settings.size[1],\n 
settings.title.as_slice(), glfw::Windowed\n ).expect(\"Failed to create GLFW window.\");\n window.set_key_polling(true);\n window.set_mouse_button_polling(true);\n window.set_cursor_pos_polling(true);\n \/\/ or polling all event\n \/\/window.set_all_polling(true);\n window.make_current();\n\n \/\/ Load the OpenGL function pointers\n gl::load_with(|s| glfw.get_proc_address(s));\n\n GameWindowGLFW {\n window: window,\n events: events,\n glfw: glfw,\n settings: settings,\n event_queue: RingBuf::<event::Event>::new(),\n }\n }\n\n fn get_settings<'a>(&'a self) -> &'a GameWindowSettings {\n &self.settings\n }\n\n fn should_close(&self) -> bool {\n self.window.should_close()\n }\n\n fn swap_buffers(&self) {\n use glfw::Context;\n\n self.window.swap_buffers();\n }\n\n fn poll_event(&mut self) -> event::Event {\n self.flush_messages();\n\n if self.event_queue.len() != 0 {\n self.event_queue.pop_front().unwrap()\n } else {\n event::NoEvent\n }\n }\n}\n\nfn glfw_map_key(keycode: glfw::Key) -> keyboard::Key {\n match keycode {\n glfw::Key0 => keyboard::D0,\n glfw::Key1 => keyboard::D1,\n glfw::Key2 => keyboard::D2,\n glfw::Key3 => keyboard::D3,\n glfw::Key4 => keyboard::D4,\n glfw::Key5 => keyboard::D5,\n glfw::Key6 => keyboard::D6,\n glfw::Key7 => keyboard::D7,\n glfw::Key8 => keyboard::D8,\n glfw::Key9 => keyboard::D9,\n glfw::KeyA => keyboard::A,\n glfw::KeyB => keyboard::B,\n glfw::KeyC => keyboard::C,\n glfw::KeyD => keyboard::D,\n glfw::KeyE => keyboard::E,\n glfw::KeyF => keyboard::F,\n glfw::KeyG => keyboard::G,\n glfw::KeyH => keyboard::H,\n glfw::KeyI => keyboard::I,\n glfw::KeyJ => keyboard::J,\n glfw::KeyK => keyboard::K,\n glfw::KeyL => keyboard::L,\n glfw::KeyM => keyboard::M,\n glfw::KeyN => keyboard::N,\n glfw::KeyO => keyboard::O,\n glfw::KeyP => keyboard::P,\n glfw::KeyQ => keyboard::Q,\n glfw::KeyR => keyboard::R,\n glfw::KeyS => keyboard::S,\n glfw::KeyT => keyboard::T,\n glfw::KeyU => keyboard::U,\n glfw::KeyV => keyboard::V,\n glfw::KeyW => keyboard::W,\n glfw::KeyX => keyboard::X,\n glfw::KeyY => keyboard::Y,\n glfw::KeyZ => keyboard::Z,\n glfw::KeyApostrophe => keyboard::Unknown,\n glfw::KeyBackslash => keyboard::Backslash,\n glfw::KeyBackspace => keyboard::Backspace,\n glfw::KeyCapsLock => keyboard::CapsLock,\n glfw::KeyDelete => keyboard::Delete,\n glfw::KeyComma => keyboard::Comma,\n glfw::KeyDown => keyboard::Down,\n glfw::KeyEnd => keyboard::End,\n glfw::KeyEnter => keyboard::Return,\n glfw::KeyEqual => keyboard::Equals,\n glfw::KeyEscape => keyboard::Escape,\n glfw::KeyF1 => keyboard::F1,\n glfw::KeyF2 => keyboard::F2,\n glfw::KeyF3 => keyboard::F3,\n glfw::KeyF4 => keyboard::F4,\n glfw::KeyF5 => keyboard::F5,\n glfw::KeyF6 => keyboard::F6,\n glfw::KeyF7 => keyboard::F7,\n glfw::KeyF8 => keyboard::F8,\n glfw::KeyF9 => keyboard::F9,\n glfw::KeyF10 => keyboard::F10,\n glfw::KeyF11 => keyboard::F11,\n glfw::KeyF12 => keyboard::F12,\n glfw::KeyF13 => keyboard::F13,\n glfw::KeyF14 => keyboard::F14,\n glfw::KeyF15 => keyboard::F15,\n glfw::KeyF16 => keyboard::F16,\n glfw::KeyF17 => keyboard::F17,\n glfw::KeyF18 => keyboard::F18,\n glfw::KeyF19 => keyboard::F19,\n glfw::KeyF20 => keyboard::F20,\n glfw::KeyF21 => keyboard::F21,\n glfw::KeyF22 => keyboard::F22,\n glfw::KeyF23 => keyboard::F23,\n glfw::KeyF24 => keyboard::F24,\n \/\/ Possibly next code.\n glfw::KeyF25 => keyboard::Unknown,\n glfw::KeyKp0 => keyboard::NumPad0,\n glfw::KeyKp1 => keyboard::NumPad1,\n glfw::KeyKp2 => keyboard::NumPad2,\n glfw::KeyKp3 => keyboard::NumPad3,\n glfw::KeyKp4 => keyboard::NumPad4,\n 
glfw::KeyKp5 => keyboard::NumPad5,\n glfw::KeyKp6 => keyboard::NumPad6,\n glfw::KeyKp7 => keyboard::NumPad7,\n glfw::KeyKp8 => keyboard::NumPad8,\n glfw::KeyKp9 => keyboard::NumPad9,\n glfw::KeyKpDecimal => keyboard::NumPadDecimal,\n glfw::KeyKpDivide => keyboard::NumPadDivide,\n glfw::KeyKpMultiply => keyboard::NumPadMultiply,\n glfw::KeyKpSubtract => keyboard::NumPadMinus,\n glfw::KeyKpAdd => keyboard::NumPadPlus,\n glfw::KeyKpEnter => keyboard::NumPadEnter,\n glfw::KeyKpEqual => keyboard::NumPadEquals,\n glfw::KeyLeftShift => keyboard::LShift,\n glfw::KeyLeftControl => keyboard::LCtrl,\n glfw::KeyLeftAlt => keyboard::LAlt,\n glfw::KeyLeftSuper => keyboard::LGui,\n glfw::KeyRightShift => keyboard::RShift,\n glfw::KeyRightControl => keyboard::RCtrl,\n glfw::KeyRightAlt => keyboard::RAlt,\n glfw::KeyRightSuper => keyboard::RGui,\n \/\/ Map to backslash?\n glfw::KeyGraveAccent => keyboard::Unknown,\n glfw::KeyHome => keyboard::Home,\n glfw::KeyInsert => keyboard::Insert,\n glfw::KeyLeft => keyboard::Left,\n glfw::KeyLeftBracket => keyboard::LeftBracket,\n glfw::KeyMenu => keyboard::Menu,\n glfw::KeyMinus => keyboard::Minus,\n glfw::KeyNumLock => keyboard::NumLockClear,\n glfw::KeyPageDown => keyboard::PageDown,\n glfw::KeyPageUp => keyboard::PageUp,\n glfw::KeyPause => keyboard::Pause,\n glfw::KeyPeriod => keyboard::Period,\n glfw::KeyPrintScreen => keyboard::PrintScreen,\n glfw::KeyRight => keyboard::Right,\n glfw::KeyRightBracket => keyboard::RightBracket,\n glfw::KeyScrollLock => keyboard::ScrollLock,\n glfw::KeySemicolon => keyboard::Semicolon,\n glfw::KeySlash => keyboard::Slash,\n glfw::KeySpace => keyboard::Space,\n glfw::KeyTab => keyboard::Tab,\n glfw::KeyUp => keyboard::Up,\n glfw::KeyWorld1 => keyboard::Unknown,\n glfw::KeyWorld2 => keyboard::Unknown,\n \/\/ _ => keyboard::Unknown,\n }\n}\n\nfn glfw_map_mouse(mouse_button: glfw::MouseButton) -> mouse::Button {\n match mouse_button {\n glfw::MouseButton1 => mouse::Left,\n glfw::MouseButton2 => mouse::Right,\n glfw::MouseButton3 => mouse::Middle,\n glfw::MouseButton4 => mouse::X1,\n glfw::MouseButton5 => mouse::X2,\n glfw::MouseButton6 => mouse::Button6,\n glfw::MouseButton7 => mouse::Button7,\n glfw::MouseButton8 => mouse::Button8,\n }\n}\n\n<commit_msg>Update game_window_glfw.rs<commit_after>\/\/! 
Create window.\n\n\/\/ External crates.\nuse collections::Deque;\nuse collections::ringbuf::RingBuf;\nuse glfw;\nuse gl;\n\n\/\/ Local crate.\nuse event;\nuse keyboard;\nuse mouse;\nuse game_window::{\n GameWindow,\n};\nuse game_window_settings::GameWindowSettings;\n\n\/\/\/ Contains stuff for game window.\npub struct GameWindowGLFW {\n \/\/\/ The window.\n window: glfw::Window,\n \/\/\/ Receives events from window.\n events: Receiver<(f64, glfw::WindowEvent)>,\n \/\/\/ GLFW context.\n glfw: glfw::Glfw,\n \/\/\/ Game window settings.\n settings: GameWindowSettings,\n event_queue: RingBuf<event::Event>,\n}\n\nimpl GameWindowGLFW {\n fn flush_messages(&mut self) {\n if self.event_queue.len() != 0 {\n return;\n }\n\n self.glfw.poll_events();\n for (_, event) in glfw::flush_messages(&self.events) {\n match event {\n glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _)\n if self.settings.exit_on_esc => {\n self.window.set_should_close(true);\n },\n glfw::KeyEvent(key, _, glfw::Press, _) => {\n self.event_queue.push_back(\n event::KeyPressed(glfw_map_key(key)));\n },\n glfw::KeyEvent(key, _, glfw::Release, _) => {\n self.event_queue.push_back(\n event::KeyReleased(glfw_map_key(key)));\n },\n glfw::MouseButtonEvent(button, glfw::Press, _) => {\n self.event_queue.push_back(\n event::MouseButtonPressed(glfw_map_mouse(button)));\n },\n glfw::MouseButtonEvent(button, glfw::Release, _) => {\n self.event_queue.push_back(\n event::MouseButtonReleased(glfw_map_mouse(button)));\n },\n glfw::CursorPosEvent(x, y) => {\n self.event_queue.push_back(\n event::MouseMoved(x, y, None));\n },\n _ => {},\n }\n }\n }\n}\n\nimpl GameWindow for GameWindowGLFW {\n fn new(settings: GameWindowSettings) -> GameWindowGLFW {\n use glfw::Context;\n\n \/\/ Create GLFW window.\n let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n let (window, events) = glfw.create_window(\n settings.size[0],\n settings.size[1],\n settings.title.as_slice(), glfw::Windowed\n ).expect(\"Failed to create GLFW window.\");\n window.set_key_polling(true);\n window.set_mouse_button_polling(true);\n window.set_cursor_pos_polling(true);\n \/\/ or polling all event\n \/\/window.set_all_polling(true);\n window.make_current();\n\n \/\/ Load the OpenGL function pointers\n gl::load_with(|s| glfw.get_proc_address(s));\n\n GameWindowGLFW {\n window: window,\n events: events,\n glfw: glfw,\n settings: settings,\n event_queue: RingBuf::<event::Event>::new(),\n }\n }\n\n fn get_settings<'a>(&'a self) -> &'a GameWindowSettings {\n &self.settings\n }\n\n fn should_close(&self) -> bool {\n self.window.should_close()\n }\n\n fn swap_buffers(&self) {\n use glfw::Context;\n\n self.window.swap_buffers();\n }\n\n fn poll_event(&mut self) -> event::Event {\n self.flush_messages();\n\n if self.event_queue.len() != 0 {\n self.event_queue.pop_front().unwrap()\n } else {\n event::NoEvent\n }\n }\n}\n\nfn glfw_map_key(keycode: glfw::Key) -> keyboard::Key {\n match keycode {\n glfw::Key0 => keyboard::D0,\n glfw::Key1 => keyboard::D1,\n glfw::Key2 => keyboard::D2,\n glfw::Key3 => keyboard::D3,\n glfw::Key4 => keyboard::D4,\n glfw::Key5 => keyboard::D5,\n glfw::Key6 => keyboard::D6,\n glfw::Key7 => keyboard::D7,\n glfw::Key8 => keyboard::D8,\n glfw::Key9 => keyboard::D9,\n glfw::KeyA => keyboard::A,\n glfw::KeyB => keyboard::B,\n glfw::KeyC => keyboard::C,\n glfw::KeyD => keyboard::D,\n glfw::KeyE => keyboard::E,\n glfw::KeyF => keyboard::F,\n glfw::KeyG => keyboard::G,\n glfw::KeyH => keyboard::H,\n glfw::KeyI => keyboard::I,\n glfw::KeyJ => keyboard::J,\n glfw::KeyK => 
keyboard::K,\n glfw::KeyL => keyboard::L,\n glfw::KeyM => keyboard::M,\n glfw::KeyN => keyboard::N,\n glfw::KeyO => keyboard::O,\n glfw::KeyP => keyboard::P,\n glfw::KeyQ => keyboard::Q,\n glfw::KeyR => keyboard::R,\n glfw::KeyS => keyboard::S,\n glfw::KeyT => keyboard::T,\n glfw::KeyU => keyboard::U,\n glfw::KeyV => keyboard::V,\n glfw::KeyW => keyboard::W,\n glfw::KeyX => keyboard::X,\n glfw::KeyY => keyboard::Y,\n glfw::KeyZ => keyboard::Z,\n glfw::KeyApostrophe => keyboard::Unknown,\n glfw::KeyBackslash => keyboard::Backslash,\n glfw::KeyBackspace => keyboard::Backspace,\n glfw::KeyCapsLock => keyboard::CapsLock,\n glfw::KeyDelete => keyboard::Delete,\n glfw::KeyComma => keyboard::Comma,\n glfw::KeyDown => keyboard::Down,\n glfw::KeyEnd => keyboard::End,\n glfw::KeyEnter => keyboard::Return,\n glfw::KeyEqual => keyboard::Equals,\n glfw::KeyEscape => keyboard::Escape,\n glfw::KeyF1 => keyboard::F1,\n glfw::KeyF2 => keyboard::F2,\n glfw::KeyF3 => keyboard::F3,\n glfw::KeyF4 => keyboard::F4,\n glfw::KeyF5 => keyboard::F5,\n glfw::KeyF6 => keyboard::F6,\n glfw::KeyF7 => keyboard::F7,\n glfw::KeyF8 => keyboard::F8,\n glfw::KeyF9 => keyboard::F9,\n glfw::KeyF10 => keyboard::F10,\n glfw::KeyF11 => keyboard::F11,\n glfw::KeyF12 => keyboard::F12,\n glfw::KeyF13 => keyboard::F13,\n glfw::KeyF14 => keyboard::F14,\n glfw::KeyF15 => keyboard::F15,\n glfw::KeyF16 => keyboard::F16,\n glfw::KeyF17 => keyboard::F17,\n glfw::KeyF18 => keyboard::F18,\n glfw::KeyF19 => keyboard::F19,\n glfw::KeyF20 => keyboard::F20,\n glfw::KeyF21 => keyboard::F21,\n glfw::KeyF22 => keyboard::F22,\n glfw::KeyF23 => keyboard::F23,\n glfw::KeyF24 => keyboard::F24,\n \/\/ Possibly next code.\n glfw::KeyF25 => keyboard::Unknown,\n glfw::KeyKp0 => keyboard::NumPad0,\n glfw::KeyKp1 => keyboard::NumPad1,\n glfw::KeyKp2 => keyboard::NumPad2,\n glfw::KeyKp3 => keyboard::NumPad3,\n glfw::KeyKp4 => keyboard::NumPad4,\n glfw::KeyKp5 => keyboard::NumPad5,\n glfw::KeyKp6 => keyboard::NumPad6,\n glfw::KeyKp7 => keyboard::NumPad7,\n glfw::KeyKp8 => keyboard::NumPad8,\n glfw::KeyKp9 => keyboard::NumPad9,\n glfw::KeyKpDecimal => keyboard::NumPadDecimal,\n glfw::KeyKpDivide => keyboard::NumPadDivide,\n glfw::KeyKpMultiply => keyboard::NumPadMultiply,\n glfw::KeyKpSubtract => keyboard::NumPadMinus,\n glfw::KeyKpAdd => keyboard::NumPadPlus,\n glfw::KeyKpEnter => keyboard::NumPadEnter,\n glfw::KeyKpEqual => keyboard::NumPadEquals,\n glfw::KeyLeftShift => keyboard::LShift,\n glfw::KeyLeftControl => keyboard::LCtrl,\n glfw::KeyLeftAlt => keyboard::LAlt,\n glfw::KeyLeftSuper => keyboard::LGui,\n glfw::KeyRightShift => keyboard::RShift,\n glfw::KeyRightControl => keyboard::RCtrl,\n glfw::KeyRightAlt => keyboard::RAlt,\n glfw::KeyRightSuper => keyboard::RGui,\n \/\/ Map to backslash?\n glfw::KeyGraveAccent => keyboard::Unknown,\n glfw::KeyHome => keyboard::Home,\n glfw::KeyInsert => keyboard::Insert,\n glfw::KeyLeft => keyboard::Left,\n glfw::KeyLeftBracket => keyboard::LeftBracket,\n glfw::KeyMenu => keyboard::Menu,\n glfw::KeyMinus => keyboard::Minus,\n glfw::KeyNumLock => keyboard::NumLockClear,\n glfw::KeyPageDown => keyboard::PageDown,\n glfw::KeyPageUp => keyboard::PageUp,\n glfw::KeyPause => keyboard::Pause,\n glfw::KeyPeriod => keyboard::Period,\n glfw::KeyPrintScreen => keyboard::PrintScreen,\n glfw::KeyRight => keyboard::Right,\n glfw::KeyRightBracket => keyboard::RightBracket,\n glfw::KeyScrollLock => keyboard::ScrollLock,\n glfw::KeySemicolon => keyboard::Semicolon,\n glfw::KeySlash => keyboard::Slash,\n glfw::KeySpace => 
keyboard::Space,\n glfw::KeyTab => keyboard::Tab,\n glfw::KeyUp => keyboard::Up,\n glfw::KeyWorld1 => keyboard::Unknown,\n glfw::KeyWorld2 => keyboard::Unknown,\n \/\/ _ => keyboard::Unknown,\n }\n}\n\nfn glfw_map_mouse(mouse_button: glfw::MouseButton) -> mouse::Button {\n match mouse_button {\n glfw::MouseButton1 => mouse::Left,\n glfw::MouseButton2 => mouse::Right,\n glfw::MouseButton3 => mouse::Middle,\n glfw::MouseButton4 => mouse::X1,\n glfw::MouseButton5 => mouse::X2,\n glfw::MouseButton6 => mouse::Button6,\n glfw::MouseButton7 => mouse::Button7,\n glfw::MouseButton8 => mouse::Button8,\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary newlines in generated source<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rust fmt<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Wrong use order<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Structs and such<commit_after>use std::io;\n\nstruct MeanFeatures {\n wordlen: i32,\t\/\/ The length of each of these\n sentlen: i32,\t\/\/ characteristics will be averaged\n paralen: i32,\t\/\/ and compared with other source.\n}\n\nstruct FreqFeatures {\n comma: i32,\n semicolon: i32,\n quote: i32,\n bangs: i32,\n dashes: i32,\n ands: i32,\n buts: i32,\n however: i32,\n condition: i32,\t\t\/\/ Did not want to create confusion with if.\n thats: i32,\n more: i32,\n musts: i32,\n mights: i32,\n thises: i32,\n very: i32,\n}\n\nfn main() {\n unimplemented!();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Test missing field in remote struct<commit_after>#[macro_use]\nextern crate serde_derive;\n\nmod remote {\n pub struct S {\n pub a: u8,\n pub b: u8,\n }\n}\n\n#[derive(Serialize, Deserialize)] \/\/~ ERROR: missing field `b` in initializer of `remote::S`\n#[serde(remote = \"remote::S\")]\nstruct S {\n a: u8, \/\/~^^^ ERROR: missing field `b` in initializer of `remote::S`\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor libimagtodo to fit new store iterator interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>refactor: moved some functions to util mod<commit_after>use std::env;\nuse std::path::{Path, PathBuf};\nuse std::fs::{self, File};\nuse std::io::Read;\n\nuse toml::{self, Table};\n\nuse error::{CliErrorKind, CliResult};\n\npub fn toml_from_file<P: AsRef<Path>>(p: P) -> CliResult<Box<Table>> {\n debugln!(\"executing; from_file; file={:?}\", p.as_ref());\n let mut f = try!(File::open(p.as_ref()));\n\n let mut s = String::new();\n try!(f.read_to_string(&mut s));\n\n let mut parser = toml::Parser::new(&s);\n if let Some(toml) = parser.parse() {\n return Ok(Box::new(toml));\n }\n\n \/\/ On err\n let mut error_str = format!(\"could not parse input as TOML\\n\");\n for error in parser.errors.iter() {\n let (loline, locol) = parser.to_linecol(error.lo);\n let (hiline, hicol) = parser.to_linecol(error.hi);\n error_str.push_str(&format!(\"{:?}:{}:{}{} {}\\n\",\n f,\n loline + 1,\n locol + 1,\n if loline != hiline || locol != hicol {\n format!(\"-{}:{}\", hiline + 1, hicol + 1)\n } else {\n \"\".to_owned()\n },\n error.desc));\n }\n Err(From::from(CliErrorKind::Generic(error_str)))\n}\n\npub fn find_manifest_file(file: &str) -> CliResult<PathBuf> {\n let mut pwd = try!(env::current_dir());\n\n loop {\n let manifest = pwd.join(file);\n if let Ok(metadata) = fs::metadata(&manifest) {\n if metadata.is_file() {\n return Ok(manifest);\n }\n }\n\n let pwd2 = pwd.clone();\n let parent = pwd2.parent();\n if let None = parent {\n break;\n 
}\n pwd = parent.unwrap().to_path_buf();\n }\n\n Err(From::from(CliErrorKind::Generic(format!(\"Could not find `{}` in `{}` or any \\\n parent directory, or it isn't a valid \\\n lock-file\",\n file,\n pwd.display()))))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make CollectedMetrics private<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Generate EOI position in `gen_position_map`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add new code generator integration tests<commit_after>#[path=\"..\/src\/grammar.rs\"]\nmod grammar;\n#[path=\"..\/src\/parser.rs\"]\nmod parser;\n#[path=\"..\/src\/codegen.rs\"]\nmod codegen;\n#[path=\"..\/src\/lexicon.rs\"]\nmod lexicon;\n#[path=\"..\/src\/tokenizer.rs\"]\nmod tokenizer;\n#[path=\"..\/src\/transformer.rs\"]\nmod transformer;\n\nfn output_program(input_program: &str) -> Result<String, Vec<String>> {\n let program = parser::parse(input_program.into());\n let ast = transformer::traverse(program);\n codegen::generate_code(ast)\n}\n\n#[test]\nfn convert_const_to_var_in_global_scope() {\n assert_eq!(output_program(\"const pi = 3.14\"),\n Ok(\"var pi = 3.14;\".into()));\n}\n\n#[test]\nfn convert_let_to_var_in_global_scope() {\n assert_eq!(output_program(\"let pi = 3.14\"),\n Ok(\"var pi = 3.14;\".into()));\n}\n\n#[test]\nfn dont_touch_var_in_global_scope() {\n assert_eq!(output_program(\"var pi = 3.14\"),\n Ok(\"var pi = 3.14;\".into()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make UniqueContactPathGenerator pub<commit_after><|endoftext|>"} {"text":"<commit_before>use runtime::Runtime;\nuse storage::backend::{StorageBackendError, StorageBackend};\n\nuse module::Module;\nuse module::ModuleError;\nuse module::CommandResult;\nuse module::CommandEnv;\n\nuse module::bm::header::build_header;\nuse module::bm::header::get_tags_from_header;\nuse storage::json::parser::JsonHeaderParser;\nuse storage::parser::{Parser, FileHeaderParser};\nuse storage::file::File;\nuse ui::file::{FilePrinter, TablePrinter};\nuse std::vec::IntoIter;\n\nuse clap::ArgMatches;\nuse regex::Regex;\n\npub fn add_command(module: &Module, env: CommandEnv) -> CommandResult {\n use url::Url;\n\n let url = env.matches.value_of(\"url\").unwrap();\n\n if let Err(e) = Url::parse(url) {\n info!(\"Not an URL: '{}'\", url);\n info!(\" this will turn into an hard error before 0.1.0\");\n debug!(\"URL parsing error: {:?}\", e);\n }\n\n let tags = get_tags(env.rt, env.matches);\n info!(\"Adding url '{}' with tags '{:?}'\", url, tags);\n\n let header = build_header(&String::from(url), &tags);\n let file = File::new_with_header(module, header);\n let parser = Parser::new(JsonHeaderParser::new(None));\n let putres = env.bk.put_file(file, &parser);\n\n putres.map_err(|sberr| {\n let mut err = ModuleError::new(\"Storage Backend Error\");\n err.caused_by = Some(Box::new(sberr));\n err\n })\n}\n\npub fn list_command(module: &Module, env: CommandEnv) -> CommandResult {\n let printer = TablePrinter::new(env.rt.is_verbose(), env.rt.is_debugging());\n let files = get_filtered_files_from_backend(module, &env);\n\n debug!(\"Printing files now\");\n files.map(|f| printer.print_files(f));\n\n Ok(())\n}\n\npub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {\n let checked : bool = run_removal_checking(&env);\n debug!(\"Checked mode: {}\", checked);\n if let Some(id) = get_id(env.rt, env.matches) {\n debug!(\"Remove by id: {}\", id);\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n let file = env.bk.get_file_by_id(module, 
&id.into(), &parser).unwrap();\n debug!(\"Remove file : {:?}\", file);\n\n if let Err(e) = env.bk.remove_file(module, file, checked) {\n debug!(\"Remove failed\");\n let mut err = ModuleError::new(\"Removing file failed\");\n err.caused_by = Some(Box::new(e));\n Err(err)\n } else {\n debug!(\"Remove worked\");\n Ok(())\n }\n } else {\n debug!(\"Remove more than one file\");\n\n get_filtered_files_from_backend(module, &env).and_then(|files| {\n let nfiles = files.len();\n info!(\"Removing {} Files\", nfiles);\n\n let errs = files.map(|file| {\n debug!(\"Remove file: {:?}\", file);\n env.bk.remove_file(module, file, checked)\n })\n .filter(|e| e.is_err())\n .map(|e| {\n let err = e.err().unwrap();\n warn!(\"Error occured in Filesystem operation: {}\", err);\n err\n })\n .collect::<Vec<StorageBackendError>>();\n\n let nerrs = errs.len();\n\n if nerrs != 0 {\n warn!(\"{} Errors occured while removing {} files\", nerrs, nfiles);\n let moderr = ModuleError::new(\"File removal failed\");\n\n \/\/ TODO : Collect StorageBackendErrors\n\n Err(moderr)\n } else {\n Ok(())\n }\n })\n }\n}\n\n\/*\n *\n * Private helpers\n *\n *\/\n\nfn get_filtered_files_from_backend<'a>(module: &'a Module,\n env: &CommandEnv)\n -> Result<IntoIter<File<'a>>, ModuleError>\n{\n let parser = Parser::new(JsonHeaderParser::new(None));\n let tags = get_tags(env.rt, env.matches);\n debug!(\"Tags: {:?}\", tags);\n env.bk.iter_files(module, &parser)\n .map(|files| {\n let f = files.filter(|file| {\n debug!(\"Backend returns file: {:?}\", file);\n if tags.len() != 0 {\n debug!(\"Checking tags of: {:?}\", file.id());\n get_tags_from_header(&file.header()).iter()\n .any(|t| tags.contains(t))\n } else {\n true\n }\n }).filter(|file| {\n debug!(\"Checking matches of: {:?}\", file.id());\n get_matcher(env.rt, env.matches)\n .and_then(|r| Some(file.matches_with(&r)))\n .unwrap_or(true)\n }).collect::<Vec<File>>();\n f.into_iter()\n }).map_err(|e| {\n debug!(\"Error from Backend: {:?}\", e);\n let mut merr = ModuleError::new(\"Could not filter files\");\n merr.caused_by = Some(Box::new(e));\n merr\n })\n}\n\nfn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags|\n Some(tags.split(\",\")\n .into_iter()\n .map(|s| s.to_string())\n .filter(|e|\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n false\n } else {\n true\n }).collect()\n )\n ).or(Some(vec![])).unwrap()\n\n}\n\nfn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {\n debug!(\"Fetching matcher from commandline\");\n if let Some(s) = sub.value_of(\"match\") {\n if let Ok(r) = Regex::new(s) {\n return Some(r)\n } else {\n error!(\"Regex error, continuing without regex\");\n }\n }\n None\n\n}\n\nfn get_id<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<String> {\n debug!(\"Fetching id from commandline\");\n sub.value_of(\"id\").and_then(|s| Some(String::from(s)))\n}\n\n\/*\n * Checks whether the commandline call was set to run the removal \"checked\",\n * so if another entry from the store refers to this ID, do not remove the file.\n *\/\nfn run_removal_checking(env: &CommandEnv) -> bool {\n env.matches.is_present(\"check\")\n}\n<commit_msg>Ensure we dont crash if there are no files found<commit_after>use runtime::Runtime;\nuse storage::backend::{StorageBackendError, StorageBackend};\n\nuse module::Module;\nuse module::ModuleError;\nuse module::CommandResult;\nuse module::CommandEnv;\n\nuse module::bm::header::build_header;\nuse 
module::bm::header::get_tags_from_header;\nuse storage::json::parser::JsonHeaderParser;\nuse storage::parser::{Parser, FileHeaderParser};\nuse storage::file::File;\nuse ui::file::{FilePrinter, TablePrinter};\nuse std::vec::IntoIter;\n\nuse clap::ArgMatches;\nuse regex::Regex;\n\npub fn add_command(module: &Module, env: CommandEnv) -> CommandResult {\n use url::Url;\n\n let url = env.matches.value_of(\"url\").unwrap();\n\n if let Err(e) = Url::parse(url) {\n info!(\"Not an URL: '{}'\", url);\n info!(\" this will turn into an hard error before 0.1.0\");\n debug!(\"URL parsing error: {:?}\", e);\n }\n\n let tags = get_tags(env.rt, env.matches);\n info!(\"Adding url '{}' with tags '{:?}'\", url, tags);\n\n let header = build_header(&String::from(url), &tags);\n let file = File::new_with_header(module, header);\n let parser = Parser::new(JsonHeaderParser::new(None));\n let putres = env.bk.put_file(file, &parser);\n\n putres.map_err(|sberr| {\n let mut err = ModuleError::new(\"Storage Backend Error\");\n err.caused_by = Some(Box::new(sberr));\n err\n })\n}\n\npub fn list_command(module: &Module, env: CommandEnv) -> CommandResult {\n let printer = TablePrinter::new(env.rt.is_verbose(), env.rt.is_debugging());\n let files = get_filtered_files_from_backend(module, &env);\n\n debug!(\"Printing files now\");\n files.map(|f| printer.print_files(f));\n\n Ok(())\n}\n\npub fn remove_command(module: &Module, env: CommandEnv) -> CommandResult {\n let checked : bool = run_removal_checking(&env);\n debug!(\"Checked mode: {}\", checked);\n if let Some(id) = get_id(env.rt, env.matches) {\n debug!(\"Remove by id: {}\", id);\n\n let parser = Parser::new(JsonHeaderParser::new(None));\n env.bk.get_file_by_id(module, &id.into(), &parser).map(|file| {\n debug!(\"Remove file : {:?}\", file);\n\n if let Err(e) = env.bk.remove_file(module, file, checked) {\n debug!(\"Remove failed\");\n let mut err = ModuleError::new(\"Removing file failed\");\n err.caused_by = Some(Box::new(e));\n Err(err)\n } else {\n info!(\"Remove worked\");\n Ok(())\n }\n }).unwrap_or({\n info!(\"No files found\");\n Ok(())\n })\n } else {\n debug!(\"Remove more than one file\");\n\n get_filtered_files_from_backend(module, &env).and_then(|files| {\n let nfiles = files.len();\n info!(\"Removing {} Files\", nfiles);\n\n let errs = files.map(|file| {\n debug!(\"Remove file: {:?}\", file);\n env.bk.remove_file(module, file, checked)\n })\n .filter(|e| e.is_err())\n .map(|e| {\n let err = e.err().unwrap();\n warn!(\"Error occured in Filesystem operation: {}\", err);\n err\n })\n .collect::<Vec<StorageBackendError>>();\n\n let nerrs = errs.len();\n\n if nerrs != 0 {\n warn!(\"{} Errors occured while removing {} files\", nerrs, nfiles);\n let moderr = ModuleError::new(\"File removal failed\");\n\n \/\/ TODO : Collect StorageBackendErrors\n\n Err(moderr)\n } else {\n Ok(())\n }\n })\n }\n}\n\n\/*\n *\n * Private helpers\n *\n *\/\n\nfn get_filtered_files_from_backend<'a>(module: &'a Module,\n env: &CommandEnv)\n -> Result<IntoIter<File<'a>>, ModuleError>\n{\n let parser = Parser::new(JsonHeaderParser::new(None));\n let tags = get_tags(env.rt, env.matches);\n debug!(\"Tags: {:?}\", tags);\n env.bk.iter_files(module, &parser)\n .map(|files| {\n let f = files.filter(|file| {\n debug!(\"Backend returns file: {:?}\", file);\n if tags.len() != 0 {\n debug!(\"Checking tags of: {:?}\", file.id());\n get_tags_from_header(&file.header()).iter()\n .any(|t| tags.contains(t))\n } else {\n true\n }\n }).filter(|file| {\n debug!(\"Checking matches of: {:?}\", 
file.id());\n get_matcher(env.rt, env.matches)\n .and_then(|r| Some(file.matches_with(&r)))\n .unwrap_or(true)\n }).collect::<Vec<File>>();\n f.into_iter()\n }).map_err(|e| {\n debug!(\"Error from Backend: {:?}\", e);\n let mut merr = ModuleError::new(\"Could not filter files\");\n merr.caused_by = Some(Box::new(e));\n merr\n })\n}\n\nfn get_tags<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Vec<String> {\n debug!(\"Fetching tags from commandline\");\n sub.value_of(\"tags\").and_then(|tags|\n Some(tags.split(\",\")\n .into_iter()\n .map(|s| s.to_string())\n .filter(|e|\n if e.contains(\" \") {\n warn!(\"Tag contains spaces: '{}'\", e);\n false\n } else {\n true\n }).collect()\n )\n ).or(Some(vec![])).unwrap()\n\n}\n\nfn get_matcher<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<Regex> {\n debug!(\"Fetching matcher from commandline\");\n if let Some(s) = sub.value_of(\"match\") {\n if let Ok(r) = Regex::new(s) {\n return Some(r)\n } else {\n error!(\"Regex error, continuing without regex\");\n }\n }\n None\n\n}\n\nfn get_id<'a>(rt: &Runtime, sub: &ArgMatches<'a, 'a>) -> Option<String> {\n debug!(\"Fetching id from commandline\");\n sub.value_of(\"id\").and_then(|s| Some(String::from(s)))\n}\n\n\/*\n * Checks whether the commandline call was set to run the removal \"checked\",\n * so if another entry from the store refers to this ID, do not remove the file.\n *\/\nfn run_removal_checking(env: &CommandEnv) -> bool {\n env.matches.is_present(\"check\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2390<commit_after>\/\/ https:\/\/leetcode.com\/problems\/removing-stars-from-a-string\/\npub fn remove_stars(s: String) -> String {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", remove_stars(\"leet**cod*e\".to_string())); \/\/ \"lecoe\"\n println!(\"{}\", remove_stars(\"erase*****\".to_string())); \/\/ \"\"\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>HasMap's size<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Dijkstra<commit_after>use std::fs::File;\nuse std::io::BufReader;\nuse std::io::BufRead;\nuse std::collections::VecDeque;\nuse std::collections::BTreeSet;\n\nconst NODES_COUNT: usize = 5000;\n\ntype VertexTyp = i32;\ntype WeightTyp = f64;\n\nconst MAX_WEIGHT: WeightTyp = std::f64::INFINITY;\n\n#[derive(Clone)]\nstruct Neighbor {\n target: VertexTyp,\n weight: WeightTyp\n}\n\nimpl Neighbor {\n\tfn new(target: VertexTyp, weight: WeightTyp) -> Neighbor {\n\t\tNeighbor { target: target, weight: weight }\n\t}\n}\n\n\/\/ http:\/\/stackoverflow.com\/questions\/28247990\/how-to-do-a-binary-search-on-a-vec-of-floats\n#[derive(PartialEq, PartialOrd, Clone)]\nstruct NonNan(f64);\nimpl Eq for NonNan {}\nimpl Ord for NonNan {\n\tfn cmp(&self, other: &NonNan) -> std::cmp::Ordering {\n\t\tself.partial_cmp(other).unwrap()\n\t}\n}\n\ntype AdjListTyp = Vec<Vec<Neighbor>>;\n\nfn main() {\n\tlet file = File::open(\"others\/test.csv\").unwrap();\n\tlet reader = BufReader::new(&file);\n\n\tlet mut adj_list: AdjListTyp = Vec::with_capacity(NODES_COUNT);\n\tadj_list.extend(std::iter::repeat(Vec::new()).take(NODES_COUNT));\n\n\tfor line in reader.lines() {\n\t\tlet res = line.unwrap();\n\t\tlet mut split = res.split(\",\");\n\t\tlet from = split.next().unwrap().parse::<usize>().unwrap();\n\t\tlet to = split.next().unwrap().parse::<VertexTyp>().unwrap();\n\t\tlet weight = split.next().unwrap().parse::<WeightTyp>().unwrap();\n\t\t\n\t\tadj_list[from].push(Neighbor::new(to, weight));\n\t}\n\n\tlet mut min_dist: Vec<WeightTyp> = Vec::new();\n\tlet 
mut previous: Vec<VertexTyp> = Vec::new();\n\tdijkstra_compute_paths(0, &adj_list, &mut min_dist, &mut previous);\n\tprintln!(\"Distance from 0 to 4: {}\", min_dist[4]);\n\n\tlet path: VecDeque<VertexTyp> = dijkstra_shortest_path(4, &previous);\n\tprint!(\"Path : \");\n\tfor node in path {\n\t\tprint!(\"{} \", node);\n\t}\n\tprintln!(\"\");\n}\n\nfn dijkstra_compute_paths(source: VertexTyp, adj_list: &AdjListTyp,\n\t\t\t\t\t\t\tmin_dist: &mut Vec<WeightTyp>, previous: &mut Vec<VertexTyp>) {\n\tmin_dist.clear();\n\tmin_dist.extend(std::iter::repeat(MAX_WEIGHT).take(NODES_COUNT));\n\n\tmin_dist[source as usize] = 0.0;\n\t\n\tprevious.clear();\n\tprevious.extend(std::iter::repeat(-1).take(NODES_COUNT));\n\n\tlet mut vertex_queue: BTreeSet<(NonNan, VertexTyp)> = BTreeSet::new();\n\tvertex_queue.insert((NonNan(min_dist[source as usize]), source));\n\n\twhile !vertex_queue.is_empty() {\n\t\tlet first = vertex_queue.iter().cloned().next().unwrap();\n\t\tvertex_queue.remove(&first);\n\t\tlet (NonNan(dist), u) = first;\n\n\t\t\/\/ Visit each edge exiting first\n\t\tlet neighbors = &adj_list[first.1 as usize];\n\t\tfor neighbor in neighbors {\n\t\t\tlet v = neighbor.target as usize;\n\t\t\tlet weight = neighbor.weight;\n\n\t\t\tlet dist_through_first = dist + weight;\n\t\t\tif dist_through_first < min_dist[v] {\n\t\t\t\tvertex_queue.remove(&(NonNan(min_dist[v]), v as VertexTyp));\n\n\t\t\t\tmin_dist[v] = dist_through_first;\n\t\t\t\tprevious[v] = u;\n\t\t\t\tvertex_queue.insert((NonNan(min_dist[v]), v as VertexTyp));\n\t\t\t}\n\t\t}\n\t}\n}\n\nfn dijkstra_shortest_path(vertex: VertexTyp, previous: &Vec<VertexTyp>) -> VecDeque<VertexTyp> {\n\tlet mut done = false;\n\tlet mut path: VecDeque<VertexTyp> = VecDeque::new();\n\tlet mut vertex = vertex;\n\twhile !done {\n\t\tpath.push_front(vertex);\n\t\tvertex = previous[vertex as usize];\n\t\tif vertex == -1 {\n\t\t\tdone = true;\n\t\t}\n\t}\n\tpath\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Clippy pass<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Pieces pertaining to the HTTP message protocol.\nuse std::borrow::Cow;\n\nuse header::Connection;\nuse header::ConnectionOption::{KeepAlive, Close};\nuse header::Headers;\nuse version::HttpVersion;\nuse version::HttpVersion::{Http10, Http11};\n\npub use self::message::{HttpMessage, RequestHead, ResponseHead, Protocol};\n\npub mod h1;\npub mod h2;\npub mod message;\n\n\/\/\/ The raw status code and reason-phrase.\n#[derive(Clone, PartialEq, Debug)]\npub struct RawStatus(pub u16, pub Cow<'static, str>);\n\n\/\/\/ Checks if a connection should be kept alive.\n#[inline]\npub fn should_keep_alive(version: HttpVersion, headers: &Headers) -> bool {\n trace!(\"should_keep_alive( {:?}, {:?} )\", version, headers.get::<Connection>());\n match (version, headers.get::<Connection>()) {\n (Http10, Some(conn)) if !conn.contains(&KeepAlive) => false,\n (Http11, Some(conn)) if conn.contains(&Close) => false,\n _ => true\n }\n}\n<commit_msg>fix(http): no longer keep alive for Http1.0 if no Connection header<commit_after>\/\/! 
Pieces pertaining to the HTTP message protocol.\nuse std::borrow::Cow;\n\nuse header::Connection;\nuse header::ConnectionOption::{KeepAlive, Close};\nuse header::Headers;\nuse version::HttpVersion;\nuse version::HttpVersion::{Http10, Http11};\n\npub use self::message::{HttpMessage, RequestHead, ResponseHead, Protocol};\n\npub mod h1;\npub mod h2;\npub mod message;\n\n\/\/\/ The raw status code and reason-phrase.\n#[derive(Clone, PartialEq, Debug)]\npub struct RawStatus(pub u16, pub Cow<'static, str>);\n\n\/\/\/ Checks if a connection should be kept alive.\n#[inline]\npub fn should_keep_alive(version: HttpVersion, headers: &Headers) -> bool {\n trace!(\"should_keep_alive( {:?}, {:?} )\", version, headers.get::<Connection>());\n match (version, headers.get::<Connection>()) {\n (Http10, None) => false,\n (Http10, Some(conn)) if !conn.contains(&KeepAlive) => false,\n (Http11, Some(conn)) if conn.contains(&Close) => false,\n _ => true\n }\n}\n\n#[test]\nfn test_should_keep_alive() {\n let mut headers = Headers::new();\n\n assert!(!should_keep_alive(Http10, &headers));\n assert!(should_keep_alive(Http11, &headers));\n\n headers.set(Connection::close());\n assert!(!should_keep_alive(Http10, &headers));\n assert!(!should_keep_alive(Http11, &headers));\n\n headers.set(Connection::keep_alive());\n assert!(should_keep_alive(Http10, &headers));\n assert!(should_keep_alive(Http11, &headers));\n}\n<|endoftext|>"} {"text":"<commit_before>use std::cmp::max;\nuse std::collections::VecDeque;\nuse std::mem::size_of;\nuse std::{ptr, slice};\n\nuse super::{Color, Event, Font, Image, Rect};\n\nuse system::error::{Error, Result, EINVAL};\n\npub struct Window {\n pub x: i32,\n pub y: i32,\n image: Image,\n title: String,\n title_image: Image,\n events: VecDeque<Event>,\n}\n\nimpl Window {\n pub fn new(x: i32, y: i32, w: i32, h: i32, title: String) -> Window {\n let mut title_image = Image::new(title.chars().count() as i32 * 8, 16);\n title_image.as_roi().set(Color::rgba(0, 0, 0, 0));\n {\n let mut x = 0;\n for c in title.chars() {\n title_image.roi(&Rect::new(x, 0, 8, 16)).blend(&Font::render(c, Color::rgb(255, 255, 255)).as_roi());\n x += 8;\n }\n }\n\n Window {\n x: x,\n y: y,\n image: Image::new(w, h),\n title: title,\n title_image: title_image,\n events: VecDeque::new()\n }\n }\n\n pub fn width(&self) -> i32 {\n self.image.width()\n }\n\n pub fn height(&self) -> i32 {\n self.image.height()\n }\n\n pub fn rect(&self) -> Rect {\n Rect::new(self.x, self.y, self.width(), self.height())\n }\n\n pub fn title_rect(&self) -> Rect {\n if self.title.is_empty() {\n Rect::default()\n } else {\n Rect::new(self.x, self.y - 18, self.width(), 18)\n }\n }\n\n pub fn exit_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= max(self.x, self.x + self.width() - 10) && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn draw_title(&mut self, image: &mut Image, rect: &Rect, focused: bool) {\n let title_rect = self.title_rect();\n let title_intersect = rect.intersection(&title_rect);\n if ! title_intersect.is_empty() {\n if focused {\n image.roi(&title_intersect).set(Color::rgb(192, 192, 192));\n } else {\n image.roi(&title_intersect).set(Color::rgb(64, 64, 64));\n }\n\n {\n let image_rect = Rect::new(title_rect.left() + 4, title_rect.top() + 1, self.title_image.width(), self.title_image.height());\n let image_intersect = rect.intersection(&image_rect);\n if ! 
image_intersect.is_empty() {\n image.roi(&image_intersect).blend(&self.title_image.roi(&image_intersect.offset(-image_rect.left(), -image_rect.top())));\n }\n }\n\n let x = max(self.x + 2, self.x + self.width() - 10);\n if x + 10 <= self.x + self.width() {\n let mut font_image = Font::render('X', Color::rgb(255, 255, 255));\n let image_rect = Rect::new(x, title_rect.top() + 1, font_image.width(), font_image.height());\n let image_intersect = rect.intersection(&image_rect);\n if ! image_intersect.is_empty() {\n image.roi(&image_intersect).blend(&font_image.roi(&image_intersect.offset(-image_rect.left(), -image_rect.top())));\n }\n }\n }\n }\n\n pub fn draw(&mut self, image: &mut Image, rect: &Rect) {\n let self_rect = self.rect();\n let intersect = self_rect.intersection(&rect);\n if ! intersect.is_empty() {\n image.roi(&intersect).blit(&self.image.roi(&intersect.offset(-self_rect.left(), -self_rect.top())));\n }\n }\n\n pub fn event(&mut self, event: Event) {\n self.events.push_back(event);\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n if buf.len() >= size_of::<Event>() {\n let mut i = 0;\n while i <= buf.len() - size_of::<Event>() {\n if let Some(event) = self.events.pop_front() {\n unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };\n i += size_of::<Event>();\n } else {\n break;\n }\n }\n Ok(i)\n } else {\n Err(Error::new(EINVAL))\n }\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Result<usize> {\n let old = self.image.data_mut();\n let new = unsafe { slice::from_raw_parts(buf.as_ptr() as *const Color, buf.len() \/ size_of::<Color>()) };\n\n let mut i = 0;\n while i < old.len() && i < new.len() {\n old[i] = new[i];\n i += 1;\n }\n\n Ok(i * size_of::<Color>())\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Result<usize> {\n let mut i = 0;\n let path_str = format!(\"orbital:\/{}\/{}\/{}\/{}\/{}\", self.x, self.y, self.width(), self.height(), self.title);\n let path = path_str.as_bytes();\n while i < buf.len() && i < path.len() {\n buf[i] = path[i];\n i += 1;\n }\n Ok(i)\n }\n}\n<commit_msg>Correct rendering of title<commit_after>use std::cmp::max;\nuse std::collections::VecDeque;\nuse std::mem::size_of;\nuse std::{ptr, slice};\n\nuse super::{Color, Event, Font, Image, Rect};\n\nuse system::error::{Error, Result, EINVAL};\n\npub struct Window {\n pub x: i32,\n pub y: i32,\n image: Image,\n title: String,\n events: VecDeque<Event>,\n}\n\nimpl Window {\n pub fn new(x: i32, y: i32, w: i32, h: i32, title: String) -> Window {\n Window {\n x: x,\n y: y,\n image: Image::new(w, h),\n title: title,\n events: VecDeque::new()\n }\n }\n\n pub fn width(&self) -> i32 {\n self.image.width()\n }\n\n pub fn height(&self) -> i32 {\n self.image.height()\n }\n\n pub fn rect(&self) -> Rect {\n Rect::new(self.x, self.y, self.width(), self.height())\n }\n\n pub fn title_rect(&self) -> Rect {\n if self.title.is_empty() {\n Rect::default()\n } else {\n Rect::new(self.x, self.y - 18, self.width(), 18)\n }\n }\n\n pub fn exit_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= max(self.x, self.x + self.width() - 10) && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn draw_title(&mut self, image: &mut Image, rect: &Rect, focused: bool) {\n let title_rect = self.title_rect();\n let title_intersect = rect.intersection(&title_rect);\n if ! 
title_intersect.is_empty() {\n if focused {\n image.roi(&title_intersect).set(Color::rgb(192, 192, 192));\n } else {\n image.roi(&title_intersect).set(Color::rgb(64, 64, 64));\n }\n\n let mut x = self.x + 2;\n for c in self.title.chars() {\n if x < max(self.x + 2, self.x + self.width() - 10) {\n let mut font_image = Font::render(c, Color::rgb(255, 255, 255));\n let image_rect = Rect::new(x, title_rect.top() + 1, font_image.width(), font_image.height());\n let image_intersect = rect.intersection(&image_rect);\n if ! image_intersect.is_empty() {\n image.roi(&image_intersect).blend(&font_image.roi(&image_intersect.offset(-image_rect.left(), -image_rect.top())));\n }\n x += 8;\n } else {\n break;\n }\n }\n\n x = max(self.x + 2, self.x + self.width() - 10);\n if x + 10 <= self.x + self.width() {\n let mut font_image = Font::render('X', Color::rgb(255, 255, 255));\n let image_rect = Rect::new(x, title_rect.top() + 1, font_image.width(), font_image.height());\n let image_intersect = rect.intersection(&image_rect);\n if ! image_intersect.is_empty() {\n image.roi(&image_intersect).blend(&font_image.roi(&image_intersect.offset(-image_rect.left(), -image_rect.top())));\n }\n }\n }\n }\n\n pub fn draw(&mut self, image: &mut Image, rect: &Rect) {\n let self_rect = self.rect();\n let intersect = self_rect.intersection(&rect);\n if ! intersect.is_empty() {\n image.roi(&intersect).blit(&self.image.roi(&intersect.offset(-self_rect.left(), -self_rect.top())));\n }\n }\n\n pub fn event(&mut self, event: Event) {\n self.events.push_back(event);\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n if buf.len() >= size_of::<Event>() {\n let mut i = 0;\n while i <= buf.len() - size_of::<Event>() {\n if let Some(event) = self.events.pop_front() {\n unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };\n i += size_of::<Event>();\n } else {\n break;\n }\n }\n Ok(i)\n } else {\n Err(Error::new(EINVAL))\n }\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Result<usize> {\n let old = self.image.data_mut();\n let new = unsafe { slice::from_raw_parts(buf.as_ptr() as *const Color, buf.len() \/ size_of::<Color>()) };\n\n let mut i = 0;\n while i < old.len() && i < new.len() {\n old[i] = new[i];\n i += 1;\n }\n\n Ok(i * size_of::<Color>())\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Result<usize> {\n let mut i = 0;\n let path_str = format!(\"orbital:\/{}\/{}\/{}\/{}\/{}\", self.x, self.y, self.width(), self.height(), self.title);\n let path = path_str.as_bytes();\n while i < buf.len() && i < path.len() {\n buf[i] = path[i];\n i += 1;\n }\n Ok(i)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLHtmlElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLHtmlElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLHtmlElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId};\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLHtmlElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLHtmlElementDerived for EventTarget {\n fn is_htmlhtmlelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLHtmlElementTypeId))\n }\n}\n\nimpl HTMLHtmlElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLHtmlElement {\n HTMLHtmlElement {\n htmlelement: HTMLElement::new_inherited(HTMLHtmlElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLHtmlElement> {\n let element = HTMLHtmlElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLHtmlElementBinding::Wrap)\n }\n}\n\npub trait HTMLHtmlElementMethods {\n fn Version(&self) -> DOMString;\n fn SetVersion(&mut self, _version: DOMString) -> ErrorResult;\n}\n\nimpl<'a> HTMLHtmlElementMethods for JSRef<'a, HTMLHtmlElement> {\n fn Version(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetVersion(&mut self, _version: DOMString) -> ErrorResult {\n Ok(())\n }\n}\n<commit_msg>Remove needless '&mut self' from HTMLHtmlElementMethods.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::codegen::BindingDeclarations::HTMLHtmlElementBinding;\nuse dom::bindings::codegen::InheritTypes::HTMLHtmlElementDerived;\nuse dom::bindings::js::{JSRef, Temporary};\nuse dom::bindings::error::ErrorResult;\nuse dom::document::Document;\nuse dom::element::HTMLHtmlElementTypeId;\nuse dom::eventtarget::{EventTarget, NodeTargetTypeId};\nuse dom::htmlelement::HTMLElement;\nuse dom::node::{Node, ElementNodeTypeId};\nuse servo_util::str::DOMString;\n\n#[deriving(Encodable)]\npub struct HTMLHtmlElement {\n pub htmlelement: HTMLElement\n}\n\nimpl HTMLHtmlElementDerived for EventTarget {\n fn is_htmlhtmlelement(&self) -> bool {\n self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLHtmlElementTypeId))\n }\n}\n\nimpl HTMLHtmlElement {\n pub fn new_inherited(localName: DOMString, document: &JSRef<Document>) -> HTMLHtmlElement {\n HTMLHtmlElement {\n htmlelement: HTMLElement::new_inherited(HTMLHtmlElementTypeId, localName, document)\n }\n }\n\n pub fn new(localName: DOMString, document: &JSRef<Document>) -> Temporary<HTMLHtmlElement> {\n let element = HTMLHtmlElement::new_inherited(localName, document);\n Node::reflect_node(box element, document, HTMLHtmlElementBinding::Wrap)\n }\n}\n\npub trait HTMLHtmlElementMethods {\n fn Version(&self) -> DOMString;\n fn SetVersion(&self, _version: DOMString) -> ErrorResult;\n}\n\nimpl<'a> HTMLHtmlElementMethods for JSRef<'a, HTMLHtmlElement> {\n fn Version(&self) -> DOMString {\n \"\".to_owned()\n }\n\n fn SetVersion(&self, _version: DOMString) -> ErrorResult {\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>`FromHexError` isn't a good choice to use it in `FromStr`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue #13446<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ Used to cause ICE\n\nstatic VEC: [u32, ..256] = vec!(); \/\/~ ERROR mismatched types\n\nfn main() {}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #21573 - japaric:gh19660, r=huonw<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ error-pattern: requires `copy` lang_item\n\n#![feature(lang_items, start)]\n#![no_std]\n\n#[lang = \"sized\"]\ntrait Sized {}\n\n#[start]\nfn main(_: int, _: *const *const u8) -> int {\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #36089 - apasel422:issue-24204, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\ntrait MultiDispatch<T> {\n type O;\n}\n\ntrait Trait: Sized {\n type A: MultiDispatch<Self::B, O = Self>;\n type B;\n\n fn new<U>(u: U) -> <Self::A as MultiDispatch<U>>::O where Self::A : MultiDispatch<U>;\n}\n\nfn test<T: Trait<B=i32>>(b: i32) -> T where T::A: MultiDispatch<i32> { T::new(b) }\n\/\/~^ ERROR type mismatch resolving\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::ty;\nuse rustc_data_structures::indexed_vec::Idx;\nuse rustc_errors::DiagnosticBuilder;\nuse syntax_pos::Span;\n\nuse borrow_check::MirBorrowckCtxt;\nuse dataflow::move_paths::{IllegalMoveOrigin, IllegalMoveOriginKind};\nuse dataflow::move_paths::{LookupResult, MoveError, MovePathIndex};\nuse util::borrowck_errors::{BorrowckErrors, Origin};\n\n\/\/ Often when desugaring a pattern match we may have many individual moves in\n\/\/ MIR that are all part of one operation from the user's point-of-view. For\n\/\/ example:\n\/\/\n\/\/ let (x, y) = foo()\n\/\/\n\/\/ would move x from the 0 field of some temporary, and y from the 1 field. We\n\/\/ group such errors together for cleaner error reporting.\n\/\/\n\/\/ Errors are kept separate if they are from places with different parent move\n\/\/ paths. For example, this generates two errors:\n\/\/\n\/\/ let (&x, &y) = (&String::new(), &String::new());\n#[derive(Debug)]\nenum GroupedMoveError<'tcx> {\n \/\/ Match place can't be moved from\n \/\/ e.g. match x[0] { s => (), } where x: &[String]\n MovesFromMatchPlace {\n span: Span,\n move_from: Place<'tcx>,\n kind: IllegalMoveOriginKind<'tcx>,\n binds_to: Vec<Local>,\n },\n \/\/ Part of a pattern can't be moved from,\n \/\/ e.g. 
match &String::new() { &x => (), }\n MovesFromPattern {\n span: Span,\n move_from: MovePathIndex,\n kind: IllegalMoveOriginKind<'tcx>,\n binds_to: Vec<Local>,\n },\n \/\/ Everything that isn't from pattern matching.\n OtherIllegalMove {\n span: Span,\n kind: IllegalMoveOriginKind<'tcx>,\n },\n}\n\nimpl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {\n pub(crate) fn report_move_errors(&self, move_errors: Vec<MoveError<'tcx>>) {\n let grouped_errors = self.group_move_errors(move_errors);\n for error in grouped_errors {\n self.report(error);\n }\n }\n\n fn group_move_errors(&self, errors: Vec<MoveError<'tcx>>) -> Vec<GroupedMoveError<'tcx>> {\n let mut grouped_errors = Vec::new();\n for error in errors {\n self.append_to_grouped_errors(&mut grouped_errors, error);\n }\n grouped_errors\n }\n\n fn append_to_grouped_errors(\n &self,\n grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,\n error: MoveError<'tcx>,\n ) {\n match error {\n MoveError::UnionMove { .. } => {\n unimplemented!(\"don't know how to report union move errors yet.\")\n }\n MoveError::IllegalMove {\n cannot_move_out_of: IllegalMoveOrigin { location, kind },\n } => {\n let stmt_source_info = self.mir.source_info(location);\n if let Some(StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Move(move_from)),\n )) = self.mir.basic_blocks()[location.block]\n .statements\n .get(location.statement_index)\n .map(|stmt| &stmt.kind)\n {\n let local_decl = &self.mir.local_decls[*local];\n if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {\n opt_match_place: Some((ref opt_match_place, match_span)),\n binding_mode: _,\n opt_ty_info: _,\n }))) = local_decl.is_user_variable\n {\n \/\/ opt_match_place is the\n \/\/ match_span is the span of the expression being matched on\n \/\/ match *x.y { ... } match_place is Some(*x.y)\n \/\/ ^^^^ match_span is the span of *x.y\n \/\/ opt_match_place is None for let [mut] x = ... statements,\n \/\/ whether or not the right-hand side is a place expression\n\n \/\/ HACK use scopes to determine if this assignment is\n \/\/ the initialization of a variable.\n \/\/ FIXME(matthewjasper) This would probably be more\n \/\/ reliable if it used the ever initialized dataflow\n \/\/ but move errors are currently reported before the\n \/\/ rest of borrowck has run.\n if self\n .mir\n .is_sub_scope(local_decl.source_info.scope, stmt_source_info.scope)\n {\n self.append_binding_error(\n grouped_errors,\n kind,\n move_from,\n *local,\n opt_match_place,\n match_span,\n );\n }\n return;\n }\n }\n grouped_errors.push(GroupedMoveError::OtherIllegalMove {\n span: stmt_source_info.span,\n kind,\n });\n }\n }\n }\n\n fn append_binding_error(\n &self,\n grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,\n kind: IllegalMoveOriginKind<'tcx>,\n move_from: &Place<'tcx>,\n bind_to: Local,\n match_place: &Option<Place<'tcx>>,\n match_span: Span,\n ) {\n debug!(\n \"append_to_grouped_errors(match_place={:?}, match_span={:?})\",\n match_place, match_span\n );\n\n let from_simple_let = match_place.is_none();\n let match_place = match_place.as_ref().unwrap_or(move_from);\n\n match self.move_data.rev_lookup.find(match_place) {\n \/\/ Error with the match place\n LookupResult::Parent(_) => {\n for ge in &mut *grouped_errors {\n if let GroupedMoveError::MovesFromMatchPlace { span, binds_to, .. 
} = ge {\n if match_span == *span {\n debug!(\"appending local({:?}) to list\", bind_to);\n if !binds_to.is_empty() {\n binds_to.push(bind_to);\n }\n return;\n }\n }\n }\n debug!(\"found a new move error location\");\n\n \/\/ Don't need to point to x in let x = ... .\n let binds_to = if from_simple_let {\n vec![]\n } else {\n vec![bind_to]\n };\n grouped_errors.push(GroupedMoveError::MovesFromMatchPlace {\n span: match_span,\n move_from: match_place.clone(),\n kind,\n binds_to,\n });\n }\n \/\/ Error with the pattern\n LookupResult::Exact(_) => {\n let mpi = match self.move_data.rev_lookup.find(move_from) {\n LookupResult::Parent(Some(mpi)) => mpi,\n \/\/ move_from should be a projection from match_place.\n _ => unreachable!(\"Probably not unreachable...\"),\n };\n for ge in &mut *grouped_errors {\n if let GroupedMoveError::MovesFromPattern {\n span,\n move_from: other_mpi,\n binds_to,\n ..\n } = ge\n {\n if match_span == *span && mpi == *other_mpi {\n debug!(\"appending local({:?}) to list\", bind_to);\n binds_to.push(bind_to);\n return;\n }\n }\n }\n debug!(\"found a new move error location\");\n grouped_errors.push(GroupedMoveError::MovesFromPattern {\n span: match_span,\n move_from: mpi,\n kind,\n binds_to: vec![bind_to],\n });\n }\n };\n }\n\n fn report(&self, error: GroupedMoveError<'tcx>) {\n let (mut err, err_span) = {\n let (span, kind): (Span, &IllegalMoveOriginKind) = match error {\n GroupedMoveError::MovesFromMatchPlace { span, ref kind, .. }\n | GroupedMoveError::MovesFromPattern { span, ref kind, .. }\n | GroupedMoveError::OtherIllegalMove { span, ref kind } => (span, kind),\n };\n let origin = Origin::Mir;\n (\n match kind {\n IllegalMoveOriginKind::Static => {\n self.tcx.cannot_move_out_of(span, \"static item\", origin)\n }\n IllegalMoveOriginKind::BorrowedContent { target_place: place } => {\n \/\/ Inspect the type of the content behind the\n \/\/ borrow to provide feedback about why this\n \/\/ was a move rather than a copy.\n let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);\n match ty.sty {\n ty::TyArray(..) | ty::TySlice(..) 
=> self\n .tcx\n .cannot_move_out_of_interior_noncopy(span, ty, None, origin),\n ty::TyClosure(def_id, closure_substs)\n if !self.mir.upvar_decls.is_empty()\n && {\n match place {\n Place::Projection(ref proj) => {\n proj.base == Place::Local(Local::new(1))\n }\n Place::Local(_) | Place::Static(_) => unreachable!(),\n }\n } =>\n {\n let closure_kind_ty =\n closure_substs.closure_kind_ty(def_id, self.tcx);\n let closure_kind = closure_kind_ty.to_opt_closure_kind();\n let place_description = match closure_kind {\n Some(ty::ClosureKind::Fn) => {\n \"captured variable in an `Fn` closure\"\n }\n Some(ty::ClosureKind::FnMut) => {\n \"captured variable in an `FnMut` closure\"\n }\n Some(ty::ClosureKind::FnOnce) => {\n bug!(\"closure kind does not match first argument type\")\n }\n None => bug!(\"closure kind not inferred by borrowck\"),\n };\n self.tcx.cannot_move_out_of(span, place_description, origin)\n }\n _ => self\n .tcx\n .cannot_move_out_of(span, \"borrowed content\", origin),\n }\n }\n IllegalMoveOriginKind::InteriorOfTypeWithDestructor { container_ty: ty } => {\n self.tcx\n .cannot_move_out_of_interior_of_drop(span, ty, origin)\n }\n IllegalMoveOriginKind::InteriorOfSliceOrArray { ty, is_index } => self\n .tcx\n .cannot_move_out_of_interior_noncopy(span, ty, Some(*is_index), origin),\n },\n span,\n )\n };\n\n self.add_move_hints(error, &mut err, err_span);\n err.emit();\n }\n\n fn add_move_hints(\n &self,\n error: GroupedMoveError<'tcx>,\n err: &mut DiagnosticBuilder<'a>,\n span: Span,\n ) {\n match error {\n GroupedMoveError::MovesFromMatchPlace {\n mut binds_to,\n move_from,\n ..\n } => {\n \/\/ Ok to suggest a borrow, since the target can't be moved from\n \/\/ anyway.\n if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {\n match move_from {\n Place::Projection(ref proj)\n if self.suitable_to_remove_deref(proj, &snippet) =>\n {\n err.span_suggestion(\n span,\n \"consider removing this dereference operator\",\n format!(\"{}\", &snippet[1..]),\n );\n }\n _ => {\n err.span_suggestion(\n span,\n \"consider using a reference instead\",\n format!(\"&{}\", snippet),\n );\n }\n }\n\n binds_to.sort();\n binds_to.dedup();\n for local in binds_to {\n let bind_to = &self.mir.local_decls[local];\n let binding_span = bind_to.source_info.span;\n err.span_label(\n binding_span,\n format!(\n \"move occurs because {} has type `{}`, \\\n which does not implement the `Copy` trait\",\n bind_to.name.unwrap(),\n bind_to.ty\n ),\n );\n }\n }\n }\n GroupedMoveError::MovesFromPattern { mut binds_to, .. } => {\n \/\/ Suggest ref, since there might be a move in\n \/\/ another match arm\n binds_to.sort();\n binds_to.dedup();\n for local in binds_to {\n let bind_to = &self.mir.local_decls[local];\n let binding_span = bind_to.source_info.span;\n\n \/\/ Suggest ref mut when the user has already written mut.\n let ref_kind = match bind_to.mutability {\n Mutability::Not => \"ref\",\n Mutability::Mut => \"ref mut\",\n };\n match bind_to.name {\n Some(name) => {\n err.span_suggestion(\n binding_span,\n \"to prevent move, use ref or ref mut\",\n format!(\"{} {:?}\", ref_kind, name),\n );\n }\n None => {\n err.span_label(\n span,\n format!(\"Local {:?} is not suitable for ref\", bind_to),\n );\n }\n }\n }\n }\n \/\/ Nothing to suggest.\n GroupedMoveError::OtherIllegalMove { .. 
} => (),\n }\n }\n\n fn suitable_to_remove_deref(&self, proj: &PlaceProjection<'tcx>, snippet: &str) -> bool {\n let is_shared_ref = |ty: ty::Ty| match ty.sty {\n ty::TypeVariants::TyRef(.., hir::Mutability::MutImmutable) => true,\n _ => false,\n };\n\n proj.elem == ProjectionElem::Deref && snippet.starts_with('*') && match proj.base {\n Place::Local(local) => {\n let local_decl = &self.mir.local_decls[local];\n \/\/ If this is a temporary, then this could be from an\n \/\/ overloaded * operator.\n local_decl.is_user_variable.is_some() && is_shared_ref(local_decl.ty)\n }\n Place::Static(ref st) => is_shared_ref(st.ty),\n Place::Projection(ref proj) => match proj.elem {\n ProjectionElem::Field(_, ty) => is_shared_ref(ty),\n _ => false,\n },\n }\n }\n}\n<commit_msg>Fix #52416 - ice for move errors in unsafe blocks<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::ty;\nuse rustc_data_structures::indexed_vec::Idx;\nuse rustc_errors::DiagnosticBuilder;\nuse syntax_pos::Span;\n\nuse borrow_check::MirBorrowckCtxt;\nuse dataflow::move_paths::{IllegalMoveOrigin, IllegalMoveOriginKind};\nuse dataflow::move_paths::{LookupResult, MoveError, MovePathIndex};\nuse util::borrowck_errors::{BorrowckErrors, Origin};\n\n\/\/ Often when desugaring a pattern match we may have many individual moves in\n\/\/ MIR that are all part of one operation from the user's point-of-view. For\n\/\/ example:\n\/\/\n\/\/ let (x, y) = foo()\n\/\/\n\/\/ would move x from the 0 field of some temporary, and y from the 1 field. We\n\/\/ group such errors together for cleaner error reporting.\n\/\/\n\/\/ Errors are kept separate if they are from places with different parent move\n\/\/ paths. For example, this generates two errors:\n\/\/\n\/\/ let (&x, &y) = (&String::new(), &String::new());\n#[derive(Debug)]\nenum GroupedMoveError<'tcx> {\n \/\/ Match place can't be moved from\n \/\/ e.g. match x[0] { s => (), } where x: &[String]\n MovesFromMatchPlace {\n span: Span,\n move_from: Place<'tcx>,\n kind: IllegalMoveOriginKind<'tcx>,\n binds_to: Vec<Local>,\n },\n \/\/ Part of a pattern can't be moved from,\n \/\/ e.g. 
match &String::new() { &x => (), }\n MovesFromPattern {\n span: Span,\n move_from: MovePathIndex,\n kind: IllegalMoveOriginKind<'tcx>,\n binds_to: Vec<Local>,\n },\n \/\/ Everything that isn't from pattern matching.\n OtherIllegalMove {\n span: Span,\n kind: IllegalMoveOriginKind<'tcx>,\n },\n}\n\nimpl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {\n pub(crate) fn report_move_errors(&self, move_errors: Vec<MoveError<'tcx>>) {\n let grouped_errors = self.group_move_errors(move_errors);\n for error in grouped_errors {\n self.report(error);\n }\n }\n\n fn group_move_errors(&self, errors: Vec<MoveError<'tcx>>) -> Vec<GroupedMoveError<'tcx>> {\n let mut grouped_errors = Vec::new();\n for error in errors {\n self.append_to_grouped_errors(&mut grouped_errors, error);\n }\n grouped_errors\n }\n\n fn append_to_grouped_errors(\n &self,\n grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,\n error: MoveError<'tcx>,\n ) {\n match error {\n MoveError::UnionMove { .. } => {\n unimplemented!(\"don't know how to report union move errors yet.\")\n }\n MoveError::IllegalMove {\n cannot_move_out_of: IllegalMoveOrigin { location, kind },\n } => {\n let stmt_source_info = self.mir.source_info(location);\n if let Some(StatementKind::Assign(\n Place::Local(local),\n Rvalue::Use(Operand::Move(move_from)),\n )) = self.mir.basic_blocks()[location.block]\n .statements\n .get(location.statement_index)\n .map(|stmt| &stmt.kind)\n {\n let local_decl = &self.mir.local_decls[*local];\n \/\/ opt_match_place is the\n \/\/ match_span is the span of the expression being matched on\n \/\/ match *x.y { ... } match_place is Some(*x.y)\n \/\/ ^^^^ match_span is the span of *x.y\n \/\/\n \/\/ opt_match_place is None for let [mut] x = ... statements,\n \/\/ whether or not the right-hand side is a place expression\n if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {\n opt_match_place: Some((ref opt_match_place, match_span)),\n binding_mode: _,\n opt_ty_info: _,\n }))) = local_decl.is_user_variable\n {\n \/\/ HACK use scopes to determine if this assignment is\n \/\/ the initialization of a variable.\n \/\/ FIXME(matthewjasper) This would probably be more\n \/\/ reliable if it used the ever initialized dataflow\n \/\/ but move errors are currently reported before the\n \/\/ rest of borrowck has run.\n if self\n .mir\n .is_sub_scope(local_decl.source_info.scope, stmt_source_info.scope)\n {\n self.append_binding_error(\n grouped_errors,\n kind,\n move_from,\n *local,\n opt_match_place,\n match_span,\n );\n return;\n }\n }\n }\n grouped_errors.push(GroupedMoveError::OtherIllegalMove {\n span: stmt_source_info.span,\n kind,\n });\n }\n }\n }\n\n fn append_binding_error(\n &self,\n grouped_errors: &mut Vec<GroupedMoveError<'tcx>>,\n kind: IllegalMoveOriginKind<'tcx>,\n move_from: &Place<'tcx>,\n bind_to: Local,\n match_place: &Option<Place<'tcx>>,\n match_span: Span,\n ) {\n debug!(\n \"append_to_grouped_errors(match_place={:?}, match_span={:?})\",\n match_place, match_span\n );\n\n let from_simple_let = match_place.is_none();\n let match_place = match_place.as_ref().unwrap_or(move_from);\n\n match self.move_data.rev_lookup.find(match_place) {\n \/\/ Error with the match place\n LookupResult::Parent(_) => {\n for ge in &mut *grouped_errors {\n if let GroupedMoveError::MovesFromMatchPlace { span, binds_to, .. 
} = ge {\n if match_span == *span {\n debug!(\"appending local({:?}) to list\", bind_to);\n if !binds_to.is_empty() {\n binds_to.push(bind_to);\n }\n return;\n }\n }\n }\n debug!(\"found a new move error location\");\n\n \/\/ Don't need to point to x in let x = ... .\n let binds_to = if from_simple_let {\n vec![]\n } else {\n vec![bind_to]\n };\n grouped_errors.push(GroupedMoveError::MovesFromMatchPlace {\n span: match_span,\n move_from: match_place.clone(),\n kind,\n binds_to,\n });\n }\n \/\/ Error with the pattern\n LookupResult::Exact(_) => {\n let mpi = match self.move_data.rev_lookup.find(move_from) {\n LookupResult::Parent(Some(mpi)) => mpi,\n \/\/ move_from should be a projection from match_place.\n _ => unreachable!(\"Probably not unreachable...\"),\n };\n for ge in &mut *grouped_errors {\n if let GroupedMoveError::MovesFromPattern {\n span,\n move_from: other_mpi,\n binds_to,\n ..\n } = ge\n {\n if match_span == *span && mpi == *other_mpi {\n debug!(\"appending local({:?}) to list\", bind_to);\n binds_to.push(bind_to);\n return;\n }\n }\n }\n debug!(\"found a new move error location\");\n grouped_errors.push(GroupedMoveError::MovesFromPattern {\n span: match_span,\n move_from: mpi,\n kind,\n binds_to: vec![bind_to],\n });\n }\n };\n }\n\n fn report(&self, error: GroupedMoveError<'tcx>) {\n let (mut err, err_span) = {\n let (span, kind): (Span, &IllegalMoveOriginKind) = match error {\n GroupedMoveError::MovesFromMatchPlace { span, ref kind, .. }\n | GroupedMoveError::MovesFromPattern { span, ref kind, .. }\n | GroupedMoveError::OtherIllegalMove { span, ref kind } => (span, kind),\n };\n let origin = Origin::Mir;\n (\n match kind {\n IllegalMoveOriginKind::Static => {\n self.tcx.cannot_move_out_of(span, \"static item\", origin)\n }\n IllegalMoveOriginKind::BorrowedContent { target_place: place } => {\n \/\/ Inspect the type of the content behind the\n \/\/ borrow to provide feedback about why this\n \/\/ was a move rather than a copy.\n let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);\n match ty.sty {\n ty::TyArray(..) | ty::TySlice(..) 
=> self\n .tcx\n .cannot_move_out_of_interior_noncopy(span, ty, None, origin),\n ty::TyClosure(def_id, closure_substs)\n if !self.mir.upvar_decls.is_empty()\n && {\n match place {\n Place::Projection(ref proj) => {\n proj.base == Place::Local(Local::new(1))\n }\n Place::Local(_) | Place::Static(_) => unreachable!(),\n }\n } =>\n {\n let closure_kind_ty =\n closure_substs.closure_kind_ty(def_id, self.tcx);\n let closure_kind = closure_kind_ty.to_opt_closure_kind();\n let place_description = match closure_kind {\n Some(ty::ClosureKind::Fn) => {\n \"captured variable in an `Fn` closure\"\n }\n Some(ty::ClosureKind::FnMut) => {\n \"captured variable in an `FnMut` closure\"\n }\n Some(ty::ClosureKind::FnOnce) => {\n bug!(\"closure kind does not match first argument type\")\n }\n None => bug!(\"closure kind not inferred by borrowck\"),\n };\n self.tcx.cannot_move_out_of(span, place_description, origin)\n }\n _ => self\n .tcx\n .cannot_move_out_of(span, \"borrowed content\", origin),\n }\n }\n IllegalMoveOriginKind::InteriorOfTypeWithDestructor { container_ty: ty } => {\n self.tcx\n .cannot_move_out_of_interior_of_drop(span, ty, origin)\n }\n IllegalMoveOriginKind::InteriorOfSliceOrArray { ty, is_index } => self\n .tcx\n .cannot_move_out_of_interior_noncopy(span, ty, Some(*is_index), origin),\n },\n span,\n )\n };\n\n self.add_move_hints(error, &mut err, err_span);\n err.emit();\n }\n\n fn add_move_hints(\n &self,\n error: GroupedMoveError<'tcx>,\n err: &mut DiagnosticBuilder<'a>,\n span: Span,\n ) {\n match error {\n GroupedMoveError::MovesFromMatchPlace {\n mut binds_to,\n move_from,\n ..\n } => {\n \/\/ Ok to suggest a borrow, since the target can't be moved from\n \/\/ anyway.\n if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {\n match move_from {\n Place::Projection(ref proj)\n if self.suitable_to_remove_deref(proj, &snippet) =>\n {\n err.span_suggestion(\n span,\n \"consider removing this dereference operator\",\n format!(\"{}\", &snippet[1..]),\n );\n }\n _ => {\n err.span_suggestion(\n span,\n \"consider using a reference instead\",\n format!(\"&{}\", snippet),\n );\n }\n }\n\n binds_to.sort();\n binds_to.dedup();\n for local in binds_to {\n let bind_to = &self.mir.local_decls[local];\n let binding_span = bind_to.source_info.span;\n err.span_label(\n binding_span,\n format!(\n \"move occurs because {} has type `{}`, \\\n which does not implement the `Copy` trait\",\n bind_to.name.unwrap(),\n bind_to.ty\n ),\n );\n }\n }\n }\n GroupedMoveError::MovesFromPattern { mut binds_to, .. } => {\n \/\/ Suggest ref, since there might be a move in\n \/\/ another match arm\n binds_to.sort();\n binds_to.dedup();\n for local in binds_to {\n let bind_to = &self.mir.local_decls[local];\n let binding_span = bind_to.source_info.span;\n\n \/\/ Suggest ref mut when the user has already written mut.\n let ref_kind = match bind_to.mutability {\n Mutability::Not => \"ref\",\n Mutability::Mut => \"ref mut\",\n };\n match bind_to.name {\n Some(name) => {\n err.span_suggestion(\n binding_span,\n \"to prevent move, use ref or ref mut\",\n format!(\"{} {:?}\", ref_kind, name),\n );\n }\n None => {\n err.span_label(\n span,\n format!(\"Local {:?} is not suitable for ref\", bind_to),\n );\n }\n }\n }\n }\n \/\/ Nothing to suggest.\n GroupedMoveError::OtherIllegalMove { .. 
} => (),\n }\n }\n\n fn suitable_to_remove_deref(&self, proj: &PlaceProjection<'tcx>, snippet: &str) -> bool {\n let is_shared_ref = |ty: ty::Ty| match ty.sty {\n ty::TypeVariants::TyRef(.., hir::Mutability::MutImmutable) => true,\n _ => false,\n };\n\n proj.elem == ProjectionElem::Deref && snippet.starts_with('*') && match proj.base {\n Place::Local(local) => {\n let local_decl = &self.mir.local_decls[local];\n \/\/ If this is a temporary, then this could be from an\n \/\/ overloaded * operator.\n local_decl.is_user_variable.is_some() && is_shared_ref(local_decl.ty)\n }\n Place::Static(ref st) => is_shared_ref(st.ty),\n Place::Projection(ref proj) => match proj.elem {\n ProjectionElem::Field(_, ty) => is_shared_ref(ty),\n _ => false,\n },\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse deriving::generic::*;\nuse deriving::generic::ty::*;\n\nuse syntax::ast::{MetaItem, Expr, BinOpKind, ItemKind, VariantData};\nuse syntax::codemap::Span;\nuse syntax::ext::base::{ExtCtxt, Annotatable};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token::InternedString;\nuse syntax::ptr::P;\n\nfn is_clike_enum(item: &Annotatable) -> bool {\n match *item {\n Annotatable::Item(ref item) => {\n match item.node {\n ItemKind::Enum(ref enum_def, _) => {\n enum_def.variants.iter().all(|v|\n if let VariantData::Unit(..) = v.node.data {\n true\n } else {\n false\n }\n )\n }\n _ => false,\n }\n }\n _ => false,\n }\n}\n\npub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable))\n{\n \/\/ structures are equal if all fields are equal, and non equal, if\n \/\/ any fields are not equal or if the enum variants are different\n fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Eq, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::And, subexpr, eq)\n },\n cx.expr_bool(span, true),\n Box::new(|cx, span, _, _| cx.expr_bool(span, false)),\n cx, span, substr)\n }\n fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Ne, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::Or, subexpr, eq)\n },\n cx.expr_bool(span, false),\n Box::new(|cx, span, _, _| cx.expr_bool(span, true)),\n cx, span, substr)\n }\n\n macro_rules! 
md {\n ($name:expr, $f:ident) => { {\n let inline = cx.meta_word(span, InternedString::new(\"inline\"));\n let attrs = vec!(cx.attribute(span, inline));\n MethodDef {\n name: $name,\n generics: LifetimeBounds::empty(),\n explicit_self: borrowed_explicit_self(),\n args: vec!(borrowed_self()),\n ret_ty: Literal(path_local!(bool)),\n attributes: attrs,\n is_unsafe: false,\n combine_substructure: combine_substructure(Box::new(|a, b, c| {\n $f(a, b, c)\n }))\n }\n } }\n }\n\n \/\/ avoid defining `ne` if we can\n let mut methods = vec![md!(\"eq\", cs_eq)];\n if !is_clike_enum(item) {\n methods.push(md!(\"ne\", cs_ne));\n }\n\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: path_std!(cx, core::cmp::PartialEq),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n is_unsafe: false,\n methods: methods,\n associated_types: Vec::new(),\n };\n trait_def.expand(cx, mitem, item, push)\n}\n<commit_msg>derive: Skip PartialEq::ne for any zero-field enum or struct<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse deriving::generic::*;\nuse deriving::generic::ty::*;\n\nuse syntax::ast::{MetaItem, Expr, BinOpKind, ItemKind};\nuse syntax::codemap::Span;\nuse syntax::ext::base::{ExtCtxt, Annotatable};\nuse syntax::ext::build::AstBuilder;\nuse syntax::parse::token::InternedString;\nuse syntax::ptr::P;\n\nfn is_type_without_fields(item: &Annotatable) -> bool {\n if let Annotatable::Item(ref item) = *item {\n match item.node {\n ItemKind::Enum(ref enum_def, _) => {\n enum_def.variants.iter().all(|v| v.node.data.fields().is_empty())\n }\n ItemKind::Struct(ref variant_data, _) => {\n variant_data.fields().is_empty()\n }\n _ => false\n }\n } else {\n false\n }\n}\n\npub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,\n span: Span,\n mitem: &MetaItem,\n item: &Annotatable,\n push: &mut FnMut(Annotatable))\n{\n \/\/ structures are equal if all fields are equal, and non equal, if\n \/\/ any fields are not equal or if the enum variants are different\n fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Eq, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::And, subexpr, eq)\n },\n cx.expr_bool(span, true),\n Box::new(|cx, span, _, _| cx.expr_bool(span, false)),\n cx, span, substr)\n }\n fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {\n cs_fold(\n true, \/\/ use foldl\n |cx, span, subexpr, self_f, other_fs| {\n let other_f = match (other_fs.len(), other_fs.get(0)) {\n (1, Some(o_f)) => o_f,\n _ => cx.span_bug(span, \"not exactly 2 arguments in `derive(PartialEq)`\")\n };\n\n let eq = cx.expr_binary(span, BinOpKind::Ne, self_f, other_f.clone());\n\n cx.expr_binary(span, BinOpKind::Or, subexpr, eq)\n },\n cx.expr_bool(span, false),\n Box::new(|cx, span, _, _| 
cx.expr_bool(span, true)),\n cx, span, substr)\n }\n\n macro_rules! md {\n ($name:expr, $f:ident) => { {\n let inline = cx.meta_word(span, InternedString::new(\"inline\"));\n let attrs = vec!(cx.attribute(span, inline));\n MethodDef {\n name: $name,\n generics: LifetimeBounds::empty(),\n explicit_self: borrowed_explicit_self(),\n args: vec!(borrowed_self()),\n ret_ty: Literal(path_local!(bool)),\n attributes: attrs,\n is_unsafe: false,\n combine_substructure: combine_substructure(Box::new(|a, b, c| {\n $f(a, b, c)\n }))\n }\n } }\n }\n\n \/\/ avoid defining `ne` if we can\n \/\/ c-like enums, enums without any fields and structs without fields\n \/\/ can safely define only `eq`.\n let mut methods = vec![md!(\"eq\", cs_eq)];\n if !is_type_without_fields(item) {\n methods.push(md!(\"ne\", cs_ne));\n }\n\n let trait_def = TraitDef {\n span: span,\n attributes: Vec::new(),\n path: path_std!(cx, core::cmp::PartialEq),\n additional_bounds: Vec::new(),\n generics: LifetimeBounds::empty(),\n is_unsafe: false,\n methods: methods,\n associated_types: Vec::new(),\n };\n trait_def.expand(cx, mitem, item, push)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update slicing syntax<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for new StoreId::new() interface<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test error for `...` in expressions<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z parse-only -Z continue-parse-after-error\n\n\/\/ Make sure that inclusive ranges with `...` syntax don't parse.\n\n#![feature(inclusive_range_syntax, inclusive_range)]\n\nuse std::ops::RangeToInclusive;\n\nfn return_range_to() -> RangeToInclusive<i32> {\n return ...1; \/\/~ERROR `...` syntax cannot be used in expressions\n \/\/~^HELP Use `..` if you need an exclusive range (a < b)\n \/\/~^^HELP or `..=` if you need an inclusive range (a <= b)\n}\n\npub fn main() {\n let x = ...0; \/\/~ERROR `...` syntax cannot be used in expressions\n \/\/~^HELP Use `..` if you need an exclusive range (a < b)\n \/\/~^^HELP or `..=` if you need an inclusive range (a <= b)\n\n let x = 5...5; \/\/~ERROR `...` syntax cannot be used in expressions\n \/\/~^HELP Use `..` if you need an exclusive range (a < b)\n \/\/~^^HELP or `..=` if you need an inclusive range (a <= b)\n\n for _ in 0...1 {} \/\/~ERROR `...` syntax cannot be used in expressions\n \/\/~^HELP Use `..` if you need an exclusive range (a < b)\n \/\/~^^HELP or `..=` if you need an inclusive range (a <= b)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #70155.<commit_after>\/\/ check-pass\n#![allow(incomplete_features)]\n#![feature(const_generics)]\n\n\/\/ Regression test for #70155:\n\/\/ `RangeInclusive` should be usable with const generics\n\nstruct S<const R: std::ops::RangeInclusive<usize>>;\n\nconst C : S<{ 0 ..= 999 }> = S;\n\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #75695 - JohnTitor:regression-test, r=Dylan-DPC<commit_after>\/\/ build-pass\n\n\/\/ Regression test for #72793.\n\/\/ FIXME: This still shows ICE with `-Zmir-opt-level=2`.\n\n#![feature(type_alias_impl_trait)]\n\ntrait T { type Item; }\n\ntype Alias<'a> = impl T<Item = &'a ()>;\n\nstruct S;\nimpl<'a> T for &'a S {\n type Item = &'a ();\n}\n\nfn filter_positive<'a>() -> Alias<'a> {\n &S\n}\n\nfn with_positive(fun: impl Fn(Alias<'_>)) {\n fun(filter_positive());\n}\n\nfn main() {\n with_positive(|_| ());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Resolves issue #1 - properly create config directory and file if they don't exist - save defaults to that file after creation. 
load can then progress<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add dimension_test<commit_after>#[macro_use]\nextern crate yoga;\n\nuse yoga::{Direction, Node, Point, Undefined};\nuse yoga::FlexStyle::*;\n\n#[test]\nfn test_wrap_child() {\n\tlet mut root = Node::new();\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_layout = root_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_layout.left);\n\tassert_eq!(0.0, child_layout.top);\n\tassert_eq!(100.0, child_layout.width);\n\tassert_eq!(100.0, child_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_layout = root_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_layout.left);\n\tassert_eq!(0.0, child_layout.top);\n\tassert_eq!(100.0, child_layout.width);\n\tassert_eq!(100.0, child_layout.height);\n}\n\n#[test]\nfn test_wrap_grandchild() {\n\tlet mut root = Node::new();\n\n\tlet mut root_child_0 = Node::new();\n\n\tlet mut root_child_0_child_0 = Node::new();\n\n\tstyle!(root_child_0_child_0,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot_child_0.insert_child(&mut root_child_0_child_0, 0);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_0_child_0_layout = root_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_0_child_0_layout.left);\n\tassert_eq!(0.0, child_0_child_0_layout.top);\n\tassert_eq!(100.0, child_0_child_0_layout.width);\n\tassert_eq!(100.0, child_0_child_0_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_0_child_0_layout = root_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_0_child_0_layout.left);\n\tassert_eq!(0.0, child_0_child_0_layout.top);\n\tassert_eq!(100.0, child_0_child_0_layout.width);\n\tassert_eq!(100.0, child_0_child_0_layout.height);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>template for new rust files<commit_after>\/\/ This file is part of rust-web\/twig\n\/\/\n\/\/ For the copyright and license information, please view the LICENSE\n\/\/ file that was distributed with this source code.\n\n\/\/\/ 
Twig library for rust\n\/\/\/\n\/\/\/ @author NAME <EMAIL>\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ imports \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ exports \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![sanitizer_runtime]\n#![feature(alloc_system)]\n#![feature(sanitizer_runtime)]\n#![feature(staged_api)]\n#![no_std]\n#![unstable(feature = \"sanitizer_runtime_lib\",\n reason = \"internal implementation detail of sanitizers\",\n issue = \"0\")]\n\nextern crate alloc_system;\n\nuse alloc_system::System;\n\n#[global_allocator]\nstatic ALLOC: System = System;\n<commit_msg>[nll] librustc_msan: enable feature(nll) for bootstrap<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![sanitizer_runtime]\n#![feature(alloc_system)]\n#![cfg_attr(not(stage0), feature(nll))]\n#![feature(sanitizer_runtime)]\n#![feature(staged_api)]\n#![no_std]\n#![unstable(feature = \"sanitizer_runtime_lib\",\n reason = \"internal implementation detail of sanitizers\",\n issue = \"0\")]\n\nextern crate alloc_system;\n\nuse alloc_system::System;\n\n#[global_allocator]\nstatic ALLOC: System = System;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for collatz-conjecture case<commit_after>\/\/ return Ok(x) where x is the number of steps required to reach 1\npub fn collatz(n: u64) -> Result<u64, &'static str> {\n if n < 1 {\n return Err(\"Invalid number\");\n }\n\n let mut result = n;\n let mut steps = 0;\n\n while result != 1 {\n if result % 2 == 0 {\n result \/= 2;\n } else {\n result = 3 * result + 1;\n }\n\n steps += 1;\n }\n\n Ok(steps)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add \/launcher to api<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::{BorrowSet, BorrowData};\nuse borrow_check::place_ext::PlaceExt;\n\nuse rustc;\nuse rustc::hir;\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::region;\nuse rustc::mir::{self, Location, Place, Mir};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::RegionKind;\nuse rustc::ty::RegionKind::ReScope;\n\nuse rustc_data_structures::bitslice::BitwiseOperator;\nuse rustc_data_structures::indexed_set::IdxSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::sync::Lrc;\n\nuse dataflow::{BitDenotation, BlockSets, InitialFlow};\npub use dataflow::indexes::BorrowIndex;\nuse borrow_check::nll::region_infer::RegionInferenceContext;\nuse borrow_check::nll::ToRegionVid;\n\nuse std::rc::Rc;\n\n\/\/\/ `Borrows` stores the data used in the analyses that track the flow\n\/\/\/ of borrows.\n\/\/\/\n\/\/\/ It uniquely identifies every borrow (`Rvalue::Ref`) by a\n\/\/\/ `BorrowIndex`, and maps each such index to a `BorrowData`\n\/\/\/ describing the borrow. These indexes are used for representing the\n\/\/\/ borrows in compact bitvectors.\npub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n scope_tree: Lrc<region::ScopeTree>,\n root_scope: Option<region::Scope>,\n\n borrow_set: Rc<BorrowSet<'tcx>>,\n\n \/\/\/ NLL region inference context with which NLL queries should be resolved\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n}\n\nimpl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {\n crate fn new(\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n def_id: DefId,\n body_id: Option<hir::BodyId>,\n borrow_set: &Rc<BorrowSet<'tcx>>\n ) -> Self {\n let scope_tree = tcx.region_scope_tree(def_id);\n let root_scope = body_id.map(|body_id| {\n region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)\n });\n\n Borrows {\n tcx: tcx,\n mir: mir,\n borrow_set: borrow_set.clone(),\n scope_tree,\n root_scope,\n nonlexical_regioncx,\n }\n }\n\n crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }\n\n pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }\n\n pub fn location(&self, idx: BorrowIndex) -> &Location {\n &self.borrow_set.borrows[idx].reserve_location\n }\n\n \/\/\/ Add all borrows to the kill set, if those borrows are out of scope at `location`.\n \/\/\/ That means either they went out of either a nonlexical scope, if we care about those\n \/\/\/ at the moment, or the location represents a lexical EndRegion\n fn kill_loans_out_of_scope_at_location(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n let regioncx = &self.nonlexical_regioncx;\n\n \/\/ NOTE: The state associated with a given `location`\n \/\/ reflects the dataflow on entry to the statement. If it\n \/\/ does not contain `borrow_region`, then then that means\n \/\/ that the statement at `location` kills the borrow.\n \/\/\n \/\/ We are careful always to call this function *before* we\n \/\/ set up the gen-bits for the statement or\n \/\/ termanator. 
That way, if the effect of the statement or\n \/\/ terminator *does* introduce a new loan of the same\n \/\/ region, then setting that gen-bit will override any\n \/\/ potential kill introduced here.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n let borrow_region = borrow_data.region.to_region_vid();\n if !regioncx.region_contains_point(borrow_region, location) {\n sets.kill(&borrow_index);\n }\n }\n }\n\n fn kill_borrows_on_local(&self,\n sets: &mut BlockSets<BorrowIndex>,\n local: &rustc::mir::Local)\n {\n if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {\n sets.kill_all(borrow_indexes);\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {\n type Idx = BorrowIndex;\n fn name() -> &'static str { \"borrows\" }\n fn bits_per_block(&self) -> usize {\n self.borrow_set.borrows.len() * 2\n }\n\n fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {\n \/\/ no borrows of code region_scopes have been taken prior to\n \/\/ function execution, so this method has no effect on\n \/\/ `_sets`.\n }\n\n fn before_statement_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_statement_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::statement_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {\n panic!(\"could not find statement at location {:?}\");\n });\n\n self.kill_loans_out_of_scope_at_location(sets, location);\n\n match stmt.kind {\n mir::StatementKind::EndRegion(_) => {\n }\n\n mir::StatementKind::Assign(ref lhs, ref rhs) => {\n \/\/ Make sure there are no remaining borrows for variables\n \/\/ that are assigned over.\n if let Place::Local(ref local) = *lhs {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n\n \/\/ NOTE: if\/when the Assign case is revised to inspect\n \/\/ the assigned_place here, make sure to also\n \/\/ re-consider the current implementations of the\n \/\/ propagate_call_return method.\n\n if let mir::Rvalue::Ref(region, _, ref place) = *rhs {\n if place.is_unsafe_place(self.tcx, self.mir) { return; }\n let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndex for location {:?}\", location);\n });\n\n if let RegionKind::ReEmpty = region {\n \/\/ If the borrowed value dies before the borrow is used, the region for\n \/\/ the borrow can be empty. Don't track the borrow in that case.\n sets.kill(&index);\n return\n }\n\n assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndexs for region {:?}\", region);\n }).contains(&index));\n sets.gen(&index);\n\n \/\/ Issue #46746: Two-phase borrows handles\n \/\/ stmts of form `Tmp = &mut Borrow` ...\n match lhs {\n Place::Local(..) | Place::Static(..) => {} \/\/ okay\n Place::Projection(..) => {\n \/\/ ... can assign into projections,\n \/\/ e.g. `box (&mut _)`. 
Current\n \/\/ conservative solution: force\n \/\/ immediate activation here.\n sets.gen(&index);\n }\n }\n }\n }\n\n mir::StatementKind::StorageDead(local) => {\n \/\/ Make sure there are no remaining borrows for locals that\n \/\/ are gone out of scope.\n self.kill_borrows_on_local(sets, &local)\n }\n\n mir::StatementKind::InlineAsm { ref outputs, ref asm, .. } => {\n for (output, kind) in outputs.iter().zip(&asm.outputs) {\n if !kind.is_indirect && !kind.is_rw {\n \/\/ Make sure there are no remaining borrows for direct\n \/\/ output variables.\n if let Place::Local(ref local) = *output {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n }\n }\n }\n\n mir::StatementKind::SetDiscriminant { .. } |\n mir::StatementKind::StorageLive(..) |\n mir::StatementKind::Validate(..) |\n mir::StatementKind::UserAssertTy(..) |\n mir::StatementKind::Nop => {}\n\n }\n }\n\n fn before_terminator_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_terminator_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::terminator_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n\n let term = block.terminator();\n self.kill_loans_out_of_scope_at_location(sets, location);\n\n\n match term.kind {\n mir::TerminatorKind::Resume |\n mir::TerminatorKind::Return |\n mir::TerminatorKind::GeneratorDrop => {\n \/\/ When we return from the function, then all `ReScope`-style regions\n \/\/ are guaranteed to have ended.\n \/\/ Normally, there would be `EndRegion` statements that come before,\n \/\/ and hence most of these loans will already be dead -- but, in some cases\n \/\/ like unwind paths, we do not always emit `EndRegion` statements, so we\n \/\/ add some kills here as a \"backup\" and to avoid spurious error messages.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n if let ReScope(scope) = borrow_data.region {\n \/\/ Check that the scope is not actually a scope from a function that is\n \/\/ a parent of our closure. Note that the CallSite scope itself is\n \/\/ *outside* of the closure, for some weird reason.\n if let Some(root_scope) = self.root_scope {\n if *scope != root_scope &&\n self.scope_tree.is_subscope_of(*scope, root_scope)\n {\n sets.kill(&borrow_index);\n }\n }\n }\n }\n }\n mir::TerminatorKind::Abort |\n mir::TerminatorKind::SwitchInt {..} |\n mir::TerminatorKind::Drop {..} |\n mir::TerminatorKind::DropAndReplace {..} |\n mir::TerminatorKind::Call {..} |\n mir::TerminatorKind::Assert {..} |\n mir::TerminatorKind::Yield {..} |\n mir::TerminatorKind::Goto {..} |\n mir::TerminatorKind::FalseEdges {..} |\n mir::TerminatorKind::FalseUnwind {..} |\n mir::TerminatorKind::Unreachable => {}\n }\n }\n\n fn propagate_call_return(&self,\n _in_out: &mut IdxSet<BorrowIndex>,\n _call_bb: mir::BasicBlock,\n _dest_bb: mir::BasicBlock,\n _dest_place: &mir::Place) {\n \/\/ there are no effects on borrows from method call return...\n \/\/\n \/\/ ... 
but if overwriting a place can affect flow state, then\n \/\/ latter is not true; see NOTE on Assign case in\n \/\/ statement_effect_on_borrows.\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitwiseOperator for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn join(&self, pred1: usize, pred2: usize) -> usize {\n pred1 | pred2 \/\/ union effects of preds when computing reservations\n }\n}\n\nimpl<'a, 'gcx, 'tcx> InitialFlow for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn bottom_value() -> bool {\n false \/\/ bottom = nothing is reserved or activated yet\n }\n}\n\n<commit_msg>Instrument `statement_effect_on_borrows` for the `lhs = &place` case.<commit_after>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::{BorrowSet, BorrowData};\nuse borrow_check::place_ext::PlaceExt;\n\nuse rustc;\nuse rustc::hir;\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::region;\nuse rustc::mir::{self, Location, Place, Mir};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::RegionKind;\nuse rustc::ty::RegionKind::ReScope;\n\nuse rustc_data_structures::bitslice::BitwiseOperator;\nuse rustc_data_structures::indexed_set::IdxSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::sync::Lrc;\n\nuse dataflow::{BitDenotation, BlockSets, InitialFlow};\npub use dataflow::indexes::BorrowIndex;\nuse borrow_check::nll::region_infer::RegionInferenceContext;\nuse borrow_check::nll::ToRegionVid;\n\nuse std::rc::Rc;\n\n\/\/\/ `Borrows` stores the data used in the analyses that track the flow\n\/\/\/ of borrows.\n\/\/\/\n\/\/\/ It uniquely identifies every borrow (`Rvalue::Ref`) by a\n\/\/\/ `BorrowIndex`, and maps each such index to a `BorrowData`\n\/\/\/ describing the borrow. 
These indexes are used for representing the\n\/\/\/ borrows in compact bitvectors.\npub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n scope_tree: Lrc<region::ScopeTree>,\n root_scope: Option<region::Scope>,\n\n borrow_set: Rc<BorrowSet<'tcx>>,\n\n \/\/\/ NLL region inference context with which NLL queries should be resolved\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n}\n\nimpl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {\n crate fn new(\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n def_id: DefId,\n body_id: Option<hir::BodyId>,\n borrow_set: &Rc<BorrowSet<'tcx>>\n ) -> Self {\n let scope_tree = tcx.region_scope_tree(def_id);\n let root_scope = body_id.map(|body_id| {\n region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)\n });\n\n Borrows {\n tcx: tcx,\n mir: mir,\n borrow_set: borrow_set.clone(),\n scope_tree,\n root_scope,\n nonlexical_regioncx,\n }\n }\n\n crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }\n\n pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }\n\n pub fn location(&self, idx: BorrowIndex) -> &Location {\n &self.borrow_set.borrows[idx].reserve_location\n }\n\n \/\/\/ Add all borrows to the kill set, if those borrows are out of scope at `location`.\n \/\/\/ That means either they went out of either a nonlexical scope, if we care about those\n \/\/\/ at the moment, or the location represents a lexical EndRegion\n fn kill_loans_out_of_scope_at_location(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n let regioncx = &self.nonlexical_regioncx;\n\n \/\/ NOTE: The state associated with a given `location`\n \/\/ reflects the dataflow on entry to the statement. If it\n \/\/ does not contain `borrow_region`, then then that means\n \/\/ that the statement at `location` kills the borrow.\n \/\/\n \/\/ We are careful always to call this function *before* we\n \/\/ set up the gen-bits for the statement or\n \/\/ termanator. 
That way, if the effect of the statement or\n \/\/ terminator *does* introduce a new loan of the same\n \/\/ region, then setting that gen-bit will override any\n \/\/ potential kill introduced here.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n let borrow_region = borrow_data.region.to_region_vid();\n if !regioncx.region_contains_point(borrow_region, location) {\n sets.kill(&borrow_index);\n }\n }\n }\n\n fn kill_borrows_on_local(&self,\n sets: &mut BlockSets<BorrowIndex>,\n local: &rustc::mir::Local)\n {\n if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {\n sets.kill_all(borrow_indexes);\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {\n type Idx = BorrowIndex;\n fn name() -> &'static str { \"borrows\" }\n fn bits_per_block(&self) -> usize {\n self.borrow_set.borrows.len() * 2\n }\n\n fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {\n \/\/ no borrows of code region_scopes have been taken prior to\n \/\/ function execution, so this method has no effect on\n \/\/ `_sets`.\n }\n\n fn before_statement_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_statement_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::statement_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {\n panic!(\"could not find statement at location {:?}\");\n });\n\n self.kill_loans_out_of_scope_at_location(sets, location);\n\n match stmt.kind {\n mir::StatementKind::EndRegion(_) => {\n }\n\n mir::StatementKind::Assign(ref lhs, ref rhs) => {\n \/\/ Make sure there are no remaining borrows for variables\n \/\/ that are assigned over.\n if let Place::Local(ref local) = *lhs {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n\n \/\/ NOTE: if\/when the Assign case is revised to inspect\n \/\/ the assigned_place here, make sure to also\n \/\/ re-consider the current implementations of the\n \/\/ propagate_call_return method.\n\n if let mir::Rvalue::Ref(region, _, ref place) = *rhs {\n if place.is_unsafe_place(self.tcx, self.mir) { return; }\n let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndex for location {:?}\", location);\n });\n\n if let RegionKind::ReEmpty = region {\n \/\/ If the borrowed value dies before the borrow is used, the region for\n \/\/ the borrow can be empty. Don't track the borrow in that case.\n debug!(\"Borrows::statement_effect_on_borrows \\\n location: {:?} stmt: {:?} has empty region, killing {:?}\",\n location, stmt.kind, index);\n sets.kill(&index);\n return\n } else {\n debug!(\"Borrows::statement_effect_on_borrows location: {:?} stmt: {:?}\",\n location, stmt.kind);\n }\n\n assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndexs for region {:?}\", region);\n }).contains(&index));\n sets.gen(&index);\n\n \/\/ Issue #46746: Two-phase borrows handles\n \/\/ stmts of form `Tmp = &mut Borrow` ...\n match lhs {\n Place::Local(..) | Place::Static(..) 
=> {} \/\/ okay\n Place::Projection(..) => {\n \/\/ ... can assign into projections,\n \/\/ e.g. `box (&mut _)`. Current\n \/\/ conservative solution: force\n \/\/ immediate activation here.\n sets.gen(&index);\n }\n }\n }\n }\n\n mir::StatementKind::StorageDead(local) => {\n \/\/ Make sure there are no remaining borrows for locals that\n \/\/ are gone out of scope.\n self.kill_borrows_on_local(sets, &local)\n }\n\n mir::StatementKind::InlineAsm { ref outputs, ref asm, .. } => {\n for (output, kind) in outputs.iter().zip(&asm.outputs) {\n if !kind.is_indirect && !kind.is_rw {\n \/\/ Make sure there are no remaining borrows for direct\n \/\/ output variables.\n if let Place::Local(ref local) = *output {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n }\n }\n }\n\n mir::StatementKind::SetDiscriminant { .. } |\n mir::StatementKind::StorageLive(..) |\n mir::StatementKind::Validate(..) |\n mir::StatementKind::UserAssertTy(..) |\n mir::StatementKind::Nop => {}\n\n }\n }\n\n fn before_terminator_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_terminator_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::terminator_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n\n let term = block.terminator();\n self.kill_loans_out_of_scope_at_location(sets, location);\n\n\n match term.kind {\n mir::TerminatorKind::Resume |\n mir::TerminatorKind::Return |\n mir::TerminatorKind::GeneratorDrop => {\n \/\/ When we return from the function, then all `ReScope`-style regions\n \/\/ are guaranteed to have ended.\n \/\/ Normally, there would be `EndRegion` statements that come before,\n \/\/ and hence most of these loans will already be dead -- but, in some cases\n \/\/ like unwind paths, we do not always emit `EndRegion` statements, so we\n \/\/ add some kills here as a \"backup\" and to avoid spurious error messages.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n if let ReScope(scope) = borrow_data.region {\n \/\/ Check that the scope is not actually a scope from a function that is\n \/\/ a parent of our closure. Note that the CallSite scope itself is\n \/\/ *outside* of the closure, for some weird reason.\n if let Some(root_scope) = self.root_scope {\n if *scope != root_scope &&\n self.scope_tree.is_subscope_of(*scope, root_scope)\n {\n sets.kill(&borrow_index);\n }\n }\n }\n }\n }\n mir::TerminatorKind::Abort |\n mir::TerminatorKind::SwitchInt {..} |\n mir::TerminatorKind::Drop {..} |\n mir::TerminatorKind::DropAndReplace {..} |\n mir::TerminatorKind::Call {..} |\n mir::TerminatorKind::Assert {..} |\n mir::TerminatorKind::Yield {..} |\n mir::TerminatorKind::Goto {..} |\n mir::TerminatorKind::FalseEdges {..} |\n mir::TerminatorKind::FalseUnwind {..} |\n mir::TerminatorKind::Unreachable => {}\n }\n }\n\n fn propagate_call_return(&self,\n _in_out: &mut IdxSet<BorrowIndex>,\n _call_bb: mir::BasicBlock,\n _dest_bb: mir::BasicBlock,\n _dest_place: &mir::Place) {\n \/\/ there are no effects on borrows from method call return...\n \/\/\n \/\/ ... 
but if overwriting a place can affect flow state, then\n \/\/ latter is not true; see NOTE on Assign case in\n \/\/ statement_effect_on_borrows.\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitwiseOperator for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn join(&self, pred1: usize, pred2: usize) -> usize {\n pred1 | pred2 \/\/ union effects of preds when computing reservations\n }\n}\n\nimpl<'a, 'gcx, 'tcx> InitialFlow for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn bottom_value() -> bool {\n false \/\/ bottom = nothing is reserved or activated yet\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: adds tests for querying indices<commit_after>extern crate clap;\nextern crate regex;\n\ninclude!(\"..\/clap-test.rs\");\n\nuse clap::{App, ArgMatches, Arg, ErrorKind};\n\n#[test]\nfn indices_mult_opts() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.takes_value(true)\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.takes_value(true)\n\t\t\t.multiple(true))\n\t\t.get_matches_from(vec![\"ind\", \"-e\", \"A\", \"B\", \"-i\", \"B\", \"C\", \"-e\", \"C\"]);\n\n\t\tassert_eq!(m.indices_of(\"exclude\").unwrap().collect::<Vec<_>>(), &[2, 3, 8]);\n\t\tassert_eq!(m.indices_of(\"include\").unwrap().collect::<Vec<_>>(), &[5, 6]);\n}\n\n#[test]\nfn index_mult_opts() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.takes_value(true)\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.takes_value(true)\n\t\t\t.multiple(true))\n\t\t.get_matches_from(vec![\"ind\", \"-e\", \"A\", \"B\", \"-i\", \"B\", \"C\", \"-e\", \"C\"]);\n\n\t\tassert_eq!(m.index_of(\"exclude\"), Some(2));\n\t\tassert_eq!(m.index_of(\"include\"), Some(5));\n}\n\n#[test]\nfn index_flag() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\"))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\"))\n\t\t.get_matches_from(vec![\"ind\", \"-e\", \"-i\"]);\n\n\t\tassert_eq!(m.index_of(\"exclude\"), Some(1));\n\t\tassert_eq!(m.index_of(\"include\"), Some(2));\n}\n\n#[test]\nfn index_flags() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.multiple(true))\n\t\t.get_matches_from(vec![\"ind\", \"-e\", \"-i\", \"-e\", \"-e\", \"-i\"]);\n\n\t\tassert_eq!(m.index_of(\"exclude\"), Some(1));\n\t\tassert_eq!(m.index_of(\"include\"), Some(2));\n}\n\n#[test]\nfn indices_mult_flags() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.multiple(true))\n\t\t.get_matches_from(vec![\"ind\", \"-e\", \"-i\", \"-e\", \"-e\", \"-i\"]);\n\n\t\tassert_eq!(m.indices_of(\"exclude\").unwrap().collect::<Vec<_>>(), &[1, 3, 4]);\n\t\tassert_eq!(m.indices_of(\"include\").unwrap().collect::<Vec<_>>(), &[2, 5]);\n}\n\n#[test]\nfn indices_mult_flags_combined() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.multiple(true))\n\t\t.get_matches_from(vec![\"ind\", \"-eieei\"]);\n\n\t\tassert_eq!(m.indices_of(\"exclude\").unwrap().collect::<Vec<_>>(), &[1, 3, 4]);\n\t\tassert_eq!(m.indices_of(\"include\").unwrap().collect::<Vec<_>>(), &[2, 5]);\n}\n\n#[test]\nfn 
indices_mult_flags_opt_combined() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"option\")\n\t\t\t.short(\"0\")\n\t\t\t.takes_value(true))\n\t\t.get_matches_from(vec![\"ind\", \"-eieeio\", \"val\"]);\n\n\t\tassert_eq!(m.indices_of(\"exclude\").unwrap().collect::<Vec<_>>(), &[1, 3, 4]);\n\t\tassert_eq!(m.indices_of(\"include\").unwrap().collect::<Vec<_>>(), &[2, 5]);\n\t\tassert_eq!(m.indices_of(\"option\").unwrap().collect::<Vec<_>>(), &[7]);\n}\n\n#[test]\nfn indices_mult_flags_opt_combined_eq() {\n\tlet m = App::new(\"ind\")\n\t\t.arg(Arg::with_name(\"exclude\")\n\t\t\t.short(\"e\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"include\")\n\t\t\t.short(\"i\")\n\t\t\t.multiple(true))\n\t\t.arg(Arg::with_name(\"option\")\n\t\t\t.short(\"0\")\n\t\t\t.takes_value(true))\n\t\t.get_matches_from(vec![\"ind\", \"-eieeio=val\"]);\n\n\t\tassert_eq!(m.indices_of(\"exclude\").unwrap().collect::<Vec<_>>(), &[1, 3, 4]);\n\t\tassert_eq!(m.indices_of(\"include\").unwrap().collect::<Vec<_>>(), &[2, 5]);\n\t\tassert_eq!(m.indices_of(\"option\").unwrap().collect::<Vec<_>>(), &[7]);\n}\n\n#[test]\nfn indices_mult_opt_value_delim_eq() {\n let m = App::new(\"myapp\")\n\t .arg(Arg::with_name(\"option\")\n\t\t .short(\"o\")\n\t\t .takes_value(true)\n\t\t .multiple(true))\n\t .get_matches_from(vec![\"myapp\", \"-o=val1,val2,val3\"]);\n assert_eq!(m.indices_of(\"option\").unwrap().collect::<Vec<_>>(), &[2, 3, 4]);\n}\n\n#[test]\nfn indices_mult_opt_mult_flag() {\n let m = App::new(\"myapp\")\n .arg(Arg::with_name(\"option\")\n\t .short(\"o\")\n\t .takes_value(true)\n\t .multiple(true))\n .arg(Arg::with_name(\"flag\")\n\t .short(\"f\")\n\t .multiple(true))\n .get_matches_from(vec![\"myapp\", \"-o\", \"val1\", \"-f\", \"-o\", \"val2\", \"-f\"]);\n\n assert_eq!(m.indices_of(\"option\").unwrap().collect::<Vec<_>>(), &[2, 5]);\n assert_eq!(m.indices_of(\"flag\").unwrap().collect::<Vec<_>>(), &[3, 6]);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test case for #1894<commit_after>fn main() {\n \/\/ Make sure closing over can be a last use\n let q = ~10;\n let addr = ptr::addr_of(*q);\n let f = fn@() -> *int { ptr::addr_of(*q) };\n assert addr == f();\n\n \/\/ But only when it really is the last use\n let q = ~20;\n let f = fn@() -> *int { ptr::addr_of(*q) };\n assert ptr::addr_of(*q) != f();\n\n \/\/ Ensure function arguments and box arguments interact sanely.\n fn call_me(x: fn() -> int, y: ~int) { assert x() == *y; }\n let q = ~30;\n call_me({|| *q}, q);\n\n \/\/ Check that no false positives are found in loops.\n let q = ~40, p = 10;\n while true {\n let i = q;\n p += *i;\n if p > 100 { break; }\n }\n\n \/\/ Verify that blocks can't interfere with each other.\n fn two_blocks(a: fn(), b: fn()) { a(); b(); a(); b(); }\n let q = ~50;\n two_blocks({|| let a = q; assert *a == 50;},\n {|| let a = q; assert *a == 50;});\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>bench: Add threadring shootout benchmark<commit_after>\/\/ Based on threadring.erlang by Jira Isa\nuse std;\n\nconst n_threads: int = 503;\n\nfn start(+token: int) {\n import iter::*;\n\n let p = comm::port();\n let ch = iter::foldl(bind int::range(2, n_threads + 1, _),\n comm::chan(p)) { |ch, i|\n \/\/ FIXME: Some twiddling because we don't have a standard\n \/\/ reverse range function yet\n let id = n_threads + 2 - i;\n let {to_child, _} = 
task::spawn_connected::<int, int> {|p, _ch|\n roundtrip(id, p, ch)\n };\n to_child\n };\n comm::send(ch, token);\n roundtrip(1, p, ch);\n}\n\nfn roundtrip(id: int, p: comm::port<int>, ch: comm::chan<int>) {\n while (true) {\n alt comm::recv(p) {\n 1 {\n std::io::println(#fmt(\"%d\\n\", id));\n ret;\n }\n token {\n #debug(\"%d %d\", id, token);\n comm::send(ch, token - 1);\n if token <= n_threads {\n ret;\n }\n }\n }\n }\n}\n\nfn main(args: [str]) {\n let token = if vec::len(args) < 2u {\n 1000\n } else {\n int::from_str(args[1])\n };\n\n start(token);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Stopped implementing instructions and instead took some notes on the instruction format, to decode similar data quickly on the fly.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Store Binary Opcode<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>f opcode match<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::place_ext::PlaceExt;\nuse dataflow::indexes::BorrowIndex;\nuse rustc::mir::traversal;\nuse rustc::mir::visit::{PlaceContext, Visitor};\nuse rustc::mir::{self, Location, Mir, Place};\nuse rustc::ty::{Region, TyCtxt};\nuse rustc::util::nodemap::{FxHashMap, FxHashSet};\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse std::fmt;\nuse std::hash::Hash;\nuse std::ops::Index;\n\ncrate struct BorrowSet<'tcx> {\n \/\/\/ The fundamental map relating bitvector indexes to the borrows\n \/\/\/ in the MIR.\n crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,\n\n \/\/\/ Each borrow is also uniquely identified in the MIR by the\n \/\/\/ `Location` of the assignment statement in which it appears on\n \/\/\/ the right hand side; we map each such location to the\n \/\/\/ corresponding `BorrowIndex`.\n crate location_map: FxHashMap<Location, BorrowIndex>,\n\n \/\/\/ Locations which activate borrows.\n \/\/\/ NOTE: A given location may activate more than one borrow in the future\n \/\/\/ when more general two-phase borrow support is introduced, but for now we\n \/\/\/ only need to store one borrow index\n crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>,\n\n \/\/\/ Every borrow has a region; this maps each such regions back to\n \/\/\/ its borrow-indexes.\n crate region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,\n\n \/\/\/ Map from local to all the borrows on that local\n crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,\n}\n\nimpl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {\n type Output = BorrowData<'tcx>;\n\n fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {\n &self.borrows[index]\n }\n}\n\n\/\/\/ Location where a two phase borrow is activated, if a borrow\n\/\/\/ is in fact a two phase borrow.\n#[derive(Copy, Clone, PartialEq, Eq, Debug)]\ncrate enum TwoPhaseActivation {\n NotTwoPhase,\n NotActivated,\n ActivatedAt(Location),\n}\n\n#[derive(Debug)]\ncrate struct BorrowData<'tcx> {\n \/\/\/ Location where the borrow reservation starts.\n \/\/\/ In many cases, this will be equal to the 
activation location but not always.\n crate reserve_location: Location,\n \/\/\/ Location where the borrow is activated.\n crate activation_location: TwoPhaseActivation,\n \/\/\/ What kind of borrow this is\n crate kind: mir::BorrowKind,\n \/\/\/ The region for which this borrow is live\n crate region: Region<'tcx>,\n \/\/\/ Place from which we are borrowing\n crate borrowed_place: mir::Place<'tcx>,\n \/\/\/ Place to which the borrow was stored\n crate assigned_place: mir::Place<'tcx>,\n}\n\nimpl<'tcx> fmt::Display for BorrowData<'tcx> {\n fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {\n let kind = match self.kind {\n mir::BorrowKind::Shared => \"\",\n mir::BorrowKind::Unique => \"uniq \",\n mir::BorrowKind::Mut { .. } => \"mut \",\n };\n let region = format!(\"{}\", self.region);\n let region = if region.len() > 0 {\n format!(\"{} \", region)\n } else {\n region\n };\n write!(w, \"&{}{}{:?}\", region, kind, self.borrowed_place)\n }\n}\n\nimpl<'tcx> BorrowSet<'tcx> {\n pub fn build(tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> Self {\n let mut visitor = GatherBorrows {\n tcx,\n mir,\n idx_vec: IndexVec::new(),\n location_map: FxHashMap(),\n activation_map: FxHashMap(),\n region_map: FxHashMap(),\n local_map: FxHashMap(),\n pending_activations: FxHashMap(),\n };\n\n for (block, block_data) in traversal::preorder(mir) {\n visitor.visit_basic_block_data(block, block_data);\n }\n\n BorrowSet {\n borrows: visitor.idx_vec,\n location_map: visitor.location_map,\n activation_map: visitor.activation_map,\n region_map: visitor.region_map,\n local_map: visitor.local_map,\n }\n }\n\n crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {\n self.activation_map\n .get(&location)\n .map(|activations| &activations[..])\n .unwrap_or(&[])\n }\n}\n\nstruct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,\n location_map: FxHashMap<Location, BorrowIndex>,\n activation_map: FxHashMap<Location, Vec<BorrowIndex>>,\n region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,\n local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,\n\n \/\/\/ When we encounter a 2-phase borrow statement, it will always\n \/\/\/ be assigning into a temporary TEMP:\n \/\/\/\n \/\/\/ TEMP = &foo\n \/\/\/\n \/\/\/ We add TEMP into this map with `b`, where `b` is the index of\n \/\/\/ the borrow. 
When we find a later use of this activation, we\n \/\/\/ remove from the map (and add to the \"tombstone\" set below).\n pending_activations: FxHashMap<mir::Local, BorrowIndex>,\n}\n\nimpl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {\n fn visit_assign(\n &mut self,\n block: mir::BasicBlock,\n assigned_place: &mir::Place<'tcx>,\n rvalue: &mir::Rvalue<'tcx>,\n location: mir::Location,\n ) {\n if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {\n if borrowed_place.is_unsafe_place(self.tcx, self.mir) {\n return;\n }\n\n let borrow = BorrowData {\n kind,\n region,\n reserve_location: location,\n activation_location: TwoPhaseActivation::NotTwoPhase,\n borrowed_place: borrowed_place.clone(),\n assigned_place: assigned_place.clone(),\n };\n let idx = self.idx_vec.push(borrow);\n self.location_map.insert(location, idx);\n\n self.insert_as_pending_if_two_phase(location, &assigned_place, region, kind, idx);\n\n insert(&mut self.region_map, ®ion, idx);\n if let Some(local) = borrowed_place.root_local() {\n insert(&mut self.local_map, &local, idx);\n }\n }\n\n return self.super_assign(block, assigned_place, rvalue, location);\n\n fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>, k: &K, v: V)\n where\n K: Clone + Eq + Hash,\n V: Eq + Hash,\n {\n map.entry(k.clone()).or_insert(FxHashSet()).insert(v);\n }\n }\n\n fn visit_place(\n &mut self,\n place: &mir::Place<'tcx>,\n context: PlaceContext<'tcx>,\n location: Location,\n ) {\n self.super_place(place, context, location);\n\n \/\/ We found a use of some temporary TEMP...\n if let Place::Local(temp) = place {\n \/\/ ... check whether we (earlier) saw a 2-phase borrow like\n \/\/\n \/\/ TMP = &mut place\n match self.pending_activations.get(temp) {\n Some(&borrow_index) => {\n let borrow_data = &mut self.idx_vec[borrow_index];\n\n \/\/ Watch out: the use of TMP in the borrow itself\n \/\/ doesn't count as an activation. =)\n if borrow_data.reserve_location == location && context == PlaceContext::Store {\n return;\n }\n\n if let TwoPhaseActivation::ActivatedAt(other_location) =\n borrow_data.activation_location {\n span_bug!(\n self.mir.source_info(location).span,\n \"found two uses for 2-phase borrow temporary {:?}: \\\n {:?} and {:?}\",\n temp,\n location,\n other_location,\n );\n }\n\n \/\/ Otherwise, this is the unique later use\n \/\/ that we expect.\n borrow_data.activation_location = match context {\n \/\/ The use of TMP in a shared borrow does not\n \/\/ count as an actual activation.\n PlaceContext::Borrow { kind: mir::BorrowKind::Shared, .. 
} => {\n TwoPhaseActivation::NotActivated\n }\n _ => {\n \/\/ Double check: We should have found an activation for every pending\n \/\/ activation.\n assert_eq!(\n borrow_data.activation_location,\n TwoPhaseActivation::NotActivated,\n \"never found an activation for this borrow!\",\n );\n\n self.activation_map\n .entry(location)\n .or_insert(Vec::new())\n .push(borrow_index);\n TwoPhaseActivation::ActivatedAt(location)\n }\n };\n }\n\n None => {}\n }\n }\n }\n\n fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: mir::Location) {\n if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {\n \/\/ double-check that we already registered a BorrowData for this\n\n let borrow_index = self.location_map[&location];\n let borrow_data = &self.idx_vec[borrow_index];\n assert_eq!(borrow_data.reserve_location, location);\n assert_eq!(borrow_data.kind, kind);\n assert_eq!(borrow_data.region, region);\n assert_eq!(borrow_data.borrowed_place, *place);\n }\n\n return self.super_rvalue(rvalue, location);\n }\n\n fn visit_statement(\n &mut self,\n block: mir::BasicBlock,\n statement: &mir::Statement<'tcx>,\n location: Location,\n ) {\n return self.super_statement(block, statement, location);\n }\n}\n\nimpl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {\n \/\/\/ Returns true if the borrow represented by `kind` is\n \/\/\/ allowed to be split into separate Reservation and\n \/\/\/ Activation phases.\n fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {\n self.tcx.two_phase_borrows()\n && (kind.allows_two_phase_borrow()\n || self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)\n }\n\n \/\/\/ If this is a two-phase borrow, then we will record it\n \/\/\/ as \"pending\" until we find the activating use.\n fn insert_as_pending_if_two_phase(\n &mut self,\n start_location: Location,\n assigned_place: &mir::Place<'tcx>,\n region: Region<'tcx>,\n kind: mir::BorrowKind,\n borrow_index: BorrowIndex,\n ) {\n debug!(\n \"Borrows::insert_as_pending_if_two_phase({:?}, {:?}, {:?}, {:?})\",\n start_location, assigned_place, region, borrow_index,\n );\n\n if !self.allow_two_phase_borrow(kind) {\n debug!(\" -> {:?}\", start_location);\n return;\n }\n\n \/\/ When we encounter a 2-phase borrow statement, it will always\n \/\/ be assigning into a temporary TEMP:\n \/\/\n \/\/ TEMP = &foo\n \/\/\n \/\/ so extract `temp`.\n let temp = if let &mir::Place::Local(temp) = assigned_place {\n temp\n } else {\n span_bug!(\n self.mir.source_info(start_location).span,\n \"expected 2-phase borrow to assign to a local, not `{:?}`\",\n assigned_place,\n );\n };\n\n \/\/ Consider the borrow not activated.\n let borrow_data = &mut self.idx_vec[borrow_index];\n borrow_data.activation_location = TwoPhaseActivation::NotActivated;\n\n \/\/ Insert `temp` into the list of pending activations. From\n \/\/ now on, we'll be on the lookout for a use of it. Note that\n \/\/ we are guaranteed that this use will come after the\n \/\/ assignment.\n let old_value = self.pending_activations.insert(temp, borrow_index);\n assert!(old_value.is_none());\n }\n}\n<commit_msg>Improve comments.<commit_after>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::place_ext::PlaceExt;\nuse dataflow::indexes::BorrowIndex;\nuse rustc::mir::traversal;\nuse rustc::mir::visit::{PlaceContext, Visitor};\nuse rustc::mir::{self, Location, Mir, Place};\nuse rustc::ty::{Region, TyCtxt};\nuse rustc::util::nodemap::{FxHashMap, FxHashSet};\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse std::fmt;\nuse std::hash::Hash;\nuse std::ops::Index;\n\ncrate struct BorrowSet<'tcx> {\n \/\/\/ The fundamental map relating bitvector indexes to the borrows\n \/\/\/ in the MIR.\n crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,\n\n \/\/\/ Each borrow is also uniquely identified in the MIR by the\n \/\/\/ `Location` of the assignment statement in which it appears on\n \/\/\/ the right hand side; we map each such location to the\n \/\/\/ corresponding `BorrowIndex`.\n crate location_map: FxHashMap<Location, BorrowIndex>,\n\n \/\/\/ Locations which activate borrows.\n \/\/\/ NOTE: A given location may activate more than one borrow in the future\n \/\/\/ when more general two-phase borrow support is introduced, but for now we\n \/\/\/ only need to store one borrow index\n crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>,\n\n \/\/\/ Every borrow has a region; this maps each such regions back to\n \/\/\/ its borrow-indexes.\n crate region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,\n\n \/\/\/ Map from local to all the borrows on that local\n crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,\n}\n\nimpl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {\n type Output = BorrowData<'tcx>;\n\n fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {\n &self.borrows[index]\n }\n}\n\n\/\/\/ Location where a two phase borrow is activated, if a borrow\n\/\/\/ is in fact a two phase borrow.\n#[derive(Copy, Clone, PartialEq, Eq, Debug)]\ncrate enum TwoPhaseActivation {\n NotTwoPhase,\n NotActivated,\n ActivatedAt(Location),\n}\n\n#[derive(Debug)]\ncrate struct BorrowData<'tcx> {\n \/\/\/ Location where the borrow reservation starts.\n \/\/\/ In many cases, this will be equal to the activation location but not always.\n crate reserve_location: Location,\n \/\/\/ Location where the borrow is activated.\n crate activation_location: TwoPhaseActivation,\n \/\/\/ What kind of borrow this is\n crate kind: mir::BorrowKind,\n \/\/\/ The region for which this borrow is live\n crate region: Region<'tcx>,\n \/\/\/ Place from which we are borrowing\n crate borrowed_place: mir::Place<'tcx>,\n \/\/\/ Place to which the borrow was stored\n crate assigned_place: mir::Place<'tcx>,\n}\n\nimpl<'tcx> fmt::Display for BorrowData<'tcx> {\n fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {\n let kind = match self.kind {\n mir::BorrowKind::Shared => \"\",\n mir::BorrowKind::Unique => \"uniq \",\n mir::BorrowKind::Mut { .. 
} => \"mut \",\n };\n let region = format!(\"{}\", self.region);\n let region = if region.len() > 0 {\n format!(\"{} \", region)\n } else {\n region\n };\n write!(w, \"&{}{}{:?}\", region, kind, self.borrowed_place)\n }\n}\n\nimpl<'tcx> BorrowSet<'tcx> {\n pub fn build(tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> Self {\n let mut visitor = GatherBorrows {\n tcx,\n mir,\n idx_vec: IndexVec::new(),\n location_map: FxHashMap(),\n activation_map: FxHashMap(),\n region_map: FxHashMap(),\n local_map: FxHashMap(),\n pending_activations: FxHashMap(),\n };\n\n for (block, block_data) in traversal::preorder(mir) {\n visitor.visit_basic_block_data(block, block_data);\n }\n\n BorrowSet {\n borrows: visitor.idx_vec,\n location_map: visitor.location_map,\n activation_map: visitor.activation_map,\n region_map: visitor.region_map,\n local_map: visitor.local_map,\n }\n }\n\n crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {\n self.activation_map\n .get(&location)\n .map(|activations| &activations[..])\n .unwrap_or(&[])\n }\n}\n\nstruct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,\n location_map: FxHashMap<Location, BorrowIndex>,\n activation_map: FxHashMap<Location, Vec<BorrowIndex>>,\n region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,\n local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,\n\n \/\/\/ When we encounter a 2-phase borrow statement, it will always\n \/\/\/ be assigning into a temporary TEMP:\n \/\/\/\n \/\/\/ TEMP = &foo\n \/\/\/\n \/\/\/ We add TEMP into this map with `b`, where `b` is the index of\n \/\/\/ the borrow. When we find a later use of this activation, we\n \/\/\/ remove from the map (and add to the \"tombstone\" set below).\n pending_activations: FxHashMap<mir::Local, BorrowIndex>,\n}\n\nimpl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {\n fn visit_assign(\n &mut self,\n block: mir::BasicBlock,\n assigned_place: &mir::Place<'tcx>,\n rvalue: &mir::Rvalue<'tcx>,\n location: mir::Location,\n ) {\n if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {\n if borrowed_place.is_unsafe_place(self.tcx, self.mir) {\n return;\n }\n\n let borrow = BorrowData {\n kind,\n region,\n reserve_location: location,\n activation_location: TwoPhaseActivation::NotTwoPhase,\n borrowed_place: borrowed_place.clone(),\n assigned_place: assigned_place.clone(),\n };\n let idx = self.idx_vec.push(borrow);\n self.location_map.insert(location, idx);\n\n self.insert_as_pending_if_two_phase(location, &assigned_place, region, kind, idx);\n\n insert(&mut self.region_map, ®ion, idx);\n if let Some(local) = borrowed_place.root_local() {\n insert(&mut self.local_map, &local, idx);\n }\n }\n\n return self.super_assign(block, assigned_place, rvalue, location);\n\n fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>, k: &K, v: V)\n where\n K: Clone + Eq + Hash,\n V: Eq + Hash,\n {\n map.entry(k.clone()).or_insert(FxHashSet()).insert(v);\n }\n }\n\n fn visit_place(\n &mut self,\n place: &mir::Place<'tcx>,\n context: PlaceContext<'tcx>,\n location: Location,\n ) {\n self.super_place(place, context, location);\n\n \/\/ We found a use of some temporary TEMP...\n if let Place::Local(temp) = place {\n \/\/ ... 
check whether we (earlier) saw a 2-phase borrow like\n \/\/\n \/\/ TMP = &mut place\n match self.pending_activations.get(temp) {\n Some(&borrow_index) => {\n let borrow_data = &mut self.idx_vec[borrow_index];\n\n \/\/ Watch out: the use of TMP in the borrow itself\n \/\/ doesn't count as an activation. =)\n if borrow_data.reserve_location == location && context == PlaceContext::Store {\n return;\n }\n\n if let TwoPhaseActivation::ActivatedAt(other_location) =\n borrow_data.activation_location {\n span_bug!(\n self.mir.source_info(location).span,\n \"found two uses for 2-phase borrow temporary {:?}: \\\n {:?} and {:?}\",\n temp,\n location,\n other_location,\n );\n }\n\n \/\/ Otherwise, this is the unique later use\n \/\/ that we expect.\n borrow_data.activation_location = match context {\n \/\/ The use of TMP in a shared borrow does not\n \/\/ count as an actual activation.\n PlaceContext::Borrow { kind: mir::BorrowKind::Shared, .. } => {\n TwoPhaseActivation::NotActivated\n }\n _ => {\n \/\/ Double check: This borrow is indeed a two-phase borrow (that is,\n \/\/ we are 'transitioning' from `NotActivated` to `ActivatedAt`) and\n \/\/ we've not found any other activations (checked above).\n assert_eq!(\n borrow_data.activation_location,\n TwoPhaseActivation::NotActivated,\n \"never found an activation for this borrow!\",\n );\n\n self.activation_map\n .entry(location)\n .or_insert(Vec::new())\n .push(borrow_index);\n TwoPhaseActivation::ActivatedAt(location)\n }\n };\n }\n\n None => {}\n }\n }\n }\n\n fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: mir::Location) {\n if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {\n \/\/ double-check that we already registered a BorrowData for this\n\n let borrow_index = self.location_map[&location];\n let borrow_data = &self.idx_vec[borrow_index];\n assert_eq!(borrow_data.reserve_location, location);\n assert_eq!(borrow_data.kind, kind);\n assert_eq!(borrow_data.region, region);\n assert_eq!(borrow_data.borrowed_place, *place);\n }\n\n return self.super_rvalue(rvalue, location);\n }\n\n fn visit_statement(\n &mut self,\n block: mir::BasicBlock,\n statement: &mir::Statement<'tcx>,\n location: Location,\n ) {\n return self.super_statement(block, statement, location);\n }\n}\n\nimpl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {\n \/\/\/ Returns true if the borrow represented by `kind` is\n \/\/\/ allowed to be split into separate Reservation and\n \/\/\/ Activation phases.\n fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {\n self.tcx.two_phase_borrows()\n && (kind.allows_two_phase_borrow()\n || self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)\n }\n\n \/\/\/ If this is a two-phase borrow, then we will record it\n \/\/\/ as \"pending\" until we find the activating use.\n fn insert_as_pending_if_two_phase(\n &mut self,\n start_location: Location,\n assigned_place: &mir::Place<'tcx>,\n region: Region<'tcx>,\n kind: mir::BorrowKind,\n borrow_index: BorrowIndex,\n ) {\n debug!(\n \"Borrows::insert_as_pending_if_two_phase({:?}, {:?}, {:?}, {:?})\",\n start_location, assigned_place, region, borrow_index,\n );\n\n if !self.allow_two_phase_borrow(kind) {\n debug!(\" -> {:?}\", start_location);\n return;\n }\n\n \/\/ When we encounter a 2-phase borrow statement, it will always\n \/\/ be assigning into a temporary TEMP:\n \/\/\n \/\/ TEMP = &foo\n \/\/\n \/\/ so extract `temp`.\n let temp = if let &mir::Place::Local(temp) = assigned_place {\n temp\n } else {\n span_bug!(\n 
self.mir.source_info(start_location).span,\n \"expected 2-phase borrow to assign to a local, not `{:?}`\",\n assigned_place,\n );\n };\n\n \/\/ Consider the borrow not activated to start. When we find an activation, we'll update\n \/\/ this field.\n let borrow_data = &mut self.idx_vec[borrow_index];\n borrow_data.activation_location = TwoPhaseActivation::NotActivated;\n\n \/\/ Insert `temp` into the list of pending activations. From\n \/\/ now on, we'll be on the lookout for a use of it. Note that\n \/\/ we are guaranteed that this use will come after the\n \/\/ assignment.\n let old_value = self.pending_activations.insert(temp, borrow_index);\n assert!(old_value.is_none());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse spec::{LinkerFlavor, PanicStrategy, Target, TargetOptions, TargetResult};\n\npub fn target() -> TargetResult {\n Ok(Target {\n llvm_target: \"msp430-none-elf\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"16\".to_string(),\n target_c_int_width: \"16\".to_string(),\n data_layout: \"e-m:e-p:16:16-i32:16-i64:16-f32:16-f64:16-a:8-n8:16-S16\".to_string(),\n arch: \"msp430\".to_string(),\n target_os: \"none\".to_string(),\n target_env: String::new(),\n target_vendor: String::new(),\n linker_flavor: LinkerFlavor::Gcc,\n\n options: TargetOptions {\n executables: true,\n\n \/\/ The LLVM backend currently can't generate object files. To\n \/\/ workaround this LLVM generates assembly files which then we feed\n \/\/ to gcc to get object files. For this reason we have a hard\n \/\/ dependency on this specific gcc.\n asm_args: vec![\"-mcpu=msp430\".to_string()],\n linker: Some(\"msp430-elf-gcc\".to_string()),\n no_integrated_as: true,\n\n \/\/ There are no atomic CAS instructions available in the MSP430\n \/\/ instruction set\n max_atomic_width: Some(16),\n atomic_cas: false,\n\n \/\/ Because these devices have very little resources having an\n \/\/ unwinder is too onerous so we default to \"abort\" because the\n \/\/ \"unwind\" strategy is very rare.\n panic_strategy: PanicStrategy::Abort,\n\n \/\/ Similarly, one almost always never wants to use relocatable\n \/\/ code because of the extra costs it involves.\n relocation_model: \"static\".to_string(),\n\n \/\/ Right now we invoke an external assembler and this isn't\n \/\/ compatible with multiple codegen units, and plus we probably\n \/\/ don't want to invoke that many gcc instances.\n default_codegen_units: Some(1),\n\n \/\/ Since MSP430 doesn't meaningfully support faulting on illegal\n \/\/ instructions, LLVM generates a call to abort() function instead\n \/\/ of a trap instruction. Such calls are 4 bytes long, and that is\n \/\/ too much overhead for such small target.\n trap_unreachable: false,\n\n \/\/ See the thumb_base.rs file for an explanation of this value\n emit_debug_gdb_scripts: false,\n\n .. Default::default( )\n }\n })\n}\n<commit_msg>msp430: remove the whole Atomic* API<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse spec::{LinkerFlavor, PanicStrategy, Target, TargetOptions, TargetResult};\n\npub fn target() -> TargetResult {\n Ok(Target {\n llvm_target: \"msp430-none-elf\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"16\".to_string(),\n target_c_int_width: \"16\".to_string(),\n data_layout: \"e-m:e-p:16:16-i32:16-i64:16-f32:16-f64:16-a:8-n8:16-S16\".to_string(),\n arch: \"msp430\".to_string(),\n target_os: \"none\".to_string(),\n target_env: String::new(),\n target_vendor: String::new(),\n linker_flavor: LinkerFlavor::Gcc,\n\n options: TargetOptions {\n executables: true,\n\n \/\/ The LLVM backend currently can't generate object files. To\n \/\/ workaround this LLVM generates assembly files which then we feed\n \/\/ to gcc to get object files. For this reason we have a hard\n \/\/ dependency on this specific gcc.\n asm_args: vec![\"-mcpu=msp430\".to_string()],\n linker: Some(\"msp430-elf-gcc\".to_string()),\n no_integrated_as: true,\n\n \/\/ There are no atomic CAS instructions available in the MSP430\n \/\/ instruction set, and the LLVM backend doesn't currently support\n \/\/ compiler fences so the Atomic* API is missing on this target.\n \/\/ When the LLVM backend gains support for compile fences uncomment\n \/\/ the `singlethread: true` line and set `max_atomic_width` to\n \/\/ `Some(16)`.\n max_atomic_width: Some(0),\n atomic_cas: false,\n \/\/ singlethread: true,\n\n \/\/ Because these devices have very little resources having an\n \/\/ unwinder is too onerous so we default to \"abort\" because the\n \/\/ \"unwind\" strategy is very rare.\n panic_strategy: PanicStrategy::Abort,\n\n \/\/ Similarly, one almost always never wants to use relocatable\n \/\/ code because of the extra costs it involves.\n relocation_model: \"static\".to_string(),\n\n \/\/ Right now we invoke an external assembler and this isn't\n \/\/ compatible with multiple codegen units, and plus we probably\n \/\/ don't want to invoke that many gcc instances.\n default_codegen_units: Some(1),\n\n \/\/ Since MSP430 doesn't meaningfully support faulting on illegal\n \/\/ instructions, LLVM generates a call to abort() function instead\n \/\/ of a trap instruction. Such calls are 4 bytes long, and that is\n \/\/ too much overhead for such small target.\n trap_unreachable: false,\n\n \/\/ See the thumb_base.rs file for an explanation of this value\n emit_debug_gdb_scripts: false,\n\n .. 
Default::default( )\n }\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add examples folder<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example that calculates the hash of provided files<commit_after>extern crate hash;\n\nuse std::env;\nuse std::hash::Hasher;\nuse std::fs::File;\nuse std::io::{BufRead,BufReader};\nuse hash::XxHash;\n\nfn main() {\n for arg in env::args().skip(1) {\n let f = File::open(&arg).unwrap();\n let mut f = BufReader::new(f);\n\n let mut hasher = XxHash::with_seed(0);\n\n loop {\n let consumed = {\n let bytes = f.fill_buf().unwrap();\n if bytes.len() == 0 { break }\n hasher.write(bytes);\n bytes.len()\n };\n f.consume(consumed);\n }\n\n println!(\"{:16x} {}\", hasher.finish(), arg);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Documentation of album<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue #13214<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ defining static with struct that contains enum\n\/\/ with &'static str variant used to cause ICE\n\npub enum Foo {\n Bar,\n Baz(&'static str),\n}\n\npub static TEST: Test = Test {\n foo: Bar,\n c: 'a'\n};\n\npub struct Test {\n foo: Foo,\n c: char,\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for issue #18412<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(tuple_indexing)]\n\n\/\/ Test that non-static methods can be assigned to local variables as\n\/\/ function pointers.\n\ntrait Foo {\n fn foo(&self) -> uint;\n}\n\nstruct A(uint);\n\nimpl A {\n fn bar(&self) -> uint { self.0 }\n}\n\nimpl Foo for A {\n fn foo(&self) -> uint { self.bar() }\n}\n\nfn main() {\n let f = A::bar;\n let g = Foo::foo;\n let a = A(42);\n\n assert_eq!(f(&a), g(&a));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #24236 - aturon:issue-19097, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ regression test for #19097\n\nstruct Foo<T>(T);\n\nimpl<'a, T> Foo<&'a T> {\n fn foo(&self) {}\n}\nimpl<'a, T> Foo<&'a mut T> {\n fn foo(&self) {}\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #23036<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::collections::HashMap;\nuse std::path::Path;\n\nfn main() {\n let mut map = HashMap::new();\n map.insert(Path::new(\"a\"), 0);\n map.get(Path::new(\"a\"));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Wrap external errors in Error type<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make CharacterAttributes::description optional<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! This module contains the machinery necessary to detect infinite loops\n\/\/! during const-evaluation by taking snapshots of the state of the interpreter\n\/\/! at regular intervals.\n\n\/\/ This lives in `interpret` because it needs access to all sots of private state. However,\n\/\/ it is not used by the general miri engine, just by CTFE.\n\nuse std::hash::{Hash, Hasher};\nuse std::mem;\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc::mir;\nuse rustc::mir::interpret::{\n AllocId, Pointer, Scalar, ScalarMaybeUndef,\n Relocations, Allocation, UndefMask,\n EvalResult, EvalErrorKind,\n};\n\nuse rustc::ty::{self, TyCtxt};\nuse rustc::ty::layout::Align;\nuse rustc_data_structures::fx::FxHashSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\nuse syntax::ast::Mutability;\nuse syntax::source_map::Span;\n\nuse super::eval_context::{LocalValue, StackPopCleanup};\nuse super::{Frame, Memory, Operand, MemPlace, Place, Value};\nuse const_eval::CompileTimeInterpreter;\n\n#[derive(Default)]\npub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir> {\n \/\/\/ The set of all `EvalSnapshot` *hashes* observed by this detector.\n \/\/\/\n \/\/\/ When a collision occurs in this table, we store the full snapshot in\n \/\/\/ `snapshots`.\n hashes: FxHashSet<u64>,\n\n \/\/\/ The set of all `EvalSnapshot`s observed by this detector.\n \/\/\/\n \/\/\/ An `EvalSnapshot` will only be fully cloned once it has caused a\n \/\/\/ collision in `hashes`. 
As a result, the detector must observe at least\n \/\/\/ *two* full cycles of an infinite loop before it triggers.\n snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx> InfiniteLoopDetector<'a, 'mir, 'tcx>\n{\n pub fn observe_and_analyze<'b>(\n &mut self,\n tcx: &TyCtxt<'b, 'tcx, 'tcx>,\n span: Span,\n memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,\n stack: &[Frame<'mir, 'tcx>],\n ) -> EvalResult<'tcx, ()> {\n \/\/ Compute stack's hash before copying anything\n let mut hcx = tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n stack.hash_stable(&mut hcx, &mut hasher);\n let hash = hasher.finish();\n\n \/\/ Check if we know that hash already\n if self.hashes.is_empty() {\n \/\/ FIXME(#49980): make this warning a lint\n tcx.sess.span_warn(span,\n \"Constant evaluating a complex constant, this might take some time\");\n }\n if self.hashes.insert(hash) {\n \/\/ No collision\n return Ok(())\n }\n\n \/\/ We need to make a full copy. NOW things that to get really expensive.\n info!(\"snapshotting the state of the interpreter\");\n\n if self.snapshots.insert(EvalSnapshot::new(memory, stack)) {\n \/\/ Spurious collision or first cycle\n return Ok(())\n }\n\n \/\/ Second cycle\n Err(EvalErrorKind::InfiniteLoop.into())\n }\n}\n\ntrait SnapshotContext<'a> {\n fn resolve(&'a self, id: &AllocId) -> Option<&'a Allocation>;\n}\n\n\/\/\/ Taking a snapshot of the evaluation context produces a view of\n\/\/\/ the state of the interpreter that is invariant to `AllocId`s.\ntrait Snapshot<'a, Ctx: SnapshotContext<'a>> {\n type Item;\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;\n}\n\nmacro_rules! __impl_snapshot_field {\n ($field:ident, $ctx:expr) => ($field.snapshot($ctx));\n ($field:ident, $ctx:expr, $delegate:expr) => ($delegate);\n}\n\nmacro_rules! 
impl_snapshot_for {\n \/\/ FIXME(mark-i-m): Some of these should be `?` rather than `*`.\n (enum $enum_name:ident {\n $( $variant:ident $( ( $($field:ident $(-> $delegate:expr)*),* ) )* ),* $(,)*\n }) => {\n\n impl<'a, Ctx> self::Snapshot<'a, Ctx> for $enum_name\n where Ctx: self::SnapshotContext<'a>,\n {\n type Item = $enum_name<AllocIdSnapshot<'a>>;\n\n #[inline]\n fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {\n match *self {\n $(\n $enum_name::$variant $( ( $(ref $field),* ) )* =>\n $enum_name::$variant $(\n ( $( __impl_snapshot_field!($field, __ctx $(, $delegate)*) ),* ),\n )*\n )*\n }\n }\n }\n };\n\n \/\/ FIXME(mark-i-m): same here.\n (struct $struct_name:ident { $($field:ident $(-> $delegate:expr)*),* $(,)* }) => {\n impl<'a, Ctx> self::Snapshot<'a, Ctx> for $struct_name\n where Ctx: self::SnapshotContext<'a>,\n {\n type Item = $struct_name<AllocIdSnapshot<'a>>;\n\n #[inline]\n fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {\n let $struct_name {\n $(ref $field),*\n } = *self;\n\n $struct_name {\n $( $field: __impl_snapshot_field!($field, __ctx $(, $delegate)*) ),*\n }\n }\n }\n };\n}\n\nimpl<'a, Ctx, T> Snapshot<'a, Ctx> for Option<T>\n where Ctx: SnapshotContext<'a>,\n T: Snapshot<'a, Ctx>\n{\n type Item = Option<<T as Snapshot<'a, Ctx>>::Item>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Some(x) => Some(x.snapshot(ctx)),\n None => None,\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for AllocId\n where Ctx: SnapshotContext<'a>,\n{\n type Item = AllocIdSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))\n }\n}\n\nimpl_snapshot_for!(struct Pointer {\n alloc_id,\n offset -> *offset, \/\/ just copy offset verbatim\n});\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Scalar\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Scalar<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),\n Scalar::Bits{ size, bits } => Scalar::Bits {\n size: *size,\n bits: *bits,\n },\n }\n }\n}\n\nimpl_snapshot_for!(enum ScalarMaybeUndef {\n Scalar(s),\n Undef,\n});\n\nimpl_stable_hash_for!(struct ::interpret::MemPlace {\n ptr,\n align,\n extra,\n});\nimpl_snapshot_for!(struct MemPlace {\n ptr,\n extra,\n align -> *align, \/\/ just copy alignment verbatim\n});\n\n\/\/ Can't use the macro here because that does not support named enum fields.\nimpl<'a> HashStable<StableHashingContext<'a>> for Place {\n fn hash_stable<W: StableHasherResult>(\n &self, hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>)\n {\n mem::discriminant(self).hash_stable(hcx, hasher);\n match self {\n Place::Ptr(mem_place) => mem_place.hash_stable(hcx, hasher),\n\n Place::Local { frame, local } => {\n frame.hash_stable(hcx, hasher);\n local.hash_stable(hcx, hasher);\n },\n }\n }\n}\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Place\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Place<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),\n\n Place::Local{ frame, local } => Place::Local{\n frame: *frame,\n local: *local,\n },\n }\n }\n}\n\nimpl_stable_hash_for!(enum ::interpret::Value {\n Scalar(x),\n ScalarPair(x, y),\n});\nimpl_snapshot_for!(enum Value {\n Scalar(s),\n ScalarPair(s, t),\n});\n\nimpl_stable_hash_for!(enum ::interpret::Operand {\n Immediate(x),\n 
Indirect(x),\n});\nimpl_snapshot_for!(enum Operand {\n Immediate(v),\n Indirect(m),\n});\n\nimpl_stable_hash_for!(enum ::interpret::LocalValue {\n Dead,\n Live(x),\n});\nimpl_snapshot_for!(enum LocalValue {\n Live(v),\n Dead,\n});\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Relocations\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Relocations<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n Relocations::from_presorted(self.iter()\n .map(|(size, id)| (*size, id.snapshot(ctx)))\n .collect())\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocationSnapshot<'a> {\n bytes: &'a [u8],\n relocations: Relocations<AllocIdSnapshot<'a>>,\n undef_mask: &'a UndefMask,\n align: &'a Align,\n mutability: &'a Mutability,\n}\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation\n where Ctx: SnapshotContext<'a>,\n{\n type Item = AllocationSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Allocation { bytes, relocations, undef_mask, align, mutability } = self;\n\n AllocationSnapshot {\n bytes,\n undef_mask,\n align,\n mutability,\n relocations: relocations.snapshot(ctx),\n }\n }\n}\n\n\/\/ Can't use the macro here because that does not support named enum fields.\nimpl<'a> HashStable<StableHashingContext<'a>> for StackPopCleanup {\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>)\n {\n mem::discriminant(self).hash_stable(hcx, hasher);\n match self {\n StackPopCleanup::Goto(ref block) => block.hash_stable(hcx, hasher),\n StackPopCleanup::None { cleanup } => cleanup.hash_stable(hcx, hasher),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct FrameSnapshot<'a, 'tcx: 'a> {\n instance: &'a ty::Instance<'tcx>,\n span: &'a Span,\n return_to_block: &'a StackPopCleanup,\n return_place: Place<AllocIdSnapshot<'a>>,\n locals: IndexVec<mir::Local, LocalValue<AllocIdSnapshot<'a>>>,\n block: &'a mir::BasicBlock,\n stmt: usize,\n}\n\n\/\/ Not using the macro because that does not support types depending on 'tcx\nimpl<'a, 'mir, 'tcx: 'mir> HashStable<StableHashingContext<'a>> for Frame<'mir, 'tcx> {\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>) {\n\n let Frame {\n mir,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n (mir, instance, span, return_to_block).hash_stable(hcx, hasher);\n (return_place, locals, block, stmt).hash_stable(hcx, hasher);\n }\n}\nimpl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>\n where Ctx: SnapshotContext<'a>,\n{\n type Item = FrameSnapshot<'a, 'tcx>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Frame {\n mir: _,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n FrameSnapshot {\n instance,\n span,\n return_to_block,\n block,\n stmt: *stmt,\n return_place: return_place.snapshot(ctx),\n locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),\n }\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx: 'a+'mir> SnapshotContext<'b>\n for Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>\n{\n fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {\n self.get(*id).ok()\n }\n}\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n\/\/\/ We assume the `CompileTimeInterpreter` has no interesting extra state that\n\/\/\/ is worth considering here.\nstruct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {\n memory: Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 
'tcx>>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx: 'a + 'mir> EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn new(\n memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,\n stack: &[Frame<'mir, 'tcx>]\n ) -> Self {\n EvalSnapshot {\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n\n \/\/ Used to compare two snapshots\n fn snapshot(&'b self)\n -> Vec<FrameSnapshot<'b, 'tcx>>\n {\n \/\/ Start with the stack, iterate and recursively snapshot\n self.stack.iter().map(|frame| frame.snapshot(&self.memory)).collect()\n }\n\n}\n\nimpl<'a, 'mir, 'tcx> Hash for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\n\/\/ Not using the macro because we need special handling for `memory`, which the macro\n\/\/ does not support at the same time as the extra bounds on the type.\nimpl<'a, 'b, 'mir, 'tcx> HashStable<StableHashingContext<'b>>\n for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'b>,\n hasher: &mut StableHasher<W>)\n {\n \/\/ Not hashing memory: Avoid hashing memory all the time during execution\n let EvalSnapshot{ memory: _, stack } = self;\n stack.hash_stable(hcx, hasher);\n }\n}\n\nimpl<'a, 'mir, 'tcx> Eq for EvalSnapshot<'a, 'mir, 'tcx>\n{}\n\nimpl<'a, 'mir, 'tcx> PartialEq for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn eq(&self, other: &Self) -> bool {\n \/\/ FIXME: This looks to be a *ridicolously expensive* comparison operation.\n \/\/ Doesn't this make tons of copies? Either `snapshot` is very badly named,\n \/\/ or it does!\n self.snapshot() == other.snapshot()\n }\n}\n<commit_msg>fix comment<commit_after>\/\/! This module contains the machinery necessary to detect infinite loops\n\/\/! during const-evaluation by taking snapshots of the state of the interpreter\n\/\/! at regular intervals.\n\n\/\/ This lives in `interpret` because it needs access to all sots of private state. However,\n\/\/ it is not used by the general miri engine, just by CTFE.\n\nuse std::hash::{Hash, Hasher};\nuse std::mem;\n\nuse rustc::ich::{StableHashingContext, StableHashingContextProvider};\nuse rustc::mir;\nuse rustc::mir::interpret::{\n AllocId, Pointer, Scalar, ScalarMaybeUndef,\n Relocations, Allocation, UndefMask,\n EvalResult, EvalErrorKind,\n};\n\nuse rustc::ty::{self, TyCtxt};\nuse rustc::ty::layout::Align;\nuse rustc_data_structures::fx::FxHashSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};\nuse syntax::ast::Mutability;\nuse syntax::source_map::Span;\n\nuse super::eval_context::{LocalValue, StackPopCleanup};\nuse super::{Frame, Memory, Operand, MemPlace, Place, Value};\nuse const_eval::CompileTimeInterpreter;\n\n#[derive(Default)]\npub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir> {\n \/\/\/ The set of all `EvalSnapshot` *hashes* observed by this detector.\n \/\/\/\n \/\/\/ When a collision occurs in this table, we store the full snapshot in\n \/\/\/ `snapshots`.\n hashes: FxHashSet<u64>,\n\n \/\/\/ The set of all `EvalSnapshot`s observed by this detector.\n \/\/\/\n \/\/\/ An `EvalSnapshot` will only be fully cloned once it has caused a\n \/\/\/ collision in `hashes`. 
As a result, the detector must observe at least\n \/\/\/ *two* full cycles of an infinite loop before it triggers.\n snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx> InfiniteLoopDetector<'a, 'mir, 'tcx>\n{\n pub fn observe_and_analyze<'b>(\n &mut self,\n tcx: &TyCtxt<'b, 'tcx, 'tcx>,\n span: Span,\n memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,\n stack: &[Frame<'mir, 'tcx>],\n ) -> EvalResult<'tcx, ()> {\n \/\/ Compute stack's hash before copying anything\n let mut hcx = tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n stack.hash_stable(&mut hcx, &mut hasher);\n let hash = hasher.finish();\n\n \/\/ Check if we know that hash already\n if self.hashes.is_empty() {\n \/\/ FIXME(#49980): make this warning a lint\n tcx.sess.span_warn(span,\n \"Constant evaluating a complex constant, this might take some time\");\n }\n if self.hashes.insert(hash) {\n \/\/ No collision\n return Ok(())\n }\n\n \/\/ We need to make a full copy. NOW things that to get really expensive.\n info!(\"snapshotting the state of the interpreter\");\n\n if self.snapshots.insert(EvalSnapshot::new(memory, stack)) {\n \/\/ Spurious collision or first cycle\n return Ok(())\n }\n\n \/\/ Second cycle\n Err(EvalErrorKind::InfiniteLoop.into())\n }\n}\n\ntrait SnapshotContext<'a> {\n fn resolve(&'a self, id: &AllocId) -> Option<&'a Allocation>;\n}\n\n\/\/\/ Taking a snapshot of the evaluation context produces a view of\n\/\/\/ the state of the interpreter that is invariant to `AllocId`s.\ntrait Snapshot<'a, Ctx: SnapshotContext<'a>> {\n type Item;\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;\n}\n\nmacro_rules! __impl_snapshot_field {\n ($field:ident, $ctx:expr) => ($field.snapshot($ctx));\n ($field:ident, $ctx:expr, $delegate:expr) => ($delegate);\n}\n\nmacro_rules! 
impl_snapshot_for {\n \/\/ FIXME(mark-i-m): Some of these should be `?` rather than `*`.\n (enum $enum_name:ident {\n $( $variant:ident $( ( $($field:ident $(-> $delegate:expr)*),* ) )* ),* $(,)*\n }) => {\n\n impl<'a, Ctx> self::Snapshot<'a, Ctx> for $enum_name\n where Ctx: self::SnapshotContext<'a>,\n {\n type Item = $enum_name<AllocIdSnapshot<'a>>;\n\n #[inline]\n fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {\n match *self {\n $(\n $enum_name::$variant $( ( $(ref $field),* ) )* =>\n $enum_name::$variant $(\n ( $( __impl_snapshot_field!($field, __ctx $(, $delegate)*) ),* ),\n )*\n )*\n }\n }\n }\n };\n\n \/\/ FIXME(mark-i-m): same here.\n (struct $struct_name:ident { $($field:ident $(-> $delegate:expr)*),* $(,)* }) => {\n impl<'a, Ctx> self::Snapshot<'a, Ctx> for $struct_name\n where Ctx: self::SnapshotContext<'a>,\n {\n type Item = $struct_name<AllocIdSnapshot<'a>>;\n\n #[inline]\n fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {\n let $struct_name {\n $(ref $field),*\n } = *self;\n\n $struct_name {\n $( $field: __impl_snapshot_field!($field, __ctx $(, $delegate)*) ),*\n }\n }\n }\n };\n}\n\nimpl<'a, Ctx, T> Snapshot<'a, Ctx> for Option<T>\n where Ctx: SnapshotContext<'a>,\n T: Snapshot<'a, Ctx>\n{\n type Item = Option<<T as Snapshot<'a, Ctx>>::Item>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Some(x) => Some(x.snapshot(ctx)),\n None => None,\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for AllocId\n where Ctx: SnapshotContext<'a>,\n{\n type Item = AllocIdSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))\n }\n}\n\nimpl_snapshot_for!(struct Pointer {\n alloc_id,\n offset -> *offset, \/\/ just copy offset verbatim\n});\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Scalar\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Scalar<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),\n Scalar::Bits{ size, bits } => Scalar::Bits {\n size: *size,\n bits: *bits,\n },\n }\n }\n}\n\nimpl_snapshot_for!(enum ScalarMaybeUndef {\n Scalar(s),\n Undef,\n});\n\nimpl_stable_hash_for!(struct ::interpret::MemPlace {\n ptr,\n align,\n extra,\n});\nimpl_snapshot_for!(struct MemPlace {\n ptr,\n extra,\n align -> *align, \/\/ just copy alignment verbatim\n});\n\n\/\/ Can't use the macro here because that does not support named enum fields.\nimpl<'a> HashStable<StableHashingContext<'a>> for Place {\n fn hash_stable<W: StableHasherResult>(\n &self, hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>)\n {\n mem::discriminant(self).hash_stable(hcx, hasher);\n match self {\n Place::Ptr(mem_place) => mem_place.hash_stable(hcx, hasher),\n\n Place::Local { frame, local } => {\n frame.hash_stable(hcx, hasher);\n local.hash_stable(hcx, hasher);\n },\n }\n }\n}\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Place\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Place<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n match self {\n Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),\n\n Place::Local{ frame, local } => Place::Local{\n frame: *frame,\n local: *local,\n },\n }\n }\n}\n\nimpl_stable_hash_for!(enum ::interpret::Value {\n Scalar(x),\n ScalarPair(x, y),\n});\nimpl_snapshot_for!(enum Value {\n Scalar(s),\n ScalarPair(s, t),\n});\n\nimpl_stable_hash_for!(enum ::interpret::Operand {\n Immediate(x),\n 
Indirect(x),\n});\nimpl_snapshot_for!(enum Operand {\n Immediate(v),\n Indirect(m),\n});\n\nimpl_stable_hash_for!(enum ::interpret::LocalValue {\n Dead,\n Live(x),\n});\nimpl_snapshot_for!(enum LocalValue {\n Live(v),\n Dead,\n});\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for Relocations\n where Ctx: SnapshotContext<'a>,\n{\n type Item = Relocations<AllocIdSnapshot<'a>>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n Relocations::from_presorted(self.iter()\n .map(|(size, id)| (*size, id.snapshot(ctx)))\n .collect())\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct AllocationSnapshot<'a> {\n bytes: &'a [u8],\n relocations: Relocations<AllocIdSnapshot<'a>>,\n undef_mask: &'a UndefMask,\n align: &'a Align,\n mutability: &'a Mutability,\n}\n\nimpl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation\n where Ctx: SnapshotContext<'a>,\n{\n type Item = AllocationSnapshot<'a>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Allocation { bytes, relocations, undef_mask, align, mutability } = self;\n\n AllocationSnapshot {\n bytes,\n undef_mask,\n align,\n mutability,\n relocations: relocations.snapshot(ctx),\n }\n }\n}\n\n\/\/ Can't use the macro here because that does not support named enum fields.\nimpl<'a> HashStable<StableHashingContext<'a>> for StackPopCleanup {\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>)\n {\n mem::discriminant(self).hash_stable(hcx, hasher);\n match self {\n StackPopCleanup::Goto(ref block) => block.hash_stable(hcx, hasher),\n StackPopCleanup::None { cleanup } => cleanup.hash_stable(hcx, hasher),\n }\n }\n}\n\n#[derive(Eq, PartialEq)]\nstruct FrameSnapshot<'a, 'tcx: 'a> {\n instance: &'a ty::Instance<'tcx>,\n span: &'a Span,\n return_to_block: &'a StackPopCleanup,\n return_place: Place<AllocIdSnapshot<'a>>,\n locals: IndexVec<mir::Local, LocalValue<AllocIdSnapshot<'a>>>,\n block: &'a mir::BasicBlock,\n stmt: usize,\n}\n\n\/\/ Not using the macro because that does not support types depending on two lifetimes\nimpl<'a, 'mir, 'tcx: 'mir> HashStable<StableHashingContext<'a>> for Frame<'mir, 'tcx> {\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'a>,\n hasher: &mut StableHasher<W>) {\n\n let Frame {\n mir,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n (mir, instance, span, return_to_block).hash_stable(hcx, hasher);\n (return_place, locals, block, stmt).hash_stable(hcx, hasher);\n }\n}\nimpl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>\n where Ctx: SnapshotContext<'a>,\n{\n type Item = FrameSnapshot<'a, 'tcx>;\n\n fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {\n let Frame {\n mir: _,\n instance,\n span,\n return_to_block,\n return_place,\n locals,\n block,\n stmt,\n } = self;\n\n FrameSnapshot {\n instance,\n span,\n return_to_block,\n block,\n stmt: *stmt,\n return_place: return_place.snapshot(ctx),\n locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),\n }\n }\n}\n\nimpl<'a, 'b, 'mir, 'tcx: 'a+'mir> SnapshotContext<'b>\n for Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>\n{\n fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {\n self.get(*id).ok()\n }\n}\n\n\/\/\/ The virtual machine state during const-evaluation at a given point in time.\n\/\/\/ We assume the `CompileTimeInterpreter` has no interesting extra state that\n\/\/\/ is worth considering here.\nstruct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {\n memory: Memory<'a, 'mir, 'tcx, 
CompileTimeInterpreter<'a, 'mir, 'tcx>>,\n stack: Vec<Frame<'mir, 'tcx>>,\n}\n\nimpl<'a, 'mir, 'tcx: 'a + 'mir> EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn new(\n memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,\n stack: &[Frame<'mir, 'tcx>]\n ) -> Self {\n EvalSnapshot {\n memory: memory.clone(),\n stack: stack.into(),\n }\n }\n\n \/\/ Used to compare two snapshots\n fn snapshot(&'b self)\n -> Vec<FrameSnapshot<'b, 'tcx>>\n {\n \/\/ Start with the stack, iterate and recursively snapshot\n self.stack.iter().map(|frame| frame.snapshot(&self.memory)).collect()\n }\n\n}\n\nimpl<'a, 'mir, 'tcx> Hash for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn hash<H: Hasher>(&self, state: &mut H) {\n \/\/ Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)\n let mut hcx = self.memory.tcx.get_stable_hashing_context();\n let mut hasher = StableHasher::<u64>::new();\n self.hash_stable(&mut hcx, &mut hasher);\n hasher.finish().hash(state)\n }\n}\n\n\/\/ Not using the macro because we need special handling for `memory`, which the macro\n\/\/ does not support at the same time as the extra bounds on the type.\nimpl<'a, 'b, 'mir, 'tcx> HashStable<StableHashingContext<'b>>\n for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn hash_stable<W: StableHasherResult>(\n &self,\n hcx: &mut StableHashingContext<'b>,\n hasher: &mut StableHasher<W>)\n {\n \/\/ Not hashing memory: Avoid hashing memory all the time during execution\n let EvalSnapshot{ memory: _, stack } = self;\n stack.hash_stable(hcx, hasher);\n }\n}\n\nimpl<'a, 'mir, 'tcx> Eq for EvalSnapshot<'a, 'mir, 'tcx>\n{}\n\nimpl<'a, 'mir, 'tcx> PartialEq for EvalSnapshot<'a, 'mir, 'tcx>\n{\n fn eq(&self, other: &Self) -> bool {\n \/\/ FIXME: This looks to be a *ridicolously expensive* comparison operation.\n \/\/ Doesn't this make tons of copies? Either `snapshot` is very badly named,\n \/\/ or it does!\n self.snapshot() == other.snapshot()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse hair::cx::Cx;\nuse rustc::middle::region::CodeExtent;\nuse rustc::middle::ty::{FnOutput, Ty};\nuse rustc::mir::repr::*;\nuse rustc_data_structures::fnv::FnvHashMap;\nuse rustc_front::hir;\n\nuse syntax::ast;\nuse syntax::codemap::Span;\n\npub struct Builder<'a, 'tcx: 'a> {\n hir: Cx<'a, 'tcx>,\n cfg: CFG<'tcx>,\n scopes: Vec<scope::Scope<'tcx>>,\n loop_scopes: Vec<scope::LoopScope>,\n unit_temp: Lvalue<'tcx>,\n var_decls: Vec<VarDecl<'tcx>>,\n var_indices: FnvHashMap<ast::NodeId, u32>,\n temp_decls: Vec<TempDecl<'tcx>>,\n}\n\nstruct CFG<'tcx> {\n basic_blocks: Vec<BasicBlockData<'tcx>>,\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ The `BlockAnd` \"monad\" packages up the new basic block along with a\n\/\/ produced value (sometimes just unit, of course). 
The `unpack!`\n\/\/ macro (and methods below) makes working with `BlockAnd` much more\n\/\/ convenient.\n\n#[must_use] \/\/ if you don't use one of these results, you're leaving a dangling edge\npub struct BlockAnd<T>(BasicBlock, T);\n\ntrait BlockAndExtension {\n fn and<T>(self, v: T) -> BlockAnd<T>;\n fn unit(self) -> BlockAnd<()>;\n}\n\nimpl BlockAndExtension for BasicBlock {\n fn and<T>(self, v: T) -> BlockAnd<T> {\n BlockAnd(self, v)\n }\n\n fn unit(self) -> BlockAnd<()> {\n BlockAnd(self, ())\n }\n}\n\n\/\/\/ Update a block pointer and return the value.\n\/\/\/ Use it like `let x = unpack!(block = self.foo(block, foo))`.\nmacro_rules! unpack {\n ($x:ident = $c:expr) => {\n {\n let BlockAnd(b, v) = $c;\n $x = b;\n v\n }\n };\n\n ($c:expr) => {\n {\n let BlockAnd(b, ()) = $c;\n b\n }\n };\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ construct() -- the main entry point for building MIR for a function\n\npub fn construct<'a,'tcx>(mut hir: Cx<'a,'tcx>,\n _span: Span,\n implicit_arguments: Vec<Ty<'tcx>>,\n explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>,\n argument_extent: CodeExtent,\n return_ty: FnOutput<'tcx>,\n ast_block: &'tcx hir::Block)\n -> Mir<'tcx> {\n let cfg = CFG { basic_blocks: vec![] };\n\n \/\/ it's handy to have a temporary of type `()` sometimes, so make\n \/\/ one from the start and keep it available\n let temp_decls = vec![TempDecl::<'tcx> { ty: hir.unit_ty() }];\n let unit_temp = Lvalue::Temp(0);\n\n let mut builder = Builder {\n hir: hir,\n cfg: cfg,\n scopes: vec![],\n loop_scopes: vec![],\n temp_decls: temp_decls,\n var_decls: vec![],\n var_indices: FnvHashMap(),\n unit_temp: unit_temp,\n };\n\n assert_eq!(builder.cfg.start_new_block(), START_BLOCK);\n assert_eq!(builder.cfg.start_new_block(), END_BLOCK);\n assert_eq!(builder.cfg.start_new_block(), DIVERGE_BLOCK);\n\n let mut block = START_BLOCK;\n let arg_decls = unpack!(block = builder.args_and_body(block,\n implicit_arguments,\n explicit_arguments,\n argument_extent,\n ast_block));\n\n builder.cfg.terminate(block, Terminator::Goto { target: END_BLOCK });\n builder.cfg.terminate(END_BLOCK, Terminator::Return);\n\n Mir {\n basic_blocks: builder.cfg.basic_blocks,\n var_decls: builder.var_decls,\n arg_decls: arg_decls,\n temp_decls: builder.temp_decls,\n return_ty: return_ty,\n }\n}\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n fn args_and_body(&mut self,\n mut block: BasicBlock,\n implicit_arguments: Vec<Ty<'tcx>>,\n explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>,\n argument_extent: CodeExtent,\n ast_block: &'tcx hir::Block)\n -> BlockAnd<Vec<ArgDecl<'tcx>>>\n {\n self.in_scope(argument_extent, block, |this| {\n let arg_decls = {\n let implicit_arg_decls = implicit_arguments.into_iter()\n .map(|ty| ArgDecl { ty: ty });\n\n \/\/ to start, translate the argument patterns and collect the\n \/\/ argument types.\n let explicit_arg_decls =\n explicit_arguments\n .into_iter()\n .enumerate()\n .map(|(index, (ty, pattern))| {\n let lvalue = Lvalue::Arg(index as u32);\n let pattern = this.hir.irrefutable_pat(pattern);\n unpack!(block = this.lvalue_into_pattern(block,\n argument_extent,\n pattern,\n &lvalue));\n ArgDecl { ty: ty }\n });\n\n implicit_arg_decls.chain(explicit_arg_decls).collect()\n };\n\n \/\/ start the first basic block and translate the body\n unpack!(block = this.ast_block(&Lvalue::ReturnPointer, block, ast_block));\n\n block.and(arg_decls)\n })\n 
}\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Builder methods are broken up into modules, depending on what kind\n\/\/ of thing is being translated. Note that they use the `unpack` macro\n\/\/ above extensively.\n\nmod block;\nmod cfg;\nmod expr;\nmod into;\nmod matches;\nmod misc;\nmod scope;\nmod stmt;\n<commit_msg>Rollup merge of #30630 - tsion:mir-closure-args, r=nagisa<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse hair::cx::Cx;\nuse rustc::middle::region::CodeExtent;\nuse rustc::middle::ty::{FnOutput, Ty};\nuse rustc::mir::repr::*;\nuse rustc_data_structures::fnv::FnvHashMap;\nuse rustc_front::hir;\n\nuse syntax::ast;\nuse syntax::codemap::Span;\n\npub struct Builder<'a, 'tcx: 'a> {\n hir: Cx<'a, 'tcx>,\n cfg: CFG<'tcx>,\n scopes: Vec<scope::Scope<'tcx>>,\n loop_scopes: Vec<scope::LoopScope>,\n unit_temp: Lvalue<'tcx>,\n var_decls: Vec<VarDecl<'tcx>>,\n var_indices: FnvHashMap<ast::NodeId, u32>,\n temp_decls: Vec<TempDecl<'tcx>>,\n}\n\nstruct CFG<'tcx> {\n basic_blocks: Vec<BasicBlockData<'tcx>>,\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ The `BlockAnd` \"monad\" packages up the new basic block along with a\n\/\/ produced value (sometimes just unit, of course). The `unpack!`\n\/\/ macro (and methods below) makes working with `BlockAnd` much more\n\/\/ convenient.\n\n#[must_use] \/\/ if you don't use one of these results, you're leaving a dangling edge\npub struct BlockAnd<T>(BasicBlock, T);\n\ntrait BlockAndExtension {\n fn and<T>(self, v: T) -> BlockAnd<T>;\n fn unit(self) -> BlockAnd<()>;\n}\n\nimpl BlockAndExtension for BasicBlock {\n fn and<T>(self, v: T) -> BlockAnd<T> {\n BlockAnd(self, v)\n }\n\n fn unit(self) -> BlockAnd<()> {\n BlockAnd(self, ())\n }\n}\n\n\/\/\/ Update a block pointer and return the value.\n\/\/\/ Use it like `let x = unpack!(block = self.foo(block, foo))`.\nmacro_rules! 
unpack {\n ($x:ident = $c:expr) => {\n {\n let BlockAnd(b, v) = $c;\n $x = b;\n v\n }\n };\n\n ($c:expr) => {\n {\n let BlockAnd(b, ()) = $c;\n b\n }\n };\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ construct() -- the main entry point for building MIR for a function\n\npub fn construct<'a,'tcx>(mut hir: Cx<'a,'tcx>,\n _span: Span,\n implicit_arguments: Vec<Ty<'tcx>>,\n explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>,\n argument_extent: CodeExtent,\n return_ty: FnOutput<'tcx>,\n ast_block: &'tcx hir::Block)\n -> Mir<'tcx> {\n let cfg = CFG { basic_blocks: vec![] };\n\n \/\/ it's handy to have a temporary of type `()` sometimes, so make\n \/\/ one from the start and keep it available\n let temp_decls = vec![TempDecl::<'tcx> { ty: hir.unit_ty() }];\n let unit_temp = Lvalue::Temp(0);\n\n let mut builder = Builder {\n hir: hir,\n cfg: cfg,\n scopes: vec![],\n loop_scopes: vec![],\n temp_decls: temp_decls,\n var_decls: vec![],\n var_indices: FnvHashMap(),\n unit_temp: unit_temp,\n };\n\n assert_eq!(builder.cfg.start_new_block(), START_BLOCK);\n assert_eq!(builder.cfg.start_new_block(), END_BLOCK);\n assert_eq!(builder.cfg.start_new_block(), DIVERGE_BLOCK);\n\n let mut block = START_BLOCK;\n let arg_decls = unpack!(block = builder.args_and_body(block,\n implicit_arguments,\n explicit_arguments,\n argument_extent,\n ast_block));\n\n builder.cfg.terminate(block, Terminator::Goto { target: END_BLOCK });\n builder.cfg.terminate(END_BLOCK, Terminator::Return);\n\n Mir {\n basic_blocks: builder.cfg.basic_blocks,\n var_decls: builder.var_decls,\n arg_decls: arg_decls,\n temp_decls: builder.temp_decls,\n return_ty: return_ty,\n }\n}\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n fn args_and_body(&mut self,\n mut block: BasicBlock,\n implicit_arguments: Vec<Ty<'tcx>>,\n explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>,\n argument_extent: CodeExtent,\n ast_block: &'tcx hir::Block)\n -> BlockAnd<Vec<ArgDecl<'tcx>>>\n {\n self.in_scope(argument_extent, block, |this| {\n \/\/ to start, translate the argument patterns and collect the argument types.\n let implicits = implicit_arguments.into_iter().map(|ty| (ty, None));\n let explicits = explicit_arguments.into_iter().map(|(ty, pat)| (ty, Some(pat)));\n let arg_decls =\n implicits\n .chain(explicits)\n .enumerate()\n .map(|(index, (ty, pattern))| {\n if let Some(pattern) = pattern {\n let lvalue = Lvalue::Arg(index as u32);\n let pattern = this.hir.irrefutable_pat(pattern);\n unpack!(block = this.lvalue_into_pattern(block,\n argument_extent,\n pattern,\n &lvalue));\n }\n ArgDecl { ty: ty }\n })\n .collect();\n\n \/\/ start the first basic block and translate the body\n unpack!(block = this.ast_block(&Lvalue::ReturnPointer, block, ast_block));\n\n block.and(arg_decls)\n })\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Builder methods are broken up into modules, depending on what kind\n\/\/ of thing is being translated. 
Note that they use the `unpack` macro\n\/\/ above extensively.\n\nmod block;\nmod cfg;\nmod expr;\nmod into;\nmod matches;\nmod misc;\nmod scope;\nmod stmt;\n<|endoftext|>"} {"text":"<commit_before>use request::Request;\nuse response::Response;\nuse nickel_error::NickelError;\npub use self::Action::{Continue, Halt};\n\npub type MiddlewareResult = Result<Action, NickelError>;\n\n#[deriving(PartialEq)]\npub enum Action {\n Continue,\n Halt\n}\n\n\/\/ the usage of + Send is weird here because what we really want is + Static\n\/\/ but that's not possible as of today. We have to use + Send for now.\npub trait Middleware: Send + Sync {\n fn invoke<'a, 'b>(&'a self, _req: &mut Request<'b, 'a>, _res: &mut Response) -> MiddlewareResult {\n Ok(Continue)\n }\n}\n\npub trait ErrorHandler: Send + Sync {\n fn invoke(&self, _err: &NickelError, _req: &mut Request, _res: &mut Response) -> MiddlewareResult {\n Ok(Continue)\n }\n}\n\nimpl ErrorHandler for fn(&NickelError, &Request, &mut Response) -> MiddlewareResult {\n fn invoke(&self, err: &NickelError, req: &mut Request, res: &mut Response) -> MiddlewareResult {\n (*self)(err, req, res)\n }\n}\n\npub struct MiddlewareStack {\n handlers: Vec<Box<Middleware + Send + Sync>>,\n error_handlers: Vec<Box<ErrorHandler + Send + Sync>>\n}\n\nimpl MiddlewareStack {\n pub fn add_middleware<T: Middleware> (&mut self, handler: T) {\n self.handlers.push(box handler);\n }\n\n pub fn add_error_handler<T: ErrorHandler> (&mut self, handler: T) {\n self.error_handlers.push(box handler);\n }\n\n pub fn invoke<'a, 'b>(&'a self, req: &mut Request<'b, 'a>, res: &mut Response) {\n for handler in self.handlers.iter() {\n match handler.invoke(req, res) {\n Ok(Halt) => {\n debug!(\"{} {} {} {}\", req.origin.method, req.origin.remote_addr, req.origin.request_uri, res.origin.status);\n return\n }\n Ok(Continue) => {},\n Err(mut err) => {\n warn!(\"{} {} {} {}\", req.origin.method, req.origin.remote_addr, req.origin.request_uri, err.kind);\n for error_handler in self.error_handlers.iter().rev() {\n match error_handler.invoke(&err, req, res) {\n Ok(Continue) => {},\n Ok(Halt) => return,\n \/\/ change the error so that other ErrorHandler\n \/\/ down the stack receive the new error.\n Err(new_err) => err = new_err,\n }\n }\n }\n }\n }\n }\n\n pub fn new () -> MiddlewareStack {\n MiddlewareStack{\n handlers: Vec::new(),\n error_handlers: Vec::new()\n }\n }\n}\n<commit_msg>refactor(errorhandler): generic return type for blanket impl<commit_after>use request::Request;\nuse response::Response;\nuse nickel_error::NickelError;\nuse middleware_handler::ResponseFinalizer;\npub use self::Action::{Continue, Halt};\n\npub type MiddlewareResult = Result<Action, NickelError>;\n\n#[deriving(PartialEq)]\npub enum Action {\n Continue,\n Halt\n}\n\n\/\/ the usage of + Send is weird here because what we really want is + Static\n\/\/ but that's not possible as of today. 
We have to use + Send for now.\npub trait Middleware: Send + Sync {\n fn invoke<'a, 'b>(&'a self, _req: &mut Request<'b, 'a>, _res: &mut Response) -> MiddlewareResult {\n Ok(Continue)\n }\n}\n\npub trait ErrorHandler: Send + Sync {\n fn invoke(&self, _err: &NickelError, _req: &mut Request, _res: &mut Response) -> MiddlewareResult {\n Ok(Continue)\n }\n}\n\nimpl<R> ErrorHandler for fn(&NickelError, &Request, &mut Response) -> R\n where R: ResponseFinalizer {\n fn invoke(&self, err: &NickelError, req: &mut Request, res: &mut Response) -> MiddlewareResult {\n let r = (*self)(err, req, res);\n r.respond(res)\n }\n}\n\npub struct MiddlewareStack {\n handlers: Vec<Box<Middleware + Send + Sync>>,\n error_handlers: Vec<Box<ErrorHandler + Send + Sync>>\n}\n\nimpl MiddlewareStack {\n pub fn add_middleware<T: Middleware> (&mut self, handler: T) {\n self.handlers.push(box handler);\n }\n\n pub fn add_error_handler<T: ErrorHandler> (&mut self, handler: T) {\n self.error_handlers.push(box handler);\n }\n\n pub fn invoke<'a, 'b>(&'a self, req: &mut Request<'b, 'a>, res: &mut Response) {\n for handler in self.handlers.iter() {\n match handler.invoke(req, res) {\n Ok(Halt) => {\n debug!(\"{} {} {} {}\", req.origin.method, req.origin.remote_addr, req.origin.request_uri, res.origin.status);\n return\n }\n Ok(Continue) => {},\n Err(mut err) => {\n warn!(\"{} {} {} {}\", req.origin.method, req.origin.remote_addr, req.origin.request_uri, err.kind);\n for error_handler in self.error_handlers.iter().rev() {\n match error_handler.invoke(&err, req, res) {\n Ok(Continue) => {},\n Ok(Halt) => return,\n \/\/ change the error so that other ErrorHandler\n \/\/ down the stack receive the new error.\n Err(new_err) => err = new_err,\n }\n }\n }\n }\n }\n }\n\n pub fn new () -> MiddlewareStack {\n MiddlewareStack{\n handlers: Vec::new(),\n error_handlers: Vec::new()\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>more multiprobe progress<commit_after>use std::cmp::Ordering;\n\nfn bucket_distance(fi: f64, hi: f64, delta: i32, W: f64) -> f64 {\n if delta == -1 {\n W*(fi - hi)\n } else if delta == 1 {\n W - W*(fi - hi)\n } else {\n 0 as f64\n }\n}\nfn compute_sorted_i_delta<T>(q: &[T], f_sig: &[f64], h_sig: &[f64], W: f64) -> Vec<(usize, i32)> {\n let mut intermediate_vec: Vec<((usize,i32), f64)> = f_sig.iter().zip(h_sig.iter()).enumerate().flat_map(|(i, (fi, hi))| {\n vec![((i, 1), bucket_distance(*fi, *hi, 1, W)),\n ((i, -1), bucket_distance(*fi, *hi, -1, W))].into_iter()\n }).collect();\n intermediate_vec.sort_by(|a, b| {\n if a.1 > b.1 {\n Ordering::Greater\n } else if a.1 < b.1 {\n Ordering::Less\n } else {\n Ordering::Equal\n }\n });\n intermediate_vec.iter().map(|a| {a.0}).collect()\n}\n#[test]\nfn sorted_delta_test() {\n let test_q = vec![1.0,2.0,3.0,4.0,5.0];\n let f_sig = vec![1.5,1.2,2.2];\n let h_sig = vec![1.0,1.0,2.0];\n let W = 10.0;\n compute_sorted_i_delta(&test_q, &f_sig, &h_sig, W);\n}\nfn score_set(perturbation_set: &[usize], square_zj_list: &[f64]) -> f64 {\n perturbation_set.iter().map(|ind| {square_zj_list[*ind]}).sum()\n}\n\n#[derive(PartialEq)]\nstruct PerturbationSet<'a> {\n data: Vec<usize>,\n zj_list: &'a Vec<f64>\n}\n\nimpl<'a> Eq for PerturbationSet<'a> {}\n\nimpl<'a> Ord for PerturbationSet<'a> {\n fn cmp(&self, other: &PerturbationSet) -> Ordering {\n let self_score = score_set(&(self.data), self.zj_list);\n let other_score = score_set(&(other.data), other.zj_list);\n \/\/ compare with a small tolerance in both directions so the ordering stays consistent\n if (self_score - other_score).abs() <= 1e-6 {\n Ordering::Equal\n } else if self_score > other_score {\n 
Ordering::Greater\n } else {\n Ordering::Less\n }\n }\n}\n\nimpl<'a> PartialOrd for PerturbationSet<'a> {\n fn partial_cmp(&self, other: &PerturbationSet) -> Option<Ordering> {\n let self_score = score_set(&(self.data), self.zj_list);\n let other_score = score_set(&(other.data), other.zj_list);\n self_score.partial_cmp(&other_score)\n }\n}\nimpl<'a> PerturbationSet<'a> {\n fn shift(&self) -> PerturbationSet {\n let mut new_data = Vec::new();\n let max_val = self.data.iter().max().unwrap();\n for x in self.data.iter() {\n if *x == *max_val {\n new_data.push(*x+1);\n } else {\n new_data.push(*x);\n }\n }\n PerturbationSet {\n data: new_data,\n zj_list: self.zj_list\n }\n }\n fn expand(&self) -> PerturbationSet {\n let mut new_data = Vec::new();\n let max_val = self.data.iter().max().unwrap();\n for x in self.data.iter() {\n new_data.push(*x);\n }\n new_data.push(*max_val + 1);\n PerturbationSet {\n data: new_data,\n zj_list: self.zj_list\n }\n } \n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix clippy: No need to wrap in Err(_) here<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add create without state method to WidgetFactory<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Reorder stuff in eq<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adicionando Estrutura de dados de um Grafo para a linguagem de programação Rust, baseado na Orientação a Objetos do Rust e com alto desempenho avaliado via entregas de atividades envolvendo grafos no beecrowd<commit_after>\/*\n\tAutor: Gustavo Michels de Camargo\n\n\tProjeto: Algoritmo estrural de um Grafo\n\n\n*\/\n\n\nuse std::{collections::HashMap};\n\n\/\/ Matriz de Vector usada ara construir o Grafo.\ntype MatrizGrafo = Vec<Vec<isize>>;\n\n\n\n\/\/ O dicionario serve para podermos dar entrada a qualquer String e ter como abstrair suas posições dentro.\n\/\/ da matriz numerica, isso serve apenas para fins de uso, não requerer transcrever um Nodo com X para valor numerico.\n#[derive(PartialEq, Eq, Clone, Debug)]\nstruct Grafo {\n\tmatriz: MatrizGrafo,\n\tdicionario: HashMap<String, usize>,\n\tbicondicional: bool\n}\n\ntrait Projeto {\n\t\/\/ Geral\n\tfn new(tamanho: usize, tipo: &str) -> Grafo;\n\n\t\/\/ Funcçoes envolvendo o dicionario - Usuario\n\tfn usr_pegar_indice(&self, chave: String) -> usize;\n\tfn usr_pegar_chave(&self, indice: usize) -> String;\n\n\t\/\/ Usuario\n\tfn usr_adicionar_conexao(&mut self, a: String, b: String, valor: isize);\n\tfn usr_remover_conexao(&mut self, a: String, b: String);\n\tfn usr_numero_conexoes(&self, no: String) -> usize;\n\tfn usr_verificar_se_existe_conexao(&self, a: String, b: String) -> bool;\n\tfn usr_conexoes(&self, a: String) -> Vec<String>;\n\n\t\/\/ Maquina \/ uso para os Algoritmos\n\tfn adicionar_conexao(&mut self, a: usize, b: usize, valor: isize);\n\tfn remover_conexao(&mut self, a: usize, b: usize);\n\tfn numero_conexoes(&self, no: usize) -> usize;\n\tfn verificar_se_existe_conexao(&self, a: usize, b: usize) -> bool;\n\tfn conexoes(&self, a: usize) -> Vec<usize>;\n\n\t\/\/ Algoritmos que atuam sobre Grafos\n}\n\n\n\/\/ Este é a implementação da \"Classe\" de um grafo.\nimpl Projeto for Grafo {\n\t\/\/ Tamanho: Numero Maximo de Vertices que a matriz ppode usar de 0 até tamanho.\n\t\/\/ Em notação matematica relativa a limites: [0, tamanho).\n\t\/\/ Toda função que começa com 'usr_'(usr = usuario) é a versão da função que deve ser usada para interagir diretamente com o usuario.\n\t\/\/ As funções de mesmo nome mas sem 'usr_' deve ser usada 
apenas dentro dos algoritmos, como o Dijkstra para menor caminho entre dois nos.\n\t\/\/ Fiz assim para otimizar processamento e descartar necessidade de acessar e \n\t\/\/ consultaro dicionario o tempo todo quando se apenas como Objetivo encontrar um menor caminho com Dijkstra por exemplo.\n\t\n\t\/\/ Apenas essa função foge a regra por ser universal\n\tfn new(tamanho: usize, tipo: &str) -> Grafo {\n\t\tGrafo {\n\t\t\tmatriz: vec![vec![-1; tamanho]; tamanho],\n\t\t\tdicionario: HashMap::new(),\n\t\t\tbicondicional: match tipo {\n\t\t\t\t\"->\" => false, \/\/ Condicional\n\t\t\t\t\"<->\" | _ => true \/\/ Bicondicional\n\t\t\t}\n\t\t}\n\t}\n\n\t\/\/ ---- Funções para uso direto do usuario ----\n\n\t\/\/ Retorna o indice da matriz relacionada a chave\n\tfn usr_pegar_indice(&self, chave: String) -> usize {\n\t\tif self.dicionario.contains_key(&chave) {\n\t\t\treturn (&self.dicionario.get(&chave)).unwrap().clone();\n\t\t}\n\t\t\n\t\treturn 0;\n\t}\n\n\t\/\/ Retorna a chave do dicionario relacionada ao valor do indice da matriz do grafo\n\tfn usr_pegar_chave(&self, indice: usize) -> String {\n\t\tfor (key, value) in self.dicionario.iter() {\n\t\t\tif *value == indice {\n\t\t\t\treturn (*key).clone();\n\t\t\t}\n\t\t}\n\n\t\treturn \"\".to_string();\n\t}\n\t\n\t\/\/ Conecta Dois vertices\n\tfn usr_adicionar_conexao(&mut self, a: String, b: String, valor: isize) {\n\t\tif !self.dicionario.contains_key(&a){\n\t\t\tlet num: usize = self.dicionario.len();\n\t\t\tself.dicionario.insert(a.to_owned(), num);\n\t\t}\n\n\t\tif !self.dicionario.contains_key(&b){\n\t\t\tlet num: usize = self.dicionario.len();\n\t\t\tself.dicionario.insert(b.to_owned(), num);\n\t\t}\n\n\t\tlet (valor_a, valor_b): (usize, usize) = (self.usr_pegar_indice(a), self.usr_pegar_indice(b));\n\n\t\tself.matriz[valor_a][valor_b] = valor;\n\n\t\tif self.bicondicional {\n\t\t\tself.matriz[valor_b][valor_a] = valor;\n\t\t}\n\t\t\n\t}\n\n\tfn usr_remover_conexao(&mut self, a: String, b: String) {\n\t\tlet (valor_a, valor_b): (usize, usize) = (self.usr_pegar_indice(a), self.usr_pegar_indice(b));\n\n\t\tself.matriz[valor_a][valor_b] = -1;\n\t\tself.matriz[valor_b][valor_a] = -1;\n\t}\n\n\t\/\/ Retorba o numero de vertices na qual ele se conecta\n\tfn usr_numero_conexoes(&self, no: String) -> usize {\n\t\tself.matriz[self.usr_pegar_indice(no)].iter()\n\t\t\t\t\t\t.filter(|x| **x >= 0)\n\t\t\t\t\t\t.collect::<Vec<&isize>>()\n\t\t\t\t\t\t.len() as usize | 0\n\t}\n\n\t\/\/ Verifica se dois nos estão conectados\n\tfn usr_verificar_se_existe_conexao(&self, a: String, b: String) -> bool {\n\t\tself.matriz[self.usr_pegar_indice(a)][self.usr_pegar_indice(b)] >= 0\n\t}\n\n\n\tfn usr_conexoes(&self, a: String) -> Vec<String> {\n\t\tlet mut result: Vec<String> = Vec::new();\n\t\tlet a_value: usize = self.usr_pegar_indice(a);\n\n\t\tfor i in 0..self.matriz[a_value].len() {\n\t\t\tif a_value != i && self.matriz[a_value][i] > -1 {\n\t\t\t\tresult.push(self.usr_pegar_chave(i));\n\t\t\t}\n\t\t}\n\n\t\treturn result;\n\t}\n\n\n\n\t\/\/ ---- Funções para uso direto dos algoritmos (tem melhor perfomance) ----\n\n\n\t\/\/ Retorna um array dos indice de todos os nos na qual o no 'a' se conecta\n\t\/\/ Esera usar esta função apenas para usos proprios dentro do Grafo, como em algoritmos tipo dijkstra\n\t\/\/ Conecta Dois vertices\n\tfn adicionar_conexao(&mut self, a: usize, b: usize, valor: isize) {\n\t\tself.matriz[a][b] = valor;\n\n\t\tif self.bicondicional {\n\t\t\tself.matriz[b][a] = valor;\n\t\t}\n\t}\n\n\tfn remover_conexao(&mut self, a: usize, b: usize) 
{\n\t\tself.matriz[a][b] = -1;\n\t\tself.matriz[b][a] = -1;\n\t}\n\n\t\/\/ Retorba o numero de vertices na qual ele se conecta\n\tfn numero_conexoes(&self, no: usize) -> usize {\n\t\tself.matriz[no].iter()\n\t\t\t\t\t\t.filter(|x| **x >= 0)\n\t\t\t\t\t\t.collect::<Vec<&isize>>()\n\t\t\t\t\t\t.len() as usize | 0\n\t}\n\n\tfn verificar_se_existe_conexao(&self, a: usize, b: usize) -> bool {\n\t\tself.matriz[a][b] >= 0\n\t}\n\n\tfn conexoes(&self, a: usize) -> Vec<usize> {\n\t\tlet mut result: Vec<usize> = Vec::new();\n\n\t\tfor i in 0..self.matriz[a].len() {\n\t\t\tif a != i && self.matriz[a][i] > -1 {\n\t\t\t\tresult.push(i);\n\t\t\t}\n\t\t}\n\t\treturn result;\n\t}\n\n\n\t\/\/ ---- Funções dos algoritmos ----\n\n\n}\n\n\n\/\/ Main\nfn main() {\n\t\/\/ Grafo com no maximo mil vertices\n\t \/\/ \"->\" = Grafo condicional, \"<->\" = Grafo Bicondicional\n\tlet mut grafo: Grafo = Grafo::new(1000, \"->\");\n\t\n\tgrafo.usr_adicionar_conexao(0.to_string(), 1.to_string(), 1);\n\tgrafo.usr_adicionar_conexao(1.to_string(), 2.to_string(), 1);\n\tgrafo.usr_adicionar_conexao(2.to_string(), 3.to_string(), 1);\n\tgrafo.usr_adicionar_conexao(3.to_string(), 0.to_string(), 1);\n\n\tprintln!(\"{:?}\", grafo.conexoes(2));\n}\n\n\n#[cfg(test)]\nmod test {\n\tuse super::*;\n\n\t#[test]\n\tfn usr_numero_conexoes() {\n\t\tlet mut grafo: Grafo = Grafo::new(1000, \"->\");\n\n\t\tfor i in 1..1000 {\n\t\t\tgrafo.usr_adicionar_conexao(0.to_string(), i.to_string(), 1);\n\t\t}\n\n\t\tassert_eq!(grafo.usr_numero_conexoes(0.to_string()), 999);\n\t\tassert_eq!(grafo.usr_numero_conexoes(0.to_string()), grafo.numero_conexoes(0));\n\t}\n\n\t#[test]\n\tfn numero_conexoes() {\n\t\tlet mut grafo: Grafo = Grafo::new(1000, \"<->\");\n\n\t\tfor i in 1..1000 {\n\t\t\tgrafo.adicionar_conexao(0, i, 1);\n\t\t}\n\n\t\tassert_eq!(grafo.numero_conexoes(0), 999);\n\t\tassert_eq!(grafo.numero_conexoes(0), grafo.usr_numero_conexoes(0.to_string()));\n\t}\n} \n<|endoftext|>"} {"text":"<commit_before><commit_msg>Day 20 in rust<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests\/lib.rs: tests for enclosure fetching<commit_after>extern crate pink_spider;\n\n#[test]\nfn it_works() {\n assert!(true);\n}\n\n#[test]\nfn fetch_apple_music_track() {\n let song = pink_spider::apple_music::fetch_song(\"jp\", \"1160715431\").unwrap();\n let track = pink_spider::model::Track::from_am_song(&song);\n assert_eq!(track.identifier, \"1160715431\");\n assert_eq!(track.title, \"A Short Film\");\n assert_eq!(track.owner_name.unwrap_or_default(), \"LILI LIMIT\");\n let artists = track.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"LILI LIMIT\");\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n}\n\n#[test]\nfn fetch_apple_music_album() {\n let am_album = pink_spider::apple_music::fetch_album(\"jp\", \"1160715126\").unwrap();\n let album = pink_spider::model::Album::from_am_album(&am_album);\n assert_eq!(album.identifier, \"1160715126\");\n assert_eq!(album.title, \"a.k.a\");\n assert_eq!(album.owner_name.unwrap_or_default(), \"LILI LIMIT\");\n let artists = album.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"LILI LIMIT\");\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n}\n\n#[test]\nfn fetch_spotify_track() {\n let sp_track = pink_spider::spotify::fetch_track(\"3n3Ppam7vgaVa1iaRUc9Lp\").unwrap();\n let track = pink_spider::model::Track::from_sp_track(&sp_track);\n assert_eq!(track.identifier, \"3n3Ppam7vgaVa1iaRUc9Lp\");\n assert_eq!(track.title, \"Mr. 
Brightside\");\n assert_eq!(track.owner_name.unwrap_or_default(), \"The Killers\");\n let artists = track.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"The Killers\");\n println!(\"{:?}\", artist);\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n}\n\n#[test]\nfn fetch_spotify_album() {\n let sp_album = pink_spider::spotify::fetch_album(\"4OHNH3sDzIxnmUADXzv2kT\").unwrap();\n let album = pink_spider::model::Album::from_sp_album(&sp_album);\n assert_eq!(album.identifier, \"4OHNH3sDzIxnmUADXzv2kT\");\n assert_eq!(album.title, \"Hot Fuss (Deluxe Version)\");\n assert_eq!(album.owner_name.unwrap_or_default(), \"The Killers\");\n let artists = album.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"The Killers\");\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n assert!(album.tracks.len() > 0);\n}\n\n#[test]\nfn fetch_soundcloud_track() {\n let sc_track = pink_spider::soundcloud::fetch_track(\"371851634\").unwrap();\n let track = pink_spider::model::Track::from_sc_track(&sc_track);\n assert_eq!(track.identifier, \"371851634\");\n assert_eq!(track.title, \"Down Wit That\");\n assert_eq!(track.owner_name.unwrap_or_default(), \"\\\"Chance The Rapper\\\"\");\n let artists = track.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"\\\"Chance The Rapper\\\"\");\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n}\n\n#[test]\nfn fetch_youtube_track() {\n let yt_video = pink_spider::youtube::fetch_video(\"Wr5f6hpYxmE\").unwrap();\n let track = pink_spider::model::Track::from_yt_video(&yt_video);\n assert_eq!(track.identifier, \"Wr5f6hpYxmE\");\n assert_eq!(track.title, \"Cornelius 『あなたがいるなら』If You're Here\");\n assert_eq!(track.owner_name.unwrap_or_default(), \"corneliusofficial\");\n let artists = track.artists.unwrap();\n let artist = &artists[0];\n assert_eq!(artist.name, \"corneliusofficial\");\n assert!(artist.clone().artwork_url.unwrap().len() > 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Created rough framework for simulated broker<commit_after>\/\/! Simulated broker used for backtests. Contains facilities for simulating trades,\n\/\/! 
managing balances, and reporting on statistics from previous trades.\n\nuse algobot_util::tick::*;\n\n\/\/\/ A simulated broker that is used as the endpoint for trading activity in backtests.\npub struct Broker {\n accounts: Vec<Ledger>\n}\n\n\/\/ TODO: Wire TickSink into Broker so that the broker always receives up-to-date data\n\n\/\/\/ Settings for the simulated broker that determine things like trade fees,\n\/\/\/ estimated slippage, etc.\npub struct BrokerSettings {\n\n}\n\nimpl Broker {\n pub fn new(settings: BrokerSettings) {\n unimplemented!();\n }\n\n \/\/\/ Called each time a new tick is released by the backtester\n pub fn tick(t: SymbolTick) {\n unimplemented!();\n }\n}\n\n\/\/\/ Any action that the platform can take using the broker\n#[derive(Debug)]\npub enum BrokerAction {\n MarketBuy{symbol: String, size: usize},\n MarketStop{symbol: String, size: usize, stop: f64}\n}\n\n\/\/\/ A simulated account that keeps track of open positions, historical trades, and\n\/\/\/ manages balances.\nstruct Ledger {\n\n}\n\nimpl Ledger {\n pub fn new(starting_balance: usize) {\n unimplemented!();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Missing dot<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test that all arms of an alt may result in fail<commit_after>\/\/ When all branches of an alt expression result in fail, the entire\n\/\/ alt expression results in fail.\n\nfn main() {\n auto x = alt (true) {\n case (true) {\n 10\n }\n case (true) {\n alt (true) {\n case (true) {\n fail\n }\n case (false) {\n fail\n }\n }\n }\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added example of converting trees to vectors.<commit_after>extern crate presort;\n\nuse presort::PresortedVec;\n\nstruct TreeNode<T> {\n data: T,\n children: Vec<Tree<T>>,\n}\n\ntype Tree<T> = Box<TreeNode<T>>;\n\nfn tree<T>(data: T, children: Vec<Tree<T>>) -> Tree<T> {\n Box::new(TreeNode { data: data, children: children })\n}\n\nfn dump<T: Ord>(tree: &Tree<T>, vec: &mut PresortedVec<T>) {\n panic!(\"Not done yet\");\n}\n\n#[test]\nfn test_tree() {\n let tree = tree(37, vec![]);\n let mut vec = PresortedVec::new();\n dump(&tree, &mut vec);\n assert_eq!(vec.sorted_iter().collect::<Vec<&usize>>(), vec![&37]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[No-auto] lib\/domain\/mail: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Specialized demo of SDL for use on platforms that really want to\n\/\/! initialize and own the main routine and thread. This is meant to be\n\/\/! linked with object code compiled from one of the SDL_main variants\n\/\/! that one can find beneath <SDL-distribution>\/src\/main\/.\n\/\/!\n\/\/! For example on Mac OS X one can build a runnable program from this\n\/\/! with something along the lines of:\n\/\/!\n\/\/! rustc src\/sdl-demo\/sdl_main.rs -L. \\\n\/\/! 
-C link-args=\"-lSDLmain -lSDL -Wl,-framework,Cocoa\"\n\n#![no_main]\n\nextern crate sdl;\n\nuse std::rand::Rng;\n\nuse sdl::video::{SurfaceFlag, VideoFlag};\nuse sdl::event::{Event, Key};\n\n#[no_mangle]\n#[allow(non_snake_case)]\npub extern \"C\" fn SDL_main() {\n real_main()\n}\n\npub fn real_main() {\n sdl::init([sdl::InitFlag::Video].as_slice());\n sdl::wm::set_caption(\"rust-sdl demo - video\", \"rust-sdl\");\n\n let mut rng = std::rand::thread_rng();\n let screen = match sdl::video::set_video_mode(800, 600, 32,\n [SurfaceFlag::HWSurface].as_slice(),\n [VideoFlag::DoubleBuf].as_slice()) {\n Ok(screen) => screen,\n Err(err) => panic!(\"failed to set video mode: {}\", err)\n };\n\n \/\/ Note: You'll want to put this and the flip call inside the main loop\n \/\/ but we don't as to not startle epileptics\n for i in range(0us, 10) {\n for j in range(0us, 10) {\n screen.fill_rect(Some(sdl::Rect {\n x: (i as i16) * 800 \/ 10,\n y: (j as i16) * 600 \/ 10,\n w: 800 \/ 10,\n h: 600 \/ 10\n }), rng.gen::<sdl::video::Color>());\n }\n }\n\n screen.flip();\n\n 'main : loop {\n 'event : loop {\n match sdl::event::poll_event() {\n Event::Quit => break 'main,\n Event::None => break 'event,\n Event::Key(k, _, _, _)\n if k == Key::Escape\n => break 'main,\n _ => {}\n }\n }\n }\n\n sdl::quit();\n}\n<commit_msg>sdl_main: fix range and slice warnings<commit_after>\/\/! Specialized demo of SDL for use on platforms that really want to\n\/\/! initialize and own the main routine and thread. This is meant to be\n\/\/! linked with object code compiled from one of the SDL_main variants\n\/\/! that one can find beneath <SDL-distribution>\/src\/main\/.\n\/\/!\n\/\/! For example on Mac OS X one can build a runnable program from this\n\/\/! with something along the lines of:\n\/\/!\n\/\/! rustc src\/sdl-demo\/sdl_main.rs -L. \\\n\/\/! 
-C link-args=\"-lSDLmain -lSDL -Wl,-framework,Cocoa\"\n\n#![no_main]\n\nextern crate sdl;\n\nuse std::rand::Rng;\n\nuse sdl::video::{SurfaceFlag, VideoFlag};\nuse sdl::event::{Event, Key};\n\n#[no_mangle]\n#[allow(non_snake_case)]\npub extern \"C\" fn SDL_main() {\n real_main()\n}\n\npub fn real_main() {\n sdl::init(&[sdl::InitFlag::Video]);\n sdl::wm::set_caption(\"rust-sdl demo - video\", \"rust-sdl\");\n\n let mut rng = std::rand::thread_rng();\n let screen = match sdl::video::set_video_mode(800, 600, 32,\n &[SurfaceFlag::HWSurface],\n &[VideoFlag::DoubleBuf]) {\n Ok(screen) => screen,\n Err(err) => panic!(\"failed to set video mode: {}\", err)\n };\n\n \/\/ Note: You'll want to put this and the flip call inside the main loop\n \/\/ but we don't as to not startle epileptics\n for i in 0us..10 {\n for j in 0us..10 {\n screen.fill_rect(Some(sdl::Rect {\n x: (i as i16) * 800 \/ 10,\n y: (j as i16) * 600 \/ 10,\n w: 800 \/ 10,\n h: 600 \/ 10\n }), rng.gen::<sdl::video::Color>());\n }\n }\n\n screen.flip();\n\n 'main : loop {\n 'event : loop {\n match sdl::event::poll_event() {\n Event::Quit => break 'main,\n Event::None => break 'event,\n Event::Key(k, _, _, _)\n if k == Key::Escape\n => break 'main,\n _ => {}\n }\n }\n }\n\n sdl::quit();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Basic shuffle<commit_after>\n\n\nfn main(){\n\n\n let x: usize = 5;\n\n let mut vec = vec![0; x];\n let i:usize;\n\n let mut v_copy ;\n\n for i in 0..x{\n vec[i] = i;\n println!(\"{}\",vec[i]);\n }\n\n v_copy = create_new(&vec);\n\n\n let mut j : usize;\n j = 0;\n for i in 0..x\/2{\n vec[j] = v_copy[i];\n vec[j+1] = v_copy[i+x\/2];\n j += 2;\n }\n\n for i in 0..x{\n println!(\"{}\",vec[i]);\n }\n\n \n}\n\n\nfn create_new<T: Clone>(vec: &[T]) -> Vec<T> {\n let mut newvec = vec.to_owned();\n newvec\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub 
bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n debugln!(\"Reset: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = ctrl & (0xFFFFFFFF - CTRL_HCFS);\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Enable: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Port Enumeration: {:X}\", self.regs.control.read());\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! 
tds.is_empty() {\n let ed = box Ed {\n \/\/TODO: Remove 1 << 13, it sets it to low speed\n flags: 0x3FF << 16 | 1 << 13 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 {\n \/\/unsafe { context_switch(false) };\n }\n let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;\n if condition != 0 {\n debugln!(\"Condition: {:X}\", condition);\n break;\n }\n }\n\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n }\n\n count\n }\n}\n<commit_msg>Prevent halts in virtualbox<commit_after>use alloc::boxed::Box;\n\nuse collections::vec::Vec;\n\nuse core::intrinsics::volatile_load;\nuse core::{mem, slice};\n\nuse drivers::mmio::Mmio;\nuse drivers::pci::config::PciConfig;\n\nuse schemes::KScheme;\n\nuse super::hci::{UsbHci, UsbMsg};\nuse super::setup::Setup;\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Gtd {\n flags: u32,\n buffer: u32,\n next: u32,\n end: u32,\n}\n\n#[repr(packed)]\n#[derive(Copy, Clone, Debug, Default)]\nstruct Ed {\n flags: u32,\n tail: u32,\n head: u32,\n next: u32,\n}\n\nconst CTRL_CBSR: u32 = 0b11;\nconst CTRL_PLE: u32 = 1 << 2;\nconst CTRL_IE: u32 = 1 << 3;\nconst CTRL_CLE: u32 = 1 << 4;\nconst CTRL_BLE: u32 = 1 << 5;\nconst CTRL_HCFS: u32 = 0b11 << 6;\nconst CTRL_IR: u32 = 1 << 8;\nconst CTRL_RWC: u32 = 1 << 9;\nconst CTRL_RWE: u32 = 1 << 10;\n\nconst CMD_STS_HCR: u32 = 1;\nconst CMD_STS_CLF: u32 = 1 << 1;\nconst CMD_STS_BLF: u32 = 1 << 2;\nconst CMD_STS_OCR: u32 = 1 << 3;\n\nconst PORT_STS_CCS: u32 = 1;\nconst PORT_STS_PES: u32 = 1 << 1;\nconst PORT_STS_PSS: u32 = 1 << 2;\nconst PORT_STS_POCI: u32 = 1 << 3;\nconst PORT_STS_PPS: u32 = 1 << 8;\nconst PORT_STS_LSDA: u32 = 1 << 9;\nconst PORT_STS_CSC: u32 = 1 << 16;\nconst PORT_STS_PESC: u32 = 1 << 17;\nconst PORT_STS_PSSC: u32 = 1 << 18;\nconst PORT_STS_OCIC: u32 = 1 << 19;\nconst PORT_STS_PRSC: u32 = 1 << 20;\n\n#[repr(packed)]\npub struct OhciRegs {\n pub revision: Mmio<u32>,\n pub control: Mmio<u32>,\n pub cmd_sts: Mmio<u32>,\n pub int_sts: Mmio<u32>,\n pub int_en: Mmio<u32>,\n pub int_dis: Mmio<u32>,\n pub hcca: Mmio<u32>,\n pub period_current: Mmio<u32>,\n pub control_head: Mmio<u32>,\n pub control_current: Mmio<u32>,\n pub bulk_head: Mmio<u32>,\n pub bulk_current: Mmio<u32>,\n pub done_head: Mmio<u32>,\n pub fm_interval: Mmio<u32>,\n pub fm_remain: Mmio<u32>,\n pub fm_num: Mmio<u32>,\n pub periodic_start: Mmio<u32>,\n pub ls_thresh: Mmio<u32>,\n pub rh_desc_a: Mmio<u32>,\n pub rh_desc_b: Mmio<u32>,\n pub rh_sts: Mmio<u32>,\n pub port_sts: [Mmio<u32>; 15],\n}\n\npub struct Ohci {\n pub regs: &'static mut OhciRegs,\n pub irq: u8,\n}\n\nimpl KScheme for Ohci {\n fn on_irq(&mut self, irq: u8) {\n if irq == self.irq {\n \/\/ d(\"OHCI IRQ\\n\");\n }\n }\n\n fn on_poll(&mut self) {\n }\n}\n\nimpl Ohci {\n pub unsafe fn new(mut pci: PciConfig) -> 
Box<Self> {\n pci.flag(4, 4, true); \/\/ Bus mastering\n\n let base = pci.read(0x10) as usize & 0xFFFFFFF0;\n let regs = &mut *(base as *mut OhciRegs);\n\n let mut module = box Ohci {\n regs: regs,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n\n module.init();\n\n return module;\n }\n\n pub unsafe fn init(&mut self) {\n debugln!(\"OHCI on: {:X}, IRQ: {:X}\", (self.regs as *mut OhciRegs) as usize, self.irq);\n\n debugln!(\"Reset: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = ctrl & (0xFFFFFFFF - CTRL_HCFS);\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Enable: {:X}\", self.regs.control.read());\n loop {\n let ctrl = self.regs.control.read();\n let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;\n if ctrl != desired_ctrl {\n self.regs.control.write(desired_ctrl);\n } else {\n break;\n }\n }\n\n debugln!(\"Port Enumeration: {:X}\", self.regs.control.read());\n\n let ndp = self.regs.rh_desc_a.read() & 0xF;\n for i in 0..ndp as usize {\n debugln!(\"Port {}: {:X}\", i, self.regs.port_sts[i].read());\n\n if self.regs.port_sts[i].readf(PORT_STS_CCS) {\n debugln!(\"Device\");\n\n debugln!(\"Enable\");\n while ! self.regs.port_sts[i].readf(PORT_STS_PES) {\n self.regs.port_sts[i].writef(PORT_STS_PES, true);\n }\n\n self.device(i as u8);\n }\n }\n }\n}\n\n\nimpl UsbHci for Ohci {\n fn msg(&mut self, address: u8, endpoint: u8, msgs: &[UsbMsg]) -> usize {\n let mut tds = Vec::new();\n for msg in msgs.iter().rev() {\n let link_ptr = match tds.last() {\n Some(td) => (td as *const Gtd) as u32,\n None => 0\n };\n\n match *msg {\n UsbMsg::Setup(setup) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b00 << 19,\n buffer: (setup as *const Setup) as u32,\n next: link_ptr,\n end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32\n }),\n UsbMsg::In(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::InIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b10 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::Out(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n }),\n UsbMsg::OutIso(ref data) => tds.push(Gtd {\n flags: 0b1111 << 28 | 0b01 << 19,\n buffer: data.as_ptr() as u32,\n next: link_ptr,\n end: data.as_ptr() as u32 + data.len() as u32\n })\n }\n }\n\n let mut count = 0;\n\n if ! tds.is_empty() {\n let ed = box Ed {\n \/\/TODO: Remove 1 << 13, it sets it to low speed\n flags: 0x3FF << 16 | 1 << 13 | (endpoint as u32) << 7 | address as u32,\n tail: 0,\n head: (tds.last().unwrap() as *const Gtd) as u32,\n next: 0\n };\n\n \/\/TODO: Calculate actual bytes\n for td in tds.iter().rev() {\n count += (td.end - td.buffer) as usize;\n }\n\n self.regs.control_head.write((&*ed as *const Ed) as u32);\n while ! self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, true);\n }\n while ! 
self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, true);\n }\n\n for td in tds.iter().rev() {\n let mut spin = 1000000;\n while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 && spin > 0 {\n spin -= 1;\n \/\/unsafe { context_switch(false) };\n }\n let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;\n if condition != 0 {\n debugln!(\"Condition: {:X}\", condition);\n break;\n }\n }\n\n \/*\n while self.regs.cmd_sts.readf(CMD_STS_CLF) {\n self.regs.cmd_sts.writef(CMD_STS_CLF, false);\n }\n *\/\n while self.regs.control.readf(CTRL_CLE) {\n self.regs.control.writef(CTRL_CLE, false);\n }\n self.regs.control_head.write(0);\n }\n\n count\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Generate test cases with a macro.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test iterator chain type length blowup<commit_after>\/\/ run-pass\n\/\/! This snippet causes the type length to blowup exponentially,\n\/\/! so check that we don't accidentially exceed the type length limit.\n\/\/ FIXME: Once the size of iterator adaptors is further reduced,\n\/\/ increase the complexity of this test.\n\nfn main() {\n let c = 2;\n let bv = vec![2];\n let b = bv\n .iter()\n .filter(|a| **a == c);\n\n let _a = vec![1, 2, 3]\n .into_iter()\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .filter(|a| b.clone().any(|b| *b == *a))\n .collect::<Vec<_>>();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Minor refactorings for the guessing game<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Implement a Poisson Disc distribution algorithm.\n\nuse rand;\nuse std::cmp;\n\nuse coord::Coord;\nuse surface::points::Points;\n\n\/\/\/ Container for constructing different randomly sampled distributions.\npub struct Distribution;\n\nimpl Distribution {\n \/\/\/ Return a set of points that have been generated using a Poisson disk sampling\n \/\/\/ algorithm. 
They will be separated from each other at minimum by an input distance.\n pub fn poisson(rmin: f64, size_x: f64, size_y: f64) -> Points {\n self::density::PoissonDistribution::new(rmin, size_x, size_y)\n }\n\n \/\/\/ Return a set of an input number of points that have been generated using\n \/\/\/ a blue noise sampling algorithm.\n pub fn blue_noise(num_points: u64, size_x: f64, size_y: f64) -> Points {\n self::number::BlueNoiseDistribution::new(num_points, size_x, size_y)\n }\n}\n\nmod number {\n use rand::distributions::IndependentSample;\n use super::*;\n\n pub struct BlueNoiseDistribution;\n\n impl BlueNoiseDistribution {\n pub fn new(num_points: u64, size_x: f64, size_y: f64) -> Points {\n let mut coords = vec![gen_coord(size_x, size_y)];\n const NUM_CANDIDATES_MULTIPLIER: u64 = 1;\n\n for i in 1..num_points {\n let mut current_best = gen_coord(size_x, size_y);\n let mut max_dist = calc_min_dist(current_best, &coords, size_x, size_y);\n\n for _ in 0..(NUM_CANDIDATES_MULTIPLIER * i) {\n let candidate = gen_coord(size_x, size_y);\n let dist = calc_min_dist(candidate, &coords, size_x, size_y);\n\n if dist > max_dist {\n max_dist = dist;\n current_best = candidate;\n }\n };\n\n coords.push(current_best);\n }\n\n Points {\n box_size: Coord::new(size_x, size_y, 0.0),\n coords,\n }\n }\n }\n\n pub fn calc_min_dist(coord: Coord, samples: &[Coord], size_x: f64, size_y: f64) -> f64 {\n use std::f64;\n samples.iter()\n .fold(f64::MAX, |dist, &other| {\n dist.min(calc_toroidal_distance(coord, other, size_x, size_y))\n })\n }\n\n pub fn calc_toroidal_distance(coord: Coord, other: Coord, size_x: f64, size_y: f64) -> f64 {\n let mut dx = (coord.x - other.x).abs();\n let mut dy = (coord.y - other.y).abs();\n\n if dx > size_x \/ 2.0 {\n dx = size_x - dx;\n }\n\n if dy > size_y \/ 2.0 {\n dy = size_y - dy;\n }\n\n dx.powi(2) + dy.powi(2)\n }\n\n fn gen_coord(dx: f64, dy: f64) -> Coord {\n let mut rng = rand::thread_rng();\n let range_x = rand::distributions::Range::new(0.0, dx);\n let range_y = rand::distributions::Range::new(0.0, dy);\n\n Coord::new(range_x.ind_sample(&mut rng), range_y.ind_sample(&mut rng), 0.0)\n }\n}\n\nmod density {\n use rand::distributions::IndependentSample;\n use super::*;\n\n pub struct PoissonDistribution;\n\n impl PoissonDistribution {\n pub fn new(rmin: f64, size_x: f64, size_y: f64) -> Points {\n let mut grid = PoissonGrid::new(rmin, size_x, size_y);\n\n let init_coord = gen_grid_coord(size_x, size_y);\n let mut active: Vec<Coord> = vec![init_coord];\n grid.set_coord(init_coord).expect(\"There was an error when creating the Poisson disc distribution\");\n\n while !active.is_empty() {\n let index = select_coordinate(&active);\n\n if let Some(candidate) = find_candidate(&active[index], &grid) {\n if grid.set_coord(candidate).is_ok() {\n active.push(candidate);\n }\n } else {\n active.remove(index);\n };\n }\n\n Points {\n box_size: Coord::new(size_x, size_y, 0.0),\n coords: grid.into_coords(),\n }\n }\n }\n\n struct PoissonGrid {\n spacing: f64,\n rmin: f64,\n size: (f64, f64),\n shape: (usize, usize),\n cells: Vec<Option<Coord>>,\n }\n\n impl PoissonGrid {\n fn new(rmin: f64, size_x: f64, size_y: f64) -> PoissonGrid {\n let a = rmin \/ 2.0f64.sqrt();\n let nx = (size_x \/ a).ceil() as usize;\n let ny = (size_y \/ a).ceil() as usize;\n\n PoissonGrid {\n spacing: a,\n rmin: rmin,\n size: (size_x, size_y),\n shape: (nx, ny),\n cells: vec![None; nx * ny],\n }\n }\n\n fn cell_at_position(&self, col: usize, row: usize) -> usize {\n let (nx, _) = self.shape;\n row * nx + col\n 
}\n\n fn cell_at_coord(&self, coord: &Coord) -> usize {\n let col = (coord.x \/ self.spacing).floor() as usize;\n let row = (coord.y \/ self.spacing).floor() as usize;\n self.cell_at_position(col, row)\n }\n\n fn collision(&self, coord: &Coord) -> bool {\n let index = self.cell_at_coord(&coord);\n self.get_neighbours(index)\n .iter()\n .filter_map(|opt| opt.map(|c| c.distance(*coord)))\n .any(|r| r < self.rmin)\n }\n\n fn get_neighbours(&self, index: usize) -> Vec<Option<Coord>> {\n let (nx, ny) = self.shape;\n let i = (index % nx) as isize;\n let j = (index \/ nx) as isize;\n\n let mut neighbours = Vec::new();\n\n \/\/ Since rmin = sqrt(2) * spacing we need to check cells\n \/\/ that are up to two positions away\n let (imin, imax) = (cmp::max(0, i - 2), cmp::min(nx as isize, i + 3));\n let (jmin, jmax) = (cmp::max(0, j - 2), cmp::min(ny as isize, j + 3));\n\n for col in imin..imax {\n for row in jmin..jmax {\n let neighbour_index = self.cell_at_position(col as usize, row as usize);\n neighbours.push(self.cells[neighbour_index]);\n }\n }\n\n neighbours\n }\n\n fn into_coords(self) -> Vec<Coord> {\n self.cells.iter().filter_map(|&c| c).collect()\n }\n\n fn set_coord(&mut self, coord: Coord) -> Result<(), &'static str> {\n let index = self.cell_at_coord(&coord);\n\n \/\/ Consistency check for the algorithm: this should never be reached\n \/\/ but we prefer to not panic if it happens\n if self.cells[index].is_some() {\n return Err(\"Cannot add a coordinate to a cell which is already occupied\");\n }\n\n self.cells[index] = Some(coord);\n Ok(())\n }\n }\n\n fn find_candidate(coord: &Coord, grid: &PoissonGrid) -> Option<Coord> {\n const NUM_CANDIDATES: usize = 30;\n\n for _ in 0..NUM_CANDIDATES {\n let candidate = gen_coord_around(&coord, &grid);\n\n if !grid.collision(&candidate) {\n return Some(candidate);\n }\n }\n\n None\n }\n\n fn gen_coord_around(coord: &Coord, grid: &PoissonGrid) -> Coord {\n use std::f64::consts::PI;\n let mut rng = rand::thread_rng();\n let range_dr = rand::distributions::Range::new(grid.rmin, 2.0 * grid.rmin);\n let range_angle = rand::distributions::Range::new(0.0, 2.0 * PI);\n\n let (max_x, max_y) = grid.size;\n\n loop {\n let dr = range_dr.ind_sample(&mut rng);\n let angle = range_angle.ind_sample(&mut rng);\n let x = coord.x + dr * angle.cos();\n let y = coord.y + dr * angle.sin();\n\n if x >= 0.0 && x < max_x && y >= 0.0 && y < max_y {\n return Coord::new(x, y, 0.0);\n }\n }\n }\n\n fn gen_grid_coord(x: f64, y: f64) -> Coord {\n let mut rng = rand::thread_rng();\n let range_x = rand::distributions::Range::new(0.0, x);\n let range_y = rand::distributions::Range::new(0.0, y);\n\n Coord::new(range_x.ind_sample(&mut rng), range_y.ind_sample(&mut rng), 0.0)\n }\n\n fn select_coordinate(coords: &Vec<Coord>) -> usize {\n let mut rng = rand::thread_rng();\n let range = rand::distributions::Range::new(0, coords.len());\n\n range.ind_sample(&mut rng)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn create_poisson_distribution() {\n let rmin = 1.0;\n let (size_x, size_y) = (5.0, 10.0);\n let distribution = density::PoissonDistribution::new(rmin, size_x, size_y);\n\n \/\/ We can only assert that no coordinates are within the minimum\n \/\/ distance of each other, or outside the box.\n assert!(distribution.coords.len() > 0);\n\n for (i, &x1) in distribution.coords.iter().enumerate() {\n assert!(x1.x >= 0.0 && x1.x <= size_x);\n assert!(x1.y >= 0.0 && x1.y <= size_y);\n\n for &x2 in distribution.coords.iter().skip(i + 1) {\n assert!(Coord::distance(x1, x2) 
>= rmin);\n }\n }\n }\n\n #[test]\n fn create_blue_noise_distribution() {\n let num_points = 152;\n let (size_x, size_y) = (5.0, 10.0);\n let distribution = number::BlueNoiseDistribution::new(num_points, size_x, size_y);\n\n \/\/ We can only (easily) assert that we have the input number of points\n \/\/ and that none are outside of the box.\n assert_eq!(distribution.coords.len() , num_points as usize);\n\n for coord in distribution.coords {\n assert!(coord.x >= 0.0 && coord.x <= size_x);\n assert!(coord.y >= 0.0 && coord.y <= size_y);\n }\n }\n\n #[test]\n fn calculate_toroidal_distance_is_squared_and_wraps_coordinates_to_closest() {\n let coord = Coord::new(0.0, 0.0, 10.0); \/\/ skip the z value\n let other = Coord::new(1.5, 2.5, -10.0);\n\n let (size_x, size_y) = (2.0_f64, 3.0_f64);\n\n \/\/ Both coordinates are closer by wrapping around! Also, ignore the z coordinate!\n let dist = (size_x - 1.5).powi(2) + (size_y - 2.5).powi(2);\n\n assert_eq!(number::calc_toroidal_distance(coord, other, size_x, size_y), dist);\n }\n\n #[test]\n fn calculate_min_distance_squared_between_a_coordinate_and_a_set() {\n let coord = Coord::new(2.0, 2.0, 2.0);\n\n let candidates = vec![\n Coord::new(0.0, 0.0, 0.0), \/\/ distance squared: 2^2 + 2^2 = 8\n Coord::new(3.0, 1.0, 3.0), \/\/ 1^2 + 1^2 = 2, minimum!\n Coord::new(5.0, 5.0, 5.0) \/\/ 3^2 + 3^2 = 18\n ];\n\n let dist = 2.0 * 1.0_f64.powi(2);\n\n let (size_x, size_y) = (10.0, 10.0);\n\n assert_eq!(number::calc_min_dist(coord, &candidates, size_x, size_y), dist);\n }\n}\n<commit_msg>Comment about performance improvements for `BlueNoise` construction<commit_after>\/\/! Implement a Poisson Disc distribution algorithm.\n\nuse rand;\nuse std::cmp;\n\nuse coord::Coord;\nuse surface::points::Points;\n\n\/\/\/ Container for constructing different randomly sampled distributions.\npub struct Distribution;\n\nimpl Distribution {\n \/\/\/ Return a set of points that have been generated using a Poisson disk sampling\n \/\/\/ algorithm. They will be separated from each other at minimum by an input distance.\n pub fn poisson(rmin: f64, size_x: f64, size_y: f64) -> Points {\n self::density::PoissonDistribution::new(rmin, size_x, size_y)\n }\n\n \/\/\/ Return a set of an input number of points that have been generated using\n \/\/\/ a blue noise sampling algorithm.\n pub fn blue_noise(num_points: u64, size_x: f64, size_y: f64) -> Points {\n self::number::BlueNoiseDistribution::new(num_points, size_x, size_y)\n }\n}\n\nmod number {\n use rand::distributions::IndependentSample;\n use super::*;\n\n pub struct BlueNoiseDistribution;\n\n impl BlueNoiseDistribution {\n \/\/ This algorithm is quite expensive since every candidate has to be checked\n \/\/ against every coordinate that has been constructed, and the number of candidates\n \/\/ which we create increases equally. Thus it scales very poorly with the number\n \/\/ of points that are created.\n \/\/\n \/\/ Could be sped up by implementing a grid for the final coordinates and compare\n \/\/ every candidate to those inside a neighbouring area only. 
Also by making it\n \/\/ parallel, but more power cannot substitute bad algorithms.\n \/\/\n \/\/ **Note that before any performance improvements are made, a benchmark test should\n \/\/ be created!**\n pub fn new(num_points: u64, size_x: f64, size_y: f64) -> Points {\n let mut coords = vec![gen_coord(size_x, size_y)];\n const NUM_CANDIDATES_MULTIPLIER: u64 = 1;\n\n for i in 1..num_points {\n let mut current_best = gen_coord(size_x, size_y);\n let mut max_dist = calc_min_dist(current_best, &coords, size_x, size_y);\n\n for _ in 0..(NUM_CANDIDATES_MULTIPLIER * i) {\n let candidate = gen_coord(size_x, size_y);\n let dist = calc_min_dist(candidate, &coords, size_x, size_y);\n\n if dist > max_dist {\n max_dist = dist;\n current_best = candidate;\n }\n };\n\n coords.push(current_best);\n }\n\n Points {\n box_size: Coord::new(size_x, size_y, 0.0),\n coords,\n }\n }\n }\n\n pub fn calc_min_dist(coord: Coord, samples: &[Coord], size_x: f64, size_y: f64) -> f64 {\n use std::f64::MAX;\n\n samples.iter()\n .fold(MAX, |dist, &other| {\n dist.min(calc_toroidal_distance(coord, other, size_x, size_y))\n })\n }\n\n pub fn calc_toroidal_distance(coord: Coord, other: Coord, size_x: f64, size_y: f64) -> f64 {\n let mut dx = (coord.x - other.x).abs();\n let mut dy = (coord.y - other.y).abs();\n\n if dx > size_x \/ 2.0 {\n dx = size_x - dx;\n }\n\n if dy > size_y \/ 2.0 {\n dy = size_y - dy;\n }\n\n dx.powi(2) + dy.powi(2)\n }\n\n fn gen_coord(dx: f64, dy: f64) -> Coord {\n let mut rng = rand::thread_rng();\n let range_x = rand::distributions::Range::new(0.0, dx);\n let range_y = rand::distributions::Range::new(0.0, dy);\n\n Coord::new(range_x.ind_sample(&mut rng), range_y.ind_sample(&mut rng), 0.0)\n }\n}\n\nmod density {\n use rand::distributions::IndependentSample;\n use super::*;\n\n pub struct PoissonDistribution;\n\n impl PoissonDistribution {\n pub fn new(rmin: f64, size_x: f64, size_y: f64) -> Points {\n let mut grid = PoissonGrid::new(rmin, size_x, size_y);\n\n let init_coord = gen_grid_coord(size_x, size_y);\n let mut active: Vec<Coord> = vec![init_coord];\n grid.set_coord(init_coord).expect(\"There was an error when creating the Poisson disc distribution\");\n\n while !active.is_empty() {\n let index = select_coordinate(&active);\n\n if let Some(candidate) = find_candidate(&active[index], &grid) {\n if grid.set_coord(candidate).is_ok() {\n active.push(candidate);\n }\n } else {\n active.remove(index);\n };\n }\n\n Points {\n box_size: Coord::new(size_x, size_y, 0.0),\n coords: grid.into_coords(),\n }\n }\n }\n\n struct PoissonGrid {\n spacing: f64,\n rmin: f64,\n size: (f64, f64),\n shape: (usize, usize),\n cells: Vec<Option<Coord>>,\n }\n\n impl PoissonGrid {\n fn new(rmin: f64, size_x: f64, size_y: f64) -> PoissonGrid {\n let a = rmin \/ 2.0f64.sqrt();\n let nx = (size_x \/ a).ceil() as usize;\n let ny = (size_y \/ a).ceil() as usize;\n\n PoissonGrid {\n spacing: a,\n rmin: rmin,\n size: (size_x, size_y),\n shape: (nx, ny),\n cells: vec![None; nx * ny],\n }\n }\n\n fn cell_at_position(&self, col: usize, row: usize) -> usize {\n let (nx, _) = self.shape;\n row * nx + col\n }\n\n fn cell_at_coord(&self, coord: &Coord) -> usize {\n let col = (coord.x \/ self.spacing).floor() as usize;\n let row = (coord.y \/ self.spacing).floor() as usize;\n self.cell_at_position(col, row)\n }\n\n fn collision(&self, coord: &Coord) -> bool {\n let index = self.cell_at_coord(&coord);\n self.get_neighbours(index)\n .iter()\n .filter_map(|opt| opt.map(|c| c.distance(*coord)))\n .any(|r| r < self.rmin)\n }\n\n fn 
get_neighbours(&self, index: usize) -> Vec<Option<Coord>> {\n let (nx, ny) = self.shape;\n let i = (index % nx) as isize;\n let j = (index \/ nx) as isize;\n\n let mut neighbours = Vec::new();\n\n \/\/ Since rmin = sqrt(2) * spacing we need to check cells\n \/\/ that are up to two positions away\n let (imin, imax) = (cmp::max(0, i - 2), cmp::min(nx as isize, i + 3));\n let (jmin, jmax) = (cmp::max(0, j - 2), cmp::min(ny as isize, j + 3));\n\n for col in imin..imax {\n for row in jmin..jmax {\n let neighbour_index = self.cell_at_position(col as usize, row as usize);\n neighbours.push(self.cells[neighbour_index]);\n }\n }\n\n neighbours\n }\n\n fn into_coords(self) -> Vec<Coord> {\n self.cells.iter().filter_map(|&c| c).collect()\n }\n\n fn set_coord(&mut self, coord: Coord) -> Result<(), &'static str> {\n let index = self.cell_at_coord(&coord);\n\n \/\/ Consistency check for the algorithm: this should never be reached\n \/\/ but we prefer to not panic if it happens\n if self.cells[index].is_some() {\n return Err(\"Cannot add a coordinate to a cell which is already occupied\");\n }\n\n self.cells[index] = Some(coord);\n Ok(())\n }\n }\n\n fn find_candidate(coord: &Coord, grid: &PoissonGrid) -> Option<Coord> {\n const NUM_CANDIDATES: usize = 30;\n\n for _ in 0..NUM_CANDIDATES {\n let candidate = gen_coord_around(&coord, &grid);\n\n if !grid.collision(&candidate) {\n return Some(candidate);\n }\n }\n\n None\n }\n\n fn gen_coord_around(coord: &Coord, grid: &PoissonGrid) -> Coord {\n use std::f64::consts::PI;\n let mut rng = rand::thread_rng();\n let range_dr = rand::distributions::Range::new(grid.rmin, 2.0 * grid.rmin);\n let range_angle = rand::distributions::Range::new(0.0, 2.0 * PI);\n\n let (max_x, max_y) = grid.size;\n\n loop {\n let dr = range_dr.ind_sample(&mut rng);\n let angle = range_angle.ind_sample(&mut rng);\n let x = coord.x + dr * angle.cos();\n let y = coord.y + dr * angle.sin();\n\n if x >= 0.0 && x < max_x && y >= 0.0 && y < max_y {\n return Coord::new(x, y, 0.0);\n }\n }\n }\n\n fn gen_grid_coord(x: f64, y: f64) -> Coord {\n let mut rng = rand::thread_rng();\n let range_x = rand::distributions::Range::new(0.0, x);\n let range_y = rand::distributions::Range::new(0.0, y);\n\n Coord::new(range_x.ind_sample(&mut rng), range_y.ind_sample(&mut rng), 0.0)\n }\n\n fn select_coordinate(coords: &Vec<Coord>) -> usize {\n let mut rng = rand::thread_rng();\n let range = rand::distributions::Range::new(0, coords.len());\n\n range.ind_sample(&mut rng)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn create_poisson_distribution() {\n let rmin = 1.0;\n let (size_x, size_y) = (5.0, 10.0);\n let distribution = density::PoissonDistribution::new(rmin, size_x, size_y);\n\n \/\/ We can only assert that no coordinates are within the minimum\n \/\/ distance of each other, or outside the box.\n assert!(distribution.coords.len() > 0);\n\n for (i, &x1) in distribution.coords.iter().enumerate() {\n assert!(x1.x >= 0.0 && x1.x <= size_x);\n assert!(x1.y >= 0.0 && x1.y <= size_y);\n\n for &x2 in distribution.coords.iter().skip(i + 1) {\n assert!(Coord::distance(x1, x2) >= rmin);\n }\n }\n }\n\n #[test]\n fn create_blue_noise_distribution() {\n let num_points = 152;\n let (size_x, size_y) = (5.0, 10.0);\n let distribution = number::BlueNoiseDistribution::new(num_points, size_x, size_y);\n\n \/\/ We can only (easily) assert that we have the input number of points\n \/\/ and that none are outside of the box.\n assert_eq!(distribution.coords.len() , num_points as usize);\n\n for coord in 
distribution.coords {\n assert!(coord.x >= 0.0 && coord.x <= size_x);\n assert!(coord.y >= 0.0 && coord.y <= size_y);\n }\n }\n\n #[test]\n fn calculate_toroidal_distance_is_squared_and_wraps_coordinates_to_closest() {\n let coord = Coord::new(0.0, 0.0, 10.0); \/\/ skip the z value\n let other = Coord::new(1.5, 2.5, -10.0);\n\n let (size_x, size_y) = (2.0_f64, 3.0_f64);\n\n \/\/ Both coordinates are closer by wrapping around! Also, ignore the z coordinate!\n let dist = (size_x - 1.5).powi(2) + (size_y - 2.5).powi(2);\n\n assert_eq!(number::calc_toroidal_distance(coord, other, size_x, size_y), dist);\n }\n\n #[test]\n fn calculate_min_distance_squared_between_a_coordinate_and_a_set() {\n let coord = Coord::new(2.0, 2.0, 2.0);\n\n let candidates = vec![\n Coord::new(0.0, 0.0, 0.0), \/\/ distance squared: 2^2 + 2^2 = 8\n Coord::new(3.0, 1.0, 3.0), \/\/ 1^2 + 1^2 = 2, minimum!\n Coord::new(5.0, 5.0, 5.0) \/\/ 3^2 + 3^2 = 18\n ];\n\n let dist = 2.0 * 1.0_f64.powi(2);\n\n let (size_x, size_y) = (10.0, 10.0);\n\n assert_eq!(number::calc_min_dist(coord, &candidates, size_x, size_y), dist);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(default_type_params)]\nextern crate curl;\nextern crate http;\nextern crate hyper;\n\nextern crate test;\n\nuse std::fmt::{mod, Show};\nuse std::from_str::from_str;\nuse std::io::{IoResult, MemReader};\nuse std::io::net::ip::{SocketAddr, ToSocketAddr};\nuse std::os;\nuse std::path::BytesContainer;\n\nuse http::connecter::Connecter;\n\nuse hyper::net;\n\nstatic README: &'static [u8] = include_bin!(\"..\/README.md\");\n\n\nstruct MockStream {\n read: MemReader,\n}\n\nimpl Clone for MockStream {\n fn clone(&self) -> MockStream {\n MockStream::new()\n }\n}\n\nimpl MockStream {\n fn new() -> MockStream {\n let head = b\"HTTP\/1.1 200 OK\\r\\nServer: Mock\\r\\n\\r\\n\";\n let mut res = head.to_vec();\n res.push_all(README);\n MockStream {\n read: MemReader::new(res),\n }\n }\n}\n\nimpl Reader for MockStream {\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n self.read.read(buf)\n }\n}\n\nimpl Writer for MockStream {\n fn write(&mut self, _msg: &[u8]) -> IoResult<()> {\n \/\/ we're mocking, what do we care.\n Ok(())\n }\n}\n\n#[bench]\nfn bench_mock_curl(b: &mut test::Bencher) {\n let mut cwd = os::getcwd();\n cwd.push(\"README.md\");\n let s = format!(\"file:\/\/{}\", cwd.container_as_str().unwrap());\n let url = s.as_slice();\n b.iter(|| {\n curl::http::handle()\n .get(url)\n .header(\"X-Foo\", \"Bar\")\n .exec()\n .unwrap()\n });\n}\n\nstruct Foo;\n\nimpl hyper::header::Header for Foo {\n fn header_name(_: Option<Foo>) -> &'static str {\n \"x-foo\"\n }\n fn parse_header(_: &[Vec<u8>]) -> Option<Foo> {\n None\n }\n}\n\nimpl hyper::header::HeaderFormat for Foo {\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n \"Bar\".fmt(fmt)\n }\n}\n\nimpl net::NetworkStream for MockStream {\n fn peer_name(&mut self) -> IoResult<SocketAddr> {\n Ok(from_str(\"127.0.0.1:1337\").unwrap())\n }\n}\n\nimpl net::NetworkConnector for MockStream {\n fn connect<To: ToSocketAddr>(_addr: To, _scheme: &str) -> IoResult<MockStream> {\n Ok(MockStream::new())\n }\n\n}\n\n#[bench]\nfn bench_mock_hyper(b: &mut test::Bencher) {\n let url = \"http:\/\/127.0.0.1:1337\/\";\n b.iter(|| {\n let mut req = hyper::client::Request::with_stream::<MockStream>(\n hyper::Get, hyper::Url::parse(url).unwrap()).unwrap();\n req.headers_mut().set(Foo);\n\n req\n .start().unwrap()\n .send().unwrap()\n .read_to_string().unwrap()\n });\n}\n\nimpl Connecter for MockStream {\n fn 
connect(_addr: SocketAddr, _host: &str, _use_ssl: bool) -> IoResult<MockStream> {\n Ok(MockStream::new())\n }\n}\n\n#[bench]\nfn bench_mock_http(b: &mut test::Bencher) {\n let url = \"http:\/\/127.0.0.1:1337\/\";\n b.iter(|| {\n let mut req: http::client::RequestWriter<MockStream> = http::client::RequestWriter::new(\n http::method::Get,\n hyper::Url::parse(url).unwrap()\n ).unwrap();\n req.headers.extensions.insert(\"x-foo\".to_string(), \"Bar\".to_string());\n \/\/ cant unwrap because Err contains RequestWriter, which does not implement Show\n let mut res = match req.read_response() {\n Ok(res) => res,\n Err(..) => panic!(\"http response failed\")\n };\n res.read_to_string().unwrap();\n });\n}\n\n<commit_msg>(fix) Fix benchmarks for rust API changes.<commit_after>#![feature(default_type_params)]\nextern crate curl;\nextern crate http;\nextern crate hyper;\n\nextern crate test;\n\nuse std::fmt::{mod, Show};\nuse std::str::from_str;\nuse std::io::{IoResult, MemReader};\nuse std::io::net::ip::{SocketAddr, ToSocketAddr};\nuse std::os;\nuse std::path::BytesContainer;\n\nuse http::connecter::Connecter;\n\nuse hyper::net;\n\nstatic README: &'static [u8] = include_bin!(\"..\/README.md\");\n\n\nstruct MockStream {\n read: MemReader,\n}\n\nimpl Clone for MockStream {\n fn clone(&self) -> MockStream {\n MockStream::new()\n }\n}\n\nimpl MockStream {\n fn new() -> MockStream {\n let head = b\"HTTP\/1.1 200 OK\\r\\nServer: Mock\\r\\n\\r\\n\";\n let mut res = head.to_vec();\n res.push_all(README);\n MockStream {\n read: MemReader::new(res),\n }\n }\n}\n\nimpl Reader for MockStream {\n fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {\n self.read.read(buf)\n }\n}\n\nimpl Writer for MockStream {\n fn write(&mut self, _msg: &[u8]) -> IoResult<()> {\n \/\/ we're mocking, what do we care.\n Ok(())\n }\n}\n\n#[bench]\nfn bench_mock_curl(b: &mut test::Bencher) {\n let mut cwd = os::getcwd().unwrap();\n cwd.push(\"README.md\");\n let s = format!(\"file:\/\/{}\", cwd.container_as_str().unwrap());\n let url = s.as_slice();\n b.iter(|| {\n curl::http::handle()\n .get(url)\n .header(\"X-Foo\", \"Bar\")\n .exec()\n .unwrap()\n });\n}\n\nstruct Foo;\n\nimpl hyper::header::Header for Foo {\n fn header_name(_: Option<Foo>) -> &'static str {\n \"x-foo\"\n }\n fn parse_header(_: &[Vec<u8>]) -> Option<Foo> {\n None\n }\n}\n\nimpl hyper::header::HeaderFormat for Foo {\n fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n \"Bar\".fmt(fmt)\n }\n}\n\nimpl net::NetworkStream for MockStream {\n fn peer_name(&mut self) -> IoResult<SocketAddr> {\n Ok(from_str(\"127.0.0.1:1337\").unwrap())\n }\n}\n\nimpl net::NetworkConnector for MockStream {\n fn connect<To: ToSocketAddr>(_addr: To, _scheme: &str) -> IoResult<MockStream> {\n Ok(MockStream::new())\n }\n\n}\n\n#[bench]\nfn bench_mock_hyper(b: &mut test::Bencher) {\n let url = \"http:\/\/127.0.0.1:1337\/\";\n b.iter(|| {\n let mut req = hyper::client::Request::with_stream::<MockStream>(\n hyper::Get, hyper::Url::parse(url).unwrap()).unwrap();\n req.headers_mut().set(Foo);\n\n req\n .start().unwrap()\n .send().unwrap()\n .read_to_string().unwrap()\n });\n}\n\nimpl Connecter for MockStream {\n fn connect(_addr: SocketAddr, _host: &str, _use_ssl: bool) -> IoResult<MockStream> {\n Ok(MockStream::new())\n }\n}\n\n#[bench]\nfn bench_mock_http(b: &mut test::Bencher) {\n let url = \"http:\/\/127.0.0.1:1337\/\";\n b.iter(|| {\n let mut req: http::client::RequestWriter<MockStream> = http::client::RequestWriter::new(\n http::method::Get,\n hyper::Url::parse(url).unwrap()\n 
).unwrap();\n req.headers.extensions.insert(\"x-foo\".to_string(), \"Bar\".to_string());\n \/\/ cant unwrap because Err contains RequestWriter, which does not implement Show\n let mut res = match req.read_response() {\n Ok(res) => res,\n Err(..) => panic!(\"http response failed\")\n };\n res.read_to_string().unwrap();\n });\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse eutil::slice_to_str;\nuse libc::{c_int};\nuse std::boxed;\nuse std::collections::BTreeMap;\nuse string::{cef_string_userfree_utf16_alloc, cef_string_userfree_utf16_free};\nuse string::{cef_string_utf16_set};\nuse types::{cef_string_map_t, cef_string_t};\n\n\/\/cef_string_map\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_alloc() -> *mut cef_string_map_t {\n boxed::into_raw(box BTreeMap::new())\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_size(sm: *mut cef_string_map_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n (*sm).len() as c_int\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_append(sm: *mut cef_string_map_t, key: *const cef_string_t, value: *const cef_string_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n slice_to_str((*key).str as *const u8, (*key).length as usize, |result| {\n let csv = cef_string_userfree_utf16_alloc();\n cef_string_utf16_set((*value).str as *const u16, (*value).length, csv, 1);\n (*sm).insert(result.to_owned(), csv);\n 1\n })\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_find(sm: *mut cef_string_map_t, key: *const cef_string_t, value: *mut cef_string_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n slice_to_str((*key).str as *const u8, (*key).length as usize, |result| {\n match (*sm).get(result) {\n Some(s) => {\n cef_string_utf16_set((**s).str as *const u16, (**s).length, value, 1);\n 1\n }\n None => 0\n }\n })\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_key(sm: *mut cef_string_map_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || sm.is_null() { return 0; }\n if index as usize > (*sm).len() - 1 { return 0; }\n\n match (*sm).keys().nth(index as usize) {\n Some(k) => {\n cef_string_utf16_set(k.as_bytes().as_ptr() as *const u16,\n k.len() as u64,\n value,\n 1);\n 1\n },\n None => 0,\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_value(sm: *mut cef_string_map_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || sm.is_null() { return 0; }\n if index as usize > (*sm).len() - 1 { return 0; }\n\n match (*sm).values().nth(index as usize) {\n Some(val) => {\n cef_string_utf16_set((**val).str as *const u16, (**val).length, value, 1);\n 1\n },\n None => 0,\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_clear(sm: *mut cef_string_map_t) {\n unsafe {\n if sm.is_null() { return; }\n for val in (*sm).values() {\n cef_string_userfree_utf16_free(*val);\n }\n (*sm).clear();\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_free(sm: *mut cef_string_map_t) {\n unsafe {\n if sm.is_null() { return; }\n cef_string_map_clear(sm);\n drop(Box::from_raw(sm));\n }\n}\n<commit_msg>Return the result of cef_string_utf16_set in string_map.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse eutil::slice_to_str;\nuse libc::{c_int};\nuse std::boxed;\nuse std::collections::BTreeMap;\nuse string::{cef_string_userfree_utf16_alloc, cef_string_userfree_utf16_free};\nuse string::{cef_string_utf16_set};\nuse types::{cef_string_map_t, cef_string_t};\n\n\/\/cef_string_map\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_alloc() -> *mut cef_string_map_t {\n boxed::into_raw(box BTreeMap::new())\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_size(sm: *mut cef_string_map_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n (*sm).len() as c_int\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_append(sm: *mut cef_string_map_t, key: *const cef_string_t, value: *const cef_string_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n slice_to_str((*key).str as *const u8, (*key).length as usize, |result| {\n let csv = cef_string_userfree_utf16_alloc();\n cef_string_utf16_set((*value).str as *const u16, (*value).length, csv, 1);\n (*sm).insert(result.to_owned(), csv);\n 1\n })\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_find(sm: *mut cef_string_map_t, key: *const cef_string_t, value: *mut cef_string_t) -> c_int {\n unsafe {\n if sm.is_null() { return 0; }\n slice_to_str((*key).str as *const u8, (*key).length as usize, |result| {\n match (*sm).get(result) {\n Some(s) => {\n cef_string_utf16_set((**s).str as *const u16, (**s).length, value, 1)\n }\n None => 0\n }\n })\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_key(sm: *mut cef_string_map_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || sm.is_null() { return 0; }\n if index as usize > (*sm).len() - 1 { return 0; }\n\n match (*sm).keys().nth(index as usize) {\n Some(k) => {\n cef_string_utf16_set(k.as_bytes().as_ptr() as *const u16,\n k.len() as u64,\n value,\n 1)\n },\n None => 0,\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_value(sm: *mut cef_string_map_t, index: c_int, value: *mut cef_string_t) -> c_int {\n unsafe {\n if index < 0 || sm.is_null() { return 0; }\n if index as usize > (*sm).len() - 1 { return 0; }\n\n match (*sm).values().nth(index as usize) {\n Some(val) => {\n cef_string_utf16_set((**val).str as *const u16, (**val).length, value, 1);\n 1\n },\n None => 0,\n }\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_clear(sm: *mut cef_string_map_t) {\n unsafe {\n if sm.is_null() { return; }\n for val in (*sm).values() {\n cef_string_userfree_utf16_free(*val);\n }\n (*sm).clear();\n }\n}\n\n#[no_mangle]\npub extern \"C\" fn cef_string_map_free(sm: *mut cef_string_map_t) {\n unsafe {\n if sm.is_null() { return; }\n cef_string_map_clear(sm);\n drop(Box::from_raw(sm));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 Developers of the Rand project.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ https:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or https:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(custom_inner_attributes)]\n#![feature(test)]\n\n\/\/ Rustfmt splits macro invocations to shorten lines; in this case longer-lines are more readable\n#![rustfmt::skip]\n\nextern crate test;\n\nconst RAND_BENCH_N: u64 = 1000;\n\nuse std::mem::size_of;\nuse test::Bencher;\n\nuse rand::prelude::*;\nuse rand_distr::*;\n\n\/\/ At this time, distributions are optimised for 64-bit platforms.\nuse rand_pcg::Pcg64Mcg;\n\nmacro_rules! distr_int {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0 as $ty;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! distr_float {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0.0;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum += x;\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! distr {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0u32;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x as u32);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! distr_arr {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0u32;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x[0] as u32);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\n\n\/\/ distributions\ndistr_float!(distr_exp, f64, Exp::new(1.23 * 4.56).unwrap());\ndistr_float!(distr_normal, f64, Normal::new(-1.23, 4.56).unwrap());\ndistr_float!(distr_log_normal, f64, LogNormal::new(-1.23, 4.56).unwrap());\ndistr_float!(distr_gamma_large_shape, f64, Gamma::new(10., 1.0).unwrap());\ndistr_float!(distr_gamma_small_shape, f64, Gamma::new(0.1, 1.0).unwrap());\ndistr_float!(distr_beta_small_param, f64, Beta::new(0.1, 0.1).unwrap());\ndistr_float!(distr_beta_large_param_similar, f64, Beta::new(101., 95.).unwrap());\ndistr_float!(distr_beta_large_param_different, f64, Beta::new(10., 1000.).unwrap());\ndistr_float!(distr_beta_mixed_param, f64, Beta::new(0.5, 100.).unwrap());\ndistr_float!(distr_cauchy, f64, Cauchy::new(4.2, 6.9).unwrap());\ndistr_float!(distr_triangular, f64, Triangular::new(0., 1., 0.9).unwrap());\ndistr_int!(distr_binomial, u64, Binomial::new(20, 0.7).unwrap());\ndistr_int!(distr_binomial_small, u64, Binomial::new(1000000, 1e-30).unwrap());\ndistr_float!(distr_poisson, f64, Poisson::new(4.0).unwrap());\ndistr!(distr_bernoulli, bool, Bernoulli::new(0.18).unwrap());\ndistr_arr!(distr_circle, [f64; 2], UnitCircle);\ndistr_arr!(distr_sphere, [f64; 3], UnitSphere);\n\n\/\/ Weighted\ndistr_int!(distr_weighted_i8, usize, WeightedIndex::new(&[1i8, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_u32, usize, WeightedIndex::new(&[1u32, 2, 3, 4, 12, 0, 2, 
1]).unwrap());\ndistr_int!(distr_weighted_f64, usize, WeightedIndex::new(&[1.0f64, 0.001, 1.0\/3.0, 4.01, 0.0, 3.3, 22.0, 0.001]).unwrap());\ndistr_int!(distr_weighted_large_set, usize, WeightedIndex::new((0..10000).rev().chain(1..10001)).unwrap());\n\ndistr_int!(distr_weighted_alias_method_i8, usize, WeightedAliasIndex::new(vec![1i8, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_alias_method_u32, usize, WeightedAliasIndex::new(vec![1u32, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_alias_method_f64, usize, WeightedAliasIndex::new(vec![1.0f64, 0.001, 1.0\/3.0, 4.01, 0.0, 3.3, 22.0, 0.001]).unwrap());\ndistr_int!(distr_weighted_alias_method_large_set, usize, WeightedAliasIndex::new((0..10000).rev().chain(1..10001).collect()).unwrap());\n\n\n#[bench]\nfn dist_iter(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = Normal::new(-2.71828, 3.14159).unwrap();\n let mut iter = distr.sample_iter(&mut rng);\n\n b.iter(|| {\n let mut accum = 0.0;\n for _ in 0..RAND_BENCH_N {\n accum += iter.next().unwrap();\n }\n accum\n });\n b.bytes = size_of::<f64>() as u64 * RAND_BENCH_N;\n}\n\nmacro_rules! sample_binomial {\n ($name:ident, $n:expr, $p:expr) => {\n #[bench]\n fn $name(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_rng(&mut thread_rng()).unwrap();\n let (n, p) = ($n, $p);\n b.iter(|| {\n let d = Binomial::new(n, p).unwrap();\n rng.sample(d)\n })\n }\n };\n}\n\nsample_binomial!(misc_binomial_1, 1, 0.9);\nsample_binomial!(misc_binomial_10, 10, 0.9);\nsample_binomial!(misc_binomial_100, 100, 0.99);\nsample_binomial!(misc_binomial_1000, 1000, 0.01);\nsample_binomial!(misc_binomial_1e12, 1000_000_000_000, 0.2);\n<commit_msg>Benchmark specialized distribution implementations<commit_after>\/\/ Copyright 2018 Developers of the Rand project.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ https:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or https:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(custom_inner_attributes)]\n#![feature(test)]\n\n\/\/ Rustfmt splits macro invocations to shorten lines; in this case longer-lines are more readable\n#![rustfmt::skip]\n\nextern crate test;\n\nconst RAND_BENCH_N: u64 = 1000;\n\nuse std::mem::size_of;\nuse test::Bencher;\n\nuse rand::prelude::*;\nuse rand_distr::*;\n\n\/\/ At this time, distributions are optimised for 64-bit platforms.\nuse rand_pcg::Pcg64Mcg;\n\nmacro_rules! distr_int {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0 as $ty;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! distr_float {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0.0;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum += x;\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! 
distr {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0u32;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x as u32);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\nmacro_rules! distr_arr {\n ($fnn:ident, $ty:ty, $distr:expr) => {\n #[bench]\n fn $fnn(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = $distr;\n\n b.iter(|| {\n let mut accum = 0u32;\n for _ in 0..RAND_BENCH_N {\n let x: $ty = distr.sample(&mut rng);\n accum = accum.wrapping_add(x[0] as u32);\n }\n accum\n });\n b.bytes = size_of::<$ty>() as u64 * RAND_BENCH_N;\n }\n };\n}\n\n\n\/\/ distributions\ndistr_float!(distr_exp, f64, Exp::new(1.23 * 4.56).unwrap());\ndistr_float!(distr_exp1_specialized, f64, Exp1);\ndistr_float!(distr_exp1_general, f64, Exp::new(1.).unwrap());\ndistr_float!(distr_normal, f64, Normal::new(-1.23, 4.56).unwrap());\ndistr_float!(distr_standardnormal_specialized, f64, StandardNormal);\ndistr_float!(distr_standardnormal_general, f64, Normal::new(0., 1.).unwrap());\ndistr_float!(distr_log_normal, f64, LogNormal::new(-1.23, 4.56).unwrap());\ndistr_float!(distr_gamma_large_shape, f64, Gamma::new(10., 1.0).unwrap());\ndistr_float!(distr_gamma_small_shape, f64, Gamma::new(0.1, 1.0).unwrap());\ndistr_float!(distr_beta_small_param, f64, Beta::new(0.1, 0.1).unwrap());\ndistr_float!(distr_beta_large_param_similar, f64, Beta::new(101., 95.).unwrap());\ndistr_float!(distr_beta_large_param_different, f64, Beta::new(10., 1000.).unwrap());\ndistr_float!(distr_beta_mixed_param, f64, Beta::new(0.5, 100.).unwrap());\ndistr_float!(distr_cauchy, f64, Cauchy::new(4.2, 6.9).unwrap());\ndistr_float!(distr_triangular, f64, Triangular::new(0., 1., 0.9).unwrap());\ndistr_int!(distr_binomial, u64, Binomial::new(20, 0.7).unwrap());\ndistr_int!(distr_binomial_small, u64, Binomial::new(1000000, 1e-30).unwrap());\ndistr_float!(distr_poisson, f64, Poisson::new(4.0).unwrap());\ndistr!(distr_bernoulli, bool, Bernoulli::new(0.18).unwrap());\ndistr_arr!(distr_circle, [f64; 2], UnitCircle);\ndistr_arr!(distr_sphere, [f64; 3], UnitSphere);\n\n\/\/ Weighted\ndistr_int!(distr_weighted_i8, usize, WeightedIndex::new(&[1i8, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_u32, usize, WeightedIndex::new(&[1u32, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_f64, usize, WeightedIndex::new(&[1.0f64, 0.001, 1.0\/3.0, 4.01, 0.0, 3.3, 22.0, 0.001]).unwrap());\ndistr_int!(distr_weighted_large_set, usize, WeightedIndex::new((0..10000).rev().chain(1..10001)).unwrap());\n\ndistr_int!(distr_weighted_alias_method_i8, usize, WeightedAliasIndex::new(vec![1i8, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_alias_method_u32, usize, WeightedAliasIndex::new(vec![1u32, 2, 3, 4, 12, 0, 2, 1]).unwrap());\ndistr_int!(distr_weighted_alias_method_f64, usize, WeightedAliasIndex::new(vec![1.0f64, 0.001, 1.0\/3.0, 4.01, 0.0, 3.3, 22.0, 0.001]).unwrap());\ndistr_int!(distr_weighted_alias_method_large_set, usize, WeightedAliasIndex::new((0..10000).rev().chain(1..10001).collect()).unwrap());\n\n\n#[bench]\nfn dist_iter(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_entropy();\n let distr = Normal::new(-2.71828, 3.14159).unwrap();\n let mut iter = distr.sample_iter(&mut rng);\n\n b.iter(|| {\n let mut accum = 0.0;\n for _ in 0..RAND_BENCH_N {\n accum += iter.next().unwrap();\n }\n accum\n });\n 
b.bytes = size_of::<f64>() as u64 * RAND_BENCH_N;\n}\n\nmacro_rules! sample_binomial {\n ($name:ident, $n:expr, $p:expr) => {\n #[bench]\n fn $name(b: &mut Bencher) {\n let mut rng = Pcg64Mcg::from_rng(&mut thread_rng()).unwrap();\n let (n, p) = ($n, $p);\n b.iter(|| {\n let d = Binomial::new(n, p).unwrap();\n rng.sample(d)\n })\n }\n };\n}\n\nsample_binomial!(misc_binomial_1, 1, 0.9);\nsample_binomial!(misc_binomial_10, 10, 0.9);\nsample_binomial!(misc_binomial_100, 100, 0.99);\nsample_binomial!(misc_binomial_1000, 1000, 0.01);\nsample_binomial!(misc_binomial_1e12, 1000_000_000_000, 0.2);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Move the vkCmdBindDescriptorSets call into a bind() method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>stupidly forgot to add this file<commit_after>pub trait LSHTable<'a, T, O> : Sync + Send {\n fn query_multiprobe(&self, v: &'a T, multiprobe_limit: usize) -> Vec<usize>;\n fn query_vec(&self, v: &'a T) -> Vec<usize>;\n}\n<|endoftext|>"} {"text":"<commit_before>#[crate_id = \"base64#1.0.0\"];\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Jordy Dickinson <jordy.dickinson@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE file\n * that was distributed with this source code.\n *\/\n\nextern mod extra;\n\nuse std::char;\nuse std::io::{File, stdin, stdout};\nuse std::os;\nuse std::str;\n\nuse extra::getopts::groups::{\n getopts,\n optflag,\n optopt,\n usage\n};\nuse extra::base64;\nuse extra::base64::{FromBase64, ToBase64};\n\nfn main() {\n let args = ~os::args();\n let opts = ~[\n optflag(\"d\", \"decode\", \"decode data\"),\n optflag(\"i\", \"ignore-garbage\",\n \"when decoding, ignore non-alphabetic characters\"),\n optopt(\"w\", \"wrap\",\n \"wrap encoded lines after COLS character (default 76, 0 to \\\n disable wrapping)\", \"COLS\"),\n optflag(\"h\", \"help\", \"display this help text and exit\"),\n optflag(\"V\", \"version\", \"output version information and exit\")\n ];\n let matches = match getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(e) => {\n error!(\"error: {:s}\", e.to_err_msg());\n fail!()\n }\n };\n\n let progname = args[0].clone();\n let usage = usage(\"Base64 encode or decode FILE, or standard input, to \\\n standard output.\", opts);\n let mode = if matches.opt_present(\"help\") {\n Help\n } else if matches.opt_present(\"version\") {\n Version\n } else if matches.opt_present(\"decode\") {\n Decode\n } else {\n Encode\n };\n let ignore_garbage = matches.opt_present(\"ignore-garbage\");\n let line_wrap = match matches.opt_str(\"wrap\") {\n Some(s) => match from_str(s) {\n Some(s) => s,\n None => {\n error!(\"error: {:s}\", \"Argument to option 'wrap' \\\n improperly formatted.\");\n fail!()\n }\n },\n None => 76\n };\n let mut input = if matches.free.is_empty() || matches.free[0] == ~\"-\" {\n ~stdin() as ~Reader\n } else {\n let path = Path::new(matches.free[0]);\n ~File::open(&path) as ~Reader\n };\n\n match mode {\n Decode => decode(input, ignore_garbage),\n Encode => encode(input, line_wrap),\n Help => help(progname, usage),\n Version => version()\n }\n}\n\nfn decode(input: &mut Reader, ignore_garbage: bool) {\n let mut to_decode = str::from_utf8_owned(input.read_to_end());\n\n to_decode = to_decode.replace(\"\\n\", \"\");\n\n if ignore_garbage {\n let standard_chars =\n bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789+\/\").map(|b| char::from_u32(*b as u32).unwrap());\n\n to_decode = to_decode\n 
.trim_chars(&|c| !standard_chars.contains(&c))\n .to_owned();\n }\n\n match to_decode.from_base64() {\n Ok(bytes) => {\n let mut out = stdout();\n\n out.write(bytes);\n out.flush();\n }\n Err(s) => {\n error!(\"error: {:s}\", s);\n fail!()\n }\n }\n}\n\nfn encode(input: &mut Reader, line_wrap: uint) {\n let b64_conf = base64::Config {\n char_set: base64::Standard,\n pad: true,\n line_length: match line_wrap {\n 0 => None,\n _ => Some(line_wrap)\n }\n };\n let to_encode = input.read_to_end();\n let mut encoded = to_encode.to_base64(b64_conf);\n\n \/\/ To my knowledge, RFC 3548 does not specify which line endings to use. It\n \/\/ seems that rust's base64 algorithm uses CRLF as prescribed by RFC 2045.\n \/\/ However, since GNU base64 outputs only LF (presumably because that is\n \/\/ the standard UNIX line ending), we strip CRs from the output to maintain\n \/\/ compatibility.\n encoded = encoded.replace(\"\\r\", \"\");\n\n println(encoded);\n}\n\nfn help(progname: &str, usage: &str) {\n println!(\"Usage: {:s} [OPTION]... [FILE]\", progname);\n println(\"\");\n println(usage);\n\n let msg = ~\"With no FILE, or when FILE is -, read standard input.\\n\\n\\\n The data are encoded as described for the base64 alphabet in RFC \\\n 3548. When\\ndecoding, the input may contain newlines in addition \\\n to the bytes of the formal\\nbase64 alphabet. Use --ignore-garbage \\\n to attempt to recover from any other\\nnon-alphabet bytes in the \\\n encoded stream.\";\n\n println(msg);\n}\n\nfn version() {\n println(\"base64 1.0.0\");\n}\n\nenum Mode {\n Decode,\n Encode,\n Help,\n Version\n}\n\n<commit_msg>Use same crate_id form as other utils<commit_after>#[crate_id(name=\"base64\", vers=\"1.0.0\", author=\"Jordy Dickinson\")];\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Jordy Dickinson <jordy.dickinson@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE file\n * that was distributed with this source code.\n *\/\n\nextern mod extra;\n\nuse std::char;\nuse std::io::{File, stdin, stdout};\nuse std::os;\nuse std::str;\n\nuse extra::getopts::groups::{\n getopts,\n optflag,\n optopt,\n usage\n};\nuse extra::base64;\nuse extra::base64::{FromBase64, ToBase64};\n\nfn main() {\n let args = ~os::args();\n let opts = ~[\n optflag(\"d\", \"decode\", \"decode data\"),\n optflag(\"i\", \"ignore-garbage\",\n \"when decoding, ignore non-alphabetic characters\"),\n optopt(\"w\", \"wrap\",\n \"wrap encoded lines after COLS character (default 76, 0 to \\\n disable wrapping)\", \"COLS\"),\n optflag(\"h\", \"help\", \"display this help text and exit\"),\n optflag(\"V\", \"version\", \"output version information and exit\")\n ];\n let matches = match getopts(args.tail(), opts) {\n Ok(m) => m,\n Err(e) => {\n error!(\"error: {:s}\", e.to_err_msg());\n fail!()\n }\n };\n\n let progname = args[0].clone();\n let usage = usage(\"Base64 encode or decode FILE, or standard input, to \\\n standard output.\", opts);\n let mode = if matches.opt_present(\"help\") {\n Help\n } else if matches.opt_present(\"version\") {\n Version\n } else if matches.opt_present(\"decode\") {\n Decode\n } else {\n Encode\n };\n let ignore_garbage = matches.opt_present(\"ignore-garbage\");\n let line_wrap = match matches.opt_str(\"wrap\") {\n Some(s) => match from_str(s) {\n Some(s) => s,\n None => {\n error!(\"error: {:s}\", \"Argument to option 'wrap' \\\n improperly formatted.\");\n fail!()\n }\n },\n None => 76\n };\n let mut input = if matches.free.is_empty() || matches.free[0] == 
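// Illustrative aside (not part of the original util): the line-ending note in encode()
// above amounts to wrapping the encoded text every N columns and joining with plain LF,
// matching GNU base64 rather than RFC 2045's CRLF. A standalone sketch using only the
// standard library; `encoded` stands in for the output of any base64 encoder.
fn wrap_lf(encoded: &str, cols: usize) -> String {
    if cols == 0 {
        return encoded.to_string(); // wrapping disabled, as with -w 0
    }
    encoded
        .as_bytes() // base64 output is ASCII, so byte chunks are valid UTF-8
        .chunks(cols)
        .map(|chunk| std::str::from_utf8(chunk).unwrap())
        .collect::<Vec<_>>()
        .join("\n")
}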
~\"-\" {\n ~stdin() as ~Reader\n } else {\n let path = Path::new(matches.free[0]);\n ~File::open(&path) as ~Reader\n };\n\n match mode {\n Decode => decode(input, ignore_garbage),\n Encode => encode(input, line_wrap),\n Help => help(progname, usage),\n Version => version()\n }\n}\n\nfn decode(input: &mut Reader, ignore_garbage: bool) {\n let mut to_decode = str::from_utf8_owned(input.read_to_end());\n\n to_decode = to_decode.replace(\"\\n\", \"\");\n\n if ignore_garbage {\n let standard_chars =\n bytes!(\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\",\n \"abcdefghijklmnopqrstuvwxyz\",\n \"0123456789+\/\").map(|b| char::from_u32(*b as u32).unwrap());\n\n to_decode = to_decode\n .trim_chars(&|c| !standard_chars.contains(&c))\n .to_owned();\n }\n\n match to_decode.from_base64() {\n Ok(bytes) => {\n let mut out = stdout();\n\n out.write(bytes);\n out.flush();\n }\n Err(s) => {\n error!(\"error: {:s}\", s);\n fail!()\n }\n }\n}\n\nfn encode(input: &mut Reader, line_wrap: uint) {\n let b64_conf = base64::Config {\n char_set: base64::Standard,\n pad: true,\n line_length: match line_wrap {\n 0 => None,\n _ => Some(line_wrap)\n }\n };\n let to_encode = input.read_to_end();\n let mut encoded = to_encode.to_base64(b64_conf);\n\n \/\/ To my knowledge, RFC 3548 does not specify which line endings to use. It\n \/\/ seems that rust's base64 algorithm uses CRLF as prescribed by RFC 2045.\n \/\/ However, since GNU base64 outputs only LF (presumably because that is\n \/\/ the standard UNIX line ending), we strip CRs from the output to maintain\n \/\/ compatibility.\n encoded = encoded.replace(\"\\r\", \"\");\n\n println(encoded);\n}\n\nfn help(progname: &str, usage: &str) {\n println!(\"Usage: {:s} [OPTION]... [FILE]\", progname);\n println(\"\");\n println(usage);\n\n let msg = ~\"With no FILE, or when FILE is -, read standard input.\\n\\n\\\n The data are encoded as described for the base64 alphabet in RFC \\\n 3548. When\\ndecoding, the input may contain newlines in addition \\\n to the bytes of the formal\\nbase64 alphabet. Use --ignore-garbage \\\n to attempt to recover from any other\\nnon-alphabet bytes in the \\\n encoded stream.\";\n\n println(msg);\n}\n\nfn version() {\n println(\"base64 1.0.0\");\n}\n\nenum Mode {\n Decode,\n Encode,\n Help,\n Version\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add check for handling equal radial gradients<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! An actor-based remote devtools server implementation. Only tested with\n\/\/! nightly Firefox versions at time of writing. Largely based on\n\/\/! reverse-engineering of Firefox chrome devtool logs and reading of\n\/\/! 
[code](http:\/\/mxr.mozilla.org\/mozilla-central\/source\/toolkit\/devtools\/server\/).\n\n#![crate_name = \"devtools\"]\n#![crate_type = \"rlib\"]\n\n#![feature(box_syntax, core, rustc_private)]\n#![feature(collections, std_misc)]\n#![feature(io)]\n#![feature(net)]\n#![feature(old_io)]\n\n#![allow(non_snake_case)]\n\n#[macro_use]\nextern crate log;\n\nextern crate collections;\nextern crate core;\nextern crate devtools_traits;\nextern crate \"rustc-serialize\" as rustc_serialize;\nextern crate msg;\nextern crate time;\nextern crate util;\n\nuse actor::{Actor, ActorRegistry};\nuse actors::console::ConsoleActor;\nuse actors::worker::WorkerActor;\nuse actors::inspector::InspectorActor;\nuse actors::root::RootActor;\nuse actors::tab::TabActor;\nuse actors::timeline::TimelineActor;\nuse protocol::JsonPacketStream;\n\nuse devtools_traits::{ConsoleMessage, DevtoolsControlMsg};\nuse devtools_traits::{DevtoolsPageInfo, DevtoolScriptControlMsg};\nuse msg::constellation_msg::{PipelineId, WorkerId};\nuse util::task::spawn_named;\n\nuse std::borrow::ToOwned;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::sync::mpsc::{channel, Receiver, Sender, RecvError};\nuse std::net::{TcpListener, TcpStream, Shutdown};\nuse std::sync::{Arc, Mutex};\nuse time::precise_time_ns;\n\nmod actor;\n\/\/\/ Corresponds to http:\/\/mxr.mozilla.org\/mozilla-central\/source\/toolkit\/devtools\/server\/actors\/\nmod actors {\n pub mod console;\n pub mod framerate;\n pub mod memory;\n pub mod inspector;\n pub mod root;\n pub mod tab;\n pub mod timeline;\n pub mod worker;\n}\nmod protocol;\n\n#[derive(RustcEncodable)]\nstruct ConsoleAPICall {\n from: String,\n __type__: String,\n message: ConsoleMsg,\n}\n\n#[derive(RustcEncodable)]\nstruct ConsoleMsg {\n level: String,\n timeStamp: u64,\n arguments: Vec<String>,\n filename: String,\n lineNumber: u32,\n columnNumber: u32,\n}\n\n\/\/\/ Spin up a devtools server that listens for connections on the specified port.\npub fn start_server(port: u16) -> Sender<DevtoolsControlMsg> {\n let (sender, receiver) = channel();\n {\n let sender = sender.clone();\n spawn_named(\"Devtools\".to_owned(), move || {\n run_server(sender, receiver, port)\n });\n }\n sender\n}\n\nfn run_server(sender: Sender<DevtoolsControlMsg>,\n receiver: Receiver<DevtoolsControlMsg>,\n port: u16) {\n let listener = TcpListener::bind(&(\"127.0.0.1\", port)).unwrap();\n\n let mut registry = ActorRegistry::new();\n\n let root = box RootActor {\n tabs: vec!(),\n };\n\n registry.register(root);\n registry.find::<RootActor>(\"root\");\n\n let actors = registry.create_shareable();\n\n let mut accepted_connections: Vec<TcpStream> = Vec::new();\n\n let mut actor_pipelines: HashMap<PipelineId, String> = HashMap::new();\n\n\n \/\/\/ Process the input from a single devtools client until EOF.\n fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) {\n println!(\"connection established to {}\", stream.peer_addr().unwrap());\n {\n let actors = actors.lock().unwrap();\n let msg = actors.find::<RootActor>(\"root\").encodable();\n stream.write_json_packet(&msg);\n }\n\n 'outer: loop {\n match stream.read_json_packet() {\n Ok(json_packet) => {\n match actors.lock().unwrap().handle_message(json_packet.as_object().unwrap(),\n &mut stream) {\n Ok(()) => {},\n Err(()) => {\n println!(\"error: devtools actor stopped responding\");\n let _ = stream.shutdown(Shutdown::Both);\n break 'outer\n }\n }\n }\n Err(e) => {\n println!(\"error: {}\", e.description());\n break 'outer\n }\n }\n }\n }\n\n \/\/ We need 
separate actor representations for each script global that exists;\n \/\/ clients can theoretically connect to multiple globals simultaneously.\n \/\/ TODO: move this into the root or tab modules?\n fn handle_new_global(actors: Arc<Mutex<ActorRegistry>>,\n ids: (PipelineId, Option<WorkerId>),\n scriptSender: Sender<DevtoolScriptControlMsg>,\n actor_pipelines: &mut HashMap<PipelineId, String>,\n page_info: DevtoolsPageInfo) {\n let mut actors = actors.lock().unwrap();\n\n let (pipeline, worker_id) = ids;\n\n let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new();\n\n \/\/TODO: move all this actor creation into a constructor method on TabActor\n let (tab, console, inspector, timeline) = {\n let console = ConsoleActor {\n name: actors.new_name(\"console\"),\n script_chan: scriptSender.clone(),\n pipeline: pipeline,\n streams: RefCell::new(Vec::new()),\n };\n let inspector = InspectorActor {\n name: actors.new_name(\"inspector\"),\n walker: RefCell::new(None),\n pageStyle: RefCell::new(None),\n highlighter: RefCell::new(None),\n script_chan: scriptSender.clone(),\n pipeline: pipeline,\n };\n\n let timeline = TimelineActor::new(actors.new_name(\"timeline\"),\n pipeline,\n scriptSender);\n\n let DevtoolsPageInfo { title, url } = page_info;\n let tab = TabActor {\n name: actors.new_name(\"tab\"),\n title: title,\n url: url.serialize(),\n console: console.name(),\n inspector: inspector.name(),\n timeline: timeline.name(),\n };\n\n let root = actors.find_mut::<RootActor>(\"root\");\n root.tabs.push(tab.name.clone());\n (tab, console, inspector, timeline)\n };\n\n if let Some(id) = worker_id {\n let worker = WorkerActor {\n name: actors.new_name(\"worker\"),\n id: id,\n };\n actor_workers.insert((pipeline, id), worker.name.clone());\n actors.register(box worker);\n }\n\n actor_pipelines.insert(pipeline, tab.name.clone());\n actors.register(box tab);\n actors.register(box console);\n actors.register(box inspector);\n actors.register(box timeline);\n }\n\n fn handle_console_message(actors: Arc<Mutex<ActorRegistry>>,\n id: PipelineId,\n console_message: ConsoleMessage,\n actor_pipelines: &HashMap<PipelineId, String>) {\n let console_actor_name = find_console_actor(actors.clone(), id, actor_pipelines);\n let actors = actors.lock().unwrap();\n let console_actor = actors.find::<ConsoleActor>(&console_actor_name);\n match console_message {\n ConsoleMessage::LogMessage(message, filename, lineNumber, columnNumber) => {\n let msg = ConsoleAPICall {\n from: console_actor.name.clone(),\n __type__: \"consoleAPICall\".to_string(),\n message: ConsoleMsg {\n level: \"log\".to_string(),\n timeStamp: precise_time_ns(),\n arguments: vec!(message),\n filename: filename,\n lineNumber: lineNumber,\n columnNumber: columnNumber,\n },\n };\n for stream in console_actor.streams.borrow_mut().iter_mut() {\n stream.write_json_packet(&msg);\n }\n }\n }\n }\n\n fn find_console_actor(actors: Arc<Mutex<ActorRegistry>>,\n id: PipelineId,\n actor_pipelines: &HashMap<PipelineId, String>) -> String {\n let actors = actors.lock().unwrap();\n let ref tab_actor_name = (*actor_pipelines)[id];\n let tab_actor = actors.find::<TabActor>(tab_actor_name);\n let console_actor_name = tab_actor.console.clone();\n return console_actor_name;\n }\n\n spawn_named(\"DevtoolsClientAcceptor\".to_owned(), move || {\n \/\/ accept connections and process them, spawning a new task for each one\n for stream in listener.incoming() {\n \/\/ connection succeeded\n sender.send(DevtoolsControlMsg::AddClient(stream.unwrap())).unwrap();\n 
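// Illustrative aside (not from the original servo source): the acceptor task above
// follows a common pattern -- accept TCP connections on one thread and hand each
// stream to a coordinating loop over an mpsc channel. A minimal standalone sketch of
// that pattern using only std; names here are hypothetical.
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc::channel;
use std::thread;

fn accept_over_channel(port: u16) -> std::io::Result<()> {
    let listener = TcpListener::bind(("127.0.0.1", port))?;
    let (tx, rx) = channel::<TcpStream>();

    thread::spawn(move || {
        for stream in listener.incoming() {
            // Forward each accepted connection to the coordinator thread.
            if let Ok(stream) = stream {
                if tx.send(stream).is_err() {
                    break; // coordinator went away
                }
            }
        }
    });

    // Coordinator: handle each connection as it arrives.
    for stream in rx {
        println!("client connected from {:?}", stream.peer_addr());
    }
    Ok(())
}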
}\n });\n\n loop {\n match receiver.recv() {\n Ok(DevtoolsControlMsg::AddClient(stream)) => {\n let actors = actors.clone();\n accepted_connections.push(stream.try_clone().unwrap());\n spawn_named(\"DevtoolsClientHandler\".to_owned(), move || {\n handle_client(actors, stream.try_clone().unwrap())\n })\n }\n Ok(DevtoolsControlMsg::ServerExitMsg) | Err(RecvError) => break,\n Ok(DevtoolsControlMsg::NewGlobal(ids, scriptSender, pageinfo)) =>\n handle_new_global(actors.clone(), ids, scriptSender, &mut actor_pipelines,\n pageinfo),\n Ok(DevtoolsControlMsg::SendConsoleMessage(id, console_message)) =>\n handle_console_message(actors.clone(), id, console_message,\n &actor_pipelines),\n }\n }\n\n for connection in accepted_connections.iter_mut() {\n let _ = connection.shutdown(Shutdown::Both);\n }\n}\n<commit_msg>Move actor_workers HashMap outside handle_new_global(). [Issue#5735]<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! An actor-based remote devtools server implementation. Only tested with\n\/\/! nightly Firefox versions at time of writing. Largely based on\n\/\/! reverse-engineering of Firefox chrome devtool logs and reading of\n\/\/! [code](http:\/\/mxr.mozilla.org\/mozilla-central\/source\/toolkit\/devtools\/server\/).\n\n#![crate_name = \"devtools\"]\n#![crate_type = \"rlib\"]\n\n#![feature(box_syntax, core, rustc_private)]\n#![feature(collections, std_misc)]\n#![feature(io)]\n#![feature(net)]\n#![feature(old_io)]\n\n#![allow(non_snake_case)]\n\n#[macro_use]\nextern crate log;\n\nextern crate collections;\nextern crate core;\nextern crate devtools_traits;\nextern crate \"rustc-serialize\" as rustc_serialize;\nextern crate msg;\nextern crate time;\nextern crate util;\n\nuse actor::{Actor, ActorRegistry};\nuse actors::console::ConsoleActor;\nuse actors::worker::WorkerActor;\nuse actors::inspector::InspectorActor;\nuse actors::root::RootActor;\nuse actors::tab::TabActor;\nuse actors::timeline::TimelineActor;\nuse protocol::JsonPacketStream;\n\nuse devtools_traits::{ConsoleMessage, DevtoolsControlMsg};\nuse devtools_traits::{DevtoolsPageInfo, DevtoolScriptControlMsg};\nuse msg::constellation_msg::{PipelineId, WorkerId};\nuse util::task::spawn_named;\n\nuse std::borrow::ToOwned;\nuse std::cell::RefCell;\nuse std::collections::HashMap;\nuse std::sync::mpsc::{channel, Receiver, Sender, RecvError};\nuse std::net::{TcpListener, TcpStream, Shutdown};\nuse std::sync::{Arc, Mutex};\nuse time::precise_time_ns;\n\nmod actor;\n\/\/\/ Corresponds to http:\/\/mxr.mozilla.org\/mozilla-central\/source\/toolkit\/devtools\/server\/actors\/\nmod actors {\n pub mod console;\n pub mod framerate;\n pub mod memory;\n pub mod inspector;\n pub mod root;\n pub mod tab;\n pub mod timeline;\n pub mod worker;\n}\nmod protocol;\n\n#[derive(RustcEncodable)]\nstruct ConsoleAPICall {\n from: String,\n __type__: String,\n message: ConsoleMsg,\n}\n\n#[derive(RustcEncodable)]\nstruct ConsoleMsg {\n level: String,\n timeStamp: u64,\n arguments: Vec<String>,\n filename: String,\n lineNumber: u32,\n columnNumber: u32,\n}\n\n\/\/\/ Spin up a devtools server that listens for connections on the specified port.\npub fn start_server(port: u16) -> Sender<DevtoolsControlMsg> {\n let (sender, receiver) = channel();\n {\n let sender = sender.clone();\n spawn_named(\"Devtools\".to_owned(), move || {\n run_server(sender, receiver, port)\n });\n }\n 
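// Illustrative aside (not from the original source): run_server below drives everything
// from a single control-message loop over an mpsc channel. The same shape, reduced to a
// standalone sketch with a hypothetical message enum in place of DevtoolsControlMsg.
use std::sync::mpsc::{channel, Sender};
use std::thread;

enum ControlMsg {
    AddClient(String), // stands in for the TcpStream carried by the real message
    Exit,
}

fn spawn_control_loop() -> Sender<ControlMsg> {
    let (sender, receiver) = channel();
    thread::spawn(move || loop {
        match receiver.recv() {
            Ok(ControlMsg::AddClient(name)) => println!("new client: {}", name),
            // Exit request or all senders dropped: shut the loop down.
            Ok(ControlMsg::Exit) | Err(_) => break,
        }
    });
    sender
}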
sender\n}\n\nfn run_server(sender: Sender<DevtoolsControlMsg>,\n receiver: Receiver<DevtoolsControlMsg>,\n port: u16) {\n let listener = TcpListener::bind(&(\"127.0.0.1\", port)).unwrap();\n\n let mut registry = ActorRegistry::new();\n\n let root = box RootActor {\n tabs: vec!(),\n };\n\n registry.register(root);\n registry.find::<RootActor>(\"root\");\n\n let actors = registry.create_shareable();\n\n let mut accepted_connections: Vec<TcpStream> = Vec::new();\n\n let mut actor_pipelines: HashMap<PipelineId, String> = HashMap::new();\n\n let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new();\n\n\n \/\/\/ Process the input from a single devtools client until EOF.\n fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) {\n println!(\"connection established to {}\", stream.peer_addr().unwrap());\n {\n let actors = actors.lock().unwrap();\n let msg = actors.find::<RootActor>(\"root\").encodable();\n stream.write_json_packet(&msg);\n }\n\n 'outer: loop {\n match stream.read_json_packet() {\n Ok(json_packet) => {\n match actors.lock().unwrap().handle_message(json_packet.as_object().unwrap(),\n &mut stream) {\n Ok(()) => {},\n Err(()) => {\n println!(\"error: devtools actor stopped responding\");\n let _ = stream.shutdown(Shutdown::Both);\n break 'outer\n }\n }\n }\n Err(e) => {\n println!(\"error: {}\", e.description());\n break 'outer\n }\n }\n }\n }\n\n \/\/ We need separate actor representations for each script global that exists;\n \/\/ clients can theoretically connect to multiple globals simultaneously.\n \/\/ TODO: move this into the root or tab modules?\n fn handle_new_global(actors: Arc<Mutex<ActorRegistry>>,\n ids: (PipelineId, Option<WorkerId>),\n scriptSender: Sender<DevtoolScriptControlMsg>,\n actor_pipelines: &mut HashMap<PipelineId, String>,\n actor_workers: &mut HashMap<(PipelineId, WorkerId), String>,\n page_info: DevtoolsPageInfo) {\n let mut actors = actors.lock().unwrap();\n\n let (pipeline, worker_id) = ids;\n\n \/\/TODO: move all this actor creation into a constructor method on TabActor\n let (tab, console, inspector, timeline) = {\n let console = ConsoleActor {\n name: actors.new_name(\"console\"),\n script_chan: scriptSender.clone(),\n pipeline: pipeline,\n streams: RefCell::new(Vec::new()),\n };\n let inspector = InspectorActor {\n name: actors.new_name(\"inspector\"),\n walker: RefCell::new(None),\n pageStyle: RefCell::new(None),\n highlighter: RefCell::new(None),\n script_chan: scriptSender.clone(),\n pipeline: pipeline,\n };\n\n let timeline = TimelineActor::new(actors.new_name(\"timeline\"),\n pipeline,\n scriptSender);\n\n let DevtoolsPageInfo { title, url } = page_info;\n let tab = TabActor {\n name: actors.new_name(\"tab\"),\n title: title,\n url: url.serialize(),\n console: console.name(),\n inspector: inspector.name(),\n timeline: timeline.name(),\n };\n\n let root = actors.find_mut::<RootActor>(\"root\");\n root.tabs.push(tab.name.clone());\n (tab, console, inspector, timeline)\n };\n\n if let Some(id) = worker_id {\n let worker = WorkerActor {\n name: actors.new_name(\"worker\"),\n id: id,\n };\n actor_workers.insert((pipeline, id), worker.name.clone());\n actors.register(box worker);\n }\n\n actor_pipelines.insert(pipeline, tab.name.clone());\n actors.register(box tab);\n actors.register(box console);\n actors.register(box inspector);\n actors.register(box timeline);\n }\n\n fn handle_console_message(actors: Arc<Mutex<ActorRegistry>>,\n id: PipelineId,\n console_message: ConsoleMessage,\n actor_pipelines: 
&HashMap<PipelineId, String>) {\n let console_actor_name = find_console_actor(actors.clone(), id, actor_pipelines);\n let actors = actors.lock().unwrap();\n let console_actor = actors.find::<ConsoleActor>(&console_actor_name);\n match console_message {\n ConsoleMessage::LogMessage(message, filename, lineNumber, columnNumber) => {\n let msg = ConsoleAPICall {\n from: console_actor.name.clone(),\n __type__: \"consoleAPICall\".to_string(),\n message: ConsoleMsg {\n level: \"log\".to_string(),\n timeStamp: precise_time_ns(),\n arguments: vec!(message),\n filename: filename,\n lineNumber: lineNumber,\n columnNumber: columnNumber,\n },\n };\n for stream in console_actor.streams.borrow_mut().iter_mut() {\n stream.write_json_packet(&msg);\n }\n }\n }\n }\n\n fn find_console_actor(actors: Arc<Mutex<ActorRegistry>>,\n id: PipelineId,\n actor_pipelines: &HashMap<PipelineId, String>) -> String {\n let actors = actors.lock().unwrap();\n let ref tab_actor_name = (*actor_pipelines)[id];\n let tab_actor = actors.find::<TabActor>(tab_actor_name);\n let console_actor_name = tab_actor.console.clone();\n return console_actor_name;\n }\n\n spawn_named(\"DevtoolsClientAcceptor\".to_owned(), move || {\n \/\/ accept connections and process them, spawning a new task for each one\n for stream in listener.incoming() {\n \/\/ connection succeeded\n sender.send(DevtoolsControlMsg::AddClient(stream.unwrap())).unwrap();\n }\n });\n\n loop {\n match receiver.recv() {\n Ok(DevtoolsControlMsg::AddClient(stream)) => {\n let actors = actors.clone();\n accepted_connections.push(stream.try_clone().unwrap());\n spawn_named(\"DevtoolsClientHandler\".to_owned(), move || {\n handle_client(actors, stream.try_clone().unwrap())\n })\n }\n Ok(DevtoolsControlMsg::ServerExitMsg) | Err(RecvError) => break,\n Ok(DevtoolsControlMsg::NewGlobal(ids, scriptSender, pageinfo)) =>\n handle_new_global(actors.clone(), ids, scriptSender, &mut actor_pipelines,\n &mut actor_workers, pageinfo),\n Ok(DevtoolsControlMsg::SendConsoleMessage(id, console_message)) =>\n handle_console_message(actors.clone(), id, console_message,\n &actor_pipelines),\n }\n }\n\n for connection in accepted_connections.iter_mut() {\n let _ = connection.shutdown(Shutdown::Both);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added power set implementation<commit_after>\/\/ Given a set, generate its power set, which is the set of all subsets of that\n\/\/ set: http:\/\/rosettacode.org\/wiki\/Power_set\n\nuse std::vec::Vec;\nuse std::slice::Items;\n\n\/\/ If set == {}\n\/\/ return {{}}\n\/\/ else if set == {a} U rest\n\/\/ return power_set(rest) U ({a} U each set in power_set(rest))\nfn power_set<'a, T: Clone>(items: &mut Items<'a,T>) -> Vec<Vec<T>> {\n let mut power = Vec::new();\n match items.next() {\n None => power.push(Vec::new()),\n Some(item) => {\n for set in power_set(items).iter() {\n power.push(set.clone());\n power.push(set.clone().append_one(item.clone()));\n }\n }\n }\n power\n}\n\n#[test]\nfn test() {\n let set = Vec::<int>::new();\n let power = power_set(&mut set.iter());\n assert!(power == vec!(vec!()));\n\n let mut set = Vec::<int>::new();\n set.push(1);\n set.push(2);\n set.push(3);\n let power = power_set(&mut set.iter());\n assert!(power == vec!(vec!(), vec!(1), vec!(2), vec!(2, 1),\n vec!(3), vec!(3, 1), vec!(3, 2), vec!(3, 2, 1)));\n}\n\n#[cfg(not(test))]\nfn main() {\n let mut set = Vec::<int>::new();\n set.push(1);\n set.push(2);\n set.push(3);\n set.push(4);\n let power = power_set(&mut set.iter());\n println!(\"Set : {}\", set);\n 
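// Illustrative aside (not part of the original Rosetta entry, which uses pre-1.0 Rust):
// the same power set can be built without recursion by treating each subset as a bit
// mask over the input indices. A standalone sketch in modern Rust.
fn power_set_bitmask<T: Clone>(items: &[T]) -> Vec<Vec<T>> {
    let n = items.len();
    assert!(n < usize::BITS as usize, "too many items for a usize bitmask");
    (0..(1usize << n))
        .map(|mask| {
            items
                .iter()
                .enumerate()
                // Keep element i whenever bit i of the mask is set.
                .filter(|&(i, _)| mask >> i & 1 == 1)
                .map(|(_, item)| item.clone())
                .collect()
        })
        .collect()
}
// power_set_bitmask(&[1, 2, 3]) yields all 8 subsets: [], [1], [2], [1, 2], [3], ...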
println!(\"Power Set: {}\", power);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>examples: Add an example of using the async client<commit_after>extern crate solicit;\n\nuse solicit::http::Header;\nuse solicit::client::Client;\nuse solicit::http::client::CleartextConnector;\nuse std::thread;\nuse std::str;\n\nfn main() {\n \/\/ Connect to a server that supports HTTP\/2\n let connector = CleartextConnector::new(\"http2bin.org\");\n let client = Client::with_connector(connector).unwrap();\n\n \/\/ Issue 5 requests from 5 different threads concurrently and wait for all\n \/\/ threads to receive their response.\n let threads: Vec<_> = (0..5).map(|i| {\n let this = client.clone();\n thread::spawn(move || {\n let resp = this.get(b\"\/get\", &[\n \/\/ A fully static header\n Header::new(&b\"x-solicit\"[..], &b\"Hello\"[..]),\n \/\/ A header with a static name, but dynamically allocated value\n Header::new(&b\"x-solicit\"[..], vec![b'0' + i as u8]),\n ]).unwrap();\n let response = resp.recv().unwrap();\n println!(\"Thread {} got response ... {}\", i, response.status_code().ok().unwrap());\n println!(\"The response contains the following headers:\");\n for header in response.headers.iter() {\n println!(\" {}: {}\",\n str::from_utf8(header.name()).unwrap(),\n str::from_utf8(header.value()).unwrap());\n }\n println!(\"Body:\");\n println!(\"{}\", str::from_utf8(&response.body).unwrap());\n })\n }).collect();\n\n let _: Vec<_> = threads.into_iter().map(|thread| thread.join()).collect();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add small test<commit_after>extern crate dimensioned as dim;\n\nuse dim::si::{m};\nuse dim::{Pow, Root, Sqrt, Cbrt};\nuse dim::peano::{P2, P3, P6};\n\n\n#[test]\nfn pows() {\n let x = 2.0*m;\n assert_eq!(x, P2::pow(x).sqrt());\n assert_eq!(x, P3::pow(x).cbrt());\n assert_eq!(x, P6::root(P6::pow(x)));\n println!(\"{}, {}\", x, x*x);\n}\n<|endoftext|>"} {"text":"<commit_before>use Renderable;\nuse std::collections::HashMap;\n\npub struct Template<'a>{\n elements: Vec<Box<Renderable +'a>>\n}\n\nimpl<'a> Renderable for Template<'a> {\n fn render (&self, context: &HashMap<String, String>) -> String{\n self.elements.iter().fold(String::new(), |fold, val| fold + val.render(context).as_slice())\n }\n}\n\nimpl<'a> Template<'a> {\n pub fn new(elements: Vec<Box<Renderable>>) -> Template<'a> {\n Template{elements: elements}\n }\n}\n\n<commit_msg>Fix weird Rust lifetime update<commit_after>use Renderable;\nuse std::collections::HashMap;\n\npub struct Template<'a>{\n elements: Vec<Box<Renderable +'a>>\n}\n\nimpl<'a> Renderable for Template<'a> {\n fn render (&self, context: &HashMap<String, String>) -> String{\n self.elements.iter().fold(String::new(), |fold, val| fold + val.render(context).as_slice())\n }\n}\n\nimpl<'a> Template<'a> {\n pub fn new(elements: Vec<Box<Renderable +'a>>) -> Template<'a> {\n Template{elements: elements}\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains everything needed to instantiate an interpreter.\n\/\/! 
This separation exists to ensure that no fancy miri features like\n\/\/! interpreting common C functions leak into CTFE.\n\nuse std::borrow::{Borrow, Cow};\nuse std::hash::Hash;\n\nuse rustc::hir::def_id::DefId;\nuse rustc::mir;\nuse rustc::ty::{self, Ty, layout::{Size, TyLayout}, query::TyCtxtAt};\n\nuse super::{\n Allocation, AllocId, EvalResult, Scalar,\n EvalContext, PlaceTy, OpTy, Pointer, MemoryKind,\n};\n\n\/\/\/ Classifying memory accesses\npub enum MemoryAccess {\n Read,\n Write,\n}\n\n\/\/\/ Whether this kind of memory is allowed to leak\npub trait MayLeak: Copy {\n fn may_leak(self) -> bool;\n}\n\n\/\/\/ The functionality needed by memory to manage its allocations\npub trait AllocMap<K: Hash + Eq, V> {\n \/\/\/ Test if the map contains the given key.\n \/\/\/ Deliberately takes `&mut` because that is sufficient, and some implementations\n \/\/\/ can be more efficient then (using `RefCell::get_mut`).\n fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool\n where K: Borrow<Q>;\n\n \/\/\/ Insert new entry into the map.\n fn insert(&mut self, k: K, v: V) -> Option<V>;\n\n \/\/\/ Remove entry from the map.\n fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>\n where K: Borrow<Q>;\n\n \/\/\/ Return data based the keys and values in the map.\n fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;\n\n \/\/\/ Return a reference to entry `k`. If no such entry exists, call\n \/\/\/ `vacant` and either forward its error, or add its result to the map\n \/\/\/ and return a reference to *that*.\n fn get_or<E>(\n &self,\n k: K,\n vacant: impl FnOnce() -> Result<V, E>\n ) -> Result<&V, E>;\n\n \/\/\/ Return a mutable reference to entry `k`. If no such entry exists, call\n \/\/\/ `vacant` and either forward its error, or add its result to the map\n \/\/\/ and return a reference to *that*.\n fn get_mut_or<E>(\n &mut self,\n k: K,\n vacant: impl FnOnce() -> Result<V, E>\n ) -> Result<&mut V, E>;\n}\n\n\/\/\/ Methods of this trait signifies a point where CTFE evaluation would fail\n\/\/\/ and some use case dependent behaviour can instead be applied.\npub trait Machine<'a, 'mir, 'tcx>: Sized {\n \/\/\/ Additional memory kinds a machine wishes to distinguish from the builtin ones\n type MemoryKinds: ::std::fmt::Debug + MayLeak + Eq + 'static;\n\n \/\/\/ Tag tracked alongside every pointer. 
This is used to implement \"Stacked Borrows\"\n \/\/\/ <https:\/\/www.ralfj.de\/blog\/2018\/08\/07\/stacked-borrows.html>.\n type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;\n\n \/\/\/ Extra data stored in every allocation.\n type AllocExtra: ::std::fmt::Debug + Default + Clone;\n\n \/\/\/ Memory's allocation map\n type MemoryMap:\n AllocMap<\n AllocId,\n (MemoryKind<Self::MemoryKinds>, Allocation<Self::PointerTag, Self::AllocExtra>)\n > +\n Default +\n Clone;\n\n \/\/\/ The memory kind to use for copied statics -- or None if those are not supported.\n \/\/\/ Statics are copied under two circumstances: When they are mutated, and when\n \/\/\/ `static_with_default_tag` or `find_foreign_static` (see below) returns an owned allocation\n \/\/\/ that is added to the memory so that the work is not done twice.\n const STATIC_KIND: Option<Self::MemoryKinds>;\n\n \/\/\/ Whether to enforce the validity invariant\n fn enforce_validity(ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool;\n\n \/\/\/ Called before a basic block terminator is executed.\n \/\/\/ You can use this to detect endlessly running programs.\n fn before_terminator(ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>;\n\n \/\/\/ Entry point to all function calls.\n \/\/\/\n \/\/\/ Returns either the mir to use for the call, or `None` if execution should\n \/\/\/ just proceed (which usually means this hook did all the work that the\n \/\/\/ called function should usually have done). In the latter case, it is\n \/\/\/ this hook's responsibility to call `goto_block(ret)` to advance the instruction pointer!\n \/\/\/ (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR\n \/\/\/ nor just jump to `ret`, but instead push their own stack frame.)\n \/\/\/ Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them\n \/\/\/ was used.\n fn find_fn(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n instance: ty::Instance<'tcx>,\n args: &[OpTy<'tcx, Self::PointerTag>],\n dest: Option<PlaceTy<'tcx, Self::PointerTag>>,\n ret: Option<mir::BasicBlock>,\n ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>>;\n\n \/\/\/ Directly process an intrinsic without pushing a stack frame.\n \/\/\/ If this returns successfully, the engine will take care of jumping to the next block.\n fn call_intrinsic(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n instance: ty::Instance<'tcx>,\n args: &[OpTy<'tcx, Self::PointerTag>],\n dest: PlaceTy<'tcx, Self::PointerTag>,\n ) -> EvalResult<'tcx>;\n\n \/\/\/ Called for read access to a foreign static item.\n \/\/\/\n \/\/\/ This will only be called once per static and machine; the result is cached in\n \/\/\/ the machine memory. (This relies on `AllocMap::get_or` being able to add the\n \/\/\/ owned allocation to the map even when the map is shared.)\n fn find_foreign_static(\n tcx: TyCtxtAt<'a, 'tcx, 'tcx>,\n def_id: DefId,\n ) -> EvalResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag, Self::AllocExtra>>>;\n\n \/\/\/ Called to turn an allocation obtained from the `tcx` into one that has\n \/\/\/ the appropriate tags on each pointer.\n \/\/\/\n \/\/\/ This should avoid copying if no work has to be done! If this returns an owned\n \/\/\/ allocation (because a copy had to be done to add the tags), machine memory will\n \/\/\/ cache the result. 
(This relies on `AllocMap::get_or` being able to add the\n \/\/\/ owned allocation to the map even when the map is shared.)\n fn static_with_default_tag(\n alloc: &'_ Allocation\n ) -> Cow<'_, Allocation<Self::PointerTag, Self::AllocExtra>>;\n\n \/\/\/ Called for all binary operations on integer(-like) types when one operand is a pointer\n \/\/\/ value, and for the `Offset` operation that is inherently about pointers.\n \/\/\/\n \/\/\/ Returns a (value, overflowed) pair if the operation succeeded\n fn ptr_op(\n ecx: &EvalContext<'a, 'mir, 'tcx, Self>,\n bin_op: mir::BinOp,\n left: Scalar<Self::PointerTag>,\n left_layout: TyLayout<'tcx>,\n right: Scalar<Self::PointerTag>,\n right_layout: TyLayout<'tcx>,\n ) -> EvalResult<'tcx, (Scalar<Self::PointerTag>, bool)>;\n\n \/\/\/ Heap allocations via the `box` keyword.\n fn box_alloc(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n dest: PlaceTy<'tcx, Self::PointerTag>,\n ) -> EvalResult<'tcx>;\n\n \/\/\/ Hook for performing extra checks on a memory access.\n \/\/\/\n \/\/\/ Takes read-only access to the allocation so we can keep all the memory read\n \/\/\/ operations take `&self`. Use a `RefCell` in `AllocExtra` if you\n \/\/\/ need to mutate.\n #[inline]\n fn memory_accessed(\n _alloc: &Allocation<Self::PointerTag, Self::AllocExtra>,\n _ptr: Pointer<Self::PointerTag>,\n _size: Size,\n _access: MemoryAccess,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n\n \/\/\/ Hook for performing extra checks when memory gets deallocated.\n #[inline]\n fn memory_deallocated(\n _alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,\n _id: AllocId,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n\n \/\/\/ Executed when evaluating the `&` operator: Creating a new reference.\n \/\/\/ This has the chance to adjust the tag.\n fn tag_reference(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n ptr: Pointer<Self::PointerTag>,\n pointee_ty: Ty<'tcx>,\n pointee_size: Size,\n borrow_kind: mir::BorrowKind,\n ) -> EvalResult<'tcx, Self::PointerTag>;\n\n \/\/\/ Executed when evaluating the `*` operator: Following a reference.\n \/\/\/ This has the change to adjust the tag.\n fn tag_dereference(\n ecx: &EvalContext<'a, 'mir, 'tcx, Self>,\n ptr: Pointer<Self::PointerTag>,\n ptr_ty: Ty<'tcx>,\n ) -> EvalResult<'tcx, Self::PointerTag>;\n\n \/\/\/ Execute a validation operation\n #[inline]\n fn validation_op(\n _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n _op: ::rustc::mir::ValidationOp,\n _operand: &::rustc::mir::ValidationOperand<'tcx, ::rustc::mir::Place<'tcx>>,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n}\n<commit_msg>repeat after me: Clone, Copy, Debug<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains everything needed to instantiate an interpreter.\n\/\/! This separation exists to ensure that no fancy miri features like\n\/\/! 
interpreting common C functions leak into CTFE.\n\nuse std::borrow::{Borrow, Cow};\nuse std::hash::Hash;\n\nuse rustc::hir::def_id::DefId;\nuse rustc::mir;\nuse rustc::ty::{self, Ty, layout::{Size, TyLayout}, query::TyCtxtAt};\n\nuse super::{\n Allocation, AllocId, EvalResult, Scalar,\n EvalContext, PlaceTy, OpTy, Pointer, MemoryKind,\n};\n\n\/\/\/ Classifying memory accesses\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\npub enum MemoryAccess {\n Read,\n Write,\n}\n\n\/\/\/ Whether this kind of memory is allowed to leak\npub trait MayLeak: Copy {\n fn may_leak(self) -> bool;\n}\n\n\/\/\/ The functionality needed by memory to manage its allocations\npub trait AllocMap<K: Hash + Eq, V> {\n \/\/\/ Test if the map contains the given key.\n \/\/\/ Deliberately takes `&mut` because that is sufficient, and some implementations\n \/\/\/ can be more efficient then (using `RefCell::get_mut`).\n fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool\n where K: Borrow<Q>;\n\n \/\/\/ Insert new entry into the map.\n fn insert(&mut self, k: K, v: V) -> Option<V>;\n\n \/\/\/ Remove entry from the map.\n fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>\n where K: Borrow<Q>;\n\n \/\/\/ Return data based the keys and values in the map.\n fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;\n\n \/\/\/ Return a reference to entry `k`. If no such entry exists, call\n \/\/\/ `vacant` and either forward its error, or add its result to the map\n \/\/\/ and return a reference to *that*.\n fn get_or<E>(\n &self,\n k: K,\n vacant: impl FnOnce() -> Result<V, E>\n ) -> Result<&V, E>;\n\n \/\/\/ Return a mutable reference to entry `k`. If no such entry exists, call\n \/\/\/ `vacant` and either forward its error, or add its result to the map\n \/\/\/ and return a reference to *that*.\n fn get_mut_or<E>(\n &mut self,\n k: K,\n vacant: impl FnOnce() -> Result<V, E>\n ) -> Result<&mut V, E>;\n}\n\n\/\/\/ Methods of this trait signifies a point where CTFE evaluation would fail\n\/\/\/ and some use case dependent behaviour can instead be applied.\npub trait Machine<'a, 'mir, 'tcx>: Sized {\n \/\/\/ Additional memory kinds a machine wishes to distinguish from the builtin ones\n type MemoryKinds: ::std::fmt::Debug + MayLeak + Eq + 'static;\n\n \/\/\/ Tag tracked alongside every pointer. 
This is used to implement \"Stacked Borrows\"\n \/\/\/ <https:\/\/www.ralfj.de\/blog\/2018\/08\/07\/stacked-borrows.html>.\n type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;\n\n \/\/\/ Extra data stored in every allocation.\n type AllocExtra: ::std::fmt::Debug + Default + Clone;\n\n \/\/\/ Memory's allocation map\n type MemoryMap:\n AllocMap<\n AllocId,\n (MemoryKind<Self::MemoryKinds>, Allocation<Self::PointerTag, Self::AllocExtra>)\n > +\n Default +\n Clone;\n\n \/\/\/ The memory kind to use for copied statics -- or None if those are not supported.\n \/\/\/ Statics are copied under two circumstances: When they are mutated, and when\n \/\/\/ `static_with_default_tag` or `find_foreign_static` (see below) returns an owned allocation\n \/\/\/ that is added to the memory so that the work is not done twice.\n const STATIC_KIND: Option<Self::MemoryKinds>;\n\n \/\/\/ Whether to enforce the validity invariant\n fn enforce_validity(ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool;\n\n \/\/\/ Called before a basic block terminator is executed.\n \/\/\/ You can use this to detect endlessly running programs.\n fn before_terminator(ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>;\n\n \/\/\/ Entry point to all function calls.\n \/\/\/\n \/\/\/ Returns either the mir to use for the call, or `None` if execution should\n \/\/\/ just proceed (which usually means this hook did all the work that the\n \/\/\/ called function should usually have done). In the latter case, it is\n \/\/\/ this hook's responsibility to call `goto_block(ret)` to advance the instruction pointer!\n \/\/\/ (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR\n \/\/\/ nor just jump to `ret`, but instead push their own stack frame.)\n \/\/\/ Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them\n \/\/\/ was used.\n fn find_fn(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n instance: ty::Instance<'tcx>,\n args: &[OpTy<'tcx, Self::PointerTag>],\n dest: Option<PlaceTy<'tcx, Self::PointerTag>>,\n ret: Option<mir::BasicBlock>,\n ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>>;\n\n \/\/\/ Directly process an intrinsic without pushing a stack frame.\n \/\/\/ If this returns successfully, the engine will take care of jumping to the next block.\n fn call_intrinsic(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n instance: ty::Instance<'tcx>,\n args: &[OpTy<'tcx, Self::PointerTag>],\n dest: PlaceTy<'tcx, Self::PointerTag>,\n ) -> EvalResult<'tcx>;\n\n \/\/\/ Called for read access to a foreign static item.\n \/\/\/\n \/\/\/ This will only be called once per static and machine; the result is cached in\n \/\/\/ the machine memory. (This relies on `AllocMap::get_or` being able to add the\n \/\/\/ owned allocation to the map even when the map is shared.)\n fn find_foreign_static(\n tcx: TyCtxtAt<'a, 'tcx, 'tcx>,\n def_id: DefId,\n ) -> EvalResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag, Self::AllocExtra>>>;\n\n \/\/\/ Called to turn an allocation obtained from the `tcx` into one that has\n \/\/\/ the appropriate tags on each pointer.\n \/\/\/\n \/\/\/ This should avoid copying if no work has to be done! If this returns an owned\n \/\/\/ allocation (because a copy had to be done to add the tags), machine memory will\n \/\/\/ cache the result. 
(This relies on `AllocMap::get_or` being able to add the\n \/\/\/ owned allocation to the map even when the map is shared.)\n fn static_with_default_tag(\n alloc: &'_ Allocation\n ) -> Cow<'_, Allocation<Self::PointerTag, Self::AllocExtra>>;\n\n \/\/\/ Called for all binary operations on integer(-like) types when one operand is a pointer\n \/\/\/ value, and for the `Offset` operation that is inherently about pointers.\n \/\/\/\n \/\/\/ Returns a (value, overflowed) pair if the operation succeeded\n fn ptr_op(\n ecx: &EvalContext<'a, 'mir, 'tcx, Self>,\n bin_op: mir::BinOp,\n left: Scalar<Self::PointerTag>,\n left_layout: TyLayout<'tcx>,\n right: Scalar<Self::PointerTag>,\n right_layout: TyLayout<'tcx>,\n ) -> EvalResult<'tcx, (Scalar<Self::PointerTag>, bool)>;\n\n \/\/\/ Heap allocations via the `box` keyword.\n fn box_alloc(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n dest: PlaceTy<'tcx, Self::PointerTag>,\n ) -> EvalResult<'tcx>;\n\n \/\/\/ Hook for performing extra checks on a memory access.\n \/\/\/\n \/\/\/ Takes read-only access to the allocation so we can keep all the memory read\n \/\/\/ operations take `&self`. Use a `RefCell` in `AllocExtra` if you\n \/\/\/ need to mutate.\n #[inline]\n fn memory_accessed(\n _alloc: &Allocation<Self::PointerTag, Self::AllocExtra>,\n _ptr: Pointer<Self::PointerTag>,\n _size: Size,\n _access: MemoryAccess,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n\n \/\/\/ Hook for performing extra checks when memory gets deallocated.\n #[inline]\n fn memory_deallocated(\n _alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,\n _id: AllocId,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n\n \/\/\/ Executed when evaluating the `&` operator: Creating a new reference.\n \/\/\/ This has the chance to adjust the tag.\n fn tag_reference(\n ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n ptr: Pointer<Self::PointerTag>,\n pointee_ty: Ty<'tcx>,\n pointee_size: Size,\n borrow_kind: mir::BorrowKind,\n ) -> EvalResult<'tcx, Self::PointerTag>;\n\n \/\/\/ Executed when evaluating the `*` operator: Following a reference.\n \/\/\/ This has the change to adjust the tag.\n fn tag_dereference(\n ecx: &EvalContext<'a, 'mir, 'tcx, Self>,\n ptr: Pointer<Self::PointerTag>,\n ptr_ty: Ty<'tcx>,\n ) -> EvalResult<'tcx, Self::PointerTag>;\n\n \/\/\/ Execute a validation operation\n #[inline]\n fn validation_op(\n _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,\n _op: ::rustc::mir::ValidationOp,\n _operand: &::rustc::mir::ValidationOperand<'tcx, ::rustc::mir::Place<'tcx>>,\n ) -> EvalResult<'tcx> {\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #14227<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern {\n pub static symbol: ();\n}\nstatic CRASH: () = symbol; \/\/~ cannot refer to other statics by value\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added async example which makes multiple requests (#351)<commit_after>#![deny(warnings)]\n\nextern crate futures;\nextern crate reqwest;\nextern crate tokio;\nextern crate serde;\n#[macro_use] extern crate serde_derive;\nextern crate serde_json;\n\nuse futures::Future;\nuse reqwest::async::{Client, Response};\n\n#[derive(Deserialize, Debug)]\nstruct Slideshow {\n title: String,\n author: String,\n}\n\n#[derive(Deserialize, Debug)]\nstruct SlideshowContainer {\n slideshow: Slideshow,\n}\n\nfn fetch() -> impl Future<Item=(), Error=()> {\n let client = Client::new();\n\n let json = |mut res : Response | {\n res.json::<SlideshowContainer>()\n };\n\n let request1 =\n client\n .get(\"https:\/\/httpbin.org\/json\")\n .send()\n .and_then(json);\n\n let request2 =\n client\n .get(\"https:\/\/httpbin.org\/json\")\n .send()\n .and_then(json);\n\n request1.join(request2)\n .map(|(res1, res2)|{\n println!(\"{:?}\", res1);\n println!(\"{:?}\", res2);\n })\n .map_err(|err| {\n println!(\"stdout error: {}\", err);\n })\n}\n\nfn main() {\n tokio::run(fetch());\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-arm stdcall isn't suppported\n\nextern \"stdcall\" {\n fn printf(_: *const u8, ...); \/\/~ ERROR: variadic function must have C or cdecl calling\n}\n\nextern {\n fn foo(f: isize, x: u8, ...);\n}\n\nextern \"C\" fn bar(f: isize, x: u8) {}\n\nfn main() {\n \/\/ errors below are no longer checked because error above aborts\n \/\/ compilation; see variadic-ffi-3.rs for corresponding test.\n unsafe {\n foo();\n foo(1);\n\n let x: unsafe extern \"C\" fn(f: isize, x: u8) = foo;\n let y: extern \"C\" fn(f: isize, x: u8, ...) = bar;\n\n foo(1, 2, 3f32);\n foo(1, 2, true);\n foo(1, 2, 1i8);\n foo(1, 2, 1u8);\n foo(1, 2, 1i16);\n foo(1, 2, 1u16);\n }\n}\n<commit_msg>Ignore variadic FFI test on AArch64<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-arm stdcall isn't suppported\n\/\/ ignore-aarch64 stdcall isn't suppported\n\nextern \"stdcall\" {\n fn printf(_: *const u8, ...); \/\/~ ERROR: variadic function must have C or cdecl calling\n}\n\nextern {\n fn foo(f: isize, x: u8, ...);\n}\n\nextern \"C\" fn bar(f: isize, x: u8) {}\n\nfn main() {\n \/\/ errors below are no longer checked because error above aborts\n \/\/ compilation; see variadic-ffi-3.rs for corresponding test.\n unsafe {\n foo();\n foo(1);\n\n let x: unsafe extern \"C\" fn(f: isize, x: u8) = foo;\n let y: extern \"C\" fn(f: isize, x: u8, ...) = bar;\n\n foo(1, 2, 3f32);\n foo(1, 2, true);\n foo(1, 2, 1i8);\n foo(1, 2, 1u8);\n foo(1, 2, 1i16);\n foo(1, 2, 1u16);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple example.<commit_after>extern crate igd;\n\nfn main() {\n match igd::search_gateway() {\n Err(ref err) => println!(\"{:?}\", err),\n Ok(local_soaddr) => {\n match igd::get_external_ip(local_soaddr) {\n Err(ref err) => println!(\"{:?}\", err),\n Ok(ext_addr) => {\n println!(\"Local gateway: {}, Extern gateway: {}\", local_soaddr, ext_addr);\n },\n }\n },\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Drawing v1.0.0<commit_after>extern crate turtle;\n\nuse turtle::{Turtle, Color};\n\nfn main() {\n let mut turtle = Turtle::new();\n turtle.drawing_mut().set_title(\"Version 1.0.0!!!\");\n\n turtle.drawing_mut().set_background_color(\"#FFEB3B\");\n turtle.set_pen_size(5.0);\n\n turtle.set_speed(\"instant\");\n turtle.pen_up();\n turtle.go_to((350.0, 178.0));\n turtle.pen_down();\n\n bg_lines(&mut turtle);\n\n turtle.pen_up();\n turtle.go_to((-270.0, -200.0));\n turtle.set_heading(90.0);\n turtle.pen_down();\n\n turtle.set_speed(\"normal\");\n turtle.set_pen_color(\"#2196F3\");\n turtle.set_fill_color(Color::from(\"#00E5FF\").with_alpha(0.75));\n\n one(&mut turtle);\n\n turtle.set_speed(25);\n\n turtle.pen_up();\n turtle.left(90.0);\n turtle.backward(50.0);\n turtle.pen_down();\n\n small_circle(&mut turtle);\n\n turtle.pen_up();\n turtle.backward(150.0);\n turtle.pen_down();\n\n zero(&mut turtle);\n\n turtle.pen_up();\n turtle.backward(150.0);\n turtle.pen_down();\n\n small_circle(&mut turtle);\n\n turtle.pen_up();\n turtle.backward(150.0);\n turtle.pen_down();\n\n zero(&mut turtle);\n}\n\nfn bg_lines(turtle: &mut Turtle) {\n turtle.set_pen_color(\"#76FF03\");\n turtle.set_heading(165.0);\n turtle.forward(280.0);\n\n turtle.left(147.0);\n turtle.forward(347.0);\n\n turtle.right(158.0);\n turtle.forward(547.0);\n\n turtle.left(138.0);\n turtle.forward(539.0);\n\n turtle.right(168.0);\n turtle.forward(477.0);\n\n turtle.left(154.0);\n turtle.forward(377.0);\n\n turtle.right(158.0);\n turtle.forward(329.0);\n}\n\nfn small_circle(turtle: &mut Turtle) {\n turtle.begin_fill();\n for _ in 0..90 {\n turtle.forward(2.0);\n turtle.backward(1.0);\n turtle.right(4.0);\n }\n turtle.end_fill();\n}\n\nfn one(turtle: &mut Turtle) {\n turtle.begin_fill();\n for _ in 0..2 {\n turtle.forward(420.0);\n turtle.left(90.0);\n turtle.forward(50.0);\n turtle.left(90.0);\n }\n turtle.end_fill();\n}\n\nfn zero(turtle: &mut Turtle) {\n turtle.begin_fill();\n for _ in 0..2 {\n arc_right(turtle);\n arc_forward(turtle);\n }\n turtle.end_fill();\n}\n\nfn arc_right(turtle: &mut Turtle) {\n \/\/ Draw an arc that moves right faster than it moves forward\n for i in 0..90 {\n turtle.forward(3.0);\n turtle.right((90 - i) as f64 \/ 45.0);\n }\n}\n\nfn arc_forward(turtle: 
&mut Turtle) {\n \/\/ Draw an arc that moves forward faster than it moves right\n for i in 0..90 {\n turtle.forward(3.0);\n turtle.right(i as f64 \/ 45.0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove cargo.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added example for response parsing<commit_after>use imap_proto::parse_response;\nuse std::io::Write;\n\nfn main() -> std::io::Result<()> {\n loop {\n let line = {\n print!(\"Enter IMAP4REV1 response: \");\n std::io::stdout().flush().unwrap();\n\n let mut line = String::new();\n std::io::stdin().read_line(&mut line)?;\n line\n };\n\n match parse_response(line.replace(\"\\n\", \"\\r\\n\").as_bytes()) {\n Ok((remaining, command)) => {\n println!(\"{:#?}\", command);\n\n if !remaining.is_empty() {\n println!(\"Remaining data in buffer: {:?}\", remaining);\n }\n }\n Err(_) => {\n println!(\"Error parsing the response. Is it correct? Exiting.\");\n break;\n }\n }\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #39<commit_after>extern mod std;\nuse std::map::{ HashMap };\n\nextern mod euler;\nuse euler::arith::{ isqrt };\nuse euler::calc::{ get_gcd };\n\nfn main() {\n \/\/ a + b + c = 2m(m + n) <= L\n \/\/ 1 <= n <= L \/ 2m - m\n \/\/ if n == 1, a + b + c = 2m^2 + 2m <= L\n \/\/ m <= (sqrt(1 + L) - 1)\/2\n let limit = 1000;\n let map = HashMap::<uint, uint>();\n\n for uint::range(1, (isqrt(1 + limit) - 1) \/ 2) |m| {\n for uint::range(1, uint::min(1 + limit \/ (2 * m) - m, m)) |n| {\n if (m - n) % 2 == 0 { loop; }\n if get_gcd(m, n) != 1 { loop; }\n let (a, b, c) = (m * m - n * n, 2 * m * n, m * m + n * n);\n let s = a + b + c;\n for uint::range(1, limit \/ s + 1) |k| {\n map.insert(k * s, map.find(k * s).get_default(0) + 1);\n }\n }\n }\n\n let mut max_key = 0;\n let mut max_val = 0;\n for map.each |k, v| {\n if max_val < v {\n max_key = k;\n max_val = v;\n }\n }\n\n io::println(fmt!(\"answer: %?\", max_key));\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>prototype dynamic api<commit_after>\/\/\/ `NodeBuilder` is a high level object,\n\/\/\/ usable for fast prototyping and creating app from services list.\n\nuse toml::Value;\nuse std::iter;\n\ntype CommandId = &'static str;\n\nstatic CORE_COMMANDS_LIST: [CommandId; 5] = [\n \"run\",\n \"generate-testnet\",\n \"generate-template\",\n \"add-validator\",\n \"init\",\n];\n\ntype ExtendedCommands = BTreeMap<Command, ExtendedCommand>;\n\nstruct NodeBuilder {\n commands: ExtendedCommands,\n services: Box<Service>\n}\n\nimpl NodeBuilder {\n\n fn new() -> NodeBuilder\n {\n let commands = CORE_COMMANDS_LIST\n .iter()\n .zip(iter::repeat(Vec::new()))\n .collect()\n NodeBuilder {\n commands\n }\n }\n\n fn with_service<S: ServiceFactory>(mut self, service: S) -> NodeBuilder {\n \/\/TODO: take endpoints, etc...\n for command in self.commands.iter_mut() {\n command.push(service.command(command));\n }\n self\n }\n\n fn to_node(self) -> Node {\n self.parse_cmd()\n }\n\n fn run(self) {\n self.to_node().run()\n }\n}\n\nstruct Argument {\n pub short_name: String,\n pub long_name: String,\n pub required: bool,\n pub help: String,\n}\n\nimpl Argument {\n fn into_clap(&self) -> clap::Arg {\n Arg::with_name(&self.long_name)\n .short(&self.short_name)\n .long(&self.long_name)\n .about(&self.help)\n .required(self.required)\n }\n}\n\nstruct Context {\n values: BTreeMap<String, Value>\n}\n\ntrait CommandExtender {\n fn args(&self ) -> Vec<Argument>;\n fn execute(&self, context: Context) -> Result<Context, 
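// Illustrative aside on the Euler #39 entry above (not part of the original, which uses
// pre-1.0 Rust): the same m/n parameterization -- Euclid's formula for primitive
// Pythagorean triples, perimeter 2m(m + n) -- in modern Rust. The function name is made
// up; for the classic limit of 1000 it should report 840.
fn most_solutions(limit: u64) -> u64 {
    fn gcd(a: u64, b: u64) -> u64 {
        if b == 0 { a } else { gcd(b, a % b) }
    }
    let mut counts = vec![0u32; (limit + 1) as usize];
    let mut m = 2;
    // Smallest perimeter for a given m is with n = 1, i.e. 2m(m + 1).
    while 2 * m * (m + 1) <= limit {
        for n in 1..m {
            // Primitive triples require m, n coprime and of opposite parity.
            if (m - n) % 2 == 1 && gcd(m, n) == 1 {
                let s = 2 * m * (m + n); // a + b + c for the primitive triple
                if s > limit {
                    break;
                }
                // Count the primitive triple and all its integer multiples.
                let mut p = s;
                while p <= limit {
                    counts[p as usize] += 1;
                    p += s;
                }
            }
        }
        m += 1;
    }
    (0..=limit).max_by_key(|&p| counts[p as usize]).unwrap_or(0)
}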
Box<Error>>;\n}\n\n\/*\n#[Debug, Copy, Clone, Eq, PartialEq]\npub struct ExtendedCommand {\n name: &'static str,\n services: Vec<Box<CommandExtender>>\n}\n*\/\n\ntrait ExtendedCommand {\n\n fn id(&self) -> CommandId;\n fn name(&self) -> &str {\n self.id()\n }\n \n fn extend(&mut self, extender: Box<CommandExtender>);\n\n fn about(&self) -> &str;\n\n pub fn execute(&self);\n}\n\n\ntrait ServiceFactory {\n \/\/\\TODO we could move \n \/\/ `service_name` and `service_id` from `Service` trait into this one\n \/\/fn name() -> &'static str;\n \/\/\/ return `CommandExtender` for specific `CommandId`\n fn command(command: CommandId) -> Box<CommandExtender>;\n \/\/\/ create new service, from context, returned by `run` command.\n fn make_service(self, run_context: &Context) -> Box<Service>;\n}\n\n\nstruct ClapBacked;\n\nimpl ClapBackend {\n fn execute(commands: &ExtendedCommands) {\n let mut app = App::new(command.about())\n .version(env!(\"CARGO_PKG_VERSION\"))\n .author(\"Vladimir M. <vladimir.motylenko@xdev.re>\")\n .about(command.about());\n\n for c in commands.iter() {\n let app = app.subcommand(ClapBackend::into_subcommand(c));\n }\n\n let matches = app.get_matches();\n let command = commands.get(matches.subcommand()); \n command.expect(\"Subcommand not found\").execute();\n }\n\n fn into_subcommand<'a, 'a>(command: &'a ExtendedCommand) -> App<'a, 'a>{\n let command_args = command.args()\n .iter()\n .map(|command|command.into_clap());\n let mut subcommand = SubCommand::with_name(command.name())\n .about(command.about());\n for command in command_args {\n subcommand = subcommand.arg(command);\n }\n\n subcommand\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Routine to compute the strongly connected components (SCCs) of a\n\/\/! graph, as well as the resulting DAG if each SCC is replaced with a\n\/\/! node in the graph. This uses Tarjan's algorithm that completes in\n\/\/! O(n) time.\n\nuse fx::FxHashSet;\nuse graph::{DirectedGraph, WithNumNodes, WithSuccessors};\nuse indexed_vec::{Idx, IndexVec};\nuse std::ops::Range;\n\nmod test;\n\n\/\/\/ Strongly connected components (SCC) of a graph. The type `N` is\n\/\/\/ the index type for the graph nodes and `S` is the index type for\n\/\/\/ the SCCs. We can map from each node to the SCC that it\n\/\/\/ participates in, and we also have the successors of each SCC.\npub struct Sccs<N: Idx, S: Idx> {\n \/\/\/ For each node, what is the SCC index of the SCC to which it\n \/\/\/ belongs.\n scc_indices: IndexVec<N, S>,\n\n \/\/\/ Data about each SCC.\n scc_data: SccData<S>,\n}\n\nstruct SccData<S: Idx> {\n \/\/\/ For each SCC, the range of `all_successors` where its\n \/\/\/ successors can be found.\n ranges: IndexVec<S, Range<usize>>,\n\n \/\/\/ Contains the succcessors for all the Sccs, concatenated. 
The\n \/\/\/ range of indices corresponding to a given SCC is found in its\n \/\/\/ SccData.\n all_successors: Vec<S>,\n}\n\nimpl<N: Idx, S: Idx> Sccs<N, S> {\n pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {\n SccsConstruction::construct(graph)\n }\n\n \/\/\/ Returns the number of SCCs in the graph.\n pub fn num_sccs(&self) -> usize {\n self.scc_data.len()\n }\n\n \/\/\/ Returns the number of SCCs in the graph.\n pub fn all_sccs(&self) -> impl Iterator<Item = S> {\n (0 .. self.scc_data.len()).map(S::new)\n }\n\n \/\/\/ Returns the SCC to which a node `r` belongs.\n pub fn scc(&self, r: N) -> S {\n self.scc_indices[r]\n }\n\n \/\/\/ Returns the successors of the given SCC.\n pub fn successors(&self, scc: S) -> &[S] {\n self.scc_data.successors(scc)\n }\n}\n\nimpl<S: Idx> SccData<S> {\n \/\/\/ Number of SCCs,\n fn len(&self) -> usize {\n self.ranges.len()\n }\n\n \/\/\/ Returns the successors of the given SCC.\n fn successors(&self, scc: S) -> &[S] {\n \/\/ Annoyingly, `range` does not implement `Copy`, so we have\n \/\/ to do `range.start..range.end`:\n let range = &self.ranges[scc];\n &self.all_successors[range.start..range.end]\n }\n\n \/\/\/ Creates a new SCC with `successors` as its successors and\n \/\/\/ returns the resulting index.\n fn create_scc(&mut self, successors: impl IntoIterator<Item = S>) -> S {\n \/\/ Store the successors on `scc_successors_vec`, remembering\n \/\/ the range of indices.\n let all_successors_start = self.all_successors.len();\n self.all_successors.extend(successors);\n let all_successors_end = self.all_successors.len();\n\n debug!(\n \"create_scc({:?}) successors={:?}\",\n self.ranges.len(),\n &self.all_successors[all_successors_start..all_successors_end],\n );\n\n self.ranges.push(all_successors_start..all_successors_end)\n }\n}\n\nstruct SccsConstruction<'c, G: DirectedGraph + WithNumNodes + WithSuccessors + 'c, S: Idx> {\n graph: &'c G,\n\n \/\/\/ The state of each node; used during walk to record the stack\n \/\/\/ and after walk to record what cycle each node ended up being\n \/\/\/ in.\n node_states: IndexVec<G::Node, NodeState<G::Node, S>>,\n\n \/\/\/ The stack of nodes that we are visiting as part of the DFS.\n node_stack: Vec<G::Node>,\n\n \/\/\/ The stack of successors: as we visit a node, we mark our\n \/\/\/ position in this stack, and when we encounter a successor SCC,\n \/\/\/ we push it on the stack. When we complete an SCC, we can pop\n \/\/\/ everything off the stack that was found along the way.\n successors_stack: Vec<S>,\n\n \/\/\/ A set used to strip duplicates. As we accumulate successors\n \/\/\/ into the successors_stack, we sometimes get duplicate entries.\n \/\/\/ We use this set to remove those -- we keep it around between\n \/\/\/ successors to amortize memory allocation costs.\n duplicate_set: FxHashSet<S>,\n\n scc_data: SccData<S>,\n}\n\n#[derive(Copy, Clone, Debug)]\nenum NodeState<N, S> {\n \/\/\/ This node has not yet been visited as part of the DFS.\n \/\/\/\n \/\/\/ After SCC construction is complete, this state ought to be\n \/\/\/ impossible.\n NotVisited,\n\n \/\/\/ This node is currently being walk as part of our DFS. 
It is on\n \/\/\/ the stack at the depth `depth`.\n \/\/\/\n \/\/\/ After SCC construction is complete, this state ought to be\n \/\/\/ impossible.\n BeingVisited { depth: usize },\n\n \/\/\/ Indicates that this node is a member of the given cycle.\n InCycle { scc_index: S },\n\n \/\/\/ Indicates that this node is a member of whatever cycle\n \/\/\/ `parent` is a member of. This state is transient: whenever we\n \/\/\/ see it, we try to overwrite it with the current state of\n \/\/\/ `parent` (this is the \"path compression\" step of a union-find\n \/\/\/ algorithm).\n InCycleWith { parent: N },\n}\n\n#[derive(Copy, Clone, Debug)]\nenum WalkReturn<S> {\n Cycle { min_depth: usize },\n Complete { scc_index: S },\n}\n\nimpl<'c, G, S> SccsConstruction<'c, G, S>\nwhere\n G: DirectedGraph + WithNumNodes + WithSuccessors,\n S: Idx,\n{\n \/\/\/ Identifies SCCs in the graph `G` and computes the resulting\n \/\/\/ DAG. This uses a variant of [Tarjan's\n \/\/\/ algorithm][wikipedia]. The high-level summary of the algorithm\n \/\/\/ is that we do a depth-first search. Along the way, we keep a\n \/\/\/ stack of each node whose successors are being visited. We\n \/\/\/ track the depth of each node on this stack (there is no depth\n \/\/\/ if the node is not on the stack). When we find that some node\n \/\/\/ N with depth D can reach some other node N' with lower depth\n \/\/\/ D' (i.e., D' < D), we know that N, N', and all nodes in\n \/\/\/ between them on the stack are part of an SCC.\n \/\/\/\n \/\/\/ For each node, we track the lowest depth of any successor we\n \/\/\/ have found, along with that\n \/\/\/\n \/\/\/ [wikipedia]: https:\/\/bit.ly\/2EZIx84\n fn construct(graph: &'c G) -> Sccs<G::Node, S> {\n let num_nodes = graph.num_nodes();\n\n let mut this = Self {\n graph,\n node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes),\n node_stack: Vec::with_capacity(num_nodes),\n successors_stack: Vec::new(),\n scc_data: SccData {\n ranges: IndexVec::new(),\n all_successors: Vec::new(),\n },\n duplicate_set: FxHashSet::default(),\n };\n\n let scc_indices = (0..num_nodes)\n .map(G::Node::new)\n .map(|node| match this.walk_node(0, node) {\n WalkReturn::Complete { scc_index } => scc_index,\n WalkReturn::Cycle { min_depth } => panic!(\n \"`walk_node(0, {:?})` returned cycle with depth {:?}\",\n node, min_depth\n ),\n })\n .collect();\n\n Sccs {\n scc_indices,\n scc_data: this.scc_data,\n }\n }\n\n fn walk_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {\n debug!(\"walk_node(depth = {:?}, node = {:?})\", depth, node);\n match self.find_state(node) {\n NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index },\n\n NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth },\n\n NodeState::NotVisited => self.walk_unvisited_node(depth, node),\n\n NodeState::InCycleWith { parent } => panic!(\n \"`find_state` returned `InCycleWith({:?})`, which ought to be impossible\",\n parent\n ),\n }\n }\n\n \/\/\/ Fetches the state of the node `r`. If `r` is recorded as being\n \/\/\/ in a cycle with some other node `r2`, then fetches the state\n \/\/\/ of `r2` (and updates `r` to reflect current result). 
This is\n \/\/\/ basically the \"find\" part of a standard union-find algorithm\n \/\/\/ (with path compression).\n fn find_state(&mut self, r: G::Node) -> NodeState<G::Node, S> {\n debug!(\"find_state(r = {:?} in state {:?})\", r, self.node_states[r]);\n match self.node_states[r] {\n NodeState::InCycle { scc_index } => NodeState::InCycle { scc_index },\n NodeState::BeingVisited { depth } => NodeState::BeingVisited { depth },\n NodeState::NotVisited => NodeState::NotVisited,\n NodeState::InCycleWith { parent } => {\n let parent_state = self.find_state(parent);\n debug!(\"find_state: parent_state = {:?}\", parent_state);\n match parent_state {\n NodeState::InCycle { .. } => {\n self.node_states[r] = parent_state;\n parent_state\n }\n\n NodeState::BeingVisited { depth } => {\n self.node_states[r] = NodeState::InCycleWith {\n parent: self.node_stack[depth],\n };\n parent_state\n }\n\n NodeState::NotVisited | NodeState::InCycleWith { .. } => {\n panic!(\"invalid parent state: {:?}\", parent_state)\n }\n }\n }\n }\n }\n\n \/\/\/ Walks a node that has never been visited before.\n fn walk_unvisited_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {\n debug!(\n \"walk_unvisited_node(depth = {:?}, node = {:?})\",\n depth, node\n );\n\n debug_assert!(match self.node_states[node] {\n NodeState::NotVisited => true,\n _ => false,\n });\n\n self.node_states[node] = NodeState::BeingVisited { depth };\n self.node_stack.push(node);\n\n \/\/ Walk each successor of the node, looking to see if any of\n \/\/ them can reach a node that is presently on the stack. If\n \/\/ so, that means they can also reach us.\n let mut min_depth = depth;\n let mut min_cycle_root = node;\n let successors_len = self.successors_stack.len();\n for successor_node in self.graph.successors(node) {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_ode = {:?}\",\n node, successor_node\n );\n match self.walk_node(depth + 1, successor_node) {\n WalkReturn::Cycle {\n min_depth: successor_min_depth,\n } => {\n assert!(successor_min_depth <= depth);\n if successor_min_depth < min_depth {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_min_depth = {:?}\",\n node, successor_min_depth\n );\n min_depth = successor_min_depth;\n min_cycle_root = successor_node;\n }\n }\n\n WalkReturn::Complete {\n scc_index: successor_scc_index,\n } => {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_scc_index = {:?}\",\n node, successor_scc_index\n );\n self.successors_stack.push(successor_scc_index);\n }\n }\n }\n\n let r = self.node_stack.pop();\n debug_assert_eq!(r, Some(node));\n\n if min_depth == depth {\n \/\/ Note that successor stack may have duplicates, so we\n \/\/ want to remove those:\n let deduplicated_successors = {\n let duplicate_set = &mut self.duplicate_set;\n duplicate_set.clear();\n self.successors_stack\n .drain(successors_len..)\n .filter(move |&i| duplicate_set.insert(i))\n };\n let scc_index = self.scc_data.create_scc(deduplicated_successors);\n self.node_states[node] = NodeState::InCycle { scc_index };\n WalkReturn::Complete { scc_index }\n } else {\n \/\/ We are not the head of the cycle. Return back to our\n \/\/ caller. They will take ownership of the\n \/\/ `self.successors` data that we pushed.\n self.node_states[node] = NodeState::InCycleWith {\n parent: min_cycle_root,\n };\n WalkReturn::Cycle { min_depth }\n }\n }\n}\n<commit_msg>nit: clarify \"keep it around\" comment<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Routine to compute the strongly connected components (SCCs) of a\n\/\/! graph, as well as the resulting DAG if each SCC is replaced with a\n\/\/! node in the graph. This uses Tarjan's algorithm that completes in\n\/\/! O(n) time.\n\nuse fx::FxHashSet;\nuse graph::{DirectedGraph, WithNumNodes, WithSuccessors};\nuse indexed_vec::{Idx, IndexVec};\nuse std::ops::Range;\n\nmod test;\n\n\/\/\/ Strongly connected components (SCC) of a graph. The type `N` is\n\/\/\/ the index type for the graph nodes and `S` is the index type for\n\/\/\/ the SCCs. We can map from each node to the SCC that it\n\/\/\/ participates in, and we also have the successors of each SCC.\npub struct Sccs<N: Idx, S: Idx> {\n \/\/\/ For each node, what is the SCC index of the SCC to which it\n \/\/\/ belongs.\n scc_indices: IndexVec<N, S>,\n\n \/\/\/ Data about each SCC.\n scc_data: SccData<S>,\n}\n\nstruct SccData<S: Idx> {\n \/\/\/ For each SCC, the range of `all_successors` where its\n \/\/\/ successors can be found.\n ranges: IndexVec<S, Range<usize>>,\n\n \/\/\/ Contains the succcessors for all the Sccs, concatenated. The\n \/\/\/ range of indices corresponding to a given SCC is found in its\n \/\/\/ SccData.\n all_successors: Vec<S>,\n}\n\nimpl<N: Idx, S: Idx> Sccs<N, S> {\n pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {\n SccsConstruction::construct(graph)\n }\n\n \/\/\/ Returns the number of SCCs in the graph.\n pub fn num_sccs(&self) -> usize {\n self.scc_data.len()\n }\n\n \/\/\/ Returns the number of SCCs in the graph.\n pub fn all_sccs(&self) -> impl Iterator<Item = S> {\n (0 .. 
self.scc_data.len()).map(S::new)\n }\n\n \/\/\/ Returns the SCC to which a node `r` belongs.\n pub fn scc(&self, r: N) -> S {\n self.scc_indices[r]\n }\n\n \/\/\/ Returns the successors of the given SCC.\n pub fn successors(&self, scc: S) -> &[S] {\n self.scc_data.successors(scc)\n }\n}\n\nimpl<S: Idx> SccData<S> {\n \/\/\/ Number of SCCs,\n fn len(&self) -> usize {\n self.ranges.len()\n }\n\n \/\/\/ Returns the successors of the given SCC.\n fn successors(&self, scc: S) -> &[S] {\n \/\/ Annoyingly, `range` does not implement `Copy`, so we have\n \/\/ to do `range.start..range.end`:\n let range = &self.ranges[scc];\n &self.all_successors[range.start..range.end]\n }\n\n \/\/\/ Creates a new SCC with `successors` as its successors and\n \/\/\/ returns the resulting index.\n fn create_scc(&mut self, successors: impl IntoIterator<Item = S>) -> S {\n \/\/ Store the successors on `scc_successors_vec`, remembering\n \/\/ the range of indices.\n let all_successors_start = self.all_successors.len();\n self.all_successors.extend(successors);\n let all_successors_end = self.all_successors.len();\n\n debug!(\n \"create_scc({:?}) successors={:?}\",\n self.ranges.len(),\n &self.all_successors[all_successors_start..all_successors_end],\n );\n\n self.ranges.push(all_successors_start..all_successors_end)\n }\n}\n\nstruct SccsConstruction<'c, G: DirectedGraph + WithNumNodes + WithSuccessors + 'c, S: Idx> {\n graph: &'c G,\n\n \/\/\/ The state of each node; used during walk to record the stack\n \/\/\/ and after walk to record what cycle each node ended up being\n \/\/\/ in.\n node_states: IndexVec<G::Node, NodeState<G::Node, S>>,\n\n \/\/\/ The stack of nodes that we are visiting as part of the DFS.\n node_stack: Vec<G::Node>,\n\n \/\/\/ The stack of successors: as we visit a node, we mark our\n \/\/\/ position in this stack, and when we encounter a successor SCC,\n \/\/\/ we push it on the stack. When we complete an SCC, we can pop\n \/\/\/ everything off the stack that was found along the way.\n successors_stack: Vec<S>,\n\n \/\/\/ A set used to strip duplicates. As we accumulate successors\n \/\/\/ into the successors_stack, we sometimes get duplicate entries.\n \/\/\/ We use this set to remove those -- we also keep its storage\n \/\/\/ around between successors to amortize memory allocation costs.\n duplicate_set: FxHashSet<S>,\n\n scc_data: SccData<S>,\n}\n\n#[derive(Copy, Clone, Debug)]\nenum NodeState<N, S> {\n \/\/\/ This node has not yet been visited as part of the DFS.\n \/\/\/\n \/\/\/ After SCC construction is complete, this state ought to be\n \/\/\/ impossible.\n NotVisited,\n\n \/\/\/ This node is currently being walk as part of our DFS. It is on\n \/\/\/ the stack at the depth `depth`.\n \/\/\/\n \/\/\/ After SCC construction is complete, this state ought to be\n \/\/\/ impossible.\n BeingVisited { depth: usize },\n\n \/\/\/ Indicates that this node is a member of the given cycle.\n InCycle { scc_index: S },\n\n \/\/\/ Indicates that this node is a member of whatever cycle\n \/\/\/ `parent` is a member of. 
This state is transient: whenever we\n \/\/\/ see it, we try to overwrite it with the current state of\n \/\/\/ `parent` (this is the \"path compression\" step of a union-find\n \/\/\/ algorithm).\n InCycleWith { parent: N },\n}\n\n#[derive(Copy, Clone, Debug)]\nenum WalkReturn<S> {\n Cycle { min_depth: usize },\n Complete { scc_index: S },\n}\n\nimpl<'c, G, S> SccsConstruction<'c, G, S>\nwhere\n G: DirectedGraph + WithNumNodes + WithSuccessors,\n S: Idx,\n{\n \/\/\/ Identifies SCCs in the graph `G` and computes the resulting\n \/\/\/ DAG. This uses a variant of [Tarjan's\n \/\/\/ algorithm][wikipedia]. The high-level summary of the algorithm\n \/\/\/ is that we do a depth-first search. Along the way, we keep a\n \/\/\/ stack of each node whose successors are being visited. We\n \/\/\/ track the depth of each node on this stack (there is no depth\n \/\/\/ if the node is not on the stack). When we find that some node\n \/\/\/ N with depth D can reach some other node N' with lower depth\n \/\/\/ D' (i.e., D' < D), we know that N, N', and all nodes in\n \/\/\/ between them on the stack are part of an SCC.\n \/\/\/\n \/\/\/ For each node, we track the lowest depth of any successor we\n \/\/\/ have found, along with that\n \/\/\/\n \/\/\/ [wikipedia]: https:\/\/bit.ly\/2EZIx84\n fn construct(graph: &'c G) -> Sccs<G::Node, S> {\n let num_nodes = graph.num_nodes();\n\n let mut this = Self {\n graph,\n node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes),\n node_stack: Vec::with_capacity(num_nodes),\n successors_stack: Vec::new(),\n scc_data: SccData {\n ranges: IndexVec::new(),\n all_successors: Vec::new(),\n },\n duplicate_set: FxHashSet::default(),\n };\n\n let scc_indices = (0..num_nodes)\n .map(G::Node::new)\n .map(|node| match this.walk_node(0, node) {\n WalkReturn::Complete { scc_index } => scc_index,\n WalkReturn::Cycle { min_depth } => panic!(\n \"`walk_node(0, {:?})` returned cycle with depth {:?}\",\n node, min_depth\n ),\n })\n .collect();\n\n Sccs {\n scc_indices,\n scc_data: this.scc_data,\n }\n }\n\n fn walk_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {\n debug!(\"walk_node(depth = {:?}, node = {:?})\", depth, node);\n match self.find_state(node) {\n NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index },\n\n NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth },\n\n NodeState::NotVisited => self.walk_unvisited_node(depth, node),\n\n NodeState::InCycleWith { parent } => panic!(\n \"`find_state` returned `InCycleWith({:?})`, which ought to be impossible\",\n parent\n ),\n }\n }\n\n \/\/\/ Fetches the state of the node `r`. If `r` is recorded as being\n \/\/\/ in a cycle with some other node `r2`, then fetches the state\n \/\/\/ of `r2` (and updates `r` to reflect current result). This is\n \/\/\/ basically the \"find\" part of a standard union-find algorithm\n \/\/\/ (with path compression).\n fn find_state(&mut self, r: G::Node) -> NodeState<G::Node, S> {\n debug!(\"find_state(r = {:?} in state {:?})\", r, self.node_states[r]);\n match self.node_states[r] {\n NodeState::InCycle { scc_index } => NodeState::InCycle { scc_index },\n NodeState::BeingVisited { depth } => NodeState::BeingVisited { depth },\n NodeState::NotVisited => NodeState::NotVisited,\n NodeState::InCycleWith { parent } => {\n let parent_state = self.find_state(parent);\n debug!(\"find_state: parent_state = {:?}\", parent_state);\n match parent_state {\n NodeState::InCycle { .. 
} => {\n self.node_states[r] = parent_state;\n parent_state\n }\n\n NodeState::BeingVisited { depth } => {\n self.node_states[r] = NodeState::InCycleWith {\n parent: self.node_stack[depth],\n };\n parent_state\n }\n\n NodeState::NotVisited | NodeState::InCycleWith { .. } => {\n panic!(\"invalid parent state: {:?}\", parent_state)\n }\n }\n }\n }\n }\n\n \/\/\/ Walks a node that has never been visited before.\n fn walk_unvisited_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {\n debug!(\n \"walk_unvisited_node(depth = {:?}, node = {:?})\",\n depth, node\n );\n\n debug_assert!(match self.node_states[node] {\n NodeState::NotVisited => true,\n _ => false,\n });\n\n self.node_states[node] = NodeState::BeingVisited { depth };\n self.node_stack.push(node);\n\n \/\/ Walk each successor of the node, looking to see if any of\n \/\/ them can reach a node that is presently on the stack. If\n \/\/ so, that means they can also reach us.\n let mut min_depth = depth;\n let mut min_cycle_root = node;\n let successors_len = self.successors_stack.len();\n for successor_node in self.graph.successors(node) {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_ode = {:?}\",\n node, successor_node\n );\n match self.walk_node(depth + 1, successor_node) {\n WalkReturn::Cycle {\n min_depth: successor_min_depth,\n } => {\n assert!(successor_min_depth <= depth);\n if successor_min_depth < min_depth {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_min_depth = {:?}\",\n node, successor_min_depth\n );\n min_depth = successor_min_depth;\n min_cycle_root = successor_node;\n }\n }\n\n WalkReturn::Complete {\n scc_index: successor_scc_index,\n } => {\n debug!(\n \"walk_unvisited_node: node = {:?} successor_scc_index = {:?}\",\n node, successor_scc_index\n );\n self.successors_stack.push(successor_scc_index);\n }\n }\n }\n\n let r = self.node_stack.pop();\n debug_assert_eq!(r, Some(node));\n\n if min_depth == depth {\n \/\/ Note that successor stack may have duplicates, so we\n \/\/ want to remove those:\n let deduplicated_successors = {\n let duplicate_set = &mut self.duplicate_set;\n duplicate_set.clear();\n self.successors_stack\n .drain(successors_len..)\n .filter(move |&i| duplicate_set.insert(i))\n };\n let scc_index = self.scc_data.create_scc(deduplicated_successors);\n self.node_states[node] = NodeState::InCycle { scc_index };\n WalkReturn::Complete { scc_index }\n } else {\n \/\/ We are not the head of the cycle. Return back to our\n \/\/ caller. 
They will take ownership of the\n \/\/ `self.successors` data that we pushed.\n self.node_states[node] = NodeState::InCycleWith {\n parent: min_cycle_root,\n };\n WalkReturn::Cycle { min_depth }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Commit cpu<commit_after>use crate::text::{Attributes, Text};\nuse crate::widgets::{Widget, WidgetStream};\nuse anyhow::{anyhow, Result};\nuse async_stream::try_stream;\nuse std::fs::File;\nuse std::io::BufRead;\nuse std::io::BufReader;\nuse std::time::Duration;\n\npub struct Cpu {\n attr: Attributes,\n cpu_data: CpuData,\n}\n\nimpl Cpu {\n pub fn new(attr: Attributes) -> Result<Cpu> {\n let cpu_data = CpuData::get_values()?;\n Ok(Cpu { attr, cpu_data })\n }\n}\n\nstruct CpuData {\n user_time: i64,\n nice_time: i64,\n system_time: i64,\n idle_time: i64,\n total_time: i64,\n iowait_time: i64,\n}\n\nimpl CpuData {\n fn get_values() -> Result<CpuData> {\n \/\/ https:\/\/www.kernel.org\/doc\/Documentation\/filesystems\/proc.txt\n let file = File::open(\"\/proc\/stat\")?;\n let mut cpu_line = String::new();\n let mut reader = BufReader::new(file);\n reader.read_line(&mut cpu_line)?;\n let val: Vec<&str> = cpu_line\n .split(' ')\n .filter(|item| item != &\"cpu\" && !item.is_empty())\n .collect();\n let mut cpu_data = CpuData {\n user_time: 0,\n nice_time: 0,\n system_time: 0,\n idle_time: 0,\n total_time: 0,\n iowait_time: 0,\n };\n println!(\"{}\", val.len());\n match val[..] {\n [ref user, ref nice, ref system, ref idle, ref iowait, ..] => {\n let user_time = user.parse().unwrap();\n let nice_time = nice.parse().unwrap();\n let system_time = system.parse().unwrap();\n let idle_time = idle.parse().unwrap();\n let iowait_time = iowait.parse().unwrap();\n cpu_data.user_time = user_time;\n cpu_data.nice_time = nice_time;\n cpu_data.system_time = system_time;\n cpu_data.idle_time = idle_time;\n cpu_data.iowait_time = iowait_time;\n cpu_data.total_time = user_time + nice_time + system_time;\n }\n _ => return Err(anyhow!(\"Missing data in \/proc\/stat\")),\n }\n Ok(cpu_data)\n }\n}\n\nimpl Widget for Cpu {\n fn into_stream(mut self: Box<Self>) -> Result<WidgetStream> {\n let stream = try_stream! 
{\n loop {\n let cpu_data = CpuData::get_values()?;\n\n \/\/ Based on htop https:\/\/stackoverflow.com\/a\/23376195\/1651941\n let prev_idle = self.cpu_data.idle_time;\n let prev_non_idle = self.cpu_data.total_time;\n\n let idle = cpu_data.idle_time;\n let non_idle = cpu_data.total_time;\n\n let prev_total = prev_idle + prev_non_idle;\n let total = idle + non_idle;\n\n \/\/ let total_diff = total - prev_total;\n \/\/ let idle_diff = idle - prev_idle;\n let total_diff = total;\n let idle_diff = idle;\n\n \/\/ https:\/\/github.com\/jaor\/xmobar\/blob\/61d075d3c275366c3344d59c058d7dd0baf21ef2\/src\/Xmobar\/Plugins\/Monitors\/Cpu.hs#L128\n let previous = self.cpu_data;\n let current = cpu_data;\n let diff_total = (current.user_time - previous.user_time) +\n (current.nice_time - previous.nice_time) +\n (current.system_time - previous.system_time) +\n (current.idle_time - previous.idle_time) +\n (current.iowait_time - previous.iowait_time) +\n (current.total_time - previous.total_time);\n let percentage = match diff_total {\n 0 => 0.0,\n _ => (current.total_time - previous.total_time) as f64 \/ diff_total as f64\n };\n let text = format!(\"<span foreground=\\\"#808080\\\">[<\/span>Cpu: {}%<span foreground=\\\"#808080\\\">]<\/span>\", (percentage * 100.0) as u64);\n let texts = vec![Text {\n attr: self.attr.clone(),\n text,\n stretch: false,\n markup: true\n }];\n\n self.cpu_data = current;\n\n yield texts;\n\n let sleep_for = Duration::from_secs(10);\n tokio::time::sleep(sleep_for).await;\n }\n };\n\n Ok(Box::pin(stream))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #332 - jimmycuadra:arg-docs, r=kbknapp<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add SpaceCommand<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Basic heap support<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::Mac;\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n Newline,\n ToBase64,\n};\nuse header::Header;\nuse claims::Claims;\n\npub mod error;\npub mod header;\npub mod claims;\n\npub struct Token {\n header: Header,\n claims: Claims,\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n}\n<commit_msg>Add verify function<commit_after>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::{\n Mac,\n MacResult,\n};\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n FromBase64,\n Newline,\n ToBase64,\n};\nuse header::Header;\nuse claims::Claims;\n\npub mod error;\npub mod header;\npub mod claims;\n\npub struct Token {\n header: Header,\n claims: 
Claims,\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\nfn verify<D: Digest>(target: &str, data: &str, key: &str, digest: D) -> bool {\n let target_bytes = match target.from_base64() {\n Ok(x) => x,\n Err(_) => return false,\n };\n let target_mac = MacResult::new_from_owned(target_bytes);\n\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n hmac.result() == target_mac\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use verify;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n\n #[test]\n pub fn verify_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let target = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n assert!(verify(target, &*data, \"secret\", Sha256::new()));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>lib.rs escaped the prior commit<commit_after>#[macro_use]\nextern crate enum_primitive;\nextern crate modbus;\nextern crate byteorder;\nextern crate rustc_serialize;\nextern crate tokio_core;\nextern crate tokio_proto;\n\npub mod block ;\npub use block::BlankRegisters;\n\nuse tokio_core::io::{Io, Codec, Framed, EasyBuf};\n\nuse modbus::{Coil,binary,Reason,ExceptionCode,tcp};\nuse byteorder::{BigEndian, ReadBytesExt,WriteBytesExt};\nuse std::io::Cursor;\nuse tokio_core::reactor::Core;\nuse tokio_proto::pipeline::ServerProto;\n\nuse std::io::{self, ErrorKind, Write,Read};\nuse enum_primitive::FromPrimitive;\n\nenum_from_primitive! 
{\n#[derive(Copy, Clone, Debug, PartialEq)]\npub enum FunctionCode {\n ReadCoils = 0x01,\n ReadDiscreteInputs = 0x02,\n ReadHoldingRegisters = 0x03,\n ReadInputRegisters = 0x04,\n WriteSingleCoil = 0x05,\n WriteSingleRegister = 0x06,\n WriteMultipleCoils = 0x0f,\n WriteMultipleRegisters = 0x10\n}\n}\n\n\n#[derive(Default)]\npub struct ModbusTCPCodec;\n\ntype Code = u8;\ntype Count = u8;\ntype Address = u16;\ntype Quantity = u16;\ntype Value = u16;\ntype Values = Vec<u16>;\n\n\n#[derive(Debug)]\npub enum ModbusResponsePDU {\n ReadCoilsResponse{code:Code,byte_count:Count,\n coil_status: Vec<u8>},\n ReadDiscreteInputsResponse{ code:Code,\n byte_count: Count,\n input_status: Vec<u8>},\n ReadHoldingRegistersResponse { code:Code,\n byte_count: Count,\n values:Values},\n ReadInputRegistersResponse{code:Code,\n byte_count: Count,\n values:Values},\n WriteSingleCoilResponse { code:Code,address:Address,value:Value},\n WriteSingleRegisterResponse { code:Code,address:Address,value:Value},\n WriteMultipleCoilsResponse { code:Code, address:Address, quantity:Quantity},\n WriteMultipleRegistersResponse { code:Code, address:Address, quantity:Quantity},\n ModbusErrorResponse {code:Code, exception_code:Code}\n}\n\nimpl ModbusResponsePDU {\n fn encode (&self) -> Vec<u8> {\n let mut buff:Vec<u8> = Vec::new(); \n match *self {\n ModbusResponsePDU::ReadHoldingRegistersResponse{\n code:c,byte_count:b,\n values: ref v\n } => {\n buff.write_u8(c);\n buff.write_u8(b);\n buff.write(binary::unpack_bytes(v).as_slice());\n },\n ModbusResponsePDU::ReadInputRegistersResponse{\n code:c,byte_count:b,\n values: ref v\n } => {\n buff.write_u8(c);\n buff.write_u8(b);\n buff.write(binary::unpack_bytes(v).as_slice());\n },\n ModbusResponsePDU::ReadDiscreteInputsResponse{\n code:c,byte_count:b,\n input_status:ref s\n } => {\n buff.write_u8(c);\n buff.write_u8(b);\n buff.write(s);\n },\n ModbusResponsePDU::ReadCoilsResponse{\n code:c,byte_count:b,\n coil_status:ref s\n } => {\n buff.write_u8(c);\n buff.write_u8(b);\n buff.write(s);\n },\n ModbusResponsePDU::ModbusErrorResponse{code:c,exception_code:e} => {\n buff.write_u8(c);\n buff.write_u8(e);\n },\n ModbusResponsePDU::WriteMultipleRegistersResponse{\n code:c,address:a,quantity:q } => {\n buff.write_u8(c);\n buff.write_u16::<BigEndian>(a);\n buff.write_u16::<BigEndian>(q);\n },\n ModbusResponsePDU::WriteMultipleCoilsResponse{\n code:c,address:a,quantity:q } => {\n buff.write_u8(c);\n buff.write_u16::<BigEndian>(a);\n buff.write_u16::<BigEndian>(q);\n },\n ModbusResponsePDU::WriteSingleCoilResponse{\n code:c,address:a,value:q } => {\n buff.write_u8(c);\n buff.write_u16::<BigEndian>(a);\n buff.write_u16::<BigEndian>(q);\n },\n ModbusResponsePDU::WriteSingleRegisterResponse{\n code:c,address:a,value:q } => {\n buff.write_u8(c);\n buff.write_u16::<BigEndian>(a);\n buff.write_u16::<BigEndian>(q);\n }\n }\n buff\n }\n \n\n}\n\/\/ This could be imported from modbus::tcp. 
\n#[derive(RustcEncodable, RustcDecodable,Debug)]\n#[repr(packed)]\npub struct Header {\n tid: u16,\n pid: u16,\n len: u16,\n uid: u8,\n}\n\nimpl Header {\n fn encode (&self) -> Vec<u8>{\n let mut buff:Vec<u8> = Vec::new(); \n buff.write_u16::<BigEndian>(self.tid);\n buff.write_u16::<BigEndian>(self.pid);\n buff.write_u16::<BigEndian>(self.len);\n buff.write_u8(self.uid);\n buff\n }\n}\n\n\n#[derive(Debug,Clone)]\npub struct ModbusFooter {\n byte_count:u8,\n data : Vec<u8>\n}\n#[derive(Debug,Clone)]\npub struct ModbusRequestPDU {\n code: u8,\n address: u16,\n \/\/ specifies quanity for some instructions,\n \/\/ value for others. \n q_or_v:u16,\n addl: Option<ModbusFooter>\n}\n\n#[derive(Debug)]\npub struct ModbusTCPRequest {\n pub header: Header,\n pub pdu: ModbusRequestPDU\n}\n#[derive(Debug)]\npub struct ModbusTCPResponse {\n pub header: Header,\n pub pdu: ModbusResponsePDU\n}\n\nfn parse_mbap (from: &[u8]) -> Header {\n let mut rdr = Cursor::new(from);\n Header{\n tid: rdr.read_u16::<BigEndian>().unwrap(),\n pid: rdr.read_u16::<BigEndian>().unwrap(),\n len: rdr.read_u16::<BigEndian>().unwrap(),\n uid: rdr.read_u8().unwrap(),\n }\n}\n\nfn parse_modbus_request_pdu(from: &[u8]) -> ModbusRequestPDU {\n let mut rdr = Cursor::new(from);\n\n let code = rdr.read_u8().unwrap();\n let address = rdr.read_u16::<BigEndian>().unwrap();\n let count = rdr.read_u16::<BigEndian>().unwrap();\n let mut addl = None;\n\n match FunctionCode::from_u8(code).unwrap() {\n FunctionCode::WriteMultipleCoils |\n FunctionCode::WriteMultipleRegisters => {\n let mut buffer = Vec::new();\n let byte_count = rdr.read_u8().unwrap();\n rdr.read_to_end(&mut buffer);\n addl = Some(ModbusFooter{\n byte_count:byte_count,\n data: buffer\n });\n println!(\"addl {:?}\",addl);\n },\n _ => {\n\n }\n \n };\n ModbusRequestPDU{\n code:code as u8,\n address:address,\n q_or_v: count,\n addl:addl\n }\n}\n\n\n\nimpl Codec for ModbusTCPCodec {\n \/\/ \n type In = ModbusTCPRequest;\n type Out = ModbusTCPResponse;\n\n \/\/ Attempt to decode a message from the given buffer if a complete\n \/\/ message is available; returns `Ok(None)` if the buffer does not yet\n \/\/ hold a complete message.\n\n \/\/ Read first 12 bytes.\n \/\/ Decide if more are needed. 
\n fn decode(&mut self, buf: &mut EasyBuf) -> std::io::Result<Option<Self::In>> {\n if buf.len() < 12 {\n Ok(None)\n } else {\n let mut length:usize = 0;\n let mut code:u8=0;\n let mut byte_count:usize = 0 ;\n \/\/ Scope created just for z so it goes away before we run parse()\n {\n let z = buf.as_slice();\n code = z[7] as u8;\n length = match FunctionCode::from_u8(code).unwrap() {\n FunctionCode::WriteMultipleCoils |\n FunctionCode::WriteMultipleRegisters => {\n if buf.len() == 12 {\n 0;\n }\n byte_count = z[12] as usize;\n if buf.len() >= byte_count + 13 {\n byte_count+13\n } else {\n 0\n }\n },\n _ => 12\n }\n }\n let S = &buf.drain_to(length);\n let s = S.as_slice();\n\n match length {\n 0 => Ok(None),\n\n _ => {\n Ok(Some(ModbusTCPRequest {\n header:parse_mbap(&s[0..7]),\n pdu:parse_modbus_request_pdu(&s[7..length])\n }))\n }\n }\n }\n }\n\n \/\/ Attempt to decode a message assuming that the given buffer contains\n \/\/ *all* remaining input data.\n fn decode_eof(&mut self, buf: &mut EasyBuf) -> io::Result<ModbusTCPRequest> {\n let s = buf.as_slice();\n Ok(ModbusTCPRequest {\n header:parse_mbap(&s[0..7]),\n pdu:parse_modbus_request_pdu(&s[7..buf.len()])\n })\n }\n\n fn encode(&mut self, item: ModbusTCPResponse, into: &mut Vec<u8>) -> io::Result<()> {\n into.write(item.header.encode().as_slice());\n into.write(item.pdu.encode().as_slice());\n Ok(())\n }\n}\n\n\n\npub struct ModbusTCPProto;\n\nimpl<T: Io + 'static> ServerProto<T> for ModbusTCPProto {\n type Request = ModbusTCPRequest;\n type Response = ModbusTCPResponse;\n type Transport = Framed<T, ModbusTCPCodec>;\n type BindTransport = ::std::result::Result<Self::Transport,io::Error>;\n\n fn bind_transport(&self, io: T) -> Self::BindTransport {\n Ok(io.framed(ModbusTCPCodec))\n }\n}\nmod test;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #161 - goffrie:patch-1, r=KiChjang<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Adding docs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove docs about signatures, maps, and sets.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor comment update<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove bounds checks in rom reads<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Select random image from first 100 results from Derpibooru.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add EKS `create-cluster` example<commit_after>\/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n * SPDX-License-Identifier: Apache-2.0.\n *\/\n\nuse aws_config::meta::region::RegionProviderChain;\nuse aws_sdk_eks::model::VpcConfigRequest;\nuse aws_sdk_eks::Region;\nuse structopt::StructOpt;\n\n#[derive(Debug, StructOpt)]\nstruct Opt {\n \/\/\/ The region\n #[structopt(short, long)]\n region: Option<String>,\n\n #[structopt(short, long)]\n cluster_name: String,\n\n \/\/\/ Role ARN for the cluster\n \/\/\/ To create a role-arn:\n \/\/\/\n \/\/\/ 1. Follow instructions to create an IAM role:\n \/\/\/ https:\/\/docs.aws.amazon.com\/eks\/latest\/userguide\/service_IAM_role.html\n \/\/\/\n \/\/\/ 2. Copy role arn\n #[structopt(long)]\n role_arn: String,\n\n \/\/\/ subnet id\n \/\/\/\n \/\/\/ At least two subnet ids must be specified. 
The subnet ids must be in two separate AZs\n #[structopt(short, long)]\n subnet_id: Vec<String>,\n}\n\n#[tokio::main]\nasync fn main() -> Result<(), aws_sdk_eks::Error> {\n let Opt {\n region,\n cluster_name,\n role_arn,\n subnet_id,\n } = Opt::from_args();\n let region_provider = RegionProviderChain::first_try(region.map(Region::new))\n .or_default_provider()\n .or_else(Region::new(\"us-west-2\"));\n let shared_config = aws_config::from_env().region(region_provider).load().await;\n let client = aws_sdk_eks::Client::new(&shared_config);\n\n let cluster = client\n .create_cluster()\n .name(&cluster_name)\n .role_arn(role_arn)\n .resources_vpc_config(\n VpcConfigRequest::builder()\n .set_subnet_ids(Some(subnet_id))\n .build(),\n )\n .send()\n .await?;\n println!(\"cluster created: {:?}\", cluster);\n\n let cluster_deleted = client.delete_cluster().name(&cluster_name).send().await?;\n println!(\"cluster deleted: {:?}\", cluster_deleted);\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>use inflector::Inflector;\n\nuse botocore::{Member, Operation, Service, Shape};\nuse super::GenerateProtocol;\n\npub struct QueryGenerator;\n\nimpl GenerateProtocol for QueryGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n format!(\n \"{documentation}\n{method_signature} {{\n let mut request = SignedRequest::new(\n \\\"{http_method}\\\",\n \\\"{endpoint_prefix}\\\",\n self.region,\n \\\"{request_uri}\\\",\n );\n let mut params = Params::new();\n\n params.put(\\\"Action\\\", \\\"{operation_name}\\\");\n {serialize_input}\n\n request.set_params(params);\n\n let result = request.sign_and_execute(try!(self.credentials_provider.credentials()));\n let status = result.status.to_u16();\n let mut reader = EventReader::new(result);\n let mut stack = XmlResponseFromAws::new(reader.events().peekable());\n stack.next();\n stack.next();\n\n match status {{\n 200 => {{\n {method_return_value}\n }}\n status_code => Err(AwsError::new(\n format!(\\\"HTTP response code for {operation_name}: {{}}\\\", status_code)\n ))\n }}\n}}\n \",\n documentation = generate_documentation(operation),\n http_method = &operation.http.method,\n endpoint_prefix = &service.metadata.endpoint_prefix,\n method_return_value = generate_method_return_value(operation),\n method_signature = generate_method_signature(operation),\n operation_name = &operation.name,\n request_uri = &operation.http.request_uri,\n serialize_input = generate_method_input_serialization(operation),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self) -> String {\n \"use std::collections::HashMap;\n use std::str::{FromStr, from_utf8};\n\n use xml::EventReader;\n\n use credential::ProvideAwsCredentials;\n use error::AwsError;\n use param::{Params, ServiceParams};\n use region::Region;\n use signature::SignedRequest;\n use xmlutil::{Next, Peek, XmlParseError, XmlResponseFromAws};\n use xmlutil::{characters, end_element, peek_at_name, start_element};\n \".to_owned()\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default)]\".to_owned()\n }\n\n fn generate_support_types(&self, name: &str, shape: &Shape, service: &Service) -> Option<String> {\n Some(format!(\n \"\/\/\/ Deserializes `{name}` from XML.\n struct {name}Deserializer;\n impl {name}Deserializer {{\n fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T)\n -> Result<{name}, XmlParseError> {{\n {deserializer_body}\n }}\n }}\n\n \/\/\/ Serialize `{name}` contents to a `SignedRequest`.\n 
struct {name}Serializer;\n impl {name}Serializer {{\n {serializer_signature} {{\n {serializer_body}\n }}\n }}\n \",\n deserializer_body = generate_deserializer_body(name, shape, service),\n name = name,\n serializer_body = generate_serializer_body(shape),\n serializer_signature = generate_serializer_signature(name, shape),\n ))\n }\n}\n\nfn generate_documentation(operation: &Operation) -> String {\n match operation.documentation {\n Some(ref docs) => format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")),\n None => \"\".to_owned(),\n }\n}\n\nfn generate_method_input_serialization(operation: &Operation) -> String {\n if operation.input.is_some() {\n format!(\n \"{input_type}Serializer::serialize(&mut params, \\\"\\\", &input);\",\n input_type = operation.input.as_ref().unwrap().shape,\n )\n } else {\n String::new()\n }\n}\n\nfn generate_method_return_value(operation: &Operation) -> String {\n if operation.output.is_some() {\n format!(\n \"Ok(try!({output_type}Deserializer::deserialize(\\\"{output_type}\\\", &mut stack)))\",\n output_type = &operation.output.as_ref().unwrap().shape,\n )\n } else {\n \"Ok(())\".to_owned()\n }\n}\n\nfn generate_method_signature(operation: &Operation) -> String {\n if operation.input.is_some() {\n format!(\n \"pub fn {operation_name}(&mut self, input: &{input_type}) -> Result<{output_type}, AwsError>\",\n input_type = operation.input.as_ref().unwrap().shape,\n operation_name = operation.name.to_snake_case(),\n output_type = &operation.output_shape_or(\"()\"),\n )\n } else {\n format!(\n \"pub fn {operation_name}(&mut self) -> Result<{output_type}, AwsError>\",\n operation_name = operation.name.to_snake_case(),\n output_type = &operation.output_shape_or(\"()\"),\n )\n }\n}\n\nfn generate_deserializer_body(name: &str, shape: &Shape, service: &Service) -> String {\n match &shape.shape_type[..] {\n \"list\" => generate_list_deserializer(shape),\n \"map\" => generate_map_deserializer(shape),\n \"structure\" => generate_struct_deserializer(name, shape, service),\n _ => generate_primitive_deserializer(shape),\n }\n}\n\nfn generate_list_deserializer(shape: &Shape) -> String {\n format!(\n \"\n let mut obj = vec![];\n\n while try!(peek_at_name(stack)) == tag_name {{\n obj.push(try!({member_name}Deserializer::deserialize(tag_name, stack)));\n }}\n\n Ok(obj)\n \",\n member_name = shape.member()\n )\n}\n\nfn generate_map_deserializer(shape: &Shape) -> String {\n let key = shape.key.as_ref().unwrap();\n let value = shape.value.as_ref().unwrap();\n\n format!(\n \"\n let mut obj = HashMap::new();\n\n while try!(peek_at_name(stack)) == tag_name {{\n try!(start_element(tag_name, stack));\n let key = try!({key_type_name}Deserializer::deserialize(\\\"{key_tag_name}\\\", stack));\n let value = try!({value_type_name}Deserializer::deserialize(\\\"{value_tag_name}\\\", stack));\n obj.insert(key, value);\n try!(end_element(tag_name, stack));\n }}\n\n Ok(obj)\n \",\n key_tag_name = key.tag_name(),\n key_type_name = key.shape,\n value_tag_name = value.tag_name(),\n value_type_name = value.shape,\n )\n}\n\nfn generate_primitive_deserializer(shape: &Shape) -> String {\n let statement = match &shape.shape_type[..] 
{\n \"string\" | \"timestamp\" => \"try!(characters(stack))\",\n \"integer\" => \"i32::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n \"double\" => \"f32::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n \"blob\" => \"try!(characters(stack)).into_bytes()\",\n \"boolean\" => \"bool::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n shape_type => panic!(\"Unknown primitive shape type: {}\", shape_type),\n };\n\n format!(\n \"try!(start_element(tag_name, stack));\n let obj = {statement};\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n statement = statement,\n )\n}\n\nfn generate_struct_deserializer(name: &str, shape: &Shape, service: &Service) -> String {\n if shape.members.as_ref().unwrap().is_empty() {\n return format!(\n \"try!(start_element(tag_name, stack));\n\n let obj = {name}::default();\n\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n name = name,\n );\n }\n\n format!(\n \"try!(start_element(tag_name, stack));\n\n let mut obj = {name}::default();\n\n loop {{\n match &try!(peek_at_name(stack))[..] {{\n {struct_field_deserializers}\n _ => break,\n }}\n }}\n\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n name = name,\n struct_field_deserializers = generate_struct_field_deserializers(shape, service),\n )\n}\n\nfn generate_struct_field_deserializers(shape: &Shape, service: &Service) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n \/\/ look up member.shape in all_shapes. use that shape.member.location_name\n let mut location_name = member_name.to_string();\n let mut parse_expression = generate_struct_field_parse_expression(shape, member_name, member, None);\n\n match service.shape_for_member(member) {\n Some(ref child_shape) => {\n match child_shape.flattened {\n Some(_) => {\n match child_shape.member {\n Some(ref child_member) => {\n match child_member.location_name {\n Some(ref loc_name) => {\n location_name = loc_name.to_string();\n parse_expression = generate_struct_field_parse_expression(shape, member_name, member, Some(&location_name));\n },\n None => (),\n }\n },\n None => (),\n }\n }\n None => (),\n };\n },\n None => (),\n }\n format!(\n \"\\\"{location_name}\\\" => {{\n obj.{field_name} = {parse_expression};\n continue;\n }}\",\n field_name = member_name.to_snake_case(),\n parse_expression = parse_expression,\n location_name = location_name,\n )\n\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_struct_field_parse_expression(\n shape: &Shape,\n member_name: &str,\n member: &Member,\n location_name: Option<&String>,\n) -> String {\n\n let location_to_use = match location_name {\n Some(loc) => loc.to_string(),\n None => member_name.to_string(),\n };\n let expression = format!(\n \"try!({name}Deserializer::deserialize(\\\"{location}\\\", stack))\",\n name = member.shape,\n location = location_to_use,\n );\n\n if shape.required(member_name) {\n expression\n } else {\n format!(\"Some({})\", expression)\n }\n}\n\nfn generate_serializer_body(shape: &Shape) -> String {\n match &shape.shape_type[..] {\n \"list\" => generate_list_serializer(shape),\n \"map\" => generate_map_serializer(shape),\n \"structure\" => generate_struct_serializer(shape),\n _ => generate_primitive_serializer(shape),\n }\n}\n\nfn generate_serializer_signature(name: &str, shape: &Shape) -> String {\n if &shape.shape_type[..] 
== \"structure\" && shape.members.as_ref().unwrap().is_empty() {\n format!(\"fn serialize(_params: &mut Params, name: &str, _obj: &{})\", name)\n } else {\n format!(\"fn serialize(params: &mut Params, name: &str, obj: &{})\", name)\n }\n}\n\nfn generate_list_serializer(shape: &Shape) -> String {\n format!(\n \"for (index, element) in obj.iter().enumerate() {{\n let key = format!(\\\"{{}}.{{}}\\\", name, index);\n {name}Serializer::serialize(params, &key, element);\n}}\n \",\n name = shape.member(),\n )\n}\n\nfn generate_map_serializer(shape: &Shape) -> String {\n format!(\n \"for (index, (key, value)) in obj.iter().enumerate() {{\n let prefix = format!(\\\"{{}}.{{}}\\\", name, index);\n {key_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}.{{}}\\\", prefix, \\\"{key_name}\\\"),\n key,\n );\n {value_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}.{{}}\\\", prefix, \\\"{value_name}\\\"),\n value,\n );\n}}\n \",\n key_name = shape.key(),\n value_name = shape.value(),\n )\n}\n\nfn generate_struct_serializer(shape: &Shape) -> String {\n format!(\n \"let mut prefix = name.to_string();\nif prefix != \\\"\\\" {{\n prefix.push_str(\\\".\\\");\n}}\n\n{struct_field_serializers}\n \",\n struct_field_serializers = generate_struct_field_serializers(shape),\n )\n}\n\nfn generate_struct_field_serializers(shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n if shape.required(member_name) {\n format!(\n \"{member_shape_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}{{}}\\\", prefix, \\\"{tag_name}\\\"),\n &obj.{field_name},\n);\n \",\n field_name = member_name.to_snake_case(),\n member_shape_name = member.shape,\n tag_name = member_name,\n )\n } else {\n format!(\n \"if let Some(ref field_value) = obj.{field_name} {{\n {member_shape_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}{{}}\\\", prefix, \\\"{tag_name}\\\"),\n field_value,\n );\n}}\n \",\n field_name = member_name.to_snake_case(),\n member_shape_name = member.shape,\n tag_name = member.tag_name(),\n )\n }\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_primitive_serializer(shape: &Shape) -> String {\n let expression = match &shape.shape_type[..] 
{\n \"string\" | \"timestamp\" => \"obj\",\n \"integer\" | \"double\" | \"boolean\" => \"&obj.to_string()\",\n \"blob\" => \"from_utf8(obj).unwrap()\",\n shape_type => panic!(\"Unknown primitive shape type: {}\", shape_type),\n };\n\n format!(\"params.put(name, {});\", expression)\n}\n<commit_msg>feat: clean up nested `match` with `if let`<commit_after>use inflector::Inflector;\n\nuse botocore::{Member, Operation, Service, Shape};\nuse super::GenerateProtocol;\n\npub struct QueryGenerator;\n\nimpl GenerateProtocol for QueryGenerator {\n fn generate_methods(&self, service: &Service) -> String {\n service.operations.values().map(|operation| {\n format!(\n \"{documentation}\n{method_signature} {{\n let mut request = SignedRequest::new(\n \\\"{http_method}\\\",\n \\\"{endpoint_prefix}\\\",\n self.region,\n \\\"{request_uri}\\\",\n );\n let mut params = Params::new();\n\n params.put(\\\"Action\\\", \\\"{operation_name}\\\");\n {serialize_input}\n\n request.set_params(params);\n\n let result = request.sign_and_execute(try!(self.credentials_provider.credentials()));\n let status = result.status.to_u16();\n let mut reader = EventReader::new(result);\n let mut stack = XmlResponseFromAws::new(reader.events().peekable());\n stack.next();\n stack.next();\n\n match status {{\n 200 => {{\n {method_return_value}\n }}\n status_code => Err(AwsError::new(\n format!(\\\"HTTP response code for {operation_name}: {{}}\\\", status_code)\n ))\n }}\n}}\n \",\n documentation = generate_documentation(operation),\n http_method = &operation.http.method,\n endpoint_prefix = &service.metadata.endpoint_prefix,\n method_return_value = generate_method_return_value(operation),\n method_signature = generate_method_signature(operation),\n operation_name = &operation.name,\n request_uri = &operation.http.request_uri,\n serialize_input = generate_method_input_serialization(operation),\n )\n }).collect::<Vec<String>>().join(\"\\n\")\n }\n\n fn generate_prelude(&self) -> String {\n \"use std::collections::HashMap;\n use std::str::{FromStr, from_utf8};\n\n use xml::EventReader;\n\n use credential::ProvideAwsCredentials;\n use error::AwsError;\n use param::{Params, ServiceParams};\n use region::Region;\n use signature::SignedRequest;\n use xmlutil::{Next, Peek, XmlParseError, XmlResponseFromAws};\n use xmlutil::{characters, end_element, peek_at_name, start_element};\n \".to_owned()\n }\n\n fn generate_struct_attributes(&self) -> String {\n \"#[derive(Debug, Default)]\".to_owned()\n }\n\n fn generate_support_types(&self, name: &str, shape: &Shape, service: &Service) -> Option<String> {\n Some(format!(\n \"\/\/\/ Deserializes `{name}` from XML.\n struct {name}Deserializer;\n impl {name}Deserializer {{\n fn deserialize<'a, T: Peek + Next>(tag_name: &str, stack: &mut T)\n -> Result<{name}, XmlParseError> {{\n {deserializer_body}\n }}\n }}\n\n \/\/\/ Serialize `{name}` contents to a `SignedRequest`.\n struct {name}Serializer;\n impl {name}Serializer {{\n {serializer_signature} {{\n {serializer_body}\n }}\n }}\n \",\n deserializer_body = generate_deserializer_body(name, shape, service),\n name = name,\n serializer_body = generate_serializer_body(shape),\n serializer_signature = generate_serializer_signature(name, shape),\n ))\n }\n}\n\nfn generate_documentation(operation: &Operation) -> String {\n match operation.documentation {\n Some(ref docs) => format!(\"#[doc=\\\"{}\\\"]\", docs.replace(\"\\\"\", \"\\\\\\\"\")),\n None => \"\".to_owned(),\n }\n}\n\nfn generate_method_input_serialization(operation: &Operation) -> String {\n if 
operation.input.is_some() {\n format!(\n \"{input_type}Serializer::serialize(&mut params, \\\"\\\", &input);\",\n input_type = operation.input.as_ref().unwrap().shape,\n )\n } else {\n String::new()\n }\n}\n\nfn generate_method_return_value(operation: &Operation) -> String {\n if operation.output.is_some() {\n format!(\n \"Ok(try!({output_type}Deserializer::deserialize(\\\"{output_type}\\\", &mut stack)))\",\n output_type = &operation.output.as_ref().unwrap().shape,\n )\n } else {\n \"Ok(())\".to_owned()\n }\n}\n\nfn generate_method_signature(operation: &Operation) -> String {\n if operation.input.is_some() {\n format!(\n \"pub fn {operation_name}(&mut self, input: &{input_type}) -> Result<{output_type}, AwsError>\",\n input_type = operation.input.as_ref().unwrap().shape,\n operation_name = operation.name.to_snake_case(),\n output_type = &operation.output_shape_or(\"()\"),\n )\n } else {\n format!(\n \"pub fn {operation_name}(&mut self) -> Result<{output_type}, AwsError>\",\n operation_name = operation.name.to_snake_case(),\n output_type = &operation.output_shape_or(\"()\"),\n )\n }\n}\n\nfn generate_deserializer_body(name: &str, shape: &Shape, service: &Service) -> String {\n match &shape.shape_type[..] {\n \"list\" => generate_list_deserializer(shape),\n \"map\" => generate_map_deserializer(shape),\n \"structure\" => generate_struct_deserializer(name, shape, service),\n _ => generate_primitive_deserializer(shape),\n }\n}\n\nfn generate_list_deserializer(shape: &Shape) -> String {\n format!(\n \"\n let mut obj = vec![];\n\n while try!(peek_at_name(stack)) == tag_name {{\n obj.push(try!({member_name}Deserializer::deserialize(tag_name, stack)));\n }}\n\n Ok(obj)\n \",\n member_name = shape.member()\n )\n}\n\nfn generate_map_deserializer(shape: &Shape) -> String {\n let key = shape.key.as_ref().unwrap();\n let value = shape.value.as_ref().unwrap();\n\n format!(\n \"\n let mut obj = HashMap::new();\n\n while try!(peek_at_name(stack)) == tag_name {{\n try!(start_element(tag_name, stack));\n let key = try!({key_type_name}Deserializer::deserialize(\\\"{key_tag_name}\\\", stack));\n let value = try!({value_type_name}Deserializer::deserialize(\\\"{value_tag_name}\\\", stack));\n obj.insert(key, value);\n try!(end_element(tag_name, stack));\n }}\n\n Ok(obj)\n \",\n key_tag_name = key.tag_name(),\n key_type_name = key.shape,\n value_tag_name = value.tag_name(),\n value_type_name = value.shape,\n )\n}\n\nfn generate_primitive_deserializer(shape: &Shape) -> String {\n let statement = match &shape.shape_type[..] 
{\n \"string\" | \"timestamp\" => \"try!(characters(stack))\",\n \"integer\" => \"i32::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n \"double\" => \"f32::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n \"blob\" => \"try!(characters(stack)).into_bytes()\",\n \"boolean\" => \"bool::from_str(try!(characters(stack)).as_ref()).unwrap()\",\n shape_type => panic!(\"Unknown primitive shape type: {}\", shape_type),\n };\n\n format!(\n \"try!(start_element(tag_name, stack));\n let obj = {statement};\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n statement = statement,\n )\n}\n\nfn generate_struct_deserializer(name: &str, shape: &Shape, service: &Service) -> String {\n if shape.members.as_ref().unwrap().is_empty() {\n return format!(\n \"try!(start_element(tag_name, stack));\n\n let obj = {name}::default();\n\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n name = name,\n );\n }\n\n format!(\n \"try!(start_element(tag_name, stack));\n\n let mut obj = {name}::default();\n\n loop {{\n match &try!(peek_at_name(stack))[..] {{\n {struct_field_deserializers}\n _ => break,\n }}\n }}\n\n try!(end_element(tag_name, stack));\n\n Ok(obj)\n \",\n name = name,\n struct_field_deserializers = generate_struct_field_deserializers(shape, service),\n )\n}\n\nfn generate_struct_field_deserializers(shape: &Shape, service: &Service) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n \/\/ look up member.shape in all_shapes. use that shape.member.location_name\n let mut location_name = member_name.to_string();\n\n let parse_expression_location_name = if let Some(ref child_shape) = service.shape_for_member(member) {\n if child_shape.flattened.is_some() {\n if let Some(ref child_member) = child_shape.member {\n if let Some(ref loc_name) = child_member.location_name {\n location_name = loc_name.to_string();\n Some(&location_name)\n } else {\n None\n }\n } else {\n None\n }\n } else {\n None\n }\n } else {\n None\n };\n let parse_expression = generate_struct_field_parse_expression(shape, member_name, member, parse_expression_location_name);\n format!(\n \"\\\"{location_name}\\\" => {{\n obj.{field_name} = {parse_expression};\n continue;\n }}\",\n field_name = member_name.to_snake_case(),\n parse_expression = parse_expression,\n location_name = location_name,\n )\n\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_struct_field_parse_expression(\n shape: &Shape,\n member_name: &str,\n member: &Member,\n location_name: Option<&String>,\n) -> String {\n\n let location_to_use = match location_name {\n Some(loc) => loc.to_string(),\n None => member_name.to_string(),\n };\n let expression = format!(\n \"try!({name}Deserializer::deserialize(\\\"{location}\\\", stack))\",\n name = member.shape,\n location = location_to_use,\n );\n\n if shape.required(member_name) {\n expression\n } else {\n format!(\"Some({})\", expression)\n }\n}\n\nfn generate_serializer_body(shape: &Shape) -> String {\n match &shape.shape_type[..] {\n \"list\" => generate_list_serializer(shape),\n \"map\" => generate_map_serializer(shape),\n \"structure\" => generate_struct_serializer(shape),\n _ => generate_primitive_serializer(shape),\n }\n}\n\nfn generate_serializer_signature(name: &str, shape: &Shape) -> String {\n if &shape.shape_type[..] 
== \"structure\" && shape.members.as_ref().unwrap().is_empty() {\n format!(\"fn serialize(_params: &mut Params, name: &str, _obj: &{})\", name)\n } else {\n format!(\"fn serialize(params: &mut Params, name: &str, obj: &{})\", name)\n }\n}\n\nfn generate_list_serializer(shape: &Shape) -> String {\n format!(\n \"for (index, element) in obj.iter().enumerate() {{\n let key = format!(\\\"{{}}.{{}}\\\", name, index);\n {name}Serializer::serialize(params, &key, element);\n}}\n \",\n name = shape.member(),\n )\n}\n\nfn generate_map_serializer(shape: &Shape) -> String {\n format!(\n \"for (index, (key, value)) in obj.iter().enumerate() {{\n let prefix = format!(\\\"{{}}.{{}}\\\", name, index);\n {key_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}.{{}}\\\", prefix, \\\"{key_name}\\\"),\n key,\n );\n {value_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}.{{}}\\\", prefix, \\\"{value_name}\\\"),\n value,\n );\n}}\n \",\n key_name = shape.key(),\n value_name = shape.value(),\n )\n}\n\nfn generate_struct_serializer(shape: &Shape) -> String {\n format!(\n \"let mut prefix = name.to_string();\nif prefix != \\\"\\\" {{\n prefix.push_str(\\\".\\\");\n}}\n\n{struct_field_serializers}\n \",\n struct_field_serializers = generate_struct_field_serializers(shape),\n )\n}\n\nfn generate_struct_field_serializers(shape: &Shape) -> String {\n shape.members.as_ref().unwrap().iter().map(|(member_name, member)| {\n if shape.required(member_name) {\n format!(\n \"{member_shape_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}{{}}\\\", prefix, \\\"{tag_name}\\\"),\n &obj.{field_name},\n);\n \",\n field_name = member_name.to_snake_case(),\n member_shape_name = member.shape,\n tag_name = member_name,\n )\n } else {\n format!(\n \"if let Some(ref field_value) = obj.{field_name} {{\n {member_shape_name}Serializer::serialize(\n params,\n &format!(\\\"{{}}{{}}\\\", prefix, \\\"{tag_name}\\\"),\n field_value,\n );\n}}\n \",\n field_name = member_name.to_snake_case(),\n member_shape_name = member.shape,\n tag_name = member.tag_name(),\n )\n }\n }).collect::<Vec<String>>().join(\"\\n\")\n}\n\nfn generate_primitive_serializer(shape: &Shape) -> String {\n let expression = match &shape.shape_type[..] {\n \"string\" | \"timestamp\" => \"obj\",\n \"integer\" | \"double\" | \"boolean\" => \"&obj.to_string()\",\n \"blob\" => \"from_utf8(obj).unwrap()\",\n shape_type => panic!(\"Unknown primitive shape type: {}\", shape_type),\n };\n\n format!(\"params.put(name, {});\", expression)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Comment the code that enables keyboard input polling<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>adding bt_messages.rs<commit_after>use std::io::Read;\nuse std::io::Result;\n\npub enum Message {\n KeepAlive,\n\n}\n\n\/\/\/len_prefix is big endian\n\/\/\/might want to use traits instead of returning an enum... haven't decided yet. 
would save a match\npub fn decode_message <T> (len_prefix: &[u8; 4], stream: &mut T) -> Message where T:Read {\n let i: u32 = (\n len_prefix[3] as u32\n | ((len_prefix[2] as u32) << 8)\n | ((len_prefix[1] as u32) << 16)\n | ((len_prefix[0] as u32) << 24));\n match i {\n 0 => Message::KeepAlive,\n _ => {\n let mut id_buf = [0; 1];\n let _ = stream.read(&mut id_buf);\n let id = id_buf[0];\n\n println!(\"temp\");\n Message::KeepAlive\n }\n }\n}\n\npub fn test () {\n struct MockStream;\n\n impl Read for MockStream {\n fn read (&mut self, buf: &mut [u8]) -> Result<usize> {\n buf[0] = 0;\n buf[1] = 0;\n buf[2] = 0;\n buf[3] = 0;\n Ok(4)\n }\n }\n\n let mut stream = MockStream;\n let mut buf = [1; 4];\n stream.read(&mut buf);\n decode_message(&buf, &mut stream);\n}\n\n#[test]\nfn test_decode () {\n\n struct MockStream;\n\n impl Read for MockStream {\n fn read (&mut self, buf: &mut [u8]) -> Result<usize> {\n buf[0] = 0;\n buf[1] = 0;\n buf[2] = 0;\n buf[3] = 0;\n Ok(4)\n }\n }\n\n let mut stream = MockStream;\n let mut buf = [1; 4];\n stream.read(&mut buf);\n decode_message(&buf, stream);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = !matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n (Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PreDelete)) , \"vcs\", HP::PreDelete),\n (Box::new(GitUpdateHook::new(sp, HP::PostUpdate)) , \"vcs\", HP::PostUpdate),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn 
get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag 
value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<commit_msg>DeleteHook should be executed after the delete action<commit_after>use std::path::PathBuf;\nuse std::process::Command;\nuse std::env;\nuse std::io::stderr;\nuse std::io::Write;\n\npub use clap::App;\n\nuse clap::{Arg, ArgMatches};\nuse log;\nuse log::LogLevelFilter;\n\nuse configuration::Configuration;\nuse error::RuntimeError;\nuse error::RuntimeErrorKind;\nuse error::MapErrInto;\nuse logger::ImagLogger;\n\nuse libimagstore::store::Store;\n\n#[derive(Debug)]\npub struct Runtime<'a> {\n rtp: PathBuf,\n configuration: Option<Configuration>,\n cli_matches: ArgMatches<'a>,\n store: Store,\n}\n\nimpl<'a> Runtime<'a> {\n\n \/**\n * Gets the CLI spec for the program and retreives the config file path (or uses the default on\n * in $HOME\/.imag\/config, $XDG_CONFIG_DIR\/imag\/config or from env(\"$IMAG_CONFIG\")\n * and builds the Runtime object with it.\n *\n * The cli_spec object should be initially build with the ::get_default_cli_builder() function.\n *\n *\/\n pub fn new(cli_spec: App<'a, 'a>) -> Result<Runtime<'a>, RuntimeError> {\n use std::env;\n\n use libimagstore::hook::position::HookPosition as HP;\n use libimagstore::hook::Hook;\n use libimagstore::error::StoreErrorKind;\n use libimagstorestdhook::debug::DebugHook;\n use libimagstorestdhook::vcs::git::delete::DeleteHook as GitDeleteHook;\n use libimagstorestdhook::vcs::git::update::UpdateHook as GitUpdateHook;\n use libimagerror::trace::trace_error;\n use libimagerror::trace::trace_error_dbg;\n use libimagerror::into::IntoError;\n\n use configuration::error::ConfigErrorKind;\n\n let matches = cli_spec.get_matches();\n\n let is_debugging = matches.is_present(\"debugging\");\n let is_verbose = matches.is_present(\"verbosity\");\n let colored = !matches.is_present(\"no-color-output\");\n\n Runtime::init_logger(is_debugging, is_verbose, colored);\n\n let rtp : PathBuf = matches.value_of(\"runtimepath\")\n .map_or_else(|| {\n env::var(\"HOME\")\n .map(PathBuf::from)\n .map(|mut p| { p.push(\".imag\"); p})\n .unwrap_or_else(|_| {\n panic!(\"You seem to be $HOME-less. Please get a $HOME before using this software. 
We are sorry for you and hope you have some accommodation anyways.\");\n })\n }, PathBuf::from);\n let storepath = matches.value_of(\"storepath\")\n .map_or_else(|| {\n let mut spath = rtp.clone();\n spath.push(\"store\");\n spath\n }, PathBuf::from);\n\n let configpath = matches.value_of(\"config\")\n .map_or_else(|| rtp.clone(), PathBuf::from);\n\n let cfg = match Configuration::new(&configpath) {\n Err(e) => if e.err_type() != ConfigErrorKind::NoConfigFileFound {\n return Err(RuntimeErrorKind::Instantiate.into_error_with_cause(Box::new(e)));\n } else {\n warn!(\"No config file found.\");\n warn!(\"Continuing without configuration file\");\n None\n },\n\n Ok(mut cfg) => {\n if let Err(e) = cfg.override_config(get_override_specs(&matches)) {\n error!(\"Could not apply config overrides\");\n trace_error(&e);\n\n \/\/ TODO: continue question (interactive)\n }\n\n Some(cfg)\n }\n };\n\n let store_config = match cfg {\n Some(ref c) => c.store_config().cloned(),\n None => None,\n };\n\n if is_debugging {\n write!(stderr(), \"Config: {:?}\\n\", cfg).ok();\n write!(stderr(), \"Store-config: {:?}\\n\", store_config).ok();\n }\n\n Store::new(storepath.clone(), store_config).map(|mut store| {\n \/\/ If we are debugging, generate hooks for all positions\n if is_debugging {\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(DebugHook::new(HP::PreCreate)) , \"debug\", HP::PreCreate),\n (Box::new(DebugHook::new(HP::PostCreate)) , \"debug\", HP::PostCreate),\n (Box::new(DebugHook::new(HP::PreRetrieve)) , \"debug\", HP::PreRetrieve),\n (Box::new(DebugHook::new(HP::PostRetrieve)) , \"debug\", HP::PostRetrieve),\n (Box::new(DebugHook::new(HP::PreUpdate)) , \"debug\", HP::PreUpdate),\n (Box::new(DebugHook::new(HP::PostUpdate)) , \"debug\", HP::PostUpdate),\n (Box::new(DebugHook::new(HP::PreDelete)) , \"debug\", HP::PreDelete),\n (Box::new(DebugHook::new(HP::PostDelete)) , \"debug\", HP::PostDelete),\n ];\n\n \/\/ If hook registration fails, trace the error and warn, but continue.\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n }\n\n let sp = storepath;\n\n let hooks : Vec<(Box<Hook>, &str, HP)> = vec![\n (Box::new(GitDeleteHook::new(sp.clone(), HP::PostDelete)) , \"vcs\", HP::PostDelete),\n (Box::new(GitUpdateHook::new(sp, HP::PostUpdate)) , \"vcs\", HP::PostUpdate),\n ];\n\n for (hook, aspectname, position) in hooks {\n if let Err(e) = store.register_hook(position, &String::from(aspectname), hook) {\n if e.err_type() == StoreErrorKind::HookRegisterError {\n trace_error_dbg(&e);\n warn!(\"Registering debug hook with store failed\");\n } else {\n trace_error(&e);\n };\n }\n }\n\n Runtime {\n cli_matches: matches,\n configuration: cfg,\n rtp: rtp,\n store: store,\n }\n })\n .map_err_into(RuntimeErrorKind::Instantiate)\n }\n\n \/**\n * Get a commandline-interface builder object from `clap`\n *\n * This commandline interface builder object already contains some predefined interface flags:\n * * -v | --verbose for verbosity\n * * --debug for debugging\n * * -c <file> | --config <file> for alternative configuration file\n * * -r <path> | --rtp <path> for alternative runtimepath\n * * --store <path> for alternative store path\n * Each has the appropriate help text included.\n *\n * The `appname` shall be \"imag-<command>\".\n *\/\n pub fn 
get_default_cli_builder(appname: &'a str,\n version: &'a str,\n about: &'a str)\n -> App<'a, 'a>\n {\n App::new(appname)\n .version(version)\n .author(\"Matthias Beyer <mail@beyermatthias.de>\")\n .about(about)\n .arg(Arg::with_name(Runtime::arg_verbosity_name())\n .short(\"v\")\n .long(\"verbose\")\n .help(\"Enables verbosity\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_debugging_name())\n .long(\"debug\")\n .help(\"Enables debugging output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_no_color_output_name())\n .long(\"no-color\")\n .help(\"Disable color output\")\n .required(false)\n .takes_value(false))\n\n .arg(Arg::with_name(Runtime::arg_config_name())\n .long(\"config\")\n .help(\"Path to alternative config file\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_config_override_name())\n .long(\"override-config\")\n .help(\"Override a configuration settings. Use 'key=value' pairs, where the key is a path in the TOML configuration. The value must be present in the configuration and be convertible to the type of the configuration setting. If the argument does not contain a '=', it gets ignored. Setting Arrays and Tables is not yet supported.\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_runtimepath_name())\n .long(\"rtp\")\n .help(\"Alternative runtimepath\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_storepath_name())\n .long(\"store\")\n .help(\"Alternative storepath. Must be specified as full path, can be outside of the RTP\")\n .required(false)\n .takes_value(true))\n\n .arg(Arg::with_name(Runtime::arg_editor_name())\n .long(\"editor\")\n .help(\"Set editor\")\n .required(false)\n .takes_value(true))\n }\n\n pub fn arg_names() -> Vec<&'static str> {\n vec![\n Runtime::arg_verbosity_name(),\n Runtime::arg_debugging_name(),\n Runtime::arg_no_color_output_name(),\n Runtime::arg_config_name(),\n Runtime::arg_config_override_name(),\n Runtime::arg_runtimepath_name(),\n Runtime::arg_storepath_name(),\n Runtime::arg_editor_name(),\n ]\n }\n\n pub fn arg_verbosity_name() -> &'static str {\n \"verbosity\"\n }\n\n pub fn arg_debugging_name() -> &'static str {\n \"debugging\"\n }\n\n pub fn arg_no_color_output_name() -> &'static str {\n \"no-color-output\"\n }\n\n pub fn arg_config_name() -> &'static str {\n \"config\"\n }\n\n pub fn arg_config_override_name() -> &'static str {\n \"config-override\"\n }\n\n pub fn arg_runtimepath_name() -> &'static str {\n \"runtimepath\"\n }\n\n pub fn arg_storepath_name() -> &'static str {\n \"storepath\"\n }\n\n pub fn arg_editor_name() -> &'static str {\n \"editor\"\n }\n\n \/**\n * Initialize the internal logger\n *\/\n fn init_logger(is_debugging: bool, is_verbose: bool, colored: bool) {\n use std::env::var as env_var;\n use env_logger;\n\n if env_var(\"IMAG_LOG_ENV\").is_ok() {\n env_logger::init().unwrap();\n } else {\n let lvl = if is_debugging {\n LogLevelFilter::Debug\n } else if is_verbose {\n LogLevelFilter::Info\n } else {\n LogLevelFilter::Warn\n };\n\n log::set_logger(|max_log_lvl| {\n max_log_lvl.set(lvl);\n debug!(\"Init logger with {}\", lvl);\n Box::new(ImagLogger::new(lvl.to_log_level().unwrap()).with_color(colored))\n })\n .map_err(|_| {\n panic!(\"Could not setup logger\");\n })\n .ok();\n }\n }\n\n \/**\n * Get the verbosity flag value\n *\/\n pub fn is_verbose(&self) -> bool {\n self.cli_matches.is_present(\"verbosity\")\n }\n\n \/**\n * Get the debugging flag 
value\n *\/\n pub fn is_debugging(&self) -> bool {\n self.cli_matches.is_present(\"debugging\")\n }\n\n \/**\n * Get the runtimepath\n *\/\n pub fn rtp(&self) -> &PathBuf {\n &self.rtp\n }\n\n \/**\n * Get the commandline interface matches\n *\/\n pub fn cli(&self) -> &ArgMatches {\n &self.cli_matches\n }\n\n \/**\n * Get the configuration object\n *\/\n pub fn config(&self) -> Option<&Configuration> {\n self.configuration.as_ref()\n }\n\n \/**\n * Get the store object\n *\/\n pub fn store(&self) -> &Store {\n &self.store\n }\n\n pub fn editor(&self) -> Option<Command> {\n self.cli()\n .value_of(\"editor\")\n .map(String::from)\n .or({\n match self.configuration {\n Some(ref c) => c.editor().cloned(),\n _ => None,\n }\n })\n .or(env::var(\"EDITOR\").ok())\n .map(Command::new)\n }\n}\n\nfn get_override_specs(matches: &ArgMatches) -> Vec<String> {\n matches\n .values_of(\"config-override\")\n .map(|values| {\n values\n .filter(|s| {\n let b = s.contains(\"=\");\n if !b { warn!(\"override '{}' does not contain '=' - will be ignored!\", s); }\n b\n })\n .map(String::from)\n .collect()\n })\n .unwrap_or(vec![])\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use Command::spawn() instead of exec() on Windows<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Set the step pin to low after a step<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added `remove_item` to `Store`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>use aliases for users.report request<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>run all samples through the parser and look for no errors<commit_after>extern crate warbot;\n\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::Path;\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"\/tests.rs\"));\n\nfn run_file(name: &str) {\n let mut file_path = Path::new(\"tests\/fodder\").join(name);\n file_path.set_extension(\"txt\");\n\n let mut file = match File::open(&file_path) {\n Err(e) => {\n println!(\"Couldn't open {}: {}\", file_path.display(), e);\n panic!(e);\n },\n Ok(f) => f\n };\n\n let mut contents = String::new();\n file.read_to_string(&mut contents).unwrap();\n for raw_line in contents.split('\\n') {\n let line = raw_line.trim().to_owned();\n if line.len() == 0 {\n continue;\n }\n if line.starts_with(\"Output\") {\n continue;\n }\n if line.starts_with(\"Round\") {\n continue;\n }\n if line.starts_with(\"#\") {\n continue;\n }\n match warbot::parser::parse(line) {\n Ok(_) => {},\n Err(e) => panic!(\"{:?}\", e)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore(doctests): remove unnecessary `rust` marker<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tests: Unit test Kind::from_bytes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>html_root_url.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Cursor's next method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>clean old movie pictures<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>git work<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![deny(dead_code)]\n#![feature(globs)]\n#![feature(macro_rules)]\n#![feature(phase)]\n\n\/\/! 
A user friendly graphics engine.\n\nextern crate time;\nextern crate graphics;\nextern crate log;\nextern crate collections;\nextern crate gl;\nextern crate glfw;\nextern crate sdl2;\nextern crate sdl2_mixer;\nextern crate image;\nextern crate libc;\nextern crate debug;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use Gl = gl_back_end::Gl;\npub use AssetStore = asset_store::AssetStore;\npub use Texture = texture::Texture;\n\npub use Music = music::Music;\npub use Sound = sound::Sound;\npub use AudioBackEnd = audio_back_end::AudioBackEnd;\npub use AudioSDL2 = sdl2_audio_back_end::AudioSDL2;\npub use MusicSDL2 = sdl2_audio_back_end::MusicSDL2;\npub use SoundSDL2 = sdl2_audio_back_end::SoundSDL2;\n\npub mod shader_utils;\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_sdl2;\nmod game_window_glfw;\nmod game_window_settings;\nmod gl_back_end;\nmod asset_store;\nmod texture;\n\nmod music;\nmod sound;\nmod audio_back_end;\nmod sdl2_audio_back_end;<commit_msg>Replaced `#[deny(dead_code)]` with `#[warn(dead_code)]`<commit_after>#![crate_id = \"piston\"]\n#![deny(missing_doc)]\n#![warn(dead_code)]\n#![feature(globs)]\n#![feature(macro_rules)]\n#![feature(phase)]\n\n\/\/! 
A user friendly graphics engine.\n\nextern crate time;\nextern crate graphics;\nextern crate log;\nextern crate collections;\nextern crate gl;\nextern crate glfw;\nextern crate sdl2;\nextern crate sdl2_mixer;\nextern crate image;\nextern crate libc;\nextern crate debug;\n\npub use Game = game::Game;\n\npub use Render = game_iterator::Render;\npub use Update = game_iterator::Update;\npub use KeyPress = game_iterator::KeyPress;\npub use KeyRelease = game_iterator::KeyRelease;\npub use MousePress = game_iterator::MousePress;\npub use MouseRelease = game_iterator::MouseRelease;\npub use MouseMove = game_iterator::MouseMove;\npub use MouseRelativeMove = game_iterator::MouseRelativeMove;\n\npub use GameEvent = game_iterator::GameEvent;\npub use GameIterator = game_iterator::GameIterator;\npub use GameIteratorSettings = game_iterator::GameIteratorSettings;\npub use RenderArgs = game_iterator::RenderArgs;\npub use UpdateArgs = game_iterator::UpdateArgs;\npub use KeyPressArgs = game_iterator::KeyPressArgs;\npub use KeyReleaseArgs = game_iterator::KeyReleaseArgs;\npub use MousePressArgs = game_iterator::MousePressArgs;\npub use MouseReleaseArgs = game_iterator::MouseReleaseArgs;\npub use MouseMoveArgs = game_iterator::MouseMoveArgs;\npub use MouseRelativeMoveArgs = game_iterator::MouseRelativeMoveArgs;\n\npub use GameWindow = game_window::GameWindow;\npub use GameWindowSDL2 = game_window_sdl2::GameWindowSDL2;\npub use GameWindowGLFW = game_window_glfw::GameWindowGLFW;\npub use GameWindowSettings = game_window_settings::GameWindowSettings;\npub use Gl = gl_back_end::Gl;\npub use AssetStore = asset_store::AssetStore;\npub use Texture = texture::Texture;\n\npub use Music = music::Music;\npub use Sound = sound::Sound;\npub use AudioBackEnd = audio_back_end::AudioBackEnd;\npub use AudioSDL2 = sdl2_audio_back_end::AudioSDL2;\npub use MusicSDL2 = sdl2_audio_back_end::MusicSDL2;\npub use SoundSDL2 = sdl2_audio_back_end::SoundSDL2;\n\npub mod shader_utils;\npub mod game_window;\npub mod keyboard;\npub mod event;\npub mod mouse;\n\nmod game;\nmod game_iterator;\nmod game_window_sdl2;\nmod game_window_glfw;\nmod game_window_settings;\nmod gl_back_end;\nmod asset_store;\nmod texture;\n\nmod music;\nmod sound;\nmod audio_back_end;\nmod sdl2_audio_back_end;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Update to the last rust-nightly.<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(macro_rules)]\n\nuse std::cmp::Ordering::{Greater, Equal, Less};\n\npub struct Slide<T: Iterator<Item=A>, A> {\n iter: T,\n n: usize,\n window: Vec<A>\n}\n\nmacro_rules! 
return_if(\n ($cond:expr, $value:expr) => (\n if $cond {\n return $value;\n }\n );\n);\n\nimpl<A: Clone, T: Iterator<Item=A>> Slide<T, A> {\n fn push_window(&mut self) -> bool {\n let iter_next = self.iter.next();\n let is_some = iter_next.is_some();\n\n if is_some {\n self.window.push(iter_next.unwrap());\n }\n\n is_some\n }\n\n fn new(iter: T, n: usize) -> Slide<T, A> {\n Slide{\n iter: iter,\n n: n,\n window: Vec::with_capacity(n + 1)\n }\n }\n}\n\nimpl<A: Clone, T: Iterator<Item=A>> Iterator for Slide<T, A> {\n type Item = Vec<A>;\n\n fn next(&mut self) -> Option<Vec<A>> {\n return_if!(self.n == 0, None);\n return_if!(!self.push_window(), None);\n\n loop {\n let window_status = self.window.len().cmp(&self.n);\n\n match window_status {\n Greater => { self.window.remove(0); }\n Equal => { return Some(self.window.clone()); }\n Less => { return_if!(!self.push_window(), None); }\n }\n }\n }\n}\n\npub trait SlideIterator<T: Iterator<Item=A>, A> {\n fn slide(self, n: usize) -> Slide<T, A>;\n}\n\nimpl<A: Clone, T: Iterator<Item=A>> SlideIterator<T, A> for T {\n fn slide(self, n: usize) -> Slide<T, A> {\n Slide::new(self, n)\n }\n}\n\n#[test]\nfn test_slide() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(3);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3]);\n assert_eq!(slide_iter.next().unwrap(), vec![2, 3, 4]);\n assert_eq!(slide_iter.next().unwrap(), vec![3, 4, 5]);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_equal_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(5);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3, 4, 5]);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_zero_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(0);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_overlong_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(7);\n assert!(slide_iter.next().is_none());\n}\n<commit_msg>Remove #![feature(macro_rules)] annotation<commit_after>use std::cmp::Ordering::{Greater, Equal, Less};\n\npub struct Slide<T: Iterator<Item=A>, A> {\n iter: T,\n n: usize,\n window: Vec<A>\n}\n\nmacro_rules! 
return_if(\n ($cond:expr, $value:expr) => (\n if $cond {\n return $value;\n }\n );\n);\n\nimpl<A: Clone, T: Iterator<Item=A>> Slide<T, A> {\n fn push_window(&mut self) -> bool {\n let iter_next = self.iter.next();\n let is_some = iter_next.is_some();\n\n if is_some {\n self.window.push(iter_next.unwrap());\n }\n\n is_some\n }\n\n fn new(iter: T, n: usize) -> Slide<T, A> {\n Slide{\n iter: iter,\n n: n,\n window: Vec::with_capacity(n + 1)\n }\n }\n}\n\nimpl<A: Clone, T: Iterator<Item=A>> Iterator for Slide<T, A> {\n type Item = Vec<A>;\n\n fn next(&mut self) -> Option<Vec<A>> {\n return_if!(self.n == 0, None);\n return_if!(!self.push_window(), None);\n\n loop {\n let window_status = self.window.len().cmp(&self.n);\n\n match window_status {\n Greater => { self.window.remove(0); }\n Equal => { return Some(self.window.clone()); }\n Less => { return_if!(!self.push_window(), None); }\n }\n }\n }\n}\n\npub trait SlideIterator<T: Iterator<Item=A>, A> {\n fn slide(self, n: usize) -> Slide<T, A>;\n}\n\nimpl<A: Clone, T: Iterator<Item=A>> SlideIterator<T, A> for T {\n fn slide(self, n: usize) -> Slide<T, A> {\n Slide::new(self, n)\n }\n}\n\n#[test]\nfn test_slide() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(3);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3]);\n assert_eq!(slide_iter.next().unwrap(), vec![2, 3, 4]);\n assert_eq!(slide_iter.next().unwrap(), vec![3, 4, 5]);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_equal_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(5);\n assert_eq!(slide_iter.next().unwrap(), vec![1, 2, 3, 4, 5]);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_zero_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(0);\n assert!(slide_iter.next().is_none());\n}\n\n#[test]\nfn test_slide_overlong_window() {\n let mut slide_iter = vec![1i8, 2, 3, 4, 5].into_iter().slide(7);\n assert!(slide_iter.next().is_none());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: Unit test Archive::read_entry_metadata_table<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License <LICENSE or http:\/\/opensource.org\/licenses\/MIT>.\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n\nextern crate libc;\n\nuse std::fmt;\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult<T> = Result<T, PortMidiError>;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Debug, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData 
=> Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\nimpl fmt::Display for PortMidiError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{:?}\", self)\n }\n}\n\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Debug)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::CStr::from_ptr((*device_info).name).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option<DeviceInfo> {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n\/\/\/\n\/\/\/ TODO: should we use u8?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiMessage {\n pub status: i8,\n pub data1: i8,\n pub data2: i8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as i8,\n data1 : (((cmessage) >> 8) & 0xFF) as i8,\n 
data2 : (((cmessage) >> 16) & 0xFF) as i8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult<Option<MidiEvent>> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option<ffi::PmError> = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult<bool> {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. 
Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port, this may\n \/\/\/ result in transmission of a partial midi message. Note, not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_Error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). 
Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::CStr::from_ptr(error_text).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\n<commit_msg>add features `core` and `std_misc`<commit_after>\/\/ Copyright 2014-2015 Sam Doshi (sam@metal-fish.co.uk)\n\/\/ Copyright 2013-2014 Philippe Delrieu (philippe.delrieu@free.fr)\n\/\/\n\/\/ Licensed under the MIT License <LICENSE or http:\/\/opensource.org\/licenses\/MIT>.\n\/\/ This file may not be copied, modified, or distributed except according to those terms.\n#![feature(core, std_misc)]\n\nextern crate libc;\n\nuse std::fmt;\nuse std::ptr;\nuse libc::c_char;\n\nmod ffi;\n\n\n\/\/ Types\n\/\/ -----\n\/\/\/ Used by PortMidi to refer to a Midi device\npub type PortMidiDeviceId = i32;\npub type PortMidiResult<T> = Result<T, PortMidiError>;\n\n\n\/\/ Errors\n\/\/ ------\n#[derive(Copy, Debug, PartialEq, Eq)]\npub enum PortMidiError {\n HostError,\n InvalidDeviceId,\n InsufficientMemory,\n BufferTooSmall,\n BufferOverflow,\n BadPtr,\n BadData,\n InternalError,\n BufferMaxSize\n}\n\nfn from_pm_error(pm_error: ffi::PmError) -> PortMidiResult<()> {\n match pm_error {\n ffi::PmError::PmNoError => Ok(()),\n ffi::PmError::PmGotData => Ok(()),\n ffi::PmError::PmHostError => Err(PortMidiError::HostError),\n ffi::PmError::PmInvalidDeviceId => Err(PortMidiError::InvalidDeviceId),\n ffi::PmError::PmInsufficientMemory => Err(PortMidiError::InsufficientMemory),\n ffi::PmError::PmBufferTooSmall => Err(PortMidiError::BufferTooSmall),\n ffi::PmError::PmBufferOverflow => Err(PortMidiError::BufferOverflow),\n ffi::PmError::PmBadPtr => Err(PortMidiError::BadPtr),\n ffi::PmError::PmBadData => Err(PortMidiError::BadData),\n ffi::PmError::PmInternalError => Err(PortMidiError::InternalError),\n ffi::PmError::PmBufferMaxSize => Err(PortMidiError::BufferMaxSize),\n }\n}\n\nimpl fmt::Display for PortMidiError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{:?}\", self)\n }\n}\n\n\n\/\/ Global fns\n\/\/ ----------\n\/\/\/ `initialize` initalizes the underlying PortMidi C library, call this\n\/\/\/ before using the library.\n\/\/\/\n\/\/\/ Once initialized, PortMidi will no longer pickup any new Midi devices that are\n\/\/\/ connected, i.e. 
it does not support hot plugging.\npub fn initialize() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Initialize()\n })\n}\n\n\/\/\/ `terminate` terminates the underlying PortMidi C library, call this\n\/\/\/ after using the library.\npub fn terminate() -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Terminate()\n })\n}\n\n\/\/\/ Return the number of devices. This number will not change during the lifetime\n\/\/\/ of the program.\npub fn count_devices() -> PortMidiDeviceId {\n unsafe {\n ffi::Pm_CountDevices()\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default input, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_input_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultInputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\/\/\/ Gets the `PortMidiDeviceId` for the default output, or `None` if\n\/\/\/ there isn't one\n\/\/\/\n\/\/\/ See the PortMidi documentation for details of how to set the default device\npub fn get_default_output_device_id() -> Option<PortMidiDeviceId> {\n let id = unsafe { ffi::Pm_GetDefaultOutputDeviceID() };\n if id == ffi::PM_NO_DEVICE {\n None\n }\n else {\n Some(id)\n }\n}\n\n\n\/\/ DeviceInfo\n\/\/ ----------\n\/\/\/ Represents what we know about a device\n#[derive(Clone, Debug)]\npub struct DeviceInfo {\n \/\/\/ The `PortMidiDeviceId` used with `OutputPort::new` and `InputPort::new`\n pub device_id: PortMidiDeviceId,\n \/\/\/ The name of the device\n pub name: String,\n \/\/\/ Is the device an input\n pub input: bool,\n \/\/\/ Is the device an output\n pub output: bool\n}\n\nimpl DeviceInfo {\n fn wrap(device_id: PortMidiDeviceId, device_info: *const ffi::PmDeviceInfo) -> DeviceInfo {\n let name = unsafe {\n let bytes = std::ffi::CStr::from_ptr((*device_info).name).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n };\n let input = unsafe { (*device_info).input };\n let output = unsafe { (*device_info).output };\n\n DeviceInfo {\n device_id: device_id,\n name: name,\n input: input > 0,\n output: output > 0\n }\n }\n}\n\n\/\/\/ Returns a `DeviceInfo` with information about a device, or `None` if\n\/\/\/ it does not exist\npub fn get_device_info(device_id: PortMidiDeviceId) -> Option<DeviceInfo> {\n let info = unsafe { ffi::Pm_GetDeviceInfo(device_id) };\n if info.is_null() {\n None\n }\n else {\n Some(DeviceInfo::wrap(device_id, info))\n }\n}\n\n\n\/\/ Midi events\n\/\/ -----------\n\/\/\/ Represents a single midi message, see also `MidiEvent`\n\/\/\/\n\/\/\/ TODO: should we use u8?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiMessage {\n pub status: i8,\n pub data1: i8,\n pub data2: i8,\n}\n\nimpl MidiMessage {\n fn wrap(cmessage : ffi::PmMessage) -> MidiMessage {\n MidiMessage {\n status: ((cmessage) & 0xFF) as i8,\n data1 : (((cmessage) >> 8) & 0xFF) as i8,\n data2 : (((cmessage) >> 16) & 0xFF) as i8,\n }\n }\n\n fn unwrap(&self) -> ffi::PmMessage {\n ((((self.data2 as i32) << 16) & 0xFF0000) |\n (((self.data1 as i32) << 8) & 0xFF00) |\n ((self.status as i32) & 0xFF)) as i32\n }\n}\n\n\/\/\/ Represents a time stamped midi event. 
See also `MidiMessage`\n\/\/\/\n\/\/\/ See the PortMidi documentation for how SysEx and midi realtime messages\n\/\/\/ are handled\n\/\/\/\n\/\/\/ TODO: what to do about the timestamp?\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\npub struct MidiEvent {\n pub message : MidiMessage,\n pub timestamp : ffi::PmTimestamp,\n}\n\nimpl MidiEvent {\n fn wrap(event: ffi::PmEvent) -> MidiEvent {\n MidiEvent {\n message: MidiMessage::wrap(event.message),\n timestamp : event.timestamp,\n }\n }\n\n fn unwrap(&self) -> ffi::PmEvent {\n ffi::PmEvent {\n message: self.message.unwrap(),\n timestamp: self.timestamp,\n }\n }\n}\n\n\n\/\/ Input\n\/\/ -----\n\/\/\/ Representation of an input midi port\n#[allow(missing_copy_implementations)]\npub struct InputPort {\n pm_stream : *const ffi::PortMidiStream,\n input_device : ffi::PmDeviceId,\n buffer_size : i32,\n}\n\nimpl InputPort {\n \/\/\/ Construct a new `InputPort` for `input_device`\n pub fn new(input_device : PortMidiDeviceId, buffer_size: i32) -> InputPort {\n InputPort {\n pm_stream : ptr::null(),\n input_device : input_device,\n buffer_size : buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenInput(&self.pm_stream, self.input_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null())\n })\n }\n\n \/\/\/ Reads a single `MidiEvent` if one is avaible\n \/\/\/\n \/\/\/ A `Result` of `None` means no event was available.\n \/\/\/\n \/\/\/ See the PortMidi documentation for information on how it deals with input\n \/\/\/ overflows\n pub fn read(&mut self) -> PortMidiResult<Option<MidiEvent>> {\n use std::num::FromPrimitive;\n \/\/get one note a the time\n let mut event = ffi::PmEvent { message : 0, timestamp : 0 };\n let no_of_notes = unsafe { ffi::Pm_Read(self.pm_stream, &mut event, 1) };\n match no_of_notes {\n y if y == 0 => Ok(None),\n y if y > 0 => Ok(Some(MidiEvent::wrap(event))),\n _ => {\n \/\/ if it's negative it's an error, convert it\n let maybe_pm_error: Option<ffi::PmError> = FromPrimitive::from_i32(no_of_notes);\n if let Some(pm_error) = maybe_pm_error {\n from_pm_error(pm_error).map(|_| None)\n }\n else {\n \/\/ what should we do, if we can't convert the error no?\n \/\/ should we panic?\n Ok(None)\n }\n }\n }\n }\n\n \/\/\/ `poll` tests if there is input available, either returing a bool or an error\n pub fn poll(&self) -> PortMidiResult<bool> {\n let pm_error = unsafe { ffi::Pm_Poll(self.pm_stream) };\n match pm_error {\n ffi::PmError::PmNoError => Ok(false),\n ffi::PmError::PmGotData => Ok(true),\n other => from_pm_error(other).map(|_| false)\n }\n }\n\n \/\/\/ Closes the input, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). 
If true,\n * the error can be accessed and cleared by calling get_host_error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Output\n\/\/ ------\n\/\/\/ Representation of an output midi port\n#[allow(missing_copy_implementations)]\npub struct OutputPort {\n pm_stream: *const ffi::PortMidiStream,\n output_device: ffi::PmDeviceId,\n buffer_size: i32,\n}\n\nimpl OutputPort {\n \/\/\/ Construct a new `OutputPort` for `output_device`\n pub fn new(output_device: PortMidiDeviceId, buffer_size: i32) -> OutputPort {\n OutputPort {\n pm_stream: ptr::null(),\n output_device: output_device,\n buffer_size: buffer_size,\n }\n }\n\n \/\/\/ Open the port returning an error if there is a problem\n pub fn open(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_OpenOutput(&self.pm_stream, self.output_device, ptr::null(),\n self.buffer_size, ptr::null(), ptr::null(), 0)\n })\n }\n\n \/\/\/ Terminates outgoing messages immediately\n \/\/\/\n \/\/\/ The caller should immediately close the output port; this may\n \/\/\/ result in transmission of a partial midi message. Note that not all platforms\n \/\/\/ support abort.\n pub fn abort(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Abort(self.pm_stream)\n })\n }\n\n \/\/\/ Closes the midi stream, flushing any pending buffers\n \/\/\/\n \/\/\/ PortMidi attempts to close open streams when the application\n \/\/\/ exits, but this can be difficult under Windows\n \/\/\/ (according to the PortMidi documentation).\n pub fn close(&mut self) -> PortMidiResult<()> {\n from_pm_error(unsafe {\n ffi::Pm_Close(self.pm_stream)\n })\n }\n\n \/\/\/ Write a single `MidiEvent`\n pub fn write_event(&mut self, midi_event: MidiEvent) -> PortMidiResult<()> {\n let event = midi_event.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_Write(self.pm_stream, &event, 1)\n })\n }\n\n \/\/\/ Write a single `MidiMessage` immediately\n pub fn write_message(&mut self, midi_message: MidiMessage) -> PortMidiResult<()> {\n let message = midi_message.unwrap();\n from_pm_error(unsafe {\n ffi::Pm_WriteShort(self.pm_stream, 0, message)\n })\n }\n\n \/*\n * Test whether stream has a pending host error. Normally, the client finds\n * out about errors through returned error codes, but some errors can occur\n * asynchronously where the client does not\n * explicitly call a function, and therefore cannot receive an error code.\n * The client can test for a pending error using has_host_error(). If true,\n * the error can be accessed and cleared by calling get_host_error_text().\n * Errors are also cleared by calling other functions that can return\n * errors, e.g. open_input(), open_output(), read(), write(). The\n * client does not need to call Pm_HasHostError(). Any pending error will be\n * reported the next time the client performs an explicit function call on\n * the stream, e.g. an input or output operation. 
Until the error is cleared,\n * no new error codes will be obtained, even for a different stream.\n *\/\n pub fn has_host_error(&self) -> bool {\n unsafe {\n ffi::Pm_HasHostError(self.pm_stream) > 0\n }\n }\n}\n\n\n\/\/ Old code\n\/\/ --------\n\/** Translate portmidi error number into human readable message.\n* These strings are constants (set at compile time) so client has\n* no need to allocate storage\n*\/\npub fn get_error_text(error_code: ffi::PmError) -> String {\n unsafe {\n let error_text = ffi::Pm_GetErrorText(error_code);\n let bytes = std::ffi::CStr::from_ptr(error_text).to_bytes();\n std::str::from_utf8_unchecked(bytes).to_string()\n }\n}\n\n\/** Translate portmidi host error into human readable message.\n These strings are computed at run time, so client has to allocate storage.\n After this routine executes, the host error is cleared.\n*\/\npub fn get_host_error_text(msg: *const c_char, len: i32) {\n unsafe {\n ffi::Pm_GetHostErrorText(msg, len);\n }\n}\n\npub const HDRLENGTH: i32 = 50;\npub const PM_HOST_ERROR_MSG_LEN: i32 = 256;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>I'm sorry<commit_after>pub mod build;\npub mod bundle;\npub mod util;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>12 - use import<commit_after>\/\/ Bind the `deeply::nested::function` path to `other_function`\nuse deeply::nested::function as other_function;\n\nfn function() {\n println!(\"called `function()`\");\n}\n\nmod deeply {\n pub mod nested {\n pub fn function() {\n println!(\"called `deeply::nested::function()`\")\n }\n }\n}\n\nfn main() {\n \/\/ Easier access to `deeply::nested::function`\n other_function();\n\n println!(\"Entering block\");\n {\n \/\/ This is equivalent to `use deeply::nested::function as function`\n \/\/ This `function` will shadow the outer one\n use deeply::nested::function;\n\n function();\n\n println!(\"Leaving block\");\n\n \/\/ `use` bindings have a local scope, in this case the `function`\n \/\/ shadowing is only available in this scope\n }\n\n function();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add new<commit_after>fn add_strings(nums1:) -> i32 {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add a publisher_confirms example<commit_after>use futures_executor::LocalPool;\nuse lapin::{\n message::DeliveryResult, options::*, publisher_confirm::Confirmation, types::FieldTable,\n BasicProperties, Connection, ConnectionProperties,\n};\nuse log::info;\n\nfn main() {\n std::env::set_var(\"RUST_LOG\", \"trace\");\n\n env_logger::init();\n\n let addr = std::env::var(\"AMQP_ADDR\").unwrap_or_else(|_| \"amqp:\/\/127.0.0.1:5672\/%2f\".into());\n let mut executor = LocalPool::new();\n\n executor.run_until(async {\n let conn = Connection::connect(&addr, ConnectionProperties::default())\n .await\n .expect(\"connection error\");\n\n info!(\"CONNECTED\");\n\n \/\/send channel\n let channel_a = conn.create_channel().await.expect(\"create_channel\");\n \/\/receive channel\n let channel_b = conn.create_channel().await.expect(\"create_channel\");\n info!(\"[{}] state: {:?}\", line!(), conn.status().state());\n\n \/\/create the hello queue\n let queue = channel_a\n .queue_declare(\n \"hello\",\n QueueDeclareOptions::default(),\n FieldTable::default(),\n )\n .await\n .expect(\"queue_declare\");\n info!(\"[{}] state: {:?}\", line!(), conn.status().state());\n info!(\"[{}] declared queue: {:?}\", line!(), queue);\n\n let queue = channel_a\n .confirm_select(ConfirmSelectOptions::default())\n .await\n .expect(\"confirm_select\");\n info!(\"[{}] state: 
{:?}\", line!(), conn.status().state());\n info!(\"Enabled publisher-confirms: {:?}\", queue);\n\n let chan = channel_b.clone();\n info!(\"will consume\");\n channel_b\n .basic_consume(\n \"hello\",\n \"my_consumer\",\n BasicConsumeOptions::default(),\n FieldTable::default(),\n )\n .await\n .expect(\"basic_consume\")\n .set_delegate(Box::new(move |delivery: DeliveryResult| {\n info!(\"received message: {:?}\", delivery);\n if let Ok(Some(delivery)) = delivery {\n chan.basic_ack(delivery.delivery_tag, BasicAckOptions::default())\n .wait() \/\/ await is hard to handle here\n .expect(\"basic_ack\");\n }\n }));\n info!(\"[{}] state: {:?}\", line!(), conn.status().state());\n\n info!(\"will publish\");\n let payload = b\"Hello world!\";\n let confirm = channel_a\n .basic_publish(\n \"\",\n \"hello\",\n BasicPublishOptions::default(),\n payload.to_vec(),\n BasicProperties::default(),\n )\n .await\n .expect(\"basic_publish\")\n .await; \/\/ Wait for this specific ack\/nack\n assert_eq!(confirm, Confirmation::Ack);\n info!(\"[{}] state: {:?}\", line!(), conn.status().state());\n\n for _ in 1..=2 {\n channel_a\n .basic_publish(\n \"\",\n \"hello\",\n BasicPublishOptions::default(),\n payload.to_vec(),\n BasicProperties::default(),\n )\n .await\n .expect(\"basic_publish\"); \/\/ Drop the PublisherConfirm instead for waiting for it ...\n }\n\n \/\/ ... and wait for all pending ack\/nack afterwards instead of individually in the above loop\n let returned = channel_a\n .wait_for_confirms()\n .await\n .expect(\"wait for confirms\");\n assert!(returned.is_empty());\n\n std::thread::sleep(std::time::Duration::from_millis(2000));\n conn.close(200, \"OK\").await.expect(\"connection close\");\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Renamed example.rs from src\/<commit_after>extern crate orbclient;\n\nuse std::cmp::max;\nuse std::env;\n\nuse orbclient::{BmpFile, Color, EventOption, Window};\n\nfn main() {\n let path = match env::args().nth(1) {\n Some(arg) => arg,\n None => \"res\/redox.bmp\".to_string(),\n };\n\n let bmp = BmpFile::from_path(&path);\n let mut window = Window::new(-1,\n -1,\n max(32, bmp.width() as u32),\n max(32, bmp.height() as u32),\n &path)\n .unwrap();\n window.set(Color::rgb(0, 0, 0));\n window.image(0, 0, bmp.width() as u32, bmp.height() as u32, &bmp);\n window.sync();\n\n loop {\n for event in window.events() {\n println!(\"{:?}\", event.to_option());\n if let EventOption::Quit(_) = event.to_option() {\n return;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>writer mod: rename Entry::name to Entry::path<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Do Problem 08 with cheat<commit_after>fn main() {\n let mut list =\n ~['a', 'a', 'a', 'a', 'b', 'c', 'c', 'a', 'a', 'd', 'e', 'e', 'e', 'e'];\n\n list.dedup(); \/\/ Cheat\n println!(\"{:?}\", list);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Editor commands should be split at whitespace<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple utility to show frontend properties.<commit_after>\/\/ Copyright 2015 Ilkka Rauta\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT 
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nextern crate linuxdvb;\n\nuse std::error::Error;\nuse std::path::Path;\n\nuse linuxdvb::{Frontend,ReadWriteMode,BlockMode};\nuse linuxdvb::properties::GetProperty as GP;\n\ntype SimpleResult<T> = Result<T, Box<Error>>;\n\nfn show_frontend_properties(path: &Path) -> SimpleResult<()> {\n let frontend = try!(Frontend::open(path, ReadWriteMode::ReadOnly, BlockMode::NonBlocking));\n let get_properties = [\n GP::DtvApiVersion,\n GP::DtvAtscmhFicVer,\n GP::DtvAtscmhNog,\n GP::DtvAtscmhParadeId,\n GP::DtvAtscmhPrc,\n GP::DtvAtscmhRsCodeModePri,\n GP::DtvAtscmhRsCodeModeSec,\n GP::DtvAtscmhRsFrameEnsemble,\n GP::DtvAtscmhRsFrameMode,\n GP::DtvAtscmhScccBlockMode,\n GP::DtvAtscmhScccCodeModeA,\n GP::DtvAtscmhScccCodeModeB,\n GP::DtvAtscmhScccCodeModeC,\n GP::DtvAtscmhScccCodeModeD,\n GP::DtvAtscmhSgn,\n GP::DtvAtscmhTnog,\n GP::DtvBandwidthHz,\n GP::DtvCodeRateHp,\n GP::DtvCodeRateLp,\n GP::DtvDeliverySystem,\n GP::DtvFrequency,\n GP::DtvGuardInterval,\n GP::DtvHierarchy,\n GP::DtvInnerFec,\n GP::DtvInterleaving,\n GP::DtvInversion,\n GP::DtvIsdbtLayerEnabled,\n GP::DtvIsdbtLayeraFec,\n GP::DtvIsdbtLayeraModulation,\n GP::DtvIsdbtLayeraSegmentCount,\n GP::DtvIsdbtLayeraTimeInterleaving,\n GP::DtvIsdbtLayerbFec,\n GP::DtvIsdbtLayerbModulation,\n GP::DtvIsdbtLayerbSegmentCount,\n GP::DtvIsdbtLayerbTimeInterleaving,\n GP::DtvIsdbtLayercFec,\n GP::DtvIsdbtLayercModulation,\n GP::DtvIsdbtLayercSegmentCount,\n GP::DtvIsdbtLayercTimeInterleaving,\n GP::DtvIsdbtPartialReception,\n GP::DtvIsdbtSbSegmentCount,\n GP::DtvIsdbtSbSegmentIdx,\n GP::DtvIsdbtSbSubchannelId,\n GP::DtvIsdbtSoundBroadcasting,\n GP::DtvLna,\n GP::DtvModulation,\n GP::DtvPilot,\n GP::DtvRolloff,\n GP::DtvStatCnr,\n GP::DtvStatErrorBlockCount,\n GP::DtvStatPostErrorBitCount,\n GP::DtvStatPostTotalBitCount,\n GP::DtvStatPreErrorBitCount,\n GP::DtvStatPreTotalBitCount,\n GP::DtvStatSignalStrength,\n GP::DtvStatTotalBlockCount,\n GP::DtvStreamId,\n GP::DtvSymbolRate,\n GP::DtvTone,\n GP::DtvTransmissionMode,\n GP::DtvVoltage,\n GP::DtvEnumDelsys,\n ];\n let properties = try!(frontend.get_properties(&get_properties));\n println!(\"{:#?}\", properties);\n Ok(())\n}\n\n\nfn main() {\n let string_path = match std::env::args().nth(1) {\n Some(string_path) => string_path,\n None => {\n println!(\"Specify frontend device\");\n return;\n }\n };\n if let Err(error) = show_frontend_properties(Path::new(&string_path)) {\n println!(\"Error: {:?}\", error);\n };\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::io;\nuse std::collections::{HashMap, HashSet};\nuse std::fs::File;\nuse std::io::ErrorKind;\nuse std::io::{Seek, Write, SeekFrom};\nuse std::fs::{OpenOptions, read_dir};\nuse std::os::unix::prelude::AsRawFd;\nuse std::path::{Path, PathBuf};\nuse std::str::FromStr;\nuse std::thread;\nuse std::time::Duration;\n\nuse devicemapper::consts::SECTOR_SIZE;\n\nuse devicemapper::Device;\nuse devicemapper::types::{Bytes, Sectors};\nuse time::Timespec;\nuse uuid::Uuid;\n\nuse consts::IEC;\nuse engine::{DevUuid, EngineResult, EngineError, ErrorEnum, PoolUuid};\nuse super::metadata::{StaticHeader, BDA, validate_mda_size};\nuse super::engine::DevOwnership;\npub use super::BlockDevSave;\n\nconst MIN_DEV_SIZE: Bytes = Bytes(IEC::Gi as u64);\n\nioctl!(read blkgetsize64 with 0x12, 114; u64);\n\npub fn blkdev_size(file: &File) -> EngineResult<Bytes> {\n let mut val: u64 = 0;\n\n match unsafe { blkgetsize64(file.as_raw_fd(), &mut val) } {\n Err(x) => Err(EngineError::Nix(x)),\n Ok(_) => Ok(Bytes(val)),\n }\n}\n\n\/\/\/ Resolve a list of Paths of some sort to a set of unique Devices.\n\/\/\/ Return an IOError if there was a problem resolving any particular device.\npub fn resolve_devices(paths: &[&Path]) -> io::Result<HashSet<Device>> {\n let mut devices = HashSet::new();\n for path in paths {\n let dev = try!(Device::from_str(&path.to_string_lossy()));\n devices.insert(dev);\n }\n Ok(devices)\n}\n\n\/\/\/ Find all Stratis Blockdevs.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to maps of blockdev uuids to blockdevs.\npub fn find_all() -> EngineResult<HashMap<PoolUuid, HashMap<DevUuid, BlockDev>>> {\n\n \/\/\/ If a Path refers to a valid Stratis blockdev, return a BlockDev\n \/\/\/ struct. Otherwise, return None. Return an error if there was\n \/\/\/ a problem inspecting the device.\n fn setup(devnode: &Path) -> EngineResult<Option<BlockDev>> {\n let mut f = try!(OpenOptions::new()\n .read(true)\n .open(devnode));\n\n if let Some(bda) = BDA::load(&mut f).ok() {\n let dev = try!(Device::from_str(&devnode.to_string_lossy()));\n Ok(Some(BlockDev {\n dev: dev,\n devnode: devnode.to_owned(),\n bda: bda,\n }))\n } else {\n Ok(None)\n }\n }\n\n let mut pool_map = HashMap::new();\n for dir_e in try!(read_dir(\"\/dev\")) {\n let devnode = match dir_e {\n Ok(d) => d.path(),\n Err(_) => continue,\n };\n\n match setup(&devnode) {\n Ok(Some(blockdev)) => {\n pool_map.entry(blockdev.pool_uuid().clone())\n .or_insert_with(HashMap::new)\n .insert(blockdev.uuid().clone(), blockdev);\n }\n _ => continue,\n };\n }\n\n Ok(pool_map)\n}\n\n\/\/\/ Write buf at offset length times.\npub fn write_sectors(path: &Path,\n offset: Sectors,\n length: Sectors,\n buf: &[u8; SECTOR_SIZE])\n -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(path));\n\n try!(f.seek(SeekFrom::Start(*offset)));\n for _ in 0..*length {\n try!(f.write_all(buf));\n }\n\n try!(f.flush());\n Ok(())\n}\n\n\/\/\/ Zero sectors at the given offset for length sectors.\npub fn wipe_sectors(path: &Path, offset: Sectors, length: Sectors) -> EngineResult<()> {\n write_sectors(path, offset, length, &[0u8; SECTOR_SIZE])\n}\n\n\/\/\/ Initialize multiple blockdevs at once. 
This allows all of them\n\/\/\/ to be checked for usability before writing to any of them.\npub fn initialize(pool_uuid: &PoolUuid,\n devices: HashSet<Device>,\n mda_size: Sectors,\n force: bool)\n -> EngineResult<Vec<BlockDev>> {\n \/\/\/ Gets device information, returns an error if problem with obtaining\n \/\/\/ that information.\n \/\/\/ Returns a tuple with the blockdev's path, its size in bytes,\n \/\/\/ its ownership as determined by calling determine_ownership(),\n \/\/\/ and an open File handle, all of which are needed later.\n fn dev_info(dev: &Device) -> EngineResult<(PathBuf, Bytes, DevOwnership, File)> {\n let devnode = try!(dev.path().ok_or_else(|| {\n io::Error::new(ErrorKind::InvalidInput,\n format!(\"could not get device node from dev {}\", dev.dstr()))\n }));\n let mut f = try!(OpenOptions::new()\n .read(true)\n .write(true)\n .open(&devnode)\n .map_err(|_| {\n io::Error::new(ErrorKind::PermissionDenied,\n format!(\"Could not open {}\", devnode.display()))\n }));\n\n let dev_size = try!(blkdev_size(&f));\n\n let ownership = match StaticHeader::determine_ownership(&mut f) {\n Ok(ownership) => ownership,\n Err(err) => {\n let error_message = format!(\"{} for device {}\", err, devnode.display());\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_message));\n }\n };\n\n Ok((devnode, dev_size, ownership, f))\n }\n\n \/\/\/ Filter devices for admission to pool based on dev_infos.\n \/\/\/ If there is an error finding out the info, return that error.\n \/\/\/ Also, return an error if a device is not appropriate for this pool.\n fn filter_devs<I>(dev_infos: I,\n pool_uuid: &PoolUuid,\n force: bool)\n -> EngineResult<Vec<(Device, (PathBuf, Bytes, File))>>\n where I: Iterator<Item = (Device, EngineResult<(PathBuf, Bytes, DevOwnership, File)>)>\n {\n let mut add_devs = Vec::new();\n for (dev, dev_result) in dev_infos {\n let (devnode, dev_size, ownership, f) = try!(dev_result);\n if dev_size < MIN_DEV_SIZE {\n let error_message = format!(\"{} too small, minimum {} bytes\",\n devnode.display(),\n MIN_DEV_SIZE);\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_message));\n };\n match ownership {\n DevOwnership::Unowned => add_devs.push((dev, (devnode, dev_size, f))),\n DevOwnership::Theirs => {\n if !force {\n let error_str = format!(\"First 8K of {} not zeroed\", devnode.display());\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_str));\n } else {\n add_devs.push((dev, (devnode, dev_size, f)))\n }\n }\n DevOwnership::Ours(uuid) => {\n if *pool_uuid != uuid {\n let error_str = format!(\"Device {} already belongs to Stratis pool {}\",\n devnode.display(),\n uuid);\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_str));\n }\n }\n }\n }\n Ok(add_devs)\n }\n\n try!(validate_mda_size(mda_size));\n\n let dev_infos = devices.into_iter().map(|d: Device| (d, dev_info(&d)));\n\n let add_devs = try!(filter_devs(dev_infos, pool_uuid, force));\n\n \/\/ TODO: Fix this code. 
We should deal with any number of blockdevs\n \/\/\n if add_devs.len() < 2 {\n return Err(EngineError::Engine(ErrorEnum::Error,\n \"Need at least 2 blockdevs to create a pool\".into()));\n }\n\n let mut bds = Vec::new();\n for (dev, (devnode, dev_size, mut f)) in add_devs {\n\n let bda = try!(BDA::initialize(&mut f,\n pool_uuid,\n &Uuid::new_v4(),\n mda_size,\n dev_size.sectors()));\n\n let bd = BlockDev {\n dev: dev,\n devnode: devnode.clone(),\n bda: bda,\n };\n bds.push(bd);\n }\n Ok(bds)\n}\n\n\n#[derive(Debug)]\npub struct BlockDev {\n dev: Device,\n pub devnode: PathBuf,\n bda: BDA,\n}\n\nimpl BlockDev {\n pub fn to_save(&self) -> BlockDevSave {\n BlockDevSave {\n devnode: self.devnode.clone(),\n total_size: self.size(),\n }\n }\n\n pub fn wipe_metadata(self) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n BDA::wipe(&mut f)\n }\n\n \/\/\/ Get the \"x:y\" device string for this blockdev\n pub fn dstr(&self) -> String {\n self.dev.dstr()\n }\n\n pub fn save_state(&mut self, time: &Timespec, metadata: &[u8]) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n self.bda.save_state(time, metadata, &mut f)\n }\n\n pub fn load_state(&self) -> EngineResult<Option<Vec<u8>>> {\n let mut f = try!(OpenOptions::new().read(true).open(&self.devnode));\n self.bda.load_state(&mut f)\n }\n\n \/\/\/ List the available-for-upper-layer-use range in this blockdev.\n pub fn avail_range(&self) -> (Sectors, Sectors) {\n let start = self.bda.size();\n let size = self.size();\n \/\/ Blockdev size is at least MIN_DEV_SIZE, so this can fail only if\n \/\/ size of metadata area exceeds 1 GiB. Initial metadata area size\n \/\/ is 4 MiB.\n assert!(start <= size);\n (start, size - start)\n }\n\n \/\/\/ The \/dev\/mapper\/<name> device is not immediately available for use.\n \/\/\/ TODO: Implement wait for event or poll.\n pub fn wait_for_dm() {\n thread::sleep(Duration::from_millis(500))\n }\n\n \/\/\/ The device's UUID.\n pub fn uuid(&self) -> &DevUuid {\n self.bda.dev_uuid()\n }\n\n \/\/\/ The device's pool's UUID.\n pub fn pool_uuid(&self) -> &PoolUuid {\n self.bda.pool_uuid()\n }\n\n \/\/\/ The device's size.\n pub fn size(&self) -> Sectors {\n self.bda.dev_size()\n }\n\n \/\/\/ Last time metadata was written to this device.\n pub fn last_update_time(&self) -> Option<&Timespec> {\n self.bda.last_update_time()\n }\n}\n<commit_msg>Add function to get the available range as a segment<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::io;\nuse std::collections::{HashMap, HashSet};\nuse std::fs::File;\nuse std::io::ErrorKind;\nuse std::io::{Seek, Write, SeekFrom};\nuse std::fs::{OpenOptions, read_dir};\nuse std::os::unix::prelude::AsRawFd;\nuse std::path::{Path, PathBuf};\nuse std::str::FromStr;\nuse std::thread;\nuse std::time::Duration;\n\nuse devicemapper::consts::SECTOR_SIZE;\nuse devicemapper::Device;\nuse devicemapper::segment::Segment;\nuse devicemapper::types::{Bytes, Sectors};\nuse time::Timespec;\nuse uuid::Uuid;\n\nuse consts::IEC;\nuse engine::{DevUuid, EngineResult, EngineError, ErrorEnum, PoolUuid};\nuse super::metadata::{StaticHeader, BDA, validate_mda_size};\nuse super::engine::DevOwnership;\npub use super::BlockDevSave;\n\nconst MIN_DEV_SIZE: Bytes = Bytes(IEC::Gi as u64);\n\nioctl!(read blkgetsize64 with 0x12, 114; u64);\n\npub fn blkdev_size(file: &File) -> EngineResult<Bytes> {\n let mut val: u64 = 0;\n\n match unsafe { blkgetsize64(file.as_raw_fd(), &mut val) } {\n Err(x) => Err(EngineError::Nix(x)),\n Ok(_) => Ok(Bytes(val)),\n }\n}\n\n\/\/\/ Resolve a list of Paths of some sort to a set of unique Devices.\n\/\/\/ Return an IOError if there was a problem resolving any particular device.\npub fn resolve_devices(paths: &[&Path]) -> io::Result<HashSet<Device>> {\n let mut devices = HashSet::new();\n for path in paths {\n let dev = try!(Device::from_str(&path.to_string_lossy()));\n devices.insert(dev);\n }\n Ok(devices)\n}\n\n\/\/\/ Find all Stratis Blockdevs.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to maps of blockdev uuids to blockdevs.\npub fn find_all() -> EngineResult<HashMap<PoolUuid, HashMap<DevUuid, BlockDev>>> {\n\n \/\/\/ If a Path refers to a valid Stratis blockdev, return a BlockDev\n \/\/\/ struct. Otherwise, return None. Return an error if there was\n \/\/\/ a problem inspecting the device.\n fn setup(devnode: &Path) -> EngineResult<Option<BlockDev>> {\n let mut f = try!(OpenOptions::new()\n .read(true)\n .open(devnode));\n\n if let Some(bda) = BDA::load(&mut f).ok() {\n let dev = try!(Device::from_str(&devnode.to_string_lossy()));\n Ok(Some(BlockDev {\n dev: dev,\n devnode: devnode.to_owned(),\n bda: bda,\n }))\n } else {\n Ok(None)\n }\n }\n\n let mut pool_map = HashMap::new();\n for dir_e in try!(read_dir(\"\/dev\")) {\n let devnode = match dir_e {\n Ok(d) => d.path(),\n Err(_) => continue,\n };\n\n match setup(&devnode) {\n Ok(Some(blockdev)) => {\n pool_map.entry(blockdev.pool_uuid().clone())\n .or_insert_with(HashMap::new)\n .insert(blockdev.uuid().clone(), blockdev);\n }\n _ => continue,\n };\n }\n\n Ok(pool_map)\n}\n\n\/\/\/ Write buf at offset length times.\npub fn write_sectors(path: &Path,\n offset: Sectors,\n length: Sectors,\n buf: &[u8; SECTOR_SIZE])\n -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(path));\n\n try!(f.seek(SeekFrom::Start(*offset)));\n for _ in 0..*length {\n try!(f.write_all(buf));\n }\n\n try!(f.flush());\n Ok(())\n}\n\n\/\/\/ Zero sectors at the given offset for length sectors.\npub fn wipe_sectors(path: &Path, offset: Sectors, length: Sectors) -> EngineResult<()> {\n write_sectors(path, offset, length, &[0u8; SECTOR_SIZE])\n}\n\n\/\/\/ Initialize multiple blockdevs at once. 
This allows all of them\n\/\/\/ to be checked for usability before writing to any of them.\npub fn initialize(pool_uuid: &PoolUuid,\n devices: HashSet<Device>,\n mda_size: Sectors,\n force: bool)\n -> EngineResult<Vec<BlockDev>> {\n \/\/\/ Gets device information, returns an error if problem with obtaining\n \/\/\/ that information.\n \/\/\/ Returns a tuple with the blockdev's path, its size in bytes,\n \/\/\/ its ownership as determined by calling determine_ownership(),\n \/\/\/ and an open File handle, all of which are needed later.\n fn dev_info(dev: &Device) -> EngineResult<(PathBuf, Bytes, DevOwnership, File)> {\n let devnode = try!(dev.path().ok_or_else(|| {\n io::Error::new(ErrorKind::InvalidInput,\n format!(\"could not get device node from dev {}\", dev.dstr()))\n }));\n let mut f = try!(OpenOptions::new()\n .read(true)\n .write(true)\n .open(&devnode)\n .map_err(|_| {\n io::Error::new(ErrorKind::PermissionDenied,\n format!(\"Could not open {}\", devnode.display()))\n }));\n\n let dev_size = try!(blkdev_size(&f));\n\n let ownership = match StaticHeader::determine_ownership(&mut f) {\n Ok(ownership) => ownership,\n Err(err) => {\n let error_message = format!(\"{} for device {}\", err, devnode.display());\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_message));\n }\n };\n\n Ok((devnode, dev_size, ownership, f))\n }\n\n \/\/\/ Filter devices for admission to pool based on dev_infos.\n \/\/\/ If there is an error finding out the info, return that error.\n \/\/\/ Also, return an error if a device is not appropriate for this pool.\n fn filter_devs<I>(dev_infos: I,\n pool_uuid: &PoolUuid,\n force: bool)\n -> EngineResult<Vec<(Device, (PathBuf, Bytes, File))>>\n where I: Iterator<Item = (Device, EngineResult<(PathBuf, Bytes, DevOwnership, File)>)>\n {\n let mut add_devs = Vec::new();\n for (dev, dev_result) in dev_infos {\n let (devnode, dev_size, ownership, f) = try!(dev_result);\n if dev_size < MIN_DEV_SIZE {\n let error_message = format!(\"{} too small, minimum {} bytes\",\n devnode.display(),\n MIN_DEV_SIZE);\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_message));\n };\n match ownership {\n DevOwnership::Unowned => add_devs.push((dev, (devnode, dev_size, f))),\n DevOwnership::Theirs => {\n if !force {\n let error_str = format!(\"First 8K of {} not zeroed\", devnode.display());\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_str));\n } else {\n add_devs.push((dev, (devnode, dev_size, f)))\n }\n }\n DevOwnership::Ours(uuid) => {\n if *pool_uuid != uuid {\n let error_str = format!(\"Device {} already belongs to Stratis pool {}\",\n devnode.display(),\n uuid);\n return Err(EngineError::Engine(ErrorEnum::Invalid, error_str));\n }\n }\n }\n }\n Ok(add_devs)\n }\n\n try!(validate_mda_size(mda_size));\n\n let dev_infos = devices.into_iter().map(|d: Device| (d, dev_info(&d)));\n\n let add_devs = try!(filter_devs(dev_infos, pool_uuid, force));\n\n \/\/ TODO: Fix this code. 
We should deal with any number of blockdevs\n \/\/\n if add_devs.len() < 2 {\n return Err(EngineError::Engine(ErrorEnum::Error,\n \"Need at least 2 blockdevs to create a pool\".into()));\n }\n\n let mut bds = Vec::new();\n for (dev, (devnode, dev_size, mut f)) in add_devs {\n\n let bda = try!(BDA::initialize(&mut f,\n pool_uuid,\n &Uuid::new_v4(),\n mda_size,\n dev_size.sectors()));\n\n let bd = BlockDev {\n dev: dev,\n devnode: devnode.clone(),\n bda: bda,\n };\n bds.push(bd);\n }\n Ok(bds)\n}\n\n\n#[derive(Debug)]\npub struct BlockDev {\n dev: Device,\n pub devnode: PathBuf,\n bda: BDA,\n}\n\nimpl BlockDev {\n pub fn to_save(&self) -> BlockDevSave {\n BlockDevSave {\n devnode: self.devnode.clone(),\n total_size: self.size(),\n }\n }\n\n pub fn wipe_metadata(self) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n BDA::wipe(&mut f)\n }\n\n \/\/\/ Get the \"x:y\" device string for this blockdev\n pub fn dstr(&self) -> String {\n self.dev.dstr()\n }\n\n pub fn save_state(&mut self, time: &Timespec, metadata: &[u8]) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n self.bda.save_state(time, metadata, &mut f)\n }\n\n pub fn load_state(&self) -> EngineResult<Option<Vec<u8>>> {\n let mut f = try!(OpenOptions::new().read(true).open(&self.devnode));\n self.bda.load_state(&mut f)\n }\n\n \/\/\/ List the available-for-upper-layer-use range in this blockdev.\n fn avail_range(&self) -> (Sectors, Sectors) {\n let start = self.bda.size();\n let size = self.size();\n \/\/ Blockdev size is at least MIN_DEV_SIZE, so this can fail only if\n \/\/ size of metadata area exceeds 1 GiB. Initial metadata area size\n \/\/ is 4 MiB.\n assert!(start <= size);\n (start, size - start)\n }\n\n \/\/\/ Return the available range as a segment\n pub fn avail_range_segment(&self) -> Segment {\n let (start, length) = self.avail_range();\n Segment::new(self.dev, start, length)\n }\n\n \/\/\/ The \/dev\/mapper\/<name> device is not immediately available for use.\n \/\/\/ TODO: Implement wait for event or poll.\n pub fn wait_for_dm() {\n thread::sleep(Duration::from_millis(500))\n }\n\n \/\/\/ The device's UUID.\n pub fn uuid(&self) -> &DevUuid {\n self.bda.dev_uuid()\n }\n\n \/\/\/ The device's pool's UUID.\n pub fn pool_uuid(&self) -> &PoolUuid {\n self.bda.pool_uuid()\n }\n\n \/\/\/ The device's size.\n pub fn size(&self) -> Sectors {\n self.bda.dev_size()\n }\n\n \/\/\/ Last time metadata was written to this device.\n pub fn last_update_time(&self) -> Option<&Timespec> {\n self.bda.last_update_time()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\n\n\/\/ nvp implementation version\npub const NV_VERSION: i32 = 0;\n\n\/\/ nvlist header\n\/\/#[derive(Debug)]\npub struct NvList {\n pub version: i32,\n pub nvflag: u32, \/\/ persistent flags\n pub pairs: Vec<(String, NvValue)>,\n}\n\nimpl NvList {\n pub fn new(nvflag: u32) -> Self {\n NvList {\n version: NV_VERSION,\n nvflag: nvflag,\n pairs: Vec::new(),\n }\n }\n\n pub fn find(&self, name: &str) -> Option<&NvValue> {\n for pair in &self.pairs {\n if pair.0.as_str() == name {\n return Some(&pair.1);\n }\n }\n None\n }\n}\n\nimpl fmt::Debug for NvList {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"NvList {{ version: {:X}, nvflag: {:X}, pairs: [\\n\", self.version, self.nvflag));\n for &(ref name, ref value) in &self.pairs {\n try!(write!(f, \"{} : {:?}\\n\", name, value));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n\/\/ TODO Auto 
implement Debug. format! currently crashes with big u32 values\n\/\/#[derive(Debug)]\npub enum NvValue {\n Unknown,\n Boolean,\n Byte(u8),\n Int16(i16),\n Uint16(u16),\n Int32(i32),\n Uint32(u32),\n Int64(i64),\n Uint64(u64),\n String(String),\n ByteArray(Vec<u8>),\n Int16Array(Vec<i16>),\n Uint16Array(Vec<u16>),\n Int32Array(Vec<i32>),\n Uint32Array(Vec<u32>),\n Int64Array(Vec<i64>),\n Uint64Array(Vec<u64>),\n StringArray(Vec<String>),\n HrTime(i64),\n NvList(Box<NvList>),\n NvListArray(Vec<Box<NvList>>),\n BooleanValue(bool),\n Int8(i8),\n Uint8(u8),\n BooleanArray(Vec<bool>),\n Int8Array(Vec<i8>),\n Uint8Array(Vec<u8>),\n}\n\nimpl NvValue {\n pub fn data_type(&self) -> DataType {\n match *self {\n NvValue::Unknown => DataType::Unknown,\n NvValue::Boolean => DataType::Boolean,\n NvValue::Byte(_) => DataType::Byte,\n NvValue::Int16(_) => DataType::Int16,\n NvValue::Uint16(_) => DataType::Uint16,\n NvValue::Int32(_) => DataType::Int32,\n NvValue::Uint32(_) => DataType::Uint32,\n NvValue::Int64(_) => DataType::Int64,\n NvValue::Uint64(_) => DataType::Uint64,\n NvValue::String(_) => DataType::String,\n NvValue::ByteArray(_) => DataType::ByteArray,\n NvValue::Int16Array(_) => DataType::Int16Array,\n NvValue::Uint16Array(_) => DataType::Uint16Array,\n NvValue::Int32Array(_) => DataType::Int32Array,\n NvValue::Uint32Array(_) => DataType::Uint32Array,\n NvValue::Int64Array(_) => DataType::Int64Array,\n NvValue::Uint64Array(_) => DataType::Uint64Array,\n NvValue::StringArray(_) => DataType::StringArray,\n NvValue::HrTime(_) => DataType::HrTime,\n NvValue::NvList(_) => DataType::NvList,\n NvValue::NvListArray(_) => DataType::NvListArray,\n NvValue::BooleanValue(_) => DataType::BooleanValue,\n NvValue::Int8(_) => DataType::Int8,\n NvValue::Uint8(_) => DataType::Uint8,\n NvValue::BooleanArray(_) => DataType::BooleanArray,\n NvValue::Int8Array(_) => DataType::Int8Array,\n NvValue::Uint8Array(_) => DataType::Uint8Array,\n }\n }\n\n pub fn num_elements(&self) -> usize {\n match *self {\n NvValue::Unknown => 1,\n NvValue::Boolean => 1,\n NvValue::Byte(_) => 1,\n NvValue::Int16(_) => 1,\n NvValue::Uint16(_) => 1,\n NvValue::Int32(_) => 1,\n NvValue::Uint32(_) => 1,\n NvValue::Int64(_) => 1,\n NvValue::Uint64(_) => 1,\n NvValue::String(_) => 1,\n NvValue::ByteArray(ref a) => a.len(),\n NvValue::Int16Array(ref a) => a.len(),\n NvValue::Uint16Array(ref a) => a.len(),\n NvValue::Int32Array(ref a) => a.len(),\n NvValue::Uint32Array(ref a) => a.len(),\n NvValue::Int64Array(ref a) => a.len(),\n NvValue::Uint64Array(ref a) => a.len(),\n NvValue::StringArray(ref a) => a.len(),\n NvValue::HrTime(_) => 1,\n NvValue::NvList(_) => 1,\n NvValue::NvListArray(ref a) => a.len(),\n NvValue::BooleanValue(_) => 1,\n NvValue::Int8(_) => 1,\n NvValue::Uint8(_) => 1,\n NvValue::BooleanArray(ref a) => a.len(),\n NvValue::Int8Array(ref a) => a.len(),\n NvValue::Uint8Array(ref a) => a.len(),\n }\n }\n}\n\nimpl fmt::Debug for NvValue {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n NvValue::Int64(v) => write!(f, \"Int64(0x{:X})\", v),\n NvValue::Uint64(v) => write!(f, \"Uint64(0x{:X})\", v),\n NvValue::NvList(ref v) => write!(f, \"NvList({:?})\", **v),\n NvValue::NvListArray(ref v) => {\n try!(write!(f, \"NvListArray([\"));\n for nv_list in v {\n try!(write!(f, \"NvList({:?})\", **nv_list));\n }\n write!(f, \"])\")\n },\n NvValue::String(ref v) => { write!(f, \"String({})\", v) },\n _ => write!(f, \"{:?}\", self),\n }\n }\n}\n\n#[derive(Copy, Clone, Debug)]\npub enum DataType {\n Unknown = 0,\n 
Boolean,\n Byte,\n Int16,\n Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String,\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n HrTime,\n NvList,\n NvListArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array,\n}\n\nimpl DataType {\n pub fn from_u8(u: u8) -> Option<DataType> {\n match u {\n 0 => Some(DataType::Unknown),\n 1 => Some(DataType::Boolean),\n 2 => Some(DataType::Byte),\n 3 => Some(DataType::Int16),\n 4 => Some(DataType::Uint16),\n 5 => Some(DataType::Int32),\n 6 => Some(DataType::Uint32),\n 7 => Some(DataType::Int64),\n 8 => Some(DataType::Uint64),\n 9 => Some(DataType::String),\n 10 => Some(DataType::ByteArray),\n 11 => Some(DataType::Int16Array),\n 12 => Some(DataType::Uint16Array),\n 13 => Some(DataType::Int32Array),\n 14 => Some(DataType::Uint32Array),\n 15 => Some(DataType::Int64Array),\n 16 => Some(DataType::Uint64Array),\n 17 => Some(DataType::StringArray),\n 18 => Some(DataType::HrTime),\n 19 => Some(DataType::NvList),\n 20 => Some(DataType::NvListArray),\n 21 => Some(DataType::BooleanValue),\n 22 => Some(DataType::Int8),\n 23 => Some(DataType::Uint8),\n 24 => Some(DataType::BooleanArray),\n 25 => Some(DataType::Int8Array),\n 26 => Some(DataType::Uint8Array),\n _ => None,\n }\n }\n\n pub fn to_u8(self) -> u8 {\n match self {\n DataType::Unknown => 0,\n DataType::Boolean => 1,\n DataType::Byte => 2,\n DataType::Int16 => 3,\n DataType::Uint16 => 4,\n DataType::Int32 => 5,\n DataType::Uint32 => 6,\n DataType::Int64 => 7,\n DataType::Uint64 => 8,\n DataType::String => 9,\n DataType::ByteArray => 10,\n DataType::Int16Array => 11,\n DataType::Uint16Array => 12,\n DataType::Int32Array => 13,\n DataType::Uint32Array => 14,\n DataType::Int64Array => 15,\n DataType::Uint64Array => 16,\n DataType::StringArray => 17,\n DataType::HrTime => 18,\n DataType::NvList => 19,\n DataType::NvListArray => 20,\n DataType::BooleanValue => 21,\n DataType::Int8 => 22,\n DataType::Uint8 => 23,\n DataType::BooleanArray => 24,\n DataType::Int8Array => 25,\n DataType::Uint8Array => 26,\n }\n }\n}\n<commit_msg>NvList::find_mut<commit_after>use redox::*;\n\n\/\/ nvp implementation version\npub const NV_VERSION: i32 = 0;\n\n\/\/ nvlist header\n\/\/#[derive(Debug)]\npub struct NvList {\n pub version: i32,\n pub nvflag: u32, \/\/ persistent flags\n pub pairs: Vec<(String, NvValue)>,\n}\n\nimpl NvList {\n pub fn new(nvflag: u32) -> Self {\n NvList {\n version: NV_VERSION,\n nvflag: nvflag,\n pairs: Vec::new(),\n }\n }\n\n pub fn find(&self, name: &str) -> Option<&NvValue> {\n for pair in &self.pairs {\n if pair.0.as_str() == name {\n return Some(&pair.1);\n }\n }\n None\n }\n\n pub fn find_mut(&mut self, name: &str) -> Option<&mut NvValue> {\n for pair in &mut self.pairs {\n if pair.0.as_str() == name {\n return Some(&mut pair.1);\n }\n }\n None\n }\n}\n\nimpl fmt::Debug for NvList {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n try!(write!(f, \"NvList {{ version: {:X}, nvflag: {:X}, pairs: [\\n\", self.version, self.nvflag));\n for &(ref name, ref value) in &self.pairs {\n try!(write!(f, \"{} : {:?}\\n\", name, value));\n }\n try!(write!(f, \"] }}\\n\"));\n Ok(())\n }\n}\n\n\/\/ TODO Auto implement Debug. format! 
currently crashes with big u32 values\n\/\/#[derive(Debug)]\npub enum NvValue {\n Unknown,\n Boolean,\n Byte(u8),\n Int16(i16),\n Uint16(u16),\n Int32(i32),\n Uint32(u32),\n Int64(i64),\n Uint64(u64),\n String(String),\n ByteArray(Vec<u8>),\n Int16Array(Vec<i16>),\n Uint16Array(Vec<u16>),\n Int32Array(Vec<i32>),\n Uint32Array(Vec<u32>),\n Int64Array(Vec<i64>),\n Uint64Array(Vec<u64>),\n StringArray(Vec<String>),\n HrTime(i64),\n NvList(Box<NvList>),\n NvListArray(Vec<Box<NvList>>),\n BooleanValue(bool),\n Int8(i8),\n Uint8(u8),\n BooleanArray(Vec<bool>),\n Int8Array(Vec<i8>),\n Uint8Array(Vec<u8>),\n}\n\nimpl NvValue {\n pub fn data_type(&self) -> DataType {\n match *self {\n NvValue::Unknown => DataType::Unknown,\n NvValue::Boolean => DataType::Boolean,\n NvValue::Byte(_) => DataType::Byte,\n NvValue::Int16(_) => DataType::Int16,\n NvValue::Uint16(_) => DataType::Uint16,\n NvValue::Int32(_) => DataType::Int32,\n NvValue::Uint32(_) => DataType::Uint32,\n NvValue::Int64(_) => DataType::Int64,\n NvValue::Uint64(_) => DataType::Uint64,\n NvValue::String(_) => DataType::String,\n NvValue::ByteArray(_) => DataType::ByteArray,\n NvValue::Int16Array(_) => DataType::Int16Array,\n NvValue::Uint16Array(_) => DataType::Uint16Array,\n NvValue::Int32Array(_) => DataType::Int32Array,\n NvValue::Uint32Array(_) => DataType::Uint32Array,\n NvValue::Int64Array(_) => DataType::Int64Array,\n NvValue::Uint64Array(_) => DataType::Uint64Array,\n NvValue::StringArray(_) => DataType::StringArray,\n NvValue::HrTime(_) => DataType::HrTime,\n NvValue::NvList(_) => DataType::NvList,\n NvValue::NvListArray(_) => DataType::NvListArray,\n NvValue::BooleanValue(_) => DataType::BooleanValue,\n NvValue::Int8(_) => DataType::Int8,\n NvValue::Uint8(_) => DataType::Uint8,\n NvValue::BooleanArray(_) => DataType::BooleanArray,\n NvValue::Int8Array(_) => DataType::Int8Array,\n NvValue::Uint8Array(_) => DataType::Uint8Array,\n }\n }\n\n pub fn num_elements(&self) -> usize {\n match *self {\n NvValue::Unknown => 1,\n NvValue::Boolean => 1,\n NvValue::Byte(_) => 1,\n NvValue::Int16(_) => 1,\n NvValue::Uint16(_) => 1,\n NvValue::Int32(_) => 1,\n NvValue::Uint32(_) => 1,\n NvValue::Int64(_) => 1,\n NvValue::Uint64(_) => 1,\n NvValue::String(_) => 1,\n NvValue::ByteArray(ref a) => a.len(),\n NvValue::Int16Array(ref a) => a.len(),\n NvValue::Uint16Array(ref a) => a.len(),\n NvValue::Int32Array(ref a) => a.len(),\n NvValue::Uint32Array(ref a) => a.len(),\n NvValue::Int64Array(ref a) => a.len(),\n NvValue::Uint64Array(ref a) => a.len(),\n NvValue::StringArray(ref a) => a.len(),\n NvValue::HrTime(_) => 1,\n NvValue::NvList(_) => 1,\n NvValue::NvListArray(ref a) => a.len(),\n NvValue::BooleanValue(_) => 1,\n NvValue::Int8(_) => 1,\n NvValue::Uint8(_) => 1,\n NvValue::BooleanArray(ref a) => a.len(),\n NvValue::Int8Array(ref a) => a.len(),\n NvValue::Uint8Array(ref a) => a.len(),\n }\n }\n}\n\nimpl fmt::Debug for NvValue {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n NvValue::Int64(v) => write!(f, \"Int64(0x{:X})\", v),\n NvValue::Uint64(v) => write!(f, \"Uint64(0x{:X})\", v),\n NvValue::NvList(ref v) => write!(f, \"NvList({:?})\", **v),\n NvValue::NvListArray(ref v) => {\n try!(write!(f, \"NvListArray([\"));\n for nv_list in v {\n try!(write!(f, \"NvList({:?})\", **nv_list));\n }\n write!(f, \"])\")\n },\n NvValue::String(ref v) => { write!(f, \"String({})\", v) },\n _ => write!(f, \"{:?}\", self),\n }\n }\n}\n\n#[derive(Copy, Clone, Debug)]\npub enum DataType {\n Unknown = 0,\n Boolean,\n Byte,\n Int16,\n 
Uint16,\n Int32,\n Uint32,\n Int64,\n Uint64,\n String,\n ByteArray,\n Int16Array,\n Uint16Array,\n Int32Array,\n Uint32Array,\n Int64Array,\n Uint64Array,\n StringArray,\n HrTime,\n NvList,\n NvListArray,\n BooleanValue,\n Int8,\n Uint8,\n BooleanArray,\n Int8Array,\n Uint8Array,\n}\n\nimpl DataType {\n pub fn from_u8(u: u8) -> Option<DataType> {\n match u {\n 0 => Some(DataType::Unknown),\n 1 => Some(DataType::Boolean),\n 2 => Some(DataType::Byte),\n 3 => Some(DataType::Int16),\n 4 => Some(DataType::Uint16),\n 5 => Some(DataType::Int32),\n 6 => Some(DataType::Uint32),\n 7 => Some(DataType::Int64),\n 8 => Some(DataType::Uint64),\n 9 => Some(DataType::String),\n 10 => Some(DataType::ByteArray),\n 11 => Some(DataType::Int16Array),\n 12 => Some(DataType::Uint16Array),\n 13 => Some(DataType::Int32Array),\n 14 => Some(DataType::Uint32Array),\n 15 => Some(DataType::Int64Array),\n 16 => Some(DataType::Uint64Array),\n 17 => Some(DataType::StringArray),\n 18 => Some(DataType::HrTime),\n 19 => Some(DataType::NvList),\n 20 => Some(DataType::NvListArray),\n 21 => Some(DataType::BooleanValue),\n 22 => Some(DataType::Int8),\n 23 => Some(DataType::Uint8),\n 24 => Some(DataType::BooleanArray),\n 25 => Some(DataType::Int8Array),\n 26 => Some(DataType::Uint8Array),\n _ => None,\n }\n }\n\n pub fn to_u8(self) -> u8 {\n match self {\n DataType::Unknown => 0,\n DataType::Boolean => 1,\n DataType::Byte => 2,\n DataType::Int16 => 3,\n DataType::Uint16 => 4,\n DataType::Int32 => 5,\n DataType::Uint32 => 6,\n DataType::Int64 => 7,\n DataType::Uint64 => 8,\n DataType::String => 9,\n DataType::ByteArray => 10,\n DataType::Int16Array => 11,\n DataType::Uint16Array => 12,\n DataType::Int32Array => 13,\n DataType::Uint32Array => 14,\n DataType::Int64Array => 15,\n DataType::Uint64Array => 16,\n DataType::StringArray => 17,\n DataType::HrTime => 18,\n DataType::NvList => 19,\n DataType::NvListArray => 20,\n DataType::BooleanValue => 21,\n DataType::Int8 => 22,\n DataType::Uint8 => 23,\n DataType::BooleanArray => 24,\n DataType::Int8Array => 25,\n DataType::Uint8Array => 26,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Ignore Broken Pipe errors when writing list<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement \"Strip comments from a string\".<commit_after>\/\/ http:\/\/rosettacode.org\/wiki\/Strip_comments_from_a_string\n\nfn strip_comments(str: &str) -> &str {\n let markers = ['#', ';'];\n str.find(markers.as_slice()).map_or(str, |i| str.slice_to(i)).trim_right()\n}\n\n#[test]\nfn test_strip_comments() {\n let inputs = [\"apples, pears # and bananas\",\n \"apples, pears ; and bananas\",\n \"apples, pears \"];\n let output = \"apples, pears\";\n\n for &input in inputs.iter() {\n assert_eq!(strip_comments(input), output)\n }\n}\n\n#[cfg(not(test))]\nfn main() {\n let inputs = [\"apples, pears # and bananas\",\n \"apples, pears ; and bananas\",\n \"apples, pears \"];\n\n for &input in inputs.iter() {\n println!(\"Input: {}\\nStripped: {}\", input, strip_comments(input))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>testsuite: Add compile-fail test for #5883<commit_after>\/\/ Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::io;\n\nstruct Struct {\n r: io::Reader \/\/~ ERROR reference to trait `io::Reader` where a type is expected\n}\n\nfn new_struct(r: io::Reader) -> Struct { \/\/~ ERROR reference to trait `io::Reader` where a type is expected\n Struct { r: r }\n}\n\ntrait Curve {}\nenum E {X(Curve)} \/\/~ ERROR reference to trait `Curve` where a type is expected\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a run-pass test for linked failure<commit_after>\/\/ -*- rust -*-\n\n\/\/ error-pattern:1 == 2\nuse std;\nimport std::task;\nimport std::comm::port;\nimport std::comm::recv;\n\nfn child() { assert (1 == 2); }\n\nfn parent() {\n \/\/ Since this task isn't supervised it won't bring down the whole\n \/\/ process\n task::unsupervise();\n let p = port::<int>();\n let f = child;\n task::spawn(f);\n let x = recv(p);\n}\n\nfn main() {\n let f = parent;\n task::spawn(f);\n}<|endoftext|>"} {"text":"<commit_before>use audio::ac97::AC97;\nuse audio::intelhda::IntelHDA;\n\nuse core::cell::UnsafeCell;\n\nuse common::debug;\n\nuse disk::ahci::Ahci;\nuse disk::ide::Ide;\n\nuse env::Environment;\n\nuse network::intel8254x::Intel8254x;\nuse network::rtl8139::Rtl8139;\n\nuse schemes::file::FileScheme;\n\nuse usb::ehci::Ehci;\nuse usb::ohci::Ohci;\nuse usb::uhci::Uhci;\nuse usb::xhci::Xhci;\n\nuse super::config::PciConfig;\nuse super::common::class::*;\nuse super::common::subclass::*;\nuse super::common::programming_interface::*;\nuse super::common::vendorid::*;\nuse super::common::deviceid::*;\n\n\/\/\/ PCI device\npub unsafe fn pci_device(env: &mut Environment,\n mut pci: PciConfig,\n class_id: u8,\n subclass_id: u8,\n interface_id: u8,\n vendor_code: u16,\n device_code: u16) {\n if class_id == MASS_STORAGE {\n if subclass_id == IDE {\n if let Some(module) = FileScheme::new(Ide::disks(pci)) {\n env.schemes.push(UnsafeCell::new(module));\n }\n } else if subclass_id == SATA && interface_id == AHCI {\n if let Some(module) = FileScheme::new(Ahci::disks(pci)) {\n env.schemes.push(UnsafeCell::new(module));\n }\n }\n } else if class_id == SERIAL_BUS && subclass_id == USB {\n if interface_id == XHCI {\n let base = pci.read(0x10) as usize;\n\n let mut module = box Xhci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n env.schemes.push(UnsafeCell::new(module));\n } else if interface_id == EHCI {\n env.schemes.push(UnsafeCell::new(Ehci::new(pci)));\n } else if interface_id == OHCI {\n env.schemes.push(UnsafeCell::new(Ohci::new(pci)));\n } else if interface_id == UHCI {\n env.schemes.push(UnsafeCell::new(Uhci::new(pci)));\n } else {\n debug!(\"Unknown USB interface version {:X}\\n\", interface_id);\n }\n } else {\n match (vendor_code, device_code) {\n (REALTEK, RTL8139) => env.schemes.push(UnsafeCell::new(Rtl8139::new(pci))),\n (INTEL, GBE_82540EM) => env.schemes.push(UnsafeCell::new(Intel8254x::new(pci))),\n (INTEL, AC97_82801AA) => env.schemes.push(UnsafeCell::new(AC97::new(pci))),\n (INTEL, AC97_ICH4) => env.schemes.push(UnsafeCell::new(AC97::new(pci))),\n (INTEL, 
INTELHDA_ICH6) => {\n let base = pci.read(0x10) as usize;\n let mut module = box IntelHDA {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n env.schemes.push(UnsafeCell::new(module));\n }\n _ => (),\n }\n }\n}\n\n\/\/\/ Initialize PCI session\npub unsafe fn pci_init(env: &mut Environment) {\n for bus in 0..256 {\n for slot in 0..32 {\n for func in 0..8 {\n let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);\n let id = pci.read(0);\n\n if (id & 0xFFFF) != 0xFFFF {\n let class_id = pci.read(8);\n\n debug!(\" * PCI {}, {}, {}: ID {:X} CL {:X}\",\n bus,\n slot,\n func,\n id,\n class_id);\n\n for i in 0..6 {\n let bar = pci.read(i * 4 + 0x10);\n if bar > 0 {\n debug!(\" BAR{}: {:X}\", i, bar);\n\n pci.write(i * 4 + 0x10, 0xFFFFFFFF);\n let size = (0xFFFFFFFF - (pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;\n pci.write(i * 4 + 0x10, bar);\n\n if size > 0 {\n debug!(\" {}\", size);\n }\n }\n }\n\n debug::dl();\n\n pci_device(env,\n pci,\n ((class_id >> 24) & 0xFF) as u8,\n ((class_id >> 16) & 0xFF) as u8,\n ((class_id >> 8) & 0xFF) as u8,\n (id & 0xFFFF) as u16,\n ((id >> 16) & 0xFFFF) as u16);\n }\n }\n }\n }\n}\n<commit_msg>[PCI binding] Use match instead of if-else sequense<commit_after>use audio::ac97::AC97;\nuse audio::intelhda::IntelHDA;\n\nuse core::cell::UnsafeCell;\n\nuse common::debug;\n\nuse disk::ahci::Ahci;\nuse disk::ide::Ide;\n\nuse env::Environment;\n\nuse network::intel8254x::Intel8254x;\nuse network::rtl8139::Rtl8139;\n\nuse schemes::file::FileScheme;\n\nuse usb::ehci::Ehci;\nuse usb::ohci::Ohci;\nuse usb::uhci::Uhci;\nuse usb::xhci::Xhci;\n\nuse super::config::PciConfig;\nuse super::common::class::*;\nuse super::common::subclass::*;\nuse super::common::programming_interface::*;\nuse super::common::vendorid::*;\nuse super::common::deviceid::*;\n\n\/\/\/ PCI device\npub unsafe fn pci_device(env: &mut Environment,\n mut pci: PciConfig,\n class_id: u8,\n subclass_id: u8,\n interface_id: u8,\n vendor_code: u16,\n device_code: u16) {\n match (class_id, subclass_id, interface_id) {\n (MASS_STORAGE, IDE, _) => {\n if let Some(module) = FileScheme::new(Ide::disks(pci)) {\n env.schemes.push(UnsafeCell::new(module));\n }\n }\n (MASS_STORAGE, SATA, AHCI) => {\n if let Some(module) = FileScheme::new(Ahci::disks(pci)) {\n env.schemes.push(UnsafeCell::new(module));\n }\n }\n (SERIAL_BUS, USB, UHCI) => env.schemes.push(UnsafeCell::new(Uhci::new(pci))),\n (SERIAL_BUS, USB, OHCI) => env.schemes.push(UnsafeCell::new(Ohci::new(pci))),\n (SERIAL_BUS, USB, EHCI) => env.schemes.push(UnsafeCell::new(Ehci::new(pci))),\n (SERIAL_BUS, USB, XHCI) => {\n let base = pci.read(0x10) as usize;\n let mut module = box Xhci {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n env.schemes.push(UnsafeCell::new(module));\n }\n _ => {\n match (vendor_code, device_code) {\n (REALTEK, RTL8139) => env.schemes.push(UnsafeCell::new(Rtl8139::new(pci))),\n (INTEL, GBE_82540EM) => env.schemes.push(UnsafeCell::new(Intel8254x::new(pci))),\n (INTEL, AC97_82801AA) => env.schemes.push(UnsafeCell::new(AC97::new(pci))),\n (INTEL, AC97_ICH4) => env.schemes.push(UnsafeCell::new(AC97::new(pci))),\n (INTEL, INTELHDA_ICH6) => {\n let base = pci.read(0x10) as usize;\n let mut module = box IntelHDA {\n pci: pci,\n base: base & 0xFFFFFFF0,\n memory_mapped: base & 1 == 0,\n irq: pci.read(0x3C) as u8 & 0xF,\n };\n module.init();\n env.schemes.push(UnsafeCell::new(module));\n 
}\n _ => (),\n }\n }\n }\n}\n\n\/\/\/ Initialize PCI session\npub unsafe fn pci_init(env: &mut Environment) {\n for bus in 0..256 {\n for slot in 0..32 {\n for func in 0..8 {\n let mut pci = PciConfig::new(bus as u8, slot as u8, func as u8);\n let id = pci.read(0);\n\n if (id & 0xFFFF) != 0xFFFF {\n let class_id = pci.read(8);\n\n debug!(\" * PCI {}, {}, {}: ID {:X} CL {:X}\",\n bus,\n slot,\n func,\n id,\n class_id);\n\n for i in 0..6 {\n let bar = pci.read(i * 4 + 0x10);\n if bar > 0 {\n debug!(\" BAR{}: {:X}\", i, bar);\n\n pci.write(i * 4 + 0x10, 0xFFFFFFFF);\n let size = (0xFFFFFFFF - (pci.read(i * 4 + 0x10) & 0xFFFFFFF0)) + 1;\n pci.write(i * 4 + 0x10, bar);\n\n if size > 0 {\n debug!(\" {}\", size);\n }\n }\n }\n\n debug::dl();\n\n pci_device(env,\n pci,\n ((class_id >> 24) & 0xFF) as u8,\n ((class_id >> 16) & 0xFF) as u8,\n ((class_id >> 8) & 0xFF) as u8,\n (id & 0xFFFF) as u16,\n ((id >> 16) & 0xFFFF) as u16);\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name and the code which runs the functionality associated with it, taking zero, one, or several arguments.\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ main: box|args: &Vec<String>| {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"cd\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"echo\",\n main: Box::new(|args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n }),\n });\n\n commands.push(Command {\n name: \"else\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"exec\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n }),\n });\n\n commands.push(Command {\n name: \"exit\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"fi\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"if\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"ls\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: Box::new(|_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n }),\n });\n\n commands.push(Command {\n name: \"read\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"run\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: Box::new(|args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n }),\n });\n\n commands.push(Command {\n name: \"send\",\n main: Box::new(|args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n 
return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"wget\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n }),\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: Box::new(move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n }),\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n self.set_var(name, &value);\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n\n pub fn set_var(&mut self, name: &str, value: &str){\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value.to_string();\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name.to_string(),\n value: value.to_string(),\n });\n }\n }\n\n \/\/\/ Method to return the current directory\n \/\/\/ If the current directory canno't be find, a default string (\"?\") will be returned\n pub fn get_current_directory(&mut self) -> String {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n \/\/ Return the current path\n return path\n }\n \/\/ Return a default string if the path canno't be find\n else {\n return \"?\".to_string()\n }\n }\n else {\n return \"?\".to_string()\n }\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n self.on_command(&command);\n }\n\n loop {\n for mode in self.modes.iter().rev() {\n if mode.value {\n print!(\"+ \");\n } else {\n print!(\"- \");\n }\n }\n print!(\"user@redox:{}# \", self.get_current_directory());\n if let Some(command_original) = readln!() {\n let command = command_original.trim();\n if command == \"exit\" {\n println!(\"Exit temporarily blocked (due to using terminal as init)\")\n \/\/break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n } else {\n println!(\"Failed to read from stdin\");\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = Box::new(Application::new());\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Rustify get_current_directory<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which run the functionnality associated to this one, with zero, one or several argument(s).\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/ name: \"my_command\",\n\/\/\/ main: box|args: &Vec<String>| {\n\/\/\/ println!(\"Say 'hello' to my command! 
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"cd\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"echo\",\n main: Box::new(|args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n }),\n });\n\n commands.push(Command {\n name: \"else\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"exec\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n }),\n });\n\n commands.push(Command {\n name: \"exit\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"fi\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"if\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"ls\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }),\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: Box::new(|_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n }),\n });\n\n commands.push(Command {\n name: \"read\",\n main: Box::new(|_: &Vec<String>| {}),\n });\n\n commands.push(Command {\n name: \"run\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: Box::new(|args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n }),\n });\n\n commands.push(Command {\n name: \"send\",\n main: Box::new(|args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n 
return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: Box::new(|args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }),\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: Box::new(|args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n }),\n });\n\n commands.push(Command {\n name: \"wget\",\n main: Box::new(|args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n }),\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: Box::new(move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n }),\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n for i in 1..args.len() {\n if let Some(arg) = args.get(i) {\n value = value + \" \" + &arg;\n }\n }\n\n self.set_var(name, &value);\n return;\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if &command.name == cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n println!(\"Unknown command: '{}'\", cmd);\n }\n }\n\n\n pub fn set_var(&mut self, name: &str, value: &str){\n if name.is_empty() {\n return;\n }\n\n if value.is_empty() {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n None => break,\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n } else {\n for variable in self.variables.iter_mut() {\n if variable.name == name {\n variable.value = value.to_string();\n return;\n }\n }\n\n self.variables.push(Variable {\n name: name.to_string(),\n value: value.to_string(),\n });\n }\n }\n\n \/\/\/ Method to return the current directory\n \/\/\/ If the current directory cannot be found, a default string (\"?\") will be returned\n pub fn get_current_directory(&mut self) -> String {\n \/\/ Return the current path\n File::open(\"\")\n .and_then(|file| file.path())\n .unwrap_or(\"?\".to_string())\n }\n\n \/\/\/ Run the application\n pub fn main(&mut self) {\n println!(\"Type help for a command list\");\n if let Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n self.on_command(&command);\n }\n\n loop {\n for mode in self.modes.iter().rev() {\n if mode.value {\n print!(\"+ \");\n } else {\n print!(\"- \");\n }\n }\n print!(\"user@redox:{}# \", self.get_current_directory());\n if let Some(command_original) = readln!() {\n let command = command_original.trim();\n if command == \"exit\" {\n println!(\"Exit temporarily blocked (due to using terminal as init)\")\n \/\/break;\n } else if !command.is_empty() {\n self.on_command(&command);\n }\n } else {\n println!(\"Failed to read from stdin\");\n }\n }\n }\n}\n\npub fn main() {\n unsafe {\n let mut app = Box::new(Application::new());\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove redundant import<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify get_data_for_date by extracting crate filtering.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Some lints that are built in to the compiler.\n\/\/!\n\/\/! These are the built-in lints that are emitted direct in the main\n\/\/! compiler code, rather than using their own custom pass. Those\n\/\/! lints are all available in `rustc_lint::builtin`.\n\nuse lint::{LintPass, LateLintPass, LintArray};\n\ndeclare_lint! {\n pub CONST_ERR,\n Warn,\n \"constant evaluation detected erroneous expression\"\n}\n\ndeclare_lint! {\n pub UNUSED_IMPORTS,\n Warn,\n \"imports that are never used\"\n}\n\ndeclare_lint! 
{\n pub UNUSED_EXTERN_CRATES,\n Warn,\n \"extern crates that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_QUALIFICATIONS,\n Allow,\n \"detects unnecessarily qualified names\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_LINTS,\n Warn,\n \"unrecognized lint attribute\"\n}\n\ndeclare_lint! {\n pub UNUSED_VARIABLES,\n Warn,\n \"detect variables which are not used in any way\"\n}\n\ndeclare_lint! {\n pub UNUSED_ASSIGNMENTS,\n Warn,\n \"detect assignments that will never be read\"\n}\n\ndeclare_lint! {\n pub DEAD_CODE,\n Warn,\n \"detect unused, unexported items\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_CODE,\n Warn,\n \"detects unreachable code paths\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_PATTERNS,\n Warn,\n \"detects unreachable patterns\"\n}\n\ndeclare_lint! {\n pub UNUSED_MACROS,\n Warn,\n \"detects macros that were not used\"\n}\n\ndeclare_lint! {\n pub WARNINGS,\n Warn,\n \"mass-change the level for lints which produce warnings\"\n}\n\ndeclare_lint! {\n pub UNUSED_FEATURES,\n Warn,\n \"unused or unknown features found in crate-level #[feature] directives\"\n}\n\ndeclare_lint! {\n pub STABLE_FEATURES,\n Warn,\n \"stable features found in #[feature] directive\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_CRATE_TYPES,\n Deny,\n \"unknown crate type found in #[crate_type] directive\"\n}\n\ndeclare_lint! {\n pub FAT_PTR_TRANSMUTES,\n Allow,\n \"detects transmutes of fat pointers\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_CASTS,\n Allow,\n \"detects trivial casts which could be removed\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_NUMERIC_CASTS,\n Allow,\n \"detects trivial casts of numeric types which could be removed\"\n}\n\ndeclare_lint! {\n pub PRIVATE_IN_PUBLIC,\n Warn,\n \"detect private items in public interfaces not caught by the old implementation\"\n}\n\ndeclare_lint! {\n pub PUB_USE_OF_PRIVATE_EXTERN_CRATE,\n Deny,\n \"detect public reexports of private extern crates\"\n}\n\ndeclare_lint! {\n pub INVALID_TYPE_PARAM_DEFAULT,\n Deny,\n \"type parameter default erroneously allowed in invalid location\"\n}\n\ndeclare_lint! {\n pub RENAMED_AND_REMOVED_LINTS,\n Warn,\n \"lints that have been renamed or removed\"\n}\n\ndeclare_lint! {\n pub RESOLVE_TRAIT_ON_DEFAULTED_UNIT,\n Deny,\n \"attempt to resolve a trait on an expression whose type cannot be inferred but which \\\n currently defaults to ()\"\n}\n\ndeclare_lint! {\n pub SAFE_EXTERN_STATICS,\n Deny,\n \"safe access to extern statics was erroneously allowed\"\n}\n\ndeclare_lint! {\n pub PATTERNS_IN_FNS_WITHOUT_BODY,\n Warn,\n \"patterns in functions without body were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub EXTRA_REQUIREMENT_IN_IMPL,\n Deny,\n \"detects extra requirements in impls that were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub LEGACY_DIRECTORY_OWNERSHIP,\n Deny,\n \"non-inline, non-`#[path]` modules (e.g. `mod foo;`) were erroneously allowed in some files \\\n not named `mod.rs`\"\n}\n\ndeclare_lint! {\n pub LEGACY_IMPORTS,\n Deny,\n \"detects names that resolve to ambiguous glob imports with RFC 1560\"\n}\n\ndeclare_lint! {\n pub LEGACY_CONSTRUCTOR_VISIBILITY,\n Deny,\n \"detects use of struct constructors that would be invisible with new visibility rules\"\n}\n\ndeclare_lint! {\n pub MISSING_FRAGMENT_SPECIFIER,\n Deny,\n \"detects missing fragment specifiers in unused `macro_rules!` patterns\"\n}\n\ndeclare_lint! {\n pub PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,\n Deny,\n \"detects parenthesized generic parameters in type and module names\"\n}\n\ndeclare_lint! 
{\n pub LATE_BOUND_LIFETIME_ARGUMENTS,\n Warn,\n \"detects generic lifetime arguments in path segments with late bound lifetime parameters\"\n}\n\ndeclare_lint! {\n pub DEPRECATED,\n Warn,\n \"detects use of deprecated items\"\n}\n\ndeclare_lint! {\n pub UNUSED_UNSAFE,\n Warn,\n \"unnecessary use of an `unsafe` block\"\n}\n\n\/\/\/ Does nothing as a lint pass, but registers some `Lint`s\n\/\/\/ which are used by other parts of the compiler.\n#[derive(Copy, Clone)]\npub struct HardwiredLints;\n\nimpl LintPass for HardwiredLints {\n fn get_lints(&self) -> LintArray {\n lint_array!(\n UNUSED_IMPORTS,\n UNUSED_EXTERN_CRATES,\n UNUSED_QUALIFICATIONS,\n UNKNOWN_LINTS,\n UNUSED_VARIABLES,\n UNUSED_ASSIGNMENTS,\n DEAD_CODE,\n UNREACHABLE_CODE,\n UNREACHABLE_PATTERNS,\n UNUSED_MACROS,\n WARNINGS,\n UNUSED_FEATURES,\n STABLE_FEATURES,\n UNKNOWN_CRATE_TYPES,\n FAT_PTR_TRANSMUTES,\n TRIVIAL_CASTS,\n TRIVIAL_NUMERIC_CASTS,\n PRIVATE_IN_PUBLIC,\n PUB_USE_OF_PRIVATE_EXTERN_CRATE,\n INVALID_TYPE_PARAM_DEFAULT,\n CONST_ERR,\n RENAMED_AND_REMOVED_LINTS,\n RESOLVE_TRAIT_ON_DEFAULTED_UNIT,\n SAFE_EXTERN_STATICS,\n PATTERNS_IN_FNS_WITHOUT_BODY,\n EXTRA_REQUIREMENT_IN_IMPL,\n LEGACY_DIRECTORY_OWNERSHIP,\n LEGACY_IMPORTS,\n LEGACY_CONSTRUCTOR_VISIBILITY,\n MISSING_FRAGMENT_SPECIFIER,\n PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,\n LATE_BOUND_LIFETIME_ARGUMENTS,\n DEPRECATED,\n UNUSED_UNSAFE\n )\n }\n}\n\nimpl<'a, 'tcx> LateLintPass<'a, 'tcx> for HardwiredLints {}\n<commit_msg>Auto merge of #44825 - dtolnay:cratelint, r=aturon<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Some lints that are built in to the compiler.\n\/\/!\n\/\/! These are the built-in lints that are emitted direct in the main\n\/\/! compiler code, rather than using their own custom pass. Those\n\/\/! lints are all available in `rustc_lint::builtin`.\n\nuse lint::{LintPass, LateLintPass, LintArray};\n\ndeclare_lint! {\n pub CONST_ERR,\n Warn,\n \"constant evaluation detected erroneous expression\"\n}\n\ndeclare_lint! {\n pub UNUSED_IMPORTS,\n Warn,\n \"imports that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_EXTERN_CRATES,\n Allow,\n \"extern crates that are never used\"\n}\n\ndeclare_lint! {\n pub UNUSED_QUALIFICATIONS,\n Allow,\n \"detects unnecessarily qualified names\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_LINTS,\n Warn,\n \"unrecognized lint attribute\"\n}\n\ndeclare_lint! {\n pub UNUSED_VARIABLES,\n Warn,\n \"detect variables which are not used in any way\"\n}\n\ndeclare_lint! {\n pub UNUSED_ASSIGNMENTS,\n Warn,\n \"detect assignments that will never be read\"\n}\n\ndeclare_lint! {\n pub DEAD_CODE,\n Warn,\n \"detect unused, unexported items\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_CODE,\n Warn,\n \"detects unreachable code paths\"\n}\n\ndeclare_lint! {\n pub UNREACHABLE_PATTERNS,\n Warn,\n \"detects unreachable patterns\"\n}\n\ndeclare_lint! {\n pub UNUSED_MACROS,\n Warn,\n \"detects macros that were not used\"\n}\n\ndeclare_lint! {\n pub WARNINGS,\n Warn,\n \"mass-change the level for lints which produce warnings\"\n}\n\ndeclare_lint! 
{\n pub UNUSED_FEATURES,\n Warn,\n \"unused or unknown features found in crate-level #[feature] directives\"\n}\n\ndeclare_lint! {\n pub STABLE_FEATURES,\n Warn,\n \"stable features found in #[feature] directive\"\n}\n\ndeclare_lint! {\n pub UNKNOWN_CRATE_TYPES,\n Deny,\n \"unknown crate type found in #[crate_type] directive\"\n}\n\ndeclare_lint! {\n pub FAT_PTR_TRANSMUTES,\n Allow,\n \"detects transmutes of fat pointers\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_CASTS,\n Allow,\n \"detects trivial casts which could be removed\"\n}\n\ndeclare_lint! {\n pub TRIVIAL_NUMERIC_CASTS,\n Allow,\n \"detects trivial casts of numeric types which could be removed\"\n}\n\ndeclare_lint! {\n pub PRIVATE_IN_PUBLIC,\n Warn,\n \"detect private items in public interfaces not caught by the old implementation\"\n}\n\ndeclare_lint! {\n pub PUB_USE_OF_PRIVATE_EXTERN_CRATE,\n Deny,\n \"detect public reexports of private extern crates\"\n}\n\ndeclare_lint! {\n pub INVALID_TYPE_PARAM_DEFAULT,\n Deny,\n \"type parameter default erroneously allowed in invalid location\"\n}\n\ndeclare_lint! {\n pub RENAMED_AND_REMOVED_LINTS,\n Warn,\n \"lints that have been renamed or removed\"\n}\n\ndeclare_lint! {\n pub RESOLVE_TRAIT_ON_DEFAULTED_UNIT,\n Deny,\n \"attempt to resolve a trait on an expression whose type cannot be inferred but which \\\n currently defaults to ()\"\n}\n\ndeclare_lint! {\n pub SAFE_EXTERN_STATICS,\n Deny,\n \"safe access to extern statics was erroneously allowed\"\n}\n\ndeclare_lint! {\n pub PATTERNS_IN_FNS_WITHOUT_BODY,\n Warn,\n \"patterns in functions without body were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub EXTRA_REQUIREMENT_IN_IMPL,\n Deny,\n \"detects extra requirements in impls that were erroneously allowed\"\n}\n\ndeclare_lint! {\n pub LEGACY_DIRECTORY_OWNERSHIP,\n Deny,\n \"non-inline, non-`#[path]` modules (e.g. `mod foo;`) were erroneously allowed in some files \\\n not named `mod.rs`\"\n}\n\ndeclare_lint! {\n pub LEGACY_IMPORTS,\n Deny,\n \"detects names that resolve to ambiguous glob imports with RFC 1560\"\n}\n\ndeclare_lint! {\n pub LEGACY_CONSTRUCTOR_VISIBILITY,\n Deny,\n \"detects use of struct constructors that would be invisible with new visibility rules\"\n}\n\ndeclare_lint! {\n pub MISSING_FRAGMENT_SPECIFIER,\n Deny,\n \"detects missing fragment specifiers in unused `macro_rules!` patterns\"\n}\n\ndeclare_lint! {\n pub PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,\n Deny,\n \"detects parenthesized generic parameters in type and module names\"\n}\n\ndeclare_lint! {\n pub LATE_BOUND_LIFETIME_ARGUMENTS,\n Warn,\n \"detects generic lifetime arguments in path segments with late bound lifetime parameters\"\n}\n\ndeclare_lint! {\n pub DEPRECATED,\n Warn,\n \"detects use of deprecated items\"\n}\n\ndeclare_lint! 
{\n pub UNUSED_UNSAFE,\n Warn,\n \"unnecessary use of an `unsafe` block\"\n}\n\n\/\/\/ Does nothing as a lint pass, but registers some `Lint`s\n\/\/\/ which are used by other parts of the compiler.\n#[derive(Copy, Clone)]\npub struct HardwiredLints;\n\nimpl LintPass for HardwiredLints {\n fn get_lints(&self) -> LintArray {\n lint_array!(\n UNUSED_IMPORTS,\n UNUSED_EXTERN_CRATES,\n UNUSED_QUALIFICATIONS,\n UNKNOWN_LINTS,\n UNUSED_VARIABLES,\n UNUSED_ASSIGNMENTS,\n DEAD_CODE,\n UNREACHABLE_CODE,\n UNREACHABLE_PATTERNS,\n UNUSED_MACROS,\n WARNINGS,\n UNUSED_FEATURES,\n STABLE_FEATURES,\n UNKNOWN_CRATE_TYPES,\n FAT_PTR_TRANSMUTES,\n TRIVIAL_CASTS,\n TRIVIAL_NUMERIC_CASTS,\n PRIVATE_IN_PUBLIC,\n PUB_USE_OF_PRIVATE_EXTERN_CRATE,\n INVALID_TYPE_PARAM_DEFAULT,\n CONST_ERR,\n RENAMED_AND_REMOVED_LINTS,\n RESOLVE_TRAIT_ON_DEFAULTED_UNIT,\n SAFE_EXTERN_STATICS,\n PATTERNS_IN_FNS_WITHOUT_BODY,\n EXTRA_REQUIREMENT_IN_IMPL,\n LEGACY_DIRECTORY_OWNERSHIP,\n LEGACY_IMPORTS,\n LEGACY_CONSTRUCTOR_VISIBILITY,\n MISSING_FRAGMENT_SPECIFIER,\n PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,\n LATE_BOUND_LIFETIME_ARGUMENTS,\n DEPRECATED,\n UNUSED_UNSAFE\n )\n }\n}\n\nimpl<'a, 'tcx> LateLintPass<'a, 'tcx> for HardwiredLints {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::{c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::{c, cvt};\nuse sys::handle::Handle;\n\nuse super::to_u16s;\n\npub fn errno() -> i32 {\n unsafe { c::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as c::DWORD;\n\n let mut buf = [0 as c::WCHAR; 2048];\n\n unsafe {\n let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |\n c::FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as c::DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as c::DWORD,\n ptr::null()) as usize;\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n match String::from_utf16(&buf[..res]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) 
=> format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: c::LPWCH,\n cur: c::LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n loop {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur as *const u16;\n let mut len = 0;\n while *p.offset(len) != 0 {\n len += 1;\n }\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n \/\/ Windows allows environment variables to start with an equals\n \/\/ symbol (in any other position, this is the separator between\n \/\/ variable name and value). Since`s` has at least length 1 at\n \/\/ this point (because the empty string terminates the array of\n \/\/ environment variables), we can safely slice.\n let pos = match s[1..].iter().position(|&u| u == b'=' as u16).map(|p| p + 1) {\n Some(p) => p,\n None => continue,\n };\n return Some((\n OsStringExt::from_wide(&s[..pos]),\n OsStringExt::from_wide(&s[pos+1..]),\n ))\n }\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { c::FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = c::GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. 
Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::<Vec<u16>>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.extend_from_slice(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.extend_from_slice(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::<Vec<_>>();\n p.push(0);\n\n cvt(unsafe {\n c::SetCurrentDirectoryW(p.as_ptr())\n }).map(|_| ())\n}\n\npub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {\n let k = try!(to_u16s(k));\n let res = super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n });\n match res {\n Ok(value) => Ok(Some(value)),\n Err(e) => {\n if e.raw_os_error() == Some(c::ERROR_ENVVAR_NOT_FOUND as i32) {\n Ok(None)\n } else {\n Err(e)\n }\n }\n }\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {\n let k = try!(to_u16s(k));\n let v = try!(to_u16s(v));\n\n cvt(unsafe {\n c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())\n }).map(|_| ())\n}\n\npub fn unsetenv(n: &OsStr) -> io::Result<()> {\n let v = try!(to_u16s(n));\n cvt(unsafe {\n c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())\n }).map(|_| ())\n}\n\npub struct Args {\n range: Range<isize>,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option<OsString> {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { 
self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option<PathBuf> {\n ::env::var_os(\"HOME\").or_else(|| {\n ::env::var_os(\"USERPROFILE\")\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if c::GetLastError() != 0 => 0,\n 0 => sz,\n n => n as c::DWORD,\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as c::UINT) }\n}\n<commit_msg>Auto merge of #31557 - retep998:house-directory, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of `std::os` functionality for Windows\n\n#![allow(bad_style)]\n\nuse prelude::v1::*;\nuse os::windows::prelude::*;\n\nuse error::Error as StdError;\nuse ffi::{OsString, OsStr};\nuse fmt;\nuse io;\nuse libc::{c_int, c_void};\nuse ops::Range;\nuse os::windows::ffi::EncodeWide;\nuse path::{self, PathBuf};\nuse ptr;\nuse slice;\nuse sys::{c, cvt};\nuse sys::handle::Handle;\n\nuse super::to_u16s;\n\npub fn errno() -> i32 {\n unsafe { c::GetLastError() as i32 }\n}\n\n\/\/\/ Gets a detailed string description for the given error number.\npub fn error_string(errnum: i32) -> String {\n \/\/ This value is calculated from the macro\n \/\/ MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)\n let langId = 0x0800 as c::DWORD;\n\n let mut buf = [0 as c::WCHAR; 2048];\n\n unsafe {\n let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |\n c::FORMAT_MESSAGE_IGNORE_INSERTS,\n ptr::null_mut(),\n errnum as c::DWORD,\n langId,\n buf.as_mut_ptr(),\n buf.len() as c::DWORD,\n ptr::null()) as usize;\n if res == 0 {\n \/\/ Sometimes FormatMessageW can fail e.g. system doesn't like langId,\n let fm_err = errno();\n return format!(\"OS Error {} (FormatMessageW() returned error {})\",\n errnum, fm_err);\n }\n\n match String::from_utf16(&buf[..res]) {\n Ok(mut msg) => {\n \/\/ Trim trailing CRLF inserted by FormatMessageW\n let len = msg.trim_right().len();\n msg.truncate(len);\n msg\n },\n Err(..) 
=> format!(\"OS Error {} (FormatMessageW() returned \\\n invalid UTF-16)\", errnum),\n }\n }\n}\n\npub struct Env {\n base: c::LPWCH,\n cur: c::LPWCH,\n}\n\nimpl Iterator for Env {\n type Item = (OsString, OsString);\n\n fn next(&mut self) -> Option<(OsString, OsString)> {\n loop {\n unsafe {\n if *self.cur == 0 { return None }\n let p = &*self.cur as *const u16;\n let mut len = 0;\n while *p.offset(len) != 0 {\n len += 1;\n }\n let s = slice::from_raw_parts(p, len as usize);\n self.cur = self.cur.offset(len + 1);\n\n \/\/ Windows allows environment variables to start with an equals\n \/\/ symbol (in any other position, this is the separator between\n \/\/ variable name and value). Since`s` has at least length 1 at\n \/\/ this point (because the empty string terminates the array of\n \/\/ environment variables), we can safely slice.\n let pos = match s[1..].iter().position(|&u| u == b'=' as u16).map(|p| p + 1) {\n Some(p) => p,\n None => continue,\n };\n return Some((\n OsStringExt::from_wide(&s[..pos]),\n OsStringExt::from_wide(&s[pos+1..]),\n ))\n }\n }\n }\n}\n\nimpl Drop for Env {\n fn drop(&mut self) {\n unsafe { c::FreeEnvironmentStringsW(self.base); }\n }\n}\n\npub fn env() -> Env {\n unsafe {\n let ch = c::GetEnvironmentStringsW();\n if ch as usize == 0 {\n panic!(\"failure getting env string from OS: {}\",\n io::Error::last_os_error());\n }\n Env { base: ch, cur: ch }\n }\n}\n\npub struct SplitPaths<'a> {\n data: EncodeWide<'a>,\n must_yield: bool,\n}\n\npub fn split_paths(unparsed: &OsStr) -> SplitPaths {\n SplitPaths {\n data: unparsed.encode_wide(),\n must_yield: true,\n }\n}\n\nimpl<'a> Iterator for SplitPaths<'a> {\n type Item = PathBuf;\n fn next(&mut self) -> Option<PathBuf> {\n \/\/ On Windows, the PATH environment variable is semicolon separated.\n \/\/ Double quotes are used as a way of introducing literal semicolons\n \/\/ (since c:\\some;dir is a valid Windows path). Double quotes are not\n \/\/ themselves permitted in path names, so there is no way to escape a\n \/\/ double quote. 
Quoted regions can appear in arbitrary locations, so\n \/\/\n \/\/ c:\\foo;c:\\som\"e;di\"r;c:\\bar\n \/\/\n \/\/ Should parse as [c:\\foo, c:\\some;dir, c:\\bar].\n \/\/\n \/\/ (The above is based on testing; there is no clear reference available\n \/\/ for the grammar.)\n\n\n let must_yield = self.must_yield;\n self.must_yield = false;\n\n let mut in_progress = Vec::new();\n let mut in_quote = false;\n for b in self.data.by_ref() {\n if b == '\"' as u16 {\n in_quote = !in_quote;\n } else if b == ';' as u16 && !in_quote {\n self.must_yield = true;\n break\n } else {\n in_progress.push(b)\n }\n }\n\n if !must_yield && in_progress.is_empty() {\n None\n } else {\n Some(super::os2path(&in_progress))\n }\n }\n}\n\n#[derive(Debug)]\npub struct JoinPathsError;\n\npub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>\n where I: Iterator<Item=T>, T: AsRef<OsStr>\n{\n let mut joined = Vec::new();\n let sep = b';' as u16;\n\n for (i, path) in paths.enumerate() {\n let path = path.as_ref();\n if i > 0 { joined.push(sep) }\n let v = path.encode_wide().collect::<Vec<u16>>();\n if v.contains(&(b'\"' as u16)) {\n return Err(JoinPathsError)\n } else if v.contains(&sep) {\n joined.push(b'\"' as u16);\n joined.extend_from_slice(&v[..]);\n joined.push(b'\"' as u16);\n } else {\n joined.extend_from_slice(&v[..]);\n }\n }\n\n Ok(OsStringExt::from_wide(&joined[..]))\n}\n\nimpl fmt::Display for JoinPathsError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"path segment contains `\\\"`\".fmt(f)\n }\n}\n\nimpl StdError for JoinPathsError {\n fn description(&self) -> &str { \"failed to join paths\" }\n}\n\npub fn current_exe() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetModuleFileNameW(ptr::null_mut(), buf, sz)\n }, super::os2path)\n}\n\npub fn getcwd() -> io::Result<PathBuf> {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetCurrentDirectoryW(sz, buf)\n }, super::os2path)\n}\n\npub fn chdir(p: &path::Path) -> io::Result<()> {\n let p: &OsStr = p.as_ref();\n let mut p = p.encode_wide().collect::<Vec<_>>();\n p.push(0);\n\n cvt(unsafe {\n c::SetCurrentDirectoryW(p.as_ptr())\n }).map(|_| ())\n}\n\npub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {\n let k = try!(to_u16s(k));\n let res = super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetEnvironmentVariableW(k.as_ptr(), buf, sz)\n }, |buf| {\n OsStringExt::from_wide(buf)\n });\n match res {\n Ok(value) => Ok(Some(value)),\n Err(e) => {\n if e.raw_os_error() == Some(c::ERROR_ENVVAR_NOT_FOUND as i32) {\n Ok(None)\n } else {\n Err(e)\n }\n }\n }\n}\n\npub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {\n let k = try!(to_u16s(k));\n let v = try!(to_u16s(v));\n\n cvt(unsafe {\n c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())\n }).map(|_| ())\n}\n\npub fn unsetenv(n: &OsStr) -> io::Result<()> {\n let v = try!(to_u16s(n));\n cvt(unsafe {\n c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())\n }).map(|_| ())\n}\n\npub struct Args {\n range: Range<isize>,\n cur: *mut *mut u16,\n}\n\nimpl Iterator for Args {\n type Item = OsString;\n fn next(&mut self) -> Option<OsString> {\n self.range.next().map(|i| unsafe {\n let ptr = *self.cur.offset(i);\n let mut len = 0;\n while *ptr.offset(len) != 0 { len += 1; }\n\n \/\/ Push it onto the list.\n let ptr = ptr as *const u16;\n let buf = slice::from_raw_parts(ptr, len as usize);\n OsStringExt::from_wide(buf)\n })\n }\n fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }\n}\n\nimpl ExactSizeIterator for Args {\n fn len(&self) -> usize { 
self.range.len() }\n}\n\nimpl Drop for Args {\n fn drop(&mut self) {\n \/\/ self.cur can be null if CommandLineToArgvW previously failed,\n \/\/ but LocalFree ignores NULL pointers\n unsafe { c::LocalFree(self.cur as *mut c_void); }\n }\n}\n\npub fn args() -> Args {\n unsafe {\n let mut nArgs: c_int = 0;\n let lpCmdLine = c::GetCommandLineW();\n let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs);\n\n \/\/ szArcList can be NULL if CommandLinToArgvW failed,\n \/\/ but in that case nArgs is 0 so we won't actually\n \/\/ try to read a null pointer\n Args { cur: szArgList, range: 0..(nArgs as isize) }\n }\n}\n\npub fn temp_dir() -> PathBuf {\n super::fill_utf16_buf(|buf, sz| unsafe {\n c::GetTempPathW(sz, buf)\n }, super::os2path).unwrap()\n}\n\npub fn home_dir() -> Option<PathBuf> {\n ::env::var_os(\"HOME\").or_else(|| {\n ::env::var_os(\"USERPROFILE\")\n }).map(PathBuf::from).or_else(|| unsafe {\n let me = c::GetCurrentProcess();\n let mut token = ptr::null_mut();\n if c::OpenProcessToken(me, c::TOKEN_READ, &mut token) == 0 {\n return None\n }\n let _handle = Handle::new(token);\n super::fill_utf16_buf(|buf, mut sz| {\n match c::GetUserProfileDirectoryW(token, buf, &mut sz) {\n 0 if c::GetLastError() != c::ERROR_INSUFFICIENT_BUFFER => 0,\n 0 => sz,\n _ => sz - 1, \/\/ sz includes the null terminator\n }\n }, super::os2path).ok()\n })\n}\n\npub fn exit(code: i32) -> ! {\n unsafe { c::ExitProcess(code as c::UINT) }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add xfail'd test for #6762.<commit_after>\/\/xfail-test\n\n\/\/ Creating a stack closure which references an owned pointer and then\n\/\/ transferring ownership of the owned box before invoking the stack\n\/\/ closure results in a crash.\n\nfn twice(x: ~uint) -> uint\n{\n *x * 2\n}\n\nfn invoke(f : &fn() -> uint)\n{\n f();\n}\n\nfn main()\n{\n let x : ~uint = ~9;\n let sq : &fn() -> uint = || { *x * *x };\n\n twice(x);\n invoke(sq);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>fix(version): `from_str` has been deprecated in favor of `parse()`<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"input\"]\n#![deny(missing_doc)]\n#![feature(globs)]\n#![feature(struct_variant)]\n\n\/\/! A flexible structure for user interactions\n\/\/! to be used in window frameworks and widgets libraries.\n\npub mod keyboard;\npub mod mouse;\n\n\/\/\/ Models different kinds of buttons.\n#[deriving(Clone)]\npub enum Button {\n \/\/\/ A keyboard button.\n Keyboard(keyboard::Key),\n \/\/\/ A mouse button.\n Mouse(mouse::Button),\n}\n\n\/\/\/ Models different kinds of motion.\n#[deriving(Clone)]\npub enum Motion {\n \/\/\/ x and y in window coordinates.\n MouseCursor(f64, f64),\n \/\/\/ x and y in relative coordinates.\n MouseRelative(f64, f64),\n \/\/\/ x and y in scroll ticks.\n MouseScroll(f64, f64),\n}\n\n\/\/\/ Models input events.\n#[deriving(Clone)]\npub enum InputEvent {\n \/\/\/ Pressed a button.\n Press(Button),\n \/\/\/ Released a button.\n Release(Button),\n \/\/\/ Moved mouse cursor.\n Move(Motion),\n \/\/\/ Text (usually from keyboard).\n Text(String),\n}\n\n<commit_msg>Derving Eq for Button<commit_after>#![crate_name = \"input\"]\n#![deny(missing_doc)]\n#![feature(globs)]\n#![feature(struct_variant)]\n\n\/\/! A flexible structure for user interactions\n\/\/! 
to be used in window frameworks and widgets libraries.\n\npub mod keyboard;\npub mod mouse;\n\n\/\/\/ Models different kinds of buttons.\n#[deriving(Clone, PartialEq, Eq)]\npub enum Button {\n \/\/\/ A keyboard button.\n Keyboard(keyboard::Key),\n \/\/\/ A mouse button.\n Mouse(mouse::Button),\n}\n\n\/\/\/ Models different kinds of motion.\n#[deriving(Clone)]\npub enum Motion {\n \/\/\/ x and y in window coordinates.\n MouseCursor(f64, f64),\n \/\/\/ x and y in relative coordinates.\n MouseRelative(f64, f64),\n \/\/\/ x and y in scroll ticks.\n MouseScroll(f64, f64),\n}\n\n\/\/\/ Models input events.\n#[deriving(Clone)]\npub enum InputEvent {\n \/\/\/ Pressed a button.\n Press(Button),\n \/\/\/ Released a button.\n Release(Button),\n \/\/\/ Moved mouse cursor.\n Move(Motion),\n \/\/\/ Text (usually from keyboard).\n Text(String),\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>version work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed entity allocation bug<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename Abs to Opaque<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add doc examples for `Url::has_authority`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>updated to use gclient<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test_heapsort<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cleanup: remove the I2CMaster trait<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add code to complete some of topics methods<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate libc;\n\nuse std::ffi::{OsStr, OsString};\nuse std::fmt::Debug;\nuse std::fs::File;\nuse std::io;\nuse std::mem;\nuse std::os::unix::io::{FromRawFd, IntoRawFd, RawFd};\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio, Output};\n\npub trait Expression: Clone + Debug {\n fn exec(&self, context: IoContext) -> Result;\n\n fn run(&self) -> Result {\n let context = IoContext {\n stdin: CloneableStdio::Inherit,\n stdout: CloneableStdio::Inherit,\n stderr: CloneableStdio::Inherit,\n };\n self.exec(context)\n }\n}\n\n#[derive(Debug, Clone)]\npub struct ArgvCommand {\n argv: Vec<OsString>,\n stdout: Option<PathBuf>,\n}\n\nimpl ArgvCommand {\n pub fn new<T: AsRef<OsStr>>(prog: T) -> ArgvCommand {\n ArgvCommand{\n argv: vec![prog.as_ref().to_owned()],\n stdout: None,\n }\n }\n\n pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {\n self.argv.push(arg.as_ref().to_owned());\n self\n }\n\n pub fn stdout<T: AsRef<Path>>(&mut self, path: T) -> &mut Self {\n self.stdout = Some(path.as_ref().to_owned());\n self\n }\n}\n\nimpl Expression for ArgvCommand {\n fn exec(&self, context: IoContext) -> Result {\n let IoContext{stdin, stdout, stderr} = context;\n \/\/ Create a Command and add the args.\n let mut command = Command::new(&self.argv[0]);\n command.args(&self.argv[1..]);\n command.stdin(stdin.to_stdio());\n command.stdout(stdout.to_stdio());\n command.stderr(stderr.to_stdio());\n if let Some(ref path) = self.stdout {\n let file = try!(File::create(path));\n command.stdout(unsafe {\n FromRawFd::from_raw_fd(file.into_raw_fd())\n });\n }\n let exit_status = try!(command.status());\n Ok(Output{\n stdout: Vec::new(),\n stderr: Vec::new(),\n status: exit_status,\n })\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Pipe {\n \/\/ TODO: Make this hold any Expression.\n left: ArgvCommand,\n right: ArgvCommand,\n}\n\nimpl Pipe {\n pub fn new(left: &ArgvCommand, right: 
&ArgvCommand) -> Pipe {\n Pipe{left: left.clone(), right: right.clone()}\n }\n}\n\nimpl Expression for Pipe {\n fn exec(&self, context: IoContext) -> Result {\n let IoContext{stdin, stdout, stderr} = context;\n let (read_pipe, write_pipe) = open_pipe();\n let left_context = IoContext{\n stdin: stdin,\n stdout: CloneableStdio::Fd(write_pipe),\n stderr: stderr.clone(),\n };\n let right_context = IoContext{\n stdin: CloneableStdio::Fd(read_pipe),\n stdout: stdout,\n stderr: stderr,\n };\n let left_clone = self.left.clone();\n let left_thread = std::thread::spawn(move || {\n left_clone.exec(left_context)\n });\n let right_result = self.right.exec(right_context);\n let left_result = left_thread.join().unwrap(); \/\/ TODO: handle errors here?\n match right_result {\n Err(_) => right_result,\n _ => left_result,\n }\n }\n}\n\npub type Result = std::result::Result<Output, Error>;\n\n#[derive(Debug)]\npub enum Error {\n Io(io::Error),\n Status(Output),\n}\n\nimpl From<io::Error> for Error {\n fn from(err: io::Error) -> Error {\n Error::Io(err)\n }\n}\n\n#[derive(Clone, Debug)]\npub struct IoContext {\n stdin: CloneableStdio,\n stdout: CloneableStdio,\n stderr: CloneableStdio,\n}\n\n#[derive(Clone, Debug)]\nenum CloneableStdio {\n Inherit,\n Fd(CloneableFd),\n}\n\nimpl CloneableStdio {\n fn to_stdio(self) -> Stdio {\n match self {\n CloneableStdio::Inherit => Stdio::inherit(),\n CloneableStdio::Fd(fd) => unsafe {\n FromRawFd::from_raw_fd(fd.into_raw_fd())\n },\n }\n }\n}\n\n#[derive(Debug)]\nstruct CloneableFd {\n \/\/ The struct *owns* this file descriptor, and will close it in drop().\n fd: RawFd,\n}\n\nimpl Clone for CloneableFd {\n fn clone(&self) -> Self {\n let new_fd = unsafe { libc::dup(self.fd) };\n assert!(new_fd >= 0);\n unsafe { FromRawFd::from_raw_fd(new_fd) }\n }\n}\n\nimpl FromRawFd for CloneableFd {\n unsafe fn from_raw_fd(fd: RawFd) -> Self {\n CloneableFd{fd: fd}\n }\n}\n\nimpl IntoRawFd for CloneableFd {\n fn into_raw_fd(self) -> RawFd {\n let fd = self.fd;\n mem::forget(self); \/\/ prevent drop() from closing the fd\n fd\n }\n}\n\nimpl Drop for CloneableFd {\n fn drop(&mut self) {\n let error = unsafe { libc::close(self.fd) };\n assert_eq!(error, 0);\n }\n}\n\n\/\/ (read, write)\n\/\/ TODO: error handling\nfn open_pipe() -> (CloneableFd, CloneableFd) {\n unsafe {\n let mut pipes = [0, 0];\n let error = libc::pipe(pipes.as_mut_ptr());\n assert_eq!(error, 0);\n \/\/ prevent child processes from inheriting these by default\n for fd in &pipes {\n let ret = libc::ioctl(*fd, libc::FIOCLEX);\n assert_eq!(ret, 0);\n }\n (FromRawFd::from_raw_fd(pipes[0]), FromRawFd::from_raw_fd(pipes[1]))\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::{ArgvCommand, Pipe, Expression};\n use std::fs::File;\n use std::io::prelude::*;\n use std::path::PathBuf;\n use std::process::Command;\n use std::str::from_utf8;\n\n fn mktemp() -> PathBuf {\n \/\/ TODO: use duct for this :)\n let output = Command::new(\"mktemp\").output().unwrap();\n let path: PathBuf = from_utf8(&output.stdout).unwrap().trim().into();\n println!(\"here's the path we're using: {:?}\", path);\n path\n }\n\n #[test]\n fn test_run() {\n let result = ArgvCommand::new(\"true\").arg(\"foo\").arg(\"bar\").run();\n assert!(result.unwrap().status.success());\n }\n\n #[test]\n fn test_stdout() {\n let path = mktemp();\n let result = ArgvCommand::new(\"echo\").arg(\"hi\").stdout(&path).run();\n assert!(result.unwrap().status.success());\n let mut contents = String::new();\n File::open(&path).unwrap().read_to_string(&mut contents).unwrap();\n 
assert_eq!(contents, \"hi\\n\");\n }\n\n #[test]\n fn test_pipe() {\n let mut left = ArgvCommand::new(\"echo\");\n left.arg(\"hi\");\n let mut right = ArgvCommand::new(\"sed\");\n right.arg(\"s\/i\/o\/\");\n let path = mktemp();\n right.stdout(&path);\n let pipe = Pipe::new(&left, &right);\n let result = pipe.run();\n assert!(result.unwrap().status.success());\n let mut contents = String::new();\n File::open(&path).unwrap().read_to_string(&mut contents).unwrap();\n assert_eq!(contents, \"ho\\n\");\n }\n}\n<commit_msg>define the .read() method to some approximation<commit_after>extern crate libc;\n\nuse std::ffi::{OsStr, OsString};\nuse std::fmt::Debug;\nuse std::fs::File;\nuse std::io;\nuse std::io::prelude::*;\nuse std::mem;\nuse std::os::unix::io::{FromRawFd, IntoRawFd, RawFd};\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio, Output, ExitStatus};\nuse std::thread::JoinHandle;\n\npub trait Expression: Clone + Debug {\n fn exec(&self, context: IoContext) -> io::Result<ExitStatus>;\n\n fn run(&self) -> Result<Output, Error> {\n let context = IoContext {\n stdin: CloneableStdio::Inherit,\n stdout: CloneableStdio::Inherit,\n stderr: CloneableStdio::Inherit,\n };\n let status = try!(self.exec(context));\n Ok(Output{\n status: status,\n stdout: Vec::new(),\n stderr: Vec::new(),\n })\n }\n\n fn read(&self) -> Result<String, Error> {\n let (stdout, stdout_reader) = pipe_with_reader_thread();\n let context = IoContext {\n stdin: CloneableStdio::Inherit,\n stdout: CloneableStdio::Fd(stdout),\n stderr: CloneableStdio::Inherit,\n };\n let status = try!(self.exec(context));\n let output = Output{\n status: status,\n stdout: try!(stdout_reader.join().unwrap()),\n stderr: Vec::new(),\n };\n if output.status.success() {\n \/\/ TODO: should only trim newlines\n Ok(try!(String::from_utf8(output.stdout)).trim_right().to_string())\n } else {\n Err(Error::Status(output))\n }\n }\n}\n\n#[derive(Debug, Clone)]\npub struct ArgvCommand {\n argv: Vec<OsString>,\n stdout: Option<PathBuf>,\n}\n\nimpl ArgvCommand {\n pub fn new<T: AsRef<OsStr>>(prog: T) -> ArgvCommand {\n ArgvCommand{\n argv: vec![prog.as_ref().to_owned()],\n stdout: None,\n }\n }\n\n pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {\n self.argv.push(arg.as_ref().to_owned());\n self\n }\n\n pub fn stdout<T: AsRef<Path>>(&mut self, path: T) -> &mut Self {\n self.stdout = Some(path.as_ref().to_owned());\n self\n }\n}\n\nimpl Expression for ArgvCommand {\n fn exec(&self, context: IoContext) -> io::Result<ExitStatus> {\n let IoContext{stdin, stdout, stderr} = context;\n \/\/ Create a Command and add the args.\n let mut command = Command::new(&self.argv[0]);\n command.args(&self.argv[1..]);\n command.stdin(stdin.to_stdio());\n command.stdout(stdout.to_stdio());\n command.stderr(stderr.to_stdio());\n if let Some(ref path) = self.stdout {\n let file = try!(File::create(path));\n command.stdout(unsafe {\n FromRawFd::from_raw_fd(file.into_raw_fd())\n });\n }\n Ok(try!(command.status()))\n }\n}\n\n#[derive(Debug, Clone)]\npub struct Pipe {\n \/\/ TODO: Make this hold any Expression.\n left: ArgvCommand,\n right: ArgvCommand,\n}\n\nimpl Pipe {\n pub fn new(left: &ArgvCommand, right: &ArgvCommand) -> Pipe {\n Pipe{left: left.clone(), right: right.clone()}\n }\n}\n\nimpl Expression for Pipe {\n fn exec(&self, context: IoContext) -> io::Result<ExitStatus> {\n let IoContext{stdin, stdout, stderr} = context;\n let (read_pipe, write_pipe) = open_pipe();\n let left_context = IoContext{\n stdin: stdin,\n stdout: 
CloneableStdio::Fd(write_pipe),\n stderr: stderr.clone(),\n };\n let right_context = IoContext{\n stdin: CloneableStdio::Fd(read_pipe),\n stdout: stdout,\n stderr: stderr,\n };\n let left_clone = self.left.clone();\n let left_thread = std::thread::spawn(move || {\n left_clone.exec(left_context)\n });\n let right_status = self.right.exec(right_context);\n let left_status = left_thread.join().unwrap(); \/\/ TODO: handle errors here?\n match right_status {\n Err(_) => right_status,\n _ => left_status,\n }\n }\n}\n\n#[derive(Debug)]\npub enum Error {\n Io(io::Error),\n Utf8(std::string::FromUtf8Error),\n Status(Output),\n}\n\nimpl From<io::Error> for Error {\n fn from(err: io::Error) -> Error {\n Error::Io(err)\n }\n}\n\nimpl From<std::string::FromUtf8Error> for Error {\n fn from(err: std::string::FromUtf8Error) -> Error {\n Error::Utf8(err)\n }\n}\n\n#[derive(Clone, Debug)]\npub struct IoContext {\n stdin: CloneableStdio,\n stdout: CloneableStdio,\n stderr: CloneableStdio,\n}\n\n#[derive(Clone, Debug)]\nenum CloneableStdio {\n Inherit,\n Fd(CloneableFd),\n}\n\nimpl CloneableStdio {\n fn to_stdio(self) -> Stdio {\n match self {\n CloneableStdio::Inherit => Stdio::inherit(),\n CloneableStdio::Fd(fd) => unsafe {\n FromRawFd::from_raw_fd(fd.into_raw_fd())\n },\n }\n }\n}\n\n#[derive(Debug)]\nstruct CloneableFd {\n \/\/ The struct *owns* this file descriptor, and will close it in drop().\n fd: RawFd,\n}\n\nimpl Clone for CloneableFd {\n fn clone(&self) -> Self {\n let new_fd = unsafe { libc::dup(self.fd) };\n assert!(new_fd >= 0);\n unsafe { FromRawFd::from_raw_fd(new_fd) }\n }\n}\n\nimpl FromRawFd for CloneableFd {\n unsafe fn from_raw_fd(fd: RawFd) -> Self {\n CloneableFd{fd: fd}\n }\n}\n\nimpl IntoRawFd for CloneableFd {\n fn into_raw_fd(self) -> RawFd {\n let fd = self.fd;\n mem::forget(self); \/\/ prevent drop() from closing the fd\n fd\n }\n}\n\nimpl Drop for CloneableFd {\n fn drop(&mut self) {\n let error = unsafe { libc::close(self.fd) };\n assert_eq!(error, 0);\n }\n}\n\n\/\/ (read, write)\n\/\/ TODO: error handling\nfn open_pipe() -> (CloneableFd, CloneableFd) {\n unsafe {\n let mut pipes = [0, 0];\n let error = libc::pipe(pipes.as_mut_ptr());\n assert_eq!(error, 0);\n \/\/ prevent child processes from inheriting these by default\n for fd in &pipes {\n let ret = libc::ioctl(*fd, libc::FIOCLEX);\n assert_eq!(ret, 0);\n }\n (FromRawFd::from_raw_fd(pipes[0]), FromRawFd::from_raw_fd(pipes[1]))\n }\n}\n\nfn pipe_with_reader_thread() -> (CloneableFd, JoinHandle<io::Result<Vec<u8>>>) {\n let (read_pipe, write_pipe) = open_pipe();\n let thread = std::thread::spawn(move || {\n let mut read_file = unsafe { File::from_raw_fd(read_pipe.into_raw_fd()) };\n let mut output = Vec::new();\n try!(read_file.read_to_end(&mut output));\n Ok(output)\n });\n (write_pipe, thread)\n}\n\n#[cfg(test)]\nmod test {\n use super::{ArgvCommand, Pipe, Expression};\n use std::fs::File;\n use std::io::prelude::*;\n use std::path::PathBuf;\n use std::process::Command;\n use std::str::from_utf8;\n\n fn mktemp() -> PathBuf {\n \/\/ TODO: use duct for this :)\n let output = Command::new(\"mktemp\").output().unwrap();\n let path: PathBuf = from_utf8(&output.stdout).unwrap().trim().into();\n println!(\"here's the path we're using: {:?}\", path);\n path\n }\n\n #[test]\n fn test_run() {\n let result = ArgvCommand::new(\"true\").arg(\"foo\").arg(\"bar\").run();\n assert!(result.unwrap().status.success());\n }\n\n #[test]\n fn test_read() {\n let output = ArgvCommand::new(\"echo\").arg(\"hi\").read().unwrap();\n 
assert_eq!(output, \"hi\");\n }\n\n #[test]\n fn test_stdout() {\n let path = mktemp();\n let result = ArgvCommand::new(\"echo\").arg(\"hi\").stdout(&path).run();\n assert!(result.unwrap().status.success());\n let mut contents = String::new();\n File::open(&path).unwrap().read_to_string(&mut contents).unwrap();\n assert_eq!(contents, \"hi\\n\");\n }\n\n #[test]\n fn test_pipe() {\n let mut left = ArgvCommand::new(\"echo\");\n left.arg(\"hi\");\n let mut right = ArgvCommand::new(\"sed\");\n right.arg(\"s\/i\/o\/\");\n let pipe = Pipe::new(&left, &right);\n let output = pipe.read().unwrap();\n assert_eq!(output, \"ho\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>connect to rewarders on all children<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Accept closure (not reference to closure)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>event.rs in lib.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Re-add accidentally deleted xcb.rs<commit_after>\/*\nCopyright (C) 2013 James Miller <james@aatch.net>\n\nPermission is hereby granted, free of charge, to any\nperson obtaining a copy of this software and associated\ndocumentation files (the \"Software\"), to deal in the\nSoftware without restriction, including without\nlimitation the rights to use, copy, modify, merge,\npublish, distribute, sublicense, and\/or sell copies of\nthe Software, and to permit persons to whom the Software\nis furnished to do so, subject to the following\nconditions:\n\nThe above copyright notice and this permission notice\nshall be included in all copies or substantial portions\nof the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\nANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\nPARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\n*\/\n\n#[link(name=\"xcb\",\n vers=\"0.3\",\n uuid=\"ef466d26-0620-4f5f-a1d2-1bb9c628e101\",\n url= \"https:\/\/github.com\/Aatch\/rust-xcb\")];\n\n#[license = \"MIT\"];\n#[crate_type=\"lib\"];\n\npub mod ffi {\n pub mod xproto;\n pub mod base;\n\n pub mod bigreq;\n pub mod composite;\n pub mod damage;\n pub mod dpms;\n pub mod dri2;\n\n \/\/pub mod ge; not sure about this one...\n\n pub mod glx;\n pub mod randr;\n pub mod record;\n pub mod render;\n pub mod screensaver;\n pub mod shape;\n pub mod shm;\n pub mod sync;\n pub mod xc_misc;\n pub mod xevie;\n pub mod xf86dri;\n \/\/pub mod xf86vidmode; Same with this one...\n pub mod xfixes;\n pub mod xinerama;\n pub mod xinput;\n \/\/pub mod xkb;\n pub mod xprint;\n\n #[cfg(enable_xselinux)]\n pub mod xselinux;\n pub mod xtest;\n pub mod xv;\n pub mod xvmc;\n}\n\npub mod base;\npub mod macro;\n\npub mod xproto;\n\npub mod xinerama;\n\npub mod bigreq;\npub mod composite;\npub mod xfixes;\npub mod render;\npub mod shape;\n\/*\npub mod damage;\npub mod dpms;\npub mod dri2;\n\n\/\/pub mod ge; not sure about this one...\n\npub mod glx;\npub mod randr;\npub mod record;\npub mod screensaver;\npub mod shm;\npub mod sync;\npub mod xc_misc;\npub mod xevie;\npub mod xf86dri;\n\/\/pub mod xf86vidmode; Same with this one...\npub mod xinput;\n\/\/pub mod xkb;\npub mod xprint;\n\n#[cfg(enable_xselinux)]\npub mod xselinux;\npub mod xtest;\npub mod xv;\npub mod xvmc;\n*\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>04 - declare first<commit_after>fn main() {\n \/\/ Declare a variable\n let a_variable;\n\n {\n let x = 2i;\n\n \/\/ Initialize the variable\n a_variable = x * x;\n }\n\n println!(\"a variable: {}\", a_variable);\n\n let another_variable;\n\n \/\/ Error! 
Use of uninitialized variable\n \/\/ println!(\"another variable: {}\", another_variable);\n\n another_variable = 1i;\n\n println!(\"another variable: {}\", another_variable);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add brainfuck interpreter<commit_after>use self::Command::*;\n\nstruct Tape {\n\tarray: [isize; 30000],\n\tpos: usize\n}\n\nimpl Tape {\n\tfn new() -> Tape {\n\t\tTape {\n\t\t\tarray: [0; 30000],\n\t\t\tpos: 0\n\t\t}\n\t}\n\n\tfn run(&mut self, commands: &[Command]) {\n\t\tfor command in commands.iter() {\n\t\t\tmatch *command {\n\t\t\t\tIncrementPos => self.pos += 1,\n\t\t\t\tDecrementPos => self.pos -= 1,\n\t\t\t\tIncrementByte => self.array[self.pos] += 1,\n\t\t\t\tDecrementByte => self.array[self.pos] -= 1,\n\t\t\t\tOutputByte => print!(\"{}\", self.array[self.pos] as u8 as char),\n\t\t\t\tInputByte => unimplemented!(),\n\t\t\t\tLoop(ref command_loop) => while self.array[self.pos] != 0 {\n\t\t\t\t\tself.run(&command_loop)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n\nenum Command {\n\tIncrementPos,\n\tDecrementPos,\n\tIncrementByte,\n\tDecrementByte,\n\tOutputByte,\n\tInputByte,\n\tLoop(Vec<Command>)\n}\n\ntrait ParseBrainfuck {\n\tfn parse(&self) -> Vec<Command>;\n\tfn _parse(&self) -> (Vec<Command>, u32);\n}\n\nimpl<'a> ParseBrainfuck for &'a [u8] {\n\tfn parse(&self) -> Vec<Command> {\n\t\tlet sanitized: Vec<u8> = (**self).iter().filter(|&c| b\"><+-.,[]\".contains(c)).map(|&c| c).collect();\n\t\t(&*sanitized)._parse().0\n\t}\n\n\tfn _parse(&self) -> (Vec<Command>, u32) {\n\t\tlet mut commands = Vec::new();\n\t\tlet len = self.len();\n\t\tlet mut index = 0;\n\n\t\twhile index < len {\n\t\t\tmatch self[index] {\n\t\t\t\tb'>' => commands.push(IncrementPos),\n\t\t\t\tb'<' => commands.push(DecrementPos),\n\t\t\t\tb'+' => commands.push(IncrementByte),\n\t\t\t\tb'-' => commands.push(DecrementByte),\n\t\t\t\tb'.' 
=> commands.push(OutputByte),\n\t\t\t\tb',' => commands.push(InputByte),\n\t\t\t\tb'[' => {\n\t\t\t\t\tindex += 1;\n\t\t\t\t\tlet (parsed, count) = (&self[index..])._parse();\n\t\t\t\t\tcommands.push(Loop(parsed));\n\t\t\t\t\tindex += count as usize;\n\t\t\t\t},\n\t\t\t\tb']' => break,\n\t\t\t\t_ => unreachable!()\n\t\t\t}\n\t\t\tindex += 1;\n\t\t}\n\n\t\t(commands, index as u32)\n\t}\n}\n\nfn main() {\n\tlet mut tape = Tape::new();\n\/\/\tlet helloworld = b\"++++++++++[>+++++++>++++++++++>+++>+<<<<-]>++.>+.+++++++..+++.>++.<<+++++++++++++++.>.+++.------.--------.>+.>.\";\n\/\/\tlet mandelbrot = b\"+++++++++++++[->++>>>+++++>++>+<<<<<<]>>>>>++++++>--->>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+[<<<<<<<<<]>>>>>>>>>-]+[>>>>>>>>[-]>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>[-]+<<<<<<<+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>>+>>>>>>>>>>>>>>>>>>>>>>>>>>>+<<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+[>>>>>>[>>>>>>>[-]>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>[-]+<<<<<<++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>+<<<<<<+++++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>>+<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[[-]>>>>>>[>>>>>>>[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>[-<<+>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>>[-<<<+>>>]<<<[->>>+>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<<]>>[->>>>>>>>>+<<<<<<<<<]<<+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+>>>>>>>>>>>>>>>>>>>>>+<<<[<<<<<<<<<]>>>>>>>>>[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]<<<<<<<[->+>>>-<<<<]>>>>>>>>>++++++++++++++++++++++++++>>[-<<<<+>>>>]<<<<[->>>>+<<[-]<<]>>[<<<<<<<+<[-<+>>>>+<<[-]]>[-<<[->+>>>-<<<<]>>>]>>>>>>>>>>>>>[>>[-]>[-]>[-]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>>>>>>[>>>>>[-<<<<+>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>[-<<<<<<<<<+>>>>>>>>>]>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>[-<<+>>]<<[->>+>+<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>[-<<<+>[<->-<<<<<<<+>>>>>>>]<[->+<]>>>]<<[->>+<<]<+<<<<<<<<<]>>>>>>>>>[>>>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>[-<<+>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]
<<<<<<<<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[-]>>>]<<<<<<<<<[<<<<<<<<<]>>>>+>[-<-<<<<+>>>>>]>[-<<<<<<[->>>>>+<++<<<<]>>>>>[-<<<<<+>>>>>]<->+>]<[->+<]<<<<<[->>>>>+<<<<<]>>>>>>[-]<<<<<<+>>>>[-<<<<->>>>]+<<<<[->>>>->>>>>[>>[-<<->>]+<<[->>->[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>+<]]+>>>[-<<<->>>]+<<<[->>>-<[-<<+>>]<<[->>+<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>>>>>>>]<<<<<<<<+<[>[->>>>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>>[->>>+<<<]<]>[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<<]>[->>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>>+<<<]<<<<<<<<<<<<]>>>>[-]<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>[-<->]<[->+<]>>>>>>>>]<<<<<<<<+<[>[->>>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>[->>>>+<<<<]>]<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>]<]>>[->>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>]>]<[->>>>+<<<<]<<<<<<<<<<<]>>>>>>+<<<<<<]]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>>+<<<<[->>>>-<<<<<<<<<<<<<<+>>>>>>>>>>>[->>>+<<<]<]>[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<<]>[->>>>+<<<[->>>-<<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>>+<<<]<<<<<<<<<<<<]]>[-]>>[-]>[-]>>>>>[>>[-]>[-]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-<<<<+>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>[-<<+>>]<<[->>+>+<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>[-<<<+>[<->-<<<<<<<+>>>>>>>]<[->+<]>>>]<<[->>+<<]<+<<<<<<<<<]>>>>>>>>>[>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>[-]>>>>+++++++++++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>>>[-<<<->>>]<<<[->>>+<<<]>>>>>>>>]<<<<<<<<+<[>[->+>[-<-<<<<<<<<<<+>>>>>>>>>>>>[-<<+>>]<]>[-<<-<<<<<<<<<<+>>>>>>>>>>>>]<<<]>>[-<+>>[-<<-<<<<<<<<<<+>>>>>>>>>>>>]<]>[-<<+>>]<<<<<<<<<<<<<]]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>>>>>>>]<<<<<<<<+<[>[->+>>[-<<-<<<<<<<<<<+>>>>>>>>>>>[-<+>]>]<[-<-<<<<<<<<<<+>>>>>>>>>>>]<<]>>>[-<<+>[-<-<<<<<<<<<<+>>>>>>>>>>>]>]<[-<+>]<<<<<<<<<<<<]>>>>>+<<<<<]>>>>>>>>>[>>>[-]>[-]>[-]>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>[-]>>>>>[>>>>>>>[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>+>[-<-<<<<+>>>>>]>>[-<<<<<<<[->>>>>+<++<<<<]>>>>>[-<<<<<+>>>>>]<->+>>]<<[->>+<<]<<<<<[->>>>>+<<<<<]+>>>>[-<<<<->>>>]+<<<<[->>>>->>>>>[>>>[-<<<->>>]+<<<[->>>-<[-<<+>>]<<[->>+<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>[-<<->>]+<<[->>->[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<
<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-<<<+>>>]<<<[->>>+>>>>>>[>+>[-<->]<[->+<]>>>>>>>>]<<<<<<<<+<[>[->>>>+<<[->>-<<<<<<<<<<<<<+>>>>>>>>>>[->>>+<<<]>]<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>]<]>>[->>+<<<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>]>]<[->>>+<<<]<<<<<<<<<<<]>>>>>[-]>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]]>>>>[-<<<<+>>>>]<<<<[->>>>+>>>>>[>+>>[-<<->>]<<[->>+<<]>>>>>>>>]<<<<<<<<+<[>[->>>>+<<<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>>[->>+<<]<]>[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<<]>[->>>+<<[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>+<<]<<<<<<<<<<<<]]>>>>[-]<<<<]>>>>[-<<<<+>>>>]<<<<[->>>>+>[-]>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<+<<<<<]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>+<<<[->>>-<<<<<<<<<<<<<+>>>>>>>>>>>[->>+<<]<]>[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<<]>[->>>+<<[->>-<<<<<<<<<<<<<+>>>>>>>>>>>]<]>[->>+<<]<<<<<<<<<<<<]]>>>>>>>>>[>>[-]>[-]>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>[-]>[-]>>>>>[>>>>>[-<<<<+>>>>]<<<<[->>>>+<<<+<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<+<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]+>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]>[-]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>[-<<<<+>>>>]<<<<[->>>>+<<<<<[->>[-<<+>>]<<[->>+>>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<]>[->>>>>>>>>+<<<<<<<<<]<+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>[>+>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>->>>>>[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<[->>>[-<<<+>>>]<<<[->>>+>+<<<<]+>>>>>>>>>]<<<<<<<<[<<<<<<<<<]]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>>[->>>>>>>>>+<<<<<<<<<]<<<<<<<<<<<]>>[->>>>>>>>>+<<<<<<<<<]<<+>>>>>>>>]<<<<<<<<<[>[-]<->>>>[-<<<<+>[<->-<<<<<<+>>>>>>]<[->+<]>>>>]<<<[->>>+<<<]<+<<<<<<<<<]>>>>>>>>>[>>>>[-<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>]>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>+++++++++++++++[[>>>>>>>>>]<<<<<<<<<-<<<<<<<<<[<<<<<<<<<]>>>>>>>>>-]+>>>>>>>>>>>>>>>>>>>>>+<<<[<<<<<<<<<]>>>>>>>>>[>>>[-<<<->>>]+<<<[->>>->[-<<<<+>>>>]<<<<[->>>>+<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>[-<<<<->>>>]+<<<<[->>>>-<[-<<<+>>>]<<<[->>>+<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>->>[-<<<<+>>>>]<<<<[->>>>+<<[-]<<]>>]<<+>>>>[-<<<<->>>>]+<<<<[->>>>-<<<<<<.>>]>>>>[-<<<<<<<.>>>>>>>]<<<[-]>[-]>[-]>[-]>[-]>[-]>>>[>[-]>[-]>[-]>[-]>[-]>[-]>>>]<<<<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-]>>>>]<<<<<<<<<[<<<<<<<<<]>+++++++++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>+>>>>>>>>>+<<<<<<<<<<<<<<[<<<<<<<<<]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+[-]>>[>>>>>>>>>]<<<<<<<<<[>>>>>>>[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<<<<<<[<<<<<<<<<]>>>>>>>[-]+>>>]<<<<<<<<<<]]>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+>>[>+>>>>[-<<<<->>>>]<<<<[->>>>+<<<<]>>>>>>>>]<<+<<<<<<<[>>>>>[->>+<<]<<<<<<<<<<<<<<]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[-]<->>>>>>>[-<<<<<<<+>[<->-<<<+>>>]<[->+<]>>>>>>>]<<<<<<[->>>>>>+<<<<<<]<+<<<<<<<<<]>>>>>>>-<<<<[-]+<<<]+>>>>>>>[-<<<<<<<->>>>>>>]+<<<<<<<[->>>>>>>->>[>>>>>[->>+<<]>>>>]<<<<<<<<<[>[-]<->>>>>>>[-<<<<<<<+>[<->-<<<+>>>]<[->+<]>>>>>>>]<<<<<<[->>>>>>+<<<<<<]<+<<<<<<<<<]>+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>+<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>[-<<<<<->>>>>]+<<<<<[->>>>>->>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>>>>[-<<<<<<<->>>>>>>]+<<<<<<<[->>>>>>>-<<[-<<<<<+>>>>>]<<<<<[->>>>>+<<<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<
<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>[-]<<<+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>-<<<<<[<<<<<<<<<]]>>>]<<<<.>>>>>>>>>>[>>>>>>[-]>>>]<<<<<<<<<[<<<<<<<<<]>++++++++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>+>>>>>>>>>+<<<<<<<<<<<<<<<[<<<<<<<<<]>>>>>>>>[-<<<<<<<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+[-]>[>>>>>>>>>]<<<<<<<<<[>>>>>>>>[-<<<<<<<+>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<<<[<<<<<<<<<]>>>>>>>>[-]+>>]<<<<<<<<<<]]>>>>>>>>[-<<<<<<<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+>[>+>>>>>[-<<<<<->>>>>]<<<<<[->>>>>+<<<<<]>>>>>>>>]<+<<<<<<<<[>>>>>>[->>+<<]<<<<<<<<<<<<<<<]>>>>>>>>>[>>>>>>>>>]<<<<<<<<<[>[-]<->>>>>>>>[-<<<<<<<<+>[<->-<<+>>]<[->+<]>>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<<]<+<<<<<<<<<]>>>>>>>>-<<<<<[-]+<<<]+>>>>>>>>[-<<<<<<<<->>>>>>>>]+<<<<<<<<[->>>>>>>>->[>>>>>>[->>+<<]>>>]<<<<<<<<<[>[-]<->>>>>>>>[-<<<<<<<<+>[<->-<<+>>]<[->+<]>>>>>>>>]<<<<<<<[->>>>>>>+<<<<<<<]<+<<<<<<<<<]>+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>+>>>>>>>>>>>>>>>>>>>>>>>>>>>+<<<<<<[<<<<<<<<<]>>>>>>>>>[>>>>>>[-<<<<<<->>>>>>]+<<<<<<[->>>>>>->>[-<<<<<<<<+>>>>>>>>]<<<<<<<<[->>>>>>>>+<<<<<<<<<<<<<<<<<[<<<<<<<<<]>>>>[-]+>>>>>[>>>>>>>>>]>+<]]+>>>>>>>>[-<<<<<<<<->>>>>>>>]+<<<<<<<<[->>>>>>>>-<<[-<<<<<<+>>>>>>]<<<<<<[->>>>>>+<<<<<<<<<<<<<<<[<<<<<<<<<]>>>[-]+>>>>>>[>>>>>>>>>]>[-]+<]]+>[-<[>>>>>>>>>]<<<<<<<<]>>>>>>>>]<<<<<<<<<[<<<<<<<<<]>>>>[-]<<<+++++[-[->>>>>>>>>+<<<<<<<<<]>>>>>>>>>]>>>>>->>>>>>>>>>>>>>>>>>>>>>>>>>>-<<<<<<[<<<<<<<<<]]>>>]\";\n\tlet fizzbuzz = b\"++++++++++[>++++++++++<-]>>++++++++++>->>>>>>>>>>>>>>>>-->+++++++[->++++++++++<]>[->+>+>+>+<<<<]+++>>+++>>>++++++++[-<++++<++++<++++>>>]+++++[-<++++<++++>>]>>-->++++++[->+++++++++++<]>[->+>+>+>+<<<<]+++++>>+>++++++>++++++>++++++++[-<++++<++++<++++>>>]++++++[-<+++<+++<+++>>>]>>-->---+[-<+]-<[+[->+]-<<->>>+>[-]++[-->++]-->+++[---++[--<++]---->>-<+>[++++[----<++++]--[>]++[-->++]--<]>++[--+[-<+]->>[-]+++++[---->++++]-->[->+<]>>[.>]++[-->++]]-->+++]---+[-<+]->>-[+>>>+[-<+]->>>++++++++++<<[->+>-[>+>>]>[+[-<+>]>+>>]<<<<<<]>>[-]>>>++++++++++<[->-[>+>>]>[+[-<+>]>+>>]<<<<<]>[-]>>[>++++++[-<++++++++>]<.<<+>+>[-]]<[<[->-<]++++++[->++++++++<]>.[-]]<<++++++[-<++++++++>]<.[-]<<[-<+>]+[-<+]->>]+[-]<<<.>>>+[-<+]-<<]\";\n\n\tlet program = fizzbuzz.as_ref().parse();\n\n\ttape.run(&program);\n}<|endoftext|>"} {"text":"<commit_before>use redox::Box;\nuse redox::{self, cmp, env};\nuse redox::collections::BTreeMap;\nuse redox::fs::{self, File};\nuse redox::io::{Read, Seek, SeekFrom};\nuse redox::time::{self, Duration};\nuse redox::vec::Vec;\nuse redox::string::{String, ToString};\n\nuse orbital::{event, BmpFile, Color, EventOption, MouseEvent, Window};\n\nstruct FileType {\n description: &'static str,\n icon: BmpFile,\n}\n\n\nimpl FileType {\n fn new(desc: &'static str, icon: &str) -> FileType {\n FileType { description: desc, icon: load_icon(icon) }\n }\n\n}\n\nstruct FileTypesInfo {\n file_types: BTreeMap<&'static str, FileType>,\n}\n\nimpl FileTypesInfo {\n pub fn new () -> FileTypesInfo {\n let mut file_types = BTreeMap::<&'static str, FileType>::new();\n file_types.insert(\"\/\",\n FileType::new(\"Folder\", \"inode-directory\"));\n file_types.insert(\"wav\",\n FileType::new(\"WAV audio\", \"audio-x-wav\"));\n file_types.insert(\"bin\",\n FileType::new(\"Executable\", \"application-x-executable\"));\n file_types.insert(\"bmp\",\n FileType::new(\"Bitmap Image\", \"image-x-generic\"));\n file_types.insert(\"rs\",\n FileType::new(\"Rust source code\", \"text-x-makefile\"));\n file_types.insert(\"crate\",\n FileType::new(\"Rust crate\", \"application-x-archive\"));\n file_types.insert(\"rlib\",\n FileType::new(\"Static 
Rust library\", \"application-x-object\"));\n file_types.insert(\"asm\",\n FileType::new(\"Assembly source\", \"text-x-makefile\"));\n file_types.insert(\"list\",\n FileType::new(\"Disassembly source\", \"text-x-makefile\"));\n file_types.insert(\"c\",\n FileType::new(\"C source code\", \"text-x-csrc\"));\n file_types.insert(\"cpp\",\n FileType::new(\"C++ source code\", \"text-x-c++src\"));\n file_types.insert(\"h\",\n FileType::new(\"C header\", \"text-x-chdr\"));\n file_types.insert(\"sh\",\n FileType::new(\"Shell script\", \"text-x-script\"));\n file_types.insert(\"lua\",\n FileType::new(\"Lua script\", \"text-x-script\"));\n file_types.insert(\"txt\",\n FileType::new(\"Plain text document\", \"text-x-generic\"));\n file_types.insert(\"md\",\n FileType::new(\"Markdown document\", \"text-x-generic\"));\n file_types.insert(\"toml\",\n FileType::new(\"TOML document\", \"text-x-generic\"));\n file_types.insert(\"json\",\n FileType::new(\"JSON document\", \"text-x-generic\"));\n file_types.insert(\"REDOX\",\n FileType::new(\"Redox package\", \"text-x-generic\"));\n file_types.insert(\"\",\n FileType::new(\"Unknown file\", \"unknown\"));\n FileTypesInfo { file_types: file_types }\n }\n\n pub fn description_for(&self, file_name: &str) -> String {\n if file_name.ends_with('\/') {\n self.file_types[\"\/\"].description.to_string()\n } else {\n let pos = file_name.rfind('.').unwrap_or(0) + 1;\n let ext = &file_name[pos..];\n if self.file_types.contains_key(ext) {\n self.file_types[ext].description.to_string()\n } else {\n self.file_types[\"\"].description.to_string()\n }\n }\n }\n\n pub fn icon_for(&self, file_name: &str) -> &BmpFile {\n if file_name.ends_with('\/') {\n &self.file_types[\"\/\"].icon\n } else {\n let pos = file_name.rfind('.').unwrap_or(0) + 1;\n let ext = &file_name[pos..];\n if self.file_types.contains_key(ext) {\n &self.file_types[ext].icon\n } else {\n &self.file_types[\"\"].icon\n }\n }\n }\n}\n\nenum FileManagerCommand {\n ChangeDir(String),\n Execute(String),\n Redraw,\n Quit,\n}\n\npub struct FileManager {\n file_types_info: FileTypesInfo,\n files: Vec<String>,\n file_sizes: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n window: Box<Window>,\n}\n\nfn load_icon(path: &str) -> BmpFile {\n let mut vec: Vec<u8> = Vec::new();\n if let Some(mut file) = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\")) {\n file.read_to_end(&mut vec);\n }\n BmpFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n file_types_info: FileTypesInfo::new(),\n files: Vec::new(),\n file_sizes: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n click_time: Duration::new(0, 0),\n window: Window::new(-1,-1,0,0,\"\").unwrap(),\n }\n }\n\n fn draw_content(&mut self) {\n self.window.set(Color::WHITE);\n\n let mut i = 0;\n let mut row = 0;\n let column = {\n let mut tmp = [0, 0];\n for string in self.files.iter() {\n if tmp[0] < string.len() {\n tmp[0] = string.len();\n }\n }\n\n tmp[0] += 1;\n\n for file_size in self.file_sizes.iter() {\n if tmp[1] < file_size.len() {\n tmp[1] = file_size.len();\n }\n }\n\n tmp[1] += tmp[0] + 1;\n tmp\n };\n for (file_name, file_size) in self.files.iter().zip(self.file_sizes.iter()) {\n if i == self.selected {\n let width = self.window.width();\n self.window.rect(0, 32 * row as isize, width, 32, Color::rgba(224, 224, 224, 255));\n }\n\n let icon = 
self.file_types_info.icon_for(&file_name);\n self.window.image(0,\n 32 * row as isize,\n icon.width(),\n icon.height(),\n icon.as_slice());\n\n let mut col = 0;\n for c in file_name.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n col = column[0];\n\n for c in file_size.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n col = column[1];\n\n let description = self.file_types_info.description_for(&file_name);\n for c in description.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n row += 1;\n i += 1;\n }\n\n self.window.sync();\n }\n\n fn set_path(&mut self, path: &str) {\n let mut width = [48, 48, 48];\n let mut height = 0;\n if let Some(readdir) = fs::read_dir(path) {\n self.files.clear();\n for entry in readdir {\n self.files.push(entry.path().to_string());\n self.file_sizes.push(\n \/\/ When the entry is a folder\n if entry.path().ends_with('\/') {\n let count = match fs::read_dir(&(path.to_string() + entry.path())) {\n Some(entry_readdir) => entry_readdir.count(),\n None => 0\n };\n\n if count == 1 {\n \"1 entry\".to_string()\n } else {\n format!(\"{} entries\", count)\n }\n } else {\n match File::open(&(path.to_string() + entry.path())) {\n Some(mut file) => match file.seek(SeekFrom::End(0)) {\n Some(size) => {\n if size >= 1_000_000_000 {\n format!(\"{:.1} GB\", (size as f64)\/1_000_000_000.0)\n } else if size >= 1_000_000 {\n format!(\"{:.1} MB\", (size as f64)\/1_000_000.0)\n } else if size >= 1_000 {\n format!(\"{:.1} KB\", (size as f64)\/1_000.0)\n } else {\n format!(\"{:.1} bytes\", size)\n }\n }\n None => \"Failed to seek\".to_string()\n },\n None => \"Failed to open\".to_string()\n }\n }\n );\n \/\/ Unwrapping the last file size will not panic since it has\n \/\/ been at least pushed once in the vector\n let description = self.file_types_info.description_for(entry.path());\n width[0] = cmp::max(width[0], 48 + (entry.path().len()) * 8);\n width[1] = cmp::max(width[1], 8 + (self.file_sizes.last().unwrap().len()) * 8);\n width[2] = cmp::max(width[2], 8 + (description.len()) * 8);\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n \/\/ TODO: HACK ALERT - should use resize whenver that gets added\n self.window = Window::new(self.window.x(),\n self.window.y(),\n width.iter().sum(),\n height,\n &path).unwrap();\n self.draw_content();\n }\n\n fn event_loop(&mut self) -> Option<FileManagerCommand> {\n let mut redraw = false;\n let mut command = None;\n if let Some(event) = self.window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n 
event::K_ESC => return Some(FileManagerCommand::Quit),\n event::K_HOME => self.selected = 0,\n event::K_UP => if self.selected > 0 {\n self.selected -= 1;\n redraw = true\n },\n event::K_END => self.selected = self.files.len() as isize - 1,\n event::K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Some(file) => {\n if file.ends_with('\/') {\n command = Some(FileManagerCommand::ChangeDir(file.clone()));\n } else {\n command = Some(FileManagerCommand::Execute(file.clone()));\n }\n },\n None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n if command.is_none() && redraw {\n command = Some(FileManagerCommand::Redraw);\n }\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n self.selected = i;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1; }\n\n \/\/Check for double click\n if mouse_event.left_button {\n let click_time = Duration::realtime();\n\n if click_time - self.click_time < Duration::new(0, 500 * time::NANOS_PER_MILLI)\n && self.last_mouse_event.x == mouse_event.x\n && self.last_mouse_event.y == mouse_event.y {\n if self.selected >= 0 && self.selected < self.files.len() as isize {\n if let Some(file) = self.files.get(self.selected as usize) {\n if file.ends_with('\/') {\n command = Some(FileManagerCommand::ChangeDir(file.clone()));\n } else {\n command = Some(FileManagerCommand::Execute(file.clone()));\n }\n }\n }\n self.click_time = Duration::new(0, 0);\n } else {\n self.click_time = click_time;\n }\n }\n self.last_mouse_event = mouse_event;\n }\n EventOption::Quit(quit_event) => command = Some(FileManagerCommand::Quit),\n _ => (),\n }\n }\n command\n }\n\n fn main(&mut self, path: &str) {\n let mut current_path = path.to_string();\n self.set_path(path);\n while true {\n match self.event_loop() {\n Some(event) => { \n match event {\n FileManagerCommand::ChangeDir(dir) => { \n current_path = current_path + &dir;\n self.set_path(¤t_path);\n },\n FileManagerCommand::Execute(cmd) => { File::exec(&(current_path.clone() + &cmd)); } ,\n FileManagerCommand::Redraw => (),\n FileManagerCommand::Quit => break,\n };\n self.draw_content();\n },\n None => (),\n };\n }\n\n }\n}\n\npub fn main() {\n match env::args().get(1) {\n Some(arg) => FileManager::new().main(arg),\n None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<commit_msg>Fix file manager redraw<commit_after>use redox::Box;\nuse redox::{self, cmp, env};\nuse redox::collections::BTreeMap;\nuse redox::fs::{self, File};\nuse redox::io::{Read, Seek, SeekFrom};\nuse redox::time::{self, Duration};\nuse redox::vec::Vec;\nuse redox::string::{String, ToString};\n\nuse orbital::{event, BmpFile, Color, EventOption, MouseEvent, Window};\n\nstruct FileType {\n description: &'static str,\n icon: BmpFile,\n}\n\n\nimpl FileType {\n fn 
new(desc: &'static str, icon: &str) -> FileType {\n FileType { description: desc, icon: load_icon(icon) }\n }\n\n}\n\nstruct FileTypesInfo {\n file_types: BTreeMap<&'static str, FileType>,\n}\n\nimpl FileTypesInfo {\n pub fn new () -> FileTypesInfo {\n let mut file_types = BTreeMap::<&'static str, FileType>::new();\n file_types.insert(\"\/\",\n FileType::new(\"Folder\", \"inode-directory\"));\n file_types.insert(\"wav\",\n FileType::new(\"WAV audio\", \"audio-x-wav\"));\n file_types.insert(\"bin\",\n FileType::new(\"Executable\", \"application-x-executable\"));\n file_types.insert(\"bmp\",\n FileType::new(\"Bitmap Image\", \"image-x-generic\"));\n file_types.insert(\"rs\",\n FileType::new(\"Rust source code\", \"text-x-makefile\"));\n file_types.insert(\"crate\",\n FileType::new(\"Rust crate\", \"application-x-archive\"));\n file_types.insert(\"rlib\",\n FileType::new(\"Static Rust library\", \"application-x-object\"));\n file_types.insert(\"asm\",\n FileType::new(\"Assembly source\", \"text-x-makefile\"));\n file_types.insert(\"list\",\n FileType::new(\"Disassembly source\", \"text-x-makefile\"));\n file_types.insert(\"c\",\n FileType::new(\"C source code\", \"text-x-csrc\"));\n file_types.insert(\"cpp\",\n FileType::new(\"C++ source code\", \"text-x-c++src\"));\n file_types.insert(\"h\",\n FileType::new(\"C header\", \"text-x-chdr\"));\n file_types.insert(\"sh\",\n FileType::new(\"Shell script\", \"text-x-script\"));\n file_types.insert(\"lua\",\n FileType::new(\"Lua script\", \"text-x-script\"));\n file_types.insert(\"txt\",\n FileType::new(\"Plain text document\", \"text-x-generic\"));\n file_types.insert(\"md\",\n FileType::new(\"Markdown document\", \"text-x-generic\"));\n file_types.insert(\"toml\",\n FileType::new(\"TOML document\", \"text-x-generic\"));\n file_types.insert(\"json\",\n FileType::new(\"JSON document\", \"text-x-generic\"));\n file_types.insert(\"REDOX\",\n FileType::new(\"Redox package\", \"text-x-generic\"));\n file_types.insert(\"\",\n FileType::new(\"Unknown file\", \"unknown\"));\n FileTypesInfo { file_types: file_types }\n }\n\n pub fn description_for(&self, file_name: &str) -> String {\n if file_name.ends_with('\/') {\n self.file_types[\"\/\"].description.to_string()\n } else {\n let pos = file_name.rfind('.').unwrap_or(0) + 1;\n let ext = &file_name[pos..];\n if self.file_types.contains_key(ext) {\n self.file_types[ext].description.to_string()\n } else {\n self.file_types[\"\"].description.to_string()\n }\n }\n }\n\n pub fn icon_for(&self, file_name: &str) -> &BmpFile {\n if file_name.ends_with('\/') {\n &self.file_types[\"\/\"].icon\n } else {\n let pos = file_name.rfind('.').unwrap_or(0) + 1;\n let ext = &file_name[pos..];\n if self.file_types.contains_key(ext) {\n &self.file_types[ext].icon\n } else {\n &self.file_types[\"\"].icon\n }\n }\n }\n}\n\nenum FileManagerCommand {\n ChangeDir(String),\n Execute(String),\n Redraw,\n Quit,\n}\n\npub struct FileManager {\n file_types_info: FileTypesInfo,\n files: Vec<String>,\n file_sizes: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n window: Box<Window>,\n}\n\nfn load_icon(path: &str) -> BmpFile {\n let mut vec: Vec<u8> = Vec::new();\n if let Some(mut file) = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\")) {\n file.read_to_end(&mut vec);\n }\n BmpFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n file_types_info: FileTypesInfo::new(),\n files: Vec::new(),\n file_sizes: Vec::new(),\n selected: -1,\n 
last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n },\n click_time: Duration::new(0, 0),\n window: Window::new(-1,-1,0,0,\"\").unwrap(),\n }\n }\n\n fn draw_content(&mut self) {\n self.window.set(Color::WHITE);\n\n let mut i = 0;\n let mut row = 0;\n let column = {\n let mut tmp = [0, 0];\n for string in self.files.iter() {\n if tmp[0] < string.len() {\n tmp[0] = string.len();\n }\n }\n\n tmp[0] += 1;\n\n for file_size in self.file_sizes.iter() {\n if tmp[1] < file_size.len() {\n tmp[1] = file_size.len();\n }\n }\n\n tmp[1] += tmp[0] + 1;\n tmp\n };\n for (file_name, file_size) in self.files.iter().zip(self.file_sizes.iter()) {\n if i == self.selected {\n let width = self.window.width();\n self.window.rect(0, 32 * row as isize, width, 32, Color::rgba(224, 224, 224, 255));\n }\n\n let icon = self.file_types_info.icon_for(&file_name);\n self.window.image(0,\n 32 * row as isize,\n icon.width(),\n icon.height(),\n icon.as_slice());\n\n let mut col = 0;\n for c in file_name.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n col = column[0];\n\n for c in file_size.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n col = column[1];\n\n let description = self.file_types_info.description_for(&file_name);\n for c in description.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n self.window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n Color::BLACK);\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n\n row += 1;\n i += 1;\n }\n\n self.window.sync();\n }\n\n fn set_path(&mut self, path: &str) {\n let mut width = [48, 48, 48];\n let mut height = 0;\n if let Some(readdir) = fs::read_dir(path) {\n self.files.clear();\n for entry in readdir {\n self.files.push(entry.path().to_string());\n self.file_sizes.push(\n \/\/ When the entry is a folder\n if entry.path().ends_with('\/') {\n let count = match fs::read_dir(&(path.to_string() + entry.path())) {\n Some(entry_readdir) => entry_readdir.count(),\n None => 0\n };\n\n if count == 1 {\n \"1 entry\".to_string()\n } else {\n format!(\"{} entries\", count)\n }\n } else {\n match File::open(&(path.to_string() + entry.path())) {\n Some(mut file) => match file.seek(SeekFrom::End(0)) {\n Some(size) => {\n if size >= 1_000_000_000 {\n format!(\"{:.1} GB\", (size as f64)\/1_000_000_000.0)\n } else if size >= 1_000_000 {\n format!(\"{:.1} MB\", (size as f64)\/1_000_000.0)\n } else if size >= 1_000 {\n format!(\"{:.1} KB\", (size as f64)\/1_000.0)\n } else {\n format!(\"{:.1} bytes\", size)\n }\n }\n None => \"Failed to seek\".to_string()\n },\n None => \"Failed to open\".to_string()\n }\n }\n );\n \/\/ Unwrapping the last file size will not panic since it has\n \/\/ been at least pushed once in the 
vector\n let description = self.file_types_info.description_for(entry.path());\n width[0] = cmp::max(width[0], 48 + (entry.path().len()) * 8);\n width[1] = cmp::max(width[1], 8 + (self.file_sizes.last().unwrap().len()) * 8);\n width[2] = cmp::max(width[2], 8 + (description.len()) * 8);\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n \/\/ TODO: HACK ALERT - should use resize whenver that gets added\n self.window = Window::new(self.window.x(),\n self.window.y(),\n width.iter().sum(),\n height,\n &path).unwrap();\n self.draw_content();\n }\n\n fn event_loop(&mut self) -> Option<FileManagerCommand> {\n let mut redraw = false;\n let mut command = None;\n if let Some(event) = self.window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n event::K_ESC => return Some(FileManagerCommand::Quit),\n event::K_HOME => self.selected = 0,\n event::K_UP => if self.selected > 0 {\n self.selected -= 1;\n redraw = true;\n },\n event::K_END => self.selected = self.files.len() as isize - 1,\n event::K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n redraw = true;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Some(file) => {\n if file.ends_with('\/') {\n command = Some(FileManagerCommand::ChangeDir(file.clone()));\n } else {\n command = Some(FileManagerCommand::Execute(file.clone()));\n }\n },\n None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n if command.is_none() && redraw {\n command = Some(FileManagerCommand::Redraw);\n }\n }\n }\n EventOption::Mouse(mouse_event) => {\n redraw = true;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n self.selected = i;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < self.window.width() \/ 8 && row < self.window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= self.window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1; }\n\n \/\/Check for double click\n if mouse_event.left_button {\n let click_time = Duration::realtime();\n\n if click_time - self.click_time < Duration::new(0, 500 * time::NANOS_PER_MILLI)\n && self.last_mouse_event.x == mouse_event.x\n && self.last_mouse_event.y == mouse_event.y {\n if self.selected >= 0 && self.selected < self.files.len() as isize {\n if let Some(file) = self.files.get(self.selected as usize) {\n if file.ends_with('\/') {\n command = Some(FileManagerCommand::ChangeDir(file.clone()));\n } else {\n command = Some(FileManagerCommand::Execute(file.clone()));\n }\n }\n }\n self.click_time = Duration::new(0, 0);\n } else {\n self.click_time = click_time;\n }\n }\n self.last_mouse_event = mouse_event;\n\n if command.is_none() && redraw {\n command = Some(FileManagerCommand::Redraw);\n }\n }\n EventOption::Quit(quit_event) => command = Some(FileManagerCommand::Quit),\n _ => (),\n }\n }\n command\n }\n\n fn main(&mut self, path: &str) {\n let mut current_path = path.to_string();\n self.set_path(path);\n loop {\n if let Some(event) = self.event_loop() {\n match event {\n 
FileManagerCommand::ChangeDir(dir) => {\n current_path = current_path + &dir;\n self.set_path(¤t_path);\n },\n FileManagerCommand::Execute(cmd) => { File::exec(&(current_path.clone() + &cmd)); } ,\n FileManagerCommand::Redraw => (),\n FileManagerCommand::Quit => break,\n };\n self.draw_content();\n }\n }\n\n }\n}\n\npub fn main() {\n match env::args().get(1) {\n Some(arg) => FileManager::new().main(arg),\n None => FileManager::new().main(\"file:\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n }\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col 
% 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 + 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n let click_time = Duration::realtime();\n if self.selected == i {\n if click_time - self.click_time < Duration::new(0, 500 * NANOS_PER_MILLI) {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n self.click_time = Duration::new(0, 0);\n }\n } else {\n self.selected = i;\n self.click_time = click_time;\n }\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n \n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<commit_msg>WIP double click<commit_after>use redox::*;\nuse redox::time::*;\n\npub struct FileManager {\n folder_icon: BMPFile,\n audio_icon: BMPFile,\n bin_icon: BMPFile,\n image_icon: BMPFile,\n source_icon: BMPFile,\n script_icon: BMPFile,\n text_icon: BMPFile,\n file_icon: BMPFile,\n files: Vec<String>,\n selected: isize,\n last_mouse_event: MouseEvent,\n click_time: Duration,\n}\n\nfn 
load_icon(path: &str) -> BMPFile {\n let mut resource = File::open(&(\"file:\/\/\/ui\/mimetypes\/\".to_string() + path + \".bmp\"));\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n BMPFile::from_data(&vec)\n}\n\nimpl FileManager {\n pub fn new() -> Self {\n FileManager {\n folder_icon: load_icon(\"inode-directory\"),\n audio_icon: load_icon(\"audio-x-wav\"),\n bin_icon: load_icon(\"application-x-executable\"),\n image_icon: load_icon(\"image-x-generic\"),\n source_icon: load_icon(\"text-x-makefile\"),\n script_icon: load_icon(\"text-x-script\"),\n text_icon: load_icon(\"text-x-generic\"),\n file_icon: load_icon(\"unknown\"),\n files: Vec::new(),\n selected: -1,\n last_mouse_event: MouseEvent {\n x: 0,\n y: 0,\n left_button: false,\n middle_button: false,\n right_button: false,\n }\n click_time: Duration::new(0, 0),\n }\n }\n\n fn draw_content(&mut self, window: &mut Window) {\n window.set([255, 255, 255, 255]);\n\n let mut i = 0;\n let mut row = 0;\n for string in self.files.iter() {\n if i == self.selected {\n let width = window.width();\n window.rect(0, 32 * row as isize, width, 32, [224, 224, 224, 255]);\n }\n\n if string.ends_with('\/') {\n window.image(0,\n 32 * row as isize,\n self.folder_icon.width(),\n self.folder_icon.height(),\n self.folder_icon.as_slice());\n } else if string.ends_with(\".wav\") {\n window.image(0,\n 32 * row as isize,\n self.audio_icon.width(),\n self.audio_icon.height(),\n self.audio_icon.as_slice());\n } else if string.ends_with(\".bin\") {\n window.image(0,\n 32 * row as isize,\n self.bin_icon.width(),\n self.bin_icon.height(),\n self.bin_icon.as_slice());\n } else if string.ends_with(\".bmp\") {\n window.image(0,\n 32 * row as isize,\n self.image_icon.width(),\n self.image_icon.height(),\n self.image_icon.as_slice());\n } else if string.ends_with(\".rs\") || string.ends_with(\".asm\") || string.ends_with(\".list\") {\n window.image(0,\n 32 * row as isize,\n self.source_icon.width(),\n self.source_icon.height(),\n self.source_icon.as_slice());\n } else if string.ends_with(\".sh\") || string.ends_with(\".lua\") {\n window.image(0,\n 32 * row as isize,\n self.script_icon.width(),\n self.script_icon.height(),\n self.script_icon.as_slice());\n } else if string.ends_with(\".md\") || string.ends_with(\".txt\") {\n window.image(0,\n 32 * row as isize,\n self.text_icon.width(),\n self.text_icon.height(),\n self.text_icon.as_slice());\n } else {\n window.image(0,\n 32 * row as isize,\n self.file_icon.width(),\n self.file_icon.height(),\n self.file_icon.as_slice());\n }\n\n let mut col = 0;\n for c in string.chars() {\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n window.char(8 * col as isize + 40,\n 32 * row as isize + 8,\n c,\n [0, 0, 0, 255]);\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n window.sync();\n }\n\n fn main(&mut self, path: &str) {\n let mut width = 160;\n let mut height = 0;\n {\n let mut resource = File::open(path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n for file in unsafe { String::from_utf8_unchecked(vec) }.split('\\n') {\n if width < 40 + (file.len() + 1) * 8 {\n width = 40 + (file.len() + 1) * 8;\n }\n self.files.push(file.to_string());\n }\n\n if height < self.files.len() * 32 {\n height = self.files.len() * 32;\n }\n }\n\n let mut window = Window::new((rand() % 400 + 50) as isize,\n (rand() % 300 
+ 50) as isize,\n width,\n height,\n &path);\n\n self.draw_content(&mut window);\n\n while let Option::Some(event) = window.poll() {\n match event.to_option() {\n EventOption::Key(key_event) => {\n if key_event.pressed {\n match key_event.scancode {\n K_ESC => break,\n K_HOME => self.selected = 0,\n K_UP => if self.selected > 0 {\n self.selected -= 1;\n },\n K_END => self.selected = self.files.len() as isize - 1,\n K_DOWN => if self.selected < self.files.len() as isize - 1 {\n self.selected += 1;\n },\n _ => match key_event.character {\n '\\0' => (),\n '\\n' => {\n if self.selected >= 0 &&\n self.selected < self.files.len() as isize {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n }\n }\n _ => {\n let mut i = 0;\n for file in self.files.iter() {\n if file.starts_with(key_event.character) {\n self.selected = i;\n break;\n }\n i += 1;\n }\n }\n },\n }\n\n self.draw_content(&mut window);\n }\n }\n EventOption::Mouse(mouse_event) => {\n let mut redraw = false;\n let mut i = 0;\n let mut row = 0;\n for file in self.files.iter() {\n let mut col = 0;\n for c in file.chars() {\n if mouse_event.y >= 32 * row as isize &&\n mouse_event.y < 32 * row as isize + 32 {\n let click_time = Duration::realtime();\n if self.selected == i {\n if click_time - self.click_time < Duration::new(0, 500 * NANOS_PER_MILLI) {\n match self.files.get(self.selected as usize) {\n Option::Some(file) => OpenEvent {\n url_string: path.to_string() + &file,\n }.trigger(),\n Option::None => (),\n }\n self.click_time = Duration::new(0, 0);\n }\n } else {\n self.selected = i;\n self.click_time = click_time;\n }\n redraw = true;\n }\n\n if c == '\\n' {\n col = 0;\n row += 1;\n } else if c == '\\t' {\n col += 8 - col % 8;\n } else {\n if col < window.width() \/ 8 && row < window.height() \/ 32 {\n col += 1;\n }\n }\n if col >= window.width() \/ 8 {\n col = 0;\n row += 1;\n }\n }\n row += 1;\n i += 1;\n }\n\n if redraw {\n self.draw_content(&mut window);\n }\n\n if mouse_event.left_button \n self.last_mouse_event = mouse_event;\n }\n _ => (),\n }\n }\n }\n}\n\npub fn main() {\n match args().get(1) {\n Option::Some(arg) => FileManager::new().main(arg),\n Option::None => FileManager::new().main(\"file:\/\/\/\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add another unwind test<commit_after>\/\/ error-pattern:fail\n\nfn a() { }\n\nfn b() { fail; }\n\nfn main() {\n let x = [0];\n a();\n let y = [0];\n b();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>style(Whitespace): Remove some whitespace<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added nbertagnolli's solution to the collatz 2-tag problem<commit_after>\/\/\/ This File takes a string of 'a' characters and performs a 2-tag\n\/\/\/ collatz reduction on it\n\n\n\/\/ Import necessary libraries\nuse std::io;\nuse std::collections::HashMap;\n\nfn main() {\n println!(\"Enter a string of n a's where n is the number you want to start from\");\n\n \/\/ Create a holder for the raw input\n let mut raw_input = String::new();\n\n \/\/ Read from standard in\n io::stdin()\n .read_line(&mut raw_input)\n .expect(\"Failed to read line\");\n\n \/\/ Print out the input that was entered\n \/\/ string.len() returns the number of bytes in the string\n \/\/ subtract 1 for the newline character at the end.\n raw_input.pop();\n let str_len = raw_input.chars().count();\n\n println!(\"Your string is of size {}\", str_len);\n \/\/ TODO:: Check for 
only a's\n \n \/\/ apply the tag system\n collatz(raw_input);\n}\n\n\n\/\/\/ Reduces a string of a's to a single a by using a 2-tag system and the collatz conjecture\n\/\/\/\n\/\/\/ # Args\n\/\/\/ s: String of n a's to be reduce\n\/\/\/\n\/\/\/ # Returns\n\/\/\/ Void: No return but it will print out each tag reduction as it reduces\nfn collatz(s: String) -> () {\n \/\/ Create a map for the two tag system\n \/\/ Unfortunately Rust does not have a map literal syntax you would need to create\n \/\/ a macro for this, see stackoverflow.com\/questions\/27582739\/how-do-i-create-a-hashmap-lieteral\n let mut tag_map: HashMap<char, String> = HashMap::new();\n tag_map.insert('a', \"bc\".to_string());\n tag_map.insert('b', \"a\".to_string());\n tag_map.insert('c', \"aaa\".to_string());\n\n \/\/ Calculate the initial string length subtract 1 for newline character\n let mut str_len = s.chars().count() - 1;\n \n \/\/ Create a mutable version of the string to work with\n let mut tag_str: String = s.chars().collect();\n println!(\"{}\", tag_str);\n \n \/\/ While the string is greater than 1 use the tag system rules to update and print the string\n while str_len > 1 {\n \/\/ Extract the first character\n let first: Option<char> = tag_str.chars().nth(0);\n \n \/\/ Remove the first two characters\n tag_str = tag_str.chars().skip(2).take(str_len).collect();\n\n \/\/ Append the new tag at the end of the string\n let add_str: &str = tag_map.get(&first.unwrap()).unwrap();\n \/\/ let poop = [tag_str, &*add_str].concat();\n \/\/println!(\"pattern is: {}\", add_str);\n tag_str.push_str(add_str);\n\n \/\/ Print the new string\n println!(\"{}\", tag_str);\n\n \/\/ Recalculate string length\n str_len = tag_str.chars().count();\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added tests for loading the 2.0 sample models<commit_after>\nextern crate gltf;\n\nuse gltf::Generic::V2;\n\n#[test]\nfn import_v2() {\n let assets = [\n \/\/ These are currently the only available 2.0 compliant sample assets\n \"glTF-Sample-Models\/2.0\/Corset\/glTF\/Corset.gltf\",\n \"glTF-Sample-Models\/2.0\/BoomBox\/glTF\/BoomBox.gltf\",\n \"glTF-Sample-Models\/2.0\/Lantern\/glTF\/Lantern.gltf\",\n ];\n for asset in &assets {\n match gltf::import(asset) {\n Ok(V2(_)) => {},\n Ok(_) => { println!(\"import() detected wrong version\"); panic!() },\n Err(err) => { println!(\"{:?}\", err); panic!() },\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for all.html<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"foo\"]\n\n\/\/ @has foo\/all.html '\/\/a[@href=\"struct.Struct.html\"]' 'Struct'\n\/\/ @has foo\/all.html '\/\/a[@href=\"enum.Enum.html\"]' 'Enum'\n\/\/ @has foo\/all.html '\/\/a[@href=\"union.Union.html\"]' 'Union'\n\/\/ @has foo\/all.html '\/\/a[@href=\"constant.CONST.html\"]' 'CONST'\n\/\/ @has foo\/all.html '\/\/a[@href=\"static.STATIC.html\"]' 'STATIC'\n\/\/ @has foo\/all.html '\/\/a[@href=\"fn.function.html\"]' 'function'\n\npub struct Struct;\npub enum Enum {\n X,\n Y,\n}\npub union Union {\n x: u32,\n}\npub const CONST: u32 = 0;\npub static STATIC: &str = \"baguette\";\npub fn function() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>command work for windows<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Some refactoring in parser::shell_expand<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix autobackup_interval<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue-60390<commit_after>\/\/ check-pass\n\/\/ compile-flags: --emit=mir,link\n\/\/ Regression test for #60390, this ICE requires `--emit=mir` flag.\n\nfn main() {\n enum Inner { Member(u32) };\n Inner::Member(0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cfail test for std::env::args()<commit_after>\/\/error-pattern: no mir for `std\n\nfn main() {\n let x = std::env::args();\n assert_eq!(x.count(), 1);\n}\n<|endoftext|>"} {"text":"<commit_before>use crate::util;\nuse core::cell::Cell;\nuse libtock_core::result::{ENOMEM, SUCCESS};\nuse libtock_core::{callback, syscalls};\n\nconst DRIVER_NUMBER: usize = 0x30003;\n\nmod command_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n pub const EMULATE: usize = 3;\n pub const CONFIGURE: usize = 4;\n pub const FRAMEDELAYMAX: usize = 5;\n}\n\nmod subscribe_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n pub const SELECT: usize = 3;\n}\n\nmod allow_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n}\n\npub enum TransOrRecvStatus {\n Error,\n InvalidBuffer,\n OOM,\n Success,\n}\n\npub struct NfcTag {}\n\nimpl NfcTag {\n pub fn enable_emulation() -> bool {\n NfcTag::emulate(true)\n }\n\n pub fn disable_emulation() -> bool {\n NfcTag::emulate(false)\n }\n\n fn emulate(enabled: bool) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::EMULATE, enabled as usize, 0).is_ok()\n }\n\n \/\/\/ Subscribe to the tag being SELECTED callback.\n pub fn selected() -> bool {\n let is_selected = Cell::new(false);\n let mut is_selected_alarm = || is_selected.set(true);\n let subscription = syscalls::subscribe::<callback::Identity0Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::SELECT,\n &mut is_selected_alarm,\n );\n if subscription.is_err() {\n return false;\n }\n\n util::yieldk_for(|| is_selected.get());\n true\n }\n\n \/\/\/ Configure the tag type command.\n pub fn configure(tag_type: u8) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::CONFIGURE, tag_type as usize, 0).is_ok()\n }\n\n \/\/\/ Set the maximum frame delay value to support transmission with the reader.\n pub fn set_framedelaymax(delay: u32) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::FRAMEDELAYMAX, delay as usize, 0).is_ok()\n }\n\n \/\/\/ 1. Share with the driver a buffer.\n \/\/\/ 2. Subscribe to having a successful receive callback.\n \/\/\/ 3. 
Issue the request for reception.\n pub fn receive(buf: &mut [u8; 256]) -> TransOrRecvStatus {\n let result = syscalls::allow(DRIVER_NUMBER, allow_nr::RECEIVE, buf);\n if result.is_err() {\n return TransOrRecvStatus::InvalidBuffer;\n }\n\n let done = Cell::new(false);\n let mut alarm = || done.set(true);\n let subscription = syscalls::subscribe::<callback::Identity0Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::RECEIVE,\n &mut alarm,\n );\n if subscription.is_err() {\n return TransOrRecvStatus::Error;\n }\n\n let result_code =\n unsafe { syscalls::raw::command(DRIVER_NUMBER, command_nr::RECEIVE, 0, 0) };\n match result_code {\n SUCCESS => (),\n ENOMEM => return TransOrRecvStatus::OOM,\n _ => return TransOrRecvStatus::Error,\n }\n\n util::yieldk_for(|| done.get());\n TransOrRecvStatus::Success\n }\n\n \/\/\/ 1. Share with the driver a buffer containing the app's reply.\n \/\/\/ 2. Subscribe to having a successful transmission callback.\n \/\/\/ 3. Issue the request for transmitting.\n pub fn transmit(buf: &mut [u8], amount: usize) -> TransOrRecvStatus {\n let result = syscalls::allow(DRIVER_NUMBER, allow_nr::TRANSMIT, buf);\n if result.is_err() {\n return TransOrRecvStatus::InvalidBuffer;\n }\n\n let done = Cell::new(false);\n let mut alarm = || done.set(true);\n let subscription = syscalls::subscribe::<callback::Identity0Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::TRANSMIT,\n &mut alarm,\n );\n if subscription.is_err() {\n return TransOrRecvStatus::Error;\n }\n\n let result_code = syscalls::command(DRIVER_NUMBER, command_nr::TRANSMIT, amount, 0);\n if result_code.is_err() {\n return TransOrRecvStatus::Error;\n }\n\n util::yieldk_for(|| done.get());\n TransOrRecvStatus::Success\n }\n}\n<commit_msg>Updated Functions Return Types<commit_after>use crate::result::TockError;\nuse crate::util;\nuse core::cell::Cell;\nuse core::mem;\nuse libtock_core::{callback, syscalls};\n\nconst DRIVER_NUMBER: usize = 0x30003;\n\nmod command_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n pub const EMULATE: usize = 3;\n pub const CONFIGURE: usize = 4;\n pub const FRAMEDELAYMAX: usize = 5;\n}\n\nmod subscribe_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n pub const SELECT: usize = 3;\n}\n\nmod allow_nr {\n pub const TRANSMIT: usize = 1;\n pub const RECEIVE: usize = 2;\n}\n\n#[allow(dead_code)]\npub struct RecvOp {\n pub result_code: usize,\n pub recv_amount: usize,\n}\n\npub struct NfcTag {}\n\nimpl NfcTag {\n pub fn enable_emulation() -> bool {\n NfcTag::emulate(true)\n }\n\n pub fn disable_emulation() -> bool {\n NfcTag::emulate(false)\n }\n\n fn emulate(enabled: bool) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::EMULATE, enabled as usize, 0).is_ok()\n }\n\n \/\/\/ Subscribe to the tag being SELECTED callback.\n pub fn selected() -> bool {\n let is_selected = Cell::new(false);\n let mut is_selected_alarm = || is_selected.set(true);\n let subscription = syscalls::subscribe::<callback::Identity0Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::SELECT,\n &mut is_selected_alarm,\n );\n if subscription.is_err() {\n return false;\n }\n\n util::yieldk_for(|| is_selected.get());\n true\n }\n\n \/\/\/ Configure the tag type command.\n pub fn configure(tag_type: u8) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::CONFIGURE, tag_type as usize, 0).is_ok()\n }\n\n \/\/\/ Set the maximum frame delay value to support transmission with the reader.\n pub fn set_framedelaymax(delay: u32) -> bool {\n syscalls::command(DRIVER_NUMBER, command_nr::FRAMEDELAYMAX, 
delay as usize, 0).is_ok()\n }\n\n \/\/\/ 1. Share with the driver a buffer.\n \/\/\/ 2. Subscribe to having a successful receive callback.\n \/\/\/ 3. Issue the request for reception.\n pub fn receive(buf: &mut [u8; 256]) -> Result<RecvOp, TockError> {\n let result = syscalls::allow(DRIVER_NUMBER, allow_nr::RECEIVE, buf)?;\n \/\/ set callback with 2 arguments, to receive ReturnCode and RX Amount\n let done = Cell::new(false);\n let result_code = Cell::new(None);\n let recv_amount = Cell::new(None);\n let mut callback = |result, amount| {\n result_code.set(Some(result));\n recv_amount.set(Some(amount));\n done.set(true)\n };\n let subscription = syscalls::subscribe::<callback::Identity2Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::RECEIVE,\n &mut callback,\n )?;\n syscalls::command(DRIVER_NUMBER, command_nr::RECEIVE, 0, 0)?;\n util::yieldk_for(|| done.get());\n mem::drop(subscription);\n mem::drop(result);\n Ok(RecvOp {\n result_code: result_code.get().unwrap(),\n recv_amount: recv_amount.get().unwrap(),\n })\n }\n\n \/\/\/ 1. Share with the driver a buffer containing the app's reply.\n \/\/\/ 2. Subscribe to having a successful transmission callback.\n \/\/\/ 3. Issue the request for transmitting.\n pub fn transmit(buf: &mut [u8], amount: usize) -> Result<usize, TockError> {\n let result = syscalls::allow(DRIVER_NUMBER, allow_nr::TRANSMIT, buf)?;\n \/\/ set callback with 1 argument, to receive ReturnCode\n let done = Cell::new(false);\n let result_code = Cell::new(None);\n let mut callback = |result| {\n result_code.set(Some(result));\n done.set(true)\n };\n let subscription = syscalls::subscribe::<callback::Identity1Consumer, _>(\n DRIVER_NUMBER,\n subscribe_nr::TRANSMIT,\n &mut callback,\n )?;\n syscalls::command(DRIVER_NUMBER, command_nr::TRANSMIT, amount, 0)?;\n util::yieldk_for(|| done.get());\n mem::drop(subscription);\n mem::drop(result);\n Ok(result_code.get().unwrap())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: libgit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test!!<commit_after><|endoftext|>"} {"text":"<commit_before>use rustc::hir::def_id::DefId;\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::ty::{self, Predicate, TyCtxt};\nuse std::borrow::Cow;\nuse syntax_pos::Span;\n\nmod helper {\n pub struct IsMinConstFn(());\n \/\/\/ This should only ever be used *once* and then passed around as a token.\n pub fn ensure_that_you_really_intended_to_create_an_instance_of_this() -> IsMinConstFn {\n IsMinConstFn(())\n }\n}\n\nuse self::helper::*;\n\ntype McfResult = Result<IsMinConstFn, (Span, Cow<'static, str>)>;\n\npub fn is_min_const_fn(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n def_id: DefId,\n mir: &'a Mir<'tcx>,\n) -> McfResult {\n let mut current = def_id;\n loop {\n let predicates = tcx.predicates_of(current);\n for predicate in &predicates.predicates {\n match predicate {\n | Predicate::RegionOutlives(_)\n | Predicate::TypeOutlives(_)\n | Predicate::WellFormed(_)\n | Predicate::ConstEvaluatable(..) => continue,\n | Predicate::ObjectSafe(_) => {\n bug!(\"object safe predicate on function: {:#?}\", predicate)\n }\n Predicate::ClosureKind(..) 
=> {\n bug!(\"closure kind predicate on function: {:#?}\", predicate)\n }\n Predicate::Subtype(_) => bug!(\"subtype predicate on function: {:#?}\", predicate),\n Predicate::Projection(_) => {\n let span = tcx.def_span(current);\n \/\/ we'll hit a `Predicate::Trait` later which will report an error\n tcx.sess\n .delay_span_bug(span, \"projection without trait bound\");\n continue;\n }\n Predicate::Trait(pred) => {\n if Some(pred.def_id()) == tcx.lang_items().sized_trait() {\n continue;\n }\n match pred.skip_binder().self_ty().sty {\n ty::Param(ref p) => {\n let generics = tcx.generics_of(current);\n let def = generics.type_param(p, tcx);\n let span = tcx.def_span(def.def_id);\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ));\n }\n \/\/ other kinds of bounds are either tautologies\n \/\/ or cause errors in other passes\n _ => continue,\n }\n }\n }\n }\n match predicates.parent {\n Some(parent) => current = parent,\n None => break,\n }\n }\n\n let mut token = ensure_that_you_really_intended_to_create_an_instance_of_this();\n\n for local in mir.vars_iter() {\n return Err((\n mir.local_decls[local].source_info.span,\n \"local variables in const fn are unstable\".into(),\n ));\n }\n for local in &mir.local_decls {\n token = check_ty(tcx, local.ty, local.source_info.span, token)?;\n }\n \/\/ impl trait is gone in MIR, so check the return type manually\n token = check_ty(\n tcx,\n tcx.fn_sig(def_id).output().skip_binder(),\n mir.local_decls.iter().next().unwrap().source_info.span,\n token,\n )?;\n\n for bb in mir.basic_blocks() {\n token = check_terminator(tcx, mir, bb.terminator(), token)?;\n for stmt in &bb.statements {\n token = check_statement(tcx, mir, stmt, token)?;\n }\n }\n Ok(token)\n}\n\nfn check_ty(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n ty: ty::Ty<'tcx>,\n span: Span,\n token: IsMinConstFn,\n) -> McfResult {\n for ty in ty.walk() {\n match ty.sty {\n ty::Ref(_, _, hir::Mutability::MutMutable) => return Err((\n span,\n \"mutable references in const fn are unstable\".into(),\n )),\n ty::Anon(..) => return Err((span, \"`impl Trait` in const fn is unstable\".into())),\n ty::FnPtr(..) 
=> {\n return Err((span, \"function pointers in const fn are unstable\".into()))\n }\n ty::Dynamic(preds, _) => {\n for pred in preds.iter() {\n match pred.skip_binder() {\n | ty::ExistentialPredicate::AutoTrait(_)\n | ty::ExistentialPredicate::Projection(_) => {\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ))\n }\n ty::ExistentialPredicate::Trait(trait_ref) => {\n if Some(trait_ref.def_id) != tcx.lang_items().sized_trait() {\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ));\n }\n }\n }\n }\n }\n _ => {}\n }\n }\n Ok(token)\n}\n\nfn check_rvalue(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n rvalue: &Rvalue<'tcx>,\n span: Span,\n token: IsMinConstFn,\n) -> McfResult {\n match rvalue {\n Rvalue::Repeat(operand, _) | Rvalue::Use(operand) => {\n check_operand(tcx, mir, operand, span, token)\n }\n Rvalue::Len(place) | Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) => {\n check_place(tcx, mir, place, span, token, PlaceMode::Read)\n }\n Rvalue::Cast(_, operand, cast_ty) => {\n use rustc::ty::cast::CastTy;\n let cast_in = CastTy::from_ty(operand.ty(mir, tcx)).expect(\"bad input type for cast\");\n let cast_out = CastTy::from_ty(cast_ty).expect(\"bad output type for cast\");\n match (cast_in, cast_out) {\n (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => Err((\n span,\n \"casting pointers to ints is unstable in const fn\".into(),\n )),\n (CastTy::RPtr(_), CastTy::Float) => bug!(),\n (CastTy::RPtr(_), CastTy::Int(_)) => bug!(),\n (CastTy::Ptr(_), CastTy::RPtr(_)) => bug!(),\n _ => check_operand(tcx, mir, operand, span, token),\n }\n }\n \/\/ binops are fine on integers\n Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => {\n let token = check_operand(tcx, mir, lhs, span, token)?;\n let token = check_operand(tcx, mir, rhs, span, token)?;\n let ty = lhs.ty(mir, tcx);\n if ty.is_integral() || ty.is_bool() || ty.is_char() {\n Ok(token)\n } else {\n Err((\n span,\n \"only int, `bool` and `char` operations are stable in const fn\".into(),\n ))\n }\n }\n \/\/ checked by regular const fn checks\n Rvalue::NullaryOp(..) => Ok(token),\n Rvalue::UnaryOp(_, operand) => {\n let ty = operand.ty(mir, tcx);\n if ty.is_integral() || ty.is_bool() {\n check_operand(tcx, mir, operand, span, token)\n } else {\n Err((\n span,\n \"only int and `bool` operations are stable in const fn\".into(),\n ))\n }\n }\n Rvalue::Aggregate(_, operands) => {\n let mut token = token;\n for operand in operands {\n token = check_operand(tcx, mir, operand, span, token)?;\n }\n Ok(token)\n }\n }\n}\n\nenum PlaceMode {\n Assign,\n Read,\n}\n\nfn check_statement(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n statement: &Statement<'tcx>,\n token: IsMinConstFn,\n) -> McfResult {\n let span = statement.source_info.span;\n match &statement.kind {\n StatementKind::Assign(place, rval) => {\n let token = check_place(tcx, mir, place, span, token, PlaceMode::Assign)?;\n check_rvalue(tcx, mir, rval, span, token)\n }\n\n StatementKind::ReadForMatch(_) => Err((span, \"match in const fn is unstable\".into())),\n\n \/\/ just an assignment\n StatementKind::SetDiscriminant { .. } => Ok(token),\n\n | StatementKind::InlineAsm { .. 
} => {\n Err((span, \"cannot use inline assembly in const fn\".into()))\n }\n\n \/\/ These are all NOPs\n | StatementKind::StorageLive(_)\n | StatementKind::StorageDead(_)\n | StatementKind::Validate(..)\n | StatementKind::EndRegion(_)\n | StatementKind::UserAssertTy(..)\n | StatementKind::Nop => Ok(token),\n }\n}\n\nfn check_operand(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n operand: &Operand<'tcx>,\n span: Span,\n token: IsMinConstFn,\n) -> McfResult {\n match operand {\n Operand::Move(place) | Operand::Copy(place) => {\n check_place(tcx, mir, place, span, token, PlaceMode::Read)\n }\n Operand::Constant(_) => Ok(token),\n }\n}\n\nfn check_place(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n place: &Place<'tcx>,\n span: Span,\n token: IsMinConstFn,\n mode: PlaceMode,\n) -> McfResult {\n match place {\n Place::Local(l) => match mode {\n PlaceMode::Assign => match mir.local_kind(*l) {\n LocalKind::Temp | LocalKind::ReturnPointer => Ok(token),\n LocalKind::Arg | LocalKind::Var => {\n Err((span, \"assignments in const fn are unstable\".into()))\n }\n },\n PlaceMode::Read => Ok(token),\n },\n \/\/ promoteds are always fine, they are essentially constants\n Place::Promoted(_) => Ok(token),\n Place::Static(_) => Err((span, \"cannot access `static` items in const fn\".into())),\n Place::Projection(proj) => {\n match proj.elem {\n | ProjectionElem::Deref | ProjectionElem::Field(..) | ProjectionElem::Index(_) => {\n check_place(tcx, mir, &proj.base, span, token, mode)\n }\n \/\/ slice patterns are unstable\n | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => {\n return Err((span, \"slice patterns in const fn are unstable\".into()))\n }\n | ProjectionElem::Downcast(..) => {\n Err((span, \"`match` or `if let` in `const fn` is unstable\".into()))\n }\n }\n }\n }\n}\n\nfn check_terminator(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n terminator: &Terminator<'tcx>,\n token: IsMinConstFn,\n) -> McfResult {\n let span = terminator.source_info.span;\n match &terminator.kind {\n | TerminatorKind::Goto { .. }\n | TerminatorKind::Return\n | TerminatorKind::Resume => Ok(token),\n\n TerminatorKind::Drop { location, .. } => {\n check_place(tcx, mir, location, span, token, PlaceMode::Read)\n }\n TerminatorKind::DropAndReplace { location, value, .. } => {\n let token = check_place(tcx, mir, location, span, token, PlaceMode::Read)?;\n check_operand(tcx, mir, value, span, token)\n },\n TerminatorKind::SwitchInt { .. } => Err((\n span,\n \"`if`, `match`, `&&` and `||` are not stable in const fn\".into(),\n )),\n | TerminatorKind::Abort | TerminatorKind::Unreachable => {\n Err((span, \"const fn with unreachable code is not stable\".into()))\n }\n | TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. 
} => {\n Err((span, \"const fn generators are unstable\".into()))\n }\n\n TerminatorKind::Call {\n func,\n args,\n destination: _,\n cleanup: _,\n } => {\n let fn_ty = func.ty(mir, tcx);\n if let ty::FnDef(def_id, _) = fn_ty.sty {\n if tcx.is_min_const_fn(def_id) {\n let mut token = check_operand(tcx, mir, func, span, token)?;\n\n for arg in args {\n token = check_operand(tcx, mir, arg, span, token)?;\n }\n Ok(token)\n } else {\n Err((\n span,\n \"can only call other `min_const_fn` within a `min_const_fn`\".into(),\n ))\n }\n } else {\n Err((span, \"can only call other const fns within const fn\".into()))\n }\n }\n\n TerminatorKind::Assert {\n cond,\n expected: _,\n msg: _,\n target: _,\n cleanup: _,\n } => check_operand(tcx, mir, cond, span, token),\n\n | TerminatorKind::FalseEdges { .. } | TerminatorKind::FalseUnwind { .. } => span_bug!(\n terminator.source_info.span,\n \"min_const_fn encountered `{:#?}`\",\n terminator\n ),\n }\n}\n<commit_msg>Get rid of token passing<commit_after>use rustc::hir::def_id::DefId;\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::ty::{self, Predicate, TyCtxt};\nuse std::borrow::Cow;\nuse syntax_pos::Span;\n\ntype McfResult = Result<(), (Span, Cow<'static, str>)>;\n\npub fn is_min_const_fn(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n def_id: DefId,\n mir: &'a Mir<'tcx>,\n) -> McfResult {\n let mut current = def_id;\n loop {\n let predicates = tcx.predicates_of(current);\n for predicate in &predicates.predicates {\n match predicate {\n | Predicate::RegionOutlives(_)\n | Predicate::TypeOutlives(_)\n | Predicate::WellFormed(_)\n | Predicate::ConstEvaluatable(..) => continue,\n | Predicate::ObjectSafe(_) => {\n bug!(\"object safe predicate on function: {:#?}\", predicate)\n }\n Predicate::ClosureKind(..) => {\n bug!(\"closure kind predicate on function: {:#?}\", predicate)\n }\n Predicate::Subtype(_) => bug!(\"subtype predicate on function: {:#?}\", predicate),\n Predicate::Projection(_) => {\n let span = tcx.def_span(current);\n \/\/ we'll hit a `Predicate::Trait` later which will report an error\n tcx.sess\n .delay_span_bug(span, \"projection without trait bound\");\n continue;\n }\n Predicate::Trait(pred) => {\n if Some(pred.def_id()) == tcx.lang_items().sized_trait() {\n continue;\n }\n match pred.skip_binder().self_ty().sty {\n ty::Param(ref p) => {\n let generics = tcx.generics_of(current);\n let def = generics.type_param(p, tcx);\n let span = tcx.def_span(def.def_id);\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ));\n }\n \/\/ other kinds of bounds are either tautologies\n \/\/ or cause errors in other passes\n _ => continue,\n }\n }\n }\n }\n match predicates.parent {\n Some(parent) => current = parent,\n None => break,\n }\n }\n\n for local in mir.vars_iter() {\n return Err((\n mir.local_decls[local].source_info.span,\n \"local variables in const fn are unstable\".into(),\n ));\n }\n for local in &mir.local_decls {\n check_ty(tcx, local.ty, local.source_info.span)?;\n }\n \/\/ impl trait is gone in MIR, so check the return type manually\n check_ty(\n tcx,\n tcx.fn_sig(def_id).output().skip_binder(),\n mir.local_decls.iter().next().unwrap().source_info.span,\n )?;\n\n for bb in mir.basic_blocks() {\n check_terminator(tcx, mir, bb.terminator())?;\n for stmt in &bb.statements {\n check_statement(tcx, mir, stmt)?;\n }\n }\n Ok(())\n}\n\nfn check_ty(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n ty: ty::Ty<'tcx>,\n span: Span,\n) -> McfResult {\n for ty in ty.walk() {\n match ty.sty {\n ty::Ref(_, _, 
hir::Mutability::MutMutable) => return Err((\n span,\n \"mutable references in const fn are unstable\".into(),\n )),\n ty::Anon(..) => return Err((span, \"`impl Trait` in const fn is unstable\".into())),\n ty::FnPtr(..) => {\n return Err((span, \"function pointers in const fn are unstable\".into()))\n }\n ty::Dynamic(preds, _) => {\n for pred in preds.iter() {\n match pred.skip_binder() {\n | ty::ExistentialPredicate::AutoTrait(_)\n | ty::ExistentialPredicate::Projection(_) => {\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ))\n }\n ty::ExistentialPredicate::Trait(trait_ref) => {\n if Some(trait_ref.def_id) != tcx.lang_items().sized_trait() {\n return Err((\n span,\n \"trait bounds other than `Sized` \\\n on const fn parameters are unstable\"\n .into(),\n ));\n }\n }\n }\n }\n }\n _ => {}\n }\n }\n Ok(())\n}\n\nfn check_rvalue(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n rvalue: &Rvalue<'tcx>,\n span: Span,\n) -> McfResult {\n match rvalue {\n Rvalue::Repeat(operand, _) | Rvalue::Use(operand) => {\n check_operand(tcx, mir, operand, span)\n }\n Rvalue::Len(place) | Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) => {\n check_place(tcx, mir, place, span, PlaceMode::Read)\n }\n Rvalue::Cast(_, operand, cast_ty) => {\n use rustc::ty::cast::CastTy;\n let cast_in = CastTy::from_ty(operand.ty(mir, tcx)).expect(\"bad input type for cast\");\n let cast_out = CastTy::from_ty(cast_ty).expect(\"bad output type for cast\");\n match (cast_in, cast_out) {\n (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => Err((\n span,\n \"casting pointers to ints is unstable in const fn\".into(),\n )),\n (CastTy::RPtr(_), CastTy::Float) => bug!(),\n (CastTy::RPtr(_), CastTy::Int(_)) => bug!(),\n (CastTy::Ptr(_), CastTy::RPtr(_)) => bug!(),\n _ => check_operand(tcx, mir, operand, span),\n }\n }\n \/\/ binops are fine on integers\n Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => {\n check_operand(tcx, mir, lhs, span)?;\n check_operand(tcx, mir, rhs, span)?;\n let ty = lhs.ty(mir, tcx);\n if ty.is_integral() || ty.is_bool() || ty.is_char() {\n Ok(())\n } else {\n Err((\n span,\n \"only int, `bool` and `char` operations are stable in const fn\".into(),\n ))\n }\n }\n \/\/ checked by regular const fn checks\n Rvalue::NullaryOp(..) => Ok(()),\n Rvalue::UnaryOp(_, operand) => {\n let ty = operand.ty(mir, tcx);\n if ty.is_integral() || ty.is_bool() {\n check_operand(tcx, mir, operand, span)\n } else {\n Err((\n span,\n \"only int and `bool` operations are stable in const fn\".into(),\n ))\n }\n }\n Rvalue::Aggregate(_, operands) => {\n for operand in operands {\n check_operand(tcx, mir, operand, span)?;\n }\n Ok(())\n }\n }\n}\n\nenum PlaceMode {\n Assign,\n Read,\n}\n\nfn check_statement(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n statement: &Statement<'tcx>,\n) -> McfResult {\n let span = statement.source_info.span;\n match &statement.kind {\n StatementKind::Assign(place, rval) => {\n check_place(tcx, mir, place, span, PlaceMode::Assign)?;\n check_rvalue(tcx, mir, rval, span)\n }\n\n StatementKind::ReadForMatch(_) => Err((span, \"match in const fn is unstable\".into())),\n\n \/\/ just an assignment\n StatementKind::SetDiscriminant { .. } => Ok(()),\n\n | StatementKind::InlineAsm { .. 
} => {\n Err((span, \"cannot use inline assembly in const fn\".into()))\n }\n\n \/\/ These are all NOPs\n | StatementKind::StorageLive(_)\n | StatementKind::StorageDead(_)\n | StatementKind::Validate(..)\n | StatementKind::EndRegion(_)\n | StatementKind::UserAssertTy(..)\n | StatementKind::Nop => Ok(()),\n }\n}\n\nfn check_operand(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n operand: &Operand<'tcx>,\n span: Span,\n) -> McfResult {\n match operand {\n Operand::Move(place) | Operand::Copy(place) => {\n check_place(tcx, mir, place, span, PlaceMode::Read)\n }\n Operand::Constant(_) => Ok(()),\n }\n}\n\nfn check_place(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n place: &Place<'tcx>,\n span: Span,\n mode: PlaceMode,\n) -> McfResult {\n match place {\n Place::Local(l) => match mode {\n PlaceMode::Assign => match mir.local_kind(*l) {\n LocalKind::Temp | LocalKind::ReturnPointer => Ok(()),\n LocalKind::Arg | LocalKind::Var => {\n Err((span, \"assignments in const fn are unstable\".into()))\n }\n },\n PlaceMode::Read => Ok(()),\n },\n \/\/ promoteds are always fine, they are essentially constants\n Place::Promoted(_) => Ok(()),\n Place::Static(_) => Err((span, \"cannot access `static` items in const fn\".into())),\n Place::Projection(proj) => {\n match proj.elem {\n | ProjectionElem::Deref | ProjectionElem::Field(..) | ProjectionElem::Index(_) => {\n check_place(tcx, mir, &proj.base, span, mode)\n }\n \/\/ slice patterns are unstable\n | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => {\n return Err((span, \"slice patterns in const fn are unstable\".into()))\n }\n | ProjectionElem::Downcast(..) => {\n Err((span, \"`match` or `if let` in `const fn` is unstable\".into()))\n }\n }\n }\n }\n}\n\nfn check_terminator(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n terminator: &Terminator<'tcx>,\n) -> McfResult {\n let span = terminator.source_info.span;\n match &terminator.kind {\n | TerminatorKind::Goto { .. }\n | TerminatorKind::Return\n | TerminatorKind::Resume => Ok(()),\n\n TerminatorKind::Drop { location, .. } => {\n check_place(tcx, mir, location, span, PlaceMode::Read)\n }\n TerminatorKind::DropAndReplace { location, value, .. } => {\n check_place(tcx, mir, location, span, PlaceMode::Read)?;\n check_operand(tcx, mir, value, span)\n },\n TerminatorKind::SwitchInt { .. } => Err((\n span,\n \"`if`, `match`, `&&` and `||` are not stable in const fn\".into(),\n )),\n | TerminatorKind::Abort | TerminatorKind::Unreachable => {\n Err((span, \"const fn with unreachable code is not stable\".into()))\n }\n | TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {\n Err((span, \"const fn generators are unstable\".into()))\n }\n\n TerminatorKind::Call {\n func,\n args,\n destination: _,\n cleanup: _,\n } => {\n let fn_ty = func.ty(mir, tcx);\n if let ty::FnDef(def_id, _) = fn_ty.sty {\n if tcx.is_min_const_fn(def_id) {\n check_operand(tcx, mir, func, span)?;\n\n for arg in args {\n check_operand(tcx, mir, arg, span)?;\n }\n Ok(())\n } else {\n Err((\n span,\n \"can only call other `min_const_fn` within a `min_const_fn`\".into(),\n ))\n }\n } else {\n Err((span, \"can only call other const fns within const fn\".into()))\n }\n }\n\n TerminatorKind::Assert {\n cond,\n expected: _,\n msg: _,\n target: _,\n cleanup: _,\n } => check_operand(tcx, mir, cond, span),\n\n | TerminatorKind::FalseEdges { .. } | TerminatorKind::FalseUnwind { .. 
} => span_bug!(\n terminator.source_info.span,\n \"min_const_fn encountered `{:#?}`\",\n terminator\n ),\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate toml;\n\nuse std::process::exit;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::{Path, PathBuf};\nuse std::collections::BTreeMap;\nuse std::str::FromStr;\nuse serde_json;\n\n#[derive(Debug, Clone)]\npub struct BookConfig {\n root: PathBuf,\n pub dest: PathBuf,\n pub src: PathBuf,\n pub theme_path: PathBuf,\n\n pub title: String,\n pub author: String,\n pub description: String,\n\n pub indent_spaces: i32,\n multilingual: bool,\n}\n\nimpl BookConfig {\n pub fn new(root: &Path) -> Self {\n BookConfig {\n root: root.to_owned(),\n dest: root.join(\"book\"),\n src: root.join(\"src\"),\n theme_path: root.join(\"theme\"),\n\n title: String::new(),\n author: String::new(),\n description: String::new(),\n\n indent_spaces: 4, \/\/ indentation used for SUMMARY.md\n multilingual: false,\n }\n }\n\n pub fn read_config(&mut self, root: &Path) -> &mut Self {\n\n debug!(\"[fn]: read_config\");\n\n let read_file = |path: PathBuf| -> String {\n let mut data = String::new();\n let mut f: File = match File::open(&path) {\n Ok(x) => x,\n Err(_) => {\n error!(\"[*]: Failed to open {:?}\", &path);\n exit(2);\n }\n };\n if let Err(_) = f.read_to_string(&mut data) {\n error!(\"[*]: Failed to read {:?}\", &path);\n exit(2);\n }\n data\n };\n\n \/\/ Read book.toml or book.json if exists\n\n if Path::new(root.join(\"book.toml\").as_os_str()).exists() {\n\n debug!(\"[*]: Reading config\");\n let data = read_file(root.join(\"book.toml\"));\n self.parse_from_toml_string(&data);\n\n } else if Path::new(root.join(\"book.json\").as_os_str()).exists() {\n\n debug!(\"[*]: Reading config\");\n let data = read_file(root.join(\"book.json\"));\n self.parse_from_json_string(&data);\n\n } else {\n debug!(\"[*]: No book.toml or book.json was found, using defaults.\");\n }\n\n self\n }\n\n pub fn parse_from_toml_string(&mut self, data: &String) -> &mut Self {\n\n let mut parser = toml::Parser::new(&data);\n\n let config = match parser.parse() {\n Some(x) => {x},\n None => {\n error!(\"[*]: Toml parse errors in book.toml: {:?}\", parser.errors);\n exit(2);\n }\n };\n\n self.parse_from_btreemap(&config);\n\n self\n }\n\n \/\/\/ Parses the string to JSON and converts it to BTreeMap<String, toml::Value>.\n pub fn parse_from_json_string(&mut self, data: &String) -> &mut Self {\n\n let c: serde_json::Value = match serde_json::from_str(&data) {\n Ok(x) => x,\n Err(e) => {\n error!(\"[*]: JSON parse errors in book.json: {:?}\", e);\n exit(2);\n }\n };\n\n let config = json_object_to_btreemap(&c.as_object().unwrap());\n self.parse_from_btreemap(&config);\n\n self\n }\n\n pub fn parse_from_btreemap(&mut self, config: &BTreeMap<String, toml::Value>) -> &mut Self {\n\n \/\/ Title, author, description\n if let Some(a) = config.get(\"title\") {\n self.title = a.to_string().replace(\"\\\"\", \"\");\n }\n if let Some(a) = config.get(\"author\") {\n self.author = a.to_string().replace(\"\\\"\", \"\");\n }\n if let Some(a) = config.get(\"description\") {\n self.description = a.to_string().replace(\"\\\"\", \"\");\n }\n\n \/\/ Destination folder\n if let Some(a) = config.get(\"dest\") {\n let mut dest = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n\n \/\/ If path is relative make it absolute from the parent directory of src\n if dest.is_relative() {\n dest = self.get_root().join(&dest);\n }\n self.set_dest(&dest);\n }\n\n \/\/ Source folder\n if let Some(a) = 
config.get(\"src\") {\n let mut src = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n if src.is_relative() {\n src = self.get_root().join(&src);\n }\n self.set_src(&src);\n }\n\n \/\/ Theme path folder\n if let Some(a) = config.get(\"theme_path\") {\n let mut theme_path = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n if theme_path.is_relative() {\n theme_path = self.get_root().join(&theme_path);\n }\n self.set_theme_path(&theme_path);\n }\n\n self\n }\n\n pub fn get_root(&self) -> &Path {\n &self.root\n }\n\n pub fn set_root(&mut self, root: &Path) -> &mut Self {\n self.root = root.to_owned();\n self\n }\n\n pub fn get_dest(&self) -> &Path {\n &self.dest\n }\n\n pub fn set_dest(&mut self, dest: &Path) -> &mut Self {\n self.dest = dest.to_owned();\n self\n }\n\n pub fn get_src(&self) -> &Path {\n &self.src\n }\n\n pub fn set_src(&mut self, src: &Path) -> &mut Self {\n self.src = src.to_owned();\n self\n }\n\n pub fn get_theme_path(&self) -> &Path {\n &self.theme_path\n }\n\n pub fn set_theme_path(&mut self, theme_path: &Path) -> &mut Self {\n self.theme_path = theme_path.to_owned();\n self\n }\n}\n\npub fn json_object_to_btreemap(json: &serde_json::Map<String, serde_json::Value>) -> BTreeMap<String, toml::Value> {\n let mut config: BTreeMap<String, toml::Value> = BTreeMap::new();\n\n for (key, value) in json.iter() {\n config.insert(\n String::from_str(key).unwrap(),\n json_value_to_toml_value(value.to_owned())\n );\n }\n\n config\n}\n\npub fn json_value_to_toml_value(json: serde_json::Value) -> toml::Value {\n match json {\n serde_json::Value::Null => toml::Value::String(\"\".to_string()),\n serde_json::Value::Bool(x) => toml::Value::Boolean(x),\n serde_json::Value::I64(x) => toml::Value::Integer(x),\n serde_json::Value::U64(x) => toml::Value::Integer(x as i64),\n serde_json::Value::F64(x) => toml::Value::Float(x),\n serde_json::Value::String(x) => toml::Value::String(x),\n serde_json::Value::Array(x) => {\n toml::Value::Array(x.iter().map(|v| json_value_to_toml_value(v.to_owned())).collect())\n },\n serde_json::Value::Object(x) => {\n toml::Value::Table(json_object_to_btreemap(&x))\n },\n }\n}\n<commit_msg>Clean up some Path code in bookconfig<commit_after>extern crate toml;\n\nuse std::process::exit;\nuse std::fs::File;\nuse std::io::Read;\nuse std::path::{Path, PathBuf};\nuse std::collections::BTreeMap;\nuse std::str::FromStr;\nuse serde_json;\n\n#[derive(Debug, Clone)]\npub struct BookConfig {\n root: PathBuf,\n pub dest: PathBuf,\n pub src: PathBuf,\n pub theme_path: PathBuf,\n\n pub title: String,\n pub author: String,\n pub description: String,\n\n pub indent_spaces: i32,\n multilingual: bool,\n}\n\nimpl BookConfig {\n pub fn new(root: &Path) -> Self {\n BookConfig {\n root: root.to_owned(),\n dest: root.join(\"book\"),\n src: root.join(\"src\"),\n theme_path: root.join(\"theme\"),\n\n title: String::new(),\n author: String::new(),\n description: String::new(),\n\n indent_spaces: 4, \/\/ indentation used for SUMMARY.md\n multilingual: false,\n }\n }\n\n pub fn read_config(&mut self, root: &Path) -> &mut Self {\n\n debug!(\"[fn]: read_config\");\n\n let read_file = |path: PathBuf| -> String {\n let mut data = String::new();\n let mut f: File = match File::open(&path) {\n Ok(x) => x,\n Err(_) => {\n error!(\"[*]: Failed to open {:?}\", &path);\n exit(2);\n }\n };\n if let Err(_) = f.read_to_string(&mut data) {\n error!(\"[*]: Failed to read {:?}\", &path);\n exit(2);\n }\n data\n };\n\n \/\/ Read book.toml or book.json if exists\n\n if 
root.join(\"book.toml\").exists() {\n\n debug!(\"[*]: Reading config\");\n let data = read_file(root.join(\"book.toml\"));\n self.parse_from_toml_string(&data);\n\n } else if root.join(\"book.json\").exists() {\n\n debug!(\"[*]: Reading config\");\n let data = read_file(root.join(\"book.json\"));\n self.parse_from_json_string(&data);\n\n } else {\n debug!(\"[*]: No book.toml or book.json was found, using defaults.\");\n }\n\n self\n }\n\n pub fn parse_from_toml_string(&mut self, data: &String) -> &mut Self {\n\n let mut parser = toml::Parser::new(&data);\n\n let config = match parser.parse() {\n Some(x) => {x},\n None => {\n error!(\"[*]: Toml parse errors in book.toml: {:?}\", parser.errors);\n exit(2);\n }\n };\n\n self.parse_from_btreemap(&config);\n\n self\n }\n\n \/\/\/ Parses the string to JSON and converts it to BTreeMap<String, toml::Value>.\n pub fn parse_from_json_string(&mut self, data: &String) -> &mut Self {\n\n let c: serde_json::Value = match serde_json::from_str(&data) {\n Ok(x) => x,\n Err(e) => {\n error!(\"[*]: JSON parse errors in book.json: {:?}\", e);\n exit(2);\n }\n };\n\n let config = json_object_to_btreemap(&c.as_object().unwrap());\n self.parse_from_btreemap(&config);\n\n self\n }\n\n pub fn parse_from_btreemap(&mut self, config: &BTreeMap<String, toml::Value>) -> &mut Self {\n\n \/\/ Title, author, description\n if let Some(a) = config.get(\"title\") {\n self.title = a.to_string().replace(\"\\\"\", \"\");\n }\n if let Some(a) = config.get(\"author\") {\n self.author = a.to_string().replace(\"\\\"\", \"\");\n }\n if let Some(a) = config.get(\"description\") {\n self.description = a.to_string().replace(\"\\\"\", \"\");\n }\n\n \/\/ Destination folder\n if let Some(a) = config.get(\"dest\") {\n let mut dest = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n\n \/\/ If path is relative make it absolute from the parent directory of src\n if dest.is_relative() {\n dest = self.get_root().join(&dest);\n }\n self.set_dest(&dest);\n }\n\n \/\/ Source folder\n if let Some(a) = config.get(\"src\") {\n let mut src = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n if src.is_relative() {\n src = self.get_root().join(&src);\n }\n self.set_src(&src);\n }\n\n \/\/ Theme path folder\n if let Some(a) = config.get(\"theme_path\") {\n let mut theme_path = PathBuf::from(&a.to_string().replace(\"\\\"\", \"\"));\n if theme_path.is_relative() {\n theme_path = self.get_root().join(&theme_path);\n }\n self.set_theme_path(&theme_path);\n }\n\n self\n }\n\n pub fn get_root(&self) -> &Path {\n &self.root\n }\n\n pub fn set_root(&mut self, root: &Path) -> &mut Self {\n self.root = root.to_owned();\n self\n }\n\n pub fn get_dest(&self) -> &Path {\n &self.dest\n }\n\n pub fn set_dest(&mut self, dest: &Path) -> &mut Self {\n self.dest = dest.to_owned();\n self\n }\n\n pub fn get_src(&self) -> &Path {\n &self.src\n }\n\n pub fn set_src(&mut self, src: &Path) -> &mut Self {\n self.src = src.to_owned();\n self\n }\n\n pub fn get_theme_path(&self) -> &Path {\n &self.theme_path\n }\n\n pub fn set_theme_path(&mut self, theme_path: &Path) -> &mut Self {\n self.theme_path = theme_path.to_owned();\n self\n }\n}\n\npub fn json_object_to_btreemap(json: &serde_json::Map<String, serde_json::Value>) -> BTreeMap<String, toml::Value> {\n let mut config: BTreeMap<String, toml::Value> = BTreeMap::new();\n\n for (key, value) in json.iter() {\n config.insert(\n String::from_str(key).unwrap(),\n json_value_to_toml_value(value.to_owned())\n );\n }\n\n config\n}\n\npub fn json_value_to_toml_value(json: 
serde_json::Value) -> toml::Value {\n match json {\n serde_json::Value::Null => toml::Value::String(\"\".to_string()),\n serde_json::Value::Bool(x) => toml::Value::Boolean(x),\n serde_json::Value::I64(x) => toml::Value::Integer(x),\n serde_json::Value::U64(x) => toml::Value::Integer(x as i64),\n serde_json::Value::F64(x) => toml::Value::Float(x),\n serde_json::Value::String(x) => toml::Value::String(x),\n serde_json::Value::Array(x) => {\n toml::Value::Array(x.iter().map(|v| json_value_to_toml_value(v.to_owned())).collect())\n },\n serde_json::Value::Object(x) => {\n toml::Value::Table(json_object_to_btreemap(&x))\n },\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ripped ISR from lpc17xx<commit_after>\/\/ Zinc, the bare metal stack for rust.\n\/\/ Copyright 2014 Vladimir \"farcaller\" Pouzanov <farcaller@gmail.com>\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse core::option::Option;\nuse core::option::Option::{Some, None};\n\nextern {\n fn main();\n fn __STACK_BASE();\n\n fn isr_nmi();\n fn isr_hardfault();\n fn isr_mmfault();\n fn isr_busfault();\n fn isr_usagefault();\n\n fn isr_svcall();\n fn isr_pendsv();\n fn isr_systick();\n\n fn isr_debugmon();\n fn isr_reserved_1();\n}\n\n#[no_mangle]\npub unsafe extern fn isr_handler_wrapper() {\n asm!(\".weak isr_nmi, isr_hardfault, isr_mmfault, isr_busfault\n .weak isr_usagefault, isr_svcall, isr_pendsv, isr_systick\n .weak isr_debugmon\n .weak isr_reserved_1\n\n .thumb_func\n isr_nmi:\n\n .thumb_func\n isr_hardfault:\n\n .thumb_func\n isr_mmfault:\n\n .thumb_func\n isr_busfault:\n\n .thumb_func\n isr_usagefault:\n\n .thumb_func\n isr_svcall:\n\n .thumb_func\n isr_pendsv:\n\n .thumb_func\n isr_systick:\n\n b isr_default_fault\n\n .thumb_func\n isr_default_fault:\n mrs r0, psp\n mrs r1, msp\n ldr r2, [r0, 0x18]\n ldr r3, [r1, 0x18]\n bkpt\" :::: \"volatile\");\n}\n\n#[allow(non_upper_case_globals)]\nconst ISRCount: usize = 16;\n\n#[link_section=\".isr_vector\"]\n#[allow(non_upper_case_globals)]\n#[no_mangle]\npub static ISRVectors: [Option<unsafe extern fn()>; ISRCount] = [\n Some(__STACK_BASE),\n Some(main), \/\/ Reset\n Some(isr_nmi), \/\/ NMI\n Some(isr_hardfault), \/\/ Hard Fault\n Some(isr_mmfault), \/\/ CM3 Memory Management Fault\n Some(isr_busfault), \/\/ CM3 Bus Fault\n Some(isr_usagefault), \/\/ CM3 Usage Fault\n Some(isr_reserved_1), \/\/ Reserved - Used as NXP Checksum\n None, \/\/ Reserved\n None, \/\/ Reserved\n None, \/\/ Reserved\n Some(isr_svcall), \/\/ SVCall\n Some(isr_debugmon), \/\/ Reserved for debug\n None, \/\/ Reserved\n Some(isr_pendsv), \/\/ PendSV\n Some(isr_systick), \/\/ SysTick\n];\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove outdated comment<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removed an out of date todo.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Path raytracer initial (buggy) version.<commit_after>#![allow(unstable)]\n#![feature(box_syntax)]\n\nuse std::io::{BufferedWriter, 
File};\nuse std::ops::{Add, Sub, Mul};\nuse std::num::Float;\nuse std::default::Default;\nuse std::rand::random;\nuse std::thread::Thread;\nuse std::sync::Arc;\n\n#[derive(Show, Copy, Clone, Default)]\nstruct Vector {\n x: f64,\n y: f64,\n z: f64\n}\n\n#[derive(Show, Copy, Clone, Default)]\nstruct Ray {\n o: Vector,\n d: Vector\n}\n\n#[derive(Show, Clone, Default)]\nstruct Sphere {\n radius: f64,\n position: Vector,\n emission: Vector,\n color: Vector,\n}\n\n#[derive(Show, Default)]\nstruct Camera {\n eye: Ray, \/\/ origin and direction of cam\n \/\/ Field of view:\n right: Vector, \/\/ right vector\n up: Vector, \/\/ up vector\n}\n\ntrait Shape {\n fn intersect(self, r: Ray) -> f64;\n}\n\ntrait ShapeRef {\n fn color(self, r: &Ray, t: f64) -> Vector;\n}\n\n\nimpl Shape for Sphere {\n fn intersect(self, r: Ray) -> f64 {\n \/\/ Solve t^2*d.d + 2*t*(o-p).d + (o-p).(o-p)-R^2 = 0\n let eps = 1e-4;\n let op = &self.position - &r.o;\n let b = op.dot(&r.d);\n let mut det = b * b - op.dot(&op) + self.radius * self.radius;\n\n if det < 0.0 {\n return 0.0;\n } else {\n det = det.sqrt();\n }\n\n if (b - det) > eps {\n return b-det;\n }\n\n if (b + det) > eps {\n return b+det;\n }\n\n return 0.0;\n }\n}\n\nimpl<'a> Add for &'a Vector {\n type Output = Vector;\n\n fn add(self, other: &'a Vector) -> Vector {\n Vector {x: self.x + other.x, y: self.y + other.y, z: self.z + other.z}\n }\n}\n\nimpl<'a> Sub for &'a Vector {\n type Output = Vector;\n\n fn sub(self, other: &'a Vector) -> Vector {\n Vector {x: self.x - other.x, y: self.y - other.y, z: self.z - other.z}\n }\n}\n\nimpl<'a> Mul for &'a Vector {\n type Output = Vector;\n\n fn mul(self, other: &'a Vector) -> Vector {\n Vector {x: self.x * other.x, y: self.y * other.y, z: self.z * other.z}\n }\n}\n\ntrait VectorOps {\n fn smul(self, rhs: f64) -> Vector;\n fn norm(self) -> Vector;\n fn cross(self, rhs: Vector) -> Vector;\n fn dot(&self, rhs: &Vector) -> f64;\n}\n\nimpl VectorOps for Vector {\n\n fn smul(self, other: f64) -> Vector {\n Vector {x: self.x * other, y: self.y * other, z: self.z * other}\n }\n\n fn norm(self) -> Vector {\n let normalize = 1.0 \/ (self.x * self.x + self.y * self.y + self.z * self.z).sqrt() ;\n self.smul( normalize )\n }\n\n fn cross(self, b: Vector) -> Vector {\n Vector{x: self.y * b.z - self.z * b.y, y: self.z * b.x - self.x * b.z, z: self.x * b.y - self.y * b.x}\n }\n\n fn dot(&self, other: &Vector) -> f64 {\n (*self).x * (*other).x + (*self).y * (*other).y + (*self).z * (*other).z\n }\n}\n\nfn clamp(x: f64) -> f64\n{\n if x < 0.0 { \n return 0.0;\n }\n if x > 1.0 {\n return 1.0;\n }\n\n x\n}\n\nfn to_int(x: f64) -> i64\n{\n (clamp(x).powf(1.0 \/ 2.2) * 255.0 + 0.5) as i64\n}\n\nfn intersect(r: Ray, t: &mut f64, id: &mut usize) -> bool\n{\n let inf = 10e20f64;\n *t = inf;\n for (i, sphere) in SPHERES.iter().enumerate() {\n let d: f64 = sphere.clone().intersect(r.clone());\n if d != 0.0 && d < *t {\n *t = d;\n *id = i;\n }\n\n }\n return *t < inf;\n\n}\n\nstatic SPHERES: [Sphere; 9] = [\n Sphere{radius:1e5 as f64, position: Vector{ x: (1e5 + 1.0) as f64, y: 40.8 as f64, z: 81.6}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.75,y: 0.25,z: 0.25}}, \/\/ Left \n Sphere{radius:1e5 as f64, position: Vector{ x: -1e5 as f64 + 99.0,y: 40.8 as f64, z: 81.6}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.25,y: 0.25,z: 0.75}}, \/\/ Rght \n Sphere{radius:1e5 as f64, position: Vector{ x: 50 as f64, y: 40.8 as f64, z: 1e5 as f64}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.75,y: 0.75,z: 
0.75}}, \/\/ Back \n Sphere{radius:1e5 as f64, position: Vector{ x: 50 as f64, y: 40.8 as f64, z: -1e5+600 as f64}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 1.0, y: 1.0, z: 1.0 }}, \/\/ Frnt \n Sphere{radius:1e5 as f64, position: Vector{ x: 50 as f64, y: 1e5 as f64, z: 81.6}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.75,y: 0.75,z: 0.75}}, \/\/ Botm \n Sphere{radius:1e5 as f64, position: Vector{ x: 50 as f64, y: -1e5+81.6 as f64,z: 81.6}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.75,y: 0.75,z: 0.75}}, \/\/ Top \n Sphere{radius:16.5, position: Vector{ x: 27.0, y: 16.5 as f64, z: 47.0}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.999, y: 0.999, z: 0.999}}, \/\/ Mirr \n Sphere{radius:16.5, position: Vector{ x: 73.0, y: 16.5 as f64, z: 78.0}, emission: Vector{x: 0.0, y: 0.0, z: 0.0 }, color: Vector{x: 0.999, y: 0.999, z: 0.999}}, \/\/ Glas \n Sphere{radius:600 as f64, position: Vector{ x: 50 as f64, y: 681.6-0.27 as f64, z: 81.6}, emission: Vector{x: 12.0, y: 12.0, z: 12.0}, color: Vector{x: 1.0, y: 1.0, z: 1.0}}, \/\/Lite \n];\n\nfn get_ray(cam: &Camera, a: usize, b: usize) -> Ray {\n \n let w = cam.eye.d.norm().smul(-1.0);\n let u = cam.up.cross(w).norm();\n let v = w.cross(u);\n\n let u0 = -1.0;\n let v0 = -1.0;\n let u1 = 1.0;\n let v1 = 1.0;\n let d = 2.0;\n\n let across = u.smul(u1-u0);\n let up = v.smul(v1-v0);\n let an = (a as f64) \/ HEIGHT as f64;\n let bn = (b as f64) \/ WIDTH as f64;\n\n let corner = &(&(&cam.eye.o + &u.smul(u0)) + &v.smul(v0)) - &w.smul(d);\n let target = &( &corner + &across.smul(an)) + &up.smul(bn);\n Ray{o: cam.eye.o, d: (&target-&cam.eye.o).norm()}\n}\n\n\nconst WIDTH: usize = 1024;\nconst HEIGHT: usize = 768;\n\n\nstatic PI: f64 = 3.14159265358979323846264338327950288_f64;\n\nfn get_light(ray: Ray, depth: usize) -> Vector{ \n\n if depth > 5 {\n return Default::default();\n }\n\n let mut t: f64 = 0.0;\n let mut id: usize = 0;\n if intersect(ray, &mut t, &mut id) {\n let r1: f64 = 2.0 * std::rand::random() * PI;\n let r2: f64 = std::rand::random();\n let r2s: f64 = r2.sqrt();\n\n let x: Vector = &ray.o + &ray.d.smul(t); \/\/ Hitpoint\n let n: Vector = (&x - &SPHERES[id].position).norm();\n let nl = if n.dot(&ray.d) < 0.0 { n } else { n.smul(-1.0) };\n \n let w = nl;\n let u = if w.x > 0.1 { Vector{x: 0.0, y: 1.0, z: 0.0} } else { Vector{x: 1.0, y: 0.0, z: 0.0 } }.norm();\n let v = w.cross(u);\n\n let d = &(&u.smul(r1.cos()*r2s) + &v.smul(r1.sin()*r2s)) + &w.smul((1.0-r2).sqrt());\n \n\n return &SPHERES[id].emission + &(&SPHERES[id].color * &get_light(Ray{o: x, d: d}, depth+1));\n }\n\n return Default::default();\n\n}\n\nstatic NTHREADS: usize = 4;\n\nfn main() {\n let mut cam: Camera = Default::default();\n cam.eye.o = Vector {x: 50.0, y: 52.0, z: 295.6};\n cam.eye.d = Vector {x: 0.0, y: -0.042612, z: -1.0};\n cam.up = Vector{x: 1.0, y: 0.0, z: 0.0};\n let samples = 500;\n let mut output = box [[Vector{x: 0.0, y: 0.0, z: 0.0}; WIDTH]; HEIGHT];\n\n for i in range(0, HEIGHT) {\n print!(\"\\rRaytracing... 
({:.0}%)\", 100.0 - ((WIDTH*(HEIGHT-i)*samples) as f64) \/ ((WIDTH*HEIGHT*samples) as f64) * 100.0);\n for j in range(0, WIDTH) {\n let mut r: Vector = Default::default();\n\n for _ in range(0, samples) {\n let ray: Ray = get_ray(&cam, i, j);\n r = &r + &get_light(ray, 0).smul(1.0\/samples as f64);\n }\n output[i][j] = Vector{ x: clamp(r.x), y: clamp(r.y), z: clamp(r.z) };\n }\n }\n\n println!(\"\\nWriting Image...\");\n let file = File::create(&Path::new(\"image.ppm\"));\n let mut writer = BufferedWriter::new(file);\n\n writer.write(format!(\"P3\\n{} {}\\n{}\\n\", WIDTH, HEIGHT, 255).as_bytes()).ok();\n for i in range(0, HEIGHT) {\n for j in range(0, WIDTH) {\n writer.write(format!(\"{} {} {} \", to_int(output[i][j].x), to_int(output[i][j].y), to_int(output[i][j].z)).as_bytes()).ok();\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>remove some tests that are now in Malachite<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change parameter X to A<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs;\nuse os::windows::raw;\nuse net;\nuse sys_common::{self, AsInner, FromInner, IntoInner};\nuse sys;\nuse sys::c;\n\n\/\/\/ Raw HANDLEs.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type RawHandle = raw::HANDLE;\n\n\/\/\/ Raw SOCKETs.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type RawSocket = raw::SOCKET;\n\n\/\/\/ Extract raw handles.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait AsRawHandle {\n \/\/\/ Extracts the raw handle, without taking any ownership.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn as_raw_handle(&self) -> RawHandle;\n}\n\n\/\/\/ Construct I\/O objects from raw handles.\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\npub trait FromRawHandle {\n \/\/\/ Constructs a new I\/O object from the specified raw handle.\n \/\/\/\n \/\/\/ This function will **consume ownership** of the handle given,\n \/\/\/ passing responsibility for closing the handle to the returned\n \/\/\/ object.\n \/\/\/\n \/\/\/ This function is also unsafe as the primitives currently returned\n \/\/\/ have the contract that they are the sole owner of the file\n \/\/\/ descriptor they are wrapping. Usage of this function could\n \/\/\/ accidentally allow violating this contract which can cause memory\n \/\/\/ unsafety in code that relies on it being true.\n #[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\n unsafe fn from_raw_handle(handle: RawHandle) -> Self;\n}\n\n\/\/\/ A trait to express the ability to consume an object and acquire ownership of\n\/\/\/ its raw `HANDLE`.\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\npub trait IntoRawHandle {\n \/\/\/ Consumes this object, returning the raw underlying handle.\n \/\/\/\n \/\/\/ This function **transfers ownership** of the underlying handle to the\n \/\/\/ caller. 
Callers are then the unique owners of the handle and must close\n \/\/\/ it once it's no longer needed.\n #[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\n fn into_raw_handle(self) -> RawHandle;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawHandle for fs::File {\n fn as_raw_handle(&self) -> RawHandle {\n self.as_inner().handle().raw() as RawHandle\n }\n}\n\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawHandle for fs::File {\n unsafe fn from_raw_handle(handle: RawHandle) -> fs::File {\n let handle = handle as c::HANDLE;\n fs::File::from_inner(sys::fs::File::from_inner(handle))\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawHandle for fs::File {\n fn into_raw_handle(self) -> RawHandle {\n self.into_inner().into_handle().into_raw() as *mut _\n }\n}\n\n\/\/\/ Extract raw sockets.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait AsRawSocket {\n \/\/\/ Extracts the underlying raw socket from this object.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn as_raw_socket(&self) -> RawSocket;\n}\n\n\/\/\/ Create I\/O objects from raw sockets.\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\npub trait FromRawSocket {\n \/\/\/ Creates a new I\/O object from the given raw socket.\n \/\/\/\n \/\/\/ This function will **consume ownership** of the socket provided and\n \/\/\/ it will be closed when the returned object goes out of scope.\n \/\/\/\n \/\/\/ This function is also unsafe as the primitives currently returned\n \/\/\/ have the contract that they are the sole owner of the file\n \/\/\/ descriptor they are wrapping. Usage of this function could\n \/\/\/ accidentally allow violating this contract which can cause memory\n \/\/\/ unsafety in code that relies on it being true.\n #[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\n unsafe fn from_raw_socket(sock: RawSocket) -> Self;\n}\n\n\/\/\/ A trait to express the ability to consume an object and acquire ownership of\n\/\/\/ its raw `SOCKET`.\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\npub trait IntoRawSocket {\n \/\/\/ Consumes this object, returning the raw underlying socket.\n \/\/\/\n \/\/\/ This function **transfers ownership** of the underlying socket to the\n \/\/\/ caller. 
Callers are then the unique owners of the socket and must close\n \/\/\/ it once it's no longer needed.\n #[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\n fn into_raw_socket(self) -> RawSocket;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::TcpStream {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::TcpListener {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::UdpSocket {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::TcpStream {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::TcpStream {\n let sock = sys::net::Socket::from_inner(sock);\n net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(sock))\n }\n}\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::TcpListener {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::TcpListener {\n let sock = sys::net::Socket::from_inner(sock);\n net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(sock))\n }\n}\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::UdpSocket {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::UdpSocket {\n let sock = sys::net::Socket::from_inner(sock);\n net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(sock))\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::TcpStream {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::TcpListener {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::UdpSocket {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n<commit_msg>Implement AsRawHandle for Std* on Windows<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fs;\nuse os::windows::raw;\nuse net;\nuse sys_common::{self, AsInner, FromInner, IntoInner};\nuse sys;\nuse io;\nuse sys::c;\n\n\/\/\/ Raw HANDLEs.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type RawHandle = raw::HANDLE;\n\n\/\/\/ Raw SOCKETs.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub type RawSocket = raw::SOCKET;\n\n\/\/\/ Extract raw handles.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait AsRawHandle {\n \/\/\/ Extracts the raw handle, without taking any ownership.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn as_raw_handle(&self) -> RawHandle;\n}\n\n\/\/\/ Construct I\/O objects from raw handles.\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\npub trait FromRawHandle {\n \/\/\/ Constructs a new I\/O object from the specified raw handle.\n \/\/\/\n \/\/\/ This function will **consume ownership** of the handle given,\n \/\/\/ passing responsibility for closing the handle to the returned\n \/\/\/ object.\n \/\/\/\n \/\/\/ This function is also unsafe as the primitives currently returned\n \/\/\/ have the contract that they are the sole owner of the file\n \/\/\/ descriptor they are wrapping. Usage of this function could\n \/\/\/ accidentally allow violating this contract which can cause memory\n \/\/\/ unsafety in code that relies on it being true.\n #[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\n unsafe fn from_raw_handle(handle: RawHandle) -> Self;\n}\n\n\/\/\/ A trait to express the ability to consume an object and acquire ownership of\n\/\/\/ its raw `HANDLE`.\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\npub trait IntoRawHandle {\n \/\/\/ Consumes this object, returning the raw underlying handle.\n \/\/\/\n \/\/\/ This function **transfers ownership** of the underlying handle to the\n \/\/\/ caller. 
Callers are then the unique owners of the handle and must close\n \/\/\/ it once it's no longer needed.\n #[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\n fn into_raw_handle(self) -> RawHandle;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawHandle for fs::File {\n fn as_raw_handle(&self) -> RawHandle {\n self.as_inner().handle().raw() as RawHandle\n }\n}\n\n#[stable(feature = \"asraw_stdio\", since = \"1.21.0\")]\nimpl AsRawHandle for io::Stdin {\n fn as_raw_handle(&self) -> RawHandle {\n unsafe { c::GetStdHandle(c::STD_INPUT_HANDLE) } as RawHandle\n }\n}\n\n#[stable(feature = \"asraw_stdio\", since = \"1.21.0\")]\nimpl AsRawHandle for io::Stdout {\n fn as_raw_handle(&self) -> RawHandle {\n unsafe { c::GetStdHandle(c::STD_OUTPUT_HANDLE) } as RawHandle\n }\n}\n\n#[stable(feature = \"asraw_stdio\", since = \"1.21.0\")]\nimpl AsRawHandle for io::Stderr {\n fn as_raw_handle(&self) -> RawHandle {\n unsafe { c::GetStdHandle(c::STD_ERROR_HANDLE) } as RawHandle\n }\n}\n\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawHandle for fs::File {\n unsafe fn from_raw_handle(handle: RawHandle) -> fs::File {\n let handle = handle as c::HANDLE;\n fs::File::from_inner(sys::fs::File::from_inner(handle))\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawHandle for fs::File {\n fn into_raw_handle(self) -> RawHandle {\n self.into_inner().into_handle().into_raw() as *mut _\n }\n}\n\n\/\/\/ Extract raw sockets.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait AsRawSocket {\n \/\/\/ Extracts the underlying raw socket from this object.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n fn as_raw_socket(&self) -> RawSocket;\n}\n\n\/\/\/ Create I\/O objects from raw sockets.\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\npub trait FromRawSocket {\n \/\/\/ Creates a new I\/O object from the given raw socket.\n \/\/\/\n \/\/\/ This function will **consume ownership** of the socket provided and\n \/\/\/ it will be closed when the returned object goes out of scope.\n \/\/\/\n \/\/\/ This function is also unsafe as the primitives currently returned\n \/\/\/ have the contract that they are the sole owner of the file\n \/\/\/ descriptor they are wrapping. Usage of this function could\n \/\/\/ accidentally allow violating this contract which can cause memory\n \/\/\/ unsafety in code that relies on it being true.\n #[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\n unsafe fn from_raw_socket(sock: RawSocket) -> Self;\n}\n\n\/\/\/ A trait to express the ability to consume an object and acquire ownership of\n\/\/\/ its raw `SOCKET`.\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\npub trait IntoRawSocket {\n \/\/\/ Consumes this object, returning the raw underlying socket.\n \/\/\/\n \/\/\/ This function **transfers ownership** of the underlying socket to the\n \/\/\/ caller. 
Callers are then the unique owners of the socket and must close\n \/\/\/ it once it's no longer needed.\n #[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\n fn into_raw_socket(self) -> RawSocket;\n}\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::TcpStream {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::TcpListener {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl AsRawSocket for net::UdpSocket {\n fn as_raw_socket(&self) -> RawSocket {\n *self.as_inner().socket().as_inner()\n }\n}\n\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::TcpStream {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::TcpStream {\n let sock = sys::net::Socket::from_inner(sock);\n net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(sock))\n }\n}\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::TcpListener {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::TcpListener {\n let sock = sys::net::Socket::from_inner(sock);\n net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(sock))\n }\n}\n#[stable(feature = \"from_raw_os\", since = \"1.1.0\")]\nimpl FromRawSocket for net::UdpSocket {\n unsafe fn from_raw_socket(sock: RawSocket) -> net::UdpSocket {\n let sock = sys::net::Socket::from_inner(sock);\n net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(sock))\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::TcpStream {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::TcpListener {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n\n#[stable(feature = \"into_raw_os\", since = \"1.4.0\")]\nimpl IntoRawSocket for net::UdpSocket {\n fn into_raw_socket(self) -> RawSocket {\n self.into_inner().into_socket().into_inner()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a test that checks that unary structs can be mutably borrowed.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Tests that unary structs can be mutably borrowed.\n\nstruct Empty;\n\nimpl Iterator<int> for Empty {\n fn next(&mut self) -> Option<int> { None }\n}\n\nfn do_something_with(a : &mut Iterator<int>) {\n println!(\"{}\", a.next())\n}\n\nfn main() {\n do_something_with(&mut Empty);\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ this used to cause exponential code-size blowup during LLVM passes.\n\/\/ min-llvm-version 3.9\n\n#![feature(test)]\n\nextern crate test;\n\nstruct MayUnwind;\n\nimpl Drop for MayUnwind {\n fn drop(&mut self) {\n if test::black_box(false) {\n panic!()\n }\n }\n}\n\nstruct DS<U> {\n may_unwind: MayUnwind,\n name: String,\n next: U,\n}\n\nfn add<U>(ds: DS<U>, name: String) -> DS<DS<U>> {\n DS {\n may_unwind: MayUnwind,\n name: \"?\".to_owned(),\n next: ds,\n }\n}\n\nfn main() {\n let deserializers = DS { may_unwind: MayUnwind, name: \"?\".to_owned(), next: () };\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 0.7s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 1.3s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 2.4s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 6.7s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 26.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 114.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 228.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 400.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 800.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 1600.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 3200.0s\n}\n<commit_msg>Ignore test for not-closed issue<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ this used to cause exponential code-size blowup during LLVM passes.\n\/\/ ignore-test FIXME #41696\n\/\/ min-llvm-version 3.9\n\n#![feature(test)]\n\nextern crate test;\n\nstruct MayUnwind;\n\nimpl Drop for MayUnwind {\n fn drop(&mut self) {\n if test::black_box(false) {\n panic!()\n }\n }\n}\n\nstruct DS<U> {\n may_unwind: MayUnwind,\n name: String,\n next: U,\n}\n\nfn add<U>(ds: DS<U>, name: String) -> DS<DS<U>> {\n DS {\n may_unwind: MayUnwind,\n name: \"?\".to_owned(),\n next: ds,\n }\n}\n\nfn main() {\n let deserializers = DS { may_unwind: MayUnwind, name: \"?\".to_owned(), next: () };\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned());\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 0.7s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 1.3s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 2.4s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 6.7s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 26.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 114.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 228.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 400.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 800.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 1600.0s\n let deserializers = add(deserializers, \"?\".to_owned()); \/\/ 3200.0s\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Integer and floating-point number formatting\n\n#![allow(deprecated)]\n\n\nuse fmt;\nuse ops::{Div, Rem, Sub};\nuse str;\nuse slice;\nuse ptr;\nuse mem;\n\n#[doc(hidden)]\ntrait Int: PartialEq + PartialOrd + Div<Output=Self> + Rem<Output=Self> +\n Sub<Output=Self> + Copy {\n fn zero() -> Self;\n fn from_u8(u: u8) -> Self;\n fn to_u8(&self) -> u8;\n fn to_u16(&self) -> u16;\n fn to_u32(&self) -> u32;\n fn to_u64(&self) -> u64;\n fn to_u128(&self) -> u128;\n}\n\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl Int for $t {\n fn zero() -> $t { 0 }\n fn from_u8(u: u8) -> $t { u as $t }\n fn to_u8(&self) -> u8 { *self as u8 }\n fn to_u16(&self) -> u16 { *self as u16 }\n fn to_u32(&self) -> u32 { *self as u32 }\n fn to_u64(&self) -> u64 { *self as u64 }\n fn to_u128(&self) -> u128 { *self as u128 }\n })*)\n}\ndoit! 
{ i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize }\n\n\/\/\/ A type that represents a specific radix\n#[doc(hidden)]\ntrait GenericRadix {\n \/\/\/ The number of digits.\n fn base(&self) -> u8;\n\n \/\/\/ A radix-specific prefix string.\n fn prefix(&self) -> &'static str {\n \"\"\n }\n\n \/\/\/ Converts an integer to corresponding radix digit.\n fn digit(&self, x: u8) -> u8;\n\n \/\/\/ Format an integer using the radix using a formatter.\n fn fmt_int<T: Int>(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ The radix can be as low as 2, so we need a buffer of at least 128\n \/\/ characters for a base 2 number.\n let zero = T::zero();\n let is_nonnegative = x >= zero;\n let mut buf = [0; 128];\n let mut curr = buf.len();\n let base = T::from_u8(self.base());\n if is_nonnegative {\n \/\/ Accumulate each digit of the number from the least significant\n \/\/ to the most significant figure.\n for byte in buf.iter_mut().rev() {\n let n = x % base; \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n } else {\n \/\/ Do the same as above, but accounting for two's complement.\n for byte in buf.iter_mut().rev() {\n let n = zero - (x % base); \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n }\n let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };\n f.pad_integral(is_nonnegative, self.prefix(), buf)\n }\n}\n\n\/\/\/ A binary (base 2) radix\n#[derive(Clone, PartialEq)]\nstruct Binary;\n\n\/\/\/ An octal (base 8) radix\n#[derive(Clone, PartialEq)]\nstruct Octal;\n\n\/\/\/ A decimal (base 10) radix\n#[derive(Clone, PartialEq)]\nstruct Decimal;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with lower-case characters\n#[derive(Clone, PartialEq)]\nstruct LowerHex;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with upper-case characters\n#[derive(Clone, PartialEq)]\nstruct UpperHex;\n\nmacro_rules! radix {\n ($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => {\n impl GenericRadix for $T {\n fn base(&self) -> u8 { $base }\n fn prefix(&self) -> &'static str { $prefix }\n fn digit(&self, x: u8) -> u8 {\n match x {\n $($x => $conv,)+\n x => panic!(\"number not in the range 0..{}: {}\", self.base() - 1, x),\n }\n }\n }\n }\n}\n\nradix! { Binary, 2, \"0b\", x @ 0 ... 2 => b'0' + x }\nradix! { Octal, 8, \"0o\", x @ 0 ... 7 => b'0' + x }\nradix! { Decimal, 10, \"\", x @ 0 ... 9 => b'0' + x }\nradix! { LowerHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'a' + (x - 10) }\nradix! { UpperHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'A' + (x - 10) }\n\nmacro_rules! int_base {\n ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::$Trait for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n $Radix.fmt_int(*self as $U, f)\n }\n }\n }\n}\n\nmacro_rules! debug {\n ($T:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Debug for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(self, f)\n }\n }\n }\n}\n\nmacro_rules! integer {\n ($Int:ident, $Uint:ident) => {\n int_base! { Binary for $Int as $Uint -> Binary }\n int_base! 
{ Octal for $Int as $Uint -> Octal }\n int_base! { LowerHex for $Int as $Uint -> LowerHex }\n int_base! { UpperHex for $Int as $Uint -> UpperHex }\n debug! { $Int }\n\n int_base! { Binary for $Uint as $Uint -> Binary }\n int_base! { Octal for $Uint as $Uint -> Octal }\n int_base! { LowerHex for $Uint as $Uint -> LowerHex }\n int_base! { UpperHex for $Uint as $Uint -> UpperHex }\n debug! { $Uint }\n }\n}\ninteger! { isize, usize }\ninteger! { i8, u8 }\ninteger! { i16, u16 }\ninteger! { i32, u32 }\ninteger! { i64, u64 }\ninteger! { i128, u128 }\n\nconst DEC_DIGITS_LUT: &'static[u8] =\n b\"0001020304050607080910111213141516171819\\\n 2021222324252627282930313233343536373839\\\n 4041424344454647484950515253545556575859\\\n 6061626364656667686970717273747576777879\\\n 8081828384858687888990919293949596979899\";\n\nmacro_rules! impl_Display {\n ($($t:ident),*: $conv_fn:ident) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Display for $t {\n #[allow(unused_comparisons)]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let is_nonnegative = *self >= 0;\n let mut n = if is_nonnegative {\n self.$conv_fn()\n } else {\n \/\/ convert the negative num to positive by summing 1 to it's 2 complement\n (!self.$conv_fn()).wrapping_add(1)\n };\n let mut buf: [u8; 39] = unsafe { mem::uninitialized() };\n let mut curr = buf.len() as isize;\n let buf_ptr = buf.as_mut_ptr();\n let lut_ptr = DEC_DIGITS_LUT.as_ptr();\n\n unsafe {\n \/\/ need at least 16 bits for the 4-characters-at-a-time to work.\n if ::mem::size_of::<$t>() >= 2 {\n \/\/ eagerly decode 4 characters at a time\n while n >= 10000 {\n let rem = (n % 10000) as isize;\n n \/= 10000;\n\n let d1 = (rem \/ 100) << 1;\n let d2 = (rem % 100) << 1;\n curr -= 4;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);\n }\n }\n\n \/\/ if we reach here numbers are <= 9999, so at most 4 chars long\n let mut n = n as isize; \/\/ possibly reduce 64bit math\n\n \/\/ decode 2 more chars, if > 2 chars\n if n >= 100 {\n let d1 = (n % 100) << 1;\n n \/= 100;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n\n \/\/ decode last 1 or 2 chars\n if n < 10 {\n curr -= 1;\n *buf_ptr.offset(curr) = (n as u8) + b'0';\n } else {\n let d1 = n << 1;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n }\n\n let buf_slice = unsafe {\n str::from_utf8_unchecked(\n slice::from_raw_parts(buf_ptr.offset(curr), buf.len() - curr as usize))\n };\n f.pad_integral(is_nonnegative, \"\", buf_slice)\n }\n })*);\n}\n\nimpl_Display!(i8, u8, i16, u16, i32, u32: to_u32);\nimpl_Display!(i64, u64: to_u64);\nimpl_Display!(i128, u128: to_u128);\n#[cfg(target_pointer_width = \"16\")]\nimpl_Display!(isize, usize: to_u16);\n#[cfg(target_pointer_width = \"32\")]\nimpl_Display!(isize, usize: to_u32);\n#[cfg(target_pointer_width = \"64\")]\nimpl_Display!(isize, usize: to_u64);\n<commit_msg>Reject '2' as a binary digit in internals of 'b' formatting<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Integer and floating-point number formatting\n\n#![allow(deprecated)]\n\n\nuse fmt;\nuse ops::{Div, Rem, Sub};\nuse str;\nuse slice;\nuse ptr;\nuse mem;\n\n#[doc(hidden)]\ntrait Int: PartialEq + PartialOrd + Div<Output=Self> + Rem<Output=Self> +\n Sub<Output=Self> + Copy {\n fn zero() -> Self;\n fn from_u8(u: u8) -> Self;\n fn to_u8(&self) -> u8;\n fn to_u16(&self) -> u16;\n fn to_u32(&self) -> u32;\n fn to_u64(&self) -> u64;\n fn to_u128(&self) -> u128;\n}\n\nmacro_rules! doit {\n ($($t:ident)*) => ($(impl Int for $t {\n fn zero() -> $t { 0 }\n fn from_u8(u: u8) -> $t { u as $t }\n fn to_u8(&self) -> u8 { *self as u8 }\n fn to_u16(&self) -> u16 { *self as u16 }\n fn to_u32(&self) -> u32 { *self as u32 }\n fn to_u64(&self) -> u64 { *self as u64 }\n fn to_u128(&self) -> u128 { *self as u128 }\n })*)\n}\ndoit! { i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize }\n\n\/\/\/ A type that represents a specific radix\n#[doc(hidden)]\ntrait GenericRadix {\n \/\/\/ The number of digits.\n fn base(&self) -> u8;\n\n \/\/\/ A radix-specific prefix string.\n fn prefix(&self) -> &'static str {\n \"\"\n }\n\n \/\/\/ Converts an integer to corresponding radix digit.\n fn digit(&self, x: u8) -> u8;\n\n \/\/\/ Format an integer using the radix using a formatter.\n fn fmt_int<T: Int>(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ The radix can be as low as 2, so we need a buffer of at least 128\n \/\/ characters for a base 2 number.\n let zero = T::zero();\n let is_nonnegative = x >= zero;\n let mut buf = [0; 128];\n let mut curr = buf.len();\n let base = T::from_u8(self.base());\n if is_nonnegative {\n \/\/ Accumulate each digit of the number from the least significant\n \/\/ to the most significant figure.\n for byte in buf.iter_mut().rev() {\n let n = x % base; \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n } else {\n \/\/ Do the same as above, but accounting for two's complement.\n for byte in buf.iter_mut().rev() {\n let n = zero - (x % base); \/\/ Get the current place value.\n x = x \/ base; \/\/ Deaccumulate the number.\n *byte = self.digit(n.to_u8()); \/\/ Store the digit in the buffer.\n curr -= 1;\n if x == zero {\n \/\/ No more digits left to accumulate.\n break\n };\n }\n }\n let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) };\n f.pad_integral(is_nonnegative, self.prefix(), buf)\n }\n}\n\n\/\/\/ A binary (base 2) radix\n#[derive(Clone, PartialEq)]\nstruct Binary;\n\n\/\/\/ An octal (base 8) radix\n#[derive(Clone, PartialEq)]\nstruct Octal;\n\n\/\/\/ A decimal (base 10) radix\n#[derive(Clone, PartialEq)]\nstruct Decimal;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with lower-case characters\n#[derive(Clone, PartialEq)]\nstruct LowerHex;\n\n\/\/\/ A hexadecimal (base 16) radix, formatted with upper-case characters\n#[derive(Clone, PartialEq)]\nstruct UpperHex;\n\nmacro_rules! radix {\n ($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => {\n impl GenericRadix for $T {\n fn base(&self) -> u8 { $base }\n fn prefix(&self) -> &'static str { $prefix }\n fn digit(&self, x: u8) -> u8 {\n match x {\n $($x => $conv,)+\n x => panic!(\"number not in the range 0..{}: {}\", self.base() - 1, x),\n }\n }\n }\n }\n}\n\nradix! { Binary, 2, \"0b\", x @ 0 ... 
1 => b'0' + x }\nradix! { Octal, 8, \"0o\", x @ 0 ... 7 => b'0' + x }\nradix! { Decimal, 10, \"\", x @ 0 ... 9 => b'0' + x }\nradix! { LowerHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'a' + (x - 10) }\nradix! { UpperHex, 16, \"0x\", x @ 0 ... 9 => b'0' + x,\n x @ 10 ... 15 => b'A' + (x - 10) }\n\nmacro_rules! int_base {\n ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::$Trait for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n $Radix.fmt_int(*self as $U, f)\n }\n }\n }\n}\n\nmacro_rules! debug {\n ($T:ident) => {\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Debug for $T {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n fmt::Display::fmt(self, f)\n }\n }\n }\n}\n\nmacro_rules! integer {\n ($Int:ident, $Uint:ident) => {\n int_base! { Binary for $Int as $Uint -> Binary }\n int_base! { Octal for $Int as $Uint -> Octal }\n int_base! { LowerHex for $Int as $Uint -> LowerHex }\n int_base! { UpperHex for $Int as $Uint -> UpperHex }\n debug! { $Int }\n\n int_base! { Binary for $Uint as $Uint -> Binary }\n int_base! { Octal for $Uint as $Uint -> Octal }\n int_base! { LowerHex for $Uint as $Uint -> LowerHex }\n int_base! { UpperHex for $Uint as $Uint -> UpperHex }\n debug! { $Uint }\n }\n}\ninteger! { isize, usize }\ninteger! { i8, u8 }\ninteger! { i16, u16 }\ninteger! { i32, u32 }\ninteger! { i64, u64 }\ninteger! { i128, u128 }\n\nconst DEC_DIGITS_LUT: &'static[u8] =\n b\"0001020304050607080910111213141516171819\\\n 2021222324252627282930313233343536373839\\\n 4041424344454647484950515253545556575859\\\n 6061626364656667686970717273747576777879\\\n 8081828384858687888990919293949596979899\";\n\nmacro_rules! impl_Display {\n ($($t:ident),*: $conv_fn:ident) => ($(\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n impl fmt::Display for $t {\n #[allow(unused_comparisons)]\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let is_nonnegative = *self >= 0;\n let mut n = if is_nonnegative {\n self.$conv_fn()\n } else {\n \/\/ convert the negative num to positive by summing 1 to it's 2 complement\n (!self.$conv_fn()).wrapping_add(1)\n };\n let mut buf: [u8; 39] = unsafe { mem::uninitialized() };\n let mut curr = buf.len() as isize;\n let buf_ptr = buf.as_mut_ptr();\n let lut_ptr = DEC_DIGITS_LUT.as_ptr();\n\n unsafe {\n \/\/ need at least 16 bits for the 4-characters-at-a-time to work.\n if ::mem::size_of::<$t>() >= 2 {\n \/\/ eagerly decode 4 characters at a time\n while n >= 10000 {\n let rem = (n % 10000) as isize;\n n \/= 10000;\n\n let d1 = (rem \/ 100) << 1;\n let d2 = (rem % 100) << 1;\n curr -= 4;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);\n }\n }\n\n \/\/ if we reach here numbers are <= 9999, so at most 4 chars long\n let mut n = n as isize; \/\/ possibly reduce 64bit math\n\n \/\/ decode 2 more chars, if > 2 chars\n if n >= 100 {\n let d1 = (n % 100) << 1;\n n \/= 100;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n\n \/\/ decode last 1 or 2 chars\n if n < 10 {\n curr -= 1;\n *buf_ptr.offset(curr) = (n as u8) + b'0';\n } else {\n let d1 = n << 1;\n curr -= 2;\n ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);\n }\n }\n\n let buf_slice = unsafe {\n str::from_utf8_unchecked(\n slice::from_raw_parts(buf_ptr.offset(curr), buf.len() - curr as usize))\n };\n 
f.pad_integral(is_nonnegative, \"\", buf_slice)\n }\n })*);\n}\n\nimpl_Display!(i8, u8, i16, u16, i32, u32: to_u32);\nimpl_Display!(i64, u64: to_u64);\nimpl_Display!(i128, u128: to_u128);\n#[cfg(target_pointer_width = \"16\")]\nimpl_Display!(isize, usize: to_u16);\n#[cfg(target_pointer_width = \"32\")]\nimpl_Display!(isize, usize: to_u32);\n#[cfg(target_pointer_width = \"64\")]\nimpl_Display!(isize, usize: to_u64);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Re-enable and address compiler warnings from renderervk.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for #2953<commit_after>\/\/ #2953\n\nmacro_rules! demo {\n ($a:ident <- $b:expr) => {};\n}\n\nfn main() {\n demo!(i <- 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>languages\/rust\/learn-rust\/3-chapter\/17-code.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Removes some asserts from ParameterBlock<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ CarbonReporter sends a message to a carbon end point at a regular basis.\nuse registry::{Registry, StdRegistry};\nuse std::time::Duration;\nuse std::thread;\nuse std::sync::Arc;\nuse reporter::Reporter;\nuse metrics::{CounterSnapshot, GaugeSnapshot, MeterSnapshot};\nuse histogram::Histogram;\nuse time;\nuse time::Timespec;\nuse std::net::TcpStream;\nuse std::io::Write;\nuse std::io::Error;\n\nstruct CarbonStream {\n graphite_stream: Option<TcpStream>,\n host_and_port: String,\n}\n\n\/\/ TODO perhaps we autodiscover the host and port\n\/\/\npub struct CarbonReporter {\n host_and_port: String,\n prefix: &'static str,\n registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str,\n}\n\nimpl CarbonStream {\n pub fn new(host_and_port: String) -> Self {\n CarbonStream {\n host_and_port: host_and_port,\n graphite_stream: None,\n }\n }\n\n pub fn connect(&mut self) -> Result<String, Error> {\n let graphite_stream = try!(TcpStream::connect(&*self.host_and_port));\n self.graphite_stream = Some(graphite_stream);\n Ok(String::from(\"\"))\n }\n\n pub fn write(&mut self,\n metric_path: String,\n value: String,\n timespec: Timespec)\n -> Result<String, Error> {\n let seconds_in_ms = (timespec.sec * 1000) as u32;\n let nseconds_in_ms = (timespec.nsec \/ 1000) as u32;\n let timestamp = seconds_in_ms + nseconds_in_ms;\n match self.graphite_stream {\n Some(ref mut stream) => {\n let carbon_command = format!(\"{} {} {}\\n\", metric_path, value, timestamp)\n .into_bytes();\n try!(stream.write_all(&carbon_command));\n }\n None => {\n try!(self.reconnect_stream());\n try!(self.write(metric_path, value, timespec));\n }\n }\n Ok(String::from(\"\"))\n }\n fn reconnect_stream(&mut self) -> Result<String, Error> {\n \/\/ TODO 123 is made up\n println!(\"Waiting 123ms and then reconnecting\");\n thread::sleep(Duration::from_millis(123));\n self.connect()\n }\n}\n\nimpl Reporter for CarbonReporter {\n fn get_unique_reporter_name(&self) -> &'static str {\n self.reporter_name\n }\n}\n\nfn prefix(metric_line: String, prefix_str: &'static str) -> String {\n format!(\"{}.{}\", prefix_str, metric_line)\n}\n\nfn send_meter_metric(metric_name: &str,\n meter: MeterSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: 
&'static str,\n ts: Timespec)\n -> Result<String, Error> {\n\n let count = meter.count.to_string();\n let m1_rate = meter.rates[0].to_string();\n let m5_rate = meter.rates[1].to_string();\n let m15_rate = meter.rates[2].to_string();\n let mean_rate = meter.mean.to_string();\n try!(carbon.write(prefix(format!(\"{}.count\", metric_name), prefix_str),\n count,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m1\", metric_name), prefix_str),\n m1_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m5\", metric_name), prefix_str),\n m5_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m15\", metric_name), prefix_str),\n m15_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.mean\", metric_name), prefix_str),\n mean_rate,\n ts));\n Ok(String::from(\"\"))\n}\n\nfn send_gauge_metric(metric_name: &str,\n gauge: GaugeSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<String, Error> {\n try!(carbon.write(prefix(format!(\"{}\", metric_name), prefix_str),\n gauge.value.to_string(),\n ts));\n Ok(String::from(\"\"))\n}\n\nfn send_counter_metric(metric_name: &str,\n counter: CounterSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<String, Error> {\n try!(carbon.write(prefix(format!(\"{}\", metric_name), prefix_str),\n counter.value.to_string(),\n ts));\n Ok(String::from(\"\"))\n}\nfn send_histogram_metric(metric_name: &str,\n histogram: &mut Histogram,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<String, Error> {\n let count = histogram.into_iter().count();\n \/\/ let sum = histogram.sum();\n \/\/ let mean = sum \/ count;\n let max = histogram.percentile(100.0).unwrap();\n let min = histogram.percentile(0.0).unwrap();\n\n let p50 = histogram.percentile(50.0).unwrap();\n let p75 = histogram.percentile(75.0).unwrap();\n let p95 = histogram.percentile(95.0).unwrap();\n let p98 = histogram.percentile(98.0).unwrap();\n let p99 = histogram.percentile(99.0).unwrap();\n let p999 = histogram.percentile(99.9).unwrap();\n let p9999 = histogram.percentile(99.99).unwrap();\n let p99999 = histogram.percentile(99.999).unwrap();\n\n try!(carbon.write(prefix(format!(\"{}.count\", metric_name), prefix_str),\n count.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.max\", metric_name), prefix_str),\n max.to_string(),\n ts));\n\n \/\/ carbon\n \/\/ .write(prefix(format!(\"{}.mean\", metric_name), prefix_str),\n \/\/ mean.into_string(),\n \/\/ ts);\n\n try!(carbon.write(prefix(format!(\"{}.min\", metric_name), prefix_str),\n min.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p50\", metric_name), prefix_str),\n p50.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p75\", metric_name), prefix_str),\n p75.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p95\", metric_name), prefix_str),\n p95.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p98\", metric_name), prefix_str),\n p98.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p99\", metric_name), prefix_str),\n p99.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p999\", metric_name), prefix_str),\n p999.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p9999\", metric_name), prefix_str),\n p9999.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p99999\", metric_name), prefix_str),\n p99999.to_string(),\n ts));\n Ok(String::from(\"\"))\n}\n\nimpl CarbonReporter {\n pub fn new(registry: Arc<StdRegistry<'static>>,\n 
reporter_name: &'static str,\n host_and_port: String,\n prefix: &'static str)\n -> Self {\n CarbonReporter {\n host_and_port: host_and_port,\n prefix: prefix,\n registry: registry,\n reporter_name: reporter_name,\n }\n }\n\n fn report_to_carbon_continuously(self,\n delay_ms: u64)\n -> thread::JoinHandle<Result<String, Error>> {\n use metrics::MetricValue::{Counter, Gauge, Histogram, Meter};\n\n let prefix = self.prefix;\n let host_and_port = self.host_and_port.clone();\n let mut carbon = CarbonStream::new(host_and_port);\n let registry = self.registry.clone();\n thread::spawn(move || {\n loop {\n let ts = time::now().to_timespec();\n for metric_name in ®istry.get_metrics_names() {\n let metric = registry.get(metric_name);\n try!(match metric.export_metric() {\n Meter(x) => send_meter_metric(metric_name, x, &mut carbon, prefix, ts),\n Gauge(x) => send_gauge_metric(metric_name, x, &mut carbon, prefix, ts),\n Counter(x) => send_counter_metric(metric_name, x, &mut carbon, prefix, ts),\n Histogram(mut x) => {\n send_histogram_metric(metric_name, &mut x, &mut carbon, prefix, ts)\n }\n });\n }\n thread::sleep(Duration::from_millis(delay_ms));\n }\n })\n }\n\n pub fn start(self, delay_ms: u64) {\n self.report_to_carbon_continuously(delay_ms);\n }\n}\n\n#[cfg(test)]\nmod test {\n use histogram::*;\n use metrics::{Counter, Gauge, Meter, StdCounter, StdGauge, StdMeter};\n use registry::{Registry, StdRegistry};\n use std::sync::Arc;\n use super::CarbonReporter;\n\n #[test]\n fn meter() {\n let m = StdMeter::new();\n m.mark(100);\n\n let mut c = StdCounter::new();\n c.inc();\n\n let mut g = StdGauge::default();\n g.set(1.2);\n\n let mut h = Histogram::configure()\n .max_value(100)\n .precision(1)\n .build()\n .unwrap();\n\n h.increment_by(1, 1).unwrap();\n\n let mut r = StdRegistry::new();\n r.insert(\"meter1\", m);\n r.insert(\"counter1\", c);\n r.insert(\"gauge1\", g);\n r.insert(\"histogram\", h);\n\n let arc_registry = Arc::new(r);\n CarbonReporter::new(arc_registry.clone(),\n \"test\",\n \"localhost:0\".to_string(),\n \"asd.asdf\");\n }\n}\n<commit_msg>Use Result<(), Error>, not Result<String, Error>.<commit_after>\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ CarbonReporter sends a message to a carbon end point at a regular basis.\nuse registry::{Registry, StdRegistry};\nuse std::time::Duration;\nuse std::thread;\nuse std::sync::Arc;\nuse reporter::Reporter;\nuse metrics::{CounterSnapshot, GaugeSnapshot, MeterSnapshot};\nuse histogram::Histogram;\nuse time;\nuse time::Timespec;\nuse std::net::TcpStream;\nuse std::io::Write;\nuse std::io::Error;\n\nstruct CarbonStream {\n graphite_stream: Option<TcpStream>,\n host_and_port: String,\n}\n\n\/\/ TODO perhaps we autodiscover the host and port\n\/\/\npub struct CarbonReporter {\n host_and_port: String,\n prefix: &'static str,\n registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str,\n}\n\nimpl CarbonStream {\n pub fn new(host_and_port: String) -> Self {\n CarbonStream {\n host_and_port: host_and_port,\n graphite_stream: None,\n }\n }\n\n pub fn connect(&mut self) -> Result<(), Error> {\n let graphite_stream = try!(TcpStream::connect(&*self.host_and_port));\n self.graphite_stream = Some(graphite_stream);\n Ok(())\n }\n\n pub fn write(&mut self,\n metric_path: String,\n value: String,\n timespec: Timespec)\n -> Result<(), Error> {\n let seconds_in_ms = (timespec.sec * 1000) as u32;\n let nseconds_in_ms = (timespec.nsec \/ 1000) as u32;\n let timestamp = seconds_in_ms + nseconds_in_ms;\n match self.graphite_stream {\n Some(ref mut stream) => {\n let carbon_command = format!(\"{} {} {}\\n\", metric_path, value, timestamp)\n .into_bytes();\n try!(stream.write_all(&carbon_command));\n }\n None => {\n try!(self.reconnect_stream());\n try!(self.write(metric_path, value, timespec));\n }\n }\n Ok(())\n }\n fn reconnect_stream(&mut self) -> Result<(), Error> {\n \/\/ TODO 123 is made up\n println!(\"Waiting 123ms and then reconnecting\");\n thread::sleep(Duration::from_millis(123));\n self.connect()\n }\n}\n\nimpl Reporter for CarbonReporter {\n fn get_unique_reporter_name(&self) -> &'static str {\n self.reporter_name\n }\n}\n\nfn prefix(metric_line: String, prefix_str: &'static str) -> String {\n format!(\"{}.{}\", prefix_str, metric_line)\n}\n\nfn send_meter_metric(metric_name: &str,\n meter: MeterSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<(), Error> {\n\n let count = meter.count.to_string();\n let m1_rate = meter.rates[0].to_string();\n let m5_rate = meter.rates[1].to_string();\n let m15_rate = meter.rates[2].to_string();\n let mean_rate = meter.mean.to_string();\n try!(carbon.write(prefix(format!(\"{}.count\", metric_name), prefix_str),\n count,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m1\", metric_name), prefix_str),\n m1_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m5\", metric_name), prefix_str),\n m5_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.m15\", metric_name), prefix_str),\n m15_rate,\n ts));\n try!(carbon.write(prefix(format!(\"{}.mean\", metric_name), prefix_str),\n mean_rate,\n ts));\n Ok(())\n}\n\nfn send_gauge_metric(metric_name: &str,\n gauge: GaugeSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<(), Error> {\n try!(carbon.write(prefix(format!(\"{}\", metric_name), prefix_str),\n gauge.value.to_string(),\n ts));\n Ok(())\n}\n\nfn send_counter_metric(metric_name: &str,\n counter: CounterSnapshot,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<(), Error> {\n try!(carbon.write(prefix(format!(\"{}\", metric_name), 
prefix_str),\n counter.value.to_string(),\n ts));\n Ok(())\n}\nfn send_histogram_metric(metric_name: &str,\n histogram: &mut Histogram,\n carbon: &mut CarbonStream,\n prefix_str: &'static str,\n ts: Timespec)\n -> Result<(), Error> {\n let count = histogram.into_iter().count();\n \/\/ let sum = histogram.sum();\n \/\/ let mean = sum \/ count;\n let max = histogram.percentile(100.0).unwrap();\n let min = histogram.percentile(0.0).unwrap();\n\n let p50 = histogram.percentile(50.0).unwrap();\n let p75 = histogram.percentile(75.0).unwrap();\n let p95 = histogram.percentile(95.0).unwrap();\n let p98 = histogram.percentile(98.0).unwrap();\n let p99 = histogram.percentile(99.0).unwrap();\n let p999 = histogram.percentile(99.9).unwrap();\n let p9999 = histogram.percentile(99.99).unwrap();\n let p99999 = histogram.percentile(99.999).unwrap();\n\n try!(carbon.write(prefix(format!(\"{}.count\", metric_name), prefix_str),\n count.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.max\", metric_name), prefix_str),\n max.to_string(),\n ts));\n\n \/\/ carbon\n \/\/ .write(prefix(format!(\"{}.mean\", metric_name), prefix_str),\n \/\/ mean.into_string(),\n \/\/ ts);\n\n try!(carbon.write(prefix(format!(\"{}.min\", metric_name), prefix_str),\n min.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p50\", metric_name), prefix_str),\n p50.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p75\", metric_name), prefix_str),\n p75.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p95\", metric_name), prefix_str),\n p95.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p98\", metric_name), prefix_str),\n p98.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p99\", metric_name), prefix_str),\n p99.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p999\", metric_name), prefix_str),\n p999.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p9999\", metric_name), prefix_str),\n p9999.to_string(),\n ts));\n\n try!(carbon.write(prefix(format!(\"{}.p99999\", metric_name), prefix_str),\n p99999.to_string(),\n ts));\n Ok(())\n}\n\nimpl CarbonReporter {\n pub fn new(registry: Arc<StdRegistry<'static>>,\n reporter_name: &'static str,\n host_and_port: String,\n prefix: &'static str)\n -> Self {\n CarbonReporter {\n host_and_port: host_and_port,\n prefix: prefix,\n registry: registry,\n reporter_name: reporter_name,\n }\n }\n\n fn report_to_carbon_continuously(self, delay_ms: u64) -> thread::JoinHandle<Result<(), Error>> {\n use metrics::MetricValue::{Counter, Gauge, Histogram, Meter};\n\n let prefix = self.prefix;\n let host_and_port = self.host_and_port.clone();\n let mut carbon = CarbonStream::new(host_and_port);\n let registry = self.registry.clone();\n thread::spawn(move || {\n loop {\n let ts = time::now().to_timespec();\n for metric_name in ®istry.get_metrics_names() {\n let metric = registry.get(metric_name);\n try!(match metric.export_metric() {\n Meter(x) => send_meter_metric(metric_name, x, &mut carbon, prefix, ts),\n Gauge(x) => send_gauge_metric(metric_name, x, &mut carbon, prefix, ts),\n Counter(x) => send_counter_metric(metric_name, x, &mut carbon, prefix, ts),\n Histogram(mut x) => {\n send_histogram_metric(metric_name, &mut x, &mut carbon, prefix, ts)\n }\n });\n }\n thread::sleep(Duration::from_millis(delay_ms));\n }\n })\n }\n\n pub fn start(self, delay_ms: u64) {\n self.report_to_carbon_continuously(delay_ms);\n }\n}\n\n#[cfg(test)]\nmod test {\n use histogram::*;\n use metrics::{Counter, Gauge, Meter, 
StdCounter, StdGauge, StdMeter};\n use registry::{Registry, StdRegistry};\n use std::sync::Arc;\n use super::CarbonReporter;\n\n #[test]\n fn meter() {\n let m = StdMeter::new();\n m.mark(100);\n\n let mut c = StdCounter::new();\n c.inc();\n\n let mut g = StdGauge::default();\n g.set(1.2);\n\n let mut h = Histogram::configure()\n .max_value(100)\n .precision(1)\n .build()\n .unwrap();\n\n h.increment_by(1, 1).unwrap();\n\n let mut r = StdRegistry::new();\n r.insert(\"meter1\", m);\n r.insert(\"counter1\", c);\n r.insert(\"gauge1\", g);\n r.insert(\"histogram\", h);\n\n let arc_registry = Arc::new(r);\n CarbonReporter::new(arc_registry.clone(),\n \"test\",\n \"localhost:0\".to_string(),\n \"asd.asdf\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Review blackboard documentation<commit_after><|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::build(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n \/\/ TODO: Don't use \"\/store\" but value from configuration\n debug!(\"Building StorageBackend for {}\", path);\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self, m: &Module) -> Option<Vec<FileID>> {\n let list = glob(&self.prefix_of_files_for_module(m)[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn 
put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. 
I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id : '{}'\", id);\n self.prefix_of_files_for_module(owner) + \"-\" + &id[..] + \".imag\"\n }\n\n fn build_filepath_with_id(&self, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" id : '{}'\", id);\n self.basepath.clone() + &id[..]\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n dataDump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<commit_msg>Pass owner to file path builder helper function<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating 
succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::build(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n fn build<M: Module>(rt: &Runtime, m: &M) -> StorageBackend {\n let path = rt.get_rtp() + m.name() + \"\/store\";\n \/\/ TODO: Don't use \"\/store\" but value from configuration\n debug!(\"Building StorageBackend for {}\", path);\n StorageBackend::new(path)\n }\n\n fn get_file_ids(&self, m: &Module) -> Option<Vec<FileID>> {\n let list = glob(&self.prefix_of_files_for_module(m)[..]);\n\n if let Ok(globlist) = list {\n let mut v = vec![];\n for entry in globlist {\n if let Ok(path) = entry {\n debug!(\" - File: {:?}\", path);\n v.push(from_pathbuf(&path));\n } else {\n \/\/ Entry is not a path\n }\n }\n\n Some(v)\n } else {\n None\n }\n }\n\n pub fn iter_ids(&self, m: &Module) -> Option<IntoIter<FileID>>\n {\n glob(&self.prefix_of_files_for_module(m)[..]).and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| from_pathbuf(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n }).ok()\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Option<IntoIter<File<'a>>>\n where HP: FileHeaderParser\n {\n self.iter_ids(m).and_then(|ids| {\n Some(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::build(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::build(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success reading file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::build(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" id : '{}'\", id);\n self.basepath.clone() + owner.name() + \"-\" + &id[..] 
+ \".imag\"\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n fn new(action: String,\n desc : String,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: action,\n desc: desc,\n data_dump: data,\n caused_by: None,\n }\n }\n\n fn build(action: &'static str,\n desc: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n dataDump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::build(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\n\nuse super::file::FileID;\nuse super::file::File;\nuse module::Module;\n\ntype BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend<'a> {\n basepath: String,\n module: &'a Module,\n}\n\nimpl<'a> StorageBackend<'a> {\n\n fn new() -> StorageBackend<'a> {\n }\n\n fn getFileList() -> Vec<(String, FileID)> {\n }\n\n fn createEmpty() -> FileID {\n }\n\n fn createFile() -> File {\n }\n\n fn writeFile(f: File) -> BackendOperationResult {\n }\n\n fn createFileWithContent(content: String) -> BackendOperationResult {\n }\n\n fn readFile(id: FileID) -> String {\n }\n\n \/\/ TODO: Meta files are not covered yet\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError;\n\nimpl StorageBackendError {\n}\n\nimpl Error for StorageBackendError {\n}\n\nimpl Display for StorageBackendError {\n}\n\n<commit_msg>Implement StorageBackendError<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\n\nuse super::file::FileID;\nuse super::file::File;\nuse module::Module;\n\ntype BackendOperationResult = Result<(), StorageBackendError>;\n\npub struct StorageBackend<'a> {\n basepath: String,\n module: &'a Module,\n}\n\nimpl<'a> StorageBackend<'a> {\n\n fn new() -> StorageBackend<'a> {\n }\n\n fn getFileList() -> Vec<(String, FileID)> {\n }\n\n fn createEmpty() -> FileID {\n }\n\n fn createFile() -> File {\n }\n\n fn writeFile(f: File) -> BackendOperationResult {\n }\n\n fn createFileWithContent(content: String) -> BackendOperationResult {\n }\n\n fn readFile(id: FileID) -> String {\n }\n\n \/\/ TODO: Meta files are not covered yet\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub explanation: String, \/\/ A long, user friendly description\n pub dataDump: Option<String> \/\/ Data dump, if any\n}\n\nimpl 
StorageBackendError {\n fn new(action: &'static str,\n desc : &'static str,\n explan: &'static str,\n data : Option<String>) -> StorageBackendError\n {\n StorageBackendError {\n action: String::from(action),\n desc: String::from(desc),\n explanation: String::from(explan),\n dataDump: data,\n }\n }\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n None\n }\n\n}\n\nimpl Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\\n\\n{}\",\n self.action, self.desc, self.explanation)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n })\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>\n {\n glob(&self.prefix_of_files_for_module(m)[..])\n .and_then(|globlist| {\n let v = globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter();\n Ok(v)\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Result<IntoIter<File<'a>>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write 
file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success opening file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. 
I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] +\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError\n where S: Into<String>\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<commit_msg>StorageBackend::iter_ids(): Add caused_by() usage<commit_after>use std::error::Error;\nuse std::fmt::Display;\nuse std::fmt::Formatter;\nuse std::fmt::Result as FMTResult;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::vec::Vec;\nuse std::fs::File as FSFile;\nuse std::fs::create_dir_all;\nuse std::fs::remove_file;\nuse std::io::Read;\nuse std::io::Write;\nuse std::vec::IntoIter;\n\nuse glob::glob;\nuse glob::Paths;\n\nuse storage::file::File;\nuse storage::file_id::*;\nuse storage::parser::{FileHeaderParser, Parser, ParserError};\n\nuse module::Module;\nuse runtime::Runtime;\n\npub type BackendOperationResult<T = ()> = Result<T, StorageBackendError>;\n\npub struct StorageBackend {\n basepath: String,\n storepath: String,\n}\n\nimpl StorageBackend {\n\n pub fn new(rt: &Runtime) -> BackendOperationResult<StorageBackend> {\n let storepath = rt.get_rtp() + \"\/store\/\";\n debug!(\"Trying to create {}\", storepath);\n create_dir_all(&storepath).and_then(|_| {\n debug!(\"Creating succeeded, constructing backend instance\");\n Ok(StorageBackend {\n basepath: rt.get_rtp(),\n storepath: storepath.clone(),\n 
})\n }).or_else(|e| {\n debug!(\"Creating failed, constructing error instance\");\n let mut serr = StorageBackendError::new(\n \"create_dir_all()\",\n \"Could not create store directories\",\n Some(storepath)\n );\n serr.caused_by = Some(Box::new(e));\n Err(serr)\n })\n }\n\n pub fn iter_ids(&self, m: &Module) -> Result<IntoIter<FileID>, StorageBackendError>\n {\n glob(&self.prefix_of_files_for_module(m)[..])\n .and_then(|globlist| {\n Ok(globlist.filter_map(Result::ok)\n .map(|pbuf| FileID::from(&pbuf))\n .collect::<Vec<FileID>>()\n .into_iter())\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_ids()\",\n \"Cannot iter on file ids\",\n None);\n \/\/ Why the hack is Error not implemented for glob::PatternError\n \/\/ serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n pub fn iter_files<'a, HP>(&self, m: &'a Module, p: &Parser<HP>)\n -> Result<IntoIter<File<'a>>, StorageBackendError>\n where HP: FileHeaderParser\n {\n self.iter_ids(m)\n .and_then(|ids| {\n Ok(ids.filter_map(|id| self.get_file_by_id(m, &id, p))\n .collect::<Vec<File>>()\n .into_iter())\n })\n .map_err(|e| {\n let serr = StorageBackendError::new(\n \"iter_files()\",\n \"Cannot iter on files\",\n None);\n serr\n })\n }\n\n \/*\n * Write a file to disk.\n *\n * The file is moved to this function as the file won't be edited afterwards\n *\/\n pub fn put_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let written = write_with_parser(&f, p);\n if written.is_err() { return Err(written.err().unwrap()); }\n let string = written.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::create(&path).map(|mut file| {\n debug!(\"Created file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write_all()\",\n \"Could not write out File contents\",\n None\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not create file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::create()\",\n \"Creating file on disk failed\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Update a file. 
We have the UUID and can find the file on FS with it and\n * then replace its contents with the contents of the passed file object\n *\/\n pub fn update_file<HP>(&self, f: File, p: &Parser<HP>) -> BackendOperationResult\n where HP: FileHeaderParser\n {\n let contents = write_with_parser(&f, p);\n if contents.is_err() { return Err(contents.err().unwrap()); }\n let string = contents.unwrap();\n\n let path = self.build_filepath(&f);\n debug!(\"Writing file: {}\", path);\n debug!(\" string: {}\", string);\n\n FSFile::open(&path).map(|mut file| {\n debug!(\"Open file at '{}'\", path);\n file.write_all(&string.clone().into_bytes())\n .map_err(|ioerr| {\n debug!(\"Could not write file\");\n let mut err = StorageBackendError::new(\n \"File::write()\",\n \"Tried to write contents of this file, though operation did not succeed\",\n Some(string)\n );\n err.caused_by = Some(Box::new(ioerr));\n err\n })\n }).map_err(|writeerr| {\n debug!(\"Could not write file at '{}'\", path);\n let mut err = StorageBackendError::new(\n \"File::open()\",\n \"Tried to update contents of this file, though file doesn't exist\",\n None\n );\n err.caused_by = Some(Box::new(writeerr));\n err\n }).and(Ok(()))\n }\n\n \/*\n * Find a file by its ID and return it if found. Return nothing if not\n * found, of course.\n *\n * TODO: Needs refactoring, as there might be an error when reading from\n * disk OR the id just does not exist.\n *\/\n pub fn get_file_by_id<'a, HP>(&self, m: &'a Module, id: &FileID, p: &Parser<HP>) -> Option<File<'a>>\n where HP: FileHeaderParser\n {\n debug!(\"Searching for file with id '{}'\", id);\n if let Ok(mut fs) = FSFile::open(self.build_filepath_with_id(m, id.clone())) {\n let mut s = String::new();\n fs.read_to_string(&mut s);\n debug!(\"Success opening file with id '{}'\", id);\n debug!(\"Parsing to internal structure now\");\n p.read(s).and_then(|(h, d)| Ok(File::from_parser_result(m, id.clone(), h, d))).ok()\n } else {\n debug!(\"No file with id '{}'\", id);\n None\n }\n }\n\n pub fn remove_file(&self, m: &Module, file: File, checked: bool) -> BackendOperationResult {\n if checked {\n error!(\"Checked remove not implemented yet. I will crash now\");\n unimplemented!()\n }\n\n debug!(\"Doing unchecked remove\");\n info!(\"Going to remove file: {}\", file);\n\n let fp = self.build_filepath(&file);\n remove_file(fp).map_err(|e| {\n let mut serr = StorageBackendError::new(\n \"remove_file()\",\n \"File removal failed\",\n Some(format!(\"{}\", file))\n );\n serr.caused_by = Some(Box::new(e));\n serr\n })\n }\n\n fn build_filepath(&self, f: &File) -> String {\n self.build_filepath_with_id(f.owner(), f.id())\n }\n\n fn build_filepath_with_id(&self, owner: &Module, id: FileID) -> String {\n let idstr : String = id.clone().into();\n let idtype : FileIDType = id.into();\n let typestr : String = idtype.into();\n\n debug!(\"Building filepath with id\");\n debug!(\" basepath: '{}'\", self.basepath);\n debug!(\" storepath: '{}'\", self.storepath);\n debug!(\" id: '{}'\", idstr);\n debug!(\" type: '{}'\", typestr);\n\n self.prefix_of_files_for_module(owner) +\n \"-\" + &typestr[..] +\n \"-\" + &idstr[..] 
+\n \".imag\"\n }\n\n fn prefix_of_files_for_module(&self, m: &Module) -> String {\n self.storepath.clone() + m.name()\n }\n\n}\n\n#[derive(Debug)]\npub struct StorageBackendError {\n pub action: String, \/\/ The file system action in words\n pub desc: String, \/\/ A short description\n pub data_dump: Option<String>, \/\/ Data dump, if any\n pub caused_by: Option<Box<Error>>, \/\/ caused from this error\n}\n\nimpl StorageBackendError {\n\n fn new<S>(action: S, desc: S, data: Option<String>) -> StorageBackendError\n where S: Into<String>\n {\n StorageBackendError {\n action: action.into(),\n desc: desc.into(),\n data_dump: data,\n caused_by: None,\n }\n }\n\n}\n\nimpl Error for StorageBackendError {\n\n fn description(&self) -> &str {\n &self.desc[..]\n }\n\n fn cause(&self) -> Option<&Error> {\n self.caused_by.as_ref().map(|e| &**e)\n }\n\n}\n\nimpl<'a> Display for StorageBackendError {\n fn fmt(&self, f: &mut Formatter) -> FMTResult {\n write!(f, \"StorageBackendError[{}]: {}\",\n self.action, self.desc)\n }\n}\n\n\nfn write_with_parser<'a, HP>(f: &File, p: &Parser<HP>) -> Result<String, StorageBackendError>\n where HP: FileHeaderParser\n{\n p.write(f.contents())\n .or_else(|err| {\n let mut serr = StorageBackendError::new(\n \"Parser::write()\",\n \"Cannot translate internal representation of file contents into on-disk representation\",\n None\n );\n serr.caused_by = Some(Box::new(err));\n Err(serr)\n })\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::{Path, PathBuf};\n\npub type FileID = String;\n\npub fn from_path_string(s: &String) -> FileID {\n String::from(\"\")\n}\n\npub fn from_path(p: &Path) -> FileID {\n String::from(\"\")\n}\n\npub fn from_pathbuf(p: &PathBuf) -> FileID {\n from_path(p.as_path())\n}\n\n<commit_msg>Fix: use unimplemented instead of false implementation<commit_after>use std::path::{Path, PathBuf};\n\npub type FileID = String;\n\npub fn from_path_string(s: &String) -> FileID {\n unimplemented!()\n}\n\npub fn from_path(p: &Path) -> FileID {\n unimplemented!()\n}\n\npub fn from_pathbuf(p: &PathBuf) -> FileID {\n from_path(p.as_path())\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added an example for test rewrite<commit_after>extern crate sourcemap;\n\nuse std::env;\nuse std::fs;\nuse std::io::Read;\nuse std::path::Path;\n\nuse sourcemap::{decode, DecodedMap, RewriteOptions, SourceMap};\n\n\nfn test(sm: &SourceMap) {\n for (src_id, source) in sm.sources().enumerate() {\n let path = Path::new(source);\n if path.is_file() {\n let mut f = fs::File::open(&path).unwrap();\n let mut contents = String::new();\n if f.read_to_string(&mut contents).ok().is_none() {\n continue;\n }\n if Some(contents.as_str()) != sm.get_source_contents(src_id as u32) {\n println!(\" !!! 
{}\", source);\n }\n }\n }\n}\n\nfn load_from_reader<R: Read>(mut rdr: R) -> SourceMap {\n match decode(&mut rdr).unwrap() {\n DecodedMap::Regular(sm) => sm,\n DecodedMap::Index(idx) => idx.flatten_and_rewrite(&RewriteOptions {\n load_local_source_contents: true,\n ..Default::default()\n }).unwrap(),\n }\n}\n\nfn main() {\n let args : Vec<_> = env::args().collect();\n let mut f = fs::File::open(&args[1]).unwrap();\n let sm = load_from_reader(&mut f);\n println!(\"before dump\");\n test(&sm);\n\n println!(\"after dump\");\n let mut json : Vec<u8> = vec![];\n sm.to_writer(&mut json).unwrap();\n let sm = load_from_reader(json.as_slice());\n test(&sm);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>udata is a thing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added experimental parallel IBBA. Experiments show speedups ranging from 4x-10x depending on the problem (on fractus with 11 worker threads). Short running problems experience a slight slow down relative to the serial implementation, but usually returns tighter bounds.<commit_after>\/\/ Cooperative optimization solver\nuse std::collections::BinaryHeap;\nuse std::io::Write;\nextern crate rand;\n\n#[macro_use(max)]\nextern crate gelpia_utils;\nextern crate ga;\nextern crate gr;\n\nuse ga::{ea, Individual};\n\nuse gelpia_utils::{Quple, INF, NINF, Flt, Parameters, eps_tol};\n\nuse gr::{GI, width_box, split_box, midpoint_box};\n\nuse std::sync::{Barrier, RwLock, Arc, RwLockWriteGuard};\n\nuse std::sync::atomic::{AtomicBool, Ordering, AtomicUsize};\n\nuse std::thread;\n\nuse std::time::Duration;\n\nextern crate function;\nuse function::FuncObj;\n\nextern crate args;\nuse args::{process_args};\n\nextern crate threadpool;\nuse threadpool::ThreadPool;\nuse std::sync::mpsc::channel;\nextern crate time;\n\n\/\/\/ Returns the guaranteed upperbound for the algorithm\n\/\/\/ from the queue.\nfn get_upper_bound(q: &RwLockWriteGuard<Vec<Quple>>,\n f_best_high: f64) -> f64{\n let mut max = f_best_high;\n for qi in q.iter() {\n max = max!{max, qi.fdata.upper()};\n }\n max\n}\n\nfn log_max(q: &RwLockWriteGuard<Vec<Quple>>,\n f_best_low: f64,\n f_best_high: f64) {\n let max = get_upper_bound(q, f_best_high);\n let _ = writeln!(&mut std::io::stderr(),\n \"lb: {}, possible ub: {}, guaranteed ub: {}\",\n f_best_low,\n f_best_high,\n max);\n}\n\nfn print_q(q: &RwLockWriteGuard<BinaryHeap<Quple>>) {\n let mut lq: BinaryHeap<Quple> = (*q).clone();\n while lq.len() != 0 {\n let qi = lq.pop().unwrap();\n let (gen, v, fx) = (qi.pf, qi.p, qi.fdata);\n print!(\"[{}, {}, {}], \", v, gen, qi.fdata.to_string());\n }\n println!(\"\\n\");\n}\n\n\/\/\/ Returns a tuple (function_estimate, eval_interval)\n\/\/\/ # Arguments\n\/\/\/ * `f` - The function to evaluate with\n\/\/\/ * `input` - The input domain\nfn est_func(f: &FuncObj, input: &Vec<GI>) -> (Flt, GI) {\n let mid = midpoint_box(input);\n let est_m = f.call(&mid);\n let fsx = f.call(&input);\n let fsx_u = f.call(&input.iter()\n .map(|&si| GI::new_p(si.upper()))\n .collect::<Vec<_>>());\n let fsx_l = f.call(&input.iter()\n .map(|&si| GI::new_p(si.lower()))\n .collect::<Vec<_>>());\n let est_max = est_m.lower().max(fsx_u.lower()).max(fsx_l.lower());\n (est_max, fsx)\n}\n\n\/\/ Returns the upper bound, the domain where this bound occurs and a status\n\/\/ flag indicating whether the answer is complete for the problem.\nfn ibba(x_0: Vec<GI>, e_x: Flt, e_f: Flt, e_f_r: Flt,\n f_bestag: Arc<RwLock<Flt>>, \n f_best_shared: Arc<RwLock<Flt>>,\n x_bestbb: Arc<RwLock<Vec<GI>>>,\n b1: Arc<Barrier>, b2: 
Arc<Barrier>, \n q: Arc<RwLock<Vec<Quple>>>, \n sync: Arc<AtomicBool>, stop: Arc<AtomicBool>,\n f: FuncObj,\n logging: bool, max_iters: u32)\n -> (Flt, Flt, Vec<GI>) {\n let mut best_x = x_0.clone();\n\n let iters = Arc::new(AtomicUsize::new(0));\n let (est_max, first_val) = est_func(&f, &x_0);\n {\n q.write().unwrap().push(Quple{p: est_max, pf: 0, data: x_0.clone(),\n fdata: first_val});\n }\n let mut f_best_low = est_max;\n let mut f_best_high = est_max;\n\n let n_workers = 11;\n let n_jobs = n_workers;\n let pool = ThreadPool::new(n_workers);\n \n while q.read().unwrap().len() != 0 && !stop.load(Ordering::Acquire) {\n if max_iters != 0 && iters.load(Ordering::Acquire) as u32 >= max_iters {\n break;\n }\n if sync.load(Ordering::Acquire) {\n \/\/ Ugly: Update the update thread's view of the best branch bound.\n *f_best_shared.write().unwrap() = f_best_low;\n b1.wait();\n b2.wait();\n }\n {\n \/\/ Take q as writable during an iteration\n let q = q.write().unwrap();\n\n let fbl_orig = f_best_low;\n f_best_low = max!(f_best_low, *f_bestag.read().unwrap());\n\n if iters.load(Ordering::Acquire) % 2048 == 0 {\n let guaranteed_bound =\n get_upper_bound(&q, f_best_high);\n if (guaranteed_bound - f_best_high).abs() < e_f {\n f_best_high = guaranteed_bound;\n break;\n }\n }\n \n if logging && fbl_orig != f_best_low {\n log_max(&q, f_best_low, f_best_high);\n }\n }\n\n let p_q_len = {\n let mut q = q.write().unwrap();\n q.sort();\n q.len()\/n_workers + 1\n };\n \n\/* let mut p_q = vec![];\n {\n let mut total_len = 0;\n let q = q.write().unwrap();\n let q_len = q.len();\n for i in 0..n_workers {\n let mut elems = vec![];\n for j in 0..p_q_len {\n if i + j*n_workers >= q_len {\n break;\n }\n elems.push(q[i+j*n_workers].clone());\n }\n total_len += elems.len();\n p_q.push(elems);\n }\n } *\/\n \n let outer_barr = Arc::new(Barrier::new(n_workers + 1));\n\n let (qtx, qrx) = channel();\n let (htx, hrx) = channel();\n let (ltx, lrx) = channel();\n \n for i in 0..n_workers {\n let inner_barr = outer_barr.clone();\n\/\/ let elems = p_q[i].clone();\n let _f = f.clone();\n let qtx = qtx.clone();\n let htx = htx.clone();\n let ltx = ltx.clone();\n let f_bestag = f_bestag.clone();\n let iters = iters.clone();\n let lqi = q.clone();\n pool.execute(move || {\n let mut l_f_best_high = f_best_high;\n let mut l_best_x = vec![];\n \n let mut l_f_best_low = f_best_low;\n let mut l_best_low_x = vec![];\n\n let mut lqo = vec![];\n let mut used = false;\n let lqi = lqi.read().unwrap();\n\/\/ let elems_len = elems.len();\n\n for j in 0..p_q_len {\n if i + j*n_workers >= lqi.len() { break };\n used = true;\n let ref elem = lqi[i + j*n_workers];\n let ref x = elem.data;\n let ref iter_est = elem.p;\n let ref fx = elem.fdata;\n let ref gen = elem.pf;\n \/\/let (ref x, iter_est, fx, gen) = ;\n \n if fx.upper() < l_f_best_low ||\n width_box(&x, e_x) ||\n eps_tol(*fx, *iter_est, e_f, e_f_r) {\n if l_f_best_high < fx.upper() {\n l_f_best_high = fx.upper();\n l_best_x = x.clone();\n \/\/ htx.send((fx.upper(), x.clone())).unwrap();\n }\n }\n else {\n let (x_s, is_split) = split_box(&x);\n for sx in x_s {\n let (est_max, fsx) = est_func(&_f, &sx);\n if l_f_best_low < est_max {\n l_f_best_low = est_max;\n l_best_low_x = sx.clone();\n \/\/ ltx.send((est_max, sx.clone())).unwrap();\n }\n iters.fetch_add(1, Ordering::Release);\n if is_split && fsx.upper() > f_best_low &&\n fsx.upper() > *f_bestag.read().unwrap() {\n lqo.push(Quple{p: est_max,\n pf: gen+1,\n data: sx,\n fdata: fsx});\n }\n }\n }\n }\n ltx.send((l_f_best_low, 
l_best_low_x, used)).unwrap();\n htx.send((l_f_best_high, l_best_x, used)).unwrap();\n lqo.sort();\n qtx.send(lqo).unwrap();\n inner_barr.wait();\n });\n }\n outer_barr.wait();\n drop(qtx);\n drop(htx);\n drop(ltx);\n\n for li in lrx.iter() {\n let (lb, lx, non_empty) = li;\n if non_empty && f_best_low < lb {\n f_best_low = lb;\n *x_bestbb.write().unwrap() = lx.clone();\n }\n }\n\n for hi in hrx.iter() {\n let (ub, ux, non_empty) = hi;\n if non_empty && f_best_high < ub {\n f_best_high = ub;\n best_x = ux.clone();\n }\n }\n {\n let mut lq = q.write().unwrap();\n *lq = vec![];\n for qis in qrx.iter() {\n for qi in &qis {\n if qi.fdata.upper() > f_best_low {\n lq.push(qi.clone());\n }\n }\n }\n }\n }\n println!(\"{}\", iters.load(Ordering::Acquire));\n stop.store(true, Ordering::Release);\n (f_best_low, f_best_high, best_x)\n}\n\nfn update(stop: Arc<AtomicBool>, _sync: Arc<AtomicBool>,\n _b1: Arc<Barrier>, _b2: Arc<Barrier>,\n _f: FuncObj,\n timeout: u32) {\n let start = time::get_time();\n let one_sec = Duration::new(1, 0);\n 'out: while !stop.load(Ordering::Acquire) {\n \/\/ Timer code...\n thread::sleep(one_sec);\n if timeout > 0 &&\n (time::get_time() - start).num_seconds() >= timeout as i64 { \n let _ = writeln!(&mut std::io::stderr(), \"Stopping early...\");\n stop.store(true, Ordering::Release);\n break 'out;\n }\n }\n}\n\n\nfn main() {\n let args = process_args();\n \n let ref x_0 = args.domain;\n let ref fo = args.function;\n let x_err = args.x_error;\n let y_err = args.y_error;\n let y_rel = args.y_error_rel;\n let seed = args.seed;\n \n \/\/ Early out if there are no input variables...\n if x_0.len() == 0 {\n let result = fo.call(&x_0);\n println!(\"[[{},{}], {{}}]\", result.lower(), result.upper());\n return\n }\n \n let q_inner: Vec<Quple> = Vec::new();\n let q = Arc::new(RwLock::new(q_inner));\n \n let population_inner: Vec<Individual> = Vec::new();\n let population = Arc::new(RwLock::new(population_inner));\n \n let b1 = Arc::new(Barrier::new(3));\n let b2 = Arc::new(Barrier::new(3));\n \n let sync = Arc::new(AtomicBool::new(false));\n let stop = Arc::new(AtomicBool::new(false));\n \n let f_bestag: Arc<RwLock<Flt>> = Arc::new(RwLock::new(NINF));\n let f_best_shared: Arc<RwLock<Flt>> = Arc::new(RwLock::new(NINF));\n \n let x_e = x_0.clone();\n let x_i = x_0.clone();\n \n let x_bestbb = Arc::new(RwLock::new(x_0.clone()));\n \n let ibba_thread = \n {\n let q = q.clone();\n let b1 = b1.clone();\n let b2 = b2.clone();\n let f_bestag = f_bestag.clone();\n let f_best_shared = f_best_shared.clone();\n let x_bestbb = x_bestbb.clone();\n let sync = sync.clone();\n let stop = stop.clone();\n let fo_c = fo.clone();\n let logging = args.logging;\n let iters= args.iters;\n thread::Builder::new().name(\"IBBA\".to_string()).spawn(move || {\n ibba(x_i, x_err, y_err, y_rel,\n f_bestag, f_best_shared,\n x_bestbb,\n b1, b2, q, sync, stop, fo_c, logging, iters)\n })};\n \n let ea_thread = \n {\n let population = population.clone();\n let f_bestag = f_bestag.clone();\n let x_bestbb = x_bestbb.clone();\n let sync = sync.clone();\n let stop = stop.clone();\n let b1 = b1.clone();\n let b2 = b2.clone();\n let fo_c = fo.clone();\n let factor = x_e.len();\n thread::Builder::new().name(\"EA\".to_string()).spawn(move || {\n ea(x_e, Parameters{population: 50*factor, \/\/1000,\n selection: 8, \/\/4,\n elitism: 5, \/\/2,\n mutation: 0.4_f64,\/\/0.3_f64,\n crossover: 0.0_f64, \/\/ 0.5_f64\n seed: seed,\n },\n population, \n f_bestag, \n x_bestbb,\n b1, b2,\n stop, sync, fo_c)\n })};\n\n \/\/ pending finding 
out how to kill threads\n \/\/let update_thread = \n {\n let sync = sync.clone();\n let stop = stop.clone();\n let b1 = b1.clone();\n let b2 = b2.clone();\n let fo_c = fo.clone();\n let to = args.timeout.clone();\n let _ = thread::Builder::new().name(\"Update\".to_string()).spawn(move || {\n update(stop, sync, b1, b2, fo_c, to)\n });};\n \n let result = ibba_thread.unwrap().join();\n let ea_result = ea_thread.unwrap().join();\n\n\n \/\/ Join EA and Update here pending stop signaling.\n if result.is_ok() {\n let (min, mut max, mut interval) = result.unwrap();\n \/\/ Go through all remaining intervals from IBBA to find the true\n \/\/ max\n let mut lq = q.write().unwrap();\n while lq.len() != 0 {\n let ref top = lq.pop().unwrap();\n let (ub, dom) = (top.fdata.upper(), &top.data);\n if ub > max {\n max = ub;\n interval = dom.clone();\n }\n }\n println!(\"[[{},{}], {{\", min, max);\n for i in 0..args.names.len() {\n println!(\"'{}' : {},\", args.names[i], interval[i].to_string());\n }\n println!(\"}}]\");\n \n}\nelse {println!(\"error\")}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Do not print data if output is pipe, except if that behaviour is overridden<commit_after><|endoftext|>"} {"text":"<commit_before>use core::ops::DerefMut;\n\nuse redox::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\nmacro_rules! exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd);\n }\n })\n}\n\/* } Magic Macros *\/\n\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>\n}\n\nimpl Command {\n pub fn vec() -> Vec<Command> {\n let mut commands: Vec<Command> = Vec::new();\n\n commands.push(Command {\n name: \"break\".to_string(),\n main: box |args: &Vec<String>|{\n unsafe{\n asm!(\"int 3\" : : : : \"intel\");\n }\n }\n });\n\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>|{\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if first {\n first = false\n }else{\n echo = echo + \" \";\n }\n echo = echo + arg;\n },\n Option::None => ()\n }\n }\n println!(echo);\n }\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>|{\n match args.get(1) {\n Option::Some(arg) => OpenEvent{ url_string: arg.clone() }.trigger(),\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>|{\n match args.get(1) {\n Option::Some(arg) => {\n let path = arg.clone();\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let commands = String::from_utf8(&vec);\n for command in commands.split(\"\\n\".to_string()) {\n exec!(command);\n }\n },\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n for i in 2..args.len() {\n match args.get(i) {\n Option::Some(arg) => 
{\n if i == 2 {\n vec.push_all(&arg.to_utf8())\n }else{\n vec.push_all(&(\" \".to_string() + arg).to_utf8())\n }\n },\n Option::None => vec = Vec::new()\n }\n }\n vec.push_all(&\"\\r\\n\\r\\n\".to_string().to_utf8());\n\n match resource.write(&vec.as_slice()) {\n Option::Some(size) => println!(\"Wrote \".to_string() + size + \" bytes\"),\n Option::None => println!(\"Failed to write\".to_string())\n }\n\n vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(size) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + ' ' + String::from_num_radix(*byte as usize, 16);\n }\n println!(line);\n },\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n return commands;\n }\n}\n\npub struct Variable {\n pub name: String,\n pub value: String\n}\n\npub struct Mode {\n value: bool\n}\n\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>\n}\n\nimpl Application {\n pub fn new() -> Application {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new()\n };\n }\n\n fn on_command(&mut self, command_string: &String){\n \/\/Comment\n if command_string[0] == '#' {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\".to_string() {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + '\\n' + &variable.name + \"=\" + &variable.value;\n }\n println!(&variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(\" \".to_string()) {\n if arg.len() > 0 {\n if arg[0] == '$' {\n let name = arg.substr(1, arg.len() - 1);\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n }else{\n args.push(arg);\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Option::Some(cmd) => {\n if *cmd == \"if\".to_string() {\n let mut value = false;\n\n match args.get(1) {\n Option::Some(left) => match args.get(2) {\n Option::Some(cmp) => match args.get(3) {\n Option::Some(right) => {\n if *cmp == \"==\".to_string() {\n value = *left == *right;\n }else if *cmp == \"!=\".to_string() {\n value = *left != *right;\n }else if *cmp == \">\".to_string() {\n value = left.to_num_signed() > right.to_num_signed();\n }else if *cmp == \">=\".to_string() {\n value = left.to_num_signed() >= right.to_num_signed();\n }else if *cmp == \"<\".to_string() {\n 
value = left.to_num_signed() < right.to_num_signed();\n }else if *cmp == \"<=\".to_string() {\n value = left.to_num_signed() <= right.to_num_signed();\n }else{\n println!(&(\"Unknown comparison: \".to_string() + cmp));\n }\n },\n Option::None => ()\n },\n Option::None => ()\n },\n Option::None => ()\n }\n\n self.modes.insert(0, Mode{\n value: value\n });\n return;\n }\n\n if *cmd == \"else\".to_string() {\n let mut syntax_error = false;\n match self.modes.get(0) {\n Option::Some(mode) => mode.value = !mode.value,\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: else found with no previous if\".to_string());\n }\n return;\n }\n\n if *cmd == \"fi\".to_string() {\n let mut syntax_error = false;\n match self.modes.remove(0) {\n Option::Some(_) => (),\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: fi found with no previous if\".to_string());\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if ! mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find(\"=\".to_string()) {\n Option::Some(i) => {\n let name = cmd.substr(0, i);\n let mut value = cmd.substr(i + 1, cmd.len() - i - 1);\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => value = value + ' ' + arg.clone(),\n Option::None => ()\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Option::Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n Option::None => break\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n }else{\n for variable in self.variables.iter() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable{\n name: name,\n value: value\n });\n }\n return;\n },\n Option::None => ()\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if command.name == *cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n let mut help = \"Commands:\".to_string();\n for command in self.commands.iter() {\n help = help + ' ' + &command.name;\n }\n println!(&help);\n },\n Option::None => ()\n }\n }\n\n fn main(&mut self){\n console_title(&\"Terminal\".to_string());\n\n while let Option::Some(command) = readln!() {\n println!(\"# \".to_string() + &command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main(){\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<commit_msg>Terminal will run given scripts<commit_after>use core::ops::DerefMut;\n\nuse redox::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application = 0 as *mut Application;\n\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd);\n }\n })\n}\n\/* } Magic Macros *\/\n\npub struct Command {\n pub name: String,\n pub main: Box<Fn(&Vec<String>)>\n}\n\nimpl Command {\n pub fn vec() -> Vec<Command> {\n let mut commands: Vec<Command> = Vec::new();\n\n commands.push(Command {\n name: \"break\".to_string(),\n main: box |args: &Vec<String>|{\n unsafe{\n asm!(\"int 3\" : : : : \"intel\");\n }\n }\n });\n\n commands.push(Command {\n name: \"echo\".to_string(),\n main: box |args: &Vec<String>|{\n let mut echo = String::new();\n let mut first = true;\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if first {\n first = false\n }else{\n echo = echo + \" \";\n }\n echo = echo + arg;\n },\n Option::None => ()\n }\n }\n println!(echo);\n }\n });\n\n commands.push(Command {\n name: \"open\".to_string(),\n main: box |args: &Vec<String>|{\n match args.get(1) {\n Option::Some(arg) => OpenEvent{ url_string: arg.clone() }.trigger(),\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"run\".to_string(),\n main: box |args: &Vec<String>|{\n match args.get(1) {\n Option::Some(arg) => {\n let path = arg.clone();\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let commands = String::from_utf8(&vec);\n for command in commands.split(\"\\n\".to_string()) {\n exec!(command);\n }\n },\n Option::None => ()\n }\n }\n });\n\n commands.push(Command {\n name: \"send\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n for i in 2..args.len() {\n match args.get(i) {\n Option::Some(arg) => {\n if i == 2 {\n vec.push_all(&arg.to_utf8())\n }else{\n vec.push_all(&(\" \".to_string() + arg).to_utf8())\n }\n },\n Option::None => vec = Vec::new()\n }\n }\n vec.push_all(&\"\\r\\n\\r\\n\".to_string().to_utf8());\n\n match resource.write(&vec.as_slice()) {\n Option::Some(size) => println!(\"Wrote \".to_string() + size + \" bytes\"),\n Option::None => println!(\"Failed to write\".to_string())\n }\n\n vec = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(size) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => println!(String::from_utf8(&vec)),\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\".to_string(),\n main: box |args: &Vec<String>|{\n let path;\n match args.get(1) {\n Option::Some(arg) => path = arg.clone(),\n Option::None => path = String::new()\n }\n println!(\"URL: \".to_string() + &path);\n\n let mut resource = File::open(&path);\n\n let mut vec: Vec<u8> = Vec::new();\n match resource.read_to_end(&mut vec) {\n Option::Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + ' ' + String::from_num_radix(*byte as 
usize, 16);\n }\n println!(line);\n },\n Option::None => println!(\"Failed to read\".to_string())\n }\n }\n });\n\n return commands;\n }\n}\n\npub struct Variable {\n pub name: String,\n pub value: String\n}\n\npub struct Mode {\n value: bool\n}\n\npub struct Application {\n commands: Vec<Command>,\n variables: Vec<Variable>,\n modes: Vec<Mode>\n}\n\nimpl Application {\n pub fn new() -> Application {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new()\n };\n }\n\n fn on_command(&mut self, command_string: &String){\n \/\/Comment\n if command_string[0] == '#' {\n return;\n }\n\n \/\/Show variables\n if *command_string == \"$\".to_string() {\n let mut variables = String::new();\n for variable in self.variables.iter() {\n variables = variables + '\\n' + &variable.name + \"=\" + &variable.value;\n }\n println!(&variables);\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(\" \".to_string()) {\n if arg.len() > 0 {\n if arg[0] == '$' {\n let name = arg.substr(1, arg.len() - 1);\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n }else{\n args.push(arg);\n }\n }\n }\n\n \/\/Execute commands\n match args.get(0) {\n Option::Some(cmd) => {\n if *cmd == \"if\".to_string() {\n let mut value = false;\n\n match args.get(1) {\n Option::Some(left) => match args.get(2) {\n Option::Some(cmp) => match args.get(3) {\n Option::Some(right) => {\n if *cmp == \"==\".to_string() {\n value = *left == *right;\n }else if *cmp == \"!=\".to_string() {\n value = *left != *right;\n }else if *cmp == \">\".to_string() {\n value = left.to_num_signed() > right.to_num_signed();\n }else if *cmp == \">=\".to_string() {\n value = left.to_num_signed() >= right.to_num_signed();\n }else if *cmp == \"<\".to_string() {\n value = left.to_num_signed() < right.to_num_signed();\n }else if *cmp == \"<=\".to_string() {\n value = left.to_num_signed() <= right.to_num_signed();\n }else{\n println!(&(\"Unknown comparison: \".to_string() + cmp));\n }\n },\n Option::None => ()\n },\n Option::None => ()\n },\n Option::None => ()\n }\n\n self.modes.insert(0, Mode{\n value: value\n });\n return;\n }\n\n if *cmd == \"else\".to_string() {\n let mut syntax_error = false;\n match self.modes.get(0) {\n Option::Some(mode) => mode.value = !mode.value,\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: else found with no previous if\".to_string());\n }\n return;\n }\n\n if *cmd == \"fi\".to_string() {\n let mut syntax_error = false;\n match self.modes.remove(0) {\n Option::Some(_) => (),\n Option::None => syntax_error = true\n }\n if syntax_error {\n println!(&\"Syntax error: fi found with no previous if\".to_string());\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if ! 
mode.value {\n return;\n }\n }\n\n \/\/Set variables\n match cmd.find(\"=\".to_string()) {\n Option::Some(i) => {\n let name = cmd.substr(0, i);\n let mut value = cmd.substr(i + 1, cmd.len() - i - 1);\n\n if name.len() == 0 {\n return;\n }\n\n for i in 1..args.len() {\n match args.get(i) {\n Option::Some(arg) => value = value + ' ' + arg.clone(),\n Option::None => ()\n }\n }\n\n if value.len() == 0 {\n let mut remove = -1;\n for i in 0..self.variables.len() {\n match self.variables.get(i) {\n Option::Some(variable) => if variable.name == name {\n remove = i as isize;\n break;\n },\n Option::None => break\n }\n }\n\n if remove >= 0 {\n self.variables.remove(remove as usize);\n }\n }else{\n for variable in self.variables.iter() {\n if variable.name == name {\n variable.value = value;\n return;\n }\n }\n\n self.variables.push(Variable{\n name: name,\n value: value\n });\n }\n return;\n },\n Option::None => ()\n }\n\n \/\/Commands\n for command in self.commands.iter() {\n if command.name == *cmd {\n (*command.main)(&args);\n return;\n }\n }\n\n let mut help = \"Commands:\".to_string();\n for command in self.commands.iter() {\n help = help + ' ' + &command.name;\n }\n println!(&help);\n },\n Option::None => ()\n }\n }\n\n fn main(&mut self){\n console_title(&\"Terminal\".to_string());\n \n if let Option::Some(arg) = args().get(1) {\n let command = \"run \".to_string() + arg;\n println!(\"# \".to_string() + &command);\n self.on_command(&command);\n }\n\n while let Option::Some(command) = readln!() {\n println!(\"# \".to_string() + &command);\n if command.len() > 0 {\n self.on_command(&command);\n }\n }\n }\n}\n\npub fn main(){\n unsafe {\n let mut app = box Application::new();\n application = app.deref_mut();\n app.main();\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create main.rs<commit_after>extern crate ws;\nextern crate serde;\nextern crate serde_json;\n\nuse serde_json::Value;\nuse serde_json::value::Map;\n\nstruct BenchHandler {\n ws: ws::Sender,\n count: u32\n}\n\nimpl ws::Handler for BenchHandler {\n fn on_message(&mut self, msg: ws::Message) -> ws::Result<()> {\n if let Some((Some(msg_type), payload)) = msg.into_text().ok()\n .map(|v| v.as_str().to_owned())\n .and_then(|body: String| serde_json::from_str(body.as_str()).ok())\n .and_then(|j: Value| j.as_object().map(move |obj: &Map<String, Value>| {\n let t = obj.get(\"type\").and_then(|t| t.as_str()).map(|s| s.to_owned());\n let p: Value = obj.get(\"payload\").unwrap_or(&Value::Null).clone();\n (t, p)\n })) {\n match msg_type.as_ref() {\n \"echo\" => {\n try!(self.ws.send(format!(\"{}\", payload)));\n },\n \"broadcast\" => {\n try!(self.ws.broadcast(format!(\"{}\", payload)));\n try!(self.ws.send(format!(\"{{\\\"type\\\": \\\"broadcastResult\\\", \\\"listenCount\\\": {}, \\\"payload\\\": {}}}\", self.count, payload)))\n },\n _ => {}\n };\n };\n Ok(())\n }\n\n fn on_open(&mut self, _: ws::Handshake) -> ws::Result<()> {\n self.count += 1;\n println!(\"Connection Open! {}\", self.count);\n Ok(())\n }\n\n fn on_close(&mut self,_: ws::CloseCode, _: &str) {\n self.count -= 1;\n println!(\"Connection Closed! 
{}\", self.count);\n }\n}\n\nfn main() {\n if let Err(error) = ws::listen(\"127.0.0.1:3012\", |out| {\n BenchHandler { ws: out, count: 0 }\n }) {\n println!(\"Failed to create WebSocket due to {:?}\", error);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make `write` more general and get better code reuse.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove hint about secret number<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Docs for metric sinks and formatters<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #382 - sru:tests, r=kbknapp<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n\nextern crate rustc;\nextern crate rustc_driver;\nextern crate rustc_lint;\nextern crate rustc_metadata;\nextern crate rustc_errors;\nextern crate rustc_codegen_utils;\nextern crate syntax;\n\nuse rustc::session::{build_session, Session};\nuse rustc::session::config::{basic_options, Input,\n OutputType, OutputTypes};\nuse rustc_driver::driver::{compile_input, CompileController};\nuse rustc_metadata::cstore::CStore;\nuse rustc_errors::registry::Registry;\nuse syntax::codemap::FileName;\nuse rustc_codegen_utils::codegen_backend::CodegenBackend;\n\nuse std::path::PathBuf;\nuse std::rc::Rc;\n\nfn main() {\n let src = r#\"\n fn main() {}\n \"#;\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() < 4 {\n panic!(\"expected rustc path\");\n }\n\n let tmpdir = PathBuf::from(&args[1]);\n\n let mut sysroot = PathBuf::from(&args[3]);\n sysroot.pop();\n sysroot.pop();\n\n compile(src.to_string(), tmpdir.join(\"out\"), sysroot.clone());\n\n compile(src.to_string(), tmpdir.join(\"out\"), sysroot.clone());\n}\n\nfn basic_sess(sysroot: PathBuf) -> (Session, Rc<CStore>, Box<CodegenBackend>) {\n let mut opts = basic_options();\n opts.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);\n opts.maybe_sysroot = Some(sysroot);\n if let Ok(linker) = std::env::var(\"RUSTC_LINKER\") {\n opts.cg.linker = Some(linker.into());\n }\n\n let descriptions = Registry::new(&rustc::DIAGNOSTICS);\n let sess = build_session(opts, None, descriptions);\n let codegen_backend = rustc_driver::get_codegen_backend(&sess);\n let cstore = Rc::new(CStore::new(codegen_backend.metadata_loader()));\n rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));\n (sess, cstore, codegen_backend)\n}\n\nfn compile(code: String, output: PathBuf, sysroot: PathBuf) {\n syntax::with_globals(|| {\n let (sess, cstore, codegen_backend) = basic_sess(sysroot);\n let control = CompileController::basic();\n let input = Input::Str { name: FileName::Anon, input: code };\n let _ = compile_input(\n codegen_backend,\n &sess,\n &cstore,\n &None,\n &input,\n &None,\n &Some(output),\n None,\n &control\n );\n });\n}\n<commit_msg>Create thread-pool<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_private)]\n\nextern crate rustc;\nextern crate rustc_driver;\nextern crate rustc_lint;\nextern crate rustc_metadata;\nextern crate rustc_errors;\nextern crate rustc_codegen_utils;\nextern crate syntax;\n\nuse rustc::session::{build_session, Session};\nuse rustc::session::config::{basic_options, Input, Options,\n OutputType, OutputTypes};\nuse rustc_driver::driver::{self, compile_input, CompileController};\nuse rustc_metadata::cstore::CStore;\nuse rustc_errors::registry::Registry;\nuse syntax::codemap::FileName;\nuse rustc_codegen_utils::codegen_backend::CodegenBackend;\n\nuse std::path::PathBuf;\nuse std::rc::Rc;\n\nfn main() {\n let src = r#\"\n fn main() {}\n \"#;\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() < 4 {\n panic!(\"expected rustc path\");\n }\n\n let tmpdir = PathBuf::from(&args[1]);\n\n let mut sysroot = PathBuf::from(&args[3]);\n sysroot.pop();\n sysroot.pop();\n\n compile(src.to_string(), tmpdir.join(\"out\"), sysroot.clone());\n\n compile(src.to_string(), tmpdir.join(\"out\"), sysroot.clone());\n}\n\nfn basic_sess(opts: Options) -> (Session, Rc<CStore>, Box<CodegenBackend>) {\n let descriptions = Registry::new(&rustc::DIAGNOSTICS);\n let sess = build_session(opts, None, descriptions);\n let codegen_backend = rustc_driver::get_codegen_backend(&sess);\n let cstore = Rc::new(CStore::new(codegen_backend.metadata_loader()));\n rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));\n (sess, cstore, codegen_backend)\n}\n\nfn compile(code: String, output: PathBuf, sysroot: PathBuf) {\n syntax::with_globals(|| {\n let mut opts = basic_options();\n opts.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);\n opts.maybe_sysroot = Some(sysroot);\n if let Ok(linker) = std::env::var(\"RUSTC_LINKER\") {\n opts.cg.linker = Some(linker.into());\n }\n driver::spawn_thread_pool(opts, |opts| {\n let (sess, cstore, codegen_backend) = basic_sess(opts);\n let control = CompileController::basic();\n let input = Input::Str { name: FileName::Anon, input: code };\n let _ = compile_input(\n codegen_backend,\n &sess,\n &cstore,\n &None,\n &input,\n &None,\n &Some(output),\n None,\n &control\n );\n });\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>72 times faster<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/! The filesystem abstraction code\n\/\/!\n\/\/! # Problem\n\/\/!\n\/\/! First, we had a compiletime backend for the store. This means that the actual filesystem\n\/\/! operations were compiled into the store either as real filesystem operations (in a normal debug\n\/\/! or release build) but as a in-memory variant in the 'test' case.\n\/\/! So tests did not hit the filesystem when running.\n\/\/! This gave us us the possibility to run tests concurrently with multiple\n\/\/! stores that did not interfere with eachother.\n\/\/!\n\/\/! This approach worked perfectly well until we started to test not the\n\/\/! store itself but crates that depend on the store implementation.\n\/\/! When running tests in a crate that depends on the store, the store\n\/\/! itself was compiled with the filesystem-hitting-backend.\n\/\/! This was problematic, as tests could not be implemented without hitting\n\/\/! the filesystem.\n\/\/!\n\/\/! Hence we implemented this.\n\/\/!\n\/\/! # Implementation\n\/\/!\n\/\/! The filesystem is abstracted via a trait `FileAbstraction` which\n\/\/! contains the essential functions for working with the filesystem.\n\/\/!\n\/\/! Two implementations are provided in the code:\n\/\/!\n\/\/! * FSFileAbstraction\n\/\/! * InMemoryFileAbstraction\n\/\/!\n\/\/! whereas the first actually works with the filesystem and the latter\n\/\/! works with an in-memory HashMap that is used as filesystem.\n\/\/!\n\/\/! Further, the trait `FileAbstractionInstance` was introduced for\n\/\/! functions which are executed on actual instances of content from the\n\/\/! filesystem, which was previousely tied into the general abstraction\n\/\/! mechanism.\n\/\/!\n\/\/! So, the `FileAbstraction` trait is for working with the filesystem, the\n\/\/! `FileAbstractionInstance` trait is for working with instances of content\n\/\/! from the filesystem (speak: actual Files).\n\/\/!\n\/\/! In case of the `FSFileAbstractionInstance`, which is the implementation\n\/\/! of the `FileAbstractionInstance` for the actual filesystem-hitting code,\n\/\/! the underlying resource is managed like with the old code before.\n\/\/! The `InMemoryFileAbstractionInstance` implementation is corrosponding to\n\/\/! the `InMemoryFileAbstraction` implementation - for the in-memory\n\/\/! \"filesystem\".\n\/\/!\n\/\/! The implementation of the `get_file_content()` function had to be\n\/\/! changed to return a `String` rather than a `&mut Read` because of\n\/\/! lifetime issues.\n\/\/! This change is store-internally and the API of the store itself was not\n\/\/! affected.\n\/\/!\n\nuse std::path::PathBuf;\nuse std::fmt::Debug;\n\nuse error::StoreError as SE;\n\npub use self::fs::FSFileAbstraction;\npub use self::fs::FSFileAbstractionInstance;\npub use self::inmemory::InMemoryFileAbstraction;\npub use self::inmemory::InMemoryFileAbstractionInstance;\n\n\/\/ TODO:\n\/\/ This whole thing can be written better with a trait based mechanism that is embedded into the\n\/\/ store. 
However it would mean rewriting most things to be generic which can be a pain in the ass.\n\n\/\/\/ An abstraction trait over filesystem actions\npub trait FileAbstraction : Debug {\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE>;\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE>;\n fn rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE>;\n fn create_dir_all(&self, _: &PathBuf) -> Result<(), SE>;\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance>;\n}\n\n\/\/\/ An abstraction trait over actions on files\npub trait FileAbstractionInstance : Debug {\n fn get_file_content(&mut self) -> Result<String, SE>;\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE>;\n}\n\nmod fs {\n use std::fs::{File, OpenOptions, create_dir_all, remove_file, copy, rename};\n use std::io::{Seek, SeekFrom, Read};\n use std::path::{Path, PathBuf};\n\n use error::{MapErrInto, StoreError as SE, StoreErrorKind as SEK};\n\n use super::FileAbstraction;\n use super::FileAbstractionInstance;\n\n #[derive(Debug)]\n pub enum FSFileAbstractionInstance {\n Absent(PathBuf),\n File(File, PathBuf)\n }\n\n impl FileAbstractionInstance for FSFileAbstractionInstance {\n\n \/**\n * Get the content behind this file\n *\/\n fn get_file_content(&mut self) -> Result<String, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n let (file, path) = match *self {\n FSFileAbstractionInstance::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotSeeked));\n\n let mut s = String::new();\n f.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n },\n FSFileAbstractionInstance::Absent(ref p) =>\n (try!(open_file(p).map_err_into(SEK::FileNotFound)), p.clone()),\n };\n *self = FSFileAbstractionInstance::File(file, path);\n if let FSFileAbstractionInstance::File(ref mut f, _) = *self {\n let mut s = String::new();\n f.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n } else {\n unreachable!()\n }\n }\n\n \/**\n * Write the content of this file\n *\/\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n use std::io::Write;\n let (file, path) = match *self {\n FSFileAbstractionInstance::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotCreated));\n f.write_all(buf).map_err_into(SEK::FileNotWritten)\n },\n FSFileAbstractionInstance::Absent(ref p) =>\n (try!(create_file(p).map_err_into(SEK::FileNotCreated)), p.clone()),\n };\n *self = FSFileAbstractionInstance::File(file, path);\n if let FSFileAbstractionInstance::File(ref mut f, _) = *self {\n return f.write_all(buf).map_err_into(SEK::FileNotWritten);\n }\n unreachable!();\n }\n\n }\n\n \/\/\/ `FSFileAbstraction` state type\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub struct FSFileAbstraction {\n }\n\n impl FSFileAbstraction {\n pub fn new() -> FSFileAbstraction {\n FSFileAbstraction { }\n }\n }\n\n impl FileAbstraction for FSFileAbstraction {\n\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE> {\n remove_file(path).map_err_into(SEK::FileNotRemoved)\n }\n\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n copy(from, to).map_err_into(SEK::FileNotCopied).map(|_| ())\n }\n\n fn 
rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n rename(from, to).map_err_into(SEK::FileNotRenamed)\n }\n\n fn create_dir_all(&self, path: &PathBuf) -> Result<(), SE> {\n create_dir_all(path).map_err_into(SEK::DirNotCreated)\n }\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance> {\n Box::new(FSFileAbstractionInstance::Absent(p))\n }\n }\n\n fn open_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n OpenOptions::new().write(true).read(true).open(p)\n }\n\n fn create_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n if let Some(parent) = p.as_ref().parent() {\n debug!(\"Implicitely creating directory: {:?}\", parent);\n if let Err(e) = create_dir_all(parent) {\n return Err(e);\n }\n }\n OpenOptions::new().write(true).read(true).create(true).open(p)\n }\n\n}\n\nmod inmemory {\n use error::StoreError as SE;\n use error::StoreErrorKind as SEK;\n use std::io::Read;\n use std::io::Cursor;\n use std::path::PathBuf;\n use std::collections::HashMap;\n use std::sync::Mutex;\n use std::cell::RefCell;\n use std::sync::Arc;\n\n use libimagerror::into::IntoError;\n\n use super::FileAbstraction;\n use super::FileAbstractionInstance;\n use error::MapErrInto;\n\n type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Cursor<Vec<u8>>>>>>;\n\n \/\/\/ `FileAbstraction` type, this is the Test version!\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub struct InMemoryFileAbstractionInstance {\n fs_abstraction: Backend,\n absent_path: PathBuf,\n }\n\n impl InMemoryFileAbstractionInstance {\n\n pub fn new(fs: Backend, pb: PathBuf) -> InMemoryFileAbstractionInstance {\n InMemoryFileAbstractionInstance {\n fs_abstraction: fs,\n absent_path: pb\n }\n }\n\n }\n\n impl FileAbstractionInstance for InMemoryFileAbstractionInstance {\n\n \/**\n * Get the mutable file behind a InMemoryFileAbstraction object\n *\/\n fn get_file_content(&mut self) -> Result<String, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n\n let p = self.absent_path.clone();\n match self.fs_abstraction.lock() {\n Ok(mut mtx) => {\n mtx.get_mut()\n .get_mut(&p)\n .ok_or(SEK::FileNotFound.into_error())\n .and_then(|t| {\n let mut s = String::new();\n t.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n })\n }\n\n Err(_) => Err(SEK::LockError.into_error())\n }\n }\n\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n match *self {\n InMemoryFileAbstractionInstance { ref absent_path, .. 
} => {\n let mut mtx = self.fs_abstraction.lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n if let Some(ref mut cur) = backend.get_mut(absent_path) {\n let mut vec = cur.get_mut();\n vec.clear();\n vec.extend_from_slice(buf);\n return Ok(());\n }\n let vec = Vec::from(buf);\n backend.insert(absent_path.clone(), Cursor::new(vec));\n return Ok(());\n },\n };\n }\n }\n\n #[derive(Debug)]\n pub struct InMemoryFileAbstraction {\n virtual_filesystem: Backend,\n }\n\n impl InMemoryFileAbstraction {\n\n pub fn new() -> InMemoryFileAbstraction {\n InMemoryFileAbstraction {\n virtual_filesystem: Arc::new(Mutex::new(RefCell::new(HashMap::new()))),\n }\n }\n\n pub fn backend(&self) -> &Backend {\n &self.virtual_filesystem\n }\n\n }\n\n impl FileAbstraction for InMemoryFileAbstraction {\n\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE> {\n debug!(\"Removing: {:?}\", path);\n self.backend()\n .lock()\n .expect(\"Locking Mutex failed\")\n .get_mut()\n .remove(path)\n .map(|_| ())\n .ok_or(SEK::FileNotFound.into_error())\n }\n\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n debug!(\"Copying : {:?} -> {:?}\", from, to);\n let mut mtx = self.backend().lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n let a = try!(backend.get(from).cloned().ok_or(SEK::FileNotFound.into_error()));\n backend.insert(to.clone(), a);\n debug!(\"Copying: {:?} -> {:?} worked\", from, to);\n Ok(())\n }\n\n fn rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n debug!(\"Renaming: {:?} -> {:?}\", from, to);\n let mut mtx = self.backend().lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n let a = try!(backend.get(from).cloned().ok_or(SEK::FileNotFound.into_error()));\n backend.insert(to.clone(), a);\n debug!(\"Renaming: {:?} -> {:?} worked\", from, to);\n Ok(())\n }\n\n fn create_dir_all(&self, _: &PathBuf) -> Result<(), SE> {\n Ok(())\n }\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance> {\n Box::new(InMemoryFileAbstractionInstance::new(self.backend().clone(), p))\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use super::FileAbstractionInstance;\n use super::inmemory::InMemoryFileAbstraction;\n use super::inmemory::InMemoryFileAbstractionInstance;\n use std::path::PathBuf;\n\n #[test]\n fn lazy_file() {\n let fs = InMemoryFileAbstraction::new();\n\n let mut path = PathBuf::from(\"\/tests\");\n path.set_file_name(\"test1\");\n let mut lf = InMemoryFileAbstractionInstance::new(fs.backend().clone(), path);\n lf.write_file_content(b\"Hello World\").unwrap();\n let bah = lf.get_file_content().unwrap();\n assert_eq!(bah, \"Hello World\");\n }\n\n}\n<commit_msg>Remove \"TODO\"-comment<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/! The filesystem abstraction code\n\/\/!\n\/\/! # Problem\n\/\/!\n\/\/! First, we had a compiletime backend for the store. This means that the actual filesystem\n\/\/! operations were compiled into the store either as real filesystem operations (in a normal debug\n\/\/! or release build) but as a in-memory variant in the 'test' case.\n\/\/! So tests did not hit the filesystem when running.\n\/\/! This gave us us the possibility to run tests concurrently with multiple\n\/\/! stores that did not interfere with eachother.\n\/\/!\n\/\/! This approach worked perfectly well until we started to test not the\n\/\/! store itself but crates that depend on the store implementation.\n\/\/! When running tests in a crate that depends on the store, the store\n\/\/! itself was compiled with the filesystem-hitting-backend.\n\/\/! This was problematic, as tests could not be implemented without hitting\n\/\/! the filesystem.\n\/\/!\n\/\/! Hence we implemented this.\n\/\/!\n\/\/! # Implementation\n\/\/!\n\/\/! The filesystem is abstracted via a trait `FileAbstraction` which\n\/\/! contains the essential functions for working with the filesystem.\n\/\/!\n\/\/! Two implementations are provided in the code:\n\/\/!\n\/\/! * FSFileAbstraction\n\/\/! * InMemoryFileAbstraction\n\/\/!\n\/\/! whereas the first actually works with the filesystem and the latter\n\/\/! works with an in-memory HashMap that is used as filesystem.\n\/\/!\n\/\/! Further, the trait `FileAbstractionInstance` was introduced for\n\/\/! functions which are executed on actual instances of content from the\n\/\/! filesystem, which was previousely tied into the general abstraction\n\/\/! mechanism.\n\/\/!\n\/\/! So, the `FileAbstraction` trait is for working with the filesystem, the\n\/\/! `FileAbstractionInstance` trait is for working with instances of content\n\/\/! from the filesystem (speak: actual Files).\n\/\/!\n\/\/! In case of the `FSFileAbstractionInstance`, which is the implementation\n\/\/! of the `FileAbstractionInstance` for the actual filesystem-hitting code,\n\/\/! the underlying resource is managed like with the old code before.\n\/\/! The `InMemoryFileAbstractionInstance` implementation is corrosponding to\n\/\/! the `InMemoryFileAbstraction` implementation - for the in-memory\n\/\/! \"filesystem\".\n\/\/!\n\/\/! The implementation of the `get_file_content()` function had to be\n\/\/! changed to return a `String` rather than a `&mut Read` because of\n\/\/! lifetime issues.\n\/\/! This change is store-internally and the API of the store itself was not\n\/\/! 
affected.\n\/\/!\n\nuse std::path::PathBuf;\nuse std::fmt::Debug;\n\nuse error::StoreError as SE;\n\npub use self::fs::FSFileAbstraction;\npub use self::fs::FSFileAbstractionInstance;\npub use self::inmemory::InMemoryFileAbstraction;\npub use self::inmemory::InMemoryFileAbstractionInstance;\n\n\/\/\/ An abstraction trait over filesystem actions\npub trait FileAbstraction : Debug {\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE>;\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE>;\n fn rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE>;\n fn create_dir_all(&self, _: &PathBuf) -> Result<(), SE>;\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance>;\n}\n\n\/\/\/ An abstraction trait over actions on files\npub trait FileAbstractionInstance : Debug {\n fn get_file_content(&mut self) -> Result<String, SE>;\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE>;\n}\n\nmod fs {\n use std::fs::{File, OpenOptions, create_dir_all, remove_file, copy, rename};\n use std::io::{Seek, SeekFrom, Read};\n use std::path::{Path, PathBuf};\n\n use error::{MapErrInto, StoreError as SE, StoreErrorKind as SEK};\n\n use super::FileAbstraction;\n use super::FileAbstractionInstance;\n\n #[derive(Debug)]\n pub enum FSFileAbstractionInstance {\n Absent(PathBuf),\n File(File, PathBuf)\n }\n\n impl FileAbstractionInstance for FSFileAbstractionInstance {\n\n \/**\n * Get the content behind this file\n *\/\n fn get_file_content(&mut self) -> Result<String, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n let (file, path) = match *self {\n FSFileAbstractionInstance::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotSeeked));\n\n let mut s = String::new();\n f.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n },\n FSFileAbstractionInstance::Absent(ref p) =>\n (try!(open_file(p).map_err_into(SEK::FileNotFound)), p.clone()),\n };\n *self = FSFileAbstractionInstance::File(file, path);\n if let FSFileAbstractionInstance::File(ref mut f, _) = *self {\n let mut s = String::new();\n f.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n } else {\n unreachable!()\n }\n }\n\n \/**\n * Write the content of this file\n *\/\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n use std::io::Write;\n let (file, path) = match *self {\n FSFileAbstractionInstance::File(ref mut f, _) => return {\n \/\/ We seek to the beginning of the file since we expect each\n \/\/ access to the file to be in a different context\n try!(f.seek(SeekFrom::Start(0))\n .map_err_into(SEK::FileNotCreated));\n f.write_all(buf).map_err_into(SEK::FileNotWritten)\n },\n FSFileAbstractionInstance::Absent(ref p) =>\n (try!(create_file(p).map_err_into(SEK::FileNotCreated)), p.clone()),\n };\n *self = FSFileAbstractionInstance::File(file, path);\n if let FSFileAbstractionInstance::File(ref mut f, _) = *self {\n return f.write_all(buf).map_err_into(SEK::FileNotWritten);\n }\n unreachable!();\n }\n\n }\n\n \/\/\/ `FSFileAbstraction` state type\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub struct FSFileAbstraction {\n }\n\n impl FSFileAbstraction {\n pub fn new() -> FSFileAbstraction {\n FSFileAbstraction { }\n }\n }\n\n impl FileAbstraction for FSFileAbstraction {\n\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE> 
{\n remove_file(path).map_err_into(SEK::FileNotRemoved)\n }\n\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n copy(from, to).map_err_into(SEK::FileNotCopied).map(|_| ())\n }\n\n fn rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n rename(from, to).map_err_into(SEK::FileNotRenamed)\n }\n\n fn create_dir_all(&self, path: &PathBuf) -> Result<(), SE> {\n create_dir_all(path).map_err_into(SEK::DirNotCreated)\n }\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance> {\n Box::new(FSFileAbstractionInstance::Absent(p))\n }\n }\n\n fn open_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n OpenOptions::new().write(true).read(true).open(p)\n }\n\n fn create_file<A: AsRef<Path>>(p: A) -> ::std::io::Result<File> {\n if let Some(parent) = p.as_ref().parent() {\n debug!(\"Implicitely creating directory: {:?}\", parent);\n if let Err(e) = create_dir_all(parent) {\n return Err(e);\n }\n }\n OpenOptions::new().write(true).read(true).create(true).open(p)\n }\n\n}\n\nmod inmemory {\n use error::StoreError as SE;\n use error::StoreErrorKind as SEK;\n use std::io::Read;\n use std::io::Cursor;\n use std::path::PathBuf;\n use std::collections::HashMap;\n use std::sync::Mutex;\n use std::cell::RefCell;\n use std::sync::Arc;\n\n use libimagerror::into::IntoError;\n\n use super::FileAbstraction;\n use super::FileAbstractionInstance;\n use error::MapErrInto;\n\n type Backend = Arc<Mutex<RefCell<HashMap<PathBuf, Cursor<Vec<u8>>>>>>;\n\n \/\/\/ `FileAbstraction` type, this is the Test version!\n \/\/\/\n \/\/\/ A lazy file is either absent, but a path to it is available, or it is present.\n #[derive(Debug)]\n pub struct InMemoryFileAbstractionInstance {\n fs_abstraction: Backend,\n absent_path: PathBuf,\n }\n\n impl InMemoryFileAbstractionInstance {\n\n pub fn new(fs: Backend, pb: PathBuf) -> InMemoryFileAbstractionInstance {\n InMemoryFileAbstractionInstance {\n fs_abstraction: fs,\n absent_path: pb\n }\n }\n\n }\n\n impl FileAbstractionInstance for InMemoryFileAbstractionInstance {\n\n \/**\n * Get the mutable file behind a InMemoryFileAbstraction object\n *\/\n fn get_file_content(&mut self) -> Result<String, SE> {\n debug!(\"Getting lazy file: {:?}\", self);\n\n let p = self.absent_path.clone();\n match self.fs_abstraction.lock() {\n Ok(mut mtx) => {\n mtx.get_mut()\n .get_mut(&p)\n .ok_or(SEK::FileNotFound.into_error())\n .and_then(|t| {\n let mut s = String::new();\n t.read_to_string(&mut s)\n .map_err_into(SEK::IoError)\n .map(|_| s)\n })\n }\n\n Err(_) => Err(SEK::LockError.into_error())\n }\n }\n\n fn write_file_content(&mut self, buf: &[u8]) -> Result<(), SE> {\n match *self {\n InMemoryFileAbstractionInstance { ref absent_path, .. 
} => {\n let mut mtx = self.fs_abstraction.lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n if let Some(ref mut cur) = backend.get_mut(absent_path) {\n let mut vec = cur.get_mut();\n vec.clear();\n vec.extend_from_slice(buf);\n return Ok(());\n }\n let vec = Vec::from(buf);\n backend.insert(absent_path.clone(), Cursor::new(vec));\n return Ok(());\n },\n };\n }\n }\n\n #[derive(Debug)]\n pub struct InMemoryFileAbstraction {\n virtual_filesystem: Backend,\n }\n\n impl InMemoryFileAbstraction {\n\n pub fn new() -> InMemoryFileAbstraction {\n InMemoryFileAbstraction {\n virtual_filesystem: Arc::new(Mutex::new(RefCell::new(HashMap::new()))),\n }\n }\n\n pub fn backend(&self) -> &Backend {\n &self.virtual_filesystem\n }\n\n }\n\n impl FileAbstraction for InMemoryFileAbstraction {\n\n fn remove_file(&self, path: &PathBuf) -> Result<(), SE> {\n debug!(\"Removing: {:?}\", path);\n self.backend()\n .lock()\n .expect(\"Locking Mutex failed\")\n .get_mut()\n .remove(path)\n .map(|_| ())\n .ok_or(SEK::FileNotFound.into_error())\n }\n\n fn copy(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n debug!(\"Copying : {:?} -> {:?}\", from, to);\n let mut mtx = self.backend().lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n let a = try!(backend.get(from).cloned().ok_or(SEK::FileNotFound.into_error()));\n backend.insert(to.clone(), a);\n debug!(\"Copying: {:?} -> {:?} worked\", from, to);\n Ok(())\n }\n\n fn rename(&self, from: &PathBuf, to: &PathBuf) -> Result<(), SE> {\n debug!(\"Renaming: {:?} -> {:?}\", from, to);\n let mut mtx = self.backend().lock().expect(\"Locking Mutex failed\");\n let mut backend = mtx.get_mut();\n\n let a = try!(backend.get(from).cloned().ok_or(SEK::FileNotFound.into_error()));\n backend.insert(to.clone(), a);\n debug!(\"Renaming: {:?} -> {:?} worked\", from, to);\n Ok(())\n }\n\n fn create_dir_all(&self, _: &PathBuf) -> Result<(), SE> {\n Ok(())\n }\n\n fn new_instance(&self, p: PathBuf) -> Box<FileAbstractionInstance> {\n Box::new(InMemoryFileAbstractionInstance::new(self.backend().clone(), p))\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n use super::FileAbstractionInstance;\n use super::inmemory::InMemoryFileAbstraction;\n use super::inmemory::InMemoryFileAbstractionInstance;\n use std::path::PathBuf;\n\n #[test]\n fn lazy_file() {\n let fs = InMemoryFileAbstraction::new();\n\n let mut path = PathBuf::from(\"\/tests\");\n path.set_file_name(\"test1\");\n let mut lf = InMemoryFileAbstractionInstance::new(fs.backend().clone(), path);\n lf.write_file_content(b\"Hello World\").unwrap();\n let bah = lf.get_file_content().unwrap();\n assert_eq!(bah, \"Hello World\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>initial<commit_after>use std::os;\nuse std::io::File;\nuse std::collections::HashMap;\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\n\n\nfn search(needle: String, haystack: Vec<char>) -> Option<(usize, usize)> {\n let needle: Vec<char> = needle.chars().collect();\n\n let mut indexed_haystack = HashMap::new();\n for (idx, c) in haystack.iter().enumerate() {\n match indexed_haystack.entry(c) {\n Vacant(entry) => { entry.insert(vec![idx]); },\n Occupied(mut entry) => { (*entry.get_mut()).push(idx); }\n }\n }\n\n let possible_starts: &Vec<usize>;\n match indexed_haystack.get(&needle[0]) {\n Some(ref entries) => {\n possible_starts = *entries;\n },\n None => {\n return None;\n }\n }\n\n let second_pos: &Vec<usize>;\n match indexed_haystack.get(&needle[1]) {\n None => { return None; }\n 
Some(ref entries) => {\n second_pos = *entries;\n }\n }\n let mut possible_steps: Vec<(usize, usize)> = Vec::new();\n for first_pos in possible_starts.iter() {\n for second_pos in second_pos.iter() {\n if first_pos < second_pos {\n\n let start = *first_pos;\n let step = *second_pos - *first_pos;\n\n if haystack.iter().skip(start).enumerate().filter(|&(idx, _)| { idx % step == 0 }).map(|(_, char)| { char })\n .zip(needle.iter())\n .all(|(&x, &y)| { x == y }) {\n return Some((start, step));\n }\n }\n }\n }\n\n\n None\n}\n\nfn main() {\n let file_to_search = &os::args()[1];\n println!(\"Reading in {}\", file_to_search);\n let haystack: Vec<char> = File::open(&Path::new(file_to_search)).read_to_string().unwrap().chars().filter(|&x| { x.is_alphabetic() }).map(|x| { x.to_lowercase() }).collect();\n\n let needle = (&os::args()[2]).to_string();\n println!(\"Looking for {}\", needle);\n match search(needle, haystack) {\n None => { println!(\"Not found\"); }\n Some((start, step)) => { println!(\"Found starting at {} with step of {}\", start, step); }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make StoreId::is_in_collection() generic over AsRef<str><commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add support for padded range expansion. (#733)<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\/ A graph similarity score using neighbor matching according to [this paper][1].\n\/\/\/\n\/\/\/ [1]: http:\/\/arxiv.org\/abs\/1009.5290 \"2010, Mladen Nikolic, Measuring Similarity\n\/\/\/ of Graph Nodes by Neighbor Matching\"\n\nextern crate nalgebra;\nextern crate munkres;\n\nuse nalgebra::{DMat, Shape, ApproxEq};\nuse munkres::{WeightMatrix, solve_assignment};\nuse std::cmp;\nuse std::mem;\n\npub type Idx = u32;\n\ntrait Edges {\n \/\/\/ The number of edges\n fn len(&self) -> usize;\n\n \/\/\/ Returns the target node of the nth-edge\n fn nth_edge(&self, n: usize) -> usize;\n\n \/\/\/ Returns the nth edge weight\n fn nth_edge_weight(&self, _n: usize) -> f32 {\n panic!();\n }\n}\n\nimpl<'a> Edges for &'a [Idx] {\n #[inline]\n fn len(&self) -> usize {\n let x: &[Idx] = self;\n x.len()\n }\n #[inline]\n fn nth_edge(&self, n: usize) -> usize {\n self[n] as usize\n }\n}\n\n#[inline]\n\/\/\/ Calculates the similarity of two nodes `i` and `j`.\n\/\/\/\n\/\/\/ `n_i` contains the neighborhood of i (either in or out neighbors, not both)\n\/\/\/ `n_j` contains the neighborhood of j (either in or out neighbors, not both)\n\/\/\/ `x` the similarity matrix.\nfn s_next<T: Edges>(n_i: T, n_j: T, x: &DMat<f32>) -> f32 {\n let max_deg = cmp::max(n_i.len(), n_j.len());\n let min_deg = cmp::min(n_i.len(), n_j.len());\n\n if min_deg == 0 {\n \/\/ in the paper, 0\/0 is defined as 1.0\n return 1.0;\n }\n\n assert!(min_deg > 0 && max_deg > 0);\n\n \/\/ map indicies from 0..min(degree) to the node indices\n let mapidx = |(a, b)| (n_i.nth_edge(a), n_j.nth_edge(b));\n\n let mut w = WeightMatrix::from_fn(min_deg, |ab| x[mapidx(ab)]);\n\n let assignment = solve_assignment(&mut w);\n assert!(assignment.len() == min_deg);\n\n let sum: f32 = assignment.iter().fold(0.0, |acc, &ab| acc + x[mapidx(ab)]);\n\n return sum \/ max_deg as f32;\n}\n\n#[inline]\n\/\/\/ Calculates x[k+1]\n\/\/\/\n\/\/\/ `node_color_scale((i,j))`: If two nodes `i` (of graph A) and `j` (of graph B)\n\/\/\/ are of different color, this can be set to return 0.0. 
Alternatively a\n\/\/\/ node-color distance (within 0...1) could be used to penalize.\nfn next_x<F>(x: &DMat<f32>,\n new_x: &mut DMat<f32>,\n in_a: &[Vec<Idx>],\n in_b: &[Vec<Idx>],\n out_a: &[Vec<Idx>],\n out_b: &[Vec<Idx>],\n node_color_scale: F)\n where F: Fn((usize, usize)) -> f32\n{\n let shape = x.shape();\n assert!(shape == new_x.shape());\n\n for i in 0..shape.0 {\n for j in 0..shape.1 {\n let in_i: &[Idx] = &in_a[i];\n let in_j: &[Idx] = &in_b[j];\n let out_i: &[Idx] = &out_a[i];\n let out_j: &[Idx] = &out_b[j];\n new_x[(i, j)] = node_color_scale((i, j)) *\n (s_next(in_i, in_j, x) + s_next(out_i, out_j, x)) \/\n 2.0;\n }\n }\n}\n\n#[derive(Debug)]\npub struct NodeSimilarityMatrix<'a> {\n graph_a: Graph<'a>,\n graph_b: Graph<'a>,\n \/\/ current version of similarity matrix\n current: DMat<f32>,\n \/\/ previous version of similarity matrix\n previous: DMat<f32>,\n \/\/ current number of iterations\n num_iters: usize,\n}\n\n#[derive(Copy, Clone, Debug)]\npub struct Graph<'a> {\n in_edges: &'a [Vec<Idx>],\n out_edges: &'a [Vec<Idx>],\n}\n\nimpl<'a> Graph<'a> {\n pub fn new<'b>(in_edges: &'b [Vec<Idx>], out_edges: &'b [Vec<Idx>]) -> Graph<'b> {\n assert!(in_edges.len() == out_edges.len());\n Graph {\n in_edges: in_edges,\n out_edges: out_edges,\n }\n }\n\n fn num_nodes(&self) -> usize {\n let n = self.in_edges.len();\n assert!(n == self.out_edges.len());\n n\n }\n\n fn node_degree(&self, node_idx: usize) -> usize {\n self.in_edges[node_idx].len() + self.out_edges[node_idx].len()\n }\n}\n\nimpl<'a> NodeSimilarityMatrix<'a> {\n pub fn new<'b, F>(graph_a: Graph<'b>,\n graph_b: Graph<'b>,\n node_color_scale: &F)\n -> NodeSimilarityMatrix<'b>\n where F: Fn((usize, usize)) -> f32\n {\n \/\/ `x` is the node-similarity matrix.\n \/\/ we initialize `x`, so that x[i,j]=1 for all i in A.edges() and j in\n \/\/ B.edges().\n let x: DMat<f32> = DMat::from_fn(graph_a.num_nodes(), graph_b.num_nodes(), |i, j| {\n node_color_scale((i, j)) *\n if graph_a.node_degree(i) > 0 && graph_b.node_degree(j) > 0 {\n 1.0\n } else {\n 0.0\n }\n });\n\n let new_x: DMat<f32> = DMat::new_zeros(graph_a.num_nodes(), graph_b.num_nodes());\n\n NodeSimilarityMatrix {\n graph_a: graph_a,\n graph_b: graph_b,\n current: x,\n previous: new_x,\n num_iters: 0,\n }\n }\n\n fn in_eps(&self, eps: f32) -> bool {\n self.previous.approx_eq_eps(&self.current, &eps)\n }\n\n \/\/\/ Calculates the next iteration of the similarity matrix.\n pub fn next<F>(&mut self, node_color_scale: &F)\n where F: Fn((usize, usize)) -> f32\n {\n next_x(&self.current,\n &mut self.previous,\n self.graph_a.in_edges,\n self.graph_b.in_edges,\n self.graph_a.out_edges,\n self.graph_b.out_edges,\n node_color_scale);\n mem::swap(&mut self.previous, &mut self.current);\n self.num_iters += 1;\n }\n\n #[inline]\n \/\/\/ Iteratively calculate the similarity matrix.\n \/\/\/\n \/\/\/ `eps`: When to stop the iteration\n \/\/\/ `stop_after_iter`: Stop after iteration (Calculate x(stop_after_iter))\n pub fn iterate<F>(&mut self, eps: f32, stop_after_iter: usize, node_color_scale: &F)\n where F: Fn((usize, usize)) -> f32\n {\n for _ in 0..stop_after_iter {\n if self.in_eps(eps) {\n break;\n }\n self.next(node_color_scale);\n }\n }\n\n pub fn matrix(&self) -> &DMat<f32> {\n &self.current\n }\n\n pub fn num_iterations(&self) -> usize {\n self.num_iters\n }\n\n fn optimal_node_assignment(&self, n: usize) -> Vec<(usize, usize)> {\n let x = &self.current;\n assert!(n > 0);\n let mut w = WeightMatrix::from_fn(n, |ij| x[ij]);\n let assignment = solve_assignment(&mut w);\n 
assert!(assignment.len() == n);\n assignment\n }\n\n fn score_optimal_sum(&self, n: usize) -> f32 {\n self.optimal_node_assignment(n).iter().fold(0.0, |acc, &ab| acc + self.current[ab])\n }\n\n \/\/\/ Calculate a measure how good the edge weights match up.\n \/\/\/\n \/\/\/ We start by calculating the optimal node assignment between nodes of graph A and\n \/\/\/ graph B, then compare all outgoing edges of similar nodes by again using an\n \/\/\/ assignment.\n pub fn score_outgoing_edge_weights(&self) -> f32 {\n \/\/ XXX\n 0.0\n }\n\n \/\/\/ Sums the optimal assignment of the node similarities and normalizes (divides)\n \/\/\/ by the min degree of both graphs.\n \/\/\/ Used as default in the paper.\n pub fn score_sum_norm_min_degree(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n if n > 0 {\n self.score_optimal_sum(n) \/ n as f32\n } else {\n 0.0\n }\n }\n\n \/\/\/ Sums the optimal assignment of the node similarities and normalizes (divides)\n \/\/\/ by the min degree of both graphs.\n \/\/\/ Penalizes the difference in size of graphs.\n pub fn score_sum_norm_max_degree(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n let m = cmp::max(x.nrows(), x.ncols());\n\n if n > 0 {\n assert!(m > 0);\n self.score_optimal_sum(n) \/ m as f32\n } else {\n 0.0\n }\n }\n\n \/\/\/ Calculates the average over the whole node similarity matrix. This is faster,\n \/\/\/ as no assignment has to be found. \"Graphs with greater number of automorphisms\n \/\/\/ would be considered to be more self-similar than graphs without automorphisms.\"\n pub fn score_average(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n if n > 0 {\n let sum: f32 = x.as_vec().iter().fold(0.0, |acc, &v| acc + v);\n let len = x.as_vec().len();\n assert!(len > 0);\n sum \/ len as f32\n } else {\n 0.0\n }\n }\n}\n\n#[test]\nfn test_matrix() {\n \/\/ A: 0 --> 1\n let in_a = vec![vec![], vec![0]];\n let out_a = vec![vec![1], vec![]];\n\n \/\/ B: 0 <-- 1\n let in_b = vec![vec![1], vec![]];\n let out_b = vec![vec![], vec![0]];\n\n let node_color = |_| 1.0;\n let mut s = NodeSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 100, &node_color);\n\n println!(\"{:?}\", s);\n assert_eq!(1, s.num_iterations());\n let mat = s.matrix();\n assert_eq!(2, mat.nrows());\n assert_eq!(2, mat.ncols());\n\n \/\/ A and B are isomorphic\n assert_eq!(1.0, mat[(0, 0)]);\n assert_eq!(1.0, mat[(0, 1)]);\n assert_eq!(1.0, mat[(1, 0)]);\n assert_eq!(1.0, mat[(1, 1)]);\n}\n\n#[test]\nfn test_matrix_iter1() {\n let in_a = vec![vec![0, 0, 0]];\n let out_a = vec![vec![0, 0, 0]];\n\n let in_b = vec![vec![0, 0, 0, 0, 0]];\n let out_b = vec![vec![0, 0, 0, 0, 0]];\n\n let node_color = |_| 1.0;\n let mut s = NodeSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 1, &node_color);\n\n assert_eq!(1, s.num_iterations());\n let mat = s.matrix();\n assert_eq!(3.0 \/ 5.0, mat[(0, 0)]);\n}\n\n\n#[test]\nfn test_score() {\n \/\/ A: 0 --> 1\n let in_a = vec![vec![], vec![0]];\n let out_a = vec![vec![1], vec![]];\n\n \/\/ B: 0 <-- 1\n let in_b = vec![vec![1], vec![]];\n let out_b = vec![vec![], vec![0]];\n\n let node_color = |_| 1.0;\n let mut s = NodeSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 100, &node_color);\n\n assert_eq!(1, s.num_iterations());\n\n \/\/ The score is 1.0 <=> A and B are 
isomorphic\n assert_eq!(1.0, s.score_sum_norm_min_degree());\n\n \/\/ The score is 1.0 <=> A and B are isomorphic\n assert_eq!(1.0, s.score_sum_norm_max_degree());\n}\n<commit_msg>Rename NodeSimilarityMatrix -> GraphSimilarityMatrix<commit_after>\/\/\/ A graph similarity score using neighbor matching according to [this paper][1].\n\/\/\/\n\/\/\/ [1]: http:\/\/arxiv.org\/abs\/1009.5290 \"2010, Mladen Nikolic, Measuring Similarity\n\/\/\/ of Graph Nodes by Neighbor Matching\"\n\nextern crate nalgebra;\nextern crate munkres;\n\nuse nalgebra::{DMat, Shape, ApproxEq};\nuse munkres::{WeightMatrix, solve_assignment};\nuse std::cmp;\nuse std::mem;\n\npub type Idx = u32;\n\ntrait Edges {\n \/\/\/ The number of edges\n fn len(&self) -> usize;\n\n \/\/\/ Returns the target node of the nth-edge\n fn nth_edge(&self, n: usize) -> usize;\n\n \/\/\/ Returns the nth edge weight\n fn nth_edge_weight(&self, _n: usize) -> f32 {\n panic!();\n }\n}\n\nimpl<'a> Edges for &'a [Idx] {\n #[inline]\n fn len(&self) -> usize {\n let x: &[Idx] = self;\n x.len()\n }\n #[inline]\n fn nth_edge(&self, n: usize) -> usize {\n self[n] as usize\n }\n}\n\n#[inline]\n\/\/\/ Calculates the similarity of two nodes `i` and `j`.\n\/\/\/\n\/\/\/ `n_i` contains the neighborhood of i (either in or out neighbors, not both)\n\/\/\/ `n_j` contains the neighborhood of j (either in or out neighbors, not both)\n\/\/\/ `x` the similarity matrix.\nfn s_next<T: Edges>(n_i: T, n_j: T, x: &DMat<f32>) -> f32 {\n let max_deg = cmp::max(n_i.len(), n_j.len());\n let min_deg = cmp::min(n_i.len(), n_j.len());\n\n if min_deg == 0 {\n \/\/ in the paper, 0\/0 is defined as 1.0\n return 1.0;\n }\n\n assert!(min_deg > 0 && max_deg > 0);\n\n \/\/ map indicies from 0..min(degree) to the node indices\n let mapidx = |(a, b)| (n_i.nth_edge(a), n_j.nth_edge(b));\n\n let mut w = WeightMatrix::from_fn(min_deg, |ab| x[mapidx(ab)]);\n\n let assignment = solve_assignment(&mut w);\n assert!(assignment.len() == min_deg);\n\n let sum: f32 = assignment.iter().fold(0.0, |acc, &ab| acc + x[mapidx(ab)]);\n\n return sum \/ max_deg as f32;\n}\n\n#[inline]\n\/\/\/ Calculates x[k+1]\n\/\/\/\n\/\/\/ `node_color_scale((i,j))`: If two nodes `i` (of graph A) and `j` (of graph B)\n\/\/\/ are of different color, this can be set to return 0.0. 
Alternatively a\n\/\/\/ node-color distance (within 0...1) could be used to penalize.\nfn next_x<F>(x: &DMat<f32>,\n new_x: &mut DMat<f32>,\n in_a: &[Vec<Idx>],\n in_b: &[Vec<Idx>],\n out_a: &[Vec<Idx>],\n out_b: &[Vec<Idx>],\n node_color_scale: F)\n where F: Fn((usize, usize)) -> f32\n{\n let shape = x.shape();\n assert!(shape == new_x.shape());\n\n for i in 0..shape.0 {\n for j in 0..shape.1 {\n let in_i: &[Idx] = &in_a[i];\n let in_j: &[Idx] = &in_b[j];\n let out_i: &[Idx] = &out_a[i];\n let out_j: &[Idx] = &out_b[j];\n new_x[(i, j)] = node_color_scale((i, j)) *\n (s_next(in_i, in_j, x) + s_next(out_i, out_j, x)) \/\n 2.0;\n }\n }\n}\n\n#[derive(Debug)]\npub struct GraphSimilarityMatrix<'a> {\n graph_a: Graph<'a>,\n graph_b: Graph<'a>,\n \/\/ current version of similarity matrix\n current: DMat<f32>,\n \/\/ previous version of similarity matrix\n previous: DMat<f32>,\n \/\/ current number of iterations\n num_iters: usize,\n}\n\n#[derive(Copy, Clone, Debug)]\npub struct Graph<'a> {\n in_edges: &'a [Vec<Idx>],\n out_edges: &'a [Vec<Idx>],\n}\n\nimpl<'a> Graph<'a> {\n pub fn new<'b>(in_edges: &'b [Vec<Idx>], out_edges: &'b [Vec<Idx>]) -> Graph<'b> {\n assert!(in_edges.len() == out_edges.len());\n Graph {\n in_edges: in_edges,\n out_edges: out_edges,\n }\n }\n\n fn num_nodes(&self) -> usize {\n let n = self.in_edges.len();\n assert!(n == self.out_edges.len());\n n\n }\n\n fn node_degree(&self, node_idx: usize) -> usize {\n self.in_edges[node_idx].len() + self.out_edges[node_idx].len()\n }\n}\n\nimpl<'a> GraphSimilarityMatrix<'a> {\n pub fn new<'b, F>(graph_a: Graph<'b>,\n graph_b: Graph<'b>,\n node_color_scale: &F)\n -> GraphSimilarityMatrix<'b>\n where F: Fn((usize, usize)) -> f32\n {\n \/\/ `x` is the node-similarity matrix.\n \/\/ we initialize `x`, so that x[i,j]=1 for all i in A.edges() and j in\n \/\/ B.edges().\n let x: DMat<f32> = DMat::from_fn(graph_a.num_nodes(), graph_b.num_nodes(), |i, j| {\n node_color_scale((i, j)) *\n if graph_a.node_degree(i) > 0 && graph_b.node_degree(j) > 0 {\n 1.0\n } else {\n 0.0\n }\n });\n\n let new_x: DMat<f32> = DMat::new_zeros(graph_a.num_nodes(), graph_b.num_nodes());\n\n GraphSimilarityMatrix {\n graph_a: graph_a,\n graph_b: graph_b,\n current: x,\n previous: new_x,\n num_iters: 0,\n }\n }\n\n fn in_eps(&self, eps: f32) -> bool {\n self.previous.approx_eq_eps(&self.current, &eps)\n }\n\n \/\/\/ Calculates the next iteration of the similarity matrix.\n pub fn next<F>(&mut self, node_color_scale: &F)\n where F: Fn((usize, usize)) -> f32\n {\n next_x(&self.current,\n &mut self.previous,\n self.graph_a.in_edges,\n self.graph_b.in_edges,\n self.graph_a.out_edges,\n self.graph_b.out_edges,\n node_color_scale);\n mem::swap(&mut self.previous, &mut self.current);\n self.num_iters += 1;\n }\n\n #[inline]\n \/\/\/ Iteratively calculate the similarity matrix.\n \/\/\/\n \/\/\/ `eps`: When to stop the iteration\n \/\/\/ `stop_after_iter`: Stop after iteration (Calculate x(stop_after_iter))\n pub fn iterate<F>(&mut self, eps: f32, stop_after_iter: usize, node_color_scale: &F)\n where F: Fn((usize, usize)) -> f32\n {\n for _ in 0..stop_after_iter {\n if self.in_eps(eps) {\n break;\n }\n self.next(node_color_scale);\n }\n }\n\n pub fn matrix(&self) -> &DMat<f32> {\n &self.current\n }\n\n pub fn num_iterations(&self) -> usize {\n self.num_iters\n }\n\n fn optimal_node_assignment(&self, n: usize) -> Vec<(usize, usize)> {\n let x = &self.current;\n assert!(n > 0);\n let mut w = WeightMatrix::from_fn(n, |ij| x[ij]);\n let assignment = solve_assignment(&mut w);\n 
assert!(assignment.len() == n);\n assignment\n }\n\n fn score_optimal_sum(&self, n: usize) -> f32 {\n self.optimal_node_assignment(n).iter().fold(0.0, |acc, &ab| acc + self.current[ab])\n }\n\n \/\/\/ Calculate a measure how good the edge weights match up.\n \/\/\/\n \/\/\/ We start by calculating the optimal node assignment between nodes of graph A and\n \/\/\/ graph B, then compare all outgoing edges of similar nodes by again using an\n \/\/\/ assignment.\n pub fn score_outgoing_edge_weights(&self) -> f32 {\n \/\/ XXX\n 0.0\n }\n\n \/\/\/ Sums the optimal assignment of the node similarities and normalizes (divides)\n \/\/\/ by the min degree of both graphs.\n \/\/\/ Used as default in the paper.\n pub fn score_sum_norm_min_degree(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n if n > 0 {\n self.score_optimal_sum(n) \/ n as f32\n } else {\n 0.0\n }\n }\n\n \/\/\/ Sums the optimal assignment of the node similarities and normalizes (divides)\n \/\/\/ by the min degree of both graphs.\n \/\/\/ Penalizes the difference in size of graphs.\n pub fn score_sum_norm_max_degree(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n let m = cmp::max(x.nrows(), x.ncols());\n\n if n > 0 {\n assert!(m > 0);\n self.score_optimal_sum(n) \/ m as f32\n } else {\n 0.0\n }\n }\n\n \/\/\/ Calculates the average over the whole node similarity matrix. This is faster,\n \/\/\/ as no assignment has to be found. \"Graphs with greater number of automorphisms\n \/\/\/ would be considered to be more self-similar than graphs without automorphisms.\"\n pub fn score_average(&self) -> f32 {\n let x = &self.current;\n let n = cmp::min(x.nrows(), x.ncols());\n if n > 0 {\n let sum: f32 = x.as_vec().iter().fold(0.0, |acc, &v| acc + v);\n let len = x.as_vec().len();\n assert!(len > 0);\n sum \/ len as f32\n } else {\n 0.0\n }\n }\n}\n\n#[test]\nfn test_matrix() {\n \/\/ A: 0 --> 1\n let in_a = vec![vec![], vec![0]];\n let out_a = vec![vec![1], vec![]];\n\n \/\/ B: 0 <-- 1\n let in_b = vec![vec![1], vec![]];\n let out_b = vec![vec![], vec![0]];\n\n let node_color = |_| 1.0;\n let mut s = GraphSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 100, &node_color);\n\n println!(\"{:?}\", s);\n assert_eq!(1, s.num_iterations());\n let mat = s.matrix();\n assert_eq!(2, mat.nrows());\n assert_eq!(2, mat.ncols());\n\n \/\/ A and B are isomorphic\n assert_eq!(1.0, mat[(0, 0)]);\n assert_eq!(1.0, mat[(0, 1)]);\n assert_eq!(1.0, mat[(1, 0)]);\n assert_eq!(1.0, mat[(1, 1)]);\n}\n\n#[test]\nfn test_matrix_iter1() {\n let in_a = vec![vec![0, 0, 0]];\n let out_a = vec![vec![0, 0, 0]];\n\n let in_b = vec![vec![0, 0, 0, 0, 0]];\n let out_b = vec![vec![0, 0, 0, 0, 0]];\n\n let node_color = |_| 1.0;\n let mut s = GraphSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 1, &node_color);\n\n assert_eq!(1, s.num_iterations());\n let mat = s.matrix();\n assert_eq!(3.0 \/ 5.0, mat[(0, 0)]);\n}\n\n\n#[test]\nfn test_score() {\n \/\/ A: 0 --> 1\n let in_a = vec![vec![], vec![0]];\n let out_a = vec![vec![1], vec![]];\n\n \/\/ B: 0 <-- 1\n let in_b = vec![vec![1], vec![]];\n let out_b = vec![vec![], vec![0]];\n\n let node_color = |_| 1.0;\n let mut s = GraphSimilarityMatrix::new(Graph::new(&in_a, &out_a),\n Graph::new(&in_b, &out_b),\n &node_color);\n s.iterate(0.1, 100, &node_color);\n\n assert_eq!(1, s.num_iterations());\n\n \/\/ The score is 1.0 <=> A and B are 
isomorphic\n assert_eq!(1.0, s.score_sum_norm_min_degree());\n\n \/\/ The score is 1.0 <=> A and B are isomorphic\n assert_eq!(1.0, s.score_sum_norm_max_degree());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>The deriving Component macro (\\#[component]) is now \\#[SparkleComponent]<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression tests to ensure stable drop order<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code, unreachable_code)]\n\nuse std::cell::RefCell;\nuse std::rc::Rc;\nuse std::panic::{self, AssertUnwindSafe, UnwindSafe};\n\n\/\/ This struct is used to record the order in which elements are dropped\nstruct PushOnDrop {\n vec: Rc<RefCell<Vec<u32>>>,\n val: u32\n}\n\nimpl PushOnDrop {\n fn new(val: u32, vec: Rc<RefCell<Vec<u32>>>) -> PushOnDrop {\n PushOnDrop { vec, val }\n }\n}\n\nimpl Drop for PushOnDrop {\n fn drop(&mut self) {\n self.vec.borrow_mut().push(self.val)\n }\n}\n\nimpl UnwindSafe for PushOnDrop { }\n\n\/\/ Structs\nstruct TestStruct {\n x: PushOnDrop,\n y: PushOnDrop,\n z: PushOnDrop\n}\n\n\/\/ Tuple structs\nstruct TestTupleStruct(PushOnDrop, PushOnDrop, PushOnDrop);\n\n\/\/ Enum variants\nenum TestEnum {\n Tuple(PushOnDrop, PushOnDrop, PushOnDrop),\n Struct { x: PushOnDrop, y: PushOnDrop, z: PushOnDrop }\n}\n\nfn test_drop_tuple() {\n \/\/ Tuple fields are dropped in the same order they are declared\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let test_tuple = (PushOnDrop::new(1, dropped_fields.clone()),\n PushOnDrop::new(2, dropped_fields.clone()));\n drop(test_tuple);\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ Panic during construction means that fields are treated as local variables\n \/\/ Therefore they are dropped in reverse order of initialization\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n (PushOnDrop::new(2, cloned.clone()),\n PushOnDrop::new(1, cloned.clone()),\n panic!(\"this panic is catched :D\"));\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n}\n\nfn test_drop_struct() {\n \/\/ Struct fields are dropped in the same order they are declared\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let test_struct = TestStruct {\n x: PushOnDrop::new(1, dropped_fields.clone()),\n y: PushOnDrop::new(2, dropped_fields.clone()),\n z: PushOnDrop::new(3, dropped_fields.clone()),\n };\n drop(test_struct);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ The same holds for tuple structs\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let test_tuple_struct = TestTupleStruct(PushOnDrop::new(1, dropped_fields.clone()),\n PushOnDrop::new(2, dropped_fields.clone()),\n PushOnDrop::new(3, dropped_fields.clone()));\n drop(test_tuple_struct);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ Panic during struct construction means that fields are treated as local variables\n \/\/ Therefore they are dropped in reverse order of initialization\n let dropped_fields = 
Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestStruct {\n x: PushOnDrop::new(2, cloned.clone()),\n y: PushOnDrop::new(1, cloned.clone()),\n z: panic!(\"this panic is catched :D\")\n };\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ Test with different initialization order\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestStruct {\n y: PushOnDrop::new(2, cloned.clone()),\n x: PushOnDrop::new(1, cloned.clone()),\n z: panic!(\"this panic is catched :D\")\n };\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ The same holds for tuple structs\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestTupleStruct(PushOnDrop::new(2, cloned.clone()),\n PushOnDrop::new(1, cloned.clone()),\n panic!(\"this panic is catched :D\"));\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n}\n\nfn test_drop_enum() {\n \/\/ Enum variants are dropped in the same order they are declared\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let test_struct_enum = TestEnum::Struct {\n x: PushOnDrop::new(1, dropped_fields.clone()),\n y: PushOnDrop::new(2, dropped_fields.clone()),\n z: PushOnDrop::new(3, dropped_fields.clone())\n };\n drop(test_struct_enum);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ The same holds for tuple enum variants\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let test_tuple_enum = TestEnum::Tuple(PushOnDrop::new(1, dropped_fields.clone()),\n PushOnDrop::new(2, dropped_fields.clone()),\n PushOnDrop::new(3, dropped_fields.clone()));\n drop(test_tuple_enum);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ Panic during enum construction means that fields are treated as local variables\n \/\/ Therefore they are dropped in reverse order of initialization\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestEnum::Struct {\n x: PushOnDrop::new(2, cloned.clone()),\n y: PushOnDrop::new(1, cloned.clone()),\n z: panic!(\"this panic is catched :D\")\n };\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ Test with different initialization order\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestEnum::Struct {\n y: PushOnDrop::new(2, cloned.clone()),\n x: PushOnDrop::new(1, cloned.clone()),\n z: panic!(\"this panic is catched :D\")\n };\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ The same holds for tuple enum variants\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n TestEnum::Tuple(PushOnDrop::new(2, cloned.clone()),\n PushOnDrop::new(1, cloned.clone()),\n panic!(\"this panic is catched :D\"));\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n}\n\nfn test_drop_list() {\n \/\/ Elements in a Vec are dropped in the same order they are pushed\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let xs = vec![PushOnDrop::new(1, dropped_fields.clone()),\n PushOnDrop::new(2, dropped_fields.clone()),\n PushOnDrop::new(3, dropped_fields.clone())];\n 
drop(xs);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ The same holds for arrays\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let xs = [PushOnDrop::new(1, dropped_fields.clone()),\n PushOnDrop::new(2, dropped_fields.clone()),\n PushOnDrop::new(3, dropped_fields.clone())];\n drop(xs);\n assert_eq!(*dropped_fields.borrow(), &[1, 2, 3]);\n\n \/\/ Panic during vec construction means that fields are treated as local variables\n \/\/ Therefore they are dropped in reverse order of initialization\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n vec![\n PushOnDrop::new(2, cloned.clone()),\n PushOnDrop::new(1, cloned.clone()),\n panic!(\"this panic is catched :D\")\n ];\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n\n \/\/ The same holds for arrays\n let dropped_fields = Rc::new(RefCell::new(Vec::new()));\n let cloned = AssertUnwindSafe(dropped_fields.clone());\n panic::catch_unwind(|| {\n [\n PushOnDrop::new(2, cloned.clone()),\n PushOnDrop::new(1, cloned.clone()),\n panic!(\"this panic is catched :D\")\n ];\n }).err().unwrap();\n assert_eq!(*dropped_fields.borrow(), &[1, 2]);\n}\n\nfn main() {\n test_drop_tuple();\n test_drop_struct();\n test_drop_enum();\n test_drop_list();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fixes #1499: a test that shape code can walk explicit disr values<commit_after>tag color {\n red = 0xff0000;\n green = 0x00ff00;\n blue = 0x0000ff;\n black = 0x000000;\n white = 0xFFFFFF;\n}\n\nfn main() {\n assert uint::to_str(red as uint, 10u) == #fmt[\"%?\", red];\n assert uint::to_str(green as uint, 10u) == #fmt[\"%?\", green];\n assert uint::to_str(white as uint, 10u) == #fmt[\"%?\", white];\n}\n\n<|endoftext|>"} {"text":"<commit_before>use header::{Header, Raw};\nuse std::fmt;\nuse std::str::FromStr;\nuse header::parsing::from_one_raw_str;\nuse url::idna::domain_to_unicode;\n\n\/\/\/ The `Host` header.\n\/\/\/\n\/\/\/ HTTP\/1.1 requires that all requests include a `Host` header, and so hyper\n\/\/\/ client requests add one automatically.\n\/\/\/\n\/\/\/ Currently is just a String, but it should probably become a better type,\n\/\/\/ like `url::Host` or something.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ ```\n\/\/\/ use hyper::header::{Headers, Host};\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ headers.set(\n\/\/\/ Host{\n\/\/\/ hostname: \"hyper.rs\".to_owned(),\n\/\/\/ port: None,\n\/\/\/ }\n\/\/\/ );\n\/\/\/ ```\n\/\/\/ ```\n\/\/\/ use hyper::header::{Headers, Host};\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ headers.set(\n\/\/\/ Host{\n\/\/\/ hostname: \"hyper.rs\".to_owned(),\n\/\/\/ port: Some(8080),\n\/\/\/ }\n\/\/\/ );\n\/\/\/ ```\n#[derive(Clone, PartialEq, Debug)]\npub struct Host {\n \/\/\/ The hostname, such a example.domain.\n pub hostname: String,\n \/\/\/ An optional port number.\n pub port: Option<u16>\n}\n\nimpl Header for Host {\n fn header_name() -> &'static str {\n static NAME: &'static str = \"Host\";\n NAME\n }\n\n fn parse_header(raw: &Raw) -> ::Result<Host> {\n from_one_raw_str(raw)\n }\n\n fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self.port {\n None | Some(80) | Some(443) => f.write_str(&self.hostname[..]),\n Some(port) => write!(f, \"{}:{}\", self.hostname, port)\n }\n }\n}\n\nimpl fmt::Display for Host {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.fmt_header(f)\n }\n}\n\nimpl FromStr for Host {\n type Err = ::Error;\n\n fn 
from_str(s: &str) -> ::Result<Host> {\n let (host_port, res) = domain_to_unicode(s);\n if res.is_err() {\n return Err(::Error::Header)\n }\n let idx = host_port.rfind(':');\n let port = idx.and_then(\n |idx| s[idx + 1..].parse().ok()\n );\n let hostname = match idx {\n None => host_port,\n Some(idx) => host_port[..idx].to_owned()\n };\n Ok(Host {\n hostname: hostname,\n port: port\n })\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Host;\n use header::Header;\n\n\n #[test]\n fn test_host() {\n let host = Header::parse_header(&vec![b\"foo.com\".to_vec()].into());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"foo.com\".to_owned(),\n port: None\n }));\n\n\n let host = Header::parse_header(&vec![b\"foo.com:8080\".to_vec()].into());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"foo.com\".to_owned(),\n port: Some(8080)\n }));\n }\n}\n\nbench_header!(bench, Host, { vec![b\"foo.com:3000\".to_vec()] });\n<commit_msg>fix(headers): Allow IPv6 Addresses in Host header<commit_after>use header::{Header, Raw};\nuse std::fmt;\nuse std::str::FromStr;\nuse header::parsing::from_one_raw_str;\nuse url::idna::domain_to_unicode;\n\n\/\/\/ The `Host` header.\n\/\/\/\n\/\/\/ HTTP\/1.1 requires that all requests include a `Host` header, and so hyper\n\/\/\/ client requests add one automatically.\n\/\/\/\n\/\/\/ Currently is just a String, but it should probably become a better type,\n\/\/\/ like `url::Host` or something.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/ ```\n\/\/\/ use hyper::header::{Headers, Host};\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ headers.set(\n\/\/\/ Host{\n\/\/\/ hostname: \"hyper.rs\".to_owned(),\n\/\/\/ port: None,\n\/\/\/ }\n\/\/\/ );\n\/\/\/ ```\n\/\/\/ ```\n\/\/\/ use hyper::header::{Headers, Host};\n\/\/\/\n\/\/\/ let mut headers = Headers::new();\n\/\/\/ headers.set(\n\/\/\/ Host{\n\/\/\/ hostname: \"hyper.rs\".to_owned(),\n\/\/\/ port: Some(8080),\n\/\/\/ }\n\/\/\/ );\n\/\/\/ ```\n#[derive(Clone, PartialEq, Debug)]\npub struct Host {\n \/\/\/ The hostname, such a example.domain.\n pub hostname: String,\n \/\/\/ An optional port number.\n pub port: Option<u16>\n}\n\nimpl Header for Host {\n fn header_name() -> &'static str {\n static NAME: &'static str = \"Host\";\n NAME\n }\n\n fn parse_header(raw: &Raw) -> ::Result<Host> {\n from_one_raw_str(raw)\n }\n\n fn fmt_header(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match self.port {\n None | Some(80) | Some(443) => f.write_str(&self.hostname[..]),\n Some(port) => write!(f, \"{}:{}\", self.hostname, port)\n }\n }\n}\n\nimpl fmt::Display for Host {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n self.fmt_header(f)\n }\n}\n\nimpl FromStr for Host {\n type Err = ::Error;\n\n fn from_str(s: &str) -> ::Result<Host> {\n let idx = s.rfind(':');\n let port = idx.and_then(\n |idx| s[idx + 1..].parse().ok()\n );\n let hostname_encoded = match port {\n None => s,\n Some(_) => &s[..idx.unwrap()]\n };\n\n let hostname = if hostname_encoded.starts_with(\"[\") {\n if !hostname_encoded.ends_with(\"]\") {\n return Err(::Error::Header)\n }\n hostname_encoded.to_owned()\n } else {\n let (hostname, res) = domain_to_unicode(hostname_encoded);\n if res.is_err() {\n return Err(::Error::Header)\n }\n hostname\n };\n\n Ok(Host {\n hostname: hostname,\n port: port\n })\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Host;\n use header::Header;\n\n\n #[test]\n fn test_host() {\n let host = Header::parse_header(&vec![b\"foo.com\".to_vec()].into());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"foo.com\".to_owned(),\n port: 
None\n }));\n\n\n let host = Header::parse_header(&vec![b\"foo.com:8080\".to_vec()].into());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"foo.com\".to_owned(),\n port: Some(8080)\n }));\n\n let host = Header::parse_header([b\"foo.com\".to_vec()].as_ref());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"foo.com\".to_owned(),\n port: None\n }));\n\n let host = Header::parse_header([b\"[::1]:8080\".to_vec()].as_ref());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"[::1]\".to_owned(),\n port: Some(8080)\n }));\n\n let host = Header::parse_header([b\"[::1]\".to_vec()].as_ref());\n assert_eq!(host.ok(), Some(Host {\n hostname: \"[::1]\".to_owned(),\n port: None\n }));\n }\n}\n\nbench_header!(bench, Host, { vec![b\"foo.com:3000\".to_vec()] });\n<|endoftext|>"} {"text":"<commit_before><commit_msg>shadow interface<commit_after><|endoftext|>"} {"text":"<commit_before>use core::ptr;\n\n#[lang=\"stack_exhausted\"]\nextern \"C\" fn stack_exhausted() {}\n\n#[lang=\"eh_personality\"]\nextern \"C\" fn eh_personality() {}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memcmp(a: *mut i8, b: *const i8, len: usize) -> i32 {\n for i in 0..len {\n let c_a = ptr::read(a.offset(i as isize));\n let c_b = ptr::read(b.offset(i as isize));\n if c_a != c_b {\n return c_a as i32 - c_b as i32;\n }\n }\n return 0;\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memmove(dst: *mut u8, src: *const u8, len: usize) {\n if src < dst {\n for i_reverse in 0..len as isize {\n let i = len as isize - i_reverse - 1;\n ptr::write(dst.offset(i), ptr::read(src.offset(i)));\n }\n } else {\n for i in 0..len as isize {\n ptr::write(dst.offset(i), ptr::read(src.offset(i)));\n }\n }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memcpy(dst: *mut u8, src: *const u8, len: usize) {\n for i in 0..len as isize {\n ptr::write(dst.offset(i), ptr::read(src.offset(i)));\n }\n}\n\n#[no_mangle]\npub unsafe extern \"C\" fn memset(dst: *mut u8, c: i32, len: usize) {\n for i in 0..len as isize {\n ptr::write(dst.offset(i), c as u8);\n }\n}\n\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\n\/\/\/ 64 bit remainder on 32 bit arch\npub extern \"C\" fn __umoddi3(mut a: u64, mut b: u64) -> u64 {\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n a\n}\n\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\n\/\/\/ 64 bit division on 32 bit arch\npub extern \"C\" fn __udivdi3(mut a: u64, mut b: u64) -> u64 {\n let mut res = 0;\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n res = hig << 32;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n res += d;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n res\n}\n\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\n\/\/\/ 64 bit division and rem on 32 bit arch\npub extern \"C\" fn __udivremi3(mut a: u64, mut b: u64) -> (u64, u64) {\n let mut res = 0;\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n res = hig << 32;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n res += d;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n (res, a)\n}\n<commit_msg>Fix bugs and improve performance in memcmp, memset, memcpy, and 
memmove<commit_after>#[lang=\"stack_exhausted\"]\nextern \"C\" fn stack_exhausted() {}\n\n#[lang=\"eh_personality\"]\nextern \"C\" fn eh_personality() {}\n\n\/\/\/ Memcpy\n\/\/\/\n\/\/\/ Copy N bytes of memory from one location to another.\n#[no_mangle]\npub unsafe extern fn memcpy(dest: *mut u8, src: *const u8,\n n: usize) -> *mut u8 {\n let mut i = 0;\n while i < n {\n *dest.offset(i as isize) = *src.offset(i as isize);\n i += 1;\n }\n\n dest\n}\n\n\/\/\/ Memmove\n\/\/\/\n\/\/\/ Copy N bytes of memory from src to dest. The memory areas may overlap.\n#[no_mangle]\npub unsafe extern fn memmove(dest: *mut u8, src: *const u8,\n n: usize) -> *mut u8 {\n if src < dest as *const u8 {\n let mut i = n;\n while i != 0 {\n i -= 1;\n *dest.offset(i as isize) = *src.offset(i as isize);\n }\n } else {\n let mut i = 0;\n while i < n {\n *dest.offset(i as isize) = *src.offset(i as isize);\n i += 1;\n }\n }\n\n dest\n}\n\n\/\/\/ Memset\n\/\/\/\n\/\/\/ Fill a block of memory with a specified value.\n#[no_mangle]\npub unsafe extern fn memset(s: *mut u8, c: i32, n: usize) -> *mut u8 {\n let mut i = 0;\n while i < n {\n *s.offset(i as isize) = c as u8;\n i += 1;\n }\n\n s\n}\n\n\/\/\/ Memcmp\n\/\/\/\n\/\/\/ Compare two blocks of memory.\n#[no_mangle]\npub unsafe extern fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32 {\n let mut i = 0;\n\n while i < n {\n let a = *s1.offset(i as isize);\n let b = *s2.offset(i as isize);\n if a != b {\n return a as i32 - b as i32\n }\n i += 1;\n }\n\n 0\n}\n\n\/\/\/ 64 bit remainder on 32 bit arch\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\npub extern \"C\" fn __umoddi3(mut a: u64, mut b: u64) -> u64 {\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n a\n}\n\n\/\/\/ 64 bit division on 32 bit arch\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\npub extern \"C\" fn __udivdi3(mut a: u64, mut b: u64) -> u64 {\n let mut res = 0;\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n res = hig << 32;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n res += d;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n res\n}\n\n#[no_mangle]\n#[cfg(target_arch = \"x86\")]\n\/\/\/ 64 bit division and rem on 32 bit arch\npub extern \"C\" fn __udivremi3(mut a: u64, mut b: u64) -> (u64, u64) {\n let mut res = 0;\n let mut hig = a >> 32; \/\/ The first 32 bits of a\n let mut d = 1;\n\n if hig >= b {\n hig \/= b;\n res = hig << 32;\n a -= (hig * b) << 32;\n }\n\n while b > 0 && b < a {\n b *= 2;\n d *= 2;\n }\n\n loop {\n if a >= b {\n a -= b;\n res += d;\n }\n b >>= 1;\n d >>= 1;\n\n if d == 0 {\n break;\n }\n }\n\n (res, a)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example code<commit_after>extern crate secret;\n\nuse secret::SecretService;\n\nfn main() {\n let secret_service = SecretService::get().ok().unwrap();\n println!(\"Session algorithms: {}\", secret_service.get_session_algorithms().unwrap());\n println!(\"Number of collections: {}\\n\", secret_service.get_collections().unwrap().len());\n for secret_collection in secret_service.get_collections().unwrap() {\n println!(\"Label for collection: {}\\n\", secret_collection.get_label());\n if secret_collection.get_locked() {\n println!(\"Collection is locked\");\n }\n let 
all_items = match secret_collection.get_items(){\n Some(items) => items,\n None => continue\n };\n\n for secret_item in all_items {\n println!(\"Label for item: {}\", secret_item.get_label());\n secret_item.load_secret();\n let secret_value = secret_item.get_secret().unwrap();\n println!(\"ContentType for item: {}\", secret_value.get_content_type());\n println!(\"SecretValue for item: {}\\n\", secret_value.get().unwrap());\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor(src\/main.rs) Proper exit status.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>tweak limits<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Log only emojistats-related messages; enable debug logging<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove need to hold enter<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create main.rs<commit_after>\n\npub mod card;\npub mod card_points;\n\n#[allow(dead_code)]\nfn main() {\n\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(collections, core, exit_status, io, libc, old_io, old_path, os, std_misc)]\n\n\/\/ Other platforms than macos don't need std_misc but you can't\n\/\/ use #[cfg] on features.\n#![allow(unused_features)]\n\nextern crate ansi_term;\nextern crate datetime;\nextern crate getopts;\nextern crate locale;\nextern crate natord;\nextern crate number_prefix;\nextern crate pad;\nextern crate users;\n\n#[cfg(feature=\"git\")]\nextern crate git2;\n\nuse std::env;\nuse std::old_io::{fs, FileType};\nuse std::os::num_cpus;\nuse std::sync::mpsc::{channel, sync_channel};\nuse std::thread;\n\nuse dir::Dir;\nuse file::File;\nuse options::{Options, View};\nuse output::lines_view;\n\npub mod column;\npub mod dir;\npub mod file;\npub mod filetype;\npub mod options;\npub mod output;\npub mod term;\npub mod xattr;\n\n#[cfg(not(test))]\nstruct Exa<'a> {\n count: usize,\n options: Options,\n dirs: Vec<Path>,\n files: Vec<File<'a>>,\n}\n\n#[cfg(not(test))]\nimpl<'a> Exa<'a> {\n fn new(options: Options) -> Exa<'a> {\n Exa {\n count: 0,\n options: options,\n dirs: Vec::new(),\n files: Vec::new(),\n }\n }\n\n fn load(&mut self, files: &[String]) {\n \/\/ Separate the user-supplied paths into directories and files.\n \/\/ Files are shown first, and then each directory is expanded\n \/\/ and listed second.\n\n let is_tree = self.options.dir_action.is_tree();\n let total_files = files.len();\n\n \/\/ Denotes the maxinum number of concurrent threads\n let (thread_capacity_tx, thread_capacity_rs) = sync_channel(8 * num_cpus());\n\n \/\/ Communication between consumer thread and producer threads\n enum StatResult<'a> {\n File(File<'a>),\n Path(Path),\n Error\n }\n let (results_tx, results_rx) = channel();\n\n \/\/ Spawn consumer thread\n let _consumer = thread::scoped(move || {\n for _ in 0..total_files {\n\n \/\/ Make room for more producer threads\n let _ = thread_capacity_rs.recv();\n\n \/\/ Receive a producer's result\n match results_rx.recv() {\n Ok(result) => match result {\n StatResult::File(file) => self.files.push(file),\n StatResult::Path(path) => self.dirs.push(path),\n StatResult::Error => ()\n },\n Err(_) => unreachable!()\n }\n self.count += 1;\n }\n });\n\n for file in files.iter() {\n let file = file.clone();\n let results_tx = results_tx.clone();\n\n \/\/ Block until there is room for another thread\n let _ = thread_capacity_tx.send(());\n\n \/\/ Spawn producer thread\n thread::spawn(move || {\n let path = Path::new(file.clone());\n match fs::stat(&path) {\n Ok(stat) => {\n if 
stat.kind == FileType::Directory {\n if is_tree {\n let _ = results_tx.send(StatResult::File(File::with_stat(stat, &path, None, true)));\n }\n else {\n let _ = results_tx.send(StatResult::Path(path));\n }\n }\n else {\n let _ = results_tx.send(StatResult::File(File::with_stat(stat, &path, None, false)));\n }\n }\n Err(e) => {\n println!(\"{}: {}\", file, e);\n let _ = results_tx.send(StatResult::Error);\n }\n }\n });\n }\n\n }\n\n fn print_files(&self) {\n if !self.files.is_empty() {\n self.print(None, &self.files[..]);\n }\n }\n\n fn print_dirs(&mut self) {\n let mut first = self.files.is_empty();\n\n \/\/ Directories are put on a stack rather than just being iterated through,\n \/\/ as the vector can change as more directories are added.\n loop {\n let dir_path = match self.dirs.pop() {\n None => break,\n Some(f) => f,\n };\n\n \/\/ Put a gap between directories, or between the list of files and the\n \/\/ first directory.\n if first {\n first = false;\n }\n else {\n print!(\"\\n\");\n }\n\n match Dir::readdir(&dir_path) {\n Ok(ref dir) => {\n let mut files = dir.files(false);\n self.options.transform_files(&mut files);\n\n \/\/ When recursing, add any directories to the dirs stack\n \/\/ backwards: the *last* element of the stack is used each\n \/\/ time, so by inserting them backwards, they get displayed in\n \/\/ the correct sort order.\n if let Some(recurse_opts) = self.options.dir_action.recurse_options() {\n let depth = dir_path.components().filter(|&c| c != b\".\").count() + 1;\n if !recurse_opts.tree && !recurse_opts.is_too_deep(depth) {\n for dir in files.iter().filter(|f| f.stat.kind == FileType::Directory).rev() {\n self.dirs.push(dir.path.clone());\n }\n }\n }\n\n if self.count > 1 {\n println!(\"{}:\", dir_path.display());\n }\n self.count += 1;\n\n self.print(Some(dir), &files[..]);\n }\n Err(e) => {\n println!(\"{}: {}\", dir_path.display(), e);\n return;\n }\n };\n }\n }\n\n fn print(&self, dir: Option<&Dir>, files: &[File]) {\n match self.options.view {\n View::Grid(g) => g.view(files),\n View::Details(d) => d.view(dir, files),\n View::Lines => lines_view(files),\n }\n }\n}\n\n#[cfg(not(test))]\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n match Options::getopts(args.tail()) {\n Ok((options, paths)) => {\n let mut exa = Exa::new(options);\n exa.load(&paths);\n exa.print_files();\n exa.print_dirs();\n },\n Err(e) => {\n println!(\"{}\", e);\n env::set_exit_status(e.error_code());\n },\n };\n}\n<commit_msg>clean up consumer to make code more readable<commit_after>#![feature(collections, core, exit_status, io, libc, old_io, old_path, os, std_misc)]\n\n\/\/ Other platforms than macos don't need std_misc but you can't\n\/\/ use #[cfg] on features.\n#![allow(unused_features)]\n\nextern crate ansi_term;\nextern crate datetime;\nextern crate getopts;\nextern crate locale;\nextern crate natord;\nextern crate number_prefix;\nextern crate pad;\nextern crate users;\n\n#[cfg(feature=\"git\")]\nextern crate git2;\n\nuse std::env;\nuse std::old_io::{fs, FileType};\nuse std::os::num_cpus;\nuse std::sync::mpsc::{channel, sync_channel};\nuse std::thread;\n\nuse dir::Dir;\nuse file::File;\nuse options::{Options, View};\nuse output::lines_view;\n\npub mod column;\npub mod dir;\npub mod file;\npub mod filetype;\npub mod options;\npub mod output;\npub mod term;\npub mod xattr;\n\n#[cfg(not(test))]\nstruct Exa<'a> {\n count: usize,\n options: Options,\n dirs: Vec<Path>,\n files: Vec<File<'a>>,\n}\n\n#[cfg(not(test))]\nimpl<'a> Exa<'a> {\n fn new(options: Options) -> Exa<'a> 
{\n Exa {\n count: 0,\n options: options,\n dirs: Vec::new(),\n files: Vec::new(),\n }\n }\n\n fn load(&mut self, files: &[String]) {\n \/\/ Separate the user-supplied paths into directories and files.\n \/\/ Files are shown first, and then each directory is expanded\n \/\/ and listed second.\n\n let is_tree = self.options.dir_action.is_tree();\n let total_files = files.len();\n\n \/\/ Denotes the maxinum number of concurrent threads\n let (thread_capacity_tx, thread_capacity_rs) = sync_channel(8 * num_cpus());\n\n \/\/ Communication between consumer thread and producer threads\n enum StatResult<'a> {\n File(File<'a>),\n Path(Path),\n Error\n }\n let (results_tx, results_rx) = channel();\n\n \/\/ Spawn consumer thread\n let _consumer = thread::scoped(move || {\n for _ in 0..total_files {\n\n \/\/ Make room for more producer threads\n let _ = thread_capacity_rs.recv();\n\n \/\/ Receive a producer's result\n match results_rx.recv() {\n Ok(result) => match result {\n StatResult::File(file) => self.files.push(file),\n StatResult::Path(path) => self.dirs.push(path),\n StatResult::Error => ()\n },\n Err(_) => unreachable!()\n }\n self.count += 1;\n }\n });\n\n for file in files.iter() {\n let file = file.clone();\n let results_tx = results_tx.clone();\n\n \/\/ Block until there is room for another thread\n let _ = thread_capacity_tx.send(());\n\n \/\/ Spawn producer thread\n thread::spawn(move || {\n let path = Path::new(file.clone());\n let _ = results_tx.send(match fs::stat(&path) {\n Ok(stat) => {\n if stat.kind != FileType::Directory {\n StatResult::File(File::with_stat(stat, &path, None, false))\n }\n else if is_tree {\n StatResult::File(File::with_stat(stat, &path, None, true))\n }\n else {\n StatResult::Path(path)\n }\n }\n Err(e) => {\n println!(\"{}: {}\", file, e);\n StatResult::Error\n }\n });\n });\n }\n\n }\n\n fn print_files(&self) {\n if !self.files.is_empty() {\n self.print(None, &self.files[..]);\n }\n }\n\n fn print_dirs(&mut self) {\n let mut first = self.files.is_empty();\n\n \/\/ Directories are put on a stack rather than just being iterated through,\n \/\/ as the vector can change as more directories are added.\n loop {\n let dir_path = match self.dirs.pop() {\n None => break,\n Some(f) => f,\n };\n\n \/\/ Put a gap between directories, or between the list of files and the\n \/\/ first directory.\n if first {\n first = false;\n }\n else {\n print!(\"\\n\");\n }\n\n match Dir::readdir(&dir_path) {\n Ok(ref dir) => {\n let mut files = dir.files(false);\n self.options.transform_files(&mut files);\n\n \/\/ When recursing, add any directories to the dirs stack\n \/\/ backwards: the *last* element of the stack is used each\n \/\/ time, so by inserting them backwards, they get displayed in\n \/\/ the correct sort order.\n if let Some(recurse_opts) = self.options.dir_action.recurse_options() {\n let depth = dir_path.components().filter(|&c| c != b\".\").count() + 1;\n if !recurse_opts.tree && !recurse_opts.is_too_deep(depth) {\n for dir in files.iter().filter(|f| f.stat.kind == FileType::Directory).rev() {\n self.dirs.push(dir.path.clone());\n }\n }\n }\n\n if self.count > 1 {\n println!(\"{}:\", dir_path.display());\n }\n self.count += 1;\n\n self.print(Some(dir), &files[..]);\n }\n Err(e) => {\n println!(\"{}: {}\", dir_path.display(), e);\n return;\n }\n };\n }\n }\n\n fn print(&self, dir: Option<&Dir>, files: &[File]) {\n match self.options.view {\n View::Grid(g) => g.view(files),\n View::Details(d) => d.view(dir, files),\n View::Lines => lines_view(files),\n }\n 
}\n}\n\n#[cfg(not(test))]\nfn main() {\n let args: Vec<String> = env::args().collect();\n\n match Options::getopts(args.tail()) {\n Ok((options, paths)) => {\n let mut exa = Exa::new(options);\n exa.load(&paths);\n exa.print_files();\n exa.print_dirs();\n },\n Err(e) => {\n println!(\"{}\", e);\n env::set_exit_status(e.error_code());\n },\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>#4 - (Most) panics from exponentiation overflowing have been presented.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change sum function to take an interator instead of a vector<commit_after><|endoftext|>"} {"text":"<commit_before> use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, '3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. 
times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') | (Normal, ' ') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' 
&&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<commit_msg>Add more aliases<commit_after> use redox::*;\n\n\/\/ TODO: Structure using loops\n\/\/ TODO: Make capital commands doing the reverse of the command\n\/\/ Y: Yank before the cursor\n\/\/ D: Delete before the cursor.\n\nuse super::Mode;\nuse super::Mode::*;\nuse super::Editor;\n\n\/\/ TODO: Move vars to `Editor`\npub fn exec(editor: &mut Editor, mode: &mut Mode, multiplier: &mut Option<u32>, last_change: &mut String, key_event: KeyEvent, window: &mut Window, swap: &mut usize, period: &mut String, is_recording: &mut bool, clipboard: &mut String) {\n match (*mode, key_event.scancode) {\n (Insert, K_ESC) => {\n *mode = Normal;\n },\n (Insert, K_BKSP) => editor.backspace(window),\n (Insert, K_DEL) => editor.delete(window),\n (_, K_F5) => editor.reload(window),\n (_, K_F6) => editor.save(window),\n (_, K_HOME) => editor.offset = 0,\n (_, K_UP) => editor.up(),\n (_, K_LEFT) => editor.left(),\n (_, K_RIGHT) => editor.right(),\n (_, K_END) => editor.offset = editor.string.len(),\n (_, K_DOWN) => editor.down(),\n (m, _) => {\n let (no_mult, mut times) = match *multiplier {\n Some(n) => (false, n),\n None => (true, 1),\n };\n let mut is_none = false;\n\n match (*mode, key_event.character) {\n (Normal, '0') if !no_mult => times *= 10,\n\n (Normal, '1') if no_mult => times = 1,\n (Normal, '1') => times = times * 10 + 1,\n\n (Normal, '2') if no_mult => times = 2,\n (Normal, '2') => times = times * 10 + 2,\n\n (Normal, '3') if no_mult => times = 3,\n (Normal, 
'3') => times = times * 10 + 3,\n\n (Normal, '4') if no_mult => times = 4,\n (Normal, '4') => times = times * 10 + 4,\n\n (Normal, '5') if no_mult => times = 5,\n (Normal, '5') => times = times * 10 + 5,\n\n (Normal, '6') if no_mult => times = 6,\n (Normal, '6') => times = times * 10 + 6,\n\n (Normal, '7') if no_mult => times = 7,\n (Normal, '7') => times = times * 10 + 7,\n\n (Normal, '8') if no_mult => times = 8,\n (Normal, '8') => times = times * 10 + 8,\n\n (Normal, '9') if no_mult => times = 9,\n (Normal, '9') => times = times * 10 + 9,\n (_, _) => {\n if *is_recording {\n if key_event.character == ',' {\n *is_recording = false;\n } else {\n period.push(key_event.character);\n }\n } else {\n for _ in 0 .. times {\n match (m, key_event.character) {\n (Normal, 'i') => {\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'h') | (Normal, ' ') => editor.left(),\n (Normal, 'l') => editor.right(),\n (Normal, 'k') => editor.up(),\n (Normal, 'j') | (Normal, '\\n') => editor.down(),\n (Normal, 'K') => {\n for _ in 1..15 {\n editor.up();\n }\n },\n (Normal, 'J') => {\n for _ in 1..15 {\n editor.down();\n }\n },\n (Normal, 'g') => editor.offset = 0,\n (Normal, 'G') => editor.offset = editor.string.len(),\n (Normal, 'a') => {\n editor.right();\n *mode = Insert;\n *last_change = editor.string.clone();\n },\n (Normal, 'x') => editor.delete(window),\n (Normal, 'X') => editor.backspace(window),\n (Normal, 'u') => {\n editor.offset = 0;\n ::core::mem::swap(last_change, &mut editor.string);\n },\n (Normal, 'c') => {\n ::core::mem::swap(&mut editor.offset, swap);\n },\n (Normal, 'z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = clipboard.clone().to_string() + &editor.cur().to_string();\n editor.delete(window);\n }\n },\n (Normal, 'Z') => {\n *clipboard = String::new();\n while editor.cur() != '\\n' &&\n editor.offset >= 0 {\n clipboard.push(editor.cur());\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 's') => {\n editor.delete(window);\n *mode = Insert;\n },\n (Normal, 'o') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n editor.insert('\\n', window);\n *mode = Insert;\n },\n (Normal, 'O') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.insert('\\n', window);\n editor.left();\n *mode = Insert;\n },\n (Normal, '^') | (Normal, 'H') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 0 {\n editor.left();\n }\n editor.right();\n while (editor.cur() == ' ' ||\n editor.cur() == '\\t') &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n clipboard.push(editor.cur());\n editor.right();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.left();\n }\n },\n (Normal, 'Y') => {\n *clipboard = String::new();\n let mut mov = 1;\n while editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n *clipboard = editor.cur().to_string() + clipboard;\n editor.left();\n mov += 1;\n }\n\n for _ in 1..mov {\n editor.right();\n }\n },\n (Normal, 'p') => {\n for c in clipboard.chars() {\n editor.insert(c, window);\n }\n },\n (Normal, '$') | (Normal, 'L') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < 
editor.string.len() {\n editor.right();\n }\n },\n (Normal, '0') => {\n editor.left();\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.left();\n }\n editor.right();\n },\n (Normal, 'd') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset < editor.string.len() {\n editor.delete(window);\n }\n },\n (Normal, 'D') => {\n while editor.cur() != '\\n' &&\n editor.cur() != '\\0' &&\n editor.offset >= 1 {\n editor.backspace(window);\n editor.left();\n }\n editor.right();\n },\n (Normal, 'w') => {\n editor.save(window);\n },\n (Normal, 'e') => {\n editor.right();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'b') => {\n editor.left();\n while editor.cur() != '.' &&\n editor.cur() != '{' &&\n editor.cur() != ',' &&\n editor.cur() != ' ' &&\n editor.cur() != '}' &&\n editor.cur() != '(' &&\n editor.cur() != ')' &&\n editor.cur() != '[' &&\n editor.cur() != ']' &&\n editor.cur() != ';' &&\n editor.cur() != '\"' &&\n editor.cur() != '\\'' &&\n editor.cur() != '\\n' &&\n editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, 'E') => {\n editor.right();\n while editor.cur() != ' ' && editor.offset < editor.string.len() {\n editor.right();\n }\n },\n (Normal, 'B') => {\n editor.left();\n while editor.cur() != ' ' && editor.offset >= 1 {\n editor.left();\n }\n },\n (Normal, ',') => {\n *is_recording = true;\n *period = String::new();\n },\n (Normal, '%') => {\n match editor.cur() {\n '(' | '[' | '{' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.right();\n i += match editor.cur() {\n '(' | '[' | '{' => 1,\n ')' | ']' | '}' => -1,\n _ => 0,\n };\n }\n },\n ')' | ']' | '}' => {\n let mut i = 1;\n while i != 0 &&\n editor.offset < editor.string.len() {\n editor.left();\n i += match editor.cur() {\n '(' | '[' | '{' => -1,\n ')' | ']' | '}' => 1,\n _ => 0,\n };\n }\n },\n _ => {},\n\n }\n },\n (Normal, '!') => {\n for c in period.clone().chars() {\n exec(editor, mode, multiplier, last_change, KeyEvent {\n character: c,\n scancode: 0,\n pressed: true,\n }, window, swap, period, is_recording, clipboard);\n }\n },\n (Insert, '\\0') => (),\n (Insert, c) => {\n editor.insert(c, window);\n },\n _ => {},\n }\n }\n }\n is_none = true;\n }\n }\n\n if !is_none {\n *multiplier = Some(times);\n } else {\n *multiplier = None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implemented collision routines but I feel like there's quite a bit of borrow checker fight ahead<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Database receives monitor clients<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>11 - unused<commit_after>fn used_function() {}\n\n\/\/ `#[allow(dead_code)]` is an attribute that disables the `dead_code` lint\n#[allow(dead_code)]\nfn unused_function() {}\n\nfn noisy_unused_function() {}\n\/\/ FIXME ^ Add an attribute to suppress the warning\n\nfn main() {\n used_function();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>error: 'request.0.id' does not live long enough<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial attempt<commit_after>extern crate 
classreader;\n\nuse classreader::*;\nuse std::env;\n\nfn main() {\n let file_name = env::args().nth(1).expect(\"usage: javamoose <class file>\");\n println!(\"Loading class file {}\", file_name);\n let class = ClassReader::new_from_path(&file_name).unwrap();\n\n assert_eq!(0xCAFEBABE, class.magic);\n println!(\"class: {}\", get_class_name(&class));\n println!(\"method count: {}\", class.methods.len());\n println!(\"field count: {}\", class.fields.len());\n println!(\"code size: {}\", get_total_code_size(&class));\n\n\n \/\/ println!(\"{:?}\", class);\n\n println!(\"Done!\");\n}\n\nfn get_const(class: &Class, i: usize) -> &ConstantPoolInfo {\n &class.constant_pool[i - 1]\n}\n\nfn get_string(class: &Class, index: usize) -> String {\n match get_const(class, index) {\n &ConstantPoolInfo::Utf8(ref s) => s.clone(),\n _ => \"?\".to_string()\n }\n}\n\n\nfn get_class_name(class: &Class) -> String {\n match get_const(class, class.this_class as usize) {\n &ConstantPoolInfo::Class(index) => get_string(class, index as usize),\n _ => \"?\".to_string()\n }\n}\n\nfn get_total_code_size(class: &Class) -> usize {\n let mut sum: usize = 0;\n for m in &class.methods {\n for a in &m.attributes {\n sum = match a {\n &Attribute::Code{ref code, ..} => sum + &code.len(),\n _ => sum\n };\n }\n }\n sum\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added initial Arduino code to turn an LED on.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add math module<commit_after>use std;\n\npub use std::f32::consts::PI as PI;\n\npub struct Mat4(pub [[f32; 4]; 4]);\n\nimpl std::ops::Deref for Mat4 {\n type Target = [[f32; 4]; 4];\n\n fn deref(&self) -> &[[f32; 4]; 4] {\n &self.0\n }\n}\n\nimpl std::ops::Mul for Mat4 {\n type Output = Self;\n\n fn mul(self, rhs: Self) -> Self {\n let mut result = [[0.0; 4]; 4];\n for i in 0..4 {\n for j in 0..4 {\n for k in 0..4 {\n result[i][j] += self[k][j] * rhs[i][k];\n }\n }\n }\n Mat4(result)\n }\n}\n\nimpl Mat4 {\n pub fn identity() -> Self {\n Mat4([[1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n }\n\n pub fn rotation_x(theta: f32) -> Self {\n let s = theta.sin();\n let c = theta.cos();\n Mat4([[1.0, 0.0, 0.0, 0.0],\n [0.0, c, s, 0.0],\n [0.0, -s, c, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n }\n\n pub fn rotation_y(theta: f32) -> Self {\n let s = theta.sin();\n let c = theta.cos();\n Mat4([[ c, 0.0, s, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [ -s, 0.0, c, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n }\n\n pub fn rotation_z(theta: f32) -> Self {\n let s = theta.sin();\n let c = theta.cos();\n Mat4([[ c, s, 0.0, 0.0],\n [ -s, c, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0]])\n }\n\n pub fn translation(x: f32, y: f32, z: f32) -> Self {\n Mat4([[1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [ x, y, z, 1.0]])\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added destructure enum example<commit_after>\/\/ Must derive `Debug` so `println!` can be used.\n\/\/ `allow` required to silence warnings because only\n\/\/ one variant is used.\n#[allow(dead_code)]\n#[derive(Debug)]\nenum Color {\n \/\/ These 3 are specified solely by their name.\n Red,\n Blue,\n Green,\n \/\/ This requires 3 `i32`s and a name.\n RGB(i32, i32, i32),\n}\n\nfn main() {\n let color = Color::RGB(122, 17, 40);\n \/\/ TODO ^ Try different variants for `color`\n\n println!(\"What color is it?\");\n \/\/ An `enum` can be destructured using a `match`.\n match color {\n Color::Red => println!(\"The color is Red!\"),\n Color::Blue => 
println!(\"The color is Blue!\"),\n Color::Green => println!(\"The color is Green!\"),\n Color::RGB(r, g, b) => {\n println!(\"Red: {:?}, green: {:?}, and blue: {:?}!:\", r, g, b);\n },\n \/\/ Don't need another arm because all variants have been examined\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove bounds checks in sram<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Started assembler. Reads file until blank line reached. Input validation.<commit_after>extern mod extra;\n\nuse std::path::Path;\nuse std::os;\nuse std::io;\n\nuse extra::fileinput::*;\n\nfn main() {\n\tlet args : ~[~str] = os::args();\n\t\n\tif args.len() == 2 {\n\t\tlet source = FileInput::from_args();\n\n\t\tif Path(args[1]).exists() {\n\t\t\tlet mut i = 0;\n\t\t\tdo source.each_line |line| {\n\t\t\t\tif !line.is_empty() {\n\t\t\t\t\tio::println(fmt!(\"%d: %s\", i, line));\n\t\t\t\t\ti += 1;\n\t\t\t\t\ttrue\n\t\t\t\t} else {\n\t\t\t\t\tfalse\n\t\t\t\t}\n\t\t\t};\n\t\t} else {\n\t\t\tprintln(\"error: file does not exist.\");\n\t\t}\n\t} else {\n\t\tprintln(\"usage: uasm <input file>\");\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>MEM_PROVIDER allocation<commit_after><|endoftext|>"} {"text":"<commit_before>use toml::Value;\n\nuse libimagerror::into::IntoError;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_update.commit.interactive' must be a Boolean.\");\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_update.commit.interactive'\");\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value) -> bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_update.commit.interactive_editor' must be a Boolean.\");\n warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_update.commit.interactive_editor'\");\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_update.commit.message' must be a String.\");\n warn!(\"Defaulting to commit.message = 'Update'\");\n \"Update\"\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_update.commit.message'\");\n warn!(\"Defaulting to commit.message = 'Update'\");\n \"Update\"\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template() -> &'static str {\n \"Commit template\"\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the 
`commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config) {\n if commit_with_editor(config) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = String::from(commit_template());\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean value from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n<commit_msg>Fix warn!() output to be action sensitive<commit_after>use toml::Value;\n\nuse libimagerror::into::IntoError;\n\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::result::Result;\n\nuse vcs::git::action::StoreAction;\n\nuse git2::Repository;\n\n\/\/\/ Check the configuration whether we should commit interactively\npub fn commit_interactive(config: &Value, action: &StoreAction) -> bool {\n match config.lookup(\"commit.interactive\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Check the configuration whether we should commit with the editor\nfn commit_with_editor(config: &Value, action: &StoreAction) -> 
bool {\n match config.lookup(\"commit.interactive_editor\") {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.interactive_editor' must be a Boolean.\",\n action);\n warn!(\"Defaulting to commit.interactive_editor = false\");\n false\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.interactive_editor'\", action);\n warn!(\"Defaulting to false\");\n false\n }\n }\n}\n\n\/\/\/ Get the commit default message\nfn commit_default_msg<'a>(config: &'a Value, action: &'a StoreAction) -> &'a str {\n match config.lookup(\"commit.message\") {\n Some(&Value::String(ref b)) => b,\n Some(_) => {\n warn!(\"Configuration error, 'store.hooks.stdhook_git_{}.commit.message' must be a String.\",\n action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n None => {\n warn!(\"Unavailable configuration for\");\n warn!(\"\\t'store.hooks.stdhook_git_{}.commit.message'\", action);\n warn!(\"Defaulting to commit.message = '{}'\", action.as_commit_message());\n action.as_commit_message()\n }\n }\n}\n\n\/\/\/ Get the commit template\n\/\/\/\n\/\/\/ TODO: Implement good template string\nfn commit_template() -> &'static str {\n \"Commit template\"\n}\n\n\/\/\/ Generate a commit message\n\/\/\/\n\/\/\/ Uses the functions `commit_interactive()` and `commit_with_editor()`\n\/\/\/ or reads one from the commandline or uses the `commit_default_msg()` string to create a commit\n\/\/\/ message.\npub fn commit_message(repo: &Repository, config: &Value, action: StoreAction) -> Result<String> {\n use libimaginteraction::ask::ask_string;\n use libimagutil::edit::edit_in_tmpfile_with_command;\n use std::process::Command;\n\n if commit_interactive(config, &action) {\n if commit_with_editor(config, &action) {\n repo.config()\n .map_err_into(GHEK::GitConfigFetchError)\n .and_then(|c| c.get_string(\"core.editor\").map_err_into(GHEK::GitConfigEditorFetchError))\n .map_err_into(GHEK::ConfigError)\n .map(Command::new)\n .and_then(|cmd| {\n let mut s = String::from(commit_template());\n edit_in_tmpfile_with_command(cmd, &mut s).map(|_| s)\n .map_err_into(GHEK::EditorError)\n })\n } else {\n Ok(ask_string(\"Commit Message\", None, false, false, None, \"> \"))\n }\n } else {\n Ok(String::from(commit_default_msg(config, &action)))\n }\n}\n\n\/\/\/ Check whether the hook should abort if the repository cannot be initialized\npub fn abort_on_repo_init_err(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"abort_on_repo_init_failure\", true, true)\n}\n\n\/\/\/ Get the branch which must be checked out before running the hook (if any).\n\/\/\/\n\/\/\/ If there is no configuration for this, this is `Ok(None)`, otherwise we try to find the\n\/\/\/ configuration `String`.\npub fn ensure_branch(cfg: Option<&Value>) -> Result<Option<String>> {\n match cfg {\n Some(cfg) => {\n match cfg.lookup(\"ensure_branch\") {\n Some(&Value::String(ref s)) => Ok(Some(s.clone())),\n Some(_) => {\n warn!(\"Configuration error, 'ensure_branch' must be a String.\");\n Err(GHEK::ConfigTypeError.into_error())\n .map_err_into(GHEK::ConfigTypeError)\n },\n None => {\n debug!(\"No key `ensure_branch'\");\n Ok(None)\n },\n }\n },\n None => Ok(None),\n }\n}\n\n\/\/\/ Check whether we should check out a branch before committing.\npub fn do_checkout_ensure_branch(cfg: Option<&Value>) -> bool {\n get_bool_cfg(cfg, \"try_checkout_ensure_branch\", true, true)\n}\n\n\/\/\/ Helper to get a boolean 
value from the configuration.\nfn get_bool_cfg(cfg: Option<&Value>, name: &str, on_fail: bool, on_unavail: bool) -> bool {\n cfg.map(|cfg| {\n match cfg.lookup(name) {\n Some(&Value::Boolean(b)) => b,\n Some(_) => {\n warn!(\"Configuration error, '{}' must be a Boolean (true|false).\", name);\n warn!(\"Assuming '{}' now.\", on_fail);\n on_fail\n },\n None => {\n debug!(\"No key '{}' - Assuming '{}'\", name, on_unavail);\n on_unavail\n },\n }\n })\n .unwrap_or(on_unavail)\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added yarn example<commit_after>extern crate indicatif;\nextern crate rand;\n\nuse std::thread;\nuse std::time::{Instant, Duration};\nuse rand::Rng;\n\nuse indicatif::{ProgressBar, ProgressStyle, MultiProgress, HumanDuration, style};\n\n\nstatic PACKAGES: &'static [&'static str] = &[\n \"fs-events\",\n \"my-awesome-module\",\n \"emoji-speaker\",\n \"wrap-ansi\",\n \"stream-browserify\",\n \"acorn-dynamic-import\",\n];\n\nstatic COMMANDS: &'static [&'static str] = &[\n \"cmake .\",\n \"make\",\n \"make clean\",\n \"gcc foo.c -o foo\",\n \"gcc bar.c -o bar\",\n \".\/helper.sh rebuild-cache\",\n \"make all-clean\",\n \"make test\",\n];\n\n\npub fn main() {\n let mut rng = rand::thread_rng();\n let started = Instant::now();\n let spinner_style = ProgressStyle::default_spinner()\n .tick_chars(\"⠁⠂⠄⡀⢀⠠⠐⠈ \")\n .template(\"{prefix:.bold.dim} {spinner} {msg}\");\n let progress_style = ProgressStyle::default_spinner()\n .template(\"{prefix:.bold.dim} {msg}\\n{wide_bar} {pos}\/{len}\");\n\n println!(\"{} 🔍 Resolving packages...\", style(\"[1\/4]\").bold().dim());\n println!(\"{} 🚚 Fetching packages...\", style(\"[2\/4]\").bold().dim());\n\n let deps = 1232;\n let pb = ProgressBar::new(deps);\n pb.set_style(progress_style);\n pb.set_prefix(\"[3\/4]\");\n pb.set_message(\"🔗 Linking dependencies...\");\n\n for _ in 0..deps {\n pb.inc(1);\n thread::sleep(Duration::from_millis(3));\n }\n\n pb.finish_and_clear();\n println!(\"{} 🔗 Linking dependencies...\", style(\"[3\/4]\").bold().dim());\n\n println!(\"{} 📃 Building fresh packages...\", style(\"[4\/4]\").bold().dim());\n let m = MultiProgress::new();\n for i in 0..4 {\n let count = rng.gen_range(30, 80);\n let pb = m.add(ProgressBar::new(count));\n pb.set_style(spinner_style.clone());\n pb.set_prefix(&format!(\"[{}\/?]\", i + 1));\n let _ = thread::spawn(move || {\n let mut rng = rand::thread_rng();\n let pkg = rng.choose(PACKAGES).unwrap();\n for _ in 0..count {\n let cmd = rng.choose(COMMANDS).unwrap();\n pb.set_message(&format!(\"{}: {}\", pkg, cmd));\n pb.inc(1);\n thread::sleep(Duration::from_millis(rng.gen_range(25, 200)));\n }\n pb.finish_with_message(\"waiting...\");\n });\n }\n m.join_and_clear().unwrap();\n\n println!(\"✨ Done in {}\", HumanDuration(started.elapsed()));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a benchmark counting Pythagorean triples<commit_after>\/\/! How many Pythagorean triples exist less than or equal to a million?\n\/\/! i.e. a²+b²=c² and a,b,c ≤ 1000000\n\n#![feature(test)]\n\nextern crate num;\nextern crate rayon;\nextern crate test;\n\nuse num::Integer;\nuse rayon::par_iter::*;\nuse std::f64::INFINITY;\nuse std::ops::Add;\n\n\/\/\/ Use Euclid's formula to count Pythagorean triples\n\/\/\/\n\/\/\/ https:\/\/en.wikipedia.org\/wiki\/Pythagorean_triple#Generating_a_triple\n\/\/\/\n\/\/\/ For coprime integers m and n, with m > n and m-n is odd, then\n\/\/\/ a = m²-n², b = 2mn, c = m²+n²\n\/\/\/\n\/\/\/ This is a coprime triple. 
Multiplying by factors k covers all triples.\nfn par_euclid(m_weight: f64, n_weight: f64) -> u32 {\n (1u32 .. 1000).into_par_iter().weight(m_weight).map(|m| {\n (1 .. m).into_par_iter().weight(n_weight)\n .filter(|n| (m - n).is_odd() && m.gcd(n) == 1)\n .map(|n| 1000000 \/ (m*m + n*n))\n .sum()\n }).sum()\n}\n\n\/\/\/ Same as par_euclid, without using rayon.\nfn euclid() -> u32 {\n (1u32 .. 1000).map(|m| {\n (1 .. m)\n .filter(|n| (m - n).is_odd() && m.gcd(n) == 1)\n .map(|n| 1000000 \/ (m*m + n*n))\n .fold(0, Add::add)\n }).fold(0, Add::add)\n}\n\n#[bench]\n\/\/\/ Benchmark without rayon at all\nfn euclid_serial(b: &mut test::Bencher) {\n let count = euclid();\n b.iter(|| assert_eq!(euclid(), count))\n}\n\n#[bench]\n\/\/\/ Use zero weights to force it fully serialized.\nfn euclid_faux_serial(b: &mut test::Bencher) {\n rayon::initialize();\n let count = euclid();\n b.iter(|| assert_eq!(par_euclid(0.0, 0.0), count))\n}\n\n#[bench]\n\/\/\/ Use the default weights (1.0)\nfn euclid_default(b: &mut test::Bencher) {\n rayon::initialize();\n let count = euclid();\n b.iter(|| assert_eq!(par_euclid(1.0, 1.0), count))\n}\n\n#[bench]\n\/\/\/ Use infinite weight to force the outer loop parallelized.\nfn euclid_parallel_outer(b: &mut test::Bencher) {\n rayon::initialize();\n let count = euclid();\n b.iter(|| assert_eq!(par_euclid(INFINITY, 1.0), count))\n}\n\n#[bench]\n\/\/\/ Use infinite weights to force it fully parallelized.\nfn euclid_parallel_full(b: &mut test::Bencher) {\n rayon::initialize();\n let count = euclid();\n b.iter(|| assert_eq!(par_euclid(INFINITY, INFINITY), count))\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io;\nuse std::slice;\nuse std::str;\nuse std::sync::Arc;\n\nuse httparse;\n\nuse io2::Parse;\n\npub struct Request {\n method: Slice,\n path: Slice,\n version: u8,\n \/\/ TODO: use a small vec to avoid this unconditional allocation\n headers: Vec<(Slice, Slice)>,\n data: Arc<Vec<u8>>,\n}\n\ntype Slice = (usize, usize);\n\npub struct RequestHeaders<'req> {\n headers: slice::Iter<'req, (Slice, Slice)>,\n req: &'req Request,\n}\n\nimpl Request {\n pub fn method(&self) -> &str {\n str::from_utf8(self.slice(&self.method)).unwrap()\n }\n\n pub fn path(&self) -> &str {\n str::from_utf8(self.slice(&self.path)).unwrap()\n }\n\n pub fn version(&self) -> u8 {\n self.version\n }\n\n pub fn headers(&self) -> RequestHeaders {\n RequestHeaders {\n headers: self.headers.iter(),\n req: self,\n }\n }\n\n fn slice(&self, s: &Slice) -> &[u8] {\n &self.data[s.0..s.1]\n }\n}\n\nimpl Parse for Request {\n type Parser = ();\n \/\/ FiXME: probably want a different error type\n type Error = io::Error;\n\n fn parse(_: &mut (),\n buf: &Arc<Vec<u8>>,\n offset: usize)\n -> Option<Result<(Request, usize), io::Error>> {\n let mut headers = [httparse::EMPTY_HEADER; 16];\n let mut r = httparse::Request::new(&mut headers);\n let status = match r.parse(&buf[offset..]) {\n Ok(status) => status,\n Err(e) => {\n return Some(Err(io::Error::new(io::ErrorKind::Other,\n format!(\"failed to parse http request: {:?}\", e))))\n }\n };\n let toslice = |a: &[u8]| {\n let start = a.as_ptr() as usize - buf.as_ptr() as usize;\n assert!(start < buf.len());\n (start, start + a.len())\n };\n match status {\n httparse::Status::Complete(amt) => {\n Some(Ok((Request {\n method: toslice(r.method.unwrap().as_bytes()),\n path: toslice(r.path.unwrap().as_bytes()),\n version: r.version.unwrap(),\n headers: r.headers\n .iter()\n .map(|h| (toslice(h.name.as_bytes()), toslice(h.value)))\n .collect(),\n data: buf.clone(),\n }, 
amt)))\n }\n httparse::Status::Partial => None\n }\n }\n}\n\nimpl<'req> Iterator for RequestHeaders<'req> {\n type Item = (&'req str, &'req [u8]);\n\n fn next(&mut self) -> Option<(&'req str, &'req [u8])> {\n self.headers.next().map(|&(ref a, ref b)| {\n let a = str::from_utf8(self.req.slice(a)).unwrap();\n let b = self.req.slice(b);\n (a, b)\n })\n }\n}\n<commit_msg>Add comment that HTTP request parsing should be fixed<commit_after>use std::io;\nuse std::slice;\nuse std::str;\nuse std::sync::Arc;\n\nuse httparse;\n\nuse io2::Parse;\n\npub struct Request {\n method: Slice,\n path: Slice,\n version: u8,\n \/\/ TODO: use a small vec to avoid this unconditional allocation\n headers: Vec<(Slice, Slice)>,\n data: Arc<Vec<u8>>,\n}\n\ntype Slice = (usize, usize);\n\npub struct RequestHeaders<'req> {\n headers: slice::Iter<'req, (Slice, Slice)>,\n req: &'req Request,\n}\n\nimpl Request {\n pub fn method(&self) -> &str {\n str::from_utf8(self.slice(&self.method)).unwrap()\n }\n\n pub fn path(&self) -> &str {\n str::from_utf8(self.slice(&self.path)).unwrap()\n }\n\n pub fn version(&self) -> u8 {\n self.version\n }\n\n pub fn headers(&self) -> RequestHeaders {\n RequestHeaders {\n headers: self.headers.iter(),\n req: self,\n }\n }\n\n fn slice(&self, s: &Slice) -> &[u8] {\n &self.data[s.0..s.1]\n }\n}\n\nimpl Parse for Request {\n type Parser = ();\n \/\/ FiXME: probably want a different error type\n type Error = io::Error;\n\n fn parse(_: &mut (),\n buf: &Arc<Vec<u8>>,\n offset: usize)\n -> Option<Result<(Request, usize), io::Error>> {\n \/\/ TODO: we should grow this headers array if parsing fails and asks for\n \/\/ more headers\n let mut headers = [httparse::EMPTY_HEADER; 16];\n let mut r = httparse::Request::new(&mut headers);\n let status = match r.parse(&buf[offset..]) {\n Ok(status) => status,\n Err(e) => {\n return Some(Err(io::Error::new(io::ErrorKind::Other,\n format!(\"failed to parse http request: {:?}\", e))))\n }\n };\n let toslice = |a: &[u8]| {\n let start = a.as_ptr() as usize - buf.as_ptr() as usize;\n assert!(start < buf.len());\n (start, start + a.len())\n };\n match status {\n httparse::Status::Complete(amt) => {\n Some(Ok((Request {\n method: toslice(r.method.unwrap().as_bytes()),\n path: toslice(r.path.unwrap().as_bytes()),\n version: r.version.unwrap(),\n headers: r.headers\n .iter()\n .map(|h| (toslice(h.name.as_bytes()), toslice(h.value)))\n .collect(),\n data: buf.clone(),\n }, amt)))\n }\n httparse::Status::Partial => None\n }\n }\n}\n\nimpl<'req> Iterator for RequestHeaders<'req> {\n type Item = (&'req str, &'req [u8]);\n\n fn next(&mut self) -> Option<(&'req str, &'req [u8])> {\n self.headers.next().map(|&(ref a, ref b)| {\n let a = str::from_utf8(self.req.slice(a)).unwrap();\n let b = self.req.slice(b);\n (a, b)\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test to check if inlining works for any operand<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z span_free_formats\n\n\/\/ Tests that MIR inliner works for any operand\n\nfn main() {\n println!(\"{}\", bar());\n}\n\n#[inline(always)]\nfn foo(x: i32, y: i32) -> bool {\n x == y\n}\n\nfn bar() -> bool {\n let f = foo;\n f(1, -1)\n}\n\n\/\/ END RUST SOURCE\n\/\/ START rustc.bar.Inline.after.mir\n\/\/ ...\n\/\/ bb0: {\n\/\/ ...\n\/\/ _0 = Eq(move _3, move _4);\n\/\/ ...\n\/\/ return;\n\/\/ }\n\/\/ ...\n\/\/ END rustc.bar.Inline.after.mir\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Keep a record of this paranoia-induced test of swapchain behaviour<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create main.rs<commit_after>\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix Tests<commit_after><|endoftext|>"} {"text":"<commit_before>#[macro_use]\nextern crate glium;\n\nuse glium::Surface;\nuse glium::glutin;\n\nmod support;\n\n#[derive(Copy, Clone, Debug)]\nstruct PerInstance {\n pub id: u32,\n pub w_position: (f32, f32, f32),\n pub color: (f32, f32, f32),\n}\nimplement_vertex!(PerInstance, id, w_position, color);\n\nfn main() {\n use glium::DisplayBuild;\n\n \/\/ building the display, ie. the main object\n let display = glutin::WindowBuilder::new()\n .with_depth_buffer(24)\n .build_glium()\n .unwrap();\n\n \/\/ building the vertex and index buffers\n let vertex_buffer = support::load_wavefront(&display, include_bytes!(\"support\/teapot.obj\"));\n\n \/\/ the program\n let program = program!(&display,\n 140 => {\n vertex: \"\n #version 140\n\n uniform mat4 persp_matrix;\n uniform mat4 view_matrix;\n\n in uint id;\n in vec3 w_position;\n in vec3 color;\n in vec3 position;\n in vec3 normal;\n out vec3 v_normal;\n out vec3 v_color;\n\n void main() {\n v_normal = normal;\n v_color = color;\n gl_Position = persp_matrix * view_matrix * vec4(position * 0.005 + w_position, 1.0);\n }\n \",\n\n fragment: \"\n #version 140\n\n in vec3 v_normal;\n in vec3 v_color;\n out vec4 f_color;\n\n const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);\n\n void main() {\n float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);\n vec3 color = (0.3 + 0.7 * lum) * v_color;\n f_color = vec4(color, 1.0);\n }\n \",\n },\n ).unwrap();\n\n let picking_program = program!(&display,\n 140 => {\n vertex: \"\n #version 140\n\n uniform mat4 persp_matrix;\n uniform mat4 view_matrix;\n\n in uint id;\n in vec3 w_position;\n in vec3 color;\n in vec3 position;\n in vec3 normal;\n flat out uint v_id;\n\n void main() {\n v_id = id;\n gl_Position = persp_matrix * view_matrix * vec4(position * 0.005 + w_position, 1.0);\n }\n \",\n\n fragment: \"\n #version 140\n\n flat in uint v_id;\n out uint f_id;\n\n void main() {\n f_id = v_id;\n }\n \",\n },\n ).unwrap();\n\n \/\/\n let mut camera = support::camera::CameraState::new();\n camera.set_position((0.0, 0.0, 1.5));\n camera.set_direction((0.0, 0.0, 1.0));\n\n \/\/id's must be unique and != 0\n let mut per_instance = vec![\n PerInstance { id: 1, w_position: (-1.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n PerInstance { id: 2, w_position: ( 0.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n PerInstance { id: 3, w_position: ( 1.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n ];\n per_instance.sort_by(|a, b| a.id.cmp(&b.id));\n let original = per_instance.clone();\n\n let mut picking_attachments: Option<(glium::texture::UnsignedTexture2d, glium::framebuffer::DepthRenderBuffer)> = None;\n let picking_pbo: glium::texture::pixel_buffer::PixelBuffer<u32>\n = 
glium::texture::pixel_buffer::PixelBuffer::new_empty(&display, 1);\n\n\n let mut cursor_position: Option<(i32, i32)> = None;\n\n \/\/ the main loop\n support::start_loop(|| {\n camera.update();\n\n\n \/\/ determing which object has been picked at the previous frame\n let picked_object = {\n let data = picking_pbo.read().map(|d| d[0]).unwrap_or(0);\n if data != 0 {\n per_instance.binary_search_by(|x| x.id.cmp(&data)).ok()\n } else {\n None\n }\n };\n\n if let Some(index) = picked_object {\n per_instance[index as usize] = PerInstance {\n id: per_instance[index as usize].id,\n w_position: per_instance[index as usize].w_position,\n color: (0.0, 1.0, 0.0)\n };\n } else {\n per_instance = original.clone();\n }\n\n \/\/ building the uniforms\n let uniforms = uniform! {\n persp_matrix: camera.get_perspective(),\n view_matrix: camera.get_view(),\n };\n\n \/\/ draw parameters\n let params = glium::DrawParameters {\n depth: glium::Depth {\n test: glium::DepthTest::IfLess,\n write: true,\n .. Default::default()\n },\n .. Default::default()\n };\n\n let per_instance_buffer = glium::vertex::VertexBuffer::new(&display, &per_instance).unwrap();\n\n \/\/ drawing a frame\n let mut target = display.draw();\n target.clear_color_and_depth((0.0, 0.0, 0.0, 0.0), 1.0);\n\n \/\/update picking texture\n if picking_attachments.is_none() || (\n picking_attachments.as_ref().unwrap().0.get_width(),\n picking_attachments.as_ref().unwrap().0.get_height().unwrap()\n ) != target.get_dimensions() {\n let (width, height) = target.get_dimensions();\n picking_attachments = Some((\n glium::texture::UnsignedTexture2d::empty_with_format(\n &display,\n glium::texture::UncompressedUintFormat::U32,\n glium::texture::MipmapsOption::NoMipmap,\n width, height,\n ).unwrap(),\n glium::framebuffer::DepthRenderBuffer::new(\n &display,\n glium::texture::DepthFormat::F32,\n width, height,\n ).unwrap()\n ))\n }\n\n \/\/ drawing the models and pass the picking texture\n if let Some((ref picking_texture, ref depth_buffer)) = picking_attachments {\n \/\/clearing the picking texture\n picking_texture.main_level().first_layer().into_image(None).unwrap().raw_clear_buffer([0u32, 0, 0, 0]);\n\n let mut picking_target = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer(&display, picking_texture, depth_buffer).unwrap();\n picking_target.clear_depth(1.0);\n picking_target.draw((&vertex_buffer, per_instance_buffer.per_instance().unwrap()),\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n &picking_program, &uniforms, ¶ms).unwrap();\n }\n target.draw((&vertex_buffer, per_instance_buffer.per_instance().unwrap()),\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n &program, &uniforms, ¶ms).unwrap();\n target.finish().unwrap();\n\n\n \/\/ committing into the picking pbo\n if let (Some(cursor), Some(&(ref picking_texture, _))) = (cursor_position, picking_attachments.as_ref()) {\n let read_target = glium::Rect {\n \/\/left: ((cursor.0 as f32 + 1.0) * 0.5 * picking_texture.get_width() as f32) as u32,\n \/\/bottom: ((cursor.1 as f32 + 1.0) * 0.5 * picking_texture.get_height().unwrap() as f32) as u32,\n left: cursor.0 as u32,\n bottom: picking_texture.get_height().unwrap() - cursor.1 as u32,\n width: 1,\n height: 1,\n };\n\n if read_target.left < picking_texture.get_width()\n && read_target.bottom < picking_texture.get_height().unwrap() {\n \/\/println!(\"writing to pixel_buffer\");\n picking_texture.main_level()\n .first_layer()\n .into_image(None).unwrap()\n .raw_read_to_pixel_buffer(&read_target, 
&picking_pbo);\n } else {\n picking_pbo.write(&[0]);\n }\n } else {\n picking_pbo.write(&[0]);\n }\n\n \/\/ polling and handling the events received by the window\n for event in display.poll_events() {\n match event {\n glutin::Event::Closed => return support::Action::Stop,\n glutin::Event::MouseMoved(m) => cursor_position = Some(m),\n ev => camera.process_input(&ev),\n }\n }\n\n support::Action::Continue\n });\n}\n<commit_msg>removed some stale comments<commit_after>#[macro_use]\nextern crate glium;\n\nuse glium::Surface;\nuse glium::glutin;\n\nmod support;\n\n#[derive(Copy, Clone, Debug)]\nstruct PerInstance {\n pub id: u32,\n pub w_position: (f32, f32, f32),\n pub color: (f32, f32, f32),\n}\nimplement_vertex!(PerInstance, id, w_position, color);\n\nfn main() {\n use glium::DisplayBuild;\n\n \/\/ building the display, ie. the main object\n let display = glutin::WindowBuilder::new()\n .with_depth_buffer(24)\n .build_glium()\n .unwrap();\n\n \/\/ building the vertex and index buffers\n let vertex_buffer = support::load_wavefront(&display, include_bytes!(\"support\/teapot.obj\"));\n\n \/\/ the program\n let program = program!(&display,\n 140 => {\n vertex: \"\n #version 140\n\n uniform mat4 persp_matrix;\n uniform mat4 view_matrix;\n\n in uint id;\n in vec3 w_position;\n in vec3 color;\n in vec3 position;\n in vec3 normal;\n out vec3 v_normal;\n out vec3 v_color;\n\n void main() {\n v_normal = normal;\n v_color = color;\n gl_Position = persp_matrix * view_matrix * vec4(position * 0.005 + w_position, 1.0);\n }\n \",\n\n fragment: \"\n #version 140\n\n in vec3 v_normal;\n in vec3 v_color;\n out vec4 f_color;\n\n const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);\n\n void main() {\n float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);\n vec3 color = (0.3 + 0.7 * lum) * v_color;\n f_color = vec4(color, 1.0);\n }\n \",\n },\n ).unwrap();\n\n \/\/ the picking program\n let picking_program = program!(&display,\n 140 => {\n vertex: \"\n #version 140\n\n uniform mat4 persp_matrix;\n uniform mat4 view_matrix;\n\n in uint id;\n in vec3 w_position;\n in vec3 color;\n in vec3 position;\n in vec3 normal;\n flat out uint v_id;\n\n void main() {\n v_id = id;\n gl_Position = persp_matrix * view_matrix * vec4(position * 0.005 + w_position, 1.0);\n }\n \",\n\n fragment: \"\n #version 140\n\n flat in uint v_id;\n out uint f_id;\n\n void main() {\n f_id = v_id;\n }\n \",\n },\n ).unwrap();\n\n let mut camera = support::camera::CameraState::new();\n camera.set_position((0.0, 0.0, 1.5));\n camera.set_direction((0.0, 0.0, 1.0));\n\n \/\/id's must be unique and != 0\n let mut per_instance = vec![\n PerInstance { id: 1, w_position: (-1.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n PerInstance { id: 2, w_position: ( 0.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n PerInstance { id: 3, w_position: ( 1.0, 0.0, 0.0), color: (1.0, 0.0, 0.0)},\n ];\n per_instance.sort_by(|a, b| a.id.cmp(&b.id));\n let original = per_instance.clone();\n\n let mut picking_attachments: Option<(glium::texture::UnsignedTexture2d, glium::framebuffer::DepthRenderBuffer)> = None;\n let picking_pbo: glium::texture::pixel_buffer::PixelBuffer<u32>\n = glium::texture::pixel_buffer::PixelBuffer::new_empty(&display, 1);\n\n\n let mut cursor_position: Option<(i32, i32)> = None;\n\n \/\/ the main loop\n support::start_loop(|| {\n camera.update();\n\n\n \/\/ determing which object has been picked at the previous frame\n let picked_object = {\n let data = picking_pbo.read().map(|d| d[0]).unwrap_or(0);\n if data != 0 {\n per_instance.binary_search_by(|x| 
x.id.cmp(&data)).ok()\n } else {\n None\n }\n };\n\n if let Some(index) = picked_object {\n per_instance[index as usize] = PerInstance {\n id: per_instance[index as usize].id,\n w_position: per_instance[index as usize].w_position,\n color: (0.0, 1.0, 0.0)\n };\n } else {\n per_instance = original.clone();\n }\n\n \/\/ building the uniforms\n let uniforms = uniform! {\n persp_matrix: camera.get_perspective(),\n view_matrix: camera.get_view(),\n };\n\n \/\/ draw parameters\n let params = glium::DrawParameters {\n depth: glium::Depth {\n test: glium::DepthTest::IfLess,\n write: true,\n .. Default::default()\n },\n .. Default::default()\n };\n\n let per_instance_buffer = glium::vertex::VertexBuffer::new(&display, &per_instance).unwrap();\n\n \/\/ drawing a frame\n let mut target = display.draw();\n target.clear_color_and_depth((0.0, 0.0, 0.0, 0.0), 1.0);\n\n \/\/update picking texture\n if picking_attachments.is_none() || (\n picking_attachments.as_ref().unwrap().0.get_width(),\n picking_attachments.as_ref().unwrap().0.get_height().unwrap()\n ) != target.get_dimensions() {\n let (width, height) = target.get_dimensions();\n picking_attachments = Some((\n glium::texture::UnsignedTexture2d::empty_with_format(\n &display,\n glium::texture::UncompressedUintFormat::U32,\n glium::texture::MipmapsOption::NoMipmap,\n width, height,\n ).unwrap(),\n glium::framebuffer::DepthRenderBuffer::new(\n &display,\n glium::texture::DepthFormat::F32,\n width, height,\n ).unwrap()\n ))\n }\n\n \/\/ drawing the models and pass the picking texture\n if let Some((ref picking_texture, ref depth_buffer)) = picking_attachments {\n \/\/clearing the picking texture\n picking_texture.main_level().first_layer().into_image(None).unwrap().raw_clear_buffer([0u32, 0, 0, 0]);\n\n let mut picking_target = glium::framebuffer::SimpleFrameBuffer::with_depth_buffer(&display, picking_texture, depth_buffer).unwrap();\n picking_target.clear_depth(1.0);\n picking_target.draw((&vertex_buffer, per_instance_buffer.per_instance().unwrap()),\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n &picking_program, &uniforms, ¶ms).unwrap();\n }\n target.draw((&vertex_buffer, per_instance_buffer.per_instance().unwrap()),\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n &program, &uniforms, ¶ms).unwrap();\n target.finish().unwrap();\n\n\n \/\/ committing into the picking pbo\n if let (Some(cursor), Some(&(ref picking_texture, _))) = (cursor_position, picking_attachments.as_ref()) {\n let read_target = glium::Rect {\n left: cursor.0 as u32,\n bottom: picking_texture.get_height().unwrap() - cursor.1 as u32,\n width: 1,\n height: 1,\n };\n\n if read_target.left < picking_texture.get_width()\n && read_target.bottom < picking_texture.get_height().unwrap() {\n picking_texture.main_level()\n .first_layer()\n .into_image(None).unwrap()\n .raw_read_to_pixel_buffer(&read_target, &picking_pbo);\n } else {\n picking_pbo.write(&[0]);\n }\n } else {\n picking_pbo.write(&[0]);\n }\n\n \/\/ polling and handling the events received by the window\n for event in display.poll_events() {\n match event {\n glutin::Event::Closed => return support::Action::Stop,\n glutin::Event::MouseMoved(m) => cursor_position = Some(m),\n ev => camera.process_input(&ev),\n }\n }\n\n support::Action::Continue\n });\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module implements the `Any` trait, which enables dynamic typing\n\/\/! of any `'static` type through runtime reflection.\n\/\/!\n\/\/! `Any` itself can be used to get a `TypeId`, and has more features when used\n\/\/! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and\n\/\/! `as_ref` methods, to test if the contained value is of a given type, and to\n\/\/! get a reference to the inner value as a type. As `&mut Any`, there is also\n\/\/! the `as_mut` method, for getting a mutable reference to the inner value.\n\/\/! `Box<Any>` adds the `move` method, which will unwrap a `Box<T>` from the\n\/\/! object. See the extension traits (`*Ext`) for the full details.\n\/\/!\n\/\/! Note that &Any is limited to testing whether a value is of a specified\n\/\/! concrete type, and cannot be used to test whether a type implements a trait.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! Consider a situation where we want to log out a value passed to a function.\n\/\/! We know the value we're working on implements Debug, but we don't know its\n\/\/! concrete type. We want to give special treatment to certain types: in this\n\/\/! case printing out the length of String values prior to their value.\n\/\/! We don't know the concrete type of our value at compile time, so we need to\n\/\/! use runtime reflection instead.\n\/\/!\n\/\/! ```rust\n\/\/! use std::fmt::Debug;\n\/\/! use std::any::Any;\n\/\/!\n\/\/! \/\/ Logger function for any type that implements Debug.\n\/\/! fn log<T: Any + Debug>(value: &T) {\n\/\/! let value_any = value as &Any;\n\/\/!\n\/\/! \/\/ try to convert our value to a String. If successful, we want to\n\/\/! \/\/ output the String's length as well as its value. If not, it's a\n\/\/! \/\/ different type: just print it out unadorned.\n\/\/! match value_any.downcast_ref::<String>() {\n\/\/! Some(as_string) => {\n\/\/! println!(\"String ({}): {}\", as_string.len(), as_string);\n\/\/! }\n\/\/! None => {\n\/\/! println!(\"{:?}\", value);\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ This function wants to log its parameter out prior to doing work with it.\n\/\/! fn do_work<T: Any + Debug>(value: &T) {\n\/\/! log(value);\n\/\/! \/\/ ...do some other work\n\/\/! }\n\/\/!\n\/\/! fn main() {\n\/\/! let my_string = \"Hello World\".to_string();\n\/\/! do_work(&my_string);\n\/\/!\n\/\/! let my_i8: i8 = 100;\n\/\/! do_work(&my_i8);\n\/\/! }\n\/\/! 
```\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fmt;\nuse marker::Send;\nuse mem::transmute;\nuse option::Option::{self, Some, None};\nuse raw::TraitObject;\nuse intrinsics;\nuse marker::{Reflect, Sized};\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Any trait\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ A type to emulate dynamic typing.\n\/\/\/\n\/\/\/ Every type with no non-`'static` references implements `Any`.\n\/\/\/ See the [module-level documentation][mod] for more details.\n\/\/\/\n\/\/\/ [mod]: index.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Any: Reflect + 'static {\n \/\/\/ Gets the `TypeId` of `self`.\n #[unstable(feature = \"get_type_id\",\n reason = \"this method will likely be replaced by an associated static\",\n issue = \"27745\")]\n fn get_type_id(&self) -> TypeId;\n}\n\nimpl<T: Reflect + 'static> Any for T {\n fn get_type_id(&self) -> TypeId { TypeId::of::<T>() }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Extension methods for Any trait objects.\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Debug for Any {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"Any\")\n }\n}\n\n\/\/ Ensure that the result of e.g. joining a thread can be printed and\n\/\/ hence used with `unwrap`. 
May eventually no longer be needed if\n\/\/ dispatch works with upcasting.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Debug for Any + Send {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"Any\")\n }\n}\n\nimpl Any {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn is<T: Any>(&self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::<T>();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.get_type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_ref<T: Any>(&self) -> Option<&T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&*(to.data as *const T))\n }\n } else {\n None\n }\n }\n\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&mut *(to.data as *const T as *mut T))\n }\n } else {\n None\n }\n }\n}\n\nimpl Any+Send {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn is<T: Any>(&self) -> bool {\n Any::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_ref<T: Any>(&self) -> Option<&T> {\n Any::downcast_ref::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {\n Any::downcast_mut::<T>(self)\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ TypeID and its methods\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ A `TypeId` represents a globally unique identifier for a type.\n\/\/\/\n\/\/\/ Each `TypeId` is an opaque object which does not allow inspection of what's\n\/\/\/ inside but does allow basic operations such as cloning, comparison,\n\/\/\/ printing, and showing.\n\/\/\/\n\/\/\/ A `TypeId` is currently only available for types which ascribe to `'static`,\n\/\/\/ but this limitation may be removed in the future.\n#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct TypeId {\n t: u64,\n}\n\nimpl TypeId {\n \/\/\/ Returns the `TypeId` of the type this generic function has been\n \/\/\/ instantiated with\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn of<T: ?Sized + Reflect + 'static>() -> TypeId {\n TypeId {\n t: unsafe { intrinsics::type_id::<T>() },\n }\n }\n}\n<commit_msg>Any docs: as_ref doesn't exist anymore<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module implements the `Any` trait, which enables dynamic typing\n\/\/! of any `'static` type through runtime reflection.\n\/\/!\n\/\/! `Any` itself can be used to get a `TypeId`, and has more features when used\n\/\/! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and\n\/\/! `downcast_ref` methods, to test if the contained value is of a given type,\n\/\/! and to get a reference to the inner value as a type. As `&mut Any`, there\n\/\/! is also the `downcast_mut` method, for getting a mutable reference to the\n\/\/! inner value. `Box<Any>` adds the `move` method, which will unwrap a\n\/\/! `Box<T>` from the object. See the extension traits (`*Ext`) for the full\n\/\/! details.\n\/\/!\n\/\/! Note that &Any is limited to testing whether a value is of a specified\n\/\/! concrete type, and cannot be used to test whether a type implements a trait.\n\/\/!\n\/\/! # Examples\n\/\/!\n\/\/! Consider a situation where we want to log out a value passed to a function.\n\/\/! We know the value we're working on implements Debug, but we don't know its\n\/\/! concrete type. We want to give special treatment to certain types: in this\n\/\/! case printing out the length of String values prior to their value.\n\/\/! We don't know the concrete type of our value at compile time, so we need to\n\/\/! use runtime reflection instead.\n\/\/!\n\/\/! ```rust\n\/\/! use std::fmt::Debug;\n\/\/! use std::any::Any;\n\/\/!\n\/\/! \/\/ Logger function for any type that implements Debug.\n\/\/! fn log<T: Any + Debug>(value: &T) {\n\/\/! let value_any = value as &Any;\n\/\/!\n\/\/! \/\/ try to convert our value to a String. If successful, we want to\n\/\/! \/\/ output the String's length as well as its value. If not, it's a\n\/\/! \/\/ different type: just print it out unadorned.\n\/\/! match value_any.downcast_ref::<String>() {\n\/\/! Some(as_string) => {\n\/\/! println!(\"String ({}): {}\", as_string.len(), as_string);\n\/\/! }\n\/\/! None => {\n\/\/! println!(\"{:?}\", value);\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ This function wants to log its parameter out prior to doing work with it.\n\/\/! fn do_work<T: Any + Debug>(value: &T) {\n\/\/! log(value);\n\/\/! \/\/ ...do some other work\n\/\/! }\n\/\/!\n\/\/! fn main() {\n\/\/! let my_string = \"Hello World\".to_string();\n\/\/! do_work(&my_string);\n\/\/!\n\/\/! let my_i8: i8 = 100;\n\/\/! do_work(&my_i8);\n\/\/! }\n\/\/! 
```\n\n#![stable(feature = \"rust1\", since = \"1.0.0\")]\n\nuse fmt;\nuse marker::Send;\nuse mem::transmute;\nuse option::Option::{self, Some, None};\nuse raw::TraitObject;\nuse intrinsics;\nuse marker::{Reflect, Sized};\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Any trait\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ A type to emulate dynamic typing.\n\/\/\/\n\/\/\/ Every type with no non-`'static` references implements `Any`.\n\/\/\/ See the [module-level documentation][mod] for more details.\n\/\/\/\n\/\/\/ [mod]: index.html\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub trait Any: Reflect + 'static {\n \/\/\/ Gets the `TypeId` of `self`.\n #[unstable(feature = \"get_type_id\",\n reason = \"this method will likely be replaced by an associated static\",\n issue = \"27745\")]\n fn get_type_id(&self) -> TypeId;\n}\n\nimpl<T: Reflect + 'static> Any for T {\n fn get_type_id(&self) -> TypeId { TypeId::of::<T>() }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Extension methods for Any trait objects.\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Debug for Any {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"Any\")\n }\n}\n\n\/\/ Ensure that the result of e.g. joining a thread can be printed and\n\/\/ hence used with `unwrap`. 
May eventually no longer be needed if\n\/\/ dispatch works with upcasting.\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\nimpl fmt::Debug for Any + Send {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.pad(\"Any\")\n }\n}\n\nimpl Any {\n \/\/\/ Returns true if the boxed type is the same as `T`\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn is<T: Any>(&self) -> bool {\n \/\/ Get TypeId of the type this function is instantiated with\n let t = TypeId::of::<T>();\n\n \/\/ Get TypeId of the type in the trait object\n let boxed = self.get_type_id();\n\n \/\/ Compare both TypeIds on equality\n t == boxed\n }\n\n \/\/\/ Returns some reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_ref<T: Any>(&self) -> Option<&T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&*(to.data as *const T))\n }\n } else {\n None\n }\n }\n\n \/\/\/ Returns some mutable reference to the boxed value if it is of type `T`, or\n \/\/\/ `None` if it isn't.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {\n if self.is::<T>() {\n unsafe {\n \/\/ Get the raw representation of the trait object\n let to: TraitObject = transmute(self);\n\n \/\/ Extract the data pointer\n Some(&mut *(to.data as *const T as *mut T))\n }\n } else {\n None\n }\n }\n}\n\nimpl Any+Send {\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn is<T: Any>(&self) -> bool {\n Any::is::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_ref<T: Any>(&self) -> Option<&T> {\n Any::downcast_ref::<T>(self)\n }\n\n \/\/\/ Forwards to the method defined on the type `Any`.\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n #[inline]\n pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {\n Any::downcast_mut::<T>(self)\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ TypeID and its methods\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n\/\/\/ A `TypeId` represents a globally unique identifier for a type.\n\/\/\/\n\/\/\/ Each `TypeId` is an opaque object which does not allow inspection of what's\n\/\/\/ inside but does allow basic operations such as cloning, comparison,\n\/\/\/ printing, and showing.\n\/\/\/\n\/\/\/ A `TypeId` is currently only available for types which ascribe to `'static`,\n\/\/\/ but this limitation may be removed in the future.\n#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]\n#[stable(feature = \"rust1\", since = \"1.0.0\")]\npub struct TypeId {\n t: u64,\n}\n\nimpl TypeId {\n \/\/\/ Returns the `TypeId` of the type this generic function has been\n \/\/\/ instantiated with\n #[stable(feature = \"rust1\", since = \"1.0.0\")]\n pub fn of<T: ?Sized + Reflect + 'static>() -> TypeId {\n TypeId {\n t: unsafe { intrinsics::type_id::<T>() },\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! *It is not recommended to use the core library*. The stable\n\/\/! functionality of libcore is reexported from the\n\/\/! [standard library](..\/std\/index.html). The composition of this library is\n\/\/! subject to change over time; only the interface exposed through libstd is\n\/\/! intended to be stable.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_unwind` - This function takes three arguments, a\n\/\/! `fmt::Arguments`, a `&str`, and a `u32`. These three arguments dictate\n\/\/! the panic message, the file at which panic was invoked, and the line.\n\/\/! It is up to consumers of this core library to define this panic\n\/\/! function; it is only required to never return.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. 
(Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"core\"]\n#![unstable(feature = \"core\",\n reason = \"the libcore library has not yet been scrutinized for \\\n stabilization in terms of structure and naming\",\n issue = \"27701\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\")]\n#![doc(test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]\n\n#![no_core]\n#![allow(raw_pointer_derive)]\n#![deny(missing_docs)]\n\n#![feature(allow_internal_unstable)]\n#![feature(associated_type_defaults)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(custom_attribute)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(lang_items)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(reflect)]\n#![feature(rustc_attrs)]\n#![feature(unwind_attributes)]\n#![cfg_attr(stage0, feature(simd))]\n#![cfg_attr(not(stage0), feature(repr_simd, platform_intrinsics))]\n#![feature(staged_api)]\n#![feature(unboxed_closures)]\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod cmp_macros;\n\n#[path = \"num\/float_macros.rs\"]\n#[macro_use]\nmod float_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod mem;\npub mod nonzero;\npub mod ptr;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod any;\npub mod array;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\n#[cfg(stage0)]\n#[path = \"simd_old.rs\"]\npub mod simd;\n#[cfg(not(stage0))]\npub mod simd;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\n\n\/\/ note: does not need to be public\nmod tuple;\n<commit_msg>Remove claims of dependency-free libcore<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
# The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free[^free] foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! [^free]: Strictly speaking, there are some symbols which are needed but\n\/\/! they aren't always neccesary.\n\/\/!\n\/\/! The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! *It is not recommended to use the core library*. The stable\n\/\/! functionality of libcore is reexported from the\n\/\/! [standard library](..\/std\/index.html). The composition of this library is\n\/\/! subject to change over time; only the interface exposed through libstd is\n\/\/! intended to be stable.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_unwind` - This function takes three arguments, a\n\/\/! `fmt::Arguments`, a `&str`, and a `u32`. These three arguments dictate\n\/\/! the panic message, the file at which panic was invoked, and the line.\n\/\/! It is up to consumers of this core library to define this panic\n\/\/! function; it is only required to never return.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. 
(Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"core\"]\n#![unstable(feature = \"core\",\n reason = \"the libcore library has not yet been scrutinized for \\\n stabilization in terms of structure and naming\",\n issue = \"27701\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\")]\n#![doc(test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]\n\n#![no_core]\n#![allow(raw_pointer_derive)]\n#![deny(missing_docs)]\n\n#![feature(allow_internal_unstable)]\n#![feature(associated_type_defaults)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(custom_attribute)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(lang_items)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(reflect)]\n#![feature(rustc_attrs)]\n#![feature(unwind_attributes)]\n#![cfg_attr(stage0, feature(simd))]\n#![cfg_attr(not(stage0), feature(repr_simd, platform_intrinsics))]\n#![feature(staged_api)]\n#![feature(unboxed_closures)]\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod cmp_macros;\n\n#[path = \"num\/float_macros.rs\"]\n#[macro_use]\nmod float_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod mem;\npub mod nonzero;\npub mod ptr;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod any;\npub mod array;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\n#[cfg(stage0)]\n#[path = \"simd_old.rs\"]\npub mod simd;\n#[cfg(not(stage0))]\npub mod simd;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\n\n\/\/ note: does not need to be public\nmod tuple;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add some basic `Option` methods<commit_after><|endoftext|>"} {"text":"<commit_before>use column::Alignment::Left;\nuse file::File;\nuse super::lines::lines_view;\n\nuse std::cmp::max;\nuse std::iter::{AdditiveIterator, repeat};\n\n#[derive(PartialEq, Debug, Copy)]\npub struct Grid {\n pub across: bool,\n pub console_width: usize,\n}\n\nimpl Grid {\n fn fit_into_grid(&self, files: &[File]) -> Option<(usize, Vec<usize>)> {\n \/\/ TODO: this function could almost certainly be optimised...\n \/\/ surely not *all* of the numbers of lines 
are worth searching through!\n\n \/\/ Instead of numbers of columns, try to find the fewest number of *lines*\n \/\/ that the output will fit in.\n for num_lines in 1 .. files.len() {\n\n \/\/ The number of columns is the number of files divided by the number\n \/\/ of lines, *rounded up*.\n let mut num_columns = files.len() \/ num_lines;\n if files.len() % num_lines != 0 {\n num_columns += 1;\n }\n\n \/\/ Early abort: if there are so many columns that the width of the\n \/\/ *column separators* is bigger than the width of the screen, then\n \/\/ don't even try to tabulate it.\n \/\/ This is actually a necessary check, because the width is stored as\n \/\/ a usize, and making it go negative makes it huge instead, but it\n \/\/ also serves as a speed-up.\n let separator_width = (num_columns - 1) * 2;\n if self.console_width < separator_width {\n continue;\n }\n\n \/\/ Remove the separator width from the available space.\n let adjusted_width = self.console_width - separator_width;\n\n \/\/ Find the width of each column by adding the lengths of the file\n \/\/ names in that column up.\n let mut column_widths: Vec<usize> = repeat(0).take(num_columns).collect();\n for (index, file) in files.iter().enumerate() {\n let index = if self.across {\n index % num_columns\n }\n else {\n index \/ num_lines\n };\n column_widths[index] = max(column_widths[index], file.name.len());\n }\n\n \/\/ If they all fit in the terminal, combined, then success!\n if column_widths.iter().map(|&x| x).sum() < adjusted_width {\n return Some((num_lines, column_widths));\n }\n }\n\n \/\/ If you get here you have really long file names.\n return None;\n }\n\n pub fn view(&self, files: &[File]) {\n if let Some((num_lines, widths)) = self.fit_into_grid(files) {\n for y in 0 .. num_lines {\n for x in 0 .. widths.len() {\n let num = if self.across {\n y * widths.len() + x\n }\n else {\n y + num_lines * x\n };\n\n \/\/ Show whitespace in the place of trailing files\n if num >= files.len() {\n continue;\n }\n\n let ref file = files[num];\n let styled_name = file.file_colour().paint(file.name.as_slice()).to_string();\n if x == widths.len() - 1 {\n \/\/ The final column doesn't need to have trailing spaces\n print!(\"{}\", styled_name);\n }\n else {\n assert!(widths[x] >= file.name.len());\n print!(\"{}\", Left.pad_string(&styled_name, widths[x] - file.name.len() + 2));\n }\n }\n print!(\"\\n\");\n }\n }\n else {\n \/\/ Drop down to lines view if the file names are too big for a grid\n lines_view(files);\n }\n }\n}\n<commit_msg>Use file_name_width in grid<commit_after>use column::Alignment::Left;\nuse file::File;\nuse super::lines::lines_view;\n\nuse std::cmp::max;\nuse std::iter::{AdditiveIterator, repeat};\n\n#[derive(PartialEq, Debug, Copy)]\npub struct Grid {\n pub across: bool,\n pub console_width: usize,\n}\n\nimpl Grid {\n fn fit_into_grid(&self, files: &[File]) -> Option<(usize, Vec<usize>)> {\n \/\/ TODO: this function could almost certainly be optimised...\n \/\/ surely not *all* of the numbers of lines are worth searching through!\n\n \/\/ Instead of numbers of columns, try to find the fewest number of *lines*\n \/\/ that the output will fit in.\n for num_lines in 1 .. 
files.len() {\n\n \/\/ The number of columns is the number of files divided by the number\n \/\/ of lines, *rounded up*.\n let mut num_columns = files.len() \/ num_lines;\n if files.len() % num_lines != 0 {\n num_columns += 1;\n }\n\n \/\/ Early abort: if there are so many columns that the width of the\n \/\/ *column separators* is bigger than the width of the screen, then\n \/\/ don't even try to tabulate it.\n \/\/ This is actually a necessary check, because the width is stored as\n \/\/ a usize, and making it go negative makes it huge instead, but it\n \/\/ also serves as a speed-up.\n let separator_width = (num_columns - 1) * 2;\n if self.console_width < separator_width {\n continue;\n }\n\n \/\/ Remove the separator width from the available space.\n let adjusted_width = self.console_width - separator_width;\n\n \/\/ Find the width of each column by adding the lengths of the file\n \/\/ names in that column up.\n let mut column_widths: Vec<usize> = repeat(0).take(num_columns).collect();\n for (index, file) in files.iter().enumerate() {\n let index = if self.across {\n index % num_columns\n }\n else {\n index \/ num_lines\n };\n column_widths[index] = max(column_widths[index], file.file_name_width());\n }\n\n \/\/ If they all fit in the terminal, combined, then success!\n if column_widths.iter().map(|&x| x).sum() < adjusted_width {\n return Some((num_lines, column_widths));\n }\n }\n\n \/\/ If you get here you have really long file names.\n return None;\n }\n\n pub fn view(&self, files: &[File]) {\n if let Some((num_lines, widths)) = self.fit_into_grid(files) {\n for y in 0 .. num_lines {\n for x in 0 .. widths.len() {\n let num = if self.across {\n y * widths.len() + x\n }\n else {\n y + num_lines * x\n };\n\n \/\/ Show whitespace in the place of trailing files\n if num >= files.len() {\n continue;\n }\n\n let ref file = files[num];\n let styled_name = file.file_colour().paint(file.name.as_slice()).to_string();\n if x == widths.len() - 1 {\n \/\/ The final column doesn't need to have trailing spaces\n print!(\"{}\", styled_name);\n }\n else {\n assert!(widths[x] >= file.file_name_width());\n print!(\"{}\", Left.pad_string(&styled_name, widths[x] - file.file_name_width() + 2));\n }\n }\n print!(\"\\n\");\n }\n }\n else {\n \/\/ Drop down to lines view if the file names are too big for a grid\n lines_view(files);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Only Collect the First Character<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ante implementation in Rust<commit_after>\/\/ Copyright (c) 2013-2104 Michael Dvorkin\n\/\/ Ante is an esoteric programming language where all you've got is a deck of cards.\n\/\/\n\/\/ This is Ante implementation in Rust.\n\nextern crate regex;\nextern crate num;\n\nuse std::io::File;\nuse std::collections::HashMap;\n\nuse regex::Regex;\nuse num::bigint::BigInt;\n\n\nstruct Card {\n\trank: u32,\n\tsuit: u32\n}\n\nstruct Ante {\n\tpc: int, \/\/ Program counter (index within ante.code)\n\tline: int, \/\/ Current line number.\n code: Vec<Card>, \/\/ Array of cards.\n vars: HashMap<char, uint>, \/\/ Four registers hashed by suit.\n labels: HashMap<uint, uint>, \/\/ Labels for ante.pc to jump to.\n buffer: Vec<char> \/\/ Buffer to collect UTF-8 character bytes.\n}\n\nimpl Ante {\n fn new() -> Ante {\n let mut vars = HashMap::new();\n vars.insert('♦', 0);\n vars.insert('♥', 0);\n vars.insert('♠', 0);\n vars.insert('♣', 0);\n\n Ante {\n pc: 0,\n line: 0,\n code: vec![],\n vars: vars,\n labels: HashMap::new(),\n buffer: 
vec![]\n }\n }\n\n fn run(& mut self, filename: &str) {\n let mut file = File::open(&Path::new(filename));\n let program = file.read_to_string().unwrap();\n println!(\"file: {}\", program);\n }\n}\n\n\nfn main() {\n println!(\"usage: ante filename.ante\");\n Ante::new().run(\"hello.ante\".as_slice());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add demo.rs<commit_after><|endoftext|>"} {"text":"<commit_before>use hir::AsName;\nuse syntax::{\n ast::{self, edit::AstNodeEdit, make},\n AstNode,\n};\nuse test_utils::mark;\n\nuse crate::{\n assist_context::{AssistContext, Assists},\n AssistId, AssistKind,\n};\n\n\/\/ Assist: extract_assignment\n\/\/\n\/\/ Extracts variable assigment to outside an if or match statement.\n\/\/\n\/\/ ```\n\/\/ fn main() {\n\/\/ let mut foo = 6;\n\/\/\n\/\/ if true {\n\/\/ <|>foo = 5;\n\/\/ } else {\n\/\/ foo = 4;\n\/\/ }\n\/\/ }\n\/\/ ```\n\/\/ ->\n\/\/ ```\n\/\/ fn main() {\n\/\/ let mut foo = 6;\n\/\/\n\/\/ foo = if true {\n\/\/ 5\n\/\/ } else {\n\/\/ 4\n\/\/ };\n\/\/ }\n\/\/ ```\npub(crate) fn extract_assigment(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {\n let name = ctx.find_node_at_offset::<ast::NameRef>()?.as_name();\n\n let if_statement = ctx.find_node_at_offset::<ast::IfExpr>()?;\n\n let new_stmt = exprify_if(&if_statement, &name)?.indent(if_statement.indent_level());\n let expr_stmt = make::expr_stmt(new_stmt);\n\n acc.add(\n AssistId(\"extract_assignment\", AssistKind::RefactorExtract),\n \"Extract assignment\",\n if_statement.syntax().text_range(),\n move |edit| {\n edit.replace(if_statement.syntax().text_range(), format!(\"{} = {};\", name, expr_stmt));\n },\n )\n}\n\nfn exprify_if(statement: &ast::IfExpr, name: &hir::Name) -> Option<ast::Expr> {\n let then_branch = exprify_block(&statement.then_branch()?, name)?;\n let else_branch = match statement.else_branch()? {\n ast::ElseBranch::Block(block) => ast::ElseBranch::Block(exprify_block(&block, name)?),\n ast::ElseBranch::IfExpr(expr) => {\n mark::hit!(test_extract_assigment_chained_if);\n ast::ElseBranch::IfExpr(ast::IfExpr::cast(\n exprify_if(&expr, name)?.syntax().to_owned(),\n )?)\n }\n };\n Some(make::expr_if(statement.condition()?, then_branch, Some(else_branch)))\n}\n\nfn exprify_block(block: &ast::BlockExpr, name: &hir::Name) -> Option<ast::BlockExpr> {\n if block.expr().is_some() {\n return None;\n }\n\n let mut stmts: Vec<_> = block.statements().collect();\n let stmt = stmts.pop()?;\n\n if let ast::Stmt::ExprStmt(stmt) = stmt {\n if let ast::Expr::BinExpr(expr) = stmt.expr()? {\n if expr.op_kind()? 
== ast::BinOp::Assignment\n && &expr.lhs()?.name_ref()?.as_name() == name\n {\n \/\/ The last statement in the block is an assignment to the name we want\n return Some(make::block_expr(stmts, Some(expr.rhs()?)));\n }\n }\n }\n None\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n use crate::tests::{check_assist, check_assist_not_applicable};\n\n #[test]\n fn test_extract_assignment() {\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else {\n a = 3;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n 2\n } else {\n 3\n };\n}\"#,\n );\n }\n\n #[test]\n fn test_extract_assignment_not_last_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n b = a;\n } else {\n a = 3;\n }\n}\"#,\n )\n }\n\n #[test]\n fn test_extract_assignment_chained_if() {\n mark::check!(test_extract_assigment_chained_if);\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else if false {\n a = 3;\n } else {\n a = 4;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n 2\n } else if false {\n 3\n } else {\n 4\n };\n}\"#,\n );\n }\n\n #[test]\n fn test_extract_assigment_retains_stmts() {\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n let b = 2;\n <|>a = 2;\n } else {\n let b = 3;\n a = 3;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n let b = 2;\n 2\n } else {\n let b = 3;\n 3\n };\n}\"#,\n )\n }\n\n #[test]\n fn extract_assignment_let_stmt_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n let b = if true {\n <|>a = 2\n } else {\n a = 3\n };\n}\"#,\n )\n }\n\n #[test]\n fn extract_assignment_missing_assigment_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else {}\n}\"#,\n )\n }\n}\n<commit_msg>Add support for MatchExpr to extract_assigment assist<commit_after>use hir::AsName;\nuse syntax::{\n ast::{self, edit::AstNodeEdit, make},\n AstNode,\n};\nuse test_utils::mark;\n\nuse crate::{\n assist_context::{AssistContext, Assists},\n AssistId, AssistKind,\n};\n\n\/\/ Assist: extract_assignment\n\/\/\n\/\/ Extracts variable assigment to outside an if or match statement.\n\/\/\n\/\/ ```\n\/\/ fn main() {\n\/\/ let mut foo = 6;\n\/\/\n\/\/ if true {\n\/\/ <|>foo = 5;\n\/\/ } else {\n\/\/ foo = 4;\n\/\/ }\n\/\/ }\n\/\/ ```\n\/\/ ->\n\/\/ ```\n\/\/ fn main() {\n\/\/ let mut foo = 6;\n\/\/\n\/\/ foo = if true {\n\/\/ 5\n\/\/ } else {\n\/\/ 4\n\/\/ };\n\/\/ }\n\/\/ ```\npub(crate) fn extract_assigment(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {\n let name = ctx.find_node_at_offset::<ast::NameRef>()?.as_name();\n\n let (old_stmt, new_stmt) = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() {\n (\n ast::Expr::cast(if_expr.syntax().to_owned())?,\n exprify_if(&if_expr, &name)?.indent(if_expr.indent_level()),\n )\n } else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() {\n (ast::Expr::cast(match_expr.syntax().to_owned())?, exprify_match(&match_expr, &name)?)\n } else {\n return None;\n };\n\n let expr_stmt = make::expr_stmt(new_stmt);\n\n acc.add(\n AssistId(\"extract_assignment\", AssistKind::RefactorExtract),\n \"Extract assignment\",\n old_stmt.syntax().text_range(),\n move |edit| {\n edit.replace(old_stmt.syntax().text_range(), format!(\"{} = {};\", name, 
expr_stmt));\n },\n )\n}\n\nfn exprify_match(match_expr: &ast::MatchExpr, name: &hir::Name) -> Option<ast::Expr> {\n let new_arm_list = match_expr\n .match_arm_list()?\n .arms()\n .map(|arm| {\n if let ast::Expr::BlockExpr(block) = arm.expr()? {\n let new_block = exprify_block(&block, name)?.indent(block.indent_level());\n Some(arm.replace_descendant(block, new_block))\n } else {\n None\n }\n })\n .collect::<Option<Vec<_>>>()?;\n let new_arm_list = match_expr\n .match_arm_list()?\n .replace_descendants(match_expr.match_arm_list()?.arms().zip(new_arm_list));\n Some(make::expr_match(match_expr.expr()?, new_arm_list))\n}\n\nfn exprify_if(statement: &ast::IfExpr, name: &hir::Name) -> Option<ast::Expr> {\n let then_branch = exprify_block(&statement.then_branch()?, name)?;\n let else_branch = match statement.else_branch()? {\n ast::ElseBranch::Block(ref block) => ast::ElseBranch::Block(exprify_block(block, name)?),\n ast::ElseBranch::IfExpr(expr) => {\n mark::hit!(test_extract_assigment_chained_if);\n ast::ElseBranch::IfExpr(ast::IfExpr::cast(\n exprify_if(&expr, name)?.syntax().to_owned(),\n )?)\n }\n };\n Some(make::expr_if(statement.condition()?, then_branch, Some(else_branch)))\n}\n\nfn exprify_block(block: &ast::BlockExpr, name: &hir::Name) -> Option<ast::BlockExpr> {\n if block.expr().is_some() {\n return None;\n }\n\n let mut stmts: Vec<_> = block.statements().collect();\n let stmt = stmts.pop()?;\n\n if let ast::Stmt::ExprStmt(stmt) = stmt {\n if let ast::Expr::BinExpr(expr) = stmt.expr()? {\n if expr.op_kind()? == ast::BinOp::Assignment\n && &expr.lhs()?.name_ref()?.as_name() == name\n {\n \/\/ The last statement in the block is an assignment to the name we want\n return Some(make::block_expr(stmts, Some(expr.rhs()?)));\n }\n }\n }\n None\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n use crate::tests::{check_assist, check_assist_not_applicable};\n\n #[test]\n fn test_extract_assignment_if() {\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else {\n a = 3;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n 2\n } else {\n 3\n };\n}\"#,\n );\n }\n\n #[test]\n fn test_extract_assignment_match() {\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n match 1 {\n 1 => {\n <|>a = 2;\n },\n 2 => {\n a = 3;\n },\n 3 => {\n a = 4;\n }\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = match 1 {\n 1 => {\n 2\n },\n 2 => {\n 3\n },\n 3 => {\n 4\n }\n };\n}\"#,\n );\n }\n\n #[test]\n fn test_extract_assignment_not_last_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n b = a;\n } else {\n a = 3;\n }\n}\"#,\n )\n }\n\n #[test]\n fn test_extract_assignment_chained_if() {\n mark::check!(test_extract_assigment_chained_if);\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else if false {\n a = 3;\n } else {\n a = 4;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n 2\n } else if false {\n 3\n } else {\n 4\n };\n}\"#,\n );\n }\n\n #[test]\n fn test_extract_assigment_retains_stmts() {\n check_assist(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n let b = 2;\n <|>a = 2;\n } else {\n let b = 3;\n a = 3;\n }\n}\"#,\n r#\"\nfn foo() {\n let mut a = 1;\n\n a = if true {\n let b = 2;\n 2\n } else {\n let b = 3;\n 3\n };\n}\"#,\n )\n }\n\n #[test]\n fn extract_assignment_let_stmt_not_applicable() {\n check_assist_not_applicable(\n 
extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n let b = if true {\n <|>a = 2\n } else {\n a = 3\n };\n}\"#,\n )\n }\n\n #[test]\n fn extract_assignment_if_missing_assigment_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n if true {\n <|>a = 2;\n } else {}\n}\"#,\n )\n }\n\n #[test]\n fn extract_assignment_match_missing_assigment_not_applicable() {\n check_assist_not_applicable(\n extract_assigment,\n r#\"\nfn foo() {\n let mut a = 1;\n\n match 1 {\n 1 => {\n <|>a = 2;\n },\n 2 => {\n a = 3;\n },\n 3 => {},\n }\n}\"#,\n )\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2349<commit_after>\/\/ https:\/\/leetcode.com\/problems\/design-a-number-container-system\/\nstruct NumberContainers {}\n\nimpl NumberContainers {\n fn new() -> Self {\n todo!()\n }\n\n fn change(&self, index: i32, number: i32) {\n todo!()\n }\n\n fn find(&self, number: i32) -> i32 {\n todo!()\n }\n}\n\nfn main() {\n let nc = NumberContainers::new();\n println!(\"{}\", nc.find(10)); \/\/ -1\n nc.change(2, 10);\n nc.change(1, 10);\n nc.change(3, 10);\n nc.change(5, 10);\n println!(\"{}\", nc.find(10)); \/\/ 1\n nc.change(1, 20);\n println!(\"{}\", nc.find(10)); \/\/ 2.\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Skeleton for problem 2419<commit_after>\/\/ https:\/\/leetcode.com\/problems\/longest-subarray-with-maximum-bitwise-and\/\npub fn longest_subarray(nums: Vec<i32>) -> i32 {\n todo!()\n}\n\nfn main() {\n println!(\"{}\", longest_subarray(vec![1, 2, 3, 3, 2, 2])); \/\/ 2\n println!(\"{}\", longest_subarray(vec![1, 2, 3, 4])); \/\/ 1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>New data structure: functional association list<commit_after>use std::rc::Rc;\nuse std::clone::Clone;\n\ntype Assoc<K, V> = Option<Rc<AssocNode<K, V>>>;\n\n#[derive(Debug)]\nstruct AssocNode<K : PartialEq, V> {\n k: K,\n v: V,\n next: Assoc<K,V>\n}\n\ntrait AssocFind<K: PartialEq, V> {\n fn find(&self, &K) -> Option<&V>;\n fn set(&self, K, V) -> Assoc<K, V>;\n}\n\nimpl<K : PartialEq, V> AssocFind<K, V> for Assoc<K, V> {\n fn find(&self, target: &K) -> Option<&V> {\n match self {\n &None => None,\n &Some(ref node) => {\n if (*node).k == *target {\n Some(&node.v)\n } else {\n (*node).next.find(target)\n }\n }\n }\n }\n\n fn set(&self, k: K, v: V) -> Assoc<K, V> {\n Some(Rc::new(AssocNode {\n k: k, v: v, next: self.clone()\n }))\n }\n}\n\n#[test]\nfn test_assoc() {\n let mt : Assoc<i32, i32> = None;\n let a1 = mt.set(5,6);\n let a2 = a1.set(6,7);\n let a_override = a2.set(5,500);\n let a_override2 = a2.set(5,500);\n\n assert_eq!(mt.find(&5), None);\n assert_eq!(a1.find(&6), None);\n assert_eq!(a2.find(&999), None);\n assert_eq!(a_override.find(&999), None);\n assert_eq!(a1.find(&5), Some(&6));\n assert_eq!(a2.find(&5), Some(&6));\n assert_eq!(a2.find(&6), Some(&7));\n assert_eq!(a2.find(&5), Some(&6));\n assert_eq!(a_override.find(&5), Some(&500));\n assert_eq!(a_override.find(&6), Some(&7));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! 
Tests for `[alias]` config command aliases.\n\nuse std::env;\n\nuse cargo_test_support::tools::echo_subcommand;\nuse cargo_test_support::{basic_bin_manifest, project};\n\n#[cargo_test]\nfn alias_incorrect_config_type() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = 5\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] invalid configuration for key `alias.b-cargo-test`\nexpected a list, but found a integer for [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_malformed_config_string() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = `\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] could not load Cargo configuration\n\nCaused by:\n could not parse TOML configuration in `[..]\/config`\n\nCaused by:\n [..]\n\nCaused by:\n TOML parse error at line [..]\n |\n 3 | b-cargo-test = `\n | ^\n Unexpected ```\n Expected quoted string\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_malformed_config_list() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = [1, 2]\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] could not load Cargo configuration\n\nCaused by:\n failed to load TOML configuration from `[..]\/config`\n\nCaused by:\n [..] `alias`\n\nCaused by:\n [..] `b-cargo-test`\n\nCaused by:\n expected string but found integer in list\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_config() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build\"\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\n \"\\\n[COMPILING] foo v0.5.0 [..]\n[RUNNING] `rustc --crate-name foo [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn dependent_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build\"\n a-cargo-test = [\"b-cargo-test\", \"-v\"]\n \"#,\n )\n .build();\n\n p.cargo(\"a-cargo-test\")\n .with_stderr_contains(\n \"\\\n[COMPILING] foo v0.5.0 [..]\n[RUNNING] `rustc --crate-name foo [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_shadowing_external_subcommand() {\n let echo = echo_subcommand();\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n echo = \"build\"\n \"#,\n )\n .build();\n\n let mut paths: Vec<_> = env::split_paths(&env::var_os(\"PATH\").unwrap_or_default()).collect();\n paths.push(echo.target_debug_dir());\n let path = env::join_paths(paths).unwrap();\n\n p.cargo(\"echo\")\n .env(\"PATH\", &path)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 
<https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\n[COMPILING] foo v0.5.0 [..]\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn default_args_alias() {\n let echo = echo_subcommand();\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n echo = \"echo --flag1 --flag2\"\n test-1 = \"echo\"\n build = \"build --verbose\"\n \"#,\n )\n .build();\n\n let mut paths: Vec<_> = env::split_paths(&env::var_os(\"PATH\").unwrap_or_default()).collect();\n paths.push(echo.target_debug_dir());\n let path = env::join_paths(paths).unwrap();\n\n p.cargo(\"echo\")\n .env(\"PATH\", &path)\n .with_status(101)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 <https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\nerror: alias echo has unresolvable recursive definition: echo -> echo\n\",\n )\n .run();\n\n p.cargo(\"test-1\")\n .env(\"PATH\", &path)\n .with_status(101)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 <https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\nerror: alias test-1 has unresolvable recursive definition: test-1 -> echo -> echo\n\",\n )\n .run();\n\n \/\/ Builtins are not expanded by rule\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn corecursive_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n test-1 = \"test-2 --flag1\"\n test-2 = \"test-3 --flag2\"\n test-3 = \"test-1 --flag3\"\n \"#,\n )\n .build();\n\n p.cargo(\"test-1\")\n .with_status(101)\n .with_stderr(\n \"error: alias test-1 has unresolvable recursive definition: test-1 -> test-2 -> test-3 -> test-1\",\n )\n .run();\n\n p.cargo(\"test-2\")\n .with_status(101)\n .with_stderr(\n \"error: alias test-2 has unresolvable recursive definition: test-2 -> test-3 -> test-1 -> test-2\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_list_test() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = [\"build\", \"--release\"]\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\"[COMPILING] foo v0.5.0 [..]\")\n .with_stderr_contains(\"[RUNNING] `rustc --crate-name [..]\")\n .run();\n}\n\n#[cargo_test]\nfn alias_with_flags_config() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build --release\"\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\"[COMPILING] foo v0.5.0 [..]\")\n 
.with_stderr_contains(\"[RUNNING] `rustc --crate-name foo [..]\")\n .run();\n}\n\n#[cargo_test]\nfn alias_cannot_shadow_builtin_command() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n build = \"fetch\"\n \"#,\n )\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_override_builtin_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b = \"run\"\n \"#,\n )\n .build();\n\n p.cargo(\"b\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n[RUNNING] `target\/debug\/foo[EXE]`\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn builtin_alias_takes_options() {\n \/\/ #6381\n let p = project()\n .file(\"src\/lib.rs\", \"\")\n .file(\n \"examples\/ex1.rs\",\n r#\"fn main() { println!(\"{}\", std::env::args().skip(1).next().unwrap()) }\"#,\n )\n .build();\n\n p.cargo(\"r --example ex1 -- asdf\").with_stdout(\"asdf\").run();\n}\n\n#[cargo_test]\nfn global_options_with_alias() {\n \/\/ Check that global options are passed through.\n let p = project().file(\"src\/lib.rs\", \"\").build();\n\n p.cargo(\"-v c\")\n .with_stderr(\n \"\\\n[CHECKING] foo [..]\n[RUNNING] `rustc [..]\n[FINISHED] dev [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn weird_check() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .build();\n\n p.cargo(\"-- check --invalid_argument -some-other-argument\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] trailing arguments after built-in command `check` are unsupported: `--invalid_argument -some-other-argument`\n\nTo pass the arguments to the subcommand, remove `--`\n\",\n )\n .run();\n}\n<commit_msg>Add built-in alias shadowing not warning test<commit_after>\/\/! 
Tests for `[alias]` config command aliases.\n\nuse std::env;\n\nuse cargo_test_support::tools::echo_subcommand;\nuse cargo_test_support::{basic_bin_manifest, project};\n\n#[cargo_test]\nfn alias_incorrect_config_type() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = 5\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] invalid configuration for key `alias.b-cargo-test`\nexpected a list, but found a integer for [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_malformed_config_string() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = `\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] could not load Cargo configuration\n\nCaused by:\n could not parse TOML configuration in `[..]\/config`\n\nCaused by:\n [..]\n\nCaused by:\n TOML parse error at line [..]\n |\n 3 | b-cargo-test = `\n | ^\n Unexpected ```\n Expected quoted string\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_malformed_config_list() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = [1, 2]\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] could not load Cargo configuration\n\nCaused by:\n failed to load TOML configuration from `[..]\/config`\n\nCaused by:\n [..] `alias`\n\nCaused by:\n [..] `b-cargo-test`\n\nCaused by:\n expected string but found integer in list\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_config() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build\"\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\n \"\\\n[COMPILING] foo v0.5.0 [..]\n[RUNNING] `rustc --crate-name foo [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn dependent_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build\"\n a-cargo-test = [\"b-cargo-test\", \"-v\"]\n \"#,\n )\n .build();\n\n p.cargo(\"a-cargo-test\")\n .with_stderr_contains(\n \"\\\n[COMPILING] foo v0.5.0 [..]\n[RUNNING] `rustc --crate-name foo [..]\",\n )\n .run();\n}\n\n#[cargo_test]\nfn builtin_alias_shadowing_external_subcommand() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .executable(\"cargo-t\", \"\")\n .build();\n\n let mut paths: Vec<_> = env::split_paths(&env::var_os(\"PATH\").unwrap_or_default()).collect();\n paths.push(p.root());\n let path = env::join_paths(paths).unwrap();\n\n p.cargo(\"t\")\n .env(\"PATH\", &path)\n .with_stderr(\n \"\\\n[COMPILING] foo v0.5.0 [..]\n[FINISHED] test [unoptimized + debuginfo] target(s) in [..]\n[RUNNING] unittests src\/main.rs [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_shadowing_external_subcommand() {\n let echo = echo_subcommand();\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n 
\".cargo\/config\",\n r#\"\n [alias]\n echo = \"build\"\n \"#,\n )\n .build();\n\n let mut paths: Vec<_> = env::split_paths(&env::var_os(\"PATH\").unwrap_or_default()).collect();\n paths.push(echo.target_debug_dir());\n let path = env::join_paths(paths).unwrap();\n\n p.cargo(\"echo\")\n .env(\"PATH\", &path)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 <https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\n[COMPILING] foo v0.5.0 [..]\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn default_args_alias() {\n let echo = echo_subcommand();\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n echo = \"echo --flag1 --flag2\"\n test-1 = \"echo\"\n build = \"build --verbose\"\n \"#,\n )\n .build();\n\n let mut paths: Vec<_> = env::split_paths(&env::var_os(\"PATH\").unwrap_or_default()).collect();\n paths.push(echo.target_debug_dir());\n let path = env::join_paths(paths).unwrap();\n\n p.cargo(\"echo\")\n .env(\"PATH\", &path)\n .with_status(101)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 <https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\nerror: alias echo has unresolvable recursive definition: echo -> echo\n\",\n )\n .run();\n\n p.cargo(\"test-1\")\n .env(\"PATH\", &path)\n .with_status(101)\n .with_stderr(\"\\\n[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]\/cargo-echo\/target\/debug\/cargo-echo[EXE]`\nThis was previously accepted but is being phased out; it will become a hard error in a future release.\nFor more information, see issue #10049 <https:\/\/github.com\/rust-lang\/cargo\/issues\/10049>.\nerror: alias test-1 has unresolvable recursive definition: test-1 -> echo -> echo\n\",\n )\n .run();\n\n \/\/ Builtins are not expanded by rule\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn corecursive_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n test-1 = \"test-2 --flag1\"\n test-2 = \"test-3 --flag2\"\n test-3 = \"test-1 --flag3\"\n \"#,\n )\n .build();\n\n p.cargo(\"test-1\")\n .with_status(101)\n .with_stderr(\n \"error: alias test-1 has unresolvable recursive definition: test-1 -> test-2 -> test-3 -> test-1\",\n )\n .run();\n\n p.cargo(\"test-2\")\n .with_status(101)\n .with_stderr(\n \"error: alias test-2 has unresolvable recursive definition: test-2 -> test-3 -> test-1 -> test-2\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_list_test() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = 
[\"build\", \"--release\"]\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\"[COMPILING] foo v0.5.0 [..]\")\n .with_stderr_contains(\"[RUNNING] `rustc --crate-name [..]\")\n .run();\n}\n\n#[cargo_test]\nfn alias_with_flags_config() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b-cargo-test = \"build --release\"\n \"#,\n )\n .build();\n\n p.cargo(\"b-cargo-test -v\")\n .with_stderr_contains(\"[COMPILING] foo v0.5.0 [..]\")\n .with_stderr_contains(\"[RUNNING] `rustc --crate-name foo [..]\")\n .run();\n}\n\n#[cargo_test]\nfn alias_cannot_shadow_builtin_command() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n build = \"fetch\"\n \"#,\n )\n .build();\n\n p.cargo(\"build\")\n .with_stderr(\n \"\\\n[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn alias_override_builtin_alias() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .file(\n \".cargo\/config\",\n r#\"\n [alias]\n b = \"run\"\n \"#,\n )\n .build();\n\n p.cargo(\"b\")\n .with_stderr(\n \"\\\n[COMPILING] foo v0.5.0 ([..])\n[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n[RUNNING] `target\/debug\/foo[EXE]`\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn builtin_alias_takes_options() {\n \/\/ #6381\n let p = project()\n .file(\"src\/lib.rs\", \"\")\n .file(\n \"examples\/ex1.rs\",\n r#\"fn main() { println!(\"{}\", std::env::args().skip(1).next().unwrap()) }\"#,\n )\n .build();\n\n p.cargo(\"r --example ex1 -- asdf\").with_stdout(\"asdf\").run();\n}\n\n#[cargo_test]\nfn global_options_with_alias() {\n \/\/ Check that global options are passed through.\n let p = project().file(\"src\/lib.rs\", \"\").build();\n\n p.cargo(\"-v c\")\n .with_stderr(\n \"\\\n[CHECKING] foo [..]\n[RUNNING] `rustc [..]\n[FINISHED] dev [..]\n\",\n )\n .run();\n}\n\n#[cargo_test]\nfn weird_check() {\n let p = project()\n .file(\"Cargo.toml\", &basic_bin_manifest(\"foo\"))\n .file(\"src\/main.rs\", \"fn main() {}\")\n .build();\n\n p.cargo(\"-- check --invalid_argument -some-other-argument\")\n .with_status(101)\n .with_stderr(\n \"\\\n[ERROR] trailing arguments after built-in command `check` are unsupported: `--invalid_argument -some-other-argument`\n\nTo pass the arguments to the subcommand, remove `--`\n\",\n )\n .run();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #26547 - nham:test-19538, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntrait Foo {\n fn foo<T>(&self, val: T);\n}\n\ntrait Bar: Foo { }\n\npub struct Thing;\n\nimpl Foo for Thing {\n fn foo<T>(&self, val: T) { }\n}\n\nimpl Bar for Thing { }\n\nfn main() {\n let mut thing = Thing;\n let test: &mut Bar = &mut thing;\n \/\/~^ ERROR cannot convert to a trait object because trait `Bar` is not object-safe\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct S(String);\n\nimpl S {\n fn f(self: *mut S) -> String { self.0 }\n \/\/~^ ERROR invalid `self` type\n}\n\nfn main() { S(\"\".to_owned()).f(); }\n<commit_msg>update error message in test\/compile-fail\/issue-26194.rs<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct S(String);\n\nimpl S {\n fn f(self: *mut S) -> String { self.0 }\n \/\/~^ ERROR raw pointer `self` is unstable\n}\n\nfn main() { S(\"\".to_owned()).f(); }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test for auto_serialize2<commit_after>extern mod std;\n\n\/\/ These tests used to be separate files, but I wanted to refactor all\n\/\/ the common code.\n\nuse cmp::Eq;\nuse std::ebml2;\nuse io::Writer;\nuse std::serialization2::{Serializer, Serializable, deserialize};\nuse std::prettyprint2;\n\nfn test_ser_and_deser<A:Eq Serializable>(\n a1: A,\n expected: ~str\n) {\n\n \/\/ check the pretty printer:\n let s = io::with_str_writer(|w| a1.serialize(w));\n debug!(\"s == %?\", s);\n assert s == expected;\n\n \/\/ check the EBML serializer:\n let bytes = do io::with_bytes_writer |wr| {\n let ebml_w = ebml2::Serializer(wr);\n a1.serialize(ebml_w)\n };\n let d = ebml2::Doc(@bytes);\n let a2: A = deserialize(ebml2::Deserializer(d));\n assert a1 == a2;\n}\n\n#[auto_serialize2]\nenum Expr {\n Val(uint),\n Plus(@Expr, @Expr),\n Minus(@Expr, @Expr)\n}\n\nimpl AnEnum : cmp::Eq {\n pure fn eq(&&other: AnEnum) -> bool {\n self.v == other.v\n }\n pure fn ne(&&other: AnEnum) -> bool { !self.eq(other) }\n}\n\nimpl Point : cmp::Eq {\n pure fn eq(&&other: Point) -> bool {\n self.x == other.x && self.y == other.y\n }\n pure fn ne(&&other: Point) -> bool { !self.eq(other) }\n}\n\nimpl<T:cmp::Eq> Quark<T> : cmp::Eq {\n pure fn eq(&&other: Quark<T>) -> bool {\n match self {\n Top(ref q) => match other {\n Top(ref r) => q == r,\n Bottom(_) => false\n },\n Bottom(ref q) => match other {\n Top(_) => false,\n Bottom(ref r) => q == r\n }\n }\n }\n pure fn ne(&&other: Quark<T>) -> bool { !self.eq(other) }\n}\n\nimpl CLike : cmp::Eq {\n pure fn eq(&&other: CLike) -> bool {\n self as int == other as int\n }\n 
pure fn ne(&&other: CLike) -> bool { !self.eq(other) }\n}\n\nimpl Expr : cmp::Eq {\n pure fn eq(&&other: Expr) -> bool {\n match self {\n Val(e0a) => {\n match other {\n Val(e0b) => e0a == e0b,\n _ => false\n }\n }\n Plus(e0a, e1a) => {\n match other {\n Plus(e0b, e1b) => e0a == e0b && e1a == e1b,\n _ => false\n }\n }\n Minus(e0a, e1a) => {\n match other {\n Minus(e0b, e1b) => e0a == e0b && e1a == e1b,\n _ => false\n }\n }\n }\n }\n pure fn ne(&&other: Expr) -> bool { !self.eq(other) }\n}\n\n#[auto_serialize2]\ntype Spanned<T> = {lo: uint, hi: uint, node: T};\n\nimpl<T:cmp::Eq> Spanned<T> : cmp::Eq {\n pure fn eq(&&other: Spanned<T>) -> bool {\n self.lo == other.lo && self.hi == other.hi && self.node.eq(other.node)\n }\n pure fn ne(&&other: Spanned<T>) -> bool { !self.eq(other) }\n}\n\n#[auto_serialize2]\ntype SomeRec = {v: ~[uint]};\n\n#[auto_serialize2]\nenum AnEnum = SomeRec;\n\n#[auto_serialize2]\ntype Point = {x: uint, y: uint};\n\n#[auto_serialize2]\nenum Quark<T> {\n Top(T),\n Bottom(T)\n}\n\n#[auto_serialize2]\nenum CLike { A, B, C }\n\nfn main() {\n\n test_ser_and_deser(Plus(@Minus(@Val(3u), @Val(10u)),\n @Plus(@Val(22u), @Val(5u))),\n ~\"Plus(@Minus(@Val(3u), @Val(10u)), \\\n @Plus(@Val(22u), @Val(5u)))\");\n\n test_ser_and_deser({lo: 0u, hi: 5u, node: 22u},\n ~\"{lo: 0u, hi: 5u, node: 22u}\");\n\n test_ser_and_deser(AnEnum({v: ~[1u, 2u, 3u]}),\n ~\"AnEnum({v: [1u, 2u, 3u]})\");\n\n test_ser_and_deser({x: 3u, y: 5u}, ~\"{x: 3u, y: 5u}\");\n\n test_ser_and_deser(~[1u, 2u, 3u], ~\"[1u, 2u, 3u]\");\n\n test_ser_and_deser(Top(22u), ~\"Top(22u)\");\n test_ser_and_deser(Bottom(222u), ~\"Bottom(222u)\");\n\n test_ser_and_deser(A, ~\"A\");\n test_ser_and_deser(B, ~\"B\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Tests: barebones FCS test.<commit_after>extern crate aprs;\nuse aprs::fcs;\n\nextern crate data_encoding;\nuse data_encoding::HEXLOWER as HEX;\n\n#[path=\"..\/tests\/test_constants.rs\"]\nmod test_constants;\n\n#[test]\n\/\/#[ignore]\nfn test_fcs_checksum() {\n}\n\n\n#[test]\n#[ignore]\nfn test_fcs_validate() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix broke pipe panics<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example that lists genl families<commit_after>extern crate neli;\nuse neli::consts::{CtrlAttr, CtrlCmd, GenlId, NlFamily, NlmF, Nlmsg};\nuse neli::err::NlError;\nuse neli::genl::Genlmsghdr;\nuse neli::nl::Nlmsghdr;\nuse neli::nlattr::Nlattr;\nuse neli::socket::NlSocket;\nuse neli::Nl;\nuse neli::StreamReadBuffer;\n\nconst GENL_VERSION: u8 = 2;\n\n\/\/ This example attempts to mimic the \"genl ctrl list\" command. 
For simplicity, it only outputs\n\/\/ the name and identifier of each generic netlink family.\n\nfn main() -> Result<(), NlError> {\n let mut socket = NlSocket::connect(NlFamily::Generic, None, None, true)?;\n\n let attrs: Vec<Nlattr<CtrlAttr>> = vec![];\n let genlhdr = Genlmsghdr::new(CtrlCmd::Getfamily, GENL_VERSION, attrs)?;\n let nlhdr = {\n let len = None;\n let nl_type = GenlId::Ctrl;\n let flags = vec![NlmF::Request, NlmF::Dump];\n let seq = None;\n let pid = None;\n let payload = genlhdr;\n Nlmsghdr::new(len, nl_type, flags, seq, pid, payload)\n };\n socket.send_nl(nlhdr)?;\n\n loop {\n let response = socket.recv_nl::<GenlId, Genlmsghdr<CtrlCmd>>(None)?;\n\n if let GenlId::UnrecognizedVariant(id) = response.nl_type {\n match Nlmsg::from(id) {\n \/\/ This example could be improved by reinterpreting the payload as an Nlmsgerr\n \/\/ struct and printing the specific error encountered.\n Nlmsg::Error => panic!(\"An error occurred while retrieving available families.\"),\n Nlmsg::Done => break,\n _ => {}\n }\n }\n\n let mut handle = response.nl_payload.get_attr_handle::<CtrlAttr>();\n handle.parse_nested_attributes()?;\n\n for attr in handle.iter().unwrap() {\n match &attr.nla_type {\n CtrlAttr::FamilyName => {\n let mut mem = StreamReadBuffer::new(&attr.payload);\n mem.set_size_hint(attr.payload.len() - 1);\n let name = String::deserialize(&mut mem)?;\n println!(\"{}\", name);\n }\n CtrlAttr::FamilyId => {\n let mut mem = StreamReadBuffer::new(&attr.payload);\n let id = u16::deserialize(&mut mem)?;\n println!(\"\\tID: 0x{:x}\", id);\n }\n _ => {}\n }\n }\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>devices: pcie: add pcie port structure<commit_after>\/\/ Copyright 2022 The ChromiumOS Authors.\n\/\/ Use of this source code is governed by a BSD-style license that can be\n\/\/ found in the LICENSE file.\n\nuse std::str::FromStr;\nuse std::sync::Arc;\n\nuse crate::pci::pci_configuration::PciCapabilityID;\nuse crate::pci::{MsiConfig, PciAddress, PciDeviceError};\n\nuse crate::pci::pcie::pci_bridge::PciBridgeBusRange;\nuse crate::pci::pcie::pcie_device::PmcConfig;\nuse crate::pci::pcie::pcie_host::PcieHostPort;\nuse crate::pci::pcie::*;\n\nuse base::warn;\nuse data_model::DataInit;\nuse resources::{Alloc, SystemAllocator};\nuse sync::Mutex;\n\n\/\/ reserve 8MB memory window\nconst PCIE_BR_MEM_SIZE: u64 = 0x80_0000;\n\/\/ reserve 64MB prefetch window\nconst PCIE_BR_PREF_MEM_SIZE: u64 = 0x400_0000;\n\npub struct PciePort {\n device_id: u16,\n debug_label: String,\n pci_address: Option<PciAddress>,\n bus_range: PciBridgeBusRange,\n pcie_host: Option<PcieHostPort>,\n pcie_cap_reg_idx: Option<usize>,\n pmc_cap_reg_idx: Option<usize>,\n msi_config: Option<Arc<Mutex<MsiConfig>>>,\n pmc_config: PmcConfig,\n\n slot_control: Option<u16>,\n slot_status: u16,\n root_control: u16,\n root_status: u32,\n\n hp_interrupt_pending: bool,\n pme_pending_request_id: Option<PciAddress>,\n prepare_hotplug: bool,\n removed_downstream_valid: bool,\n}\n\nimpl PciePort {\n \/\/\/ Constructs a new PCIE port\n pub fn new(\n device_id: u16,\n debug_label: String,\n primary_bus_num: u8,\n secondary_bus_num: u8,\n slot_implemented: bool,\n ) -> Self {\n let bus_range = PciBridgeBusRange {\n primary: primary_bus_num,\n secondary: secondary_bus_num,\n subordinate: secondary_bus_num,\n };\n PciePort {\n device_id,\n debug_label,\n pci_address: None,\n bus_range,\n pcie_host: None,\n pcie_cap_reg_idx: None,\n pmc_cap_reg_idx: None,\n msi_config: None,\n pmc_config: PmcConfig::new(),\n\n slot_control: 
if slot_implemented {\n Some(PCIE_SLTCTL_PIC_OFF | PCIE_SLTCTL_AIC_OFF)\n } else {\n None\n },\n slot_status: 0,\n root_control: 0,\n root_status: 0,\n\n hp_interrupt_pending: false,\n pme_pending_request_id: None,\n prepare_hotplug: false,\n removed_downstream_valid: false,\n }\n }\n\n pub fn new_from_host(pcie_host: PcieHostPort, slot_implemented: bool) -> Self {\n let bus_range = pcie_host.get_bus_range();\n PciePort {\n device_id: pcie_host.read_device_id(),\n debug_label: pcie_host.host_name(),\n pci_address: None,\n bus_range,\n pcie_host: Some(pcie_host),\n pcie_cap_reg_idx: None,\n pmc_cap_reg_idx: None,\n msi_config: None,\n pmc_config: PmcConfig::new(),\n\n slot_control: if slot_implemented {\n Some(PCIE_SLTCTL_PIC_OFF | PCIE_SLTCTL_AIC_OFF)\n } else {\n None\n },\n slot_status: 0,\n root_control: 0,\n root_status: 0,\n\n hp_interrupt_pending: false,\n pme_pending_request_id: None,\n prepare_hotplug: false,\n removed_downstream_valid: false,\n }\n }\n\n pub fn get_device_id(&self) -> u16 {\n self.device_id\n }\n\n pub fn debug_label(&self) -> String {\n self.debug_label.clone()\n }\n\n pub fn allocate_address(\n &mut self,\n resources: &mut SystemAllocator,\n ) -> std::result::Result<PciAddress, PciDeviceError> {\n if self.pci_address.is_none() {\n match &self.pcie_host {\n Some(host) => {\n let address = PciAddress::from_str(&host.host_name())\n .map_err(|e| PciDeviceError::PciAddressParseFailure(host.host_name(), e))?;\n if resources.reserve_pci(\n Alloc::PciBar {\n bus: address.bus,\n dev: address.dev,\n func: address.func,\n bar: 0,\n },\n host.host_name(),\n ) {\n self.pci_address = Some(address);\n } else {\n self.pci_address = None;\n }\n }\n None => match resources.allocate_pci(self.bus_range.primary, self.debug_label()) {\n Some(Alloc::PciBar {\n bus,\n dev,\n func,\n bar: _,\n }) => self.pci_address = Some(PciAddress { bus, dev, func }),\n _ => self.pci_address = None,\n },\n }\n }\n self.pci_address.ok_or(PciDeviceError::PciAllocationFailed)\n }\n\n fn read_pcie_cap(&self, offset: usize, data: &mut u32) {\n if offset == PCIE_SLTCTL_OFFSET {\n *data = ((self.slot_status as u32) << 16) | (self.get_slot_control() as u32);\n } else if offset == PCIE_ROOTCTL_OFFSET {\n *data = self.root_control as u32;\n } else if offset == PCIE_ROOTSTA_OFFSET {\n *data = self.root_status;\n }\n }\n\n fn write_pcie_cap(&mut self, offset: usize, data: &[u8]) {\n self.removed_downstream_valid = false;\n match offset {\n PCIE_SLTCTL_OFFSET => {\n let value = match u16::from_slice(data) {\n Some(&v) => v,\n None => {\n warn!(\"write SLTCTL isn't word, len: {}\", data.len());\n return;\n }\n };\n\n \/\/ if slot is populated, power indicator is off,\n \/\/ it will detach devices\n let old_control = self.get_slot_control();\n match self.slot_control.as_mut() {\n Some(v) => *v = value,\n None => return,\n }\n if (self.slot_status & PCIE_SLTSTA_PDS != 0)\n && (value & PCIE_SLTCTL_PIC_OFF == PCIE_SLTCTL_PIC_OFF)\n && (old_control & PCIE_SLTCTL_PIC_OFF != PCIE_SLTCTL_PIC_OFF)\n {\n self.removed_downstream_valid = true;\n self.slot_status &= !PCIE_SLTSTA_PDS;\n self.slot_status |= PCIE_SLTSTA_PDC;\n self.trigger_hp_interrupt();\n }\n\n if old_control != value {\n \/\/ send Command completed events\n self.slot_status |= PCIE_SLTSTA_CC;\n self.trigger_cc_interrupt();\n }\n }\n PCIE_SLTSTA_OFFSET => {\n if self.slot_control.is_none() {\n return;\n }\n let value = match u16::from_slice(data) {\n Some(v) => *v,\n None => {\n warn!(\"write SLTSTA isn't word, len: {}\", data.len());\n return;\n }\n };\n if 
value & PCIE_SLTSTA_ABP != 0 {\n self.slot_status &= !PCIE_SLTSTA_ABP;\n }\n if value & PCIE_SLTSTA_PFD != 0 {\n self.slot_status &= !PCIE_SLTSTA_PFD;\n }\n if value & PCIE_SLTSTA_PDC != 0 {\n self.slot_status &= !PCIE_SLTSTA_PDC;\n }\n if value & PCIE_SLTSTA_CC != 0 {\n self.slot_status &= !PCIE_SLTSTA_CC;\n }\n if value & PCIE_SLTSTA_DLLSC != 0 {\n self.slot_status &= !PCIE_SLTSTA_DLLSC;\n }\n }\n PCIE_ROOTCTL_OFFSET => match u16::from_slice(data) {\n Some(v) => self.root_control = *v,\n None => warn!(\"write root control isn't word, len: {}\", data.len()),\n },\n PCIE_ROOTSTA_OFFSET => match u32::from_slice(data) {\n Some(v) => {\n if *v & PCIE_ROOTSTA_PME_STATUS != 0 {\n if let Some(request_id) = self.pme_pending_request_id {\n self.root_status &= !PCIE_ROOTSTA_PME_PENDING;\n let req_id = ((request_id.bus as u32) << 8)\n | ((request_id.dev as u32) << 3)\n | (request_id.func as u32);\n self.root_status &= !PCIE_ROOTSTA_PME_REQ_ID_MASK;\n self.root_status |= req_id;\n self.root_status |= PCIE_ROOTSTA_PME_STATUS;\n self.pme_pending_request_id = None;\n self.trigger_pme_interrupt();\n } else {\n self.root_status &= !PCIE_ROOTSTA_PME_STATUS;\n if self.hp_interrupt_pending {\n self.hp_interrupt_pending = false;\n self.trigger_hp_interrupt();\n }\n }\n }\n }\n None => warn!(\"write root status isn't dword, len: {}\", data.len()),\n },\n _ => (),\n }\n }\n\n pub fn read_config(&self, reg_idx: usize, data: &mut u32) {\n if let Some(pcie_cap_reg_idx) = self.pcie_cap_reg_idx {\n if reg_idx >= pcie_cap_reg_idx && reg_idx < pcie_cap_reg_idx + (PCIE_CAP_LEN \/ 4) {\n let offset = (reg_idx - pcie_cap_reg_idx) * 4;\n self.read_pcie_cap(offset, data);\n }\n }\n if let Some(pmc_cap_reg_idx) = self.pmc_cap_reg_idx {\n if reg_idx == pmc_cap_reg_idx + PMC_CAP_CONTROL_STATE_OFFSET {\n self.pmc_config.read(data);\n }\n }\n if let Some(host) = &self.pcie_host {\n host.read_config(reg_idx, data);\n }\n }\n\n pub fn write_config(&mut self, reg_idx: usize, offset: u64, data: &[u8]) {\n if let Some(pcie_cap_reg_idx) = self.pcie_cap_reg_idx {\n if reg_idx >= pcie_cap_reg_idx && reg_idx < pcie_cap_reg_idx + (PCIE_CAP_LEN \/ 4) {\n let delta = ((reg_idx - pcie_cap_reg_idx) * 4) + offset as usize;\n self.write_pcie_cap(delta, data);\n }\n }\n if let Some(pmc_cap_reg_idx) = self.pmc_cap_reg_idx {\n if reg_idx == pmc_cap_reg_idx + PMC_CAP_CONTROL_STATE_OFFSET {\n let old_status = self.pmc_config.get_power_status();\n self.pmc_config.write(offset, data);\n let new_status = self.pmc_config.get_power_status();\n if old_status == PciDevicePower::D3\n && new_status == PciDevicePower::D0\n && self.prepare_hotplug\n {\n if let Some(host) = self.pcie_host.as_mut() {\n host.hotplug_probe();\n self.prepare_hotplug = false;\n }\n }\n }\n }\n if let Some(host) = self.pcie_host.as_mut() {\n host.write_config(reg_idx, offset, data);\n }\n }\n\n pub fn set_capability_reg_idx(&mut self, id: PciCapabilityID, reg_idx: usize) {\n match id {\n PciCapabilityID::PciExpress => self.pcie_cap_reg_idx = Some(reg_idx),\n PciCapabilityID::PowerManagement => self.pmc_cap_reg_idx = Some(reg_idx),\n _ => (),\n }\n }\n\n pub fn get_bus_range(&self) -> Option<PciBridgeBusRange> {\n Some(self.bus_range)\n }\n\n pub fn get_bridge_window_size(&self) -> (u64, u64) {\n if let Some(host) = &self.pcie_host {\n host.get_bridge_window_size()\n } else {\n (PCIE_BR_MEM_SIZE, PCIE_BR_PREF_MEM_SIZE)\n }\n }\n\n fn get_slot_control(&self) -> u16 {\n if let Some(slot_control) = self.slot_control {\n return slot_control;\n }\n 0\n }\n\n pub fn clone_interrupt(&mut 
self, msi_config: Arc<Mutex<MsiConfig>>) {\n self.msi_config = Some(msi_config);\n }\n\n pub fn hotplug_implemented(&self) -> bool {\n self.slot_control.is_some()\n }\n\n fn trigger_interrupt(&self) {\n if let Some(msi_config) = &self.msi_config {\n let msi_config = msi_config.lock();\n if msi_config.is_msi_enabled() {\n msi_config.trigger()\n }\n }\n }\n\n fn trigger_cc_interrupt(&self) {\n if (self.get_slot_control() & PCIE_SLTCTL_CCIE) != 0\n && (self.slot_status & PCIE_SLTSTA_CC) != 0\n {\n self.trigger_interrupt()\n }\n }\n\n fn trigger_hp_interrupt(&self) {\n let slot_control = self.get_slot_control();\n if (slot_control & PCIE_SLTCTL_HPIE) != 0\n && (self.slot_status & slot_control & (PCIE_SLTCTL_ABPE | PCIE_SLTCTL_PDCE)) != 0\n {\n self.trigger_interrupt()\n }\n }\n\n fn trigger_pme_interrupt(&self) {\n if (self.root_control & PCIE_ROOTCTL_PME_ENABLE) != 0\n && (self.root_status & PCIE_ROOTSTA_PME_STATUS) != 0\n {\n self.trigger_interrupt()\n }\n }\n\n pub fn inject_pme(&mut self) {\n if (self.root_status & PCIE_ROOTSTA_PME_STATUS) != 0 {\n self.root_status |= PCIE_ROOTSTA_PME_PENDING;\n self.pme_pending_request_id = self.pci_address;\n } else {\n let request_id = self.pci_address.unwrap();\n let req_id = ((request_id.bus as u32) << 8)\n | ((request_id.dev as u32) << 3)\n | (request_id.func as u32);\n self.root_status &= !PCIE_ROOTSTA_PME_REQ_ID_MASK;\n self.root_status |= req_id;\n self.pme_pending_request_id = None;\n self.root_status |= PCIE_ROOTSTA_PME_STATUS;\n self.trigger_pme_interrupt();\n }\n }\n\n pub fn trigger_hp_or_pme_interrupt(&mut self) {\n if self.pmc_config.should_trigger_pme() {\n self.hp_interrupt_pending = true;\n self.inject_pme();\n } else {\n self.trigger_hp_interrupt();\n }\n }\n\n pub fn is_host(&self) -> bool {\n self.pcie_host.is_some()\n }\n\n pub fn hot_unplug(&mut self) {\n if let Some(host) = self.pcie_host.as_mut() {\n host.hot_unplug()\n }\n }\n\n pub fn is_match(&self, host_addr: PciAddress) -> Option<u8> {\n let _ = self.slot_control?;\n\n if (host_addr.bus >= self.bus_range.secondary\n && host_addr.bus <= self.bus_range.subordinate)\n || self.pcie_host.is_none()\n {\n Some(self.bus_range.secondary)\n } else {\n None\n }\n }\n\n pub fn removed_downstream_valid(&self) -> bool {\n self.removed_downstream_valid\n }\n\n pub fn set_slot_status(&mut self, flag: u16) {\n self.slot_status |= flag;\n }\n\n pub fn should_trigger_pme(&mut self) -> bool {\n self.pmc_config.should_trigger_pme()\n }\n\n pub fn prepare_hotplug(&mut self) {\n self.prepare_hotplug = true;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test case for #62506.<commit_after>\/\/ Output = String caused an ICE whereas Output = &'static str compiled successfully.\n\/\/ Broken MIR: generator contains type std::string::String in MIR,\n\/\/ but typeck only knows about {<S as T>::Future, ()}\n\/\/ check-pass\n\/\/ edition:2018\n\n#![feature(async_await)]\nuse std::future::Future;\n\npub trait T {\n type Future: Future<Output = String>;\n fn bar() -> Self::Future;\n}\npub async fn foo<S>() where S: T {\n S::bar().await;\n S::bar().await;\n}\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Algorithm citation:\n\/\/! A Simple, Fast Dominance Algorithm.\n\/\/! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy\n\/\/! Rice Computer Science TS-06-33870\n\/\/! <https:\/\/www.cs.rice.edu\/~keith\/EMBED\/dom.pdf>\n\nuse super::super::indexed_vec::{Idx, IndexVec};\nuse super::iterate::reverse_post_order;\nuse super::ControlFlowGraph;\n\nuse std::fmt;\n\n#[cfg(test)]\nmod test;\n\npub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {\n let start_node = graph.start_node();\n let rpo = reverse_post_order(graph, start_node);\n dominators_given_rpo(graph, &rpo)\n}\n\npub fn dominators_given_rpo<G: ControlFlowGraph>(\n graph: &G,\n rpo: &[G::Node],\n) -> Dominators<G::Node> {\n let start_node = graph.start_node();\n assert_eq!(rpo[0], start_node);\n\n \/\/ compute the post order index (rank) for each node\n let mut post_order_rank: IndexVec<G::Node, usize> =\n IndexVec::from_elem_n(usize::default(), graph.num_nodes());\n for (index, node) in rpo.iter().rev().cloned().enumerate() {\n post_order_rank[node] = index;\n }\n\n let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =\n IndexVec::from_elem_n(Option::default(), graph.num_nodes());\n immediate_dominators[start_node] = Some(start_node);\n\n let mut changed = true;\n while changed {\n changed = false;\n\n for &node in &rpo[1..] 
{\n let mut new_idom = None;\n for pred in graph.predecessors(node) {\n if immediate_dominators[pred].is_some() {\n \/\/ (*)\n \/\/ (*) dominators for `pred` have been calculated\n new_idom = intersect_opt(\n &post_order_rank,\n &immediate_dominators,\n new_idom,\n Some(pred),\n );\n }\n }\n\n if new_idom != immediate_dominators[node] {\n immediate_dominators[node] = new_idom;\n changed = true;\n }\n }\n }\n\n Dominators {\n post_order_rank,\n immediate_dominators,\n }\n}\n\nfn intersect_opt<Node: Idx>(\n post_order_rank: &IndexVec<Node, usize>,\n immediate_dominators: &IndexVec<Node, Option<Node>>,\n node1: Option<Node>,\n node2: Option<Node>,\n) -> Option<Node> {\n match (node1, node2) {\n (None, None) => None,\n (Some(n), None) | (None, Some(n)) => Some(n),\n (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),\n }\n}\n\nfn intersect<Node: Idx>(\n post_order_rank: &IndexVec<Node, usize>,\n immediate_dominators: &IndexVec<Node, Option<Node>>,\n mut node1: Node,\n mut node2: Node,\n) -> Node {\n while node1 != node2 {\n while post_order_rank[node1] < post_order_rank[node2] {\n node1 = immediate_dominators[node1].unwrap();\n }\n\n while post_order_rank[node2] < post_order_rank[node1] {\n node2 = immediate_dominators[node2].unwrap();\n }\n }\n\n node1\n}\n\n#[derive(Clone, Debug)]\npub struct Dominators<N: Idx> {\n post_order_rank: IndexVec<N, usize>,\n immediate_dominators: IndexVec<N, Option<N>>,\n}\n\nimpl<Node: Idx> Dominators<Node> {\n pub fn is_reachable(&self, node: Node) -> bool {\n self.immediate_dominators[node].is_some()\n }\n\n pub fn immediate_dominator(&self, node: Node) -> Node {\n assert!(self.is_reachable(node), \"node {:?} is not reachable\", node);\n self.immediate_dominators[node].unwrap()\n }\n\n pub fn dominators(&self, node: Node) -> Iter<Node> {\n assert!(self.is_reachable(node), \"node {:?} is not reachable\", node);\n Iter {\n dominators: self,\n node: Some(node),\n }\n }\n\n pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {\n \/\/ FIXME -- could be optimized by using post-order-rank\n self.dominators(node).any(|n| n == dom)\n }\n\n #[cfg(test)]\n fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {\n &self.immediate_dominators\n }\n}\n\npub struct Iter<'dom, Node: Idx + 'dom> {\n dominators: &'dom Dominators<Node>,\n node: Option<Node>,\n}\n\nimpl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {\n type Item = Node;\n\n fn next(&mut self) -> Option<Self::Item> {\n if let Some(node) = self.node {\n let dom = self.dominators.immediate_dominator(node);\n if dom == node {\n self.node = None; \/\/ reached the root\n } else {\n self.node = Some(dom);\n }\n return Some(node);\n } else {\n return None;\n }\n }\n}\n\npub struct DominatorTree<N: Idx> {\n root: N,\n children: IndexVec<N, Vec<N>>,\n}\n\nimpl<Node: Idx> DominatorTree<Node> {\n pub fn children(&self, node: Node) -> &[Node] {\n &self.children[node]\n }\n}\n\nimpl<Node: Idx> fmt::Debug for DominatorTree<Node> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(\n &DominatorTreeNode {\n tree: self,\n node: self.root,\n },\n fmt,\n )\n }\n}\n\nstruct DominatorTreeNode<'tree, Node: Idx> {\n tree: &'tree DominatorTree<Node>,\n node: Node,\n}\n\nimpl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let subtrees: Vec<_> = self.tree\n .children(self.node)\n .iter()\n .map(|&child| DominatorTreeNode {\n tree: self.tree,\n node: child,\n })\n .collect();\n 
fmt.debug_tuple(\"\")\n .field(&self.node)\n .field(&subtrees)\n .finish()\n }\n}\n<commit_msg>micro-optimize dominator code<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Algorithm citation:\n\/\/! A Simple, Fast Dominance Algorithm.\n\/\/! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy\n\/\/! Rice Computer Science TS-06-33870\n\/\/! <https:\/\/www.cs.rice.edu\/~keith\/EMBED\/dom.pdf>\n\nuse super::super::indexed_vec::{Idx, IndexVec};\nuse super::iterate::reverse_post_order;\nuse super::ControlFlowGraph;\n\nuse std::fmt;\n\n#[cfg(test)]\nmod test;\n\npub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {\n let start_node = graph.start_node();\n let rpo = reverse_post_order(graph, start_node);\n dominators_given_rpo(graph, &rpo)\n}\n\npub fn dominators_given_rpo<G: ControlFlowGraph>(\n graph: &G,\n rpo: &[G::Node],\n) -> Dominators<G::Node> {\n let start_node = graph.start_node();\n assert_eq!(rpo[0], start_node);\n\n \/\/ compute the post order index (rank) for each node\n let mut post_order_rank: IndexVec<G::Node, usize> =\n (0..graph.num_nodes()).map(|_| 0).collect();\n for (index, node) in rpo.iter().rev().cloned().enumerate() {\n post_order_rank[node] = index;\n }\n\n let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =\n (0..graph.num_nodes()).map(|_| None).collect();\n immediate_dominators[start_node] = Some(start_node);\n\n let mut changed = true;\n while changed {\n changed = false;\n\n for &node in &rpo[1..] 
{\n let mut new_idom = None;\n for pred in graph.predecessors(node) {\n if immediate_dominators[pred].is_some() {\n \/\/ (*)\n \/\/ (*) dominators for `pred` have been calculated\n new_idom = intersect_opt(\n &post_order_rank,\n &immediate_dominators,\n new_idom,\n Some(pred),\n );\n }\n }\n\n if new_idom != immediate_dominators[node] {\n immediate_dominators[node] = new_idom;\n changed = true;\n }\n }\n }\n\n Dominators {\n post_order_rank,\n immediate_dominators,\n }\n}\n\nfn intersect_opt<Node: Idx>(\n post_order_rank: &IndexVec<Node, usize>,\n immediate_dominators: &IndexVec<Node, Option<Node>>,\n node1: Option<Node>,\n node2: Option<Node>,\n) -> Option<Node> {\n match (node1, node2) {\n (None, None) => None,\n (Some(n), None) | (None, Some(n)) => Some(n),\n (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),\n }\n}\n\nfn intersect<Node: Idx>(\n post_order_rank: &IndexVec<Node, usize>,\n immediate_dominators: &IndexVec<Node, Option<Node>>,\n mut node1: Node,\n mut node2: Node,\n) -> Node {\n while node1 != node2 {\n while post_order_rank[node1] < post_order_rank[node2] {\n node1 = immediate_dominators[node1].unwrap();\n }\n\n while post_order_rank[node2] < post_order_rank[node1] {\n node2 = immediate_dominators[node2].unwrap();\n }\n }\n\n node1\n}\n\n#[derive(Clone, Debug)]\npub struct Dominators<N: Idx> {\n post_order_rank: IndexVec<N, usize>,\n immediate_dominators: IndexVec<N, Option<N>>,\n}\n\nimpl<Node: Idx> Dominators<Node> {\n pub fn is_reachable(&self, node: Node) -> bool {\n self.immediate_dominators[node].is_some()\n }\n\n pub fn immediate_dominator(&self, node: Node) -> Node {\n assert!(self.is_reachable(node), \"node {:?} is not reachable\", node);\n self.immediate_dominators[node].unwrap()\n }\n\n pub fn dominators(&self, node: Node) -> Iter<Node> {\n assert!(self.is_reachable(node), \"node {:?} is not reachable\", node);\n Iter {\n dominators: self,\n node: Some(node),\n }\n }\n\n pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {\n \/\/ FIXME -- could be optimized by using post-order-rank\n self.dominators(node).any(|n| n == dom)\n }\n\n #[cfg(test)]\n fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {\n &self.immediate_dominators\n }\n}\n\npub struct Iter<'dom, Node: Idx + 'dom> {\n dominators: &'dom Dominators<Node>,\n node: Option<Node>,\n}\n\nimpl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {\n type Item = Node;\n\n fn next(&mut self) -> Option<Self::Item> {\n if let Some(node) = self.node {\n let dom = self.dominators.immediate_dominator(node);\n if dom == node {\n self.node = None; \/\/ reached the root\n } else {\n self.node = Some(dom);\n }\n return Some(node);\n } else {\n return None;\n }\n }\n}\n\npub struct DominatorTree<N: Idx> {\n root: N,\n children: IndexVec<N, Vec<N>>,\n}\n\nimpl<Node: Idx> DominatorTree<Node> {\n pub fn children(&self, node: Node) -> &[Node] {\n &self.children[node]\n }\n}\n\nimpl<Node: Idx> fmt::Debug for DominatorTree<Node> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n fmt::Debug::fmt(\n &DominatorTreeNode {\n tree: self,\n node: self.root,\n },\n fmt,\n )\n }\n}\n\nstruct DominatorTreeNode<'tree, Node: Idx> {\n tree: &'tree DominatorTree<Node>,\n node: Node,\n}\n\nimpl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n let subtrees: Vec<_> = self.tree\n .children(self.node)\n .iter()\n .map(|&child| DominatorTreeNode {\n tree: self.tree,\n node: child,\n })\n .collect();\n 
fmt.debug_tuple(\"\")\n .field(&self.node)\n .field(&subtrees)\n .finish()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement lexing of identifiers.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add core::default<commit_after>#![feature(core)]\nextern crate core;\n\n#[cfg(test)]\nmod tests {\n struct A {\n\tvalue: u32\n }\n\n impl Default for A {\n\tfn default() -> Self {\n\t A { value: 68 }\n\t}\n }\n\n #[derive(Debug, PartialEq)]\n enum Kind {\n\tA,\n\tB,\n\tC\n }\n\n impl Default for Kind {\n\tfn default() -> Kind {\n\t Kind::A\n\t}\n }\n\n #[test]\n fn default_test1() {\n\tlet a: A = A::default();\n\tassert_eq!(a.value, 68);\n }\n\n #[test]\n fn default_test2() {\n\tlet a: A = Default::default();\n\tassert_eq!(a.value, 68);\n }\n\n #[test]\n fn default_test3() {\n\tlet a: A = Default::default();\n\tassert_eq!(a.value, 68);\n }\n\n #[test]\n fn default_test4() {\n\tlet (x, y): (Option<String>, f64) = Default::default();\n\tassert_eq!(x, None::<String>);\n\tassert_eq!(y, 0.0);\n }\n\n #[test]\n fn default_test5() {\n\tlet (a, b, (c, d)): (i32, u32, (bool, bool)) = Default::default();\n\tassert_eq!(a, 0);\n\tassert_eq!(b, 0);\n\tassert_eq!(c, false);\n\tassert_eq!(d, false);\n }\n\n #[test]\n fn default_test6() {\n\tassert_eq!(<()>::default(), ());\n\tassert_eq!(bool::default(), false);\n\tassert_eq!(char::default(), '\\x00');\n\tassert_eq!(usize::default(), 0);\n\tassert_eq!(u8::default(), 0);\n\tassert_eq!(u16::default(), 0);\n\tassert_eq!(u32::default(), 0);\n\tassert_eq!(u64::default(), 0);\n\tassert_eq!(isize::default(), 0);\n\tassert_eq!(i8::default(), 0);\n\tassert_eq!(i16::default(), 0);\n\tassert_eq!(i32::default(), 0);\n\tassert_eq!(i64::default(), 0);\n\tassert_eq!(f32::default(), 0.0f32);\n\tassert_eq!(f64::default(), 0.0f64);\n }\n #[test]\n fn default_test7() {\n\tlet x: Kind = Default::default();\n\tlet y: Kind = Kind::default();\n\n\tassert_eq!(x, Kind::A);\n\tassert_eq!(y, Kind::A);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added arch<commit_after><|endoftext|>"} {"text":"<commit_before>use std::{fmt, mem, usize};\nuse std::iter::IntoIterator;\nuse std::ops::{Index, IndexMut};\nuse token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n entries: Vec<Entry<T>>,\n \/\/ Number of elements currently in the slab\n len: usize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. 
Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: usize,\n}\n\nconst MAX: usize = usize::MAX;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\n\/\/ TODO: Once NonZero lands, use it to optimize the layout\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let entries = Vec::with_capacity(cap);\n\n Slab {\n entries: entries,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.entries.capacity() - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx < self.entries.len() {\n return self.entries[idx].in_use();\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n assert!(self.contains(idx), \"slab does not contain token `{:?}`\", idx);\n\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_ref();\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_mut();\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = self.nxt;\n\n if idx == self.entries.len() {\n \/\/ Using an uninitialized entry\n if idx == self.entries.capacity() {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.entries.capacity());\n return Err(val);\n }\n\n self.entries.push(Entry {\n nxt: MAX,\n val: Some(val),\n });\n\n self.len += 1;\n self.nxt = self.len;\n }\n else {\n self.len += 1;\n self.nxt = self.entries[idx].put(val);\n }\n\n Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > self.entries.len() {\n return None;\n }\n\n match self.entries[idx].remove(self.nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter { iter: self.iter() }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> usize {\n if idx < self.entries.len() {\n return idx;\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.entries.capacity());\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: usize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: Token) -> &'a T {\n let idx = self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_ref()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: Token) -> &'a mut T {\n let idx = 
self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_mut()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.entries.capacity())\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: usize,\n val: Option<T>,\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T) -> usize{\n let ret = self.nxt;\n self.val = Some(val);\n ret\n }\n\n fn remove(&mut self, nxt: usize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n self.val.take()\n } else {\n None\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.val.is_some()\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: usize,\n yielded: usize\n}\n\nimpl<'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n match self.slab.entries[self.cur_idx].val {\n Some(ref v) => {\n self.cur_idx += 1;\n self.yielded += 1;\n return Some(v);\n }\n None => {\n self.cur_idx += 1;\n }\n }\n }\n\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n iter: SlabIter<'a, T>,\n}\n\nimpl<'a, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'a mut T;\n\n fn next(&mut self) -> Option<&'a mut T> {\n unsafe { mem::transmute(self.iter.next()) }\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Slab<T> {\n type Item = &'a T;\n type IntoIter = SlabIter<'a, T>;\n\n fn into_iter(self) -> SlabIter<'a, T> {\n self.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Slab<T> {\n type Item = &'a mut T;\n type IntoIter = SlabMutIter<'a, T>;\n\n fn into_iter(self) -> SlabMutIter<'a, T> {\n self.iter_mut()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(&slab[t1][..], \"foobar\");\n }\n\n #[test]\n #[should_panic]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n 
assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_panic]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n #[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 1, 2, 3]);\n\n slab.remove(Token(1));\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![1, 2, 3, 4]);\n\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![2, 3, 5]);\n }\n}\n<commit_msg>Remove stray assert statement in Slab<commit_after>use std::{fmt, mem, usize};\nuse std::iter::IntoIterator;\nuse std::ops::{Index, IndexMut};\nuse token::Token;\n\n\/\/\/ A preallocated chunk of memory for storing objects of the same type.\npub struct Slab<T> {\n \/\/ Chunk of memory\n entries: Vec<Entry<T>>,\n \/\/ Number of elements currently in the slab\n len: usize,\n \/\/ The token offset\n off: usize,\n \/\/ Offset of the next available slot in the slab. 
Set to the slab's\n \/\/ capacity when the slab is full.\n nxt: usize,\n}\n\nconst MAX: usize = usize::MAX;\n\nunsafe impl<T> Send for Slab<T> where T: Send {}\n\n\/\/ TODO: Once NonZero lands, use it to optimize the layout\nimpl<T> Slab<T> {\n pub fn new(cap: usize) -> Slab<T> {\n Slab::new_starting_at(Token(0), cap)\n }\n\n pub fn new_starting_at(offset: Token, cap: usize) -> Slab<T> {\n assert!(cap <= MAX, \"capacity too large\");\n \/\/ TODO:\n \/\/ - Rename to with_capacity\n \/\/ - Use a power of 2 capacity\n \/\/ - Ensure that mem size is less than usize::MAX\n\n let entries = Vec::with_capacity(cap);\n\n Slab {\n entries: entries,\n len: 0,\n off: offset.as_usize(),\n nxt: 0,\n }\n }\n\n #[inline]\n pub fn count(&self) -> usize {\n self.len as usize\n }\n\n #[inline]\n pub fn is_empty(&self) -> bool {\n self.len == 0\n }\n\n #[inline]\n pub fn remaining(&self) -> usize {\n (self.entries.capacity() - self.len) as usize\n }\n\n #[inline]\n pub fn has_remaining(&self) -> bool {\n self.remaining() > 0\n }\n\n #[inline]\n pub fn contains(&self, idx: Token) -> bool {\n if idx.as_usize() < self.off {\n return false;\n }\n\n let idx = self.token_to_idx(idx);\n\n if idx < self.entries.len() {\n return self.entries[idx].in_use();\n }\n\n false\n }\n\n pub fn get(&self, idx: Token) -> Option<&T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_ref();\n }\n }\n\n None\n }\n\n pub fn get_mut(&mut self, idx: Token) -> Option<&mut T> {\n let idx = self.token_to_idx(idx);\n\n if idx <= MAX {\n if idx < self.entries.len() {\n return self.entries[idx].val.as_mut();\n }\n }\n\n None\n }\n\n pub fn insert(&mut self, val: T) -> Result<Token, T> {\n let idx = self.nxt;\n\n if idx == self.entries.len() {\n \/\/ Using an uninitialized entry\n if idx == self.entries.capacity() {\n \/\/ No more capacity\n debug!(\"slab out of capacity; cap={}\", self.entries.capacity());\n return Err(val);\n }\n\n self.entries.push(Entry {\n nxt: MAX,\n val: Some(val),\n });\n\n self.len += 1;\n self.nxt = self.len;\n }\n else {\n self.len += 1;\n self.nxt = self.entries[idx].put(val);\n }\n\n Ok(self.idx_to_token(idx))\n }\n\n \/\/\/ Releases the given slot\n pub fn remove(&mut self, idx: Token) -> Option<T> {\n \/\/ Cast to usize\n let idx = self.token_to_idx(idx);\n\n if idx > self.entries.len() {\n return None;\n }\n\n match self.entries[idx].remove(self.nxt) {\n Some(v) => {\n self.nxt = idx;\n self.len -= 1;\n Some(v)\n }\n None => None\n }\n }\n\n pub fn iter(&self) -> SlabIter<T> {\n SlabIter {\n slab: self,\n cur_idx: 0,\n yielded: 0\n }\n }\n\n pub fn iter_mut(&mut self) -> SlabMutIter<T> {\n SlabMutIter { iter: self.iter() }\n }\n\n #[inline]\n fn validate_idx(&self, idx: usize) -> usize {\n if idx < self.entries.len() {\n return idx;\n }\n\n panic!(\"invalid index {} -- greater than capacity {}\", idx, self.entries.capacity());\n }\n\n fn token_to_idx(&self, token: Token) -> usize {\n token.as_usize() - self.off\n }\n\n fn idx_to_token(&self, idx: usize) -> Token {\n Token(idx as usize + self.off)\n }\n}\n\nimpl<T> Index<Token> for Slab<T> {\n type Output = T;\n\n fn index<'a>(&'a self, idx: Token) -> &'a T {\n let idx = self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n self.entries[idx].val.as_ref()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> IndexMut<Token> for Slab<T> {\n fn index_mut<'a>(&'a mut self, idx: Token) -> &'a mut T {\n let idx = self.token_to_idx(idx);\n let idx = self.validate_idx(idx);\n\n 
self.entries[idx].val.as_mut()\n .expect(\"invalid index\")\n }\n}\n\nimpl<T> fmt::Debug for Slab<T> {\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n write!(fmt, \"Slab {{ len: {}, cap: {} }}\", self.len, self.entries.capacity())\n }\n}\n\n\/\/ Holds the values in the slab.\nstruct Entry<T> {\n nxt: usize,\n val: Option<T>,\n}\n\nimpl<T> Entry<T> {\n #[inline]\n fn put(&mut self, val: T) -> usize{\n let ret = self.nxt;\n self.val = Some(val);\n ret\n }\n\n fn remove(&mut self, nxt: usize) -> Option<T> {\n if self.in_use() {\n self.nxt = nxt;\n self.val.take()\n } else {\n None\n }\n }\n\n #[inline]\n fn in_use(&self) -> bool {\n self.val.is_some()\n }\n}\n\npub struct SlabIter<'a, T: 'a> {\n slab: &'a Slab<T>,\n cur_idx: usize,\n yielded: usize\n}\n\nimpl<'a, T> Iterator for SlabIter<'a, T> {\n type Item = &'a T;\n\n fn next(&mut self) -> Option<&'a T> {\n while self.yielded < self.slab.len {\n match self.slab.entries[self.cur_idx].val {\n Some(ref v) => {\n self.cur_idx += 1;\n self.yielded += 1;\n return Some(v);\n }\n None => {\n self.cur_idx += 1;\n }\n }\n }\n\n None\n }\n}\n\npub struct SlabMutIter<'a, T: 'a> {\n iter: SlabIter<'a, T>,\n}\n\nimpl<'a, T> Iterator for SlabMutIter<'a, T> {\n type Item = &'a mut T;\n\n fn next(&mut self) -> Option<&'a mut T> {\n unsafe { mem::transmute(self.iter.next()) }\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Slab<T> {\n type Item = &'a T;\n type IntoIter = SlabIter<'a, T>;\n\n fn into_iter(self) -> SlabIter<'a, T> {\n self.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Slab<T> {\n type Item = &'a mut T;\n type IntoIter = SlabMutIter<'a, T>;\n\n fn into_iter(self) -> SlabMutIter<'a, T> {\n self.iter_mut()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::Slab;\n use {Token};\n\n #[test]\n fn test_insertion() {\n let mut slab = Slab::new(1);\n let token = slab.insert(10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], 10);\n }\n\n #[test]\n fn test_repeated_insertion() {\n let mut slab = Slab::new(10);\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n assert_eq!(slab[token], i + 10);\n }\n\n slab.insert(20).err().expect(\"Inserted when full\");\n }\n\n #[test]\n fn test_repeated_insertion_and_removal() {\n let mut slab = Slab::new(10);\n let mut tokens = vec![];\n\n for i in (0..10) {\n let token = slab.insert(i + 10).ok().expect(\"Failed to insert\");\n tokens.push(token);\n assert_eq!(slab[token], i + 10);\n }\n\n for &i in tokens.iter() {\n slab.remove(i);\n }\n\n slab.insert(20).ok().expect(\"Failed to insert in newly empty slab\");\n }\n\n #[test]\n fn test_insertion_when_full() {\n let mut slab = Slab::new(1);\n slab.insert(10).ok().expect(\"Failed to insert\");\n slab.insert(10).err().expect(\"Inserted into a full slab\");\n }\n\n #[test]\n fn test_removal_is_successful() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(10).ok().expect(\"Failed to insert\");\n slab.remove(t1);\n let t2 = slab.insert(20).ok().expect(\"Failed to insert\");\n assert_eq!(slab[t2], 20);\n }\n\n #[test]\n fn test_mut_retrieval() {\n let mut slab = Slab::new(1);\n let t1 = slab.insert(\"foo\".to_string()).ok().expect(\"Failed to insert\");\n\n slab[t1].push_str(\"bar\");\n\n assert_eq!(&slab[t1][..], \"foobar\");\n }\n\n #[test]\n #[should_panic]\n fn test_reusing_slots_1() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n let t1 = slab.insert(456).unwrap();\n\n assert!(slab.count() == 2);\n assert!(slab.remaining() == 14);\n\n slab.remove(t0);\n\n 
assert!(slab.count() == 1, \"actual={}\", slab.count());\n assert!(slab.remaining() == 15);\n\n slab.remove(t1);\n\n assert!(slab.count() == 0);\n assert!(slab.remaining() == 16);\n\n let _ = slab[t1];\n }\n\n #[test]\n fn test_reusing_slots_2() {\n let mut slab = Slab::new(16);\n\n let t0 = slab.insert(123).unwrap();\n\n assert!(slab[t0] == 123);\n assert!(slab.remove(t0) == Some(123));\n\n let t0 = slab.insert(456).unwrap();\n\n assert!(slab[t0] == 456);\n\n let t1 = slab.insert(789).unwrap();\n\n assert!(slab[t0] == 456);\n assert!(slab[t1] == 789);\n\n assert!(slab.remove(t0).unwrap() == 456);\n assert!(slab.remove(t1).unwrap() == 789);\n\n assert!(slab.count() == 0);\n }\n\n #[test]\n #[should_panic]\n fn test_accessing_out_of_bounds() {\n let slab = Slab::<usize>::new(16);\n slab[Token(0)];\n }\n\n #[test]\n fn test_contains() {\n let mut slab = Slab::new_starting_at(Token(5),16);\n assert!(!slab.contains(Token(0)));\n\n let tok = slab.insert(111).unwrap();\n assert!(slab.contains(tok));\n }\n\n #[test]\n fn test_iter() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 1, 2, 3]);\n\n slab.remove(Token(1));\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![0, 2, 3]);\n }\n\n #[test]\n fn test_iter_mut() {\n let mut slab: Slab<u32> = Slab::new_starting_at(Token(0), 4);\n for i in (0..4) {\n slab.insert(i).unwrap();\n }\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![1, 2, 3, 4]);\n\n slab.remove(Token(2));\n for e in slab.iter_mut() {\n *e = *e + 1;\n }\n\n let vals: Vec<u32> = slab.iter().map(|r| *r).collect();\n assert_eq!(vals, vec![2, 3, 5]);\n }\n\n #[test]\n fn test_get() {\n let mut slab = Slab::new(16);\n let tok = slab.insert(5).unwrap();\n assert_eq!(slab.get(tok), Some(&5));\n assert_eq!(slab.get(Token(1)), None);\n assert_eq!(slab.get(Token(23)), None);\n }\n\n #[test]\n fn test_get_mut() {\n let mut slab = Slab::new(16);\n let tok = slab.insert(5u32).unwrap();\n {\n let mut_ref = slab.get_mut(tok).unwrap();\n assert_eq!(*mut_ref, 5);\n *mut_ref = 12;\n }\n assert_eq!(slab[tok], 12);\n assert_eq!(slab.get_mut(Token(1)), None);\n assert_eq!(slab.get_mut(Token(23)), None);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::io::{stdout, Write};\n\nuse super::peg::Job;\nuse super::input_editor::readln;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct Variables {\n variables: BTreeMap<String, String>,\n}\n\nimpl Variables {\n pub fn new() -> Variables {\n Variables { variables: BTreeMap::new() }\n }\n\n pub fn read<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let mut out = stdout();\n for arg in args.into_iter().skip(1) {\n print!(\"{}=\", arg.as_ref().trim());\n if let Err(message) = out.flush() {\n println!(\"{}: Failed to flush stdout\", message);\n return FAILURE;\n }\n if let Some(value) = readln() {\n self.set_var(arg.as_ref(), value.trim());\n }\n }\n SUCCESS\n }\n\n pub fn let_<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let mut args = args.into_iter();\n match (args.next(), args.next()) {\n (Some(key), Some(value)) => {\n self.variables.insert(key.as_ref().to_string(), value.as_ref().to_string());\n }\n (Some(key), None) => {\n self.variables.remove(key.as_ref());\n }\n _ => {\n for (key, 
value) in self.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n }\n }\n SUCCESS\n }\n\n pub fn set_var(&mut self, name: &str, value: &str) {\n if !name.is_empty() {\n if value.is_empty() {\n self.variables.remove(&name.to_string());\n } else {\n self.variables.insert(name.to_string(), value.to_string());\n }\n }\n }\n\n pub fn expand_job(&self, job: &Job) -> Job {\n \/\/ TODO don't copy everything\n Job::from_vec_string(job.args\n .iter()\n .map(|original: &String| self.expand_string(&original).to_string())\n .collect(),\n job.background)\n }\n\n #[inline]\n fn expand_string<'a>(&'a self, original: &'a str) -> &'a str {\n if original.starts_with(\"$\") {\n if let Some(value) = self.variables.get(&original[1..]) {\n &value\n } else {\n \"\"\n }\n } else {\n original\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn undefined_variable_expands_to_empty_string() {\n let variables = Variables::new();\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", expanded);\n }\n\n #[test]\n fn let_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.let_(vec![\"FOO\", \"BAR\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", expanded);\n }\n\n #[test]\n fn set_var_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.set_var(\"FOO\", \"BAR\");\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", expanded);\n }\n\n #[test]\n fn remove_a_variable_with_let() {\n let mut variables = Variables::new();\n variables.let_(vec![\"FOO\", \"BAR\"]);\n variables.let_(vec![\"FOO\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", expanded);\n }\n}\n<commit_msg>Fix the let command<commit_after>use std::collections::BTreeMap;\nuse std::io::{stdout, Write};\n\nuse super::peg::Job;\nuse super::input_editor::readln;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct Variables {\n variables: BTreeMap<String, String>,\n}\n\nimpl Variables {\n pub fn new() -> Variables {\n Variables { variables: BTreeMap::new() }\n }\n\n pub fn read<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let mut out = stdout();\n for arg in args.into_iter().skip(1) {\n print!(\"{}=\", arg.as_ref().trim());\n if let Err(message) = out.flush() {\n println!(\"{}: Failed to flush stdout\", message);\n return FAILURE;\n }\n if let Some(value) = readln() {\n self.set_var(arg.as_ref(), value.trim());\n }\n }\n SUCCESS\n }\n\n pub fn let_<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let args = args.into_iter();\n let string: String = args.skip(1).fold(String::new(), |string, x| string + x.as_ref());\n let mut split = string.split('=');\n match (split.next().and_then(|x| if x == \"\" { None } else { Some(x) }), split.next()) {\n (Some(key), Some(value)) => {\n self.variables.insert(key.to_string(), value.to_string());\n },\n (Some(key), None) => {\n self.variables.remove(key);\n },\n _ => {\n for (key, value) in self.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n }\n }\n SUCCESS\n }\n\n pub fn set_var(&mut self, name: &str, value: &str) {\n if !name.is_empty() {\n if value.is_empty() {\n self.variables.remove(&name.to_string());\n } else {\n self.variables.insert(name.to_string(), value.to_string());\n }\n }\n }\n\n pub fn expand_job(&self, job: &Job) -> Job {\n \/\/ TODO don't copy everything\n Job::from_vec_string(job.args\n .iter()\n .map(|original: &String| self.expand_string(&original).to_string())\n 
.collect(),\n job.background)\n }\n\n #[inline]\n fn expand_string<'a>(&'a self, original: &'a str) -> &'a str {\n if original.starts_with(\"$\") {\n if let Some(value) = self.variables.get(&original[1..]) {\n &value\n } else {\n \"\"\n }\n } else {\n original\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn undefined_variable_expands_to_empty_string() {\n let variables = Variables::new();\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", expanded);\n }\n\n #[test]\n fn let_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.let_(vec![\"FOO\", \"BAR\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", expanded);\n }\n\n #[test]\n fn set_var_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.set_var(\"FOO\", \"BAR\");\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", expanded);\n }\n\n #[test]\n fn remove_a_variable_with_let() {\n let mut variables = Variables::new();\n variables.let_(vec![\"FOO\", \"BAR\"]);\n variables.let_(vec![\"FOO\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", expanded);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! A simple parser for a tiny subset of HTML.\n\/\/!\n\/\/! Can parse basic opening and closing tags, and text nodes.\n\/\/!\n\/\/! Not yet supported:\n\/\/!\n\/\/! * Attributes\n\/\/! * Comments\n\/\/! * Doctypes and processing instructions\n\/\/! * Self-closing tags\n\/\/! * Non-well-formed markup\n\/\/! * Character entities\n\nuse dom;\nuse std::collections::hashmap::HashMap;\n\n\/\/\/ Parse an HTML document and return the root element.\npub fn parse(source: String) -> dom::Node {\n let mut nodes = Parser {\n pos: 0u,\n input: source,\n }.parse_nodes();\n\n \/\/ If the document contains a root `<html>` element, just return it. 
Otherwise, create one.\n let has_root = nodes.len() == 1 && match nodes[0].node_type {\n dom::Element(ref elem) if elem.tag_name.as_slice() == \"html\" => true,\n _ => false\n };\n if has_root {\n nodes.swap_remove(0).unwrap()\n } else {\n dom::elem(\"html\".to_string(), HashMap::new(), nodes)\n }\n}\n\nstruct Parser {\n pos: uint,\n input: String,\n}\n\nimpl Parser {\n \/\/\/ Parse a sequence of sibling nodes.\n fn parse_nodes(&mut self) -> Vec<dom::Node> {\n let mut nodes = vec!();\n loop {\n self.consume_whitespace();\n if self.eof() || self.starts_with(\"<\/\") {\n break;\n }\n nodes.push(self.parse_node());\n }\n nodes\n }\n\n \/\/\/ Parse a single node.\n fn parse_node(&mut self) -> dom::Node {\n match self.next_char() {\n '<' => self.parse_element(),\n _ => self.parse_text()\n }\n }\n\n \/\/\/ Parse a single element, including its open tag, contents, and closing tag.\n fn parse_element(&mut self) -> dom::Node {\n \/\/ Opening tag.\n assert!(self.consume_char() == '<');\n let tag_name = self.parse_tag_name();\n let attrs = self.parse_attributes();\n assert!(self.consume_char() == '>');\n\n \/\/ Contents.\n let children = self.parse_nodes();\n\n \/\/ Closing tag.\n assert!(self.consume_char() == '<');\n assert!(self.consume_char() == '\/');\n assert!(self.parse_tag_name() == tag_name);\n assert!(self.consume_char() == '>');\n\n dom::elem(tag_name, attrs, children)\n }\n\n \/\/ Helper functions for parse_element:\n\n fn parse_tag_name(&mut self) -> String {\n self.consume_while(|c| match c {\n 'a'..'z' | 'A'..'Z' | '0'..'9' => true,\n _ => false\n })\n }\n\n fn parse_attributes(&mut self) -> dom::AttrMap {\n let mut attributes = HashMap::new();\n loop {\n self.consume_whitespace();\n match self.next_char() {\n '>' => break,\n _ => {\n let (name, value) = self.parse_attr();\n attributes.insert(name, value);\n }\n }\n }\n attributes\n }\n\n \/\/ name=\"value\"\n fn parse_attr(&mut self) -> (String, String) {\n let name = self.parse_tag_name();\n assert!(self.consume_char() == '=');\n let value = self.parse_attr_value();\n (name, value)\n }\n\n fn parse_attr_value(&mut self) -> String {\n let open_quote = self.consume_char();\n assert!(open_quote == '\"' || open_quote == '\\'');\n let value = self.consume_while(|c| c != open_quote);\n assert!(!self.eof() && self.consume_char() == open_quote);\n value\n }\n\n \/\/\/ Parse a text node.\n fn parse_text(&mut self) -> dom::Node {\n dom::text(self.consume_while(|c| c != '<'))\n }\n\n \/\/\/ Consume and discard zero or more whitespace characters.\n fn consume_whitespace(&mut self) {\n self.consume_while(|c| c.is_whitespace());\n }\n\n \/\/\/ Consume characters until `test` returns false.\n fn consume_while(&mut self, test: |char| -> bool) -> String {\n let mut result = String::new();\n while !self.eof() && test(self.next_char()) {\n result.push_char(self.consume_char());\n }\n result\n }\n\n \/\/\/ Return the current character, and advance self.pos to the next character.\n fn consume_char(&mut self) -> char {\n let range = self.input.as_slice().char_range_at(self.pos);\n self.pos = range.next;\n range.ch\n }\n\n \/\/\/ Read the current character without consuming it.\n fn next_char(&self) -> char {\n self.input.as_slice().char_at(self.pos)\n }\n\n \/\/\/ Does the current input start with the given string?\n fn starts_with(&self, s: &str) -> bool {\n self.input.as_slice().slice_from(self.pos).starts_with(s)\n }\n\n \/\/\/ Return true if all input is consumed.\n fn eof(&self) -> bool {\n self.pos >= self.input.len()\n }\n}\n<commit_msg>Minor code 
cleanup<commit_after>\/\/! A simple parser for a tiny subset of HTML.\n\/\/!\n\/\/! Can parse basic opening and closing tags, and text nodes.\n\/\/!\n\/\/! Not yet supported:\n\/\/!\n\/\/! * Attributes\n\/\/! * Comments\n\/\/! * Doctypes and processing instructions\n\/\/! * Self-closing tags\n\/\/! * Non-well-formed markup\n\/\/! * Character entities\n\nuse dom;\nuse std::collections::hashmap::HashMap;\n\n\/\/\/ Parse an HTML document and return the root element.\npub fn parse(source: String) -> dom::Node {\n let mut nodes = Parser {\n pos: 0u,\n input: source,\n }.parse_nodes();\n\n \/\/ If the document contains a root `<html>` element, just return it. Otherwise, create one.\n let has_root = nodes.len() == 1 && match nodes[0].node_type {\n dom::Element(ref elem) if elem.tag_name.as_slice() == \"html\" => true,\n _ => false\n };\n if has_root {\n nodes.swap_remove(0).unwrap()\n } else {\n dom::elem(\"html\".to_string(), HashMap::new(), nodes)\n }\n}\n\nstruct Parser {\n pos: uint,\n input: String,\n}\n\nimpl Parser {\n \/\/\/ Parse a sequence of sibling nodes.\n fn parse_nodes(&mut self) -> Vec<dom::Node> {\n let mut nodes = vec!();\n loop {\n self.consume_whitespace();\n if self.eof() || self.starts_with(\"<\/\") {\n break;\n }\n nodes.push(self.parse_node());\n }\n nodes\n }\n\n \/\/\/ Parse a single node.\n fn parse_node(&mut self) -> dom::Node {\n match self.next_char() {\n '<' => self.parse_element(),\n _ => self.parse_text()\n }\n }\n\n \/\/\/ Parse a single element, including its open tag, contents, and closing tag.\n fn parse_element(&mut self) -> dom::Node {\n \/\/ Opening tag.\n assert!(self.consume_char() == '<');\n let tag_name = self.parse_tag_name();\n let attrs = self.parse_attributes();\n assert!(self.consume_char() == '>');\n\n \/\/ Contents.\n let children = self.parse_nodes();\n\n \/\/ Closing tag.\n assert!(self.consume_char() == '<');\n assert!(self.consume_char() == '\/');\n assert!(self.parse_tag_name() == tag_name);\n assert!(self.consume_char() == '>');\n\n dom::elem(tag_name, attrs, children)\n }\n\n \/\/\/ Parse a tag or attribute name.\n fn parse_tag_name(&mut self) -> String {\n self.consume_while(|c| match c {\n 'a'..'z' | 'A'..'Z' | '0'..'9' => true,\n _ => false\n })\n }\n\n \/\/\/ Parse a list of name=\"value\" pairs, separated by whitespace.\n fn parse_attributes(&mut self) -> dom::AttrMap {\n let mut attributes = HashMap::new();\n loop {\n self.consume_whitespace();\n match self.next_char() {\n '>' => break,\n _ => {\n let (name, value) = self.parse_attr();\n attributes.insert(name, value);\n }\n }\n }\n attributes\n }\n\n \/\/\/ Parse a single name=\"value\" pair.\n fn parse_attr(&mut self) -> (String, String) {\n let name = self.parse_tag_name();\n assert!(self.consume_char() == '=');\n let value = self.parse_attr_value();\n (name, value)\n }\n\n \/\/\/ Parse a quoted value.\n fn parse_attr_value(&mut self) -> String {\n let open_quote = self.consume_char();\n assert!(open_quote == '\"' || open_quote == '\\'');\n let value = self.consume_while(|c| c != open_quote);\n assert!(self.consume_char() == open_quote);\n value\n }\n\n \/\/\/ Parse a text node.\n fn parse_text(&mut self) -> dom::Node {\n dom::text(self.consume_while(|c| c != '<'))\n }\n\n \/\/\/ Consume and discard zero or more whitespace characters.\n fn consume_whitespace(&mut self) {\n self.consume_while(|c| c.is_whitespace());\n }\n\n \/\/\/ Consume characters until `test` returns false.\n fn consume_while(&mut self, test: |char| -> bool) -> String {\n let mut result = String::new();\n while 
!self.eof() && test(self.next_char()) {\n result.push_char(self.consume_char());\n }\n result\n }\n\n \/\/\/ Return the current character, and advance self.pos to the next character.\n fn consume_char(&mut self) -> char {\n let range = self.input.as_slice().char_range_at(self.pos);\n self.pos = range.next;\n range.ch\n }\n\n \/\/\/ Read the current character without consuming it.\n fn next_char(&self) -> char {\n self.input.as_slice().char_at(self.pos)\n }\n\n \/\/\/ Does the current input start with the given string?\n fn starts_with(&self, s: &str) -> bool {\n self.input.as_slice().slice_from(self.pos).starts_with(s)\n }\n\n \/\/\/ Return true if all input is consumed.\n fn eof(&self) -> bool {\n self.pos >= self.input.len()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Parallel version of hello world<commit_after>fn main() {\n do 10.times {\n do spawn {\n let greeting_message = \"Hello?\";\n println(greeting_message);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Chore(clippy): allow let_and_return<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Differentiate between string and number literals.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>http: some notes and formatting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>use gui object in main<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Render multiple inputs\/outputs on nodes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added frame truncation feature.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added extra::bitv<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[allow(missing_doc)];\n\nuse std::container::Container;\nuse std::option::{Option, Some, None};\nuse std::iter::{Iterator, DoubleEndedIterator, Invert, range};\nuse std::ops;\nuse std::uint;\nuse std::fail;\n\n#[deriving(Clone)]\nstruct SmallBitv {\n \/\/\/ only the lowest nbits of this value are used. 
the rest is undefined.\n bits: uint\n}\n\n\/\/\/ a mask that has a 1 for each defined bit in a small_bitv, assuming n bits\n#[inline]\nfn small_mask(nbits: uint) -> uint {\n (1 << nbits) - 1\n}\n\nimpl SmallBitv {\n pub fn new(bits: uint) -> SmallBitv {\n SmallBitv {bits: bits}\n }\n\n #[inline]\n pub fn bits_op(&mut self,\n right_bits: uint,\n nbits: uint,\n f: |uint, uint| -> uint)\n -> bool {\n let mask = small_mask(nbits);\n let old_b: uint = self.bits;\n let new_b = f(old_b, right_bits);\n self.bits = new_b;\n mask & old_b != mask & new_b\n }\n\n #[inline]\n pub fn union(&mut self, s: &SmallBitv, nbits: uint) -> bool {\n self.bits_op(s.bits, nbits, |u1, u2| u1 | u2)\n }\n\n #[inline]\n pub fn intersect(&mut self, s: &SmallBitv, nbits: uint) -> bool {\n self.bits_op(s.bits, nbits, |u1, u2| u1 & u2)\n }\n\n #[inline]\n pub fn become(&mut self, s: &SmallBitv, nbits: uint) -> bool {\n self.bits_op(s.bits, nbits, |_u1, u2| u2)\n }\n\n #[inline]\n pub fn difference(&mut self, s: &SmallBitv, nbits: uint) -> bool {\n self.bits_op(s.bits, nbits, |u1, u2| u1 & !u2)\n }\n\n #[inline]\n pub fn get(&self, i: uint) -> bool {\n (self.bits & (1 << i)) != 0\n }\n\n #[inline]\n pub fn set(&mut self, i: uint, x: bool) {\n if x {\n self.bits |= 1<<i;\n }\n else {\n self.bits &= !(1<<i);\n }\n }\n\n #[inline]\n pub fn equals(&self, b: &SmallBitv, nbits: uint) -> bool {\n let mask = small_mask(nbits);\n mask & self.bits == mask & b.bits\n }\n\n #[inline]\n pub fn clear(&mut self) { self.bits = 0; }\n\n #[inline]\n pub fn set_all(&mut self) { self.bits = !0; }\n\n #[inline]\n pub fn is_true(&self, nbits: uint) -> bool {\n small_mask(nbits) & !self.bits == 0\n }\n\n #[inline]\n pub fn is_false(&self, nbits: uint) -> bool {\n small_mask(nbits) & self.bits == 0\n }\n\n #[inline]\n pub fn negate(&mut self) { self.bits = !self.bits; }\n}\n\n#[deriving(Clone)]\nenum BitvVariant { Small(SmallBitv) }\n\nenum Op {Union, Intersect, Assign, Difference}\n\n\/\/\/ The bitvector type\n#[deriving(Clone)]\npub struct Bitv {\n \/\/\/ Internal representation of the bit vector (small or large)\n priv rep: BitvVariant,\n \/\/\/ The number of valid bits in the internal representation\n priv nbits: uint\n}\n\nfn die() -> ! {\n fail::abort();\n}\n\nimpl Bitv {\n #[inline]\n fn do_op(&mut self, op: Op, other: &Bitv) -> bool {\n if self.nbits != other.nbits {\n die();\n }\n match self.rep {\n Small(ref mut s) => match other.rep {\n Small(ref s1) => match op {\n Union => s.union(s1, self.nbits),\n Intersect => s.intersect(s1, self.nbits),\n Assign => s.become(s1, self.nbits),\n Difference => s.difference(s1, self.nbits)\n }\n }\n }\n }\n}\n\nimpl Bitv {\n pub fn new(nbits: uint, init: bool) -> Bitv {\n let rep = if nbits <= uint::bits {\n Small(SmallBitv::new(if init {!0} else {0}))\n }\n else { die() };\n Bitv {rep: rep, nbits: nbits}\n }\n\n \/**\n * Calculates the union of two bitvectors\n *\n * Sets `self` to the union of `self` and `v1`. Both bitvectors must be\n * the same length. Returns `true` if `self` changed.\n *\/\n #[inline]\n pub fn union(&mut self, v1: &Bitv) -> bool { self.do_op(Union, v1) }\n\n \/**\n * Calculates the intersection of two bitvectors\n *\n * Sets `self` to the intersection of `self` and `v1`. Both bitvectors\n * must be the same length. Returns `true` if `self` changed.\n *\/\n #[inline]\n pub fn intersect(&mut self, v1: &Bitv) -> bool {\n self.do_op(Intersect, v1)\n }\n\n \/**\n * Assigns the value of `v1` to `self`\n *\n * Both bitvectors must be the same length. 
Returns `true` if `self` was\n * changed\n *\/\n #[inline]\n pub fn assign(&mut self, v: &Bitv) -> bool { self.do_op(Assign, v) }\n\n \/\/\/ Retrieve the value at index `i`\n #[inline]\n pub fn get(&self, i: uint) -> bool {\n \/\/ assert!((i < self.nbits));\n match self.rep {\n Small(ref s) => s.get(i)\n }\n }\n\n \/**\n * Set the value of a bit at a given index\n *\n * `i` must be less than the length of the bitvector.\n *\/\n #[inline]\n pub fn set(&mut self, i: uint, x: bool) {\n \/\/ assert!((i < self.nbits));\n match self.rep {\n Small(ref mut s) => s.set(i, x)\n }\n }\n\n \/**\n * Compares two bitvectors\n *\n * Both bitvectors must be the same length. Returns `true` if both\n * bitvectors contain identical elements.\n *\/\n #[inline]\n pub fn equal(&self, v1: &Bitv) -> bool {\n if self.nbits != v1.nbits { return false; }\n match self.rep {\n Small(ref b) => match v1.rep {\n Small(ref b1) => b.equals(b1, self.nbits),\n }\n }\n }\n\n \/\/\/ Set all bits to 0\n #[inline]\n pub fn clear(&mut self) {\n match self.rep {\n Small(ref mut b) => b.clear(),\n }\n }\n\n \/\/\/ Set all bits to 1\n #[inline]\n pub fn set_all(&mut self) {\n match self.rep {\n Small(ref mut b) => b.set_all(),\n }\n }\n\n \/\/\/ Invert all bits\n #[inline]\n pub fn negate(&mut self) {\n match self.rep {\n Small(ref mut s) => s.negate(),\n }\n }\n\n \/**\n * Calculate the difference between two bitvectors\n *\n * Sets each element of `v0` to the value of that element minus the\n * element of `v1` at the same index. Both bitvectors must be the same\n * length.\n *\n * Returns `true` if `v0` was changed.\n *\/\n #[inline]\n pub fn difference(&mut self, v: &Bitv) -> bool {\n self.do_op(Difference, v)\n }\n\n \/\/\/ Returns `true` if all bits are 1\n #[inline]\n pub fn is_true(&self) -> bool {\n match self.rep {\n Small(ref b) => b.is_true(self.nbits),\n }\n }\n\n #[inline]\n pub fn iter<'a>(&'a self) -> BitvIterator<'a> {\n BitvIterator {bitv: self, next_idx: 0, end_idx: self.nbits}\n }\n\n #[inline]\n pub fn rev_iter<'a>(&'a self) -> Invert<BitvIterator<'a>> {\n self.iter().invert()\n }\n\n \/\/\/ Returns `true` if all bits are 0\n pub fn is_false(&self) -> bool {\n match self.rep {\n Small(ref b) => b.is_false(self.nbits),\n }\n }\n\n pub fn init_to_vec(&self, i: uint) -> uint {\n return if self.get(i) { 1 } else { 0 };\n }\n\n \/**\n * Compare a bitvector to a vector of `bool`.\n *\n * Both the bitvector and vector must have the same length.\n *\/\n pub fn eq_vec(&self, v: &[bool]) -> bool {\n \/\/ assert_eq!(self.nbits, v.len());\n let mut i = 0;\n while i < self.nbits {\n if self.get(i) != v[i] { return false; }\n i = i + 1;\n }\n true\n }\n\n pub fn ones(&self, f: |uint| -> bool) -> bool {\n range(0u, self.nbits).advance(|i| !self.get(i) || f(i))\n }\n\n}\n\n\/**\n * Transform a byte-vector into a `Bitv`. Each byte becomes 8 bits,\n * with the most significant bits of each byte coming first. 
Each\n * bit becomes `true` if equal to 1 or `false` if equal to 0.\n *\/\npub fn from_bytes(bytes: &[u8]) -> Bitv {\n from_fn(bytes.len() * 8, |i| {\n let b = bytes[i \/ 8] as uint;\n let offset = i % 8;\n b >> (7 - offset) & 1 == 1\n })\n}\n\n\/**\n * Transform a `[bool]` into a `Bitv` by converting each `bool` into a bit.\n *\/\npub fn from_bools(bools: &[bool]) -> Bitv {\n from_fn(bools.len(), |i| bools[i])\n}\n\n\/**\n * Create a `Bitv` of the specified length where the value at each\n * index is `f(index)`.\n *\/\npub fn from_fn(len: uint, f: |index: uint| -> bool) -> Bitv {\n let mut bitv = Bitv::new(len, false);\n for i in range(0u, len) {\n bitv.set(i, f(i));\n }\n bitv\n}\n\nimpl ops::Index<uint,bool> for Bitv {\n fn index(&self, i: &uint) -> bool {\n self.get(*i)\n }\n}\n\n\/\/\/ An iterator for `Bitv`.\npub struct BitvIterator<'a> {\n priv bitv: &'a Bitv,\n priv next_idx: uint,\n priv end_idx: uint,\n}\n\nimpl<'a> Iterator<bool> for BitvIterator<'a> {\n #[inline]\n fn next(&mut self) -> Option<bool> {\n if self.next_idx != self.end_idx {\n let idx = self.next_idx;\n self.next_idx += 1;\n Some(self.bitv.get(idx))\n } else {\n None\n }\n }\n\n fn size_hint(&self) -> (uint, Option<uint>) {\n let rem = self.end_idx - self.next_idx;\n (rem, Some(rem))\n }\n}\n\nimpl<'a> DoubleEndedIterator<bool> for BitvIterator<'a> {\n #[inline]\n fn next_back(&mut self) -> Option<bool> {\n if self.next_idx != self.end_idx {\n self.end_idx -= 1;\n Some(self.bitv.get(self.end_idx))\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reduce the detail level to something that retains a decent framerate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix formatted output, use seperator for lists<commit_after><|endoftext|>"} {"text":"<commit_before>use toml::Value;\nuse hook::position::HookPosition;\n\n\/\/\/ Check whether the configuration is valid for the store\n\/\/\/\n\/\/\/ The passed `Value` _must be_ the `[store]` sub-tree of the configuration. 
Otherwise this will\n\/\/\/ fail.\n\/\/\/\n\/\/\/ It checks whether the configuration looks like the store wants it to be:\n\/\/\/\n\/\/\/ ```toml\n\/\/\/ [store]\n\/\/\/ pre-create-hook-aspects = [ \"misc\", \"encryption\", \"version-control\"]\n\/\/\/\n\/\/\/ [[aspects.misc]]\n\/\/\/ parallel = true\n\/\/\/ [[aspects.encryption]]\n\/\/\/ parallel = false\n\/\/\/ [[aspects.version-control]]\n\/\/\/ parallel = false\n\/\/\/\n\/\/\/ [[hooks.gnupg]]\n\/\/\/ aspect = \"encryption\"\n\/\/\/ key = \"0x123456789\"\n\/\/\/\n\/\/\/ [[hooks.git]]\n\/\/\/ aspect = \"version-control\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ It checks:\n\/\/\/ * Whether all the maps are there (whether store, store.aspects, store.aspects.example are all\n\/\/\/ maps)\n\/\/\/ * Whether each aspect configuration has a \"parallel = <Boolean>\" setting\n\/\/\/ * Whether each hook congfiguration has a \"aspect = <String>\" setting\n\/\/\/\n\/\/\/ It does NOT check:\n\/\/\/ * Whether all aspects which are used in the hook configuration are also configured\n\/\/\/\npub fn config_is_valid(config: &Value) -> bool {\n use std::collections::BTreeMap;\n\n fn has_key_with_map(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key).map(|t| match t { &Value::Table(_) => true, _ => false }).unwrap_or(false)\n }\n\n fn has_key_with_string_ary(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key)\n .map(|t| match t {\n &Value::Array(ref a) => a.iter().all(|elem| {\n match elem {\n &Value::String(_) => true,\n _ => false,\n }\n }),\n _ => false\n }).unwrap_or(false)\n }\n\n \/\/\/ Check that\n \/\/\/ * the top-level configuration\n \/\/\/ * is a table\n \/\/\/ * where all entries of a key `section` (eg. \"hooks\" or \"aspects\")\n \/\/\/ * Are maps\n \/\/\/ * where each has a key `key` (eg. \"aspect\" or \"parallel\")\n \/\/\/ * which fullfills constraint `f` (typecheck)\n fn check_all_inner_maps_have_key_with<F>(store_config: &BTreeMap<String, Value>,\n section: &str,\n key: &str,\n f: F)\n -> bool\n where F: Fn(&Value) -> bool\n {\n store_config.get(section) \/\/ The store config has the section `section`\n .map(|section_table| {\n match section_table { \/\/ which is\n &Value::Table(ref section_table) => \/\/ a table\n section_table\n .values() \/\/ which has values,\n .all(|cfg| { \/\/ and all of these values\n match cfg {\n &Value::Table(ref hook_config) => { \/\/ are tables\n hook_config.get(key) \/\/ with a key\n \/\/ fullfilling this constraint\n .map(|hook_aspect| f(&hook_aspect))\n .unwrap_or(false)\n },\n _ => false,\n }\n }),\n _ => false,\n }\n })\n .unwrap_or(false)\n }\n\n match config {\n &Value::Table(ref t) => {\n has_key_with_string_ary(t, \"pre-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-delete-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-delete-hook-aspects\") &&\n\n \/\/ The section \"hooks\" has maps which have a key \"aspect\" which has a value of type\n \/\/ String\n check_all_inner_maps_have_key_with(t, \"hooks\", \"aspect\", |asp| {\n match asp { &Value::String(_) => true, _ => false }\n }) &&\n\n \/\/ The section \"aspects\" has maps which have a key 
\"parllel\" which has a value of type\n \/\/ Boolean\n check_all_inner_maps_have_key_with(t, \"aspects\", \"parallel\", |asp| {\n match asp { &Value::Boolean(_) => true, _ => false, }\n })\n }\n _ => false,\n }\n}\n\npub fn get_pre_read_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-read-hook-aspects\", value)\n}\n\npub fn get_post_read_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-read-hook-aspects\", value)\n}\n\npub fn get_pre_create_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-create-hook-aspects\", value)\n}\n\npub fn get_post_create_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-create-hook-aspects\", value)\n}\n\npub fn get_pre_retrieve_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-retrieve-hook-aspects\", value)\n}\n\npub fn get_post_retrieve_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-retrieve-hook-aspects\", value)\n}\n\npub fn get_pre_update_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-update-hook-aspects\", value)\n}\n\npub fn get_post_update_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-update-hook-aspects\", value)\n}\n\npub fn get_pre_delete_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-delete-hook-aspects\", value)\n}\n\npub fn get_post_delete_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-delete-hook-aspects\", value)\n}\n\nfn get_aspect_names_for_aspect_position(config_name: &'static str, value: &Value) -> Vec<String> {\n let mut v = vec![];\n\n match value {\n &Value::Table(ref t) => {\n match t.get(config_name) {\n Some(&Value::Array(ref a)) => {\n for elem in a {\n match elem {\n &Value::String(ref s) => v.push(s.clone()),\n _ => warn!(\"Non-String in configuration, inside '{}'\", config_name),\n }\n }\n },\n _ => warn!(\"'{}' configuration key should contain Array, does not\", config_name),\n };\n },\n _ => warn!(\"Configuration is not a table\"),\n }\n v\n}\n\n\n<commit_msg>Add AspectConfig type<commit_after>use toml::Value;\nuse hook::position::HookPosition;\n\n\/\/\/ Check whether the configuration is valid for the store\n\/\/\/\n\/\/\/ The passed `Value` _must be_ the `[store]` sub-tree of the configuration. 
Otherwise this will\n\/\/\/ fail.\n\/\/\/\n\/\/\/ It checks whether the configuration looks like the store wants it to be:\n\/\/\/\n\/\/\/ ```toml\n\/\/\/ [store]\n\/\/\/ pre-create-hook-aspects = [ \"misc\", \"encryption\", \"version-control\"]\n\/\/\/\n\/\/\/ [[aspects.misc]]\n\/\/\/ parallel = true\n\/\/\/ [[aspects.encryption]]\n\/\/\/ parallel = false\n\/\/\/ [[aspects.version-control]]\n\/\/\/ parallel = false\n\/\/\/\n\/\/\/ [[hooks.gnupg]]\n\/\/\/ aspect = \"encryption\"\n\/\/\/ key = \"0x123456789\"\n\/\/\/\n\/\/\/ [[hooks.git]]\n\/\/\/ aspect = \"version-control\"\n\/\/\/ ```\n\/\/\/\n\/\/\/ It checks:\n\/\/\/ * Whether all the maps are there (whether store, store.aspects, store.aspects.example are all\n\/\/\/ maps)\n\/\/\/ * Whether each aspect configuration has a \"parallel = <Boolean>\" setting\n\/\/\/ * Whether each hook congfiguration has a \"aspect = <String>\" setting\n\/\/\/\n\/\/\/ It does NOT check:\n\/\/\/ * Whether all aspects which are used in the hook configuration are also configured\n\/\/\/\npub fn config_is_valid(config: &Value) -> bool {\n use std::collections::BTreeMap;\n\n fn has_key_with_map(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key).map(|t| match t { &Value::Table(_) => true, _ => false }).unwrap_or(false)\n }\n\n fn has_key_with_string_ary(v: &BTreeMap<String, Value>, key: &str) -> bool {\n v.get(key)\n .map(|t| match t {\n &Value::Array(ref a) => a.iter().all(|elem| {\n match elem {\n &Value::String(_) => true,\n _ => false,\n }\n }),\n _ => false\n }).unwrap_or(false)\n }\n\n \/\/\/ Check that\n \/\/\/ * the top-level configuration\n \/\/\/ * is a table\n \/\/\/ * where all entries of a key `section` (eg. \"hooks\" or \"aspects\")\n \/\/\/ * Are maps\n \/\/\/ * where each has a key `key` (eg. \"aspect\" or \"parallel\")\n \/\/\/ * which fullfills constraint `f` (typecheck)\n fn check_all_inner_maps_have_key_with<F>(store_config: &BTreeMap<String, Value>,\n section: &str,\n key: &str,\n f: F)\n -> bool\n where F: Fn(&Value) -> bool\n {\n store_config.get(section) \/\/ The store config has the section `section`\n .map(|section_table| {\n match section_table { \/\/ which is\n &Value::Table(ref section_table) => \/\/ a table\n section_table\n .values() \/\/ which has values,\n .all(|cfg| { \/\/ and all of these values\n match cfg {\n &Value::Table(ref hook_config) => { \/\/ are tables\n hook_config.get(key) \/\/ with a key\n \/\/ fullfilling this constraint\n .map(|hook_aspect| f(&hook_aspect))\n .unwrap_or(false)\n },\n _ => false,\n }\n }),\n _ => false,\n }\n })\n .unwrap_or(false)\n }\n\n match config {\n &Value::Table(ref t) => {\n has_key_with_string_ary(t, \"pre-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-read-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-create-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-retrieve-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-update-hook-aspects\") &&\n has_key_with_string_ary(t, \"pre-delete-hook-aspects\") &&\n has_key_with_string_ary(t, \"post-delete-hook-aspects\") &&\n\n \/\/ The section \"hooks\" has maps which have a key \"aspect\" which has a value of type\n \/\/ String\n check_all_inner_maps_have_key_with(t, \"hooks\", \"aspect\", |asp| {\n match asp { &Value::String(_) => true, _ => false }\n }) &&\n\n \/\/ The section \"aspects\" has maps which have a key 
\"parllel\" which has a value of type\n \/\/ Boolean\n check_all_inner_maps_have_key_with(t, \"aspects\", \"parallel\", |asp| {\n match asp { &Value::Boolean(_) => true, _ => false, }\n })\n }\n _ => false,\n }\n}\n\npub fn get_pre_read_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-read-hook-aspects\", value)\n}\n\npub fn get_post_read_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-read-hook-aspects\", value)\n}\n\npub fn get_pre_create_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-create-hook-aspects\", value)\n}\n\npub fn get_post_create_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-create-hook-aspects\", value)\n}\n\npub fn get_pre_retrieve_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-retrieve-hook-aspects\", value)\n}\n\npub fn get_post_retrieve_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-retrieve-hook-aspects\", value)\n}\n\npub fn get_pre_update_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-update-hook-aspects\", value)\n}\n\npub fn get_post_update_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-update-hook-aspects\", value)\n}\n\npub fn get_pre_delete_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"pre-delete-hook-aspects\", value)\n}\n\npub fn get_post_delete_aspect_names(value: &Value) -> Vec<String> {\n get_aspect_names_for_aspect_position(\"post-delete-hook-aspects\", value)\n}\n\n#[derive(Debug)]\npub struct AspectConfig {\n parallel: bool,\n config: Value,\n}\n\nimpl AspectConfig {\n\n pub fn new(init: Value) -> AspectConfig {\n let parallel = AspectConfig::is_parallel(&init);\n AspectConfig {\n config: init,\n parallel: parallel,\n }\n }\n\n pub fn config(&self) -> &Value {\n &self.config\n }\n\n fn is_parallel(init: &Value) -> bool {\n match init {\n &Value::Table(ref t) =>\n t.get(\"parallel\")\n .map(|value| {\n match value {\n &Value::Boolean(b) => b,\n _ => false,\n }\n })\n .unwrap_or(false),\n _ => false,\n }\n }\n\n \/\/\/ Get the aspect configuration for an aspect.\n \/\/\/\n \/\/\/ Pass the store configuration object, this searches in `[aspects][<aspect_name>]`.\n \/\/\/\n \/\/\/ Returns `None` if one of the keys in the chain is not available\n fn get_for(v: Value, aspect_name: &str) -> Option<AspectConfig> {\n unimplemented!()\n }\n\n}\n\nfn get_aspect_names_for_aspect_position(config_name: &'static str, value: &Value) -> Vec<String> {\n let mut v = vec![];\n\n match value {\n &Value::Table(ref t) => {\n match t.get(config_name) {\n Some(&Value::Array(ref a)) => {\n for elem in a {\n match elem {\n &Value::String(ref s) => v.push(s.clone()),\n _ => warn!(\"Non-String in configuration, inside '{}'\", config_name),\n }\n }\n },\n _ => warn!(\"'{}' configuration key should contain Array, does not\", config_name),\n };\n },\n _ => warn!(\"Configuration is not a table\"),\n }\n v\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make memory_address public<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Improve early-out logic for background pixels<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Move duration+envelope clock counters to vsu<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make example even 
better<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix for flash clock config at frequency 20<x<=40<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of the install aspects of the compiler.\n\/\/!\n\/\/! This module is responsible for installing the standard library,\n\/\/! compiler, and documentation.\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf, Component};\nuse std::process::Command;\n\nuse dist::{self, pkgname, sanitize_sh, tmpdir};\n\nuse builder::{Builder, RunConfig, ShouldRun, Step};\nuse cache::Interned;\n\npub fn install_docs(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"docs\", \"rust-docs\", stage, Some(host));\n}\n\npub fn install_std(builder: &Builder, stage: u32) {\n for target in &builder.build.targets {\n install_sh(builder, \"std\", \"rust-std\", stage, Some(*target));\n }\n}\n\npub fn install_cargo(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"cargo\", \"cargo\", stage, Some(host));\n}\n\npub fn install_rls(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"rls\", \"rls\", stage, Some(host));\n}\n\npub fn install_analysis(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"analysis\", \"rust-analysis\", stage, Some(host));\n}\n\npub fn install_src(builder: &Builder, stage: u32) {\n install_sh(builder, \"src\", \"rust-src\", stage, None);\n}\npub fn install_rustc(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"rustc\", \"rustc\", stage, Some(host));\n}\n\nfn install_sh(\n builder: &Builder,\n package: &str,\n name: &str,\n stage: u32,\n host: Option<Interned<String>>\n) {\n let build = builder.build;\n println!(\"Install {} stage{} ({:?})\", package, stage, host);\n\n let prefix_default = PathBuf::from(\"\/usr\/local\");\n let sysconfdir_default = PathBuf::from(\"\/etc\");\n let docdir_default = PathBuf::from(\"share\/doc\/rust\");\n let bindir_default = PathBuf::from(\"bin\");\n let libdir_default = PathBuf::from(\"lib\");\n let mandir_default = PathBuf::from(\"share\/man\");\n let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);\n let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);\n let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);\n let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);\n let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);\n let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);\n\n let sysconfdir = prefix.join(sysconfdir);\n let docdir = prefix.join(docdir);\n let bindir = prefix.join(bindir);\n let libdir = prefix.join(libdir);\n let mandir = prefix.join(mandir);\n\n let destdir = env::var_os(\"DESTDIR\").map(PathBuf::from);\n\n let prefix = add_destdir(&prefix, &destdir);\n let sysconfdir = add_destdir(&sysconfdir, &destdir);\n let docdir = add_destdir(&docdir, &destdir);\n let bindir = add_destdir(&bindir, &destdir);\n 
let libdir = add_destdir(&libdir, &destdir);\n let mandir = add_destdir(&mandir, &destdir);\n\n let empty_dir = build.out.join(\"tmp\/empty_dir\");\n\n t!(fs::create_dir_all(&empty_dir));\n let package_name = if let Some(host) = host {\n format!(\"{}-{}\", pkgname(build, name), host)\n } else {\n pkgname(build, name)\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.current_dir(&empty_dir)\n .arg(sanitize_sh(&tmpdir(build).join(&package_name).join(\"install.sh\")))\n .arg(format!(\"--prefix={}\", sanitize_sh(&prefix)))\n .arg(format!(\"--sysconfdir={}\", sanitize_sh(&sysconfdir)))\n .arg(format!(\"--docdir={}\", sanitize_sh(&docdir)))\n .arg(format!(\"--bindir={}\", sanitize_sh(&bindir)))\n .arg(format!(\"--libdir={}\", sanitize_sh(&libdir)))\n .arg(format!(\"--mandir={}\", sanitize_sh(&mandir)))\n .arg(\"--disable-ldconfig\");\n build.run(&mut cmd);\n t!(fs::remove_dir_all(&empty_dir));\n}\n\nfn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {\n let mut ret = match *destdir {\n Some(ref dest) => dest.clone(),\n None => return path.to_path_buf(),\n };\n for part in path.components() {\n match part {\n Component::Normal(s) => ret.push(s),\n _ => {}\n }\n }\n ret\n}\n\nmacro_rules! install {\n (($sel:ident, $builder:ident, $_config:ident),\n $($name:ident,\n $path:expr,\n $default_cond:expr,\n only_hosts: $only_hosts:expr,\n $run_item:block $(, $c:ident)*;)+) => {\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub stage: u32,\n pub target: Interned<String>,\n pub host: Interned<String>,\n }\n\n impl Step for $name {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_BUILD_TARGETS: bool = true;\n const ONLY_HOSTS: bool = $only_hosts;\n $(const $c: bool = true;)*\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let $_config = &run.builder.config;\n run.path($path).default_condition($default_cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n stage: run.builder.top_stage,\n target: run.target,\n host: run.host,\n });\n }\n\n fn run($sel, $builder: &Builder) {\n $run_item\n }\n })+\n }\n}\n\ninstall!((self, builder, _config),\n Docs, \"src\/doc\", _config.docs, only_hosts: false, {\n builder.ensure(dist::Docs { stage: self.stage, host: self.target });\n install_docs(builder, self.stage, self.target);\n };\n Std, \"src\/libstd\", true, only_hosts: true, {\n builder.ensure(dist::Std {\n compiler: builder.compiler(self.stage, self.host),\n target: self.target\n });\n install_std(builder, self.stage);\n };\n Cargo, \"cargo\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Cargo { stage: self.stage, target: self.target });\n install_cargo(builder, self.stage, self.target);\n };\n Rls, \"rls\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Rls { stage: self.stage, target: self.target });\n install_rls(builder, self.stage, self.target);\n };\n Analysis, \"analysis\", _config.extended, only_hosts: false, {\n builder.ensure(dist::Analysis {\n compiler: builder.compiler(self.stage, self.host),\n target: self.target\n });\n install_analysis(builder, self.stage, self.target);\n };\n Src, \"src\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Src);\n install_src(builder, self.stage);\n }, ONLY_BUILD;\n Rustc, \"src\/librustc\", true, only_hosts: true, {\n builder.ensure(dist::Rustc {\n compiler: builder.compiler(self.stage, self.target),\n });\n install_rustc(builder, self.stage, self.target);\n };\n);\n<commit_msg>Ensure dist::Std for every libstd target. 
(Closes: #42320)<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of the install aspects of the compiler.\n\/\/!\n\/\/! This module is responsible for installing the standard library,\n\/\/! compiler, and documentation.\n\nuse std::env;\nuse std::fs;\nuse std::path::{Path, PathBuf, Component};\nuse std::process::Command;\n\nuse dist::{self, pkgname, sanitize_sh, tmpdir};\n\nuse builder::{Builder, RunConfig, ShouldRun, Step};\nuse cache::Interned;\n\npub fn install_docs(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"docs\", \"rust-docs\", stage, Some(host));\n}\n\npub fn install_std(builder: &Builder, stage: u32, target: Interned<String>) {\n install_sh(builder, \"std\", \"rust-std\", stage, Some(target));\n}\n\npub fn install_cargo(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"cargo\", \"cargo\", stage, Some(host));\n}\n\npub fn install_rls(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"rls\", \"rls\", stage, Some(host));\n}\n\npub fn install_analysis(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"analysis\", \"rust-analysis\", stage, Some(host));\n}\n\npub fn install_src(builder: &Builder, stage: u32) {\n install_sh(builder, \"src\", \"rust-src\", stage, None);\n}\npub fn install_rustc(builder: &Builder, stage: u32, host: Interned<String>) {\n install_sh(builder, \"rustc\", \"rustc\", stage, Some(host));\n}\n\nfn install_sh(\n builder: &Builder,\n package: &str,\n name: &str,\n stage: u32,\n host: Option<Interned<String>>\n) {\n let build = builder.build;\n println!(\"Install {} stage{} ({:?})\", package, stage, host);\n\n let prefix_default = PathBuf::from(\"\/usr\/local\");\n let sysconfdir_default = PathBuf::from(\"\/etc\");\n let docdir_default = PathBuf::from(\"share\/doc\/rust\");\n let bindir_default = PathBuf::from(\"bin\");\n let libdir_default = PathBuf::from(\"lib\");\n let mandir_default = PathBuf::from(\"share\/man\");\n let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);\n let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);\n let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);\n let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);\n let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);\n let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);\n\n let sysconfdir = prefix.join(sysconfdir);\n let docdir = prefix.join(docdir);\n let bindir = prefix.join(bindir);\n let libdir = prefix.join(libdir);\n let mandir = prefix.join(mandir);\n\n let destdir = env::var_os(\"DESTDIR\").map(PathBuf::from);\n\n let prefix = add_destdir(&prefix, &destdir);\n let sysconfdir = add_destdir(&sysconfdir, &destdir);\n let docdir = add_destdir(&docdir, &destdir);\n let bindir = add_destdir(&bindir, &destdir);\n let libdir = add_destdir(&libdir, &destdir);\n let mandir = add_destdir(&mandir, &destdir);\n\n let empty_dir = build.out.join(\"tmp\/empty_dir\");\n\n 
t!(fs::create_dir_all(&empty_dir));\n let package_name = if let Some(host) = host {\n format!(\"{}-{}\", pkgname(build, name), host)\n } else {\n pkgname(build, name)\n };\n\n let mut cmd = Command::new(\"sh\");\n cmd.current_dir(&empty_dir)\n .arg(sanitize_sh(&tmpdir(build).join(&package_name).join(\"install.sh\")))\n .arg(format!(\"--prefix={}\", sanitize_sh(&prefix)))\n .arg(format!(\"--sysconfdir={}\", sanitize_sh(&sysconfdir)))\n .arg(format!(\"--docdir={}\", sanitize_sh(&docdir)))\n .arg(format!(\"--bindir={}\", sanitize_sh(&bindir)))\n .arg(format!(\"--libdir={}\", sanitize_sh(&libdir)))\n .arg(format!(\"--mandir={}\", sanitize_sh(&mandir)))\n .arg(\"--disable-ldconfig\");\n build.run(&mut cmd);\n t!(fs::remove_dir_all(&empty_dir));\n}\n\nfn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {\n let mut ret = match *destdir {\n Some(ref dest) => dest.clone(),\n None => return path.to_path_buf(),\n };\n for part in path.components() {\n match part {\n Component::Normal(s) => ret.push(s),\n _ => {}\n }\n }\n ret\n}\n\nmacro_rules! install {\n (($sel:ident, $builder:ident, $_config:ident),\n $($name:ident,\n $path:expr,\n $default_cond:expr,\n only_hosts: $only_hosts:expr,\n $run_item:block $(, $c:ident)*;)+) => {\n $(\n #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n pub struct $name {\n pub stage: u32,\n pub target: Interned<String>,\n pub host: Interned<String>,\n }\n\n impl Step for $name {\n type Output = ();\n const DEFAULT: bool = true;\n const ONLY_BUILD_TARGETS: bool = true;\n const ONLY_HOSTS: bool = $only_hosts;\n $(const $c: bool = true;)*\n\n fn should_run(run: ShouldRun) -> ShouldRun {\n let $_config = &run.builder.config;\n run.path($path).default_condition($default_cond)\n }\n\n fn make_run(run: RunConfig) {\n run.builder.ensure($name {\n stage: run.builder.top_stage,\n target: run.target,\n host: run.host,\n });\n }\n\n fn run($sel, $builder: &Builder) {\n $run_item\n }\n })+\n }\n}\n\ninstall!((self, builder, _config),\n Docs, \"src\/doc\", _config.docs, only_hosts: false, {\n builder.ensure(dist::Docs { stage: self.stage, host: self.target });\n install_docs(builder, self.stage, self.target);\n };\n Std, \"src\/libstd\", true, only_hosts: true, {\n for target in &builder.build.targets {\n builder.ensure(dist::Std {\n compiler: builder.compiler(self.stage, self.host),\n target: *target\n });\n install_std(builder, self.stage, *target);\n }\n };\n Cargo, \"cargo\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Cargo { stage: self.stage, target: self.target });\n install_cargo(builder, self.stage, self.target);\n };\n Rls, \"rls\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Rls { stage: self.stage, target: self.target });\n install_rls(builder, self.stage, self.target);\n };\n Analysis, \"analysis\", _config.extended, only_hosts: false, {\n builder.ensure(dist::Analysis {\n compiler: builder.compiler(self.stage, self.host),\n target: self.target\n });\n install_analysis(builder, self.stage, self.target);\n };\n Src, \"src\", _config.extended, only_hosts: true, {\n builder.ensure(dist::Src);\n install_src(builder, self.stage);\n }, ONLY_BUILD;\n Rustc, \"src\/librustc\", true, only_hosts: true, {\n builder.ensure(dist::Rustc {\n compiler: builder.compiler(self.stage, self.target),\n });\n install_rustc(builder, self.stage, self.target);\n };\n);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Automatically build any out-of-date SPIR-V when using Vulkan<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>Test case<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n let a = &~1;\n let mut c: ~int;\n match copy *a {\n b => c = b\n };\n log(error, *c);\n log(error, *a);\n\n\/*\n for os::args().each |arg| {\n match copy *arg {\n s => { }\n }\n }\n*\/\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test.<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-pretty pretty-printing is unhygienic\n\n#![feature(decl_macro)]\n#![allow(unused)]\n\nmacro m($S:ident, $x:ident) {\n $S { $x: 0 }\n}\n\nmod foo {\n struct S { x: i32 }\n\n fn f() { ::m!(S, x); }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add map and filter to builtins<commit_after><|endoftext|>"} {"text":"<commit_before>#![allow(unstable)]\n\nuse std::io::fs::File;\nuse std::io::{Writer, Seek, IoResult};\nuse layout::{LayoutBox, Rect};\nuse painting::{DisplayCommand, build_display_list};\n\n\nfn px_to_pt(value: f32) -> f32 {\n \/\/ 96px = 1in = 72pt\n \/\/ value * 1px = value * 96px \/ 96 = value * 72pt \/ 96 = (value * 0.75) * 1pt\n value * 0.75\n}\n\n\npub fn render(layout_root: &LayoutBox, bounds: Rect, file: &mut File) -> IoResult<()> {\n let display_list = build_display_list(layout_root);\n let mut pdf = try!(Pdf::new(file));\n \/\/ We map CSS pt to Poscript points (which is the default length unit in PDF).\n try!(pdf.render_page(px_to_pt(bounds.width), px_to_pt(bounds.height), |mut page| {\n for item in display_list.iter() {\n page.paint_item(item);\n }\n Ok(())\n }));\n pdf.finish()\n}\n\n\nstruct Pdf<'a, W: 'a + Writer + Seek> {\n output: &'a mut W,\n object_offsets: Vec<i64>,\n page_objects_ids: Vec<usize>,\n}\n\nconst ROOT_OBJECT_ID: usize = 1;\nconst PAGES_OBJECT_ID: usize = 2;\n\nimpl<'a, W: Writer + Seek> Pdf<'a, W> {\n fn new(output: &'a mut W) -> IoResult<Pdf<'a, W>> {\n \/\/ FIXME: Find out the lowest version that contains the features we’re using.\n try!(output.write(b\"%PDF-1.7\\n%\\xB5\\xED\\xAE\\xFB\\n\"));\n Ok(Pdf {\n output: output,\n \/\/ Object ID 0 is special in PDF.\n \/\/ We reserve IDs 1 and 2 for the catalog and page tree.\n object_offsets: vec![-1, -1, -1],\n page_objects_ids: vec![],\n })\n }\n\n fn render_page<F>(&mut self, width: f32, height: f32, render_contents: F) -> IoResult<()>\n where F: FnOnce(Page) -> IoResult<()> {\n let (contents_object_id, content_length) =\n try!(self.write_new_object(move |contents_object_id, pdf| {\n \/\/ Guess the ID of the next object. 
(We’ll assert it below.)\n try!(write!(pdf.output, \"<< \/Length {} 0 R\\n\", contents_object_id + 1));\n try!(write!(pdf.output, \">>\\n\"));\n try!(write!(pdf.output, \"stream\\n\"));\n\n let start = try!(pdf.output.tell());\n let page = Page;\n try!(render_contents(page));\n let end = try!(pdf.output.tell());\n\n try!(write!(pdf.output, \"endstream\\n\"));\n Ok((contents_object_id, end - start))\n }));\n try!(self.write_new_object(|length_object_id, pdf| {\n assert!(length_object_id == contents_object_id + 1);\n write!(pdf.output, \"{}\\n\", content_length)\n }));\n let page_object_id = try!(self.write_new_object(|page_object_id, pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Page\\n\"));\n try!(write!(pdf.output, \" \/Parent {} 0 R\\n\", PAGES_OBJECT_ID));\n try!(write!(pdf.output, \" \/Resources << >>\\n\"));\n try!(write!(pdf.output, \" \/MediaBox [ 0 0 {} {} ]\\n\", width, height));\n try!(write!(pdf.output, \" \/Contents {} 0 R\\n\", contents_object_id));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(page_object_id)\n }));\n self.page_objects_ids.push(page_object_id);\n Ok(())\n }\n\n fn write_new_object<F, T>(&mut self, write_content: F) -> IoResult<T>\n where F: FnOnce(usize, &mut Pdf<W>) -> IoResult<T> {\n let id = self.object_offsets.len();\n \/\/ `as i64` here would only overflow for PDF files bigger than 2**63 bytes\n self.object_offsets.push(try!(self.output.tell()) as i64);\n self._write_object(id, move |pdf| write_content(id, pdf))\n }\n\n fn write_object<F, T>(&mut self, id: usize, write_content: F) -> IoResult<T>\n where F: FnOnce(&mut Pdf<W>) -> IoResult<T> {\n assert!(self.object_offsets[id] == -1);\n \/\/ `as i64` here would only overflow for PDF files bigger than 2**63 bytes\n self.object_offsets[id] = try!(self.output.tell()) as i64;\n self._write_object(id, write_content)\n }\n\n fn _write_object<F, T>(&mut self, id: usize, write_content: F) -> IoResult<T>\n where F: FnOnce(&mut Pdf<W>) -> IoResult<T> {\n try!(write!(self.output, \"{} 0 obj\\n\", id));\n let result = try!(write_content(self));\n try!(write!(self.output, \"endobj\\n\"));\n Ok(result)\n }\n\n fn finish(mut self) -> IoResult<()> {\n self._finish()\n }\n\n fn _finish(&mut self) -> IoResult<()> {\n try!(self.write_object(PAGES_OBJECT_ID, |pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Pages\\n\"));\n try!(write!(pdf.output, \" \/Count {}\\n\", pdf.page_objects_ids.len()));\n try!(write!(pdf.output, \" \/Kids [ \"));\n for &page_object_id in pdf.page_objects_ids.iter() {\n try!(write!(pdf.output, \"{} 0 R \", page_object_id));\n }\n try!(write!(pdf.output, \"]\\n\"));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(())\n }));\n try!(self.write_object(ROOT_OBJECT_ID, |pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Catalog\\n\"));\n try!(write!(pdf.output, \" \/Pages {} 0 R\\n\", PAGES_OBJECT_ID));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(())\n }));\n let startxref = try!(self.output.tell());\n try!(write!(self.output, \"xref\\n\"));\n try!(write!(self.output, \"0 {}\\n\", self.object_offsets.len()));\n \/\/ Object 0 is special\n try!(write!(self.output, \"0000000000 65535 f \\n\"));\n \/\/ Use [1..] 
to skip object 0 in self.object_offsets.\n for &offset in self.object_offsets[1..].iter() {\n assert!(offset >= 0);\n try!(write!(self.output, \"{:010} 00000 n \\n\", offset));\n }\n try!(write!(self.output, \"trailer\\n\"));\n try!(write!(self.output, \"<< \/Size {}\\n\", self.object_offsets.len()));\n try!(write!(self.output, \" \/Root {} 0 R\\n\", ROOT_OBJECT_ID));\n try!(write!(self.output, \">>\\n\"));\n try!(write!(self.output, \"startxref\\n\"));\n try!(write!(self.output, \"{}\\n\", startxref));\n try!(write!(self.output, \"%%EOF\\n\"));\n Ok(())\n }\n}\n\n\nstruct Page;\n\nimpl Page {\n fn paint_item(&mut self, item: &DisplayCommand) {\n }\n}\n<commit_msg>PDF backend: paint SolidColor display commands.<commit_after>#![allow(unstable)]\n\nuse std::io::fs::File;\nuse std::io::{Writer, Seek, IoResult};\nuse layout::{LayoutBox, Rect};\nuse painting::{DisplayCommand, build_display_list};\n\n\nfn px_to_pt(value: f32) -> f32 {\n \/\/ 96px = 1in = 72pt\n \/\/ value * 1px = value * 96px \/ 96 = value * 72pt \/ 96 = (value * 0.75) * 1pt\n value * 0.75\n}\n\n\npub fn render(layout_root: &LayoutBox, bounds: Rect, file: &mut File) -> IoResult<()> {\n let display_list = build_display_list(layout_root);\n let mut pdf = try!(Pdf::new(file));\n \/\/ We map CSS pt to Poscript points (which is the default length unit in PDF).\n try!(pdf.render_page(px_to_pt(bounds.width), px_to_pt(bounds.height), |output| {\n for item in display_list.iter() {\n try!(render_item(item, output));\n }\n Ok(())\n }));\n pdf.finish()\n}\n\n\nfn render_item<W: Writer>(item: &DisplayCommand, output: &mut W) -> IoResult<()> {\n match *item {\n DisplayCommand::SolidColor(color, rect) => {\n write!(output, \"{} {} {} sc {} {} {} {} re f\\n\",\n \/\/ FIMXE: alpha transparency\n color.r, color.g, color.b,\n rect.x, rect.y, rect.width, rect.height)\n }\n }\n}\n\n\nstruct Pdf<'a, W: 'a + Writer + Seek> {\n output: &'a mut W,\n object_offsets: Vec<i64>,\n page_objects_ids: Vec<usize>,\n}\n\nconst ROOT_OBJECT_ID: usize = 1;\nconst PAGES_OBJECT_ID: usize = 2;\n\nimpl<'a, W: Writer + Seek> Pdf<'a, W> {\n fn new(output: &'a mut W) -> IoResult<Pdf<'a, W>> {\n \/\/ FIXME: Find out the lowest version that contains the features we’re using.\n try!(output.write(b\"%PDF-1.7\\n%\\xB5\\xED\\xAE\\xFB\\n\"));\n Ok(Pdf {\n output: output,\n \/\/ Object ID 0 is special in PDF.\n \/\/ We reserve IDs 1 and 2 for the catalog and page tree.\n object_offsets: vec![-1, -1, -1],\n page_objects_ids: vec![],\n })\n }\n\n fn render_page<F>(&mut self, width: f32, height: f32, render_contents: F) -> IoResult<()>\n where F: FnOnce(&mut W) -> IoResult<()> {\n let (contents_object_id, content_length) =\n try!(self.write_new_object(move |contents_object_id, pdf| {\n \/\/ Guess the ID of the next object. 
(We’ll assert it below.)\n try!(write!(pdf.output, \"<< \/Length {} 0 R\\n\", contents_object_id + 1));\n try!(write!(pdf.output, \">>\\n\"));\n try!(write!(pdf.output, \"stream\\n\"));\n\n let start = try!(pdf.output.tell());\n try!(write!(pdf.output, \"\/DeviceRGB cs \/DeviceRGB CS\\n\"));\n try!(write!(pdf.output, \"0.75 0 0 -0.75 0 {} cm\\n\", height));\n try!(render_contents(pdf.output));\n let end = try!(pdf.output.tell());\n\n try!(write!(pdf.output, \"endstream\\n\"));\n Ok((contents_object_id, end - start))\n }));\n try!(self.write_new_object(|length_object_id, pdf| {\n assert!(length_object_id == contents_object_id + 1);\n write!(pdf.output, \"{}\\n\", content_length)\n }));\n let page_object_id = try!(self.write_new_object(|page_object_id, pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Page\\n\"));\n try!(write!(pdf.output, \" \/Parent {} 0 R\\n\", PAGES_OBJECT_ID));\n try!(write!(pdf.output, \" \/Resources << >>\\n\"));\n try!(write!(pdf.output, \" \/MediaBox [ 0 0 {} {} ]\\n\", width, height));\n try!(write!(pdf.output, \" \/Contents {} 0 R\\n\", contents_object_id));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(page_object_id)\n }));\n self.page_objects_ids.push(page_object_id);\n Ok(())\n }\n\n fn write_new_object<F, T>(&mut self, write_content: F) -> IoResult<T>\n where F: FnOnce(usize, &mut Pdf<W>) -> IoResult<T> {\n let id = self.object_offsets.len();\n \/\/ `as i64` here would only overflow for PDF files bigger than 2**63 bytes\n self.object_offsets.push(try!(self.output.tell()) as i64);\n self._write_object(id, move |pdf| write_content(id, pdf))\n }\n\n fn write_object_with_id<F, T>(&mut self, id: usize, write_content: F) -> IoResult<T>\n where F: FnOnce(&mut Pdf<W>) -> IoResult<T> {\n assert!(self.object_offsets[id] == -1);\n \/\/ `as i64` here would only overflow for PDF files bigger than 2**63 bytes\n self.object_offsets[id] = try!(self.output.tell()) as i64;\n self._write_object(id, write_content)\n }\n\n fn _write_object<F, T>(&mut self, id: usize, write_content: F) -> IoResult<T>\n where F: FnOnce(&mut Pdf<W>) -> IoResult<T> {\n try!(write!(self.output, \"{} 0 obj\\n\", id));\n let result = try!(write_content(self));\n try!(write!(self.output, \"endobj\\n\"));\n Ok(result)\n }\n\n fn finish(mut self) -> IoResult<()> {\n self._finish()\n }\n\n fn _finish(&mut self) -> IoResult<()> {\n try!(self.write_object_with_id(PAGES_OBJECT_ID, |pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Pages\\n\"));\n try!(write!(pdf.output, \" \/Count {}\\n\", pdf.page_objects_ids.len()));\n try!(write!(pdf.output, \" \/Kids [ \"));\n for &page_object_id in pdf.page_objects_ids.iter() {\n try!(write!(pdf.output, \"{} 0 R \", page_object_id));\n }\n try!(write!(pdf.output, \"]\\n\"));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(())\n }));\n try!(self.write_object_with_id(ROOT_OBJECT_ID, |pdf| {\n try!(write!(pdf.output, \"<< \/Type \/Catalog\\n\"));\n try!(write!(pdf.output, \" \/Pages {} 0 R\\n\", PAGES_OBJECT_ID));\n try!(write!(pdf.output, \">>\\n\"));\n Ok(())\n }));\n let startxref = try!(self.output.tell());\n try!(write!(self.output, \"xref\\n\"));\n try!(write!(self.output, \"0 {}\\n\", self.object_offsets.len()));\n \/\/ Object 0 is special\n try!(write!(self.output, \"0000000000 65535 f \\n\"));\n \/\/ Use [1..] 
to skip object 0 in self.object_offsets.\n for &offset in self.object_offsets[1..].iter() {\n assert!(offset >= 0);\n try!(write!(self.output, \"{:010} 00000 n \\n\", offset));\n }\n try!(write!(self.output, \"trailer\\n\"));\n try!(write!(self.output, \"<< \/Size {}\\n\", self.object_offsets.len()));\n try!(write!(self.output, \" \/Root {} 0 R\\n\", ROOT_OBJECT_ID));\n try!(write!(self.output, \">>\\n\"));\n try!(write!(self.output, \"startxref\\n\"));\n try!(write!(self.output, \"{}\\n\", startxref));\n try!(write!(self.output, \"%%EOF\\n\"));\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free[^free] foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! [^free]: Strictly speaking, there are some symbols which are needed but\n\/\/! they aren't always necessary.\n\/\/!\n\/\/! The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/! Please note that all of these details are currently not considered stable.\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_panic` - This function takes four arguments, a\n\/\/! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments\n\/\/! dictate the panic message, the file at which panic was invoked, and the\n\/\/! line and column inside the file. It is up to consumers of this core\n\/\/! library to define this panic function; it is only required to never\n\/\/! return. This requires a `lang` attribute named `panic_fmt`.\n\/\/!\n\/\/! * `rust_eh_personality` - is used by the failure mechanisms of the\n\/\/! compiler. This is often mapped to GCC's personality function, but crates\n\/\/! which do not trigger a panic can be assured that this function is never\n\/\/! called. The `lang` attribute is called `eh_personality`.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\/\/\n\/\/ Here we explicitly #[cfg]-out this whole crate when testing. 
If we don't do\n\/\/ this, both the generated test artifact and the linked libtest (which\n\/\/ transitively includes libcore) will both define the same set of lang items,\n\/\/ and this will cause the E0152 \"duplicate lang item found\" error. See\n\/\/ discussion in #50466 for details.\n\/\/\n\/\/ This cfg won't affect doc tests.\n#![cfg(not(test))]\n\n#![stable(feature = \"core\", since = \"1.6.0\")]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\",\n test(no_crate_inject, attr(deny(warnings))),\n test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]\n\n#![no_core]\n#![deny(missing_docs)]\n#![deny(missing_debug_implementations)]\n\n#![feature(allow_internal_unstable)]\n#![feature(arbitrary_self_types)]\n#![feature(asm)]\n#![feature(associated_type_defaults)]\n#![feature(attr_literals)]\n#![feature(cfg_target_has_atomic)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(core_float)]\n#![feature(custom_attribute)]\n#![feature(doc_cfg)]\n#![feature(doc_spotlight)]\n#![feature(extern_types)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(iterator_flatten)]\n#![feature(iterator_repeat_with)]\n#![feature(lang_items)]\n#![feature(link_llvm_intrinsics)]\n#![feature(never_type)]\n#![feature(exhaustive_patterns)]\n#![feature(macro_at_most_once_rep)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(prelude_import)]\n#![feature(repr_simd, platform_intrinsics)]\n#![feature(rustc_attrs)]\n#![feature(rustc_const_unstable)]\n#![feature(simd_ffi)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(specialization)]\n#![feature(staged_api)]\n#![feature(stmt_expr_attributes)]\n#![feature(unboxed_closures)]\n#![feature(untagged_unions)]\n#![feature(unwind_attributes)]\n#![feature(doc_alias)]\n#![feature(inclusive_range_methods)]\n#![feature(mmx_target_feature)]\n#![feature(tbm_target_feature)]\n#![feature(sse4a_target_feature)]\n#![feature(arm_target_feature)]\n#![feature(powerpc_target_feature)]\n#![feature(mips_target_feature)]\n#![feature(aarch64_target_feature)]\n#![feature(const_slice_len)]\n#![feature(const_str_as_bytes)]\n#![feature(const_str_len)]\n\n#[prelude_import]\n#[allow(unused)]\nuse prelude::v1::*;\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod internal_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n#[path = \"num\/i128.rs\"] pub mod i128;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n#[path = \"num\/u128.rs\"] pub mod u128;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod 
mem;\npub mod ptr;\npub mod hint;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod any;\npub mod array;\npub mod ascii;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panic;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\npub mod time;\n\npub mod unicode;\n\n\/* Heap memory allocator trait *\/\n#[allow(missing_docs)]\npub mod alloc;\n\n#[unstable(feature = \"allocator_api\", issue = \"32838\")]\n#[rustc_deprecated(since = \"1.27.0\", reason = \"module renamed to `alloc`\")]\n\/\/\/ Use the `alloc` module instead.\npub mod heap {\n pub use alloc::*;\n}\n\n\/\/ note: does not need to be public\nmod iter_private;\nmod nonzero;\nmod tuple;\nmod unit;\n\n\/\/ Pull in the the `coresimd` crate directly into libcore. This is where all the\n\/\/ architecture-specific (and vendor-specific) intrinsics are defined. AKA\n\/\/ things like SIMD and such. Note that the actual source for all this lies in a\n\/\/ different repository, rust-lang-nursery\/stdsimd. That's why the setup here is\n\/\/ a bit wonky.\n#[allow(unused_macros)]\nmacro_rules! test_v16 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v32 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v64 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v128 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v256 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v512 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }\n#[path = \"..\/stdsimd\/coresimd\/mod.rs\"]\n#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))] \/\/ allow changes to how stdsimd works in stage0\nmod coresimd;\n\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))]\npub use coresimd::simd;\n#[stable(feature = \"simd_arch\", since = \"1.27.0\")]\n#[cfg(not(stage0))]\npub use coresimd::arch;\n<commit_msg>lib.rs don't beautiful<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! # The Rust Core Library\n\/\/!\n\/\/! The Rust Core Library is the dependency-free[^free] foundation of [The\n\/\/! Rust Standard Library](..\/std\/index.html). It is the portable glue\n\/\/! between the language and its libraries, defining the intrinsic and\n\/\/! primitive building blocks of all Rust code. It links to no\n\/\/! upstream libraries, no system libraries, and no libc.\n\/\/!\n\/\/! [^free]: Strictly speaking, there are some symbols which are needed but\n\/\/! they aren't always necessary.\n\/\/!\n\/\/! The core library is *minimal*: it isn't even aware of heap allocation,\n\/\/! nor does it provide concurrency or I\/O. These things require\n\/\/! 
platform integration, and this library is platform-agnostic.\n\/\/!\n\/\/! # How to use the core library\n\/\/!\n\/\/! Please note that all of these details are currently not considered stable.\n\/\/!\n\/\/ FIXME: Fill me in with more detail when the interface settles\n\/\/! This library is built on the assumption of a few existing symbols:\n\/\/!\n\/\/! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are\n\/\/! often generated by LLVM. Additionally, this library can make explicit\n\/\/! calls to these functions. Their signatures are the same as found in C.\n\/\/! These functions are often provided by the system libc, but can also be\n\/\/! provided by the [rlibc crate](https:\/\/crates.io\/crates\/rlibc).\n\/\/!\n\/\/! * `rust_begin_panic` - This function takes four arguments, a\n\/\/! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments\n\/\/! dictate the panic message, the file at which panic was invoked, and the\n\/\/! line and column inside the file. It is up to consumers of this core\n\/\/! library to define this panic function; it is only required to never\n\/\/! return. This requires a `lang` attribute named `panic_fmt`.\n\/\/!\n\/\/! * `rust_eh_personality` - is used by the failure mechanisms of the\n\/\/! compiler. This is often mapped to GCC's personality function, but crates\n\/\/! which do not trigger a panic can be assured that this function is never\n\/\/! called. The `lang` attribute is called `eh_personality`.\n\n\/\/ Since libcore defines many fundamental lang items, all tests live in a\n\/\/ separate crate, libcoretest, to avoid bizarre issues.\n\/\/\n\/\/ Here we explicitly #[cfg]-out this whole crate when testing. If we don't do\n\/\/ this, both the generated test artifact and the linked libtest (which\n\/\/ transitively includes libcore) will both define the same set of lang items,\n\/\/ and this will cause the E0152 \"duplicate lang item found\" error. 
See\n\/\/ discussion in #50466 for details.\n\/\/\n\/\/ This cfg won't affect doc tests.\n#![cfg(not(test))]\n\n#![stable(feature = \"core\", since = \"1.6.0\")]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\",\n html_playground_url = \"https:\/\/play.rust-lang.org\/\",\n issue_tracker_base_url = \"https:\/\/github.com\/rust-lang\/rust\/issues\/\",\n test(no_crate_inject, attr(deny(warnings))),\n test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]\n\n#![no_core]\n#![deny(missing_docs)]\n#![deny(missing_debug_implementations)]\n\n#![feature(allow_internal_unstable)]\n#![feature(arbitrary_self_types)]\n#![feature(asm)]\n#![feature(associated_type_defaults)]\n#![feature(attr_literals)]\n#![feature(cfg_target_has_atomic)]\n#![feature(concat_idents)]\n#![feature(const_fn)]\n#![feature(core_float)]\n#![feature(custom_attribute)]\n#![feature(doc_cfg)]\n#![feature(doc_spotlight)]\n#![feature(extern_types)]\n#![feature(fundamental)]\n#![feature(intrinsics)]\n#![feature(iterator_flatten)]\n#![feature(iterator_repeat_with)]\n#![feature(lang_items)]\n#![feature(link_llvm_intrinsics)]\n#![feature(never_type)]\n#![feature(exhaustive_patterns)]\n#![feature(macro_at_most_once_rep)]\n#![feature(no_core)]\n#![feature(on_unimplemented)]\n#![feature(optin_builtin_traits)]\n#![feature(prelude_import)]\n#![feature(repr_simd, platform_intrinsics)]\n#![feature(rustc_attrs)]\n#![feature(rustc_const_unstable)]\n#![feature(simd_ffi)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(specialization)]\n#![feature(staged_api)]\n#![feature(stmt_expr_attributes)]\n#![feature(unboxed_closures)]\n#![feature(untagged_unions)]\n#![feature(unwind_attributes)]\n#![feature(doc_alias)]\n#![feature(inclusive_range_methods)]\n#![feature(mmx_target_feature)]\n#![feature(tbm_target_feature)]\n#![feature(sse4a_target_feature)]\n#![feature(arm_target_feature)]\n#![feature(powerpc_target_feature)]\n#![feature(mips_target_feature)]\n#![feature(aarch64_target_feature)]\n#![feature(const_slice_len)]\n#![feature(const_str_as_bytes)]\n#![feature(const_str_len)]\n\n#[prelude_import]\n#[allow(unused)]\nuse prelude::v1::*;\n\n#[macro_use]\nmod macros;\n\n#[macro_use]\nmod internal_macros;\n\n#[path = \"num\/int_macros.rs\"]\n#[macro_use]\nmod int_macros;\n\n#[path = \"num\/uint_macros.rs\"]\n#[macro_use]\nmod uint_macros;\n\n#[path = \"num\/isize.rs\"] pub mod isize;\n#[path = \"num\/i8.rs\"] pub mod i8;\n#[path = \"num\/i16.rs\"] pub mod i16;\n#[path = \"num\/i32.rs\"] pub mod i32;\n#[path = \"num\/i64.rs\"] pub mod i64;\n#[path = \"num\/i128.rs\"] pub mod i128;\n\n#[path = \"num\/usize.rs\"] pub mod usize;\n#[path = \"num\/u8.rs\"] pub mod u8;\n#[path = \"num\/u16.rs\"] pub mod u16;\n#[path = \"num\/u32.rs\"] pub mod u32;\n#[path = \"num\/u64.rs\"] pub mod u64;\n#[path = \"num\/u128.rs\"] pub mod u128;\n\n#[path = \"num\/f32.rs\"] pub mod f32;\n#[path = \"num\/f64.rs\"] pub mod f64;\n\n#[macro_use]\npub mod num;\n\n\/* The libcore prelude, not as all-encompassing as the libstd prelude *\/\n\npub mod prelude;\n\n\/* Core modules for ownership management *\/\n\npub mod intrinsics;\npub mod mem;\npub mod ptr;\npub mod hint;\n\n\/* Core language traits *\/\n\npub mod marker;\npub mod ops;\npub mod cmp;\npub mod clone;\npub mod default;\npub mod convert;\npub mod borrow;\n\n\/* Core types and methods on primitives *\/\n\npub mod 
any;\npub mod array;\npub mod ascii;\npub mod sync;\npub mod cell;\npub mod char;\npub mod panic;\npub mod panicking;\npub mod iter;\npub mod option;\npub mod raw;\npub mod result;\n\npub mod slice;\npub mod str;\npub mod hash;\npub mod fmt;\npub mod time;\n\npub mod unicode;\n\n\/* Heap memory allocator trait *\/\n#[allow(missing_docs)]\npub mod alloc;\n\n#[unstable(feature = \"allocator_api\", issue = \"32838\")]\n#[rustc_deprecated(since = \"1.27.0\", reason = \"module renamed to `alloc`\")]\n\/\/\/ Use the `alloc` module instead.\npub mod heap {\n pub use alloc::*;\n}\n\n\/\/ note: does not need to be public\nmod iter_private;\nmod nonzero;\nmod tuple;\nmod unit;\n\n\/\/ Pull in the the `coresimd` crate directly into libcore. This is where all the\n\/\/ architecture-specific (and vendor-specific) intrinsics are defined. AKA\n\/\/ things like SIMD and such. Note that the actual source for all this lies in a\n\/\/ different repository, rust-lang-nursery\/stdsimd. That's why the setup here is\n\/\/ a bit wonky.\n#[allow(unused_macros)]\nmacro_rules! test_v16 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v32 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v64 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v128 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v256 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! test_v512 { ($item:item) => {}; }\n#[allow(unused_macros)]\nmacro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }\n#[path = \"..\/stdsimd\/coresimd\/mod.rs\"]\n#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))] \/\/ allow changes to how stdsimd works in stage0\nmod coresimd;\n\n#[unstable(feature = \"stdsimd\", issue = \"48556\")]\n#[cfg(not(stage0))]\npub use coresimd::simd;\n#[stable(feature = \"simd_arch\", since = \"1.27.0\")]\n#[cfg(not(stage0))]\npub use coresimd::arch;\n<|endoftext|>"} {"text":"<commit_before>use std::collections::BTreeMap;\nuse std::io::{stdout, Write};\n\nuse super::peg::{Pipeline, Job};\nuse super::input_editor::readln;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct Variables {\n variables: BTreeMap<String, String>,\n}\n\nimpl Variables {\n pub fn new() -> Variables {\n Variables { variables: BTreeMap::new() }\n }\n\n pub fn read<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let mut out = stdout();\n for arg in args.into_iter().skip(1) {\n print!(\"{}=\", arg.as_ref().trim());\n if let Err(message) = out.flush() {\n println!(\"{}: Failed to flush stdout\", message);\n return FAILURE;\n }\n if let Some(value) = readln() {\n self.set_var(arg.as_ref(), value.trim());\n }\n }\n SUCCESS\n }\n\n pub fn let_<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let args = args.into_iter();\n let string: String = args.skip(1).fold(String::new(), |string, x| string + x.as_ref());\n let mut split = string.split('=');\n match (split.next().and_then(|x| if x == \"\" { None } else { Some(x) }), split.next()) {\n (Some(key), Some(value)) => {\n self.variables.insert(key.to_string(), value.to_string());\n },\n (Some(key), None) => {\n self.variables.remove(key);\n },\n _ => {\n for (key, value) in self.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n }\n }\n SUCCESS\n }\n\n pub fn set_var(&mut self, name: &str, value: &str) {\n if !name.is_empty() {\n if 
value.is_empty() {\n self.variables.remove(&name.to_string());\n } else {\n self.variables.insert(name.to_string(), value.to_string());\n }\n }\n }\n\n pub fn expand_pipeline(&self, pipeline: &Pipeline) -> Pipeline {\n \/\/ TODO don't copy everything\n Pipeline::new(pipeline.jobs.iter().map(|job| {self.expand_job(job)}).collect())\n }\n\n pub fn expand_job(&self, job: &Job) -> Job {\n \/\/ TODO don't copy everything\n Job::from_vec_string(job.args\n .iter()\n .map(|original: &String| self.expand_string(&original).to_string())\n .collect(),\n job.background)\n }\n\n #[inline]\n pub fn expand_string<'a>(&'a self, original: &'a str) -> String {\n let mut new = original.to_owned();\n for (i, _) in original.match_indices(\"$\") {\n let mut var_name = \"\".to_owned();\n for (i, c) in original.char_indices().skip(i+1) { \/\/ skip the dollar sign\n let mut replace_string = false;\n if c.is_alphanumeric() || c == '_' {\n var_name.push(c);\n if i == original.len() - 1 {\n replace_string = true;\n }\n } else {\n replace_string = true;\n }\n if replace_string {\n let value: &str = match self.variables.get(&var_name) {\n Some(v) => &v,\n None => \"\"\n };\n new = new.replace(&format!(\"${}\", var_name), value);\n break;\n }\n } \n }\n new.clone()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn undefined_variable_expands_to_empty_string() {\n let variables = Variables::new();\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", &expanded);\n }\n\n #[test]\n fn let_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", &expanded);\n }\n\n #[test]\n fn set_var_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.set_var(\"FOO\", \"BAR\");\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", &expanded);\n }\n\n #[test]\n fn remove_a_variable_with_let() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n variables.let_(vec![\"let\", \"FOO\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", &expanded);\n }\n\n #[test]\n fn expand_several_variables() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n variables.let_(vec![\"let\", \"X\", \"=\", \"Y\"]);\n let expanded = variables.expand_string(\"variables: $FOO $X\");\n assert_eq!(\"variables: BAR Y\", &expanded);\n }\n}\n<commit_msg>Escape with backslash<commit_after>use std::collections::BTreeMap;\nuse std::io::{stdout, Write};\n\nuse super::peg::{Pipeline, Job};\nuse super::input_editor::readln;\nuse super::status::{SUCCESS, FAILURE};\n\npub struct Variables {\n variables: BTreeMap<String, String>,\n}\n\nimpl Variables {\n pub fn new() -> Variables {\n Variables { variables: BTreeMap::new() }\n }\n\n pub fn read<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let mut out = stdout();\n for arg in args.into_iter().skip(1) {\n print!(\"{}=\", arg.as_ref().trim());\n if let Err(message) = out.flush() {\n println!(\"{}: Failed to flush stdout\", message);\n return FAILURE;\n }\n if let Some(value) = readln() {\n self.set_var(arg.as_ref(), value.trim());\n }\n }\n SUCCESS\n }\n\n pub fn let_<I: IntoIterator>(&mut self, args: I) -> i32\n where I::Item: AsRef<str>\n {\n let args = args.into_iter();\n let string: String = args.skip(1).fold(String::new(), |string, x| string + x.as_ref());\n 
let mut split = string.split('=');\n match (split.next().and_then(|x| if x == \"\" { None } else { Some(x) }), split.next()) {\n (Some(key), Some(value)) => {\n self.variables.insert(key.to_string(), value.to_string());\n },\n (Some(key), None) => {\n self.variables.remove(key);\n },\n _ => {\n for (key, value) in self.variables.iter() {\n println!(\"{}={}\", key, value);\n }\n }\n }\n SUCCESS\n }\n\n pub fn set_var(&mut self, name: &str, value: &str) {\n if !name.is_empty() {\n if value.is_empty() {\n self.variables.remove(&name.to_string());\n } else {\n self.variables.insert(name.to_string(), value.to_string());\n }\n }\n }\n\n pub fn expand_pipeline(&self, pipeline: &Pipeline) -> Pipeline {\n \/\/ TODO don't copy everything\n Pipeline::new(pipeline.jobs.iter().map(|job| {self.expand_job(job)}).collect())\n }\n\n pub fn expand_job(&self, job: &Job) -> Job {\n \/\/ TODO don't copy everything\n Job::from_vec_string(job.args\n .iter()\n .map(|original: &String| self.expand_string(&original).to_string())\n .collect(),\n job.background)\n }\n\n fn replace_substring(string: &mut String, start: usize, end: usize, replacement: &str) {\n let string_start = string.chars().take(start).collect::<String>();\n let string_end = string.chars().skip(end+1).collect::<String>();\n *string = string_start + replacement + &string_end;\n }\n\n #[inline]\n pub fn expand_string<'a>(&'a self, original: &'a str) -> String {\n let mut new = original.to_owned();\n let mut replacements: Vec<(usize, usize, String)> = vec![];\n for (n, _) in original.match_indices(\"$\") {\n if n > 0 {\n if let Some(c) = original.chars().nth(n-1) {\n if c == '\\\\' {\n continue;\n }\n }\n }\n let mut var_name = \"\".to_owned();\n for (i, c) in original.char_indices().skip(n+1) { \/\/ skip the dollar sign\n if c.is_alphanumeric() || c == '_' {\n var_name.push(c);\n if i == original.len() - 1 {\n replacements.push((n, i, var_name.clone()));\n break;\n }\n } else {\n replacements.push((n, i-1, var_name.clone()));\n break;\n }\n }\n }\n\n for &(start, end, ref var_name) in replacements.iter().rev() {\n let value: &str = match self.variables.get(var_name) {\n Some(v) => &v,\n None => \"\"\n };\n Variables::replace_substring(&mut new, start, end, value);\n }\n new.clone()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn undefined_variable_expands_to_empty_string() {\n let variables = Variables::new();\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", &expanded);\n }\n\n #[test]\n fn let_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", &expanded);\n }\n\n #[test]\n fn set_var_and_expand_a_variable() {\n let mut variables = Variables::new();\n variables.set_var(\"FOO\", \"BAR\");\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"BAR\", &expanded);\n }\n\n #[test]\n fn remove_a_variable_with_let() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n variables.let_(vec![\"let\", \"FOO\"]);\n let expanded = variables.expand_string(\"$FOO\");\n assert_eq!(\"\", &expanded);\n }\n\n #[test]\n fn expand_several_variables() {\n let mut variables = Variables::new();\n variables.let_(vec![\"let\", \"FOO\", \"=\", \"BAR\"]);\n variables.let_(vec![\"let\", \"X\", \"=\", \"Y\"]);\n let expanded = variables.expand_string(\"variables: $FOO $X\");\n assert_eq!(\"variables: BAR Y\", &expanded);\n }\n\n 
#[test]\n fn replace_substring() {\n let mut string = \"variable: $FOO\".to_owned();\n Variables::replace_substring(&mut string, 10, 13, \"BAR\");\n assert_eq!(\"variable: BAR\", string);\n }\n\n #[test]\n fn escape_with_backslash() {\n let mut variables = Variables::new();\n let expanded = variables.expand_string(\"\\\\$FOO\");\n assert_eq!(\"\\\\$FOO\", &expanded);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate diesel;\nextern crate r2d2;\n\nuse diesel::{Connection, ConnectionError};\nuse diesel::pg::PgConnection;\nuse r2d2::ManageConnection;\nuse std::convert::Into;\n\npub struct ConnectionManager {\n database_url: String,\n}\n\nimpl ConnectionManager {\n pub fn new<S: Into<String>>(database_url: S) -> Self {\n ConnectionManager {\n database_url: database_url.into(),\n }\n }\n}\n\npub enum Error {\n ConnectionError(ConnectionError),\n QueryError(diesel::result::Error),\n}\n\nimpl ManageConnection for ConnectionManager {\n type Connection = PgConnection;\n type Error = Error;\n\n fn connect(&self) -> Result<PgConnection, Error> {\n PgConnection::establish(&self.database_url)\n .map_err(Error::ConnectionError)\n }\n\n fn is_valid(&self, conn: &mut PgConnection) -> Result<(), Error> {\n conn.execute(\"SELECT 1\").map(|_| ()).map_err(Error::QueryError)\n }\n\n fn has_broken(&self, _conn: &mut PgConnection) -> bool {\n false\n }\n}\n<commit_msg>Decouple the library from PG<commit_after>extern crate diesel;\nextern crate r2d2;\n\nuse diesel::{Connection, ConnectionError};\nuse r2d2::ManageConnection;\nuse std::convert::Into;\nuse std::marker::PhantomData;\n\npub struct ConnectionManager<T> {\n database_url: String,\n _marker: PhantomData<T>,\n}\n\nunsafe impl<T: Send + 'static> Sync for ConnectionManager<T> {\n}\n\nimpl<T> ConnectionManager<T> {\n pub fn new<S: Into<String>>(database_url: S) -> Self {\n ConnectionManager {\n database_url: database_url.into(),\n _marker: PhantomData,\n }\n }\n}\n\npub enum Error {\n ConnectionError(ConnectionError),\n QueryError(diesel::result::Error),\n}\n\nimpl<T> ManageConnection for ConnectionManager<T> where\n T: Connection + Send + 'static,\n{\n type Connection = T;\n type Error = Error;\n\n fn connect(&self) -> Result<T, Error> {\n T::establish(&self.database_url)\n .map_err(Error::ConnectionError)\n }\n\n fn is_valid(&self, conn: &mut T) -> Result<(), Error> {\n conn.execute(\"SELECT 1\").map(|_| ()).map_err(Error::QueryError)\n }\n\n fn has_broken(&self, _conn: &mut T) -> bool {\n false\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Actually commit the implementation.<commit_after>use std::io;\nuse std::io::prelude::*;\nuse itertools::Itertools;\nuse common;\n\n\n#[derive(Default)]\npub struct Day16 {\n}\n\nimpl Day16 {\n pub fn new() -> Day16 {\n Default::default()\n }\n\n fn solve(&self, input: &mut io::Read, target_size: usize) -> String {\n let mut reader = io::BufReader::new(input);\n let mut line = String::new();\n reader.read_line(&mut line).expect(\"No input!\");\n\n let data: Vec<bool> = line.trim().chars().map(|x| x == '1').collect();\n let filled = fill(&data, target_size);\n let sum = checksum(&filled);\n\n sum.iter().map(|b| if *b { '1' } else { '0' }).collect()\n }\n}\n\n\nimpl common::Solution for Day16 {\n\n fn part1(&mut self, input: &mut io::Read) -> String {\n self.solve(input, 272)\n }\n\n fn part2(&mut self, input: &mut io::Read) -> String {\n self.solve(input, 35651584)\n }\n}\n\n\nfn fill(initial: &[bool], size: usize) -> Vec<bool> {\n let mut a = initial.to_vec();\n while a.len() < size {\n let mut b 
= a.clone();\n a.push(false);\n b.reverse();\n a.extend(b.iter().map(|x| !x));\n }\n\n a.truncate(size);\n a\n}\n\nfn checksum(initial: &[bool]) -> Vec<bool> {\n let mut checksum = initial.to_vec();\n while checksum.len() % 2 == 0 {\n let mut new_checksum = Vec::new();\n for (a, b) in checksum.into_iter().tuples() {\n new_checksum.push(a == b);\n }\n checksum = new_checksum;\n }\n checksum\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn test_fill() {\n assert_eq!(vec![true, false, false, false, false,\n false, true, true, true, true,\n false, false, true, false, false,\n false, false, true, true, true], fill(&[true, false, false, false, false], 20));\n }\n\n #[test]\n fn test_checksum() {\n let result = checksum(&[true, false, false, false, false,\n false, true, true, true, true,\n false, false, true, false, false,\n false, false, true, true, true]);\n assert_eq!(vec![false, true, true, false, false], result);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate redox;\n\n\/\/To use this, please install zfs-fuse\nuse redox::*;\n\nuse core::ptr;\n\nmod nvpair;\nmod nvstream;\nmod xdr;\n\npub struct ZFS {\n disk: File,\n}\n\nimpl ZFS {\n pub fn new(disk: File) -> Self {\n ZFS { disk: disk }\n }\n\n \/\/TODO: Error handling\n pub fn read(&mut self, start: usize, length: usize) -> Vec<u8> {\n let mut ret: Vec<u8> = Vec::new();\n\n for sector in start..start + length {\n \/\/TODO: Check error\n self.disk.seek(Seek::Start(sector * 512));\n\n let mut data: [u8; 512] = [0; 512];\n self.disk.read(&mut data);\n\n for i in 0..512 {\n ret.push(data[i]);\n }\n }\n\n return ret;\n }\n\n pub fn write(&mut self, block: usize, data: &[u8; 512]) {\n self.disk.seek(Seek::Start(block * 512));\n self.disk.write(data);\n }\n}\n\n#[repr(packed)]\npub struct VdevLabel {\n pub blank: [u8; 8 * 1024],\n pub boot_header: [u8; 8 * 1024],\n pub nv_pairs: [u8; 112 * 1024],\n pub uberblocks: [Uberblock; 128],\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct Uberblock {\n pub magic: u64,\n pub version: u64,\n pub txg: u64,\n pub guid_sum: u64,\n pub timestamp: u64,\n pub rootbp: BlockPtr,\n}\n\nimpl Uberblock {\n pub fn magic_little() -> u64 {\n return 0x0cb1ba00;\n }\n\n pub fn magic_big() -> u64 {\n return 0x00bab10c;\n }\n\n pub fn from(data: &Vec<u8>) -> Option<Self> {\n if data.len() >= 1024 {\n let uberblock = unsafe { ptr::read(data.as_ptr() as *const Uberblock) };\n if uberblock.magic == Uberblock::magic_little() {\n println!(\"Little Magic\");\n return Option::Some(uberblock);\n } else if uberblock.magic == Uberblock::magic_big() {\n println!(\"Big Magic\");\n return Option::Some(uberblock);\n } else if uberblock.magic > 0 {\n println!(\"Unknown Magic: {:X}\", uberblock.magic as usize);\n }\n }\n\n Option::None\n }\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct DVAddr {\n pub vdev: u64,\n pub offset: u64,\n}\n\nimpl DVAddr {\n \/\/\/ Sector address is the offset plus two vdev labels and one boot block (4 MB, or 8192 sectors)\n pub fn sector(&self) -> u64 {\n self.offset + 0x2000\n }\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct BlockPtr {\n pub dvas: [DVAddr; 3],\n pub flags_size: u64,\n pub padding: [u64; 3],\n pub birth_txg: u64,\n pub fill_count: u64,\n pub checksum: [u64; 4],\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct Gang {\n pub bps: [BlockPtr; 3],\n pub padding: [u64; 14],\n pub magic: u64,\n pub checksum: u64,\n}\n\nimpl Gang {\n pub fn magic() -> u64 {\n return 0x117a0cb17ada1002;\n }\n}\n\n\/\/TODO: Find a way to remove all the 
to_string's\npub fn main() {\n console_title(&\"ZFS\".to_string());\n\n let red = [255, 127, 127, 255];\n let green = [127, 255, 127, 255];\n let blue = [127, 127, 255, 255];\n\n println!(\"Type open zfs.img to open the image file\");\n println!(\"This may take up to 30 seconds\");\n\n let mut zfs_option: Option<ZFS> = Option::None;\n\n while let Option::Some(line) = readln!() {\n let mut args: Vec<String> = Vec::new();\n for arg in line.split(' ') {\n args.push(arg.to_string());\n }\n\n if let Option::Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n let mut close = false;\n match zfs_option {\n Option::Some(ref mut zfs) => {\n if *command == \"uber\".to_string() {\n \/\/128 KB of ubers after 128 KB of other stuff\n let mut newest_uberblock: Option<Uberblock> = Option::None;\n for i in 0..128 {\n match Uberblock::from(&zfs.read(256 + i * 2, 2)) {\n Option::Some(uberblock) => {\n let mut newest = false;\n match newest_uberblock {\n Option::Some(previous) => {\n if uberblock.txg > previous.txg {\n newest = true;\n }\n }\n Option::None => newest = true,\n }\n\n if newest {\n newest_uberblock = Option::Some(uberblock);\n }\n }\n Option::None => (), \/\/Invalid uberblock\n }\n }\n\n match newest_uberblock {\n Option::Some(uberblock) => {\n println_color!(green, \"Newest Uberblock\");\n \/\/TODO: Do not use as usize\n println!(\"Magic: {:X}\", uberblock.magic as usize);\n println!(\"Version: {}\", uberblock.version as usize);\n println!(\"TXG: {}\", uberblock.txg as usize);\n println!(\"Timestamp: {}\", uberblock.timestamp as usize);\n println!(\"MOS: {}\",\n uberblock.rootbp.dvas[0].sector() as usize);\n }\n Option::None => println_color!(red, \"No valid uberblock found!\"),\n }\n } else if *command == \"list\".to_string() {\n println_color!(green, \"List volumes\");\n } else if *command == \"dump\".to_string() {\n match args.get(1) {\n Option::Some(arg) => {\n let sector = arg.to_num();\n println_color!(green, \"Dump sector: {}\", sector);\n\n let data = zfs.read(sector, 1);\n for i in 0..data.len() {\n if i % 32 == 0 {\n print!(\"\\n{:X}:\", i);\n }\n if let Option::Some(byte) = data.get(i) {\n print!(\" {:X}\", *byte);\n } else {\n println!(\" !\");\n }\n }\n print!(\"\\n\");\n }\n Option::None => println_color!(red, \"No sector specified!\"),\n }\n } else if *command == \"close\".to_string() {\n println_color!(red, \"Closing\");\n close = true;\n } else {\n println_color!(blue, \"Commands: uber list dump close\");\n }\n }\n Option::None => {\n if *command == \"open\".to_string() {\n match args.get(1) {\n Option::Some(arg) => {\n println_color!(green, \"Open: {}\", arg);\n zfs_option = Option::Some(ZFS::new(File::open(arg)));\n }\n Option::None => println_color!(red, \"No file specified!\"),\n }\n } else {\n println_color!(blue, \"Commands: open\");\n }\n }\n }\n if close {\n zfs_option = Option::None;\n }\n }\n }\n}\n<commit_msg>Prettied up ZFS::read<commit_after>extern crate redox;\n\n\/\/To use this, please install zfs-fuse\nuse redox::*;\n\nuse core::ptr;\n\nmod nvpair;\nmod nvstream;\nmod xdr;\n\npub struct ZFS {\n disk: File,\n}\n\nimpl ZFS {\n pub fn new(disk: File) -> Self {\n ZFS { disk: disk }\n }\n\n \/\/TODO: Error handling\n pub fn read(&mut self, start: usize, length: usize) -> Vec<u8> {\n let mut ret: Vec<u8> = vec![0; length*512];\n\n for sector in start..start + length {\n \/\/TODO: Check error\n self.disk.seek(Seek::Start(sector * 512));\n\n self.disk.read(&mut ret[sector*512..(sector+1)*512]);\n }\n\n return ret;\n }\n\n pub fn write(&mut self, block: usize, 
data: &[u8; 512]) {\n self.disk.seek(Seek::Start(block * 512));\n self.disk.write(data);\n }\n}\n\n#[repr(packed)]\npub struct VdevLabel {\n pub blank: [u8; 8 * 1024],\n pub boot_header: [u8; 8 * 1024],\n pub nv_pairs: [u8; 112 * 1024],\n pub uberblocks: [Uberblock; 128],\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct Uberblock {\n pub magic: u64,\n pub version: u64,\n pub txg: u64,\n pub guid_sum: u64,\n pub timestamp: u64,\n pub rootbp: BlockPtr,\n}\n\nimpl Uberblock {\n pub fn magic_little() -> u64 {\n return 0x0cb1ba00;\n }\n\n pub fn magic_big() -> u64 {\n return 0x00bab10c;\n }\n\n pub fn from(data: &Vec<u8>) -> Option<Self> {\n if data.len() >= 1024 {\n let uberblock = unsafe { ptr::read(data.as_ptr() as *const Uberblock) };\n if uberblock.magic == Uberblock::magic_little() {\n println!(\"Little Magic\");\n return Option::Some(uberblock);\n } else if uberblock.magic == Uberblock::magic_big() {\n println!(\"Big Magic\");\n return Option::Some(uberblock);\n } else if uberblock.magic > 0 {\n println!(\"Unknown Magic: {:X}\", uberblock.magic as usize);\n }\n }\n\n Option::None\n }\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct DVAddr {\n pub vdev: u64,\n pub offset: u64,\n}\n\nimpl DVAddr {\n \/\/\/ Sector address is the offset plus two vdev labels and one boot block (4 MB, or 8192 sectors)\n pub fn sector(&self) -> u64 {\n self.offset + 0x2000\n }\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct BlockPtr {\n pub dvas: [DVAddr; 3],\n pub flags_size: u64,\n pub padding: [u64; 3],\n pub birth_txg: u64,\n pub fill_count: u64,\n pub checksum: [u64; 4],\n}\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct Gang {\n pub bps: [BlockPtr; 3],\n pub padding: [u64; 14],\n pub magic: u64,\n pub checksum: u64,\n}\n\nimpl Gang {\n pub fn magic() -> u64 {\n return 0x117a0cb17ada1002;\n }\n}\n\n\/\/TODO: Find a way to remove all the to_string's\npub fn main() {\n console_title(&\"ZFS\".to_string());\n\n let red = [255, 127, 127, 255];\n let green = [127, 255, 127, 255];\n let blue = [127, 127, 255, 255];\n\n println!(\"Type open zfs.img to open the image file\");\n println!(\"This may take up to 30 seconds\");\n\n let mut zfs_option: Option<ZFS> = Option::None;\n\n while let Option::Some(line) = readln!() {\n let mut args: Vec<String> = Vec::new();\n for arg in line.split(' ') {\n args.push(arg.to_string());\n }\n\n if let Option::Some(command) = args.get(0) {\n println!(\"# {}\", line);\n\n let mut close = false;\n match zfs_option {\n Option::Some(ref mut zfs) => {\n if *command == \"uber\".to_string() {\n \/\/128 KB of ubers after 128 KB of other stuff\n let mut newest_uberblock: Option<Uberblock> = Option::None;\n for i in 0..128 {\n match Uberblock::from(&zfs.read(256 + i * 2, 2)) {\n Option::Some(uberblock) => {\n let mut newest = false;\n match newest_uberblock {\n Option::Some(previous) => {\n if uberblock.txg > previous.txg {\n newest = true;\n }\n }\n Option::None => newest = true,\n }\n\n if newest {\n newest_uberblock = Option::Some(uberblock);\n }\n }\n Option::None => (), \/\/Invalid uberblock\n }\n }\n\n match newest_uberblock {\n Option::Some(uberblock) => {\n println_color!(green, \"Newest Uberblock\");\n \/\/TODO: Do not use as usize\n println!(\"Magic: {:X}\", uberblock.magic as usize);\n println!(\"Version: {}\", uberblock.version as usize);\n println!(\"TXG: {}\", uberblock.txg as usize);\n println!(\"Timestamp: {}\", uberblock.timestamp as usize);\n println!(\"MOS: {}\",\n uberblock.rootbp.dvas[0].sector() as usize);\n }\n Option::None => 
println_color!(red, \"No valid uberblock found!\"),\n }\n } else if *command == \"list\".to_string() {\n println_color!(green, \"List volumes\");\n } else if *command == \"dump\".to_string() {\n match args.get(1) {\n Option::Some(arg) => {\n let sector = arg.to_num();\n println_color!(green, \"Dump sector: {}\", sector);\n\n let data = zfs.read(sector, 1);\n for i in 0..data.len() {\n if i % 32 == 0 {\n print!(\"\\n{:X}:\", i);\n }\n if let Option::Some(byte) = data.get(i) {\n print!(\" {:X}\", *byte);\n } else {\n println!(\" !\");\n }\n }\n print!(\"\\n\");\n }\n Option::None => println_color!(red, \"No sector specified!\"),\n }\n } else if *command == \"close\".to_string() {\n println_color!(red, \"Closing\");\n close = true;\n } else {\n println_color!(blue, \"Commands: uber list dump close\");\n }\n }\n Option::None => {\n if *command == \"open\".to_string() {\n match args.get(1) {\n Option::Some(arg) => {\n println_color!(green, \"Open: {}\", arg);\n zfs_option = Option::Some(ZFS::new(File::open(arg)));\n }\n Option::None => println_color!(red, \"No file specified!\"),\n }\n } else {\n println_color!(blue, \"Commands: open\");\n }\n }\n }\n if close {\n zfs_option = Option::None;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>o Best_shuffle entry w\/permutations and quadratic implementations o all tests passing o toml entry o rustfmt<commit_after>\/\/ http:\/\/rosettacode.org\/wiki\/Best_shuffle\n\n\/\/\n\nextern crate permutohedron;\nextern crate rand;\n\nuse std::cmp::{min, Ordering};\nuse std::env;\nuse rand::{thread_rng, Rng};\nuse std::str;\n\nconst WORDS: &'static [&'static str] = &[\"abracadabra\", \"seesaw\", \"elk\", \"grrrrrr\", \"up\", \"a\"];\n\n#[derive(Eq)]\nstruct Solution {\n original: String,\n shuffled: String,\n score: usize,\n}\n\n\/\/ Ordering trait implementations are only needed for the permutations method\nimpl PartialOrd for Solution {\n fn partial_cmp(&self, other: &Solution) -> Option<Ordering> {\n match (self.score, other.score) {\n (s, o) if s < o => Some(Ordering::Less),\n (s, o) if s > o => Some(Ordering::Greater),\n (s, o) if s == o => Some(Ordering::Equal),\n _ => None,\n }\n }\n}\n\n\nimpl PartialEq for Solution {\n fn eq(&self, other: &Solution) -> bool {\n match (self.score, other.score) {\n (s, o) if s == o => true,\n _ => false,\n }\n }\n}\n\nimpl Ord for Solution {\n fn cmp(&self, other: &Solution) -> Ordering {\n match (self.score, other.score) {\n (s, o) if s < o => Ordering::Less,\n (s, o) if s > o => Ordering::Greater,\n _ => Ordering::Equal,\n }\n }\n}\n\nfn _help() {\n println!(\"Usage: best_shuffle <word1> <word2> ...\");\n}\n\nfn main() {\n let args: Vec<String> = env::args().collect();\n let mut words: Vec<String> = vec![];\n\n match args.len() {\n 1 => {\n for w in WORDS.iter() {\n words.push(String::from(*w));\n }\n }\n _ => {\n for w in args.split_at(1).1 {\n words.push(w.clone());\n }\n }\n }\n\n let solutions = words.iter().map(|w| best_shuffle(w)).collect::<Vec<_>>();\n\n for s in solutions {\n println!(\"{}, {}, ({})\", s.original, s.shuffled, s.score);\n }\n}\n\n\/\/ Implementation iterating over all permutations\nfn _best_shuffle_perm(w: &String) -> Solution {\n let mut soln = Solution {\n original: w.clone(),\n shuffled: w.clone(),\n score: w.len(),\n };\n let w_bytes: Vec<u8> = w.clone().into_bytes();\n let mut permutocopy = w_bytes.clone();\n let mut permutations = permutohedron::Heap::new(&mut permutocopy);\n while let Some(p) = permutations.next_permutation() {\n \/\/ println!(\"testing permutation 
{:?}\", str::from_utf8(p).unwrap());\n let hamm = hamming(&w_bytes, p);\n soln = min(soln,\n Solution {\n original: w.clone(),\n shuffled: String::from(str::from_utf8(p).unwrap()),\n score: hamm,\n });\n \/\/ Accept the solution if score 0 found\n if hamm == 0 {\n break;\n }\n }\n soln\n}\n\n\/\/ Quadratic implementation\nfn best_shuffle(w: &String) -> Solution {\n let w_bytes: Vec<u8> = w.clone().into_bytes();\n let mut shuffled_bytes: Vec<u8> = w.clone().into_bytes();\n\n \/\/ Shuffle once\n let sh: &mut [u8] = shuffled_bytes.as_mut_slice();\n thread_rng().shuffle(sh);\n\n \/\/ Swap wherever it doesn't decrease the score\n for i in 0..sh.len() {\n for j in 0..sh.len() {\n if (i == j) | (sh[i] == w_bytes[j]) | (sh[j] == w_bytes[i]) | (sh[i] == sh[j]) {\n continue;\n }\n sh.swap(i, j);\n break;\n }\n }\n\n let res = String::from(str::from_utf8(sh).unwrap());\n let res_bytes: Vec<u8> = res.clone().into_bytes();\n Solution {\n original: w.clone(),\n shuffled: res,\n score: hamming(&w_bytes, &res_bytes),\n }\n}\n\nfn hamming(w0: &Vec<u8>, w1: &Vec<u8>) -> usize {\n w0.iter().zip(w1.iter()).filter(|z| z.0 == z.1).count()\n}\n\n#[cfg(test)]\nmod tests {\n use super::{best_shuffle, _best_shuffle_perm};\n\n #[test]\n fn test_best_shuffle_perm() {\n let mut s0 = _best_shuffle_perm(&String::from(\"seesaw\"));\n assert_eq!(s0.score, 0);\n\n s0 = _best_shuffle_perm(&String::from(\"elk\"));\n assert_eq!(s0.score, 0);\n\n s0 = _best_shuffle_perm(&String::from(\"grrrrrr\"));\n assert_eq!(s0.score, 5);\n\n s0 = _best_shuffle_perm(&String::from(\"up\"));\n assert_eq!(s0.shuffled, \"pu\");\n assert_eq!(s0.score, 0);\n\n s0 = _best_shuffle_perm(&String::from(\"a\"));\n assert_eq!(s0.shuffled, \"a\");\n assert_eq!(s0.score, 1);\n }\n\n #[test]\n fn test_best_shuffle() {\n let mut s0 = best_shuffle(&String::from(\"abracadabra\"));\n assert_eq!(s0.score, 0);\n\n s0 = best_shuffle(&String::from(\"seesaw\"));\n assert_eq!(s0.score, 0);\n\n s0 = best_shuffle(&String::from(\"elk\"));\n assert_eq!(s0.score, 0);\n\n s0 = best_shuffle(&String::from(\"grrrrrr\"));\n assert_eq!(s0.score, 5);\n\n s0 = best_shuffle(&String::from(\"up\"));\n assert_eq!(s0.shuffled, \"pu\");\n assert_eq!(s0.score, 0);\n\n s0 = best_shuffle(&String::from(\"a\"));\n assert_eq!(s0.shuffled, \"a\");\n assert_eq!(s0.score, 1);\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add true\/false built-in<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test that empty records don't parse<commit_after>\/\/ error-pattern:unexpected token: '}'\n\/\/ Issue #1200\n\ntype t = {};\n\nfn main() {\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>futures: add another example<commit_after>\/\/ Long and nested future chains can quickly result in large generic types.\n#![type_length_limit=\"16777216\"]\n\nuse env_logger;\nuse failure::Error;\nuse futures::{future::Future, Stream};\nuse lapin_futures as lapin;\nuse crate::lapin::channel::{BasicConsumeOptions, BasicPublishOptions, BasicProperties, ConfirmSelectOptions, QueueDeclareOptions};\nuse crate::lapin::client::ConnectionOptions;\nuse crate::lapin::types::FieldTable;\nuse log::{debug, info};\nuse tokio;\nuse tokio::net::TcpStream;\nuse tokio::runtime::Runtime;\n\nfn main() {\n env_logger::init();\n\n let addr = std::env::var(\"AMQP_ADDR\").unwrap_or_else(|_| \"127.0.0.1:5672\".to_string()).parse().unwrap();\n\n Runtime::new().unwrap().block_on_all(\n TcpStream::connect(&addr).map_err(Error::from).and_then(|stream| {\n lapin::client::Client::connect(stream, 
ConnectionOptions {\n frame_max: 65535,\n ..Default::default()\n }).map_err(Error::from)\n }).map(|(client, heartbeat)| {\n tokio::spawn(heartbeat.map_err(|e| eprintln!(\"heartbeat error: {}\", e)));\n client\n }).and_then(|client| {\n let publisher = client.create_confirm_channel(ConfirmSelectOptions::default()).and_then(|pub_channel| {\n let id = pub_channel.id;\n info!(\"created publisher channel with id: {}\", id);\n\n pub_channel.queue_declare(\"hello\", QueueDeclareOptions::default(), FieldTable::new()).and_then(move |_| {\n info!(\"publisher channel {} declared queue {}\", id, \"hello\");\n futures::stream::repeat(b\"hello\".to_vec()).for_each(move |msg| {\n pub_channel.basic_publish(\n \"\",\n \"hello\",\n msg,\n BasicPublishOptions::default(),\n BasicProperties::default().with_user_id(\"guest\".to_string()).with_reply_to(\"foobar\".to_string())\n ).map(|confirmation| {\n info!(\"publish got confirmation: {:?}\", confirmation)\n })\n })\n })\n });\n \n tokio::spawn(publisher.map_err(|_| ()));\n \n client.create_confirm_channel(ConfirmSelectOptions::default()).and_then(|sub_channel| {\n let id = sub_channel.id;\n info!(\"created subscriber channel with id: {}\", id);\n\n let ch = sub_channel.clone();\n\n sub_channel.queue_declare(\"hello\", QueueDeclareOptions::default(), FieldTable::new()).and_then(move |queue| {\n info!(\"subscriber channel {} declared queue {}\", id, \"hello\");\n sub_channel.basic_consume(&queue, \"my_consumer\", BasicConsumeOptions::default(), FieldTable::new())\n }).and_then(|stream| {\n info!(\"got consumer stream\");\n\n stream.for_each(move |message| {\n debug!(\"got message: {:?}\", message);\n info!(\"decoded message: {:?}\", std::str::from_utf8(&message.data).unwrap());\n ch.basic_ack(message.delivery_tag, false)\n })\n })\n }).map_err(Error::from)\n }).map_err(|err| eprintln!(\"An error occured: {}\", err))\n ).expect(\"runtime exited with failure\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test PartialEq multidispatch<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(default_type_params)]\n\n#[deriving(PartialEq)]\nstruct Bar;\nstruct Baz;\nstruct Foo;\nstruct Fu;\n\nimpl PartialEq for Baz { fn eq(&self, _: &Baz) -> bool { true } }\n\nimpl PartialEq<Fu> for Foo { fn eq(&self, _: &Fu) -> bool { true } }\nimpl PartialEq<Foo> for Fu { fn eq(&self, _: &Foo) -> bool { true } }\n\nimpl PartialEq<Bar> for Foo { fn eq(&self, _: &Bar) -> bool { false } }\nimpl PartialEq<Foo> for Bar { fn eq(&self, _: &Foo) -> bool { false } }\n\nfn main() {\n assert!(Bar != Foo);\n assert!(Foo != Bar);\n\n assert!(Bar == Bar);\n\n assert!(Baz == Baz);\n\n assert!(Foo == Fu);\n assert!(Fu == Foo);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add symbol table<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The GLFW-RS Developers. 
For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! Private functions and items to be used with the high-level library wrapper.\n\nuse std::cast;\nuse std::libc::*;\nuse std::local_data;\nuse std::str;\n\nuse super::*;\n\n\/\/ Global callbacks\n\nstatic error_fun_tls_key: local_data::Key<ErrorFun> = &local_data::Key;\n\npub extern \"C\" fn error_callback(error: c_int, description: *c_char) {\n unsafe {\n do local_data::get(error_fun_tls_key) |data| {\n do data.map |& &cb| {\n cb(error, str::raw::from_c_str(description))\n };\n }\n }\n}\n\npub fn set_error_fun(cbfun: ErrorFun, f: &fn(ffi::GLFWerrorfun) ) {\n local_data::set(error_fun_tls_key, cbfun);\n f(error_callback);\n}\n\nstatic monitor_fun_tls_key: local_data::Key<MonitorFun> = &local_data::Key;\n\npub extern \"C\" fn monitor_callback(monitor: *ffi::GLFWmonitor, event: c_int) {\n do local_data::get(monitor_fun_tls_key) |data| {\n do data.map |& &cb| {\n cb(&Monitor { ptr: monitor }, event)\n };\n }\n}\n\npub fn set_monitor_fun(cbfun: MonitorFun, f: &fn(ffi::GLFWmonitorfun) ) {\n local_data::set(monitor_fun_tls_key, cbfun);\n f(monitor_callback);\n}\n\n\n\/\/ External window callbacks\n\nunsafe fn chan_from_ptr(ptr: *c_void) -> &Chan<WindowEvent> { cast::transmute(ptr) }\n\nmacro_rules! 
window_callback(\n (fn $name:ident () => $event:ident) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow) {\n let chan = unsafe { chan_from_ptr(ffi::glfwGetWindowUserPointer(window)) };\n chan.send($event);\n }\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident($($arg_conv:expr),*)) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n let chan = unsafe { chan_from_ptr(ffi::glfwGetWindowUserPointer(window)) };\n chan.send($event( $( $arg_conv),* ));\n }\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident { $($fname:ident : $arg_conv:expr),* }) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n let chan = unsafe { chan_from_ptr(ffi::glfwGetWindowUserPointer(window)) };\n chan.send($event{ $( $fname : $arg_conv),* });\n }\n );\n)\n\nwindow_callback!(fn window_pos_callback(xpos: c_int, ypos: c_int) => Pos { xpos: xpos as int, ypos: ypos as int })\nwindow_callback!(fn window_size_callback(width: c_int, height: c_int) => Size { width: width as int, height: height as int })\nwindow_callback!(fn window_close_callback() => Close)\nwindow_callback!(fn window_refresh_callback() => Refresh)\nwindow_callback!(fn window_focus_callback(focused: c_int) => Focus(focused as bool))\nwindow_callback!(fn window_iconify_callback(iconified: c_int) => Iconify(iconified as bool))\nwindow_callback!(fn framebuffer_size_callback(width: c_int, height: c_int) => FrameBufferSize { width: width as int, height: height as int })\nwindow_callback!(fn mouse_button_callback(button: c_int, action: c_int, mods: c_int) => MouseButton{ button: button, action: action, mods: mods })\nwindow_callback!(fn cursor_pos_callback(xpos: c_double, ypos: c_double) => CursorPos { xpos: xpos as float, ypos: ypos as float })\nwindow_callback!(fn cursor_enter_callback(entered: c_int) => CursorEnter(entered as bool))\nwindow_callback!(fn scroll_callback(xpos: c_double, ypos: c_double) => Scroll { xpos: xpos as float, ypos: ypos as float })\nwindow_callback!(fn key_callback(key: c_int, scancode: c_int, action: c_int, mods: c_int) => Key { key: key, scancode: scancode, action: action, mods: mods })\nwindow_callback!(fn char_callback(character: c_uint) => Char(character as char))\n<commit_msg>Simplify extern callbacks<commit_after>\/\/ Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,\n\/\/ refer to the AUTHORS file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
Private functions and items to be used with the high-level library wrapper.\n\nuse std::cast;\nuse std::libc::*;\nuse std::local_data;\nuse std::str;\n\nuse super::*;\n\n\/\/ Global callbacks\n\nstatic error_fun_tls_key: local_data::Key<ErrorFun> = &local_data::Key;\n\npub extern \"C\" fn error_callback(error: c_int, description: *c_char) {\n unsafe {\n do local_data::get(error_fun_tls_key) |data| {\n do data.map |& &cb| {\n cb(error, str::raw::from_c_str(description))\n };\n }\n }\n}\n\npub fn set_error_fun(cbfun: ErrorFun, f: &fn(ffi::GLFWerrorfun) ) {\n local_data::set(error_fun_tls_key, cbfun);\n f(error_callback);\n}\n\nstatic monitor_fun_tls_key: local_data::Key<MonitorFun> = &local_data::Key;\n\npub extern \"C\" fn monitor_callback(monitor: *ffi::GLFWmonitor, event: c_int) {\n do local_data::get(monitor_fun_tls_key) |data| {\n do data.map |& &cb| {\n cb(&Monitor { ptr: monitor }, event)\n };\n }\n}\n\npub fn set_monitor_fun(cbfun: MonitorFun, f: &fn(ffi::GLFWmonitorfun) ) {\n local_data::set(monitor_fun_tls_key, cbfun);\n f(monitor_callback);\n}\n\n\/\/ External window callbacks\n#[fixed_stack_segment]\nunsafe fn get_chan(window: *ffi::GLFWwindow) -> &Chan<WindowEvent> {\n cast::transmute(ffi::glfwGetWindowUserPointer(window))\n}\n\nmacro_rules! window_callback(\n (fn $name:ident () => $event:ident) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow) {\n unsafe { get_chan(window).send($event); }\n }\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident($($arg_conv:expr),*)) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n unsafe { get_chan(window).send($event( $( $arg_conv),* )); }\n }\n );\n (fn $name:ident ($($ext_arg:ident: $ext_arg_ty:ty),*) => $event:ident { $($fname:ident : $arg_conv:expr),* }) => (\n pub extern \"C\" fn $name(window: *ffi::GLFWwindow $(, $ext_arg: $ext_arg_ty)*) {\n unsafe { get_chan(window).send($event{ $( $fname : $arg_conv),* }); }\n }\n );\n)\n\nwindow_callback!(fn window_pos_callback(xpos: c_int, ypos: c_int) => Pos { xpos: xpos as int, ypos: ypos as int })\nwindow_callback!(fn window_size_callback(width: c_int, height: c_int) => Size { width: width as int, height: height as int })\nwindow_callback!(fn window_close_callback() => Close)\nwindow_callback!(fn window_refresh_callback() => Refresh)\nwindow_callback!(fn window_focus_callback(focused: c_int) => Focus(focused as bool))\nwindow_callback!(fn window_iconify_callback(iconified: c_int) => Iconify(iconified as bool))\nwindow_callback!(fn framebuffer_size_callback(width: c_int, height: c_int) => FrameBufferSize { width: width as int, height: height as int })\nwindow_callback!(fn mouse_button_callback(button: c_int, action: c_int, mods: c_int) => MouseButton{ button: button, action: action, mods: mods })\nwindow_callback!(fn cursor_pos_callback(xpos: c_double, ypos: c_double) => CursorPos { xpos: xpos as float, ypos: ypos as float })\nwindow_callback!(fn cursor_enter_callback(entered: c_int) => CursorEnter(entered as bool))\nwindow_callback!(fn scroll_callback(xpos: c_double, ypos: c_double) => Scroll { xpos: xpos as float, ypos: ypos as float })\nwindow_callback!(fn key_callback(key: c_int, scancode: c_int, action: c_int, mods: c_int) => Key { key: key, scancode: scancode, action: action, mods: mods })\nwindow_callback!(fn char_callback(character: c_uint) => Char(character as char))\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor: fixes a typo in the app settings<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>Make sure that generics are internalized in executables even with -Zshare-generics<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -C no-prepopulate-passes -Zshare-generics=yes\n\n\/\/ Check that local generics are internalized if they are in the same CGU\n\n\/\/ CHECK: define internal {{.*}} @_ZN34local_generics_in_exe_internalized3foo{{.*}}\npub fn foo<T>(x: T, y: T) -> (T, T) {\n (x, y)\n}\n\nfn main() {\n let _ = foo(0u8, 1u8);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added a file I forgot in previous commit<commit_after>\/\/ Copyright 2015 Adrien Champion. See the COPYRIGHT file at the top-level\n\/\/ directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse term::{ Term, TermSet } ;\n\nuse Domain ;\n\n\/** A chain is an increasing-ordered list containing values and\nrepresentative \/ equivalence class pairs.\n\nIt is ordered on the values. *\/\n#[derive(PartialEq, Eq, Clone)]\npub enum Chain< Val: Domain, Info: PartialEq + Eq + Clone > {\n \/** Empty chain. *\/\n Nil,\n \/** Chain constructor. *\/\n Cons(Val, Term, Info, Box< Chain<Val, Info> >),\n}\nimpl<Val: Domain, Info: PartialEq + Eq + Clone> Chain<Val, Info> {\n \/** Empty chain. *\/\n #[inline]\n pub fn nil() -> Self { Chain::Nil }\n \/** Chain constructor. *\/\n #[inline]\n pub fn cons(self, v: Val, t: Term, s: Info) -> Self {\n Chain::Cons(v, t, s, Box::new(self))\n }\n \/** Checks if a chain is empty. *\/\n #[inline]\n pub fn is_empty(& self) -> bool { * self == Chain::Nil }\n \/** Returns the top value of a chain, if any. *\/\n #[inline]\n pub fn top_value(& self) -> Option<(Val, Term)> {\n use self::Chain::* ;\n match * self {\n Cons(ref v, ref rep, _, _) => Some( (v.clone(), rep.clone()) ),\n Nil => None,\n }\n }\n\n \/** Returns the longest subchain of a chain the values of which are\n all greater than or equal to some value, and the rest of the chain.\n\n First subchain is a vector of representatives and is sorted in **increasing\n order** on their value (which have been removed at this point).\n The second subchain is an actual `Chain` and is still sorted in **decreasing\n order**. *\/\n pub fn split_at(mut self, value: & Val) -> (Vec<Term>, Self) {\n use self::Chain::* ;\n let mut res = Vec::with_capacity(3) ;\n loop {\n if let Cons(val, rep, set, tail) = self {\n if value <= & val {\n res.push(rep) ;\n self = * tail\n } else {\n \/\/ We have `val < value`, stop here.\n self = Cons(val, rep, set, tail) ;\n break\n }\n } else {\n \/\/ Chain is empty, we done.\n break\n }\n }\n (res, self)\n }\n\n \/** Reverses the first chain and appends it to the second one. 
*\/\n #[inline]\n pub fn rev_append(mut self, mut that: Self) -> Self {\n use self::Chain::* ;\n while let Cons(val, term, set, tail) = self {\n that = Cons( val, term, set, Box::new(that) ) ;\n self = * tail\n }\n that\n }\n \/** Reverses a chain. *\/\n #[inline]\n pub fn rev(self) -> Self {\n self.rev_append(Chain::Nil)\n }\n}\nimpl<Val: Domain> Chain<Val, TermSet> {\n \/** Maps to `Chain<Val, ()>`, calling a function on each element. *\/\n pub fn map_to_unit<\n Input, F: Fn(& mut Input, Val, Term, TermSet)\n >(mut self, f: F, i: & mut Input) -> Chain<Val, ()> {\n use self::Chain::* ;\n let mut res = Nil ;\n while let Cons(val, rep, set, tail) = self {\n self = * tail ;\n f(i, val.clone(), rep.clone(), set) ;\n res = res.cons(val, rep, ())\n }\n res.rev()\n }\n\n \/** Inserts a term in a chain given its value. *\/\n pub fn insert(mut self, v: Val, t: Term) -> Result<Self, String> {\n use self::Chain::* ;\n use std::cmp::Ordering::* ;\n let mut prefix = Nil ;\n loop {\n if let Cons(val, term, mut set, tail) = self {\n match val.cmp(& v) {\n Less => return Ok(\n \/\/ Insert term found as a new node in the chain.\n prefix.rev_append(\n Cons(val, term, set, tail).cons(v, t, TermSet::new())\n )\n ),\n Equal => {\n \/\/ Insert term in the set of this node.\n debug_assert!( ! set.contains(& t) ) ;\n let _ = set.insert(t) ;\n return Ok( prefix.rev_append( Cons(val, term, set, tail) ) )\n },\n Greater => {\n \/\/ Need to go deeper, iterating.\n prefix = prefix.cons(val, term, set) ;\n self = * tail\n },\n }\n } else {\n \/\/ Reached end of list, inserting.\n return Ok(\n prefix.rev_append( Nil.cons(v, t, TermSet::new()) )\n )\n }\n }\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add is_pinned<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Invalid error type name Fixed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for alias shadowing external subcommand<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Push Opcode<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add simple_draw example for testing and demonstrating the new Draw API<commit_after>extern crate nannou;\n\nuse nannou::prelude::*;\n\nfn main() {\n nannou::run(model, event, view);\n}\n\nstruct Model {\n window: WindowId,\n}\n\nfn model(app: &App) -> Model {\n let window = app.new_window().build().unwrap();\n Model { window }\n}\n\nfn event(_app: &App, model: Model, _event: Event) -> Model {\n model\n}\n\nfn view(app: &App, model: &Model, frame: Frame) -> Frame {\n \/\/ Begin drawing \n let draw = app.draw(model.window).unwrap();\n\n \/\/ Clear the background to blue.\n draw.background().rgb(0.0, 0.0, 1.0);\n\n \/\/ Short-hand helper functions.\n draw.ellipse()\n .x_y(app.mouse.x, app.mouse.y)\n .w_h(app.window.width * 0.5, app.window.height * 0.5)\n .rgb(1.0, 0.0, 0.0);\n\n \/\/ Write the result of our drawing to the window's OpenGL frame.\n draw.to_frame(app, &frame).unwrap();\n\n \/\/ Return the drawn frame.\n frame\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #54582<commit_after>\/\/ run-pass\n\npub trait Stage: Sync {}\n\npub enum Enum {\n A,\n B,\n}\n\nimpl Stage for Enum {}\n\npub static ARRAY: [(&Stage, &str); 1] = [\n (&Enum::A, \"\"),\n];\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix fomatting with cargofmt<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Graph algorithms.\n\/\/!\n\/\/! It is a goal to gradually migrate the algorithms to be based on graph traits\n\/\/! 
so that they are generally applicable. For now, most of these use only the\n\/\/! **Graph** type.\n\nuse std::collections::BinaryHeap;\nuse std::borrow::{Borrow};\n\nuse super::{\n Graph,\n Directed,\n Undirected,\n EdgeDirection,\n EdgeType,\n Outgoing,\n Incoming,\n Dfs,\n};\nuse scored::MinScored;\nuse super::visit::{\n Visitable,\n VisitMap,\n};\nuse super::unionfind::UnionFind;\nuse super::graph::{\n NodeIndex,\n IndexType,\n};\n\npub use super::isomorphism::{\n is_isomorphic,\n is_isomorphic_matching,\n};\npub use super::dijkstra::dijkstra;\n\n\/\/\/ Return `true` if the input graph contains a cycle.\n\/\/\/\n\/\/\/ Always treats the input graph as if undirected.\npub fn is_cyclic_undirected<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> bool\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let mut edge_sets = UnionFind::new(g.node_count());\n for edge in g.raw_edges() {\n let (a, b) = (edge.source(), edge.target());\n\n \/\/ union the two vertices of the edge\n \/\/ -- if they were already the same, then we have a cycle\n if !edge_sets.union(a.index(), b.index()) {\n return true\n }\n }\n false\n}\n\n\/\/\/ **Deprecated: Renamed to `is_cyclic_undirected`.**\npub fn is_cyclic<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> bool\n where Ty: EdgeType,\n Ix: IndexType,\n{\n is_cyclic_undirected(g)\n}\n\n\/\/\/ Perform a topological sort of a directed graph `g`.\n\/\/\/\n\/\/\/ Visit each node in order (if it is part of a topological order).\n\/\/\/\n\/\/\/ You can pass `g` as either **&Graph** or **&mut Graph**, and it\n\/\/\/ will be passed on to the visitor closure.\n#[inline]\nfn toposort_generic<N, E, Ix, G, F>(mut g: G, mut visit: F)\n where Ix: IndexType,\n G: Borrow<Graph<N, E, Directed, Ix>>,\n F: FnMut(&mut G, NodeIndex<Ix>),\n{\n let mut ordered = g.borrow().visit_map();\n let mut tovisit = Vec::new();\n\n \/\/ find all initial nodes\n tovisit.extend(g.borrow().externals(Incoming));\n\n \/\/ Take an unvisited element and find which of its neighbors are next\n while let Some(nix) = tovisit.pop() {\n if ordered.is_visited(&nix) {\n continue;\n }\n visit(&mut g, nix);\n ordered.visit(nix);\n for neigh in g.borrow().neighbors_directed(nix, Outgoing) {\n \/\/ Look at each neighbor, and those that only have incoming edges\n \/\/ from the already ordered list, they are the next to visit.\n if g.borrow().neighbors_directed(neigh, Incoming).all(|b| ordered.is_visited(&b)) {\n tovisit.push(neigh);\n }\n }\n }\n}\n\n\/\/\/ Return `true` if the input directed graph contains a cycle.\n\/\/\/\n\/\/\/ Using the topological sort algorithm.\npub fn is_cyclic_directed<N, E, Ix>(g: &Graph<N, E, Directed, Ix>) -> bool\n where Ix: IndexType,\n{\n let mut n_ordered = 0;\n toposort_generic(g, |_, _| n_ordered += 1);\n n_ordered != g.node_count()\n}\n\n\/\/\/ Perform a topological sort of a directed graph.\n\/\/\/\n\/\/\/ Return a vector of nodes in topological order: each node is ordered\n\/\/\/ before its successors.\n\/\/\/\n\/\/\/ If the returned vec contains less than all the nodes of the graph, then\n\/\/\/ the graph was cyclic.\npub fn toposort<N, E, Ix>(g: &Graph<N, E, Directed, Ix>) -> Vec<NodeIndex<Ix>>\n where Ix: IndexType,\n{\n let mut order = Vec::with_capacity(g.node_count());\n toposort_generic(g, |_, ix| order.push(ix));\n order\n}\n\n\/\/\/ Compute the *strongly connected components* using Kosaraju's algorithm.\n\/\/\/\n\/\/\/ Return a vector where each element is an scc.\n\/\/\/\n\/\/\/ For an undirected graph, the sccs are simply the connected components.\npub fn scc<N, E, Ty, Ix>(g: &Graph<N, E, 
Ty, Ix>) -> Vec<Vec<NodeIndex<Ix>>>\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let mut dfs = Dfs::empty(g);\n\n \/\/ First phase, reverse dfs pass, compute finishing times.\n \/\/ http:\/\/stackoverflow.com\/a\/26780899\/161659\n let mut finished = g.visit_map();\n let mut finish_order = Vec::new();\n for i in 0..g.node_count() {\n let nindex = NodeIndex::new(i);\n if dfs.discovered.is_visited(&nindex) {\n continue\n }\n\n dfs.stack.push(nindex);\n while let Some(&nx) = dfs.stack.last() {\n if dfs.discovered.visit(nx) {\n \/\/ First time visiting `nx`: Push neighbors, don't pop `nx`\n for succ in g.neighbors_directed(nx.clone(), EdgeDirection::Incoming) {\n if !dfs.discovered.is_visited(&succ) {\n dfs.stack.push(succ);\n }\n }\n } else {\n dfs.stack.pop();\n if finished.visit(nx) {\n \/\/ Second time: All reachable nodes must have been finished\n finish_order.push(nx);\n }\n }\n }\n }\n\n dfs.discovered.clear();\n let mut sccs = Vec::new();\n\n \/\/ Second phase\n \/\/ Process in decreasing finishing time order\n for &nindex in finish_order.iter().rev() {\n if dfs.discovered.is_visited(&nindex) {\n continue;\n }\n \/\/ Move to the leader node.\n dfs.move_to(nindex);\n \/\/let leader = nindex;\n let mut scc = Vec::new();\n while let Some(nx) = dfs.next(g) {\n scc.push(nx);\n }\n sccs.push(scc);\n }\n sccs\n}\n\n\/\/\/ Condense every strongly connected component into a single node and return the result.\n\/\/\/\n\/\/\/ If `make_acyclic` is true, self-loops and multi edges are ignored, guaranteeing that\n\/\/\/ the output is acyclic.\npub fn condensation<N, E, Ty, Ix>(g: Graph<N, E, Ty, Ix>, make_acyclic: bool) -> Graph<Vec<N>, E, Ty, Ix>\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let sccs = scc(&g);\n let mut condensed: Graph<Vec<N>, E, Ty, Ix> = Graph::with_capacity(sccs.len(), g.edge_count());\n\n \/\/ Build a map from old indices to new ones.\n let mut node_map = vec![NodeIndex::end(); g.node_count()];\n for comp in sccs {\n let new_nix = condensed.add_node(Vec::new());\n for nix in comp {\n node_map[nix.index()] = new_nix;\n }\n }\n\n \/\/ Consume nodes and edges of the old graph and insert them into the new one.\n let (nodes, edges) = g.destructure();\n for (nix, node) in nodes.into_iter().enumerate() {\n condensed.node_weight_mut(node_map[nix]).unwrap().push(node.weight);\n }\n for edge in edges {\n let source = node_map[edge.source().index()];\n let target = node_map[edge.target().index()];\n if make_acyclic {\n if source != target {\n condensed.update_edge(source, target, edge.weight);\n }\n } else {\n condensed.add_edge(source, target, edge.weight);\n }\n }\n condensed\n}\n\n\/\/\/ Return the number of connected components of the graph.\n\/\/\/\n\/\/\/ For a directed graph, this is the *weakly* connected components.\npub fn connected_components<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> usize\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let mut vertex_sets = UnionFind::new(g.node_count());\n for edge in g.raw_edges() {\n let (a, b) = (edge.source(), edge.target());\n\n \/\/ union the two vertices of the edge\n vertex_sets.union(a.index(), b.index());\n }\n let mut labels = vertex_sets.into_labeling();\n labels.sort();\n labels.dedup();\n labels.len()\n}\n\n\n\/\/\/ Compute a *minimum spanning tree* of a graph.\n\/\/\/\n\/\/\/ Treat the input graph as undirected.\n\/\/\/\n\/\/\/ Using Kruskal's algorithm with runtime **O(|E| log |E|)**. We actually\n\/\/\/ return a minimum spanning forest, i.e. 
a minimum spanning tree for each connected\n\/\/\/ component of the graph.\n\/\/\/\n\/\/\/ The resulting graph has all the vertices of the input graph (with identical node indices),\n\/\/\/ and **|V| - c** edges, where **c** is the number of connected components in `g`.\npub fn min_spanning_tree<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>)\n -> Graph<N, E, Undirected, Ix>\n where N: Clone,\n E: Clone + PartialOrd,\n Ty: EdgeType,\n Ix: IndexType,\n{\n if g.node_count() == 0 {\n return Graph::with_capacity(0, 0)\n }\n\n \/\/ Create a mst skeleton by copying all nodes\n let mut mst = Graph::with_capacity(g.node_count(), g.node_count() - 1);\n for node in g.raw_nodes() {\n mst.add_node(node.weight.clone());\n }\n\n \/\/ Initially each vertex is its own disjoint subgraph, track the connectedness\n \/\/ of the pre-MST with a union & find datastructure.\n let mut subgraphs = UnionFind::new(g.node_count());\n\n let mut sort_edges = BinaryHeap::with_capacity(g.edge_count());\n for edge in g.raw_edges() {\n sort_edges.push(MinScored(edge.weight.clone(), (edge.source(), edge.target())));\n }\n\n \/\/ Kruskal's algorithm.\n \/\/ Algorithm is this:\n \/\/\n \/\/ 1. Create a pre-MST with all the vertices and no edges.\n \/\/ 2. Repeat:\n \/\/\n \/\/ a. Remove the shortest edge from the original graph.\n \/\/ b. If the edge connects two disjoint trees in the pre-MST,\n \/\/ add the edge.\n while let Some(MinScored(score, (a, b))) = sort_edges.pop() {\n \/\/ check if the edge would connect two disjoint parts\n if subgraphs.union(a.index(), b.index()) {\n mst.add_edge(a, b, score);\n }\n }\n\n debug_assert!(mst.node_count() == g.node_count());\n debug_assert!(mst.edge_count() < g.node_count());\n mst\n}\n<commit_msg>condensation: Use indexing notation<commit_after>\/\/! Graph algorithms.\n\/\/!\n\/\/! It is a goal to gradually migrate the algorithms to be based on graph traits\n\/\/! so that they are generally applicable. For now, most of these use only the\n\/\/! 
**Graph** type.\n\nuse std::collections::BinaryHeap;\nuse std::borrow::{Borrow};\n\nuse super::{\n Graph,\n Directed,\n Undirected,\n EdgeDirection,\n EdgeType,\n Outgoing,\n Incoming,\n Dfs,\n};\nuse scored::MinScored;\nuse super::visit::{\n Visitable,\n VisitMap,\n};\nuse super::unionfind::UnionFind;\nuse super::graph::{\n NodeIndex,\n IndexType,\n};\n\npub use super::isomorphism::{\n is_isomorphic,\n is_isomorphic_matching,\n};\npub use super::dijkstra::dijkstra;\n\n\/\/\/ Return `true` if the input graph contains a cycle.\n\/\/\/\n\/\/\/ Always treats the input graph as if undirected.\npub fn is_cyclic_undirected<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> bool\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let mut edge_sets = UnionFind::new(g.node_count());\n for edge in g.raw_edges() {\n let (a, b) = (edge.source(), edge.target());\n\n \/\/ union the two vertices of the edge\n \/\/ -- if they were already the same, then we have a cycle\n if !edge_sets.union(a.index(), b.index()) {\n return true\n }\n }\n false\n}\n\n\/\/\/ **Deprecated: Renamed to `is_cyclic_undirected`.**\npub fn is_cyclic<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> bool\n where Ty: EdgeType,\n Ix: IndexType,\n{\n is_cyclic_undirected(g)\n}\n\n\/\/\/ Perform a topological sort of a directed graph `g`.\n\/\/\/\n\/\/\/ Visit each node in order (if it is part of a topological order).\n\/\/\/\n\/\/\/ You can pass `g` as either **&Graph** or **&mut Graph**, and it\n\/\/\/ will be passed on to the visitor closure.\n#[inline]\nfn toposort_generic<N, E, Ix, G, F>(mut g: G, mut visit: F)\n where Ix: IndexType,\n G: Borrow<Graph<N, E, Directed, Ix>>,\n F: FnMut(&mut G, NodeIndex<Ix>),\n{\n let mut ordered = g.borrow().visit_map();\n let mut tovisit = Vec::new();\n\n \/\/ find all initial nodes\n tovisit.extend(g.borrow().externals(Incoming));\n\n \/\/ Take an unvisited element and find which of its neighbors are next\n while let Some(nix) = tovisit.pop() {\n if ordered.is_visited(&nix) {\n continue;\n }\n visit(&mut g, nix);\n ordered.visit(nix);\n for neigh in g.borrow().neighbors_directed(nix, Outgoing) {\n \/\/ Look at each neighbor, and those that only have incoming edges\n \/\/ from the already ordered list, they are the next to visit.\n if g.borrow().neighbors_directed(neigh, Incoming).all(|b| ordered.is_visited(&b)) {\n tovisit.push(neigh);\n }\n }\n }\n}\n\n\/\/\/ Return `true` if the input directed graph contains a cycle.\n\/\/\/\n\/\/\/ Using the topological sort algorithm.\npub fn is_cyclic_directed<N, E, Ix>(g: &Graph<N, E, Directed, Ix>) -> bool\n where Ix: IndexType,\n{\n let mut n_ordered = 0;\n toposort_generic(g, |_, _| n_ordered += 1);\n n_ordered != g.node_count()\n}\n\n\/\/\/ Perform a topological sort of a directed graph.\n\/\/\/\n\/\/\/ Return a vector of nodes in topological order: each node is ordered\n\/\/\/ before its successors.\n\/\/\/\n\/\/\/ If the returned vec contains less than all the nodes of the graph, then\n\/\/\/ the graph was cyclic.\npub fn toposort<N, E, Ix>(g: &Graph<N, E, Directed, Ix>) -> Vec<NodeIndex<Ix>>\n where Ix: IndexType,\n{\n let mut order = Vec::with_capacity(g.node_count());\n toposort_generic(g, |_, ix| order.push(ix));\n order\n}\n\n\/\/\/ Compute the *strongly connected components* using Kosaraju's algorithm.\n\/\/\/\n\/\/\/ Return a vector where each element is an scc.\n\/\/\/\n\/\/\/ For an undirected graph, the sccs are simply the connected components.\npub fn scc<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> Vec<Vec<NodeIndex<Ix>>>\n where Ty: EdgeType,\n Ix: IndexType,\n{\n 
let mut dfs = Dfs::empty(g);\n\n \/\/ First phase, reverse dfs pass, compute finishing times.\n \/\/ http:\/\/stackoverflow.com\/a\/26780899\/161659\n let mut finished = g.visit_map();\n let mut finish_order = Vec::new();\n for i in 0..g.node_count() {\n let nindex = NodeIndex::new(i);\n if dfs.discovered.is_visited(&nindex) {\n continue\n }\n\n dfs.stack.push(nindex);\n while let Some(&nx) = dfs.stack.last() {\n if dfs.discovered.visit(nx) {\n \/\/ First time visiting `nx`: Push neighbors, don't pop `nx`\n for succ in g.neighbors_directed(nx.clone(), EdgeDirection::Incoming) {\n if !dfs.discovered.is_visited(&succ) {\n dfs.stack.push(succ);\n }\n }\n } else {\n dfs.stack.pop();\n if finished.visit(nx) {\n \/\/ Second time: All reachable nodes must have been finished\n finish_order.push(nx);\n }\n }\n }\n }\n\n dfs.discovered.clear();\n let mut sccs = Vec::new();\n\n \/\/ Second phase\n \/\/ Process in decreasing finishing time order\n for &nindex in finish_order.iter().rev() {\n if dfs.discovered.is_visited(&nindex) {\n continue;\n }\n \/\/ Move to the leader node.\n dfs.move_to(nindex);\n \/\/let leader = nindex;\n let mut scc = Vec::new();\n while let Some(nx) = dfs.next(g) {\n scc.push(nx);\n }\n sccs.push(scc);\n }\n sccs\n}\n\n\/\/\/ Condense every strongly connected component into a single node and return the result.\n\/\/\/\n\/\/\/ If `make_acyclic` is true, self-loops and multi edges are ignored, guaranteeing that\n\/\/\/ the output is acyclic.\npub fn condensation<N, E, Ty, Ix>(g: Graph<N, E, Ty, Ix>, make_acyclic: bool) -> Graph<Vec<N>, E, Ty, Ix>\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let sccs = scc(&g);\n let mut condensed: Graph<Vec<N>, E, Ty, Ix> = Graph::with_capacity(sccs.len(), g.edge_count());\n\n \/\/ Build a map from old indices to new ones.\n let mut node_map = vec![NodeIndex::end(); g.node_count()];\n for comp in sccs {\n let new_nix = condensed.add_node(Vec::new());\n for nix in comp {\n node_map[nix.index()] = new_nix;\n }\n }\n\n \/\/ Consume nodes and edges of the old graph and insert them into the new one.\n let (nodes, edges) = g.destructure();\n for (nix, node) in nodes.into_iter().enumerate() {\n condensed[node_map[nix]].push(node.weight);\n }\n for edge in edges {\n let source = node_map[edge.source().index()];\n let target = node_map[edge.target().index()];\n if make_acyclic {\n if source != target {\n condensed.update_edge(source, target, edge.weight);\n }\n } else {\n condensed.add_edge(source, target, edge.weight);\n }\n }\n condensed\n}\n\n\/\/\/ Return the number of connected components of the graph.\n\/\/\/\n\/\/\/ For a directed graph, this is the *weakly* connected components.\npub fn connected_components<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> usize\n where Ty: EdgeType,\n Ix: IndexType,\n{\n let mut vertex_sets = UnionFind::new(g.node_count());\n for edge in g.raw_edges() {\n let (a, b) = (edge.source(), edge.target());\n\n \/\/ union the two vertices of the edge\n vertex_sets.union(a.index(), b.index());\n }\n let mut labels = vertex_sets.into_labeling();\n labels.sort();\n labels.dedup();\n labels.len()\n}\n\n\n\/\/\/ Compute a *minimum spanning tree* of a graph.\n\/\/\/\n\/\/\/ Treat the input graph as undirected.\n\/\/\/\n\/\/\/ Using Kruskal's algorithm with runtime **O(|E| log |E|)**. We actually\n\/\/\/ return a minimum spanning forest, i.e. 
a minimum spanning tree for each connected\n\/\/\/ component of the graph.\n\/\/\/\n\/\/\/ The resulting graph has all the vertices of the input graph (with identical node indices),\n\/\/\/ and **|V| - c** edges, where **c** is the number of connected components in `g`.\npub fn min_spanning_tree<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>)\n -> Graph<N, E, Undirected, Ix>\n where N: Clone,\n E: Clone + PartialOrd,\n Ty: EdgeType,\n Ix: IndexType,\n{\n if g.node_count() == 0 {\n return Graph::with_capacity(0, 0)\n }\n\n \/\/ Create a mst skeleton by copying all nodes\n let mut mst = Graph::with_capacity(g.node_count(), g.node_count() - 1);\n for node in g.raw_nodes() {\n mst.add_node(node.weight.clone());\n }\n\n \/\/ Initially each vertex is its own disjoint subgraph, track the connectedness\n \/\/ of the pre-MST with a union & find datastructure.\n let mut subgraphs = UnionFind::new(g.node_count());\n\n let mut sort_edges = BinaryHeap::with_capacity(g.edge_count());\n for edge in g.raw_edges() {\n sort_edges.push(MinScored(edge.weight.clone(), (edge.source(), edge.target())));\n }\n\n \/\/ Kruskal's algorithm.\n \/\/ Algorithm is this:\n \/\/\n \/\/ 1. Create a pre-MST with all the vertices and no edges.\n \/\/ 2. Repeat:\n \/\/\n \/\/ a. Remove the shortest edge from the original graph.\n \/\/ b. If the edge connects two disjoint trees in the pre-MST,\n \/\/ add the edge.\n while let Some(MinScored(score, (a, b))) = sort_edges.pop() {\n \/\/ check if the edge would connect two disjoint parts\n if subgraphs.union(a.index(), b.index()) {\n mst.add_edge(a, b, score);\n }\n }\n\n debug_assert!(mst.node_count() == g.node_count());\n debug_assert!(mst.edge_count() < g.node_count());\n mst\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt;\nuse std::borrow::Cow;\n\nuse escape::Escaped;\n\n\/\/\/ An [HTML attribute](https:\/\/www.w3.org\/TR\/html\/syntax.html#attributes-0).\n\/\/\/\n\/\/\/ The name for the attribute will not be validated, you must ensure it meets\n\/\/\/ the requirements specified in the spec yourself.\n\/\/\/\n\/\/\/ The value for the attribute will be escaped automatically. 
If it is an\n\/\/\/ empty string then the attribute will be written with the 'Empty attribute\n\/\/\/ syntax'.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"id\", \"foo\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"id=\\\"foo\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"id\", \"bar & baz\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"id=\\\"bar & baz\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"invalid=id\", \"foo\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"invalid=id=\\\"foo\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"checked\", \"\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"checked\");\n\/\/\/ ```\n#[derive(Clone, Debug, PartialEq, Eq)]\npub struct Attribute<'a> {\n pub name: Cow<'a, str>,\n pub value: Cow<'a, str>,\n}\n\nimpl<'a> Attribute<'a> {\n \/\/\/ Create an attribute, useful to avoid having to convert strings to\n \/\/\/ `Cow<str>` yourself.\n \/\/\/\n \/\/\/ Generally this shouldn't be used directly by end users, it's likely\n \/\/\/ that there are builder APIs or macros available that make attribute\n \/\/\/ construction easier, for example the modification methods on\n \/\/\/ [`AttributeList`](struct.AttributeList.html#methods) or the\n \/\/\/ [`attrs!`](macro.attrs!.html) macro.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use std::borrow::Cow;\n \/\/\/ use hamlet::attr::Attribute;\n \/\/\/\n \/\/\/ let foo = \"foo\".to_owned();\n \/\/\/ let foo2 = foo.clone();\n \/\/\/ assert_eq!(\n \/\/\/ Attribute::new(\"id\", foo),\n \/\/\/ Attribute {\n \/\/\/ name: Cow::Borrowed(\"id\"),\n \/\/\/ value: Cow::Owned(foo2),\n \/\/\/ });\n \/\/\/ ```\n pub fn new<N, V>(name: N, value: V) -> Attribute<'a>\n where N: Into<Cow<'a, str>>,\n V: Into<Cow<'a, str>>\n {\n Attribute {\n name: name.into(),\n value: value.into(),\n }\n }\n}\n\nimpl<'a> fmt::Display for Attribute<'a> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if self.value == \"\" {\n write!(f, \"{}\", self.name)\n } else {\n write!(f, \"{}=\\\"{}\\\"\", self.name.as_ref(), Escaped(&self.value))\n }\n }\n}\n\n#[derive(Clone, Debug)]\n\/\/\/ A list of [`Attribute`](.\/struct.Attribute.html)s.\n\/\/\/\n\/\/\/ This is stored as a plain list instead of a set as in most cases it will\n\/\/\/ be a small collection over which linear search will be more efficient.\npub struct AttributeList<'a>(Cow<'a, [Attribute<'a>]>);\n\nimpl<'a> AttributeList<'a> {\n \/\/\/ Return an empty `AttributeList`\n pub fn empty() -> AttributeList<'a> {\n AttributeList(Cow::Borrowed(&[]))\n }\n\n \/\/\/ Note that this does not check for duplicate attribute names. 
Generally,\n \/\/\/ end users are not expected to call this method, and instead use\n \/\/\/ high-level builder APIs or macros available to make construction easier,\n \/\/\/ such as the provided [`attrs!`](.\/macro.attrs!.html) macro.\n pub fn from_vec(attrs: Vec<Attribute<'a>>) -> AttributeList<'a> {\n AttributeList(Cow::Owned(attrs))\n }\n\n pub fn into_vec(self) -> Vec<Attribute<'a>> {\n self.0.into_owned()\n }\n\n \/\/\/ Try and get the value of an attribute.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let attrs = attrs!(id = \"foo\");\n \/\/\/ assert_eq!(attrs.get(\"id\"), Some(\"foo\"));\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let attrs = attrs!(id = \"foo\");\n \/\/\/ assert_eq!(attrs.get(\"class\"), None);\n \/\/\/ # }\n \/\/\/ ```\n pub fn get<S>(&self, name: S) -> Option<&str>\n where S: AsRef<str>\n {\n self.0.iter().find(|attr| attr.name == name.as_ref()).map(|a| a.value.as_ref())\n }\n\n \/\/\/ Unconditionally set an attribute to a value. If the attribute already\n \/\/\/ exists in the set will update its value, otherwise will add a new\n \/\/\/ attribute to the set.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ attrs.set(\"id\", \"bar\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.get(\"id\"), Some(\"bar\"));\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ attrs.set(\"class\", \"bar\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.get(\"class\"), Some(\"bar\"));\n \/\/\/ # }\n \/\/\/ ```\n pub fn set<N, V>(&mut self, name: N, value: V)\n where N: Into<Cow<'a, str>>,\n V: Into<Cow<'a, str>>\n {\n let (name, value) = (name.into(), value.into());\n let attrs = self.0.to_mut();\n\n if let Some(pos) = attrs.iter().position(|attr| attr.name == name) {\n attrs[pos].value = value;\n } else {\n attrs.push(Attribute::new(name, value));\n }\n }\n\n \/\/\/ Removes and returns the attribute it if there was one.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.remove(\"id\").map(|a| a.value).unwrap().as_ref(), \"foo\");\n \/\/\/ # }\n \/\/\/ ```\n pub fn remove<S>(&mut self, name: S) -> Option<Attribute<'a>>\n where S: AsRef<str>\n {\n let attrs = self.0.to_mut();\n\n if let Some(pos) = attrs.iter().position(|attr| attr.name.as_ref() == name.as_ref()) {\n Some(attrs.remove(pos))\n } else {\n None\n }\n }\n\n pub fn iter<'b>(&'b self) -> Iter<'b, 'a> {\n Iter {\n inner: self.0.as_ref(),\n index: 0,\n }\n }\n}\n\npub struct Iter<'b, 'a: 'b> {\n inner: &'b [Attribute<'a>],\n index: usize,\n}\n\nimpl<'a, 'b> Iterator for Iter<'b, 'a> {\n type Item = &'b Attribute<'a>;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.index += 1;\n self.inner.get(self.index - 1)\n }\n}\n<commit_msg>Minor documentation wording<commit_after>use std::fmt;\nuse std::borrow::Cow;\n\nuse escape::Escaped;\n\n\/\/\/ An [HTML attribute](https:\/\/www.w3.org\/TR\/html\/syntax.html#attributes-0).\n\/\/\/\n\/\/\/ The name for the attribute will not be validated, you must ensure it meets\n\/\/\/ the requirements specified in the spec 
yourself.\n\/\/\/\n\/\/\/ The value for the attribute will be escaped automatically. If it is an\n\/\/\/ empty string then the attribute will be written with the 'Empty attribute\n\/\/\/ syntax'.\n\/\/\/\n\/\/\/ # Examples\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"id\", \"foo\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"id=\\\"foo\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"id\", \"bar & baz\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"id=\\\"bar & baz\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"invalid=id\", \"foo\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"invalid=id=\\\"foo\\\"\");\n\/\/\/ ```\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ let attr = hamlet::attr::Attribute::new(\"checked\", \"\");\n\/\/\/ assert_eq!(format!(\"{}\", attr), \"checked\");\n\/\/\/ ```\n#[derive(Clone, Debug, PartialEq, Eq)]\npub struct Attribute<'a> {\n pub name: Cow<'a, str>,\n pub value: Cow<'a, str>,\n}\n\nimpl<'a> Attribute<'a> {\n \/\/\/ Create an attribute, useful to avoid having to convert strings to\n \/\/\/ `Cow<str>` yourself.\n \/\/\/\n \/\/\/ Generally this shouldn't be used directly by end users, it's likely\n \/\/\/ that there are builder APIs or macros available that make attribute\n \/\/\/ construction easier, for example the modification methods on\n \/\/\/ [`AttributeList`](struct.AttributeList.html#methods) or the\n \/\/\/ [`attrs!`](macro.attrs!.html) macro.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ use std::borrow::Cow;\n \/\/\/ use hamlet::attr::Attribute;\n \/\/\/\n \/\/\/ let foo = \"foo\".to_owned();\n \/\/\/ let foo2 = foo.clone();\n \/\/\/ assert_eq!(\n \/\/\/ Attribute::new(\"id\", foo),\n \/\/\/ Attribute {\n \/\/\/ name: Cow::Borrowed(\"id\"),\n \/\/\/ value: Cow::Owned(foo2),\n \/\/\/ });\n \/\/\/ ```\n pub fn new<N, V>(name: N, value: V) -> Attribute<'a>\n where N: Into<Cow<'a, str>>,\n V: Into<Cow<'a, str>>\n {\n Attribute {\n name: name.into(),\n value: value.into(),\n }\n }\n}\n\nimpl<'a> fmt::Display for Attribute<'a> {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if self.value == \"\" {\n write!(f, \"{}\", self.name)\n } else {\n write!(f, \"{}=\\\"{}\\\"\", self.name.as_ref(), Escaped(&self.value))\n }\n }\n}\n\n#[derive(Clone, Debug)]\n\/\/\/ A list of [`Attribute`](.\/struct.Attribute.html)s.\n\/\/\/\n\/\/\/ This is stored as a plain list instead of a set as in most cases it will\n\/\/\/ be a small collection over which a linear search will be more efficient.\npub struct AttributeList<'a>(Cow<'a, [Attribute<'a>]>);\n\nimpl<'a> AttributeList<'a> {\n \/\/\/ Return an empty `AttributeList`\n pub fn empty() -> AttributeList<'a> {\n AttributeList(Cow::Borrowed(&[]))\n }\n\n \/\/\/ Note that this does not check for duplicate attribute names. 
Generally,\n \/\/\/ end users are not expected to call this method, and instead use\n \/\/\/ high-level builder APIs or macros available to make construction easier,\n \/\/\/ such as the provided [`attrs!`](.\/macro.attrs!.html) macro.\n pub fn from_vec(attrs: Vec<Attribute<'a>>) -> AttributeList<'a> {\n AttributeList(Cow::Owned(attrs))\n }\n\n pub fn into_vec(self) -> Vec<Attribute<'a>> {\n self.0.into_owned()\n }\n\n \/\/\/ Try and get the value of an attribute.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let attrs = attrs!(id = \"foo\");\n \/\/\/ assert_eq!(attrs.get(\"id\"), Some(\"foo\"));\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let attrs = attrs!(id = \"foo\");\n \/\/\/ assert_eq!(attrs.get(\"class\"), None);\n \/\/\/ # }\n \/\/\/ ```\n pub fn get<S>(&self, name: S) -> Option<&str>\n where S: AsRef<str>\n {\n self.0.iter().find(|attr| attr.name == name.as_ref()).map(|a| a.value.as_ref())\n }\n\n \/\/\/ Unconditionally set an attribute to a value. If the attribute already\n \/\/\/ exists in the list, update its value, otherwise add a new attribute to\n \/\/\/ the list.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ attrs.set(\"id\", \"bar\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.get(\"id\"), Some(\"bar\"));\n \/\/\/ # }\n \/\/\/ ```\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ attrs.set(\"class\", \"bar\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.get(\"class\"), Some(\"bar\"));\n \/\/\/ # }\n \/\/\/ ```\n pub fn set<N, V>(&mut self, name: N, value: V)\n where N: Into<Cow<'a, str>>,\n V: Into<Cow<'a, str>>\n {\n let (name, value) = (name.into(), value.into());\n let attrs = self.0.to_mut();\n\n if let Some(pos) = attrs.iter().position(|attr| attr.name == name) {\n attrs[pos].value = value;\n } else {\n attrs.push(Attribute::new(name, value));\n }\n }\n\n \/\/\/ Removes and returns the attribute if there was one.\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```rust\n \/\/\/ # #[macro_use] extern crate hamlet;\n \/\/\/ # fn main() {\n \/\/\/ let mut attrs = attrs!(id = \"foo\");\n \/\/\/\n \/\/\/ assert_eq!(attrs.remove(\"id\").map(|a| a.value).unwrap().as_ref(), \"foo\");\n \/\/\/ # }\n \/\/\/ ```\n pub fn remove<S>(&mut self, name: S) -> Option<Attribute<'a>>\n where S: AsRef<str>\n {\n let attrs = self.0.to_mut();\n\n if let Some(pos) = attrs.iter().position(|attr| attr.name.as_ref() == name.as_ref()) {\n Some(attrs.remove(pos))\n } else {\n None\n }\n }\n\n pub fn iter<'b>(&'b self) -> Iter<'b, 'a> {\n Iter {\n inner: self.0.as_ref(),\n index: 0,\n }\n }\n}\n\npub struct Iter<'b, 'a: 'b> {\n inner: &'b [Attribute<'a>],\n index: usize,\n}\n\nimpl<'a, 'b> Iterator for Iter<'b, 'a> {\n type Item = &'b Attribute<'a>;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.index += 1;\n self.inner.get(self.index - 1)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use hyper::header::AccessControlAllowOrigin;\nuse hyper::server::Request;\nuse hyper::net::HttpStream;\n\npub fn read_origin(req: &Request<HttpStream>) -> Option<String> {\n\tmatch req.headers().get_raw(\"origin\") {\n\t\tSome(ref v) if v.len() == 1 => {\n\t\t\tString::from_utf8(v[0].clone()).ok()\n\t\t},\n\t\t_ 
=> None\n\t}\n}\n\npub fn get_cors_header(allowed: &Option<Vec<AccessControlAllowOrigin>>, origin: &Option<String>) -> Option<AccessControlAllowOrigin> {\n\n\tif allowed.is_none() {\n\t\treturn None;\n\t}\n\tlet allowed = allowed.as_ref().unwrap();\n\n\tmatch *origin {\n\t\tSome(ref origin) => {\n\t\t\tallowed.iter().find(|cors| {\n\t\t\t\tmatch **cors {\n\t\t\t\t\tAccessControlAllowOrigin::Any => true,\n\t\t\t\t\tAccessControlAllowOrigin::Value(ref val) if val == origin => true,\n\t\t\t\t\t_ => false\n\t\t\t\t}\n\t\t\t}).cloned()\n\t\t},\n\t\tNone => {\n\t\t\tallowed.iter().find(|cors| **cors == AccessControlAllowOrigin::Null).cloned()\n\t\t},\n\t}\n}\n\n\n#[cfg(test)]\nmod tests {\n\tuse super::*;\n\tuse hyper::header::AccessControlAllowOrigin;\n\n\t#[test]\n\tfn should_return_none_when_there_are_no_cors_domains() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&None, &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_empty_origin() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into())]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_empty_list() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&Some(Vec::new()), &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_not_matching_origin() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into())]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_specific_origin_if_we_allow_any() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&Some(vec![AccessControlAllowOrigin::Any]), &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())));\n\t}\n\n\t#[test]\n\tfn should_return_null_only_if_null_is_set_and_origin_is_not_defined() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Null]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Null));\n\t}\n\n\t#[test]\n\tfn should_return_specific_origin_if_there_is_a_match() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into()), AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())]),\n\t\t\t&origin\n\t\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())));\n\t}\n}\n<commit_msg>Fixing test<commit_after>use hyper::header::AccessControlAllowOrigin;\nuse hyper::server::Request;\nuse hyper::net::HttpStream;\n\npub fn read_origin(req: &Request<HttpStream>) -> Option<String> {\n\tmatch req.headers().get_raw(\"origin\") {\n\t\tSome(ref v) if v.len() == 1 => {\n\t\t\tString::from_utf8(v[0].clone()).ok()\n\t\t},\n\t\t_ => None\n\t}\n}\n\npub fn get_cors_header(allowed: &Option<Vec<AccessControlAllowOrigin>>, origin: &Option<String>) -> Option<AccessControlAllowOrigin> 
{\n\n\tif allowed.is_none() {\n\t\treturn None;\n\t}\n\tlet allowed = allowed.as_ref().unwrap();\n\n\tmatch *origin {\n\t\tSome(ref origin) => {\n\t\t\tallowed.iter().find(|cors| {\n\t\t\t\tmatch **cors {\n\t\t\t\t\tAccessControlAllowOrigin::Any => true,\n\t\t\t\t\tAccessControlAllowOrigin::Value(ref val) if val == origin => true,\n\t\t\t\t\t_ => false\n\t\t\t\t}\n\t\t\t}).map(|cors| {\n\t\t\t\tmatch *cors {\n\t\t\t\t\tAccessControlAllowOrigin::Any => AccessControlAllowOrigin::Value(origin.clone()),\n\t\t\t\t\tref cors => cors.clone(),\n\t\t\t\t}\n\t\t\t})\n\t\t},\n\t\tNone => {\n\t\t\tallowed.iter().find(|cors| **cors == AccessControlAllowOrigin::Null).cloned()\n\t\t},\n\t}\n}\n\n\n#[cfg(test)]\nmod tests {\n\tuse super::*;\n\tuse hyper::header::AccessControlAllowOrigin;\n\n\t#[test]\n\tfn should_return_none_when_there_are_no_cors_domains() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&None, &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_empty_origin() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into())]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_empty_list() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&Some(Vec::new()), &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_none_for_not_matching_origin() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into())]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, None);\n\t}\n\n\t#[test]\n\tfn should_return_specific_origin_if_we_allow_any() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(&Some(vec![AccessControlAllowOrigin::Any]), &origin);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())));\n\t}\n\n\t#[test]\n\tfn should_return_null_only_if_null_is_set_and_origin_is_not_defined() {\n\t\t\/\/ given\n\t\tlet origin = None;\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Null]),\n\t\t\t&origin\n\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Null));\n\t}\n\n\t#[test]\n\tfn should_return_specific_origin_if_there_is_a_match() {\n\t\t\/\/ given\n\t\tlet origin = Some(\"http:\/\/ethcore.io\".into());\n\n\t\t\/\/ when\n\t\tlet res = get_cors_header(\n\t\t\t&Some(vec![AccessControlAllowOrigin::Value(\"http:\/\/ethereum.org\".into()), AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())]),\n\t\t\t&origin\n\t\t\t);\n\n\t\t\/\/ then\n\t\tassert_eq!(res, Some(AccessControlAllowOrigin::Value(\"http:\/\/ethcore.io\".into())));\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ SOS: the Stupid Operating System\n\/\/ by Eliza Weisman (eliza@elizas.website)\n\/\/\n\/\/ Copyright (c) 2015-2017 Eliza Weisman\n\/\/ Released under the terms of the MIT license. See `LICENSE` in the root\n\/\/ directory of this repository for more information.\n\/\/\n\/\/! # SOS kernel\n\/\/! This crate contains the kernel for SOS, the Stupid Operating System.\n\/\/!\n\/\/! # SOS: the Stupid Operating System\n\/\/! 
SOS is a simple, tiny toy OS implemented in Rust. It targets the `x86`,\n\/\/! `x86_64`, and ARM v7 CPU architectures.\n\/\/!\n\/\/! I'm writing this mostly for fun, to learn more about OS design and kernel\n\/\/! hacking, so don't expect anything new or exciting out of this project.\n\/\/!\n\/\/! SOS is copyright 2015-2017 Eliza Weisman, and is released under the terms\n\/\/! of the MIT license.\n\n#![crate_name = \"sos_kernel\"]\n\n#![doc(html_root_url = \"https:\/\/hawkw.github.io\/sos-kernel\/\")]\n\n#![feature( lang_items, asm, naked_functions )]\n#![feature( linkage )]\n#![feature( const_fn\n , slice_patterns\n , associated_consts\n , type_ascription\n , custom_derive )]\n#![feature( collections )]\n\n#![cfg_attr(feature=\"clippy\", feature(plugin))]\n#![cfg_attr(feature=\"clippy\", plugin(clippy))]\n#![cfg_attr( any(target_arch = \"x86_64\", target_arch=\"x86\")\n , feature(abi_x86_interrupt))]\n\n#![no_std]\n#![cfg_attr(not(test), no_main)]\n\n\/\/ -- non-SOS dependencies --------------------------------------------------\n#[macro_use] extern crate lazy_static;\n#[macro_use] extern crate bitflags;\n#[macro_use] extern crate log;\n\nextern crate collections;\nextern crate rlibc;\nextern crate spin;\n\n\/\/ -- SOS dependencies ------------------------------------------------------\n#[macro_use] extern crate vga;\n\nextern crate alloc;\nextern crate cpu;\nextern crate elf;\nextern crate util;\nextern crate memory;\n\n#[macro_use] pub mod io;\n\npub mod heap;\npub mod params;\npub mod arch;\npub mod logger;\n\n\/\/\/ SOS version number\npub const VERSION_STRING: &'static str\n = concat!(\"Stupid Operating System v\", env!(\"CARGO_PKG_VERSION\"));\n\nuse params::InitParams;\n\n\/\/\/ Kernel main loop\npub fn kernel_main() -> ! {\n let mut a_vec = collections::vec::Vec::<usize>::new();\n info!(target: \"test\", \"Created a vector in kernel space! {:?}\", a_vec);\n a_vec.push(1);\n info!(target: \"test\", \"pushed to vec: {:?}\", a_vec);\n a_vec.push(2);\n info!(target: \"test\", \"pushed to vec: {:?}\", a_vec);\n\n loop { }\n}\n\n\/\/\/ Kernel initialization function called from ASM\n\/\/\/\n\/\/\/ The kernel main loop expects to be passed the address of a valid\n\/\/\/ Multiboot 2 info struct. It's the bootloader's responsibility to ensure\n\/\/\/ that this is passed in the correct register as expected by the calling\n\/\/\/ convention (`edi` on x86). If this isn't there, you can expect to have a\n\/\/\/ bad problem and not go to space today.\npub fn kernel_init(params: &InitParams) {\n kinfoln!(\"Hello from the kernel!\");\n\n \/\/ -- initialize interrupts ----------------------------------------------\n kinfoln!(dots: \" . \", \"Initializing interrupts:\");\n unsafe {\n arch::interrupts::initialize();\n };\n kinfoln!(dots: \" . \", target: \"Enabling interrupts\", \"[ OKAY ]\");\n\n \/\/ -- initialize the heap ------------------------------------------------\n\n if unsafe { heap::initialize(params) }.is_ok() {\n kinfoln!( dots: \" . \", target: \"Intializing heap\"\n , \"[ OKAY ]\"\n );\n kinfoln!( dots: \" . . \"\n , \"Heap begins at {:#x} and ends at {:#x}\"\n , params.heap_base, params.heap_top);\n } else {\n kinfoln!( dots: \" . 
\", target: \"Intializing heap\"\n , \"[ FAIL ]\"\n );\n }\n\n println!(\"\\n{} {}-bit\\n\", VERSION_STRING, arch::ARCH_BITS);\n \/\/ -- call into kernel main loop ------------------------------------------\n \/\/ (currently, this does nothing)\n kernel_main()\n\n}\n\n\n\/\/\/ This fake `main` function exists only to placate `cargo test`.\n#[cfg(test)]\nfn main() {\n\n}\n<commit_msg>doc(kernel_init): update incorrect docs in<commit_after>\/\/\n\/\/ SOS: the Stupid Operating System\n\/\/ by Eliza Weisman (eliza@elizas.website)\n\/\/\n\/\/ Copyright (c) 2015-2017 Eliza Weisman\n\/\/ Released under the terms of the MIT license. See `LICENSE` in the root\n\/\/ directory of this repository for more information.\n\/\/\n\/\/! # SOS kernel\n\/\/! This crate contains the kernel for SOS, the Stupid Operating System.\n\/\/!\n\/\/! # SOS: the Stupid Operating System\n\/\/! SOS is a simple, tiny toy OS implemented in Rust. It targets the `x86`,\n\/\/! `x86_64`, and ARM v7 CPU architectures.\n\/\/!\n\/\/! I'm writing this mostly for fun, to learn more about OS design and kernel\n\/\/! hacking, so don't expect anything new or exciting out of this project.\n\/\/!\n\/\/! SOS is copyright 2015-2017 Eliza Weisman, and is released under the terms\n\/\/! of the MIT license.\n\n#![crate_name = \"sos_kernel\"]\n\n#![doc(html_root_url = \"https:\/\/hawkw.github.io\/sos-kernel\/\")]\n\n#![feature( lang_items, asm, naked_functions )]\n#![feature( linkage )]\n#![feature( const_fn\n , slice_patterns\n , associated_consts\n , type_ascription\n , custom_derive )]\n#![feature( collections )]\n\n#![cfg_attr(feature=\"clippy\", feature(plugin))]\n#![cfg_attr(feature=\"clippy\", plugin(clippy))]\n#![cfg_attr( any(target_arch = \"x86_64\", target_arch=\"x86\")\n , feature(abi_x86_interrupt))]\n\n#![no_std]\n#![cfg_attr(not(test), no_main)]\n\n\/\/ -- non-SOS dependencies --------------------------------------------------\n#[macro_use] extern crate lazy_static;\n#[macro_use] extern crate bitflags;\n#[macro_use] extern crate log;\n\nextern crate collections;\nextern crate rlibc;\nextern crate spin;\n\n\/\/ -- SOS dependencies ------------------------------------------------------\n#[macro_use] extern crate vga;\n\nextern crate alloc;\nextern crate cpu;\nextern crate elf;\nextern crate util;\nextern crate memory;\n\n#[macro_use] pub mod io;\n\npub mod heap;\npub mod params;\npub mod arch;\npub mod logger;\n\n\/\/\/ SOS version number\npub const VERSION_STRING: &'static str\n = concat!(\"Stupid Operating System v\", env!(\"CARGO_PKG_VERSION\"));\n\nuse params::InitParams;\n\n\/\/\/ Kernel main loop\npub fn kernel_main() -> ! {\n let mut a_vec = collections::vec::Vec::<usize>::new();\n info!(target: \"test\", \"Created a vector in kernel space! {:?}\", a_vec);\n a_vec.push(1);\n info!(target: \"test\", \"pushed to vec: {:?}\", a_vec);\n a_vec.push(2);\n info!(target: \"test\", \"pushed to vec: {:?}\", a_vec);\n\n loop { }\n}\n\n\/\/\/ Cross-architecture kernel initialization.\n\/\/\/\n\/\/\/ This function is called by the arch specific init function.\npub fn kernel_init(params: &InitParams) {\n kinfoln!(\"Hello from the kernel!\");\n\n \/\/ -- initialize interrupts ----------------------------------------------\n kinfoln!(dots: \" . \", \"Initializing interrupts:\");\n \/\/ TODO: this whole function *may* want to just be made `unsafe`...\n unsafe { arch::interrupts::initialize(); };\n kinfoln!(dots: \" . 
\", target: \"Enabling interrupts\", \"[ OKAY ]\");\n\n \/\/ -- initialize the heap ------------------------------------------------\n\n if unsafe { heap::initialize(params) }.is_ok() {\n kinfoln!( dots: \" . \", target: \"Intializing heap\"\n , \"[ OKAY ]\"\n );\n kinfoln!( dots: \" . . \"\n , \"Heap begins at {:#x} and ends at {:#x}\"\n , params.heap_base, params.heap_top);\n } else {\n kinfoln!( dots: \" . \", target: \"Intializing heap\"\n , \"[ FAIL ]\"\n );\n }\n\n println!(\"\\n{} {}-bit\\n\", VERSION_STRING, arch::ARCH_BITS);\n \/\/ -- call into kernel main loop ------------------------------------------\n \/\/ (currently, this does nothing)\n kernel_main()\n\n}\n\n\n\/\/\/ This fake `main` function exists only to placate `cargo test`.\n#[cfg(test)]\nfn main() {\n\n}\n<|endoftext|>"} {"text":"<commit_before>#![cfg_attr(test, deny(warnings))]\n\nextern crate atty;\nextern crate chrono;\n#[macro_use]\nextern crate clap;\nextern crate colored;\nextern crate doug;\nextern crate serde;\nextern crate serde_json;\n\nuse std::io::stdout;\n\nuse atty::Stream;\nuse clap::{App, AppSettings, Arg, Shell, SubCommand};\nuse colored::Colorize;\n\nuse doug::*;\nuse std::process;\n\nfn main() {\n if !atty::is(Stream::Stdout) {\n colored::control::set_override(false);\n }\n\n let mut cli =\n App::new(\"Doug\")\n .version(crate_version!())\n .about(\"A time tracking command-line utility\")\n .author(crate_authors!())\n .settings(&[\n AppSettings::DeriveDisplayOrder,\n AppSettings::GlobalVersion,\n AppSettings::SubcommandRequiredElseHelp,\n AppSettings::VersionlessSubcommands,\n AppSettings::DisableHelpSubcommand,\n AppSettings::ColorAuto,\n ]).arg(\n Arg::with_name(\"path\")\n .short(\"p\")\n .long(\"path\")\n .help(\"Path to load settings file from. (default: ~\/.doug\/settings.json)\"),\n ).subcommand(\n SubCommand::with_name(\"start\")\n .about(\"Track new or existing project\")\n .arg(Arg::with_name(\"project\").help(\n \"project to track. If missing, start subcommand behaves like restart.\",\n )),\n ).subcommand(\n SubCommand::with_name(\"status\")\n .about(\"Display elapsed time, start time, and running project name\")\n .arg(\n Arg::with_name(\"t\")\n .short(\"t\")\n .long(\"time\")\n .help(\"Print time for currently tracked project.\"),\n ).arg(Arg::with_name(\"s\").short(\"s\").long(\"simple\").help(\n \"Print running project name or nothing if there isn't a running project.\",\n )),\n ).subcommand(SubCommand::with_name(\"stop\").about(\"Stop any running projects\"))\n .subcommand(SubCommand::with_name(\"s\").about(\"Stop any running projects\").settings(&[AppSettings::Hidden, AppSettings::HidePossibleValuesInHelp]))\n .subcommand(\n SubCommand::with_name(\"cancel\")\n .about(\"Stop running project and remove most recent time interval\"),\n ).subcommand(SubCommand::with_name(\"restart\").about(\"Track last running project\"))\n .subcommand(SubCommand::with_name(\"r\").about(\"Track last running project\").settings(&[AppSettings::Hidden, AppSettings::HidePossibleValuesInHelp]))\n .subcommand(\n SubCommand::with_name(\"log\").about(\"Display time intervals across all projects\"),\n ).subcommand(\n SubCommand::with_name(\"report\")\n .about(\"Display aggregate time from projects\")\n .arg(\n Arg::with_name(\"year\")\n .short(\"y\")\n .long(\"year\")\n .help(\"Limit report to past year. 
Use multiple to increase interval.\")\n .overrides_with_all(&[\"month\", \"week\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"month\")\n .short(\"m\")\n .long(\"month\")\n .help(\"Limit report to past month. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"week\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"week\")\n .short(\"w\")\n .long(\"week\")\n .help(\"Limit report to past week. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"month\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"day\")\n .short(\"d\")\n .long(\"day\")\n .help(\"Limit report to past day. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"from\")\n .short(\"f\")\n .long(\"from\")\n .help(\"Date when report should start (e.g. 2018-1-1)\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"day\"])\n .takes_value(true),\n ).arg(\n Arg::with_name(\"to\")\n .short(\"t\")\n .long(\"to\")\n .help(\"Date when report should end (e.g. 2018-1-20)\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"day\"])\n .takes_value(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"amend\")\n .about(\"Change name of currently running project\")\n .arg(\n Arg::with_name(\"project\")\n .help(\"new project name\")\n .required(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"edit\")\n .about(\"Edit last frame or currently running frame\")\n .arg(\n Arg::with_name(\"start\")\n .short(\"s\")\n .long(\"start\")\n .help(\"starting date\")\n .takes_value(true),\n ).arg(\n Arg::with_name(\"end\")\n .short(\"e\")\n .long(\"end\")\n .help(\"ending date\")\n .takes_value(true),\n ),\n )\n .subcommand(\n SubCommand::with_name(\"settings\")\n .about(\"configure doug settings\")\n .arg(\n Arg::with_name(\"path\")\n .short(\"p\")\n .long(\"path\")\n .takes_value(true)\n .help(\"path to store data file\")\n .long_help(\"path to store data file. this only affects the data file location. 
settings are stored in $HOME.\")\n ).arg(\n Arg::with_name(\"clear\")\n .short(\"c\")\n .long(\"clear\")\n .help(\"clear settings file\")\n )\n ).subcommand(\n SubCommand::with_name(\"generate-completions\")\n .about(\"Generate completions\")\n .arg(\n Arg::with_name(\"shell\")\n .help(\"shell to generate completion for (default: bash).\")\n .short(\"s\")\n .long(\"shell\")\n .possible_values(&[\"bash\", \"zsh\", \"fish\", \"powershell\"])\n .case_insensitive(true)\n .default_value(\"bash\")\n .takes_value(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"delete\")\n .about(\"Delete all intervals for project\")\n .arg(\n Arg::with_name(\"project\")\n .help(\"new project name\")\n .required(true),\n ),\n );\n\n let matches = cli.clone().get_matches();\n\n let mut doug = match Doug::new(matches.value_of(\"path\")) {\n Ok(x) => x,\n Err(e) => {\n eprintln!(\"Error: {}\", e);\n process::exit(1)\n }\n };\n\n let results = match matches.subcommand() {\n (\"start\", Some(matches)) | (\"s\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.start(project),\n \/\/ Restart last project if not argument is provided\n None => doug.restart(),\n },\n (\"amend\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.amend(project),\n None => Err(\"Missing project name\".to_string()),\n },\n (\"delete\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.delete(project),\n None => Err(\"missing project name\".to_string()),\n },\n (\"status\", Some(matches)) => doug.status(matches.is_present(\"s\"), matches.is_present(\"t\")),\n (\"report\", Some(matches)) => doug.report(\n matches.occurrences_of(\"year\") as i32,\n matches.occurrences_of(\"month\") as i32,\n matches.occurrences_of(\"week\") as i32,\n matches.occurrences_of(\"day\") as i32,\n matches.value_of(\"from\"),\n matches.value_of(\"to\"),\n ),\n (\"generate-completions\", Some(matches)) => match matches.value_of(\"shell\") {\n Some(\"bash\") => {\n cli.gen_completions_to(\"doug\", Shell::Bash, &mut stdout());\n Ok(None)\n }\n Some(\"zsh\") => {\n cli.gen_completions_to(\"doug\", Shell::Zsh, &mut stdout());\n Ok(None)\n }\n Some(\"fish\") => {\n cli.gen_completions_to(\"doug\", Shell::Fish, &mut stdout());\n Ok(None)\n }\n Some(\"powershell\") => {\n cli.gen_completions_to(\"doug\", Shell::PowerShell, &mut stdout());\n Ok(None)\n }\n _ => Err(\"Invalid option\".to_string()),\n },\n (\"edit\", Some(matches)) => doug.edit(matches.value_of(\"start\"), matches.value_of(\"end\")),\n (\"stop\", Some(_)) => doug.stop(),\n (\"cancel\", Some(_)) => doug.cancel(),\n (\"restart\", Some(_)) | (\"r\", Some(_)) => doug.restart(),\n (\"log\", Some(_)) => doug.log(),\n (\"settings\", Some(matches)) => {\n doug.settings(matches.value_of(\"path\"), matches.is_present(\"clear\"))\n }\n (_, Some(_)) | (_, None) => unreachable!(),\n };\n\n match results {\n Ok(Some(m)) => {\n \/\/ There are some inconsistencies for outputs so some commands return new lines and\n \/\/ some don't\n println!(\"{}\", m.trim_right())\n }\n \/\/ nothing to print\n Ok(None) => {}\n Err(e) => {\n eprintln!(\"{} {}\", \"Error:\".red(), e);\n process::exit(1)\n }\n }\n}\n<commit_msg>remove unnecessary `extern crate` usage<commit_after>#![cfg_attr(test, deny(warnings))]\n\nuse std::io::stdout;\n\nuse atty::Stream;\nuse clap::{crate_authors, crate_version, App, AppSettings, Arg, Shell, SubCommand};\nuse colored::Colorize;\n\nuse doug::*;\nuse std::process;\n\nfn main() {\n if !atty::is(Stream::Stdout) {\n 
colored::control::set_override(false);\n }\n\n let mut cli =\n App::new(\"Doug\")\n .version(crate_version!())\n .about(\"A time tracking command-line utility\")\n .author(crate_authors!())\n .settings(&[\n AppSettings::DeriveDisplayOrder,\n AppSettings::GlobalVersion,\n AppSettings::SubcommandRequiredElseHelp,\n AppSettings::VersionlessSubcommands,\n AppSettings::DisableHelpSubcommand,\n AppSettings::ColorAuto,\n ]).arg(\n Arg::with_name(\"path\")\n .short(\"p\")\n .long(\"path\")\n .help(\"Path to load settings file from. (default: ~\/.doug\/settings.json)\"),\n ).subcommand(\n SubCommand::with_name(\"start\")\n .about(\"Track new or existing project\")\n .arg(Arg::with_name(\"project\").help(\n \"project to track. If missing, start subcommand behaves like restart.\",\n )),\n ).subcommand(\n SubCommand::with_name(\"status\")\n .about(\"Display elapsed time, start time, and running project name\")\n .arg(\n Arg::with_name(\"t\")\n .short(\"t\")\n .long(\"time\")\n .help(\"Print time for currently tracked project.\"),\n ).arg(Arg::with_name(\"s\").short(\"s\").long(\"simple\").help(\n \"Print running project name or nothing if there isn't a running project.\",\n )),\n ).subcommand(SubCommand::with_name(\"stop\").about(\"Stop any running projects\"))\n .subcommand(SubCommand::with_name(\"s\").about(\"Stop any running projects\").settings(&[AppSettings::Hidden, AppSettings::HidePossibleValuesInHelp]))\n .subcommand(\n SubCommand::with_name(\"cancel\")\n .about(\"Stop running project and remove most recent time interval\"),\n ).subcommand(SubCommand::with_name(\"restart\").about(\"Track last running project\"))\n .subcommand(SubCommand::with_name(\"r\").about(\"Track last running project\").settings(&[AppSettings::Hidden, AppSettings::HidePossibleValuesInHelp]))\n .subcommand(\n SubCommand::with_name(\"log\").about(\"Display time intervals across all projects\"),\n ).subcommand(\n SubCommand::with_name(\"report\")\n .about(\"Display aggregate time from projects\")\n .arg(\n Arg::with_name(\"year\")\n .short(\"y\")\n .long(\"year\")\n .help(\"Limit report to past year. Use multiple to increase interval.\")\n .overrides_with_all(&[\"month\", \"week\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"month\")\n .short(\"m\")\n .long(\"month\")\n .help(\"Limit report to past month. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"week\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"week\")\n .short(\"w\")\n .long(\"week\")\n .help(\"Limit report to past week. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"month\", \"day\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"day\")\n .short(\"d\")\n .long(\"day\")\n .help(\"Limit report to past day. Use multiple to increase interval.\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"from\", \"to\"])\n .multiple(true),\n ).arg(\n Arg::with_name(\"from\")\n .short(\"f\")\n .long(\"from\")\n .help(\"Date when report should start (e.g. 2018-1-1)\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"day\"])\n .takes_value(true),\n ).arg(\n Arg::with_name(\"to\")\n .short(\"t\")\n .long(\"to\")\n .help(\"Date when report should end (e.g. 
2018-1-20)\")\n .overrides_with_all(&[\"year\", \"month\", \"week\", \"day\"])\n .takes_value(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"amend\")\n .about(\"Change name of currently running project\")\n .arg(\n Arg::with_name(\"project\")\n .help(\"new project name\")\n .required(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"edit\")\n .about(\"Edit last frame or currently running frame\")\n .arg(\n Arg::with_name(\"start\")\n .short(\"s\")\n .long(\"start\")\n .help(\"starting date\")\n .takes_value(true),\n ).arg(\n Arg::with_name(\"end\")\n .short(\"e\")\n .long(\"end\")\n .help(\"ending date\")\n .takes_value(true),\n ),\n )\n .subcommand(\n SubCommand::with_name(\"settings\")\n .about(\"configure doug settings\")\n .arg(\n Arg::with_name(\"path\")\n .short(\"p\")\n .long(\"path\")\n .takes_value(true)\n .help(\"path to store data file\")\n .long_help(\"path to store data file. this only affects the data file location. settings are stored in $HOME.\")\n ).arg(\n Arg::with_name(\"clear\")\n .short(\"c\")\n .long(\"clear\")\n .help(\"clear settings file\")\n )\n ).subcommand(\n SubCommand::with_name(\"generate-completions\")\n .about(\"Generate completions\")\n .arg(\n Arg::with_name(\"shell\")\n .help(\"shell to generate completion for (default: bash).\")\n .short(\"s\")\n .long(\"shell\")\n .possible_values(&[\"bash\", \"zsh\", \"fish\", \"powershell\"])\n .case_insensitive(true)\n .default_value(\"bash\")\n .takes_value(true),\n ),\n ).subcommand(\n SubCommand::with_name(\"delete\")\n .about(\"Delete all intervals for project\")\n .arg(\n Arg::with_name(\"project\")\n .help(\"new project name\")\n .required(true),\n ),\n );\n\n let matches = cli.clone().get_matches();\n\n let mut doug = match Doug::new(matches.value_of(\"path\")) {\n Ok(x) => x,\n Err(e) => {\n eprintln!(\"Error: {}\", e);\n process::exit(1)\n }\n };\n\n let results = match matches.subcommand() {\n (\"start\", Some(matches)) | (\"s\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.start(project),\n \/\/ Restart last project if not argument is provided\n None => doug.restart(),\n },\n (\"amend\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.amend(project),\n None => Err(\"Missing project name\".to_string()),\n },\n (\"delete\", Some(matches)) => match matches.value_of(\"project\") {\n Some(project) => doug.delete(project),\n None => Err(\"missing project name\".to_string()),\n },\n (\"status\", Some(matches)) => doug.status(matches.is_present(\"s\"), matches.is_present(\"t\")),\n (\"report\", Some(matches)) => doug.report(\n matches.occurrences_of(\"year\") as i32,\n matches.occurrences_of(\"month\") as i32,\n matches.occurrences_of(\"week\") as i32,\n matches.occurrences_of(\"day\") as i32,\n matches.value_of(\"from\"),\n matches.value_of(\"to\"),\n ),\n (\"generate-completions\", Some(matches)) => match matches.value_of(\"shell\") {\n Some(\"bash\") => {\n cli.gen_completions_to(\"doug\", Shell::Bash, &mut stdout());\n Ok(None)\n }\n Some(\"zsh\") => {\n cli.gen_completions_to(\"doug\", Shell::Zsh, &mut stdout());\n Ok(None)\n }\n Some(\"fish\") => {\n cli.gen_completions_to(\"doug\", Shell::Fish, &mut stdout());\n Ok(None)\n }\n Some(\"powershell\") => {\n cli.gen_completions_to(\"doug\", Shell::PowerShell, &mut stdout());\n Ok(None)\n }\n _ => Err(\"Invalid option\".to_string()),\n },\n (\"edit\", Some(matches)) => doug.edit(matches.value_of(\"start\"), matches.value_of(\"end\")),\n (\"stop\", Some(_)) => doug.stop(),\n 
(\"cancel\", Some(_)) => doug.cancel(),\n (\"restart\", Some(_)) | (\"r\", Some(_)) => doug.restart(),\n (\"log\", Some(_)) => doug.log(),\n (\"settings\", Some(matches)) => {\n doug.settings(matches.value_of(\"path\"), matches.is_present(\"clear\"))\n }\n (_, Some(_)) | (_, None) => unreachable!(),\n };\n\n match results {\n Ok(Some(m)) => {\n \/\/ There are some inconsistencies for outputs so some commands return new lines and\n \/\/ some don't\n println!(\"{}\", m.trim_right())\n }\n \/\/ nothing to print\n Ok(None) => {}\n Err(e) => {\n eprintln!(\"{} {}\", \"Error:\".red(), e);\n process::exit(1)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use crate::plot_data::PlotData;\nuse anyhow::{anyhow, Result};\nuse chrono::prelude::*;\nuse crossterm::event::{KeyEvent, KeyModifiers};\nuse crossterm::{\n event::{self, DisableMouseCapture, EnableMouseCapture, Event as CEvent, KeyCode},\n execute,\n terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},\n};\nuse dns_lookup::lookup_host;\nuse pinger::{ping, PingResult};\nuse std::io;\nuse std::io::Write;\nuse std::iter;\nuse std::net::IpAddr;\nuse std::ops::Add;\nuse std::process::{Command, Stdio};\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::mpsc::Sender;\nuse std::sync::{mpsc, Arc};\nuse std::thread;\nuse std::thread::JoinHandle;\nuse std::time::{Duration, Instant};\nuse structopt::StructOpt;\nuse tui::backend::CrosstermBackend;\nuse tui::layout::{Constraint, Direction, Layout};\nuse tui::style::{Color, Style};\nuse tui::text::Span;\nuse tui::widgets::{Axis, Block, Borders, Chart, Dataset};\nuse tui::Terminal;\nmod plot_data;\n\n#[derive(Debug, StructOpt)]\n#[structopt(name = \"gping\", about = \"Ping, but with a graph.\")]\nstruct Args {\n #[structopt(\n long,\n help = \"Graph the execution time for a list of commands rather than pinging hosts\"\n )]\n cmd: bool,\n #[structopt(\n short = \"n\",\n long,\n help = \"Watch interval seconds (provide partial seconds like '0.5')\",\n default_value = \"0.5\"\n )]\n watch_interval: f32,\n #[structopt(help = \"Hosts or IPs to ping, or commands to run if --cmd is provided.\")]\n hosts_or_commands: Vec<String>,\n #[structopt(\n short,\n long,\n default_value = \"30\",\n help = \"Determines the number of seconds to display in the graph.\"\n )]\n buffer: u64,\n \/\/\/ Resolve ping targets to IPv4 address\n #[structopt(short = \"4\", conflicts_with = \"ipv6\")]\n ipv4: bool,\n \/\/\/ Resolve ping targets to IPv6 address\n #[structopt(short = \"6\", conflicts_with = \"ipv4\")]\n ipv6: bool,\n}\n\nstruct App {\n data: Vec<PlotData>,\n display_interval: chrono::Duration,\n started: chrono::DateTime<Local>,\n}\n\nimpl App {\n fn new(data: Vec<PlotData>, buffer: u64) -> Self {\n App {\n data,\n display_interval: chrono::Duration::from_std(Duration::from_secs(buffer)).unwrap(),\n started: Local::now(),\n }\n }\n\n fn update(&mut self, host_idx: usize, item: Option<Duration>) {\n let host = &mut self.data[host_idx];\n host.update(item);\n }\n\n fn y_axis_bounds(&self) -> [f64; 2] {\n \/\/ Find the Y axis bounds for our chart.\n \/\/ This is trickier than the x-axis. We iterate through all our PlotData structs\n \/\/ and find the min\/max of all the values. 
Then we add a 10% buffer to them.\n let iter = self\n .data\n .iter()\n .map(|b| b.data.as_slice())\n .flatten()\n .map(|v| v.1);\n let min = iter.clone().fold(f64::INFINITY, |a, b| a.min(b));\n let max = iter.fold(0f64, |a, b| a.max(b));\n \/\/ Add a 10% buffer to the top and bottom\n let max_10_percent = (max * 10_f64) \/ 100_f64;\n let min_10_percent = (min * 10_f64) \/ 100_f64;\n [min - min_10_percent, max + max_10_percent]\n }\n\n fn x_axis_bounds(&self) -> [f64; 2] {\n let now = Local::now();\n let now_idx;\n let before_idx;\n if (now - self.started) < self.display_interval {\n now_idx = (self.started + self.display_interval).timestamp_millis() as f64 \/ 1_000f64;\n before_idx = self.started.timestamp_millis() as f64 \/ 1_000f64;\n } else {\n now_idx = now.timestamp_millis() as f64 \/ 1_000f64;\n let before = now - self.display_interval;\n before_idx = before.timestamp_millis() as f64 \/ 1_000f64;\n }\n\n [before_idx, now_idx]\n }\n\n fn x_axis_labels(&self, bounds: [f64; 2]) -> Vec<Span> {\n let lower = NaiveDateTime::from_timestamp(bounds[0] as i64, 0);\n let upper = NaiveDateTime::from_timestamp(bounds[1] as i64, 0);\n let diff = (upper - lower) \/ 2;\n let midpoint = lower + diff;\n return vec![\n Span::raw(format!(\"{:?}\", lower.time())),\n Span::raw(format!(\"{:?}\", midpoint.time())),\n Span::raw(format!(\"{:?}\", upper.time())),\n ];\n }\n\n fn y_axis_labels(&self, bounds: [f64; 2]) -> Vec<Span> {\n \/\/ Create 7 labels for our y axis, based on the y-axis bounds we computed above.\n let min = bounds[0];\n let max = bounds[1];\n\n let difference = max - min;\n let num_labels = 7;\n \/\/ Split difference into one chunk for each of the 7 labels\n let increment = Duration::from_micros((difference \/ num_labels as f64) as u64);\n let duration = Duration::from_micros(min as u64);\n\n (0..num_labels)\n .map(|i| Span::raw(format!(\"{:?}\", duration.add(increment * i))))\n .collect()\n }\n}\n\n#[derive(Debug)]\nenum Update {\n Result(Duration),\n Timeout,\n Unknown,\n}\n\nimpl From<PingResult> for Update {\n fn from(result: PingResult) -> Self {\n match result {\n PingResult::Pong(duration, _) => Update::Result(duration),\n PingResult::Timeout(_) => Update::Timeout,\n PingResult::Unknown(_) => Update::Unknown,\n }\n }\n}\n\n#[derive(Debug)]\nenum Event {\n Update(usize, Update),\n Input(KeyEvent),\n}\n\nfn start_cmd_thread(\n watch_cmd: &str,\n host_id: usize,\n watch_interval: f32,\n cmd_tx: Sender<Event>,\n kill_event: Arc<AtomicBool>,\n) -> JoinHandle<Result<()>> {\n let mut words = watch_cmd.split_ascii_whitespace();\n let cmd = words\n .next()\n .expect(\"Must specify a command to watch\")\n .to_string();\n let cmd_args = words\n .into_iter()\n .map(|w| w.to_string())\n .collect::<Vec<String>>();\n\n let interval = Duration::from_millis((watch_interval * 1000.0) as u64);\n\n \/\/ Pump cmd watches into the queue\n thread::spawn(move || -> Result<()> {\n while !kill_event.load(Ordering::Acquire) {\n let start = Instant::now();\n let mut child = Command::new(&cmd)\n .args(&cmd_args)\n .stderr(Stdio::null())\n .stdout(Stdio::null())\n .spawn()?;\n let status = child.wait()?;\n let duration = start.elapsed();\n let update = if status.success() {\n Update::Result(duration)\n } else {\n Update::Timeout\n };\n cmd_tx.send(Event::Update(host_id, update))?;\n thread::sleep(interval);\n }\n Ok(())\n })\n}\n\nfn start_ping_thread(\n host: String,\n host_id: usize,\n ping_tx: Sender<Event>,\n kill_event: Arc<AtomicBool>,\n) -> JoinHandle<Result<()>> {\n \/\/ Pump ping messages into the 
queue\n thread::spawn(move || -> Result<()> {\n let stream = ping(host)?;\n while !kill_event.load(Ordering::Acquire) {\n ping_tx.send(Event::Update(host_id, stream.recv()?.into()))?;\n }\n Ok(())\n })\n}\n\nfn get_host_ipaddr(host: &str, force_ipv4: bool, force_ipv6: bool) -> Result<String> {\n let ipaddr: Vec<IpAddr> = match lookup_host(host) {\n Ok(ip) => ip,\n Err(_) => return Err(anyhow!(\"Could not resolve hostname {}\", host)),\n };\n let ipaddr = if force_ipv4 {\n ipaddr\n .iter()\n .find(|ip| matches!(ip, IpAddr::V4(_)))\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IPv4\", host))\n } else if force_ipv6 {\n ipaddr\n .iter()\n .find(|ip| matches!(ip, IpAddr::V6(_)))\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IPv6\", host))\n } else {\n ipaddr\n .first()\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IP\", host))\n };\n Ok(ipaddr?.to_string())\n}\n\nfn main() -> Result<()> {\n let args = Args::from_args();\n\n let mut data = vec![];\n\n for (idx, host_or_cmd) in args.hosts_or_commands.iter().enumerate() {\n let display = match args.cmd {\n true => host_or_cmd.to_string(),\n false => format!(\n \"{} ({})\",\n host_or_cmd,\n get_host_ipaddr(host_or_cmd, args.ipv4, args.ipv6)?\n ),\n };\n data.push(PlotData::new(\n display,\n args.buffer,\n Style::default().fg(Color::Indexed(idx as u8 + 1)),\n ));\n }\n\n let mut app = App::new(data, args.buffer);\n enable_raw_mode()?;\n let mut stdout = io::stdout();\n execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;\n let backend = CrosstermBackend::new(stdout);\n\n let mut terminal = Terminal::new(backend)?;\n\n terminal.clear()?;\n\n let (key_tx, rx) = mpsc::channel();\n\n let mut threads = vec![];\n\n let killed = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));\n\n for (host_id, host_or_cmd) in args.hosts_or_commands.iter().cloned().enumerate() {\n if args.cmd {\n let cmd_thread = start_cmd_thread(\n &host_or_cmd,\n host_id,\n args.watch_interval,\n key_tx.clone(),\n std::sync::Arc::clone(&killed),\n );\n threads.push(cmd_thread);\n } else {\n threads.push(start_ping_thread(\n host_or_cmd,\n host_id,\n key_tx.clone(),\n std::sync::Arc::clone(&killed),\n ));\n }\n }\n\n \/\/ Pump keyboard messages into the queue\n let killed_thread = std::sync::Arc::clone(&killed);\n let key_thread = thread::spawn(move || -> Result<()> {\n while !killed_thread.load(Ordering::Acquire) {\n if event::poll(Duration::from_millis(100))? {\n if let CEvent::Key(key) = event::read()? {\n key_tx.send(Event::Input(key))?;\n }\n }\n }\n Ok(())\n });\n threads.push(key_thread);\n\n loop {\n match rx.recv()? 
{\n Event::Update(host_id, update) => {\n match update {\n Update::Result(duration) => app.update(host_id, Some(duration)),\n Update::Timeout => app.update(host_id, None),\n Update::Unknown => (),\n };\n terminal.draw(|f| {\n \/\/ Split our\n let chunks = Layout::default()\n .direction(Direction::Vertical)\n .margin(1)\n .constraints(\n iter::repeat(Constraint::Length(1))\n .take(app.data.len())\n .chain(iter::once(Constraint::Percentage(10)))\n .collect::<Vec<_>>()\n .as_ref(),\n )\n .split(f.size());\n\n let total_chunks = chunks.len();\n\n let header_chunks = chunks[0..total_chunks - 1].to_owned();\n let chart_chunk = chunks[total_chunks - 1].to_owned();\n\n for (plot_data, chunk) in app.data.iter().zip(header_chunks) {\n let header_layout = Layout::default()\n .direction(Direction::Horizontal)\n .constraints(\n [\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n ]\n .as_ref(),\n )\n .split(chunk);\n\n for (area, paragraph) in\n header_layout.into_iter().zip(plot_data.header_stats())\n {\n f.render_widget(paragraph, area);\n }\n }\n\n let datasets: Vec<Dataset> = app.data.iter().map(|d| d.into()).collect();\n\n let y_axis_bounds = app.y_axis_bounds();\n let x_axis_bounds = app.x_axis_bounds();\n\n let chart = Chart::new(datasets)\n .block(Block::default().borders(Borders::NONE))\n .x_axis(\n Axis::default()\n .style(Style::default().fg(Color::Gray))\n .bounds(x_axis_bounds)\n .labels(app.x_axis_labels(x_axis_bounds)),\n )\n .y_axis(\n Axis::default()\n .style(Style::default().fg(Color::Gray))\n .bounds(y_axis_bounds)\n .labels(app.y_axis_labels(y_axis_bounds)),\n );\n\n f.render_widget(chart, chart_chunk)\n })?;\n }\n Event::Input(input) => match input.code {\n KeyCode::Char('q') | KeyCode::Esc => {\n killed.store(true, Ordering::Release);\n break;\n }\n KeyCode::Char('c') if input.modifiers == KeyModifiers::CONTROL => {\n killed.store(true, Ordering::Release);\n break;\n }\n _ => {}\n },\n }\n }\n\n for thread in threads {\n thread.join().unwrap()?;\n }\n\n disable_raw_mode()?;\n execute!(\n terminal.backend_mut(),\n LeaveAlternateScreen,\n DisableMouseCapture\n )?;\n terminal.show_cursor()?;\n\n Ok(())\n}\n<commit_msg>Update x_axis_labels<commit_after>use crate::plot_data::PlotData;\nuse anyhow::{anyhow, Result};\nuse chrono::prelude::*;\nuse crossterm::event::{KeyEvent, KeyModifiers};\nuse crossterm::{\n event::{self, DisableMouseCapture, EnableMouseCapture, Event as CEvent, KeyCode},\n execute,\n terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},\n};\nuse dns_lookup::lookup_host;\nuse pinger::{ping, PingResult};\nuse std::io;\nuse std::io::Write;\nuse std::iter;\nuse std::net::IpAddr;\nuse std::ops::Add;\nuse std::process::{Command, Stdio};\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::mpsc::Sender;\nuse std::sync::{mpsc, Arc};\nuse std::thread;\nuse std::thread::JoinHandle;\nuse std::time::{Duration, Instant};\nuse structopt::StructOpt;\nuse tui::backend::CrosstermBackend;\nuse tui::layout::{Constraint, Direction, Layout};\nuse tui::style::{Color, Style};\nuse tui::text::Span;\nuse tui::widgets::{Axis, Block, Borders, Chart, Dataset};\nuse tui::Terminal;\nmod plot_data;\n\n#[derive(Debug, StructOpt)]\n#[structopt(name = \"gping\", about = \"Ping, but with a graph.\")]\nstruct Args {\n #[structopt(\n long,\n help = \"Graph the execution time for a list of commands rather than pinging hosts\"\n )]\n cmd: bool,\n #[structopt(\n short = \"n\",\n long,\n help 
= \"Watch interval seconds (provide partial seconds like '0.5')\",\n default_value = \"0.5\"\n )]\n watch_interval: f32,\n #[structopt(help = \"Hosts or IPs to ping, or commands to run if --cmd is provided.\")]\n hosts_or_commands: Vec<String>,\n #[structopt(\n short,\n long,\n default_value = \"30\",\n help = \"Determines the number of seconds to display in the graph.\"\n )]\n buffer: u64,\n \/\/\/ Resolve ping targets to IPv4 address\n #[structopt(short = \"4\", conflicts_with = \"ipv6\")]\n ipv4: bool,\n \/\/\/ Resolve ping targets to IPv6 address\n #[structopt(short = \"6\", conflicts_with = \"ipv4\")]\n ipv6: bool,\n}\n\nstruct App {\n data: Vec<PlotData>,\n display_interval: chrono::Duration,\n started: chrono::DateTime<Local>,\n}\n\nimpl App {\n fn new(data: Vec<PlotData>, buffer: u64) -> Self {\n App {\n data,\n display_interval: chrono::Duration::from_std(Duration::from_secs(buffer)).unwrap(),\n started: Local::now(),\n }\n }\n\n fn update(&mut self, host_idx: usize, item: Option<Duration>) {\n let host = &mut self.data[host_idx];\n host.update(item);\n }\n\n fn y_axis_bounds(&self) -> [f64; 2] {\n \/\/ Find the Y axis bounds for our chart.\n \/\/ This is trickier than the x-axis. We iterate through all our PlotData structs\n \/\/ and find the min\/max of all the values. Then we add a 10% buffer to them.\n let iter = self\n .data\n .iter()\n .map(|b| b.data.as_slice())\n .flatten()\n .map(|v| v.1);\n let min = iter.clone().fold(f64::INFINITY, |a, b| a.min(b));\n let max = iter.fold(0f64, |a, b| a.max(b));\n \/\/ Add a 10% buffer to the top and bottom\n let max_10_percent = (max * 10_f64) \/ 100_f64;\n let min_10_percent = (min * 10_f64) \/ 100_f64;\n [min - min_10_percent, max + max_10_percent]\n }\n\n fn x_axis_bounds(&self) -> [f64; 2] {\n let now = Local::now();\n let now_idx;\n let before_idx;\n if (now - self.started) < self.display_interval {\n now_idx = (self.started + self.display_interval).timestamp_millis() as f64 \/ 1_000f64;\n before_idx = self.started.timestamp_millis() as f64 \/ 1_000f64;\n } else {\n now_idx = now.timestamp_millis() as f64 \/ 1_000f64;\n let before = now - self.display_interval;\n before_idx = before.timestamp_millis() as f64 \/ 1_000f64;\n }\n\n [before_idx, now_idx]\n }\n\n fn x_axis_labels(&self, bounds: [f64; 2]) -> Vec<Span> {\n let lower_utc = NaiveDateTime::from_timestamp(bounds[0] as i64, 0);\n let upper_utc = NaiveDateTime::from_timestamp(bounds[1] as i64, 0);\n let lower = Local::from_utc_datetime(&Local, &lower_utc);\n let upper = Local::from_utc_datetime(&Local, &upper_utc);\n let diff = (upper - lower) \/ 2;\n let midpoint = lower + diff;\n return vec![\n Span::raw(format!(\"{:?}\", lower.time())),\n Span::raw(format!(\"{:?}\", midpoint.time())),\n Span::raw(format!(\"{:?}\", upper.time())),\n ];\n }\n\n fn y_axis_labels(&self, bounds: [f64; 2]) -> Vec<Span> {\n \/\/ Create 7 labels for our y axis, based on the y-axis bounds we computed above.\n let min = bounds[0];\n let max = bounds[1];\n\n let difference = max - min;\n let num_labels = 7;\n \/\/ Split difference into one chunk for each of the 7 labels\n let increment = Duration::from_micros((difference \/ num_labels as f64) as u64);\n let duration = Duration::from_micros(min as u64);\n\n (0..num_labels)\n .map(|i| Span::raw(format!(\"{:?}\", duration.add(increment * i))))\n .collect()\n }\n}\n\n#[derive(Debug)]\nenum Update {\n Result(Duration),\n Timeout,\n Unknown,\n}\n\nimpl From<PingResult> for Update {\n fn from(result: PingResult) -> Self {\n match result {\n 
PingResult::Pong(duration, _) => Update::Result(duration),\n PingResult::Timeout(_) => Update::Timeout,\n PingResult::Unknown(_) => Update::Unknown,\n }\n }\n}\n\n#[derive(Debug)]\nenum Event {\n Update(usize, Update),\n Input(KeyEvent),\n}\n\nfn start_cmd_thread(\n watch_cmd: &str,\n host_id: usize,\n watch_interval: f32,\n cmd_tx: Sender<Event>,\n kill_event: Arc<AtomicBool>,\n) -> JoinHandle<Result<()>> {\n let mut words = watch_cmd.split_ascii_whitespace();\n let cmd = words\n .next()\n .expect(\"Must specify a command to watch\")\n .to_string();\n let cmd_args = words\n .into_iter()\n .map(|w| w.to_string())\n .collect::<Vec<String>>();\n\n let interval = Duration::from_millis((watch_interval * 1000.0) as u64);\n\n \/\/ Pump cmd watches into the queue\n thread::spawn(move || -> Result<()> {\n while !kill_event.load(Ordering::Acquire) {\n let start = Instant::now();\n let mut child = Command::new(&cmd)\n .args(&cmd_args)\n .stderr(Stdio::null())\n .stdout(Stdio::null())\n .spawn()?;\n let status = child.wait()?;\n let duration = start.elapsed();\n let update = if status.success() {\n Update::Result(duration)\n } else {\n Update::Timeout\n };\n cmd_tx.send(Event::Update(host_id, update))?;\n thread::sleep(interval);\n }\n Ok(())\n })\n}\n\nfn start_ping_thread(\n host: String,\n host_id: usize,\n ping_tx: Sender<Event>,\n kill_event: Arc<AtomicBool>,\n) -> JoinHandle<Result<()>> {\n \/\/ Pump ping messages into the queue\n thread::spawn(move || -> Result<()> {\n let stream = ping(host)?;\n while !kill_event.load(Ordering::Acquire) {\n ping_tx.send(Event::Update(host_id, stream.recv()?.into()))?;\n }\n Ok(())\n })\n}\n\nfn get_host_ipaddr(host: &str, force_ipv4: bool, force_ipv6: bool) -> Result<String> {\n let ipaddr: Vec<IpAddr> = match lookup_host(host) {\n Ok(ip) => ip,\n Err(_) => return Err(anyhow!(\"Could not resolve hostname {}\", host)),\n };\n let ipaddr = if force_ipv4 {\n ipaddr\n .iter()\n .find(|ip| matches!(ip, IpAddr::V4(_)))\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IPv4\", host))\n } else if force_ipv6 {\n ipaddr\n .iter()\n .find(|ip| matches!(ip, IpAddr::V6(_)))\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IPv6\", host))\n } else {\n ipaddr\n .first()\n .ok_or_else(|| anyhow!(\"Could not resolve '{}' to IP\", host))\n };\n Ok(ipaddr?.to_string())\n}\n\nfn main() -> Result<()> {\n let args = Args::from_args();\n\n let mut data = vec![];\n\n for (idx, host_or_cmd) in args.hosts_or_commands.iter().enumerate() {\n let display = match args.cmd {\n true => host_or_cmd.to_string(),\n false => format!(\n \"{} ({})\",\n host_or_cmd,\n get_host_ipaddr(host_or_cmd, args.ipv4, args.ipv6)?\n ),\n };\n data.push(PlotData::new(\n display,\n args.buffer,\n Style::default().fg(Color::Indexed(idx as u8 + 1)),\n ));\n }\n\n let mut app = App::new(data, args.buffer);\n enable_raw_mode()?;\n let mut stdout = io::stdout();\n execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;\n let backend = CrosstermBackend::new(stdout);\n\n let mut terminal = Terminal::new(backend)?;\n\n terminal.clear()?;\n\n let (key_tx, rx) = mpsc::channel();\n\n let mut threads = vec![];\n\n let killed = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));\n\n for (host_id, host_or_cmd) in args.hosts_or_commands.iter().cloned().enumerate() {\n if args.cmd {\n let cmd_thread = start_cmd_thread(\n &host_or_cmd,\n host_id,\n args.watch_interval,\n key_tx.clone(),\n std::sync::Arc::clone(&killed),\n );\n threads.push(cmd_thread);\n } else {\n threads.push(start_ping_thread(\n 
host_or_cmd,\n host_id,\n key_tx.clone(),\n std::sync::Arc::clone(&killed),\n ));\n }\n }\n\n \/\/ Pump keyboard messages into the queue\n let killed_thread = std::sync::Arc::clone(&killed);\n let key_thread = thread::spawn(move || -> Result<()> {\n while !killed_thread.load(Ordering::Acquire) {\n if event::poll(Duration::from_millis(100))? {\n if let CEvent::Key(key) = event::read()? {\n key_tx.send(Event::Input(key))?;\n }\n }\n }\n Ok(())\n });\n threads.push(key_thread);\n\n loop {\n match rx.recv()? {\n Event::Update(host_id, update) => {\n match update {\n Update::Result(duration) => app.update(host_id, Some(duration)),\n Update::Timeout => app.update(host_id, None),\n Update::Unknown => (),\n };\n terminal.draw(|f| {\n \/\/ Split our\n let chunks = Layout::default()\n .direction(Direction::Vertical)\n .margin(1)\n .constraints(\n iter::repeat(Constraint::Length(1))\n .take(app.data.len())\n .chain(iter::once(Constraint::Percentage(10)))\n .collect::<Vec<_>>()\n .as_ref(),\n )\n .split(f.size());\n\n let total_chunks = chunks.len();\n\n let header_chunks = chunks[0..total_chunks - 1].to_owned();\n let chart_chunk = chunks[total_chunks - 1].to_owned();\n\n for (plot_data, chunk) in app.data.iter().zip(header_chunks) {\n let header_layout = Layout::default()\n .direction(Direction::Horizontal)\n .constraints(\n [\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n Constraint::Percentage(25),\n ]\n .as_ref(),\n )\n .split(chunk);\n\n for (area, paragraph) in\n header_layout.into_iter().zip(plot_data.header_stats())\n {\n f.render_widget(paragraph, area);\n }\n }\n\n let datasets: Vec<Dataset> = app.data.iter().map(|d| d.into()).collect();\n\n let y_axis_bounds = app.y_axis_bounds();\n let x_axis_bounds = app.x_axis_bounds();\n\n let chart = Chart::new(datasets)\n .block(Block::default().borders(Borders::NONE))\n .x_axis(\n Axis::default()\n .style(Style::default().fg(Color::Gray))\n .bounds(x_axis_bounds)\n .labels(app.x_axis_labels(x_axis_bounds)),\n )\n .y_axis(\n Axis::default()\n .style(Style::default().fg(Color::Gray))\n .bounds(y_axis_bounds)\n .labels(app.y_axis_labels(y_axis_bounds)),\n );\n\n f.render_widget(chart, chart_chunk)\n })?;\n }\n Event::Input(input) => match input.code {\n KeyCode::Char('q') | KeyCode::Esc => {\n killed.store(true, Ordering::Release);\n break;\n }\n KeyCode::Char('c') if input.modifiers == KeyModifiers::CONTROL => {\n killed.store(true, Ordering::Release);\n break;\n }\n _ => {}\n },\n }\n }\n\n for thread in threads {\n thread.join().unwrap()?;\n }\n\n disable_raw_mode()?;\n execute!(\n terminal.backend_mut(),\n LeaveAlternateScreen,\n DisableMouseCapture\n )?;\n terminal.show_cursor()?;\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) 2017-2018 Rene van der Meer\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a\n\/\/ copy of this software and associated documentation files (the \"Software\"),\n\/\/ to deal in the Software without restriction, including without limitation\n\/\/ the rights to use, copy, modify, merge, publish, distribute, sublicense,\n\/\/ and\/or sell copies of the Software, and to permit persons to whom the\n\/\/ Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in\n\/\/ all copies or substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING 
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\/\/ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n\/\/ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\/\/ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n\/\/ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nuse libc;\nuse std::ffi::CString;\nuse std::ptr;\n\n\/\/ Find user ID for specified user\npub fn user_to_uid(name: &str) -> Option<u32> {\n if let Ok(name_cstr) = CString::new(name) {\n unsafe {\n let buf = &mut [0 as libc::c_char; 4096];\n let mut res: *mut libc::passwd = ptr::null_mut();\n let mut pwd = libc::passwd {\n pw_name: ptr::null_mut(),\n pw_passwd: ptr::null_mut(),\n pw_uid: 0,\n pw_gid: 0,\n pw_gecos: ptr::null_mut(),\n pw_dir: ptr::null_mut(),\n pw_shell: ptr::null_mut(),\n };\n\n if libc::getpwnam_r(\n name_cstr.as_ptr(),\n &mut pwd,\n buf.as_mut_ptr(),\n buf.len(),\n &mut res,\n ) == 0\n && res as usize > 0\n {\n return Some((*res).pw_uid);\n }\n }\n }\n\n None\n}\n\n\/\/ Find group ID for specified group\npub fn group_to_gid(name: &str) -> Option<u32> {\n if let Ok(name_cstr) = CString::new(name) {\n unsafe {\n let buf = &mut [0 as libc::c_char; 4096];\n let mut res: *mut libc::group = ptr::null_mut();\n let mut grp = libc::group {\n gr_name: ptr::null_mut(),\n gr_passwd: ptr::null_mut(),\n gr_gid: 0,\n gr_mem: ptr::null_mut(),\n };\n\n if libc::getgrnam_r(\n name_cstr.as_ptr(),\n &mut grp,\n buf.as_mut_ptr(),\n buf.len(),\n &mut res,\n ) == 0\n && res as usize > 0\n {\n return Some((*res).gr_gid);\n }\n }\n }\n\n None\n}\n<commit_msg>Reduce unsafe scope<commit_after>\/\/ Copyright (c) 2017-2018 Rene van der Meer\n\/\/\n\/\/ Permission is hereby granted, free of charge, to any person obtaining a\n\/\/ copy of this software and associated documentation files (the \"Software\"),\n\/\/ to deal in the Software without restriction, including without limitation\n\/\/ the rights to use, copy, modify, merge, publish, distribute, sublicense,\n\/\/ and\/or sell copies of the Software, and to permit persons to whom the\n\/\/ Software is furnished to do so, subject to the following conditions:\n\/\/\n\/\/ The above copyright notice and this permission notice shall be included in\n\/\/ all copies or substantial portions of the Software.\n\/\/\n\/\/ THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\/\/ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\/\/ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL\n\/\/ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\/\/ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n\/\/ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\/\/ DEALINGS IN THE SOFTWARE.\n\nuse libc;\nuse std::ffi::CString;\nuse std::ptr;\n\n\/\/ Find user ID for specified user\npub fn user_to_uid(name: &str) -> Option<u32> {\n if let Ok(name_cstr) = CString::new(name) {\n let buf = &mut [0 as libc::c_char; 4096];\n let mut res: *mut libc::passwd = ptr::null_mut();\n let mut pwd = libc::passwd {\n pw_name: ptr::null_mut(),\n pw_passwd: ptr::null_mut(),\n pw_uid: 0,\n pw_gid: 0,\n pw_gecos: ptr::null_mut(),\n pw_dir: ptr::null_mut(),\n pw_shell: ptr::null_mut(),\n };\n\n unsafe {\n if libc::getpwnam_r(\n name_cstr.as_ptr(),\n &mut pwd,\n buf.as_mut_ptr(),\n buf.len(),\n &mut res,\n ) == 0\n && res as usize > 0\n {\n return Some((*res).pw_uid);\n }\n }\n }\n\n None\n}\n\n\/\/ Find group ID for specified group\npub fn group_to_gid(name: &str) -> Option<u32> {\n if let Ok(name_cstr) = CString::new(name) {\n let buf = &mut [0 as libc::c_char; 4096];\n let mut res: *mut libc::group = ptr::null_mut();\n let mut grp = libc::group {\n gr_name: ptr::null_mut(),\n gr_passwd: ptr::null_mut(),\n gr_gid: 0,\n gr_mem: ptr::null_mut(),\n };\n\n unsafe {\n if libc::getgrnam_r(\n name_cstr.as_ptr(),\n &mut grp,\n buf.as_mut_ptr(),\n buf.len(),\n &mut res,\n ) == 0\n && res as usize > 0\n {\n return Some((*res).gr_gid);\n }\n }\n }\n\n None\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Yes.<commit_after>extern crate termion;\n\nuse termion::{clear, color, cursor};\n\nuse std::{time, thread};\n\nconst COMMUNISM: &'static str = r#\"\n !######### # \n !########! ##! \n !########! ### \n !########## #### \n ######### ##### ###### \n !###! !####! ###### \n ! ##### ######! \n !####! ####### \n ##### ####### \n !####! #######! \n ####!######## \n ## ########## \n ,######! !############# \n ,#### ########################!####! \n ,####' ##################!' ##### \n ,####' ####### !####! \n ####' #####\n ~## ##~\n\"#;\n\nfn main() {\n let mut state = 0;\n\n println!(\"\\n{}{}{}{}{}{}\", cursor::Hide, clear::All, cursor::Goto(1, 1), color::Fg(color::Black), color::Bg(color::Red), COMMUNISM);\n loop {\n println!(\"{}{} ☭ GAY ☭ SPACE ☭ COMMUNISM ☭ \", cursor::Goto(1, 1), color::Bg(color::AnsiValue(state)));\n println!(\"{}{} WILL PREVAIL, COMRADES! \", cursor::Goto(1, 20), color::Bg(color::AnsiValue(state)));\n\n state += 1;\n state %= 8;\n\n thread::sleep(time::Duration::from_millis(90));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: imag-ids reports id after it was created<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/ Toml -> Ruby translation primitives\n\nuse ruru::AnyObject;\nuse toml::Value;\n\npub trait AsRuby : Sized {\n fn as_ruby(&self) -> AnyObject;\n}\n\npub trait IntoRuby : AsRuby {\n fn into_ruby(self) -> AnyObject {\n self.as_ruby()\n }\n}\nimpl<T: AsRuby> IntoRuby for T { }\n\nimpl AsRuby for Value {\n\n fn as_ruby(&self) -> AnyObject {\n unimplemented!()\n }\n\n}\n\n<commit_msg>Add Value::as_ruby() impl<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/ Toml -> Ruby translation primitives\n\nuse ruru::{Object, AnyObject, RString, Fixnum, Float, Boolean, Hash, Array};\nuse toml::Value;\n\npub trait AsRuby : Sized {\n fn as_ruby(&self) -> AnyObject;\n}\n\npub trait IntoRuby : AsRuby {\n fn into_ruby(self) -> AnyObject {\n self.as_ruby()\n }\n}\nimpl<T: AsRuby> IntoRuby for T { }\n\nimpl AsRuby for Value {\n\n fn as_ruby(&self) -> AnyObject {\n match *self {\n Value::String(ref s) => RString::new(&s).to_any_object(),\n Value::Integer(i) => Fixnum::new(i).to_any_object(),\n Value::Float(f) => Float::new(f).to_any_object(),\n Value::Boolean(b) => Boolean::new(b).to_any_object(),\n Value::Datetime(ref s) => RString::new(&s).to_any_object(),\n Value::Array(ref a) => {\n let mut arr = Array::new();\n for obj in a.into_iter().map(AsRuby::as_ruby) {\n arr.push(obj);\n }\n arr.to_any_object()\n },\n Value::Table(ref t) => {\n let mut h = Hash::new();\n for (k, v) in t.into_iter() {\n let key = RString::new(k).to_any_object();\n let v = v.as_ruby();\n h.store(key, v);\n }\n h.to_any_object()\n },\n }\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Rust version<commit_after>\/\/ ShortURL (https:\/\/github.com\/delight-im\/ShortURL)\n\/\/ Copyright (c) delight.im (https:\/\/www.delight.im\/), andra.xyz (http:\/\/andra.xyz\/)\n\/\/ Licensed under the MIT License (https:\/\/opensource.org\/licenses\/MIT)\n\n\/\/\/ # ShortURL\n\/\/\/ Bijective conversion between natural numbers (IDs) (`usize`) and short strings (`String`)\n\/\/\/\n\/\/\/ short_url::encode(usize) takes an ID and turns it into a short string\n\/\/\/ short_url::decode(String) takes a short string and turns it into an ID\n\/\/\/\n\/\/\/ ## Features\n\/\/\/ * large alphabet (51 chars) and thus very short resulting strings\n\/\/\/ * proof against offensive words (removed 'a', 'e', 'i', 'o' and 'u')\n\/\/\/ * unambiguous (removed 'I', 'l', '1', 'O' and 
'0')\n\/\/\/\n\/\/\/ ## Example\n\/\/\/ * 123456789 <=> pgK8p\n\nmod short_url {\n\n\tstatic ALPHABET: &str = \"23456789bcdfghjkmnpqrstvwxyzBCDFGHJKLMNPQRSTVWXYZ-_\";\n\tstatic BASE: usize = 51;\n\n\tpub fn encode(mut id: usize) -> String {\n\t\tlet mut string: String = format!(\"\");\n\t\twhile id > 0 {\n\t\t\tstring.push_str(&ALPHABET[(id % BASE)..(id % BASE + 1)]);\n\t\t\tid = id \/ BASE;\n\t\t}\n\t\tstring.chars().rev().collect()\n\t}\n\n\tpub fn decode(string: String) -> usize {\n\t\tlet mut number: usize = 0;\n\t\tfor c in string.chars() {\n\t\t\tnumber = number * BASE + ALPHABET.find(c).unwrap();\n\t\t}\n\t\tnumber\n\t}\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added isPalindrome for rust<commit_after>#[test]\nfn test_valid_palindrome() {\n assert!(is_palindrome(\"racecar\"));\n}\n\n#[test]\nfn test_invalid_palindrome() {\n assert!(!is_palindrome(\"doggo\"));\n}\n\npub fn is_palindrome(x: &str) -> bool {\n x.chars().eq(x.chars().rev())\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(reason = \"not public\", issue = \"0\", feature = \"fd\")]\n\nuse cmp;\nuse io::{self, Read};\nuse libc::{self, c_int, c_void, ssize_t};\nuse mem;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::cvt;\nuse sys_common::AsInner;\n\n#[derive(Debug)]\npub struct FileDesc {\n fd: c_int,\n}\n\nfn max_len() -> usize {\n \/\/ The maximum read limit on most posix-like systems is `SSIZE_MAX`,\n \/\/ with the man page quoting that if the count of bytes to read is\n \/\/ greater than `SSIZE_MAX` the result is \"unspecified\".\n \/\/\n \/\/ On macOS, however, apparently the 64-bit libc is either buggy or\n \/\/ intentionally showing odd behavior by rejecting any read with a size\n \/\/ larger than or equal to INT_MAX. 
To handle both of these the read\n \/\/ size is capped on both platforms.\n if cfg!(target_os = \"macos\") {\n <c_int>::max_value() as usize - 1\n } else {\n <ssize_t>::max_value() as usize\n }\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n\n pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result<usize> {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pread64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n use convert::TryInto;\n use libc::pread64;\n \/\/ pread64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pread64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pread >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n #[cfg(target_os = \"linux\")]\n use libc::pread64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pread as pread64;\n cvt(pread64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pread64(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n pub fn write(&self, buf: &[u8]) -> io::Result<usize> {\n let ret = cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result<usize> {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pwrite64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n use convert::TryInto;\n use libc::pwrite64;\n \/\/ pwrite64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pwrite64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pwrite >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n #[cfg(target_os = \"linux\")]\n use libc::pwrite64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pwrite as pwrite64;\n cvt(pwrite64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pwrite64(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn get_cloexec(&self) -> io::Result<bool> {\n unsafe {\n Ok((cvt(libc::fcntl(self.fd, libc::F_GETFD))? 
& libc::FD_CLOEXEC) != 0)\n }\n }\n\n #[cfg(not(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\")))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n cvt(libc::ioctl(self.fd, libc::FIOCLEX))?;\n Ok(())\n }\n }\n #[cfg(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\"))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?;\n let new = previous | libc::FD_CLOEXEC;\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFD, new))?;\n }\n Ok(())\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let v = nonblocking as c_int;\n cvt(libc::ioctl(self.fd, libc::FIONBIO, &v))?;\n Ok(())\n }\n }\n\n #[cfg(not(target_os = \"linux\"))]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFL))?;\n let new = if nonblocking {\n previous | libc::O_NONBLOCK\n } else {\n previous & !libc::O_NONBLOCK\n };\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFL, new))?;\n }\n Ok(())\n }\n }\n\n pub fn duplicate(&self) -> io::Result<FileDesc> {\n \/\/ We want to atomically duplicate this file descriptor and set the\n \/\/ CLOEXEC flag, and currently that's done via F_DUPFD_CLOEXEC. This\n \/\/ flag, however, isn't supported on older Linux kernels (earlier than\n \/\/ 2.6.24).\n \/\/\n \/\/ To detect this and ensure that CLOEXEC is still set, we\n \/\/ follow a strategy similar to musl [1] where if passing\n \/\/ F_DUPFD_CLOEXEC causes `fcntl` to return EINVAL it means it's not\n \/\/ supported (the third parameter, 0, is always valid), so we stop\n \/\/ trying that.\n \/\/\n \/\/ Also note that Android doesn't have F_DUPFD_CLOEXEC, but get it to\n \/\/ resolve so we at least compile this.\n \/\/\n \/\/ [1]: http:\/\/comments.gmane.org\/gmane.linux.lib.musl.general\/2963\n #[cfg(any(target_os = \"android\", target_os = \"haiku\"))]\n use libc::F_DUPFD as F_DUPFD_CLOEXEC;\n #[cfg(not(any(target_os = \"android\", target_os=\"haiku\")))]\n use libc::F_DUPFD_CLOEXEC;\n\n let make_filedesc = |fd| {\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(fd)\n };\n static TRY_CLOEXEC: AtomicBool =\n AtomicBool::new(!cfg!(target_os = \"android\"));\n let fd = self.raw();\n if TRY_CLOEXEC.load(Ordering::Relaxed) {\n match cvt(unsafe { libc::fcntl(fd, F_DUPFD_CLOEXEC, 0) }) {\n \/\/ We *still* call the `set_cloexec` method as apparently some\n \/\/ linux kernel at some point stopped setting CLOEXEC even\n \/\/ though it reported doing so on F_DUPFD_CLOEXEC.\n Ok(fd) => {\n return Ok(if cfg!(target_os = \"linux\") {\n make_filedesc(fd)?\n } else {\n FileDesc::new(fd)\n })\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {\n TRY_CLOEXEC.store(false, Ordering::Relaxed);\n }\n Err(e) => return Err(e),\n }\n }\n cvt(unsafe { libc::fcntl(fd, libc::F_DUPFD, 0) }).and_then(make_filedesc)\n }\n}\n\nimpl<'a> Read for &'a FileDesc {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n}\n\nimpl AsInner<c_int> for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. 
The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\n<commit_msg>Rollup merge of #53981 - fbernier:patch-1, r=sfackler<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![unstable(reason = \"not public\", issue = \"0\", feature = \"fd\")]\n\nuse cmp;\nuse io::{self, Read, Initializer};\nuse libc::{self, c_int, c_void, ssize_t};\nuse mem;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::cvt;\nuse sys_common::AsInner;\n\n#[derive(Debug)]\npub struct FileDesc {\n fd: c_int,\n}\n\nfn max_len() -> usize {\n \/\/ The maximum read limit on most posix-like systems is `SSIZE_MAX`,\n \/\/ with the man page quoting that if the count of bytes to read is\n \/\/ greater than `SSIZE_MAX` the result is \"unspecified\".\n \/\/\n \/\/ On macOS, however, apparently the 64-bit libc is either buggy or\n \/\/ intentionally showing odd behavior by rejecting any read with a size\n \/\/ larger than or equal to INT_MAX. To handle both of these the read\n \/\/ size is capped on both platforms.\n if cfg!(target_os = \"macos\") {\n <c_int>::max_value() as usize - 1\n } else {\n <ssize_t>::max_value() as usize\n }\n}\n\nimpl FileDesc {\n pub fn new(fd: c_int) -> FileDesc {\n FileDesc { fd: fd }\n }\n\n pub fn raw(&self) -> c_int { self.fd }\n\n \/\/\/ Extracts the actual filedescriptor without closing it.\n pub fn into_raw(self) -> c_int {\n let fd = self.fd;\n mem::forget(self);\n fd\n }\n\n pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {\n let ret = cvt(unsafe {\n libc::read(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn read_to_end(&self, buf: &mut Vec<u8>) -> io::Result<usize> {\n let mut me = self;\n (&mut me).read_to_end(buf)\n }\n\n pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result<usize> {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pread64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n use convert::TryInto;\n use libc::pread64;\n \/\/ pread64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pread64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pread >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n #[cfg(target_os = \"linux\")]\n use libc::pread64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pread as pread64;\n cvt(pread64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pread64(self.fd,\n buf.as_mut_ptr() as *mut c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n pub fn write(&self, buf: &[u8]) -> 
io::Result<usize> {\n let ret = cvt(unsafe {\n libc::write(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()))\n })?;\n Ok(ret as usize)\n }\n\n pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result<usize> {\n #[cfg(target_os = \"android\")]\n use super::android::cvt_pwrite64;\n\n #[cfg(target_os = \"emscripten\")]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n use convert::TryInto;\n use libc::pwrite64;\n \/\/ pwrite64 on emscripten actually takes a 32 bit offset\n if let Ok(o) = offset.try_into() {\n cvt(pwrite64(fd, buf, count, o))\n } else {\n Err(io::Error::new(io::ErrorKind::InvalidInput,\n \"cannot pwrite >2GB\"))\n }\n }\n\n #[cfg(not(any(target_os = \"android\", target_os = \"emscripten\")))]\n unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64)\n -> io::Result<isize>\n {\n #[cfg(target_os = \"linux\")]\n use libc::pwrite64;\n #[cfg(not(target_os = \"linux\"))]\n use libc::pwrite as pwrite64;\n cvt(pwrite64(fd, buf, count, offset))\n }\n\n unsafe {\n cvt_pwrite64(self.fd,\n buf.as_ptr() as *const c_void,\n cmp::min(buf.len(), max_len()),\n offset as i64)\n .map(|n| n as usize)\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn get_cloexec(&self) -> io::Result<bool> {\n unsafe {\n Ok((cvt(libc::fcntl(self.fd, libc::F_GETFD))? & libc::FD_CLOEXEC) != 0)\n }\n }\n\n #[cfg(not(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\")))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n cvt(libc::ioctl(self.fd, libc::FIOCLEX))?;\n Ok(())\n }\n }\n #[cfg(any(target_env = \"newlib\",\n target_os = \"solaris\",\n target_os = \"emscripten\",\n target_os = \"fuchsia\",\n target_os = \"l4re\",\n target_os = \"haiku\"))]\n pub fn set_cloexec(&self) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?;\n let new = previous | libc::FD_CLOEXEC;\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFD, new))?;\n }\n Ok(())\n }\n }\n\n #[cfg(target_os = \"linux\")]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let v = nonblocking as c_int;\n cvt(libc::ioctl(self.fd, libc::FIONBIO, &v))?;\n Ok(())\n }\n }\n\n #[cfg(not(target_os = \"linux\"))]\n pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> {\n unsafe {\n let previous = cvt(libc::fcntl(self.fd, libc::F_GETFL))?;\n let new = if nonblocking {\n previous | libc::O_NONBLOCK\n } else {\n previous & !libc::O_NONBLOCK\n };\n if new != previous {\n cvt(libc::fcntl(self.fd, libc::F_SETFL, new))?;\n }\n Ok(())\n }\n }\n\n pub fn duplicate(&self) -> io::Result<FileDesc> {\n \/\/ We want to atomically duplicate this file descriptor and set the\n \/\/ CLOEXEC flag, and currently that's done via F_DUPFD_CLOEXEC. 
This\n \/\/ flag, however, isn't supported on older Linux kernels (earlier than\n \/\/ 2.6.24).\n \/\/\n \/\/ To detect this and ensure that CLOEXEC is still set, we\n \/\/ follow a strategy similar to musl [1] where if passing\n \/\/ F_DUPFD_CLOEXEC causes `fcntl` to return EINVAL it means it's not\n \/\/ supported (the third parameter, 0, is always valid), so we stop\n \/\/ trying that.\n \/\/\n \/\/ Also note that Android doesn't have F_DUPFD_CLOEXEC, but get it to\n \/\/ resolve so we at least compile this.\n \/\/\n \/\/ [1]: http:\/\/comments.gmane.org\/gmane.linux.lib.musl.general\/2963\n #[cfg(any(target_os = \"android\", target_os = \"haiku\"))]\n use libc::F_DUPFD as F_DUPFD_CLOEXEC;\n #[cfg(not(any(target_os = \"android\", target_os=\"haiku\")))]\n use libc::F_DUPFD_CLOEXEC;\n\n let make_filedesc = |fd| {\n let fd = FileDesc::new(fd);\n fd.set_cloexec()?;\n Ok(fd)\n };\n static TRY_CLOEXEC: AtomicBool =\n AtomicBool::new(!cfg!(target_os = \"android\"));\n let fd = self.raw();\n if TRY_CLOEXEC.load(Ordering::Relaxed) {\n match cvt(unsafe { libc::fcntl(fd, F_DUPFD_CLOEXEC, 0) }) {\n \/\/ We *still* call the `set_cloexec` method as apparently some\n \/\/ linux kernel at some point stopped setting CLOEXEC even\n \/\/ though it reported doing so on F_DUPFD_CLOEXEC.\n Ok(fd) => {\n return Ok(if cfg!(target_os = \"linux\") {\n make_filedesc(fd)?\n } else {\n FileDesc::new(fd)\n })\n }\n Err(ref e) if e.raw_os_error() == Some(libc::EINVAL) => {\n TRY_CLOEXEC.store(false, Ordering::Relaxed);\n }\n Err(e) => return Err(e),\n }\n }\n cvt(unsafe { libc::fcntl(fd, libc::F_DUPFD, 0) }).and_then(make_filedesc)\n }\n}\n\nimpl<'a> Read for &'a FileDesc {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n (**self).read(buf)\n }\n\n #[inline]\n unsafe fn initializer(&self) -> Initializer {\n Initializer::nop()\n }\n}\n\nimpl AsInner<c_int> for FileDesc {\n fn as_inner(&self) -> &c_int { &self.fd }\n}\n\nimpl Drop for FileDesc {\n fn drop(&mut self) {\n \/\/ Note that errors are ignored when closing a file descriptor. 
The\n \/\/ reason for this is that if an error occurs we don't actually know if\n \/\/ the file descriptor was closed or not, and if we retried (for\n \/\/ something like EINTR), we might close another valid file descriptor\n \/\/ opened after we closed ours.\n let _ = unsafe { libc::close(self.fd) };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>generate ioctl bindings for x86_64<commit_after>\/* automatically generated by rust-bindgen *\/\n\npub type __u32 = :: std :: os :: raw :: c_uint ; pub const _HIDIOCGRDESCSIZE : __u32 = 2147764225 ; pub const _HIDIOCGRDESC : __u32 = 2416199682 ;<|endoftext|>"} {"text":"<commit_before><commit_msg>Initial backend for `videoroom` example Rust application<commit_after>use crate::participant::ParticipantConnection;\nuse crate::room::RoomId;\nuse crate::rooms_registry::RoomsRegistry;\nuse actix_web::web::{Data, Payload, Query};\nuse actix_web::{web, App, Error, HttpRequest, HttpResponse, HttpServer};\nuse actix_web_actors::ws;\nuse mediasoup::prelude::*;\nuse serde::Deserialize;\nuse std::num::{NonZeroU32, NonZeroU8};\n\nmod room {\n use crate::participant::ParticipantId;\n use event_listener_primitives::{Bag, BagOnce, HandlerId};\n use mediasoup::prelude::*;\n use parking_lot::Mutex;\n use serde::{Deserialize, Serialize};\n use std::collections::HashMap;\n use std::fmt;\n use std::sync::{Arc, Weak};\n use uuid::Uuid;\n\n #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Deserialize, Serialize)]\n pub struct RoomId(Uuid);\n\n impl fmt::Display for RoomId {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n fmt::Display::fmt(&self.0, f)\n }\n }\n\n impl RoomId {\n pub fn new() -> Self {\n Self(Uuid::new_v4())\n }\n }\n\n #[derive(Default)]\n struct Handlers {\n producer_add: Bag<Box<dyn Fn(&ParticipantId, &Producer) + Send + Sync>>,\n producer_remove: Bag<Box<dyn Fn(&ParticipantId, &ProducerId) + Send + Sync>>,\n close: BagOnce<Box<dyn FnOnce() + Send>>,\n }\n\n struct Inner {\n id: RoomId,\n handlers: Handlers,\n clients: Mutex<HashMap<ParticipantId, Vec<Producer>>>,\n }\n\n impl fmt::Debug for Inner {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n f.debug_struct(\"Inner\")\n .field(\"id\", &self.id)\n .field(\"handlers\", &\"...\")\n .field(\"clients\", &self.clients)\n .finish()\n }\n }\n\n \/\/\/ Room holds producers of the participants such that other participants can consume audio and\n \/\/\/ video tracks of each other\n #[derive(Debug, Clone)]\n pub struct Room {\n inner: Arc<Inner>,\n }\n\n impl Drop for Room {\n fn drop(&mut self) {\n println!(\"Room {} closed\", self.inner.id);\n\n self.inner.handlers.close.call_simple();\n }\n }\n\n impl Room {\n \/\/\/ Create new `Room` with random `RoomId`\n pub fn new() -> Self {\n Self::new_with_id(RoomId::new())\n }\n\n \/\/\/ Create new `Room` with a specific `RoomId`\n pub fn new_with_id(id: RoomId) -> Room {\n println!(\"Room {} created\", id);\n\n Self {\n inner: Arc::new(Inner {\n id,\n handlers: Handlers::default(),\n clients: Mutex::default(),\n }),\n }\n }\n\n \/\/\/ ID of the room\n pub fn id(&self) -> RoomId {\n self.inner.id\n }\n\n \/\/\/ Add producer to the room, this will trigger notifications to other participants that\n \/\/\/ will be able to consume it\n pub fn add_producer(&self, participant_id: ParticipantId, producer: Producer) {\n self.inner\n .clients\n .lock()\n .entry(participant_id)\n .or_default()\n .push(producer.clone());\n\n self.inner.handlers.producer_add.call(|callback| {\n callback(&participant_id, &producer);\n 
});\n }\n\n \/\/\/ Remove participant and all of its associated producers\n pub fn remove_participant(&self, participant_id: &ParticipantId) {\n let producers = self.inner.clients.lock().remove(participant_id);\n\n for producer in producers.unwrap_or_default() {\n let producer_id = &producer.id();\n self.inner.handlers.producer_remove.call(|callback| {\n callback(participant_id, producer_id);\n });\n }\n }\n\n \/\/\/ Get all producers of all participants, useful when new participant connects and needs to\n \/\/\/ consume tracks of everyone who is already in the room\n pub fn get_all_producers(&self) -> Vec<(ParticipantId, ProducerId)> {\n self.inner\n .clients\n .lock()\n .iter()\n .map(|(participant_id, producers)| {\n let participant_id = *participant_id;\n producers\n .iter()\n .map(move |producer| (participant_id, producer.id()))\n })\n .flatten()\n .collect()\n }\n\n \/\/\/ Subscribe to notifications when new producer is added to the room\n pub fn on_producer_add<F: Fn(&ParticipantId, &Producer) + Send + Sync + 'static>(\n &self,\n callback: F,\n ) -> HandlerId {\n self.inner.handlers.producer_add.add(Box::new(callback))\n }\n\n \/\/\/ Subscribe to notifications when producer is removed from the room\n pub fn on_producer_remove<F: Fn(&ParticipantId, &ProducerId) + Send + Sync + 'static>(\n &self,\n callback: F,\n ) -> HandlerId {\n self.inner.handlers.producer_remove.add(Box::new(callback))\n }\n\n \/\/\/ Subscribe to notification when room is closed\n pub fn on_close<F: FnOnce() + Send + 'static>(&self, callback: F) -> HandlerId {\n self.inner.handlers.close.add(Box::new(callback))\n }\n\n \/\/\/ Get `WeakRoom` that can later be upgraded to `Room`, but will not prevent room from\n \/\/\/ being destroyed\n pub fn downgrade(&self) -> WeakRoom {\n WeakRoom {\n inner: Arc::downgrade(&self.inner),\n }\n }\n }\n\n \/\/\/ Similar to `Room`, but doesn't prevent room from being destroyed\n #[derive(Debug, Clone)]\n pub struct WeakRoom {\n inner: Weak<Inner>,\n }\n\n impl WeakRoom {\n \/\/\/ Upgrade `WeakRoom` to `Room`, may return `None` if underlying room was destroyed already\n pub fn upgrade(&self) -> Option<Room> {\n self.inner.upgrade().map(|inner| Room { inner })\n }\n }\n}\n\nmod rooms_registry {\n use crate::room::{Room, RoomId, WeakRoom};\n use parking_lot::Mutex;\n use std::collections::hash_map::Entry;\n use std::collections::HashMap;\n use std::sync::Arc;\n\n #[derive(Debug, Default, Clone)]\n pub struct RoomsRegistry {\n \/\/ We store `WeakRoom` instead of full `Room` to avoid cycles and to not prevent rooms from\n \/\/ being destroyed when last participant disconnects\n rooms: Arc<Mutex<HashMap<RoomId, WeakRoom>>>,\n }\n\n impl RoomsRegistry {\n \/\/\/ Retrieves existing room or creates a new one with specified `RoomId`\n pub fn get_or_create_room(&self, room_id: RoomId) -> Room {\n let mut rooms = self.rooms.lock();\n match rooms.entry(room_id) {\n Entry::Occupied(mut entry) => match entry.get().upgrade() {\n Some(room) => room,\n None => {\n let room = Room::new_with_id(room_id);\n entry.insert(room.downgrade());\n room.on_close({\n let room_id = room.id();\n let rooms = Arc::clone(&self.rooms);\n\n move || {\n rooms.lock().remove(&room_id);\n }\n })\n .detach();\n room\n }\n },\n Entry::Vacant(entry) => {\n let room = Room::new_with_id(room_id);\n entry.insert(room.downgrade());\n room.on_close({\n let room_id = room.id();\n let rooms = Arc::clone(&self.rooms);\n\n move || {\n rooms.lock().remove(&room_id);\n }\n })\n .detach();\n room\n }\n }\n }\n\n \/\/\/ Create new room 
with random `RoomId`\n pub fn create_room(&self) -> Room {\n let mut rooms = self.rooms.lock();\n let room = Room::new();\n rooms.insert(room.id(), room.downgrade());\n room.on_close({\n let room_id = room.id();\n let rooms = Arc::clone(&self.rooms);\n\n move || {\n rooms.lock().remove(&room_id);\n }\n })\n .detach();\n room\n }\n }\n}\n\nmod participant {\n use crate::participant::messages::{\n ClientMessage, InternalMessage, ServerMessage, TransportOptions,\n };\n use crate::room::Room;\n use actix::prelude::*;\n use actix_web_actors::ws;\n use event_listener_primitives::HandlerId;\n use mediasoup::prelude::*;\n use serde::{Deserialize, Serialize};\n use std::collections::HashMap;\n use std::fmt;\n use uuid::Uuid;\n\n mod messages {\n use crate::participant::ParticipantId;\n use crate::room::RoomId;\n use actix::prelude::*;\n use mediasoup::prelude::*;\n use serde::{Deserialize, Serialize};\n\n \/\/\/ Data structure containing all the necessary information about transport options required\n \/\/\/ from the server to establish transport connection on the client\n #[derive(Serialize)]\n #[serde(rename_all = \"camelCase\")]\n pub struct TransportOptions {\n pub id: TransportId,\n pub dtls_parameters: DtlsParameters,\n pub ice_candidates: Vec<IceCandidate>,\n pub ice_parameters: IceParameters,\n }\n\n \/\/\/ Server messages sent to the client\n #[derive(Serialize, Message)]\n #[serde(tag = \"action\")]\n #[rtype(result = \"()\")]\n pub enum ServerMessage {\n \/\/\/ Initialization message with consumer\/producer transport options and Router's RTP\n \/\/\/ capabilities necessary to establish WebRTC transport connection client-side\n #[serde(rename_all = \"camelCase\")]\n Init {\n room_id: RoomId,\n consumer_transport_options: TransportOptions,\n producer_transport_options: TransportOptions,\n router_rtp_capabilities: RtpCapabilitiesFinalized,\n },\n \/\/\/ Notification that new producer was added to the room\n #[serde(rename_all = \"camelCase\")]\n ProducerAdded {\n participant_id: ParticipantId,\n producer_id: ProducerId,\n },\n \/\/\/ Notification that producer was removed from the room\n #[serde(rename_all = \"camelCase\")]\n ProducerRemoved {\n participant_id: ParticipantId,\n producer_id: ProducerId,\n },\n \/\/\/ Notification that producer transport was connected successfully (in case of error\n \/\/\/ connection is just dropped, in real-world application you probably want to handle it\n \/\/\/ better)\n ConnectedProducerTransport,\n \/\/\/ Notification that producer was created on the server\n #[serde(rename_all = \"camelCase\")]\n Produced { id: ProducerId },\n \/\/\/ Notification that consumer transport was connected successfully (in case of error\n \/\/\/ connection is just dropped, in real-world application you probably want to handle it\n \/\/\/ better)\n ConnectedConsumerTransport,\n \/\/\/ Notification that consumer was successfully created server-side, client can resume\n \/\/\/ the consumer after this\n #[serde(rename_all = \"camelCase\")]\n Consumed {\n id: ConsumerId,\n producer_id: ProducerId,\n kind: MediaKind,\n rtp_parameters: RtpParameters,\n },\n }\n\n \/\/\/ Client messages sent to the server\n #[derive(Deserialize, Message)]\n #[serde(tag = \"action\")]\n #[rtype(result = \"()\")]\n pub enum ClientMessage {\n \/\/\/ Client-side initialization with its RTP capabilities, in this simple case we expect\n \/\/\/ those to match server Router's RTP capabilities\n #[serde(rename_all = \"camelCase\")]\n Init { rtp_capabilities: RtpCapabilities },\n \/\/\/ Request to connect 
producer transport with client-side DTLS parameters\n #[serde(rename_all = \"camelCase\")]\n ConnectProducerTransport { dtls_parameters: DtlsParameters },\n \/\/\/ Request to produce a new audio or video track with specified RTP parameters\n #[serde(rename_all = \"camelCase\")]\n Produce {\n kind: MediaKind,\n rtp_parameters: RtpParameters,\n },\n \/\/\/ Request to connect consumer transport with client-side DTLS parameters\n #[serde(rename_all = \"camelCase\")]\n ConnectConsumerTransport { dtls_parameters: DtlsParameters },\n \/\/\/ Request to consume specified producer\n #[serde(rename_all = \"camelCase\")]\n Consume { producer_id: ProducerId },\n \/\/\/ Request to resume consumer that was previously created\n #[serde(rename_all = \"camelCase\")]\n ConsumerResume { id: ConsumerId },\n }\n\n \/\/\/ Internal actor messages for convenience\n #[derive(Message)]\n #[rtype(result = \"()\")]\n pub enum InternalMessage {\n \/\/\/ Save producer in connection-specific hashmap to prevent it from being destroyed\n SaveProducer(Producer),\n \/\/\/ Save consumer in connection-specific hashmap to prevent it from being destroyed\n SaveConsumer(Consumer),\n \/\/\/ Stop\/close the WebSocket connection\n Stop,\n }\n }\n\n #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Deserialize, Serialize)]\n pub struct ParticipantId(Uuid);\n\n impl fmt::Display for ParticipantId {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n fmt::Display::fmt(&self.0, f)\n }\n }\n\n impl ParticipantId {\n fn new() -> Self {\n Self(Uuid::new_v4())\n }\n }\n\n \/\/\/ Consumer\/producer transports pair for the client\n struct Transports {\n consumer: WebRtcTransport,\n producer: WebRtcTransport,\n }\n\n \/\/\/ Actor that will represent WebSocket connection from the client, it will handle inbound and\n \/\/\/ outbound WebSocket messages in JSON.\n \/\/\/\n \/\/\/ See https:\/\/actix.rs\/docs\/websockets\/ for official `actix-web` documentation.\n pub struct ParticipantConnection {\n id: ParticipantId,\n \/\/\/ RTP capabilities received from the client\n client_rtp_capabilities: Option<RtpCapabilities>,\n \/\/\/ Consumers associated with this client, preventing them from being destroyed\n consumers: HashMap<ConsumerId, Consumer>,\n \/\/\/ Producers associated with this client, preventing them from being destroyed\n producers: Vec<Producer>,\n \/\/\/ Producers associated with this client, useful to get its RTP capabilities later\n router: Router,\n \/\/\/ Consumer and producer transports associated with this client\n transports: Transports,\n \/\/\/ Room to which the client belongs\n room: Room,\n \/\/\/ Event handlers that were attached and need to be removed when participant connection is\n \/\/\/ destroyed\n attached_handlers: Vec<HandlerId>,\n }\n\n impl Drop for ParticipantConnection {\n fn drop(&mut self) {\n self.room.remove_participant(&self.id);\n }\n }\n\n impl ParticipantConnection {\n \/\/\/ Create a new instance representing WebSocket connection\n pub async fn new(worker_manager: &WorkerManager, room: Room) -> Result<Self, String> {\n let worker = worker_manager\n .create_worker(WorkerSettings::default())\n .await\n .map_err(|error| format!(\"Failed to create worker: {}\", error))?;\n let router = worker\n .create_router(RouterOptions::new(crate::media_codecs()))\n .await\n .map_err(|error| format!(\"Failed to create router: {}\", error))?;\n\n \/\/ We know that for videoroom example we'll need 2 transports, so we can create both\n \/\/ right away. 
This may not be the case for real-world applications or you may create\n \/\/ this at a different time and\/or in different order.\n let transport_options =\n WebRtcTransportOptions::new(TransportListenIps::new(TransportListenIp {\n ip: \"127.0.0.1\".parse().unwrap(),\n announced_ip: None,\n }));\n let producer_transport = router\n .create_webrtc_transport(transport_options.clone())\n .await\n .map_err(|error| format!(\"Failed to create producer transport: {}\", error))?;\n\n let consumer_transport = router\n .create_webrtc_transport(transport_options)\n .await\n .map_err(|error| format!(\"Failed to create consumer transport: {}\", error))?;\n\n Ok(Self {\n id: ParticipantId::new(),\n client_rtp_capabilities: None,\n consumers: HashMap::new(),\n producers: vec![],\n router,\n transports: Transports {\n consumer: consumer_transport,\n producer: producer_transport,\n },\n room,\n attached_handlers: Vec::new(),\n })\n }\n }\n\n impl Actor for ParticipantConnection {\n type Context = ws::WebsocketContext<Self>;\n\n fn started(&mut self, ctx: &mut Self::Context) {\n println!(\"[participant_id {}] WebSocket connection created\", self.id);\n\n \/\/ We know that both consumer and producer transports will be used, so we sent server\n \/\/ information about both in an initialization message alongside with router\n \/\/ capabilities to the client right after WebSocket connection is established\n let server_init_message = ServerMessage::Init {\n room_id: self.room.id(),\n consumer_transport_options: TransportOptions {\n id: self.transports.consumer.id(),\n dtls_parameters: self.transports.consumer.dtls_parameters(),\n ice_candidates: self.transports.consumer.ice_candidates().clone(),\n ice_parameters: self.transports.consumer.ice_parameters().clone(),\n },\n producer_transport_options: TransportOptions {\n id: self.transports.producer.id(),\n dtls_parameters: self.transports.producer.dtls_parameters(),\n ice_candidates: self.transports.producer.ice_candidates().clone(),\n ice_parameters: self.transports.producer.ice_parameters().clone(),\n },\n router_rtp_capabilities: self.router.rtp_capabilities().clone(),\n };\n\n let address = ctx.address();\n address.do_send(server_init_message);\n\n \/\/ Listen for new producers added to the room\n self.attached_handlers.push(self.room.on_producer_add({\n let own_participant_id = self.id;\n let address = address.clone();\n\n move |participant_id, producer| {\n if &own_participant_id == participant_id {\n return;\n }\n address.do_send(ServerMessage::ProducerAdded {\n participant_id: *participant_id,\n producer_id: producer.id(),\n });\n }\n }));\n\n \/\/ Listen for producers removed from the the room\n self.attached_handlers.push(self.room.on_producer_remove({\n let own_participant_id = self.id;\n let address = address.clone();\n\n move |participant_id, producer_id| {\n if &own_participant_id == participant_id {\n return;\n }\n address.do_send(ServerMessage::ProducerRemoved {\n participant_id: *participant_id,\n producer_id: *producer_id,\n });\n }\n }));\n\n \/\/ Notify client about any producers that already exist in the room\n for (participant_id, producer_id) in self.room.get_all_producers() {\n address.do_send(ServerMessage::ProducerAdded {\n participant_id,\n producer_id,\n });\n }\n }\n\n fn stopped(&mut self, _ctx: &mut Self::Context) {\n println!(\"[participant_id {}] WebSocket connection closed\", self.id);\n }\n }\n\n impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for ParticipantConnection {\n fn handle(&mut self, msg: Result<ws::Message, 
ws::ProtocolError>, ctx: &mut Self::Context) {\n \/\/ Here we handle incoming WebSocket messages, intentionally not handling continuation\n \/\/ messages since we know all messages will fit into a single frame, but in real-world\n \/\/ apps you need to handle continuation frames too (`ws::Message::Continuation`)\n match msg {\n Ok(ws::Message::Ping(msg)) => {\n ctx.pong(&msg);\n }\n Ok(ws::Message::Pong(_)) => {}\n Ok(ws::Message::Text(text)) => match serde_json::from_str::<ClientMessage>(&text) {\n Ok(message) => {\n \/\/ Parse JSON into an enum and just send it back to the actor to be\n \/\/ processed by another handler below, it is much more convenient to just\n \/\/ parse it in one place and have typed data structure everywhere else\n ctx.address().do_send(message);\n }\n Err(error) => {\n eprint!(\"Failed to parse client message: {}\\n{}\", error, text);\n }\n },\n Ok(ws::Message::Binary(bin)) => {\n eprint!(\"Unexpected binary message: {:?}\", bin);\n }\n Ok(ws::Message::Close(reason)) => {\n ctx.close(reason);\n ctx.stop();\n }\n _ => ctx.stop(),\n }\n }\n }\n\n impl Handler<ClientMessage> for ParticipantConnection {\n type Result = ();\n\n fn handle(&mut self, message: ClientMessage, ctx: &mut Self::Context) {\n match message {\n ClientMessage::Init { rtp_capabilities } => {\n \/\/ We need to know client's RTP capabilities, those are sent using\n \/\/ initialization message and are stored in connection struct for future use\n self.client_rtp_capabilities.replace(rtp_capabilities);\n }\n ClientMessage::ConnectProducerTransport { dtls_parameters } => {\n let participant_id = self.id;\n let address = ctx.address();\n let transport = self.transports.producer.clone();\n \/\/ Establish connection for producer transport using DTLS parameters received\n \/\/ from the client, but doing so in a background task since this handler is\n \/\/ synchronous\n actix::spawn(async move {\n match transport\n .connect(WebRtcTransportRemoteParameters { dtls_parameters })\n .await\n {\n Ok(_) => {\n address.do_send(ServerMessage::ConnectedProducerTransport);\n println!(\n \"[participant_id {}] Producer transport connected\",\n participant_id,\n );\n }\n Err(error) => {\n eprint!(\"Failed to connect producer transport: {}\", error);\n address.do_send(InternalMessage::Stop);\n }\n }\n });\n }\n ClientMessage::Produce {\n kind,\n rtp_parameters,\n } => {\n let participant_id = self.id;\n let address = ctx.address();\n let transport = self.transports.producer.clone();\n let room = self.room.clone();\n \/\/ Use producer transport to create a new producer on the server with given RTP\n \/\/ parameters\n actix::spawn(async move {\n match transport\n .produce(ProducerOptions::new(kind, rtp_parameters))\n .await\n {\n Ok(producer) => {\n let id = producer.id();\n address.do_send(ServerMessage::Produced { id });\n \/\/ Add producer to the room so that others can consume it\n room.add_producer(participant_id, producer.clone());\n \/\/ Producer is stored in a hashmap since if we don't do it, it will\n \/\/ get destroyed as soon as its instance goes out out scope\n address.do_send(InternalMessage::SaveProducer(producer));\n println!(\n \"[participant_id {}] {:?} producer created: {}\",\n participant_id, kind, id,\n );\n }\n Err(error) => {\n eprint!(\n \"[participant_id {}] Failed to create {:?} producer: {}\",\n participant_id, kind, error\n );\n address.do_send(InternalMessage::Stop);\n }\n }\n });\n }\n ClientMessage::ConnectConsumerTransport { dtls_parameters } => {\n let participant_id = self.id;\n let address = 
ctx.address();\n let transport = self.transports.consumer.clone();\n \/\/ The same as producer transport, but for consumer transport\n actix::spawn(async move {\n match transport\n .connect(WebRtcTransportRemoteParameters { dtls_parameters })\n .await\n {\n Ok(_) => {\n address.do_send(ServerMessage::ConnectedConsumerTransport);\n println!(\n \"[participant_id {}] Consumer transport connected\",\n participant_id,\n );\n }\n Err(error) => {\n eprint!(\n \"[participant_id {}] Failed to connect consumer transport: {}\",\n participant_id, error,\n );\n address.do_send(InternalMessage::Stop);\n }\n }\n });\n }\n ClientMessage::Consume { producer_id } => {\n let participant_id = self.id;\n let address = ctx.address();\n let transport = self.transports.consumer.clone();\n let rtp_capabilities = match self.client_rtp_capabilities.clone() {\n Some(rtp_capabilities) => rtp_capabilities,\n None => {\n eprintln!(\n \"[participant_id {}] Client should send RTP capabilities before \\\n consuming\",\n participant_id,\n );\n return;\n }\n };\n \/\/ Create consumer for given producer ID, while first making sure that RTP\n \/\/ capabilities were sent by the client prior to that\n actix::spawn(async move {\n let mut options = ConsumerOptions::new(producer_id, rtp_capabilities);\n options.paused = true;\n\n match transport.consume(options).await {\n Ok(consumer) => {\n let id = consumer.id();\n let kind = consumer.kind();\n let rtp_parameters = consumer.rtp_parameters().clone();\n address.do_send(ServerMessage::Consumed {\n id,\n producer_id,\n kind,\n rtp_parameters,\n });\n \/\/ Consumer is stored in a hashmap since if we don't do it, it will\n \/\/ get destroyed as soon as its instance goes out out scope\n address.do_send(InternalMessage::SaveConsumer(consumer));\n println!(\n \"[participant_id {}] {:?} consumer created: {}\",\n participant_id, kind, id,\n );\n }\n Err(error) => {\n eprint!(\n \"[participant_id {}] Failed to create consumer: {}\",\n participant_id, error,\n );\n address.do_send(InternalMessage::Stop);\n }\n }\n });\n }\n ClientMessage::ConsumerResume { id } => {\n if let Some(consumer) = self.consumers.get(&id).cloned() {\n let participant_id = self.id;\n actix::spawn(async move {\n match consumer.resume().await {\n Ok(_) => {\n println!(\n \"[participant_id {}] Successfully resumed {:?} consumer {}\",\n participant_id,\n consumer.kind(),\n consumer.id(),\n );\n }\n Err(error) => {\n println!(\n \"[participant_id {}] Failed to resume {:?} consumer {}: {}\",\n participant_id,\n consumer.kind(),\n consumer.id(),\n error,\n );\n }\n }\n });\n }\n }\n }\n }\n }\n\n \/\/\/ Simple handler that will transform typed server messages into JSON and send them over to the\n \/\/\/ client over WebSocket connection\n impl Handler<ServerMessage> for ParticipantConnection {\n type Result = ();\n\n fn handle(&mut self, message: ServerMessage, ctx: &mut Self::Context) {\n ctx.text(serde_json::to_string(&message).unwrap());\n }\n }\n\n \/\/\/ Convenience handler for internal messages, these actions require mutable access to the\n \/\/\/ connection struct and having such message handler makes it easy to use from background tasks\n \/\/\/ where otherwise Mutex would have to be used instead\n impl Handler<InternalMessage> for ParticipantConnection {\n type Result = ();\n\n fn handle(&mut self, message: InternalMessage, ctx: &mut Self::Context) {\n match message {\n InternalMessage::Stop => {\n ctx.stop();\n }\n InternalMessage::SaveProducer(producer) => {\n \/\/ Retain producer to prevent it from being 
destroyed\n self.producers.push(producer);\n }\n InternalMessage::SaveConsumer(consumer) => {\n self.consumers.insert(consumer.id(), consumer);\n }\n }\n }\n }\n}\n\n\/\/\/ List of codecs that SFU will accept from clients\nfn media_codecs() -> Vec<RtpCodecCapability> {\n vec![\n RtpCodecCapability::Audio {\n mime_type: MimeTypeAudio::Opus,\n preferred_payload_type: None,\n clock_rate: NonZeroU32::new(48000).unwrap(),\n channels: NonZeroU8::new(2).unwrap(),\n parameters: RtpCodecParametersParameters::from([(\"useinbandfec\", 1_u32.into())]),\n rtcp_feedback: vec![RtcpFeedback::TransportCc],\n },\n RtpCodecCapability::Video {\n mime_type: MimeTypeVideo::Vp8,\n preferred_payload_type: None,\n clock_rate: NonZeroU32::new(90000).unwrap(),\n parameters: RtpCodecParametersParameters::default(),\n rtcp_feedback: vec![\n RtcpFeedback::Nack,\n RtcpFeedback::NackPli,\n RtcpFeedback::CcmFir,\n RtcpFeedback::GoogRemb,\n RtcpFeedback::TransportCc,\n ],\n },\n ]\n}\n\n#[derive(Debug, Deserialize)]\n#[serde(rename_all = \"camelCase\")]\nstruct QueryParameters {\n room_id: Option<RoomId>,\n}\n\n\/\/\/ Function that receives HTTP request on WebSocket route and upgrades it to WebSocket connection.\n\/\/\/\n\/\/\/ See https:\/\/actix.rs\/docs\/websockets\/ for official `actix-web` documentation.\nasync fn ws_index(\n query_parameters: Query<QueryParameters>,\n request: HttpRequest,\n worker_manager: Data<WorkerManager>,\n rooms_registry: Data<RoomsRegistry>,\n stream: Payload,\n) -> Result<HttpResponse, Error> {\n let room = match query_parameters.room_id {\n Some(room_id) => rooms_registry.get_or_create_room(room_id),\n None => rooms_registry.create_room(),\n };\n\n match ParticipantConnection::new(&worker_manager, room).await {\n Ok(echo_server) => ws::start(echo_server, &request, stream),\n Err(error) => {\n eprintln!(\"{}\", error);\n\n Ok(HttpResponse::InternalServerError().finish())\n }\n }\n}\n\n#[actix_web::main]\nasync fn main() -> std::io::Result<()> {\n env_logger::init();\n\n \/\/ We will reuse the same worker manager across all connections, this is more than enough for\n \/\/ this use case\n let worker_manager = Data::new(WorkerManager::new());\n \/\/ Rooms registry will hold all the active rooms\n let rooms_registry = Data::new(RoomsRegistry::default());\n HttpServer::new(move || {\n App::new()\n .app_data(worker_manager.clone())\n .app_data(rooms_registry.clone())\n .route(\"\/ws\", web::get().to(ws_index))\n })\n \/\/ 2 threads is plenty for this example, default is to have as many threads as CPU cores\n .workers(2)\n .bind(\"127.0.0.1:3000\")?\n .run()\n .await\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Arithmetic expression parser and evaluator<commit_after>#[macro_use]\nextern crate nom;\n\nuse nom::{IResult,digit, multispace};\n\nuse std::str;\nuse std::str::FromStr;\n\nnamed!(parens<i64>, delimited!(\n delimited!(opt!(multispace), tag!(\"(\"), opt!(multispace)),\n expr,\n delimited!(opt!(multispace), tag!(\")\"), opt!(multispace))\n )\n);\n\nnamed!(factor<i64>,\n alt!(\n map_res!(\n map_res!(\n delimited!(opt!(multispace), digit, opt!(multispace)),\n str::from_utf8\n ),\n FromStr::from_str\n )\n | parens\n )\n);\n\nnamed!(term <i64>,\n chain!(\n mut acc: factor ~\n many0!(\n alt!(\n tap!(mul: preceded!(tag!(\"*\"), factor) => acc = acc * mul) |\n tap!(div: preceded!(tag!(\"\/\"), factor) => acc = acc \/ div)\n )\n ),\n || { return acc }\n )\n);\n\nnamed!(expr <i64>,\n chain!(\n mut acc: term ~\n many0!(\n alt!(\n tap!(add: preceded!(tag!(\"+\"), term) => acc = acc + add) |\n 
tap!(sub: preceded!(tag!(\"-\"), term) => acc = acc - sub)\n )\n ),\n || { return acc }\n )\n);\n\n#[test]\nfn factor_test() {\n assert_eq!(factor(&b\"3\"[..]), IResult::Done(&b\"\"[..], 3));\n assert_eq!(factor(&b\" 12\"[..]), IResult::Done(&b\"\"[..], 12));\n assert_eq!(factor(&b\"537 \"[..]), IResult::Done(&b\"\"[..], 537));\n assert_eq!(factor(&b\" 24 \"[..]), IResult::Done(&b\"\"[..], 24));\n}\n\n\n#[test]\nfn term_test() {\n assert_eq!(term(&b\" 12 *2 \/ 3\"[..]), IResult::Done(&b\"\"[..], 8));\n assert_eq!(term(&b\" 2* 3 *2 *2 \/ 3\"[..]), IResult::Done(&b\"\"[..], 8));\n assert_eq!(term(&b\" 48 \/ 3\/2\"[..]), IResult::Done(&b\"\"[..], 8));\n}\n\n#[test]\nfn expr_test() {\n assert_eq!(expr(&b\" 1 + 2 \"[..]), IResult::Done(&b\"\"[..], 3));\n assert_eq!(expr(&b\" 12 + 6 - 4+ 3\"[..]), IResult::Done(&b\"\"[..], 17));\n assert_eq!(expr(&b\" 1 + 2*3 + 4\"[..]), IResult::Done(&b\"\"[..], 11));\n}\n\n#[test]\nfn parens_test() {\n assert_eq!(expr(&b\" ( 2 )\"[..]), IResult::Done(&b\"\"[..], 2));\n assert_eq!(expr(&b\" 2* ( 3 + 4 ) \"[..]), IResult::Done(&b\"\"[..], 14));\n assert_eq!(expr(&b\" 2*2 \/ ( 5 - 1) + 3\"[..]), IResult::Done(&b\"\"[..], 4));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>update man page\/help message<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::path::Path;\nuse std::borrow::Borrow;\nuse std::ops::Deref;\n\nuse semver::Version;\nuse std::fmt::{Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse error::{StoreError, StoreErrorKind};\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId(PathBuf);\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n self.0\n }\n\n}\n\nimpl Deref for StoreId {\n type Target = PathBuf;\n\n fn deref(&self) -> &PathBuf {\n &self.0\n }\n\n}\n\nimpl From<PathBuf> for StoreId {\n\n fn from(pb: PathBuf) -> StoreId {\n StoreId(pb)\n }\n\n}\n\nimpl From<String> for StoreId {\n\n fn from(string: String) -> StoreId {\n StoreId(string.into())\n }\n\n}\n\nimpl AsRef<Path> for StoreId {\n\n fn as_ref(&self) -> &Path {\n self.0.as_ref()\n }\n\n}\n\nimpl Borrow<Path> for StoreId {\n\n fn borrow(&self) -> &Path {\n self.0.borrow()\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for PathBuf {\n fn into_storeid(self) -> StoreId {\n StoreId(self)\n }\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(StoreError::new(StoreErrorKind::StorePathLacksVersion, None));\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::from(self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<commit_msg>Shorten type names in import<commit_after>use std::path::PathBuf;\nuse std::path::Path;\nuse std::borrow::Borrow;\nuse std::ops::Deref;\n\nuse semver::Version;\nuse std::fmt::{Debug, Formatter};\nuse std::fmt::Error as FmtError;\nuse std::result::Result as RResult;\n\nuse error::{StoreError as SE, StoreErrorKind as SEK};\nuse store::Result;\nuse store::Store;\n\n\/\/\/ The Index into the Store\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub struct StoreId(PathBuf);\n\nimpl Into<PathBuf> for StoreId {\n\n fn into(self) -> PathBuf {\n self.0\n }\n\n}\n\nimpl Deref for StoreId {\n type Target = PathBuf;\n\n fn deref(&self) -> &PathBuf {\n &self.0\n }\n\n}\n\nimpl From<PathBuf> for StoreId {\n\n fn from(pb: PathBuf) -> StoreId {\n StoreId(pb)\n }\n\n}\n\nimpl From<String> for StoreId {\n\n fn from(string: String) -> StoreId {\n StoreId(string.into())\n }\n\n}\n\nimpl AsRef<Path> for StoreId {\n\n fn as_ref(&self) -> &Path {\n self.0.as_ref()\n }\n\n}\n\nimpl Borrow<Path> for StoreId {\n\n fn borrow(&self) -> &Path {\n self.0.borrow()\n }\n\n}\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\npub trait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for PathBuf {\n fn into_storeid(self) -> StoreId {\n StoreId(self)\n }\n}\n\nimpl IntoStoreId for StoreId {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\npub 
fn build_entry_path(store: &Store, path_elem: &str) -> Result<PathBuf> {\n debug!(\"Checking path element for version\");\n if path_elem.split('~').last().map_or(false, |v| Version::parse(v).is_err()) {\n debug!(\"Version cannot be parsed from {:?}\", path_elem);\n debug!(\"Path does not contain version!\");\n return Err(SE::new(SEK::StorePathLacksVersion, None));\n }\n debug!(\"Version checking succeeded\");\n\n debug!(\"Building path from {:?}\", path_elem);\n let mut path = store.path().clone();\n\n if path_elem.starts_with('\/') {\n path.push(&path_elem[1..]);\n } else {\n path.push(path_elem);\n }\n\n Ok(path)\n}\n\n#[macro_export]\nmacro_rules! module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n use $crate::storeid::StoreId;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(self) -> $crate::storeid::StoreId {\n StoreId::from(self.0)\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n iter: Box<Iterator<Item = StoreId>>,\n}\n\nimpl Debug for StoreIdIterator {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"StoreIdIterator\")\n }\n\n}\n\nimpl StoreIdIterator {\n\n pub fn new(iter: Box<Iterator<Item = StoreId>>) -> StoreIdIterator {\n StoreIdIterator {\n iter: iter,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.iter.next()\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(), \"test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(ignore): fix windows issue with line endings<commit_after><|endoftext|>"} {"text":"<commit_before>#![feature(rustc_private)]\n#![feature(once_cell)]\n#![cfg_attr(feature = \"deny-warnings\", deny(warnings))]\n\/\/ warn on lints, that are included in `rust-lang\/rust`s bootstrap\n#![warn(rust_2018_idioms, unused_lifetimes)]\n\/\/ warn on rustc internal lints\n#![deny(rustc::internal)]\n\n\/\/ FIXME: switch to something more ergonomic here, once available.\n\/\/ (Currently there is no way to opt into sysroot crates without `extern crate`.)\nextern crate rustc_driver;\nextern crate rustc_errors;\nextern crate rustc_interface;\nextern crate rustc_middle;\n\nuse rustc_interface::interface;\nuse rustc_middle::ty::TyCtxt;\nuse 
rustc_tools_util::VersionInfo;\n\nuse std::borrow::Cow;\nuse std::env;\nuse std::lazy::SyncLazy;\nuse std::ops::Deref;\nuse std::panic;\nuse std::path::{Path, PathBuf};\nuse std::process::{exit, Command};\n\n\/\/\/ If a command-line option matches `find_arg`, then apply the predicate `pred` on its value. If\n\/\/\/ true, then return it. The parameter is assumed to be either `--arg=value` or `--arg value`.\nfn arg_value<'a, T: Deref<Target = str>>(\n args: &'a [T],\n find_arg: &str,\n pred: impl Fn(&str) -> bool,\n) -> Option<&'a str> {\n let mut args = args.iter().map(Deref::deref);\n while let Some(arg) = args.next() {\n let mut arg = arg.splitn(2, '=');\n if arg.next() != Some(find_arg) {\n continue;\n }\n\n match arg.next().or_else(|| args.next()) {\n Some(v) if pred(v) => return Some(v),\n _ => {},\n }\n }\n None\n}\n\n#[test]\nfn test_arg_value() {\n let args = &[\"--bar=bar\", \"--foobar\", \"123\", \"--foo\"];\n\n assert_eq!(arg_value(&[] as &[&str], \"--foobar\", |_| true), None);\n assert_eq!(arg_value(args, \"--bar\", |_| false), None);\n assert_eq!(arg_value(args, \"--bar\", |_| true), Some(\"bar\"));\n assert_eq!(arg_value(args, \"--bar\", |p| p == \"bar\"), Some(\"bar\"));\n assert_eq!(arg_value(args, \"--bar\", |p| p == \"foo\"), None);\n assert_eq!(arg_value(args, \"--foobar\", |p| p == \"foo\"), None);\n assert_eq!(arg_value(args, \"--foobar\", |p| p == \"123\"), Some(\"123\"));\n assert_eq!(arg_value(args, \"--foo\", |_| true), None);\n}\n\nstruct DefaultCallbacks;\nimpl rustc_driver::Callbacks for DefaultCallbacks {}\n\nstruct ClippyCallbacks;\nimpl rustc_driver::Callbacks for ClippyCallbacks {\n fn config(&mut self, config: &mut interface::Config) {\n let previous = config.register_lints.take();\n config.register_lints = Some(Box::new(move |sess, mut lint_store| {\n \/\/ technically we're ~guaranteed that this is none but might as well call anything that\n \/\/ is there already. Certainly it can't hurt.\n if let Some(previous) = &previous {\n (previous)(sess, lint_store);\n }\n\n let conf = clippy_lints::read_conf(&[], &sess);\n clippy_lints::register_plugins(&mut lint_store, &sess, &conf);\n clippy_lints::register_pre_expansion_lints(&mut lint_store);\n clippy_lints::register_renamed(&mut lint_store);\n }));\n\n \/\/ FIXME: #4825; This is required, because Clippy lints that are based on MIR have to be\n \/\/ run on the unoptimized MIR. On the other hand this results in some false negatives. 
If\n \/\/ MIR passes can be enabled \/ disabled separately, we should figure out, what passes to\n \/\/ use for Clippy.\n config.opts.debugging_opts.mir_opt_level = 0;\n }\n}\n\nfn display_help() {\n println!(\n \"\\\nChecks a package to catch common mistakes and improve your Rust code.\n\nUsage:\n cargo clippy [options] [--] [<opts>...]\n\nCommon options:\n -h, --help Print this message\n --rustc Pass all args to rustc\n -V, --version Print version info and exit\n\nOther options are the same as `cargo check`.\n\nTo allow or deny a lint from the command line you can use `cargo clippy --`\nwith:\n\n -W --warn OPT Set lint warnings\n -A --allow OPT Set lint allowed\n -D --deny OPT Set lint denied\n -F --forbid OPT Set lint forbidden\n\nYou can use tool lints to allow or deny lints from your code, eg.:\n\n #[allow(clippy::needless_lifetimes)]\n\"\n );\n}\n\nconst BUG_REPORT_URL: &str = \"https:\/\/github.com\/rust-lang\/rust-clippy\/issues\/new\";\n\nstatic ICE_HOOK: SyncLazy<Box<dyn Fn(&panic::PanicInfo<'_>) + Sync + Send + 'static>> = SyncLazy::new(|| {\n let hook = panic::take_hook();\n panic::set_hook(Box::new(|info| report_clippy_ice(info, BUG_REPORT_URL)));\n hook\n});\n\nfn report_clippy_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {\n \/\/ Invoke our ICE handler, which prints the actual panic message and optionally a backtrace\n (*ICE_HOOK)(info);\n\n \/\/ Separate the output with an empty line\n eprintln!();\n\n let emitter = Box::new(rustc_errors::emitter::EmitterWriter::stderr(\n rustc_errors::ColorConfig::Auto,\n None,\n false,\n false,\n None,\n false,\n ));\n let handler = rustc_errors::Handler::with_emitter(true, None, emitter);\n\n \/\/ a .span_bug or .bug call has already printed what\n \/\/ it wants to print.\n if !info.payload().is::<rustc_errors::ExplicitBug>() {\n let d = rustc_errors::Diagnostic::new(rustc_errors::Level::Bug, \"unexpected panic\");\n handler.emit_diagnostic(&d);\n }\n\n let version_info = rustc_tools_util::get_version_info!();\n\n let xs: Vec<Cow<'static, str>> = vec![\n \"the compiler unexpectedly panicked. 
this is a bug.\".into(),\n format!(\"we would appreciate a bug report: {}\", bug_report_url).into(),\n format!(\"Clippy version: {}\", version_info).into(),\n ];\n\n for note in &xs {\n handler.note_without_error(¬e);\n }\n\n \/\/ If backtraces are enabled, also print the query stack\n let backtrace = env::var_os(\"RUST_BACKTRACE\").map_or(false, |x| &x != \"0\");\n\n let num_frames = if backtrace { None } else { Some(2) };\n\n TyCtxt::try_print_query_stack(&handler, num_frames);\n}\n\nfn toolchain_path(home: Option<String>, toolchain: Option<String>) -> Option<PathBuf> {\n home.and_then(|home| {\n toolchain.map(|toolchain| {\n let mut path = PathBuf::from(home);\n path.push(\"toolchains\");\n path.push(toolchain);\n path\n })\n })\n}\n\npub fn main() {\n rustc_driver::init_rustc_env_logger();\n SyncLazy::force(&ICE_HOOK);\n exit(rustc_driver::catch_with_exit_code(move || {\n let mut orig_args: Vec<String> = env::args().collect();\n\n \/\/ Get the sysroot, looking from most specific to this invocation to the least:\n \/\/ - command line\n \/\/ - runtime environment\n \/\/ - SYSROOT\n \/\/ - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN\n \/\/ - sysroot from rustc in the path\n \/\/ - compile-time environment\n \/\/ - SYSROOT\n \/\/ - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN\n let sys_root_arg = arg_value(&orig_args, \"--sysroot\", |_| true);\n let have_sys_root_arg = sys_root_arg.is_some();\n let sys_root = sys_root_arg\n .map(PathBuf::from)\n .or_else(|| std::env::var(\"SYSROOT\").ok().map(PathBuf::from))\n .or_else(|| {\n let home = std::env::var(\"RUSTUP_HOME\")\n .or_else(|_| std::env::var(\"MULTIRUST_HOME\"))\n .ok();\n let toolchain = std::env::var(\"RUSTUP_TOOLCHAIN\")\n .or_else(|_| std::env::var(\"MULTIRUST_TOOLCHAIN\"))\n .ok();\n toolchain_path(home, toolchain)\n })\n .or_else(|| {\n Command::new(\"rustc\")\n .arg(\"--print\")\n .arg(\"sysroot\")\n .output()\n .ok()\n .and_then(|out| String::from_utf8(out.stdout).ok())\n .map(|s| PathBuf::from(s.trim()))\n })\n .or_else(|| option_env!(\"SYSROOT\").map(PathBuf::from))\n .or_else(|| {\n let home = option_env!(\"RUSTUP_HOME\")\n .or(option_env!(\"MULTIRUST_HOME\"))\n .map(ToString::to_string);\n let toolchain = option_env!(\"RUSTUP_TOOLCHAIN\")\n .or(option_env!(\"MULTIRUST_TOOLCHAIN\"))\n .map(ToString::to_string);\n toolchain_path(home, toolchain)\n })\n .map(|pb| pb.to_string_lossy().to_string())\n .expect(\"need to specify SYSROOT env var during clippy compilation, or use rustup or multirust\");\n\n \/\/ make \"clippy-driver --rustc\" work like a subcommand that passes further args to \"rustc\"\n \/\/ for example `clippy-driver --rustc --version` will print the rustc version that clippy-driver\n \/\/ uses\n if let Some(pos) = orig_args.iter().position(|arg| arg == \"--rustc\") {\n orig_args.remove(pos);\n orig_args[0] = \"rustc\".to_string();\n\n \/\/ if we call \"rustc\", we need to pass --sysroot here as well\n let mut args: Vec<String> = orig_args.clone();\n if !have_sys_root_arg {\n args.extend(vec![\"--sysroot\".into(), sys_root]);\n };\n\n return rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();\n }\n\n if orig_args.iter().any(|a| a == \"--version\" || a == \"-V\") {\n let version_info = rustc_tools_util::get_version_info!();\n println!(\"{}\", version_info);\n exit(0);\n }\n\n \/\/ Setting RUSTC_WRAPPER causes Cargo to pass 'rustc' as the first argument.\n \/\/ We're invoking the compiler programmatically, so we ignore this\/\n let wrapper_mode = 
orig_args.get(1).map(Path::new).and_then(Path::file_stem) == Some(\"rustc\".as_ref());\n\n if wrapper_mode {\n \/\/ we still want to be able to invoke it normally though\n orig_args.remove(1);\n }\n\n if !wrapper_mode && (orig_args.iter().any(|a| a == \"--help\" || a == \"-h\") || orig_args.len() == 1) {\n display_help();\n exit(0);\n }\n\n \/\/ this conditional check for the --sysroot flag is there so users can call\n \/\/ `clippy_driver` directly\n \/\/ without having to pass --sysroot or anything\n let mut args: Vec<String> = orig_args.clone();\n if !have_sys_root_arg {\n args.extend(vec![\"--sysroot\".into(), sys_root]);\n };\n\n let mut no_deps = false;\n let clippy_args = env::var(\"CLIPPY_ARGS\")\n .unwrap_or_default()\n .split(\"__CLIPPY_HACKERY__\")\n .filter_map(|s| match s {\n \"\" => None,\n \"--no-deps\" => {\n no_deps = true;\n None\n },\n _ => Some(s.to_string()),\n })\n .chain(vec![\"--cfg\".into(), r#\"feature=\"cargo-clippy\"\"#.into()])\n .collect::<Vec<String>>();\n\n \/\/ this check ensures that dependencies are built but not linted and the final\n \/\/ crate is linted but not built\n let clippy_disabled = env::var(\"CLIPPY_TESTS\").map_or(false, |val| val != \"true\")\n || arg_value(&orig_args, \"--cap-lints\", |val| val == \"allow\").is_some()\n || no_deps && env::var(\"CARGO_PRIMARY_PACKAGE\").is_err();\n\n if !clippy_disabled {\n args.extend(clippy_args);\n }\n let mut clippy = ClippyCallbacks;\n let mut default = DefaultCallbacks;\n let callbacks: &mut (dyn rustc_driver::Callbacks + Send) =\n if clippy_disabled { &mut default } else { &mut clippy };\n rustc_driver::RunCompiler::new(&args, callbacks).run()\n }))\n}\n<commit_msg>Apply suggestion regarding clippy_enabled bool<commit_after>#![feature(rustc_private)]\n#![feature(once_cell)]\n#![cfg_attr(feature = \"deny-warnings\", deny(warnings))]\n\/\/ warn on lints, that are included in `rust-lang\/rust`s bootstrap\n#![warn(rust_2018_idioms, unused_lifetimes)]\n\/\/ warn on rustc internal lints\n#![deny(rustc::internal)]\n\n\/\/ FIXME: switch to something more ergonomic here, once available.\n\/\/ (Currently there is no way to opt into sysroot crates without `extern crate`.)\nextern crate rustc_driver;\nextern crate rustc_errors;\nextern crate rustc_interface;\nextern crate rustc_middle;\n\nuse rustc_interface::interface;\nuse rustc_middle::ty::TyCtxt;\nuse rustc_tools_util::VersionInfo;\n\nuse std::borrow::Cow;\nuse std::env;\nuse std::lazy::SyncLazy;\nuse std::ops::Deref;\nuse std::panic;\nuse std::path::{Path, PathBuf};\nuse std::process::{exit, Command};\n\n\/\/\/ If a command-line option matches `find_arg`, then apply the predicate `pred` on its value. If\n\/\/\/ true, then return it. 
The parameter is assumed to be either `--arg=value` or `--arg value`.\nfn arg_value<'a, T: Deref<Target = str>>(\n args: &'a [T],\n find_arg: &str,\n pred: impl Fn(&str) -> bool,\n) -> Option<&'a str> {\n let mut args = args.iter().map(Deref::deref);\n while let Some(arg) = args.next() {\n let mut arg = arg.splitn(2, '=');\n if arg.next() != Some(find_arg) {\n continue;\n }\n\n match arg.next().or_else(|| args.next()) {\n Some(v) if pred(v) => return Some(v),\n _ => {},\n }\n }\n None\n}\n\n#[test]\nfn test_arg_value() {\n let args = &[\"--bar=bar\", \"--foobar\", \"123\", \"--foo\"];\n\n assert_eq!(arg_value(&[] as &[&str], \"--foobar\", |_| true), None);\n assert_eq!(arg_value(args, \"--bar\", |_| false), None);\n assert_eq!(arg_value(args, \"--bar\", |_| true), Some(\"bar\"));\n assert_eq!(arg_value(args, \"--bar\", |p| p == \"bar\"), Some(\"bar\"));\n assert_eq!(arg_value(args, \"--bar\", |p| p == \"foo\"), None);\n assert_eq!(arg_value(args, \"--foobar\", |p| p == \"foo\"), None);\n assert_eq!(arg_value(args, \"--foobar\", |p| p == \"123\"), Some(\"123\"));\n assert_eq!(arg_value(args, \"--foo\", |_| true), None);\n}\n\nstruct DefaultCallbacks;\nimpl rustc_driver::Callbacks for DefaultCallbacks {}\n\nstruct ClippyCallbacks;\nimpl rustc_driver::Callbacks for ClippyCallbacks {\n fn config(&mut self, config: &mut interface::Config) {\n let previous = config.register_lints.take();\n config.register_lints = Some(Box::new(move |sess, mut lint_store| {\n \/\/ technically we're ~guaranteed that this is none but might as well call anything that\n \/\/ is there already. Certainly it can't hurt.\n if let Some(previous) = &previous {\n (previous)(sess, lint_store);\n }\n\n let conf = clippy_lints::read_conf(&[], &sess);\n clippy_lints::register_plugins(&mut lint_store, &sess, &conf);\n clippy_lints::register_pre_expansion_lints(&mut lint_store);\n clippy_lints::register_renamed(&mut lint_store);\n }));\n\n \/\/ FIXME: #4825; This is required, because Clippy lints that are based on MIR have to be\n \/\/ run on the unoptimized MIR. On the other hand this results in some false negatives. 
If\n \/\/ MIR passes can be enabled \/ disabled separately, we should figure out, what passes to\n \/\/ use for Clippy.\n config.opts.debugging_opts.mir_opt_level = 0;\n }\n}\n\nfn display_help() {\n println!(\n \"\\\nChecks a package to catch common mistakes and improve your Rust code.\n\nUsage:\n cargo clippy [options] [--] [<opts>...]\n\nCommon options:\n -h, --help Print this message\n --rustc Pass all args to rustc\n -V, --version Print version info and exit\n\nOther options are the same as `cargo check`.\n\nTo allow or deny a lint from the command line you can use `cargo clippy --`\nwith:\n\n -W --warn OPT Set lint warnings\n -A --allow OPT Set lint allowed\n -D --deny OPT Set lint denied\n -F --forbid OPT Set lint forbidden\n\nYou can use tool lints to allow or deny lints from your code, eg.:\n\n #[allow(clippy::needless_lifetimes)]\n\"\n );\n}\n\nconst BUG_REPORT_URL: &str = \"https:\/\/github.com\/rust-lang\/rust-clippy\/issues\/new\";\n\nstatic ICE_HOOK: SyncLazy<Box<dyn Fn(&panic::PanicInfo<'_>) + Sync + Send + 'static>> = SyncLazy::new(|| {\n let hook = panic::take_hook();\n panic::set_hook(Box::new(|info| report_clippy_ice(info, BUG_REPORT_URL)));\n hook\n});\n\nfn report_clippy_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {\n \/\/ Invoke our ICE handler, which prints the actual panic message and optionally a backtrace\n (*ICE_HOOK)(info);\n\n \/\/ Separate the output with an empty line\n eprintln!();\n\n let emitter = Box::new(rustc_errors::emitter::EmitterWriter::stderr(\n rustc_errors::ColorConfig::Auto,\n None,\n false,\n false,\n None,\n false,\n ));\n let handler = rustc_errors::Handler::with_emitter(true, None, emitter);\n\n \/\/ a .span_bug or .bug call has already printed what\n \/\/ it wants to print.\n if !info.payload().is::<rustc_errors::ExplicitBug>() {\n let d = rustc_errors::Diagnostic::new(rustc_errors::Level::Bug, \"unexpected panic\");\n handler.emit_diagnostic(&d);\n }\n\n let version_info = rustc_tools_util::get_version_info!();\n\n let xs: Vec<Cow<'static, str>> = vec![\n \"the compiler unexpectedly panicked. 
this is a bug.\".into(),\n format!(\"we would appreciate a bug report: {}\", bug_report_url).into(),\n format!(\"Clippy version: {}\", version_info).into(),\n ];\n\n for note in &xs {\n handler.note_without_error(¬e);\n }\n\n \/\/ If backtraces are enabled, also print the query stack\n let backtrace = env::var_os(\"RUST_BACKTRACE\").map_or(false, |x| &x != \"0\");\n\n let num_frames = if backtrace { None } else { Some(2) };\n\n TyCtxt::try_print_query_stack(&handler, num_frames);\n}\n\nfn toolchain_path(home: Option<String>, toolchain: Option<String>) -> Option<PathBuf> {\n home.and_then(|home| {\n toolchain.map(|toolchain| {\n let mut path = PathBuf::from(home);\n path.push(\"toolchains\");\n path.push(toolchain);\n path\n })\n })\n}\n\n#[allow(clippy::too_many_lines)]\npub fn main() {\n rustc_driver::init_rustc_env_logger();\n SyncLazy::force(&ICE_HOOK);\n exit(rustc_driver::catch_with_exit_code(move || {\n let mut orig_args: Vec<String> = env::args().collect();\n\n \/\/ Get the sysroot, looking from most specific to this invocation to the least:\n \/\/ - command line\n \/\/ - runtime environment\n \/\/ - SYSROOT\n \/\/ - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN\n \/\/ - sysroot from rustc in the path\n \/\/ - compile-time environment\n \/\/ - SYSROOT\n \/\/ - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN\n let sys_root_arg = arg_value(&orig_args, \"--sysroot\", |_| true);\n let have_sys_root_arg = sys_root_arg.is_some();\n let sys_root = sys_root_arg\n .map(PathBuf::from)\n .or_else(|| std::env::var(\"SYSROOT\").ok().map(PathBuf::from))\n .or_else(|| {\n let home = std::env::var(\"RUSTUP_HOME\")\n .or_else(|_| std::env::var(\"MULTIRUST_HOME\"))\n .ok();\n let toolchain = std::env::var(\"RUSTUP_TOOLCHAIN\")\n .or_else(|_| std::env::var(\"MULTIRUST_TOOLCHAIN\"))\n .ok();\n toolchain_path(home, toolchain)\n })\n .or_else(|| {\n Command::new(\"rustc\")\n .arg(\"--print\")\n .arg(\"sysroot\")\n .output()\n .ok()\n .and_then(|out| String::from_utf8(out.stdout).ok())\n .map(|s| PathBuf::from(s.trim()))\n })\n .or_else(|| option_env!(\"SYSROOT\").map(PathBuf::from))\n .or_else(|| {\n let home = option_env!(\"RUSTUP_HOME\")\n .or(option_env!(\"MULTIRUST_HOME\"))\n .map(ToString::to_string);\n let toolchain = option_env!(\"RUSTUP_TOOLCHAIN\")\n .or(option_env!(\"MULTIRUST_TOOLCHAIN\"))\n .map(ToString::to_string);\n toolchain_path(home, toolchain)\n })\n .map(|pb| pb.to_string_lossy().to_string())\n .expect(\"need to specify SYSROOT env var during clippy compilation, or use rustup or multirust\");\n\n \/\/ make \"clippy-driver --rustc\" work like a subcommand that passes further args to \"rustc\"\n \/\/ for example `clippy-driver --rustc --version` will print the rustc version that clippy-driver\n \/\/ uses\n if let Some(pos) = orig_args.iter().position(|arg| arg == \"--rustc\") {\n orig_args.remove(pos);\n orig_args[0] = \"rustc\".to_string();\n\n \/\/ if we call \"rustc\", we need to pass --sysroot here as well\n let mut args: Vec<String> = orig_args.clone();\n if !have_sys_root_arg {\n args.extend(vec![\"--sysroot\".into(), sys_root]);\n };\n\n return rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();\n }\n\n if orig_args.iter().any(|a| a == \"--version\" || a == \"-V\") {\n let version_info = rustc_tools_util::get_version_info!();\n println!(\"{}\", version_info);\n exit(0);\n }\n\n \/\/ Setting RUSTC_WRAPPER causes Cargo to pass 'rustc' as the first argument.\n \/\/ We're invoking the compiler programmatically, so we ignore 
this\/\n let wrapper_mode = orig_args.get(1).map(Path::new).and_then(Path::file_stem) == Some(\"rustc\".as_ref());\n\n if wrapper_mode {\n \/\/ we still want to be able to invoke it normally though\n orig_args.remove(1);\n }\n\n if !wrapper_mode && (orig_args.iter().any(|a| a == \"--help\" || a == \"-h\") || orig_args.len() == 1) {\n display_help();\n exit(0);\n }\n\n \/\/ this conditional check for the --sysroot flag is there so users can call\n \/\/ `clippy_driver` directly\n \/\/ without having to pass --sysroot or anything\n let mut args: Vec<String> = orig_args.clone();\n if !have_sys_root_arg {\n args.extend(vec![\"--sysroot\".into(), sys_root]);\n };\n\n let mut no_deps = false;\n let clippy_args = env::var(\"CLIPPY_ARGS\")\n .unwrap_or_default()\n .split(\"__CLIPPY_HACKERY__\")\n .filter_map(|s| match s {\n \"\" => None,\n \"--no-deps\" => {\n no_deps = true;\n None\n },\n _ => Some(s.to_string()),\n })\n .chain(vec![\"--cfg\".into(), r#\"feature=\"cargo-clippy\"\"#.into()])\n .collect::<Vec<String>>();\n\n \/\/ this check ensures that dependencies are built but not linted and the final\n \/\/ crate is linted but not built\n let clippy_tests_set = env::var(\"CLIPPY_TESTS\").map_or(false, |val| val == \"true\");\n let cap_lints_allow = arg_value(&orig_args, \"--cap-lints\", |val| val == \"allow\").is_some();\n let in_primary_package = env::var(\"CARGO_PRIMARY_PACKAGE\").is_ok();\n\n let clippy_enabled = clippy_tests_set || (!cap_lints_allow && (!no_deps || in_primary_package));\n if clippy_enabled {\n args.extend(clippy_args);\n }\n\n let mut clippy = ClippyCallbacks;\n let mut default = DefaultCallbacks;\n let callbacks: &mut (dyn rustc_driver::Callbacks + Send) =\n if clippy_enabled { &mut clippy } else { &mut default };\n\n rustc_driver::RunCompiler::new(&args, callbacks).run()\n }))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fields in m.receipt are optional.<commit_after><|endoftext|>"} {"text":"<commit_before>#![allow(dead_code)]\nextern crate rand;\n\npub struct World {\n\tsize: u64,\n}\n\nimpl World {\n\t\/\/ enmine();\n\t\/\/ calc_neighbors()\n}\n\npub struct Chunk {\n\tmines: [u16;16], \/\/ mines\n\tvis: [u16;16], \/\/ visibility\n\tnhb: [u64;16], \/\/ neighbors\n}\n\nimpl Chunk {\n\tpub fn new() -> Chunk {\n\t\tChunk {\n\t\t\tmines: [0;16],\n\t\t\tvis: [0;16],\n\t\t\tnhb: [0;16],\n\t\t}\n\t}\n\t\n\tpub fn click (&mut self, row: u8, col: u8) {\n\t\tif (row < 15) & (col < 15) {\n\t\t\tself.vis[row as usize] = self.vis[row as usize] | (1u16 << (15-col));\n\t\t}\n\t}\n\n\tpub fn is_clicked (&self, row: u8, col: u8) -> bool {\n\t\tif (row < 15) & (col < 15) {\n\t\t\t(self.vis[row as usize] & (1u16 << (15-col))) == (1u16 << (15-col))\n\t\t}\n\t\telse { false }\n\t}\n}\n\n\n#[test]\nfn test_chunk_construct() {\n\tlet c = Chunk::new();\n\tfor i in 0..15 { assert_eq!(c.mines[i], 0); }\n\tfor i in 0..15 { assert_eq!(c.vis[i], 0); }\n\tfor i in 0..15 { assert_eq!(c.nhb[i], 0); }\n}\n\n#[test]\nfn test_chunk_click() {\n\tlet mut c = Chunk::new();\n\tc.click(0, 2);\n\tassert_eq!(c.vis[0], 8192);\n}\n\n#[test]\nfn test_chunk_is_clicked() {\n\tlet mut vec = Vec::new();\n\tlet mut c = Chunk::new();\n\tfor _ in 0..15 { vec.push((rand::random::<u8>()%15, rand::random::<u8>()%15)); }\n\tfor i in &vec { c.click(i.0, i.1); };\n\tfor i in &vec { assert!(c.is_clicked(i.0, i.1)); }\n}\n\n\n<commit_msg>Started chunk creation implementation<commit_after>#![allow(dead_code)]\nextern crate rand;\nuse std::collections::HashMap;\n\nconst MIN_M: u8 = 16;\nconst MAX_M: u8 = 64;\n\npub struct 
World {\n\tallocated: u64,\n\tactivated: u64,\n\tboard: HashMap<(i32,i32),Chunk>,\n}\n\nimpl World {\n\tpub fn new() -> World {\n\t\tWorld {\n\t\t\tallocated: 0,\n\t\t\tactivated: 0,\n\t\t\tboard: HashMap::new(),\n\t\t}\n\t}\n\t\n\tfn chunk_create(&mut self, x: i32, y: i32) {\n\t\t\/\/ TODO: add current chunk, surrounding chunks if not exist\n\t\t\/\/\t\t calculate neighbors for current chunk\n\t\t\/\/\t\t increment size\n\t}\n\t\n\tfn chunk_add(&mut self, x: i32, y: i32) {\n\t\tself.board.insert((x,y), Chunk::new());\n\t}\n\n\tfn calc_neighbors(&mut self) {}\n}\n\nenum ChunkStat {\n\tMined,\n\tNeighbored,\n\tWon,\n}\n\npub struct Chunk {\n\tstat: ChunkStat, \/\/ status\n\tmines: [u16;16], \/\/ mines\n\tvis: [u16;16], \/\/ visibility\n\tnhb: [u64;16], \/\/ neighbors\n}\n\nimpl Chunk {\n\tpub fn new() -> Chunk {\n\t\tlet mut c = Chunk {\n\t\t\tstat: ChunkStat::Mined,\n\t\t\tmines: [0;16],\n\t\t\tvis: [0;16],\n\t\t\tnhb: [0;16],\n\t\t};\n\n\t\tfor _ in 1..(rand::random::<u8>()%(MAX_M-MIN_M)+MIN_M) {\n\t\t\t\/\/ duplicate entries are not of consequence.\n\t\t\tc.enmine(rand::random::<u8>()%16, rand::random::<u8>()%16);\n\t\t}\n\t\t\n\t\treturn c;\n\t}\n\t\n\tfn enmine(&mut self, row: u8, col: u8) {\n\t\tif (row < 15) & (col < 15) {\n\t\t\tself.mines[row as usize] = self.mines[row as usize] | (1u16 << (15-col));\n\t\t}\n\t}\n\t\n\tpub fn click (&mut self, row: u8, col: u8) {\n\t\tif (row < 15) & (col < 15) {\n\t\t\tself.vis[row as usize] = self.vis[row as usize] | (1u16 << (15-col));\n\t\t}\n\t}\n\n\tpub fn is_clicked (&self, row: u8, col: u8) -> bool {\n\t\tif (row < 15) & (col < 15) {\n\t\t\t(self.vis[row as usize] & (1u16 << (15-col))) == (1u16 << (15-col))\n\t\t}\n\t\telse { false }\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>use std::sys;\nuse std::libc;\nuse std::num::One;\nuse std::ptr;\nuse glcore::*;\nuse nalgebra::traits::homogeneous::ToHomogeneous;\nuse nalgebra::traits::indexable::Indexable;\nuse nalgebra::adaptors::transform::Transform;\nuse nalgebra::adaptors::rotmat::Rotmat;\nuse nalgebra::mat::{Mat3, Mat4};\nuse nalgebra::vec::Vec3;\n\ntype Transform3d = Transform<Rotmat<Mat3<f64>>, Vec3<f64>>;\ntype Scale3d = Mat3<GLfloat>;\n\npub enum Geometry\n{ VerticesTriangles(~[Vec3<f32>], ~[(GLuint, GLuint, GLuint)]) }\n\npub struct GeometryIndices\n{\n priv offset: uint,\n priv size: i32,\n priv element_buffer: GLuint,\n priv normal_buffer: GLuint,\n priv vertex_buffer: GLuint\n}\n\nimpl GeometryIndices\n{\n pub fn new(offset: uint,\n size: i32,\n element_buffer: GLuint,\n normal_buffer: GLuint,\n vertex_buffer: GLuint) -> GeometryIndices\n {\n GeometryIndices {\n offset: offset,\n size: size,\n element_buffer: element_buffer,\n normal_buffer: normal_buffer,\n vertex_buffer: vertex_buffer\n }\n }\n}\n\npub struct Object\n{\n priv scale: Scale3d,\n priv transform: Transform3d,\n priv color: Vec3<f32>,\n priv igeometry: GeometryIndices,\n priv geometry: Option<Geometry>\n}\n\nimpl Object\n{\n pub fn new(igeometry: GeometryIndices,\n r: f32,\n g: f32,\n b: f32,\n sx: GLfloat,\n sy: GLfloat,\n sz: GLfloat,\n geometry: Option<Geometry>) -> Object\n {\n Object {\n scale: Mat3::new( [\n sx, 0.0, 0.0,\n 0.0, sy, 0.0,\n 0.0, 0.0, sz,\n ] ),\n transform: One::one(),\n igeometry: igeometry,\n geometry: geometry,\n color: Vec3::new([r, g, b])\n }\n }\n\n pub fn upload(&self,\n pos_attrib: u32,\n normal_attrib: u32,\n color_location: i32,\n transform_location: i32,\n scale_location: i32,\n normal_transform_location: i32)\n {\n let formated_transform: Mat4<f64> = 
self.transform.to_homogeneous();\n let formated_ntransform: Mat3<f64> = self.transform.submat().submat();\n\n \/\/ we convert the matrix elements and do the transposition at the same time\n let transform_glf = Mat4::new ([\n formated_transform.at((0, 0)) as GLfloat,\n formated_transform.at((1, 0)) as GLfloat,\n formated_transform.at((2, 0)) as GLfloat,\n formated_transform.at((3, 0)) as GLfloat,\n\n formated_transform.at((0, 1)) as GLfloat,\n formated_transform.at((1, 1)) as GLfloat,\n formated_transform.at((2, 1)) as GLfloat,\n formated_transform.at((3, 1)) as GLfloat,\n\n formated_transform.at((0, 2)) as GLfloat,\n formated_transform.at((1, 2)) as GLfloat,\n formated_transform.at((2, 2)) as GLfloat,\n formated_transform.at((3, 2)) as GLfloat,\n\n formated_transform.at((0, 3)) as GLfloat,\n formated_transform.at((1, 3)) as GLfloat,\n formated_transform.at((2, 3)) as GLfloat,\n formated_transform.at((3, 3)) as GLfloat,\n ]);\n\n let ntransform_glf = Mat3::new ([\n formated_ntransform.at((0, 0)) as GLfloat,\n formated_ntransform.at((1, 0)) as GLfloat,\n formated_ntransform.at((2, 0)) as GLfloat,\n formated_ntransform.at((0, 1)) as GLfloat,\n formated_ntransform.at((1, 1)) as GLfloat,\n formated_ntransform.at((2, 1)) as GLfloat,\n formated_ntransform.at((0, 2)) as GLfloat,\n formated_ntransform.at((1, 2)) as GLfloat,\n formated_ntransform.at((2, 2)) as GLfloat,\n ]);\n\n unsafe {\n glUniformMatrix4fv(transform_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&transform_glf.mij[0]));\n\n glUniformMatrix3fv(normal_transform_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&ntransform_glf.mij[0]));\n\n glUniformMatrix3fv(scale_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&self.scale.mij[0]));\n\n glUniform3f(color_location, self.color.at[0], self.color.at[1], self.color.at[2]);\n\n \/\/ FIXME: we should not switch the buffers if the last drawn shape uses the same.\n glBindBuffer(GL_ARRAY_BUFFER, self.igeometry.vertex_buffer);\n glVertexAttribPointer(pos_attrib,\n 3,\n GL_FLOAT,\n GL_FALSE,\n 3 * sys::size_of::<GLfloat>() as GLsizei,\n ptr::null());\n\n glBindBuffer(GL_ARRAY_BUFFER, self.igeometry.normal_buffer);\n glVertexAttribPointer(normal_attrib,\n 3,\n GL_FLOAT,\n GL_FALSE,\n 3 * sys::size_of::<GLfloat>() as GLsizei,\n ptr::null());\n\n glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.igeometry.element_buffer);\n glDrawElements(GL_TRIANGLES,\n self.igeometry.size,\n GL_UNSIGNED_INT,\n self.igeometry.offset * sys::size_of::<GLuint>() as *libc::c_void);\n }\n }\n\n pub fn transformation<'r>(&'r mut self) -> &'r mut Transform3d\n { &mut self.transform }\n\n pub fn set_color(@mut self, r: f32, g: f32, b: f32) -> @mut Object\n {\n self.color.at[0] = r;\n self.color.at[1] = g;\n self.color.at[2] = b;\n\n self\n }\n}\n<commit_msg>Add accessors to objects geometry.<commit_after>use std::sys;\nuse std::libc;\nuse std::num::One;\nuse std::ptr;\nuse glcore::*;\nuse nalgebra::traits::homogeneous::ToHomogeneous;\nuse nalgebra::traits::indexable::Indexable;\nuse nalgebra::adaptors::transform::Transform;\nuse nalgebra::adaptors::rotmat::Rotmat;\nuse nalgebra::mat::{Mat3, Mat4};\nuse nalgebra::vec::Vec3;\n\ntype Transform3d = Transform<Rotmat<Mat3<f64>>, Vec3<f64>>;\ntype Scale3d = Mat3<GLfloat>;\n\npub enum Geometry\n{ VerticesTriangles(~[Vec3<f32>], ~[(GLuint, GLuint, GLuint)]) }\n\npub struct GeometryIndices\n{\n priv offset: uint,\n priv size: i32,\n priv element_buffer: GLuint,\n priv normal_buffer: GLuint,\n priv vertex_buffer: GLuint\n}\n\nimpl GeometryIndices\n{\n pub fn new(offset: uint,\n size: 
i32,\n element_buffer: GLuint,\n normal_buffer: GLuint,\n vertex_buffer: GLuint) -> GeometryIndices\n {\n GeometryIndices {\n offset: offset,\n size: size,\n element_buffer: element_buffer,\n normal_buffer: normal_buffer,\n vertex_buffer: vertex_buffer\n }\n }\n}\n\npub struct Object\n{\n priv scale: Scale3d,\n priv transform: Transform3d,\n priv color: Vec3<f32>,\n priv igeometry: GeometryIndices,\n priv geometry: Option<Geometry>\n}\n\nimpl Object\n{\n pub fn new(igeometry: GeometryIndices,\n r: f32,\n g: f32,\n b: f32,\n sx: GLfloat,\n sy: GLfloat,\n sz: GLfloat,\n geometry: Option<Geometry>) -> Object\n {\n Object {\n scale: Mat3::new( [\n sx, 0.0, 0.0,\n 0.0, sy, 0.0,\n 0.0, 0.0, sz,\n ] ),\n transform: One::one(),\n igeometry: igeometry,\n geometry: geometry,\n color: Vec3::new([r, g, b])\n }\n }\n\n pub fn upload(&self,\n pos_attrib: u32,\n normal_attrib: u32,\n color_location: i32,\n transform_location: i32,\n scale_location: i32,\n normal_transform_location: i32)\n {\n let formated_transform: Mat4<f64> = self.transform.to_homogeneous();\n let formated_ntransform: Mat3<f64> = self.transform.submat().submat();\n\n \/\/ we convert the matrix elements and do the transposition at the same time\n let transform_glf = Mat4::new ([\n formated_transform.at((0, 0)) as GLfloat,\n formated_transform.at((1, 0)) as GLfloat,\n formated_transform.at((2, 0)) as GLfloat,\n formated_transform.at((3, 0)) as GLfloat,\n\n formated_transform.at((0, 1)) as GLfloat,\n formated_transform.at((1, 1)) as GLfloat,\n formated_transform.at((2, 1)) as GLfloat,\n formated_transform.at((3, 1)) as GLfloat,\n\n formated_transform.at((0, 2)) as GLfloat,\n formated_transform.at((1, 2)) as GLfloat,\n formated_transform.at((2, 2)) as GLfloat,\n formated_transform.at((3, 2)) as GLfloat,\n\n formated_transform.at((0, 3)) as GLfloat,\n formated_transform.at((1, 3)) as GLfloat,\n formated_transform.at((2, 3)) as GLfloat,\n formated_transform.at((3, 3)) as GLfloat,\n ]);\n\n let ntransform_glf = Mat3::new ([\n formated_ntransform.at((0, 0)) as GLfloat,\n formated_ntransform.at((1, 0)) as GLfloat,\n formated_ntransform.at((2, 0)) as GLfloat,\n formated_ntransform.at((0, 1)) as GLfloat,\n formated_ntransform.at((1, 1)) as GLfloat,\n formated_ntransform.at((2, 1)) as GLfloat,\n formated_ntransform.at((0, 2)) as GLfloat,\n formated_ntransform.at((1, 2)) as GLfloat,\n formated_ntransform.at((2, 2)) as GLfloat,\n ]);\n\n unsafe {\n glUniformMatrix4fv(transform_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&transform_glf.mij[0]));\n\n glUniformMatrix3fv(normal_transform_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&ntransform_glf.mij[0]));\n\n glUniformMatrix3fv(scale_location,\n 1,\n GL_FALSE,\n ptr::to_unsafe_ptr(&self.scale.mij[0]));\n\n glUniform3f(color_location, self.color.at[0], self.color.at[1], self.color.at[2]);\n\n \/\/ FIXME: we should not switch the buffers if the last drawn shape uses the same.\n glBindBuffer(GL_ARRAY_BUFFER, self.igeometry.vertex_buffer);\n glVertexAttribPointer(pos_attrib,\n 3,\n GL_FLOAT,\n GL_FALSE,\n 3 * sys::size_of::<GLfloat>() as GLsizei,\n ptr::null());\n\n glBindBuffer(GL_ARRAY_BUFFER, self.igeometry.normal_buffer);\n glVertexAttribPointer(normal_attrib,\n 3,\n GL_FLOAT,\n GL_FALSE,\n 3 * sys::size_of::<GLfloat>() as GLsizei,\n ptr::null());\n\n glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.igeometry.element_buffer);\n glDrawElements(GL_TRIANGLES,\n self.igeometry.size,\n GL_UNSIGNED_INT,\n self.igeometry.offset * sys::size_of::<GLuint>() as *libc::c_void);\n }\n }\n\n pub fn 
transformation<'r>(&'r mut self) -> &'r mut Transform3d\n { &mut self.transform }\n\n pub fn geometry<'r>(&'r self) -> &'r Option<Geometry>\n { &'r self.geometry }\n\n pub fn geometry_mut<'r>(&'r mut self) -> &'r mut Option<Geometry>\n { &'r mut self.geometry }\n\n pub fn set_color(@mut self, r: f32, g: f32, b: f32) -> @mut Object\n {\n self.color.at[0] = r;\n self.color.at[1] = g;\n self.color.at[2] = b;\n\n self\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add proper error handling in parser<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Only updated history once<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>siiiiiiiiiiimple<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add shutdown mechanism.<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::bindings::utils::{DOMString, null_str_as_empty};\nuse dom::bindings::codegen::FormDataBinding;\nuse dom::blob::Blob;\nuse script_task::{page_from_context};\n\nuse js::jsapi::{JSObject, JSContext};\n\nuse std::hashmap::HashMap;\n\nenum FormDatum {\n StringData(DOMString),\n BlobData { blob: @mut Blob, name: DOMString }\n}\n\npub struct FormData {\n data: HashMap<~str, FormDatum>,\n reflector_: Reflector\n}\n\nimpl FormData {\n pub fn new() -> @mut FormData {\n @mut FormData {\n data: HashMap::new(),\n reflector_: Reflector::new()\n }\n }\n\n pub fn init_wrapper(@mut self, cx: *JSContext, scope: *JSObject) {\n self.wrap_object_shared(cx, scope);\n }\n\n pub fn Append(&mut self, name: &DOMString, value: @mut Blob, filename: Option<DOMString>) {\n let blob = BlobData {\n blob: value,\n name: filename.unwrap_or_default(Some(~\"default\"))\n };\n self.data.insert(null_str_as_empty(name), blob);\n }\n\n pub fn Append_(&mut self, name: &DOMString, value: &DOMString) {\n self.data.insert(null_str_as_empty(name), StringData((*value).clone()));\n }\n}\n\nimpl Reflectable for FormData {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n\n fn mut_reflector<'a>(&'a mut self) -> &'a mut Reflector {\n &mut self.reflector_\n }\n\n fn wrap_object_shared(@mut self, cx: *JSContext, scope: *JSObject) -> *JSObject {\n FormDataBinding::Wrap(cx, scope, self)\n }\n\n fn GetParentObject(&self, cx: *JSContext) -> Option<@mut Reflectable> {\n let page = page_from_context(cx);\n unsafe {\n Some((*page).frame.get_ref().window as @mut Reflectable)\n }\n }\n}\n<commit_msg>Create FormData reflectors eagerly.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::utils::{Reflectable, Reflector};\nuse dom::bindings::utils::{DOMString, null_str_as_empty};\nuse dom::bindings::codegen::FormDataBinding;\nuse dom::blob::Blob;\nuse dom::window::Window;\n\nuse js::jsapi::{JSObject, JSContext};\n\nuse std::hashmap::HashMap;\n\nenum FormDatum {\n StringData(DOMString),\n BlobData { blob: @mut Blob, name: DOMString }\n}\n\npub struct FormData {\n data: HashMap<~str, FormDatum>,\n reflector_: Reflector,\n window: @mut Window,\n}\n\nimpl FormData {\n pub fn new_inherited(window: @mut Window) -> FormData {\n FormData {\n data: HashMap::new(),\n reflector_: Reflector::new(),\n window: window,\n }\n }\n\n pub fn new(window: @mut Window) -> @mut FormData {\n let formdata = @mut FormData::new_inherited(window);\n let cx = window.get_cx();\n let scope = window.reflector().get_jsobject();\n if FormDataBinding::Wrap(cx, scope, formdata).is_null() {\n fail!(\"FormDataBinding::Wrap failed\");\n }\n assert!(formdata.reflector().get_jsobject().is_not_null());\n formdata\n }\n\n pub fn Append(&mut self, name: &DOMString, value: @mut Blob, filename: Option<DOMString>) {\n let blob = BlobData {\n blob: value,\n name: filename.unwrap_or_default(Some(~\"default\"))\n };\n self.data.insert(null_str_as_empty(name), blob);\n }\n\n pub fn Append_(&mut self, name: &DOMString, value: &DOMString) {\n self.data.insert(null_str_as_empty(name), StringData((*value).clone()));\n }\n}\n\nimpl Reflectable for FormData {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n\n fn mut_reflector<'a>(&'a mut self) -> &'a mut Reflector {\n &mut self.reflector_\n }\n\n fn wrap_object_shared(@mut self, _cx: *JSContext, _scope: *JSObject) -> *JSObject {\n unreachable!();\n }\n\n fn GetParentObject(&self, _cx: *JSContext) -> Option<@mut Reflectable> {\n Some(self.window as @mut Reflectable)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse dep_graph::{DepNodeIndex, SerializedDepNodeIndex};\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::Idx;\nuse errors::Diagnostic;\nuse rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,\n SpecializedDecoder};\nuse session::Session;\nuse std::borrow::Cow;\nuse std::cell::RefCell;\nuse std::collections::BTreeMap;\nuse std::mem;\nuse syntax::codemap::{CodeMap, StableFilemapId};\nuse syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP};\n\npub struct OnDiskCache<'sess> {\n prev_diagnostics: FxHashMap<SerializedDepNodeIndex, Vec<Diagnostic>>,\n\n _prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,\n codemap: &'sess CodeMap,\n\n current_diagnostics: RefCell<FxHashMap<DepNodeIndex, Vec<Diagnostic>>>,\n}\n\n#[derive(RustcEncodable, RustcDecodable)]\nstruct Header {\n prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,\n}\n\n#[derive(RustcEncodable, RustcDecodable)]\nstruct Body {\n diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)>,\n}\n\nimpl<'sess> OnDiskCache<'sess> {\n pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> {\n OnDiskCache {\n prev_diagnostics: FxHashMap(),\n _prev_filemap_starts: BTreeMap::new(),\n codemap,\n current_diagnostics: RefCell::new(FxHashMap()),\n }\n }\n\n pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> {\n debug_assert!(sess.opts.incremental.is_some());\n\n let mut decoder = opaque::Decoder::new(&data[..], 0);\n let header = Header::decode(&mut decoder).unwrap();\n\n let prev_diagnostics: FxHashMap<_, _> = {\n let mut decoder = CacheDecoder {\n opaque: decoder,\n codemap: sess.codemap(),\n prev_filemap_starts: &header.prev_filemap_starts,\n };\n let body = Body::decode(&mut decoder).unwrap();\n body.diagnostics.into_iter().collect()\n };\n\n OnDiskCache {\n prev_diagnostics,\n _prev_filemap_starts: header.prev_filemap_starts,\n codemap: sess.codemap(),\n current_diagnostics: RefCell::new(FxHashMap()),\n }\n }\n\n pub fn serialize<'a, 'tcx, E>(&self,\n encoder: &mut E)\n -> Result<(), E::Error>\n where E: Encoder\n {\n let prev_filemap_starts: BTreeMap<_, _> = self\n .codemap\n .files()\n .iter()\n .map(|fm| (fm.start_pos, StableFilemapId::new(fm)))\n .collect();\n\n Header { prev_filemap_starts }.encode(encoder)?;\n\n let diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)> =\n self.current_diagnostics\n .borrow()\n .iter()\n .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone()))\n .collect();\n\n Body { diagnostics }.encode(encoder)?;\n\n Ok(())\n }\n\n pub fn load_diagnostics(&self,\n dep_node_index: SerializedDepNodeIndex)\n -> Vec<Diagnostic> {\n self.prev_diagnostics.get(&dep_node_index).cloned().unwrap_or(vec![])\n }\n\n pub fn store_diagnostics(&self,\n dep_node_index: DepNodeIndex,\n diagnostics: Vec<Diagnostic>) {\n let mut current_diagnostics = self.current_diagnostics.borrow_mut();\n let prev = current_diagnostics.insert(dep_node_index, diagnostics);\n debug_assert!(prev.is_none());\n }\n\n pub fn store_diagnostics_for_anon_node(&self,\n dep_node_index: DepNodeIndex,\n mut diagnostics: Vec<Diagnostic>) {\n let mut current_diagnostics = self.current_diagnostics.borrow_mut();\n\n let x = current_diagnostics.entry(dep_node_index).or_insert_with(|| {\n mem::replace(&mut diagnostics, Vec::new())\n });\n\n x.extend(diagnostics.into_iter());\n }\n}\n\nimpl<'a> SpecializedDecoder<Span> for CacheDecoder<'a> {\n fn specialized_decode(&mut self) -> 
Result<Span, Self::Error> {\n let lo = BytePos::decode(self)?;\n let hi = BytePos::decode(self)?;\n\n if let Some((prev_filemap_start, filemap_id)) = self.find_filemap_prev_bytepos(lo) {\n if let Some(current_filemap) = self.codemap.filemap_by_stable_id(filemap_id) {\n let lo = (lo + current_filemap.start_pos) - prev_filemap_start;\n let hi = (hi + current_filemap.start_pos) - prev_filemap_start;\n return Ok(Span::new(lo, hi, NO_EXPANSION));\n }\n }\n\n Ok(DUMMY_SP)\n }\n}\n\nstruct CacheDecoder<'a> {\n opaque: opaque::Decoder<'a>,\n codemap: &'a CodeMap,\n prev_filemap_starts: &'a BTreeMap<BytePos, StableFilemapId>,\n}\n\nimpl<'a> CacheDecoder<'a> {\n fn find_filemap_prev_bytepos(&self,\n prev_bytepos: BytePos)\n -> Option<(BytePos, StableFilemapId)> {\n for (start, id) in self.prev_filemap_starts.range(BytePos(0) ... prev_bytepos).rev() {\n return Some((*start, *id))\n }\n\n None\n }\n}\n\nmacro_rules! decoder_methods {\n ($($name:ident -> $ty:ty;)*) => {\n $(fn $name(&mut self) -> Result<$ty, Self::Error> {\n self.opaque.$name()\n })*\n }\n}\n\nimpl<'sess> Decoder for CacheDecoder<'sess> {\n type Error = String;\n\n decoder_methods! {\n read_nil -> ();\n\n read_u128 -> u128;\n read_u64 -> u64;\n read_u32 -> u32;\n read_u16 -> u16;\n read_u8 -> u8;\n read_usize -> usize;\n\n read_i128 -> i128;\n read_i64 -> i64;\n read_i32 -> i32;\n read_i16 -> i16;\n read_i8 -> i8;\n read_isize -> isize;\n\n read_bool -> bool;\n read_f64 -> f64;\n read_f32 -> f32;\n read_char -> char;\n read_str -> Cow<str>;\n }\n\n fn error(&mut self, err: &str) -> Self::Error {\n self.opaque.error(err)\n }\n}\n<commit_msg>incr.comp.: Add documentation for OnDiskCache.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse dep_graph::{DepNodeIndex, SerializedDepNodeIndex};\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_vec::Idx;\nuse errors::Diagnostic;\nuse rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,\n SpecializedDecoder};\nuse session::Session;\nuse std::borrow::Cow;\nuse std::cell::RefCell;\nuse std::collections::BTreeMap;\nuse std::mem;\nuse syntax::codemap::{CodeMap, StableFilemapId};\nuse syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP};\n\n\/\/\/ `OnDiskCache` provides an interface to incr. comp. data cached from the\n\/\/\/ previous compilation session. 
This data will eventually include the results\n\/\/\/ of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and\n\/\/\/ any diagnostics that have been emitted during a query.\npub struct OnDiskCache<'sess> {\n \/\/ The diagnostics emitted during the previous compilation session.\n prev_diagnostics: FxHashMap<SerializedDepNodeIndex, Vec<Diagnostic>>,\n\n \/\/ This field collects all Diagnostics emitted during the current\n \/\/ compilation session.\n current_diagnostics: RefCell<FxHashMap<DepNodeIndex, Vec<Diagnostic>>>,\n\n \/\/ This will eventually be needed for creating Decoders that can rebase\n \/\/ spans.\n _prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,\n codemap: &'sess CodeMap,\n}\n\n\/\/ This type is used only for (de-)serialization.\n#[derive(RustcEncodable, RustcDecodable)]\nstruct Header {\n prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,\n}\n\n\/\/ This type is used only for (de-)serialization.\n#[derive(RustcEncodable, RustcDecodable)]\nstruct Body {\n diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)>,\n}\n\nimpl<'sess> OnDiskCache<'sess> {\n \/\/\/ Create a new OnDiskCache instance from the serialized data in `data`.\n \/\/\/ Note that the current implementation (which only deals with diagnostics\n \/\/\/ so far) will eagerly deserialize the complete cache. Once we are\n \/\/\/ dealing with larger amounts of data (i.e. cached query results),\n \/\/\/ deserialization will need to happen lazily.\n pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> {\n debug_assert!(sess.opts.incremental.is_some());\n\n let mut decoder = opaque::Decoder::new(&data[..], 0);\n let header = Header::decode(&mut decoder).unwrap();\n\n let prev_diagnostics: FxHashMap<_, _> = {\n let mut decoder = CacheDecoder {\n opaque: decoder,\n codemap: sess.codemap(),\n prev_filemap_starts: &header.prev_filemap_starts,\n };\n let body = Body::decode(&mut decoder).unwrap();\n body.diagnostics.into_iter().collect()\n };\n\n OnDiskCache {\n prev_diagnostics,\n _prev_filemap_starts: header.prev_filemap_starts,\n codemap: sess.codemap(),\n current_diagnostics: RefCell::new(FxHashMap()),\n }\n }\n\n pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> {\n OnDiskCache {\n prev_diagnostics: FxHashMap(),\n _prev_filemap_starts: BTreeMap::new(),\n codemap,\n current_diagnostics: RefCell::new(FxHashMap()),\n }\n }\n\n pub fn serialize<'a, 'tcx, E>(&self,\n encoder: &mut E)\n -> Result<(), E::Error>\n where E: Encoder\n {\n let prev_filemap_starts: BTreeMap<_, _> = self\n .codemap\n .files()\n .iter()\n .map(|fm| (fm.start_pos, StableFilemapId::new(fm)))\n .collect();\n\n Header { prev_filemap_starts }.encode(encoder)?;\n\n let diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)> =\n self.current_diagnostics\n .borrow()\n .iter()\n .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone()))\n .collect();\n\n Body { diagnostics }.encode(encoder)?;\n\n Ok(())\n }\n\n \/\/\/ Load a diagnostic emitted during the previous compilation session.\n pub fn load_diagnostics(&self,\n dep_node_index: SerializedDepNodeIndex)\n -> Vec<Diagnostic> {\n self.prev_diagnostics.get(&dep_node_index).cloned().unwrap_or(vec![])\n }\n\n \/\/\/ Store a diagnostic emitted during the current compilation session.\n \/\/\/ Anything stored like this will be available via `load_diagnostics` in\n \/\/\/ the next compilation session.\n pub fn store_diagnostics(&self,\n dep_node_index: DepNodeIndex,\n diagnostics: Vec<Diagnostic>) {\n let mut current_diagnostics = 
self.current_diagnostics.borrow_mut();\n let prev = current_diagnostics.insert(dep_node_index, diagnostics);\n debug_assert!(prev.is_none());\n }\n\n \/\/\/ Store a diagnostic emitted during computation of an anonymous query.\n \/\/\/ Since many anonymous queries can share the same `DepNode`, we aggregate\n \/\/\/ them -- as opposed to regular queries where we assume that there is a\n \/\/\/ 1:1 relationship between query-key and `DepNode`.\n pub fn store_diagnostics_for_anon_node(&self,\n dep_node_index: DepNodeIndex,\n mut diagnostics: Vec<Diagnostic>) {\n let mut current_diagnostics = self.current_diagnostics.borrow_mut();\n\n let x = current_diagnostics.entry(dep_node_index).or_insert_with(|| {\n mem::replace(&mut diagnostics, Vec::new())\n });\n\n x.extend(diagnostics.into_iter());\n }\n}\n\n\/\/\/ A decoder that can read the incr. comp. cache. It is similar to the one\n\/\/\/ we use for crate metadata decoding in that it can rebase spans and\n\/\/\/ eventually will also handle things that contain `Ty` instances.\nstruct CacheDecoder<'a> {\n opaque: opaque::Decoder<'a>,\n codemap: &'a CodeMap,\n prev_filemap_starts: &'a BTreeMap<BytePos, StableFilemapId>,\n}\n\nimpl<'a> CacheDecoder<'a> {\n fn find_filemap_prev_bytepos(&self,\n prev_bytepos: BytePos)\n -> Option<(BytePos, StableFilemapId)> {\n for (start, id) in self.prev_filemap_starts.range(BytePos(0) ... prev_bytepos).rev() {\n return Some((*start, *id))\n }\n\n None\n }\n}\n\nmacro_rules! decoder_methods {\n ($($name:ident -> $ty:ty;)*) => {\n $(fn $name(&mut self) -> Result<$ty, Self::Error> {\n self.opaque.$name()\n })*\n }\n}\n\nimpl<'sess> Decoder for CacheDecoder<'sess> {\n type Error = String;\n\n decoder_methods! {\n read_nil -> ();\n\n read_u128 -> u128;\n read_u64 -> u64;\n read_u32 -> u32;\n read_u16 -> u16;\n read_u8 -> u8;\n read_usize -> usize;\n\n read_i128 -> i128;\n read_i64 -> i64;\n read_i32 -> i32;\n read_i16 -> i16;\n read_i8 -> i8;\n read_isize -> isize;\n\n read_bool -> bool;\n read_f64 -> f64;\n read_f32 -> f32;\n read_char -> char;\n read_str -> Cow<str>;\n }\n\n fn error(&mut self, err: &str) -> Self::Error {\n self.opaque.error(err)\n }\n}\n\nimpl<'a> SpecializedDecoder<Span> for CacheDecoder<'a> {\n fn specialized_decode(&mut self) -> Result<Span, Self::Error> {\n let lo = BytePos::decode(self)?;\n let hi = BytePos::decode(self)?;\n\n if let Some((prev_filemap_start, filemap_id)) = self.find_filemap_prev_bytepos(lo) {\n if let Some(current_filemap) = self.codemap.filemap_by_stable_id(filemap_id) {\n let lo = (lo + current_filemap.start_pos) - prev_filemap_start;\n let hi = (hi + current_filemap.start_pos) - prev_filemap_start;\n return Ok(Span::new(lo, hi, NO_EXPANSION));\n }\n }\n\n Ok(DUMMY_SP)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>game_of_stones<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for bowling case<commit_after>type Frame = (u32, u32, u32);\n\nconst MAX_FRAME: usize = 10;\nconst FRAME_WINDOW: usize = 3;\nconst FRAME_SIZE: usize = MAX_FRAME + FRAME_WINDOW - 1;\nconst INVALID_SCORE: u32 = std::u32::MAX;\n\n#[derive(Default)]\npub struct BowlingGame {\n record: [Frame; FRAME_SIZE],\n count: usize,\n}\n\nimpl BowlingGame {\n pub fn new() -> BowlingGame {\n BowlingGame {\n record: [(INVALID_SCORE, INVALID_SCORE, INVALID_SCORE); FRAME_SIZE],\n count: 0,\n }\n }\n\n pub fn roll(&mut self, pins: u32) -> Result<u32, String> {\n if pins > 10 {\n return Err(\"Invalid pins\".to_string());\n }\n\n match self.count {\n 0...8 => 
roll_in_normal_frame(self, pins),\n 9 => roll_in_final_frame(self, pins),\n _ => Err(\"No more pins\".to_string()),\n }\n }\n\n pub fn score(&self) -> Result<u32, String> {\n if self.count != MAX_FRAME {\n return Err(\"Unfinished games\".to_string());\n }\n\n let r = &self.record;\n let v = r.windows(FRAME_WINDOW).map(|v| count(v)).sum();\n\n Ok(v)\n }\n}\n\nfn roll_in_normal_frame(v: &mut BowlingGame, pins: u32) -> Result<u32, String> {\n let f = &mut v.record[v.count];\n\n match *f {\n (INVALID_SCORE, _, _) if pins == 10 => {\n f.0 = pins;\n v.count += 1;\n Ok(0)\n }\n (INVALID_SCORE, _, _) if pins < 10 => {\n f.0 = pins;\n Ok(0)\n }\n (a, INVALID_SCORE, _) if pins > 10 - a => Err(\"Invalid second pins\".to_string()),\n (a, INVALID_SCORE, _) if pins <= 10 - a => {\n f.1 = pins;\n v.count += 1;\n Ok(1)\n }\n _ => panic!(\"Should not reach here\"),\n }\n}\n\nfn roll_in_final_frame(v: &mut BowlingGame, pins: u32) -> Result<u32, String> {\n let f = &mut v.record[v.count];\n\n match *f {\n (INVALID_SCORE, _, _) => {\n f.0 = pins;\n Ok(0)\n }\n (10, INVALID_SCORE, _) => {\n f.1 = pins;\n Ok(1)\n }\n (a, INVALID_SCORE, _) if pins > 10 - a => Err(\"Invalid second pins\".to_string()),\n (a, INVALID_SCORE, _) if pins < 10 - a => {\n f.1 = pins;\n v.count += 1;\n Ok(1)\n }\n (a, INVALID_SCORE, _) if pins == 10 - a => {\n f.1 = pins;\n Ok(1)\n }\n (10, 10, INVALID_SCORE) => {\n f.2 = pins;\n v.count += 1;\n Ok(2)\n }\n (10, b, INVALID_SCORE) if b != 10 && pins > 10 - b => Err(\"Invalid third pins\".to_string()),\n (a, b, INVALID_SCORE) if a + b < 10 => Err(\"Invalid third pins\".to_string()),\n (a, b, INVALID_SCORE) if a + b >= 10 => {\n f.2 = pins;\n v.count += 1;\n Ok(2)\n }\n _ => panic!(\"Should not reach here\"),\n }\n}\n\nfn count(frames: &[Frame]) -> u32 {\n let f = &frames[1];\n if f.0 == INVALID_SCORE {\n return count_final_frame(frames);\n }\n\n count_normal_frame(frames)\n}\n\nfn count_normal_frame(frames: &[Frame]) -> u32 {\n let f = &frames[0];\n let s = &frames[1];\n let t = &frames[2];\n\n match *f {\n (10, _, _) if s.0 == 10 && t.0 != INVALID_SCORE => 10 + s.0 + t.0,\n (10, _, _) if s.0 == 10 && t.0 == INVALID_SCORE => 10 + s.0 + s.1,\n (10, _, _) if s.0 != 10 => 10 + s.0 + s.1,\n (a, b, _) if a + b == 10 => 10 + s.0,\n (a, b, _) if a + b < 10 => a + b,\n _ => panic!(\"Should not reach here\"),\n }\n}\n\nfn count_final_frame(frames: &[Frame]) -> u32 {\n let f = &frames[0];\n\n match *f {\n (10, b, c) => 10 + b + c,\n (a, b, c) if a + b == 10 => a + b + c,\n (a, b, INVALID_SCORE) => a + b,\n _ => panic!(\"Should not reach here\"),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Create unique StdRng instances for arguments of run_inc<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Derive `Clone` for `Datum`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use a collection of descriptor sets per swapchain image<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Replace the explicit subpass dependency with a stricter semaphore on the queue submission<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\ntypeck.rs, an introduction\n\nThe type checker is responsible for:\n\n1. Determining the type of each expression\n2. Resolving methods and traits\n3. Guaranteeing that most type rules are met (\"most?\", you say, \"why most?\"\n Well, dear reader, read on)\n\nThe main entry point is `check_crate()`. Type checking operates in\nseveral major phases:\n\n1. The collect phase first passes over all items and determines their\n type, without examining their \"innards\".\n\n2. Variance inference then runs to compute the variance of each parameter\n\n3. Coherence checks for overlapping or orphaned impls\n\n4. Finally, the check phase then checks function bodies and so forth.\n Within the check phase, we check each function body one at a time\n (bodies of function expressions are checked as part of the\n containing function). Inference is used to supply types wherever\n they are unknown. The actual checking of a function itself has\n several phases (check, regionck, writeback), as discussed in the\n documentation for the `check` module.\n\nThe type checker is defined into various submodules which are documented\nindependently:\n\n- astconv: converts the AST representation of types\n into the `ty` representation\n\n- collect: computes the types of each top-level item and enters them into\n the `cx.tcache` table for later use\n\n- coherence: enforces coherence rules, builds some tables\n\n- variance: variance inference\n\n- check: walks over function bodies and type checks them, inferring types for\n local variables, type parameters, etc as necessary.\n\n- infer: finds the types to use for each type variable such that\n all subtyping and assignment constraints are met. 
In essence, the check\n module specifies the constraints, and the infer module solves them.\n\n# Note\n\nThis API is completely unstable and subject to change.\n\n*\/\n\n#![crate_name = \"rustc_typeck\"]\n#![unstable(feature = \"rustc_private\", issue = \"27812\")]\n#![crate_type = \"dylib\"]\n#![crate_type = \"rlib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n\n#![allow(non_camel_case_types)]\n\n#![feature(box_patterns)]\n#![feature(box_syntax)]\n#![feature(iter_arith)]\n#![feature(quote)]\n#![feature(rustc_diagnostic_macros)]\n#![feature(rustc_private)]\n#![feature(staged_api)]\n#![feature(question_mark)]\n\n#[macro_use] extern crate log;\n#[macro_use] extern crate syntax;\n\nextern crate arena;\nextern crate fmt_macros;\n#[macro_use] extern crate rustc;\nextern crate rustc_platform_intrinsics as intrinsics;\nextern crate rustc_back;\nextern crate rustc_const_math;\nextern crate rustc_const_eval;\n\npub use rustc::dep_graph;\npub use rustc::hir;\npub use rustc::lint;\npub use rustc::middle;\npub use rustc::session;\npub use rustc::util;\n\nuse dep_graph::DepNode;\nuse hir::map as hir_map;\nuse hir::def::Def;\nuse rustc::infer::TypeOrigin;\nuse rustc::ty::subst::Substs;\nuse rustc::ty::{self, Ty, TyCtxt, TypeFoldable};\nuse rustc::traits::ProjectionMode;\nuse session::{config, CompileResult};\nuse util::common::time;\n\nuse syntax::codemap::Span;\nuse syntax::ast;\nuse syntax::abi::Abi;\n\nuse std::cell::RefCell;\n\n\/\/ NB: This module needs to be declared first so diagnostics are\n\/\/ registered before they are used.\npub mod diagnostics;\n\npub mod check;\npub mod check_unused;\nmod rscope;\nmod astconv;\npub mod collect;\nmod constrained_type_params;\npub mod coherence;\npub mod variance;\n\npub struct TypeAndSubsts<'tcx> {\n pub substs: Substs<'tcx>,\n pub ty: Ty<'tcx>,\n}\n\npub struct CrateCtxt<'a, 'tcx: 'a> {\n \/\/ A mapping from method call sites to traits that have that method.\n pub trait_map: hir::TraitMap,\n\n \/\/\/ A vector of every trait accessible in the whole crate\n \/\/\/ (i.e. including those from subcrates). 
This is used only for\n \/\/\/ error reporting, and so is lazily initialised and generally\n \/\/\/ shouldn't taint the common path (hence the RefCell).\n pub all_traits: RefCell<Option<check::method::AllTraitsVec>>,\n\n \/\/\/ This stack is used to identify cycles in the user's source.\n \/\/\/ Note that these cycles can cross multiple items.\n pub stack: RefCell<Vec<collect::AstConvRequest>>,\n\n pub tcx: TyCtxt<'a, 'tcx, 'tcx>,\n}\n\n\/\/ Functions that write types into the node type table\nfn write_ty_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, node_id: ast::NodeId, ty: Ty<'tcx>) {\n debug!(\"write_ty_to_tcx({}, {:?})\", node_id, ty);\n assert!(!ty.needs_infer());\n ccx.tcx.node_type_insert(node_id, ty);\n}\n\nfn write_substs_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,\n node_id: ast::NodeId,\n item_substs: ty::ItemSubsts<'tcx>) {\n if !item_substs.is_noop() {\n debug!(\"write_substs_to_tcx({}, {:?})\",\n node_id,\n item_substs);\n\n assert!(!item_substs.substs.types.needs_infer());\n\n ccx.tcx.tables.borrow_mut().item_substs.insert(node_id, item_substs);\n }\n}\n\nfn lookup_full_def(tcx: TyCtxt, sp: Span, id: ast::NodeId) -> Def {\n match tcx.def_map.borrow().get(&id) {\n Some(x) => x.full_def(),\n None => {\n span_fatal!(tcx.sess, sp, E0242, \"internal error looking up a definition\")\n }\n }\n}\n\nfn require_c_abi_if_variadic(tcx: TyCtxt,\n decl: &hir::FnDecl,\n abi: Abi,\n span: Span) {\n if decl.variadic && abi != Abi::C {\n span_err!(tcx.sess, span, E0045,\n \"variadic function must have C calling convention\");\n }\n}\n\npub fn emit_type_err<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,\n span: Span,\n found_ty: Ty<'tcx>,\n expected_ty: Ty<'tcx>,\n terr: &ty::error::TypeError<'tcx>,\n msg: &str) {\n let mut err = struct_span_err!(tcx.sess, span, E0211, \"{}\", msg);\n err = err.span_label(span, &terr);\n err = err.note_expected_found(&\"type\", &expected_ty, &found_ty);\n tcx.note_and_explain_type_err(&mut err, terr, span);\n err.emit();\n}\n\nfn require_same_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,\n span: Span,\n t1: Ty<'tcx>,\n t2: Ty<'tcx>,\n msg: &str)\n -> bool {\n ccx.tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| {\n if let Err(err) = infcx.eq_types(false, TypeOrigin::Misc(span), t1, t2) {\n emit_type_err(infcx.tcx, span, t1, t2, &err, msg);\n false\n } else {\n true\n }\n })\n}\n\nfn check_main_fn_ty(ccx: &CrateCtxt,\n main_id: ast::NodeId,\n main_span: Span) {\n let tcx = ccx.tcx;\n let main_t = tcx.node_id_to_type(main_id);\n match main_t.sty {\n ty::TyFnDef(..) 
=> {\n match tcx.map.find(main_id) {\n Some(hir_map::NodeItem(it)) => {\n match it.node {\n hir::ItemFn(_, _, _, _, ref ps, _)\n if ps.is_parameterized() => {\n span_err!(ccx.tcx.sess, main_span, E0131,\n \"main function is not allowed to have type parameters\");\n return;\n }\n _ => ()\n }\n }\n _ => ()\n }\n let main_def_id = tcx.map.local_def_id(main_id);\n let substs = tcx.mk_substs(Substs::empty());\n let se_ty = tcx.mk_fn_def(main_def_id, substs,\n tcx.mk_bare_fn(ty::BareFnTy {\n unsafety: hir::Unsafety::Normal,\n abi: Abi::Rust,\n sig: ty::Binder(ty::FnSig {\n inputs: Vec::new(),\n output: ty::FnConverging(tcx.mk_nil()),\n variadic: false\n })\n }));\n\n require_same_types(ccx, main_span, main_t, se_ty,\n \"main function has wrong type\");\n }\n _ => {\n span_bug!(main_span,\n \"main has a non-function type: found `{}`\",\n main_t);\n }\n }\n}\n\nfn check_start_fn_ty(ccx: &CrateCtxt,\n start_id: ast::NodeId,\n start_span: Span) {\n let tcx = ccx.tcx;\n let start_t = tcx.node_id_to_type(start_id);\n match start_t.sty {\n ty::TyFnDef(..) => {\n match tcx.map.find(start_id) {\n Some(hir_map::NodeItem(it)) => {\n match it.node {\n hir::ItemFn(_,_,_,_,ref ps,_)\n if ps.is_parameterized() => {\n span_err!(tcx.sess, start_span, E0132,\n \"start function is not allowed to have type parameters\");\n return;\n }\n _ => ()\n }\n }\n _ => ()\n }\n\n let start_def_id = ccx.tcx.map.local_def_id(start_id);\n let substs = tcx.mk_substs(Substs::empty());\n let se_ty = tcx.mk_fn_def(start_def_id, substs,\n tcx.mk_bare_fn(ty::BareFnTy {\n unsafety: hir::Unsafety::Normal,\n abi: Abi::Rust,\n sig: ty::Binder(ty::FnSig {\n inputs: vec!(\n tcx.types.isize,\n tcx.mk_imm_ptr(tcx.mk_imm_ptr(tcx.types.u8))\n ),\n output: ty::FnConverging(tcx.types.isize),\n variadic: false,\n }),\n }));\n\n require_same_types(ccx, start_span, start_t, se_ty,\n \"start function has wrong type\");\n }\n _ => {\n span_bug!(start_span,\n \"start has a non-function type: found `{}`\",\n start_t);\n }\n }\n}\n\nfn check_for_entry_fn(ccx: &CrateCtxt) {\n let tcx = ccx.tcx;\n let _task = tcx.dep_graph.in_task(DepNode::CheckEntryFn);\n match *tcx.sess.entry_fn.borrow() {\n Some((id, sp)) => match tcx.sess.entry_type.get() {\n Some(config::EntryMain) => check_main_fn_ty(ccx, id, sp),\n Some(config::EntryStart) => check_start_fn_ty(ccx, id, sp),\n Some(config::EntryNone) => {}\n None => bug!(\"entry function without a type\")\n },\n None => {}\n }\n}\n\npub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n trait_map: hir::TraitMap)\n -> CompileResult {\n let time_passes = tcx.sess.time_passes();\n let ccx = CrateCtxt {\n trait_map: trait_map,\n all_traits: RefCell::new(None),\n stack: RefCell::new(Vec::new()),\n tcx: tcx\n };\n\n \/\/ this ensures that later parts of type checking can assume that items\n \/\/ have valid types and not error\n tcx.sess.track_errors(|| {\n time(time_passes, \"type collecting\", ||\n collect::collect_item_types(&ccx));\n\n })?;\n\n time(time_passes, \"variance inference\", ||\n variance::infer_variance(tcx));\n\n tcx.sess.track_errors(|| {\n time(time_passes, \"coherence checking\", ||\n coherence::check_coherence(&ccx));\n })?;\n\n time(time_passes, \"wf checking\", || check::check_wf_new(&ccx))?;\n\n time(time_passes, \"item-types checking\", || check::check_item_types(&ccx))?;\n\n time(time_passes, \"item-bodies checking\", || check::check_item_bodies(&ccx))?;\n\n time(time_passes, \"drop-impl checking\", || check::check_drop_impls(&ccx))?;\n\n check_unused::check_crate(tcx);\n 
check_for_entry_fn(&ccx);\n\n let err_count = tcx.sess.err_count();\n if err_count == 0 {\n Ok(())\n } else {\n Err(err_count)\n }\n}\n\n__build_diagnostic_array! { librustc_typeck, DIAGNOSTICS }\n<commit_msg>Update span_label call to new api<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n\ntypeck.rs, an introduction\n\nThe type checker is responsible for:\n\n1. Determining the type of each expression\n2. Resolving methods and traits\n3. Guaranteeing that most type rules are met (\"most?\", you say, \"why most?\"\n Well, dear reader, read on)\n\nThe main entry point is `check_crate()`. Type checking operates in\nseveral major phases:\n\n1. The collect phase first passes over all items and determines their\n type, without examining their \"innards\".\n\n2. Variance inference then runs to compute the variance of each parameter\n\n3. Coherence checks for overlapping or orphaned impls\n\n4. Finally, the check phase then checks function bodies and so forth.\n Within the check phase, we check each function body one at a time\n (bodies of function expressions are checked as part of the\n containing function). Inference is used to supply types wherever\n they are unknown. The actual checking of a function itself has\n several phases (check, regionck, writeback), as discussed in the\n documentation for the `check` module.\n\nThe type checker is defined into various submodules which are documented\nindependently:\n\n- astconv: converts the AST representation of types\n into the `ty` representation\n\n- collect: computes the types of each top-level item and enters them into\n the `cx.tcache` table for later use\n\n- coherence: enforces coherence rules, builds some tables\n\n- variance: variance inference\n\n- check: walks over function bodies and type checks them, inferring types for\n local variables, type parameters, etc as necessary.\n\n- infer: finds the types to use for each type variable such that\n all subtyping and assignment constraints are met. 
In essence, the check\n module specifies the constraints, and the infer module solves them.\n\n# Note\n\nThis API is completely unstable and subject to change.\n\n*\/\n\n#![crate_name = \"rustc_typeck\"]\n#![unstable(feature = \"rustc_private\", issue = \"27812\")]\n#![crate_type = \"dylib\"]\n#![crate_type = \"rlib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n#![cfg_attr(not(stage0), deny(warnings))]\n\n#![allow(non_camel_case_types)]\n\n#![feature(box_patterns)]\n#![feature(box_syntax)]\n#![feature(iter_arith)]\n#![feature(quote)]\n#![feature(rustc_diagnostic_macros)]\n#![feature(rustc_private)]\n#![feature(staged_api)]\n#![feature(question_mark)]\n\n#[macro_use] extern crate log;\n#[macro_use] extern crate syntax;\n\nextern crate arena;\nextern crate fmt_macros;\n#[macro_use] extern crate rustc;\nextern crate rustc_platform_intrinsics as intrinsics;\nextern crate rustc_back;\nextern crate rustc_const_math;\nextern crate rustc_const_eval;\n\npub use rustc::dep_graph;\npub use rustc::hir;\npub use rustc::lint;\npub use rustc::middle;\npub use rustc::session;\npub use rustc::util;\n\nuse dep_graph::DepNode;\nuse hir::map as hir_map;\nuse hir::def::Def;\nuse rustc::infer::TypeOrigin;\nuse rustc::ty::subst::Substs;\nuse rustc::ty::{self, Ty, TyCtxt, TypeFoldable};\nuse rustc::traits::ProjectionMode;\nuse session::{config, CompileResult};\nuse util::common::time;\n\nuse syntax::codemap::Span;\nuse syntax::ast;\nuse syntax::abi::Abi;\n\nuse std::cell::RefCell;\n\n\/\/ NB: This module needs to be declared first so diagnostics are\n\/\/ registered before they are used.\npub mod diagnostics;\n\npub mod check;\npub mod check_unused;\nmod rscope;\nmod astconv;\npub mod collect;\nmod constrained_type_params;\npub mod coherence;\npub mod variance;\n\npub struct TypeAndSubsts<'tcx> {\n pub substs: Substs<'tcx>,\n pub ty: Ty<'tcx>,\n}\n\npub struct CrateCtxt<'a, 'tcx: 'a> {\n \/\/ A mapping from method call sites to traits that have that method.\n pub trait_map: hir::TraitMap,\n\n \/\/\/ A vector of every trait accessible in the whole crate\n \/\/\/ (i.e. including those from subcrates). 
This is used only for\n \/\/\/ error reporting, and so is lazily initialised and generally\n \/\/\/ shouldn't taint the common path (hence the RefCell).\n pub all_traits: RefCell<Option<check::method::AllTraitsVec>>,\n\n \/\/\/ This stack is used to identify cycles in the user's source.\n \/\/\/ Note that these cycles can cross multiple items.\n pub stack: RefCell<Vec<collect::AstConvRequest>>,\n\n pub tcx: TyCtxt<'a, 'tcx, 'tcx>,\n}\n\n\/\/ Functions that write types into the node type table\nfn write_ty_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, node_id: ast::NodeId, ty: Ty<'tcx>) {\n debug!(\"write_ty_to_tcx({}, {:?})\", node_id, ty);\n assert!(!ty.needs_infer());\n ccx.tcx.node_type_insert(node_id, ty);\n}\n\nfn write_substs_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,\n node_id: ast::NodeId,\n item_substs: ty::ItemSubsts<'tcx>) {\n if !item_substs.is_noop() {\n debug!(\"write_substs_to_tcx({}, {:?})\",\n node_id,\n item_substs);\n\n assert!(!item_substs.substs.types.needs_infer());\n\n ccx.tcx.tables.borrow_mut().item_substs.insert(node_id, item_substs);\n }\n}\n\nfn lookup_full_def(tcx: TyCtxt, sp: Span, id: ast::NodeId) -> Def {\n match tcx.def_map.borrow().get(&id) {\n Some(x) => x.full_def(),\n None => {\n span_fatal!(tcx.sess, sp, E0242, \"internal error looking up a definition\")\n }\n }\n}\n\nfn require_c_abi_if_variadic(tcx: TyCtxt,\n decl: &hir::FnDecl,\n abi: Abi,\n span: Span) {\n if decl.variadic && abi != Abi::C {\n span_err!(tcx.sess, span, E0045,\n \"variadic function must have C calling convention\");\n }\n}\n\npub fn emit_type_err<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,\n span: Span,\n found_ty: Ty<'tcx>,\n expected_ty: Ty<'tcx>,\n terr: &ty::error::TypeError<'tcx>,\n msg: &str) {\n let mut err = struct_span_err!(tcx.sess, span, E0211, \"{}\", msg);\n err.span_label(span, &terr);\n err.note_expected_found(&\"type\", &expected_ty, &found_ty);\n tcx.note_and_explain_type_err(&mut err, terr, span);\n err.emit();\n}\n\nfn require_same_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,\n span: Span,\n t1: Ty<'tcx>,\n t2: Ty<'tcx>,\n msg: &str)\n -> bool {\n ccx.tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| {\n if let Err(err) = infcx.eq_types(false, TypeOrigin::Misc(span), t1, t2) {\n emit_type_err(infcx.tcx, span, t1, t2, &err, msg);\n false\n } else {\n true\n }\n })\n}\n\nfn check_main_fn_ty(ccx: &CrateCtxt,\n main_id: ast::NodeId,\n main_span: Span) {\n let tcx = ccx.tcx;\n let main_t = tcx.node_id_to_type(main_id);\n match main_t.sty {\n ty::TyFnDef(..) 
=> {\n match tcx.map.find(main_id) {\n Some(hir_map::NodeItem(it)) => {\n match it.node {\n hir::ItemFn(_, _, _, _, ref ps, _)\n if ps.is_parameterized() => {\n span_err!(ccx.tcx.sess, main_span, E0131,\n \"main function is not allowed to have type parameters\");\n return;\n }\n _ => ()\n }\n }\n _ => ()\n }\n let main_def_id = tcx.map.local_def_id(main_id);\n let substs = tcx.mk_substs(Substs::empty());\n let se_ty = tcx.mk_fn_def(main_def_id, substs,\n tcx.mk_bare_fn(ty::BareFnTy {\n unsafety: hir::Unsafety::Normal,\n abi: Abi::Rust,\n sig: ty::Binder(ty::FnSig {\n inputs: Vec::new(),\n output: ty::FnConverging(tcx.mk_nil()),\n variadic: false\n })\n }));\n\n require_same_types(ccx, main_span, main_t, se_ty,\n \"main function has wrong type\");\n }\n _ => {\n span_bug!(main_span,\n \"main has a non-function type: found `{}`\",\n main_t);\n }\n }\n}\n\nfn check_start_fn_ty(ccx: &CrateCtxt,\n start_id: ast::NodeId,\n start_span: Span) {\n let tcx = ccx.tcx;\n let start_t = tcx.node_id_to_type(start_id);\n match start_t.sty {\n ty::TyFnDef(..) => {\n match tcx.map.find(start_id) {\n Some(hir_map::NodeItem(it)) => {\n match it.node {\n hir::ItemFn(_,_,_,_,ref ps,_)\n if ps.is_parameterized() => {\n span_err!(tcx.sess, start_span, E0132,\n \"start function is not allowed to have type parameters\");\n return;\n }\n _ => ()\n }\n }\n _ => ()\n }\n\n let start_def_id = ccx.tcx.map.local_def_id(start_id);\n let substs = tcx.mk_substs(Substs::empty());\n let se_ty = tcx.mk_fn_def(start_def_id, substs,\n tcx.mk_bare_fn(ty::BareFnTy {\n unsafety: hir::Unsafety::Normal,\n abi: Abi::Rust,\n sig: ty::Binder(ty::FnSig {\n inputs: vec!(\n tcx.types.isize,\n tcx.mk_imm_ptr(tcx.mk_imm_ptr(tcx.types.u8))\n ),\n output: ty::FnConverging(tcx.types.isize),\n variadic: false,\n }),\n }));\n\n require_same_types(ccx, start_span, start_t, se_ty,\n \"start function has wrong type\");\n }\n _ => {\n span_bug!(start_span,\n \"start has a non-function type: found `{}`\",\n start_t);\n }\n }\n}\n\nfn check_for_entry_fn(ccx: &CrateCtxt) {\n let tcx = ccx.tcx;\n let _task = tcx.dep_graph.in_task(DepNode::CheckEntryFn);\n match *tcx.sess.entry_fn.borrow() {\n Some((id, sp)) => match tcx.sess.entry_type.get() {\n Some(config::EntryMain) => check_main_fn_ty(ccx, id, sp),\n Some(config::EntryStart) => check_start_fn_ty(ccx, id, sp),\n Some(config::EntryNone) => {}\n None => bug!(\"entry function without a type\")\n },\n None => {}\n }\n}\n\npub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n trait_map: hir::TraitMap)\n -> CompileResult {\n let time_passes = tcx.sess.time_passes();\n let ccx = CrateCtxt {\n trait_map: trait_map,\n all_traits: RefCell::new(None),\n stack: RefCell::new(Vec::new()),\n tcx: tcx\n };\n\n \/\/ this ensures that later parts of type checking can assume that items\n \/\/ have valid types and not error\n tcx.sess.track_errors(|| {\n time(time_passes, \"type collecting\", ||\n collect::collect_item_types(&ccx));\n\n })?;\n\n time(time_passes, \"variance inference\", ||\n variance::infer_variance(tcx));\n\n tcx.sess.track_errors(|| {\n time(time_passes, \"coherence checking\", ||\n coherence::check_coherence(&ccx));\n })?;\n\n time(time_passes, \"wf checking\", || check::check_wf_new(&ccx))?;\n\n time(time_passes, \"item-types checking\", || check::check_item_types(&ccx))?;\n\n time(time_passes, \"item-bodies checking\", || check::check_item_bodies(&ccx))?;\n\n time(time_passes, \"drop-impl checking\", || check::check_drop_impls(&ccx))?;\n\n check_unused::check_crate(tcx);\n 
check_for_entry_fn(&ccx);\n\n let err_count = tcx.sess.err_count();\n if err_count == 0 {\n Ok(())\n } else {\n Err(err_count)\n }\n}\n\n__build_diagnostic_array! { librustc_typeck, DIAGNOSTICS }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Test yielding in crust functions<commit_after>native mod rustrt {\n fn rust_dbg_call(cb: *u8,\n data: ctypes::uintptr_t) -> ctypes::uintptr_t;\n}\n\ncrust fn cb(data: ctypes::uintptr_t) -> ctypes::uintptr_t {\n if data == 1u {\n data\n } else {\n count(data - 1u) + count(data - 1u)\n }\n}\n\nfn count(n: uint) -> uint {\n task::yield();\n rustrt::rust_dbg_call(cb, n)\n}\n\nfn main() {\n iter::repeat(10u) {||\n task::spawn {||\n let result = count(5u);\n #debug(\"result = %?\", result);\n assert result == 16u;\n };\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #29048. Fixes #29048.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub struct Chan;\npub struct ChanSelect<'c, T> {\n chans: Vec<(&'c Chan, T)>,\n}\nimpl<'c, T> ChanSelect<'c, T> {\n pub fn add_recv_ret(&mut self, chan: &'c Chan, ret: T)\n {\n self.chans.push((chan, ret));\n }\n}\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Passing structs via FFI should work regardless of whether\n\/\/ they get passed in multiple registers, byval pointers or the stack\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct Rect {\n a: i32,\n b: i32,\n c: i32,\n d: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct BiggerRect {\n s: Rect,\n a: i32,\n b: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatRect {\n a: i32,\n b: i32,\n c: f64\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct Huge {\n a: i32,\n b: i32,\n c: i32,\n d: i32,\n e: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatPoint {\n x: f64,\n y: f64\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatOne {\n x: f64,\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct IntOdd {\n a: i8,\n b: i8,\n c: i8,\n}\n\n#[link(name = \"test\", kind = \"static\")]\nextern {\n fn byval_rect(a: i32, b: i32, c: i32, d: i32, e: i32, s: Rect);\n\n fn byval_many_rect(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, s: Rect);\n\n fn byval_rect_floats(a: f32, b: f32, c: f64, d: f32, e: f32,\n f: f32, g: f64, s: Rect, t: FloatRect);\n\n fn byval_rect_with_float(a: i32, b: i32, c: f32, d: i32, e: i32, f: i32, s: Rect);\n\n fn byval_rect_with_many_huge(a: Huge, b: Huge, c: Huge, d: Huge, e: Huge, f: Huge, g: Rect);\n\n fn split_rect(a: i32, b: i32, s: Rect);\n\n fn split_rect_floats(a: f32, b: f32, s: FloatRect);\n\n fn split_rect_with_floats(a: i32, b: i32, c: f32, d: i32, e: f32, f: i32, s: Rect);\n\n fn split_and_byval_rect(a: i32, b: i32, c: i32, s: Rect, t: Rect);\n\n fn split_ret_byval_struct(a: i32, b: i32, s: Rect) -> Rect;\n\n fn sret_byval_struct(a: i32, b: i32, c: i32, d: i32, s: Rect) -> BiggerRect;\n\n fn sret_split_struct(a: i32, b: i32, s: Rect) -> BiggerRect;\n\n fn huge_struct(s: Huge) -> Huge;\n\n fn float_point(p: FloatPoint) -> FloatPoint;\n\n fn float_one(f: FloatOne) -> FloatOne;\n\n fn int_odd(i: IntOdd) -> IntOdd;\n}\n\nfn main() {\n let s = Rect { a: 553, b: 554, c: 555, d: 556 };\n let t = BiggerRect { s: s, a: 27834, b: 7657 };\n let u = FloatRect { a: 3489, b: 3490, c: 8. };\n let v = Huge { a: 5647, b: 5648, c: 5649, d: 5650, e: 5651 };\n let p = FloatPoint { x: 5., y: -3. };\n let f1 = FloatOne { x: 7. };\n let i = IntOdd { a: 1, b: 2, c: 3 };\n\n unsafe {\n byval_rect(1, 2, 3, 4, 5, s);\n byval_many_rect(1, 2, 3, 4, 5, 6, s);\n byval_rect_floats(1., 2., 3., 4., 5., 6., 7., s, u);\n byval_rect_with_float(1, 2, 3.0, 4, 5, 6, s);\n byval_rect_with_many_huge(v, v, v, v, v, v, Rect {\n a: 123,\n b: 456,\n c: 789,\n d: 420\n });\n split_rect(1, 2, s);\n split_rect_floats(1., 2., u);\n split_rect_with_floats(1, 2, 3.0, 4, 5.0, 6, s);\n split_and_byval_rect(1, 2, 3, s, s);\n split_rect(1, 2, s);\n assert_eq!(huge_struct(v), v);\n assert_eq!(split_ret_byval_struct(1, 2, s), s);\n assert_eq!(sret_byval_struct(1, 2, 3, 4, s), t);\n assert_eq!(sret_split_struct(1, 2, s), t);\n assert_eq!(float_point(p), p);\n assert_eq!(int_odd(i), i);\n\n \/\/ mingw64-gcc uses the wrong ABI:\n \/\/ https:\/\/gcc.gnu.org\/bugzilla\/show_bug.cgi?id=82028\n #[cfg(not(all(windows, target_arch = \"x86_64\", target_env = \"gnu\")))]\n assert_eq!(float_one(f1), f1);\n }\n}\n<commit_msg>Exclude all windows-gnu from the float_one test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Passing structs via FFI should work regardless of whether\n\/\/ they get passed in multiple registers, byval pointers or the stack\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct Rect {\n a: i32,\n b: i32,\n c: i32,\n d: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct BiggerRect {\n s: Rect,\n a: i32,\n b: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatRect {\n a: i32,\n b: i32,\n c: f64\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct Huge {\n a: i32,\n b: i32,\n c: i32,\n d: i32,\n e: i32\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatPoint {\n x: f64,\n y: f64\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct FloatOne {\n x: f64,\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n#[repr(C)]\nstruct IntOdd {\n a: i8,\n b: i8,\n c: i8,\n}\n\n#[link(name = \"test\", kind = \"static\")]\nextern {\n fn byval_rect(a: i32, b: i32, c: i32, d: i32, e: i32, s: Rect);\n\n fn byval_many_rect(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, s: Rect);\n\n fn byval_rect_floats(a: f32, b: f32, c: f64, d: f32, e: f32,\n f: f32, g: f64, s: Rect, t: FloatRect);\n\n fn byval_rect_with_float(a: i32, b: i32, c: f32, d: i32, e: i32, f: i32, s: Rect);\n\n fn byval_rect_with_many_huge(a: Huge, b: Huge, c: Huge, d: Huge, e: Huge, f: Huge, g: Rect);\n\n fn split_rect(a: i32, b: i32, s: Rect);\n\n fn split_rect_floats(a: f32, b: f32, s: FloatRect);\n\n fn split_rect_with_floats(a: i32, b: i32, c: f32, d: i32, e: f32, f: i32, s: Rect);\n\n fn split_and_byval_rect(a: i32, b: i32, c: i32, s: Rect, t: Rect);\n\n fn split_ret_byval_struct(a: i32, b: i32, s: Rect) -> Rect;\n\n fn sret_byval_struct(a: i32, b: i32, c: i32, d: i32, s: Rect) -> BiggerRect;\n\n fn sret_split_struct(a: i32, b: i32, s: Rect) -> BiggerRect;\n\n fn huge_struct(s: Huge) -> Huge;\n\n fn float_point(p: FloatPoint) -> FloatPoint;\n\n fn float_one(f: FloatOne) -> FloatOne;\n\n fn int_odd(i: IntOdd) -> IntOdd;\n}\n\nfn main() {\n let s = Rect { a: 553, b: 554, c: 555, d: 556 };\n let t = BiggerRect { s: s, a: 27834, b: 7657 };\n let u = FloatRect { a: 3489, b: 3490, c: 8. };\n let v = Huge { a: 5647, b: 5648, c: 5649, d: 5650, e: 5651 };\n let p = FloatPoint { x: 5., y: -3. };\n let f1 = FloatOne { x: 7. 
};\n let i = IntOdd { a: 1, b: 2, c: 3 };\n\n unsafe {\n byval_rect(1, 2, 3, 4, 5, s);\n byval_many_rect(1, 2, 3, 4, 5, 6, s);\n byval_rect_floats(1., 2., 3., 4., 5., 6., 7., s, u);\n byval_rect_with_float(1, 2, 3.0, 4, 5, 6, s);\n byval_rect_with_many_huge(v, v, v, v, v, v, Rect {\n a: 123,\n b: 456,\n c: 789,\n d: 420\n });\n split_rect(1, 2, s);\n split_rect_floats(1., 2., u);\n split_rect_with_floats(1, 2, 3.0, 4, 5.0, 6, s);\n split_and_byval_rect(1, 2, 3, s, s);\n split_rect(1, 2, s);\n assert_eq!(huge_struct(v), v);\n assert_eq!(split_ret_byval_struct(1, 2, s), s);\n assert_eq!(sret_byval_struct(1, 2, 3, 4, s), t);\n assert_eq!(sret_split_struct(1, 2, s), t);\n assert_eq!(float_point(p), p);\n assert_eq!(int_odd(i), i);\n\n \/\/ MSVC\/GCC\/Clang are not consistent in the ABI of single-float aggregates.\n \/\/ x86_64: https:\/\/gcc.gnu.org\/bugzilla\/show_bug.cgi?id=82028\n \/\/ i686: https:\/\/gcc.gnu.org\/bugzilla\/show_bug.cgi?id=82041\n #[cfg(not(all(windows, target_env = \"gnu\")))]\n assert_eq!(float_one(f1), f1);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ICH: Add test case for call expressions.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for function and method call expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\nfn callee1(_x: u32, _y: i64) {}\nfn callee2(_x: u32, _y: i64) {}\n\n\n\/\/ Change Callee (Function) ----------------------------------------------------\n#[cfg(cfail1)]\npub fn change_callee_function() {\n callee1(1, 2)\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_callee_function() {\n callee2(1, 2)\n}\n\n\n\n\/\/ Change Argument (Function) --------------------------------------------------\n#[cfg(cfail1)]\npub fn change_argument_function() {\n callee1(1, 2)\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_argument_function() {\n callee1(1, 3)\n}\n\n\n\n\/\/ Change Callee Indirectly (Function) -----------------------------------------\nmod change_callee_indirectly_function {\n #[cfg(cfail1)]\n use super::callee1 as callee;\n #[cfg(not(cfail1))]\n use super::callee2 as callee;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn 
change_callee_indirectly_function() {\n callee(1, 2)\n }\n}\n\n\nstruct Struct;\nimpl Struct {\n fn method1(&self, _x: char, _y: bool) {}\n fn method2(&self, _x: char, _y: bool) {}\n}\n\n\/\/ Change Callee (Method) ------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_callee_method() {\n let s = Struct;\n s.method1('x', true);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_callee_method() {\n let s = Struct;\n s.method2('x', true);\n}\n\n\n\n\/\/ Change Argument (Method) ----------------------------------------------------\n#[cfg(cfail1)]\npub fn change_argument_method() {\n let s = Struct;\n s.method1('x', true);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_argument_method() {\n let s = Struct;\n s.method1('y', true);\n}\n\n\n\n\/\/ Change Callee (Method, UFCS) ------------------------------------------------\n#[cfg(cfail1)]\npub fn change_ufcs_callee_method() {\n let s = Struct;\n Struct::method1(&s, 'x', true);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_ufcs_callee_method() {\n let s = Struct;\n Struct::method2(&s, 'x', true);\n}\n\n\n\n\/\/ Change Argument (Method, UFCS) ----------------------------------------------\n#[cfg(cfail1)]\npub fn change_argument_method_ufcs() {\n let s = Struct;\n Struct::method1(&s, 'x', true);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_argument_method_ufcs() {\n let s = Struct;\n Struct::method1(&s, 'x', false);\n}\n\n\n\n\/\/ Change To UFCS --------------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_to_ufcs() {\n let s = Struct;\n s.method1('x', true);\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_to_ufcs() {\n let s = Struct;\n Struct::method1(&s, 'x', true);\n}\n\n\nstruct Struct2;\nimpl Struct2 {\n fn method1(&self, _x: char, _y: bool) {}\n}\n\n\/\/ Change UFCS Callee Indirectly -----------------------------------------------\nmod change_ufcs_callee_indirectly {\n #[cfg(cfail1)]\n use super::Struct as Struct;\n #[cfg(not(cfail1))]\n use super::Struct2 as Struct;\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn change_ufcs_callee_indirectly() {\n let s = Struct;\n Struct::method1(&s, 'q', false)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rustfmt.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The Rust compiler.\n\/\/!\n\/\/! # Note\n\/\/!\n\/\/! This API is completely unstable and subject to change.\n\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n#![deny(warnings)]\n\n#![feature(box_syntax)]\n\nuse std::any::Any;\nuse std::io::prelude::*;\nuse std::io::{self, Cursor};\nuse std::fs::File;\nuse std::path::Path;\nuse std::sync::mpsc;\n\nuse owning_ref::{ErasedBoxRef, OwningRef};\nuse ar::{Archive, Builder, Header};\nuse flate2::Compression;\nuse flate2::write::DeflateEncoder;\n\nuse syntax::symbol::Symbol;\nuse rustc::hir::def_id::LOCAL_CRATE;\nuse rustc::session::Session;\nuse rustc::session::config::{CrateType, OutputFilenames};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::maps::Providers;\nuse rustc::middle::cstore::EncodedMetadata;\nuse rustc::middle::cstore::MetadataLoader as MetadataLoaderTrait;\nuse rustc::dep_graph::{DepGraph, DepNode, DepKind};\nuse rustc_back::target::Target;\nuse link::{build_link_meta, out_filename};\n\npub trait TransCrate {\n type MetadataLoader: MetadataLoaderTrait;\n type OngoingCrateTranslation;\n type TranslatedCrate;\n\n fn metadata_loader() -> Box<MetadataLoaderTrait>;\n fn provide_local(_providers: &mut Providers);\n fn provide_extern(_providers: &mut Providers);\n fn trans_crate<'a, 'tcx>(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation;\n fn join_trans(\n trans: Self::OngoingCrateTranslation,\n sess: &Session,\n dep_graph: &DepGraph\n ) -> Self::TranslatedCrate;\n fn link_binary(sess: &Session, trans: &Self::TranslatedCrate, outputs: &OutputFilenames);\n fn dump_incremental_data(trans: &Self::TranslatedCrate);\n}\n\npub struct DummyTransCrate;\n\nimpl TransCrate for DummyTransCrate {\n type MetadataLoader = DummyMetadataLoader;\n type OngoingCrateTranslation = ();\n type TranslatedCrate = ();\n\n fn metadata_loader() -> Box<MetadataLoaderTrait> {\n box DummyMetadataLoader(())\n }\n\n fn provide_local(_providers: &mut Providers) {\n bug!(\"DummyTransCrate::provide_local\");\n }\n\n fn provide_extern(_providers: &mut Providers) {\n bug!(\"DummyTransCrate::provide_extern\");\n }\n\n fn trans_crate<'a, 'tcx>(\n _tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation {\n bug!(\"DummyTransCrate::trans_crate\");\n }\n\n fn join_trans(\n _trans: Self::OngoingCrateTranslation,\n _sess: &Session,\n _dep_graph: &DepGraph\n ) -> Self::TranslatedCrate {\n bug!(\"DummyTransCrate::join_trans\");\n }\n\n fn link_binary(_sess: &Session, _trans: &Self::TranslatedCrate, _outputs: &OutputFilenames) {\n bug!(\"DummyTransCrate::link_binary\");\n }\n\n fn dump_incremental_data(_trans: &Self::TranslatedCrate) {\n bug!(\"DummyTransCrate::dump_incremental_data\");\n }\n}\n\npub struct DummyMetadataLoader(());\n\nimpl MetadataLoaderTrait for DummyMetadataLoader {\n fn get_rlib_metadata(\n &self,\n _target: &Target,\n _filename: &Path\n ) 
-> Result<ErasedBoxRef<[u8]>, String> {\n bug!(\"DummyMetadataLoader::get_rlib_metadata\");\n }\n\n fn get_dylib_metadata(\n &self,\n _target: &Target,\n _filename: &Path\n ) -> Result<ErasedBoxRef<[u8]>, String> {\n bug!(\"DummyMetadataLoader::get_dylib_metadata\");\n }\n}\n\npub struct NoLlvmMetadataLoader;\n\nimpl MetadataLoaderTrait for NoLlvmMetadataLoader {\n fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<ErasedBoxRef<[u8]>, String> {\n let file = File::open(filename)\n .map_err(|e| format!(\"metadata file open err: {:?}\", e))?;\n let mut archive = Archive::new(file);\n\n while let Some(entry_result) = archive.next_entry() {\n let mut entry = entry_result\n .map_err(|e| format!(\"metadata section read err: {:?}\", e))?;\n if entry.header().identifier() == \"rust.metadata.bin\" {\n let mut buf = Vec::new();\n io::copy(&mut entry, &mut buf).unwrap();\n let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf).into();\n return Ok(buf.map_owner_box().erase_owner());\n }\n }\n\n Err(\"Couldnt find metadata section\".to_string())\n }\n\n fn get_dylib_metadata(\n &self,\n _target: &Target,\n _filename: &Path,\n ) -> Result<ErasedBoxRef<[u8]>, String> {\n \/\/ FIXME: Support reading dylibs from llvm enabled rustc\n self.get_rlib_metadata(_target, _filename)\n }\n}\n\npub struct MetadataOnlyTransCrate;\npub struct OngoingCrateTranslation {\n metadata: EncodedMetadata,\n metadata_version: Vec<u8>,\n crate_name: Symbol,\n}\npub struct TranslatedCrate(OngoingCrateTranslation);\n\nimpl MetadataOnlyTransCrate {\n #[allow(dead_code)]\n pub fn new() -> Self {\n MetadataOnlyTransCrate\n }\n}\n\nimpl TransCrate for MetadataOnlyTransCrate {\n type MetadataLoader = NoLlvmMetadataLoader;\n type OngoingCrateTranslation = OngoingCrateTranslation;\n type TranslatedCrate = TranslatedCrate;\n\n fn metadata_loader() -> Box<MetadataLoaderTrait> {\n box NoLlvmMetadataLoader\n }\n\n fn provide_local(_providers: &mut Providers) {}\n fn provide_extern(_providers: &mut Providers) {}\n\n fn trans_crate<'a, 'tcx>(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation {\n ::check_for_rustc_errors_attr(tcx);\n let _ = tcx.link_args(LOCAL_CRATE);\n let _ = tcx.native_libraries(LOCAL_CRATE);\n tcx.sess.abort_if_errors();\n\n let crate_hash = tcx.dep_graph\n .fingerprint_of(&DepNode::new_no_params(DepKind::Krate))\n .unwrap();\n let link_meta = build_link_meta(crate_hash);\n let exported_symbols = ::find_exported_symbols(tcx);\n let (metadata, _hashes) = tcx.encode_metadata(&link_meta, &exported_symbols);\n\n OngoingCrateTranslation {\n metadata: metadata,\n metadata_version: tcx.metadata_encoding_version().to_vec(),\n crate_name: tcx.crate_name(LOCAL_CRATE),\n }\n }\n\n fn join_trans(\n trans: Self::OngoingCrateTranslation,\n _sess: &Session,\n _dep_graph: &DepGraph,\n ) -> Self::TranslatedCrate {\n TranslatedCrate(trans)\n }\n\n fn link_binary(sess: &Session, trans: &Self::TranslatedCrate, outputs: &OutputFilenames) {\n for &crate_type in sess.opts.crate_types.iter() {\n if crate_type != CrateType::CrateTypeRlib && crate_type != CrateType::CrateTypeDylib {\n continue;\n }\n let output_name =\n out_filename(sess, crate_type, &outputs, &trans.0.crate_name.as_str());\n let mut compressed = trans.0.metadata_version.clone();\n let metadata = if crate_type == CrateType::CrateTypeDylib {\n DeflateEncoder::new(&mut compressed, Compression::Fast)\n .write_all(&trans.0.metadata.raw_data)\n .unwrap();\n &compressed\n } else {\n &trans.0.metadata.raw_data\n };\n 
let mut builder = Builder::new(File::create(&output_name).unwrap());\n let header = Header::new(\"rust.metadata.bin\".to_string(), metadata.len() as u64);\n builder.append(&header, Cursor::new(metadata)).unwrap();\n }\n\n if !sess.opts.crate_types.contains(&CrateType::CrateTypeRlib)\n && !sess.opts.crate_types.contains(&CrateType::CrateTypeDylib) {\n sess.fatal(\"Executables are not supported by the metadata-only backend.\");\n }\n }\n\n fn dump_incremental_data(_trans: &Self::TranslatedCrate) {}\n}\n<commit_msg>Fix error<commit_after>\/\/ Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! The Rust compiler.\n\/\/!\n\/\/! # Note\n\/\/!\n\/\/! This API is completely unstable and subject to change.\n\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n#![deny(warnings)]\n\n#![feature(box_syntax)]\n\nuse std::any::Any;\nuse std::io::prelude::*;\nuse std::io::{self, Cursor};\nuse std::fs::File;\nuse std::path::Path;\nuse std::sync::mpsc;\n\nuse owning_ref::{ErasedBoxRef, OwningRef};\nuse ar::{Archive, Builder, Header};\nuse flate2::Compression;\nuse flate2::write::DeflateEncoder;\n\nuse syntax::symbol::Symbol;\nuse rustc::hir::def_id::LOCAL_CRATE;\nuse rustc::session::Session;\nuse rustc::session::config::{CrateType, OutputFilenames};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::maps::Providers;\nuse rustc::middle::cstore::EncodedMetadata;\nuse rustc::middle::cstore::MetadataLoader as MetadataLoaderTrait;\nuse rustc::dep_graph::{DepGraph, DepNode, DepKind};\nuse rustc_back::target::Target;\nuse link::{build_link_meta, out_filename};\n\npub trait TransCrate {\n type MetadataLoader: MetadataLoaderTrait;\n type OngoingCrateTranslation;\n type TranslatedCrate;\n\n fn metadata_loader() -> Box<MetadataLoaderTrait>;\n fn provide_local(_providers: &mut Providers);\n fn provide_extern(_providers: &mut Providers);\n fn trans_crate<'a, 'tcx>(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation;\n fn join_trans(\n trans: Self::OngoingCrateTranslation,\n sess: &Session,\n dep_graph: &DepGraph\n ) -> Self::TranslatedCrate;\n fn link_binary(sess: &Session, trans: &Self::TranslatedCrate, outputs: &OutputFilenames);\n fn dump_incremental_data(trans: &Self::TranslatedCrate);\n}\n\npub struct DummyTransCrate;\n\nimpl TransCrate for DummyTransCrate {\n type MetadataLoader = DummyMetadataLoader;\n type OngoingCrateTranslation = ();\n type TranslatedCrate = ();\n\n fn metadata_loader() -> Box<MetadataLoaderTrait> {\n box DummyMetadataLoader(())\n }\n\n fn provide_local(_providers: &mut Providers) {\n bug!(\"DummyTransCrate::provide_local\");\n }\n\n fn provide_extern(_providers: &mut Providers) {\n bug!(\"DummyTransCrate::provide_extern\");\n }\n\n fn trans_crate<'a, 'tcx>(\n _tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation {\n bug!(\"DummyTransCrate::trans_crate\");\n }\n\n fn 
join_trans(\n _trans: Self::OngoingCrateTranslation,\n _sess: &Session,\n _dep_graph: &DepGraph\n ) -> Self::TranslatedCrate {\n bug!(\"DummyTransCrate::join_trans\");\n }\n\n fn link_binary(_sess: &Session, _trans: &Self::TranslatedCrate, _outputs: &OutputFilenames) {\n bug!(\"DummyTransCrate::link_binary\");\n }\n\n fn dump_incremental_data(_trans: &Self::TranslatedCrate) {\n bug!(\"DummyTransCrate::dump_incremental_data\");\n }\n}\n\npub struct DummyMetadataLoader(());\n\nimpl MetadataLoaderTrait for DummyMetadataLoader {\n fn get_rlib_metadata(\n &self,\n _target: &Target,\n _filename: &Path\n ) -> Result<ErasedBoxRef<[u8]>, String> {\n bug!(\"DummyMetadataLoader::get_rlib_metadata\");\n }\n\n fn get_dylib_metadata(\n &self,\n _target: &Target,\n _filename: &Path\n ) -> Result<ErasedBoxRef<[u8]>, String> {\n bug!(\"DummyMetadataLoader::get_dylib_metadata\");\n }\n}\n\npub struct NoLlvmMetadataLoader;\n\nimpl MetadataLoaderTrait for NoLlvmMetadataLoader {\n fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<ErasedBoxRef<[u8]>, String> {\n let file = File::open(filename)\n .map_err(|e| format!(\"metadata file open err: {:?}\", e))?;\n let mut archive = Archive::new(file);\n\n while let Some(entry_result) = archive.next_entry() {\n let mut entry = entry_result\n .map_err(|e| format!(\"metadata section read err: {:?}\", e))?;\n if entry.header().identifier() == \"rust.metadata.bin\" {\n let mut buf = Vec::new();\n io::copy(&mut entry, &mut buf).unwrap();\n let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf).into();\n return Ok(buf.map_owner_box().erase_owner());\n }\n }\n\n Err(\"Couldnt find metadata section\".to_string())\n }\n\n fn get_dylib_metadata(\n &self,\n _target: &Target,\n _filename: &Path,\n ) -> Result<ErasedBoxRef<[u8]>, String> {\n \/\/ FIXME: Support reading dylibs from llvm enabled rustc\n self.get_rlib_metadata(_target, _filename)\n }\n}\n\npub struct MetadataOnlyTransCrate;\npub struct OngoingCrateTranslation {\n metadata: EncodedMetadata,\n metadata_version: Vec<u8>,\n crate_name: Symbol,\n}\npub struct TranslatedCrate(OngoingCrateTranslation);\n\nimpl MetadataOnlyTransCrate {\n #[allow(dead_code)]\n pub fn new() -> Self {\n MetadataOnlyTransCrate\n }\n}\n\nimpl TransCrate for MetadataOnlyTransCrate {\n type MetadataLoader = NoLlvmMetadataLoader;\n type OngoingCrateTranslation = OngoingCrateTranslation;\n type TranslatedCrate = TranslatedCrate;\n\n fn metadata_loader() -> Box<MetadataLoaderTrait> {\n box NoLlvmMetadataLoader\n }\n\n fn provide_local(_providers: &mut Providers) {}\n fn provide_extern(_providers: &mut Providers) {}\n\n fn trans_crate<'a, 'tcx>(\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n _rx: mpsc::Receiver<Box<Any + Send>>\n ) -> Self::OngoingCrateTranslation {\n ::check_for_rustc_errors_attr(tcx);\n let _ = tcx.link_args(LOCAL_CRATE);\n let _ = tcx.native_libraries(LOCAL_CRATE);\n tcx.sess.abort_if_errors();\n\n let crate_hash = tcx.dep_graph\n .fingerprint_of(&DepNode::new_no_params(DepKind::Krate));\n let link_meta = build_link_meta(crate_hash);\n let exported_symbols = ::find_exported_symbols(tcx);\n let (metadata, _hashes) = tcx.encode_metadata(&link_meta, &exported_symbols);\n\n OngoingCrateTranslation {\n metadata: metadata,\n metadata_version: tcx.metadata_encoding_version().to_vec(),\n crate_name: tcx.crate_name(LOCAL_CRATE),\n }\n }\n\n fn join_trans(\n trans: Self::OngoingCrateTranslation,\n _sess: &Session,\n _dep_graph: &DepGraph,\n ) -> Self::TranslatedCrate {\n TranslatedCrate(trans)\n }\n\n fn link_binary(sess: &Session, trans: 
&Self::TranslatedCrate, outputs: &OutputFilenames) {\n for &crate_type in sess.opts.crate_types.iter() {\n if crate_type != CrateType::CrateTypeRlib && crate_type != CrateType::CrateTypeDylib {\n continue;\n }\n let output_name =\n out_filename(sess, crate_type, &outputs, &trans.0.crate_name.as_str());\n let mut compressed = trans.0.metadata_version.clone();\n let metadata = if crate_type == CrateType::CrateTypeDylib {\n DeflateEncoder::new(&mut compressed, Compression::Fast)\n .write_all(&trans.0.metadata.raw_data)\n .unwrap();\n &compressed\n } else {\n &trans.0.metadata.raw_data\n };\n let mut builder = Builder::new(File::create(&output_name).unwrap());\n let header = Header::new(\"rust.metadata.bin\".to_string(), metadata.len() as u64);\n builder.append(&header, Cursor::new(metadata)).unwrap();\n }\n\n if !sess.opts.crate_types.contains(&CrateType::CrateTypeRlib)\n && !sess.opts.crate_types.contains(&CrateType::CrateTypeDylib) {\n sess.fatal(\"Executables are not supported by the metadata-only backend.\");\n }\n }\n\n fn dump_incremental_data(_trans: &Self::TranslatedCrate) {}\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Method lookup: the secret sauce of Rust. See `README.md`.\n\nuse check::FnCtxt;\nuse hir::def::Def;\nuse hir::def_id::DefId;\nuse rustc::ty::subst::Substs;\nuse rustc::traits;\nuse rustc::ty::{self, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable};\nuse rustc::ty::subst::Subst;\nuse rustc::infer::{self, InferOk};\n\nuse syntax::ast;\nuse syntax_pos::Span;\n\nuse rustc::hir;\n\npub use self::MethodError::*;\npub use self::CandidateSource::*;\n\npub use self::suggest::AllTraitsVec;\n\nmod confirm;\npub mod probe;\nmod suggest;\n\nuse self::probe::{IsSuggestion, ProbeScope};\n\n#[derive(Clone, Copy, Debug)]\npub struct MethodCallee<'tcx> {\n \/\/\/ Impl method ID, for inherent methods, or trait method ID, otherwise.\n pub def_id: DefId,\n pub substs: &'tcx Substs<'tcx>,\n\n \/\/\/ Instantiated method signature, i.e. 
it has been\n \/\/\/ substituted, normalized, and has had late-bound\n \/\/\/ lifetimes replaced with inference variables.\n pub sig: ty::FnSig<'tcx>,\n}\n\npub enum MethodError<'tcx> {\n \/\/ Did not find an applicable method, but we did find various near-misses that may work.\n NoMatch(NoMatchData<'tcx>),\n\n \/\/ Multiple methods might apply.\n Ambiguity(Vec<CandidateSource>),\n\n \/\/ Using a `Fn`\/`FnMut`\/etc method on a raw closure type before we have inferred its kind.\n ClosureAmbiguity(\/\/ DefId of fn trait\n DefId),\n\n \/\/ Found an applicable method, but it is not visible.\n PrivateMatch(Def),\n}\n\n\/\/ Contains a list of static methods that may apply, a list of unsatisfied trait predicates which\n\/\/ could lead to matches if satisfied, and a list of not-in-scope traits which may work.\npub struct NoMatchData<'tcx> {\n pub static_candidates: Vec<CandidateSource>,\n pub unsatisfied_predicates: Vec<TraitRef<'tcx>>,\n pub out_of_scope_traits: Vec<DefId>,\n pub mode: probe::Mode,\n}\n\nimpl<'tcx> NoMatchData<'tcx> {\n pub fn new(static_candidates: Vec<CandidateSource>,\n unsatisfied_predicates: Vec<TraitRef<'tcx>>,\n out_of_scope_traits: Vec<DefId>,\n mode: probe::Mode)\n -> Self {\n NoMatchData {\n static_candidates: static_candidates,\n unsatisfied_predicates: unsatisfied_predicates,\n out_of_scope_traits: out_of_scope_traits,\n mode: mode,\n }\n }\n}\n\n\/\/ A pared down enum describing just the places from which a method\n\/\/ candidate can arise. Used for error reporting only.\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\npub enum CandidateSource {\n ImplSource(DefId),\n TraitSource(\/\/ trait id\n DefId),\n}\n\nimpl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {\n \/\/\/ Determines whether the type `self_ty` supports a method name `method_name` or not.\n pub fn method_exists(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n call_expr_id: ast::NodeId,\n allow_private: bool)\n -> bool {\n let mode = probe::Mode::MethodCall;\n match self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, call_expr_id, ProbeScope::TraitsInScope) {\n Ok(..) => true,\n Err(NoMatch(..)) => false,\n Err(Ambiguity(..)) => true,\n Err(ClosureAmbiguity(..)) => true,\n Err(PrivateMatch(..)) => allow_private,\n }\n }\n\n \/\/\/ Performs method lookup. If lookup is successful, it will return the callee\n \/\/\/ and store an appropriate adjustment for the self-expr. 
In some cases it may\n \/\/\/ report an error (e.g., invoking the `drop` method).\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ Given a method call like `foo.bar::<T1,...Tn>(...)`:\n \/\/\/\n \/\/\/ * `fcx`: the surrounding `FnCtxt` (!)\n \/\/\/ * `span`: the span for the method call\n \/\/\/ * `method_name`: the name of the method being called (`bar`)\n \/\/\/ * `self_ty`: the (unadjusted) type of the self expression (`foo`)\n \/\/\/ * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`)\n \/\/\/ * `self_expr`: the self expression (`foo`)\n pub fn lookup_method(&self,\n self_ty: ty::Ty<'tcx>,\n segment: &hir::PathSegment,\n span: Span,\n call_expr: &'gcx hir::Expr,\n self_expr: &'gcx hir::Expr)\n -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {\n debug!(\"lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})\",\n segment.name,\n self_ty,\n call_expr,\n self_expr);\n\n let pick = self.lookup_probe(\n span,\n segment.name,\n self_ty,\n call_expr,\n ProbeScope::TraitsInScope\n )?;\n\n if let Some(import_id) = pick.import_id {\n let import_def_id = self.tcx.hir.local_def_id(import_id);\n debug!(\"used_trait_import: {:?}\", import_def_id);\n self.tables.borrow_mut().used_trait_imports.insert(import_def_id);\n }\n\n self.tcx.check_stability(pick.item.def_id, call_expr.id, span);\n\n let result = self.confirm_method(span,\n self_expr,\n call_expr,\n self_ty,\n pick.clone(),\n segment);\n\n if result.rerun {\n \/\/ We probe again, taking all traits into account (not only those in scope).\n if let Ok(new_pick) = self.lookup_probe(span,\n segment.name,\n self_ty,\n call_expr,\n ProbeScope::AllTraits) {\n \/\/ If we find a different result, the caller probably forgot to import the trait.\n \/\/ We span an error with an appropriate help message.\n if new_pick != pick {\n let error = MethodError::NoMatch(\n NoMatchData::new(Vec::new(),\n Vec::new(),\n vec![new_pick.item.container.id()],\n probe::Mode::MethodCall)\n );\n self.report_method_error(span,\n self_ty,\n segment.name,\n Some(self_expr),\n error,\n None);\n }\n }\n }\n\n Ok(result.callee)\n }\n\n fn lookup_probe(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n call_expr: &'gcx hir::Expr,\n scope: ProbeScope)\n -> probe::PickResult<'tcx> {\n let mode = probe::Mode::MethodCall;\n let self_ty = self.resolve_type_vars_if_possible(&self_ty);\n self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, call_expr.id, scope)\n }\n\n \/\/\/ `lookup_method_in_trait` is used for overloaded operators.\n \/\/\/ It does a very narrow slice of what the normal probe\/confirm path does.\n \/\/\/ In particular, it doesn't really do any probing: it simply constructs\n \/\/\/ an obligation for aparticular trait with the given self-type and checks\n \/\/\/ whether that trait is implemented.\n \/\/\/\n \/\/\/ FIXME(#18741) -- It seems likely that we can consolidate some of this\n \/\/\/ code with the other method-lookup code. 
In particular, the second half\n \/\/\/ of this method is basically the same as confirmation.\n pub fn lookup_method_in_trait(&self,\n span: Span,\n m_name: ast::Name,\n trait_def_id: DefId,\n self_ty: ty::Ty<'tcx>,\n opt_input_types: Option<&[ty::Ty<'tcx>]>)\n -> Option<InferOk<'tcx, MethodCallee<'tcx>>> {\n debug!(\"lookup_in_trait_adjusted(self_ty={:?}, \\\n m_name={}, trait_def_id={:?})\",\n self_ty,\n m_name,\n trait_def_id);\n\n \/\/ Construct a trait-reference `self_ty : Trait<input_tys>`\n let substs = Substs::for_item(self.tcx,\n trait_def_id,\n |def, _| self.region_var_for_def(span, def),\n |def, substs| {\n if def.index == 0 {\n self_ty\n } else if let Some(ref input_types) = opt_input_types {\n input_types[def.index as usize - 1]\n } else {\n self.type_var_for_def(span, def, substs)\n }\n });\n\n let trait_ref = ty::TraitRef::new(trait_def_id, substs);\n\n \/\/ Construct an obligation\n let poly_trait_ref = trait_ref.to_poly_trait_ref();\n let obligation =\n traits::Obligation::misc(span,\n self.body_id,\n self.param_env,\n poly_trait_ref.to_predicate());\n\n \/\/ Now we want to know if this can be matched\n let mut selcx = traits::SelectionContext::new(self);\n if !selcx.evaluate_obligation(&obligation) {\n debug!(\"--> Cannot match obligation\");\n return None; \/\/ Cannot be matched, no such method resolution is possible.\n }\n\n \/\/ Trait must have a method named `m_name` and it should not have\n \/\/ type parameters or early-bound regions.\n let tcx = self.tcx;\n let method_item = self.associated_item(trait_def_id, m_name).unwrap();\n let def_id = method_item.def_id;\n let generics = tcx.generics_of(def_id);\n assert_eq!(generics.types.len(), 0);\n assert_eq!(generics.regions.len(), 0);\n\n debug!(\"lookup_in_trait_adjusted: method_item={:?}\", method_item);\n let mut obligations = vec![];\n\n \/\/ Instantiate late-bound regions and substitute the trait\n \/\/ parameters into the method type to get the actual method type.\n \/\/\n \/\/ NB: Instantiate late-bound regions first so that\n \/\/ `instantiate_type_scheme` can normalize associated types that\n \/\/ may reference those regions.\n let fn_sig = tcx.fn_sig(def_id);\n let fn_sig = self.replace_late_bound_regions_with_fresh_var(span,\n infer::FnCall,\n &fn_sig).0;\n let fn_sig = fn_sig.subst(self.tcx, substs);\n let fn_sig = match self.normalize_associated_types_in_as_infer_ok(span, &fn_sig) {\n InferOk { value, obligations: o } => {\n obligations.extend(o);\n value\n }\n };\n\n \/\/ Register obligations for the parameters. This will include the\n \/\/ `Self` parameter, which in turn has a bound of the main trait,\n \/\/ so this also effectively registers `obligation` as well. 
(We\n \/\/ used to register `obligation` explicitly, but that resulted in\n \/\/ double error messages being reported.)\n \/\/\n \/\/ Note that as the method comes from a trait, it should not have\n \/\/ any late-bound regions appearing in its bounds.\n let bounds = self.tcx.predicates_of(def_id).instantiate(self.tcx, substs);\n let bounds = match self.normalize_associated_types_in_as_infer_ok(span, &bounds) {\n InferOk { value, obligations: o } => {\n obligations.extend(o);\n value\n }\n };\n assert!(!bounds.has_escaping_regions());\n\n let cause = traits::ObligationCause::misc(span, self.body_id);\n obligations.extend(traits::predicates_for_generics(cause.clone(),\n self.param_env,\n &bounds));\n\n \/\/ Also add an obligation for the method type being well-formed.\n let method_ty = tcx.mk_fn_ptr(ty::Binder(fn_sig));\n debug!(\"lookup_in_trait_adjusted: matched method method_ty={:?} obligation={:?}\",\n method_ty,\n obligation);\n obligations.push(traits::Obligation::new(cause,\n self.param_env,\n ty::Predicate::WellFormed(method_ty)));\n\n let callee = MethodCallee {\n def_id: def_id,\n substs: trait_ref.substs,\n sig: fn_sig,\n };\n\n debug!(\"callee = {:?}\", callee);\n\n Some(InferOk {\n obligations,\n value: callee\n })\n }\n\n pub fn resolve_ufcs(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n expr_id: ast::NodeId)\n -> Result<Def, MethodError<'tcx>> {\n let mode = probe::Mode::Path;\n let pick = self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, expr_id, ProbeScope::TraitsInScope)?;\n\n if let Some(import_id) = pick.import_id {\n let import_def_id = self.tcx.hir.local_def_id(import_id);\n debug!(\"used_trait_import: {:?}\", import_def_id);\n self.tables.borrow_mut().used_trait_imports.insert(import_def_id);\n }\n\n let def = pick.item.def();\n self.tcx.check_stability(def.def_id(), expr_id, span);\n\n Ok(def)\n }\n\n \/\/\/ Find item with name `item_name` defined in impl\/trait `def_id`\n \/\/\/ and return it, or `None`, if no such item was defined there.\n pub fn associated_item(&self, def_id: DefId, item_name: ast::Name)\n -> Option<ty::AssociatedItem> {\n let ident = self.tcx.adjust(item_name, def_id, self.body_id).0;\n self.tcx.associated_items(def_id).find(|item| item.name.to_ident() == ident)\n }\n}\n<commit_msg>Handle ambiguous cases<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Method lookup: the secret sauce of Rust. 
See `README.md`.\n\nuse check::FnCtxt;\nuse hir::def::Def;\nuse hir::def_id::DefId;\nuse rustc::ty::subst::Substs;\nuse rustc::traits;\nuse rustc::ty::{self, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable};\nuse rustc::ty::subst::Subst;\nuse rustc::infer::{self, InferOk};\n\nuse syntax::ast;\nuse syntax_pos::Span;\n\nuse rustc::hir;\n\npub use self::MethodError::*;\npub use self::CandidateSource::*;\n\npub use self::suggest::AllTraitsVec;\n\nmod confirm;\npub mod probe;\nmod suggest;\n\nuse self::probe::{IsSuggestion, ProbeScope};\n\n#[derive(Clone, Copy, Debug)]\npub struct MethodCallee<'tcx> {\n \/\/\/ Impl method ID, for inherent methods, or trait method ID, otherwise.\n pub def_id: DefId,\n pub substs: &'tcx Substs<'tcx>,\n\n \/\/\/ Instantiated method signature, i.e. it has been\n \/\/\/ substituted, normalized, and has had late-bound\n \/\/\/ lifetimes replaced with inference variables.\n pub sig: ty::FnSig<'tcx>,\n}\n\npub enum MethodError<'tcx> {\n \/\/ Did not find an applicable method, but we did find various near-misses that may work.\n NoMatch(NoMatchData<'tcx>),\n\n \/\/ Multiple methods might apply.\n Ambiguity(Vec<CandidateSource>),\n\n \/\/ Using a `Fn`\/`FnMut`\/etc method on a raw closure type before we have inferred its kind.\n ClosureAmbiguity(\/\/ DefId of fn trait\n DefId),\n\n \/\/ Found an applicable method, but it is not visible.\n PrivateMatch(Def),\n}\n\n\/\/ Contains a list of static methods that may apply, a list of unsatisfied trait predicates which\n\/\/ could lead to matches if satisfied, and a list of not-in-scope traits which may work.\npub struct NoMatchData<'tcx> {\n pub static_candidates: Vec<CandidateSource>,\n pub unsatisfied_predicates: Vec<TraitRef<'tcx>>,\n pub out_of_scope_traits: Vec<DefId>,\n pub mode: probe::Mode,\n}\n\nimpl<'tcx> NoMatchData<'tcx> {\n pub fn new(static_candidates: Vec<CandidateSource>,\n unsatisfied_predicates: Vec<TraitRef<'tcx>>,\n out_of_scope_traits: Vec<DefId>,\n mode: probe::Mode)\n -> Self {\n NoMatchData {\n static_candidates: static_candidates,\n unsatisfied_predicates: unsatisfied_predicates,\n out_of_scope_traits: out_of_scope_traits,\n mode: mode,\n }\n }\n}\n\n\/\/ A pared down enum describing just the places from which a method\n\/\/ candidate can arise. Used for error reporting only.\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\npub enum CandidateSource {\n ImplSource(DefId),\n TraitSource(\/\/ trait id\n DefId),\n}\n\nimpl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {\n \/\/\/ Determines whether the type `self_ty` supports a method name `method_name` or not.\n pub fn method_exists(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n call_expr_id: ast::NodeId,\n allow_private: bool)\n -> bool {\n let mode = probe::Mode::MethodCall;\n match self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, call_expr_id, ProbeScope::TraitsInScope) {\n Ok(..) => true,\n Err(NoMatch(..)) => false,\n Err(Ambiguity(..)) => true,\n Err(ClosureAmbiguity(..)) => true,\n Err(PrivateMatch(..)) => allow_private,\n }\n }\n\n \/\/\/ Performs method lookup. If lookup is successful, it will return the callee\n \/\/\/ and store an appropriate adjustment for the self-expr. 
In some cases it may\n \/\/\/ report an error (e.g., invoking the `drop` method).\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ Given a method call like `foo.bar::<T1,...Tn>(...)`:\n \/\/\/\n \/\/\/ * `fcx`: the surrounding `FnCtxt` (!)\n \/\/\/ * `span`: the span for the method call\n \/\/\/ * `method_name`: the name of the method being called (`bar`)\n \/\/\/ * `self_ty`: the (unadjusted) type of the self expression (`foo`)\n \/\/\/ * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`)\n \/\/\/ * `self_expr`: the self expression (`foo`)\n pub fn lookup_method(&self,\n self_ty: ty::Ty<'tcx>,\n segment: &hir::PathSegment,\n span: Span,\n call_expr: &'gcx hir::Expr,\n self_expr: &'gcx hir::Expr)\n -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {\n debug!(\"lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})\",\n segment.name,\n self_ty,\n call_expr,\n self_expr);\n\n let pick = self.lookup_probe(\n span,\n segment.name,\n self_ty,\n call_expr,\n ProbeScope::TraitsInScope\n )?;\n\n if let Some(import_id) = pick.import_id {\n let import_def_id = self.tcx.hir.local_def_id(import_id);\n debug!(\"used_trait_import: {:?}\", import_def_id);\n self.tables.borrow_mut().used_trait_imports.insert(import_def_id);\n }\n\n self.tcx.check_stability(pick.item.def_id, call_expr.id, span);\n\n let result = self.confirm_method(span,\n self_expr,\n call_expr,\n self_ty,\n pick.clone(),\n segment);\n\n if result.rerun {\n \/\/ We probe again, taking all traits into account (not only those in scope).\n let candidates =\n match self.lookup_probe(span,\n segment.name,\n self_ty,\n call_expr,\n ProbeScope::AllTraits) {\n Ok(ref new_pick) if *new_pick != pick => vec![new_pick.item.container.id()],\n Err(MethodError::Ambiguity(ref sources)) => {\n sources.iter()\n .filter_map(|source| {\n match *source {\n \/\/ Note: this cannot come from an inherent impl,\n \/\/ because the first probe succeeded.\n ImplSource(def) => self.tcx.trait_id_of_impl(def),\n TraitSource(_) => None,\n }\n })\n .collect()\n }\n _ => Vec::new(),\n };\n\n \/\/ If we find a different result, the caller probably forgot to import a trait.\n \/\/ We span an error with an appropriate help message.\n if !candidates.is_empty() {\n let error = MethodError::NoMatch(\n NoMatchData::new(Vec::new(), Vec::new(), candidates, probe::Mode::MethodCall)\n );\n self.report_method_error(span,\n self_ty,\n segment.name,\n Some(self_expr),\n error,\n None);\n }\n }\n\n Ok(result.callee)\n }\n\n fn lookup_probe(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n call_expr: &'gcx hir::Expr,\n scope: ProbeScope)\n -> probe::PickResult<'tcx> {\n let mode = probe::Mode::MethodCall;\n let self_ty = self.resolve_type_vars_if_possible(&self_ty);\n self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, call_expr.id, scope)\n }\n\n \/\/\/ `lookup_method_in_trait` is used for overloaded operators.\n \/\/\/ It does a very narrow slice of what the normal probe\/confirm path does.\n \/\/\/ In particular, it doesn't really do any probing: it simply constructs\n \/\/\/ an obligation for aparticular trait with the given self-type and checks\n \/\/\/ whether that trait is implemented.\n \/\/\/\n \/\/\/ FIXME(#18741) -- It seems likely that we can consolidate some of this\n \/\/\/ code with the other method-lookup code. 
In particular, the second half\n \/\/\/ of this method is basically the same as confirmation.\n pub fn lookup_method_in_trait(&self,\n span: Span,\n m_name: ast::Name,\n trait_def_id: DefId,\n self_ty: ty::Ty<'tcx>,\n opt_input_types: Option<&[ty::Ty<'tcx>]>)\n -> Option<InferOk<'tcx, MethodCallee<'tcx>>> {\n debug!(\"lookup_in_trait_adjusted(self_ty={:?}, \\\n m_name={}, trait_def_id={:?})\",\n self_ty,\n m_name,\n trait_def_id);\n\n \/\/ Construct a trait-reference `self_ty : Trait<input_tys>`\n let substs = Substs::for_item(self.tcx,\n trait_def_id,\n |def, _| self.region_var_for_def(span, def),\n |def, substs| {\n if def.index == 0 {\n self_ty\n } else if let Some(ref input_types) = opt_input_types {\n input_types[def.index as usize - 1]\n } else {\n self.type_var_for_def(span, def, substs)\n }\n });\n\n let trait_ref = ty::TraitRef::new(trait_def_id, substs);\n\n \/\/ Construct an obligation\n let poly_trait_ref = trait_ref.to_poly_trait_ref();\n let obligation =\n traits::Obligation::misc(span,\n self.body_id,\n self.param_env,\n poly_trait_ref.to_predicate());\n\n \/\/ Now we want to know if this can be matched\n let mut selcx = traits::SelectionContext::new(self);\n if !selcx.evaluate_obligation(&obligation) {\n debug!(\"--> Cannot match obligation\");\n return None; \/\/ Cannot be matched, no such method resolution is possible.\n }\n\n \/\/ Trait must have a method named `m_name` and it should not have\n \/\/ type parameters or early-bound regions.\n let tcx = self.tcx;\n let method_item = self.associated_item(trait_def_id, m_name).unwrap();\n let def_id = method_item.def_id;\n let generics = tcx.generics_of(def_id);\n assert_eq!(generics.types.len(), 0);\n assert_eq!(generics.regions.len(), 0);\n\n debug!(\"lookup_in_trait_adjusted: method_item={:?}\", method_item);\n let mut obligations = vec![];\n\n \/\/ Instantiate late-bound regions and substitute the trait\n \/\/ parameters into the method type to get the actual method type.\n \/\/\n \/\/ NB: Instantiate late-bound regions first so that\n \/\/ `instantiate_type_scheme` can normalize associated types that\n \/\/ may reference those regions.\n let fn_sig = tcx.fn_sig(def_id);\n let fn_sig = self.replace_late_bound_regions_with_fresh_var(span,\n infer::FnCall,\n &fn_sig).0;\n let fn_sig = fn_sig.subst(self.tcx, substs);\n let fn_sig = match self.normalize_associated_types_in_as_infer_ok(span, &fn_sig) {\n InferOk { value, obligations: o } => {\n obligations.extend(o);\n value\n }\n };\n\n \/\/ Register obligations for the parameters. This will include the\n \/\/ `Self` parameter, which in turn has a bound of the main trait,\n \/\/ so this also effectively registers `obligation` as well. 
(We\n \/\/ used to register `obligation` explicitly, but that resulted in\n \/\/ double error messages being reported.)\n \/\/\n \/\/ Note that as the method comes from a trait, it should not have\n \/\/ any late-bound regions appearing in its bounds.\n let bounds = self.tcx.predicates_of(def_id).instantiate(self.tcx, substs);\n let bounds = match self.normalize_associated_types_in_as_infer_ok(span, &bounds) {\n InferOk { value, obligations: o } => {\n obligations.extend(o);\n value\n }\n };\n assert!(!bounds.has_escaping_regions());\n\n let cause = traits::ObligationCause::misc(span, self.body_id);\n obligations.extend(traits::predicates_for_generics(cause.clone(),\n self.param_env,\n &bounds));\n\n \/\/ Also add an obligation for the method type being well-formed.\n let method_ty = tcx.mk_fn_ptr(ty::Binder(fn_sig));\n debug!(\"lookup_in_trait_adjusted: matched method method_ty={:?} obligation={:?}\",\n method_ty,\n obligation);\n obligations.push(traits::Obligation::new(cause,\n self.param_env,\n ty::Predicate::WellFormed(method_ty)));\n\n let callee = MethodCallee {\n def_id: def_id,\n substs: trait_ref.substs,\n sig: fn_sig,\n };\n\n debug!(\"callee = {:?}\", callee);\n\n Some(InferOk {\n obligations,\n value: callee\n })\n }\n\n pub fn resolve_ufcs(&self,\n span: Span,\n method_name: ast::Name,\n self_ty: ty::Ty<'tcx>,\n expr_id: ast::NodeId)\n -> Result<Def, MethodError<'tcx>> {\n let mode = probe::Mode::Path;\n let pick = self.probe_for_name(span, mode, method_name, IsSuggestion(false),\n self_ty, expr_id, ProbeScope::TraitsInScope)?;\n\n if let Some(import_id) = pick.import_id {\n let import_def_id = self.tcx.hir.local_def_id(import_id);\n debug!(\"used_trait_import: {:?}\", import_def_id);\n self.tables.borrow_mut().used_trait_imports.insert(import_def_id);\n }\n\n let def = pick.item.def();\n self.tcx.check_stability(def.def_id(), expr_id, span);\n\n Ok(def)\n }\n\n \/\/\/ Find item with name `item_name` defined in impl\/trait `def_id`\n \/\/\/ and return it, or `None`, if no such item was defined there.\n pub fn associated_item(&self, def_id: DefId, item_name: ast::Name)\n -> Option<ty::AssociatedItem> {\n let ident = self.tcx.adjust(item_name, def_id, self.body_id).0;\n self.tcx.associated_items(def_id).find(|item| item.name.to_ident() == ident)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat: trait default behave<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added unit tests for three more testnet failures, the latter of which I'm still working on<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create functions for getting config filenames<commit_after>\/\/! System functions for `budget`\n\n\/\/\/ Acquire the home directory's path\nfn get_homedir_string() -> Option<String> {\n use std::env;\n \/\/ Attempt to retrieve the home directory, if we fail, return None\n match env::home_dir() {\n \/\/ Take the homedir path, convert it to a string, and append the filename\n Some(path) => Some(String::from(\n path.to_string_lossy().into_owned())),\n None => None\n }\n}\n\n\/\/\/ Acquire the path to the config file\npub fn get_config_file_name(file_name: &str) -> Option<String> {\n \/\/ TODO: Reimplement this to not suck\n\n \/\/ Try getting the home directory string, and concatenate it with the filename\n match get_homedir_string() {\n Some(path) => Some(path + \"\/\" + &file_name),\n None => None\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 
2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::cell::RefCell;\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::RandomState;\nuse std::iter::FromIterator;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::rc::Rc;\nuse std::vec::Vec;\n\nuse uuid::Uuid;\n\nuse super::super::engine::{Filesystem, HasName, HasUuid, Pool};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::structures::Table;\nuse super::super::types::{FilesystemUuid, PoolUuid, RenameAction, Redundancy};\n\nuse super::blockdev::SimDev;\nuse super::filesystem::SimFilesystem;\nuse super::randomization::Randomizer;\n\n#[derive(Debug)]\npub struct SimPool {\n name: String,\n pool_uuid: PoolUuid,\n pub block_devs: HashMap<PathBuf, SimDev>,\n pub filesystems: Table<SimFilesystem>,\n redundancy: Redundancy,\n rdm: Rc<RefCell<Randomizer>>,\n}\n\nimpl SimPool {\n pub fn new(rdm: Rc<RefCell<Randomizer>>,\n name: &str,\n paths: &[&Path],\n redundancy: Redundancy)\n -> SimPool {\n\n let devices: HashSet<_, RandomState> = HashSet::from_iter(paths);\n let device_pairs = devices\n .iter()\n .map(|p| (p.to_path_buf(), SimDev::new(rdm.clone(), p)));\n let new_pool = SimPool {\n name: name.to_owned(),\n pool_uuid: Uuid::new_v4(),\n block_devs: HashMap::from_iter(device_pairs),\n filesystems: Table::default(),\n redundancy: redundancy,\n rdm: rdm.clone(),\n };\n\n new_pool\n }\n\n pub fn check(&mut self) -> () {}\n}\n\nimpl Pool for SimPool {\n fn add_blockdevs(&mut self, paths: &[&Path], _force: bool) -> EngineResult<Vec<PathBuf>> {\n let rdm = self.rdm.clone();\n let devices: HashSet<_, RandomState> = HashSet::from_iter(paths);\n let device_pairs = devices\n .iter()\n .map(|p| (p.to_path_buf(), SimDev::new(rdm.clone(), p)));\n self.block_devs.extend(device_pairs);\n Ok(devices.iter().map(|d| d.to_path_buf()).collect())\n }\n\n fn destroy_filesystems<'a, 'b>(&'a mut self,\n fs_uuids: &[&'b FilesystemUuid])\n -> EngineResult<Vec<&'b FilesystemUuid>> {\n destroy_filesystems!{self; fs_uuids}\n }\n\n fn destroy(self) -> EngineResult<()> {\n \/\/ Nothing to do here.\n Ok(())\n }\n\n fn create_filesystems<'a, 'b>(&'a mut self,\n specs: &[&'b str])\n -> EngineResult<Vec<(&'b str, FilesystemUuid)>> {\n let names: HashSet<_, RandomState> = HashSet::from_iter(specs);\n for name in names.iter() {\n if self.filesystems.contains_name(name) {\n return Err(EngineError::Engine(ErrorEnum::AlreadyExists, name.to_string()));\n }\n }\n\n let mut result = Vec::new();\n for name in names.iter() {\n let uuid = Uuid::new_v4();\n let new_filesystem = SimFilesystem::new(uuid, name);\n self.filesystems.insert(new_filesystem);\n result.push((**name, uuid));\n }\n\n Ok(result)\n }\n\n fn rename_filesystem(&mut self,\n uuid: &FilesystemUuid,\n new_name: &str)\n -> EngineResult<RenameAction> {\n rename_filesystem!{self; uuid; new_name}\n }\n\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn get_filesystem(&mut self, uuid: &FilesystemUuid) -> Option<&mut Filesystem> {\n get_filesystem!(self; uuid)\n }\n}\n\nimpl HasUuid for SimPool {\n fn uuid(&self) -> &PoolUuid {\n &self.pool_uuid\n }\n}\n\nimpl HasName for SimPool {\n fn name(&self) -> &str {\n &self.name\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n\n use std::path::Path;\n\n use uuid::Uuid;\n\n use engine::Engine;\n use engine::ErrorEnum;\n use engine::EngineError;\n use engine::RenameAction;\n\n use super::super::SimEngine;\n\n #[test]\n \/\/\/ 
Renaming a filesystem on an empty pool always works\n fn rename_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.rename_filesystem(&Uuid::new_v4(), \"new_name\") {\n Ok(RenameAction::NoSource) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming a filesystem to another filesystem should work if new name not taken\n fn rename_happens() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let infos = pool.create_filesystems(&[\"old_name\"]).unwrap();\n assert!(match pool.rename_filesystem(&infos[0].1, \"new_name\") {\n Ok(RenameAction::Renamed) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming a filesystem to another filesystem should fail if new name taken\n fn rename_fails() {\n let old_name = \"old_name\";\n let new_name = \"new_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let results = pool.create_filesystems(&[old_name, new_name]).unwrap();\n let old_uuid = results.iter().find(|x| x.0 == old_name).unwrap().1;\n assert!(match pool.rename_filesystem(&old_uuid, new_name) {\n Err(EngineError::Engine(ErrorEnum::AlreadyExists, _)) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming should succeed if old_name absent, new present\n fn rename_no_op() {\n let new_name = \"new_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n pool.create_filesystems(&[new_name]).unwrap();\n assert!(match pool.rename_filesystem(&Uuid::new_v4(), new_name) {\n Ok(RenameAction::NoSource) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Removing an empty list of filesystems should always succeed\n fn destroy_fs_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.destroy_filesystems(&[]) {\n Ok(names) => names.is_empty(),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Removing a non-empty list of filesystems should succeed on empty pool\n fn destroy_fs_some() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(pool.destroy_filesystems(&[&Uuid::new_v4()]).is_ok());\n }\n\n #[test]\n \/\/\/ Removing a non-empty list of filesystems should succeed on any pool\n fn destroy_fs_any() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let fs_results = pool.create_filesystems(&[\"fs_name\"]).unwrap();\n let fs_uuid = fs_results[0].1;\n assert!(match pool.destroy_filesystems(&[&fs_uuid, &Uuid::new_v4()]) {\n Ok(filesystems) => filesystems == vec![&fs_uuid],\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Creating an empty list of filesystems should succeed, always\n fn create_fs_none() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[]) {\n Ok(names) => 
names.is_empty(),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Creating a non-empty list of filesystems always succeeds.\n fn create_fs_some() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[\"name\"]) {\n Ok(names) => (names.len() == 1) & (names[0].0 == \"name\"),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Creating a an already existing filesystem fails.\n fn create_fs_conflict() {\n let fs_name = \"fs_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n pool.create_filesystems(&[fs_name]).unwrap();\n assert!(match pool.create_filesystems(&[fs_name]) {\n Err(EngineError::Engine(ErrorEnum::AlreadyExists, _)) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Requesting identical filesystems succeeds.\n fn create_fs_dups() {\n let fs_name = \"fs_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[fs_name, fs_name]) {\n Ok(names) => (names.len() == 1) & (names[0].0 == fs_name),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Adding a list of devices to an empty pool should yield list.\n fn add_device_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let devices = [Path::new(\"\/s\/a\"), Path::new(\"\/s\/b\")];\n assert!(match pool.add_blockdevs(&devices, false) {\n Ok(devs) => devs.len() == devices.len(),\n _ => false,\n });\n }\n}\n<commit_msg>Remove unnecessary let-statement<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\nuse std::cell::RefCell;\nuse std::collections::{HashMap, HashSet};\nuse std::collections::hash_map::RandomState;\nuse std::iter::FromIterator;\nuse std::path::Path;\nuse std::path::PathBuf;\nuse std::rc::Rc;\nuse std::vec::Vec;\n\nuse uuid::Uuid;\n\nuse super::super::engine::{Filesystem, HasName, HasUuid, Pool};\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::structures::Table;\nuse super::super::types::{FilesystemUuid, PoolUuid, RenameAction, Redundancy};\n\nuse super::blockdev::SimDev;\nuse super::filesystem::SimFilesystem;\nuse super::randomization::Randomizer;\n\n#[derive(Debug)]\npub struct SimPool {\n name: String,\n pool_uuid: PoolUuid,\n pub block_devs: HashMap<PathBuf, SimDev>,\n pub filesystems: Table<SimFilesystem>,\n redundancy: Redundancy,\n rdm: Rc<RefCell<Randomizer>>,\n}\n\nimpl SimPool {\n pub fn new(rdm: Rc<RefCell<Randomizer>>,\n name: &str,\n paths: &[&Path],\n redundancy: Redundancy)\n -> SimPool {\n\n let devices: HashSet<_, RandomState> = HashSet::from_iter(paths);\n let device_pairs = devices\n .iter()\n .map(|p| (p.to_path_buf(), SimDev::new(rdm.clone(), p)));\n SimPool {\n name: name.to_owned(),\n pool_uuid: Uuid::new_v4(),\n block_devs: HashMap::from_iter(device_pairs),\n filesystems: Table::default(),\n redundancy: redundancy,\n rdm: rdm.clone(),\n }\n }\n\n pub fn check(&mut self) -> () {}\n}\n\nimpl Pool for SimPool {\n fn add_blockdevs(&mut self, paths: &[&Path], _force: bool) -> EngineResult<Vec<PathBuf>> {\n let rdm = self.rdm.clone();\n let devices: HashSet<_, RandomState> = HashSet::from_iter(paths);\n let device_pairs = devices\n .iter()\n .map(|p| (p.to_path_buf(), SimDev::new(rdm.clone(), p)));\n self.block_devs.extend(device_pairs);\n Ok(devices.iter().map(|d| d.to_path_buf()).collect())\n }\n\n fn destroy_filesystems<'a, 'b>(&'a mut self,\n fs_uuids: &[&'b FilesystemUuid])\n -> EngineResult<Vec<&'b FilesystemUuid>> {\n destroy_filesystems!{self; fs_uuids}\n }\n\n fn destroy(self) -> EngineResult<()> {\n \/\/ Nothing to do here.\n Ok(())\n }\n\n fn create_filesystems<'a, 'b>(&'a mut self,\n specs: &[&'b str])\n -> EngineResult<Vec<(&'b str, FilesystemUuid)>> {\n let names: HashSet<_, RandomState> = HashSet::from_iter(specs);\n for name in names.iter() {\n if self.filesystems.contains_name(name) {\n return Err(EngineError::Engine(ErrorEnum::AlreadyExists, name.to_string()));\n }\n }\n\n let mut result = Vec::new();\n for name in names.iter() {\n let uuid = Uuid::new_v4();\n let new_filesystem = SimFilesystem::new(uuid, name);\n self.filesystems.insert(new_filesystem);\n result.push((**name, uuid));\n }\n\n Ok(result)\n }\n\n fn rename_filesystem(&mut self,\n uuid: &FilesystemUuid,\n new_name: &str)\n -> EngineResult<RenameAction> {\n rename_filesystem!{self; uuid; new_name}\n }\n\n fn rename(&mut self, name: &str) {\n self.name = name.to_owned();\n }\n\n fn get_filesystem(&mut self, uuid: &FilesystemUuid) -> Option<&mut Filesystem> {\n get_filesystem!(self; uuid)\n }\n}\n\nimpl HasUuid for SimPool {\n fn uuid(&self) -> &PoolUuid {\n &self.pool_uuid\n }\n}\n\nimpl HasName for SimPool {\n fn name(&self) -> &str {\n &self.name\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n\n use std::path::Path;\n\n use uuid::Uuid;\n\n use engine::Engine;\n use engine::ErrorEnum;\n use engine::EngineError;\n use engine::RenameAction;\n\n use super::super::SimEngine;\n\n #[test]\n \/\/\/ Renaming a filesystem on an empty 
pool always works\n fn rename_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.rename_filesystem(&Uuid::new_v4(), \"new_name\") {\n Ok(RenameAction::NoSource) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming a filesystem to another filesystem should work if new name not taken\n fn rename_happens() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let infos = pool.create_filesystems(&[\"old_name\"]).unwrap();\n assert!(match pool.rename_filesystem(&infos[0].1, \"new_name\") {\n Ok(RenameAction::Renamed) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming a filesystem to another filesystem should fail if new name taken\n fn rename_fails() {\n let old_name = \"old_name\";\n let new_name = \"new_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let results = pool.create_filesystems(&[old_name, new_name]).unwrap();\n let old_uuid = results.iter().find(|x| x.0 == old_name).unwrap().1;\n assert!(match pool.rename_filesystem(&old_uuid, new_name) {\n Err(EngineError::Engine(ErrorEnum::AlreadyExists, _)) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Renaming should succeed if old_name absent, new present\n fn rename_no_op() {\n let new_name = \"new_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n pool.create_filesystems(&[new_name]).unwrap();\n assert!(match pool.rename_filesystem(&Uuid::new_v4(), new_name) {\n Ok(RenameAction::NoSource) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Removing an empty list of filesystems should always succeed\n fn destroy_fs_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.destroy_filesystems(&[]) {\n Ok(names) => names.is_empty(),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Removing a non-empty list of filesystems should succeed on empty pool\n fn destroy_fs_some() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(pool.destroy_filesystems(&[&Uuid::new_v4()]).is_ok());\n }\n\n #[test]\n \/\/\/ Removing a non-empty list of filesystems should succeed on any pool\n fn destroy_fs_any() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine.create_pool(\"name\", &[], None, false).unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let fs_results = pool.create_filesystems(&[\"fs_name\"]).unwrap();\n let fs_uuid = fs_results[0].1;\n assert!(match pool.destroy_filesystems(&[&fs_uuid, &Uuid::new_v4()]) {\n Ok(filesystems) => filesystems == vec![&fs_uuid],\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Creating an empty list of filesystems should succeed, always\n fn create_fs_none() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[]) {\n Ok(names) => names.is_empty(),\n _ => false,\n });\n }\n\n 
#[test]\n \/\/\/ Creating a non-empty list of filesystems always succeeds.\n fn create_fs_some() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[\"name\"]) {\n Ok(names) => (names.len() == 1) & (names[0].0 == \"name\"),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Creating a an already existing filesystem fails.\n fn create_fs_conflict() {\n let fs_name = \"fs_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n pool.create_filesystems(&[fs_name]).unwrap();\n assert!(match pool.create_filesystems(&[fs_name]) {\n Err(EngineError::Engine(ErrorEnum::AlreadyExists, _)) => true,\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Requesting identical filesystems succeeds.\n fn create_fs_dups() {\n let fs_name = \"fs_name\";\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n assert!(match pool.create_filesystems(&[fs_name, fs_name]) {\n Ok(names) => (names.len() == 1) & (names[0].0 == fs_name),\n _ => false,\n });\n }\n\n #[test]\n \/\/\/ Adding a list of devices to an empty pool should yield list.\n fn add_device_empty() {\n let mut engine = SimEngine::default();\n let (uuid, _) = engine\n .create_pool(\"pool_name\", &[], None, false)\n .unwrap();\n let pool = engine.get_pool(&uuid).unwrap();\n let devices = [Path::new(\"\/s\/a\"), Path::new(\"\/s\/b\")];\n assert!(match pool.add_blockdevs(&devices, false) {\n Ok(devs) => devs.len() == devices.len(),\n _ => false,\n });\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adding rot<commit_after>\n\/\/\/ Rotation\npub struct Rot {\n pub s: f32,\n pub c: f32\n}\n\nimpl Rot {\n pub fn new() -> Rot {\n Rot {\n s: 0.0,\n c: 1.0\n }\n }\n\n \/\/\/ Initialize from an angle in radians\n pub fn new_angle(angle: f32) -> Rot {\n Rot {\n s: angle.sin(),\n c: angle.cos()\n }\n }\n\n pub fn set(&mut self, angle: f32) {\n self.s = angle.sin();\n self.c = angle.cos();\n }\n\n \/\/\/ Set to the identity rotation\n pub fn set_identity(&mut self) {\n self.s = 0.0;\n self.c = 1.0;\n }\n\n \/\/\/ Get the angle in radians\n pub fn get_angle() -> f32 {\n s.atan2(c)\n }\n\n \/\/\/ Get the x-axis\n pub fn get_x_axis(&mut self) -> Vec2 {\n Vec2::new(self.c, self.s)\n }\n\n \/\/\/ Get the u axis\n pub fn get_y_axis(&mut self) -> Vec2 {\n Vec2::new(-self.s, self.c)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>multiline closures added<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>dynamic dispatch in closures as traits<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added todo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Disable SPIR-V optimiser as the water tessellation shader fails to optimise on Linux (issue #47)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>function pointers added to closure<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of various bits and pieces of the `panic!` macro and\n\/\/! associated runtime pieces.\n\/\/!\n\/\/! Specifically, this module contains the implementation of:\n\/\/!\n\/\/! * Panic hooks\n\/\/! * Executing a panic up to doing the actual implementation\n\/\/! * Shims around \"try\"\n\nuse prelude::v1::*;\nuse io::prelude::*;\n\nuse any::Any;\nuse cell::Cell;\nuse cell::RefCell;\nuse fmt;\nuse intrinsics;\nuse mem;\nuse raw;\nuse sys_common::rwlock::RWLock;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::stdio::Stderr;\nuse sys_common::backtrace;\nuse sys_common::thread_info;\nuse sys_common::util;\nuse thread;\n\nthread_local! {\n pub static LOCAL_STDERR: RefCell<Option<Box<Write + Send>>> = {\n RefCell::new(None)\n }\n}\n\nthread_local! { pub static PANIC_COUNT: Cell<usize> = Cell::new(0) }\n\n\/\/ Binary interface to the panic runtime that the standard library depends on.\n\/\/\n\/\/ The standard library is tagged with `#![needs_panic_runtime]` (introduced in\n\/\/ RFC 1513) to indicate that it requires some other crate tagged with\n\/\/ `#![panic_runtime]` to exist somewhere. Each panic runtime is intended to\n\/\/ implement these symbols (with the same signatures) so we can get matched up\n\/\/ to them.\n\/\/\n\/\/ One day this may look a little less ad-hoc with the compiler helping out to\n\/\/ hook up these functions, but it is not this day!\n#[allow(improper_ctypes)]\nextern {\n fn __rust_maybe_catch_panic(f: fn(*mut u8),\n data: *mut u8,\n data_ptr: *mut usize,\n vtable_ptr: *mut usize) -> u32;\n #[unwind]\n fn __rust_start_panic(data: usize, vtable: usize) -> u32;\n}\n\n#[derive(Copy, Clone)]\nenum Hook {\n Default,\n Custom(*mut (Fn(&PanicInfo) + 'static + Sync + Send)),\n}\n\nstatic HOOK_LOCK: RWLock = RWLock::new();\nstatic mut HOOK: Hook = Hook::Default;\nstatic FIRST_PANIC: AtomicBool = AtomicBool::new(true);\n\n\/\/\/ Registers a custom panic hook, replacing any that was previously registered.\n\/\/\/\n\/\/\/ The panic hook is invoked when a thread panics, but before the panic runtime\n\/\/\/ is invoked. As such, the hook will run with both the aborting and unwinding\n\/\/\/ runtimes. 
The default hook prints a message to standard error and generates\n\/\/\/ a backtrace if requested, but this behavior can be customized with the\n\/\/\/ `set_hook` and `take_hook` functions.\n\/\/\/\n\/\/\/ The hook is provided with a `PanicInfo` struct which contains information\n\/\/\/ about the origin of the panic, including the payload passed to `panic!` and\n\/\/\/ the source code location from which the panic originated.\n\/\/\/\n\/\/\/ The panic hook is a global resource.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if called from a panicking thread.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub fn set_hook(hook: Box<Fn(&PanicInfo) + 'static + Sync + Send>) {\n if thread::panicking() {\n panic!(\"cannot modify the panic hook from a panicking thread\");\n }\n\n unsafe {\n HOOK_LOCK.write();\n let old_hook = HOOK;\n HOOK = Hook::Custom(Box::into_raw(hook));\n HOOK_LOCK.write_unlock();\n\n if let Hook::Custom(ptr) = old_hook {\n Box::from_raw(ptr);\n }\n }\n}\n\n\/\/\/ Unregisters the current panic hook, returning it.\n\/\/\/\n\/\/\/ If no custom hook is registered, the default hook will be returned.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if called from a panicking thread.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub fn take_hook() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {\n if thread::panicking() {\n panic!(\"cannot modify the panic hook from a panicking thread\");\n }\n\n unsafe {\n HOOK_LOCK.write();\n let hook = HOOK;\n HOOK = Hook::Default;\n HOOK_LOCK.write_unlock();\n\n match hook {\n Hook::Default => Box::new(default_hook),\n Hook::Custom(ptr) => {Box::from_raw(ptr)} \/\/ FIXME #30530\n }\n }\n}\n\n\/\/\/ A struct providing information about a panic.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub struct PanicInfo<'a> {\n payload: &'a (Any + Send),\n location: Location<'a>,\n}\n\nimpl<'a> PanicInfo<'a> {\n \/\/\/ Returns the payload associated with the panic.\n \/\/\/\n \/\/\/ This will commonly, but not always, be a `&'static str` or `String`.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn payload(&self) -> &(Any + Send) {\n self.payload\n }\n\n \/\/\/ Returns information about the location from which the panic originated,\n \/\/\/ if available.\n \/\/\/\n \/\/\/ This method will currently always return `Some`, but this may change\n \/\/\/ in future versions.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn location(&self) -> Option<&Location> {\n Some(&self.location)\n }\n}\n\n\/\/\/ A struct containing information about the location of a panic.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub struct Location<'a> {\n file: &'a str,\n line: u32,\n}\n\nimpl<'a> Location<'a> {\n \/\/\/ Returns the name of the source file from which the panic originated.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn file(&self) -> &str {\n self.file\n }\n\n \/\/\/ Returns the line number from which the panic originated.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn line(&self) -> u32 {\n self.line\n }\n}\n\nfn default_hook(info: &PanicInfo) {\n let panics = PANIC_COUNT.with(|c| c.get());\n\n \/\/ If this is a double panic, make sure that we print a backtrace\n \/\/ for this panic. 
Otherwise only print it if logging is enabled.\n let log_backtrace = panics >= 2 || backtrace::log_enabled();\n\n let file = info.location.file;\n let line = info.location.line;\n\n let msg = match info.payload.downcast_ref::<&'static str>() {\n Some(s) => *s,\n None => match info.payload.downcast_ref::<String>() {\n Some(s) => &s[..],\n None => \"Box<Any>\",\n }\n };\n let mut err = Stderr::new().ok();\n let thread = thread_info::current_thread();\n let name = thread.as_ref().and_then(|t| t.name()).unwrap_or(\"<unnamed>\");\n\n let write = |err: &mut ::io::Write| {\n let _ = writeln!(err, \"thread '{}' panicked at '{}', {}:{}\",\n name, msg, file, line);\n\n if log_backtrace {\n let _ = backtrace::write(err);\n } else if FIRST_PANIC.compare_and_swap(true, false, Ordering::SeqCst) {\n let _ = writeln!(err, \"note: Run with `RUST_BACKTRACE=1` for a backtrace.\");\n }\n };\n\n let prev = LOCAL_STDERR.with(|s| s.borrow_mut().take());\n match (prev, err.as_mut()) {\n (Some(mut stderr), _) => {\n write(&mut *stderr);\n let mut s = Some(stderr);\n LOCAL_STDERR.with(|slot| {\n *slot.borrow_mut() = s.take();\n });\n }\n (None, Some(ref mut err)) => { write(err) }\n _ => {}\n }\n}\n\n\/\/\/ Invoke a closure, capturing the cause of an unwinding panic if one occurs.\npub unsafe fn try<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any + Send>> {\n let mut slot = None;\n let mut f = Some(f);\n let ret;\n\n {\n let mut to_run = || {\n slot = Some(f.take().unwrap()());\n };\n let fnptr = get_call(&mut to_run);\n let dataptr = &mut to_run as *mut _ as *mut u8;\n let mut any_data = 0;\n let mut any_vtable = 0;\n let fnptr = mem::transmute::<fn(&mut _), fn(*mut u8)>(fnptr);\n let r = __rust_maybe_catch_panic(fnptr,\n dataptr,\n &mut any_data,\n &mut any_vtable);\n if r == 0 {\n ret = Ok(());\n } else {\n PANIC_COUNT.with(|s| {\n let prev = s.get();\n s.set(prev - 1);\n });\n ret = Err(mem::transmute(raw::TraitObject {\n data: any_data as *mut _,\n vtable: any_vtable as *mut _,\n }));\n }\n }\n\n debug_assert!(PANIC_COUNT.with(|c| c.get() == 0));\n return ret.map(|()| {\n slot.take().unwrap()\n });\n\n fn get_call<F: FnMut()>(_: &mut F) -> fn(&mut F) {\n call\n }\n\n fn call<F: FnMut()>(f: &mut F) {\n f()\n }\n}\n\n\/\/\/ Determines whether the current thread is unwinding because of panic.\npub fn panicking() -> bool {\n PANIC_COUNT.with(|c| c.get() != 0)\n}\n\n\/\/\/ Entry point of panic from the libcore crate.\n#[cfg(not(test))]\n#[lang = \"panic_fmt\"]\n#[unwind]\npub extern fn rust_begin_panic(msg: fmt::Arguments,\n file: &'static str,\n line: u32) -> ! {\n begin_panic_fmt(&msg, &(file, line))\n}\n\n\/\/\/ The entry point for panicking with a formatted message.\n\/\/\/\n\/\/\/ This is designed to reduce the amount of code required at the call\n\/\/\/ site as much as possible (so that `panic!()` has as low an impact\n\/\/\/ on (e.g.) the inlining of other functions as possible), by moving\n\/\/\/ the actual formatting into this shared place.\n#[unstable(feature = \"libstd_sys_internals\",\n reason = \"used by the panic! macro\",\n issue = \"0\")]\n#[inline(never)] #[cold]\npub fn begin_panic_fmt(msg: &fmt::Arguments,\n file_line: &(&'static str, u32)) -> ! {\n use fmt::Write;\n\n \/\/ We do two allocations here, unfortunately. 
But (a) they're\n \/\/ required with the current scheme, and (b) we don't handle\n \/\/ panic + OOM properly anyway (see comment in begin_panic\n \/\/ below).\n\n let mut s = String::new();\n let _ = s.write_fmt(*msg);\n begin_panic(s, file_line)\n}\n\n\/\/\/ This is the entry point of panicking for panic!() and assert!().\n#[unstable(feature = \"libstd_sys_internals\",\n reason = \"used by the panic! macro\",\n issue = \"0\")]\n#[inline(never)] #[cold] \/\/ avoid code bloat at the call sites as much as possible\npub fn begin_panic<M: Any + Send>(msg: M, file_line: &(&'static str, u32)) -> ! {\n \/\/ Note that this should be the only allocation performed in this code path.\n \/\/ Currently this means that panic!() on OOM will invoke this code path,\n \/\/ but then again we're not really ready for panic on OOM anyway. If\n \/\/ we do start doing this, then we should propagate this allocation to\n \/\/ be performed in the parent of this thread instead of the thread that's\n \/\/ panicking.\n\n rust_panic_with_hook(Box::new(msg), file_line)\n}\n\n\/\/\/ Executes the primary logic for a panic, including checking for recursive\n\/\/\/ panics and panic hooks.\n\/\/\/\n\/\/\/ This is the entry point or panics from libcore, formatted panics, and\n\/\/\/ `Box<Any>` panics. Here we'll verify that we're not panicking recursively,\n\/\/\/ run panic hooks, and then delegate to the actual implementation of panics.\n#[inline(never)]\n#[cold]\nfn rust_panic_with_hook(msg: Box<Any + Send>,\n file_line: &(&'static str, u32)) -> ! {\n let (file, line) = *file_line;\n\n let panics = PANIC_COUNT.with(|c| {\n let prev = c.get();\n c.set(prev + 1);\n prev\n });\n\n \/\/ If this is the third nested call (e.g. panics == 2, this is 0-indexed),\n \/\/ the panic hook probably triggered the last panic, otherwise the\n \/\/ double-panic check would have aborted the process. In this case abort the\n \/\/ process real quickly as we don't want to try calling it again as it'll\n \/\/ probably just panic again.\n if panics > 1 {\n util::dumb_print(format_args!(\"thread panicked while processing \\\n panic. aborting.\\n\"));\n unsafe { intrinsics::abort() }\n }\n\n unsafe {\n let info = PanicInfo {\n payload: &*msg,\n location: Location {\n file: file,\n line: line,\n },\n };\n HOOK_LOCK.read();\n match HOOK {\n Hook::Default => default_hook(&info),\n Hook::Custom(ptr) => (*ptr)(&info),\n }\n HOOK_LOCK.read_unlock();\n }\n\n if panics > 0 {\n \/\/ If a thread panics while it's already unwinding then we\n \/\/ have limited options. Currently our preference is to\n \/\/ just abort. In the future we may consider resuming\n \/\/ unwinding or otherwise exiting the thread cleanly.\n util::dumb_print(format_args!(\"thread panicked while panicking. \\\n aborting.\\n\"));\n unsafe { intrinsics::abort() }\n }\n\n rust_panic(msg)\n}\n\n\/\/\/ A private no-mangle function on which to slap yer breakpoints.\n#[no_mangle]\n#[allow(private_no_mangle_fns)] \/\/ yes we get it, but we like breakpoints\npub fn rust_panic(msg: Box<Any + Send>) -> ! {\n let code = unsafe {\n let obj = mem::transmute::<_, raw::TraitObject>(msg);\n __rust_start_panic(obj.data as usize, obj.vtable as usize)\n };\n rtabort!(\"failed to initiate panic, error {}\", code)\n}\n<commit_msg>Revert \"Refactored code to access TLS only in case of panic\"<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Implementation of various bits and pieces of the `panic!` macro and\n\/\/! associated runtime pieces.\n\/\/!\n\/\/! Specifically, this module contains the implementation of:\n\/\/!\n\/\/! * Panic hooks\n\/\/! * Executing a panic up to doing the actual implementation\n\/\/! * Shims around \"try\"\n\nuse prelude::v1::*;\nuse io::prelude::*;\n\nuse any::Any;\nuse cell::Cell;\nuse cell::RefCell;\nuse fmt;\nuse intrinsics;\nuse mem;\nuse raw;\nuse sys_common::rwlock::RWLock;\nuse sync::atomic::{AtomicBool, Ordering};\nuse sys::stdio::Stderr;\nuse sys_common::backtrace;\nuse sys_common::thread_info;\nuse sys_common::util;\nuse thread;\n\nthread_local! {\n pub static LOCAL_STDERR: RefCell<Option<Box<Write + Send>>> = {\n RefCell::new(None)\n }\n}\n\nthread_local! { pub static PANIC_COUNT: Cell<usize> = Cell::new(0) }\n\n\/\/ Binary interface to the panic runtime that the standard library depends on.\n\/\/\n\/\/ The standard library is tagged with `#![needs_panic_runtime]` (introduced in\n\/\/ RFC 1513) to indicate that it requires some other crate tagged with\n\/\/ `#![panic_runtime]` to exist somewhere. Each panic runtime is intended to\n\/\/ implement these symbols (with the same signatures) so we can get matched up\n\/\/ to them.\n\/\/\n\/\/ One day this may look a little less ad-hoc with the compiler helping out to\n\/\/ hook up these functions, but it is not this day!\n#[allow(improper_ctypes)]\nextern {\n fn __rust_maybe_catch_panic(f: fn(*mut u8),\n data: *mut u8,\n data_ptr: *mut usize,\n vtable_ptr: *mut usize) -> u32;\n #[unwind]\n fn __rust_start_panic(data: usize, vtable: usize) -> u32;\n}\n\n#[derive(Copy, Clone)]\nenum Hook {\n Default,\n Custom(*mut (Fn(&PanicInfo) + 'static + Sync + Send)),\n}\n\nstatic HOOK_LOCK: RWLock = RWLock::new();\nstatic mut HOOK: Hook = Hook::Default;\nstatic FIRST_PANIC: AtomicBool = AtomicBool::new(true);\n\n\/\/\/ Registers a custom panic hook, replacing any that was previously registered.\n\/\/\/\n\/\/\/ The panic hook is invoked when a thread panics, but before the panic runtime\n\/\/\/ is invoked. As such, the hook will run with both the aborting and unwinding\n\/\/\/ runtimes. 
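\n\/\/\/\n\/\/\/ For example, a hook that simply reports the panic payload might be installed\n\/\/\/ like this (a minimal sketch, assuming nothing has panicked yet):\n\/\/\/\n\/\/\/ ```ignore\n\/\/\/ use std::panic;\n\/\/\/\n\/\/\/ panic::set_hook(Box::new(|info| {\n\/\/\/     \/\/ The payload is commonly a `&'static str` or a `String`.\n\/\/\/     if let Some(msg) = info.payload().downcast_ref::<&str>() {\n\/\/\/         println!(\"custom panic hook saw: {}\", msg);\n\/\/\/     }\n\/\/\/ }));\n\/\/\/ ```\n\/\/\/\n\/\/\/ 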
The default hook prints a message to standard error and generates\n\/\/\/ a backtrace if requested, but this behavior can be customized with the\n\/\/\/ `set_hook` and `take_hook` functions.\n\/\/\/\n\/\/\/ The hook is provided with a `PanicInfo` struct which contains information\n\/\/\/ about the origin of the panic, including the payload passed to `panic!` and\n\/\/\/ the source code location from which the panic originated.\n\/\/\/\n\/\/\/ The panic hook is a global resource.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if called from a panicking thread.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub fn set_hook(hook: Box<Fn(&PanicInfo) + 'static + Sync + Send>) {\n if thread::panicking() {\n panic!(\"cannot modify the panic hook from a panicking thread\");\n }\n\n unsafe {\n HOOK_LOCK.write();\n let old_hook = HOOK;\n HOOK = Hook::Custom(Box::into_raw(hook));\n HOOK_LOCK.write_unlock();\n\n if let Hook::Custom(ptr) = old_hook {\n Box::from_raw(ptr);\n }\n }\n}\n\n\/\/\/ Unregisters the current panic hook, returning it.\n\/\/\/\n\/\/\/ If no custom hook is registered, the default hook will be returned.\n\/\/\/\n\/\/\/ # Panics\n\/\/\/\n\/\/\/ Panics if called from a panicking thread.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub fn take_hook() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {\n if thread::panicking() {\n panic!(\"cannot modify the panic hook from a panicking thread\");\n }\n\n unsafe {\n HOOK_LOCK.write();\n let hook = HOOK;\n HOOK = Hook::Default;\n HOOK_LOCK.write_unlock();\n\n match hook {\n Hook::Default => Box::new(default_hook),\n Hook::Custom(ptr) => {Box::from_raw(ptr)} \/\/ FIXME #30530\n }\n }\n}\n\n\/\/\/ A struct providing information about a panic.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub struct PanicInfo<'a> {\n payload: &'a (Any + Send),\n location: Location<'a>,\n}\n\nimpl<'a> PanicInfo<'a> {\n \/\/\/ Returns the payload associated with the panic.\n \/\/\/\n \/\/\/ This will commonly, but not always, be a `&'static str` or `String`.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn payload(&self) -> &(Any + Send) {\n self.payload\n }\n\n \/\/\/ Returns information about the location from which the panic originated,\n \/\/\/ if available.\n \/\/\/\n \/\/\/ This method will currently always return `Some`, but this may change\n \/\/\/ in future versions.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn location(&self) -> Option<&Location> {\n Some(&self.location)\n }\n}\n\n\/\/\/ A struct containing information about the location of a panic.\n#[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\npub struct Location<'a> {\n file: &'a str,\n line: u32,\n}\n\nimpl<'a> Location<'a> {\n \/\/\/ Returns the name of the source file from which the panic originated.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn file(&self) -> &str {\n self.file\n }\n\n \/\/\/ Returns the line number from which the panic originated.\n #[stable(feature = \"panic_hooks\", since = \"1.10.0\")]\n pub fn line(&self) -> u32 {\n self.line\n }\n}\n\nfn default_hook(info: &PanicInfo) {\n let panics = PANIC_COUNT.with(|c| c.get());\n\n \/\/ If this is a double panic, make sure that we print a backtrace\n \/\/ for this panic. 
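A \"double panic\" here means `panics >= 2`: the\n    \/\/ thread hit another panic while it was already processing one.\n    \/\/ 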
Otherwise only print it if logging is enabled.\n let log_backtrace = panics >= 2 || backtrace::log_enabled();\n\n let file = info.location.file;\n let line = info.location.line;\n\n let msg = match info.payload.downcast_ref::<&'static str>() {\n Some(s) => *s,\n None => match info.payload.downcast_ref::<String>() {\n Some(s) => &s[..],\n None => \"Box<Any>\",\n }\n };\n let mut err = Stderr::new().ok();\n let thread = thread_info::current_thread();\n let name = thread.as_ref().and_then(|t| t.name()).unwrap_or(\"<unnamed>\");\n\n let write = |err: &mut ::io::Write| {\n let _ = writeln!(err, \"thread '{}' panicked at '{}', {}:{}\",\n name, msg, file, line);\n\n if log_backtrace {\n let _ = backtrace::write(err);\n } else if FIRST_PANIC.compare_and_swap(true, false, Ordering::SeqCst) {\n let _ = writeln!(err, \"note: Run with `RUST_BACKTRACE=1` for a backtrace.\");\n }\n };\n\n let prev = LOCAL_STDERR.with(|s| s.borrow_mut().take());\n match (prev, err.as_mut()) {\n (Some(mut stderr), _) => {\n write(&mut *stderr);\n let mut s = Some(stderr);\n LOCAL_STDERR.with(|slot| {\n *slot.borrow_mut() = s.take();\n });\n }\n (None, Some(ref mut err)) => { write(err) }\n _ => {}\n }\n}\n\n\/\/\/ Invoke a closure, capturing the cause of an unwinding panic if one occurs.\npub unsafe fn try<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any + Send>> {\n let mut slot = None;\n let mut f = Some(f);\n let ret = PANIC_COUNT.with(|s| {\n let prev = s.get();\n s.set(0);\n\n let mut to_run = || {\n slot = Some(f.take().unwrap()());\n };\n let fnptr = get_call(&mut to_run);\n let dataptr = &mut to_run as *mut _ as *mut u8;\n let mut any_data = 0;\n let mut any_vtable = 0;\n let fnptr = mem::transmute::<fn(&mut _), fn(*mut u8)>(fnptr);\n let r = __rust_maybe_catch_panic(fnptr,\n dataptr,\n &mut any_data,\n &mut any_vtable);\n s.set(prev);\n\n if r == 0 {\n Ok(())\n } else {\n Err(mem::transmute(raw::TraitObject {\n data: any_data as *mut _,\n vtable: any_vtable as *mut _,\n }))\n }\n });\n\n return ret.map(|()| {\n slot.take().unwrap()\n });\n\n fn get_call<F: FnMut()>(_: &mut F) -> fn(&mut F) {\n call\n }\n\n fn call<F: FnMut()>(f: &mut F) {\n f()\n }\n}\n\n\/\/\/ Determines whether the current thread is unwinding because of panic.\npub fn panicking() -> bool {\n PANIC_COUNT.with(|c| c.get() != 0)\n}\n\n\/\/\/ Entry point of panic from the libcore crate.\n#[cfg(not(test))]\n#[lang = \"panic_fmt\"]\n#[unwind]\npub extern fn rust_begin_panic(msg: fmt::Arguments,\n file: &'static str,\n line: u32) -> ! {\n begin_panic_fmt(&msg, &(file, line))\n}\n\n\/\/\/ The entry point for panicking with a formatted message.\n\/\/\/\n\/\/\/ This is designed to reduce the amount of code required at the call\n\/\/\/ site as much as possible (so that `panic!()` has as low an impact\n\/\/\/ on (e.g.) the inlining of other functions as possible), by moving\n\/\/\/ the actual formatting into this shared place.\n#[unstable(feature = \"libstd_sys_internals\",\n reason = \"used by the panic! macro\",\n issue = \"0\")]\n#[inline(never)] #[cold]\npub fn begin_panic_fmt(msg: &fmt::Arguments,\n file_line: &(&'static str, u32)) -> ! {\n use fmt::Write;\n\n \/\/ We do two allocations here, unfortunately. 
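(They are the `String` used below to\n    \/\/ buffer the formatted message and the `Box` created in `begin_panic`.)\n    \/\/ 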
But (a) they're\n \/\/ required with the current scheme, and (b) we don't handle\n \/\/ panic + OOM properly anyway (see comment in begin_panic\n \/\/ below).\n\n let mut s = String::new();\n let _ = s.write_fmt(*msg);\n begin_panic(s, file_line)\n}\n\n\/\/\/ This is the entry point of panicking for panic!() and assert!().\n#[unstable(feature = \"libstd_sys_internals\",\n reason = \"used by the panic! macro\",\n issue = \"0\")]\n#[inline(never)] #[cold] \/\/ avoid code bloat at the call sites as much as possible\npub fn begin_panic<M: Any + Send>(msg: M, file_line: &(&'static str, u32)) -> ! {\n \/\/ Note that this should be the only allocation performed in this code path.\n \/\/ Currently this means that panic!() on OOM will invoke this code path,\n \/\/ but then again we're not really ready for panic on OOM anyway. If\n \/\/ we do start doing this, then we should propagate this allocation to\n \/\/ be performed in the parent of this thread instead of the thread that's\n \/\/ panicking.\n\n rust_panic_with_hook(Box::new(msg), file_line)\n}\n\n\/\/\/ Executes the primary logic for a panic, including checking for recursive\n\/\/\/ panics and panic hooks.\n\/\/\/\n\/\/\/ This is the entry point or panics from libcore, formatted panics, and\n\/\/\/ `Box<Any>` panics. Here we'll verify that we're not panicking recursively,\n\/\/\/ run panic hooks, and then delegate to the actual implementation of panics.\n#[inline(never)]\n#[cold]\nfn rust_panic_with_hook(msg: Box<Any + Send>,\n file_line: &(&'static str, u32)) -> ! {\n let (file, line) = *file_line;\n\n let panics = PANIC_COUNT.with(|c| {\n let prev = c.get();\n c.set(prev + 1);\n prev\n });\n\n \/\/ If this is the third nested call (e.g. panics == 2, this is 0-indexed),\n \/\/ the panic hook probably triggered the last panic, otherwise the\n \/\/ double-panic check would have aborted the process. In this case abort the\n \/\/ process real quickly as we don't want to try calling it again as it'll\n \/\/ probably just panic again.\n if panics > 1 {\n util::dumb_print(format_args!(\"thread panicked while processing \\\n panic. aborting.\\n\"));\n unsafe { intrinsics::abort() }\n }\n\n unsafe {\n let info = PanicInfo {\n payload: &*msg,\n location: Location {\n file: file,\n line: line,\n },\n };\n HOOK_LOCK.read();\n match HOOK {\n Hook::Default => default_hook(&info),\n Hook::Custom(ptr) => (*ptr)(&info),\n }\n HOOK_LOCK.read_unlock();\n }\n\n if panics > 0 {\n \/\/ If a thread panics while it's already unwinding then we\n \/\/ have limited options. Currently our preference is to\n \/\/ just abort. In the future we may consider resuming\n \/\/ unwinding or otherwise exiting the thread cleanly.\n util::dumb_print(format_args!(\"thread panicked while panicking. \\\n aborting.\\n\"));\n unsafe { intrinsics::abort() }\n }\n\n rust_panic(msg)\n}\n\n\/\/\/ A private no-mangle function on which to slap yer breakpoints.\n#[no_mangle]\n#[allow(private_no_mangle_fns)] \/\/ yes we get it, but we like breakpoints\npub fn rust_panic(msg: Box<Any + Send>) -> ! {\n let code = unsafe {\n let obj = mem::transmute::<_, raw::TraitObject>(msg);\n __rust_start_panic(obj.data as usize, obj.vtable as usize)\n };\n rtabort!(\"failed to initiate panic, error {}\", code)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add component-wise map, map2 and fold performance tests<commit_after>\/**\n * Component-wise map and fold function speed tests. For best results, compile\n * with the optimise flag (-O). 
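A typical invocation, assuming the\n * file is saved as `bench.rs`, would be something like:\n *\n *     rustc -O bench.rs && .\/bench\n *\n * 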
These functions would allow for even more generic\n * operations on dimensional data structures. Map seems to be faster than hand\n * unrolling for add_t, but map2 for add_v is slower. A combination of map2 and\n * foldl is faster for dot product.\n *\/\n\nextern mod std;\nuse std::time::precise_time_ns;\nuse cast::transmute;\nuse vec::raw::buf_as_slice;\nuse ptr::to_unsafe_ptr;\nuse cmp::Eq;\nuse num::from_int;\n\npub struct Vec4<T> { x: T, y: T, z: T, w: T }\n\npub mod Vec4 {\n #[inline(always)]\n pub pure fn new<T>(x: T, y: T, z: T, w: T) -> Vec4<T> {\n Vec4 { x: move x, y: move y, z: move z, w: move w }\n }\n}\n\npub impl<T:Copy Num> Vec4<T> {\n #[inline(always)]\n pure fn index(i: uint) -> T {\n unsafe { do buf_as_slice(\n transmute::<*Vec4<T>, *T>(\n to_unsafe_ptr(&self)), 4) |slice| { slice[i] }\n }\n }\n \n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \n #[inline(always)]\n pure fn map(f: fn&(a: &T) -> T) -> Vec4<T> {\n Vec4::new(f(&self[0]),\n f(&self[1]),\n f(&self[2]),\n f(&self[3]))\n }\n \n #[inline(always)]\n pure fn map2(other: &Vec4<T>, f: fn&(a: &T, b: &T) -> T) -> Vec4<T> {\n Vec4::new(f(&self[0], &other[0]),\n f(&self[1], &other[1]),\n f(&self[2], &other[2]),\n f(&self[3], &other[3]))\n }\n \n pure fn foldl<U: Copy>(z: U, p: &fn(t: T, u: &U) -> U) -> U {\n p(self[3], &p(self[2], &p(self[1], &p(self[0], &z))))\n }\n pure fn foldr<U: Copy>(z: U, p: &fn(t: &T, u: U) -> U) -> U {\n p(&self[0], p(&self[1], p(&self[2], p(&self[3], z))))\n }\n \n \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n \n #[inline(always)]\n pure fn mul_t(value: T) -> Vec4<T> {\n Vec4::new(self[0] * value,\n self[1] * value,\n self[2] * value,\n self[3] * value)\n }\n \n #[inline(always)]\n pure fn mul_t_map(value: T) -> Vec4<T> {\n do self.map |a| { a * value }\n }\n \n #[inline(always)]\n pure fn add_v(other: &Vec4<T>) -> Vec4<T> {\n Vec4::new(self[0] + other[0],\n self[1] + other[1],\n self[2] + other[2],\n self[3] + other[3])\n }\n \n #[inline(always)]\n pure fn add_v_map2(other: &Vec4<T>) -> Vec4<T> {\n do self.map2(other) |a, b| { a + *b }\n }\n \n #[inline(always)]\n pure fn dot(other: &Vec4<T>) -> T {\n self[0] * other[0] +\n self[1] * other[1] +\n self[2] * other[2] +\n self[3] * other[3]\n }\n \n #[inline(always)]\n pure fn dot_foldl(other: &Vec4<T>) -> T {\n self.map2(other, |a, b| { a * *b })\n .foldl(from_int(0), |t, u| { t + *u })\n }\n}\n\npub impl<T:Copy Num Eq> Vec4<T>: Eq {\n #[inline(always)]\n pure fn eq(other: &Vec4<T>) -> bool {\n self[0] == other[0] &&\n self[1] == other[1] &&\n self[2] == other[2] &&\n self[3] == other[3]\n }\n \n #[inline(always)]\n pure fn ne(other: &Vec4<T>) -> bool {\n !(self == *other)\n }\n}\n\nfn main() {\n let n_tests = 10000;\n \n \/\/ Map\n \n let a = Vec4::new(1f, 2f, 3f, 4f);\n let b = Vec4::new(5f, 6f, 7f, 8f);\n \n let mul_t_avg = do test_avg_time_ns(n_tests) {\n assert a.mul_t(8f) == Vec4::new(8f, 16f, 24f, 32f);\n };\n \n let mul_t_map_avg = do test_avg_time_ns(n_tests) {\n assert a.mul_t_map(8f) == Vec4::new(8f, 16f, 24f, 32f);\n };\n \n let min = [mul_t_avg, mul_t_map_avg].min();\n \n io::println(fmt!(\"mul_t: %d = %d\", mul_t_avg as int, (mul_t_avg - min) as int));\n io::println(fmt!(\"mul_t_map: %d = %d\", mul_t_map_avg as int, (mul_t_map_avg - min) as int));\n \n \/\/ Zip\n \n let add_v_avg = do test_avg_time_ns(n_tests) {\n 
assert a.add_v(&b) == Vec4::new( 6f, 8f, 10f, 12f);\n };\n \n let add_v_map2_avg = do test_avg_time_ns(n_tests) {\n assert a.add_v_map2(&b) == Vec4::new( 6f, 8f, 10f, 12f);\n };\n \n let min = [add_v_avg, add_v_map2_avg].min();\n \n io::println(fmt!(\"add_v: %d = %d\", add_v_avg as int, (add_v_avg - min) as int));\n io::println(fmt!(\"add_v_map2: %d = %d\", add_v_map2_avg as int, (add_v_map2_avg - min) as int));\n \n \/\/ Dot\n \n let dot_avg = do test_avg_time_ns(n_tests) {\n assert a.dot(&b) == 70f;\n };\n \n let dot_foldl_avg = do test_avg_time_ns(n_tests) {\n assert a.dot_foldl(&b) == 70f;\n };\n \n let min = [dot_avg, dot_foldl_avg].min();\n \n io::println(fmt!(\"dot: %d = %d\", dot_avg as int, (dot_avg - min) as int));\n io::println(fmt!(\"dot_foldl: %d = %d\", dot_foldl_avg as int, (dot_foldl_avg - min) as int));\n \n}\n\nfn test_avg_time_ns(n: uint, f: fn&()) -> u64 {\n \n let mut total = 0;\n for n.times {\n let start_time = precise_time_ns();\n \n f();\n \n total += precise_time_ns() - start_time;\n }\n \n return total \/ (n as u64);\n}<|endoftext|>"} {"text":"<commit_before>#![cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n#![feature(const_fn, asm, repr_transparent)]\n#![no_std]\n#![cfg_attr(test, allow(unused_features))]\n\n#[macro_use]\nextern crate bitflags;\nextern crate raw_cpuid;\n#[cfg(feature = \"performance-counter\")]\n#[macro_use]\nextern crate phf;\n\n#[cfg(test)]\n#[macro_use]\nextern crate std;\n\n#[cfg(target_arch = \"x86\")]\nuse core::arch::x86 as arch;\n#[cfg(target_arch = \"x86_64\")]\nuse core::arch::x86_64 as arch;\n\nmacro_rules! bit {\n ($x:expr) => {\n 1 << $x\n };\n}\n\npub mod bits16;\npub mod bits32;\npub mod bits64;\n\npub mod controlregs;\npub mod dtables;\npub mod io;\npub mod irq;\npub mod msr;\npub mod segmentation;\npub mod task;\npub mod time;\npub mod tlb;\n\n#[cfg(feature = \"performance-counter\")]\npub mod perfcnt;\n\npub mod current {\n #[cfg(target_arch = \"x86\")]\n pub use bits32::*;\n #[cfg(target_arch = \"x86_64\")]\n pub use bits64::*;\n}\n\npub mod cpuid {\n pub use raw_cpuid::*;\n}\n\n#[cfg(not(test))]\nmod std {\n pub use core::fmt;\n pub use core::ops;\n pub use core::option;\n}\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n#[repr(u8)]\n\/\/\/ x86 Protection levels\n\/\/\/ Note: This should not contain values larger than 2 bits, otherwise\n\/\/\/ segment descriptor code needs to be adjusted accordingly.\npub enum Ring {\n Ring0 = 0b00,\n Ring1 = 0b01,\n Ring2 = 0b10,\n Ring3 = 0b11,\n}\n\n#[inline(always)]\npub unsafe fn halt() {\n asm!(\"hlt\" :::: \"volatile\");\n}\n<commit_msg>Allow stable features so docs.rs compiles with version 1.28.<commit_after>#![cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n#![allow(stable_features)]\n#![feature(const_fn, asm, repr_transparent)]\n#![no_std]\n#![cfg_attr(test, allow(unused_features))]\n\n#[macro_use]\nextern crate bitflags;\nextern crate raw_cpuid;\n#[cfg(feature = \"performance-counter\")]\n#[macro_use]\nextern crate phf;\n\n#[cfg(test)]\n#[macro_use]\nextern crate std;\n\n#[cfg(target_arch = \"x86\")]\nuse core::arch::x86 as arch;\n#[cfg(target_arch = \"x86_64\")]\nuse core::arch::x86_64 as arch;\n\nmacro_rules! 
bit {\n ($x:expr) => {\n 1 << $x\n };\n}\n\npub mod bits16;\npub mod bits32;\npub mod bits64;\n\npub mod controlregs;\npub mod dtables;\npub mod io;\npub mod irq;\npub mod msr;\npub mod segmentation;\npub mod task;\npub mod time;\npub mod tlb;\n\n#[cfg(feature = \"performance-counter\")]\npub mod perfcnt;\n\npub mod current {\n #[cfg(target_arch = \"x86\")]\n pub use bits32::*;\n #[cfg(target_arch = \"x86_64\")]\n pub use bits64::*;\n}\n\npub mod cpuid {\n pub use raw_cpuid::*;\n}\n\n#[cfg(not(test))]\nmod std {\n pub use core::fmt;\n pub use core::ops;\n pub use core::option;\n}\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n#[repr(u8)]\n\/\/\/ x86 Protection levels\n\/\/\/ Note: This should not contain values larger than 2 bits, otherwise\n\/\/\/ segment descriptor code needs to be adjusted accordingly.\npub enum Ring {\n Ring0 = 0b00,\n Ring1 = 0b01,\n Ring2 = 0b10,\n Ring3 = 0b11,\n}\n\n#[inline(always)]\npub unsafe fn halt() {\n asm!(\"hlt\" :::: \"volatile\");\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/!Log tools.\n\nuse std::old_io::{self, Writer};\nuse std::sync::Mutex;\n\n\/\/\/Common trait for log tools.\npub trait Log {\n\t\/\/\/Print a note to the log.\n\tfn note(&self, message: &str);\n\t\/\/\/Print a warning to the log.\n\tfn warning(&self, message: &str);\n\t\/\/\/Print an error to the log.\n\tfn error(&self, message: &str);\n}\n\n\/\/\/Log tool for printing to standard output.\npub struct StdOut;\n\nimpl Log for StdOut {\n\tfn note(&self, message: &str) {\n\t\tprintln!(\"note: {}\", message);\n\t}\n\n\tfn warning(&self, message: &str) {\n\t\tprintln!(\"warning: {}\", message);\n\t}\n\n\tfn error(&self, message: &str) {\n\t\tprintln!(\"error: {}\", message);\n\t}\n}\n\n\/\/\/Log tool for printing to a file.\npub struct File {\n\tfile: Mutex<old_io::File>\n}\n\nimpl File {\n\tpub fn new(file: old_io::File) -> File {\n\t\tFile {\n\t\t\tfile: Mutex::new(file)\n\t\t}\n\t}\n}\n\nimpl Log for File {\n\tfn note(&self, message: &str) {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"note: {}\", message);\n\t}\n\n\tfn warning(&self, message: &str) {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"warning: {}\", message);\n\t}\n\n\tfn error(&self, message: &str) {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"error: {}\", message);\n\t}\n}\n\n#[cfg(test)]\nmod test {\n\tuse std::old_io::{self, TempDir};\n\tuse log;\n\tuse Server;\n\tuse Context;\n\tuse Response;\n\n\tfn handler(_c: Context, _w: Response) {}\n\n\t#[test]\n\tfn log_to_file() {\n\t\tlet dir = TempDir::new(\"log_to_file\").unwrap();\n\t\tlet file = old_io::File::create(&dir.path().join(\"test.log\")).unwrap();\n\t\tServer::new().handlers(handler).log(log::File::new(file)).build();\n\t}\n}<commit_msg>Return IoResult from log methods<commit_after>\/\/!Log tools.\n\nuse std::old_io::{self, Writer, IoResult};\nuse std::sync::Mutex;\n\n\/\/\/Common trait for log tools.\npub trait Log {\n\t\/\/\/Print a note to the log.\n\tfn note(&self, message: &str) -> IoResult<()>;\n\t\/\/\/Print a warning to the log.\n\tfn warning(&self, message: &str) -> IoResult<()>;\n\t\/\/\/Print an error to the log.\n\tfn error(&self, message: &str) -> IoResult<()>;\n}\n\n\/\/\/Log tool for printing to standard output.\npub struct StdOut;\n\nimpl Log for StdOut {\n\tfn note(&self, message: &str) -> IoResult<()> {\n\t\tprintln!(\"note: 
{}\", message);\n\t\tOk(())\n\t}\n\n\tfn warning(&self, message: &str) -> IoResult<()> {\n\t\tprintln!(\"warning: {}\", message);\n\t\tOk(())\n\t}\n\n\tfn error(&self, message: &str) -> IoResult<()> {\n\t\tprintln!(\"error: {}\", message);\n\t\tOk(())\n\t}\n}\n\n\/\/\/Log tool for printing to a file.\npub struct File {\n\tfile: Mutex<old_io::File>\n}\n\nimpl File {\n\tpub fn new(file: old_io::File) -> File {\n\t\tFile {\n\t\t\tfile: Mutex::new(file)\n\t\t}\n\t}\n}\n\nimpl Log for File {\n\tfn note(&self, message: &str) -> IoResult<()> {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"note: {}\", message)\n\t}\n\n\tfn warning(&self, message: &str) -> IoResult<()> {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"warning: {}\", message)\n\t}\n\n\tfn error(&self, message: &str) -> IoResult<()> {\n\t\tlet mut f = match self.file.lock() {\n\t\t\tOk(f) => f,\n\t\t\tErr(e) => e.into_inner()\n\t\t};\n\t\twrite!(f, \"error: {}\", message)\n\t}\n}\n\n#[cfg(test)]\nmod test {\n\tuse std::old_io::{self, TempDir};\n\tuse log;\n\tuse Server;\n\tuse Context;\n\tuse Response;\n\n\tfn handler(_c: Context, _w: Response) {}\n\n\t#[test]\n\tfn log_to_file() {\n\t\tlet dir = TempDir::new(\"log_to_file\").unwrap();\n\t\tlet file = old_io::File::create(&dir.path().join(\"test.log\")).unwrap();\n\t\tServer::new().handlers(handler).log(log::File::new(file)).build();\n\t}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Make the namespace structure safe.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added type example<commit_after>fn main() {\n \/\/ Type annotated variable\n let _a_float: f64 = 1.0;\n\n \/\/ This variable is an `i32`\n let mut _an_integer = 5i32;\n\n \/\/ Error! 
The type of a variable can't be changed\n \/\/ an_integer = true;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix macos<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Upgraded to latest Gfx<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Add a prelude module that is meant to be glob imported.<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::Mac;\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n Newline,\n ToBase64,\n};\n\npub mod error;\npub mod header;\npub mod claims;\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n}\n<commit_msg>Add token type<commit_after>extern crate crypto;\nextern crate rustc_serialize;\n\nuse crypto::digest::Digest;\nuse crypto::hmac::Hmac;\nuse crypto::mac::Mac;\nuse rustc_serialize::base64::{\n self,\n CharacterSet,\n Newline,\n ToBase64,\n};\nuse header::Header;\nuse claims::Claims;\n\npub mod error;\npub mod header;\npub mod claims;\n\npub struct Token {\n header: Header,\n claims: Claims,\n}\n\nconst BASE_CONFIG: base64::Config = base64::Config {\n char_set: CharacterSet::Standard,\n newline: Newline::LF,\n pad: false,\n line_length: None,\n};\n\nfn sign<D: Digest>(data: &str, key: &str, digest: D) -> String {\n let mut hmac = Hmac::new(digest, key.as_bytes());\n hmac.input(data.as_bytes());\n\n let mac = hmac.result();\n let code = mac.code();\n (*code).to_base64(BASE_CONFIG)\n}\n\n#[cfg(test)]\nmod tests {\n use sign;\n use crypto::sha2::Sha256;\n\n #[test]\n pub fn sign_data() {\n let header = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\";\n let claims = \"eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9\";\n let real_sig = \"TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ\";\n let data = format!(\"{}.{}\", header, claims);\n\n let sig = sign(&*data, \"secret\", Sha256::new());\n\n assert_eq!(sig, real_sig);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>(fix) Document type aliases.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Not actually sure if this will work but we might be able to re-export the various libraries we build so gossyp can be a library too<commit_after>extern crate serde_json;\nextern crate gossyp_base;\nextern crate gossyp_toolkit;\nextern crate gossyp_lang;\n\npub use gossyp_base::*;\npub use gossyp_toolkit::*;\npub use gossyp_lang::*;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add some prototypical objet relational mapping<commit_after>macro_rules! 
rdf{\n (\n $(\n class $name:ident = $id:expr {\n $(\n $prop_name:ident : $prop_type:ty = $prop_id:expr\n ),*\n }\n )*\n ) => (\n $(\n pub struct $name{\n pub id: String\n }\n impl $name{\n pub fn id() -> &'static str{ $id }\n $(\n pub fn $prop_name () -> Option<$prop_type>{\n None\n }\n )*\n }\n )*\n )\n}\n\nmacro_rules! rules{\n (\n {\n $($rule:tt)*\n }=>{\n $($implies:tt)*\n }\n ) => (\n\n )\n}\n\nrdf!{\n class Term = \"http:\/\/www.w3.org\/1999\/02\/22-rdf-syntax-ns#Term\" {\n name: BNode = \"...\"\n }\n class BNode = \"...\"{\n\n }\n class RdfTerm = \"...\"{\n\n }\n class Literal = \"...\"{\n\n }\n class RdfGraph = \"...\"{\n\n }\n class Symbol = \"...\"{\n\n }\n class Formula = \"...\"{\n statements: Symbol = \"...\"\n }\n}\n\nrules!{\n {\n Simon Wohnort Laufach .\n } => {\n |s, p, o| name\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove prepresent_command_buffers, and use command_buffers instead<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add comment processing in tokenizer and fix mistakes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fixed parenthesis level in argparser<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Document Database::new<commit_after><|endoftext|>"} {"text":"<commit_before>use std::sync::Arc;\n\nuse {Wake, Tokens};\nuse stream::{Stream, StreamResult};\n\n\/\/\/ A stream which \"fuse\"s a stream once it's terminated.\n\/\/\/\n\/\/\/ Normally streams can behave unpredictably after they've terminated or\n\/\/\/ returned an error, but `Fuse` is always defined to return `None` from `poll`\n\/\/\/ after terination\/errors, and afterwards all calls to `schedule` will be\n\/\/\/ ignored.\npub struct Fuse<S> {\n stream: Option<S>,\n}\n\npub fn new<S: Stream>(s: S) -> Fuse<S> {\n Fuse { stream: Some(s) }\n}\n\nimpl<S: Stream> Stream for Fuse<S> {\n type Item = S::Item;\n type Error = S::Error;\n\n fn poll(&mut self, tokens: &Tokens) -> Option<StreamResult<S::Item, S::Error>> {\n if let Some(mut s) = self.stream.take() {\n let res = s.poll(tokens);\n match res {\n None => self.stream = Some(s),\n Some(Ok(Some(_))) => self.stream = Some(s),\n _ => {},\n }\n res\n } else {\n Some(Ok(None))\n }\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if let Some(ref mut stream) = self.stream {\n stream.schedule(wake)\n }\n }\n}\n\nimpl<S> Fuse<S> {\n pub fn is_done(&self) -> bool {\n self.stream.is_none()\n }\n}\n<commit_msg>Don't end Stream::fuse on an error, only on Ok(None)<commit_after>use std::sync::Arc;\n\nuse {Wake, Tokens};\nuse stream::{Stream, StreamResult};\n\n\/\/\/ A stream which \"fuse\"s a stream once it's terminated.\n\/\/\/\n\/\/\/ Normally streams can behave unpredictably after they've terminated or\n\/\/\/ returned an error, but `Fuse` is always defined to return `None` from `poll`\n\/\/\/ after terination\/errors, and afterwards all calls to `schedule` will be\n\/\/\/ ignored.\npub struct Fuse<S> {\n stream: Option<S>,\n}\n\npub fn new<S: Stream>(s: S) -> Fuse<S> {\n Fuse { stream: Some(s) }\n}\n\nimpl<S: Stream> Stream for Fuse<S> {\n type Item = S::Item;\n type Error = S::Error;\n\n fn poll(&mut self, tokens: &Tokens) -> Option<StreamResult<S::Item, S::Error>> {\n let ret = self.stream.as_mut().and_then(|s| s.poll(tokens));\n if let Some(Ok(None)) = ret {\n self.stream = None;\n }\n return ret\n }\n\n fn schedule(&mut self, wake: Arc<Wake>) {\n if let Some(ref mut stream) = self.stream {\n stream.schedule(wake)\n }\n }\n}\n\nimpl<S> Fuse<S> {\n pub fn is_done(&self) -> bool {\n 
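\/\/ `stream` is dropped (replaced with `None`) once `poll` has yielded\n        \/\/ `Ok(None)`, so an empty slot means the fused stream is finished.\n        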
self.stream.is_none()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Resouce loader (empty) module<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test data for u8<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>getting travis to work<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nmacro_rules! m {\n ( $( any_token $field_rust_type )* ) => {}; \/\/~ ERROR missing fragment\n}\n\nfn main() {\n m!();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Explain the purpose of src\/buffered.rs.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for assert!() hygiene in edition 2021.<commit_after>\/\/ check-pass\n\/\/ edition:2021\n\n#![no_implicit_prelude]\n\nfn main() {\n assert!(true, \"hoi\");\n assert!(false, \"hoi {}\", 123);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>examples\/8puzzle: Programming Assignment: 8 Puzzle TODO: fails at test case 28<commit_after>\/\/ http:\/\/coursera.cs.princeton.edu\/algs4\/assignments\/8puzzle.html\nextern crate algs4;\n\nuse std::io::prelude::*;\nuse std::io;\nuse std::fmt;\nuse std::cmp::Ordering;\n\nuse algs4::priority_queues::MinPQ;\nuse algs4::priority_queues::binary_heaps::BinaryHeapMinPQ;\n\n#[derive(Debug, Clone, PartialEq, Eq)]\npub struct Board {\n blocks: Vec<Vec<usize>>,\n n: usize\n}\n\nimpl Board {\n pub fn new(blocks: Vec<Vec<usize>>) -> Board {\n assert!(blocks.len() == blocks[0].len());\n let len = blocks.len();\n Board { blocks: blocks, n: len }\n }\n\n pub fn dimension(&self) -> usize {\n return self.n\n }\n pub fn hamming(&self) -> usize {\n let mut num = 0;\n for i in 0 .. self.n {\n for j in 0 .. self.n {\n if self.blocks[i][j] != i * self.n + j + 1 && self.blocks[i][j] != 0 {\n num += 1;\n }\n }\n }\n num\n }\n\n pub fn manhattan(&self) -> usize {\n let mut distance = 0;\n for i in 0 .. self.n {\n for j in 0 .. self.n {\n \/\/let val = i * self.n + j + 1;\n \/\/ if val == 0 {\n \/\/ continue;\n \/\/ }\n let val = self.blocks[i][j];\n if val != i * self.n + j + 1 && val != 0 {\n \/\/ current block's val is not in position\n let actual_row = (val - 1) \/ self.n;\n let actual_col = (val - 1) % self.n;\n\n let dist = (actual_row as isize - i as isize).abs() + (actual_col as isize - j as isize).abs();\n distance += dist as usize;\n }\n }\n }\n distance\n }\n\n pub fn is_goal(&self) -> bool {\n self.hamming() == 0\n }\n\n fn position_of(&self, val: usize) -> (usize, usize) {\n for i in 0 .. self.n {\n for j in 0 .. 
self.n {\n if self.blocks[i][j] == val {\n return (i, j)\n }\n }\n }\n (9999, 9999)\n }\n\n pub fn neighbors(&self) -> Vec<Board> {\n let (row, col) = self.position_of(0);\n let mut positions = Vec::new();\n if row >= 1 {\n positions.push((row-1, col));\n }\n if row < self.n - 1{\n positions.push((row+1, col));\n }\n if col >= 1 {\n positions.push((row, col-1));\n }\n if col < self.n - 1{\n positions.push((row, col+1));\n }\n\n let mut ret = Vec::new();\n for (r, c) in positions {\n let mut b = self.clone();\n b.blocks[row][col] = b.blocks[r][c];\n b.blocks[r][c] = 0;\n ret.push(b);\n }\n ret\n }\n}\n\nimpl fmt::Display for Board {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n for row in self.blocks.iter() {\n try!(write!(f, \"|\"));\n for val in row.iter() {\n try!(write!(f, \"{:-3}\", val));\n }\n try!(write!(f, \" |\\n\"));\n }\n Ok(())\n }\n}\n\n\n\/\/ impl PartialOrd for Board {\n\/\/ fn partial_cmp(&self, other: &Board) -> Option<Ordering> {\n\/\/ self.manhattan().partial_cmp(&other.manhattan())\n\/\/ }\n\/\/ }\n\n\/\/ a search Node\n#[derive(PartialEq)]\npub struct Node {\n board: Board,\n moves: usize,\n operations: Vec<Board>\n}\n\nimpl Node {\n pub fn new(board: Board, moves: usize, operations: Vec<Board>) -> Node {\n Node {\n board: board,\n moves: moves,\n operations: operations\n }\n }\n\n #[inline]\n fn priority(&self) -> usize {\n self.board.manhattan() + self.moves\n }\n\n fn take(self) -> (Board, usize, Vec<Board>) {\n (self.board, self.moves, self.operations)\n }\n}\n\nimpl PartialOrd for Node {\n fn partial_cmp(&self, other: &Node) -> Option<Ordering> {\n self.priority().partial_cmp(&other.priority())\n }\n}\n\npub struct Solver {\n solvable: bool,\n solution: Vec<Board>,\n moves: usize,\n}\n\nimpl Solver {\n pub fn new(initial: Board) -> Solver {\n let mut pq: BinaryHeapMinPQ<Node> = BinaryHeapMinPQ::with_capacity(10240);\n\n pq.insert(Node::new(initial.clone(), 0, Vec::new()));\n\n let max_iteration = initial.dimension().pow(3);\n let mut visited = Vec::new();\n\n visited.push(initial.clone());\n while !pq.is_empty() {\n let (b, mut moves, mut operations) = pq.del_min().unwrap().take();\n if operations.contains(&b) {\n continue;\n }\n\n operations.push(b.clone());\n\n if b.is_goal() {\n return Solver {\n solvable: true,\n solution: operations,\n moves: moves\n }\n }\n\n moves += 1;\n \/\/println!(\"moves => {:2} size={}\", moves, visited.len());\n for neighbor in b.neighbors() {\n if !visited.contains(&neighbor) {\n visited.push(neighbor.clone());\n pq.insert(Node::new(neighbor, moves, operations.clone()));\n }\n }\n\n if moves > max_iteration {\n break;\n }\n\n }\n\n Solver {\n solvable: false,\n solution: Vec::new(),\n moves: 0\n }\n }\n}\n\n\nimpl fmt::Display for Solver {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n if self.solvable {\n try!(writeln!(f, \"Minimum number of moves = {}\", self.moves));\n for m in self.solution.iter() {\n try!(writeln!(f, \"{}\", m));\n }\n } else {\n try!(writeln!(f, \"No solution possible\"));\n }\n Ok(())\n }\n}\n\n\nfn main() {\n let mut lines = io::BufReader::new(io::stdin()).lines();\n let n = lines.next().unwrap().unwrap().parse().unwrap();\n\n let mut blks: Vec<Vec<usize>> = Vec::new();\n for _ in 0 .. 
n {\n let segs: Vec<usize> = lines.next().unwrap().unwrap().split(' ').\n filter(|s| !s.is_empty()).map(|n| n.parse().unwrap()).collect();\n blks.push(segs);\n }\n\n let b = Board::new(blks);\n let solver = Solver::new(b);\n println!(\"{}\", solver);\n\n}\n\n\n#[test]\nfn test_solver() {\n \/\/ let blks = vec![\n \/\/ vec![8, 1, 3],\n \/\/ vec![4, 0, 2],\n \/\/ vec![7, 6, 5],\n \/\/ ];\n let blks = vec![\n vec![0, 1, 3],\n vec![4, 2, 5],\n vec![7, 8, 6],\n ];\n \/\/ unsolveable\n \/\/ let blks = vec![\n \/\/ vec![1, 2, 3],\n \/\/ vec![4, 5, 6],\n \/\/ vec![8, 7, 0],\n \/\/ ];\n\n let b = Board::new(blks);\n println!(\"block:\\n{}\", b);\n println!(\"hamming => {}\", b.hamming());\n println!(\"manhattan => {}\", b.manhattan());\n println!(\"is goal => {}\", b.is_goal());\n\n for i in b.neighbors() {\n println!(\"neighbor:\\n{}\", i);\n }\n\n let solver = Solver::new(b);\n println!(\"{}\", solver);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Disable H2 ALPN.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Version using chennels. Needs cleanup.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create a fibonacci code sample<commit_after>fn fib(n: i64) -> i64 {\n if n == 0 {\n 0\n } else if n == 1 {\n 1\n } else {\n fib(n - 1) + fib(n - 2)\n }\n}\n\nfn main() {\n let argument = 10;\n\n println!(\"fib({}) = {}\", argument, fib(argument));\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Memory profiling functions.\n\nuse libc::{c_char,c_int,c_void,size_t};\nuse std::borrow::ToOwned;\nuse std::ffi::CString;\nuse std::old_io::timer::sleep;\n#[cfg(target_os=\"linux\")]\nuse std::old_io::File;\nuse std::mem;\nuse std::mem::size_of;\n#[cfg(target_os=\"linux\")]\nuse std::env::page_size;\nuse std::ptr::null_mut;\nuse std::sync::mpsc::{Sender, channel, Receiver};\nuse std::time::duration::Duration;\nuse task::spawn_named;\n#[cfg(target_os=\"macos\")]\nuse task_info::task_basic_info::{virtual_size,resident_size};\n\npub struct MemoryProfilerChan(pub Sender<MemoryProfilerMsg>);\n\nimpl MemoryProfilerChan {\n pub fn send(&self, msg: MemoryProfilerMsg) {\n let MemoryProfilerChan(ref c) = *self;\n c.send(msg).unwrap();\n }\n}\n\npub enum MemoryProfilerMsg {\n \/\/\/ Message used to force print the memory profiling metrics.\n Print,\n \/\/\/ Tells the memory profiler to shut down.\n Exit,\n}\n\npub struct MemoryProfiler {\n pub port: Receiver<MemoryProfilerMsg>,\n}\n\nimpl MemoryProfiler {\n pub fn create(period: Option<f64>) -> MemoryProfilerChan {\n let (chan, port) = channel();\n match period {\n Some(period) => {\n let period = Duration::milliseconds((period * 1000f64) as i64);\n let chan = chan.clone();\n spawn_named(\"Memory profiler timer\".to_owned(), move || {\n loop {\n sleep(period);\n if chan.send(MemoryProfilerMsg::Print).is_err() {\n break;\n }\n }\n });\n \/\/ Spawn the memory profiler.\n spawn_named(\"Memory profiler\".to_owned(), move || {\n let memory_profiler = MemoryProfiler::new(port);\n memory_profiler.start();\n });\n }\n None => {\n \/\/ No-op to handle messages when the memory profiler is\n \/\/ inactive.\n spawn_named(\"Memory profiler\".to_owned(), move || {\n loop {\n match port.recv() {\n Err(_) | Ok(MemoryProfilerMsg::Exit) => break,\n _ => {}\n }\n }\n });\n }\n }\n\n MemoryProfilerChan(chan)\n }\n\n pub fn new(port: 
Receiver<MemoryProfilerMsg>) -> MemoryProfiler {\n MemoryProfiler {\n port: port\n }\n }\n\n pub fn start(&self) {\n loop {\n match self.port.recv() {\n Ok(msg) => {\n if !self.handle_msg(msg) {\n break\n }\n }\n _ => break\n }\n }\n }\n\n fn handle_msg(&self, msg: MemoryProfilerMsg) -> bool {\n match msg {\n MemoryProfilerMsg::Print => {\n self.handle_print_msg();\n true\n },\n MemoryProfilerMsg::Exit => false\n }\n }\n\n fn print_measurement(path: &str, nbytes: Option<u64>) {\n match nbytes {\n Some(nbytes) => {\n let mebi = 1024f64 * 1024f64;\n println!(\"{:24}: {:12.2}\", path, (nbytes as f64) \/ mebi);\n }\n None => {\n println!(\"{:24}: {:>12}\", path, \"???\");\n }\n }\n }\n\n fn handle_print_msg(&self) {\n println!(\"{:24}: {:12}\", \"_category_\", \"_size (MiB)_\");\n\n \/\/ Virtual and physical memory usage, as reported by the OS.\n MemoryProfiler::print_measurement(\"vsize\", get_vsize());\n MemoryProfiler::print_measurement(\"resident\", get_resident());\n\n \/\/ Total number of bytes allocated by the application on the system\n \/\/ heap.\n MemoryProfiler::print_measurement(\"system-heap-allocated\",\n get_system_heap_allocated());\n\n \/\/ The descriptions of the following jemalloc measurements are taken\n \/\/ directly from the jemalloc documentation.\n\n \/\/ \"Total number of bytes allocated by the application.\"\n MemoryProfiler::print_measurement(\"jemalloc-heap-allocated\",\n get_jemalloc_stat(\"stats.allocated\"));\n\n \/\/ \"Total number of bytes in active pages allocated by the application.\n \/\/ This is a multiple of the page size, and greater than or equal to\n \/\/ |stats.allocated|.\"\n MemoryProfiler::print_measurement(\"jemalloc-heap-active\",\n get_jemalloc_stat(\"stats.active\"));\n\n \/\/ \"Total number of bytes in chunks mapped on behalf of the application.\n \/\/ This is a multiple of the chunk size, and is at least as large as\n \/\/ |stats.active|. This does not include inactive chunks.\"\n MemoryProfiler::print_measurement(\"jemalloc-heap-mapped\",\n get_jemalloc_stat(\"stats.mapped\"));\n\n println!(\"\");\n }\n}\n\n#[cfg(target_os=\"linux\")]\nextern {\n fn mallinfo() -> struct_mallinfo;\n}\n\n#[cfg(target_os=\"linux\")]\n#[repr(C)]\npub struct struct_mallinfo {\n arena: c_int,\n ordblks: c_int,\n smblks: c_int,\n hblks: c_int,\n hblkhd: c_int,\n usmblks: c_int,\n fsmblks: c_int,\n uordblks: c_int,\n fordblks: c_int,\n keepcost: c_int,\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_system_heap_allocated() -> Option<u64> {\n let mut info: struct_mallinfo;\n unsafe {\n info = mallinfo();\n }\n \/\/ The documentation in the glibc man page makes it sound like |uordblks|\n \/\/ would suffice, but that only gets the small allocations that are put in\n \/\/ the brk heap. We need |hblkhd| as well to get the larger allocations\n \/\/ that are mmapped.\n Some((info.hblkhd + info.uordblks) as u64)\n}\n\n#[cfg(not(target_os=\"linux\"))]\nfn get_system_heap_allocated() -> Option<u64> {\n None\n}\n\nextern {\n fn je_mallctl(name: *const c_char, oldp: *mut c_void, oldlenp: *mut size_t,\n newp: *mut c_void, newlen: size_t) -> c_int;\n}\n\nfn get_jemalloc_stat(value_name: &str) -> Option<u64> {\n \/\/ Before we request the measurement of interest, we first send an \"epoch\"\n \/\/ request. Without that jemalloc gives cached statistics(!) 
which can be\n \/\/ highly inaccurate.\n let epoch_name = \"epoch\";\n let epoch_c_name = CString::from_slice(epoch_name.as_bytes());\n let mut epoch: u64 = 0;\n let epoch_ptr = &mut epoch as *mut _ as *mut c_void;\n let mut epoch_len = size_of::<u64>() as size_t;\n\n let value_c_name = CString::from_slice(value_name.as_bytes());\n let mut value: size_t = 0;\n let value_ptr = &mut value as *mut _ as *mut c_void;\n let mut value_len = size_of::<size_t>() as size_t;\n\n let mut rv: c_int;\n unsafe {\n \/\/ Using the same values for the `old` and `new` parameters is enough\n \/\/ to get the statistics updated.\n rv = je_mallctl(epoch_c_name.as_ptr(), epoch_ptr, &mut epoch_len, epoch_ptr, epoch_len);\n if rv == 0 {\n rv = je_mallctl(value_c_name.as_ptr(), value_ptr, &mut value_len, null_mut(), 0);\n }\n }\n if rv == 0 { Some(value as u64) } else { None }\n}\n\n\/\/ Like std::macros::try!, but for Option<>.\nmacro_rules! option_try(\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n);\n\n#[cfg(target_os=\"linux\")]\nfn get_proc_self_statm_field(field: uint) -> Option<u64> {\n let mut f = File::open(&Path::new(\"\/proc\/self\/statm\"));\n match f.read_to_string() {\n Ok(contents) => {\n let s = option_try!(contents.as_slice().words().nth(field));\n let npages: u64 = option_try!(s.parse().ok());\n Some(npages * (page_size() as u64))\n }\n Err(_) => None\n }\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_vsize() -> Option<u64> {\n get_proc_self_statm_field(0)\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_resident() -> Option<u64> {\n get_proc_self_statm_field(1)\n}\n\n#[cfg(target_os=\"macos\")]\nfn get_vsize() -> Option<u64> {\n virtual_size()\n}\n\n#[cfg(target_os=\"macos\")]\nfn get_resident() -> Option<u64> {\n resident_size()\n}\n\n#[cfg(not(any(target_os=\"linux\", target_os = \"macos\")))]\nfn get_vsize() -> Option<u64> {\n None\n}\n\n#[cfg(not(any(target_os=\"linux\", target_os = \"macos\")))]\nfn get_resident() -> Option<u64> {\n None\n}\n<commit_msg>auto merge of #4927 : Ms2ger\/servo\/mem, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
Memory profiling functions.\n\nuse libc::{c_char,c_int,c_void,size_t};\nuse std::borrow::ToOwned;\nuse std::ffi::CString;\nuse std::old_io::timer::sleep;\n#[cfg(target_os=\"linux\")]\nuse std::old_io::File;\nuse std::mem::size_of;\n#[cfg(target_os=\"linux\")]\nuse std::env::page_size;\nuse std::ptr::null_mut;\nuse std::sync::mpsc::{Sender, channel, Receiver};\nuse std::time::duration::Duration;\nuse task::spawn_named;\n#[cfg(target_os=\"macos\")]\nuse task_info::task_basic_info::{virtual_size,resident_size};\n\npub struct MemoryProfilerChan(pub Sender<MemoryProfilerMsg>);\n\nimpl MemoryProfilerChan {\n pub fn send(&self, msg: MemoryProfilerMsg) {\n let MemoryProfilerChan(ref c) = *self;\n c.send(msg).unwrap();\n }\n}\n\npub enum MemoryProfilerMsg {\n \/\/\/ Message used to force print the memory profiling metrics.\n Print,\n \/\/\/ Tells the memory profiler to shut down.\n Exit,\n}\n\npub struct MemoryProfiler {\n pub port: Receiver<MemoryProfilerMsg>,\n}\n\nimpl MemoryProfiler {\n pub fn create(period: Option<f64>) -> MemoryProfilerChan {\n let (chan, port) = channel();\n match period {\n Some(period) => {\n let period = Duration::milliseconds((period * 1000f64) as i64);\n let chan = chan.clone();\n spawn_named(\"Memory profiler timer\".to_owned(), move || {\n loop {\n sleep(period);\n if chan.send(MemoryProfilerMsg::Print).is_err() {\n break;\n }\n }\n });\n \/\/ Spawn the memory profiler.\n spawn_named(\"Memory profiler\".to_owned(), move || {\n let memory_profiler = MemoryProfiler::new(port);\n memory_profiler.start();\n });\n }\n None => {\n \/\/ No-op to handle messages when the memory profiler is\n \/\/ inactive.\n spawn_named(\"Memory profiler\".to_owned(), move || {\n loop {\n match port.recv() {\n Err(_) | Ok(MemoryProfilerMsg::Exit) => break,\n _ => {}\n }\n }\n });\n }\n }\n\n MemoryProfilerChan(chan)\n }\n\n pub fn new(port: Receiver<MemoryProfilerMsg>) -> MemoryProfiler {\n MemoryProfiler {\n port: port\n }\n }\n\n pub fn start(&self) {\n loop {\n match self.port.recv() {\n Ok(msg) => {\n if !self.handle_msg(msg) {\n break\n }\n }\n _ => break\n }\n }\n }\n\n fn handle_msg(&self, msg: MemoryProfilerMsg) -> bool {\n match msg {\n MemoryProfilerMsg::Print => {\n self.handle_print_msg();\n true\n },\n MemoryProfilerMsg::Exit => false\n }\n }\n\n fn print_measurement(path: &str, nbytes: Option<u64>) {\n match nbytes {\n Some(nbytes) => {\n let mebi = 1024f64 * 1024f64;\n println!(\"{:24}: {:12.2}\", path, (nbytes as f64) \/ mebi);\n }\n None => {\n println!(\"{:24}: {:>12}\", path, \"???\");\n }\n }\n }\n\n fn handle_print_msg(&self) {\n println!(\"{:24}: {:12}\", \"_category_\", \"_size (MiB)_\");\n\n \/\/ Virtual and physical memory usage, as reported by the OS.\n MemoryProfiler::print_measurement(\"vsize\", get_vsize());\n MemoryProfiler::print_measurement(\"resident\", get_resident());\n\n \/\/ Total number of bytes allocated by the application on the system\n \/\/ heap.\n MemoryProfiler::print_measurement(\"system-heap-allocated\",\n get_system_heap_allocated());\n\n \/\/ The descriptions of the following jemalloc measurements are taken\n \/\/ directly from the jemalloc documentation.\n\n \/\/ \"Total number of bytes allocated by the application.\"\n MemoryProfiler::print_measurement(\"jemalloc-heap-allocated\",\n get_jemalloc_stat(\"stats.allocated\"));\n\n \/\/ \"Total number of bytes in active pages allocated by the application.\n \/\/ This is a multiple of the page size, and greater than or equal to\n \/\/ |stats.allocated|.\"\n 
MemoryProfiler::print_measurement(\"jemalloc-heap-active\",\n get_jemalloc_stat(\"stats.active\"));\n\n \/\/ \"Total number of bytes in chunks mapped on behalf of the application.\n \/\/ This is a multiple of the chunk size, and is at least as large as\n \/\/ |stats.active|. This does not include inactive chunks.\"\n MemoryProfiler::print_measurement(\"jemalloc-heap-mapped\",\n get_jemalloc_stat(\"stats.mapped\"));\n\n println!(\"\");\n }\n}\n\n#[cfg(target_os=\"linux\")]\nextern {\n fn mallinfo() -> struct_mallinfo;\n}\n\n#[cfg(target_os=\"linux\")]\n#[repr(C)]\npub struct struct_mallinfo {\n arena: c_int,\n ordblks: c_int,\n smblks: c_int,\n hblks: c_int,\n hblkhd: c_int,\n usmblks: c_int,\n fsmblks: c_int,\n uordblks: c_int,\n fordblks: c_int,\n keepcost: c_int,\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_system_heap_allocated() -> Option<u64> {\n let mut info: struct_mallinfo;\n unsafe {\n info = mallinfo();\n }\n \/\/ The documentation in the glibc man page makes it sound like |uordblks|\n \/\/ would suffice, but that only gets the small allocations that are put in\n \/\/ the brk heap. We need |hblkhd| as well to get the larger allocations\n \/\/ that are mmapped.\n Some((info.hblkhd + info.uordblks) as u64)\n}\n\n#[cfg(not(target_os=\"linux\"))]\nfn get_system_heap_allocated() -> Option<u64> {\n None\n}\n\nextern {\n fn je_mallctl(name: *const c_char, oldp: *mut c_void, oldlenp: *mut size_t,\n newp: *mut c_void, newlen: size_t) -> c_int;\n}\n\nfn get_jemalloc_stat(value_name: &str) -> Option<u64> {\n \/\/ Before we request the measurement of interest, we first send an \"epoch\"\n \/\/ request. Without that jemalloc gives cached statistics(!) which can be\n \/\/ highly inaccurate.\n let epoch_name = \"epoch\";\n let epoch_c_name = CString::from_slice(epoch_name.as_bytes());\n let mut epoch: u64 = 0;\n let epoch_ptr = &mut epoch as *mut _ as *mut c_void;\n let mut epoch_len = size_of::<u64>() as size_t;\n\n let value_c_name = CString::from_slice(value_name.as_bytes());\n let mut value: size_t = 0;\n let value_ptr = &mut value as *mut _ as *mut c_void;\n let mut value_len = size_of::<size_t>() as size_t;\n\n \/\/ Using the same values for the `old` and `new` parameters is enough\n \/\/ to get the statistics updated.\n let rv = unsafe {\n je_mallctl(epoch_c_name.as_ptr(), epoch_ptr, &mut epoch_len, epoch_ptr,\n epoch_len)\n };\n if rv != 0 {\n return None;\n }\n\n let rv = unsafe {\n je_mallctl(value_c_name.as_ptr(), value_ptr, &mut value_len,\n null_mut(), 0)\n };\n if rv != 0 {\n return None;\n }\n\n Some(value as u64)\n}\n\n\/\/ Like std::macros::try!, but for Option<>.\nmacro_rules! 
option_try(\n ($e:expr) => (match $e { Some(e) => e, None => return None })\n);\n\n#[cfg(target_os=\"linux\")]\nfn get_proc_self_statm_field(field: uint) -> Option<u64> {\n let mut f = File::open(&Path::new(\"\/proc\/self\/statm\"));\n match f.read_to_string() {\n Ok(contents) => {\n let s = option_try!(contents.as_slice().words().nth(field));\n let npages: u64 = option_try!(s.parse().ok());\n Some(npages * (page_size() as u64))\n }\n Err(_) => None\n }\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_vsize() -> Option<u64> {\n get_proc_self_statm_field(0)\n}\n\n#[cfg(target_os=\"linux\")]\nfn get_resident() -> Option<u64> {\n get_proc_self_statm_field(1)\n}\n\n#[cfg(target_os=\"macos\")]\nfn get_vsize() -> Option<u64> {\n virtual_size()\n}\n\n#[cfg(target_os=\"macos\")]\nfn get_resident() -> Option<u64> {\n resident_size()\n}\n\n#[cfg(not(any(target_os=\"linux\", target_os = \"macos\")))]\nfn get_vsize() -> Option<u64> {\n None\n}\n\n#[cfg(not(any(target_os=\"linux\", target_os = \"macos\")))]\nfn get_resident() -> Option<u64> {\n None\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cmd: Include exit code\/reason on failure<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Only create directory if it does not exist<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: PathIterBuilder should not yield directories<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix types<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implement io::native::stdio<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse libc;\nuse option::Option;\nuse rt::io::{Reader, Writer};\nuse super::file;\n\n\/\/\/ Creates a new handle to the stdin of this process\npub fn stdin() -> StdIn { StdIn::new() }\n\/\/\/ Creates a new handle to the stdout of this process\npub fn stdout() -> StdOut { StdOut::new(libc::STDOUT_FILENO) }\n\/\/\/ Creates a new handle to the stderr of this process\npub fn stderr() -> StdOut { StdOut::new(libc::STDERR_FILENO) }\n\npub fn print(s: &str) {\n stdout().write(s.as_bytes())\n}\n\npub fn println(s: &str) {\n let mut out = stdout();\n out.write(s.as_bytes());\n out.write(['\\n' as u8]);\n}\n\npub struct StdIn {\n priv fd: file::FileDesc\n}\n\nimpl StdIn {\n \/\/\/ Duplicates the stdin file descriptor, returning an io::Reader\n #[fixed_stack_segment] #[inline(never)]\n pub fn new() -> StdIn {\n let fd = unsafe { libc::dup(libc::STDIN_FILENO) };\n StdIn { fd: file::FileDesc::new(fd) }\n }\n}\n\nimpl Reader for StdIn {\n fn read(&mut self, buf: &mut [u8]) -> Option<uint> { self.fd.read(buf) }\n fn eof(&mut self) -> bool { self.fd.eof() }\n}\n\npub struct StdOut {\n priv fd: file::FileDesc\n}\n\nimpl StdOut {\n \/\/\/ Duplicates the specified file descriptor, returning an io::Writer\n #[fixed_stack_segment] #[inline(never)]\n pub fn new(fd: file::fd_t) -> StdOut {\n let fd = unsafe { libc::dup(fd) };\n StdOut { fd: file::FileDesc::new(fd) }\n }\n}\n\nimpl Writer for StdOut {\n fn write(&mut self, buf: &[u8]) { self.fd.write(buf) }\n fn flush(&mut self) { self.fd.flush() }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. 
Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. 
Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\n}\n\nregister_diagnostics! 
{\n E0538, \/\/ multiple [same] items\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n}\n<commit_msg>Long diagnostic for E0538<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. 
Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0538: r##\"\nAttribute contains multiple of the same meta item.\n\nErroneous code example:\n\n```compile_fail,E0538\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\",\n note=\"Second deprecation note.\" \/\/ error: multiple same meta item\n)]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. Each key may only be\nused once in each attribute.\n\nTo fix the problem, remove all but one of the meta items with the same key.\n\nExample:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. 
Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. 
In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\n}\n\nregister_diagnostics! {\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for ICE #33287<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nconst A: [u32; 1] = [0];\n\nfn test() {\n let range = A[1]..;\n}\n\nfn main() { }\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add regression test<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #49685: drop elaboration was not revealing the\n\/\/ value of `impl Trait` returns, leading to an ICE.\n\nfn main() {\n let _ = Some(())\n .into_iter()\n .flat_map(|_| Some(()).into_iter().flat_map(func));\n}\n\nfn func(_: ()) -> impl Iterator<Item = ()> {\n Some(()).into_iter().flat_map(|_| vec![])\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Document -Z extra-link-arg.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename `error_type` to `kind` in `protocol::QueryError`<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! An implementation of Google Protobuf's Variable-Length Integers\n\n#![cfg_attr(feature = \"nightly\", feature(test))]\n\nextern crate bit_utils;\n\nmod zigzag;\n\npub use zigzag::ZigZag;\n\n#[cfg(feature = \"io-operations\")]\nextern crate io_operations;\n\n#[cfg(feature = \"io-operations\")]\nmod iooperations;\n\n\/\/#[cfg(feature = \"io-operations\")]\n\/\/pub use io-operations;\n\n\/\/\/ The maximum number of bytes used by a 32-bit Varint\npub const VARINT_32_MAX_BYTES: usize = 5;\n\n\/\/\/ The maximum number of bytes used by a 32-bit Varint\npub const VARINT_64_MAX_BYTES: usize = 10;\n<commit_msg>Publically export VarintReader and VarintWriter<commit_after>\/\/! An implementation of Google Protobuf's Variable-Length Integers\n\n#![cfg_attr(feature = \"nightly\", feature(test))]\n\nextern crate bit_utils;\n\nmod zigzag;\n\npub use zigzag::ZigZag;\n\n#[cfg(feature = \"io-operations\")]\nextern crate io_operations;\n\n#[cfg(feature = \"io-operations\")]\nmod iooperations;\n\n#[cfg(feature = \"io-operations\")]\npub use iooperations::VarintReader as VarintReader;\n#[cfg(feature = \"io-operations\")]\npub use iooperations::VarintWriter as VarintWriter;\n\n\/\/\/ The maximum number of bytes used by a 32-bit Varint\npub const VARINT_32_MAX_BYTES: usize = 5;\n\n\/\/\/ The maximum number of bytes used by a 32-bit Varint\npub const VARINT_64_MAX_BYTES: usize = 10;\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[deny(missing_docs)]\n\nextern crate gfx;\nextern crate gfx_device_gl;\nextern crate glfw;\n\nuse gfx::tex::Size;\n\n\/\/\/ A wrapper around the window that implements `Output`.\npub struct Wrap<R: gfx::Resources> {\n \/\/\/ Glutin window in the open.\n pub window: glfw::Window,\n frame: gfx::FrameBufferHandle<R>,\n mask: gfx::Mask,\n gamma: gfx::Gamma,\n}\n\nimpl<R: gfx::Resources> gfx::Output<R> for Wrap<R> {\n fn get_handle(&self) -> Option<&gfx::FrameBufferHandle<R>> {\n Some(&self.frame)\n }\n\n fn get_size(&self) -> (Size, Size) {\n let (w, h) = self.window.get_framebuffer_size();\n (w as Size, h as Size)\n }\n\n fn get_mask(&self) -> gfx::Mask {\n self.mask\n }\n\n fn get_gamma(&self) -> gfx::Gamma {\n self.gamma\n }\n}\n\n\n\/\/\/ Result of successful context initialization.\npub 
type Success = (\n Wrap<gfx_device_gl::Resources>,\n gfx_device_gl::Device,\n gfx_device_gl::Factory,\n);\n\n\n\/\/\/ Initialize with a window.\npub fn init(mut window: glfw::Window) -> Success {\n use gflw::Context;\n window.make_current();\n let (device, factory) = gfx_device_gl::create(|s| window.get_proc_address(s));\n let wrap = Wrap {\n window: window,\n frame: factory.get_main_frame_buffer(),\n mask: gfx::COLOR | gfx::DEPTH | gfx::STENCIL, \/\/TODO\n gamma: gfx::Gamma::Original, \/\/TODO\n };\n (wrap, device, factory)\n}\n<commit_msg>Implemented Window<commit_after>\/\/ Copyright 2015 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[deny(missing_docs)]\n\nextern crate gfx;\nextern crate gfx_device_gl;\nextern crate glfw;\n\nuse gfx::tex::Size;\nuse glfw::Context;\n\n\/\/\/ A wrapper around the window that implements `Output`.\npub struct Wrap<R: gfx::Resources> {\n \/\/\/ Glutin window in the open.\n pub window: glfw::Window,\n frame: gfx::FrameBufferHandle<R>,\n mask: gfx::Mask,\n gamma: gfx::Gamma,\n}\n\nimpl<R: gfx::Resources> gfx::Output<R> for Wrap<R> {\n fn get_handle(&self) -> Option<&gfx::FrameBufferHandle<R>> {\n Some(&self.frame)\n }\n\n fn get_size(&self) -> (Size, Size) {\n let (w, h) = self.window.get_framebuffer_size();\n (w as Size, h as Size)\n }\n\n fn get_mask(&self) -> gfx::Mask {\n self.mask\n }\n\n fn get_gamma(&self) -> gfx::Gamma {\n self.gamma\n }\n}\n\nimpl<R: gfx::Resources> gfx::Window<R> for Wrap<R> {\n fn swap_buffers(&mut self) {\n self.window.swap_buffers();\n }\n}\n\n\n\/\/\/ Result of successful context initialization.\npub type Success = (\n Wrap<gfx_device_gl::Resources>,\n gfx_device_gl::Device,\n gfx_device_gl::Factory,\n);\n\n\n\/\/\/ Initialize with a window.\npub fn init(mut window: glfw::Window) -> Success {\n window.make_current();\n let (device, factory) = gfx_device_gl::create(|s| window.get_proc_address(s));\n let wrap = Wrap {\n window: window,\n frame: factory.get_main_frame_buffer(),\n mask: gfx::COLOR | gfx::DEPTH | gfx::STENCIL, \/\/TODO\n gamma: gfx::Gamma::Original, \/\/TODO\n };\n (wrap, device, factory)\n}\n<|endoftext|>"} {"text":"<commit_before>use core::prelude::*;\n\nuse aspace::mapflag;\nuse con;\nuse con::write;\nuse cpu;\nuse process;\nuse process::Handle;\nuse process::Process;\nuse start32::kernel_base;\nuse util::abort;\n\nstatic log_syscall : bool = false;\nstatic log_transfer_message : bool = false;\nstatic log_portio : bool = false;\nstatic log_hmod : bool = false;\n\nstatic log_recv : bool = false;\n\npub mod nr {\n #![allow(dead_code)]\n pub static RECV : uint = 0;\n pub static MAP : uint = 1;\n pub static PFAULT : uint = 2;\n pub static UNMAP : uint = 3;\n pub static HMOD : uint = 4;\n pub static NEWPROC : uint = 5;\n pub static WRITE : uint = 6;\n pub static PORTIO : uint = 7;\n pub static GRANT : uint = 8;\n pub static PULSE : uint = 9;\n\n pub static USER : uint = 16;\n\n pub static MSG_MASK : uint = 0xff;\n pub static MSG_KIND_MASK : uint = 
0x300;\n pub static MSG_KIND_SEND : uint = 0x000;\n pub static MSG_KIND_CALL : uint = 0x100;\n}\n\n\/\/ Note: tail-called from the syscall code, \"return\" by switching to a process.\n#[no_mangle]\npub fn syscall(\n arg0: uint,\n arg1: uint,\n arg2: uint,\n arg3: uint,\n arg4: uint,\n arg5: uint,\n nr : uint, \/\/ saved_rax\n) -> ! {\n use syscall::nr::*;\n\n let p = unsafe { cpu().get_process() };\n \/\/ FIXME cpu.leave_proc?\n p.unset(process::Running);\n p.set(process::FastRet);\n\n if log_syscall {\n write(\"syscall! nr=\");\n con::writeUInt(nr);\n write(\" from process \");\n con::writeMutPtr(p);\n con::newline();\n }\n\n match nr {\n RECV => ipc_recv(p, arg0),\n MAP => syscall_map(p, arg0, arg1, arg2, arg3, arg4),\n PFAULT => syscall_pfault(p, arg1, arg2), \/\/ arg0 is always 0\n HMOD => syscall_hmod(p, arg0, arg1, arg2),\n PORTIO => syscall_portio(p, arg0, arg1, arg2),\n WRITE => {\n con::putc(arg0 as u8 as char);\n cpu().syscall_return(p, 0);\n },\n _ if nr >= USER => {\n match nr & MSG_KIND_MASK {\n MSG_KIND_CALL => ipc_call(p, nr & MSG_MASK, arg0, arg1, arg2, arg3, arg4, arg5),\n MSG_KIND_SEND => ipc_send(p, nr & MSG_MASK, arg0, arg1, arg2, arg3, arg4, arg5),\n _ => abort(\"Unknown IPC kind\")\n }\n },\n _ => abort(\"Unhandled syscall\"),\n }\n\n if p.is_runnable() {\n abort(\"process not blocked at return\");\n }\n\n unsafe { cpu().run(); }\n}\n\nfn ipc_call(p : &mut Process, msg : uint, to : uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n write(\"ipc_call to \");\n con::writeUInt(to);\n con::newline();\n\n let handle = p.find_handle(to);\n match handle {\n Some(h) => {\n write(\"==> process \");\n con::writeMutPtr(h.process());\n con::newline();\n p.set(process::InSend);\n p.set(process::InRecv);\n p.regs.rdi = to;\n send_or_block(h, msg, arg1, arg2, arg3, arg4, arg5);\n },\n None => abort(\"ipc_call: no recipient\")\n }\n write(\"ipc_call: blocked\\n\");\n}\n\nfn transfer_set_handle(target: &mut Process, source: &mut Process) {\n let mut rcpt = target.regs.rdi;\n let from = source.regs.rdi;\n\n let h = source.find_handle(from).unwrap();\n if rcpt == 0 {\n rcpt = h.other().unwrap().id();\n } else if !target.find_handle(rcpt).is_some() {\n match h.other() {\n \/\/ Already associated, \"junk\" the fresh handle and just update the\n \/\/ recipient-side handle.\n Some(g) => rcpt = g.id(),\n \/\/ Associate handles now.\n None => {\n let g = target.new_handle(rcpt, source);\n g.associate(h);\n },\n }\n } else {\n \/\/ TODO Assert that rcpt <-> from. (But the caller is responsible for\n \/\/ checking that first.)\n }\n target.regs.rdi = rcpt;\n}\n\nfn transfer_message(target: &mut Process, source: &mut Process) -> ! 
{\n transfer_set_handle(target, source);\n\n if log_transfer_message {\n write(\"transfer_message \");\n con::writeMutPtr(target);\n write(\" <- \");\n con::writeMutPtr(source);\n con::newline();\n }\n\n target.regs.rax = source.regs.rax;\n target.regs.rdi = source.regs.rdi;\n target.regs.rsi = source.regs.rsi;\n target.regs.rdx = source.regs.rdx;\n target.regs.r8 = source.regs.r8;\n target.regs.r9 = source.regs.r9;\n target.regs.r10 = source.regs.r10;\n\n target.unset(process::InRecv);\n source.unset(process::InSend);\n\n let c = cpu();\n c.queue(target);\n if source.ipc_state() == 0 {\n c.queue(source);\n }\n unsafe { c.run(); }\n}\n\nfn send_or_block(h : &mut Handle, msg: uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n match h.other() {\n Some(g) => {\n let p = h.process();\n let sender = g.process();\n\n \/\/ Save regs - either we'll copy these in transfer_message or we'll\n \/\/ need to store them until later on when the transfer can finish.\n sender.regs.rax = msg;\n sender.regs.rdi = h.id();\n sender.regs.rsi = arg1;\n sender.regs.rdx = arg2;\n sender.regs.r10 = arg3;\n sender.regs.r8 = arg4;\n sender.regs.r9 = arg5;\n\n \/\/ p is the recipient, the sender is in g.process().\n if p.ipc_state() == process::InRecv.mask() {\n let rcpt = h.process().regs.rdi;\n \/\/ Check the receiving process' receipt handle\n \/\/ 0 ==> transfer\n \/\/ !0, connected to our handle ==> transfer\n \/\/ !0, fresh ==> transfer\n \/\/ !0 otherwise ==> block\n if rcpt == 0\n || rcpt == g.id()\n || !p.find_handle(rcpt).is_some() {\n transfer_message(p, sender);\n }\n }\n\n p.add_waiter(sender)\n },\n None => abort(\"sending to unconnected handle\"),\n }\n}\n\nfn ipc_send(p : &mut Process, msg : uint, to : uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n let handle = p.find_handle(to);\n match handle {\n Some(h) => {\n p.set(process::InSend);\n send_or_block(h, msg, arg1, arg2, arg3, arg4, arg5);\n },\n None => abort(\"ipc_send: no recipient\")\n }\n}\n\nfn ipc_recv(p : &mut Process, from : uint) {\n let mut handle = None;\n if from != 0 {\n handle = p.find_handle(from);\n }\n\n if log_recv {\n con::writeMutPtr(p);\n write(\" recv from \");\n con::writeUInt(from);\n }\n\n p.set(process::InRecv);\n p.regs.rdi = from;\n match handle {\n Some(h) => {\n if log_recv {\n write(\" ==> process \");\n con::writeMutPtr(h.process());\n con::newline();\n }\n recv(p, h)\n },\n None => {\n if log_recv && from != 0 {\n write(\" ==> fresh\\n\");\n }\n recv_from_any(p, from)\n }\n }\n}\n\nfn recv(p: &mut Process, handle: &mut Handle) {\n let rcpt = handle.process();\n if rcpt.is(process::InSend) {\n abort(\"recv-from-specific not implemented\");\n } else {\n rcpt.add_waiter(p);\n }\n}\n\nfn recv_from_any(p : &mut Process, _id: uint) {\n let mut sender = None;\n for waiter in p.waiters.iter() {\n if waiter.is(process::InSend) {\n sender = Some(waiter);\n break;\n }\n }\n match sender {\n Some(s) => {\n p.remove_waiter(s);\n transfer_message(p, s);\n },\n None => ()\n }\n\n if log_recv {\n write(\"recv: no waiters for \");\n con::writeMutPtr(p);\n con::newline();\n }\n\n \/\/ TODO Look for pending pulse\n \/\/ 2. Look for pending pulses\n \/\/ 3. 
Switch next\n}\n\nfn syscall_map(p: &mut Process, handle: uint, mut prot: uint, addr: uint, mut offset: uint, size: uint) {\n prot &= mapflag::UserAllowed;\n \/\/ TODO Check (and return failure) on:\n \/\/ * unaligned addr, offset, size (must be page-aligned)\n if (prot & mapflag::DMA) == mapflag::DMA {\n offset = match cpu().memory.alloc_frame() {\n None => 0,\n Some(p) => p as uint - kernel_base,\n }\n }\n\n p.aspace().map_range(addr, addr + size, handle, (offset - addr) | prot);\n\n if (prot & mapflag::Phys) == 0 {\n offset = 0;\n }\n cpu().syscall_return(p, offset);\n}\n\nfn syscall_pfault(p : &mut Process, mut vaddr: uint, access: uint) {\n vaddr &= !0xfff;\n\n \/\/ set fault address\n p.fault_addr = vaddr;\n p.regs.rsi = vaddr;\n p.regs.rdx = access & mapflag::RWX;\n \/\/ Look up vaddr, get handle, offset and flags\n let card = p.aspace().mapcard_find_def(vaddr);\n p.regs.rsi += card.offset; \/\/ proc.rsi is now translated into offset\n p.regs.rdi = card.handle;\n p.set(process::PFault);\n\n \/\/ Now do the equivalent of sendrcv with rdi=handle, rsi=offset, rdx=flags\n}\n\nfn syscall_hmod(p : &mut Process, id: uint, rename: uint, copy: uint) {\n let handle = p.find_handle(id);\n if log_hmod {\n con::writeMutPtr(p);\n write(\" hmod: id=\"); con::writeHex(id);\n write(\" rename=\"); con::writeHex(rename);\n write(\" copy=\"); con::writeHex(copy);\n con::newline();\n }\n match handle {\n None => (),\n Some(h) => {\n \/\/ Fresh\/dissociated handle for the same process as the original\n if copy != 0 {\n p.new_handle(copy, h.process());\n }\n if rename == 0 {\n p.delete_handle(h);\n } else if rename != id {\n p.rename_handle(h, rename);\n }\n }\n }\n cpu().syscall_return(p, 0);\n}\n\nfn syscall_portio(p : &mut Process, port : uint, op : uint, data: uint) -> ! 
{\n let mut res : uint = 0;\n unsafe { match op {\n 0x01 => asm!(\"inb %dx, %al\" : \"={al}\"(res) : \"{dx}\"(port)),\n 0x02 => asm!(\"inw %dx, %ax\" : \"={ax}\"(res) : \"{dx}\"(port)),\n 0x04 => asm!(\"inl %dx, %eax\" : \"={eax}\"(res) : \"{dx}\"(port)),\n 0x11 => asm!(\"outb %al, %dx\" :: \"{al}\"(data), \"{dx}\"(port)),\n 0x12 => asm!(\"outw %ax, %dx\" :: \"{ax}\"(data), \"{dx}\"(port)),\n 0x14 => asm!(\"outl %eax, %dx\" :: \"{eax}\"(data), \"{dx}\"(port)),\n _ => abort(\"unhandled portio operation\")\n } }\n if log_portio {\n write(\"portio: port=\"); con::writeHex(port & 0xffff);\n write(\" op=\"); con::writeUInt(op);\n write(\" data=\"); con::writeHex(data);\n write(\" res=\"); con::writeHex(res);\n con::newline();\n }\n cpu().syscall_return(p, res);\n}\n<commit_msg>IPC logging tweaks<commit_after>use core::prelude::*;\n\nuse aspace::mapflag;\nuse con;\nuse con::write;\nuse cpu;\nuse process;\nuse process::Handle;\nuse process::Process;\nuse start32::kernel_base;\nuse util::abort;\n\nstatic log_syscall : bool = false;\nstatic log_transfer_message : bool = false;\nstatic log_portio : bool = false;\nstatic log_hmod : bool = false;\n\nstatic log_recv : bool = false;\nstatic log_ipc : bool = false;\n\npub mod nr {\n #![allow(dead_code)]\n pub static RECV : uint = 0;\n pub static MAP : uint = 1;\n pub static PFAULT : uint = 2;\n pub static UNMAP : uint = 3;\n pub static HMOD : uint = 4;\n pub static NEWPROC : uint = 5;\n pub static WRITE : uint = 6;\n pub static PORTIO : uint = 7;\n pub static GRANT : uint = 8;\n pub static PULSE : uint = 9;\n\n pub static USER : uint = 16;\n\n pub static MSG_MASK : uint = 0xff;\n pub static MSG_KIND_MASK : uint = 0x300;\n pub static MSG_KIND_SEND : uint = 0x000;\n pub static MSG_KIND_CALL : uint = 0x100;\n}\n\n\/\/ Note: tail-called from the syscall code, \"return\" by switching to a process.\n#[no_mangle]\npub fn syscall(\n arg0: uint,\n arg1: uint,\n arg2: uint,\n arg3: uint,\n arg4: uint,\n arg5: uint,\n nr : uint, \/\/ saved_rax\n) -> ! {\n use syscall::nr::*;\n\n let p = unsafe { cpu().get_process() };\n \/\/ FIXME cpu.leave_proc?\n p.unset(process::Running);\n p.set(process::FastRet);\n\n if log_syscall {\n write(\"syscall! 
nr=\");\n con::writeUInt(nr);\n write(\" from process \");\n con::writeMutPtr(p);\n con::newline();\n }\n\n match nr {\n RECV => ipc_recv(p, arg0),\n MAP => syscall_map(p, arg0, arg1, arg2, arg3, arg4),\n PFAULT => syscall_pfault(p, arg1, arg2), \/\/ arg0 is always 0\n HMOD => syscall_hmod(p, arg0, arg1, arg2),\n PORTIO => syscall_portio(p, arg0, arg1, arg2),\n WRITE => {\n con::putc(arg0 as u8 as char);\n cpu().syscall_return(p, 0);\n },\n _ if nr >= USER => {\n match nr & MSG_KIND_MASK {\n MSG_KIND_CALL => ipc_call(p, nr & MSG_MASK, arg0, arg1, arg2, arg3, arg4, arg5),\n MSG_KIND_SEND => ipc_send(p, nr & MSG_MASK, arg0, arg1, arg2, arg3, arg4, arg5),\n _ => abort(\"Unknown IPC kind\")\n }\n },\n _ => abort(\"Unhandled syscall\"),\n }\n\n if p.is_runnable() {\n abort(\"process not blocked at return\");\n }\n\n unsafe { cpu().run(); }\n}\n\nfn ipc_call(p : &mut Process, msg : uint, to : uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n if log_ipc {\n con::writeMutPtr(p);\n write(\" ipc_call to \");\n con::writeUInt(to);\n }\n\n let handle = p.find_handle(to);\n match handle {\n Some(h) => {\n if log_ipc {\n write(\"==> process \");\n con::writeMutPtr(h.process());\n con::newline();\n }\n\n p.set(process::InSend);\n p.set(process::InRecv);\n p.regs.rdi = to;\n send_or_block(h, msg, arg1, arg2, arg3, arg4, arg5);\n },\n None => abort(\"ipc_call: no recipient\")\n }\n if log_ipc {\n write(\"ipc_call: blocked\\n\");\n }\n}\n\nfn transfer_set_handle(target: &mut Process, source: &mut Process) {\n let mut rcpt = target.regs.rdi;\n let from = source.regs.rdi;\n\n let h = source.find_handle(from).unwrap();\n if rcpt == 0 {\n rcpt = h.other().unwrap().id();\n } else if !target.find_handle(rcpt).is_some() {\n match h.other() {\n \/\/ Already associated, \"junk\" the fresh handle and just update the\n \/\/ recipient-side handle.\n Some(g) => rcpt = g.id(),\n \/\/ Associate handles now.\n None => {\n let g = target.new_handle(rcpt, source);\n g.associate(h);\n },\n }\n } else {\n \/\/ TODO Assert that rcpt <-> from. (But the caller is responsible for\n \/\/ checking that first.)\n }\n if log_transfer_message {\n write(\"transfer_set_handle: rcpt=\");\n con::writeHex(rcpt);\n write(\" for \");\n con::writeHex(target.regs.rdi);\n con::newline();\n }\n target.regs.rdi = rcpt;\n}\n\nfn transfer_message(target: &mut Process, source: &mut Process) -> ! 
{\n transfer_set_handle(target, source);\n\n if log_transfer_message {\n write(\"transfer_message \");\n con::writeMutPtr(target);\n write(\" <- \");\n con::writeMutPtr(source);\n con::newline();\n }\n\n target.regs.rax = source.regs.rax;\n target.regs.rdi = source.regs.rdi;\n target.regs.rsi = source.regs.rsi;\n target.regs.rdx = source.regs.rdx;\n target.regs.r8 = source.regs.r8;\n target.regs.r9 = source.regs.r9;\n target.regs.r10 = source.regs.r10;\n\n target.unset(process::InRecv);\n source.unset(process::InSend);\n\n let c = cpu();\n c.queue(target);\n if source.ipc_state() == 0 {\n c.queue(source);\n }\n unsafe { c.run(); }\n}\n\nfn send_or_block(h : &mut Handle, msg: uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n match h.other() {\n Some(g) => {\n let p = h.process();\n let sender = g.process();\n\n \/\/ Save regs - either we'll copy these in transfer_message or we'll\n \/\/ need to store them until later on when the transfer can finish.\n sender.regs.rax = msg;\n sender.regs.rdi = h.id();\n sender.regs.rsi = arg1;\n sender.regs.rdx = arg2;\n sender.regs.r10 = arg3;\n sender.regs.r8 = arg4;\n sender.regs.r9 = arg5;\n\n \/\/ p is the recipient, the sender is in g.process().\n if p.ipc_state() == process::InRecv.mask() {\n let rcpt = h.process().regs.rdi;\n \/\/ Check the receiving process' receipt handle\n \/\/ 0 ==> transfer\n \/\/ !0, connected to our handle ==> transfer\n \/\/ !0, fresh ==> transfer\n \/\/ !0 otherwise ==> block\n if rcpt == 0\n || rcpt == g.id()\n || !p.find_handle(rcpt).is_some() {\n transfer_message(p, sender);\n }\n }\n\n p.add_waiter(sender)\n },\n None => abort(\"sending to unconnected handle\"),\n }\n}\n\nfn ipc_send(p : &mut Process, msg : uint, to : uint, arg1: uint, arg2: uint,\n arg3: uint, arg4: uint, arg5: uint) {\n if log_ipc {\n con::writeMutPtr(p);\n write(\" ipc_send to \");\n con::writeUInt(to);\n con::newline();\n }\n\n let handle = p.find_handle(to);\n match handle {\n Some(h) => {\n p.set(process::InSend);\n send_or_block(h, msg, arg1, arg2, arg3, arg4, arg5);\n },\n None => abort(\"ipc_send: no recipient\")\n }\n}\n\nfn ipc_recv(p : &mut Process, from : uint) {\n let mut handle = None;\n if from != 0 {\n handle = p.find_handle(from);\n }\n\n if log_recv {\n con::writeMutPtr(p);\n write(\" recv from \");\n con::writeUInt(from);\n }\n\n p.set(process::InRecv);\n p.regs.rdi = from;\n match handle {\n Some(h) => {\n if log_recv {\n write(\" ==> process \");\n con::writeMutPtr(h.process());\n con::newline();\n }\n recv(p, h)\n },\n None => {\n if log_recv && from != 0 {\n write(\" ==> fresh\\n\");\n }\n recv_from_any(p, from)\n }\n }\n}\n\nfn recv(p: &mut Process, handle: &mut Handle) {\n let rcpt = handle.process();\n if rcpt.is(process::InSend) {\n abort(\"recv-from-specific not implemented\");\n } else {\n rcpt.add_waiter(p);\n }\n}\n\nfn recv_from_any(p : &mut Process, _id: uint) {\n let mut sender = None;\n for waiter in p.waiters.iter() {\n if waiter.is(process::InSend) {\n sender = Some(waiter);\n break;\n }\n }\n match sender {\n Some(s) => {\n p.remove_waiter(s);\n transfer_message(p, s);\n },\n None => ()\n }\n\n if log_recv {\n write(\"recv: no waiters for \");\n con::writeMutPtr(p);\n con::newline();\n }\n\n \/\/ TODO Look for pending pulse\n \/\/ 2. Look for pending pulses\n \/\/ 3. 
Switch next\n}\n\nfn syscall_map(p: &mut Process, handle: uint, mut prot: uint, addr: uint, mut offset: uint, size: uint) {\n prot &= mapflag::UserAllowed;\n \/\/ TODO Check (and return failure) on:\n \/\/ * unaligned addr, offset, size (must be page-aligned)\n if (prot & mapflag::DMA) == mapflag::DMA {\n offset = match cpu().memory.alloc_frame() {\n None => 0,\n Some(p) => p as uint - kernel_base,\n }\n }\n\n p.aspace().map_range(addr, addr + size, handle, (offset - addr) | prot);\n\n if (prot & mapflag::Phys) == 0 {\n offset = 0;\n }\n cpu().syscall_return(p, offset);\n}\n\nfn syscall_pfault(p : &mut Process, mut vaddr: uint, access: uint) {\n vaddr &= !0xfff;\n\n \/\/ set fault address\n p.fault_addr = vaddr;\n p.regs.rsi = vaddr;\n p.regs.rdx = access & mapflag::RWX;\n \/\/ Look up vaddr, get handle, offset and flags\n let card = p.aspace().mapcard_find_def(vaddr);\n p.regs.rsi += card.offset; \/\/ proc.rsi is now translated into offset\n p.regs.rdi = card.handle;\n p.set(process::PFault);\n\n \/\/ Now do the equivalent of sendrcv with rdi=handle, rsi=offset, rdx=flags\n}\n\nfn syscall_hmod(p : &mut Process, id: uint, rename: uint, copy: uint) {\n let handle = p.find_handle(id);\n if log_hmod {\n con::writeMutPtr(p);\n write(\" hmod: id=\"); con::writeHex(id);\n write(\" rename=\"); con::writeHex(rename);\n write(\" copy=\"); con::writeHex(copy);\n con::newline();\n }\n match handle {\n None => (),\n Some(h) => {\n \/\/ Fresh\/dissociated handle for the same process as the original\n if copy != 0 {\n p.new_handle(copy, h.process());\n }\n if rename == 0 {\n p.delete_handle(h);\n } else if rename != id {\n p.rename_handle(h, rename);\n }\n }\n }\n cpu().syscall_return(p, 0);\n}\n\nfn syscall_portio(p : &mut Process, port : uint, op : uint, data: uint) -> ! 
{\n let mut res : uint = 0;\n unsafe { match op {\n 0x01 => asm!(\"inb %dx, %al\" : \"={al}\"(res) : \"{dx}\"(port)),\n 0x02 => asm!(\"inw %dx, %ax\" : \"={ax}\"(res) : \"{dx}\"(port)),\n 0x04 => asm!(\"inl %dx, %eax\" : \"={eax}\"(res) : \"{dx}\"(port)),\n 0x11 => asm!(\"outb %al, %dx\" :: \"{al}\"(data), \"{dx}\"(port)),\n 0x12 => asm!(\"outw %ax, %dx\" :: \"{ax}\"(data), \"{dx}\"(port)),\n 0x14 => asm!(\"outl %eax, %dx\" :: \"{eax}\"(data), \"{dx}\"(port)),\n _ => abort(\"unhandled portio operation\")\n } }\n if log_portio {\n write(\"portio: port=\"); con::writeHex(port & 0xffff);\n write(\" op=\"); con::writeUInt(op);\n write(\" data=\"); con::writeHex(data);\n write(\" res=\"); con::writeHex(res);\n con::newline();\n }\n cpu().syscall_return(p, res);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore(term): Add example for custom timestamp<commit_after>#[macro_use(o,slog_log,slog_trace,slog_debug,slog_info,slog_warn,slog_error,slog_crit)]\nextern crate slog;\n\nextern crate slog_term;\n\nuse std::time::{Duration, SystemTime};\n\n\nuse slog::DrainExt;\n\nfn main() {\n let now = SystemTime::now();\n let streamer = slog_term::StreamerBuilder::new()\n .use_custom_timestamp(Box::new(move || {\n let elapsed = now.elapsed().unwrap();\n format!(\"{:5}.{:06}\", elapsed.as_secs(), elapsed.subsec_nanos()\/1000)\n }));\n let log = slog::Logger::root(streamer.build().fuse(), o!());\n\n slog_trace!(log, \"logging a trace message\");\n std::thread::sleep(Duration::from_millis(250));\n slog_debug!(log, \"debug values\"; \"x\" => 1, \"y\" => -1);\n std::thread::sleep(Duration::from_millis(250));\n slog_info!(log, \"some interesting info\"; \"where\" => \"right here\");\n std::thread::sleep(Duration::from_millis(250));\n slog_warn!(log, \"be cautious!\"; \"why\" => \"you never know...\");\n std::thread::sleep(Duration::from_millis(250));\n slog_error!(log, \"type\" => \"unknown\"; \"wrong {}\", \"foobar\");\n std::thread::sleep(Duration::from_millis(250));\n slog_crit!(log, \"abandoning test\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #81<commit_after>use common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 81,\n answer: \"427337\",\n solver: solve\n};\n\nfn solve() -> ~str {\n let result = io::file_reader(&Path(\"files\/matrix.txt\")).map(|file| {\n let mut mat = ~[];\n for file.each_line |line| {\n let mut row = ~[];\n for line.each_split_char(',') |n| {\n row.push(uint::from_str(n).get());\n }\n mat.push(row);\n assert_eq!(mat[0].len(), mat.last().len());\n }\n let w = mat[0].len();\n let h = mat.len();\n ((w, h), mat)\n }).map(|&((w, h), mat)| {\n let mut sum = vec::from_fn(h, |_y| vec::from_elem(w, 0));\n sum[0][0] = mat[0][0];\n for uint::range(1, h) |y| {\n sum[y][0] = mat[y][0] + sum[y - 1][0];\n }\n for uint::range(1, w) |x| {\n sum[0][x] = mat[0][x] + sum[0][x - 1];\n for uint::range(1, h) |y| {\n sum[y][x] = mat[y][x] + uint::min(sum[y - 1][x], sum[y][x - 1]);\n }\n }\n sum[h - 1][w - 1]\n });\n\n match result {\n Err(msg) => fail!(msg),\n Ok(value) => return value.to_str()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>small doc fix.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>remove arity comments<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>それっぽい<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>unwrap<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(feat) Added Params object to be stored on Alloy.<commit_after>use 
regex::Regex;\nuse std::collections::hashmap::HashMap;\n\npub struct Params {\n captures: HashMap<String, String>\n}\n\nimpl Params {\n pub fn new<I: Iterator<String>>(uri: &str, matcher: Regex, params: I) -> Params {\n let captures = matcher.captures(uri).unwrap();\n Params {\n captures: params.map(|p| (p.clone(), captures.name(p.as_slice()).to_string())).collect()\n }\n }\n\n pub fn get<'a>(&'a self, param: String) -> Option<&'a String> {\n self.captures.find(¶m)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>feat(scores): add PAM200 score matrix<commit_after>\/\/ Copyright 2015 M. Rizky Luthfianto.\n\/\/ Licensed under the MIT license (http:\/\/opensource.org\/licenses\/MIT)\n\/\/ This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[macro_use]\nextern crate lazy_static;\n\nextern crate nalgebra;\n\nuse nalgebra::DMat;\n\nlazy_static! {\n\n\t\/\/ Taken from https:\/\/github.com\/seqan\/seqan\/blob\/master\/include%2Fseqan%2Fscore%2Fscore_matrix_data.h#L710\n\t\/\/ Copyright (c) 2006-2015, Knut Reinert, FU Berlin\n\tstatic ref ARRAY: [i32;729]=[\n\t\t 3, 0, -3, 0, 0, -4, 1, -2, -1, -2, -2, -2, -2, 0, 0, 1, -1, -2, 1, 1, 0, 0, -7, -4, 0, 0, -9,\n\t\t 0, 3, -5, 4, 3, -6, 0, 1, -3, -4, 0, -4, -3, 3, -1, -1, 1, -1, 1, 0, -1, -3, -6, -4, 2, -1, -9,\n\t\t-3, -5, 12, -6, -7, -6, -4, -4, -3, -5, -7, -7, -6, -5, -4, -4, -7, -4, 0, -3, -4, -2, -9, 0, -7, -4, -9,\n\t\t 0, 4, -6, 5, 4, -7, 0, 0, -3, -4, 0, -5, -4, 3, -1, -2, 2, -2, 0, 0, -1, -3, -8, -5, 3, -1, -9,\n\t\t 0, 3, -7, 4, 5, -7, 0, 0, -3, -4, 0, -4, -3, 2, -1, -1, 3, -2, 0, -1, -1, -2, -9, -5, 4, -1, -9,\n\t\t-4, -6, -6, -7, -7, 10, -6, -2, 1, 2, -7, 2, 0, -4, -3, -6, -6, -5, -4, -4, -3, -2, 0, 7, -6, -3, -9,\n\t\t 1, 0, -4, 0, 0, -6, 6, -3, -3, -4, -2, -5, -4, 0, -1, -1, -2, -4, 1, 0, -1, -2, -8, -6, -1, -1, -9,\n\t\t-2, 1, -4, 0, 0, -2, -3, 8, -3, -3, -1, -3, -3, 2, -1, -1, 3, 2, -1, -2, -1, -3, -3, 0, 2, -1, -9,\n\t\t-1, -3, -3, -3, -3, 1, -3, -3, 6, 4, -2, 2, 2, -2, -1, -3, -3, -2, -2, 0, -1, 4, -6, -2, -3, -1, -9,\n\t\t-2, -4, -5, -4, -4, 2, -4, -3, 4, 5, -3, 5, 3, -3, -2, -3, -3, -3, -3, -1, -2, 3, -4, -2, -3, -2, -9,\n\t\t-2, 0, -7, 0, 0, -7, -2, -1, -2, -3, 6, -4, 1, 1, -1, -2, 1, 4, 0, 0, -1, -3, -4, -5, 0, -1, -9,\n\t\t-2, -4, -7, -5, -4, 2, -5, -3, 2, 5, -4, 7, 4, -4, -2, -3, -2, -4, -4, -2, -2, 2, -2, -2, -3, -2, -9,\n\t\t-2, -3, -6, -4, -3, 0, -4, -3, 2, 3, 1, 4, 8, -2, -1, -3, -1, -1, -2, -1, -1, 2, -5, -3, -2, -1, -9,\n\t\t 0, 3, -5, 3, 2, -4, 0, 2, -2, -3, 1, -4, -2, 3, 0, -1, 1, 0, 1, 0, 0, -2, -5, -2, 1, 0, -9,\n\t\t 0, -1, -4, -1, -1, -3, -1, -1, -1, -2, -1, -2, -1, 0, -1, -1, -1, -1, 0, 0, -1, -1, -5, -3, -1, -1, -9,\n\t\t 1, -1, -4, -2, -1, -6, -1, -1, -3, -3, -2, -3, -3, -1, -1, 7, 0, 0, 1, 0, -1, -2, -7, -6, -1, -1, -9,\n\t\t-1, 1, -7, 2, 3, -6, -2, 3, -3, -3, 1, -2, -1, 1, -1, 0, 5, 1, -1, -1, -1, -3, -6, -5, 4, -1, -9,\n\t\t-2, -1, -4, -2, -2, -5, -4, 2, -2, -3, 4, -4, -1, 0, -1, 0, 1, 7, -1, -1, -1, -3, 2, -5, 0, -1, -9,\n\t\t 1, 1, 0, 0, 0, -4, 1, -1, -2, -3, 0, -4, -2, 1, 0, 1, -1, -1, 2, 2, 0, -1, -3, -3, -1, 0, -9,\n\t\t 1, 0, -3, 0, -1, -4, 0, -2, 0, -1, 0, -2, -1, 0, 0, 0, -1, -1, 2, 4, 0, 0, -6, -3, -1, 0, -9,\n\t\t 0, -1, -4, -1, -1, -3, -1, -1, -1, -2, -1, -2, -1, 0, -1, -1, -1, -1, 0, 0, -1, -1, -5, -3, -1, -1, -9,\n\t\t 0, -3, -2, -3, -2, -2, -2, -3, 4, 3, -3, 2, 2, -2, -1, -2, -3, -3, -1, 0, -1, 5, -8, -3, -2, -1, -9,\n\t\t-7, -6, -9, -8, -9, 0, -8, -3, -6, -4, -4, -2, -5, -5, -5, -7, -6, 2, -3, -6, -5, -8, 18, -1, -7, -5, -9,\n\t\t-4, -4, 0, 
-5, -5, 7, -6, 0, -2, -2, -5, -2, -3, -2, -3, -6, -5, -5, -3, -3, -3, -3, -1, 11, -5, -3, -9,\n\t\t 0, 2, -7, 3, 4, -6, -1, 2, -3, -3, 0, -3, -2, 1, -1, -1, 4, 0, -1, -1, -1, -2, -7, -5, 4, -1, -9,\n\t\t 0, -1, -4, -1, -1, -3, -1, -1, -1, -2, -1, -2, -1, 0, -1, -1, -1, -1, 0, 0, -1, -1, -5, -3, -1, -1, -9,\n\t\t-9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, 1\n\t];\n\n\tstatic ref MAT: DMat<i32> = DMat::from_col_vec(27, 27, &*ARRAY);\n}\n\n#[inline]\nfn lookup(number: u8) -> usize {\n\tif number==b'Y' { 23 as usize }\n\telse if number==b'Z' { 24 as usize }\n\telse if number==b'X' { 25 as usize }\n\telse if number==b'*' { 26 as usize }\n\telse { (number-65) as usize }\n}\n\npub fn pam200(a: u8, b: u8) -> i32 {\n\tlet a = lookup(a);\n\tlet b = lookup(b);\n\n\tMAT[(a, b)]\n}\n\n#[cfg(test)]\nmod tests {\n\tuse super::*;\n\n\t#[test]\n\tfn test_pam200() {\n\t\tlet score1 = pam200(b'A',b'A');\n\t\tassert_eq!(score1, 2);\n\t\tlet score2 = pam200(b'*',b'*');\n\t\tassert_eq!(score2, 1);\n\t\tlet score3 = pam200(b'A',b'*');\n\t\tassert_eq!(score3, -8);\n\t\tlet score4 = pam200(b'Y',b'Z');\n\t\tassert_eq!(score4, -4);\n\t\tlet score5 = pam200(b'X',b'X');\n\t\tassert_eq!(score5, -1);\n\t\tlet score6 = pam200(b'X',b'Z');\n\t\tassert_eq!(score6, -1);\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add gtk::Separators to Each Unit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>fix(SubCommands): fixed where subcmds weren't recognized after mult args<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ These 4 `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in\n\/\/ microcontrollers. Namely, all these processors:\n\/\/\n\/\/ - Cortex-M0\n\/\/ - Cortex-M0+\n\/\/ - Cortex-M1\n\/\/ - Cortex-M3\n\/\/ - Cortex-M4(F)\n\/\/ - Cortex-M7(F)\n\/\/\n\/\/ We have opted for 4 targets instead of one target per processor (e.g. `cortex-m0`, `cortex-m3`,\n\/\/ etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost\n\/\/ non-existent from the POV of codegen so it doesn't make sense to have separate targets for them.\n\/\/ And if differences exist between two processors under the same target, rustc flags can be used to\n\/\/ optimize for one processor or the other.\n\/\/\n\/\/ Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes\n\/\/ difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags\n\/\/ than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to\n\/\/ differentiate one processor from the other.\n\/\/\n\/\/ About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set,\n\/\/ which is more compact (higher code density), and not the ARM instruction set. That's why LLVM\n\/\/ triples use thumb instead of arm. 
We follow suit because having thumb in the name let us\n\/\/ differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of\n\/\/ build scripts \/ gcc flags.\n\nuse target::TargetOptions;\nuse std::default::Default;\n\npub fn opts() -> TargetOptions {\n \/\/ See rust-lang\/rfcs#1645 for a discussion about these defaults\n TargetOptions {\n executables: true,\n \/\/ In 99%+ of cases, we want to use the `arm-none-eabi-gcc` compiler (there aren't many\n \/\/ options around)\n linker: \"arm-none-eabi-gcc\".to_string(),\n \/\/ Because these devices have very little resources having an unwinder is too onerous so we\n \/\/ default to \"abort\" because the \"unwind\" strategy is very rare.\n panic_strategy: \"abort\".to_string(),\n \/\/ Similarly, one almost always never wants to use relocatable code because of the extra\n \/\/ costs it involves.\n relocation_model: \"static\".to_string(),\n .. Default::default()\n }\n}\n<commit_msg>fix: \"abort\" -> PanicStrategy<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ These 4 `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in\n\/\/ microcontrollers. Namely, all these processors:\n\/\/\n\/\/ - Cortex-M0\n\/\/ - Cortex-M0+\n\/\/ - Cortex-M1\n\/\/ - Cortex-M3\n\/\/ - Cortex-M4(F)\n\/\/ - Cortex-M7(F)\n\/\/\n\/\/ We have opted for 4 targets instead of one target per processor (e.g. `cortex-m0`, `cortex-m3`,\n\/\/ etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost\n\/\/ non-existent from the POV of codegen so it doesn't make sense to have separate targets for them.\n\/\/ And if differences exist between two processors under the same target, rustc flags can be used to\n\/\/ optimize for one processor or the other.\n\/\/\n\/\/ Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes\n\/\/ difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags\n\/\/ than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to\n\/\/ differentiate one processor from the other.\n\/\/\n\/\/ About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set,\n\/\/ which is more compact (higher code density), and not the ARM instruction set. That's why LLVM\n\/\/ triples use thumb instead of arm. 
We follow suit because having thumb in the name let us\n\/\/ differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of\n\/\/ build scripts \/ gcc flags.\n\nuse PanicStrategy;\nuse std::default::Default;\nuse target::TargetOptions;\n\npub fn opts() -> TargetOptions {\n \/\/ See rust-lang\/rfcs#1645 for a discussion about these defaults\n TargetOptions {\n executables: true,\n \/\/ In 99%+ of cases, we want to use the `arm-none-eabi-gcc` compiler (there aren't many\n \/\/ options around)\n linker: \"arm-none-eabi-gcc\".to_string(),\n \/\/ Because these devices have very little resources having an unwinder is too onerous so we\n \/\/ default to \"abort\" because the \"unwind\" strategy is very rare.\n panic_strategy: PanicStrategy::Abort,\n \/\/ Similarly, one almost always never wants to use relocatable code because of the extra\n \/\/ costs it involves.\n relocation_model: \"static\".to_string(),\n .. Default::default()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test another version of 'creating a shared ref must not leak the Unique'<commit_after>\/\/ Creating a shared reference does not leak the data to raw pointers,\n\/\/ not even when interior mutability is involved.\n\nuse std::cell::Cell;\nuse std::ptr;\n\nfn main() { unsafe {\n let x = &mut Cell::new(0);\n let raw = x as *mut Cell<i32>;\n let x = &mut *raw;\n let _shr = &*x;\n \/\/ The state here is interesting because the top of the stack is [Unique, SharedReadWrite],\n \/\/ just like if we had done `x as *mut _`.\n \/\/ If we said that reading from a lower item is fine if the top item is `SharedReadWrite`\n \/\/ (one way to maybe preserve a stack discipline), then we could now read from `raw`\n \/\/ without invalidating `x`. That would be bad! It would mean that creating `shr`\n \/\/ leaked `x` to `raw`.\n let _val = ptr::read(raw);\n let _val = *x.get_mut(); \/\/~ ERROR borrow stack\n} }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove commented code 🐽<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #20714<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct G;\n\nfn main() {\n let g = G(); \/\/~ ERROR: expected function, found `G`\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add rustdoc test for `everybody_loops` fix<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/prior to fixing `everybody_loops` to preserve items, rustdoc would crash on this file, as it\n\/\/didn't see that `SomeStruct` implemented `Clone`\n\n\/\/FIXME(misdreavus): whenever rustdoc shows traits impl'd inside bodies, make sure this test\n\/\/reflects that\n\npub struct Bounded<T: Clone>(T);\n\npub struct SomeStruct;\n\nfn asdf() -> Bounded<SomeStruct> {\n impl Clone for SomeStruct {\n fn clone(&self) -> SomeStruct {\n SomeStruct\n }\n }\n\n Bounded(SomeStruct)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle initial setup steps for a pool.\n\/\/ Initial setup steps are steps that do not alter the environment.\n\nuse std::collections::{HashMap, HashSet};\nuse std::fs::OpenOptions;\nuse std::path::PathBuf;\n\nuse serde_json;\n\nuse devicemapper::{devnode_to_devno, Device};\n\nuse stratis::{ErrorEnum, StratisError, StratisResult};\n\nuse super::super::super::types::PoolUuid;\n\nuse super::super::serde_structs::{BackstoreSave, PoolSave};\n\nuse super::blockdev::StratBlockDev;\nuse super::device::{blkdev_size, is_stratis_device};\nuse super::metadata::BDA;\nuse super::util::get_stratis_block_devices;\n\n\/\/\/ Find all Stratis devices.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to a map of devices to devnodes for each pool.\npub fn find_all() -> StratisResult<HashMap<PoolUuid, HashMap<Device, PathBuf>>> {\n let mut pool_map = HashMap::new();\n\n for devnode in get_stratis_block_devices()? {\n match devnode_to_devno(&devnode)? {\n None => continue,\n Some(devno) => {\n is_stratis_device(&devnode)?.and_then(|pool_uuid| {\n pool_map\n .entry(pool_uuid)\n .or_insert_with(HashMap::new)\n .insert(Device::from(devno), devnode)\n });\n }\n }\n }\n Ok(pool_map)\n}\n\n\/\/\/ Get the most recent metadata from a set of Devices for a given pool UUID.\n\/\/\/ Returns None if no metadata found for this pool.\n#[allow(implicit_hasher)]\npub fn get_metadata(\n pool_uuid: PoolUuid,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<Option<PoolSave>> {\n \/\/ Get pairs of device nodes and matching BDAs\n \/\/ If no BDA, or BDA UUID does not match pool UUID, skip.\n \/\/ If there is an error reading the BDA, error. There could have been\n \/\/ vital information on that BDA, for example, it may have contained\n \/\/ the newest metadata.\n let mut bdas = Vec::new();\n for devnode in devnodes.values() {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?;\n if let Some(bda) = bda {\n if bda.pool_uuid() == pool_uuid {\n bdas.push((devnode, bda));\n }\n }\n }\n\n \/\/ Most recent time should never be None if this was a properly\n \/\/ created pool; this allows for the method to be called in other\n \/\/ circumstances.\n let most_recent_time = {\n match bdas.iter()\n .filter_map(|&(_, ref bda)| bda.last_update_time())\n .max()\n {\n Some(time) => time,\n None => return Ok(None),\n }\n };\n\n \/\/ Try to read from all available devnodes that could contain most\n \/\/ recent metadata. 
In the event of errors, continue to try until all are\n \/\/ exhausted.\n for &(devnode, ref bda) in bdas.iter()\n .filter(|&&(_, ref bda)| bda.last_update_time() == Some(most_recent_time))\n {\n let poolsave = OpenOptions::new()\n .read(true)\n .open(devnode)\n .ok()\n .and_then(|mut f| bda.load_state(&mut f).ok())\n .and_then(|opt| opt)\n .and_then(|data| serde_json::from_slice(&data).ok());\n\n if poolsave.is_some() {\n return Ok(poolsave);\n }\n }\n\n \/\/ If no data has yet returned, we have an error. That is, we should have\n \/\/ some metadata, because we have a most recent time, but we failed to\n \/\/ get any.\n let err_str = \"timestamp indicates data was written, but no data successfully read\";\n Err(StratisError::Engine(ErrorEnum::NotFound, err_str.into()))\n}\n\n\/\/\/ Get all the blockdevs corresponding to this pool that can be obtained from\n\/\/\/ the given devices.\n\/\/\/ Returns an error if a BDA can not be read or can not be found on any\n\/\/\/ blockdev in devnodes.\n\/\/\/ Returns an error if the blockdevs obtained do not match the metadata.\n\/\/\/ Returns a tuple, of which the first are the data devs, and the second\n\/\/\/ are the devs that support the cache tier.\n\/\/\/ Precondition: Every device in devnodes has already been determined to\n\/\/\/ belong to the pool with the specified pool uuid.\n#[allow(implicit_hasher)]\npub fn get_blockdevs(\n pool_uuid: PoolUuid,\n backstore_save: &BackstoreSave,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<(Vec<StratBlockDev>, Vec<StratBlockDev>)> {\n let recorded_data_map: HashMap<_, _> = backstore_save\n .data_devs\n .iter()\n .map(|bds| (bds.uuid, bds))\n .collect();\n\n let recorded_cache_map: HashMap<_, _> = match backstore_save.cache_devs {\n Some(ref cache_devs) => cache_devs.iter().map(|bds| (bds.uuid, bds)).collect(),\n None => HashMap::new(),\n };\n\n let mut segment_table = HashMap::new();\n for seg in &backstore_save.data_segments {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n if let Some(ref segs) = backstore_save.cache_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n if let Some(ref segs) = backstore_save.meta_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n\n let (mut datadevs, mut cachedevs) = (vec![], vec![]);\n for (device, devnode) in devnodes {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?.ok_or_else(|| {\n StratisError::Engine(ErrorEnum::NotFound,\n format!(\"Device {} with devnode {} was previously determined to belong to pool with uuid {} but no BDA was found\",\n device,\n devnode.display(),\n pool_uuid))\n })?;\n\n let actual_size = blkdev_size(&OpenOptions::new().read(true).open(devnode)?)?.sectors();\n\n if actual_size < bda.dev_size() {\n let err_msg = format!(\n \"actual blockdev size ({}) < recorded size ({})\",\n actual_size,\n bda.dev_size()\n );\n\n return Err(StratisError::Engine(ErrorEnum::Error, err_msg));\n }\n\n let dev_uuid = bda.dev_uuid();\n\n let (dev_vec, bd_save) = match recorded_data_map.get(&dev_uuid) {\n Some(bd_save) => (&mut datadevs, bd_save),\n None => match recorded_cache_map.get(&dev_uuid) {\n Some(bd_save) => (&mut cachedevs, bd_save),\n None => {\n let err_msg = format!(\"Blockdev {} not found in metadata\", bda.dev_uuid());\n return Err(StratisError::Engine(ErrorEnum::NotFound, err_msg));\n }\n },\n };\n\n \/\/ This should 
always succeed since the actual size is at\n \/\/ least the recorded size, so all segments should be\n \/\/ available to be allocated. If this fails, the most likely\n \/\/ conclusion is metadata corruption.\n let segments = segment_table.get(&dev_uuid);\n dev_vec.push(StratBlockDev::new(\n *device,\n devnode.to_owned(),\n bda,\n segments.unwrap_or(&vec![]),\n bd_save.user_info.clone(),\n bd_save.hardware_info.clone(),\n )?);\n }\n\n \/\/ Verify that datadevs found match datadevs recorded.\n let current_data_uuids: HashSet<_> = datadevs.iter().map(|b| b.uuid()).collect();\n let recorded_data_uuids: HashSet<_> = recorded_data_map.keys().cloned().collect();\n if current_data_uuids != recorded_data_uuids {\n let err_msg = \"Recorded data dev UUIDs != discovered datadev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if datadevs.len() != current_data_uuids.len() {\n let err_msg = \"Duplicate data devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n \/\/ Verify that cachedevs found match cachedevs recorded.\n let current_cache_uuids: HashSet<_> = cachedevs.iter().map(|b| b.uuid()).collect();\n let recorded_cache_uuids: HashSet<_> = recorded_cache_map.keys().cloned().collect();\n if current_cache_uuids != recorded_cache_uuids {\n let err_msg = \"Recorded cache dev UUIDs != discovered cachedev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if cachedevs.len() != current_cache_uuids.len() {\n let err_msg = \"Duplicate cache devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n Ok((datadevs, cachedevs))\n}\n<commit_msg>Tidy up the body of the loop<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle initial setup steps for a pool.\n\/\/ Initial setup steps are steps that do not alter the environment.\n\nuse std::collections::{HashMap, HashSet};\nuse std::fs::OpenOptions;\nuse std::path::PathBuf;\n\nuse serde_json;\n\nuse devicemapper::{devnode_to_devno, Device};\n\nuse stratis::{ErrorEnum, StratisError, StratisResult};\n\nuse super::super::super::types::{BlockDevTier, PoolUuid};\n\nuse super::super::serde_structs::{BackstoreSave, PoolSave};\n\nuse super::blockdev::StratBlockDev;\nuse super::device::{blkdev_size, is_stratis_device};\nuse super::metadata::BDA;\nuse super::util::get_stratis_block_devices;\n\n\/\/\/ Find all Stratis devices.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to a map of devices to devnodes for each pool.\npub fn find_all() -> StratisResult<HashMap<PoolUuid, HashMap<Device, PathBuf>>> {\n let mut pool_map = HashMap::new();\n\n for devnode in get_stratis_block_devices()? {\n match devnode_to_devno(&devnode)? 
{\n None => continue,\n Some(devno) => {\n is_stratis_device(&devnode)?.and_then(|pool_uuid| {\n pool_map\n .entry(pool_uuid)\n .or_insert_with(HashMap::new)\n .insert(Device::from(devno), devnode)\n });\n }\n }\n }\n Ok(pool_map)\n}\n\n\/\/\/ Get the most recent metadata from a set of Devices for a given pool UUID.\n\/\/\/ Returns None if no metadata found for this pool.\n#[allow(implicit_hasher)]\npub fn get_metadata(\n pool_uuid: PoolUuid,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<Option<PoolSave>> {\n \/\/ Get pairs of device nodes and matching BDAs\n \/\/ If no BDA, or BDA UUID does not match pool UUID, skip.\n \/\/ If there is an error reading the BDA, error. There could have been\n \/\/ vital information on that BDA, for example, it may have contained\n \/\/ the newest metadata.\n let mut bdas = Vec::new();\n for devnode in devnodes.values() {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?;\n if let Some(bda) = bda {\n if bda.pool_uuid() == pool_uuid {\n bdas.push((devnode, bda));\n }\n }\n }\n\n \/\/ Most recent time should never be None if this was a properly\n \/\/ created pool; this allows for the method to be called in other\n \/\/ circumstances.\n let most_recent_time = {\n match bdas.iter()\n .filter_map(|&(_, ref bda)| bda.last_update_time())\n .max()\n {\n Some(time) => time,\n None => return Ok(None),\n }\n };\n\n \/\/ Try to read from all available devnodes that could contain most\n \/\/ recent metadata. In the event of errors, continue to try until all are\n \/\/ exhausted.\n for &(devnode, ref bda) in bdas.iter()\n .filter(|&&(_, ref bda)| bda.last_update_time() == Some(most_recent_time))\n {\n let poolsave = OpenOptions::new()\n .read(true)\n .open(devnode)\n .ok()\n .and_then(|mut f| bda.load_state(&mut f).ok())\n .and_then(|opt| opt)\n .and_then(|data| serde_json::from_slice(&data).ok());\n\n if poolsave.is_some() {\n return Ok(poolsave);\n }\n }\n\n \/\/ If no data has yet returned, we have an error. 
That is, we should have\n \/\/ some metadata, because we have a most recent time, but we failed to\n \/\/ get any.\n let err_str = \"timestamp indicates data was written, but no data successfully read\";\n Err(StratisError::Engine(ErrorEnum::NotFound, err_str.into()))\n}\n\n\/\/\/ Get all the blockdevs corresponding to this pool that can be obtained from\n\/\/\/ the given devices.\n\/\/\/ Returns an error if a BDA can not be read or can not be found on any\n\/\/\/ blockdev in devnodes.\n\/\/\/ Returns an error if the blockdevs obtained do not match the metadata.\n\/\/\/ Returns a tuple, of which the first are the data devs, and the second\n\/\/\/ are the devs that support the cache tier.\n\/\/\/ Precondition: Every device in devnodes has already been determined to\n\/\/\/ belong to the pool with the specified pool uuid.\n#[allow(implicit_hasher)]\npub fn get_blockdevs(\n pool_uuid: PoolUuid,\n backstore_save: &BackstoreSave,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<(Vec<StratBlockDev>, Vec<StratBlockDev>)> {\n let recorded_data_map: HashMap<_, _> = backstore_save\n .data_devs\n .iter()\n .map(|bds| (bds.uuid, bds))\n .collect();\n\n let recorded_cache_map: HashMap<_, _> = match backstore_save.cache_devs {\n Some(ref cache_devs) => cache_devs.iter().map(|bds| (bds.uuid, bds)).collect(),\n None => HashMap::new(),\n };\n\n let mut segment_table = HashMap::new();\n for seg in &backstore_save.data_segments {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n if let Some(ref segs) = backstore_save.cache_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n if let Some(ref segs) = backstore_save.meta_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n\n let (mut datadevs, mut cachedevs) = (vec![], vec![]);\n for (device, devnode) in devnodes {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?.ok_or_else(|| {\n StratisError::Engine(ErrorEnum::NotFound,\n format!(\"Device {} with devnode {} was previously determined to belong to pool with uuid {} but no BDA was found\",\n device,\n devnode.display(),\n pool_uuid))\n })?;\n\n \/\/ Return an error if apparent size of Stratis block device appears to\n \/\/ have decreased since metadata was recorded or if size of block\n \/\/ device could not be obtained.\n blkdev_size(&OpenOptions::new().read(true).open(devnode)?).and_then(|actual_size| {\n let actual_size_sectors = actual_size.sectors();\n let recorded_size = bda.dev_size();\n if actual_size_sectors < recorded_size {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had recorded size ({}), but actual size is less at ({})\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid(),\n recorded_size,\n actual_size\n );\n Err(StratisError::Engine(ErrorEnum::Error, err_msg))\n } else {\n Ok(())\n }\n })?;\n\n let dev_uuid = bda.dev_uuid();\n\n \/\/ Locate the device in the metadata using its uuid. 
Return the device\n \/\/ metadata and whether it was a cache or a datadev.\n let (tier, bd_save) = recorded_data_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Data, bd_save))\n .or_else(|| {\n recorded_cache_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Cache, bd_save))\n })\n .ok_or_else(|| {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had no record in pool metadata\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid()\n );\n StratisError::Engine(ErrorEnum::NotFound, err_msg)\n })?;\n\n \/\/ This should always succeed since the actual size is at\n \/\/ least the recorded size, so all segments should be\n \/\/ available to be allocated. If this fails, the most likely\n \/\/ conclusion is metadata corruption.\n let segments = segment_table.get(&dev_uuid);\n match tier {\n BlockDevTier::Data => &mut datadevs,\n BlockDevTier::Cache => &mut cachedevs,\n }.push(StratBlockDev::new(\n *device,\n devnode.to_owned(),\n bda,\n segments.unwrap_or(&vec![]),\n bd_save.user_info.clone(),\n bd_save.hardware_info.clone(),\n )?);\n }\n\n \/\/ Verify that datadevs found match datadevs recorded.\n let current_data_uuids: HashSet<_> = datadevs.iter().map(|b| b.uuid()).collect();\n let recorded_data_uuids: HashSet<_> = recorded_data_map.keys().cloned().collect();\n if current_data_uuids != recorded_data_uuids {\n let err_msg = \"Recorded data dev UUIDs != discovered datadev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if datadevs.len() != current_data_uuids.len() {\n let err_msg = \"Duplicate data devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n \/\/ Verify that cachedevs found match cachedevs recorded.\n let current_cache_uuids: HashSet<_> = cachedevs.iter().map(|b| b.uuid()).collect();\n let recorded_cache_uuids: HashSet<_> = recorded_cache_map.keys().cloned().collect();\n if current_cache_uuids != recorded_cache_uuids {\n let err_msg = \"Recorded cache dev UUIDs != discovered cachedev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if cachedevs.len() != current_cache_uuids.len() {\n let err_msg = \"Duplicate cache devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n Ok((datadevs, cachedevs))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make iterator lifetimes less restricting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add an undef validation test<commit_after>#![allow(unused_variables)]\n\/\/ error-pattern: attempted to read undefined bytes\n\nmod safe {\n use std::mem;\n\n pub(crate) fn make_float() -> f32 {\n unsafe { mem::uninitialized() }\n }\n}\n\nfn main() {\n let _x = safe::make_float();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>man or boy implementation<commit_after>\/\/ http:\/\/rosettacode.org\/wiki\/Man_or_boy_test\n\/\/ as originally posted by Kimundi on Reddit\n\/\/ http:\/\/www.reddit.com\/r\/rust\/comments\/2t80mw\/the_man_or_boy_test_in_rust\/\nuse std::cell::Cell;\n\nfn a(k: i32,\n x1: &Fn() -> i32,\n x2: &Fn() -> i32,\n x3: &Fn() -> i32,\n x4: &Fn() -> i32,\n x5: &Fn() -> i32) -> i32 {\n let k = Cell::new(k);\n\n let b: Cell<Option<&Fn() -> i32>> = Cell::new(None);\n let tmp = |&:| {\n k.set(k.get() - 1);\n a(k.get(), &*b.get().unwrap(), x1, x2, x3, x4)\n };\n b.set(Some(&tmp));\n\n if k.get() <= 0 { x4() + x5() } else { b.get().unwrap()() 
}\n}\n\n#[cfg(not(test))]\nfn main() {\n println!(\"%{}\", a(10, &|| 1, &|| -1, &|| -1, &|| 1, &|| 0));\n}\n\n#[test]\nfn result() {\n assert_eq!(a(10, &|| 1, &|| -1, &|| -1, &|| 1, &|| 0), -67)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add error module<commit_after>generate_error_module!(\n generate_error_types!(DiaryError, DiaryErrorKind,\n StoreWriteError => \"Error writing store\",\n StoreReadError => \"Error reading store\",\n CannotFindDiary => \"Cannot find diary\",\n CannotCreateNote => \"Cannot create Note object for diary entry\",\n DiaryEditError => \"Cannot edit diary entry\",\n PathConversionError => \"Error while converting paths internally\",\n EntryNotInDiary => \"Entry not in Diary\",\n IOError => \"IO Error\"\n );\n);\n\npub use self::error::DiaryError;\npub use self::error::DiaryErrorKind;\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>imag-mv: Move from error-chain to failure<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>parser: fix tests<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetOptions, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::android_base::opts();\n base.features = \"+v7,+vfp3,+d16\".to_string();\n base.max_atomic_width = Some(64);\n\n Ok(Target {\n llvm_target: \"arm-linux-androideabi\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64\".to_string(),\n arch: \"arm\".to_string(),\n target_os: \"android\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: TargetOptions {\n abi_blacklist: super::arm_base::abi_blacklist(),\n .. base\n },\n })\n}\n<commit_msg>Rollup merge of #41656 - malbarbo:android-armeabi, r=alexcrichton<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetOptions, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::android_base::opts();\n \/\/ https:\/\/developer.android.com\/ndk\/guides\/abis.html#armeabi\n base.features = \"+v5te\".to_string();\n base.max_atomic_width = Some(64);\n\n Ok(Target {\n llvm_target: \"arm-linux-androideabi\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64\".to_string(),\n arch: \"arm\".to_string(),\n target_os: \"android\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: TargetOptions {\n abi_blacklist: super::arm_base::abi_blacklist(),\n .. base\n },\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for ChanSelect<commit_after>#[macro_use] extern crate \"rust-sessions\" as rust_sessions;\n\nuse std::thread::spawn;\nuse std::borrow::ToOwned;\nuse rust_sessions::*;\n\n\/\/ recv and assert a value, then close the channel\nmacro_rules! recv_assert_eq_close(\n ($e:expr, $rx:ident.recv())\n =>\n ({\n let (c, v) = $rx.recv();\n assert_eq!($e, v);\n c.close();\n })\n);\n\n#[test]\nfn chan_select_simple() {\n let (tcs, rcs) = session_channel();\n let (tcu, rcu) = session_channel();\n\n \/\/ Spawn threads\n send_str(tcs);\n\n \/\/ The lifetime of `sel` is reduced to the point where we call\n \/\/ `wait()`. This ensures we don't hold on to Chan references, but still\n \/\/ prevents using the channels the ChanSelect holds references to.\n let index = {\n let mut sel = ChanSelect::new();\n sel.add(&rcs); \/\/ Assigned 0\n sel.add(&rcu); \/\/ Assigned 1\n sel.wait() \/\/ Destroys the ChanSelect, releases references to\n \/\/ rcs and rcu\n };\n\n assert_eq!(0, index);\n recv_assert_eq_close!(\"Hello, World!\".to_owned(), rcs.recv());\n\n let (tcs, rcs) = session_channel();\n\n send_usize(tcu);\n\n let index = {\n let mut sel = ChanSelect::new();\n sel.add(&rcs);\n sel.add(&rcu);\n sel.wait()\n };\n\n assert_eq!(1, index);\n recv_assert_eq_close!(42, rcu.recv());\n\n \/\/ Not really necessary for the test, just used to coerce the types of\n \/\/ tcs and rcs\n send_str(tcs);\n recv_assert_eq_close!(\"Hello, World!\".to_owned(), rcs.recv());\n}\n\n#[test]\nfn chan_select_add_ret() {\n enum ChanToRead {\n Str,\n Usize\n }\n\n let (tcs, rcs) = session_channel();\n let (tcu, rcu) = session_channel();\n\n \/\/ Spawn threads\n spawn(move|| send_str(tcs));\n\n \/\/ The lifetime of `sel` is reduced to the point where we call\n \/\/ `wait()`. 
This ensures we don't hold on to Chan references, but still\n \/\/ prevents using the channels the ChanSelect holds references to.\n let chan_to_read = {\n let mut sel = ChanSelect::new();\n sel.add_ret(&rcs, ChanToRead::Str); \/\/ Assigned 0\n sel.add_ret(&rcu, ChanToRead::Usize); \/\/ Assigned 1\n sel.wait() \/\/ Destroys the ChanSelect, releases references to\n \/\/ rcs and rcu\n };\n\n send_usize(tcu);\n\n match chan_to_read {\n ChanToRead::Str => {\n recv_assert_eq_close!(\"Hello, World!\".to_owned(), rcs.recv());\n recv_assert_eq_close!(42, rcu.recv());\n }\n ChanToRead::Usize => {\n panic!(\"Unexpected read of usize chan before str chan!\");\n }\n }\n}\n\n\/\/ Utility functions\n\nfn send_str(c: Chan<(), Send<String, Eps>>) {\n c.send(\"Hello, World!\".to_string()).close();\n}\n\nfn send_usize(c: Chan<(), Send<usize, Eps>>) {\n c.send(42).close();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added ringbuffer<commit_after>\/\/! A generic ringbuffer\n\n#![experimental]\n\nuse std::clone::Clone;\nuse std::vec::Vec;\n\nuse core::Time;\n\n\npub struct RingBuffer<T: Clone> {\n buf: Vec<T>,\n capacity: uint,\n size: uint,\n start_t: Time,\n end_t: Time,\n}\n\nimpl<T: Clone> RingBuffer<T> {\n pub fn new(capacity: uint) -> RingBuffer<T> {\n RingBuffer { \n buf: Vec::with_capacity(capacity), \n capacity: capacity,\n size: 0, \n start_t: 0, \n end_t: 0 \n }\n }\n\n pub fn get(&self, t: Time) -> Option<T> {\n if self.start_t <= t && t < self.end_t {\n Some(self.buf[(t % self.capacity as Time) as uint].clone())\n } else {\n None\n }\n }\n\n pub fn push(&mut self, data: T) {\n if self.size < self.capacity {\n self.buf.push(data);\n self.size += 1;\n self.end_t += 1;\n } else {\n self.buf[(self.end_t % self.capacity as Time) as uint] = data;\n self.start_t += 1;\n self.end_t += 1;\n }\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::RingBuffer;\n\n #[test]\n fn test_push() {\n let mut rb = RingBuffer::<int>::new(2);\n\n rb.push(13);\n assert_eq!(rb.size, 1);\n assert_eq!(rb.start_t, 0);\n assert_eq!(rb.end_t, 1);\n assert_eq!(rb.buf[0], 13);\n\n rb.push(7);\n assert_eq!(rb.size, 2);\n assert_eq!(rb.start_t, 0);\n assert_eq!(rb.end_t, 2);\n assert_eq!(rb.buf[0], 13);\n assert_eq!(rb.buf[1], 7);\n\n rb.push(3);\n assert_eq!(rb.size, 2);\n assert_eq!(rb.start_t, 1);\n assert_eq!(rb.end_t, 3);\n assert_eq!(rb.buf[0], 3);\n assert_eq!(rb.buf[1], 7);\n }\n\n #[test]\n fn test_get() {\n let mut rb: RingBuffer<int> = RingBuffer { \n buf: vec![7,13], \n capacity: 2, \n size: 2,\n start_t: 7,\n end_t: 9\n };\n\n \/\/ Test with odd start\n assert_eq!(rb.get(6), None);\n assert_eq!(rb.get(7), Some(13));\n assert_eq!(rb.get(8), Some(7));\n assert_eq!(rb.get(9), None);\n\n \/\/ Test with even start\n rb.start_t = 6; rb.end_t = 8;\n assert_eq!(rb.get(5), None);\n assert_eq!(rb.get(6), Some(7));\n assert_eq!(rb.get(7), Some(13));\n assert_eq!(rb.get(8), None);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Improved streak count algo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make code more functional by more function chaining.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary to_string() calls<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added bar def<commit_after>\/\/! 
Defines the operations and data definitions for a top bar program.\n\nuse std::ops::Drop;\nuse rustwlc::WlcView;\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]\npub struct Bar {\n view: WlcView\n}\n\nimpl Bar {\n pub fn new(view: WlcView) -> Self {\n Bar { view: view }\n }\n\n \/\/\/ Gets the view that is associated with the bar.\n pub fn view(&self) -> WlcView {\n self.view\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n#![comment = \"The Servo Parallel Browser Project\"]\n#![license = \"MPL\"]\n\n#![feature(globs, macro_rules, phase, thread_local, unsafe_destructor)]\n\n#![deny(unused_imports, unused_variable)]\n#![allow(unrooted_must_root)]\n\n#[phase(plugin, link)]\nextern crate log;\n\nextern crate debug;\n\nextern crate geom;\nextern crate gfx;\nextern crate layout_traits;\nextern crate script;\nextern crate script_traits;\nextern crate serialize;\nextern crate style;\n#[phase(plugin)]\nextern crate \"plugins\" as servo_plugins;\nextern crate \"net\" as servo_net;\nextern crate \"msg\" as servo_msg;\n#[phase(plugin, link)]\nextern crate \"util\" as servo_util;\n\n#[phase(plugin)]\nextern crate string_cache_macros;\nextern crate string_cache;\n\nextern crate collections;\nextern crate encoding;\nextern crate green;\nextern crate libc;\nextern crate sync;\nextern crate url;\n\n\/\/ Listed first because of macro definitions\npub mod layout_debug;\n\npub mod block;\npub mod construct;\npub mod context;\npub mod display_list_builder;\npub mod floats;\npub mod flow;\npub mod flow_list;\npub mod flow_ref;\npub mod fragment;\npub mod layout_task;\npub mod inline;\npub mod model;\npub mod parallel;\npub mod sequential;\npub mod table_wrapper;\npub mod table;\npub mod table_caption;\npub mod table_colgroup;\npub mod table_rowgroup;\npub mod table_row;\npub mod table_cell;\npub mod text;\npub mod traversal;\npub mod util;\npub mod incremental;\npub mod wrapper;\n\npub mod css {\n mod node_util;\n\n pub mod matching;\n pub mod node_style;\n}\n<commit_msg>Remove unnecessary import of libgreen<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\n#![comment = \"The Servo Parallel Browser Project\"]\n#![license = \"MPL\"]\n\n#![feature(globs, macro_rules, phase, thread_local, unsafe_destructor)]\n\n#![deny(unused_imports, unused_variable)]\n#![allow(unrooted_must_root)]\n\n#[phase(plugin, link)]\nextern crate log;\n\nextern crate debug;\n\nextern crate geom;\nextern crate gfx;\nextern crate layout_traits;\nextern crate script;\nextern crate script_traits;\nextern crate serialize;\nextern crate style;\n#[phase(plugin)]\nextern crate \"plugins\" as servo_plugins;\nextern crate \"net\" as servo_net;\nextern crate \"msg\" as servo_msg;\n#[phase(plugin, link)]\nextern crate \"util\" as servo_util;\n\n#[phase(plugin)]\nextern crate string_cache_macros;\nextern crate string_cache;\n\nextern crate collections;\nextern crate encoding;\nextern crate libc;\nextern crate sync;\nextern crate url;\n\n\/\/ Listed first because of macro definitions\npub mod layout_debug;\n\npub mod block;\npub mod construct;\npub mod context;\npub mod display_list_builder;\npub mod floats;\npub mod flow;\npub mod flow_list;\npub mod flow_ref;\npub mod fragment;\npub mod layout_task;\npub mod inline;\npub mod model;\npub mod parallel;\npub mod sequential;\npub mod table_wrapper;\npub mod table;\npub mod table_caption;\npub mod table_colgroup;\npub mod table_rowgroup;\npub mod table_row;\npub mod table_cell;\npub mod text;\npub mod traversal;\npub mod util;\npub mod incremental;\npub mod wrapper;\n\npub mod css {\n mod node_util;\n\n pub mod matching;\n pub mod node_style;\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: shortcut for propagating errors: ?<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Simplify re-export in library module.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>all references in static variable have 'static lifetime<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #27500 - michaelwoerister:bring-gdb-pp-tests-back, r=alexcrichton<commit_after>\/\/ Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-windows failing on win32 bot\n\/\/ ignore-freebsd: output doesn't match\n\/\/ ignore-tidy-linelength\n\/\/ ignore-lldb\n\/\/ ignore-android: FIXME(#10381)\n\/\/ compile-flags:-g\n\n\/\/ This test uses some GDB Python API features (e.g. accessing anonymous fields)\n\/\/ which are only available in newer GDB version. 
The following directive will\n\/\/ case the test runner to ignore this test if an older GDB version is used:\n\/\/ min-gdb-version 7.7\n\n\/\/ gdb-command: run\n\n\/\/ gdb-command: print regular_struct\n\/\/ gdb-check:$1 = RegularStruct = {the_first_field = 101, the_second_field = 102.5, the_third_field = false, the_fourth_field = \"I'm so pretty, oh so pretty...\"}\n\n\/\/ gdb-command: print tuple\n\/\/ gdb-check:$2 = {true, 103, \"blub\"}\n\n\/\/ gdb-command: print tuple_struct\n\/\/ gdb-check:$3 = TupleStruct = {-104.5, 105}\n\n\/\/ gdb-command: print empty_struct\n\/\/ gdb-check:$4 = EmptyStruct\n\n\/\/ gdb-command: print c_style_enum1\n\/\/ gdb-check:$5 = CStyleEnumVar1\n\n\/\/ gdb-command: print c_style_enum2\n\/\/ gdb-check:$6 = CStyleEnumVar2\n\n\/\/ gdb-command: print c_style_enum3\n\/\/ gdb-check:$7 = CStyleEnumVar3\n\n\/\/ gdb-command: print mixed_enum_c_style_var\n\/\/ gdb-check:$8 = MixedEnumCStyleVar\n\n\/\/ gdb-command: print mixed_enum_tuple_var\n\/\/ gdb-check:$9 = MixedEnumTupleVar = {106, 107, false}\n\n\/\/ gdb-command: print mixed_enum_struct_var\n\/\/ gdb-check:$10 = MixedEnumStructVar = {field1 = 108.5, field2 = 109}\n\n\/\/ gdb-command: print some\n\/\/ gdb-check:$11 = Some = {110}\n\n\/\/ gdb-command: print none\n\/\/ gdb-check:$12 = None\n\n\/\/ gdb-command: print some_fat\n\/\/ gdb-check:$13 = Some = {\"abc\"}\n\n\/\/ gdb-command: print none_fat\n\/\/ gdb-check:$14 = None\n\n\/\/ gdb-command: print nested_variant1\n\/\/ gdb-check:$15 = NestedVariant1 = {NestedStruct = {regular_struct = RegularStruct = {the_first_field = 111, the_second_field = 112.5, the_third_field = true, the_fourth_field = \"NestedStructString1\"}, tuple_struct = TupleStruct = {113.5, 114}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar2, mixed_enum = MixedEnumTupleVar = {115, 116, false}}}\n\n\/\/ gdb-command: print nested_variant2\n\/\/ gdb-check:$16 = NestedVariant2 = {abc = NestedStruct = {regular_struct = RegularStruct = {the_first_field = 117, the_second_field = 118.5, the_third_field = false, the_fourth_field = \"NestedStructString10\"}, tuple_struct = TupleStruct = {119.5, 120}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar3, mixed_enum = MixedEnumStructVar = {field1 = 121.5, field2 = -122}}}\n\n\/\/ gdb-command: print none_check1\n\/\/ gdb-check:$17 = None\n\n\/\/ gdb-command: print none_check2\n\/\/ gdb-check:$18 = None\n\n#![allow(dead_code, unused_variables)]\n\nuse self::CStyleEnum::{CStyleEnumVar1, CStyleEnumVar2, CStyleEnumVar3};\nuse self::MixedEnum::{MixedEnumCStyleVar, MixedEnumTupleVar, MixedEnumStructVar};\nuse self::NestedEnum::{NestedVariant1, NestedVariant2};\n\nstruct RegularStruct {\n the_first_field: isize,\n the_second_field: f64,\n the_third_field: bool,\n the_fourth_field: &'static str,\n}\n\nstruct TupleStruct(f64, i16);\n\nstruct EmptyStruct;\n\nenum CStyleEnum {\n CStyleEnumVar1,\n CStyleEnumVar2,\n CStyleEnumVar3,\n}\n\nenum MixedEnum {\n MixedEnumCStyleVar,\n MixedEnumTupleVar(u32, u16, bool),\n MixedEnumStructVar { field1: f64, field2: i32 }\n}\n\nstruct NestedStruct {\n regular_struct: RegularStruct,\n tuple_struct: TupleStruct,\n empty_struct: EmptyStruct,\n c_style_enum: CStyleEnum,\n mixed_enum: MixedEnum,\n}\n\nenum NestedEnum {\n NestedVariant1(NestedStruct),\n NestedVariant2 { abc: NestedStruct }\n}\n\nfn main() {\n\n let regular_struct = RegularStruct {\n the_first_field: 101,\n the_second_field: 102.5,\n the_third_field: false,\n the_fourth_field: \"I'm so pretty, oh so pretty...\"\n };\n\n let tuple = ( true, 103u32, \"blub\" 
);\n\n let tuple_struct = TupleStruct(-104.5, 105);\n\n let empty_struct = EmptyStruct;\n\n let c_style_enum1 = CStyleEnumVar1;\n let c_style_enum2 = CStyleEnumVar2;\n let c_style_enum3 = CStyleEnumVar3;\n\n let mixed_enum_c_style_var = MixedEnumCStyleVar;\n let mixed_enum_tuple_var = MixedEnumTupleVar(106, 107, false);\n let mixed_enum_struct_var = MixedEnumStructVar { field1: 108.5, field2: 109 };\n\n let some = Some(110_usize);\n let none: Option<isize> = None;\n let some_fat = Some(\"abc\");\n let none_fat: Option<&'static str> = None;\n\n let nested_variant1 = NestedVariant1(\n NestedStruct {\n regular_struct: RegularStruct {\n the_first_field: 111,\n the_second_field: 112.5,\n the_third_field: true,\n the_fourth_field: \"NestedStructString1\",\n },\n tuple_struct: TupleStruct(113.5, 114),\n empty_struct: EmptyStruct,\n c_style_enum: CStyleEnumVar2,\n mixed_enum: MixedEnumTupleVar(115, 116, false)\n }\n );\n\n let nested_variant2 = NestedVariant2 {\n abc: NestedStruct {\n regular_struct: RegularStruct {\n the_first_field: 117,\n the_second_field: 118.5,\n the_third_field: false,\n the_fourth_field: \"NestedStructString10\",\n },\n tuple_struct: TupleStruct(119.5, 120),\n empty_struct: EmptyStruct,\n c_style_enum: CStyleEnumVar3,\n mixed_enum: MixedEnumStructVar {\n field1: 121.5,\n field2: -122\n }\n }\n };\n\n let none_check1: Option<(usize, Vec<usize>)> = None;\n let none_check2: Option<String> = None;\n\n zzz(); \/\/ #break\n}\n\nfn zzz() { () }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unused constant<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>refactor: Resolve App::usage deprecation<commit_after><|endoftext|>"} {"text":"<commit_before>use std::cmp::max;\nuse std::collections::VecDeque;\nuse std::mem::size_of;\nuse std::{ptr, slice};\n\nuse super::{Color, Display, Event, Font, Image, ImageRoi};\n\nuse system::error::{Error, Result, EINVAL};\n\npub struct Window {\n pub x: i32,\n pub y: i32,\n image: Image,\n title: String,\n events: VecDeque<Event>,\n}\n\nimpl Window {\n pub fn new(x: i32, y: i32, w: i32, h: i32, title: String) -> Window {\n Window {\n x: x,\n y: y,\n image: Image::new(w, h),\n title: title,\n events: VecDeque::new()\n }\n }\n\n pub fn width(&self) -> i32 {\n self.image.width()\n }\n\n pub fn height(&self) -> i32 {\n self.image.height()\n }\n\n pub fn as_roi(&mut self) -> ImageRoi {\n self.image.as_roi()\n }\n\n pub fn roi(&mut self, x: i32, y: i32, w: i32, h: i32) -> ImageRoi {\n self.image.roi(x, y, w, h)\n }\n\n pub fn contains(&self, x: i32, y: i32) -> bool {\n x >= self.x && y >= self.y && x < self.x + self.width() && y < self.y + self.height()\n }\n\n pub fn title_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= self.x && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn exit_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= max(self.x, self.x + self.width() - 8) && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn draw(&mut self, display: &mut Display, focused: bool) {\n if ! 
self.title.is_empty() {\n if focused {\n display.roi(self.x, self.y - 18, self.width(), 18).set(Color::rgba(192, 192, 192, 224));\n } else {\n display.roi(self.x, self.y - 18, self.width(), 18).set(Color::rgba(64, 64, 64, 224));\n }\n\n let mut x = self.x + 2;\n for c in self.title.chars() {\n if x + 10 <= self.x + self.width() - 10 {\n display.roi(x, self.y - 17, 8, 16).blend(&Font::render(c, Color::rgb(255, 255, 255)).as_roi());\n } else {\n break;\n }\n x += 8;\n }\n\n x = max(self.x + 2, self.x + self.width() - 10);\n if x + 10 <= self.x + self.width() {\n display.roi(x, self.y - 17, 8, 16).blend(&Font::render('X', Color::rgb(255, 255, 255)).as_roi());\n }\n }\n let mut display_roi = display.roi(self.x, self.y, self.width(), self.height());\n display_roi.blend(&self.as_roi());\n }\n\n pub fn event(&mut self, event: Event) {\n self.events.push_back(event);\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Result {\n if buf.len() >= size_of::<Event>() {\n let mut i = 0;\n while i <= buf.len() - size_of::<Event>() {\n if let Some(event) = self.events.pop_front() {\n unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };\n i += size_of::<Event>();\n } else {\n break;\n }\n }\n Ok(i)\n } else {\n Err(Error::new(EINVAL))\n }\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Result {\n let old = self.image.data_mut();\n let new = unsafe { slice::from_raw_parts(buf.as_ptr() as *const Color, buf.len() \/ size_of::<Color>()) };\n\n let mut i = 0;\n while i < old.len() && i < new.len() {\n old[i] = new[i];\n i += 1;\n }\n\n Ok(i * size_of::<Color>())\n }\n}\n<commit_msg>Fix margins<commit_after>use std::cmp::max;\nuse std::collections::VecDeque;\nuse std::mem::size_of;\nuse std::{ptr, slice};\n\nuse super::{Color, Display, Event, Font, Image, ImageRoi};\n\nuse system::error::{Error, Result, EINVAL};\n\npub struct Window {\n pub x: i32,\n pub y: i32,\n image: Image,\n title: String,\n events: VecDeque<Event>,\n}\n\nimpl Window {\n pub fn new(x: i32, y: i32, w: i32, h: i32, title: String) -> Window {\n Window {\n x: x,\n y: y,\n image: Image::new(w, h),\n title: title,\n events: VecDeque::new()\n }\n }\n\n pub fn width(&self) -> i32 {\n self.image.width()\n }\n\n pub fn height(&self) -> i32 {\n self.image.height()\n }\n\n pub fn as_roi(&mut self) -> ImageRoi {\n self.image.as_roi()\n }\n\n pub fn roi(&mut self, x: i32, y: i32, w: i32, h: i32) -> ImageRoi {\n self.image.roi(x, y, w, h)\n }\n\n pub fn contains(&self, x: i32, y: i32) -> bool {\n x >= self.x && y >= self.y && x < self.x + self.width() && y < self.y + self.height()\n }\n\n pub fn title_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= self.x && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn exit_contains(&self, x: i32, y: i32) -> bool {\n ! self.title.is_empty() && x >= max(self.x, self.x + self.width() - 10) && y >= self.y - 18 && x < self.x + self.width() && y < self.y\n }\n\n pub fn draw(&mut self, display: &mut Display, focused: bool) {\n if ! 
self.title.is_empty() {\n if focused {\n display.roi(self.x, self.y - 18, self.width(), 18).set(Color::rgba(192, 192, 192, 224));\n } else {\n display.roi(self.x, self.y - 18, self.width(), 18).set(Color::rgba(64, 64, 64, 224));\n }\n\n let mut x = self.x + 2;\n for c in self.title.chars() {\n if x + 8 <= self.x + self.width() - 10 {\n display.roi(x, self.y - 17, 8, 16).blend(&Font::render(c, Color::rgb(255, 255, 255)).as_roi());\n } else {\n break;\n }\n x += 8;\n }\n\n x = max(self.x + 2, self.x + self.width() - 10);\n if x + 10 <= self.x + self.width() {\n display.roi(x, self.y - 17, 8, 16).blend(&Font::render('X', Color::rgb(255, 255, 255)).as_roi());\n }\n }\n let mut display_roi = display.roi(self.x, self.y, self.width(), self.height());\n display_roi.blend(&self.as_roi());\n }\n\n pub fn event(&mut self, event: Event) {\n self.events.push_back(event);\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Result {\n if buf.len() >= size_of::<Event>() {\n let mut i = 0;\n while i <= buf.len() - size_of::<Event>() {\n if let Some(event) = self.events.pop_front() {\n unsafe { ptr::write(buf.as_mut_ptr().offset(i as isize) as *mut Event, event) };\n i += size_of::<Event>();\n } else {\n break;\n }\n }\n Ok(i)\n } else {\n Err(Error::new(EINVAL))\n }\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Result {\n let old = self.image.data_mut();\n let new = unsafe { slice::from_raw_parts(buf.as_ptr() as *const Color, buf.len() \/ size_of::<Color>()) };\n\n let mut i = 0;\n while i < old.len() && i < new.len() {\n old[i] = new[i];\n i += 1;\n }\n\n Ok(i * size_of::<Color>())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>CIFF: implement basic parser same as TIFF<commit_after>use std::collections::HashMap;\nuse decoders::basics::*;\nuse decoders::Buffer;\nuse std::str;\n\nenum_from_primitive! 
{\n#[derive(Debug, Copy, Clone, PartialEq)]\npub enum CiffTag {\n Null = 0x0000,\n MakeModel = 0x080a,\n ShotInfo = 0x102a,\n WhiteBalance = 0x10a9,\n SensorInfo = 0x1031,\n ImageInfo = 0x1810,\n DecoderTable = 0x1835,\n RawData = 0x2005,\n SubIFD = 0x300a,\n Exif = 0x300b,\n}\n}\n\nfn ct (tag: CiffTag) -> u16 {\n tag as u16\n}\n\n#[derive(Debug, Copy, Clone)]\npub struct CiffEntry<'a> {\n pub tag: u16,\n pub typ: u16,\n pub count: usize,\n pub bytesize: usize,\n pub data_offset: usize,\n pub data: &'a [u8],\n}\n\n#[derive(Debug, Clone)]\npub struct CiffIFD<'a> {\n entries: HashMap<u16,CiffEntry<'a>>,\n subifds: Vec<CiffIFD<'a>>,\n}\n\npub fn is_ciff(buf: &[u8]) -> bool {\n buf[6..14] == b\"HEAPCCDR\"[..]\n}\n\nimpl<'a> CiffIFD<'a> {\n pub fn new_file(buf: &'a Buffer) -> Result<CiffIFD<'a>,String> {\n let data = &buf.buf;\n CiffIFD::new(data, data[2] as usize, buf.size, 1)\n }\n\n pub fn new(buf: &'a[u8], start: usize, end: usize, depth: u32) -> Result<CiffIFD<'a>, String> {\n let mut entries = HashMap::new();\n let mut subifds = Vec::new();\n\n let valuedata_size = LEu32(buf, end-4) as usize;\n let dircount = LEu16(buf, start+valuedata_size) as usize;\n\n for i in 0..dircount {\n let entry_offset: usize = start+valuedata_size+2+i*10;\n let e = try!(CiffEntry::new(buf, start, entry_offset));\n if e.typ == 0x2800 || e.typ == 0x3000 { \/\/ SubIFDs\n if depth < 10 { \/\/ Avoid infinite looping IFDs\n let ifd = CiffIFD::new(buf, e.data_offset, e.data_offset+e.bytesize, depth+1);\n match ifd {\n Ok(val) => {subifds.push(val);},\n Err(_) => {entries.insert(e.tag, e);}, \/\/ Ignore unparsable IFDs\n }\n }\n } else {\n entries.insert(e.tag, e);\n }\n }\n\n Ok(CiffIFD {\n entries: entries,\n subifds: subifds,\n })\n }\n\n pub fn find_entry(&self, tag: CiffTag) -> Option<&CiffEntry> {\n if self.entries.contains_key(&ct(tag)) {\n self.entries.get(&ct(tag))\n } else {\n for ifd in &self.subifds {\n match ifd.find_entry(tag) {\n Some(x) => return Some(x),\n None => {},\n }\n }\n None\n }\n }\n}\n\nimpl<'a> CiffEntry<'a> {\n pub fn new(buf: &'a[u8], value_data: usize, offset: usize) -> Result<CiffEntry<'a>, String> {\n let p = LEu16(buf, offset);\n let tag = p & 0x3fff;\n let datalocation = (p & 0xc000) as usize;\n let typ = p & 0x3800;\n\n let (bytesize, data_offset) = match datalocation {\n \/\/ Data is offset in value_data\n 0x0000 => (LEu32(buf, offset+2) as usize, LEu32(buf, offset+6) as usize + value_data),\n \/\/ Data is stored directly in entry\n 0x4000 => (8, offset+2),\n val => return Err(format!(\"CIFF: Don't know about data location {:x}\", val).to_string()),\n };\n let data = &buf[data_offset..data_offset+bytesize];\n let count = bytesize >> CiffEntry::element_shift(typ);\n\n Ok(CiffEntry {\n tag: tag,\n typ: typ,\n count: count,\n bytesize: bytesize,\n data_offset: data_offset,\n data: data,\n })\n }\n\n pub fn element_shift(typ: u16) -> usize {\n match typ {\n \/\/ Byte and ASCII\n 0x0000 | 0x8000 => 0,\n \/\/ Short\n 0x1000 => 1,\n \/\/ Long, Mix, Sub1 and Sub2\n 0x1800 | 0x2000 | 0x2800 | 0x3000 => 2,\n \/\/ Default to 0\n _ => 0,\n }\n }\n\n pub fn get_strings(&self) -> Vec<String> {\n String::from_utf8_lossy(self.data).split_terminator(\"\\0\").map(|x| x.to_string()).collect()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add refine.rs<commit_after>\nuse rand;\nuse generate::Generator;\nuse truth_value::TruthValue;\nuse program::Program;\nuse fact_table::FactTable;\nuse optimize::compute_adjustments;\nuse bottom_up::evaluate_bottom_up;\nuse name_table::NameTable;\nuse 
std::mem::swap;\n\npub struct Refiner<R, T> where R: rand::Rng, T: TruthValue {\n generator: Generator<R>,\n base_facts: FactTable<T>,\n program: Program<T>,\n samples: Vec<(FactTable<T>, FactTable<T>)>,\n gradient_iterations: usize,\n learning_rate: f64,\n max_new_body_len: usize,\n max_new_predicate_terms: usize,\n step_iterations: usize,\n clause_weight_cutoff_coeff: f64,\n num_clauses_to_add: usize,\n default_clause_weight: T::Dual,\n}\n\nimpl<R, T> Refiner<R, T> where R: rand::Rng, T: TruthValue {\n pub fn new(rng: R, facts: FactTable<T>, program: Program<T>, samples: Vec<(FactTable<T>, FactTable<T>)>) -> Self {\n let mut default_clause_weight = T::dual_zero();\n T::dual_adjust(&mut default_clause_weight, &T::dual_default(), 0.5);\n Refiner {\n generator: Generator::new(rng, &facts, &program),\n base_facts: facts,\n program: program,\n samples: samples,\n gradient_iterations: 10,\n learning_rate: 0.5,\n max_new_body_len: 8,\n max_new_predicate_terms: 8,\n step_iterations: 100,\n clause_weight_cutoff_coeff: 1.0,\n num_clauses_to_add: 5,\n default_clause_weight: default_clause_weight,\n }\n }\n\n pub fn fit_weights(&mut self) {\n for _ in 0..self.gradient_iterations {\n let res = compute_adjustments(&self.program, &self.base_facts, &self.samples, self.step_iterations);\n for (clause_idx, adjustment) in res.clause_adjustments.iter().enumerate() {\n T::dual_adjust(&mut self.program.clause_weights[clause_idx], adjustment, self.learning_rate);\n }\n }\n }\n\n pub fn add_clauses(&mut self) {\n let mut result_facts = self.base_facts.clone();\n evaluate_bottom_up(&mut result_facts, &self.program);\n self.generator.update_max_constant(&result_facts);\n self.generator.update_num_terms(&self.program);\n for _ in 0..self.num_clauses_to_add {\n self.program.push_clause(self.generator.gen_clause(self.max_new_body_len,\n self.max_new_predicate_terms),\n self.default_clause_weight.clone(),\n NameTable::new()).unwrap();\n }\n }\n\n pub fn reduce_clauses(&mut self) {\n let num_weights = self.program.clause_weights.len();\n let amount_per_weight = 1f64 \/ num_weights as f64;\n let mut mean_weight = T::dual_zero();\n for weight in self.program.clause_weights.iter() {\n T::dual_adjust(&mut mean_weight, weight, amount_per_weight);\n }\n let mut program = Program::new();\n swap(&mut program, &mut self.program);\n self.program.predicate_names = program.predicate_names.clone();\n let clauses = program.clauses;\n let weights = program.clause_weights;\n for (clause_idx, (clause, weight)) in clauses.into_iter().zip(weights).enumerate() {\n if T::dual_less(&mean_weight, &weight, self.clause_weight_cutoff_coeff) {\n let clause_var_names = program.clause_variable_names.remove(&clause_idx).unwrap_or_else(|| NameTable::new());\n self.program.push_clause(clause, weight, clause_var_names).unwrap();\n }\n }\n for &(ref input, ref output) in &self.samples {\n self.program.check_num_fact_terms(input).unwrap();\n self.program.check_num_fact_terms(output).unwrap();\n }\n self.program.check_num_fact_terms(&self.base_facts).unwrap();\n }\n\n pub fn iterate(&mut self, iterations: usize) {\n for _ in 0..iterations {\n self.add_clauses();\n self.fit_weights();\n self.reduce_clauses();\n }\n }\n\n #[cfg(test)]\n pub fn get_program(&self) -> &Program<T> {\n &self.program\n }\n\n pub fn to_program(self) -> Program<T> {\n self.program\n }\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::{Refiner};\n use parser::{program};\n use rand::XorShiftRng;\n use rand::SeedableRng;\n use truth_value::MaxFloat64;\n\n #[test]\n fn 
can_refine_single_clause() {\n let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);\n let (facts, program, samples) = program::<MaxFloat64>(r#\"\n types(0) :- a(0), b(0)\n sample\n b(1)\n output\n a(1).\n \"#).unwrap().0;\n let mut refiner = Refiner::new(rng, facts, program, samples);\n refiner.iterate(10);\n println!(\"program = {}\", refiner.get_program());\n }\n\n #[test]\n fn can_refine_conjunction() {\n let rng = XorShiftRng::from_seed([0xde, 0xad, 0xbe, 0xef]);\n let (facts, program, samples) = program::<MaxFloat64>(r#\"\n types(0) :- a(0), b(0), c(0)\n sample\n b(1),\n c(1)\n output\n a(1).\n sample\n b(2),\n c(2)\n output\n a(2).\n sample\n b(3),\n c(3)\n output\n a(3).\n sample\n b(2),\n c(3)\n output\n types(0).\n \"#).unwrap().0;\n let mut refiner = Refiner::new(rng, facts, program, samples);\n refiner.iterate(1);\n println!(\"program = {}\", refiner.get_program());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>style<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #30465 - androm3da:master, r=alexcrichton<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don't draw title in new tabbed\/stacked containers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[No-auto] lib\/entry\/category: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before>use std::convert::Into;\ngenerate_error_imports!();\n\ngenerate_custom_error_types!(HookError, HookErrorKind, CustomData,\n HookExecutionError => \"Hook exec error\",\n AccessTypeViolation => \"Hook access type violation\"\n);\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\npub struct CustomData {\n aborting: bool,\n}\n\nimpl HookError {\n\n pub fn is_aborting(&self) -> bool {\n match self.custom_data {\n Some(b) => b.aborting,\n None => true\n }\n }\n\n}\n<commit_msg>Add error kind for denied hooks<commit_after>use std::convert::Into;\ngenerate_error_imports!();\n\ngenerate_custom_error_types!(HookError, HookErrorKind, CustomData,\n HookExecutionError => \"Hook exec error\",\n AccessTypeViolation => \"Hook access type violation\",\n MutableHooksNotAllowed => \"Mutable Hooks are denied\"\n);\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Copy)]\npub struct CustomData {\n aborting: bool,\n}\n\nimpl HookError {\n\n pub fn is_aborting(&self) -> bool {\n match self.custom_data {\n Some(b) => b.aborting,\n None => true\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #24863 - asajeffrey:gstplugin-resizing, r=ferjm<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test(safety): add safety issue test<commit_after>#[macro_use]\nextern crate toml_edit;\n\nuse toml_edit::{Document, Key};\nuse std::mem;\n\nmacro_rules! parse_doc {\n ($toml:ident) => (\n {\n let doc = Document::parse($toml);\n assert!(doc.is_ok());\n doc.unwrap()\n }\n );\n}\n\n\nmacro_rules! 
as_table {\n ($entry:ident) => (\n {\n assert!($entry.is_table());\n $entry.as_table_mut().unwrap()\n }\n );\n}\n\n#[test]\nfn test_safety_issue() {\n\n let toml1 = r#\"\n[a]\nb = 2\n[a.c]\nb = 3\n\"#;\n let toml2 = r#\"\n[b]\na = 2\n[b.c]\na = 3\n\"#;\n let mut doc1 = parse_doc!(toml1);\n let mut doc2 = parse_doc!(toml2);\n\n {\n let mut r1 = doc1.root_mut();\n let mut r2 = doc2.root_mut();\n {\n let mut a = r1.entry(\"a\");\n let mut a = as_table!(a);\n let mut ac = a.entry(\"c\");\n let mut ac = as_table!(ac);\n ac.append_table(parse_key!(\"ac\"));\n\n let mut b = r2.entry(\"b\");\n let mut b = as_table!(b);\n let mut bc = b.entry(\"c\");\n let mut bc = as_table!(bc);\n bc.append_table(parse_key!(\"bc\"));\n\n mem::swap(ac, bc); \/\/ now both documents are invalid (duplicate keys) :(\n }\n\n \/\/ what's even worse,\n \/\/ `ac` is now pointing to `[b.c]`\n let mut a = r1.entry(\"a\");\n let mut a = as_table!(a);\n let mut ac = a.entry(\"c\");\n let mut ac = as_table!(ac);\n ac.append_table(parse_key!(\"'i am in [b.c]'\"));\n\n \/\/ same for `bc`\n let mut b = r2.entry(\"b\");\n let mut b = as_table!(b);\n let mut bc = b.entry(\"c\");\n let mut bc = as_table!(bc);\n bc.append_table(parse_key!(\"'i am in [a.c]'\"));\n }\n\n assert_eq!(\n doc1.to_string(),\n r#\"\n[a]\nb = 2\n[b.c]\na = 3\n\n[b.c.bc]\n\n[b.c.'i am in [b.c]']\n\"#\n );\n assert_eq!(\n doc2.to_string(),\n r#\"\n[b]\na = 2\n[a.c]\nb = 3\n\n[a.c.ac]\n\n[a.c.'i am in [a.c]']\n\"#\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Check we can bind before we spawn a thread.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\npub const DBUS_TIMEOUT: i32 = 20000; \/\/ millieconds\n\npub const STRATIS_VERSION: &'static str = \"1\";\npub const MANAGER_NAME: &'static str = \"\/Manager\";\npub const STRATIS_BASE_PATH: &'static str = \"\/org\/storage\/stratis1\";\npub const STRATIS_BASE_SERVICE: &'static str = \"org.storage.stratis1\";\npub const STRATIS_BASE_MANAGER: &'static str = \"\/org\/storage\/stratis1\/Manager\";\npub const STRATIS_MANAGER_INTERFACE: &'static str = \"org.storage.stratis1.Manager\";\npub const STRATIS_POOL_BASE_INTERFACE: &'static str = \"org.storage.stratis1.pool\";\npub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = \"org.storage.stratis1.volume\";\npub const STRATIS_DEV_BASE_INTERFACE: &'static str = \"org.storage.stratis1.dev\";\npub const STRATIS_CACHE_BASE_INTERFACE: &'static str = \"org.storage.stratis1.cache\";\npub const STRATIS_POOL_BASE_PATH: &'static str = \"\/org\/storage\/stratis\/pool\";\n\n\npub const LIST_POOLS: &'static str = \"ListPools\";\npub const CREATE_POOL: &'static str = \"CreatePool\";\npub const DESTROY_POOL: &'static str = \"DestroyPool\";\npub const GET_POOL_OBJECT_PATH: &'static str = \"GetPoolObjectPath\";\npub const GET_VOLUME_OBJECT_PATH: &'static str = \"GetVolumeObjectPath\";\npub const GET_DEV_OBJECT_PATH: &'static str = \"GetDevObjectPath\";\npub const GET_CACHE_OBJECT_PATH: &'static str = \"GetCacheObjectPath\";\npub const GET_ERROR_CODES: &'static str = \"GetErrorCodes\";\npub const GET_RAID_LEVELS: &'static str = \"GetRaidLevels\";\npub const GET_DEV_TYPES: &'static str = \"GetDevTypes\";\n\npub trait HasCodes {\n \/\/\/ Indicates that this enum can be converted to an int or described\n \/\/\/ with a string.\n fn get_error_int(&self) -> u16;\n 
fn get_error_string(&self) -> &str;\n}\n\ncustom_derive! {\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusErrorVariants),\n IterVariantNames(StratisDBusErrorVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisErrorEnum {\n STRATIS_OK,\n STRATIS_ERROR,\n STRATIS_NULL,\n STRATIS_NOTFOUND,\n STRATIS_POOL_NOTFOUND,\n STRATIS_VOLUME_NOTFOUND,\n STRATIS_DEV_NOTFOUND,\n STRATIS_CACHE_NOTFOUND,\n STRATIS_BAD_PARAM,\n STRATIS_ALREADY_EXISTS,\n STRATIS_NULL_NAME,\n STRATIS_NO_POOLS,\n STRATIS_LIST_FAILURE,\n }\n}\n\nimpl HasCodes for StratisErrorEnum {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n \/\/ TODO deal with internationalization\/do this better\n StratisErrorEnum::STRATIS_OK => \"Ok\",\n StratisErrorEnum::STRATIS_ERROR => \"A general error happened\",\n StratisErrorEnum::STRATIS_NULL => \"Null parameter was supplied\",\n StratisErrorEnum::STRATIS_NOTFOUND => \"Not found\",\n StratisErrorEnum::STRATIS_POOL_NOTFOUND => \"Pool not found\",\n StratisErrorEnum::STRATIS_VOLUME_NOTFOUND => \"Volume not found\",\n StratisErrorEnum::STRATIS_CACHE_NOTFOUND => \"Cache not found\",\n StratisErrorEnum::STRATIS_BAD_PARAM => \"Bad parameter\",\n StratisErrorEnum::STRATIS_DEV_NOTFOUND => \"Dev not found\",\n StratisErrorEnum::STRATIS_ALREADY_EXISTS => \"Already exists\",\n StratisErrorEnum::STRATIS_NULL_NAME => \"Null name supplied\",\n StratisErrorEnum::STRATIS_NO_POOLS => \"No pools\",\n StratisErrorEnum::STRATIS_LIST_FAILURE => \"List operation failure.\",\n }\n }\n}\n\ncustom_derive! {\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusRaidTypeVariants),\n IterVariantNames(StratisDBusRaidTypeVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisRaidType {\n STRATIS_RAID_TYPE_UNKNOWN,\n \/** Single *\/\n STRATIS_RAID_TYPE_SINGLE,\n \/** Mirror between two disks. For 4 disks or more, they are RAID10.*\/\n STRATIS_RAID_TYPE_RAID1,\n \/** Block-level striping with distributed parity *\/\n STRATIS_RAID_TYPE_RAID5,\n \/** Block-level striping with two distributed parities, aka, RAID-DP *\/\n STRATIS_RAID_TYPE_RAID6,\n }\n}\n\nimpl HasCodes for StratisRaidType {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n StratisRaidType::STRATIS_RAID_TYPE_UNKNOWN => \"Ok\",\n StratisRaidType::STRATIS_RAID_TYPE_SINGLE => \"Single\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID1 => \"Mirrored\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID5 => {\n \"Block-level striping with distributed parity\"\n }\n StratisRaidType::STRATIS_RAID_TYPE_RAID6 => {\n \"Block-level striping with two distributed parities\"\n }\n }\n }\n}\n<commit_msg>Remove comments from RaidType enum<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\npub const DBUS_TIMEOUT: i32 = 20000; \/\/ millieconds\n\npub const STRATIS_VERSION: &'static str = \"1\";\npub const MANAGER_NAME: &'static str = \"\/Manager\";\npub const STRATIS_BASE_PATH: &'static str = \"\/org\/storage\/stratis1\";\npub const STRATIS_BASE_SERVICE: &'static str = \"org.storage.stratis1\";\npub const STRATIS_BASE_MANAGER: &'static str = \"\/org\/storage\/stratis1\/Manager\";\npub const STRATIS_MANAGER_INTERFACE: &'static str = \"org.storage.stratis1.Manager\";\npub const STRATIS_POOL_BASE_INTERFACE: &'static str = \"org.storage.stratis1.pool\";\npub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = \"org.storage.stratis1.volume\";\npub const STRATIS_DEV_BASE_INTERFACE: &'static str = \"org.storage.stratis1.dev\";\npub const STRATIS_CACHE_BASE_INTERFACE: &'static str = \"org.storage.stratis1.cache\";\npub const STRATIS_POOL_BASE_PATH: &'static str = \"\/org\/storage\/stratis\/pool\";\n\n\npub const LIST_POOLS: &'static str = \"ListPools\";\npub const CREATE_POOL: &'static str = \"CreatePool\";\npub const DESTROY_POOL: &'static str = \"DestroyPool\";\npub const GET_POOL_OBJECT_PATH: &'static str = \"GetPoolObjectPath\";\npub const GET_VOLUME_OBJECT_PATH: &'static str = \"GetVolumeObjectPath\";\npub const GET_DEV_OBJECT_PATH: &'static str = \"GetDevObjectPath\";\npub const GET_CACHE_OBJECT_PATH: &'static str = \"GetCacheObjectPath\";\npub const GET_ERROR_CODES: &'static str = \"GetErrorCodes\";\npub const GET_RAID_LEVELS: &'static str = \"GetRaidLevels\";\npub const GET_DEV_TYPES: &'static str = \"GetDevTypes\";\n\npub trait HasCodes {\n \/\/\/ Indicates that this enum can be converted to an int or described\n \/\/\/ with a string.\n fn get_error_int(&self) -> u16;\n fn get_error_string(&self) -> &str;\n}\n\ncustom_derive! {\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusErrorVariants),\n IterVariantNames(StratisDBusErrorVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisErrorEnum {\n STRATIS_OK,\n STRATIS_ERROR,\n STRATIS_NULL,\n STRATIS_NOTFOUND,\n STRATIS_POOL_NOTFOUND,\n STRATIS_VOLUME_NOTFOUND,\n STRATIS_DEV_NOTFOUND,\n STRATIS_CACHE_NOTFOUND,\n STRATIS_BAD_PARAM,\n STRATIS_ALREADY_EXISTS,\n STRATIS_NULL_NAME,\n STRATIS_NO_POOLS,\n STRATIS_LIST_FAILURE,\n }\n}\n\nimpl HasCodes for StratisErrorEnum {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n \/\/ TODO deal with internationalization\/do this better\n StratisErrorEnum::STRATIS_OK => \"Ok\",\n StratisErrorEnum::STRATIS_ERROR => \"A general error happened\",\n StratisErrorEnum::STRATIS_NULL => \"Null parameter was supplied\",\n StratisErrorEnum::STRATIS_NOTFOUND => \"Not found\",\n StratisErrorEnum::STRATIS_POOL_NOTFOUND => \"Pool not found\",\n StratisErrorEnum::STRATIS_VOLUME_NOTFOUND => \"Volume not found\",\n StratisErrorEnum::STRATIS_CACHE_NOTFOUND => \"Cache not found\",\n StratisErrorEnum::STRATIS_BAD_PARAM => \"Bad parameter\",\n StratisErrorEnum::STRATIS_DEV_NOTFOUND => \"Dev not found\",\n StratisErrorEnum::STRATIS_ALREADY_EXISTS => \"Already exists\",\n StratisErrorEnum::STRATIS_NULL_NAME => \"Null name supplied\",\n StratisErrorEnum::STRATIS_NO_POOLS => \"No pools\",\n StratisErrorEnum::STRATIS_LIST_FAILURE => \"List operation failure.\",\n }\n }\n}\n\ncustom_derive! 
{\n #[derive(Copy, Clone, EnumDisplay,\n IterVariants(StratisDBusRaidTypeVariants),\n IterVariantNames(StratisDBusRaidTypeVariantNames))]\n #[allow(non_camel_case_types)]\n pub enum StratisRaidType {\n STRATIS_RAID_TYPE_UNKNOWN,\n STRATIS_RAID_TYPE_SINGLE,\n STRATIS_RAID_TYPE_RAID1,\n STRATIS_RAID_TYPE_RAID5,\n STRATIS_RAID_TYPE_RAID6,\n }\n}\n\nimpl HasCodes for StratisRaidType {\n fn get_error_int(&self) -> u16 {\n *self as u16\n }\n\n fn get_error_string(&self) -> &str {\n match *self {\n StratisRaidType::STRATIS_RAID_TYPE_UNKNOWN => \"Ok\",\n StratisRaidType::STRATIS_RAID_TYPE_SINGLE => \"Single\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID1 => \"Mirrored\",\n StratisRaidType::STRATIS_RAID_TYPE_RAID5 => {\n \"Block-level striping with distributed parity\"\n }\n StratisRaidType::STRATIS_RAID_TYPE_RAID6 => {\n \"Block-level striping with two distributed parities\"\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fixed pipes. Before they could only pull one value before the started to return FinishedPipe. Now you can pull all the values out of the pipe. I'm not sure that this is the best solution, but it is the only one I could come up with in two days of trying.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>implement part of the interpreter<commit_after>use operations::Op;\n\npub struct Interpreter {\n memory: Vec<u8>,\n pointer: i64,\n ops: Vec<Op>,\n}\n\nimpl Interpreter {\n pub fn new(ops: Vec<Op>) -> Interpreter {\n let m = (0 .. 30000).map(|_| 0).collect();\n Interpreter { memory: m, pointer: 0, ops: ops }\n }\n\n pub fn run(&mut self) {\n let mut program_counter = 0;\n while program_counter < self.ops.len() {\n match self.ops[program_counter] {\n Op::Increment => self.increment(),\n Op::Decrement => self.decrement(),\n Op::Output => self.output(),\n Op::Right => self.right(),\n Op::Left => self.left(),\n Op::Jump => self.jump(&mut program_counter),\n Op::JumpBack => self.jump_back(&mut program_counter),\n _ => panic!(\"boom\"),\n }\n program_counter += 1;\n }\n println!(\"\");\n }\n\n fn left(&mut self) {\n self.pointer -= 1;\n }\n\n fn right(&mut self) {\n self.pointer += 1;\n }\n\n fn increment(&mut self) {\n self.memory[self.pointer as usize] += 1;\n }\n\n fn decrement(&mut self) {\n self.memory[self.pointer as usize] -= 1;\n }\n\n fn output(&self) {\n print!(\"{}\", (self.memory[self.pointer as usize]) as char);\n }\n\n fn jump(&mut self, program_counter: &mut usize) {\n let mut bal = 1i32;\n if self.memory[self.pointer as usize] == 0u8 {\n loop {\n *program_counter += 1;\n if self.ops[*program_counter] == Op::Jump {\n bal += 1;\n } else if self.ops[*program_counter] == Op::JumpBack {\n bal -= 1;\n }\n if bal == 0 {\n break;\n }\n }\n }\n }\n\n fn jump_back(&mut self, program_counter: &mut usize) {\n let mut bal = 0i32;\n loop {\n if self.ops[*program_counter] == Op::Jump {\n bal += 1;\n } else if self.ops[*program_counter] == Op::JumpBack {\n bal -= 1;\n }\n *program_counter -= 1;\n if bal == 0 {\n break;\n }\n }\n }\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Silence warnings about macro-generated dead code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>get_subscribed_presences::Response::presence_events should be public<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"split\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Akira Hayakawa <ruby.wktk@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was 
distributed with this source code.\n *\/\n\n#![feature(macro_rules)]\n\nextern crate getopts;\nextern crate libc;\n\nuse std::io;\nuse std::num::Int;\nuse std::char;\n\n#[path = \"..\/common\/util.rs\"]\nmod util;\n\nstatic NAME: &'static str = \"split\";\nstatic VERSION: &'static str = \"1.0.0\";\n\npub fn uumain(args: Vec<String>) -> int {\n let opts = [\n getopts::optopt(\"a\", \"suffix-length\", \"use suffixes of length N (default 2)\", \"N\"),\n getopts::optopt(\"b\", \"bytes\", \"put SIZE bytes per output file\", \"SIZE\"),\n getopts::optopt(\"C\", \"line-bytes\", \"put at most SIZE bytes of lines per output file\", \"SIZE\"),\n getopts::optflag(\"d\", \"numeric-suffixes\", \"use numeric suffixes instead of alphabetic\"),\n getopts::optopt(\"l\", \"lines\", \"put NUMBER lines per output file\", \"NUMBER\"),\n getopts::optflag(\"\", \"verbose\", \"print a diagnostic just before each output file is opened\"),\n getopts::optflag(\"h\", \"help\", \"display help and exit\"),\n getopts::optflag(\"V\", \"version\", \"output version information and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"{}\", f)\n };\n\n if matches.opt_present(\"h\") {\n println!(\"{} v{}\", NAME, VERSION);\n println!(\"\");\n println!(\"Usage:\");\n println!(\" {0} [OPTION]... [INPUT [PREFIX]]\", NAME);\n println!(\"\");\n io::print(getopts::usage(\"Output fixed-size pieces of INPUT to PREFIXaa, PREFIX ab, ...; default size is 1000, and default PREFIX is 'x'. With no INPUT, or when INPUT is -, read standard input.\" , &opts).as_slice());\n println!(\"\");\n println!(\"SIZE may have a multiplier suffix: b for 512, k for 1K, m for 1 Meg.\");\n return 0;\n }\n\n if matches.opt_present(\"V\") {\n println!(\"{} v{}\", NAME, VERSION);\n return 0;\n }\n\n let mut settings = Settings {\n prefix: \"\".to_string(),\n numeric_suffix: false,\n suffix_length: 0,\n input: \"\".to_string(),\n strategy: \"\".to_string(),\n strategy_param: \"\".to_string(),\n verbose: false,\n };\n\n settings.numeric_suffix = if matches.opt_present(\"d\") { true } else { false };\n\n settings.suffix_length = match matches.opt_str(\"a\") {\n Some(n) => match from_str(n.as_slice()) {\n Some(m) => m,\n None => crash!(1, \"cannot parse num\")\n },\n None => 2\n };\n\n settings.verbose = if matches.opt_present(\"verbose\") { true } else { false };\n\n settings.strategy = \"l\".to_string();\n settings.strategy_param = \"1000\".to_string();\n let strategies = vec![\"b\", \"C\", \"l\"];\n for e in strategies.iter() {\n match matches.opt_str(*e) {\n Some(a) => {\n if settings.strategy.as_slice() == \"l\" {\n settings.strategy = e.to_string();\n settings.strategy_param = a;\n } else {\n crash!(1, \"{}: cannot split in more than one way\", NAME)\n }\n },\n None => {}\n }\n }\n\n let mut v = matches.free.iter();\n let (input, prefix) = match (v.next(), v.next()) {\n (Some(a), None) => (a.to_string(), \"x\".to_string()),\n (Some(a), Some(b)) => (a.to_string(), b.to_string()),\n (None, _) => (\"-\".to_string(), \"x\".to_string()),\n };\n settings.input = input;\n settings.prefix = prefix;\n\n split(&settings)\n}\n\nstruct Settings {\n prefix: String,\n numeric_suffix: bool,\n suffix_length: uint,\n input: String,\n strategy: String,\n strategy_param: String,\n verbose: bool,\n}\n\nstruct SplitControl {\n current_line: String, \/\/ Don't touch\n request_new_file: bool, \/\/ Splitter implementation requests new file\n}\n\ntrait Splitter {\n \/\/ Factory pattern\n fn new(_hint: Option<Self>, 
&Settings) -> Box<Splitter>;\n\n \/\/ Consume the current_line and return the consumed string\n fn consume(&mut self, &mut SplitControl) -> String;\n}\n\nstruct LineSplitter {\n saved_lines_to_write: uint,\n lines_to_write: uint,\n}\n\nimpl Splitter for LineSplitter {\n fn new(_: Option<LineSplitter>, settings: &Settings) -> Box<Splitter> {\n let n = match from_str(settings.strategy_param.as_slice()) {\n Some(a) => a,\n _ => crash!(1, \"invalid number of lines\")\n };\n box LineSplitter {\n saved_lines_to_write: n,\n lines_to_write: n,\n } as Box<Splitter>\n }\n\n fn consume(&mut self, control: &mut SplitControl) -> String {\n self.lines_to_write -= 1;\n if self.lines_to_write == 0 {\n self.lines_to_write = self.saved_lines_to_write;\n control.request_new_file = true;\n }\n control.current_line.clone()\n }\n}\n\nstruct ByteSplitter {\n saved_bytes_to_write: uint,\n bytes_to_write: uint,\n}\n\nimpl Splitter for ByteSplitter {\n fn new(_: Option<ByteSplitter>, settings: &Settings) -> Box<Splitter> {\n let mut strategy_param : Vec<char> = settings.strategy_param.chars().collect();\n let suffix = strategy_param.pop().unwrap();\n let multiplier = match suffix {\n '0'...'9' => 1u,\n 'b' => 512u,\n 'k' => 1024u,\n 'm' => 1024u * 1024u,\n _ => crash!(1, \"invalid number of bytes\")\n };\n let n = if suffix.is_alphabetic() {\n match String::from_chars(strategy_param.as_slice()).as_slice().parse::<uint>() {\n Some(a) => a,\n _ => crash!(1, \"invalid number of bytes\")\n }\n } else {\n match settings.strategy_param.as_slice().parse::<uint>() {\n Some(a) => a,\n _ => crash!(1, \"invalid number of bytes\")\n }\n };\n box ByteSplitter {\n saved_bytes_to_write: n * multiplier,\n bytes_to_write: n * multiplier,\n } as Box<Splitter>\n }\n\n fn consume(&mut self, control: &mut SplitControl) -> String {\n let line = control.current_line.clone();\n let n = std::cmp::min(line.as_slice().chars().count(), self.bytes_to_write);\n self.bytes_to_write -= n;\n if n == 0 {\n self.bytes_to_write = self.saved_bytes_to_write;\n control.request_new_file = true;\n }\n line.as_slice().slice(0, n).to_string()\n }\n}\n\n\/\/ (1, 3) -> \"aab\"\nfn str_prefix(i: uint, width: uint) -> String {\n let mut c = \"\".to_string();\n let mut n = i;\n let mut w = width;\n while w > 0 {\n w -= 1;\n let div = Int::pow(26 as uint, w);\n let r = n \/ div;\n n -= r * div;\n c.push(char::from_u32((r as u32) + 97).unwrap());\n }\n c\n}\n\n\/\/ (1, 3) -> \"001\"\nfn num_prefix(i: uint, width: uint) -> String {\n let mut c = \"\".to_string();\n let mut n = i;\n let mut w = width;\n while w > 0 {\n w -= 1;\n let div = Int::pow(10 as uint, w);\n let r = n \/ div;\n n -= r * div;\n c.push(char::from_digit(r, 10).unwrap());\n }\n c\n}\n\nfn split(settings: &Settings) -> int {\n let mut reader = io::BufferedReader::new(\n if settings.input.as_slice() == \"-\" {\n box io::stdio::stdin_raw() as Box<Reader>\n } else {\n let r = match io::File::open(&Path::new(settings.input.clone())) {\n Ok(a) => a,\n Err(_) => crash!(1, \"cannot open '{}' for reading: No such file or directory\", settings.input)\n };\n box r as Box<Reader>\n }\n );\n\n let mut splitter: Box<Splitter> =\n match settings.strategy.as_slice() {\n \"l\" => Splitter::new(None::<LineSplitter>, settings),\n \"b\" => Splitter::new(None::<ByteSplitter>, settings),\n a @ _ => crash!(1, \"strategy {} not supported\", a)\n };\n\n let mut control = SplitControl {\n current_line: \"\".to_string(), \/\/ Request new line\n request_new_file: true, \/\/ Request new file\n };\n\n let mut writer = 
io::BufferedWriter::new(box io::stdio::stdout_raw() as Box<Writer>);\n let mut fileno = 0;\n loop {\n if control.current_line.as_slice().chars().count() == 0 {\n match reader.read_line() {\n Ok(a) => { control.current_line = a; }\n Err(_) => { break; }\n }\n }\n\n if control.request_new_file {\n let mut filename = settings.prefix.to_string();\n filename.push_str(if settings.numeric_suffix {\n num_prefix(fileno, settings.suffix_length)\n } else {\n str_prefix(fileno, settings.suffix_length)\n }.as_slice());\n\n if fileno != 0 {\n crash_if_err!(1, writer.flush());\n }\n fileno += 1;\n writer = io::BufferedWriter::new(box io::File::open_mode(&Path::new(filename.as_slice()), io::Open, io::Write) as Box<Writer>);\n control.request_new_file = false;\n }\n\n let consumed = splitter.consume(&mut control);\n crash_if_err!(1, writer.write_str(consumed.as_slice()));\n\n let advance = consumed.as_slice().chars().count();\n let clone = control.current_line.clone();\n let sl = clone.as_slice();\n control.current_line = sl.slice(advance, sl.chars().count()).to_string();\n }\n 0\n}\n<commit_msg>Removed deprecated calls.<commit_after>#![crate_name = \"split\"]\n\n\/*\n * This file is part of the uutils coreutils package.\n *\n * (c) Akira Hayakawa <ruby.wktk@gmail.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n *\/\n\n#![feature(macro_rules)]\n\nextern crate getopts;\nextern crate libc;\n\nuse std::io;\nuse std::num::Int;\nuse std::char;\n\n#[path = \"..\/common\/util.rs\"]\nmod util;\n\nstatic NAME: &'static str = \"split\";\nstatic VERSION: &'static str = \"1.0.0\";\n\npub fn uumain(args: Vec<String>) -> int {\n let opts = [\n getopts::optopt(\"a\", \"suffix-length\", \"use suffixes of length N (default 2)\", \"N\"),\n getopts::optopt(\"b\", \"bytes\", \"put SIZE bytes per output file\", \"SIZE\"),\n getopts::optopt(\"C\", \"line-bytes\", \"put at most SIZE bytes of lines per output file\", \"SIZE\"),\n getopts::optflag(\"d\", \"numeric-suffixes\", \"use numeric suffixes instead of alphabetic\"),\n getopts::optopt(\"l\", \"lines\", \"put NUMBER lines per output file\", \"NUMBER\"),\n getopts::optflag(\"\", \"verbose\", \"print a diagnostic just before each output file is opened\"),\n getopts::optflag(\"h\", \"help\", \"display help and exit\"),\n getopts::optflag(\"V\", \"version\", \"output version information and exit\"),\n ];\n\n let matches = match getopts::getopts(args.tail(), &opts) {\n Ok(m) => m,\n Err(f) => crash!(1, \"{}\", f)\n };\n\n if matches.opt_present(\"h\") {\n println!(\"{} v{}\", NAME, VERSION);\n println!(\"\");\n println!(\"Usage:\");\n println!(\" {0} [OPTION]... [INPUT [PREFIX]]\", NAME);\n println!(\"\");\n io::print(getopts::usage(\"Output fixed-size pieces of INPUT to PREFIXaa, PREFIX ab, ...; default size is 1000, and default PREFIX is 'x'. 
With no INPUT, or when INPUT is -, read standard input.\" , &opts).as_slice());\n println!(\"\");\n println!(\"SIZE may have a multiplier suffix: b for 512, k for 1K, m for 1 Meg.\");\n return 0;\n }\n\n if matches.opt_present(\"V\") {\n println!(\"{} v{}\", NAME, VERSION);\n return 0;\n }\n\n let mut settings = Settings {\n prefix: \"\".to_string(),\n numeric_suffix: false,\n suffix_length: 0,\n input: \"\".to_string(),\n strategy: \"\".to_string(),\n strategy_param: \"\".to_string(),\n verbose: false,\n };\n\n settings.numeric_suffix = if matches.opt_present(\"d\") { true } else { false };\n\n settings.suffix_length = match matches.opt_str(\"a\") {\n Some(n) => match n.as_slice().parse() {\n Some(m) => m,\n None => crash!(1, \"cannot parse num\")\n },\n None => 2\n };\n\n settings.verbose = if matches.opt_present(\"verbose\") { true } else { false };\n\n settings.strategy = \"l\".to_string();\n settings.strategy_param = \"1000\".to_string();\n let strategies = vec![\"b\", \"C\", \"l\"];\n for e in strategies.iter() {\n match matches.opt_str(*e) {\n Some(a) => {\n if settings.strategy.as_slice() == \"l\" {\n settings.strategy = e.to_string();\n settings.strategy_param = a;\n } else {\n crash!(1, \"{}: cannot split in more than one way\", NAME)\n }\n },\n None => {}\n }\n }\n\n let mut v = matches.free.iter();\n let (input, prefix) = match (v.next(), v.next()) {\n (Some(a), None) => (a.to_string(), \"x\".to_string()),\n (Some(a), Some(b)) => (a.to_string(), b.to_string()),\n (None, _) => (\"-\".to_string(), \"x\".to_string()),\n };\n settings.input = input;\n settings.prefix = prefix;\n\n split(&settings)\n}\n\nstruct Settings {\n prefix: String,\n numeric_suffix: bool,\n suffix_length: uint,\n input: String,\n strategy: String,\n strategy_param: String,\n verbose: bool,\n}\n\nstruct SplitControl {\n current_line: String, \/\/ Don't touch\n request_new_file: bool, \/\/ Splitter implementation requests new file\n}\n\ntrait Splitter {\n \/\/ Factory pattern\n fn new(_hint: Option<Self>, &Settings) -> Box<Splitter>;\n\n \/\/ Consume the current_line and return the consumed string\n fn consume(&mut self, &mut SplitControl) -> String;\n}\n\nstruct LineSplitter {\n saved_lines_to_write: uint,\n lines_to_write: uint,\n}\n\nimpl Splitter for LineSplitter {\n fn new(_: Option<LineSplitter>, settings: &Settings) -> Box<Splitter> {\n let n = match settings.strategy_param.as_slice().parse() {\n Some(a) => a,\n _ => crash!(1, \"invalid number of lines\")\n };\n box LineSplitter {\n saved_lines_to_write: n,\n lines_to_write: n,\n } as Box<Splitter>\n }\n\n fn consume(&mut self, control: &mut SplitControl) -> String {\n self.lines_to_write -= 1;\n if self.lines_to_write == 0 {\n self.lines_to_write = self.saved_lines_to_write;\n control.request_new_file = true;\n }\n control.current_line.clone()\n }\n}\n\nstruct ByteSplitter {\n saved_bytes_to_write: uint,\n bytes_to_write: uint,\n}\n\nimpl Splitter for ByteSplitter {\n fn new(_: Option<ByteSplitter>, settings: &Settings) -> Box<Splitter> {\n let mut strategy_param : Vec<char> = settings.strategy_param.chars().collect();\n let suffix = strategy_param.pop().unwrap();\n let multiplier = match suffix {\n '0'...'9' => 1u,\n 'b' => 512u,\n 'k' => 1024u,\n 'm' => 1024u * 1024u,\n _ => crash!(1, \"invalid number of bytes\")\n };\n let n = if suffix.is_alphabetic() {\n match String::from_chars(strategy_param.as_slice()).as_slice().parse::<uint>() {\n Some(a) => a,\n _ => crash!(1, \"invalid number of bytes\")\n }\n } else {\n match 
settings.strategy_param.as_slice().parse::<uint>() {\n Some(a) => a,\n _ => crash!(1, \"invalid number of bytes\")\n }\n };\n box ByteSplitter {\n saved_bytes_to_write: n * multiplier,\n bytes_to_write: n * multiplier,\n } as Box<Splitter>\n }\n\n fn consume(&mut self, control: &mut SplitControl) -> String {\n let line = control.current_line.clone();\n let n = std::cmp::min(line.as_slice().chars().count(), self.bytes_to_write);\n self.bytes_to_write -= n;\n if n == 0 {\n self.bytes_to_write = self.saved_bytes_to_write;\n control.request_new_file = true;\n }\n line.as_slice().slice(0, n).to_string()\n }\n}\n\n\/\/ (1, 3) -> \"aab\"\nfn str_prefix(i: uint, width: uint) -> String {\n let mut c = \"\".to_string();\n let mut n = i;\n let mut w = width;\n while w > 0 {\n w -= 1;\n let div = Int::pow(26 as uint, w);\n let r = n \/ div;\n n -= r * div;\n c.push(char::from_u32((r as u32) + 97).unwrap());\n }\n c\n}\n\n\/\/ (1, 3) -> \"001\"\nfn num_prefix(i: uint, width: uint) -> String {\n let mut c = \"\".to_string();\n let mut n = i;\n let mut w = width;\n while w > 0 {\n w -= 1;\n let div = Int::pow(10 as uint, w);\n let r = n \/ div;\n n -= r * div;\n c.push(char::from_digit(r, 10).unwrap());\n }\n c\n}\n\nfn split(settings: &Settings) -> int {\n let mut reader = io::BufferedReader::new(\n if settings.input.as_slice() == \"-\" {\n box io::stdio::stdin_raw() as Box<Reader>\n } else {\n let r = match io::File::open(&Path::new(settings.input.clone())) {\n Ok(a) => a,\n Err(_) => crash!(1, \"cannot open '{}' for reading: No such file or directory\", settings.input)\n };\n box r as Box<Reader>\n }\n );\n\n let mut splitter: Box<Splitter> =\n match settings.strategy.as_slice() {\n \"l\" => Splitter::new(None::<LineSplitter>, settings),\n \"b\" => Splitter::new(None::<ByteSplitter>, settings),\n a @ _ => crash!(1, \"strategy {} not supported\", a)\n };\n\n let mut control = SplitControl {\n current_line: \"\".to_string(), \/\/ Request new line\n request_new_file: true, \/\/ Request new file\n };\n\n let mut writer = io::BufferedWriter::new(box io::stdio::stdout_raw() as Box<Writer>);\n let mut fileno = 0;\n loop {\n if control.current_line.as_slice().chars().count() == 0 {\n match reader.read_line() {\n Ok(a) => { control.current_line = a; }\n Err(_) => { break; }\n }\n }\n\n if control.request_new_file {\n let mut filename = settings.prefix.to_string();\n filename.push_str(if settings.numeric_suffix {\n num_prefix(fileno, settings.suffix_length)\n } else {\n str_prefix(fileno, settings.suffix_length)\n }.as_slice());\n\n if fileno != 0 {\n crash_if_err!(1, writer.flush());\n }\n fileno += 1;\n writer = io::BufferedWriter::new(box io::File::open_mode(&Path::new(filename.as_slice()), io::Open, io::Write) as Box<Writer>);\n control.request_new_file = false;\n }\n\n let consumed = splitter.consume(&mut control);\n crash_if_err!(1, writer.write_str(consumed.as_slice()));\n\n let advance = consumed.as_slice().chars().count();\n let clone = control.current_line.clone();\n let sl = clone.as_slice();\n control.current_line = sl.slice(advance, sl.chars().count()).to_string();\n }\n 0\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test that binops consume their arguments<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that binary operators consume their arguments\n\nfn add<A: Add<B, ()>, B>(lhs: A, rhs: B) {\n lhs + rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn sub<A: Sub<B, ()>, B>(lhs: A, rhs: B) {\n lhs - rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn mul<A: Mul<B, ()>, B>(lhs: A, rhs: B) {\n lhs * rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn div<A: Div<B, ()>, B>(lhs: A, rhs: B) {\n lhs \/ rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn rem<A: Rem<B, ()>, B>(lhs: A, rhs: B) {\n lhs % rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn bitand<A: BitAnd<B, ()>, B>(lhs: A, rhs: B) {\n lhs & rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn bitor<A: BitOr<B, ()>, B>(lhs: A, rhs: B) {\n lhs | rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn bitxor<A: BitXor<B, ()>, B>(lhs: A, rhs: B) {\n lhs ^ rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn shl<A: Shl<B, ()>, B>(lhs: A, rhs: B) {\n lhs << rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn shr<A: Shr<B, ()>, B>(lhs: A, rhs: B) {\n lhs >> rhs;\n drop(lhs); \/\/~ ERROR use of moved value: `lhs`\n drop(rhs); \/\/~ ERROR use of moved value: `rhs`\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>cubeb-rs: Add example usage of cubeb.<commit_after>\/\/ Copyright © 2011 Mozilla Foundation\n\/\/\n\/\/ This program is made available under an ISC-style license. See the\n\/\/ accompanying file LICENSE for details.\n\n\/\/! libcubeb api\/function test. 
Plays a simple tone.\nextern crate cubeb;\n\nuse cubeb::SampleType;\nuse std::f32::consts::PI;\nuse std::thread;\nuse std::time::Duration;\n\nconst SAMPLE_FREQUENCY: u32 = 48000;\nconst STREAM_FORMAT: cubeb::SampleFormat = cubeb::SampleFormat::S16LE;\n\n\/\/ store the phase of the generated waveform\nstruct Tone {\n position: isize\n}\n\nimpl cubeb::StreamCallback for Tone {\n type Frame = cubeb::MonoFrame<i16>;\n\n fn data_callback(&mut self, _: &[cubeb::MonoFrame<i16>], output: &mut [cubeb::MonoFrame<i16>]) -> isize {\n\n \/\/ generate our test tone on the fly\n for f in output.iter_mut() {\n \/\/ North American dial tone\n let t1 = (2.0 * PI * 350.0 * self.position as f32 \/ SAMPLE_FREQUENCY as f32).sin();\n let t2 = (2.0 * PI * 440.0 * self.position as f32 \/ SAMPLE_FREQUENCY as f32).sin();\n\n f.m = i16::from_float(0.5 * (t1 + t2));\n\n self.position += 1;\n }\n\n output.len() as isize\n }\n\n fn state_callback(&mut self, state: cubeb::State) {\n println!(\"stream {:?}\", state);\n }\n}\n\nfn main() {\n let ctx = cubeb::Context::init(\"Cubeb tone example\", None).expect(\"Failed to create cubeb context\");\n\n \/\/ std::unique_ptr<cb_user_data> user_data(new cb_user_data());\n \/\/ ASSERT_TRUE(!!user_data) << \"Error allocating user data\";\n\n \/\/ user_data->position = 0;\n\n let params = cubeb::StreamParamsBuilder::new()\n .format(STREAM_FORMAT)\n .rate(SAMPLE_FREQUENCY)\n .channels(1)\n .layout(cubeb::ChannelLayout::Mono)\n .take();\n\n let stream_init_opts = cubeb::StreamInitOptionsBuilder::new()\n .stream_name(\"Cubeb tone (mono)\")\n .output_stream_param(¶ms)\n .latency(4096)\n .take();\n\n let stream = ctx.stream_init(\n &stream_init_opts,\n Tone {\n position: 0\n }\n ).expect(\"Failed to create cubeb stream\");\n\n stream.start().unwrap();\n thread::sleep(Duration::from_millis(500));\n stream.stop().unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test case that checks that class members must be prefixed with \"self\"<commit_after>class cat {\n priv {\n let mut meows : uint;\n fn sleep() { loop{} }\n fn meow() {\n #error(\"Meow\");\n meows += 1u; \/\/! ERROR unresolved name\n sleep(); \/\/! 
ERROR unresolved name\n }\n }\n\n new(in_x : uint) { self.meows = in_x; }\n}\n\n fn main() { }<|endoftext|>"} {"text":"<commit_before>extern crate specs;\nextern crate futures;\nextern crate serde;\nextern crate serde_json;\nextern crate websocket_server;\n\nuse self::specs::{Join, ReadStorage, WriteStorage, System, Entities, EntitiesRes};\nuse self::futures::{Future, Sink};\nuse self::websocket_server::Message;\nuse self::serde::ser::Serialize;\n\nuse model::comp::{Pos, Vel, Connect, Disconnect, Player as PlayerComp, Actor};\nuse model::client::{Message as ClientMessage, OpCode};\n\nuse std::collections::HashMap;\nuse std::fmt::Debug;\n\npub struct Sending;\nimpl<'a> System<'a> for Sending {\n #[allow(type_complexity)]\n type SystemData = (ReadStorage<'a, Pos>,\n ReadStorage<'a, Vel>,\n ReadStorage<'a, PlayerComp>,\n ReadStorage<'a, Actor>,\n WriteStorage<'a, Connect>,\n ReadStorage<'a, Disconnect>,\n Entities<'a>);\n\n fn run(\n &mut self,\n (pos, vel, player, actor, mut connect, disconnect, entities): Self::SystemData,\n ) {\n\n handle_new_connections(&player, &*entities, &actor, &mut connect);\n handle_disconnects(&player, &actor, &disconnect);\n\n send_world_updates(&player, &actor, &pos, &vel);\n }\n}\n\n\nfn send<T>(player: &PlayerComp, msg: &ClientMessage<T>)\nwhere\n T: Serialize + Debug,\n{\n let msg = serde_json::to_string(&msg).expect(&format!(\"Failed to serialize object {:?}\", msg));\n let send_channel = player.send_channel.clone();\n send_channel.send(Message::Text(msg)).wait().expect(\n \"Failed to send message\",\n );\n}\n\n\nfn handle_new_connections(\n player: &ReadStorage<PlayerComp>,\n entities: &EntitiesRes,\n actor: &ReadStorage<Actor>,\n connect: &mut WriteStorage<Connect>,\n) {\n let mut new_connections = Vec::new();\n for (new_player, entity, new_actor, _) in (player, entities, actor, &mut *connect).join() {\n new_connections.push((entity.clone(), new_actor.clone()));\n let mut actors = Vec::new();\n for actor in (&actor).join() {\n actors.push(actor);\n }\n let msg = ClientMessage::new_greeting(&new_actor.id, &actors);\n send(new_player, &msg);\n }\n\n for new_connection in new_connections {\n let (new_entity, new_actor) = new_connection;\n connect.remove(new_entity);\n let msg = ClientMessage::new_connection(&new_actor);\n for (player, entity) in (player, entities).join() {\n if entity != new_entity {\n send(player, &msg);\n }\n }\n }\n}\n\nfn handle_disconnects(\n player: &ReadStorage<PlayerComp>,\n actor: &ReadStorage<Actor>,\n disconnect: &ReadStorage<Disconnect>,\n) {\n for (actor, _) in (actor, disconnect).join() {\n let msg = ClientMessage::new_disconnect(&actor.id);\n for player in (player).join() {\n send(player, &msg);\n }\n }\n\n}\n\nfn send_world_updates(\n player: &ReadStorage<PlayerComp>,\n actor: &ReadStorage<Actor>,\n pos: &ReadStorage<Pos>,\n vel: &ReadStorage<Vel>\n) {\n let mut serialized_actors = HashMap::new();\n for actor in (actor).join() {\n serialized_actors.insert(actor.id, HashMap::new());\n }\n\n for (pos, vel, actor) in (pos, vel, actor).join() {\n let mut actor = serialized_actors.get_mut(&actor.id).unwrap();\n actor.insert(\"pos\", json!(pos));\n actor.insert(\"vel\", json!(vel));\n }\n\n for (player, actor) in (player, actor).join() {\n let mut actor = serialized_actors.get_mut(&actor.id).unwrap();\n actor.insert(\"delay\", json!(player.delay));\n }\n let json_actors = json!(serialized_actors);\n for player in (player).join() {\n let last_input = json!(player.last_input);\n let payload = hashmap!(\n \"last_input\" => 
&last_input,\n \"actors\" => &json_actors\n );\n let world_state = ClientMessage {\n opcode: OpCode::WorldUpdate,\n payload: &payload\n };\n send(player, &world_state);\n }\n}\n<commit_msg>Core: Run rustfmt<commit_after>extern crate specs;\nextern crate futures;\nextern crate serde;\nextern crate serde_json;\nextern crate websocket_server;\n\nuse self::specs::{Join, ReadStorage, WriteStorage, System, Entities, EntitiesRes};\nuse self::futures::{Future, Sink};\nuse self::websocket_server::Message;\nuse self::serde::ser::Serialize;\n\nuse model::comp::{Pos, Vel, Connect, Disconnect, Player as PlayerComp, Actor};\nuse model::client::{Message as ClientMessage, OpCode};\n\nuse std::collections::HashMap;\nuse std::fmt::Debug;\n\npub struct Sending;\nimpl<'a> System<'a> for Sending {\n #[allow(type_complexity)]\n type SystemData = (ReadStorage<'a, Pos>,\n ReadStorage<'a, Vel>,\n ReadStorage<'a, PlayerComp>,\n ReadStorage<'a, Actor>,\n WriteStorage<'a, Connect>,\n ReadStorage<'a, Disconnect>,\n Entities<'a>);\n\n fn run(\n &mut self,\n (pos, vel, player, actor, mut connect, disconnect, entities): Self::SystemData,\n ) {\n\n handle_new_connections(&player, &*entities, &actor, &mut connect);\n handle_disconnects(&player, &actor, &disconnect);\n\n send_world_updates(&player, &actor, &pos, &vel);\n }\n}\n\n\nfn send<T>(player: &PlayerComp, msg: &ClientMessage<T>)\nwhere\n T: Serialize + Debug,\n{\n let msg = serde_json::to_string(&msg).expect(&format!(\"Failed to serialize object {:?}\", msg));\n let send_channel = player.send_channel.clone();\n send_channel.send(Message::Text(msg)).wait().expect(\n \"Failed to send message\",\n );\n}\n\n\nfn handle_new_connections(\n player: &ReadStorage<PlayerComp>,\n entities: &EntitiesRes,\n actor: &ReadStorage<Actor>,\n connect: &mut WriteStorage<Connect>,\n) {\n let mut new_connections = Vec::new();\n for (new_player, entity, new_actor, _) in (player, entities, actor, &mut *connect).join() {\n new_connections.push((entity.clone(), new_actor.clone()));\n let mut actors = Vec::new();\n for actor in (&actor).join() {\n actors.push(actor);\n }\n let msg = ClientMessage::new_greeting(&new_actor.id, &actors);\n send(new_player, &msg);\n }\n\n for new_connection in new_connections {\n let (new_entity, new_actor) = new_connection;\n connect.remove(new_entity);\n let msg = ClientMessage::new_connection(&new_actor);\n for (player, entity) in (player, entities).join() {\n if entity != new_entity {\n send(player, &msg);\n }\n }\n }\n}\n\nfn handle_disconnects(\n player: &ReadStorage<PlayerComp>,\n actor: &ReadStorage<Actor>,\n disconnect: &ReadStorage<Disconnect>,\n) {\n for (actor, _) in (actor, disconnect).join() {\n let msg = ClientMessage::new_disconnect(&actor.id);\n for player in (player).join() {\n send(player, &msg);\n }\n }\n\n}\n\nfn send_world_updates(\n player: &ReadStorage<PlayerComp>,\n actor: &ReadStorage<Actor>,\n pos: &ReadStorage<Pos>,\n vel: &ReadStorage<Vel>,\n) {\n let mut serialized_actors = HashMap::new();\n for actor in (actor).join() {\n serialized_actors.insert(actor.id, HashMap::new());\n }\n\n for (pos, vel, actor) in (pos, vel, actor).join() {\n let mut actor = serialized_actors.get_mut(&actor.id).unwrap();\n actor.insert(\"pos\", json!(pos));\n actor.insert(\"vel\", json!(vel));\n }\n\n for (player, actor) in (player, actor).join() {\n let mut actor = serialized_actors.get_mut(&actor.id).unwrap();\n actor.insert(\"delay\", json!(player.delay));\n }\n let json_actors = json!(serialized_actors);\n for player in (player).join() {\n let last_input 
= json!(player.last_input);\n let payload =\n hashmap!(\n \"last_input\" => &last_input,\n \"actors\" => &json_actors\n );\n let world_state = ClientMessage {\n opcode: OpCode::WorldUpdate,\n payload: &payload,\n };\n send(player, &world_state);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse font_context::FontContext;\nuse style::computed_values::border_style;\nuse opts::Opts;\n\nuse azure::azure_hl::{B8G8R8A8, Color, ColorPattern, DrawOptions};\nuse azure::azure_hl::{DrawSurfaceOptions, DrawTarget, Linear, StrokeOptions};\nuse azure::{AZ_CAP_BUTT, AZ_CAP_ROUND};\nuse azure::AZ_JOIN_BEVEL;\nuse azure::AzFloat;\nuse extra::arc::Arc;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse geom::side_offsets::SideOffsets2D;\nuse servo_net::image::base::Image;\nuse png::{RGBA8, K8, KA8};\nuse servo_util::geometry::Au;\nuse std::vec;\nuse std::libc::types::common::c99::uint16_t;\nuse std::libc::size_t;\n\npub struct RenderContext<'self> {\n draw_target: &'self DrawTarget,\n font_ctx: @mut FontContext,\n opts: &'self Opts,\n \/\/\/ The rectangle that this context encompasses in page coordinates.\n page_rect: Rect<f32>,\n \/\/\/ The rectangle that this context encompasses in screen coordinates (pixels).\n screen_rect: Rect<uint>,\n}\n\nimpl<'self> RenderContext<'self> {\n pub fn get_draw_target(&self) -> &'self DrawTarget {\n self.draw_target\n }\n\n pub fn draw_solid_color(&self, bounds: &Rect<Au>, color: Color) {\n self.draw_target.make_current();\n self.draw_target.fill_rect(&bounds.to_azure_rect(), &ColorPattern(color));\n }\n\n pub fn draw_border(&self,\n bounds: &Rect<Au>,\n border: SideOffsets2D<Au>,\n color: SideOffsets2D<Color>,\n style: SideOffsets2D<border_style::T>) {\n let draw_opts = DrawOptions(1 as AzFloat, 0 as uint16_t);\n let rect = bounds.to_azure_rect();\n let border = border.to_float_px();\n\n self.draw_target.make_current();\n let mut dash: [AzFloat, ..2] = [0 as AzFloat, 0 as AzFloat];\n let mut stroke_opts = StrokeOptions(0 as AzFloat, 10 as AzFloat);\n\n \/\/ draw top border\n RenderContext::apply_border_style(style.top, border.top, dash, &mut stroke_opts);\n let y = rect.origin.y + border.top * 0.5;\n let start = Point2D(rect.origin.x, y);\n let end = Point2D(rect.origin.x + rect.size.width, y);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.top),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw right border\n RenderContext::apply_border_style(style.right, border.right, dash, &mut stroke_opts);\n let x = rect.origin.x + rect.size.width - border.right * 0.5;\n let start = Point2D(x, rect.origin.y);\n let end = Point2D(x, rect.origin.y + rect.size.height);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.right),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw bottom border\n RenderContext::apply_border_style(style.bottom, border.bottom, dash, &mut stroke_opts);\n let y = rect.origin.y + rect.size.height - border.bottom * 0.5;\n let start = Point2D(rect.origin.x, y);\n let end = Point2D(rect.origin.x + rect.size.width, y);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.bottom),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw left border\n RenderContext::apply_border_style(style.left, border.left, dash, &mut stroke_opts);\n let x = rect.origin.x + border.left * 0.5;\n let start = 
Point2D(x, rect.origin.y);\n let end = Point2D(x, rect.origin.y + rect.size.height);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.left),\n &stroke_opts,\n &draw_opts);\n }\n\n pub fn draw_image(&self, bounds: Rect<Au>, image: Arc<~Image>) {\n let image = image.get();\n let size = Size2D(image.width as i32, image.height as i32);\n let pixel_width = match image.color_type {\n RGBA8 => 4,\n K8 => 1,\n KA8 => 2,\n _ => fail!(~\"color type not supported\"),\n };\n let stride = image.width * pixel_width;\n\n self.draw_target.make_current();\n let draw_target_ref = &self.draw_target;\n let azure_surface = draw_target_ref.create_source_surface_from_data(image.pixels, size,\n stride as i32, B8G8R8A8);\n let source_rect = Rect(Point2D(0 as AzFloat, 0 as AzFloat),\n Size2D(image.width as AzFloat, image.height as AzFloat));\n let dest_rect = bounds.to_azure_rect();\n let draw_surface_options = DrawSurfaceOptions(Linear, true);\n let draw_options = DrawOptions(1.0f64 as AzFloat, 0);\n draw_target_ref.draw_surface(azure_surface,\n dest_rect,\n source_rect,\n draw_surface_options,\n draw_options);\n }\n\n pub fn clear(&self) {\n let pattern = ColorPattern(Color(1.0, 1.0, 1.0, 1.0));\n let rect = Rect(Point2D(self.page_rect.origin.x as AzFloat,\n self.page_rect.origin.y as AzFloat),\n Size2D(self.screen_rect.size.width as AzFloat,\n self.screen_rect.size.height as AzFloat));\n self.draw_target.make_current();\n self.draw_target.fill_rect(&rect, &pattern);\n }\n\n fn apply_border_style(style: border_style::T, border_width: AzFloat, dash: &mut [AzFloat], stroke_opts: &mut StrokeOptions){\n match style{\n border_style::none => {\n }\n border_style::hidden => {\n }\n \/\/FIXME(sammykim): This doesn't work with dash_pattern and cap_style well. I referred firefox code.\n border_style::dotted => {\n stroke_opts.line_width = border_width;\n\n if border_width > 2.0 {\n dash[0] = 0 as AzFloat;\n dash[1] = border_width * 2.0;\n\n stroke_opts.set_cap_style(AZ_CAP_ROUND as u8);\n } else {\n dash[0] = border_width;\n dash[1] = border_width;\n }\n stroke_opts.mDashPattern = vec::raw::to_ptr(dash);\n stroke_opts.mDashLength = dash.len() as size_t;\n }\n border_style::dashed => {\n stroke_opts.set_cap_style(AZ_CAP_BUTT as u8);\n stroke_opts.line_width = border_width;\n dash[0] = border_width*3 as AzFloat;\n dash[1] = border_width*3 as AzFloat;\n stroke_opts.mDashPattern = vec::raw::to_ptr(dash);\n stroke_opts.mDashLength = dash.len() as size_t;\n }\n \/\/FIXME(sammykim): BorderStyleSolid doesn't show proper join-style with comparing firefox.\n border_style::solid => {\n stroke_opts.set_cap_style(AZ_CAP_BUTT as u8);\n stroke_opts.set_join_style(AZ_JOIN_BEVEL as u8);\n stroke_opts.line_width = border_width;\n stroke_opts.mDashLength = 0 as size_t;\n }\n \/\/FIXME(sammykim): Five more styles should be implemented.\n \/\/double, groove, ridge, inset, outset\n }\n }\n}\n\ntrait to_float {\n fn to_float(&self) -> f64;\n}\n\nimpl to_float for u8 {\n fn to_float(&self) -> f64 {\n (*self as f64) \/ 255f64\n }\n}\n\ntrait ToAzureRect {\n fn to_azure_rect(&self) -> Rect<AzFloat>;\n}\n\nimpl ToAzureRect for Rect<Au> {\n fn to_azure_rect(&self) -> Rect<AzFloat> {\n Rect(Point2D(self.origin.x.to_nearest_px() as AzFloat,\n self.origin.y.to_nearest_px() as AzFloat),\n Size2D(self.size.width.to_nearest_px() as AzFloat,\n self.size.height.to_nearest_px() as AzFloat))\n }\n}\n\ntrait ToSideOffsetsPx {\n fn to_float_px(&self) -> SideOffsets2D<AzFloat>;\n}\n\nimpl ToSideOffsetsPx for SideOffsets2D<Au> {\n fn 
to_float_px(&self) -> SideOffsets2D<AzFloat> {\n SideOffsets2D::new(self.top.to_nearest_px() as AzFloat,\n self.right.to_nearest_px() as AzFloat,\n self.bottom.to_nearest_px() as AzFloat,\n self.left.to_nearest_px() as AzFloat)\n }\n}\n<commit_msg>push\/pop clip<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse font_context::FontContext;\nuse style::computed_values::border_style;\nuse opts::Opts;\n\nuse azure::azure_hl::{B8G8R8A8, Color, ColorPattern, DrawOptions};\nuse azure::azure_hl::{DrawSurfaceOptions, DrawTarget, Linear, StrokeOptions};\nuse azure::{AZ_CAP_BUTT, AZ_CAP_ROUND};\nuse azure::AZ_JOIN_BEVEL;\nuse azure::AzFloat;\nuse extra::arc::Arc;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse geom::side_offsets::SideOffsets2D;\nuse servo_net::image::base::Image;\nuse png::{RGBA8, K8, KA8};\nuse servo_util::geometry::Au;\nuse std::vec;\nuse std::libc::types::common::c99::uint16_t;\nuse std::libc::size_t;\n\npub struct RenderContext<'self> {\n draw_target: &'self DrawTarget,\n font_ctx: @mut FontContext,\n opts: &'self Opts,\n \/\/\/ The rectangle that this context encompasses in page coordinates.\n page_rect: Rect<f32>,\n \/\/\/ The rectangle that this context encompasses in screen coordinates (pixels).\n screen_rect: Rect<uint>,\n}\n\nimpl<'self> RenderContext<'self> {\n pub fn get_draw_target(&self) -> &'self DrawTarget {\n self.draw_target\n }\n\n pub fn draw_solid_color(&self, bounds: &Rect<Au>, color: Color) {\n self.draw_target.make_current();\n self.draw_target.fill_rect(&bounds.to_azure_rect(), &ColorPattern(color));\n }\n\n\n\n pub fn draw_border(&self,\n bounds: &Rect<Au>,\n border: SideOffsets2D<Au>,\n color: SideOffsets2D<Color>,\n style: SideOffsets2D<border_style::T>) {\n let draw_opts = DrawOptions(1 as AzFloat, 0 as uint16_t);\n let rect = bounds.to_azure_rect();\n let border = border.to_float_px();\n\n\n self.draw_target.make_current();\n let mut dash: [AzFloat, ..2] = [0 as AzFloat, 0 as AzFloat];\n let mut stroke_opts = StrokeOptions(0 as AzFloat, 10 as AzFloat);\n\n \/\/ draw top border\n RenderContext::apply_border_style(style.top, border.top, dash, &mut stroke_opts);\n let y = rect.origin.y + border.top * 0.5;\n let start = Point2D(rect.origin.x, y);\n let end = Point2D(rect.origin.x + rect.size.width, y);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.top),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw right border\n RenderContext::apply_border_style(style.right, border.right, dash, &mut stroke_opts);\n let x = rect.origin.x + rect.size.width - border.right * 0.5;\n let start = Point2D(x, rect.origin.y);\n let end = Point2D(x, rect.origin.y + rect.size.height);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.right),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw bottom border\n RenderContext::apply_border_style(style.bottom, border.bottom, dash, &mut stroke_opts);\n let y = rect.origin.y + rect.size.height - border.bottom * 0.5;\n let start = Point2D(rect.origin.x, y);\n let end = Point2D(rect.origin.x + rect.size.width, y);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.bottom),\n &stroke_opts,\n &draw_opts);\n\n \/\/ draw left border\n RenderContext::apply_border_style(style.left, border.left, dash, &mut stroke_opts);\n let x = rect.origin.x + border.left * 0.5;\n let start = Point2D(x, 
rect.origin.y);\n let end = Point2D(x, rect.origin.y + rect.size.height);\n self.draw_target.stroke_line(start,\n end,\n &ColorPattern(color.left),\n &stroke_opts,\n &draw_opts);\n }\n\n pub fn draw_push_clip(&self,\n bounds: &Rect<Au>,\n border: SideOffsets2D<f32>\n ){\n \n let rect = bounds.to_azure_rect();\n let path_builder = self.draw_target.create_path_builder();\n\n let left_top = Point2D(rect.origin.x, rect.origin.y);\n let right_top = Point2D(rect.origin.x + rect.size.width, rect.origin.y);\n let left_bottom = Point2D(rect.origin.x, rect.origin.y + rect.size.height);\n let right_bottom = Point2D(rect.origin.x + rect.size.width, rect.origin.y + rect.size.height);\n\n path_builder.move_to(left_top);\n path_builder.line_to(right_top);\n path_builder.line_to(right_bottom);\n path_builder.line_to(left_bottom);\n\n let path = path_builder.finish();\n self.draw_target.push_clip(&path);\n } \n \n pub fn draw_pop_clip(&self){\n self.draw_target.pop_clip();\n } \n\n pub fn draw_image(&self, bounds: Rect<Au>, image: Arc<~Image>) {\n let image = image.get();\n let size = Size2D(image.width as i32, image.height as i32);\n let pixel_width = match image.color_type {\n RGBA8 => 4,\n K8 => 1,\n KA8 => 2,\n _ => fail!(~\"color type not supported\"),\n };\n let stride = image.width * pixel_width;\n\n self.draw_target.make_current();\n let draw_target_ref = &self.draw_target;\n let azure_surface = draw_target_ref.create_source_surface_from_data(image.pixels, size,\n stride as i32, B8G8R8A8);\n let source_rect = Rect(Point2D(0 as AzFloat, 0 as AzFloat),\n Size2D(image.width as AzFloat, image.height as AzFloat));\n let dest_rect = bounds.to_azure_rect();\n let draw_surface_options = DrawSurfaceOptions(Linear, true);\n let draw_options = DrawOptions(1.0f64 as AzFloat, 0);\n draw_target_ref.draw_surface(azure_surface,\n dest_rect,\n source_rect,\n draw_surface_options,\n draw_options);\n }\n\n pub fn clear(&self) {\n let pattern = ColorPattern(Color(1.0, 1.0, 1.0, 1.0));\n let rect = Rect(Point2D(self.page_rect.origin.x as AzFloat,\n self.page_rect.origin.y as AzFloat),\n Size2D(self.screen_rect.size.width as AzFloat,\n self.screen_rect.size.height as AzFloat));\n self.draw_target.make_current();\n self.draw_target.fill_rect(&rect, &pattern);\n }\n\n fn apply_border_style(style: border_style::T, border_width: AzFloat, dash: &mut [AzFloat], stroke_opts: &mut StrokeOptions){\n match style{\n border_style::none => {\n }\n border_style::hidden => {\n }\n \/\/FIXME(sammykim): This doesn't work with dash_pattern and cap_style well. 
I referred firefox code.\n border_style::dotted => {\n stroke_opts.line_width = border_width;\n\n if border_width > 2.0 {\n dash[0] = 0 as AzFloat;\n dash[1] = border_width * 2.0;\n\n stroke_opts.set_cap_style(AZ_CAP_ROUND as u8);\n } else {\n dash[0] = border_width;\n dash[1] = border_width;\n }\n stroke_opts.mDashPattern = vec::raw::to_ptr(dash);\n stroke_opts.mDashLength = dash.len() as size_t;\n }\n border_style::dashed => {\n stroke_opts.set_cap_style(AZ_CAP_BUTT as u8);\n stroke_opts.line_width = border_width;\n dash[0] = border_width*3 as AzFloat;\n dash[1] = border_width*3 as AzFloat;\n stroke_opts.mDashPattern = vec::raw::to_ptr(dash);\n stroke_opts.mDashLength = dash.len() as size_t;\n }\n \/\/FIXME(sammykim): BorderStyleSolid doesn't show proper join-style with comparing firefox.\n border_style::solid => {\n stroke_opts.set_cap_style(AZ_CAP_BUTT as u8);\n stroke_opts.set_join_style(AZ_JOIN_BEVEL as u8);\n stroke_opts.line_width = border_width;\n stroke_opts.mDashLength = 0 as size_t;\n }\n \/\/FIXME(sammykim): Five more styles should be implemented.\n \/\/double, groove, ridge, inset, outset\n }\n }\n}\n\ntrait to_float {\n fn to_float(&self) -> f64;\n}\n\nimpl to_float for u8 {\n fn to_float(&self) -> f64 {\n (*self as f64) \/ 255f64\n }\n}\n\ntrait ToAzureRect {\n fn to_azure_rect(&self) -> Rect<AzFloat>;\n}\n\nimpl ToAzureRect for Rect<Au> {\n fn to_azure_rect(&self) -> Rect<AzFloat> {\n Rect(Point2D(self.origin.x.to_nearest_px() as AzFloat,\n self.origin.y.to_nearest_px() as AzFloat),\n Size2D(self.size.width.to_nearest_px() as AzFloat,\n self.size.height.to_nearest_px() as AzFloat))\n }\n}\n\ntrait ToSideOffsetsPx {\n fn to_float_px(&self) -> SideOffsets2D<AzFloat>;\n}\n\nimpl ToSideOffsetsPx for SideOffsets2D<Au> {\n fn to_float_px(&self) -> SideOffsets2D<AzFloat> {\n SideOffsets2D::new(self.top.to_nearest_px() as AzFloat,\n self.right.to_nearest_px() as AzFloat,\n self.bottom.to_nearest_px() as AzFloat,\n self.left.to_nearest_px() as AzFloat)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use core::fmt::{self, Write};\nuse core::result;\n\nuse system::syscall::{sys_write, sys_exit};\n\npub struct DebugStream;\n\nimpl Write for DebugStream {\n fn write_str(&mut self, s: &str) -> fmt::Result {\n let _ = sys_write(2, s.as_bytes());\n\n result::Result::Ok(())\n }\n}\n\n#[lang=\"panic_fmt\"]\n#[allow(unused_must_use)]\npub extern \"C\" fn panic_impl(args: &fmt::Arguments, file: &'static str, line: u32) -> ! {\n let mut stream = DebugStream;\n stream.write_str(file);\n stream.write_fmt(format_args!(\":{}: \", line));\n stream.write_fmt(*args);\n stream.write_str(\"\\n\");\n\n loop {\n let _ = sys_exit(128);\n }\n}\n<commit_msg>Error checking in panic<commit_after>use core::fmt::{self, Write};\nuse core::result;\n\nuse system::syscall::{sys_write, sys_exit};\n\npub struct DebugStream;\n\nimpl Write for DebugStream {\n fn write_str(&mut self, s: &str) -> fmt::Result {\n if let Err(_err) = sys_write(2, s.as_bytes()) {\n result::Result::Err(fmt::Error)\n } else {\n result::Result::Ok(())\n }\n }\n}\n\n#[lang=\"panic_fmt\"]\n#[allow(unused_must_use)]\npub extern \"C\" fn panic_impl(args: &fmt::Arguments, file: &'static str, line: u32) -> ! 
{\n let mut stream = DebugStream;\n stream.write_fmt(format_args!(\"Panic in {}:{}: {}\\n\", file, line, *args));\n\n loop {\n let _ = sys_exit(128);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Ingest in a thread pool sharded by railway id.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetOptions, TargetResult};\n\n\/\/ See https:\/\/developer.android.com\/ndk\/guides\/abis.html#v7a\n\/\/ for target ABI requirements.\n\npub fn target() -> TargetResult {\n let mut base = super::android_base::opts();\n base.features = \"+v7,+thumb2,+vfp3,+d16,-neon\".to_string();\n base.max_atomic_width = Some(64);\n\n Ok(Target {\n llvm_target: \"armv7-none-linux-android\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64\".to_string(),\n arch: \"arm\".to_string(),\n target_os: \"android\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: TargetOptions {\n abi_blacklist: super::arm_base::abi_blacklist(),\n .. base\n },\n })\n}\n<commit_msg>Rollup merge of #41657 - malbarbo:android-armv7-linker, r=alexcrichton<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetOptions, TargetResult};\n\n\/\/ See https:\/\/developer.android.com\/ndk\/guides\/abis.html#v7a\n\/\/ for target ABI requirements.\n\npub fn target() -> TargetResult {\n let mut base = super::android_base::opts();\n base.features = \"+v7,+thumb2,+vfp3,+d16,-neon\".to_string();\n base.max_atomic_width = Some(64);\n base.pre_link_args\n .get_mut(&LinkerFlavor::Gcc).unwrap().push(\"-march=armv7-a\".to_string());\n\n Ok(Target {\n llvm_target: \"armv7-none-linux-android\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64\".to_string(),\n arch: \"arm\".to_string(),\n target_os: \"android\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: TargetOptions {\n abi_blacklist: super::arm_base::abi_blacklist(),\n .. 
base\n },\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate objc;\nextern crate objc_foundation;\nextern crate cocoa;\nextern crate gfx_core as core;\nextern crate metal_rs as metal;\nextern crate bit_set;\n\n\/\/ use cocoa::base::{selector, class};\n\/\/ use cocoa::foundation::{NSUInteger};\n\nuse metal::*;\n\nuse core::{handle, texture as tex};\nuse core::SubmissionResult;\nuse core::memory::{self, Usage, Bind};\nuse core::command::{AccessInfo, AccessGuard};\n\nuse std::cell::RefCell;\nuse std::sync::Arc;\n\/\/ use std::{mem, ptr};\n\nmod factory;\nmod encoder;\nmod command;\nmod mirror;\nmod map;\n\npub use self::command::CommandBuffer;\npub use self::factory::Factory;\npub use self::map::*;\n\n\/\/ Grabbed from https:\/\/developer.apple.com\/metal\/limits\/\nconst MTL_MAX_TEXTURE_BINDINGS: usize = 128;\nconst MTL_MAX_BUFFER_BINDINGS: usize = 31;\nconst MTL_MAX_SAMPLER_BINDINGS: usize = 16;\n\n\/\/\/ Internal struct of shared data between the device and its factories.\n#[doc(hidden)]\npub struct Share {\n capabilities: core::Capabilities,\n handles: RefCell<handle::Manager<Resources>>,\n}\n\npub mod native {\n use metal::*;\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Buffer(pub *mut MTLBuffer);\n unsafe impl Send for Buffer {}\n unsafe impl Sync for Buffer {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Texture(pub *mut MTLTexture);\n unsafe impl Send for Texture {}\n unsafe impl Sync for Texture {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Sampler(pub MTLSamplerState);\n unsafe impl Send for Sampler {}\n unsafe impl Sync for Sampler {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Rtv(pub *mut MTLTexture);\n unsafe impl Send for Rtv {}\n unsafe impl Sync for Rtv {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Dsv(pub *mut MTLTexture, pub Option<u16>);\n unsafe impl Send for Dsv {}\n unsafe impl Sync for Dsv {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Srv(pub *mut MTLTexture);\n unsafe impl Send for Srv {}\n unsafe impl Sync for Srv {}\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct InputLayout(pub MTLVertexDescriptor);\nunsafe impl Send for InputLayout {}\nunsafe impl Sync for InputLayout {}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Shader {\n func: MTLFunction,\n}\nunsafe impl Send for Shader {}\nunsafe impl Sync for Shader {}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Program {\n vs: MTLFunction,\n ps: MTLFunction,\n}\nunsafe impl Send for Program {}\nunsafe impl Sync for Program {}\n\npub struct ShaderLibrary {\n lib: MTLLibrary,\n}\nunsafe impl Send for ShaderLibrary {}\nunsafe impl Sync for ShaderLibrary {}\n\n\/\/ ShaderLibrary isn't handled via Device.cleanup(). 
Not really an issue since it will usually\n\/\/ live for the entire application lifetime and be cloned rarely.\nimpl Drop for ShaderLibrary {\n fn drop(&mut self) {\n unsafe { self.lib.release() };\n }\n}\n\nimpl Clone for ShaderLibrary {\n fn clone(&self) -> Self {\n unsafe { self.lib.retain() };\n ShaderLibrary { lib: self.lib }\n }\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Pipeline {\n pipeline: MTLRenderPipelineState,\n depth_stencil: Option<MTLDepthStencilState>,\n winding: MTLWinding,\n cull: MTLCullMode,\n fill: MTLTriangleFillMode,\n alpha_to_one: bool,\n alpha_to_coverage: bool,\n depth_bias: i32,\n slope_scaled_depth_bias: i32,\n depth_clip: bool,\n}\nunsafe impl Send for Pipeline {}\nunsafe impl Sync for Pipeline {}\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\npub struct Buffer(native::Buffer, Usage, Bind);\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\npub struct Texture(native::Texture, Usage);\n\npub struct Device {\n pub device: MTLDevice,\n pub drawable: *mut CAMetalDrawable,\n pub backbuffer: *mut MTLTexture,\n feature_set: MTLFeatureSet,\n share: Arc<Share>,\n frame_handles: handle::Manager<Resources>,\n max_resource_count: Option<usize>,\n}\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\npub struct Fence;\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub enum Resources {}\n\nimpl core::Resources for Resources {\n type Buffer = Buffer;\n type Shader = Shader;\n type Program = Program;\n type PipelineStateObject = Pipeline;\n type Texture = Texture;\n type RenderTargetView = native::Rtv;\n type DepthStencilView = native::Dsv;\n type ShaderResourceView = native::Srv;\n type UnorderedAccessView = ();\n type Sampler = native::Sampler;\n type Fence = Fence;\n type Mapping = factory::RawMapping;\n}\n\npub type ShaderModel = u16;\n\nimpl Device {\n pub fn get_shader_model(&self) -> ShaderModel {\n use metal::MTLFeatureSet::*;\n\n match self.feature_set {\n iOS_GPUFamily1_v1 |\n iOS_GPUFamily1_v2 => 10,\n iOS_GPUFamily2_v1 |\n iOS_GPUFamily2_v2 |\n iOS_GPUFamily3_v1 |\n OSX_GPUFamily1_v1 => 11,\n }\n }\n}\n\nimpl core::Device for Device {\n type Resources = Resources;\n type CommandBuffer = command::CommandBuffer;\n\n fn get_capabilities(&self) -> &core::Capabilities {\n &self.share.capabilities\n }\n\n fn pin_submitted_resources(&mut self, man: &handle::Manager<Resources>) {\n self.frame_handles.extend(man);\n match self.max_resource_count {\n Some(c) if self.frame_handles.count() > c => {\n error!(\"Way too many resources in the current frame. 
Did you call \\\n Device::cleanup()?\");\n self.max_resource_count = None;\n }\n _ => (),\n }\n }\n\n fn submit(&mut self,\n cb: &mut command::CommandBuffer,\n access: &AccessInfo<Resources>) -> SubmissionResult<()> {\n let _guard = try!(access.take_accesses());\n cb.commit(unsafe { *self.drawable });\n Ok(())\n }\n\n fn fenced_submit(&mut self,\n _: &mut Self::CommandBuffer,\n _: &AccessInfo<Resources>,\n _after: Option<handle::Fence<Resources>>)\n -> SubmissionResult<handle::Fence<Resources>> {\n unimplemented!()\n }\n\n fn wait_fence(&mut self, fence: &handle::Fence<Self::Resources>) {\n unimplemented!()\n }\n\n fn cleanup(&mut self) {\n use core::handle::Producer;\n self.frame_handles.clear();\n self.share.handles.borrow_mut().clean_with(&mut (),\n |_, _v| {\n \/\/ v.0.release();\n }, \/\/ buffer\n |_, _s| { \/\/shader\n \/*(*s.object).Release();\n (*s.reflection).Release();*\/\n },\n |_, _p| {\n \/\/ if !p.vs.is_null() { p.vs.release(); }\n \/\/ if !p.ps.is_null() { p.ps.release(); }\n }, \/\/ program\n |_, _v| { \/\/PSO\n \/*type Child = *mut winapi::ID3D11DeviceChild;\n (*v.layout).Release();\n (*(v.rasterizer as Child)).Release();\n (*(v.depth_stencil as Child)).Release();\n (*(v.blend as Child)).Release();*\/\n },\n |_, _v| {\n \/\/ (*(v.0).0).release();\n }, \/\/ texture\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ SRV\n |_, _| {}, \/\/ UAV\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ RTV\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ DSV\n |_, _v| {\n \/\/ v.sampler.release();\n }, \/\/ sampler\n |_, _| {\n \/\/ fence\n });\n }\n}\n\n#[derive(Clone, Debug)]\npub enum InitError {\n FeatureSet,\n}\n\npub fn create(format: core::format::Format,\n width: u32,\n height: u32)\n -> Result<(Device,\n Factory,\n handle::RawRenderTargetView<Resources>,\n *mut CAMetalDrawable,\n *mut MTLTexture),\n InitError> {\n use core::handle::Producer;\n\n let share = Share {\n capabilities: core::Capabilities {\n max_vertex_count: 0,\n max_index_count: 0,\n max_texture_size: 0,\n max_patch_size: 0,\n instance_base_supported: false,\n instance_call_supported: false,\n instance_rate_supported: false,\n vertex_base_supported: false,\n srgb_color_supported: false,\n constant_buffer_supported: true,\n unordered_access_view_supported: false,\n separate_blending_slots_supported: false,\n copy_buffer_supported: true,\n },\n handles: RefCell::new(handle::Manager::new()),\n };\n\n let mtl_device = create_system_default_device();\n let feature_sets = {\n use metal::MTLFeatureSet::*;\n [OSX_GPUFamily1_v1,\n \/\/OSX_GPUFamily1_v2,\n iOS_GPUFamily3_v1,\n iOS_GPUFamily2_v2,\n iOS_GPUFamily2_v1,\n iOS_GPUFamily1_v2,\n iOS_GPUFamily1_v1]\n };\n let selected_set = feature_sets.into_iter()\n .find(|&&f| mtl_device.supports_feature_set(f));\n\n let bb = Box::into_raw(Box::new(MTLTexture::nil()));\n let d = Box::into_raw(Box::new(CAMetalDrawable::nil()));\n\n let device = Device {\n device: mtl_device,\n feature_set: match selected_set {\n Some(&set) => set,\n None => return Err(InitError::FeatureSet),\n },\n share: Arc::new(share),\n frame_handles: handle::Manager::new(),\n max_resource_count: None,\n\n drawable: d,\n backbuffer: bb,\n };\n\n \/\/ let raw_addr: *mut MTLTexture = ptr::null_mut();\/\/&mut MTLTexture::nil();\/\/unsafe { mem::transmute(&(raw_tex.0).0) };\n let raw_tex = Texture(native::Texture(bb), Usage::Data);\n\n let color_info = tex::Info {\n kind: tex::Kind::D2(width as tex::Size,\n height as tex::Size,\n tex::AaMode::Single),\n levels: 1,\n format: format.0,\n bind: memory::RENDER_TARGET,\n usage: 
raw_tex.1,\n };\n let color_tex = device.share.handles.borrow_mut().make_texture(raw_tex, color_info);\n\n let mut factory = Factory::new(mtl_device, device.share.clone());\n\n let color_target = {\n use core::Factory;\n\n let desc = tex::RenderDesc {\n channel: format.1,\n level: 0,\n layer: None,\n };\n\n factory.view_texture_as_render_target_raw(&color_tex, desc).unwrap()\n };\n\n Ok((device, factory, color_target, d, bb))\n}\n<commit_msg>Old Metal backend update to build with new metal-rs<commit_after>\/\/ Copyright 2016 The Gfx-rs Developers.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n#[macro_use]\nextern crate log;\n#[macro_use]\nextern crate objc;\nextern crate objc_foundation;\nextern crate cocoa;\nextern crate gfx_core as core;\nextern crate metal_rs as metal;\nextern crate bit_set;\n\n\/\/ use cocoa::base::{selector, class};\n\/\/ use cocoa::foundation::{NSUInteger};\n\nuse metal::*;\n\nuse core::{handle, texture as tex};\nuse core::SubmissionResult;\nuse core::memory::{self, Usage, Bind};\nuse core::command::{AccessInfo, AccessGuard};\n\nuse std::cell::RefCell;\nuse std::sync::Arc;\n\/\/ use std::{mem, ptr};\n\nmod factory;\nmod encoder;\nmod command;\nmod mirror;\nmod map;\n\npub use self::command::CommandBuffer;\npub use self::factory::Factory;\npub use self::map::*;\n\n\/\/ Grabbed from https:\/\/developer.apple.com\/metal\/limits\/\nconst MTL_MAX_TEXTURE_BINDINGS: usize = 128;\nconst MTL_MAX_BUFFER_BINDINGS: usize = 31;\nconst MTL_MAX_SAMPLER_BINDINGS: usize = 16;\n\n\/\/\/ Internal struct of shared data between the device and its factories.\n#[doc(hidden)]\npub struct Share {\n capabilities: core::Capabilities,\n handles: RefCell<handle::Manager<Resources>>,\n}\n\npub mod native {\n use metal::*;\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Buffer(pub *mut MTLBuffer);\n unsafe impl Send for Buffer {}\n unsafe impl Sync for Buffer {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Texture(pub *mut MTLTexture);\n unsafe impl Send for Texture {}\n unsafe impl Sync for Texture {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Sampler(pub MTLSamplerState);\n unsafe impl Send for Sampler {}\n unsafe impl Sync for Sampler {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Rtv(pub *mut MTLTexture);\n unsafe impl Send for Rtv {}\n unsafe impl Sync for Rtv {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Dsv(pub *mut MTLTexture, pub Option<u16>);\n unsafe impl Send for Dsv {}\n unsafe impl Sync for Dsv {}\n\n #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n pub struct Srv(pub *mut MTLTexture);\n unsafe impl Send for Srv {}\n unsafe impl Sync for Srv {}\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct InputLayout(pub MTLVertexDescriptor);\nunsafe impl Send for InputLayout {}\nunsafe impl Sync for InputLayout {}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Shader {\n func: 
MTLFunction,\n}\nunsafe impl Send for Shader {}\nunsafe impl Sync for Shader {}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Program {\n vs: MTLFunction,\n ps: MTLFunction,\n}\nunsafe impl Send for Program {}\nunsafe impl Sync for Program {}\n\npub struct ShaderLibrary {\n lib: MTLLibrary,\n}\nunsafe impl Send for ShaderLibrary {}\nunsafe impl Sync for ShaderLibrary {}\n\n\/\/ ShaderLibrary isn't handled via Device.cleanup(). Not really an issue since it will usually\n\/\/ live for the entire application lifetime and be cloned rarely.\nimpl Drop for ShaderLibrary {\n fn drop(&mut self) {\n unsafe { self.lib.release() };\n }\n}\n\nimpl Clone for ShaderLibrary {\n fn clone(&self) -> Self {\n unsafe { self.lib.retain() };\n ShaderLibrary { lib: self.lib }\n }\n}\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub struct Pipeline {\n pipeline: MTLRenderPipelineState,\n depth_stencil: Option<MTLDepthStencilState>,\n winding: MTLWinding,\n cull: MTLCullMode,\n fill: MTLTriangleFillMode,\n alpha_to_one: bool,\n alpha_to_coverage: bool,\n depth_bias: i32,\n slope_scaled_depth_bias: i32,\n depth_clip: bool,\n}\nunsafe impl Send for Pipeline {}\nunsafe impl Sync for Pipeline {}\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\npub struct Buffer(native::Buffer, Usage, Bind);\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\npub struct Texture(native::Texture, Usage);\n\npub struct Device {\n pub device: MTLDevice,\n pub drawable: *mut CAMetalDrawable,\n pub backbuffer: *mut MTLTexture,\n feature_set: MTLFeatureSet,\n share: Arc<Share>,\n frame_handles: handle::Manager<Resources>,\n max_resource_count: Option<usize>,\n}\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\npub struct Fence;\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\npub enum Resources {}\n\nimpl core::Resources for Resources {\n type Buffer = Buffer;\n type Shader = Shader;\n type Program = Program;\n type PipelineStateObject = Pipeline;\n type Texture = Texture;\n type RenderTargetView = native::Rtv;\n type DepthStencilView = native::Dsv;\n type ShaderResourceView = native::Srv;\n type UnorderedAccessView = ();\n type Sampler = native::Sampler;\n type Fence = Fence;\n type Mapping = factory::RawMapping;\n}\n\npub type ShaderModel = u16;\n\nimpl Device {\n pub fn get_shader_model(&self) -> ShaderModel {\n use metal::MTLFeatureSet::*;\n\n match self.feature_set {\n iOS_GPUFamily1_v1 |\n iOS_GPUFamily1_v2 => 10,\n iOS_GPUFamily2_v1 |\n iOS_GPUFamily2_v2 |\n iOS_GPUFamily3_v1 |\n macOS_GPUFamily1_v1 => 11,\n _ => unimplemented!()\n }\n }\n}\n\nimpl core::Device for Device {\n type Resources = Resources;\n type CommandBuffer = command::CommandBuffer;\n\n fn get_capabilities(&self) -> &core::Capabilities {\n &self.share.capabilities\n }\n\n fn pin_submitted_resources(&mut self, man: &handle::Manager<Resources>) {\n self.frame_handles.extend(man);\n match self.max_resource_count {\n Some(c) if self.frame_handles.count() > c => {\n error!(\"Way too many resources in the current frame. 
Did you call \\\n Device::cleanup()?\");\n self.max_resource_count = None;\n }\n _ => (),\n }\n }\n\n fn submit(&mut self,\n cb: &mut command::CommandBuffer,\n access: &AccessInfo<Resources>) -> SubmissionResult<()> {\n let _guard = try!(access.take_accesses());\n cb.commit(unsafe { *self.drawable });\n Ok(())\n }\n\n fn fenced_submit(&mut self,\n _: &mut Self::CommandBuffer,\n _: &AccessInfo<Resources>,\n _after: Option<handle::Fence<Resources>>)\n -> SubmissionResult<handle::Fence<Resources>> {\n unimplemented!()\n }\n\n fn wait_fence(&mut self, fence: &handle::Fence<Self::Resources>) {\n unimplemented!()\n }\n\n fn cleanup(&mut self) {\n use core::handle::Producer;\n self.frame_handles.clear();\n self.share.handles.borrow_mut().clean_with(&mut (),\n |_, _v| {\n \/\/ v.0.release();\n }, \/\/ buffer\n |_, _s| { \/\/shader\n \/*(*s.object).Release();\n (*s.reflection).Release();*\/\n },\n |_, _p| {\n \/\/ if !p.vs.is_null() { p.vs.release(); }\n \/\/ if !p.ps.is_null() { p.ps.release(); }\n }, \/\/ program\n |_, _v| { \/\/PSO\n \/*type Child = *mut winapi::ID3D11DeviceChild;\n (*v.layout).Release();\n (*(v.rasterizer as Child)).Release();\n (*(v.depth_stencil as Child)).Release();\n (*(v.blend as Child)).Release();*\/\n },\n |_, _v| {\n \/\/ (*(v.0).0).release();\n }, \/\/ texture\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ SRV\n |_, _| {}, \/\/ UAV\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ RTV\n |_, _v| {\n \/\/ (*v.0).Release();\n }, \/\/ DSV\n |_, _v| {\n \/\/ v.sampler.release();\n }, \/\/ sampler\n |_, _| {\n \/\/ fence\n });\n }\n}\n\n#[derive(Clone, Debug)]\npub enum InitError {\n FeatureSet,\n}\n\npub fn create(format: core::format::Format,\n width: u32,\n height: u32)\n -> Result<(Device,\n Factory,\n handle::RawRenderTargetView<Resources>,\n *mut CAMetalDrawable,\n *mut MTLTexture),\n InitError> {\n use core::handle::Producer;\n\n let share = Share {\n capabilities: core::Capabilities {\n max_vertex_count: 0,\n max_index_count: 0,\n max_texture_size: 0,\n max_patch_size: 0,\n instance_base_supported: false,\n instance_call_supported: false,\n instance_rate_supported: false,\n vertex_base_supported: false,\n srgb_color_supported: false,\n constant_buffer_supported: true,\n unordered_access_view_supported: false,\n separate_blending_slots_supported: false,\n copy_buffer_supported: true,\n },\n handles: RefCell::new(handle::Manager::new()),\n };\n\n let mtl_device = create_system_default_device();\n let feature_sets = {\n use metal::MTLFeatureSet::*;\n [macOS_GPUFamily1_v1,\n macOS_GPUFamily1_v2,\n iOS_GPUFamily3_v1,\n iOS_GPUFamily2_v2,\n iOS_GPUFamily2_v1,\n iOS_GPUFamily1_v2,\n iOS_GPUFamily1_v1]\n };\n let selected_set = feature_sets.into_iter()\n .find(|&&f| mtl_device.supports_feature_set(f));\n\n let bb = Box::into_raw(Box::new(MTLTexture::nil()));\n let d = Box::into_raw(Box::new(CAMetalDrawable::nil()));\n\n let device = Device {\n device: mtl_device,\n feature_set: match selected_set {\n Some(&set) => set,\n None => return Err(InitError::FeatureSet),\n },\n share: Arc::new(share),\n frame_handles: handle::Manager::new(),\n max_resource_count: None,\n\n drawable: d,\n backbuffer: bb,\n };\n\n \/\/ let raw_addr: *mut MTLTexture = ptr::null_mut();\/\/&mut MTLTexture::nil();\/\/unsafe { mem::transmute(&(raw_tex.0).0) };\n let raw_tex = Texture(native::Texture(bb), Usage::Data);\n\n let color_info = tex::Info {\n kind: tex::Kind::D2(width as tex::Size,\n height as tex::Size,\n tex::AaMode::Single),\n levels: 1,\n format: format.0,\n bind: memory::RENDER_TARGET,\n usage: 
raw_tex.1,\n };\n let color_tex = device.share.handles.borrow_mut().make_texture(raw_tex, color_info);\n\n let mut factory = Factory::new(mtl_device, device.share.clone());\n\n let color_target = {\n use core::Factory;\n\n let desc = tex::RenderDesc {\n channel: format.1,\n level: 0,\n layer: None,\n };\n\n factory.view_texture_as_render_target_raw(&color_tex, desc).unwrap()\n };\n\n Ok((device, factory, color_target, d, bb))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Allow transaction inputs to be validated individually<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle a single block device.\n\nuse std::fs::OpenOptions;\nuse std::path::PathBuf;\n\nuse devicemapper::Device;\nuse devicemapper::Segment;\nuse devicemapper::Sectors;\nuse time::Timespec;\n\nuse super::super::errors::EngineResult;\nuse super::super::types::{DevUuid, PoolUuid};\n\nuse super::metadata::BDA;\nuse super::range_alloc::RangeAllocator;\n\n\n#[derive(Debug)]\npub struct BlockDev {\n dev: Device,\n pub devnode: PathBuf,\n bda: BDA,\n used: RangeAllocator,\n}\n\nimpl BlockDev {\n pub fn new(dev: Device, devnode: PathBuf, bda: BDA, allocator: RangeAllocator) -> BlockDev {\n BlockDev {\n dev: dev,\n devnode: devnode,\n bda: bda,\n used: allocator,\n }\n }\n\n pub fn wipe_metadata(self) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n BDA::wipe(&mut f)\n }\n\n pub fn save_state(&mut self, time: &Timespec, metadata: &[u8]) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n self.bda.save_state(time, metadata, &mut f)\n }\n\n pub fn load_state(&self) -> EngineResult<Option<Vec<u8>>> {\n let mut f = try!(OpenOptions::new().read(true).open(&self.devnode));\n self.bda.load_state(&mut f)\n }\n\n \/\/\/ List the available-for-upper-layer-use range in this blockdev.\n pub fn avail_range(&self) -> Segment {\n let start = self.bda.size();\n let size = self.size();\n \/\/ Blockdev size is at least MIN_DEV_SIZE, so this can fail only if\n \/\/ size of metadata area exceeds 1 GiB. Initial metadata area size\n \/\/ is 4 MiB.\n assert!(start <= size);\n Segment::new(self.dev, start, size - start)\n }\n\n \/\/\/ The device's UUID.\n pub fn uuid(&self) -> &DevUuid {\n self.bda.dev_uuid()\n }\n\n \/\/\/ The device's pool's UUID.\n pub fn pool_uuid(&self) -> &PoolUuid {\n self.bda.pool_uuid()\n }\n\n \/\/\/ The device's size.\n pub fn size(&self) -> Sectors {\n self.bda.dev_size()\n }\n\n \/\/\/ Last time metadata was written to this device.\n pub fn last_update_time(&self) -> Option<&Timespec> {\n self.bda.last_update_time()\n }\n\n pub fn available(&self) -> Sectors {\n self.used.available()\n }\n\n \/\/ Find some sector ranges that could be allocated. If more\n \/\/ sectors are needed than our capacity, return partial results.\n pub fn request_space(&mut self, size: Sectors) -> (Sectors, Vec<Segment>) {\n let (size, segs) = self.used.request(size);\n (size,\n segs.iter()\n .map(|&(start, len)| Segment::new(self.dev, start, len))\n .collect())\n }\n}\n<commit_msg>Omit unused method<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle a single block device.\n\nuse std::fs::OpenOptions;\nuse std::path::PathBuf;\n\nuse devicemapper::Device;\nuse devicemapper::Segment;\nuse devicemapper::Sectors;\nuse time::Timespec;\n\nuse super::super::errors::EngineResult;\nuse super::super::types::{DevUuid, PoolUuid};\n\nuse super::metadata::BDA;\nuse super::range_alloc::RangeAllocator;\n\n\n#[derive(Debug)]\npub struct BlockDev {\n dev: Device,\n pub devnode: PathBuf,\n bda: BDA,\n used: RangeAllocator,\n}\n\nimpl BlockDev {\n pub fn new(dev: Device, devnode: PathBuf, bda: BDA, allocator: RangeAllocator) -> BlockDev {\n BlockDev {\n dev: dev,\n devnode: devnode,\n bda: bda,\n used: allocator,\n }\n }\n\n pub fn wipe_metadata(self) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n BDA::wipe(&mut f)\n }\n\n pub fn save_state(&mut self, time: &Timespec, metadata: &[u8]) -> EngineResult<()> {\n let mut f = try!(OpenOptions::new().write(true).open(&self.devnode));\n self.bda.save_state(time, metadata, &mut f)\n }\n\n \/\/\/ List the available-for-upper-layer-use range in this blockdev.\n pub fn avail_range(&self) -> Segment {\n let start = self.bda.size();\n let size = self.size();\n \/\/ Blockdev size is at least MIN_DEV_SIZE, so this can fail only if\n \/\/ size of metadata area exceeds 1 GiB. Initial metadata area size\n \/\/ is 4 MiB.\n assert!(start <= size);\n Segment::new(self.dev, start, size - start)\n }\n\n \/\/\/ The device's UUID.\n pub fn uuid(&self) -> &DevUuid {\n self.bda.dev_uuid()\n }\n\n \/\/\/ The device's pool's UUID.\n pub fn pool_uuid(&self) -> &PoolUuid {\n self.bda.pool_uuid()\n }\n\n \/\/\/ The device's size.\n pub fn size(&self) -> Sectors {\n self.bda.dev_size()\n }\n\n \/\/\/ Last time metadata was written to this device.\n pub fn last_update_time(&self) -> Option<&Timespec> {\n self.bda.last_update_time()\n }\n\n pub fn available(&self) -> Sectors {\n self.used.available()\n }\n\n \/\/ Find some sector ranges that could be allocated. If more\n \/\/ sectors are needed than our capacity, return partial results.\n pub fn request_space(&mut self, size: Sectors) -> (Sectors, Vec<Segment>) {\n let (size, segs) = self.used.request(size);\n (size,\n segs.iter()\n .map(|&(start, len)| Segment::new(self.dev, start, len))\n .collect())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Make everything cfg(test)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for nullary univariant enums.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nenum Foo {\n Bar = 0xDEADBEE\n}\n\nconst X: Foo = Bar;\n\nfn main() {\n assert((X as uint) == 0xDEADBEE);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #18238 and #18336<commit_after>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(unboxed_closures)]\n#![deny(unused_mut)]\n\n\/\/ Test that mutating a mutable upvar in a capture-by-value unboxed\n\/\/ closure does not ice (issue #18238) and marks the upvar as used\n\/\/ mutably so we do not get a spurious warning about it not needing to\n\/\/ be declared mutable (issue #18336).\n\nfn main() {\n {\n let mut x = 0u;\n move |&mut:| x += 1;\n }\n {\n let mut x = 0u;\n move |:| x += 1;\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(const_int_ops)]\n#![feature(test)]\n\nextern crate test;\nuse test::black_box as b;\n\nconst BE_U32: u32 = 55u32.to_be();\nconst LE_U32: u32 = 55u32.to_le();\n\n\nfn main() {\n assert_eq!(BE_U32, b(55u32).to_be());\n assert_eq!(LE_U32, b(55u32).to_le());\n\n #[cfg(not(target_arch = \"asmjs\"))]\n {\n const BE_U128: u128 = 999999u128.to_be();\n const LE_I128: i128 = -999999i128.to_le();\n assert_eq!(BE_U128, b(999999u128).to_be());\n assert_eq!(LE_I128, b(-999999i128).to_le());\n }\n}\n<commit_msg>run-pass\/const-endianness: negate before to_le()<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(const_int_ops)]\n#![feature(test)]\n\nextern crate test;\nuse test::black_box as b;\n\nconst BE_U32: u32 = 55u32.to_be();\nconst LE_U32: u32 = 55u32.to_le();\n\n\nfn main() {\n assert_eq!(BE_U32, b(55u32).to_be());\n assert_eq!(LE_U32, b(55u32).to_le());\n\n #[cfg(not(target_arch = \"asmjs\"))]\n {\n const BE_U128: u128 = 999999u128.to_be();\n const LE_I128: i128 = (-999999i128).to_le();\n assert_eq!(BE_U128, b(999999u128).to_be());\n assert_eq!(LE_I128, b(-999999i128).to_le());\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting lib features (i.e. features that are not lang features).\n\/\/\n\/\/ These are declared using stability attributes (e.g. `#[stable (..)]`\n\/\/ and `#[unstable (..)]`), but are not declared in one single location\n\/\/ (unlike lang features), which means we need to collect them instead.\n\nuse ty::TyCtxt;\nuse syntax::symbol::Symbol;\nuse syntax::ast::{Attribute, MetaItem, MetaItemKind};\nuse syntax_pos::{Span, DUMMY_SP};\nuse hir;\nuse hir::itemlikevisit::ItemLikeVisitor;\nuse rustc_data_structures::fx::{FxHashSet, FxHashMap};\nuse errors::DiagnosticId;\n\npub struct LibFeatures {\n \/\/ A map from feature to stabilisation version.\n pub stable: FxHashMap<Symbol, Symbol>,\n pub unstable: FxHashSet<Symbol>,\n}\n\nimpl LibFeatures {\n fn new() -> LibFeatures {\n LibFeatures {\n stable: FxHashMap(),\n unstable: FxHashSet(),\n }\n }\n\n pub fn to_vec(&self) -> Vec<(Symbol, Option<Symbol>)> {\n let mut all_features: Vec<_> = self.stable.iter().map(|(f, s)| (*f, Some(*s)))\n .chain(self.unstable.iter().map(|f| (*f, None)))\n .collect();\n all_features.sort_unstable_by_key(|f| f.0.as_str());\n all_features\n }\n}\n\npub struct LibFeatureCollector<'a, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n lib_features: LibFeatures,\n}\n\nimpl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> {\n fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatureCollector<'a, 'tcx> {\n LibFeatureCollector {\n tcx,\n lib_features: LibFeatures::new(),\n }\n }\n\n fn extract(&self, attrs: &[Attribute]) -> Vec<(Symbol, Option<Symbol>, Span)> {\n let stab_attrs = vec![\"stable\", \"unstable\", \"rustc_const_unstable\"];\n let mut features = vec![];\n\n for attr in attrs {\n \/\/ Find a stability attribute (i.e. `#[stable (..)]`, `#[unstable (..)]`,\n \/\/ `#[rustc_const_unstable (..)]`).\n if let Some(stab_attr) = stab_attrs.iter().find(|stab_attr| {\n attr.check_name(stab_attr)\n }) {\n let meta_item = attr.meta();\n if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. 
}) = meta_item {\n let mut feature = None;\n let mut since = None;\n for meta in metas {\n if let Some(mi) = meta.meta_item() {\n \/\/ Find the `feature = \"..\"` meta-item.\n match (&*mi.name().as_str(), mi.value_str()) {\n (\"feature\", val) => feature = val,\n (\"since\", val) => since = val,\n _ => {}\n }\n }\n }\n if let Some(feature) = feature {\n \/\/ This additional check for stability is to make sure we\n \/\/ don't emit additional, irrelevant errors for malformed\n \/\/ attributes.\n if *stab_attr != \"stable\" || since.is_some() {\n features.push((feature, since, attr.span));\n }\n }\n \/\/ We need to iterate over the other attributes, because\n \/\/ `rustc_const_unstable` is not mutually exclusive with\n \/\/ the other stability attributes, so we can't just `break`\n \/\/ here.\n }\n }\n }\n\n features\n }\n\n fn collect_feature(&mut self, feature: Symbol, since: Option<Symbol>, span: Span) {\n let already_in_stable = self.lib_features.stable.contains_key(&feature);\n let already_in_unstable = self.lib_features.unstable.contains(&feature);\n\n match (since, already_in_stable, already_in_unstable) {\n (Some(since), _, false) => {\n if let Some(prev_since) = self.lib_features.stable.get(&feature) {\n if *prev_since != since {\n let msg = format!(\n \"feature `{}` is declared stable since {}, \\\n but was previously declared stable since {}\",\n feature,\n since,\n prev_since,\n );\n self.tcx.sess.struct_span_err_with_code(span, &msg,\n DiagnosticId::Error(\"E0711\".into())).emit();\n return;\n }\n }\n\n self.lib_features.stable.insert(feature, since);\n }\n (None, false, _) => {\n self.lib_features.unstable.insert(feature);\n }\n (Some(_), _, true) | (None, true, _) => {\n let msg = format!(\n \"feature `{}` is declared {}, but was previously declared {}\",\n feature,\n if since.is_some() { \"stable\"} else { \"unstable\" },\n if since.is_none() { \"stable\"} else { \"unstable\" },\n );\n self.tcx.sess.struct_span_err_with_code(span, &msg,\n DiagnosticId::Error(\"E0711\".into())).emit();\n }\n }\n }\n\n fn collect_from_attrs(&mut self, attrs: &[Attribute]) {\n for (feature, stable, span) in self.extract(attrs) {\n self.collect_feature(feature, stable, span);\n }\n }\n}\n\nimpl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LibFeatureCollector<'a, 'tcx> {\n fn visit_item(&mut self, item: &hir::Item) {\n self.collect_from_attrs(&item.attrs);\n }\n\n fn visit_trait_item(&mut self, trait_item: &hir::TraitItem) {\n self.collect_from_attrs(&trait_item.attrs);\n }\n\n fn visit_impl_item(&mut self, impl_item: &hir::ImplItem) {\n self.collect_from_attrs(&impl_item.attrs);\n }\n}\n\npub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatures {\n let mut collector = LibFeatureCollector::new(tcx);\n for &cnum in tcx.crates().iter() {\n for &(feature, since) in tcx.defined_lib_features(cnum).iter() {\n collector.collect_feature(feature, since, DUMMY_SP);\n }\n }\n collector.collect_from_attrs(&tcx.hir.krate().attrs);\n tcx.hir.krate().visit_all_item_likes(&mut collector);\n for exported_macro in &tcx.hir.krate().exported_macros {\n collector.collect_from_attrs(&exported_macro.attrs);\n }\n collector.lib_features\n}\n<commit_msg>Auto merge of #53397 - varkor:feature-collector-expand-visitor, r=nikomatsakis<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting lib features (i.e. features that are not lang features).\n\/\/\n\/\/ These are declared using stability attributes (e.g. `#[stable (..)]`\n\/\/ and `#[unstable (..)]`), but are not declared in one single location\n\/\/ (unlike lang features), which means we need to collect them instead.\n\nuse ty::TyCtxt;\nuse syntax::symbol::Symbol;\nuse syntax::ast::{Attribute, MetaItem, MetaItemKind};\nuse syntax_pos::{Span, DUMMY_SP};\nuse hir::intravisit::{self, NestedVisitorMap, Visitor};\nuse rustc_data_structures::fx::{FxHashSet, FxHashMap};\nuse errors::DiagnosticId;\n\npub struct LibFeatures {\n \/\/ A map from feature to stabilisation version.\n pub stable: FxHashMap<Symbol, Symbol>,\n pub unstable: FxHashSet<Symbol>,\n}\n\nimpl LibFeatures {\n fn new() -> LibFeatures {\n LibFeatures {\n stable: FxHashMap(),\n unstable: FxHashSet(),\n }\n }\n\n pub fn to_vec(&self) -> Vec<(Symbol, Option<Symbol>)> {\n let mut all_features: Vec<_> = self.stable.iter().map(|(f, s)| (*f, Some(*s)))\n .chain(self.unstable.iter().map(|f| (*f, None)))\n .collect();\n all_features.sort_unstable_by_key(|f| f.0.as_str());\n all_features\n }\n}\n\npub struct LibFeatureCollector<'a, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n lib_features: LibFeatures,\n}\n\nimpl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> {\n fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatureCollector<'a, 'tcx> {\n LibFeatureCollector {\n tcx,\n lib_features: LibFeatures::new(),\n }\n }\n\n fn extract(&self, attr: &Attribute) -> Option<(Symbol, Option<Symbol>, Span)> {\n let stab_attrs = vec![\"stable\", \"unstable\", \"rustc_const_unstable\"];\n\n \/\/ Find a stability attribute (i.e. `#[stable (..)]`, `#[unstable (..)]`,\n \/\/ `#[rustc_const_unstable (..)]`).\n if let Some(stab_attr) = stab_attrs.iter().find(|stab_attr| {\n attr.check_name(stab_attr)\n }) {\n let meta_item = attr.meta();\n if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. 
}) = meta_item {\n let mut feature = None;\n let mut since = None;\n for meta in metas {\n if let Some(mi) = meta.meta_item() {\n \/\/ Find the `feature = \"..\"` meta-item.\n match (&*mi.name().as_str(), mi.value_str()) {\n (\"feature\", val) => feature = val,\n (\"since\", val) => since = val,\n _ => {}\n }\n }\n }\n if let Some(feature) = feature {\n \/\/ This additional check for stability is to make sure we\n \/\/ don't emit additional, irrelevant errors for malformed\n \/\/ attributes.\n if *stab_attr != \"stable\" || since.is_some() {\n return Some((feature, since, attr.span));\n }\n }\n \/\/ We need to iterate over the other attributes, because\n \/\/ `rustc_const_unstable` is not mutually exclusive with\n \/\/ the other stability attributes, so we can't just `break`\n \/\/ here.\n }\n }\n\n None\n }\n\n fn collect_feature(&mut self, feature: Symbol, since: Option<Symbol>, span: Span) {\n let already_in_stable = self.lib_features.stable.contains_key(&feature);\n let already_in_unstable = self.lib_features.unstable.contains(&feature);\n\n match (since, already_in_stable, already_in_unstable) {\n (Some(since), _, false) => {\n if let Some(prev_since) = self.lib_features.stable.get(&feature) {\n if *prev_since != since {\n let msg = format!(\n \"feature `{}` is declared stable since {}, \\\n but was previously declared stable since {}\",\n feature,\n since,\n prev_since,\n );\n self.tcx.sess.struct_span_err_with_code(span, &msg,\n DiagnosticId::Error(\"E0711\".into())).emit();\n return;\n }\n }\n\n self.lib_features.stable.insert(feature, since);\n }\n (None, false, _) => {\n self.lib_features.unstable.insert(feature);\n }\n (Some(_), _, true) | (None, true, _) => {\n let msg = format!(\n \"feature `{}` is declared {}, but was previously declared {}\",\n feature,\n if since.is_some() { \"stable\"} else { \"unstable\" },\n if since.is_none() { \"stable\"} else { \"unstable\" },\n );\n self.tcx.sess.struct_span_err_with_code(span, &msg,\n DiagnosticId::Error(\"E0711\".into())).emit();\n }\n }\n }\n}\n\nimpl<'a, 'tcx> Visitor<'tcx> for LibFeatureCollector<'a, 'tcx> {\n fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {\n NestedVisitorMap::All(&self.tcx.hir)\n }\n\n fn visit_attribute(&mut self, attr: &'tcx Attribute) {\n if let Some((feature, stable, span)) = self.extract(attr) {\n self.collect_feature(feature, stable, span);\n }\n }\n}\n\npub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatures {\n let mut collector = LibFeatureCollector::new(tcx);\n for &cnum in tcx.crates().iter() {\n for &(feature, since) in tcx.defined_lib_features(cnum).iter() {\n collector.collect_feature(feature, since, DUMMY_SP);\n }\n }\n intravisit::walk_crate(&mut collector, tcx.hir.krate());\n collector.lib_features\n}\n<|endoftext|>"} {"text":"<commit_before>use std::ops::{Add, Sub, Mul, Div, Shl, Shr};\n\nmacro_rules! wrapping_impl {\n ($trait_name:ident, $method:ident, $t:ty) => {\n impl $trait_name for $t {\n #[inline]\n fn $method(&self, v: &Self) -> Self {\n <$t>::$method(*self, *v)\n }\n }\n };\n ($trait_name:ident, $method:ident, $t:ty, $rhs:ty) => {\n impl $trait_name<$rhs> for $t {\n #[inline]\n fn $method(&self, v: &$rhs) -> Self {\n <$t>::$method(*self, *v)\n }\n }\n }\n}\n\n\/\/\/ Performs addition that wrapps around on overflow.\npub trait WrappingAdd: Sized + Add<Self, Output=Self> {\n \/\/\/ Wrapping (modular) addition. 
Computes `self + other`, wrapping around at the boundary of\n \/\/\/ the type.\n fn wrapping_add(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingAdd, wrapping_add, u8);\nwrapping_impl!(WrappingAdd, wrapping_add, u16);\nwrapping_impl!(WrappingAdd, wrapping_add, u32);\nwrapping_impl!(WrappingAdd, wrapping_add, u64);\nwrapping_impl!(WrappingAdd, wrapping_add, usize);\n\nwrapping_impl!(WrappingAdd, wrapping_add, i8);\nwrapping_impl!(WrappingAdd, wrapping_add, i16);\nwrapping_impl!(WrappingAdd, wrapping_add, i32);\nwrapping_impl!(WrappingAdd, wrapping_add, i64);\nwrapping_impl!(WrappingAdd, wrapping_add, isize);\n\n\/\/\/ Performs subtraction that wrapps around on overflow.\npub trait WrappingSub: Sized + Sub<Self, Output=Self> {\n \/\/\/ Wrapping (modular) subtraction. Computes `self - other`, wrapping around at the boundary\n \/\/\/ of the type.\n fn wrapping_sub(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingSub, wrapping_sub, u8);\nwrapping_impl!(WrappingSub, wrapping_sub, u16);\nwrapping_impl!(WrappingSub, wrapping_sub, u32);\nwrapping_impl!(WrappingSub, wrapping_sub, u64);\nwrapping_impl!(WrappingSub, wrapping_sub, usize);\n\nwrapping_impl!(WrappingSub, wrapping_sub, i8);\nwrapping_impl!(WrappingSub, wrapping_sub, i16);\nwrapping_impl!(WrappingSub, wrapping_sub, i32);\nwrapping_impl!(WrappingSub, wrapping_sub, i64);\nwrapping_impl!(WrappingSub, wrapping_sub, isize);\n\n\/\/\/ Performs multiplication that wrapps around on overflow.\npub trait WrappingMul: Sized + Mul<Self, Output=Self> {\n \/\/\/ Wrapping (modular) multiplication. Computes `self * other`, wrapping around at the boundary\n \/\/\/ of the type.\n fn wrapping_mul(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingMul, wrapping_mul, u8);\nwrapping_impl!(WrappingMul, wrapping_mul, u16);\nwrapping_impl!(WrappingMul, wrapping_mul, u32);\nwrapping_impl!(WrappingMul, wrapping_mul, u64);\nwrapping_impl!(WrappingMul, wrapping_mul, usize);\n\nwrapping_impl!(WrappingMul, wrapping_mul, i8);\nwrapping_impl!(WrappingMul, wrapping_mul, i16);\nwrapping_impl!(WrappingMul, wrapping_mul, i32);\nwrapping_impl!(WrappingMul, wrapping_mul, i64);\nwrapping_impl!(WrappingMul, wrapping_mul, isize);\n\n\/\/\/ Performs division that wrapps around on overflow.\npub trait WrappingDiv: Sized + Div<Self, Output=Self> {\n \/\/\/ Wrapping (modular) division. Computes `self \/ other`, wrapping around at the boundary of\n \/\/\/ the type.\n \/\/\/\n \/\/\/ The only case where such wrapping can occur is when one divides `MIN \/ -1` on a signed type\n \/\/\/ (where `MIN` is the negative minimal value for the type); this is equivalent to `-MIN`, a\n \/\/\/ positive value that is too large to represent in the type. 
In such a case, this function\n \/\/\/ returns `MIN` itself.\n \/\/\/\n \/\/\/ # Panics\n \/\/\/\n \/\/\/ This function will panic if rhs is 0.\n fn wrapping_div(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingDiv, wrapping_div, u8);\nwrapping_impl!(WrappingDiv, wrapping_div, u16);\nwrapping_impl!(WrappingDiv, wrapping_div, u32);\nwrapping_impl!(WrappingDiv, wrapping_div, u64);\nwrapping_impl!(WrappingDiv, wrapping_div, usize);\n\nwrapping_impl!(WrappingDiv, wrapping_div, i8);\nwrapping_impl!(WrappingDiv, wrapping_div, i16);\nwrapping_impl!(WrappingDiv, wrapping_div, i32);\nwrapping_impl!(WrappingDiv, wrapping_div, i64);\nwrapping_impl!(WrappingDiv, wrapping_div, isize);\n\n\/\/\/ Performs bitwise shift left that wrapps around on overflow.\npub trait WrappingShl<RHS>: Sized + Shl<RHS, Output=Self> {\n \/\/\/ Panic-free bitwise shift-left; yields `self << mask(rhs)`, where `mask` removes any\n \/\/\/ high-order bits of rhs that would cause the shift to exceed the bitwidth of the type.\n \/\/\/\n \/\/\/ Note that this is *not* the same as a rotate-left; the RHS of a wrapping shift-left is\n \/\/\/ restricted to the range of the type, rather than the bits shifted out of the LHS being\n \/\/\/ returned to the other end. The primitive integer types all implement a `rotate_left`\n \/\/\/ function, which may be what you want instead.\n fn wrapping_shl(&self, v: &RHS) -> Self;\n}\n\nwrapping_impl!(WrappingShl, wrapping_shl, u8, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, u16, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, u32, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, u64, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, usize, u32);\n\nwrapping_impl!(WrappingShl, wrapping_shl, i8, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, i16, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, i32, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, i64, u32);\nwrapping_impl!(WrappingShl, wrapping_shl, isize, u32);\n\n\/\/\/ Performs bitwise shift right that wrapps around on overflow.\npub trait WrappingShr<RHS>: Sized + Shr<RHS, Output=Self> {\n \/\/\/ Panic-free bitwise shift-right; yields `self >> mask(rhs)`, where `mask` removes any\n \/\/\/ high-order bits of rhs that would cause the shift to exceed the bitwidth of the type.\n \/\/\/\n \/\/\/ Note that this is *not* the same as a rotate-right; the RHS of a wrapping shift-right is\n \/\/\/ restricted to the range of the type, rather than the bits shifted out of the LHS being\n \/\/\/ returned to the other end. The primitive integer types all implement a `rotate_right`\n \/\/\/ function, which may be what you want instead.\n fn wrapping_shr(&self, v: &RHS) -> Self;\n}\n\nwrapping_impl!(WrappingShr, wrapping_shr, u8, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, u16, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, u32, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, u64, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, usize, u32);\n\nwrapping_impl!(WrappingShr, wrapping_shr, i8, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, i16, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, i32, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, i64, u32);\nwrapping_impl!(WrappingShr, wrapping_shr, isize, u32);\n<commit_msg>Ensure compatibility with Rust 1.0.0.<commit_after>use std::ops::{Add, Sub, Mul};\n\nmacro_rules! 
wrapping_impl {\n ($trait_name:ident, $method:ident, $t:ty) => {\n impl $trait_name for $t {\n #[inline]\n fn $method(&self, v: &Self) -> Self {\n <$t>::$method(*self, *v)\n }\n }\n };\n ($trait_name:ident, $method:ident, $t:ty, $rhs:ty) => {\n impl $trait_name<$rhs> for $t {\n #[inline]\n fn $method(&self, v: &$rhs) -> Self {\n <$t>::$method(*self, *v)\n }\n }\n }\n}\n\n\/\/\/ Performs addition that wrapps around on overflow.\npub trait WrappingAdd: Sized + Add<Self, Output=Self> {\n \/\/\/ Wrapping (modular) addition. Computes `self + other`, wrapping around at the boundary of\n \/\/\/ the type.\n fn wrapping_add(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingAdd, wrapping_add, u8);\nwrapping_impl!(WrappingAdd, wrapping_add, u16);\nwrapping_impl!(WrappingAdd, wrapping_add, u32);\nwrapping_impl!(WrappingAdd, wrapping_add, u64);\nwrapping_impl!(WrappingAdd, wrapping_add, usize);\n\nwrapping_impl!(WrappingAdd, wrapping_add, i8);\nwrapping_impl!(WrappingAdd, wrapping_add, i16);\nwrapping_impl!(WrappingAdd, wrapping_add, i32);\nwrapping_impl!(WrappingAdd, wrapping_add, i64);\nwrapping_impl!(WrappingAdd, wrapping_add, isize);\n\n\/\/\/ Performs subtraction that wrapps around on overflow.\npub trait WrappingSub: Sized + Sub<Self, Output=Self> {\n \/\/\/ Wrapping (modular) subtraction. Computes `self - other`, wrapping around at the boundary\n \/\/\/ of the type.\n fn wrapping_sub(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingSub, wrapping_sub, u8);\nwrapping_impl!(WrappingSub, wrapping_sub, u16);\nwrapping_impl!(WrappingSub, wrapping_sub, u32);\nwrapping_impl!(WrappingSub, wrapping_sub, u64);\nwrapping_impl!(WrappingSub, wrapping_sub, usize);\n\nwrapping_impl!(WrappingSub, wrapping_sub, i8);\nwrapping_impl!(WrappingSub, wrapping_sub, i16);\nwrapping_impl!(WrappingSub, wrapping_sub, i32);\nwrapping_impl!(WrappingSub, wrapping_sub, i64);\nwrapping_impl!(WrappingSub, wrapping_sub, isize);\n\n\/\/\/ Performs multiplication that wrapps around on overflow.\npub trait WrappingMul: Sized + Mul<Self, Output=Self> {\n \/\/\/ Wrapping (modular) multiplication. Computes `self * other`, wrapping around at the boundary\n \/\/\/ of the type.\n fn wrapping_mul(&self, v: &Self) -> Self;\n}\n\nwrapping_impl!(WrappingMul, wrapping_mul, u8);\nwrapping_impl!(WrappingMul, wrapping_mul, u16);\nwrapping_impl!(WrappingMul, wrapping_mul, u32);\nwrapping_impl!(WrappingMul, wrapping_mul, u64);\nwrapping_impl!(WrappingMul, wrapping_mul, usize);\n\nwrapping_impl!(WrappingMul, wrapping_mul, i8);\nwrapping_impl!(WrappingMul, wrapping_mul, i16);\nwrapping_impl!(WrappingMul, wrapping_mul, i32);\nwrapping_impl!(WrappingMul, wrapping_mul, i64);\nwrapping_impl!(WrappingMul, wrapping_mul, isize);\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Initial version of the client<commit_after>\/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. 
See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\/\n\n#![crate_name=\"thrift-client\"]\n#![crate_type=\"bin\"]\n\n#[allow(dead_code)]\nstruct ThriftBinaryProtocol {\n x: i32,\n}\n\nimpl ThriftBinaryProtocol {\n\n fn new() -> ThriftBinaryProtocol {\n ThriftBinaryProtocol { x: 0 }\n }\n\n fn write_message_begin(&self, name: &str, message_type: i32, seq_id: i32) -> i32 {\n println!(\"Protocol: message begin\");\n \n \/\/ TODO: strict write\n let mut wsize: i32 = 0;\n wsize += self.write_string(name);\n wsize += self.write_byte(message_type as i8);\n wsize += self.write_i32(seq_id);\n wsize\n }\n \n fn write_string(&self, s: &str) -> i32 {\n println!(\"Protocol: {}\", s);\n s.len() as i32\n }\n \n fn write_byte(&self, b: i8) -> i32 {\n println!(\"Protocol: {}\", b);\n 1\n }\n \n fn write_i32(&self, i: i32) -> i32 {\n println!(\"Protocol: {}\", i);\n 4\n }\n \n fn write_message_end(&self) {\n println!(\"Protocol: end\");\n }\n}\n\n#[allow(dead_code)]\nstruct CalculatorPingArgs {\n dummy: i32 \/\/ TODO\n}\n\nimpl CalculatorPingArgs {\n\n fn new() -> CalculatorPingArgs {\n CalculatorPingArgs { dummy: 1 }\n }\n \n fn write(&self, oprot: &ThriftBinaryProtocol) {\n println!(\"CalculatorPingArgs::write\");\n }\n}\n\n#[allow(dead_code)]\nstruct CalculatorClient {\n oprotocol: ThriftBinaryProtocol,\n iprotocol: ThriftBinaryProtocol,\n}\n\nimpl CalculatorClient {\n\n fn new(protocol: ThriftBinaryProtocol) -> CalculatorClient {\n CalculatorClient { oprotocol: protocol, iprotocol: protocol }\n }\n \n fn ping(&self) {\n self.send_ping();\n self.receive_ping();\n }\n \n fn send_ping(&self) {\n let cseqid: i32 = 0;\n let T_CALL = 0; \/\/ TODO\n self.oprotocol.write_message_begin(\"ping\", T_CALL, cseqid);\n \n let args = CalculatorPingArgs::new();\n args.write(&self.oprotocol);\n \n self.oprotocol.write_message_end();\n \/\/ TODO:\n\/\/ self.oprotocol.get_transport().write_end();\n\/\/ self.oprotocol.get_transport().flush();\n }\n\n fn receive_ping(&self) {\n \/\/ TODO\n }\n}\n\npub fn main() {\n \/\/let socket = ThriftSocket::new(\"localhost\", 9090);\n \/\/let transport = ThriftBufferedTransport::new(socket);\n\n let protocol = ThriftBinaryProtocol::new( \/*transport*\/);\n let client = CalculatorClient::new(protocol);\n \n \/\/transport.open();\n \n client.ping();\n\n println!(\"PASS\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added CXXFLAGS=-std=c++11 to make<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add doc example for `url::Url::scheme`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>mugen<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added for future use, not in use at the moment<commit_after>\/*\nDo we need to represent emotes the same way\nTwitchLib does?\nIt adds a lot of functionality, so perhaps\nin a later verison it is something we can add.\n\nFor now we'll just use the string representation of the emote\n*\/<|endoftext|>"} {"text":"<commit_before><commit_msg>add back test for issue #6804<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n#![feature(slice_patterns)]\n#![allow(dead_code)]\n#![deny(illegal_floating_point_constant_pattern)]\n\n\/\/ Matching against NaN should result in a warning\n\nuse std::f64::NAN;\n\n#[rustc_error]\nfn main() {\n let x = NAN;\n match x {\n NAN => {}, \/\/~ ERROR floating point constants cannot be used\n \/\/~| WARNING hard error\n _ => {},\n };\n\n match [x, 1.0] {\n [NAN, _] => {}, \/\/~ ERROR floating point constants cannot be used\n \/\/~| WARNING hard error\n _ => {},\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #23411 - cmr:test-7950, r=huonw<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ tests the good error message, not \"missing module Foo\" or something else unexpected\n\nstruct Foo;\n\nfn main() {\n Foo::bar(); \/\/~ ERROR type `Foo` does not implement any method in scope named `bar`\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ @!has trait_self_link\/trait.Foo.html \/\/a\/@href ..\/trait_self_link\/trait.Foo.html\npub trait Foo {}\n\npub struct Bar;\n\nimpl Foo for Bar {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test errors for malformed inclusive ranges<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Make sure that invalid ranges generate an error during HIR lowering, not an ICE\n\n#![feature(inclusive_range_syntax)]\n\npub fn main() {\n ..;\n 0..;\n ..1;\n 0..1;\n\n ...; \/\/~ERROR inclusive range with no end\n 0...; \/\/~ERROR unexpected token\n ...1;\n 0...1;\n}\n\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add ui test<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\nfn a(&self) { }\n\/\/~^ ERROR `self` argument in bare function\n\nfn main() { }<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse common::Config;\n\n\/\/\/ Conversion table from triple OS name to Rust SYSNAME\nconst OS_TABLE: &'static [(&'static str, &'static str)] = &[\n (\"android\", \"android\"),\n (\"bitrig\", \"bitrig\"),\n (\"darwin\", \"macos\"),\n (\"dragonfly\", \"dragonfly\"),\n (\"freebsd\", \"freebsd\"),\n (\"haiku\", \"haiku\"),\n (\"ios\", \"ios\"),\n (\"linux\", \"linux\"),\n (\"mingw32\", \"windows\"),\n (\"netbsd\", \"netbsd\"),\n (\"openbsd\", \"openbsd\"),\n (\"win32\", \"windows\"),\n (\"windows\", \"windows\"),\n (\"solaris\", \"solaris\"),\n (\"emscripten\", \"emscripten\"),\n];\n\nconst ARCH_TABLE: &'static [(&'static str, &'static str)] = &[\n (\"aarch64\", \"aarch64\"),\n (\"amd64\", \"x86_64\"),\n (\"arm\", \"arm\"),\n (\"arm64\", \"aarch64\"),\n (\"hexagon\", \"hexagon\"),\n (\"i386\", \"x86\"),\n (\"i586\", \"x86\"),\n (\"i686\", \"x86\"),\n (\"mips\", \"mips\"),\n (\"msp430\", \"msp430\"),\n (\"powerpc\", \"powerpc\"),\n (\"powerpc64\", \"powerpc64\"),\n (\"s390x\", \"s390x\"),\n (\"sparc\", \"sparc\"),\n (\"x86_64\", \"x86_64\"),\n (\"xcore\", \"xcore\"),\n (\"asmjs\", \"asmjs\"),\n (\"wasm32\", \"wasm32\"),\n];\n\npub fn matches_os(triple: &str, name: &str) -> bool {\n \/\/ For the wasm32 bare target we ignore anything also ignored on emscripten\n \/\/ and then we also recognize `wasm32-bare` as the os for the target\n if triple == \"wasm32-unknown-unknown\" {\n return name == \"emscripten\" || name == \"wasm32-bare\"\n }\n for &(triple_os, os) in OS_TABLE {\n if triple.contains(triple_os) {\n return os == name;\n }\n }\n panic!(\"Cannot determine OS from triple\");\n}\npub fn get_arch(triple: &str) -> &'static str {\n for &(triple_arch, arch) in ARCH_TABLE {\n if triple.contains(triple_arch) {\n return arch;\n }\n }\n panic!(\"Cannot determine Architecture from triple\");\n}\n\npub fn get_env(triple: &str) -> Option<&str> {\n triple.split('-').nth(3)\n}\n\npub fn get_pointer_width(triple: &str) -> &'static str {\n if (triple.contains(\"64\") && !triple.ends_with(\"gnux32\")) || triple.starts_with(\"s390x\") {\n \"64bit\"\n } else {\n \"32bit\"\n }\n}\n\npub fn make_new_path(path: &str) -> String {\n assert!(cfg!(windows));\n \/\/ Windows just uses PATH as the library search path, so we have to\n \/\/ maintain the current value while adding our own\n match env::var(lib_path_env_var()) {\n Ok(curr) => format!(\"{}{}{}\", path, path_div(), curr),\n Err(..) => path.to_owned(),\n }\n}\n\npub fn lib_path_env_var() -> &'static str {\n \"PATH\"\n}\nfn path_div() -> &'static str {\n \";\"\n}\n\npub fn logv(config: &Config, s: String) {\n debug!(\"{}\", s);\n if config.verbose {\n println!(\"{}\", s);\n }\n}\n<commit_msg>Add CloudABI to the list of supported targets in compiletest.<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse common::Config;\n\n\/\/\/ Conversion table from triple OS name to Rust SYSNAME\nconst OS_TABLE: &'static [(&'static str, &'static str)] = &[\n (\"android\", \"android\"),\n (\"bitrig\", \"bitrig\"),\n (\"cloudabi\", \"cloudabi\"),\n (\"darwin\", \"macos\"),\n (\"dragonfly\", \"dragonfly\"),\n (\"freebsd\", \"freebsd\"),\n (\"haiku\", \"haiku\"),\n (\"ios\", \"ios\"),\n (\"linux\", \"linux\"),\n (\"mingw32\", \"windows\"),\n (\"netbsd\", \"netbsd\"),\n (\"openbsd\", \"openbsd\"),\n (\"win32\", \"windows\"),\n (\"windows\", \"windows\"),\n (\"solaris\", \"solaris\"),\n (\"emscripten\", \"emscripten\"),\n];\n\nconst ARCH_TABLE: &'static [(&'static str, &'static str)] = &[\n (\"aarch64\", \"aarch64\"),\n (\"amd64\", \"x86_64\"),\n (\"arm\", \"arm\"),\n (\"arm64\", \"aarch64\"),\n (\"hexagon\", \"hexagon\"),\n (\"i386\", \"x86\"),\n (\"i586\", \"x86\"),\n (\"i686\", \"x86\"),\n (\"mips\", \"mips\"),\n (\"msp430\", \"msp430\"),\n (\"powerpc\", \"powerpc\"),\n (\"powerpc64\", \"powerpc64\"),\n (\"s390x\", \"s390x\"),\n (\"sparc\", \"sparc\"),\n (\"x86_64\", \"x86_64\"),\n (\"xcore\", \"xcore\"),\n (\"asmjs\", \"asmjs\"),\n (\"wasm32\", \"wasm32\"),\n];\n\npub fn matches_os(triple: &str, name: &str) -> bool {\n \/\/ For the wasm32 bare target we ignore anything also ignored on emscripten\n \/\/ and then we also recognize `wasm32-bare` as the os for the target\n if triple == \"wasm32-unknown-unknown\" {\n return name == \"emscripten\" || name == \"wasm32-bare\"\n }\n for &(triple_os, os) in OS_TABLE {\n if triple.contains(triple_os) {\n return os == name;\n }\n }\n panic!(\"Cannot determine OS from triple\");\n}\npub fn get_arch(triple: &str) -> &'static str {\n for &(triple_arch, arch) in ARCH_TABLE {\n if triple.contains(triple_arch) {\n return arch;\n }\n }\n panic!(\"Cannot determine Architecture from triple\");\n}\n\npub fn get_env(triple: &str) -> Option<&str> {\n triple.split('-').nth(3)\n}\n\npub fn get_pointer_width(triple: &str) -> &'static str {\n if (triple.contains(\"64\") && !triple.ends_with(\"gnux32\")) || triple.starts_with(\"s390x\") {\n \"64bit\"\n } else {\n \"32bit\"\n }\n}\n\npub fn make_new_path(path: &str) -> String {\n assert!(cfg!(windows));\n \/\/ Windows just uses PATH as the library search path, so we have to\n \/\/ maintain the current value while adding our own\n match env::var(lib_path_env_var()) {\n Ok(curr) => format!(\"{}{}{}\", path, path_div(), curr),\n Err(..) 
=> path.to_owned(),\n }\n}\n\npub fn lib_path_env_var() -> &'static str {\n \"PATH\"\n}\nfn path_div() -> &'static str {\n \";\"\n}\n\npub fn logv(config: &Config, s: String) {\n debug!(\"{}\", s);\n if config.verbose {\n println!(\"{}\", s);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate num_cpus;\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse regex::RegexSet;\nuse gitignore::*;\nuse std::path::PathBuf;\nuse self::num_cpus::get;\nuse std::fs::Metadata;\n\n#[cfg(target_os = \"linux\")]\nuse std::os::linux::fs::MetadataExt;\n\n#[cfg(target_os = \"darwin\")]\nuse std::os::unix::fs::MetadataExt;\n\n#[cfg(target_os = \"linux\")]\npub fn size(m: &Metadata, blocks: bool) -> u64 {\n if blocks {\n m.st_blocks() * 512\n } else {\n m.len()\n }\n}\n\n#[cfg(target_os = \"windows\")]\npub fn size(m: &Metadata, _: bool) -> u64 {\n m.len()\n}\n\n#[cfg(target_os = \"darwin\")]\npub fn size(m: &Metadata, blocks: bool) -> u64 {\n if blocks {\n m.blocks() * 512 \/\/ idk if this is correct on bsd\/linux\n } else {\n m.len()\n }\n}\n\n\/\/\/ Gather the information from `.gitignore`, `.ignore`, and darcs `boring` files in a given\n\/\/\/ directory, and assemble a `RegexSet` from it.\npub fn mk_ignores(in_paths: &PathBuf, maybe_ignore: &Option<RegexSet>) -> Option<RegexSet> {\n\n if let Some(ref ignore) = *maybe_ignore {\n Some(ignore.to_owned())\n } else if let (ignore_path, Ok(mut file)) =\n {\n let mut ignore_path = in_paths.clone();\n ignore_path.push(\".ignore\");\n (ignore_path.clone(), File::open(ignore_path.clone()))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(file_contents_to_regex(&contents, &ignore_path))\n } else if let (gitignore_path, Ok(mut file)) =\n {\n let mut gitignore_path = in_paths.clone();\n gitignore_path.push(\".gitignore\");\n (gitignore_path.clone(), File::open(gitignore_path))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(file_contents_to_regex(&contents, &gitignore_path))\n } else if let (darcs_path, Ok(mut file)) =\n {\n let mut darcs_path = in_paths.clone();\n darcs_path.push(\"_darcs\/prefs\/boring\");\n (darcs_path.clone(), File::open(darcs_path))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(darcs_contents_to_regex(&contents, &darcs_path))\n } else {\n None\n }\n}\n\n\/\/\/ Helper function to get the number of CPUs. 
We subtract 1, because the main thread that's doing\n\/\/\/ the spawning counts as one OS thread.\npub fn get_processors() -> usize {\n let n = get();\n if n > 1 {\n n - 1\n } else {\n n\n }\n}\n<commit_msg>mac again<commit_after>extern crate num_cpus;\n\nuse std::fs::File;\nuse std::io::prelude::*;\nuse regex::RegexSet;\nuse gitignore::*;\nuse std::path::PathBuf;\nuse self::num_cpus::get;\nuse std::fs::Metadata;\n\n#[cfg(target_os = \"linux\")]\nuse std::os::linux::fs::MetadataExt;\n\n#[cfg(target_os = \"mac_os\")]\nuse std::os::unix::fs::MetadataExt;\n\n#[cfg(target_os = \"linux\")]\npub fn size(m: &Metadata, blocks: bool) -> u64 {\n if blocks {\n m.st_blocks() * 512\n } else {\n m.len()\n }\n}\n\n#[cfg(target_os = \"windows\")]\npub fn size(m: &Metadata, _: bool) -> u64 {\n m.len()\n}\n\n#[cfg(target_os = \"mac_os\")]\npub fn size(m: &Metadata, blocks: bool) -> u64 {\n if blocks {\n m.blocks() * 512 \/\/ idk if this is correct on bsd\/linux\n } else {\n m.len()\n }\n}\n\n\/\/\/ Gather the information from `.gitignore`, `.ignore`, and darcs `boring` files in a given\n\/\/\/ directory, and assemble a `RegexSet` from it.\npub fn mk_ignores(in_paths: &PathBuf, maybe_ignore: &Option<RegexSet>) -> Option<RegexSet> {\n\n if let Some(ref ignore) = *maybe_ignore {\n Some(ignore.to_owned())\n } else if let (ignore_path, Ok(mut file)) =\n {\n let mut ignore_path = in_paths.clone();\n ignore_path.push(\".ignore\");\n (ignore_path.clone(), File::open(ignore_path.clone()))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(file_contents_to_regex(&contents, &ignore_path))\n } else if let (gitignore_path, Ok(mut file)) =\n {\n let mut gitignore_path = in_paths.clone();\n gitignore_path.push(\".gitignore\");\n (gitignore_path.clone(), File::open(gitignore_path))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(file_contents_to_regex(&contents, &gitignore_path))\n } else if let (darcs_path, Ok(mut file)) =\n {\n let mut darcs_path = in_paths.clone();\n darcs_path.push(\"_darcs\/prefs\/boring\");\n (darcs_path.clone(), File::open(darcs_path))\n } {\n let mut contents = String::new();\n file.read_to_string(&mut contents)\n .expect(\"File read failed.\"); \/\/ ok because we check that the file exists\n Some(darcs_contents_to_regex(&contents, &darcs_path))\n } else {\n None\n }\n}\n\n\/\/\/ Helper function to get the number of CPUs. 
We subtract 1, because the main thread that's doing\n\/\/\/ the spawning counts as one OS thread.\npub fn get_processors() -> usize {\n let n = get();\n if n > 1 {\n n - 1\n } else {\n n\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make VgaConsole take chars instead of bytes<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add codegen test<commit_after>\/\/ compile-flags: -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n\n\/\/ Hack to get the correct size for the length part in slices\n\/\/ CHECK: @helper([[USIZE:i[0-9]+]] %_1)\n#[no_mangle]\npub fn helper(_: usize) {\n}\n\n\/\/ Check that we correctly generate a GEP for a ZST that is not included in Scalar layout\n\/\/ CHECK-LABEL: @scalar_layout\n#[no_mangle]\npub fn scalar_layout(s: &(u64, ())) {\n\/\/ CHECK: [[X0:%[0-9]+]] = bitcast i64* %s to i8*\n\/\/ CHECK-NEXT: [[X1:%[0-9]+]] = getelementptr i8, i8* [[X0]], [[USIZE]] 8\n let x = &s.1;\n &x; \/\/ keep variable in an alloca\n}\n\n\/\/ Check that we correctly generate a GEP for a ZST that is not included in ScalarPair layout\n\/\/ CHECK-LABEL: @scalarpair_layout\n#[no_mangle]\npub fn scalarpair_layout(s: &(u64, u32, ())) {\n\/\/ CHECK: [[X0:%[0-9]+]] = bitcast { i64, i32 }* %s to i8*\n\/\/ CHECK-NEXT: [[X1:%[0-9]+]] = getelementptr i8, i8* [[X0]], [[USIZE]] 12\n let x = &s.2;\n &x; \/\/ keep variable in an alloca\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #336 - kbknapp:issue-333, r=kbknapp<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start of asn1 type cereal<commit_after>use tag;\nuse err;\n\nuse std::io;\n\nstruct OctetString(Vec<u8>);\n\nimpl Asn1Serialize for OctetString {\n fn serialize<W: io::Write>(&self, writer: W) -> Result<(), err::EncodeError> {\n try!(writer.write(&self.0));\n }\n}\n\nimpl Asn1Deserialize for OctetString {\n fn deserialize<I: Iterator<Item=io::Result<u8>>>(reader: I) -> Result<Self, err::DecodeError> {\n unimplemented!();\n }\n}\n\nimpl Asn1Info for OctetString {\n fn asn1_type() -> tag::Type {\n \"OCCTET STRING\".into()\n }\n \n fn asn1_class() -> tag::Class {\n tag::Class::Universal\n }\n\n fn asn1_tagnum() -> tag::TagNum {\n 4.into()\n }\n\n fn asn1_constructed() -> bool {\n false\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added some lex tuples<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>epoll_ctl ffi stuff<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added `error.rs` module.<commit_after>use std;\nuse std::result;\nuse std::error::Error;\nuse std::fmt;\n\nuse time;\n\n#[derive(Debug, Clone)]\npub enum DurationError {\n StdOutOfRange,\n SystemTimeError(std::time::SystemTimeError),\n}\n\nimpl Error for DurationError {\n fn description(&self) -> &str {\n match *self {\n DurationError::StdOutOfRange => {\n \"Conversion between FloatDuration and std::time::Duration \\\n out of range\"\n }\n DurationError::SystemTimeError(ref e) => e.description(),\n }\n }\n\n fn cause(&self) -> Option<&Error> {\n match *self {\n DurationError::StdOutOfRange => None,\n DurationError::SystemTimeError(ref e) => Some(e),\n }\n }\n}\n\nimpl From<time::OutOfRangeError> for DurationError {\n fn from(_: time::OutOfRangeError) -> DurationError {\n DurationError::StdOutOfRange\n }\n}\nimpl From<std::time::SystemTimeError> for DurationError {\n fn from(err: std::time::SystemTimeError) -> DurationError {\n DurationError::SystemTimeError(err)\n }\n}\n\nimpl fmt::Display for DurationError {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n 
write!(f, \"{}\", self.description())\n }\n}\n\npub type Result<T> = result::Result<T, DurationError>;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Derive standard traits for HNil and HCons<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added `remove` on `Index`<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>load and draw image on screen<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add additional methods to help creating PluginMeta structs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add comments on modules<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Change writer::pack_directory to return io::Result<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/\/ A module for permission units\npub mod permission_unit;\n\/\/\/ A module for checking string (wildcard) matches\npub mod str_match;\n\/\/\/ A module for permissions\npub mod permission;\n\n#[test]\nfn test() {\n use str_match::*;\n use permission_unit::*;\n use permission::*;\n \/\/ Test string matches (wildcard chars)\n assert!(str_match(\"hey*hey\", \"heyabchey\"));\n assert!(str_match(\"hey\\\\*hey*\", \"hey*heycatsarefunny\"));\n \/\/ Test permission units\n assert!(PermissionUnit::from_str(\"rw=hey\").read());\n assert!(!PermissionUnit::from_str(\"r=hey\").write());\n assert!(PermissionUnit::from_str(\"r=hey\").read_foc());\n assert!(PermissionUnit::from_str(\"r=file:home\/*\").applies(&PermissionUnit::from_str(\"R=file:home\/lal\")));\n assert!(PermissionUnit::from_str(\"R=file:home\/lal\").read_foc);\n assert!(PermissionUnit::from_str(\"r=file:home\/lal\").read_foc());\n assert!(!PermissionUnit::from_str(\"RW=http:*\").read());\n \/\/ Test permissions\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/lal\")));\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/veryimportant\")));\n assert!(Permission::from_str(\"rw=i\\\\+can\\\\+do\\\\+like\\\\+this\").test(PermissionUnit::from_str(\"rw=file:i+can+do+like+this\")));\n \/\/ assert!(!Permission::from_str(\"RW=http:*+\").test(PermissionUnit::from_str(\"rw=http:\/\/google.com\")));\n \/\/ TODO: Failes when using uppercase RW\n}\n<commit_msg>It works! 
TODO: Focus<commit_after>\/\/\/ A module for permission units\npub mod permission_unit;\n\/\/\/ A module for checking string (wildcard) matches\npub mod str_match;\n\/\/\/ A module for permissions\npub mod permission;\n\n#[test]\nfn test() {\n use str_match::*;\n use permission_unit::*;\n use permission::*;\n \/\/ Test string matches (wildcard chars)\n assert!(str_match(\"hey*hey\", \"heyabchey\"));\n assert!(str_match(\"hey\\\\*hey*\", \"hey*heycatsarefunny\"));\n \/\/ Test permission units\n assert!(PermissionUnit::from_str(\"rw=hey\").read());\n assert!(!PermissionUnit::from_str(\"r=hey\").write());\n assert!(PermissionUnit::from_str(\"r=hey\").read_foc());\n assert!(PermissionUnit::from_str(\"r=file:home\/*\").applies(&PermissionUnit::from_str(\"R=file:home\/lal\")));\n assert!(PermissionUnit::from_str(\"R=file:home\/lal\").read_foc);\n assert!(PermissionUnit::from_str(\"r=file:home\/lal\").read_foc());\n assert!(!PermissionUnit::from_str(\"RW=http:*\").read());\n \/\/ Test permissions\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/lal\")));\n assert!(Permission::from_str(\"rw=file:home\/*-rw=file:veryimportant\").test(PermissionUnit::from_str(\"rw=file:home\/veryimportant\")));\n assert!(Permission::from_str(\"rw=i\\\\+can\\\\+do\\\\+like\\\\+this\").test(PermissionUnit::from_str(\"rw=file:i+can+do+like+this\")));\n\n \/\/ assert!(!Permission::from_str(\"RW=http:*\").test(PermissionUnit::from_str(\"rw=http:\/\/google.com\")));\n \/\/ TODO: Failes when using uppercase RW\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Change InvalidInput to EndOfFile in several cases<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add the processor.<commit_after>use std::collections::BTreeMap;\nuse yaml_rust::Yaml;\nuse project::parser::Command;\nuse tmux;\n\npub fn main(commands: &Vec<Command>) -> () {\n let sess = \"test\".to_string();\n\n let (first_window, exec_commands) = commands.split_at(1);\n tmux::new_session(sess.clone(), first_window[0].value.clone());\n\n for w in exec_commands {\n tmux::new_window(sess.clone(), w.value.clone());\n };\n\n tmux::open(sess.clone());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added string example<commit_after>use std::borrow::Cow;\n\nfn remove_spaces<'a>(input: &'a str) -> Cow<'a, str> {\n if input.contains(' ') {\n let mut buf = String::with_capacity(input.len());\n\n for c in input.chars() {\n if c != ' ' {\n buf.push(c);\n }\n }\n\n return Cow::Owned(buf);\n }\n\n return Cow::Borrowed(input);\n}\n\nfn main() {\n let s = remove_spaces(\"I love Rustlang\");\n println!(\"{}\", s);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add solution to problem 56<commit_after>#[macro_use] extern crate libeuler;\nextern crate num;\n\nuse num::bigint::{BigInt, ToBigInt};\nuse num::traits::{Zero, ToPrimitive};\n\/\/\/ A googol (10^100) is a massive number: one followed by one-hundred zeros; 100^100 is almost\n\/\/\/ unimaginably large: one followed by two-hundred zeros. Despite their size, the sum of the\n\/\/\/ digits in each number is only 1.\n\/\/\/\n\/\/\/ Considering natural numbers of the form, ab, where a, b < 100, what is the maximum digital sum?\nfn main() {\n solutions! 
{\n sol naive {\n let ten = 10.to_bigint().unwrap();\n let mut maxsum = 0;\n for a in 90..100 {\n let ab = a.to_bigint().unwrap();\n let mut ap = num::pow(ab.clone(), 89);\n for _ in 90..100 {\n ap = &ap * &ab;\n\n let mut d = ap.clone();\n let mut sum = 0;\n\n while d > BigInt::zero() {\n sum += (&d % &ten).to_i64().unwrap();\n d = &d \/ &ten;\n }\n\n if sum > maxsum {\n maxsum = sum;\n println!(\"{}\", maxsum);\n }\n }\n }\n\n maxsum\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use gl;\nuse gl::types::*;\nuse types::{Color, Viewport, Capability, GLError};\nuse buffer::{ArrayBufferBinder, ElementArrayBufferBinder};\nuse program::{ProgramBinder, ProgramAttrib};\nuse framebuffer::FramebufferBinder;\nuse renderbuffer::RenderbufferBinder;\nuse texture_units::TextureUnits;\n\npub struct Context {\n pub array_buffer: ArrayBufferBinder,\n pub element_array_buffer: ElementArrayBufferBinder,\n pub program: ProgramBinder,\n pub framebuffer: FramebufferBinder,\n pub renderbuffer: RenderbufferBinder,\n pub tex_units: TextureUnits\n}\n\nimpl Context {\n pub unsafe fn current_context() -> Self {\n Context {\n array_buffer: ArrayBufferBinder,\n element_array_buffer: ElementArrayBufferBinder,\n program: ProgramBinder,\n framebuffer: FramebufferBinder,\n renderbuffer: RenderbufferBinder,\n tex_units: TextureUnits::current()\n }\n }\n\n pub fn clear_color(&mut self, color: Color) {\n unsafe {\n gl::ClearColor(color.r, color.g, color.b, color.a);\n }\n }\n\n pub fn enable(&mut self, cap: Capability) {\n unsafe {\n gl::Enable(cap.gl_enum());\n dbg_gl_sanity_check! {\n GLError::InvalidEnum => \"`cap` is not a valid OpenGL capability\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn disable(&mut self, cap: Capability) {\n unsafe {\n gl::Disable(cap.gl_enum());\n dbg_gl_sanity_check! {\n GLError::InvalidEnum => \"`cap` is not a valid OpenGL capability\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn enable_vertex_attrib_array(&self, attrib: ProgramAttrib) {\n unsafe {\n gl::EnableVertexAttribArray(attrib.gl_index);\n dbg_gl_error! {\n GLError::InvalidValue => \"`index` is >= GL_MAX_VERTEX_ATTRIBS\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn viewport(&self, viewport: Viewport) {\n unsafe {\n gl::Viewport(viewport.x as GLint,\n viewport.y as GLint,\n viewport.width as GLsizei,\n viewport.height as GLsizei);\n dbg_gl_sanity_check! {\n GLError::InvalidValue => \"`width` or `height` is negative\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn get_error() -> Option<GLError> {\n unsafe {\n match gl::GetError() {\n gl::INVALID_ENUM =>\n Some(GLError::InvalidEnum),\n gl::INVALID_VALUE =>\n Some(GLError::InvalidValue),\n gl::INVALID_OPERATION =>\n Some(GLError::InvalidOperation),\n gl::INVALID_FRAMEBUFFER_OPERATION =>\n Some(GLError::InvalidFramebufferOperation),\n gl::OUT_OF_MEMORY =>\n Some(GLError::OutOfMemory),\n _ =>\n None\n }\n }\n }\n}\n\n#[macro_export]\nmacro_rules! bind_array_buffer {\n ($gl:expr, $buffer:expr) => {\n $gl.array_buffer.bind($buffer)\n }\n}\n\n#[macro_export]\nmacro_rules! bind_element_array_buffer {\n ($gl:expr, $buffer:expr) => {\n $gl.element_array_buffer.bind($buffer)\n }\n}\n\n#[macro_export]\nmacro_rules! use_program {\n ($gl:expr, $program:expr) => {\n $gl.program.bind($program)\n }\n}\n\n#[macro_export]\nmacro_rules! bind_framebuffer {\n ($gl:expr, $fbo:expr) => {\n $gl.framebuffer.bind($fbo)\n }\n}\n\n#[macro_export]\nmacro_rules! current_framebuffer_binding {\n ($gl:expr) => {\n $gl.framebuffer.current_binding()\n }\n}\n\n#[macro_export]\nmacro_rules! 
bind_renderbuffer {\n ($gl:expr, $renderbuffer:expr) => {\n $gl.renderbuffer.bind($renderbuffer)\n }\n}\n\n\/\/ HACK: Workaround for issue described here:\n\/\/ https:\/\/www.reddit.com\/r\/rust\/comments\/339yj3\/tuple_indexing_in_a_macro\/cqiyv4n\n#[macro_export]\nmacro_rules! _glitter_expr {\n ($x:expr) => ($x)\n}\n\n#[macro_export]\nmacro_rules! active_texture {\n ($gl:expr, $idx:tt) => {\n _glitter_expr!($gl.tex_units.$idx.active())\n }\n}\n\n#[macro_export]\nmacro_rules! active_texture_n {\n ($gl:expr, $idx:expr) => {\n $gl.tex_units.nth_unit($idx).active()\n }\n}\n<commit_msg>Add `ContextOf` type<commit_after>use gl;\nuse gl::types::*;\nuse types::{Color, Viewport, Capability, GLError};\nuse buffer::{ArrayBufferBinder, ElementArrayBufferBinder};\nuse program::{ProgramBinder, ProgramAttrib};\nuse framebuffer::FramebufferBinder;\nuse renderbuffer::RenderbufferBinder;\nuse texture_units::TextureUnits;\n\npub type Context = ContextOf<ArrayBufferBinder,\n ElementArrayBufferBinder,\n ProgramBinder,\n FramebufferBinder,\n RenderbufferBinder,\n TextureUnits>;\n\npub struct ContextOf<AB, EAB, P, FB, RB, TU> {\n pub array_buffer: AB,\n pub element_array_buffer: EAB,\n pub program: P,\n pub framebuffer: FB,\n pub renderbuffer: RB,\n pub tex_units: TU\n}\n\nimpl Context {\n pub unsafe fn current_context() -> Self {\n Context {\n array_buffer: ArrayBufferBinder,\n element_array_buffer: ElementArrayBufferBinder,\n program: ProgramBinder,\n framebuffer: FramebufferBinder,\n renderbuffer: RenderbufferBinder,\n tex_units: TextureUnits::current()\n }\n }\n\n pub fn clear_color(&mut self, color: Color) {\n unsafe {\n gl::ClearColor(color.r, color.g, color.b, color.a);\n }\n }\n\n pub fn enable(&mut self, cap: Capability) {\n unsafe {\n gl::Enable(cap.gl_enum());\n dbg_gl_sanity_check! {\n GLError::InvalidEnum => \"`cap` is not a valid OpenGL capability\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn disable(&mut self, cap: Capability) {\n unsafe {\n gl::Disable(cap.gl_enum());\n dbg_gl_sanity_check! {\n GLError::InvalidEnum => \"`cap` is not a valid OpenGL capability\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn enable_vertex_attrib_array(&self, attrib: ProgramAttrib) {\n unsafe {\n gl::EnableVertexAttribArray(attrib.gl_index);\n dbg_gl_error! {\n GLError::InvalidValue => \"`index` is >= GL_MAX_VERTEX_ATTRIBS\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn viewport(&self, viewport: Viewport) {\n unsafe {\n gl::Viewport(viewport.x as GLint,\n viewport.y as GLint,\n viewport.width as GLsizei,\n viewport.height as GLsizei);\n dbg_gl_sanity_check! {\n GLError::InvalidValue => \"`width` or `height` is negative\",\n _ => \"Unknown error\"\n }\n }\n }\n\n pub fn get_error() -> Option<GLError> {\n unsafe {\n match gl::GetError() {\n gl::INVALID_ENUM =>\n Some(GLError::InvalidEnum),\n gl::INVALID_VALUE =>\n Some(GLError::InvalidValue),\n gl::INVALID_OPERATION =>\n Some(GLError::InvalidOperation),\n gl::INVALID_FRAMEBUFFER_OPERATION =>\n Some(GLError::InvalidFramebufferOperation),\n gl::OUT_OF_MEMORY =>\n Some(GLError::OutOfMemory),\n _ =>\n None\n }\n }\n }\n}\n\n#[macro_export]\nmacro_rules! bind_array_buffer {\n ($gl:expr, $buffer:expr) => {\n $gl.array_buffer.bind($buffer)\n }\n}\n\n#[macro_export]\nmacro_rules! bind_element_array_buffer {\n ($gl:expr, $buffer:expr) => {\n $gl.element_array_buffer.bind($buffer)\n }\n}\n\n#[macro_export]\nmacro_rules! use_program {\n ($gl:expr, $program:expr) => {\n $gl.program.bind($program)\n }\n}\n\n#[macro_export]\nmacro_rules! 
bind_framebuffer {\n ($gl:expr, $fbo:expr) => {\n $gl.framebuffer.bind($fbo)\n }\n}\n\n#[macro_export]\nmacro_rules! current_framebuffer_binding {\n ($gl:expr) => {\n $gl.framebuffer.current_binding()\n }\n}\n\n#[macro_export]\nmacro_rules! bind_renderbuffer {\n ($gl:expr, $renderbuffer:expr) => {\n $gl.renderbuffer.bind($renderbuffer)\n }\n}\n\n\/\/ HACK: Workaround for issue described here:\n\/\/ https:\/\/www.reddit.com\/r\/rust\/comments\/339yj3\/tuple_indexing_in_a_macro\/cqiyv4n\n#[macro_export]\nmacro_rules! _glitter_expr {\n ($x:expr) => ($x)\n}\n\n#[macro_export]\nmacro_rules! active_texture {\n ($gl:expr, $idx:tt) => {\n _glitter_expr!($gl.tex_units.$idx.active())\n }\n}\n\n#[macro_export]\nmacro_rules! active_texture_n {\n ($gl:expr, $idx:expr) => {\n $gl.tex_units.nth_unit($idx).active()\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! This module defines the `Bits`, `Dibits`, and `Tribits` iterators as well as the\n\/\/! wrapper types `Bit`, `Dibit`, and `Tribit`, for working with sub-byte values.\n\/\/!\n\/\/! The wrapped values of `Bit`, `Dibit`, and `Tribit, are guaranteed to have only one,\n\/\/! two, or three bits, respectively.\n\nuse std;\n\n\/\/\/ Iterate over individual bits of a byte source, MSB to LSB.\npub type Bits<T> = SubByteIter<BitParams, T>;\n\/\/\/ Iterate over the dibits of a byte source, MSB to LSB.\npub type Dibits<T> = SubByteIter<DibitParams, T>;\n\n\/\/\/ Defines parameters needed for (power of two) sub-byte iterators.\npub trait IterParams {\n \/\/\/ Type to yield at each iteration.\n type IterType;\n\n \/\/\/ Number of bits to consume at each iteration.\n fn bits() -> u8;\n \/\/\/ Wrap the given bits in container type.\n fn wrap(bits: u8) -> Self::IterType;\n\n \/\/\/ Number of iterations needed for each byte.\n fn iterations() -> u8 { 8 \/ Self::bits() }\n\n \/\/\/ Verify the parameters are supported.\n fn validate() {\n \/\/ Only powers of two are valid because there can be no \"leftovers.\"\n assert!(Self::bits().is_power_of_two());\n }\n}\n\n\/\/\/ A single bit.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Bit(u8);\n\nimpl Bit {\n \/\/\/ Construct a new `Bit` with the given bit in the LSB position.\n pub fn new(bits: u8) -> Bit {\n assert!(bits & 0b11111110 == 0);\n Bit(bits)\n }\n\n \/\/\/ Get the wrapped bit value.\n pub fn bit(&self) -> u8 { self.0 }\n}\n\n\/\/\/ Parameters for `Bits` iterator.\npub struct BitParams;\n\nimpl IterParams for BitParams {\n type IterType = Bit;\n fn bits() -> u8 { 1 }\n fn wrap(bits: u8) -> Bit { Bit::new(bits) }\n}\n\n\/\/\/ Two bits.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Dibit(u8);\n\nimpl Dibit {\n \/\/\/ Construct a new `Dibit` with the two given bits in the LSB position.\n pub fn new(bits: u8) -> Dibit {\n assert!(bits & 0b11111100 == 0);\n Dibit(bits)\n }\n\n \/\/\/ Get the wrapped dibit.\n pub fn bits(&self) -> u8 { self.0 }\n}\n\n\/\/\/ Parameters for `Dibits` iterator.\npub struct DibitParams;\n\nimpl IterParams for DibitParams {\n type IterType = Dibit;\n fn bits() -> u8 { 2 }\n fn wrap(bits: u8) -> Dibit { Dibit::new(bits) }\n}\n\n\/\/\/ Three bits.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Tribit(u8);\n\nimpl Tribit {\n \/\/\/ Construct a new `Tribit` with the three given bits in the LSB position.\n pub fn new(bits: u8) -> Tribit {\n assert!(bits & 0b11111000 == 0);\n Tribit(bits)\n }\n\n \/\/\/ Get the wrapped tribit.\n pub fn bits(&self) -> u8 { self.0 }\n}\n\n\/\/\/ An iterator for sub-byte (bit-level) values.\npub 
struct SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n params: std::marker::PhantomData<P>,\n \/\/\/ Source of bytes.\n src: T,\n \/\/\/ Current bit-level index into the current byte.\n idx: u8,\n \/\/\/ Current byte in the source.\n byte: u8,\n}\n\nimpl<P, T> SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n \/\/\/ Construct a new `SubByteIter` over the given byte source. All bits are iterated\n \/\/\/ over, so the number of bits must be a byte multiple.\n pub fn new(src: T) -> SubByteIter<P, T> {\n SubByteIter {\n params: std::marker::PhantomData,\n src: src,\n byte: 0,\n idx: 0,\n }\n }\n}\n\nimpl<P, T> Iterator for SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n type Item = P::IterType;\n\n fn next(&mut self) -> Option<Self::Item> {\n if self.idx == 0 {\n self.byte = match self.src.next() {\n Some(b) => b,\n None => return None,\n };\n }\n\n \/\/ Extract MSBs.\n let bits = self.byte >> (8 - P::bits());\n\n \/\/ Strip off the MSBs for the next iteration.\n self.byte <<= P::bits();\n\n \/\/ Move to the next item and reset after all have been visited.\n self.idx += 1;\n self.idx %= P::iterations();\n\n Some(P::wrap(bits))\n }\n}\n\n\/\/\/ Iterates over the tribits in a byte source.\npub struct Tribits<T: Iterator<Item = u8>> {\n \/\/\/ The source of bytes.\n src: T,\n \/\/\/ Current bit buffer, containing either 6 or 3 bits (in MSB position) or 0.\n bits: u8,\n \/\/\/ Tribit index into `bits` (0 or 1).\n idx: usize,\n \/\/\/ Buffered bits from current source byte, to be added to `bits`.\n buf: u8,\n \/\/\/ Number of bits in `buf`, either 2, 4, 6, or 0.\n buf_bits: usize,\n}\n\nimpl<T: Iterator<Item = u8>> Tribits<T> {\n \/\/\/ Construct a new `Tribits` from the given source of bytes. The number of bytes must\n \/\/\/ be a multiple of 3 (a multiple of 24 bits).\n pub fn new(src: T) -> Tribits<T> {\n Tribits {\n src: src,\n bits: 0,\n idx: 0,\n buf: 0,\n buf_bits: 0,\n }\n }\n}\n\nimpl<T: Iterator<Item = u8>> Iterator for Tribits<T> {\n type Item = Tribit;\n\n fn next(&mut self) -> Option<Self::Item> {\n \/\/ If on the first tribit, it's time to flush the buffer and (maybe) load another\n \/\/ byte.\n if self.idx == 0 {\n \/\/ Flush and reset the buffer.\n self.bits = self.buf;\n self.buf = 0;\n\n \/\/ Calculate the number of bits to buffer for the next iteration.\n self.buf_bits += 2;\n self.buf_bits %= 8;\n\n \/\/ Only load a new byte if bits need to be buffered.\n if self.buf_bits != 0 {\n let next = match self.src.next() {\n Some(b) => b,\n None => if self.buf_bits == 2 {\n \/\/ In this case we've covered 8 tribits = 24 bits = 3 bytes\n \/\/ exactly, so it's fine if there are no more bytes.\n return None;\n } else {\n panic!(\"incomplete tribit\");\n }\n };\n\n \/\/ Add in some source bits after the MSBs.\n self.bits |= next >> self.buf_bits << 2;\n \/\/ Buffer the rest of the bits.\n self.buf = next << (8 - self.buf_bits);\n }\n }\n\n \/\/ Extract the 3 MSBs and strip them off for next time.\n let bits = self.bits >> 5;\n self.bits <<= 3;\n\n self.idx += 1;\n self.idx %= 2;\n\n Some(Tribit::new(bits))\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::*;\n\n #[test]\n fn validate_params() {\n BitParams::validate();\n DibitParams::validate();\n }\n\n #[test]\n fn test_bits() {\n const BITS: &'static [u8] = &[\n 0b00011011,\n 0b11001100,\n ];\n\n {\n let mut d = Dibits::new(BITS.iter().cloned());\n\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().unwrap().0 == 0b01);\n assert!(d.next().unwrap().0 == 0b10);\n 
assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().is_none());\n }\n\n {\n let mut b = Bits::new(BITS.iter().cloned());\n\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n }\n }\n\n #[test]\n fn test_tribits() {\n let bytes = [\n 0b00101001,\n 0b11001011,\n 0b10111000,\n 0b00101001,\n 0b11001011,\n 0b10111000,\n ];\n let mut t = Tribits::new(bytes.iter().cloned());\n\n assert_eq!(t.next().unwrap().bits(), 0b001);\n assert_eq!(t.next().unwrap().bits(), 0b010);\n assert_eq!(t.next().unwrap().bits(), 0b011);\n assert_eq!(t.next().unwrap().bits(), 0b100);\n assert_eq!(t.next().unwrap().bits(), 0b101);\n assert_eq!(t.next().unwrap().bits(), 0b110);\n assert_eq!(t.next().unwrap().bits(), 0b111);\n assert_eq!(t.next().unwrap().bits(), 0b000);\n assert_eq!(t.next().unwrap().bits(), 0b001);\n assert_eq!(t.next().unwrap().bits(), 0b010);\n assert_eq!(t.next().unwrap().bits(), 0b011);\n assert_eq!(t.next().unwrap().bits(), 0b100);\n assert_eq!(t.next().unwrap().bits(), 0b101);\n assert_eq!(t.next().unwrap().bits(), 0b110);\n assert_eq!(t.next().unwrap().bits(), 0b111);\n assert_eq!(t.next().unwrap().bits(), 0b000);\n assert!(t.next().is_none());\n }\n\n #[test]\n #[should_panic]\n fn test_tribits_panic() {\n let bytes = [1, 2, 3, 4];\n let t = Tribits::new(bytes.iter().cloned());\n\n for _ in t {}\n }\n}\n<commit_msg>SubByteIter doesn't need to be public<commit_after>\/\/! This module defines the `Bits`, `Dibits`, and `Tribits` iterators as well as the\n\/\/! wrapper types `Bit`, `Dibit`, and `Tribit`, for working with sub-byte values.\n\/\/!\n\/\/! The wrapped values of `Bit`, `Dibit`, and `Tribit, are guaranteed to have only one,\n\/\/! 
two, or three bits, respectively.\n\nuse std;\n\n\/\/\/ Iterate over individual bits of a byte source, MSB to LSB.\npub type Bits<T> = SubByteIter<BitParams, T>;\n\/\/\/ Iterate over the dibits of a byte source, MSB to LSB.\npub type Dibits<T> = SubByteIter<DibitParams, T>;\n\n\/\/\/ Defines parameters needed for (power of two) sub-byte iterators.\npub trait IterParams {\n \/\/\/ Type to yield at each iteration.\n type IterType;\n\n \/\/\/ Number of bits to consume at each iteration.\n fn bits() -> u8;\n \/\/\/ Wrap the given bits in container type.\n fn wrap(bits: u8) -> Self::IterType;\n\n \/\/\/ Number of iterations needed for each byte.\n fn iterations() -> u8 { 8 \/ Self::bits() }\n\n \/\/\/ Verify the parameters are supported.\n fn validate() {\n \/\/ Only powers of two are valid because there can be no \"leftovers.\"\n assert!(Self::bits().is_power_of_two());\n }\n}\n\n\/\/\/ A single bit.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Bit(u8);\n\nimpl Bit {\n \/\/\/ Construct a new `Bit` with the given bit in the LSB position.\n pub fn new(bits: u8) -> Bit {\n assert!(bits & 0b11111110 == 0);\n Bit(bits)\n }\n\n \/\/\/ Get the wrapped bit value.\n pub fn bit(&self) -> u8 { self.0 }\n}\n\n\/\/\/ Parameters for `Bits` iterator.\npub struct BitParams;\n\nimpl IterParams for BitParams {\n type IterType = Bit;\n fn bits() -> u8 { 1 }\n fn wrap(bits: u8) -> Bit { Bit::new(bits) }\n}\n\n\/\/\/ Two bits.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Dibit(u8);\n\nimpl Dibit {\n \/\/\/ Construct a new `Dibit` with the two given bits in the LSB position.\n pub fn new(bits: u8) -> Dibit {\n assert!(bits & 0b11111100 == 0);\n Dibit(bits)\n }\n\n \/\/\/ Get the wrapped dibit.\n pub fn bits(&self) -> u8 { self.0 }\n}\n\n\/\/\/ Parameters for `Dibits` iterator.\npub struct DibitParams;\n\nimpl IterParams for DibitParams {\n type IterType = Dibit;\n fn bits() -> u8 { 2 }\n fn wrap(bits: u8) -> Dibit { Dibit::new(bits) }\n}\n\n\/\/\/ Three bits.\n#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\npub struct Tribit(u8);\n\nimpl Tribit {\n \/\/\/ Construct a new `Tribit` with the three given bits in the LSB position.\n pub fn new(bits: u8) -> Tribit {\n assert!(bits & 0b11111000 == 0);\n Tribit(bits)\n }\n\n \/\/\/ Get the wrapped tribit.\n pub fn bits(&self) -> u8 { self.0 }\n}\n\n\/\/\/ An iterator for sub-byte (bit-level) values.\nstruct SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n params: std::marker::PhantomData<P>,\n \/\/\/ Source of bytes.\n src: T,\n \/\/\/ Current bit-level index into the current byte.\n idx: u8,\n \/\/\/ Current byte in the source.\n byte: u8,\n}\n\nimpl<P, T> SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n \/\/\/ Construct a new `SubByteIter` over the given byte source. 
All bits are iterated\n \/\/\/ over, so the number of bits must be a byte multiple.\n pub fn new(src: T) -> SubByteIter<P, T> {\n SubByteIter {\n params: std::marker::PhantomData,\n src: src,\n byte: 0,\n idx: 0,\n }\n }\n}\n\nimpl<P, T> Iterator for SubByteIter<P, T> where\n P: IterParams, T: Iterator<Item = u8>\n{\n type Item = P::IterType;\n\n fn next(&mut self) -> Option<Self::Item> {\n if self.idx == 0 {\n self.byte = match self.src.next() {\n Some(b) => b,\n None => return None,\n };\n }\n\n \/\/ Extract MSBs.\n let bits = self.byte >> (8 - P::bits());\n\n \/\/ Strip off the MSBs for the next iteration.\n self.byte <<= P::bits();\n\n \/\/ Move to the next item and reset after all have been visited.\n self.idx += 1;\n self.idx %= P::iterations();\n\n Some(P::wrap(bits))\n }\n}\n\n\/\/\/ Iterates over the tribits in a byte source.\npub struct Tribits<T: Iterator<Item = u8>> {\n \/\/\/ The source of bytes.\n src: T,\n \/\/\/ Current bit buffer, containing either 6 or 3 bits (in MSB position) or 0.\n bits: u8,\n \/\/\/ Tribit index into `bits` (0 or 1).\n idx: usize,\n \/\/\/ Buffered bits from current source byte, to be added to `bits`.\n buf: u8,\n \/\/\/ Number of bits in `buf`, either 2, 4, 6, or 0.\n buf_bits: usize,\n}\n\nimpl<T: Iterator<Item = u8>> Tribits<T> {\n \/\/\/ Construct a new `Tribits` from the given source of bytes. The number of bytes must\n \/\/\/ be a multiple of 3 (a multiple of 24 bits).\n pub fn new(src: T) -> Tribits<T> {\n Tribits {\n src: src,\n bits: 0,\n idx: 0,\n buf: 0,\n buf_bits: 0,\n }\n }\n}\n\nimpl<T: Iterator<Item = u8>> Iterator for Tribits<T> {\n type Item = Tribit;\n\n fn next(&mut self) -> Option<Self::Item> {\n \/\/ If on the first tribit, it's time to flush the buffer and (maybe) load another\n \/\/ byte.\n if self.idx == 0 {\n \/\/ Flush and reset the buffer.\n self.bits = self.buf;\n self.buf = 0;\n\n \/\/ Calculate the number of bits to buffer for the next iteration.\n self.buf_bits += 2;\n self.buf_bits %= 8;\n\n \/\/ Only load a new byte if bits need to be buffered.\n if self.buf_bits != 0 {\n let next = match self.src.next() {\n Some(b) => b,\n None => if self.buf_bits == 2 {\n \/\/ In this case we've covered 8 tribits = 24 bits = 3 bytes\n \/\/ exactly, so it's fine if there are no more bytes.\n return None;\n } else {\n panic!(\"incomplete tribit\");\n }\n };\n\n \/\/ Add in some source bits after the MSBs.\n self.bits |= next >> self.buf_bits << 2;\n \/\/ Buffer the rest of the bits.\n self.buf = next << (8 - self.buf_bits);\n }\n }\n\n \/\/ Extract the 3 MSBs and strip them off for next time.\n let bits = self.bits >> 5;\n self.bits <<= 3;\n\n self.idx += 1;\n self.idx %= 2;\n\n Some(Tribit::new(bits))\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::*;\n\n #[test]\n fn validate_params() {\n BitParams::validate();\n DibitParams::validate();\n }\n\n #[test]\n fn test_bits() {\n const BITS: &'static [u8] = &[\n 0b00011011,\n 0b11001100,\n ];\n\n {\n let mut d = Dibits::new(BITS.iter().cloned());\n\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().unwrap().0 == 0b01);\n assert!(d.next().unwrap().0 == 0b10);\n assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().unwrap().0 == 0b11);\n assert!(d.next().unwrap().0 == 0b00);\n assert!(d.next().is_none());\n }\n\n {\n let mut b = Bits::new(BITS.iter().cloned());\n\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 
== 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 1);\n assert!(b.next().unwrap().0 == 0);\n assert!(b.next().unwrap().0 == 0);\n }\n }\n\n #[test]\n fn test_tribits() {\n let bytes = [\n 0b00101001,\n 0b11001011,\n 0b10111000,\n 0b00101001,\n 0b11001011,\n 0b10111000,\n ];\n let mut t = Tribits::new(bytes.iter().cloned());\n\n assert_eq!(t.next().unwrap().bits(), 0b001);\n assert_eq!(t.next().unwrap().bits(), 0b010);\n assert_eq!(t.next().unwrap().bits(), 0b011);\n assert_eq!(t.next().unwrap().bits(), 0b100);\n assert_eq!(t.next().unwrap().bits(), 0b101);\n assert_eq!(t.next().unwrap().bits(), 0b110);\n assert_eq!(t.next().unwrap().bits(), 0b111);\n assert_eq!(t.next().unwrap().bits(), 0b000);\n assert_eq!(t.next().unwrap().bits(), 0b001);\n assert_eq!(t.next().unwrap().bits(), 0b010);\n assert_eq!(t.next().unwrap().bits(), 0b011);\n assert_eq!(t.next().unwrap().bits(), 0b100);\n assert_eq!(t.next().unwrap().bits(), 0b101);\n assert_eq!(t.next().unwrap().bits(), 0b110);\n assert_eq!(t.next().unwrap().bits(), 0b111);\n assert_eq!(t.next().unwrap().bits(), 0b000);\n assert!(t.next().is_none());\n }\n\n #[test]\n #[should_panic]\n fn test_tribits_panic() {\n let bytes = [1, 2, 3, 4];\n let t = Tribits::new(bytes.iter().cloned());\n\n for _ in t {}\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(test)]\n\nextern crate clap;\nextern crate test;\n\nuse clap::{App, Arg, SubCommand};\n\nuse test::Bencher;\n\nstatic M_VAL_NAMES: [&'static str; 2] = [\"one\", \"two\"];\nstatic ARGS: &'static str = \"-o --option=[opt]... 'tests options'\n [positional] 'tests positionals'\";\nstatic OPT3_VALS: [&'static str; 2] = [\"fast\", \"slow\"];\nstatic POS3_VALS: [&'static str; 2] = [\"vi\", \"emacs\"];\n\nmacro_rules! create_app {\n () => ({\n App::new(\"claptests\")\n .version(\"0.1\")\n .about(\"tests clap library\")\n .author(\"Kevin K. <kbknapp@gmail.com>\")\n .args_from_usage(ARGS)\n .arg(Arg::from_usage(\"-f --flag... 'tests flags'\")\n .global(true))\n .args(vec![\n Arg::from_usage(\"[flag2] -F 'tests flags with exclusions'\").conflicts_with(\"flag\").requires(\"option2\"),\n Arg::from_usage(\"--long-option-2 [option2] 'tests long options with exclusions'\").conflicts_with(\"option\").requires(\"positional2\"),\n Arg::from_usage(\"[positional2] 'tests positionals with exclusions'\"),\n Arg::from_usage(\"-O --Option [option3] 'tests options with specific value sets'\").possible_values(&OPT3_VALS),\n Arg::from_usage(\"[positional3]... 'tests positionals with specific values'\").possible_values(&POS3_VALS),\n Arg::from_usage(\"--multvals [multvals] 'Tests mutliple values, not mult occs'\").value_names(&M_VAL_NAMES),\n Arg::from_usage(\"--multvalsmo [multvalsmo]... 'Tests mutliple values, not mult occs'\").value_names(&M_VAL_NAMES),\n Arg::from_usage(\"--minvals2 [minvals]... 'Tests 2 min vals'\").min_values(2),\n Arg::from_usage(\"--maxvals3 [maxvals]... 'Tests 3 max vals'\").max_values(3)\n ])\n .subcommand(SubCommand::with_name(\"subcmd\")\n .about(\"tests subcommands\")\n .version(\"0.1\")\n .author(\"Kevin K. <kbknapp@gmail.com>\")\n .arg_from_usage(\"-o --option [scoption]... 
'tests options'\")\n .arg_from_usage(\"[scpositional] 'tests positionals'\"))\n })\n}\n\n#[bench]\nfn build_app(b: &mut Bencher) {\n\n b.iter(|| create_app!());\n}\n\n#[bench]\nfn parse_clean(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\"]));\n}\n\n<commit_msg>tests(benchmark): Add benchmarks<commit_after>#![feature(test)]\n\nextern crate clap;\nextern crate test;\n\nuse clap::{App, Arg, SubCommand};\n\nuse test::Bencher;\n\nstatic M_VAL_NAMES: [&'static str; 2] = [\"one\", \"two\"];\nstatic ARGS: &'static str = \"-o --option=[opt]... 'tests options'\n [positional] 'tests positionals'\";\nstatic OPT3_VALS: [&'static str; 2] = [\"fast\", \"slow\"];\nstatic POS3_VALS: [&'static str; 2] = [\"vi\", \"emacs\"];\n\nmacro_rules! create_app {\n () => ({\n App::new(\"claptests\")\n .version(\"0.1\")\n .about(\"tests clap library\")\n .author(\"Kevin K. <kbknapp@gmail.com>\")\n .args_from_usage(ARGS)\n .arg(Arg::from_usage(\"-f --flag... 'tests flags'\")\n .global(true))\n .args(vec![\n Arg::from_usage(\"[flag2] -F 'tests flags with exclusions'\").conflicts_with(\"flag\").requires(\"option2\"),\n Arg::from_usage(\"--long-option-2 [option2] 'tests long options with exclusions'\").conflicts_with(\"option\").requires(\"positional2\"),\n Arg::from_usage(\"[positional2] 'tests positionals with exclusions'\"),\n Arg::from_usage(\"-O --Option [option3] 'tests options with specific value sets'\").possible_values(&OPT3_VALS),\n Arg::from_usage(\"[positional3]... 'tests positionals with specific values'\").possible_values(&POS3_VALS),\n Arg::from_usage(\"--multvals [multvals] 'Tests mutliple values, not mult occs'\").value_names(&M_VAL_NAMES),\n Arg::from_usage(\"--multvalsmo [multvalsmo]... 'Tests mutliple values, not mult occs'\").value_names(&M_VAL_NAMES),\n Arg::from_usage(\"--minvals2 [minvals]... 'Tests 2 min vals'\").min_values(2),\n Arg::from_usage(\"--maxvals3 [maxvals]... 'Tests 3 max vals'\").max_values(3)\n ])\n .subcommand(SubCommand::with_name(\"subcmd\")\n .about(\"tests subcommands\")\n .version(\"0.1\")\n .author(\"Kevin K. <kbknapp@gmail.com>\")\n .arg_from_usage(\"-o --option [scoption]... 
'tests options'\")\n .arg_from_usage(\"[scpositional] 'tests positionals'\"))\n })\n}\n\n#[bench]\nfn build_app(b: &mut Bencher) {\n\n b.iter(|| create_app!());\n}\n\n#[bench]\nfn parse_clean(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\"]));\n}\n\n#[bench]\nfn parse_flag(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"-f\"]));\n}\n\n#[bench]\nfn parse_option(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"-o\", \"option1\"]));\n}\n\n#[bench]\nfn parse_positional(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"arg1\"]));\n}\n\n#[bench]\nfn parse_sc_clean(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"subcmd\"]));\n}\n\n#[bench]\nfn parse_sc_flag(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"subcmd\", \"-f\"]));\n}\n\n#[bench]\nfn parse_sc_option(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"subcmd\", \"-o\", \"option1\"]));\n}\n\n#[bench]\nfn parse_sc_positional(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"subcmd\", \"arg1\"]));\n}\n\n#[bench]\nfn parse_complex1(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"-ff\", \"-o\", \"option1\", \"arg1\", \"-O\", \"fast\", \"arg2\", \"--multvals\", \"one\", \"two\", \"three\"]));\n}\n\n#[bench]\nfn parse_complex2(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"arg1\", \"-f\", \"arg2\", \"--long-option-2\", \"some\", \"-O\", \"slow\", \"--multvalsmo\", \"one\", \"two\", \"--minvals2\", \"3\", \"2\", \"1\"]));\n}\n\n\n#[bench]\nfn parse_sc_complex(b: &mut Bencher) {\n b.iter(|| create_app!().get_matches_from(vec![\"\", \"subcmd\", \"-f\", \"-o\", \"option1\", \"arg1\"]));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove get_description function<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove bounds checks in wram<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! The high-level interface from script to layout. 
Using this abstract interface helps reduce\n\/\/\/ coupling between these two components, and enables the DOM to be placed in a separate crate\n\/\/\/ from layout.\n\nuse dom::node::LayoutDataRef;\n\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse script_traits::{ScriptControlChan, OpaqueScriptLayoutChannel, UntrustedNodeAddress};\nuse servo_msg::constellation_msg::{PipelineExitType, WindowSizeData};\nuse util::geometry::Au;\nuse std::any::Any;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse std::boxed::BoxAny;\nuse style::stylesheets::Stylesheet;\nuse url::Url;\n\npub use dom::node::TrustedNodeAddress;\n\n\/\/\/ Asynchronous messages that script can send to layout.\npub enum Msg {\n \/\/\/ Adds the given stylesheet to the document.\n AddStylesheet(Stylesheet),\n\n \/\/\/ Adds the given stylesheet to the document.\n LoadStylesheet(Url),\n\n \/\/\/ Puts a document into quirks mode, causing the quirks mode stylesheet to be loaded.\n SetQuirksMode,\n\n \/\/\/ Requests a reflow.\n Reflow(Box<Reflow>),\n\n \/\/\/ Get an RPC interface.\n GetRPC(Sender<Box<LayoutRPC + Send>>),\n\n \/\/\/ Destroys layout data associated with a DOM node.\n \/\/\/\n \/\/\/ TODO(pcwalton): Maybe think about batching to avoid message traffic.\n ReapLayoutData(LayoutDataRef),\n\n \/\/\/ Requests that the layout task enter a quiescent state in which no more messages are\n \/\/\/ accepted except `ExitMsg`. A response message will be sent on the supplied channel when\n \/\/\/ this happens.\n PrepareToExit(Sender<()>),\n\n \/\/\/ Requests that the layout task immediately shut down. There must be no more nodes left after\n \/\/\/ this, or layout will crash.\n ExitNow(PipelineExitType),\n}\n\n\/\/\/ Synchronous messages that script can send to layout.\n\/\/\/\n\/\/\/ In general, you should use messages to talk to Layout. Use the RPC interface\n\/\/\/ if and only if the work is\n\/\/\/\n\/\/\/ 1) read-only with respect to LayoutTaskData,\n\/\/\/ 2) small,\n\/\/ 3) and really needs to be fast.\npub trait LayoutRPC {\n \/\/\/ Requests the dimensions of the content box, as in the `getBoundingClientRect()` call.\n fn content_box(&self) -> ContentBoxResponse;\n \/\/\/ Requests the dimensions of all the content boxes, as in the `getClientRects()` call.\n fn content_boxes(&self) -> ContentBoxesResponse;\n \/\/\/ Requests the node containing the point of interest\n fn hit_test(&self, node: TrustedNodeAddress, point: Point2D<f32>) -> Result<HitTestResponse, ()>;\n fn mouse_over(&self, node: TrustedNodeAddress, point: Point2D<f32>) -> Result<MouseOverResponse, ()>;\n}\n\npub struct ContentBoxResponse(pub Rect<Au>);\npub struct ContentBoxesResponse(pub Vec<Rect<Au>>);\npub struct HitTestResponse(pub UntrustedNodeAddress);\npub struct MouseOverResponse(pub Vec<UntrustedNodeAddress>);\n\n\/\/\/ Why we're doing reflow.\n#[derive(PartialEq, Show)]\npub enum ReflowGoal {\n \/\/\/ We're reflowing in order to send a display list to the screen.\n ForDisplay,\n \/\/\/ We're reflowing in order to satisfy a script query. 
No display list will be created.\n ForScriptQuery,\n}\n\n\/\/\/ Any query to perform with this reflow.\npub enum ReflowQueryType {\n NoQuery,\n ContentBoxQuery(TrustedNodeAddress),\n ContentBoxesQuery(TrustedNodeAddress),\n}\n\n\/\/\/ Information needed for a reflow.\npub struct Reflow {\n \/\/\/ The document node.\n pub document_root: TrustedNodeAddress,\n \/\/\/ The goal of reflow: either to render to the screen or to flush layout info for script.\n pub goal: ReflowGoal,\n \/\/\/ The URL of the page.\n pub url: Url,\n \/\/\/ Is the current reflow of an iframe, as opposed to a root window?\n pub iframe: bool,\n \/\/\/ The channel through which messages can be sent back to the script task.\n pub script_chan: ScriptControlChan,\n \/\/\/ The current window size.\n pub window_size: WindowSizeData,\n \/\/\/ The channel that we send a notification to.\n pub script_join_chan: Sender<()>,\n \/\/\/ Unique identifier\n pub id: uint,\n \/\/\/ The type of query if any to perform during this reflow.\n pub query_type: ReflowQueryType,\n \/\/\/ A clipping rectangle for the page, an enlarged rectangle containing the viewport.\n pub page_clip_rect: Rect<Au>,\n}\n\n\/\/\/ Encapsulates a channel to the layout task.\n#[derive(Clone)]\npub struct LayoutChan(pub Sender<Msg>);\n\nimpl LayoutChan {\n pub fn new() -> (Receiver<Msg>, LayoutChan) {\n let (chan, port) = channel();\n (port, LayoutChan(chan))\n }\n}\n\n\/\/\/ A trait to manage opaque references to script<->layout channels without needing\n\/\/\/ to expose the message type to crates that don't need to know about them.\npub trait ScriptLayoutChan {\n fn new(sender: Sender<Msg>, receiver: Receiver<Msg>) -> Self;\n fn sender(&self) -> Sender<Msg>;\n fn receiver(self) -> Receiver<Msg>;\n}\n\nimpl ScriptLayoutChan for OpaqueScriptLayoutChannel {\n fn new(sender: Sender<Msg>, receiver: Receiver<Msg>) -> OpaqueScriptLayoutChannel {\n let inner = (box sender as Box<Any+Send>, box receiver as Box<Any+Send>);\n OpaqueScriptLayoutChannel(inner)\n }\n\n fn sender(&self) -> Sender<Msg> {\n let &OpaqueScriptLayoutChannel((ref sender, _)) = self;\n (*sender.downcast_ref::<Sender<Msg>>().unwrap()).clone()\n }\n\n fn receiver(self) -> Receiver<Msg> {\n let OpaqueScriptLayoutChannel((_, receiver)) = self;\n *receiver.downcast::<Receiver<Msg>>().unwrap()\n }\n}\n<commit_msg>auto merge of #4814 : Ms2ger\/servo\/layout_interface-doc, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! The high-level interface from script to layout. Using this abstract\n\/\/! interface helps reduce coupling between these two components, and enables\n\/\/! 
the DOM to be placed in a separate crate from layout.\n\nuse dom::node::LayoutDataRef;\n\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse script_traits::{ScriptControlChan, OpaqueScriptLayoutChannel, UntrustedNodeAddress};\nuse servo_msg::constellation_msg::{PipelineExitType, WindowSizeData};\nuse util::geometry::Au;\nuse std::any::Any;\nuse std::sync::mpsc::{channel, Receiver, Sender};\nuse std::boxed::BoxAny;\nuse style::stylesheets::Stylesheet;\nuse url::Url;\n\npub use dom::node::TrustedNodeAddress;\n\n\/\/\/ Asynchronous messages that script can send to layout.\npub enum Msg {\n \/\/\/ Adds the given stylesheet to the document.\n AddStylesheet(Stylesheet),\n\n \/\/\/ Adds the given stylesheet to the document.\n LoadStylesheet(Url),\n\n \/\/\/ Puts a document into quirks mode, causing the quirks mode stylesheet to be loaded.\n SetQuirksMode,\n\n \/\/\/ Requests a reflow.\n Reflow(Box<Reflow>),\n\n \/\/\/ Get an RPC interface.\n GetRPC(Sender<Box<LayoutRPC + Send>>),\n\n \/\/\/ Destroys layout data associated with a DOM node.\n \/\/\/\n \/\/\/ TODO(pcwalton): Maybe think about batching to avoid message traffic.\n ReapLayoutData(LayoutDataRef),\n\n \/\/\/ Requests that the layout task enter a quiescent state in which no more messages are\n \/\/\/ accepted except `ExitMsg`. A response message will be sent on the supplied channel when\n \/\/\/ this happens.\n PrepareToExit(Sender<()>),\n\n \/\/\/ Requests that the layout task immediately shut down. There must be no more nodes left after\n \/\/\/ this, or layout will crash.\n ExitNow(PipelineExitType),\n}\n\n\/\/\/ Synchronous messages that script can send to layout.\n\/\/\/\n\/\/\/ In general, you should use messages to talk to Layout. Use the RPC interface\n\/\/\/ if and only if the work is\n\/\/\/\n\/\/\/ 1) read-only with respect to LayoutTaskData,\n\/\/\/ 2) small,\n\/\/ 3) and really needs to be fast.\npub trait LayoutRPC {\n \/\/\/ Requests the dimensions of the content box, as in the `getBoundingClientRect()` call.\n fn content_box(&self) -> ContentBoxResponse;\n \/\/\/ Requests the dimensions of all the content boxes, as in the `getClientRects()` call.\n fn content_boxes(&self) -> ContentBoxesResponse;\n \/\/\/ Requests the node containing the point of interest\n fn hit_test(&self, node: TrustedNodeAddress, point: Point2D<f32>) -> Result<HitTestResponse, ()>;\n fn mouse_over(&self, node: TrustedNodeAddress, point: Point2D<f32>) -> Result<MouseOverResponse, ()>;\n}\n\npub struct ContentBoxResponse(pub Rect<Au>);\npub struct ContentBoxesResponse(pub Vec<Rect<Au>>);\npub struct HitTestResponse(pub UntrustedNodeAddress);\npub struct MouseOverResponse(pub Vec<UntrustedNodeAddress>);\n\n\/\/\/ Why we're doing reflow.\n#[derive(PartialEq, Show)]\npub enum ReflowGoal {\n \/\/\/ We're reflowing in order to send a display list to the screen.\n ForDisplay,\n \/\/\/ We're reflowing in order to satisfy a script query. 
No display list will be created.\n ForScriptQuery,\n}\n\n\/\/\/ Any query to perform with this reflow.\npub enum ReflowQueryType {\n NoQuery,\n ContentBoxQuery(TrustedNodeAddress),\n ContentBoxesQuery(TrustedNodeAddress),\n}\n\n\/\/\/ Information needed for a reflow.\npub struct Reflow {\n \/\/\/ The document node.\n pub document_root: TrustedNodeAddress,\n \/\/\/ The goal of reflow: either to render to the screen or to flush layout info for script.\n pub goal: ReflowGoal,\n \/\/\/ The URL of the page.\n pub url: Url,\n \/\/\/ Is the current reflow of an iframe, as opposed to a root window?\n pub iframe: bool,\n \/\/\/ The channel through which messages can be sent back to the script task.\n pub script_chan: ScriptControlChan,\n \/\/\/ The current window size.\n pub window_size: WindowSizeData,\n \/\/\/ The channel that we send a notification to.\n pub script_join_chan: Sender<()>,\n \/\/\/ Unique identifier\n pub id: uint,\n \/\/\/ The type of query if any to perform during this reflow.\n pub query_type: ReflowQueryType,\n \/\/\/ A clipping rectangle for the page, an enlarged rectangle containing the viewport.\n pub page_clip_rect: Rect<Au>,\n}\n\n\/\/\/ Encapsulates a channel to the layout task.\n#[derive(Clone)]\npub struct LayoutChan(pub Sender<Msg>);\n\nimpl LayoutChan {\n pub fn new() -> (Receiver<Msg>, LayoutChan) {\n let (chan, port) = channel();\n (port, LayoutChan(chan))\n }\n}\n\n\/\/\/ A trait to manage opaque references to script<->layout channels without needing\n\/\/\/ to expose the message type to crates that don't need to know about them.\npub trait ScriptLayoutChan {\n fn new(sender: Sender<Msg>, receiver: Receiver<Msg>) -> Self;\n fn sender(&self) -> Sender<Msg>;\n fn receiver(self) -> Receiver<Msg>;\n}\n\nimpl ScriptLayoutChan for OpaqueScriptLayoutChannel {\n fn new(sender: Sender<Msg>, receiver: Receiver<Msg>) -> OpaqueScriptLayoutChannel {\n let inner = (box sender as Box<Any+Send>, box receiver as Box<Any+Send>);\n OpaqueScriptLayoutChannel(inner)\n }\n\n fn sender(&self) -> Sender<Msg> {\n let &OpaqueScriptLayoutChannel((ref sender, _)) = self;\n (*sender.downcast_ref::<Sender<Msg>>().unwrap()).clone()\n }\n\n fn receiver(self) -> Receiver<Msg> {\n let OpaqueScriptLayoutChannel((_, receiver)) = self;\n *receiver.downcast::<Receiver<Msg>>().unwrap()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example FTP client that uses our test certificate to connect to the test server<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>examples: Add simple usage example<commit_after>extern crate rs_release;\n\nuse rs_release::get_os_release;\n\nfn main() {\n match get_os_release() {\n Ok(os_release) => {\n println!(\"Parsed os-release:\");\n for (k, v) in os_release {\n println!(\"{}={}\", k, v);\n }\n }\n Err(e) => println!(\"ERROR: {:?}\", e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>parse<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Implementing parts of CompiledMethod<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Renumber uniform blocks, reusing the block for view matrices<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix BlockPos.proto_decode for negative coords<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>of the year passed<commit_after><|endoftext|>"} {"text":"<commit_before>use std::env::home_dir;\nuse std::path::PathBuf;\n\n#[cfg(not(windows))]\npub fn base() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n 
path.push(\".local\/share\/parallel\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn base() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\");\n path\n })\n}\n\n#[cfg(not(windows))]\npub fn processed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/processed\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn processed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/processed\");\n path\n })\n}\n\n#[cfg(not(windows))]\npub fn unprocessed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/unprocessed\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn unprocessed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/unprocessed\");\n path\n })\n}\n\n\n#[cfg(not(windows))]\npub fn errors() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/errors\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn errors() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/errors\");\n path\n })\n}\n\n#[cfg(not(windows))]\npub fn outputs_path() -> PathBuf {\n PathBuf::from(\"\/tmp\/parallel\/\")\n}\n\n#[cfg(not(windows))]\npub fn job(id: usize) -> (PathBuf, PathBuf) {\n let stdout = PathBuf::from(format!(\"\/tmp\/parallel\/stdout_{}\", id));\n let stderr = PathBuf::from(format!(\"\/tmp\/parallel\/stderr_{}\", id));\n (stdout, stderr)\n}\n\n#[cfg(windows)]\npub fn job(id: usize) -> (PathBuf, PathBuf) {\n home_dir().map(|mut stdout| {\n let mut stderr = stdout.clone();\n stdout.push(format!(\"AppData\/Local\/Temp\/parallel\/stdout_{}\", id));\n stderr.push(format!(\"AppData\/Local\/Temp\/parallel\/stderr_{}\", id));\n (stdout, stderr)\n }).expect(\"parallel: unable to open home folder\")\n}\n<commit_msg>Windows Build Fix<commit_after>use std::env::home_dir;\nuse std::path::PathBuf;\n\n#[cfg(not(windows))]\npub fn base() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn base() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\");\n path\n })\n}\n\n#[cfg(not(windows))]\npub fn processed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/processed\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn processed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/processed\");\n path\n })\n}\n\n#[cfg(not(windows))]\npub fn unprocessed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/unprocessed\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn unprocessed() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/unprocessed\");\n path\n })\n}\n\n\n#[cfg(not(windows))]\npub fn errors() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\".local\/share\/parallel\/errors\");\n path\n })\n}\n\n#[cfg(windows)]\npub fn errors() -> Option<PathBuf> {\n home_dir().map(|mut path| {\n path.push(\"AppData\/Local\/Temp\/parallel\/errors\");\n path\n })\n}\n\npub fn outputs_path() -> PathBuf {\n PathBuf::from(\"\/tmp\/parallel\/\")\n}\n\n#[cfg(not(windows))]\npub fn job(id: usize) -> (PathBuf, PathBuf) {\n let stdout = PathBuf::from(format!(\"\/tmp\/parallel\/stdout_{}\", id));\n let stderr = 
PathBuf::from(format!(\"\/tmp\/parallel\/stderr_{}\", id));\n (stdout, stderr)\n}\n\n#[cfg(windows)]\npub fn job(id: usize) -> (PathBuf, PathBuf) {\n home_dir().map(|mut stdout| {\n let mut stderr = stdout.clone();\n stdout.push(format!(\"AppData\/Local\/Temp\/parallel\/stdout_{}\", id));\n stderr.push(format!(\"AppData\/Local\/Temp\/parallel\/stderr_{}\", id));\n (stdout, stderr)\n }).expect(\"parallel: unable to open home folder\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>nbt: Use consistent naming for `io::Write` variables.<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::callback::eReportExceptions;\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, NodeCast, NodeDerived};\nuse dom::bindings::js::JS;\nuse dom::eventtarget::{Capturing, Bubbling, EventTarget};\nuse dom::event::{Event, Phase_At_Target, Phase_None, Phase_Bubbling, Phase_Capturing};\nuse dom::node::{Node, NodeHelpers};\n\n\/\/ See http:\/\/dom.spec.whatwg.org\/#concept-event-dispatch for the full dispatch algorithm\npub fn dispatch_event(target: &JS<EventTarget>,\n pseudo_target: Option<JS<EventTarget>>,\n event: &mut JS<Event>) -> bool {\n assert!(!event.get().dispatching);\n\n {\n let event = event.get_mut();\n event.target = match pseudo_target {\n Some(pseudo_target) => Some(pseudo_target),\n None => Some(target.clone())\n };\n event.dispatching = true;\n }\n\n let type_ = event.get().type_.clone();\n let mut chain = ~[];\n\n \/\/TODO: no chain if not participating in a tree\n if target.get().is_node() {\n let target_node: JS<Node> = NodeCast::to(target);\n for ancestor in target_node.ancestors() {\n let ancestor_target: JS<EventTarget> = EventTargetCast::from(&ancestor);\n chain.push(ancestor_target);\n }\n }\n\n event.get_mut().phase = Phase_Capturing;\n\n \/\/FIXME: The \"callback this value\" should be currentTarget\n\n \/* capturing *\/\n for cur_target in chain.rev_iter() {\n let stopped = match cur_target.get().get_listeners_for(type_, Capturing) {\n Some(listeners) => {\n event.get_mut().current_target = Some(cur_target.clone());\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n\n if event.get().stop_immediate {\n break;\n }\n }\n\n event.get().stop_propagation\n }\n None => false\n };\n\n if stopped {\n break;\n }\n }\n\n \/* at target *\/\n if !event.get().stop_propagation {\n {\n let event = event.get_mut();\n event.phase = Phase_At_Target;\n event.current_target = Some(target.clone());\n }\n\n let opt_listeners = target.get().get_listeners(type_);\n for listeners in opt_listeners.iter() {\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n if event.get().stop_immediate {\n break;\n }\n }\n }\n }\n\n \/* bubbling *\/\n if event.get().bubbles && !event.get().stop_propagation {\n event.get_mut().phase = Phase_Bubbling;\n\n for cur_target in chain.iter() {\n let stopped = match cur_target.get().get_listeners_for(type_, Bubbling) {\n Some(listeners) => {\n event.get_mut().current_target = Some(cur_target.clone());\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n\n if event.get().stop_immediate {\n break;\n }\n }\n\n event.get().stop_propagation\n }\n None => false\n };\n if stopped {\n break;\n }\n }\n }\n\n let event = 
event.get_mut();\n event.dispatching = false;\n event.phase = Phase_None;\n event.current_target = None;\n\n !event.DefaultPrevented()\n}\n<commit_msg>Use Option::or_else in eventdispatcher::dispatch_event.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::callback::eReportExceptions;\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, NodeCast, NodeDerived};\nuse dom::bindings::js::JS;\nuse dom::eventtarget::{Capturing, Bubbling, EventTarget};\nuse dom::event::{Event, Phase_At_Target, Phase_None, Phase_Bubbling, Phase_Capturing};\nuse dom::node::{Node, NodeHelpers};\n\n\/\/ See http:\/\/dom.spec.whatwg.org\/#concept-event-dispatch for the full dispatch algorithm\npub fn dispatch_event(target: &JS<EventTarget>,\n pseudo_target: Option<JS<EventTarget>>,\n event: &mut JS<Event>) -> bool {\n assert!(!event.get().dispatching);\n\n {\n let event = event.get_mut();\n event.target = pseudo_target.or_else(|| {\n Some(target.clone())\n });\n event.dispatching = true;\n }\n\n let type_ = event.get().type_.clone();\n let mut chain = ~[];\n\n \/\/TODO: no chain if not participating in a tree\n if target.get().is_node() {\n let target_node: JS<Node> = NodeCast::to(target);\n for ancestor in target_node.ancestors() {\n let ancestor_target: JS<EventTarget> = EventTargetCast::from(&ancestor);\n chain.push(ancestor_target);\n }\n }\n\n event.get_mut().phase = Phase_Capturing;\n\n \/\/FIXME: The \"callback this value\" should be currentTarget\n\n \/* capturing *\/\n for cur_target in chain.rev_iter() {\n let stopped = match cur_target.get().get_listeners_for(type_, Capturing) {\n Some(listeners) => {\n event.get_mut().current_target = Some(cur_target.clone());\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n\n if event.get().stop_immediate {\n break;\n }\n }\n\n event.get().stop_propagation\n }\n None => false\n };\n\n if stopped {\n break;\n }\n }\n\n \/* at target *\/\n if !event.get().stop_propagation {\n {\n let event = event.get_mut();\n event.phase = Phase_At_Target;\n event.current_target = Some(target.clone());\n }\n\n let opt_listeners = target.get().get_listeners(type_);\n for listeners in opt_listeners.iter() {\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n if event.get().stop_immediate {\n break;\n }\n }\n }\n }\n\n \/* bubbling *\/\n if event.get().bubbles && !event.get().stop_propagation {\n event.get_mut().phase = Phase_Bubbling;\n\n for cur_target in chain.iter() {\n let stopped = match cur_target.get().get_listeners_for(type_, Bubbling) {\n Some(listeners) => {\n event.get_mut().current_target = Some(cur_target.clone());\n for listener in listeners.iter() {\n listener.HandleEvent__(event, eReportExceptions);\n\n if event.get().stop_immediate {\n break;\n }\n }\n\n event.get().stop_propagation\n }\n None => false\n };\n if stopped {\n break;\n }\n }\n }\n\n let event = event.get_mut();\n event.dispatching = false;\n event.phase = Phase_None;\n event.current_target = None;\n\n !event.DefaultPrevented()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>#24468 call submit_resource_timing after creating the document parser<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix benchmark import<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>bench: Add tests for setting 
functions<commit_after>#![feature(test)]\n\nextern crate test;\nextern crate rle_vec;\n\nuse std::iter::FromIterator;\nuse std::iter::repeat;\nuse test::Bencher;\nuse rle_vec::RleVec;\n\n#[bench]\nfn rle_set_middle_10_000_unique_values(b: &mut Bencher) {\n b.iter(|| {\n let mut rle = RleVec::from_iter(0..10_000);\n rle.set(5_000, 424242);\n })\n}\n\n#[bench]\nfn vec_set_middle_10_000_unique_values(b: &mut Bencher) {\n b.iter(|| {\n let mut vec = Vec::from_iter(0..10_000);\n vec[5_000] = 424242;\n })\n}\n\n#[bench]\nfn rle_set_middle_10_000_equal_values(b: &mut Bencher) {\n b.iter(|| {\n let mut rle = RleVec::from_iter(repeat(0).take(10_000));\n rle.set(5_000, 424242);\n })\n}\n\n#[bench]\nfn vec_set_middle_10_000_equal_values(b: &mut Bencher) {\n b.iter(|| {\n let mut vec = Vec::from_iter(repeat(0).take(10_000));\n vec[5_000] = 424242;\n })\n}\n\n#[bench]\nfn rle_set_middle_10_000_runs_of_10_values(b: &mut Bencher) {\n b.iter(|| {\n let zeros = repeat(0).take(10);\n let ones = repeat(1).take(10);\n let iter = repeat(zeros.chain(ones)).flat_map(|x| x).take(10_000);\n\n let mut rle = RleVec::from_iter(iter);\n rle.set(5_000, 424242);\n })\n}\n\n#[bench]\nfn vec_set_middle_10_000_runs_of_10_values(b: &mut Bencher) {\n b.iter(|| {\n let zeros = repeat(0).take(10);\n let ones = repeat(1).take(10);\n let iter = repeat(zeros.chain(ones)).flat_map(|x| x).take(10_000);\n\n let mut vec = Vec::from_iter(iter);\n vec[5_000] = 424242;\n })\n}\n\n#[bench]\nfn rle_set_middle_same_value_10_000_equal_values(b: &mut Bencher) {\n b.iter(|| {\n let mut rle = RleVec::from_iter(repeat(0).take(10_000));\n rle.set(5_000, 0);\n })\n}\n\n#[bench]\nfn vec_set_middle_same_value_10_000_equal_values(b: &mut Bencher) {\n b.iter(|| {\n let mut vec = Vec::from_iter(repeat(0).take(10_000));\n vec[5_000] = 0;\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a cross-crate test for casting classes to ifaces<commit_after>import to_str::*;\nimport to_str::to_str;\n\nclass cat implements to_str {\n priv {\n let mut meows : uint;\n fn meow() {\n #error(\"Meow\");\n self.meows += 1u;\n if self.meows % 5u == 0u {\n self.how_hungry += 1;\n }\n }\n }\n\n let mut how_hungry : int;\n let name : str;\n\n new(in_x : uint, in_y : int, in_name: str)\n { self.meows = in_x; self.how_hungry = in_y; self.name = in_name; }\n\n fn speak() { self.meow(); }\n\n fn eat() -> bool {\n if self.how_hungry > 0 {\n #error(\"OM NOM NOM\");\n self.how_hungry -= 2;\n ret true;\n }\n else {\n #error(\"Not hungry!\");\n ret false;\n }\n }\n\n fn to_str() -> str { self.name }\n}\n\nfn print_out<T: to_str>(thing: T, expected: str) {\n let actual = thing.to_str();\n #debug(\"%s\", actual);\n assert(actual == expected);\n}\n\nfn main() {\n let nyan : to_str = cat(0u, 2, \"nyan\") as to_str;\n print_out(nyan, \"nyan\");\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ffi::CString;\nuse std::ptr;\n\nuse libc::c_uint;\nuse rustc::middle::allocator::AllocatorKind;\nuse rustc::ty::TyCtxt;\nuse rustc_allocator::{ALLOCATOR_METHODS, AllocatorTy};\n\nuse ModuleLlvm;\nuse llvm::{self, False, True};\n\npub(crate) unsafe fn trans(tcx: TyCtxt, mods: &ModuleLlvm, kind: AllocatorKind) {\n let llcx = mods.llcx;\n let llmod = mods.llmod;\n let usize = match &tcx.sess.target.target.target_pointer_width[..] {\n \"16\" => llvm::LLVMInt16TypeInContext(llcx),\n \"32\" => llvm::LLVMInt32TypeInContext(llcx),\n \"64\" => llvm::LLVMInt64TypeInContext(llcx),\n tws => bug!(\"Unsupported target word size for int: {}\", tws),\n };\n let i8 = llvm::LLVMInt8TypeInContext(llcx);\n let i8p = llvm::LLVMPointerType(i8, 0);\n let void = llvm::LLVMVoidTypeInContext(llcx);\n\n for method in ALLOCATOR_METHODS {\n let mut args = Vec::new();\n for ty in method.inputs.iter() {\n match *ty {\n AllocatorTy::Layout => {\n args.push(usize); \/\/ size\n args.push(usize); \/\/ align\n }\n AllocatorTy::Ptr => args.push(i8p),\n AllocatorTy::Usize => args.push(usize),\n\n AllocatorTy::ResultPtr |\n AllocatorTy::Unit => panic!(\"invalid allocator arg\"),\n }\n }\n let output = match method.output {\n AllocatorTy::ResultPtr => Some(i8p),\n AllocatorTy::Unit => None,\n\n AllocatorTy::Layout |\n AllocatorTy::Usize |\n AllocatorTy::Ptr => panic!(\"invalid allocator output\"),\n };\n let ty = llvm::LLVMFunctionType(output.unwrap_or(void),\n args.as_ptr(),\n args.len() as c_uint,\n False);\n let name = CString::new(format!(\"__rust_{}\", method.name)).unwrap();\n let llfn = llvm::LLVMRustGetOrInsertFunction(llmod,\n name.as_ptr(),\n ty);\n\n if tcx.sess.target.target.options.default_hidden_visibility {\n llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);\n }\n\n let callee = CString::new(kind.fn_name(method.name)).unwrap();\n let callee = llvm::LLVMRustGetOrInsertFunction(llmod,\n callee.as_ptr(),\n ty);\n\n let llbb = llvm::LLVMAppendBasicBlockInContext(llcx,\n llfn,\n \"entry\\0\".as_ptr() as *const _);\n\n let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);\n llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);\n let args = args.iter().enumerate().map(|(i, _)| {\n llvm::LLVMGetParam(llfn, i as c_uint)\n }).collect::<Vec<_>>();\n let ret = llvm::LLVMRustBuildCall(llbuilder,\n callee,\n args.as_ptr(),\n args.len() as c_uint,\n ptr::null_mut(),\n \"\\0\".as_ptr() as *const _);\n llvm::LLVMSetTailCall(ret, True);\n if output.is_some() {\n llvm::LLVMBuildRet(llbuilder, ret);\n } else {\n llvm::LLVMBuildRetVoid(llbuilder);\n }\n llvm::LLVMDisposeBuilder(llbuilder);\n }\n}\n<commit_msg>rustc: Emit `uwtable` for allocator shims<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ffi::CString;\nuse std::ptr;\n\nuse attributes;\nuse libc::c_uint;\nuse rustc::middle::allocator::AllocatorKind;\nuse rustc::ty::TyCtxt;\nuse rustc_allocator::{ALLOCATOR_METHODS, AllocatorTy};\n\nuse ModuleLlvm;\nuse llvm::{self, False, True};\n\npub(crate) unsafe fn trans(tcx: TyCtxt, mods: &ModuleLlvm, kind: AllocatorKind) {\n let llcx = mods.llcx;\n let llmod = mods.llmod;\n let usize = match &tcx.sess.target.target.target_pointer_width[..] {\n \"16\" => llvm::LLVMInt16TypeInContext(llcx),\n \"32\" => llvm::LLVMInt32TypeInContext(llcx),\n \"64\" => llvm::LLVMInt64TypeInContext(llcx),\n tws => bug!(\"Unsupported target word size for int: {}\", tws),\n };\n let i8 = llvm::LLVMInt8TypeInContext(llcx);\n let i8p = llvm::LLVMPointerType(i8, 0);\n let void = llvm::LLVMVoidTypeInContext(llcx);\n\n for method in ALLOCATOR_METHODS {\n let mut args = Vec::new();\n for ty in method.inputs.iter() {\n match *ty {\n AllocatorTy::Layout => {\n args.push(usize); \/\/ size\n args.push(usize); \/\/ align\n }\n AllocatorTy::Ptr => args.push(i8p),\n AllocatorTy::Usize => args.push(usize),\n\n AllocatorTy::ResultPtr |\n AllocatorTy::Unit => panic!(\"invalid allocator arg\"),\n }\n }\n let output = match method.output {\n AllocatorTy::ResultPtr => Some(i8p),\n AllocatorTy::Unit => None,\n\n AllocatorTy::Layout |\n AllocatorTy::Usize |\n AllocatorTy::Ptr => panic!(\"invalid allocator output\"),\n };\n let ty = llvm::LLVMFunctionType(output.unwrap_or(void),\n args.as_ptr(),\n args.len() as c_uint,\n False);\n let name = CString::new(format!(\"__rust_{}\", method.name)).unwrap();\n let llfn = llvm::LLVMRustGetOrInsertFunction(llmod,\n name.as_ptr(),\n ty);\n\n if tcx.sess.target.target.options.default_hidden_visibility {\n llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);\n }\n if tcx.sess.target.target.options.requires_uwtable {\n attributes::emit_uwtable(llfn, true);\n }\n\n let callee = CString::new(kind.fn_name(method.name)).unwrap();\n let callee = llvm::LLVMRustGetOrInsertFunction(llmod,\n callee.as_ptr(),\n ty);\n\n let llbb = llvm::LLVMAppendBasicBlockInContext(llcx,\n llfn,\n \"entry\\0\".as_ptr() as *const _);\n\n let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);\n llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);\n let args = args.iter().enumerate().map(|(i, _)| {\n llvm::LLVMGetParam(llfn, i as c_uint)\n }).collect::<Vec<_>>();\n let ret = llvm::LLVMRustBuildCall(llbuilder,\n callee,\n args.as_ptr(),\n args.len() as c_uint,\n ptr::null_mut(),\n \"\\0\".as_ptr() as *const _);\n llvm::LLVMSetTailCall(ret, True);\n if output.is_some() {\n llvm::LLVMBuildRet(llbuilder, ret);\n } else {\n llvm::LLVMBuildRetVoid(llbuilder);\n }\n llvm::LLVMDisposeBuilder(llbuilder);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::{CStr, CString};\n\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features must be known to LLVM or the feature\n\/\/ detection code will walk past the end of the feature array,\n\/\/ leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\", \"vfp2\", \"vfp3\", \"vfp4\"];\n\nconst AARCH64_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"avx\", \"avx2\", \"bmi\", \"bmi2\", \"sse\",\n \"sse2\", \"sse3\", \"sse4.1\", \"sse4.2\",\n \"ssse3\", \"tbm\", \"lzcnt\", \"popcnt\",\n \"sse4a\", \"rdrnd\", \"rdseed\", \"fma\",\n \"xsave\", \"xsaveopt\", \"xsavec\",\n \"xsaves\", \"aes\", \"pclmulqdq\",\n \"avx512bw\", \"avx512cd\",\n \"avx512dq\", \"avx512er\",\n \"avx512f\", \"avx512ifma\",\n \"avx512pf\", \"avx512vbmi\",\n \"avx512vl\", \"avx512vpopcntdq\",\n \"mmx\", \"fxsr\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\", \"hvx-double\"];\n\nconst POWERPC_WHITELIST: &'static [&'static str] = &[\"altivec\",\n \"power8-altivec\", \"power9-altivec\",\n \"power8-vector\", \"power9-vector\",\n \"vsx\"];\n\nconst MIPS_WHITELIST: &'static [&'static str] = &[\"msa\"];\n\npub fn to_llvm_feature(s: &str) -> &str {\n match s {\n \"pclmulqdq\" => \"pclmul\",\n s => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n target_feature_whitelist(sess)\n .iter()\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(feature);\n let ptr = CString::new(llvm_feature).as_ptr();\n unsafe { llvm::LLVMRustHasFeature(target_machine, ptr) }\n })\n .map(Symbol::intern).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {\n let whitelist = match &*sess.target.target.arch {\n \"arm\" => 
ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n<commit_msg>fixed errors<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::CString;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features must be known to LLVM or the feature\n\/\/ detection code will walk past the end of the feature array,\n\/\/ leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\", \"vfp2\", \"vfp3\", \"vfp4\"];\n\nconst AARCH64_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"avx\", \"avx2\", \"bmi\", 
\"bmi2\", \"sse\",\n \"sse2\", \"sse3\", \"sse4.1\", \"sse4.2\",\n \"ssse3\", \"tbm\", \"lzcnt\", \"popcnt\",\n \"sse4a\", \"rdrnd\", \"rdseed\", \"fma\",\n \"xsave\", \"xsaveopt\", \"xsavec\",\n \"xsaves\", \"aes\", \"pclmulqdq\",\n \"avx512bw\", \"avx512cd\",\n \"avx512dq\", \"avx512er\",\n \"avx512f\", \"avx512ifma\",\n \"avx512pf\", \"avx512vbmi\",\n \"avx512vl\", \"avx512vpopcntdq\",\n \"mmx\", \"fxsr\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\", \"hvx-double\"];\n\nconst POWERPC_WHITELIST: &'static [&'static str] = &[\"altivec\",\n \"power8-altivec\", \"power9-altivec\",\n \"power8-vector\", \"power9-vector\",\n \"vsx\"];\n\nconst MIPS_WHITELIST: &'static [&'static str] = &[\"msa\"];\n\npub fn to_llvm_feature(s: &str) -> &str {\n match s {\n \"pclmulqdq\" => \"pclmul\",\n s => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n target_feature_whitelist(sess)\n .iter()\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(feature);\n let ptr = CString::new(llvm_feature).unwrap().as_ptr();\n unsafe { llvm::LLVMRustHasFeature(target_machine, ptr) }\n })\n .map(|feature| Symbol::intern(feature)).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {\n match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Regression test for #9951<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(unused_variables)]\n\ntrait Bar {\n fn noop(&self);\n}\nimpl Bar for u8 {\n fn noop(&self) {}\n}\n\nfn main() {\n let (a, b) = (&5u8 as &Bar, &9u8 as &Bar);\n let (c, d): (&Bar, &Bar) = (a, b);\n\n let (a, b) = (Box::new(5u8) as Box<Bar>, Box::new(9u8) as Box<Bar>);\n let (c, d): (&Bar, &Bar) = (&*a, &*b);\n\n let (c, d): (&Bar, &Bar) = (&5, &9);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This function takes ownership of a box and destroys it \nfn eat_box(boxed_int: Box<i32>) {\n println!(\"Destroying box that contains {}\", boxed_int);\n}\n\n\/\/ This function borrows an i32\nfn borrow_box(borrowed_int: &i32) {\n println!(\"This int is: {}\", borrowed_int);\n}\n\nfn main() {\n \/\/ Create a boxed integer\n let boxed_int = Box::new(5);\n\n \/\/ Borrow the contents of the box. Ownership is not taken,\n \/\/ so the contents can be borrowed again.\n borrow_box(&boxed_int);\n borrow_box(&boxed_int);\n\n {\n \/\/ Take a reference to the data contained inside the box\n let _ref_to_int: &i32 = &boxed_int;\n\n \/\/ Error! \n \/\/ Can't destroy `boxed_int` while the inner value is borrowed.\n eat_box(boxed_int);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `_ref_to_int` goes out of scope and is no longer borrowed.\n }\n\n \/\/ Box can now give up ownership to `eat_box` and be destroyed\n eat_box(boxed_int);\n}\n<commit_msg>More emphasis on the type system<commit_after>\/\/ This function takes ownership of a box and destroys it\nfn eat_box_i32(boxed_i32: Box<i32>) {\n println!(\"Destroying box that contains {}\", boxed_i32);\n}\n\n\/\/ This function borrows an i32\nfn borrow_i32(borrowed_i32: &i32) {\n println!(\"This int is: {}\", borrowed_i32);\n}\n\nfn main() {\n \/\/ Create a boxed i32, and a stacked i32\n let boxed_i32 = Box::new(5_i32);\n let stacked_i32 = 6_i32;\n\n \/\/ Borrow the contents of the box. 
Ownership is not taken,\n \/\/ so the contents can be borrowed again.\n borrow_i32(&boxed_i32);\n borrow_i32(&stacked_i32);\n\n {\n \/\/ Take a reference to the data contained inside the box\n let _ref_to_i32: &i32 = &boxed_i32;\n\n \/\/ Error!\n \/\/ Can't destroy `boxed_i32` while the inner value is borrowed.\n eat_box_i32(boxed_i32);\n \/\/ FIXME ^ Comment out this line\n\n \/\/ `_ref_to_i32` goes out of scope and is no longer borrowed.\n }\n\n \/\/ `boxed_i32` can now give up ownership to `eat_box` and be destroyed\n eat_box_i32(boxed_i32);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Euler p5<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added networked example<commit_after>#![feature(hash)]\n#![feature(core)]\n#![feature(alloc)]\n#![feature(std_misc)]\n#![feature(unsafe_destructor)]\n\n\nextern crate timely;\nextern crate core;\n\nextern crate docopt;\nuse docopt::Docopt;\n\nuse timely::communication::Communicator;\nuse timely::communication::channels::Data;\nuse timely::progress::{Timestamp, PathSummary};\nuse timely::progress::subgraph::{Subgraph, Summary, new_graph};\nuse timely::progress::broadcast::Progcaster;\nuse timely::progress::subgraph::Summary::Local;\nuse timely::progress::scope::Scope;\nuse timely::progress::graph::{Graph, GraphExtension};\nuse timely::example::input::InputExtensionTrait;\nuse timely::example::concat::ConcatExtensionTrait;\nuse timely::example::feedback::FeedbackExtensionTrait;\nuse timely::example::distinct::DistinctExtensionTrait;\nuse timely::example::stream::Stream;\nuse timely::example::graph_builder::GraphBoundary;\nuse timely::networking::initialize_networking;\n\nuse core::fmt::Debug;\nuse std::thread::Thread;\n\nuse std::rc::{Rc, try_unwrap};\nuse std::cell::RefCell;\nuse std::hash::{Hash, SipHasher};\n\nstatic USAGE: &'static str = \"\nUsage: networked [options] [<arguments>...]\n\nOptions:\n -t <arg>, --threads <arg> number of threads per worker [default: 1]\n -p <arg>, --processid <arg> identity of this process [default: 0]\n -n <arg>, --processes <arg> number of processes involved [default: 1]\n\";\n\nfn main() {\n let args = Docopt::new(USAGE).and_then(|dopt| dopt.parse()).unwrap_or_else(|e| e.exit());\n\n let threads: u64 = if let Ok(threads) = args.get_str(\"-t\").parse() { threads }\n else { panic!(\"invalid setting for --threads: {}\", args.get_str(\"-t\")) };\n let process_id: u64 = if let Ok(proc_id) = args.get_str(\"-p\").parse() { proc_id }\n else { panic!(\"invalid setting for --processid: {}\", args.get_str(\"-p\")) };\n let processes: u64 = if let Ok(processes) = args.get_str(\"-n\").parse() { processes }\n else { panic!(\"invalid setting for --processes: {}\", args.get_str(\"-n\")) };\n\n let addresses = range(0, processes).map(|index| format!(\"localhost:{}\", 2101 + index).to_string()).collect();\n let network_communicator = initialize_networking(addresses, process_id, threads).ok().expect(\"error initializing networking\");\n\n let mut guards = Vec::new();\n for communicator in network_communicator.into_iter() {\n guards.push(Thread::scoped(move || _queue(communicator)));\n }\n}\n\n\nfn _queue(allocator: Communicator) {\n let allocator = Rc::new(RefCell::new(allocator));\n \/\/ no \"base scopes\" yet, so the root pretends to be a subscope of some parent with a () timestamp type.\n let mut graph = new_graph(Progcaster::new(&mut (*allocator.borrow_mut())));\n\n \/\/ try building some input scopes\n let (mut input1, mut stream1) = graph.new_input::<u64>(allocator.clone());\n let (mut input2, mut 
stream2) = graph.new_input::<u64>(allocator.clone());\n\n \/\/ prepare some feedback edges\n let (mut feedback1, mut feedback1_output) = stream1.feedback(((), 1000), Local(1));\n let (mut feedback2, mut feedback2_output) = stream2.feedback(((), 1000), Local(1));\n\n \/\/ build up a subgraph using the concatenated inputs\/feedbacks\n let progcaster = Progcaster::new(&mut (*allocator.borrow_mut()));\n let (mut egress1, mut egress2) = _create_subgraph(&mut graph.clone(),\n &mut stream1.concat(&mut feedback1_output),\n &mut stream2.concat(&mut feedback2_output),\n progcaster);\n\n \/\/ connect feedback sources. notice that we have swapped indices ...\n feedback1.connect_input(&mut egress2);\n feedback2.connect_input(&mut egress1);\n\n \/\/ finalize the graph\/subgraph\n graph.borrow_mut().get_internal_summary();\n graph.borrow_mut().set_external_summary(Vec::new(), &mut Vec::new());\n\n \/\/ do one round of push progress, pull progress ...\n graph.borrow_mut().push_external_progress(&mut Vec::new());\n graph.borrow_mut().pull_internal_progress(&mut Vec::new(), &mut Vec::new(), &mut Vec::new());\n\n \/\/ move some data into the dataflow graph.\n input1.send_messages(&((), 0), vec![1u64]);\n input2.send_messages(&((), 0), vec![2u64]);\n\n \/\/ see what everyone thinks about that ...\n graph.borrow_mut().pull_internal_progress(&mut Vec::new(), &mut Vec::new(), &mut Vec::new());\n\n input1.advance(&((), 0), &((), 1000000));\n input2.advance(&((), 0), &((), 1000000));\n input1.close_at(&((), 1000000));\n input2.close_at(&((), 1000000));\n\n \/\/ spin\n while graph.borrow_mut().pull_internal_progress(&mut Vec::new(), &mut Vec::new(), &mut Vec::new()) { }\n}\n\nfn _create_subgraph<T1, T2, S1, S2, D>(graph: &mut Rc<RefCell<Subgraph<T1, S1, T2, S2>>>,\n source1: &mut Stream<(T1, T2), Summary<S1, S2>, D>,\n source2: &mut Stream<(T1, T2), Summary<S1, S2>, D>,\n progcaster: Progcaster<((T1,T2),u64)>)\n -> (Stream<(T1, T2), Summary<S1, S2>, D>, Stream<(T1, T2), Summary<S1, S2>, D>)\nwhere T1: Timestamp, S1: PathSummary<T1>,\n T2: Timestamp, S2: PathSummary<T2>,\n D: Data+Hash<SipHasher>+Eq+Debug,\n{\n \/\/ build up a subgraph using the concatenated inputs\/feedbacks\n let mut subgraph = graph.new_subgraph::<_, u64>(0, progcaster);\n\n let (sub_egress1, sub_egress2) = {\n \/\/ create new ingress nodes, passing in a reference to the subgraph for them to use.\n let mut sub_ingress1 = subgraph.add_input(source1);\n let mut sub_ingress2 = subgraph.add_input(source2);\n\n \/\/ putting a distinct scope into the subgraph!\n let mut queue = sub_ingress1.distinct();\n\n \/\/ egress each of the streams from the subgraph.\n let sub_egress1 = subgraph.add_output_to_graph(&mut queue, graph.as_box());\n let sub_egress2 = subgraph.add_output_to_graph(&mut sub_ingress2, graph.as_box());\n\n (sub_egress1, sub_egress2)\n };\n\n \/\/ sort of a mess, but the way to get the subgraph out of the Rc<RefCell<...>>.\n \/\/ will explode if anyone else is still sitting on a reference to subgraph.\n graph.add_scope(try_unwrap(subgraph).ok().expect(\"hm\").into_inner());\n\n return (sub_egress1, sub_egress2);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for sieve case<commit_after>pub fn primes_up_to(limit: u32) -> Vec<u32> {\n if limit < 2 {\n return Vec::new();\n }\n\n let upper = (limit as f64).sqrt().ceil() as usize;\n\n let mut board = vec![true; limit as usize - 1];\n\n for i in 2..upper {\n if !board[i - 2] {\n continue;\n }\n\n let mut v = i * i;\n\n while v <= limit as usize {\n board[v - 2] = 
false;\n\n v += i;\n }\n }\n\n (2..limit + 1)\n .filter(|&x| board[x as usize - 2])\n .collect::<Vec<u32>>()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement send_server_packet and add test.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Impossible usage of `rust-scrypt` on Windows OS due to mmap call Added conditional dependency<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Type Names for Debug Info.\n\nuse common::CrateContext;\nuse rustc::hir::def_id::DefId;\nuse rustc::ty::subst::Substs;\nuse rustc::ty::{self, Ty};\n\nuse rustc::hir;\n\n\/\/ Compute the name of the type as it should be stored in debuginfo. Does not do\n\/\/ any caching, i.e. calling the function twice with the same type will also do\n\/\/ the work twice. The `qualified` parameter only affects the first level of the\n\/\/ type name, further levels (i.e. type parameters) are always fully qualified.\npub fn compute_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n t: Ty<'tcx>,\n qualified: bool)\n -> String {\n let mut result = String::with_capacity(64);\n push_debuginfo_type_name(cx, t, qualified, &mut result);\n result\n}\n\n\/\/ Pushes the name of the type as it should be stored in debuginfo on the\n\/\/ `output` String. See also compute_debuginfo_type_name().\npub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n t: Ty<'tcx>,\n qualified: bool,\n output: &mut String) {\n match t.sty {\n ty::TyBool => output.push_str(\"bool\"),\n ty::TyChar => output.push_str(\"char\"),\n ty::TyStr => output.push_str(\"str\"),\n ty::TyNever => output.push_str(\"!\"),\n ty::TyInt(int_ty) => output.push_str(int_ty.ty_to_string()),\n ty::TyUint(uint_ty) => output.push_str(uint_ty.ty_to_string()),\n ty::TyFloat(float_ty) => output.push_str(float_ty.ty_to_string()),\n ty::TyAdt(def, substs) => {\n push_item_name(cx, def.did, qualified, output);\n push_type_params(cx, substs, output);\n },\n ty::TyTuple(component_types, _) => {\n output.push('(');\n for &component_type in component_types {\n push_debuginfo_type_name(cx, component_type, true, output);\n output.push_str(\", \");\n }\n if !component_types.is_empty() {\n output.pop();\n output.pop();\n }\n output.push(')');\n },\n ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => {\n let is_like_msvc = cx.sess().target.target.options.is_like_msvc;\n\n if !is_like_msvc {output.push('*');}\n match mutbl {\n hir::MutImmutable => output.push_str(\"const \"),\n hir::MutMutable => output.push_str(\"mut \"),\n }\n\n push_debuginfo_type_name(cx, inner_type, true, output);\n if is_like_msvc {output.push('*');}\n },\n ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => {\n let is_like_msvc = cx.sess().target.target.options.is_like_msvc;\n\n if !is_like_msvc {output.push('&');}\n if mutbl == hir::MutMutable {\n output.push_str(\"mut \");\n }\n\n push_debuginfo_type_name(cx, inner_type, true, output);\n if is_like_msvc {output.push('*');}\n },\n ty::TyArray(inner_type, len) => {\n output.push('[');\n 
push_debuginfo_type_name(cx, inner_type, true, output);\n output.push_str(&format!(\"; {}\", len));\n output.push(']');\n },\n ty::TySlice(inner_type) => {\n let is_like_msvc = cx.sess().target.target.options.is_like_msvc;\n output.push_str(if is_like_msvc {\"slice<\"} else {\"[\"});\n push_debuginfo_type_name(cx, inner_type, true, output);\n output.push(if is_like_msvc {'>'} else {']'});\n },\n ty::TyDynamic(ref trait_data, ..) => {\n if let Some(principal) = trait_data.principal() {\n let principal = cx.tcx().erase_late_bound_regions_and_normalize(\n &principal);\n push_item_name(cx, principal.def_id, false, output);\n push_type_params(cx, principal.substs, output);\n }\n },\n ty::TyFnDef(..) | ty::TyFnPtr(_) => {\n let sig = t.fn_sig(cx.tcx());\n if sig.unsafety() == hir::Unsafety::Unsafe {\n output.push_str(\"unsafe \");\n }\n\n let abi = sig.abi();\n if abi != ::abi::Abi::Rust {\n output.push_str(\"extern \\\"\");\n output.push_str(abi.name());\n output.push_str(\"\\\" \");\n }\n\n output.push_str(\"fn(\");\n\n let sig = cx.tcx().erase_late_bound_regions_and_normalize(&sig);\n if !sig.inputs().is_empty() {\n for ¶meter_type in sig.inputs() {\n push_debuginfo_type_name(cx, parameter_type, true, output);\n output.push_str(\", \");\n }\n output.pop();\n output.pop();\n }\n\n if sig.variadic {\n if !sig.inputs().is_empty() {\n output.push_str(\", ...\");\n } else {\n output.push_str(\"...\");\n }\n }\n\n output.push(')');\n\n if !sig.output().is_nil() {\n output.push_str(\" -> \");\n push_debuginfo_type_name(cx, sig.output(), true, output);\n }\n },\n ty::TyClosure(..) => {\n output.push_str(\"closure\");\n }\n ty::TyError |\n ty::TyInfer(_) |\n ty::TyProjection(..) |\n ty::TyAnon(..) |\n ty::TyParam(_) => {\n bug!(\"debuginfo: Trying to create type name for \\\n unexpected type: {:?}\", t);\n }\n }\n\n fn push_item_name(cx: &CrateContext,\n def_id: DefId,\n qualified: bool,\n output: &mut String) {\n if qualified {\n output.push_str(&cx.tcx().crate_name(def_id.krate).as_str());\n for path_element in cx.tcx().def_path(def_id).data {\n output.push_str(\"::\");\n output.push_str(&path_element.data.as_interned_str());\n }\n } else {\n output.push_str(&cx.tcx().item_name(def_id).as_str());\n }\n }\n\n \/\/ Pushes the type parameters in the given `Substs` to the output string.\n \/\/ This ignores region parameters, since they can't reliably be\n \/\/ reconstructed for items from non-local crates. For local crates, this\n \/\/ would be possible but with inlining and LTO we have to use the least\n \/\/ common denominator - otherwise we would run into conflicts.\n fn push_type_params<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n substs: &Substs<'tcx>,\n output: &mut String) {\n if substs.types().next().is_none() {\n return;\n }\n\n output.push('<');\n\n for type_parameter in substs.types() {\n push_debuginfo_type_name(cx, type_parameter, true, output);\n output.push_str(\", \");\n }\n\n output.pop();\n output.pop();\n\n output.push('>');\n }\n}\n<commit_msg>Expand one-liners, rename is_like_msvc to cpp_like_names and explain.<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Type Names for Debug Info.\n\nuse common::CrateContext;\nuse rustc::hir::def_id::DefId;\nuse rustc::ty::subst::Substs;\nuse rustc::ty::{self, Ty};\n\nuse rustc::hir;\n\n\/\/ Compute the name of the type as it should be stored in debuginfo. Does not do\n\/\/ any caching, i.e. calling the function twice with the same type will also do\n\/\/ the work twice. The `qualified` parameter only affects the first level of the\n\/\/ type name, further levels (i.e. type parameters) are always fully qualified.\npub fn compute_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n t: Ty<'tcx>,\n qualified: bool)\n -> String {\n let mut result = String::with_capacity(64);\n push_debuginfo_type_name(cx, t, qualified, &mut result);\n result\n}\n\n\/\/ Pushes the name of the type as it should be stored in debuginfo on the\n\/\/ `output` String. See also compute_debuginfo_type_name().\npub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n t: Ty<'tcx>,\n qualified: bool,\n output: &mut String) {\n \/\/ When targeting MSVC, emit C++ style type names for compatability with\n \/\/ .natvis visualizers (and perhaps other existing native debuggers?)\n let cpp_like_names = cx.sess().target.target.options.is_like_msvc;\n\n match t.sty {\n ty::TyBool => output.push_str(\"bool\"),\n ty::TyChar => output.push_str(\"char\"),\n ty::TyStr => output.push_str(\"str\"),\n ty::TyNever => output.push_str(\"!\"),\n ty::TyInt(int_ty) => output.push_str(int_ty.ty_to_string()),\n ty::TyUint(uint_ty) => output.push_str(uint_ty.ty_to_string()),\n ty::TyFloat(float_ty) => output.push_str(float_ty.ty_to_string()),\n ty::TyAdt(def, substs) => {\n push_item_name(cx, def.did, qualified, output);\n push_type_params(cx, substs, output);\n },\n ty::TyTuple(component_types, _) => {\n output.push('(');\n for &component_type in component_types {\n push_debuginfo_type_name(cx, component_type, true, output);\n output.push_str(\", \");\n }\n if !component_types.is_empty() {\n output.pop();\n output.pop();\n }\n output.push(')');\n },\n ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => {\n if !cpp_like_names {\n output.push('*');\n }\n match mutbl {\n hir::MutImmutable => output.push_str(\"const \"),\n hir::MutMutable => output.push_str(\"mut \"),\n }\n\n push_debuginfo_type_name(cx, inner_type, true, output);\n\n if cpp_like_names {\n output.push('*');\n }\n },\n ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => {\n if !cpp_like_names {\n output.push('&');\n }\n if mutbl == hir::MutMutable {\n output.push_str(\"mut \");\n }\n\n push_debuginfo_type_name(cx, inner_type, true, output);\n\n if cpp_like_names {\n output.push('*');\n }\n },\n ty::TyArray(inner_type, len) => {\n output.push('[');\n push_debuginfo_type_name(cx, inner_type, true, output);\n output.push_str(&format!(\"; {}\", len));\n output.push(']');\n },\n ty::TySlice(inner_type) => {\n if cpp_like_names {\n output.push_str(\"slice<\");\n } else {\n output.push('[');\n }\n\n push_debuginfo_type_name(cx, inner_type, true, output);\n\n if cpp_like_names {\n output.push('>');\n } else {\n output.push(']');\n }\n },\n ty::TyDynamic(ref trait_data, ..) => {\n if let Some(principal) = trait_data.principal() {\n let principal = cx.tcx().erase_late_bound_regions_and_normalize(\n &principal);\n push_item_name(cx, principal.def_id, false, output);\n push_type_params(cx, principal.substs, output);\n }\n },\n ty::TyFnDef(..) 
| ty::TyFnPtr(_) => {\n let sig = t.fn_sig(cx.tcx());\n if sig.unsafety() == hir::Unsafety::Unsafe {\n output.push_str(\"unsafe \");\n }\n\n let abi = sig.abi();\n if abi != ::abi::Abi::Rust {\n output.push_str(\"extern \\\"\");\n output.push_str(abi.name());\n output.push_str(\"\\\" \");\n }\n\n output.push_str(\"fn(\");\n\n let sig = cx.tcx().erase_late_bound_regions_and_normalize(&sig);\n if !sig.inputs().is_empty() {\n for ¶meter_type in sig.inputs() {\n push_debuginfo_type_name(cx, parameter_type, true, output);\n output.push_str(\", \");\n }\n output.pop();\n output.pop();\n }\n\n if sig.variadic {\n if !sig.inputs().is_empty() {\n output.push_str(\", ...\");\n } else {\n output.push_str(\"...\");\n }\n }\n\n output.push(')');\n\n if !sig.output().is_nil() {\n output.push_str(\" -> \");\n push_debuginfo_type_name(cx, sig.output(), true, output);\n }\n },\n ty::TyClosure(..) => {\n output.push_str(\"closure\");\n }\n ty::TyError |\n ty::TyInfer(_) |\n ty::TyProjection(..) |\n ty::TyAnon(..) |\n ty::TyParam(_) => {\n bug!(\"debuginfo: Trying to create type name for \\\n unexpected type: {:?}\", t);\n }\n }\n\n fn push_item_name(cx: &CrateContext,\n def_id: DefId,\n qualified: bool,\n output: &mut String) {\n if qualified {\n output.push_str(&cx.tcx().crate_name(def_id.krate).as_str());\n for path_element in cx.tcx().def_path(def_id).data {\n output.push_str(\"::\");\n output.push_str(&path_element.data.as_interned_str());\n }\n } else {\n output.push_str(&cx.tcx().item_name(def_id).as_str());\n }\n }\n\n \/\/ Pushes the type parameters in the given `Substs` to the output string.\n \/\/ This ignores region parameters, since they can't reliably be\n \/\/ reconstructed for items from non-local crates. For local crates, this\n \/\/ would be possible but with inlining and LTO we have to use the least\n \/\/ common denominator - otherwise we would run into conflicts.\n fn push_type_params<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,\n substs: &Substs<'tcx>,\n output: &mut String) {\n if substs.types().next().is_none() {\n return;\n }\n\n output.push('<');\n\n for type_parameter in substs.types() {\n push_debuginfo_type_name(cx, type_parameter, true, output);\n output.push_str(\", \");\n }\n\n output.pop();\n output.pop();\n\n output.push('>');\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Reorder spanmap module.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>clippy<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate rand;\nextern crate timely;\nextern crate timely_sort;\nextern crate differential_dataflow;\nextern crate vec_map;\n\nuse timely::dataflow::*;\nuse timely::dataflow::operators::*;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse differential_dataflow::AsCollection;\nuse differential_dataflow::operators::arrange::ArrangeByKey;\nuse differential_dataflow::trace::{Cursor, Trace};\n\nfn main() {\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n \/\/ define a new timely dataflow computation. 
\n timely::execute_from_args(std::env::args().skip(4), move |worker| {\n\n \tlet index = worker.index();\n \tlet peers = worker.peers();\n\n \t\/\/ create a a degree counting differential dataflow\n \tlet (mut input, probe, trace) = worker.scoped(|scope| {\n\n \t\t\/\/ create edge input, count a few ways.\n \t\tlet (input, edges) = scope.new_input();\n\n \t\t\/\/ pull off source, and count.\n \t\tlet arranged = edges.as_collection()\n \t\t\t\t\t\t\t.arrange_by_key_hashed();\n\n\t\t (input, arranged.stream.probe().0, arranged.trace.clone())\n \t});\n\n let seed: &[_] = &[1, 2, 3, index];\n let mut rng1: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge additions\n let mut rng2: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge additions\n\n \/\/ load up graph dataz\n let &time = input.time();\n for edge in 0..edges {\n \tif edge % peers == index {\n \t\tinput.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), time, 1));\n \t}\n\n \t\/\/ move the data along a bit\n \tif edge % 10000 == 9999 {\n \t\tworker.step();\n \t}\n\t\t}\n\n\t\tlet timer = ::std::time::Instant::now();\n\n\t\tinput.advance_to(1);\n\t\tworker.step_while(|| probe.lt(input.time()));\n\n\t\tif index == 0 {\n\t\t\tlet timer = timer.elapsed();\n\t\t\tlet nanos = timer.as_secs() * 1000000000 + timer.subsec_nanos() as u64;\n\t\t\tprintln!(\"Loading finished after {:?}\", nanos);\n\t\t}\n\n\t\t\/\/ change graph, forever\n\t\tif batch > 0 {\n\n\t\t\tfor edge in 0usize .. {\n\t\t\t\tlet &time = input.time();\n\t\t\t\tif edge % peers == index {\n\t \t\tinput.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), time, 1));\n\t \t\tinput.send(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), time,-1));\n\t\t\t\t}\n\n\t \tif edge % batch == (batch - 1) {\n\n\t \t\tlet timer = ::std::time::Instant::now();\n\n\t \t\tlet next = input.epoch() + 1;\n\t \t\tinput.advance_to(next);\n\t\t\t\t\tworker.step_while(|| probe.lt(input.time()));\n\n\t\t\t\t\tif index == 0 {\n\t\t\t\t\t\tlet timer = timer.elapsed();\n\t\t\t\t\t\tlet nanos = timer.as_secs() * 1000000000 + timer.subsec_nanos() as u64;\n\t\t\t\t\t\tprintln!(\"Round {} finished after {:?}\", next - 1, nanos);\n\n\t\t\t\t\t\tlet mut count = 0;\n\t\t \t\tlet timer = ::std::time::Instant::now();\n\t\t \t\tlet mut cursor = trace.cursor();\n\t\t \t\twhile cursor.key_valid() {\n\t\t \t\t\twhile cursor.val_valid() {\n\t\t\t\t\t\t\t\tlet mut sum = 0;\t\t \t\t\t\t\n\t\t\t\t\t\t\t\tcursor.map_times(|_,d| sum += d);\n\t\t\t\t\t\t\t\tif sum > 0 { count += 1; }\n\t\t\t\t\t\t\t\tcursor.step_val();\n\t\t \t\t\t}\n\n\t\t \t\t\tcursor.step_key()\n\t\t \t\t}\n\n\t\t \t\tprintln!(\"count: {} in {:?}\", count, timer.elapsed());\n\t\t\t\t\t}\n\t \t}\n\t }\n\t }\n\n }).unwrap();\n}<commit_msg>removed unused reference<commit_after>extern crate rand;\nextern crate timely;\nextern crate timely_sort;\nextern crate differential_dataflow;\nextern crate vec_map;\n\nuse timely::dataflow::*;\nuse timely::dataflow::operators::*;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse differential_dataflow::AsCollection;\nuse differential_dataflow::operators::arrange::ArrangeByKey;\nuse differential_dataflow::trace::Cursor;\n\nfn main() {\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n \/\/ define a new timely dataflow computation. 
\n timely::execute_from_args(std::env::args().skip(4), move |worker| {\n\n \tlet index = worker.index();\n \tlet peers = worker.peers();\n\n \t\/\/ create a a degree counting differential dataflow\n \tlet (mut input, probe, trace) = worker.scoped(|scope| {\n\n \t\t\/\/ create edge input, count a few ways.\n \t\tlet (input, edges) = scope.new_input();\n\n \t\t\/\/ pull off source, and count.\n \t\tlet arranged = edges.as_collection()\n \t\t\t\t\t\t\t.arrange_by_key_hashed();\n\n\t\t (input, arranged.stream.probe().0, arranged.trace.clone())\n \t});\n\n let seed: &[_] = &[1, 2, 3, index];\n let mut rng1: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge additions\n let mut rng2: StdRng = SeedableRng::from_seed(seed); \/\/ rng for edge additions\n\n \/\/ load up graph dataz\n let &time = input.time();\n for edge in 0..edges {\n \tif edge % peers == index {\n \t\tinput.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), time, 1));\n \t}\n\n \t\/\/ move the data along a bit\n \tif edge % 10000 == 9999 {\n \t\tworker.step();\n \t}\n\t\t}\n\n\t\tlet timer = ::std::time::Instant::now();\n\n\t\tinput.advance_to(1);\n\t\tworker.step_while(|| probe.lt(input.time()));\n\n\t\tif index == 0 {\n\t\t\tlet timer = timer.elapsed();\n\t\t\tlet nanos = timer.as_secs() * 1000000000 + timer.subsec_nanos() as u64;\n\t\t\tprintln!(\"Loading finished after {:?}\", nanos);\n\t\t}\n\n\t\t\/\/ change graph, forever\n\t\tif batch > 0 {\n\n\t\t\tfor edge in 0usize .. {\n\t\t\t\tlet &time = input.time();\n\t\t\t\tif edge % peers == index {\n\t \t\tinput.send(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), time, 1));\n\t \t\tinput.send(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), time,-1));\n\t\t\t\t}\n\n\t \tif edge % batch == (batch - 1) {\n\n\t \t\tlet timer = ::std::time::Instant::now();\n\n\t \t\tlet next = input.epoch() + 1;\n\t \t\tinput.advance_to(next);\n\t\t\t\t\tworker.step_while(|| probe.lt(input.time()));\n\n\t\t\t\t\tif index == 0 {\n\t\t\t\t\t\tlet timer = timer.elapsed();\n\t\t\t\t\t\tlet nanos = timer.as_secs() * 1000000000 + timer.subsec_nanos() as u64;\n\t\t\t\t\t\tprintln!(\"Round {} finished after {:?}\", next - 1, nanos);\n\n\t\t\t\t\t\tlet mut count = 0;\n\t\t \t\tlet timer = ::std::time::Instant::now();\n\t\t \t\tlet mut cursor = trace.cursor();\n\t\t \t\twhile cursor.key_valid() {\n\t\t \t\t\twhile cursor.val_valid() {\n\t\t\t\t\t\t\t\tlet mut sum = 0;\t\t \t\t\t\t\n\t\t\t\t\t\t\t\tcursor.map_times(|_,d| sum += d);\n\t\t\t\t\t\t\t\tif sum > 0 { count += 1; }\n\t\t\t\t\t\t\t\tcursor.step_val();\n\t\t \t\t\t}\n\n\t\t \t\t\tcursor.step_key()\n\t\t \t\t}\n\n\t\t \t\tprintln!(\"count: {} in {:?}\", count, timer.elapsed());\n\t\t\t\t\t}\n\t \t}\n\t }\n\t }\n\n }).unwrap();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example based on into_header_for_infinite_file<commit_after>\/\/ Generate endless screeching noise to stdout\n\n\/\/ Usage: cargo run --example wavstdout | mpv -\n\nextern crate hound;\nuse std::io::Write;\n\nfn main() {\n let spec = hound::WavSpec {\n bits_per_sample: 16,\n channels: 1,\n sample_format: hound::SampleFormat::Int,\n sample_rate: 16000,\n };\n\n let v = spec.into_header_for_infinite_file();\n\n let so = std::io::stdout();\n let mut so = so.lock();\n so.write_all(&v[..]).unwrap();\n\n loop {\n for i in 0..126 {\n use hound::Sample;\n let x : i16 = (i * 256) as i16;\n if x.write(&mut so, 16).is_err() {\n return;\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ZINTERSTORE\/ZUNIONSTORE invalid parameters error 
messages<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for MIR printing changes<commit_after>\/\/ Test that we don't ICE when trying to dump MIR for unusual item types and\n\/\/ that we don't create filenames containing `<` and `>`\n\nstruct A;\n\nimpl A {\n const ASSOCIATED_CONSTANT: i32 = 2;\n}\n\nenum E {\n V = 5,\n}\n\nfn main() {\n let v = Vec::<i32>::new();\n}\n\n\/\/ END RUST SOURCE\n\n\/\/ START rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir\n\/\/ bb0: {\n\/\/ _0 = const 2i32;\n\/\/ return;\n\/\/ }\n\/\/ bb1: {\n\/\/ resume;\n\/\/ }\n\/\/ END rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir\n\n\/\/ START rustc.E-V-{{constant}}.mir_map.0.mir\n\/\/ bb0: {\n\/\/ _0 = const 5isize;\n\/\/ return;\n\/\/ }\n\/\/ bb1: {\n\/\/ resume;\n\/\/ }\n\/\/ END rustc.E-V-{{constant}}.mir_map.0.mir\n\n\/\/ START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir\n\/\/ bb0: {\n\/\/ goto -> bb7;\n\/\/ }\n\/\/ bb1: {\n\/\/ return;\n\/\/ }\n\/\/ bb2: {\n\/\/ resume;\n\/\/ }\n\/\/ bb3: {\n\/\/ goto -> bb1;\n\/\/ }\n\/\/ bb4: {\n\/\/ goto -> bb2;\n\/\/ }\n\/\/ bb5: {\n\/\/ drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb4;\n\/\/ }\n\/\/ bb6: {\n\/\/ drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb4];\n\/\/ }\n\/\/ bb7: {\n\/\/ _2 = &mut (*_1);\n\/\/ _3 = const std::ops::Drop::drop(move _2) -> [return: bb6, unwind: bb5];\n\/\/ }\n\/\/ END rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for #6976<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[feature(macro_rules)];\n\nmacro_rules! 
define_vec (\n () => (\n mod foo {\n #[deriving(Eq)]\n pub struct bar;\n }\n )\n)\n\ndefine_vec!()\n\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before>use alloc::boxed::Box;\n\nuse collections::String;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::io::{Io, Pio, ReadOnly, WriteOnly};\n\nuse graphics::display::VBEMODEINFO;\n\nuse fs::KScheme;\n\nuse drivers::kb_layouts::layouts;\n\npub struct Ps2Keyboard<'a> {\n bus: &'a mut Ps2\n}\n\nimpl<'a> Ps2Keyboard<'a> {\n \/\/TODO: Use result\n fn cmd(&mut self, command: u8) -> u8 {\n self.bus.wait_write();\n self.bus.data.write(command);\n self.bus.wait_read();\n self.bus.data.read()\n }\n}\n\npub struct Ps2Mouse<'a> {\n bus: &'a mut Ps2\n}\n\nimpl<'a> Ps2Mouse<'a> {\n \/\/TODO: Use result\n fn cmd(&mut self, command: u8) -> u8 {\n self.bus.write(0xD4, command);\n self.bus.wait_read();\n self.bus.data.read()\n }\n}\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data register\n data: Pio<u8>,\n \/\/\/ The status register\n sts: ReadOnly<Pio<u8>>,\n \/\/\/ The command register\n cmd: WriteOnly<Pio<u8>>,\n \/\/\/ Left shift\n lshift: bool,\n \/\/\/ Right shift\n rshift: bool,\n \/\/\/ Caps lock\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ Left control\n lctrl: bool,\n \/\/\/ AltGr?\n altgr: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: i32,\n \/\/\/ Mouse point y\n mouse_y: i32,\n \/\/\/ Layout for keyboard\n \/\/\/ Default: English\n layout: layouts::Layout,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio::new(0x60),\n sts: ReadOnly::new(Pio::new(0x64)),\n cmd: WriteOnly::new(Pio::new(0x64)),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n lctrl: false,\n altgr: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: layouts::Layout::English,\n };\n\n module.init();\n\n module\n }\n\n fn wait_read(&self) {\n while ! 
self.sts.readf(1) {}\n }\n\n fn wait_write(&self) {\n while self.sts.readf(2) {}\n }\n\n fn cmd(&mut self, command: u8) {\n self.wait_write();\n self.cmd.write(command);\n }\n\n fn read(&mut self, command: u8) -> u8 {\n self.cmd(command);\n self.wait_read();\n self.data.read()\n }\n\n fn write(&mut self, command: u8, data: u8) {\n self.cmd(command);\n self.wait_write();\n self.data.write(data);\n }\n\n fn keyboard<'a>(&'a mut self) -> Ps2Keyboard<'a> {\n Ps2Keyboard {\n bus: self\n }\n }\n\n fn mouse<'a>(&'a mut self) -> Ps2Mouse<'a> {\n Ps2Mouse {\n bus: self\n }\n }\n\n fn init(&mut self) {\n while self.sts.readf(1) {\n self.data.read();\n }\n\n syslog_info!(\" + PS\/2\");\n\n \/\/ No interrupts, system flag set, clocks enabled, translation enabled\n self.write(0x60, 0b01000100);\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable First Port\n syslog_info!(\" + Keyboard\");\n self.cmd(0xAE);\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n {\n \/\/ Reset\n debug!(\" - Reset {:X}\", self.keyboard().cmd(0xFF));\n self.wait_read();\n debugln!(\", {:X}\", self.data.read());\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Set defaults\n debugln!(\" - Set defaults {:X}\", self.keyboard().cmd(0xF6));\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable Streaming\n debugln!(\" - Enable streaming {:X}\", self.keyboard().cmd(0xF4));\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/ Enable Second Port\n syslog_info!(\" + PS\/2 Mouse\");\n self.cmd(0xA8);\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n {\n \/\/ Reset\n debug!(\" - Reset {:X}\", self.keyboard().cmd(0xFF));\n self.wait_read();\n debugln!(\", {:X}\", self.data.read());\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Set defaults\n debugln!(\" - Set defaults {:X}\", self.mouse().cmd(0xF6));\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable Streaming\n debugln!(\" - Enable streaming {:X}\", self.mouse().cmd(0xF4));\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/ Key and mouse interrupts, system flag set, clocks enabled, translation enabled\n self.write(0x60, 0b01000111);\n\n while self.sts.readf(1) {\n debugln!(\"Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self, mut scancode: u8) -> Option<KeyEvent> {\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n } else if scancode == 0x1D {\n self.lctrl = true;\n } else if scancode == 0x9D {\n self.lctrl = false;\n } else if scancode == 0xE0 {\n let scancode_byte_2 = self.data.read();\n if scancode_byte_2 == 0x38 {\n self.altgr = true;\n } else if scancode_byte_2 == 0xB8 {\n self.altgr 
= false;\n } else {\n scancode = scancode_byte_2;\n }\n }\n\n if self.lctrl {\n if scancode == 0x2E {\n let console = unsafe { &mut *::env().console.get() };\n\n console.write(b\"^C\\n\");\n console.commands.send(String::new(), \"Serial Control C\");\n\n if let Some(ref mut inner) = console.inner {\n inner.redraw = true;\n }\n console.write(b\"\");\n\n return None;\n } else if scancode == 0x20 {\n let console = unsafe { &mut *::env().console.get() };\n\n console.write(b\"^D\\n\");\n\n {\n let contexts = unsafe { &mut *::env().contexts.get() };\n debugln!(\"Magic CTRL-D {}\", ::common::time::Duration::monotonic().secs);\n for context in contexts.iter() {\n debugln!(\" PID {}: {}\", context.pid, context.name);\n\n if context.blocked > 0 {\n debugln!(\" BLOCKED {}\", context.blocked);\n }\n\n if let Some(current_syscall) = context.current_syscall {\n debugln!(\" SYS {:X}: {} {} {:X} {:X} {:X}\", current_syscall.0, current_syscall.1, ::syscall::name(current_syscall.1), current_syscall.2, current_syscall.3, current_syscall.4);\n }\n }\n }\n\n if let Some(ref mut inner) = console.inner {\n inner.redraw = true;\n }\n console.write(b\"\");\n\n return None;\n }\n }\n\n let shift = self.caps_lock != (self.lshift || self.rshift);\n\n\n Some(KeyEvent {\n character: layouts::char_for_scancode(scancode & 0x7F, shift, self.altgr, &self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n })\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self, byte: u8) -> Option<MouseEvent> {\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = (self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100)) as i32;\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = ((((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize) as i32;\n } else {\n y = 0;\n }\n\n if let Some(mode_info) = unsafe { VBEMODEINFO } {\n self.mouse_x = cmp::max(0, cmp::min(mode_info.xresolution as i32, self.mouse_x + x));\n self.mouse_y = cmp::max(0, cmp::min(mode_info.yresolution as i32, self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = match layout {\n 0 => layouts::Layout::English,\n 1 => layouts::Layout::French,\n 2 => layouts::Layout::German,\n _ => layouts::Layout::English,\n }\n }\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0xC || irq == 0x1 {\n loop {\n let status = self.sts.read();\n if status & 0x21 == 0x21 {\n let data = self.data.read();\n if let Some(mouse_event) = self.mouse_interrupt(data) {\n if unsafe { & *::env().console.get() }.draw {\n \/\/Ignore mouse event\n } else {\n ::env().events.send(mouse_event.to_event(), \"Ps2::on_irq mouse\");\n }\n }\n } else if status & 0x21 == 0x01 {\n let data = self.data.read();\n if let 
Some(key_event) = self.keyboard_interrupt(data) {\n if unsafe { & *::env().console.get() }.draw {\n unsafe { &mut *::env().console.get() }.event(key_event.to_event());\n } else {\n ::env().events.send(key_event.to_event(), \"Ps2::on_irq key\");\n }\n }\n } else {\n break;\n }\n }\n }\n }\n}\n<commit_msg>Remove logging from ps2<commit_after>use alloc::boxed::Box;\n\nuse collections::String;\n\nuse core::cmp;\n\nuse common::event::{KeyEvent, MouseEvent};\n\nuse drivers::io::{Io, Pio, ReadOnly, WriteOnly};\n\nuse graphics::display::VBEMODEINFO;\n\nuse fs::KScheme;\n\nuse drivers::kb_layouts::layouts;\n\npub struct Ps2Keyboard<'a> {\n bus: &'a mut Ps2\n}\n\nimpl<'a> Ps2Keyboard<'a> {\n \/\/TODO: Use result\n fn cmd(&mut self, command: u8) -> u8 {\n self.bus.wait_write();\n self.bus.data.write(command);\n self.bus.wait_read();\n self.bus.data.read()\n }\n}\n\npub struct Ps2Mouse<'a> {\n bus: &'a mut Ps2\n}\n\nimpl<'a> Ps2Mouse<'a> {\n \/\/TODO: Use result\n fn cmd(&mut self, command: u8) -> u8 {\n self.bus.write(0xD4, command);\n self.bus.wait_read();\n self.bus.data.read()\n }\n}\n\n\/\/\/ PS2\npub struct Ps2 {\n \/\/\/ The data register\n data: Pio<u8>,\n \/\/\/ The status register\n sts: ReadOnly<Pio<u8>>,\n \/\/\/ The command register\n cmd: WriteOnly<Pio<u8>>,\n \/\/\/ Left shift\n lshift: bool,\n \/\/\/ Right shift\n rshift: bool,\n \/\/\/ Caps lock\n caps_lock: bool,\n \/\/\/ Caps lock toggle\n caps_lock_toggle: bool,\n \/\/\/ Left control\n lctrl: bool,\n \/\/\/ AltGr?\n altgr: bool,\n \/\/\/ The mouse packet\n mouse_packet: [u8; 4],\n \/\/\/ Mouse packet index\n mouse_i: usize,\n \/\/\/ Mouse point x\n mouse_x: i32,\n \/\/\/ Mouse point y\n mouse_y: i32,\n \/\/\/ Layout for keyboard\n \/\/\/ Default: English\n layout: layouts::Layout,\n}\n\nimpl Ps2 {\n \/\/\/ Create new PS2 data\n pub fn new() -> Box<Self> {\n let mut module = box Ps2 {\n data: Pio::new(0x60),\n sts: ReadOnly::new(Pio::new(0x64)),\n cmd: WriteOnly::new(Pio::new(0x64)),\n lshift: false,\n rshift: false,\n caps_lock: false,\n caps_lock_toggle: false,\n lctrl: false,\n altgr: false,\n mouse_packet: [0; 4],\n mouse_i: 0,\n mouse_x: 0,\n mouse_y: 0,\n layout: layouts::Layout::English,\n };\n\n module.init();\n\n module\n }\n\n fn wait_read(&self) {\n while ! 
self.sts.readf(1) {}\n }\n\n fn wait_write(&self) {\n while self.sts.readf(2) {}\n }\n\n fn cmd(&mut self, command: u8) {\n self.wait_write();\n self.cmd.write(command);\n }\n\n fn read(&mut self, command: u8) -> u8 {\n self.cmd(command);\n self.wait_read();\n self.data.read()\n }\n\n fn write(&mut self, command: u8, data: u8) {\n self.cmd(command);\n self.wait_write();\n self.data.write(data);\n }\n\n fn keyboard<'a>(&'a mut self) -> Ps2Keyboard<'a> {\n Ps2Keyboard {\n bus: self\n }\n }\n\n fn mouse<'a>(&'a mut self) -> Ps2Mouse<'a> {\n Ps2Mouse {\n bus: self\n }\n }\n\n fn init(&mut self) {\n while self.sts.readf(1) {\n self.data.read();\n }\n\n syslog_info!(\" + PS\/2\");\n\n \/\/ No interrupts, system flag set, clocks enabled, translation enabled\n self.write(0x60, 0b01000100);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable First Port\n syslog_info!(\" + Keyboard\");\n self.cmd(0xAE);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n {\n \/\/ Reset\n self.keyboard().cmd(0xFF);\n self.wait_read();\n self.data.read();\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Set defaults\n self.keyboard().cmd(0xF6);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable Streaming\n self.keyboard().cmd(0xF4);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/ Enable Second Port\n syslog_info!(\" + PS\/2 Mouse\");\n self.cmd(0xA8);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n {\n \/\/ Reset\n self.keyboard().cmd(0xFF);\n self.wait_read();\n self.data.read();\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Set defaults\n self.mouse().cmd(0xF6);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n\n \/\/ Enable Streaming\n self.mouse().cmd(0xF4);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/ Key and mouse interrupts, system flag set, clocks enabled, translation enabled\n self.write(0x60, 0b01000111);\n\n while self.sts.readf(1) {\n syslog_info!(\" - Extra {}: {:X}\", line!(), self.data.read());\n }\n }\n\n \/\/\/ Keyboard interrupt\n pub fn keyboard_interrupt(&mut self, mut scancode: u8) -> Option<KeyEvent> {\n if scancode == 0 {\n return None;\n } else if scancode == 0x2A {\n self.lshift = true;\n } else if scancode == 0xAA {\n self.lshift = false;\n } else if scancode == 0x36 {\n self.rshift = true;\n } else if scancode == 0xB6 {\n self.rshift = false;\n } else if scancode == 0x3A {\n if !self.caps_lock {\n self.caps_lock = true;\n self.caps_lock_toggle = true;\n } else {\n self.caps_lock_toggle = false;\n }\n } else if scancode == 0xBA {\n if self.caps_lock && !self.caps_lock_toggle {\n self.caps_lock = false;\n }\n } else if scancode == 0x1D {\n self.lctrl = true;\n } else if scancode == 0x9D {\n self.lctrl = false;\n } else if scancode == 0xE0 {\n let scancode_byte_2 = self.data.read();\n if scancode_byte_2 == 0x38 {\n self.altgr = true;\n } else if scancode_byte_2 == 0xB8 {\n self.altgr = false;\n } else {\n scancode = scancode_byte_2;\n }\n }\n\n if self.lctrl {\n if scancode == 0x2E {\n let console = unsafe { &mut *::env().console.get() };\n\n 
console.write(b\"^C\\n\");\n console.commands.send(String::new(), \"Serial Control C\");\n\n if let Some(ref mut inner) = console.inner {\n inner.redraw = true;\n }\n console.write(b\"\");\n\n return None;\n } else if scancode == 0x20 {\n let console = unsafe { &mut *::env().console.get() };\n\n console.write(b\"^D\\n\");\n\n {\n let contexts = unsafe { &mut *::env().contexts.get() };\n debugln!(\"Magic CTRL-D {}\", ::common::time::Duration::monotonic().secs);\n for context in contexts.iter() {\n debugln!(\" PID {}: {}\", context.pid, context.name);\n\n if context.blocked > 0 {\n debugln!(\" BLOCKED {}\", context.blocked);\n }\n\n if let Some(current_syscall) = context.current_syscall {\n debugln!(\" SYS {:X}: {} {} {:X} {:X} {:X}\", current_syscall.0, current_syscall.1, ::syscall::name(current_syscall.1), current_syscall.2, current_syscall.3, current_syscall.4);\n }\n }\n }\n\n if let Some(ref mut inner) = console.inner {\n inner.redraw = true;\n }\n console.write(b\"\");\n\n return None;\n }\n }\n\n let shift = self.caps_lock != (self.lshift || self.rshift);\n\n\n Some(KeyEvent {\n character: layouts::char_for_scancode(scancode & 0x7F, shift, self.altgr, &self.layout),\n scancode: scancode & 0x7F,\n pressed: scancode < 0x80,\n })\n }\n\n \/\/\/ Mouse interrupt\n pub fn mouse_interrupt(&mut self, byte: u8) -> Option<MouseEvent> {\n if self.mouse_i == 0 {\n if byte & 0x8 == 0x8 {\n self.mouse_packet[0] = byte;\n self.mouse_i += 1;\n }\n } else if self.mouse_i == 1 {\n self.mouse_packet[1] = byte;\n\n self.mouse_i += 1;\n } else {\n self.mouse_packet[2] = byte;\n\n let left_button = (self.mouse_packet[0] & 1) == 1;\n let right_button = (self.mouse_packet[0] & 2) == 2;\n let middle_button = (self.mouse_packet[0] & 4) == 4;\n\n let x;\n if (self.mouse_packet[0] & 0x40) != 0x40 && self.mouse_packet[1] != 0 {\n x = (self.mouse_packet[1] as isize -\n (((self.mouse_packet[0] as isize) << 4) & 0x100)) as i32;\n } else {\n x = 0;\n }\n\n let y;\n if (self.mouse_packet[0] & 0x80) != 0x80 && self.mouse_packet[2] != 0 {\n y = ((((self.mouse_packet[0] as isize) << 3) & 0x100) -\n self.mouse_packet[2] as isize) as i32;\n } else {\n y = 0;\n }\n\n if let Some(mode_info) = unsafe { VBEMODEINFO } {\n self.mouse_x = cmp::max(0, cmp::min(mode_info.xresolution as i32, self.mouse_x + x));\n self.mouse_y = cmp::max(0, cmp::min(mode_info.yresolution as i32, self.mouse_y + y));\n }\n\n self.mouse_i = 0;\n\n return Some(MouseEvent {\n x: self.mouse_x,\n y: self.mouse_y,\n left_button: left_button,\n right_button: right_button,\n middle_button: middle_button,\n });\n }\n\n return None;\n }\n\n \/\/\/ Function to change the layout of the keyboard\n pub fn change_layout(&mut self, layout: usize) {\n self.layout = match layout {\n 0 => layouts::Layout::English,\n 1 => layouts::Layout::French,\n 2 => layouts::Layout::German,\n _ => layouts::Layout::English,\n }\n }\n}\n\nimpl KScheme for Ps2 {\n fn on_irq(&mut self, irq: u8) {\n if irq == 0xC || irq == 0x1 {\n loop {\n let status = self.sts.read();\n if status & 0x21 == 0x21 {\n let data = self.data.read();\n if let Some(mouse_event) = self.mouse_interrupt(data) {\n if unsafe { & *::env().console.get() }.draw {\n \/\/Ignore mouse event\n } else {\n ::env().events.send(mouse_event.to_event(), \"Ps2::on_irq mouse\");\n }\n }\n } else if status & 0x21 == 0x01 {\n let data = self.data.read();\n if let Some(key_event) = self.keyboard_interrupt(data) {\n if unsafe { & *::env().console.get() }.draw {\n unsafe { &mut *::env().console.get() }.event(key_event.to_event());\n } 
else {\n ::env().events.send(key_event.to_event(), \"Ps2::on_irq key\");\n }\n }\n } else {\n break;\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>v0.0.4<commit_after><|endoftext|>"} {"text":"<commit_before>use piston::input::Button;\nuse piston::input::Button::Keyboard;\nuse piston::input::Key;\nuse piston::input::{RenderArgs, UpdateArgs};\n\nuse std::cell::RefCell;\nuse std::cell::Ref;\nuse std::rc::Rc;\nuse entity::Entity;\nuse std::collections::HashMap;\n\nuse player::Player;\n\npub struct App {\n \/\/\/ next unique id\n player : Rc<RefCell<Player>>,\n last_entity_id: u32,\n entities: HashMap<u32, Rc<RefCell<Entity>>>,\n}\n\n\/\/fn insert(&mut self, k: K, v: V) -> Option<V>\n\nimpl App {\n pub fn new() -> App {\n let mut hm : HashMap<u32, Rc<RefCell<Entity>>> = HashMap::new();\n let mut player = Rc::new(RefCell::new(Player::new()));\n hm.insert(0, player.clone());\n let mut app = App {\n last_entity_id: 1,\n entities: hm,\n player : player.clone()\n };\n\n return app;\n }\n\n pub fn key_press(&mut self, args: Button) {\n if args == Keyboard(Key::Space) {\n println!(\"was\");\n }\n }\n\/*\n pub fn add_entity(&mut self, e: Box<Entity>) {\n self.entities.push(e);\n }*\/\n\n pub fn update(&mut self, args: UpdateArgs) {\n for (id, e) in &mut self.entities {\n e.borrow_mut().update(args);\n }\n }\n\n pub fn render(&mut self, args: RenderArgs) {\n for (id, e) in &mut self.entities {\n e.borrow_mut().render(args);\n }\n }\n}\n<commit_msg>return App better.<commit_after>use piston::input::Button;\nuse piston::input::Button::Keyboard;\nuse piston::input::Key;\nuse piston::input::{RenderArgs, UpdateArgs};\n\nuse std::cell::RefCell;\nuse std::cell::Ref;\nuse std::rc::Rc;\nuse entity::Entity;\nuse std::collections::HashMap;\n\nuse player::Player;\n\npub struct App {\n \/\/\/ next unique id\n player : Rc<RefCell<Player>>,\n last_entity_id: u32,\n entities: HashMap<u32, Rc<RefCell<Entity>>>,\n}\n\n\/\/fn insert(&mut self, k: K, v: V) -> Option<V>\n\nimpl App {\n pub fn new() -> App {\n let mut hm : HashMap<u32, Rc<RefCell<Entity>>> = HashMap::new();\n let mut player = Rc::new(RefCell::new(Player::new()));\n hm.insert(0, player.clone());\n App {\n last_entity_id: 1,\n entities: hm,\n player : player.clone()\n }\n }\n\n pub fn key_press(&mut self, args: Button) {\n if args == Keyboard(Key::Space) {\n println!(\"was\");\n }\n }\n\/*\n pub fn add_entity(&mut self, e: Box<Entity>) {\n self.entities.push(e);\n }*\/\n\n pub fn update(&mut self, args: UpdateArgs) {\n for (id, e) in &mut self.entities {\n e.borrow_mut().update(args);\n }\n }\n\n pub fn render(&mut self, args: RenderArgs) {\n for (id, e) in &mut self.entities {\n e.borrow_mut().render(args);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ FIXME: Use bindgen\n\nextern mod core_graphics;\n\nuse cairo_quartz::core_graphics::font::CGFontRef;\n\n#[nolink]\npub extern mod bindgen {\n fn cairo_quartz_font_face_create_for_cgfont(font: CGFontRef) -> *cairo::cairo_font_face_t;\n \/\/ XXX: This is here because otherwise the symbol goes missing from the library after linking,\n \/\/ and it's used by azure\n fn cairo_quartz_surface_get_cg_context();\n}\n<commit_msg>Rust upgrade Mac<commit_after>\/\/ FIXME: Use bindgen\n\nextern mod core_graphics;\n\nuse cairo;\nuse cairo_quartz::core_graphics::font::CGFontRef;\n\n#[nolink]\npub extern mod bindgen {\n fn cairo_quartz_font_face_create_for_cgfont(font: CGFontRef) -> *cairo::cairo_font_face_t;\n \/\/ XXX: This is here because otherwise the symbol goes missing from the library 
after linking,\n \/\/ and it's used by azure\n fn cairo_quartz_surface_get_cg_context();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>simple cpu structure and creation<commit_after>use std::io::File;\n\nstruct Cpu {\n pc: i16,\n\tsp: i16,\n\tr0: i16,\n\tr1: i16,\n\tr2: i16,\n\tr3: i16,\n\tr4: i16,\n\tr5: i16,\n\tr6: i16,\n\tr7: i16,\n\tr8: i16,\n\tr9: i16,\n\tra: i16,\n\trb: i16,\n\trc: i16,\n\trd: i16,\n\tre: i16,\n\trf: i16,\n\tflags: i8,\n\tfile: File,\n}\n\nimpl Cpu {\n fn new(file_path: std::path::Path) -> Cpu {\n\t\tlet file = match File::open(&file_path) {\n\t\t Err(why) => fail!(\"{} {}\",why.desc, file_path.display()),\n\t\t\tOk(file) => file,\n\t\t};\n\t let cpu = Cpu {pc: 0, sp: 0, r0: 0, r1: 0, r2: 0, r3: 0, r4: 0, r5: 0, r6: 0, r7: 0,\n\t\t r8: 0, r9: 0, ra: 0, rb: 0, rc: 0, rd: 0, re: 0, rf: 0, flags: 0, file: file};\n\t\tcpu\n\t}\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>more on env<commit_after>\n\/\/use ast::Ast;\nuse ast::ExprAst;\nuse ast::StrNode;\nuse ast::IntNode;\n\n#[deriving(Clone, PartialEq)]\npub struct Env {\n pub vars: Vec<ExprAst>,\n pub vals: Vec<ExprAst>,\n pub next: Option<Box<Env>>\n}\n\n\n#[allow(dead_code)]\nimpl Env {\n pub fn new() -> Env {\n Env {\n vars: vec![],\n vals: vec![],\n next: None\n }\n }\n\n pub fn def_var(&mut self, var: ExprAst, val: ExprAst) {\n self.add_bingding(var, val);\n }\n\n pub fn add_bingding(&mut self, var: ExprAst, val: ExprAst) {\n assert!(self.vars.len() == self.vals.len());\n self.vars.push(var);\n self.vals.push(val);\n }\n\n pub fn lookup(&self, var: ExprAst) -> Option<ExprAst> {\n for i in range(0u, self.vars.len()) {\n if self.vars[i] == var {\n return Some(self.vals[i].clone());\n }\n }\n match self.next {\n Some(ref sub) => return sub.lookup(var),\n _ => return None\n };\n }\n\n}\n\n\n#[test]\nfn test_env() {\n let mut env = Env::new();\n env.def_var(ExprAst::Str(StrNode::new(\"hello\".to_string())),\n ExprAst::Str(StrNode::new(\"world\".to_string())));\n\n let val = env.lookup(ExprAst::Str(StrNode::new(\"hello\".to_string())));\n assert!(val.unwrap().as_str() == \"world\".to_string());\n\n env.def_var(ExprAst::Str(StrNode::new(\"1\".to_string())),\n ExprAst::Int(IntNode::new(1)));\n let val = env.lookup(ExprAst::Str(StrNode::new(\"1\".to_string())));\n assert!(val.unwrap().as_int() == 1);\n\n env.def_var(ExprAst::Str(StrNode::new(\"1\".to_string())),\n ExprAst::Int(IntNode::new(2)));\n let val = env.lookup(ExprAst::Str(StrNode::new(\"1\".to_string())));\n assert!(val.unwrap().as_int() == 1);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for parsing multiple lists<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>clj-rub implemented<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>make thread run finite<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>WIP: v8 updates<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Ensure counter is digested high order byte first.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Remove dead_code attribute<commit_after><|endoftext|>"} {"text":"<commit_before>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! 
A user friendly game engine written in Rust.\n\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx;\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\n\n\/\/ Crates used to reexport.\nextern crate \"ai_behavior\" as ai_behavior_lib;\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"window\" as window_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"current\" as current_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\n\n\/\/ Reexports.\npub use current_lib as current;\npub use ai_behavior_lib as ai_behavior;\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use window_lib as window;\npub use cam_lib as cam;\npub use fps_counter_lib as fps_counter;\npub use drag_controller_lib as drag_controller;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n Window,\n WindowSettings,\n};\n\npub use current::{\n DANGER,\n Get,\n Set,\n Modifier,\n Current,\n CurrentGuard,\n};\n\n#[cfg(feature = \"include_gfx\")]\nuse gfx_graphics::G2D;\n#[cfg(feature = \"include_gfx\")]\nuse gfx::{ DeviceHelper };\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\nfn start_window(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = WindowBackEnd::new(\n opengl,\n window_settings,\n );\n\n let mut gl = Gl::new(opengl);\n let mut fps_counter = FPSCounter::new();\n\n let window_guard = CurrentGuard::new(&mut window);\n let gl_guard = CurrentGuard::new(&mut gl);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(gl_guard);\n drop(fps_counter_guard);\n}\n\n#[cfg(feature = \"include_gfx\")]\nfn start_gfx(f: ||) {\n let mut device = gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n });\n let mut g2d = G2D::new(&mut device);\n let mut renderer = device.create_renderer();\n let event::window::Size([w, h]) = window.get(); \n let mut frame = gfx::Frame::new(w as u16, h as u16);\n\n let device_guard = CurrentGuard::new(&mut device);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n\n f();\n \n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(device_guard);\n}\n\n#[cfg(not(feature = \"include_gfx\"))]\nfn start_gfx(f: ||) {\n f();\n}\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n start_window(opengl, window_settings, || {\n if cfg!(feature = \"include_gfx\") {\n start_gfx(|| f());\n } else {\n f();\n }\n });\n}\n\n\/\/\/ The current window\npub unsafe fn current_window() -> Current<WindowBackEnd> { Current::new() }\n\/\/\/ The current Gfx device\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_gfx_device() -> Current<gfx::GlDevice> { Current::new() }\n\/\/\/ The current opengl_graphics back-end\npub unsafe fn current_gl() -> Current<Gl> { Current::new() }\n\/\/\/ The current gfx_graphics back-end\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_g2d() -> Current<G2D> { Current::new() }\n\/\/\/ The current Gfx renderer\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_renderer() -> Current<gfx::Renderer<gfx::GlCommandBuffer>> { Current::new() }\n\/\/\/ The current Gfx frame\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_frame() -> Current<gfx::Frame> { Current::new() }\n\/\/\/ The current FPS counter\npub unsafe fn current_fps_counter() -> Current<FPSCounter> { Current::new() }\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events() -> event::Events<Current<WindowBackEnd>> {\n unsafe {\n Events::new(current_window())\n }\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n unsafe {\n current_fps_counter().tick()\n }\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: String) {\n unsafe {\n current_window().set_mut(window::Title(text));\n }\n}\n\n\/\/\/ Returns true if the current window should be closed.\npub fn should_close() -> bool {\n use window::ShouldClose;\n\n unsafe {\n let ShouldClose(val) = current_window().get();\n val\n }\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\n\/\/\/\n\/\/\/ ### DANGER\n\/\/\/\n\/\/\/ This function should not be called nested within the closure.\n\/\/\/ Doing so will lead to mutable aliased references to the graphics back-end.\n#[cfg(feature = \"include_gfx\")]\npub fn render_2d_gfx(\n _: current::DANGER,\n 
bg_color: Option<[f32, ..4]>, \n f: |graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n unsafe {\n current_g2d().draw(\n &mut *current_renderer(),\n &*current_frame(), \n |c, g| {\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n current_gfx_device().submit(current_renderer().as_buffer());\n current_renderer().reset();\n }\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\n\/\/\/\n\/\/\/ ### DANGER\n\/\/\/\n\/\/\/ This function should not be called nested within the closure.\n\/\/\/ Doing so will lead to mutable aliased references to the graphics back-end.\npub fn render_2d_opengl(\n _: current::DANGER,\n bg_color: Option<[f32, ..4]>,\n f: |graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n unsafe {\n let gl = &mut *current_gl();\n let window::Size([w, h]) = current_window().get();\n gl.draw([0, 0, w as i32, h as i32], |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n }\n}\n\n<commit_msg>Upgraded to latest piston-current<commit_after>#![crate_name = \"piston\"]\n#![deny(missing_docs)]\n#![warn(dead_code)]\n#![feature(default_type_params)]\n#![feature(globs)]\n\n\/\/! A user friendly game engine written in Rust.\n\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx;\n#[cfg(feature = \"include_gfx\")]\nextern crate gfx_graphics;\nextern crate opengl_graphics;\nextern crate sdl2;\nextern crate sdl2_window;\n\n\/\/ Crates used to reexport.\nextern crate \"ai_behavior\" as ai_behavior_lib;\nextern crate \"vecmath\" as vecmath_lib;\nextern crate \"shader_version\" as shader_version_lib;\nextern crate \"image\" as image_lib;\nextern crate \"graphics\" as graphics_lib;\nextern crate \"input\" as input_lib;\nextern crate \"event\" as event_lib;\nextern crate \"window\" as window_lib;\nextern crate \"cam\" as cam_lib;\nextern crate \"current\" as current_lib;\nextern crate \"fps_counter\" as fps_counter_lib;\nextern crate \"drag_controller\" as drag_controller_lib;\nextern crate \"read_color\" as read_color_lib;\nextern crate \"select_color\" as select_color_lib;\n\n\/\/ Reexports.\npub use current_lib as current;\npub use ai_behavior_lib as ai_behavior;\npub use shader_version_lib as shader_version;\npub use image_lib as image;\npub use graphics_lib as graphics;\npub use vecmath_lib as vecmath;\npub use input_lib as input;\npub use event_lib as event;\npub use window_lib as window;\npub use cam_lib as cam;\npub use fps_counter_lib as fps_counter;\npub use drag_controller_lib as drag_controller;\n\npub use sdl2_window::Sdl2Window as WindowBackEnd;\npub use event::{\n Event,\n Events,\n NoWindow,\n RenderArgs,\n UpdateArgs,\n WindowSettings,\n};\n\npub use current::{\n DANGER,\n Action,\n ActOn,\n Get,\n GetFrom,\n Set,\n SetAt,\n Current,\n CurrentGuard,\n};\n\n#[cfg(feature = \"include_gfx\")]\nuse gfx_graphics::G2D;\n#[cfg(feature = \"include_gfx\")]\nuse gfx::{ DeviceHelper };\nuse opengl_graphics::Gl;\nuse fps_counter::FPSCounter;\n\npub mod color {\n \/\/! 
Rexported libraries for working with colors\n pub use read_color_lib as read_color;\n pub use select_color_lib as select_color;\n}\n\nfn start_window(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n let mut window = WindowBackEnd::new(\n opengl,\n window_settings,\n );\n\n let mut gl = Gl::new(opengl);\n let mut fps_counter = FPSCounter::new();\n\n let window_guard = CurrentGuard::new(&mut window);\n let gl_guard = CurrentGuard::new(&mut gl);\n let fps_counter_guard = CurrentGuard::new(&mut fps_counter);\n\n f();\n\n drop(window_guard);\n drop(gl_guard);\n drop(fps_counter_guard);\n}\n\n#[cfg(feature = \"include_gfx\")]\nfn start_gfx(f: ||) {\n let mut device = gfx::GlDevice::new(|s| unsafe {\n std::mem::transmute(sdl2::video::gl_get_proc_address(s))\n });\n let mut g2d = G2D::new(&mut device);\n let mut renderer = device.create_renderer();\n let event::window::Size([w, h]) = window.get(); \n let mut frame = gfx::Frame::new(w as u16, h as u16);\n\n let device_guard = CurrentGuard::new(&mut device);\n let g2d_guard = CurrentGuard::new(&mut g2d);\n let renderer_guard = CurrentGuard::new(&mut renderer);\n let frame_guard = CurrentGuard::new(&mut frame);\n\n f();\n \n drop(g2d_guard);\n drop(renderer_guard);\n drop(frame_guard);\n drop(device_guard);\n}\n\n#[cfg(not(feature = \"include_gfx\"))]\nfn start_gfx(f: ||) {\n f();\n}\n\n\/\/\/ Initializes window and sets up current objects.\npub fn start(\n opengl: shader_version::OpenGL,\n window_settings: WindowSettings,\n f: ||\n) {\n start_window(opengl, window_settings, || {\n if cfg!(feature = \"include_gfx\") {\n start_gfx(|| f());\n } else {\n f();\n }\n });\n}\n\n\/\/\/ The current window\npub unsafe fn current_window() -> Current<WindowBackEnd> { Current::new() }\n\/\/\/ The current Gfx device\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_gfx_device() -> Current<gfx::GlDevice> { Current::new() }\n\/\/\/ The current opengl_graphics back-end\npub unsafe fn current_gl() -> Current<Gl> { Current::new() }\n\/\/\/ The current gfx_graphics back-end\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_g2d() -> Current<G2D> { Current::new() }\n\/\/\/ The current Gfx renderer\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_renderer() -> Current<gfx::Renderer<gfx::GlCommandBuffer>> { Current::new() }\n\/\/\/ The current Gfx frame\n#[cfg(feature = \"include_gfx\")]\npub unsafe fn current_frame() -> Current<gfx::Frame> { Current::new() }\n\/\/\/ The current FPS counter\npub unsafe fn current_fps_counter() -> Current<FPSCounter> { Current::new() }\n\n\/\/\/ Returns an event iterator for the event loop\npub fn events() -> event::Events<Current<WindowBackEnd>> {\n unsafe {\n Events::new(current_window())\n }\n}\n\n\/\/\/ Updates the FPS counter and gets the frames per second.\npub fn fps_tick() -> uint {\n unsafe {\n current_fps_counter().tick()\n }\n}\n\n\/\/\/ Sets title of the current window.\npub fn set_title(text: String) {\n unsafe {\n current_window().set_mut(window::Title(text));\n }\n}\n\n\/\/\/ Returns true if the current window should be closed.\npub fn should_close() -> bool {\n use window::ShouldClose;\n\n unsafe {\n let ShouldClose(val) = current_window().get();\n val\n }\n}\n\n\/\/\/ Renders 2D graphics using Gfx.\n\/\/\/\n\/\/\/ ### DANGER\n\/\/\/\n\/\/\/ This function should not be called nested within the closure.\n\/\/\/ Doing so will lead to mutable aliased references to the graphics back-end.\n#[cfg(feature = \"include_gfx\")]\npub fn render_2d_gfx(\n _: current::DANGER,\n 
bg_color: Option<[f32, ..4]>, \n f: |graphics::Context, \n &mut gfx_graphics::GraphicsBackEnd<gfx::GlCommandBuffer>|\n) {\n use gfx::Device; \n\n unsafe {\n current_g2d().draw(\n &mut *current_renderer(),\n &*current_frame(), \n |c, g| {\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n current_gfx_device().submit(current_renderer().as_buffer());\n current_renderer().reset();\n }\n}\n\n\/\/\/ Renders 2D graphics using OpenGL.\n\/\/\/\n\/\/\/ ### DANGER\n\/\/\/\n\/\/\/ This function should not be called nested within the closure.\n\/\/\/ Doing so will lead to mutable aliased references to the graphics back-end.\npub fn render_2d_opengl(\n _: current::DANGER,\n bg_color: Option<[f32, ..4]>,\n f: |graphics::Context,\n &mut opengl_graphics::Gl|\n) {\n unsafe {\n let gl = &mut *current_gl();\n let window::Size([w, h]) = current_window().get();\n gl.draw([0, 0, w as i32, h as i32], |c, g| {\n use graphics::*;\n if let Some(bg_color) = bg_color {\n graphics::clear(bg_color, g);\n }\n f(c, g);\n });\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>remove deprecated const<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Actually, we don't need RefCell<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>err fix<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test that generic bare functions can be bound<commit_after>fn# f<T>(i: T, j: T, k: T) {\n assert i == j;\n assert j != k;\n}\n\nfn main() {\n \/\/ Binding a bare function turns it into a shared closure\n let g: fn() = bind f(10, 10, 20);\n g();\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Revert \"Unit tests for key formatter\"<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Paiza programming<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Interfaces to the operating system provided random number\n\/\/! generators.\n\npub use self::imp::OsRng;\n\n#[cfg(all(unix, not(target_os = \"ios\")))]\nmod imp {\n use prelude::v1::*;\n use self::OsRngInner::*;\n\n use fs::File;\n use io;\n use libc;\n use mem;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n extern \"C\" {\n fn syscall(number: libc::c_long, ...) 
-> libc::c_long;\n }\n\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(any(target_arch = \"arm\", target_arch = \"aarch64\"))]\n const NR_GETRANDOM: libc::c_long = 384;\n #[cfg(target_arch = \"powerpc\")]\n const NR_GETRANDOM: libc::c_long = 384;\n\n unsafe {\n syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n let len = v.len();\n while read < len {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n fn getrandom_next_u32() -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n }\n\n fn getrandom_next_u64() -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, Ordering};\n use sync::Once;\n\n static CHECKER: Once = Once::new();\n static AVAILABLE: AtomicBool = AtomicBool::new(false);\n\n CHECKER.call_once(|| {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = io::Error::last_os_error().raw_os_error();\n err != Some(libc::ENOSYS)\n } else {\n true\n };\n AVAILABLE.store(available, Ordering::Relaxed);\n });\n\n AVAILABLE.load(Ordering::Relaxed)\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn is_getrandom_available() -> bool { false }\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng<File>),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = try!(File::open(\"\/dev\/urandom\"));\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u32(),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u64(),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n #[repr(C)]\n struct SecRandom;\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;\n\n #[link(name = \"Security\", kind = \"framework\")]\n extern \"C\" {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t,\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::types::os::arch::extra::{LONG_PTR};\n use libc::{DWORD, BYTE, LPCSTR, BOOL};\n\n type HCRYPTPROV = LONG_PTR;\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n hcryptprov: HCRYPTPROV\n }\n\n const PROV_RSA_FULL: DWORD = 1;\n const CRYPT_SILENT: DWORD = 64;\n const CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000;\n\n #[allow(non_snake_case)]\n #[link(name = \"advapi32\")]\n extern \"system\" {\n fn CryptAcquireContextA(phProv: *mut HCRYPTPROV,\n pszContainer: LPCSTR,\n pszProvider: LPCSTR,\n dwProvType: DWORD,\n dwFlags: DWORD) -> BOOL;\n fn CryptGenRandom(hProv: HCRYPTPROV,\n dwLen: DWORD,\n pbBuffer: *mut BYTE) -> BOOL;\n fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n let mut hcp = 0;\n let ret = unsafe {\n CryptAcquireContextA(&mut hcp, 0 as LPCSTR, 0 as LPCSTR,\n PROV_RSA_FULL,\n CRYPT_VERIFYCONTEXT | CRYPT_SILENT)\n };\n\n if ret == 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(OsRng { hcryptprov: hcp })\n }\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n CryptGenRandom(self.hcryptprov, v.len() as DWORD,\n v.as_mut_ptr())\n };\n if ret == 0 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n\n impl Drop for OsRng {\n fn drop(&mut self) {\n let ret = unsafe {\n CryptReleaseContext(self.hcryptprov, 0)\n };\n if ret == 0 {\n panic!(\"couldn't release context: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n\n use sync::mpsc::channel;\n use rand::Rng;\n use super::OsRng;\n use thread;\n\n #[test]\n fn test_os_rng() {\n let mut r = OsRng::new().unwrap();\n\n r.next_u32();\n r.next_u64();\n\n let mut v = [0; 1000];\n r.fill_bytes(&mut v);\n }\n\n #[test]\n fn test_os_rng_tasks() {\n\n let mut txs = vec!();\n for _ in 0..20 {\n let (tx, rx) = channel();\n txs.push(tx);\n\n thread::spawn(move|| {\n \/\/ wait until all the threads are ready to go.\n rx.recv().unwrap();\n\n \/\/ deschedule to attempt to interleave things as much\n \/\/ as possible (XXX: is this a good test?)\n let mut r = OsRng::new().unwrap();\n thread::yield_now();\n let mut v = [0; 1000];\n\n for _ in 0..100 {\n r.next_u32();\n thread::yield_now();\n r.next_u64();\n thread::yield_now();\n r.fill_bytes(&mut v);\n thread::yield_now();\n }\n });\n }\n\n \/\/ start all the threads\n for tx in &txs {\n tx.send(()).unwrap();\n }\n }\n}\n<commit_msg>libstd\/rand\/os.rs: Remove a tiny bit of duplicated code<commit_after>\/\/ Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Interfaces to the operating system provided random number\n\/\/! generators.\n\npub use self::imp::OsRng;\n\n#[cfg(all(unix, not(target_os = \"ios\")))]\nmod imp {\n use prelude::v1::*;\n use self::OsRngInner::*;\n\n use fs::File;\n use io;\n use libc;\n use mem;\n use rand::Rng;\n use rand::reader::ReaderRng;\n use sys::os::errno;\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn getrandom(buf: &mut [u8]) -> libc::c_long {\n extern \"C\" {\n fn syscall(number: libc::c_long, ...) -> libc::c_long;\n }\n\n #[cfg(target_arch = \"x86_64\")]\n const NR_GETRANDOM: libc::c_long = 318;\n #[cfg(target_arch = \"x86\")]\n const NR_GETRANDOM: libc::c_long = 355;\n #[cfg(any(target_arch = \"arm\", target_arch = \"aarch64\", target_arch = \"powerpc\"))]\n const NR_GETRANDOM: libc::c_long = 384;\n\n unsafe {\n syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0)\n }\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn getrandom(_buf: &mut [u8]) -> libc::c_long { -1 }\n\n fn getrandom_fill_bytes(v: &mut [u8]) {\n let mut read = 0;\n let len = v.len();\n while read < len {\n let result = getrandom(&mut v[read..]);\n if result == -1 {\n let err = errno() as libc::c_int;\n if err == libc::EINTR {\n continue;\n } else {\n panic!(\"unexpected getrandom error: {}\", err);\n }\n } else {\n read += result as usize;\n }\n }\n }\n\n fn getrandom_next_u32() -> u32 {\n let mut buf: [u8; 4] = [0; 4];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 4], u32>(buf) }\n }\n\n fn getrandom_next_u64() -> u64 {\n let mut buf: [u8; 8] = [0; 8];\n getrandom_fill_bytes(&mut buf);\n unsafe { mem::transmute::<[u8; 8], u64>(buf) }\n }\n\n #[cfg(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\")))]\n fn is_getrandom_available() -> bool {\n use sync::atomic::{AtomicBool, Ordering};\n use sync::Once;\n\n static CHECKER: Once = Once::new();\n static AVAILABLE: AtomicBool = AtomicBool::new(false);\n\n CHECKER.call_once(|| {\n let mut buf: [u8; 0] = [];\n let result = getrandom(&mut buf);\n let available = if result == -1 {\n let err = io::Error::last_os_error().raw_os_error();\n err != Some(libc::ENOSYS)\n } else {\n true\n };\n AVAILABLE.store(available, Ordering::Relaxed);\n });\n\n AVAILABLE.load(Ordering::Relaxed)\n }\n\n #[cfg(not(all(target_os = \"linux\",\n any(target_arch = \"x86_64\",\n target_arch = \"x86\",\n target_arch = \"arm\",\n target_arch = \"aarch64\",\n target_arch = \"powerpc\"))))]\n fn is_getrandom_available() -> bool { false }\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n inner: OsRngInner,\n }\n\n enum OsRngInner {\n OsGetrandomRng,\n OsReaderRng(ReaderRng<File>),\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n if is_getrandom_available() {\n return Ok(OsRng { inner: OsGetrandomRng });\n }\n\n let reader = try!(File::open(\"\/dev\/urandom\"));\n let reader_rng = ReaderRng::new(reader);\n\n Ok(OsRng { inner: OsReaderRng(reader_rng) })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u32(),\n OsReaderRng(ref mut rng) => rng.next_u32(),\n }\n }\n fn next_u64(&mut self) -> u64 {\n match self.inner {\n OsGetrandomRng => getrandom_next_u64(),\n OsReaderRng(ref mut rng) => rng.next_u64(),\n }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n match self.inner {\n OsGetrandomRng => getrandom_fill_bytes(v),\n OsReaderRng(ref mut rng) => rng.fill_bytes(v)\n }\n }\n }\n}\n\n#[cfg(target_os = \"ios\")]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::{c_int, size_t};\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n \/\/ dummy field to ensure that this struct cannot be constructed outside\n \/\/ of this module\n _dummy: (),\n }\n\n #[repr(C)]\n struct SecRandom;\n\n #[allow(non_upper_case_globals)]\n const kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;\n\n #[link(name = \"Security\", kind = \"framework\")]\n extern \"C\" {\n fn SecRandomCopyBytes(rnd: *const SecRandom,\n count: size_t, bytes: *mut u8) -> c_int;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n Ok(OsRng { _dummy: () })\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t,\n v.as_mut_ptr())\n };\n if ret == -1 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(windows)]\nmod imp {\n use prelude::v1::*;\n\n use io;\n use mem;\n use rand::Rng;\n use libc::types::os::arch::extra::{LONG_PTR};\n use libc::{DWORD, BYTE, LPCSTR, BOOL};\n\n type HCRYPTPROV = LONG_PTR;\n\n \/\/\/ A random number generator that retrieves randomness straight from\n \/\/\/ the operating system. 
Platform sources:\n \/\/\/\n \/\/\/ - Unix-like systems (Linux, Android, Mac OSX): read directly from\n \/\/\/ `\/dev\/urandom`, or from `getrandom(2)` system call if available.\n \/\/\/ - Windows: calls `CryptGenRandom`, using the default cryptographic\n \/\/\/ service provider with the `PROV_RSA_FULL` type.\n \/\/\/ - iOS: calls SecRandomCopyBytes as \/dev\/(u)random is sandboxed.\n \/\/\/\n \/\/\/ This does not block.\n pub struct OsRng {\n hcryptprov: HCRYPTPROV\n }\n\n const PROV_RSA_FULL: DWORD = 1;\n const CRYPT_SILENT: DWORD = 64;\n const CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000;\n\n #[allow(non_snake_case)]\n #[link(name = \"advapi32\")]\n extern \"system\" {\n fn CryptAcquireContextA(phProv: *mut HCRYPTPROV,\n pszContainer: LPCSTR,\n pszProvider: LPCSTR,\n dwProvType: DWORD,\n dwFlags: DWORD) -> BOOL;\n fn CryptGenRandom(hProv: HCRYPTPROV,\n dwLen: DWORD,\n pbBuffer: *mut BYTE) -> BOOL;\n fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL;\n }\n\n impl OsRng {\n \/\/\/ Create a new `OsRng`.\n pub fn new() -> io::Result<OsRng> {\n let mut hcp = 0;\n let ret = unsafe {\n CryptAcquireContextA(&mut hcp, 0 as LPCSTR, 0 as LPCSTR,\n PROV_RSA_FULL,\n CRYPT_VERIFYCONTEXT | CRYPT_SILENT)\n };\n\n if ret == 0 {\n Err(io::Error::last_os_error())\n } else {\n Ok(OsRng { hcryptprov: hcp })\n }\n }\n }\n\n impl Rng for OsRng {\n fn next_u32(&mut self) -> u32 {\n let mut v = [0; 4];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn next_u64(&mut self) -> u64 {\n let mut v = [0; 8];\n self.fill_bytes(&mut v);\n unsafe { mem::transmute(v) }\n }\n fn fill_bytes(&mut self, v: &mut [u8]) {\n let ret = unsafe {\n CryptGenRandom(self.hcryptprov, v.len() as DWORD,\n v.as_mut_ptr())\n };\n if ret == 0 {\n panic!(\"couldn't generate random bytes: {}\",\n io::Error::last_os_error());\n }\n }\n }\n\n impl Drop for OsRng {\n fn drop(&mut self) {\n let ret = unsafe {\n CryptReleaseContext(self.hcryptprov, 0)\n };\n if ret == 0 {\n panic!(\"couldn't release context: {}\",\n io::Error::last_os_error());\n }\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use prelude::v1::*;\n\n use sync::mpsc::channel;\n use rand::Rng;\n use super::OsRng;\n use thread;\n\n #[test]\n fn test_os_rng() {\n let mut r = OsRng::new().unwrap();\n\n r.next_u32();\n r.next_u64();\n\n let mut v = [0; 1000];\n r.fill_bytes(&mut v);\n }\n\n #[test]\n fn test_os_rng_tasks() {\n\n let mut txs = vec!();\n for _ in 0..20 {\n let (tx, rx) = channel();\n txs.push(tx);\n\n thread::spawn(move|| {\n \/\/ wait until all the threads are ready to go.\n rx.recv().unwrap();\n\n \/\/ deschedule to attempt to interleave things as much\n \/\/ as possible (XXX: is this a good test?)\n let mut r = OsRng::new().unwrap();\n thread::yield_now();\n let mut v = [0; 1000];\n\n for _ in 0..100 {\n r.next_u32();\n thread::yield_now();\n r.next_u64();\n thread::yield_now();\n r.fill_bytes(&mut v);\n thread::yield_now();\n }\n });\n }\n\n \/\/ start all the threads\n for tx in &txs {\n tx.send(()).unwrap();\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse build::{Location, ScopeAuxiliaryVec};\nuse rustc::mir::repr::*;\nuse rustc::ty::{self, TyCtxt};\nuse rustc_data_structures::fnv::FnvHashMap;\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse syntax::ast::NodeId;\nuse syntax::codemap::Span;\n\nconst INDENT: &'static str = \" \";\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```\n\/\/\/ rustc.node<node_id>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: &TyCtxt<'tcx>,\n pass_name: &str,\n disambiguator: &Display,\n node_id: NodeId,\n mir: &Mir<'tcx>,\n auxiliary: Option<&ScopeAuxiliaryVec>) {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return,\n Some(ref filters) => filters,\n };\n let node_path = tcx.item_path_str(tcx.map.local_def_id(node_id));\n let is_matched =\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n });\n if !is_matched {\n return;\n }\n\n let file_name = format!(\"rustc.node{}.{}.{}.mir\",\n node_id, pass_name, disambiguator);\n let _ = fs::File::create(&file_name).and_then(|mut file| {\n try!(writeln!(file, \"\/\/ MIR for `{}`\", node_path));\n try!(writeln!(file, \"\/\/ node_id = {}\", node_id));\n try!(writeln!(file, \"\/\/ pass_name = {}\", pass_name));\n try!(writeln!(file, \"\/\/ disambiguator = {}\", disambiguator));\n try!(writeln!(file, \"\"));\n try!(write_mir_fn(tcx, node_id, mir, &mut file, auxiliary));\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'tcx, I>(tcx: &TyCtxt<'tcx>,\n iter: I,\n w: &mut Write)\n -> io::Result<()>\n where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a\n{\n for (&node_id, mir) in iter {\n write_mir_fn(tcx, node_id, mir, w, None)?;\n }\n Ok(())\n}\n\nenum Annotation {\n EnterScope(ScopeId),\n ExitScope(ScopeId),\n}\n\npub fn write_mir_fn<'tcx>(tcx: &TyCtxt<'tcx>,\n node_id: NodeId,\n mir: &Mir<'tcx>,\n w: &mut Write,\n auxiliary: Option<&ScopeAuxiliaryVec>)\n -> io::Result<()> {\n \/\/ compute scope\/entry exit annotations\n let mut annotations = FnvHashMap();\n if let Some(auxiliary) = auxiliary {\n for (index, auxiliary) in auxiliary.vec.iter().enumerate() {\n let scope_id = ScopeId::new(index);\n\n annotations.entry(auxiliary.dom)\n .or_insert(vec![])\n .push(Annotation::EnterScope(scope_id));\n\n for &loc in &auxiliary.postdoms {\n annotations.entry(loc)\n .or_insert(vec![])\n .push(Annotation::ExitScope(scope_id));\n }\n }\n }\n\n write_mir_intro(tcx, node_id, mir, w)?;\n for block in mir.all_basic_blocks() {\n write_basic_block(tcx, block, mir, w, &annotations)?;\n }\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FnvHashMap<Option<ScopeId>, Vec<ScopeId>> = FnvHashMap();\n for (index, scope_data) in mir.scopes.iter().enumerate() {\n scope_tree.entry(scope_data.parent_scope)\n .or_insert(vec![])\n 
.push(ScopeId::new(index));\n }\n write_scope_tree(tcx, mir, auxiliary, &scope_tree, w, None, 1)?;\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\nfn write_basic_block(tcx: &TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write,\n annotations: &FnvHashMap<Location, Vec<Annotation>>)\n -> io::Result<()> {\n let data = mir.basic_block_data(block);\n\n \/\/ Basic block label at the top.\n writeln!(w, \"\\n{}{:?}: {{\", INDENT, block)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n if let Some(ref annotations) = annotations.get(¤t_location) {\n for annotation in annotations.iter() {\n match *annotation {\n Annotation::EnterScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Enter Scope({1})\",\n INDENT, id.index())?,\n Annotation::ExitScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Exit Scope({1})\",\n INDENT, id.index())?,\n }\n }\n }\n\n writeln!(w, \"{0}{0}{1:?}; \/\/ {2}\",\n INDENT,\n statement,\n comment(tcx, statement.scope, statement.span))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n writeln!(w, \"{0}{0}{1:?}; \/\/ {2}\",\n INDENT,\n data.terminator().kind,\n comment(tcx, data.terminator().scope, data.terminator().span))?;\n\n writeln!(w, \"{}}}\", INDENT)\n}\n\nfn comment(tcx: &TyCtxt,\n scope: ScopeId,\n span: Span)\n -> String {\n format!(\"Scope({}) at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\nfn write_scope_tree(tcx: &TyCtxt,\n mir: &Mir,\n auxiliary: Option<&ScopeAuxiliaryVec>,\n scope_tree: &FnvHashMap<Option<ScopeId>, Vec<ScopeId>>,\n w: &mut Write,\n parent: Option<ScopeId>,\n depth: usize)\n -> io::Result<()> {\n for &child in scope_tree.get(&parent).unwrap_or(&vec![]) {\n let indent = depth * INDENT.len();\n let data = &mir.scopes[child];\n assert_eq!(data.parent_scope, parent);\n writeln!(w, \"{0:1$}Scope({2}) {{\", \"\", indent, child.index())?;\n\n let indent = indent + INDENT.len();\n if let Some(parent) = parent {\n writeln!(w, \"{0:1$}Parent: Scope({2})\", \"\", indent, parent.index())?;\n }\n\n if let Some(auxiliary) = auxiliary {\n let extent = auxiliary[child].extent;\n let data = tcx.region_maps.code_extent_data(extent);\n writeln!(w, \"{0:1$}Extent: {2:?}\", \"\", indent, data)?;\n }\n\n write_scope_tree(tcx, mir, auxiliary, scope_tree, w,\n Some(child), depth + 1)?;\n }\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation of the MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\nfn write_mir_intro(tcx: &TyCtxt, nid: NodeId, mir: &Mir, w: &mut Write)\n -> io::Result<()> {\n write!(w, \"fn {}(\", tcx.node_path_str(nid))?;\n\n \/\/ fn argument types.\n for (i, arg) in mir.arg_decls.iter().enumerate() {\n if i > 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Arg(i as u32), arg.ty)?;\n }\n\n write!(w, \") -> \")?;\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => write!(w, \"{}\", ty)?,\n ty::FnOutput::FnDiverging => write!(w, \"!\")?,\n }\n\n writeln!(w, \" {{\")?;\n\n \/\/ User variable types (including the user's name in a comment).\n for (i, var) in mir.var_decls.iter().enumerate() {\n write!(w, \"{}let \", INDENT)?;\n if var.mutability == Mutability::Mut {\n write!(w, \"mut \")?;\n }\n writeln!(w, \"{:?}: {}; \/\/ {}\", Lvalue::Var(i as u32), var.ty, var.name)?;\n }\n\n \/\/ 
Compiler-introduced temporary types.\n for (i, temp) in mir.temp_decls.iter().enumerate() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, Lvalue::Temp(i as u32), temp.ty)?;\n }\n\n Ok(())\n}\n<commit_msg>mir: print the scope and span for variables.<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse build::{Location, ScopeAuxiliaryVec};\nuse rustc::mir::repr::*;\nuse rustc::ty::{self, TyCtxt};\nuse rustc_data_structures::fnv::FnvHashMap;\nuse std::fmt::Display;\nuse std::fs;\nuse std::io::{self, Write};\nuse syntax::ast::NodeId;\nuse syntax::codemap::Span;\n\nconst INDENT: &'static str = \" \";\n\n\/\/\/ If the session is properly configured, dumps a human-readable\n\/\/\/ representation of the mir into:\n\/\/\/\n\/\/\/ ```\n\/\/\/ rustc.node<node_id>.<pass_name>.<disambiguator>\n\/\/\/ ```\n\/\/\/\n\/\/\/ Output from this function is controlled by passing `-Z dump-mir=<filter>`,\n\/\/\/ where `<filter>` takes the following forms:\n\/\/\/\n\/\/\/ - `all` -- dump MIR for all fns, all passes, all everything\n\/\/\/ - `substring1&substring2,...` -- `&`-separated list of substrings\n\/\/\/ that can appear in the pass-name or the `item_path_str` for the given\n\/\/\/ node-id. If any one of the substrings match, the data is dumped out.\npub fn dump_mir<'a, 'tcx>(tcx: &TyCtxt<'tcx>,\n pass_name: &str,\n disambiguator: &Display,\n node_id: NodeId,\n mir: &Mir<'tcx>,\n auxiliary: Option<&ScopeAuxiliaryVec>) {\n let filters = match tcx.sess.opts.debugging_opts.dump_mir {\n None => return,\n Some(ref filters) => filters,\n };\n let node_path = tcx.item_path_str(tcx.map.local_def_id(node_id));\n let is_matched =\n filters.split(\"&\")\n .any(|filter| {\n filter == \"all\" ||\n pass_name.contains(filter) ||\n node_path.contains(filter)\n });\n if !is_matched {\n return;\n }\n\n let file_name = format!(\"rustc.node{}.{}.{}.mir\",\n node_id, pass_name, disambiguator);\n let _ = fs::File::create(&file_name).and_then(|mut file| {\n try!(writeln!(file, \"\/\/ MIR for `{}`\", node_path));\n try!(writeln!(file, \"\/\/ node_id = {}\", node_id));\n try!(writeln!(file, \"\/\/ pass_name = {}\", pass_name));\n try!(writeln!(file, \"\/\/ disambiguator = {}\", disambiguator));\n try!(writeln!(file, \"\"));\n try!(write_mir_fn(tcx, node_id, mir, &mut file, auxiliary));\n Ok(())\n });\n}\n\n\/\/\/ Write out a human-readable textual representation for the given MIR.\npub fn write_mir_pretty<'a, 'tcx, I>(tcx: &TyCtxt<'tcx>,\n iter: I,\n w: &mut Write)\n -> io::Result<()>\n where I: Iterator<Item=(&'a NodeId, &'a Mir<'tcx>)>, 'tcx: 'a\n{\n for (&node_id, mir) in iter {\n write_mir_fn(tcx, node_id, mir, w, None)?;\n }\n Ok(())\n}\n\nenum Annotation {\n EnterScope(ScopeId),\n ExitScope(ScopeId),\n}\n\npub fn write_mir_fn<'tcx>(tcx: &TyCtxt<'tcx>,\n node_id: NodeId,\n mir: &Mir<'tcx>,\n w: &mut Write,\n auxiliary: Option<&ScopeAuxiliaryVec>)\n -> io::Result<()> {\n \/\/ compute scope\/entry exit annotations\n let mut annotations = FnvHashMap();\n if let Some(auxiliary) = auxiliary {\n for (index, auxiliary) in auxiliary.vec.iter().enumerate() {\n let scope_id = 
ScopeId::new(index);\n\n annotations.entry(auxiliary.dom)\n .or_insert(vec![])\n .push(Annotation::EnterScope(scope_id));\n\n for &loc in &auxiliary.postdoms {\n annotations.entry(loc)\n .or_insert(vec![])\n .push(Annotation::ExitScope(scope_id));\n }\n }\n }\n\n write_mir_intro(tcx, node_id, mir, w)?;\n for block in mir.all_basic_blocks() {\n write_basic_block(tcx, block, mir, w, &annotations)?;\n }\n\n \/\/ construct a scope tree and write it out\n let mut scope_tree: FnvHashMap<Option<ScopeId>, Vec<ScopeId>> = FnvHashMap();\n for (index, scope_data) in mir.scopes.iter().enumerate() {\n scope_tree.entry(scope_data.parent_scope)\n .or_insert(vec![])\n .push(ScopeId::new(index));\n }\n write_scope_tree(tcx, mir, auxiliary, &scope_tree, w, None, 1)?;\n\n writeln!(w, \"}}\")?;\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation for the given basic block.\nfn write_basic_block(tcx: &TyCtxt,\n block: BasicBlock,\n mir: &Mir,\n w: &mut Write,\n annotations: &FnvHashMap<Location, Vec<Annotation>>)\n -> io::Result<()> {\n let data = mir.basic_block_data(block);\n\n \/\/ Basic block label at the top.\n writeln!(w, \"\\n{}{:?}: {{\", INDENT, block)?;\n\n \/\/ List of statements in the middle.\n let mut current_location = Location { block: block, statement_index: 0 };\n for statement in &data.statements {\n if let Some(ref annotations) = annotations.get(¤t_location) {\n for annotation in annotations.iter() {\n match *annotation {\n Annotation::EnterScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Enter Scope({1})\",\n INDENT, id.index())?,\n Annotation::ExitScope(id) =>\n writeln!(w, \"{0}{0}\/\/ Exit Scope({1})\",\n INDENT, id.index())?,\n }\n }\n }\n\n writeln!(w, \"{0}{0}{1:?}; \/\/ {2}\",\n INDENT,\n statement,\n comment(tcx, statement.scope, statement.span))?;\n\n current_location.statement_index += 1;\n }\n\n \/\/ Terminator at the bottom.\n writeln!(w, \"{0}{0}{1:?}; \/\/ {2}\",\n INDENT,\n data.terminator().kind,\n comment(tcx, data.terminator().scope, data.terminator().span))?;\n\n writeln!(w, \"{}}}\", INDENT)\n}\n\nfn comment(tcx: &TyCtxt,\n scope: ScopeId,\n span: Span)\n -> String {\n format!(\"Scope({}) at {}\", scope.index(), tcx.sess.codemap().span_to_string(span))\n}\n\nfn write_scope_tree(tcx: &TyCtxt,\n mir: &Mir,\n auxiliary: Option<&ScopeAuxiliaryVec>,\n scope_tree: &FnvHashMap<Option<ScopeId>, Vec<ScopeId>>,\n w: &mut Write,\n parent: Option<ScopeId>,\n depth: usize)\n -> io::Result<()> {\n for &child in scope_tree.get(&parent).unwrap_or(&vec![]) {\n let indent = depth * INDENT.len();\n let data = &mir.scopes[child];\n assert_eq!(data.parent_scope, parent);\n writeln!(w, \"{0:1$}Scope({2}) {{\", \"\", indent, child.index())?;\n\n let indent = indent + INDENT.len();\n if let Some(parent) = parent {\n writeln!(w, \"{0:1$}Parent: Scope({2})\", \"\", indent, parent.index())?;\n }\n\n if let Some(auxiliary) = auxiliary {\n let extent = auxiliary[child].extent;\n let data = tcx.region_maps.code_extent_data(extent);\n writeln!(w, \"{0:1$}Extent: {2:?}\", \"\", indent, data)?;\n }\n\n write_scope_tree(tcx, mir, auxiliary, scope_tree, w,\n Some(child), depth + 1)?;\n }\n Ok(())\n}\n\n\/\/\/ Write out a human-readable textual representation of the MIR's `fn` type and the types of its\n\/\/\/ local variables (both user-defined bindings and compiler temporaries).\nfn write_mir_intro(tcx: &TyCtxt, nid: NodeId, mir: &Mir, w: &mut Write)\n -> io::Result<()> {\n write!(w, \"fn {}(\", tcx.node_path_str(nid))?;\n\n \/\/ fn argument types.\n for (i, arg) in 
mir.arg_decls.iter().enumerate() {\n if i > 0 {\n write!(w, \", \")?;\n }\n write!(w, \"{:?}: {}\", Lvalue::Arg(i as u32), arg.ty)?;\n }\n\n write!(w, \") -> \")?;\n\n \/\/ fn return type.\n match mir.return_ty {\n ty::FnOutput::FnConverging(ty) => write!(w, \"{}\", ty)?,\n ty::FnOutput::FnDiverging => write!(w, \"!\")?,\n }\n\n writeln!(w, \" {{\")?;\n\n \/\/ User variable types (including the user's name in a comment).\n for (i, var) in mir.var_decls.iter().enumerate() {\n write!(w, \"{}let \", INDENT)?;\n if var.mutability == Mutability::Mut {\n write!(w, \"mut \")?;\n }\n writeln!(w, \"{:?}: {}; \/\/ {} in {}\",\n Lvalue::Var(i as u32),\n var.ty,\n var.name,\n comment(tcx, var.scope, var.span))?;\n }\n\n \/\/ Compiler-introduced temporary types.\n for (i, temp) in mir.temp_decls.iter().enumerate() {\n writeln!(w, \"{}let mut {:?}: {};\", INDENT, Lvalue::Temp(i as u32), temp.ty)?;\n }\n\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Serialize trait for USB module to make serializing simpler<commit_after>use core::mem::{transmute, size_of};\nuse core::intrinsics::copy_nonoverlapping;\n\npub unsafe trait Serialize: Sized {\n fn serialize(&self, buffer: &mut [u8]) -> usize {\n let length = if buffer.len() < size_of::<Self>() {\n buffer.len()\n } else {\n size_of::<Self>()\n };\n\n unsafe {\n copy_nonoverlapping(transmute(self), buffer.as_mut_ptr(), length);\n }\n length\n }\n}\n\nunsafe impl Serialize for u8 {}\nunsafe impl Serialize for u16 {}\nunsafe impl Serialize for u32 {}\nunsafe impl Serialize for u64 {}\nunsafe impl Serialize for usize {}\nunsafe impl Serialize for i8 {}\nunsafe impl Serialize for i16 {}\nunsafe impl Serialize for i32 {}\nunsafe impl Serialize for i64 {}\nunsafe impl Serialize for isize {}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>continued iterator_provider cleanup<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>x async<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add Store type for store<commit_after>use std::path::PathBuf;\nuse std::result::Result as RResult;\n\npub use entry::Entry;\npub use error::StoreError;\n\npub type Result<T> = RResult<T, StoreError>;\n\npub struct Store {\n location: PathBuf,\n}\n\nimpl Store {\n\n pub fn create(entry: Entry) -> Result<()> {\n unimplemented!()\n }\n\n pub fn read(path: PathBuf) -> Result<Entry> {\n unimplemented!()\n }\n\n pub fn update(entry: Entry) -> Result<()> {\n unimplemented!()\n }\n\n pub fn delete(path: PathBuf) -> Result<()> {\n unimplemented!()\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add a test for fixed issue #11844<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n let a = Some(box 1);\n match a {\n Ok(a) => \/\/~ ERROR: mismatched types\n println!(\"{}\",a), \/\/~ ERROR: failed to find an implementation of trait\n None => fail!()\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>regression test for #37665<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z unstable-options --unpretty=mir\n\nuse std::path::MAIN_SEPARATOR;\n\nfn main() {\n let mut foo : String = \"hello\".to_string();\n foo.push(MAIN_SEPARATOR);\n println!(\"{}\", foo);\n let x: () = 0; \/\/~ ERROR: mismatched types\n}\n<|endoftext|>"} {"text":"<commit_before>use au = gfx::geometry;\nuse core::dlist::DList;\nuse css::values::{BoxAuto, BoxLength, Px};\nuse dl = gfx::display_list;\nuse dom::rcu;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::geometry::au;\nuse layout::box::{RenderBox, RenderBoxTree, ImageBox, TextBox, GenericBox};\nuse layout::flow::{FlowContext, InlineFlow};\nuse layout::context::LayoutContext;\nuse num::Num;\nuse util::tree;\n\n\/*\nTentative design: (may not line up with reality)\n\nLineboxes are represented as offsets into the child list, rather than\nas an object that \"owns\" boxes. Choosing a different set of line\nbreaks requires a new list of offsets, and possibly some splitting and\nmerging of TextBoxes.\n\nA similar list will keep track of the mapping between CSS boxes and\nthe corresponding render boxes in the inline flow.\n\nAfter line breaks are determined, lender boxes in the inline flow may\noverlap visually. For example, in the case of nested inline CSS boxes,\nouter inlines must be at least as large as the inner inlines, for\npurposes of drawing noninherited things like backgrounds, borders,\noutlines.\n\nN.B. roc has an alternative design where the list instead consists of\nthings like \"start outer box, text, start inner box, text, end inner\nbox, text, end outer box, text\". This seems a little complicated to\nserve as the starting point, but the current design doesn't make it\nhard to try out that alternative.\n*\/\n\nstruct InlineFlowData {\n boxes: ~DList<@RenderBox>\n}\n\nfn InlineFlowData() -> InlineFlowData {\n InlineFlowData {\n boxes: ~DList()\n }\n}\n\ntrait InlineLayout {\n pure fn starts_inline_flow() -> bool;\n\n fn bubble_widths_inline(ctx: &LayoutContext);\n fn assign_widths_inline(ctx: &LayoutContext);\n fn assign_height_inline(ctx: &LayoutContext);\n fn build_display_list_inline(a: &dl::DisplayListBuilder, b: &Rect<au>, c: &Point2D<au>, d: &dl::DisplayList);\n}\n\nimpl FlowContext : InlineLayout {\n pure fn starts_inline_flow() -> bool { match self { InlineFlow(*) => true, _ => false } }\n\n fn bubble_widths_inline(_ctx: &LayoutContext) {\n assert self.starts_inline_flow();\n\n let mut min_width = au(0);\n let mut pref_width = au(0);\n\n for self.inline().boxes.each |box| {\n min_width = au::max(min_width, box.get_min_width());\n pref_width = au::max(pref_width, box.get_pref_width());\n }\n\n self.d().min_width = min_width;\n self.d().pref_width = pref_width;\n }\n\n \/* Recursively (top-down) determines the actual width of child\n contexts and boxes. When called on this context, the context has\n had its width set by the parent context. *\/\n fn assign_widths_inline(ctx: &LayoutContext) {\n assert self.starts_inline_flow();\n\n \/* Perform inline flow with the available width. 
*\/\n \/\/let avail_width = self.d().position.size.width;\n\n let line_height = au::from_px(20);\n \/\/let mut cur_x = au(0);\n let mut cur_y = au(0);\n \n for self.inline().boxes.each |box| {\n \/* TODO: actually do inline flow.\n - Create a working linebox, and successively put boxes\n into it, splitting if necessary.\n \n - Set width and height for each positioned element based on \n where its chunks ended up.\n\n - Save the dvec of this context's lineboxes. *\/\n\n \/* hack: until text box splitting is hoisted into this\n function, force \"reflow\" on TextBoxes. *\/\n match *box {\n @TextBox(*) => box.reflow_text(ctx),\n _ => {}\n }\n\n box.d().position.size.width = match *box {\n @ImageBox(_,img) => au::from_px(img.get_size().get_default(Size2D(0,0)).width),\n @TextBox(_,d) => d.runs[0].size().width,\n \/\/ TODO: this should be set to the extents of its children\n @GenericBox(*) => au(0)\n };\n\n box.d().position.size.height = match *box {\n @ImageBox(_,img) => au::from_px(img.get_size().get_default(Size2D(0,0)).height),\n @TextBox(_,d) => d.runs[0].size().height,\n \/\/ TODO: this should be set to the extents of its children\n @GenericBox(*) => au(0)\n };\n\n box.d().position.origin = Point2D(au(0), cur_y);\n cur_y = cur_y.add(au::max(line_height, box.d().position.size.height));\n } \/\/ for boxes.each |box|\n\n self.d().position.size.height = cur_y;\n \n \/* There are no child contexts, so stop here. *\/\n\n \/\/ TODO: once there are 'inline-block' elements, this won't be\n \/\/ true. In that case, perform inline flow, and then set the\n \/\/ block flow context's width as the width of the\n \/\/ 'inline-block' box that created this flow.\n }\n\n fn assign_height_inline(_ctx: &LayoutContext) {\n \/\/ Don't need to set box or ctx heights, since that is done\n \/\/ during inline flowing.\n }\n\n fn build_display_list_inline(builder: &dl::DisplayListBuilder, dirty: &Rect<au>, \n offset: &Point2D<au>, list: &dl::DisplayList) {\n\n assert self.starts_inline_flow();\n\n \/\/ TODO: if the CSS box introducing this inline context is *not* anonymous,\n \/\/ we need to draw it too, in a way similar to BlowFlowContext\n\n \/\/ TODO: once we form line boxes and have their cached bounds, we can be \n \/\/ smarter and not recurse on a line if nothing in it can intersect dirty\n for self.inline().boxes.each |box| {\n box.build_display_list(builder, dirty, offset, list)\n }\n\n \/\/ TODO: should inline-block elements have flows as children\n \/\/ of the inline flow, or should the flow be nested inside the\n \/\/ box somehow? Maybe it's best to unify flows and boxes into\n \/\/ the same enum, so inline-block flows are normal\n \/\/ (indivisible) children in the inline flow child list.\n }\n\n} \/\/ @FlowContext : InlineLayout\n<commit_msg>More inline flow struct refactor<commit_after>use au = gfx::geometry;\nuse core::dlist::DList;\nuse css::values::{BoxAuto, BoxLength, Px};\nuse dl = gfx::display_list;\nuse dom::rcu;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::geometry::au;\nuse layout::box::{RenderBox, RenderBoxTree, ImageBox, TextBox, GenericBox};\nuse layout::flow::{FlowContext, InlineFlow};\nuse layout::context::LayoutContext;\nuse num::Num;\nuse util::tree;\n\n\/*\nTentative design: (may not line up with reality)\n\nLineboxes are represented as offsets into the child list, rather than\nas an object that \"owns\" boxes. 
Choosing a different set of line\nbreaks requires a new list of offsets, and possibly some splitting and\nmerging of TextBoxes.\n\nA similar list will keep track of the mapping between CSS boxes and\nthe corresponding render boxes in the inline flow.\n\nAfter line breaks are determined, render boxes in the inline flow may\noverlap visually. For example, in the case of nested inline CSS boxes,\nouter inlines must be at least as large as the inner inlines, for\npurposes of drawing noninherited things like backgrounds, borders,\noutlines.\n\nN.B. roc has an alternative design where the list instead consists of\nthings like \"start outer box, text, start inner box, text, end inner\nbox, text, end outer box, text\". This seems a little complicated to\nserve as the starting point, but the current design doesn't make it\nhard to try out that alternative.\n*\/\n\ntype BoxRange = {start: u8, len: u8};\n\nstruct InlineFlowData {\n \/\/ A flat list of all inline render boxes. Several boxes may\n \/\/ correspond to one Node\/Element.\n boxes: DList<@RenderBox>,\n \/\/ vec of ranges into boxes that represents line positions.\n \/\/ these ranges are disjoint, and are the result of inline layout.\n lines: DVec<BoxRange>,\n \/\/ vec of ranges into boxes that represent elements. These\n \/\/ ranges must be disjoint or well-nested, and are only related to\n \/\/ the content of boxes (not lines)\n elems: DVec<BoxRange>\n}\n\nfn InlineFlowData() -> InlineFlowData {\n InlineFlowData {\n boxes: DList(),\n lines: DVec(),\n elems: DVec()\n }\n}\n\ntrait InlineLayout {\n pure fn starts_inline_flow() -> bool;\n\n fn bubble_widths_inline(ctx: &LayoutContext);\n fn assign_widths_inline(ctx: &LayoutContext);\n fn assign_height_inline(ctx: &LayoutContext);\n fn build_display_list_inline(a: &dl::DisplayListBuilder, b: &Rect<au>, c: &Point2D<au>, d: &dl::DisplayList);\n}\n\nimpl FlowContext : InlineLayout {\n pure fn starts_inline_flow() -> bool { match self { InlineFlow(*) => true, _ => false } }\n\n fn bubble_widths_inline(_ctx: &LayoutContext) {\n assert self.starts_inline_flow();\n\n let mut min_width = au(0);\n let mut pref_width = au(0);\n\n for self.inline().boxes.each |box| {\n min_width = au::max(min_width, box.get_min_width());\n pref_width = au::max(pref_width, box.get_pref_width());\n }\n\n self.d().min_width = min_width;\n self.d().pref_width = pref_width;\n }\n\n \/* Recursively (top-down) determines the actual width of child\n contexts and boxes. When called on this context, the context has\n had its width set by the parent context. *\/\n fn assign_widths_inline(ctx: &LayoutContext) {\n assert self.starts_inline_flow();\n\n \/* Perform inline flow with the available width. *\/\n \/\/let avail_width = self.d().position.size.width;\n\n let line_height = au::from_px(20);\n \/\/let mut cur_x = au(0);\n let mut cur_y = au(0);\n \n for self.inline().boxes.each |box| {\n \/* TODO: actually do inline flow.\n - Create a working linebox, and successively put boxes\n into it, splitting if necessary.\n \n - Set width and height for each positioned element based on \n where its chunks ended up.\n\n - Save the dvec of this context's lineboxes. *\/\n\n \/* hack: until text box splitting is hoisted into this\n function, force \"reflow\" on TextBoxes. 
*\/\n match *box {\n @TextBox(*) => box.reflow_text(ctx),\n _ => {}\n }\n\n box.d().position.size.width = match *box {\n @ImageBox(_,img) => au::from_px(img.get_size().get_default(Size2D(0,0)).width),\n @TextBox(_,d) => d.runs[0].size().width,\n \/\/ TODO: this should be set to the extents of its children\n @GenericBox(*) => au(0)\n };\n\n box.d().position.size.height = match *box {\n @ImageBox(_,img) => au::from_px(img.get_size().get_default(Size2D(0,0)).height),\n @TextBox(_,d) => d.runs[0].size().height,\n \/\/ TODO: this should be set to the extents of its children\n @GenericBox(*) => au(0)\n };\n\n box.d().position.origin = Point2D(au(0), cur_y);\n cur_y = cur_y.add(au::max(line_height, box.d().position.size.height));\n } \/\/ for boxes.each |box|\n\n self.d().position.size.height = cur_y;\n \n \/* There are no child contexts, so stop here. *\/\n\n \/\/ TODO: once there are 'inline-block' elements, this won't be\n \/\/ true. In that case, perform inline flow, and then set the\n \/\/ block flow context's width as the width of the\n \/\/ 'inline-block' box that created this flow.\n }\n\n fn assign_height_inline(_ctx: &LayoutContext) {\n \/\/ Don't need to set box or ctx heights, since that is done\n \/\/ during inline flowing.\n }\n\n fn build_display_list_inline(builder: &dl::DisplayListBuilder, dirty: &Rect<au>, \n offset: &Point2D<au>, list: &dl::DisplayList) {\n\n assert self.starts_inline_flow();\n\n \/\/ TODO: if the CSS box introducing this inline context is *not* anonymous,\n \/\/ we need to draw it too, in a way similar to BlowFlowContext\n\n \/\/ TODO: once we form line boxes and have their cached bounds, we can be \n \/\/ smarter and not recurse on a line if nothing in it can intersect dirty\n for self.inline().boxes.each |box| {\n box.build_display_list(builder, dirty, offset, list)\n }\n\n \/\/ TODO: should inline-block elements have flows as children\n \/\/ of the inline flow, or should the flow be nested inside the\n \/\/ box somehow? 
Maybe it's best to unify flows and boxes into\n \/\/ the same enum, so inline-block flows are normal\n \/\/ (indivisible) children in the inline flow child list.\n }\n\n} \/\/ @FlowContext : InlineLayout\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make Address public<commit_after><|endoftext|>"} {"text":"<commit_before>use std::collections::HashMap;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse serde_json::{Value, from_str};\nuse serde_json::error::Result as R;\nuse serde_json::Serializer;\nuse serde::ser::Serialize;\nuse serde::ser::Serializer as Ser;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct JsonHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl JsonHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> JsonHeaderParser {\n JsonHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for JsonHeaderParser {\n\n fn read(&self, string: Option<String>)\n -> Result<FileHeaderData, ParserError>\n {\n if string.is_some() {\n let s = string.unwrap();\n debug!(\"Deserializing: {}\", s);\n let fromstr : R<Value> = from_str(&s[..]);\n if let Ok(ref content) = fromstr {\n return Ok(visit_json(&content))\n }\n let oe = fromstr.err().unwrap();\n let s = format!(\"JSON parser error: {}\", oe.description());\n let e = ParserError::short(&s[..], s.clone(), 0);\n Err(e)\n } else {\n Ok(FileHeaderData::Null)\n }\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n let mut s = Vec::<u8>::new();\n {\n let mut ser = Serializer::pretty(&mut s);\n data.serialize(&mut ser).map_err(|e| {\n debug!(\"Serializer error: {:?}\", e);\n }).ok();\n }\n\n String::from_utf8(s).or(\n Err(ParserError::short(\"Cannot parse utf8 bytes\",\n String::from(\"<not printable>\"),\n 0)))\n }\n\n}\n\n\/\/ TODO: This function must be able to return a parser error\nfn visit_json(v: &Value) -> FileHeaderData {\n match v {\n &Value::Null => FileHeaderData::Null,\n &Value::Bool(b) => FileHeaderData::Bool(b),\n &Value::I64(i) => FileHeaderData::Integer(i),\n &Value::U64(u) => FileHeaderData::UInteger(u),\n &Value::F64(f) => FileHeaderData::Float(f),\n &Value::String(ref s) => FileHeaderData::Text(s.clone()),\n &Value::Array(ref vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_json(&i)).collect())\n }\n },\n &Value::Object(ref btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: k,\n value: Box::new(visit_json(&v)),\n }\n ).collect()\n }\n }\n }\n}\n\nimpl Serialize for FileHeaderData {\n\n fn serialize<S>(&self, ser: &mut S) -> Result<(), S::Error>\n where S: Ser\n {\n match self {\n &FileHeaderData::Null => {\n let o : Option<bool> = None;\n o.serialize(ser)\n },\n &FileHeaderData::Bool(ref b) => b.serialize(ser),\n &FileHeaderData::Integer(ref i) => i.serialize(ser),\n &FileHeaderData::UInteger(ref u) => u.serialize(ser),\n &FileHeaderData::Float(ref f) => f.serialize(ser),\n &FileHeaderData::Text(ref s) => (&s[..]).serialize(ser),\n 
&FileHeaderData::Array{values: ref vs} => vs.serialize(ser),\n &FileHeaderData::Map{keys: ref ks} => {\n let mut hm = HashMap::new();\n\n for key in ks {\n if let &FileHeaderData::Key{name: ref n, value: ref v} = key {\n hm.insert(n, v);\n } else {\n panic!(\"Not a key: {:?}\", key);\n }\n }\n\n hm.serialize(ser)\n },\n &FileHeaderData::Key{name: _, value: _} => unreachable!(),\n\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use std::ops::Deref;\n\n use super::JsonHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys}) => {\n for k in keys {\n match k {\n FHD::Key{name, value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n &FHD::Integer(i) => assert_eq!(i, -2),\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n #[test]\n fn test_deserialization_without_spec() {\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n let parsed = parser.read(Some(text));\n\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys}) => {\n for k in keys {\n match_key(&k);\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n fn match_key(k: &FHD) {\n use std::ops::Deref;\n\n match k {\n &FHD::Key{ref name, ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::Array{values: ref vs} => {\n for value in vs.iter() {\n match value {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n _ => assert!(false, \"UInt is not an UInt\"),\n }\n }\n }\n\n &FHD::Map{keys: ref ks} => {\n for key in ks.iter() {\n match key {\n &FHD::Key{name: ref name, value: ref value} => {\n match value.deref() {\n &FHD::Integer(i) => {\n assert_eq!(i, -2);\n assert_eq!(name, \"c\");\n },\n _ => assert!(false, \"Int is not an Int\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n }\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key in main Map is not a Key\"),\n }\n }\n\n #[test]\n fn test_desser() {\n use serde_json::error::Result as R;\n use serde_json::{Value, from_str};\n\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n\n let des = parser.read(Some(text.clone()));\n assert!(des.is_ok(), \"Deserializing failed\");\n\n let ser = parser.write(&des.unwrap());\n assert!(ser.is_ok(), \"Parser error when serializing deserialized text\");\n\n let json_text : R<Value> = from_str(&text[..]);\n let json_ser : R<Value> = from_str(&ser.unwrap()[..]);\n\n assert!(json_text.is_ok(), \"Could not use serde to serialize text for comparison\");\n assert!(json_ser.is_ok(), \"Could not use serde to serialize 
serialized-deserialized text for comparison\");\n assert_eq!(json_text.unwrap(), json_ser.unwrap());\n }\n\n}\n<commit_msg>Remove unneeded shorthand field pattern<commit_after>use std::collections::HashMap;\nuse std::error::Error;\nuse std::fmt::{Debug, Display, Formatter};\nuse std::fmt;\n\nuse serde_json::{Value, from_str};\nuse serde_json::error::Result as R;\nuse serde_json::Serializer;\nuse serde::ser::Serialize;\nuse serde::ser::Serializer as Ser;\n\nuse storage::parser::{FileHeaderParser, ParserError};\nuse storage::file::header::spec::FileHeaderSpec;\nuse storage::file::header::data::FileHeaderData;\n\npub struct JsonHeaderParser {\n spec: Option<FileHeaderSpec>,\n}\n\nimpl JsonHeaderParser {\n\n pub fn new(spec: Option<FileHeaderSpec>) -> JsonHeaderParser {\n JsonHeaderParser {\n spec: spec\n }\n }\n\n}\n\nimpl Display for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser\"));\n Ok(())\n }\n\n}\n\nimpl Debug for JsonHeaderParser {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n try!(write!(fmt, \"JsonHeaderParser, Spec: {:?}\", self.spec));\n Ok(())\n }\n\n}\n\nimpl FileHeaderParser for JsonHeaderParser {\n\n fn read(&self, string: Option<String>)\n -> Result<FileHeaderData, ParserError>\n {\n if string.is_some() {\n let s = string.unwrap();\n debug!(\"Deserializing: {}\", s);\n let fromstr : R<Value> = from_str(&s[..]);\n if let Ok(ref content) = fromstr {\n return Ok(visit_json(&content))\n }\n let oe = fromstr.err().unwrap();\n let s = format!(\"JSON parser error: {}\", oe.description());\n let e = ParserError::short(&s[..], s.clone(), 0);\n Err(e)\n } else {\n Ok(FileHeaderData::Null)\n }\n }\n\n fn write(&self, data: &FileHeaderData) -> Result<String, ParserError> {\n let mut s = Vec::<u8>::new();\n {\n let mut ser = Serializer::pretty(&mut s);\n data.serialize(&mut ser).map_err(|e| {\n debug!(\"Serializer error: {:?}\", e);\n }).ok();\n }\n\n String::from_utf8(s).or(\n Err(ParserError::short(\"Cannot parse utf8 bytes\",\n String::from(\"<not printable>\"),\n 0)))\n }\n\n}\n\n\/\/ TODO: This function must be able to return a parser error\nfn visit_json(v: &Value) -> FileHeaderData {\n match v {\n &Value::Null => FileHeaderData::Null,\n &Value::Bool(b) => FileHeaderData::Bool(b),\n &Value::I64(i) => FileHeaderData::Integer(i),\n &Value::U64(u) => FileHeaderData::UInteger(u),\n &Value::F64(f) => FileHeaderData::Float(f),\n &Value::String(ref s) => FileHeaderData::Text(s.clone()),\n &Value::Array(ref vec) => {\n FileHeaderData::Array {\n values: Box::new(vec.clone().into_iter().map(|i| visit_json(&i)).collect())\n }\n },\n &Value::Object(ref btree) => {\n let btree = btree.clone();\n FileHeaderData::Map{\n keys: btree.into_iter().map(|(k, v)|\n FileHeaderData::Key {\n name: k,\n value: Box::new(visit_json(&v)),\n }\n ).collect()\n }\n }\n }\n}\n\nimpl Serialize for FileHeaderData {\n\n fn serialize<S>(&self, ser: &mut S) -> Result<(), S::Error>\n where S: Ser\n {\n match self {\n &FileHeaderData::Null => {\n let o : Option<bool> = None;\n o.serialize(ser)\n },\n &FileHeaderData::Bool(ref b) => b.serialize(ser),\n &FileHeaderData::Integer(ref i) => i.serialize(ser),\n &FileHeaderData::UInteger(ref u) => u.serialize(ser),\n &FileHeaderData::Float(ref f) => f.serialize(ser),\n &FileHeaderData::Text(ref s) => (&s[..]).serialize(ser),\n &FileHeaderData::Array{values: ref vs} => vs.serialize(ser),\n &FileHeaderData::Map{keys: ref ks} => {\n let mut hm = HashMap::new();\n\n for key in ks {\n if let 
&FileHeaderData::Key{name: ref n, value: ref v} = key {\n hm.insert(n, v);\n } else {\n panic!(\"Not a key: {:?}\", key);\n }\n }\n\n hm.serialize(ser)\n },\n &FileHeaderData::Key{name: _, value: _} => unreachable!(),\n\n }\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use std::ops::Deref;\n\n use super::JsonHeaderParser;\n use storage::parser::FileHeaderParser;\n use storage::file::header::data::FileHeaderData as FHD;\n use storage::file::header::spec::FileHeaderSpec as FHS;\n\n #[test]\n fn test_deserialization() {\n let text = String::from(\"{\\\"a\\\": 1, \\\"b\\\": -2}\");\n let spec = FHS::Map {\n keys: vec![\n FHS::Key {\n name: String::from(\"a\"),\n value_type: Box::new(FHS::UInteger)\n },\n FHS::Key {\n name: String::from(\"b\"),\n value_type: Box::new(FHS::Integer)\n }\n ]\n };\n\n let parser = JsonHeaderParser::new(Some(spec));\n let parsed = parser.read(Some(text));\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys}) => {\n for k in keys {\n match k {\n FHD::Key{name, value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n &FHD::Integer(i) => assert_eq!(i, -2),\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n #[test]\n fn test_deserialization_without_spec() {\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n let parsed = parser.read(Some(text));\n\n assert!(parsed.is_ok(), \"Parsed is not ok: {:?}\", parsed);\n\n match parsed.ok() {\n Some(FHD::Map{keys}) => {\n for k in keys {\n match_key(&k);\n }\n },\n\n _ => assert!(false, \"Parsed is not a map\"),\n }\n }\n\n fn match_key(k: &FHD) {\n use std::ops::Deref;\n\n match k {\n &FHD::Key{ref name, ref value} => {\n assert!(name == \"a\" || name == \"b\", \"Key unknown\");\n match value.deref() {\n &FHD::Array{values: ref vs} => {\n for value in vs.iter() {\n match value {\n &FHD::UInteger(u) => assert_eq!(u, 1),\n _ => assert!(false, \"UInt is not an UInt\"),\n }\n }\n }\n\n &FHD::Map{keys: ref ks} => {\n for key in ks.iter() {\n match key {\n &FHD::Key{ref name, ref value} => {\n match value.deref() {\n &FHD::Integer(i) => {\n assert_eq!(i, -2);\n assert_eq!(name, \"c\");\n },\n _ => assert!(false, \"Int is not an Int\"),\n };\n },\n _ => assert!(false, \"Key is not a Key\"),\n }\n }\n }\n _ => assert!(false, \"Integers are not here\"),\n }\n },\n _ => assert!(false, \"Key in main Map is not a Key\"),\n }\n }\n\n #[test]\n fn test_desser() {\n use serde_json::error::Result as R;\n use serde_json::{Value, from_str};\n\n let text = String::from(\"{\\\"a\\\": [1], \\\"b\\\": {\\\"c\\\": -2}}\");\n let parser = JsonHeaderParser::new(None);\n\n let des = parser.read(Some(text.clone()));\n assert!(des.is_ok(), \"Deserializing failed\");\n\n let ser = parser.write(&des.unwrap());\n assert!(ser.is_ok(), \"Parser error when serializing deserialized text\");\n\n let json_text : R<Value> = from_str(&text[..]);\n let json_ser : R<Value> = from_str(&ser.unwrap()[..]);\n\n assert!(json_text.is_ok(), \"Could not use serde to serialize text for comparison\");\n assert!(json_ser.is_ok(), \"Could not use serde to serialize serialized-deserialized text for comparison\");\n assert_eq!(json_text.unwrap(), json_ser.unwrap());\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before>use flexi_logger::{LogSpecification, 
Logger};\nuse itertools::Itertools;\nuse log::LevelFilter;\nuse log::{info, warn};\nuse qt_ritual::lib_configs::create_config;\nuse qt_ritual_common::all_crate_names;\nuse ritual::cluster_api::{Client, GroupKey, TaskOutput};\nuse ritual::cpp_checker::{LocalCppChecker, SnippetTask};\nuse ritual_common::errors::{format_err, FancyUnwrap, Result};\nuse ritual_common::file_utils::create_dir;\nuse ritual_common::target::current_target;\nuse std::collections::HashMap;\nuse std::env;\nuse tempdir::TempDir;\n\nconst QUEUE_ADDRESS_VAR: &str = \"QT_RITUAL_WORKER_QUEUE_ADDRESS\";\nconst RUN_TESTS_VAR: &str = \"QT_RITUAL_WORKER_RUN_TESTS\";\nconst QMAKE_PATH_VAR_PREFIX: &str = \"QT_RITUAL_QMAKE_\";\n\nstruct RemoteSnippetTaskData {\n id: u64,\n}\n\nfn run() -> Result<()> {\n Logger::with(LogSpecification::default(LevelFilter::Info).build())\n .start()\n .unwrap_or_else(|e| panic!(\"Logger initialization failed: {}\", e));\n\n let temp_dir = TempDir::new(\"qt_ritual_cluster_worker\")?;\n let supported_moqt_libs = [\"moqt_core\", \"moqt_gui\"].iter().map(|crate_name| {\n let lib = GroupKey {\n crate_name: crate_name.to_string(),\n cpp_library_version: None,\n };\n (lib, None)\n });\n\n let supported_qt_libs = env::vars()\n .filter(|(key, _value)| key.starts_with(QMAKE_PATH_VAR_PREFIX))\n .flat_map(|(key, value)| {\n let version = key[QMAKE_PATH_VAR_PREFIX.len()..].replace(\"_\", \".\");\n all_crate_names().iter().map(move |crate_name| {\n let lib = GroupKey {\n crate_name: crate_name.to_string(),\n cpp_library_version: Some(version.clone()),\n };\n (lib, Some(value.clone()))\n })\n });\n\n let supported_libs = supported_moqt_libs.chain(supported_qt_libs);\n\n let mut checkers = HashMap::new();\n let run_tests = env::var(RUN_TESTS_VAR).ok().map_or(false, |s| s == \"1\");\n if run_tests {\n info!(\"running tests\");\n }\n\n for (lib, qmake_path) in supported_libs {\n info!(\"lib: {:?}\", lib);\n let dir = temp_dir.path().join(format!(\n \"{}_{}\",\n lib.crate_name,\n lib.cpp_library_version\n .as_ref()\n .map(|s| s.as_str())\n .unwrap_or(\"noversion\")\n ));\n create_dir(&dir)?;\n\n let qmake_path = qmake_path.as_ref().map(|s| s.as_str());\n let config = create_config(&lib.crate_name, qmake_path)?;\n let checker = LocalCppChecker::new(dir, &config)?;\n let mut checker = checker.get(\"0\")?;\n if run_tests {\n checker.check_preliminary_tests()?;\n }\n checkers.insert(lib, checker);\n }\n if run_tests {\n info!(\"all tests passed\");\n return Ok(());\n }\n\n let queue_address = env::var(QUEUE_ADDRESS_VAR)\n .map_err(|err| format_err!(\"failed to get env var \\\"{}\\\": {}\", QUEUE_ADDRESS_VAR, err))?;\n info!(\"connecting to queue\");\n let mut client = Client::new(&queue_address, ¤t_target())?;\n info!(\"ready\");\n client.run(|task| {\n info!(\"received task: {:?}\", task);\n if let Some(checker) = checkers.get_mut(&task.group_key) {\n let mut snippets = task\n .snippets\n .into_iter()\n .map(|item| SnippetTask {\n snippet: item.snippet,\n data: RemoteSnippetTaskData { id: item.id },\n output: None,\n })\n .collect_vec();\n checker.binary_check(&mut snippets, None)?;\n let outputs = snippets\n .into_iter()\n .map(|snippet| TaskOutput {\n id: snippet.data.id,\n output: snippet.output.unwrap(),\n })\n .collect_vec();\n Ok(outputs)\n } else {\n warn!(\"unknown group key: {:?}\", task);\n Ok(Vec::new())\n }\n })?;\n Ok(())\n}\n\nfn main() {\n run().fancy_unwrap();\n}\n<commit_msg>skip moqt in cluster worker if it's not present<commit_after>use flexi_logger::{LogSpecification, Logger};\nuse 
itertools::Itertools;\nuse log::LevelFilter;\nuse log::{info, warn};\nuse qt_ritual::lib_configs::{create_config, MOQT_INSTALL_DIR_ENV_VAR_NAME};\nuse qt_ritual_common::all_crate_names;\nuse ritual::cluster_api::{Client, GroupKey, TaskOutput};\nuse ritual::cpp_checker::{LocalCppChecker, SnippetTask};\nuse ritual_common::errors::{format_err, FancyUnwrap, Result};\nuse ritual_common::file_utils::create_dir;\nuse ritual_common::target::current_target;\nuse std::collections::HashMap;\nuse std::env;\nuse tempdir::TempDir;\n\nconst QUEUE_ADDRESS_VAR: &str = \"QT_RITUAL_WORKER_QUEUE_ADDRESS\";\nconst RUN_TESTS_VAR: &str = \"QT_RITUAL_WORKER_RUN_TESTS\";\nconst QMAKE_PATH_VAR_PREFIX: &str = \"QT_RITUAL_QMAKE_\";\n\nstruct RemoteSnippetTaskData {\n id: u64,\n}\n\nfn run() -> Result<()> {\n Logger::with(LogSpecification::default(LevelFilter::Info).build())\n .start()\n .unwrap_or_else(|e| panic!(\"Logger initialization failed: {}\", e));\n\n let temp_dir = TempDir::new(\"qt_ritual_cluster_worker\")?;\n let moqt_present = env::var(MOQT_INSTALL_DIR_ENV_VAR_NAME).is_ok();\n let supported_moqt_libs = [\"moqt_core\", \"moqt_gui\"]\n .iter()\n .filter(|_| moqt_present)\n .map(|crate_name| {\n let lib = GroupKey {\n crate_name: crate_name.to_string(),\n cpp_library_version: None,\n };\n (lib, None)\n });\n\n let supported_qt_libs = env::vars()\n .filter(|(key, _value)| key.starts_with(QMAKE_PATH_VAR_PREFIX))\n .flat_map(|(key, value)| {\n let version = key[QMAKE_PATH_VAR_PREFIX.len()..].replace(\"_\", \".\");\n all_crate_names().iter().map(move |crate_name| {\n let lib = GroupKey {\n crate_name: crate_name.to_string(),\n cpp_library_version: Some(version.clone()),\n };\n (lib, Some(value.clone()))\n })\n });\n\n let supported_libs = supported_moqt_libs.chain(supported_qt_libs);\n\n let mut checkers = HashMap::new();\n let run_tests = env::var(RUN_TESTS_VAR).ok().map_or(false, |s| s == \"1\");\n if run_tests {\n info!(\"running tests\");\n }\n\n for (lib, qmake_path) in supported_libs {\n info!(\"lib: {:?}\", lib);\n let dir = temp_dir.path().join(format!(\n \"{}_{}\",\n lib.crate_name,\n lib.cpp_library_version\n .as_ref()\n .map(|s| s.as_str())\n .unwrap_or(\"noversion\")\n ));\n create_dir(&dir)?;\n\n let qmake_path = qmake_path.as_ref().map(|s| s.as_str());\n let config = create_config(&lib.crate_name, qmake_path)?;\n let checker = LocalCppChecker::new(dir, &config)?;\n let mut checker = checker.get(\"0\")?;\n if run_tests {\n checker.check_preliminary_tests()?;\n }\n checkers.insert(lib, checker);\n }\n if run_tests {\n info!(\"all tests passed\");\n return Ok(());\n }\n\n let queue_address = env::var(QUEUE_ADDRESS_VAR)\n .map_err(|err| format_err!(\"failed to get env var \\\"{}\\\": {}\", QUEUE_ADDRESS_VAR, err))?;\n info!(\"connecting to queue\");\n let mut client = Client::new(&queue_address, ¤t_target())?;\n info!(\"ready\");\n client.run(|task| {\n info!(\"received task: {:?}\", task);\n if let Some(checker) = checkers.get_mut(&task.group_key) {\n let mut snippets = task\n .snippets\n .into_iter()\n .map(|item| SnippetTask {\n snippet: item.snippet,\n data: RemoteSnippetTaskData { id: item.id },\n output: None,\n })\n .collect_vec();\n checker.binary_check(&mut snippets, None)?;\n let outputs = snippets\n .into_iter()\n .map(|snippet| TaskOutput {\n id: snippet.data.id,\n output: snippet.output.unwrap(),\n })\n .collect_vec();\n Ok(outputs)\n } else {\n warn!(\"unknown group key: {:?}\", task);\n Ok(Vec::new())\n }\n })?;\n Ok(())\n}\n\nfn main() {\n run().fancy_unwrap();\n}\n<|endoftext|>"} 
{"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::utils::Fallible;\nuse dom::document::AbstractDocument;\nuse dom::node::{ScriptView, Node, DocumentFragmentNodeTypeId};\nuse dom::node::{AbstractNode};\nuse dom::window::Window;\n\npub struct DocumentFragment {\n node: Node<ScriptView>,\n}\n\nimpl DocumentFragment {\n \/\/\/ Creates a new DocumentFragment.\n pub fn new(document: AbstractDocument) -> DocumentFragment {\n DocumentFragment {\n node: Node::new(DocumentFragmentNodeTypeId, document),\n }\n }\n\n pub fn Constructor(owner: @mut Window) -> Fallible<AbstractNode<ScriptView>> {\n let cx = owner.page.js_info.get_ref().js_compartment.cx.ptr;\n let fragment = @DocumentFragment::new(owner.Document());\n Ok(unsafe { Node::as_abstract_node(cx, fragment) })\n }\n}\n<commit_msg>Use Window::get_cx() in DocumentFragment::Constructor.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::utils::Fallible;\nuse dom::document::AbstractDocument;\nuse dom::node::{ScriptView, Node, DocumentFragmentNodeTypeId};\nuse dom::node::{AbstractNode};\nuse dom::window::Window;\n\npub struct DocumentFragment {\n node: Node<ScriptView>,\n}\n\nimpl DocumentFragment {\n \/\/\/ Creates a new DocumentFragment.\n pub fn new(document: AbstractDocument) -> DocumentFragment {\n DocumentFragment {\n node: Node::new(DocumentFragmentNodeTypeId, document),\n }\n }\n\n pub fn Constructor(owner: @mut Window) -> Fallible<AbstractNode<ScriptView>> {\n let cx = owner.get_cx();\n let fragment = @DocumentFragment::new(owner.Document());\n Ok(unsafe { Node::as_abstract_node(cx, fragment) })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[rust] size of empty struct is zero.<commit_after>use std::mem::size_of;\n\nstruct EmptyStruct;\n\nfn main() {\n println!(\"{}\", size_of::<EmptyStruct>()); \/\/ 0\n\n assert_eq!(size_of::<EmptyStruct>(), 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added pointer example<commit_after>const n: usize = (16*1024);\n\nstruct foo {\n chaMem: *mut [u32; n],\n}\n\nimpl foo {\n pub fn new() -> foo {\n let mem: *mut [u32; n] = unsafe { ::std::mem::transmute(Box::new([1; n])) };\n foo {chaMem: mem}\n }\n \n pub fn get_sig_ptr(&mut self, cha_signal: *mut *mut u32) {\n unsafe {\n *cha_signal = self.chaMem as *mut _;\n }\n }\n \n pub fn get_sig_ptr2(&mut self) -> *mut u32 {\n return self.chaMem as *mut _;\n }\n \n pub fn get_sig_ptr3(&mut self) -> *mut [u32; n] {\n return self.chaMem;\n }\n \n pub fn get_sig_ptr4(&mut self) -> &mut [u32; n] {\n unsafe {\n return &mut *(self.chaMem);\n }\n }\n}\n\n\/\/ This code is editable and runnable!\nfn main() {\n let mut f = foo::new();\n \n let mut cha_signal1: *mut u32 = ::std::ptr::null_mut();\n f.get_sig_ptr(&mut cha_signal1);\n \n for i in 1..n {\n println!(\"{}\", unsafe { *cha_signal1.offset(i as isize) }); \/\/ (*cha_signal)[i] \n }\n \n let mut cha_signal2: *mut u32 = f.get_sig_ptr2();\n \n for i in 1..n {\n println!(\"{}\", unsafe { *cha_signal2.offset(i as isize) });\n }\n \n let mut cha_signal3: *mut [u32; n] = f.get_sig_ptr3();\n \n for i in 1..n {\n println!(\"{}\", unsafe { (*cha_signal3)[i] });\n }\n \n let mut cha_signal4: &mut [u32; 
n] = f.get_sig_ptr4();\n \n for i in 1..n {\n println!(\"{}\", cha_signal4[i]);\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add codegen test for multiple `asm!` options<commit_after>\/\/ compile-flags: -O\n\/\/ only-x86_64\n\n#![crate_type = \"rlib\"]\n#![feature(asm)]\n\n\/\/ CHECK-LABEL: @pure\n\/\/ CHECK-NOT: asm\n\/\/ CHECK: ret void\n#[no_mangle]\npub unsafe fn pure(x: i32) {\n let y: i32;\n asm!(\"\", out(\"ax\") y, in(\"bx\") x, options(pure), options(nomem));\n}\n\npub static mut VAR: i32 = 0;\npub static mut DUMMY_OUTPUT: i32 = 0;\n\n\/\/ CHECK-LABEL: @readonly\n\/\/ CHECK: call i32 asm\n\/\/ CHECK: ret i32 1\n#[no_mangle]\npub unsafe fn readonly() -> i32 {\n VAR = 1;\n asm!(\"\", out(\"ax\") DUMMY_OUTPUT, options(pure), options(readonly));\n VAR\n}\n\n\/\/ CHECK-LABEL: @nomem\n\/\/ CHECK-NOT: store\n\/\/ CHECK: call i32 asm\n\/\/ CHECK: store\n\/\/ CHECK: ret i32 2\n#[no_mangle]\npub unsafe fn nomem() -> i32 {\n VAR = 1;\n asm!(\"\", out(\"ax\") DUMMY_OUTPUT, options(pure), options(nomem));\n VAR = 2;\n VAR\n}\n\n\/\/ CHECK-LABEL: @not_nomem\n\/\/ CHECK: store\n\/\/ CHECK: call i32 asm\n\/\/ CHECK: store\n\/\/ CHECK: ret i32 2\n#[no_mangle]\npub unsafe fn not_nomem() -> i32 {\n VAR = 1;\n asm!(\"\", out(\"ax\") DUMMY_OUTPUT, options(pure), options(readonly));\n VAR = 2;\n VAR\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>dsfsd<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test to make sure trans doesn't choke on any of the intrinsics<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(extended_compare_and_swap)]\nuse std::sync::atomic::{AtomicIsize, ATOMIC_ISIZE_INIT};\nuse std::sync::atomic::Ordering::*;\n\nstatic ATOMIC: AtomicIsize = ATOMIC_ISIZE_INIT;\n\nfn main() {\n \/\/ Make sure trans can emit all the intrinsics correctly\n ATOMIC.compare_exchange(0, 1, Relaxed, Relaxed);\n ATOMIC.compare_exchange(0, 1, Acquire, Relaxed);\n ATOMIC.compare_exchange(0, 1, Release, Relaxed);\n ATOMIC.compare_exchange(0, 1, AcqRel, Relaxed);\n ATOMIC.compare_exchange(0, 1, SeqCst, Relaxed);\n ATOMIC.compare_exchange(0, 1, Acquire, Acquire);\n ATOMIC.compare_exchange(0, 1, AcqRel, Acquire);\n ATOMIC.compare_exchange(0, 1, SeqCst, Acquire);\n ATOMIC.compare_exchange(0, 1, SeqCst, SeqCst);\n ATOMIC.compare_exchange_weak(0, 1, Relaxed, Relaxed);\n ATOMIC.compare_exchange_weak(0, 1, Acquire, Relaxed);\n ATOMIC.compare_exchange_weak(0, 1, Release, Relaxed);\n ATOMIC.compare_exchange_weak(0, 1, AcqRel, Relaxed);\n ATOMIC.compare_exchange_weak(0, 1, SeqCst, Relaxed);\n ATOMIC.compare_exchange_weak(0, 1, Acquire, Acquire);\n ATOMIC.compare_exchange_weak(0, 1, AcqRel, Acquire);\n ATOMIC.compare_exchange_weak(0, 1, SeqCst, Acquire);\n ATOMIC.compare_exchange_weak(0, 1, SeqCst, SeqCst);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue-58951<commit_after>\/\/ run-pass\n#![feature(existential_type)]\n\nexistential type A: Iterator;\nfn def_a() -> A { 0..1 }\npub fn use_a() {\n def_a().map(|x| x);\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries here, e.g. we just want a few components and a few\n \/\/ tools. 
Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\n<commit_msg>rustbuild: Fix compiler-rt build on gnueabihf<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::path::Path;\nuse std::process::Command;\nuse std::fs;\n\nuse build_helper::output;\nuse cmake;\n\nuse build::Build;\nuse build::util::{exe, staticlib};\n\npub fn llvm(build: &Build, target: &str) {\n \/\/ If we're using a custom LLVM bail out here, but we can only use a\n \/\/ custom LLVM for the build triple.\n if let Some(config) = build.config.target_config.get(target) {\n if let Some(ref s) = config.llvm_config {\n return check_llvm_version(build, s);\n }\n }\n\n \/\/ If the cleaning trigger is newer than our built artifacts (or if the\n \/\/ artifacts are missing) then we keep going, otherwise we bail out.\n let dst = build.llvm_out(target);\n let stamp = build.src.join(\"src\/rustllvm\/llvm-auto-clean-trigger\");\n let llvm_config = dst.join(\"bin\").join(exe(\"llvm-config\", target));\n build.clear_if_dirty(&dst, &stamp);\n if fs::metadata(llvm_config).is_ok() {\n return\n }\n\n let _ = fs::remove_dir_all(&dst.join(\"build\"));\n t!(fs::create_dir_all(&dst.join(\"build\")));\n let assertions = if build.config.llvm_assertions {\"ON\"} else {\"OFF\"};\n\n \/\/ http:\/\/llvm.org\/docs\/CMake.html\n let mut cfg = cmake::Config::new(build.src.join(\"src\/llvm\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(if build.config.llvm_optimize {\"Release\"} else {\"Debug\"})\n .define(\"LLVM_ENABLE_ASSERTIONS\", assertions)\n .define(\"LLVM_TARGETS_TO_BUILD\", \"X86;ARM;AArch64;Mips;PowerPC\")\n .define(\"LLVM_INCLUDE_EXAMPLES\", \"OFF\")\n .define(\"LLVM_INCLUDE_TESTS\", \"OFF\")\n .define(\"LLVM_INCLUDE_DOCS\", \"OFF\")\n .define(\"LLVM_ENABLE_ZLIB\", \"OFF\")\n .define(\"WITH_POLLY\", \"OFF\")\n .define(\"LLVM_ENABLE_TERMINFO\", \"OFF\")\n .define(\"LLVM_ENABLE_LIBEDIT\", \"OFF\")\n .define(\"LLVM_PARALLEL_COMPILE_JOBS\", build.jobs().to_string());\n\n if target.starts_with(\"i686\") {\n cfg.define(\"LLVM_BUILD_32_BITS\", \"ON\");\n }\n\n \/\/ http:\/\/llvm.org\/docs\/HowToCrossCompileLLVM.html\n if target != build.config.build {\n \/\/ FIXME: if the llvm root for the build triple is overridden then we\n \/\/ should use llvm-tblgen from there, also should verify that it\n \/\/ actually exists most of the time in normal installs of LLVM.\n let host = build.llvm_out(&build.config.build).join(\"bin\/llvm-tblgen\");\n cfg.define(\"CMAKE_CROSSCOMPILING\", \"True\")\n .define(\"LLVM_TARGET_ARCH\", target.split('-').next().unwrap())\n .define(\"LLVM_TABLEGEN\", &host)\n .define(\"LLVM_DEFAULT_TARGET_TRIPLE\", target);\n }\n\n \/\/ MSVC handles compiler business itself\n if !target.contains(\"msvc\") {\n if build.config.ccache {\n cfg.define(\"CMAKE_C_COMPILER\", \"ccache\")\n .define(\"CMAKE_C_COMPILER_ARG1\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", \"ccache\")\n .define(\"CMAKE_CXX_COMPILER_ARG1\", build.cxx(target));\n } else {\n cfg.define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cxx(target));\n }\n cfg.build_arg(\"-j\").build_arg(build.jobs().to_string());\n }\n\n \/\/ FIXME: we don't actually need to build all LLVM tools and all LLVM\n \/\/ libraries here, e.g. we just want a few components and a few\n \/\/ tools. 
Figure out how to filter them down and only build the right\n \/\/ tools and libs on all platforms.\n cfg.build();\n}\n\nfn check_llvm_version(build: &Build, llvm_config: &Path) {\n if !build.config.llvm_version_check {\n return\n }\n\n let mut cmd = Command::new(llvm_config);\n let version = output(cmd.arg(\"--version\"));\n if version.starts_with(\"3.5\") || version.starts_with(\"3.6\") ||\n version.starts_with(\"3.7\") {\n return\n }\n panic!(\"\\n\\nbad LLVM version: {}, need >=3.5\\n\\n\", version)\n}\n\npub fn compiler_rt(build: &Build, target: &str) {\n let dst = build.compiler_rt_out(target);\n let arch = target.split('-').next().unwrap();\n let mode = if build.config.rust_optimize {\"Release\"} else {\"Debug\"};\n let (dir, build_target, libname) = if target.contains(\"linux\") {\n let os = if target.contains(\"android\") {\"-android\"} else {\"\"};\n let arch = if arch.starts_with(\"arm\") && target.contains(\"eabihf\") {\n \"armhf\"\n } else {\n arch\n };\n let target = format!(\"clang_rt.builtins-{}{}\", arch, os);\n (\"linux\".to_string(), target.clone(), target)\n } else if target.contains(\"darwin\") {\n let target = format!(\"clang_rt.builtins_{}_osx\", arch);\n (\"builtins\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-gnu\") {\n let target = format!(\"clang_rt.builtins-{}\", arch);\n (\"windows\".to_string(), target.clone(), target)\n } else if target.contains(\"windows-msvc\") {\n (format!(\"windows\/{}\", mode),\n \"lib\/builtins\/builtins\".to_string(),\n format!(\"clang_rt.builtins-{}\", arch.replace(\"i686\", \"i386\")))\n } else {\n panic!(\"can't get os from target: {}\", target)\n };\n let output = dst.join(\"build\/lib\").join(dir)\n .join(staticlib(&libname, target));\n build.compiler_rt_built.borrow_mut().insert(target.to_string(),\n output.clone());\n if fs::metadata(&output).is_ok() {\n return\n }\n let _ = fs::remove_dir_all(&dst);\n t!(fs::create_dir_all(&dst));\n let build_llvm_config = build.llvm_out(&build.config.build)\n .join(\"bin\")\n .join(exe(\"llvm-config\", &build.config.build));\n let mut cfg = cmake::Config::new(build.src.join(\"src\/compiler-rt\"));\n cfg.target(target)\n .host(&build.config.build)\n .out_dir(&dst)\n .profile(mode)\n .define(\"LLVM_CONFIG_PATH\", build_llvm_config)\n .define(\"COMPILER_RT_DEFAULT_TARGET_TRIPLE\", target)\n .define(\"COMPILER_RT_BUILD_SANITIZERS\", \"OFF\")\n .define(\"COMPILER_RT_BUILD_EMUTLS\", \"OFF\")\n \/\/ inform about c\/c++ compilers, the c++ compiler isn't actually used but\n \/\/ it's needed to get the initial configure to work on all platforms.\n .define(\"CMAKE_C_COMPILER\", build.cc(target))\n .define(\"CMAKE_CXX_COMPILER\", build.cc(target))\n .build_target(&build_target);\n cfg.build();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>utility to measure mem usage<commit_after>#![feature(alloc)]\n#![feature(libc)]\n\nextern crate rand;\nextern crate burst_trie;\nextern crate libc;\n\nuse std::cmp::max;\nuse std::io::Read;\nuse std::io;\nuse std::thread;\nuse std::default::Default;\nuse burst_trie::BurstTrieMap;\nuse std::collections::{HashMap, BTreeMap};\nuse std::ascii::AsciiExt;\nuse libc::*;\n\nextern {fn je_malloc_stats_print (write_cb: extern fn (*const c_void, *const c_char), cbopaque: *const c_void, opts: *const c_char);}\nextern fn write_cb (_: *const c_void, message: *const c_char) {\n print! 
(\"{}\", String::from_utf8_lossy (unsafe {std::ffi::CStr::from_ptr (message as *const i8) .to_bytes()}));\n}\n\nfn stats_print() {\n unsafe {je_malloc_stats_print (write_cb, std::ptr::null(), std::ptr::null())};\n}\n\nfn main() {\n let words = gen_words(10000, 3, 25);\n \/\/ let words = read_words();\n println!(\"--sample--\\n{:#?}--\", &words[..10]);\n let mut word_counts: HashMap<String, usize> = Default::default();\n for word in words {\n let len = word.len();\n word_counts.insert(word, len);\n }\n stats_print();\n\n \/\/ word_counts.print_structure();\n}\n\nfn read_words() -> Vec<String> {\n let mut input = String::new();\n io::stdin().read_to_string(&mut input).unwrap();\n input.split_whitespace()\n .map(|w| w.trim_matches(|c| ['.', '\"', ':', ';', ',', '!', '?', ')', '(', '_']\n .contains(&c)))\n .map(|w| w.to_lowercase())\n .filter(|w| !w.is_empty())\n .collect()\n}\n\nfn gen_words(count: usize, min_len: usize, max_len: usize) -> Vec<String> {\n use rand::{Rng, StdRng, SeedableRng};\n static SEED: &'static[usize] = &[0, 1, 1, 2, 3, 5, 8, 13, 21, 34];\n let mut rng: StdRng = SeedableRng::from_seed(SEED);\n (0..count).map(|_| {\n let key_len = rng.gen_range(min_len, max_len);\n rng.gen_ascii_chars().take(key_len).collect::<String>()\n }).collect()\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Pick more appropriate int types for fortuna<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Update matcher.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rename RqlError{,Type} to QueryError{,Kind}<commit_after><|endoftext|>"} {"text":"<commit_before>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 
>= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n 
end_no_ints(reenable);\n }\n ret\n}\n<commit_msg>Implement common::memory::Memory<commit_after>use core::cmp::min;\nuse core::mem::size_of;\nuse core::ptr;\n\nuse common::scheduler::*;\n\npub const PAGE_DIRECTORY: usize = 0x300000;\npub const PAGE_TABLE_SIZE: usize = 1024;\npub const PAGE_TABLES: usize = PAGE_DIRECTORY + PAGE_TABLE_SIZE * 4;\npub const PAGE_SIZE: usize = 4 * 1024;\n\npub const CLUSTER_ADDRESS: usize = PAGE_TABLES + PAGE_TABLE_SIZE * PAGE_TABLE_SIZE * 4 ;\npub const CLUSTER_COUNT: usize = 1024 * 1024; \/\/ 4 GiB\npub const CLUSTER_SIZE: usize = 4 * 1024; \/\/ Of 4 K chunks\n\n#[repr(packed)]\nstruct MemoryMapEntry {\n base: u64,\n len: u64,\n class: u32,\n acpi: u32,\n}\n\nstruct Memory {\n address: usize,\n}\n\nimpl Memory {\n pub fn new(size: usize) -> Option<Self> {\n let alloc = alloc(size);\n if alloc > 0 {\n Some(Memory { address: alloc })\n }\n else { None }\n }\n\n pub fn renew(&self, size: usize) -> Option<Self> {\n let realloc = realloc(self.address, size);\n if realloc > 0 {\n Some(Memory { address: realloc })\n }\n else { None }\n }\n}\n\nimpl Drop for Memory {\n fn drop(&mut self) {\n unalloc(self.address)\n }\n}\n\nconst MEMORY_MAP: *const MemoryMapEntry = 0x500 as *const MemoryMapEntry;\n\npub unsafe fn cluster(number: usize) -> usize {\n if number < CLUSTER_COUNT {\n ptr::read((CLUSTER_ADDRESS + number * size_of::<usize>()) as *const usize)\n } else {\n 0\n }\n}\n\npub unsafe fn set_cluster(number: usize, address: usize) {\n if number < CLUSTER_COUNT {\n ptr::write((CLUSTER_ADDRESS + number * size_of::<usize>()) as *mut usize,\n address);\n }\n}\n\npub unsafe fn address_to_cluster(address: usize) -> usize {\n if address >= CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() {\n (address - CLUSTER_ADDRESS - CLUSTER_COUNT * size_of::<usize>()) \/ CLUSTER_SIZE\n } else {\n 0\n }\n}\n\npub unsafe fn cluster_to_address(number: usize) -> usize {\n CLUSTER_ADDRESS + CLUSTER_COUNT * size_of::<usize>() + number * CLUSTER_SIZE\n}\n\npub unsafe fn cluster_init() {\n \/\/First, set all clusters to the not present value\n for cluster in 0..CLUSTER_COUNT {\n set_cluster(cluster, 0xFFFFFFFF);\n }\n\n \/\/Next, set all valid clusters to the free value\n \/\/TODO: Optimize this function\n for i in 0..((0x5000 - 0x500) \/ size_of::<MemoryMapEntry>()) {\n let entry = &*MEMORY_MAP.offset(i as isize);\n if entry.len > 0 && entry.class == 1 {\n for cluster in 0..CLUSTER_COUNT {\n let address = cluster_to_address(cluster);\n if address as u64 >= entry.base &&\n (address as u64 + CLUSTER_SIZE as u64) <= (entry.base + entry.len) {\n set_cluster(cluster, 0);\n }\n }\n }\n }\n}\n\npub unsafe fn alloc(size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_aligned(size: usize, align: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size > 0 {\n let mut number = 0;\n let mut count = 0;\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 && 
cluster_to_address(i) % align == 0 {\n if count == 0 {\n number = i;\n }\n count += 1;\n if count * CLUSTER_SIZE > size {\n break;\n }\n } else {\n count = 0;\n }\n }\n if count * CLUSTER_SIZE > size {\n let address = cluster_to_address(number);\n for i in number..number + count {\n set_cluster(i, address);\n }\n ret = address;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn alloc_type<T>() -> *mut T {\n alloc(size_of::<T>()) as *mut T\n}\n\npub unsafe fn alloc_size(ptr: usize) -> usize {\n let mut size = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n size += CLUSTER_SIZE;\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n\n size\n}\n\npub unsafe fn unalloc(ptr: usize) {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if ptr > 0 {\n for i in address_to_cluster(ptr)..CLUSTER_COUNT {\n if cluster(i) == ptr {\n set_cluster(i, 0);\n } else {\n break;\n }\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n}\n\npub unsafe fn realloc(ptr: usize, size: usize) -> usize {\n let mut ret = 0;\n\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n if size == 0 {\n if ptr > 0 {\n unalloc(ptr);\n }\n } else {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n ret = ptr;\n } else {\n ret = alloc(size);\n if ptr > 0 {\n if ret > 0 {\n let copy_size = min(old_size, size);\n\n ::memmove(ret as *mut u8, ptr as *const u8, copy_size);\n }\n unalloc(ptr);\n }\n }\n }\n\n end_no_ints(reenable);\n\n ret\n}\n\npub unsafe fn realloc_inplace(ptr: usize, size: usize) -> usize {\n let old_size = alloc_size(ptr);\n if size <= old_size {\n size\n } else {\n old_size\n }\n}\n\npub fn memory_used() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n\npub fn memory_free() -> usize {\n let mut ret = 0;\n unsafe {\n \/\/Memory allocation must be atomic\n let reenable = start_no_ints();\n\n for i in 0..CLUSTER_COUNT {\n if cluster(i) == 0 {\n ret += CLUSTER_SIZE;\n }\n }\n\n \/\/Memory allocation must be atomic\n end_no_ints(reenable);\n }\n ret\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Building list of new cells<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>16 - pattern matching<commit_after>fn main() {\n let number: int = 13;\n \/\/ TODO ^ Try different values for `number`\n\n println!(\"Tell me about {}\", number);\n match number {\n \/\/ Match a single value\n 1 => println!(\"One!\"),\n \/\/ Match several values\n 2 | 3 | 5 | 7 | 11 => println!(\"This is a prime\"),\n \/\/ Match an inclusive range\n 13...19 => println!(\"A teen\"),\n \/\/ Handle the rest of cases\n _ => println!(\"Ain't special\"),\n }\n\n let boolean = true;\n \/\/ Match is an expression too\n let binary: int = match boolean {\n \/\/ The arms of a match must cover all the possible values\n false => 0,\n true => 1,\n \/\/ TODO ^ Try commenting out one of these arms\n };\n\n println!(\"{} -> {}\", boolean, binary);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make sure we notice when a u16 is loaded at offset 1 into a u8 allocation<commit_after>\/\/ 
This should fail even without validation or Stacked Borrows.\n\/\/ compile-flags: -Zmiri-disable-validation -Zmiri-disable-stacked-borrows\n\nfn main() {\n \/\/ Make sure we notice when a u16 is loaded at offset 1 into a u8 allocation.\n \/\/ (This would be missed if u8 allocations are *always* at odd addresses.)\n for _ in 0..10 { \/\/ Try many times as this might work by chance.\n let x = [0u8; 4];\n let ptr = x.as_ptr().wrapping_offset(1).cast::<u16>();\n let _val = unsafe { *ptr }; \/\/~ERROR but alignment\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>InviteState in Sync response is stripped events<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Start implementing new import format<commit_after>\nuse serde::{Serialize, Deserialize};\nuse std::path::{Path, PathBuf};\nuse std::fs;\n\nuse crate::api::user::UserInput;\n\nconst CONTEST_FILE_NAME: &str = \"turingarena.yaml\";\n\n#[derive(Debug, Serialize, Deserialize)]\nstruct ContestFile {\n \/\/\/ Title of the contest\n title: String,\n\n \/\/\/ Start time of the contest\n start: Option<String>,\n\n \/\/\/ End time of the contest\n end: Option<String>,\n\n \/\/\/ Users of the contest\n users: Vec<ContestUser>,\n}\n\n#[derive(Debug, Serialize, Deserialize)]\nstruct ContestUser {\n \/\/\/ Id of the user\n id: String,\n\n \/\/\/ Name of the user\n name: String,\n\n \/\/\/ Access token of the user\n token: String,\n\n \/\/\/ Role of the user. Currently supported: user, admin\n role: Option<String>,\n}\n\n\/\/\/ Load a contest file\nfn load_contest(path: &Path) -> Result<(), failure::Error> {\n let contest = serde_yaml::from_slice::<ContestFile>(&fs::read(path)?)?;\n\n for user in contest.users {\n let user_input = UserInput {\n id: user.id,\n display_name: user.name,\n token: user.token,\n };\n }\n\n Ok(())\n}\n\n\/\/\/ Search for a contest file in the current directory\n\/\/\/ or parent directories\nfn find_contest_file(path: &Path, max_depth: u32) -> Option<PathBuf> {\n let mut dir = path;\n\n for _ in 0..max_depth {\n let contest = dir.join(CONTEST_FILE_NAME);\n if contest.exists() {\n return Some(contest)\n }\n dir = match dir.parent() {\n Some(dir) => dir,\n None => return None,\n }\n }\n\n None\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Mem validation<commit_after>use elements::{Module, ResizableLimits, MemoryType};\n\npub struct Error(pub String);\n\npub fn validate_module(module: &Module) -> Result<(), Error> {\n\tif let Some(mem_section) = module.memory_section() {\n\t\tmem_section\n\t\t\t.entries()\n\t\t\t.iter()\n\t\t\t.map(MemoryType::validate)\n\t\t\t.collect::<Result<_, _>>()?\n\t}\n\n\tOk(())\n}\n\nimpl ResizableLimits {\n\tfn validate(&self) -> Result<(), Error> {\n\t\tif let Some(maximum) = self.maximum() {\n\t\t\tif self.initial() >= maximum {\n\t\t\t\treturn Err(Error(format!(\n\t\t\t\t\t\"maximum limit {} is lesser than minimum {}\",\n\t\t\t\t\tmaximum,\n\t\t\t\t\tself.initial()\n\t\t\t\t)));\n\t\t\t}\n\t\t}\n\t\tOk(())\n\t}\n}\n\nimpl MemoryType {\n\tfn validate(&self) -> Result<(), Error> {\n\t\tself.limits().validate()\n\t}\n}\n\n#[cfg(test)]\nmod tests {\n\tuse super::validate_module;\n\tuse builder::module;\n\tuse elements::{BlockType, ExportEntry, External, FunctionType, GlobalEntry, GlobalType,\n\t ImportEntry, InitExpr, Internal, MemoryType, Opcode, Opcodes, TableType,\n\t ValueType};\n\n\t#[test]\n\tfn empty_is_valid() {\n\t\tlet module = module().build();\n\t\tassert!(validate_module(&module).is_ok());\n\t}\n\n\t#[test]\n\tfn mem_limits() {\n\t\t\/\/ min > max\n\t\tlet 
m = module()\n\t\t\t.memory()\n\t\t\t\t.with_min(10)\n\t\t\t\t.with_max(Some(9))\n\t\t\t\t.build()\n\t\t\t.build();\n\t\tassert!(validate_module(&m).is_err());\n\n\t\t\/\/ mod is always valid without max.\n\t\tlet m = module()\n\t\t\t.memory()\n\t\t\t\t.with_min(10)\n\t\t\t\t.build()\n\t\t\t.build();\n\t\tassert!(validate_module(&m).is_ok());\n\t}\n}\n<|endoftext|>"} {"text":"<commit_before>use backends::Backend;\nuse message::WireMessage;\nuse errors::Result;\n\npub struct NullBackend;\n\nimpl NullBackend {\n pub fn new() -> NullBackend {\n NullBackend {}\n }\n}\n\nimpl Backend for NullBackend {\n fn panic_on_error(&self) -> bool {\n false\n }\n\n fn log_message(&self, _: WireMessage) -> Result<()> {\n Ok(())\n }\n}<commit_msg>Add docs to NullBackend<commit_after>use backends::Backend;\nuse message::WireMessage;\nuse errors::Result;\n\n\/\/\/ The `NullBackend` is a utility backend which discards all messages\npub struct NullBackend;\n\nimpl NullBackend {\n \/\/\/ Construct a new NullBackend\n pub fn new() -> NullBackend {\n NullBackend {}\n }\n}\n\nimpl Backend for NullBackend {\n fn panic_on_error(&self) -> bool {\n false\n }\n\n \/\/\/ Log a message.\n \/\/\/\n \/\/\/ Logging a message with NullBackend is a noop and will never fail.\n fn log_message(&self, _: WireMessage) -> Result<()> {\n Ok(())\n }\n}<|endoftext|>"} {"text":"<commit_before>use std::iter::Iterator;\n\nuse storage::file::File;\n\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, &File);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = File>>(&self, files: I) {\n for file in files {\n self.print_file(&file);\n }\n }\n\n}\n\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: &File) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n}\n\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: &File) {\n if self.debug {\n debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", f);\n } else {\n info!(\"[File]: {}\", f.id());\n }\n }\n\n}\n\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: &File) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = File>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.owner_name())[..]);\n\n let id : String = file.id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n 
}\n }\n\n}\n<commit_msg>ui: Change to take Rc<RefCell<File>> instead of File<commit_after>use std::cell::RefCell;\nuse std::iter::Iterator;\nuse std::rc::Rc;\nuse std::ops::Deref;\n\nuse storage::file::File;\n\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, Rc<RefCell<File>>);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n for file in files {\n self.print_file(file);\n }\n }\n\n}\n\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n}\n\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", &*f.deref().borrow());\n } else {\n info!(\"[File]: {}\", f.deref().borrow().id());\n }\n }\n\n}\n\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Obtain the stage input attributes from shader reflection<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add feature(scoped) as thread::scoped is considered to be unstable<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added suit enum<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ write_volatile causes an LLVM assert with composite types\n\n#![feature(volatile)]\nuse std::ptr::{read_volatile, write_volatile};\n\n#[derive(Debug, Eq, PartialEq)]\nstruct A(u32);\n#[derive(Debug, Eq, PartialEq)]\nstruct B(u64);\n#[derive(Debug, Eq, PartialEq)]\nstruct C(u32, u32);\n#[derive(Debug, Eq, PartialEq)]\nstruct D(u64, u64);\n#[derive(Debug, Eq, PartialEq)]\nstruct E([u64; 32]);\n\nfn main() {\n unsafe {\n let mut x: u32 = 0;\n write_volatile(&mut x, 1);\n assert_eq!(read_volatile(&x), 1);\n assert_eq!(x, 1);\n\n let mut x: u64 = 0;\n write_volatile(&mut x, 1);\n assert_eq!(read_volatile(&x), 1);\n assert_eq!(x, 1);\n\n let mut x = A(0);\n write_volatile(&mut x, A(1));\n assert_eq!(read_volatile(&x), A(1));\n assert_eq!(x, A(1));\n\n let mut x = B(0);\n write_volatile(&mut x, B(1));\n assert_eq!(read_volatile(&x), B(1));\n assert_eq!(x, B(1));\n\n let mut x = C(0, 0);\n write_volatile(&mut x, C(1, 1));\n assert_eq!(read_volatile(&x), C(1, 1));\n assert_eq!(x, C(1, 1));\n\n let mut x = D(0, 0);\n write_volatile(&mut x, D(1, 1));\n assert_eq!(read_volatile(&x), D(1, 1));\n assert_eq!(x, D(1, 1));\n\n let mut x = E([0; 32]);\n write_volatile(&mut x, E([1; 32]));\n assert_eq!(read_volatile(&x), E([1; 32]));\n assert_eq!(x, E([1; 32]));\n }\n}\n<commit_msg>FIN: ignore failing test on emscripten, see #41299<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ write_volatile causes an LLVM assert with composite types\n\n\/\/ ignore-emscripten See #41299: probably a bad optimization\n\n#![feature(volatile)]\nuse std::ptr::{read_volatile, write_volatile};\n\n#[derive(Debug, Eq, PartialEq)]\nstruct A(u32);\n#[derive(Debug, Eq, PartialEq)]\nstruct B(u64);\n#[derive(Debug, Eq, PartialEq)]\nstruct C(u32, u32);\n#[derive(Debug, Eq, PartialEq)]\nstruct D(u64, u64);\n#[derive(Debug, Eq, PartialEq)]\nstruct E([u64; 32]);\n\nfn main() {\n unsafe {\n let mut x: u32 = 0;\n write_volatile(&mut x, 1);\n assert_eq!(read_volatile(&x), 1);\n assert_eq!(x, 1);\n\n let mut x: u64 = 0;\n write_volatile(&mut x, 1);\n assert_eq!(read_volatile(&x), 1);\n assert_eq!(x, 1);\n\n let mut x = A(0);\n write_volatile(&mut x, A(1));\n assert_eq!(read_volatile(&x), A(1));\n assert_eq!(x, A(1));\n\n let mut x = B(0);\n write_volatile(&mut x, B(1));\n assert_eq!(read_volatile(&x), B(1));\n assert_eq!(x, B(1));\n\n let mut x = C(0, 0);\n write_volatile(&mut x, C(1, 1));\n assert_eq!(read_volatile(&x), C(1, 1));\n assert_eq!(x, C(1, 1));\n\n let mut x = D(0, 0);\n write_volatile(&mut x, D(1, 1));\n assert_eq!(read_volatile(&x), D(1, 1));\n assert_eq!(x, D(1, 1));\n\n let mut x = E([0; 32]);\n write_volatile(&mut x, E([1; 32]));\n assert_eq!(read_volatile(&x), E([1; 32]));\n assert_eq!(x, E([1; 32]));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #44277 - mattico:test-33185, r=nikomatsakis<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\n#[macro_export]\nmacro_rules! state {\n ( $( $name:ident : $field:ty )* ) => (\n #[derive(Default)]\n struct State {\n $($name : $field),*\n }\n )\n}\n\nstate! { x: i64 }\n\npub fn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use tolerance > 0 in Path::contains_point()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a version of the arithmetic example that returns an AST as an enum<commit_after>#![feature(plugin)]\n#![plugin(peg_syntax_ext)]\nuse arithmetic::expression;\n\n#[derive(Clone, PartialEq, Eq, Debug)]\npub enum Expression {\n\tNumber(i64),\n\tSum(Box<Expression>, Box<Expression>),\n\tProduct(Box<Expression>, Box<Expression>),\n}\n\npeg! arithmetic(r#\"\nuse super::Expression;\n\n#[pub]\nexpression -> Expression\n\t= sum\n\nsum -> Expression\n\t= l:product \"+\" r:product { Expression::Sum(Box::new(l), Box::new(r)) }\n\t\/ product\n\nproduct -> Expression\n\t= l:atom \"*\" r:atom { Expression::Product(Box::new(l), Box::new(r)) }\n\t\/ atom\n\natom -> Expression\n\t= number\n\t\/ \"(\" v:sum \")\" { v }\n\nnumber -> Expression\n\t= [0-9]+ { Expression::Number(match_str.parse().unwrap()) }\n\"#);\n\n#[test]\nfn main() {\n\tassert_eq!(expression(\"1+1\"), Ok(Expression::Sum(\n\t\tBox::new(Expression::Number(1)),\n\t\tBox::new(Expression::Number(1)))\n\t));\n\tassert_eq!(expression(\"5*5\"), Ok(Expression::Product(\n\t\tBox::new(Expression::Number(5)),\n\t\tBox::new(Expression::Number(5)))\n\t));\n\tassert_eq!(expression(\"2+3*4\"), Ok(Expression::Sum(\n\t\tBox::new(Expression::Number(2)),\n\t\tBox::new(Expression::Product(\n\t\t\tBox::new(Expression::Number(3)),\n\t\t\tBox::new(Expression::Number(4))\n\t\t)),\n\t)));\n\tassert!(expression(\"(22+)+1\").is_err());\n\tassert!(expression(\"1++1\").is_err());\n\tassert!(expression(\"3)+1\").is_err());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Warn on errors writing to the backend<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) 2017 Anatoly Ikorsky\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0\n\/\/ <LICENSE-APACHE or http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT\n\/\/ license <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
All files in the project carrying such notice may not be copied,\n\/\/ modified, or distributed except according to those terms.\n\nuse tokio_io::AsyncRead;\n\nuse std::{fmt, sync::Arc};\n\nuse crate::BoxFuture;\n\npub mod builtin;\n\n\/\/\/ Trait used to handle local infile requests.\n\/\/\/\n\/\/\/ Simple handler example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # extern crate futures;\n\/\/\/ # extern crate mysql_async as my;\n\/\/\/ # extern crate tokio;\n\/\/\/ # extern crate tokio_io;\n\/\/\/\n\/\/\/ # use futures::Future;\n\/\/\/ # use my::prelude::*;\n\/\/\/ # use tokio_io::AsyncRead;\n\/\/\/ # use std::env;\n\/\/\/ # fn main() {\n\/\/\/\n\/\/\/ # pub fn run<F, T, U>(future: F) -> Result<T, U>\n\/\/\/ # where\n\/\/\/ # F: Future<Item = T, Error = U> + Send + 'static,\n\/\/\/ # T: Send + 'static,\n\/\/\/ # U: Send + 'static,\n\/\/\/ # {\n\/\/\/ # let mut runtime = tokio::runtime::Runtime::new().unwrap();\n\/\/\/ # let result = runtime.block_on(future);\n\/\/\/ # runtime.shutdown_on_idle().wait().unwrap();\n\/\/\/ # result\n\/\/\/ # }\n\/\/\/\n\/\/\/ struct ExampleHandler(&'static [u8]);\n\/\/\/\n\/\/\/ impl LocalInfileHandler for ExampleHandler {\n\/\/\/ fn handle(&self, _: &[u8]) -> Box<Future<Item=Box<AsyncRead + Send>, Error=my::error::Error> + Send> {\n\/\/\/ Box::new(futures::future::ok(Box::new(self.0) as Box<_>))\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ # let database_url: String = if let Ok(url) = env::var(\"DATABASE_URL\") {\n\/\/\/ # let opts = my::Opts::from_url(&url).expect(\"DATABASE_URL invalid\");\n\/\/\/ # if opts.get_db_name().expect(\"a database name is required\").is_empty() {\n\/\/\/ # panic!(\"database name is empty\");\n\/\/\/ # }\n\/\/\/ # url\n\/\/\/ # } else {\n\/\/\/ # \"mysql:\/\/root:password@127.0.0.1:3307\/mysql\".into()\n\/\/\/ # };\n\/\/\/\n\/\/\/ let mut opts = my::OptsBuilder::from_opts(&*database_url);\n\/\/\/ opts.local_infile_handler(Some(ExampleHandler(b\"foobar\")));\n\/\/\/\n\/\/\/ let pool = my::Pool::new(opts);\n\/\/\/\n\/\/\/ let future = pool.get_conn()\n\/\/\/ .and_then(|conn| conn.drop_query(\"CREATE TEMPORARY TABLE tmp (a TEXT);\"))\n\/\/\/ .and_then(|conn| conn.drop_query(\"LOAD DATA LOCAL INFILE 'baz' INTO TABLE tmp;\"))\n\/\/\/ .and_then(|conn| conn.prep_exec(\"SELECT * FROM tmp;\", ()))\n\/\/\/ .and_then(|result| result.map_and_drop(|row| my::from_row::<(String,)>(row).0))\n\/\/\/ .map(|(_ \/* conn *\/, result)| {\n\/\/\/ assert_eq!(result.len(), 1);\n\/\/\/ assert_eq!(result[0], \"foobar\");\n\/\/\/ })\n\/\/\/ .and_then(|_| pool.disconnect())\n\/\/\/ .map_err(|err| match err.kind() {\n\/\/\/ my::error::ErrorKind::Server(_, 1148, _) => {\n\/\/\/ \/\/ The used command is not allowed with this MySQL version\n\/\/\/ },\n\/\/\/ _ => panic!(\"{}\", err),\n\/\/\/ });\n\/\/\/\n\/\/\/ run(future);\n\/\/\/ # }\n\/\/\/ ```\npub trait LocalInfileHandler: Sync + Send {\n \/\/\/ `file_name` is the file name in `LOAD DATA LOCAL INFILE '<file name>' INTO TABLE ...;`\n \/\/\/ query.\n fn handle(&self, file_name: &[u8]) -> BoxFuture<Box<dyn AsyncRead + Send + 'static>>;\n}\n\n\/\/\/ Object used to wrap `T: LocalInfileHandler` inside of Opts.\n#[derive(Clone)]\npub struct LocalInfileHandlerObject(Arc<dyn LocalInfileHandler>);\n\nimpl LocalInfileHandlerObject {\n pub fn new<T: LocalInfileHandler + 'static>(handler: T) -> Self {\n LocalInfileHandlerObject(Arc::new(handler))\n }\n\n pub fn clone_inner(&self) -> Arc<dyn LocalInfileHandler> {\n self.0.clone()\n }\n}\n\nimpl PartialEq for LocalInfileHandlerObject {\n fn eq(&self, other: &LocalInfileHandlerObject) -> 
bool {\n self.0.as_ref() as *const dyn LocalInfileHandler\n == other.0.as_ref() as *const dyn LocalInfileHandler\n }\n}\n\nimpl Eq for LocalInfileHandlerObject {}\n\nimpl fmt::Debug for LocalInfileHandlerObject {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n write!(f, \"Local infile handler object\")\n }\n}\n<commit_msg>Fix doctest in local_infile_handler<commit_after>\/\/ Copyright (c) 2017 Anatoly Ikorsky\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0\n\/\/ <LICENSE-APACHE or http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT\n\/\/ license <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. All files in the project carrying such notice may not be copied,\n\/\/ modified, or distributed except according to those terms.\n\nuse tokio_io::AsyncRead;\n\nuse std::{fmt, sync::Arc};\n\nuse crate::BoxFuture;\n\npub mod builtin;\n\n\/\/\/ Trait used to handle local infile requests.\n\/\/\/\n\/\/\/ Simple handler example:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ # extern crate futures;\n\/\/\/ # extern crate mysql_async as my;\n\/\/\/ # extern crate tokio;\n\/\/\/ # extern crate tokio_io;\n\/\/\/\n\/\/\/ # use futures::Future;\n\/\/\/ # use my::prelude::*;\n\/\/\/ # use tokio_io::AsyncRead;\n\/\/\/ # use std::env;\n\/\/\/ # fn main() {\n\/\/\/\n\/\/\/ # pub fn run<F, T, U>(future: F) -> Result<T, U>\n\/\/\/ # where\n\/\/\/ # F: Future<Item = T, Error = U> + Send + 'static,\n\/\/\/ # T: Send + 'static,\n\/\/\/ # U: Send + 'static,\n\/\/\/ # {\n\/\/\/ # let mut runtime = tokio::runtime::Runtime::new().unwrap();\n\/\/\/ # let result = runtime.block_on(future);\n\/\/\/ # runtime.shutdown_on_idle().wait().unwrap();\n\/\/\/ # result\n\/\/\/ # }\n\/\/\/\n\/\/\/ struct ExampleHandler(&'static [u8]);\n\/\/\/\n\/\/\/ impl LocalInfileHandler for ExampleHandler {\n\/\/\/ fn handle(&self, _: &[u8]) -> Box<Future<Item=Box<AsyncRead + Send>, Error=my::error::Error> + Send> {\n\/\/\/ Box::new(futures::future::ok(Box::new(self.0) as Box<_>))\n\/\/\/ }\n\/\/\/ }\n\/\/\/\n\/\/\/ # let database_url: String = if let Ok(url) = env::var(\"DATABASE_URL\") {\n\/\/\/ # let opts = my::Opts::from_url(&url).expect(\"DATABASE_URL invalid\");\n\/\/\/ # if opts.get_db_name().expect(\"a database name is required\").is_empty() {\n\/\/\/ # panic!(\"database name is empty\");\n\/\/\/ # }\n\/\/\/ # url\n\/\/\/ # } else {\n\/\/\/ # \"mysql:\/\/root:password@127.0.0.1:3307\/mysql\".into()\n\/\/\/ # };\n\/\/\/\n\/\/\/ let mut opts = my::OptsBuilder::from_opts(&*database_url);\n\/\/\/ opts.local_infile_handler(Some(ExampleHandler(b\"foobar\")));\n\/\/\/\n\/\/\/ let pool = my::Pool::new(opts);\n\/\/\/\n\/\/\/ let future = pool.get_conn()\n\/\/\/ .and_then(|conn| conn.drop_query(\"CREATE TEMPORARY TABLE tmp (a TEXT);\"))\n\/\/\/ .and_then(|conn| conn.drop_query(\"LOAD DATA LOCAL INFILE 'baz' INTO TABLE tmp;\"))\n\/\/\/ .and_then(|conn| conn.prep_exec(\"SELECT * FROM tmp;\", ()))\n\/\/\/ .and_then(|result| result.map_and_drop(|row| my::from_row::<(String,)>(row).0))\n\/\/\/ .map(|(_ \/* conn *\/, result)| {\n\/\/\/ assert_eq!(result.len(), 1);\n\/\/\/ assert_eq!(result[0], \"foobar\");\n\/\/\/ })\n\/\/\/ .and_then(|_| pool.disconnect())\n\/\/\/ .map_err(|err| match err {\n\/\/\/ my::error::Error::Server(ref err) if err.code == 1148 => {\n\/\/\/ \/\/ The used command is not allowed with this MySQL version\n\/\/\/ },\n\/\/\/ _ => panic!(\"{}\", err),\n\/\/\/ });\n\/\/\/\n\/\/\/ run(future);\n\/\/\/ # }\n\/\/\/ ```\npub trait LocalInfileHandler: Sync + Send {\n \/\/\/ `file_name` is the file 
name in `LOAD DATA LOCAL INFILE '<file name>' INTO TABLE ...;`\n \/\/\/ query.\n fn handle(&self, file_name: &[u8]) -> BoxFuture<Box<dyn AsyncRead + Send + 'static>>;\n}\n\n\/\/\/ Object used to wrap `T: LocalInfileHandler` inside of Opts.\n#[derive(Clone)]\npub struct LocalInfileHandlerObject(Arc<dyn LocalInfileHandler>);\n\nimpl LocalInfileHandlerObject {\n pub fn new<T: LocalInfileHandler + 'static>(handler: T) -> Self {\n LocalInfileHandlerObject(Arc::new(handler))\n }\n\n pub fn clone_inner(&self) -> Arc<dyn LocalInfileHandler> {\n self.0.clone()\n }\n}\n\nimpl PartialEq for LocalInfileHandlerObject {\n fn eq(&self, other: &LocalInfileHandlerObject) -> bool {\n self.0.as_ref() as *const dyn LocalInfileHandler\n == other.0.as_ref() as *const dyn LocalInfileHandler\n }\n}\n\nimpl Eq for LocalInfileHandlerObject {}\n\nimpl fmt::Debug for LocalInfileHandlerObject {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n write!(f, \"Local infile handler object\")\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add select_loop test<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Utilities to support Stratis.\nuse std::collections::HashMap;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse libudev;\n\nuse super::device::is_stratis_device;\nuse stratis::StratisResult;\n\n\/\/\/ Takes a libudev device entry and returns the properties as a HashMap.\nfn device_as_map(device: &libudev::Device) -> HashMap<String, String> {\n let rc: HashMap<_, _> = device\n .properties()\n .map(|i| {\n (\n String::from(i.name().to_str().expect(\"Unix is utf-8\")),\n String::from(i.value().to_str().expect(\"Unix is utf-8\")),\n )\n })\n .collect();\n rc\n}\n\n\/\/\/ Common function used to retrieve the udev db entry for a block device as a HashMap when found\npub fn get_udev_block_device(\n dev_node_search: &Path,\n) -> StratisResult<Option<HashMap<String, String>>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n \/\/ Get canonical form to ensure we do correct lookup in udev db\n let canonical = fs::canonicalize(dev_node_search)?;\n\n let result = enumerator\n .scan_devices()?\n .find(|x| x.devnode().map_or(false, |d| canonical == d))\n .and_then(|dev| Some(device_as_map(&dev)));\n Ok(result)\n}\n\n\/\/\/ Lookup the WWN from the udev db using the device node eg. 
\/dev\/sda\npub fn hw_lookup(dev_node_search: &Path) -> StratisResult<Option<String>> {\n let dev = get_udev_block_device(dev_node_search)?;\n Ok(dev.and_then(|dev| dev.get(\"ID_WWN\").and_then(|i| Some(i.clone()))))\n}\n\n\/\/\/ Collect paths for all the block devices which are not individual multipath paths and which\n\/\/\/ appear to be empty from a udev perspective.\nfn get_all_empty_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n Ok(enumerator\n .scan_devices()?\n .filter(|dev| {\n dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none()\n && !((dev.property_value(\"ID_PART_TABLE_TYPE\").is_some()\n && dev.property_value(\"ID_PART_ENTRY_DISK\").is_none())\n || dev.property_value(\"ID_FS_USAGE\").is_some())\n })\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect())\n}\n\n\/\/\/ Retrieve all the block devices on the system that have a Stratis signature.\npub fn get_stratis_block_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n enumerator.match_property(\"ID_FS_TYPE\", \"stratis\")?;\n\n let devices: Vec<PathBuf> = enumerator\n .scan_devices()?\n .filter(|dev| dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none())\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect();\n\n if devices.is_empty() {\n \/\/ Either we don't have any stratis devices or we are using a distribution that doesn't\n \/\/ have a version of libblkid that supports stratis, lets make sure.\n \/\/ TODO: At some point in the future we can remove this and just return the devices.\n\n Ok(get_all_empty_devices()?\n .into_iter()\n .filter(|x| is_stratis_device(&x).ok().is_some())\n .collect())\n } else {\n Ok(devices)\n }\n}\n<commit_msg>device_as_map: Remove expect(s)<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Utilities to support Stratis.\nuse std::collections::HashMap;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse libudev;\n\nuse super::device::is_stratis_device;\nuse stratis::StratisResult;\n\n\/\/\/ Takes a libudev device entry and returns the properties as a HashMap.\nfn device_as_map(device: &libudev::Device) -> HashMap<String, String> {\n let rc: HashMap<_, _> = device\n .properties()\n .map(|i| {\n (\n String::from(i.name().to_string_lossy()),\n String::from(i.value().to_string_lossy()),\n )\n })\n .collect();\n rc\n}\n\n\/\/\/ Common function used to retrieve the udev db entry for a block device as a HashMap when found\npub fn get_udev_block_device(\n dev_node_search: &Path,\n) -> StratisResult<Option<HashMap<String, String>>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n \/\/ Get canonical form to ensure we do correct lookup in udev db\n let canonical = fs::canonicalize(dev_node_search)?;\n\n let result = enumerator\n .scan_devices()?\n .find(|x| x.devnode().map_or(false, |d| canonical == d))\n .and_then(|dev| Some(device_as_map(&dev)));\n Ok(result)\n}\n\n\/\/\/ Lookup the WWN from the udev db using the device node eg. 
\/dev\/sda\npub fn hw_lookup(dev_node_search: &Path) -> StratisResult<Option<String>> {\n let dev = get_udev_block_device(dev_node_search)?;\n Ok(dev.and_then(|dev| dev.get(\"ID_WWN\").and_then(|i| Some(i.clone()))))\n}\n\n\/\/\/ Collect paths for all the block devices which are not individual multipath paths and which\n\/\/\/ appear to be empty from a udev perspective.\nfn get_all_empty_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n Ok(enumerator\n .scan_devices()?\n .filter(|dev| {\n dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none()\n && !((dev.property_value(\"ID_PART_TABLE_TYPE\").is_some()\n && dev.property_value(\"ID_PART_ENTRY_DISK\").is_none())\n || dev.property_value(\"ID_FS_USAGE\").is_some())\n })\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect())\n}\n\n\/\/\/ Retrieve all the block devices on the system that have a Stratis signature.\npub fn get_stratis_block_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n enumerator.match_property(\"ID_FS_TYPE\", \"stratis\")?;\n\n let devices: Vec<PathBuf> = enumerator\n .scan_devices()?\n .filter(|dev| dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none())\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect();\n\n if devices.is_empty() {\n \/\/ Either we don't have any stratis devices or we are using a distribution that doesn't\n \/\/ have a version of libblkid that supports stratis, lets make sure.\n \/\/ TODO: At some point in the future we can remove this and just return the devices.\n\n Ok(get_all_empty_devices()?\n .into_iter()\n .filter(|x| is_stratis_device(&x).ok().is_some())\n .collect())\n } else {\n Ok(devices)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add flex_test<commit_after>#[macro_use]\nextern crate yoga;\nextern crate ordered_float;\n\nuse ordered_float::OrderedFloat;\nuse yoga::{Direction, FlexDirection, Node, Point, Undefined};\nuse yoga::FlexStyle::*;\n\n#[test]\nfn test_flex_basis_flex_grow_column() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tFlexGrow(1.0),\n\t\tFlexBasis(50 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexGrow(1.0)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(75.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(75.0, child_1_layout.top);\n\tassert_eq!(100.0, child_1_layout.width);\n\tassert_eq!(25.0, child_1_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = 
root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(75.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(75.0, child_1_layout.top);\n\tassert_eq!(100.0, child_1_layout.width);\n\tassert_eq!(25.0, child_1_layout.height);\n}\n\n#[test]\nfn test_flex_basis_flex_grow_row() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tFlexDirection(FlexDirection::Row),\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tFlexGrow(1.0),\n\t\tFlexBasis(50 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexGrow(1.0)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(75.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(75.0, child_1_layout.left);\n\tassert_eq!(0.0, child_1_layout.top);\n\tassert_eq!(25.0, child_1_layout.width);\n\tassert_eq!(100.0, child_1_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(25.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(75.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(0.0, child_1_layout.top);\n\tassert_eq!(25.0, child_1_layout.width);\n\tassert_eq!(100.0, child_1_layout.height);\n}\n\n#[test]\nfn test_flex_basis_flex_shrink_column() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tFlexShrink(1.0),\n\t\tFlexBasis(100 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexBasis(50 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(50.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(50.0, child_1_layout.top);\n\tassert_eq!(100.0, 
child_1_layout.width);\n\tassert_eq!(50.0, child_1_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(50.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(50.0, child_1_layout.top);\n\tassert_eq!(100.0, child_1_layout.width);\n\tassert_eq!(50.0, child_1_layout.height);\n}\n\n#[test]\nfn test_flex_basis_flex_shrink_row() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tFlexDirection(FlexDirection::Row),\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tFlexShrink(1.0),\n\t\tFlexBasis(100 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexBasis(50 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(50.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(50.0, child_1_layout.left);\n\tassert_eq!(0.0, child_1_layout.top);\n\tassert_eq!(50.0, child_1_layout.width);\n\tassert_eq!(100.0, child_1_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(50.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(50.0, child_0_layout.width);\n\tassert_eq!(100.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(0.0, child_1_layout.top);\n\tassert_eq!(50.0, child_1_layout.width);\n\tassert_eq!(100.0, child_1_layout.height);\n}\n\n#[test]\nfn test_flex_shrink_to_zero() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tHeight(75 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tWidth(50 pt),\n\t\tHeight(50 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexShrink(1.0),\n\t\tWidth(50 pt),\n\t\tHeight(50 pt)\n\t);\n\n\tlet mut root_child_2 = Node::new();\n\n\tstyle!(root_child_2,\n\t\tWidth(50 pt),\n\t\tHeight(50 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.insert_child(&mut root_child_2, 2);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\tlet child_2_layout = 
root_child_2.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(50.0, root_layout.width);\n\tassert_eq!(75.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(50.0, child_0_layout.width);\n\tassert_eq!(50.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(50.0, child_1_layout.top);\n\tassert_eq!(50.0, child_1_layout.width);\n\tassert_eq!(0.0, child_1_layout.height);\n\n\tassert_eq!(0.0, child_2_layout.left);\n\tassert_eq!(50.0, child_2_layout.top);\n\tassert_eq!(50.0, child_2_layout.width);\n\tassert_eq!(50.0, child_2_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\tlet child_2_layout = root_child_2.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(50.0, root_layout.width);\n\tassert_eq!(75.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(50.0, child_0_layout.width);\n\tassert_eq!(50.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(50.0, child_1_layout.top);\n\tassert_eq!(50.0, child_1_layout.width);\n\tassert_eq!(0.0, child_1_layout.height);\n\n\tassert_eq!(0.0, child_2_layout.left);\n\tassert_eq!(50.0, child_2_layout.top);\n\tassert_eq!(50.0, child_2_layout.width);\n\tassert_eq!(50.0, child_2_layout.height);\n}\n\n#[test]\nfn test_flex_basis_overrides_main_size() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\tstyle!(root_child_0,\n\t\tFlexGrow(1.0),\n\t\tFlexBasis(50 pt),\n\t\tHeight(20 pt)\n\t);\n\n\tlet mut root_child_1 = Node::new();\n\n\tstyle!(root_child_1,\n\t\tFlexGrow(1.0),\n\t\tHeight(10 pt)\n\t);\n\n\tlet mut root_child_2 = Node::new();\n\n\tstyle!(root_child_2,\n\t\tFlexGrow(1.0),\n\t\tHeight(10 pt)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\troot.insert_child(&mut root_child_1, 1);\n\troot.insert_child(&mut root_child_2, 2);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\tlet child_2_layout = root_child_2.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(60.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(60.0, child_1_layout.top);\n\tassert_eq!(100.0, child_1_layout.width);\n\tassert_eq!(20.0, child_1_layout.height);\n\n\tassert_eq!(0.0, child_2_layout.left);\n\tassert_eq!(80.0, child_2_layout.top);\n\tassert_eq!(100.0, child_2_layout.width);\n\tassert_eq!(20.0, child_2_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_1_layout = root_child_1.get_layout();\n\tlet child_2_layout = root_child_2.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, 
root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(60.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_1_layout.left);\n\tassert_eq!(60.0, child_1_layout.top);\n\tassert_eq!(100.0, child_1_layout.width);\n\tassert_eq!(20.0, child_1_layout.height);\n\n\tassert_eq!(0.0, child_2_layout.left);\n\tassert_eq!(80.0, child_2_layout.top);\n\tassert_eq!(100.0, child_2_layout.width);\n\tassert_eq!(20.0, child_2_layout.height);\n}\n\n#[test]\nfn test_flex_grow_shrink_at_most() {\n\tlet mut root = Node::new();\n\n\tstyle!(root,\n\t\tWidth(100 pt),\n\t\tHeight(100 pt)\n\t);\n\n\tlet mut root_child_0 = Node::new();\n\n\t\/\/ No style for root_child_0\n\n\tlet mut root_child_0_child_0 = Node::new();\n\n\tstyle!(root_child_0_child_0,\n\t\tFlexGrow(1.0),\n\t\tFlexShrink(1.0)\n\t);\n\n\troot.insert_child(&mut root_child_0, 0);\n\n\troot_child_0.insert_child(&mut root_child_0_child_0, 0);\n\troot.calculate_layout(Undefined, Undefined, Direction::LTR);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_0_child_0_layout = root_child_0_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(0.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_0_child_0_layout.left);\n\tassert_eq!(0.0, child_0_child_0_layout.top);\n\tassert_eq!(100.0, child_0_child_0_layout.width);\n\tassert_eq!(0.0, child_0_child_0_layout.height);\n\n\troot.calculate_layout(Undefined, Undefined, Direction::RTL);\n\n\tlet root_layout = root.get_layout();\n\tlet child_0_layout = root_child_0.get_layout();\n\tlet child_0_child_0_layout = root_child_0_child_0.get_layout();\n\n\tassert_eq!(0.0, root_layout.left);\n\tassert_eq!(0.0, root_layout.top);\n\tassert_eq!(100.0, root_layout.width);\n\tassert_eq!(100.0, root_layout.height);\n\n\tassert_eq!(0.0, child_0_layout.left);\n\tassert_eq!(0.0, child_0_layout.top);\n\tassert_eq!(100.0, child_0_layout.width);\n\tassert_eq!(0.0, child_0_layout.height);\n\n\tassert_eq!(0.0, child_0_child_0_layout.left);\n\tassert_eq!(0.0, child_0_child_0_layout.top);\n\tassert_eq!(100.0, child_0_child_0_layout.width);\n\tassert_eq!(0.0, child_0_child_0_layout.height);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added an empty file! 
Hooray!<commit_after><|endoftext|>"} {"text":"<commit_before>#[doc=\"High-level interface to CSS selector matching.\"]\n\nimport std::arc::{arc, get, clone};\n\nimport dom::style::{DisplayType, DisBlock, DisInline, DisNone, Stylesheet, Unit, Auto};\nimport dom::base::{Element, HTMLDivElement, HTMLHeadElement, HTMLImageElement, Node, NodeKind};\nimport dom::base::{Text};\nimport util::color::{Color, rgb};\nimport util::color::css_colors::{white, black};\nimport base::{LayoutData, NTree};\n\ntype SpecifiedStyle = {mut background_color : option<Color>,\n mut display_type : option<DisplayType>,\n mut font_size : option<Unit>,\n mut height : option<Unit>,\n mut text_color : option<Color>,\n mut width : option<Unit>\n };\n\ntrait DefaultStyleMethods {\n fn default_color() -> Color;\n fn default_display_type() -> DisplayType;\n fn default_width() -> Unit;\n fn default_height() -> Unit;\n}\n\n\/\/\/ Default styles for various attributes in case they don't get initialized from CSS selectors.\nimpl NodeKind : DefaultStyleMethods {\n fn default_color() -> Color {\n match self {\n Text(*) => { white() }\n Element(*) => {\n let r = rand::rng();\n rgb(r.next() as u8, r.next() as u8, r.next() as u8)\n }\n }\n }\n\n fn default_display_type() -> DisplayType {\n match self {\n Text(*) => { DisInline }\n Element(element) => {\n match *element.kind {\n HTMLDivElement => DisBlock,\n HTMLHeadElement => DisNone,\n HTMLImageElement(*) => DisInline,\n UnknownElement => DisInline\n }\n }\n }\n }\n \n fn default_width() -> Unit {\n Auto\n }\n\n fn default_height() -> Unit {\n Auto\n }\n}\n\n\/**\n * Create a specified style that can be used to initialize a node before selector matching.\n *\n * Everything is initialized to none except the display style. The default value of the display\n * style is computed so that it can be used to short-circuit selector matching to avoid computing\n * style for children of display:none objects.\n *\/\nfn empty_style_for_node_kind(kind: NodeKind) -> SpecifiedStyle {\n let display_type = kind.default_display_type();\n\n {mut background_color : none,\n mut display_type : some(display_type),\n mut font_size : none,\n mut height : none,\n mut text_color : none,\n mut width : none}\n}\n\ntrait StylePriv {\n fn initialize_style();\n}\n\nimpl Node : StylePriv {\n #[doc=\"\n Set a default auxiliary data so that other threads can modify it.\n \n This is, importantly, the function that creates the layout\n data for the node (the reader-auxiliary box in the RCU model)\n and populates it with the default style.\n\n \"]\n \/\/ TODO: we should look into folding this into building the dom,\n \/\/ instead of doing a linear sweep afterwards.\n fn initialize_style() {\n let node_kind = self.read(|n| copy *n.kind);\n let the_layout_data = @LayoutData({\n mut specified_style : ~empty_style_for_node_kind(node_kind),\n mut box : none\n });\n\n self.set_aux(the_layout_data);\n }\n}\n\ntrait StyleMethods {\n fn initialize_style_for_subtree();\n fn get_specified_style() -> SpecifiedStyle;\n fn recompute_style_for_subtree(styles : arc<Stylesheet>);\n}\n\nimpl Node : StyleMethods {\n #[doc=\"Sequentially initialize the nodes' auxilliary data so they can be updated in parallel.\"]\n fn initialize_style_for_subtree() {\n self.initialize_style();\n \n for NTree.each_child(self) |kid| {\n kid.initialize_style_for_subtree();\n }\n }\n \n #[doc=\"\n Returns the computed style for the given node. 
If CSS selector matching has not yet been\n performed, fails.\n\n TODO: Return a safe reference; don't copy.\n \"]\n fn get_specified_style() -> SpecifiedStyle {\n if !self.has_aux() {\n fail ~\"get_specified_style() called on a node without a style!\";\n }\n return copy *self.aux(|x| copy x).specified_style;\n }\n\n #[doc=\"\n Performs CSS selector matching on a subtree.\n\n This is, importantly, the function that updates the layout data for the node (the reader-\n auxiliary box in the RCU model) with the computed style.\n \"]\n fn recompute_style_for_subtree(styles : arc<Stylesheet>) {\n listen(|ack_chan| {\n let mut i = 0u;\n \n \/\/ Compute the styles of each of our children in parallel\n for NTree.each_child(self) |kid| {\n i = i + 1u;\n let new_styles = clone(&styles);\n \n task::spawn(|| {\n kid.recompute_style_for_subtree(new_styles); \n ack_chan.send(());\n })\n }\n\n self.match_css_style(*get(&styles));\n \n \/\/ Make sure we have finished updating the tree before returning\n while i > 0 {\n ack_chan.recv();\n i = i - 1u;\n }\n })\n }\n}\n<commit_msg>Get rid of colored boxes.<commit_after>#[doc=\"High-level interface to CSS selector matching.\"]\n\nimport std::arc::{arc, get, clone};\n\nimport dom::style::{DisplayType, DisBlock, DisInline, DisNone, Stylesheet, Unit, Auto};\nimport dom::base::{Element, HTMLDivElement, HTMLHeadElement, HTMLImageElement, Node, NodeKind};\nimport dom::base::{Text};\nimport util::color::{Color, rgb};\nimport util::color::css_colors::{white, black};\nimport base::{LayoutData, NTree};\n\ntype SpecifiedStyle = {mut background_color : option<Color>,\n mut display_type : option<DisplayType>,\n mut font_size : option<Unit>,\n mut height : option<Unit>,\n mut text_color : option<Color>,\n mut width : option<Unit>\n };\n\ntrait DefaultStyleMethods {\n fn default_color() -> Color;\n fn default_display_type() -> DisplayType;\n fn default_width() -> Unit;\n fn default_height() -> Unit;\n}\n\n\/\/\/ Default styles for various attributes in case they don't get initialized from CSS selectors.\nimpl NodeKind : DefaultStyleMethods {\n fn default_color() -> Color {\n match self {\n Text(*) => white(),\n Element(*) => white()\n }\n }\n\n fn default_display_type() -> DisplayType {\n match self {\n Text(*) => { DisInline }\n Element(element) => {\n match *element.kind {\n HTMLDivElement => DisBlock,\n HTMLHeadElement => DisNone,\n HTMLImageElement(*) => DisInline,\n UnknownElement => DisInline\n }\n }\n }\n }\n \n fn default_width() -> Unit {\n Auto\n }\n\n fn default_height() -> Unit {\n Auto\n }\n}\n\n\/**\n * Create a specified style that can be used to initialize a node before selector matching.\n *\n * Everything is initialized to none except the display style. 
The default value of the display\n * style is computed so that it can be used to short-circuit selector matching to avoid computing\n * style for children of display:none objects.\n *\/\nfn empty_style_for_node_kind(kind: NodeKind) -> SpecifiedStyle {\n let display_type = kind.default_display_type();\n\n {mut background_color : none,\n mut display_type : some(display_type),\n mut font_size : none,\n mut height : none,\n mut text_color : none,\n mut width : none}\n}\n\ntrait StylePriv {\n fn initialize_style();\n}\n\nimpl Node : StylePriv {\n #[doc=\"\n Set a default auxiliary data so that other threads can modify it.\n \n This is, importantly, the function that creates the layout\n data for the node (the reader-auxiliary box in the RCU model)\n and populates it with the default style.\n\n \"]\n \/\/ TODO: we should look into folding this into building the dom,\n \/\/ instead of doing a linear sweep afterwards.\n fn initialize_style() {\n let node_kind = self.read(|n| copy *n.kind);\n let the_layout_data = @LayoutData({\n mut specified_style : ~empty_style_for_node_kind(node_kind),\n mut box : none\n });\n\n self.set_aux(the_layout_data);\n }\n}\n\ntrait StyleMethods {\n fn initialize_style_for_subtree();\n fn get_specified_style() -> SpecifiedStyle;\n fn recompute_style_for_subtree(styles : arc<Stylesheet>);\n}\n\nimpl Node : StyleMethods {\n #[doc=\"Sequentially initialize the nodes' auxilliary data so they can be updated in parallel.\"]\n fn initialize_style_for_subtree() {\n self.initialize_style();\n \n for NTree.each_child(self) |kid| {\n kid.initialize_style_for_subtree();\n }\n }\n \n #[doc=\"\n Returns the computed style for the given node. If CSS selector matching has not yet been\n performed, fails.\n\n TODO: Return a safe reference; don't copy.\n \"]\n fn get_specified_style() -> SpecifiedStyle {\n if !self.has_aux() {\n fail ~\"get_specified_style() called on a node without a style!\";\n }\n return copy *self.aux(|x| copy x).specified_style;\n }\n\n #[doc=\"\n Performs CSS selector matching on a subtree.\n\n This is, importantly, the function that updates the layout data for the node (the reader-\n auxiliary box in the RCU model) with the computed style.\n \"]\n fn recompute_style_for_subtree(styles : arc<Stylesheet>) {\n listen(|ack_chan| {\n let mut i = 0u;\n \n \/\/ Compute the styles of each of our children in parallel\n for NTree.each_child(self) |kid| {\n i = i + 1u;\n let new_styles = clone(&styles);\n \n task::spawn(|| {\n kid.recompute_style_for_subtree(new_styles); \n ack_chan.send(());\n })\n }\n\n self.match_css_style(*get(&styles));\n \n \/\/ Make sure we have finished updating the tree before returning\n while i > 0 {\n ack_chan.recv();\n i = i - 1u;\n }\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue 3305<commit_after>trait double {\n fn double() -> uint;\n}\n\nimpl uint: double {\n fn double() -> uint { self * 2u }\n}\n\nfn is_equal<D: double>(x: @D, exp: uint) {\n assert x.double() == exp;\n}\n\nfn main() {\n let x = @(3u as double);\n is_equal(x, 6);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(pub_restricted, item_like_imports)]\n#![deny(unused)]\n\nmod foo {\n fn f() {}\n\n mod m1 {\n pub(super) use super::f; \/\/~ ERROR unused\n }\n\n mod m2 {\n #[allow(unused)]\n use super::m1::*; \/\/ (despite this glob import)\n }\n\n mod m3 {\n pub(super) use super::f; \/\/ Check that this is counted as used (c.f. #36249).\n }\n\n pub mod m4 {\n use super::m3::*;\n pub fn g() { f(); }\n }\n}\n\nfn main() {\n foo::m4::g();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Review remarks from @dulanov Implemented<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor display state sync<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #81566 - osa1:issue71202, r=jonas-schievink<commit_after>\/\/ check-pass\n\n#![feature(const_generics)]\n#![allow(incomplete_features, const_evaluatable_unchecked)]\n\nuse std::marker::PhantomData;\n\nstruct DataHolder<T> {\n item: T,\n}\n\nimpl<T: Copy> DataHolder<T> {\n const ITEM_IS_COPY: [(); 1 - {\n trait NotCopy {\n const VALUE: bool = false;\n }\n\n impl<__Type: ?Sized> NotCopy for __Type {}\n\n struct IsCopy<__Type: ?Sized>(PhantomData<__Type>);\n\n impl<__Type> IsCopy<__Type>\n where\n __Type: Sized + Copy,\n {\n const VALUE: bool = true;\n }\n\n <IsCopy<T>>::VALUE\n } as usize] = [];\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for sqrt\/rsqrt.<commit_after>extern crate simd;\nextern crate simdty;\n\nuse std::num::Float;\n\nuse simdty::*;\n\n\/\/ these are so small\/are a perfect squares that one would hope that\n\/\/ the CPU get compute them exactly.\n\n#[test]\nfn test_sqrt() {\n let a = f32x4(0.0, 1.0, 4.0, 9.0);\n let b = simd::sqrt(a);\n assert_eq!(b.0, 0.0);\n assert_eq!(b.1, 1.0);\n assert_eq!(b.2, 2.0);\n assert_eq!(b.3, 3.0);\n}\n\n#[test]\nfn test_rsqrt() {\n \/\/ powers of two are more likely to be computer accurately\n let a = f32x4(1.0, 1.0\/4.0, 1.0\/16.0, 1.0\/64.0);\n let b = simd::rsqrt(a);\n assert!((b.0 - 1.0).abs() < 0.01);\n assert!((b.1 - 2.0).abs() < 0.01);\n assert!((b.2 - 4.0).abs() < 0.01);\n assert!((b.3 - 8.0).abs() < 0.01);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[cfg_attr(all(), path = \"nonexistent_file.rs\")] mod foo;\n\/\/~^ ERROR nonexistent_file.rs\n<commit_msg>Add regression test.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(rustc_attrs)]\n#![allow(dead_code)]\n#![deny(unused_attributes)] \/\/ c.f #35584\n\nmod auxiliary {\n #[cfg_attr(any(), path = \"nonexistent_file.rs\")] pub mod namespaced_enums;\n #[cfg_attr(all(), path = \"namespaced_enums.rs\")] pub mod nonexistent_file;\n}\n\n#[rustc_error]\nfn main() { \/\/~ ERROR compilation successful\n let _ = auxiliary::namespaced_enums::Foo::A;\n let _ = auxiliary::nonexistent_file::Foo::A;\n}\n<|endoftext|>"} {"text":"<commit_before>use std::fmt;\nuse std::io::{self, Read};\nuse std::sync::{Arc, Mutex};\n\nuse hyper::client::IntoUrl;\nuse hyper::header::{Headers, ContentType, Location, Referer, UserAgent, Accept};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse hyper::{Url};\n\nuse serde::{Deserialize, Serialize};\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\nuse ::redirect::{RedirectPolicy, check_redirect};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\npub struct Client {\n inner: Arc<ClientRef>, \/\/::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: Arc::new(ClientRef {\n hyper: client,\n redirect_policy: Mutex::new(RedirectPolicy::default()),\n }),\n })\n }\n\n \/\/\/ Set a `RedirectPolicy` for this client.\n pub fn redirect(&mut self, policy: RedirectPolicy) {\n *self.inner.redirect_policy.lock().unwrap() = policy;\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n \n \/\/\/ Convenience method to make a `PUT` request to a URL.\n pub fn put<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Put, url)\n }\n\n \/\/\/ Convenience method to make a `HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self.inner.clone(),\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\nimpl fmt::Debug for Client 
{\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Client\")\n .field(\"redirect_policy\", &self.inner.redirect_policy)\n .finish()\n }\n}\n\nstruct ClientRef {\n hyper: ::hyper::Client,\n redirect_policy: Mutex<RedirectPolicy>,\n}\n\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use hyper_native_tls::NativeTlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(\n try!(NativeTlsClient::new()\n .map_err(|e| ::hyper::Error::Ssl(Box::new(e)))))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder {\n client: Arc<ClientRef>,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl RequestBuilder {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + ::header::HeaderFormat>(mut self, header: H) -> RequestBuilder {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n if 
!self.headers.has::<Accept>() {\n self.headers.set(Accept::star());\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut urls = Vec::new();\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.hyper.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n\n let should_redirect = match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found |\n StatusCode::SeeOther => {\n body = None;\n match method {\n Method::Get | Method::Head => {},\n _ => {\n method = Method::Get;\n }\n }\n true\n },\n StatusCode::TemporaryRedirect |\n StatusCode::PermanentRedirect => {\n if let Some(ref body) = body {\n body::can_reset(body)\n } else {\n true\n }\n },\n _ => false,\n };\n\n if should_redirect {\n let loc = {\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(loc) => {\n headers.set(Referer(url.to_string()));\n urls.push(url);\n if check_redirect(&client.redirect_policy.lock().unwrap(), &loc, &urls)? {\n loc\n } else {\n debug!(\"redirect_policy disallowed redirection to '{}'\", loc);\n return Ok(Response {\n inner: res\n })\n }\n },\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to {:?} '{}'\", method, url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n } else {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n}\n\nimpl fmt::Debug for RequestBuilder {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"RequestBuilder\")\n .field(\"method\", &self.method)\n .field(\"url\", &self.url)\n .field(\"headers\", &self.headers)\n .finish()\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n #[inline]\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n #[inline]\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n\n \/\/\/ Try and deserialize the response body as JSON.\n #[inline]\n pub fn json<T: Deserialize>(&mut self) -> ::Result<T> {\n serde_json::from_reader(self).map_err(::Error::from)\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n\nimpl fmt::Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Response\")\n .field(\"status\", self.status())\n .field(\"headers\", self.headers())\n .field(\"version\", self.version())\n .finish()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use ::body;\n use hyper::method::Method;\n use hyper::Url;\n use hyper::header::{Host, Headers, ContentType};\n use std::collections::HashMap;\n use serde_urlencoded;\n use serde_json;\n\n #[test]\n fn basic_get_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = 
client.get(some_url);\n\n assert_eq!(r.method, Method::Get);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_head_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.head(some_url);\n\n assert_eq!(r.method, Method::Head);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_post_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.post(some_url);\n\n assert_eq!(r.method, Method::Post);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_put_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\";\n let r = client.put(some_url);\n\n assert_eq!(r.method, Method::Put);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn add_header() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n \/\/ Add a copy of the header to the request builder\n r = r.header(header.clone());\n\n \/\/ then check it was actually added\n assert_eq!(r.headers.get::<Host>(), Some(&header));\n }\n\n #[test]\n fn add_headers() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n let mut headers = Headers::new();\n headers.set(header);\n\n \/\/ Add a copy of the headers to the request builder\n r = r.headers(headers.clone());\n\n \/\/ then make sure they were added correctly\n assert_eq!(r.headers, headers);\n }\n\n #[test]\n fn add_body() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let body = \"Some interesting content\";\n\n r = r.body(body);\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n assert_eq!(buf, body);\n }\n\n #[test]\n fn add_form() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut form_data = HashMap::new();\n form_data.insert(\"foo\", \"bar\");\n\n r = r.form(&form_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::form_url_encoded()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_urlencoded::to_string(&form_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n\n #[test]\n fn add_json() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut json_data = HashMap::new();\n json_data.insert(\"foo\", \"bar\");\n\n r = r.json(&json_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::json()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_json::to_string(&json_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n}\n<commit_msg>client: add convenience method for DELETE<commit_after>use std::fmt;\nuse std::io::{self, Read};\nuse std::sync::{Arc, Mutex};\n\nuse hyper::client::IntoUrl;\nuse hyper::header::{Headers, ContentType, Location, Referer, UserAgent, Accept};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse 
hyper::{Url};\n\nuse serde::{Deserialize, Serialize};\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\nuse ::redirect::{RedirectPolicy, check_redirect};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\npub struct Client {\n inner: Arc<ClientRef>, \/\/::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: Arc::new(ClientRef {\n hyper: client,\n redirect_policy: Mutex::new(RedirectPolicy::default()),\n }),\n })\n }\n\n \/\/\/ Set a `RedirectPolicy` for this client.\n pub fn redirect(&mut self, policy: RedirectPolicy) {\n *self.inner.redirect_policy.lock().unwrap() = policy;\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Convenience method to make a `PUT` request to a URL.\n pub fn put<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Put, url)\n }\n\n \/\/\/ Convenience method to make a `DELETE` request to a URL.\n pub fn delete<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Delete, url)\n }\n\n \/\/\/ Convenience method to make a `HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self.inner.clone(),\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\nimpl fmt::Debug for Client {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Client\")\n .field(\"redirect_policy\", &self.inner.redirect_policy)\n .finish()\n }\n}\n\nstruct ClientRef {\n hyper: ::hyper::Client,\n redirect_policy: Mutex<RedirectPolicy>,\n}\n\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use hyper_native_tls::NativeTlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(\n try!(NativeTlsClient::new()\n .map_err(|e| ::hyper::Error::Ssl(Box::new(e)))))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder {\n client: Arc<ClientRef>,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl RequestBuilder {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to 
construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + ::header::HeaderFormat>(mut self, header: H) -> RequestBuilder {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n if !self.headers.has::<Accept>() {\n self.headers.set(Accept::star());\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut urls = Vec::new();\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.hyper.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n\n let should_redirect = match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found |\n StatusCode::SeeOther => {\n body = None;\n match method {\n Method::Get | Method::Head => {},\n _ => {\n method = Method::Get;\n }\n }\n true\n },\n StatusCode::TemporaryRedirect |\n StatusCode::PermanentRedirect => {\n if let Some(ref body) = body {\n body::can_reset(body)\n } else {\n true\n }\n },\n _ => false,\n };\n\n if should_redirect {\n let loc = 
{\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(loc) => {\n headers.set(Referer(url.to_string()));\n urls.push(url);\n if check_redirect(&client.redirect_policy.lock().unwrap(), &loc, &urls)? {\n loc\n } else {\n debug!(\"redirect_policy disallowed redirection to '{}'\", loc);\n return Ok(Response {\n inner: res\n })\n }\n },\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to {:?} '{}'\", method, url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n } else {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n}\n\nimpl fmt::Debug for RequestBuilder {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"RequestBuilder\")\n .field(\"method\", &self.method)\n .field(\"url\", &self.url)\n .field(\"headers\", &self.headers)\n .finish()\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n #[inline]\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n #[inline]\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n\n \/\/\/ Try and deserialize the response body as JSON.\n #[inline]\n pub fn json<T: Deserialize>(&mut self) -> ::Result<T> {\n serde_json::from_reader(self).map_err(::Error::from)\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n #[inline]\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n\nimpl fmt::Debug for Response {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n f.debug_struct(\"Response\")\n .field(\"status\", self.status())\n .field(\"headers\", self.headers())\n .field(\"version\", self.version())\n .finish()\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use ::body;\n use hyper::method::Method;\n use hyper::Url;\n use hyper::header::{Host, Headers, ContentType};\n use std::collections::HashMap;\n use serde_urlencoded;\n use serde_json;\n\n #[test]\n fn basic_get_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.get(some_url);\n\n assert_eq!(r.method, Method::Get);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_head_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.head(some_url);\n\n assert_eq!(r.method, Method::Head);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_post_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let r = client.post(some_url);\n\n assert_eq!(r.method, Method::Post);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_put_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\";\n let r = client.put(some_url);\n\n assert_eq!(r.method, Method::Put);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n #[test]\n fn basic_delete_request() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\";\n let r = client.delete(some_url);\n\n assert_eq!(r.method, Method::Delete);\n assert_eq!(r.url, Url::parse(some_url));\n }\n\n 
#[test]\n fn add_header() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n \/\/ Add a copy of the header to the request builder\n r = r.header(header.clone());\n\n \/\/ then check it was actually added\n assert_eq!(r.headers.get::<Host>(), Some(&header));\n }\n\n #[test]\n fn add_headers() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let header = Host {\n hostname: \"google.com\".to_string(),\n port: None,\n };\n\n let mut headers = Headers::new();\n headers.set(header);\n\n \/\/ Add a copy of the headers to the request builder\n r = r.headers(headers.clone());\n\n \/\/ then make sure they were added correctly\n assert_eq!(r.headers, headers);\n }\n\n #[test]\n fn add_body() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let body = \"Some interesting content\";\n\n r = r.body(body);\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n assert_eq!(buf, body);\n }\n\n #[test]\n fn add_form() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut form_data = HashMap::new();\n form_data.insert(\"foo\", \"bar\");\n\n r = r.form(&form_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::form_url_encoded()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_urlencoded::to_string(&form_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n\n #[test]\n fn add_json() {\n let client = Client::new().unwrap();\n let some_url = \"https:\/\/google.com\/\";\n let mut r = client.post(some_url);\n\n let mut json_data = HashMap::new();\n json_data.insert(\"foo\", \"bar\");\n\n r = r.json(&json_data);\n\n \/\/ Make sure the content type was set\n assert_eq!(r.headers.get::<ContentType>(), Some(&ContentType::json()));\n\n let buf = body::read_to_string(r.body.unwrap().unwrap()).unwrap();\n\n let body_should_be = serde_json::to_string(&json_data).unwrap();\n assert_eq!(buf, body_should_be);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use scalars anot curve elements<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Create main.rs<commit_after>\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make hardware self references mutable<commit_after><|endoftext|>"} {"text":"<commit_before>#[cfg(feature = \"suggestions\")]\nuse strsim;\n\nuse fmt::Format;\n\n\/\/\/ Produces a string from a given list of possible values which is similar to\n\/\/\/ the passed in value `v` with a certain confidence.\n\/\/\/ Thus in a list of possible values like [\"foo\", \"bar\"], the value \"fop\" will yield\n\/\/\/ `Some(\"foo\")`, whereas \"blark\" would yield `None`.\n#[cfg(feature = \"suggestions\")]\n#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\npub fn did_you_mean<'a, T, I>(v: &str, possible_values: I) -> Option<&'a str>\n where T: AsRef<str> + 'a,\n I: IntoIterator<Item = &'a T>\n{\n\n let mut candidate: Option<(f64, &str)> = None;\n for pv in possible_values.into_iter() {\n let confidence = strsim::jaro_winkler(v, pv.as_ref());\n if confidence > 0.8 &&\n (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) {\n candidate = 
Some((confidence, pv.as_ref()));\n }\n }\n match candidate {\n None => None,\n Some((_, candidate)) => Some(candidate),\n }\n}\n\n#[cfg(not(feature = \"suggestions\"))]\npub fn did_you_mean<'a, T, I>(_: &str, _: I) -> Option<&'a str>\n where T: AsRef<str> + 'a,\n I: IntoIterator<Item = &'a T>\n{\n None\n}\n\n\/\/\/ Returns a suffix that can be empty, or is the standard 'did you mean phrase\n#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\npub fn did_you_mean_suffix<'z, T, I>(arg: &str,\n values: I,\n style: DidYouMeanMessageStyle)\n -> (String, Option<&'z str>)\n where T: AsRef<str> + 'z,\n I: IntoIterator<Item = &'z T>\n{\n match did_you_mean(arg, values) {\n Some(candidate) => {\n let mut suffix = \"\\n\\tDid you mean \".to_owned();\n match style {\n DidYouMeanMessageStyle::LongFlag =>\n suffix.push_str(&*format!(\"{}\", Format::Good(\"--\"))),\n DidYouMeanMessageStyle::EnumValue => suffix.push('\\''),\n }\n suffix.push_str(&Format::Good(candidate).to_string()[..]);\n if let DidYouMeanMessageStyle::EnumValue = style {\n suffix.push('\\'');\n }\n suffix.push_str(\" ?\");\n (suffix, Some(candidate))\n }\n None => (String::new(), None),\n }\n}\n\n\/\/\/ A helper to determine message formatting\npub enum DidYouMeanMessageStyle {\n \/\/\/ Suggested value is a long flag\n LongFlag,\n \/\/\/ Suggested value is one of various possible values\n EnumValue,\n}\n\n#[cfg(all(test, features = \"suggestions\"))]\nmod test {\n use super::*;\n\n #[test]\n fn did_you_mean_possible_values() {\n let p_vals = [\"test\", \"possible\", \"values\"];\n assert_eq!(did_you_mean(\"tst\", p_vals.iter()), Some(\"test\"));\n assert!(did_you_mean(\"hahaahahah\", p_vals.iter()).is_none());\n }\n}\n<commit_msg>tests(Suggestions): adds additional tests<commit_after>#[cfg(feature = \"suggestions\")]\nuse strsim;\n\nuse fmt::Format;\n\n\/\/\/ Produces a string from a given list of possible values which is similar to\n\/\/\/ the passed in value `v` with a certain confidence.\n\/\/\/ Thus in a list of possible values like [\"foo\", \"bar\"], the value \"fop\" will yield\n\/\/\/ `Some(\"foo\")`, whereas \"blark\" would yield `None`.\n#[cfg(feature = \"suggestions\")]\n#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\npub fn did_you_mean<'a, T, I>(v: &str, possible_values: I) -> Option<&'a str>\n where T: AsRef<str> + 'a,\n I: IntoIterator<Item = &'a T>\n{\n\n let mut candidate: Option<(f64, &str)> = None;\n for pv in possible_values.into_iter() {\n let confidence = strsim::jaro_winkler(v, pv.as_ref());\n if confidence > 0.8 &&\n (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) {\n candidate = Some((confidence, pv.as_ref()));\n }\n }\n match candidate {\n None => None,\n Some((_, candidate)) => Some(candidate),\n }\n}\n\n#[cfg(not(feature = \"suggestions\"))]\npub fn did_you_mean<'a, T, I>(_: &str, _: I) -> Option<&'a str>\n where T: AsRef<str> + 'a,\n I: IntoIterator<Item = &'a T>\n{\n None\n}\n\n\/\/\/ Returns a suffix that can be empty, or is the standard 'did you mean phrase\n#[cfg_attr(feature = \"lints\", allow(needless_lifetimes))]\npub fn did_you_mean_suffix<'z, T, I>(arg: &str,\n values: I,\n style: DidYouMeanMessageStyle)\n -> (String, Option<&'z str>)\n where T: AsRef<str> + 'z,\n I: IntoIterator<Item = &'z T>\n{\n match did_you_mean(arg, values) {\n Some(candidate) => {\n let mut suffix = \"\\n\\tDid you mean \".to_owned();\n match style {\n DidYouMeanMessageStyle::LongFlag =>\n suffix.push_str(&*format!(\"{}\", Format::Good(\"--\"))),\n DidYouMeanMessageStyle::EnumValue 
=> suffix.push('\\''),\n }\n suffix.push_str(&Format::Good(candidate).to_string()[..]);\n if let DidYouMeanMessageStyle::EnumValue = style {\n suffix.push('\\'');\n }\n suffix.push_str(\" ?\");\n (suffix, Some(candidate))\n }\n None => (String::new(), None),\n }\n}\n\n\/\/\/ A helper to determine message formatting\npub enum DidYouMeanMessageStyle {\n \/\/\/ Suggested value is a long flag\n LongFlag,\n \/\/\/ Suggested value is one of various possible values\n EnumValue,\n}\n\n#[cfg(all(test, features = \"suggestions\"))]\nmod test {\n use super::*;\n\n #[test]\n fn possible_values_match() {\n let p_vals = [\"test\", \"possible\", \"values\"];\n assert_eq!(did_you_mean(\"tst\", p_vals.iter()), Some(\"test\"));\n }\n\n #[test]\n fn possible_values_nomatch() {\n let p_vals = [\"test\", \"possible\", \"values\"];\n assert!(did_you_mean(\"hahaahahah\", p_vals.iter()).is_none());\n }\n\n #[test]\n fn suffix_long() {\n let p_vals = [\"test\", \"possible\", \"values\"];\n let suffix = \"\\n\\tDid you mean \\'--test\\' ?\";\n assert_eq!(did_you_mean_suffix(\"tst\", p_vals.iter(), DidYouMeanMessageStyle::LongFlag), (suffix, Some(\"test\")));\n }\n\n #[test]\n fn suffix_enum() {\n let p_vals = [\"test\", \"possible\", \"values\"];\n let suffix = \"\\n\\tDid you mean \\'test\\' ?\";\n assert_eq!(did_you_mean_suffix(\"tst\", p_vals.iter(), DidYouMeanMessageStyle::EnumValue), (suffix, Some(\"test\")));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rust By Examples [RBE] - 1.2.2 Display<commit_after>\/\/ Import (via `use`) the `fmt` module to make it available.\nuse std::fmt;\n\n\/\/ Define a structure which `fmt::Display` will be implemented for. This is simply\n\/\/ a tuple struct containing an `i32` bound to the name `Structure`.\nstruct Structure(i32);\n\n\/\/ In order to use the `{}` marker, the trait `fmt::Display` must be implemented\n\/\/ manually for the type.\nimpl fmt::Display for Structure {\n \/\/ This trait requires `fmt` with this exact signature.\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Write strictly the first element into the supplied output\n \/\/ stream: `f`. Returns `fmt::Result` which indicates whether the\n \/\/ operation succeeded or failed. Note that `write!` uses syntax which\n \/\/ is very similar to `println!`.\n write!(f, \"{}\", self.0)\n }\n}\n\n\/\/ A structure holding two numbers. 
`Debug` will be derived so the results can\n\/\/ be contrasted with `Display`.\n#[derive(Debug)]\nstruct MinMax(i64, i64);\n\n\/\/ Implement `Display` for `MinMax`.\nimpl fmt::Display for MinMax {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Use `self.number` to refer to each positional data point.\n write!(f, \"({}, {})\", self.0, self.1)\n }\n}\n\n\/\/ Define a structure where the fields are nameable for comparison.\nstruct Point2 {\n x: f64,\n y: f64,\n}\n\n\/\/ Similarly, implement for Point2\nimpl fmt::Display for Point2 {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Customize so only `x` and `y` are denoted.\n write!(f, \"x: {}, y: {}\", self.x, self.y)\n }\n}\n\n\/\/ Activiti section Impl\nimpl fmt::Debug for Point2 {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \/\/ Customize so only `x` and `y` are denoted.\n write!(f, \"Complex {{ rela: {}, imag: {} }}\", self.x, self.y)\n }\n}\n\npub fn main() {\n let struct1 = Structure(30);\n println!(\"Base Structure: {}\", struct1);\n\n let minmax = MinMax(4, 14);\n\n println!(\"Compare structures:\");\n println!(\"Display: {}\", minmax);\n println!(\"Debug: {:?}\", minmax);\n\n let big_range = MinMax(-300, 300);\n let small_range = MinMax(-3, 3);\n\n println!(\"The big range is {big} and the small is {small}\",\n small = small_range,\n big = big_range);\n\n let point = Point2 { x: 3.3, y: 7.2 };\n\n println!(\"Compare points:\");\n println!(\"Display: {}\", point);\n\n \/\/ Activitu implementation\n println!(\"Debug: {:?}\", point);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(resolve_trait_on_defaulted_unit)]\n\ntrait Deserialize: Sized {\n fn deserialize() -> Result<Self, String>;\n}\n\nimpl Deserialize for () {\n fn deserialize() -> Result<(), String> {\n Ok(())\n }\n}\n\nfn doit() -> Result<(), String> {\n let _ = Deserialize::deserialize()?;\n \/\/~^ ERROR code relies on type\n \/\/~| WARNING previously accepted\n Ok(())\n}\n\nfn main() {\n let _ = doit();\n}\n\n<commit_msg>Fix test<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(resolve_trait_on_defaulted_unit)]\n\ntrait Deserialize: Sized {\n fn deserialize() -> Result<Self, String>;\n}\n\nimpl Deserialize for () {\n fn deserialize() -> Result<(), String> {\n Ok(())\n }\n}\n\nfn doit() -> Result<(), String> {\n let _ = match Deserialize::deserialize() {\n \/\/~^ ERROR code relies on type\n \/\/~| WARNING previously accepted\n Ok(x) => x,\n Err(e) => return Err(e),\n };\n Ok(())\n}\n\nfn main() {\n let _ = doit();\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::mem;\n\n#[allow(dead_code)]\nstruct Point {\n x: f64,\n y: f64,\n}\n\n#[allow(dead_code)]\nstruct Rectangle {\n p1: Point,\n p2: Point,\n}\n\nfn origin() -> Point {\n Point { x: 0.0, y: 0.0 }\n}\n\nfn boxed_origin() -> Box<Point> {\n \/\/ Allocate this point in the heap, and return a pointer to it\n box Point { x: 0.0, y: 0.0 }\n}\n\nfn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ Stack allocated variables\n let point: Point = origin();\n let rectangle: Rectangle = Rectangle {\n p1: origin(),\n p2: Point { x: 3.0, y: 4.0 }\n };\n\n \/\/ Heap allocated rectangle\n let boxed_rectangle: Box<Rectangle> = box Rectangle {\n p1: origin(),\n p2: origin()\n };\n\n \/\/ The output of functions can be boxed\n let boxed_point: Box<Point> = box origin();\n\n \/\/ Double indirection\n let box_in_a_box: Box<Box<Point>> = box boxed_origin();\n\n println!(\"Point occupies {} bytes in the stack\",\n mem::size_of_val(&point));\n println!(\"Rectangle occupies {} bytes in the stack\",\n mem::size_of_val(&rectangle));\n\n \/\/ box size = pointer size\n println!(\"Boxed point occupies {} bytes in the stack\",\n mem::size_of_val(&boxed_point));\n println!(\"Boxed rectangle occupies {} bytes in the stack\",\n mem::size_of_val(&boxed_rectangle));\n println!(\"Boxed box occupies {} bytes in the stack\",\n mem::size_of_val(&box_in_a_box));\n\n \/\/ Copy the data contained in `boxed_point` into `unboxed_point`\n let unboxed_point: Point = *boxed_point;\n println!(\"Unboxed point occupies {} bytes in the stack\",\n mem::size_of_val(&unboxed_point));\n\n \/\/ Unboxing via a destructuring pattern\n let box another_unboxed_point = boxed_point;\n println!(\"Another unboxed point occupies {} bytes in the stack\",\n mem::size_of_val(&another_unboxed_point));\n}\n<commit_msg>Add #[deriving(Copy)] to Point<commit_after>use std::mem;\n\n#[allow(dead_code)]\n#[deriving(Copy)]\nstruct Point {\n x: f64,\n y: f64,\n}\n\n#[allow(dead_code)]\nstruct Rectangle {\n p1: Point,\n p2: Point,\n}\n\nfn origin() -> Point {\n Point { x: 0.0, y: 0.0 }\n}\n\nfn boxed_origin() -> Box<Point> {\n \/\/ Allocate this point in the heap, and return a pointer to it\n box Point { x: 0.0, y: 0.0 }\n}\n\nfn main() {\n \/\/ (all the type annotations are superfluous)\n \/\/ Stack allocated variables\n let point: Point = origin();\n let rectangle: Rectangle = Rectangle {\n p1: origin(),\n p2: Point { x: 3.0, y: 4.0 }\n };\n\n \/\/ Heap allocated rectangle\n let boxed_rectangle: Box<Rectangle> = box Rectangle {\n p1: origin(),\n p2: origin()\n };\n\n \/\/ The output of functions can be boxed\n let boxed_point: Box<Point> = box origin();\n\n \/\/ Double indirection\n let box_in_a_box: Box<Box<Point>> = box boxed_origin();\n\n println!(\"Point occupies {} bytes in the stack\",\n mem::size_of_val(&point));\n println!(\"Rectangle occupies {} bytes in the stack\",\n mem::size_of_val(&rectangle));\n\n \/\/ box size = pointer size\n println!(\"Boxed 
point occupies {} bytes in the stack\",\n mem::size_of_val(&boxed_point));\n println!(\"Boxed rectangle occupies {} bytes in the stack\",\n mem::size_of_val(&boxed_rectangle));\n println!(\"Boxed box occupies {} bytes in the stack\",\n mem::size_of_val(&box_in_a_box));\n\n \/\/ Copy the data contained in `boxed_point` into `unboxed_point`\n let unboxed_point: Point = *boxed_point;\n println!(\"Unboxed point occupies {} bytes in the stack\",\n mem::size_of_val(&unboxed_point));\n\n \/\/ Unboxing via a destructuring pattern\n let box another_unboxed_point = boxed_point;\n println!(\"Another unboxed point occupies {} bytes in the stack\",\n mem::size_of_val(&another_unboxed_point));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Error Reporting for Anonymous Region Lifetime Errors\n\/\/! where both the regions are anonymous.\nuse hir;\nuse infer::InferCtxt;\nuse ty::{self, Region};\nuse infer::region_inference::RegionResolutionError::*;\nuse infer::region_inference::RegionResolutionError;\nuse hir::map as hir_map;\nuse middle::resolve_lifetime as rl;\nuse hir::intravisit::{self, Visitor, NestedVisitorMap};\n\nimpl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {\n \/\/ This method prints the error message for lifetime errors when both the concerned regions\n \/\/ are anonymous.\n \/\/ Consider a case where we have\n \/\/ fn foo(x: &mut Vec<&u8>, y: &u8)\n \/\/ { x.push(y); }.\n \/\/ The example gives\n \/\/ fn foo(x: &mut Vec<&u8>, y: &u8) {\n \/\/ --- --- these references are declared with different lifetimes...\n \/\/ x.push(y);\n \/\/ ^ ...but data from `y` flows into `x` here\n \/\/ It has been extended for the case of structs too.\n \/\/ Consider the example\n \/\/ struct Ref<'a> { x: &'a u32 }\n \/\/ fn foo(mut x: Vec<Ref>, y: Ref) {\n \/\/ --- --- these structs are declared with different lifetimes...\n \/\/ x.push(y);\n \/\/ ^ ...but data from `y` flows into `x` here\n \/\/ }\n \/\/ It will later be extended to trait objects.\n pub fn try_report_anon_anon_conflict(&self, error: &RegionResolutionError<'tcx>) -> bool {\n let (span, sub, sup) = match *error {\n ConcreteFailure(ref origin, sub, sup) => (origin.span(), sub, sup),\n _ => return false, \/\/ inapplicable\n };\n\n \/\/ Determine whether the sub and sup consist of both anonymous (elided) regions.\n let (ty1, ty2, scope_def_id_1, scope_def_id_2, bregion1, bregion2) = if\n self.is_suitable_anonymous_region(sup, true).is_some() &&\n self.is_suitable_anonymous_region(sub, true).is_some() {\n if let (Some(anon_reg1), Some(anon_reg2)) =\n (self.is_suitable_anonymous_region(sup, true),\n self.is_suitable_anonymous_region(sub, true)) {\n let ((def_id1, br1), (def_id2, br2)) = (anon_reg1, anon_reg2);\n let found_arg1 = self.find_anon_type(sup, &br1);\n let found_arg2 = self.find_anon_type(sub, &br2);\n match (found_arg1, found_arg2) {\n (Some(anonarg_1), Some(anonarg_2)) => {\n (anonarg_1, anonarg_2, def_id1, def_id2, br1, br2)\n }\n _ => {\n return false;\n }\n }\n\n } else {\n return false;\n }\n } else {\n return false; 
\/\/inapplicable\n };\n\n let (label1, label2) = if let (Some(sup_arg), Some(sub_arg)) =\n (self.find_arg_with_anonymous_region(sup, sup),\n self.find_arg_with_anonymous_region(sub, sub)) {\n\n let ((anon_arg1, _, _, is_first1), (anon_arg2, _, _, is_first2)) = (sup_arg, sub_arg);\n if self.is_self_anon(is_first1, scope_def_id_1) ||\n self.is_self_anon(is_first2, scope_def_id_2) {\n return false;\n }\n\n if self.is_return_type_anon(scope_def_id_1, bregion1) ||\n self.is_return_type_anon(scope_def_id_2, bregion2) {\n return false;\n }\n\n\n\n\n if anon_arg1 == anon_arg2 {\n (format!(\" with one lifetime\"), format!(\" into the other\"))\n } else {\n let span_label_var1 = if let Some(simple_name) = anon_arg1.pat.simple_name() {\n format!(\" from `{}`\", simple_name)\n } else {\n format!(\"\")\n };\n\n let span_label_var2 = if let Some(simple_name) = anon_arg2.pat.simple_name() {\n format!(\" into `{}`\", simple_name)\n } else {\n format!(\"\")\n };\n\n (span_label_var1, span_label_var2)\n }\n } else {\n return false;\n };\n\n struct_span_err!(self.tcx.sess, span, E0623, \"lifetime mismatch\")\n .span_label(ty1.span,\n format!(\"these two types are declared with different lifetimes...\"))\n .span_label(ty2.span, format!(\"\"))\n .span_label(span, format!(\"...but data{} flows{} here\", label1, label2))\n .emit();\n return true;\n\n }\n\n \/\/\/ This function calls the `visit_ty` method for the parameters\n \/\/\/ corresponding to the anonymous regions. The `nested_visitor.found_type`\n \/\/\/ contains the anonymous type.\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ region - the anonymous region corresponding to the anon_anon conflict\n \/\/\/ br - the bound region corresponding to the above region which is of type `BrAnon(_)`\n \/\/\/\n \/\/\/ # Example\n \/\/\/ ```\n \/\/\/ fn foo(x: &mut Vec<&u8>, y: &u8)\n \/\/\/ { x.push(y); }\n \/\/\/ ```\n \/\/\/ The function returns the nested type corresponding to the anonymous region\n \/\/\/ for e.g. 
`&u8` and Vec<`&u8`.\n pub fn find_anon_type(&self, region: Region<'tcx>, br: &ty::BoundRegion) -> Option<(&hir::Ty)> {\n if let Some(anon_reg) = self.is_suitable_anonymous_region(region, true) {\n let (def_id, _) = anon_reg;\n if let Some(node_id) = self.tcx.hir.as_local_node_id(def_id) {\n let ret_ty = self.tcx.type_of(def_id);\n if let ty::TyFnDef(_, _) = ret_ty.sty {\n if let hir_map::NodeItem(it) = self.tcx.hir.get(node_id) {\n if let hir::ItemFn(ref fndecl, _, _, _, _, _) = it.node {\n return fndecl\n .inputs\n .iter()\n .filter_map(|arg| {\n self.find_visitor_found_type(&**arg, br)\n })\n .next();\n }\n } else if let hir_map::NodeTraitItem(it) = self.tcx.hir.get(node_id) {\n if let hir::TraitItemKind::Method(ref fndecl, _) = it.node {\n return fndecl\n .decl\n .inputs\n .iter()\n .filter_map(|arg| {\n self.find_visitor_found_type(&**arg, br)\n })\n .next();\n }\n } else if let hir_map::NodeImplItem(it) = self.tcx.hir.get(node_id) {\n if let hir::ImplItemKind::Method(ref fndecl, _) = it.node {\n return fndecl\n .decl\n .inputs\n .iter()\n .filter_map(|arg| {\n self.find_visitor_found_type(&**arg, br)\n })\n .next();\n }\n }\n }\n }\n }\n None\n }\n\n \/\/ This method creates a FindNestedTypeVisitor which returns the type corresponding\n \/\/ to the anonymous region.\n fn find_visitor_found_type(&self,\n arg: &'gcx hir::Ty,\n br: &ty::BoundRegion)\n -> Option<(&'gcx hir::Ty)> {\n let mut nested_visitor = FindNestedTypeVisitor {\n infcx: &self,\n hir_map: &self.tcx.hir,\n bound_region: *br,\n found_type: None,\n depth: 1,\n };\n nested_visitor.visit_ty(arg);\n nested_visitor.found_type\n }\n}\n\n\/\/ The FindNestedTypeVisitor captures the corresponding `hir::Ty` of the\n\/\/ anonymous region. The example above would lead to a conflict between\n\/\/ the two anonymous lifetimes for &u8 in x and y respectively. This visitor\n\/\/ would be invoked twice, once for each lifetime, and would\n\/\/ walk the types like &mut Vec<&u8> and &u8 looking for the HIR\n\/\/ where that lifetime appears. This allows us to highlight the\n\/\/ specific part of the type in the error message.\nstruct FindNestedTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n hir_map: &'a hir::map::Map<'gcx>,\n \/\/ The bound_region corresponding to the Refree(freeregion)\n \/\/ associated with the anonymous region we are looking for.\n bound_region: ty::BoundRegion,\n \/\/ The type where the anonymous lifetime appears\n \/\/ for e.g. Vec<`&u8`> and <`&u8`>\n found_type: Option<&'gcx hir::Ty>,\n depth: u32,\n}\n\nimpl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> {\n fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {\n NestedVisitorMap::OnlyBodies(&self.hir_map)\n }\n\n fn visit_ty(&mut self, arg: &'gcx hir::Ty) {\n \/\/ Find the index of the anonymous region that was part of the\n \/\/ error. 
We will then search the function parameters for a bound\n \/\/ region at the right depth with the same index.\n let br_index = match self.bound_region {\n ty::BrAnon(index) => index,\n _ => return,\n };\n\n match arg.node {\n\n hir::TyBareFn(ref fndecl) => {\n self.depth += 1;\n intravisit::walk_ty(self, arg);\n self.depth -= 1;\n return;\n }\n\n hir::TyRptr(ref lifetime, _) => {\n match self.infcx.tcx.named_region_map.defs.get(&lifetime.id) {\n \/\/ the lifetime of the TyRptr\n Some(&rl::Region::LateBoundAnon(debuijn_index, anon_index)) => {\n if debuijn_index.depth == 1 && anon_index == br_index {\n self.found_type = Some(arg);\n return; \/\/ we can stop visiting now\n }\n }\n Some(&rl::Region::Static) |\n Some(&rl::Region::EarlyBound(_, _)) |\n Some(&rl::Region::LateBound(_, _)) |\n Some(&rl::Region::Free(_, _)) |\n None => {\n debug!(\"no arg found\");\n }\n }\n }\n \/\/ Checks if it is of type `hir::TyPath` which corresponds to a struct.\n hir::TyPath(_) => {\n let subvisitor = &mut TyPathVisitor {\n infcx: self.infcx,\n found_it: false,\n bound_region: self.bound_region,\n hir_map: self.hir_map,\n };\n intravisit::walk_ty(subvisitor, arg); \/\/ call walk_ty; as visit_ty is empty,\n \/\/ this will visit only outermost type\n if subvisitor.found_it {\n self.found_type = Some(arg);\n }\n }\n\n _ => {}\n }\n \/\/ walk the embedded contents: e.g., if we are visiting `Vec<&Foo>`,\n \/\/ go on to visit `&Foo`\n debug!(\"depth is {:?}\", self.depth);\n intravisit::walk_ty(self, arg);\n\n }\n}\n\n\/\/ The visitor captures the corresponding `hir::Ty` of the anonymous region\n\/\/ in the case of structs ie. `hir::TyPath`.\n\/\/ This visitor would be invoked for each lifetime corresponding to a struct,\n\/\/ and would walk the types like Vec<Ref> in the above example and Ref looking for the HIR\n\/\/ where that lifetime appears. 
This allows us to highlight the\n\/\/ specific part of the type in the error message.\nstruct TyPathVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {\n infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,\n hir_map: &'a hir::map::Map<'gcx>,\n found_it: bool,\n bound_region: ty::BoundRegion,\n}\n\nimpl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {\n fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {\n NestedVisitorMap::OnlyBodies(&self.hir_map)\n }\n\n fn visit_lifetime(&mut self, lifetime: &hir::Lifetime) {\n let br_index = match self.bound_region {\n ty::BrAnon(index) => index,\n _ => return,\n };\n\n match self.infcx.tcx.named_region_map.defs.get(&lifetime.id) {\n \/\/ the lifetime of the TyPath!\n Some(&rl::Region::LateBoundAnon(debuijn_index, anon_index)) => {\n if debuijn_index.depth == 1 && anon_index == br_index {\n self.found_it = true;\n }\n }\n Some(&rl::Region::Static) |\n Some(&rl::Region::EarlyBound(_, _)) |\n Some(&rl::Region::LateBound(_, _)) |\n Some(&rl::Region::Free(_, _)) |\n None => {\n debug!(\"no arg found\");\n }\n }\n }\n\n fn visit_ty(&mut self, arg: &'gcx hir::Ty) {\n \/\/ ignore nested types\n \/\/\n \/\/ If you have a type like `Foo<'a, &Ty>` we\n \/\/ are only interested in the immediate lifetimes ('a).\n \/\/\n \/\/ Making `visit_ty` empty will ignore the `&Ty` embedded\n \/\/ inside, it will get reached by the outer visitor.\n debug!(\"`Ty` corresponding to a struct is {:?}\", arg);\n }\n}\n<commit_msg>fixes<commit_after><|endoftext|>"} {"text":"<commit_before>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\nextern crate walkdir;\n\nextern crate libimagrt;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, SubCommand};\n\nuse libimagrt::runtime::Runtime;\n\nfn help(cmds: Vec<String>) {\n println!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n \"#);\n\n for cmd in cmds.iter() {\n println!(\"\\t{}\", cmd);\n }\n\n println!(r#\"\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. 
It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#);\n}\n\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\nfn main() {\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let mut app = Runtime::get_default_cli_builder(appname, version, about);\n\n let commands = get_commands();\n\n for command in commands.iter() {\n let s = SubCommand::with_name(&command[..]);\n app = app.subcommand(s)\n }\n\n let app = app.arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .arg(Arg::with_name(\"help\")\n .long(\"help\")\n .short(\"h\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Show help\"));\n\n\n let matches = app.get_matches();\n\n if matches.is_present(\"help\") {\n help(get_commands());\n exit(0);\n }\n\n if matches.is_present(\"version\") {\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n let mut result = vec![];\n for command in commands.iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(command).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n println!(\"{}\", versionstring);\n }\n }\n\n matches.subcommand_name()\n .map(|subcommand| {\n\n let mut subcommand_args = vec![];\n\n for arg in Runtime::arg_names() {\n matches.value_of(arg)\n .map(|value| {\n subcommand_args.push(arg);\n subcommand_args.push(value);\n });\n }\n\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .arg(subcommand)\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(42));\n }\n },\n\n Err(e) => {\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", 
subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1337);\n }\n }\n }\n }\n });\n}\n<commit_msg>Shrink App setup code<commit_after>extern crate crossbeam;\nextern crate clap;\n#[macro_use] extern crate version;\nextern crate walkdir;\n\nextern crate libimagrt;\n\nuse std::env;\nuse std::process::exit;\nuse std::process::Command;\nuse std::process::Stdio;\nuse std::io::ErrorKind;\n\nuse walkdir::WalkDir;\nuse crossbeam::*;\nuse clap::{Arg, SubCommand};\n\nuse libimagrt::runtime::Runtime;\n\nfn help(cmds: Vec<String>) {\n println!(r#\"\n\n _\n (_)_ __ ___ __ _ __ _\n | | '_ \\` _ \\\/ _\\`|\/ _\\`|\n | | | | | | | (_| | (_| |\n |_|_| |_| |_|\\__,_|\\__, |\n |___\/\n -------------------------\n\n Usage: imag [--version | --versions | -h | --help] <command> <args...>\n\n imag - the personal information management suite for the commandline\n\n imag is a PIM suite for the commandline. It consists of several commands,\n called \"modules\". Each module implements one PIM aspect and all of these\n modules can be used independently.\n\n Available commands:\n \"#);\n\n for cmd in cmds.iter() {\n println!(\"\\t{}\", cmd);\n }\n\n println!(r#\"\n\n Call a command with 'imag <command> <args>'\n Each command can be called with \"--help\" to get the respective helptext.\n\n Please visit https:\/\/github.com\/matthiasbeyer\/imag to view the source code,\n follow the development of imag or maybe even contribute to imag.\n\n imag is free software. It is released under the terms of LGPLv2.1\n\n (c) 2016 Matthias Beyer and contributors\"#);\n}\n\nfn get_commands() -> Vec<String> {\n let path = env::var(\"PATH\");\n if path.is_err() {\n println!(\"PATH error: {:?}\", path);\n exit(1);\n }\n let pathelements = path.unwrap();\n let pathelements = pathelements.split(\":\");\n\n let joinhandles : Vec<ScopedJoinHandle<Vec<String>>> = pathelements\n .map(|elem| {\n crossbeam::scope(|scope| {\n scope.spawn(|| {\n WalkDir::new(elem)\n .max_depth(1)\n .into_iter()\n .filter(|path| {\n match path {\n &Ok(ref p) => p.file_name()\n .to_str()\n .map_or(false, |filename| filename.starts_with(\"imag-\")),\n &Err(_) => false,\n }\n })\n .filter_map(|x| x.ok())\n .filter_map(|path| {\n path.file_name()\n .to_str()\n .and_then(|s| s.splitn(2, \"-\").nth(1).map(String::from))\n })\n .collect()\n })\n })\n })\n .collect();\n\n let mut execs = vec![];\n for joinhandle in joinhandles.into_iter() {\n let mut v = joinhandle.join();\n execs.append(&mut v);\n }\n\n execs\n}\n\nfn main() {\n let appname = \"imag\";\n let version = &version!();\n let about = \"imag - the PIM suite for the commandline\";\n let commands = get_commands();\n let r = Runtime::get_default_cli_builder(appname, version, about);\n let matches = commands\n .iter()\n .fold(r, |app, cmd| app.subcommand(SubCommand::with_name(cmd)))\n .arg(Arg::with_name(\"version\")\n .long(\"version\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the version of imag\"))\n .arg(Arg::with_name(\"versions\")\n .long(\"versions\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Get the versions of the imag commands\"))\n .arg(Arg::with_name(\"help\")\n .long(\"help\")\n .short(\"h\")\n .takes_value(false)\n .required(false)\n .multiple(false)\n .help(\"Show help\"))\n .get_matches();\n\n if matches.is_present(\"help\") {\n help(get_commands());\n exit(0);\n }\n\n if matches.is_present(\"version\") {\n println!(\"imag {}\", &version!()[..]);\n exit(0);\n }\n\n if matches.is_present(\"versions\") {\n let 
mut result = vec![];\n for command in commands.iter() {\n result.push(crossbeam::scope(|scope| {\n scope.spawn(|| {\n let v = Command::new(command).arg(\"--version\").output();\n match v {\n Ok(v) => match String::from_utf8(v.stdout) {\n Ok(s) => format!(\"{} -> {}\", command, s),\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n },\n Err(e) => format!(\"Failed calling {} -> {:?}\", command, e),\n }\n })\n }))\n }\n\n for versionstring in result.into_iter().map(|handle| handle.join()) {\n println!(\"{}\", versionstring);\n }\n }\n\n matches.subcommand_name()\n .map(|subcommand| {\n\n let mut subcommand_args = vec![];\n\n for arg in Runtime::arg_names() {\n matches.value_of(arg)\n .map(|value| {\n subcommand_args.push(arg);\n subcommand_args.push(value);\n });\n }\n\n match Command::new(format!(\"imag-{}\", subcommand))\n .stdin(Stdio::inherit())\n .stdout(Stdio::inherit())\n .stderr(Stdio::inherit())\n .arg(subcommand)\n .args(&subcommand_args[..])\n .spawn()\n .and_then(|mut handle| handle.wait())\n {\n Ok(exit_status) => {\n if !exit_status.success() {\n println!(\"{} exited with non-zero exit code\", subcommand);\n exit(exit_status.code().unwrap_or(42));\n }\n },\n\n Err(e) => {\n match e.kind() {\n ErrorKind::NotFound => {\n println!(\"No such command: 'imag-{}'\", subcommand);\n exit(2);\n },\n ErrorKind::PermissionDenied => {\n println!(\"No permission to execute: 'imag-{}'\", subcommand);\n exit(1);\n },\n _ => {\n println!(\"Error spawning: {:?}\", e);\n exit(1337);\n }\n }\n }\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test drop order for locals when a future is dropped part-way through execution<commit_after>\/\/ aux-build:arc_wake.rs\n\/\/ edition:2018\n\/\/ run-pass\n\n#![allow(unused_variables)]\n#![deny(dead_code)]\n#![feature(async_await)]\n\n\/\/ Test that the drop order for locals in a fn and async fn matches up.\nextern crate arc_wake;\n\nuse arc_wake::ArcWake;\nuse std::cell::RefCell;\nuse std::future::Future;\nuse std::marker::PhantomData;\nuse std::pin::Pin;\nuse std::rc::Rc;\nuse std::sync::Arc;\nuse std::task::{Context, Poll};\n\nstruct EmptyWaker;\n\nimpl ArcWake for EmptyWaker {\n fn wake(self: Arc<Self>) {}\n}\n\n#[derive(Debug, Eq, PartialEq)]\nenum DropOrder {\n Function,\n Val(&'static str),\n}\n\ntype DropOrderListPtr = Rc<RefCell<Vec<DropOrder>>>;\n\nstruct D(&'static str, DropOrderListPtr);\n\nimpl Drop for D {\n fn drop(&mut self) {\n self.1.borrow_mut().push(DropOrder::Val(self.0));\n }\n}\n\nstruct NeverReady;\n\nimpl Future for NeverReady {\n type Output = ();\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n Poll::Pending\n }\n}\n\nasync fn simple_variable_declaration_async(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n NeverReady.await;\n}\n\nfn simple_variable_declaration_sync(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n}\n\nasync fn varable_completely_contained_within_block_async(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n async {\n let x = D(\"x\", l.clone());\n }\n .await;\n let y = D(\"y\", l.clone());\n NeverReady.await;\n}\n\nfn varable_completely_contained_within_block_sync(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n {\n let x = D(\"x\", l.clone());\n }\n let y = D(\"y\", l.clone());\n}\n\nasync fn variables_moved_into_separate_blocks_async(l: DropOrderListPtr) 
{\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n async move { x }.await;\n async move { y }.await;\n NeverReady.await;\n}\n\nfn variables_moved_into_separate_blocks_sync(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n {\n x\n };\n {\n y\n };\n}\n\nasync fn variables_moved_into_same_block_async(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n async move {\n x;\n y;\n };\n NeverReady.await;\n}\n\nfn variables_moved_into_same_block_sync(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n {\n x;\n y;\n };\n return;\n}\n\nasync fn move_after_current_await_doesnt_affect_order(l: DropOrderListPtr) {\n l.borrow_mut().push(DropOrder::Function);\n let x = D(\"x\", l.clone());\n let y = D(\"y\", l.clone());\n NeverReady.await;\n async move {\n x;\n y;\n };\n}\n\nfn assert_drop_order_after_cancel<Fut: Future<Output = ()>>(\n f: impl FnOnce(DropOrderListPtr) -> Fut,\n g: impl FnOnce(DropOrderListPtr),\n) {\n let empty = Arc::new(EmptyWaker);\n let waker = ArcWake::into_waker(empty);\n let mut cx = Context::from_waker(&waker);\n\n let actual_order = Rc::new(RefCell::new(Vec::new()));\n let mut fut = Box::pin(f(actual_order.clone()));\n let _ = fut.as_mut().poll(&mut cx);\n drop(fut);\n\n let expected_order = Rc::new(RefCell::new(Vec::new()));\n g(expected_order.clone());\n assert_eq!(*actual_order.borrow(), *expected_order.borrow());\n}\n\nfn main() {\n assert_drop_order_after_cancel(\n simple_variable_declaration_async,\n simple_variable_declaration_sync,\n );\n assert_drop_order_after_cancel(\n varable_completely_contained_within_block_async,\n varable_completely_contained_within_block_sync,\n );\n assert_drop_order_after_cancel(\n variables_moved_into_separate_blocks_async,\n variables_moved_into_separate_blocks_sync,\n );\n assert_drop_order_after_cancel(\n variables_moved_into_same_block_async,\n variables_moved_into_same_block_sync,\n );\n assert_drop_order_after_cancel(\n move_after_current_await_doesnt_affect_order,\n simple_variable_declaration_sync,\n );\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add possibility to set metadata path<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add feature gate tests<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that diagnostic macros are gated by `rustc_diagnostic_macros` feature\n\/\/ gate\n\n__register_diagnostic!(E0001);\n\/\/~^ ERROR macro undefined: '__register_diagnostic!'\n\nfn main() {\n __diagnostic_used!(E0001);\n \/\/~^ ERROR macro undefined: '__diagnostic_used!'\n}\n\n__build_diagnostic_array!(DIAGNOSTICS);\n\/\/~^ ERROR macro undefined: '__build_diagnostic_array!'\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a Test for actual split files<commit_after>extern crate livesplit_core;\n\nuse livesplit_core::{Time, TimeSpan};\nuse livesplit_core::comparison::balanced_pb::{BalancedPB, NAME};\nuse std::fs::File;\nuse std::io::BufReader;\nuse livesplit_core::run::parser::livesplit;\n\nfn r(t: Time) -> Time {\n Time::new()\n .with_real_time(\n t.real_time\n .map(|t| TimeSpan::from_seconds(t.total_seconds().floor())),\n )\n .with_game_time(\n t.game_time\n .map(|t| TimeSpan::from_seconds(t.total_seconds().floor())),\n )\n}\n\nfn t(r: &str, g: &str) -> Time {\n Time::new()\n .with_real_time(r.parse().ok())\n .with_game_time(g.parse().ok())\n}\n\n#[test]\nfn balanced_pb() {\n let reader = BufReader::new(File::open(\"tests\/run_files\/livesplit1.6_gametime.lss\").unwrap());\n let mut run = livesplit::parse(reader, None).unwrap();\n run.comparison_generators_mut().clear();\n run.comparison_generators_mut().push(Box::new(BalancedPB));\n run.regenerate_comparisons();\n let s = run.segments();\n\n assert_eq!(r(s[0].comparison(NAME)), t(\"3:11\", \"3:11\"));\n assert_eq!(r(s[1].comparison(NAME)), t(\"4:24\", \"4:21\"));\n assert_eq!(r(s[2].comparison(NAME)), t(\"6:38\", \"6:31\"));\n assert_eq!(r(s[3].comparison(NAME)), t(\"10:34\", \"10:13\"));\n assert_eq!(r(s[4].comparison(NAME)), t(\"13:05\", \"12:34\"));\n assert_eq!(r(s[5].comparison(NAME)), t(\"15:02\", \"14:21\"));\n assert_eq!(r(s[6].comparison(NAME)), t(\"17:50\", \"16:56\"));\n assert_eq!(r(s[7].comparison(NAME)), t(\"22:42\", \"21:30\"));\n assert_eq!(r(s[8].comparison(NAME)), t(\"26:20\", \"24:47\"));\n assert_eq!(r(s[9].comparison(NAME)), t(\"30:24\", \"28:37\"));\n assert_eq!(r(s[10].comparison(NAME)), t(\"36:51\", \"34:39\"));\n assert_eq!(r(s[11].comparison(NAME)), t(\"37:56\", \"35:39\"));\n assert_eq!(r(s[12].comparison(NAME)), t(\"40:01\", \"37:37\"));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>22 - aliasing<commit_after>struct Point { x: int, y: int, z: int }\n\nfn main() {\n let mut point = Point { x: 0, y: 0, z: 0 };\n\n {\n let borrowed_point = &point;\n let another_borrow = &point;\n\n \/\/ Data can be accessed via the references and the original owner\n println!(\"Point has coordinates: ({}, {}, {})\",\n borrowed_point.x, another_borrow.y, point.z);\n\n \/\/ Error! Can't borrow point as mutable because it's currently\n \/\/ borrowed as immutable\n \/\/let mutable_borrow = &mut point;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Immutable references go out of scope\n }\n\n {\n let mutable_borrow = &mut point;\n\n \/\/ Change data via mutable reference\n mutable_borrow.x = 5;\n\n \/\/ Error! Can't borrow `point` as immutable because it's currently\n \/\/ borrowed as mutable\n \/\/let y = &point.y;\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Error! Can't print, because println! 
takes an immutable reference\n \/\/println!(\"Point Z coordinate is {}\", point.z);\n \/\/ TODO ^ Try uncommenting this line\n\n \/\/ Mutable reference goes out of scope\n }\n\n \/\/ Immutable references to point are allowed again\n println!(\"Point now has coordinates: ({}, {}, {})\",\n point.x, point.y, point.z);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test for householder<commit_after>use ndarray::*;\nuse ndarray_linalg::{krylov::*, *};\n\nfn over<A: Scalar + Lapack>(rtol: A::Real) {\n const N: usize = 4;\n let a: Array2<A> = random((N, N * 2));\n\n \/\/ Terminate\n let (q, r) = householder(a.axis_iter(Axis(1)), N, rtol, Strategy::Terminate);\n let a_sub = a.slice(s![.., 0..N]);\n let qc: Array2<A> = conjugate(&q);\n assert_close_l2!(&qc.dot(&q), &Array::eye(N), rtol; \"Check Q^H Q = I\");\n assert_close_l2!(&q.dot(&r), &a_sub, rtol; \"Check A = QR\");\n\n \/\/ Skip\n let (q, r) = householder(a.axis_iter(Axis(1)), N, rtol, Strategy::Skip);\n let a_sub = a.slice(s![.., 0..N]);\n let qc: Array2<A> = conjugate(&q);\n assert_close_l2!(&qc.dot(&q), &Array::eye(N), rtol);\n assert_close_l2!(&q.dot(&r), &a_sub, rtol);\n\n \/\/ Full\n let (q, r) = householder(a.axis_iter(Axis(1)), N, rtol, Strategy::Full);\n let qc: Array2<A> = conjugate(&q);\n assert_close_l2!(&qc.dot(&q), &Array::eye(N), rtol);\n assert_close_l2!(&q.dot(&r), &a, rtol);\n}\n\n#[test]\nfn over_f32() {\n over::<f32>(1e-5);\n}\n#[test]\nfn over_f64() {\n over::<f64>(1e-9);\n}\n#[test]\nfn over_c32() {\n over::<c32>(1e-5);\n}\n#[test]\nfn over_c64() {\n over::<c64>(1e-9);\n}\n\nfn full<A: Scalar + Lapack>(rtol: A::Real) {\n const N: usize = 5;\n let a: Array2<A> = random((N, N));\n let (q, r) = householder(a.axis_iter(Axis(1)), N, rtol, Strategy::Terminate);\n let qc: Array2<A> = conjugate(&q);\n assert_close_l2!(&qc.dot(&q), &Array::eye(N), rtol; \"Check Q^H Q = I\");\n assert_close_l2!(&q.dot(&r), &a, rtol; \"Check A = QR\");\n}\n\n#[test]\nfn full_f32() {\n full::<f32>(1e-5);\n}\n#[test]\nfn full_f64() {\n full::<f64>(1e-9);\n}\n#[test]\nfn full_c32() {\n full::<c32>(1e-5);\n}\n#[test]\nfn full_c64() {\n full::<c64>(1e-9);\n}\n\nfn half<A: Scalar + Lapack>(rtol: A::Real) {\n const N: usize = 4;\n let a: Array2<A> = random((N, N \/ 2));\n let (q, r) = householder(a.axis_iter(Axis(1)), N, rtol, Strategy::Terminate);\n let qc: Array2<A> = conjugate(&q);\n assert_close_l2!(&qc.dot(&q), &Array::eye(N \/ 2), rtol; \"Check Q^H Q = I\");\n assert_close_l2!(&q.dot(&r), &a, rtol; \"Check A = QR\");\n}\n\n#[test]\nfn half_f32() {\n half::<f32>(1e-5);\n}\n#[test]\nfn half_f64() {\n half::<f64>(1e-9);\n}\n#[test]\nfn half_c32() {\n half::<c32>(1e-5);\n}\n#[test]\nfn half_c64() {\n half::<c64>(1e-9);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for loading effects.<commit_after>\/\/! 
Test loading of effect definitions from the disk.\n\nextern crate libfriendship;\nextern crate digest;\nextern crate serde_json;\nextern crate sha2;\nextern crate tempdir;\nextern crate url;\n\nuse std::fs::File;\nuse std::sync::mpsc::{channel, Receiver, Sender};\n\nuse digest::digest_reader;\nuse sha2::Sha256;\nuse tempdir::TempDir;\nuse url::Url;\n\nuse libfriendship::{Dispatch, Client};\nuse libfriendship::dispatch::{OscRouteGraph, OscRenderer, OscResMan};\nuse libfriendship::render::RefRenderer;\nuse libfriendship::routing::{adjlist, NodeHandle, DagHandle, Edge, EdgeWeight, EffectMeta, EffectDesc};\nuse libfriendship::routing::AdjList;\n\nstruct MyClient {\n \/\/\/ Where to send the rendered audio.\n tx: Sender<Vec<f32>>,\n}\nimpl Client for MyClient {\n fn audio_rendered(&mut self, buffer: &[f32], _idx: u64, _num_ch: u8) {\n self.tx.send(buffer.iter().cloned().collect()).unwrap();\n }\n}\n\nfn test_setup() -> (Dispatch<RefRenderer>, Receiver<Vec<f32>>) {\n let (tx, rx) = channel();\n let client = Box::new(MyClient{ tx });\n let mut dispatch: Dispatch<RefRenderer> = Dispatch::new();\n dispatch.register_client(client);\n (dispatch, rx)\n}\n\nfn create_multby2() -> EffectDesc {\n let mult_hnd = NodeHandle::new_node(DagHandle::toplevel(), 1);\n let mult_data = adjlist::NodeData::Effect(\n EffectMeta::new(\"Multiply\".to_string(), None, [Url::parse(\"primitive:\/\/\/Multiply\").unwrap()].iter().cloned())\n );\n let const_hnd = NodeHandle::new_node(DagHandle::toplevel(), 2);\n let const_data = adjlist::NodeData::Effect(\n EffectMeta::new(\"Constant\".to_string(), None, [Url::parse(\"primitive:\/\/\/Constant?value=5\").unwrap()].iter().cloned())\n );\n\n let nodes = [(mult_hnd, mult_data), (const_hnd, const_data)];\n\n let edge_in = Edge::new_from_null(mult_hnd, EdgeWeight::new(0, 0, 0, 0));\n let edge_out = Edge::new_to_null(mult_hnd, EdgeWeight::new(1, 0, 1, 0));\n let edge_const = Edge::new(const_hnd, mult_hnd, EdgeWeight::new(1, 0, 2, 0)).unwrap();\n\n let edges = [edge_in, edge_out, edge_const];\n\n let list = AdjList {\n nodes: nodes.iter().cloned().collect(),\n edges: edges.iter().cloned().collect(),\n };\n let meta = EffectMeta::new(\"MulBy2\".to_string(), None, Vec::new().into_iter());\n EffectDesc::new(meta, list)\n}\n\n#[test]\nfn load_multby2() {\n let (mut dispatch, rx) = test_setup();\n let dir = TempDir::new(\"libfriendship\").unwrap();\n let mulby2_desc = create_multby2();\n\n \/\/ Add the temp dir as a search dir\n dispatch.dispatch(\n OscResMan::AddDir((), (dir.path().to_str().unwrap().to_string(),)).into()\n ).unwrap();\n\n \/\/ Write the effect definition to file\n let mulby2_path = dir.path().join(\"mulby2.fnd\");\n let mulby2_file = File::create(mulby2_path.clone()).unwrap();\n serde_json::to_writer(mulby2_file, &mulby2_desc).unwrap();\n\n \/\/ Determine the hash of our file\n let mut mulby2_file = File::open(mulby2_path).unwrap();\n let hash_result = digest_reader::<Sha256>(&mut mulby2_file).unwrap();\n let mut sha: [u8; 32] = Default::default();\n sha.copy_from_slice(hash_result.as_slice());\n\n \/\/ Create the MulBy2 node (id=1)\n let mul_hnd = NodeHandle::new_node(DagHandle::toplevel(), 1);\n dispatch.dispatch(OscRouteGraph::AddNode((), (mul_hnd, adjlist::NodeData::Effect(\n EffectMeta::new(\"MulBy2\".to_string(), Some(sha), [].iter().cloned())\n ))).into()).unwrap();\n \/\/ Connect MulBy2 output to master output.\n dispatch.dispatch(OscRouteGraph::AddEdge((), (Edge::new_to_null(mul_hnd, EdgeWeight::new(1, 0, 0, 0)),)).into()).unwrap();\n \n \/\/ Create Constant 
node (id=2)\n let const_hnd = NodeHandle::new_node(DagHandle::toplevel(), 2);\n dispatch.dispatch(OscRouteGraph::AddNode((), (const_hnd, adjlist::NodeData::Effect(\n EffectMeta::new(\"Constant\".to_string(), None, [Url::parse(\"primitive:\/\/\/Constant?value=0.5\").unwrap()].iter().cloned())\n ))).into()).unwrap();\n \/\/ Route constant output to mul input\n dispatch.dispatch(OscRouteGraph::AddEdge((), (Edge::new(const_hnd, mul_hnd, EdgeWeight::new(1, 0, 0, 0)).unwrap(),)).into()).unwrap();\n \n \/\/ Read some data from ch=0.\n \/\/ This should be 0.5*5 = [2.5, 2.5, 2.5, 2.5]\n dispatch.dispatch(\n OscRenderer::RenderRange((), (0, 4, 1))\n .into()).unwrap();\n let rendered = rx.recv().unwrap();\n assert_eq!(rendered, vec![2.5f32, 2.5f32, 2.5f32, 2.5f32]);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix menu example<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::Bindings::FormDataBinding;\nuse dom::bindings::codegen::Bindings::FormDataBinding::FormDataMethods;\nuse dom::bindings::codegen::InheritTypes::FileCast;\nuse dom::bindings::codegen::UnionTypes::FileOrString::{FileOrString, eFile, eString};\nuse dom::bindings::error::{Fallible};\nuse dom::bindings::global::{GlobalRef, GlobalField};\nuse dom::bindings::js::{JS, JSRef, Temporary};\nuse dom::bindings::trace::Traceable;\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::blob::Blob;\nuse dom::file::File;\nuse dom::htmlformelement::HTMLFormElement;\nuse servo_util::str::DOMString;\nuse std::cell::RefCell;\nuse std::collections::hashmap::HashMap;\n\n#[deriving(Clone)]\n#[jstraceable]\n#[must_root]\npub enum FormDatum {\n StringData(DOMString),\n FileData(JS<File>)\n}\n\n#[jstraceable]\n#[must_root]\npub struct FormData {\n data: Traceable<RefCell<HashMap<DOMString, Vec<FormDatum>>>>,\n reflector_: Reflector,\n global: GlobalField,\n form: Option<JS<HTMLFormElement>>\n}\n\nimpl FormData {\n fn new_inherited(form: Option<JSRef<HTMLFormElement>>, global: &GlobalRef) -> FormData {\n FormData {\n data: Traceable::new(RefCell::new(HashMap::new())),\n reflector_: Reflector::new(),\n global: GlobalField::from_rooted(global),\n form: form.map(|f| JS::from_rooted(f)),\n }\n }\n\n pub fn new(form: Option<JSRef<HTMLFormElement>>, global: &GlobalRef) -> Temporary<FormData> {\n reflect_dom_object(box FormData::new_inherited(form, global),\n global, FormDataBinding::Wrap)\n }\n\n pub fn Constructor(global: &GlobalRef, form: Option<JSRef<HTMLFormElement>>) -> Fallible<Temporary<FormData>> {\n Ok(FormData::new(form, global))\n }\n}\n\nimpl<'a> FormDataMethods for JSRef<'a, FormData> {\n #[allow(unrooted_must_root)]\n fn Append(self, name: DOMString, value: JSRef<Blob>, filename: Option<DOMString>) {\n let file = FileData(JS::from_rooted(self.get_file_from_blob(value, filename)));\n self.data.deref().borrow_mut().insert_or_update_with(name.clone(), vec!(file.clone()),\n |_k, v| {v.push(file.clone());});\n }\n\n fn Append_(self, name: DOMString, value: DOMString) {\n self.data.deref().borrow_mut().insert_or_update_with(name, vec!(StringData(value.clone())),\n |_k, v| {v.push(StringData(value.clone()));});\n }\n\n fn Delete(self, name: DOMString) {\n self.data.deref().borrow_mut().remove(&name);\n }\n\n fn Get(self, name: DOMString) -> Option<FileOrString> {\n if 
self.data.deref().borrow().contains_key_equiv(&name) {\n match (*self.data.deref().borrow())[name][0].clone() {\n StringData(ref s) => Some(eString(s.clone())),\n FileData(ref f) => {\n Some(eFile(f.clone()))\n }\n }\n } else {\n None\n }\n }\n\n fn Has(self, name: DOMString) -> bool {\n self.data.deref().borrow().contains_key_equiv(&name)\n }\n #[allow(unrooted_must_root)]\n fn Set(self, name: DOMString, value: JSRef<Blob>, filename: Option<DOMString>) {\n let file = FileData(JS::from_rooted(self.get_file_from_blob(value, filename)));\n self.data.deref().borrow_mut().insert(name, vec!(file));\n }\n\n fn Set_(self, name: DOMString, value: DOMString) {\n self.data.deref().borrow_mut().insert(name, vec!(StringData(value)));\n }\n}\n\nimpl Reflectable for FormData {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n}\n\ntrait PrivateFormDataHelpers{\n fn get_file_from_blob(&self, value: JSRef<Blob>, filename: Option<DOMString>) -> Temporary<File>;\n}\n\nimpl PrivateFormDataHelpers for FormData {\n fn get_file_from_blob(&self, value: JSRef<Blob>, filename: Option<DOMString>) -> Temporary<File> {\n let global = self.global.root();\n let f: Option<JSRef<File>> = FileCast::to_ref(value);\n let name = filename.unwrap_or(f.map(|inner| inner.name.clone()).unwrap_or(\"blob\".to_string()));\n File::new(&global.root_ref(), value, name)\n }\n}\n<commit_msg>Remove Traceable from formdata.rs<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::codegen::Bindings::FormDataBinding;\nuse dom::bindings::codegen::Bindings::FormDataBinding::FormDataMethods;\nuse dom::bindings::codegen::InheritTypes::FileCast;\nuse dom::bindings::codegen::UnionTypes::FileOrString::{FileOrString, eFile, eString};\nuse dom::bindings::error::{Fallible};\nuse dom::bindings::global::{GlobalRef, GlobalField};\nuse dom::bindings::js::{JS, JSRef, Temporary};\nuse dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};\nuse dom::blob::Blob;\nuse dom::file::File;\nuse dom::htmlformelement::HTMLFormElement;\nuse servo_util::str::DOMString;\nuse std::cell::RefCell;\nuse std::collections::hashmap::HashMap;\n\n#[deriving(Clone)]\n#[jstraceable]\n#[must_root]\npub enum FormDatum {\n StringData(DOMString),\n FileData(JS<File>)\n}\n\n#[jstraceable]\n#[must_root]\npub struct FormData {\n data: RefCell<HashMap<DOMString, Vec<FormDatum>>>,\n reflector_: Reflector,\n global: GlobalField,\n form: Option<JS<HTMLFormElement>>\n}\n\nimpl FormData {\n fn new_inherited(form: Option<JSRef<HTMLFormElement>>, global: &GlobalRef) -> FormData {\n FormData {\n data: RefCell::new(HashMap::new()),\n reflector_: Reflector::new(),\n global: GlobalField::from_rooted(global),\n form: form.map(|f| JS::from_rooted(f)),\n }\n }\n\n pub fn new(form: Option<JSRef<HTMLFormElement>>, global: &GlobalRef) -> Temporary<FormData> {\n reflect_dom_object(box FormData::new_inherited(form, global),\n global, FormDataBinding::Wrap)\n }\n\n pub fn Constructor(global: &GlobalRef, form: Option<JSRef<HTMLFormElement>>) -> Fallible<Temporary<FormData>> {\n Ok(FormData::new(form, global))\n }\n}\n\nimpl<'a> FormDataMethods for JSRef<'a, FormData> {\n #[allow(unrooted_must_root)]\n fn Append(self, name: DOMString, value: JSRef<Blob>, filename: Option<DOMString>) {\n let file = FileData(JS::from_rooted(self.get_file_from_blob(value, filename)));\n 
self.data.borrow_mut().insert_or_update_with(name.clone(), vec!(file.clone()),\n |_k, v| {v.push(file.clone());});\n }\n\n fn Append_(self, name: DOMString, value: DOMString) {\n self.data.borrow_mut().insert_or_update_with(name, vec!(StringData(value.clone())),\n |_k, v| {v.push(StringData(value.clone()));});\n }\n\n fn Delete(self, name: DOMString) {\n self.data.borrow_mut().remove(&name);\n }\n\n fn Get(self, name: DOMString) -> Option<FileOrString> {\n if self.data.borrow().contains_key_equiv(&name) {\n match (*self.data.borrow())[name][0].clone() {\n StringData(ref s) => Some(eString(s.clone())),\n FileData(ref f) => {\n Some(eFile(f.clone()))\n }\n }\n } else {\n None\n }\n }\n\n fn Has(self, name: DOMString) -> bool {\n self.data.borrow().contains_key_equiv(&name)\n }\n #[allow(unrooted_must_root)]\n fn Set(self, name: DOMString, value: JSRef<Blob>, filename: Option<DOMString>) {\n let file = FileData(JS::from_rooted(self.get_file_from_blob(value, filename)));\n self.data.borrow_mut().insert(name, vec!(file));\n }\n\n fn Set_(self, name: DOMString, value: DOMString) {\n self.data.borrow_mut().insert(name, vec!(StringData(value)));\n }\n}\n\nimpl Reflectable for FormData {\n fn reflector<'a>(&'a self) -> &'a Reflector {\n &self.reflector_\n }\n}\n\ntrait PrivateFormDataHelpers{\n fn get_file_from_blob(&self, value: JSRef<Blob>, filename: Option<DOMString>) -> Temporary<File>;\n}\n\nimpl PrivateFormDataHelpers for FormData {\n fn get_file_from_blob(&self, value: JSRef<Blob>, filename: Option<DOMString>) -> Temporary<File> {\n let global = self.global.root();\n let f: Option<JSRef<File>> = FileCast::to_ref(value);\n let name = filename.unwrap_or(f.map(|inner| inner.name.clone()).unwrap_or(\"blob\".to_string()));\n File::new(&global.root_ref(), value, name)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: gap buffer<commit_after><|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse git2::{Index, Repository};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::MapErrTrace;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\n\/\/\/ Runtime object for git hook implementations.\n\/\/\/\n\/\/\/ Contains some utility functionality to hold the repository and the configuration for the hooks.\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n \/\/\/ Build a `Runtime` object, pass the store path to build the `Repository` instance the\n \/\/\/ `Runtime` has to contain.\n \/\/\/\n \/\/\/ If the building of the `Repository` fails, this function `trace_error()`s the error and\n \/\/\/ returns a `Runtime` object that does _not_ contain a `Repository`.\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: Repository::open(storepath).map_err_trace().ok(),\n config: None,\n }\n }\n\n \/\/\/ Set the configuration for the `Runtime`. 
Always returns `Ok(())`.\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n \/\/\/ Check whether the `Runtime` has a `Repository`\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n \/\/\/ Check whether the `Runtime` has a configuration\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n \/\/\/ Get the the config value by reference or get an `Err()` which can be returned to the callee\n \/\/\/ of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n \/\/\/ Get the `Repository` object from the `Runtime` or an `Err()` that can be returned to the\n \/\/\/ callee of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n \/\/\/ Ensure that the branch that is put in the configuration file is checked out, if any.\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n use vcs::git::config::do_checkout_ensure_branch;\n\n debug!(\"[GIT {} HOOK]: Ensuring branch checkout\", action.uppercase());\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"[GIT {} HOOK]: Repository fetched, getting head\", action.uppercase());\n r.head()\n .map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n }));\n debug!(\"[GIT {} HOOK]: HEAD fetched\", action.uppercase());\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if !head.is_branch() {\n debug!(\"[GIT {} HOOK]: HEAD is not a branch\", action.uppercase());\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"[GIT {} HOOK]: HEAD is a branch\", action.uppercase());\n\n \/\/ Check out appropriate branch ... 
or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"[GIT {} HOOK]: We have to ensure branch: {}\", action.uppercase(), s);\n match head.name().map(|name| {\n debug!(\"[GIT {} HOOK]: {} == {}\", action.uppercase(), name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"[GIT {} HOOK]: Branch already checked out.\", action.uppercase());\n Ok(())\n } else {\n debug!(\"[GIT {} HOOK]: Branch not checked out.\", action.uppercase());\n\n if !do_checkout_ensure_branch(self.config.as_ref()) {\n Err(GHEK::RepositoryWrongBranchError.into_error())\n .map_err_into(GHEK::RepositoryError)\n } else {\n \/\/ Else try to check out the branch...\n unimplemented!()\n }\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"[GIT {} HOOK]: No branch to checkout\", action.uppercase());\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Branch checked out\", action.uppercase()))\n }\n\n \/\/\/ Check whether the WD is \"dirty\" - whether there is a diff to the repository\n \/\/\/ This function returns false if there is no `Repository` object in the `Runtime`\n pub fn repo_is_dirty(&self, index: &Index) -> bool {\n match self.repository.as_ref() {\n Some(repo) => {\n repo.diff_index_to_workdir(Some(index), None)\n .map_dbg_str(\"Fetched diff: Index <-> WD\")\n .map_dbg_err_str(\"Failed to fetch diff: Index <-> WD\")\n .map(|diff| diff.deltas().count() != 0)\n .unwrap_or(false)\n },\n\n None => {\n debug!(\"No repository: Cannot fetch diff: Index <-> WD\");\n false\n }\n }\n\n }\n\n}\n\n<commit_msg>Revert \"Add helper fn to check whether the repository WD is dirty\"<commit_after>use std::path::PathBuf;\n\nuse git2::Repository;\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::MapErrTrace;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\n\/\/\/ Runtime object for git hook implementations.\n\/\/\/\n\/\/\/ Contains some utility functionality to hold the repository and the configuration for the hooks.\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n \/\/\/ Build a `Runtime` object, pass the store path to build the `Repository` instance the\n \/\/\/ `Runtime` has to contain.\n \/\/\/\n \/\/\/ If the building of the `Repository` fails, this function `trace_error()`s the error and\n \/\/\/ returns a `Runtime` object that does _not_ contain a `Repository`.\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: Repository::open(storepath).map_err_trace().ok(),\n config: None,\n }\n }\n\n \/\/\/ Set the configuration for the `Runtime`. 
Always returns `Ok(())`.\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n \/\/\/ Check whether the `Runtime` has a `Repository`\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n \/\/\/ Check whether the `Runtime` has a configuration\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n \/\/\/ Get the the config value by reference or get an `Err()` which can be returned to the callee\n \/\/\/ of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n \/\/\/ Get the `Repository` object from the `Runtime` or an `Err()` that can be returned to the\n \/\/\/ callee of the Hook.\n \/\/\/\n \/\/\/ The `action` Argument is required in case of `Err()` so the error message can be build\n \/\/\/ correctly.\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n \/\/\/ Ensure that the branch that is put in the configuration file is checked out, if any.\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n use vcs::git::config::do_checkout_ensure_branch;\n\n debug!(\"[GIT {} HOOK]: Ensuring branch checkout\", action.uppercase());\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"[GIT {} HOOK]: Repository fetched, getting head\", action.uppercase());\n r.head()\n .map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n }));\n debug!(\"[GIT {} HOOK]: HEAD fetched\", action.uppercase());\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if !head.is_branch() {\n debug!(\"[GIT {} HOOK]: HEAD is not a branch\", action.uppercase());\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"[GIT {} HOOK]: HEAD is a branch\", action.uppercase());\n\n \/\/ Check out appropriate branch ... 
or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"[GIT {} HOOK]: We have to ensure branch: {}\", action.uppercase(), s);\n match head.name().map(|name| {\n debug!(\"[GIT {} HOOK]: {} == {}\", action.uppercase(), name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"[GIT {} HOOK]: Branch already checked out.\", action.uppercase());\n Ok(())\n } else {\n debug!(\"[GIT {} HOOK]: Branch not checked out.\", action.uppercase());\n\n if !do_checkout_ensure_branch(self.config.as_ref()) {\n Err(GHEK::RepositoryWrongBranchError.into_error())\n .map_err_into(GHEK::RepositoryError)\n } else {\n \/\/ Else try to check out the branch...\n unimplemented!()\n }\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"[GIT {} HOOK]: No branch to checkout\", action.uppercase());\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Branch checked out\", action.uppercase()))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse git2::{Repository, Signature};\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::config::{author_name, author_mail, committer_name, committer_mail};\n\nstruct Person<'a> {\n pub name: &'a str,\n pub mail: &'a str,\n}\n\nimpl<'a> Person<'a> {\n fn new(name: &'a str, mail: &'a str) -> Person<'a> {\n Person { name: name, mail: mail }\n }\n}\n\nstruct Runtime<'a> {\n pub repository: Option<Repository>,\n pub author: Option<Person<'a>>,\n pub committer: Option<Person<'a>>,\n}\n\nimpl<'a> Runtime<'a> {\n\n pub fn new(storepath: &PathBuf) -> Runtime<'a> {\n Runtime {\n repository: match Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n author: None,\n committer: None,\n }\n }\n\n pub fn configure(&mut self, config: &Value) -> Result<()> {\n author_name(cfg)\n .and_then(|n| author_email(cfg).map(|m| Person::new(n, m)))\n .and_then(|author| {\n committer_name(cfg)\n .and_then(|n| committer_email(cfg).map(|m| (author, Person::new(n, m))))\n })\n .map(|(author, committer)| {\n self.author = Some(author);\n self.committer = Some(committer);\n })\n }\n\n pub fn new_committer_sig(&self) -> Option<Result<Signature>> {\n self.committer\n .as_ref()\n .map(|c| {\n Signature::now(c.name, c.mail)\n .map_err(|e| GHEK::MkSignature.into_error_with_cause(Box::new(e)))\n })\n }\n\n}\n\n<commit_msg>Add Runtime::repository()<commit_after>use std::path::PathBuf;\n\nuse git2::{Repository, Signature};\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::config::{author_name, author_mail, committer_name, committer_mail};\n\nstruct Person<'a> {\n pub name: &'a str,\n pub mail: &'a str,\n}\n\nimpl<'a> Person<'a> {\n fn new(name: &'a str, mail: &'a str) -> Person<'a> {\n Person { name: name, mail: mail }\n }\n}\n\nstruct Runtime<'a> {\n pub repository: Option<Repository>,\n pub author: Option<Person<'a>>,\n pub committer: Option<Person<'a>>,\n}\n\nimpl<'a> Runtime<'a> {\n\n pub fn new(storepath: &PathBuf) -> Runtime<'a> {\n Runtime {\n repository: match 
Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n author: None,\n committer: None,\n }\n }\n\n pub fn configure(&mut self, config: &Value) -> Result<()> {\n author_name(cfg)\n .and_then(|n| author_email(cfg).map(|m| Person::new(n, m)))\n .and_then(|author| {\n committer_name(cfg)\n .and_then(|n| committer_email(cfg).map(|m| (author, Person::new(n, m))))\n })\n .map(|(author, committer)| {\n self.author = Some(author);\n self.committer = Some(committer);\n })\n }\n\n pub fn new_committer_sig(&self) -> Option<Result<Signature>> {\n self.committer\n .as_ref()\n .map(|c| {\n Signature::now(c.name, c.mail)\n .map_err(|e| GHEK::MkSignature.into_error_with_cause(Box::new(e)))\n })\n }\n\n pub fn repository(&self) -> Result<&Repository> {\n self.repository.as_ref().ok_or(GHEK::MkRepo.into_error())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Common handling of keyboard input and state management for text input controls\n\nuse dom::bindings::codegen::Bindings::KeyboardEventBinding::KeyboardEventMethods;\nuse dom::bindings::js::JSRef;\nuse dom::keyboardevent::KeyboardEvent;\nuse servo_util::str::DOMString;\n\nuse std::cmp::{min, max};\nuse std::default::Default;\nuse std::num::SignedInt;\n\n#[deriving(Copy, PartialEq)]\nenum Selection {\n Selected,\n NotSelected\n}\n\n#[jstraceable]\n#[deriving(Copy)]\nstruct TextPoint {\n \/\/\/ 0-based line number\n line: uint,\n \/\/\/ 0-based column number\n index: uint,\n}\n\n\/\/\/ Encapsulated state for handling keyboard input in a single or multiline text input control.\n#[jstraceable]\npub struct TextInput {\n \/\/\/ Current text input content, split across lines without trailing '\\n'\n lines: Vec<DOMString>,\n \/\/\/ Current cursor input point\n edit_point: TextPoint,\n \/\/\/ Beginning of selection range with edit_point as end that can span multiple lines.\n selection_begin: Option<TextPoint>,\n \/\/\/ Is this a multiline input?\n multiline: bool,\n}\n\n\/\/\/ Resulting action to be taken by the owner of a text input that is handling an event.\npub enum KeyReaction {\n TriggerDefaultAction,\n DispatchInput,\n Nothing,\n}\n\nimpl Default for TextPoint {\n fn default() -> TextPoint {\n TextPoint {\n line: 0,\n index: 0,\n }\n }\n}\n\n\/\/\/ Control whether this control should allow multiple lines.\n#[deriving(PartialEq)]\npub enum Lines {\n Single,\n Multiple,\n}\n\n\/\/\/ The direction in which to delete a character.\n#[deriving(PartialEq)]\nenum DeleteDir {\n Forward,\n Backward\n}\n\n\n\/\/\/ Was the keyboard event accompanied by the standard control modifier,\n\/\/\/ i.e. 
cmd on Mac OS or ctrl on other platforms.\n#[cfg(target_os=\"macos\")]\nfn is_control_key(event: JSRef<KeyboardEvent>) -> bool {\n event.MetaKey() && !event.CtrlKey() && !event.AltKey()\n}\n\n#[cfg(not(target_os=\"macos\"))]\nfn is_control_key(event: JSRef<KeyboardEvent>) -> bool {\n event.CtrlKey() && !event.MetaKey() && !event.AltKey()\n}\n\nimpl TextInput {\n \/\/\/ Instantiate a new text input control\n pub fn new(lines: Lines, initial: DOMString) -> TextInput {\n let mut i = TextInput {\n lines: vec!(),\n edit_point: Default::default(),\n selection_begin: None,\n multiline: lines == Lines::Multiple,\n };\n i.set_content(initial);\n i\n }\n\n \/\/\/ Remove a character at the current editing point\n fn delete_char(&mut self, dir: DeleteDir) {\n if self.selection_begin.is_none() {\n self.adjust_horizontal(if dir == DeleteDir::Forward {\n 1\n } else {\n -1\n }, Selection::Selected);\n }\n self.replace_selection(\"\".into_string());\n }\n\n \/\/\/ Insert a character at the current editing point\n fn insert_char(&mut self, ch: char) {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n self.replace_selection(ch.to_string());\n }\n\n fn get_sorted_selection(&self) -> (TextPoint, TextPoint) {\n let begin = self.selection_begin.unwrap();\n let end = self.edit_point;\n\n if begin.line < end.line || (begin.line == end.line && begin.index < end.index) {\n (begin, end)\n } else {\n (end, begin)\n }\n }\n\n fn replace_selection(&mut self, insert: String) {\n let (begin, end) = self.get_sorted_selection();\n self.selection_begin = None;\n\n let new_lines = {\n let prefix = self.lines[begin.line].slice_chars(0, begin.index);\n let suffix = self.lines[end.line].slice_chars(end.index, self.lines[end.line].char_len());\n let lines_prefix = self.lines.slice(0, begin.line);\n let lines_suffix = self.lines.slice(end.line + 1, self.lines.len());\n\n let mut insert_lines = if self.multiline {\n insert.as_slice().split('\\n').map(|s| s.into_string()).collect()\n } else {\n vec!(insert)\n };\n\n let mut new_line = prefix.into_string();\n new_line.push_str(insert_lines[0].as_slice());\n insert_lines[0] = new_line;\n\n let last_insert_lines_index = insert_lines.len() - 1;\n self.edit_point.index = insert_lines[last_insert_lines_index].char_len();\n self.edit_point.line = begin.line + last_insert_lines_index;\n\n insert_lines[last_insert_lines_index].push_str(suffix);\n\n let mut new_lines = vec!();\n new_lines.push_all(lines_prefix);\n new_lines.push_all(insert_lines.as_slice());\n new_lines.push_all(lines_suffix);\n new_lines\n };\n\n self.lines = new_lines;\n }\n\n \/\/\/ Return the length of the current line under the editing point.\n fn current_line_length(&self) -> uint {\n self.lines[self.edit_point.line].char_len()\n }\n\n \/\/\/ Adjust the editing point position by a given of lines. 
The resulting column is\n \/\/\/ as close to the original column position as possible.\n fn adjust_vertical(&mut self, adjust: int, select: Selection) {\n if !self.multiline {\n return;\n }\n\n if select == Selection::Selected {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n } else {\n self.selection_begin = None;\n }\n\n assert!(self.edit_point.line < self.lines.len());\n\n let target_line: int = self.edit_point.line as int + adjust;\n\n if target_line < 0 {\n self.edit_point.index = 0;\n self.edit_point.line = 0;\n return;\n } else if target_line as uint >= self.lines.len() {\n self.edit_point.line = self.lines.len() - 1;\n self.edit_point.index = self.current_line_length();\n return;\n }\n\n self.edit_point.line = target_line as uint;\n self.edit_point.index = min(self.current_line_length(), self.edit_point.index);\n }\n\n \/\/\/ Adjust the editing point position by a given number of columns. If the adjustment\n \/\/\/ requested is larger than is available in the current line, the editing point is\n \/\/\/ adjusted vertically and the process repeats with the remaining adjustment requested.\n fn adjust_horizontal(&mut self, adjust: int, select: Selection) {\n if select == Selection::Selected {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n } else {\n if self.selection_begin.is_some() {\n let (begin, end) = self.get_sorted_selection();\n self.edit_point = if adjust < 0 {begin} else {end};\n self.selection_begin = None;\n return\n }\n }\n\n if adjust < 0 {\n let remaining = self.edit_point.index;\n if adjust.abs() as uint > remaining && self.edit_point.line > 0 {\n self.adjust_vertical(-1, select);\n self.edit_point.index = self.current_line_length();\n self.adjust_horizontal(adjust + remaining as int + 1, select);\n } else {\n self.edit_point.index = max(0, self.edit_point.index as int + adjust) as uint;\n }\n } else {\n let remaining = self.current_line_length() - self.edit_point.index;\n if adjust as uint > remaining && self.lines.len() > self.edit_point.line + 1 {\n self.adjust_vertical(1, select);\n self.edit_point.index = 0;\n \/\/ one shift is consumed by the change of line, hence the -1\n self.adjust_horizontal(adjust - remaining as int - 1, select);\n } else {\n self.edit_point.index = min(self.current_line_length(),\n self.edit_point.index + adjust as uint);\n }\n }\n }\n\n \/\/\/ Deal with a newline input.\n fn handle_return(&mut self) -> KeyReaction {\n if !self.multiline {\n return KeyReaction::TriggerDefaultAction;\n }\n self.insert_char('\\n');\n return KeyReaction::DispatchInput;\n }\n\n \/\/\/ Select all text in the input control.\n fn select_all(&mut self) {\n self.selection_begin = Some(TextPoint {\n line: 0,\n index: 0,\n });\n let last_line = self.lines.len() - 1;\n self.edit_point.line = last_line;\n self.edit_point.index = self.lines[last_line].char_len();\n }\n\n \/\/\/ Process a given `KeyboardEvent` and return an action for the caller to execute.\n pub fn handle_keydown(&mut self, event: JSRef<KeyboardEvent>) -> KeyReaction {\n \/\/A simple way to convert an event to a selection\n fn maybe_select(event: JSRef<KeyboardEvent>) -> Selection {\n if event.ShiftKey() {\n return Selection::Selected\n }\n return Selection::NotSelected\n }\n match event.Key().as_slice() {\n \"a\" if is_control_key(event) => {\n self.select_all();\n KeyReaction::Nothing\n },\n \/\/ printable characters have single-character key values\n c if c.len() == 1 => {\n self.insert_char(c.char_at(0));\n return 
KeyReaction::DispatchInput;\n }\n \"Space\" => {\n self.insert_char(' ');\n KeyReaction::DispatchInput\n }\n \"Delete\" => {\n self.delete_char(DeleteDir::Forward);\n KeyReaction::DispatchInput\n }\n \"Backspace\" => {\n self.delete_char(DeleteDir::Backward);\n KeyReaction::DispatchInput\n }\n \"ArrowLeft\" => {\n self.adjust_horizontal(-1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowRight\" => {\n self.adjust_horizontal(1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowUp\" => {\n self.adjust_vertical(-1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowDown\" => {\n self.adjust_vertical(1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"Enter\" => self.handle_return(),\n \"Home\" => {\n self.edit_point.index = 0;\n KeyReaction::Nothing\n }\n \"End\" => {\n self.edit_point.index = self.current_line_length();\n KeyReaction::Nothing\n }\n \"PageUp\" => {\n self.adjust_vertical(-28, maybe_select(event));\n KeyReaction::Nothing\n }\n \"PageDown\" => {\n self.adjust_vertical(28, maybe_select(event));\n KeyReaction::Nothing\n }\n \"Tab\" => KeyReaction::TriggerDefaultAction,\n _ => KeyReaction::Nothing,\n }\n }\n\n \/\/\/ Get the current contents of the text input. Multiple lines are joined by \\n.\n pub fn get_content(&self) -> DOMString {\n let mut content = \"\".into_string();\n for (i, line) in self.lines.iter().enumerate() {\n content.push_str(line.as_slice());\n if i < self.lines.len() - 1 {\n content.push('\\n');\n }\n }\n content\n }\n\n \/\/\/ Set the current contents of the text input. If this is control supports multiple lines,\n \/\/\/ any \\n encountered will be stripped and force a new logical line.\n pub fn set_content(&mut self, content: DOMString) {\n self.lines = if self.multiline {\n content.as_slice().split('\\n').map(|s| s.into_string()).collect()\n } else {\n vec!(content)\n };\n self.edit_point.line = min(self.edit_point.line, self.lines.len() - 1);\n\n if self.current_line_length() == 0 {\n self.edit_point.index = 0;\n }\n else {\n self.edit_point.index = min(self.edit_point.index, self.current_line_length() - 1);\n }\n }\n}\n<commit_msg>auto merge of #4569 : donaldpipowitch\/servo\/add-unit-tests-for-textinput, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
Common handling of keyboard input and state management for text input controls\n\nuse dom::bindings::codegen::Bindings::KeyboardEventBinding::KeyboardEventMethods;\nuse dom::bindings::js::JSRef;\nuse dom::keyboardevent::KeyboardEvent;\nuse servo_util::str::DOMString;\n\nuse std::cmp::{min, max};\nuse std::default::Default;\nuse std::num::SignedInt;\n\n#[deriving(Copy, PartialEq)]\nenum Selection {\n Selected,\n NotSelected\n}\n\n#[jstraceable]\n#[deriving(Copy)]\nstruct TextPoint {\n \/\/\/ 0-based line number\n line: uint,\n \/\/\/ 0-based column number\n index: uint,\n}\n\n\/\/\/ Encapsulated state for handling keyboard input in a single or multiline text input control.\n#[jstraceable]\npub struct TextInput {\n \/\/\/ Current text input content, split across lines without trailing '\\n'\n lines: Vec<DOMString>,\n \/\/\/ Current cursor input point\n edit_point: TextPoint,\n \/\/\/ Beginning of selection range with edit_point as end that can span multiple lines.\n selection_begin: Option<TextPoint>,\n \/\/\/ Is this a multiline input?\n multiline: bool,\n}\n\n\/\/\/ Resulting action to be taken by the owner of a text input that is handling an event.\npub enum KeyReaction {\n TriggerDefaultAction,\n DispatchInput,\n Nothing,\n}\n\nimpl Default for TextPoint {\n fn default() -> TextPoint {\n TextPoint {\n line: 0,\n index: 0,\n }\n }\n}\n\n\/\/\/ Control whether this control should allow multiple lines.\n#[deriving(PartialEq)]\npub enum Lines {\n Single,\n Multiple,\n}\n\n\/\/\/ The direction in which to delete a character.\n#[deriving(PartialEq)]\nenum DeleteDir {\n Forward,\n Backward\n}\n\n\n\/\/\/ Was the keyboard event accompanied by the standard control modifier,\n\/\/\/ i.e. cmd on Mac OS or ctrl on other platforms.\n#[cfg(target_os=\"macos\")]\nfn is_control_key(event: JSRef<KeyboardEvent>) -> bool {\n event.MetaKey() && !event.CtrlKey() && !event.AltKey()\n}\n\n#[cfg(not(target_os=\"macos\"))]\nfn is_control_key(event: JSRef<KeyboardEvent>) -> bool {\n event.CtrlKey() && !event.MetaKey() && !event.AltKey()\n}\n\nimpl TextInput {\n \/\/\/ Instantiate a new text input control\n pub fn new(lines: Lines, initial: DOMString) -> TextInput {\n let mut i = TextInput {\n lines: vec!(),\n edit_point: Default::default(),\n selection_begin: None,\n multiline: lines == Lines::Multiple,\n };\n i.set_content(initial);\n i\n }\n\n \/\/\/ Remove a character at the current editing point\n fn delete_char(&mut self, dir: DeleteDir) {\n if self.selection_begin.is_none() {\n self.adjust_horizontal(if dir == DeleteDir::Forward {\n 1\n } else {\n -1\n }, Selection::Selected);\n }\n self.replace_selection(\"\".into_string());\n }\n\n \/\/\/ Insert a character at the current editing point\n fn insert_char(&mut self, ch: char) {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n self.replace_selection(ch.to_string());\n }\n\n fn get_sorted_selection(&self) -> (TextPoint, TextPoint) {\n let begin = self.selection_begin.unwrap();\n let end = self.edit_point;\n\n if begin.line < end.line || (begin.line == end.line && begin.index < end.index) {\n (begin, end)\n } else {\n (end, begin)\n }\n }\n\n fn replace_selection(&mut self, insert: String) {\n let (begin, end) = self.get_sorted_selection();\n self.clear_selection();\n\n let new_lines = {\n let prefix = self.lines[begin.line].slice_chars(0, begin.index);\n let suffix = self.lines[end.line].slice_chars(end.index, self.lines[end.line].char_len());\n let lines_prefix = self.lines.slice(0, begin.line);\n let lines_suffix = 
self.lines.slice(end.line + 1, self.lines.len());\n\n let mut insert_lines = if self.multiline {\n insert.as_slice().split('\\n').map(|s| s.into_string()).collect()\n } else {\n vec!(insert)\n };\n\n let mut new_line = prefix.into_string();\n new_line.push_str(insert_lines[0].as_slice());\n insert_lines[0] = new_line;\n\n let last_insert_lines_index = insert_lines.len() - 1;\n self.edit_point.index = insert_lines[last_insert_lines_index].char_len();\n self.edit_point.line = begin.line + last_insert_lines_index;\n\n insert_lines[last_insert_lines_index].push_str(suffix);\n\n let mut new_lines = vec!();\n new_lines.push_all(lines_prefix);\n new_lines.push_all(insert_lines.as_slice());\n new_lines.push_all(lines_suffix);\n new_lines\n };\n\n self.lines = new_lines;\n }\n\n \/\/\/ Return the length of the current line under the editing point.\n fn current_line_length(&self) -> uint {\n self.lines[self.edit_point.line].char_len()\n }\n\n \/\/\/ Adjust the editing point position by a given of lines. The resulting column is\n \/\/\/ as close to the original column position as possible.\n fn adjust_vertical(&mut self, adjust: int, select: Selection) {\n if !self.multiline {\n return;\n }\n\n if select == Selection::Selected {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n } else {\n self.clear_selection();\n }\n\n assert!(self.edit_point.line < self.lines.len());\n\n let target_line: int = self.edit_point.line as int + adjust;\n\n if target_line < 0 {\n self.edit_point.index = 0;\n self.edit_point.line = 0;\n return;\n } else if target_line as uint >= self.lines.len() {\n self.edit_point.line = self.lines.len() - 1;\n self.edit_point.index = self.current_line_length();\n return;\n }\n\n self.edit_point.line = target_line as uint;\n self.edit_point.index = min(self.current_line_length(), self.edit_point.index);\n }\n\n \/\/\/ Adjust the editing point position by a given number of columns. 
If the adjustment\n \/\/\/ requested is larger than is available in the current line, the editing point is\n \/\/\/ adjusted vertically and the process repeats with the remaining adjustment requested.\n fn adjust_horizontal(&mut self, adjust: int, select: Selection) {\n if select == Selection::Selected {\n if self.selection_begin.is_none() {\n self.selection_begin = Some(self.edit_point);\n }\n } else {\n if self.selection_begin.is_some() {\n let (begin, end) = self.get_sorted_selection();\n self.edit_point = if adjust < 0 {begin} else {end};\n self.clear_selection();\n return\n }\n }\n\n if adjust < 0 {\n let remaining = self.edit_point.index;\n if adjust.abs() as uint > remaining && self.edit_point.line > 0 {\n self.adjust_vertical(-1, select);\n self.edit_point.index = self.current_line_length();\n self.adjust_horizontal(adjust + remaining as int + 1, select);\n } else {\n self.edit_point.index = max(0, self.edit_point.index as int + adjust) as uint;\n }\n } else {\n let remaining = self.current_line_length() - self.edit_point.index;\n if adjust as uint > remaining && self.lines.len() > self.edit_point.line + 1 {\n self.adjust_vertical(1, select);\n self.edit_point.index = 0;\n \/\/ one shift is consumed by the change of line, hence the -1\n self.adjust_horizontal(adjust - remaining as int - 1, select);\n } else {\n self.edit_point.index = min(self.current_line_length(),\n self.edit_point.index + adjust as uint);\n }\n }\n }\n\n \/\/\/ Deal with a newline input.\n fn handle_return(&mut self) -> KeyReaction {\n if !self.multiline {\n return KeyReaction::TriggerDefaultAction;\n }\n self.insert_char('\\n');\n return KeyReaction::DispatchInput;\n }\n\n \/\/\/ Select all text in the input control.\n fn select_all(&mut self) {\n self.selection_begin = Some(TextPoint {\n line: 0,\n index: 0,\n });\n let last_line = self.lines.len() - 1;\n self.edit_point.line = last_line;\n self.edit_point.index = self.lines[last_line].char_len();\n }\n\n \/\/\/ Remove the current selection.\n fn clear_selection(&mut self) {\n self.selection_begin = None;\n }\n\n \/\/\/ Process a given `KeyboardEvent` and return an action for the caller to execute.\n pub fn handle_keydown(&mut self, event: JSRef<KeyboardEvent>) -> KeyReaction {\n \/\/A simple way to convert an event to a selection\n fn maybe_select(event: JSRef<KeyboardEvent>) -> Selection {\n if event.ShiftKey() {\n return Selection::Selected\n }\n return Selection::NotSelected\n }\n match event.Key().as_slice() {\n \"a\" if is_control_key(event) => {\n self.select_all();\n KeyReaction::Nothing\n },\n \/\/ printable characters have single-character key values\n c if c.len() == 1 => {\n self.insert_char(c.char_at(0));\n return KeyReaction::DispatchInput;\n }\n \"Space\" => {\n self.insert_char(' ');\n KeyReaction::DispatchInput\n }\n \"Delete\" => {\n self.delete_char(DeleteDir::Forward);\n KeyReaction::DispatchInput\n }\n \"Backspace\" => {\n self.delete_char(DeleteDir::Backward);\n KeyReaction::DispatchInput\n }\n \"ArrowLeft\" => {\n self.adjust_horizontal(-1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowRight\" => {\n self.adjust_horizontal(1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowUp\" => {\n self.adjust_vertical(-1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"ArrowDown\" => {\n self.adjust_vertical(1, maybe_select(event));\n KeyReaction::Nothing\n }\n \"Enter\" => self.handle_return(),\n \"Home\" => {\n self.edit_point.index = 0;\n KeyReaction::Nothing\n }\n \"End\" => {\n self.edit_point.index = 
self.current_line_length();\n KeyReaction::Nothing\n }\n \"PageUp\" => {\n self.adjust_vertical(-28, maybe_select(event));\n KeyReaction::Nothing\n }\n \"PageDown\" => {\n self.adjust_vertical(28, maybe_select(event));\n KeyReaction::Nothing\n }\n \"Tab\" => KeyReaction::TriggerDefaultAction,\n _ => KeyReaction::Nothing,\n }\n }\n\n \/\/\/ Get the current contents of the text input. Multiple lines are joined by \\n.\n pub fn get_content(&self) -> DOMString {\n let mut content = \"\".into_string();\n for (i, line) in self.lines.iter().enumerate() {\n content.push_str(line.as_slice());\n if i < self.lines.len() - 1 {\n content.push('\\n');\n }\n }\n content\n }\n\n \/\/\/ Set the current contents of the text input. If this is control supports multiple lines,\n \/\/\/ any \\n encountered will be stripped and force a new logical line.\n pub fn set_content(&mut self, content: DOMString) {\n self.lines = if self.multiline {\n content.as_slice().split('\\n').map(|s| s.into_string()).collect()\n } else {\n vec!(content)\n };\n self.edit_point.line = min(self.edit_point.line, self.lines.len() - 1);\n\n if self.current_line_length() == 0 {\n self.edit_point.index = 0;\n }\n else {\n self.edit_point.index = min(self.edit_point.index, self.current_line_length() - 1);\n }\n }\n}\n\n#[test]\nfn test_textinput_delete_char() {\n let mut textinput = TextInput::new(Lines::Single, \"abcdefg\".into_string());\n textinput.adjust_horizontal(2, Selection::NotSelected);\n textinput.delete_char(DeleteDir::Backward);\n assert_eq!(textinput.get_content().as_slice(), \"acdefg\");\n\n textinput.delete_char(DeleteDir::Forward);\n assert_eq!(textinput.get_content().as_slice(), \"adefg\");\n\n textinput.adjust_horizontal(2, Selection::Selected);\n textinput.delete_char(DeleteDir::Forward);\n assert_eq!(textinput.get_content().as_slice(), \"afg\");\n}\n\n#[test]\nfn test_textinput_insert_char() {\n let mut textinput = TextInput::new(Lines::Single, \"abcdefg\".into_string());\n textinput.adjust_horizontal(2, Selection::NotSelected);\n textinput.insert_char('a');\n assert_eq!(textinput.get_content().as_slice(), \"abacdefg\");\n\n textinput.adjust_horizontal(2, Selection::Selected);\n textinput.insert_char('b');\n assert_eq!(textinput.get_content().as_slice(), \"ababefg\");\n}\n\n#[test]\nfn test_textinput_get_sorted_selection() {\n let mut textinput = TextInput::new(Lines::Single, \"abcdefg\".into_string());\n textinput.adjust_horizontal(2, Selection::NotSelected);\n textinput.adjust_horizontal(2, Selection::Selected);\n let (begin, end) = textinput.get_sorted_selection();\n assert_eq!(begin.index, 2);\n assert_eq!(end.index, 4);\n\n textinput.clear_selection();\n\n textinput.adjust_horizontal(-2, Selection::Selected);\n let (begin, end) = textinput.get_sorted_selection();\n assert_eq!(begin.index, 2);\n assert_eq!(end.index, 4);\n}\n\n#[test]\nfn test_textinput_replace_selection() {\n let mut textinput = TextInput::new(Lines::Single, \"abcdefg\".into_string());\n textinput.adjust_horizontal(2, Selection::NotSelected);\n textinput.adjust_horizontal(2, Selection::Selected);\n\n textinput.replace_selection(\"xyz\".into_string());\n assert_eq!(textinput.get_content().as_slice(), \"abxyzefg\");\n}\n\n#[test]\nfn test_textinput_current_line_length() {\n let mut textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n assert_eq!(textinput.current_line_length(), 3);\n\n textinput.adjust_vertical(1, Selection::NotSelected);\n assert_eq!(textinput.current_line_length(), 2);\n\n textinput.adjust_vertical(1, 
Selection::NotSelected);\n assert_eq!(textinput.current_line_length(), 1);\n}\n\n#[test]\nfn test_textinput_adjust_vertical() {\n let mut textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n textinput.adjust_horizontal(3, Selection::NotSelected);\n textinput.adjust_vertical(1, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 1);\n assert_eq!(textinput.edit_point.index, 2);\n\n textinput.adjust_vertical(-1, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 0);\n assert_eq!(textinput.edit_point.index, 2);\n\n textinput.adjust_vertical(2, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 2);\n assert_eq!(textinput.edit_point.index, 1);\n}\n\n#[test]\nfn test_textinput_adjust_horizontal() {\n let mut textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n textinput.adjust_horizontal(4, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 1);\n assert_eq!(textinput.edit_point.index, 0);\n\n textinput.adjust_horizontal(1, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 1);\n assert_eq!(textinput.edit_point.index, 1);\n\n textinput.adjust_horizontal(2, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 2);\n assert_eq!(textinput.edit_point.index, 0);\n\n textinput.adjust_horizontal(-1, Selection::NotSelected);\n assert_eq!(textinput.edit_point.line, 1);\n assert_eq!(textinput.edit_point.index, 2);\n}\n\n#[test]\nfn test_textinput_handle_return() {\n let mut single_line_textinput = TextInput::new(Lines::Single, \"abcdef\".into_string());\n single_line_textinput.adjust_horizontal(3, Selection::NotSelected);\n single_line_textinput.handle_return();\n assert_eq!(single_line_textinput.get_content().as_slice(), \"abcdef\");\n\n let mut multi_line_textinput = TextInput::new(Lines::Multiple, \"abcdef\".into_string());\n multi_line_textinput.adjust_horizontal(3, Selection::NotSelected);\n multi_line_textinput.handle_return();\n assert_eq!(multi_line_textinput.get_content().as_slice(), \"abc\\ndef\");\n}\n\n#[test]\nfn test_textinput_select_all() {\n let mut textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n assert_eq!(textinput.edit_point.line, 0);\n assert_eq!(textinput.edit_point.index, 0);\n\n textinput.select_all();\n assert_eq!(textinput.edit_point.line, 2);\n assert_eq!(textinput.edit_point.index, 1);\n}\n\n#[test]\nfn test_textinput_get_content() {\n let single_line_textinput = TextInput::new(Lines::Single, \"abcdefg\".into_string());\n assert_eq!(single_line_textinput.get_content().as_slice(), \"abcdefg\");\n\n let multi_line_textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n assert_eq!(multi_line_textinput.get_content().as_slice(), \"abc\\nde\\nf\");\n}\n\n#[test]\nfn test_textinput_set_content() {\n let mut textinput = TextInput::new(Lines::Multiple, \"abc\\nde\\nf\".into_string());\n assert_eq!(textinput.get_content().as_slice(), \"abc\\nde\\nf\");\n\n textinput.set_content(\"abc\\nf\".into_string());\n assert_eq!(textinput.get_content().as_slice(), \"abc\\nf\");\n\n assert_eq!(textinput.edit_point.line, 0);\n assert_eq!(textinput.edit_point.index, 0);\n textinput.adjust_horizontal(3, Selection::Selected);\n assert_eq!(textinput.edit_point.line, 0);\n assert_eq!(textinput.edit_point.index, 3);\n textinput.set_content(\"de\".into_string());\n assert_eq!(textinput.get_content().as_slice(), \"de\");\n assert_eq!(textinput.edit_point.line, 0);\n \/\/ FIXME: https:\/\/github.com\/servo\/servo\/issues\/4622.\n 
assert_eq!(textinput.edit_point.index, 1);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>implement RNG (device)<commit_after>extern crate rand;\n\nuse rand::{Rng, OsRng};\n\nfn main() {\n \/\/ becuase `OsRng` opens files, it may fail\n let mut rng = OsRng::new().unwrap();\n\n println!(\"{}\", rng.gen::<u32>());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add concept of a session.<commit_after>use rusqlite::Connection;\nuse rusqlite::Error;\n\n\n#[derive(Debug)]\npub struct Session {\n pub id: i32,\n pub uuid: String,\n}\n\n\npub fn table_create_session(conn: &Connection) -> &Connection {\n conn.execute(\"CREATE TABLE WHENENV_SESSION (\n id INTEGER PRIMARY KEY ASC,\n uuid TEXT NOT NULL UNIQUE\n )\", &[]).unwrap();\n return conn;\n}\n\n\npub fn insert_session(conn: &Connection, uuid: String) -> Result<i32, &'static str> {\n\n let session = Session {\n id: 0,\n uuid: uuid,\n };\n let mut found = 0;\n let dir_instance = conn.execute(\"INSERT INTO WHENENV_SESSION (uuid)\n VALUES (?1)\",\n &[&session.uuid]);\n if dir_instance.is_err() {\n \n return Err(\"ssss\");\n }\n dir_instance.unwrap();\n return Ok(0);\n}\n\n\n\n\n\npub fn list_session(conn: &Connection)-> Vec<Session> {\n let mut stmt = conn.prepare(\"SELECT id, uuid FROM WHENENV_SESSION\").unwrap();\n let wraped_fs_file_iter = stmt.query_map(&[], |row| {\n Session {\n id: row.get(0),\n uuid: row.get(1),\n }\n });\n let mut items = Vec::<Session>::new();\n if wraped_fs_file_iter.is_err() {\n return items;\n }\n let fs_file_iter = wraped_fs_file_iter.unwrap();\n for person in fs_file_iter {\n items.push(person.unwrap());\n }\n return items;\n}\n\n\n\npub fn pk_session_by_uuid(conn: &Connection, uuid: &String, pk: &mut i32) -> Result<i32, &'static str>{\n let mut stmt = conn.prepare(\"SELECT id, uuid FROM WHENENV_SESSION WHERE uuid = ?1\").unwrap();\n let insert_session_iter = stmt.query_map(&[uuid], |row| {\n Session {\n id: row.get(0),\n uuid: row.get(1),\n }\n });\n if insert_session_iter.is_err() {\n return Err(\"Insert failed dfdf\");\n }\n let result = insert_session_iter.unwrap();\n let mut found = 0;\n let mut items = Vec::<i32>::new();\n for person in result {\n let bill= person.unwrap();\n *pk = bill.id;\n found = 1;\n }\n if found != 0 {\n return Ok(found);\n }\n return Err(\"None found\");\n}\n\n\n<|endoftext|>"} {"text":"<commit_before>#![deny(warnings)]\n\nextern crate hamcrest;\nextern crate cargo;\n\nuse std::collections::HashMap;\n\nuse hamcrest::{assert_that, equal_to, contains, not};\n\nuse cargo::core::source::{SourceId, GitReference};\nuse cargo::core::dependency::Kind::{self, Development};\nuse cargo::core::{Dependency, PackageId, Summary, Registry};\nuse cargo::util::{CargoResult, ToUrl};\nuse cargo::core::resolver::{self, Method};\n\nfn resolve(pkg: PackageId, deps: Vec<Dependency>, registry: &[Summary])\n -> CargoResult<Vec<PackageId>>\n{\n struct MyRegistry<'a>(&'a [Summary]);\n impl<'a> Registry for MyRegistry<'a> {\n fn query(&mut self,\n dep: &Dependency,\n f: &mut FnMut(Summary)) -> CargoResult<()> {\n for summary in self.0.iter() {\n if dep.matches(summary) {\n f(summary.clone());\n }\n }\n Ok(())\n }\n }\n let mut registry = MyRegistry(registry);\n let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();\n let method = Method::Everything;\n let resolve = resolver::resolve(&[(summary, method)], &[], &mut registry)?;\n let res = resolve.iter().cloned().collect();\n Ok(res)\n}\n\ntrait ToDep {\n fn to_dep(self) -> Dependency;\n}\n\nimpl ToDep for &'static str {\n fn 
to_dep(self) -> Dependency {\n let url = \"http:\/\/example.com\".to_url().unwrap();\n let source_id = SourceId::for_registry(&url);\n Dependency::parse_no_deprecated(self, Some(\"1.0.0\"), &source_id).unwrap()\n }\n}\n\nimpl ToDep for Dependency {\n fn to_dep(self) -> Dependency {\n self\n }\n}\n\ntrait ToPkgId {\n fn to_pkgid(&self) -> PackageId;\n}\n\nimpl ToPkgId for &'static str {\n fn to_pkgid(&self) -> PackageId {\n PackageId::new(*self, \"1.0.0\", ®istry_loc()).unwrap()\n }\n}\n\nimpl ToPkgId for (&'static str, &'static str) {\n fn to_pkgid(&self) -> PackageId {\n let (name, vers) = *self;\n PackageId::new(name, vers, ®istry_loc()).unwrap()\n }\n}\n\nmacro_rules! pkg {\n ($pkgid:expr => [$($deps:expr),+]) => ({\n let d: Vec<Dependency> = vec![$($deps.to_dep()),+];\n\n Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap()\n });\n\n ($pkgid:expr) => (\n Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap()\n )\n}\n\nfn registry_loc() -> SourceId {\n let remote = \"http:\/\/example.com\".to_url().unwrap();\n SourceId::for_registry(&remote)\n}\n\nfn pkg(name: &str) -> Summary {\n Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap()\n}\n\nfn pkg_id(name: &str) -> PackageId {\n PackageId::new(name, \"1.0.0\", ®istry_loc()).unwrap()\n}\n\nfn pkg_id_loc(name: &str, loc: &str) -> PackageId {\n let remote = loc.to_url();\n let master = GitReference::Branch(\"master\".to_string());\n let source_id = SourceId::for_git(&remote.unwrap(), master);\n\n PackageId::new(name, \"1.0.0\", &source_id).unwrap()\n}\n\nfn pkg_loc(name: &str, loc: &str) -> Summary {\n Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap()\n}\n\nfn dep(name: &str) -> Dependency { dep_req(name, \"1.0.0\") }\nfn dep_req(name: &str, req: &str) -> Dependency {\n let url = \"http:\/\/example.com\".to_url().unwrap();\n let source_id = SourceId::for_registry(&url);\n Dependency::parse_no_deprecated(name, Some(req), &source_id).unwrap()\n}\n\nfn dep_loc(name: &str, location: &str) -> Dependency {\n let url = location.to_url().unwrap();\n let master = GitReference::Branch(\"master\".to_string());\n let source_id = SourceId::for_git(&url, master);\n Dependency::parse_no_deprecated(name, Some(\"1.0.0\"), &source_id).unwrap()\n}\nfn dep_kind(name: &str, kind: Kind) -> Dependency {\n dep(name).set_kind(kind).clone()\n}\n\nfn registry(pkgs: Vec<Summary>) -> Vec<Summary> {\n pkgs\n}\n\nfn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {\n names.iter().map(|name| name.to_pkgid()).collect()\n}\n\nfn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {\n names.iter()\n .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()\n}\n\n#[test]\nfn test_resolving_empty_dependency_list() {\n let res = resolve(pkg_id(\"root\"), Vec::new(),\n &mut registry(vec![])).unwrap();\n\n assert_that(&res, equal_to(&names(&[\"root\"])));\n}\n\n#[test]\nfn test_resolving_only_package() {\n let mut reg = registry(vec![pkg(\"foo\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), contains(names(&[\"root\", \"foo\"])).exactly());\n}\n\n#[test]\nfn test_resolving_one_dep() {\n let mut reg = registry(vec![pkg(\"foo\"), pkg(\"bar\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], &mut reg);\n\n assert_that(&res.unwrap(), contains(names(&[\"root\", \"foo\"])).exactly());\n}\n\n#[test]\nfn test_resolving_multiple_deps() {\n let mut reg = registry(vec![pkg!(\"foo\"), pkg!(\"bar\"), pkg!(\"baz\")]);\n let res = resolve(pkg_id(\"root\"), 
vec![dep(\"foo\"), dep(\"baz\")],\n &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"baz\"])).exactly());\n}\n\n#[test]\nfn test_resolving_transitive_deps() {\n let mut reg = registry(vec![pkg!(\"foo\"), pkg!(\"bar\" => [\"foo\"])]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"bar\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\"])));\n}\n\n#[test]\nfn test_resolving_common_transitive_deps() {\n let mut reg = registry(vec![pkg!(\"foo\" => [\"bar\"]), pkg!(\"bar\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\"), dep(\"bar\")],\n &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\"])));\n}\n\n#[test]\nfn test_resolving_with_same_name() {\n let list = vec![pkg_loc(\"foo\", \"http:\/\/first.example.com\"),\n pkg_loc(\"bar\", \"http:\/\/second.example.com\")];\n\n let mut reg = registry(list);\n let res = resolve(pkg_id(\"root\"),\n vec![dep_loc(\"foo\", \"http:\/\/first.example.com\"),\n dep_loc(\"bar\", \"http:\/\/second.example.com\")],\n &mut reg);\n\n let mut names = loc_names(&[(\"foo\", \"http:\/\/first.example.com\"),\n (\"bar\", \"http:\/\/second.example.com\")]);\n\n names.push(pkg_id(\"root\"));\n\n assert_that(&res.unwrap(), contains(names).exactly());\n}\n\n#[test]\nfn test_resolving_with_dev_deps() {\n let mut reg = registry(vec![\n pkg!(\"foo\" => [\"bar\", dep_kind(\"baz\", Development)]),\n pkg!(\"baz\" => [\"bat\", dep_kind(\"bam\", Development)]),\n pkg!(\"bar\"),\n pkg!(\"bat\")\n ]);\n\n let res = resolve(pkg_id(\"root\"),\n vec![dep(\"foo\"), dep_kind(\"baz\", Development)],\n &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\", \"baz\"])));\n}\n\n#[test]\nfn resolving_with_many_versions() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.2\")])));\n}\n\n#[test]\nfn resolving_with_specific_version() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep_req(\"foo\", \"=1.0.1\")],\n &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.1\")])));\n}\n\n#[test]\nfn test_resolving_maximum_version_with_transitive_deps() {\n let mut reg = registry(vec![\n pkg!((\"util\", \"1.2.2\")),\n pkg!((\"util\", \"1.0.0\")),\n pkg!((\"util\", \"1.1.1\")),\n pkg!(\"foo\" => [dep_req(\"util\", \"1.0.0\")]),\n pkg!(\"bar\" => [dep_req(\"util\", \">=1.0.1\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep_req(\"foo\", \"1.0.0\"), dep_req(\"bar\", \"1.0.0\")],\n &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"1.0.0\"),\n (\"util\", \"1.2.2\")])));\n assert_that(&res, not(contains(names(&[(\"util\", \"1.0.1\")]))));\n assert_that(&res, not(contains(names(&[(\"util\", \"1.1.1\")]))));\n}\n\n#[test]\nfn resolving_incompat_versions() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n pkg!(\"bar\" => [dep_req(\"foo\", \"=1.0.2\")]),\n ]);\n\n assert!(resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"=1.0.1\"),\n dep(\"bar\"),\n ], &mut reg).is_err());\n}\n\n#[test]\nfn resolving_backtrack() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.2\") => [dep(\"bar\")]),\n 
pkg!((\"foo\", \"1.0.1\") => [dep(\"baz\")]),\n pkg!(\"bar\" => [dep_req(\"foo\", \"=2.0.2\")]),\n pkg!(\"baz\"),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"^1\"),\n ], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.1\"),\n (\"baz\", \"1.0.0\")])));\n}\n\n#[test]\nfn resolving_allows_multiple_compatible_versions() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.0\")),\n pkg!((\"foo\", \"2.0.0\")),\n pkg!((\"foo\", \"0.1.0\")),\n pkg!((\"foo\", \"0.2.0\")),\n\n pkg!(\"bar\" => [\"d1\", \"d2\", \"d3\", \"d4\"]),\n pkg!(\"d1\" => [dep_req(\"foo\", \"1\")]),\n pkg!(\"d2\" => [dep_req(\"foo\", \"2\")]),\n pkg!(\"d3\" => [dep_req(\"foo\", \"0.1\")]),\n pkg!(\"d4\" => [dep_req(\"foo\", \"0.2\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep(\"bar\"),\n ], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"foo\", \"2.0.0\"),\n (\"foo\", \"0.1.0\"),\n (\"foo\", \"0.2.0\"),\n (\"d1\", \"1.0.0\"),\n (\"d2\", \"1.0.0\"),\n (\"d3\", \"1.0.0\"),\n (\"d4\", \"1.0.0\"),\n (\"bar\", \"1.0.0\")])));\n}\n\n#[test]\nfn resolving_with_deep_backtracking() {\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"1\")]),\n pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"2\")]),\n\n pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),\n dep_req(\"other\", \"1\")]),\n pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\")]),\n\n pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"2\")]),\n pkg!((\"baz\", \"1.0.1\")),\n\n pkg!((\"dep_req\", \"1.0.0\")),\n pkg!((\"dep_req\", \"2.0.0\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"2.0.0\"),\n (\"baz\", \"1.0.1\")])));\n}\n\n#[test]\nfn resolving_but_no_exists() {\n let mut reg = registry(vec![\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], &mut reg);\n assert!(res.is_err());\n\n assert_eq!(res.err().unwrap().to_string(), \"\\\nno matching package named `foo` found (required by `root`)\nlocation searched: registry http:\/\/example.com\/\nversion required: ^1\\\n\");\n}\n\n#[test]\nfn resolving_cycle() {\n let mut reg = registry(vec![\n pkg!(\"foo\" => [\"foo\"]),\n ]);\n\n let _ = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], &mut reg);\n}\n\n#[test]\nfn hard_equality() {\n extern crate env_logger;\n let mut reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.0\")),\n\n pkg!((\"bar\", \"1.0.0\") => [dep_req(\"foo\", \"1.0.0\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"bar\", \"1\"),\n dep_req(\"foo\", \"=1.0.0\"),\n ], &mut reg).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"1.0.0\")])));\n}\n<commit_msg>binding does not need to be mutable<commit_after>#![deny(warnings)]\n\nextern crate hamcrest;\nextern crate cargo;\n\nuse std::collections::HashMap;\n\nuse hamcrest::{assert_that, equal_to, contains, not};\n\nuse cargo::core::source::{SourceId, GitReference};\nuse cargo::core::dependency::Kind::{self, Development};\nuse cargo::core::{Dependency, PackageId, Summary, Registry};\nuse cargo::util::{CargoResult, ToUrl};\nuse cargo::core::resolver::{self, Method};\n\nfn resolve(pkg: PackageId, deps: Vec<Dependency>, registry: &[Summary])\n -> 
CargoResult<Vec<PackageId>>\n{\n struct MyRegistry<'a>(&'a [Summary]);\n impl<'a> Registry for MyRegistry<'a> {\n fn query(&mut self,\n dep: &Dependency,\n f: &mut FnMut(Summary)) -> CargoResult<()> {\n for summary in self.0.iter() {\n if dep.matches(summary) {\n f(summary.clone());\n }\n }\n Ok(())\n }\n }\n let mut registry = MyRegistry(registry);\n let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();\n let method = Method::Everything;\n let resolve = resolver::resolve(&[(summary, method)], &[], &mut registry)?;\n let res = resolve.iter().cloned().collect();\n Ok(res)\n}\n\ntrait ToDep {\n fn to_dep(self) -> Dependency;\n}\n\nimpl ToDep for &'static str {\n fn to_dep(self) -> Dependency {\n let url = \"http:\/\/example.com\".to_url().unwrap();\n let source_id = SourceId::for_registry(&url);\n Dependency::parse_no_deprecated(self, Some(\"1.0.0\"), &source_id).unwrap()\n }\n}\n\nimpl ToDep for Dependency {\n fn to_dep(self) -> Dependency {\n self\n }\n}\n\ntrait ToPkgId {\n fn to_pkgid(&self) -> PackageId;\n}\n\nimpl ToPkgId for &'static str {\n fn to_pkgid(&self) -> PackageId {\n PackageId::new(*self, \"1.0.0\", ®istry_loc()).unwrap()\n }\n}\n\nimpl ToPkgId for (&'static str, &'static str) {\n fn to_pkgid(&self) -> PackageId {\n let (name, vers) = *self;\n PackageId::new(name, vers, ®istry_loc()).unwrap()\n }\n}\n\nmacro_rules! pkg {\n ($pkgid:expr => [$($deps:expr),+]) => ({\n let d: Vec<Dependency> = vec![$($deps.to_dep()),+];\n\n Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap()\n });\n\n ($pkgid:expr) => (\n Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap()\n )\n}\n\nfn registry_loc() -> SourceId {\n let remote = \"http:\/\/example.com\".to_url().unwrap();\n SourceId::for_registry(&remote)\n}\n\nfn pkg(name: &str) -> Summary {\n Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap()\n}\n\nfn pkg_id(name: &str) -> PackageId {\n PackageId::new(name, \"1.0.0\", ®istry_loc()).unwrap()\n}\n\nfn pkg_id_loc(name: &str, loc: &str) -> PackageId {\n let remote = loc.to_url();\n let master = GitReference::Branch(\"master\".to_string());\n let source_id = SourceId::for_git(&remote.unwrap(), master);\n\n PackageId::new(name, \"1.0.0\", &source_id).unwrap()\n}\n\nfn pkg_loc(name: &str, loc: &str) -> Summary {\n Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap()\n}\n\nfn dep(name: &str) -> Dependency { dep_req(name, \"1.0.0\") }\nfn dep_req(name: &str, req: &str) -> Dependency {\n let url = \"http:\/\/example.com\".to_url().unwrap();\n let source_id = SourceId::for_registry(&url);\n Dependency::parse_no_deprecated(name, Some(req), &source_id).unwrap()\n}\n\nfn dep_loc(name: &str, location: &str) -> Dependency {\n let url = location.to_url().unwrap();\n let master = GitReference::Branch(\"master\".to_string());\n let source_id = SourceId::for_git(&url, master);\n Dependency::parse_no_deprecated(name, Some(\"1.0.0\"), &source_id).unwrap()\n}\nfn dep_kind(name: &str, kind: Kind) -> Dependency {\n dep(name).set_kind(kind).clone()\n}\n\nfn registry(pkgs: Vec<Summary>) -> Vec<Summary> {\n pkgs\n}\n\nfn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {\n names.iter().map(|name| name.to_pkgid()).collect()\n}\n\nfn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {\n names.iter()\n .map(|&(name, loc)| pkg_id_loc(name, loc)).collect()\n}\n\n#[test]\nfn test_resolving_empty_dependency_list() {\n let res = resolve(pkg_id(\"root\"), Vec::new(),\n &mut registry(vec![])).unwrap();\n\n assert_that(&res, 
equal_to(&names(&[\"root\"])));\n}\n\n#[test]\nfn test_resolving_only_package() {\n let reg = registry(vec![pkg(\"foo\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], ®);\n\n assert_that(&res.unwrap(), contains(names(&[\"root\", \"foo\"])).exactly());\n}\n\n#[test]\nfn test_resolving_one_dep() {\n let reg = registry(vec![pkg(\"foo\"), pkg(\"bar\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], ®);\n\n assert_that(&res.unwrap(), contains(names(&[\"root\", \"foo\"])).exactly());\n}\n\n#[test]\nfn test_resolving_multiple_deps() {\n let reg = registry(vec![pkg!(\"foo\"), pkg!(\"bar\"), pkg!(\"baz\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\"), dep(\"baz\")],\n ®).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"baz\"])).exactly());\n}\n\n#[test]\nfn test_resolving_transitive_deps() {\n let reg = registry(vec![pkg!(\"foo\"), pkg!(\"bar\" => [\"foo\"])]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"bar\")], ®).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\"])));\n}\n\n#[test]\nfn test_resolving_common_transitive_deps() {\n let reg = registry(vec![pkg!(\"foo\" => [\"bar\"]), pkg!(\"bar\")]);\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\"), dep(\"bar\")],\n ®).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\"])));\n}\n\n#[test]\nfn test_resolving_with_same_name() {\n let list = vec![pkg_loc(\"foo\", \"http:\/\/first.example.com\"),\n pkg_loc(\"bar\", \"http:\/\/second.example.com\")];\n\n let reg = registry(list);\n let res = resolve(pkg_id(\"root\"),\n vec![dep_loc(\"foo\", \"http:\/\/first.example.com\"),\n dep_loc(\"bar\", \"http:\/\/second.example.com\")],\n ®);\n\n let mut names = loc_names(&[(\"foo\", \"http:\/\/first.example.com\"),\n (\"bar\", \"http:\/\/second.example.com\")]);\n\n names.push(pkg_id(\"root\"));\n\n assert_that(&res.unwrap(), contains(names).exactly());\n}\n\n#[test]\nfn test_resolving_with_dev_deps() {\n let reg = registry(vec![\n pkg!(\"foo\" => [\"bar\", dep_kind(\"baz\", Development)]),\n pkg!(\"baz\" => [\"bat\", dep_kind(\"bam\", Development)]),\n pkg!(\"bar\"),\n pkg!(\"bat\")\n ]);\n\n let res = resolve(pkg_id(\"root\"),\n vec![dep(\"foo\"), dep_kind(\"baz\", Development)],\n ®).unwrap();\n\n assert_that(&res, contains(names(&[\"root\", \"foo\", \"bar\", \"baz\"])));\n}\n\n#[test]\nfn resolving_with_many_versions() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep(\"foo\")], ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.2\")])));\n}\n\n#[test]\nfn resolving_with_specific_version() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep_req(\"foo\", \"=1.0.1\")],\n ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.1\")])));\n}\n\n#[test]\nfn test_resolving_maximum_version_with_transitive_deps() {\n let reg = registry(vec![\n pkg!((\"util\", \"1.2.2\")),\n pkg!((\"util\", \"1.0.0\")),\n pkg!((\"util\", \"1.1.1\")),\n pkg!(\"foo\" => [dep_req(\"util\", \"1.0.0\")]),\n pkg!(\"bar\" => [dep_req(\"util\", \">=1.0.1\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![dep_req(\"foo\", \"1.0.0\"), dep_req(\"bar\", \"1.0.0\")],\n ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"1.0.0\"),\n (\"util\", 
\"1.2.2\")])));\n assert_that(&res, not(contains(names(&[(\"util\", \"1.0.1\")]))));\n assert_that(&res, not(contains(names(&[(\"util\", \"1.1.1\")]))));\n}\n\n#[test]\nfn resolving_incompat_versions() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.2\")),\n pkg!(\"bar\" => [dep_req(\"foo\", \"=1.0.2\")]),\n ]);\n\n assert!(resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"=1.0.1\"),\n dep(\"bar\"),\n ], ®).is_err());\n}\n\n#[test]\nfn resolving_backtrack() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.2\") => [dep(\"bar\")]),\n pkg!((\"foo\", \"1.0.1\") => [dep(\"baz\")]),\n pkg!(\"bar\" => [dep_req(\"foo\", \"=2.0.2\")]),\n pkg!(\"baz\"),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"^1\"),\n ], ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.1\"),\n (\"baz\", \"1.0.0\")])));\n}\n\n#[test]\nfn resolving_allows_multiple_compatible_versions() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.0\")),\n pkg!((\"foo\", \"2.0.0\")),\n pkg!((\"foo\", \"0.1.0\")),\n pkg!((\"foo\", \"0.2.0\")),\n\n pkg!(\"bar\" => [\"d1\", \"d2\", \"d3\", \"d4\"]),\n pkg!(\"d1\" => [dep_req(\"foo\", \"1\")]),\n pkg!(\"d2\" => [dep_req(\"foo\", \"2\")]),\n pkg!(\"d3\" => [dep_req(\"foo\", \"0.1\")]),\n pkg!(\"d4\" => [dep_req(\"foo\", \"0.2\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep(\"bar\"),\n ], ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"foo\", \"2.0.0\"),\n (\"foo\", \"0.1.0\"),\n (\"foo\", \"0.2.0\"),\n (\"d1\", \"1.0.0\"),\n (\"d2\", \"1.0.0\"),\n (\"d3\", \"1.0.0\"),\n (\"d4\", \"1.0.0\"),\n (\"bar\", \"1.0.0\")])));\n}\n\n#[test]\nfn resolving_with_deep_backtracking() {\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"1\")]),\n pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"2\")]),\n\n pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),\n dep_req(\"other\", \"1\")]),\n pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\")]),\n\n pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"2\")]),\n pkg!((\"baz\", \"1.0.1\")),\n\n pkg!((\"dep_req\", \"1.0.0\")),\n pkg!((\"dep_req\", \"2.0.0\")),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"2.0.0\"),\n (\"baz\", \"1.0.1\")])));\n}\n\n#[test]\nfn resolving_but_no_exists() {\n let reg = registry(vec![\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], ®);\n assert!(res.is_err());\n\n assert_eq!(res.err().unwrap().to_string(), \"\\\nno matching package named `foo` found (required by `root`)\nlocation searched: registry http:\/\/example.com\/\nversion required: ^1\\\n\");\n}\n\n#[test]\nfn resolving_cycle() {\n let reg = registry(vec![\n pkg!(\"foo\" => [\"foo\"]),\n ]);\n\n let _ = resolve(pkg_id(\"root\"), vec![\n dep_req(\"foo\", \"1\"),\n ], ®);\n}\n\n#[test]\nfn hard_equality() {\n extern crate env_logger;\n let reg = registry(vec![\n pkg!((\"foo\", \"1.0.1\")),\n pkg!((\"foo\", \"1.0.0\")),\n\n pkg!((\"bar\", \"1.0.0\") => [dep_req(\"foo\", \"1.0.0\")]),\n ]);\n\n let res = resolve(pkg_id(\"root\"), vec![\n dep_req(\"bar\", \"1\"),\n dep_req(\"foo\", \"=1.0.0\"),\n ], ®).unwrap();\n\n assert_that(&res, contains(names(&[(\"root\", \"1.0.0\"),\n (\"foo\", \"1.0.0\"),\n (\"bar\", \"1.0.0\")])));\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Don't skip over the first Tera error.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #66786 - jyn514:const-if-match-tests, r=Centril<commit_after>\/\/ check-pass\n\n#![feature(const_if_match)]\n\nenum E {\n A,\n B,\n C\n}\n\nconst fn f(e: E) -> usize {\n match e {\n _ => 0\n }\n}\n\nfn main() {\n const X: usize = f(E::C);\n assert_eq!(X, 0);\n assert_eq!(f(E::A), 0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>queue replaces distinct as the hash-free demo vertex<commit_after>use std::collections::HashMap;\nuse std::hash::Hash;\n\nuse communication::*;\nuse communication::pact::Pipeline;\n\nuse example_shared::*;\nuse example_shared::operators::unary::UnaryNotifyExt;\n\nuse columnar::Columnar;\nuse drain::DrainExt;\n\npub trait QueueExt {\n fn queue(&self) -> Self;\n}\n\nimpl<G: GraphBuilder, D: Data+Columnar> QueueExt for Stream<G, D>\nwhere G::Timestamp: Hash {\n\n fn queue(&self) -> Stream<G, D> {\n let mut elements = HashMap::new();\n self.unary_notify(Pipeline, format!(\"Queue\"), vec![], move |input, output, notificator| {\n while let Some((time, data)) = input.pull() {\n let set = elements.entry(time).or_insert(Vec::new());\n for datum in data.drain_temp() { set.push(datum); }\n\n notificator.notify_at(&time);\n }\n\n while let Some((time, _count)) = notificator.next() {\n if let Some(mut data) = elements.remove(&time) {\n output.give_at(&time, data.drain_temp());\n }\n }\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>use gen_weighted_bool<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::{str, String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n\/\/\/ Ethernet resource\npub struct Resource {\n \/\/\/ The network\n network: Box<Resource>,\n \/\/\/ The data\n data: Vec<u8>,\n \/\/\/ The MAC addresss\n peer_addr: MACAddr,\n \/\/\/ The ethernet type\n ethertype: u16,\n}\n\nimpl Resource {\n fn dup(&self) -> Option<Box<Self>> {\n match self.network.dup() {\n Some(network) => Some(box Resource {\n network: network,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n ethertype: self.ethertype,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"ethernet:\/\/{}\/{}\", self.peer_addr.to_string(), String::from_num_radix(self.ethertype as usize, 16));\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == self.ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) &&\n (frame.header.src.equals(self.peer_addr) ||\n self.peer_addr.equals(BROADCAST_MAC_ADDR)) {\n vec.push_all(&frame.data);\n return Some(frame.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let data = Vec::from(buf);\n\n \/*\n match 
self.network.write(EthernetII {\n header: EthernetIIHeader {\n src: unsafe { MAC_ADDR },\n dst: self.peer_addr,\n ethertype: n16::new(self.ethertype),\n },\n data: data,\n }.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n *\/\n None\n }\n\n fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n }\n\n fn sync(&mut self) -> bool {\n self.network.sync()\n }\n}\n\npub struct Scheme;\n\nimpl Scheme {\n fn new() -> Box<Self> {\n box Scheme\n }\n\n fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n \/\/Check host and port vs path\n if not_scheme.starts_with(\"\/\") {\n if let Some(mut network) = File::open(\"network:\/\/\") {\n if url.path().len() > 0 {\n let ethertype = url.path().to_num_radix(16) as u16;\n\n if url.host().len() > 0 {\n return Some(box Resource {\n network: network,\n data: Vec::new(),\n peer_addr: MACAddr::from_string(&url.host()),\n ethertype: ethertype,\n });\n } else {\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) {\n return Some(box Resource {\n network: network,\n data: frame.data,\n peer_addr: frame.header.src,\n ethertype: ethertype,\n });\n }\n }\n }\n None => break,\n }\n }\n }\n } else {\n \/*\n debug::d(\"Ethernet: No ethertype provided\\n\");\n *\/\n }\n }\n }\n\n None\n }\n}\n<commit_msg>Forgot to make the userspace ethernet scheme have pub methods in `Resource` and `Scheme`<commit_after>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::{str, String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n\/\/\/ Ethernet resource\npub struct Resource {\n \/\/\/ The network\n network: Box<Resource>,\n \/\/\/ The data\n data: Vec<u8>,\n \/\/\/ The MAC addresss\n peer_addr: MACAddr,\n \/\/\/ The ethernet type\n ethertype: u16,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Self>> {\n match self.network.dup() {\n Some(network) => Some(box Resource {\n network: network,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n ethertype: self.ethertype,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"ethernet:\/\/{}\/{}\", self.peer_addr.to_string(), String::from_num_radix(self.ethertype as usize, 16));\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == self.ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) &&\n 
(frame.header.src.equals(self.peer_addr) ||\n self.peer_addr.equals(BROADCAST_MAC_ADDR)) {\n vec.push_all(&frame.data);\n return Some(frame.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let data = Vec::from(buf);\n\n \/*\n match self.network.write(EthernetII {\n header: EthernetIIHeader {\n src: unsafe { MAC_ADDR },\n dst: self.peer_addr,\n ethertype: n16::new(self.ethertype),\n },\n data: data,\n }.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n *\/\n None\n }\n\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n }\n\n pub fn sync(&mut self) -> bool {\n self.network.sync()\n }\n}\n\npub struct Scheme;\n\nimpl Scheme {\n pub fn new() -> Box<Self> {\n box Scheme\n }\n\n pub fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n \/\/Check host and port vs path\n if not_scheme.starts_with(\"\/\") {\n if let Some(mut network) = File::open(\"network:\/\/\") {\n if url.path().len() > 0 {\n let ethertype = url.path().to_num_radix(16) as u16;\n\n if url.host().len() > 0 {\n return Some(box Resource {\n network: network,\n data: Vec::new(),\n peer_addr: MACAddr::from_string(&url.host()),\n ethertype: ethertype,\n });\n } else {\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match network.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(frame) = EthernetII::from_bytes(bytes) {\n if frame.header.ethertype.get() == ethertype &&\n (unsafe { frame.header.dst.equals(MAC_ADDR) } ||\n frame.header.dst.equals(BROADCAST_MAC_ADDR)) {\n return Some(box Resource {\n network: network,\n data: frame.data,\n peer_addr: frame.header.src,\n ethertype: ethertype,\n });\n }\n }\n }\n None => break,\n }\n }\n }\n } else {\n \/*\n debug::d(\"Ethernet: No ethertype provided\\n\");\n *\/\n }\n }\n }\n\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::{BorrowSet, BorrowData};\nuse borrow_check::place_ext::PlaceExt;\n\nuse rustc;\nuse rustc::hir;\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::region;\nuse rustc::mir::{self, Location, Place, Mir};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::RegionKind;\nuse rustc::ty::RegionKind::ReScope;\n\nuse rustc_data_structures::bitslice::BitwiseOperator;\nuse rustc_data_structures::indexed_set::IdxSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::sync::Lrc;\n\nuse dataflow::{BitDenotation, BlockSets, InitialFlow};\npub use dataflow::indexes::BorrowIndex;\nuse borrow_check::nll::region_infer::RegionInferenceContext;\nuse borrow_check::nll::ToRegionVid;\n\nuse std::rc::Rc;\n\n\/\/\/ `Borrows` stores the data used in the analyses that track the flow\n\/\/\/ of borrows.\n\/\/\/\n\/\/\/ It uniquely identifies every borrow (`Rvalue::Ref`) by a\n\/\/\/ `BorrowIndex`, and maps each such index to a `BorrowData`\n\/\/\/ describing the borrow. These indexes are used for representing the\n\/\/\/ borrows in compact bitvectors.\npub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n scope_tree: Lrc<region::ScopeTree>,\n root_scope: Option<region::Scope>,\n\n borrow_set: Rc<BorrowSet<'tcx>>,\n\n \/\/\/ NLL region inference context with which NLL queries should be resolved\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n}\n\nimpl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {\n crate fn new(\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n def_id: DefId,\n body_id: Option<hir::BodyId>,\n borrow_set: &Rc<BorrowSet<'tcx>>\n ) -> Self {\n let scope_tree = tcx.region_scope_tree(def_id);\n let root_scope = body_id.map(|body_id| {\n region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)\n });\n\n Borrows {\n tcx: tcx,\n mir: mir,\n borrow_set: borrow_set.clone(),\n scope_tree,\n root_scope,\n nonlexical_regioncx,\n }\n }\n\n crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }\n\n pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }\n\n pub fn location(&self, idx: BorrowIndex) -> &Location {\n &self.borrow_set.borrows[idx].reserve_location\n }\n\n \/\/\/ Add all borrows to the kill set, if those borrows are out of scope at `location`.\n \/\/\/ That means either they went out of either a nonlexical scope, if we care about those\n \/\/\/ at the moment, or the location represents a lexical EndRegion\n fn kill_loans_out_of_scope_at_location(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n let regioncx = &self.nonlexical_regioncx;\n\n \/\/ NOTE: The state associated with a given `location`\n \/\/ reflects the dataflow on entry to the statement. If it\n \/\/ does not contain `borrow_region`, then then that means\n \/\/ that the statement at `location` kills the borrow.\n \/\/\n \/\/ We are careful always to call this function *before* we\n \/\/ set up the gen-bits for the statement or\n \/\/ termanator. 
That way, if the effect of the statement or\n \/\/ terminator *does* introduce a new loan of the same\n \/\/ region, then setting that gen-bit will override any\n \/\/ potential kill introduced here.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n let borrow_region = borrow_data.region.to_region_vid();\n if !regioncx.region_contains_point(borrow_region, location) {\n sets.kill(&borrow_index);\n }\n }\n }\n\n fn kill_borrows_on_local(&self,\n sets: &mut BlockSets<BorrowIndex>,\n local: &rustc::mir::Local)\n {\n if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {\n sets.kill_all(borrow_indexes);\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {\n type Idx = BorrowIndex;\n fn name() -> &'static str { \"borrows\" }\n fn bits_per_block(&self) -> usize {\n self.borrow_set.borrows.len() * 2\n }\n\n fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {\n \/\/ no borrows of code region_scopes have been taken prior to\n \/\/ function execution, so this method has no effect on\n \/\/ `_sets`.\n }\n\n fn before_statement_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_statement_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::statement_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {\n panic!(\"could not find statement at location {:?}\");\n });\n\n match stmt.kind {\n mir::StatementKind::EndRegion(_) => {\n }\n\n mir::StatementKind::Assign(ref lhs, ref rhs) => {\n \/\/ Make sure there are no remaining borrows for variables\n \/\/ that are assigned over.\n if let Place::Local(ref local) = *lhs {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n\n \/\/ NOTE: if\/when the Assign case is revised to inspect\n \/\/ the assigned_place here, make sure to also\n \/\/ re-consider the current implementations of the\n \/\/ propagate_call_return method.\n\n if let mir::Rvalue::Ref(region, _, ref place) = *rhs {\n if place.is_unsafe_place(self.tcx, self.mir) { return; }\n let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndex for location {:?}\", location);\n });\n\n if let RegionKind::ReEmpty = region {\n \/\/ If the borrowed value dies before the borrow is used, the region for\n \/\/ the borrow can be empty. Don't track the borrow in that case.\n debug!(\"Borrows::statement_effect_on_borrows \\\n location: {:?} stmt: {:?} has empty region, killing {:?}\",\n location, stmt.kind, index);\n sets.kill(&index);\n return\n } else {\n debug!(\"Borrows::statement_effect_on_borrows location: {:?} stmt: {:?}\",\n location, stmt.kind);\n }\n\n assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndexs for region {:?}\", region);\n }).contains(&index));\n sets.gen(&index);\n\n \/\/ Issue #46746: Two-phase borrows handles\n \/\/ stmts of form `Tmp = &mut Borrow` ...\n match lhs {\n Place::Local(..) | Place::Static(..) => {} \/\/ okay\n Place::Projection(..) => {\n \/\/ ... 
can assign into projections,\n \/\/ e.g. `box (&mut _)`. Current\n \/\/ conservative solution: force\n \/\/ immediate activation here.\n sets.gen(&index);\n }\n }\n }\n }\n\n mir::StatementKind::StorageDead(local) => {\n \/\/ Make sure there are no remaining borrows for locals that\n \/\/ are gone out of scope.\n self.kill_borrows_on_local(sets, &local)\n }\n\n mir::StatementKind::InlineAsm { ref outputs, ref asm, .. } => {\n for (output, kind) in outputs.iter().zip(&asm.outputs) {\n if !kind.is_indirect && !kind.is_rw {\n \/\/ Make sure there are no remaining borrows for direct\n \/\/ output variables.\n if let Place::Local(ref local) = *output {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n }\n }\n }\n\n mir::StatementKind::SetDiscriminant { .. } |\n mir::StatementKind::StorageLive(..) |\n mir::StatementKind::Validate(..) |\n mir::StatementKind::UserAssertTy(..) |\n mir::StatementKind::Nop => {}\n\n }\n }\n\n fn before_terminator_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_terminator_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::terminator_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n\n let term = block.terminator();\n match term.kind {\n mir::TerminatorKind::Resume |\n mir::TerminatorKind::Return |\n mir::TerminatorKind::GeneratorDrop => {\n \/\/ When we return from the function, then all `ReScope`-style regions\n \/\/ are guaranteed to have ended.\n \/\/ Normally, there would be `EndRegion` statements that come before,\n \/\/ and hence most of these loans will already be dead -- but, in some cases\n \/\/ like unwind paths, we do not always emit `EndRegion` statements, so we\n \/\/ add some kills here as a \"backup\" and to avoid spurious error messages.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n if let ReScope(scope) = borrow_data.region {\n \/\/ Check that the scope is not actually a scope from a function that is\n \/\/ a parent of our closure. Note that the CallSite scope itself is\n \/\/ *outside* of the closure, for some weird reason.\n if let Some(root_scope) = self.root_scope {\n if *scope != root_scope &&\n self.scope_tree.is_subscope_of(*scope, root_scope)\n {\n sets.kill(&borrow_index);\n }\n }\n }\n }\n }\n mir::TerminatorKind::Abort |\n mir::TerminatorKind::SwitchInt {..} |\n mir::TerminatorKind::Drop {..} |\n mir::TerminatorKind::DropAndReplace {..} |\n mir::TerminatorKind::Call {..} |\n mir::TerminatorKind::Assert {..} |\n mir::TerminatorKind::Yield {..} |\n mir::TerminatorKind::Goto {..} |\n mir::TerminatorKind::FalseEdges {..} |\n mir::TerminatorKind::FalseUnwind {..} |\n mir::TerminatorKind::Unreachable => {}\n }\n }\n\n fn propagate_call_return(&self,\n _in_out: &mut IdxSet<BorrowIndex>,\n _call_bb: mir::BasicBlock,\n _dest_bb: mir::BasicBlock,\n _dest_place: &mir::Place) {\n \/\/ there are no effects on borrows from method call return...\n \/\/\n \/\/ ... 
but if overwriting a place can affect flow state, then\n \/\/ latter is not true; see NOTE on Assign case in\n \/\/ statement_effect_on_borrows.\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitwiseOperator for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn join(&self, pred1: usize, pred2: usize) -> usize {\n pred1 | pred2 \/\/ union effects of preds when computing reservations\n }\n}\n\nimpl<'a, 'gcx, 'tcx> InitialFlow for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn bottom_value() -> bool {\n false \/\/ bottom = nothing is reserved or activated yet\n }\n}\n\n<commit_msg>Use precomputed DFS of borrows that out of scope at a location.<commit_after>\/\/ Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse borrow_check::borrow_set::{BorrowSet, BorrowData};\nuse borrow_check::place_ext::PlaceExt;\n\nuse rustc;\nuse rustc::hir;\nuse rustc::hir::def_id::DefId;\nuse rustc::middle::region;\nuse rustc::mir::{self, Location, Place, Mir, TerminatorKind};\nuse rustc::ty::TyCtxt;\nuse rustc::ty::{RegionKind, RegionVid};\nuse rustc::ty::RegionKind::ReScope;\n\nuse rustc_data_structures::bitslice::BitwiseOperator;\nuse rustc_data_structures::fx::FxHashMap;\nuse rustc_data_structures::indexed_set::IdxSet;\nuse rustc_data_structures::indexed_vec::IndexVec;\nuse rustc_data_structures::sync::Lrc;\n\nuse dataflow::{BitDenotation, BlockSets, InitialFlow};\npub use dataflow::indexes::BorrowIndex;\nuse borrow_check::nll::region_infer::RegionInferenceContext;\nuse borrow_check::nll::ToRegionVid;\n\nuse std::rc::Rc;\n\n\/\/\/ `Borrows` stores the data used in the analyses that track the flow\n\/\/\/ of borrows.\n\/\/\/\n\/\/\/ It uniquely identifies every borrow (`Rvalue::Ref`) by a\n\/\/\/ `BorrowIndex`, and maps each such index to a `BorrowData`\n\/\/\/ describing the borrow. 
These indexes are used for representing the\n\/\/\/ borrows in compact bitvectors.\npub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n scope_tree: Lrc<region::ScopeTree>,\n root_scope: Option<region::Scope>,\n\n borrow_set: Rc<BorrowSet<'tcx>>,\n borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,\n\n \/\/\/ NLL region inference context with which NLL queries should be resolved\n _nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n}\n\nfn precompute_borrows_out_of_scope<'a, 'tcx>(\n mir: &'a Mir<'tcx>,\n regioncx: &Rc<RegionInferenceContext<'tcx>>,\n borrows_out_of_scope_at_location: &mut FxHashMap<Location, Vec<BorrowIndex>>,\n borrow_index: BorrowIndex,\n borrow_region: RegionVid,\n location: Location\n) {\n \/\/ Start by dealing with the current location.\n if !regioncx.region_contains_point(borrow_region, location) {\n borrows_out_of_scope_at_location\n .entry(location.clone())\n .and_modify(|m| m.push(borrow_index))\n .or_insert(vec![ borrow_index ]);\n }\n\n let bb_data = &mir[location.block];\n \/\/ If we are on the last statement, then check the terminator\n \/\/ to determine which location to proceed to.\n if location.statement_index == bb_data.statements.len() - 1 {\n if let Some(ref terminator) = bb_data.terminator {\n match terminator.kind {\n TerminatorKind::Goto { target } |\n TerminatorKind::FalseEdges { real_target: target, .. } |\n TerminatorKind::FalseUnwind { real_target: target, .. } => {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, target.start_location()\n );\n },\n TerminatorKind::SwitchInt { ref targets, .. } => {\n for block in targets {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, block.start_location()\n );\n }\n },\n TerminatorKind::Drop { target, unwind, .. } |\n TerminatorKind::DropAndReplace { target, unwind, .. } => {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, target.start_location()\n );\n\n if let Some(unwind_block) = unwind {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, unwind_block.start_location()\n );\n }\n },\n TerminatorKind::Call { ref destination, cleanup, .. } => {\n if let Some((_, block)) = destination {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, block.start_location()\n );\n }\n\n if let Some(block) = cleanup {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, block.start_location()\n );\n }\n },\n TerminatorKind::Assert { target, cleanup, .. } |\n TerminatorKind::Yield { resume: target, drop: cleanup, .. 
} => {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, target.start_location()\n );\n\n if let Some(block) = cleanup {\n precompute_borrows_out_of_scope(\n mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region, block.start_location()\n );\n }\n },\n _ => {},\n };\n };\n \/\/ If we're not on the last statement, then go to the next\n \/\/ statement in this block.\n } else {\n precompute_borrows_out_of_scope(mir, regioncx, borrows_out_of_scope_at_location,\n borrow_index, borrow_region,\n location.successor_within_block());\n }\n}\n\nimpl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {\n crate fn new(\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n mir: &'a Mir<'tcx>,\n nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,\n def_id: DefId,\n body_id: Option<hir::BodyId>,\n borrow_set: &Rc<BorrowSet<'tcx>>\n ) -> Self {\n let scope_tree = tcx.region_scope_tree(def_id);\n let root_scope = body_id.map(|body_id| {\n region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)\n });\n\n let mut borrows_out_of_scope_at_location = FxHashMap();\n for (borrow_index, borrow_data) in borrow_set.borrows.iter_enumerated() {\n let borrow_region = borrow_data.region.to_region_vid();\n let location = borrow_set.borrows[borrow_index].reserve_location;\n\n precompute_borrows_out_of_scope(mir, &nonlexical_regioncx,\n &mut borrows_out_of_scope_at_location,\n borrow_index, borrow_region, location);\n }\n\n Borrows {\n tcx: tcx,\n mir: mir,\n borrow_set: borrow_set.clone(),\n borrows_out_of_scope_at_location,\n scope_tree,\n root_scope,\n _nonlexical_regioncx: nonlexical_regioncx,\n }\n }\n\n crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }\n pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }\n\n pub fn location(&self, idx: BorrowIndex) -> &Location {\n &self.borrow_set.borrows[idx].reserve_location\n }\n\n \/\/\/ Add all borrows to the kill set, if those borrows are out of scope at `location`.\n \/\/\/ That means either they went out of either a nonlexical scope, if we care about those\n \/\/\/ at the moment, or the location represents a lexical EndRegion\n fn kill_loans_out_of_scope_at_location(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n \/\/ NOTE: The state associated with a given `location`\n \/\/ reflects the dataflow on entry to the statement.\n \/\/ Iterate over each of the borrows that we've precomputed\n \/\/ to have went out of scope at this location and kill them.\n \/\/\n \/\/ We are careful always to call this function *before* we\n \/\/ set up the gen-bits for the statement or\n \/\/ termanator. 
That way, if the effect of the statement or\n \/\/ terminator *does* introduce a new loan of the same\n \/\/ region, then setting that gen-bit will override any\n \/\/ potential kill introduced here.\n if let Some(indices) = self.borrows_out_of_scope_at_location.get(&location) {\n for index in indices {\n sets.kill(&index);\n }\n }\n }\n\n fn kill_borrows_on_local(&self,\n sets: &mut BlockSets<BorrowIndex>,\n local: &rustc::mir::Local)\n {\n if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {\n sets.kill_all(borrow_indexes);\n }\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {\n type Idx = BorrowIndex;\n fn name() -> &'static str { \"borrows\" }\n fn bits_per_block(&self) -> usize {\n self.borrow_set.borrows.len() * 2\n }\n\n fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {\n \/\/ no borrows of code region_scopes have been taken prior to\n \/\/ function execution, so this method has no effect on\n \/\/ `_sets`.\n }\n\n fn before_statement_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_statement_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::statement_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {\n panic!(\"could not find statement at location {:?}\");\n });\n\n match stmt.kind {\n mir::StatementKind::EndRegion(_) => {\n }\n\n mir::StatementKind::Assign(ref lhs, ref rhs) => {\n \/\/ Make sure there are no remaining borrows for variables\n \/\/ that are assigned over.\n if let Place::Local(ref local) = *lhs {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n\n \/\/ NOTE: if\/when the Assign case is revised to inspect\n \/\/ the assigned_place here, make sure to also\n \/\/ re-consider the current implementations of the\n \/\/ propagate_call_return method.\n\n if let mir::Rvalue::Ref(region, _, ref place) = *rhs {\n if place.is_unsafe_place(self.tcx, self.mir) { return; }\n let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndex for location {:?}\", location);\n });\n\n if let RegionKind::ReEmpty = region {\n \/\/ If the borrowed value dies before the borrow is used, the region for\n \/\/ the borrow can be empty. Don't track the borrow in that case.\n debug!(\"Borrows::statement_effect_on_borrows \\\n location: {:?} stmt: {:?} has empty region, killing {:?}\",\n location, stmt.kind, index);\n sets.kill(&index);\n return\n } else {\n debug!(\"Borrows::statement_effect_on_borrows location: {:?} stmt: {:?}\",\n location, stmt.kind);\n }\n\n assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {\n panic!(\"could not find BorrowIndexs for region {:?}\", region);\n }).contains(&index));\n sets.gen(&index);\n\n \/\/ Issue #46746: Two-phase borrows handles\n \/\/ stmts of form `Tmp = &mut Borrow` ...\n match lhs {\n Place::Local(..) | Place::Static(..) => {} \/\/ okay\n Place::Projection(..) => {\n \/\/ ... can assign into projections,\n \/\/ e.g. `box (&mut _)`. 
Current\n \/\/ conservative solution: force\n \/\/ immediate activation here.\n sets.gen(&index);\n }\n }\n }\n }\n\n mir::StatementKind::StorageDead(local) => {\n \/\/ Make sure there are no remaining borrows for locals that\n \/\/ are gone out of scope.\n self.kill_borrows_on_local(sets, &local)\n }\n\n mir::StatementKind::InlineAsm { ref outputs, ref asm, .. } => {\n for (output, kind) in outputs.iter().zip(&asm.outputs) {\n if !kind.is_indirect && !kind.is_rw {\n \/\/ Make sure there are no remaining borrows for direct\n \/\/ output variables.\n if let Place::Local(ref local) = *output {\n \/\/ FIXME: Handle the case in which we're assigning over\n \/\/ a projection (`foo.bar`).\n self.kill_borrows_on_local(sets, local);\n }\n }\n }\n }\n\n mir::StatementKind::SetDiscriminant { .. } |\n mir::StatementKind::StorageLive(..) |\n mir::StatementKind::Validate(..) |\n mir::StatementKind::UserAssertTy(..) |\n mir::StatementKind::Nop => {}\n\n }\n }\n\n fn before_terminator_effect(&self,\n sets: &mut BlockSets<BorrowIndex>,\n location: Location) {\n debug!(\"Borrows::before_terminator_effect sets: {:?} location: {:?}\", sets, location);\n self.kill_loans_out_of_scope_at_location(sets, location);\n }\n\n fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {\n debug!(\"Borrows::terminator_effect sets: {:?} location: {:?}\", sets, location);\n\n let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {\n panic!(\"could not find block at location {:?}\", location);\n });\n\n let term = block.terminator();\n match term.kind {\n mir::TerminatorKind::Resume |\n mir::TerminatorKind::Return |\n mir::TerminatorKind::GeneratorDrop => {\n \/\/ When we return from the function, then all `ReScope`-style regions\n \/\/ are guaranteed to have ended.\n \/\/ Normally, there would be `EndRegion` statements that come before,\n \/\/ and hence most of these loans will already be dead -- but, in some cases\n \/\/ like unwind paths, we do not always emit `EndRegion` statements, so we\n \/\/ add some kills here as a \"backup\" and to avoid spurious error messages.\n for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {\n if let ReScope(scope) = borrow_data.region {\n \/\/ Check that the scope is not actually a scope from a function that is\n \/\/ a parent of our closure. Note that the CallSite scope itself is\n \/\/ *outside* of the closure, for some weird reason.\n if let Some(root_scope) = self.root_scope {\n if *scope != root_scope &&\n self.scope_tree.is_subscope_of(*scope, root_scope)\n {\n sets.kill(&borrow_index);\n }\n }\n }\n }\n }\n mir::TerminatorKind::Abort |\n mir::TerminatorKind::SwitchInt {..} |\n mir::TerminatorKind::Drop {..} |\n mir::TerminatorKind::DropAndReplace {..} |\n mir::TerminatorKind::Call {..} |\n mir::TerminatorKind::Assert {..} |\n mir::TerminatorKind::Yield {..} |\n mir::TerminatorKind::Goto {..} |\n mir::TerminatorKind::FalseEdges {..} |\n mir::TerminatorKind::FalseUnwind {..} |\n mir::TerminatorKind::Unreachable => {}\n }\n }\n\n fn propagate_call_return(&self,\n _in_out: &mut IdxSet<BorrowIndex>,\n _call_bb: mir::BasicBlock,\n _dest_bb: mir::BasicBlock,\n _dest_place: &mir::Place) {\n \/\/ there are no effects on borrows from method call return...\n \/\/\n \/\/ ... 
but if overwriting a place can affect flow state, then\n \/\/ latter is not true; see NOTE on Assign case in\n \/\/ statement_effect_on_borrows.\n }\n}\n\nimpl<'a, 'gcx, 'tcx> BitwiseOperator for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn join(&self, pred1: usize, pred2: usize) -> usize {\n pred1 | pred2 \/\/ union effects of preds when computing reservations\n }\n}\n\nimpl<'a, 'gcx, 'tcx> InitialFlow for Borrows<'a, 'gcx, 'tcx> {\n #[inline]\n fn bottom_value() -> bool {\n false \/\/ bottom = nothing is reserved or activated yet\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Basic implementation<commit_after>use std::collections::HashMap;\nuse std::io::{BufferedReader, File};\nuse std::os;\n\nstatic WORD_THRESHOLD: u32 = 1000;\nstatic CLUSTER_THRESHOLD: u32 = 1000;\n\nfn main() {\n let args = os::args();\n let inputs: Vec<Path> = args.tail().iter().map(|f| Path::new(f.as_slice())).collect();\n\n let mut word_freq: HashMap<u64, u32> = HashMap::new();\n for path in inputs.iter() {\n let mut file = BufferedReader::new(File::open(path));\n for l in file.lines() {\n for w in l.ok().unwrap().as_slice().words() {\n let hash = fnv1a(w);\n word_freq.insert_or_update_with(hash, 1, |_k,v| *v+=1);\n }\n }\n }\n\n println!(\"words found: {}\", word_freq.len());\n\n let mut clusters: HashMap<String, u32> = HashMap::new();\n for path in inputs.iter() {\n let mut file = BufferedReader::new(File::open(path));\n for l in file.lines() {\n let cluster = clusterify(&word_freq, l.ok().unwrap().as_slice());\n if !cluster.is_empty() {\n clusters.insert_or_update_with(cluster, 1, |_k,v| *v+=1);\n }\n }\n }\n\n println!(\"clusters found: {}\", clusters.len());\n\n for (k,v) in clusters.iter() {\n if *v >= CLUSTER_THRESHOLD {\n println!(\"{}\\t{}\", v, k);\n }\n }\n}\n\nfn clusterify(word_freq: &HashMap<u64, u32>, line: &str) -> String {\n let words: Vec<&str> = line.words().map({|w|\n if word_freq[fnv1a(w)] < WORD_THRESHOLD {\n \"*\"\n } else {\n w\n }\n }).collect();\n\n words.connect(\" \")\n}\n\nstatic FNV_PRIME_64: u64 = 1099511628211;\nstatic FNV1_OFFSET_BASIS_64: u64 = 14695981039346656037;\n\nfn fnv1a(s: &str) -> u64 {\n let mut hash = FNV1_OFFSET_BASIS_64;\n for b in s.as_bytes().iter() {\n hash = (hash ^ *b as u64) * FNV_PRIME_64;\n }\n hash\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2020 The Exonum Team\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse exonum_cli::{NodeBuilder, Spec};\n\nuse exonum_cryptocurrency_advanced::CryptocurrencyService;\nuse old_cryptocurrency::contracts::CryptocurrencyService as OldService;\n\nfn main() -> anyhow::Result<()> {\n exonum::helpers::init_logger()?;\n NodeBuilder::new()\n .with(Spec::new(OldService).with_default_instance())\n .with(Spec::migrating(CryptocurrencyService))\n .run()\n}\n<commit_msg>Fix binary contents (#1812)<commit_after>\/\/ Copyright 2020 The Exonum Team\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in 
compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\nuse exonum_cli::{NodeBuilder, Spec};\nuse exonum_rust_runtime::spec::JustFactory;\n\nuse exonum_cryptocurrency_advanced::CryptocurrencyService;\nuse old_cryptocurrency::contracts::CryptocurrencyService as OldService;\n\nfn main() -> anyhow::Result<()> {\n exonum::helpers::init_logger()?;\n NodeBuilder::new()\n .with(Spec::new(OldService).with_default_instance())\n .with(JustFactory::migrating(CryptocurrencyService))\n .run()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add k-nucleotide<commit_after>extern mod std;\n\nuse core::cast::transmute;\nuse core::i32::range;\nuse core::libc::{STDIN_FILENO, c_int, fdopen, fgets, fileno, fopen, fstat};\nuse core::libc::{stat, strlen};\nuse core::ptr::null;\nuse core::unstable::intrinsics::init;\nuse core::vec::{reverse, slice};\nuse std::sort::quick_sort3;\n\nstatic LINE_LEN: uint = 80;\nstatic TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ];\nstatic TABLE_SIZE: uint = 2 << 16;\n\nstatic OCCURRENCES: [&'static str, ..5] = [\n \"GGT\",\n \"GGTA\",\n \"GGTATT\",\n \"GGTATTTTAATT\",\n \"GGTATTTTAATTTATAGT\",\n];\n\n\/\/ Code implementation\n\n#[deriving(Eq, Ord)]\nstruct Code(u64);\n\nimpl Code {\n fn hash(&self) -> u64 {\n **self\n }\n\n #[inline(always)]\n fn push_char(&self, c: u8) -> Code {\n Code((**self << 2) + (pack_symbol(c) as u64))\n }\n\n fn rotate(&self, c: u8, frame: i32) -> Code {\n Code(*self.push_char(c) & ((1u64 << (2 * (frame as u64))) - 1))\n }\n\n fn pack(string: &str) -> Code {\n let mut code = Code(0u64);\n for uint::range(0, string.len()) |i| {\n code = code.push_char(string[i]);\n }\n code\n }\n\n \/\/ XXX: Inefficient.\n fn unpack(&self, frame: i32) -> ~str {\n let mut key = **self;\n let mut result = ~[];\n for (frame as uint).times {\n result.push(unpack_symbol((key as u8) & 3));\n key >>= 2;\n }\n\n reverse(result);\n str::from_bytes(result)\n }\n}\n\n\/\/ Hash table implementation\n\ntrait TableCallback {\n fn f(&self, entry: &mut Entry);\n}\n\nstruct BumpCallback;\n\nimpl TableCallback for BumpCallback {\n fn f(&self, entry: &mut Entry) {\n entry.count += 1;\n }\n}\n\nstruct PrintCallback(&'static str);\n\nimpl TableCallback for PrintCallback {\n fn f(&self, entry: &mut Entry) {\n println(fmt!(\"%d\\t%s\", entry.count as int, **self));\n }\n}\n\nstruct Entry {\n code: Code,\n count: i32,\n next: Option<~Entry>,\n}\n\nstruct Table {\n count: i32,\n items: [Option<~Entry>, ..TABLE_SIZE]\n}\n\nimpl Table {\n fn new() -> Table {\n Table {\n count: 0,\n items: [ None, ..TABLE_SIZE ],\n }\n }\n\n fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) {\n match item.next {\n None => {\n let mut entry = ~Entry {\n code: key,\n count: 0,\n next: None,\n };\n c.f(entry);\n item.next = Some(entry);\n }\n Some(ref mut entry) => {\n if entry.code == key {\n c.f(*entry);\n return;\n }\n\n Table::search_remainder(*entry, key, c)\n }\n }\n }\n\n fn lookup<C:TableCallback>(&mut self, key: Code, c: C) {\n let index = *key % (TABLE_SIZE as u64);\n\n {\n if self.items[index].is_none() {\n let mut entry = ~Entry {\n code: 
key,\n count: 0,\n next: None,\n };\n c.f(entry);\n self.items[index] = Some(entry);\n return;\n }\n }\n\n {\n let mut entry = &mut *self.items[index].get_mut_ref();\n if entry.code == key {\n c.f(*entry);\n return;\n }\n\n Table::search_remainder(*entry, key, c)\n }\n }\n\n fn each(&self, f: &fn(entry: &Entry) -> bool) {\n for self.items.each |item| {\n match *item {\n None => {}\n Some(ref item) => {\n let mut item: &Entry = *item;\n loop {\n if !f(item) {\n return;\n }\n\n match item.next {\n None => break,\n Some(ref next_item) => item = &**next_item,\n }\n }\n }\n };\n }\n }\n}\n\n\/\/ Main program\n\nfn pack_symbol(c: u8) -> u8 {\n match c {\n 'a' as u8 | 'A' as u8 => 0,\n 'c' as u8 | 'C' as u8 => 1,\n 'g' as u8 | 'G' as u8 => 2,\n 't' as u8 | 'T' as u8 => 3,\n _ => fail!(c.to_str())\n }\n}\n\nfn unpack_symbol(c: u8) -> u8 {\n TABLE[c]\n}\n\nfn next_char<'a>(mut buf: &'a [u8]) -> &'a [u8] {\n loop {\n buf = slice(buf, 1, buf.len());\n if buf.len() == 0 {\n break;\n }\n if buf[0] != (' ' as u8) && buf[0] != ('\\t' as u8) &&\n buf[0] != ('\\n' as u8) && buf[0] != 0 {\n break;\n }\n }\n buf\n}\n\n#[inline(never)]\nfn read_stdin() -> ~[u8] {\n unsafe {\n let mode = \"r\";\n \/\/let stdin = fdopen(STDIN_FILENO as c_int, transmute(&mode[0]));\n let path = \"knucleotide-input.txt\";\n let stdin = fopen(transmute(&path[0]), transmute(&mode[0]));\n\n let mut st: stat = init();\n fstat(fileno(stdin), &mut st);\n let mut buf = vec::from_elem(st.st_size as uint, 0);\n\n let header = str::byte_slice_no_callback(\">THREE\");\n let header = vec::slice(header, 0, 6);\n\n {\n let mut window: &mut [u8] = buf;\n loop {\n fgets(transmute(&mut window[0]), LINE_LEN as c_int, stdin);\n\n {\n if vec::slice(window, 0, 6) == header {\n break;\n }\n }\n }\n\n while fgets(transmute(&mut window[0]),\n LINE_LEN as c_int,\n stdin) != null() {\n window = vec::mut_slice(window,\n strlen(transmute(&window[0])) as uint,\n window.len());\n }\n }\n\n buf\n }\n}\n\n#[inline(never)]\n#[fixed_stack_segment]\nfn generate_frequencies(frequencies: &mut Table,\n mut input: &[u8],\n frame: i32) {\n let mut code = Code(0);\n \n \/\/ Pull first frame.\n for (frame as uint).times {\n code = code.push_char(input[0]);\n input = next_char(input);\n }\n frequencies.lookup(code, BumpCallback);\n\n while input.len() != 0 && input[0] != ('>' as u8) {\n code = code.rotate(input[0], frame);\n frequencies.lookup(code, BumpCallback);\n input = next_char(input);\n }\n}\n\n#[inline(never)]\n#[fixed_stack_segment]\nfn print_frequencies(frequencies: &Table, frame: i32) {\n let mut vector = ~[];\n for frequencies.each |entry| {\n vector.push((entry.code, entry.count));\n }\n quick_sort3(vector);\n\n let mut total_count = 0;\n for vector.each |&(_, count)| {\n total_count += count;\n }\n\n for vector.each |&(key, count)| {\n println(fmt!(\"%s %.3f\",\n key.unpack(frame),\n (count as float * 100.0) \/ (total_count as float)));\n }\n}\n\nfn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {\n frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))\n}\n\n#[fixed_stack_segment]\nfn main() {\n let input = read_stdin();\n\n let mut frequencies = ~Table::new();\n generate_frequencies(frequencies, input, 1);\n print_frequencies(frequencies, 1);\n\n *frequencies = Table::new();\n generate_frequencies(frequencies, input, 2);\n print_frequencies(frequencies, 2);\n\n for range(0, 5) |i| {\n let occurrence = OCCURRENCES[i];\n *frequencies = Table::new();\n generate_frequencies(frequencies,\n input,\n occurrence.len() as 
i32);\n print_occurrences(frequencies, occurrence);\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example Julia set (#106)<commit_after>extern crate minifb;\n\nuse minifb::{Window, Key, WindowOptions};\n\nconst WIDTH: usize = 600;\nconst HEIGHT: usize = 600;\nconst FRACTAL_DEPTH: u32 = 64;\nconst GENERATION_INFINITY: f64 = 16.;\n\nfn main() {\n let mut buffer: Vec<u32> = vec![0; WIDTH * HEIGHT];\n\n let mut window = match Window::new(\"Fractal - ESC to exit\", WIDTH, HEIGHT, WindowOptions::default()) {\n Ok(win) => win,\n Err(err) => {\n println!(\"Unable to create window {}\", err);\n return;\n }\n };\n\n let range = 2.0;\n let x_min = 0. - range;\n let y_min = 0. - range;\n\n let x_max = 0. + range;\n let y_max = 0. + range;\n\n let mut angle: f64 = 0.0;\n\n while window.is_open() && !window.is_key_down(Key::Escape) {\n\n for i in 0..buffer.len() {\n let mut real = map((i % WIDTH) as f64, 0., WIDTH as f64, x_min , x_max);\n let mut imag = map((i \/ HEIGHT) as f64, 0., HEIGHT as f64, y_min, y_max);\n\n\n let mut n = 0;\n\n while n < FRACTAL_DEPTH {\n let re = real.powf(2.) - imag.powf(2.);\n let im = 2. * real * imag;\n\n real = re + angle.cos();\n imag = im + angle.sin();\n\n if (real + imag).abs() > GENERATION_INFINITY {\n break; \/\/ Leave when achieve infinity\n }\n n += 1;\n }\n\n buffer[i] = fill(n);\n }\n\n angle += 0.1;\n\n \/\/ We unwrap here as we want this code to exit if it fails\n window.update_with_buffer(&buffer).unwrap();\n }\n}\n\n\nfn map (val: f64, start1: f64, stop1: f64, start2: f64, stop2:f64) -> f64 {\n start2 + (stop2 - start2) * ((val - start1) \/ (stop1 - start1))\n}\n\nfn fill (n: u32) -> u32 {\n if FRACTAL_DEPTH == n {\n return 0x00;\n } else {\n return n * 32 % 255;\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>partition()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>style: Have scale function and scale property accept percentage value.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix #12 with a test<commit_after>\/\/ https:\/\/github.com\/SimonSapin\/rust-std-candidates\/issues\/12\n#[macro_use(matches)] extern crate matches;\n\n#[test]\nfn matches_works() {\n let foo = Some(\"-12\");\n assert!(matches!(foo, Some(bar) if\n matches!(bar.as_bytes()[0], b'+' | b'-') &&\n matches!(bar.as_bytes()[1], b'0'...b'9')\n ));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>let_chains: Add test protecting the precedence of && in relation to other things.<commit_after>\/\/ run-pass\n\n#![allow(irrefutable_let_patterns)]\n\nuse std::ops::Range;\n\nfn main() {\n let x: bool;\n \/\/ This should associate as: `(x = (true && false));`.\n x = true && false;\n assert!(!x);\n\n fn _f1() -> bool {\n \/\/ Should associate as `(let _ = (return (true && false)))`.\n if let _ = return true && false {};\n }\n assert!(!_f1());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>temp commit<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue 2287<commit_after>import to_str::*;\nimport to_str::to_str;\n\nclass cat {\n priv {\n let mut meows : uint;\n fn meow() {\n #error(\"Meow\");\n self.meows += 1u;\n if self.meows % 5u == 0u {\n self.how_hungry += 1;\n }\n }\n }\n\n let mut how_hungry : int;\n let name : str;\n\n new(in_x : uint, in_y : int, in_name: str)\n { self.meows = in_x; self.how_hungry = in_y; self.name = in_name; }\n\n fn speak() { self.meow(); }\n\n fn eat() -> bool {\n if self.how_hungry > 0 {\n #error(\"OM NOM NOM\");\n self.how_hungry -= 2;\n ret true;\n }\n 
else {\n #error(\"Not hungry!\");\n ret false;\n }\n }\n}\n\nimpl of to_str for cat {\n fn to_str() -> str { self.name }\n}\n\nfn print_out<T: to_str>(thing: T, expected: str) {\n let actual = thing.to_str();\n #debug(\"%s\", actual);\n assert(actual == expected);\n}\n\nfn main() {\n let nyan : to_str = cat(0u, 2, \"nyan\") as to_str;\n print_out(nyan, \"nyan\");\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add an example of a minimalistic server<commit_after>#[macro_use]\nextern crate rustful;\nuse std::error::Error;\nuse rustful::{Server, Context, Response};\n\nfn main() {\n println!(\"Visit http:\/\/localhost:8080 to try this example.\");\n let server_result = Server {\n host: 8080.into(),\n ..Server::new(|_: Context, res: Response| res.send(\"Hello!\"))\n }.run();\n\n match server_result {\n Ok(_server) => {},\n Err(e) => println!(\"could not start server: {}\", e.description())\n }\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove implementation of Show for Fe in curve25519<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add nested TAIT inference test<commit_after>\/\/ check-pass\n\n#![feature(type_alias_impl_trait)]\n#![allow(dead_code)]\n\nuse std::fmt::Debug;\n\ntype FooX = impl Debug;\n\ntrait Foo<A> { }\n\nimpl Foo<()> for () { }\n\nfn foo() -> impl Foo<FooX> {\n ()\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::result::Result as RResult;\nuse std::ops::Deref;\n\nuse toml::{Parser, Value};\n\n\/**\n * Errors which are related to configuration-file loading\n *\/\npub mod error {\n use std::error::Error;\n use std::fmt::{Display, Formatter};\n use std::fmt::Error as FmtError;\n\n \/**\n * The kind of an error\n *\/\n #[derive(Clone, Debug, PartialEq)]\n pub enum ConfigErrorKind {\n NoConfigFileFound,\n }\n\n \/**\n * Configuration error type\n *\/\n #[derive(Debug)]\n pub struct ConfigError {\n kind: ConfigErrorKind,\n cause: Option<Box<Error>>,\n }\n\n impl ConfigError {\n\n \/**\n * Instantiate a new ConfigError, optionally with cause\n *\/\n pub fn new(kind: ConfigErrorKind, cause: Option<Box<Error>>) -> ConfigError {\n ConfigError {\n kind: kind,\n cause: cause,\n }\n }\n\n \/**\n * get the Kind of the Error\n *\/\n pub fn kind(&self) -> ConfigErrorKind {\n self.kind.clone()\n }\n\n \/**\n * Get the string, the ConfigError can be described with\n *\/\n pub fn as_str(e: &ConfigError) -> &'static str {\n match e.kind() {\n ConfigErrorKind::NoConfigFileFound => \"No config file found\",\n }\n }\n\n }\n\n impl Display for ConfigError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"{}\", ConfigError::as_str(self)));\n Ok(())\n }\n\n }\n\n impl Error for ConfigError {\n\n fn description(&self) -> &str {\n ConfigError::as_str(self)\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n}\n\nuse self::error::{ConfigError, ConfigErrorKind};\n\n\n\/**\n * Result type of this module. 
Either T or ConfigError\n *\/\npub type Result<T> = RResult<T, ConfigError>;\n\n\/**\n * Configuration object\n *\n * Holds all config variables which are globally available plus the configuration object from the\n * config parser, which can be accessed.\n *\/\n#[derive(Debug)]\npub struct Configuration {\n\n \/**\n * The plain configuration object for direct access if necessary\n *\/\n config: Value,\n\n \/**\n * The verbosity the program should run with\n *\/\n verbosity: bool,\n\n \/**\n * The editor which should be used\n *\/\n editor: Option<String>,\n\n \/**\n * The options the editor should get when opening some file\n *\/\n editor_opts: String,\n}\n\nimpl Configuration {\n\n \/**\n * Get a new configuration object.\n *\n * The passed runtimepath is used for searching the configuration file, whereas several file\n * names are tested. If that does not work, the home directory and the XDG basedir are tested\n * with all variants.\n *\n * If that doesn't work either, an error is returned.\n *\/\n pub fn new(rtp: &PathBuf) -> Result<Configuration> {\n fetch_config(&rtp).map(|cfg| {\n let verbosity = get_verbosity(&cfg);\n let editor = get_editor(&cfg);\n let editor_opts = get_editor_opts(&cfg);\n\n debug!(\"Building configuration\");\n debug!(\" - verbosity : {:?}\", verbosity);\n debug!(\" - editor : {:?}\", editor);\n debug!(\" - editor-opts: {}\", editor_opts);\n\n Configuration {\n config: cfg,\n verbosity: verbosity,\n editor: editor,\n editor_opts: editor_opts,\n }\n })\n }\n\n pub fn editor(&self) -> Option<&String> {\n self.editor.as_ref()\n }\n\n pub fn config(&self) -> &Value {\n &self.config\n }\n\n pub fn store_config(&self) -> Option<&Value> {\n match &self.config {\n &Value::Table(ref tabl) => tabl.get(\"store\"),\n _ => None,\n }\n }\n\n}\n\nimpl Deref for Configuration {\n type Target = Value;\n\n fn deref(&self) -> &Value {\n &self.config\n }\n\n}\n\nfn get_verbosity(v: &Value) -> bool {\n match v {\n &Value::Table(ref t) => t.get(\"verbose\")\n .map(|v| match v { &Value::Boolean(b) => b, _ => false, })\n .unwrap_or(false),\n _ => false,\n }\n}\n\nfn get_editor(v: &Value) -> Option<String> {\n match v {\n &Value::Table(ref t) => t.get(\"editor\")\n .and_then(|v| match v { &Value::String(ref s) => Some(s.clone()), _ => None, }),\n _ => None,\n }\n}\n\nfn get_editor_opts(v: &Value) -> String {\n match v {\n &Value::Table(ref t) => t.get(\"editor-opts\")\n .and_then(|v| match v { &Value::String(ref s) => Some(s.clone()), _ => None, })\n .unwrap_or(String::new()),\n _ => String::new(),\n }\n}\n\n\/**\n * Helper to fetch the config file\n *\n * Tests several variants for the config file path and uses the first one which works.\n *\/\nfn fetch_config(rtp: &PathBuf) -> Result<Value> {\n use std::env;\n use std::fs::File;\n use std::io::Read;\n use std::io::Write;\n use std::io::stderr;\n\n use xdg_basedir;\n use itertools::Itertools;\n\n use libimagutil::variants::generate_variants as gen_vars;\n\n let variants = vec![\"config\", \"config.toml\", \"imagrc\", \"imagrc.toml\"];\n let modifier = |base: &PathBuf, v: &'static str| {\n let mut base = base.clone();\n base.push(format!(\"{}\", v));\n base\n };\n\n vec![\n gen_vars(rtp.clone(), variants.clone(), &modifier),\n\n env::var(\"HOME\").map(|home| gen_vars(PathBuf::from(home), variants.clone(), &modifier))\n .unwrap_or(vec![]),\n\n xdg_basedir::get_data_home().map(|data_dir| gen_vars(data_dir, variants.clone(), &modifier))\n .unwrap_or(vec![]),\n ].iter()\n .flatten()\n .filter(|path| path.exists() && path.is_file())\n 
.map(|path| {\n let content = {\n let mut s = String::new();\n let f = File::open(path);\n if f.is_err() {\n }\n let mut f = f.unwrap();\n f.read_to_string(&mut s).ok();\n s\n };\n let mut parser = Parser::new(&content[..]);\n let res = parser.parse();\n if res.is_none() {\n write!(stderr(), \"Config file parser error:\");\n for error in parser.errors {\n write!(stderr(), \"At [{}][{}] <> {}\", error.lo, error.hi, error);\n write!(stderr(), \"in: '{}'\", &content[error.lo..error.hi]);\n }\n None\n } else {\n res\n }\n })\n .filter(|loaded| loaded.is_some())\n .nth(0)\n .map(|inner| Value::Table(inner.unwrap()))\n .ok_or(ConfigError::new(ConfigErrorKind::NoConfigFileFound, None))\n}\n\n<commit_msg>Annotate function with allow(dead_code)<commit_after>use std::path::PathBuf;\nuse std::result::Result as RResult;\nuse std::ops::Deref;\n\nuse toml::{Parser, Value};\n\n\/**\n * Errors which are related to configuration-file loading\n *\/\npub mod error {\n use std::error::Error;\n use std::fmt::{Display, Formatter};\n use std::fmt::Error as FmtError;\n\n \/**\n * The kind of an error\n *\/\n #[derive(Clone, Debug, PartialEq)]\n pub enum ConfigErrorKind {\n NoConfigFileFound,\n }\n\n \/**\n * Configuration error type\n *\/\n #[derive(Debug)]\n pub struct ConfigError {\n kind: ConfigErrorKind,\n cause: Option<Box<Error>>,\n }\n\n impl ConfigError {\n\n \/**\n * Instantiate a new ConfigError, optionally with cause\n *\/\n pub fn new(kind: ConfigErrorKind, cause: Option<Box<Error>>) -> ConfigError {\n ConfigError {\n kind: kind,\n cause: cause,\n }\n }\n\n \/**\n * get the Kind of the Error\n *\/\n pub fn kind(&self) -> ConfigErrorKind {\n self.kind.clone()\n }\n\n \/**\n * Get the string, the ConfigError can be described with\n *\/\n pub fn as_str(e: &ConfigError) -> &'static str {\n match e.kind() {\n ConfigErrorKind::NoConfigFileFound => \"No config file found\",\n }\n }\n\n }\n\n impl Display for ConfigError {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {\n try!(write!(fmt, \"{}\", ConfigError::as_str(self)));\n Ok(())\n }\n\n }\n\n impl Error for ConfigError {\n\n fn description(&self) -> &str {\n ConfigError::as_str(self)\n }\n\n fn cause(&self) -> Option<&Error> {\n self.cause.as_ref().map(|e| &**e)\n }\n\n }\n\n}\n\nuse self::error::{ConfigError, ConfigErrorKind};\n\n\n\/**\n * Result type of this module. Either T or ConfigError\n *\/\npub type Result<T> = RResult<T, ConfigError>;\n\n\/**\n * Configuration object\n *\n * Holds all config variables which are globally available plus the configuration object from the\n * config parser, which can be accessed.\n *\/\n#[derive(Debug)]\npub struct Configuration {\n\n \/**\n * The plain configuration object for direct access if necessary\n *\/\n config: Value,\n\n \/**\n * The verbosity the program should run with\n *\/\n verbosity: bool,\n\n \/**\n * The editor which should be used\n *\/\n editor: Option<String>,\n\n \/**\n * The options the editor should get when opening some file\n *\/\n editor_opts: String,\n}\n\nimpl Configuration {\n\n \/**\n * Get a new configuration object.\n *\n * The passed runtimepath is used for searching the configuration file, whereas several file\n * names are tested. 
If that does not work, the home directory and the XDG basedir are tested\n * with all variants.\n *\n * If that doesn't work either, an error is returned.\n *\/\n pub fn new(rtp: &PathBuf) -> Result<Configuration> {\n fetch_config(&rtp).map(|cfg| {\n let verbosity = get_verbosity(&cfg);\n let editor = get_editor(&cfg);\n let editor_opts = get_editor_opts(&cfg);\n\n debug!(\"Building configuration\");\n debug!(\" - verbosity : {:?}\", verbosity);\n debug!(\" - editor : {:?}\", editor);\n debug!(\" - editor-opts: {}\", editor_opts);\n\n Configuration {\n config: cfg,\n verbosity: verbosity,\n editor: editor,\n editor_opts: editor_opts,\n }\n })\n }\n\n pub fn editor(&self) -> Option<&String> {\n self.editor.as_ref()\n }\n\n #[allow(dead_code)] \/\/ Why do I actually need this annotation on a pub function?\n pub fn config(&self) -> &Value {\n &self.config\n }\n\n pub fn store_config(&self) -> Option<&Value> {\n match &self.config {\n &Value::Table(ref tabl) => tabl.get(\"store\"),\n _ => None,\n }\n }\n\n}\n\nimpl Deref for Configuration {\n type Target = Value;\n\n fn deref(&self) -> &Value {\n &self.config\n }\n\n}\n\nfn get_verbosity(v: &Value) -> bool {\n match v {\n &Value::Table(ref t) => t.get(\"verbose\")\n .map(|v| match v { &Value::Boolean(b) => b, _ => false, })\n .unwrap_or(false),\n _ => false,\n }\n}\n\nfn get_editor(v: &Value) -> Option<String> {\n match v {\n &Value::Table(ref t) => t.get(\"editor\")\n .and_then(|v| match v { &Value::String(ref s) => Some(s.clone()), _ => None, }),\n _ => None,\n }\n}\n\nfn get_editor_opts(v: &Value) -> String {\n match v {\n &Value::Table(ref t) => t.get(\"editor-opts\")\n .and_then(|v| match v { &Value::String(ref s) => Some(s.clone()), _ => None, })\n .unwrap_or(String::new()),\n _ => String::new(),\n }\n}\n\n\/**\n * Helper to fetch the config file\n *\n * Tests several variants for the config file path and uses the first one which works.\n *\/\nfn fetch_config(rtp: &PathBuf) -> Result<Value> {\n use std::env;\n use std::fs::File;\n use std::io::Read;\n use std::io::Write;\n use std::io::stderr;\n\n use xdg_basedir;\n use itertools::Itertools;\n\n use libimagutil::variants::generate_variants as gen_vars;\n\n let variants = vec![\"config\", \"config.toml\", \"imagrc\", \"imagrc.toml\"];\n let modifier = |base: &PathBuf, v: &'static str| {\n let mut base = base.clone();\n base.push(format!(\"{}\", v));\n base\n };\n\n vec![\n gen_vars(rtp.clone(), variants.clone(), &modifier),\n\n env::var(\"HOME\").map(|home| gen_vars(PathBuf::from(home), variants.clone(), &modifier))\n .unwrap_or(vec![]),\n\n xdg_basedir::get_data_home().map(|data_dir| gen_vars(data_dir, variants.clone(), &modifier))\n .unwrap_or(vec![]),\n ].iter()\n .flatten()\n .filter(|path| path.exists() && path.is_file())\n .map(|path| {\n let content = {\n let mut s = String::new();\n let f = File::open(path);\n if f.is_err() {\n }\n let mut f = f.unwrap();\n f.read_to_string(&mut s).ok();\n s\n };\n let mut parser = Parser::new(&content[..]);\n let res = parser.parse();\n if res.is_none() {\n write!(stderr(), \"Config file parser error:\");\n for error in parser.errors {\n write!(stderr(), \"At [{}][{}] <> {}\", error.lo, error.hi, error);\n write!(stderr(), \"in: '{}'\", &content[error.lo..error.hi]);\n }\n None\n } else {\n res\n }\n })\n .filter(|loaded| loaded.is_some())\n .nth(0)\n .map(|inner| Value::Table(inner.unwrap()))\n .ok_or(ConfigError::new(ConfigErrorKind::NoConfigFileFound, None))\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Simple TreeView example, a 
very basic file browser<commit_after>extern crate gtk;\n#[macro_use]\nextern crate relm;\n#[macro_use]\nextern crate relm_derive;\n\nuse gtk::prelude::*;\nuse relm::{Relm, Update, Widget};\nuse std::fs;\nuse std::io;\nuse std::path::PathBuf;\nuse gtk::{\n Inhibit,\n TreeView,\n Window,\n WindowType\n};\nuse gtk::Orientation::Vertical;\n\n\/\/ These two constants stand for the columns of the listmodel and the listview\nconst VALUE_COL: i32 = 0;\nconst IS_DIR_COL: i32 = 1;\n\nstruct Directory {\n current_dir: PathBuf,\n}\n\n#[derive(Msg)]\nenum Msg {\n ItemSelect,\n Quit,\n}\n\nstruct Win {\n tree_view: TreeView,\n model: Directory,\n window: Window,\n}\n\nimpl Update for Win {\n type Model = Directory;\n type ModelParam = ();\n type Msg = Msg;\n\n fn model(_: &Relm<Self>, _: ()) -> Directory {\n let working_directory = fs::canonicalize(\".\").expect(\"Failed to open directory\");\n Directory {\n current_dir: working_directory\n }\n }\n\n fn update(&mut self, event: Msg) {\n match event {\n Msg::ItemSelect => {\n let selection = self.tree_view.get_selection();\n if let Some((list_model, iter)) = selection.get_selected() {\n let is_dir: bool = list_model\n .get_value(&iter, IS_DIR_COL)\n .get::<bool>()\n .unwrap();\n\n if is_dir {\n let dir_name = list_model\n .get_value(&iter, VALUE_COL)\n .get::<String>()\n .unwrap();\n\n println!(\"{:?} selected\", dir_name);\n let new_dir = if dir_name == \"..\" {\n \/\/ Go up parent directory, if it exists\n self.model.current_dir\n .parent()\n .unwrap_or(&self.model.current_dir)\n .to_owned()\n } else {\n self.model.current_dir.join(dir_name)\n };\n self.model.current_dir = new_dir;\n let new_model = create_and_fill_model(&self.model.current_dir).unwrap();\n\n self.tree_view.set_model(Some(&new_model));\n }\n }\n },\n Msg::Quit => gtk::main_quit(),\n }\n }\n}\n\nimpl Widget for Win {\n type Root = Window;\n\n fn root(&self) -> Self::Root {\n self.window.clone()\n }\n\n fn view(relm: &Relm<Self>, model: Self::Model) -> Self {\n let window = gtk::Window::new(WindowType::Toplevel);\n let vbox = gtk::Box::new(Vertical, 0);\n let tree_view = gtk::TreeView::new();\n let column = gtk::TreeViewColumn::new();\n let cell = gtk::CellRendererText::new();\n\n window.set_title(\"TreeView example file browser\");\n window.set_border_width(10);\n window.set_position(gtk::WindowPosition::Center);\n window.set_default_size(350, 70);\n\n column.pack_start(&cell, true);\n \/\/ Assiciate view's column with model's id column\n column.add_attribute(&cell, \"text\", 0);\n tree_view.append_column(&column);\n\n let store_model = create_and_fill_model(&model.current_dir).unwrap();\n tree_view.set_model(Some(&store_model));\n\n vbox.add(&tree_view);\n window.add(&vbox);\n\n window.show_all();\n\n connect!(relm, tree_view, connect_cursor_changed(_), Msg::ItemSelect);\n connect!(relm, window, connect_delete_event(_, _), return (Some(Msg::Quit), Inhibit(false)));\n\n Win {\n tree_view,\n model,\n window,\n }\n }\n}\n\nfn create_and_fill_model(dir_str: &PathBuf) -> io::Result<gtk::ListStore> {\n \/\/ Single row model\n let model = gtk::ListStore::new(&[String::static_type(), bool::static_type()]);\n\n \/\/ Add the parent directory\n model.insert_with_values(None,\n &[VALUE_COL as u32, IS_DIR_COL as u32],\n &[&\"..\", &true]);\n\n let entry_iter = fs::read_dir(dir_str)?.filter_map(|x| x.ok());\n for entry in entry_iter {\n if let Ok(metadata) = entry.metadata() {\n\n if let Ok(file_name) = entry.file_name().into_string() {\n let (final_name, is_dir) = if metadata.is_dir() {\n 
(format!(\"{}\/\", file_name), true)\n } else {\n (file_name, false)\n };\n model.insert_with_values(None,\n &[VALUE_COL as u32, IS_DIR_COL as u32],\n &[&final_name, &is_dir]);\n }\n }\n }\n Ok(model)\n}\n\n\nfn main() {\n Win::run(()).unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add integer range search example (#490)<commit_after>\/\/ # Searching a range on an indexed int field.\n\/\/\n\/\/ Below is an example of creating an indexed integer field in your schema\n\/\/ You can use RangeQuery to get a Count of all occurrences in a given range.\n\n#[macro_use]\nextern crate tantivy;\nuse tantivy::collector::Count;\nuse tantivy::query::RangeQuery;\nuse tantivy::schema::{Schema, INT_INDEXED};\nuse tantivy::Index;\nuse tantivy::Result;\n\nfn run() -> Result<()> {\n \/\/ For the sake of simplicity, this schema will only have 1 field\n let mut schema_builder = Schema::builder();\n \/\/ INT_INDEXED is shorthand for such fields\n let year_field = schema_builder.add_u64_field(\"year\", INT_INDEXED);\n let schema = schema_builder.build();\n let index = Index::create_in_ram(schema);\n {\n let mut index_writer = index.writer_with_num_threads(1, 6_000_000)?;\n for year in 1950u64..2019u64 {\n index_writer.add_document(doc!(year_field => year));\n }\n index_writer.commit()?;\n \/\/ The index will be a range of years\n }\n index.load_searchers()?;\n let searcher = index.searcher();\n \/\/ The end is excluded i.e. here we are searching up to 1969\n let docs_in_the_sixties = RangeQuery::new_u64(year_field, 1960..1970);\n \/\/ Uses a Count collector to sum the total number of docs in the range\n let num_60s_books = searcher.search(&docs_in_the_sixties, &Count)?;\n assert_eq!(num_60s_books, 10);\n Ok(())\n}\n\nfn main() {\n run().unwrap()\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::CString;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n if sess.opts.debugging_opts.disable_instrumentation_preinliner {\n add(\"-disable-preinline\");\n }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features after applying `to_llvm_feature` must be known\n\/\/ to LLVM or the feature detection code will walk past the end of the feature\n\/\/ array, leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\", \"vfp2\", \"vfp3\", \"vfp4\"];\n\nconst AARCH64_WHITELIST: &'static [&'static str] = &[\"fp\", \"neon\", \"sve\", \"crc\", \"crypto\",\n \"ras\", \"lse\", \"rdm\", \"fp16\", \"rcpc\",\n \"dotprod\", \"v8.1a\", \"v8.2a\", \"v8.3a\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"aes\", \"avx\", \"avx2\", \"avx512bw\",\n \"avx512cd\", \"avx512dq\", \"avx512er\",\n \"avx512f\", \"avx512ifma\", \"avx512pf\",\n \"avx512vbmi\", \"avx512vl\", \"avx512vpopcntdq\",\n \"bmi1\", \"bmi2\", \"fma\", \"fxsr\",\n \"lzcnt\", \"mmx\", \"pclmulqdq\",\n \"popcnt\", \"rdrand\", \"rdseed\",\n \"sha\",\n \"sse\", \"sse2\", \"sse3\", \"sse4.1\",\n \"sse4.2\", \"sse4a\", \"ssse3\",\n \"tbm\", \"xsave\", \"xsavec\",\n \"xsaveopt\", \"xsaves\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\", \"hvx-double\"];\n\nconst POWERPC_WHITELIST: &'static [&'static str] = &[\"altivec\",\n \"power8-altivec\", \"power9-altivec\",\n \"power8-vector\", \"power9-vector\",\n \"vsx\"];\n\nconst MIPS_WHITELIST: &'static [&'static str] = &[\"fp64\", \"msa\"];\n\n\/\/\/ When rustdoc is running, provide a list of all known features so that all their respective\n\/\/\/ primtives may be documented.\n\/\/\/\n\/\/\/ IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this\n\/\/\/ iterator!\npub fn all_known_features() -> impl Iterator<Item=&'static str> {\n 
ARM_WHITELIST.iter().cloned()\n .chain(AARCH64_WHITELIST.iter().cloned())\n .chain(X86_WHITELIST.iter().cloned())\n .chain(HEXAGON_WHITELIST.iter().cloned())\n .chain(POWERPC_WHITELIST.iter().cloned())\n .chain(MIPS_WHITELIST.iter().cloned())\n}\n\npub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {\n let arch = if sess.target.target.arch == \"x86_64\" {\n \"x86\"\n } else {\n &*sess.target.target.arch\n };\n match (arch, s) {\n (\"x86\", \"pclmulqdq\") => \"pclmul\",\n (\"x86\", \"rdrand\") => \"rdrnd\",\n (\"x86\", \"bmi1\") => \"bmi\",\n (\"aarch64\", \"fp16\") => \"fullfp16\",\n (_, s) => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n target_feature_whitelist(sess)\n .iter()\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(sess, feature);\n let cstr = CString::new(llvm_feature).unwrap();\n unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) }\n })\n .map(|feature| Symbol::intern(feature)).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {\n match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n<commit_msg>Fix \"fp\" feature for AArch64<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::CString;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n if sess.opts.debugging_opts.disable_instrumentation_preinliner {\n add(\"-disable-preinline\");\n }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features after applying `to_llvm_feature` must be known\n\/\/ to LLVM or the feature detection code will walk past the end of the feature\n\/\/ array, leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\", \"v7\", \"vfp2\", \"vfp3\", \"vfp4\"];\n\nconst AARCH64_WHITELIST: &'static [&'static str] = &[\"fp\", \"neon\", \"sve\", \"crc\", \"crypto\",\n \"ras\", \"lse\", \"rdm\", \"fp16\", \"rcpc\",\n \"dotprod\", \"v8.1a\", \"v8.2a\", \"v8.3a\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"aes\", \"avx\", \"avx2\", \"avx512bw\",\n \"avx512cd\", \"avx512dq\", \"avx512er\",\n \"avx512f\", \"avx512ifma\", \"avx512pf\",\n \"avx512vbmi\", \"avx512vl\", \"avx512vpopcntdq\",\n \"bmi1\", \"bmi2\", \"fma\", \"fxsr\",\n \"lzcnt\", \"mmx\", \"pclmulqdq\",\n \"popcnt\", \"rdrand\", \"rdseed\",\n \"sha\",\n \"sse\", \"sse2\", \"sse3\", \"sse4.1\",\n \"sse4.2\", \"sse4a\", \"ssse3\",\n \"tbm\", \"xsave\", \"xsavec\",\n \"xsaveopt\", \"xsaves\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\", \"hvx-double\"];\n\nconst POWERPC_WHITELIST: &'static [&'static str] = &[\"altivec\",\n \"power8-altivec\", \"power9-altivec\",\n \"power8-vector\", \"power9-vector\",\n \"vsx\"];\n\nconst MIPS_WHITELIST: &'static [&'static str] = &[\"fp64\", \"msa\"];\n\n\/\/\/ When rustdoc is running, provide a list of all known features so that all their respective\n\/\/\/ primtives may be documented.\n\/\/\/\n\/\/\/ IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this\n\/\/\/ iterator!\npub fn all_known_features() -> impl Iterator<Item=&'static str> {\n 
ARM_WHITELIST.iter().cloned()\n .chain(AARCH64_WHITELIST.iter().cloned())\n .chain(X86_WHITELIST.iter().cloned())\n .chain(HEXAGON_WHITELIST.iter().cloned())\n .chain(POWERPC_WHITELIST.iter().cloned())\n .chain(MIPS_WHITELIST.iter().cloned())\n}\n\npub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {\n let arch = if sess.target.target.arch == \"x86_64\" {\n \"x86\"\n } else {\n &*sess.target.target.arch\n };\n match (arch, s) {\n (\"x86\", \"pclmulqdq\") => \"pclmul\",\n (\"x86\", \"rdrand\") => \"rdrnd\",\n (\"x86\", \"bmi1\") => \"bmi\",\n (\"aarch64\", \"fp\") => \"fp-armv8\",\n (\"aarch64\", \"fp16\") => \"fullfp16\",\n (_, s) => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n target_feature_whitelist(sess)\n .iter()\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(sess, feature);\n let cstr = CString::new(llvm_feature).unwrap();\n unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) }\n })\n .map(|feature| Symbol::intern(feature)).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {\n match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>build.rs work for msys2<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>an empy message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>matrix-vector multiplication single threaded implemented<commit_after>\/\/ a 3 component vector\n#[allow(dead_code)]\nstruct Vec3{\n x: f32,\n y: f32,\n z: f32\n}\n\n\/\/ a 3x3 matrix\n#[allow(dead_code)]\nstruct Mat3{\n values: [f32;9]\n}\n\nfn dot(vec1:&Vec3,vec2:&Vec3) -> f32{\n (vec1.x*vec2.x + vec1.y*vec2.y + vec1.z*vec2.z)\n}\n\n#[allow(non_snake_case)]\nfn MatXVec3(mat: &Mat3,vec: &Vec3) -> Vec3{\n let mut result = Vec3{x:0.0,y:0.0,z:0.0};\n\n \/\/ construct vectors from the matrix\n let mval = mat.values;\n let mvec1 = Vec3{x:mval[0],y:mval[1],z:mval[2]};\n let mvec2 = Vec3{x:mval[3],y:mval[4],z:mval[5]};\n let mvec3 = Vec3{x:mval[6],y:mval[7],z:mval[8]};\n\n \/\/ matrix multiplication is just a bunch of dot products\n result.x = dot(&mvec1,vec);\n result.y = dot(&mvec2,vec);\n result.y = dot(&mvec3,vec);\n\n result\n}\n\nfn main(){\n let mat_test = Mat3{\n values: [\n 1.0,0.0,0.0,\n 0.0,1.0,0.0,\n 0.0,0.0,1.0]\n };\n\n let vect = Vec3{x:2.0, y:1.3,z:3.2};\n let vect2 = Vec3{x:1.4,y:1.2,z:4.3};\n let vect3 = 
Vec3{x:0.5,y:0.3,z:2.7};\n\n let product = MatXVec3(&mat_test,&vect);\n println!(\"the result is {} {} {}\",product.x,product.y,product.z);\n println!(\"the dot product is {}\",dot(&vect2,&vect3));\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse compositing::resize_rate_limiter::ResizeRateLimiter;\nuse platform::{Application, Window};\nuse scripting::script_task::{LoadMsg, ScriptMsg};\nuse windowing::{ApplicationMethods, WindowMethods};\n\nuse azure::azure_hl::{BackendType, B8G8R8A8, DataSourceSurface, DrawTarget, SourceSurfaceMethods};\nuse core::cell::Cell;\nuse core::comm::{Chan, SharedChan, Port};\nuse core::util;\nuse geom::matrix::identity;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::compositor::{Compositor, LayerBuffer, LayerBufferSet};\nuse gfx::opts::Opts;\nuse layers::layers::{ARGB32Format, BasicImageData, ContainerLayer, ContainerLayerKind, Format};\nuse layers::layers::{Image, ImageData, ImageLayer, ImageLayerKind, RGB24Format, WithDataFn};\nuse layers::rendergl;\nuse layers::scene::Scene;\nuse servo_util::{time, url};\nuse servo_util::time::profile;\nuse servo_util::time::ProfilerChan;\n\nmod resize_rate_limiter;\n\n\/\/\/ The implementation of the layers-based compositor.\n#[deriving(Clone)]\npub struct CompositorImpl {\n chan: SharedChan<Msg>\n}\n\nimpl CompositorImpl {\n \/\/\/ Creates a new compositor instance.\n pub fn new(script_chan: SharedChan<ScriptMsg>,\n opts: Opts,\n prof_chan: ProfilerChan)\n -> CompositorImpl {\n let script_chan = Cell(script_chan);\n let chan: Chan<Msg> = do on_osmain |port| {\n debug!(\"preparing to enter main loop\");\n run_main_loop(port, script_chan.take(), &opts, prof_chan.clone());\n };\n\n CompositorImpl {\n chan: SharedChan::new(chan)\n }\n }\n}\n\n\/\/\/ Messages to the compositor.\npub enum Msg {\n BeginDrawing(Chan<LayerBufferSet>),\n Draw(Chan<LayerBufferSet>, LayerBufferSet),\n AddKeyHandler(Chan<()>),\n Exit\n}\n\n\/\/\/ Azure surface wrapping to work with the layers infrastructure.\nstruct AzureDrawTargetImageData {\n draw_target: DrawTarget,\n data_source_surface: DataSourceSurface,\n size: Size2D<uint>\n}\n\nimpl ImageData for AzureDrawTargetImageData {\n fn size(&self) -> Size2D<uint> {\n self.size\n }\n fn stride(&self) -> uint {\n self.data_source_surface.stride() as uint\n }\n fn format(&self) -> Format {\n \/\/ FIXME: This is not always correct. We should query the Azure draw target for the format.\n ARGB32Format\n }\n fn with_data(&self, f: WithDataFn) { \n do self.data_source_surface.with_data |data| {\n f(data);\n }\n }\n}\n\nfn run_main_loop(po: Port<Msg>, script_chan: SharedChan<ScriptMsg>, opts: &Opts, prof_chan:ProfilerChan) {\n let app: Application = ApplicationMethods::new();\n let window: @mut Window = WindowMethods::new(&app);\n let resize_rate_limiter = @mut ResizeRateLimiter(script_chan.clone());\n\n let surfaces = @mut SurfaceSet::new(opts.render_backend);\n let context = rendergl::init_render_context();\n\n \/\/ Create an initial layer tree.\n \/\/\n \/\/ TODO: There should be no initial layer tree until the renderer creates one from the display\n \/\/ list. 
This is only here because we don't have that logic in the renderer yet.\n let root_layer = @mut ContainerLayer();\n let original_layer_transform;\n {\n let image_data = @BasicImageData::new(Size2D(0, 0), 0, RGB24Format, ~[]);\n let image = @mut Image::new(image_data as @ImageData);\n let image_layer = @mut ImageLayer(image);\n original_layer_transform = image_layer.common.transform;\n image_layer.common.set_transform(original_layer_transform.scale(800.0, 600.0, 1.0));\n root_layer.add_child(ImageLayerKind(image_layer));\n }\n\n let scene = @mut Scene(ContainerLayerKind(root_layer), Size2D(800.0, 600.0), identity());\n let key_handlers: @mut ~[Chan<()>] = @mut ~[];\n let done = @mut false;\n\n \/\/ FIXME: This should not be a separate offset applied after the fact but rather should be\n \/\/ applied to the layers themselves on a per-layer basis. However, this won't work until scroll\n \/\/ positions are sent to content.\n let world_offset = @mut Point2D(0f32, 0f32);\n\n let check_for_messages: @fn() = || {\n \/\/ Periodically check if the script task responded to our last resize event\n resize_rate_limiter.check_resize_response();\n\n \/\/ Handle messages\n while po.peek() {\n match po.recv() {\n AddKeyHandler(key_ch) => key_handlers.push(key_ch),\n BeginDrawing(sender) => surfaces.lend(sender),\n Exit => *done = true,\n\n Draw(sender, draw_target) => {\n debug!(\"osmain: received new frame\");\n\n \/\/ Perform a buffer swap.\n surfaces.put_back(draw_target);\n surfaces.lend(sender);\n\n \/\/ Iterate over the children of the container layer.\n let mut current_layer_child = root_layer.first_child;\n\n \/\/ Replace the image layer data with the buffer data.\n let buffers = util::replace(&mut surfaces.front.layer_buffer_set.buffers, ~[]);\n for buffers.each |buffer| {\n let width = buffer.rect.size.width as uint;\n let height = buffer.rect.size.height as uint;\n\n debug!(\"osmain: compositing buffer rect %?\", &buffer.rect);\n\n let image_data = @AzureDrawTargetImageData {\n draw_target: buffer.draw_target.clone(),\n data_source_surface: buffer.draw_target.snapshot().get_data_surface(),\n size: Size2D(width, height)\n };\n let image = @mut Image::new(image_data as @ImageData);\n\n \/\/ Find or create an image layer.\n let image_layer;\n current_layer_child = match current_layer_child {\n None => {\n debug!(\"osmain: adding new image layer\");\n image_layer = @mut ImageLayer(image);\n root_layer.add_child(ImageLayerKind(image_layer));\n None\n }\n Some(ImageLayerKind(existing_image_layer)) => {\n image_layer = existing_image_layer;\n image_layer.set_image(image);\n\n \/\/ Move on to the next sibling.\n do current_layer_child.get().with_common |common| {\n common.next_sibling\n }\n }\n Some(_) => fail!(~\"found unexpected layer kind\"),\n };\n\n \/\/ Set the layer's transform.\n let mut origin = Point2D(buffer.rect.origin.x as f32,\n buffer.rect.origin.y as f32);\n let transform = original_layer_transform.translate(origin.x,\n origin.y,\n 0.0);\n let transform = transform.scale(width as f32, height as f32, 1.0);\n image_layer.common.set_transform(transform)\n }\n\n surfaces.front.layer_buffer_set.buffers = buffers\n }\n }\n }\n };\n\n do window.set_composite_callback {\n do profile(time::CompositingCategory, prof_chan.clone()) {\n debug!(\"compositor: compositing\");\n \/\/ Adjust the layer dimensions as necessary to correspond to the size of the window.\n scene.size = window.size();\n\n \/\/ Render the scene.\n rendergl::render_scene(context, scene);\n }\n\n window.present();\n }\n\n \/\/ Hook 
the windowing system's resize callback up to the resize rate limiter.\n do window.set_resize_callback |width, height| {\n debug!(\"osmain: window resized to %ux%u\", width, height);\n resize_rate_limiter.window_resized(width, height);\n }\n\n \/\/ When the user enters a new URL, load it.\n do window.set_load_url_callback |url_string| {\n debug!(\"osmain: loading URL `%s`\", url_string);\n script_chan.send(LoadMsg(url::make_url(url_string.to_str(), None)))\n }\n\n \/\/ When the user scrolls, move the layer around.\n do window.set_scroll_callback |delta| {\n \/\/ FIXME: Can't use `+=` due to a Rust bug.\n let world_offset_copy = *world_offset;\n *world_offset = world_offset_copy + delta;\n\n debug!(\"compositor: scrolled to %?\", *world_offset);\n\n root_layer.common.set_transform(identity().translate(world_offset.x, world_offset.y, 0.0));\n\n window.set_needs_display()\n }\n\n \/\/ Enter the main event loop.\n while !*done {\n \/\/ Check for new messages coming from the rendering task.\n check_for_messages();\n\n \/\/ Check for messages coming from the windowing system.\n window.check_loop();\n }\n}\n\n\/\/\/ Implementation of the abstract `Compositor` interface.\nimpl Compositor for CompositorImpl {\n fn begin_drawing(&self, next_dt: Chan<LayerBufferSet>) {\n self.chan.send(BeginDrawing(next_dt))\n }\n fn draw(&self, next_dt: Chan<LayerBufferSet>, draw_me: LayerBufferSet) {\n self.chan.send(Draw(next_dt, draw_me))\n }\n}\n\nstruct SurfaceSet {\n front: Surface,\n back: Surface,\n}\n\nimpl SurfaceSet {\n \/\/\/ Creates a new surface set.\n fn new(backend: BackendType) -> SurfaceSet {\n SurfaceSet {\n front: Surface::new(backend),\n back: Surface::new(backend),\n }\n }\n\n fn lend(&mut self, receiver: Chan<LayerBufferSet>) {\n \/\/ We are in a position to lend out the surface?\n assert!(self.front.have);\n \/\/ Ok then take it\n let old_layer_buffers = util::replace(&mut self.front.layer_buffer_set.buffers, ~[]);\n let new_layer_buffers = do old_layer_buffers.map |layer_buffer| {\n let draw_target_ref = &layer_buffer.draw_target;\n let layer_buffer = LayerBuffer {\n draw_target: draw_target_ref.clone(),\n rect: copy layer_buffer.rect,\n stride: layer_buffer.stride\n };\n debug!(\"osmain: lending surface %?\", layer_buffer);\n layer_buffer\n };\n self.front.layer_buffer_set.buffers = old_layer_buffers;\n\n let new_layer_buffer_set = LayerBufferSet { buffers: new_layer_buffers };\n receiver.send(new_layer_buffer_set);\n \/\/ Now we don't have it\n self.front.have = false;\n \/\/ But we (hopefully) have another!\n util::swap(&mut self.front, &mut self.back);\n \/\/ Let's look\n assert!(self.front.have);\n }\n\n fn put_back(&mut self, layer_buffer_set: LayerBufferSet) {\n \/\/ We have room for a return\n assert!(self.front.have);\n assert!(!self.back.have);\n\n self.back.layer_buffer_set = layer_buffer_set;\n\n \/\/ Now we have it again\n self.back.have = true;\n }\n}\n\nstruct Surface {\n layer_buffer_set: LayerBufferSet,\n have: bool,\n}\n\nimpl Surface {\n fn new(backend: BackendType) -> Surface {\n let layer_buffer = LayerBuffer {\n draw_target: DrawTarget::new(backend, Size2D(800, 600), B8G8R8A8),\n rect: Rect(Point2D(0u, 0u), Size2D(800u, 600u)),\n stride: 800 * 4\n };\n let layer_buffer_set = LayerBufferSet {\n buffers: ~[ layer_buffer ]\n };\n Surface {\n layer_buffer_set: layer_buffer_set,\n have: true\n }\n }\n}\n\n\/\/\/ A function for spawning into the platform's main thread.\nfn on_osmain<T: Owned>(f: ~fn(po: Port<T>)) -> Chan<T> {\n let (setup_po, setup_ch) = comm::stream();\n \/\/ 
FIXME: rust#6399\n let mut main_task = task::task();\n main_task.sched_mode(task::PlatformThread);\n do main_task.spawn {\n let (po, ch) = comm::stream();\n setup_ch.send(ch);\n f(po);\n }\n setup_po.recv()\n}\n\n<commit_msg>Remove unused 'mut'<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse compositing::resize_rate_limiter::ResizeRateLimiter;\nuse platform::{Application, Window};\nuse scripting::script_task::{LoadMsg, ScriptMsg};\nuse windowing::{ApplicationMethods, WindowMethods};\n\nuse azure::azure_hl::{BackendType, B8G8R8A8, DataSourceSurface, DrawTarget, SourceSurfaceMethods};\nuse core::cell::Cell;\nuse core::comm::{Chan, SharedChan, Port};\nuse core::util;\nuse geom::matrix::identity;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::compositor::{Compositor, LayerBuffer, LayerBufferSet};\nuse gfx::opts::Opts;\nuse layers::layers::{ARGB32Format, BasicImageData, ContainerLayer, ContainerLayerKind, Format};\nuse layers::layers::{Image, ImageData, ImageLayer, ImageLayerKind, RGB24Format, WithDataFn};\nuse layers::rendergl;\nuse layers::scene::Scene;\nuse servo_util::{time, url};\nuse servo_util::time::profile;\nuse servo_util::time::ProfilerChan;\n\nmod resize_rate_limiter;\n\n\/\/\/ The implementation of the layers-based compositor.\n#[deriving(Clone)]\npub struct CompositorImpl {\n chan: SharedChan<Msg>\n}\n\nimpl CompositorImpl {\n \/\/\/ Creates a new compositor instance.\n pub fn new(script_chan: SharedChan<ScriptMsg>,\n opts: Opts,\n prof_chan: ProfilerChan)\n -> CompositorImpl {\n let script_chan = Cell(script_chan);\n let chan: Chan<Msg> = do on_osmain |port| {\n debug!(\"preparing to enter main loop\");\n run_main_loop(port, script_chan.take(), &opts, prof_chan.clone());\n };\n\n CompositorImpl {\n chan: SharedChan::new(chan)\n }\n }\n}\n\n\/\/\/ Messages to the compositor.\npub enum Msg {\n BeginDrawing(Chan<LayerBufferSet>),\n Draw(Chan<LayerBufferSet>, LayerBufferSet),\n AddKeyHandler(Chan<()>),\n Exit\n}\n\n\/\/\/ Azure surface wrapping to work with the layers infrastructure.\nstruct AzureDrawTargetImageData {\n draw_target: DrawTarget,\n data_source_surface: DataSourceSurface,\n size: Size2D<uint>\n}\n\nimpl ImageData for AzureDrawTargetImageData {\n fn size(&self) -> Size2D<uint> {\n self.size\n }\n fn stride(&self) -> uint {\n self.data_source_surface.stride() as uint\n }\n fn format(&self) -> Format {\n \/\/ FIXME: This is not always correct. We should query the Azure draw target for the format.\n ARGB32Format\n }\n fn with_data(&self, f: WithDataFn) {\n do self.data_source_surface.with_data |data| {\n f(data);\n }\n }\n}\n\nfn run_main_loop(po: Port<Msg>, script_chan: SharedChan<ScriptMsg>, opts: &Opts, prof_chan:ProfilerChan) {\n let app: Application = ApplicationMethods::new();\n let window: @mut Window = WindowMethods::new(&app);\n let resize_rate_limiter = @mut ResizeRateLimiter(script_chan.clone());\n\n let surfaces = @mut SurfaceSet::new(opts.render_backend);\n let context = rendergl::init_render_context();\n\n \/\/ Create an initial layer tree.\n \/\/\n \/\/ TODO: There should be no initial layer tree until the renderer creates one from the display\n \/\/ list. 
This is only here because we don't have that logic in the renderer yet.\n let root_layer = @mut ContainerLayer();\n let original_layer_transform;\n {\n let image_data = @BasicImageData::new(Size2D(0, 0), 0, RGB24Format, ~[]);\n let image = @mut Image::new(image_data as @ImageData);\n let image_layer = @mut ImageLayer(image);\n original_layer_transform = image_layer.common.transform;\n image_layer.common.set_transform(original_layer_transform.scale(800.0, 600.0, 1.0));\n root_layer.add_child(ImageLayerKind(image_layer));\n }\n\n let scene = @mut Scene(ContainerLayerKind(root_layer), Size2D(800.0, 600.0), identity());\n let key_handlers: @mut ~[Chan<()>] = @mut ~[];\n let done = @mut false;\n\n \/\/ FIXME: This should not be a separate offset applied after the fact but rather should be\n \/\/ applied to the layers themselves on a per-layer basis. However, this won't work until scroll\n \/\/ positions are sent to content.\n let world_offset = @mut Point2D(0f32, 0f32);\n\n let check_for_messages: @fn() = || {\n \/\/ Periodically check if the script task responded to our last resize event\n resize_rate_limiter.check_resize_response();\n\n \/\/ Handle messages\n while po.peek() {\n match po.recv() {\n AddKeyHandler(key_ch) => key_handlers.push(key_ch),\n BeginDrawing(sender) => surfaces.lend(sender),\n Exit => *done = true,\n\n Draw(sender, draw_target) => {\n debug!(\"osmain: received new frame\");\n\n \/\/ Perform a buffer swap.\n surfaces.put_back(draw_target);\n surfaces.lend(sender);\n\n \/\/ Iterate over the children of the container layer.\n let mut current_layer_child = root_layer.first_child;\n\n \/\/ Replace the image layer data with the buffer data.\n let buffers = util::replace(&mut surfaces.front.layer_buffer_set.buffers, ~[]);\n for buffers.each |buffer| {\n let width = buffer.rect.size.width as uint;\n let height = buffer.rect.size.height as uint;\n\n debug!(\"osmain: compositing buffer rect %?\", &buffer.rect);\n\n let image_data = @AzureDrawTargetImageData {\n draw_target: buffer.draw_target.clone(),\n data_source_surface: buffer.draw_target.snapshot().get_data_surface(),\n size: Size2D(width, height)\n };\n let image = @mut Image::new(image_data as @ImageData);\n\n \/\/ Find or create an image layer.\n let image_layer;\n current_layer_child = match current_layer_child {\n None => {\n debug!(\"osmain: adding new image layer\");\n image_layer = @mut ImageLayer(image);\n root_layer.add_child(ImageLayerKind(image_layer));\n None\n }\n Some(ImageLayerKind(existing_image_layer)) => {\n image_layer = existing_image_layer;\n image_layer.set_image(image);\n\n \/\/ Move on to the next sibling.\n do current_layer_child.get().with_common |common| {\n common.next_sibling\n }\n }\n Some(_) => fail!(~\"found unexpected layer kind\"),\n };\n\n \/\/ Set the layer's transform.\n let origin = Point2D(buffer.rect.origin.x as f32,\n buffer.rect.origin.y as f32);\n let transform = original_layer_transform.translate(origin.x,\n origin.y,\n 0.0);\n let transform = transform.scale(width as f32, height as f32, 1.0);\n image_layer.common.set_transform(transform)\n }\n\n surfaces.front.layer_buffer_set.buffers = buffers\n }\n }\n }\n };\n\n do window.set_composite_callback {\n do profile(time::CompositingCategory, prof_chan.clone()) {\n debug!(\"compositor: compositing\");\n \/\/ Adjust the layer dimensions as necessary to correspond to the size of the window.\n scene.size = window.size();\n\n \/\/ Render the scene.\n rendergl::render_scene(context, scene);\n }\n\n window.present();\n }\n\n \/\/ Hook the 
windowing system's resize callback up to the resize rate limiter.\n do window.set_resize_callback |width, height| {\n debug!(\"osmain: window resized to %ux%u\", width, height);\n resize_rate_limiter.window_resized(width, height);\n }\n\n \/\/ When the user enters a new URL, load it.\n do window.set_load_url_callback |url_string| {\n debug!(\"osmain: loading URL `%s`\", url_string);\n script_chan.send(LoadMsg(url::make_url(url_string.to_str(), None)))\n }\n\n \/\/ When the user scrolls, move the layer around.\n do window.set_scroll_callback |delta| {\n \/\/ FIXME: Can't use `+=` due to a Rust bug.\n let world_offset_copy = *world_offset;\n *world_offset = world_offset_copy + delta;\n\n debug!(\"compositor: scrolled to %?\", *world_offset);\n\n root_layer.common.set_transform(identity().translate(world_offset.x, world_offset.y, 0.0));\n\n window.set_needs_display()\n }\n\n \/\/ Enter the main event loop.\n while !*done {\n \/\/ Check for new messages coming from the rendering task.\n check_for_messages();\n\n \/\/ Check for messages coming from the windowing system.\n window.check_loop();\n }\n}\n\n\/\/\/ Implementation of the abstract `Compositor` interface.\nimpl Compositor for CompositorImpl {\n fn begin_drawing(&self, next_dt: Chan<LayerBufferSet>) {\n self.chan.send(BeginDrawing(next_dt))\n }\n fn draw(&self, next_dt: Chan<LayerBufferSet>, draw_me: LayerBufferSet) {\n self.chan.send(Draw(next_dt, draw_me))\n }\n}\n\nstruct SurfaceSet {\n front: Surface,\n back: Surface,\n}\n\nimpl SurfaceSet {\n \/\/\/ Creates a new surface set.\n fn new(backend: BackendType) -> SurfaceSet {\n SurfaceSet {\n front: Surface::new(backend),\n back: Surface::new(backend),\n }\n }\n\n fn lend(&mut self, receiver: Chan<LayerBufferSet>) {\n \/\/ We are in a position to lend out the surface?\n assert!(self.front.have);\n \/\/ Ok then take it\n let old_layer_buffers = util::replace(&mut self.front.layer_buffer_set.buffers, ~[]);\n let new_layer_buffers = do old_layer_buffers.map |layer_buffer| {\n let draw_target_ref = &layer_buffer.draw_target;\n let layer_buffer = LayerBuffer {\n draw_target: draw_target_ref.clone(),\n rect: copy layer_buffer.rect,\n stride: layer_buffer.stride\n };\n debug!(\"osmain: lending surface %?\", layer_buffer);\n layer_buffer\n };\n self.front.layer_buffer_set.buffers = old_layer_buffers;\n\n let new_layer_buffer_set = LayerBufferSet { buffers: new_layer_buffers };\n receiver.send(new_layer_buffer_set);\n \/\/ Now we don't have it\n self.front.have = false;\n \/\/ But we (hopefully) have another!\n util::swap(&mut self.front, &mut self.back);\n \/\/ Let's look\n assert!(self.front.have);\n }\n\n fn put_back(&mut self, layer_buffer_set: LayerBufferSet) {\n \/\/ We have room for a return\n assert!(self.front.have);\n assert!(!self.back.have);\n\n self.back.layer_buffer_set = layer_buffer_set;\n\n \/\/ Now we have it again\n self.back.have = true;\n }\n}\n\nstruct Surface {\n layer_buffer_set: LayerBufferSet,\n have: bool,\n}\n\nimpl Surface {\n fn new(backend: BackendType) -> Surface {\n let layer_buffer = LayerBuffer {\n draw_target: DrawTarget::new(backend, Size2D(800, 600), B8G8R8A8),\n rect: Rect(Point2D(0u, 0u), Size2D(800u, 600u)),\n stride: 800 * 4\n };\n let layer_buffer_set = LayerBufferSet {\n buffers: ~[ layer_buffer ]\n };\n Surface {\n layer_buffer_set: layer_buffer_set,\n have: true\n }\n }\n}\n\n\/\/\/ A function for spawning into the platform's main thread.\nfn on_osmain<T: Owned>(f: ~fn(po: Port<T>)) -> Chan<T> {\n let (setup_po, setup_ch) = comm::stream();\n \/\/ 
FIXME: rust#6399\n let mut main_task = task::task();\n main_task.sched_mode(task::PlatformThread);\n do main_task.spawn {\n let (po, ch) = comm::stream();\n setup_ch.send(ch);\n f(po);\n }\n setup_po.recv()\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(bad_style)]\n\npub struct Intrinsic {\n pub inputs: &'static [&'static Type],\n pub output: &'static Type,\n\n pub definition: IntrinsicDef,\n}\n\n#[derive(Clone, Hash, Eq, PartialEq)]\npub enum Type {\n Void,\n Integer(\/* signed *\/ bool, u8, \/* llvm width *\/ u8),\n Float(u8),\n Pointer(&'static Type, Option<&'static Type>, \/* const *\/ bool),\n Vector(&'static Type, Option<&'static Type>, u16),\n Aggregate(bool, &'static [&'static Type]),\n}\n\npub enum IntrinsicDef {\n Named(&'static str),\n}\n\nstatic I8: Type = Type::Integer(true, 8, 8);\nstatic I16: Type = Type::Integer(true, 16, 16);\nstatic I32: Type = Type::Integer(true, 32, 32);\nstatic I64: Type = Type::Integer(true, 64, 64);\nstatic U8: Type = Type::Integer(false, 8, 8);\nstatic U16: Type = Type::Integer(false, 16, 16);\nstatic U32: Type = Type::Integer(false, 32, 32);\nstatic U64: Type = Type::Integer(false, 64, 64);\nstatic F32: Type = Type::Float(32);\nstatic F64: Type = Type::Float(64);\n\nstatic I32_8: Type = Type::Integer(true, 32, 8);\n\nstatic I8x8: Type = Type::Vector(&I8, None, 8);\nstatic U8x8: Type = Type::Vector(&U8, None, 8);\nstatic I8x16: Type = Type::Vector(&I8, None, 16);\nstatic U8x16: Type = Type::Vector(&U8, None, 16);\nstatic I8x32: Type = Type::Vector(&I8, None, 32);\nstatic U8x32: Type = Type::Vector(&U8, None, 32);\nstatic I8x64: Type = Type::Vector(&I8, None, 64);\nstatic U8x64: Type = Type::Vector(&U8, None, 64);\nstatic I8x128: Type = Type::Vector(&I8, None, 128);\nstatic U8x128: Type = Type::Vector(&U8, None, 128);\nstatic I8x256: Type = Type::Vector(&I8, None, 256);\nstatic U8x256: Type = Type::Vector(&U8, None, 256);\n\nstatic I16x4: Type = Type::Vector(&I16, None, 4);\nstatic U16x4: Type = Type::Vector(&U16, None, 4);\nstatic I16x8: Type = Type::Vector(&I16, None, 8);\nstatic U16x8: Type = Type::Vector(&U16, None, 8);\nstatic I16x16: Type = Type::Vector(&I16, None, 16);\nstatic U16x16: Type = Type::Vector(&U16, None, 16);\nstatic I16x32: Type = Type::Vector(&I16, None, 32);\nstatic U16x32: Type = Type::Vector(&U16, None, 32);\nstatic I16x64: Type = Type::Vector(&I16, None, 64);\nstatic U16x64: Type = Type::Vector(&U16, None, 64);\nstatic I16x128: Type = Type::Vector(&I16, None, 128);\nstatic U16x128: Type = Type::Vector(&U16, None, 128);\n\nstatic I32x2: Type = Type::Vector(&I32, None, 2);\nstatic U32x2: Type = Type::Vector(&U32, None, 2);\nstatic I32x4: Type = Type::Vector(&I32, None, 4);\nstatic U32x4: Type = Type::Vector(&U32, None, 4);\nstatic I32x8: Type = Type::Vector(&I32, None, 8);\nstatic U32x8: Type = Type::Vector(&U32, None, 8);\nstatic I32x16: Type = Type::Vector(&I32, None, 16);\nstatic U32x16: Type = Type::Vector(&U32, None, 16);\nstatic I32x32: Type = Type::Vector(&I32, None, 32);\nstatic U32x32: Type = Type::Vector(&U32, 
None, 32);\nstatic I32x64: Type = Type::Vector(&I32, None, 64);\nstatic U32x64: Type = Type::Vector(&U32, None, 64);\n\nstatic I64x1: Type = Type::Vector(&I64, None, 1);\nstatic U64x1: Type = Type::Vector(&U64, None, 1);\nstatic I64x2: Type = Type::Vector(&I64, None, 2);\nstatic U64x2: Type = Type::Vector(&U64, None, 2);\nstatic I64x4: Type = Type::Vector(&I64, None, 4);\nstatic U64x4: Type = Type::Vector(&U64, None, 4);\n\nstatic F32x2: Type = Type::Vector(&F32, None, 2);\nstatic F32x4: Type = Type::Vector(&F32, None, 4);\nstatic F32x8: Type = Type::Vector(&F32, None, 8);\nstatic F64x1: Type = Type::Vector(&F64, None, 1);\nstatic F64x2: Type = Type::Vector(&F64, None, 2);\nstatic F64x4: Type = Type::Vector(&F64, None, 4);\n\nstatic I32x4_F32: Type = Type::Vector(&I32, Some(&F32), 4);\nstatic I32x8_F32: Type = Type::Vector(&I32, Some(&F32), 8);\nstatic I64x2_F64: Type = Type::Vector(&I64, Some(&F64), 2);\nstatic I64x4_F64: Type = Type::Vector(&I64, Some(&F64), 4);\n\nstatic VOID: Type = Type::Void;\n\nmod x86;\nmod arm;\nmod aarch64;\nmod nvptx;\nmod hexagon;\nmod powerpc;\n\nimpl Intrinsic {\n pub fn find(name: &str) -> Option<Intrinsic> {\n if name.starts_with(\"x86_\") {\n x86::find(name)\n } else if name.starts_with(\"arm_\") {\n arm::find(name)\n } else if name.starts_with(\"aarch64_\") {\n aarch64::find(name)\n } else if name.starts_with(\"nvptx_\") {\n nvptx::find(name)\n } else if name.starts_with(\"Q6_\") {\n hexagon::find(name)\n } else if name.starts_with(\"powerpc_\") {\n powerpc::find(name)\n } else {\n None\n }\n }\n}\n<commit_msg>[nll] librustc_platform_intrinsics: enable feature(nll) for bootstrap<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(bad_style)]\n\n#![cfg_attr(not(stage0), feature(nll))]\n\npub struct Intrinsic {\n pub inputs: &'static [&'static Type],\n pub output: &'static Type,\n\n pub definition: IntrinsicDef,\n}\n\n#[derive(Clone, Hash, Eq, PartialEq)]\npub enum Type {\n Void,\n Integer(\/* signed *\/ bool, u8, \/* llvm width *\/ u8),\n Float(u8),\n Pointer(&'static Type, Option<&'static Type>, \/* const *\/ bool),\n Vector(&'static Type, Option<&'static Type>, u16),\n Aggregate(bool, &'static [&'static Type]),\n}\n\npub enum IntrinsicDef {\n Named(&'static str),\n}\n\nstatic I8: Type = Type::Integer(true, 8, 8);\nstatic I16: Type = Type::Integer(true, 16, 16);\nstatic I32: Type = Type::Integer(true, 32, 32);\nstatic I64: Type = Type::Integer(true, 64, 64);\nstatic U8: Type = Type::Integer(false, 8, 8);\nstatic U16: Type = Type::Integer(false, 16, 16);\nstatic U32: Type = Type::Integer(false, 32, 32);\nstatic U64: Type = Type::Integer(false, 64, 64);\nstatic F32: Type = Type::Float(32);\nstatic F64: Type = Type::Float(64);\n\nstatic I32_8: Type = Type::Integer(true, 32, 8);\n\nstatic I8x8: Type = Type::Vector(&I8, None, 8);\nstatic U8x8: Type = Type::Vector(&U8, None, 8);\nstatic I8x16: Type = Type::Vector(&I8, None, 16);\nstatic U8x16: Type = Type::Vector(&U8, None, 16);\nstatic I8x32: Type = Type::Vector(&I8, None, 32);\nstatic U8x32: Type = Type::Vector(&U8, None, 32);\nstatic I8x64: Type = Type::Vector(&I8, None, 64);\nstatic U8x64: Type = Type::Vector(&U8, None, 64);\nstatic I8x128: Type = Type::Vector(&I8, None, 128);\nstatic U8x128: Type = Type::Vector(&U8, None, 128);\nstatic I8x256: Type = Type::Vector(&I8, None, 256);\nstatic U8x256: Type = Type::Vector(&U8, None, 256);\n\nstatic I16x4: Type = Type::Vector(&I16, None, 4);\nstatic U16x4: Type = Type::Vector(&U16, None, 4);\nstatic I16x8: Type = Type::Vector(&I16, None, 8);\nstatic U16x8: Type = Type::Vector(&U16, None, 8);\nstatic I16x16: Type = Type::Vector(&I16, None, 16);\nstatic U16x16: Type = Type::Vector(&U16, None, 16);\nstatic I16x32: Type = Type::Vector(&I16, None, 32);\nstatic U16x32: Type = Type::Vector(&U16, None, 32);\nstatic I16x64: Type = Type::Vector(&I16, None, 64);\nstatic U16x64: Type = Type::Vector(&U16, None, 64);\nstatic I16x128: Type = Type::Vector(&I16, None, 128);\nstatic U16x128: Type = Type::Vector(&U16, None, 128);\n\nstatic I32x2: Type = Type::Vector(&I32, None, 2);\nstatic U32x2: Type = Type::Vector(&U32, None, 2);\nstatic I32x4: Type = Type::Vector(&I32, None, 4);\nstatic U32x4: Type = Type::Vector(&U32, None, 4);\nstatic I32x8: Type = Type::Vector(&I32, None, 8);\nstatic U32x8: Type = Type::Vector(&U32, None, 8);\nstatic I32x16: Type = Type::Vector(&I32, None, 16);\nstatic U32x16: Type = Type::Vector(&U32, None, 16);\nstatic I32x32: Type = Type::Vector(&I32, None, 32);\nstatic U32x32: Type = Type::Vector(&U32, None, 32);\nstatic I32x64: Type = Type::Vector(&I32, None, 64);\nstatic U32x64: Type = Type::Vector(&U32, None, 64);\n\nstatic I64x1: Type = Type::Vector(&I64, None, 1);\nstatic U64x1: Type = Type::Vector(&U64, None, 1);\nstatic I64x2: Type = Type::Vector(&I64, None, 2);\nstatic U64x2: Type = Type::Vector(&U64, None, 2);\nstatic I64x4: Type = Type::Vector(&I64, None, 4);\nstatic U64x4: Type = Type::Vector(&U64, None, 4);\n\nstatic F32x2: Type = Type::Vector(&F32, None, 2);\nstatic F32x4: Type = Type::Vector(&F32, None, 4);\nstatic F32x8: Type = Type::Vector(&F32, None, 8);\nstatic F64x1: Type = Type::Vector(&F64, 
None, 1);\nstatic F64x2: Type = Type::Vector(&F64, None, 2);\nstatic F64x4: Type = Type::Vector(&F64, None, 4);\n\nstatic I32x4_F32: Type = Type::Vector(&I32, Some(&F32), 4);\nstatic I32x8_F32: Type = Type::Vector(&I32, Some(&F32), 8);\nstatic I64x2_F64: Type = Type::Vector(&I64, Some(&F64), 2);\nstatic I64x4_F64: Type = Type::Vector(&I64, Some(&F64), 4);\n\nstatic VOID: Type = Type::Void;\n\nmod x86;\nmod arm;\nmod aarch64;\nmod nvptx;\nmod hexagon;\nmod powerpc;\n\nimpl Intrinsic {\n pub fn find(name: &str) -> Option<Intrinsic> {\n if name.starts_with(\"x86_\") {\n x86::find(name)\n } else if name.starts_with(\"arm_\") {\n arm::find(name)\n } else if name.starts_with(\"aarch64_\") {\n aarch64::find(name)\n } else if name.starts_with(\"nvptx_\") {\n nvptx::find(name)\n } else if name.starts_with(\"Q6_\") {\n hexagon::find(name)\n } else if name.starts_with(\"powerpc_\") {\n powerpc::find(name)\n } else {\n None\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse resource_task::{ProgressMsg, Metadata, Payload, Done, LoaderTask, start_sending};\n\nuse std::io;\nuse std::io::File;\nuse servo_util::task::spawn_named;\n\n\/\/FIXME: https:\/\/github.com\/mozilla\/rust\/issues\/12892\nstatic READ_SIZE: uint = 1;\n\nfn read_all(reader: &mut io::Stream, progress_chan: &Sender<ProgressMsg>)\n -> Result<(), String> {\n loop {\n let mut buf = vec!();\n match reader.push_at_least(READ_SIZE, READ_SIZE, &mut buf) {\n Ok(_) => progress_chan.send(Payload(buf)),\n Err(e) => match e.kind {\n io::EndOfFile => return Ok(()),\n _ => return Err(e.desc.to_string()),\n }\n }\n }\n}\n\npub fn factory() -> LoaderTask {\n let f: LoaderTask = proc(load_data, start_chan) {\n let url = load_data.url;\n assert!(\"file\" == url.scheme.as_slice());\n let progress_chan = start_sending(start_chan, Metadata::default(url.clone()));\n spawn_named(\"file_loader\", proc() {\n match File::open_mode(&Path::new(url.serialize_path().unwrap()), io::Open, io::Read) {\n Ok(ref mut reader) => {\n let res = read_all(reader as &mut io::Stream, &progress_chan);\n progress_chan.send(Done(res));\n }\n Err(e) => {\n progress_chan.send(Done(Err(e.desc.to_string())));\n }\n };\n });\n };\n f\n}\n<commit_msg>Make perf_rainbow.html load ~20x faster by making the file loader read in 8 kB blocks.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse resource_task::{ProgressMsg, Metadata, Payload, Done, LoaderTask, start_sending};\n\nuse std::io;\nuse std::io::File;\nuse servo_util::task::spawn_named;\n\nstatic READ_SIZE: uint = 8192;\n\nfn read_all(reader: &mut io::Stream, progress_chan: &Sender<ProgressMsg>)\n -> Result<(), String> {\n loop {\n let mut buf = vec!();\n match reader.push_at_least(READ_SIZE, READ_SIZE, &mut buf) {\n Ok(_) => progress_chan.send(Payload(buf)),\n Err(e) => match e.kind {\n io::EndOfFile => {\n if buf.len() > 0 {\n progress_chan.send(Payload(buf));\n }\n return Ok(());\n }\n _ => return Err(e.desc.to_string()),\n }\n }\n }\n}\n\npub fn factory() -> LoaderTask {\n let f: LoaderTask = proc(load_data, start_chan) {\n let url = load_data.url;\n assert!(\"file\" == url.scheme.as_slice());\n let progress_chan = start_sending(start_chan, Metadata::default(url.clone()));\n spawn_named(\"file_loader\", proc() {\n match File::open_mode(&Path::new(url.serialize_path().unwrap()), io::Open, io::Read) {\n Ok(ref mut reader) => {\n let res = read_all(reader as &mut io::Stream, &progress_chan);\n progress_chan.send(Done(res));\n }\n Err(e) => {\n progress_chan.send(Done(Err(e.desc.to_string())));\n }\n };\n });\n };\n f\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"foo\"]\n\n#![feature(optin_builtin_traits)]\n\npub struct Foo;\n\n\/\/ @has foo\/struct.Foo.html\n\/\/ @!has - '\/\/*[@class=\"synthetic-implementations\"]' 'Auto Trait Implementations'\nimpl !Send for Foo {}\nimpl !Sync for Foo {}\n<commit_msg>Fix empty-section.rs test<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![crate_name = \"foo\"]\n\n#![feature(optin_builtin_traits)]\n\npub struct Foo;\n\n\/\/ @has foo\/struct.Foo.html\n\/\/ @!has - 'Auto Trait Implementations'\nimpl !Send for Foo {}\nimpl !Sync for Foo {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add missing file<commit_after>use itertools::free::join;\nuse ast::{Expression, TreePrinter, Type, prefix};\nuse compileerror::{Span};\n\n#[derive(Debug, Eq, PartialEq, Clone)]\npub struct StructMember\n{\n pub name: String,\n pub typ: Type,\n pub span: Span,\n}\n\npub fn struct_member(name: &str, typ: Type, span: Span) -> StructMember\n{\n StructMember{\n name: name.into(),\n typ: typ,\n span: span,\n }\n}\n\n#[derive(Debug, Eq, PartialEq, Clone)]\npub struct StructDeclaration\n{\n pub name: String,\n pub members: Vec<StructMember>,\n pub span: Span,\n}\n\npub fn struct_declaration(name: &str, members: Vec<StructMember>, span: Span) -> StructDeclaration\n{\n StructDeclaration{\n name: name.into(),\n members: members,\n span: span,\n }\n}\n\nimpl StructDeclaration\n{\n pub fn get_type(&self) -> Type\n {\n Type::Struct(self.name.clone(), self.members.iter().map(|m| m.typ.clone()).collect())\n }\n}\n\n#[derive(Debug, Eq, PartialEq, Clone)]\npub struct StructInitializer\n{\n pub struct_name: String,\n pub member_initializers: Vec<Expression>,\n pub span: Span,\n}\n\npub fn struct_initializer(struct_name: &str, member_initializers: Vec<Expression>, span: Span) -> StructInitializer\n{\n StructInitializer{\n struct_name: struct_name.into(),\n member_initializers: member_initializers,\n span: span,\n }\n}\n\n#[derive(Debug, Eq, PartialEq, Clone)]\npub struct StructMemberAccess\n{\n pub name: String,\n pub members: Vec<String>,\n pub span: Span,\n}\n\npub fn struct_member_access(name: &str, members: Vec<String>, span: Span) -> StructMemberAccess\n{\n StructMemberAccess{\n name: name.into(),\n members: members,\n span: span,\n }\n}\n\nimpl TreePrinter for StructDeclaration\n{\n fn print(&self, level: usize)\n {\n let p = prefix(level);\n println!(\"{}struct {} ({})\", p, self.name, self.span);\n for m in &self.members {\n m.print(level + 1)\n }\n }\n}\n\nimpl TreePrinter for StructInitializer\n{\n fn print(&self, level: usize)\n {\n let p = prefix(level);\n println!(\"{}struct initializer {} ({})\", p, self.struct_name, self.span);\n for m in &self.member_initializers {\n m.print(level + 1)\n }\n }\n}\n\nimpl TreePrinter for StructMember\n{\n fn print(&self, level: usize)\n {\n let p = prefix(level);\n println!(\"{}{}:{} ({})\", p, self.name, self.typ, self.span);\n }\n}\n\nimpl TreePrinter for StructMemberAccess\n{\n fn print(&self, level: usize)\n {\n let p = prefix(level);\n println!(\"{}{}.{} ({})\", p, self.name, join(self.members.iter(), \".\"), self.span);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Methods for the various MIR types. 
These are intended for use after\n * building is complete.\n *\/\n\nuse repr::*;\nuse rustc::middle::subst::Substs;\nuse rustc::middle::ty::{self, AdtDef, Ty};\nuse rustc_front::hir;\n\n#[derive(Copy, Clone, Debug)]\npub enum LvalueTy<'tcx> {\n \/\/\/ Normal type.\n Ty { ty: Ty<'tcx> },\n\n \/\/\/ Downcast to a particular variant of an enum.\n Downcast { adt_def: AdtDef<'tcx>,\n substs: &'tcx Substs<'tcx>,\n variant_index: usize },\n}\n\nimpl<'tcx> LvalueTy<'tcx> {\n pub fn from_ty(ty: Ty<'tcx>) -> LvalueTy<'tcx> {\n LvalueTy::Ty { ty: ty }\n }\n\n pub fn to_ty(&self, tcx: &ty::ctxt<'tcx>) -> Ty<'tcx> {\n match *self {\n LvalueTy::Ty { ty } =>\n ty,\n LvalueTy::Downcast { adt_def, substs, variant_index: _ } =>\n tcx.mk_enum(adt_def, substs),\n }\n }\n\n pub fn projection_ty(self,\n tcx: &ty::ctxt<'tcx>,\n elem: &LvalueElem<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *elem {\n ProjectionElem::Deref =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_deref(true, ty::LvaluePreference::NoPreference)\n .unwrap()\n .ty\n },\n ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_index().unwrap()\n },\n ProjectionElem::Downcast(adt_def1, index) =>\n match self.to_ty(tcx).sty {\n ty::TyEnum(adt_def, substs) => {\n assert!(index < adt_def.variants.len());\n assert_eq!(adt_def, adt_def1);\n LvalueTy::Downcast { adt_def: adt_def,\n substs: substs,\n variant_index: index }\n }\n _ => {\n tcx.sess.bug(&format!(\"cannot downcast non-enum type: `{:?}`\", self))\n }\n },\n ProjectionElem::Field(field) => {\n let field_ty = match self {\n LvalueTy::Ty { ty } => match ty.sty {\n ty::TyStruct(adt_def, substs) =>\n adt_def.struct_variant().fields[field.index()].ty(tcx, substs),\n ty::TyTuple(ref tys) =>\n tys[field.index()],\n _ =>\n tcx.sess.bug(&format!(\"cannot get field of type: `{:?}`\", ty)),\n },\n LvalueTy::Downcast { adt_def, substs, variant_index } =>\n adt_def.variants[variant_index].fields[field.index()].ty(tcx, substs),\n };\n LvalueTy::Ty { ty: field_ty }\n }\n }\n }\n}\n\nimpl<'tcx> Mir<'tcx> {\n pub fn operand_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n operand: &Operand<'tcx>)\n -> Ty<'tcx>\n {\n match *operand {\n Operand::Consume(ref l) => self.lvalue_ty(tcx, l).to_ty(tcx),\n Operand::Constant(ref c) => c.ty,\n }\n }\n\n pub fn binop_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n op: BinOp,\n lhs_ty: Ty<'tcx>,\n rhs_ty: Ty<'tcx>)\n -> Ty<'tcx>\n {\n \/\/ FIXME: handle SIMD correctly\n match op {\n BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div | BinOp::Rem |\n BinOp::BitXor | BinOp::BitAnd | BinOp::BitOr => {\n \/\/ these should be integers or floats of the same size.\n assert_eq!(lhs_ty, rhs_ty);\n lhs_ty\n }\n BinOp::Shl | BinOp::Shr => {\n lhs_ty \/\/ lhs_ty can be != rhs_ty\n }\n BinOp::Eq | BinOp::Lt | BinOp::Le |\n BinOp::Ne | BinOp::Ge | BinOp::Gt => {\n tcx.types.bool\n }\n }\n }\n\n pub fn lvalue_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n lvalue: &Lvalue<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *lvalue {\n Lvalue::Var(index) =>\n LvalueTy::Ty { ty: self.var_decls[index as usize].ty },\n Lvalue::Temp(index) =>\n LvalueTy::Ty { ty: self.temp_decls[index as usize].ty },\n Lvalue::Arg(index) =>\n LvalueTy::Ty { ty: self.arg_decls[index as usize].ty },\n Lvalue::Static(def_id) =>\n LvalueTy::Ty { ty: tcx.lookup_item_type(def_id).ty },\n Lvalue::ReturnPointer =>\n LvalueTy::Ty { ty: self.return_ty.unwrap() },\n Lvalue::Projection(ref proj) =>\n self.lvalue_ty(tcx, &proj.base).projection_ty(tcx, &proj.elem)\n }\n }\n}\n\nimpl BorrowKind {\n pub fn 
to_mutbl_lossy(self) -> hir::Mutability {\n match self {\n BorrowKind::Mut => hir::MutMutable,\n BorrowKind::Shared => hir::MutImmutable,\n\n \/\/ We have no type corresponding to a unique imm borrow, so\n \/\/ use `&mut`. It gives all the capabilities of an `&uniq`\n \/\/ and hence is a safe \"over approximation\".\n BorrowKind::Unique => hir::MutMutable,\n }\n }\n}\n\nimpl BinOp {\n pub fn to_hir_binop(self) -> hir::BinOp_ {\n match self {\n BinOp::Add => hir::BinOp_::BiAdd,\n BinOp::Sub => hir::BinOp_::BiSub,\n BinOp::Mul => hir::BinOp_::BiMul,\n BinOp::Div => hir::BinOp_::BiDiv,\n BinOp::Rem => hir::BinOp_::BiRem,\n BinOp::BitXor => hir::BinOp_::BiBitXor,\n BinOp::BitAnd => hir::BinOp_::BiBitAnd,\n BinOp::BitOr => hir::BinOp_::BiBitOr,\n BinOp::Shl => hir::BinOp_::BiShl,\n BinOp::Shr => hir::BinOp_::BiShr,\n BinOp::Eq => hir::BinOp_::BiEq,\n BinOp::Ne => hir::BinOp_::BiNe,\n BinOp::Lt => hir::BinOp_::BiLt,\n BinOp::Gt => hir::BinOp_::BiGt,\n BinOp::Le => hir::BinOp_::BiLe,\n BinOp::Ge => hir::BinOp_::BiGe\n }\n }\n}\n<commit_msg>Auto merge of #29817 - michaelwoerister:mir-ty-closure-fields, r=nikomatsakis<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/*!\n * Methods for the various MIR types. These are intended for use after\n * building is complete.\n *\/\n\nuse repr::*;\nuse rustc::middle::subst::Substs;\nuse rustc::middle::ty::{self, AdtDef, Ty};\nuse rustc_front::hir;\n\n#[derive(Copy, Clone, Debug)]\npub enum LvalueTy<'tcx> {\n \/\/\/ Normal type.\n Ty { ty: Ty<'tcx> },\n\n \/\/\/ Downcast to a particular variant of an enum.\n Downcast { adt_def: AdtDef<'tcx>,\n substs: &'tcx Substs<'tcx>,\n variant_index: usize },\n}\n\nimpl<'tcx> LvalueTy<'tcx> {\n pub fn from_ty(ty: Ty<'tcx>) -> LvalueTy<'tcx> {\n LvalueTy::Ty { ty: ty }\n }\n\n pub fn to_ty(&self, tcx: &ty::ctxt<'tcx>) -> Ty<'tcx> {\n match *self {\n LvalueTy::Ty { ty } =>\n ty,\n LvalueTy::Downcast { adt_def, substs, variant_index: _ } =>\n tcx.mk_enum(adt_def, substs),\n }\n }\n\n pub fn projection_ty(self,\n tcx: &ty::ctxt<'tcx>,\n elem: &LvalueElem<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *elem {\n ProjectionElem::Deref =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_deref(true, ty::LvaluePreference::NoPreference)\n .unwrap()\n .ty\n },\n ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. 
} =>\n LvalueTy::Ty {\n ty: self.to_ty(tcx).builtin_index().unwrap()\n },\n ProjectionElem::Downcast(adt_def1, index) =>\n match self.to_ty(tcx).sty {\n ty::TyEnum(adt_def, substs) => {\n assert!(index < adt_def.variants.len());\n assert_eq!(adt_def, adt_def1);\n LvalueTy::Downcast { adt_def: adt_def,\n substs: substs,\n variant_index: index }\n }\n _ => {\n tcx.sess.bug(&format!(\"cannot downcast non-enum type: `{:?}`\", self))\n }\n },\n ProjectionElem::Field(field) => {\n let field_ty = match self {\n LvalueTy::Ty { ty } => match ty.sty {\n ty::TyStruct(adt_def, substs) =>\n adt_def.struct_variant().fields[field.index()].ty(tcx, substs),\n ty::TyTuple(ref tys) =>\n tys[field.index()],\n ty::TyClosure(_, ref closure_substs) =>\n closure_substs.upvar_tys[field.index()],\n _ =>\n tcx.sess.bug(&format!(\"cannot get field of type: `{:?}`\", ty)),\n },\n LvalueTy::Downcast { adt_def, substs, variant_index } =>\n adt_def.variants[variant_index].fields[field.index()].ty(tcx, substs),\n };\n LvalueTy::Ty { ty: field_ty }\n }\n }\n }\n}\n\nimpl<'tcx> Mir<'tcx> {\n pub fn operand_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n operand: &Operand<'tcx>)\n -> Ty<'tcx>\n {\n match *operand {\n Operand::Consume(ref l) => self.lvalue_ty(tcx, l).to_ty(tcx),\n Operand::Constant(ref c) => c.ty,\n }\n }\n\n pub fn binop_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n op: BinOp,\n lhs_ty: Ty<'tcx>,\n rhs_ty: Ty<'tcx>)\n -> Ty<'tcx>\n {\n \/\/ FIXME: handle SIMD correctly\n match op {\n BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div | BinOp::Rem |\n BinOp::BitXor | BinOp::BitAnd | BinOp::BitOr => {\n \/\/ these should be integers or floats of the same size.\n assert_eq!(lhs_ty, rhs_ty);\n lhs_ty\n }\n BinOp::Shl | BinOp::Shr => {\n lhs_ty \/\/ lhs_ty can be != rhs_ty\n }\n BinOp::Eq | BinOp::Lt | BinOp::Le |\n BinOp::Ne | BinOp::Ge | BinOp::Gt => {\n tcx.types.bool\n }\n }\n }\n\n pub fn lvalue_ty(&self,\n tcx: &ty::ctxt<'tcx>,\n lvalue: &Lvalue<'tcx>)\n -> LvalueTy<'tcx>\n {\n match *lvalue {\n Lvalue::Var(index) =>\n LvalueTy::Ty { ty: self.var_decls[index as usize].ty },\n Lvalue::Temp(index) =>\n LvalueTy::Ty { ty: self.temp_decls[index as usize].ty },\n Lvalue::Arg(index) =>\n LvalueTy::Ty { ty: self.arg_decls[index as usize].ty },\n Lvalue::Static(def_id) =>\n LvalueTy::Ty { ty: tcx.lookup_item_type(def_id).ty },\n Lvalue::ReturnPointer =>\n LvalueTy::Ty { ty: self.return_ty.unwrap() },\n Lvalue::Projection(ref proj) =>\n self.lvalue_ty(tcx, &proj.base).projection_ty(tcx, &proj.elem)\n }\n }\n}\n\nimpl BorrowKind {\n pub fn to_mutbl_lossy(self) -> hir::Mutability {\n match self {\n BorrowKind::Mut => hir::MutMutable,\n BorrowKind::Shared => hir::MutImmutable,\n\n \/\/ We have no type corresponding to a unique imm borrow, so\n \/\/ use `&mut`. 
It gives all the capabilities of an `&uniq`\n \/\/ and hence is a safe \"over approximation\".\n BorrowKind::Unique => hir::MutMutable,\n }\n }\n}\n\nimpl BinOp {\n pub fn to_hir_binop(self) -> hir::BinOp_ {\n match self {\n BinOp::Add => hir::BinOp_::BiAdd,\n BinOp::Sub => hir::BinOp_::BiSub,\n BinOp::Mul => hir::BinOp_::BiMul,\n BinOp::Div => hir::BinOp_::BiDiv,\n BinOp::Rem => hir::BinOp_::BiRem,\n BinOp::BitXor => hir::BinOp_::BiBitXor,\n BinOp::BitAnd => hir::BinOp_::BiBitAnd,\n BinOp::BitOr => hir::BinOp_::BiBitOr,\n BinOp::Shl => hir::BinOp_::BiShl,\n BinOp::Shr => hir::BinOp_::BiShr,\n BinOp::Eq => hir::BinOp_::BiEq,\n BinOp::Ne => hir::BinOp_::BiNe,\n BinOp::Lt => hir::BinOp_::BiLt,\n BinOp::Gt => hir::BinOp_::BiGt,\n BinOp::Le => hir::BinOp_::BiLe,\n BinOp::Ge => hir::BinOp_::BiGe\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust's reserved word and json<commit_after><|endoftext|>"} {"text":"<commit_before>\nuse std::iter::{repeat, Enumerate};\nuse std::slice;\nuse std::collections::VecMap;\n\nuse alphabets::Alphabet;\n\n\ntype LPS = Vec<usize>;\n\n\n#[derive(Copy)]\npub struct KMP {\n m: usize\n}\n\n\nfn get_lps(pattern: &[u8]) -> LPS {\n let (m, mut q) = (pattern.len(), 0us);\n let mut lps: LPS = repeat(0).take(m).collect();\n for i in 1..m {\n while q > 0 && pattern[q] != pattern[i] {\n q = lps[q];\n }\n if pattern[q] == pattern[i] {\n q += 1;\n }\n lps[i] = q;\n }\n\n lps\n}\n\n\nstruct Delta {\n table: Vec<VecMap<usize>>\n}\n\n\nimpl Delta {\n fn new(pattern: &[u8], alphabet: Alphabet) -> Self {\n \/\/assert!(alphabet.is_word(pattern));\n let k = alphabet.max_symbol()\n .expect(\"Expecting non-empty alphabet.\") as usize + 1;\n let m = pattern.len();\n\n let mut init = VecMap::with_capacity(k);\n for c in alphabet.symbols.iter() {\n init.insert(c, 0);\n }\n *init.get_mut(&(pattern[0] as usize)).unwrap() = 1;\n\n let lps = get_lps(pattern);\n\n let mut table = Vec::with_capacity(m + 1);\n table.push(init);\n for q in 1..m+1 {\n let mut dq = VecMap::with_capacity(k);\n for c in alphabet.symbols.iter() {\n dq.insert(c, *table[lps[q - 1]].get(&c).unwrap());\n }\n if q < m {\n *dq.get_mut(&(pattern[q] as usize)).unwrap() = q;\n }\n table.push(dq);\n }\n\n Delta { table: table }\n }\n}\n\n\n\n\npub struct FindAll<'a> {\n kmp: KMP,\n q: usize,\n text: Enumerate<slice::Iter<'a, u8>>\n}\n\n\nimpl<'a> Iterator for FindAll<'a> {\n type Item = usize;\n\n fn next(&mut self) -> Option<usize> {\n for (i, &c) in self.text {\n \/\/ TODO self.q = self.kmp.delta(self.q, c);\n if self.q == self.kmp.m {\n return Some(i - self.kmp.m + 1);\n }\n }\n\n None\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{get_lps, Delta};\n use alphabets::Alphabet;\n\n #[test]\n fn test_get_lps() {\n let pattern = b\"ababaca\";\n let lps = get_lps(pattern);\n assert_eq!(lps, [0, 0, 1, 2, 3, 0, 1]);\n }\n\n #[test]\n fn test_delta() {\n let pattern = b\"ababaca\";\n let alphabet = Alphabet::new(pattern);\n let delta = Delta::new(pattern, alphabet);\n \n }\n}\n<commit_msg>Failing test.<commit_after>\nuse std::iter::{repeat, Enumerate};\nuse std::slice;\nuse std::collections::VecMap;\n\nuse alphabets::Alphabet;\n\n\ntype LPS = Vec<usize>;\n\n\n#[derive(Copy)]\npub struct KMP {\n m: usize\n}\n\n\nfn get_lps(pattern: &[u8]) -> LPS {\n let (m, mut q) = (pattern.len(), 0us);\n let mut lps: LPS = repeat(0).take(m).collect();\n for i in 1..m {\n while q > 0 && pattern[q] != pattern[i] {\n q = lps[q];\n }\n if pattern[q] == pattern[i] {\n q += 1;\n }\n lps[i] = q;\n }\n\n lps\n}\n\n\nstruct Delta {\n 
table: Vec<VecMap<usize>>\n}\n\n\nimpl Delta {\n fn new(pattern: &[u8], alphabet: Alphabet) -> Self {\n \/\/assert!(alphabet.is_word(pattern));\n let k = alphabet.max_symbol()\n .expect(\"Expecting non-empty alphabet.\") as usize + 1;\n let m = pattern.len();\n\n let mut init = VecMap::with_capacity(k);\n for c in alphabet.symbols.iter() {\n init.insert(c, 0);\n }\n *init.get_mut(&(pattern[0] as usize)).unwrap() = 1;\n\n let lps = get_lps(pattern);\n\n let mut table = Vec::with_capacity(m + 1);\n table.push(init);\n for q in 1..m+1 {\n let mut dq = VecMap::with_capacity(k);\n for c in alphabet.symbols.iter() {\n dq.insert(c, *table[lps[q - 1]].get(&c).unwrap());\n }\n if q < m {\n *dq.get_mut(&(pattern[q] as usize)).unwrap() = q;\n }\n table.push(dq);\n }\n\n Delta { table: table }\n }\n\n fn get(&self, q: usize, a: u8) -> usize {\n *self.table[q].get(&(a as usize)).expect(\"Missing symbol in alphabet (is the text a word of the given alphabet?)\")\n }\n}\n\n\n\n\npub struct FindAll<'a> {\n kmp: KMP,\n q: usize,\n text: Enumerate<slice::Iter<'a, u8>>\n}\n\n\nimpl<'a> Iterator for FindAll<'a> {\n type Item = usize;\n\n fn next(&mut self) -> Option<usize> {\n for (i, &c) in self.text {\n \/\/ TODO self.q = self.kmp.delta(self.q, c);\n if self.q == self.kmp.m {\n return Some(i - self.kmp.m + 1);\n }\n }\n\n None\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::{get_lps, Delta};\n use alphabets::Alphabet;\n\n #[test]\n fn test_get_lps() {\n let pattern = b\"ababaca\";\n let lps = get_lps(pattern);\n assert_eq!(lps, [0, 0, 1, 2, 3, 0, 1]);\n }\n\n #[test]\n fn test_delta() {\n let pattern = b\"abbab\";\n let alphabet = Alphabet::new(pattern);\n let delta = Delta::new(pattern, alphabet);\n assert_eq!(delta.get(0, b'a'), 1);\n assert_eq!(delta.get(0, b'b'), 0);\n assert_eq!(delta.get(1, b'a'), 1);\n assert_eq!(delta.get(1, b'b'), 2);\n assert_eq!(delta.get(2, b'a'), 1);\n assert_eq!(delta.get(2, b'b'), 3);\n assert_eq!(delta.get(3, b'a'), 4);\n assert_eq!(delta.get(3, b'b'), 0);\n assert_eq!(delta.get(4, b'a'), 1);\n assert_eq!(delta.get(4, b'b'), 5);\n assert_eq!(delta.get(5, b'a'), 1);\n assert_eq!(delta.get(5, b'b'), 3);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n}\n\nconst TEST_REPOS: &'static [Test] = &[\n Test {\n name: \"cargo\",\n repo: \"https:\/\/github.com\/rust-lang\/cargo\",\n sha: \"26288f799427f9cc6e8bdddd782a17a8156ebc64\",\n lock: None,\n },\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"16c858ec2901e2992fe5e529780f59fa8ed12903\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n },\n];\n\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\")).expect(\"\")\n .write_all(lockfile.as_bytes()).expect(\"\");\n }\n if !run_cargo_test(cargo, &dir) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path) -> bool {\n let status = Command::new(cargo_path)\n .arg(\"test\")\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<commit_msg>Update cargo version in cargotest<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::env;\nuse std::process::Command;\nuse std::path::{Path, PathBuf};\nuse std::fs::File;\nuse std::io::Write;\n\nstruct Test {\n repo: &'static str,\n name: &'static str,\n sha: &'static str,\n lock: Option<&'static str>,\n}\n\nconst TEST_REPOS: &'static [Test] = &[\n Test {\n name: \"cargo\",\n repo: \"https:\/\/github.com\/rust-lang\/cargo\",\n sha: \"7d79da08238e3d47e0bc4406155bdcc45ccb8c82\",\n lock: None,\n },\n Test {\n name: \"iron\",\n repo: \"https:\/\/github.com\/iron\/iron\",\n sha: \"16c858ec2901e2992fe5e529780f59fa8ed12903\",\n lock: Some(include_str!(\"lockfiles\/iron-Cargo.lock\")),\n },\n];\n\n\nfn main() {\n let args = env::args().collect::<Vec<_>>();\n let ref cargo = args[1];\n let out_dir = Path::new(&args[2]);\n let ref cargo = Path::new(cargo);\n\n for test in TEST_REPOS.iter().rev() {\n test_repo(cargo, out_dir, test);\n }\n}\n\nfn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {\n println!(\"testing {}\", test.repo);\n let dir = clone_repo(test, out_dir);\n if let Some(lockfile) = test.lock {\n File::create(&dir.join(\"Cargo.lock\")).expect(\"\")\n .write_all(lockfile.as_bytes()).expect(\"\");\n }\n if !run_cargo_test(cargo, &dir) {\n panic!(\"tests failed for {}\", test.repo);\n }\n}\n\nfn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {\n let out_dir = out_dir.join(test.name);\n\n if !out_dir.join(\".git\").is_dir() {\n let status = Command::new(\"git\")\n .arg(\"init\")\n .arg(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n \/\/ Try progressively deeper fetch depths to find the commit\n let mut found = false;\n for depth in &[0, 1, 10, 100, 1000, 100000] {\n if *depth > 0 {\n let status = Command::new(\"git\")\n .arg(\"fetch\")\n .arg(test.repo)\n .arg(\"master\")\n .arg(&format!(\"--depth={}\", depth))\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n assert!(status.success());\n }\n\n let status = Command::new(\"git\")\n .arg(\"reset\")\n .arg(test.sha)\n .arg(\"--hard\")\n .current_dir(&out_dir)\n .status()\n .expect(\"\");\n\n if status.success() {\n found = true;\n break;\n }\n }\n\n if !found {\n panic!(\"unable to find commit {}\", test.sha)\n }\n let status = Command::new(\"git\")\n .arg(\"clean\")\n .arg(\"-fdx\")\n .current_dir(&out_dir)\n .status()\n .unwrap();\n assert!(status.success());\n\n out_dir\n}\n\nfn run_cargo_test(cargo_path: &Path, crate_path: &Path) -> bool {\n let status = Command::new(cargo_path)\n .arg(\"test\")\n \/\/ Disable rust-lang\/cargo's cross-compile tests\n .env(\"CFG_DISABLE_CROSS_TESTS\", \"1\")\n .current_dir(crate_path)\n .status()\n .expect(\"\");\n\n status.success()\n}\n<|endoftext|>"} {"text":"<commit_before>#![cfg_attr(all(test, feature = \"nightly\"), feature(test))] \/\/ we only need test feature when testing\n\n#[macro_use] extern crate log;\n\nextern crate syntex_syntax;\nextern crate toml;\nextern crate env_logger;\n\nextern crate racer;\n\n#[cfg(not(test))]\nuse racer::core;\n#[cfg(not(test))]\nuse racer::util;\n#[cfg(not(test))]\nuse racer::core::Match;\n#[cfg(not(test))]\nuse racer::util::{getline, path_exists};\n#[cfg(not(test))]\nuse racer::nameres::{do_file_search, do_external_search, PATH_SEP};\n#[cfg(not(test))]\nuse racer::scopes;\n#[cfg(not(test))]\nuse std::path::Path;\n\n\n#[cfg(not(test))]\nfn match_with_snippet_fn(m: Match) {\n let (linenum, charnum) = scopes::point_to_coords_from_file(&m.filepath, m.point).unwrap();\n if m.matchstr == \"\" {\n 
panic!(\"MATCHSTR is empty - waddup?\");\n }\n\n let snippet = racer::snippets::snippet_for_match(&m);\n println!(\"MATCH {};{};{};{};{};{:?};{}\", m.matchstr,\n snippet,\n linenum.to_string(),\n charnum.to_string(),\n m.filepath.to_str().unwrap(),\n m.mtype,\n m.contextstr\n );\n}\n\n#[cfg(not(test))]\nfn match_fn(m: Match) {\n let (linenum, charnum) = scopes::point_to_coords_from_file(&m.filepath, m.point).unwrap();\n println!(\"MATCH {},{},{},{},{:?},{}\", m.matchstr,\n linenum.to_string(),\n charnum.to_string(),\n m.filepath.to_str().unwrap(),\n m.mtype,\n m.contextstr\n );\n}\n\n#[cfg(not(test))]\nfn complete(match_found: &Fn(Match), args: &[String]) {\n if args.len() < 1 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n match args[1].parse::<usize>() {\n Ok(linenum) => {\n \/\/ input: linenum, colnum, fname\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let charnum = args[2].parse::<usize>().unwrap();\n let fname = &args[3];\n let substitute_file = Path::new(match args.len() > 4 {\n true => &args[4],\n false => fname\n });\n let fpath = Path::new(fname);\n let src = core::load_file(&substitute_file);\n let line = &*getline(&substitute_file, linenum);\n let (start, pos) = util::expand_ident(line, charnum);\n println!(\"PREFIX {},{},{}\", start, pos, &line[start..pos]);\n\n let session = core::Session::from_path(&fpath, &substitute_file);\n let point = scopes::coords_to_point(&*src, linenum, charnum);\n for m in core::complete_from_file(&*src, &fpath, point, &session) {\n match_found(m);\n }\n println!(\"END\");\n }\n Err(_) => {\n \/\/ input: a command line string passed in\n let arg = &args[1];\n let it = arg.split(\"::\");\n let p: Vec<&str> = it.collect();\n\n for m in do_file_search(p[0], &Path::new(\".\")) {\n if p.len() == 1 {\n match_found(m);\n } else {\n for m in do_external_search(&p[1..], &m.filepath, m.point, core::SearchType::StartsWith, core::Namespace::BothNamespaces, &m.session) {\n match_found(m);\n }\n }\n }\n }\n }\n}\n\n#[cfg(not(test))]\nfn prefix(args: &[String]) {\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let linenum = args[1].parse::<usize>().unwrap();\n let charnum = args[2].parse::<usize>().unwrap();\n let fname = &args[3];\n\n \/\/ print the start, end, and the identifier prefix being matched\n let path = Path::new(fname);\n let line = &*getline(&path, linenum);\n let (start, pos) = util::expand_ident(line, charnum);\n println!(\"PREFIX {},{},{}\", start, pos, &line[start..pos]);\n}\n\n#[cfg(not(test))]\nfn find_definition(args: &[String]) {\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let linenum = args[1].parse::<usize>().unwrap();\n let charnum = args[2].parse::<usize>().unwrap();\n let fname = &args[3];\n let substitute_file = Path::new(match args.len() > 4 {\n true => &args[4],\n false => fname\n });\n let fpath = Path::new(&fname);\n let session = core::Session::from_path(&fpath, &substitute_file);\n let src = core::load_file(&substitute_file);\n let pos = scopes::coords_to_point(&*src, linenum, charnum);\n\n core::find_definition(&*src, &fpath, pos, &session).map(match_fn);\n println!(\"END\");\n}\n\n#[cfg(not(test))]\nfn print_usage() {\n let program = std::env::args().next().unwrap().clone();\n println!(\"usage: {} complete linenum charnum fname [substitute_file]\", program);\n println!(\"or: {} find-definition 
linenum charnum fname [substitute_file]\", program);\n println!(\"or: {} complete fullyqualifiedname (e.g. std::io::)\", program);\n println!(\"or: {} prefix linenum charnum fname\", program);\n println!(\"or replace complete with complete-with-snippet for more detailed completions.\");\n println!(\"or: {} daemon - to start a process that receives the above commands via stdin\", program);\n}\n\n#[cfg(not(test))]\nfn check_rust_src_env_var() {\n if let Ok(srcpaths) = std::env::var(\"RUST_SRC_PATH\") {\n let v = srcpaths.split(PATH_SEP).collect::<Vec<_>>();\n if !v.is_empty() {\n let f = Path::new(v[0]);\n if !path_exists(f) {\n println!(\"racer can't find the directory pointed to by the RUST_SRC_PATH variable \\\"{}\\\". Try using an absolute fully qualified path and make sure it points to the src directory of a rust checkout - e.g. \\\"\/home\/foouser\/src\/rust\/src\\\".\", srcpaths);\n std::process::exit(1);\n } else if !path_exists(f.join(\"libstd\")) {\n println!(\"Unable to find libstd under RUST_SRC_PATH. N.B. RUST_SRC_PATH variable needs to point to the *src* directory inside a rust checkout e.g. \\\"\/home\/foouser\/src\/rust\/src\\\". Current value \\\"{}\\\"\", srcpaths);\n std::process::exit(1);\n }\n }\n } else {\n println!(\"RUST_SRC_PATH environment variable must be set to point to the src directory of a rust checkout. E.g. \\\"\/home\/foouser\/src\/rust\/src\\\"\");\n std::process::exit(1);\n }\n}\n\n#[cfg(not(test))]\nfn daemon() {\n use std::io;\n let mut input = String::new();\n while let Ok(n) = io::stdin().read_line(&mut input) {\n if n == 0 {\n break;\n }\n let args: Vec<String> = input.split(\" \").map(|s| s.trim().to_string()).collect();\n run(&args);\n \n input.clear();\n }\n}\n\n\n#[cfg(not(test))]\nfn main() {\n env_logger::init().unwrap();\n check_rust_src_env_var();\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() == 1 {\n print_usage();\n std::process::exit(1);\n }\n\n let args = &args[1..];\n run(args);\n}\n\nfn run(args: &[String]) {\n let command = &args[0];\n match &command[..] 
{\n \"daemon\" => daemon(),\n \"prefix\" => prefix(&args),\n \"complete\" => complete(&match_fn, &args),\n \"complete-with-snippet\" => complete(&match_with_snippet_fn, &args),\n \"find-definition\" => find_definition(&args),\n \"help\" => print_usage(),\n cmd => {\n println!(\"Sorry, I didn't understand command {}\", cmd);\n print_usage();\n std::process::exit(1);\n }\n }\n}\n<commit_msg>Oops, get tests working again<commit_after>#![cfg_attr(all(test, feature = \"nightly\"), feature(test))] \/\/ we only need test feature when testing\n\n#[macro_use] extern crate log;\n\nextern crate syntex_syntax;\nextern crate toml;\nextern crate env_logger;\n\nextern crate racer;\n\n#[cfg(not(test))]\nuse racer::core;\n#[cfg(not(test))]\nuse racer::util;\n#[cfg(not(test))]\nuse racer::core::Match;\n#[cfg(not(test))]\nuse racer::util::{getline, path_exists};\n#[cfg(not(test))]\nuse racer::nameres::{do_file_search, do_external_search, PATH_SEP};\n#[cfg(not(test))]\nuse racer::scopes;\n#[cfg(not(test))]\nuse std::path::Path;\n\n\n#[cfg(not(test))]\nfn match_with_snippet_fn(m: Match) {\n let (linenum, charnum) = scopes::point_to_coords_from_file(&m.filepath, m.point).unwrap();\n if m.matchstr == \"\" {\n panic!(\"MATCHSTR is empty - waddup?\");\n }\n\n let snippet = racer::snippets::snippet_for_match(&m);\n println!(\"MATCH {};{};{};{};{};{:?};{}\", m.matchstr,\n snippet,\n linenum.to_string(),\n charnum.to_string(),\n m.filepath.to_str().unwrap(),\n m.mtype,\n m.contextstr\n );\n}\n\n#[cfg(not(test))]\nfn match_fn(m: Match) {\n let (linenum, charnum) = scopes::point_to_coords_from_file(&m.filepath, m.point).unwrap();\n println!(\"MATCH {},{},{},{},{:?},{}\", m.matchstr,\n linenum.to_string(),\n charnum.to_string(),\n m.filepath.to_str().unwrap(),\n m.mtype,\n m.contextstr\n );\n}\n\n#[cfg(not(test))]\nfn complete(match_found: &Fn(Match), args: &[String]) {\n if args.len() < 1 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n match args[1].parse::<usize>() {\n Ok(linenum) => {\n \/\/ input: linenum, colnum, fname\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let charnum = args[2].parse::<usize>().unwrap();\n let fname = &args[3];\n let substitute_file = Path::new(match args.len() > 4 {\n true => &args[4],\n false => fname\n });\n let fpath = Path::new(fname);\n let src = core::load_file(&substitute_file);\n let line = &*getline(&substitute_file, linenum);\n let (start, pos) = util::expand_ident(line, charnum);\n println!(\"PREFIX {},{},{}\", start, pos, &line[start..pos]);\n\n let session = core::Session::from_path(&fpath, &substitute_file);\n let point = scopes::coords_to_point(&*src, linenum, charnum);\n for m in core::complete_from_file(&*src, &fpath, point, &session) {\n match_found(m);\n }\n println!(\"END\");\n }\n Err(_) => {\n \/\/ input: a command line string passed in\n let arg = &args[1];\n let it = arg.split(\"::\");\n let p: Vec<&str> = it.collect();\n\n for m in do_file_search(p[0], &Path::new(\".\")) {\n if p.len() == 1 {\n match_found(m);\n } else {\n for m in do_external_search(&p[1..], &m.filepath, m.point, core::SearchType::StartsWith, core::Namespace::BothNamespaces, &m.session) {\n match_found(m);\n }\n }\n }\n }\n }\n}\n\n#[cfg(not(test))]\nfn prefix(args: &[String]) {\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let linenum = args[1].parse::<usize>().unwrap();\n let charnum = args[2].parse::<usize>().unwrap();\n let 
fname = &args[3];\n\n \/\/ print the start, end, and the identifier prefix being matched\n let path = Path::new(fname);\n let line = &*getline(&path, linenum);\n let (start, pos) = util::expand_ident(line, charnum);\n println!(\"PREFIX {},{},{}\", start, pos, &line[start..pos]);\n}\n\n#[cfg(not(test))]\nfn find_definition(args: &[String]) {\n if args.len() < 4 {\n println!(\"Provide more arguments!\");\n print_usage();\n std::process::exit(1);\n }\n let linenum = args[1].parse::<usize>().unwrap();\n let charnum = args[2].parse::<usize>().unwrap();\n let fname = &args[3];\n let substitute_file = Path::new(match args.len() > 4 {\n true => &args[4],\n false => fname\n });\n let fpath = Path::new(&fname);\n let session = core::Session::from_path(&fpath, &substitute_file);\n let src = core::load_file(&substitute_file);\n let pos = scopes::coords_to_point(&*src, linenum, charnum);\n\n core::find_definition(&*src, &fpath, pos, &session).map(match_fn);\n println!(\"END\");\n}\n\n#[cfg(not(test))]\nfn print_usage() {\n let program = std::env::args().next().unwrap().clone();\n println!(\"usage: {} complete linenum charnum fname [substitute_file]\", program);\n println!(\"or: {} find-definition linenum charnum fname [substitute_file]\", program);\n println!(\"or: {} complete fullyqualifiedname (e.g. std::io::)\", program);\n println!(\"or: {} prefix linenum charnum fname\", program);\n println!(\"or replace complete with complete-with-snippet for more detailed completions.\");\n println!(\"or: {} daemon - to start a process that receives the above commands via stdin\", program);\n}\n\n#[cfg(not(test))]\nfn check_rust_src_env_var() {\n if let Ok(srcpaths) = std::env::var(\"RUST_SRC_PATH\") {\n let v = srcpaths.split(PATH_SEP).collect::<Vec<_>>();\n if !v.is_empty() {\n let f = Path::new(v[0]);\n if !path_exists(f) {\n println!(\"racer can't find the directory pointed to by the RUST_SRC_PATH variable \\\"{}\\\". Try using an absolute fully qualified path and make sure it points to the src directory of a rust checkout - e.g. \\\"\/home\/foouser\/src\/rust\/src\\\".\", srcpaths);\n std::process::exit(1);\n } else if !path_exists(f.join(\"libstd\")) {\n println!(\"Unable to find libstd under RUST_SRC_PATH. N.B. RUST_SRC_PATH variable needs to point to the *src* directory inside a rust checkout e.g. \\\"\/home\/foouser\/src\/rust\/src\\\". Current value \\\"{}\\\"\", srcpaths);\n std::process::exit(1);\n }\n }\n } else {\n println!(\"RUST_SRC_PATH environment variable must be set to point to the src directory of a rust checkout. E.g. \\\"\/home\/foouser\/src\/rust\/src\\\"\");\n std::process::exit(1);\n }\n}\n\n#[cfg(not(test))]\nfn daemon() {\n use std::io;\n let mut input = String::new();\n while let Ok(n) = io::stdin().read_line(&mut input) {\n if n == 0 {\n break;\n }\n let args: Vec<String> = input.split(\" \").map(|s| s.trim().to_string()).collect();\n run(&args);\n \n input.clear();\n }\n}\n\n\n#[cfg(not(test))]\nfn main() {\n env_logger::init().unwrap();\n check_rust_src_env_var();\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() == 1 {\n print_usage();\n std::process::exit(1);\n }\n\n let args = &args[1..];\n run(args);\n}\n\n#[cfg(not(test))]\nfn run(args: &[String]) {\n let command = &args[0];\n match &command[..] 
{\n \"daemon\" => daemon(),\n \"prefix\" => prefix(&args),\n \"complete\" => complete(&match_fn, &args),\n \"complete-with-snippet\" => complete(&match_with_snippet_fn, &args),\n \"find-definition\" => find_definition(&args),\n \"help\" => print_usage(),\n cmd => {\n println!(\"Sorry, I didn't understand command {}\", cmd);\n print_usage();\n std::process::exit(1);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test: Samurai Sudoku.<commit_after>\/\/! Samurai Sudoku.\n\/\/!\n\/\/! https:\/\/en.wikipedia.org\/wiki\/Sudoku#Variants\n\/\/! http:\/\/www.samurai-sudoku.com\/#ai\n\nextern crate puzzle_solver;\n\nuse puzzle_solver::{Puzzle,Solution,VarToken};\n\nconst SQRT_SIZE: usize = 3;\nconst SIZE: usize = 9;\nconst X: i32 = -1;\ntype Board = [[i32; SIZE + SQRT_SIZE + SIZE]; SIZE + SQRT_SIZE + SIZE];\ntype SudokuVars = Vec<Vec<VarToken>>;\ntype SamuraiVars = (SudokuVars, SudokuVars, SudokuVars, SudokuVars, SudokuVars);\n\nfn make_sudoku(sys: &mut Puzzle) -> SudokuVars {\n let vars = sys.new_vars_with_candidates_2d(SIZE, SIZE, &[1,2,3,4,5,6,7,8,9]);\n\n for y in 0..SIZE {\n sys.all_different(&vars[y]);\n }\n\n for x in 0..SIZE {\n sys.all_different(vars.iter().map(|row| &row[x]));\n }\n\n for block in 0..SIZE {\n let x0 = SQRT_SIZE * (block % SQRT_SIZE);\n let y0 = SQRT_SIZE * (block \/ SQRT_SIZE);\n sys.all_different((0..SIZE).map(|n|\n &vars[y0 + (n \/ SQRT_SIZE)][x0 + (n % SQRT_SIZE)]));\n }\n\n vars\n}\n\nfn make_samurai_sudoku(board: &Board) -> (Puzzle, SamuraiVars) {\n let set = |sys: &mut Puzzle, var, val| if val != 0 { sys.set_value(var, val) };\n\n let mut sys = Puzzle::new();\n let tl = make_sudoku(&mut sys);\n let tr = make_sudoku(&mut sys);\n let bl = make_sudoku(&mut sys);\n let br = make_sudoku(&mut sys);\n let mid = make_sudoku(&mut sys);\n\n for y in 0..SQRT_SIZE {\n for x in 0..SQRT_SIZE {\n sys.equals(mid[0 * SQRT_SIZE + y][0 * SQRT_SIZE + x], tl[2 * SQRT_SIZE + y][2 * SQRT_SIZE + x]);\n sys.equals(mid[0 * SQRT_SIZE + y][2 * SQRT_SIZE + x], tr[2 * SQRT_SIZE + y][0 * SQRT_SIZE + x]);\n sys.equals(mid[2 * SQRT_SIZE + y][0 * SQRT_SIZE + x], bl[0 * SQRT_SIZE + y][2 * SQRT_SIZE + x]);\n sys.equals(mid[2 * SQRT_SIZE + y][2 * SQRT_SIZE + x], br[0 * SQRT_SIZE + y][0 * SQRT_SIZE + x]);\n }\n }\n\n for y in 0..SIZE {\n for x in 0..SIZE {\n set(&mut sys, tl[y][x], board[y][x]);\n set(&mut sys, tr[y][x], board[y][SIZE + SQRT_SIZE + x]);\n set(&mut sys, bl[y][x], board[SIZE + SQRT_SIZE + y][x]);\n set(&mut sys, br[y][x], board[SIZE + SQRT_SIZE + y][SIZE + SQRT_SIZE + x]);\n set(&mut sys, mid[y][x], board[2 * SQRT_SIZE + y][2 * SQRT_SIZE + x]);\n }\n }\n\n (sys, (tl, tr, bl, br, mid))\n}\n\nfn print_samurai_sudoku(dict: &Solution, vars: &SamuraiVars) {\n let &(ref tl, ref tr, ref bl, ref br, ref mid) = vars;\n let pr3 = |a: &[VarToken], j| print!(\" {}{}{}\", dict[a[j]], dict[a[j + 1]], dict[a[j + 2]]);\n let pr9 = |a| { pr3(a, 0); pr3(a, 3); pr3(a, 6); };\n let gap = || print!(\" \");\n\n for i in 0..SIZE {\n pr9(&tl[i]);\n if 2 * SQRT_SIZE <= i {\n pr3(&mid[i - 2 * SQRT_SIZE], 3);\n } else {\n gap();\n }\n pr9(&tr[i]);\n println!();\n }\n\n for i in SQRT_SIZE..(2 * SQRT_SIZE) {\n gap();\n gap();\n pr9(&mid[i]);\n println!();\n }\n\n for i in 0..SIZE {\n pr9(&bl[i]);\n if i < SQRT_SIZE {\n pr3(&mid[2 * SQRT_SIZE + i], 3);\n } else {\n gap();\n }\n pr9(&br[i]);\n println!();\n }\n}\n\nfn verify_samurai_sudoku(dict: &Solution, vars: &SamuraiVars, expected: &Board) {\n let &(ref tl, ref tr, ref bl, ref br, ref mid) = vars;\n for i in 0..SIZE {\n for j in 0..SIZE {\n 
assert_eq!(dict[tl[i][j]], expected[i][j]);\n assert_eq!(dict[tr[i][j]], expected[i][SIZE + SQRT_SIZE + j]);\n assert_eq!(dict[bl[i][j]], expected[SIZE + SQRT_SIZE + i][j]);\n assert_eq!(dict[br[i][j]], expected[SIZE + SQRT_SIZE + i][SIZE + SQRT_SIZE + j]);\n assert_eq!(dict[mid[i][j]], expected[2 * SQRT_SIZE + i][2 * SQRT_SIZE + j]);\n }\n }\n}\n\n#[test]\nfn samuraisudoku_easy() {\n let puzzle = [\n [ 0,0,3, 0,0,0, 2,0,0, X,X,X, 0,0,6, 0,0,0, 2,0,0 ],\n [ 0,2,0, 4,0,8, 0,3,0, X,X,X, 0,3,0, 4,0,2, 0,8,0 ],\n [ 8,0,0, 0,9,0, 0,0,4, X,X,X, 8,0,0, 0,1,0, 0,0,4 ],\n\n [ 0,5,0, 6,0,1, 0,2,0, X,X,X, 0,2,0, 1,0,7, 0,9,0 ],\n [ 0,0,8, 0,0,0, 6,0,0, X,X,X, 0,0,9, 0,0,0, 8,0,0 ],\n [ 0,7,0, 8,0,4, 0,1,0, X,X,X, 0,8,0, 5,0,9, 0,4,0 ],\n\n [ 1,0,0, 0,7,0, 0,0,0, 0,0,0, 0,0,0, 0,7,0, 0,0,5 ],\n [ 0,4,0, 1,0,2, 0,0,0, 0,0,0, 0,0,0, 2,0,8, 0,7,0 ],\n [ 0,0,9, 0,0,0, 0,0,0, 0,6,0, 0,0,0, 0,0,0, 1,0,0 ],\n\n [ X,X,X, X,X,X, 0,0,0, 5,0,1, 0,0,0, X,X,X, X,X,X ],\n [ X,X,X, X,X,X, 0,0,9, 0,0,0, 6,0,0, X,X,X, X,X,X ],\n [ X,X,X, X,X,X, 0,0,0, 3,0,6, 0,0,0, X,X,X, X,X,X ],\n\n [ 0,0,8, 0,0,0, 0,0,0, 0,7,0, 0,0,0, 0,0,0, 4,0,0 ],\n [ 0,4,0, 5,0,1, 0,0,0, 0,0,0, 0,0,0, 9,0,5, 0,7,0 ],\n [ 6,0,0, 0,2,0, 0,0,0, 0,0,0, 0,0,0, 0,6,0, 0,0,9 ],\n\n [ 0,9,0, 1,0,3, 0,7,0, X,X,X, 0,7,0, 5,0,1, 0,9,0 ],\n [ 0,0,5, 0,0,0, 1,0,0, X,X,X, 0,0,3, 0,0,0, 6,0,0 ],\n [ 0,1,0, 6,0,8, 0,9,0, X,X,X, 0,2,0, 8,0,6, 0,1,0 ],\n\n [ 5,0,0, 0,7,0, 0,0,6, X,X,X, 7,0,0, 0,2,0, 0,0,5 ],\n [ 0,2,0, 3,0,5, 0,1,0, X,X,X, 0,9,0, 6,0,4, 0,3,0 ],\n [ 0,0,6, 0,0,0, 2,0,0, X,X,X, 0,0,4, 0,0,0, 1,0,0 ] ];\n\n let expected = [\n [ 4,9,3, 7,1,5, 2,6,8, X,X,X, 9,4,6, 8,3,5, 2,1,7 ],\n [ 5,2,7, 4,6,8, 9,3,1, X,X,X, 1,3,7, 4,9,2, 5,8,6 ],\n [ 8,6,1, 2,9,3, 5,7,4, X,X,X, 8,5,2, 7,1,6, 9,3,4 ],\n\n [ 9,5,4, 6,3,1, 8,2,7, X,X,X, 5,2,4, 1,8,7, 6,9,3 ],\n [ 3,1,8, 9,2,7, 6,4,5, X,X,X, 7,1,9, 6,4,3, 8,5,2 ],\n [ 2,7,6, 8,5,4, 3,1,9, X,X,X, 6,8,3, 5,2,9, 7,4,1 ],\n\n [ 1,8,2, 3,7,9, 4,5,6, 7,1,3, 2,9,8, 3,7,1, 4,6,5 ],\n [ 6,4,5, 1,8,2, 7,9,3, 8,5,2, 4,6,1, 2,5,8, 3,7,9 ],\n [ 7,3,9, 5,4,6, 1,8,2, 9,6,4, 3,7,5, 9,6,4, 1,2,8 ],\n\n [ X,X,X, X,X,X, 6,4,8, 5,9,1, 7,2,3, X,X,X, X,X,X ],\n [ X,X,X, X,X,X, 3,1,9, 2,8,7, 6,5,4, X,X,X, X,X,X ],\n [ X,X,X, X,X,X, 2,7,5, 3,4,6, 1,8,9, X,X,X, X,X,X ],\n\n [ 1,7,8, 9,3,6, 5,2,4, 1,7,8, 9,3,6, 2,8,7, 4,5,1 ],\n [ 2,4,3, 5,8,1, 9,6,7, 4,3,5, 8,1,2, 9,4,5, 3,7,6 ],\n [ 6,5,9, 4,2,7, 8,3,1, 6,2,9, 5,4,7, 1,6,3, 8,2,9 ],\n\n [ 8,9,2, 1,4,3, 6,7,5, X,X,X, 6,7,8, 5,3,1, 2,9,4 ],\n [ 4,6,5, 7,9,2, 1,8,3, X,X,X, 1,5,3, 4,9,2, 6,8,7 ],\n [ 3,1,7, 6,5,8, 4,9,2, X,X,X, 4,2,9, 8,7,6, 5,1,3 ],\n\n [ 5,8,1, 2,7,9, 3,4,6, X,X,X, 7,6,1, 3,2,8, 9,4,5 ],\n [ 9,2,4, 3,6,5, 7,1,8, X,X,X, 2,9,5, 6,1,4, 7,3,8 ],\n [ 7,3,6, 8,1,4, 2,5,9, X,X,X, 3,8,4, 7,5,9, 1,6,2 ] ];\n\n let (mut sys, vars) = make_samurai_sudoku(&puzzle);\n let dict = sys.solve_any().expect(\"solution\");\n print_samurai_sudoku(&dict, &vars);\n verify_samurai_sudoku(&dict, &vars, &expected);\n println!(\"samuraisudoku_easy: {} guesses\", sys.num_guesses());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Import early l20n FTL parsing code.<commit_after>pub enum Expression {\n IdentifierExpression {\n name: String,\n },\n}\n\npub enum PatternElement {\n TextElement {\n value: String,\n },\n Placeable {\n expressions: Vec<Expression>,\n },\n}\n\npub enum Value {\n Pattern {\n source: String,\n elements: Vec<PatternElement>,\n },\n}\n\npub enum Entry {\n Entity {\n id: Identifier,\n value: Value,\n },\n}\n\npub struct Identifier {\n pub name: String,\n}\n\npub struct 
Parser<'a> {\n source: std::str::Chars<'a>,\n ch: Option<char>,\n pos: u16,\n}\n\nimpl<'a> Parser<'a> {\n pub fn new(source: &'a str) -> Parser<'a> {\n Parser {\n source: source.chars(),\n ch: None,\n pos: 0,\n }\n }\n\n fn bump(&mut self) {\n self.ch = self.source.next();\n\n self.pos += 1;\n }\n\n fn ch_is(&self, ch: char) -> bool {\n self.ch == Some(ch)\n }\n\n fn get_ws(&mut self) {\n while self.ch_is(' ') || self.ch_is('\\n') || self.ch_is('\\t') || self.ch_is('\\r') {\n self.bump();\n }\n }\n\n fn get_line_ws(&mut self) {\n while self.ch_is(' ') || self.ch_is('\\t') {\n self.bump();\n }\n }\n\n pub fn parse(&mut self) -> Vec<Entry> {\n let mut entries: Vec<Entry> = Vec::new();\n\n self.get_ws();\n\n self.bump();\n\n loop {\n if self.ch == None {\n break;\n }\n\n entries.push(self.get_entry());\n self.get_ws();\n }\n entries\n }\n\n fn get_entry(&mut self) -> Entry {\n let val = self.get_entity();\n\n val\n }\n\n fn get_entity(&mut self) -> Entry {\n let id = self.get_identifier();\n self.get_line_ws();\n\n if !self.ch_is('=') {\n panic!();\n }\n self.bump();\n\n self.get_line_ws();\n\n let value = self.get_pattern();\n\n Entry::Entity {\n id: id,\n value: value,\n }\n }\n\n fn get_identifier(&mut self) -> Identifier {\n let mut name = String::new();\n\n let ch = match self.ch {\n Some(c) => c,\n None => panic!(),\n };\n\n match ch {\n 'a'...'z' | 'A'...'Z' | '_' => name.push(ch),\n _ => return Identifier { name: name },\n }\n self.bump();\n\n loop {\n let ch = match self.ch {\n Some(c) => c,\n None => break,\n };\n\n match ch {\n 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' | '-' => name.push(ch),\n _ => break,\n }\n self.bump();\n }\n\n Identifier { name: name }\n }\n\n fn get_pattern(&mut self) -> Value {\n let mut buffer = String::new();\n let mut source = String::new();\n let mut content = vec![];\n let mut quote_delimited: bool = false;\n let mut first_line = true;\n\n if self.ch_is('\"') {\n quote_delimited = true;\n }\n\n loop {\n match self.ch {\n Some(c) if c == '\\n' => {\n if quote_delimited {\n panic!(\"Unclosed string\");\n }\n self.bump();\n self.get_line_ws();\n\n if !self.ch_is('|') {\n break;\n }\n if first_line && buffer.len() != 0 {\n panic!(\"Multiline string should have the ID line empty\");\n }\n first_line = false;\n self.bump();\n if self.ch_is(' ') {\n self.bump();\n }\n if buffer.len() != 0 {\n buffer.push('\\n');\n }\n continue;\n }\n Some(c) if c == '\"' => {\n self.bump();\n quote_delimited = false;\n break;\n }\n Some(c) => source.push(c),\n None => break,\n }\n match self.ch {\n Some(c) => buffer.push(c),\n None => continue,\n };\n self.bump();\n }\n\n if quote_delimited {\n panic!(\"Unclosed string\");\n }\n\n if buffer.len() != 0 {\n \/\/ source.append(buffer);\n content.push(PatternElement::TextElement { value: source.clone() });\n }\n\n if content.len() == 0 {\n \/\/ return Value::Pattern(source: source, elements: content);\n }\n\n content.push(PatternElement::TextElement { value: source.clone() });\n\n Value::Pattern {\n source: source,\n elements: content,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #113<commit_after>#[crate_type = \"rlib\"];\n\nuse std::vec;\nuse std::iter::AdditiveIterator;\n\npub static EXPECTED_ANSWER: &'static str = \"51161058134250\";\n\nfn num_increasing(len: uint) -> uint {\n let mut buf = vec::from_fn(len, |_| [0u, ..10]);\n\n for d in range(0, buf[0].len()) {\n buf[0][d] = 1;\n }\n for i in range(1, len) {\n let mut s = 0;\n for d in range(0, buf[i].len()).invert() {\n s += buf[i - 1][d];\n 
buf[i][d] = s;\n }\n }\n\n let sum = range(0, buf[len - 1].len())\n .map(|d| buf[len - 1][d])\n .sum();\n sum - 1 \/\/ all zero\n}\n\nfn num_decreasing(len: uint) -> uint {\n let mut buf = vec::from_fn(len, |_| [0u, ..11]); \/\/ 0, 1, 2, .., 9, A\n\n for d in range(0, buf[0].len()) {\n buf[0][d] = 1;\n }\n for i in range(1, len) {\n let mut s = 0;\n for d in range(0, buf[i].len()) {\n s += buf[i - 1][d];\n buf[i][d] = s;\n }\n }\n\n let sum = range(0, buf[len - 1].len())\n .map(|d| buf[len - 1][d])\n .sum();\n\n sum - len \/\/ A のみからなるものを取り除く\n - 1 \/\/ all zero\n}\n\nfn num_nonbouncy(len: uint) -> uint {\n let num_incr = num_increasing(len);\n let num_decr = num_decreasing(len);\n let num_incr_and_decr = 9 * len;\n num_incr + num_decr - num_incr_and_decr\n}\n\npub fn solve() -> ~str {\n num_nonbouncy(100).to_str()\n}\n\n#[cfg(test)]\nmod test {\n use super::num_nonbouncy;\n\n #[test]\n fn test_nonbouncy() {\n assert_eq!(12951, num_nonbouncy(6));\n assert_eq!(277032, num_nonbouncy(10));\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add problem #74<commit_after>use core::hashmap::{ HashMap };\nuse common::problem::{ Problem };\n\npub static problem: Problem<'static> = Problem {\n id: 74,\n answer: \"402\",\n solver: solve\n};\n\nenum Length {\n Loop(uint), Chain(uint), Unknown\n}\n\n#[inline(always)]\nfn fact_sum(mut n: uint, fs: &[uint, ..10]) -> uint {\n if n == 0 { return 1; }\n\n let mut sum = 0;\n while n > 0 {\n sum += fs[n % 10];\n n \/= 10;\n }\n return sum;\n}\n\n#[inline(always)]\nfn get_chain_len(\n n: uint, map: &mut[Length], fs: &[uint, ..10]\n) -> uint {\n let mut chain_map = HashMap::new::<uint, uint>();\n let mut idx = n;\n let mut chain_len = 0;\n let mut loop_len = 0;\n\n loop {\n match map[idx] {\n Loop(c) => { loop_len += c; break; }\n Chain(c) => { chain_len += c; break; }\n Unknown => {\n match chain_map.find(&idx).map(|k| **k) {\n Some(chain_idx) => {\n loop_len = chain_len - chain_idx;\n chain_len = chain_idx;\n break;\n }\n None => {\n chain_map.insert(idx, chain_len);\n idx = fact_sum(idx, fs);\n chain_len += 1;\n }\n }\n }\n }\n }\n\n for chain_map.each |&(&key, &idx)| {\n if idx >= chain_len {\n map[key] = Loop(loop_len);\n } else {\n map[key] = Chain(loop_len + chain_len - idx);\n }\n }\n\n return chain_len + loop_len;\n}\n\nfn solve() -> ~str {\n let limit = 1000000;\n let factorial = {\n let mut val = [1, ..10];\n for uint::range(1, 10) |i| {\n val[i] = val[i - 1] * i;\n }\n val\n };\n\n let mut map = vec::from_elem(factorial[9] * 6 + 1, Unknown);\n let mut cnt = 0u;\n for uint::range(1, limit + 1) |n| {\n let len = get_chain_len(n, map, &factorial);\n if len == 60 { cnt += 1; }\n }\n return cnt.to_str();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor(syncfile): pass key by reference to various utility functions<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>`overflowing_shr` breaks sign transaction logic on i868<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a test case for #507<commit_after>\n\/* \n This is a test case for Issue 507.\n\n https:\/\/github.com\/graydon\/rust\/issues\/507\n*\/\n\nuse std;\n\nimport std::task::join;\n\nfn grandchild(chan[int] c) {\n c <| 42;\n}\n\nfn child(chan[int] c) {\n auto _grandchild = spawn grandchild(c);\n join(_grandchild);\n}\n\nfn main() {\n let port[int] p = port();\n\n auto _child = spawn child(chan(p));\n \n let int x;\n p |> x;\n\n log x;\n\n assert(x == 42);\n\n join(_child);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 
The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(nll)]\nstruct FancyNum {\n num: u8,\n}\n\nfn main() {\n let mut fancy = FancyNum{ num: 5 };\n let fancy_ref = &(&mut fancy);\n fancy_ref.num = 6;\n println!(\"{}\", fancy_ref.num);\n}<commit_msg>fix tidy issues<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n#![feature(nll)]\nstruct FancyNum {\n num: u8,\n}\n\nfn main() {\n let mut fancy = FancyNum{ num: 5 };\n let fancy_ref = &(&mut fancy);\n fancy_ref.num = 6;\n println!(\"{}\", fancy_ref.num);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add basic stepper example<commit_after>\/\/ examples\/blinky.rs\n\/\/! Blinks an LED\n\n#![feature(const_fn)]\n#![feature(used)]\n#![no_std]\n\n\/\/ version = \"0.2.0\"\nextern crate cortex_m_rt;\n\n\/\/ version = \"0.1.0\"\n#[macro_use]\nextern crate cortex_m_rtfm as rtfm;\n\nextern crate bluepill;\n\nuse bluepill::pin::{halPin, Pin, Mode};\nuse bluepill::frequency;\nuse bluepill::stm32f103xx::interrupt::Tim3;\nuse bluepill::stm32f103xx;\nuse bluepill::timer::{halTimer, Timer};\nuse rtfm::{Local, P0, P1, T0, T1, TMax};\n\n\/\/ CONFIGURATION\nconst TICKS: u32 = 9_000_000; \/\/ one rotation a second\nconst ORDER:[[bool; 4]; 9] = [[false,false,false,true],\n [false,false,true,true],\n [false,false,true,false],\n [false,true,true,false],\n [false,true,false,false],\n [true,true,false,false],\n [true,false,false,false],\n [true,false,false,true],\n [false,false,false,false]];\n\n\/\/ RESOURCES\nperipherals!(stm32f103xx, {\n GPIOA: Peripheral {\n register_block: Gpioa,\n ceiling: C1,\n },\n RCC: Peripheral {\n register_block: Rcc,\n ceiling: C0,\n },\n TIM3: Peripheral {\n register_block: Tim3,\n ceiling: C1,\n },\n FLASH: Peripheral {\n register_block: Flash,\n ceiling: C0,\n },\n});\n\n\/\/ INITIALIZATION PHASE\nfn init(ref priority: P0, threshold: &TMax) {\n let gpioa = GPIOA.access(priority, threshold);\n let rcc = RCC.access(priority, threshold);\n let tim3 = TIM3.access(priority, threshold);\n let flash = FLASH.access(priority, threshold);\n let timer = Timer::new(&**tim3);\n let in1 = Pin{pin: 1, port: &**gpioa};\n let in2 = Pin{pin: 2, port: &**gpioa};\n let in3 = Pin{pin: 3, port: &**gpioa};\n let in4 = Pin{pin: 4, port: &**gpioa};\n\n \/\/ set clock to 72Mhz\n frequency::init(&rcc, &flash, frequency::Speed::S72Mhz);\n\n \/\/ configure pins for output\n in1.init(&rcc, Mode::OUTPUT);\n in2.init(&rcc, Mode::OUTPUT);\n in3.init(&rcc, Mode::OUTPUT);\n in4.init(&rcc, Mode::OUTPUT);\n\n \/\/ Configure TIM2 for periodic update events\n timer.init(&rcc, TICKS);\n\n \/\/ Start the timer\n timer.resume();\n}\n\n\/\/ IDLE LOOP\nfn idle(_priority: P0, 
_threshold: T0) -> ! {\n \/\/ Sleep\n loop {\n rtfm::wfi();\n }\n}\n\n\/\/ TASKS\ntasks!(stm32f103xx, {\n periodic: Task {\n interrupt: Tim3,\n priority: P1,\n enabled: true,\n },\n});\n\nfn periodic(mut task: Tim3, ref priority: P1, ref threshold: T1) {\n \/\/ Task local data\n static STEP: Local<u16, Tim3> = Local::new(0);\n\n\n let tim3 = TIM3.access(priority, threshold);\n let timer = Timer{timer: &**tim3};\n let gpioa = GPIOA.access(priority, threshold);\n let in1 = Pin{pin: 1, port: &**gpioa};\n let in2 = Pin{pin: 2, port: &**gpioa};\n let in3 = Pin{pin: 3, port: &**gpioa};\n let in4 = Pin{pin: 4, port: &**gpioa};\n\n if timer.clear_update_flag().is_ok() {\n let step = STEP.borrow_mut(&mut task);\n let current = ORDER[*step as usize];\n\n \n match current[0]{\n true => in1.on(),\n false => in1.off(),\n }\n match current[1]{\n true => in2.on(),\n false => in2.off(),\n }\n match current[2]{\n true => in3.on(),\n false => in3.off(),\n }\n match current[3]{\n true => in4.on(),\n false => in4.off(),\n }\n\n if *step < 8 {\n *step += 1;\n } else {\n *step = 0;\n }\n } else {\n \/\/ Only reachable through `rtfm::request(periodic)`\n \/\/#[cfg(debug_assertion)]\n \/\/unreachable!()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add version example<commit_after>use std;\nuse glfw3;\n\nimport glfw3::*;\n\nfn main() {\n \n \/\/ get version string\n io::println(~\"GLFW version: \" + glfwGetVersionString());\n \n \/\/ get version tuple\n let version = glfwGetVersion();\n match version {\n (major, minor, rev) => {\n io::println(fmt!(\"GLFW version: %d.%d.%d\", major, minor, rev));\n }\n }\n \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix formatting.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>blank message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix deprecated syntax.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added implementation of Hamming numbers (ported from scala)<commit_after>\/\/ Implements http:\/\/rosettacode.org\/wiki\/Hamming_numbers\n\/\/ port of one of the scala solutions\nextern crate num;\nuse num::bigint::BigUint;\nuse std::cmp::min;\nuse std::sync::spsc_queue::Queue;\n\nfn int_to_biguint(i: int) -> BigUint {\n FromPrimitive::from_int(i).unwrap()\n}\n\n#[cfg(not(test))]\nfn main() {\n let mut hamming = Hamming::new(1691);\n\n println!(\"first 20 Hamming numbers\")\n for _ in range(0,19) {\n print!(\"{} \", hamming.next().unwrap());\n }\n\n println!(\"\\n\\n1691st Hamming number\");\n println!(\"{}\",hamming.nth(1691-20).unwrap())\n}\n\nstruct Hamming {\n pub q2: Queue<BigUint>,\n pub q3: Queue<BigUint>,\n pub q5: Queue<BigUint>\n}\n\nimpl Hamming {\n fn new(n: uint) -> Hamming {\n let h = Hamming {\n q2: Queue::new(n),\n q3: Queue::new(n),\n q5: Queue::new(n)\n };\n\n h.q2.push(int_to_biguint(1));\n h.q3.push(int_to_biguint(1));\n h.q5.push(int_to_biguint(1));\n\n h\n }\n\n fn enqueue(&self, n: BigUint) {\n self.q2.push(n * int_to_biguint(2));\n self.q3.push(n * int_to_biguint(3));\n self.q5.push(n * int_to_biguint(5));\n }\n\n}\n\nimpl Iterator<BigUint> for Hamming {\n fn next(&mut self) -> Option<BigUint> {\n let (head2, head3, head5) =\n ( self.q2.peek().unwrap().clone(),\n self.q3.peek().unwrap().clone(),\n 
self.q5.peek().unwrap().clone());\n\n let n = min(head2.clone(), min(head3.clone(), head5.clone()));\n\n if head2 == n {self.q2.pop();}\n if head3 == n {self.q3.pop();}\n if head5 == n {self.q5.pop();}\n\n self.enqueue(n.clone());\n Some(n.clone())\n }\n}\n\n#[test]\nfn create() {\n let h = Hamming::new(5);\n h.q2.push(int_to_biguint(1));\n h.q2.push(int_to_biguint(2));\n h.q2.push(int_to_biguint(4));\n\n let _ = h.q2.peek();\n assert!(h.q2.pop().unwrap() == int_to_biguint(1));\n}\n\n#[test]\nfn try_enqueue() {\n let h = Hamming::new(5);\n h.enqueue(int_to_biguint(1));\n h.enqueue(int_to_biguint(2));\n h.enqueue(int_to_biguint(3));\n\n assert!(h.q2.pop().unwrap() == int_to_biguint(1));\n assert!(h.q3.pop().unwrap() == int_to_biguint(1));\n assert!(h.q5.pop().unwrap() == int_to_biguint(1));\n assert!(h.q2.pop().unwrap() == int_to_biguint(2));\n assert!(h.q3.pop().unwrap() == int_to_biguint(3));\n assert!(h.q5.pop().unwrap() == int_to_biguint(5));\n }\n\n#[test]\nfn hamming_iter() {\n let mut hamming = Hamming::new(20);\n assert!(hamming.nth(19).unwrap() == int_to_biguint(36));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Add Rust implementation of Armstrong Number algorithm<commit_after>fn is_armstrong_number(num: u32) -> bool {\n \/\/ Get vector of the digits in the number\n \/\/ Makes it possible to use `fold` to do the calculation\n let digits = number_to_vec(num);\n num == digits\n .iter()\n .fold(0, |acc, x| acc + x.pow(digits.len() as u32))\n}\n\nfn number_to_vec(n: u32) -> Vec<u32> {\n let mut digits = vec![];\n let mut n = n;\n while n > 9 {\n digits.push(n % 10);\n n \/= 10;\n }\n digits.push(n);\n digits.reverse();\n digits\n}\n\nfn main() {\n assert!(is_armstrong_number(5));\n assert!(is_armstrong_number(153));\n assert!(!is_armstrong_number(9475));\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove spurious uniform block name field<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::CString;\nuse syntax::feature_gate::UnstableFeatures;\n\nuse std::str;\nuse std::slice;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n if sess.opts.debugging_opts.disable_instrumentation_preinliner {\n add(\"-disable-preinline\");\n }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n ::rustc_llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features after applying `to_llvm_feature` must be known\n\/\/ to LLVM or the feature detection code will walk past the end of the feature\n\/\/ array, leading to crashes.\n\nconst ARM_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"mclass\", Some(\"arm_target_feature\")),\n (\"rclass\", Some(\"arm_target_feature\")),\n (\"dsp\", Some(\"arm_target_feature\")),\n (\"neon\", Some(\"arm_target_feature\")),\n (\"v7\", Some(\"arm_target_feature\")),\n (\"vfp2\", Some(\"arm_target_feature\")),\n (\"vfp3\", Some(\"arm_target_feature\")),\n (\"vfp4\", Some(\"arm_target_feature\")),\n];\n\nconst AARCH64_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"fp\", Some(\"aarch64_target_feature\")),\n (\"neon\", Some(\"aarch64_target_feature\")),\n (\"sve\", Some(\"aarch64_target_feature\")),\n (\"crc\", Some(\"aarch64_target_feature\")),\n (\"crypto\", Some(\"aarch64_target_feature\")),\n (\"ras\", Some(\"aarch64_target_feature\")),\n (\"lse\", Some(\"aarch64_target_feature\")),\n (\"rdm\", Some(\"aarch64_target_feature\")),\n (\"fp16\", Some(\"aarch64_target_feature\")),\n (\"rcpc\", Some(\"aarch64_target_feature\")),\n (\"dotprod\", Some(\"aarch64_target_feature\")),\n (\"v8.1a\", Some(\"aarch64_target_feature\")),\n (\"v8.2a\", Some(\"aarch64_target_feature\")),\n (\"v8.3a\", Some(\"aarch64_target_feature\")),\n];\n\nconst X86_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"aes\", None),\n (\"avx\", None),\n (\"avx2\", None),\n (\"avx512bw\", Some(\"avx512_target_feature\")),\n (\"avx512cd\", Some(\"avx512_target_feature\")),\n (\"avx512dq\", 
Some(\"avx512_target_feature\")),\n (\"avx512er\", Some(\"avx512_target_feature\")),\n (\"avx512f\", Some(\"avx512_target_feature\")),\n (\"avx512ifma\", Some(\"avx512_target_feature\")),\n (\"avx512pf\", Some(\"avx512_target_feature\")),\n (\"avx512vbmi\", Some(\"avx512_target_feature\")),\n (\"avx512vl\", Some(\"avx512_target_feature\")),\n (\"avx512vpopcntdq\", Some(\"avx512_target_feature\")),\n (\"bmi1\", None),\n (\"bmi2\", None),\n (\"fma\", None),\n (\"fxsr\", None),\n (\"lzcnt\", None),\n (\"mmx\", Some(\"mmx_target_feature\")),\n (\"pclmulqdq\", None),\n (\"popcnt\", None),\n (\"rdrand\", None),\n (\"rdseed\", None),\n (\"sha\", None),\n (\"sse\", None),\n (\"sse2\", None),\n (\"sse3\", None),\n (\"sse4.1\", None),\n (\"sse4.2\", None),\n (\"sse4a\", Some(\"sse4a_target_feature\")),\n (\"ssse3\", None),\n (\"tbm\", Some(\"tbm_target_feature\")),\n (\"xsave\", None),\n (\"xsavec\", None),\n (\"xsaveopt\", None),\n (\"xsaves\", None),\n];\n\nconst HEXAGON_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"hvx\", Some(\"hexagon_target_feature\")),\n (\"hvx-double\", Some(\"hexagon_target_feature\")),\n];\n\nconst POWERPC_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"altivec\", Some(\"powerpc_target_feature\")),\n (\"power8-altivec\", Some(\"powerpc_target_feature\")),\n (\"power9-altivec\", Some(\"powerpc_target_feature\")),\n (\"power8-vector\", Some(\"powerpc_target_feature\")),\n (\"power9-vector\", Some(\"powerpc_target_feature\")),\n (\"vsx\", Some(\"powerpc_target_feature\")),\n];\n\nconst MIPS_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"fp64\", Some(\"mips_target_feature\")),\n (\"msa\", Some(\"mips_target_feature\")),\n];\n\nconst WASM_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"simd128\", Some(\"wasm_target_feature\")),\n];\n\n\/\/\/ When rustdoc is running, provide a list of all known features so that all their respective\n\/\/\/ primtives may be documented.\n\/\/\/\n\/\/\/ IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this\n\/\/\/ iterator!\npub fn all_known_features() -> impl Iterator<Item=(&'static str, Option<&'static str>)> {\n ARM_WHITELIST.iter().cloned()\n .chain(AARCH64_WHITELIST.iter().cloned())\n .chain(X86_WHITELIST.iter().cloned())\n .chain(HEXAGON_WHITELIST.iter().cloned())\n .chain(POWERPC_WHITELIST.iter().cloned())\n .chain(MIPS_WHITELIST.iter().cloned())\n .chain(WASM_WHITELIST.iter().cloned())\n}\n\npub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {\n let arch = if sess.target.target.arch == \"x86_64\" {\n \"x86\"\n } else {\n &*sess.target.target.arch\n };\n match (arch, s) {\n (\"x86\", \"pclmulqdq\") => \"pclmul\",\n (\"x86\", \"rdrand\") => \"rdrnd\",\n (\"x86\", \"bmi1\") => \"bmi\",\n (\"aarch64\", \"fp\") => \"fp-armv8\",\n (\"aarch64\", \"fp16\") => \"fullfp16\",\n (_, s) => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess, true);\n target_feature_whitelist(sess)\n .iter()\n .filter_map(|&(feature, gate)| {\n if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {\n Some(feature)\n } else {\n None\n }\n })\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(sess, feature);\n let cstr = CString::new(llvm_feature).unwrap();\n unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) }\n })\n .map(|feature| Symbol::intern(feature)).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session)\n -> &'static [(&'static str, Option<&'static str>)]\n{\n match &*sess.target.target.arch {\n 
\"arm\" => ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n \"wasm32\" => WASM_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess, true);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_codegen_llvm can't handle print request: {:?}\", req),\n }\n }\n}\n\npub fn target_cpu(sess: &Session) -> &str {\n let name = match sess.opts.cg.target_cpu {\n Some(ref s) => &**s,\n None => &*sess.target.target.options.cpu\n };\n if name != \"native\" {\n return name\n }\n\n unsafe {\n let mut len = 0;\n let ptr = llvm::LLVMRustGetHostCPUName(&mut len);\n str::from_utf8(slice::from_raw_parts(ptr as *const u8, len)).unwrap()\n }\n}\n<commit_msg>Auto merge of #53926 - japaric:arm-features, r=alexcrichton<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::c_int;\nuse std::ffi::CString;\nuse syntax::feature_gate::UnstableFeatures;\n\nuse std::str;\nuse std::slice;\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\nstatic POISONED: AtomicBool = AtomicBool::new(false);\nstatic INIT: Once = Once::new();\n\npub(crate) fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nfn require_inited() {\n INIT.call_once(|| bug!(\"llvm is not initialized\"));\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n if sess.opts.debugging_opts.disable_instrumentation_preinliner {\n add(\"-disable-preinline\");\n }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n ::rustc_llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features after applying `to_llvm_feature` must be known\n\/\/ to LLVM or the feature detection code will walk past the end of the feature\n\/\/ array, leading to crashes.\n\nconst ARM_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"aclass\", Some(\"arm_target_feature\")),\n (\"mclass\", Some(\"arm_target_feature\")),\n (\"rclass\", Some(\"arm_target_feature\")),\n (\"dsp\", Some(\"arm_target_feature\")),\n (\"neon\", Some(\"arm_target_feature\")),\n (\"v5te\", Some(\"arm_target_feature\")),\n (\"v6k\", Some(\"arm_target_feature\")),\n (\"v6t2\", Some(\"arm_target_feature\")),\n (\"v7\", Some(\"arm_target_feature\")),\n (\"vfp2\", Some(\"arm_target_feature\")),\n (\"vfp3\", Some(\"arm_target_feature\")),\n (\"vfp4\", Some(\"arm_target_feature\")),\n];\n\nconst AARCH64_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"fp\", Some(\"aarch64_target_feature\")),\n (\"neon\", Some(\"aarch64_target_feature\")),\n (\"sve\", Some(\"aarch64_target_feature\")),\n (\"crc\", Some(\"aarch64_target_feature\")),\n (\"crypto\", Some(\"aarch64_target_feature\")),\n (\"ras\", Some(\"aarch64_target_feature\")),\n (\"lse\", Some(\"aarch64_target_feature\")),\n (\"rdm\", Some(\"aarch64_target_feature\")),\n (\"fp16\", Some(\"aarch64_target_feature\")),\n (\"rcpc\", Some(\"aarch64_target_feature\")),\n (\"dotprod\", Some(\"aarch64_target_feature\")),\n (\"v8.1a\", Some(\"aarch64_target_feature\")),\n (\"v8.2a\", Some(\"aarch64_target_feature\")),\n (\"v8.3a\", Some(\"aarch64_target_feature\")),\n];\n\nconst X86_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"aes\", None),\n 
(\"avx\", None),\n (\"avx2\", None),\n (\"avx512bw\", Some(\"avx512_target_feature\")),\n (\"avx512cd\", Some(\"avx512_target_feature\")),\n (\"avx512dq\", Some(\"avx512_target_feature\")),\n (\"avx512er\", Some(\"avx512_target_feature\")),\n (\"avx512f\", Some(\"avx512_target_feature\")),\n (\"avx512ifma\", Some(\"avx512_target_feature\")),\n (\"avx512pf\", Some(\"avx512_target_feature\")),\n (\"avx512vbmi\", Some(\"avx512_target_feature\")),\n (\"avx512vl\", Some(\"avx512_target_feature\")),\n (\"avx512vpopcntdq\", Some(\"avx512_target_feature\")),\n (\"bmi1\", None),\n (\"bmi2\", None),\n (\"fma\", None),\n (\"fxsr\", None),\n (\"lzcnt\", None),\n (\"mmx\", Some(\"mmx_target_feature\")),\n (\"pclmulqdq\", None),\n (\"popcnt\", None),\n (\"rdrand\", None),\n (\"rdseed\", None),\n (\"sha\", None),\n (\"sse\", None),\n (\"sse2\", None),\n (\"sse3\", None),\n (\"sse4.1\", None),\n (\"sse4.2\", None),\n (\"sse4a\", Some(\"sse4a_target_feature\")),\n (\"ssse3\", None),\n (\"tbm\", Some(\"tbm_target_feature\")),\n (\"xsave\", None),\n (\"xsavec\", None),\n (\"xsaveopt\", None),\n (\"xsaves\", None),\n];\n\nconst HEXAGON_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"hvx\", Some(\"hexagon_target_feature\")),\n (\"hvx-double\", Some(\"hexagon_target_feature\")),\n];\n\nconst POWERPC_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"altivec\", Some(\"powerpc_target_feature\")),\n (\"power8-altivec\", Some(\"powerpc_target_feature\")),\n (\"power9-altivec\", Some(\"powerpc_target_feature\")),\n (\"power8-vector\", Some(\"powerpc_target_feature\")),\n (\"power9-vector\", Some(\"powerpc_target_feature\")),\n (\"vsx\", Some(\"powerpc_target_feature\")),\n];\n\nconst MIPS_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"fp64\", Some(\"mips_target_feature\")),\n (\"msa\", Some(\"mips_target_feature\")),\n];\n\nconst WASM_WHITELIST: &[(&str, Option<&str>)] = &[\n (\"simd128\", Some(\"wasm_target_feature\")),\n];\n\n\/\/\/ When rustdoc is running, provide a list of all known features so that all their respective\n\/\/\/ primtives may be documented.\n\/\/\/\n\/\/\/ IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this\n\/\/\/ iterator!\npub fn all_known_features() -> impl Iterator<Item=(&'static str, Option<&'static str>)> {\n ARM_WHITELIST.iter().cloned()\n .chain(AARCH64_WHITELIST.iter().cloned())\n .chain(X86_WHITELIST.iter().cloned())\n .chain(HEXAGON_WHITELIST.iter().cloned())\n .chain(POWERPC_WHITELIST.iter().cloned())\n .chain(MIPS_WHITELIST.iter().cloned())\n .chain(WASM_WHITELIST.iter().cloned())\n}\n\npub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {\n let arch = if sess.target.target.arch == \"x86_64\" {\n \"x86\"\n } else {\n &*sess.target.target.arch\n };\n match (arch, s) {\n (\"x86\", \"pclmulqdq\") => \"pclmul\",\n (\"x86\", \"rdrand\") => \"rdrnd\",\n (\"x86\", \"bmi1\") => \"bmi\",\n (\"aarch64\", \"fp\") => \"fp-armv8\",\n (\"aarch64\", \"fp16\") => \"fullfp16\",\n (_, s) => s,\n }\n}\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess, true);\n target_feature_whitelist(sess)\n .iter()\n .filter_map(|&(feature, gate)| {\n if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {\n Some(feature)\n } else {\n None\n }\n })\n .filter(|feature| {\n let llvm_feature = to_llvm_feature(sess, feature);\n let cstr = CString::new(llvm_feature).unwrap();\n unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) }\n })\n .map(|feature| 
Symbol::intern(feature)).collect()\n}\n\npub fn target_feature_whitelist(sess: &Session)\n -> &'static [(&'static str, Option<&'static str>)]\n{\n match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"aarch64\" => AARCH64_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n \"mips\" | \"mips64\" => MIPS_WHITELIST,\n \"powerpc\" | \"powerpc64\" => POWERPC_WHITELIST,\n \"wasm32\" => WASM_WHITELIST,\n _ => &[],\n }\n}\n\npub fn print_version() {\n \/\/ Can be called without initializing LLVM\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n \/\/ Can be called without initializing LLVM\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub(crate) fn print(req: PrintRequest, sess: &Session) {\n require_inited();\n let tm = create_target_machine(sess, true);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_codegen_llvm can't handle print request: {:?}\", req),\n }\n }\n}\n\npub fn target_cpu(sess: &Session) -> &str {\n let name = match sess.opts.cg.target_cpu {\n Some(ref s) => &**s,\n None => &*sess.target.target.options.cpu\n };\n if name != \"native\" {\n return name\n }\n\n unsafe {\n let mut len = 0;\n let ptr = llvm::LLVMRustGetHostCPUName(&mut len);\n str::from_utf8(slice::from_raw_parts(ptr as *const u8, len)).unwrap()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>minor fix<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A work queue for scheduling units of work across threads in a fork-join fashion.\n\/\/!\n\/\/! Data associated with queues is simply a pair of unsigned integers. It is expected that a\n\/\/! higher-level API on top of this could allow safe fork-join parallelism.\n\nuse native;\nuse rand;\nuse rand::{Rng, XorShiftRng};\nuse std::cast;\nuse std::comm;\nuse std::mem;\nuse std::sync::atomics::{AtomicUint, SeqCst};\nuse std::sync::deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};\nuse std::task::TaskOpts;\n\n\/\/\/ A unit of work.\n\/\/\/\n\/\/\/ The type parameter `QUD` stands for \"queue user data\" and represents global custom data for the\n\/\/\/ entire work queue, and the type parameter `WUD` stands for \"work user data\" and represents\n\/\/\/ custom data specific to each unit of work.\npub struct WorkUnit<QUD,WUD> {\n \/\/\/ The function to execute.\n pub fun: extern \"Rust\" fn(WUD, &mut WorkerProxy<QUD,WUD>),\n \/\/\/ Arbitrary data.\n pub data: WUD,\n}\n\n\/\/\/ Messages from the supervisor to the worker.\nenum WorkerMsg<QUD,WUD> {\n \/\/\/ Tells the worker to start work.\n StartMsg(Worker<WorkUnit<QUD,WUD>>, *mut AtomicUint, *QUD),\n\n \/\/\/ Tells the worker to stop. 
It can be restarted again with a `StartMsg`.\n StopMsg,\n\n \/\/\/ Tells the worker thread to terminate.\n ExitMsg,\n}\n\n\/\/\/ Messages to the supervisor.\nenum SupervisorMsg<QUD,WUD> {\n FinishedMsg,\n ReturnDequeMsg(uint, Worker<WorkUnit<QUD,WUD>>),\n}\n\n\/\/\/ Information that the supervisor thread keeps about the worker threads.\nstruct WorkerInfo<QUD,WUD> {\n \/\/\/ The communication channel to the workers.\n chan: Sender<WorkerMsg<QUD,WUD>>,\n \/\/\/ The buffer pool for this deque.\n pool: BufferPool<WorkUnit<QUD,WUD>>,\n \/\/\/ The worker end of the deque, if we have it.\n deque: Option<Worker<WorkUnit<QUD,WUD>>>,\n \/\/\/ The thief end of the work-stealing deque.\n thief: Stealer<WorkUnit<QUD,WUD>>,\n}\n\n\/\/\/ Information specific to each worker thread that the thread keeps.\nstruct WorkerThread<QUD,WUD> {\n \/\/\/ The index of this worker.\n index: uint,\n \/\/\/ The communication port from the supervisor.\n port: Receiver<WorkerMsg<QUD,WUD>>,\n \/\/\/ The communication channel on which messages are sent to the supervisor.\n chan: Sender<SupervisorMsg<QUD,WUD>>,\n \/\/\/ The thief end of the work-stealing deque for all other workers.\n other_deques: ~[Stealer<WorkUnit<QUD,WUD>>],\n \/\/\/ The random number generator for this worker.\n rng: XorShiftRng,\n}\n\nstatic SPIN_COUNT: uint = 1000;\n\nimpl<QUD:Send,WUD:Send> WorkerThread<QUD,WUD> {\n \/\/\/ The main logic. This function starts up the worker and listens for\n \/\/\/ messages.\n pub fn start(&mut self) {\n loop {\n \/\/ Wait for a start message.\n let (mut deque, ref_count, queue_data) = match self.port.recv() {\n StartMsg(deque, ref_count, queue_data) => (deque, ref_count, queue_data),\n StopMsg => fail!(\"unexpected stop message\"),\n ExitMsg => return,\n };\n\n \/\/ We're off!\n \/\/\n \/\/ FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.\n loop {\n \/\/ FIXME(pcwalton): Nasty workaround for the lack of labeled break\/continue\n \/\/ cross-crate.\n let mut work_unit = unsafe {\n mem::uninit()\n };\n match deque.pop() {\n Some(work) => work_unit = work,\n None => {\n \/\/ Become a thief.\n let mut i = 0;\n let mut should_continue = true;\n loop {\n let victim = (self.rng.next_u32() as uint) % self.other_deques.len();\n match self.other_deques[victim].steal() {\n Empty | Abort => {\n \/\/ Continue.\n }\n Data(work) => {\n work_unit = work;\n break\n }\n }\n\n if i == SPIN_COUNT {\n match self.port.try_recv() {\n comm::Data(StopMsg) => {\n should_continue = false;\n break\n }\n comm::Data(ExitMsg) => return,\n comm::Data(_) => fail!(\"unexpected message\"),\n _ => {}\n }\n\n i = 0\n } else {\n i += 1\n }\n }\n\n if !should_continue {\n break\n }\n }\n }\n\n \/\/ At this point, we have some work. Perform it.\n let mut proxy = WorkerProxy {\n worker: &mut deque,\n ref_count: ref_count,\n queue_data: queue_data,\n };\n (work_unit.fun)(work_unit.data, &mut proxy);\n\n \/\/ The work is done. Now decrement the count of outstanding work items. 
If this was\n \/\/ the last work unit in the queue, then send a message on the channel.\n unsafe {\n if (*ref_count).fetch_sub(1, SeqCst) == 1 {\n self.chan.send(FinishedMsg)\n }\n }\n }\n\n \/\/ Give the deque back to the supervisor.\n self.chan.send(ReturnDequeMsg(self.index, deque))\n }\n }\n}\n\n\/\/\/ A handle to the work queue that individual work units have.\npub struct WorkerProxy<'a,QUD,WUD> {\n pub worker: &'a mut Worker<WorkUnit<QUD,WUD>>,\n pub ref_count: *mut AtomicUint,\n pub queue_data: *QUD,\n}\n\nimpl<'a,QUD,WUD:Send> WorkerProxy<'a,QUD,WUD> {\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {\n unsafe {\n drop((*self.ref_count).fetch_add(1, SeqCst));\n }\n self.worker.push(work_unit);\n }\n\n \/\/\/ Retrieves the queue user data.\n #[inline]\n pub fn user_data<'a>(&'a self) -> &'a QUD {\n unsafe {\n cast::transmute(self.queue_data)\n }\n }\n}\n\n\/\/\/ A work queue on which units of work can be submitted.\npub struct WorkQueue<QUD,WUD> {\n \/\/\/ Information about each of the workers.\n pub workers: ~[WorkerInfo<QUD,WUD>],\n \/\/\/ A port on which deques can be received from the workers.\n pub port: Receiver<SupervisorMsg<QUD,WUD>>,\n \/\/\/ The amount of work that has been enqueued.\n pub work_count: uint,\n \/\/\/ Arbitrary user data.\n pub data: QUD,\n}\n\nimpl<QUD:Send,WUD:Send> WorkQueue<QUD,WUD> {\n \/\/\/ Creates a new work queue and spawns all the threads associated with\n \/\/\/ it.\n pub fn new(task_name: &'static str, thread_count: uint, user_data: QUD) -> WorkQueue<QUD,WUD> {\n \/\/ Set up data structures.\n let (supervisor_chan, supervisor_port) = channel();\n let (mut infos, mut threads) = (~[], ~[]);\n for i in range(0, thread_count) {\n let (worker_chan, worker_port) = channel();\n let mut pool = BufferPool::new();\n let (worker, thief) = pool.deque();\n infos.push(WorkerInfo {\n chan: worker_chan,\n pool: pool,\n deque: Some(worker),\n thief: thief,\n });\n threads.push(WorkerThread {\n index: i,\n port: worker_port,\n chan: supervisor_chan.clone(),\n other_deques: ~[],\n rng: rand::weak_rng(),\n });\n }\n\n \/\/ Connect workers to one another.\n for i in range(0, thread_count) {\n for j in range(0, thread_count) {\n if i != j {\n threads[i].other_deques.push(infos[j].thief.clone())\n }\n }\n assert!(threads[i].other_deques.len() == thread_count - 1)\n }\n\n \/\/ Spawn threads.\n for thread in threads.move_iter() {\n let mut opts = TaskOpts::new();\n opts.name = Some(task_name.into_maybe_owned());\n native::task::spawn_opts(opts, proc() {\n let mut thread = thread;\n thread.start()\n })\n }\n\n WorkQueue {\n workers: infos,\n port: supervisor_port,\n work_count: 0,\n data: user_data,\n }\n }\n\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {\n match self.workers[0].deque {\n None => {\n fail!(\"tried to push a block but we don't have the deque?!\")\n }\n Some(ref mut deque) => deque.push(work_unit),\n }\n self.work_count += 1\n }\n\n \/\/\/ Synchronously runs all the enqueued tasks and waits for them to complete.\n pub fn run(&mut self) {\n \/\/ Tell the workers to start.\n let mut work_count = AtomicUint::new(self.work_count);\n for worker in self.workers.mut_iter() {\n worker.chan.send(StartMsg(worker.deque.take_unwrap(), &mut work_count, &self.data))\n }\n\n \/\/ Wait for the work to finish.\n drop(self.port.recv());\n self.work_count = 0;\n\n \/\/ Tell everyone to stop.\n for worker in self.workers.iter() {\n 
worker.chan.send(StopMsg)\n }\n\n \/\/ Get our deques back.\n for _ in range(0, self.workers.len()) {\n match self.port.recv() {\n ReturnDequeMsg(index, deque) => self.workers[index].deque = Some(deque),\n FinishedMsg => fail!(\"unexpected finished message!\"),\n }\n }\n }\n\n pub fn shutdown(&mut self) {\n for worker in self.workers.iter() {\n worker.chan.send(ExitMsg)\n }\n }\n}\n\n<commit_msg>auto merge of #2303 : Manishearth\/servo\/privtype, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A work queue for scheduling units of work across threads in a fork-join fashion.\n\/\/!\n\/\/! Data associated with queues is simply a pair of unsigned integers. It is expected that a\n\/\/! higher-level API on top of this could allow safe fork-join parallelism.\n\nuse native;\nuse rand;\nuse rand::{Rng, XorShiftRng};\nuse std::cast;\nuse std::comm;\nuse std::mem;\nuse std::sync::atomics::{AtomicUint, SeqCst};\nuse std::sync::deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};\nuse std::task::TaskOpts;\n\n\/\/\/ A unit of work.\n\/\/\/\n\/\/\/ The type parameter `QUD` stands for \"queue user data\" and represents global custom data for the\n\/\/\/ entire work queue, and the type parameter `WUD` stands for \"work user data\" and represents\n\/\/\/ custom data specific to each unit of work.\npub struct WorkUnit<QUD,WUD> {\n \/\/\/ The function to execute.\n pub fun: extern \"Rust\" fn(WUD, &mut WorkerProxy<QUD,WUD>),\n \/\/\/ Arbitrary data.\n pub data: WUD,\n}\n\n\/\/\/ Messages from the supervisor to the worker.\nenum WorkerMsg<QUD,WUD> {\n \/\/\/ Tells the worker to start work.\n StartMsg(Worker<WorkUnit<QUD,WUD>>, *mut AtomicUint, *QUD),\n\n \/\/\/ Tells the worker to stop. It can be restarted again with a `StartMsg`.\n StopMsg,\n\n \/\/\/ Tells the worker thread to terminate.\n ExitMsg,\n}\n\n\/\/\/ Messages to the supervisor.\nenum SupervisorMsg<QUD,WUD> {\n FinishedMsg,\n ReturnDequeMsg(uint, Worker<WorkUnit<QUD,WUD>>),\n}\n\n\/\/\/ Information that the supervisor thread keeps about the worker threads.\nstruct WorkerInfo<QUD,WUD> {\n \/\/\/ The communication channel to the workers.\n chan: Sender<WorkerMsg<QUD,WUD>>,\n \/\/\/ The buffer pool for this deque.\n pool: BufferPool<WorkUnit<QUD,WUD>>,\n \/\/\/ The worker end of the deque, if we have it.\n deque: Option<Worker<WorkUnit<QUD,WUD>>>,\n \/\/\/ The thief end of the work-stealing deque.\n thief: Stealer<WorkUnit<QUD,WUD>>,\n}\n\n\/\/\/ Information specific to each worker thread that the thread keeps.\nstruct WorkerThread<QUD,WUD> {\n \/\/\/ The index of this worker.\n index: uint,\n \/\/\/ The communication port from the supervisor.\n port: Receiver<WorkerMsg<QUD,WUD>>,\n \/\/\/ The communication channel on which messages are sent to the supervisor.\n chan: Sender<SupervisorMsg<QUD,WUD>>,\n \/\/\/ The thief end of the work-stealing deque for all other workers.\n other_deques: ~[Stealer<WorkUnit<QUD,WUD>>],\n \/\/\/ The random number generator for this worker.\n rng: XorShiftRng,\n}\n\nstatic SPIN_COUNT: uint = 1000;\n\nimpl<QUD:Send,WUD:Send> WorkerThread<QUD,WUD> {\n \/\/\/ The main logic. 
This function starts up the worker and listens for\n \/\/\/ messages.\n pub fn start(&mut self) {\n loop {\n \/\/ Wait for a start message.\n let (mut deque, ref_count, queue_data) = match self.port.recv() {\n StartMsg(deque, ref_count, queue_data) => (deque, ref_count, queue_data),\n StopMsg => fail!(\"unexpected stop message\"),\n ExitMsg => return,\n };\n\n \/\/ We're off!\n \/\/\n \/\/ FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.\n loop {\n \/\/ FIXME(pcwalton): Nasty workaround for the lack of labeled break\/continue\n \/\/ cross-crate.\n let mut work_unit = unsafe {\n mem::uninit()\n };\n match deque.pop() {\n Some(work) => work_unit = work,\n None => {\n \/\/ Become a thief.\n let mut i = 0;\n let mut should_continue = true;\n loop {\n let victim = (self.rng.next_u32() as uint) % self.other_deques.len();\n match self.other_deques[victim].steal() {\n Empty | Abort => {\n \/\/ Continue.\n }\n Data(work) => {\n work_unit = work;\n break\n }\n }\n\n if i == SPIN_COUNT {\n match self.port.try_recv() {\n comm::Data(StopMsg) => {\n should_continue = false;\n break\n }\n comm::Data(ExitMsg) => return,\n comm::Data(_) => fail!(\"unexpected message\"),\n _ => {}\n }\n\n i = 0\n } else {\n i += 1\n }\n }\n\n if !should_continue {\n break\n }\n }\n }\n\n \/\/ At this point, we have some work. Perform it.\n let mut proxy = WorkerProxy {\n worker: &mut deque,\n ref_count: ref_count,\n queue_data: queue_data,\n };\n (work_unit.fun)(work_unit.data, &mut proxy);\n\n \/\/ The work is done. Now decrement the count of outstanding work items. If this was\n \/\/ the last work unit in the queue, then send a message on the channel.\n unsafe {\n if (*ref_count).fetch_sub(1, SeqCst) == 1 {\n self.chan.send(FinishedMsg)\n }\n }\n }\n\n \/\/ Give the deque back to the supervisor.\n self.chan.send(ReturnDequeMsg(self.index, deque))\n }\n }\n}\n\n\/\/\/ A handle to the work queue that individual work units have.\npub struct WorkerProxy<'a,QUD,WUD> {\n pub worker: &'a mut Worker<WorkUnit<QUD,WUD>>,\n pub ref_count: *mut AtomicUint,\n pub queue_data: *QUD,\n}\n\nimpl<'a,QUD,WUD:Send> WorkerProxy<'a,QUD,WUD> {\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {\n unsafe {\n drop((*self.ref_count).fetch_add(1, SeqCst));\n }\n self.worker.push(work_unit);\n }\n\n \/\/\/ Retrieves the queue user data.\n #[inline]\n pub fn user_data<'a>(&'a self) -> &'a QUD {\n unsafe {\n cast::transmute(self.queue_data)\n }\n }\n}\n\n\/\/\/ A work queue on which units of work can be submitted.\npub struct WorkQueue<QUD,WUD> {\n \/\/\/ Information about each of the workers.\n workers: ~[WorkerInfo<QUD,WUD>],\n \/\/\/ A port on which deques can be received from the workers.\n port: Receiver<SupervisorMsg<QUD,WUD>>,\n \/\/\/ The amount of work that has been enqueued.\n pub work_count: uint,\n \/\/\/ Arbitrary user data.\n pub data: QUD,\n}\n\nimpl<QUD:Send,WUD:Send> WorkQueue<QUD,WUD> {\n \/\/\/ Creates a new work queue and spawns all the threads associated with\n \/\/\/ it.\n pub fn new(task_name: &'static str, thread_count: uint, user_data: QUD) -> WorkQueue<QUD,WUD> {\n \/\/ Set up data structures.\n let (supervisor_chan, supervisor_port) = channel();\n let (mut infos, mut threads) = (~[], ~[]);\n for i in range(0, thread_count) {\n let (worker_chan, worker_port) = channel();\n let mut pool = BufferPool::new();\n let (worker, thief) = pool.deque();\n infos.push(WorkerInfo {\n chan: worker_chan,\n pool: pool,\n deque: Some(worker),\n 
thief: thief,\n });\n threads.push(WorkerThread {\n index: i,\n port: worker_port,\n chan: supervisor_chan.clone(),\n other_deques: ~[],\n rng: rand::weak_rng(),\n });\n }\n\n \/\/ Connect workers to one another.\n for i in range(0, thread_count) {\n for j in range(0, thread_count) {\n if i != j {\n threads[i].other_deques.push(infos[j].thief.clone())\n }\n }\n assert!(threads[i].other_deques.len() == thread_count - 1)\n }\n\n \/\/ Spawn threads.\n for thread in threads.move_iter() {\n let mut opts = TaskOpts::new();\n opts.name = Some(task_name.into_maybe_owned());\n native::task::spawn_opts(opts, proc() {\n let mut thread = thread;\n thread.start()\n })\n }\n\n WorkQueue {\n workers: infos,\n port: supervisor_port,\n work_count: 0,\n data: user_data,\n }\n }\n\n \/\/\/ Enqueues a block into the work queue.\n #[inline]\n pub fn push(&mut self, work_unit: WorkUnit<QUD,WUD>) {\n match self.workers[0].deque {\n None => {\n fail!(\"tried to push a block but we don't have the deque?!\")\n }\n Some(ref mut deque) => deque.push(work_unit),\n }\n self.work_count += 1\n }\n\n \/\/\/ Synchronously runs all the enqueued tasks and waits for them to complete.\n pub fn run(&mut self) {\n \/\/ Tell the workers to start.\n let mut work_count = AtomicUint::new(self.work_count);\n for worker in self.workers.mut_iter() {\n worker.chan.send(StartMsg(worker.deque.take_unwrap(), &mut work_count, &self.data))\n }\n\n \/\/ Wait for the work to finish.\n drop(self.port.recv());\n self.work_count = 0;\n\n \/\/ Tell everyone to stop.\n for worker in self.workers.iter() {\n worker.chan.send(StopMsg)\n }\n\n \/\/ Get our deques back.\n for _ in range(0, self.workers.len()) {\n match self.port.recv() {\n ReturnDequeMsg(index, deque) => self.workers[index].deque = Some(deque),\n FinishedMsg => fail!(\"unexpected finished message!\"),\n }\n }\n }\n\n pub fn shutdown(&mut self) {\n for worker in self.workers.iter() {\n worker.chan.send(ExitMsg)\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! 
{\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0538: r##\"\nAttribute contains same meta item more than once.\n\nErroneous code example:\n\n```compile_fail,E0538\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\",\n note=\"Second deprecation note.\" \/\/ error: multiple same meta item\n)]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. Each key may only be\nused once in each attribute.\n\nTo fix the problem, remove all but one of the meta items with the same key.\n\nExample:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. 
Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. 
If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\nE0705: r##\"\nA `#![feature]` attribute was declared for a feature that is stable in\nthe current edition.\n\nErroneous code example:\n\n```compile_fail,E0705\n#![feature(rust_2018_preview)]\n#![feature(raw_identifiers)] \/\/ error: the feature `raw_identifiers` is\n \/\/ included in the Rust 2018 edition\n```\n\n\"##,\n\n}\n\nregister_diagnostics! {\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n E0703, \/\/ invalid ABI\n E0704, \/\/ incorrect visibility restriction\n}\n<commit_msg>Fix diagnostic_list error<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\n\/\/ Error messages for EXXXX errors.\n\/\/ Each message should start and end with a new line, and be wrapped to 80 characters.\n\/\/ In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.\nregister_long_diagnostics! {\n\nE0178: r##\"\nIn types, the `+` type operator has low precedence, so it is often necessary\nto use parentheses.\n\nFor example:\n\n```compile_fail,E0178\ntrait Foo {}\n\nstruct Bar<'a> {\n w: &'a Foo + Copy, \/\/ error, use &'a (Foo + Copy)\n x: &'a Foo + 'a, \/\/ error, use &'a (Foo + 'a)\n y: &'a mut Foo + 'a, \/\/ error, use &'a mut (Foo + 'a)\n z: fn() -> Foo + 'a, \/\/ error, use fn() -> (Foo + 'a)\n}\n```\n\nMore details can be found in [RFC 438].\n\n[RFC 438]: https:\/\/github.com\/rust-lang\/rfcs\/pull\/438\n\"##,\n\nE0536: r##\"\nThe `not` cfg-predicate was malformed.\n\nErroneous code example:\n\n```compile_fail,E0536\n#[cfg(not())] \/\/ error: expected 1 cfg-pattern\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `not` predicate expects one cfg-pattern. Example:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0537: r##\"\nAn unknown predicate was used inside the `cfg` attribute.\n\nErroneous code example:\n\n```compile_fail,E0537\n#[cfg(unknown())] \/\/ error: invalid predicate `unknown`\npub fn something() {}\n\npub fn main() {}\n```\n\nThe `cfg` attribute supports only three kinds of predicates:\n\n * any\n * all\n * not\n\nExample:\n\n```\n#[cfg(not(target_os = \"linux\"))] \/\/ ok!\npub fn something() {}\n\npub fn main() {}\n```\n\nFor more information about the cfg attribute, read:\nhttps:\/\/doc.rust-lang.org\/reference.html#conditional-compilation\n\"##,\n\nE0538: r##\"\nAttribute contains same meta item more than once.\n\nErroneous code example:\n\n```compile_fail,E0538\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\",\n note=\"Second deprecation note.\" \/\/ error: multiple same meta item\n)]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. Each key may only be\nused once in each attribute.\n\nTo fix the problem, remove all but one of the meta items with the same key.\n\nExample:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"First deprecation note.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0541: r##\"\nAn unknown meta item was used.\n\nErroneous code example:\n\n```compile_fail,E0541\n#[deprecated(\n since=\"1.0.0\",\n \/\/ error: unknown meta item\n reason=\"Example invalid meta item. Should be 'note'\")\n]\nfn deprecated_function() {}\n```\n\nMeta items are the key-value pairs inside of an attribute. 
The keys provided\nmust be one of the valid keys for the specified attribute.\n\nTo fix the problem, either remove the unknown meta item, or rename it if you\nprovided the wrong name.\n\nIn the erroneous code example above, the wrong name was provided, so changing\nto a correct one it will fix the error. Example:\n\n```\n#[deprecated(\n since=\"1.0.0\",\n note=\"This is a valid meta item for the deprecated attribute.\"\n)]\nfn deprecated_function() {}\n```\n\"##,\n\nE0552: r##\"\nA unrecognized representation attribute was used.\n\nErroneous code example:\n\n```compile_fail,E0552\n#[repr(D)] \/\/ error: unrecognized representation hint\nstruct MyStruct {\n my_field: usize\n}\n```\n\nYou can use a `repr` attribute to tell the compiler how you want a struct or\nenum to be laid out in memory.\n\nMake sure you're using one of the supported options:\n\n```\n#[repr(C)] \/\/ ok!\nstruct MyStruct {\n my_field: usize\n}\n```\n\nFor more information about specifying representations, see the [\"Alternative\nRepresentations\" section] of the Rustonomicon.\n\n[\"Alternative Representations\" section]: https:\/\/doc.rust-lang.org\/nomicon\/other-reprs.html\n\"##,\n\nE0554: r##\"\nFeature attributes are only allowed on the nightly release channel. Stable or\nbeta compilers will not comply.\n\nExample of erroneous code (on a stable compiler):\n\n```ignore (depends on release channel)\n#![feature(non_ascii_idents)] \/\/ error: #![feature] may not be used on the\n \/\/ stable release channel\n```\n\nIf you need the feature, make sure to use a nightly release of the compiler\n(but be warned that the feature may be removed or altered in the future).\n\"##,\n\nE0557: r##\"\nA feature attribute named a feature that has been removed.\n\nErroneous code example:\n\n```compile_fail,E0557\n#![feature(managed_boxes)] \/\/ error: feature has been removed\n```\n\nDelete the offending feature attribute.\n\"##,\n\nE0565: r##\"\nA literal was used in an attribute that doesn't support literals.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#![feature(attr_literals)]\n\n#[inline(\"always\")] \/\/ error: unsupported literal\npub fn something() {}\n```\n\nLiterals in attributes are new and largely unsupported. Work to support literals\nwhere appropriate is ongoing. Try using an unquoted name instead:\n\n```\n#[inline(always)]\npub fn something() {}\n```\n\"##,\n\nE0583: r##\"\nA file wasn't found for an out-of-line module.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\nmod file_that_doesnt_exist; \/\/ error: file not found for module\n\nfn main() {}\n```\n\nPlease be sure that a file corresponding to the module exists. If you\nwant to use a module named `file_that_doesnt_exist`, you need to have a file\nnamed `file_that_doesnt_exist.rs` or `file_that_doesnt_exist\/mod.rs` in the\nsame directory.\n\"##,\n\nE0585: r##\"\nA documentation comment that doesn't document anything was found.\n\nErroneous code example:\n\n```compile_fail,E0585\nfn main() {\n \/\/ The following doc comment will fail:\n \/\/\/ This is a useless doc comment!\n}\n```\n\nDocumentation comments need to be followed by items, including functions,\ntypes, modules, etc. 
Examples:\n\n```\n\/\/\/ I'm documenting the following struct:\nstruct Foo;\n\n\/\/\/ I'm documenting the following function:\nfn foo() {}\n```\n\"##,\n\nE0586: r##\"\nAn inclusive range was used with no end.\n\nErroneous code example:\n\n```compile_fail,E0586\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=]; \/\/ error: inclusive range was used with no end\n}\n```\n\nAn inclusive range needs an end in order to *include* it. If you just need a\nstart and no end, use a non-inclusive range (with `..`):\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..]; \/\/ ok!\n}\n```\n\nOr put an end to your inclusive range:\n\n```\nfn main() {\n let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];\n let x = &tmp[1..=3]; \/\/ ok!\n}\n```\n\"##,\n\nE0589: r##\"\nThe value of `N` that was specified for `repr(align(N))` was not a power\nof two, or was greater than 2^29.\n\n```compile_fail,E0589\n#[repr(align(15))] \/\/ error: invalid `repr(align)` attribute: not a power of two\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0658: r##\"\nAn unstable feature was used.\n\nErroneous code example:\n\n```compile_fail,E658\n#[repr(u128)] \/\/ error: use of unstable library feature 'repr128'\nenum Foo {\n Bar(u64),\n}\n```\n\nIf you're using a stable or a beta version of rustc, you won't be able to use\nany unstable features. In order to do so, please switch to a nightly version of\nrustc (by using rustup).\n\nIf you're using a nightly version of rustc, just add the corresponding feature\nto be able to use it:\n\n```\n#![feature(repr128)]\n\n#[repr(u128)] \/\/ ok!\nenum Foo {\n Bar(u64),\n}\n```\n\"##,\n\nE0633: r##\"\nThe `unwind` attribute was malformed.\n\nErroneous code example:\n\n```ignore (compile_fail not working here; see Issue #43707)\n#[unwind()] \/\/ error: expected one argument\npub extern fn something() {}\n\nfn main() {}\n```\n\nThe `#[unwind]` attribute should be used as follows:\n\n- `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function\n should abort the process if it attempts to unwind. This is the safer\n and preferred option.\n\n- `#[unwind(allowed)]` -- specifies that a non-Rust ABI function\n should be allowed to unwind. This can easily result in Undefined\n Behavior (UB), so be careful.\n\nNB. The default behavior here is \"allowed\", but this is unspecified\nand likely to change in the future.\n\n\"##,\n\nE0705: r##\"\nA `#![feature]` attribute was declared for a feature that is stable in\nthe current edition.\n\nErroneous code example:\n\n```ignore (limited to a warning during 2018 edition development)\n#![feature(rust_2018_preview)]\n#![feature(raw_identifiers)] \/\/ error: the feature `raw_identifiers` is\n \/\/ included in the Rust 2018 edition\n```\n\n\"##,\n\n}\n\nregister_diagnostics! {\n E0539, \/\/ incorrect meta item\n E0540, \/\/ multiple rustc_deprecated attributes\n E0542, \/\/ missing 'since'\n E0543, \/\/ missing 'reason'\n E0544, \/\/ multiple stability levels\n E0545, \/\/ incorrect 'issue'\n E0546, \/\/ missing 'feature'\n E0547, \/\/ missing 'issue'\n E0548, \/\/ incorrect stability attribute type\n E0549, \/\/ rustc_deprecated attribute must be paired with either stable or unstable attribute\n E0550, \/\/ multiple deprecated attributes\n E0551, \/\/ incorrect meta item\n E0553, \/\/ multiple rustc_const_unstable attributes\n E0555, \/\/ malformed feature attribute, expected #![feature(...)]\n E0556, \/\/ malformed feature, expected just one word\n E0584, \/\/ file for module `..` found at both .. 
and ..\n E0629, \/\/ missing 'feature' (rustc_const_unstable)\n E0630, \/\/ rustc_const_unstable attribute must be paired with stable\/unstable attribute\n E0693, \/\/ incorrect `repr(align)` attribute format\n E0694, \/\/ an unknown tool name found in scoped attributes\n E0703, \/\/ invalid ABI\n E0704, \/\/ incorrect visibility restriction\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add bench<commit_after>#![feature(test)]\n\nextern crate itertools;\nextern crate test;\n\nuse itertools::Itertools;\nuse test::{black_box, Bencher};\n\n#[bench]\nfn comb_replacement_n10_k5(b: &mut Bencher) {\n b.iter(|| {\n for i in (0..10).combinations_with_replacement(5) {\n black_box(i);\n }\n });\n}\n\n#[bench]\nfn comb_replacement_n5_k10(b: &mut Bencher) {\n b.iter(|| {\n for i in (0..5).combinations_with_replacement(10) {\n black_box(i);\n }\n });\n}\n\n#[bench]\nfn comb_replacement_n10_k10(b: &mut Bencher) {\n b.iter(|| {\n for i in (0..10).combinations_with_replacement(10) {\n black_box(i);\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>debuginfo: Added test case for region pointers pointing to stack values with basic type.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ xfail-win32 Broken because of LLVM bug: http:\/\/llvm.org\/bugs\/show_bug.cgi?id=16249\n\n\/\/ Caveats - gdb prints any 8-bit value (meaning rust i8 and u8 values)\n\/\/ as its numerical value along with its associated ASCII char, there\n\/\/ doesn't seem to be any way around this. 
Also, gdb doesn't know\n\/\/ about UTF-32 character encoding and will print a rust char as only\n\/\/ its numerical value.\n\n\/\/ compile-flags:-Z extra-debug-info\n\/\/ debugger:break zzz\n\/\/ debugger:run\n\/\/ debugger:finish\n\/\/ debugger:print *bool_ref\n\/\/ check:$1 = true\n\n\/\/ debugger:print *int_ref\n\/\/ check:$2 = -1\n\n\/\/ debugger:print *char_ref\n\/\/ check:$3 = 97\n\n\/\/ debugger:print *i8_ref\n\/\/ check:$4 = 68 'D'\n\n\/\/ debugger:print *i16_ref\n\/\/ check:$5 = -16\n\n\/\/ debugger:print *i32_ref\n\/\/ check:$6 = -32\n\n\/\/ debugger:print *i64_ref\n\/\/ check:$7 = -64\n\n\/\/ debugger:print *uint_ref\n\/\/ check:$8 = 1\n\n\/\/ debugger:print *u8_ref\n\/\/ check:$9 = 100 'd'\n\n\/\/ debugger:print *u16_ref\n\/\/ check:$10 = 16\n\n\/\/ debugger:print *u32_ref\n\/\/ check:$11 = 32\n\n\/\/ debugger:print *u64_ref\n\/\/ check:$12 = 64\n\n\/\/ debugger:print *float_ref\n\/\/ check:$13 = 1.5\n\n\/\/ debugger:print *f32_ref\n\/\/ check:$14 = 2.5\n\n\/\/ debugger:print *f64_ref\n\/\/ check:$15 = 3.5\n\nfn main() {\n let bool_val: bool = true;\n let bool_ref : &bool = &bool_val;\n\n let int_val: int = -1;\n let int_ref : &int = &int_val;\n\n let char_val: char = 'a';\n let char_ref : &char = &char_val;\n\n let i8_val: i8 = 68;\n let i8_ref : &i8 = &i8_val;\n\n let i16_val: i16 = -16;\n let i16_ref : &i16 = &i16_val;\n\n let i32_val: i32 = -32;\n let i32_ref : &i32 = &i32_val;\n\n let uint_val: i64 = -64;\n let i64_ref : &i64 = &uint_val;\n\n let uint_val: uint = 1;\n let uint_ref : &uint = &uint_val;\n\n let u8_val: u8 = 100;\n let u8_ref : &u8 = &u8_val;\n\n let u16_val: u16 = 16;\n let u16_ref : &u16 = &u16_val;\n\n let u32_val: u32 = 32;\n let u32_ref : &u32 = &u32_val;\n\n let u64_val: u64 = 64;\n let u64_ref : &u64 = &u64_val;\n\n let float_val: float = 1.5;\n let float_ref : &float = &float_val;\n\n let f32_val: f32 = 2.5;\n let f32_ref : &f32 = &f32_val;\n\n let f64_val: f64 = 3.5;\n let f64_ref : &f64 = &f64_val;\n zzz();\n}\n\nfn zzz() {()}<|endoftext|>"} {"text":"<commit_before><commit_msg>add new test case<commit_after>\/\/ Copyright 206 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ops::Deref;\n\n\/\/ Due to aggressive error message deduplication, we require 20 *different*\n\/\/ unsized types (even Path and [u8] are considered the \"same\").\n\ntrait Foo {}\ntrait Bar {}\ntrait FooBar {}\ntrait BarFoo {}\n\ntrait PathHelper1 {}\ntrait PathHelper2 {}\ntrait PathHelper3 {}\ntrait PathHelper4 {}\n\nstruct Path1(PathHelper1);\nstruct Path2(PathHelper2);\nstruct Path3(PathHelper3);\nstruct Path4(PathHelper4);\n\nenum E<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized> {\n \/\/ parameter\n VA(W), \/\/~ ERROR `W: std::marker::Sized` is not satisfied\n VB{x: X}, \/\/~ ERROR `X: std::marker::Sized` is not satisfied\n VC(isize, Y), \/\/~ ERROR `Y: std::marker::Sized` is not satisfied\n VD{u: isize, x: Z}, \/\/~ ERROR `Z: std::marker::Sized` is not satisfied\n\n \/\/ slice \/ str\n VE([u8]), \/\/~ ERROR `[u8]: std::marker::Sized` is not satisfied\n VF{x: str}, \/\/~ ERROR `str: std::marker::Sized` is not satisfied\n VG(isize, [f32]), \/\/~ ERROR `[f32]: std::marker::Sized` is not satisfied\n VH{u: isize, x: [u32]}, \/\/~ ERROR `[u32]: std::marker::Sized` is not satisfied\n\n \/\/ unsized struct\n VI(Path1), \/\/~ ERROR `PathHelper1 + 'static: std::marker::Sized` is not satisfied\n VJ{x: Path2}, \/\/~ ERROR `PathHelper2 + 'static: std::marker::Sized` is not satisfied\n VK(isize, Path3), \/\/~ ERROR `PathHelper3 + 'static: std::marker::Sized` is not satisfied\n VL{u: isize, x: Path4}, \/\/~ ERROR `PathHelper4 + 'static: std::marker::Sized` is not satisfied\n\n \/\/ plain trait\n VM(Foo), \/\/~ ERROR `Foo + 'static: std::marker::Sized` is not satisfied\n VN{x: Bar}, \/\/~ ERROR `Bar + 'static: std::marker::Sized` is not satisfied\n VO(isize, FooBar), \/\/~ ERROR `FooBar + 'static: std::marker::Sized` is not satisfied\n VP{u: isize, x: BarFoo}, \/\/~ ERROR `BarFoo + 'static: std::marker::Sized` is not satisfied\n\n \/\/ projected\n VQ(<&'static [i8] as Deref>::Target), \/\/~ ERROR `[i8]: std::marker::Sized` is not satisfied\n VR{x: <&'static [char] as Deref>::Target},\n \/\/~^ ERROR `[char]: std::marker::Sized` is not satisfied\n VS(isize, <&'static [f64] as Deref>::Target),\n \/\/~^ ERROR `[f64]: std::marker::Sized` is not satisfied\n VT{u: isize, x: <&'static [i32] as Deref>::Target},\n \/\/~^ ERROR `[i32]: std::marker::Sized` is not satisfied\n}\n\n\nfn main() { }\n\n<|endoftext|>"} {"text":"<commit_before>#![crate_name=\"redox\"]\n#![crate_type=\"rlib\"]\n#![feature(alloc)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(lang_items)]\n#![feature(vec_push_all)]\n#![feature(no_std)]\n#![no_std]\n\n\/\/ Yep I'm evil (this is a good idea!)\n#![warn(missing_docs)]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\npub use alloc::boxed::Box;\n\npub use collections::*;\npub use collections::string::ToString;\n\npub use common::random::*;\npub use common::time::*;\n\npub use externs::*;\n\npub use syscall::call::*;\n\npub use audio::wav::*;\npub use console::*;\npub use env::*;\npub use event::*;\npub use fs::file::*;\npub use graphics::bmp::*;\npub use orbital::*;\n\n\/\/\/ A module for common functionalities.\n\/\/\/ Primary functionality provided by std.\n#[path=\"..\/..\/src\/common\/src\/\"]\nmod common {\n pub mod debug;\n pub mod random;\n pub mod time;\n}\n\n#[path=\"..\/..\/src\/externs.rs\"]\npub mod externs;\n\n\/\/\/ A module for system 
calls\n#[path=\"..\/..\/src\/syscall\/src\"]\nmod syscall {\n \/\/\/ Calls\n pub mod call;\n \/\/\/ Common\n pub mod common;\n}\n\n\/\/\/ A module for audio\nmod audio {\n pub mod wav;\n}\n\n\/\/\/ A module for console functionality\n#[macro_use]\npub mod console;\n\/\/\/ A module for commands and enviroment\npub mod env;\n\/\/\/ A module for events\npub mod event;\n\/\/\/ A module for the filesystem\n#[path=\"fs\/lib.rs\"]\nmod fs;\n\/\/\/ Graphics support\nmod graphics {\n pub mod bmp;\n}\n\/\/\/ A module for window support\npub mod orbital;\n\n\/\/\/ A module for shell based functions\npub mod ion;\n\n\/* Extensions for String { *\/\n\/\/\/ Parse the string to a integer using a given radix\npub trait ToNum {\n fn to_num_radix(&self, radix: usize) -> usize;\n fn to_num_radix_signed(&self, radix: usize) -> isize;\n fn to_num(&self) -> usize;\n fn to_num_signed(&self) -> isize;\n}\n\nimpl ToNum for String {\n fn to_num_radix(&self, radix: usize) -> usize {\n if radix == 0 {\n return 0;\n }\n\n let mut num = 0;\n for c in self.chars() {\n let digit;\n if c >= '0' && c <= '9' {\n digit = c as usize - '0' as usize\n } else if c >= 'A' && c <= 'Z' {\n digit = c as usize - 'A' as usize + 10\n } else if c >= 'a' && c <= 'z' {\n digit = c as usize - 'a' as usize + 10\n } else {\n break;\n }\n\n if digit >= radix {\n break;\n }\n\n num *= radix;\n num += digit;\n }\n\n num\n }\n\n \/\/\/ Parse the string as a signed integer using a given radix\n fn to_num_radix_signed(&self, radix: usize) -> isize {\n if self.starts_with('-') {\n -(self[1 .. self.len()].to_string().to_num_radix(radix) as isize)\n } else {\n self.to_num_radix(radix) as isize\n }\n }\n\n \/\/\/ Parse it as a unsigned integer in base 10\n fn to_num(&self) -> usize {\n self.to_num_radix(10)\n }\n\n \/\/\/ Parse it as a signed integer in base 10\n fn to_num_signed(&self) -> isize {\n self.to_num_radix_signed(10)\n }\n}\n\/* } Extensions for String *\/\n<commit_msg>Small updates to Documentation<commit_after>\/\/! # The Redox Library\n\/\/!\n\/\/! The Redox Library contains a collection of commonly used low-level software\n\/\/! constructs to be used on top of the base operating system, including graphics \n\/\/! support and windowing, a basic filesystem, audio support, a simple console\n\/\/! 
with shell style functions, an event system, and environment argument support.\n\n#![crate_name=\"redox\"]\n#![crate_type=\"rlib\"]\n#![feature(alloc)]\n#![feature(asm)]\n#![feature(box_syntax)]\n#![feature(collections)]\n#![feature(core_slice_ext)]\n#![feature(core_str_ext)]\n#![feature(lang_items)]\n#![feature(vec_push_all)]\n#![feature(no_std)]\n#![no_std]\n\n\/\/ Yep I'm evil (this is a good idea!)\n#![warn(missing_docs)]\n\n#[macro_use]\nextern crate alloc;\n\n#[macro_use]\nextern crate collections;\n\npub use alloc::boxed::Box;\n\npub use collections::*;\npub use collections::string::ToString;\n\npub use common::random::*;\npub use common::time::*;\n\npub use externs::*;\n\npub use syscall::call::*;\n\npub use audio::wav::*;\npub use console::*;\npub use env::*;\npub use event::*;\npub use fs::file::*;\npub use graphics::bmp::*;\npub use orbital::*;\n\n\/\/\/ A module for common functionalities.\n\/\/\/ Primary functionality provided by std.\n#[path=\"..\/..\/src\/common\/src\/\"]\nmod common {\n pub mod debug;\n pub mod random;\n pub mod time;\n}\n\n\/\/\/ A module for necessary C and assembly constructs\n#[path=\"..\/..\/src\/externs.rs\"]\npub mod externs;\n\n\/\/\/ A module for system calls\n#[path=\"..\/..\/src\/syscall\/src\"]\nmod syscall {\n \/\/\/ Calls\n pub mod call;\n \/\/\/ Common\n pub mod common;\n}\n\n\/\/\/ A module for audio\nmod audio {\n pub mod wav;\n}\n\n\/\/\/ A module for console functionality\n#[macro_use]\npub mod console;\n\/\/\/ A module for commands and enviroment\npub mod env;\n\/\/\/ A module for events\npub mod event;\n\/\/\/ A module for the filesystem\n#[path=\"fs\/lib.rs\"]\nmod fs;\n\/\/\/ Graphics support\nmod graphics {\n pub mod bmp;\n}\n\/\/\/ A module for window support\npub mod orbital;\n\n\/\/\/ A module for shell based functions\npub mod ion;\n\n\/* Extensions for String { *\/\n\/\/\/ Parse the string to a integer using a given radix\npub trait ToNum {\n fn to_num_radix(&self, radix: usize) -> usize;\n fn to_num_radix_signed(&self, radix: usize) -> isize;\n fn to_num(&self) -> usize;\n fn to_num_signed(&self) -> isize;\n}\n\nimpl ToNum for String {\n fn to_num_radix(&self, radix: usize) -> usize {\n if radix == 0 {\n return 0;\n }\n\n let mut num = 0;\n for c in self.chars() {\n let digit;\n if c >= '0' && c <= '9' {\n digit = c as usize - '0' as usize\n } else if c >= 'A' && c <= 'Z' {\n digit = c as usize - 'A' as usize + 10\n } else if c >= 'a' && c <= 'z' {\n digit = c as usize - 'a' as usize + 10\n } else {\n break;\n }\n\n if digit >= radix {\n break;\n }\n\n num *= radix;\n num += digit;\n }\n\n num\n }\n\n \/\/\/ Parse the string as a signed integer using a given radix\n fn to_num_radix_signed(&self, radix: usize) -> isize {\n if self.starts_with('-') {\n -(self[1 .. 
self.len()].to_string().to_num_radix(radix) as isize)\n } else {\n self.to_num_radix(radix) as isize\n }\n }\n\n \/\/\/ Parse it as a unsigned integer in base 10\n fn to_num(&self) -> usize {\n self.to_num_radix(10)\n }\n\n \/\/\/ Parse it as a signed integer in base 10\n fn to_num_signed(&self) -> isize {\n self.to_num_radix_signed(10)\n }\n}\n\/* } Extensions for String *\/\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Counter struct and Iterator for it<commit_after>struct Counter {\n count: u32\n}\n\nimpl Counter {\n fn new() -> Counter {\n Counter { count: 0 }\n }\n}\n\nimpl Iterator for Counter {\n type Item = u32;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.count += 1;\n\n if self.count < 6 {\n Some(self.count)\n } else {\n None\n }\n }\n}<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::callback::ExceptionHandling::Report;\nuse dom::bindings::codegen::Bindings::EventBinding::EventMethods;\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, NodeCast};\nuse dom::bindings::js::{JS, JSRef, OptionalRootable};\nuse dom::bindings::trace::RootedVec;\nuse dom::eventtarget::{EventTarget, ListenerPhase};\nuse dom::event::{Event, EventPhase};\nuse dom::node::{Node, NodeHelpers};\nuse dom::virtualmethods::vtable_for;\n\n\/\/ See https:\/\/dom.spec.whatwg.org\/#concept-event-dispatch for the full dispatch algorithm\npub fn dispatch_event<'a, 'b>(target: JSRef<'a, EventTarget>,\n pseudo_target: Option<JSRef<'b, EventTarget>>,\n event: JSRef<Event>) -> bool {\n assert!(!event.dispatching());\n assert!(event.initialized());\n\n event.set_target(match pseudo_target {\n Some(pseudo_target) => pseudo_target,\n None => target.clone(),\n });\n event.set_dispatching(true);\n\n let type_ = event.Type();\n\n \/\/TODO: no chain if not participating in a tree\n let mut chain: RootedVec<JS<EventTarget>> = RootedVec::new();\n if let Some(target_node) = NodeCast::to_ref(target) {\n for ancestor in target_node.ancestors() {\n let ancestor = ancestor.root();\n let ancestor_target = EventTargetCast::from_ref(ancestor.r());\n chain.push(JS::from_rooted(ancestor_target))\n }\n }\n\n event.set_phase(EventPhase::Capturing);\n\n \/\/FIXME: The \"callback this value\" should be currentTarget\n\n \/* capturing *\/\n for cur_target in chain.as_slice().iter().rev() {\n let cur_target = cur_target.root();\n let stopped = match cur_target.r().get_listeners_for(type_.as_slice(), ListenerPhase::Capturing) {\n Some(listeners) => {\n event.set_current_target(cur_target.r());\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(cur_target.r(), event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n\n event.stop_propagation()\n }\n None => false\n };\n\n if stopped {\n break;\n }\n }\n\n \/* at target *\/\n if !event.stop_propagation() {\n event.set_phase(EventPhase::AtTarget);\n event.set_current_target(target.clone());\n\n let opt_listeners = target.get_listeners(type_.as_slice());\n for listeners in opt_listeners.iter() {\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(target, event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n }\n }\n\n \/* bubbling *\/\n if event.bubbles() && !event.stop_propagation() {\n 
event.set_phase(EventPhase::Bubbling);\n\n for cur_target in chain.iter() {\n let cur_target = cur_target.root();\n let stopped = match cur_target.r().get_listeners_for(type_.as_slice(), ListenerPhase::Bubbling) {\n Some(listeners) => {\n event.set_current_target(cur_target.r());\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(cur_target.r(), event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n\n event.stop_propagation()\n }\n None => false\n };\n if stopped {\n break;\n }\n }\n }\n\n \/* default action *\/\n let target = event.GetTarget().root();\n match target {\n Some(target) => {\n let node: Option<JSRef<Node>> = NodeCast::to_ref(target.r());\n match node {\n Some(node) => {\n let vtable = vtable_for(&node);\n vtable.handle_event(event);\n }\n None => {}\n }\n }\n None => {}\n }\n\n \/\/ Root ordering restrictions mean we need to unroot the chain entries\n \/\/ in the same order they were rooted.\n while chain.len() > 0 {\n let _ = chain.pop();\n }\n\n event.set_dispatching(false);\n event.set_phase(EventPhase::None);\n event.clear_current_target();\n\n !event.DefaultPrevented()\n}\n<commit_msg>Removed unessary poping in dispatch_event, fixes #5843<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse dom::bindings::callback::ExceptionHandling::Report;\nuse dom::bindings::codegen::Bindings::EventBinding::EventMethods;\nuse dom::bindings::codegen::InheritTypes::{EventTargetCast, NodeCast};\nuse dom::bindings::js::{JS, JSRef, OptionalRootable};\nuse dom::bindings::trace::RootedVec;\nuse dom::eventtarget::{EventTarget, ListenerPhase};\nuse dom::event::{Event, EventPhase};\nuse dom::node::{Node, NodeHelpers};\nuse dom::virtualmethods::vtable_for;\n\n\/\/ See https:\/\/dom.spec.whatwg.org\/#concept-event-dispatch for the full dispatch algorithm\npub fn dispatch_event<'a, 'b>(target: JSRef<'a, EventTarget>,\n pseudo_target: Option<JSRef<'b, EventTarget>>,\n event: JSRef<Event>) -> bool {\n assert!(!event.dispatching());\n assert!(event.initialized());\n\n event.set_target(match pseudo_target {\n Some(pseudo_target) => pseudo_target,\n None => target.clone(),\n });\n event.set_dispatching(true);\n\n let type_ = event.Type();\n\n \/\/TODO: no chain if not participating in a tree\n let mut chain: RootedVec<JS<EventTarget>> = RootedVec::new();\n if let Some(target_node) = NodeCast::to_ref(target) {\n for ancestor in target_node.ancestors() {\n let ancestor = ancestor.root();\n let ancestor_target = EventTargetCast::from_ref(ancestor.r());\n chain.push(JS::from_rooted(ancestor_target))\n }\n }\n\n event.set_phase(EventPhase::Capturing);\n\n \/\/FIXME: The \"callback this value\" should be currentTarget\n\n \/* capturing *\/\n for cur_target in chain.as_slice().iter().rev() {\n let cur_target = cur_target.root();\n let stopped = match cur_target.r().get_listeners_for(type_.as_slice(), ListenerPhase::Capturing) {\n Some(listeners) => {\n event.set_current_target(cur_target.r());\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(cur_target.r(), event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n\n event.stop_propagation()\n }\n None => false\n };\n\n if stopped {\n break;\n }\n }\n\n \/* at target *\/\n if !event.stop_propagation() {\n 
event.set_phase(EventPhase::AtTarget);\n event.set_current_target(target.clone());\n\n let opt_listeners = target.get_listeners(type_.as_slice());\n for listeners in opt_listeners.iter() {\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(target, event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n }\n }\n\n \/* bubbling *\/\n if event.bubbles() && !event.stop_propagation() {\n event.set_phase(EventPhase::Bubbling);\n\n for cur_target in chain.iter() {\n let cur_target = cur_target.root();\n let stopped = match cur_target.r().get_listeners_for(type_.as_slice(), ListenerPhase::Bubbling) {\n Some(listeners) => {\n event.set_current_target(cur_target.r());\n for listener in listeners.iter() {\n \/\/ Explicitly drop any exception on the floor.\n let _ = listener.HandleEvent_(cur_target.r(), event, Report);\n\n if event.stop_immediate() {\n break;\n }\n }\n\n event.stop_propagation()\n }\n None => false\n };\n if stopped {\n break;\n }\n }\n }\n\n \/* default action *\/\n let target = event.GetTarget().root();\n match target {\n Some(target) => {\n let node: Option<JSRef<Node>> = NodeCast::to_ref(target.r());\n match node {\n Some(node) => {\n let vtable = vtable_for(&node);\n vtable.handle_event(event);\n }\n None => {}\n }\n }\n None => {}\n }\n\n event.set_dispatching(false);\n event.set_phase(EventPhase::None);\n event.clear_current_target();\n\n !event.DefaultPrevented()\n}\n<|endoftext|>"} {"text":"<commit_before>use std::convert::Into;\n\nuse chrono::naive::datetime::NaiveDateTime;\nuse chrono::naive::time::NaiveTime;\nuse chrono::naive::date::NaiveDate;\nuse chrono::Datelike;\nuse chrono::Timelike;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse module_path::ModuleEntryPath;\n\n#[derive(Debug, Clone)]\npub struct DiaryId {\n name: String,\n year: i32,\n month: u32,\n day: u32,\n hour: u32,\n minute: u32,\n}\n\nimpl DiaryId {\n\n pub fn new(name: String, y: i32, m: u32, d: u32, h: u32, min: u32) -> DiaryId {\n DiaryId {\n name: name,\n year: y,\n month: m,\n day: d,\n hour: h,\n minute: min,\n }\n }\n\n pub fn from_datetime<DT: Datelike + Timelike>(diary_name: String, dt: DT) -> DiaryId {\n DiaryId::new(diary_name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n pub fn diary_name(&self) -> &String {\n &self.name\n }\n\n pub fn year(&self) -> i32 {\n self.year\n }\n\n pub fn month(&self) -> u32 {\n self.month\n }\n\n pub fn day(&self) -> u32 {\n self.day\n }\n\n pub fn hour(&self) -> u32 {\n self.hour\n }\n\n pub fn minute(&self) -> u32 {\n self.minute\n }\n\n}\n\nimpl IntoStoreId for DiaryId {\n\n fn into_storeid(self) -> StoreId {\n let s : String = self.into();\n ModuleEntryPath::new(s).into_storeid()\n }\n\n}\n\nimpl Into<String> for DiaryId {\n\n fn into(self) -> String {\n format!(\"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, self.hour, self.minute)\n }\n\n}\n\nimpl Into<NaiveDateTime> for DiaryId {\n\n fn into(self) -> NaiveDateTime {\n let d = NaiveDate::from_ymd(self.year, self.month, self.day);\n let t = NaiveTime::from_hms(self.hour, self.minute, 0);\n NaiveDateTime::new(d, t)\n }\n\n}\n\npub trait FromStoreId : Sized {\n\n fn from_storeid(&StoreId) -> Option<Self>;\n\n}\n\nuse std::path::Component;\n\nfn component_to_str<'a>(com: Component<'a>) -> Option<&'a str> {\n match com {\n Component::Normal(s) => Some(s),\n _ => None\n }.and_then(|s| s.to_str())\n}\n\nimpl FromStoreId for DiaryId {\n\n 
fn from_storeid(s: &StoreId) -> Option<DiaryId> {\n use std::str::FromStr;\n\n let mut cmps = s.components().rev();\n let (hour, minute) = match cmps.next().and_then(component_to_str)\n .and_then(|time| {\n let mut time = time.split(\":\");\n let hour = time.next().and_then(|s| FromStr::from_str(s).ok());\n let minute = time.next()\n .and_then(|s| s.split(\"~\").next())\n .and_then(|s| FromStr::from_str(s).ok());\n\n debug!(\"Hour = {:?}\", hour);\n debug!(\"Minute = {:?}\", minute);\n\n match (hour, minute) {\n (Some(h), Some(m)) => Some((h, m)),\n _ => None,\n }\n })\n {\n Some(s) => s,\n None => return None,\n };\n\n let day :Option<u32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let month :Option<u32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let year :Option<i32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let name = cmps.next().and_then(component_to_str).map(String::from);\n\n debug!(\"Day = {:?}\", day);\n debug!(\"Month = {:?}\", month);\n debug!(\"Year = {:?}\", year);\n debug!(\"Name = {:?}\", name);\n\n let day = if day.is_none() { return None; } else { day.unwrap() };\n let month = if month.is_none() { return None; } else { month.unwrap() };\n let year = if year.is_none() { return None; } else { year.unwrap() };\n let name = if name.is_none() { return None; } else { name.unwrap() };\n\n Some(DiaryId::new(name, year, month, day, hour, minute))\n }\n\n}\n\n<commit_msg>Add setters for DiaryId type<commit_after>use std::convert::Into;\n\nuse chrono::naive::datetime::NaiveDateTime;\nuse chrono::naive::time::NaiveTime;\nuse chrono::naive::date::NaiveDate;\nuse chrono::Datelike;\nuse chrono::Timelike;\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse module_path::ModuleEntryPath;\n\n#[derive(Debug, Clone)]\npub struct DiaryId {\n name: String,\n year: i32,\n month: u32,\n day: u32,\n hour: u32,\n minute: u32,\n}\n\nimpl DiaryId {\n\n pub fn new(name: String, y: i32, m: u32, d: u32, h: u32, min: u32) -> DiaryId {\n DiaryId {\n name: name,\n year: y,\n month: m,\n day: d,\n hour: h,\n minute: min,\n }\n }\n\n pub fn from_datetime<DT: Datelike + Timelike>(diary_name: String, dt: DT) -> DiaryId {\n DiaryId::new(diary_name, dt.year(), dt.month(), dt.day(), dt.hour(), dt.minute())\n }\n\n pub fn diary_name(&self) -> &String {\n &self.name\n }\n\n pub fn year(&self) -> i32 {\n self.year\n }\n\n pub fn month(&self) -> u32 {\n self.month\n }\n\n pub fn day(&self) -> u32 {\n self.day\n }\n\n pub fn hour(&self) -> u32 {\n self.hour\n }\n\n pub fn minute(&self) -> u32 {\n self.minute\n }\n\n pub fn with_diary_name(mut self, name: String) -> DiaryId {\n self.name = name;\n self\n }\n\n pub fn with_year(mut self, year: i32) -> DiaryId {\n self.year = year;\n self\n }\n\n pub fn with_month(mut self, month: u32) -> DiaryId {\n self.month = month;\n self\n }\n\n pub fn with_day(mut self, day: u32) -> DiaryId {\n self.day = day;\n self\n }\n\n pub fn with_hour(mut self, hour: u32) -> DiaryId {\n self.hour = hour;\n self\n }\n\n pub fn with_minute(mut self, minute: u32) -> DiaryId {\n self.minute = minute;\n self\n }\n\n}\n\nimpl IntoStoreId for DiaryId {\n\n fn into_storeid(self) -> StoreId {\n let s : String = self.into();\n ModuleEntryPath::new(s).into_storeid()\n }\n\n}\n\nimpl Into<String> for DiaryId {\n\n fn into(self) -> String {\n format!(\"{}\/{:0>4}\/{:0>2}\/{:0>2}\/{:0>2}:{:0>2}\",\n self.name, self.year, self.month, self.day, 
self.hour, self.minute)\n }\n\n}\n\nimpl Into<NaiveDateTime> for DiaryId {\n\n fn into(self) -> NaiveDateTime {\n let d = NaiveDate::from_ymd(self.year, self.month, self.day);\n let t = NaiveTime::from_hms(self.hour, self.minute, 0);\n NaiveDateTime::new(d, t)\n }\n\n}\n\npub trait FromStoreId : Sized {\n\n fn from_storeid(&StoreId) -> Option<Self>;\n\n}\n\nuse std::path::Component;\n\nfn component_to_str<'a>(com: Component<'a>) -> Option<&'a str> {\n match com {\n Component::Normal(s) => Some(s),\n _ => None\n }.and_then(|s| s.to_str())\n}\n\nimpl FromStoreId for DiaryId {\n\n fn from_storeid(s: &StoreId) -> Option<DiaryId> {\n use std::str::FromStr;\n\n let mut cmps = s.components().rev();\n let (hour, minute) = match cmps.next().and_then(component_to_str)\n .and_then(|time| {\n let mut time = time.split(\":\");\n let hour = time.next().and_then(|s| FromStr::from_str(s).ok());\n let minute = time.next()\n .and_then(|s| s.split(\"~\").next())\n .and_then(|s| FromStr::from_str(s).ok());\n\n debug!(\"Hour = {:?}\", hour);\n debug!(\"Minute = {:?}\", minute);\n\n match (hour, minute) {\n (Some(h), Some(m)) => Some((h, m)),\n _ => None,\n }\n })\n {\n Some(s) => s,\n None => return None,\n };\n\n let day :Option<u32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let month :Option<u32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let year :Option<i32> = cmps.next().and_then(component_to_str).and_then(|s| FromStr::from_str(s).ok());\n let name = cmps.next().and_then(component_to_str).map(String::from);\n\n debug!(\"Day = {:?}\", day);\n debug!(\"Month = {:?}\", month);\n debug!(\"Year = {:?}\", year);\n debug!(\"Name = {:?}\", name);\n\n let day = if day.is_none() { return None; } else { day.unwrap() };\n let month = if month.is_none() { return None; } else { month.unwrap() };\n let year = if year.is_none() { return None; } else { year.unwrap() };\n let name = if name.is_none() { return None; } else { name.unwrap() };\n\n Some(DiaryId::new(name, year, month, day, hour, minute))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse glob::Paths;\n\n\/\/\/ The Index into the Store\npub type StoreId = PathBuf;\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\ntrait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for PathBuf {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"\/{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(mut self) -> $crate::storeid::StoreId {\n self.0\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n paths: Paths,\n}\n\nimpl StoreIdIterator {\n\n pub fn new(paths: Paths) -> StoreIdIterator {\n StoreIdIterator {\n paths: paths,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.paths.next().and_then(|o| o.ok())\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(),\n \"\/test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<commit_msg>Fix: Add missing documentation for mod module_path<commit_after>use std::path::PathBuf;\nuse glob::Paths;\n\n\/\/\/ The Index into the Store\npub type StoreId = PathBuf;\n\n\/\/\/ This Trait allows you to convert various representations to a single one\n\/\/\/ suitable for usage in the Store\ntrait IntoStoreId {\n fn into_storeid(self) -> StoreId;\n}\n\nimpl IntoStoreId for PathBuf {\n fn into_storeid(self) -> StoreId {\n self\n }\n}\n\n\n#[macro_export]\nmacro_rules! 
module_entry_path_mod {\n ($name:expr, $version:expr) => (\n #[deny(missing_docs,\n missing_copy_implementations,\n trivial_casts, trivial_numeric_casts,\n unsafe_code,\n unstable_features,\n unused_import_braces, unused_qualifications,\n unused_imports)]\n \/\/\/ A helper module to create valid module entry paths\n pub mod module_path {\n use semver::Version;\n use std::convert::AsRef;\n use std::path::Path;\n use std::path::PathBuf;\n\n \/\/\/ A Struct giving you the ability to choose store entries assigned\n \/\/\/ to it.\n \/\/\/\n \/\/\/ It is created through a call to `new`.\n pub struct ModuleEntryPath(PathBuf);\n\n impl ModuleEntryPath {\n \/\/\/ Path has to be a valid UTF-8 string or this will panic!\n pub fn new<P: AsRef<Path>>(pa: P) -> ModuleEntryPath {\n let mut path = PathBuf::new();\n path.push(format!(\"\/{}\", $name));\n path.push(pa.as_ref().clone());\n let version = Version::parse($version).unwrap();\n let name = pa.as_ref().file_name().unwrap()\n .to_str().unwrap();\n path.set_file_name(format!(\"{}~{}\",\n name,\n version));\n ModuleEntryPath(path)\n }\n }\n\n impl $crate::storeid::IntoStoreId for ModuleEntryPath {\n fn into_storeid(mut self) -> $crate::storeid::StoreId {\n self.0\n }\n }\n }\n )\n}\n\npub struct StoreIdIterator {\n paths: Paths,\n}\n\nimpl StoreIdIterator {\n\n pub fn new(paths: Paths) -> StoreIdIterator {\n StoreIdIterator {\n paths: paths,\n }\n }\n\n}\n\nimpl Iterator for StoreIdIterator {\n type Item = StoreId;\n\n fn next(&mut self) -> Option<StoreId> {\n self.paths.next().and_then(|o| o.ok())\n }\n\n}\n\n#[cfg(test)]\nmod test {\n\n use storeid::IntoStoreId;\n\n module_entry_path_mod!(\"test\", \"0.2.0-alpha+leet1337\");\n\n #[test]\n fn correct_path() {\n let p = module_path::ModuleEntryPath::new(\"test\");\n\n assert_eq!(p.into_storeid().to_str().unwrap(),\n \"\/test\/test~0.2.0-alpha+leet1337\");\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add bindings\/templates.rs example<commit_after>use std::marker::PhantomData;\n\n\/\/ defined in accuse crate\ntrait Wraps<T> {\n fn from_wrapped(other: T) -> Self;\n fn into_wrapped(self) -> T;\n}\n\ntrait BridgeFrom<T> {\n fn bridge_from(other: T) -> Self;\n}\n\ntrait BridgeTo<T> {\n fn bridge(self) -> T;\n}\nimpl<T, U> BridgeTo<U> for T\nwhere\n U: BridgeFrom<T>,\n{\n fn bridge(self) -> U {\n U::bridge_from(self)\n }\n}\n\n\/\/ defined in the \"root\" crate, my_vec, wrapping our C++ library.\ncc_use!(<vector> in libstdc++, std::vector); \/\/ or maybe:\ncc_bind!(<vector> in libstdc++ as vector, std::vector);\npub use vector::*;\n\/\/ ^ `in libstdc++` is so we can grab relevant CFLAGS from metadata\n\/\/ ^ expands to:\n\/\/ extern crate libstdc_bind_my_vec;\n\/\/ use libstdc_bind_my_vec::vector;\n\nunsafe trait InstantiatesVector<T> {\n type Repr;\n const INSTANCE_HASH: usize;\n\n \/\/ maybe some methods can go here, if we determine \/ mark them as universal?\n}\n\n\/\/ defined in local instantiating crate\ncc_use!(my_vec::vector::{<i32>, <bool>});\n\/\/ ^ expands to:\n\/\/ use my_vec::InstantiatesVector; \/\/ we can infer it's generic, so we need the trait\n\/\/ extern crate libstdc_bind_local;\n\/\/ use libstdc_bind_local::{prelude::*, vector};\nuse remote::remote;\n\n\/\/ in libstdc_bind_local:\ntrait HasLocalVectorInstance {\n type Repr;\n}\nstruct LocalVectorReprI32;\n#[repr(transparent)]\nstruct LocalVector<T: HasLocalVectorInstance>(T::Repr);\n\nimpl HasLocalVectorInstance for i32 {\n type Repr = LocalVectorReprI32;\n}\nunsafe impl InstantiatesVector<i32> for LocalVector<i32> {\n 
type Repr = LocalVectorReprI32;\n const INSTANCE_HASH: usize = 0xdeadbeef;\n}\n\nimpl<T: InstantiatesVector<U>, U: HasLocalVectorInstance> BridgeFrom<T> for LocalVector<U> {\n fn bridge_from(mut other: T) -> Self {\n \/\/ TODO: static assert that INSTANCE_HASH, sizes are equal\n unsafe { std::ptr::read(&mut other as *mut T as *mut Self) }\n }\n}\n\n\/\/ in local:\nfn local(x: LocalVector<i32>) {\n remote(x.bridge())\n}\n\n\/\/ defined in remote instantiating crate\ncc_use!(my_vec::vector::{<i32>});\ntrait HasRemoteVectorInstance {\n type Repr;\n}\nstruct RemoteVectorReprI32;\n#[repr(transparent)]\nstruct RemoteVector<T: HasRemoteVectorInstance>(T::Repr);\n\nimpl HasRemoteVectorInstance for i32 {\n type Repr = RemoteVectorReprI32;\n}\nunsafe impl InstantiatesVector<i32> for RemoteVector<i32> {\n type Repr = RemoteVectorReprI32;\n const INSTANCE_HASH: usize = 0xdeadbeef;\n}\n\nimpl<T: InstantiatesVector<U>, U: HasRemoteVectorInstance> BridgeFrom<T> for RemoteVector<U> {\n fn bridge_from(mut other: T) -> Self {\n \/\/ TODO: static assert that INSTANCE_HASH, sizes are equal\n unsafe { std::ptr::read(&mut other as *mut T as *mut Self) }\n }\n}\n\nfn remote(_x: RemoteVector<i32>) {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\nE0454: r##\"\nA link name was given with an empty name. Erroneous code example:\n\n```compile_fail,E0454\n#[link(name = \"\")] extern {} \/\/ error: #[link(name = \"\")] given with empty name\n```\n\nThe rust compiler cannot link to an external library if you don't give it its\nname. Example:\n\n```\n#[link(name = \"some_lib\")] extern {} \/\/ ok!\n```\n\"##,\n\nE0455: r##\"\nLinking with `kind=framework` is only supported when targeting OS X,\nas frameworks are specific to that operating system.\n\nErroneous code example:\n\n```compile_fail,E0455\n#[link(name = \"FooCoreServices\", kind = \"framework\")] extern {}\n\/\/ OS used to compile is Linux for example\n```\n\nTo solve this error you can use conditional compilation:\n\n```\n#[cfg_attr(target=\"macos\", link(name = \"FooCoreServices\", kind = \"framework\"))]\nextern {}\n```\n\nSee more: https:\/\/doc.rust-lang.org\/book\/conditional-compilation.html\n\"##,\n\nE0458: r##\"\nAn unknown \"kind\" was specified for a link attribute. Erroneous code example:\n\n```compile_fail,E0458\n#[link(kind = \"wonderful_unicorn\")] extern {}\n\/\/ error: unknown kind: `wonderful_unicorn`\n```\n\nPlease specify a valid \"kind\" value, from one of the following:\n * static\n * dylib\n * framework\n\"##,\n\nE0459: r##\"\nA link was used without a name parameter. Erroneous code example:\n\n```compile_fail,E0459\n#[link(kind = \"dylib\")] extern {}\n\/\/ error: #[link(...)] specified without `name = \"foo\"`\n```\n\nPlease add the name parameter to allow the rust compiler to find the library\nyou want. Example:\n\n```\n#[link(kind = \"dylib\", name = \"some_lib\")] extern {} \/\/ ok!\n```\n\"##,\n\nE0463: r##\"\nA plugin\/crate was declared but cannot be found. 
Erroneous code example:\n\n```compile_fail,E0463\n#![feature(plugin)]\n#![plugin(cookie_monster)] \/\/ error: can't find crate for `cookie_monster`\nextern crate cake_is_a_lie; \/\/ error: can't find crate for `cake_is_a_lie`\n```\n\nYou need to link your code to the relevant crate in order to be able to use it\n(through Cargo or the `-L` option of rustc example). Plugins are crates as\nwell, and you link to them the same way.\n\"##,\n\nE0466: r##\"\nMacro import declarations were malformed.\n\nErroneous code examples:\n\n```compile_fail,E0466\n#[macro_use(a_macro(another_macro))] \/\/ error: invalid import declaration\nextern crate some_crate;\n\n#[macro_use(i_want = \"some_macros\")] \/\/ error: invalid import declaration\nextern crate another_crate;\n```\n\nThis is a syntax error at the level of attribute declarations. The proper\nsyntax for macro imports is the following:\n\n```ignore\n\/\/ In some_crate:\n#[macro_export]\nmacro_rules! get_tacos {\n ...\n}\n\n#[macro_export]\nmacro_rules! get_pimientos {\n ...\n}\n\n\/\/ In your crate:\n#[macro_use(get_tacos, get_pimientos)] \/\/ It imports `get_tacos` and\nextern crate some_crate; \/\/ `get_pimientos` macros from some_crate.\n```\n\nIf you would like to import all exported macros, write `macro_use` with no\narguments.\n\"##,\n\nE0467: r##\"\nMacro reexport declarations were empty or malformed.\n\nErroneous code examples:\n\n```compile_fail,E0467\n#[macro_reexport] \/\/ error: no macros listed for export\nextern crate macros_for_good;\n\n#[macro_reexport(fun_macro = \"foo\")] \/\/ error: not a macro identifier\nextern crate other_macros_for_good;\n```\n\nThis is a syntax error at the level of attribute declarations.\n\nCurrently, `macro_reexport` requires at least one macro name to be listed.\nUnlike `macro_use`, listing no names does not reexport all macros from the\ngiven crate.\n\nDecide which macros you would like to export and list them properly.\n\nThese are proper reexport declarations:\n\n```ignore\n#[macro_reexport(some_macro, another_macro)]\nextern crate macros_for_good;\n```\n\"##,\n\nE0468: r##\"\nA non-root module attempts to import macros from another crate.\n\nExample of erroneous code:\n\n```compile_fail,E0468\nmod foo {\n #[macro_use(helpful_macro)] \/\/ error: must be at crate root to import\n extern crate some_crate; \/\/ macros from another crate\n helpful_macro!(...)\n}\n```\n\nOnly `extern crate` imports at the crate root level are allowed to import\nmacros.\n\nEither move the macro import to crate root or do without the foreign macros.\nThis will work:\n\n```ignore\n#[macro_use(helpful_macro)]\nextern crate some_crate;\n\nmod foo {\n helpful_macro!(...)\n}\n```\n\"##,\n}\n\nregister_diagnostics! {\n E0456, \/\/ plugin `..` is not available for triple `..`\n E0457, \/\/ plugin `..` only found in rlib format, but must be available...\n E0514, \/\/ metadata version mismatch\n E0460, \/\/ found possibly newer version of crate `..`\n E0461, \/\/ couldn't find crate `..` with expected target triple ..\n E0462, \/\/ found staticlib `..` instead of rlib or dylib\n E0464, \/\/ multiple matching crates for `..`\n E0465, \/\/ multiple .. candidates for `..` found\n E0469, \/\/ imported macro not found\n E0470, \/\/ reexported macro not found\n E0519, \/\/ local crate and dependency have same (crate-name, disambiguator)\n E0523, \/\/ two dependencies have same (crate-name, disambiguator) but different SVH\n}\n<commit_msg>Add E0469 error explanation<commit_after>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\nE0454: r##\"\nA link name was given with an empty name. Erroneous code example:\n\n```compile_fail,E0454\n#[link(name = \"\")] extern {} \/\/ error: #[link(name = \"\")] given with empty name\n```\n\nThe rust compiler cannot link to an external library if you don't give it its\nname. Example:\n\n```\n#[link(name = \"some_lib\")] extern {} \/\/ ok!\n```\n\"##,\n\nE0455: r##\"\nLinking with `kind=framework` is only supported when targeting OS X,\nas frameworks are specific to that operating system.\n\nErroneous code example:\n\n```compile_fail,E0455\n#[link(name = \"FooCoreServices\", kind = \"framework\")] extern {}\n\/\/ OS used to compile is Linux for example\n```\n\nTo solve this error you can use conditional compilation:\n\n```\n#[cfg_attr(target=\"macos\", link(name = \"FooCoreServices\", kind = \"framework\"))]\nextern {}\n```\n\nSee more: https:\/\/doc.rust-lang.org\/book\/conditional-compilation.html\n\"##,\n\nE0458: r##\"\nAn unknown \"kind\" was specified for a link attribute. Erroneous code example:\n\n```compile_fail,E0458\n#[link(kind = \"wonderful_unicorn\")] extern {}\n\/\/ error: unknown kind: `wonderful_unicorn`\n```\n\nPlease specify a valid \"kind\" value, from one of the following:\n * static\n * dylib\n * framework\n\"##,\n\nE0459: r##\"\nA link was used without a name parameter. Erroneous code example:\n\n```compile_fail,E0459\n#[link(kind = \"dylib\")] extern {}\n\/\/ error: #[link(...)] specified without `name = \"foo\"`\n```\n\nPlease add the name parameter to allow the rust compiler to find the library\nyou want. Example:\n\n```\n#[link(kind = \"dylib\", name = \"some_lib\")] extern {} \/\/ ok!\n```\n\"##,\n\nE0463: r##\"\nA plugin\/crate was declared but cannot be found. Erroneous code example:\n\n```compile_fail,E0463\n#![feature(plugin)]\n#![plugin(cookie_monster)] \/\/ error: can't find crate for `cookie_monster`\nextern crate cake_is_a_lie; \/\/ error: can't find crate for `cake_is_a_lie`\n```\n\nYou need to link your code to the relevant crate in order to be able to use it\n(through Cargo or the `-L` option of rustc example). Plugins are crates as\nwell, and you link to them the same way.\n\"##,\n\nE0466: r##\"\nMacro import declarations were malformed.\n\nErroneous code examples:\n\n```compile_fail,E0466\n#[macro_use(a_macro(another_macro))] \/\/ error: invalid import declaration\nextern crate some_crate;\n\n#[macro_use(i_want = \"some_macros\")] \/\/ error: invalid import declaration\nextern crate another_crate;\n```\n\nThis is a syntax error at the level of attribute declarations. The proper\nsyntax for macro imports is the following:\n\n```ignore\n\/\/ In some_crate:\n#[macro_export]\nmacro_rules! get_tacos {\n ...\n}\n\n#[macro_export]\nmacro_rules! 
get_pimientos {\n ...\n}\n\n\/\/ In your crate:\n#[macro_use(get_tacos, get_pimientos)] \/\/ It imports `get_tacos` and\nextern crate some_crate; \/\/ `get_pimientos` macros from some_crate.\n```\n\nIf you would like to import all exported macros, write `macro_use` with no\narguments.\n\"##,\n\nE0467: r##\"\nMacro reexport declarations were empty or malformed.\n\nErroneous code examples:\n\n```compile_fail,E0467\n#[macro_reexport] \/\/ error: no macros listed for export\nextern crate macros_for_good;\n\n#[macro_reexport(fun_macro = \"foo\")] \/\/ error: not a macro identifier\nextern crate other_macros_for_good;\n```\n\nThis is a syntax error at the level of attribute declarations.\n\nCurrently, `macro_reexport` requires at least one macro name to be listed.\nUnlike `macro_use`, listing no names does not reexport all macros from the\ngiven crate.\n\nDecide which macros you would like to export and list them properly.\n\nThese are proper reexport declarations:\n\n```ignore\n#[macro_reexport(some_macro, another_macro)]\nextern crate macros_for_good;\n```\n\"##,\n\nE0468: r##\"\nA non-root module attempts to import macros from another crate.\n\nExample of erroneous code:\n\n```compile_fail,E0468\nmod foo {\n #[macro_use(helpful_macro)] \/\/ error: must be at crate root to import\n extern crate some_crate; \/\/ macros from another crate\n helpful_macro!(...)\n}\n```\n\nOnly `extern crate` imports at the crate root level are allowed to import\nmacros.\n\nEither move the macro import to crate root or do without the foreign macros.\nThis will work:\n\n```ignore\n#[macro_use(helpful_macro)]\nextern crate some_crate;\n\nmod foo {\n helpful_macro!(...)\n}\n```\n\"##,\n\nE0469: r##\"\nA macro listed for import was not found.\n\nErroneous code example:\n\n```compile_fail,E0469\n#[macro_use(drink, be_merry)] \/\/ error: imported macro not found\nextern crate collections;\n\nfn main() {\n \/\/ ...\n}\n```\n\nEither the listed macro is not contained in the imported crate, or it is not\nexported from the given crate.\n\nThis could be caused by a typo. Did you misspell the macro's name?\n\nDouble-check the names of the macros listed for import, and that the crate\nin question exports them.\n\nA working version would be:\n\n```ignore\n\/\/ In some_crate:\n#[macro_export]\nmacro_rules! eat {\n ...\n}\n\n#[macro_export]\nmacro_rules! drink {\n ...\n}\n\n\/\/ In your crate:\n#[macro_use(eat, drink)]\nextern crate some_crate; \/\/ok!\n```\n\"##,\n\n}\n\nregister_diagnostics! {\n E0456, \/\/ plugin `..` is not available for triple `..`\n E0457, \/\/ plugin `..` only found in rlib format, but must be available...\n E0514, \/\/ metadata version mismatch\n E0460, \/\/ found possibly newer version of crate `..`\n E0461, \/\/ couldn't find crate `..` with expected target triple ..\n E0462, \/\/ found staticlib `..` instead of rlib or dylib\n E0464, \/\/ multiple matching crates for `..`\n E0465, \/\/ multiple .. candidates for `..` found\n E0470, \/\/ reexported macro not found\n E0519, \/\/ local crate and dependency have same (crate-name, disambiguator)\n E0523, \/\/ two dependencies have same (crate-name, disambiguator) but different SVH\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for Inherent static methods can be called with a non-well-formed Self-type.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct Foo<'a, 'b: 'a>(&'a &'b ());\n\nimpl<'a, 'b> Foo<'a, 'b> {\n fn xmute(a: &'b ()) -> &'a () {\n unreachable!()\n }\n}\n\npub fn foo<'a, 'b>(u: &'b ()) -> &'a () {\n Foo::<'a, 'b>::xmute(u) \/\/~ ERROR lifetime bound not satisfied\n}\n\nfn main() {}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Check that we can use `-C lto` when linking against libraries that were\n\/\/ separately compiled.\n\n\/\/ aux-build:sepcomp_lib.rs\n\/\/ compile-flags: -C lto -g\n\/\/ no-prefer-dynamic\n\/\/ ignore-android FIXME #18800\n\nextern crate sepcomp_lib;\nuse sepcomp_lib::a::one;\nuse sepcomp_lib::b::two;\nuse sepcomp_lib::c::three;\n\nfn main() {\n assert_eq!(one(), 1);\n assert_eq!(two(), 2);\n assert_eq!(three(), 3);\n}\n<commit_msg>Enable run-pass\/sepcomp-lib-lto.rs on Android<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Check that we can use `-C lto` when linking against libraries that were\n\/\/ separately compiled.\n\n\/\/ aux-build:sepcomp_lib.rs\n\/\/ compile-flags: -C lto -g\n\/\/ no-prefer-dynamic\n\nextern crate sepcomp_lib;\nuse sepcomp_lib::a::one;\nuse sepcomp_lib::b::two;\nuse sepcomp_lib::c::three;\n\nfn main() {\n assert_eq!(one(), 1);\n assert_eq!(two(), 2);\n assert_eq!(three(), 3);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::rc::Rc;\nuse std::sync::Arc;\n\nuse base;\nuse monomorphize::Instance;\nuse rustc::hir::def_id::CrateNum;\nuse rustc::hir::def_id::{DefId, LOCAL_CRATE};\nuse rustc::middle::exported_symbols::SymbolExportLevel;\nuse rustc::session::config;\nuse rustc::ty::TyCtxt;\nuse rustc::ty::maps::Providers;\nuse rustc::util::nodemap::FxHashMap;\nuse rustc_allocator::ALLOCATOR_METHODS;\nuse syntax::attr;\n\npub type ExportedSymbols = FxHashMap<\n CrateNum,\n Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,\n>;\n\npub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {\n crates_export_threshold(&tcx.sess.crate_types.borrow())\n}\n\npub fn metadata_symbol_name(tcx: TyCtxt) -> String {\n format!(\"rust_metadata_{}_{}\",\n tcx.crate_name(LOCAL_CRATE),\n tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())\n}\n\nfn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {\n match crate_type {\n config::CrateTypeExecutable |\n config::CrateTypeStaticlib |\n config::CrateTypeProcMacro |\n config::CrateTypeCdylib => SymbolExportLevel::C,\n config::CrateTypeRlib |\n config::CrateTypeDylib => SymbolExportLevel::Rust,\n }\n}\n\npub fn crates_export_threshold(crate_types: &[config::CrateType])\n -> SymbolExportLevel {\n if crate_types.iter().any(|&crate_type| {\n crate_export_threshold(crate_type) == SymbolExportLevel::Rust\n }) {\n SymbolExportLevel::Rust\n } else {\n SymbolExportLevel::C\n }\n}\n\npub fn provide_local(providers: &mut Providers) {\n providers.exported_symbol_ids = |tcx, cnum| {\n let export_threshold = threshold(tcx);\n Rc::new(tcx.exported_symbols(cnum)\n .iter()\n .filter_map(|&(_, id, level)| {\n id.and_then(|id| {\n if level.is_below_threshold(export_threshold) {\n Some(id)\n } else {\n None\n }\n })\n })\n .collect())\n };\n\n providers.is_exported_symbol = |tcx, id| {\n tcx.exported_symbol_ids(id.krate).contains(&id)\n };\n\n providers.exported_symbols = |tcx, cnum| {\n assert_eq!(cnum, LOCAL_CRATE);\n let local_exported_symbols = base::find_exported_symbols(tcx);\n\n let mut local_crate: Vec<_> = local_exported_symbols\n .iter()\n .map(|&node_id| {\n tcx.hir.local_def_id(node_id)\n })\n .map(|def_id| {\n let name = tcx.symbol_name(Instance::mono(tcx, def_id));\n let export_level = export_level(tcx, def_id);\n debug!(\"EXPORTED SYMBOL (local): {} ({:?})\", name, export_level);\n (str::to_owned(&name), Some(def_id), export_level)\n })\n .collect();\n\n if let Some(_) = *tcx.sess.entry_fn.borrow() {\n local_crate.push((\"main\".to_string(),\n None,\n SymbolExportLevel::C));\n }\n\n if tcx.sess.allocator_kind.get().is_some() {\n for method in ALLOCATOR_METHODS {\n local_crate.push((format!(\"__rust_{}\", method.name),\n None,\n SymbolExportLevel::Rust));\n }\n }\n\n if let Some(id) = tcx.sess.derive_registrar_fn.get() {\n let def_id = tcx.hir.local_def_id(id);\n let idx = def_id.index;\n let disambiguator = tcx.sess.local_crate_disambiguator();\n let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);\n local_crate.push((registrar, Some(def_id), SymbolExportLevel::C));\n }\n\n if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {\n local_crate.push((metadata_symbol_name(tcx),\n None,\n SymbolExportLevel::Rust));\n }\n Arc::new(local_crate)\n };\n}\n\npub fn provide_extern(providers: &mut Providers) {\n providers.exported_symbols = |tcx, cnum| {\n \/\/ If this crate is a plugin and\/or a custom derive crate, then\n \/\/ 
we're not even going to link those in so we skip those crates.\n if tcx.plugin_registrar_fn(cnum).is_some() ||\n tcx.derive_registrar_fn(cnum).is_some() {\n return Arc::new(Vec::new())\n }\n\n \/\/ Check to see if this crate is a \"special runtime crate\". These\n \/\/ crates, implementation details of the standard library, typically\n \/\/ have a bunch of `pub extern` and `#[no_mangle]` functions as the\n \/\/ ABI between them. We don't want their symbols to have a `C`\n \/\/ export level, however, as they're just implementation details.\n \/\/ Down below we'll hardwire all of the symbols to the `Rust` export\n \/\/ level instead.\n let special_runtime_crate =\n tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);\n\n let crate_exports = tcx\n .exported_symbol_ids(cnum)\n .iter()\n .map(|&def_id| {\n let name = tcx.symbol_name(Instance::mono(tcx, def_id));\n let export_level = if special_runtime_crate {\n \/\/ We can probably do better here by just ensuring that\n \/\/ it has hidden visibility rather than public\n \/\/ visibility, as this is primarily here to ensure it's\n \/\/ not stripped during LTO.\n \/\/\n \/\/ In general though we won't link right if these\n \/\/ symbols are stripped, and LTO currently strips them.\n if &*name == \"rust_eh_personality\" ||\n &*name == \"rust_eh_register_frames\" ||\n &*name == \"rust_eh_unregister_frames\" {\n SymbolExportLevel::C\n } else {\n SymbolExportLevel::Rust\n }\n } else {\n export_level(tcx, def_id)\n };\n debug!(\"EXPORTED SYMBOL (re-export): {} ({:?})\", name, export_level);\n (str::to_owned(&name), Some(def_id), export_level)\n })\n .collect();\n\n Arc::new(crate_exports)\n };\n}\n\nfn export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {\n \/\/ We export anything that's not mangled at the \"C\" layer as it probably has\n \/\/ to do with ABI concerns. We do not, however, apply such treatment to\n \/\/ special symbols in the standard library for various plumbing between\n \/\/ core\/std\/allocators\/etc. For example symbols used to hook up allocation\n \/\/ are not considered for export\n let is_extern = tcx.contains_extern_indicator(sym_def_id);\n let std_internal = attr::contains_name(&tcx.get_attrs(sym_def_id),\n \"rustc_std_internal_symbol\");\n if is_extern && !std_internal {\n SymbolExportLevel::C\n } else {\n SymbolExportLevel::Rust\n }\n}\n<commit_msg>incr.comp.: Sort exported symbols list in order to achieve stable incr. comp. hash.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::rc::Rc;\nuse std::sync::Arc;\n\nuse base;\nuse monomorphize::Instance;\nuse rustc::hir::def_id::CrateNum;\nuse rustc::hir::def_id::{DefId, LOCAL_CRATE};\nuse rustc::middle::exported_symbols::SymbolExportLevel;\nuse rustc::session::config;\nuse rustc::ty::TyCtxt;\nuse rustc::ty::maps::Providers;\nuse rustc::util::nodemap::FxHashMap;\nuse rustc_allocator::ALLOCATOR_METHODS;\nuse syntax::attr;\n\npub type ExportedSymbols = FxHashMap<\n CrateNum,\n Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,\n>;\n\npub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {\n crates_export_threshold(&tcx.sess.crate_types.borrow())\n}\n\npub fn metadata_symbol_name(tcx: TyCtxt) -> String {\n format!(\"rust_metadata_{}_{}\",\n tcx.crate_name(LOCAL_CRATE),\n tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())\n}\n\nfn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {\n match crate_type {\n config::CrateTypeExecutable |\n config::CrateTypeStaticlib |\n config::CrateTypeProcMacro |\n config::CrateTypeCdylib => SymbolExportLevel::C,\n config::CrateTypeRlib |\n config::CrateTypeDylib => SymbolExportLevel::Rust,\n }\n}\n\npub fn crates_export_threshold(crate_types: &[config::CrateType])\n -> SymbolExportLevel {\n if crate_types.iter().any(|&crate_type| {\n crate_export_threshold(crate_type) == SymbolExportLevel::Rust\n }) {\n SymbolExportLevel::Rust\n } else {\n SymbolExportLevel::C\n }\n}\n\npub fn provide_local(providers: &mut Providers) {\n providers.exported_symbol_ids = |tcx, cnum| {\n let export_threshold = threshold(tcx);\n Rc::new(tcx.exported_symbols(cnum)\n .iter()\n .filter_map(|&(_, id, level)| {\n id.and_then(|id| {\n if level.is_below_threshold(export_threshold) {\n Some(id)\n } else {\n None\n }\n })\n })\n .collect())\n };\n\n providers.is_exported_symbol = |tcx, id| {\n tcx.exported_symbol_ids(id.krate).contains(&id)\n };\n\n providers.exported_symbols = |tcx, cnum| {\n assert_eq!(cnum, LOCAL_CRATE);\n let local_exported_symbols = base::find_exported_symbols(tcx);\n\n let mut local_crate: Vec<_> = local_exported_symbols\n .iter()\n .map(|&node_id| {\n tcx.hir.local_def_id(node_id)\n })\n .map(|def_id| {\n let name = tcx.symbol_name(Instance::mono(tcx, def_id));\n let export_level = export_level(tcx, def_id);\n debug!(\"EXPORTED SYMBOL (local): {} ({:?})\", name, export_level);\n (str::to_owned(&name), Some(def_id), export_level)\n })\n .collect();\n\n if let Some(_) = *tcx.sess.entry_fn.borrow() {\n local_crate.push((\"main\".to_string(),\n None,\n SymbolExportLevel::C));\n }\n\n if tcx.sess.allocator_kind.get().is_some() {\n for method in ALLOCATOR_METHODS {\n local_crate.push((format!(\"__rust_{}\", method.name),\n None,\n SymbolExportLevel::Rust));\n }\n }\n\n if let Some(id) = tcx.sess.derive_registrar_fn.get() {\n let def_id = tcx.hir.local_def_id(id);\n let idx = def_id.index;\n let disambiguator = tcx.sess.local_crate_disambiguator();\n let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);\n local_crate.push((registrar, Some(def_id), SymbolExportLevel::C));\n }\n\n if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {\n local_crate.push((metadata_symbol_name(tcx),\n None,\n SymbolExportLevel::Rust));\n }\n\n \/\/ Sort so we get a stable incr. comp. 
hash.\n local_crate.sort_unstable_by(|&(ref name1, ..), &(ref name2, ..)| {\n name1.cmp(name2)\n });\n\n Arc::new(local_crate)\n };\n}\n\npub fn provide_extern(providers: &mut Providers) {\n providers.exported_symbols = |tcx, cnum| {\n \/\/ If this crate is a plugin and\/or a custom derive crate, then\n \/\/ we're not even going to link those in so we skip those crates.\n if tcx.plugin_registrar_fn(cnum).is_some() ||\n tcx.derive_registrar_fn(cnum).is_some() {\n return Arc::new(Vec::new())\n }\n\n \/\/ Check to see if this crate is a \"special runtime crate\". These\n \/\/ crates, implementation details of the standard library, typically\n \/\/ have a bunch of `pub extern` and `#[no_mangle]` functions as the\n \/\/ ABI between them. We don't want their symbols to have a `C`\n \/\/ export level, however, as they're just implementation details.\n \/\/ Down below we'll hardwire all of the symbols to the `Rust` export\n \/\/ level instead.\n let special_runtime_crate =\n tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);\n\n let mut crate_exports: Vec<_> = tcx\n .exported_symbol_ids(cnum)\n .iter()\n .map(|&def_id| {\n let name = tcx.symbol_name(Instance::mono(tcx, def_id));\n let export_level = if special_runtime_crate {\n \/\/ We can probably do better here by just ensuring that\n \/\/ it has hidden visibility rather than public\n \/\/ visibility, as this is primarily here to ensure it's\n \/\/ not stripped during LTO.\n \/\/\n \/\/ In general though we won't link right if these\n \/\/ symbols are stripped, and LTO currently strips them.\n if &*name == \"rust_eh_personality\" ||\n &*name == \"rust_eh_register_frames\" ||\n &*name == \"rust_eh_unregister_frames\" {\n SymbolExportLevel::C\n } else {\n SymbolExportLevel::Rust\n }\n } else {\n export_level(tcx, def_id)\n };\n debug!(\"EXPORTED SYMBOL (re-export): {} ({:?})\", name, export_level);\n (str::to_owned(&name), Some(def_id), export_level)\n })\n .collect();\n\n \/\/ Sort so we get a stable incr. comp. hash.\n crate_exports.sort_unstable_by(|&(ref name1, ..), &(ref name2, ..)| {\n name1.cmp(name2)\n });\n\n Arc::new(crate_exports)\n };\n}\n\nfn export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {\n \/\/ We export anything that's not mangled at the \"C\" layer as it probably has\n \/\/ to do with ABI concerns. We do not, however, apply such treatment to\n \/\/ special symbols in the standard library for various plumbing between\n \/\/ core\/std\/allocators\/etc. For example symbols used to hook up allocation\n \/\/ are not considered for export\n let is_extern = tcx.contains_extern_indicator(sym_def_id);\n let std_internal = attr::contains_name(&tcx.get_attrs(sym_def_id),\n \"rustc_std_internal_symbol\");\n if is_extern && !std_internal {\n SymbolExportLevel::C\n } else {\n SymbolExportLevel::Rust\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>languages\/rust\/learn-rust\/3-chapter\/7-code.rs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for let expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\nstruct Foo;\n\n\/\/ Change Method Name -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_name() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_name2() { }\n}\n\n\/\/ Change Method Body -----------------------------------------------------------\n\/\/\n\/\/ This should affect the method itself, but not the impl.\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_body() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_body() {\n println!(\"Hello, world!\");\n }\n}\n\n\/\/ Change Method Privacy -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_privacy() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n fn method_privacy() { }\n}\n\n\/\/ Change Method Selfness -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_selfness() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_selfness(&self) { }\n}\n\n\/\/ Change Method Selfmutness -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_selfmutness(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n 
#[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_selfmutness(&mut self) { }\n}\n\n<commit_msg>ICH: Add test cases for inherent impls.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for let expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\nstruct Foo;\n\n\/\/ Change Method Name -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_name() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_name2() { }\n}\n\n\/\/ Change Method Body -----------------------------------------------------------\n\/\/\n\/\/ This should affect the method itself, but not the impl.\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_body() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_body() {\n println!(\"Hello, world!\");\n }\n}\n\n\/\/ Change Method Privacy -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_privacy() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n fn method_privacy() { }\n}\n\n\/\/ Change Method Selfness -----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_selfness() { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn 
method_selfness(&self) { }\n}\n\n\/\/ Change Method Selfmutness ---------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn method_selfmutness(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn method_selfmutness(&mut self) { }\n}\n\n\n\n\/\/ Add Method To Impl ----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_method_to_impl1(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_clean(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_method_to_impl1(&self) { }\n\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_method_to_impl2(&self) { }\n}\n\n\n\n\/\/ Add Method Parameter --------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_method_parameter(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_method_parameter(&self, _: i32) { }\n}\n\n\n\n\/\/ Change Method Parameter Name ------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn change_method_parameter_name(&self, a: i64) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn change_method_parameter_name(&self, b: i64) { }\n}\n\n\n\n\/\/ Change Method Return Type ---------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn change_method_return_type(&self) -> u16 { 0 }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn change_method_return_type(&self) -> u8 { 0 }\n}\n\n\n\n\/\/ Make Method #[inline] -------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn make_method_inline(&self) -> u8 { 0 }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", 
cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n #[inline]\n pub fn make_method_inline(&self) -> u8 { 0 }\n}\n\n\n\n\/\/ Change order of parameters -------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn change_method_parameter_order(&self, a: i64, b: i64) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn change_method_parameter_order(&self, b: i64, a: i64) { }\n}\n\n\n\n\/\/ Make method unsafe ----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn make_method_unsafe(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub unsafe fn make_method_unsafe(&self) { }\n}\n\n\n\n\/\/ Make method extern ----------------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn make_method_extern(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub extern fn make_method_extern(&self) { }\n}\n\n\n\n\/\/ Change method calling convention --------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub extern \"C\" fn change_method_calling_convention(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub extern \"system\" fn change_method_calling_convention(&self) { }\n}\n\n\n\n\/\/ Add Lifetime Parameter to Method --------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_lifetime_parameter_to_method(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_lifetime_parameter_to_method<'a>(&self) { }\n}\n\n\n\n\/\/ Add Type Parameter 
To Method ------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_type_parameter_to_method(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_type_parameter_to_method<T>(&self) { }\n}\n\n\n\n\/\/ Add Lifetime Bound to Lifetime Parameter of Method --------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_lifetime_bound_to_lifetime_param_of_method<'a, 'b>(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_lifetime_bound_to_lifetime_param_of_method<'a, 'b: 'a>(&self) { }\n}\n\n\n\n\/\/ Add Lifetime Bound to Type Parameter of Method ------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_lifetime_bound_to_type_param_of_method<'a, T>(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_lifetime_bound_to_type_param_of_method<'a, T: 'a>(&self) { }\n}\n\n\n\n\/\/ Add Trait Bound to Type Parameter of Method ------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_trait_bound_to_type_param_of_method<T>(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_trait_bound_to_type_param_of_method<T: Clone>(&self) { }\n}\n\n\n\n\/\/ Add #[no_mangle] to Method --------------------------------------------------\n#[cfg(cfail1)]\nimpl Foo {\n pub fn add_no_mangle_to_method(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_clean(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Foo {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n #[no_mangle]\n pub fn add_no_mangle_to_method(&self) { }\n}\n\n\n\nstruct Bar<T>(T);\n\n\/\/ Add Type Parameter To Impl --------------------------------------------------\n#[cfg(cfail1)]\nimpl Bar<u32> {\n pub fn add_type_parameter_to_impl(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", 
cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl<T> Bar<T> {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_type_parameter_to_impl(&self) { }\n}\n\n\n\n\/\/ Change Self Type of Impl ----------------------------------------------------\n#[cfg(cfail1)]\nimpl Bar<u32> {\n pub fn change_impl_self_type(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl Bar<u64> {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn change_impl_self_type(&self) { }\n}\n\n\n\n\/\/ Add Lifetime Bound to Impl --------------------------------------------------\n#[cfg(cfail1)]\nimpl<T> Bar<T> {\n pub fn add_lifetime_bound_to_impl_parameter(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl<T: 'static> Bar<T> {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_lifetime_bound_to_impl_parameter(&self) { }\n}\n\n\n\n\/\/ Add Trait Bound to Impl Parameter -------------------------------------------\n#[cfg(cfail1)]\nimpl<T> Bar<T> {\n pub fn add_trait_bound_to_impl_parameter(&self) { }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\nimpl<T: Clone> Bar<T> {\n #[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n #[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n #[rustc_metadata_dirty(cfg=\"cfail2\")]\n #[rustc_metadata_clean(cfg=\"cfail3\")]\n pub fn add_trait_bound_to_impl_parameter(&self) { }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] lib\/entry\/tag: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>dynamic traits from rustbook<commit_after>\/\/ error[E0277]: the size for values of type `(dyn Vegetable + 'static)` cannot be known at compilation time\n\/\/ --> \/Users\/juliankulesh\/Experiments\/coursera_learning\/rust\/programming_rust_notes\/dynamic_traits.rs:4:5\n\/\/ |\n\/\/ 4 | veggies: Vec<Vegetable> \/\/ error: `Vegetable` does not have\n\/\/ | ^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time\n\/\/ |\n\/\/ = help: the trait `std::marker::Sized` is not implemented for `(dyn Vegetable + 'static)`\n\/\/ = note: to learn more, visit <https:\/\/doc.rust-lang.org\/book\/second-edition\/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>\n\/\/ = note: required by `std::vec::Vec`\ntrait Vegetable {}\n\nstruct Salad {\n veggies: Vec<Vegetable>\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use std::fs::File;\nuse std::io::{Read, Write};\n\nuse system::scheme::{Packet, Scheme};\n\nextern crate system;\n\nstruct ExampleScheme;\n\nimpl Scheme for ExampleScheme {\n\n}\n\nfn main() {\n \/\/In order to handle example:, we create :example\n let mut scheme = 
File::create(\":example\").unwrap();\n loop {\n let mut packet = Packet::default();\n if scheme.read(&mut packet).unwrap() == 0 {\n panic!(\"Unexpected EOF\");\n }\n\n println!(\"Received: {:?}\", packet);\n\n packet.a = 0;\n scheme.write(&packet).unwrap();\n }\n}\n<commit_msg>WIP: Implement tests for syscalls in example scheme<commit_after>use std::fs::File;\nuse std::io::{Read, Write};\n\nuse system::error::{Error, Result, ENOENT, EBADF};\nuse system::scheme::{Packet, Scheme};\n\nextern crate system;\n\nstruct ExampleScheme;\n\nimpl Scheme for ExampleScheme {\n fn open(&mut self, path: &str, flags: usize, mode: usize) -> Result {\n println!(\"open {:X} = {}, {:X}, {:X}\", path.as_ptr() as usize, path, flags, mode);\n Ok(0)\n }\n\n #[allow(unused_variables)]\n fn unlink(&mut self, path: &str) -> Result {\n println!(\"unlink {}\", path);\n Err(Error::new(ENOENT))\n }\n\n #[allow(unused_variables)]\n fn mkdir(&mut self, path: &str, mode: usize) -> Result {\n println!(\"mkdir {}, {:X}\", path, mode);\n Err(Error::new(ENOENT))\n }\n\n \/* Resource operations *\/\n\n #[allow(unused_variables)]\n fn read(&mut self, id: usize, buf: &mut [u8]) -> Result {\n println!(\"read {}, {:X}, {}\", id, buf.as_mut_ptr() as usize, buf.len());\n Err(Error::new(EBADF))\n }\n\n #[allow(unused_variables)]\n fn write(&mut self, id: usize, buf: &[u8]) -> Result {\n println!(\"write {}, {:X}, {}\", id, buf.as_ptr() as usize, buf.len());\n Err(Error::new(EBADF))\n }\n\n #[allow(unused_variables)]\n fn seek(&mut self, id: usize, pos: usize, whence: usize) -> Result {\n println!(\"seek {}, {}, {}\", id, pos, whence);\n Err(Error::new(EBADF))\n }\n\n #[allow(unused_variables)]\n fn sync(&mut self, id: usize) -> Result {\n println!(\"sync {}\", id);\n Err(Error::new(EBADF))\n }\n\n #[allow(unused_variables)]\n fn truncate(&mut self, id: usize, len: usize) -> Result {\n println!(\"truncate {}, {}\", id, len);\n Err(Error::new(EBADF))\n }\n}\n\nfn main() {\n \/\/In order to handle example:, we create :example\n let mut scheme = ExampleScheme;\n let mut socket = File::create(\":example\").unwrap();\n loop {\n let mut packet = Packet::default();\n if socket.read(&mut packet).unwrap() == 0 {\n panic!(\"Unexpected EOF\");\n }\n println!(\"Recv {:?}\", packet);\n\n scheme.handle(&mut packet);\n\n socket.write(&packet).unwrap();\n println!(\"Sent {:?}\", packet);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use super::{Drawable, BoundingBox, MeasureMode};\nuse ::props::Color;\nuse ::paint::{Canvas, Point, Rect};\nuse ::platform::Context;\n\npub type MathBackgroundReader<T> = fn (&T) -> &Color;\n\npub struct Wrapper<'a, T: 'a, U: Drawable> {\n wrapped: Option<U>,\n props: &'a T,\n math_background_reader: MathBackgroundReader<T>,\n\n bounding_box: BoundingBox\n}\n\nimpl<'a, T, U: Drawable> Drawable for Wrapper<'a, T, U> {\n fn draw(&self, canvas: &Canvas, pen_pos: &Point) {\n canvas.draw_rect(\n pen_pos,\n self.bounding_box.rect(),\n (self.math_background_reader)(self.props)\n )\n }\n\n fn calculate(&mut self, context: &Context, width: f32, width_mode: &MeasureMode, height: f32,\n height_mode: &MeasureMode) {\n if let Some(val) = self.wrapped.as_mut() {\n val.calculate(context, width, width_mode, height, height_mode);\n }\n self.bounding_box = BoundingBox {\n rect: if let Some(ref val) = self.wrapped {\n val.bounding_box().rect().clone()\n } else {\n Rect::new(0., 0.)\n },\n baseline: if let Some(ref val) = self.wrapped {\n val.bounding_box().baseline()\n } else {\n 0.\n },\n axis: if let Some(ref val) = self.wrapped {\n 
val.bounding_box().axis()\n } else {\n 0.\n },\n }\n }\n\n fn bounding_box(&self) -> &BoundingBox {\n &self.bounding_box\n }\n}\n\nimpl<'a, T, U: Drawable> Wrapper<'a, T, U> {\n pub fn new(props: &'a T, math_background_reader: MathBackgroundReader<T>)\n -> Wrapper<'a, T, U> {\n Wrapper {\n wrapped: None,\n props,\n math_background_reader,\n bounding_box: BoundingBox::default()\n }\n }\n\n pub fn wrap(&mut self, drawable: U) {\n self.wrapped = Some(drawable);\n }\n}<commit_msg>Fix wrapped elements not drawn<commit_after>use super::{Drawable, BoundingBox, MeasureMode};\nuse ::props::Color;\nuse ::paint::{Canvas, Point, Rect};\nuse ::platform::Context;\n\npub type MathBackgroundReader<T> = fn (&T) -> &Color;\n\npub struct Wrapper<'a, T: 'a, U: Drawable> {\n wrapped: Option<U>,\n props: &'a T,\n math_background_reader: MathBackgroundReader<T>,\n\n bounding_box: BoundingBox\n}\n\nimpl<'a, T, U: Drawable> Drawable for Wrapper<'a, T, U> {\n fn draw(&self, canvas: &Canvas, pen_pos: &Point) {\n canvas.draw_rect(\n pen_pos,\n self.bounding_box.rect(),\n (self.math_background_reader)(self.props)\n );\n if let Some(ref wrapped) = self.wrapped {\n wrapped.draw(canvas, pen_pos);\n }\n }\n\n fn calculate(&mut self, context: &Context, width: f32, width_mode: &MeasureMode, height: f32,\n height_mode: &MeasureMode) {\n if let Some(val) = self.wrapped.as_mut() {\n val.calculate(context, width, width_mode, height, height_mode);\n }\n self.bounding_box = BoundingBox {\n rect: if let Some(ref val) = self.wrapped {\n val.bounding_box().rect().clone()\n } else {\n Rect::new(0., 0.)\n },\n baseline: if let Some(ref val) = self.wrapped {\n val.bounding_box().baseline()\n } else {\n 0.\n },\n axis: if let Some(ref val) = self.wrapped {\n val.bounding_box().axis()\n } else {\n 0.\n },\n }\n }\n\n fn bounding_box(&self) -> &BoundingBox {\n &self.bounding_box\n }\n}\n\nimpl<'a, T, U: Drawable> Wrapper<'a, T, U> {\n pub fn new(props: &'a T, math_background_reader: MathBackgroundReader<T>)\n -> Wrapper<'a, T, U> {\n Wrapper {\n wrapped: None,\n props,\n math_background_reader,\n bounding_box: BoundingBox::default()\n }\n }\n\n pub fn wrap(&mut self, drawable: U) {\n self.wrapped = Some(drawable);\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the code to instantiate a \"query result\", and\n\/\/! in particular to extract out the resulting region obligations and\n\/\/! encode them therein.\n\/\/!\n\/\/! For an overview of what canonicaliation is and how it fits into\n\/\/! rustc, check out the [chapter in the rustc guide][c].\n\/\/!\n\/\/! 
[c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html\n\nuse infer::canonical::substitute::substitute_value;\nuse infer::canonical::{\n Canonical, CanonicalVarValues, Canonicalize, Certainty, QueryRegionConstraint, QueryResult,\n};\nuse infer::region_constraints::{Constraint, RegionConstraintData};\nuse infer::{InferCtxt, InferOk, InferResult};\nuse rustc_data_structures::indexed_vec::Idx;\nuse std::fmt::Debug;\nuse traits::query::NoSolution;\nuse traits::{FulfillmentContext, TraitEngine};\nuse traits::{Obligation, ObligationCause, PredicateObligation};\nuse ty::fold::TypeFoldable;\nuse ty::subst::{Kind, UnpackedKind};\nuse ty::{self, CanonicalVar};\n\nuse rustc_data_structures::indexed_vec::IndexVec;\n\ntype CanonicalizedQueryResult<'gcx, 'tcx, T> =\n <QueryResult<'tcx, T> as Canonicalize<'gcx, 'tcx>>::Canonicalized;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n \/\/\/ This method is meant to be invoked as the final step of a canonical query\n \/\/\/ implementation. It is given:\n \/\/\/\n \/\/\/ - the instantiated variables `inference_vars` created from the query key\n \/\/\/ - the result `answer` of the query\n \/\/\/ - a fulfillment context `fulfill_cx` that may contain various obligations which\n \/\/\/ have yet to be proven.\n \/\/\/\n \/\/\/ Given this, the function will process the obligations pending\n \/\/\/ in `fulfill_cx`:\n \/\/\/\n \/\/\/ - If all the obligations can be proven successfully, it will\n \/\/\/ package up any resulting region obligations (extracted from\n \/\/\/ `infcx`) along with the fully resolved value `answer` into a\n \/\/\/ query result (which is then itself canonicalized).\n \/\/\/ - If some obligations can be neither proven nor disproven, then\n \/\/\/ the same thing happens, but the resulting query is marked as ambiguous.\n \/\/\/ - Finally, if any of the obligations result in a hard error,\n \/\/\/ then `Err(NoSolution)` is returned.\n pub fn make_canonicalized_query_result<T>(\n &self,\n inference_vars: CanonicalVarValues<'tcx>,\n answer: T,\n fulfill_cx: &mut FulfillmentContext<'tcx>,\n ) -> Result<CanonicalizedQueryResult<'gcx, 'tcx, T>, NoSolution>\n where\n T: Debug,\n QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,\n {\n let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;\n let (canonical_result, _) = self.canonicalize_response(&query_result);\n\n debug!(\n \"make_canonicalized_query_result: canonical_result = {:#?}\",\n canonical_result\n );\n\n Ok(canonical_result)\n }\n\n \/\/\/ Helper for `make_canonicalized_query_result` that does\n \/\/\/ everything up until the final canonicalization.\n fn make_query_result<T>(\n &self,\n inference_vars: CanonicalVarValues<'tcx>,\n answer: T,\n fulfill_cx: &mut FulfillmentContext<'tcx>,\n ) -> Result<QueryResult<'tcx, T>, NoSolution>\n where\n T: Debug,\n QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,\n {\n let tcx = self.tcx;\n\n debug!(\n \"make_query_result(\\\n inference_vars={:?}, \\\n answer={:?})\",\n inference_vars, answer,\n );\n\n \/\/ Select everything, returning errors.\n let true_errors = match fulfill_cx.select_where_possible(self) {\n Ok(()) => vec![],\n Err(errors) => errors,\n };\n debug!(\"true_errors = {:#?}\", true_errors);\n\n if !true_errors.is_empty() {\n \/\/ FIXME -- we don't indicate *why* we failed to solve\n debug!(\"make_query_result: true_errors={:#?}\", true_errors);\n return Err(NoSolution);\n }\n\n \/\/ Anything left unselected *now* must be an ambiguity.\n let ambig_errors = match 
fulfill_cx.select_all_or_error(self) {\n Ok(()) => vec![],\n Err(errors) => errors,\n };\n debug!(\"ambig_errors = {:#?}\", ambig_errors);\n\n let region_obligations = self.take_registered_region_obligations();\n\n let region_constraints = self.with_region_constraints(|region_constraints| {\n let RegionConstraintData {\n constraints,\n verifys,\n givens,\n } = region_constraints;\n\n assert!(verifys.is_empty());\n assert!(givens.is_empty());\n\n let mut outlives: Vec<_> = constraints\n .into_iter()\n .map(|(k, _)| match *k {\n \/\/ Swap regions because we are going from sub (<=) to outlives\n \/\/ (>=).\n Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(\n tcx.mk_region(ty::ReVar(v2)).into(),\n tcx.mk_region(ty::ReVar(v1)),\n ),\n Constraint::VarSubReg(v1, r2) => {\n ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))\n }\n Constraint::RegSubVar(r1, v2) => {\n ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)\n }\n Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),\n })\n .map(ty::Binder::dummy) \/\/ no bound regions in the code above\n .collect();\n\n outlives.extend(\n region_obligations\n .into_iter()\n .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))\n .map(ty::Binder::dummy), \/\/ no bound regions in the code above\n );\n\n outlives\n });\n\n let certainty = if ambig_errors.is_empty() {\n Certainty::Proven\n } else {\n Certainty::Ambiguous\n };\n\n Ok(QueryResult {\n var_values: inference_vars,\n region_constraints,\n certainty,\n value: answer,\n })\n }\n\n \/\/\/ Given the (canonicalized) result to a canonical query,\n \/\/\/ instantiates the result so it can be used, plugging in the\n \/\/\/ values from the canonical query. (Note that the result may\n \/\/\/ have been ambiguous; you should check the certainty level of\n \/\/\/ the query before applying this function.)\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#processing-the-canonicalized-query-result\n pub fn instantiate_query_result<R>(\n &self,\n cause: &ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n original_values: &CanonicalVarValues<'tcx>,\n query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,\n ) -> InferResult<'tcx, R>\n where\n R: Debug + TypeFoldable<'tcx>,\n {\n debug!(\n \"instantiate_query_result(original_values={:#?}, query_result={:#?})\",\n original_values, query_result,\n );\n\n \/\/ Every canonical query result includes values for each of\n \/\/ the inputs to the query. Therefore, we begin by unifying\n \/\/ these values with the original inputs that were\n \/\/ canonicalized.\n let result_values = &query_result.value.var_values;\n assert_eq!(original_values.len(), result_values.len());\n\n \/\/ Quickly try to find initial values for the canonical\n \/\/ variables in the result in terms of the query. We do this\n \/\/ by iterating down the values that the query gave to each of\n \/\/ the canonical inputs. If we find that one of those values\n \/\/ is directly equal to one of the canonical variables in the\n \/\/ result, then we can type the corresponding value from the\n \/\/ input. 
See the example above.\n let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =\n IndexVec::from_elem_n(None, query_result.variables.len());\n\n \/\/ In terms of our example above, we are iterating over pairs like:\n \/\/ [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]\n for (original_value, result_value) in original_values.iter().zip(result_values) {\n match result_value.unpack() {\n UnpackedKind::Type(result_value) => {\n \/\/ e.g., here `result_value` might be `?0` in the example above...\n if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {\n \/\/ in which case we would set `canonical_vars[0]` to `Some(?U)`.\n opt_values[index] = Some(original_value);\n }\n }\n UnpackedKind::Lifetime(result_value) => {\n \/\/ e.g., here `result_value` might be `'?1` in the example above...\n if let &ty::RegionKind::ReCanonical(index) = result_value {\n \/\/ in which case we would set `canonical_vars[0]` to `Some('static)`.\n opt_values[index] = Some(original_value);\n }\n }\n }\n }\n\n \/\/ Create a result substitution: if we found a value for a\n \/\/ given variable in the loop above, use that. Otherwise, use\n \/\/ a fresh inference variable.\n let result_subst = &CanonicalVarValues {\n var_values: query_result\n .variables\n .iter()\n .enumerate()\n .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {\n Some(k) => k,\n None => self.fresh_inference_var_for_canonical_var(cause.span, *info),\n })\n .collect(),\n };\n\n \/\/ Unify the original values for the canonical variables in\n \/\/ the input with the value found in the query\n \/\/ post-substitution. Often, but not always, this is a no-op,\n \/\/ because we already found the mapping in the first step.\n let substituted_values = |index: CanonicalVar| -> Kind<'tcx> {\n query_result.substitute_projected(self.tcx, result_subst, |v| &v.var_values[index])\n };\n let mut obligations = self\n .unify_canonical_vars(cause, param_env, original_values, substituted_values)?\n .into_obligations();\n\n obligations.extend(self.query_region_constraints_into_obligations(\n cause,\n param_env,\n &query_result.value.region_constraints,\n result_subst,\n ));\n\n let user_result: R =\n query_result.substitute_projected(self.tcx, result_subst, |q_r| &q_r.value);\n\n Ok(InferOk {\n value: user_result,\n obligations,\n })\n }\n\n \/\/\/ Converts the region constraints resulting from a query into an\n \/\/\/ iterator of obligations.\n fn query_region_constraints_into_obligations<'a>(\n &'a self,\n cause: &'a ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],\n result_subst: &'a CanonicalVarValues<'tcx>,\n ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {\n Box::new(\n unsubstituted_region_constraints\n .iter()\n .map(move |constraint| {\n let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); \/\/ restored below\n let k1 = substitute_value(self.tcx, result_subst, k1);\n let r2 = substitute_value(self.tcx, result_subst, r2);\n match k1.unpack() {\n UnpackedKind::Lifetime(r1) => Obligation::new(\n cause.clone(),\n param_env,\n ty::Predicate::RegionOutlives(ty::Binder::dummy(\n ty::OutlivesPredicate(r1, r2),\n )),\n ),\n\n UnpackedKind::Type(t1) => Obligation::new(\n cause.clone(),\n param_env,\n ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate(\n t1, r2,\n ))),\n ),\n }\n }),\n ) as Box<dyn Iterator<Item = _>>\n }\n\n \/\/\/ Given two sets of values for the same set of canonical variables, unify them.\n \/\/\/ The second set is 
produced lazilly by supplying indices from the first set.\n fn unify_canonical_vars(\n &self,\n cause: &ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n variables1: &CanonicalVarValues<'tcx>,\n variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,\n ) -> InferResult<'tcx, ()> {\n self.commit_if_ok(|_| {\n let mut obligations = vec![];\n for (index, value1) in variables1.var_values.iter_enumerated() {\n let value2 = variables2(index);\n\n match (value1.unpack(), value2.unpack()) {\n (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {\n obligations\n .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());\n }\n (\n UnpackedKind::Lifetime(ty::ReErased),\n UnpackedKind::Lifetime(ty::ReErased),\n ) => {\n \/\/ no action needed\n }\n (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {\n obligations\n .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());\n }\n _ => {\n bug!(\"kind mismatch, cannot unify {:?} and {:?}\", value1, value2,);\n }\n }\n }\n Ok(InferOk {\n value: (),\n obligations,\n })\n })\n }\n}\n<commit_msg>extract the handling of region constraints from queries<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the code to instantiate a \"query result\", and\n\/\/! in particular to extract out the resulting region obligations and\n\/\/! encode them therein.\n\/\/!\n\/\/! For an overview of what canonicaliation is and how it fits into\n\/\/! rustc, check out the [chapter in the rustc guide][c].\n\/\/!\n\/\/! [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html\n\nuse infer::canonical::substitute::substitute_value;\nuse infer::canonical::{\n Canonical, CanonicalVarValues, Canonicalize, Certainty, QueryRegionConstraint, QueryResult,\n};\nuse infer::region_constraints::{Constraint, RegionConstraintData};\nuse infer::{InferCtxt, InferOk, InferResult, RegionObligation};\nuse rustc_data_structures::indexed_vec::Idx;\nuse std::fmt::Debug;\nuse syntax::ast;\nuse traits::query::NoSolution;\nuse traits::{FulfillmentContext, TraitEngine};\nuse traits::{Obligation, ObligationCause, PredicateObligation};\nuse ty::fold::TypeFoldable;\nuse ty::subst::{Kind, UnpackedKind};\nuse ty::{self, CanonicalVar, TyCtxt};\n\nuse rustc_data_structures::indexed_vec::IndexVec;\n\ntype CanonicalizedQueryResult<'gcx, 'tcx, T> =\n <QueryResult<'tcx, T> as Canonicalize<'gcx, 'tcx>>::Canonicalized;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n \/\/\/ This method is meant to be invoked as the final step of a canonical query\n \/\/\/ implementation. 
It is given:\n \/\/\/\n \/\/\/ - the instantiated variables `inference_vars` created from the query key\n \/\/\/ - the result `answer` of the query\n \/\/\/ - a fulfillment context `fulfill_cx` that may contain various obligations which\n \/\/\/ have yet to be proven.\n \/\/\/\n \/\/\/ Given this, the function will process the obligations pending\n \/\/\/ in `fulfill_cx`:\n \/\/\/\n \/\/\/ - If all the obligations can be proven successfully, it will\n \/\/\/ package up any resulting region obligations (extracted from\n \/\/\/ `infcx`) along with the fully resolved value `answer` into a\n \/\/\/ query result (which is then itself canonicalized).\n \/\/\/ - If some obligations can be neither proven nor disproven, then\n \/\/\/ the same thing happens, but the resulting query is marked as ambiguous.\n \/\/\/ - Finally, if any of the obligations result in a hard error,\n \/\/\/ then `Err(NoSolution)` is returned.\n pub fn make_canonicalized_query_result<T>(\n &self,\n inference_vars: CanonicalVarValues<'tcx>,\n answer: T,\n fulfill_cx: &mut FulfillmentContext<'tcx>,\n ) -> Result<CanonicalizedQueryResult<'gcx, 'tcx, T>, NoSolution>\n where\n T: Debug,\n QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,\n {\n let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;\n let (canonical_result, _) = self.canonicalize_response(&query_result);\n\n debug!(\n \"make_canonicalized_query_result: canonical_result = {:#?}\",\n canonical_result\n );\n\n Ok(canonical_result)\n }\n\n \/\/\/ Helper for `make_canonicalized_query_result` that does\n \/\/\/ everything up until the final canonicalization.\n fn make_query_result<T>(\n &self,\n inference_vars: CanonicalVarValues<'tcx>,\n answer: T,\n fulfill_cx: &mut FulfillmentContext<'tcx>,\n ) -> Result<QueryResult<'tcx, T>, NoSolution>\n where\n T: Debug,\n QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,\n {\n let tcx = self.tcx;\n\n debug!(\n \"make_query_result(\\\n inference_vars={:?}, \\\n answer={:?})\",\n inference_vars, answer,\n );\n\n \/\/ Select everything, returning errors.\n let true_errors = match fulfill_cx.select_where_possible(self) {\n Ok(()) => vec![],\n Err(errors) => errors,\n };\n debug!(\"true_errors = {:#?}\", true_errors);\n\n if !true_errors.is_empty() {\n \/\/ FIXME -- we don't indicate *why* we failed to solve\n debug!(\"make_query_result: true_errors={:#?}\", true_errors);\n return Err(NoSolution);\n }\n\n \/\/ Anything left unselected *now* must be an ambiguity.\n let ambig_errors = match fulfill_cx.select_all_or_error(self) {\n Ok(()) => vec![],\n Err(errors) => errors,\n };\n debug!(\"ambig_errors = {:#?}\", ambig_errors);\n\n let region_obligations = self.take_registered_region_obligations();\n let region_constraints = self.with_region_constraints(|region_constraints| {\n make_query_outlives(tcx, region_obligations, region_constraints)\n });\n\n let certainty = if ambig_errors.is_empty() {\n Certainty::Proven\n } else {\n Certainty::Ambiguous\n };\n\n Ok(QueryResult {\n var_values: inference_vars,\n region_constraints,\n certainty,\n value: answer,\n })\n }\n\n \/\/\/ Given the (canonicalized) result to a canonical query,\n \/\/\/ instantiates the result so it can be used, plugging in the\n \/\/\/ values from the canonical query. 
(Note that the result may\n \/\/\/ have been ambiguous; you should check the certainty level of\n \/\/\/ the query before applying this function.)\n \/\/\/\n \/\/\/ To get a good understanding of what is happening here, check\n \/\/\/ out the [chapter in the rustc guide][c].\n \/\/\/\n \/\/\/ [c]: https:\/\/rust-lang-nursery.github.io\/rustc-guide\/traits\/canonicalization.html#processing-the-canonicalized-query-result\n pub fn instantiate_query_result<R>(\n &self,\n cause: &ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n original_values: &CanonicalVarValues<'tcx>,\n query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,\n ) -> InferResult<'tcx, R>\n where\n R: Debug + TypeFoldable<'tcx>,\n {\n debug!(\n \"instantiate_query_result(original_values={:#?}, query_result={:#?})\",\n original_values, query_result,\n );\n\n \/\/ Every canonical query result includes values for each of\n \/\/ the inputs to the query. Therefore, we begin by unifying\n \/\/ these values with the original inputs that were\n \/\/ canonicalized.\n let result_values = &query_result.value.var_values;\n assert_eq!(original_values.len(), result_values.len());\n\n \/\/ Quickly try to find initial values for the canonical\n \/\/ variables in the result in terms of the query. We do this\n \/\/ by iterating down the values that the query gave to each of\n \/\/ the canonical inputs. If we find that one of those values\n \/\/ is directly equal to one of the canonical variables in the\n \/\/ result, then we can type the corresponding value from the\n \/\/ input. See the example above.\n let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =\n IndexVec::from_elem_n(None, query_result.variables.len());\n\n \/\/ In terms of our example above, we are iterating over pairs like:\n \/\/ [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]\n for (original_value, result_value) in original_values.iter().zip(result_values) {\n match result_value.unpack() {\n UnpackedKind::Type(result_value) => {\n \/\/ e.g., here `result_value` might be `?0` in the example above...\n if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {\n \/\/ in which case we would set `canonical_vars[0]` to `Some(?U)`.\n opt_values[index] = Some(original_value);\n }\n }\n UnpackedKind::Lifetime(result_value) => {\n \/\/ e.g., here `result_value` might be `'?1` in the example above...\n if let &ty::RegionKind::ReCanonical(index) = result_value {\n \/\/ in which case we would set `canonical_vars[0]` to `Some('static)`.\n opt_values[index] = Some(original_value);\n }\n }\n }\n }\n\n \/\/ Create a result substitution: if we found a value for a\n \/\/ given variable in the loop above, use that. Otherwise, use\n \/\/ a fresh inference variable.\n let result_subst = &CanonicalVarValues {\n var_values: query_result\n .variables\n .iter()\n .enumerate()\n .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {\n Some(k) => k,\n None => self.fresh_inference_var_for_canonical_var(cause.span, *info),\n })\n .collect(),\n };\n\n \/\/ Unify the original values for the canonical variables in\n \/\/ the input with the value found in the query\n \/\/ post-substitution. 
Often, but not always, this is a no-op,\n \/\/ because we already found the mapping in the first step.\n let substituted_values = |index: CanonicalVar| -> Kind<'tcx> {\n query_result.substitute_projected(self.tcx, result_subst, |v| &v.var_values[index])\n };\n let mut obligations = self\n .unify_canonical_vars(cause, param_env, original_values, substituted_values)?\n .into_obligations();\n\n obligations.extend(self.query_region_constraints_into_obligations(\n cause,\n param_env,\n &query_result.value.region_constraints,\n result_subst,\n ));\n\n let user_result: R =\n query_result.substitute_projected(self.tcx, result_subst, |q_r| &q_r.value);\n\n Ok(InferOk {\n value: user_result,\n obligations,\n })\n }\n\n \/\/\/ Converts the region constraints resulting from a query into an\n \/\/\/ iterator of obligations.\n fn query_region_constraints_into_obligations<'a>(\n &'a self,\n cause: &'a ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],\n result_subst: &'a CanonicalVarValues<'tcx>,\n ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {\n Box::new(\n unsubstituted_region_constraints\n .iter()\n .map(move |constraint| {\n let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); \/\/ restored below\n let k1 = substitute_value(self.tcx, result_subst, k1);\n let r2 = substitute_value(self.tcx, result_subst, r2);\n match k1.unpack() {\n UnpackedKind::Lifetime(r1) => Obligation::new(\n cause.clone(),\n param_env,\n ty::Predicate::RegionOutlives(ty::Binder::dummy(\n ty::OutlivesPredicate(r1, r2),\n )),\n ),\n\n UnpackedKind::Type(t1) => Obligation::new(\n cause.clone(),\n param_env,\n ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate(\n t1, r2,\n ))),\n ),\n }\n }),\n ) as Box<dyn Iterator<Item = _>>\n }\n\n \/\/\/ Given two sets of values for the same set of canonical variables, unify them.\n \/\/\/ The second set is produced lazilly by supplying indices from the first set.\n fn unify_canonical_vars(\n &self,\n cause: &ObligationCause<'tcx>,\n param_env: ty::ParamEnv<'tcx>,\n variables1: &CanonicalVarValues<'tcx>,\n variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,\n ) -> InferResult<'tcx, ()> {\n self.commit_if_ok(|_| {\n let mut obligations = vec![];\n for (index, value1) in variables1.var_values.iter_enumerated() {\n let value2 = variables2(index);\n\n match (value1.unpack(), value2.unpack()) {\n (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {\n obligations\n .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());\n }\n (\n UnpackedKind::Lifetime(ty::ReErased),\n UnpackedKind::Lifetime(ty::ReErased),\n ) => {\n \/\/ no action needed\n }\n (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {\n obligations\n .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());\n }\n _ => {\n bug!(\"kind mismatch, cannot unify {:?} and {:?}\", value1, value2,);\n }\n }\n }\n Ok(InferOk {\n value: (),\n obligations,\n })\n })\n }\n}\n\n\/\/\/ Given the region obligations and constraints scraped from the infcx,\n\/\/\/ creates query region constraints.\nfn make_query_outlives<'tcx>(\n tcx: TyCtxt<'_, '_, 'tcx>,\n region_obligations: Vec<(ast::NodeId, RegionObligation<'tcx>)>,\n region_constraints: &RegionConstraintData<'tcx>,\n) -> Vec<QueryRegionConstraint<'tcx>> {\n let RegionConstraintData {\n constraints,\n verifys,\n givens,\n } = region_constraints;\n\n assert!(verifys.is_empty());\n assert!(givens.is_empty());\n\n let mut outlives: Vec<_> = constraints\n 
.into_iter()\n .map(|(k, _)| match *k {\n \/\/ Swap regions because we are going from sub (<=) to outlives\n \/\/ (>=).\n Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(\n tcx.mk_region(ty::ReVar(v2)).into(),\n tcx.mk_region(ty::ReVar(v1)),\n ),\n Constraint::VarSubReg(v1, r2) => {\n ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))\n }\n Constraint::RegSubVar(r1, v2) => {\n ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)\n }\n Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),\n })\n .map(ty::Binder::dummy) \/\/ no bound regions in the code above\n .collect();\n\n outlives.extend(\n region_obligations\n .into_iter()\n .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))\n .map(ty::Binder::dummy), \/\/ no bound regions in the code above\n );\n\n outlives\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Simple [DEFLATE][def]-based compression. This is a wrapper around the\n\/\/! [`miniz`][mz] library, which is a one-file pure-C implementation of zlib.\n\/\/!\n\/\/! [def]: https:\/\/en.wikipedia.org\/wiki\/DEFLATE\n\/\/! [mz]: https:\/\/code.google.com\/p\/miniz\/\n\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"flate\"]\n#![unstable(feature = \"rustc_private\", issue = \"27812\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n\n#![feature(libc)]\n#![feature(staged_api)]\n#![feature(unique)]\n#![cfg_attr(test, feature(rustc_private, rand, vec_push_all))]\n\n#[cfg(test)] #[macro_use] extern crate log;\n\nextern crate libc;\n\nuse libc::{c_void, size_t, c_int};\nuse std::fmt;\nuse std::ops::Deref;\nuse std::ptr::Unique;\nuse std::slice;\n\n#[derive(Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]\npub struct Error {\n _unused: (),\n}\n\nimpl Error {\n fn new() -> Error {\n Error {\n _unused: (),\n }\n }\n}\n\nimpl fmt::Debug for Error {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"decompression error\".fmt(f)\n }\n}\n\npub struct Bytes {\n ptr: Unique<u8>,\n len: usize,\n}\n\nimpl Deref for Bytes {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe { slice::from_raw_parts(*self.ptr, self.len) }\n }\n}\n\nimpl Drop for Bytes {\n fn drop(&mut self) {\n unsafe { libc::free(*self.ptr as *mut _); }\n }\n}\n\n#[link(name = \"miniz\", kind = \"static\")]\nextern {\n \/\/\/ Raw miniz compression function.\n fn tdefl_compress_mem_to_heap(psrc_buf: *const c_void,\n src_buf_len: size_t,\n pout_len: *mut size_t,\n flags: c_int)\n -> *mut c_void;\n\n \/\/\/ Raw miniz decompression function.\n fn tinfl_decompress_mem_to_heap(psrc_buf: *const c_void,\n src_buf_len: size_t,\n pout_len: *mut size_t,\n flags: c_int)\n -> *mut c_void;\n}\n\nconst LZ_NORM: c_int = 0x80; 
\/\/ LZ with 128 probes, \"normal\"\nconst TINFL_FLAG_PARSE_ZLIB_HEADER: c_int = 0x1; \/\/ parse zlib header and adler32 checksum\nconst TDEFL_WRITE_ZLIB_HEADER: c_int = 0x01000; \/\/ write zlib header and adler32 checksum\n\nfn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Bytes {\n unsafe {\n let mut outsz: size_t = 0;\n let res = tdefl_compress_mem_to_heap(bytes.as_ptr() as *const _,\n bytes.len() as size_t,\n &mut outsz,\n flags);\n assert!(!res.is_null());\n Bytes {\n ptr: Unique::new(res as *mut u8),\n len: outsz as usize,\n }\n }\n}\n\n\/\/\/ Compress a buffer, without writing any sort of header on the output.\npub fn deflate_bytes(bytes: &[u8]) -> Bytes {\n deflate_bytes_internal(bytes, LZ_NORM)\n}\n\n\/\/\/ Compress a buffer, using a header that zlib can understand.\npub fn deflate_bytes_zlib(bytes: &[u8]) -> Bytes {\n deflate_bytes_internal(bytes, LZ_NORM | TDEFL_WRITE_ZLIB_HEADER)\n}\n\nfn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Result<Bytes,Error> {\n unsafe {\n let mut outsz: size_t = 0;\n let res = tinfl_decompress_mem_to_heap(bytes.as_ptr() as *const _,\n bytes.len() as size_t,\n &mut outsz,\n flags);\n if !res.is_null() {\n Ok(Bytes {\n ptr: Unique::new(res as *mut u8),\n len: outsz as usize,\n })\n } else {\n Err(Error::new())\n }\n }\n}\n\n\/\/\/ Decompress a buffer, without parsing any sort of header on the input.\npub fn inflate_bytes(bytes: &[u8]) -> Result<Bytes,Error> {\n inflate_bytes_internal(bytes, 0)\n}\n\n\/\/\/ Decompress a buffer that starts with a zlib header.\npub fn inflate_bytes_zlib(bytes: &[u8]) -> Result<Bytes,Error> {\n inflate_bytes_internal(bytes, TINFL_FLAG_PARSE_ZLIB_HEADER)\n}\n\n#[cfg(test)]\nmod tests {\n #![allow(deprecated)]\n use super::{inflate_bytes, deflate_bytes};\n use std::__rand::{thread_rng, Rng};\n\n #[test]\n fn test_flate_round_trip() {\n let mut r = thread_rng();\n let mut words = vec![];\n for _ in 0..20 {\n let range = r.gen_range(1, 10);\n let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>();\n words.push(v);\n }\n for _ in 0..20 {\n let mut input = vec![];\n for _ in 0..2000 {\n input.push_all(r.choose(&words).unwrap());\n }\n debug!(\"de\/inflate of {} bytes of random word-sequences\",\n input.len());\n let cmp = deflate_bytes(&input);\n let out = inflate_bytes(&cmp).unwrap();\n debug!(\"{} bytes deflated to {} ({:.1}% size)\",\n input.len(), cmp.len(),\n 100.0 * ((cmp.len() as f64) \/ (input.len() as f64)));\n assert_eq!(&*input, &*out);\n }\n }\n\n #[test]\n fn test_zlib_flate() {\n let bytes = vec![1, 2, 3, 4, 5];\n let deflated = deflate_bytes(&bytes);\n let inflated = inflate_bytes(&deflated).unwrap();\n assert_eq!(&*inflated, &*bytes);\n }\n}\n<commit_msg>Auto merge of #28949 - iwillspeak:rustfmt-libflate, r=nrc<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! Simple [DEFLATE][def]-based compression. This is a wrapper around the\n\/\/! [`miniz`][mz] library, which is a one-file pure-C implementation of zlib.\n\/\/!\n\/\/! [def]: https:\/\/en.wikipedia.org\/wiki\/DEFLATE\n\/\/! 
[mz]: https:\/\/code.google.com\/p\/miniz\/\n\n\/\/ Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)\n#![cfg_attr(stage0, feature(custom_attribute))]\n#![crate_name = \"flate\"]\n#![unstable(feature = \"rustc_private\", issue = \"27812\")]\n#![staged_api]\n#![crate_type = \"rlib\"]\n#![crate_type = \"dylib\"]\n#![doc(html_logo_url = \"https:\/\/www.rust-lang.org\/logos\/rust-logo-128x128-blk-v2.png\",\n html_favicon_url = \"https:\/\/doc.rust-lang.org\/favicon.ico\",\n html_root_url = \"https:\/\/doc.rust-lang.org\/nightly\/\")]\n\n#![feature(libc)]\n#![feature(staged_api)]\n#![feature(unique)]\n#![cfg_attr(test, feature(rustc_private, rand, vec_push_all))]\n\n#[cfg(test)]\n#[macro_use]\nextern crate log;\n\nextern crate libc;\n\nuse libc::{c_void, size_t, c_int};\nuse std::fmt;\nuse std::ops::Deref;\nuse std::ptr::Unique;\nuse std::slice;\n\n#[derive(Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]\npub struct Error {\n _unused: (),\n}\n\nimpl Error {\n fn new() -> Error {\n Error { _unused: () }\n }\n}\n\nimpl fmt::Debug for Error {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n \"decompression error\".fmt(f)\n }\n}\n\npub struct Bytes {\n ptr: Unique<u8>,\n len: usize,\n}\n\nimpl Deref for Bytes {\n type Target = [u8];\n fn deref(&self) -> &[u8] {\n unsafe { slice::from_raw_parts(*self.ptr, self.len) }\n }\n}\n\nimpl Drop for Bytes {\n fn drop(&mut self) {\n unsafe {\n libc::free(*self.ptr as *mut _);\n }\n }\n}\n\n#[link(name = \"miniz\", kind = \"static\")]\nextern {\n \/\/\/ Raw miniz compression function.\n fn tdefl_compress_mem_to_heap(psrc_buf: *const c_void,\n src_buf_len: size_t,\n pout_len: *mut size_t,\n flags: c_int)\n -> *mut c_void;\n\n \/\/\/ Raw miniz decompression function.\n fn tinfl_decompress_mem_to_heap(psrc_buf: *const c_void,\n src_buf_len: size_t,\n pout_len: *mut size_t,\n flags: c_int)\n -> *mut c_void;\n}\n\nconst LZ_NORM: c_int = 0x80; \/\/ LZ with 128 probes, \"normal\"\nconst TINFL_FLAG_PARSE_ZLIB_HEADER: c_int = 0x1; \/\/ parse zlib header and adler32 checksum\nconst TDEFL_WRITE_ZLIB_HEADER: c_int = 0x01000; \/\/ write zlib header and adler32 checksum\n\nfn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Bytes {\n unsafe {\n let mut outsz: size_t = 0;\n let res = tdefl_compress_mem_to_heap(bytes.as_ptr() as *const _,\n bytes.len() as size_t,\n &mut outsz,\n flags);\n assert!(!res.is_null());\n Bytes {\n ptr: Unique::new(res as *mut u8),\n len: outsz as usize,\n }\n }\n}\n\n\/\/\/ Compress a buffer, without writing any sort of header on the output.\npub fn deflate_bytes(bytes: &[u8]) -> Bytes {\n deflate_bytes_internal(bytes, LZ_NORM)\n}\n\n\/\/\/ Compress a buffer, using a header that zlib can understand.\npub fn deflate_bytes_zlib(bytes: &[u8]) -> Bytes {\n deflate_bytes_internal(bytes, LZ_NORM | TDEFL_WRITE_ZLIB_HEADER)\n}\n\nfn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Result<Bytes, Error> {\n unsafe {\n let mut outsz: size_t = 0;\n let res = tinfl_decompress_mem_to_heap(bytes.as_ptr() as *const _,\n bytes.len() as size_t,\n &mut outsz,\n flags);\n if !res.is_null() {\n Ok(Bytes {\n ptr: Unique::new(res as *mut u8),\n len: outsz as usize,\n })\n } else {\n Err(Error::new())\n }\n }\n}\n\n\/\/\/ Decompress a buffer, without parsing any sort of header on the input.\npub fn inflate_bytes(bytes: &[u8]) -> Result<Bytes, Error> {\n inflate_bytes_internal(bytes, 0)\n}\n\n\/\/\/ Decompress a buffer that starts with a zlib header.\npub fn inflate_bytes_zlib(bytes: &[u8]) -> Result<Bytes, Error> {\n 
inflate_bytes_internal(bytes, TINFL_FLAG_PARSE_ZLIB_HEADER)\n}\n\n#[cfg(test)]\nmod tests {\n #![allow(deprecated)]\n use super::{inflate_bytes, deflate_bytes};\n use std::__rand::{thread_rng, Rng};\n\n #[test]\n fn test_flate_round_trip() {\n let mut r = thread_rng();\n let mut words = vec![];\n for _ in 0..20 {\n let range = r.gen_range(1, 10);\n let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>();\n words.push(v);\n }\n for _ in 0..20 {\n let mut input = vec![];\n for _ in 0..2000 {\n input.push_all(r.choose(&words).unwrap());\n }\n debug!(\"de\/inflate of {} bytes of random word-sequences\",\n input.len());\n let cmp = deflate_bytes(&input);\n let out = inflate_bytes(&cmp).unwrap();\n debug!(\"{} bytes deflated to {} ({:.1}% size)\",\n input.len(),\n cmp.len(),\n 100.0 * ((cmp.len() as f64) \/ (input.len() as f64)));\n assert_eq!(&*input, &*out);\n }\n }\n\n #[test]\n fn test_zlib_flate() {\n let bytes = vec![1, 2, 3, 4, 5];\n let deflated = deflate_bytes(&bytes);\n let inflated = inflate_bytes(&deflated).unwrap();\n assert_eq!(&*inflated, &*bytes);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Detect and reload new versions of the dynamic lib<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! See docs in build\/expr\/mod.rs\n\nuse rustc_data_structures::fnv::FnvHashMap;\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse hair::*;\nuse rustc::mir::repr::*;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, yielding an rvalue.\n pub fn as_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>\n where M: Mirror<'tcx, Output = Expr<'tcx>>\n {\n let expr = self.hir.mirror(expr);\n self.expr_as_rvalue(block, expr)\n }\n\n fn expr_as_rvalue(&mut self,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<Rvalue<'tcx>> {\n debug!(\"expr_as_rvalue(block={:?}, expr={:?})\", block, expr);\n\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.as_rvalue(block, value))\n }\n ExprKind::InlineAsm { asm } => {\n block.and(Rvalue::InlineAsm(asm.clone()))\n }\n ExprKind::Repeat { value, count } => {\n let value_operand = unpack!(block = this.as_operand(block, value));\n block.and(Rvalue::Repeat(value_operand, count))\n }\n ExprKind::Borrow { region, borrow_kind, arg } => {\n let arg_lvalue = unpack!(block = this.as_lvalue(block, arg));\n block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue))\n }\n ExprKind::Binary { op, lhs, rhs } => {\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let rhs = unpack!(block = this.as_operand(block, rhs));\n block.and(Rvalue::BinaryOp(op, lhs, rhs))\n }\n ExprKind::Unary { op, arg } => {\n let arg = unpack!(block = this.as_operand(block, arg));\n block.and(Rvalue::UnaryOp(op, arg))\n }\n ExprKind::Box { value, value_extents } => {\n let value = this.hir.mirror(value);\n let result = this.temp(expr.ty);\n \/\/ to start, malloc some memory of 
suitable type (thus far, uninitialized):\n this.cfg.push_assign(block, expr_span, &result, Rvalue::Box(value.ty));\n this.in_scope(value_extents, block, |this| {\n \/\/ schedule a shallow free of that memory, lest we unwind:\n this.schedule_box_free(expr_span, value_extents, &result, value.ty);\n \/\/ initialize the box contents:\n unpack!(block = this.into(&result.clone().deref(), block, value));\n block.and(Rvalue::Use(Operand::Consume(result)))\n })\n }\n ExprKind::Cast { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))\n }\n ExprKind::ReifyFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty))\n }\n ExprKind::UnsafeFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty))\n }\n ExprKind::Unsize { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Unsize, source, expr.ty))\n }\n ExprKind::Vec { fields } => {\n \/\/ (*) We would (maybe) be closer to trans if we\n \/\/ handled this and other aggregate cases via\n \/\/ `into()`, not `as_rvalue` -- in that case, instead\n \/\/ of generating\n \/\/\n \/\/ let tmp1 = ...1;\n \/\/ let tmp2 = ...2;\n \/\/ dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])\n \/\/\n \/\/ we could just generate\n \/\/\n \/\/ dest.f = ...1;\n \/\/ dest.g = ...2;\n \/\/\n \/\/ The problem is that then we would need to:\n \/\/\n \/\/ (a) have a more complex mechanism for handling\n \/\/ partial cleanup;\n \/\/ (b) distinguish the case where the type `Foo` has a\n \/\/ destructor, in which case creating an instance\n \/\/ as a whole \"arms\" the destructor, and you can't\n \/\/ write individual fields; and,\n \/\/ (c) handle the case where the type Foo has no\n \/\/ fields. 
We don't want `let x: ();` to compile\n \/\/ to the same MIR as `let x = ();`.\n\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Vec, fields))\n }\n ExprKind::Tuple { fields } => { \/\/ see (*) above\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))\n }\n ExprKind::Closure { closure_id, substs, upvars } => { \/\/ see (*) above\n let upvars =\n upvars.into_iter()\n .map(|upvar| unpack!(block = this.as_operand(block, upvar)))\n .collect();\n block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))\n }\n ExprKind::Adt {\n adt_def, variant_index, substs, fields, base\n } => { \/\/ see (*) above\n \/\/ first process the set of fields that were provided\n \/\/ (evaluating them in order given by user)\n let fields_map: FnvHashMap<_, _> =\n fields.into_iter()\n .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))\n .collect();\n\n let field_names = this.hir.all_fields(adt_def, variant_index);\n\n let fields = if let Some(FruInfo { base, field_types }) = base {\n let base = unpack!(block = this.as_lvalue(block, base));\n\n \/\/ MIR does not natively support FRU, so for each\n \/\/ base-supplied field, generate an operand that\n \/\/ reads it from the base.\n field_names.into_iter()\n .zip(field_types.into_iter())\n .map(|(n, ty)| match fields_map.get(&n) {\n Some(v) => v.clone(),\n None => Operand::Consume(base.clone().field(n, ty))\n })\n .collect()\n } else {\n field_names.iter().map(|n| fields_map[n].clone()).collect()\n };\n\n block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),\n fields))\n }\n ExprKind::Literal { .. } |\n ExprKind::Block { .. } |\n ExprKind::Match { .. } |\n ExprKind::If { .. } |\n ExprKind::Loop { .. } |\n ExprKind::LogicalOp { .. } |\n ExprKind::Call { .. } |\n ExprKind::Field { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Index { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::Assign { .. } |\n ExprKind::AssignOp { .. } |\n ExprKind::Break { .. } |\n ExprKind::Continue { .. } |\n ExprKind::Return { .. } |\n ExprKind::StaticRef { .. } => {\n \/\/ these do not have corresponding `Rvalue` variants,\n \/\/ so make an operand and then return that\n debug_assert!(match Category::of(&expr.kind) {\n Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false,\n _ => true,\n });\n let operand = unpack!(block = this.as_operand(block, expr));\n block.and(Rvalue::Use(operand))\n }\n }\n }\n}\n<commit_msg>mir: Ignore noop casts (e.g. when `as` used for coercion).<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! 
See docs in build\/expr\/mod.rs\n\nuse rustc_data_structures::fnv::FnvHashMap;\n\nuse build::{BlockAnd, BlockAndExtension, Builder};\nuse build::expr::category::{Category, RvalueFunc};\nuse hair::*;\nuse rustc::mir::repr::*;\n\nimpl<'a,'tcx> Builder<'a,'tcx> {\n \/\/\/ Compile `expr`, yielding an rvalue.\n pub fn as_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>\n where M: Mirror<'tcx, Output = Expr<'tcx>>\n {\n let expr = self.hir.mirror(expr);\n self.expr_as_rvalue(block, expr)\n }\n\n fn expr_as_rvalue(&mut self,\n mut block: BasicBlock,\n expr: Expr<'tcx>)\n -> BlockAnd<Rvalue<'tcx>> {\n debug!(\"expr_as_rvalue(block={:?}, expr={:?})\", block, expr);\n\n let this = self;\n let expr_span = expr.span;\n\n match expr.kind {\n ExprKind::Scope { extent, value } => {\n this.in_scope(extent, block, |this| this.as_rvalue(block, value))\n }\n ExprKind::InlineAsm { asm } => {\n block.and(Rvalue::InlineAsm(asm.clone()))\n }\n ExprKind::Repeat { value, count } => {\n let value_operand = unpack!(block = this.as_operand(block, value));\n block.and(Rvalue::Repeat(value_operand, count))\n }\n ExprKind::Borrow { region, borrow_kind, arg } => {\n let arg_lvalue = unpack!(block = this.as_lvalue(block, arg));\n block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue))\n }\n ExprKind::Binary { op, lhs, rhs } => {\n let lhs = unpack!(block = this.as_operand(block, lhs));\n let rhs = unpack!(block = this.as_operand(block, rhs));\n block.and(Rvalue::BinaryOp(op, lhs, rhs))\n }\n ExprKind::Unary { op, arg } => {\n let arg = unpack!(block = this.as_operand(block, arg));\n block.and(Rvalue::UnaryOp(op, arg))\n }\n ExprKind::Box { value, value_extents } => {\n let value = this.hir.mirror(value);\n let result = this.temp(expr.ty);\n \/\/ to start, malloc some memory of suitable type (thus far, uninitialized):\n this.cfg.push_assign(block, expr_span, &result, Rvalue::Box(value.ty));\n this.in_scope(value_extents, block, |this| {\n \/\/ schedule a shallow free of that memory, lest we unwind:\n this.schedule_box_free(expr_span, value_extents, &result, value.ty);\n \/\/ initialize the box contents:\n unpack!(block = this.into(&result.clone().deref(), block, value));\n block.and(Rvalue::Use(Operand::Consume(result)))\n })\n }\n ExprKind::Cast { source } => {\n let source = this.hir.mirror(source);\n if source.ty == expr.ty {\n this.expr_as_rvalue(block, source)\n } else {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))\n }\n }\n ExprKind::ReifyFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty))\n }\n ExprKind::UnsafeFnPointer { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty))\n }\n ExprKind::Unsize { source } => {\n let source = unpack!(block = this.as_operand(block, source));\n block.and(Rvalue::Cast(CastKind::Unsize, source, expr.ty))\n }\n ExprKind::Vec { fields } => {\n \/\/ (*) We would (maybe) be closer to trans if we\n \/\/ handled this and other aggregate cases via\n \/\/ `into()`, not `as_rvalue` -- in that case, instead\n \/\/ of generating\n \/\/\n \/\/ let tmp1 = ...1;\n \/\/ let tmp2 = ...2;\n \/\/ dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])\n \/\/\n \/\/ we could just generate\n \/\/\n \/\/ dest.f = ...1;\n \/\/ dest.g = ...2;\n \/\/\n \/\/ The problem is that then we would need to:\n \/\/\n \/\/ (a) 
have a more complex mechanism for handling\n \/\/ partial cleanup;\n \/\/ (b) distinguish the case where the type `Foo` has a\n \/\/ destructor, in which case creating an instance\n \/\/ as a whole \"arms\" the destructor, and you can't\n \/\/ write individual fields; and,\n \/\/ (c) handle the case where the type Foo has no\n \/\/ fields. We don't want `let x: ();` to compile\n \/\/ to the same MIR as `let x = ();`.\n\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Vec, fields))\n }\n ExprKind::Tuple { fields } => { \/\/ see (*) above\n \/\/ first process the set of fields\n let fields: Vec<_> =\n fields.into_iter()\n .map(|f| unpack!(block = this.as_operand(block, f)))\n .collect();\n\n block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))\n }\n ExprKind::Closure { closure_id, substs, upvars } => { \/\/ see (*) above\n let upvars =\n upvars.into_iter()\n .map(|upvar| unpack!(block = this.as_operand(block, upvar)))\n .collect();\n block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))\n }\n ExprKind::Adt {\n adt_def, variant_index, substs, fields, base\n } => { \/\/ see (*) above\n \/\/ first process the set of fields that were provided\n \/\/ (evaluating them in order given by user)\n let fields_map: FnvHashMap<_, _> =\n fields.into_iter()\n .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))\n .collect();\n\n let field_names = this.hir.all_fields(adt_def, variant_index);\n\n let fields = if let Some(FruInfo { base, field_types }) = base {\n let base = unpack!(block = this.as_lvalue(block, base));\n\n \/\/ MIR does not natively support FRU, so for each\n \/\/ base-supplied field, generate an operand that\n \/\/ reads it from the base.\n field_names.into_iter()\n .zip(field_types.into_iter())\n .map(|(n, ty)| match fields_map.get(&n) {\n Some(v) => v.clone(),\n None => Operand::Consume(base.clone().field(n, ty))\n })\n .collect()\n } else {\n field_names.iter().map(|n| fields_map[n].clone()).collect()\n };\n\n block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),\n fields))\n }\n ExprKind::Literal { .. } |\n ExprKind::Block { .. } |\n ExprKind::Match { .. } |\n ExprKind::If { .. } |\n ExprKind::Loop { .. } |\n ExprKind::LogicalOp { .. } |\n ExprKind::Call { .. } |\n ExprKind::Field { .. } |\n ExprKind::Deref { .. } |\n ExprKind::Index { .. } |\n ExprKind::VarRef { .. } |\n ExprKind::SelfRef |\n ExprKind::Assign { .. } |\n ExprKind::AssignOp { .. } |\n ExprKind::Break { .. } |\n ExprKind::Continue { .. } |\n ExprKind::Return { .. } |\n ExprKind::StaticRef { .. } => {\n \/\/ these do not have corresponding `Rvalue` variants,\n \/\/ so make an operand and then return that\n debug_assert!(match Category::of(&expr.kind) {\n Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false,\n _ => true,\n });\n let operand = unpack!(block = this.as_operand(block, expr));\n block.and(Rvalue::Use(operand))\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\/\/ We specify -Z incremental here because we want to test the partitioning for\n\/\/ incremental compilation\n\/\/ compile-flags:-Zprint-trans-items=eager -Zincremental=tmp\/partitioning-tests\/extern-generic\n\n#![allow(dead_code)]\n#![crate_type=\"lib\"]\n\n\/\/ aux-build:cgu_generic_function.rs\nextern crate cgu_generic_function;\n\n\/\/~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[Internal]\nfn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n}\n\nmod mod1 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n\n mod mod1 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n }\n}\n\nmod mod2 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n}\n\nmod mod3 {\n \/\/~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[Internal]\n fn non_user() {}\n}\n\n\/\/ Make sure the two generic functions from the extern crate get instantiated\n\/\/ once for the current crate\n\/\/~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function.volatile[External]\n\/\/~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function.volatile[Internal]\n<commit_msg>Adapt codegen-unit test to shared-generics.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\/\/ We specify -Z incremental here because we want to test the partitioning for\n\/\/ incremental compilation\n\/\/ compile-flags:-Zprint-trans-items=eager -Zincremental=tmp\/partitioning-tests\/extern-generic -Zshare-generics=y\n\n#![allow(dead_code)]\n#![crate_type=\"lib\"]\n\n\/\/ aux-build:cgu_generic_function.rs\nextern crate cgu_generic_function;\n\n\/\/~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[Internal]\nfn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n}\n\nmod mod1 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n\n mod mod1 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n }\n}\n\nmod mod2 {\n use cgu_generic_function;\n\n \/\/~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[Internal]\n fn user() {\n let _ = cgu_generic_function::foo(\"abc\");\n }\n}\n\nmod mod3 {\n \/\/~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[Internal]\n fn non_user() {}\n}\n\n\/\/ Make sure the two generic functions from the extern crate get instantiated\n\/\/ once for the current crate\n\/\/~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function.volatile[External]\n\/\/~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function.volatile[External]\n<|endoftext|>"} {"text":"<commit_before><commit_msg>made Point structure to have one string property<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add const-generics test<commit_after>\/\/ check-pass\n#![allow(incomplete_features)]\n#![feature(const_generics)]\n\nstruct Const<const N: usize>;\ntrait Foo<const N: usize> {}\n\nimpl<const N: usize> Foo<N> for Const<N> {}\n\nfn foo_impl(_: impl Foo<3>) {}\n\nfn foo_explicit<T: Foo<3>>(_: T) {}\n\nfn foo_where<T>(_: T)\nwhere\n T: Foo<3>,\n{\n}\n\nfn main() {\n foo_impl(Const);\n foo_impl(Const::<3>);\n\n foo_explicit(Const);\n foo_explicit(Const::<3>);\n\n foo_where(Const);\n foo_where(Const::<3>);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>initial implementation of cql3 binary protocol<commit_after>extern mod std;\n\nuse core::result;\nuse core::str;\nuse core::vec;\nuse core::io;\nuse core::io::ReaderUtil;\nuse core::io::WriterUtil;\n\nuse std::net_ip;\nuse std::net_tcp;\nuse std::net_tcp::TcpSocketBuf;\nuse std::uv_global_loop;\nuse std::uv_iotask::IoTask;\nuse std::bigint;\n\ntrait CqlSerializable {\n fn len(&self) -> uint;\n fn serialize<T: io::Writer>(&self, buf: &T);\n}\n\ntrait CqlReader {\n fn read_cql_str(&self) -> ~str;\n fn read_cql_long_str(&self) -> ~str;\n fn read_cql_rows(&self) -> cql_rows;\n fn read_cql_message(&self) -> cql_message;\n}\n\nimpl<T: ReaderUtil> CqlReader for T {\n fn read_cql_str(&self) -> ~str {\n let len = self.read_be_u16() as uint;\n str::from_bytes(self.read_bytes(len))\n }\n\n fn read_cql_long_str(&self) -> ~str {\n let len = self.read_be_u32() as uint;\n str::from_bytes(self.read_bytes(len))\n }\n\n fn read_cql_rows(&self) -> cql_rows {\n let flags = self.read_be_u32();\n let column_count = self.read_be_u32();\n let (keyspace, table) = \n if flags == 0x0001 {\n let keyspace_str = self.read_cql_str();\n let table_str = 
self.read_cql_str();\n (keyspace_str, table_str)\n } else {\n (~\"\", ~\"\")\n };\n\n let mut row_metadata:~[cql_row_metadata] = ~[];\n for u32::range(0, column_count) |_| {\n let (keyspace, table) = \n if flags == 0x0001 {\n (~\"\", ~\"\")\n } else {\n let keyspace_str = self.read_cql_str();\n let table_str = self.read_cql_str();\n (keyspace_str, table_str)\n };\n let col_name = self.read_cql_str();\n let type_key = self.read_be_u16();\n let type_name = \n if type_key >= 0x20 {\n self.read_cql_str()\n } else {\n ~\"\"\n };\n\n row_metadata.push(cql_row_metadata {\n keyspace: keyspace,\n table: table,\n col_name: col_name,\n col_type: cql_column_type(type_key),\n col_type_name: type_name\n });\n }\n\n let rows_count = self.read_be_u32();\n\n let mut rows:~[cql_row] = ~[];\n for u32::range(0, rows_count) |_| {\n let mut row: cql_row = cql_row{ cols: ~[] };\n for row_metadata.each |meta| {\n let col = match meta.col_type {\n COLUMN_ASCII => cql_string(self.read_cql_long_str()),\n COLUMN_VARCHAR => cql_string(self.read_cql_long_str()),\n COLUMN_TEXT => cql_string(self.read_cql_long_str()),\n\n COLUMN_INT => cql_i32(self.read_be_i32()),\n COLUMN_BIGINT => cql_i64(self.read_be_i64()),\n COLUMN_FLOAT => cql_f32(self.read_be_u32() as f32),\n COLUMN_DOUBLE => cql_f64(self.read_be_u64() as f64),\n\n \/*\n COLUMN_CUSTOM => ,\n COLUMN_BLOB => ,\n COLUMN_BOOLEAN => ,\n COLUMN_COUNTER => ,\n COLUMN_DECIMAL => ,\n COLUMN_TIMESTAMP => ,\n COLUMN_UUID => ,\n COLUMN_VARINT => ,\n COLUMN_TIMEUUID => ,\n COLUMN_INET => ,\n COLUMN_LIST => ,\n COLUMN_MAP => ,\n COLUMN_SET => ,\n *\/\n _ => cql_i32(0),\n };\n\n row.cols.push(col);\n }\n rows.push(row);\n }\n\n cql_rows {\n flags: flags,\n column_count: column_count,\n keyspace: keyspace,\n table: table,\n row_metadata: row_metadata,\n rows_count: rows_count,\n rows: rows,\n }\n }\n\n fn read_cql_message(&self) -> cql_message {\n let version = self.read_u8();\n let flags = self.read_u8();\n let stream = self.read_i8();\n let opcode:cql_opcode = cql_opcode(self.read_u8());\n self.read_be_u32();\n\n let payload = match opcode {\n OPCODE_READY => empty,\n OPCODE_ERROR => {\n let code = self.read_be_u32();\n let msg = self.read_cql_str();\n error(code, msg)\n },\n OPCODE_RESULT => {\n let code = self.read_be_u32();\n match code {\n 0x0001 => {\n result_void\n },\n 0x0002 => {\n result_rows(self.read_cql_rows())\n },\n 0x0003 => {\n let msg = self.read_cql_str();\n result_keyspace(msg)\n },\n _ => empty\n }\n }\n _ => empty,\n };\n\n\n return cql_message {\n version: version,\n flags: flags,\n stream: stream,\n opcode: opcode,\n payload: payload,\n };\n }\n}\n\nstruct pair {\n key: ~str,\n value: ~str,\n}\n\nimpl CqlSerializable for pair {\n fn serialize<T: io::Writer>(&self, buf: &T) {\n buf.write_be_u16(self.key.len() as u16);\n buf.write(str::to_bytes(self.key));\n buf.write_be_u16(self.value.len() as u16);\n buf.write(str::to_bytes(self.value));\n }\n \n fn len(&self) -> uint {\n return 4 + self.key.len() + self.value.len();\n }\n}\n\nstruct string_map {\n pairs: ~[pair],\n}\n\nimpl CqlSerializable for string_map {\n fn serialize<T: io::Writer>(&self, buf: &T) {\n buf.write_be_u16(self.pairs.len() as u16);\n for self.pairs.each |pair| {\n pair.serialize(buf);\n }\n }\n \n fn len(&self) -> uint {\n let mut len = 2u;\n for self.pairs.each |pair| {\n len += pair.len();\n }\n len\n }\n}\n\nenum cql_consistency {\n CONSISTENCY_ANY = 0x0000,\n CONSISTENCY_ONE = 0x0001,\n CONSISTENCY_TWO = 0x0002,\n CONSISTENCY_THREE = 0x0003,\n CONSISTENCY_QUORUM = 0x0004,\n 
CONSISTENCY_ALL = 0x0005,\n CONSISTENCY_LOCAL_QUORUM = 0x0006,\n CONSISTENCY_EACH_QUORUM = 0x0007,\n CONSISTENCY_UNKNOWN,\n}\n\nfn cql_consistency(val: u16) -> cql_consistency {\n match val {\n 0 => CONSISTENCY_ANY,\n 1 => CONSISTENCY_ONE,\n 2 => CONSISTENCY_TWO,\n 3 => CONSISTENCY_THREE,\n 4 => CONSISTENCY_QUORUM,\n 5 => CONSISTENCY_ALL,\n 6 => CONSISTENCY_LOCAL_QUORUM,\n 7 => CONSISTENCY_EACH_QUORUM,\n _ => CONSISTENCY_UNKNOWN\n }\n}\n\nenum cql_column_type {\n COLUMN_CUSTOM = 0x0000,\n COLUMN_ASCII = 0x0001,\n COLUMN_BIGINT = 0x0002,\n COLUMN_BLOB = 0x0003,\n COLUMN_BOOLEAN = 0x0004,\n COLUMN_COUNTER = 0x0005,\n COLUMN_DECIMAL = 0x0006,\n COLUMN_DOUBLE = 0x0007,\n COLUMN_FLOAT = 0x0008,\n COLUMN_INT = 0x0009,\n COLUMN_TEXT = 0x000A,\n COLUMN_TIMESTAMP = 0x000B,\n COLUMN_UUID = 0x000C,\n COLUMN_VARCHAR = 0x000D,\n COLUMN_VARINT = 0x000E,\n COLUMN_TIMEUUID = 0x000F,\n COLUMN_INET = 0x0010,\n COLUMN_LIST = 0x0020,\n COLUMN_MAP = 0x0021,\n COLUMN_SET = 0x0022,\n COLUMN_UNKNOWN,\n}\n\nfn cql_column_type(val: u16) -> cql_column_type {\n match val {\n 0x0000 => COLUMN_CUSTOM,\n 0x0001 => COLUMN_ASCII,\n 0x0002 => COLUMN_BIGINT,\n 0x0003 => COLUMN_BLOB,\n 0x0004 => COLUMN_BOOLEAN,\n 0x0005 => COLUMN_COUNTER,\n 0x0006 => COLUMN_DECIMAL,\n 0x0007 => COLUMN_DOUBLE,\n 0x0008 => COLUMN_FLOAT,\n 0x0009 => COLUMN_INT,\n 0x000A => COLUMN_TEXT,\n 0x000B => COLUMN_TIMESTAMP,\n 0x000C => COLUMN_UUID,\n 0x000D => COLUMN_VARCHAR,\n 0x000E => COLUMN_VARINT,\n 0x000F => COLUMN_TIMEUUID,\n 0x0010 => COLUMN_INET,\n 0x0020 => COLUMN_LIST,\n 0x0021 => COLUMN_MAP,\n 0x0022 => COLUMN_SET,\n _ => COLUMN_UNKNOWN\n }\n}\n\nstruct cql_row_metadata {\n keyspace: ~str,\n table: ~str,\n col_name: ~str,\n col_type: cql_column_type,\n col_type_name: ~str,\n}\n\nenum cql_col {\n cql_string(~str),\n\n cql_i32(i32),\n cql_i64(i64),\n\n cql_blob(~[u8]),\n cql_bool(bool),\n\n cql_counter(u64),\n\n cql_f32(f32),\n cql_f64(f64),\n\n cql_timestamp(u64),\n cql_bigint(bigint::BigInt),\n}\n\nstruct cql_row {\n cols: ~[cql_col],\n}\n\nstruct cql_rows {\n flags: u32,\n column_count: u32,\n keyspace: ~str,\n table: ~str,\n row_metadata: ~[cql_row_metadata],\n rows_count: u32,\n rows: ~[cql_row],\n}\n\nenum cql_payload {\n startup(string_map),\n query(~str, cql_consistency),\n error(u32, ~str),\n\n result_void(),\n result_rows(cql_rows),\n result_keyspace(~str),\n result_schema_change(),\n\n empty(),\n}\n\n\nconst CQL_VERSION:u8 = 0x01;\n\nenum cql_opcode {\n OPCODE_ERROR = 0x00,\n OPCODE_STARTUP = 0x01,\n OPCODE_READY = 0x02,\n OPCODE_AUTHENTICATE = 0x03,\n OPCODE_CREDENTIALS = 0x04,\n OPCODE_OPTIONS = 0x05,\n OPCODE_SUPPORTED = 0x06,\n OPCODE_QUERY = 0x07,\n OPCODE_RESULT = 0x08,\n OPCODE_PREPARE = 0x09,\n OPCODE_EXECUTE = 0x0A,\n OPCODE_REGISTER = 0x0B,\n OPCODE_EVENT = 0x0C,\n OPCODE_UNKNOWN\n}\n\nfn cql_opcode(val: u8) -> cql_opcode {\n match val {\n 0x00 => OPCODE_ERROR,\n 0x01 => OPCODE_STARTUP,\n 0x02 => OPCODE_READY,\n 0x03 => OPCODE_AUTHENTICATE,\n 0x04 => OPCODE_CREDENTIALS,\n 0x05 => OPCODE_OPTIONS, \n 0x06 => OPCODE_SUPPORTED,\n 0x07 => OPCODE_QUERY,\n 0x08 => OPCODE_RESULT,\n 0x09 => OPCODE_PREPARE,\n 0x0A => OPCODE_EXECUTE,\n 0x0B => OPCODE_REGISTER,\n 0x0C => OPCODE_EVENT,\n _ => OPCODE_UNKNOWN\n }\n}\n\nstruct cql_message {\n version: u8,\n flags: u8,\n stream: i8,\n opcode: cql_opcode,\n payload: cql_payload,\n}\n\nimpl CqlSerializable for cql_message {\n fn serialize<T: io::Writer>(&self, buf: &T) {\n buf.write_u8(self.version);\n buf.write_u8(self.flags);\n buf.write_i8(self.stream);\n buf.write_u8(self.opcode as 
u8);\n buf.write_be_u32((self.len()-8) as u32);\n\n match copy self.payload {\n startup(map) => {\n map.serialize(buf)\n },\n query(query_str, consistency) => {\n buf.write_be_u32(query_str.len() as u32);\n buf.write(str::to_bytes(query_str));\n buf.write_be_u16(consistency as u16);\n },\n _ => (),\n }\n }\n fn len(&self) -> uint {\n 8 + match copy self.payload {\n startup(map) => {\n map.len()\n },\n query(query_str, _) => {\n 4 + query_str.len() + 2\n },\n _ => {\n 0\n }\n }\n }\n}\n\n\nfn Options() -> cql_message {\n return cql_message {\n version: CQL_VERSION,\n flags: 0x00,\n stream: 0x01,\n opcode: OPCODE_OPTIONS,\n payload: empty,\n };\n}\n\nfn Startup() -> cql_message {\n let payload = string_map {\n pairs:~[pair{key: ~\"CQL_VERSION\", value: ~\"3.0.0\"}],\n };\n return cql_message {\n version: CQL_VERSION,\n flags: 0x00,\n stream: 0x01,\n opcode: OPCODE_STARTUP,\n payload: startup(payload),\n };\n}\n\nfn Query(query_str: ~str, con: cql_consistency) -> cql_message {\n return cql_message {\n version: CQL_VERSION,\n flags: 0x00,\n stream: 0x01,\n opcode: OPCODE_QUERY,\n payload: query(query_str, con),\n };\n}\n\nstruct cql_client {\n socket: @net_tcp::TcpSocketBuf,\n}\n\nimpl cql_client {\n fn query(&self, query_str: ~str) -> cql_message {\n Query(query_str, CONSISTENCY_ONE).serialize::<net_tcp::TcpSocketBuf>(self.socket);\n\n (*self.socket).read_cql_message()\n }\n}\n\nstruct cql_err {\n err_name: ~str,\n err_msg: ~str,\n}\n\nfn cql_err(name: ~str, msg: ~str) -> cql_err {\n return cql_err{err_name: name, err_msg: msg};\n}\n\nfn get_sock(ip: ~str, port: uint) -> net_tcp::TcpSocket {\n let task = @uv_global_loop::get();\n let addr = net_ip::v4::parse_addr(ip);\n\n let res = net_tcp::connect(addr, port, task);\n if(res.is_err()) {\n io::println(\"Connection failed\");\n }\n\n res.unwrap()\n}\n\nfn cql_client_create(ip: ~str, port: uint) -> result::Result<cql_client, cql_err> {\n let socket = get_sock(ip, port);\n let buf = @net_tcp::socket_buf(socket);\n\n let msg_startup = Startup();\n msg_startup.serialize::<net_tcp::TcpSocketBuf>(buf);\n\n let response = (*buf).read_cql_message();\n match response.opcode {\n OPCODE_READY => result::Ok(cql_client { socket: buf }),\n _ => result::Err(cql_err(fmt!(\"Invalid opcode: %?\", response.opcode), ~\"\"))\n }\n}\n\nfn main() {\n let res = ~cql_client_create(~\"127.0.0.1\", 9042);\n if res.is_err() {\n return;\n }\n\n let client = res.get_ref();\n \/\/client.query(~\"use test\");\n let response = client.query(~\"select id, email from test.test\");\n io::println(fmt!(\"%?\", response));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Start writing disk device.<commit_after>use super::ConcurrentDevice;\n\nuse z80e_core_rust::{ Z80IODevice };\n\nuse std::sync::{ Arc, Condvar, Mutex };\nuse std::sync::atomic::{ AtomicBool, AtomicUsize, Ordering };\nuse std::io::{ self, Read, Write, Seek, SeekFrom, ErrorKind };\nuse std::fs::{ OpenOptions, File };\nuse std::path::Path;\nuse std::thread;\nuse std::str;\n\n\/\/ Status port bitflags.\n\/\/ 8-bit values, but \"must\" be usize to avoid overly-verbose casts for AtomicUsize calls.\nconst COMMAND_READY: usize = 1 << 4;\nconst DATA_READY: usize = 1 << 5;\nconst RESERVED: usize = 1 << 6;\nconst ERROR: usize = 1 << 7;\nconst DISK_MASK: usize = 0x0F;\n\n\/\/ Commands.\nconst NOP: u8 = 0;\nconst SEL_DSK: u8 = 1;\nconst SEL_TRK: u8 = 2;\nconst SEL_SEC: u8 = 3;\nconst READ: u8 = 4;\nconst WRITE: u8 = 5;\n\npub struct Disk {\n file: File,\n spt: u16,\n tracks: u16,\n}\n\nimpl Disk {\n pub fn open<T: 
AsRef<Path>>(path: &T) -> io::Result<Disk> {\n let mut file = try!(OpenOptions::new().read(true).write(true).open(path));\n {\n let mut magic: [u8; 10] = [0; 10];\n let mut read = 0;\n while read < magic.len() {\n read += try!(file.read(&mut magic[read..]));\n }\n if match str::from_utf8(&magic) {\n Ok(x) => x,\n Err(err) => return Err(io::Error::new(ErrorKind::InvalidData, \"Not a valid disk image.\")),\n } != \"<CPM_Disk>\" {\n return Err(io::Error::new(ErrorKind::InvalidData, \"Not a valid disk image.\"));\n }\n }\n try!(file.seek(SeekFrom::Start(32)));\n let mut spt: [u8; 2] = [0; 2];\n let mut read = 0;\n while read < spt.len() {\n read += try!(file.read(&mut spt[read..]));\n }\n let spt: u16 = (spt[0] as u16) | ((spt[1] as u16) << 8);\n try!(file.seek(SeekFrom::Current(1)));\n let mut blm: [u8; 1] = [0; 1];\n while try!(file.read(&mut blm)) == 0 {};\n let blm = blm[0];\n let bls = blm + 1;\n try!(file.seek(SeekFrom::Current(1)));\n let mut dsm: [u8; 2] = [0; 2];\n let mut read = 0;\n while read < dsm.len() {\n read += try!(file.read(&mut dsm[read..]));\n }\n let dsm: u16 = (dsm[0] as u16) | ((dsm[1] as u16) << 8);\n Ok(Disk {\n file: file,\n spt: spt,\n tracks: ((bls as u16 * (dsm + 1)) \/ spt),\n })\n }\n}\n\n#[derive(Clone)]\npub struct DiskController {\n pub status: Arc<AtomicUsize>,\n do_command: Arc<Condvar>,\n buffer: Arc<Mutex<Buffer>>,\n parameters: Arc<Mutex<Parameters>>,\n}\n\nimpl DiskController {\n fn new() -> DiskController {\n DiskController {\n status: Arc::new(AtomicUsize::new(0)),\n do_command: Arc::new(Condvar::new()),\n buffer: Arc::new(Mutex::new(Buffer::new())),\n parameters: Arc::new(Mutex::new(Parameters::new())),\n }\n }\n}\n\nimpl ConcurrentDevice for DiskController {\n fn run(&mut self, die: Arc<AtomicBool>) {\n \n }\n}\n\nstruct Buffer {\n bytes: [u8; 0x80],\n i: u8,\n}\n\nimpl Buffer {\n fn new() -> Buffer {\n Buffer {\n bytes: [0; 0x80],\n i: 0,\n }\n }\n}\n\nstruct Parameters {\n disk: u8,\n track: u16,\n sector: u8,\n command: u8,\n}\n\nimpl Parameters {\n fn new() -> Parameters {\n Parameters {\n disk: 0,\n track: 0,\n sector: 0,\n command: NOP,\n }\n }\n}\n\nstruct StatusPort {\n controller: DiskController,\n}\n\nimpl StatusPort {\n fn new(controller: DiskController) -> StatusPort {\n StatusPort {\n controller: controller,\n }\n }\n}\n\nimpl Z80IODevice for StatusPort {\n fn read_in(&self) -> u8 {\n (self.controller.status.load(Ordering::SeqCst) & 0xff) as u8\n }\n fn write_out(&mut self, value: u8) {\n self.controller.status.fetch_and(!(COMMAND_READY | DATA_READY), Ordering::SeqCst);\n let mut params = self.controller.parameters.lock().unwrap();\n params.command = value;\n self.controller.do_command.notify_one();\n }\n}\n \nstruct DataPort {\n controller: DiskController,\n}\n\nimpl DataPort {\n fn new(controller: DiskController) -> DataPort {\n DataPort {\n controller: controller,\n }\n }\n}\n\nimpl Z80IODevice for DataPort {\n fn read_in (&self) -> u8 {\n if (self.controller.status.load(Ordering::SeqCst) & DATA_READY) != 0 {\n let mut buffer = self.controller.buffer.lock().unwrap();\n let i = (buffer.i & !0x80) as usize;\n buffer.i += 1;\n buffer.bytes[i]\n } else {\n let _ = writeln!(io::stderr(), \"disk: Attempted to read data register when not ready.\");\n 0\n }\n }\n fn write_out(&mut self, value: u8) {\n if (self.controller.status.load(Ordering::SeqCst) & DATA_READY) != 0 {\n let mut buffer = self.controller.buffer.lock().unwrap();\n let i = (buffer.i & !0x80) as usize;\n buffer.i += 1;\n buffer.bytes[i] = value;\n } else {\n let _ = 
writeln!(io::stderr(), \"disk: Attempted to write data register when not ready.\");\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add help file to deal with help related material.<commit_after>use getopts::{optflag,usage,OptGroup};\nuse std::io::{println,print};\n\npub fn opts() -> [OptGroup, .. 2]{\n return [\n optflag(\"v\", \"version\", \"display the version\"),\n optflag(\"h\", \"help\", \"print this help menu\")\n ]\n}\n\npub fn print_usage(_program: &str, opts: &[OptGroup]) {\n let space = \" \";\n println(\"Usage: muxed [options]\");\n println(\"\");\n println(\"Commands:\");\n println(format!(\"{}new create a new project file\", space).as_slice());\n print(format!(\"{}open open a new project file\", space).as_slice());\n println(usage(\"\", opts).as_slice());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Had some sucess with clap<commit_after>extern crate clap;\nextern crate rusqlite;\nuse clap::{App, Arg};\n\nmod loader;\n\nfn main() {\n\n \/\/ Once all App settings (including all arguments) have been set, you call get_matches() which\n \/\/ parses the string provided by the user, and returns all the valid matches to the ones you\n \/\/ specified.\n \/\/\n \/\/ You can then query the matches struct to get information about how the user ran the program\n \/\/ at startup.\n \/\/\n \/\/ For this example, let's assume you created an App which accepts three arguments (plus two\n \/\/ generated by clap), a flag to display debugging information triggered with \"-d\" or\n \/\/ \"--debug\" as well as an option argument which specifies a custom configuration file to use\n \/\/ triggered with \"-c file\" or \"--config file\" or \"--config=file\" and finally a positional\n \/\/ argument which is the input file we want to work with, this will be the only required\n \/\/ argument.\n let matches = App::new(\"whenenv\")\n .about(\"Parses an input file to do awesome things\")\n .version(\"1.0\")\n .author(\"Kevin K. 
<kbknapp@gmail.com>\")\n .arg(Arg::with_name(\"verbose\")\n .help(\"Increase log output.\")\n .short(\"v\")\n .multiple(true) \n .long(\"verbose\"))\n .arg(Arg::with_name(\"quiet\")\n .help(\"Decrease log output.\")\n .short(\"q\")\n .multiple(true) \n .long(\"quiet\"))\n .arg(Arg::with_name(\"env\")\n .short(\"e\")\n .long(\"env\")\n .value_name(\"ENVIROMENT_VARIABLE\")\n .help(\"Which enviroment variables to process\")\n .multiple(true)\n .takes_value(true))\n .arg(Arg::with_name(\"dir-jobs\")\n .long(\"dir-jobs\")\n .value_name(\"DIR_JOB\")\n .help(\"directory storing json jobs.\")\n .multiple(true)\n .takes_value(true))\n .arg(Arg::with_name(\"dir-scripts\")\n .long(\"dir-scripts\")\n .value_name(\"DIR_SCRIPT\")\n .help(\"directory storing jobs scripts.\")\n .multiple(true)\n .takes_value(true))\n .arg(Arg::with_name(\"config\")\n .short(\"c\")\n .long(\"config\")\n .value_name(\"FILE\")\n .help(\"Sets a custom config file\")\n .takes_value(true))\n .get_matches();\n\n \/\/ We can find out whether or not debugging was turned on\n if matches.is_present(\"debug\") {\n println!(\"Debugging is turned on\");\n }\n\n \/\/ If we wanted to some custom initialization based off some configuration file provided\n \/\/ by the user, we could get the file (A string of the file)\n if let Some(ref file) = matches.value_of(\"config\") {\n println!(\"Using config file: {}\", file);\n }\n if let Some(ref env) = matches.value_of(\"env\") {\n println!(\"Using config file: {}\", env);\n }\n \n\n \n \n if let Some(in_v) = matches.values_of(\"dir-scripts\") {\n for in_file in in_v {\n println!(\"An input dir-scripts: {}\", in_file);\n loader::listy(&in_file);\n }\n }\n \/\/ Continued program logic goes here...\n loader::deligate(matches)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Added license support.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add display for metadata format<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for second half of issue #15689<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[deriving(Clone)]\nenum Test<'a> {\n Slice(&'a int)\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic file system tests<commit_after>#[macro_use]\nextern crate lazy_static;\n\nextern crate rex;\n\nmod util;\n\nuse std::path::Path;\n\nuse rex::frontend::{Event, KeyPress};\n\nuse util::mock_filesystem::MockFileSystem;\n\n\/\/ Little helper function till Iterator.eq stabalizes\nfn iter_eq<I, J>(one: I, other: J) -> bool where\n I: IntoIterator,\n J: IntoIterator,\n I::Item: PartialEq<J::Item>,\n{\n let mut one = one.into_iter();\n let mut other = other.into_iter();\n\n loop {\n match (one.next(), other.next()) {\n (None, None) => return true,\n (None, _) | (_, None) => return false,\n (Some(x), Some(y)) => if x != y { return false },\n }\n }\n}\n\n#[test]\nfn test_basic_open() {\n \/\/ Create a vec with a marker in the end\n let mut v = vec![0; 1000];\n let len = v.len();\n v[len-1] = 0xAA;\n MockFileSystem::put(\"test_basic_open\", v);\n\n let (mut edit, mut frontend) = util::simple_init_empty();\n let pedit = &mut edit;\n\n \/\/ Open file with the marker\n frontend.run_keys(pedit, vec![KeyPress::Shortcut('o')]);\n frontend.run_str(pedit, \"test_basic_open\");\n frontend.run_keys(pedit, vec![KeyPress::Enter]);\n\n \/\/ Find the marker\n frontend.run_keys(pedit, vec![KeyPress::Shortcut('f')]);\n frontend.run_keys(pedit, vec![KeyPress::Shortcut('h')]);\n frontend.run_str(pedit, \"AA\");\n frontend.run_keys(pedit, vec![KeyPress::Enter]);\n\n \/\/ And make sure it is in the right place\n assert_eq!(pedit.get_position(), (len-1) as isize);\n\n \/\/ Make sure the opened file name is correct\n let name = Path::new(\"test_basic_open\");\n assert_eq!(name, pedit.get_file_path().unwrap());\n}\n\n#[test]\nfn test_basic_save() {\n \/\/ Create a view over a generic vector\n let v = util::generate_vec(1000);\n let (mut edit, mut frontend) = util::simple_init_with_vec(v.clone());\n let pedit = &mut edit;\n\n \/\/ Save it to a file\n frontend.run_keys(pedit, vec![KeyPress::Shortcut('s')]);\n frontend.run_str(pedit, \"test_basic_save\");\n frontend.run_keys(pedit, vec![KeyPress::Enter]);\n\n \/\/ Make sure they are equal\n assert!(iter_eq(v.iter(), MockFileSystem::get_inner(\"test_basic_save\").iter()));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse super::{LinkArgs, Target, TargetOptions};\nuse super::emscripten_base::{cmd};\n\npub fn target() -> Result<Target, String> {\n let mut post_link_args = LinkArgs::new();\n post_link_args.insert(LinkerFlavor::Em,\n vec![\"-s\".to_string(),\n \"WASM=1\".to_string(),\n \"-s\".to_string(),\n \"ERROR_ON_UNDEFINED_SYMBOLS=1\".to_string()]);\n\n let opts = TargetOptions {\n linker: cmd(\"emcc\"),\n ar: cmd(\"emar\"),\n\n dynamic_linking: false,\n executables: true,\n \/\/ Today emcc emits two files - a .js file to bootstrap and\n \/\/ possibly interpret the wasm, and a .wasm file\n exe_suffix: \".js\".to_string(),\n linker_is_gnu: true,\n link_env: vec![(\"EMCC_WASM_BACKEND\".to_string(), \"1\".to_string())],\n allow_asm: false,\n obj_is_bitcode: true,\n is_like_emscripten: true,\n max_atomic_width: Some(32),\n post_link_args: post_link_args,\n target_family: Some(\"unix\".to_string()),\n .. Default::default()\n };\n Ok(Target {\n llvm_target: \"wasm32-unknown-unknown\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_os: \"emscripten\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-n32:64-S128\".to_string(),\n arch: \"wasm32\".to_string(),\n linker_flavor: LinkerFlavor::Em,\n options: opts,\n })\n}\n<commit_msg>Pass debugging arguments to emcc<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse super::{LinkArgs, Target, TargetOptions};\nuse super::emscripten_base::{cmd};\n\npub fn target() -> Result<Target, String> {\n let mut post_link_args = LinkArgs::new();\n post_link_args.insert(LinkerFlavor::Em,\n vec![\"-s\".to_string(),\n \"WASM=1\".to_string(),\n \"-s\".to_string(),\n \"ASSERTIONS=1\".to_string(),\n \"-s\".to_string(),\n \"ERROR_ON_UNDEFINED_SYMBOLS=1\".to_string(),\n \"-g3\".to_string()]);\n\n let opts = TargetOptions {\n linker: cmd(\"emcc\"),\n ar: cmd(\"emar\"),\n\n dynamic_linking: false,\n executables: true,\n \/\/ Today emcc emits two files - a .js file to bootstrap and\n \/\/ possibly interpret the wasm, and a .wasm file\n exe_suffix: \".js\".to_string(),\n linker_is_gnu: true,\n link_env: vec![(\"EMCC_WASM_BACKEND\".to_string(), \"1\".to_string())],\n allow_asm: false,\n obj_is_bitcode: true,\n is_like_emscripten: true,\n max_atomic_width: Some(32),\n post_link_args: post_link_args,\n target_family: Some(\"unix\".to_string()),\n .. 
Default::default()\n };\n Ok(Target {\n llvm_target: \"wasm32-unknown-unknown\".to_string(),\n target_endian: \"little\".to_string(),\n target_pointer_width: \"32\".to_string(),\n target_os: \"emscripten\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"unknown\".to_string(),\n data_layout: \"e-m:e-p:32:32-i64:64-n32:64-S128\".to_string(),\n arch: \"wasm32\".to_string(),\n linker_flavor: LinkerFlavor::Em,\n options: opts,\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Wyvern: A final tranche of rustdoc comment updates for renderervk.rs<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Exercise the unused_mut attribute in some positive and negative cases\n\n#![allow(unused_assignments)]\n#![allow(unused_variables)]\n#![allow(dead_code)]\n#![deny(unused_mut)]\n\n\nfn main() {\n \/\/ negative cases\n let mut a = 3; \/\/~ ERROR: variable does not need to be mutable\n let mut a = 2; \/\/~ ERROR: variable does not need to be mutable\n let mut b = 3; \/\/~ ERROR: variable does not need to be mutable\n let mut a = vec![3]; \/\/~ ERROR: variable does not need to be mutable\n let (mut a, b) = (1, 2); \/\/~ ERROR: variable does not need to be mutable\n let mut a; \/\/~ ERROR: variable does not need to be mutable\n a = 3;\n\n let mut b; \/\/~ ERROR: variable does not need to be mutable\n if true {\n b = 3;\n } else {\n b = 4;\n }\n\n match 30 {\n mut x => {} \/\/~ ERROR: variable does not need to be mutable\n }\n match (30, 2) {\n (mut x, 1) | \/\/~ ERROR: variable does not need to be mutable\n (mut x, 2) |\n (mut x, 3) => {\n }\n _ => {}\n }\n\n let x = |mut y: isize| 10; \/\/~ ERROR: variable does not need to be mutable\n fn what(mut foo: isize) {} \/\/~ ERROR: variable does not need to be mutable\n\n \/\/ positive cases\n let mut a = 2;\n a = 3;\n let mut a = Vec::new();\n a.push(3);\n let mut a = Vec::new();\n callback(|| {\n a.push(3);\n });\n let (mut a, b) = (1, 2);\n a = 34;\n\n match 30 {\n mut x => {\n x = 21;\n }\n }\n\n match (30, 2) {\n (mut x, 1) |\n (mut x, 2) |\n (mut x, 3) => {\n x = 21\n }\n _ => {}\n }\n\n let x = |mut y: isize| y = 32;\n fn nothing(mut foo: isize) { foo = 37; }\n\n \/\/ leading underscore should avoid the warning, just like the\n \/\/ unused variable lint.\n let mut _allowed = 1;\n}\n\nfn callback<F>(f: F) where F: FnOnce() {}\n\n\/\/ make sure the lint attribute can be turned off\n#[allow(unused_mut)]\nfn foo(mut a: isize) {\n let mut a = 3;\n let mut b = vec![2];\n}\n<commit_msg>Added tests for bugs fixed.<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Exercise the unused_mut attribute in some positive and negative cases\n\n#![allow(unused_assignments)]\n#![allow(unused_variables)]\n#![allow(dead_code)]\n#![deny(unused_mut)]\n\n\nfn main() {\n \/\/ negative cases\n let mut a = 3; \/\/~ ERROR: variable does not need to be mutable\n let mut a = 2; \/\/~ ERROR: variable does not need to be mutable\n let mut b = 3; \/\/~ ERROR: variable does not need to be mutable\n let mut a = vec![3]; \/\/~ ERROR: variable does not need to be mutable\n let (mut a, b) = (1, 2); \/\/~ ERROR: variable does not need to be mutable\n let mut a; \/\/~ ERROR: variable does not need to be mutable\n a = 3;\n\n let mut b; \/\/~ ERROR: variable does not need to be mutable\n if true {\n b = 3;\n } else {\n b = 4;\n }\n\n match 30 {\n mut x => {} \/\/~ ERROR: variable does not need to be mutable\n }\n match (30, 2) {\n (mut x, 1) | \/\/~ ERROR: variable does not need to be mutable\n (mut x, 2) |\n (mut x, 3) => {\n }\n _ => {}\n }\n\n let x = |mut y: isize| 10; \/\/~ ERROR: variable does not need to be mutable\n fn what(mut foo: isize) {} \/\/~ ERROR: variable does not need to be mutable\n\n let mut a = &mut 5; \/\/~ ERROR: variable does not need to be mutable\n *a = 4;\n\n let mut a = 5;\n let mut b = (&mut a,);\n *b.0 = 4; \/\/~^ ERROR: variable does not need to be mutable\n\n fn mut_ref_arg(mut arg : &mut [u8]) -> &mut [u8] {\n &mut arg[..] \/\/~^ ERROR: variable does not need to be mutable\n }\n\n let mut v : &mut Vec<()> = &mut vec![]; \/\/~ ERROR: variable does not need to be mutable\n v.push(());\n\n \/\/ positive cases\n let mut a = 2;\n a = 3;\n let mut a = Vec::new();\n a.push(3);\n let mut a = Vec::new();\n callback(|| {\n a.push(3);\n });\n let (mut a, b) = (1, 2);\n a = 34;\n\n match 30 {\n mut x => {\n x = 21;\n }\n }\n\n match (30, 2) {\n (mut x, 1) |\n (mut x, 2) |\n (mut x, 3) => {\n x = 21\n }\n _ => {}\n }\n\n let x = |mut y: isize| y = 32;\n fn nothing(mut foo: isize) { foo = 37; }\n\n \/\/ leading underscore should avoid the warning, just like the\n \/\/ unused variable lint.\n let mut _allowed = 1;\n}\n\nfn callback<F>(f: F) where F: FnOnce() {}\n\n\/\/ make sure the lint attribute can be turned off\n#[allow(unused_mut)]\nfn foo(mut a: isize) {\n let mut a = 3;\n let mut b = vec![2];\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::attr::{Attr, AttrHelpers, AttrValue};\nuse dom::bindings::codegen::Bindings::HTMLTableElementBinding::HTMLTableElementMethods;\nuse dom::bindings::codegen::Bindings::HTMLTableElementBinding;\nuse dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;\nuse dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableCaptionElementCast};\nuse dom::bindings::codegen::InheritTypes::{HTMLTableElementDerived, NodeCast};\nuse dom::bindings::js::{JSRef, Rootable, Temporary};\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, EventTargetTypeId};\nuse dom::element::ElementTypeId;\nuse dom::htmlelement::{HTMLElement, HTMLElementTypeId};\nuse dom::htmltablecaptionelement::HTMLTableCaptionElement;\nuse dom::node::{Node, NodeHelpers, NodeTypeId};\nuse dom::virtualmethods::VirtualMethods;\n\nuse util::str::{self, DOMString, LengthOrPercentageOrAuto};\n\nuse cssparser::RGBA;\nuse string_cache::Atom;\n\nuse std::cell::Cell;\n\n#[dom_struct]\npub struct HTMLTableElement {\n htmlelement: HTMLElement,\n background_color: Cell<Option<RGBA>>,\n border: Cell<Option<u32>>,\n cellspacing: Cell<Option<u32>>,\n width: Cell<LengthOrPercentageOrAuto>,\n}\n\nimpl HTMLTableElementDerived for EventTarget {\n fn is_htmltableelement(&self) -> bool {\n *self.type_id() ==\n EventTargetTypeId::Node(\n NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableElement)))\n }\n}\n\nimpl HTMLTableElement {\n fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)\n -> HTMLTableElement {\n HTMLTableElement {\n htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableElement,\n localName,\n prefix,\n document),\n background_color: Cell::new(None),\n border: Cell::new(None),\n cellspacing: Cell::new(None),\n width: Cell::new(LengthOrPercentageOrAuto::Auto),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)\n -> Temporary<HTMLTableElement> {\n let element = HTMLTableElement::new_inherited(localName, prefix, document);\n Node::reflect_node(box element, document, HTMLTableElementBinding::Wrap)\n }\n}\n\nimpl<'a> HTMLTableElementMethods for JSRef<'a, HTMLTableElement> {\n \/\/ https:\/\/www.whatwg.org\/html\/#dom-table-caption\n fn GetCaption(self) -> Option<Temporary<HTMLTableCaptionElement>> {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n node.children()\n .map(|c| c.root())\n .filter_map(|c| {\n HTMLTableCaptionElementCast::to_ref(c.r()).map(Temporary::from_rooted)\n })\n .next()\n }\n\n \/\/ https:\/\/www.whatwg.org\/html\/#dom-table-caption\n fn SetCaption(self, new_caption: Option<JSRef<HTMLTableCaptionElement>>) {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n let old_caption = self.GetCaption();\n\n match old_caption {\n Some(htmlelem) => {\n let htmlelem_root = htmlelem.root();\n let old_caption_node: JSRef<Node> = NodeCast::from_ref(htmlelem_root.r());\n assert!(node.RemoveChild(old_caption_node).is_ok());\n }\n None => ()\n }\n\n new_caption.map(|caption| {\n let new_caption_node: JSRef<Node> = NodeCast::from_ref(caption);\n assert!(node.AppendChild(new_caption_node).is_ok());\n });\n }\n}\n\npub trait HTMLTableElementHelpers {\n fn get_background_color(&self) -> Option<RGBA>;\n fn get_border(&self) -> Option<u32>;\n fn get_cellspacing(&self) -> Option<u32>;\n fn get_width(&self) -> LengthOrPercentageOrAuto;\n}\n\nimpl HTMLTableElementHelpers for HTMLTableElement {\n fn get_background_color(&self) -> Option<RGBA> {\n 
self.background_color.get()\n }\n\n fn get_border(&self) -> Option<u32> {\n self.border.get()\n }\n\n fn get_cellspacing(&self) -> Option<u32> {\n self.cellspacing.get()\n }\n\n fn get_width(&self) -> LengthOrPercentageOrAuto {\n self.width.get()\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLTableElement> {\n fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn after_set_attr(&self, attr: JSRef<Attr>) {\n if let Some(ref s) = self.super_type() {\n s.after_set_attr(attr);\n }\n\n match attr.local_name() {\n &atom!(\"bgcolor\") => {\n self.background_color.set(str::parse_legacy_color(&attr.value()).ok())\n }\n &atom!(\"border\") => {\n \/\/ According to HTML5 § 14.3.9, invalid values map to 1px.\n self.border.set(Some(str::parse_unsigned_integer(attr.value()\n .chars()).unwrap_or(1)))\n }\n &atom!(\"cellspacing\") => {\n self.cellspacing.set(str::parse_unsigned_integer(attr.value().chars()))\n }\n &atom!(\"width\") => self.width.set(str::parse_length(&attr.value())),\n _ => ()\n }\n }\n\n fn before_remove_attr(&self, attr: JSRef<Attr>) {\n if let Some(ref s) = self.super_type() {\n s.before_remove_attr(attr);\n }\n\n match attr.local_name() {\n &atom!(\"bgcolor\") => self.background_color.set(None),\n &atom!(\"border\") => self.border.set(None),\n &atom!(\"cellspacing\") => self.cellspacing.set(None),\n &atom!(\"width\") => self.width.set(LengthOrPercentageOrAuto::Auto),\n _ => ()\n }\n }\n\n fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue {\n match local_name {\n &atom!(\"border\") => AttrValue::from_u32(value, 1),\n _ => self.super_type().unwrap().parse_plain_attribute(local_name, value),\n }\n }\n}\n\n<commit_msg>Use if let in HTMLTableElement::SetCaption.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::attr::{Attr, AttrHelpers, AttrValue};\nuse dom::bindings::codegen::Bindings::HTMLTableElementBinding::HTMLTableElementMethods;\nuse dom::bindings::codegen::Bindings::HTMLTableElementBinding;\nuse dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;\nuse dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableCaptionElementCast};\nuse dom::bindings::codegen::InheritTypes::{HTMLTableElementDerived, NodeCast};\nuse dom::bindings::js::{JSRef, Rootable, Temporary, OptionalRootable, RootedReference};\nuse dom::document::Document;\nuse dom::eventtarget::{EventTarget, EventTargetTypeId};\nuse dom::element::ElementTypeId;\nuse dom::htmlelement::{HTMLElement, HTMLElementTypeId};\nuse dom::htmltablecaptionelement::HTMLTableCaptionElement;\nuse dom::node::{Node, NodeHelpers, NodeTypeId};\nuse dom::virtualmethods::VirtualMethods;\n\nuse util::str::{self, DOMString, LengthOrPercentageOrAuto};\n\nuse cssparser::RGBA;\nuse string_cache::Atom;\n\nuse std::cell::Cell;\n\n#[dom_struct]\npub struct HTMLTableElement {\n htmlelement: HTMLElement,\n background_color: Cell<Option<RGBA>>,\n border: Cell<Option<u32>>,\n cellspacing: Cell<Option<u32>>,\n width: Cell<LengthOrPercentageOrAuto>,\n}\n\nimpl HTMLTableElementDerived for EventTarget {\n fn is_htmltableelement(&self) -> bool {\n *self.type_id() ==\n EventTargetTypeId::Node(\n NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableElement)))\n }\n}\n\nimpl HTMLTableElement {\n fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)\n -> HTMLTableElement {\n HTMLTableElement {\n htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableElement,\n localName,\n prefix,\n document),\n background_color: Cell::new(None),\n border: Cell::new(None),\n cellspacing: Cell::new(None),\n width: Cell::new(LengthOrPercentageOrAuto::Auto),\n }\n }\n\n #[allow(unrooted_must_root)]\n pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>)\n -> Temporary<HTMLTableElement> {\n let element = HTMLTableElement::new_inherited(localName, prefix, document);\n Node::reflect_node(box element, document, HTMLTableElementBinding::Wrap)\n }\n}\n\nimpl<'a> HTMLTableElementMethods for JSRef<'a, HTMLTableElement> {\n \/\/ https:\/\/www.whatwg.org\/html\/#dom-table-caption\n fn GetCaption(self) -> Option<Temporary<HTMLTableCaptionElement>> {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n node.children()\n .map(|c| c.root())\n .filter_map(|c| {\n HTMLTableCaptionElementCast::to_ref(c.r()).map(Temporary::from_rooted)\n })\n .next()\n }\n\n \/\/ https:\/\/www.whatwg.org\/html\/#dom-table-caption\n fn SetCaption(self, new_caption: Option<JSRef<HTMLTableCaptionElement>>) {\n let node: JSRef<Node> = NodeCast::from_ref(self);\n\n if let Some(ref caption) = self.GetCaption().root() {\n assert!(node.RemoveChild(NodeCast::from_ref(caption.r())).is_ok());\n }\n\n if let Some(caption) = new_caption {\n assert!(node.AppendChild(NodeCast::from_ref(caption)).is_ok());\n }\n }\n}\n\npub trait HTMLTableElementHelpers {\n fn get_background_color(&self) -> Option<RGBA>;\n fn get_border(&self) -> Option<u32>;\n fn get_cellspacing(&self) -> Option<u32>;\n fn get_width(&self) -> LengthOrPercentageOrAuto;\n}\n\nimpl HTMLTableElementHelpers for HTMLTableElement {\n fn get_background_color(&self) -> Option<RGBA> {\n self.background_color.get()\n }\n\n fn get_border(&self) -> Option<u32> {\n self.border.get()\n }\n\n fn get_cellspacing(&self) -> Option<u32> {\n 
self.cellspacing.get()\n }\n\n fn get_width(&self) -> LengthOrPercentageOrAuto {\n self.width.get()\n }\n}\n\nimpl<'a> VirtualMethods for JSRef<'a, HTMLTableElement> {\n fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {\n let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);\n Some(htmlelement as &VirtualMethods)\n }\n\n fn after_set_attr(&self, attr: JSRef<Attr>) {\n if let Some(ref s) = self.super_type() {\n s.after_set_attr(attr);\n }\n\n match attr.local_name() {\n &atom!(\"bgcolor\") => {\n self.background_color.set(str::parse_legacy_color(&attr.value()).ok())\n }\n &atom!(\"border\") => {\n \/\/ According to HTML5 § 14.3.9, invalid values map to 1px.\n self.border.set(Some(str::parse_unsigned_integer(attr.value()\n .chars()).unwrap_or(1)))\n }\n &atom!(\"cellspacing\") => {\n self.cellspacing.set(str::parse_unsigned_integer(attr.value().chars()))\n }\n &atom!(\"width\") => self.width.set(str::parse_length(&attr.value())),\n _ => ()\n }\n }\n\n fn before_remove_attr(&self, attr: JSRef<Attr>) {\n if let Some(ref s) = self.super_type() {\n s.before_remove_attr(attr);\n }\n\n match attr.local_name() {\n &atom!(\"bgcolor\") => self.background_color.set(None),\n &atom!(\"border\") => self.border.set(None),\n &atom!(\"cellspacing\") => self.cellspacing.set(None),\n &atom!(\"width\") => self.width.set(LengthOrPercentageOrAuto::Auto),\n _ => ()\n }\n }\n\n fn parse_plain_attribute(&self, local_name: &Atom, value: DOMString) -> AttrValue {\n match local_name {\n &atom!(\"border\") => AttrValue::from_u32(value, 1),\n _ => self.super_type().unwrap().parse_plain_attribute(local_name, value),\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test that creating a 2nd mutable ref from a NonNull invalidates the first<commit_after>use std::ptr::NonNull;\n\nfn main() { unsafe {\n let x = &mut 0;\n let mut ptr1 = NonNull::from(x);\n let mut ptr2 = ptr1.clone();\n let raw1 = ptr1.as_mut();\n let _raw2 = ptr2.as_mut();\n let _val = *raw1; \/\/~ ERROR borrow stack\n} }\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::{c_int, c_char};\nuse std::ffi::CString;\n\npub fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n use std::sync::Once;\n static INIT: Once = Once::new();\n static mut POISONED: bool = false;\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED = true;\n }\n\n configure_llvm(sess);\n });\n\n if POISONED {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features must be known to LLVM or the feature\n\/\/ detection code will walk past the end of the feature array,\n\/\/ leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\\0\", \"vfp2\\0\", \"vfp3\\0\", \"vfp4\\0\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"avx\\0\", \"avx2\\0\", \"bmi\\0\", \"bmi2\\0\", \"sse\\0\",\n \"sse2\\0\", \"sse3\\0\", \"sse4.1\\0\", \"sse4.2\\0\",\n \"ssse3\\0\", \"tbm\\0\", \"lzcnt\\0\", \"popcnt\\0\",\n \"sse4a\\0\", \"rdrnd\\0\", \"rdseed\\0\", \"fma\\0\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\\0\", \"hvx-double\\0\"];\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n\n let whitelist = match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n _ => &[],\n };\n\n let mut features = Vec::new();\n for feat in whitelist {\n assert_eq!(feat.chars().last(), Some('\\0'));\n if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {\n features.push(Symbol::intern(&feat[..feat.len() - 1]));\n }\n }\n features\n}\n\npub fn print_version() {\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub fn print(req: PrintRequest, sess: &Session) {\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n\npub fn enable_llvm_debug() {\n unsafe { llvm::LLVMRustSetDebug(1); }\n}\n<commit_msg>Use AtomicBool instead of a 'static mut' for LLVM init posioning<commit_after>\/\/ Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse syntax_pos::symbol::Symbol;\nuse back::write::create_target_machine;\nuse llvm;\nuse rustc::session::Session;\nuse rustc::session::config::PrintRequest;\nuse libc::{c_int, c_char};\nuse std::ffi::CString;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\nuse std::sync::Once;\n\npub fn init(sess: &Session) {\n unsafe {\n \/\/ Before we touch LLVM, make sure that multithreading is enabled.\n static POISONED: AtomicBool = AtomicBool::new(false);\n static INIT: Once = Once::new();\n INIT.call_once(|| {\n if llvm::LLVMStartMultithreaded() != 1 {\n \/\/ use an extra bool to make sure that all future usage of LLVM\n \/\/ cannot proceed despite the Once not running more than once.\n POISONED.store(true, Ordering::SeqCst);\n }\n\n configure_llvm(sess);\n });\n\n if POISONED.load(Ordering::SeqCst) {\n bug!(\"couldn't enable multi-threaded LLVM\");\n }\n }\n}\n\nunsafe fn configure_llvm(sess: &Session) {\n let mut llvm_c_strs = Vec::new();\n let mut llvm_args = Vec::new();\n\n {\n let mut add = |arg: &str| {\n let s = CString::new(arg).unwrap();\n llvm_args.push(s.as_ptr());\n llvm_c_strs.push(s);\n };\n add(\"rustc\"); \/\/ fake program name\n if sess.time_llvm_passes() { add(\"-time-passes\"); }\n if sess.print_llvm_passes() { add(\"-debug-pass=Structure\"); }\n\n for arg in &sess.opts.cg.llvm_args {\n add(&(*arg));\n }\n }\n\n llvm::LLVMInitializePasses();\n\n llvm::initialize_available_targets();\n\n llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,\n llvm_args.as_ptr());\n}\n\n\/\/ WARNING: the features must be known to LLVM or the feature\n\/\/ detection code will walk past the end of the feature array,\n\/\/ leading to crashes.\n\nconst ARM_WHITELIST: &'static [&'static str] = &[\"neon\\0\", \"vfp2\\0\", \"vfp3\\0\", \"vfp4\\0\"];\n\nconst X86_WHITELIST: &'static [&'static str] = &[\"avx\\0\", \"avx2\\0\", \"bmi\\0\", \"bmi2\\0\", \"sse\\0\",\n \"sse2\\0\", \"sse3\\0\", \"sse4.1\\0\", \"sse4.2\\0\",\n \"ssse3\\0\", \"tbm\\0\", \"lzcnt\\0\", \"popcnt\\0\",\n \"sse4a\\0\", \"rdrnd\\0\", \"rdseed\\0\", \"fma\\0\"];\n\nconst HEXAGON_WHITELIST: &'static [&'static str] = &[\"hvx\\0\", \"hvx-double\\0\"];\n\npub fn target_features(sess: &Session) -> Vec<Symbol> {\n let target_machine = create_target_machine(sess);\n\n let whitelist = match &*sess.target.target.arch {\n \"arm\" => ARM_WHITELIST,\n \"x86\" | \"x86_64\" => X86_WHITELIST,\n \"hexagon\" => HEXAGON_WHITELIST,\n _ => &[],\n };\n\n let mut features = Vec::new();\n for feat in whitelist {\n assert_eq!(feat.chars().last(), Some('\\0'));\n if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {\n features.push(Symbol::intern(&feat[..feat.len() - 1]));\n }\n }\n features\n}\n\npub fn print_version() {\n unsafe {\n println!(\"LLVM version: {}.{}\",\n llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());\n }\n}\n\npub fn print_passes() {\n unsafe { llvm::LLVMRustPrintPasses(); }\n}\n\npub fn print(req: PrintRequest, sess: &Session) {\n let tm = create_target_machine(sess);\n unsafe {\n match req {\n PrintRequest::TargetCPUs => 
llvm::LLVMRustPrintTargetCPUs(tm),\n PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),\n _ => bug!(\"rustc_trans can't handle print request: {:?}\", req),\n }\n }\n}\n\npub fn enable_llvm_debug() {\n unsafe { llvm::LLVMRustSetDebug(1); }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>renamed some floating-point constants<commit_after><|endoftext|>"} {"text":"<commit_before>\/**\n The layout task. Performs layout on the DOM, builds display lists and sends them to be\n rendered.\n*\/\n\nuse au = gfx::geometry;\nuse au::au;\nuse content::content_task;\nuse core::dvec::DVec;\nuse css::resolve::apply::apply_style;\nuse css::values::Stylesheet;\nuse dl = gfx::display_list;\nuse dom::event::{Event, ReflowEvent};\nuse dom::node::{Node, LayoutData};\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::render_task;\nuse gfx::render_layers::RenderLayer;\nuse layout::box::RenderBox;\nuse layout::box_builder::LayoutTreeBuilder;\nuse layout::context::LayoutContext;\nuse opt = core::option;\nuse render_task::RenderTask;\nuse resource::image_cache_task::{ImageCacheTask, ImageResponseMsg};\nuse resource::local_image_cache::LocalImageCache;\nuse servo_text::font_cache::FontCache;\nuse std::arc::ARC;\nuse std::net::url::Url;\nuse core::util::replace;\n\nuse layout::traverse::*;\nuse comm::*;\nuse task::*;\n\npub type LayoutTask = comm::Chan<Msg>;\n\npub enum LayoutQuery {\n ContentBox(Node)\n}\n\npub type LayoutQueryResponse = Result<LayoutQueryResponse_, ()>;\n\nenum LayoutQueryResponse_ {\n ContentSize(Size2D<int>)\n}\n\npub enum Msg {\n BuildMsg(Node, ARC<Stylesheet>, Url, comm::Chan<Event>, Size2D<uint>, pipes::Chan<()>),\n QueryMsg(LayoutQuery, comm::Chan<LayoutQueryResponse>),\n ExitMsg\n}\n\nfn LayoutTask(render_task: RenderTask,\n img_cache_task: ImageCacheTask) -> LayoutTask {\n do spawn_listener::<Msg> |from_content| {\n Layout(render_task, img_cache_task.clone(), from_content).start();\n }\n}\n\nstruct Layout {\n render_task: RenderTask,\n image_cache_task: ImageCacheTask,\n local_image_cache: @LocalImageCache,\n from_content: comm::Port<Msg>,\n\n font_cache: @FontCache,\n \/\/ This is used to root auxilliary RCU reader data\n layout_refs: DVec<@LayoutData>\n}\n\nfn Layout(render_task: RenderTask, \n image_cache_task: ImageCacheTask,\n from_content: comm::Port<Msg>) -> Layout {\n\n Layout {\n render_task: render_task,\n image_cache_task: image_cache_task.clone(),\n local_image_cache: @LocalImageCache(move image_cache_task),\n from_content: from_content,\n font_cache: FontCache(),\n layout_refs: DVec()\n }\n}\n\nimpl Layout {\n\n fn handle_query(query: LayoutQuery, \n reply_chan: comm::Chan<LayoutQueryResponse>) {\n match query {\n ContentBox(node) => {\n \/\/ TODO: extract me to a method when I get sibling arms\n let response = match node.aux(|a| copy *a).flow {\n None => Err(()),\n Some(flow) => {\n let start_val : Option<Rect<au>> = None;\n let rect = do flow.foldl_boxes_for_node(node, start_val) |acc, box| {\n match acc {\n Some(acc) => Some(acc.union(&box.content_box())),\n None => Some(box.content_box())\n }\n };\n \n match rect {\n None => Err(()),\n Some(rect) => {\n let size = Size2D(au::to_px(rect.size.width),\n au::to_px(rect.size.height));\n Ok(ContentSize(move size))\n }\n }\n }\n };\n\n reply_chan.send(response)\n }\n }\n }\n\n fn start() {\n while self.handle_request() {\n \/\/ loop indefinitely\n }\n }\n \n fn handle_request() -> bool {\n\n match self.from_content.recv() {\n QueryMsg(query, chan) => 
self.handle_query(query, chan),\n ExitMsg => {\n debug!(\"layout: ExitMsg received\");\n return false\n },\n BuildMsg(node, styles, doc_url, to_content, window_size, join_chan) => {\n debug!(\"layout: received layout request for: %s\", doc_url.to_str());\n debug!(\"layout: parsed Node tree\");\n debug!(\"%?\", node.dump());\n\n \/\/ Reset the image cache\n self.local_image_cache.next_round(self.make_on_image_available_cb(to_content));\n\n let screen_size = Size2D(au::from_px(window_size.width as int),\n au::from_px(window_size.height as int));\n\n let layout_ctx = LayoutContext {\n image_cache: self.local_image_cache,\n font_cache: self.font_cache,\n doc_url: doc_url,\n screen_size: Rect(Point2D(au(0), au(0)), screen_size)\n };\n\n let layout_root: @FlowContext = do util::time::time(\"layout: tree construction\") {\n \/\/ TODO: this is dumb. we don't need 3 separate traversals.\n node.initialize_style_for_subtree(&layout_ctx, &self.layout_refs);\n node.recompute_style_for_subtree(&layout_ctx, &styles);\n \/* resolve styles (convert relative values) down the node tree *\/\n apply_style(&layout_ctx, node);\n \n let builder = LayoutTreeBuilder();\n let layout_root: @FlowContext = match builder.construct_trees(&layout_ctx,\n node) {\n Ok(root) => root,\n Err(*) => fail ~\"Root flow should always exist\"\n };\n\n debug!(\"layout: constructed Flow tree\");\n debug!(\"%?\", layout_root.dump());\n\n layout_root\n };\n\n do util::time::time(\"layout: main layout\") {\n \/* perform layout passes over the flow tree *\/\n do layout_root.traverse_postorder |f| { f.bubble_widths(&layout_ctx) }\n do layout_root.traverse_preorder |f| { f.assign_widths(&layout_ctx) }\n do layout_root.traverse_postorder |f| { f.assign_height(&layout_ctx) }\n }\n\n do util::time::time(\"layout: display list building\") {\n let dlist = DVec();\n let builder = dl::DisplayListBuilder {\n ctx: &layout_ctx,\n };\n let render_layer = RenderLayer {\n display_list: move dlist,\n size: Size2D(au::to_px(screen_size.width) as uint,\n au::to_px(screen_size.height) as uint)\n };\n\n \/\/ TODO: set options on the builder before building\n \/\/ TODO: be smarter about what needs painting\n layout_root.build_display_list(&builder, © layout_root.d().position,\n &render_layer.display_list);\n self.render_task.send(render_task::RenderMsg(move render_layer));\n } \/\/ time(layout: display list building)\n\n \/\/ Tell content we're done\n join_chan.send(());\n\n } \/\/ BuildMsg\n } \/\/ match\n\n true\n }\n\n \/\/ When images can't be loaded in time to display they trigger\n \/\/ this callback in some task somewhere. This w\n fn make_on_image_available_cb(to_content: comm::Chan<Event>) -> ~fn(ImageResponseMsg) {\n let f: ~fn(ImageResponseMsg) = |_msg| {\n to_content.send(ReflowEvent)\n };\n return f;\n }\n}\n\n<commit_msg>Refactor layout_task a little<commit_after>\/**\n The layout task. 
Performs layout on the DOM, builds display lists and sends them to be\n rendered.\n*\/\n\nuse au = gfx::geometry;\nuse au::au;\nuse content::content_task;\nuse core::dvec::DVec;\nuse css::resolve::apply::apply_style;\nuse css::values::Stylesheet;\nuse dl = gfx::display_list;\nuse dom::event::{Event, ReflowEvent};\nuse dom::node::{Node, LayoutData};\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse geom::size::Size2D;\nuse gfx::render_task;\nuse gfx::render_layers::RenderLayer;\nuse layout::box::RenderBox;\nuse layout::box_builder::LayoutTreeBuilder;\nuse layout::context::LayoutContext;\nuse opt = core::option;\nuse render_task::RenderTask;\nuse resource::image_cache_task::{ImageCacheTask, ImageResponseMsg};\nuse resource::local_image_cache::LocalImageCache;\nuse servo_text::font_cache::FontCache;\nuse std::arc::ARC;\nuse std::net::url::Url;\nuse core::util::replace;\n\nuse layout::traverse::*;\nuse comm::*;\nuse task::*;\n\npub type LayoutTask = comm::Chan<Msg>;\n\npub enum LayoutQuery {\n ContentBox(Node)\n}\n\npub type LayoutQueryResponse = Result<LayoutQueryResponse_, ()>;\n\nenum LayoutQueryResponse_ {\n ContentSize(Size2D<int>)\n}\n\npub enum Msg {\n BuildMsg(Node, ARC<Stylesheet>, Url, comm::Chan<Event>, Size2D<uint>, pipes::Chan<()>),\n QueryMsg(LayoutQuery, comm::Chan<LayoutQueryResponse>),\n ExitMsg\n}\n\nfn LayoutTask(render_task: RenderTask,\n img_cache_task: ImageCacheTask) -> LayoutTask {\n do spawn_listener::<Msg> |from_content| {\n Layout(render_task, img_cache_task.clone(), from_content).start();\n }\n}\n\nstruct Layout {\n render_task: RenderTask,\n image_cache_task: ImageCacheTask,\n local_image_cache: @LocalImageCache,\n from_content: comm::Port<Msg>,\n\n font_cache: @FontCache,\n \/\/ This is used to root auxilliary RCU reader data\n layout_refs: DVec<@LayoutData>\n}\n\nfn Layout(render_task: RenderTask, \n image_cache_task: ImageCacheTask,\n from_content: comm::Port<Msg>) -> Layout {\n\n Layout {\n render_task: render_task,\n image_cache_task: image_cache_task.clone(),\n local_image_cache: @LocalImageCache(move image_cache_task),\n from_content: from_content,\n font_cache: FontCache(),\n layout_refs: DVec()\n }\n}\n\nimpl Layout {\n\n fn start() {\n while self.handle_request() {\n \/\/ loop indefinitely\n }\n }\n\n fn handle_request() -> bool {\n\n match self.from_content.recv() {\n BuildMsg(move node, move styles, move doc_url,\n move to_content, move window_size, move join_chan) => {\n self.handle_build(node, styles, doc_url, to_content, window_size, join_chan);\n }\n QueryMsg(query, chan) => self.handle_query(query, chan),\n ExitMsg => {\n debug!(\"layout: ExitMsg received\");\n return false\n }\n }\n\n true\n }\n\n fn handle_build(node: Node, styles: ARC<Stylesheet>, doc_url: Url,\n to_content: comm::Chan<Event>, window_size: Size2D<uint>,\n join_chan: pipes::Chan<()>) {\n debug!(\"layout: received layout request for: %s\", doc_url.to_str());\n debug!(\"layout: parsed Node tree\");\n debug!(\"%?\", node.dump());\n\n \/\/ Reset the image cache\n self.local_image_cache.next_round(self.make_on_image_available_cb(to_content));\n\n let screen_size = Size2D(au::from_px(window_size.width as int),\n au::from_px(window_size.height as int));\n\n let layout_ctx = LayoutContext {\n image_cache: self.local_image_cache,\n font_cache: self.font_cache,\n doc_url: doc_url,\n screen_size: Rect(Point2D(au(0), au(0)), screen_size)\n };\n\n let layout_root: @FlowContext = do util::time::time(\"layout: tree construction\") {\n \/\/ TODO: this is dumb. 
we don't need 3 separate traversals.\n node.initialize_style_for_subtree(&layout_ctx, &self.layout_refs);\n node.recompute_style_for_subtree(&layout_ctx, &styles);\n \/* resolve styles (convert relative values) down the node tree *\/\n apply_style(&layout_ctx, node);\n \n let builder = LayoutTreeBuilder();\n let layout_root: @FlowContext = match builder.construct_trees(&layout_ctx,\n node) {\n Ok(root) => root,\n Err(*) => fail ~\"Root flow should always exist\"\n };\n\n debug!(\"layout: constructed Flow tree\");\n debug!(\"%?\", layout_root.dump());\n\n layout_root\n };\n\n do util::time::time(\"layout: main layout\") {\n \/* perform layout passes over the flow tree *\/\n do layout_root.traverse_postorder |f| { f.bubble_widths(&layout_ctx) }\n do layout_root.traverse_preorder |f| { f.assign_widths(&layout_ctx) }\n do layout_root.traverse_postorder |f| { f.assign_height(&layout_ctx) }\n }\n\n do util::time::time(\"layout: display list building\") {\n let dlist = DVec();\n let builder = dl::DisplayListBuilder {\n ctx: &layout_ctx,\n };\n let render_layer = RenderLayer {\n display_list: move dlist,\n size: Size2D(au::to_px(screen_size.width) as uint,\n au::to_px(screen_size.height) as uint)\n };\n\n \/\/ TODO: set options on the builder before building\n \/\/ TODO: be smarter about what needs painting\n layout_root.build_display_list(&builder, © layout_root.d().position,\n &render_layer.display_list);\n self.render_task.send(render_task::RenderMsg(move render_layer));\n } \/\/ time(layout: display list building)\n\n \/\/ Tell content we're done\n join_chan.send(());\n\n }\n\n\n fn handle_query(query: LayoutQuery, \n reply_chan: comm::Chan<LayoutQueryResponse>) {\n match query {\n ContentBox(node) => {\n \/\/ TODO: extract me to a method when I get sibling arms\n let response = match node.aux(|a| copy *a).flow {\n None => Err(()),\n Some(flow) => {\n let start_val : Option<Rect<au>> = None;\n let rect = do flow.foldl_boxes_for_node(node, start_val) |acc, box| {\n match acc {\n Some(acc) => Some(acc.union(&box.content_box())),\n None => Some(box.content_box())\n }\n };\n \n match rect {\n None => Err(()),\n Some(rect) => {\n let size = Size2D(au::to_px(rect.size.width),\n au::to_px(rect.size.height));\n Ok(ContentSize(move size))\n }\n }\n }\n };\n\n reply_chan.send(response)\n }\n }\n }\n\n \/\/ When images can't be loaded in time to display they trigger\n \/\/ this callback in some task somewhere. This w\n fn make_on_image_available_cb(to_content: comm::Chan<Event>) -> ~fn(ImageResponseMsg) {\n let f: ~fn(ImageResponseMsg) = |_msg| {\n to_content.send(ReflowEvent)\n };\n return f;\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>refactor(tests): Be more explicit about publish stderr<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>auto merge of #9973 : huonw\/rust\/7580, r=luqmana<commit_after>\/\/ Copyright 2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Issue #7580\n\n\/\/ error-pattern:fail works\n#[feature(globs)];\n\nuse std::*;\n\nfn main() {\n str::with_capacity(10); \/\/ avoid an unused import message\n\n fail!(\"fail works\")\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\nuse std::fmt::{Debug, Formatter, Error as FmtError};\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse git2::{Reference as GitReference, Repository, Error as Git2Error};\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::hook::Hook;\nuse libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::error::CustomData as HECD;\nuse libimagstore::hook::result::HookResult;\nuse libimagstore::hook::position::HookPosition;\nuse libimagstore::hook::accessor::{HookDataAccessor, HookDataAccessorProvider};\nuse libimagstore::hook::accessor::StoreIdAccessor;\nuse libimagerror::trace::trace_error;\nuse libimagerror::into::IntoError;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::GitHookError as GHE;\nuse vcs::git::runtime::Runtime;\n\npub struct CreateHook {\n storepath: PathBuf,\n\n runtime: Runtime,\n\n position: HookPosition,\n}\n\nimpl CreateHook {\n\n pub fn new(storepath: PathBuf, p: HookPosition) -> CreateHook {\n CreateHook {\n runtime: Runtime::new(&storepath),\n storepath: storepath,\n position: p,\n }\n }\n\n}\n\nimpl Debug for CreateHook {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"CreateHook(storepath={:?}, repository={}, pos={:?}, cfg={:?}\",\n self.storepath,\n (if self.runtime.has_repository() { \"Some(_)\" } else { \"None\" }),\n self.position,\n self.runtime.has_config())\n }\n}\n\nimpl Hook for CreateHook {\n\n fn name(&self) -> &'static str {\n \"stdhook_git_create\"\n }\n\n fn set_config(&mut self, config: &Value) {\n if let Err(e) = self.runtime.set_config(config) {\n trace_error(&e);\n }\n }\n\n}\n\nimpl HookDataAccessorProvider for CreateHook {\n\n fn accessor(&self) -> HookDataAccessor {\n HookDataAccessor::StoreIdAccess(self)\n }\n}\n\nimpl StoreIdAccessor for CreateHook {\n\n fn access(&self, id: &StoreId) -> HookResult<()> {\n use vcs::git::action::StoreAction;\n use vcs::git::config::commit_message;\n\n debug!(\"[GIT CREATE HOOK]: {:?}\", id);\n\n debug!(\"[GIT CREATE HOOK]: Ensuring branch checkout\");\n try!(self\n .runtime\n .ensure_cfg_branch_is_checked_out()\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e)));\n debug!(\"[GIT CREATE HOOK]: Branch checked out\");\n\n self.runtime\n .config_value_or_err()\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't get Value object from config\"); e })\n .and_then(|cfg| {\n debug!(\"[GIT CREATE HOOK]: Getting repository\");\n self.runtime\n .repository()\n .map(|r| (r, cfg))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't fetch Repository\"); e })\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg)| {\n repo.signature()\n .map(|s| (repo, cfg, s))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't fetch Signature\"); e })\n .map_err_into(GHEK::RepositorySignatureFetchingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig)| {\n repo.index()\n .map(|idx| (repo, cfg, sig, idx))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't fetch 
Index\"); e })\n .map_err_into(GHEK::RepositoryIndexFetchingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, mut idx)| {\n id.strip_prefix(&self.storepath)\n .map_err_into(GHEK::StoreIdStripError)\n .and_then(|id| idx.add_path(&id).map_err_into(GHEK::RepositoryPathAddingError))\n .map(|_| (repo, cfg, sig, idx))\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't add Path: {:?}\", e); e })\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, mut idx)| {\n idx.write_tree()\n .map(|oid| (repo, cfg, sig, idx, oid))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't write Tree\"); e })\n .map_err_into(GHEK::RepositoryTreeWritingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, idx, oid)| {\n repo.find_tree(oid)\n .map(|tree| (repo, cfg, sig, idx, oid, tree))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't find Tree\"); e })\n .map_err_into(GHEK::RepositoryTreeFindingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, idx, oid, tree)| {\n let cmtmsg = commit_message(cfg, StoreAction::Create);\n repo.find_commit(oid)\n .map(|cmt| (repo, sig, tree, cmt, cmtmsg))\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't find Commit\"); e })\n .map_err_into(GHEK::RepositoryCommitFindingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, sig, tree, cmt, commitmsg)| {\n repo.commit(Some(\"HEAD\"), &sig, &sig, &commitmsg[..], &tree, &[&cmt])\n .map_err(|e| { debug!(\"[GIT CREATE HOOK]: Couldn't create Commit\"); e })\n .map_err_into(GHEK::RepositoryCommittingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .map(|_| ())\n }\n\n}\n\n<commit_msg>Refactor to use Result::map_dbg_err_str()<commit_after>use std::path::PathBuf;\nuse std::fmt::{Debug, Formatter, Error as FmtError};\nuse std::result::Result as RResult;\n\nuse toml::Value;\nuse git2::{Reference as GitReference, Repository, Error as Git2Error};\n\nuse libimagstore::storeid::StoreId;\nuse libimagstore::hook::Hook;\nuse libimagstore::hook::error::HookError as HE;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::error::CustomData as HECD;\nuse libimagstore::hook::result::HookResult;\nuse libimagstore::hook::position::HookPosition;\nuse libimagstore::hook::accessor::{HookDataAccessor, HookDataAccessorProvider};\nuse libimagstore::hook::accessor::StoreIdAccessor;\nuse libimagerror::trace::trace_error;\nuse libimagerror::into::IntoError;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::MapErrInto;\nuse vcs::git::error::GitHookErrorKind as GHEK;\nuse vcs::git::error::GitHookError as GHE;\nuse vcs::git::runtime::Runtime;\n\npub struct CreateHook {\n storepath: PathBuf,\n\n runtime: Runtime,\n\n position: HookPosition,\n}\n\nimpl CreateHook {\n\n pub fn new(storepath: PathBuf, p: HookPosition) -> CreateHook {\n CreateHook {\n runtime: Runtime::new(&storepath),\n storepath: storepath,\n position: p,\n }\n }\n\n}\n\nimpl Debug for CreateHook {\n\n fn fmt(&self, fmt: &mut Formatter) -> RResult<(), FmtError> {\n write!(fmt, \"CreateHook(storepath={:?}, repository={}, pos={:?}, cfg={:?}\",\n self.storepath,\n (if self.runtime.has_repository() { \"Some(_)\" } else { \"None\" }),\n self.position,\n self.runtime.has_config())\n }\n}\n\nimpl Hook for CreateHook {\n\n fn name(&self) -> &'static str {\n 
\"stdhook_git_create\"\n }\n\n fn set_config(&mut self, config: &Value) {\n if let Err(e) = self.runtime.set_config(config) {\n trace_error(&e);\n }\n }\n\n}\n\nimpl HookDataAccessorProvider for CreateHook {\n\n fn accessor(&self) -> HookDataAccessor {\n HookDataAccessor::StoreIdAccess(self)\n }\n}\n\nimpl StoreIdAccessor for CreateHook {\n\n fn access(&self, id: &StoreId) -> HookResult<()> {\n use vcs::git::action::StoreAction;\n use vcs::git::config::commit_message;\n\n debug!(\"[GIT CREATE HOOK]: {:?}\", id);\n\n debug!(\"[GIT CREATE HOOK]: Ensuring branch checkout\");\n try!(self\n .runtime\n .ensure_cfg_branch_is_checked_out()\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e)));\n debug!(\"[GIT CREATE HOOK]: Branch checked out\");\n\n self.runtime\n .config_value_or_err()\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't get Value object from config\")\n .and_then(|cfg| {\n debug!(\"[GIT CREATE HOOK]: Getting repository\");\n self.runtime\n .repository()\n .map(|r| (r, cfg))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't fetch Repository\")\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg)| {\n repo.signature()\n .map(|s| (repo, cfg, s))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't fetch Signature\")\n .map_err_into(GHEK::RepositorySignatureFetchingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig)| {\n repo.index()\n .map(|idx| (repo, cfg, sig, idx))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't fetch Index\")\n .map_err_into(GHEK::RepositoryIndexFetchingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, mut idx)| {\n id.strip_prefix(&self.storepath)\n .map_err_into(GHEK::StoreIdStripError)\n .and_then(|id| idx.add_path(&id).map_err_into(GHEK::RepositoryPathAddingError))\n .map(|_| (repo, cfg, sig, idx))\n .map_err_into(GHEK::RepositoryError)\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't add Path: {:?}\")\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, mut idx)| {\n idx.write_tree()\n .map(|oid| (repo, cfg, sig, idx, oid))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't write Tree\")\n .map_err_into(GHEK::RepositoryTreeWritingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, idx, oid)| {\n repo.find_tree(oid)\n .map(|tree| (repo, cfg, sig, idx, oid, tree))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't find Tree\")\n .map_err_into(GHEK::RepositoryTreeFindingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, cfg, sig, idx, oid, tree)| {\n let cmtmsg = commit_message(cfg, StoreAction::Create);\n repo.find_commit(oid)\n .map(|cmt| (repo, sig, tree, cmt, cmtmsg))\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't find Commit\")\n .map_err_into(GHEK::RepositoryCommitFindingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .and_then(|(repo, sig, tree, cmt, commitmsg)| {\n repo.commit(Some(\"HEAD\"), &sig, &sig, &commitmsg[..], &tree, &[&cmt])\n .map_dbg_err_str(\"[GIT CREATE HOOK]: Couldn't create Commit\")\n .map_err_into(GHEK::RepositoryCommittingError)\n .map_err_into(GHEK::RepositoryError)\n .map_err(|e| e.into())\n })\n .map(|_| ())\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add importobj.rs<commit_after>use std;\nuse material;\nuse std::rc::Rc;\nuse genmesh::Polygon;\nuse utils::get_full_path;\nuse cgmath::{Vector, Vector2, 
Vector3};\nuse mesh::{Model, Mesh, Vertex};\nuse obj::{Obj, Material, IndexTuple, load};\nuse memory::cast_to;\n\npub struct ModelObj<'a> {\n model_dir: std::path::PathBuf,\n map: std::collections::HashMap<IndexTuple, u32>,\n model: Model,\n position_buffer: &'a [Vector3<f32>],\n normal_buffer: &'a [Vector3<f32>],\n tex_buffer: &'a [Vector2<f32>],\n}\n\nimpl<'a> ModelObj<'a> {\n fn get_index(&mut self, index: IndexTuple) -> u32 {\n let zero2 = Vector2::new(0.0_f32, 0.0_f32);\n let zero3 = Vector3::new(0.0_f32, 0.0_f32, 0.0_f32);\n if self.map.get(&index).is_none() {\n self.model.vertex_buffer.push(\n Vertex::new(\n &self.position_buffer[index.0],\n &match index.1 {\n Some(t) => self.tex_buffer[t],\n None => zero2,\n },\n &match index.2 {\n Some(n) => self.normal_buffer[n],\n None => zero3,\n }));\n };\n let len = self.model.vertex_buffer.len() as u32 - 1;\n self.map.entry(index).or_insert(len).clone()\n }\n\n fn parse(&mut self, model_obj: &Obj<Rc<Material>>) -> Result<(), String> {\n for object in model_obj.object_iter() {\n for group in object.group_iter() {\n let mut mesh = Mesh::new();\n for polygon in group.indices() {\n match polygon {\n &Polygon::PolyTri(p) => {\n let ind0 = self.get_index(p.x);\n let ind1 = self.get_index(p.y);\n let ind2 = self.get_index(p.z);\n mesh.index_buffer.push(ind0);\n mesh.index_buffer.push(ind1);\n mesh.index_buffer.push(ind2);\n },\n &Polygon::PolyQuad(p) => {\n let ind0 = self.get_index(p.x);\n let ind1 = self.get_index(p.y);\n let ind2 = self.get_index(p.z);\n let ind3 = self.get_index(p.w);\n mesh.index_buffer.push(ind0);\n mesh.index_buffer.push(ind1);\n mesh.index_buffer.push(ind2);\n mesh.index_buffer.push(ind0);\n mesh.index_buffer.push(ind2);\n mesh.index_buffer.push(ind3);\n },\n };\n }\n let material = group.material.clone();\n mesh.material_id = match material {\n Some(m) => {\n let mut mat = material::Material::new();\n match m.ka {\n Some(v) => mat.ambient = Vector3::new(v[0], v[1], v[2]).mul_s(255.0_f32),\n None => {},\n };\n match m.kd {\n Some(v) => mat.diffuse = Vector3::new(v[0], v[1], v[2]).mul_s(255.0_f32),\n None => {},\n };\n match m.ks {\n Some(v) => mat.specular = Vector3::new(v[0], v[1], v[2]).mul_s(255.0_f32),\n None => {},\n };\n match m.map_kd {\n Some(ref path) => try!(mat.texture_from_dir(&self.model_dir, &path)),\n None => {},\n };\n mat.calc_ambient_intensity();\n self.model.material_list.push(mat);\n self.model.material_list.len() - 1\n },\n None => 0,\n };\n self.model.mesh_list.push(mesh);\n }\n }\n\n Ok(())\n }\n\n pub fn load(filename: &str) -> Result<Model, String> {\n let filepath = try!(get_full_path(filename));\n let mut model_dir = std::path::PathBuf::from(filepath.clone());\n if !model_dir.pop() {\n return Err(format!(\"not found parent dir for filepath {}\", filepath));\n }\n\n let model_obj: Obj<Rc<Material>> = load(std::path::Path::new(&filepath)).unwrap();\n let mut this = ModelObj {\n model_dir: model_dir,\n map: std::collections::HashMap::<IndexTuple, u32>::new(),\n model: Model::new(),\n position_buffer: cast_to(model_obj.position()),\n normal_buffer: cast_to(model_obj.normal()),\n tex_buffer: cast_to(model_obj.texture()),\n };\n\n let mut def_mat = material::Material::new();\n def_mat.diffuse = Vector3::new(255.0_f32, 0.0_f32, 0.0_f32);\n def_mat.ambient = Vector3::new(255.0_f32, 0.0_f32, 0.0_f32);\n this.model.material_list.push(def_mat);\n\n try!(this.parse(&model_obj));\n\n Ok(this.model)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Forgot to change a number. 
Cannot test build like usual because of Rust breaking changes.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Log when we back off.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add rust code for rectangles case<commit_after>use std::cmp::Ordering;\n\n#[derive(Debug, PartialEq, Eq)]\nstruct Rectangle {\n x: usize,\n y: usize,\n dx: usize,\n dy: usize,\n}\n\nimpl Rectangle {\n fn new(x: usize, y: usize, dx: usize, dy: usize) -> Rectangle {\n Rectangle {\n x: x,\n y: y,\n dx: dx,\n dy: dy,\n }\n }\n\n fn segments(&self) -> [usize; 4] {\n [self.x, self.y, self.dx, self.dy]\n }\n}\n\nimpl PartialOrd for Rectangle {\n fn partial_cmp(&self, other: &Rectangle) -> Option<Ordering> {\n Some(self.cmp(other))\n }\n}\n\nimpl Ord for Rectangle {\n fn cmp(&self, other: &Rectangle) -> Ordering {\n self.segments().cmp(&other.segments())\n }\n}\n\n\n#[derive(Debug, PartialEq)]\nstruct Point {\n x: usize,\n y: usize,\n}\n\n#[inline]\nfn get_corners(board: &[char], width: usize) -> Vec<Point> {\n board\n .iter()\n .enumerate()\n .filter_map(|(i, &c)| if c == '+' {\n Some(Point {\n x: i % width,\n y: i \/ width,\n })\n } else {\n None\n })\n .collect()\n}\n\n#[inline]\nfn get_rectangles(corners: &[Point]) -> Vec<Rectangle> {\n let mut rectangles = Vec::new();\n\n for i in 0..corners.len() {\n for j in i..corners.len() {\n let pa = &corners[i];\n let pb = &corners[j];\n\n if pa.x == pb.x || pa.y == pb.y {\n continue;\n }\n\n if !corners.contains(&Point { x: pa.x, y: pb.y }) ||\n !corners.contains(&Point { x: pb.x, y: pa.y })\n {\n continue;\n }\n\n let (x, dx) = if pa.x > pb.x {\n (pb.x, pa.x - pb.x)\n } else {\n (pa.x, pb.x - pa.x)\n };\n\n rectangles.push(Rectangle::new(x, pa.y, dx, pb.y - pa.y));\n }\n }\n\n rectangles\n}\n\n#[inline]\nfn is_rectangle(rectangle: &Rectangle, board: &[char], width: usize) -> bool {\n let r = (1..rectangle.dx).any(|i| {\n let m = rectangle.y * width + rectangle.x + i;\n let n = (rectangle.y + rectangle.dy) * width + rectangle.x + i;\n (board[m] != '-' && board[m] != '+') || (board[n] != '-' && board[n] != '+')\n });\n\n if r {\n return false;\n }\n\n let r = (1..rectangle.dy).any(|i| {\n let m = (rectangle.y + i) * width + rectangle.x;\n let n = (rectangle.y + i) * width + rectangle.x + rectangle.dx;\n (board[m] != '|' && board[m] != '+') || (board[n] != '|' && board[n] != '+')\n });\n\n if r {\n return false;\n }\n\n true\n}\n\npub fn count(text: &[&str]) -> usize {\n let height = text.len();\n if height == 0 {\n return 0;\n }\n\n let width = text[0].len();\n if width < 2 {\n return 0;\n }\n\n let board = text.iter().flat_map(|s| s.chars()).collect::<Vec<_>>();\n\n let corners = get_corners(&board, width);\n\n let mut rectangles = get_rectangles(&corners);\n\n rectangles.sort();\n rectangles.dedup();\n rectangles\n .iter()\n .filter(|x| is_rectangle(x, &board, width))\n .count()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Implement Process Detection in the ArgumentSplitter<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Percent and Time display<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add support for recording bind descriptor set commands to arbitrary command buffers<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Borrowed pointers<commit_after>fn plus_one(x: &int) -> int {\n *x + 1\n}\n\nfn main() {\n let x: @int = @10;\n let y: ~int = ~10;\n\n println(plus_one(x).to_str());\n 
println(plus_one(y).to_str());\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>works<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Use more fine-granular function as Runtime::ignore_ids() was removed<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Don't reuse PathBuilder after calling finish()<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>First attempt at rust!<commit_after>\/\/ TODO\n\/\/ impl ToGeojson for ....\n\/\/ generic number instead of f64 for position?\n\/\/ rename Position to Pos\n\nextern crate serialize;\n\nuse std::collections::TreeMap;\nuse serialize::json::ToJson;\nuse serialize::json;\n\n\n\/*\n * Position\n * GeoJSON Format Specification § 2.1.1\n * http:\/\/geojson.org\/geojson-spec.html#positions\n *\/\npub struct Position(Vec<f64>);\n\nimpl ToJson for Position {\n fn to_json(&self) -> json::Json {\n let &Position(ref nums) = self;\n nums.to_json()\n }\n}\n\n\n\/*\n * Point\n * GeoJSON Format Specification § 2.1.2\n * http:\/\/geojson.org\/geojson-spec.html#point\n *\/\npub struct Point {\n coordinates: Position,\n}\n\nimpl ToJson for Point {\n fn to_json(&self) -> json::Json {\n let mut d = TreeMap::new();\n d.insert(\"type\".to_string(), json::String(\"Point\".to_string()));\n d.insert(\"coordinates\".to_string(), self.coordinates.to_json());\n d.to_json()\n }\n}\n\n\n\/*\n * MultiPoint\n * GeoJSON Format Specification § 2.1.3\n * http:\/\/geojson.org\/geojson-spec.html#multipoint\n *\/\npub struct MultiPoint {\n coordinates: Vec<Position>,\n}\n\n\nfn main() {\n let point = Point {\n coordinates: Position(vec![1., 2., 3.]),\n };\n\n let j: json::Json = point.to_json();\n let s: String = j.to_pretty_str();\n\n println!(\"{}\", s);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug and trace output<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add function to get relative file path<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle initial setup steps for a pool.\n\/\/ Initial setup steps are steps that do not alter the environment.\n\nuse std::collections::{HashMap, HashSet};\nuse std::fs::OpenOptions;\nuse std::path::{Path, PathBuf};\n\nuse serde_json;\n\nuse devicemapper::{devnode_to_devno, Device, Sectors};\n\nuse stratis::{ErrorEnum, StratisError, StratisResult};\n\nuse super::super::super::types::{BlockDevTier, DevUuid, PoolUuid};\n\nuse super::super::serde_structs::{BackstoreSave, BlockDevSave, PoolSave};\n\nuse super::blockdev::StratBlockDev;\nuse super::device::{blkdev_size, is_stratis_device};\nuse super::metadata::BDA;\nuse super::util::get_stratis_block_devices;\n\n\/\/\/ Find all Stratis devices.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to a map of devices to devnodes for each pool.\npub fn find_all() -> StratisResult<HashMap<PoolUuid, HashMap<Device, PathBuf>>> {\n let mut pool_map = HashMap::new();\n\n for devnode in get_stratis_block_devices()? {\n match devnode_to_devno(&devnode)? 
{\n None => continue,\n Some(devno) => {\n is_stratis_device(&devnode)?.and_then(|pool_uuid| {\n pool_map\n .entry(pool_uuid)\n .or_insert_with(HashMap::new)\n .insert(Device::from(devno), devnode)\n });\n }\n }\n }\n Ok(pool_map)\n}\n\n\/\/\/ Get the most recent metadata from a set of Devices for a given pool UUID.\n\/\/\/ Returns None if no metadata found for this pool.\n#[allow(implicit_hasher)]\npub fn get_metadata(\n pool_uuid: PoolUuid,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<Option<PoolSave>> {\n \/\/ Get pairs of device nodes and matching BDAs\n \/\/ If no BDA, or BDA UUID does not match pool UUID, skip.\n \/\/ If there is an error reading the BDA, error. There could have been\n \/\/ vital information on that BDA, for example, it may have contained\n \/\/ the newest metadata.\n let mut bdas = Vec::new();\n for devnode in devnodes.values() {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?;\n if let Some(bda) = bda {\n if bda.pool_uuid() == pool_uuid {\n bdas.push((devnode, bda));\n }\n }\n }\n\n \/\/ Most recent time should never be None if this was a properly\n \/\/ created pool; this allows for the method to be called in other\n \/\/ circumstances.\n let most_recent_time = {\n match bdas.iter()\n .filter_map(|&(_, ref bda)| bda.last_update_time())\n .max()\n {\n Some(time) => time,\n None => return Ok(None),\n }\n };\n\n \/\/ Try to read from all available devnodes that could contain most\n \/\/ recent metadata. In the event of errors, continue to try until all are\n \/\/ exhausted.\n for &(devnode, ref bda) in bdas.iter()\n .filter(|&&(_, ref bda)| bda.last_update_time() == Some(most_recent_time))\n {\n let poolsave = OpenOptions::new()\n .read(true)\n .open(devnode)\n .ok()\n .and_then(|mut f| bda.load_state(&mut f).ok())\n .and_then(|opt| opt)\n .and_then(|data| serde_json::from_slice(&data).ok());\n\n if poolsave.is_some() {\n return Ok(poolsave);\n }\n }\n\n \/\/ If no data has yet returned, we have an error. 
That is, we should have\n \/\/ some metadata, because we have a most recent time, but we failed to\n \/\/ get any.\n let err_str = \"timestamp indicates data was written, but no data successfully read\";\n Err(StratisError::Engine(ErrorEnum::NotFound, err_str.into()))\n}\n\n\/\/\/ Get all the blockdevs corresponding to this pool that can be obtained from\n\/\/\/ the given devices.\n\/\/\/ Returns an error if a BDA can not be read or can not be found on any\n\/\/\/ blockdev in devnodes.\n\/\/\/ Returns an error if the blockdevs obtained do not match the metadata.\n\/\/\/ Returns a tuple, of which the first are the data devs, and the second\n\/\/\/ are the devs that support the cache tier.\n\/\/\/ Precondition: Every device in devnodes has already been determined to\n\/\/\/ belong to the pool with the specified pool uuid.\n#[allow(implicit_hasher)]\npub fn get_blockdevs(\n pool_uuid: PoolUuid,\n backstore_save: &BackstoreSave,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<(Vec<StratBlockDev>, Vec<StratBlockDev>)> {\n let recorded_data_map: HashMap<DevUuid, (usize, &BlockDevSave)> = backstore_save\n .data_devs\n .iter()\n .enumerate()\n .map(|(i, bds)| (bds.uuid, (i, bds)))\n .collect();\n\n let recorded_cache_map: HashMap<DevUuid, (usize, &BlockDevSave)> =\n match backstore_save.cache_devs {\n Some(ref cache_devs) => cache_devs\n .iter()\n .enumerate()\n .map(|(i, bds)| (bds.uuid, (i, bds)))\n .collect(),\n None => HashMap::new(),\n };\n\n let mut segment_table: HashMap<DevUuid, Vec<(Sectors, Sectors)>> = HashMap::new();\n for seg in &backstore_save.data_segments {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n if let Some(ref segs) = backstore_save.cache_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n if let Some(ref segs) = backstore_save.meta_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n\n \/\/ Construct a single StratBlockDev. Return the tier to which the\n \/\/ blockdev has been found to belong. Returns an error if the block\n \/\/ device has shrunk, no metadata can be found for the block device,\n \/\/ or it is impossible to set up the device because the recorded\n \/\/ allocation information is impossible.\n fn get_blockdev(\n device: Device,\n devnode: &Path,\n bda: BDA,\n data_map: &HashMap<DevUuid, (usize, &BlockDevSave)>,\n cache_map: &HashMap<DevUuid, (usize, &BlockDevSave)>,\n segment_table: &HashMap<DevUuid, Vec<(Sectors, Sectors)>>,\n ) -> StratisResult<(BlockDevTier, StratBlockDev)> {\n \/\/ Return an error if apparent size of Stratis block device appears to\n \/\/ have decreased since metadata was recorded or if size of block\n \/\/ device could not be obtained.\n blkdev_size(&OpenOptions::new().read(true).open(devnode)?).and_then(|actual_size| {\n let actual_size_sectors = actual_size.sectors();\n let recorded_size = bda.dev_size();\n if actual_size_sectors < recorded_size {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had recorded size ({}), but actual size is less at ({})\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid(),\n recorded_size,\n actual_size\n );\n Err(StratisError::Engine(ErrorEnum::Error, err_msg))\n } else {\n Ok(())\n }\n })?;\n\n let dev_uuid = bda.dev_uuid();\n\n \/\/ Locate the device in the metadata using its uuid. 
Return the device\n \/\/ metadata and whether it was a cache or a datadev.\n let (tier, (_, bd_save)) = data_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Data, bd_save))\n .or_else(|| {\n cache_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Cache, bd_save))\n })\n .ok_or_else(|| {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had no record in pool metadata\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid()\n );\n StratisError::Engine(ErrorEnum::NotFound, err_msg)\n })?;\n\n \/\/ This should always succeed since the actual size is at\n \/\/ least the recorded size, so all segments should be\n \/\/ available to be allocated. If this fails, the most likely\n \/\/ conclusion is metadata corruption.\n let segments = segment_table.get(&dev_uuid);\n Ok((\n tier,\n StratBlockDev::new(\n device,\n devnode.to_owned(),\n bda,\n segments.unwrap_or(&vec![]),\n bd_save.user_info.clone(),\n bd_save.hardware_info.clone(),\n )?,\n ))\n }\n\n let (mut datadevs, mut cachedevs) = (vec![], vec![]);\n for (device, devnode) in devnodes {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?.ok_or_else(|| {\n StratisError::Engine(ErrorEnum::NotFound,\n format!(\"Device {} with devnode {} was previously determined to belong to pool with uuid {} but no BDA was found\",\n device,\n devnode.display(),\n pool_uuid))\n })?;\n\n get_blockdev(\n *device,\n devnode,\n bda,\n &recorded_data_map,\n &recorded_cache_map,\n &segment_table,\n ).map(|(tier, blockdev)| {\n match tier {\n BlockDevTier::Data => &mut datadevs,\n BlockDevTier::Cache => &mut cachedevs,\n }.push(blockdev)\n })?;\n }\n\n \/\/ Verify that datadevs found match datadevs recorded.\n let current_data_uuids: HashSet<_> = datadevs.iter().map(|b| b.uuid()).collect();\n let recorded_data_uuids: HashSet<_> = recorded_data_map.keys().cloned().collect();\n if current_data_uuids != recorded_data_uuids {\n let err_msg = \"Recorded data dev UUIDs != discovered datadev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if datadevs.len() != current_data_uuids.len() {\n let err_msg = \"Duplicate data devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n \/\/ Verify that cachedevs found match cachedevs recorded.\n let current_cache_uuids: HashSet<_> = cachedevs.iter().map(|b| b.uuid()).collect();\n let recorded_cache_uuids: HashSet<_> = recorded_cache_map.keys().cloned().collect();\n if current_cache_uuids != recorded_cache_uuids {\n let err_msg = \"Recorded cache dev UUIDs != discovered cachedev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if cachedevs.len() != current_cache_uuids.len() {\n let err_msg = \"Duplicate cache devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n Ok((datadevs, cachedevs))\n}\n<commit_msg>Make explicit the type of the elements of two Vecs<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Code to handle initial setup steps for a pool.\n\/\/ Initial setup steps are steps that do not alter the environment.\n\nuse std::collections::{HashMap, HashSet};\nuse std::fs::OpenOptions;\nuse std::path::{Path, PathBuf};\n\nuse serde_json;\n\nuse devicemapper::{devnode_to_devno, Device, Sectors};\n\nuse stratis::{ErrorEnum, StratisError, StratisResult};\n\nuse super::super::super::types::{BlockDevTier, DevUuid, PoolUuid};\n\nuse super::super::serde_structs::{BackstoreSave, BlockDevSave, PoolSave};\n\nuse super::blockdev::StratBlockDev;\nuse super::device::{blkdev_size, is_stratis_device};\nuse super::metadata::BDA;\nuse super::util::get_stratis_block_devices;\n\n\/\/\/ Find all Stratis devices.\n\/\/\/\n\/\/\/ Returns a map of pool uuids to a map of devices to devnodes for each pool.\npub fn find_all() -> StratisResult<HashMap<PoolUuid, HashMap<Device, PathBuf>>> {\n let mut pool_map = HashMap::new();\n\n for devnode in get_stratis_block_devices()? {\n match devnode_to_devno(&devnode)? {\n None => continue,\n Some(devno) => {\n is_stratis_device(&devnode)?.and_then(|pool_uuid| {\n pool_map\n .entry(pool_uuid)\n .or_insert_with(HashMap::new)\n .insert(Device::from(devno), devnode)\n });\n }\n }\n }\n Ok(pool_map)\n}\n\n\/\/\/ Get the most recent metadata from a set of Devices for a given pool UUID.\n\/\/\/ Returns None if no metadata found for this pool.\n#[allow(implicit_hasher)]\npub fn get_metadata(\n pool_uuid: PoolUuid,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<Option<PoolSave>> {\n \/\/ Get pairs of device nodes and matching BDAs\n \/\/ If no BDA, or BDA UUID does not match pool UUID, skip.\n \/\/ If there is an error reading the BDA, error. There could have been\n \/\/ vital information on that BDA, for example, it may have contained\n \/\/ the newest metadata.\n let mut bdas = Vec::new();\n for devnode in devnodes.values() {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?;\n if let Some(bda) = bda {\n if bda.pool_uuid() == pool_uuid {\n bdas.push((devnode, bda));\n }\n }\n }\n\n \/\/ Most recent time should never be None if this was a properly\n \/\/ created pool; this allows for the method to be called in other\n \/\/ circumstances.\n let most_recent_time = {\n match bdas.iter()\n .filter_map(|&(_, ref bda)| bda.last_update_time())\n .max()\n {\n Some(time) => time,\n None => return Ok(None),\n }\n };\n\n \/\/ Try to read from all available devnodes that could contain most\n \/\/ recent metadata. In the event of errors, continue to try until all are\n \/\/ exhausted.\n for &(devnode, ref bda) in bdas.iter()\n .filter(|&&(_, ref bda)| bda.last_update_time() == Some(most_recent_time))\n {\n let poolsave = OpenOptions::new()\n .read(true)\n .open(devnode)\n .ok()\n .and_then(|mut f| bda.load_state(&mut f).ok())\n .and_then(|opt| opt)\n .and_then(|data| serde_json::from_slice(&data).ok());\n\n if poolsave.is_some() {\n return Ok(poolsave);\n }\n }\n\n \/\/ If no data has yet returned, we have an error. 
That is, we should have\n \/\/ some metadata, because we have a most recent time, but we failed to\n \/\/ get any.\n let err_str = \"timestamp indicates data was written, but no data successfully read\";\n Err(StratisError::Engine(ErrorEnum::NotFound, err_str.into()))\n}\n\n\/\/\/ Get all the blockdevs corresponding to this pool that can be obtained from\n\/\/\/ the given devices.\n\/\/\/ Returns an error if a BDA can not be read or can not be found on any\n\/\/\/ blockdev in devnodes.\n\/\/\/ Returns an error if the blockdevs obtained do not match the metadata.\n\/\/\/ Returns a tuple, of which the first are the data devs, and the second\n\/\/\/ are the devs that support the cache tier.\n\/\/\/ Precondition: Every device in devnodes has already been determined to\n\/\/\/ belong to the pool with the specified pool uuid.\n#[allow(implicit_hasher)]\npub fn get_blockdevs(\n pool_uuid: PoolUuid,\n backstore_save: &BackstoreSave,\n devnodes: &HashMap<Device, PathBuf>,\n) -> StratisResult<(Vec<StratBlockDev>, Vec<StratBlockDev>)> {\n let recorded_data_map: HashMap<DevUuid, (usize, &BlockDevSave)> = backstore_save\n .data_devs\n .iter()\n .enumerate()\n .map(|(i, bds)| (bds.uuid, (i, bds)))\n .collect();\n\n let recorded_cache_map: HashMap<DevUuid, (usize, &BlockDevSave)> =\n match backstore_save.cache_devs {\n Some(ref cache_devs) => cache_devs\n .iter()\n .enumerate()\n .map(|(i, bds)| (bds.uuid, (i, bds)))\n .collect(),\n None => HashMap::new(),\n };\n\n let mut segment_table: HashMap<DevUuid, Vec<(Sectors, Sectors)>> = HashMap::new();\n for seg in &backstore_save.data_segments {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n if let Some(ref segs) = backstore_save.cache_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n if let Some(ref segs) = backstore_save.meta_segments {\n for seg in segs {\n segment_table\n .entry(seg.0)\n .or_insert_with(Vec::default)\n .push((seg.1, seg.2))\n }\n }\n\n \/\/ Construct a single StratBlockDev. Return the tier to which the\n \/\/ blockdev has been found to belong. Returns an error if the block\n \/\/ device has shrunk, no metadata can be found for the block device,\n \/\/ or it is impossible to set up the device because the recorded\n \/\/ allocation information is impossible.\n fn get_blockdev(\n device: Device,\n devnode: &Path,\n bda: BDA,\n data_map: &HashMap<DevUuid, (usize, &BlockDevSave)>,\n cache_map: &HashMap<DevUuid, (usize, &BlockDevSave)>,\n segment_table: &HashMap<DevUuid, Vec<(Sectors, Sectors)>>,\n ) -> StratisResult<(BlockDevTier, StratBlockDev)> {\n \/\/ Return an error if apparent size of Stratis block device appears to\n \/\/ have decreased since metadata was recorded or if size of block\n \/\/ device could not be obtained.\n blkdev_size(&OpenOptions::new().read(true).open(devnode)?).and_then(|actual_size| {\n let actual_size_sectors = actual_size.sectors();\n let recorded_size = bda.dev_size();\n if actual_size_sectors < recorded_size {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had recorded size ({}), but actual size is less at ({})\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid(),\n recorded_size,\n actual_size\n );\n Err(StratisError::Engine(ErrorEnum::Error, err_msg))\n } else {\n Ok(())\n }\n })?;\n\n let dev_uuid = bda.dev_uuid();\n\n \/\/ Locate the device in the metadata using its uuid. 
Return the device\n \/\/ metadata and whether it was a cache or a datadev.\n let (tier, (_, bd_save)) = data_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Data, bd_save))\n .or_else(|| {\n cache_map\n .get(&dev_uuid)\n .map(|bd_save| (BlockDevTier::Cache, bd_save))\n })\n .ok_or_else(|| {\n let err_msg = format!(\n \"Stratis device with device number {}, devnode {}, pool UUID {} and device UUID {} had no record in pool metadata\",\n device,\n devnode.display(),\n bda.pool_uuid(),\n bda.dev_uuid()\n );\n StratisError::Engine(ErrorEnum::NotFound, err_msg)\n })?;\n\n \/\/ This should always succeed since the actual size is at\n \/\/ least the recorded size, so all segments should be\n \/\/ available to be allocated. If this fails, the most likely\n \/\/ conclusion is metadata corruption.\n let segments = segment_table.get(&dev_uuid);\n Ok((\n tier,\n StratBlockDev::new(\n device,\n devnode.to_owned(),\n bda,\n segments.unwrap_or(&vec![]),\n bd_save.user_info.clone(),\n bd_save.hardware_info.clone(),\n )?,\n ))\n }\n\n let (mut datadevs, mut cachedevs): (Vec<StratBlockDev>, Vec<StratBlockDev>) = (vec![], vec![]);\n for (device, devnode) in devnodes {\n let bda = BDA::load(&mut OpenOptions::new().read(true).open(devnode)?)?.ok_or_else(|| {\n StratisError::Engine(ErrorEnum::NotFound,\n format!(\"Device {} with devnode {} was previously determined to belong to pool with uuid {} but no BDA was found\",\n device,\n devnode.display(),\n pool_uuid))\n })?;\n\n get_blockdev(\n *device,\n devnode,\n bda,\n &recorded_data_map,\n &recorded_cache_map,\n &segment_table,\n ).map(|(tier, blockdev)| {\n match tier {\n BlockDevTier::Data => &mut datadevs,\n BlockDevTier::Cache => &mut cachedevs,\n }.push(blockdev)\n })?;\n }\n\n \/\/ Verify that datadevs found match datadevs recorded.\n let current_data_uuids: HashSet<_> = datadevs.iter().map(|b| b.uuid()).collect();\n let recorded_data_uuids: HashSet<_> = recorded_data_map.keys().cloned().collect();\n if current_data_uuids != recorded_data_uuids {\n let err_msg = \"Recorded data dev UUIDs != discovered datadev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if datadevs.len() != current_data_uuids.len() {\n let err_msg = \"Duplicate data devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n \/\/ Verify that cachedevs found match cachedevs recorded.\n let current_cache_uuids: HashSet<_> = cachedevs.iter().map(|b| b.uuid()).collect();\n let recorded_cache_uuids: HashSet<_> = recorded_cache_map.keys().cloned().collect();\n if current_cache_uuids != recorded_cache_uuids {\n let err_msg = \"Recorded cache dev UUIDs != discovered cachedev UUIDs\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n if cachedevs.len() != current_cache_uuids.len() {\n let err_msg = \"Duplicate cache devices found in environment\";\n return Err(StratisError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n\n Ok((datadevs, cachedevs))\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io::fs;\nuse std::io;\n\nuse colours::{Plain, Style, Black, Red, Green, Yellow, Blue, Purple, Cyan};\nuse column::{Column, Permissions, FileName, FileSize, User, Group};\nuse format::{formatBinaryBytes, formatDecimalBytes};\nuse unix::{get_user_name, get_group_name};\n\n\/\/ Each file is definitely going to get `stat`ted at least once, if\n\/\/ only to determine what kind of file it is, so carry the `stat`\n\/\/ result around with the file for safe keeping.\npub struct 
File<'a> {\n pub name: &'a str,\n pub ext: Option<&'a str>,\n pub path: &'a Path,\n pub stat: io::FileStat,\n}\n\nimpl<'a> File<'a> {\n pub fn from_path(path: &'a Path) -> File<'a> {\n let filename: &str = path.filename_str().unwrap();\n\n \/\/ We have to use lstat here instad of file.stat(), as it\n \/\/ doesn't follow symbolic links. Otherwise, the stat() call\n \/\/ will fail if it encounters a link that's target is\n \/\/ non-existent.\n let stat: io::FileStat = match fs::lstat(path) {\n Ok(stat) => stat,\n Err(e) => fail!(\"Couldn't stat {}: {}\", filename, e),\n };\n\n return File {\n path: path,\n stat: stat,\n name: filename,\n ext: File::ext(filename),\n };\n }\n\n fn ext(name: &'a str) -> Option<&'a str> {\n let re = regex!(r\"\\.(.+)$\");\n re.captures(name).map(|caps| caps.at(1))\n }\n\n pub fn is_dotfile(&self) -> bool {\n self.name.starts_with(\".\")\n }\n\n pub fn display(&self, column: &Column) -> StrBuf {\n match *column {\n Permissions => self.permissions(),\n FileName => self.file_colour().paint(self.name.as_slice()),\n FileSize(si) => self.file_size(si),\n User => get_user_name(self.stat.unstable.uid as i32).unwrap_or(self.stat.unstable.uid.to_str()),\n Group => get_group_name(self.stat.unstable.gid as u32).unwrap_or(self.stat.unstable.gid.to_str()),\n }\n }\n\n fn file_size(&self, si: bool) -> StrBuf {\n let sizeStr = if si {\n formatBinaryBytes(self.stat.size)\n } else {\n formatDecimalBytes(self.stat.size)\n };\n\n return if self.stat.kind == io::TypeDirectory {\n Green.normal()\n } else {\n Green.bold()\n }.paint(sizeStr.as_slice());\n }\n\n fn type_char(&self) -> StrBuf {\n return match self.stat.kind {\n io::TypeFile => \".\".to_strbuf(),\n io::TypeDirectory => Blue.paint(\"d\"),\n io::TypeNamedPipe => Yellow.paint(\"|\"),\n io::TypeBlockSpecial => Purple.paint(\"s\"),\n io::TypeSymlink => Cyan.paint(\"l\"),\n _ => \"?\".to_owned(),\n }\n }\n\n\n fn file_colour(&self) -> Style {\n if self.stat.kind == io::TypeDirectory {\n Blue.normal()\n } else if self.stat.perm.contains(io::UserExecute) {\n Green.normal()\n } else if self.name.ends_with(\"~\") {\n Black.bold()\n } else {\n Plain\n }\n }\n\n fn permissions(&self) -> StrBuf {\n let bits = self.stat.perm;\n return format!(\"{}{}{}{}{}{}{}{}{}{}\",\n self.type_char(),\n bit(bits, io::UserRead, \"r\", Yellow.bold()),\n bit(bits, io::UserWrite, \"w\", Red.bold()),\n bit(bits, io::UserExecute, \"x\", Green.bold().underline()),\n bit(bits, io::GroupRead, \"r\", Yellow.normal()),\n bit(bits, io::GroupWrite, \"w\", Red.normal()),\n bit(bits, io::GroupExecute, \"x\", Green.normal()),\n bit(bits, io::OtherRead, \"r\", Yellow.normal()),\n bit(bits, io::OtherWrite, \"w\", Red.normal()),\n bit(bits, io::OtherExecute, \"x\", Green.normal()),\n );\n }\n}\n\nfn bit(bits: io::FilePermission, bit: io::FilePermission, other: &'static str, style: Style) -> StrBuf {\n if bits.contains(bit) {\n style.paint(other.as_slice())\n } else {\n Black.bold().paint(\"-\".as_slice())\n }\n}\n<commit_msg>Add colouring for media and compressed files<commit_after>use std::io::fs;\nuse std::io;\n\nuse colours::{Plain, Style, Black, Red, Green, Yellow, Blue, Purple, Cyan};\nuse column::{Column, Permissions, FileName, FileSize, User, Group};\nuse format::{formatBinaryBytes, formatDecimalBytes};\nuse unix::{get_user_name, get_group_name};\n\nstatic MEDIA_TYPES: &'static [&'static str] = &[\n \"png\", \"jpeg\", \"jpg\", \"gif\", \"bmp\", \"tiff\", \"tif\",\n \"ppm\", \"pgm\", \"pbm\", \"pnm\", \"webp\", \"raw\", \"arw\",\n \"svg\", \"pdf\", \"stl\", 
\"eps\", \"dvi\", \"ps\" ];\n\nstatic COMPRESSED_TYPES: &'static [&'static str] = &[\n \"zip\", \"tar\", \"Z\", \"gz\", \"bz2\", \"a\", \"ar\", \"7z\",\n \"iso\", \"dmg\", \"tc\", \"rar\", \"par\" ];\n\n\/\/ Each file is definitely going to get `stat`ted at least once, if\n\/\/ only to determine what kind of file it is, so carry the `stat`\n\/\/ result around with the file for safe keeping.\npub struct File<'a> {\n pub name: &'a str,\n pub ext: Option<&'a str>,\n pub path: &'a Path,\n pub stat: io::FileStat,\n}\n\nimpl<'a> File<'a> {\n pub fn from_path(path: &'a Path) -> File<'a> {\n let filename: &str = path.filename_str().unwrap();\n\n \/\/ We have to use lstat here instad of file.stat(), as it\n \/\/ doesn't follow symbolic links. Otherwise, the stat() call\n \/\/ will fail if it encounters a link that's target is\n \/\/ non-existent.\n let stat: io::FileStat = match fs::lstat(path) {\n Ok(stat) => stat,\n Err(e) => fail!(\"Couldn't stat {}: {}\", filename, e),\n };\n\n return File {\n path: path,\n stat: stat,\n name: filename,\n ext: File::ext(filename),\n };\n }\n\n fn ext(name: &'a str) -> Option<&'a str> {\n let re = regex!(r\"\\.(.+)$\");\n re.captures(name).map(|caps| caps.at(1))\n }\n\n pub fn is_dotfile(&self) -> bool {\n self.name.starts_with(\".\")\n }\n\n pub fn display(&self, column: &Column) -> StrBuf {\n match *column {\n Permissions => self.permissions(),\n FileName => self.file_colour().paint(self.name.as_slice()),\n FileSize(si) => self.file_size(si),\n User => get_user_name(self.stat.unstable.uid as i32).unwrap_or(self.stat.unstable.uid.to_str()),\n Group => get_group_name(self.stat.unstable.gid as u32).unwrap_or(self.stat.unstable.gid.to_str()),\n }\n }\n\n fn file_size(&self, si: bool) -> StrBuf {\n let sizeStr = if si {\n formatBinaryBytes(self.stat.size)\n } else {\n formatDecimalBytes(self.stat.size)\n };\n\n return if self.stat.kind == io::TypeDirectory {\n Green.normal()\n } else {\n Green.bold()\n }.paint(sizeStr.as_slice());\n }\n\n fn type_char(&self) -> StrBuf {\n return match self.stat.kind {\n io::TypeFile => \".\".to_strbuf(),\n io::TypeDirectory => Blue.paint(\"d\"),\n io::TypeNamedPipe => Yellow.paint(\"|\"),\n io::TypeBlockSpecial => Purple.paint(\"s\"),\n io::TypeSymlink => Cyan.paint(\"l\"),\n _ => \"?\".to_owned(),\n }\n }\n\n fn file_colour(&self) -> Style {\n if self.stat.kind == io::TypeDirectory {\n Blue.normal()\n }\n else if self.stat.perm.contains(io::UserExecute) {\n Green.bold()\n }\n else if self.name.ends_with(\"~\") {\n Black.bold()\n }\n else if self.name.starts_with(\"README\") {\n Yellow.bold().underline()\n }\n else if self.ext.is_some() && MEDIA_TYPES.iter().any(|&s| s == self.ext.unwrap()) {\n Purple.normal()\n }\n else if self.ext.is_some() && COMPRESSED_TYPES.iter().any(|&s| s == self.ext.unwrap()) {\n Red.normal()\n }\n else {\n Plain\n }\n }\n\n fn permissions(&self) -> StrBuf {\n let bits = self.stat.perm;\n return format!(\"{}{}{}{}{}{}{}{}{}{}\",\n self.type_char(),\n bit(bits, io::UserRead, \"r\", Yellow.bold()),\n bit(bits, io::UserWrite, \"w\", Red.bold()),\n bit(bits, io::UserExecute, \"x\", Green.bold().underline()),\n bit(bits, io::GroupRead, \"r\", Yellow.normal()),\n bit(bits, io::GroupWrite, \"w\", Red.normal()),\n bit(bits, io::GroupExecute, \"x\", Green.normal()),\n bit(bits, io::OtherRead, \"r\", Yellow.normal()),\n bit(bits, io::OtherWrite, \"w\", Red.normal()),\n bit(bits, io::OtherExecute, \"x\", Green.normal()),\n );\n }\n}\n\nfn bit(bits: io::FilePermission, bit: io::FilePermission, other: &'static str, 
style: Style) -> StrBuf {\n if bits.contains(bit) {\n style.paint(other.as_slice())\n } else {\n Black.bold().paint(\"-\".as_slice())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tests: add test for empty <><commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that empty type parameter list (<>) is synonymous with\n\/\/ no type parameters at all\n\nstruct S<>;\ntrait T<> {}\nenum E<> { V }\nimpl<> T<> for S<> {}\nimpl T for E {}\nfn foo<>() {}\nfn bar() {}\n\nfn main() {\n let _ = S;\n let _ = S::<>;\n let _ = E::V;\n let _ = E::<>::V;\n foo();\n foo::<>();\n\n \/\/ Test that we can supply <> to non generic things\n bar::<>();\n let _: i32<>;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). 
Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagutil::debug_result::*;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse error::MapErrInto;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over `FileLockEntry` to have some convenience internally.\npub struct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ Get a link Url object from a `FileLockEntry`, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.get_header()\n .read(\"imag.content.url\")\n .ok()\n .and_then(|opt| match opt {\n Some(Value::String(s)) => {\n debug!(\"Found url, parsing: {:?}\", s);\n Url::parse(&s[..]).ok()\n },\n _ => None\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .get_header()\n .read(\"imag.content.url\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(Some)\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\npub mod iter {\n \/\/! Iterator helpers for external linking stuff\n \/\/!\n \/\/! Contains also helpers to filter iterators for external\/internal links\n \/\/!\n\n use libimagutil::debug_result::*;\n use libimagstore::store::Store;\n\n use internal::Link;\n use internal::iter::LinkIter;\n use error::LinkErrorKind as LEK;\n use error::MapErrInto;\n use result::Result;\n\n use url::Url;\n\n \/\/\/ Helper for building `OnlyExternalIter` and `NoExternalIter`\n \/\/\/\n \/\/\/ The boolean value defines, how to interpret the `is_external_link_storeid()` return value\n \/\/\/ (here as \"pred\"):\n \/\/\/\n \/\/\/ pred | bool | xor | take?\n \/\/\/ ---- | ---- | --- | ----\n \/\/\/ 0 | 0 | 0 | 1\n \/\/\/ 0 | 1 | 1 | 0\n \/\/\/ 1 | 1 | 1 | 0\n \/\/\/ 1 | 1 | 0 | 1\n \/\/\/\n \/\/\/ If `bool` says \"take if return value is false\", we take the element if the `pred` returns\n \/\/\/ false... 
and so on.\n \/\/\/\n \/\/\/ As we can see, the operator between these two operants is `!(a ^ b)`.\n struct ExternalFilterIter(LinkIter, bool);\n\n impl Iterator for ExternalFilterIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::is_external_link_storeid;\n\n while let Some(elem) = self.0.next() {\n if !(self.1 ^ is_external_link_storeid(&elem)) {\n return Some(elem);\n }\n }\n None\n }\n }\n\n pub struct OnlyExternalIter(ExternalFilterIter);\n\n impl OnlyExternalIter {\n pub fn new(li: LinkIter) -> OnlyExternalIter {\n OnlyExternalIter(ExternalFilterIter(li, true))\n }\n\n pub fn urls<'a>(self, store: &'a Store) -> UrlIter<'a> {\n UrlIter(self, store)\n }\n }\n\n impl Iterator for OnlyExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub struct NoExternalIter(ExternalFilterIter);\n\n impl NoExternalIter {\n pub fn new(li: LinkIter) -> NoExternalIter {\n NoExternalIter(ExternalFilterIter(li, false))\n }\n }\n\n impl Iterator for NoExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub trait OnlyExternalLinks : Sized {\n fn only_external_links(self) -> OnlyExternalIter ;\n\n fn no_internal_links(self) -> OnlyExternalIter {\n self.only_external_links()\n }\n }\n\n impl OnlyExternalLinks for LinkIter {\n fn only_external_links(self) -> OnlyExternalIter {\n OnlyExternalIter::new(self)\n }\n }\n\n pub trait OnlyInternalLinks : Sized {\n fn only_internal_links(self) -> NoExternalIter;\n\n fn no_external_links(self) -> NoExternalIter {\n self.only_internal_links()\n }\n }\n\n impl OnlyInternalLinks for LinkIter {\n fn only_internal_links(self) -> NoExternalIter {\n NoExternalIter::new(self)\n }\n }\n\n pub struct UrlIter<'a>(OnlyExternalIter, &'a Store);\n\n impl<'a> Iterator for UrlIter<'a> {\n type Item = Result<Url>;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::get_external_link_from_file;\n\n self.0\n .next()\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n self.1\n .retrieve(id.clone())\n .map_err_into(LEK::StoreReadError)\n .map_dbg_err(|_| format!(\"Retrieving entry for id: '{:?}' failed\", id))\n .and_then(|f| {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n get_external_link_from_file(&f)\n .map_dbg_err(|e| format!(\"URL -> Err = {:?}\", e))\n })\n })\n }\n\n }\n\n}\n\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a 'links\/external\/': '{:?}'\", id);\n id.local().starts_with(\"links\/external\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put 
them into the return vector.\n self.get_internal_links()\n .map(|iter| {\n debug!(\"Getting external links\");\n iter.filter(|l| is_external_link_storeid(l))\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n get_external_link_from_file(&f)\n .map_err(|e| { debug!(\"URL -> Err = {:?}\", e); e })\n },\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = try!(\n ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid()\n .map_err_into(LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to build StoreId for this hash '{:?}'\", hash)\n })\n );\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let mut file = try!(store\n .retrieve(file_id.clone())\n .map_err_into(LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to create or retrieve an file for this link '{:?}'\", link)\n }));\n\n debug!(\"Generating header content!\");\n {\n let mut hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"imag.content\") {\n Ok(Some(Value::Table(table))) => table,\n Ok(Some(_)) => {\n warn!(\"There is a value at 'imag.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e)))),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.set(\"imag.content\", Value::Table(table)) {\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove 
an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.as_str() != link.as_str())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<commit_msg>Add SelectExternal LinkIter extension trait<commit_after>\/\/\n\/\/ imag - the personal information management suite for the commandline\n\/\/ Copyright (C) 2015, 2016 Matthias Beyer <mail@beyermatthias.de> and contributors\n\/\/\n\/\/ This library is free software; you can redistribute it and\/or\n\/\/ modify it under the terms of the GNU Lesser General Public\n\/\/ License as published by the Free Software Foundation; version\n\/\/ 2.1 of the License.\n\/\/\n\/\/ This library is distributed in the hope that it will be useful,\n\/\/ but WITHOUT ANY WARRANTY; without even the implied warranty of\n\/\/ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n\/\/ Lesser General Public License for more details.\n\/\/\n\/\/ You should have received a copy of the GNU Lesser General Public\n\/\/ License along with this library; if not, write to the Free Software\n\/\/ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n\/\/\n\n\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\nuse libimagutil::debug_result::*;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse error::MapErrInto;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over `FileLockEntry` to have some convenience internally.\npub struct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ Get a link Url object from a `FileLockEntry`, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.get_header()\n .read(\"imag.content.url\")\n .ok()\n .and_then(|opt| match opt {\n Some(Value::String(s)) => {\n debug!(\"Found url, parsing: {:?}\", s);\n Url::parse(&s[..]).ok()\n },\n _ => None\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .get_header()\n .read(\"imag.content.url\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(Some)\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait 
ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\npub mod iter {\n \/\/! Iterator helpers for external linking stuff\n \/\/!\n \/\/! Contains also helpers to filter iterators for external\/internal links\n \/\/!\n\n use libimagutil::debug_result::*;\n use libimagstore::store::Store;\n\n use internal::Link;\n use internal::iter::LinkIter;\n use error::LinkErrorKind as LEK;\n use error::MapErrInto;\n use result::Result;\n\n use url::Url;\n\n \/\/\/ Helper for building `OnlyExternalIter` and `NoExternalIter`\n \/\/\/\n \/\/\/ The boolean value defines, how to interpret the `is_external_link_storeid()` return value\n \/\/\/ (here as \"pred\"):\n \/\/\/\n \/\/\/ pred | bool | xor | take?\n \/\/\/ ---- | ---- | --- | ----\n \/\/\/ 0 | 0 | 0 | 1\n \/\/\/ 0 | 1 | 1 | 0\n \/\/\/ 1 | 1 | 1 | 0\n \/\/\/ 1 | 1 | 0 | 1\n \/\/\/\n \/\/\/ If `bool` says \"take if return value is false\", we take the element if the `pred` returns\n \/\/\/ false... and so on.\n \/\/\/\n \/\/\/ As we can see, the operator between these two operants is `!(a ^ b)`.\n pub struct ExternalFilterIter(LinkIter, bool);\n\n impl Iterator for ExternalFilterIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::is_external_link_storeid;\n\n while let Some(elem) = self.0.next() {\n if !(self.1 ^ is_external_link_storeid(&elem)) {\n return Some(elem);\n }\n }\n None\n }\n }\n\n \/\/\/ Helper trait to be implemented on `LinkIter` to select or deselect all external links\n \/\/\/\n \/\/\/ # See also\n \/\/\/\n \/\/\/ Also see `OnlyExternalIter` and `NoExternalIter` and the helper traits\/functions\n \/\/\/ `OnlyInteralLinks`\/`only_internal_links()` and `OnlyExternalLinks`\/`only_external_links()`.\n pub trait SelectExternal {\n fn select_external_links(self, b: bool) -> ExternalFilterIter;\n }\n\n impl SelectExternal for LinkIter {\n fn select_external_links(self, b: bool) -> ExternalFilterIter {\n ExternalFilterIter(self, b)\n }\n }\n\n\n pub struct OnlyExternalIter(ExternalFilterIter);\n\n impl OnlyExternalIter {\n pub fn new(li: LinkIter) -> OnlyExternalIter {\n OnlyExternalIter(ExternalFilterIter(li, true))\n }\n\n pub fn urls<'a>(self, store: &'a Store) -> UrlIter<'a> {\n UrlIter(self, store)\n }\n }\n\n impl Iterator for OnlyExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub struct NoExternalIter(ExternalFilterIter);\n\n impl NoExternalIter {\n pub fn new(li: LinkIter) -> NoExternalIter {\n NoExternalIter(ExternalFilterIter(li, false))\n }\n }\n\n impl Iterator for NoExternalIter {\n type Item = Link;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next()\n }\n }\n\n pub trait OnlyExternalLinks : Sized {\n fn only_external_links(self) -> OnlyExternalIter ;\n\n fn no_internal_links(self) -> OnlyExternalIter {\n self.only_external_links()\n }\n }\n\n impl OnlyExternalLinks for LinkIter {\n fn only_external_links(self) -> OnlyExternalIter {\n OnlyExternalIter::new(self)\n }\n }\n\n pub trait 
OnlyInternalLinks : Sized {\n fn only_internal_links(self) -> NoExternalIter;\n\n fn no_external_links(self) -> NoExternalIter {\n self.only_internal_links()\n }\n }\n\n impl OnlyInternalLinks for LinkIter {\n fn only_internal_links(self) -> NoExternalIter {\n NoExternalIter::new(self)\n }\n }\n\n pub struct UrlIter<'a>(OnlyExternalIter, &'a Store);\n\n impl<'a> Iterator for UrlIter<'a> {\n type Item = Result<Url>;\n\n fn next(&mut self) -> Option<Self::Item> {\n use super::get_external_link_from_file;\n\n self.0\n .next()\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n self.1\n .retrieve(id.clone())\n .map_err_into(LEK::StoreReadError)\n .map_dbg_err(|_| format!(\"Retrieving entry for id: '{:?}' failed\", id))\n .and_then(|f| {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n get_external_link_from_file(&f)\n .map_dbg_err(|e| format!(\"URL -> Err = {:?}\", e))\n })\n })\n }\n\n }\n\n}\n\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a 'links\/external\/': '{:?}'\", id);\n id.local().starts_with(\"links\/external\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .map(|iter| {\n debug!(\"Getting external links\");\n iter.filter(|l| is_external_link_storeid(l))\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => {\n debug!(\"Store::retrieve({:?}) succeeded\", id);\n debug!(\"getting external link from file now\");\n get_external_link_from_file(&f)\n .map_err(|e| { debug!(\"URL -> Err = {:?}\", e); e })\n },\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = try!(\n ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid()\n .map_err_into(LEK::StoreWriteError)\n 
.map_dbg_err(|_| {\n format!(\"Failed to build StoreId for this hash '{:?}'\", hash)\n })\n );\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let mut file = try!(store\n .retrieve(file_id.clone())\n .map_err_into(LEK::StoreWriteError)\n .map_dbg_err(|_| {\n format!(\"Failed to create or retrieve an file for this link '{:?}'\", link)\n }));\n\n debug!(\"Generating header content!\");\n {\n let mut hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"imag.content\") {\n Ok(Some(Value::Table(table))) => table,\n Ok(Some(_)) => {\n warn!(\"There is a value at 'imag.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e)))),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.set(\"imag.content\", Value::Table(table)) {\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.as_str() != link.as_str())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>A test that I forgot to git-add<commit_after>fn main() {\n alt @{foo: true, bar: some(10), baz: 20} {\n @{foo: true, bar: some(_), _} {}\n @{foo: false, bar: none, _} {}\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add the socket settings example from the README to examples<commit_after>extern crate knob;\n\nuse std::io::net::ip::IpAddr;\nuse knob::Settings;\n\nfn main() {\n let mut settings = Settings::new();\n settings.set(\"ip\", \"::0.0.0.1\");\n let socket: IpAddr = settings.fetch(\"ip\").unwrap();\n assert_eq!(socket.to_str(), \"::0.0.0.1\".to_owned());\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>dstrules: new example<commit_after>extern crate time;\nextern crate zoneinfo;\n\nuse zoneinfo::ZoneInfo;\nuse std::error::Error;\n\nfn main() {\n let regions = ZoneInfo::get_tz_locations();\n\n for region in regions {\n match ZoneInfo::by_tz(®ion) {\n Ok(zoneinfo) => {\n println!(\"{}: {}\", region, zoneinfo.get_dst_specifier());\n 
},\n Err(error) => {\n println!(\"{}: unable to parse: {}\", region, error.description());\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example for request_open_port<commit_after>extern crate port_scanner;\nuse port_scanner::request_open_port;\n\nfn main() {\n println!(\"Port {}\", request_open_port().unwrap_or(0));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add file server example<commit_after>extern crate clap;\nextern crate fibers;\nextern crate futures;\nextern crate miasht;\nextern crate handy_async;\n\nuse std::fs;\nuse fibers::{Executor, ThreadPoolExecutor, Spawn};\nuse futures::{Future, BoxFuture};\nuse miasht::{Server, Status};\nuse miasht::builtin::servers::{SimpleHttpServer, RawConnection};\nuse miasht::builtin::headers::ContentLength;\nuse miasht::builtin::FutureExt;\nuse miasht::builtin::router::{Router, RouteBuilder};\nuse handy_async::sync_io::ReadExt;\n\ntype TcpRequest = miasht::server::Request<fibers::net::TcpStream>;\n\nfn main() {\n let mut builder = RouteBuilder::new();\n builder.add_callback((), handle_get);\n builder.add_callback((), handle_default);\n let router = builder.finish();\n\n let mut executor = ThreadPoolExecutor::new().unwrap();\n let addr = \"0.0.0.0:3000\".parse().unwrap();\n let server = SimpleHttpServer::new(router, route);\n let server = server.start(addr, executor.handle());\n let monitor = executor.spawn_monitor(server.join());\n let result = executor.run_fiber(monitor).unwrap();\n println!(\"HTTP Server shutdown: {:?}\", result);\n}\n\nfn route(router: Router<fibers::net::TcpStream>, connection: RawConnection) -> BoxFuture<(), ()> {\n connection.read_request()\n .map_err(|e| {\n println!(\"Error: {:?}\", e);\n ()\n })\n .and_then(move |request| router.handle_request(request))\n .boxed()\n}\n\nfn handle_default(_: (), request: TcpRequest) -> Result<BoxFuture<(), ()>, TcpRequest> {\n Ok(request.finish()\n .build_response(Status::NotFound)\n .finish()\n .write_all_bytes(\"Not Found\\n\")\n .then(|_| Ok(()))\n .boxed())\n}\n\nfn handle_get(_: (), request: TcpRequest) -> Result<BoxFuture<(), ()>, TcpRequest> {\n if miasht::Method::Get != request.method() {\n return Err(request);\n }\n println!(\"# GET: {}\", &request.path()[1..]);\n Ok(match fs::File::open(&request.path()[1..])\n .and_then(|mut f| ReadExt::read_all_bytes(&mut f)) {\n Err(e) => {\n let reason = e.to_string();\n let mut resp = request.finish().build_response(Status::NotFound);\n resp.add_header(&ContentLength(reason.len() as u64));\n resp.finish().write_all_bytes(reason).then(|_| Ok(())).boxed()\n }\n Ok(bytes) => {\n let mut resp = request.finish().build_response(Status::Ok);\n resp.add_header(&ContentLength(bytes.len() as u64));\n resp.finish().write_all_bytes(bytes).then(|_| Ok(())).boxed()\n }\n })\n}\n<|endoftext|>"} {"text":"<commit_before>fn split<'a, T>(list: &'a [T], n: uint) -> (&'a [T], &'a [T]) {\n (list.slice_to(n), list.slice_from(n))\n}\n\nfn main() {\n let list = ~['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'];\n println!(\"{:?}\", split(list, 3));\n}\n\n<commit_msg>Cleanup Problem 17<commit_after>\/\/ The author of this work hereby waives all claim of copyright (economic and\n\/\/ moral) in this work and immediately places it in the public domain; it may\n\/\/ be used, distorted or destroyed in any manner whatsoever without further\n\/\/ attribution or notice to the creator.\n\n\/\/! Problem 17: Vectors: split\n\/\/!\n\/\/! Split a vector into two parts; the length of the first part is given.\n\/\/!\n\/\/! 
Your function could have this signature:\n\/\/! `fn split<T>(vec: &[T], n: uint) -> (&[T], &[T])`\n\nfn split<'a, T>(vec: &'a [T], n: uint) -> (&'a [T], &'a [T]) {\n (vec.slice_to(n), vec.slice_from(n))\n}\n\n#[test]\nfn split_ok() {\n let vec = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'];\n assert_eq!(split(vec, 3),\n (&['a', 'b', 'c'], &['d', 'e', 'f', 'g', 'h', 'i', 'j']));\n}\n\n#[test]\nfn split_begin() {\n let vec = ['a', 'b', 'c', 'd', 'e'];\n assert_eq!(split(vec, 0),\n (&[], &['a', 'b', 'c', 'd', 'e']));\n}\n\n#[test]\nfn split_end() {\n let vec = ['a', 'b', 'c', 'd', 'e'];\n assert_eq!(split(vec, 5),\n (&['a', 'b', 'c', 'd', 'e'], &[]));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -O -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n#![feature(rustc_attrs)]\n\n\/\/ CHECK-LABEL: @test\n#[no_mangle]\n#[rustc_mir] \/\/ FIXME #27840 MIR has different codegen.\npub fn test() {\n let a = 0;\n &a; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_a:%[0-9]+]] = bitcast i32* %a to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_a]])\n\n {\n let b = &Some(a);\n &b; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_b:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"** %b to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]])\n\n\/\/ CHECK: [[S_tmp2:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"* %tmp2 to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_tmp2]])\n\n\/\/ CHECK: [[E_tmp2:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"* %tmp2 to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_tmp2]])\n\n\/\/ CHECK: [[E_b:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"** %b to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]])\n }\n\n let c = 1;\n &c; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_c:%[0-9]+]] = bitcast i32* %c to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_c]])\n\n\/\/ CHECK: [[E_c:%[0-9]+]] = bitcast i32* %c to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_c]])\n\n\/\/ CHECK: [[E_a:%[0-9]+]] = bitcast i32* %a to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_a]])\n}\n<commit_msg>Fix codegen test (value names changed)<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -O -C no-prepopulate-passes\n\n#![crate_type = \"lib\"]\n#![feature(rustc_attrs)]\n\n\/\/ CHECK-LABEL: @test\n#[no_mangle]\n#[rustc_mir] \/\/ FIXME #27840 MIR has different codegen.\npub fn test() {\n let a = 0;\n &a; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_a:%[0-9]+]] = bitcast i32* %a to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_a]])\n\n {\n let b = &Some(a);\n &b; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_b:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"** %b to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]])\n\n\/\/ CHECK: [[S_local5:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"* %local5 to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_local5]])\n\n\/\/ CHECK: [[E_local5:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"* %local5 to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_local5]])\n\n\/\/ CHECK: [[E_b:%[0-9]+]] = bitcast %\"2.std::option::Option<i32>\"** %b to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]])\n }\n\n let c = 1;\n &c; \/\/ keep variable in an alloca\n\n\/\/ CHECK: [[S_c:%[0-9]+]] = bitcast i32* %c to i8*\n\/\/ CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_c]])\n\n\/\/ CHECK: [[E_c:%[0-9]+]] = bitcast i32* %c to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_c]])\n\n\/\/ CHECK: [[E_a:%[0-9]+]] = bitcast i32* %a to i8*\n\/\/ CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_a]])\n}\n<|endoftext|>"} {"text":"<commit_before>use alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\nuse sync::Intex;\n\n\/\/\/ Execute an executable\npub fn execute(url: Url, mut args: Vec<String>) {\n let context_ptr: *mut Context = {\n let mut contexts = ::env().contexts.lock();\n if let Some(mut current) = contexts.current_mut() {\n current.deref_mut()\n } else {\n return\n }\n };\n\n Context::spawn(\"kexec \".to_string() + &url.string, box move || {\n if let Ok(mut resource) = url.open() {\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n let entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n let physical_address = memory::alloc(virtual_size);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy(physical_address as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n ::memset((physical_address + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n }\n\n if entry > 0 && ! 
memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..args.len() {\n let reverse_i = args.len() - i - 1;\n if let Some(ref mut arg) = args.get_mut(reverse_i) {\n if ! arg.ends_with('\\0') {\n arg.push('\\0');\n }\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n unsafe {\n let _intex = Intex::static_lock();\n\n let context = &mut *context_ptr;\n\n context.name = url.string;\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: memory::alloc(CONTEXT_STACK_SIZE),\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new((*context.cwd.get()).clone()));\n context.memory = Arc::new(UnsafeCell::new(memory));\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n ptr::write(sp as *mut usize, *arg);\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(\/*1 << 9*\/ 0);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n });\n\n loop {\n unsafe { context_switch(false) };\n }\n}\n<commit_msg>Cleanup in executor<commit_after>use alloc::arc::Arc;\n\nuse collections::string::{String, ToString};\nuse collections::vec::Vec;\n\nuse core::cell::UnsafeCell;\nuse core::ops::DerefMut;\nuse core::{mem, ptr};\n\nuse common::elf::Elf;\nuse common::memory;\n\nuse scheduler::context::{CONTEXT_STACK_SIZE, CONTEXT_STACK_ADDR, context_switch,\ncontext_userspace, Context, ContextMemory};\n\nuse schemes::Url;\n\nuse sync::Intex;\n\n\/\/\/ Execute an executable\npub fn execute(url: Url, mut args: Vec<String>) {\n let mut context_ptr: *mut Context = 0 as *mut Context;\n let mut entry: usize = 0;\n\n if let Ok(mut resource) = url.open() {\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n\n let executable = Elf::from_data(vec.as_ptr() as usize);\n entry = unsafe { executable.entry() };\n let mut memory = Vec::new();\n unsafe {\n for segment in executable.load_segment().iter() {\n let virtual_address = segment.vaddr as usize;\n let virtual_size = segment.mem_len as usize;\n let physical_address = memory::alloc(virtual_size);\n\n if physical_address > 0 {\n \/\/ Copy progbits\n ::memcpy(physical_address as *mut u8,\n (executable.data + segment.off as usize) as *const u8,\n segment.file_len as usize);\n \/\/ Zero bss\n ::memset((physical_address + segment.file_len as usize) as *mut u8,\n 0,\n segment.mem_len as usize - segment.file_len as usize);\n\n memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size,\n writeable: segment.flags & 2 == 2\n });\n }\n }\n }\n\n if entry > 0 && ! 
memory.is_empty() {\n args.insert(0, url.to_string());\n\n let mut contexts = ::env().contexts.lock();\n if let Some(mut context) = contexts.current_mut() {\n unsafe { context.unmap() };\n\n context.name = url.string;\n context.args = Arc::new(UnsafeCell::new(args));\n context.cwd = Arc::new(UnsafeCell::new(unsafe { (*context.cwd.get()).clone() }));\n context.memory = Arc::new(UnsafeCell::new(memory));\n\n unsafe { context.map() };\n\n context_ptr = context.deref_mut();\n }\n } else {\n debug!(\"{}: Invalid memory or entry\\n\", url.string);\n }\n } else {\n debug!(\"{}: Failed to open\\n\", url.string);\n }\n\n if context_ptr as usize > 0 {\n Context::spawn(\"kexec\".to_string(), box move || {\n unsafe {\n let _intex = Intex::static_lock();\n\n let context = &mut *context_ptr;\n\n let mut context_args: Vec<usize> = Vec::new();\n context_args.push(0); \/\/ ENVP\n context_args.push(0); \/\/ ARGV NULL\n let mut argc = 0;\n for i in 0..(*context.args.get()).len() {\n let reverse_i = (*context.args.get()).len() - i - 1;\n if let Some(ref mut arg) = (*context.args.get()).get_mut(reverse_i) {\n if ! arg.ends_with('\\0') {\n arg.push('\\0');\n }\n context_args.push(arg.as_ptr() as usize);\n argc += 1;\n }\n }\n context_args.push(argc);\n\n context.sp = context.kernel_stack + CONTEXT_STACK_SIZE - 128;\n\n context.stack = Some(ContextMemory {\n physical_address: memory::alloc(CONTEXT_STACK_SIZE),\n virtual_address: CONTEXT_STACK_ADDR,\n virtual_size: CONTEXT_STACK_SIZE,\n writeable: true\n });\n\n let user_sp = if let Some(ref stack) = context.stack {\n let mut sp = stack.physical_address + stack.virtual_size - 128;\n for arg in context_args.iter() {\n sp -= mem::size_of::<usize>();\n ptr::write(sp as *mut usize, *arg);\n }\n sp - stack.physical_address + stack.virtual_address\n } else {\n 0\n };\n\n context.push(0x20 | 3);\n context.push(user_sp);\n context.push(\/*1 << 9*\/ 0);\n context.push(0x18 | 3);\n context.push(entry);\n context.push(context_userspace as usize);\n }\n });\n\n loop {\n unsafe { context_switch(false) };\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add example: convolution<commit_after>#![allow(unused)]\n#[macro_use(s)]\nextern crate ndarray;\nextern crate num;\n\nuse num::Float;\n\nuse ndarray::{\n ArrayView,\n ArrayViewMut,\n OwnedArray,\n Ix,\n};\n\ntype Ix2 = (Ix, Ix);\nconst SOBEL_X: [[f32; 3]; 3] = [[-1., 0., 1.], [-2., 0., 2.], [-1., 0., 1.]];\nconst SOBEL_Y: [[f32; 3]; 3] = [[ 1., 2., 1.], [ 0., 0., 0.], [-1., -2., -1.]];\nconst SHARPEN: [[f32; 3]; 3] = [[0., -1., 0.], [ -1., 5., -1.], [0., -1., 0.]];\n\ntype Kernel3x3<A> = [[A; 3]; 3];\n\n#[inline(never)]\nfn conv_3x3<F>(a: &ArrayView<F, Ix2>, out: &mut ArrayViewMut<F, Ix2>, kernel: &Kernel3x3<F>) \n where F: Float,\n{\n let (n, m) = a.dim();\n let (np, mp) = out.dim();\n if n < 3 || m < 3 {\n return;\n }\n assert!(np >= n && mp >= m);\n \/\/ i, j offset by -1 so that we can use unsigned indices\n unsafe {\n for i in 0..n - 2 {\n for j in 0..m - 2 {\n let mut conv = F::zero();\n for k in 0..3 {\n for l in 0..3 {\n conv = conv + *a.uget((i + k, j + l)) * kernel[k][l];\n \/\/conv += a[[i + k, j + l]] * x_kernel[k][l];\n }\n }\n *out.uget_mut((i + 1, j + 1)) = conv;\n }\n }\n }\n}\n\nfn main() {\n let n = 16;\n let mut a = OwnedArray::zeros((n, n));\n \/\/ make a circle\n let c = (8., 8.);\n for ((i, j), elt) in a.indexed_iter_mut() {\n {\n let s = ((i as f32) - c.0).powi(2) + (j as f32 - c.1).powi(2);\n if s.sqrt() > 3. && s.sqrt() < 6. 
{\n *elt = 1.;\n }\n }\n }\n println!(\"{:?}\", a);\n let mut res = OwnedArray::zeros(a.dim());\n for _ in 0..1000 {\n conv_3x3(&a.view(), &mut res.view_mut(), &SOBEL_X);\n }\n println!(\"{:?}\", res);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add macros4 example<commit_after>\/\/ Make me compile! Scroll down for hints :)\n\nmacro_rules! my_macro {\n () => {\n println!(\"Check out my macro!\");\n }\n ($val:expr) => {\n println!(\"Look at this other macro: {}\", $val);\n }\n}\n\nfn main() {\n my_macro!();\n my_macro!(7777);\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\/\/ You only need to add a single character to make this compile.\n\n\n\n\n\n\n\n\n\n\/\/ The way macros are written, it wants to see something between each \"macro arm\", so it can\n\/\/ separate them.\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rust: anyhow-bt: add 2nd example<commit_after>use anyhow::Context;\n\n\/*\nbaz() failed:\n\nCaused by:\n 0: bar() failed:\n 1: foo() failed:\n 2: root cause\n*\/\n\nfn foo() -> anyhow::Result<()> {\n Err(anyhow::anyhow!(\"root cause\"))\n}\n\nfn bar() -> anyhow::Result<()> {\n foo().context(\"foo() failed:\")?;\n Ok(())\n}\n\nfn baz() -> anyhow::Result<()> {\n bar().context(\"bar() failed:\")?;\n Ok(())\n}\n\nfn main() {\n match baz().context(\"baz() failed:\") {\n Ok(value) => value,\n Err(err) => println!(\"{:?}\", err),\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test for #19404<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(reflect_marker)]\n\nuse std::any::TypeId;\nuse std::marker::Reflect;\nuse std::rc::Rc;\n\ntype Fp<T> = Rc<T>;\n\nstruct Engine;\n\ntrait Component: 'static + Reflect {}\nimpl Component for Engine {}\n\ntrait Env {\n fn get_component_type_id(&self, type_id: TypeId) -> Option<Fp<Component>>;\n}\n\nimpl<'a> Env+'a {\n fn get_component<T: Component>(&self) -> Option<Fp<T>> {\n let x = self.get_component_type_id(TypeId::of::<T>());\n None\n }\n}\n\ntrait Figment {\n fn init(&mut self, env: &Env);\n}\n\nstruct MyFigment;\n\nimpl Figment for MyFigment {\n fn init(&mut self, env: &Env) {\n let engine = env.get_component::<Engine>();\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for Issue 26997.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub struct Foo {\n x: isize,\n y: isize\n}\n\nimpl Foo {\n pub extern fn foo_new() -> Foo {\n Foo { x: 21, y: 33 }\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Adds Rust Problem 14<commit_after>\/\/\/ Problem 14\n\/\/\/ The following iterative sequence is defined for the set of positive integers:\n\/\/\/ \n\/\/\/ n → n\/2 (n is even)\n\/\/\/ n → 3n + 1 (n is odd)\n\/\/\/ \n\/\/\/ Using the rule above and starting with 13, we generate the following sequence:\n\/\/\/ \n\/\/\/ 13 → 40 → 20 → 10 → 5 → 16 → 8 → 4 → 2 → 1\n\/\/\/ It can be seen that this sequence (starting at 13 and finishing at 1) contains \n\/\/\/ 10 terms. Although it has not been proved yet (Collatz Problem), it is thought \n\/\/\/ that all starting numbers finish at 1.\n\/\/\/ \n\/\/\/ Which starting number, under one million, produces the longest chain?\n\/\/\/ \n\/\/\/ NOTE: Once the chain starts the terms are allowed to go above one million.\nfn main() {\n let top: u64 = 1000000;\n let mut max: u64 = 0;\n let mut num_max: u64 = 0;\n\n for i in 2..top {\n let c = collatz(i);\n if c > max {\n max = c;\n num_max = i;\n println!(\"{} -> {}\", i, c);\n }\n }\n\n println!(\"Answer: {}\", num_max);\n}\n\nfn collatz(n: u64) -> u64 {\n let mut count = 1;\n let mut num: u64 = n;\n loop {\n if num == 1 {\n break\n }\n count += 1;\n num = match num % 2 {\n 0 => num\/2,\n 1 => 3*num+1,\n _ => 1\n };\n }\n count\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Test unop move semantics<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that move restrictions are enforced on overloaded unary operations\n\nfn move_then_borrow<T: Not<T> + Clone>(x: T) {\n !x;\n\n x.clone(); \/\/~ ERROR: use of moved value\n}\n\nfn move_borrowed<T: Not<T>>(x: T, mut y: T) {\n let m = &x;\n let n = &mut y;\n\n !x; \/\/~ ERROR: cannot move out of `x` because it is borrowed\n\n !y; \/\/~ ERROR: cannot move out of `y` because it is borrowed\n}\n\nfn illegal_dereference<T: Not<T>>(mut x: T, y: T) {\n let m = &mut x;\n let n = &y;\n\n !*m; \/\/~ ERROR: cannot move out of dereference of `&mut`-pointer\n\n !*n; \/\/~ ERROR: cannot move out of dereference of `&`-pointer\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix doc<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ffi;\nuse std::ops::Deref;\n\nconst SIZE: usize = 38;\n\n\/\/\/ Like SmallVec but for C strings.\n#[derive(Clone)]\npub enum SmallCStr {\n OnStack {\n data: [u8; SIZE],\n len_with_nul: u8,\n },\n OnHeap {\n data: ffi::CString,\n }\n}\n\nimpl SmallCStr {\n #[inline]\n pub fn new(s: &str) -> SmallCStr {\n if s.len() < SIZE {\n let mut data = [0; SIZE];\n data[.. s.len()].copy_from_slice(s.as_bytes());\n let len_with_nul = s.len() + 1;\n\n \/\/ Make sure once that this is a valid CStr\n if let Err(e) = ffi::CStr::from_bytes_with_nul(&data[.. len_with_nul]) {\n panic!(\"The string \\\"{}\\\" cannot be converted into a CStr: {}\", s, e);\n }\n\n SmallCStr::OnStack {\n data,\n len_with_nul: len_with_nul as u8,\n }\n } else {\n SmallCStr::OnHeap {\n data: ffi::CString::new(s).unwrap()\n }\n }\n }\n\n #[inline]\n pub fn as_c_str(&self) -> &ffi::CStr {\n match *self {\n SmallCStr::OnStack { ref data, len_with_nul } => {\n unsafe {\n let slice = &data[.. len_with_nul as usize];\n ffi::CStr::from_bytes_with_nul_unchecked(slice)\n }\n }\n SmallCStr::OnHeap { ref data } => {\n data.as_c_str()\n }\n }\n }\n\n #[inline]\n pub fn len_with_nul(&self) -> usize {\n match *self {\n SmallCStr::OnStack { len_with_nul, .. } => {\n len_with_nul as usize\n }\n SmallCStr::OnHeap { ref data } => {\n data.as_bytes_with_nul().len()\n }\n }\n }\n}\n\nimpl Deref for SmallCStr {\n type Target = ffi::CStr;\n\n fn deref(&self) -> &ffi::CStr {\n self.as_c_str()\n }\n}\n\n\n#[test]\nfn short() {\n const TEXT: &str = \"abcd\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(if let SmallCStr::OnStack { .. } = scs { true } else { false });\n}\n\n#[test]\nfn empty() {\n const TEXT: &str = \"\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(if let SmallCStr::OnStack { .. } = scs { true } else { false });\n}\n\n#[test]\nfn long() {\n const TEXT: &str = \"01234567890123456789012345678901234567890123456789\\\n 01234567890123456789012345678901234567890123456789\\\n 01234567890123456789012345678901234567890123456789\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(if let SmallCStr::OnHeap { .. } = scs { true } else { false });\n}\n\n#[test]\n#[should_panic]\nfn internal_nul() {\n let _ = SmallCStr::new(\"abcd\\0def\");\n}\n<commit_msg>Rollup merge of #53644 - llogiq:smallvec-for-small-c-str, r=estebank<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ffi;\nuse std::ops::Deref;\n\nuse smallvec::SmallVec;\n\nconst SIZE: usize = 36;\n\n\/\/\/ Like SmallVec but for C strings.\n#[derive(Clone)]\npub struct SmallCStr {\n data: SmallVec<[u8; SIZE]>,\n}\n\nimpl SmallCStr {\n #[inline]\n pub fn new(s: &str) -> SmallCStr {\n let len = s.len();\n let len1 = len + 1;\n let data = if len < SIZE {\n let mut buf = [0; SIZE];\n buf[..len].copy_from_slice(s.as_bytes());\n SmallVec::from_buf_and_len(buf, len1)\n } else {\n let mut data = Vec::with_capacity(len1);\n data.extend_from_slice(s.as_bytes());\n data.push(0);\n SmallVec::from_vec(data)\n };\n if let Err(e) = ffi::CStr::from_bytes_with_nul(&data) {\n panic!(\"The string \\\"{}\\\" cannot be converted into a CStr: {}\", s, e);\n }\n SmallCStr { data }\n }\n\n #[inline]\n pub fn new_with_nul(s: &str) -> SmallCStr {\n let b = s.as_bytes();\n if let Err(e) = ffi::CStr::from_bytes_with_nul(b) {\n panic!(\"The string \\\"{}\\\" cannot be converted into a CStr: {}\", s, e);\n }\n SmallCStr { data: SmallVec::from_slice(s.as_bytes()) }\n }\n\n\n #[inline]\n pub fn as_c_str(&self) -> &ffi::CStr {\n unsafe {\n ffi::CStr::from_bytes_with_nul_unchecked(&self.data[..])\n }\n }\n\n #[inline]\n pub fn len_with_nul(&self) -> usize {\n self.data.len()\n }\n\n pub fn spilled(&self) -> bool {\n self.data.spilled()\n }\n}\n\nimpl Deref for SmallCStr {\n type Target = ffi::CStr;\n\n fn deref(&self) -> &ffi::CStr {\n self.as_c_str()\n }\n}\n\n#[test]\nfn short() {\n const TEXT: &str = \"abcd\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(!scs.spilled());\n}\n\n#[test]\nfn empty() {\n const TEXT: &str = \"\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(!scs.spilled());\n}\n\n#[test]\nfn long() {\n const TEXT: &str = \"01234567890123456789012345678901234567890123456789\\\n 01234567890123456789012345678901234567890123456789\\\n 01234567890123456789012345678901234567890123456789\";\n let reference = ffi::CString::new(TEXT.to_string()).unwrap();\n\n let scs = SmallCStr::new(TEXT);\n\n assert_eq!(scs.len_with_nul(), TEXT.len() + 1);\n assert_eq!(scs.as_c_str(), reference.as_c_str());\n assert!(scs.spilled());\n}\n\n#[test]\n#[should_panic]\nfn internal_nul() {\n let _ = SmallCStr::new(\"abcd\\0def\");\n}\n<|endoftext|>"} {"text":"<commit_before>#![feature(rustc_attrs, step_trait, rustc_private)]\n\n#[macro_use] extern crate rustc_data_structures;\nextern crate rustc_serialize;\n\nuse rustc_data_structures::indexed_vec::Idx;\n\nnewtype_index!(struct MyIdx { MAX = 0xFFFF_FFFA });\n\nuse std::mem::size_of;\n\nfn main() {\n assert_eq!(size_of::<MyIdx>(), 4);\n assert_eq!(size_of::<Option<MyIdx>>(), 4);\n assert_eq!(size_of::<Option<Option<MyIdx>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<MyIdx>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<MyIdx>>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<Option<MyIdx>>>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<Option<Option<MyIdx>>>>>>>(), 8);\n}\n<commit_msg>Add missing #![feature(min_const_fn)] to the newtype_index test.<commit_after>#![feature(min_const_fn, rustc_attrs, rustc_private, 
step_trait)]\n\n#[macro_use] extern crate rustc_data_structures;\nextern crate rustc_serialize;\n\nuse rustc_data_structures::indexed_vec::Idx;\n\nnewtype_index!(struct MyIdx { MAX = 0xFFFF_FFFA });\n\nuse std::mem::size_of;\n\nfn main() {\n assert_eq!(size_of::<MyIdx>(), 4);\n assert_eq!(size_of::<Option<MyIdx>>(), 4);\n assert_eq!(size_of::<Option<Option<MyIdx>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<MyIdx>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<MyIdx>>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<Option<MyIdx>>>>>>(), 4);\n assert_eq!(size_of::<Option<Option<Option<Option<Option<Option<MyIdx>>>>>>>(), 8);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Make sure that casting a ptr-integer down to u8 makes it unusable<commit_after>fn main() {\n let x = &1;\n \/\/ Casting down to u8 and back up to a pointer loses too much precision; this must not work.\n let x = x as *const i32 as u8;\n let x = x as *const i32; \/\/~ ERROR: a raw memory access tried to access part of a pointer value as raw bytes\n let _ = unsafe { *x };\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Utilities to support Stratis.\nuse std::collections::HashMap;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse libudev;\n\nuse super::device::is_stratis_device;\nuse stratis::StratisResult;\n\n\/\/\/ Takes a libudev device entry and returns the properties as a HashMap.\nfn device_as_map(device: &libudev::Device) -> HashMap<String, String> {\n let rc: HashMap<_, _> = device\n .properties()\n .map(|i| {\n (\n String::from(i.name().to_str().expect(\"Unix is utf-8\")),\n String::from(i.value().to_str().expect(\"Unix is utf-8\")),\n )\n })\n .collect();\n rc\n}\n\n\/\/\/ Common function used to retrieve the udev db entry for a block device as a HashMap when found\npub fn get_udev_block_device(\n dev_node_search: &Path,\n) -> StratisResult<Option<HashMap<String, String>>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n \/\/ Get canonical form to ensure we do correct lookup in udev db\n let canonical = fs::canonicalize(dev_node_search)?;\n\n let result = enumerator\n .scan_devices()?\n .find(|x| x.devnode().map_or(false, |d| canonical == d))\n .and_then(|dev| Some(device_as_map(&dev)));\n Ok(result)\n}\n\n\/\/\/ Lookup the WWN from the udev db using the device node eg. 
\/dev\/sda\npub fn hw_lookup(dev_node_search: &Path) -> StratisResult<Option<String>> {\n let dev = get_udev_block_device(dev_node_search)?;\n Ok(dev.and_then(|dev| dev.get(\"ID_WWN\").and_then(|i| Some(i.clone()))))\n}\n\n\/\/\/ Collect paths for all the block devices which are not individual multipath paths and which\n\/\/\/ appear to be empty from a udev perspective.\nfn get_all_empty_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n Ok(enumerator\n .scan_devices()?\n .filter(|dev| {\n dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none()\n && !((dev.property_value(\"ID_PART_TABLE_TYPE\").is_some()\n && dev.property_value(\"ID_PART_ENTRY_DISK\").is_none())\n || dev.property_value(\"ID_FS_USAGE\").is_some())\n })\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect())\n}\n\n\/\/\/ Retrieve all the block devices on the system that have a Stratis signature.\npub fn get_stratis_block_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n enumerator.match_property(\"ID_FS_TYPE\", \"stratis\")?;\n\n let devices: Vec<PathBuf> = enumerator\n .scan_devices()?\n .filter(|dev| dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none())\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect();\n\n if devices.is_empty() {\n \/\/ Either we don't have any stratis devices or we are using a distribution that doesn't\n \/\/ have a version of libblkid that supports stratis, lets make sure.\n \/\/ TODO: At some point in the future we can remove this and just return the devices.\n\n Ok(get_all_empty_devices()?\n .into_iter()\n .filter(|x| is_stratis_device(&x).ok().is_some())\n .collect())\n } else {\n Ok(devices)\n }\n}\n<commit_msg>Ignore incomplete udev entries during enumeration<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/ Utilities to support Stratis.\nuse std::collections::HashMap;\nuse std::fs;\nuse std::path::{Path, PathBuf};\n\nuse libudev;\n\nuse super::device::is_stratis_device;\nuse stratis::StratisResult;\n\n\/\/\/ Takes a libudev device entry and returns the properties as a HashMap.\nfn device_as_map(device: &libudev::Device) -> HashMap<String, String> {\n let rc: HashMap<_, _> = device\n .properties()\n .map(|i| {\n (\n String::from(i.name().to_str().expect(\"Unix is utf-8\")),\n String::from(i.value().to_str().expect(\"Unix is utf-8\")),\n )\n })\n .collect();\n rc\n}\n\n\/\/\/ Common function used to retrieve the udev db entry for a block device as a HashMap when found\npub fn get_udev_block_device(\n dev_node_search: &Path,\n) -> StratisResult<Option<HashMap<String, String>>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n \/\/ Get canonical form to ensure we do correct lookup in udev db\n let canonical = fs::canonicalize(dev_node_search)?;\n\n let result = enumerator\n .scan_devices()?\n .filter(|dev| dev.is_initialized())\n .find(|x| x.devnode().map_or(false, |d| canonical == d))\n .and_then(|dev| Some(device_as_map(&dev)));\n Ok(result)\n}\n\n\/\/\/ Lookup the WWN from the udev db using the device node eg. 
\/dev\/sda\npub fn hw_lookup(dev_node_search: &Path) -> StratisResult<Option<String>> {\n let dev = get_udev_block_device(dev_node_search)?;\n Ok(dev.and_then(|dev| dev.get(\"ID_WWN\").and_then(|i| Some(i.clone()))))\n}\n\n\/\/\/ Collect paths for all the block devices which are not individual multipath paths and which\n\/\/\/ appear to be empty from a udev perspective.\nfn get_all_empty_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n\n Ok(enumerator\n .scan_devices()?\n .filter(|dev| dev.is_initialized())\n .filter(|dev| {\n dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none()\n && !((dev.property_value(\"ID_PART_TABLE_TYPE\").is_some()\n && dev.property_value(\"ID_PART_ENTRY_DISK\").is_none())\n || dev.property_value(\"ID_FS_USAGE\").is_some())\n })\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect())\n}\n\n\/\/\/ Retrieve all the block devices on the system that have a Stratis signature.\npub fn get_stratis_block_devices() -> StratisResult<Vec<PathBuf>> {\n let context = libudev::Context::new()?;\n let mut enumerator = libudev::Enumerator::new(&context)?;\n enumerator.match_subsystem(\"block\")?;\n enumerator.match_property(\"ID_FS_TYPE\", \"stratis\")?;\n\n let devices: Vec<PathBuf> = enumerator\n .scan_devices()?\n .filter(|dev| dev.is_initialized())\n .filter(|dev| dev.property_value(\"DM_MULTIPATH_DEVICE_PATH\").is_none())\n .filter_map(|i| i.devnode().map(|d| d.into()))\n .collect();\n\n if devices.is_empty() {\n \/\/ We have found no Stratis devices, possible reasons are:\n \/\/ 1. We really don't have any\n \/\/ 2. We have some, but libblkid is too old to support Stratis, thus we appear empty\n \/\/ 3. We ran this code at early boot before we have any udev db entries which are complete\n \/\/ or are complete but fall into reasons 1 & 2 above\n \/\/\n \/\/ In this case we will get all the block devices which have complete udev db block device\n \/\/ entries and appear \"empty\" and go out to disk and check them!\n\n Ok(get_all_empty_devices()?\n .into_iter()\n .filter(|x| is_stratis_device(&x).ok().is_some())\n .collect())\n } else {\n Ok(devices)\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::solaris_base::opts();\n base.pre_link_args.insert(LinkerFlavor::Gcc, vec![\"-m64\".to_string()]);\n \/\/ llvm calls this \"v9\"\n base.cpu = \"v9\".to_string();\n base.max_atomic_width = Some(64);\n\n Ok(Target {\n llvm_target: \"sparcv9-sun-solaris\".to_string(),\n target_endian: \"big\".to_string(),\n target_pointer_width: \"64\".to_string(),\n target_c_int_width: \"32\".to_string(),\n data_layout: \"E-m:e-i64:64-n32:64-S128\".to_string(),\n \/\/ Use \"sparc64\" instead of \"sparcv9\" here, since the former is already\n \/\/ used widely in the source base. 
If we ever needed ABI\n \/\/ differentiation from the sparc64, we could, but that would probably\n \/\/ just be confusing.\n arch: \"sparc64\".to_string(),\n target_os: \"solaris\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"sun\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: base,\n })\n}\n<commit_msg>Disable jemalloc for sparcv9-sun-solaris<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse LinkerFlavor;\nuse target::{Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::solaris_base::opts();\n base.pre_link_args.insert(LinkerFlavor::Gcc, vec![\"-m64\".to_string()]);\n \/\/ llvm calls this \"v9\"\n base.cpu = \"v9\".to_string();\n base.max_atomic_width = Some(64);\n base.exe_allocation_crate = None;\n\n Ok(Target {\n llvm_target: \"sparcv9-sun-solaris\".to_string(),\n target_endian: \"big\".to_string(),\n target_pointer_width: \"64\".to_string(),\n target_c_int_width: \"32\".to_string(),\n data_layout: \"E-m:e-i64:64-n32:64-S128\".to_string(),\n \/\/ Use \"sparc64\" instead of \"sparcv9\" here, since the former is already\n \/\/ used widely in the source base. If we ever needed ABI\n \/\/ differentiation from the sparc64, we could, but that would probably\n \/\/ just be confusing.\n arch: \"sparc64\".to_string(),\n target_os: \"solaris\".to_string(),\n target_env: \"\".to_string(),\n target_vendor: \"sun\".to_string(),\n linker_flavor: LinkerFlavor::Gcc,\n options: base,\n })\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove let binding for unit value<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>initial commit - implement handshake<commit_after>\/\/! A Web Socket server\n\n#[crate_id = \"rust-ws\"];\n\nextern mod extra;\nextern mod http;\nextern mod rust_crypto = \"rust-crypto\";\n\nuse rust_crypto::sha1::Sha1;\nuse rust_crypto::digest::Digest;\nuse extra::base64::{ToBase64, STANDARD};\n\nuse std::io::net::ip::{SocketAddr, Ipv4Addr};\nuse std::io::Writer;\nuse extra::time;\n\nuse http::server::{Config, Server, Request, ResponseWriter};\nuse http::status::SwitchingProtocols;\nuse http::headers::HeaderEnum;\nuse http::headers::response::ExtensionHeader;\nuse http::headers::content_type::MediaType;\nuse http::headers::connection::Token;\nuse http::method::Get;\n\nstatic WEBSOCKET_SALT: &'static str = \"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\";\n\n#[deriving(Clone)]\nstruct WebSocketServer;\n\nimpl Server for WebSocketServer {\n fn get_config(&self) -> Config {\n Config { bind_address: SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 8001 } }\n }\n\n fn handle_request(&self, r: &Request, w: &mut ResponseWriter) {\n match (&r.method, &r.headers.upgrade){\n \/\/ (&Get, &Some(~\"websocket\"), &Some(~[Token(~\"Upgrade\")])) => { \/\/ FIXME this doesn't work. 
but client must have the header \"Connection: Upgrade\"\n (&Get, &Some(~\"websocket\")) => { \/\/ TODO client must have the header \"Connection: Upgrade\"\n w.status = SwitchingProtocols;\n w.headers.upgrade = Some(~\"websocket\");\n\n \/\/ w.headers.transfer_encoding = None;\n w.headers.content_length = Some(0);\n\n w.headers.connection = Some(~[Token(~\"Upgrade\")]);\n\n \/\/ FIXME must we iter?\n for header in r.headers.iter() {\n match (header.header_name(), header.header_value()) {\n (~\"Sec-Websocket-Key\", val) => {\n \/\/ NOTE from RFC 6455\n \/\/ As an example, if the value of the |Sec-WebSocket-Key|\n \/\/ header field in the client's handshake were\n \/\/ \"dGhlIHNhbXBsZSBub25jZQ==\", the server would append the\n \/\/ string \"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\" to form the\n \/\/ string \"dGhlIHNhbXBsZSBub25jZQ==258EAFA5-E914-47DA-95CA-\n \/\/ C5AB0DC85B11\". The server would then take the SHA-1 hash\n \/\/ of this string, giving the value 0xb3 0x7a 0x4f 0x2c 0xc0\n \/\/ 0x62 0x4f 0x16 0x90 0xf6 0x46 0x06 0xcf 0x38 0x59 0x45\n \/\/ 0xb2 0xbe 0xc4 0xea. This value is then base64-encoded,\n \/\/ to give the value \"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\", which\n \/\/ would be returned in the |Sec-WebSocket-Accept| header\n \/\/ field.\n\n let mut sh = Sha1::new();\n let mut out = [0u8, ..20];\n sh.input_str(val + WEBSOCKET_SALT);\n sh.result(out);\n let sec_websocket_accept = out.to_base64(STANDARD);\n debug!(\"sec websocket accept: {}\", sec_websocket_accept);\n w.headers.insert(ExtensionHeader(~\"Sec-WebSocket-Accept\", sec_websocket_accept));\n }\n (name, val) => {\n debug!(\"{}: {}\", name, val);\n }\n }\n }\n\n return;\n },\n (&_, &Some(_)) => {}, \/\/ handle other upgrade - this is rare apparently, but may be used for TLS for example. not sure if browsers actually implement it though.\n (&_, &None) => {} \/\/ TODO regular http server should handle this request\n }\n\n w.headers.date = Some(time::now_utc());\n w.headers.content_type = Some(MediaType {\n type_: ~\"text\",\n subtype: ~\"html\",\n parameters: ~[(~\"charset\", ~\"UTF-8\")]\n });\n w.headers.server = Some(~\"Rust Thingummy\/0.0-pre\");\n w.write(bytes!(\"<!DOCTYPE html><title>Rust HTTP server<\/title>\"));\n\n w.write(bytes!(\"<h1>Request<\/h1>\"));\n let s = format!(\"<dl>\n <dt>Method<\/dt><dd>{}<\/dd>\n <dt>Host<\/dt><dd>{:?}<\/dd>\n <dt>Upgrade<\/dt><dd>{:?}<\/dd>\n <dt>Request URI<\/dt><dd>{:?}<\/dd>\n <dt>HTTP version<\/dt><dd>{:?}<\/dd>\n <dt>Close connection<\/dt><dd>{}<\/dd><\/dl>\",\n r.method,\n r.headers.host,\n r.headers.upgrade,\n r.request_uri,\n r.version,\n r.close_connection);\n w.write(s.as_bytes());\n w.write(bytes!(\"<h2>Extension headers<\/h2>\"));\n w.write(bytes!(\"<table><thead><tr><th>Name<\/th><th>Value<\/th><\/thead><tbody>\"));\n for header in r.headers.iter() {\n let line = format!(\"<tr><td><code>{}<\/code><\/td><td><code>{}<\/code><\/td><\/tr>\",\n header.header_name(),\n header.header_value());\n w.write(line.as_bytes());\n }\n w.write(bytes!(\"<\/tbody><\/table>\"));\n w.write(bytes!(\"<h2>Body<\/h2><pre>\"));\n w.write(r.body.as_bytes());\n w.write(bytes!(\"<\/pre>\"));\n\n w.write(bytes!(\"<h1>Response<\/h1>\"));\n let s = format!(\"<dl><dt>Status<\/dt><dd>{}<\/dd><\/dl>\", w.status);\n w.write(s.as_bytes());\n w.write(bytes!(\"<h2>Headers<\/h2>\"));\n w.write(bytes!(\"<table><thead><tr><th>Name<\/th><th>Value<\/th><\/thead><tbody>\"));\n {\n let h = w.headers.clone();\n for header in h.iter() {\n let line = 
format!(\"<tr><td><code>{}<\/code><\/td><td><code>{}<\/code><\/td><\/tr>\",\n header.header_name(),\n header.header_value());\n w.write(line.as_bytes());\n }\n }\n w.write(bytes!(\"<\/tbody><\/table>\"));\n }\n}\n\nfn main() {\n WebSocketServer.serve_forever();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix: Use unwrap_or_else(panic!(...)) as recommended by clippy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add test for misaligned pointer loads<commit_after>fn main() {\n let x = &2u16;\n let x = x as *const _ as *const *const u8;\n \/\/ This must fail because alignment is violated. Test specifically for loading pointers, which have special code\n \/\/ in miri's memory.\n let _x = unsafe { *x }; \/\/~ ERROR: tried to access memory with alignment 2, but alignment\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>finished testing bits<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests<commit_after>\/\/ run-pass\n\n#[repr(align(4))]\nstruct Foo;\n\nstatic FOO: Foo = Foo;\n\nfn main() {\n let x: &'static () = &();\n assert_eq!(x as *const () as usize, 1);\n let x: &'static Foo = &Foo;\n assert_eq!(x as *const Foo as usize, 4);\n\n \/\/ statics must have a unique address\n assert_ne!(&FOO as *const Foo as usize, 4);\n\n assert_eq!(<Vec<i32>>::new().as_ptr(), <&[i32]>::default().as_ptr());\n assert_eq!(<Box<[i32]>>::default().as_ptr(), (&[]).as_ptr());\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A task that sniffs data\nuse std::comm::{channel, Receiver, Sender, Disconnected};\nuse std::task::TaskBuilder;\nuse resource_task::{LoadResponse};\n\npub type SnifferTask = Sender<LoadResponse>;\n\npub fn new_sniffer_task(next_rx: Sender<LoadResponse>) -> SnifferTask {\n let (sen, rec) = channel();\n let builder = TaskBuilder::new().named(\"SnifferManager\");\n builder.spawn(proc() {\n SnifferManager::new(rec).start(next_rx);\n });\n sen\n}\n\nstruct SnifferManager {\n data_receiver: Receiver<LoadResponse>,\n}\n\nimpl SnifferManager {\n fn new(data_receiver: Receiver<LoadResponse>) -> SnifferManager {\n SnifferManager {\n data_receiver: data_receiver,\n }\n }\n}\n\nimpl SnifferManager {\n fn start(&self, next_rx: Sender<LoadResponse>) {\n loop {\n match self.data_receiver.try_recv() {\n Ok(snif_data) => next_rx.send(snif_data),\n Err(Disconnected) => break,\n Err(_) => (),\n }\n }\n }\n}\n<commit_msg>auto merge of #4070 : kparaju\/servo\/master-mime-sniffer-failing-4046, r=jdm<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
A task that sniffs data\nuse std::comm::{channel, Receiver, Sender, Disconnected};\nuse std::task::TaskBuilder;\nuse resource_task::{LoadResponse};\n\npub type SnifferTask = Sender<LoadResponse>;\n\npub fn new_sniffer_task(next_rx: Sender<LoadResponse>) -> SnifferTask {\n let (sen, rec) = channel();\n let builder = TaskBuilder::new().named(\"SnifferManager\");\n builder.spawn(proc() {\n SnifferManager::new(rec).start(next_rx);\n });\n sen\n}\n\nstruct SnifferManager {\n data_receiver: Receiver<LoadResponse>,\n}\n\nimpl SnifferManager {\n fn new(data_receiver: Receiver<LoadResponse>) -> SnifferManager {\n SnifferManager {\n data_receiver: data_receiver,\n }\n }\n}\n\nimpl SnifferManager {\n fn start(self, next_rx: Sender<LoadResponse>) {\n loop {\n match self.data_receiver.try_recv() {\n Ok(snif_data) => {\n let result = next_rx.send_opt(snif_data);\n if result.is_err() {\n break;\n }\n }\n Err(Disconnected) => break,\n Err(_) => (),\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test<commit_after>\/\/ ignore-tidy-linelength\n\npub trait MyTrait {\n type Assoc;\n const VALUE: u32;\n fn trait_function(&self);\n fn defaulted(&self) {}\n fn defaulted_override(&self) {}\n}\n\n\nimpl MyTrait for String {\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-1\"]\/\/a[@class=\"type\"]\/@href' #associatedtype.Assoc\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-1\"]\/\/a[@class=\"anchor\"]\/@href' #associatedtype.Assoc-1\n type Assoc = ();\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-1\"]\/\/a[@class=\"constant\"]\/@href' #associatedconstant.VALUE\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-1\"]\/\/a[@class=\"anchor\"]\/@href' #associatedconstant.VALUE-1\n const VALUE: u32 = 5;\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.trait_function\"]\/\/a[@class=\"fnname\"]\/@href' #tymethod.trait_function\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.trait_function\"]\/\/a[@class=\"anchor\"]\/@href' #method.trait_function\n fn trait_function(&self) {}\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-1\"]\/\/a[@class=\"fnname\"]\/@href' #method.defaulted_override\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-1\"]\/\/a[@class=\"anchor\"]\/@href' #method.defaulted_override-1\n fn defaulted_override(&self) {}\n}\n\nimpl MyTrait for Vec<u8> {\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-2\"]\/\/a[@class=\"type\"]\/@href' #associatedtype.Assoc\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-2\"]\/\/a[@class=\"anchor\"]\/@href' #associatedtype.Assoc-2\n type Assoc = ();\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-2\"]\/\/a[@class=\"constant\"]\/@href' #associatedconstant.VALUE\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-2\"]\/\/a[@class=\"anchor\"]\/@href' #associatedconstant.VALUE-2\n const VALUE: u32 = 5;\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html 
'\/\/h4[@id=\"method.trait_function\"]\/\/a[@class=\"fnname\"]\/@href' #tymethod.trait_function\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.trait_function-1\"]\/\/a[@class=\"anchor\"]\/@href' #method.trait_function-1\n fn trait_function(&self) {}\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-2\"]\/\/a[@class=\"fnname\"]\/@href' #method.defaulted_override\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-2\"]\/\/a[@class=\"anchor\"]\/@href' #method.defaulted_override-2\n fn defaulted_override(&self) {}\n}\n\nimpl MyTrait for MyStruct {\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-3\"]\/\/a[@class=\"type\"]\/@href' #associatedtype.Assoc\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedtype.Assoc-3\"]\/\/a[@class=\"anchor\"]\/@href' #associatedtype.Assoc-3\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"associatedtype.Assoc\"]\/\/a[@class=\"type\"]\/@href' ..\/trait_impl_items_links_and_anchors\/trait.MyTrait.html#associatedtype.Assoc\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"associatedtype.Assoc\"]\/\/a[@class=\"anchor\"]\/@href' #associatedtype.Assoc\n type Assoc = bool;\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-3\"]\/\/a[@class=\"constant\"]\/@href' #associatedconstant.VALUE\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"associatedconstant.VALUE-3\"]\/\/a[@class=\"anchor\"]\/@href' #associatedconstant.VALUE-3\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"associatedconstant.VALUE\"]\/\/a[@class=\"constant\"]\/@href' ..\/trait_impl_items_links_and_anchors\/trait.MyTrait.html#associatedconstant.VALUE\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"associatedconstant.VALUE\"]\/\/a[@class=\"anchor\"]\/@href' #associatedconstant.VALUE\n const VALUE: u32 = 20;\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.trait_function-2\"]\/\/a[@class=\"fnname\"]\/@href' #tymethod.trait_function\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.trait_function-2\"]\/\/a[@class=\"anchor\"]\/@href' #method.trait_function-2\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"method.trait_function\"]\/\/a[@class=\"fnname\"]\/@href' ..\/trait_impl_items_links_and_anchors\/trait.MyTrait.html#tymethod.trait_function\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"method.trait_function\"]\/\/a[@class=\"anchor\"]\/@href' #method.trait_function\n fn trait_function(&self) {}\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-3\"]\/\/a[@class=\"fnname\"]\/@href' #method.defaulted_override\n \/\/ @has trait_impl_items_links_and_anchors\/trait.MyTrait.html '\/\/h4[@id=\"method.defaulted_override-3\"]\/\/a[@class=\"anchor\"]\/@href' #method.defaulted_override-3\n \/\/ @has trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"method.defaulted_override\"]\/\/a[@class=\"fnname\"]\/@href' ..\/trait_impl_items_links_and_anchors\/trait.MyTrait.html#method.defaulted_override\n \/\/ @has 
trait_impl_items_links_and_anchors\/struct.MyStruct.html '\/\/h4[@id=\"method.defaulted_override\"]\/\/a[@class=\"anchor\"]\/@href' #method.defaulted_override\n fn defaulted_override(&self) {}\n}\n\npub struct MyStruct;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add arielby's example<commit_after>fn main() {\n let x = &mut 0u32;\n let p = x as *mut u32;\n foo(x, p);\n}\n\nfn foo(a: &mut u32, y: *mut u32) -> u32 {\n *a = 1;\n let _b = &*a;\n unsafe { *y = 2; } \/\/~ ERROR: borrow stack\n return *a;\n}\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse git2::{Repository, Signature};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\nuse vcs::git::config::{author_name, author_mail, committer_name, committer_mail};\n\nstruct Person<'a> {\n pub name: &'a str,\n pub mail: &'a str,\n}\n\nimpl<'a> Person<'a> {\n fn new(name: &'a str, mail: &'a str) -> Person<'a> {\n Person { name: name, mail: mail }\n }\n}\n\npub struct Runtime<'a> {\n repository: Option<Repository>,\n author: Option<Person<'a>>,\n committer: Option<Person<'a>>,\n\n config: Option<Value>,\n}\n\nimpl<'a> Runtime<'a> {\n\n pub fn new(storepath: &PathBuf) -> Runtime<'a> {\n Runtime {\n repository: match Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n author: None,\n committer: None,\n config: None,\n }\n }\n\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n let config = cfg.clone();\n let res = author_name(&config)\n .and_then(|n| author_mail(&config).map(|m| Person::new(n, m)))\n .and_then(|author| {\n committer_name(&config)\n .and_then(|n| committer_mail(&config).map(|m| (author, Person::new(n, m))))\n })\n .map(|(author, committer)| {\n self.author = Some(author);\n self.committer = Some(committer);\n });\n self.config = Some(config);\n res\n }\n\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n pub fn config_value_or_err(&self) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|mut e| e.with_custom_data(CustomData::default().aborting(false)))\n }\n\n pub fn new_committer_sig(&self) -> Option<Result<Signature>> {\n self.committer\n .as_ref()\n .map(|c| {\n Signature::now(c.name, c.mail)\n .map_err(|e| GHEK::MkSignature.into_error_with_cause(Box::new(e)))\n })\n }\n\n pub fn repository(&self) -> Result<&Repository> {\n self.repository.as_ref().ok_or(GHEK::MkRepo.into_error())\n }\n\n pub fn ensure_cfg_branch_is_checked_out(&self) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n\n let head = try!(self\n .repository()\n .and_then(|r| {\n r.head().map_err_into(GHEK::HeadFetchError)\n })\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e)));\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if head.is_branch() {\n return Err(GHEK::NotOnBranch.into_error())\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e));\n }\n\n \/\/ Check out appropriate branch ... 
or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n match head.name().map(|name| name == s) {\n Some(b) => {\n if b {\n debug!(\"Branch already checked out.\");\n Ok(())\n } else {\n debug!(\"Branch not checked out.\");\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"No branch to checkout\");\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n }\n\n}\n\n<commit_msg>Add Runtime::has_repository()<commit_after>use std::path::PathBuf;\n\nuse git2::{Repository, Signature};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\n\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\nuse vcs::git::config::{author_name, author_mail, committer_name, committer_mail};\n\nstruct Person<'a> {\n pub name: &'a str,\n pub mail: &'a str,\n}\n\nimpl<'a> Person<'a> {\n fn new(name: &'a str, mail: &'a str) -> Person<'a> {\n Person { name: name, mail: mail }\n }\n}\n\npub struct Runtime<'a> {\n repository: Option<Repository>,\n author: Option<Person<'a>>,\n committer: Option<Person<'a>>,\n\n config: Option<Value>,\n}\n\nimpl<'a> Runtime<'a> {\n\n pub fn new(storepath: &PathBuf) -> Runtime<'a> {\n Runtime {\n repository: match Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n author: None,\n committer: None,\n config: None,\n }\n }\n\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n let config = cfg.clone();\n let res = author_name(&config)\n .and_then(|n| author_mail(&config).map(|m| Person::new(n, m)))\n .and_then(|author| {\n committer_name(&config)\n .and_then(|n| committer_mail(&config).map(|m| (author, Person::new(n, m))))\n })\n .map(|(author, committer)| {\n self.author = Some(author);\n self.committer = Some(committer);\n });\n self.config = Some(config);\n res\n }\n\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n pub fn config_value_or_err(&self) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|mut e| e.with_custom_data(CustomData::default().aborting(false)))\n }\n\n pub fn new_committer_sig(&self) -> Option<Result<Signature>> {\n self.committer\n .as_ref()\n .map(|c| {\n Signature::now(c.name, c.mail)\n .map_err(|e| GHEK::MkSignature.into_error_with_cause(Box::new(e)))\n })\n }\n\n pub fn repository(&self) -> Result<&Repository> {\n self.repository.as_ref().ok_or(GHEK::MkRepo.into_error())\n }\n\n pub fn ensure_cfg_branch_is_checked_out(&self) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n\n let head = try!(self\n .repository()\n .and_then(|r| {\n r.head().map_err_into(GHEK::HeadFetchError)\n })\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e)));\n\n \/\/ TODO: Fail if not on branch? hmmh... 
I'm not sure\n if head.is_branch() {\n return Err(GHEK::NotOnBranch.into_error())\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e));\n }\n\n \/\/ Check out appropriate branch ... or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n match head.name().map(|name| name == s) {\n Some(b) => {\n if b {\n debug!(\"Branch already checked out.\");\n Ok(())\n } else {\n debug!(\"Branch not checked out.\");\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"No branch to checkout\");\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::path::PathBuf;\n\nuse git2::{Repository, Signature};\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: match Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n config: None,\n }\n }\n\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|mut e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n\n debug!(\"[GIT CREATE HOOK]: Ensuring branch checkout\");\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"Repository fetched, getting head\");\n r.head()\n .map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n }));\n debug!(\"HEAD fetched\");\n\n \/\/ TODO: Fail if not on branch? hmmh... 
I'm not sure\n if !head.is_branch() {\n debug!(\"HEAD is not a branch\");\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"HEAD is a branch\");\n\n \/\/ Check out appropriate branch ... or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"We have to ensure branch: {}\", s);\n match head.name().map(|name| {\n debug!(\"{} == {}\", name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"Branch already checked out.\");\n Ok(())\n } else {\n debug!(\"Branch not checked out.\");\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"No branch to checkout\");\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg_str(\"[GIT CREATE HOOK]: Branch checked out\")\n }\n\n}\n\n<commit_msg>runtime: Remove unused import Signature<commit_after>use std::path::PathBuf;\n\nuse git2::Repository;\nuse toml::Value;\n\nuse libimagerror::into::IntoError;\nuse libimagerror::trace::trace_error;\nuse libimagstore::hook::error::CustomData;\nuse libimagstore::hook::error::HookErrorKind as HEK;\nuse libimagstore::hook::result::HookResult;\nuse libimagutil::debug_result::*;\n\nuse vcs::git::action::StoreAction;\nuse vcs::git::result::Result;\nuse vcs::git::error::{MapErrInto, GitHookErrorKind as GHEK};\n\npub struct Runtime {\n repository: Option<Repository>,\n config: Option<Value>,\n}\n\nimpl Runtime {\n\n pub fn new(storepath: &PathBuf) -> Runtime {\n Runtime {\n repository: match Repository::open(storepath) {\n Ok(r) => Some(r),\n Err(e) => {\n trace_error(&e);\n None\n },\n },\n\n config: None,\n }\n }\n\n pub fn set_config(&mut self, cfg: &Value) -> Result<()> {\n self.config = Some(cfg.clone());\n Ok(())\n }\n\n pub fn has_repository(&self) -> bool {\n self.repository.is_some()\n }\n\n pub fn has_config(&self) -> bool {\n self.config.is_some()\n }\n\n pub fn config_value_or_err(&self, action: &StoreAction) -> HookResult<&Value> {\n self.config\n .as_ref()\n .ok_or(GHEK::NoConfigError.into_error())\n .map_err_into(GHEK::ConfigError)\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_err(|mut e| e.with_custom_data(CustomData::default().aborting(false)))\n .map_dbg_err(|_| {\n format!(\"[GIT {} HOOK]: Couldn't get Value object from config\", action.uppercase())\n })\n }\n\n pub fn repository(&self, action: &StoreAction) -> HookResult<&Repository> {\n use vcs::git::error::MapIntoHookError;\n\n debug!(\"[GIT {} HOOK]: Getting repository\", action.uppercase());\n self.repository\n .as_ref()\n .ok_or(GHEK::MkRepo.into_error())\n .map_err_into(GHEK::RepositoryError)\n .map_into_hook_error()\n .map_dbg_err(|_| format!(\"[GIT {} HOOK]: Couldn't fetch Repository\", action.uppercase()))\n .map_dbg(|_| format!(\"[GIT {} HOOK]: Repository object fetched\", action.uppercase()))\n }\n\n pub fn ensure_cfg_branch_is_checked_out(&self, action: &StoreAction) -> HookResult<()> {\n use vcs::git::config::ensure_branch;\n\n debug!(\"[GIT CREATE HOOK]: Ensuring branch checkout\");\n let head = try!(self\n .repository(action)\n .and_then(|r| {\n debug!(\"Repository fetched, getting head\");\n r.head()\n .map_dbg_err_str(\"Couldn't fetch HEAD\")\n .map_dbg_err(|e| format!(\"\\tbecause = {:?}\", e))\n .map_err_into(GHEK::HeadFetchError)\n .map_err(|e| e.into())\n 
}));\n debug!(\"HEAD fetched\");\n\n \/\/ TODO: Fail if not on branch? hmmh... I'm not sure\n if !head.is_branch() {\n debug!(\"HEAD is not a branch\");\n return Err(GHEK::NotOnBranch.into_error().into());\n }\n debug!(\"HEAD is a branch\");\n\n \/\/ Check out appropriate branch ... or fail\n match ensure_branch(self.config.as_ref()) {\n Ok(Some(s)) => {\n debug!(\"We have to ensure branch: {}\", s);\n match head.name().map(|name| {\n debug!(\"{} == {}\", name, s);\n name == s\n }) {\n Some(b) => {\n if b {\n debug!(\"Branch already checked out.\");\n Ok(())\n } else {\n debug!(\"Branch not checked out.\");\n unimplemented!()\n }\n },\n\n None => Err(GHEK::RepositoryBranchNameFetchingError.into_error())\n .map_err_into(GHEK::RepositoryBranchError)\n .map_err_into(GHEK::RepositoryError),\n }\n },\n Ok(None) => {\n debug!(\"No branch to checkout\");\n Ok(())\n },\n\n Err(e) => Err(e).map_err_into(GHEK::RepositoryError),\n }\n .map_err(Box::new)\n .map_err(|e| HEK::HookExecutionError.into_error_with_cause(e))\n .map_dbg_str(\"[GIT CREATE HOOK]: Branch checked out\")\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add merge-sort implementation<commit_after>\/* Sample merge sort program in Rust.\n Tested to compile with rust-0.6-f1ddb8d.\n*\/\nextern mod extra;\nextern mod benchmark;\nuse std::vec;\nuse benchmark::Benchmark;\n\nfn merge_sort<T:Ord+Copy>(arr: ~[T]) -> ~[T] {\n let length = arr.len();\n if length <= 1 {\n return arr.to_owned();\n }\n\n let middle = length \/ 2;\n let mut left: ~[T] = vec::from_elem(middle, copy arr[0]);\n let mut right: ~[T] = vec::from_elem(length - middle, copy arr[0]);\n let mut index = 0;\n\n\n while index < middle {\n left[index] = arr[index];\n index += 1;\n }\n\n while index < length {\n right[index - middle] = arr[index];\n index += 1;\n }\n\n left = merge_sort(left);\n right = merge_sort(right);\n\n merge(left, right)\n}\n\nfn merge<T:Ord+Copy>(left_orig: ~[T], right_orig: ~[T]) -> ~[T] {\n let mut left = copy left_orig;\n let mut right = copy right_orig;\n let mut result = vec::from_elem(0, copy left[0]);\n\n while left.len() > 0 || right.len() > 0 {\n if left.len() > 0 && right.len() > 0 {\n if left[0] < right[0] {\n result.push(left.shift());\n }\n else {\n result.push(right.shift());\n }\n }\n else if left.len() > 0 {\n result.push(left.shift());\n }\n else {\n result.push(right.shift());\n }\n }\n \n return result;\n}\n\nfn main() {\n let mut bench = Benchmark::new();\n bench.run(merge_sort);\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Some tests for tokio integration<commit_after>#[cfg(feature = \"tokio-support\")]\nmod tests {\n extern crate libc;\n extern crate signal_hook;\n extern crate tokio;\n\n use self::signal_hook::iterator::Signals;\n use self::tokio::prelude::*;\n\n fn send_sig() {\n unsafe { libc::kill(libc::getpid(), libc::SIGUSR1) };\n }\n\n #[test]\n fn repeated() {\n let signals = Signals::new(&[libc::SIGUSR1])\n .unwrap()\n .async()\n .unwrap()\n .map(|sig| {\n assert_eq!(sig, libc::SIGUSR1);\n send_sig();\n })\n .map_err(|e| panic!(\"{}\", e))\n .take(20)\n .for_each(|()| Ok(()));\n send_sig();\n tokio::run(signals);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n pub fn exec(&mut self, (n, cmd): Inst) {\n \/\/ match cmd {\n\/\/ 'i' => {\n \/\/ self.cursor().mode = \n }\n}\n<commit_msg>Sorry typeck<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n pub fn exec(&mut self, Inst(n, cmd): Inst) {\n \/\/ match cmd {\n\/\/ 'i' => {\n \/\/ 
self.cursor().mode = \n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add lints<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add error kinds<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::borrow::Borrow;\nuse std::cmp::Ordering;\nuse std::convert::From;\nuse std::mem;\nuse std::ops::{RangeBounds, Bound, Index, IndexMut};\n\n\/\/\/ `SortedMap` is a data structure with similar characteristics as BTreeMap but\n\/\/\/ slightly different trade-offs: lookup, insertion, and removal are O(log(N))\n\/\/\/ and elements can be iterated in order cheaply.\n\/\/\/\n\/\/\/ `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it\n\/\/\/ stores data in a more compact way. It also supports accessing contiguous\n\/\/\/ ranges of elements as a slice, and slices of already sorted elements can be\n\/\/\/ inserted efficiently.\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug, RustcEncodable,\n RustcDecodable)]\npub struct SortedMap<K: Ord, V> {\n data: Vec<(K,V)>\n}\n\nimpl<K: Ord, V> SortedMap<K, V> {\n\n #[inline]\n pub fn new() -> SortedMap<K, V> {\n SortedMap {\n data: vec![]\n }\n }\n\n \/\/\/ Construct a `SortedMap` from a presorted set of elements. This is faster\n \/\/\/ than creating an empty map and then inserting the elements individually.\n \/\/\/\n \/\/\/ It is up to the caller to make sure that the elements are sorted by key\n \/\/\/ and that there are no duplicates.\n #[inline]\n pub fn from_presorted_elements(elements: Vec<(K, V)>) -> SortedMap<K, V>\n {\n debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));\n\n SortedMap {\n data: elements\n }\n }\n\n #[inline]\n pub fn insert(&mut self, key: K, mut value: V) -> Option<V> {\n match self.lookup_index_for(&key) {\n Ok(index) => {\n let slot = unsafe {\n self.data.get_unchecked_mut(index)\n };\n mem::swap(&mut slot.1, &mut value);\n Some(value)\n }\n Err(index) => {\n self.data.insert(index, (key, value));\n None\n }\n }\n }\n\n #[inline]\n pub fn remove(&mut self, key: &K) -> Option<V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n Some(self.data.remove(index).1)\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn get(&self, key: &K) -> Option<&V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n unsafe {\n Some(&self.data.get_unchecked(index).1)\n }\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n unsafe {\n Some(&mut self.data.get_unchecked_mut(index).1)\n }\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn clear(&mut self) {\n self.data.clear();\n }\n\n \/\/\/ Iterate over elements, sorted by key\n #[inline]\n pub fn iter(&self) -> ::std::slice::Iter<(K, V)> {\n self.data.iter()\n }\n\n \/\/\/ Iterate over the keys, sorted\n #[inline]\n pub fn keys(&self) -> impl Iterator<Item=&K> + ExactSizeIterator {\n self.data.iter().map(|&(ref k, _)| k)\n }\n\n \/\/\/ Iterate over values, sorted by key\n #[inline]\n 
pub fn values(&self) -> impl Iterator<Item=&V> + ExactSizeIterator {\n self.data.iter().map(|&(_, ref v)| v)\n }\n\n #[inline]\n pub fn len(&self) -> usize {\n self.data.len()\n }\n\n #[inline]\n pub fn range<R>(&self, range: R) -> &[(K, V)]\n where R: RangeBounds<K>\n {\n let (start, end) = self.range_slice_indices(range);\n (&self.data[start .. end])\n }\n\n #[inline]\n pub fn remove_range<R>(&mut self, range: R)\n where R: RangeBounds<K>\n {\n let (start, end) = self.range_slice_indices(range);\n self.data.splice(start .. end, ::std::iter::empty());\n }\n\n \/\/\/ Mutate all keys with the given function `f`. This mutation must not\n \/\/\/ change the sort-order of keys.\n #[inline]\n pub fn offset_keys<F>(&mut self, f: F)\n where F: Fn(&mut K)\n {\n self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f);\n }\n\n \/\/\/ Inserts a presorted range of elements into the map. If the range can be\n \/\/\/ inserted as a whole in between to existing elements of the map, this\n \/\/\/ will be faster than inserting the elements individually.\n \/\/\/\n \/\/\/ It is up to the caller to make sure that the elements are sorted by key\n \/\/\/ and that there are no duplicates.\n #[inline]\n pub fn insert_presorted(&mut self, mut elements: Vec<(K, V)>) {\n if elements.is_empty() {\n return\n }\n\n debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));\n\n let start_index = self.lookup_index_for(&elements[0].0);\n\n let drain = match start_index {\n Ok(index) => {\n let mut drain = elements.drain(..);\n self.data[index] = drain.next().unwrap();\n drain\n }\n Err(index) => {\n if index == self.data.len() ||\n elements.last().unwrap().0 < self.data[index].0 {\n \/\/ We can copy the whole range without having to mix with\n \/\/ existing elements.\n self.data.splice(index .. 
index, elements.drain(..));\n return\n }\n\n let mut drain = elements.drain(..);\n self.data.insert(index, drain.next().unwrap());\n drain\n }\n };\n\n \/\/ Insert the rest\n for (k, v) in drain {\n self.insert(k, v);\n }\n }\n\n \/\/\/ Looks up the key in `self.data` via `slice::binary_search()`.\n #[inline(always)]\n fn lookup_index_for(&self, key: &K) -> Result<usize, usize> {\n self.data.binary_search_by(|&(ref x, _)| x.cmp(key))\n }\n\n #[inline]\n fn range_slice_indices<R>(&self, range: R) -> (usize, usize)\n where R: RangeBounds<K>\n {\n let start = match range.start_bound() {\n Bound::Included(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) | Err(index) => index\n }\n }\n Bound::Excluded(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) => index + 1,\n Err(index) => index,\n }\n }\n Bound::Unbounded => 0,\n };\n\n let end = match range.end_bound() {\n Bound::Included(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) => index + 1,\n Err(index) => index,\n }\n }\n Bound::Excluded(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) | Err(index) => index,\n }\n }\n Bound::Unbounded => self.data.len(),\n };\n\n (start, end)\n }\n}\n\nimpl<K: Ord, V> IntoIterator for SortedMap<K, V> {\n type Item = (K, V);\n type IntoIter = ::std::vec::IntoIter<(K, V)>;\n fn into_iter(self) -> Self::IntoIter {\n self.data.into_iter()\n }\n}\n\nimpl<K: Ord, V, Q: Borrow<K>> Index<Q> for SortedMap<K, V> {\n type Output = V;\n fn index(&self, index: Q) -> &Self::Output {\n let k: &K = index.borrow();\n self.get(k).unwrap()\n }\n}\n\nimpl<K: Ord, V, Q: Borrow<K>> IndexMut<Q> for SortedMap<K, V> {\n fn index_mut(&mut self, index: Q) -> &mut Self::Output {\n let k: &K = index.borrow();\n self.get_mut(k).unwrap()\n }\n}\n\nimpl<K: Ord, V, I: Iterator<Item=(K, V)>> From<I> for SortedMap<K, V> {\n fn from(data: I) -> Self {\n let mut data: Vec<(K, V)> = data.collect();\n data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2));\n data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| {\n k1.cmp(k2) == Ordering::Equal\n });\n SortedMap {\n data\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::SortedMap;\n\n #[test]\n fn test_insert_and_iter() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0 .. 100 {\n assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);\n\n let x = 1000 - x * 2;\n map.insert(x, x);\n expected.insert(0, (x, x));\n }\n }\n\n #[test]\n fn test_get_and_index() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0 .. 100 {\n let x = 1000 - x;\n if x & 1 == 0 {\n map.insert(x, x);\n }\n expected.push(x);\n }\n\n for mut x in expected {\n if x & 1 == 0 {\n assert_eq!(map.get(&x), Some(&x));\n assert_eq!(map.get_mut(&x), Some(&mut x));\n assert_eq!(map[&x], x);\n assert_eq!(&mut map[&x], &mut x);\n } else {\n assert_eq!(map.get(&x), None);\n assert_eq!(map.get_mut(&x), None);\n }\n }\n }\n\n #[test]\n fn test_range() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n map.insert(9, 9);\n\n let keys = |s: &[(_, _)]| {\n s.into_iter().map(|e| e.0).collect::<Vec<u32>>()\n };\n\n for start in 0 .. 11 {\n for end in 0 .. 
11 {\n if end < start {\n continue\n }\n\n let mut expected = vec![1, 3, 6, 9];\n expected.retain(|&x| x >= start && x < end);\n\n assert_eq!(keys(map.range(start..end)), expected, \"range = {}..{}\", start, end);\n }\n }\n }\n\n\n #[test]\n fn test_offset_keys() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n\n map.offset_keys(|k| *k += 1);\n\n let mut expected = SortedMap::new();\n expected.insert(2, 1);\n expected.insert(4, 3);\n expected.insert(7, 6);\n\n assert_eq!(map, expected);\n }\n\n fn keys(s: SortedMap<u32, u32>) -> Vec<u32> {\n s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>()\n }\n\n fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> {\n s.into_iter().collect::<Vec<(u32, u32)>>()\n }\n\n #[test]\n fn test_remove_range() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n map.insert(9, 9);\n\n for start in 0 .. 11 {\n for end in 0 .. 11 {\n if end < start {\n continue\n }\n\n let mut expected = vec![1, 3, 6, 9];\n expected.retain(|&x| x < start || x >= end);\n\n let mut map = map.clone();\n map.remove_range(start .. end);\n\n assert_eq!(keys(map), expected, \"range = {}..{}\", start, end);\n }\n }\n }\n\n #[test]\n fn test_remove() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0..10 {\n map.insert(x, x);\n expected.push((x, x));\n }\n\n for x in 0 .. 10 {\n let mut map = map.clone();\n let mut expected = expected.clone();\n\n assert_eq!(map.remove(&x), Some(x));\n expected.remove(x as usize);\n\n assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);\n }\n }\n\n #[test]\n fn test_insert_presorted_non_overlapping() {\n let mut map = SortedMap::new();\n map.insert(2, 0);\n map.insert(8, 0);\n\n map.insert_presorted(vec![(3, 0), (7, 0)]);\n\n let expected = vec![2, 3, 7, 8];\n assert_eq!(keys(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_first_elem_equal() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(8, 8);\n\n map.insert_presorted(vec![(2, 0), (7, 7)]);\n\n let expected = vec![(2, 0), (7, 7), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_last_elem_equal() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(8, 8);\n\n map.insert_presorted(vec![(3, 3), (8, 0)]);\n\n let expected = vec![(2, 2), (3, 3), (8, 0)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_shuffle() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(7, 7);\n\n map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]);\n\n let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_at_end() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(2, 2);\n\n map.insert_presorted(vec![(3, 3), (8, 8)]);\n\n let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n}\n<commit_msg>sorted_map: change From<Iterator<I>> to FromIterator<I><commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::borrow::Borrow;\nuse std::cmp::Ordering;\nuse std::iter::FromIterator;\nuse std::mem;\nuse std::ops::{RangeBounds, Bound, Index, IndexMut};\n\n\/\/\/ `SortedMap` is a data structure with similar characteristics as BTreeMap but\n\/\/\/ slightly different trade-offs: lookup, insertion, and removal are O(log(N))\n\/\/\/ and elements can be iterated in order cheaply.\n\/\/\/\n\/\/\/ `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it\n\/\/\/ stores data in a more compact way. It also supports accessing contiguous\n\/\/\/ ranges of elements as a slice, and slices of already sorted elements can be\n\/\/\/ inserted efficiently.\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug, RustcEncodable,\n RustcDecodable)]\npub struct SortedMap<K: Ord, V> {\n data: Vec<(K,V)>\n}\n\nimpl<K: Ord, V> SortedMap<K, V> {\n\n #[inline]\n pub fn new() -> SortedMap<K, V> {\n SortedMap {\n data: vec![]\n }\n }\n\n \/\/\/ Construct a `SortedMap` from a presorted set of elements. This is faster\n \/\/\/ than creating an empty map and then inserting the elements individually.\n \/\/\/\n \/\/\/ It is up to the caller to make sure that the elements are sorted by key\n \/\/\/ and that there are no duplicates.\n #[inline]\n pub fn from_presorted_elements(elements: Vec<(K, V)>) -> SortedMap<K, V>\n {\n debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));\n\n SortedMap {\n data: elements\n }\n }\n\n #[inline]\n pub fn insert(&mut self, key: K, mut value: V) -> Option<V> {\n match self.lookup_index_for(&key) {\n Ok(index) => {\n let slot = unsafe {\n self.data.get_unchecked_mut(index)\n };\n mem::swap(&mut slot.1, &mut value);\n Some(value)\n }\n Err(index) => {\n self.data.insert(index, (key, value));\n None\n }\n }\n }\n\n #[inline]\n pub fn remove(&mut self, key: &K) -> Option<V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n Some(self.data.remove(index).1)\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn get(&self, key: &K) -> Option<&V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n unsafe {\n Some(&self.data.get_unchecked(index).1)\n }\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {\n match self.lookup_index_for(key) {\n Ok(index) => {\n unsafe {\n Some(&mut self.data.get_unchecked_mut(index).1)\n }\n }\n Err(_) => {\n None\n }\n }\n }\n\n #[inline]\n pub fn clear(&mut self) {\n self.data.clear();\n }\n\n \/\/\/ Iterate over elements, sorted by key\n #[inline]\n pub fn iter(&self) -> ::std::slice::Iter<(K, V)> {\n self.data.iter()\n }\n\n \/\/\/ Iterate over the keys, sorted\n #[inline]\n pub fn keys(&self) -> impl Iterator<Item=&K> + ExactSizeIterator {\n self.data.iter().map(|&(ref k, _)| k)\n }\n\n \/\/\/ Iterate over values, sorted by key\n #[inline]\n pub fn values(&self) -> impl Iterator<Item=&V> + ExactSizeIterator {\n self.data.iter().map(|&(_, ref v)| v)\n }\n\n #[inline]\n pub fn len(&self) -> usize {\n self.data.len()\n }\n\n #[inline]\n pub fn range<R>(&self, range: R) -> &[(K, V)]\n where R: RangeBounds<K>\n {\n let (start, end) = self.range_slice_indices(range);\n (&self.data[start .. end])\n }\n\n #[inline]\n pub fn remove_range<R>(&mut self, range: R)\n where R: RangeBounds<K>\n {\n let (start, end) = self.range_slice_indices(range);\n self.data.splice(start .. end, ::std::iter::empty());\n }\n\n \/\/\/ Mutate all keys with the given function `f`. 
This mutation must not\n \/\/\/ change the sort-order of keys.\n #[inline]\n pub fn offset_keys<F>(&mut self, f: F)\n where F: Fn(&mut K)\n {\n self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f);\n }\n\n \/\/\/ Inserts a presorted range of elements into the map. If the range can be\n \/\/\/ inserted as a whole in between to existing elements of the map, this\n \/\/\/ will be faster than inserting the elements individually.\n \/\/\/\n \/\/\/ It is up to the caller to make sure that the elements are sorted by key\n \/\/\/ and that there are no duplicates.\n #[inline]\n pub fn insert_presorted(&mut self, mut elements: Vec<(K, V)>) {\n if elements.is_empty() {\n return\n }\n\n debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));\n\n let start_index = self.lookup_index_for(&elements[0].0);\n\n let drain = match start_index {\n Ok(index) => {\n let mut drain = elements.drain(..);\n self.data[index] = drain.next().unwrap();\n drain\n }\n Err(index) => {\n if index == self.data.len() ||\n elements.last().unwrap().0 < self.data[index].0 {\n \/\/ We can copy the whole range without having to mix with\n \/\/ existing elements.\n self.data.splice(index .. index, elements.drain(..));\n return\n }\n\n let mut drain = elements.drain(..);\n self.data.insert(index, drain.next().unwrap());\n drain\n }\n };\n\n \/\/ Insert the rest\n for (k, v) in drain {\n self.insert(k, v);\n }\n }\n\n \/\/\/ Looks up the key in `self.data` via `slice::binary_search()`.\n #[inline(always)]\n fn lookup_index_for(&self, key: &K) -> Result<usize, usize> {\n self.data.binary_search_by(|&(ref x, _)| x.cmp(key))\n }\n\n #[inline]\n fn range_slice_indices<R>(&self, range: R) -> (usize, usize)\n where R: RangeBounds<K>\n {\n let start = match range.start_bound() {\n Bound::Included(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) | Err(index) => index\n }\n }\n Bound::Excluded(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) => index + 1,\n Err(index) => index,\n }\n }\n Bound::Unbounded => 0,\n };\n\n let end = match range.end_bound() {\n Bound::Included(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) => index + 1,\n Err(index) => index,\n }\n }\n Bound::Excluded(ref k) => {\n match self.lookup_index_for(k) {\n Ok(index) | Err(index) => index,\n }\n }\n Bound::Unbounded => self.data.len(),\n };\n\n (start, end)\n }\n}\n\nimpl<K: Ord, V> IntoIterator for SortedMap<K, V> {\n type Item = (K, V);\n type IntoIter = ::std::vec::IntoIter<(K, V)>;\n fn into_iter(self) -> Self::IntoIter {\n self.data.into_iter()\n }\n}\n\nimpl<K: Ord, V, Q: Borrow<K>> Index<Q> for SortedMap<K, V> {\n type Output = V;\n fn index(&self, index: Q) -> &Self::Output {\n let k: &K = index.borrow();\n self.get(k).unwrap()\n }\n}\n\nimpl<K: Ord, V, Q: Borrow<K>> IndexMut<Q> for SortedMap<K, V> {\n fn index_mut(&mut self, index: Q) -> &mut Self::Output {\n let k: &K = index.borrow();\n self.get_mut(k).unwrap()\n }\n}\n\nimpl<K: Ord, V> FromIterator<(K, V)> for SortedMap<K, V> {\n fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {\n let mut data: Vec<(K, V)> = iter.into_iter().collect();\n data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2));\n data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| {\n k1.cmp(k2) == Ordering::Equal\n });\n SortedMap {\n data\n }\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::SortedMap;\n\n #[test]\n fn test_insert_and_iter() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0 .. 
100 {\n assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);\n\n let x = 1000 - x * 2;\n map.insert(x, x);\n expected.insert(0, (x, x));\n }\n }\n\n #[test]\n fn test_get_and_index() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0 .. 100 {\n let x = 1000 - x;\n if x & 1 == 0 {\n map.insert(x, x);\n }\n expected.push(x);\n }\n\n for mut x in expected {\n if x & 1 == 0 {\n assert_eq!(map.get(&x), Some(&x));\n assert_eq!(map.get_mut(&x), Some(&mut x));\n assert_eq!(map[&x], x);\n assert_eq!(&mut map[&x], &mut x);\n } else {\n assert_eq!(map.get(&x), None);\n assert_eq!(map.get_mut(&x), None);\n }\n }\n }\n\n #[test]\n fn test_range() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n map.insert(9, 9);\n\n let keys = |s: &[(_, _)]| {\n s.into_iter().map(|e| e.0).collect::<Vec<u32>>()\n };\n\n for start in 0 .. 11 {\n for end in 0 .. 11 {\n if end < start {\n continue\n }\n\n let mut expected = vec![1, 3, 6, 9];\n expected.retain(|&x| x >= start && x < end);\n\n assert_eq!(keys(map.range(start..end)), expected, \"range = {}..{}\", start, end);\n }\n }\n }\n\n\n #[test]\n fn test_offset_keys() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n\n map.offset_keys(|k| *k += 1);\n\n let mut expected = SortedMap::new();\n expected.insert(2, 1);\n expected.insert(4, 3);\n expected.insert(7, 6);\n\n assert_eq!(map, expected);\n }\n\n fn keys(s: SortedMap<u32, u32>) -> Vec<u32> {\n s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>()\n }\n\n fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> {\n s.into_iter().collect::<Vec<(u32, u32)>>()\n }\n\n #[test]\n fn test_remove_range() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(3, 3);\n map.insert(6, 6);\n map.insert(9, 9);\n\n for start in 0 .. 11 {\n for end in 0 .. 11 {\n if end < start {\n continue\n }\n\n let mut expected = vec![1, 3, 6, 9];\n expected.retain(|&x| x < start || x >= end);\n\n let mut map = map.clone();\n map.remove_range(start .. end);\n\n assert_eq!(keys(map), expected, \"range = {}..{}\", start, end);\n }\n }\n }\n\n #[test]\n fn test_remove() {\n let mut map = SortedMap::new();\n let mut expected = Vec::new();\n\n for x in 0..10 {\n map.insert(x, x);\n expected.push((x, x));\n }\n\n for x in 0 .. 
10 {\n let mut map = map.clone();\n let mut expected = expected.clone();\n\n assert_eq!(map.remove(&x), Some(x));\n expected.remove(x as usize);\n\n assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);\n }\n }\n\n #[test]\n fn test_insert_presorted_non_overlapping() {\n let mut map = SortedMap::new();\n map.insert(2, 0);\n map.insert(8, 0);\n\n map.insert_presorted(vec![(3, 0), (7, 0)]);\n\n let expected = vec![2, 3, 7, 8];\n assert_eq!(keys(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_first_elem_equal() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(8, 8);\n\n map.insert_presorted(vec![(2, 0), (7, 7)]);\n\n let expected = vec![(2, 0), (7, 7), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_last_elem_equal() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(8, 8);\n\n map.insert_presorted(vec![(3, 3), (8, 0)]);\n\n let expected = vec![(2, 2), (3, 3), (8, 0)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_shuffle() {\n let mut map = SortedMap::new();\n map.insert(2, 2);\n map.insert(7, 7);\n\n map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]);\n\n let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n\n #[test]\n fn test_insert_presorted_at_end() {\n let mut map = SortedMap::new();\n map.insert(1, 1);\n map.insert(2, 2);\n\n map.insert_presorted(vec![(3, 3), (8, 8)]);\n\n let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)];\n assert_eq!(elements(map), expected);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData};\nuse crate::core::profiles::Profiles;\nuse crate::core::{PackageIdSpec, TargetKind, Workspace};\nuse crate::ops;\nuse crate::util::errors::CargoResult;\nuse crate::util::interning::InternedString;\nuse crate::util::lev_distance;\nuse crate::util::{Config, Progress, ProgressStyle};\n\nuse anyhow::Context as _;\nuse cargo_util::paths;\nuse std::fs;\nuse std::path::Path;\n\npub struct CleanOptions<'a> {\n pub config: &'a Config,\n \/\/\/ A list of packages to clean. If empty, everything is cleaned.\n pub spec: Vec<String>,\n \/\/\/ The target arch triple to clean, or None for the host arch\n pub targets: Vec<String>,\n \/\/\/ Whether to clean the release directory\n pub profile_specified: bool,\n \/\/\/ Whether to clean the directory of a certain build profile\n pub requested_profile: InternedString,\n \/\/\/ Whether to just clean the doc directory\n pub doc: bool,\n}\n\n\/\/\/ Cleans the package's build artifacts.\npub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {\n let mut target_dir = ws.target_dir();\n let config = ws.config();\n\n \/\/ If the doc option is set, we just want to delete the doc directory.\n if opts.doc {\n target_dir = target_dir.join(\"doc\");\n return clean_entire_folder(&target_dir.into_path_unlocked(), config);\n }\n\n let profiles = Profiles::new(ws, opts.requested_profile)?;\n\n if opts.profile_specified {\n \/\/ After parsing profiles we know the dir-name of the profile, if a profile\n \/\/ was passed from the command line. 
If so, delete only the directory of\n \/\/ that profile.\n let dir_name = profiles.get_dir_name();\n target_dir = target_dir.join(dir_name);\n }\n\n \/\/ If we have a spec, then we need to delete some packages, otherwise, just\n \/\/ remove the whole target directory and be done with it!\n \/\/\n \/\/ Note that we don't bother grabbing a lock here as we're just going to\n \/\/ blow it all away anyway.\n if opts.spec.is_empty() {\n return clean_entire_folder(&target_dir.into_path_unlocked(), config);\n }\n\n \/\/ Clean specific packages.\n let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?;\n let target_data = RustcTargetData::new(ws, &requested_kinds)?;\n let (pkg_set, resolve) = ops::resolve_ws(ws)?;\n let prof_dir_name = profiles.get_dir_name();\n let host_layout = Layout::new(ws, None, &prof_dir_name)?;\n \/\/ Convert requested kinds to a Vec of layouts.\n let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds\n .into_iter()\n .filter_map(|kind| match kind {\n CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) {\n Ok(layout) => Some(Ok((kind, layout))),\n Err(e) => Some(Err(e)),\n },\n CompileKind::Host => None,\n })\n .collect::<CargoResult<_>>()?;\n \/\/ A Vec of layouts. This is a little convoluted because there can only be\n \/\/ one host_layout.\n let layouts = if opts.targets.is_empty() {\n vec![(CompileKind::Host, &host_layout)]\n } else {\n target_layouts\n .iter()\n .map(|(kind, layout)| (*kind, layout))\n .collect()\n };\n \/\/ Create a Vec that also includes the host for things that need to clean both.\n let layouts_with_host: Vec<(CompileKind, &Layout)> =\n std::iter::once((CompileKind::Host, &host_layout))\n .chain(layouts.iter().map(|(k, l)| (*k, *l)))\n .collect();\n\n \/\/ Cleaning individual rustdoc crates is currently not supported.\n \/\/ For example, the search index would need to be rebuilt to fully\n \/\/ remove it (otherwise you're left with lots of broken links).\n \/\/ Doc tests produce no output.\n\n \/\/ Get Packages for the specified specs.\n let mut pkg_ids = Vec::new();\n for spec_str in opts.spec.iter() {\n \/\/ Translate the spec to a Package.\n let spec = PackageIdSpec::parse(spec_str)?;\n if spec.version().is_some() {\n config.shell().warn(&format!(\n \"version qualifier in `-p {}` is ignored, \\\n cleaning all versions of `{}` found\",\n spec_str,\n spec.name()\n ))?;\n }\n if spec.url().is_some() {\n config.shell().warn(&format!(\n \"url qualifier in `-p {}` ignored, \\\n cleaning all versions of `{}` found\",\n spec_str,\n spec.name()\n ))?;\n }\n let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect();\n if matches.is_empty() {\n let mut suggestion = String::new();\n suggestion.push_str(&lev_distance::closest_msg(\n &spec.name(),\n resolve.iter(),\n |id| id.name().as_str(),\n ));\n anyhow::bail!(\n \"package ID specification `{}` did not match any packages{}\",\n spec,\n suggestion\n );\n }\n pkg_ids.extend(matches);\n }\n let packages = pkg_set.get_many(pkg_ids)?;\n\n let mut progress = CleaningPackagesBar::new(config, packages.len());\n for pkg in packages {\n let pkg_dir = format!(\"{}-*\", pkg.name());\n progress.on_cleaning_package(&pkg.name())?;\n\n \/\/ Clean fingerprints.\n for (_, layout) in &layouts_with_host {\n let dir = escape_glob_path(layout.fingerprint())?;\n rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config, &mut progress)?;\n }\n\n for target in pkg.targets() {\n if target.is_custom_build() {\n \/\/ Get both the build_script_build and 
the output directory.\n for (_, layout) in &layouts_with_host {\n let dir = escape_glob_path(layout.build())?;\n rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config, &mut progress)?;\n }\n continue;\n }\n let crate_name = target.crate_name();\n for &mode in &[\n CompileMode::Build,\n CompileMode::Test,\n CompileMode::Check { test: false },\n ] {\n for (compile_kind, layout) in &layouts {\n let triple = target_data.short_name(compile_kind);\n\n let (file_types, _unsupported) = target_data\n .info(*compile_kind)\n .rustc_outputs(mode, target.kind(), triple)?;\n let (dir, uplift_dir) = match target.kind() {\n TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {\n (layout.examples(), Some(layout.examples()))\n }\n \/\/ Tests\/benchmarks are never uplifted.\n TargetKind::Test | TargetKind::Bench => (layout.deps(), None),\n _ => (layout.deps(), Some(layout.dest())),\n };\n for file_type in file_types {\n \/\/ Some files include a hash in the filename, some don't.\n let hashed_name = file_type.output_filename(target, Some(\"*\"));\n let unhashed_name = file_type.output_filename(target, None);\n let dir_glob = escape_glob_path(dir)?;\n let dir_glob = Path::new(&dir_glob);\n\n rm_rf_glob(&dir_glob.join(&hashed_name), config, &mut progress)?;\n rm_rf(&dir.join(&unhashed_name), config, &mut progress)?;\n \/\/ Remove dep-info file generated by rustc. It is not tracked in\n \/\/ file_types. It does not have a prefix.\n let hashed_dep_info = dir_glob.join(format!(\"{}-*.d\", crate_name));\n rm_rf_glob(&hashed_dep_info, config, &mut progress)?;\n let unhashed_dep_info = dir.join(format!(\"{}.d\", crate_name));\n rm_rf(&unhashed_dep_info, config, &mut progress)?;\n \/\/ Remove split-debuginfo files generated by rustc.\n let split_debuginfo_obj = dir_glob.join(format!(\"{}.*.o\", crate_name));\n rm_rf_glob(&split_debuginfo_obj, config, &mut progress)?;\n let split_debuginfo_dwo = dir_glob.join(format!(\"{}.*.dwo\", crate_name));\n rm_rf_glob(&split_debuginfo_dwo, config, &mut progress)?;\n\n \/\/ Remove the uplifted copy.\n if let Some(uplift_dir) = uplift_dir {\n let uplifted_path = uplift_dir.join(file_type.uplift_filename(target));\n rm_rf(&uplifted_path, config, &mut progress)?;\n \/\/ Dep-info generated by Cargo itself.\n let dep_info = uplifted_path.with_extension(\"d\");\n rm_rf(&dep_info, config, &mut progress)?;\n }\n }\n \/\/ TODO: what to do about build_script_build?\n let dir = escape_glob_path(layout.incremental())?;\n let incremental = Path::new(&dir).join(format!(\"{}-*\", crate_name));\n rm_rf_glob(&incremental, config, &mut progress)?;\n }\n }\n }\n }\n\n Ok(())\n}\n\nfn escape_glob_path(pattern: &Path) -> CargoResult<String> {\n let pattern = pattern\n .to_str()\n .ok_or_else(|| anyhow::anyhow!(\"expected utf-8 path\"))?;\n Ok(glob::Pattern::escape(pattern))\n}\n\nfn rm_rf_glob(\n pattern: &Path,\n config: &Config,\n progress: &mut dyn CleaningProgressBar,\n) -> CargoResult<()> {\n \/\/ TODO: Display utf8 warning to user? Or switch to globset?\n let pattern = pattern\n .to_str()\n .ok_or_else(|| anyhow::anyhow!(\"expected utf-8 path\"))?;\n for path in glob::glob(pattern)? 
{\n rm_rf(&path?, config, progress)?;\n }\n Ok(())\n}\n\nfn rm_rf(path: &Path, config: &Config, progress: &mut dyn CleaningProgressBar) -> CargoResult<()> {\n if fs::symlink_metadata(path).is_err() {\n return Ok(());\n }\n\n config\n .shell()\n .verbose(|shell| shell.status(\"Removing\", path.display()))?;\n progress.display_now()?;\n\n for entry in walkdir::WalkDir::new(path).contents_first(true) {\n let entry = entry?;\n progress.on_clean()?;\n if entry.file_type().is_dir() {\n paths::remove_dir(entry.path()).with_context(|| \"could not remove build directory\")?;\n } else {\n paths::remove_file(entry.path()).with_context(|| \"failed to remove build artifact\")?;\n }\n }\n\n Ok(())\n}\n\nfn clean_entire_folder(path: &Path, config: &Config) -> CargoResult<()> {\n let num_paths = walkdir::WalkDir::new(path).into_iter().count();\n let mut progress = CleaningFolderBar::new(config, num_paths);\n rm_rf(path, config, &mut progress)\n}\n\ntrait CleaningProgressBar {\n fn display_now(&mut self) -> CargoResult<()>;\n fn on_clean(&mut self) -> CargoResult<()>;\n}\n\nstruct CleaningFolderBar<'cfg> {\n bar: Progress<'cfg>,\n max: usize,\n cur: usize,\n}\n\nimpl<'cfg> CleaningFolderBar<'cfg> {\n fn new(cfg: &'cfg Config, max: usize) -> Self {\n Self {\n bar: Progress::with_style(\"Cleaning\", ProgressStyle::Percentage, cfg),\n max,\n cur: 0,\n }\n }\n\n fn cur_progress(&self) -> usize {\n std::cmp::min(self.cur, self.max)\n }\n}\n\nimpl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> {\n fn display_now(&mut self) -> CargoResult<()> {\n self.bar.tick_now(self.cur_progress(), self.max, \"\")\n }\n\n fn on_clean(&mut self) -> CargoResult<()> {\n self.cur += 1;\n self.bar.tick(self.cur_progress(), self.max, \"\")\n }\n}\n\nstruct CleaningPackagesBar<'cfg> {\n bar: Progress<'cfg>,\n max: usize,\n cur: usize,\n num_files_folders_cleaned: usize,\n package_being_cleaned: String,\n}\n\nimpl<'cfg> CleaningPackagesBar<'cfg> {\n fn new(cfg: &'cfg Config, max: usize) -> Self {\n Self {\n bar: Progress::with_style(\"Cleaning\", ProgressStyle::Ratio, cfg),\n max,\n cur: 0,\n num_files_folders_cleaned: 0,\n package_being_cleaned: String::new(),\n }\n }\n\n fn on_cleaning_package(&mut self, package: &str) -> CargoResult<()> {\n self.cur += 1;\n self.package_being_cleaned = String::from(package);\n self.bar\n .tick(self.cur_progress(), self.max, &self.format_message())\n }\n\n fn cur_progress(&self) -> usize {\n std::cmp::min(self.cur, self.max)\n }\n\n fn format_message(&self) -> String {\n format!(\n \": {}, {} files\/folders cleaned\",\n self.package_being_cleaned, self.num_files_folders_cleaned\n )\n }\n}\n\nimpl<'cfg> CleaningProgressBar for CleaningPackagesBar<'cfg> {\n fn display_now(&mut self) -> CargoResult<()> {\n self.bar\n .tick_now(self.cur_progress(), self.max, &self.format_message())\n }\n\n fn on_clean(&mut self) -> CargoResult<()> {\n self.bar\n .tick(self.cur_progress(), self.max, &self.format_message())?;\n self.num_files_folders_cleaned += 1;\n Ok(())\n }\n}\n<commit_msg>Only remove fingerprints and build script artifacts of the requested package<commit_after>use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData};\nuse crate::core::profiles::Profiles;\nuse crate::core::{PackageIdSpec, TargetKind, Workspace};\nuse crate::ops;\nuse crate::util::errors::CargoResult;\nuse crate::util::interning::InternedString;\nuse crate::util::lev_distance;\nuse crate::util::{Config, Progress, ProgressStyle};\n\nuse anyhow::Context as _;\nuse cargo_util::paths;\nuse std::fs;\nuse 
std::path::Path;\n\npub struct CleanOptions<'a> {\n pub config: &'a Config,\n \/\/\/ A list of packages to clean. If empty, everything is cleaned.\n pub spec: Vec<String>,\n \/\/\/ The target arch triple to clean, or None for the host arch\n pub targets: Vec<String>,\n \/\/\/ Whether to clean the release directory\n pub profile_specified: bool,\n \/\/\/ Whether to clean the directory of a certain build profile\n pub requested_profile: InternedString,\n \/\/\/ Whether to just clean the doc directory\n pub doc: bool,\n}\n\n\/\/\/ Cleans the package's build artifacts.\npub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {\n let mut target_dir = ws.target_dir();\n let config = ws.config();\n\n \/\/ If the doc option is set, we just want to delete the doc directory.\n if opts.doc {\n target_dir = target_dir.join(\"doc\");\n return clean_entire_folder(&target_dir.into_path_unlocked(), config);\n }\n\n let profiles = Profiles::new(ws, opts.requested_profile)?;\n\n if opts.profile_specified {\n \/\/ After parsing profiles we know the dir-name of the profile, if a profile\n \/\/ was passed from the command line. If so, delete only the directory of\n \/\/ that profile.\n let dir_name = profiles.get_dir_name();\n target_dir = target_dir.join(dir_name);\n }\n\n \/\/ If we have a spec, then we need to delete some packages, otherwise, just\n \/\/ remove the whole target directory and be done with it!\n \/\/\n \/\/ Note that we don't bother grabbing a lock here as we're just going to\n \/\/ blow it all away anyway.\n if opts.spec.is_empty() {\n return clean_entire_folder(&target_dir.into_path_unlocked(), config);\n }\n\n \/\/ Clean specific packages.\n let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?;\n let target_data = RustcTargetData::new(ws, &requested_kinds)?;\n let (pkg_set, resolve) = ops::resolve_ws(ws)?;\n let prof_dir_name = profiles.get_dir_name();\n let host_layout = Layout::new(ws, None, &prof_dir_name)?;\n \/\/ Convert requested kinds to a Vec of layouts.\n let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds\n .into_iter()\n .filter_map(|kind| match kind {\n CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) {\n Ok(layout) => Some(Ok((kind, layout))),\n Err(e) => Some(Err(e)),\n },\n CompileKind::Host => None,\n })\n .collect::<CargoResult<_>>()?;\n \/\/ A Vec of layouts. 
This is a little convoluted because there can only be\n \/\/ one host_layout.\n let layouts = if opts.targets.is_empty() {\n vec![(CompileKind::Host, &host_layout)]\n } else {\n target_layouts\n .iter()\n .map(|(kind, layout)| (*kind, layout))\n .collect()\n };\n \/\/ Create a Vec that also includes the host for things that need to clean both.\n let layouts_with_host: Vec<(CompileKind, &Layout)> =\n std::iter::once((CompileKind::Host, &host_layout))\n .chain(layouts.iter().map(|(k, l)| (*k, *l)))\n .collect();\n\n \/\/ Cleaning individual rustdoc crates is currently not supported.\n \/\/ For example, the search index would need to be rebuilt to fully\n \/\/ remove it (otherwise you're left with lots of broken links).\n \/\/ Doc tests produce no output.\n\n \/\/ Get Packages for the specified specs.\n let mut pkg_ids = Vec::new();\n for spec_str in opts.spec.iter() {\n \/\/ Translate the spec to a Package.\n let spec = PackageIdSpec::parse(spec_str)?;\n if spec.version().is_some() {\n config.shell().warn(&format!(\n \"version qualifier in `-p {}` is ignored, \\\n cleaning all versions of `{}` found\",\n spec_str,\n spec.name()\n ))?;\n }\n if spec.url().is_some() {\n config.shell().warn(&format!(\n \"url qualifier in `-p {}` ignored, \\\n cleaning all versions of `{}` found\",\n spec_str,\n spec.name()\n ))?;\n }\n let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect();\n if matches.is_empty() {\n let mut suggestion = String::new();\n suggestion.push_str(&lev_distance::closest_msg(\n &spec.name(),\n resolve.iter(),\n |id| id.name().as_str(),\n ));\n anyhow::bail!(\n \"package ID specification `{}` did not match any packages{}\",\n spec,\n suggestion\n );\n }\n pkg_ids.extend(matches);\n }\n let packages = pkg_set.get_many(pkg_ids)?;\n\n let mut progress = CleaningPackagesBar::new(config, packages.len());\n for pkg in packages {\n let pkg_dir = format!(\"{}-*\", pkg.name());\n progress.on_cleaning_package(&pkg.name())?;\n\n \/\/ Clean fingerprints.\n for (_, layout) in &layouts_with_host {\n let dir = escape_glob_path(layout.fingerprint())?;\n rm_rf_package_glob_containing_hash(\n &pkg.name(),\n &Path::new(&dir).join(&pkg_dir),\n config,\n &mut progress,\n )?;\n }\n\n for target in pkg.targets() {\n if target.is_custom_build() {\n \/\/ Get both the build_script_build and the output directory.\n for (_, layout) in &layouts_with_host {\n let dir = escape_glob_path(layout.build())?;\n rm_rf_package_glob_containing_hash(\n &pkg.name(),\n &Path::new(&dir).join(&pkg_dir),\n config,\n &mut progress,\n )?;\n }\n continue;\n }\n let crate_name = target.crate_name();\n for &mode in &[\n CompileMode::Build,\n CompileMode::Test,\n CompileMode::Check { test: false },\n ] {\n for (compile_kind, layout) in &layouts {\n let triple = target_data.short_name(compile_kind);\n\n let (file_types, _unsupported) = target_data\n .info(*compile_kind)\n .rustc_outputs(mode, target.kind(), triple)?;\n let (dir, uplift_dir) = match target.kind() {\n TargetKind::ExampleBin | TargetKind::ExampleLib(..) 
=> {\n (layout.examples(), Some(layout.examples()))\n }\n \/\/ Tests\/benchmarks are never uplifted.\n TargetKind::Test | TargetKind::Bench => (layout.deps(), None),\n _ => (layout.deps(), Some(layout.dest())),\n };\n for file_type in file_types {\n \/\/ Some files include a hash in the filename, some don't.\n let hashed_name = file_type.output_filename(target, Some(\"*\"));\n let unhashed_name = file_type.output_filename(target, None);\n let dir_glob = escape_glob_path(dir)?;\n let dir_glob = Path::new(&dir_glob);\n\n rm_rf_glob(&dir_glob.join(&hashed_name), config, &mut progress)?;\n rm_rf(&dir.join(&unhashed_name), config, &mut progress)?;\n \/\/ Remove dep-info file generated by rustc. It is not tracked in\n \/\/ file_types. It does not have a prefix.\n let hashed_dep_info = dir_glob.join(format!(\"{}-*.d\", crate_name));\n rm_rf_glob(&hashed_dep_info, config, &mut progress)?;\n let unhashed_dep_info = dir.join(format!(\"{}.d\", crate_name));\n rm_rf(&unhashed_dep_info, config, &mut progress)?;\n \/\/ Remove split-debuginfo files generated by rustc.\n let split_debuginfo_obj = dir_glob.join(format!(\"{}.*.o\", crate_name));\n rm_rf_glob(&split_debuginfo_obj, config, &mut progress)?;\n let split_debuginfo_dwo = dir_glob.join(format!(\"{}.*.dwo\", crate_name));\n rm_rf_glob(&split_debuginfo_dwo, config, &mut progress)?;\n\n \/\/ Remove the uplifted copy.\n if let Some(uplift_dir) = uplift_dir {\n let uplifted_path = uplift_dir.join(file_type.uplift_filename(target));\n rm_rf(&uplifted_path, config, &mut progress)?;\n \/\/ Dep-info generated by Cargo itself.\n let dep_info = uplifted_path.with_extension(\"d\");\n rm_rf(&dep_info, config, &mut progress)?;\n }\n }\n \/\/ TODO: what to do about build_script_build?\n let dir = escape_glob_path(layout.incremental())?;\n let incremental = Path::new(&dir).join(format!(\"{}-*\", crate_name));\n rm_rf_glob(&incremental, config, &mut progress)?;\n }\n }\n }\n }\n\n Ok(())\n}\n\nfn escape_glob_path(pattern: &Path) -> CargoResult<String> {\n let pattern = pattern\n .to_str()\n .ok_or_else(|| anyhow::anyhow!(\"expected utf-8 path\"))?;\n Ok(glob::Pattern::escape(pattern))\n}\n\nfn rm_rf_package_glob_containing_hash(\n package: &str,\n pattern: &Path,\n config: &Config,\n progress: &mut dyn CleaningProgressBar,\n) -> CargoResult<()> {\n rm_rf_glob_helper(Some(package), pattern, config, progress)\n}\n\nfn rm_rf_glob(\n pattern: &Path,\n config: &Config,\n progress: &mut dyn CleaningProgressBar,\n) -> CargoResult<()> {\n rm_rf_glob_helper(None, pattern, config, progress)\n}\n\nfn rm_rf_glob_helper(\n package: Option<&str>,\n pattern: &Path,\n config: &Config,\n progress: &mut dyn CleaningProgressBar,\n) -> CargoResult<()> {\n \/\/ TODO: Display utf8 warning to user? Or switch to globset?\n let pattern = pattern\n .to_str()\n .ok_or_else(|| anyhow::anyhow!(\"expected utf-8 path\"))?;\n for path in glob::glob(pattern)? 
{\n let path = path?;\n\n \/\/ Make sure the artifact is for `package` and not another crate that is prefixed by\n \/\/ `package` by getting the original name stripped of the trialing hash and possible\n \/\/ extension\n if let Some(package) = package {\n let pkg_name = path\n .file_name()\n .and_then(std::ffi::OsStr::to_str)\n .and_then(|artifact| artifact.rsplit_once('-'))\n .expect(\"artifact name is valid UTF-8 and contains at least one hyphen\")\n .0;\n\n if pkg_name != package {\n continue;\n }\n }\n\n rm_rf(&path, config, progress)?;\n }\n Ok(())\n}\n\nfn rm_rf(path: &Path, config: &Config, progress: &mut dyn CleaningProgressBar) -> CargoResult<()> {\n if fs::symlink_metadata(path).is_err() {\n return Ok(());\n }\n\n config\n .shell()\n .verbose(|shell| shell.status(\"Removing\", path.display()))?;\n progress.display_now()?;\n\n for entry in walkdir::WalkDir::new(path).contents_first(true) {\n let entry = entry?;\n progress.on_clean()?;\n if entry.file_type().is_dir() {\n paths::remove_dir(entry.path()).with_context(|| \"could not remove build directory\")?;\n } else {\n paths::remove_file(entry.path()).with_context(|| \"failed to remove build artifact\")?;\n }\n }\n\n Ok(())\n}\n\nfn clean_entire_folder(path: &Path, config: &Config) -> CargoResult<()> {\n let num_paths = walkdir::WalkDir::new(path).into_iter().count();\n let mut progress = CleaningFolderBar::new(config, num_paths);\n rm_rf(path, config, &mut progress)\n}\n\ntrait CleaningProgressBar {\n fn display_now(&mut self) -> CargoResult<()>;\n fn on_clean(&mut self) -> CargoResult<()>;\n}\n\nstruct CleaningFolderBar<'cfg> {\n bar: Progress<'cfg>,\n max: usize,\n cur: usize,\n}\n\nimpl<'cfg> CleaningFolderBar<'cfg> {\n fn new(cfg: &'cfg Config, max: usize) -> Self {\n Self {\n bar: Progress::with_style(\"Cleaning\", ProgressStyle::Percentage, cfg),\n max,\n cur: 0,\n }\n }\n\n fn cur_progress(&self) -> usize {\n std::cmp::min(self.cur, self.max)\n }\n}\n\nimpl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> {\n fn display_now(&mut self) -> CargoResult<()> {\n self.bar.tick_now(self.cur_progress(), self.max, \"\")\n }\n\n fn on_clean(&mut self) -> CargoResult<()> {\n self.cur += 1;\n self.bar.tick(self.cur_progress(), self.max, \"\")\n }\n}\n\nstruct CleaningPackagesBar<'cfg> {\n bar: Progress<'cfg>,\n max: usize,\n cur: usize,\n num_files_folders_cleaned: usize,\n package_being_cleaned: String,\n}\n\nimpl<'cfg> CleaningPackagesBar<'cfg> {\n fn new(cfg: &'cfg Config, max: usize) -> Self {\n Self {\n bar: Progress::with_style(\"Cleaning\", ProgressStyle::Ratio, cfg),\n max,\n cur: 0,\n num_files_folders_cleaned: 0,\n package_being_cleaned: String::new(),\n }\n }\n\n fn on_cleaning_package(&mut self, package: &str) -> CargoResult<()> {\n self.cur += 1;\n self.package_being_cleaned = String::from(package);\n self.bar\n .tick(self.cur_progress(), self.max, &self.format_message())\n }\n\n fn cur_progress(&self) -> usize {\n std::cmp::min(self.cur, self.max)\n }\n\n fn format_message(&self) -> String {\n format!(\n \": {}, {} files\/folders cleaned\",\n self.package_being_cleaned, self.num_files_folders_cleaned\n )\n }\n}\n\nimpl<'cfg> CleaningProgressBar for CleaningPackagesBar<'cfg> {\n fn display_now(&mut self) -> CargoResult<()> {\n self.bar\n .tick_now(self.cur_progress(), self.max, &self.format_message())\n }\n\n fn on_clean(&mut self) -> CargoResult<()> {\n self.bar\n .tick(self.cur_progress(), self.max, &self.format_message())?;\n self.num_files_folders_cleaned += 1;\n Ok(())\n }\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>Add code for rail-fence-cipher<commit_after>pub struct RailFence {\n rails: usize,\n}\n\nimpl RailFence {\n pub fn new(rails: u32) -> RailFence {\n RailFence {\n rails: rails as usize,\n }\n }\n\n pub fn encode(&self, text: &str) -> String {\n let mut rails = vec![String::new(); self.rails];\n let indexes = (0..self.rails).chain((1..self.rails - 1).rev()).cycle();\n\n text.chars()\n .zip(indexes)\n .for_each(|(c, i)| rails[i as usize].push(c));\n\n rails.concat()\n }\n\n pub fn decode(&self, cipher: &str) -> String {\n let array = cipher.chars().collect::<Vec<_>>();\n\n let mut indices: Vec<_> = (0..self.rails - 1)\n .chain((1..self.rails).rev())\n .cycle()\n .zip(0..cipher.len())\n .collect();\n\n indices.sort_unstable();\n\n let mut text: Vec<char> = vec![' '; array.len()];\n\n (0..array.len()).for_each(|i| {\n let index = indices[i].1;\n text[index] = array[i];\n });\n\n text.iter().collect::<String>()\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove reference to the old EVM crate<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>No explicit type annotation<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ no-system-llvm\n\/\/ compile-flags: -O\n#![crate_type=\"lib\"]\n\npub enum Three { First, Second, Third }\nuse Three::*;\n\npub enum Four { First, Second, Third, Fourth }\nuse Four::*;\n\n#[no_mangle]\npub fn three_valued(x: Three) -> Three {\n \/\/ CHECK-LABEL: @three_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n First => First,\n Second => Second,\n Third => Third,\n }\n}\n\n#[no_mangle]\npub fn four_valued(x: Four) -> Four {\n \/\/ CHECK-LABEL: @four_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n First => First,\n Second => Second,\n Third => Third,\n Fourth => Fourth,\n }\n}\n<commit_msg>Don't glob-import overlapping variant names in test\/codegen\/match-optimizes-away.rs.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ no-system-llvm\n\/\/ compile-flags: -O\n#![crate_type=\"lib\"]\n\npub enum Three { A, B, C }\n\npub enum Four { A, B, C, D }\n\n#[no_mangle]\npub fn three_valued(x: Three) -> Three {\n \/\/ CHECK-LABEL: @three_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n Three::A => Three::A,\n Three::B => Three::B,\n Three::C => Three::C,\n }\n}\n\n#[no_mangle]\npub fn four_valued(x: Four) -> Four {\n \/\/ CHECK-LABEL: @four_valued\n \/\/ CHECK-NEXT: {{^.*:$}}\n \/\/ CHECK-NEXT: ret i8 %0\n match x {\n Four::A => Four::A,\n Four::B => Four::B,\n Four::C => Four::C,\n Four::D => Four::D,\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse resource_task::{Done, Payload, Metadata, LoadResponse, LoaderTask, start_sending};\n\nuse extra::url::Url;\nuse extra::base64::FromBase64;\n\nuse http::headers::test_utils::from_stream_with_str;\nuse http::headers::content_type::MediaType;\n\npub fn factory() -> LoaderTask {\n |url, start_chan| {\n \/\/ NB: we don't spawn a new task.\n \/\/ Hypothesis: data URLs are too small for parallel base64 etc. to be worth it.\n \/\/ Should be tested at some point.\n load(url, start_chan)\n }\n}\n\nfn load(url: Url, start_chan: Chan<LoadResponse>) {\n assert!(\"data\" == url.scheme);\n\n let mut metadata = Metadata::default(url.clone());\n\n \/\/ Split out content type and data.\n let parts: ~[&str] = url.path.splitn_iter(',', 1).to_owned_vec();\n if parts.len() != 2 {\n start_sending(start_chan, metadata).send(Done(Err(())));\n return;\n }\n\n \/\/ \";base64\" must come at the end of the content type, per RFC 2397.\n \/\/ rust-http will fail to parse it because there's no =value part.\n let mut is_base64 = false;\n let mut ct_str = parts[0];\n if ct_str.ends_with(\";base64\") {\n is_base64 = true;\n ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);\n }\n\n \/\/ Parse the content type using rust-http.\n \/\/ FIXME: this can go into an infinite loop! (rust-http #25)\n let content_type: Option<MediaType> = from_stream_with_str(ct_str);\n metadata.set_content_type(&content_type);\n\n let progress_chan = start_sending(start_chan, metadata);\n\n if is_base64 {\n match parts[1].from_base64() {\n Err(*) => {\n progress_chan.send(Done(Err(())));\n }\n Ok(data) => {\n progress_chan.send(Payload(data));\n progress_chan.send(Done(Ok(())));\n }\n }\n } else {\n \/\/ FIXME: Since the %-decoded URL is already a str, we can't\n \/\/ handle UTF8-incompatible encodings.\n progress_chan.send(Payload(parts[1].as_bytes().into_owned()));\n progress_chan.send(Done(Ok(())));\n }\n}\n<commit_msg>Add tests for data: URLs<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\nuse resource_task::{Done, Payload, Metadata, LoadResponse, LoaderTask, start_sending};\n\nuse extra::url::Url;\nuse extra::base64::FromBase64;\n\nuse http::headers::test_utils::from_stream_with_str;\nuse http::headers::content_type::MediaType;\n\npub fn factory() -> LoaderTask {\n |url, start_chan| {\n \/\/ NB: we don't spawn a new task.\n \/\/ Hypothesis: data URLs are too small for parallel base64 etc. 
to be worth it.\n \/\/ Should be tested at some point.\n load(url, start_chan)\n }\n}\n\nfn load(url: Url, start_chan: Chan<LoadResponse>) {\n assert!(\"data\" == url.scheme);\n\n let mut metadata = Metadata::default(url.clone());\n\n \/\/ Split out content type and data.\n let parts: ~[&str] = url.path.splitn_iter(',', 1).to_owned_vec();\n if parts.len() != 2 {\n start_sending(start_chan, metadata).send(Done(Err(())));\n return;\n }\n\n \/\/ \";base64\" must come at the end of the content type, per RFC 2397.\n \/\/ rust-http will fail to parse it because there's no =value part.\n let mut is_base64 = false;\n let mut ct_str = parts[0];\n if ct_str.ends_with(\";base64\") {\n is_base64 = true;\n ct_str = ct_str.slice_to(ct_str.as_bytes().len() - 7);\n }\n\n \/\/ Parse the content type using rust-http.\n \/\/ FIXME: this can go into an infinite loop! (rust-http #25)\n let content_type: Option<MediaType> = from_stream_with_str(ct_str);\n metadata.set_content_type(&content_type);\n\n let progress_chan = start_sending(start_chan, metadata);\n\n if is_base64 {\n match parts[1].from_base64() {\n Err(*) => {\n progress_chan.send(Done(Err(())));\n }\n Ok(data) => {\n progress_chan.send(Payload(data));\n progress_chan.send(Done(Ok(())));\n }\n }\n } else {\n \/\/ FIXME: Since the %-decoded URL is already a str, we can't\n \/\/ handle UTF8-incompatible encodings.\n progress_chan.send(Payload(parts[1].as_bytes().into_owned()));\n progress_chan.send(Done(Ok(())));\n }\n}\n\n#[cfg(test)]\nfn assert_parse(url: &'static str,\n content_type: Option<(~str, ~str)>,\n charset: Option<~str>,\n data: Option<~[u8]>) {\n use std::from_str::FromStr;\n use std::comm;\n\n let (start_port, start_chan) = comm::stream();\n load(FromStr::from_str(url).unwrap(), start_chan);\n\n let response = start_port.recv();\n assert_eq!(&response.metadata.content_type, &content_type);\n assert_eq!(&response.metadata.charset, &charset);\n\n let progress = response.progress_port.recv();\n\n match data {\n None => {\n assert_eq!(progress, Done(Err(())));\n }\n Some(dat) => {\n assert_eq!(progress, Payload(dat));\n assert_eq!(response.progress_port.recv(), Done(Ok(())));\n }\n }\n}\n\n#[test]\nfn empty_invalid() {\n assert_parse(\"data:\", None, None, None);\n}\n\n#[test]\nfn plain() {\n assert_parse(\"data:,hello%20world\", None, None, Some(bytes!(\"hello world\").into_owned()));\n}\n\n#[test]\nfn plain_ct() {\n assert_parse(\"data:text\/plain,hello\",\n Some((~\"text\", ~\"plain\")), None, Some(bytes!(\"hello\").into_owned()));\n}\n\n#[test]\nfn plain_charset() {\n assert_parse(\"data:text\/plain;charset=latin1,hello\",\n Some((~\"text\", ~\"plain\")), Some(~\"latin1\"), Some(bytes!(\"hello\").into_owned()));\n}\n\n#[test]\nfn base64() {\n assert_parse(\"data:;base64,C62+7w==\", None, None, Some(~[0x0B, 0xAD, 0xBE, 0xEF]));\n}\n\n#[test]\nfn base64_ct() {\n assert_parse(\"data:application\/octet-stream;base64,C62+7w==\",\n Some((~\"application\", ~\"octet-stream\")), None, Some(~[0x0B, 0xAD, 0xBE, 0xEF]));\n}\n\n#[test]\nfn base64_charset() {\n assert_parse(\"data:text\/plain;charset=koi8-r;base64,8PLl9+XkIO3l5Pfl5A==\",\n Some((~\"text\", ~\"plain\")), Some(~\"koi8-r\"),\n Some(~[0xF0, 0xF2, 0xE5, 0xF7, 0xE5, 0xE4, 0x20, 0xED, 0xE5, 0xE4, 0xF7, 0xE5, 0xE4]));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for block expressions that have results. 
XFAILed.<commit_after>\/\/ xfail-boot\n\/\/ xfail-stage0\n\/\/ -*- rust -*-\n\nfn main() {\n auto x = {\n @100\n };\n\n check (*x == 100);\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Sync\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. \"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\npub use self::LangItem::*;\n\nuse hir::map as hir_map;\nuse session::Session;\nuse hir::def_id::DefId;\nuse ty;\nuse middle::weak_lang_items;\nuse util::nodemap::FxHashMap;\n\nuse syntax::ast;\nuse syntax::symbol::Symbol;\nuse hir::itemlikevisit::ItemLikeVisitor;\nuse hir;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! language_item_table {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n\nenum_from_u32! {\n #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]\n pub enum LangItem {\n $($variant,)*\n }\n}\n\npub struct LanguageItems {\n pub items: Vec<Option<DefId>>,\n pub missing: Vec<LangItem>,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option<DefId> { None }\n\n LanguageItems {\n items: vec![$(foo($variant)),*],\n missing: Vec::new(),\n }\n }\n\n pub fn items(&self) -> &[Option<DefId>] {\n &*self.items\n }\n\n pub fn item_name(index: usize) -> &'static str {\n let item: Option<LangItem> = LangItem::from_u32(index as u32);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result<DefId, String> {\n match self.items[it as usize] {\n Some(id) => Ok(id),\n None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as usize)))\n }\n }\n }\n\n pub fn require_owned_box(&self) -> Result<DefId, String> {\n self.require(OwnedBoxLangItem)\n }\n\n pub fn fn_trait_kind(&self, id: DefId) -> Option<ty::ClosureKind> {\n let def_id_kinds = [\n (self.fn_trait(), ty::ClosureKind::Fn),\n (self.fn_mut_trait(), ty::ClosureKind::FnMut),\n (self.fn_once_trait(), ty::ClosureKind::FnOnce),\n ];\n\n for &(opt_def_id, kind) in &def_id_kinds {\n if Some(id) == opt_def_id {\n return Some(kind);\n }\n }\n\n None\n }\n\n $(\n #[allow(dead_code)]\n pub fn $method(&self) -> Option<DefId> {\n self.items[$variant as usize]\n }\n )*\n}\n\nstruct LanguageItemCollector<'a, 'tcx: 'a> {\n items: LanguageItems,\n\n hir_map: &'a hir_map::Map<'tcx>,\n\n session: &'a Session,\n\n item_refs: FxHashMap<&'static str, usize>,\n}\n\nimpl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {\n fn visit_item(&mut self, item: &hir::Item) {\n if let Some(value) = extract(&item.attrs) {\n let item_index = self.item_refs.get(&*value.as_str()).cloned();\n\n if let Some(item_index) = item_index {\n 
self.collect_item(item_index, self.hir_map.local_def_id(item.id))\n } else {\n let span = self.hir_map.span(item.id);\n span_err!(self.session, span, E0522,\n \"definition of an unknown language item: `{}`.\",\n value);\n }\n }\n }\n\n fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {\n \/\/ at present, lang items are always items, not trait items\n }\n\n fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {\n \/\/ at present, lang items are always items, not impl items\n }\n}\n\nimpl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {\n pub fn new(session: &'a Session, hir_map: &'a hir_map::Map<'tcx>)\n -> LanguageItemCollector<'a, 'tcx> {\n let mut item_refs = FxHashMap();\n\n $( item_refs.insert($name, $variant as usize); )*\n\n LanguageItemCollector {\n session: session,\n hir_map: hir_map,\n items: LanguageItems::new(),\n item_refs: item_refs,\n }\n }\n\n pub fn collect_item(&mut self, item_index: usize,\n item_def_id: DefId) {\n \/\/ Check for duplicates.\n match self.items.items[item_index] {\n Some(original_def_id) if original_def_id != item_def_id => {\n let cstore = &self.session.cstore;\n let name = LanguageItems::item_name(item_index);\n let mut err = match self.hir_map.span_if_local(item_def_id) {\n Some(span) => struct_span_err!(\n self.session,\n span,\n E0152,\n \"duplicate lang item found: `{}`.\",\n name),\n None => self.session.struct_err(&format!(\n \"duplicate lang item in crate `{}`: `{}`.\",\n cstore.crate_name(item_def_id.krate),\n name)),\n };\n if let Some(span) = self.hir_map.span_if_local(original_def_id) {\n span_note!(&mut err, span,\n \"first defined here.\");\n } else {\n err.note(&format!(\"first defined in crate `{}`.\",\n cstore.crate_name(original_def_id.krate)));\n }\n err.emit();\n }\n _ => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n self.items.items[item_index] = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &hir::Crate) {\n krate.visit_all_item_likes(self);\n }\n\n pub fn collect_external_language_items(&mut self) {\n let cstore = &self.session.cstore;\n\n for cnum in cstore.crates() {\n for (index, item_index) in cstore.lang_items(cnum) {\n let def_id = DefId { krate: cnum, index: index };\n self.collect_item(item_index, def_id);\n }\n }\n }\n\n pub fn collect(&mut self, krate: &hir::Crate) {\n self.collect_external_language_items();\n self.collect_local_language_items(krate);\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {\n for attribute in attrs {\n match attribute.value_str() {\n Some(value) if attribute.check_name(\"lang\") => return Some(value),\n _ => {}\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(session: &Session,\n map: &hir_map::Map)\n -> LanguageItems {\n let krate: &hir::Crate = map.krate();\n let mut collector = LanguageItemCollector::new(session, map);\n collector.collect(krate);\n let LanguageItemCollector { mut items, .. } = collector;\n weak_lang_items::check_crate(krate, session, &mut items);\n items\n}\n\n\/\/ End of the macro\n }\n}\n\nlanguage_item_table! 
{\n\/\/ Variant name, Name, Method name;\n CharImplItem, \"char\", char_impl;\n StrImplItem, \"str\", str_impl;\n SliceImplItem, \"slice\", slice_impl;\n ConstPtrImplItem, \"const_ptr\", const_ptr_impl;\n MutPtrImplItem, \"mut_ptr\", mut_ptr_impl;\n I8ImplItem, \"i8\", i8_impl;\n I16ImplItem, \"i16\", i16_impl;\n I32ImplItem, \"i32\", i32_impl;\n I64ImplItem, \"i64\", i64_impl;\n I128ImplItem, \"i128\", i128_impl;\n IsizeImplItem, \"isize\", isize_impl;\n U8ImplItem, \"u8\", u8_impl;\n U16ImplItem, \"u16\", u16_impl;\n U32ImplItem, \"u32\", u32_impl;\n U64ImplItem, \"u64\", u64_impl;\n U128ImplItem, \"u128\", u128_impl;\n UsizeImplItem, \"usize\", usize_impl;\n F32ImplItem, \"f32\", f32_impl;\n F64ImplItem, \"f64\", f64_impl;\n\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n UnsizeTraitLangItem, \"unsize\", unsize_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n SyncTraitLangItem, \"sync\", sync_trait;\n FreezeTraitLangItem, \"freeze\", freeze_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n CoerceUnsizedTraitLangItem, \"coerce_unsized\", coerce_unsized_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n AddAssignTraitLangItem, \"add_assign\", add_assign_trait;\n SubAssignTraitLangItem, \"sub_assign\", sub_assign_trait;\n MulAssignTraitLangItem, \"mul_assign\", mul_assign_trait;\n DivAssignTraitLangItem, \"div_assign\", div_assign_trait;\n RemAssignTraitLangItem, \"rem_assign\", rem_assign_trait;\n BitXorAssignTraitLangItem, \"bitxor_assign\", bitxor_assign_trait;\n BitAndAssignTraitLangItem, \"bitand_assign\", bitand_assign_trait;\n BitOrAssignTraitLangItem, \"bitor_assign\", bitor_assign_trait;\n ShlAssignTraitLangItem, \"shl_assign\", shl_assign_trait;\n ShrAssignTraitLangItem, \"shr_assign\", shr_assign_trait;\n IndexTraitLangItem, \"index\", index_trait;\n IndexMutTraitLangItem, \"index_mut\", index_mut_trait;\n\n UnsafeCellTypeLangItem, \"unsafe_cell\", unsafe_cell_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n FnTraitLangItem, \"fn\", fn_trait;\n FnMutTraitLangItem, \"fn_mut\", fn_mut_trait;\n FnOnceTraitLangItem, \"fn_once\", fn_once_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n\n \/\/ A number of panic-related lang items. The `panic` item corresponds to\n \/\/ divide-by-zero and various panic cases with `match`. The\n \/\/ `panic_bounds_check` item is for indexing arrays.\n \/\/\n \/\/ The `begin_unwind` lang item has a predefined symbol name and is sort of\n \/\/ a \"weak lang item\" in the sense that a crate is not required to have it\n \/\/ defined to use it, but a final product is required to define it\n \/\/ somewhere. 
Additionally, there are restrictions on crates that use a weak\n \/\/ lang item, but do not have it defined.\n PanicFnLangItem, \"panic\", panic_fn;\n PanicBoundsCheckFnLangItem, \"panic_bounds_check\", panic_bounds_check_fn;\n PanicFmtLangItem, \"panic_fmt\", panic_fmt;\n\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n BoxFreeFnLangItem, \"box_free\", box_free_fn;\n DropInPlaceFnLangItem, \"drop_in_place\", drop_in_place_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality;\n EhUnwindResumeLangItem, \"eh_unwind_resume\", eh_unwind_resume;\n MSVCTryFilterLangItem, \"msvc_try_filter\", msvc_try_filter;\n\n OwnedBoxLangItem, \"owned_box\", owned_box;\n\n PhantomDataItem, \"phantom_data\", phantom_data;\n\n \/\/ Deprecated:\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NonZeroItem, \"non_zero\", non_zero;\n\n DebugTraitLangItem, \"debug_trait\", debug_trait;\n}\n\nimpl<'a, 'tcx, 'gcx> ty::TyCtxt<'a, 'tcx, 'gcx> {\n pub fn require_lang_item(&self, lang_item: LangItem) -> DefId {\n self.lang_items.require(lang_item).unwrap_or_else(|msg| {\n self.sess.fatal(&msg)\n })\n }\n}\n<commit_msg>weak_lang_items: check for `lang` attribute before calling `value_str`<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Detecting language items.\n\/\/\n\/\/ Language items are items that represent concepts intrinsic to the language\n\/\/ itself. Examples are:\n\/\/\n\/\/ * Traits that specify \"kinds\"; e.g. \"Sync\", \"Send\".\n\/\/\n\/\/ * Traits that represent operators; e.g. \"Add\", \"Sub\", \"Index\".\n\/\/\n\/\/ * Functions called by the compiler itself.\n\npub use self::LangItem::*;\n\nuse hir::map as hir_map;\nuse session::Session;\nuse hir::def_id::DefId;\nuse ty;\nuse middle::weak_lang_items;\nuse util::nodemap::FxHashMap;\n\nuse syntax::ast;\nuse syntax::symbol::Symbol;\nuse hir::itemlikevisit::ItemLikeVisitor;\nuse hir;\n\n\/\/ The actual lang items defined come at the end of this file in one handy table.\n\/\/ So you probably just want to nip down to the end.\nmacro_rules! language_item_table {\n (\n $( $variant:ident, $name:expr, $method:ident; )*\n ) => {\n\n\nenum_from_u32! 
{\n #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]\n pub enum LangItem {\n $($variant,)*\n }\n}\n\npub struct LanguageItems {\n pub items: Vec<Option<DefId>>,\n pub missing: Vec<LangItem>,\n}\n\nimpl LanguageItems {\n pub fn new() -> LanguageItems {\n fn foo(_: LangItem) -> Option<DefId> { None }\n\n LanguageItems {\n items: vec![$(foo($variant)),*],\n missing: Vec::new(),\n }\n }\n\n pub fn items(&self) -> &[Option<DefId>] {\n &*self.items\n }\n\n pub fn item_name(index: usize) -> &'static str {\n let item: Option<LangItem> = LangItem::from_u32(index as u32);\n match item {\n $( Some($variant) => $name, )*\n None => \"???\"\n }\n }\n\n pub fn require(&self, it: LangItem) -> Result<DefId, String> {\n match self.items[it as usize] {\n Some(id) => Ok(id),\n None => {\n Err(format!(\"requires `{}` lang_item\",\n LanguageItems::item_name(it as usize)))\n }\n }\n }\n\n pub fn require_owned_box(&self) -> Result<DefId, String> {\n self.require(OwnedBoxLangItem)\n }\n\n pub fn fn_trait_kind(&self, id: DefId) -> Option<ty::ClosureKind> {\n let def_id_kinds = [\n (self.fn_trait(), ty::ClosureKind::Fn),\n (self.fn_mut_trait(), ty::ClosureKind::FnMut),\n (self.fn_once_trait(), ty::ClosureKind::FnOnce),\n ];\n\n for &(opt_def_id, kind) in &def_id_kinds {\n if Some(id) == opt_def_id {\n return Some(kind);\n }\n }\n\n None\n }\n\n $(\n #[allow(dead_code)]\n pub fn $method(&self) -> Option<DefId> {\n self.items[$variant as usize]\n }\n )*\n}\n\nstruct LanguageItemCollector<'a, 'tcx: 'a> {\n items: LanguageItems,\n\n hir_map: &'a hir_map::Map<'tcx>,\n\n session: &'a Session,\n\n item_refs: FxHashMap<&'static str, usize>,\n}\n\nimpl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {\n fn visit_item(&mut self, item: &hir::Item) {\n if let Some(value) = extract(&item.attrs) {\n let item_index = self.item_refs.get(&*value.as_str()).cloned();\n\n if let Some(item_index) = item_index {\n self.collect_item(item_index, self.hir_map.local_def_id(item.id))\n } else {\n let span = self.hir_map.span(item.id);\n span_err!(self.session, span, E0522,\n \"definition of an unknown language item: `{}`.\",\n value);\n }\n }\n }\n\n fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {\n \/\/ at present, lang items are always items, not trait items\n }\n\n fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {\n \/\/ at present, lang items are always items, not impl items\n }\n}\n\nimpl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {\n pub fn new(session: &'a Session, hir_map: &'a hir_map::Map<'tcx>)\n -> LanguageItemCollector<'a, 'tcx> {\n let mut item_refs = FxHashMap();\n\n $( item_refs.insert($name, $variant as usize); )*\n\n LanguageItemCollector {\n session: session,\n hir_map: hir_map,\n items: LanguageItems::new(),\n item_refs: item_refs,\n }\n }\n\n pub fn collect_item(&mut self, item_index: usize,\n item_def_id: DefId) {\n \/\/ Check for duplicates.\n match self.items.items[item_index] {\n Some(original_def_id) if original_def_id != item_def_id => {\n let cstore = &self.session.cstore;\n let name = LanguageItems::item_name(item_index);\n let mut err = match self.hir_map.span_if_local(item_def_id) {\n Some(span) => struct_span_err!(\n self.session,\n span,\n E0152,\n \"duplicate lang item found: `{}`.\",\n name),\n None => self.session.struct_err(&format!(\n \"duplicate lang item in crate `{}`: `{}`.\",\n cstore.crate_name(item_def_id.krate),\n name)),\n };\n if let Some(span) = self.hir_map.span_if_local(original_def_id) {\n span_note!(&mut err, span,\n 
\"first defined here.\");\n } else {\n err.note(&format!(\"first defined in crate `{}`.\",\n cstore.crate_name(original_def_id.krate)));\n }\n err.emit();\n }\n _ => {\n \/\/ OK.\n }\n }\n\n \/\/ Matched.\n self.items.items[item_index] = Some(item_def_id);\n }\n\n pub fn collect_local_language_items(&mut self, krate: &hir::Crate) {\n krate.visit_all_item_likes(self);\n }\n\n pub fn collect_external_language_items(&mut self) {\n let cstore = &self.session.cstore;\n\n for cnum in cstore.crates() {\n for (index, item_index) in cstore.lang_items(cnum) {\n let def_id = DefId { krate: cnum, index: index };\n self.collect_item(item_index, def_id);\n }\n }\n }\n\n pub fn collect(&mut self, krate: &hir::Crate) {\n self.collect_external_language_items();\n self.collect_local_language_items(krate);\n }\n}\n\npub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {\n for attribute in attrs {\n if attribute.check_name(\"lang\") {\n if let Some(value) = attribute.value_str() {\n return Some(value)\n }\n }\n }\n\n return None;\n}\n\npub fn collect_language_items(session: &Session,\n map: &hir_map::Map)\n -> LanguageItems {\n let krate: &hir::Crate = map.krate();\n let mut collector = LanguageItemCollector::new(session, map);\n collector.collect(krate);\n let LanguageItemCollector { mut items, .. } = collector;\n weak_lang_items::check_crate(krate, session, &mut items);\n items\n}\n\n\/\/ End of the macro\n }\n}\n\nlanguage_item_table! {\n\/\/ Variant name, Name, Method name;\n CharImplItem, \"char\", char_impl;\n StrImplItem, \"str\", str_impl;\n SliceImplItem, \"slice\", slice_impl;\n ConstPtrImplItem, \"const_ptr\", const_ptr_impl;\n MutPtrImplItem, \"mut_ptr\", mut_ptr_impl;\n I8ImplItem, \"i8\", i8_impl;\n I16ImplItem, \"i16\", i16_impl;\n I32ImplItem, \"i32\", i32_impl;\n I64ImplItem, \"i64\", i64_impl;\n I128ImplItem, \"i128\", i128_impl;\n IsizeImplItem, \"isize\", isize_impl;\n U8ImplItem, \"u8\", u8_impl;\n U16ImplItem, \"u16\", u16_impl;\n U32ImplItem, \"u32\", u32_impl;\n U64ImplItem, \"u64\", u64_impl;\n U128ImplItem, \"u128\", u128_impl;\n UsizeImplItem, \"usize\", usize_impl;\n F32ImplItem, \"f32\", f32_impl;\n F64ImplItem, \"f64\", f64_impl;\n\n SendTraitLangItem, \"send\", send_trait;\n SizedTraitLangItem, \"sized\", sized_trait;\n UnsizeTraitLangItem, \"unsize\", unsize_trait;\n CopyTraitLangItem, \"copy\", copy_trait;\n SyncTraitLangItem, \"sync\", sync_trait;\n FreezeTraitLangItem, \"freeze\", freeze_trait;\n\n DropTraitLangItem, \"drop\", drop_trait;\n\n CoerceUnsizedTraitLangItem, \"coerce_unsized\", coerce_unsized_trait;\n\n AddTraitLangItem, \"add\", add_trait;\n SubTraitLangItem, \"sub\", sub_trait;\n MulTraitLangItem, \"mul\", mul_trait;\n DivTraitLangItem, \"div\", div_trait;\n RemTraitLangItem, \"rem\", rem_trait;\n NegTraitLangItem, \"neg\", neg_trait;\n NotTraitLangItem, \"not\", not_trait;\n BitXorTraitLangItem, \"bitxor\", bitxor_trait;\n BitAndTraitLangItem, \"bitand\", bitand_trait;\n BitOrTraitLangItem, \"bitor\", bitor_trait;\n ShlTraitLangItem, \"shl\", shl_trait;\n ShrTraitLangItem, \"shr\", shr_trait;\n AddAssignTraitLangItem, \"add_assign\", add_assign_trait;\n SubAssignTraitLangItem, \"sub_assign\", sub_assign_trait;\n MulAssignTraitLangItem, \"mul_assign\", mul_assign_trait;\n DivAssignTraitLangItem, \"div_assign\", div_assign_trait;\n RemAssignTraitLangItem, \"rem_assign\", rem_assign_trait;\n BitXorAssignTraitLangItem, \"bitxor_assign\", bitxor_assign_trait;\n BitAndAssignTraitLangItem, \"bitand_assign\", bitand_assign_trait;\n BitOrAssignTraitLangItem, 
\"bitor_assign\", bitor_assign_trait;\n ShlAssignTraitLangItem, \"shl_assign\", shl_assign_trait;\n ShrAssignTraitLangItem, \"shr_assign\", shr_assign_trait;\n IndexTraitLangItem, \"index\", index_trait;\n IndexMutTraitLangItem, \"index_mut\", index_mut_trait;\n\n UnsafeCellTypeLangItem, \"unsafe_cell\", unsafe_cell_type;\n\n DerefTraitLangItem, \"deref\", deref_trait;\n DerefMutTraitLangItem, \"deref_mut\", deref_mut_trait;\n\n FnTraitLangItem, \"fn\", fn_trait;\n FnMutTraitLangItem, \"fn_mut\", fn_mut_trait;\n FnOnceTraitLangItem, \"fn_once\", fn_once_trait;\n\n EqTraitLangItem, \"eq\", eq_trait;\n OrdTraitLangItem, \"ord\", ord_trait;\n\n StrEqFnLangItem, \"str_eq\", str_eq_fn;\n\n \/\/ A number of panic-related lang items. The `panic` item corresponds to\n \/\/ divide-by-zero and various panic cases with `match`. The\n \/\/ `panic_bounds_check` item is for indexing arrays.\n \/\/\n \/\/ The `begin_unwind` lang item has a predefined symbol name and is sort of\n \/\/ a \"weak lang item\" in the sense that a crate is not required to have it\n \/\/ defined to use it, but a final product is required to define it\n \/\/ somewhere. Additionally, there are restrictions on crates that use a weak\n \/\/ lang item, but do not have it defined.\n PanicFnLangItem, \"panic\", panic_fn;\n PanicBoundsCheckFnLangItem, \"panic_bounds_check\", panic_bounds_check_fn;\n PanicFmtLangItem, \"panic_fmt\", panic_fmt;\n\n ExchangeMallocFnLangItem, \"exchange_malloc\", exchange_malloc_fn;\n BoxFreeFnLangItem, \"box_free\", box_free_fn;\n DropInPlaceFnLangItem, \"drop_in_place\", drop_in_place_fn;\n\n StartFnLangItem, \"start\", start_fn;\n\n EhPersonalityLangItem, \"eh_personality\", eh_personality;\n EhUnwindResumeLangItem, \"eh_unwind_resume\", eh_unwind_resume;\n MSVCTryFilterLangItem, \"msvc_try_filter\", msvc_try_filter;\n\n OwnedBoxLangItem, \"owned_box\", owned_box;\n\n PhantomDataItem, \"phantom_data\", phantom_data;\n\n \/\/ Deprecated:\n CovariantTypeItem, \"covariant_type\", covariant_type;\n ContravariantTypeItem, \"contravariant_type\", contravariant_type;\n InvariantTypeItem, \"invariant_type\", invariant_type;\n CovariantLifetimeItem, \"covariant_lifetime\", covariant_lifetime;\n ContravariantLifetimeItem, \"contravariant_lifetime\", contravariant_lifetime;\n InvariantLifetimeItem, \"invariant_lifetime\", invariant_lifetime;\n\n NonZeroItem, \"non_zero\", non_zero;\n\n DebugTraitLangItem, \"debug_trait\", debug_trait;\n}\n\nimpl<'a, 'tcx, 'gcx> ty::TyCtxt<'a, 'tcx, 'gcx> {\n pub fn require_lang_item(&self, lang_item: LangItem) -> DefId {\n self.lang_items.require(lang_item).unwrap_or_else(|msg| {\n self.sess.fatal(&msg)\n })\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Silence warnings in the fixed time equality routines.<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait PublicTrait<T> {}\n\n\/\/ @has issue_46380_2\/struct.Public.html\npub struct PublicStruct;\n\n\/\/ @!has - '\/\/*[@class=\"impl\"]' 'impl Add<Private> for Public'\nimpl PublicTrait<PrivateStruct> for PublicStruct {}\n\nstruct PrivateStruct;\n<commit_msg>Fix htmldocck naming<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\npub trait PublicTrait<T> {}\n\n\/\/ @has issue_46380_2\/struct.PublicStruct.html\npub struct PublicStruct;\n\n\/\/ @!has - '\/\/*[@class=\"impl\"]' 'impl PublicTrait<PrivateStruct> for PublicStruct'\nimpl PublicTrait<PrivateStruct> for PublicStruct {}\n\nstruct PrivateStruct;\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for missing default impl<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(optin_builtin_traits)]\n\nstruct TestType;\n\ntrait TestTrait {}\n\nimpl !TestTrait for TestType {}\n\/\/~^ the trait `TestTrait` is not implemented for the type `TestType`\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add query \"shell\"<commit_after>use std::io::BufReader;\nuse std::path::PathBuf;\n\nuse futures::Stream as _;\nuse log::error;\nuse structopt::StructOpt;\nuse tokio;\n\nuse gerritbot_gerrit as gerrit;\n\n#[derive(StructOpt, Debug)]\nstruct Args {\n \/\/\/ Gerrit username\n #[structopt(short = \"u\")]\n username: String,\n \/\/\/ Gerrit hostname\n hostname: String,\n \/\/\/ Gerrit SSH port\n #[structopt(short = \"p\", default_value = \"29418\")]\n port: u32,\n \/\/\/ Path to SSH private key\n #[structopt(short = \"i\", parse(from_os_str))]\n private_key_path: PathBuf,\n \/\/\/ Enable verbose output\n #[structopt(short = \"v\")]\n verbose: bool,\n}\n\nfn main() {\n let args = Args::from_args();\n stderrlog::new()\n .module(module_path!())\n .timestamp(stderrlog::Timestamp::Second)\n .verbosity(if args.verbose { 5 } else { 2 })\n .init()\n .unwrap();\n\n let connection = gerrit::GerritConnection::connect(\n format!(\"{}:{}\", args.hostname, args.port),\n args.username,\n args.private_key_path,\n )\n .unwrap_or_else(|e| {\n error!(\"connection to gerrit failed: {}\", e);\n std::process::exit(1);\n });\n\n let mut command_runner = gerrit::CommandRunner::new(connection).unwrap_or_else(|e| {\n error!(\"failed to create command runner: {}\", e);\n std::process::exit(1);\n });\n\n let stdin_lines = tokio::io::lines(BufReader::new(tokio::io::stdin()));\n\n tokio::run(\n stdin_lines\n .map_err(|e| format!(\"failed to read line: {}\", e))\n .and_then(move |line| 
command_runner.run_command(format!(\"gerrit query {}\", line)))\n .map_err(|e| error!(\"error: {}\", e))\n .for_each(|output| {\n println!(\"{}\", output);\n Ok(())\n }),\n );\n\n \/*\n tokio::run(\n command_runner\n .run_command(format!(\"gerrit query {}\", args.query))\n .map_err(|e| error!(\"error running query: {}\", e))\n .map(|output| println!(\"{}\", output)),\n );\n *\/\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[link(name = \"FooCoreServices\", kind = \"framework\")] extern {} \/\/~ ERROR E0455\n\nfn main() {\n}\n<commit_msg>Remove E0455 test (for now)<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>day 6<commit_after>extern crate timely;\nextern crate differential_dataflow;\n\n\/\/ taken from: https:\/\/adventofcode.com\/2017\/day\/6\n\nuse differential_dataflow::input::Input;\nuse differential_dataflow::operators::*;\n\nfn main() {\n\n let input = \"10 3 15 10 5 15 5 15 9 2 5 8 5 2 3 6\";\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n let worker_input = \n input\n .split_whitespace()\n .map(|phrase| phrase.parse::<u8>().unwrap())\n .collect::<Vec<_>>();\n\n worker.dataflow::<(),_,_>(|scope| {\n\n let banks = scope.new_collection_from(Some(worker_input)).1;\n\n let stable = banks.iterate(|iter|\n iter.map_in_place(|banks| recycle(banks))\n .concat(&banks.enter(&iter.scope()))\n .distinct()\n );\n\n stable\n .map(|_| ((),()))\n .count()\n .inspect(|x| println!(\"part 1: {:?}\", (x.0).1));\n\n \/\/ determine the repeated state by stepping all states and subtracting.\n let loop_point = stable\n .map_in_place(|banks| recycle(banks))\n .concat(&stable.negate())\n .concat(&banks);\n\n \/\/ restart iteration from known repeated element.\n loop_point \n .iterate(|iter|\n iter.map_in_place(|banks| recycle(banks))\n .concat(&loop_point.enter(&iter.scope()))\n .distinct()\n )\n .map(|_| ((),()))\n .count()\n .inspect(|x| println!(\"part 2: {:?}\", (x.0).1));\n });\n\n }).unwrap();\n}\n\nfn recycle(banks: &mut [u8]) {\n let mut max_idx = 0;\n for i in 0 .. banks.len() {\n if banks[i] > banks[max_idx] {\n max_idx = i;\n }\n }\n\n let redistribute = banks[max_idx] as usize;\n banks[max_idx] = 0;\n let banks_len = banks.len();\n for i in 1 .. 
(redistribute + 1) {\n banks[(max_idx + i) % banks_len] += 1;\n } \n}<|endoftext|>"} {"text":"<commit_before><commit_msg>added end2end tests for CosmosDB documents<commit_after>#![cfg(all(test, feature = \"test_e2e\"))]\n#[macro_use]\nextern crate serde_derive;\n\nuse azure_sdk_cosmos::collection::*;\n\nmod setup;\n\nconst DATABASE_NAME: &str = \"test-cosmos-db\";\nconst COLLECTION_NAME: &str = \"test-collection\";\nconst DOCUMENT_NAME: &str = \"test-document-name\";\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\nstruct Document {\n id: String, \/\/ required field\n hello: u32,\n}\n\n#[test]\nfn create_and_delete_document() {\n let (client, mut core) = setup::initialize().unwrap();\n\n core.run(client.create_database(DATABASE_NAME)).unwrap();\n\n \/\/ create a new collection\n let collection_to_create = Collection::new(\n COLLECTION_NAME,\n IndexingPolicy {\n automatic: true,\n indexing_mode: IndexingMode::Consistent,\n included_paths: vec![],\n excluded_paths: vec![],\n },\n );\n core.run(client.create_collection(DATABASE_NAME, 400, &collection_to_create))\n .unwrap();\n\n \/\/ create a new document\n let document_data = Document {\n id: DOCUMENT_NAME.to_string(),\n hello: 42,\n };\n core.run(client.create_document(DATABASE_NAME, COLLECTION_NAME, &document_data).execute())\n .unwrap();\n let documents = core\n .run(client.list_documents(DATABASE_NAME, COLLECTION_NAME).execute::<Document>())\n .unwrap()\n .documents;\n assert!(documents.len() == 1);\n\n \/\/ try to get the contents of the previously created document\n let document_request = client\n .get_document(DATABASE_NAME, COLLECTION_NAME, DOCUMENT_NAME)\n .execute::<Document>();\n let document_after_get = core.run(document_request).unwrap().document.expect(\"No document found!\");\n assert_eq!(document_after_get.entity, document_data);\n\n \/\/ delete document\n core.run(client.delete_document(DATABASE_NAME, COLLECTION_NAME, DOCUMENT_NAME).execute())\n .unwrap();\n let documents = core\n .run(client.list_documents(DATABASE_NAME, COLLECTION_NAME).execute::<Document>())\n .unwrap()\n .documents;\n assert!(documents.len() == 0);\n\n core.run(client.delete_database(DATABASE_NAME)).unwrap();\n}\n\n#[test]\n#[ignore]\nfn replace_document() {}\n\n#[test]\n#[ignore]\nfn query_documents() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unused reference symbol<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>cargo run protoc-bin-vendored works now<commit_after>use protoc_bin_vendored::protoc_bin_path;\nuse std::env;\nuse std::process;\n\nfn main() {\n let protoc_bin_path = match protoc_bin_path() {\n Ok(path) => path,\n Err(e) => {\n eprintln!(\"protoc binary not found: {}\", e);\n process::exit(11);\n }\n };\n let mut command = match process::Command::new(protoc_bin_path).args(env::args()).spawn() {\n Ok(command) => command,\n Err(e) => {\n eprintln!(\"failed to spawn protoc: {}\", e);\n process::exit(12);\n }\n };\n let exit_status = command.wait().unwrap();\n process::exit(exit_status.code().unwrap_or(13));\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! 
exec {\n ($cmd:expr) => ({\n unsafe {\n (*application).on_command(&$cmd.to_string());\n }\n })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ A command\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n },\n });\n\n commands.push(Command {\n name: \"cd\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n },\n });\n\n commands.push(Command {\n name: \"echo\",\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"else\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"exec\",\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"exit\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"fi\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"if\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"ls\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: box |_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n },\n });\n\n commands.push(Command {\n name: \"read\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"run\",\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"send\",\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] 
[data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\",\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: box move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n                for i in 1..args.len() {\n                    if let Some(arg) = args.get(i) {\n                        value = value + \" \" + &arg;\n                    }\n                }\n\n                self.set_var(name, &value);\n                return;\n            }\n\n            \/\/Commands\n            for command in self.commands.iter() {\n                if &command.name == cmd {\n                    (*command.main)(&args);\n                    return;\n                }\n            }\n\n            println!(\"Unknown command: '{}'\", cmd);\n        }\n    }\n\n\n    pub fn set_var(&mut self, name: &str, value: &str){\n        if name.is_empty() {\n            return;\n        }\n\n        if value.is_empty() {\n            let mut remove = -1;\n            for i in 0..self.variables.len() {\n                match self.variables.get(i) {\n                    Some(variable) => if variable.name == name {\n                        remove = i as isize;\n                        break;\n                    },\n                    None => break,\n                }\n            }\n\n            if remove >= 0 {\n                self.variables.remove(remove as usize);\n            }\n        } else {\n            for variable in self.variables.iter_mut() {\n                if variable.name == name {\n                    variable.value = value.to_string();\n                    return;\n                }\n            }\n\n            self.variables.push(Variable {\n                name: name.to_string(),\n                value: value.to_string(),\n            });\n        }\n    }\n\n    \/\/\/ Method to return the current directory\n    \/\/\/ If the current directory cannot be found, a default string (\"?\") will be returned\n    pub fn get_current_directory(&mut self) -> String {\n        if let Some(file) = File::open(\"\") {\n            if let Some(path) = file.path() {\n                \/\/ Return the current path\n                return path\n            }\n            \/\/ Return a default string if the path cannot be found\n            else {\n                return \"?\".to_string()\n            }\n        }\n        else {\n            return \"?\".to_string()\n        }\n    }\n\n    \/\/\/ Run the application\n    pub fn main(&mut self) {\n        println!(\"Type help for a command list\");\n        if let Some(arg) = args().get(1) {\n            let command = \"run \".to_string() + arg;\n            println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n            self.on_command(&command);\n        }\n\n        loop {\n            for mode in self.modes.iter().rev() {\n                if mode.value {\n                    print!(\"+ \");\n                } else {\n                    print!(\"- \");\n                }\n            }\n            print!(\"user@redox:{}# \", self.get_current_directory());\n            if let Some(command_original) = readln!() {\n                let command = command_original.trim();\n                if command == \"exit\" {\n                    println!(\"Exit temporarily blocked (due to using terminal as init)\")\n                    \/\/break;\n                } else if !command.is_empty() {\n                    self.on_command(&command);\n                }\n            } else {\n                println!(\"Failed to read from stdin\");\n            }\n        }\n    }\n}\n\npub fn main() {\n    unsafe {\n        let mut app = box Application::new();\n        application = app.deref_mut();\n        app.main();\n    }\n}\n<commit_msg>Rust documentation for Command<commit_after>use redox::ops::DerefMut;\nuse redox::string::*;\nuse redox::vec::Vec;\nuse redox::boxed::Box;\nuse redox::fs::*;\nuse redox::io::*;\nuse redox::env::*;\nuse redox::time::Duration;\nuse redox::to_num::*;\n\n\/* Magic Macros { *\/\nstatic mut application: *mut Application<'static> = 0 as *mut Application;\n\n\/\/\/ Execute a command\nmacro_rules! exec {\n    ($cmd:expr) => ({\n        unsafe {\n            (*application).on_command(&$cmd.to_string());\n        }\n    })\n}\n\/* } Magic Macros *\/\n\n\/\/\/ Structure which represents a Terminal's command.\n\/\/\/ This command structure contains a name, and the code which runs the functionality associated with it, taking zero, one or several arguments.\n\/\/\/ # Example\n\/\/\/ ```\n\/\/\/ let my_command = Command {\n\/\/\/     name: \"my_command\",\n\/\/\/     main: box |args: &Vec<String>| {\n\/\/\/         println!(\"Say 'hello' to my command!
:-D\");\n\/\/\/ }\n\/\/\/ }\n\/\/\/ ```\npub struct Command<'a> {\n pub name: &'a str,\n pub main: Box<Fn(&Vec<String>)>,\n}\n\nimpl<'a> Command<'a> {\n \/\/\/ Return the vector of the commands\n \/\/ TODO: Use a more efficient collection instead\n pub fn vec() -> Vec<Self> {\n let mut commands: Vec<Self> = Vec::new();\n\n commands.push(Command {\n name: \"cat\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read: {}\", path),\n }\n } else {\n println!(\"Failed to open file: {}\", path);\n }\n },\n });\n\n commands.push(Command {\n name: \"cd\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(path) => {\n if !change_cwd(&path) {\n println!(\"Bad path: {}\", path);\n }\n }\n None => println!(\"No path given\")\n }\n },\n });\n\n commands.push(Command {\n name: \"echo\",\n main: box |args: &Vec<String>| {\n let echo = args.iter()\n .skip(1)\n .fold(String::new(), |string, arg| string + \" \" + arg);\n println!(\"{}\", echo.trim());\n },\n });\n\n commands.push(Command {\n name: \"else\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"exec\",\n main: box |args: &Vec<String>| {\n if let Some(arg) = args.get(1) {\n File::exec(arg);\n }\n },\n });\n\n commands.push(Command {\n name: \"exit\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"fi\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"if\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"ls\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(dir) = read_dir(&path) {\n for entry in dir {\n println!(\"{}\", entry.path());\n }\n } else {\n println!(\"Failed to open directory: {}\", path);\n }\n }\n });\n\n commands.push(Command {\n name: \"pwd\",\n main: box |_: &Vec<String>| {\n if let Some(file) = File::open(\"\") {\n if let Some(path) = file.path() {\n println!(\"{}\", path);\n } else {\n println!(\"Could not get the path\");\n }\n } else {\n println!(\"Could not open the working directory\");\n }\n },\n });\n\n commands.push(Command {\n name: \"read\",\n main: box |_: &Vec<String>| {},\n });\n\n commands.push(Command {\n name: \"run\",\n main: box |args: &Vec<String>| {\n if let Some(path) = args.get(1) {\n\n let mut commands = String::new();\n if let Some(mut file) = File::open(path) {\n file.read_to_string(&mut commands);\n }\n\n for command in commands.split('\\n') {\n exec!(command);\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"sleep\",\n main: box |args: &Vec<String>| {\n let secs = {\n match args.get(1) {\n Some(arg) => arg.to_num() as i64,\n None => 0,\n }\n };\n\n let nanos = {\n match args.get(2) {\n Some(arg) => arg.to_num() as i32,\n None => 0,\n }\n };\n\n println!(\"Sleep: {} {}\", secs, nanos);\n let remaining = Duration::new(secs, nanos).sleep();\n println!(\"Remaining: {} {}\", remaining.secs, remaining.nanos);\n },\n });\n\n commands.push(Command {\n name: \"send\",\n main: box |args: &Vec<String>| {\n if args.len() < 3 {\n println!(\"Error: incorrect arguments\");\n println!(\"Usage: send [url] [data]\");\n return;\n }\n\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => 
String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n println!(\"URL: {:?}\", file.path());\n\n let string: String = args.iter()\n .skip(2)\n .fold(String::new(), |s, arg| s + \" \" + arg)\n + \"\\r\\n\\r\\n\";\n\n match file.write(string.trim_left().as_bytes()) {\n Some(size) => println!(\"Wrote {} bytes\", size),\n None => println!(\"Failed to write\"),\n }\n\n let mut string = String::new();\n match file.read_to_string(&mut string) {\n Some(_) => println!(\"{}\", string),\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n \/\/ Simple command to create a file, in the current directory\n \/\/ The file has got the name given as the first argument of the command\n \/\/ If the command have no arguments, the command don't create the file\n commands.push(Command {\n name: \"touch\",\n main: box |args: &Vec<String>| {\n match args.get(1) {\n Some(file_name) => if File::create(file_name).is_none() {\n println!(\"Failed to create: {}\", file_name);\n },\n None => println!(\"No name provided\")\n }\n }\n });\n\n commands.push(Command {\n name: \"url_hex\",\n main: box |args: &Vec<String>| {\n let path = {\n match args.get(1) {\n Some(arg) => arg.clone(),\n None => String::new(),\n }\n };\n\n if let Some(mut file) = File::open(&path) {\n let mut vec: Vec<u8> = Vec::new();\n match file.read_to_end(&mut vec) {\n Some(_) => {\n let mut line = \"HEX:\".to_string();\n for byte in vec.iter() {\n line = line + \" \" + &format!(\"{:X}\", *byte);\n }\n println!(\"{}\", line);\n }\n None => println!(\"Failed to read\"),\n }\n }\n },\n });\n\n commands.push(Command {\n name: \"wget\",\n main: box |args: &Vec<String>| {\n if let Some(host) = args.get(1) {\n if let Some(req) = args.get(2) {\n if let Some(mut con) = File::open(&(\"tcp:\/\/\".to_string() + host)) {\n con.write((\"GET \".to_string() + req + \" HTTP\/1.1\").as_bytes());\n\n let mut res = Vec::new();\n con.read_to_end(&mut res);\n\n if let Some(mut file) = File::open(&req) {\n file.write(&res);\n }\n }\n } else {\n println!(\"No request given\");\n }\n } else {\n println!(\"No url given\");\n }\n },\n });\n\n let command_list = commands.iter().fold(String::new(), |l , c| l + \" \" + c.name);\n\n commands.push(Command {\n name: \"help\",\n main: box move |_: &Vec<String>| {\n println!(\"Commands:{}\", command_list);\n },\n });\n\n commands\n }\n}\n\n\/\/\/ A (env) variable\npub struct Variable {\n pub name: String,\n pub value: String,\n}\n\npub struct Mode {\n value: bool,\n}\n\n\/\/\/ An application\npub struct Application<'a> {\n commands: Vec<Command<'a>>,\n variables: Vec<Variable>,\n modes: Vec<Mode>,\n}\n\nimpl<'a> Application<'a> {\n \/\/\/ Create a new empty application\n pub fn new() -> Self {\n return Application {\n commands: Command::vec(),\n variables: Vec::new(),\n modes: Vec::new(),\n };\n }\n\n fn on_command(&mut self, command_string: &str) {\n \/\/Comment\n if command_string.starts_with('#') {\n return;\n }\n\n \/\/Show variables\n if command_string == \"$\" {\n for variable in self.variables.iter() {\n println!(\"{}={}\", variable.name, variable.value);\n }\n return;\n }\n\n \/\/Explode into arguments, replace variables\n let mut args: Vec<String> = Vec::<String>::new();\n for arg in command_string.split(' ') {\n if !arg.is_empty() {\n if arg.starts_with('$') {\n let name = arg[1 .. 
arg.len()].to_string();\n for variable in self.variables.iter() {\n if variable.name == name {\n args.push(variable.value.clone());\n break;\n }\n }\n } else {\n args.push(arg.to_string());\n }\n }\n }\n\n \/\/Execute commands\n if let Some(cmd) = args.get(0) {\n if cmd == \"if\" {\n let mut value = false;\n\n if let Some(left) = args.get(1) {\n if let Some(cmp) = args.get(2) {\n if let Some(right) = args.get(3) {\n if cmp == \"==\" {\n value = *left == *right;\n } else if cmp == \"!=\" {\n value = *left != *right;\n } else if cmp == \">\" {\n value = left.to_num_signed() > right.to_num_signed();\n } else if cmp == \">=\" {\n value = left.to_num_signed() >= right.to_num_signed();\n } else if cmp == \"<\" {\n value = left.to_num_signed() < right.to_num_signed();\n } else if cmp == \"<=\" {\n value = left.to_num_signed() <= right.to_num_signed();\n } else {\n println!(\"Unknown comparison: {}\", cmp);\n }\n } else {\n println!(\"No right hand side\");\n }\n } else {\n println!(\"No comparison operator\");\n }\n } else {\n println!(\"No left hand side\");\n }\n\n self.modes.insert(0, Mode { value: value });\n return;\n }\n\n if cmd == \"else\" {\n let mut syntax_error = false;\n match self.modes.get_mut(0) {\n Some(mode) => mode.value = !mode.value,\n None => syntax_error = true,\n }\n if syntax_error {\n println!(\"Syntax error: else found with no previous if\");\n }\n return;\n }\n\n if cmd == \"fi\" {\n let mut syntax_error = false;\n if !self.modes.is_empty() {\n self.modes.remove(0);\n } else {\n syntax_error = true;\n }\n if syntax_error {\n println!(\"Syntax error: fi found with no previous if\");\n }\n return;\n }\n\n for mode in self.modes.iter() {\n if !mode.value {\n return;\n }\n }\n\n if cmd == \"read\" {\n for i in 1..args.len() {\n if let Some(arg_original) = args.get(i) {\n let arg = arg_original.trim();\n print!(\"{}=\", arg);\n if let Some(value_original) = readln!() {\n let value = value_original.trim();\n self.set_var(arg, value);\n }\n }\n }\n }\n\n \/\/Set variables\n if let Some(i) = cmd.find('=') {\n let name = cmd[0 .. i].trim();\n let mut value = cmd[i + 1 .. 
cmd.len()].trim().to_string();\n\n                for i in 1..args.len() {\n                    if let Some(arg) = args.get(i) {\n                        value = value + \" \" + &arg;\n                    }\n                }\n\n                self.set_var(name, &value);\n                return;\n            }\n\n            \/\/Commands\n            for command in self.commands.iter() {\n                if &command.name == cmd {\n                    (*command.main)(&args);\n                    return;\n                }\n            }\n\n            println!(\"Unknown command: '{}'\", cmd);\n        }\n    }\n\n\n    pub fn set_var(&mut self, name: &str, value: &str){\n        if name.is_empty() {\n            return;\n        }\n\n        if value.is_empty() {\n            let mut remove = -1;\n            for i in 0..self.variables.len() {\n                match self.variables.get(i) {\n                    Some(variable) => if variable.name == name {\n                        remove = i as isize;\n                        break;\n                    },\n                    None => break,\n                }\n            }\n\n            if remove >= 0 {\n                self.variables.remove(remove as usize);\n            }\n        } else {\n            for variable in self.variables.iter_mut() {\n                if variable.name == name {\n                    variable.value = value.to_string();\n                    return;\n                }\n            }\n\n            self.variables.push(Variable {\n                name: name.to_string(),\n                value: value.to_string(),\n            });\n        }\n    }\n\n    \/\/\/ Method to return the current directory\n    \/\/\/ If the current directory cannot be found, a default string (\"?\") will be returned\n    pub fn get_current_directory(&mut self) -> String {\n        if let Some(file) = File::open(\"\") {\n            if let Some(path) = file.path() {\n                \/\/ Return the current path\n                return path\n            }\n            \/\/ Return a default string if the path cannot be found\n            else {\n                return \"?\".to_string()\n            }\n        }\n        else {\n            return \"?\".to_string()\n        }\n    }\n\n    \/\/\/ Run the application\n    pub fn main(&mut self) {\n        println!(\"Type help for a command list\");\n        if let Some(arg) = args().get(1) {\n            let command = \"run \".to_string() + arg;\n            println!(\"user@redox:{}# {}\", self.get_current_directory(), command);\n            self.on_command(&command);\n        }\n\n        loop {\n            for mode in self.modes.iter().rev() {\n                if mode.value {\n                    print!(\"+ \");\n                } else {\n                    print!(\"- \");\n                }\n            }\n            print!(\"user@redox:{}# \", self.get_current_directory());\n            if let Some(command_original) = readln!() {\n                let command = command_original.trim();\n                if command == \"exit\" {\n                    println!(\"Exit temporarily blocked (due to using terminal as init)\")\n                    \/\/break;\n                } else if !command.is_empty() {\n                    self.on_command(&command);\n                }\n            } else {\n                println!(\"Failed to read from stdin\");\n            }\n        }\n    }\n}\n\npub fn main() {\n    unsafe {\n        let mut app = box Application::new();\n        application = app.deref_mut();\n        app.main();\n    }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option.
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern mod std;\nextern mod extra;\n\nuse extra::test::{TestOpts, run_tests_console, TestDesc, TestDescAndFn, DynTestFn, DynTestName};\nuse extra::getopts::{getopts, reqopt};\nuse std::{os, run, str};\nuse std::cell::Cell;\nuse std::os::list_dir_path;\n\n#[deriving(Clone)]\nstruct Config {\n source_dir: ~str,\n filter: Option<~str>\n}\n\nfn main() {\n let args = os::args();\n let config = parse_config(args);\n let opts = test_options(config.clone());\n let tests = find_tests(config);\n if !run_tests_console(&opts, tests) {\n os::set_exit_status(1);\n }\n}\n\nfn parse_config(args: ~[~str]) -> Config {\n let args = args.tail();\n let opts = ~[reqopt(\"source-dir\")];\n let matches = match getopts(args, opts) {\n Ok(m) => m,\n Err(f) => fail!(f.to_err_msg())\n };\n\n Config {\n source_dir: matches.opt_str(\"source-dir\").unwrap(),\n filter: if matches.free.is_empty() {\n None\n } else {\n Some((*matches.free.head()).clone())\n }\n }\n}\n\nfn test_options(config: Config) -> TestOpts {\n TestOpts {\n filter: config.filter,\n run_ignored: false,\n run_tests: true,\n run_benchmarks: false,\n ratchet_metrics: None,\n ratchet_noise_percent: None,\n save_metrics: None,\n test_shard: None,\n logfile: None\n }\n}\n\nfn find_tests(config: Config) -> ~[TestDescAndFn] {\n let mut files = list_dir_path(&Path::new(config.source_dir));\n \/\/ FIXME (#1094): not the right way to transform a path\n files.retain( |file| file.display().to_str().ends_with(\".html\") );\n return files.map(|file| make_test(file.display().to_str()) );\n}\n\nfn make_test(file: ~str) -> TestDescAndFn {\n let f = Cell::new(file.clone());\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(file),\n ignore: false,\n should_fail: false\n },\n testfn: DynTestFn(|| { run_test(f.take()) })\n }\n}\n\nfn run_test(file: ~str) {\n let path = os::make_absolute(&Path::new(file));\n \/\/ FIXME (#1094): not the right way to transform a path\n let infile = ~\"file:\/\/\" + path.display().to_str();\n let res = run::process_output(\".\/servo\", [~\"-z\", infile]);\n let out = str::from_utf8(res.output);\n print(out);\n let lines: ~[&str] = out.split_iter('\\n').collect();\n for &line in lines.iter() {\n if line.contains(\"TEST-UNEXPECTED-FAIL\") {\n fail!(line);\n }\n }\n}\n<commit_msg>auto merge of #1176 : jdm\/servo\/harnessoutput, r=metajack<commit_after>\/\/ Copyright 2013 The Servo Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern mod std;\nextern mod extra;\n\nuse extra::test::{TestOpts, run_tests_console, TestDesc, TestDescAndFn, DynTestFn, DynTestName};\nuse extra::getopts::{getopts, reqopt};\nuse std::{os, str};\nuse std::cell::Cell;\nuse std::os::list_dir_path;\nuse std::rt::io::Reader;\nuse std::rt::io::process::{Process, ProcessConfig, Ignored, CreatePipe};\n\n#[deriving(Clone)]\nstruct Config {\n source_dir: ~str,\n filter: Option<~str>\n}\n\nfn main() {\n let args = os::args();\n let config = parse_config(args);\n let opts = test_options(config.clone());\n let tests = find_tests(config);\n if !run_tests_console(&opts, tests) {\n os::set_exit_status(1);\n }\n}\n\nfn parse_config(args: ~[~str]) -> Config {\n let args = args.tail();\n let opts = ~[reqopt(\"source-dir\")];\n let matches = match getopts(args, opts) {\n Ok(m) => m,\n Err(f) => fail!(f.to_err_msg())\n };\n\n Config {\n source_dir: matches.opt_str(\"source-dir\").unwrap(),\n filter: if matches.free.is_empty() {\n None\n } else {\n Some((*matches.free.head()).clone())\n }\n }\n}\n\nfn test_options(config: Config) -> TestOpts {\n TestOpts {\n filter: config.filter,\n run_ignored: false,\n run_tests: true,\n run_benchmarks: false,\n ratchet_metrics: None,\n ratchet_noise_percent: None,\n save_metrics: None,\n test_shard: None,\n logfile: None\n }\n}\n\nfn find_tests(config: Config) -> ~[TestDescAndFn] {\n let mut files = list_dir_path(&Path::new(config.source_dir));\n \/\/ FIXME (#1094): not the right way to transform a path\n files.retain( |file| file.display().to_str().ends_with(\".html\") );\n return files.map(|file| make_test(file.display().to_str()) );\n}\n\nfn make_test(file: ~str) -> TestDescAndFn {\n let f = Cell::new(file.clone());\n TestDescAndFn {\n desc: TestDesc {\n name: DynTestName(file),\n ignore: false,\n should_fail: false\n },\n testfn: DynTestFn(|| { run_test(f.take()) })\n }\n}\n\nfn run_test(file: ~str) {\n let path = os::make_absolute(&Path::new(file));\n \/\/ FIXME (#1094): not the right way to transform a path\n let infile = ~\"file:\/\/\" + path.display().to_str();\n let create_pipe = CreatePipe(true, false); \/\/ rustc #10228\n\n let config = ProcessConfig {\n program: \".\/servo\",\n args: [~\"-z\", infile.clone()],\n env: None,\n cwd: None,\n io: [Ignored, create_pipe, Ignored]\n };\n\n let mut prc = Process::new(config).unwrap();\n let stdout = prc.io[1].get_mut_ref();\n let mut output = ~[];\n loop {\n let byte = stdout.read_byte();\n match byte {\n Some(byte) => {\n print!(\"{}\", byte as char);\n output.push(byte);\n }\n None => break\n }\n }\n\n let out = str::from_utf8(output);\n let lines: ~[&str] = out.split_iter('\\n').collect();\n for &line in lines.iter() {\n if line.contains(\"TEST-UNEXPECTED-FAIL\") {\n fail!(line);\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Time log batches.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add ui test<commit_after>\/\/ check-pass\n\n\/\/ This was an ICE, because the compiler ensures the\n\/\/ function to be const when performing const checking,\n\/\/ but functions marked with the attribute are not const\n\/\/ *and* subject to const checking.\n\n#![feature(staged_api)]\n#![feature(const_trait_impl)]\n#![feature(const_fn_trait_bound)]\n#![stable(since = \"1\", feature = \"foo\")]\n\ntrait Tr {\n #[default_method_body_is_const]\n fn a() {}\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the 
terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A Doug Lea-style concurrent hash map using striped locks.\n\nuse rand;\nuse rand::Rng;\nuse std::cast;\nuse std::hash::{Hash, sip};\nuse std::ptr;\nuse std::sync::atomics::{AtomicUint, Relaxed, SeqCst};\nuse std::unstable::mutex::StaticNativeMutex;\nuse std::mem;\nuse std::slice;\n\n\/\/\/ When the size exceeds (number of buckets * LOAD_NUMERATOR\/LOAD_DENOMINATOR), the hash table\n\/\/\/ grows.\nstatic LOAD_NUMERATOR: uint = 3;\n\n\/\/\/ When the size exceeds (number of buckets * LOAD_NUMERATOR\/LOAD_DENOMINATOR), the hash table\n\/\/\/ grows.\nstatic LOAD_DENOMINATOR: uint = 4;\n\n\/\/\/ One bucket in the hash table.\nstruct Bucket<K,V> {\n next: Option<~Bucket<K,V>>,\n key: K,\n value: V,\n}\n\n\/\/\/ A concurrent hash map using striped locks.\npub struct ConcurrentHashMap<K,V> {\n \/\/\/ The first secret key value.\n k0: u64,\n \/\/\/ The second key value.\n k1: u64,\n \/\/\/ The number of elements in this hash table.\n size: AtomicUint,\n \/\/\/ The striped locks.\n locks: ~[StaticNativeMutex],\n \/\/\/ The buckets.\n buckets: ~[Option<Bucket<K,V>>],\n}\n\nimpl<K:Hash + Eq,V> ConcurrentHashMap<K,V> {\n \/\/\/ Creates a hash map with 16 locks and 4 buckets per lock.\n pub fn new() -> ConcurrentHashMap<K,V> {\n ConcurrentHashMap::with_locks_and_buckets(16, 4)\n }\n\n \/\/\/ Creates a hash map with the given number of locks and buckets per lock.\n pub fn with_locks_and_buckets(lock_count: uint, buckets_per_lock: uint)\n -> ConcurrentHashMap<K,V> {\n let mut rand = rand::task_rng();\n ConcurrentHashMap {\n k0: rand.gen(),\n k1: rand.gen(),\n size: AtomicUint::new(0),\n locks: slice::from_fn(lock_count, |_| {\n unsafe {\n StaticNativeMutex::new()\n }\n }),\n buckets: slice::from_fn(lock_count * buckets_per_lock, |_| None),\n }\n }\n\n \/\/\/ Inserts the given value into the hash table, replacing the value with the previous value\n \/\/\/ if any.\n pub fn insert(&self, key: K, value: V) {\n unsafe {\n let this: &mut ConcurrentHashMap<K,V> = cast::transmute_mut(self);\n\n loop {\n let (bucket_index, lock_index) = self.bucket_and_lock_indices(&key);\n if this.overloaded() {\n this.locks[lock_index].unlock_noguard();\n this.try_resize(self.buckets_per_lock() * 2);\n\n \/\/ Have to retry because the bucket and lock indices will have shifted.\n continue\n }\n\n this.insert_unlocked(key, value, Some(bucket_index));\n this.locks[lock_index].unlock_noguard();\n break\n }\n }\n }\n\n #[inline(always)]\n unsafe fn insert_unlocked(&self, key: K, value: V, opt_bucket_index: Option<uint>) {\n let this: &mut ConcurrentHashMap<K,V> = cast::transmute_mut(self);\n\n let bucket_index = match opt_bucket_index {\n Some(bucket_index) => bucket_index,\n None => self.bucket_index_unlocked(&key),\n };\n\n match this.buckets[bucket_index] {\n None => {\n this.buckets[bucket_index] = Some(Bucket {\n next: None,\n key: key,\n value: value,\n });\n drop(this.size.fetch_add(1, SeqCst));\n }\n Some(ref mut bucket) => {\n \/\/ Search to try to find a value.\n let mut bucket: *mut Bucket<K,V> = bucket;\n loop {\n if (*bucket).key == key {\n (*bucket).value = value;\n break\n }\n\n match (*bucket).next {\n None => {}\n Some(ref mut next_bucket) => {\n bucket = &mut **next_bucket as *mut Bucket<K,V>;\n continue\n }\n }\n\n (*bucket).next = Some(~Bucket {\n next: None,\n key: key,\n value: value,\n });\n drop(this.size.fetch_add(1, SeqCst));\n 
break\n }\n }\n }\n }\n\n \/\/\/ Removes the given key from the hash table.\n pub fn remove(&self, key: &K) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_index, lock_index) = self.bucket_and_lock_indices(key);\n\n \/\/ Rebuild the bucket.\n let mut nuke_bucket = false;\n match this.buckets[bucket_index] {\n None => {}\n Some(ref mut bucket) if bucket.key == *key => {\n \/\/ Common case (assuming a sparse table): If the key is the first one in the\n \/\/ chain, just copy the next fields over.\n let next_opt = mem::replace(&mut bucket.next, None);\n match next_opt {\n None => nuke_bucket = true,\n Some(~next) => *bucket = next,\n }\n drop(this.size.fetch_sub(1, SeqCst))\n }\n Some(ref mut bucket) => {\n \/\/ Rarer case: If the key is elsewhere in the chain (or nowhere), then search for\n \/\/ it and just stitch up pointers.\n let mut prev: *mut Bucket<K,V> = bucket;\n unsafe {\n loop {\n match (*prev).next {\n None => break, \/\/ Not found.\n Some(ref mut bucket) => {\n \/\/ Continue the search.\n if bucket.key != *key {\n prev = &mut **bucket as *mut Bucket<K,V>;\n continue\n }\n }\n }\n\n \/\/ If we got here, then we found the key. Now do a pointer stitch.\n let ~Bucket {\n next: next_next,\n ..\n } = (*prev).next.take_unwrap();\n (*prev).next = next_next;\n drop(this.size.fetch_sub(1, SeqCst));\n break\n }\n }\n }\n }\n if nuke_bucket {\n this.buckets[bucket_index] = None\n }\n\n unsafe {\n this.locks[lock_index].unlock_noguard()\n }\n }\n\n \/\/\/ Returns an iterator over this concurrent map.\n pub fn iter<'a>(&'a self) -> ConcurrentHashMapIterator<'a,K,V> {\n ConcurrentHashMapIterator {\n map: self,\n bucket_index: -1,\n current_bucket: ptr::null(),\n }\n }\n\n \/\/\/ Returns true if the given key is in the map and false otherwise.\n pub fn contains_key(&self, key: &K) -> bool {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_index, lock_index) = this.bucket_and_lock_indices(key);\n\n let result;\n match this.buckets[bucket_index] {\n None => result = false,\n Some(ref bucket) => {\n \/\/ Search to try to find a value.\n let mut bucket = bucket;\n loop {\n if bucket.key == *key {\n result = true;\n break\n }\n match bucket.next {\n None => {\n result = false;\n break\n }\n Some(ref next_bucket) => bucket = &**next_bucket,\n }\n }\n }\n }\n\n unsafe {\n this.locks[lock_index].unlock_noguard()\n }\n\n result\n }\n\n \/\/\/ Removes all entries from the map.\n pub fn clear(&self) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_count, lock_count) = (this.buckets.len(), this.locks.len());\n let buckets_per_lock = bucket_count \/ lock_count;\n\n let (mut lock_index, mut stripe_index) = (0, 0);\n for bucket in this.buckets.mut_iter() {\n stripe_index += 1;\n if stripe_index == buckets_per_lock {\n unsafe {\n this.locks[lock_index].unlock_noguard();\n }\n\n stripe_index = 0;\n lock_index += 1\n }\n if stripe_index == 0 {\n unsafe {\n this.locks[lock_index].lock_noguard()\n }\n }\n\n *bucket = None\n }\n }\n\n \/\/\/ Resizes the map to a new size. Takes all the locks (i.e. 
acquires an exclusive lock on the\n \/\/\/ entire table) as it does so.\n \/\/\/\n \/\/\/ This has no problem with invalidating iterators because iterators always hold onto at least\n \/\/\/ one lock.\n fn try_resize(&self, new_buckets_per_lock: uint) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n \/\/ Take a lock on all buckets.\n for lock in this.locks.mut_iter() {\n unsafe {\n lock.lock_noguard()\n }\n }\n\n \/\/ Check to make sure we aren't already at the right size. Someone else could have already\n \/\/ resized.\n let lock_count = this.locks.len();\n let new_bucket_count = lock_count * new_buckets_per_lock;\n if new_bucket_count > this.buckets.len() {\n \/\/ Create a new set of buckets.\n let mut buckets = slice::from_fn(new_bucket_count, |_| None);\n mem::swap(&mut this.buckets, &mut buckets);\n this.size.store(0, Relaxed);\n\n \/\/ Go through all the old buckets and insert the new data.\n for bucket in buckets.move_iter() {\n match bucket {\n None => continue,\n Some(Bucket {\n key: key,\n value: value,\n next: mut bucket\n }) => {\n unsafe {\n this.insert_unlocked(key, value, None)\n }\n\n loop {\n match bucket {\n None => break,\n Some(~Bucket {\n key: key,\n value: value,\n next: next\n }) => {\n unsafe {\n this.insert_unlocked(key, value, None)\n }\n\n bucket = next\n }\n }\n }\n }\n }\n }\n }\n\n \/\/ Release all our locks.\n for lock in this.locks.mut_iter() {\n unsafe {\n lock.unlock_noguard()\n }\n }\n }\n\n \/\/\/ Returns the index of the bucket and the lock for the given key, respectively, taking the\n \/\/\/ appropriate lock before returning. This is subtle: it contains a loop to deal with race\n \/\/\/ conditions in which the bucket array might have resized.\n #[inline]\n fn bucket_and_lock_indices(&self, key: &K) -> (uint, uint) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(cast::transmute_region(self))\n };\n\n let hash = sip::hash_with_keys(self.k0, self.k1, key);\n let lock_count = this.locks.len();\n let mut bucket_index;\n let mut lock_index;\n loop {\n let bucket_count = this.buckets.len();\n let buckets_per_lock = bucket_count \/ lock_count;\n bucket_index = hash as uint % bucket_count;\n lock_index = bucket_index \/ buckets_per_lock;\n unsafe {\n this.locks[lock_index].lock_noguard();\n }\n let new_bucket_count = this.buckets.len();\n if bucket_count == new_bucket_count {\n break\n }\n\n \/\/ If we got here, the hash table resized from under us: try again.\n unsafe {\n this.locks[lock_index].unlock_noguard()\n }\n }\n\n (bucket_index, lock_index)\n }\n\n \/\/\/ Returns the index of the bucket. 
You must be holding at least one lock to call this\n \/\/\/ function!\n #[inline]\n unsafe fn bucket_index_unlocked(&self, key: &K) -> uint {\n let hash = sip::hash_with_keys(self.k0, self.k1, key);\n hash as uint % self.buckets.len()\n }\n\n \/\/\/ Returns true if this hash table is overloaded (at its current load factor, default 0.75)\n \/\/\/ and false otherwise.\n #[inline]\n fn overloaded(&self) -> bool {\n self.size.load(SeqCst) >= (self.buckets.len() * LOAD_NUMERATOR \/ LOAD_DENOMINATOR)\n }\n\n \/\/\/ Returns the number of buckets per lock.\n #[inline]\n fn buckets_per_lock(&self) -> uint {\n self.buckets.len() \/ self.locks.len()\n }\n\n \/\/\/ Returns the number of elements in the hash table.\n #[inline]\n pub fn size(&self) -> uint {\n self.size.load(SeqCst)\n }\n}\n\npub struct ConcurrentHashMapIterator<'a,K,V> {\n map: &'a ConcurrentHashMap<K,V>,\n bucket_index: int,\n current_bucket: *Bucket<K,V>,\n}\n\nimpl<'a,K,V> Iterator<(&'a K, &'a V)> for ConcurrentHashMapIterator<'a,K,V> {\n fn next(&mut self) -> Option<(&'a K, &'a V)> {\n let map: &'a mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self.map)\n };\n\n let (bucket_count, lock_count) = (map.buckets.len(), map.locks.len());\n let buckets_per_lock = bucket_count \/ lock_count;\n\n \/\/ Go to the next bucket in the chain, if necessary.\n if self.current_bucket != ptr::null() {\n unsafe {\n self.current_bucket = match (*self.current_bucket).next {\n None => ptr::null(),\n Some(ref bucket) => {\n let bucket: *Bucket<K,V> = &**bucket;\n bucket\n }\n }\n }\n }\n\n \/\/ Advance buckets, taking locks along the way if necessary.\n while self.current_bucket == ptr::null() {\n let bucket_index = self.bucket_index;\n let lock_index = if bucket_index < 0 {\n -1\n } else {\n bucket_index \/ (buckets_per_lock as int)\n };\n\n if bucket_index < 0 ||\n bucket_index % (buckets_per_lock as int) == (buckets_per_lock as int) - 1 {\n \/\/ We're at the boundary between one lock and another. Drop the old lock if\n \/\/ necessary and acquire the new one, if necessary.\n if bucket_index != -1 {\n unsafe {\n map.locks[lock_index as uint].unlock_noguard()\n }\n }\n if bucket_index != (bucket_count as int) - 1 {\n unsafe {\n map.locks[(lock_index + 1) as uint].lock_noguard()\n }\n }\n }\n\n \/\/ If at end, return None.\n if self.bucket_index == (bucket_count as int) - 1 {\n return None\n }\n\n self.bucket_index += 1;\n\n self.current_bucket = match map.buckets[self.bucket_index as uint] {\n None => ptr::null(),\n Some(ref bucket) => {\n let bucket: *Bucket<K,V> = bucket;\n bucket\n }\n }\n }\n\n unsafe {\n Some((cast::transmute(&(*self.current_bucket).key),\n cast::transmute(&(*self.current_bucket).value)))\n }\n }\n}\n\n#[cfg(test)]\npub mod test {\n use sync::Arc;\n use native;\n use std::comm;\n\n use concurrentmap::ConcurrentHashMap;\n\n #[test]\n pub fn smoke() {\n let m = Arc::new(ConcurrentHashMap::new());\n let (chan, port) = comm::channel();\n\n \/\/ Big enough to make it resize once.\n for i in range(0, 5) {\n let m = m.clone();\n let chan = chan.clone();\n native::task::spawn(proc() {\n for j in range(i * 20, (i * 20) + 20) {\n m.insert(j, j * j);\n }\n chan.send(());\n })\n }\n for _ in range(0, 5) {\n port.recv();\n }\n\n let mut count = 0;\n for (&k, &v) in m.iter() {\n assert_eq!(k * k, v)\n count += 1;\n }\n assert_eq!(count, 100)\n }\n}\n\n<commit_msg>Use Vec in concurrentmap.rs.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. 
If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A Doug Lea-style concurrent hash map using striped locks.\n\nuse rand;\nuse rand::Rng;\nuse std::cast;\nuse std::hash::{Hash, sip};\nuse std::ptr;\nuse std::sync::atomics::{AtomicUint, Relaxed, SeqCst};\nuse std::unstable::mutex::StaticNativeMutex;\nuse std::mem;\n\n\/\/\/ When the size exceeds (number of buckets * LOAD_NUMERATOR\/LOAD_DENOMINATOR), the hash table\n\/\/\/ grows.\nstatic LOAD_NUMERATOR: uint = 3;\n\n\/\/\/ When the size exceeds (number of buckets * LOAD_NUMERATOR\/LOAD_DENOMINATOR), the hash table\n\/\/\/ grows.\nstatic LOAD_DENOMINATOR: uint = 4;\n\n\/\/\/ One bucket in the hash table.\nstruct Bucket<K,V> {\n next: Option<~Bucket<K,V>>,\n key: K,\n value: V,\n}\n\n\/\/\/ A concurrent hash map using striped locks.\npub struct ConcurrentHashMap<K,V> {\n \/\/\/ The first secret key value.\n k0: u64,\n \/\/\/ The second key value.\n k1: u64,\n \/\/\/ The number of elements in this hash table.\n size: AtomicUint,\n \/\/\/ The striped locks.\n locks: Vec<StaticNativeMutex>,\n \/\/\/ The buckets.\n buckets: Vec<Option<Bucket<K,V>>>,\n}\n\nimpl<K:Hash + Eq,V> ConcurrentHashMap<K,V> {\n \/\/\/ Creates a hash map with 16 locks and 4 buckets per lock.\n pub fn new() -> ConcurrentHashMap<K,V> {\n ConcurrentHashMap::with_locks_and_buckets(16, 4)\n }\n\n \/\/\/ Creates a hash map with the given number of locks and buckets per lock.\n pub fn with_locks_and_buckets(lock_count: uint, buckets_per_lock: uint)\n -> ConcurrentHashMap<K,V> {\n let mut rand = rand::task_rng();\n ConcurrentHashMap {\n k0: rand.gen(),\n k1: rand.gen(),\n size: AtomicUint::new(0),\n locks: Vec::from_fn(lock_count, |_| {\n unsafe {\n StaticNativeMutex::new()\n }\n }),\n buckets: Vec::from_fn(lock_count * buckets_per_lock, |_| None),\n }\n }\n\n \/\/\/ Inserts the given value into the hash table, replacing the value with the previous value\n \/\/\/ if any.\n pub fn insert(&self, key: K, value: V) {\n unsafe {\n let this: &mut ConcurrentHashMap<K,V> = cast::transmute_mut(self);\n\n loop {\n let (bucket_index, lock_index) = self.bucket_and_lock_indices(&key);\n if this.overloaded() {\n this.locks.get(lock_index).unlock_noguard();\n this.try_resize(self.buckets_per_lock() * 2);\n\n \/\/ Have to retry because the bucket and lock indices will have shifted.\n continue\n }\n\n this.insert_unlocked(key, value, Some(bucket_index));\n this.locks.get(lock_index).unlock_noguard();\n break\n }\n }\n }\n\n #[inline(always)]\n unsafe fn insert_unlocked(&self, key: K, value: V, opt_bucket_index: Option<uint>) {\n let this: &mut ConcurrentHashMap<K,V> = cast::transmute_mut(self);\n\n let bucket_index = match opt_bucket_index {\n Some(bucket_index) => bucket_index,\n None => self.bucket_index_unlocked(&key),\n };\n\n match this.buckets.get_mut(bucket_index) {\n &None => {}\n &Some(ref mut bucket) => {\n \/\/ Search to try to find a value.\n let mut bucket: *mut Bucket<K,V> = bucket;\n loop {\n if (*bucket).key == key {\n (*bucket).value = value;\n break\n }\n\n match (*bucket).next {\n None => {}\n Some(ref mut next_bucket) => {\n bucket = &mut **next_bucket as *mut Bucket<K,V>;\n continue\n }\n }\n\n (*bucket).next = Some(~Bucket {\n next: None,\n key: key,\n value: value,\n });\n drop(this.size.fetch_add(1, SeqCst));\n break\n }\n return;\n }\n }\n *this.buckets.get_mut(bucket_index) = Some(Bucket {\n next: None,\n key: key,\n value: value,\n });\n drop(this.size.fetch_add(1, SeqCst));\n }\n\n \/\/\/ 
Removes the given key from the hash table.\n pub fn remove(&self, key: &K) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_index, lock_index) = self.bucket_and_lock_indices(key);\n\n \/\/ Rebuild the bucket.\n let mut nuke_bucket = false;\n match this.buckets.get_mut(bucket_index) {\n &None => {}\n &Some(ref mut bucket) if bucket.key == *key => {\n \/\/ Common case (assuming a sparse table): If the key is the first one in the\n \/\/ chain, just copy the next fields over.\n let next_opt = mem::replace(&mut bucket.next, None);\n match next_opt {\n None => nuke_bucket = true,\n Some(~next) => *bucket = next,\n }\n drop(this.size.fetch_sub(1, SeqCst))\n }\n &Some(ref mut bucket) => {\n \/\/ Rarer case: If the key is elsewhere in the chain (or nowhere), then search for\n \/\/ it and just stitch up pointers.\n let mut prev: *mut Bucket<K,V> = bucket;\n unsafe {\n loop {\n match (*prev).next {\n None => break, \/\/ Not found.\n Some(ref mut bucket) => {\n \/\/ Continue the search.\n if bucket.key != *key {\n prev = &mut **bucket as *mut Bucket<K,V>;\n continue\n }\n }\n }\n\n \/\/ If we got here, then we found the key. Now do a pointer stitch.\n let ~Bucket {\n next: next_next,\n ..\n } = (*prev).next.take_unwrap();\n (*prev).next = next_next;\n drop(this.size.fetch_sub(1, SeqCst));\n break\n }\n }\n }\n }\n if nuke_bucket {\n *this.buckets.get_mut(bucket_index) = None\n }\n\n unsafe {\n this.locks.get(lock_index).unlock_noguard()\n }\n }\n\n \/\/\/ Returns an iterator over this concurrent map.\n pub fn iter<'a>(&'a self) -> ConcurrentHashMapIterator<'a,K,V> {\n ConcurrentHashMapIterator {\n map: self,\n bucket_index: -1,\n current_bucket: ptr::null(),\n }\n }\n\n \/\/\/ Returns true if the given key is in the map and false otherwise.\n pub fn contains_key(&self, key: &K) -> bool {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_index, lock_index) = this.bucket_and_lock_indices(key);\n\n let result;\n match this.buckets.get(bucket_index) {\n &None => result = false,\n &Some(ref bucket) => {\n \/\/ Search to try to find a value.\n let mut bucket = bucket;\n loop {\n if bucket.key == *key {\n result = true;\n break\n }\n match bucket.next {\n None => {\n result = false;\n break\n }\n Some(ref next_bucket) => bucket = &**next_bucket,\n }\n }\n }\n }\n\n unsafe {\n this.locks.get(lock_index).unlock_noguard()\n }\n\n result\n }\n\n \/\/\/ Removes all entries from the map.\n pub fn clear(&self) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n let (bucket_count, lock_count) = (this.buckets.len(), this.locks.len());\n let buckets_per_lock = bucket_count \/ lock_count;\n\n let (mut lock_index, mut stripe_index) = (0, 0);\n for bucket in this.buckets.mut_iter() {\n stripe_index += 1;\n if stripe_index == buckets_per_lock {\n unsafe {\n this.locks.get(lock_index).unlock_noguard();\n }\n\n stripe_index = 0;\n lock_index += 1\n }\n if stripe_index == 0 {\n unsafe {\n this.locks.get(lock_index).lock_noguard()\n }\n }\n\n *bucket = None\n }\n }\n\n \/\/\/ Resizes the map to a new size. Takes all the locks (i.e. 
acquires an exclusive lock on the\n \/\/\/ entire table) as it does so.\n \/\/\/\n \/\/\/ This has no problem with invalidating iterators because iterators always hold onto at least\n \/\/\/ one lock.\n fn try_resize(&self, new_buckets_per_lock: uint) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self)\n };\n\n \/\/ Take a lock on all buckets.\n for lock in this.locks.mut_iter() {\n unsafe {\n lock.lock_noguard()\n }\n }\n\n \/\/ Check to make sure we aren't already at the right size. Someone else could have already\n \/\/ resized.\n let lock_count = this.locks.len();\n let new_bucket_count = lock_count * new_buckets_per_lock;\n if new_bucket_count > this.buckets.len() {\n \/\/ Create a new set of buckets.\n let mut buckets = Vec::from_fn(new_bucket_count, |_| None);\n mem::swap(&mut this.buckets, &mut buckets);\n this.size.store(0, Relaxed);\n\n \/\/ Go through all the old buckets and insert the new data.\n for bucket in buckets.move_iter() {\n match bucket {\n None => continue,\n Some(Bucket {\n key: key,\n value: value,\n next: mut bucket\n }) => {\n unsafe {\n this.insert_unlocked(key, value, None)\n }\n\n loop {\n match bucket {\n None => break,\n Some(~Bucket {\n key: key,\n value: value,\n next: next\n }) => {\n unsafe {\n this.insert_unlocked(key, value, None)\n }\n\n bucket = next\n }\n }\n }\n }\n }\n }\n }\n\n \/\/ Release all our locks.\n for lock in this.locks.mut_iter() {\n unsafe {\n lock.unlock_noguard()\n }\n }\n }\n\n \/\/\/ Returns the index of the bucket and the lock for the given key, respectively, taking the\n \/\/\/ appropriate lock before returning. This is subtle: it contains a loop to deal with race\n \/\/\/ conditions in which the bucket array might have resized.\n #[inline]\n fn bucket_and_lock_indices(&self, key: &K) -> (uint, uint) {\n let this: &mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(cast::transmute_region(self))\n };\n\n let hash = sip::hash_with_keys(self.k0, self.k1, key);\n let lock_count = this.locks.len();\n let mut bucket_index;\n let mut lock_index;\n loop {\n let bucket_count = this.buckets.len();\n let buckets_per_lock = bucket_count \/ lock_count;\n bucket_index = hash as uint % bucket_count;\n lock_index = bucket_index \/ buckets_per_lock;\n unsafe {\n this.locks.get(lock_index).lock_noguard();\n }\n let new_bucket_count = this.buckets.len();\n if bucket_count == new_bucket_count {\n break\n }\n\n \/\/ If we got here, the hash table resized from under us: try again.\n unsafe {\n this.locks.get(lock_index).unlock_noguard()\n }\n }\n\n (bucket_index, lock_index)\n }\n\n \/\/\/ Returns the index of the bucket. 
You must be holding at least one lock to call this\n \/\/\/ function!\n #[inline]\n unsafe fn bucket_index_unlocked(&self, key: &K) -> uint {\n let hash = sip::hash_with_keys(self.k0, self.k1, key);\n hash as uint % self.buckets.len()\n }\n\n \/\/\/ Returns true if this hash table is overloaded (at its current load factor, default 0.75)\n \/\/\/ and false otherwise.\n #[inline]\n fn overloaded(&self) -> bool {\n self.size.load(SeqCst) >= (self.buckets.len() * LOAD_NUMERATOR \/ LOAD_DENOMINATOR)\n }\n\n \/\/\/ Returns the number of buckets per lock.\n #[inline]\n fn buckets_per_lock(&self) -> uint {\n self.buckets.len() \/ self.locks.len()\n }\n\n \/\/\/ Returns the number of elements in the hash table.\n #[inline]\n pub fn size(&self) -> uint {\n self.size.load(SeqCst)\n }\n}\n\npub struct ConcurrentHashMapIterator<'a,K,V> {\n map: &'a ConcurrentHashMap<K,V>,\n bucket_index: int,\n current_bucket: *Bucket<K,V>,\n}\n\nimpl<'a,K,V> Iterator<(&'a K, &'a V)> for ConcurrentHashMapIterator<'a,K,V> {\n fn next(&mut self) -> Option<(&'a K, &'a V)> {\n let map: &'a mut ConcurrentHashMap<K,V> = unsafe {\n cast::transmute_mut(self.map)\n };\n\n let (bucket_count, lock_count) = (map.buckets.len(), map.locks.len());\n let buckets_per_lock = bucket_count \/ lock_count;\n\n \/\/ Go to the next bucket in the chain, if necessary.\n if self.current_bucket != ptr::null() {\n unsafe {\n self.current_bucket = match (*self.current_bucket).next {\n None => ptr::null(),\n Some(ref bucket) => {\n let bucket: *Bucket<K,V> = &**bucket;\n bucket\n }\n }\n }\n }\n\n \/\/ Advance buckets, taking locks along the way if necessary.\n while self.current_bucket == ptr::null() {\n let bucket_index = self.bucket_index;\n let lock_index = if bucket_index < 0 {\n -1\n } else {\n bucket_index \/ (buckets_per_lock as int)\n };\n\n if bucket_index < 0 ||\n bucket_index % (buckets_per_lock as int) == (buckets_per_lock as int) - 1 {\n \/\/ We're at the boundary between one lock and another. Drop the old lock if\n \/\/ necessary and acquire the new one, if necessary.\n if bucket_index != -1 {\n unsafe {\n map.locks.get(lock_index as uint).unlock_noguard()\n }\n }\n if bucket_index != (bucket_count as int) - 1 {\n unsafe {\n map.locks.get((lock_index + 1) as uint).lock_noguard()\n }\n }\n }\n\n \/\/ If at end, return None.\n if self.bucket_index == (bucket_count as int) - 1 {\n return None\n }\n\n self.bucket_index += 1;\n\n self.current_bucket = match map.buckets.get(self.bucket_index as uint) {\n &None => ptr::null(),\n &Some(ref bucket) => {\n let bucket: *Bucket<K,V> = bucket;\n bucket\n }\n }\n }\n\n unsafe {\n Some((cast::transmute(&(*self.current_bucket).key),\n cast::transmute(&(*self.current_bucket).value)))\n }\n }\n}\n\n#[cfg(test)]\npub mod test {\n use sync::Arc;\n use native;\n use std::comm;\n\n use concurrentmap::ConcurrentHashMap;\n\n #[test]\n pub fn smoke() {\n let m = Arc::new(ConcurrentHashMap::new());\n let (chan, port) = comm::channel();\n\n \/\/ Big enough to make it resize once.\n for i in range(0, 5) {\n let m = m.clone();\n let chan = chan.clone();\n native::task::spawn(proc() {\n for j in range(i * 20, (i * 20) + 20) {\n m.insert(j, j * j);\n }\n chan.send(());\n })\n }\n for _ in range(0, 5) {\n port.recv();\n }\n\n let mut count = 0;\n for (&k, &v) in m.iter() {\n assert_eq!(k * k, v)\n count += 1;\n }\n assert_eq!(count, 100)\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\n\nE0373: r##\"\nThis error occurs when an attempt is made to use data captured by a closure,\nwhen that data may no longer exist. It's most commonly seen when attempting to\nreturn a closure:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n    let x = 0u32;\n    Box::new(|y| x + y)\n}\n```\n\nNotice that `x` is stack-allocated by `foo()`. By default, Rust captures\nclosed-over data by reference. This means that once `foo()` returns, `x` no\nlonger exists. An attempt to access `x` within the closure would thus be unsafe.\n\nAnother situation where this might be encountered is when spawning threads:\n\n```\nfn foo() {\n    let x = 0u32;\n    let y = 1u32;\n\n    let thr = std::thread::spawn(|| {\n        x + y\n    });\n}\n```\n\nSince our new thread runs in parallel, the stack frame containing `x` and `y`\nmay well have disappeared by the time we try to use them. Even if we call\n`thr.join()` within foo (which blocks until `thr` has completed, ensuring the\nstack frame won't disappear), we will not succeed: the compiler cannot prove\nthat this behaviour is safe, and so won't let us do it.\n\nThe solution to this problem is usually to switch to using a `move` closure.\nThis approach moves (or copies, where possible) data into the closure, rather\nthan taking references to it. For example:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n    let x = 0u32;\n    Box::new(move |y| x + y)\n}\n```\n\nNow that the closure has its own copy of the data, there's no need to worry\nabout safety.\n\"##,\n\nE0381: r##\"\nIt is not allowed to use or capture an uninitialized variable. For example:\n\n```\nfn main() {\n    let x: i32;\n    let y = x; \/\/ error, use of possibly uninitialized variable\n}\n```\n\nTo fix this, ensure that any declared variables are initialized before being\nused.\n\"##,\n\nE0382: r##\"\nThis error occurs when an attempt is made to use a variable after its contents\nhave been moved elsewhere. For example:\n\n```\nstruct MyStruct { s: u32 }\n\nfn main() {\n    let mut x = MyStruct{ s: 5u32 };\n    let y = x;\n    x.s = 6;\n    println!(\"{}\", x.s);\n}\n```\n\nSince `MyStruct` is a type that is not marked `Copy`, the data gets moved out\nof `x` when we set `y`. This is fundamental to Rust's ownership system: outside\nof workarounds like `Rc`, a value cannot be owned by more than one variable.\n\nIf we own the type, the easiest way to address this problem is to implement\n`Copy` and `Clone` on it, as shown below. This allows `y` to copy the\ninformation in `x`, while leaving the original version owned by `x`.
Subsequent\nchanges to `x` will not be reflected when accessing `y`.\n\n```\n#[derive(Copy, Clone)]\nstruct MyStruct { s: u32 }\n\nfn main() {\n    let mut x = MyStruct{ s: 5u32 };\n    let y = x;\n    x.s = 6;\n    println!(\"{}\", x.s);\n}\n```\n\nAlternatively, if we don't control the struct's definition, or mutable shared\nownership is truly required, we can use `Rc` and `RefCell`:\n\n```\nuse std::cell::RefCell;\nuse std::rc::Rc;\n\nstruct MyStruct { s: u32 }\n\nfn main() {\n    let x = Rc::new(RefCell::new(MyStruct{ s: 5u32 }));\n    let y = x.clone();\n    x.borrow_mut().s = 6;\n    println!(\"{}\", x.borrow().s);\n}\n```\n\nWith this approach, `x` and `y` share ownership of the data via the `Rc` (reference\ncount type). `RefCell` essentially performs runtime borrow checking: ensuring\nthat at most one writer or multiple readers can access the data at any one time.\n\nIf you wish to learn more about ownership in Rust, start with the chapter in the\nBook:\n\nhttps:\/\/doc.rust-lang.org\/book\/ownership.html\n\"##,\n\nE0384: r##\"\nThis error occurs when an attempt is made to reassign an immutable variable.\nFor example:\n\n```\nfn main(){\n    let x = 3;\n    x = 5; \/\/ error, reassignment of immutable variable\n}\n```\n\nBy default, variables in Rust are immutable. To fix this error, add the keyword\n`mut` after the keyword `let` when declaring the variable. For example:\n\n```\nfn main(){\n    let mut x = 3;\n    x = 5;\n}\n```\n\"##,\n\nE0387: r##\"\nThis error occurs when an attempt is made to mutate or mutably reference data\nthat a closure has captured immutably. Examples of this error are shown below:\n\n```\n\/\/ Accepts a function or a closure that captures its environment immutably.\n\/\/ Closures passed to foo will not be able to mutate their closed-over state.\nfn foo<F: Fn()>(f: F) { }\n\n\/\/ Attempts to mutate closed-over data. Error message reads:\n\/\/ `cannot assign to data in a captured outer variable...`\nfn mutable() {\n    let mut x = 0u32;\n    foo(|| x = 2);\n}\n\n\/\/ Attempts to take a mutable reference to closed-over data. Error message\n\/\/ reads: `cannot borrow data mutably in a captured outer variable...`\nfn mut_addr() {\n    let mut x = 0u32;\n    foo(|| { let y = &mut x; });\n}\n```\n\nThe problem here is that foo is defined as accepting a parameter of type `Fn`.\nClosures passed into foo will thus be inferred to be of type `Fn`, meaning that\nthey capture their context immutably.\n\nIf the definition of `foo` is under your control, the simplest solution is to\ncapture the data mutably. This can be done by defining `foo` to take FnMut\nrather than Fn:\n\n```\nfn foo<F: FnMut()>(f: F) { }\n```\n\nAlternatively, we can consider using the `Cell` and `RefCell` types to achieve\ninterior mutability through a shared reference. Our example's `mutable` function\ncould be redefined as below:\n\n```\nuse std::cell::Cell;\n\nfn mutable() {\n    let x = Cell::new(0u32);\n    foo(|| x.set(2));\n}\n```\n\nYou can read more about cell types in the API documentation:\n\nhttps:\/\/doc.rust-lang.org\/std\/cell\/\n\"##\n\n}\n\nregister_diagnostics! {\n    E0383, \/\/ partial reinitialization of uninitialized structure\n    E0385, \/\/ {} in an aliasable location\n    E0386, \/\/ {} in an immutable container\n    E0388, \/\/ {} in a static location\n    E0389 \/\/ {} in a `&` reference\n}\n<commit_msg>Add detailed diagnostics for E0383.<commit_after>\/\/ Copyright 2014 The Rust Project Developers.
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(non_snake_case)]\n\nregister_long_diagnostics! {\n\nE0373: r##\"\nThis error occurs when an attempt is made to use data captured by a closure,\nwhen that data may no longer exist. It's most commonly seen when attempting to\nreturn a closure:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n    let x = 0u32;\n    Box::new(|y| x + y)\n}\n```\n\nNotice that `x` is stack-allocated by `foo()`. By default, Rust captures\nclosed-over data by reference. This means that once `foo()` returns, `x` no\nlonger exists. An attempt to access `x` within the closure would thus be unsafe.\n\nAnother situation where this might be encountered is when spawning threads:\n\n```\nfn foo() {\n    let x = 0u32;\n    let y = 1u32;\n\n    let thr = std::thread::spawn(|| {\n        x + y\n    });\n}\n```\n\nSince our new thread runs in parallel, the stack frame containing `x` and `y`\nmay well have disappeared by the time we try to use them. Even if we call\n`thr.join()` within foo (which blocks until `thr` has completed, ensuring the\nstack frame won't disappear), we will not succeed: the compiler cannot prove\nthat this behaviour is safe, and so won't let us do it.\n\nThe solution to this problem is usually to switch to using a `move` closure.\nThis approach moves (or copies, where possible) data into the closure, rather\nthan taking references to it. For example:\n\n```\nfn foo() -> Box<Fn(u32) -> u32> {\n    let x = 0u32;\n    Box::new(move |y| x + y)\n}\n```\n\nNow that the closure has its own copy of the data, there's no need to worry\nabout safety.\n\"##,\n\nE0381: r##\"\nIt is not allowed to use or capture an uninitialized variable. For example:\n\n```\nfn main() {\n    let x: i32;\n    let y = x; \/\/ error, use of possibly uninitialized variable\n}\n```\n\nTo fix this, ensure that any declared variables are initialized before being\nused.\n\"##,\n\nE0382: r##\"\nThis error occurs when an attempt is made to use a variable after its contents\nhave been moved elsewhere. For example:\n\n```\nstruct MyStruct { s: u32 }\n\nfn main() {\n    let mut x = MyStruct{ s: 5u32 };\n    let y = x;\n    x.s = 6;\n    println!(\"{}\", x.s);\n}\n```\n\nSince `MyStruct` is a type that is not marked `Copy`, the data gets moved out\nof `x` when we set `y`. This is fundamental to Rust's ownership system: outside\nof workarounds like `Rc`, a value cannot be owned by more than one variable.\n\nIf we own the type, the easiest way to address this problem is to implement\n`Copy` and `Clone` on it, as shown below. This allows `y` to copy the\ninformation in `x`, while leaving the original version owned by `x`.
Subsequent\nchanges to `x` will not be reflected when accessing `y`.\n\n```\n#[derive(Copy, Clone)]\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = MyStruct{ s: 5u32 };\n let y = x;\n x.s = 6;\n println!(\"{}\", x.s);\n}\n```\n\nAlternatively, if we don't control the struct's definition, or mutable shared\nownership is truly required, we can use `Rc` and `RefCell`:\n\n```\nuse std::cell::RefCell;\nuse std::rc::Rc;\n\nstruct MyStruct { s: u32 }\n\nfn main() {\n let mut x = Rc::new(RefCell::new(MyStruct{ s: 5u32 }));\n let y = x.clone();\n x.borrow_mut().s = 6;\n println!(\"{}\", x.borrow.s);\n}\n```\n\nWith this approach, x and y share ownership of the data via the `Rc` (reference\ncount type). `RefCell` essentially performs runtime borrow checking: ensuring\nthat at most one writer or multiple readers can access the data at any one time.\n\nIf you wish to learn more about ownership in Rust, start with the chapter in the\nBook:\n\nhttps:\/\/doc.rust-lang.org\/book\/ownership.html\n\"##,\n\nE0383: r##\"\nThis error occurs when an attempt is made to partially reinitialize a\nstructure that is currently uninitialized.\n\nFor example, this can happen when a transfer of ownership has taken place:\n\n```\nlet mut t = Test { a: 1, b: None};\nlet mut u = Test { a: 2, b: Some(Box::new(t))}; \/\/ `t` is now uninitialized\n \/\/ because ownership has been\n \/\/ transferred\nt.b = Some(Box::new(u)); \/\/ error, partial reinitialization of uninitialized\n \/\/ structure `t`\n```\n\nThis error can be fixed by fully reinitializing the structure in question:\n\n```\nlet mut t = Test { a: 1, b: None};\nlet mut u = Test { a: 2, b: Some(Box::new(t))};\nt = Test { a: 1, b: Some(Box::new(u))};\n```\n\"##,\n\nE0384: r##\"\nThis error occurs when an attempt is made to reassign an immutable variable.\nFor example:\n\n```\nfn main(){\n let x = 3;\n x = 5; \/\/ error, reassignment of immutable variable\n}\n```\n\nBy default, variables in Rust are immutable. To fix this error, add the keyword\n`mut` after the keyword `let` when declaring the variable. For example:\n\n```\nfn main(){\n let mut x = 3;\n x = 5;\n}\n```\n\"##,\n\nE0387: r##\"\nThis error occurs when an attempt is made to mutate or mutably reference data\nthat a closure has captured immutably. Examples of this error are shown below:\n\n```\n\/\/ Accepts a function or a closure that captures its environment immutably.\n\/\/ Closures passed to foo will not be able to mutate their closed-over state.\nfn foo<F: Fn()>(f: F) { }\n\n\/\/ Attempts to mutate closed-over data. Error message reads:\n\/\/ `cannot assign to data in a captured outer variable...`\nfn mutable() {\n let mut x = 0u32;\n foo(|| x = 2);\n}\n\n\/\/ Attempts to take a mutable reference to closed-over data. Error message\n\/\/ reads: `cannot borrow data mutably in a captured outer variable...`\nfn mut_addr() {\n let mut x = 0u32;\n foo(|| { let y = &mut x; });\n}\n```\n\nThe problem here is that foo is defined as accepting a parameter of type `Fn`.\nClosures passed into foo will thus be inferred to be of type `Fn`, meaning that\nthey capture their context immutably.\n\nIf the definition of `foo` is under your control, the simplest solution is to\ncapture the data mutably. This can be done by defining `foo` to take FnMut\nrather than Fn:\n\n```\nfn foo<F: FnMut()>(f: F) { }\n```\n\nAlternatively, we can consider using the `Cell` and `RefCell` types to achieve\ninterior mutability through a shared reference. 
Our example's `mutable` function\ncould be redefined as below:\n\n```\nuse std::cell::Cell;\n\nfn mutable() {\n let x = Cell::new(0u32);\n foo(|| x.set(2));\n}\n```\n\nYou can read more about cell types in the API documentation:\n\nhttps:\/\/doc.rust-lang.org\/std\/cell\/\n\"##\n\n}\n\nregister_diagnostics! {\n E0385, \/\/ {} in an aliasable location\n E0386, \/\/ {} in an immutable container\n E0388, \/\/ {} in a static location\n E0389 \/\/ {} in a `&` reference\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rollup merge of #17586 : Sawyer47\/test-16465<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Used to cause an ICE\n\nstruct Foo<T>{\n x : T\n}\n\ntype FooInt = Foo<int>;\n\nimpl Drop for FooInt {\n\/\/~^ ERROR cannot implement a destructor on a structure with type parameters \n fn drop(&mut self){}\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #34194<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![allow(dead_code)]\n\nstruct A {\n a: &'static (),\n}\n\nstatic B: &'static A = &A { a: &() };\nstatic C: &'static A = &B;\n\/\/~^ ERROR cannot refer to other statics by value\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a mir-opt test<commit_after>#![feature(never_type)]\n\npub enum Void {}\n\n#[no_mangle]\npub fn process_never(input: *const !) {\n let _input = unsafe { &*input };\n}\n\n#[no_mangle]\npub fn process_void(input: *const Void) {\n let _input = unsafe { &*input };\n \/\/ In the future, this should end with `unreachable`, but we currently only do\n \/\/ unreachability analysis for `!`.\n}\n\nfn main() {}\n\n\/\/ END RUST SOURCE\n\/\/\n\/\/ START rustc.process_never.SimplifyLocals.after.mir\n\/\/ bb0: {\n\/\/ StorageLive(_2);\n\/\/ _2 = &(*_1);\n\/\/ StorageDead(_2);\n\/\/ unreachable;\n\/\/ }\n\/\/ END rustc.process_never.SimplifyLocals.after.mir\n\/\/\n\/\/ START rustc.process_void.SimplifyLocals.after.mir\n\/\/ bb0: {\n\/\/ StorageLive(_2);\n\/\/ _2 = &(*_1);\n\/\/ StorageDead(_2);\n\/\/ return;\n\/\/ }\n\/\/ END rustc.process_void.SimplifyLocals.after.mir\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate toml;\nextern crate rustc_serialize;\n\nuse std::collections::{BTreeMap, HashMap};\nuse std::env;\nuse std::fs::File;\nuse std::io::{self, Read, Write};\nuse std::path::{PathBuf, Path};\nuse std::process::{Command, Stdio};\n\nstatic HOSTS: &'static [&'static str] = &[\n \"aarch64-unknown-linux-gnu\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-gnueabihf\",\n \"i686-apple-darwin\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-linux-gnu\",\n \"mips-unknown-linux-gnu\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"x86_64-apple-darwin\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic TARGETS: &'static [&'static str] = &[\n \"aarch64-apple-ios\",\n \"aarch64-linux-android\",\n \"aarch64-unknown-linux-gnu\",\n \"arm-linux-androideabi\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"arm-unknown-linux-musleabi\",\n \"arm-unknown-linux-musleabihf\",\n \"armv7-apple-ios\",\n \"armv7-linux-androideabi\",\n \"armv7-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-musleabihf\",\n \"armv7s-apple-ios\",\n \"asmjs-unknown-emscripten\",\n \"i386-apple-ios\",\n \"i586-pc-windows-msvc\",\n \"i586-unknown-linux-gnu\",\n \"i686-apple-darwin\",\n \"i686-linux-android\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-freebsd\",\n \"i686-unknown-linux-gnu\",\n \"i686-unknown-linux-musl\",\n \"mips-unknown-linux-gnu\",\n \"mips-unknown-linux-musl\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"mipsel-unknown-linux-musl\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"sparc64-unknown-linux-gnu\",\n \"wasm32-unknown-emscripten\",\n \"x86_64-apple-darwin\",\n \"x86_64-apple-ios\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-rumprun-netbsd\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-linux-musl\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic MINGW: &'static [&'static str] = &[\n \"i686-pc-windows-gnu\",\n \"x86_64-pc-windows-gnu\",\n];\n\nstruct Manifest {\n manifest_version: String,\n date: String,\n pkg: HashMap<String, Package>,\n}\n\n#[derive(RustcEncodable)]\nstruct Package {\n version: String,\n target: HashMap<String, Target>,\n}\n\n#[derive(RustcEncodable)]\nstruct Target {\n available: bool,\n url: Option<String>,\n hash: Option<String>,\n components: Option<Vec<Component>>,\n extensions: Option<Vec<Component>>,\n}\n\n#[derive(RustcEncodable)]\nstruct Component {\n pkg: String,\n target: String,\n}\n\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nstruct Builder {\n channel: String,\n input: PathBuf,\n output: PathBuf,\n gpg_passphrase: String,\n digests: HashMap<String, String>,\n s3_address: String,\n date: String,\n rust_version: String,\n cargo_version: String,\n}\n\nfn main() {\n let mut args = env::args().skip(1);\n let input = PathBuf::from(args.next().unwrap());\n let output = PathBuf::from(args.next().unwrap());\n let date = args.next().unwrap();\n let channel = args.next().unwrap();\n let s3_address = args.next().unwrap();\n let mut passphrase = String::new();\n t!(io::stdin().read_to_string(&mut passphrase));\n\n Builder {\n channel: channel,\n input: input,\n output: output,\n gpg_passphrase: passphrase,\n digests: HashMap::new(),\n s3_address: s3_address,\n date: date,\n rust_version: String::new(),\n cargo_version: String::new(),\n }.build();\n}\n\nimpl Builder {\n fn build(&mut self) {\n self.rust_version = self.version(\"rust\", \"x86_64-unknown-linux-gnu\");\n self.cargo_version = self.version(\"cargo\", \"x86_64-unknown-linux-gnu\");\n\n self.digest_and_sign();\n let Manifest { manifest_version, date, pkg } = self.build_manifest();\n\n \/\/ Unfortunately we can't use derive(RustcEncodable) here because the\n \/\/ version field is called `manifest-version`, not `manifest_version`.\n \/\/ In lieu of that just create the table directly here with a `BTreeMap`\n \/\/ and wrap it up in a `Value::Table`.\n let mut manifest = BTreeMap::new();\n manifest.insert(\"manifest-version\".to_string(),\n toml::Value::String(manifest_version));\n manifest.insert(\"date\".to_string(), toml::Value::String(date));\n manifest.insert(\"pkg\".to_string(), toml::encode(&pkg));\n let manifest = toml::Value::Table(manifest).to_string();\n\n let filename = format!(\"channel-rust-{}.toml\", self.channel);\n self.write_manifest(&manifest, &filename);\n\n if self.channel != \"beta\" && self.channel != \"nightly\" {\n self.write_manifest(&manifest, \"channel-rust-stable.toml\");\n }\n }\n\n fn digest_and_sign(&mut self) {\n for file in t!(self.input.read_dir()).map(|e| t!(e).path()) {\n let filename = file.file_name().unwrap().to_str().unwrap();\n let digest = self.hash(&file);\n self.sign(&file);\n assert!(self.digests.insert(filename.to_string(), digest).is_none());\n }\n }\n\n fn build_manifest(&mut self) -> Manifest {\n let mut manifest = Manifest {\n manifest_version: \"2\".to_string(),\n date: self.date.to_string(),\n pkg: HashMap::new(),\n };\n\n self.package(\"rustc\", &mut manifest.pkg, HOSTS);\n self.package(\"cargo\", &mut manifest.pkg, HOSTS);\n self.package(\"rust-mingw\", &mut manifest.pkg, MINGW);\n self.package(\"rust-std\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-docs\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-src\", &mut manifest.pkg, &[\"*\"]);\n\n let mut pkg = Package {\n version: self.cached_version(\"rust\").to_string(),\n target: HashMap::new(),\n };\n for host in HOSTS {\n let filename = self.filename(\"rust\", host);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n pkg.target.insert(host.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n });\n continue\n }\n };\n let mut components = Vec::new();\n let mut extensions = Vec::new();\n\n \/\/ rustc\/rust-std\/cargo are all required, and so is rust-mingw if it's\n \/\/ available for the target.\n components.extend(vec![\n Component { 
pkg: \"rustc\".to_string(), target: host.to_string() },\n Component { pkg: \"rust-std\".to_string(), target: host.to_string() },\n Component { pkg: \"cargo\".to_string(), target: host.to_string() },\n ]);\n if host.contains(\"pc-windows-gnu\") {\n components.push(Component {\n pkg: \"rust-mingw\".to_string(),\n target: host.to_string(),\n });\n }\n\n \/\/ Docs, other standard libraries, and the source package are all\n \/\/ optional.\n extensions.push(Component {\n pkg: \"rust-docs\".to_string(),\n target: host.to_string(),\n });\n for target in TARGETS {\n if target != host {\n extensions.push(Component {\n pkg: \"rust-std\".to_string(),\n target: target.to_string(),\n });\n }\n }\n extensions.push(Component {\n pkg: \"rust-src\".to_string(),\n target: \"*\".to_string(),\n });\n\n pkg.target.insert(host.to_string(), Target {\n available: true,\n url: Some(self.url(\"rust\", host)),\n hash: Some(to_hex(digest.as_ref())),\n components: Some(components),\n extensions: Some(extensions),\n });\n }\n manifest.pkg.insert(\"rust\".to_string(), pkg);\n\n return manifest\n }\n\n fn package(&mut self,\n pkgname: &str,\n dst: &mut HashMap<String, Package>,\n targets: &[&str]) {\n let targets = targets.iter().map(|name| {\n let filename = self.filename(pkgname, name);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n return (name.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n })\n }\n };\n\n (name.to_string(), Target {\n available: true,\n url: Some(self.url(pkgname, name)),\n hash: Some(digest),\n components: None,\n extensions: None,\n })\n }).collect();\n\n dst.insert(pkgname.to_string(), Package {\n version: self.cached_version(pkgname).to_string(),\n target: targets,\n });\n }\n\n fn url(&self, component: &str, target: &str) -> String {\n format!(\"{}\/{}\/{}\",\n self.s3_address,\n self.date,\n self.filename(component, target))\n }\n\n fn filename(&self, component: &str, target: &str) -> String {\n if component == \"rust-src\" {\n format!(\"rust-src-{}.tar.gz\", self.channel)\n } else if component == \"cargo\" {\n format!(\"cargo-nightly-{}.tar.gz\", target)\n } else {\n format!(\"{}-{}-{}.tar.gz\", component, self.channel, target)\n }\n }\n\n fn cached_version(&self, component: &str) -> &str {\n if component == \"cargo\" {\n &self.cargo_version\n } else {\n &self.rust_version\n }\n }\n\n fn version(&self, component: &str, target: &str) -> String {\n let mut cmd = Command::new(\"tar\");\n let filename = self.filename(component, target);\n cmd.arg(\"xf\")\n .arg(self.input.join(&filename))\n .arg(format!(\"{}\/version\", filename.replace(\".tar.gz\", \"\")))\n .arg(\"-O\");\n let version = t!(cmd.output());\n if !version.status.success() {\n panic!(\"failed to learn version:\\n\\n{:?}\\n\\n{}\\n\\n{}\",\n cmd,\n String::from_utf8_lossy(&version.stdout),\n String::from_utf8_lossy(&version.stderr));\n }\n String::from_utf8_lossy(&version.stdout).trim().to_string()\n }\n\n fn hash(&self, path: &Path) -> String {\n let sha = t!(Command::new(\"shasum\")\n .arg(\"-a\").arg(\"256\")\n .arg(path.file_name().unwrap())\n .current_dir(path.parent().unwrap())\n .output());\n assert!(sha.status.success());\n\n let filename = path.file_name().unwrap().to_str().unwrap();\n let sha256 = self.output.join(format!(\"{}.sha256\", filename));\n t!(t!(File::create(&sha256)).write_all(&sha.stdout));\n\n let stdout = String::from_utf8_lossy(&sha.stdout);\n stdout.split_whitespace().next().unwrap().to_string()\n 
}\n\n fn sign(&self, path: &Path) {\n let filename = path.file_name().unwrap().to_str().unwrap();\n let asc = self.output.join(format!(\"{}.asc\", filename));\n println!(\"signing: {:?}\", path);\n let mut cmd = Command::new(\"gpg\");\n cmd.arg(\"--no-tty\")\n .arg(\"--yes\")\n .arg(\"--passphrase-fd\").arg(\"0\")\n .arg(\"--armor\")\n .arg(\"--output\").arg(&asc)\n .arg(\"--detach-sign\").arg(path)\n .stdin(Stdio::piped());\n let mut child = t!(cmd.spawn());\n t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes()));\n assert!(t!(child.wait()).success());\n }\n\n fn write_manifest(&self, manifest: &str, name: &str) {\n let dst = self.output.join(name);\n t!(t!(File::create(&dst)).write_all(manifest.as_bytes()));\n self.hash(&dst);\n self.sign(&dst);\n }\n}\n\nfn to_hex(digest: &[u8]) -> String {\n let mut ret = String::new();\n for byte in digest {\n ret.push(hex((byte & 0xf0) >> 4));\n ret.push(hex(byte & 0xf));\n }\n return ret;\n\n fn hex(b: u8) -> char {\n match b {\n 0...9 => (b'0' + b) as char,\n _ => (b'a' + b - 10) as char,\n }\n }\n}\n<commit_msg>Add save-analysis data to nightly manifests.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nextern crate toml;\nextern crate rustc_serialize;\n\nuse std::collections::{BTreeMap, HashMap};\nuse std::env;\nuse std::fs::File;\nuse std::io::{self, Read, Write};\nuse std::path::{PathBuf, Path};\nuse std::process::{Command, Stdio};\n\nstatic HOSTS: &'static [&'static str] = &[\n \"aarch64-unknown-linux-gnu\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-gnueabihf\",\n \"i686-apple-darwin\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-linux-gnu\",\n \"mips-unknown-linux-gnu\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n \"mipsel-unknown-linux-gnu\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"x86_64-apple-darwin\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic TARGETS: &'static [&'static str] = &[\n \"aarch64-apple-ios\",\n \"aarch64-linux-android\",\n \"aarch64-unknown-linux-gnu\",\n \"arm-linux-androideabi\",\n \"arm-unknown-linux-gnueabi\",\n \"arm-unknown-linux-gnueabihf\",\n \"arm-unknown-linux-musleabi\",\n \"arm-unknown-linux-musleabihf\",\n \"armv7-apple-ios\",\n \"armv7-linux-androideabi\",\n \"armv7-unknown-linux-gnueabihf\",\n \"armv7-unknown-linux-musleabihf\",\n \"armv7s-apple-ios\",\n \"asmjs-unknown-emscripten\",\n \"i386-apple-ios\",\n \"i586-pc-windows-msvc\",\n \"i586-unknown-linux-gnu\",\n \"i686-apple-darwin\",\n \"i686-linux-android\",\n \"i686-pc-windows-gnu\",\n \"i686-pc-windows-msvc\",\n \"i686-unknown-freebsd\",\n \"i686-unknown-linux-gnu\",\n \"i686-unknown-linux-musl\",\n \"mips-unknown-linux-gnu\",\n \"mips-unknown-linux-musl\",\n \"mips64-unknown-linux-gnuabi64\",\n \"mips64el-unknown-linux-gnuabi64\",\n 
\"mipsel-unknown-linux-gnu\",\n \"mipsel-unknown-linux-musl\",\n \"powerpc-unknown-linux-gnu\",\n \"powerpc64-unknown-linux-gnu\",\n \"powerpc64le-unknown-linux-gnu\",\n \"s390x-unknown-linux-gnu\",\n \"sparc64-unknown-linux-gnu\",\n \"wasm32-unknown-emscripten\",\n \"x86_64-apple-darwin\",\n \"x86_64-apple-ios\",\n \"x86_64-pc-windows-gnu\",\n \"x86_64-pc-windows-msvc\",\n \"x86_64-rumprun-netbsd\",\n \"x86_64-unknown-freebsd\",\n \"x86_64-unknown-linux-gnu\",\n \"x86_64-unknown-linux-musl\",\n \"x86_64-unknown-netbsd\",\n];\n\nstatic MINGW: &'static [&'static str] = &[\n \"i686-pc-windows-gnu\",\n \"x86_64-pc-windows-gnu\",\n];\n\nstruct Manifest {\n manifest_version: String,\n date: String,\n pkg: HashMap<String, Package>,\n}\n\n#[derive(RustcEncodable)]\nstruct Package {\n version: String,\n target: HashMap<String, Target>,\n}\n\n#[derive(RustcEncodable)]\nstruct Target {\n available: bool,\n url: Option<String>,\n hash: Option<String>,\n components: Option<Vec<Component>>,\n extensions: Option<Vec<Component>>,\n}\n\n#[derive(RustcEncodable)]\nstruct Component {\n pkg: String,\n target: String,\n}\n\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\nstruct Builder {\n channel: String,\n input: PathBuf,\n output: PathBuf,\n gpg_passphrase: String,\n digests: HashMap<String, String>,\n s3_address: String,\n date: String,\n rust_version: String,\n cargo_version: String,\n}\n\nfn main() {\n let mut args = env::args().skip(1);\n let input = PathBuf::from(args.next().unwrap());\n let output = PathBuf::from(args.next().unwrap());\n let date = args.next().unwrap();\n let channel = args.next().unwrap();\n let s3_address = args.next().unwrap();\n let mut passphrase = String::new();\n t!(io::stdin().read_to_string(&mut passphrase));\n\n Builder {\n channel: channel,\n input: input,\n output: output,\n gpg_passphrase: passphrase,\n digests: HashMap::new(),\n s3_address: s3_address,\n date: date,\n rust_version: String::new(),\n cargo_version: String::new(),\n }.build();\n}\n\nimpl Builder {\n fn build(&mut self) {\n self.rust_version = self.version(\"rust\", \"x86_64-unknown-linux-gnu\");\n self.cargo_version = self.version(\"cargo\", \"x86_64-unknown-linux-gnu\");\n\n self.digest_and_sign();\n let Manifest { manifest_version, date, pkg } = self.build_manifest();\n\n \/\/ Unfortunately we can't use derive(RustcEncodable) here because the\n \/\/ version field is called `manifest-version`, not `manifest_version`.\n \/\/ In lieu of that just create the table directly here with a `BTreeMap`\n \/\/ and wrap it up in a `Value::Table`.\n let mut manifest = BTreeMap::new();\n manifest.insert(\"manifest-version\".to_string(),\n toml::Value::String(manifest_version));\n manifest.insert(\"date\".to_string(), toml::Value::String(date));\n manifest.insert(\"pkg\".to_string(), toml::encode(&pkg));\n let manifest = toml::Value::Table(manifest).to_string();\n\n let filename = format!(\"channel-rust-{}.toml\", self.channel);\n self.write_manifest(&manifest, &filename);\n\n if self.channel != \"beta\" && self.channel != \"nightly\" {\n self.write_manifest(&manifest, \"channel-rust-stable.toml\");\n }\n }\n\n fn digest_and_sign(&mut self) {\n for file in t!(self.input.read_dir()).map(|e| t!(e).path()) {\n let filename = file.file_name().unwrap().to_str().unwrap();\n let digest = self.hash(&file);\n self.sign(&file);\n assert!(self.digests.insert(filename.to_string(), digest).is_none());\n }\n }\n\n fn build_manifest(&mut self) -> 
Manifest {\n let mut manifest = Manifest {\n manifest_version: \"2\".to_string(),\n date: self.date.to_string(),\n pkg: HashMap::new(),\n };\n\n self.package(\"rustc\", &mut manifest.pkg, HOSTS);\n self.package(\"cargo\", &mut manifest.pkg, HOSTS);\n self.package(\"rust-mingw\", &mut manifest.pkg, MINGW);\n self.package(\"rust-std\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-docs\", &mut manifest.pkg, TARGETS);\n self.package(\"rust-src\", &mut manifest.pkg, &[\"*\"]);\n\n if self.channel == \"rust-nightly\" {\n self.package(\"analysis\", &mut manifest.pkg, TARGETS);\n }\n\n let mut pkg = Package {\n version: self.cached_version(\"rust\").to_string(),\n target: HashMap::new(),\n };\n for host in HOSTS {\n let filename = self.filename(\"rust\", host);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n pkg.target.insert(host.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n });\n continue\n }\n };\n let mut components = Vec::new();\n let mut extensions = Vec::new();\n\n \/\/ rustc\/rust-std\/cargo are all required, and so is rust-mingw if it's\n \/\/ available for the target.\n components.extend(vec![\n Component { pkg: \"rustc\".to_string(), target: host.to_string() },\n Component { pkg: \"rust-std\".to_string(), target: host.to_string() },\n Component { pkg: \"cargo\".to_string(), target: host.to_string() },\n ]);\n if host.contains(\"pc-windows-gnu\") {\n components.push(Component {\n pkg: \"rust-mingw\".to_string(),\n target: host.to_string(),\n });\n }\n\n \/\/ Docs, other standard libraries, and the source package are all\n \/\/ optional.\n extensions.push(Component {\n pkg: \"rust-docs\".to_string(),\n target: host.to_string(),\n });\n for target in TARGETS {\n if target != host {\n extensions.push(Component {\n pkg: \"rust-std\".to_string(),\n target: target.to_string(),\n });\n }\n if self.channel == \"nightly\" {\n extensions.push(Component {\n pkg: \"rust-analysis\".to_string(),\n target: target.to_string(),\n });\n }\n }\n extensions.push(Component {\n pkg: \"rust-src\".to_string(),\n target: \"*\".to_string(),\n });\n\n pkg.target.insert(host.to_string(), Target {\n available: true,\n url: Some(self.url(\"rust\", host)),\n hash: Some(to_hex(digest.as_ref())),\n components: Some(components),\n extensions: Some(extensions),\n });\n }\n manifest.pkg.insert(\"rust\".to_string(), pkg);\n\n return manifest\n }\n\n fn package(&mut self,\n pkgname: &str,\n dst: &mut HashMap<String, Package>,\n targets: &[&str]) {\n let targets = targets.iter().map(|name| {\n let filename = self.filename(pkgname, name);\n let digest = match self.digests.remove(&filename) {\n Some(digest) => digest,\n None => {\n return (name.to_string(), Target {\n available: false,\n url: None,\n hash: None,\n components: None,\n extensions: None,\n })\n }\n };\n\n (name.to_string(), Target {\n available: true,\n url: Some(self.url(pkgname, name)),\n hash: Some(digest),\n components: None,\n extensions: None,\n })\n }).collect();\n\n dst.insert(pkgname.to_string(), Package {\n version: self.cached_version(pkgname).to_string(),\n target: targets,\n });\n }\n\n fn url(&self, component: &str, target: &str) -> String {\n format!(\"{}\/{}\/{}\",\n self.s3_address,\n self.date,\n self.filename(component, target))\n }\n\n fn filename(&self, component: &str, target: &str) -> String {\n if component == \"rust-src\" {\n format!(\"rust-src-{}.tar.gz\", self.channel)\n } else if component == \"cargo\" {\n 
format!(\"cargo-nightly-{}.tar.gz\", target)\n } else {\n format!(\"{}-{}-{}.tar.gz\", component, self.channel, target)\n }\n }\n\n fn cached_version(&self, component: &str) -> &str {\n if component == \"cargo\" {\n &self.cargo_version\n } else {\n &self.rust_version\n }\n }\n\n fn version(&self, component: &str, target: &str) -> String {\n let mut cmd = Command::new(\"tar\");\n let filename = self.filename(component, target);\n cmd.arg(\"xf\")\n .arg(self.input.join(&filename))\n .arg(format!(\"{}\/version\", filename.replace(\".tar.gz\", \"\")))\n .arg(\"-O\");\n let version = t!(cmd.output());\n if !version.status.success() {\n panic!(\"failed to learn version:\\n\\n{:?}\\n\\n{}\\n\\n{}\",\n cmd,\n String::from_utf8_lossy(&version.stdout),\n String::from_utf8_lossy(&version.stderr));\n }\n String::from_utf8_lossy(&version.stdout).trim().to_string()\n }\n\n fn hash(&self, path: &Path) -> String {\n let sha = t!(Command::new(\"shasum\")\n .arg(\"-a\").arg(\"256\")\n .arg(path.file_name().unwrap())\n .current_dir(path.parent().unwrap())\n .output());\n assert!(sha.status.success());\n\n let filename = path.file_name().unwrap().to_str().unwrap();\n let sha256 = self.output.join(format!(\"{}.sha256\", filename));\n t!(t!(File::create(&sha256)).write_all(&sha.stdout));\n\n let stdout = String::from_utf8_lossy(&sha.stdout);\n stdout.split_whitespace().next().unwrap().to_string()\n }\n\n fn sign(&self, path: &Path) {\n let filename = path.file_name().unwrap().to_str().unwrap();\n let asc = self.output.join(format!(\"{}.asc\", filename));\n println!(\"signing: {:?}\", path);\n let mut cmd = Command::new(\"gpg\");\n cmd.arg(\"--no-tty\")\n .arg(\"--yes\")\n .arg(\"--passphrase-fd\").arg(\"0\")\n .arg(\"--armor\")\n .arg(\"--output\").arg(&asc)\n .arg(\"--detach-sign\").arg(path)\n .stdin(Stdio::piped());\n let mut child = t!(cmd.spawn());\n t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes()));\n assert!(t!(child.wait()).success());\n }\n\n fn write_manifest(&self, manifest: &str, name: &str) {\n let dst = self.output.join(name);\n t!(t!(File::create(&dst)).write_all(manifest.as_bytes()));\n self.hash(&dst);\n self.sign(&dst);\n }\n}\n\nfn to_hex(digest: &[u8]) -> String {\n let mut ret = String::new();\n for byte in digest {\n ret.push(hex((byte & 0xf0) >> 4));\n ret.push(hex(byte & 0xf));\n }\n return ret;\n\n fn hex(b: u8) -> char {\n match b {\n 0...9 => (b'0' + b) as char,\n _ => (b'a' + b - 10) as char,\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add broken_sendreceive example<commit_after>#[macro_use]\nextern crate log;\nextern crate fern;\nextern crate time;\nextern crate mqtt;\n\nuse std::thread;\nuse std::char;\nuse mqtt::async::{PersistenceType, Qos, MqttError, AsyncClient, AsyncConnectOptions};\n\n\nfn conf_logger() {\n let logger_config = fern::DispatchConfig {\n format: Box::new(|msg: &str, level: &log::LogLevel, _location: &log::LogLocation| {\n let t = time::now();\n let ms = t.tm_nsec\/1000_000;\n format!(\"{}.{:3} [{}] {}\", t.strftime(\"%Y-%m-%dT%H:%M:%S\").unwrap(), ms, level, msg)\n }),\n output: vec![fern::OutputConfig::stderr()],\n level: log::LogLevelFilter::Trace,\n };\n\n if let Err(e) = fern::init_global_logger(logger_config, log::LogLevelFilter::Trace) {\n panic!(\"Failed to initialize global logger: {}\", e);\n }\n}\n\nfn setup_mqtt(server_address: &str, topic: &str, client_id: &str) -> Result<AsyncClient, MqttError> {\n let connect_options = AsyncConnectOptions::new();\n let mut client = try!(AsyncClient::new(server_address, 
client_id, PersistenceType::Nothing));\n try!(client.connect(&connect_options));\n try!(client.subscribe(topic, Qos::FireAndForget));\n Ok(client)\n}\n\nfn main() {\n \/\/ setup fern logger\n conf_logger();\n\n \/\/ start processing\n info!(\"sendreceive test started\");\n let mut data = Vec::new();\n let topic = \"TestTopic\";\n match setup_mqtt(\"tcp:\/\/localhost:1883\", &topic, \"TestClientId\") {\n Ok(mut client) => {\n for i in 0..10 {\n info!(\"data len: {}\", i);\n data.push(char::from_digit(i % 10, 10).unwrap() as u8);\n client.send(&data, &topic, Qos::FireAndForget).unwrap();\n for message in client.messages() {\n info!(\"{:?}\", message);\n }\n thread::sleep_ms(200);\n }},\n Err(e) => error!(\"{}\", e)\n }\n info!(\"sendreceive test ended\");\n}\n<|endoftext|>"} {"text":"<commit_before>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Execute a instruction\n pub fn exec(&mut self, Inst(para, cmd): Inst) {\n use super::Key::*;\n use super::Mode::*;\n use super::PrimitiveMode::*;\n use super::CommandMode::*;\n\n let n = para.d();\n match cmd {\n Ctrl(b) => self.key_state.ctrl = b,\n Alt(b) => self.key_state.alt = b,\n Shift(b) => self.key_state.shift = b,\n _ => {},\n }\n\n if cmd == Char(' ') && self.key_state.shift {\n self.cursor_mut().mode = Mode::Command(CommandMode::Normal);\n } else if self.key_state.alt {\n let new_pos = self.to_motion(Inst(para, cmd));\n self.goto(new_pos);\n } else {\n match self.cursor().mode {\n Command(Normal) => match cmd {\n Char('i') => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n\n },\n Char('o') => {\n \/\/ TODO: Autoindent (keep the same indentation level)\n let y = self.y();\n let ind = self.get_indent(y);\n let last = ind.len();\n self.text.insert(y + 1, ind);\n self.goto((last, y + 1));\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n }\n Char('h') => self.goto_left(n),\n Char('j') => self.goto_down(n),\n Char('k') => self.goto_up(n),\n Char('l') => self.goto_right(n),\n Char('J') => self.goto_down(15),\n Char('K') => self.goto_up(15),\n Char('x') => self.delete(),\n Char('X') => {\n self.goto_previous();\n self.delete();\n },\n Char('L') => self.goto_ln_end(),\n Char('H') => self.cursor_mut().x = 0,\n Char('r') => {\n loop {\n if let EventOption::Key(k) = self.window.poll()\n .unwrap_or(Event::new())\n .to_option() {\n if k.pressed {\n let x = self.x();\n let y = self.y();\n self.text[y][x] = k.character;\n break;\n }\n }\n }\n },\n Char('R') => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Replace,\n }));\n },\n Char('d') => {\n let ins = self.next_inst();\n let motion = self.to_motion(ins);\n self.remove_rb(motion);\n },\n Char('G') => {\n let last = self.text.len() - 1;\n self.goto((0, last));\n },\n Char('g') => {\n if let Parameter::Int(n) = para {\n self.goto((0, n - 1));\n } else {\n let inst = self.next_inst();\n let new = self.to_motion(inst);\n self.cursor_mut().x = new.0;\n self.cursor_mut().y = new.1;\n }\n\n },\n\/\/ ????\n\/\/ Char('K') => {\n\/\/ self.goto((0, 0));\n\/\/ },\n\/\/ Char('J') => {\n\/\/ self.goto((0, self.text.len() - 1));\n\/\/ },\n Char(' ') => self.goto_next(),\n _ => {},\n },\n Primitive(Insert(opt)) => {\n self.insert(cmd, opt);\n },\n }\n }\n }\n}\n<commit_msg>Add cursor branching<commit_after>use super::*;\nuse redox::*;\n\nimpl Editor {\n \/\/\/ Execute a instruction\n pub fn exec(&mut self, 
Inst(para, cmd): Inst) {\n use super::Key::*;\n use super::Mode::*;\n use super::PrimitiveMode::*;\n use super::CommandMode::*;\n\n let n = para.d();\n match cmd {\n Ctrl(b) => self.key_state.ctrl = b,\n Alt(b) => self.key_state.alt = b,\n Shift(b) => self.key_state.shift = b,\n _ => {},\n }\n\n if cmd == Char(' ') && self.key_state.shift {\n self.cursor_mut().mode = Mode::Command(CommandMode::Normal);\n } else if self.key_state.alt && cmd == Key::Char(' ') {\n self.next_cursor();\n } else if self.key_state.alt {\n let new_pos = self.to_motion(Inst(para, cmd));\n self.goto(new_pos);\n } else {\n match self.cursor().mode {\n Command(Normal) => match cmd {\n Char('i') => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n\n },\n Char('o') => {\n \/\/ TODO: Autoindent (keep the same indentation level)\n let y = self.y();\n let ind = self.get_indent(y);\n let last = ind.len();\n self.text.insert(y + 1, ind);\n self.goto((last, y + 1));\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Insert,\n }));\n }\n Char('h') => self.goto_left(n),\n Char('j') => self.goto_down(n),\n Char('k') => self.goto_up(n),\n Char('l') => self.goto_right(n),\n Char('J') => self.goto_down(15),\n Char('K') => self.goto_up(15),\n Char('x') => self.delete(),\n Char('X') => {\n self.goto_previous();\n self.delete();\n },\n Char('L') => self.goto_ln_end(),\n Char('H') => self.cursor_mut().x = 0,\n Char('r') => {\n loop {\n if let EventOption::Key(k) = self.window.poll()\n .unwrap_or(Event::new())\n .to_option() {\n if k.pressed {\n let x = self.x();\n let y = self.y();\n self.text[y][x] = k.character;\n break;\n }\n }\n }\n },\n Char('R') => {\n self.cursor_mut().mode = Mode::Primitive(PrimitiveMode::Insert(\n InsertOptions {\n mode: InsertMode::Replace,\n }));\n },\n Char('d') => {\n let ins = self.next_inst();\n let motion = self.to_motion(ins);\n self.remove_rb(motion);\n },\n Char('G') => {\n let last = self.text.len() - 1;\n self.goto((0, last));\n },\n Char('g') => {\n if let Parameter::Int(n) = para {\n self.goto((0, n - 1));\n } else {\n let inst = self.next_inst();\n let new = self.to_motion(inst);\n self.cursor_mut().x = new.0;\n self.cursor_mut().y = new.1;\n }\n\n },\n Char('b') => {\n \/\/ Branch cursor\n let cursor = self.cursor().clone();\n self.cursors.push(cursor);\n },\n\/\/ ????\n\/\/ Char('K') => {\n\/\/ self.goto((0, 0));\n\/\/ },\n\/\/ Char('J') => {\n\/\/ self.goto((0, self.text.len() - 1));\n\/\/ },\n Char(' ') => self.goto_next(),\n _ => {},\n },\n Primitive(Insert(opt)) => {\n self.insert(cmd, opt);\n },\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z trace-macros\n\n#![recursion_limit=\"4\"]\n\nmacro_rules! my_faulty_macro {\n () => {\n my_faulty_macro!(bcd);\n };\n}\n\nmacro_rules! pat_macro {\n () => {\n pat_macro!(A{a:a, b:0, c:_, ..});\n };\n ($a:pat) => {\n $a\n };\n}\n\nmacro_rules! 
my_recursive_macro {\n () => {\n my_recursive_macro!();\n };\n}\n\nmacro_rules! my_macro {\n () => {\n \n };\n}\n\nfn main() {\n my_faulty_macro!();\n my_recursive_macro!();\n test!();\n non_exisiting!();\n derive!(Debug);\n let a = pat_macro!();\n}\n\n#[my_macro]\nfn use_bang_macro_as_attr(){}\n\n#[derive(Debug)]\nfn use_derive_macro_as_attr(){}\n<commit_msg>Fix tidy error<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: -Z trace-macros\n\n#![recursion_limit=\"4\"]\n\nmacro_rules! my_faulty_macro {\n () => {\n my_faulty_macro!(bcd);\n };\n}\n\nmacro_rules! pat_macro {\n () => {\n pat_macro!(A{a:a, b:0, c:_, ..});\n };\n ($a:pat) => {\n $a\n };\n}\n\nmacro_rules! my_recursive_macro {\n () => {\n my_recursive_macro!();\n };\n}\n\nmacro_rules! my_macro {\n () => {\n\n };\n}\n\nfn main() {\n my_faulty_macro!();\n my_recursive_macro!();\n test!();\n non_exisiting!();\n derive!(Debug);\n let a = pat_macro!();\n}\n\n#[my_macro]\nfn use_bang_macro_as_attr(){}\n\n#[derive(Debug)]\nfn use_derive_macro_as_attr(){}\n<|endoftext|>"} {"text":"<commit_before>use super::core;\nuse super::elaborate::{self, ElabCx, LocalElabCx};\nuse super::error_reporting::Report;\nuse super::parser;\nuse super::ast::{self, SourceMap};\nuse super::typeck::{self, LocalCx};\n\nuse std::io;\nuse std::path::PathBuf;\nuse readline;\n\nconst HELP_MESSAGE: &'static str = r#\"\nCommands:\n :help Show this message\n :type <term> Infer the type of <term>\n :quit Exit\n\"#;\n\npub struct Repl {\n elab_cx: ElabCx,\n root_file: Option<PathBuf>,\n}\n\n\/\/ impl From<parser::Error> for Error {\n\/\/ fn from(err: parser::Error) -> Error {\n\/\/ Error::Parser(err)\n\/\/ }\n\/\/ }\n\/\/\n\nimpl From<io::Error> for Error {\n fn from(err: io::Error) -> Error {\n Error::Io(err)\n }\n}\n\nimpl From<elaborate::Error> for Error {\n fn from(err: elaborate::Error) -> Error {\n Error::Elaborator(err)\n }\n}\n\nimpl From<typeck::Error> for Error {\n fn from(err: typeck::Error) -> Error {\n Error::TypeCk(err)\n }\n}\n\nimpl From<parser::Error> for Error {\n fn from(err: parser::Error) -> Error {\n Error::Parser(err)\n }\n}\n\n#[derive(Debug)]\npub enum Error {\n Io(io::Error),\n Elaborator(elaborate::Error),\n UnknownCommand(String),\n TypeCk(typeck::Error),\n Parser(parser::Error),\n}\n\n#[derive(Debug)]\nenum Command {\n Quit,\n Reload,\n Unknown(String),\n TypeOf(String),\n Help,\n}\n\npub enum Cont {\n Quit,\n Done,\n}\n\nimpl Repl {\n pub fn from_path(file: &Option<PathBuf>) -> Result<Repl, Error> {\n match file {\n &None => {\n let ecx = ElabCx::from_module(ast::Module::empty(), SourceMap::empty());\n Ok(Repl {\n elab_cx: ecx,\n root_file: None\n })\n }\n &Some(ref file_path) => {\n let parser = try!(parser::from_file(file_path));\n let module = try!(parser.parse());\n\n let mut ecx = ElabCx::from_module(module, parser.source_map.clone());\n\n let emodule = try!(ecx.elaborate_module(file_path));\n\n {\n let main = try!(ecx.ty_cx.get_main_body());\n let result = try!(ecx.ty_cx.eval(main));\n println!(\"main={}\", result);\n }\n\n Ok(Repl {\n elab_cx: ecx,\n 
root_file: file.clone(),\n })\n }\n }\n }\n\n \/\/\/ Starts the read-eval-print-loop for querying the language.\n pub fn start(mut self) -> Result<(), Error> {\n loop {\n \/\/ First we grab a line ...\n let input = match readline::readline(\"hubris> \") {\n None => {\n println!(\"\");\n break;\n }\n Some(input) => input,\n };\n\n \/\/ Add it to the history\n readline::add_history(input.as_ref());\n\n match self.repl_interation(input) {\n \/\/ please make me look better\n Err(e) => println!(\"repl error: {:?}\", e),\n Ok(cont) => {\n match cont {\n Cont::Quit => break,\n Cont::Done => {}\n }\n }\n }\n }\n\n Ok(())\n }\n\n pub fn repl_interation(&mut self, input: String) -> Result<Cont, Error> {\n if input.len() <= 0 {\n \/\/ do nothing\n } else if &input[0..1] == \":\" {\n let cmd = self.parse_command(&input[1..]);\n match cmd {\n Command::Quit => return Ok(Cont::Quit),\n Command::Reload => panic!(\"unsupported command\"),\n Command::Unknown(u) => return Err(Error::UnknownCommand(u)),\n Command::TypeOf(t) => {\n let term = try!(self.preprocess_term(t));\n println!(\"{}\", try!(self.type_check_term(&term)));\n }\n Command::Help => println!(\"{}\", HELP_MESSAGE),\n }\n } else {\n try!(self.handle_input(input.to_string()));\n }\n\n Ok(Cont::Done)\n }\n\n fn preprocess_term(&mut self, source: String) -> Result<core::Term, Error> {\n let parser = parser::from_string(source).unwrap();\n let term = try!(parser.parse_term());\n\n let mut lcx = LocalElabCx::from_elab_cx(&mut self.elab_cx);\n let term = try!(lcx.elaborate_term(term));\n\n Ok(term)\n }\n\n fn type_check_term(&mut self, term: &core::Term) -> Result<core::Term, Error> {\n let mut ltycx = LocalCx::from_cx(&self.elab_cx.ty_cx);\n let term = try!(ltycx.type_infer_term(&term));\n\n Ok(term)\n }\n\n fn handle_input(&mut self, source: String) -> Result<(), Error> {\n let term = try!(self.preprocess_term(source));\n let ty = try!(self.type_check_term(&term));\n println!(\"{} : {}\", term, ty);\n println!(\"{}\", try!(self.elab_cx.ty_cx.eval(&term)));\n Ok(())\n }\n\n fn parse_command(&self, command_text: &str) -> Command {\n if command_text == \"quit\" {\n Command::Quit\n } else if command_text == \"reload\" {\n Command::Reload\n } else if &command_text[0..4] == \"type\" {\n Command::TypeOf(command_text[0..4].to_string())\n } else if &command_text[0..1] == \"t\" {\n Command::TypeOf(command_text[1..].to_string())\n } else if &command_text[0..4] == \"help\" {\n Command::Help\n } else {\n Command::Unknown(command_text.to_string())\n }\n }\n}\n\n\/\/ impl Report for Error {\n\/\/ type Context = ElabCx;\n\/\/\n\/\/ fn report<O: Write>(cx: &Self::Context,\n\/\/ mut out: Box<Terminal<Output = O>>,\n\/\/ error: Self)\n\/\/ -> TResult<()> {\n\/\/ match error {\n\/\/ Error::TypeCk(ty_ck) => panic!(),\n\/\/ _ => panic!(),\n\/\/ }\n\/\/ }\n\/\/ }\n<commit_msg>Make repl more usable with command prefixes<commit_after>use super::core;\nuse super::elaborate::{self, ElabCx, LocalElabCx};\nuse super::error_reporting::Report;\nuse super::parser;\nuse super::ast::{self, SourceMap};\nuse super::typeck::{self, LocalCx};\n\nuse std::io;\nuse std::path::PathBuf;\nuse readline;\n\nconst HELP_MESSAGE: &'static str = r#\"\nCommands:\n :help Show this message\n :type <term> Infer the type of <term>\n :quit Exit\n\"#;\n\npub struct Repl {\n elab_cx: ElabCx,\n root_file: Option<PathBuf>,\n}\n\n\/\/ impl From<parser::Error> for Error {\n\/\/ fn from(err: parser::Error) -> Error {\n\/\/ Error::Parser(err)\n\/\/ }\n\/\/ }\n\/\/\n\nimpl From<io::Error> for Error {\n fn 
from(err: io::Error) -> Error {\n Error::Io(err)\n }\n}\n\nimpl From<elaborate::Error> for Error {\n fn from(err: elaborate::Error) -> Error {\n Error::Elaborator(err)\n }\n}\n\nimpl From<typeck::Error> for Error {\n fn from(err: typeck::Error) -> Error {\n Error::TypeCk(err)\n }\n}\n\nimpl From<parser::Error> for Error {\n fn from(err: parser::Error) -> Error {\n Error::Parser(err)\n }\n}\n\n#[derive(Debug)]\npub enum Error {\n Io(io::Error),\n Elaborator(elaborate::Error),\n UnknownCommand(String),\n TypeCk(typeck::Error),\n Parser(parser::Error),\n}\n\n#[derive(Debug)]\nenum Command {\n Quit,\n Reload,\n Unknown(String),\n TypeOf(String),\n Help,\n}\n\npub enum Cont {\n Quit,\n Done,\n}\n\nfn split_command(command_text: &str) -> (&str, &str) {\n\n for (i, c) in command_text.char_indices() {\n if c.is_whitespace() {\n return command_text.split_at(i);\n }\n }\n\n (command_text, \"\")\n}\n\nimpl Repl {\n pub fn from_path(file: &Option<PathBuf>) -> Result<Repl, Error> {\n match file {\n &None => {\n let ecx = ElabCx::from_module(ast::Module::empty(), SourceMap::empty());\n Ok(Repl {\n elab_cx: ecx,\n root_file: None\n })\n }\n &Some(ref file_path) => {\n let parser = try!(parser::from_file(file_path));\n let module = try!(parser.parse());\n\n let mut ecx = ElabCx::from_module(module, parser.source_map.clone());\n\n let emodule = try!(ecx.elaborate_module(file_path));\n\n {\n let main = try!(ecx.ty_cx.get_main_body());\n let result = try!(ecx.ty_cx.eval(main));\n println!(\"main={}\", result);\n }\n\n Ok(Repl {\n elab_cx: ecx,\n root_file: file.clone(),\n })\n }\n }\n }\n\n \/\/\/ Starts the read-eval-print-loop for querying the language.\n pub fn start(mut self) -> Result<(), Error> {\n loop {\n \/\/ First we grab a line ...\n let input = match readline::readline(\"hubris> \") {\n None => {\n println!(\"\");\n break;\n }\n Some(input) => input,\n };\n\n \/\/ Add it to the history\n readline::add_history(input.as_ref());\n\n match self.repl_interation(input) {\n \/\/ please make me look better\n Err(e) => println!(\"repl error: {:?}\", e),\n Ok(cont) => {\n match cont {\n Cont::Quit => break,\n Cont::Done => {}\n }\n }\n }\n }\n\n Ok(())\n }\n\n pub fn repl_interation(&mut self, input: String) -> Result<Cont, Error> {\n if input.len() <= 0 {\n \/\/ do nothing\n } else if &input[0..1] == \":\" {\n let cmd = self.parse_command(&input[1..]);\n match cmd {\n Command::Quit => return Ok(Cont::Quit),\n Command::Reload => panic!(\"unsupported command\"),\n Command::Unknown(u) => return Err(Error::UnknownCommand(u)),\n Command::TypeOf(t) => {\n let term = try!(self.preprocess_term(t));\n println!(\"{}\", try!(self.type_check_term(&term)));\n }\n Command::Help => println!(\"{}\", HELP_MESSAGE),\n }\n } else {\n try!(self.handle_input(input.to_string()));\n }\n\n Ok(Cont::Done)\n }\n\n fn preprocess_term(&mut self, source: String) -> Result<core::Term, Error> {\n let parser = parser::from_string(source).unwrap();\n let term = try!(parser.parse_term());\n\n let mut lcx = LocalElabCx::from_elab_cx(&mut self.elab_cx);\n let term = try!(lcx.elaborate_term(term));\n\n Ok(term)\n }\n\n fn type_check_term(&mut self, term: &core::Term) -> Result<core::Term, Error> {\n let mut ltycx = LocalCx::from_cx(&self.elab_cx.ty_cx);\n let term = try!(ltycx.type_infer_term(&term));\n\n Ok(term)\n }\n\n fn handle_input(&mut self, source: String) -> Result<(), Error> {\n let term = try!(self.preprocess_term(source));\n let ty = try!(self.type_check_term(&term));\n println!(\"{} : {}\", term, ty);\n println!(\"{}\", 
try!(self.elab_cx.ty_cx.eval(&term)));\n Ok(())\n }\n\n fn parse_command(&self, command_text: &str) -> Command {\n let (command, arg) = split_command(command_text);\n\n if command == \"\" {\n Command::Unknown(command_text.to_string())\n } else if \"quit\".starts_with(command) {\n Command::Quit\n } else if \"reload\".starts_with(command) {\n Command::Reload\n } else if \"type\".starts_with(command) {\n Command::TypeOf(arg.to_string())\n } else if \"help\".starts_with(command) {\n Command::Help\n } else {\n Command::Unknown(command_text.to_string())\n }\n }\n}\n\n\/\/ impl Report for Error {\n\/\/ type Context = ElabCx;\n\/\/\n\/\/ fn report<O: Write>(cx: &Self::Context,\n\/\/ mut out: Box<Terminal<Output = O>>,\n\/\/ error: Self)\n\/\/ -> TResult<()> {\n\/\/ match error {\n\/\/ Error::TypeCk(ty_ck) => panic!(),\n\/\/ _ => panic!(),\n\/\/ }\n\/\/ }\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before>use arch::ArchDetail;\nuse std::ffi::CStr;\nuse std::slice;\nuse std::str;\nuse std::fmt::{self, Debug, Display, Error, Formatter};\nuse capstone_sys::*;\nuse constants::Arch;\n\n\/\/\/ Representation of the array of instructions returned by disasm\n#[derive(Debug)]\npub struct Instructions<'a>(&'a mut [cs_insn]);\n\n\/\/\/ Integer type used in `InsnId`\npub type InsnIdInt = u32;\n\n\/\/\/ Represents an instruction id, which may architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct InsnId(pub InsnIdInt);\n\n\/\/\/ Integer type used in `InsnGroupId`\npub type InsnGroupIdInt = u8;\n\n\/\/\/ Represents the group an instruction belongs to, which may be architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct InsnGroupId(pub InsnGroupIdInt);\n\n\/\/\/ Integer type used in `RegId`\npub type RegIdInt = u16;\n\n\/\/\/ Represents an register id, which is architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct RegId(pub RegIdInt);\n\nimpl<'a> Instructions<'a> {\n pub(crate) unsafe fn from_raw_parts(ptr: *mut cs_insn, len: usize) -> Instructions<'a> {\n Instructions(slice::from_raw_parts_mut(ptr, len))\n }\n\n pub(crate) fn new_empty() -> Instructions<'a> {\n Instructions(&mut [])\n }\n\n \/\/\/ Get number of instructions\n pub fn len(&self) -> usize {\n self.0.len()\n }\n\n \/\/\/ Iterator over instructions\n pub fn iter<'b>(&'a self) -> InstructionIterator<'b>\n where\n 'a: 'b,\n {\n let iter = self.0.iter();\n InstructionIterator(iter)\n }\n\n pub fn is_empty(&self) -> bool {\n self.len() == 0\n }\n}\n\nimpl<'a> Drop for Instructions<'a> {\n fn drop(&mut self) {\n if self.len() > 0 {\n unsafe {\n cs_free(self.0.as_mut_ptr(), self.len());\n }\n }\n }\n}\n\n\/\/\/ An iterator over the instructions returned by disasm\n\/\/\/\n\/\/\/ This is currently the only supported interface for reading them.\npub struct InstructionIterator<'a>(slice::Iter<'a, cs_insn>);\n\nimpl<'a> Iterator for InstructionIterator<'a> {\n type Item = Insn;\n\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next().map(|x| Insn(*x))\n }\n}\n\n\/\/\/ A wrapper for the raw capstone-sys instruction\npub struct Insn(pub(crate) cs_insn);\n\n\/\/\/ Contains extra information about an instruction such as register reads in\n\/\/\/ addition to architecture-specific information\npub struct InsnDetail<'a>(pub(crate) &'a cs_detail, pub(crate) Arch);\n\nimpl Insn {\n \/\/\/ The mnemonic for the instruction\n pub fn mnemonic(&self) -> Option<&str> {\n let cstr = unsafe { CStr::from_ptr(self.0.mnemonic.as_ptr()) };\n str::from_utf8(cstr.to_bytes()).ok()\n 
}\n\n \/\/\/ The operand string associated with the instruction\n pub fn op_str(&self) -> Option<&str> {\n let cstr = unsafe { CStr::from_ptr(self.0.op_str.as_ptr()) };\n str::from_utf8(cstr.to_bytes()).ok()\n }\n\n \/\/\/ Access instruction id\n pub fn id(&self) -> InsnId {\n InsnId(self.0.id)\n }\n\n \/\/\/ Size of instruction (in bytes)\n fn len(&self) -> usize {\n self.0.size as usize\n }\n\n \/\/\/ Instruction address\n pub fn address(&self) -> u64 {\n self.0.address as u64\n }\n\n \/\/\/ Byte-level representation of the instruction\n pub fn bytes(&self) -> &[u8] {\n &self.0.bytes[..self.len()]\n }\n\n \/\/\/ Returns the `Detail` object, if there is one. It is up to the caller to determine\n \/\/\/ the pre-conditions are satisfied.\n \/\/\/\n \/\/\/ Be careful this is still in early stages and largely untested with various `cs_option` and\n \/\/\/ architecture matrices\n pub(crate) unsafe fn detail(&self, arch: Arch) -> InsnDetail {\n InsnDetail(&*self.0.detail, arch)\n }\n}\n\nimpl Debug for Insn {\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {\n fmt.debug_struct(\"Insn\")\n .field(\"address\", &self.address())\n .field(\"len\", &self.len())\n .field(\"bytes\", &self.bytes())\n .field(\"mnemonic\", &self.mnemonic())\n .field(\"op_str\", &self.op_str())\n .finish()\n }\n}\n\nimpl Display for Insn {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{:#x}: \", self.address())?;\n if let Some(mnemonic) = self.mnemonic() {\n write!(fmt, \"{} \", mnemonic)?;\n if let Some(op_str) = self.op_str() {\n write!(fmt, \"{}\", op_str)?;\n }\n }\n Ok(())\n }\n}\n\n\/\/\/ Iterator over registers ids\n#[derive(Debug, Clone)]\npub struct RegsIter<'a, T: 'a + Into<RegIdInt> + Copy>(slice::Iter<'a, T>);\n\nimpl<'a, T: 'a + Into<RegIdInt> + Copy> Iterator for RegsIter<'a, T> {\n type Item = RegId;\n fn next(&mut self) -> Option<Self::Item> {\n match self.0.next() {\n Some(x) => Some(RegId((*x).into())),\n None => None,\n }\n }\n}\n\n\/\/\/ Iterator over instruction group ids\n#[derive(Debug, Clone)]\npub struct InsnGroupIter<'a>(slice::Iter<'a, InsnGroupIdInt>);\n\nimpl<'a> Iterator for InsnGroupIter<'a> {\n type Item = InsnGroupId;\n fn next(&mut self) -> Option<Self::Item> {\n match self.0.next() {\n Some(x) => Some(InsnGroupId(*x as InsnGroupIdInt)),\n None => None,\n }\n }\n}\n\nimpl<'a> InsnDetail<'a> {\n \/\/\/ Returns the implicit read registers\n pub fn regs_read(&self) -> RegsIter<u8> {\n RegsIter((*self.0).regs_read[..self.regs_read_count() as usize].iter())\n }\n\n \/\/\/ Returns the number of implicit read registers\n pub fn regs_read_count(&self) -> u8 {\n (*self.0).regs_read_count\n }\n\n \/\/\/ Returns the implicit write registers\n pub fn regs_write(&self) -> RegsIter<u8> {\n RegsIter((*self.0).regs_write[..self.regs_write_count() as usize].iter())\n }\n\n \/\/\/ Returns the number of implicit write registers\n pub fn regs_write_count(&self) -> u8 {\n (*self.0).regs_write_count\n }\n\n \/\/\/ Returns the groups to which this instruction belongs\n pub fn groups(&self) -> InsnGroupIter {\n InsnGroupIter((*self.0).groups[..self.groups_count() as usize].iter())\n }\n\n \/\/\/ Returns the number groups to which this instruction belongs\n pub fn groups_count(&self) -> u8 {\n (*self.0).groups_count\n }\n\n \/\/\/ Architecture-specific detail\n pub fn arch_detail(&self) -> ArchDetail {\n macro_rules! 
def_arch_detail_match {\n (\n $( [ $ARCH:ident, $detail:ident, $insn_detail:ident, $arch:ident ] )*\n ) => {\n use self::ArchDetail::*;\n use Arch::*;\n $( use arch::$arch::$insn_detail; )*\n\n return match self.1 {\n $(\n $ARCH => {\n $detail($insn_detail(unsafe { &self.0.__bindgen_anon_1.$arch }))\n }\n )*\n _ => panic!(\"Unsupported detail arch\"),\n }\n }\n }\n def_arch_detail_match!(\n [ARM, ArmDetail, ArmInsnDetail, arm]\n [ARM64, Arm64Detail, Arm64InsnDetail, arm64]\n [MIPS, MipsDetail, MipsInsnDetail, mips]\n [PPC, PpcDetail, PpcInsnDetail, ppc]\n [SPARC, SparcDetail, SparcInsnDetail, sparc]\n [X86, X86Detail, X86InsnDetail, x86]\n [XCORE, XcoreDetail, XcoreInsnDetail, xcore]\n );\n }\n}\n\nimpl<'a> Debug for InsnDetail<'a> {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n fmt.debug_struct(\"Detail\")\n .field(\"regs_read\", &self.regs_read())\n .field(\"regs_read_count\", &self.regs_read_count())\n .field(\"regs_write\", &self.regs_write())\n .field(\"regs_write_count\", &self.regs_write_count())\n .field(\"groups\", &self.groups())\n .field(\"groups_count\", &self.groups_count())\n .finish()\n }\n}\n\nimpl<'a> Display for Instructions<'a> {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n for instruction in self.iter() {\n write!(fmt, \"{:x}:\\t\", instruction.address())?;\n for byte in instruction.bytes() {\n write!(fmt, \" {:02x}\", byte)?;\n }\n let remainder = 16 * 3 - (instruction.bytes().len()) * 3;\n for _ in 0..remainder {\n write!(fmt, \" \")?;\n }\n if let Some(mnemonic) = instruction.mnemonic() {\n write!(fmt, \" {}\", mnemonic)?;\n if let Some(op_str) = instruction.op_str() {\n write!(fmt, \" {}\", op_str)?;\n }\n }\n write!(fmt, \"\\n\")?;\n }\n Ok(())\n }\n}\n<commit_msg>Implement size_hint for InstructionIterator<commit_after>use arch::ArchDetail;\nuse std::ffi::CStr;\nuse std::slice;\nuse std::str;\nuse std::fmt::{self, Debug, Display, Error, Formatter};\nuse capstone_sys::*;\nuse constants::Arch;\n\n\/\/\/ Representation of the array of instructions returned by disasm\n#[derive(Debug)]\npub struct Instructions<'a>(&'a mut [cs_insn]);\n\n\/\/\/ Integer type used in `InsnId`\npub type InsnIdInt = u32;\n\n\/\/\/ Represents an instruction id, which may architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct InsnId(pub InsnIdInt);\n\n\/\/\/ Integer type used in `InsnGroupId`\npub type InsnGroupIdInt = u8;\n\n\/\/\/ Represents the group an instruction belongs to, which may be architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct InsnGroupId(pub InsnGroupIdInt);\n\n\/\/\/ Integer type used in `RegId`\npub type RegIdInt = u16;\n\n\/\/\/ Represents an register id, which is architecture-specific.\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]\npub struct RegId(pub RegIdInt);\n\nimpl<'a> Instructions<'a> {\n pub(crate) unsafe fn from_raw_parts(ptr: *mut cs_insn, len: usize) -> Instructions<'a> {\n Instructions(slice::from_raw_parts_mut(ptr, len))\n }\n\n pub(crate) fn new_empty() -> Instructions<'a> {\n Instructions(&mut [])\n }\n\n \/\/\/ Get number of instructions\n pub fn len(&self) -> usize {\n self.0.len()\n }\n\n \/\/\/ Iterator over instructions\n pub fn iter<'b>(&'a self) -> InstructionIterator<'b>\n where\n 'a: 'b,\n {\n let iter = self.0.iter();\n InstructionIterator(iter)\n }\n\n pub fn is_empty(&self) -> bool {\n self.len() == 0\n }\n}\n\nimpl<'a> Drop for Instructions<'a> {\n fn drop(&mut self) {\n if self.len() > 0 {\n unsafe {\n cs_free(self.0.as_mut_ptr(), self.len());\n }\n 
}\n }\n}\n\n\/\/\/ An iterator over the instructions returned by disasm\n\/\/\/\n\/\/\/ This is currently the only supported interface for reading them.\npub struct InstructionIterator<'a>(slice::Iter<'a, cs_insn>);\n\nimpl<'a> Iterator for InstructionIterator<'a> {\n type Item = Insn;\n\n #[inline]\n fn next(&mut self) -> Option<Self::Item> {\n self.0.next().map(|x| Insn(*x))\n }\n\n #[inline]\n fn size_hint(&self) -> (usize, Option<usize>) {\n self.0.size_hint()\n }\n\n #[inline]\n fn count(self) -> usize {\n self.0.count()\n }\n}\n\nimpl<'a> DoubleEndedIterator for InstructionIterator<'a> {\n #[inline]\n fn next_back(&mut self) -> Option<Self::Item> {\n self.0.next_back().map(|x| Insn(*x))\n }\n}\n\n\/\/\/ A wrapper for the raw capstone-sys instruction\npub struct Insn(pub(crate) cs_insn);\n\n\/\/\/ Contains extra information about an instruction such as register reads in\n\/\/\/ addition to architecture-specific information\npub struct InsnDetail<'a>(pub(crate) &'a cs_detail, pub(crate) Arch);\n\nimpl Insn {\n \/\/\/ The mnemonic for the instruction\n pub fn mnemonic(&self) -> Option<&str> {\n let cstr = unsafe { CStr::from_ptr(self.0.mnemonic.as_ptr()) };\n str::from_utf8(cstr.to_bytes()).ok()\n }\n\n \/\/\/ The operand string associated with the instruction\n pub fn op_str(&self) -> Option<&str> {\n let cstr = unsafe { CStr::from_ptr(self.0.op_str.as_ptr()) };\n str::from_utf8(cstr.to_bytes()).ok()\n }\n\n \/\/\/ Access instruction id\n pub fn id(&self) -> InsnId {\n InsnId(self.0.id)\n }\n\n \/\/\/ Size of instruction (in bytes)\n fn len(&self) -> usize {\n self.0.size as usize\n }\n\n \/\/\/ Instruction address\n pub fn address(&self) -> u64 {\n self.0.address as u64\n }\n\n \/\/\/ Byte-level representation of the instruction\n pub fn bytes(&self) -> &[u8] {\n &self.0.bytes[..self.len()]\n }\n\n \/\/\/ Returns the `Detail` object, if there is one. 
It is up to the caller to determine\n \/\/\/ the pre-conditions are satisfied.\n \/\/\/\n \/\/\/ Be careful this is still in early stages and largely untested with various `cs_option` and\n \/\/\/ architecture matrices\n pub(crate) unsafe fn detail(&self, arch: Arch) -> InsnDetail {\n InsnDetail(&*self.0.detail, arch)\n }\n}\n\nimpl Debug for Insn {\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {\n fmt.debug_struct(\"Insn\")\n .field(\"address\", &self.address())\n .field(\"len\", &self.len())\n .field(\"bytes\", &self.bytes())\n .field(\"mnemonic\", &self.mnemonic())\n .field(\"op_str\", &self.op_str())\n .finish()\n }\n}\n\nimpl Display for Insn {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n write!(fmt, \"{:#x}: \", self.address())?;\n if let Some(mnemonic) = self.mnemonic() {\n write!(fmt, \"{} \", mnemonic)?;\n if let Some(op_str) = self.op_str() {\n write!(fmt, \"{}\", op_str)?;\n }\n }\n Ok(())\n }\n}\n\n\/\/\/ Iterator over registers ids\n#[derive(Debug, Clone)]\npub struct RegsIter<'a, T: 'a + Into<RegIdInt> + Copy>(slice::Iter<'a, T>);\n\nimpl<'a, T: 'a + Into<RegIdInt> + Copy> Iterator for RegsIter<'a, T> {\n type Item = RegId;\n fn next(&mut self) -> Option<Self::Item> {\n match self.0.next() {\n Some(x) => Some(RegId((*x).into())),\n None => None,\n }\n }\n}\n\n\/\/\/ Iterator over instruction group ids\n#[derive(Debug, Clone)]\npub struct InsnGroupIter<'a>(slice::Iter<'a, InsnGroupIdInt>);\n\nimpl<'a> Iterator for InsnGroupIter<'a> {\n type Item = InsnGroupId;\n fn next(&mut self) -> Option<Self::Item> {\n match self.0.next() {\n Some(x) => Some(InsnGroupId(*x as InsnGroupIdInt)),\n None => None,\n }\n }\n}\n\nimpl<'a> InsnDetail<'a> {\n \/\/\/ Returns the implicit read registers\n pub fn regs_read(&self) -> RegsIter<u8> {\n RegsIter((*self.0).regs_read[..self.regs_read_count() as usize].iter())\n }\n\n \/\/\/ Returns the number of implicit read registers\n pub fn regs_read_count(&self) -> u8 {\n (*self.0).regs_read_count\n }\n\n \/\/\/ Returns the implicit write registers\n pub fn regs_write(&self) -> RegsIter<u8> {\n RegsIter((*self.0).regs_write[..self.regs_write_count() as usize].iter())\n }\n\n \/\/\/ Returns the number of implicit write registers\n pub fn regs_write_count(&self) -> u8 {\n (*self.0).regs_write_count\n }\n\n \/\/\/ Returns the groups to which this instruction belongs\n pub fn groups(&self) -> InsnGroupIter {\n InsnGroupIter((*self.0).groups[..self.groups_count() as usize].iter())\n }\n\n \/\/\/ Returns the number groups to which this instruction belongs\n pub fn groups_count(&self) -> u8 {\n (*self.0).groups_count\n }\n\n \/\/\/ Architecture-specific detail\n pub fn arch_detail(&self) -> ArchDetail {\n macro_rules! 
def_arch_detail_match {\n (\n $( [ $ARCH:ident, $detail:ident, $insn_detail:ident, $arch:ident ] )*\n ) => {\n use self::ArchDetail::*;\n use Arch::*;\n $( use arch::$arch::$insn_detail; )*\n\n return match self.1 {\n $(\n $ARCH => {\n $detail($insn_detail(unsafe { &self.0.__bindgen_anon_1.$arch }))\n }\n )*\n _ => panic!(\"Unsupported detail arch\"),\n }\n }\n }\n def_arch_detail_match!(\n [ARM, ArmDetail, ArmInsnDetail, arm]\n [ARM64, Arm64Detail, Arm64InsnDetail, arm64]\n [MIPS, MipsDetail, MipsInsnDetail, mips]\n [PPC, PpcDetail, PpcInsnDetail, ppc]\n [SPARC, SparcDetail, SparcInsnDetail, sparc]\n [X86, X86Detail, X86InsnDetail, x86]\n [XCORE, XcoreDetail, XcoreInsnDetail, xcore]\n );\n }\n}\n\nimpl<'a> Debug for InsnDetail<'a> {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n fmt.debug_struct(\"Detail\")\n .field(\"regs_read\", &self.regs_read())\n .field(\"regs_read_count\", &self.regs_read_count())\n .field(\"regs_write\", &self.regs_write())\n .field(\"regs_write_count\", &self.regs_write_count())\n .field(\"groups\", &self.groups())\n .field(\"groups_count\", &self.groups_count())\n .finish()\n }\n}\n\nimpl<'a> Display for Instructions<'a> {\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n for instruction in self.iter() {\n write!(fmt, \"{:x}:\\t\", instruction.address())?;\n for byte in instruction.bytes() {\n write!(fmt, \" {:02x}\", byte)?;\n }\n let remainder = 16 * 3 - (instruction.bytes().len()) * 3;\n for _ in 0..remainder {\n write!(fmt, \" \")?;\n }\n if let Some(mnemonic) = instruction.mnemonic() {\n write!(fmt, \" {}\", mnemonic)?;\n if let Some(op_str) = instruction.op_str() {\n write!(fmt, \" {}\", op_str)?;\n }\n }\n write!(fmt, \"\\n\")?;\n }\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add while loops<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make primitive function calls work<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! Main module to handle the layout.\n\/\/! 
This is where the i3-specific code is.\n\nuse std::sync::{Mutex, MutexGuard, TryLockError};\nuse std::ptr;\n\nuse super::container::{Container, Handle, ContainerType};\nuse super::node::{Node};\nuse super::super::rustwlc::handle::{WlcView, WlcOutput};\n\n\npub type TreeResult = Result<(), TryLockError<MutexGuard<'static, Tree>>>;\n\nconst ERR_BAD_TREE: &'static str = \"Layout tree was in an invalid configuration\";\n\npub struct Tree {\n root: Node,\n active_container: *const Node,\n}\n\nimpl Tree {\n fn get_active_container(&self) -> Option<&Node> {\n if self.active_container.is_null() {\n None\n } else {\n unsafe {\n Some(&*self.active_container)\n }\n }\n }\n\n fn get_active_output(&self) -> Option<&Node> {\n if let Some(node) = self.get_active_container() {\n node.get_ancestor_of_type(ContainerType::Output)\n } else {\n None\n }\n }\n\n fn get_current_workspace(&self) -> Option<&mut Node> {\n if let Some(container) = self.get_active_container() {\n \/\/if let Some(child) = container.get_ancestor_of_type(ContainerType::Workspace) {\n \/\/return child.get_children()[0].get_parent()\n\n \/\/}\n \/\/ NOTE hack here, remove commented code above to make this work properly\n let parent = container.get_parent().expect(ERR_BAD_TREE);\n for child in parent.get_children_mut() {\n if child == container {\n return Some(child);\n }\n }\n }\n return None\n }\n\n fn get_output_of_view(&self, wlc_view: &WlcView) -> Option<WlcOutput> {\n if let Some(view_node) = self.root.find_view_by_handle(wlc_view) {\n if let Some(output_node) = view_node.get_ancestor_of_type(ContainerType::Output) {\n if let Some(handle) = output_node.get_val().get_handle() {\n return match handle {\n Handle::Output(output) => Some(output),\n _ => None\n }\n }\n }\n }\n return None;\n }\n\n fn get_workspace_by_name(&self, name: &str) -> Option<&Node> {\n for child in self.root.get_children()[0].get_children() {\n if child.get_val().get_name().expect(ERR_BAD_TREE) != name {\n continue\n }\n return Some(child);\n }\n return None\n }\n\n fn get_workspace_by_name_mut(&mut self, name: &str) -> Option<&mut Node> {\n for child in self.root.get_children_mut()[0].get_children_mut() {\n if child.get_val().get_name().expect(ERR_BAD_TREE) != name {\n continue\n }\n return Some(child);\n }\n return None\n }\n\n\n fn add_output(&mut self, wlc_output: WlcOutput) {\n self.root.new_child(Container::new_output(wlc_output));\n }\n\n fn add_workspace(&mut self, name: &str) {\n let workspace = Container::new_workspace(name.to_string());\n let mut index = 0;\n if let Some(output) = self.get_active_output() {\n for (cur_index, child) in self.root.get_children().iter().enumerate() {\n if child == output {\n index = cur_index;\n break;\n }\n }\n }\n self.root.get_children_mut()[index].new_child(workspace);\n }\n\n fn add_view(&self, wlc_view: WlcView) {\n if let Some(current_workspace) = self.get_current_workspace() {\n trace!(\"Adding view {:?} to {:?}\", wlc_view, current_workspace);\n current_workspace.new_child(Container::new_view(wlc_view));\n }\n }\n\n fn remove_view(&self, wlc_view: &WlcView) {\n if let Some(view) = self.root.find_view_by_handle(&wlc_view) {\n let parent = view.get_parent().expect(ERR_BAD_TREE);\n parent.remove_child(view);\n }\n }\n}\n\nunsafe impl Send for Tree {}\n\nlazy_static! 
{\n static ref TREE: Mutex<Tree> = {\n Mutex::new(Tree{\n root: Node::new(Container::new_root()),\n active_container: ptr::null(),\n })\n };\n}\n\npub fn add_output(wlc_output: WlcOutput) -> TreeResult {\n {\n let mut tree = try!(TREE.try_lock());\n tree.add_output(wlc_output);\n }\n try!(add_workspace(&\"1\"));\n try!(switch_workspace(&\"1\"));\n Ok(())\n}\n\npub fn add_workspace(name: &str) -> TreeResult {\n trace!(\"Adding new workspace to root\");\n let mut tree = try!(TREE.lock());\n tree.add_workspace(name);\n Ok(())\n}\n\npub fn add_view(wlc_view: WlcView) -> TreeResult {\n let tree = try!(TREE.lock());\n tree.add_view(wlc_view);\n Ok(())\n}\n\npub fn remove_view(wlc_view: &WlcView) -> TreeResult {\n let tree = try!(TREE.lock());\n tree.remove_view(wlc_view);\n Ok(())\n}\n\npub fn switch_workspace(name: &str) -> TreeResult {\n trace!(\"Switching to workspace {}\", name);\n let mut tree = try!(TREE.lock());\n if let Some(old_workspace) = tree.get_current_workspace() {\n \/\/ Make all the views in the original workspace to be invisible\n for view in old_workspace.get_children_mut() {\n trace!(\"Setting {:?} invisible\", view);\n match view.get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.set_mask(0),\n _ => {},\n }\n }\n }\n let current_workspace: *const Node;\n {\n let new_current_workspace: &mut Node;\n if let Some(_) = tree.get_workspace_by_name(name) {\n trace!(\"Found workspace {}\", name);\n new_current_workspace = tree.get_workspace_by_name_mut(name)\n .expect(ERR_BAD_TREE);\n } else {\n drop(tree);\n try!(add_workspace(name));\n tree = try!(TREE.lock());\n new_current_workspace = tree.get_workspace_by_name_mut(name)\n .expect(ERR_BAD_TREE);\n }\n for view in new_current_workspace.get_children_mut() {\n trace!(\"Setting {:?} visible\", view);\n match view.get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.set_mask(1),\n _ => {},\n }\n }\n \/\/ Set the first view to be focused, so that the view is updated to this new workspace\n if new_current_workspace.get_children().len() > 0 {\n trace!(\"Focusing view\");\n match new_current_workspace.get_children_mut()[0]\n .get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.focus(),\n _ => {},\n }\n } else {\n WlcView::root().focus();\n }\n current_workspace = new_current_workspace as *const Node;\n }\n tree.active_container = current_workspace;\n Ok(())\n}\n\n\/\/\/ Finds the WlcOutput associated with the WlcView from the tree\npub fn get_output_of_view(wlc_view: &WlcView) -> Option<WlcOutput> {\n let tree = TREE.lock().expect(\"Unable to lock layout tree!\");\n tree.get_output_of_view(wlc_view)\n}\n\n<commit_msg>Touched up switch_workspace<commit_after>\/\/! Main module to handle the layout.\n\/\/! 
This is where the i3-specific code is.\n\nuse std::sync::{Mutex, MutexGuard, TryLockError};\nuse std::ptr;\n\nuse super::container::{Container, Handle, ContainerType};\nuse super::node::{Node};\nuse super::super::rustwlc::handle::{WlcView, WlcOutput};\n\n\npub type TreeResult = Result<(), TryLockError<MutexGuard<'static, Tree>>>;\n\nconst ERR_BAD_TREE: &'static str = \"Layout tree was in an invalid configuration\";\n\npub struct Tree {\n root: Node,\n active_container: *const Node,\n}\n\nimpl Tree {\n fn get_active_container(&self) -> Option<&Node> {\n if self.active_container.is_null() {\n None\n } else {\n unsafe {\n Some(&*self.active_container)\n }\n }\n }\n\n fn get_active_output(&self) -> Option<&Node> {\n if let Some(node) = self.get_active_container() {\n node.get_ancestor_of_type(ContainerType::Output)\n } else {\n None\n }\n }\n\n fn get_current_workspace(&self) -> Option<&mut Node> {\n if let Some(container) = self.get_active_container() {\n \/\/if let Some(child) = container.get_ancestor_of_type(ContainerType::Workspace) {\n \/\/return child.get_children()[0].get_parent()\n\n \/\/}\n \/\/ NOTE hack here, remove commented code above to make this work properly\n let parent = container.get_parent().expect(ERR_BAD_TREE);\n for child in parent.get_children_mut() {\n if child == container {\n return Some(child);\n }\n }\n }\n return None\n }\n\n fn get_output_of_view(&self, wlc_view: &WlcView) -> Option<WlcOutput> {\n if let Some(view_node) = self.root.find_view_by_handle(wlc_view) {\n if let Some(output_node) = view_node.get_ancestor_of_type(ContainerType::Output) {\n if let Some(handle) = output_node.get_val().get_handle() {\n return match handle {\n Handle::Output(output) => Some(output),\n _ => None\n }\n }\n }\n }\n return None;\n }\n\n fn get_workspace_by_name(&self, name: &str) -> Option<&Node> {\n for child in self.root.get_children()[0].get_children() {\n if child.get_val().get_name().expect(ERR_BAD_TREE) != name {\n continue\n }\n return Some(child);\n }\n return None\n }\n\n fn get_workspace_by_name_mut(&mut self, name: &str) -> Option<&mut Node> {\n for child in self.root.get_children_mut()[0].get_children_mut() {\n if child.get_val().get_name().expect(ERR_BAD_TREE) != name {\n continue\n }\n return Some(child);\n }\n return None\n }\n\n\n fn add_output(&mut self, wlc_output: WlcOutput) {\n self.root.new_child(Container::new_output(wlc_output));\n }\n\n fn add_workspace(&mut self, name: &str) {\n let workspace = Container::new_workspace(name.to_string());\n let mut index = 0;\n if let Some(output) = self.get_active_output() {\n for (cur_index, child) in self.root.get_children().iter().enumerate() {\n if child == output {\n index = cur_index;\n break;\n }\n }\n }\n self.root.get_children_mut()[index].new_child(workspace);\n }\n\n fn add_view(&self, wlc_view: WlcView) {\n if let Some(current_workspace) = self.get_current_workspace() {\n trace!(\"Adding view {:?} to {:?}\", wlc_view, current_workspace);\n current_workspace.new_child(Container::new_view(wlc_view));\n }\n }\n\n fn remove_view(&self, wlc_view: &WlcView) {\n if let Some(view) = self.root.find_view_by_handle(&wlc_view) {\n let parent = view.get_parent().expect(ERR_BAD_TREE);\n parent.remove_child(view);\n }\n }\n}\n\nunsafe impl Send for Tree {}\n\nlazy_static! 
{\n static ref TREE: Mutex<Tree> = {\n Mutex::new(Tree{\n root: Node::new(Container::new_root()),\n active_container: ptr::null(),\n })\n };\n}\n\npub fn add_output(wlc_output: WlcOutput) -> TreeResult {\n {\n let mut tree = try!(TREE.try_lock());\n tree.add_output(wlc_output);\n }\n try!(add_workspace(&\"1\"));\n try!(switch_workspace(&\"1\"));\n Ok(())\n}\n\npub fn add_workspace(name: &str) -> TreeResult {\n trace!(\"Adding new workspace to root\");\n let mut tree = try!(TREE.lock());\n tree.add_workspace(name);\n Ok(())\n}\n\npub fn add_view(wlc_view: WlcView) -> TreeResult {\n let tree = try!(TREE.lock());\n tree.add_view(wlc_view);\n Ok(())\n}\n\npub fn remove_view(wlc_view: &WlcView) -> TreeResult {\n let tree = try!(TREE.lock());\n tree.remove_view(wlc_view);\n Ok(())\n}\n\npub fn switch_workspace(name: &str) -> TreeResult {\n trace!(\"Switching to workspace {}\", name);\n let mut tree = try!(TREE.lock());\n if let Some(old_workspace) = tree.get_current_workspace() {\n \/\/ Make all the views in the original workspace to be invisible\n for view in old_workspace.get_children_mut() {\n trace!(\"Setting {:?} invisible\", view);\n match view.get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.set_mask(0),\n _ => {},\n }\n }\n }\n let current_workspace: *const Node;\n {\n if let Some(_) = tree.get_workspace_by_name(name) {\n trace!(\"Found workspace {}\", name);\n } else {\n trace!(\"Adding workspace {}\", name);\n try!(add_workspace(name));\n }\n let new_current_workspace = tree.get_workspace_by_name_mut(name).expect(ERR_BAD_TREE);\n for view in new_current_workspace.get_children_mut() {\n trace!(\"Setting {:?} visible\", view);\n match view.get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.set_mask(1),\n _ => {},\n }\n }\n \/\/ Set the first view to be focused, so that the view is updated to this new workspace\n if new_current_workspace.get_children().len() > 0 {\n trace!(\"Focusing view\");\n match new_current_workspace.get_children_mut()[0]\n .get_val().get_handle().expect(ERR_BAD_TREE) {\n Handle::View(view) => view.focus(),\n _ => {},\n }\n } else {\n WlcView::root().focus();\n }\n current_workspace = new_current_workspace as *const Node;\n }\n tree.active_container = current_workspace;\n Ok(())\n}\n\n\/\/\/ Finds the WlcOutput associated with the WlcView from the tree\npub fn get_output_of_view(wlc_view: &WlcView) -> Option<WlcOutput> {\n let tree = TREE.lock().expect(\"Unable to lock layout tree!\");\n tree.get_output_of_view(wlc_view)\n}\n\n<|endoftext|>"} {"text":"<commit_before>enum Category {\n Identifier,\n AssignmentOperator,\n IntegerLiteral,\n StringLiteral,\n}\n\npub struct Token {\n lexeme: String,\n category: Category,\n}\n<commit_msg>Derive traits necessary to use categories in assert_eq!.<commit_after>#[derive(PartialEq, Show)]\npub enum Category {\n Identifier,\n AssignmentOperator,\n IntegerLiteral,\n StringLiteral,\n}\n\npub struct Token {\n lexeme: String,\n category: Category,\n}\n<|endoftext|>"} {"text":"<commit_before>use std::collections::HashMap;\nuse std::collections::hash_map;\nuse std::default::Default;\nuse std::slice;\nuse stream::wire_format;\nuse clear::Clear;\n\n#[derive(Show)]\npub enum UnknownValue {\n Fixed32(u32),\n Fixed64(u64),\n Varint(u64),\n LengthDelimited(Vec<u8>),\n}\n\nimpl UnknownValue {\n pub fn wire_type(&self) -> wire_format::WireType {\n self.get_ref().wire_type()\n }\n\n pub fn get_ref<'s>(&'s self) -> UnknownValueRef<'s> {\n match *self {\n UnknownValue::Fixed32(fixed32) => 
UnknownValueRef::Fixed32(fixed32),\n UnknownValue::Fixed64(fixed64) => UnknownValueRef::Fixed64(fixed64),\n UnknownValue::Varint(varint) => UnknownValueRef::Varint(varint),\n UnknownValue::LengthDelimited(ref bytes) => UnknownValueRef::LengthDelimited(bytes.as_slice()),\n }\n }\n}\n\npub enum UnknownValueRef<'o> {\n Fixed32(u32),\n Fixed64(u64),\n Varint(u64),\n LengthDelimited(&'o [u8]),\n}\n\nimpl<'o> UnknownValueRef<'o> {\n pub fn wire_type(&self) -> wire_format::WireType {\n match *self {\n UnknownValueRef::Fixed32(_) => wire_format::WireTypeFixed32,\n UnknownValueRef::Fixed64(_) => wire_format::WireTypeFixed64,\n UnknownValueRef::Varint(_) => wire_format::WireTypeVarint,\n UnknownValueRef::LengthDelimited(_) => wire_format::WireTypeLengthDelimited,\n }\n }\n}\n\n#[derive(Clone,PartialEq,Eq,Show,Default)]\npub struct UnknownValues {\n pub fixed32: Vec<u32>,\n pub fixed64: Vec<u64>,\n pub varint: Vec<u64>,\n pub length_delimited: Vec<Vec<u8>>,\n}\n\nimpl UnknownValues {\n pub fn add_value(&mut self, value: UnknownValue) {\n match value {\n UnknownValue::Fixed64(fixed64) => self.fixed64.push(fixed64),\n UnknownValue::Fixed32(fixed32) => self.fixed32.push(fixed32),\n UnknownValue::Varint(varint) => self.varint.push(varint),\n UnknownValue::LengthDelimited(length_delimited) => self.length_delimited.push(length_delimited),\n };\n }\n\n pub fn iter<'s>(&'s self) -> UnknownValuesIter<'s> {\n UnknownValuesIter {\n fixed32: self.fixed32.iter(),\n fixed64: self.fixed64.iter(),\n varint: self.varint.iter(),\n length_delimited: self.length_delimited.iter(),\n }\n }\n}\n\npub struct UnknownValuesIter<'o> {\n fixed32: slice::Iter<'o, u32>,\n fixed64: slice::Iter<'o, u64>,\n varint: slice::Iter<'o, u64>,\n length_delimited: slice::Iter<'o, Vec<u8>>,\n}\n\nimpl<'o> Iterator for UnknownValuesIter<'o> {\n type Item = UnknownValueRef<'o>;\n\n fn next(&mut self) -> Option<UnknownValueRef<'o>> {\n let fixed32 = self.fixed32.next();\n if fixed32.is_some() {\n return Some(UnknownValueRef::Fixed32(*fixed32.unwrap()));\n }\n let fixed64 = self.fixed64.next();\n if fixed64.is_some() {\n return Some(UnknownValueRef::Fixed64(*fixed64.unwrap()));\n }\n let varint = self.varint.next();\n if varint.is_some() {\n return Some(UnknownValueRef::Varint(*varint.unwrap()));\n }\n let length_delimited = self.length_delimited.next();\n if length_delimited.is_some() {\n return Some(UnknownValueRef::LengthDelimited(length_delimited.unwrap().as_slice()))\n }\n None\n }\n}\n\n#[derive(Clone,PartialEq,Eq,Show,Default)]\npub struct UnknownFields {\n \/\/ option is needed, because HashMap constructor performs allocation,\n \/\/ and very expensive\n pub fields: Option<Box<HashMap<u32, UnknownValues>>>,\n}\n\nimpl UnknownFields {\n pub fn new() -> UnknownFields {\n Default::default()\n }\n\n fn init_map(&mut self) {\n if self.fields.is_none() {\n self.fields = Some(box Default::default());\n }\n }\n\n fn find_field<'a>(&'a mut self, number: u32) -> &'a mut UnknownValues {\n self.init_map();\n\n match self.fields.as_mut().unwrap().entry(number) {\n hash_map::Entry::Occupied(e) => e.into_mut(),\n hash_map::Entry::Vacant(e) => e.set(Default::default()),\n }\n }\n\n pub fn add_fixed32(&mut self, number: u32, fixed32: u32) {\n self.find_field(number).fixed32.push(fixed32);\n }\n\n pub fn add_fixed64(&mut self, number: u32, fixed64: u64) {\n self.find_field(number).fixed64.push(fixed64);\n }\n\n pub fn add_varint(&mut self, number: u32, varint: u64) {\n self.find_field(number).varint.push(varint);\n }\n\n pub fn add_length_delimited(&mut 
self, number: u32, length_delimited: Vec<u8>) {\n self.find_field(number).length_delimited.push(length_delimited);\n }\n\n pub fn add_value(&mut self, number: u32, value: UnknownValue) {\n self.find_field(number).add_value(value);\n }\n\n pub fn iter<'s>(&'s self) -> UnknownFieldIter<'s> {\n UnknownFieldIter {\n entries: self.fields.as_ref().map(|m| m.iter())\n }\n }\n}\n\nimpl Clear for UnknownFields {\n fn clear(&mut self) {\n if self.fields.is_some() {\n self.fields.as_mut().unwrap().clear();\n }\n }\n}\n\npub struct UnknownFieldIter<'s> {\n entries: Option<hash_map::Iter<'s, u32, UnknownValues>>,\n}\n\nimpl<'s> Iterator for UnknownFieldIter<'s> {\n type Item = (u32, &'s UnknownValues);\n\n fn next(&mut self) -> Option<(u32, &'s UnknownValues)> {\n if self.entries.is_none() {\n None\n } else {\n self.entries.as_mut().unwrap().next().map(|(&number, values)| (number, values))\n }\n }\n}\n<commit_msg>update to lastest rust<commit_after>use std::collections::HashMap;\nuse std::collections::hash_map;\nuse std::default::Default;\nuse std::slice;\nuse stream::wire_format;\nuse clear::Clear;\n\n#[derive(Show)]\npub enum UnknownValue {\n Fixed32(u32),\n Fixed64(u64),\n Varint(u64),\n LengthDelimited(Vec<u8>),\n}\n\nimpl UnknownValue {\n pub fn wire_type(&self) -> wire_format::WireType {\n self.get_ref().wire_type()\n }\n\n pub fn get_ref<'s>(&'s self) -> UnknownValueRef<'s> {\n match *self {\n UnknownValue::Fixed32(fixed32) => UnknownValueRef::Fixed32(fixed32),\n UnknownValue::Fixed64(fixed64) => UnknownValueRef::Fixed64(fixed64),\n UnknownValue::Varint(varint) => UnknownValueRef::Varint(varint),\n UnknownValue::LengthDelimited(ref bytes) => UnknownValueRef::LengthDelimited(bytes.as_slice()),\n }\n }\n}\n\npub enum UnknownValueRef<'o> {\n Fixed32(u32),\n Fixed64(u64),\n Varint(u64),\n LengthDelimited(&'o [u8]),\n}\n\nimpl<'o> UnknownValueRef<'o> {\n pub fn wire_type(&self) -> wire_format::WireType {\n match *self {\n UnknownValueRef::Fixed32(_) => wire_format::WireTypeFixed32,\n UnknownValueRef::Fixed64(_) => wire_format::WireTypeFixed64,\n UnknownValueRef::Varint(_) => wire_format::WireTypeVarint,\n UnknownValueRef::LengthDelimited(_) => wire_format::WireTypeLengthDelimited,\n }\n }\n}\n\n#[derive(Clone,PartialEq,Eq,Show,Default)]\npub struct UnknownValues {\n pub fixed32: Vec<u32>,\n pub fixed64: Vec<u64>,\n pub varint: Vec<u64>,\n pub length_delimited: Vec<Vec<u8>>,\n}\n\nimpl UnknownValues {\n pub fn add_value(&mut self, value: UnknownValue) {\n match value {\n UnknownValue::Fixed64(fixed64) => self.fixed64.push(fixed64),\n UnknownValue::Fixed32(fixed32) => self.fixed32.push(fixed32),\n UnknownValue::Varint(varint) => self.varint.push(varint),\n UnknownValue::LengthDelimited(length_delimited) => self.length_delimited.push(length_delimited),\n };\n }\n\n pub fn iter<'s>(&'s self) -> UnknownValuesIter<'s> {\n UnknownValuesIter {\n fixed32: self.fixed32.iter(),\n fixed64: self.fixed64.iter(),\n varint: self.varint.iter(),\n length_delimited: self.length_delimited.iter(),\n }\n }\n}\n\npub struct UnknownValuesIter<'o> {\n fixed32: slice::Iter<'o, u32>,\n fixed64: slice::Iter<'o, u64>,\n varint: slice::Iter<'o, u64>,\n length_delimited: slice::Iter<'o, Vec<u8>>,\n}\n\nimpl<'o> Iterator for UnknownValuesIter<'o> {\n type Item = UnknownValueRef<'o>;\n\n fn next(&mut self) -> Option<UnknownValueRef<'o>> {\n let fixed32 = self.fixed32.next();\n if fixed32.is_some() {\n return Some(UnknownValueRef::Fixed32(*fixed32.unwrap()));\n }\n let fixed64 = self.fixed64.next();\n if fixed64.is_some() {\n return 
Some(UnknownValueRef::Fixed64(*fixed64.unwrap()));\n }\n let varint = self.varint.next();\n if varint.is_some() {\n return Some(UnknownValueRef::Varint(*varint.unwrap()));\n }\n let length_delimited = self.length_delimited.next();\n if length_delimited.is_some() {\n return Some(UnknownValueRef::LengthDelimited(length_delimited.unwrap().as_slice()))\n }\n None\n }\n}\n\n#[derive(Clone,PartialEq,Eq,Show,Default)]\npub struct UnknownFields {\n \/\/ option is needed, because HashMap constructor performs allocation,\n \/\/ and very expensive\n pub fields: Option<Box<HashMap<u32, UnknownValues>>>,\n}\n\nimpl UnknownFields {\n pub fn new() -> UnknownFields {\n Default::default()\n }\n\n fn init_map(&mut self) {\n if self.fields.is_none() {\n self.fields = Some(box Default::default());\n }\n }\n\n fn find_field<'a>(&'a mut self, number: &'a u32) -> &'a mut UnknownValues {\n self.init_map();\n\n match self.fields.as_mut().unwrap().entry(number) {\n hash_map::Entry::Occupied(e) => e.into_mut(),\n hash_map::Entry::Vacant(e) => e.insert(Default::default()),\n }\n }\n\n pub fn add_fixed32(&mut self, number: u32, fixed32: u32) {\n self.find_field(&number).fixed32.push(fixed32);\n }\n\n pub fn add_fixed64(&mut self, number: u32, fixed64: u64) {\n self.find_field(&number).fixed64.push(fixed64);\n }\n\n pub fn add_varint(&mut self, number: u32, varint: u64) {\n self.find_field(&number).varint.push(varint);\n }\n\n pub fn add_length_delimited(&mut self, number: u32, length_delimited: Vec<u8>) {\n self.find_field(&number).length_delimited.push(length_delimited);\n }\n\n pub fn add_value(&mut self, number: u32, value: UnknownValue) {\n self.find_field(&number).add_value(value);\n }\n\n pub fn iter<'s>(&'s self) -> UnknownFieldIter<'s> {\n UnknownFieldIter {\n entries: self.fields.as_ref().map(|m| m.iter())\n }\n }\n}\n\nimpl Clear for UnknownFields {\n fn clear(&mut self) {\n if self.fields.is_some() {\n self.fields.as_mut().unwrap().clear();\n }\n }\n}\n\npub struct UnknownFieldIter<'s> {\n entries: Option<hash_map::Iter<'s, u32, UnknownValues>>,\n}\n\nimpl<'s> Iterator for UnknownFieldIter<'s> {\n type Item = (u32, &'s UnknownValues);\n\n fn next(&mut self) -> Option<(u32, &'s UnknownValues)> {\n if self.entries.is_none() {\n None\n } else {\n self.entries.as_mut().unwrap().next().map(|(&number, values)| (number, values))\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use ::nfa::{State, Edge, NFA};\n\n#[cfg(test)] mod spec;\n\n#[derive(Clone,PartialEq,Debug)]\nstruct PotentialMatch {\n current_state: Option<State>,\n remaining_text: String\n}\n\nimpl PotentialMatch {\n pub fn advance(&self, nfa: &NFA) -> Vec<PotentialMatch> {\n if self.current_state.is_none() {\n return vec![self.clone()];\n }\n\n let current_state = self.current_state.clone().unwrap();\n\n match current_state {\n State::State{ref edge, ref out} => {\n self.next(nfa, edge, out)\n },\n State::Split{ref s1, ref out1, ref s2, ref out2} => {\n let mut s1_next = self.next(nfa, s1, out1);\n let mut s2_next = self.next(nfa, s2, out2);\n s1_next.append(&mut s2_next);\n s1_next\n }\n }\n }\n\n fn next(&self, nfa: &NFA, edge: &Option<char>, out: &Edge) -> Vec<PotentialMatch> {\n match edge {\n &Some(val) => {\n if self.remaining_text.is_empty() {\n \/\/ no character to consume, this potential match cannot continue\n return vec![];\n }\n\n if val == self.remaining_text.as_bytes()[0] as char {\n \/\/ can consume char and advance along edge\n match out {\n &Edge::End => {\n vec![PotentialMatch::new(None, \n &self.remaining_text[1..])]\n },\n 
&Edge::Id(id) => {\n vec![PotentialMatch::new(nfa.get_state(id),\n &self.remaining_text[1..])]\n },\n _ => panic!()\n }\n } else { \/\/ cannot proceed\n vec![]\n }\n },\n &None => {\n \/\/ can advance along empty edge\n match out {\n &Edge::End => {\n vec![PotentialMatch::new(None,\n &self.remaining_text)]\n },\n &Edge::Id(id) => {\n vec![PotentialMatch::new(\n nfa.get_state(id),\n &self.remaining_text)]\n },\n _ => panic!()\n }\n }\n }\n\n }\n\n\n pub fn is_match(&self) -> bool {\n self.current_state.is_none()\n }\n\n pub fn new(state: Option<State>, remaining_text: &str) -> PotentialMatch {\n PotentialMatch {\n current_state: state,\n remaining_text: remaining_text.to_owned()\n }\n }\n}\n\npub struct Matcher {\n nfa: NFA,\n text: String\n}\n\nimpl Matcher {\n pub fn new(nfa: NFA, text: &str) -> Matcher {\n Matcher {\n nfa: nfa,\n text: text.to_owned()\n }\n }\n\n pub fn run(&mut self) -> bool {\n\n if self.nfa.num_states() == 0 { \/\/ regex is empty\n return true;\n }\n\n let mut states = vec![\n PotentialMatch::new(Some(self.nfa.get_start().unwrap()),\n &self.text)\n ];\n\n while Self::has_valid_state(&states) {\n\n let mut updated_states = Vec::new();\n\n for state in states {\n let new_states = state.advance(&self.nfa);\n\n for s in new_states {\n if s.is_match() {\n return true;\n } else {\n updated_states.push(s);\n }\n }\n }\n\n states = updated_states;\n }\n\n false\n }\n\n fn has_valid_state(states: &Vec<PotentialMatch>) -> bool {\n states.len() > 0\n }\n}\n\n<commit_msg>handle split state case<commit_after>use ::nfa::{State, Edge, NFA};\n\n#[cfg(test)] mod spec;\n\n#[derive(Clone,PartialEq,Debug)]\nstruct PotentialMatch {\n current_state: Option<State>,\n remaining_text: String\n}\n\nimpl PotentialMatch {\n pub fn advance(&self, nfa: &NFA) -> Vec<PotentialMatch> {\n if self.current_state.is_none() {\n return vec![self.clone()];\n }\n\n let current_state = self.current_state.clone().unwrap();\n\n match current_state {\n State::State{ref edge, ref out} => {\n vec![self.next_for_edge(nfa, edge, out)]\n },\n State::Split{ref s1, ref out1, ref s2, ref out2} => {\n let mut s1_next = self.next_for_edge(nfa, s1, out1);\n let mut s2_next = self.next_for_edge(nfa, s2, out2);\n vec![s1_next, s2_next]\n }\n }\n }\n\n fn next(&self, nfa: &NFA, edge: &Option<char>, out: &Edge) -> Option<PotentialMatch> {\n match edge {\n &Some(val) => {\n if self.remaining_text.is_empty() {\n \/\/ no character to consume, this potential match cannot continue\n return None;\n }\n\n if val == self.remaining_text.as_bytes()[0] as char {\n \/\/ can consume char and advance along edge\n match out {\n &Edge::End => {\n Some(PotentialMatch::new(None, \n &self.remaining_text[1..]))\n },\n &Edge::Id(id) => {\n Some(PotentialMatch::new(nfa.get_state(id),\n &self.remaining_text[1..]))\n },\n _ => panic!()\n }\n } else { \n \/\/ potential match cannot proceed, mismatched character\n None\n }\n },\n &None => {\n \/\/ can advance along empty edge\n match out {\n &Edge::End => {\n Some(PotentialMatch::new(None,\n &self.remaining_text))\n },\n &Edge::Id(id) => {\n Some(PotentialMatch::new(\n nfa.get_state(id),\n &self.remaining_text))\n },\n _ => panic!(\"cannot evaluate incomplete NFA\")\n }\n }\n }\n\n }\n\n\n pub fn is_match(&self) -> bool {\n self.current_state.is_none()\n }\n\n pub fn new(state: Option<State>, remaining_text: &str) -> PotentialMatch {\n PotentialMatch {\n current_state: state,\n remaining_text: remaining_text.to_owned()\n }\n }\n}\n\npub struct Matcher {\n nfa: NFA,\n text: String\n}\n\nimpl Matcher {\n 
pub fn new(nfa: NFA, text: &str) -> Matcher {\n Matcher {\n nfa: nfa,\n text: text.to_owned()\n }\n }\n\n pub fn run(&mut self) -> bool {\n\n if self.nfa.num_states() == 0 { \/\/ regex is empty\n return true;\n }\n\n let mut states = vec![\n PotentialMatch::new(Some(self.nfa.get_start().unwrap()),\n &self.text)\n ];\n\n while Self::has_valid_state(&states) {\n\n let mut updated_states = Vec::new();\n\n for state in states {\n let new_states = state.advance(&self.nfa);\n\n for s in new_states {\n if s.is_match() {\n return true;\n } else {\n updated_states.push(s);\n }\n }\n }\n\n states = updated_states;\n }\n\n false\n }\n\n fn has_valid_state(states: &Vec<PotentialMatch>) -> bool {\n states.len() > 0\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>use std::ops::{Add, Mul};\n\n\/\/\/ A pair of coordinates\n#[derive(Clone, Copy)]\npub struct Vec2(pub usize, pub usize);\n\n\/\/\/ A structure of values\npub struct Matrix<T> {\n \/\/\/ 2 dimensional array of rows and columns\n dim: Vec2,\n \/\/\/ `Vector` of values in the `Matrix`\n buffer: Vec<T>,\n}\n\nimpl <T> Matrix<T> {\n \/\/\/ Returns a new `Matrix`\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `dimensions` - 2 dimensional array of rows and columns\n \/\/\/ - `buffer` - `Vector` of values\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0, 1, 2, 3, 4, 5]);\n \/\/\/ ```\n pub fn new(dimensions: Vec2, buffer: Vec<T>) -> Matrix<T> {\n Matrix {\n dim: dimensions,\n buffer: buffer,\n }\n }\n}\n\nimpl <T> Matrix<T> where T: Copy {\n \/\/\/ Returns reference to value at `Vec2`\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `Vec2` - coordinates of the value in the `Matrix`\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0, 1, 2, 3, 4, 5]);\n \/\/\/ assert_eq!(matrix.get(ktensor::math::Vec2(1, 2)), 5);\n \/\/\/ assert_eq!(matrix.get(ktensor::math::Vec2(1, 2)), 5);\n \/\/\/ ```\n pub fn get(&self, Vec2(x, y): Vec2) -> T {\n self.buffer[x * self.dim.1 + y]\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Addition \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nimpl<T> Add<Matrix<T>> for Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matricies\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - another matrix\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = matrix1 + matrix2;\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn add(self, rhs: Matrix<T>) -> Matrix<T> {\n let mut buffer = Vec::new();\n for (&i, &j) in self.buffer.iter().zip(rhs.buffer.iter()) {\n buffer.push(i + j);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<'a, 'b, T> Add<&'b Matrix<T>> for &'a Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matricies by reference\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix reference\n \/\/\/ - `rhs` - another matrix reference\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), 
vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = &matrix1 + &matrix2;\n \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn add(self, rhs: &'b Matrix<T>) -> Matrix<T> {\n let mut buffer = Vec::new();\n for (&i, &j) in self.buffer.iter().zip(rhs.buffer.iter()) {\n buffer.push(i + j);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<T> Add<T> for Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matrix and a constant\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - a constant\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let float = 1.0;\n \/\/\/ let matrix2 = matrix1 + float;\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n \/\/\/ ```\n fn add(self, rhs: T) -> Matrix<T> {\n let mut buffer = Vec::new();\n for &i in self.buffer.iter() {\n buffer.push(i + rhs);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<'a, 'b, T> Add<&'b T> for &'a Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matrix and a constant\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix reference\n \/\/\/ - `rhs` - a constant reference\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let float = 1.0;\n \/\/\/ let matrix2 = &matrix1 + &float;\n \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n \/\/\/ assert_eq!(float, 1.0);\n \/\/\/ ```\n fn add(self, &rhs: &'b T) -> Matrix<T> {\n let mut buffer = Vec::new();\n for &i in self.buffer.iter() {\n buffer.push(i + rhs);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Multiplication \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nimpl<T> Mul<Matrix<T>> for Matrix<T> where T: Mul<Output=T> + Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Multiply Matricies\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - another matrix\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(3, 2), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = matrix1 * matrix2;\n \/\/\/ assert_eq!(matrix3.buffer.len(), 4);\n \/\/\/ ```\n fn mul(self, rhs: Matrix<T>) -> Matrix<<<T as Mul>::Output as Add>::Output> {\n let mut buffer = Vec::new();\n let Vec2(x, y) = self.dim;\n let Vec2(x2, y2) = rhs.dim;\n for i in 0..x {\n for j in 0..y2 {\n let mut sum = self.get(Vec2(i, 0)) * rhs.get(Vec2(0, j));\n for k in 1..y {\n sum = sum + self.get(Vec2(i, k)) * rhs.get(Vec2(k, j));\n }\n buffer.push(sum);\n }\n }\n Matrix::new(Vec2(x, y2), buffer)\n }\n}\n\n\/\/ impl<'a, 'b, T> Add<&'b Matrix<T>> for &'a Matrix<T> where T: Add + Copy {\n\/\/ type Output = Matrix<<T as Add>::Output>;\n\/\/\n\/\/ \/\/\/ Add Matricies by reference\n\/\/ \/\/\/\n\/\/ \/\/\/ # Arguments\n\/\/ \/\/\/\n\/\/ \/\/\/ - `self` - this matrix reference\n\/\/ \/\/\/ - `rhs` - another matrix reference\n\/\/ 
\/\/\/\n\/\/ \/\/\/ # Example\n\/\/ \/\/\/\n\/\/ \/\/\/ ```\n\/\/ \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n\/\/ \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n\/\/ \/\/\/ let matrix3 = &matrix1 + &matrix2;\n\/\/ \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n\/\/ \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 5.0);\n\/\/ \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n\/\/ \/\/\/ ```\n\/\/ fn add(self, rhs: &'b Matrix<T>) -> Matrix<<T as Add>::Output> {\n\/\/ let mut buffer = Vec::new();\n\/\/ for (&i, &j) in self.buffer.iter().zip(rhs.buffer.iter()) {\n\/\/ buffer.push(i + j);\n\/\/ }\n\/\/ Matrix::new(self.dim, buffer)\n\/\/ }\n\/\/ }\n\/\/\n\/\/ impl<T> Add<T> for Matrix<T> where T: Add + Copy {\n\/\/ type Output = Matrix<<T as Add>::Output>;\n\/\/\n\/\/ \/\/\/ Add Matrix and a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Arguments\n\/\/ \/\/\/\n\/\/ \/\/\/ - `self` - this matrix\n\/\/ \/\/\/ - `rhs` - a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Example\n\/\/ \/\/\/\n\/\/ \/\/\/ ```\n\/\/ \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n\/\/ \/\/\/ let float = 1.0;\n\/\/ \/\/\/ let matrix2 = matrix1 + float;\n\/\/ \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n\/\/ \/\/\/ ```\n\/\/ fn add(self, rhs: T) -> Matrix<<T as Add>::Output> {\n\/\/ let mut buffer = Vec::new();\n\/\/ for &i in self.buffer.iter() {\n\/\/ buffer.push(i + rhs);\n\/\/ }\n\/\/ Matrix::new(self.dim, buffer)\n\/\/ }\n\/\/ }\n\/\/\n\/\/ impl<'a, 'b, T> Add<&'b T> for &'a Matrix<T> where T: Add + Copy {\n\/\/ type Output = Matrix<<T as Add>::Output>;\n\/\/\n\/\/ \/\/\/ Add Matrix and a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Arguments\n\/\/ \/\/\/\n\/\/ \/\/\/ - `self` - this matrix reference\n\/\/ \/\/\/ - `rhs` - a constant reference\n\/\/ \/\/\/\n\/\/ \/\/\/ # Example\n\/\/ \/\/\/\n\/\/ \/\/\/ ```\n\/\/ \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n\/\/ \/\/\/ let float = 1.0;\n\/\/ \/\/\/ let matrix2 = &matrix1 + &float;\n\/\/ \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n\/\/ \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n\/\/ \/\/\/ assert_eq!(float, 1.0);\n\/\/ \/\/\/ ```\n\/\/ fn add(self, &rhs: &'b T) -> Matrix<<T as Add>::Output> {\n\/\/ let mut buffer = Vec::new();\n\/\/ for &i in self.buffer.iter() {\n\/\/ buffer.push(i + rhs);\n\/\/ }\n\/\/ Matrix::new(self.dim, buffer)\n\/\/ }\n\/\/ }\n<commit_msg>matrix multiply<commit_after>use std::ops::{Add, Mul};\n\n\/\/\/ A pair of coordinates\n#[derive(Clone, Copy)]\npub struct Vec2(pub usize, pub usize);\n\n\/\/\/ A structure of values\npub struct Matrix<T> {\n \/\/\/ 2 dimensional array of rows and columns\n dim: Vec2,\n \/\/\/ `Vector` of values in the `Matrix`\n buffer: Vec<T>,\n}\n\nimpl <T> Matrix<T> {\n \/\/\/ Returns a new `Matrix`\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `dimensions` - 2 dimensional array of rows and columns\n \/\/\/ - `buffer` - `Vector` of values\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0, 1, 2, 3, 4, 5]);\n \/\/\/ ```\n pub fn new(dimensions: Vec2, buffer: Vec<T>) -> Matrix<T> {\n Matrix {\n dim: dimensions,\n buffer: buffer,\n }\n }\n\n pub fn len(&self) -> usize {\n self.buffer.len()\n 
}\n}\n\nimpl <T> Matrix<T> where T: Copy {\n \/\/\/ Returns reference to value at `Vec2`\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `Vec2` - coordinates of the value in the `Matrix`\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0, 1, 2, 3, 4, 5]);\n \/\/\/ assert_eq!(matrix.get(ktensor::math::Vec2(1, 2)), 5);\n \/\/\/ assert_eq!(matrix.get(ktensor::math::Vec2(1, 2)), 5);\n \/\/\/ ```\n pub fn get(&self, Vec2(x, y): Vec2) -> T {\n self.buffer[x * self.dim.1 + y]\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Addition \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nimpl<T> Add<Matrix<T>> for Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matricies\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - another matrix\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = matrix1 + matrix2;\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn add(self, rhs: Matrix<T>) -> Matrix<T> {\n let mut buffer = Vec::with_capacity(self.len());\n for (&i, &j) in self.buffer.iter().zip(rhs.buffer.iter()) {\n buffer.push(i + j);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<'a, 'b, T> Add<&'b Matrix<T>> for &'a Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matricies by reference\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix reference\n \/\/\/ - `rhs` - another matrix reference\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = &matrix1 + &matrix2;\n \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn add(self, rhs: &'b Matrix<T>) -> Matrix<T> {\n let mut buffer = Vec::with_capacity(self.len());\n for (&i, &j) in self.buffer.iter().zip(rhs.buffer.iter()) {\n buffer.push(i + j);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<T> Add<T> for Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matrix and a constant\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - a constant\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let float = 1.0;\n \/\/\/ let matrix2 = matrix1 + float;\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n \/\/\/ ```\n fn add(self, rhs: T) -> Matrix<T> {\n let mut buffer = Vec::with_capacity(self.len());\n for &i in self.buffer.iter() {\n buffer.push(i + rhs);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\nimpl<'a, 'b, T> Add<&'b T> for &'a Matrix<T> where T: Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Add Matrix and a constant\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix reference\n \/\/\/ - `rhs` - a constant reference\n \/\/\/\n \/\/\/ # 
Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let float = 1.0;\n \/\/\/ let matrix2 = &matrix1 + &float;\n \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n \/\/\/ assert_eq!(float, 1.0);\n \/\/\/ ```\n fn add(self, &rhs: &'b T) -> Matrix<T> {\n let mut buffer = Vec::with_capacity(self.len());\n for &i in self.buffer.iter() {\n buffer.push(i + rhs);\n }\n Matrix::new(self.dim, buffer)\n }\n}\n\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\/\/ Multiplication \/\/\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nimpl<T> Mul<Matrix<T>> for Matrix<T> where T: Mul<Output=T> + Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Multiply Matricies\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix\n \/\/\/ - `rhs` - another matrix\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(3, 2), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = matrix1 * matrix2;\n \/\/\/ assert_eq!(matrix3.len(), 4);\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn mul(self, rhs: Matrix<T>) -> Matrix<T> {\n let Vec2(x, y) = self.dim;\n let Vec2(x2, y2) = rhs.dim;\n assert_eq!(y, x2);\n let mut buffer = Vec::with_capacity(x * y2);\n for i in 0..x {\n for j in 0..y2 {\n let mut sum = self.get(Vec2(i, 0)) * rhs.get(Vec2(0, j));\n for k in 1..y {\n sum = sum + self.get(Vec2(i, k)) * rhs.get(Vec2(k, j));\n }\n buffer.push(sum);\n }\n }\n Matrix::new(Vec2(x, y2), buffer)\n }\n}\n\nimpl<'a, 'b, T> Mul<&'b Matrix<T>> for &'a Matrix<T> where T: Mul<Output=T> + Add<Output=T> + Copy {\n type Output = Matrix<T>;\n\n \/\/\/ Multiply Matricies by reference\n \/\/\/\n \/\/\/ # Arguments\n \/\/\/\n \/\/\/ - `self` - this matrix reference\n \/\/\/ - `rhs` - another matrix reference\n \/\/\/\n \/\/\/ # Example\n \/\/\/\n \/\/\/ ```\n \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n \/\/\/ let matrix2 = ktensor::math::Matrix::new(ktensor::math::Vec2(3, 2), vec![5.0, 4.0, 3.0, 2.0, 1.0, 0.0]);\n \/\/\/ let matrix3 = &matrix1 * &matrix2;\n \/\/\/ assert_eq!(matrix3.len(), 4);\n \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ assert_eq!(matrix3.get(ktensor::math::Vec2(0, 0)), 5.0);\n \/\/\/ ```\n fn mul(self, rhs: &'b Matrix<T>) -> Matrix<T> {\n let Vec2(x, y) = self.dim;\n let Vec2(x2, y2) = rhs.dim;\n assert_eq!(y, x2);\n let mut buffer = Vec::with_capacity(x * y2);\n for i in 0..x {\n for j in 0..y2 {\n let mut sum = self.get(Vec2(i, 0)) * rhs.get(Vec2(0, j));\n for k in 1..y {\n sum = sum + self.get(Vec2(i, k)) * rhs.get(Vec2(k, j));\n }\n buffer.push(sum);\n }\n }\n Matrix::new(Vec2(x, y2), buffer)\n }\n}\n\n\/\/ impl<T> Add<T> for Matrix<T> where T: Add + Copy {\n\/\/ type Output = Matrix<<T as Add>::Output>;\n\/\/\n\/\/ \/\/\/ Add Matrix and a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Arguments\n\/\/ \/\/\/\n\/\/ \/\/\/ - `self` - this matrix\n\/\/ \/\/\/ - `rhs` - a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Example\n\/\/ \/\/\/\n\/\/ \/\/\/ ```\n\/\/ \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 
5.0]);\n\/\/ \/\/\/ let float = 1.0;\n\/\/ \/\/\/ let matrix2 = matrix1 + float;\n\/\/ \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n\/\/ \/\/\/ ```\n\/\/ fn add(self, rhs: T) -> Matrix<<T as Add>::Output> {\n\/\/ let mut buffer = Vec::new();\n\/\/ for &i in self.buffer.iter() {\n\/\/ buffer.push(i + rhs);\n\/\/ }\n\/\/ Matrix::new(self.dim, buffer)\n\/\/ }\n\/\/ }\n\/\/\n\/\/ impl<'a, 'b, T> Add<&'b T> for &'a Matrix<T> where T: Add + Copy {\n\/\/ type Output = Matrix<<T as Add>::Output>;\n\/\/\n\/\/ \/\/\/ Add Matrix and a constant\n\/\/ \/\/\/\n\/\/ \/\/\/ # Arguments\n\/\/ \/\/\/\n\/\/ \/\/\/ - `self` - this matrix reference\n\/\/ \/\/\/ - `rhs` - a constant reference\n\/\/ \/\/\/\n\/\/ \/\/\/ # Example\n\/\/ \/\/\/\n\/\/ \/\/\/ ```\n\/\/ \/\/\/ let matrix1 = ktensor::math::Matrix::new(ktensor::math::Vec2(2, 3), vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0]);\n\/\/ \/\/\/ let float = 1.0;\n\/\/ \/\/\/ let matrix2 = &matrix1 + &float;\n\/\/ \/\/\/ assert_eq!(matrix1.get(ktensor::math::Vec2(0, 0)), 0.0);\n\/\/ \/\/\/ assert_eq!(matrix2.get(ktensor::math::Vec2(0, 0)), 1.0);\n\/\/ \/\/\/ assert_eq!(float, 1.0);\n\/\/ \/\/\/ ```\n\/\/ fn add(self, &rhs: &'b T) -> Matrix<<T as Add>::Output> {\n\/\/ let mut buffer = Vec::new();\n\/\/ for &i in self.buffer.iter() {\n\/\/ buffer.push(i + rhs);\n\/\/ }\n\/\/ Matrix::new(self.dim, buffer)\n\/\/ }\n\/\/ }\n<|endoftext|>"} {"text":"<commit_before>use super::{allocate_shapes, allocate_states, Boundary, Find, Mesh};\nuse crate::shapes::{Shape, StateOfShape};\nuse crate::StrError;\nuse std::ffi::OsStr;\n\n\/\/\/ Holds all data related to a mesh including element shapes, states, boundaries, and functions to find entities\n\/\/\/\n\/\/\/ This struct is a (high-level) convenience that calls the necessary functions to generate all derived data from a mesh struct.\n\/\/\/\n\/\/\/ The Region basically calls the following functions:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ let shapes = allocate_shapes(&mesh)?;\n\/\/\/ let states = allocate_states(&mesh, &shapes)?;\n\/\/\/ let boundary = Boundary::new(&mesh, &shapes)?;\n\/\/\/ let find = Find::new(&mesh, &boundary)?;\n\/\/\/ ```\npub struct Region {\n \/\/\/ Holds the raw mesh data\n pub mesh: Mesh,\n\n \/\/\/ Holds all shapes of all cells (len = **number of cells**)\n pub shapes: Vec<Shape>,\n\n \/\/\/ Holds all states of shapes of all cells (len = **number of cells**)\n pub states: Vec<StateOfShape>,\n\n \/\/\/ Holds the boundary data such as points, edges, and faces on boundary\n pub boundary: Boundary,\n\n \/\/\/ Allows finding points, edges, and faces on the boundary by giving coordinates or keys\n pub find: Find,\n}\n\nimpl Region {\n \/\/\/ Allocates and prepares a new region with a given mesh\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use gemlab::mesh::{At, Cell, Mesh, Point, Region};\n \/\/\/ use gemlab::shapes::GeoKind;\n \/\/\/ use gemlab::StrError;\n \/\/\/\n \/\/\/ fn main() -> Result<(), StrError> {\n \/\/\/ \/\/ 3---------2---------5\n \/\/\/ \/\/ | | |\n \/\/\/ \/\/ | [0] | [1] |\n \/\/\/ \/\/ | | |\n \/\/\/ \/\/ 0---------1---------4\n \/\/\/ let mesh = Mesh {\n \/\/\/ space_ndim: 2,\n \/\/\/ points: vec![\n \/\/\/ Point { id: 0, coords: vec![0.0, 0.0] },\n \/\/\/ Point { id: 1, coords: vec![1.0, 0.0] },\n \/\/\/ Point { id: 2, coords: vec![1.0, 1.0] },\n \/\/\/ Point { id: 3, coords: vec![0.0, 1.0] },\n \/\/\/ Point { id: 4, coords: vec![2.0, 0.0] },\n \/\/\/ Point { id: 5, coords: vec![2.0, 1.0] },\n \/\/\/ ],\n \/\/\/ cells: vec![\n \/\/\/ Cell { id: 0, attribute_id: 1, 
geo_ndim: 2, points: vec![0, 1, 2, 3] },\n \/\/\/ Cell { id: 1, attribute_id: 2, geo_ndim: 2, points: vec![1, 4, 5, 2] },\n \/\/\/ ],\n \/\/\/ };\n \/\/\/ let region = Region::with(mesh)?;\n \/\/\/ assert_eq!(region.mesh.space_ndim, 2);\n \/\/\/ assert_eq!(region.shapes.len(), 2);\n \/\/\/ assert_eq!(region.states.len(), 2);\n \/\/\/ assert_eq!(region.shapes[0].kind, GeoKind::Qua4);\n \/\/\/ assert_eq!(region.boundary.points.len(), 6);\n \/\/\/ assert_eq!(region.boundary.edges.len(), 6);\n \/\/\/ assert_eq!(region.boundary.faces.len(), 0);\n \/\/\/ assert_eq!(region.boundary.min, &[0.0, 0.0]);\n \/\/\/ assert_eq!(region.boundary.max, &[2.0, 1.0]);\n \/\/\/ assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n \/\/\/ Ok(())\n \/\/\/ }\n \/\/\/ ```\n pub fn with(mesh: Mesh) -> Result<Self, StrError> {\n let shapes = allocate_shapes(&mesh)?;\n let states = allocate_states(&mesh, &shapes)?;\n let boundary = Boundary::new(&mesh, &shapes)?;\n let find = Find::new(&mesh, &boundary)?;\n Ok(Region {\n mesh,\n shapes,\n states,\n boundary,\n find,\n })\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh defined in a text string\n #[inline]\n pub fn with_text(mesh_text: &str) -> Result<Self, StrError> {\n let mesh = Mesh::from_text(mesh_text)?;\n Region::with(mesh)\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh read from a text file\n \/\/\/\n \/\/\/ # Input\n \/\/\/\n \/\/\/ * `full_path` -- may be a String, &str, or Path\n #[inline]\n pub fn with_text_file<P>(full_path: &P) -> Result<Self, StrError>\n where\n P: AsRef<OsStr> + ?Sized,\n {\n let mesh = Mesh::from_text_file(full_path)?;\n Region::with(mesh)\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh read from a binary file\n \/\/\/\n \/\/\/ # Input\n \/\/\/\n \/\/\/ * `full_path` -- may be a String, &str, or Path\n #[inline]\n pub fn with_binary_file<P>(full_path: &P) -> Result<Self, StrError>\n where\n P: AsRef<OsStr> + ?Sized,\n {\n let mesh = Mesh::read(full_path)?;\n Region::with(mesh)\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n#[cfg(test)]\nmod tests {\n use super::{Mesh, Region};\n use crate::mesh::{At, Samples};\n use crate::StrError;\n\n #[test]\n fn with_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let mesh = Samples::two_quads_horizontal();\n let region = Region::with(mesh)?;\n \/\/ println!(\"{:?}\", mesh); \/\/ WRONG: mesh has been moved into region\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_text_works() -> Result<(), StrError> {\n let mesh = Mesh::from_text(\n \"# 1.0 3-----------2-----------5\n # | | |\n # | [0] | [1] | [*] indicates id\n # | (1) | (2) | (*) indicates attribute_id\n # | | |\n # 0.0 0-----------1-----------4\n # 0.0 1.0 2.0\n #\n # header\n # space_ndim npoint ncell\n 2 6 2\n\n # points\n # id x y\n 0 0.0 0.0\n 1 1.0 0.0\n 2 1.0 1.0\n 3 0.0 1.0\n 4 2.0 0.0\n 5 2.0 1.0\n\n # cells\n # id att geo_ndim nnode point_ids...\n 0 1 2 4 0 1 2 3\n 1 2 2 4 1 4 5 2\n \",\n 
)?;\n let region = Region::with(mesh)?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_text_file_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let mesh = Mesh::from_text_file(\".\/data\/meshes\/two_quads_horizontal.msh\")?;\n let region = Region::with(mesh)?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_binary_file_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let full_path = \"\/tmp\/gemlab\/test_region_two_quads_horizontal.dat\";\n let mesh = Samples::two_quads_horizontal();\n mesh.write(full_path)?;\n let region = Region::with_binary_file(full_path)?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n}\n<commit_msg>Fix test<commit_after>use super::{allocate_shapes, allocate_states, Boundary, Find, Mesh};\nuse crate::shapes::{Shape, StateOfShape};\nuse crate::StrError;\nuse std::ffi::OsStr;\n\n\/\/\/ Holds all data related to a mesh including element shapes, states, boundaries, and functions to find entities\n\/\/\/\n\/\/\/ This struct is a (high-level) convenience that calls the necessary functions to generate all derived data from a mesh struct.\n\/\/\/\n\/\/\/ The Region basically calls the following functions:\n\/\/\/\n\/\/\/ ```text\n\/\/\/ let shapes = allocate_shapes(&mesh)?;\n\/\/\/ let states = allocate_states(&mesh, &shapes)?;\n\/\/\/ let boundary = Boundary::new(&mesh, &shapes)?;\n\/\/\/ let find = Find::new(&mesh, &boundary)?;\n\/\/\/ ```\npub struct Region {\n \/\/\/ Holds the raw mesh data\n pub mesh: Mesh,\n\n \/\/\/ Holds all shapes of all cells (len = **number of cells**)\n pub shapes: Vec<Shape>,\n\n \/\/\/ Holds all states of shapes of all cells (len = **number of cells**)\n pub states: Vec<StateOfShape>,\n\n \/\/\/ Holds the boundary data such as points, edges, and faces on boundary\n pub boundary: Boundary,\n\n \/\/\/ Allows finding points, edges, and faces on the boundary by giving coordinates or keys\n pub find: Find,\n}\n\nimpl Region {\n \/\/\/ Allocates and prepares a new region with a given mesh\n \/\/\/\n \/\/\/ # Examples\n \/\/\/\n \/\/\/ ```\n \/\/\/ use gemlab::mesh::{At, Cell, Mesh, Point, Region};\n \/\/\/ use gemlab::shapes::GeoKind;\n \/\/\/ use gemlab::StrError;\n \/\/\/\n \/\/\/ fn main() -> Result<(), StrError> {\n \/\/\/ \/\/ 3---------2---------5\n \/\/\/ \/\/ | | |\n \/\/\/ \/\/ | [0] | [1] |\n \/\/\/ \/\/ | | |\n \/\/\/ \/\/ 0---------1---------4\n \/\/\/ let mesh = Mesh {\n \/\/\/ space_ndim: 2,\n \/\/\/ points: vec![\n \/\/\/ Point { id: 0, coords: vec![0.0, 0.0] },\n \/\/\/ 
Point { id: 1, coords: vec![1.0, 0.0] },\n \/\/\/ Point { id: 2, coords: vec![1.0, 1.0] },\n \/\/\/ Point { id: 3, coords: vec![0.0, 1.0] },\n \/\/\/ Point { id: 4, coords: vec![2.0, 0.0] },\n \/\/\/ Point { id: 5, coords: vec![2.0, 1.0] },\n \/\/\/ ],\n \/\/\/ cells: vec![\n \/\/\/ Cell { id: 0, attribute_id: 1, geo_ndim: 2, points: vec![0, 1, 2, 3] },\n \/\/\/ Cell { id: 1, attribute_id: 2, geo_ndim: 2, points: vec![1, 4, 5, 2] },\n \/\/\/ ],\n \/\/\/ };\n \/\/\/ let region = Region::with(mesh)?;\n \/\/\/ assert_eq!(region.mesh.space_ndim, 2);\n \/\/\/ assert_eq!(region.shapes.len(), 2);\n \/\/\/ assert_eq!(region.states.len(), 2);\n \/\/\/ assert_eq!(region.shapes[0].kind, GeoKind::Qua4);\n \/\/\/ assert_eq!(region.boundary.points.len(), 6);\n \/\/\/ assert_eq!(region.boundary.edges.len(), 6);\n \/\/\/ assert_eq!(region.boundary.faces.len(), 0);\n \/\/\/ assert_eq!(region.boundary.min, &[0.0, 0.0]);\n \/\/\/ assert_eq!(region.boundary.max, &[2.0, 1.0]);\n \/\/\/ assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n \/\/\/ Ok(())\n \/\/\/ }\n \/\/\/ ```\n pub fn with(mesh: Mesh) -> Result<Self, StrError> {\n let shapes = allocate_shapes(&mesh)?;\n let states = allocate_states(&mesh, &shapes)?;\n let boundary = Boundary::new(&mesh, &shapes)?;\n let find = Find::new(&mesh, &boundary)?;\n Ok(Region {\n mesh,\n shapes,\n states,\n boundary,\n find,\n })\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh defined in a text string\n #[inline]\n pub fn with_text(mesh_text: &str) -> Result<Self, StrError> {\n let mesh = Mesh::from_text(mesh_text)?;\n Region::with(mesh)\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh read from a text file\n \/\/\/\n \/\/\/ # Input\n \/\/\/\n \/\/\/ * `full_path` -- may be a String, &str, or Path\n #[inline]\n pub fn with_text_file<P>(full_path: &P) -> Result<Self, StrError>\n where\n P: AsRef<OsStr> + ?Sized,\n {\n let mesh = Mesh::from_text_file(full_path)?;\n Region::with(mesh)\n }\n\n \/\/\/ Allocates and prepares a new region with a mesh read from a binary file\n \/\/\/\n \/\/\/ # Input\n \/\/\/\n \/\/\/ * `full_path` -- may be a String, &str, or Path\n #[inline]\n pub fn with_binary_file<P>(full_path: &P) -> Result<Self, StrError>\n where\n P: AsRef<OsStr> + ?Sized,\n {\n let mesh = Mesh::read(full_path)?;\n Region::with(mesh)\n }\n}\n\n\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\n#[cfg(test)]\nmod tests {\n use super::{Mesh, Region};\n use crate::mesh::{At, Samples};\n use crate::StrError;\n\n #[test]\n fn with_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let mesh = Samples::two_quads_horizontal();\n let region = Region::with(mesh)?;\n \/\/ println!(\"{:?}\", mesh); \/\/ WRONG: mesh has been moved into region\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_text_works() -> Result<(), StrError> {\n let region = Region::with_text(\n \"# 1.0 3-----------2-----------5\n # | | |\n # | [0] | [1] | [*] indicates id\n # | (1) | (2) | (*) 
indicates attribute_id\n # | | |\n # 0.0 0-----------1-----------4\n # 0.0 1.0 2.0\n #\n # header\n # space_ndim npoint ncell\n 2 6 2\n\n # points\n # id x y\n 0 0.0 0.0\n 1 1.0 0.0\n 2 1.0 1.0\n 3 0.0 1.0\n 4 2.0 0.0\n 5 2.0 1.0\n\n # cells\n # id att geo_ndim nnode point_ids...\n 0 1 2 4 0 1 2 3\n 1 2 2 4 1 4 5 2\n \",\n )?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_text_file_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let mesh = Mesh::from_text_file(\".\/data\/meshes\/two_quads_horizontal.msh\")?;\n let region = Region::with(mesh)?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n\n #[test]\n fn with_binary_file_works() -> Result<(), StrError> {\n \/\/ 3---------2---------5\n \/\/ | | |\n \/\/ | [0] | [1] |\n \/\/ | | |\n \/\/ 0---------1---------4\n let full_path = \"\/tmp\/gemlab\/test_region_two_quads_horizontal.dat\";\n let mesh = Samples::two_quads_horizontal();\n mesh.write(full_path)?;\n let region = Region::with_binary_file(full_path)?;\n assert_eq!(region.mesh.space_ndim, 2);\n assert_eq!(region.shapes.len(), 2);\n assert_eq!(region.states.len(), 2);\n assert_eq!(region.boundary.points.len(), 6);\n assert_eq!(region.boundary.edges.len(), 6);\n assert_eq!(region.boundary.faces.len(), 0);\n assert_eq!(region.find.points(At::XY(0.0, 0.0))?.len(), 1);\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #54225 - pnkfelix:issue-53675-add-test-called-panic, r=petrochenkov<commit_after>\/\/ rust-lang\/rust#53675: At one point the compiler errored when a test\n\/\/ named `panic` used the `assert!` macro in expression position.\n\n\/\/ compile-pass\n\/\/ compile-flags: --test\n\nmod in_expression_position {\n #[test]\n fn panic() {\n assert!(true)\n }\n}\n\nmod in_statement_position {\n #[test]\n fn panic() {\n assert!(true);\n }\n}\n\nmod what_if_we_use_panic_directly_in_expr {\n #[test]\n #[should_panic]\n fn panic() {\n panic!(\"in expr\")\n }\n}\n\n\nmod what_if_we_use_panic_directly_in_stmt {\n #[test]\n #[should_panic]\n fn panic() {\n panic!(\"in stmt\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate rand;\n\n#[allow(non_camel_case_types)]\npub mod ll {\n use libc::{c_int, uint8_t, uint32_t};\n\n \/\/SDL_pixels.h\n pub struct SDL_Color {\n pub r: uint8_t,\n pub g: uint8_t,\n pub b: uint8_t,\n pub a: uint8_t,\n }\n\n pub struct SDL_Palette {\n pub ncolors: c_int,\n pub colors: *const SDL_Color,\n pub version: uint32_t,\n pub refcount: c_int\n }\n\n #[allow(uppercase_variables)]\n pub struct SDL_PixelFormat {\n pub format: SDL_PixelFormatFlag,\n pub palette: *const SDL_Palette,\n pub BitsPerPixel: uint8_t,\n pub BytesPerPixel: uint8_t,\n pub padding: [uint8_t, ..2],\n pub Rmask: uint8_t,\n pub Gmask: uint8_t,\n pub Bmask: uint8_t,\n pub Amask: uint8_t,\n pub Rloss: uint8_t,\n pub Gloss: uint8_t,\n pub Bloss: uint8_t,\n pub Aloss: uint8_t,\n pub Rshift: 
uint8_t,\n pub Gshift: uint8_t,\n pub Bshift: uint8_t,\n pub Ashift: uint8_t,\n pub refcount: c_int,\n pub next: *const SDL_PixelFormat\n }\n\n pub type SDL_PixelFormatFlag = uint32_t;\n pub static SDL_PIXELFORMAT_UNKNOWN: SDL_PixelFormatFlag = 0x0;\n pub static SDL_PIXELFORMAT_INDEX1LSB: SDL_PixelFormatFlag = 0x11100100;\n pub static SDL_PIXELFORMAT_INDEX1MSB: SDL_PixelFormatFlag = 0x11200100;\n pub static SDL_PIXELFORMAT_INDEX4LSB: SDL_PixelFormatFlag = 0x12100400;\n pub static SDL_PIXELFORMAT_INDEX4MSB: SDL_PixelFormatFlag = 0x12200400;\n pub static SDL_PIXELFORMAT_INDEX8: SDL_PixelFormatFlag = 0x13000801;\n pub static SDL_PIXELFORMAT_RGB332: SDL_PixelFormatFlag = 0x14110801;\n pub static SDL_PIXELFORMAT_RGB444: SDL_PixelFormatFlag = 0x15120c02;\n pub static SDL_PIXELFORMAT_RGB555: SDL_PixelFormatFlag = 0x15130f02;\n pub static SDL_PIXELFORMAT_BGR555: SDL_PixelFormatFlag = 0x15530f02;\n pub static SDL_PIXELFORMAT_ARGB4444: SDL_PixelFormatFlag = 0x15321002;\n pub static SDL_PIXELFORMAT_RGBA4444: SDL_PixelFormatFlag = 0x15421002;\n pub static SDL_PIXELFORMAT_ABGR4444: SDL_PixelFormatFlag = 0x15721002;\n pub static SDL_PIXELFORMAT_BGRA4444: SDL_PixelFormatFlag = 0x15821002;\n pub static SDL_PIXELFORMAT_ARGB1555: SDL_PixelFormatFlag = 0x15331002;\n pub static SDL_PIXELFORMAT_RGBA5551: SDL_PixelFormatFlag = 0x15441002;\n pub static SDL_PIXELFORMAT_ABGR1555: SDL_PixelFormatFlag = 0x15731002;\n pub static SDL_PIXELFORMAT_BGRA5551: SDL_PixelFormatFlag = 0x15841002;\n pub static SDL_PIXELFORMAT_RGB565: SDL_PixelFormatFlag = 0x15151002;\n pub static SDL_PIXELFORMAT_BGR565: SDL_PixelFormatFlag = 0x15551002;\n pub static SDL_PIXELFORMAT_RGB24: SDL_PixelFormatFlag = 0x17101803;\n pub static SDL_PIXELFORMAT_BGR24: SDL_PixelFormatFlag = 0x17401803;\n pub static SDL_PIXELFORMAT_RGB888: SDL_PixelFormatFlag = 0x16161804;\n pub static SDL_PIXELFORMAT_RGBX8888: SDL_PixelFormatFlag = 0x16261804;\n pub static SDL_PIXELFORMAT_BGR888: SDL_PixelFormatFlag = 0x16561804;\n pub static SDL_PIXELFORMAT_BGRX8888: SDL_PixelFormatFlag = 0x16661804;\n pub static SDL_PIXELFORMAT_ARGB8888: SDL_PixelFormatFlag = 0x16362004;\n pub static SDL_PIXELFORMAT_RGBA8888: SDL_PixelFormatFlag = 0x16462004;\n pub static SDL_PIXELFORMAT_ABGR8888: SDL_PixelFormatFlag = 0x16762004;\n pub static SDL_PIXELFORMAT_BGRA8888: SDL_PixelFormatFlag = 0x16862004;\n pub static SDL_PIXELFORMAT_ARGB2101010: SDL_PixelFormatFlag = 0x16372004;\n pub static SDL_PIXELFORMAT_YV12: SDL_PixelFormatFlag = 0x32315659;\n pub static SDL_PIXELFORMAT_IYUV: SDL_PixelFormatFlag = 0x56555949;\n pub static SDL_PIXELFORMAT_YUY2: SDL_PixelFormatFlag = 0x32595559;\n pub static SDL_PIXELFORMAT_UYVY: SDL_PixelFormatFlag = 0x59565955;\n pub static SDL_PIXELFORMAT_YVYU: SDL_PixelFormatFlag = 0x55595659;\n\n extern \"C\" {\n pub fn SDL_GetRGB(pixel: uint32_t, format: *const SDL_PixelFormat, r: *const uint8_t, g: *const uint8_t, b: *const uint8_t);\n pub fn SDL_GetRGBA(pixel: uint32_t, format: *const SDL_PixelFormat, r: *const uint8_t, g: *const uint8_t, b: *const uint8_t, a: *const uint8_t);\n pub fn SDL_MapRGB(format: *const SDL_PixelFormat, r: uint8_t, g: uint8_t, b: uint8_t) -> uint32_t;\n pub fn SDL_MapRGBA(format: *const SDL_PixelFormat, r: uint8_t, g: uint8_t, b: uint8_t, a: uint8_t) -> uint32_t;\n }\n}\n#[deriving(PartialEq)] #[allow(raw_pointer_deriving)]\npub struct Palette {\n raw: *const ll::SDL_Palette\n}\n\nimpl_raw_accessors!(Palette, *const ll::SDL_Palette)\n\n#[deriving(PartialEq)]\npub enum Color {\n RGB(u8, u8, u8),\n RGBA(u8, u8, u8, u8)\n}\n\nimpl Color 
{\n pub fn to_u32(&self, format: &PixelFormat) -> u32 {\n match self {\n &RGB(r, g, b) => {\n unsafe { ll::SDL_MapRGB(format.raw, r, g, b) }\n }\n &RGBA(r, g, b, a) => {\n unsafe { ll::SDL_MapRGBA(format.raw, r, g, b, a) }\n }\n }\n }\n\n pub fn from_u32(format: &PixelFormat, pixel: u32) -> Color {\n let r: u8 = 0;\n let g: u8 = 0;\n let b: u8 = 0;\n let a: u8 = 0;\n\n unsafe {\n ll::SDL_GetRGBA(pixel, format.raw, &r, &g, &b, &a)\n };\n RGBA(r, g, b, a)\n }\n}\n\nimpl rand::Rand for Color {\n fn rand<R: rand::Rng>(rng: &mut R) -> Color {\n if rng.gen() { RGBA(rng.gen(), rng.gen(), rng.gen(), rng.gen()) }\n else { RGB(rng.gen(), rng.gen(), rng.gen()) }\n }\n}\n\n#[deriving(PartialEq)] #[allow(raw_pointer_deriving)]\npub struct PixelFormat {\n raw: *const ll::SDL_PixelFormat\n}\n\nimpl_raw_accessors!(PixelFormat, *const ll::SDL_PixelFormat)\nimpl_raw_constructor!(PixelFormat -> PixelFormat (raw: *const ll::SDL_PixelFormat))\n\n#[deriving(PartialEq, Show, FromPrimitive)]\npub enum PixelFormatFlag {\n Unknown = ll::SDL_PIXELFORMAT_UNKNOWN as int,\n Index1LSB = ll::SDL_PIXELFORMAT_INDEX1LSB as int,\n Index1MSB = ll::SDL_PIXELFORMAT_INDEX1MSB as int,\n Index4LSB = ll::SDL_PIXELFORMAT_INDEX4LSB as int,\n Index4MSB = ll::SDL_PIXELFORMAT_INDEX4MSB as int,\n Index8 = ll::SDL_PIXELFORMAT_INDEX8 as int,\n RGB332 = ll::SDL_PIXELFORMAT_RGB332 as int,\n RGB444 = ll::SDL_PIXELFORMAT_RGB444 as int,\n RGB555 = ll::SDL_PIXELFORMAT_RGB555 as int,\n BGR555 = ll::SDL_PIXELFORMAT_BGR555 as int,\n ARGB4444 = ll::SDL_PIXELFORMAT_ARGB4444 as int,\n RGBA4444 = ll::SDL_PIXELFORMAT_RGBA4444 as int,\n ABGR4444 = ll::SDL_PIXELFORMAT_ABGR4444 as int,\n BGRA4444 = ll::SDL_PIXELFORMAT_BGRA4444 as int,\n ARGB1555 = ll::SDL_PIXELFORMAT_ARGB1555 as int,\n RGBA5551 = ll::SDL_PIXELFORMAT_RGBA5551 as int,\n ABGR1555 = ll::SDL_PIXELFORMAT_ABGR1555 as int,\n BGRA5551 = ll::SDL_PIXELFORMAT_BGRA5551 as int,\n RGB565 = ll::SDL_PIXELFORMAT_RGB565 as int,\n BGR565 = ll::SDL_PIXELFORMAT_BGR565 as int,\n RGB24 = ll::SDL_PIXELFORMAT_RGB24 as int,\n BGR24 = ll::SDL_PIXELFORMAT_BGR24 as int,\n RGB888 = ll::SDL_PIXELFORMAT_RGB888 as int,\n RGBX8888 = ll::SDL_PIXELFORMAT_RGBX8888 as int,\n BGR888 = ll::SDL_PIXELFORMAT_BGR888 as int,\n BGRX8888 = ll::SDL_PIXELFORMAT_BGRX8888 as int,\n ARGB8888 = ll::SDL_PIXELFORMAT_ARGB8888 as int,\n RGBA8888 = ll::SDL_PIXELFORMAT_RGBA8888 as int,\n ABGR8888 = ll::SDL_PIXELFORMAT_ABGR8888 as int,\n BGRA8888 = ll::SDL_PIXELFORMAT_BGRA8888 as int,\n ARGB2101010 = ll::SDL_PIXELFORMAT_ARGB2101010 as int,\n YV12 = ll::SDL_PIXELFORMAT_YV12 as int,\n IYUV = ll::SDL_PIXELFORMAT_IYUV as int,\n YUY2 = ll::SDL_PIXELFORMAT_YUY2 as int,\n UYVY = ll::SDL_PIXELFORMAT_UYVY as int,\n YVYU = ll::SDL_PIXELFORMAT_YVYU as int\n}\n\nimpl PixelFormatFlag {\n pub fn byte_size_of_pixels(&self, num_of_pixels: uint) -> uint {\n match *self {\n RGB332\n => num_of_pixels * 1,\n RGB444 | RGB555 | BGR555 | ARGB4444 | RGBA4444 | ABGR4444 |\n BGRA4444 | ARGB1555 | RGBA5551 | ABGR1555 | BGRA5551 | RGB565 |\n BGR565\n => num_of_pixels * 2,\n RGB24 | BGR24\n => num_of_pixels * 3,\n RGB888 | RGBX8888 | BGR888 | BGRX8888 | ARGB8888 | RGBA8888 |\n ABGR8888 | BGRA8888 | ARGB2101010\n => num_of_pixels * 4,\n \/\/ YUV formats\n \/\/ FIXME: rounding error here?\n YV12 | IYUV\n => num_of_pixels \/ 2 * 3,\n YUY2 | UYVY | YVYU\n => num_of_pixels * 2,\n \/\/ Unsupported formats\n Index8\n => num_of_pixels * 1,\n Unknown | Index1LSB | Index1MSB | Index4LSB | Index4MSB\n => fail!(\"not supported format: {}\", *self),\n }\n }\n\n pub fn 
byte_size_per_pixel(&self) -> uint {\n match *self {\n RGB332\n => 1,\n RGB444 | RGB555 | BGR555 | ARGB4444 | RGBA4444 | ABGR4444 |\n BGRA4444 | ARGB1555 | RGBA5551 | ABGR1555 | BGRA5551 | RGB565 |\n BGR565\n => 2,\n RGB24 | BGR24\n => 3,\n RGB888 | RGBX8888 | BGR888 | BGRX8888 | ARGB8888 | RGBA8888 |\n ABGR8888 | BGRA8888 | ARGB2101010\n => 4,\n \/\/ YUV formats\n YV12 | IYUV\n => 2,\n YUY2 | UYVY | YVYU\n => 2,\n \/\/ Unsupported formats\n Index8\n => 1,\n Unknown | Index1LSB | Index1MSB | Index4LSB | Index4MSB\n => fail!(\"not supported format: {}\", *self),\n }\n }\n}\n<commit_msg>Added clone trait to color<commit_after>extern crate rand;\n\n#[allow(non_camel_case_types)]\npub mod ll {\n use libc::{c_int, uint8_t, uint32_t};\n\n \/\/SDL_pixels.h\n pub struct SDL_Color {\n pub r: uint8_t,\n pub g: uint8_t,\n pub b: uint8_t,\n pub a: uint8_t,\n }\n\n pub struct SDL_Palette {\n pub ncolors: c_int,\n pub colors: *const SDL_Color,\n pub version: uint32_t,\n pub refcount: c_int\n }\n\n #[allow(uppercase_variables)]\n pub struct SDL_PixelFormat {\n pub format: SDL_PixelFormatFlag,\n pub palette: *const SDL_Palette,\n pub BitsPerPixel: uint8_t,\n pub BytesPerPixel: uint8_t,\n pub padding: [uint8_t, ..2],\n pub Rmask: uint8_t,\n pub Gmask: uint8_t,\n pub Bmask: uint8_t,\n pub Amask: uint8_t,\n pub Rloss: uint8_t,\n pub Gloss: uint8_t,\n pub Bloss: uint8_t,\n pub Aloss: uint8_t,\n pub Rshift: uint8_t,\n pub Gshift: uint8_t,\n pub Bshift: uint8_t,\n pub Ashift: uint8_t,\n pub refcount: c_int,\n pub next: *const SDL_PixelFormat\n }\n\n pub type SDL_PixelFormatFlag = uint32_t;\n pub static SDL_PIXELFORMAT_UNKNOWN: SDL_PixelFormatFlag = 0x0;\n pub static SDL_PIXELFORMAT_INDEX1LSB: SDL_PixelFormatFlag = 0x11100100;\n pub static SDL_PIXELFORMAT_INDEX1MSB: SDL_PixelFormatFlag = 0x11200100;\n pub static SDL_PIXELFORMAT_INDEX4LSB: SDL_PixelFormatFlag = 0x12100400;\n pub static SDL_PIXELFORMAT_INDEX4MSB: SDL_PixelFormatFlag = 0x12200400;\n pub static SDL_PIXELFORMAT_INDEX8: SDL_PixelFormatFlag = 0x13000801;\n pub static SDL_PIXELFORMAT_RGB332: SDL_PixelFormatFlag = 0x14110801;\n pub static SDL_PIXELFORMAT_RGB444: SDL_PixelFormatFlag = 0x15120c02;\n pub static SDL_PIXELFORMAT_RGB555: SDL_PixelFormatFlag = 0x15130f02;\n pub static SDL_PIXELFORMAT_BGR555: SDL_PixelFormatFlag = 0x15530f02;\n pub static SDL_PIXELFORMAT_ARGB4444: SDL_PixelFormatFlag = 0x15321002;\n pub static SDL_PIXELFORMAT_RGBA4444: SDL_PixelFormatFlag = 0x15421002;\n pub static SDL_PIXELFORMAT_ABGR4444: SDL_PixelFormatFlag = 0x15721002;\n pub static SDL_PIXELFORMAT_BGRA4444: SDL_PixelFormatFlag = 0x15821002;\n pub static SDL_PIXELFORMAT_ARGB1555: SDL_PixelFormatFlag = 0x15331002;\n pub static SDL_PIXELFORMAT_RGBA5551: SDL_PixelFormatFlag = 0x15441002;\n pub static SDL_PIXELFORMAT_ABGR1555: SDL_PixelFormatFlag = 0x15731002;\n pub static SDL_PIXELFORMAT_BGRA5551: SDL_PixelFormatFlag = 0x15841002;\n pub static SDL_PIXELFORMAT_RGB565: SDL_PixelFormatFlag = 0x15151002;\n pub static SDL_PIXELFORMAT_BGR565: SDL_PixelFormatFlag = 0x15551002;\n pub static SDL_PIXELFORMAT_RGB24: SDL_PixelFormatFlag = 0x17101803;\n pub static SDL_PIXELFORMAT_BGR24: SDL_PixelFormatFlag = 0x17401803;\n pub static SDL_PIXELFORMAT_RGB888: SDL_PixelFormatFlag = 0x16161804;\n pub static SDL_PIXELFORMAT_RGBX8888: SDL_PixelFormatFlag = 0x16261804;\n pub static SDL_PIXELFORMAT_BGR888: SDL_PixelFormatFlag = 0x16561804;\n pub static SDL_PIXELFORMAT_BGRX8888: SDL_PixelFormatFlag = 0x16661804;\n pub static SDL_PIXELFORMAT_ARGB8888: SDL_PixelFormatFlag = 0x16362004;\n pub static 
SDL_PIXELFORMAT_RGBA8888: SDL_PixelFormatFlag = 0x16462004;\n pub static SDL_PIXELFORMAT_ABGR8888: SDL_PixelFormatFlag = 0x16762004;\n pub static SDL_PIXELFORMAT_BGRA8888: SDL_PixelFormatFlag = 0x16862004;\n pub static SDL_PIXELFORMAT_ARGB2101010: SDL_PixelFormatFlag = 0x16372004;\n pub static SDL_PIXELFORMAT_YV12: SDL_PixelFormatFlag = 0x32315659;\n pub static SDL_PIXELFORMAT_IYUV: SDL_PixelFormatFlag = 0x56555949;\n pub static SDL_PIXELFORMAT_YUY2: SDL_PixelFormatFlag = 0x32595559;\n pub static SDL_PIXELFORMAT_UYVY: SDL_PixelFormatFlag = 0x59565955;\n pub static SDL_PIXELFORMAT_YVYU: SDL_PixelFormatFlag = 0x55595659;\n\n extern \"C\" {\n pub fn SDL_GetRGB(pixel: uint32_t, format: *const SDL_PixelFormat, r: *const uint8_t, g: *const uint8_t, b: *const uint8_t);\n pub fn SDL_GetRGBA(pixel: uint32_t, format: *const SDL_PixelFormat, r: *const uint8_t, g: *const uint8_t, b: *const uint8_t, a: *const uint8_t);\n pub fn SDL_MapRGB(format: *const SDL_PixelFormat, r: uint8_t, g: uint8_t, b: uint8_t) -> uint32_t;\n pub fn SDL_MapRGBA(format: *const SDL_PixelFormat, r: uint8_t, g: uint8_t, b: uint8_t, a: uint8_t) -> uint32_t;\n }\n}\n#[deriving(PartialEq)] #[allow(raw_pointer_deriving)]\npub struct Palette {\n raw: *const ll::SDL_Palette\n}\n\nimpl_raw_accessors!(Palette, *const ll::SDL_Palette)\n\n#[deriving(PartialEq, Clone)]\npub enum Color {\n RGB(u8, u8, u8),\n RGBA(u8, u8, u8, u8)\n}\n\nimpl Color {\n pub fn to_u32(&self, format: &PixelFormat) -> u32 {\n match self {\n &RGB(r, g, b) => {\n unsafe { ll::SDL_MapRGB(format.raw, r, g, b) }\n }\n &RGBA(r, g, b, a) => {\n unsafe { ll::SDL_MapRGBA(format.raw, r, g, b, a) }\n }\n }\n }\n\n pub fn from_u32(format: &PixelFormat, pixel: u32) -> Color {\n let r: u8 = 0;\n let g: u8 = 0;\n let b: u8 = 0;\n let a: u8 = 0;\n\n unsafe {\n ll::SDL_GetRGBA(pixel, format.raw, &r, &g, &b, &a)\n };\n RGBA(r, g, b, a)\n }\n}\n\nimpl rand::Rand for Color {\n fn rand<R: rand::Rng>(rng: &mut R) -> Color {\n if rng.gen() { RGBA(rng.gen(), rng.gen(), rng.gen(), rng.gen()) }\n else { RGB(rng.gen(), rng.gen(), rng.gen()) }\n }\n}\n\n#[deriving(PartialEq)] #[allow(raw_pointer_deriving)]\npub struct PixelFormat {\n raw: *const ll::SDL_PixelFormat\n}\n\nimpl_raw_accessors!(PixelFormat, *const ll::SDL_PixelFormat)\nimpl_raw_constructor!(PixelFormat -> PixelFormat (raw: *const ll::SDL_PixelFormat))\n\n#[deriving(PartialEq, Show, FromPrimitive)]\npub enum PixelFormatFlag {\n Unknown = ll::SDL_PIXELFORMAT_UNKNOWN as int,\n Index1LSB = ll::SDL_PIXELFORMAT_INDEX1LSB as int,\n Index1MSB = ll::SDL_PIXELFORMAT_INDEX1MSB as int,\n Index4LSB = ll::SDL_PIXELFORMAT_INDEX4LSB as int,\n Index4MSB = ll::SDL_PIXELFORMAT_INDEX4MSB as int,\n Index8 = ll::SDL_PIXELFORMAT_INDEX8 as int,\n RGB332 = ll::SDL_PIXELFORMAT_RGB332 as int,\n RGB444 = ll::SDL_PIXELFORMAT_RGB444 as int,\n RGB555 = ll::SDL_PIXELFORMAT_RGB555 as int,\n BGR555 = ll::SDL_PIXELFORMAT_BGR555 as int,\n ARGB4444 = ll::SDL_PIXELFORMAT_ARGB4444 as int,\n RGBA4444 = ll::SDL_PIXELFORMAT_RGBA4444 as int,\n ABGR4444 = ll::SDL_PIXELFORMAT_ABGR4444 as int,\n BGRA4444 = ll::SDL_PIXELFORMAT_BGRA4444 as int,\n ARGB1555 = ll::SDL_PIXELFORMAT_ARGB1555 as int,\n RGBA5551 = ll::SDL_PIXELFORMAT_RGBA5551 as int,\n ABGR1555 = ll::SDL_PIXELFORMAT_ABGR1555 as int,\n BGRA5551 = ll::SDL_PIXELFORMAT_BGRA5551 as int,\n RGB565 = ll::SDL_PIXELFORMAT_RGB565 as int,\n BGR565 = ll::SDL_PIXELFORMAT_BGR565 as int,\n RGB24 = ll::SDL_PIXELFORMAT_RGB24 as int,\n BGR24 = ll::SDL_PIXELFORMAT_BGR24 as int,\n RGB888 = ll::SDL_PIXELFORMAT_RGB888 as int,\n RGBX8888 = 
ll::SDL_PIXELFORMAT_RGBX8888 as int,\n BGR888 = ll::SDL_PIXELFORMAT_BGR888 as int,\n BGRX8888 = ll::SDL_PIXELFORMAT_BGRX8888 as int,\n ARGB8888 = ll::SDL_PIXELFORMAT_ARGB8888 as int,\n RGBA8888 = ll::SDL_PIXELFORMAT_RGBA8888 as int,\n ABGR8888 = ll::SDL_PIXELFORMAT_ABGR8888 as int,\n BGRA8888 = ll::SDL_PIXELFORMAT_BGRA8888 as int,\n ARGB2101010 = ll::SDL_PIXELFORMAT_ARGB2101010 as int,\n YV12 = ll::SDL_PIXELFORMAT_YV12 as int,\n IYUV = ll::SDL_PIXELFORMAT_IYUV as int,\n YUY2 = ll::SDL_PIXELFORMAT_YUY2 as int,\n UYVY = ll::SDL_PIXELFORMAT_UYVY as int,\n YVYU = ll::SDL_PIXELFORMAT_YVYU as int\n}\n\nimpl PixelFormatFlag {\n pub fn byte_size_of_pixels(&self, num_of_pixels: uint) -> uint {\n match *self {\n RGB332\n => num_of_pixels * 1,\n RGB444 | RGB555 | BGR555 | ARGB4444 | RGBA4444 | ABGR4444 |\n BGRA4444 | ARGB1555 | RGBA5551 | ABGR1555 | BGRA5551 | RGB565 |\n BGR565\n => num_of_pixels * 2,\n RGB24 | BGR24\n => num_of_pixels * 3,\n RGB888 | RGBX8888 | BGR888 | BGRX8888 | ARGB8888 | RGBA8888 |\n ABGR8888 | BGRA8888 | ARGB2101010\n => num_of_pixels * 4,\n \/\/ YUV formats\n \/\/ FIXME: rounding error here?\n YV12 | IYUV\n => num_of_pixels \/ 2 * 3,\n YUY2 | UYVY | YVYU\n => num_of_pixels * 2,\n \/\/ Unsupported formats\n Index8\n => num_of_pixels * 1,\n Unknown | Index1LSB | Index1MSB | Index4LSB | Index4MSB\n => fail!(\"not supported format: {}\", *self),\n }\n }\n\n pub fn byte_size_per_pixel(&self) -> uint {\n match *self {\n RGB332\n => 1,\n RGB444 | RGB555 | BGR555 | ARGB4444 | RGBA4444 | ABGR4444 |\n BGRA4444 | ARGB1555 | RGBA5551 | ABGR1555 | BGRA5551 | RGB565 |\n BGR565\n => 2,\n RGB24 | BGR24\n => 3,\n RGB888 | RGBX8888 | BGR888 | BGRX8888 | ARGB8888 | RGBA8888 |\n ABGR8888 | BGRA8888 | ARGB2101010\n => 4,\n \/\/ YUV formats\n YV12 | IYUV\n => 2,\n YUY2 | UYVY | YVYU\n => 2,\n \/\/ Unsupported formats\n Index8\n => 1,\n Unknown | Index1LSB | Index1MSB | Index4LSB | Index4MSB\n => fail!(\"not supported format: {}\", *self),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse target::{Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::linux_base::opts();\n \/\/ z10 is the oldest CPU supported by LLVM\n base.cpu = \"z10\".to_string();\n \/\/ FIXME: The data_layout string below and the ABI implementation in\n \/\/ cabi_s390x.rs are for now hard-coded to assume the no-vector ABI.\n \/\/ Pass the -vector feature string to LLVM to respect this assumption.\n base.features = \"-vector\".to_string();\n base.max_atomic_width = Some(64);\n\n Ok(Target {\n llvm_target: \"s390x-unknown-linux-gnu\".to_string(),\n target_endian: \"big\".to_string(),\n target_pointer_width: \"64\".to_string(),\n data_layout: \"E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-a:8:16-n32:64\".to_string(),\n arch: \"s390x\".to_string(),\n target_os: \"linux\".to_string(),\n target_env: \"gnu\".to_string(),\n target_vendor: \"unknown\".to_string(),\n options: base,\n })\n}\n<commit_msg>Disable jemalloc on s390x as well (closes #38596)<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse target::{Target, TargetResult};\n\npub fn target() -> TargetResult {\n let mut base = super::linux_base::opts();\n \/\/ z10 is the oldest CPU supported by LLVM\n base.cpu = \"z10\".to_string();\n \/\/ FIXME: The data_layout string below and the ABI implementation in\n \/\/ cabi_s390x.rs are for now hard-coded to assume the no-vector ABI.\n \/\/ Pass the -vector feature string to LLVM to respect this assumption.\n base.features = \"-vector\".to_string();\n base.max_atomic_width = Some(64);\n \/\/ see #36994\n base.exe_allocation_crate = \"alloc_system\".to_string();\n\n Ok(Target {\n llvm_target: \"s390x-unknown-linux-gnu\".to_string(),\n target_endian: \"big\".to_string(),\n target_pointer_width: \"64\".to_string(),\n data_layout: \"E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-a:8:16-n32:64\".to_string(),\n arch: \"s390x\".to_string(),\n target_os: \"linux\".to_string(),\n target_env: \"gnu\".to_string(),\n target_vendor: \"unknown\".to_string(),\n options: base,\n })\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Diagnostics engine\n\nuse std::fmt;\nuse std::cmp;\nuse std::rc::Rc;\nuse std::cell::RefCell;\nuse super::{Span, SrcMgr};\n\nuse colored::{Color, Colorize};\n\n\/\/\/ Severity of the diagnostic message.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum Severity {\n Remark,\n Info,\n Warning,\n Error,\n Fatal,\n}\n\nimpl Severity {\n \/\/\/ Get the color corresponding to this severity.\n fn color(&self) -> Color {\n match self {\n Severity::Remark => Color::Blue,\n Severity::Info => Color::Black,\n Severity::Warning => Color::Magenta,\n Severity::Error | Severity::Fatal => Color::Red,\n }\n }\n}\n\nimpl fmt::Display for Severity {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let str = match self {\n Severity::Remark => \"remark\",\n Severity::Info => \"info\",\n Severity::Warning => \"warning\",\n Severity::Error => \"error\",\n Severity::Fatal => \"fatal error\",\n };\n write!(f, \"{}\", str)\n }\n}\n\n\/\/\/ A note for detailed message or suggesting how to fix it.\npub struct Note {\n pub span: Span,\n pub fix: Option<String>,\n pub message: Option<String>,\n}\n\n\/\/\/ A diagnostic message.\npub struct Diagnostic {\n pub severity: Severity,\n pub message: String,\n \/\/\/ This is the primary span that causes the issue. This will not be displayed.\n \/\/\/ `new` function will automatically add the span to notes for it to be displayed.\n pub span: Option<Span>,\n pub notes: Vec<Note>\n}\n\n\/\/\/ Helpers for building diagnostic message. 
Intended to be called in chains.\nimpl Diagnostic {\n pub fn new(severity: Severity, msg: impl Into<String>, span: Span) -> Self {\n Diagnostic {\n severity,\n message: msg.into(),\n span: Some(span),\n notes: vec![Note {\n span,\n fix: None,\n message: None\n }],\n }\n }\n\n pub fn fix_primary(mut self, fix: impl Into<String>) -> Self {\n self.notes[0].fix = Some(fix.into());\n self\n }\n\n pub fn fix(mut self, span: Span, fix: impl Into<String>) -> Self {\n self.notes.push(Note {\n span,\n fix: Some(fix.into()),\n message: None,\n });\n self\n }\n}\n\n\/\/ Helper class for printing column number in a file with tabs and non-ASCII characters.\nstruct VisualString {\n str: String,\n columns: Vec<usize>,\n}\n\nimpl VisualString {\n fn new(str: &str, tab: usize) -> VisualString {\n let mut columns = Vec::with_capacity(str.len() + 1);\n columns.push(0);\n\n \/\/ Current visual string and visual length\n let mut vstr = String::new();\n let mut vlen = 0;\n \n for ch in str.chars() {\n match ch {\n '\\r' | '\\n' => (),\n '\\t' => {\n let newlen = (vlen + tab) \/ tab * tab;\n for _ in vlen..newlen {\n vstr.push(' ');\n }\n vlen = newlen\n }\n _ => {\n vstr.push(ch);\n vlen += 1\n }\n }\n\n for _ in 0..ch.len_utf8() {\n columns.push(vlen);\n }\n }\n\n \/\/ Reserve a column for end-of-line character\n columns.push(vlen + 1);\n\n VisualString {\n str: vstr,\n columns: columns,\n }\n }\n\n fn visual_column(&self, pos: usize) -> usize {\n self.columns[pos]\n }\n\n fn visual_length(&self) -> usize {\n self.columns[self.columns.len() - 1]\n }\n\n fn visual_text(&self) -> &str {\n &self.str\n }\n}\n\nimpl Diagnostic {\n pub fn print(&self, mgr: &SrcMgr, color: bool, tab: usize) {\n \/\/ Stringify and color severity\n let mut severity = format!(\"{}: \", self.severity);\n if color {\n severity = severity.color(self.severity.color()).to_string();\n }\n\n \/\/ Convert spans to fat spans\n let primary_span = match self.notes.first().and_then(|x| mgr.find_span(x.span)) {\n None => {\n \/\/ If the message has no associated file, just print it\n if color {\n eprintln!(\"{}{}\", severity, self.message.bold());\n } else {\n eprintln!(\"{}{}\", severity, self.message);\n }\n return\n }\n Some(v) => v,\n };\n\n \/\/ Obtain line map\n let src = &primary_span.source;\n let linemap = src.linemap();\n\n \/\/ Get line number (starting from 0)\n let line = linemap.line_number(primary_span.start);\n \/\/ Get position within the line\n let line_start = linemap.line_start_pos(line);\n \/\/ Get source code line for handling\n let line_text = linemap.line(src, line);\n let vstr = VisualString::new(line_text, tab);\n\n \/\/ Get colored severity string\n \/\/ Generate the error message line\n let mut msg = format!(\"{}:{}: {}{}\", src.filename(), line + 1, severity, self.message);\n if color {\n msg = msg.bold().to_string();\n }\n\n \/\/ Allocate char vectors to hold indicators and hints\n \/\/ Make this 1 longer for possibility to point to the line break character.\n let mut indicators = vec![' '; vstr.visual_length() + 1];\n let mut fixes = vec![' '; vstr.visual_length()];\n let mut character = '^';\n let mut has_fix = false;\n\n \/\/ Fill in ^ and ~ characters for all spans\n for note in &self.notes {\n let span = match mgr.find_span(note.span) {\n \/\/ The span is non-existent, continue instead\n None => continue,\n Some(v) => v,\n };\n\n \/\/ Unlikely event, we cannot display this\n if !Rc::ptr_eq(&span.source, &primary_span.source) {\n continue\n }\n\n \/\/ Get start and end position, clamped within the line.\n let start = 
span.start as isize - line_start as isize;\n let start_clamp = cmp::min(cmp::max(start, 0) as usize, line_text.len());\n let end = span.end as isize - line_start as isize;\n let end_clamp = cmp::min(cmp::max(end, 0) as usize, line_text.len() + 1);\n\n for i in vstr.visual_column(start_clamp)..vstr.visual_column(end_clamp) {\n indicators[i] = character;\n }\n\n \/\/ We can only display it if it partially covers this line\n if note.fix.is_some() && end >= 0 && start <= line_text.len() as isize {\n let mut vptr = cmp::min(cmp::max(start, 0) as usize, line_text.len());\n \/\/ Now replace the part in vector with the replacement suggestion\n for ch in note.fix.as_ref().unwrap().chars() {\n if vptr >= fixes.len() {\n fixes.push(ch);\n } else {\n fixes[vptr] = ch;\n }\n vptr += 1;\n }\n has_fix = true;\n }\n\n \/\/ For non-primary notes, the character is different.\n character = '~';\n }\n\n let mut indicator_line: String = indicators.into_iter().collect();\n if color {\n indicator_line = indicator_line.green().bold().to_string();\n }\n\n if has_fix {\n let mut line: String = fixes.into_iter().collect();\n if color {\n line = line.green().to_string();\n }\n\n eprintln!(\"{}\\n{}\\n{}\\n{}\", msg, vstr.visual_text(), indicator_line, line);\n } else {\n eprintln!(\"{}\\n{}\\n{}\", msg, vstr.visual_text(), indicator_line);\n }\n }\n}\n\n\/\/\/ Diagnostic manager\nstruct DiagMgrMut {\n src: Rc<SrcMgr>,\n diagnostics: Vec<Diagnostic>,\n}\n\npub struct DiagMgr {\n mutable: RefCell<DiagMgrMut>,\n}\n\nimpl DiagMgr {\n \/\/\/ Create a new diagnostics manager\n pub fn new(mgr: Rc<SrcMgr>) -> Self {\n Self {\n mutable: RefCell::new(DiagMgrMut {\n src: mgr,\n diagnostics: Vec::new(),\n })\n }\n }\n\n \/\/\/ Add a new diagnostic. Returns `Err` for fatal errors.\n pub fn report(&self, diag: Diagnostic) {\n let mut m = self.mutable.borrow_mut();\n diag.print(&m.src, true, 4);\n let severity = diag.severity;\n m.diagnostics.push(diag);\n if severity == Severity::Fatal {\n panic!(Severity::Fatal);\n }\n }\n\n \/\/\/ Clear exsting diagnostics\n pub fn clear(&self) {\n let mut m = self.mutable.borrow_mut();\n m.diagnostics.clear();\n }\n\n \/\/\/ Check if there is any fatal error.\n pub fn has_fatal(&self) -> bool {\n let m = self.mutable.borrow();\n m.diagnostics.iter().any(|diag| diag.severity == Severity::Fatal)\n }\n\n \/\/\/ Check if there is any error.\n pub fn has_error(&self) -> bool {\n let m = self.mutable.borrow();\n m.diagnostics.iter().any(|diag| diag.severity == Severity::Error || diag.severity == Severity::Fatal)\n }\n}\n<commit_msg>Add the commonly used error reporting function into DiagMgr.<commit_after>\/\/ Diagnostics engine\n\nuse std::fmt;\nuse std::cmp;\nuse std::rc::Rc;\nuse std::cell::RefCell;\nuse super::{Span, SrcMgr};\n\nuse colored::{Color, Colorize};\n\n\/\/\/ Severity of the diagnostic message.\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum Severity {\n Remark,\n Info,\n Warning,\n Error,\n Fatal,\n}\n\nimpl Severity {\n \/\/\/ Get the color corresponding to this severity.\n fn color(&self) -> Color {\n match self {\n Severity::Remark => Color::Blue,\n Severity::Info => Color::Black,\n Severity::Warning => Color::Magenta,\n Severity::Error | Severity::Fatal => Color::Red,\n }\n }\n}\n\nimpl fmt::Display for Severity {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let str = match self {\n Severity::Remark => \"remark\",\n Severity::Info => \"info\",\n Severity::Warning => \"warning\",\n Severity::Error => \"error\",\n Severity::Fatal => \"fatal error\",\n };\n 
write!(f, \"{}\", str)\n }\n}\n\n\/\/\/ A note for detailed message or suggesting how to fix it.\npub struct Note {\n pub span: Span,\n pub fix: Option<String>,\n pub message: Option<String>,\n}\n\n\/\/\/ A diagnostic message.\npub struct Diagnostic {\n pub severity: Severity,\n pub message: String,\n \/\/\/ This is the primary span that causes the issue. This will not be displayed.\n \/\/\/ `new` function will automatically add the span to notes for it to be displayed.\n pub span: Option<Span>,\n pub notes: Vec<Note>\n}\n\n\/\/\/ Helpers for building diagnostic message. Intended to be called in chains.\nimpl Diagnostic {\n pub fn new(severity: Severity, msg: impl Into<String>, span: Span) -> Self {\n Diagnostic {\n severity,\n message: msg.into(),\n span: Some(span),\n notes: vec![Note {\n span,\n fix: None,\n message: None\n }],\n }\n }\n\n pub fn fix_primary(mut self, fix: impl Into<String>) -> Self {\n self.notes[0].fix = Some(fix.into());\n self\n }\n\n pub fn fix(mut self, span: Span, fix: impl Into<String>) -> Self {\n self.notes.push(Note {\n span,\n fix: Some(fix.into()),\n message: None,\n });\n self\n }\n}\n\n\/\/ Helper class for printing column number in a file with tabs and non-ASCII characters.\nstruct VisualString {\n str: String,\n columns: Vec<usize>,\n}\n\nimpl VisualString {\n fn new(str: &str, tab: usize) -> VisualString {\n let mut columns = Vec::with_capacity(str.len() + 1);\n columns.push(0);\n\n \/\/ Current visual string and visual length\n let mut vstr = String::new();\n let mut vlen = 0;\n \n for ch in str.chars() {\n match ch {\n '\\r' | '\\n' => (),\n '\\t' => {\n let newlen = (vlen + tab) \/ tab * tab;\n for _ in vlen..newlen {\n vstr.push(' ');\n }\n vlen = newlen\n }\n _ => {\n vstr.push(ch);\n vlen += 1\n }\n }\n\n for _ in 0..ch.len_utf8() {\n columns.push(vlen);\n }\n }\n\n \/\/ Reserve a column for end-of-line character\n columns.push(vlen + 1);\n\n VisualString {\n str: vstr,\n columns: columns,\n }\n }\n\n fn visual_column(&self, pos: usize) -> usize {\n self.columns[pos]\n }\n\n fn visual_length(&self) -> usize {\n self.columns[self.columns.len() - 1]\n }\n\n fn visual_text(&self) -> &str {\n &self.str\n }\n}\n\nimpl Diagnostic {\n pub fn print(&self, mgr: &SrcMgr, color: bool, tab: usize) {\n \/\/ Stringify and color severity\n let mut severity = format!(\"{}: \", self.severity);\n if color {\n severity = severity.color(self.severity.color()).to_string();\n }\n\n \/\/ Convert spans to fat spans\n let primary_span = match self.notes.first().and_then(|x| mgr.find_span(x.span)) {\n None => {\n \/\/ If the message has no associated file, just print it\n if color {\n eprintln!(\"{}{}\", severity, self.message.bold());\n } else {\n eprintln!(\"{}{}\", severity, self.message);\n }\n return\n }\n Some(v) => v,\n };\n\n \/\/ Obtain line map\n let src = &primary_span.source;\n let linemap = src.linemap();\n\n \/\/ Get line number (starting from 0)\n let line = linemap.line_number(primary_span.start);\n \/\/ Get position within the line\n let line_start = linemap.line_start_pos(line);\n \/\/ Get source code line for handling\n let line_text = linemap.line(src, line);\n let vstr = VisualString::new(line_text, tab);\n\n \/\/ Get colored severity string\n \/\/ Generate the error message line\n let mut msg = format!(\"{}:{}: {}{}\", src.filename(), line + 1, severity, self.message);\n if color {\n msg = msg.bold().to_string();\n }\n\n \/\/ Allocate char vectors to hold indicators and hints\n \/\/ Make this 1 longer for possibility to point to the line break 
character.\n let mut indicators = vec![' '; vstr.visual_length() + 1];\n let mut fixes = vec![' '; vstr.visual_length()];\n let mut character = '^';\n let mut has_fix = false;\n\n \/\/ Fill in ^ and ~ characters for all spans\n for note in &self.notes {\n let span = match mgr.find_span(note.span) {\n \/\/ The span is non-existent, continue instead\n None => continue,\n Some(v) => v,\n };\n\n \/\/ Unlikely event, we cannot display this\n if !Rc::ptr_eq(&span.source, &primary_span.source) {\n continue\n }\n\n \/\/ Get start and end position, clamped within the line.\n let start = span.start as isize - line_start as isize;\n let start_clamp = cmp::min(cmp::max(start, 0) as usize, line_text.len());\n let end = span.end as isize - line_start as isize;\n let end_clamp = cmp::min(cmp::max(end, 0) as usize, line_text.len() + 1);\n\n for i in vstr.visual_column(start_clamp)..vstr.visual_column(end_clamp) {\n indicators[i] = character;\n }\n\n \/\/ We can only display it if it partially covers this line\n if note.fix.is_some() && end >= 0 && start <= line_text.len() as isize {\n let mut vptr = cmp::min(cmp::max(start, 0) as usize, line_text.len());\n \/\/ Now replace the part in vector with the replacement suggestion\n for ch in note.fix.as_ref().unwrap().chars() {\n if vptr >= fixes.len() {\n fixes.push(ch);\n } else {\n fixes[vptr] = ch;\n }\n vptr += 1;\n }\n has_fix = true;\n }\n\n \/\/ For non-primary notes, the character is different.\n character = '~';\n }\n\n let mut indicator_line: String = indicators.into_iter().collect();\n if color {\n indicator_line = indicator_line.green().bold().to_string();\n }\n\n if has_fix {\n let mut line: String = fixes.into_iter().collect();\n if color {\n line = line.green().to_string();\n }\n\n eprintln!(\"{}\\n{}\\n{}\\n{}\", msg, vstr.visual_text(), indicator_line, line);\n } else {\n eprintln!(\"{}\\n{}\\n{}\", msg, vstr.visual_text(), indicator_line);\n }\n }\n}\n\n\/\/\/ Diagnostic manager\nstruct DiagMgrMut {\n src: Rc<SrcMgr>,\n diagnostics: Vec<Diagnostic>,\n}\n\npub struct DiagMgr {\n mutable: RefCell<DiagMgrMut>,\n}\n\nimpl DiagMgr {\n \/\/\/ Create a new diagnostics manager\n pub fn new(mgr: Rc<SrcMgr>) -> Self {\n Self {\n mutable: RefCell::new(DiagMgrMut {\n src: mgr,\n diagnostics: Vec::new(),\n })\n }\n }\n\n \/\/\/ Add a new diagnostic. Returns `Err` for fatal errors.\n pub fn report(&self, diag: Diagnostic) {\n let mut m = self.mutable.borrow_mut();\n diag.print(&m.src, true, 4);\n let severity = diag.severity;\n m.diagnostics.push(diag);\n if severity == Severity::Fatal {\n panic!(Severity::Fatal);\n }\n }\n\n \/\/\/ Create a errpr diagnostic from message and span and report it.\n pub fn report_error<M: Into<String>>(&self, msg: M, span: Span) {\n self.report(Diagnostic::new(\n Severity::Error,\n msg.into(),\n span,\n ));\n }\n\n \/\/\/ Create a fatal diagnostic from message and span and report it.\n pub fn report_fatal<M: Into<String>>(&self, msg: M, span: Span) -> ! 
{\n self.report(Diagnostic::new(\n Severity::Fatal,\n msg.into(),\n span,\n ));\n unreachable!()\n }\n\n \/\/\/ Clear exsting diagnostics\n pub fn clear(&self) {\n let mut m = self.mutable.borrow_mut();\n m.diagnostics.clear();\n }\n\n \/\/\/ Check if there is any fatal error.\n pub fn has_fatal(&self) -> bool {\n let m = self.mutable.borrow();\n m.diagnostics.iter().any(|diag| diag.severity == Severity::Fatal)\n }\n\n \/\/\/ Check if there is any error.\n pub fn has_error(&self) -> bool {\n let m = self.mutable.borrow();\n m.diagnostics.iter().any(|diag| diag.severity == Severity::Error || diag.severity == Severity::Fatal)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix broken accessor test<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>test unwinding past topmost frame of a stack<commit_after>\/\/ ignore-windows: Concurrency on Windows is not supported yet.\n\/\/ error-pattern: unwinding past the topmost frame of the stack\n\n\/\/! Unwinding past the top frame of a stack is Undefined Behavior.\n\n#![feature(rustc_private)]\n\nextern crate libc;\n\nuse std::{mem, ptr};\n\nextern \"C\" fn thread_start(_null: *mut libc::c_void) -> *mut libc::c_void {\n panic!()\n}\n\nfn main() {\n unsafe {\n let mut native: libc::pthread_t = mem::zeroed();\n let attr: libc::pthread_attr_t = mem::zeroed();\n \/\/ assert_eq!(libc::pthread_attr_init(&mut attr), 0); FIXME: this function is not yet implemented.\n assert_eq!(libc::pthread_create(&mut native, &attr, thread_start, ptr::null_mut()), 0);\n assert_eq!(libc::pthread_join(native, ptr::null_mut()), 0);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>New Rust submission<commit_after><|endoftext|>"} {"text":"<commit_before>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::str;\nuse redox::{String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct UDPHeader {\n pub src: n16,\n pub dst: n16,\n pub len: n16,\n pub checksum: Checksum,\n}\n\npub struct UDP {\n pub header: UDPHeader,\n pub data: Vec<u8>,\n}\n\nimpl FromBytes for UDP {\n fn from_bytes(bytes: Vec<u8>) -> Option<Self> {\n if bytes.len() >= mem::size_of::<UDPHeader>() {\n unsafe {\n return Option::Some(UDP {\n header: ptr::read(bytes.as_ptr() as *const UDPHeader),\n data: bytes[mem::size_of::<UDPHeader>().. 
bytes.len()].to_vec(),\n });\n }\n }\n Option::None\n }\n}\n\nimpl ToBytes for UDP {\n fn to_bytes(&self) -> Vec<u8> {\n unsafe {\n let header_ptr: *const UDPHeader = &self.header;\n let mut ret = Vec::from(slice::from_raw_parts(header_ptr as *const u8, mem::size_of::<UDPHeader>()));\n ret.push_all(&self.data);\n ret\n }\n }\n}\n\n\/\/\/ UDP resource\npub struct Resource {\n ip: File,\n data: Vec<u8>,\n peer_addr: IPv4Addr,\n peer_port: u16,\n host_port: u16,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Resource>> {\n match self.ip.dup() {\n Some(ip) => Some(box Resource {\n ip: ip,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n peer_port: self.peer_port,\n host_port: self.host_port,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"udp:\/\/{}:{}\/{}\", self.peer_addr.to_string(), self.peer_port, self.host_port);\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n mem::swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(datagram) = UDP::from_bytes(bytes) {\n if datagram.header.dst.get() == self.host_port &&\n datagram.header.src.get() == self.peer_port {\n vec.push_all(&datagram.data);\n return Some(datagram.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let udp_data = Vec::from(buf);\n\n let mut udp = UDP {\n header: UDPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n len: n16::new((mem::size_of::<UDPHeader>() + udp_data.len()) as u16),\n checksum: Checksum { data: 0 },\n },\n data: udp_data,\n };\n\n unsafe {\n let proto = n16::new(0x11);\n let datagram_len = n16::new((mem::size_of::<UDPHeader>() + udp.data.len()) as u16);\n udp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&datagram_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&udp.header as *const UDPHeader) as usize,\n mem::size_of::<UDPHeader>()) +\n Checksum::sum(udp.data.as_ptr() as usize, udp.data.len()));\n }\n\n match self.ip.write(udp.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n }\n\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n return None;\n }\n\n pub fn sync(&mut self) -> bool {\n self.ip.sync()\n }\n}\n\n\/\/\/ UDP scheme\npub struct Scheme;\n\nimpl Scheme {\n pub fn new() -> Box<Self> {\n box Scheme\n }\n\n pub fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n \/\/Check host and port vs path\n if not_scheme.starts_with(\"\/\") {\n let host_port = not_scheme[1..not_scheme.len() - 1].to_string().to_num();\n if host_port > 0 && host_port < 
65536 {\n if let Some(mut ip) = File::open(\"ip:\/\/\/11\") {\n let mut bytes: Vec<u8> = Vec::new();\n if ip.read_to_end(&mut bytes).is_some() {\n if let Some(datagram) = UDP::from_bytes(bytes) {\n if datagram.header.dst.get() as usize == host_port {\n let mut url_bytes = [0; 4096];\n if let Some(count) = ip.path(&mut url_bytes) {\n let url = unsafe { str::from_utf8_unchecked(&url_bytes[0..count]) };\n\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n let (host, port) = not_scheme.split_at(not_scheme.find(':').unwrap_or(not_scheme.len()));\n\n let peer_addr = IPv4Addr::from_string(&host.to_string());\n\n return Some(box Resource {\n ip: ip,\n data: datagram.data,\n peer_addr: peer_addr,\n peer_port: datagram.header.src.get(),\n host_port: host_port as u16,\n });\n }\n }\n }\n }\n }\n }\n }else{\n let (host, port) = not_scheme.split_at(not_scheme.find(':').unwrap_or(not_scheme.len()));\n\n let peer_port = port.to_string().to_num();\n if peer_port > 0 && peer_port < 65536 {\n let host_port = (rand() % 32768 + 32768) as u16;\n\n if let Some(ip) = File::open(&format!(\"ip:\/\/{}\/11\", host)) {\n return Some(box Resource {\n ip: ip,\n data: Vec::new(),\n peer_addr: IPv4Addr::from_string(&host.to_string()),\n peer_port: peer_port as u16,\n host_port: host_port,\n });\n }\n }\n }\n\n None\n }\n}\n<commit_msg>For UDP scheme, return `Self` in `Resource::dup()` and remove unneeded return statements<commit_after>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::str;\nuse redox::{String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n#[derive(Copy, Clone)]\n#[repr(packed)]\npub struct UDPHeader {\n pub src: n16,\n pub dst: n16,\n pub len: n16,\n pub checksum: Checksum,\n}\n\npub struct UDP {\n pub header: UDPHeader,\n pub data: Vec<u8>,\n}\n\nimpl FromBytes for UDP {\n fn from_bytes(bytes: Vec<u8>) -> Option<Self> {\n if bytes.len() >= mem::size_of::<UDPHeader>() {\n unsafe {\n return Option::Some(UDP {\n header: ptr::read(bytes.as_ptr() as *const UDPHeader),\n data: bytes[mem::size_of::<UDPHeader>().. 
bytes.len()].to_vec(),\n });\n }\n }\n Option::None\n }\n}\n\nimpl ToBytes for UDP {\n fn to_bytes(&self) -> Vec<u8> {\n unsafe {\n let header_ptr: *const UDPHeader = &self.header;\n let mut ret = Vec::from(slice::from_raw_parts(header_ptr as *const u8, mem::size_of::<UDPHeader>()));\n ret.push_all(&self.data);\n ret\n }\n }\n}\n\n\/\/\/ UDP resource\npub struct Resource {\n ip: File,\n data: Vec<u8>,\n peer_addr: IPv4Addr,\n peer_port: u16,\n host_port: u16,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Self>> {\n match self.ip.dup() {\n Some(ip) => Some(box Resource {\n ip: ip,\n data: self.data.clone(),\n peer_addr: self.peer_addr,\n peer_port: self.peer_port,\n host_port: self.host_port,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"udp:\/\/{}:{}\/{}\", self.peer_addr.to_string(), self.peer_port, self.host_port);\n\n let mut i = 0;\n for b in path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n\n if self.data.len() > 0 {\n let mut bytes: Vec<u8> = Vec::new();\n mem::swap(&mut self.data, &mut bytes);\n vec.push_all(&bytes);\n return Some(bytes.len());\n }\n\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(datagram) = UDP::from_bytes(bytes) {\n if datagram.header.dst.get() == self.host_port &&\n datagram.header.src.get() == self.peer_port {\n vec.push_all(&datagram.data);\n return Some(datagram.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let udp_data = Vec::from(buf);\n\n let mut udp = UDP {\n header: UDPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n len: n16::new((mem::size_of::<UDPHeader>() + udp_data.len()) as u16),\n checksum: Checksum { data: 0 },\n },\n data: udp_data,\n };\n\n unsafe {\n let proto = n16::new(0x11);\n let datagram_len = n16::new((mem::size_of::<UDPHeader>() + udp.data.len()) as u16);\n udp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&datagram_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&udp.header as *const UDPHeader) as usize,\n mem::size_of::<UDPHeader>()) +\n Checksum::sum(udp.data.as_ptr() as usize, udp.data.len()));\n }\n\n match self.ip.write(udp.to_bytes().as_slice()) {\n Some(_) => return Some(buf.len()),\n None => return None,\n }\n }\n\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n }\n\n pub fn sync(&mut self) -> bool {\n self.ip.sync()\n }\n}\n\n\/\/\/ UDP scheme\npub struct Scheme;\n\nimpl Scheme {\n pub fn new() -> Box<Self> {\n box Scheme\n }\n\n pub fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n \/\/Check host and port vs path\n if not_scheme.starts_with(\"\/\") {\n let host_port = not_scheme[1..not_scheme.len() - 1].to_string().to_num();\n if host_port > 0 && host_port < 65536 {\n if 
let Some(mut ip) = File::open(\"ip:\/\/\/11\") {\n let mut bytes: Vec<u8> = Vec::new();\n if ip.read_to_end(&mut bytes).is_some() {\n if let Some(datagram) = UDP::from_bytes(bytes) {\n if datagram.header.dst.get() as usize == host_port {\n let mut url_bytes = [0; 4096];\n if let Some(count) = ip.path(&mut url_bytes) {\n let url = unsafe { str::from_utf8_unchecked(&url_bytes[0..count]) };\n\n \/\/Split scheme from the rest of the URL\n let (scheme, mut not_scheme) = url.split_at(url.find(':').unwrap_or(url.len()));\n\n \/\/Remove the starting two slashes\n if not_scheme.starts_with(\"\/\/\") {\n not_scheme = ¬_scheme[2..not_scheme.len() - 2];\n }\n\n let (host, port) = not_scheme.split_at(not_scheme.find(':').unwrap_or(not_scheme.len()));\n\n let peer_addr = IPv4Addr::from_string(&host.to_string());\n\n return Some(box Resource {\n ip: ip,\n data: datagram.data,\n peer_addr: peer_addr,\n peer_port: datagram.header.src.get(),\n host_port: host_port as u16,\n });\n }\n }\n }\n }\n }\n }\n } else {\n let (host, port) = not_scheme.split_at(not_scheme.find(':').unwrap_or(not_scheme.len()));\n\n let peer_port = port.to_string().to_num();\n if peer_port > 0 && peer_port < 65536 {\n let host_port = (rand() % 32768 + 32768) as u16;\n\n if let Some(ip) = File::open(&format!(\"ip:\/\/{}\/11\", host)) {\n return Some(box Resource {\n ip: ip,\n data: Vec::new(),\n peer_addr: IPv4Addr::from_string(&host.to_string()),\n peer_port: peer_port as u16,\n host_port: host_port,\n });\n }\n }\n }\n\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Initial commit of toy AA tree implementation<commit_after>type Link<T> = Option<Box<T>>;\n\nstruct Node<K, V> {\n key: K,\n value: V,\n left: Link<Node<K, V>>,\n right: Link<Node<K, V>>,\n level: uint\n}\n\nstruct Tree<K, V> {\n root: Link<Node<K, V>>,\n}\n\n\nimpl<K: Ord, V> Node<K, V> {\n fn new(key: K, value: V) -> Node<K, V> {\n Node { key: key, value: value, left: None, right: None, level: 1 }\n }\n\n fn max(&self) -> &K {\n match self.right {\n None => &self.key,\n Some(ref n) => n.max(),\n }\n }\n\n fn min(&self) -> &K {\n match self.left {\n None => &self.key,\n Some(ref n) => n.min(),\n }\n }\n\n fn is_bst(&self) -> bool {\n let check_left = self.left.as_ref().map_or(true, \n |n| n.is_bst() && *n.max() < self.key);\n\n if check_left {\n self.right.as_ref().map_or(true,\n |n| n.is_bst() && *n.min() > self.key)\n } else { \n false \n }\n }\n\n fn find<'a>(&'a self, key: &K) -> Option<&'a V> {\n match key.cmp(&self.key) {\n Equal => Some(&self.value),\n Less =>\n match self.left {\n None => None,\n Some(ref b) => b.find(key),\n },\n Greater =>\n match self.right {\n None => None,\n Some(ref b) => b.find(key),\n },\n }\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before>use std::error::Error;\nuse std::fmt::Error as FmtError;\nuse std::fmt::{Display, Formatter};\n\ngenerate_error_types!(InteractionError, InteractionErrorKind,\n Unknown => \"Unknown Error\"\n);\n\n<commit_msg>libimaginteraction: Replace error module imports with macro helper<commit_after>generate_error_imports!();\n\ngenerate_error_types!(InteractionError, InteractionErrorKind,\n Unknown => \"Unknown Error\"\n);\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>And now no allocation at all<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a test that joining main is UB.<commit_after>\/\/ ignore-windows: Concurrency on Windows is not supported yet.\n\n\/\/ Joining the main thread is undefined behavior.\n\n#![feature(rustc_private)]\n\nextern 
crate libc;\n\nuse std::{ptr, thread};\n\nfn main() {\n let thread_id: libc::pthread_t = unsafe { libc::pthread_self() };\n let handle = thread::spawn(move || {\n unsafe {\n assert_eq!(libc::pthread_join(thread_id, ptr::null_mut()), 0); \/\/~ ERROR: Undefined Behavior: trying to join a detached or already joined thread\n }\n });\n thread::yield_now();\n handle.join().unwrap();\n}\n<|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Servo's experimental layout system builds a tree of `FlowContext` and `RenderBox` objects and\n\/\/\/ solves layout constraints to obtain positions and display attributes of tree nodes. Positions\n\/\/\/ are computed in several tree traversals driven by the fundamental data dependencies required by\n\/\/\/ inline and block layout.\n\/\/\/ \n\/\/\/ Flows are interior nodes in the layout tree and correspond closely to *flow contexts* in the\n\/\/\/ CSS specification. Flows are responsible for positioning their child flow contexts and render\n\/\/\/ boxes. Flows have purpose-specific fields, such as auxiliary line box structs, out-of-flow\n\/\/\/ child lists, and so on.\n\/\/\/\n\/\/\/ Currently, the important types of flows are:\n\/\/\/ \n\/\/\/ * `BlockFlow`: A flow that establishes a block context. It has several child flows, each of\n\/\/\/ which are positioned according to block formatting context rules (CSS block boxes). Block\n\/\/\/ flows also contain a single `GenericBox` to represent their rendered borders, padding, etc.\n\/\/\/ (In the future, this render box may be folded into `BlockFlow` to save space.) The BlockFlow\n\/\/\/ at the root of the tree has special behavior: it stretches to the boundaries of the viewport.\n\/\/\/ \n\/\/\/ * `InlineFlow`: A flow that establishes an inline context. 
It has a flat list of child\n\/\/\/ boxes\/flows that are subject to inline layout and line breaking and structs to represent\n\/\/\/ line breaks and mapping to CSS boxes, for the purpose of handling `getClientRects()` and\n\/\/\/ similar methods.\n\nuse layout::block::BlockFlowData;\nuse layout::float::FloatFlowData;\nuse layout::box::RenderBox;\nuse layout::context::LayoutContext;\nuse layout::display_list_builder::{DisplayListBuilder, ExtraDisplayListData};\nuse layout::inline::{InlineFlowData};\nuse layout::float_context::{FloatContext, Invalid, FloatType};\n\nuse std::cell::Cell;\nuse std::uint;\nuse std::io::stderr;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse gfx::display_list::DisplayList;\nuse gfx::geometry::Au;\nuse script::dom::node::{AbstractNode, LayoutView};\nuse servo_util::tree::{TreeNode, TreeNodeRef, TreeUtils};\n\n\/\/\/ The type of the formatting context and data specific to each context, such as line box\n\/\/\/ structures or float lists.\npub enum FlowContext {\n AbsoluteFlow(@mut FlowData), \n BlockFlow(@mut BlockFlowData),\n FloatFlow(@mut FloatFlowData),\n InlineBlockFlow(@mut FlowData),\n InlineFlow(@mut InlineFlowData),\n TableFlow(@mut FlowData),\n}\n\npub enum FlowContextType {\n Flow_Absolute, \n Flow_Block,\n Flow_Float(FloatType),\n Flow_InlineBlock,\n Flow_Inline,\n Flow_Root,\n Flow_Table\n}\n\nimpl Clone for FlowContext {\n fn clone(&self) -> FlowContext {\n *self\n }\n}\n\nimpl FlowContext {\n pub fn teardown(&self) {\n match *self {\n AbsoluteFlow(data) |\n InlineBlockFlow(data) |\n TableFlow(data) => data.teardown(),\n BlockFlow(data) => data.teardown(),\n FloatFlow(data) => data.teardown(),\n InlineFlow(data) => data.teardown()\n }\n }\n\n \/\/\/ Like traverse_preorder, but don't end the whole traversal if the callback\n \/\/\/ returns false.\n fn partially_traverse_preorder(&self, callback: &fn(FlowContext) -> bool) {\n if !callback((*self).clone()) {\n return;\n }\n\n for self.each_child |kid| {\n \/\/ FIXME: Work around rust#2202. 
We should be able to pass the callback directly.\n kid.partially_traverse_preorder(|a| callback(a));\n }\n }\n}\n\nimpl FlowData {\n pub fn teardown(&mut self) {\n \/\/ Under the assumption that all flows exist in a tree,\n \/\/ we must restrict ourselves to finalizing flows that\n \/\/ are descendents and subsequent siblings to ourselves,\n \/\/ or we risk dynamic borrow failures.\n self.parent = None;\n\n for self.first_child.iter().advance |flow| {\n flow.teardown();\n }\n self.first_child = None;\n\n self.last_child = None;\n\n for self.next_sibling.iter().advance |flow| {\n flow.teardown();\n }\n self.next_sibling = None;\n\n self.prev_sibling = None;\n }\n}\n\nimpl TreeNodeRef<FlowData> for FlowContext {\n fn with_base<R>(&self, callback: &fn(&FlowData) -> R) -> R {\n match *self {\n AbsoluteFlow(info) => callback(info),\n BlockFlow(info) => {\n callback(&info.common)\n }\n FloatFlow(info) => callback(&info.common),\n InlineBlockFlow(info) => callback(info),\n InlineFlow(info) => {\n callback(&info.common)\n }\n TableFlow(info) => callback(info)\n }\n }\n fn with_mut_base<R>(&self, callback: &fn(&mut FlowData) -> R) -> R {\n match *self {\n AbsoluteFlow(info) => callback(info),\n BlockFlow(info) => {\n callback(&mut info.common)\n }\n FloatFlow(info) => callback(&mut info.common),\n InlineBlockFlow(info) => callback(info),\n InlineFlow(info) => {\n callback(&mut info.common)\n }\n TableFlow(info) => callback(info),\n }\n }\n}\n\n\/\/\/ Data common to all flows.\n\/\/\/\n\/\/\/ FIXME: We need a naming convention for pseudo-inheritance like this. How about\n\/\/\/ `CommonFlowInfo`?\npub struct FlowData {\n node: AbstractNode<LayoutView>,\n\n parent: Option<FlowContext>,\n first_child: Option<FlowContext>,\n last_child: Option<FlowContext>,\n prev_sibling: Option<FlowContext>,\n next_sibling: Option<FlowContext>,\n\n \/* TODO (Issue #87): debug only *\/\n id: int,\n\n \/* layout computations *\/\n \/\/ TODO: min\/pref and position are used during disjoint phases of\n \/\/ layout; maybe combine into a single enum to save space.\n min_width: Au,\n pref_width: Au,\n position: Rect<Au>,\n floats_in: FloatContext,\n floats_out: FloatContext,\n num_floats: uint,\n abs_position: Point2D<Au>\n}\n\nimpl TreeNode<FlowContext> for FlowData {\n fn parent_node(&self) -> Option<FlowContext> {\n self.parent\n }\n\n fn first_child(&self) -> Option<FlowContext> {\n self.first_child\n }\n\n fn last_child(&self) -> Option<FlowContext> {\n self.last_child\n }\n\n fn prev_sibling(&self) -> Option<FlowContext> {\n self.prev_sibling\n }\n\n fn next_sibling(&self) -> Option<FlowContext> {\n self.next_sibling\n }\n\n fn set_parent_node(&mut self, new_parent_node: Option<FlowContext>) {\n self.parent = new_parent_node\n }\n\n fn set_first_child(&mut self, new_first_child: Option<FlowContext>) {\n self.first_child = new_first_child\n }\n\n fn set_last_child(&mut self, new_last_child: Option<FlowContext>) {\n self.last_child = new_last_child\n }\n\n fn set_prev_sibling(&mut self, new_prev_sibling: Option<FlowContext>) {\n self.prev_sibling = new_prev_sibling\n }\n\n fn set_next_sibling(&mut self, new_next_sibling: Option<FlowContext>) {\n self.next_sibling = new_next_sibling\n }\n}\n\nimpl FlowData {\n pub fn new(id: int, node: AbstractNode<LayoutView>) -> FlowData {\n FlowData {\n node: node,\n\n parent: None,\n first_child: None,\n last_child: None,\n prev_sibling: None,\n next_sibling: None,\n\n id: id,\n\n min_width: Au(0),\n pref_width: Au(0),\n position: Au::zero_rect(),\n floats_in: Invalid,\n floats_out: 
Invalid,\n num_floats: 0,\n abs_position: Point2D(Au(0), Au(0))\n }\n }\n}\n\nimpl<'self> FlowContext {\n \/\/\/ A convenience method to return the position of this flow. Fails if the flow is currently\n \/\/\/ being borrowed mutably.\n #[inline(always)]\n pub fn position(&self) -> Rect<Au> {\n do self.with_base |common_info| {\n common_info.position\n }\n }\n\n \/\/\/ A convenience method to return the ID of this flow. Fails if the flow is currently being\n \/\/\/ borrowed mutably.\n #[inline(always)]\n pub fn id(&self) -> int {\n do self.with_base |info| {\n info.id\n }\n }\n\n pub fn inline(&self) -> @mut InlineFlowData {\n match *self {\n InlineFlow(info) => info,\n _ => fail!(fmt!(\"Tried to access inline data of non-inline: f%d\", self.id()))\n }\n }\n\n pub fn block(&self) -> @mut BlockFlowData {\n match *self {\n BlockFlow(info) => info,\n _ => fail!(fmt!(\"Tried to access block data of non-block: f%d\", self.id()))\n }\n }\n\n pub fn root(&self) -> @mut BlockFlowData {\n match *self {\n BlockFlow(info) if info.is_root => info,\n _ => fail!(fmt!(\"Tried to access root block data of non-root: f%d\", self.id()))\n }\n }\n\n pub fn bubble_widths(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.bubble_widths_block(ctx),\n InlineFlow(info) => info.bubble_widths_inline(ctx),\n FloatFlow(info) => info.bubble_widths_float(ctx),\n _ => fail!(fmt!(\"Tried to bubble_widths of flow: f%d\", self.id()))\n }\n }\n\n pub fn assign_widths(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.assign_widths_block(ctx),\n InlineFlow(info) => info.assign_widths_inline(ctx),\n FloatFlow(info) => info.assign_widths_float(ctx),\n _ => fail!(fmt!(\"Tried to assign_widths of flow: f%d\", self.id()))\n }\n }\n\n pub fn assign_height(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.assign_height_block(ctx),\n InlineFlow(info) => info.assign_height_inline(ctx),\n FloatFlow(info) => info.assign_height_float(ctx),\n _ => fail!(fmt!(\"Tried to assign_height of flow: f%d\", self.id()))\n }\n }\n\n pub fn build_display_list<E:ExtraDisplayListData>(&self,\n builder: &DisplayListBuilder,\n dirty: &Rect<Au>,\n list: &Cell<DisplayList<E>>)\n -> bool {\n\n \n match *self {\n BlockFlow(info) => info.build_display_list_block(builder, dirty, list),\n InlineFlow(info) => info.build_display_list_inline(builder, dirty, list),\n FloatFlow(info) => info.build_display_list_float(builder, dirty, list),\n _ => {\n fail!(\"Tried to build_display_list_recurse of flow: %?\", self)\n }\n }\n\n }\n\n \/\/ Actual methods that do not require much flow-specific logic\n pub fn foldl_all_boxes<B:Clone>(&self, seed: B, cb: &fn(a: B, b: RenderBox) -> B) -> B {\n match *self {\n BlockFlow(block) => {\n let block = &mut *block;\n do block.box.map_default(seed.clone()) |box| {\n cb(seed.clone(), *box)\n }\n }\n InlineFlow(inline) => {\n let inline = &mut *inline;\n do inline.boxes.iter().fold(seed) |acc, box| {\n cb(acc.clone(), *box)\n }\n }\n _ => fail!(fmt!(\"Don't know how to iterate node's RenderBoxes for %?\", self)),\n }\n }\n\n pub fn foldl_boxes_for_node<B:Clone>(&self,\n node: AbstractNode<LayoutView>,\n seed: B,\n callback: &fn(a: B, RenderBox) -> B)\n -> B {\n do self.foldl_all_boxes(seed) |acc, box| {\n if box.node() == node {\n callback(acc, box)\n } else {\n acc\n }\n }\n }\n\n pub fn iter_all_boxes(&self, cb: &fn(RenderBox) -> bool) -> bool {\n match *self {\n BlockFlow(block) => {\n let block = &mut *block;\n for block.box.iter().advance |box| {\n if 
!cb(*box) {\n break;\n }\n }\n }\n InlineFlow(inline) => {\n let inline = &mut *inline;\n for inline.boxes.iter().advance |box| {\n if !cb(*box) {\n break;\n }\n }\n }\n _ => fail!(fmt!(\"Don't know how to iterate node's RenderBoxes for %?\", self))\n }\n\n true\n }\n\n pub fn iter_boxes_for_node(&self,\n node: AbstractNode<LayoutView>,\n callback: &fn(RenderBox) -> bool)\n -> bool {\n for self.iter_all_boxes |box| {\n if box.node() == node {\n if !callback(box) {\n break;\n }\n }\n }\n\n true\n }\n\n \/\/\/ Dumps the flow tree for debugging.\n pub fn dump(&self) {\n self.dump_indent(0);\n }\n\n \/\/\/ Dumps the flow tree, for debugging, with indentation.\n pub fn dump_indent(&self, indent: uint) {\n let mut s = ~\"|\";\n for uint::range(0, indent) |_i| {\n s.push_str(\"---- \");\n }\n\n s.push_str(self.debug_str());\n stderr().write_line(s);\n\n \/\/ FIXME: this should have a pure\/const version?\n for self.each_child |child| {\n child.dump_indent(indent + 1)\n }\n }\n \n pub fn debug_str(&self) -> ~str {\n let repr = match *self {\n InlineFlow(inline) => {\n let mut s = inline.boxes.iter().fold(~\"InlineFlow(children=\", |s, box| {\n fmt!(\"%s b%d\", s, box.id())\n });\n s.push_str(\")\");\n s\n },\n BlockFlow(block) => {\n match block.box {\n Some(box) => fmt!(\"BlockFlow(box=b%d)\", box.id()),\n None => ~\"BlockFlow\",\n }\n },\n FloatFlow(float) => {\n match float.box {\n Some(box) => fmt!(\"FloatFlow(box=b%d)\", box.id()),\n None => ~\"FloatFlow\",\n }\n },\n _ => ~\"(Unknown flow)\"\n };\n\n do self.with_base |base| {\n fmt!(\"f%? %? floats %? size %?\", base.id, repr, base.num_floats, base.position)\n }\n }\n}\n\n<commit_msg>Derive Clone for FlowContext<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! Servo's experimental layout system builds a tree of `FlowContext` and `RenderBox` objects and\n\/\/\/ solves layout constraints to obtain positions and display attributes of tree nodes. Positions\n\/\/\/ are computed in several tree traversals driven by the fundamental data dependencies required by\n\/\/\/ inline and block layout.\n\/\/\/ \n\/\/\/ Flows are interior nodes in the layout tree and correspond closely to *flow contexts* in the\n\/\/\/ CSS specification. Flows are responsible for positioning their child flow contexts and render\n\/\/\/ boxes. Flows have purpose-specific fields, such as auxiliary line box structs, out-of-flow\n\/\/\/ child lists, and so on.\n\/\/\/\n\/\/\/ Currently, the important types of flows are:\n\/\/\/ \n\/\/\/ * `BlockFlow`: A flow that establishes a block context. It has several child flows, each of\n\/\/\/ which are positioned according to block formatting context rules (CSS block boxes). Block\n\/\/\/ flows also contain a single `GenericBox` to represent their rendered borders, padding, etc.\n\/\/\/ (In the future, this render box may be folded into `BlockFlow` to save space.) The BlockFlow\n\/\/\/ at the root of the tree has special behavior: it stretches to the boundaries of the viewport.\n\/\/\/ \n\/\/\/ * `InlineFlow`: A flow that establishes an inline context. 
It has a flat list of child\n\/\/\/ boxes\/flows that are subject to inline layout and line breaking and structs to represent\n\/\/\/ line breaks and mapping to CSS boxes, for the purpose of handling `getClientRects()` and\n\/\/\/ similar methods.\n\nuse layout::block::BlockFlowData;\nuse layout::float::FloatFlowData;\nuse layout::box::RenderBox;\nuse layout::context::LayoutContext;\nuse layout::display_list_builder::{DisplayListBuilder, ExtraDisplayListData};\nuse layout::inline::{InlineFlowData};\nuse layout::float_context::{FloatContext, Invalid, FloatType};\n\nuse std::cell::Cell;\nuse std::uint;\nuse std::io::stderr;\nuse geom::point::Point2D;\nuse geom::rect::Rect;\nuse gfx::display_list::DisplayList;\nuse gfx::geometry::Au;\nuse script::dom::node::{AbstractNode, LayoutView};\nuse servo_util::tree::{TreeNode, TreeNodeRef, TreeUtils};\n\n\/\/\/ The type of the formatting context and data specific to each context, such as line box\n\/\/\/ structures or float lists.\n#[deriving(Clone)]\npub enum FlowContext {\n AbsoluteFlow(@mut FlowData), \n BlockFlow(@mut BlockFlowData),\n FloatFlow(@mut FloatFlowData),\n InlineBlockFlow(@mut FlowData),\n InlineFlow(@mut InlineFlowData),\n TableFlow(@mut FlowData),\n}\n\npub enum FlowContextType {\n Flow_Absolute, \n Flow_Block,\n Flow_Float(FloatType),\n Flow_InlineBlock,\n Flow_Inline,\n Flow_Root,\n Flow_Table\n}\n\nimpl FlowContext {\n pub fn teardown(&self) {\n match *self {\n AbsoluteFlow(data) |\n InlineBlockFlow(data) |\n TableFlow(data) => data.teardown(),\n BlockFlow(data) => data.teardown(),\n FloatFlow(data) => data.teardown(),\n InlineFlow(data) => data.teardown()\n }\n }\n\n \/\/\/ Like traverse_preorder, but don't end the whole traversal if the callback\n \/\/\/ returns false.\n fn partially_traverse_preorder(&self, callback: &fn(FlowContext) -> bool) {\n if !callback((*self).clone()) {\n return;\n }\n\n for self.each_child |kid| {\n \/\/ FIXME: Work around rust#2202. We should be able to pass the callback directly.\n kid.partially_traverse_preorder(|a| callback(a));\n }\n }\n}\n\nimpl FlowData {\n pub fn teardown(&mut self) {\n \/\/ Under the assumption that all flows exist in a tree,\n \/\/ we must restrict ourselves to finalizing flows that\n \/\/ are descendents and subsequent siblings to ourselves,\n \/\/ or we risk dynamic borrow failures.\n self.parent = None;\n\n for self.first_child.iter().advance |flow| {\n flow.teardown();\n }\n self.first_child = None;\n\n self.last_child = None;\n\n for self.next_sibling.iter().advance |flow| {\n flow.teardown();\n }\n self.next_sibling = None;\n\n self.prev_sibling = None;\n }\n}\n\nimpl TreeNodeRef<FlowData> for FlowContext {\n fn with_base<R>(&self, callback: &fn(&FlowData) -> R) -> R {\n match *self {\n AbsoluteFlow(info) => callback(info),\n BlockFlow(info) => {\n callback(&info.common)\n }\n FloatFlow(info) => callback(&info.common),\n InlineBlockFlow(info) => callback(info),\n InlineFlow(info) => {\n callback(&info.common)\n }\n TableFlow(info) => callback(info)\n }\n }\n fn with_mut_base<R>(&self, callback: &fn(&mut FlowData) -> R) -> R {\n match *self {\n AbsoluteFlow(info) => callback(info),\n BlockFlow(info) => {\n callback(&mut info.common)\n }\n FloatFlow(info) => callback(&mut info.common),\n InlineBlockFlow(info) => callback(info),\n InlineFlow(info) => {\n callback(&mut info.common)\n }\n TableFlow(info) => callback(info),\n }\n }\n}\n\n\/\/\/ Data common to all flows.\n\/\/\/\n\/\/\/ FIXME: We need a naming convention for pseudo-inheritance like this. 
How about\n\/\/\/ `CommonFlowInfo`?\npub struct FlowData {\n node: AbstractNode<LayoutView>,\n\n parent: Option<FlowContext>,\n first_child: Option<FlowContext>,\n last_child: Option<FlowContext>,\n prev_sibling: Option<FlowContext>,\n next_sibling: Option<FlowContext>,\n\n \/* TODO (Issue #87): debug only *\/\n id: int,\n\n \/* layout computations *\/\n \/\/ TODO: min\/pref and position are used during disjoint phases of\n \/\/ layout; maybe combine into a single enum to save space.\n min_width: Au,\n pref_width: Au,\n position: Rect<Au>,\n floats_in: FloatContext,\n floats_out: FloatContext,\n num_floats: uint,\n abs_position: Point2D<Au>\n}\n\nimpl TreeNode<FlowContext> for FlowData {\n fn parent_node(&self) -> Option<FlowContext> {\n self.parent\n }\n\n fn first_child(&self) -> Option<FlowContext> {\n self.first_child\n }\n\n fn last_child(&self) -> Option<FlowContext> {\n self.last_child\n }\n\n fn prev_sibling(&self) -> Option<FlowContext> {\n self.prev_sibling\n }\n\n fn next_sibling(&self) -> Option<FlowContext> {\n self.next_sibling\n }\n\n fn set_parent_node(&mut self, new_parent_node: Option<FlowContext>) {\n self.parent = new_parent_node\n }\n\n fn set_first_child(&mut self, new_first_child: Option<FlowContext>) {\n self.first_child = new_first_child\n }\n\n fn set_last_child(&mut self, new_last_child: Option<FlowContext>) {\n self.last_child = new_last_child\n }\n\n fn set_prev_sibling(&mut self, new_prev_sibling: Option<FlowContext>) {\n self.prev_sibling = new_prev_sibling\n }\n\n fn set_next_sibling(&mut self, new_next_sibling: Option<FlowContext>) {\n self.next_sibling = new_next_sibling\n }\n}\n\nimpl FlowData {\n pub fn new(id: int, node: AbstractNode<LayoutView>) -> FlowData {\n FlowData {\n node: node,\n\n parent: None,\n first_child: None,\n last_child: None,\n prev_sibling: None,\n next_sibling: None,\n\n id: id,\n\n min_width: Au(0),\n pref_width: Au(0),\n position: Au::zero_rect(),\n floats_in: Invalid,\n floats_out: Invalid,\n num_floats: 0,\n abs_position: Point2D(Au(0), Au(0))\n }\n }\n}\n\nimpl<'self> FlowContext {\n \/\/\/ A convenience method to return the position of this flow. Fails if the flow is currently\n \/\/\/ being borrowed mutably.\n #[inline(always)]\n pub fn position(&self) -> Rect<Au> {\n do self.with_base |common_info| {\n common_info.position\n }\n }\n\n \/\/\/ A convenience method to return the ID of this flow. 
Fails if the flow is currently being\n \/\/\/ borrowed mutably.\n #[inline(always)]\n pub fn id(&self) -> int {\n do self.with_base |info| {\n info.id\n }\n }\n\n pub fn inline(&self) -> @mut InlineFlowData {\n match *self {\n InlineFlow(info) => info,\n _ => fail!(fmt!(\"Tried to access inline data of non-inline: f%d\", self.id()))\n }\n }\n\n pub fn block(&self) -> @mut BlockFlowData {\n match *self {\n BlockFlow(info) => info,\n _ => fail!(fmt!(\"Tried to access block data of non-block: f%d\", self.id()))\n }\n }\n\n pub fn root(&self) -> @mut BlockFlowData {\n match *self {\n BlockFlow(info) if info.is_root => info,\n _ => fail!(fmt!(\"Tried to access root block data of non-root: f%d\", self.id()))\n }\n }\n\n pub fn bubble_widths(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.bubble_widths_block(ctx),\n InlineFlow(info) => info.bubble_widths_inline(ctx),\n FloatFlow(info) => info.bubble_widths_float(ctx),\n _ => fail!(fmt!(\"Tried to bubble_widths of flow: f%d\", self.id()))\n }\n }\n\n pub fn assign_widths(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.assign_widths_block(ctx),\n InlineFlow(info) => info.assign_widths_inline(ctx),\n FloatFlow(info) => info.assign_widths_float(ctx),\n _ => fail!(fmt!(\"Tried to assign_widths of flow: f%d\", self.id()))\n }\n }\n\n pub fn assign_height(&self, ctx: &mut LayoutContext) {\n match *self {\n BlockFlow(info) => info.assign_height_block(ctx),\n InlineFlow(info) => info.assign_height_inline(ctx),\n FloatFlow(info) => info.assign_height_float(ctx),\n _ => fail!(fmt!(\"Tried to assign_height of flow: f%d\", self.id()))\n }\n }\n\n pub fn build_display_list<E:ExtraDisplayListData>(&self,\n builder: &DisplayListBuilder,\n dirty: &Rect<Au>,\n list: &Cell<DisplayList<E>>)\n -> bool {\n\n \n match *self {\n BlockFlow(info) => info.build_display_list_block(builder, dirty, list),\n InlineFlow(info) => info.build_display_list_inline(builder, dirty, list),\n FloatFlow(info) => info.build_display_list_float(builder, dirty, list),\n _ => {\n fail!(\"Tried to build_display_list_recurse of flow: %?\", self)\n }\n }\n\n }\n\n \/\/ Actual methods that do not require much flow-specific logic\n pub fn foldl_all_boxes<B:Clone>(&self, seed: B, cb: &fn(a: B, b: RenderBox) -> B) -> B {\n match *self {\n BlockFlow(block) => {\n let block = &mut *block;\n do block.box.map_default(seed.clone()) |box| {\n cb(seed.clone(), *box)\n }\n }\n InlineFlow(inline) => {\n let inline = &mut *inline;\n do inline.boxes.iter().fold(seed) |acc, box| {\n cb(acc.clone(), *box)\n }\n }\n _ => fail!(fmt!(\"Don't know how to iterate node's RenderBoxes for %?\", self)),\n }\n }\n\n pub fn foldl_boxes_for_node<B:Clone>(&self,\n node: AbstractNode<LayoutView>,\n seed: B,\n callback: &fn(a: B, RenderBox) -> B)\n -> B {\n do self.foldl_all_boxes(seed) |acc, box| {\n if box.node() == node {\n callback(acc, box)\n } else {\n acc\n }\n }\n }\n\n pub fn iter_all_boxes(&self, cb: &fn(RenderBox) -> bool) -> bool {\n match *self {\n BlockFlow(block) => {\n let block = &mut *block;\n for block.box.iter().advance |box| {\n if !cb(*box) {\n break;\n }\n }\n }\n InlineFlow(inline) => {\n let inline = &mut *inline;\n for inline.boxes.iter().advance |box| {\n if !cb(*box) {\n break;\n }\n }\n }\n _ => fail!(fmt!(\"Don't know how to iterate node's RenderBoxes for %?\", self))\n }\n\n true\n }\n\n pub fn iter_boxes_for_node(&self,\n node: AbstractNode<LayoutView>,\n callback: &fn(RenderBox) -> bool)\n -> bool {\n for self.iter_all_boxes |box| {\n if 
box.node() == node {\n if !callback(box) {\n break;\n }\n }\n }\n\n true\n }\n\n \/\/\/ Dumps the flow tree for debugging.\n pub fn dump(&self) {\n self.dump_indent(0);\n }\n\n \/\/\/ Dumps the flow tree, for debugging, with indentation.\n pub fn dump_indent(&self, indent: uint) {\n let mut s = ~\"|\";\n for uint::range(0, indent) |_i| {\n s.push_str(\"---- \");\n }\n\n s.push_str(self.debug_str());\n stderr().write_line(s);\n\n \/\/ FIXME: this should have a pure\/const version?\n for self.each_child |child| {\n child.dump_indent(indent + 1)\n }\n }\n \n pub fn debug_str(&self) -> ~str {\n let repr = match *self {\n InlineFlow(inline) => {\n let mut s = inline.boxes.iter().fold(~\"InlineFlow(children=\", |s, box| {\n fmt!(\"%s b%d\", s, box.id())\n });\n s.push_str(\")\");\n s\n },\n BlockFlow(block) => {\n match block.box {\n Some(box) => fmt!(\"BlockFlow(box=b%d)\", box.id()),\n None => ~\"BlockFlow\",\n }\n },\n FloatFlow(float) => {\n match float.box {\n Some(box) => fmt!(\"FloatFlow(box=b%d)\", box.id()),\n None => ~\"FloatFlow\",\n }\n },\n _ => ~\"(Unknown flow)\"\n };\n\n do self.with_base |base| {\n fmt!(\"f%? %? floats %? size %?\", base.id, repr, base.num_floats, base.position)\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the `EvalContext` methods for executing a single step of the interpreter.\n\/\/!\n\/\/! The main entry point is the `step` method.\n\nuse rustc::mir;\nuse rustc::ty::layout::LayoutOf;\nuse rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};\n\nuse super::{EvalContext, Machine};\n\n\/\/\/ Classify whether an operator is \"left-homogeneous\", i.e. the LHS has the\n\/\/\/ same type as the result.\n#[inline]\nfn binop_left_homogeneous(op: mir::BinOp) -> bool {\n use rustc::mir::BinOp::*;\n match op {\n Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr |\n Offset | Shl | Shr =>\n true,\n Eq | Ne | Lt | Le | Gt | Ge =>\n false,\n }\n}\n\/\/\/ Classify whether an operator is \"right-homogeneous\", i.e. the RHS has the\n\/\/\/ same type as the LHS.\n#[inline]\nfn binop_right_homogeneous(op: mir::BinOp) -> bool {\n use rustc::mir::BinOp::*;\n match op {\n Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr |\n Eq | Ne | Lt | Le | Gt | Ge =>\n true,\n Offset | Shl | Shr =>\n false,\n }\n}\n\nimpl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {\n pub fn run(&mut self) -> EvalResult<'tcx> {\n while self.step()? 
{}\n Ok(())\n }\n\n \/\/\/ Returns true as long as there are more things to do.\n fn step(&mut self) -> EvalResult<'tcx, bool> {\n if self.stack.is_empty() {\n return Ok(false);\n }\n\n let block = self.frame().block;\n let stmt_id = self.frame().stmt;\n let mir = self.mir();\n let basic_block = &mir.basic_blocks()[block];\n\n let old_frames = self.cur_frame();\n\n if let Some(stmt) = basic_block.statements.get(stmt_id) {\n assert_eq!(old_frames, self.cur_frame());\n self.statement(stmt)?;\n return Ok(true);\n }\n\n M::before_terminator(self)?;\n\n let terminator = basic_block.terminator();\n assert_eq!(old_frames, self.cur_frame());\n self.terminator(terminator)?;\n Ok(true)\n }\n\n fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> {\n debug!(\"{:?}\", stmt);\n\n use rustc::mir::StatementKind::*;\n\n \/\/ Some statements (e.g. box) push new stack frames.\n \/\/ We have to record the stack frame number *before* executing the statement.\n let frame_idx = self.cur_frame();\n self.tcx.span = stmt.source_info.span;\n self.memory.tcx.span = stmt.source_info.span;\n\n match stmt.kind {\n Assign(ref place, ref rvalue) => self.eval_rvalue_into_place(rvalue, place)?,\n\n SetDiscriminant {\n ref place,\n variant_index,\n } => {\n let dest = self.eval_place(place)?;\n self.write_discriminant_index(variant_index, dest)?;\n }\n\n \/\/ Mark locals as alive\n StorageLive(local) => {\n let old_val = self.storage_live(local)?;\n self.deallocate_local(old_val)?;\n }\n\n \/\/ Mark locals as dead\n StorageDead(local) => {\n let old_val = self.storage_dead(local);\n self.deallocate_local(old_val)?;\n }\n\n \/\/ No dynamic semantics attached to `FakeRead`; MIR\n \/\/ interpreter is solely intended for borrowck'ed code.\n FakeRead(..) => {}\n\n \/\/ Validity checks.\n Validate(op, ref places) => {\n for operand in places {\n M::validation_op(self, op, operand)?;\n }\n }\n\n EndRegion(..) => {}\n AscribeUserType(..) => {}\n\n \/\/ Defined to do nothing. These are added by optimization passes, to avoid changing the\n \/\/ size of MIR constantly.\n Nop => {}\n\n InlineAsm { .. } => return err!(InlineAsm),\n }\n\n self.stack[frame_idx].stmt += 1;\n Ok(())\n }\n\n \/\/\/ Evaluate an assignment statement.\n \/\/\/\n \/\/\/ There is no separate `eval_rvalue` function. 
Instead, the code for handling each rvalue\n \/\/\/ type writes its results directly into the memory specified by the place.\n fn eval_rvalue_into_place(\n &mut self,\n rvalue: &mir::Rvalue<'tcx>,\n place: &mir::Place<'tcx>,\n ) -> EvalResult<'tcx> {\n let dest = self.eval_place(place)?;\n\n use rustc::mir::Rvalue::*;\n match *rvalue {\n Use(ref operand) => {\n \/\/ Avoid recomputing the layout\n let op = self.eval_operand(operand, Some(dest.layout))?;\n self.copy_op(op, dest)?;\n }\n\n BinaryOp(bin_op, ref left, ref right) => {\n let layout = if binop_left_homogeneous(bin_op) { Some(dest.layout) } else { None };\n let left = self.read_value(self.eval_operand(left, layout)?)?;\n let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };\n let right = self.read_value(self.eval_operand(right, layout)?)?;\n self.binop_ignore_overflow(\n bin_op,\n left,\n right,\n dest,\n )?;\n }\n\n CheckedBinaryOp(bin_op, ref left, ref right) => {\n \/\/ Due to the extra boolean in the result, we can never reuse the `dest.layout`.\n let left = self.read_value(self.eval_operand(left, None)?)?;\n let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };\n let right = self.read_value(self.eval_operand(right, layout)?)?;\n self.binop_with_overflow(\n bin_op,\n left,\n right,\n dest,\n )?;\n }\n\n UnaryOp(un_op, ref operand) => {\n \/\/ The operand always has the same type as the result.\n let val = self.read_value(self.eval_operand(operand, Some(dest.layout))?)?;\n let val = self.unary_op(un_op, val.to_scalar()?, dest.layout)?;\n self.write_scalar(val, dest)?;\n }\n\n Aggregate(ref kind, ref operands) => {\n let (dest, active_field_index) = match **kind {\n mir::AggregateKind::Adt(adt_def, variant_index, _, _, active_field_index) => {\n self.write_discriminant_index(variant_index, dest)?;\n if adt_def.is_enum() {\n (self.place_downcast(dest, variant_index)?, active_field_index)\n } else {\n (dest, active_field_index)\n }\n }\n _ => (dest, None)\n };\n\n for (i, operand) in operands.iter().enumerate() {\n let op = self.eval_operand(operand, None)?;\n \/\/ Ignore zero-sized fields.\n if !op.layout.is_zst() {\n let field_index = active_field_index.unwrap_or(i);\n let field_dest = self.place_field(dest, field_index as u64)?;\n self.copy_op(op, field_dest)?;\n }\n }\n }\n\n Repeat(ref operand, _) => {\n let op = self.eval_operand(operand, None)?;\n let dest = self.force_allocation(dest)?;\n let length = dest.len(&self)?;\n\n if length > 0 {\n \/\/ write the first\n let first = self.mplace_field(dest, 0)?;\n self.copy_op(op, first.into())?;\n\n if length > 1 {\n \/\/ copy the rest\n let (dest, dest_align) = first.to_scalar_ptr_align();\n let rest = dest.ptr_offset(first.layout.size, &self)?;\n self.memory.copy_repeatedly(\n dest, dest_align, rest, dest_align, first.layout.size, length - 1, true\n )?;\n }\n }\n }\n\n Len(ref place) => {\n \/\/ FIXME(CTFE): don't allow computing the length of arrays in const eval\n let src = self.eval_place(place)?;\n let mplace = self.force_allocation(src)?;\n let len = mplace.len(&self)?;\n let size = self.pointer_size();\n self.write_scalar(\n Scalar::from_uint(len, size),\n dest,\n )?;\n }\n\n Ref(_, _, ref place) => {\n let src = self.eval_place(place)?;\n let val = self.force_allocation(src)?.to_ref();\n self.write_value(val, dest)?;\n }\n\n NullaryOp(mir::NullOp::Box, _) => {\n M::box_alloc(self, dest)?;\n }\n\n NullaryOp(mir::NullOp::SizeOf, ty) => {\n let ty = self.monomorphize(ty, self.substs());\n let layout = 
self.layout_of(ty)?;\n assert!(!layout.is_unsized(),\n \"SizeOf nullary MIR operator called for unsized type\");\n let size = self.pointer_size();\n self.write_scalar(\n Scalar::from_uint(layout.size.bytes(), size),\n dest,\n )?;\n }\n\n Cast(kind, ref operand, cast_ty) => {\n debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest.layout.ty);\n let src = self.eval_operand(operand, None)?;\n self.cast(src, kind, dest)?;\n }\n\n Discriminant(ref place) => {\n let place = self.eval_place(place)?;\n let discr_val = self.read_discriminant(self.place_to_op(place)?)?.0;\n let size = dest.layout.size;\n self.write_scalar(Scalar::from_uint(discr_val, size), dest)?;\n }\n }\n\n self.dump_place(*dest);\n\n Ok(())\n }\n\n fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> EvalResult<'tcx> {\n debug!(\"{:?}\", terminator.kind);\n self.tcx.span = terminator.source_info.span;\n self.memory.tcx.span = terminator.source_info.span;\n\n let old_stack = self.cur_frame();\n let old_bb = self.frame().block;\n self.eval_terminator(terminator)?;\n if !self.stack.is_empty() {\n \/\/ This should change *something*\n debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb);\n debug!(\"\/\/ {:?}\", self.frame().block);\n }\n Ok(())\n }\n}\n<commit_msg>Rollup merge of #55062 - bjorn3:ecx-step-public, r=oli-obk<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This module contains the `EvalContext` methods for executing a single step of the interpreter.\n\/\/!\n\/\/! The main entry point is the `step` method.\n\nuse rustc::mir;\nuse rustc::ty::layout::LayoutOf;\nuse rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};\n\nuse super::{EvalContext, Machine};\n\n\/\/\/ Classify whether an operator is \"left-homogeneous\", i.e. the LHS has the\n\/\/\/ same type as the result.\n#[inline]\nfn binop_left_homogeneous(op: mir::BinOp) -> bool {\n use rustc::mir::BinOp::*;\n match op {\n Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr |\n Offset | Shl | Shr =>\n true,\n Eq | Ne | Lt | Le | Gt | Ge =>\n false,\n }\n}\n\/\/\/ Classify whether an operator is \"right-homogeneous\", i.e. the RHS has the\n\/\/\/ same type as the LHS.\n#[inline]\nfn binop_right_homogeneous(op: mir::BinOp) -> bool {\n use rustc::mir::BinOp::*;\n match op {\n Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr |\n Eq | Ne | Lt | Le | Gt | Ge =>\n true,\n Offset | Shl | Shr =>\n false,\n }\n}\n\nimpl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {\n pub fn run(&mut self) -> EvalResult<'tcx> {\n while self.step()? 
{}\n Ok(())\n }\n\n \/\/\/ Returns true as long as there are more things to do.\n \/\/\/\n \/\/\/ This is used by [priroda](https:\/\/github.com\/oli-obk\/priroda)\n pub fn step(&mut self) -> EvalResult<'tcx, bool> {\n if self.stack.is_empty() {\n return Ok(false);\n }\n\n let block = self.frame().block;\n let stmt_id = self.frame().stmt;\n let mir = self.mir();\n let basic_block = &mir.basic_blocks()[block];\n\n let old_frames = self.cur_frame();\n\n if let Some(stmt) = basic_block.statements.get(stmt_id) {\n assert_eq!(old_frames, self.cur_frame());\n self.statement(stmt)?;\n return Ok(true);\n }\n\n M::before_terminator(self)?;\n\n let terminator = basic_block.terminator();\n assert_eq!(old_frames, self.cur_frame());\n self.terminator(terminator)?;\n Ok(true)\n }\n\n fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> {\n debug!(\"{:?}\", stmt);\n\n use rustc::mir::StatementKind::*;\n\n \/\/ Some statements (e.g. box) push new stack frames.\n \/\/ We have to record the stack frame number *before* executing the statement.\n let frame_idx = self.cur_frame();\n self.tcx.span = stmt.source_info.span;\n self.memory.tcx.span = stmt.source_info.span;\n\n match stmt.kind {\n Assign(ref place, ref rvalue) => self.eval_rvalue_into_place(rvalue, place)?,\n\n SetDiscriminant {\n ref place,\n variant_index,\n } => {\n let dest = self.eval_place(place)?;\n self.write_discriminant_index(variant_index, dest)?;\n }\n\n \/\/ Mark locals as alive\n StorageLive(local) => {\n let old_val = self.storage_live(local)?;\n self.deallocate_local(old_val)?;\n }\n\n \/\/ Mark locals as dead\n StorageDead(local) => {\n let old_val = self.storage_dead(local);\n self.deallocate_local(old_val)?;\n }\n\n \/\/ No dynamic semantics attached to `FakeRead`; MIR\n \/\/ interpreter is solely intended for borrowck'ed code.\n FakeRead(..) => {}\n\n \/\/ Validity checks.\n Validate(op, ref places) => {\n for operand in places {\n M::validation_op(self, op, operand)?;\n }\n }\n\n EndRegion(..) => {}\n AscribeUserType(..) => {}\n\n \/\/ Defined to do nothing. These are added by optimization passes, to avoid changing the\n \/\/ size of MIR constantly.\n Nop => {}\n\n InlineAsm { .. } => return err!(InlineAsm),\n }\n\n self.stack[frame_idx].stmt += 1;\n Ok(())\n }\n\n \/\/\/ Evaluate an assignment statement.\n \/\/\/\n \/\/\/ There is no separate `eval_rvalue` function. 
Instead, the code for handling each rvalue\n \/\/\/ type writes its results directly into the memory specified by the place.\n fn eval_rvalue_into_place(\n &mut self,\n rvalue: &mir::Rvalue<'tcx>,\n place: &mir::Place<'tcx>,\n ) -> EvalResult<'tcx> {\n let dest = self.eval_place(place)?;\n\n use rustc::mir::Rvalue::*;\n match *rvalue {\n Use(ref operand) => {\n \/\/ Avoid recomputing the layout\n let op = self.eval_operand(operand, Some(dest.layout))?;\n self.copy_op(op, dest)?;\n }\n\n BinaryOp(bin_op, ref left, ref right) => {\n let layout = if binop_left_homogeneous(bin_op) { Some(dest.layout) } else { None };\n let left = self.read_value(self.eval_operand(left, layout)?)?;\n let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };\n let right = self.read_value(self.eval_operand(right, layout)?)?;\n self.binop_ignore_overflow(\n bin_op,\n left,\n right,\n dest,\n )?;\n }\n\n CheckedBinaryOp(bin_op, ref left, ref right) => {\n \/\/ Due to the extra boolean in the result, we can never reuse the `dest.layout`.\n let left = self.read_value(self.eval_operand(left, None)?)?;\n let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };\n let right = self.read_value(self.eval_operand(right, layout)?)?;\n self.binop_with_overflow(\n bin_op,\n left,\n right,\n dest,\n )?;\n }\n\n UnaryOp(un_op, ref operand) => {\n \/\/ The operand always has the same type as the result.\n let val = self.read_value(self.eval_operand(operand, Some(dest.layout))?)?;\n let val = self.unary_op(un_op, val.to_scalar()?, dest.layout)?;\n self.write_scalar(val, dest)?;\n }\n\n Aggregate(ref kind, ref operands) => {\n let (dest, active_field_index) = match **kind {\n mir::AggregateKind::Adt(adt_def, variant_index, _, _, active_field_index) => {\n self.write_discriminant_index(variant_index, dest)?;\n if adt_def.is_enum() {\n (self.place_downcast(dest, variant_index)?, active_field_index)\n } else {\n (dest, active_field_index)\n }\n }\n _ => (dest, None)\n };\n\n for (i, operand) in operands.iter().enumerate() {\n let op = self.eval_operand(operand, None)?;\n \/\/ Ignore zero-sized fields.\n if !op.layout.is_zst() {\n let field_index = active_field_index.unwrap_or(i);\n let field_dest = self.place_field(dest, field_index as u64)?;\n self.copy_op(op, field_dest)?;\n }\n }\n }\n\n Repeat(ref operand, _) => {\n let op = self.eval_operand(operand, None)?;\n let dest = self.force_allocation(dest)?;\n let length = dest.len(&self)?;\n\n if length > 0 {\n \/\/ write the first\n let first = self.mplace_field(dest, 0)?;\n self.copy_op(op, first.into())?;\n\n if length > 1 {\n \/\/ copy the rest\n let (dest, dest_align) = first.to_scalar_ptr_align();\n let rest = dest.ptr_offset(first.layout.size, &self)?;\n self.memory.copy_repeatedly(\n dest, dest_align, rest, dest_align, first.layout.size, length - 1, true\n )?;\n }\n }\n }\n\n Len(ref place) => {\n \/\/ FIXME(CTFE): don't allow computing the length of arrays in const eval\n let src = self.eval_place(place)?;\n let mplace = self.force_allocation(src)?;\n let len = mplace.len(&self)?;\n let size = self.pointer_size();\n self.write_scalar(\n Scalar::from_uint(len, size),\n dest,\n )?;\n }\n\n Ref(_, _, ref place) => {\n let src = self.eval_place(place)?;\n let val = self.force_allocation(src)?.to_ref();\n self.write_value(val, dest)?;\n }\n\n NullaryOp(mir::NullOp::Box, _) => {\n M::box_alloc(self, dest)?;\n }\n\n NullaryOp(mir::NullOp::SizeOf, ty) => {\n let ty = self.monomorphize(ty, self.substs());\n let layout = 
self.layout_of(ty)?;\n assert!(!layout.is_unsized(),\n \"SizeOf nullary MIR operator called for unsized type\");\n let size = self.pointer_size();\n self.write_scalar(\n Scalar::from_uint(layout.size.bytes(), size),\n dest,\n )?;\n }\n\n Cast(kind, ref operand, cast_ty) => {\n debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest.layout.ty);\n let src = self.eval_operand(operand, None)?;\n self.cast(src, kind, dest)?;\n }\n\n Discriminant(ref place) => {\n let place = self.eval_place(place)?;\n let discr_val = self.read_discriminant(self.place_to_op(place)?)?.0;\n let size = dest.layout.size;\n self.write_scalar(Scalar::from_uint(discr_val, size), dest)?;\n }\n }\n\n self.dump_place(*dest);\n\n Ok(())\n }\n\n fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> EvalResult<'tcx> {\n debug!(\"{:?}\", terminator.kind);\n self.tcx.span = terminator.source_info.span;\n self.memory.tcx.span = terminator.source_info.span;\n\n let old_stack = self.cur_frame();\n let old_bb = self.frame().block;\n self.eval_terminator(terminator)?;\n if !self.stack.is_empty() {\n \/\/ This should change *something*\n debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb);\n debug!(\"\/\/ {:?}\", self.frame().block);\n }\n Ok(())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Use verbose mode for the line regex<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>WIP: client<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Init comimt<commit_after>\/\/ This code is editable and runnable!\nfn main() {\n \/\/ A simple integer calculator:\n \/\/ `+` or `-` means add or subtract by 1\n \/\/ `*` or `\/` means multiply or divide by 2\n\n let program = \"+ + * - \/\";\n let mut accumulator = 0;\n\n for token in program.chars() {\n match token {\n '+' => accumulator += 1,\n '-' => accumulator -= 1,\n '*' => accumulator *= 2,\n '\/' => accumulator \/= 2,\n _ => { \/* ignore everything else *\/ }\n }\n }\n\n println!(\"The program \\\"{}\\\" calculates the value {}\",\n program, accumulator);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(stmt_expr_attributes)]\n#![deny(unused_parens)]\n\n\/\/ Tests that lint attributes on statements\/expressions are\n\/\/ correctly applied to non-builtin early (AST) lints\n\nfn main() {\n #[allow(unused_parens)]\n {\n let _ = (9);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for #34792<commit_after>\/\/ This test is a regression test for #34792\n\n\/\/ check-pass\n\npub struct A;\npub struct B;\n\npub trait Foo {\n type T: PartialEq<A> + PartialEq<B>;\n}\n\npub fn generic<F: Foo>(t: F::T, a: A, b: B) -> bool {\n t == a && t == b\n}\n\npub fn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>And further simplify find::find() to accept &Path<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::middle::region;\nuse transform::{MirPass, MirSource};\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the place: Mutability and region.\nfn place_context<'a, 'tcx, D>(\n place: &Place<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<region::Scope>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Place::*;\n\n match *place {\n Local { .. } => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, _, mutbl) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = place_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => place_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\n\/\/\/ Check if this function contains an unsafe block or is an unsafe function.\nfn fn_contains_unsafe<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource) -> bool {\n use rustc::hir::intravisit::{self, Visitor, FnKind};\n use rustc::hir::map::blocks::FnLikeNode;\n use rustc::hir::map::Node;\n\n \/\/\/ Decide if this is an unsafe block\n fn block_is_unsafe(block: &hir::Block) -> bool {\n use rustc::hir::BlockCheckMode::*;\n\n match block.rules {\n UnsafeBlock(_) | PushUnsafeBlock(_) => true,\n \/\/ For PopUnsafeBlock, we don't actually know -- but we will always also check all\n \/\/ parent blocks, so we can safely declare the PopUnsafeBlock to not be unsafe.\n DefaultBlock | PopUnsafeBlock(_) => false,\n }\n 
}\n\n \/\/\/ Decide if this FnLike is a closure\n fn fn_is_closure<'a>(fn_like: FnLikeNode<'a>) -> bool {\n match fn_like.kind() {\n FnKind::Closure(_) => true,\n FnKind::Method(..) | FnKind::ItemFn(..) => false,\n }\n }\n\n let node_id = tcx.hir.as_local_node_id(src.def_id).unwrap();\n let fn_like = match tcx.hir.body_owner_kind(node_id) {\n hir::BodyOwnerKind::Fn => {\n match FnLikeNode::from_node(tcx.hir.get(node_id)) {\n Some(fn_like) => fn_like,\n None => return false, \/\/ e.g. struct ctor shims -- such auto-generated code cannot\n \/\/ contain unsafe.\n }\n },\n _ => return false, \/\/ only functions can have unsafe\n };\n\n \/\/ Test if the function is marked unsafe.\n if fn_like.unsafety() == hir::Unsafety::Unsafe {\n return true;\n }\n\n \/\/ For closures, we need to walk up the parents and see if we are inside an unsafe fn or\n \/\/ unsafe block.\n if fn_is_closure(fn_like) {\n let mut cur = fn_like.id();\n loop {\n \/\/ Go further upwards.\n cur = tcx.hir.get_parent_node(cur);\n let node = tcx.hir.get(cur);\n \/\/ Check if this is an unsafe function\n if let Some(fn_like) = FnLikeNode::from_node(node) {\n if !fn_is_closure(fn_like) {\n if fn_like.unsafety() == hir::Unsafety::Unsafe {\n return true;\n }\n }\n }\n \/\/ Check if this is an unsafe block, or an item\n match node {\n Node::NodeExpr(&hir::Expr { node: hir::ExprKind::Block(ref block, _), ..}) => {\n if block_is_unsafe(&*block) {\n \/\/ Found an unsafe block, we can bail out here.\n return true;\n }\n }\n Node::NodeItem(..) => {\n \/\/ No walking up beyond items. This makes sure the loop always terminates.\n break;\n }\n _ => {},\n }\n }\n }\n\n \/\/ Visit the entire body of the function and check for unsafe blocks in there\n struct FindUnsafe {\n found_unsafe: bool,\n }\n let mut finder = FindUnsafe { found_unsafe: false };\n \/\/ Run the visitor on the NodeId we got. Seems like there is no uniform way to do that.\n finder.visit_body(tcx.hir.body(fn_like.body()));\n\n impl<'tcx> Visitor<'tcx> for FindUnsafe {\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {\n intravisit::NestedVisitorMap::None\n }\n\n fn visit_block(&mut self, b: &'tcx hir::Block) {\n if self.found_unsafe { return; } \/\/ short-circuit\n\n if block_is_unsafe(b) {\n \/\/ We found an unsafe block. We can stop searching.\n self.found_unsafe = true;\n } else {\n \/\/ No unsafe block here, go on searching.\n intravisit::walk_block(self, b);\n }\n }\n }\n\n finder.found_unsafe\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &mut Mir<'tcx>)\n {\n let emit_validate = tcx.sess.opts.debugging_opts.mir_emit_validate;\n if emit_validate == 0 {\n return;\n }\n let restricted_validation = emit_validate == 1 && fn_contains_unsafe(tcx, src);\n let local_decls = mir.local_decls.clone(); \/\/ FIXME: Find a way to get rid of this clone.\n\n \/\/ Convert a place to a validation operand.\n let place_to_operand = |place: Place<'tcx>| -> ValidationOperand<'tcx, Place<'tcx>> {\n let (re, mutbl) = place_context(&place, &local_decls, tcx);\n let ty = place.ty(&local_decls, tcx).to_ty(tcx);\n ValidationOperand { place, ty, re, mutbl }\n };\n\n \/\/ Emit an Acquire at the beginning of the given block. 
If we are in restricted emission\n \/\/ mode (mir_emit_validate=1), also emit a Release immediately after the Acquire.\n let emit_acquire = |block: &mut BasicBlockData<'tcx>, source_info, operands: Vec<_>| {\n if operands.len() == 0 {\n return; \/\/ Nothing to do\n }\n \/\/ Emit the release first, to avoid cloning if we do not emit it\n if restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release, operands.clone()),\n };\n block.statements.insert(0, release_stmt);\n }\n \/\/ Now, the acquire\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, operands),\n };\n block.statements.insert(0, acquire_stmt);\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n {\n let source_info = SourceInfo {\n scope: OUTERMOST_SOURCE_SCOPE,\n span: mir.span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n };\n \/\/ Gather all arguments, skip return value.\n let operands = mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)\n .map(|(local, _)| place_to_operand(Place::Local(local))).collect();\n emit_acquire(&mut mir.basic_blocks_mut()[START_BLOCK], source_info, operands);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Place<'tcx>, BasicBlock)> = Vec::new();\n for block_data in mir.basic_blocks_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments *and* the return value.\n \/\/ The callee may write into the return value! Note that this relies\n \/\/ on \"release of uninitialized\" to be a NOP.\n if !restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n destination.iter().map(|dest| place_to_operand(dest.0.clone()))\n .chain(\n args.iter().filter_map(|op| {\n match op {\n &Operand::Copy(ref place) |\n &Operand::Move(ref place) =>\n Some(place_to_operand(place.clone())),\n &Operand::Constant(..) => { None },\n }\n })\n ).collect())\n };\n block_data.statements.push(release_stmt);\n }\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref place, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref place, .. 
},\n source_info }) => {\n \/\/ Before the call: Release all arguments\n if !restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![place_to_operand(place.clone())]),\n };\n block_data.statements.push(release_stmt);\n }\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_place, dest_block) in returns {\n emit_acquire(\n &mut mir.basic_blocks_mut()[dest_block],\n source_info,\n vec![place_to_operand(dest_place)]\n );\n }\n\n if restricted_validation {\n \/\/ No part 3 for us.\n return;\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref and Cast. Again an iterator does not seem very\n \/\/ suited as we need to add new statements before and after each Ref.\n for block_data in mir.basic_blocks_mut() {\n \/\/ We want to insert statements around Ref commands as we iterate. To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n match block_data.statements[i].kind {\n \/\/ When the borrow of this ref expires, we need to recover validation.\n StatementKind::Assign(_, Rvalue::Ref(_, _, _)) => {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_place, re, src_place) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_place,\n Rvalue::Ref(re, _, ref src_place)) => {\n (dest_place.clone(), re, src_place.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n \/\/ Do an acquire of the result -- but only what it points to, so add a Deref\n \/\/ projection.\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![place_to_operand(dest_place.deref())]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![place_to_operand(src_place)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n \/\/ Casts can change what validation does (e.g. 
unsizing)\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Copy(_), _)) |\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Move(_), _))\n if kind != CastKind::Misc =>\n {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_place, src_place) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_place,\n Rvalue::Cast(_, Operand::Copy(ref src_place), _)) |\n StatementKind::Assign(ref dest_place,\n Rvalue::Cast(_, Operand::Move(ref src_place), _)) =>\n {\n (dest_place.clone(), src_place.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n\n \/\/ Acquire of the result\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![place_to_operand(dest_place)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ Release of the input\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![place_to_operand(src_place)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n _ => {},\n }\n }\n }\n }\n}\n<commit_msg>Auto merge of #52364 - ljedrz:mir_remove_clone, r=RalfJung<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.\n\/\/! It has to be run really early, before transformations like inlining, because\n\/\/! introducing these calls *adds* UB -- so, conceptually, this pass is actually part\n\/\/! of MIR building, and only after this pass we think of the program has having the\n\/\/! normal MIR semantics.\n\nuse rustc::ty::{self, TyCtxt, RegionKind};\nuse rustc::hir;\nuse rustc::mir::*;\nuse rustc::middle::region;\nuse transform::{MirPass, MirSource};\n\npub struct AddValidation;\n\n\/\/\/ Determine the \"context\" of the place: Mutability and region.\nfn place_context<'a, 'tcx, D>(\n place: &Place<'tcx>,\n local_decls: &D,\n tcx: TyCtxt<'a, 'tcx, 'tcx>\n) -> (Option<region::Scope>, hir::Mutability)\n where D: HasLocalDecls<'tcx>\n{\n use rustc::mir::Place::*;\n\n match *place {\n Local { .. 
} => (None, hir::MutMutable),\n Static(_) => (None, hir::MutImmutable),\n Projection(ref proj) => {\n match proj.elem {\n ProjectionElem::Deref => {\n \/\/ Computing the inside the recursion makes this quadratic.\n \/\/ We don't expect deep paths though.\n let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);\n \/\/ A Deref projection may restrict the context, this depends on the type\n \/\/ being deref'd.\n let context = match ty.sty {\n ty::TyRef(re, _, mutbl) => {\n let re = match re {\n &RegionKind::ReScope(ce) => Some(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => None\n };\n (re, mutbl)\n }\n ty::TyRawPtr(_) =>\n \/\/ There is no guarantee behind even a mutable raw pointer,\n \/\/ no write locks are acquired there, so we also don't want to\n \/\/ release any.\n (None, hir::MutImmutable),\n ty::TyAdt(adt, _) if adt.is_box() => (None, hir::MutMutable),\n _ => bug!(\"Deref on a non-pointer type {:?}\", ty),\n };\n \/\/ \"Intersect\" this restriction with proj.base.\n if let (Some(_), hir::MutImmutable) = context {\n \/\/ This is already as restricted as it gets, no need to even recurse\n context\n } else {\n let base_context = place_context(&proj.base, local_decls, tcx);\n \/\/ The region of the outermost Deref is always most restrictive.\n let re = context.0.or(base_context.0);\n let mutbl = context.1.and(base_context.1);\n (re, mutbl)\n }\n\n }\n _ => place_context(&proj.base, local_decls, tcx),\n }\n }\n }\n}\n\n\/\/\/ Check if this function contains an unsafe block or is an unsafe function.\nfn fn_contains_unsafe<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource) -> bool {\n use rustc::hir::intravisit::{self, Visitor, FnKind};\n use rustc::hir::map::blocks::FnLikeNode;\n use rustc::hir::map::Node;\n\n \/\/\/ Decide if this is an unsafe block\n fn block_is_unsafe(block: &hir::Block) -> bool {\n use rustc::hir::BlockCheckMode::*;\n\n match block.rules {\n UnsafeBlock(_) | PushUnsafeBlock(_) => true,\n \/\/ For PopUnsafeBlock, we don't actually know -- but we will always also check all\n \/\/ parent blocks, so we can safely declare the PopUnsafeBlock to not be unsafe.\n DefaultBlock | PopUnsafeBlock(_) => false,\n }\n }\n\n \/\/\/ Decide if this FnLike is a closure\n fn fn_is_closure<'a>(fn_like: FnLikeNode<'a>) -> bool {\n match fn_like.kind() {\n FnKind::Closure(_) => true,\n FnKind::Method(..) | FnKind::ItemFn(..) => false,\n }\n }\n\n let node_id = tcx.hir.as_local_node_id(src.def_id).unwrap();\n let fn_like = match tcx.hir.body_owner_kind(node_id) {\n hir::BodyOwnerKind::Fn => {\n match FnLikeNode::from_node(tcx.hir.get(node_id)) {\n Some(fn_like) => fn_like,\n None => return false, \/\/ e.g. 
struct ctor shims -- such auto-generated code cannot\n \/\/ contain unsafe.\n }\n },\n _ => return false, \/\/ only functions can have unsafe\n };\n\n \/\/ Test if the function is marked unsafe.\n if fn_like.unsafety() == hir::Unsafety::Unsafe {\n return true;\n }\n\n \/\/ For closures, we need to walk up the parents and see if we are inside an unsafe fn or\n \/\/ unsafe block.\n if fn_is_closure(fn_like) {\n let mut cur = fn_like.id();\n loop {\n \/\/ Go further upwards.\n cur = tcx.hir.get_parent_node(cur);\n let node = tcx.hir.get(cur);\n \/\/ Check if this is an unsafe function\n if let Some(fn_like) = FnLikeNode::from_node(node) {\n if !fn_is_closure(fn_like) {\n if fn_like.unsafety() == hir::Unsafety::Unsafe {\n return true;\n }\n }\n }\n \/\/ Check if this is an unsafe block, or an item\n match node {\n Node::NodeExpr(&hir::Expr { node: hir::ExprKind::Block(ref block, _), ..}) => {\n if block_is_unsafe(&*block) {\n \/\/ Found an unsafe block, we can bail out here.\n return true;\n }\n }\n Node::NodeItem(..) => {\n \/\/ No walking up beyond items. This makes sure the loop always terminates.\n break;\n }\n _ => {},\n }\n }\n }\n\n \/\/ Visit the entire body of the function and check for unsafe blocks in there\n struct FindUnsafe {\n found_unsafe: bool,\n }\n let mut finder = FindUnsafe { found_unsafe: false };\n \/\/ Run the visitor on the NodeId we got. Seems like there is no uniform way to do that.\n finder.visit_body(tcx.hir.body(fn_like.body()));\n\n impl<'tcx> Visitor<'tcx> for FindUnsafe {\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {\n intravisit::NestedVisitorMap::None\n }\n\n fn visit_block(&mut self, b: &'tcx hir::Block) {\n if self.found_unsafe { return; } \/\/ short-circuit\n\n if block_is_unsafe(b) {\n \/\/ We found an unsafe block. We can stop searching.\n self.found_unsafe = true;\n } else {\n \/\/ No unsafe block here, go on searching.\n intravisit::walk_block(self, b);\n }\n }\n }\n\n finder.found_unsafe\n}\n\nimpl MirPass for AddValidation {\n fn run_pass<'a, 'tcx>(&self,\n tcx: TyCtxt<'a, 'tcx, 'tcx>,\n src: MirSource,\n mir: &mut Mir<'tcx>)\n {\n let emit_validate = tcx.sess.opts.debugging_opts.mir_emit_validate;\n if emit_validate == 0 {\n return;\n }\n let restricted_validation = emit_validate == 1 && fn_contains_unsafe(tcx, src);\n let (span, arg_count) = (mir.span, mir.arg_count);\n let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();\n\n \/\/ Convert a place to a validation operand.\n let place_to_operand = |place: Place<'tcx>| -> ValidationOperand<'tcx, Place<'tcx>> {\n let (re, mutbl) = place_context(&place, local_decls, tcx);\n let ty = place.ty(local_decls, tcx).to_ty(tcx);\n ValidationOperand { place, ty, re, mutbl }\n };\n\n \/\/ Emit an Acquire at the beginning of the given block. 
If we are in restricted emission\n \/\/ mode (mir_emit_validate=1), also emit a Release immediately after the Acquire.\n let emit_acquire = |block: &mut BasicBlockData<'tcx>, source_info, operands: Vec<_>| {\n if operands.len() == 0 {\n return; \/\/ Nothing to do\n }\n \/\/ Emit the release first, to avoid cloning if we do not emit it\n if restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release, operands.clone()),\n };\n block.statements.insert(0, release_stmt);\n }\n \/\/ Now, the acquire\n let acquire_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire, operands),\n };\n block.statements.insert(0, acquire_stmt);\n };\n\n \/\/ PART 1\n \/\/ Add an AcquireValid at the beginning of the start block.\n {\n let source_info = SourceInfo {\n scope: OUTERMOST_SOURCE_SCOPE,\n span: span, \/\/ FIXME: Consider using just the span covering the function\n \/\/ argument declaration.\n };\n \/\/ Gather all arguments, skip return value.\n let operands = local_decls.iter_enumerated().skip(1).take(arg_count)\n .map(|(local, _)| place_to_operand(Place::Local(local))).collect();\n emit_acquire(&mut basic_blocks[START_BLOCK], source_info, operands);\n }\n\n \/\/ PART 2\n \/\/ Add ReleaseValid\/AcquireValid around function call terminators. We don't use a visitor\n \/\/ because we need to access the block that a Call jumps to.\n let mut returns : Vec<(SourceInfo, Place<'tcx>, BasicBlock)> = Vec::new();\n for block_data in basic_blocks.iter_mut() {\n match block_data.terminator {\n Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments *and* the return value.\n \/\/ The callee may write into the return value! Note that this relies\n \/\/ on \"release of uninitialized\" to be a NOP.\n if !restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n destination.iter().map(|dest| place_to_operand(dest.0.clone()))\n .chain(\n args.iter().filter_map(|op| {\n match op {\n &Operand::Copy(ref place) |\n &Operand::Move(ref place) =>\n Some(place_to_operand(place.clone())),\n &Operand::Constant(..) => { None },\n }\n })\n ).collect())\n };\n block_data.statements.push(release_stmt);\n }\n \/\/ Remember the return destination for later\n if let &Some(ref destination) = destination {\n returns.push((source_info, destination.0.clone(), destination.1));\n }\n }\n Some(Terminator { kind: TerminatorKind::Drop { location: ref place, .. },\n source_info }) |\n Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref place, .. },\n source_info }) => {\n \/\/ Before the call: Release all arguments\n if !restricted_validation {\n let release_stmt = Statement {\n source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![place_to_operand(place.clone())]),\n };\n block_data.statements.push(release_stmt);\n }\n \/\/ drop doesn't return anything, so we need no acquire.\n }\n _ => {\n \/\/ Not a block ending in a Call -> ignore.\n }\n }\n }\n \/\/ Now we go over the returns we collected to acquire the return values.\n for (source_info, dest_place, dest_block) in returns {\n emit_acquire(\n &mut basic_blocks[dest_block],\n source_info,\n vec![place_to_operand(dest_place)]\n );\n }\n\n if restricted_validation {\n \/\/ No part 3 for us.\n return;\n }\n\n \/\/ PART 3\n \/\/ Add ReleaseValid\/AcquireValid around Ref and Cast. 
Again an iterator does not seem very\n \/\/ suited as we need to add new statements before and after each Ref.\n for block_data in basic_blocks {\n \/\/ We want to insert statements around Ref commands as we iterate. To this end, we\n \/\/ iterate backwards using indices.\n for i in (0..block_data.statements.len()).rev() {\n match block_data.statements[i].kind {\n \/\/ When the borrow of this ref expires, we need to recover validation.\n StatementKind::Assign(_, Rvalue::Ref(_, _, _)) => {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_place, re, src_place) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_place,\n Rvalue::Ref(re, _, ref src_place)) => {\n (dest_place.clone(), re, src_place.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n \/\/ So this is a ref, and we got all the data we wanted.\n \/\/ Do an acquire of the result -- but only what it points to, so add a Deref\n \/\/ projection.\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![place_to_operand(dest_place.deref())]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ The source is released until the region of the borrow ends.\n let op = match re {\n &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),\n &RegionKind::ReErased =>\n bug!(\"AddValidation pass must be run before erasing lifetimes\"),\n _ => ValidationOp::Release,\n };\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(op, vec![place_to_operand(src_place)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n \/\/ Casts can change what validation does (e.g. unsizing)\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Copy(_), _)) |\n StatementKind::Assign(_, Rvalue::Cast(kind, Operand::Move(_), _))\n if kind != CastKind::Misc =>\n {\n \/\/ Due to a lack of NLL; we can't capture anything directly here.\n \/\/ Instead, we have to re-match and clone there.\n let (dest_place, src_place) = match block_data.statements[i].kind {\n StatementKind::Assign(ref dest_place,\n Rvalue::Cast(_, Operand::Copy(ref src_place), _)) |\n StatementKind::Assign(ref dest_place,\n Rvalue::Cast(_, Operand::Move(ref src_place), _)) =>\n {\n (dest_place.clone(), src_place.clone())\n },\n _ => bug!(\"We already matched this.\"),\n };\n\n \/\/ Acquire of the result\n let acquire_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Acquire,\n vec![place_to_operand(dest_place)]),\n };\n block_data.statements.insert(i+1, acquire_stmt);\n\n \/\/ Release of the input\n let release_stmt = Statement {\n source_info: block_data.statements[i].source_info,\n kind: StatementKind::Validate(ValidationOp::Release,\n vec![place_to_operand(src_place)]),\n };\n block_data.statements.insert(i, release_stmt);\n }\n _ => {},\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! System Mutexes\n\/\/!\n\/\/! The Windows implementation of mutexes is a little odd and it may not be\n\/\/! immediately obvious what's going on. The primary oddness is that SRWLock is\n\/\/! used instead of CriticalSection, and this is done because:\n\/\/!\n\/\/! 1. SRWLock is several times faster than CriticalSection according to\n\/\/! benchmarks performed on both Windows 8 and Windows 7.\n\/\/!\n\/\/! 2. CriticalSection allows recursive locking while SRWLock deadlocks. The\n\/\/! Unix implementation deadlocks so consistency is preferred. See #19962 for\n\/\/! more details.\n\/\/!\n\/\/! 3. While CriticalSection is fair and SRWLock is not, the current Rust policy\n\/\/! is that there are no guarantees of fairness.\n\/\/!\n\/\/! The downside of this approach, however, is that SRWLock is not available on\n\/\/! Windows XP, so we continue to have a fallback implementation where\n\/\/! CriticalSection is used and we keep track of who's holding the mutex to\n\/\/! detect recursive locks.\n\nuse cell::UnsafeCell;\nuse mem::{self, MaybeUninit};\nuse sync::atomic::{AtomicUsize, Ordering};\nuse sys::c;\nuse sys::compat;\n\npub struct Mutex {\n lock: AtomicUsize,\n held: UnsafeCell<bool>,\n}\n\nunsafe impl Send for Mutex {}\nunsafe impl Sync for Mutex {}\n\n#[derive(Clone, Copy)]\nenum Kind {\n SRWLock = 1,\n CriticalSection = 2,\n}\n\n#[inline]\npub unsafe fn raw(m: &Mutex) -> c::PSRWLOCK {\n debug_assert!(mem::size_of::<c::SRWLOCK>() <= mem::size_of_val(&m.lock));\n &m.lock as *const _ as *mut _\n}\n\nimpl Mutex {\n pub const fn new() -> Mutex {\n Mutex {\n \/\/ This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly\n \/\/ initializing an SRWLOCK here.\n lock: AtomicUsize::new(0),\n held: UnsafeCell::new(false),\n }\n }\n #[inline]\n pub unsafe fn init(&mut self) {}\n pub unsafe fn lock(&self) {\n match kind() {\n Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),\n Kind::CriticalSection => {\n let re = self.remutex();\n (*re).lock();\n if !self.flag_locked() {\n (*re).unlock();\n panic!(\"cannot recursively lock a mutex\");\n }\n }\n }\n }\n pub unsafe fn try_lock(&self) -> bool {\n match kind() {\n Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,\n Kind::CriticalSection => {\n let re = self.remutex();\n if !(*re).try_lock() {\n false\n } else if self.flag_locked() {\n true\n } else {\n (*re).unlock();\n false\n }\n }\n }\n }\n pub unsafe fn unlock(&self) {\n *self.held.get() = false;\n match kind() {\n Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),\n Kind::CriticalSection => (*self.remutex()).unlock(),\n }\n }\n pub unsafe fn destroy(&self) {\n match kind() {\n Kind::SRWLock => {}\n Kind::CriticalSection => {\n match self.lock.load(Ordering::SeqCst) {\n 0 => {}\n n => { Box::from_raw(n as *mut ReentrantMutex).destroy(); }\n }\n }\n }\n }\n\n unsafe fn remutex(&self) -> *mut ReentrantMutex {\n match self.lock.load(Ordering::SeqCst) {\n 0 => {}\n n => return n as *mut _,\n }\n let mut re = box ReentrantMutex::uninitialized();\n re.init();\n let re = Box::into_raw(re);\n match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {\n 0 => re,\n n => { Box::from_raw(re).destroy(); n as *mut _ }\n }\n }\n\n unsafe fn flag_locked(&self) -> bool {\n if *self.held.get() {\n false\n } else {\n *self.held.get() = true;\n true\n }\n\n }\n}\n\nfn kind() -> Kind {\n static KIND: AtomicUsize = AtomicUsize::new(0);\n\n let val = 
KIND.load(Ordering::SeqCst);\n if val == Kind::SRWLock as usize {\n return Kind::SRWLock\n } else if val == Kind::CriticalSection as usize {\n return Kind::CriticalSection\n }\n\n let ret = match compat::lookup(\"kernel32\", \"AcquireSRWLockExclusive\") {\n None => Kind::CriticalSection,\n Some(..) => Kind::SRWLock,\n };\n KIND.store(ret as usize, Ordering::SeqCst);\n return ret;\n}\n\npub struct ReentrantMutex { inner: UnsafeCell<MaybeUninit<c::CRITICAL_SECTION>> }\n\nunsafe impl Send for ReentrantMutex {}\nunsafe impl Sync for ReentrantMutex {}\n\nimpl ReentrantMutex {\n pub fn uninitialized() -> ReentrantMutex {\n ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninitialized()) }\n }\n\n pub unsafe fn init(&mut self) {\n c::InitializeCriticalSection((&mut *self.inner.get()).as_mut_ptr());\n }\n\n pub unsafe fn lock(&self) {\n \/\/ `init` must have been called, so this is now initialized and\n \/\/ we can call `get_mut`.\n c::EnterCriticalSection((&mut *self.inner.get()).get_mut());\n }\n\n #[inline]\n pub unsafe fn try_lock(&self) -> bool {\n \/\/ `init` must have been called, so this is now initialized and\n \/\/ we can call `get_mut`.\n c::TryEnterCriticalSection((&mut *self.inner.get()).get_mut()) != 0\n }\n\n pub unsafe fn unlock(&self) {\n \/\/ `init` must have been called, so this is now initialized and\n \/\/ we can call `get_mut`.\n c::LeaveCriticalSection((&mut *self.inner.get()).get_mut());\n }\n\n pub unsafe fn destroy(&self) {\n \/\/ `init` must have been called, so this is now initialized and\n \/\/ we can call `get_mut`.\n c::DeleteCriticalSection((&mut *self.inner.get()).get_mut());\n }\n}\n<commit_msg>no reason to use mutable references here at all<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/! System Mutexes\n\/\/!\n\/\/! The Windows implementation of mutexes is a little odd and it may not be\n\/\/! immediately obvious what's going on. The primary oddness is that SRWLock is\n\/\/! used instead of CriticalSection, and this is done because:\n\/\/!\n\/\/! 1. SRWLock is several times faster than CriticalSection according to\n\/\/! benchmarks performed on both Windows 8 and Windows 7.\n\/\/!\n\/\/! 2. CriticalSection allows recursive locking while SRWLock deadlocks. The\n\/\/! Unix implementation deadlocks so consistency is preferred. See #19962 for\n\/\/! more details.\n\/\/!\n\/\/! 3. While CriticalSection is fair and SRWLock is not, the current Rust policy\n\/\/! is that there are no guarantees of fairness.\n\/\/!\n\/\/! The downside of this approach, however, is that SRWLock is not available on\n\/\/! Windows XP, so we continue to have a fallback implementation where\n\/\/! CriticalSection is used and we keep track of who's holding the mutex to\n\/\/! 
detect recursive locks.\n\nuse cell::UnsafeCell;\nuse mem::{self, MaybeUninit};\nuse sync::atomic::{AtomicUsize, Ordering};\nuse sys::c;\nuse sys::compat;\n\npub struct Mutex {\n lock: AtomicUsize,\n held: UnsafeCell<bool>,\n}\n\nunsafe impl Send for Mutex {}\nunsafe impl Sync for Mutex {}\n\n#[derive(Clone, Copy)]\nenum Kind {\n SRWLock = 1,\n CriticalSection = 2,\n}\n\n#[inline]\npub unsafe fn raw(m: &Mutex) -> c::PSRWLOCK {\n debug_assert!(mem::size_of::<c::SRWLOCK>() <= mem::size_of_val(&m.lock));\n &m.lock as *const _ as *mut _\n}\n\nimpl Mutex {\n pub const fn new() -> Mutex {\n Mutex {\n \/\/ This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly\n \/\/ initializing an SRWLOCK here.\n lock: AtomicUsize::new(0),\n held: UnsafeCell::new(false),\n }\n }\n #[inline]\n pub unsafe fn init(&mut self) {}\n pub unsafe fn lock(&self) {\n match kind() {\n Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),\n Kind::CriticalSection => {\n let re = self.remutex();\n (*re).lock();\n if !self.flag_locked() {\n (*re).unlock();\n panic!(\"cannot recursively lock a mutex\");\n }\n }\n }\n }\n pub unsafe fn try_lock(&self) -> bool {\n match kind() {\n Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,\n Kind::CriticalSection => {\n let re = self.remutex();\n if !(*re).try_lock() {\n false\n } else if self.flag_locked() {\n true\n } else {\n (*re).unlock();\n false\n }\n }\n }\n }\n pub unsafe fn unlock(&self) {\n *self.held.get() = false;\n match kind() {\n Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),\n Kind::CriticalSection => (*self.remutex()).unlock(),\n }\n }\n pub unsafe fn destroy(&self) {\n match kind() {\n Kind::SRWLock => {}\n Kind::CriticalSection => {\n match self.lock.load(Ordering::SeqCst) {\n 0 => {}\n n => { Box::from_raw(n as *mut ReentrantMutex).destroy(); }\n }\n }\n }\n }\n\n unsafe fn remutex(&self) -> *mut ReentrantMutex {\n match self.lock.load(Ordering::SeqCst) {\n 0 => {}\n n => return n as *mut _,\n }\n let mut re = box ReentrantMutex::uninitialized();\n re.init();\n let re = Box::into_raw(re);\n match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {\n 0 => re,\n n => { Box::from_raw(re).destroy(); n as *mut _ }\n }\n }\n\n unsafe fn flag_locked(&self) -> bool {\n if *self.held.get() {\n false\n } else {\n *self.held.get() = true;\n true\n }\n\n }\n}\n\nfn kind() -> Kind {\n static KIND: AtomicUsize = AtomicUsize::new(0);\n\n let val = KIND.load(Ordering::SeqCst);\n if val == Kind::SRWLock as usize {\n return Kind::SRWLock\n } else if val == Kind::CriticalSection as usize {\n return Kind::CriticalSection\n }\n\n let ret = match compat::lookup(\"kernel32\", \"AcquireSRWLockExclusive\") {\n None => Kind::CriticalSection,\n Some(..) 
=> Kind::SRWLock,\n };\n KIND.store(ret as usize, Ordering::SeqCst);\n return ret;\n}\n\npub struct ReentrantMutex { inner: UnsafeCell<MaybeUninit<c::CRITICAL_SECTION>> }\n\nunsafe impl Send for ReentrantMutex {}\nunsafe impl Sync for ReentrantMutex {}\n\nimpl ReentrantMutex {\n pub fn uninitialized() -> ReentrantMutex {\n ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninitialized()) }\n }\n\n pub unsafe fn init(&mut self) {\n c::InitializeCriticalSection((&mut *self.inner.get()).as_mut_ptr());\n }\n\n pub unsafe fn lock(&self) {\n c::EnterCriticalSection((&mut *self.inner.get()).as_mut_ptr());\n }\n\n #[inline]\n pub unsafe fn try_lock(&self) -> bool {\n c::TryEnterCriticalSection((&mut *self.inner.get()).as_mut_ptr()) != 0\n }\n\n pub unsafe fn unlock(&self) {\n c::LeaveCriticalSection((&mut *self.inner.get()).as_mut_ptr());\n }\n\n pub unsafe fn destroy(&self) {\n c::DeleteCriticalSection((&mut *self.inner.get()).as_mut_ptr());\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test we reject equivalent signatures with more than one argument<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(universal_impl_trait)]\nuse std::fmt::Debug;\n\ntrait Foo {\n fn foo<A: Debug>(&self, a: &A, b: &impl Debug);\n}\n\nimpl Foo for () {\n fn foo<B: Debug>(&self, a: &impl Debug, b: &B) { }\n \/\/~^ ERROR method `foo` has an incompatible type for trait\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android needs extra network permissions\n\/\/ ignore-bitrig system ulimit (Too many open files)\n\/\/ ignore-netbsd system ulimit (Too many open files)\n\/\/ ignore-openbsd system ulimit (Too many open files)\n\/\/ ignore-emscripten no threads or sockets support\n\nuse std::io::prelude::*;\nuse std::net::{TcpListener, TcpStream};\nuse std::process;\nuse std::sync::mpsc::channel;\nuse std::thread::{self, Builder};\n\nfn main() {\n \/\/ This test has a chance to time out, try to not let it time out\n thread::spawn(move|| -> () {\n thread::sleep_ms(30 * 1000);\n process::exit(1);\n });\n\n let mut listener = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = listener.local_addr().unwrap();\n thread::spawn(move || -> () {\n loop {\n let mut stream = match listener.accept() {\n Ok(stream) => stream.0,\n Err(error) => continue,\n };\n stream.read(&mut [0]);\n stream.write(&[2]);\n }\n });\n\n let (tx, rx) = channel();\n for _ in 0..1000 {\n let tx = tx.clone();\n Builder::new().stack_size(64 * 1024).spawn(move|| {\n match TcpStream::connect(addr) {\n Ok(mut stream) => {\n stream.write(&[1]);\n stream.read(&mut [0]);\n },\n Err(..) => {}\n }\n tx.send(()).unwrap();\n });\n }\n\n \/\/ Wait for all clients to exit, but don't wait for the server to exit. The\n \/\/ server just runs infinitely.\n drop(tx);\n for _ in 0..1000 {\n rx.recv().unwrap();\n }\n process::exit(0);\n}\n<commit_msg>test: assert that all tcp-stress threads get spawned<commit_after>\/\/ Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-android needs extra network permissions\n\/\/ ignore-bitrig system ulimit (Too many open files)\n\/\/ ignore-netbsd system ulimit (Too many open files)\n\/\/ ignore-openbsd system ulimit (Too many open files)\n\/\/ ignore-emscripten no threads or sockets support\n\nuse std::io::prelude::*;\nuse std::net::{TcpListener, TcpStream};\nuse std::process;\nuse std::sync::mpsc::channel;\nuse std::thread::{self, Builder};\n\nfn main() {\n \/\/ This test has a chance to time out, try to not let it time out\n thread::spawn(move|| -> () {\n thread::sleep_ms(30 * 1000);\n process::exit(1);\n });\n\n let mut listener = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n let addr = listener.local_addr().unwrap();\n thread::spawn(move || -> () {\n loop {\n let mut stream = match listener.accept() {\n Ok(stream) => stream.0,\n Err(error) => continue,\n };\n stream.read(&mut [0]);\n stream.write(&[2]);\n }\n });\n\n let (tx, rx) = channel();\n let mut spawned_cnt = 0;\n for _ in 0..1000 {\n let tx = tx.clone();\n let res = Builder::new().stack_size(64 * 1024).spawn(move|| {\n match TcpStream::connect(addr) {\n Ok(mut stream) => {\n stream.write(&[1]);\n stream.read(&mut [0]);\n },\n Err(..) => {}\n }\n tx.send(()).unwrap();\n });\n if let Ok(_) = res {\n spawned_cnt += 1;\n };\n }\n\n \/\/ Wait for all clients to exit, but don't wait for the server to exit. 
The\n \/\/ server just runs infinitely.\n drop(tx);\n for _ in 0..spawned_cnt {\n rx.recv().unwrap();\n }\n assert_eq!(spawned_cnt, 1000);\n process::exit(0);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>ICH: Add test case for match-expressions<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n\/\/ This test case tests the incremental compilation hash (ICH) implementation\n\/\/ for match expressions.\n\n\/\/ The general pattern followed here is: Change one thing between rev1 and rev2\n\/\/ and make sure that the hash has changed, then change nothing between rev2 and\n\/\/ rev3 and make sure that the hash has not changed.\n\n\/\/ must-compile-successfully\n\/\/ revisions: cfail1 cfail2 cfail3\n\/\/ compile-flags: -Z query-dep-graph\n\n\n#![allow(warnings)]\n#![feature(rustc_attrs)]\n#![crate_type=\"rlib\"]\n\n\/\/ Add Arm ---------------------------------------------------------------------\n#[cfg(cfail1)]\npub fn add_arm(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => 100,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_arm(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n 2 => 2,\n _ => 100,\n }\n}\n\n\n\n\/\/ Change Order Of Arms --------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_order_of_arms(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => 100,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_order_of_arms(x: u32) -> u32 {\n match x {\n 1 => 1,\n 0 => 0,\n _ => 100,\n }\n}\n\n\n\n\/\/ Add Guard Clause ------------------------------------------------------------\n#[cfg(cfail1)]\npub fn add_guard_clause(x: u32, y: bool) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => 100,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_guard_clause(x: u32, y: bool) -> u32 {\n match x {\n 0 => 0,\n 1 if y => 1,\n _ => 100,\n }\n}\n\n\n\n\/\/ Change Guard Clause ------------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_guard_clause(x: u32, y: bool) -> u32 {\n match x {\n 0 => 0,\n 1 if y => 1,\n _ => 100,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_guard_clause(x: u32, y: bool) -> u32 {\n match x {\n 0 => 0,\n 1 if !y => 1,\n _ => 100,\n }\n}\n\n\n\n\/\/ Add @-Binding ---------------------------------------------------------------\n#[cfg(cfail1)]\npub fn add_at_binding(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => x,\n 
}\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_at_binding(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n x @ _ => x,\n }\n}\n\n\n\n\/\/ Change Name of @-Binding ----------------------------------------------------\n#[cfg(cfail1)]\npub fn change_name_of_at_binding(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n x @ _ => 7,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_name_of_at_binding(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n y @ _ => 7,\n }\n}\n\n\n\n\/\/ Change Simple Binding To Pattern --------------------------------------------\n#[cfg(cfail1)]\npub fn change_simple_name_to_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (0, 0) => 0,\n a => 1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_simple_name_to_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (0, 0) => 0,\n (x, y) => 1\n }\n}\n\n\n\n\/\/ Change Name In Pattern ------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_name_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (a, 0) => 0,\n (a, 1) => a,\n _ => 100,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_name_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (b, 0) => 0,\n (a, 1) => a,\n _ => 100,\n }\n}\n\n\n\n\/\/ Change Mutability Of Binding In Pattern -------------------------------------\n#[cfg(cfail1)]\npub fn change_mutability_of_binding_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (a, 0) => 0,\n _ => 1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_mutability_of_binding_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (mut a, 0) => 0,\n _ => 1\n }\n}\n\n\n\n\/\/ Add `ref` To Binding In Pattern -------------------------------------\n#[cfg(cfail1)]\npub fn add_ref_to_binding_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (a, 0) => 0,\n _ => 1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_ref_to_binding_in_pattern(x: u32) -> u32 {\n match (x, x & 1) {\n (ref a, 0) => 0,\n _ => 1,\n }\n}\n\n\n\n\/\/ Add `&` To Binding In Pattern -------------------------------------\n#[cfg(cfail1)]\npub fn add_amp_to_binding_in_pattern(x: u32) -> u32 {\n match (&x, x & 1) {\n (a, 0) => 0,\n _ => 1\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_amp_to_binding_in_pattern(x: u32) -> u32 {\n match (&x, x & 1) {\n (&a, 0) => 0,\n _ => 1,\n }\n}\n\n\n\n\/\/ Change RHS Of Arm 
-----------------------------------------------------------\n#[cfg(cfail1)]\npub fn change_rhs_of_arm(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => 2,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn change_rhs_of_arm(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 3,\n _ => 2,\n }\n}\n\n\n\n\/\/ Add Alternative To Arm ------------------------------------------------------\n#[cfg(cfail1)]\npub fn add_alternative_to_arm(x: u32) -> u32 {\n match x {\n 0 => 0,\n 1 => 1,\n _ => 2,\n }\n}\n\n#[cfg(not(cfail1))]\n#[rustc_dirty(label=\"Hir\", cfg=\"cfail2\")]\n#[rustc_clean(label=\"Hir\", cfg=\"cfail3\")]\n#[rustc_metadata_dirty(cfg=\"cfail2\")]\n#[rustc_metadata_clean(cfg=\"cfail3\")]\npub fn add_alternative_to_arm(x: u32) -> u32 {\n match x {\n 0 | 7 => 0,\n 1 => 3,\n _ => 2,\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Logic and data structures related to impl specialization, explained in\n\/\/ greater detail below.\n\/\/\n\/\/ At the moment, this implementation support only the simple \"chain\" rule:\n\/\/ If any two impls overlap, one must be a strict subset of the other.\n\/\/\n\/\/ See traits\/README.md for a bit more detail on how specialization\n\/\/ fits together with the rest of the trait machinery.\n\nuse super::{SelectionContext, FulfillmentContext};\nuse super::util::impl_trait_ref_and_oblig;\n\nuse rustc_data_structures::fnv::FnvHashMap;\nuse hir::def_id::DefId;\nuse infer::{InferCtxt, TypeOrigin};\nuse middle::region;\nuse ty::subst::{Subst, Substs};\nuse traits::{self, Reveal, ObligationCause, Normalized};\nuse ty::{self, TyCtxt, TypeFoldable};\nuse syntax_pos::DUMMY_SP;\n\nuse syntax::ast;\n\npub mod specialization_graph;\n\n\/\/\/ Information pertinent to an overlapping impl error.\npub struct OverlapError {\n pub with_impl: DefId,\n pub trait_desc: String,\n pub self_desc: Option<String>\n}\n\n\/\/\/ Given a subst for the requested impl, translate it to a subst\n\/\/\/ appropriate for the actual item definition (whether it be in that impl,\n\/\/\/ a parent impl, or the trait).\n\/\/\/ When we have selected one impl, but are actually using item definitions from\n\/\/\/ a parent impl providing a default, we need a way to translate between the\n\/\/\/ type parameters of the two impls. Here the `source_impl` is the one we've\n\/\/\/ selected, and `source_substs` is a substitution of its generics.\n\/\/\/ And `target_node` is the impl\/trait we're actually going to get the\n\/\/\/ definition from. The resulting substitution will map from `target_node`'s\n\/\/\/ generics to `source_impl`'s generics as instantiated by `source_subst`.\n\/\/\/\n\/\/\/ For example, consider the following scenario:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ trait Foo { ... }\n\/\/\/ impl<T, U> Foo for (T, U) { ... } \/\/ target impl\n\/\/\/ impl<V> Foo for (V, V) { ... 
} \/\/ source impl\n\/\/\/ ```\n\/\/\/\n\/\/\/ Suppose we have selected \"source impl\" with `V` instantiated with `u32`.\n\/\/\/ This function will produce a substitution with `T` and `U` both mapping to `u32`.\n\/\/\/\n\/\/\/ Where clauses add some trickiness here, because they can be used to \"define\"\n\/\/\/ an argument indirectly:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ impl<'a, I, T: 'a> Iterator for Cloned<I>\n\/\/\/ where I: Iterator<Item=&'a T>, T: Clone\n\/\/\/ ```\n\/\/\/\n\/\/\/ In a case like this, the substitution for `T` is determined indirectly,\n\/\/\/ through associated type projection. We deal with such cases by using\n\/\/\/ *fulfillment* to relate the two impls, requiring that all projections are\n\/\/\/ resolved.\npub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n source_impl: DefId,\n source_substs: &'tcx Substs<'tcx>,\n target_node: specialization_graph::Node)\n -> &'tcx Substs<'tcx> {\n let source_trait_ref = infcx.tcx\n .impl_trait_ref(source_impl)\n .unwrap()\n .subst(infcx.tcx, &source_substs);\n\n \/\/ translate the Self and TyParam parts of the substitution, since those\n \/\/ vary across impls\n let target_substs = match target_node {\n specialization_graph::Node::Impl(target_impl) => {\n \/\/ no need to translate if we're targetting the impl we started with\n if source_impl == target_impl {\n return source_substs;\n }\n\n fulfill_implication(infcx, source_trait_ref, target_impl).unwrap_or_else(|_| {\n bug!(\"When translating substitutions for specialization, the expected \\\n specializaiton failed to hold\")\n })\n }\n specialization_graph::Node::Trait(..) => source_trait_ref.substs,\n };\n\n \/\/ directly inherent the method generics, since those do not vary across impls\n source_substs.rebase_onto(infcx.tcx, source_impl, target_substs)\n}\n\n\/\/\/ Given a selected impl described by `impl_data`, returns the\n\/\/\/ definition and substitions for the method with the name `name`,\n\/\/\/ and trait method substitutions `substs`, in that impl, a less\n\/\/\/ specialized impl, or the trait default, whichever applies.\npub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n name: ast::Name,\n substs: &'tcx Substs<'tcx>,\n impl_data: &super::VtableImplData<'tcx, ()>)\n -> (DefId, &'tcx Substs<'tcx>)\n{\n assert!(!substs.needs_infer());\n\n let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();\n let trait_def = tcx.lookup_trait_def(trait_def_id);\n\n match trait_def.ancestors(impl_data.impl_def_id).fn_defs(tcx, name).next() {\n Some(node_item) => {\n let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {\n let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs);\n let substs = translate_substs(&infcx, impl_data.impl_def_id,\n substs, node_item.node);\n tcx.lift(&substs).unwrap_or_else(|| {\n bug!(\"find_method: translate_substs \\\n returned {:?} which contains inference types\/regions\",\n substs);\n })\n });\n (node_item.item.def_id, substs)\n }\n None => {\n bug!(\"method {:?} not found in {:?}\", name, impl_data.impl_def_id)\n }\n }\n}\n\n\/\/\/ Is impl1 a specialization of impl2?\n\/\/\/\n\/\/\/ Specialization is determined by the sets of types to which the impls apply;\n\/\/\/ impl1 specializes impl2 if it applies to a subset of the types impl2 applies\n\/\/\/ to.\npub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n impl1_def_id: DefId,\n impl2_def_id: DefId) -> bool {\n if let Some(r) = tcx.specializes_cache.borrow().check(impl1_def_id, impl2_def_id) {\n return r;\n }\n\n \/\/ The 
feature gate should prevent introducing new specializations, but not\n \/\/ taking advantage of upstream ones.\n if !tcx.sess.features.borrow().specialization &&\n (impl1_def_id.is_local() || impl2_def_id.is_local()) {\n return false;\n }\n\n \/\/ We determine whether there's a subset relationship by:\n \/\/\n \/\/ - skolemizing impl1,\n \/\/ - assuming the where clauses for impl1,\n \/\/ - instantiating impl2 with fresh inference variables,\n \/\/ - unifying,\n \/\/ - attempting to prove the where clauses for impl2\n \/\/\n \/\/ The last three steps are encapsulated in `fulfill_implication`.\n \/\/\n \/\/ See RFC 1210 for more details and justification.\n\n \/\/ Currently we do not allow e.g. a negative impl to specialize a positive one\n if tcx.trait_impl_polarity(impl1_def_id) != tcx.trait_impl_polarity(impl2_def_id) {\n return false;\n }\n\n \/\/ create a parameter environment corresponding to a (skolemized) instantiation of impl1\n let mut penv = tcx.construct_parameter_environment(DUMMY_SP,\n impl1_def_id,\n region::DUMMY_CODE_EXTENT);\n let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id)\n .unwrap()\n .subst(tcx, &penv.free_substs);\n\n let result = tcx.infer_ctxt(None, None, Reveal::ExactMatch).enter(|mut infcx| {\n \/\/ Normalize the trait reference, adding any obligations\n \/\/ that arise into the impl1 assumptions.\n let Normalized { value: impl1_trait_ref, obligations: normalization_obligations } = {\n let selcx = &mut SelectionContext::new(&infcx);\n traits::normalize(selcx, ObligationCause::dummy(), &impl1_trait_ref)\n };\n penv.caller_bounds.extend(normalization_obligations.into_iter().map(|o| {\n match tcx.lift_to_global(&o.predicate) {\n Some(predicate) => predicate,\n None => {\n bug!(\"specializes: obligation `{:?}` has inference types\/regions\", o);\n }\n }\n }));\n\n \/\/ Install the parameter environment, taking the predicates of impl1 as assumptions:\n infcx.parameter_environment = penv;\n\n \/\/ Attempt to prove that impl2 applies, given all of the above.\n fulfill_implication(&infcx, impl1_trait_ref, impl2_def_id).is_ok()\n });\n\n tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result);\n result\n}\n\n\/\/\/ Attempt to fulfill all obligations of `target_impl` after unification with\n\/\/\/ `source_trait_ref`. If successful, returns a substitution for *all* the\n\/\/\/ generics of `target_impl`, including both those needed to unify with\n\/\/\/ `source_trait_ref` and those whose identity is determined via a where\n\/\/\/ clause in the impl.\nfn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n source_trait_ref: ty::TraitRef<'tcx>,\n target_impl: DefId)\n -> Result<&'tcx Substs<'tcx>, ()> {\n let selcx = &mut SelectionContext::new(&infcx);\n let target_substs = infcx.fresh_substs_for_item(DUMMY_SP, target_impl);\n let (target_trait_ref, obligations) = impl_trait_ref_and_oblig(selcx,\n target_impl,\n target_substs);\n\n \/\/ do the impls unify? 
If not, no specialization.\n if let Err(_) = infcx.eq_trait_refs(true,\n TypeOrigin::Misc(DUMMY_SP),\n source_trait_ref,\n target_trait_ref) {\n debug!(\"fulfill_implication: {:?} does not unify with {:?}\",\n source_trait_ref,\n target_trait_ref);\n return Err(());\n }\n\n \/\/ attempt to prove all of the predicates for impl2 given those for impl1\n \/\/ (which are packed up in penv)\n\n infcx.save_and_restore_obligations_in_snapshot_flag(|infcx| {\n let mut fulfill_cx = FulfillmentContext::new();\n for oblig in obligations.into_iter() {\n fulfill_cx.register_predicate_obligation(&infcx, oblig);\n }\n match fulfill_cx.select_all_or_error(infcx) {\n Err(errors) => {\n \/\/ no dice!\n debug!(\"fulfill_implication: for impls on {:?} and {:?}, \\\n could not fulfill: {:?} given {:?}\",\n source_trait_ref,\n target_trait_ref,\n errors,\n infcx.parameter_environment.caller_bounds);\n Err(())\n }\n\n Ok(()) => {\n debug!(\"fulfill_implication: an impl for {:?} specializes {:?}\",\n source_trait_ref,\n target_trait_ref);\n\n \/\/ Now resolve the *substitution* we built for the target earlier, replacing\n \/\/ the inference variables inside with whatever we got from fulfillment.\n Ok(infcx.resolve_type_vars_if_possible(&target_substs))\n }\n }\n })\n}\n\npub struct SpecializesCache {\n map: FnvHashMap<(DefId, DefId), bool>\n}\n\nimpl SpecializesCache {\n pub fn new() -> Self {\n SpecializesCache {\n map: FnvHashMap()\n }\n }\n\n pub fn check(&self, a: DefId, b: DefId) -> Option<bool> {\n self.map.get(&(a, b)).cloned()\n }\n\n pub fn insert(&mut self, a: DefId, b: DefId, result: bool) {\n self.map.insert((a, b), result);\n }\n}\n<commit_msg>normalize trait-ref in context of impl<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Logic and data structures related to impl specialization, explained in\n\/\/ greater detail below.\n\/\/\n\/\/ At the moment, this implementation support only the simple \"chain\" rule:\n\/\/ If any two impls overlap, one must be a strict subset of the other.\n\/\/\n\/\/ See traits\/README.md for a bit more detail on how specialization\n\/\/ fits together with the rest of the trait machinery.\n\nuse super::{SelectionContext, FulfillmentContext};\nuse super::util::impl_trait_ref_and_oblig;\n\nuse rustc_data_structures::fnv::FnvHashMap;\nuse hir::def_id::DefId;\nuse infer::{InferCtxt, TypeOrigin};\nuse middle::region;\nuse ty::subst::{Subst, Substs};\nuse traits::{self, Reveal, ObligationCause, Normalized};\nuse ty::{self, TyCtxt, TypeFoldable};\nuse syntax_pos::DUMMY_SP;\n\nuse syntax::ast;\n\npub mod specialization_graph;\n\n\/\/\/ Information pertinent to an overlapping impl error.\npub struct OverlapError {\n pub with_impl: DefId,\n pub trait_desc: String,\n pub self_desc: Option<String>\n}\n\n\/\/\/ Given a subst for the requested impl, translate it to a subst\n\/\/\/ appropriate for the actual item definition (whether it be in that impl,\n\/\/\/ a parent impl, or the trait).\n\/\/\/ When we have selected one impl, but are actually using item definitions from\n\/\/\/ a parent impl providing a default, we need a way to translate between the\n\/\/\/ type parameters of the two impls. Here the `source_impl` is the one we've\n\/\/\/ selected, and `source_substs` is a substitution of its generics.\n\/\/\/ And `target_node` is the impl\/trait we're actually going to get the\n\/\/\/ definition from. The resulting substitution will map from `target_node`'s\n\/\/\/ generics to `source_impl`'s generics as instantiated by `source_subst`.\n\/\/\/\n\/\/\/ For example, consider the following scenario:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ trait Foo { ... }\n\/\/\/ impl<T, U> Foo for (T, U) { ... } \/\/ target impl\n\/\/\/ impl<V> Foo for (V, V) { ... } \/\/ source impl\n\/\/\/ ```\n\/\/\/\n\/\/\/ Suppose we have selected \"source impl\" with `V` instantiated with `u32`.\n\/\/\/ This function will produce a substitution with `T` and `U` both mapping to `u32`.\n\/\/\/\n\/\/\/ Where clauses add some trickiness here, because they can be used to \"define\"\n\/\/\/ an argument indirectly:\n\/\/\/\n\/\/\/ ```rust\n\/\/\/ impl<'a, I, T: 'a> Iterator for Cloned<I>\n\/\/\/ where I: Iterator<Item=&'a T>, T: Clone\n\/\/\/ ```\n\/\/\/\n\/\/\/ In a case like this, the substitution for `T` is determined indirectly,\n\/\/\/ through associated type projection. 
We deal with such cases by using\n\/\/\/ *fulfillment* to relate the two impls, requiring that all projections are\n\/\/\/ resolved.\npub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n source_impl: DefId,\n source_substs: &'tcx Substs<'tcx>,\n target_node: specialization_graph::Node)\n -> &'tcx Substs<'tcx> {\n let source_trait_ref = infcx.tcx\n .impl_trait_ref(source_impl)\n .unwrap()\n .subst(infcx.tcx, &source_substs);\n\n \/\/ translate the Self and TyParam parts of the substitution, since those\n \/\/ vary across impls\n let target_substs = match target_node {\n specialization_graph::Node::Impl(target_impl) => {\n \/\/ no need to translate if we're targetting the impl we started with\n if source_impl == target_impl {\n return source_substs;\n }\n\n fulfill_implication(infcx, source_trait_ref, target_impl).unwrap_or_else(|_| {\n bug!(\"When translating substitutions for specialization, the expected \\\n specializaiton failed to hold\")\n })\n }\n specialization_graph::Node::Trait(..) => source_trait_ref.substs,\n };\n\n \/\/ directly inherent the method generics, since those do not vary across impls\n source_substs.rebase_onto(infcx.tcx, source_impl, target_substs)\n}\n\n\/\/\/ Given a selected impl described by `impl_data`, returns the\n\/\/\/ definition and substitions for the method with the name `name`,\n\/\/\/ and trait method substitutions `substs`, in that impl, a less\n\/\/\/ specialized impl, or the trait default, whichever applies.\npub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n name: ast::Name,\n substs: &'tcx Substs<'tcx>,\n impl_data: &super::VtableImplData<'tcx, ()>)\n -> (DefId, &'tcx Substs<'tcx>)\n{\n assert!(!substs.needs_infer());\n\n let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();\n let trait_def = tcx.lookup_trait_def(trait_def_id);\n\n match trait_def.ancestors(impl_data.impl_def_id).fn_defs(tcx, name).next() {\n Some(node_item) => {\n let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {\n let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs);\n let substs = translate_substs(&infcx, impl_data.impl_def_id,\n substs, node_item.node);\n tcx.lift(&substs).unwrap_or_else(|| {\n bug!(\"find_method: translate_substs \\\n returned {:?} which contains inference types\/regions\",\n substs);\n })\n });\n (node_item.item.def_id, substs)\n }\n None => {\n bug!(\"method {:?} not found in {:?}\", name, impl_data.impl_def_id)\n }\n }\n}\n\n\/\/\/ Is impl1 a specialization of impl2?\n\/\/\/\n\/\/\/ Specialization is determined by the sets of types to which the impls apply;\n\/\/\/ impl1 specializes impl2 if it applies to a subset of the types impl2 applies\n\/\/\/ to.\npub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,\n impl1_def_id: DefId,\n impl2_def_id: DefId) -> bool {\n debug!(\"specializes({:?}, {:?})\", impl1_def_id, impl2_def_id);\n\n if let Some(r) = tcx.specializes_cache.borrow().check(impl1_def_id, impl2_def_id) {\n return r;\n }\n\n \/\/ The feature gate should prevent introducing new specializations, but not\n \/\/ taking advantage of upstream ones.\n if !tcx.sess.features.borrow().specialization &&\n (impl1_def_id.is_local() || impl2_def_id.is_local()) {\n return false;\n }\n\n \/\/ We determine whether there's a subset relationship by:\n \/\/\n \/\/ - skolemizing impl1,\n \/\/ - assuming the where clauses for impl1,\n \/\/ - instantiating impl2 with fresh inference variables,\n \/\/ - unifying,\n \/\/ - attempting to prove the where clauses for impl2\n \/\/\n \/\/ 
The last three steps are encapsulated in `fulfill_implication`.\n \/\/\n \/\/ See RFC 1210 for more details and justification.\n\n \/\/ Currently we do not allow e.g. a negative impl to specialize a positive one\n if tcx.trait_impl_polarity(impl1_def_id) != tcx.trait_impl_polarity(impl2_def_id) {\n return false;\n }\n\n \/\/ create a parameter environment corresponding to a (skolemized) instantiation of impl1\n let penv = tcx.construct_parameter_environment(DUMMY_SP,\n impl1_def_id,\n region::DUMMY_CODE_EXTENT);\n let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id)\n .unwrap()\n .subst(tcx, &penv.free_substs);\n\n \/\/ Create a infcx, taking the predicates of impl1 as assumptions:\n let result = tcx.infer_ctxt(None, Some(penv), Reveal::ExactMatch).enter(|mut infcx| {\n \/\/ Normalize the trait reference, adding any obligations\n \/\/ that arise into the impl1 assumptions.\n let Normalized { value: impl1_trait_ref, obligations: normalization_obligations } = {\n let selcx = &mut SelectionContext::new(&infcx);\n traits::normalize(selcx, ObligationCause::dummy(), &impl1_trait_ref)\n };\n infcx.parameter_environment.caller_bounds.extend(normalization_obligations.into_iter().map(|o| {\n match tcx.lift_to_global(&o.predicate) {\n Some(predicate) => predicate,\n None => {\n bug!(\"specializes: obligation `{:?}` has inference types\/regions\", o);\n }\n }\n }));\n\n \/\/ Attempt to prove that impl2 applies, given all of the above.\n fulfill_implication(&infcx, impl1_trait_ref, impl2_def_id).is_ok()\n });\n\n tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result);\n result\n}\n\n\/\/\/ Attempt to fulfill all obligations of `target_impl` after unification with\n\/\/\/ `source_trait_ref`. If successful, returns a substitution for *all* the\n\/\/\/ generics of `target_impl`, including both those needed to unify with\n\/\/\/ `source_trait_ref` and those whose identity is determined via a where\n\/\/\/ clause in the impl.\nfn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,\n source_trait_ref: ty::TraitRef<'tcx>,\n target_impl: DefId)\n -> Result<&'tcx Substs<'tcx>, ()> {\n let selcx = &mut SelectionContext::new(&infcx);\n let target_substs = infcx.fresh_substs_for_item(DUMMY_SP, target_impl);\n let (target_trait_ref, obligations) = impl_trait_ref_and_oblig(selcx,\n target_impl,\n target_substs);\n\n \/\/ do the impls unify? 
If not, no specialization.\n if let Err(_) = infcx.eq_trait_refs(true,\n TypeOrigin::Misc(DUMMY_SP),\n source_trait_ref,\n target_trait_ref) {\n debug!(\"fulfill_implication: {:?} does not unify with {:?}\",\n source_trait_ref,\n target_trait_ref);\n return Err(());\n }\n\n \/\/ attempt to prove all of the predicates for impl2 given those for impl1\n \/\/ (which are packed up in penv)\n\n infcx.save_and_restore_obligations_in_snapshot_flag(|infcx| {\n let mut fulfill_cx = FulfillmentContext::new();\n for oblig in obligations.into_iter() {\n fulfill_cx.register_predicate_obligation(&infcx, oblig);\n }\n match fulfill_cx.select_all_or_error(infcx) {\n Err(errors) => {\n \/\/ no dice!\n debug!(\"fulfill_implication: for impls on {:?} and {:?}, \\\n could not fulfill: {:?} given {:?}\",\n source_trait_ref,\n target_trait_ref,\n errors,\n infcx.parameter_environment.caller_bounds);\n Err(())\n }\n\n Ok(()) => {\n debug!(\"fulfill_implication: an impl for {:?} specializes {:?}\",\n source_trait_ref,\n target_trait_ref);\n\n \/\/ Now resolve the *substitution* we built for the target earlier, replacing\n \/\/ the inference variables inside with whatever we got from fulfillment.\n Ok(infcx.resolve_type_vars_if_possible(&target_substs))\n }\n }\n })\n}\n\npub struct SpecializesCache {\n map: FnvHashMap<(DefId, DefId), bool>\n}\n\nimpl SpecializesCache {\n pub fn new() -> Self {\n SpecializesCache {\n map: FnvHashMap()\n }\n }\n\n pub fn check(&self, a: DefId, b: DefId) -> Option<bool> {\n self.map.get(&(a, b)).cloned()\n }\n\n pub fn insert(&mut self, a: DefId, b: DefId, result: bool) {\n self.map.insert((a, b), result);\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Validate AST before lowering it to HIR\n\/\/\n\/\/ This pass is supposed to catch things that fit into AST data structures,\n\/\/ but not permitted by the language. 
It runs after expansion when AST is frozen,\n\/\/ so it can check for erroneous constructions produced by syntax extensions.\n\/\/ This pass is supposed to perform only simple checks not requiring name resolution\n\/\/ or type checking or some other kind of complex analysis.\n\nuse rustc::lint;\nuse rustc::session::Session;\nuse syntax::ast::*;\nuse syntax::parse::token::{self, keywords};\nuse syntax::visit::{self, Visitor};\nuse syntax_pos::Span;\nuse errors;\n\nstruct AstValidator<'a> {\n session: &'a Session,\n}\n\nimpl<'a> AstValidator<'a> {\n fn err_handler(&self) -> &errors::Handler {\n &self.session.parse_sess.span_diagnostic\n }\n\n fn check_label(&self, label: Ident, span: Span, id: NodeId) {\n if label.name == keywords::StaticLifetime.name() {\n self.err_handler().span_err(span, &format!(\"invalid label name `{}`\", label.name));\n }\n if label.name.as_str() == \"'_\" {\n self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE,\n id,\n span,\n format!(\"invalid label name `{}`\", label.name));\n }\n }\n\n fn invalid_visibility(&self, vis: &Visibility, span: Span, note: Option<&str>) {\n if vis != &Visibility::Inherited {\n let mut err = struct_span_err!(self.session,\n span,\n E0449,\n \"unnecessary visibility qualifier\");\n if let Some(note) = note {\n err.span_note(span, note);\n }\n err.emit();\n }\n }\n\n fn check_decl_no_pat<ReportFn: Fn(Span, bool)>(&self, decl: &FnDecl, report_err: ReportFn) {\n for arg in &decl.inputs {\n match arg.pat.node {\n PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), _, None) |\n PatKind::Wild => {}\n PatKind::Ident(..) => report_err(arg.pat.span, true),\n _ => report_err(arg.pat.span, false),\n }\n }\n }\n}\n\nimpl<'a> Visitor for AstValidator<'a> {\n fn visit_lifetime(&mut self, lt: &Lifetime) {\n if lt.name.as_str() == \"'_\" {\n self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE,\n lt.id,\n lt.span,\n format!(\"invalid lifetime name `{}`\", lt.name));\n }\n\n visit::walk_lifetime(self, lt)\n }\n\n fn visit_expr(&mut self, expr: &Expr) {\n match expr.node {\n ExprKind::While(_, _, Some(ident)) |\n ExprKind::Loop(_, Some(ident)) |\n ExprKind::WhileLet(_, _, _, Some(ident)) |\n ExprKind::ForLoop(_, _, _, Some(ident)) |\n ExprKind::Break(Some(ident)) |\n ExprKind::Continue(Some(ident)) => {\n self.check_label(ident.node, ident.span, expr.id);\n }\n _ => {}\n }\n\n visit::walk_expr(self, expr)\n }\n\n fn visit_ty(&mut self, ty: &Ty) {\n match ty.node {\n TyKind::BareFn(ref bfty) => {\n self.check_decl_no_pat(&bfty.decl, |span, _| {\n let mut err = struct_span_err!(self.session,\n span,\n E0561,\n \"patterns aren't allowed in function pointer \\\n types\");\n err.span_note(span,\n \"this is a recent error, see issue #35203 for more details\");\n err.emit();\n });\n }\n _ => {}\n }\n\n visit::walk_ty(self, ty)\n }\n\n fn visit_path(&mut self, path: &Path, id: NodeId) {\n if path.global && path.segments.len() > 0 {\n let ident = path.segments[0].identifier;\n if token::Ident(ident).is_path_segment_keyword() {\n self.session.add_lint(lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH,\n id,\n path.span,\n format!(\"global paths cannot start with `{}`\", ident));\n }\n }\n\n visit::walk_path(self, path)\n }\n\n fn visit_item(&mut self, item: &Item) {\n match item.node {\n ItemKind::Use(ref view_path) => {\n let path = view_path.node.path();\n if !path.segments.iter().all(|segment| segment.parameters.is_empty()) {\n self.err_handler()\n .span_err(path.span, \"type or lifetime parameters in import path\");\n }\n }\n ItemKind::Impl(_, _, _, 
Some(..), _, ref impl_items) => {\n self.invalid_visibility(&item.vis, item.span, None);\n for impl_item in impl_items {\n self.invalid_visibility(&impl_item.vis, impl_item.span, None);\n }\n }\n ItemKind::Impl(_, _, _, None, _, _) => {\n self.invalid_visibility(&item.vis,\n item.span,\n Some(\"place qualifiers on individual impl items instead\"));\n }\n ItemKind::DefaultImpl(..) => {\n self.invalid_visibility(&item.vis, item.span, None);\n }\n ItemKind::ForeignMod(..) => {\n self.invalid_visibility(&item.vis,\n item.span,\n Some(\"place qualifiers on individual foreign items \\\n instead\"));\n }\n ItemKind::Enum(ref def, _) => {\n for variant in &def.variants {\n for field in variant.node.data.fields() {\n self.invalid_visibility(&field.vis, field.span, None);\n }\n }\n }\n _ => {}\n }\n\n visit::walk_item(self, item)\n }\n\n fn visit_foreign_item(&mut self, fi: &ForeignItem) {\n match fi.node {\n ForeignItemKind::Fn(ref decl, _) => {\n self.check_decl_no_pat(decl, |span, is_recent| {\n let mut err = struct_span_err!(self.session,\n span,\n E0130,\n \"patterns aren't allowed in foreign function \\\n declarations\");\n err.span_label(span, &format!(\"pattern not allowed in foreign function\"));\n if is_recent {\n err.span_note(span,\n \"this is a recent error, see issue #35203 for more details\");\n }\n err.emit();\n });\n }\n ForeignItemKind::Static(..) => {}\n }\n\n visit::walk_foreign_item(self, fi)\n }\n\n fn visit_variant_data(&mut self,\n vdata: &VariantData,\n _: Ident,\n _: &Generics,\n _: NodeId,\n span: Span) {\n if vdata.fields().is_empty() {\n if vdata.is_tuple() {\n self.err_handler()\n .struct_span_err(span,\n \"empty tuple structs and enum variants are not allowed, use \\\n unit structs and enum variants instead\")\n .span_help(span,\n \"remove trailing `()` to make a unit struct or unit enum variant\")\n .emit();\n }\n }\n\n visit::walk_struct_def(self, vdata)\n }\n\n fn visit_vis(&mut self, vis: &Visibility) {\n match *vis {\n Visibility::Restricted { ref path, .. } => {\n if !path.segments.iter().all(|segment| segment.parameters.is_empty()) {\n self.err_handler()\n .span_err(path.span, \"type or lifetime parameters in visibility path\");\n }\n }\n _ => {}\n }\n\n visit::walk_vis(self, vis)\n }\n}\n\npub fn check_crate(session: &Session, krate: &Crate) {\n visit::walk_crate(&mut AstValidator { session: session }, krate)\n}\n<commit_msg>Allow attributes to be marked used before `cfg` proccessing.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Validate AST before lowering it to HIR\n\/\/\n\/\/ This pass is supposed to catch things that fit into AST data structures,\n\/\/ but not permitted by the language. 
It runs after expansion when AST is frozen,\n\/\/ so it can check for erroneous constructions produced by syntax extensions.\n\/\/ This pass is supposed to perform only simple checks not requiring name resolution\n\/\/ or type checking or some other kind of complex analysis.\n\nuse rustc::lint;\nuse rustc::session::Session;\nuse syntax::ast::*;\nuse syntax::attr;\nuse syntax::parse::token::{self, keywords};\nuse syntax::visit::{self, Visitor};\nuse syntax_pos::Span;\nuse errors;\n\nstruct AstValidator<'a> {\n session: &'a Session,\n}\n\nimpl<'a> AstValidator<'a> {\n fn err_handler(&self) -> &errors::Handler {\n &self.session.parse_sess.span_diagnostic\n }\n\n fn check_label(&self, label: Ident, span: Span, id: NodeId) {\n if label.name == keywords::StaticLifetime.name() {\n self.err_handler().span_err(span, &format!(\"invalid label name `{}`\", label.name));\n }\n if label.name.as_str() == \"'_\" {\n self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE,\n id,\n span,\n format!(\"invalid label name `{}`\", label.name));\n }\n }\n\n fn invalid_visibility(&self, vis: &Visibility, span: Span, note: Option<&str>) {\n if vis != &Visibility::Inherited {\n let mut err = struct_span_err!(self.session,\n span,\n E0449,\n \"unnecessary visibility qualifier\");\n if let Some(note) = note {\n err.span_note(span, note);\n }\n err.emit();\n }\n }\n\n fn check_decl_no_pat<ReportFn: Fn(Span, bool)>(&self, decl: &FnDecl, report_err: ReportFn) {\n for arg in &decl.inputs {\n match arg.pat.node {\n PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), _, None) |\n PatKind::Wild => {}\n PatKind::Ident(..) => report_err(arg.pat.span, true),\n _ => report_err(arg.pat.span, false),\n }\n }\n }\n}\n\nimpl<'a> Visitor for AstValidator<'a> {\n fn visit_lifetime(&mut self, lt: &Lifetime) {\n if lt.name.as_str() == \"'_\" {\n self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE,\n lt.id,\n lt.span,\n format!(\"invalid lifetime name `{}`\", lt.name));\n }\n\n visit::walk_lifetime(self, lt)\n }\n\n fn visit_expr(&mut self, expr: &Expr) {\n match expr.node {\n ExprKind::While(_, _, Some(ident)) |\n ExprKind::Loop(_, Some(ident)) |\n ExprKind::WhileLet(_, _, _, Some(ident)) |\n ExprKind::ForLoop(_, _, _, Some(ident)) |\n ExprKind::Break(Some(ident)) |\n ExprKind::Continue(Some(ident)) => {\n self.check_label(ident.node, ident.span, expr.id);\n }\n _ => {}\n }\n\n visit::walk_expr(self, expr)\n }\n\n fn visit_ty(&mut self, ty: &Ty) {\n match ty.node {\n TyKind::BareFn(ref bfty) => {\n self.check_decl_no_pat(&bfty.decl, |span, _| {\n let mut err = struct_span_err!(self.session,\n span,\n E0561,\n \"patterns aren't allowed in function pointer \\\n types\");\n err.span_note(span,\n \"this is a recent error, see issue #35203 for more details\");\n err.emit();\n });\n }\n _ => {}\n }\n\n visit::walk_ty(self, ty)\n }\n\n fn visit_path(&mut self, path: &Path, id: NodeId) {\n if path.global && path.segments.len() > 0 {\n let ident = path.segments[0].identifier;\n if token::Ident(ident).is_path_segment_keyword() {\n self.session.add_lint(lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH,\n id,\n path.span,\n format!(\"global paths cannot start with `{}`\", ident));\n }\n }\n\n visit::walk_path(self, path)\n }\n\n fn visit_item(&mut self, item: &Item) {\n match item.node {\n ItemKind::Use(ref view_path) => {\n let path = view_path.node.path();\n if !path.segments.iter().all(|segment| segment.parameters.is_empty()) {\n self.err_handler()\n .span_err(path.span, \"type or lifetime parameters in import path\");\n }\n }\n 
ItemKind::Impl(_, _, _, Some(..), _, ref impl_items) => {\n self.invalid_visibility(&item.vis, item.span, None);\n for impl_item in impl_items {\n self.invalid_visibility(&impl_item.vis, impl_item.span, None);\n }\n }\n ItemKind::Impl(_, _, _, None, _, _) => {\n self.invalid_visibility(&item.vis,\n item.span,\n Some(\"place qualifiers on individual impl items instead\"));\n }\n ItemKind::DefaultImpl(..) => {\n self.invalid_visibility(&item.vis, item.span, None);\n }\n ItemKind::ForeignMod(..) => {\n self.invalid_visibility(&item.vis,\n item.span,\n Some(\"place qualifiers on individual foreign items \\\n instead\"));\n }\n ItemKind::Enum(ref def, _) => {\n for variant in &def.variants {\n for field in variant.node.data.fields() {\n self.invalid_visibility(&field.vis, field.span, None);\n }\n }\n }\n ItemKind::Mod(_) => {\n \/\/ Ensure that `path` attributes on modules are recorded as used (c.f. #35584).\n attr::first_attr_value_str_by_name(&item.attrs, \"path\");\n }\n _ => {}\n }\n\n visit::walk_item(self, item)\n }\n\n fn visit_foreign_item(&mut self, fi: &ForeignItem) {\n match fi.node {\n ForeignItemKind::Fn(ref decl, _) => {\n self.check_decl_no_pat(decl, |span, is_recent| {\n let mut err = struct_span_err!(self.session,\n span,\n E0130,\n \"patterns aren't allowed in foreign function \\\n declarations\");\n err.span_label(span, &format!(\"pattern not allowed in foreign function\"));\n if is_recent {\n err.span_note(span,\n \"this is a recent error, see issue #35203 for more details\");\n }\n err.emit();\n });\n }\n ForeignItemKind::Static(..) => {}\n }\n\n visit::walk_foreign_item(self, fi)\n }\n\n fn visit_variant_data(&mut self,\n vdata: &VariantData,\n _: Ident,\n _: &Generics,\n _: NodeId,\n span: Span) {\n if vdata.fields().is_empty() {\n if vdata.is_tuple() {\n self.err_handler()\n .struct_span_err(span,\n \"empty tuple structs and enum variants are not allowed, use \\\n unit structs and enum variants instead\")\n .span_help(span,\n \"remove trailing `()` to make a unit struct or unit enum variant\")\n .emit();\n }\n }\n\n visit::walk_struct_def(self, vdata)\n }\n\n fn visit_vis(&mut self, vis: &Visibility) {\n match *vis {\n Visibility::Restricted { ref path, .. } => {\n if !path.segments.iter().all(|segment| segment.parameters.is_empty()) {\n self.err_handler()\n .span_err(path.span, \"type or lifetime parameters in visibility path\");\n }\n }\n _ => {}\n }\n\n visit::walk_vis(self, vis)\n }\n}\n\npub fn check_crate(session: &Session, krate: &Crate) {\n visit::walk_crate(&mut AstValidator { session: session }, krate)\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). 
Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over `FileLockEntry` to have some convenience internally.\npub struct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ For interal use only. Load an Link from a store id, if this is actually a Link\n fn retrieve(store: &'a Store, id: StoreId) -> Result<Option<Link<'a>>> {\n store.retrieve(id)\n .map(|fle| {\n if let Some(_) = Link::get_link_uri_from_filelockentry(&fle) {\n Some(Link {\n link: fle\n })\n } else {\n None\n }\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Get a link Url object from a `FileLockEntry`, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.get_header()\n .read(\"imag.content.uri\")\n .ok()\n .and_then(|opt| {\n match opt {\n Some(Value::String(s)) => Url::parse(&s[..]).ok(),\n _ => None\n }\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .get_header()\n .read(\"imag.content.uri\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(Some)\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a \/link\/external\/*: '{:?}'\", id);\n id.starts_with(\"\/link\/external\/\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for 
Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .map(|vect| {\n debug!(\"Getting external links\");\n vect.into_iter()\n .filter(is_external_link_storeid)\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => get_external_link_from_file(&f),\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid();\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let file = store.retrieve(file_id.clone());\n if file.is_err() {\n debug!(\"Failed to create or retrieve an file for this link '{:?}'\", link);\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(file.unwrap_err()))));\n }\n let mut file = file.unwrap();\n\n debug!(\"Generating header content!\");\n {\n let mut hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"imag.content\") {\n Ok(Some(Value::Table(table))) => table,\n Ok(Some(_)) => {\n warn!(\"There is a value at 'imag.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e)))),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.set(\"imag.content\", Value::Table(table)) {\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = 
{:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.as_str() != link.as_str())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<commit_msg>Remove if-else by map()<commit_after>\/\/\/ External linking is a complex implementation to be able to serve a clean and easy-to-use\n\/\/\/ interface.\n\/\/\/\n\/\/\/ Internally, there are no such things as \"external links\" (plural). Each Entry in the store can\n\/\/\/ only have _one_ external link.\n\/\/\/\n\/\/\/ This library does the following therefor: It allows you to have several external links with one\n\/\/\/ entry, which are internally one file in the store for each link, linked with \"internal\n\/\/\/ linking\".\n\/\/\/\n\/\/\/ This helps us greatly with deduplication of URLs.\n\/\/\/\n\nuse std::ops::DerefMut;\nuse std::collections::BTreeMap;\n\nuse libimagstore::store::Entry;\nuse libimagstore::store::FileLockEntry;\nuse libimagstore::store::Store;\nuse libimagstore::storeid::StoreId;\nuse libimagstore::storeid::IntoStoreId;\n\nuse error::LinkError as LE;\nuse error::LinkErrorKind as LEK;\nuse result::Result;\nuse internal::InternalLinker;\nuse module_path::ModuleEntryPath;\n\nuse toml::Value;\nuse url::Url;\nuse crypto::sha1::Sha1;\nuse crypto::digest::Digest;\n\n\/\/\/ \"Link\" Type, just an abstraction over `FileLockEntry` to have some convenience internally.\npub struct Link<'a> {\n link: FileLockEntry<'a>\n}\n\nimpl<'a> Link<'a> {\n\n pub fn new(fle: FileLockEntry<'a>) -> Link<'a> {\n Link { link: fle }\n }\n\n \/\/\/ For interal use only. 
Load an Link from a store id, if this is actually a Link\n fn retrieve(store: &'a Store, id: StoreId) -> Result<Option<Link<'a>>> {\n store.retrieve(id)\n .map(|fle| Link::get_link_uri_from_filelockentry(&fle).map(|_| Link { link: fle }))\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Get a link Url object from a `FileLockEntry`, ignore errors.\n fn get_link_uri_from_filelockentry(file: &FileLockEntry<'a>) -> Option<Url> {\n file.get_header()\n .read(\"imag.content.uri\")\n .ok()\n .and_then(|opt| {\n match opt {\n Some(Value::String(s)) => Url::parse(&s[..]).ok(),\n _ => None\n }\n })\n }\n\n pub fn get_url(&self) -> Result<Option<Url>> {\n let opt = self.link\n .get_header()\n .read(\"imag.content.uri\");\n\n match opt {\n Ok(Some(Value::String(s))) => {\n Url::parse(&s[..])\n .map(Some)\n .map_err(|e| LE::new(LEK::EntryHeaderReadError, Some(Box::new(e))))\n },\n Ok(None) => Ok(None),\n _ => Err(LE::new(LEK::EntryHeaderReadError, None))\n }\n }\n\n}\n\npub trait ExternalLinker : InternalLinker {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>>;\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()>;\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()>;\n\n}\n\n\/\/\/ Check whether the StoreId starts with `\/link\/external\/`\npub fn is_external_link_storeid(id: &StoreId) -> bool {\n debug!(\"Checking whether this is a \/link\/external\/*: '{:?}'\", id);\n id.starts_with(\"\/link\/external\/\")\n}\n\nfn get_external_link_from_file(entry: &FileLockEntry) -> Result<Url> {\n Link::get_link_uri_from_filelockentry(entry) \/\/ TODO: Do not hide error by using this function\n .ok_or(LE::new(LEK::StoreReadError, None))\n}\n\n\/\/\/ Implement `ExternalLinker` for `Entry`, hiding the fact that there is no such thing as an external\n\/\/\/ link in an entry, but internal links to other entries which serve as external links, as one\n\/\/\/ entry in the store can only have one external link.\nimpl ExternalLinker for Entry {\n\n \/\/\/ Get the external links from the implementor object\n fn get_external_links(&self, store: &Store) -> Result<Vec<Url>> {\n \/\/ Iterate through all internal links and filter for FileLockEntries which live in\n \/\/ \/link\/external\/<SHA> -> load these files and get the external link from their headers,\n \/\/ put them into the return vector.\n self.get_internal_links()\n .map(|vect| {\n debug!(\"Getting external links\");\n vect.into_iter()\n .filter(is_external_link_storeid)\n .map(|id| {\n debug!(\"Retrieving entry for id: '{:?}'\", id);\n match store.retrieve(id.clone()) {\n Ok(f) => get_external_link_from_file(&f),\n Err(e) => {\n debug!(\"Retrieving entry for id: '{:?}' failed\", id);\n Err(LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n }\n })\n .filter_map(|x| x.ok()) \/\/ TODO: Do not ignore error here\n .collect()\n })\n .map_err(|e| LE::new(LEK::StoreReadError, Some(Box::new(e))))\n }\n\n \/\/\/ Set the external links for the implementor object\n fn set_external_links(&mut self, store: &Store, links: Vec<Url>) -> Result<()> {\n \/\/ Take all the links, generate a SHA sum out of each one, filter out the already existing\n \/\/ store entries and 
store the other URIs in the header of one FileLockEntry each, in\n \/\/ the path \/link\/external\/<SHA of the URL>\n\n debug!(\"Iterating {} links = {:?}\", links.len(), links);\n for link in links { \/\/ for all links\n let hash = {\n let mut s = Sha1::new();\n s.input_str(&link.as_str()[..]);\n s.result_str()\n };\n let file_id = ModuleEntryPath::new(format!(\"external\/{}\", hash)).into_storeid();\n\n debug!(\"Link = '{:?}'\", link);\n debug!(\"Hash = '{:?}'\", hash);\n debug!(\"StoreId = '{:?}'\", file_id);\n\n \/\/ retrieve the file from the store, which implicitely creates the entry if it does not\n \/\/ exist\n let file = store.retrieve(file_id.clone());\n if file.is_err() {\n debug!(\"Failed to create or retrieve an file for this link '{:?}'\", link);\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(file.unwrap_err()))));\n }\n let mut file = file.unwrap();\n\n debug!(\"Generating header content!\");\n {\n let mut hdr = file.deref_mut().get_header_mut();\n\n let mut table = match hdr.read(\"imag.content\") {\n Ok(Some(Value::Table(table))) => table,\n Ok(Some(_)) => {\n warn!(\"There is a value at 'imag.content' which is not a table.\");\n warn!(\"Going to override this value\");\n BTreeMap::new()\n },\n Ok(None) => BTreeMap::new(),\n Err(e) => return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e)))),\n };\n\n let v = Value::String(link.into_string());\n\n debug!(\"setting URL = '{:?}\", v);\n table.insert(String::from(\"url\"), v);\n\n if let Err(e) = hdr.set(\"imag.content\", Value::Table(table)) {\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n } else {\n debug!(\"Setting URL worked\");\n }\n }\n\n \/\/ then add an internal link to the new file or return an error if this fails\n if let Err(e) = self.add_internal_link(file.deref_mut()) {\n debug!(\"Error adding internal link\");\n return Err(LE::new(LEK::StoreWriteError, Some(Box::new(e))));\n }\n }\n debug!(\"Ready iterating\");\n Ok(())\n }\n\n \/\/\/ Add an external link to the implementor object\n fn add_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, add this one, save them\n debug!(\"Getting links\");\n self.get_external_links(store)\n .and_then(|mut links| {\n debug!(\"Adding link = '{:?}' to links = {:?}\", link, links);\n links.push(link);\n debug!(\"Setting {} links = {:?}\", links.len(), links);\n self.set_external_links(store, links)\n })\n }\n\n \/\/\/ Remove an external link from the implementor object\n fn remove_external_link(&mut self, store: &Store, link: Url) -> Result<()> {\n \/\/ get external links, remove this one, save them\n self.get_external_links(store)\n .and_then(|links| {\n debug!(\"Removing link = '{:?}' from links = {:?}\", link, links);\n let links = links.into_iter()\n .filter(|l| l.as_str() != link.as_str())\n .collect();\n self.set_external_links(store, links)\n })\n }\n\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug helper for fuzzing<commit_after>\/\/ Copyright 2019-2020 Google LLC\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific 
language governing permissions and\n\/\/ limitations under the License.\n\nuse fuzz_store::{fuzz, StatKey, Stats};\nuse std::io::Write;\nuse std::io::{stdout, Read};\nuse std::path::Path;\n\nfn usage(program: &str) {\n println!(\n r#\"Usage: {} {{ [<artifact_file>] | <corpus_directory> <bucket_predicate>.. }}\n\nIf <artifact_file> is not provided, it is read from standard input.\n\nWhen <bucket_predicate>.. are provided, only runs matching all predicates are shown. The format of\neach <bucket_predicate> is <bucket_key>=<bucket_value>.\"#,\n program\n );\n}\n\nfn debug(data: &[u8]) {\n println!(\"{:02x?}\", data);\n fuzz(data, true, None);\n}\n\n\/\/\/ Bucket predicate.\nstruct Predicate {\n \/\/\/ Bucket key.\n key: StatKey,\n\n \/\/\/ Bucket value.\n value: usize,\n}\n\nimpl std::str::FromStr for Predicate {\n type Err = String;\n\n fn from_str(input: &str) -> Result<Self, Self::Err> {\n let predicate: Vec<&str> = input.split('=').collect();\n if predicate.len() != 2 {\n return Err(\"Predicate should have exactly one equal sign.\".to_string());\n }\n let key = predicate[0]\n .parse()\n .map_err(|_| format!(\"Predicate key `{}` is not recognized.\", predicate[0]))?;\n let value: usize = predicate[1]\n .parse()\n .map_err(|_| format!(\"Predicate value `{}` is not a number.\", predicate[1]))?;\n if value != 0 && !value.is_power_of_two() {\n return Err(format!(\n \"Predicate value `{}` is not a bucket.\",\n predicate[1]\n ));\n }\n Ok(Predicate { key, value })\n }\n}\n\nfn analyze(corpus: &Path, predicates: Vec<Predicate>) {\n let mut stats = Stats::default();\n let mut count = 0;\n let total = std::fs::read_dir(corpus).unwrap().count();\n for entry in std::fs::read_dir(corpus).unwrap() {\n let data = std::fs::read(entry.unwrap().path()).unwrap();\n let mut stat = Stats::default();\n fuzz(&data, false, Some(&mut stat));\n if predicates\n .iter()\n .all(|p| stat.get_count(p.key, p.value).is_some())\n {\n stats.merge(&stat);\n }\n count += 1;\n print!(\"\\u{1b}[K{} \/ {}\\r\", count, total);\n stdout().flush().unwrap();\n }\n \/\/ NOTE: To avoid reloading the corpus each time we want to check a different filter, we can\n \/\/ start an interactive loop here taking filters as input and printing the filtered stats. 
We\n \/\/ would keep all individual stats for each run in a vector.\n print!(\"{}\", stats);\n}\n\nfn main() {\n let args: Vec<String> = std::env::args().collect();\n \/\/ No arguments reads from stdin.\n if args.len() <= 1 {\n let stdin = std::io::stdin();\n let mut data = Vec::new();\n stdin.lock().read_to_end(&mut data).unwrap();\n return debug(&data);\n }\n let path = Path::new(&args[1]);\n \/\/ File argument assumes artifact.\n if path.is_file() && args.len() == 2 {\n return debug(&std::fs::read(path).unwrap());\n }\n \/\/ Directory argument assumes corpus.\n if path.is_dir() {\n match args[2..].iter().map(|x| x.parse()).collect() {\n Ok(predicates) => return analyze(path, predicates),\n Err(error) => eprintln!(\"Error: {}\", error),\n }\n }\n usage(&args[0]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #60360 - rasendubi:const-labeled-break, r=Centril<commit_after>\/\/ Using labeled break in a while loop has caused an illegal instruction being\n\/\/ generated, and an ICE later.\n\/\/\n\/\/ See https:\/\/github.com\/rust-lang\/rust\/issues\/51350 for more information.\n\nconst CRASH: () = 'a: while break 'a {};\n\nfn main() {\n println!(\"{:?}\", CRASH);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test that causes an ICE currently<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct S<'self, T> {\n o: &'self Option<T>\n}\n\nfn main() {\n S { o: &None }; \/\/~ ERROR cannot determine a type for this expression: unconstrained type\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[Rust] static한 변수와 같은 이름을 local에서 사용 못함<commit_after>const a: i32 = 0;\nfn main() {\n let a = 0;\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! 
A syntax highlighting plugin based on syntect.\n\nextern crate serde;\nextern crate syntect;\n#[macro_use]\nextern crate xi_plugin_lib;\n\nmod stackmap;\n\nuse xi_plugin_lib::caching_plugin::{self, PluginCtx};\nuse xi_plugin_lib::plugin_base::ScopeSpan;\nuse syntect::parsing::{ParseState, ScopeStack, SyntaxSet, SCOPE_REPO};\nuse stackmap::{StackMap, LookupResult};\n\n\nstruct PluginState<'a> {\n syntax_set: &'a SyntaxSet,\n stack_idents: StackMap,\n line_num: usize,\n offset: usize,\n parse_state: Option<ParseState>,\n scope_state: ScopeStack,\n spans_start: usize,\n \/\/ unflushed spans\n spans: Vec<ScopeSpan>,\n new_scopes: Vec<Vec<String>>,\n syntax_name: String,\n}\n\n\nimpl<'a> PluginState<'a> {\n pub fn new(syntax_set: &'a SyntaxSet) -> Self {\n PluginState {\n syntax_set: syntax_set,\n stack_idents: StackMap::default(),\n line_num: 0,\n offset: 0,\n parse_state: None,\n scope_state: ScopeStack::new(),\n spans_start: 0,\n spans: Vec::new(),\n new_scopes: Vec::new(),\n syntax_name: String::from(\"None\"),\n }\n }\n\n \/\/ Return true if there's more to do.\n fn highlight_one_line(&mut self, ctx: &mut PluginCtx) -> bool {\n let line = ctx.get_line(self.line_num);\n if let Err(err) = line {\n print_err!(\"Error: {:?}\", err);\n return false;\n }\n let line = line.unwrap();\n if line.is_none() {\n return false;\n }\n let line = line.unwrap();\n let ops = self.parse_state.as_mut().unwrap().parse_line(&line);\n if self.spans.is_empty() {\n self.spans_start = self.offset;\n }\n\n let mut prev_cursor = 0;\n let repo = SCOPE_REPO.lock().unwrap();\n for (cursor, batch) in ops {\n if self.scope_state.len() > 0 {\n let scope_ident = self.stack_idents.get_value(self.scope_state.as_slice());\n \/\/print_err!(\"scope ident: {:?}\", scope_ident);\n let scope_ident = match scope_ident {\n LookupResult::Existing(id) => id,\n LookupResult::New(id) => {\n let stack_strings = self.scope_state.as_slice().iter()\n .map(|slice| repo.to_string(*slice))\n .collect::<Vec<_>>();\n self.new_scopes.push(stack_strings);\n id\n }\n };\n\n let start = self.offset - self.spans_start + prev_cursor;\n let end = start + (cursor - prev_cursor);\n let span = ScopeSpan::new(start, end, scope_ident);\n self.spans.push(span);\n }\n prev_cursor = cursor;\n self.scope_state.apply(&batch);\n }\n\n self.line_num += 1;\n self.offset += line.len();\n true\n }\n\n fn flush_spans(&mut self, ctx: &mut PluginCtx) {\n if !self.new_scopes.is_empty() {\n ctx.add_scopes(&self.new_scopes);\n }\n if !self.spans.is_empty() {\n ctx.update_spans(self.spans_start, self.offset - self.spans_start,\n self.spans.as_slice());\n }\n }\n\n fn do_highlighting(&mut self, mut ctx: PluginCtx) {\n let syntax = match ctx.get_path() {\n Some(ref path) => self.syntax_set.find_syntax_for_file(path).unwrap()\n .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text()),\n None => self.syntax_set.find_syntax_plain_text(),\n };\n\n if syntax.name != self.syntax_name {\n self.syntax_name = syntax.name.clone();\n print_err!(\"syntect using {}\", syntax.name);\n }\n\n self.parse_state = Some(ParseState::new(syntax));\n self.scope_state = ScopeStack::new();\n self.spans = Vec::new();\n self.new_scopes = Vec::new();\n self.line_num = 0;\n self.offset = 0;\n ctx.schedule_idle(0);\n }\n}\n\nconst LINES_PER_RPC: usize = 50;\n\nimpl<'a> caching_plugin::Handler for PluginState<'a> {\n fn initialize(&mut self, ctx: PluginCtx, _buf_size: usize) {\n self.do_highlighting(ctx);\n }\n\n fn update(&mut self, ctx: PluginCtx) {\n self.do_highlighting(ctx);\n }\n\n fn idle(&mut 
self, mut ctx: PluginCtx, _token: usize) {\n print_err!(\"idle task at line {}\", self.line_num);\n for _ in 0..LINES_PER_RPC {\n if !self.highlight_one_line(&mut ctx) {\n self.flush_spans(&mut ctx);\n return;\n }\n if ctx.request_is_pending() {\n print_err!(\"request pending at line {}\", self.line_num);\n break;\n }\n }\n self.flush_spans(&mut ctx);\n ctx.schedule_idle(0);\n }\n}\n\nfn main() {\n let syntax_set = SyntaxSet::load_defaults_newlines();\n let mut state = PluginState::new(&syntax_set);\n\n caching_plugin::mainloop(&mut state);\n}\n<commit_msg>Fix highlighting irregularities<commit_after>\/\/ Copyright 2016 Google Inc. All rights reserved.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 (the \"License\");\n\/\/ you may not use this file except in compliance with the License.\n\/\/ You may obtain a copy of the License at\n\/\/\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0\n\/\/\n\/\/ Unless required by applicable law or agreed to in writing, software\n\/\/ distributed under the License is distributed on an \"AS IS\" BASIS,\n\/\/ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\/\/ See the License for the specific language governing permissions and\n\/\/ limitations under the License.\n\n\/\/! A syntax highlighting plugin based on syntect.\n\nextern crate serde;\nextern crate syntect;\n#[macro_use]\nextern crate xi_plugin_lib;\n\nmod stackmap;\n\nuse xi_plugin_lib::caching_plugin::{self, PluginCtx};\nuse xi_plugin_lib::plugin_base::ScopeSpan;\nuse syntect::parsing::{ParseState, ScopeStack, SyntaxSet, SCOPE_REPO};\nuse stackmap::{StackMap, LookupResult};\n\n\nstruct PluginState<'a> {\n syntax_set: &'a SyntaxSet,\n stack_idents: StackMap,\n line_num: usize,\n offset: usize,\n parse_state: Option<ParseState>,\n scope_state: ScopeStack,\n spans_start: usize,\n \/\/ unflushed spans\n spans: Vec<ScopeSpan>,\n new_scopes: Vec<Vec<String>>,\n syntax_name: String,\n}\n\n\nimpl<'a> PluginState<'a> {\n pub fn new(syntax_set: &'a SyntaxSet) -> Self {\n PluginState {\n syntax_set: syntax_set,\n stack_idents: StackMap::default(),\n line_num: 0,\n offset: 0,\n parse_state: None,\n scope_state: ScopeStack::new(),\n spans_start: 0,\n spans: Vec::new(),\n new_scopes: Vec::new(),\n syntax_name: String::from(\"None\"),\n }\n }\n\n \/\/ Return true if there's more to do.\n fn highlight_one_line(&mut self, ctx: &mut PluginCtx) -> bool {\n let line = ctx.get_line(self.line_num);\n if let Err(err) = line {\n print_err!(\"Error: {:?}\", err);\n return false;\n }\n let line = line.unwrap();\n if line.is_none() {\n return false;\n }\n let line = line.unwrap();\n let ops = self.parse_state.as_mut().unwrap().parse_line(&line);\n\n let mut prev_cursor = 0;\n let repo = SCOPE_REPO.lock().unwrap();\n for (cursor, batch) in ops {\n if self.scope_state.len() > 0 {\n let scope_ident = self.stack_idents.get_value(self.scope_state.as_slice());\n let scope_ident = match scope_ident {\n LookupResult::Existing(id) => id,\n LookupResult::New(id) => {\n let stack_strings = self.scope_state.as_slice().iter()\n .map(|slice| repo.to_string(*slice))\n .collect::<Vec<_>>();\n self.new_scopes.push(stack_strings);\n id\n }\n };\n\n let start = self.offset - self.spans_start + prev_cursor;\n let end = start + (cursor - prev_cursor);\n let span = ScopeSpan::new(start, end, scope_ident);\n self.spans.push(span);\n }\n prev_cursor = cursor;\n self.scope_state.apply(&batch);\n }\n\n self.line_num += 1;\n self.offset += line.len();\n true\n }\n\n fn flush_spans(&mut self, ctx: &mut 
PluginCtx) {\n if !self.new_scopes.is_empty() {\n ctx.add_scopes(&self.new_scopes);\n self.new_scopes.clear();\n }\n if !self.spans.is_empty() {\n ctx.update_spans(self.spans_start, self.offset - self.spans_start,\n self.spans.as_slice());\n self.spans.clear();\n }\n self.spans_start = self.offset;\n }\n\n fn do_highlighting(&mut self, mut ctx: PluginCtx) {\n let syntax = match ctx.get_path() {\n Some(ref path) => self.syntax_set.find_syntax_for_file(path).unwrap()\n .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text()),\n None => self.syntax_set.find_syntax_plain_text(),\n };\n\n if syntax.name != self.syntax_name {\n self.syntax_name = syntax.name.clone();\n print_err!(\"syntect using {}\", syntax.name);\n }\n\n self.parse_state = Some(ParseState::new(syntax));\n self.scope_state = ScopeStack::new();\n self.spans = Vec::new();\n self.new_scopes = Vec::new();\n self.line_num = 0;\n self.offset = 0;\n self.spans_start = 0;\n ctx.schedule_idle(0);\n }\n}\n\nconst LINES_PER_RPC: usize = 50;\n\nimpl<'a> caching_plugin::Handler for PluginState<'a> {\n fn initialize(&mut self, ctx: PluginCtx, _buf_size: usize) {\n self.do_highlighting(ctx);\n }\n\n fn update(&mut self, ctx: PluginCtx) {\n self.do_highlighting(ctx);\n }\n\n fn idle(&mut self, mut ctx: PluginCtx, _token: usize) {\n \/\/print_err!(\"idle task at line {}\", self.line_num);\n for _ in 0..LINES_PER_RPC {\n if !self.highlight_one_line(&mut ctx) {\n self.flush_spans(&mut ctx);\n return;\n }\n if ctx.request_is_pending() {\n print_err!(\"request pending at line {}\", self.line_num);\n break;\n }\n }\n self.flush_spans(&mut ctx);\n ctx.schedule_idle(0);\n }\n}\n\nfn main() {\n let syntax_set = SyntaxSet::load_defaults_newlines();\n let mut state = PluginState::new(&syntax_set);\n\n caching_plugin::mainloop(&mut state);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>new test case demonstrating ability to return ptr to interior of option<commit_after>fn get<T>(opt: &option<T>) -> &T {\n match *opt {\n some(ref v) => v,\n none => fail ~\"none\"\n }\n}\n\nfn main() {\n let mut x = some(23);\n\n {\n let y = get(&x);\n assert *y == 23;\n }\n\n x = some(24);\n\n {\n let y = get(&x);\n assert *y == 24;\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. 
If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/\/ Code to handle management of a pool's thinpool device.\n\nuse std::process::Command;\n\nuse devicemapper;\nuse devicemapper::{Bytes, DM, DataBlocks, DmError, DmResult, LinearDev, MetaBlocks, Sectors,\n Segment, ThinDev, ThinDevId, ThinPoolDev, ThinPoolStatus};\n\nuse super::super::consts::IEC;\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::types::PoolUuid;\n\nuse super::dmdevice::{FlexRole, ThinDevIdPool, ThinPoolRole, format_flex_name,\n format_thinpool_name};\nuse super::serde_structs::{Recordable, ThinPoolDevSave};\n\npub const DATA_BLOCK_SIZE: Sectors = Sectors(2048);\npub const DATA_LOWATER: DataBlocks = DataBlocks(512);\npub const META_LOWATER: MetaBlocks = MetaBlocks(512);\n\n\/\/\/ A ThinPool struct contains the thinpool itself, but also the spare\n\/\/\/ segments for its metadata device.\n#[derive(Debug)]\npub struct ThinPool {\n thin_pool: ThinPoolDev,\n meta_spare: Vec<Segment>,\n id_gen: ThinDevIdPool,\n}\n\nimpl ThinPool {\n \/\/\/ Make a new thin pool.\n pub fn new(pool_uuid: PoolUuid,\n dm: &DM,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n spare_segments: Vec<Segment>,\n meta_dev: LinearDev,\n data_dev: LinearDev)\n -> EngineResult<ThinPool> {\n let name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n let thinpool_dev = try!(ThinPoolDev::new(&name,\n dm,\n try!(data_dev.size()),\n data_block_size,\n low_water_mark,\n meta_dev,\n data_dev));\n Ok(ThinPool {\n thin_pool: thinpool_dev,\n meta_spare: spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(&[]),\n })\n }\n\n \/\/\/ Set up an \"existing\" thin pool.\n \/\/\/ A thin pool must store the metadata for its thin devices, regardless of\n \/\/\/ whether it has an existing device node. An existing thin pool device\n \/\/\/ is a device where the metadata is already stored on its meta device.\n \/\/\/ If initial setup fails due to a thin_check failure, attempt to fix\n \/\/\/ the problem by running thin_repair. 
If failure recurs, return an\n \/\/\/ error.\n pub fn setup(pool_uuid: PoolUuid,\n dm: &DM,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n thin_ids: &[ThinDevId],\n spare_segments: Vec<Segment>,\n meta_dev: LinearDev,\n data_dev: LinearDev)\n -> EngineResult<ThinPool> {\n let name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n let size = try!(data_dev.size());\n match ThinPoolDev::setup(&name,\n dm,\n size,\n data_block_size,\n low_water_mark,\n meta_dev,\n data_dev) {\n Ok(dev) => {\n Ok(ThinPool {\n thin_pool: dev,\n meta_spare: spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(thin_ids),\n })\n }\n Err(DmError::Dm(devicemapper::ErrorEnum::CheckFailed(meta_dev, data_dev), _)) => {\n let (new_meta_dev, new_spare_segments) =\n try!(attempt_thin_repair(pool_uuid, dm, meta_dev, spare_segments));\n Ok(ThinPool {\n thin_pool: try!(ThinPoolDev::setup(&name,\n dm,\n size,\n data_block_size,\n low_water_mark,\n new_meta_dev,\n data_dev)),\n meta_spare: new_spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(thin_ids),\n })\n }\n Err(err) => Err(err.into()),\n }\n }\n\n \/\/\/ The status of the thin pool as calculated by DM.\n pub fn thin_pool_status(&self, dm: &DM) -> EngineResult<ThinPoolStatus> {\n Ok(try!(self.thin_pool.status(dm)))\n }\n\n \/\/\/ Make a new thin device.\n pub fn make_thin_device(&mut self,\n dm: &DM,\n name: &str,\n size: Option<Sectors>)\n -> EngineResult<ThinDev> {\n Ok(try!(ThinDev::new(name,\n dm,\n &self.thin_pool,\n try!(self.id_gen.new_id()),\n size.unwrap_or(Bytes(IEC::Ti).sectors()))))\n }\n\n \/\/\/ Setup a previously constructed thin device.\n pub fn setup_thin_device(&self,\n dm: &DM,\n name: &str,\n id: ThinDevId,\n size: Sectors)\n -> EngineResult<ThinDev> {\n Ok(try!(ThinDev::setup(name, dm, &self.thin_pool, id, size)))\n }\n\n \/\/\/ Tear down the thin pool.\n pub fn teardown(self, dm: &DM) -> DmResult<()> {\n self.thin_pool.teardown(dm)\n }\n\n \/\/\/ Get an immutable reference to the sparse segments of the ThinPool.\n pub fn spare_segments(&self) -> &[Segment] {\n &self.meta_spare\n }\n\n \/\/\/ The segments belonging to the thin pool meta device.\n pub fn thin_pool_meta_segments(&self) -> &[Segment] {\n self.thin_pool.meta_dev().segments()\n }\n\n \/\/\/ The segments belonging to the thin pool data device.\n pub fn thin_pool_data_segments(&self) -> &[Segment] {\n self.thin_pool.data_dev().segments()\n }\n\n \/\/\/ Extend the thinpool with new data regions.\n pub fn extend_data(&mut self, dm: &DM, segs: Vec<Segment>) -> EngineResult<()> {\n Ok(try!(self.thin_pool.extend_data(dm, segs)))\n }\n\n \/\/\/ The number of physical sectors in use, that is, unavailable for storage\n \/\/\/ of additional user data, by this pool.\n \/\/ This includes all the sectors being held as spares for the meta device,\n \/\/ all the sectors allocated to the meta data device, and all the sectors\n \/\/ in use on the data device.\n pub fn total_physical_used(&self) -> EngineResult<Sectors> {\n let data_dev_used = match try!(self.thin_pool.status(&try!(DM::new()))) {\n ThinPoolStatus::Good(_, usage) => *usage.used_data * DATA_BLOCK_SIZE,\n _ => {\n let err_msg = \"thin pool failed, could not obtain usage\";\n return Err(EngineError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n };\n\n let spare_total = self.spare_segments().iter().map(|s| s.length).sum();\n let meta_dev_total = self.thin_pool\n .meta_dev()\n .segments()\n .iter()\n .map(|s| s.length)\n .sum();\n\n Ok(data_dev_used + spare_total + meta_dev_total)\n }\n}\n\nimpl 
Recordable<ThinPoolDevSave> for ThinPool {\n fn record(&self) -> EngineResult<ThinPoolDevSave> {\n Ok(ThinPoolDevSave { data_block_size: self.thin_pool.data_block_size() })\n }\n}\n\n\/\/\/ Attempt a thin repair operation on the meta device.\n\/\/\/ If the operation succeeds, teardown the old meta device,\n\/\/\/ and return the new meta device and the new spare segments.\nfn attempt_thin_repair(pool_uuid: PoolUuid,\n dm: &DM,\n meta_dev: LinearDev,\n mut spare_segments: Vec<Segment>)\n -> EngineResult<(LinearDev, Vec<Segment>)> {\n let mut new_meta_dev = try!(LinearDev::new(&format_flex_name(&pool_uuid,\n FlexRole::ThinMetaSpare),\n dm,\n spare_segments.drain(..).collect()));\n\n\n if !try!(Command::new(\"thin_repair\")\n .arg(\"-i\")\n .arg(&try!(meta_dev.devnode()))\n .arg(\"-o\")\n .arg(&try!(new_meta_dev.devnode()))\n .status())\n .success() {\n return Err(EngineError::Engine(ErrorEnum::Error,\n \"thin_repair failed, pool unusable\".into()));\n }\n\n let name = meta_dev.name().to_owned();\n let new_spare_segments = meta_dev\n .segments()\n .iter()\n .map(|x| {\n Segment {\n start: x.start,\n length: x.length,\n device: x.device,\n }\n })\n .collect();\n try!(meta_dev.teardown(dm));\n try!(new_meta_dev.set_name(dm, &name));\n\n Ok((new_meta_dev, new_spare_segments))\n}\n<commit_msg>Allow 8 arguments in ThinPool::setup()<commit_after>\/\/ This Source Code Form is subject to the terms of the Mozilla Public\n\/\/ License, v. 2.0. If a copy of the MPL was not distributed with this\n\/\/ file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/.\n\n\/\/\/ Code to handle management of a pool's thinpool device.\n\nuse std::process::Command;\n\nuse devicemapper;\nuse devicemapper::{Bytes, DM, DataBlocks, DmError, DmResult, LinearDev, MetaBlocks, Sectors,\n Segment, ThinDev, ThinDevId, ThinPoolDev, ThinPoolStatus};\n\nuse super::super::consts::IEC;\nuse super::super::errors::{EngineError, EngineResult, ErrorEnum};\nuse super::super::types::PoolUuid;\n\nuse super::dmdevice::{FlexRole, ThinDevIdPool, ThinPoolRole, format_flex_name,\n format_thinpool_name};\nuse super::serde_structs::{Recordable, ThinPoolDevSave};\n\npub const DATA_BLOCK_SIZE: Sectors = Sectors(2048);\npub const DATA_LOWATER: DataBlocks = DataBlocks(512);\npub const META_LOWATER: MetaBlocks = MetaBlocks(512);\n\n\/\/\/ A ThinPool struct contains the thinpool itself, but also the spare\n\/\/\/ segments for its metadata device.\n#[derive(Debug)]\npub struct ThinPool {\n thin_pool: ThinPoolDev,\n meta_spare: Vec<Segment>,\n id_gen: ThinDevIdPool,\n}\n\nimpl ThinPool {\n \/\/\/ Make a new thin pool.\n pub fn new(pool_uuid: PoolUuid,\n dm: &DM,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n spare_segments: Vec<Segment>,\n meta_dev: LinearDev,\n data_dev: LinearDev)\n -> EngineResult<ThinPool> {\n let name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n let thinpool_dev = try!(ThinPoolDev::new(&name,\n dm,\n try!(data_dev.size()),\n data_block_size,\n low_water_mark,\n meta_dev,\n data_dev));\n Ok(ThinPool {\n thin_pool: thinpool_dev,\n meta_spare: spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(&[]),\n })\n }\n\n \/\/\/ Set up an \"existing\" thin pool.\n \/\/\/ A thin pool must store the metadata for its thin devices, regardless of\n \/\/\/ whether it has an existing device node. An existing thin pool device\n \/\/\/ is a device where the metadata is already stored on its meta device.\n \/\/\/ If initial setup fails due to a thin_check failure, attempt to fix\n \/\/\/ the problem by running thin_repair. 
If failure recurs, return an\n \/\/\/ error.\n #[allow(too_many_arguments)]\n pub fn setup(pool_uuid: PoolUuid,\n dm: &DM,\n data_block_size: Sectors,\n low_water_mark: DataBlocks,\n thin_ids: &[ThinDevId],\n spare_segments: Vec<Segment>,\n meta_dev: LinearDev,\n data_dev: LinearDev)\n -> EngineResult<ThinPool> {\n let name = format_thinpool_name(&pool_uuid, ThinPoolRole::Pool);\n let size = try!(data_dev.size());\n match ThinPoolDev::setup(&name,\n dm,\n size,\n data_block_size,\n low_water_mark,\n meta_dev,\n data_dev) {\n Ok(dev) => {\n Ok(ThinPool {\n thin_pool: dev,\n meta_spare: spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(thin_ids),\n })\n }\n Err(DmError::Dm(devicemapper::ErrorEnum::CheckFailed(meta_dev, data_dev), _)) => {\n let (new_meta_dev, new_spare_segments) =\n try!(attempt_thin_repair(pool_uuid, dm, meta_dev, spare_segments));\n Ok(ThinPool {\n thin_pool: try!(ThinPoolDev::setup(&name,\n dm,\n size,\n data_block_size,\n low_water_mark,\n new_meta_dev,\n data_dev)),\n meta_spare: new_spare_segments,\n id_gen: ThinDevIdPool::new_from_ids(thin_ids),\n })\n }\n Err(err) => Err(err.into()),\n }\n }\n\n \/\/\/ The status of the thin pool as calculated by DM.\n pub fn thin_pool_status(&self, dm: &DM) -> EngineResult<ThinPoolStatus> {\n Ok(try!(self.thin_pool.status(dm)))\n }\n\n \/\/\/ Make a new thin device.\n pub fn make_thin_device(&mut self,\n dm: &DM,\n name: &str,\n size: Option<Sectors>)\n -> EngineResult<ThinDev> {\n Ok(try!(ThinDev::new(name,\n dm,\n &self.thin_pool,\n try!(self.id_gen.new_id()),\n size.unwrap_or(Bytes(IEC::Ti).sectors()))))\n }\n\n \/\/\/ Setup a previously constructed thin device.\n pub fn setup_thin_device(&self,\n dm: &DM,\n name: &str,\n id: ThinDevId,\n size: Sectors)\n -> EngineResult<ThinDev> {\n Ok(try!(ThinDev::setup(name, dm, &self.thin_pool, id, size)))\n }\n\n \/\/\/ Tear down the thin pool.\n pub fn teardown(self, dm: &DM) -> DmResult<()> {\n self.thin_pool.teardown(dm)\n }\n\n \/\/\/ Get an immutable reference to the sparse segments of the ThinPool.\n pub fn spare_segments(&self) -> &[Segment] {\n &self.meta_spare\n }\n\n \/\/\/ The segments belonging to the thin pool meta device.\n pub fn thin_pool_meta_segments(&self) -> &[Segment] {\n self.thin_pool.meta_dev().segments()\n }\n\n \/\/\/ The segments belonging to the thin pool data device.\n pub fn thin_pool_data_segments(&self) -> &[Segment] {\n self.thin_pool.data_dev().segments()\n }\n\n \/\/\/ Extend the thinpool with new data regions.\n pub fn extend_data(&mut self, dm: &DM, segs: Vec<Segment>) -> EngineResult<()> {\n Ok(try!(self.thin_pool.extend_data(dm, segs)))\n }\n\n \/\/\/ The number of physical sectors in use, that is, unavailable for storage\n \/\/\/ of additional user data, by this pool.\n \/\/ This includes all the sectors being held as spares for the meta device,\n \/\/ all the sectors allocated to the meta data device, and all the sectors\n \/\/ in use on the data device.\n pub fn total_physical_used(&self) -> EngineResult<Sectors> {\n let data_dev_used = match try!(self.thin_pool.status(&try!(DM::new()))) {\n ThinPoolStatus::Good(_, usage) => *usage.used_data * DATA_BLOCK_SIZE,\n _ => {\n let err_msg = \"thin pool failed, could not obtain usage\";\n return Err(EngineError::Engine(ErrorEnum::Invalid, err_msg.into()));\n }\n };\n\n let spare_total = self.spare_segments().iter().map(|s| s.length).sum();\n let meta_dev_total = self.thin_pool\n .meta_dev()\n .segments()\n .iter()\n .map(|s| s.length)\n .sum();\n\n Ok(data_dev_used + spare_total + meta_dev_total)\n 
}\n}\n\nimpl Recordable<ThinPoolDevSave> for ThinPool {\n fn record(&self) -> EngineResult<ThinPoolDevSave> {\n Ok(ThinPoolDevSave { data_block_size: self.thin_pool.data_block_size() })\n }\n}\n\n\/\/\/ Attempt a thin repair operation on the meta device.\n\/\/\/ If the operation succeeds, teardown the old meta device,\n\/\/\/ and return the new meta device and the new spare segments.\nfn attempt_thin_repair(pool_uuid: PoolUuid,\n dm: &DM,\n meta_dev: LinearDev,\n mut spare_segments: Vec<Segment>)\n -> EngineResult<(LinearDev, Vec<Segment>)> {\n let mut new_meta_dev = try!(LinearDev::new(&format_flex_name(&pool_uuid,\n FlexRole::ThinMetaSpare),\n dm,\n spare_segments.drain(..).collect()));\n\n\n if !try!(Command::new(\"thin_repair\")\n .arg(\"-i\")\n .arg(&try!(meta_dev.devnode()))\n .arg(\"-o\")\n .arg(&try!(new_meta_dev.devnode()))\n .status())\n .success() {\n return Err(EngineError::Engine(ErrorEnum::Error,\n \"thin_repair failed, pool unusable\".into()));\n }\n\n let name = meta_dev.name().to_owned();\n let new_spare_segments = meta_dev\n .segments()\n .iter()\n .map(|x| {\n Segment {\n start: x.start,\n length: x.length,\n device: x.device,\n }\n })\n .collect();\n try!(meta_dev.teardown(dm));\n try!(new_meta_dev.set_name(dm, &name));\n\n Ok((new_meta_dev, new_spare_segments))\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for slice drop shims<commit_after>fn main() {\n std::ptr::drop_in_place::<[String]> as unsafe fn(_);\n}\n\n\/\/ END RUST SOURCE\n\n\/\/ START rustc.ptr-real_drop_in_place.[std__string__String].AddMovesForPackedDrops.before.mir\n\/\/ let mut _2: usize;\n\/\/ let mut _3: bool;\n\/\/ let mut _4: usize;\n\/\/ let mut _5: usize;\n\/\/ let mut _6: &mut std::string::String;\n\/\/ let mut _7: bool;\n\/\/ let mut _8: &mut std::string::String;\n\/\/ let mut _9: bool;\n\/\/ let mut _10: *mut std::string::String;\n\/\/ let mut _11: usize;\n\/\/ let mut _12: *mut std::string::String;\n\/\/ let mut _13: &mut std::string::String;\n\/\/ let mut _14: bool;\n\/\/ let mut _15: &mut std::string::String;\n\/\/ let mut _16: bool;\n\/\/ let mut _17: *mut [std::string::String];\n\/\/ bb0: {\n\/\/ goto -> bb15;\n\/\/ }\n\/\/ bb1: {\n\/\/ return;\n\/\/ }\n\/\/ bb2 (cleanup): {\n\/\/ resume;\n\/\/ }\n\/\/ bb3 (cleanup): {\n\/\/ _6 = &mut (*_1)[_4];\n\/\/ _4 = Add(_4, const 1usize);\n\/\/ drop((*_6)) -> bb4;\n\/\/ }\n\/\/ bb4 (cleanup): {\n\/\/ _7 = Eq(_4, _5);\n\/\/ switchInt(move _7) -> [false: bb3, otherwise: bb2];\n\/\/ }\n\/\/ bb5: {\n\/\/ _8 = &mut (*_1)[_4];\n\/\/ _4 = Add(_4, const 1usize);\n\/\/ drop((*_8)) -> [return: bb6, unwind: bb4];\n\/\/ }\n\/\/ bb6: {\n\/\/ _9 = Eq(_4, _5);\n\/\/ switchInt(move _9) -> [false: bb5, otherwise: bb1];\n\/\/ }\n\/\/ bb7: {\n\/\/ _5 = Len((*_1));\n\/\/ _4 = const 0usize;\n\/\/ goto -> bb6;\n\/\/ }\n\/\/ bb8: {\n\/\/ goto -> bb7;\n\/\/ }\n\/\/ bb9 (cleanup): {\n\/\/ _13 = &mut (*_10);\n\/\/ _10 = Offset(_10, const 1usize);\n\/\/ drop((*_13)) -> bb10;\n\/\/ }\n\/\/ bb10 (cleanup): {\n\/\/ _14 = Eq(_10, _12);\n\/\/ switchInt(move _14) -> [false: bb9, otherwise: bb2];\n\/\/ }\n\/\/ bb11: {\n\/\/ _15 = &mut (*_10);\n\/\/ _10 = Offset(_10, const 1usize);\n\/\/ drop((*_15)) -> [return: bb12, unwind: bb10];\n\/\/ }\n\/\/ bb12: {\n\/\/ _16 = Eq(_10, _12);\n\/\/ switchInt(move _16) -> [false: bb11, otherwise: bb1];\n\/\/ }\n\/\/ bb13: {\n\/\/ _11 = Len((*_1));\n\/\/ _17 = &mut (*_1);\n\/\/ _10 = move _17 as *mut std::string::String (Misc);\n\/\/ _12 = Offset(_10, move _11);\n\/\/ goto -> bb12;\n\/\/ }\n\/\/ bb14: {\n\/\/ goto -> bb13;\n\/\/ 
}\n\/\/ bb15: {\n\/\/ _2 = SizeOf(std::string::String);\n\/\/ _3 = Eq(move _2, const 0usize);\n\/\/ switchInt(move _3) -> [false: bb14, otherwise: bb8];\n\/\/ }\n\/\/ END rustc.ptr-real_drop_in_place.[std__string__String].AddMovesForPackedDrops.before.mir\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Cleanup<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Updates<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Fix float comparison bug found by clippy<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>[Auto] bin\/core\/markdown: Fix Clippy warnings<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\n#![feature(infer_outlives_requirements)]\n\nstruct Foo<U> {\n bar: Bar<U> \/\/~ ERROR 16:5: 16:16: the parameter type `U` may not live long enough [E0310]\n}\nstruct Bar<T: 'static> {\n x: T,\n}\n\nfn main() {}\n\n<commit_msg>some comment about not infering static lifetimes test<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ ignore-tidy-linelength\n\n#![feature(infer_outlives_requirements)]\n\n\/*\n * We don't infer `T: 'static` outlives relationships by default.\n * Instead an additional feature gate `infer_static_outlives_requirements`\n * is required.\n *\/\n\nstruct Foo<U> {\n bar: Bar<U> \/\/~ ERROR 22:5: 22:16: the parameter type `U` may not live long enough [E0310]\n}\nstruct Bar<T: 'static> {\n x: T,\n}\n\nfn main() {}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n#![allow(dead_code)]\n#![allow(unused_unsafe)]\n\nuse std::marker::Sync;\n\nstruct Foo {\n a: usize,\n b: *const ()\n}\n\nunsafe impl Sync for Foo {}\n\nfn foo<T>(a: T) -> T {\n a\n}\n\nstatic BLOCK_INTEGRAL: usize = { 1 };\nstatic BLOCK_EXPLICIT_UNIT: () = { () };\nstatic BLOCK_IMPLICIT_UNIT: () = { };\nstatic BLOCK_FLOAT: f64 = { 1.0 };\nstatic BLOCK_ENUM: Option<usize> = { Some(100) };\nstatic BLOCK_STRUCT: Foo = { Foo { a: 12, b: 0 as *const () } };\nstatic BLOCK_UNSAFE: usize = unsafe { 1000 };\n\nstatic BLOCK_FN_INFERRED: fn(usize) -> usize = { foo };\n\nstatic BLOCK_FN: fn(usize) -> usize = { foo::<usize> };\n\nstatic BLOCK_ENUM_CONSTRUCTOR: fn(usize) -> Option<usize> = { Some };\n\n\/\/ FIXME #13972\n\/\/ static BLOCK_UNSAFE_SAFE_PTR: &'static isize = unsafe { &*(0xdeadbeef as *const isize) };\n\/\/ static BLOCK_UNSAFE_SAFE_PTR_2: &'static isize = unsafe {\n\/\/ const X: *const isize = 0xdeadbeef as *const isize;\n\/\/ &*X\n\/\/ };\n\npub fn main() {\n assert_eq!(BLOCK_INTEGRAL, 1);\n assert_eq!(BLOCK_EXPLICIT_UNIT, ());\n assert_eq!(BLOCK_IMPLICIT_UNIT, ());\n assert_eq!(BLOCK_FLOAT, 1.0_f64);\n assert_eq!(BLOCK_STRUCT.a, 12);\n assert_eq!(BLOCK_STRUCT.b, 0 as *const ());\n assert_eq!(BLOCK_ENUM, Some(100));\n assert_eq!(BLOCK_UNSAFE, 1000);\n assert_eq!(BLOCK_FN_INFERRED(300), 300);\n assert_eq!(BLOCK_FN(300), 300);\n assert_eq!(BLOCK_ENUM_CONSTRUCTOR(200), Some(200));\n \/\/ FIXME #13972\n \/\/ assert_eq!(BLOCK_UNSAFE_SAFE_PTR as *const isize as usize, 0xdeadbeef);\n \/\/ assert_eq!(BLOCK_UNSAFE_SAFE_PTR_2 as *const isize as usize, 0xdeadbeef);\n}\n<commit_msg>Rollup merge of #52527 - ljedrz:cleanup_13973, r=oli-obk<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\n#![allow(dead_code)]\n#![allow(unused_unsafe)]\n\nuse std::marker::Sync;\n\nstruct Foo {\n a: usize,\n b: *const ()\n}\n\nunsafe impl Sync for Foo {}\n\nfn foo<T>(a: T) -> T {\n a\n}\n\nstatic BLOCK_INTEGRAL: usize = { 1 };\nstatic BLOCK_EXPLICIT_UNIT: () = { () };\nstatic BLOCK_IMPLICIT_UNIT: () = { };\nstatic BLOCK_FLOAT: f64 = { 1.0 };\nstatic BLOCK_ENUM: Option<usize> = { Some(100) };\nstatic BLOCK_STRUCT: Foo = { Foo { a: 12, b: 0 as *const () } };\nstatic BLOCK_UNSAFE: usize = unsafe { 1000 };\n\nstatic BLOCK_FN_INFERRED: fn(usize) -> usize = { foo };\n\nstatic BLOCK_FN: fn(usize) -> usize = { foo::<usize> };\n\nstatic BLOCK_ENUM_CONSTRUCTOR: fn(usize) -> Option<usize> = { Some };\n\npub fn main() {\n assert_eq!(BLOCK_INTEGRAL, 1);\n assert_eq!(BLOCK_EXPLICIT_UNIT, ());\n assert_eq!(BLOCK_IMPLICIT_UNIT, ());\n assert_eq!(BLOCK_FLOAT, 1.0_f64);\n assert_eq!(BLOCK_STRUCT.a, 12);\n assert_eq!(BLOCK_STRUCT.b, 0 as *const ());\n assert_eq!(BLOCK_ENUM, Some(100));\n assert_eq!(BLOCK_UNSAFE, 1000);\n assert_eq!(BLOCK_FN_INFERRED(300), 300);\n assert_eq!(BLOCK_FN(300), 300);\n assert_eq!(BLOCK_ENUM_CONSTRUCTOR(200), Some(200));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add test for issue #15735<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nstruct A<'a> {\n a: &'a i32,\n b: &'a i32,\n}\n\nimpl <'a> A<'a> {\n fn foo<'b>(&'b self) {\n A {\n a: self.a,\n b: self.b,\n };\n }\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse std::ops::{Mul, MulAssign};\n\npub trait ClosedMul<Right>: Sized + Mul<Right, Output = Self> + MulAssign<Right> {}\nimpl<T, Right> ClosedMul<Right> for T\nwhere\n T: Mul<Right, Output = T> + MulAssign<Right>,\n{\n}\n\npub trait InnerSpace: ClosedMul<<Self as InnerSpace>::Real> {\n type Real;\n}\n\npub trait FiniteDimVectorSpace: ClosedMul<<Self as FiniteDimVectorSpace>::Field> {\n type Field;\n}\n\npub trait FiniteDimInnerSpace\n : InnerSpace + FiniteDimVectorSpace<Field = <Self as InnerSpace>::Real> {\n}\n\npub trait EuclideanSpace: ClosedMul<<Self as EuclideanSpace>::Real> {\n type Coordinates: FiniteDimInnerSpace<Real = Self::Real>\n + Mul<Self::Real, Output = Self::Coordinates>\n + MulAssign<Self::Real>;\n\n type Real;\n}\n\nfn main() {}\n<commit_msg>Update issue-48551.rs<commit_after>\/\/ Copyright 2018 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Regression test for #48551. Covers a case where duplicate candidates\n\/\/ arose during associated type projection.\n\nuse std::ops::{Mul, MulAssign};\n\npub trait ClosedMul<Right>: Sized + Mul<Right, Output = Self> + MulAssign<Right> {}\nimpl<T, Right> ClosedMul<Right> for T\nwhere\n T: Mul<Right, Output = T> + MulAssign<Right>,\n{\n}\n\npub trait InnerSpace: ClosedMul<<Self as InnerSpace>::Real> {\n type Real;\n}\n\npub trait FiniteDimVectorSpace: ClosedMul<<Self as FiniteDimVectorSpace>::Field> {\n type Field;\n}\n\npub trait FiniteDimInnerSpace\n : InnerSpace + FiniteDimVectorSpace<Field = <Self as InnerSpace>::Real> {\n}\n\npub trait EuclideanSpace: ClosedMul<<Self as EuclideanSpace>::Real> {\n type Coordinates: FiniteDimInnerSpace<Real = Self::Real>\n + Mul<Self::Real, Output = Self::Coordinates>\n + MulAssign<Self::Real>;\n\n type Real;\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>pub fn add(num_l: &str, num_r: &str) -> String {\n let sum = String::from(num_l) + num_r;\n\n compress(&sum)\n}\n\npub fn compress(num: &str) -> String {\n match num {\n \"IIIII\" => \"V\".to_string(),\n \"IIII\" => \"IV\".to_string(),\n _ => num.to_string(),\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::add;\n use super::compress;\n\n #[test]\n fn add_i_i() {\n assert_eq!(\"II\", add(\"I\", \"I\"));\n }\n\n #[test]\n fn add_i_ii() {\n assert_eq!(\"III\", add(\"I\", \"II\"));\n }\n\n #[test]\n fn add_ii_iii() {\n assert_eq!(\"V\", add(\"II\", \"III\"));\n\n }\n\n #[test]\n fn compress_iiiii() {\n assert_eq!(\"V\", compress(\"IIIII\"));\n }\n\n #[test]\n fn compress_iiii() {\n assert_eq!(\"IV\", compress(\"IIII\"));\n }\n}\n<commit_msg>\"Simple\" version of V+I \/ I+V<commit_after>pub fn add(num_l: &str, num_r: &str) -> String {\n let mut sum: Vec<u8> = (String::from(num_l) + num_r).into();\n\n sum.sort();\n sum.reverse();\n\n compress(&String::from_utf8(sum).unwrap())\n}\n\npub fn compress(num: &str) -> String {\n match num {\n \"IIIII\" => \"V\".to_string(),\n \"IIII\" => \"IV\".to_string(),\n _ => num.to_string(),\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::add;\n use super::compress;\n\n #[test]\n fn add_i_i() {\n assert_eq!(\"II\", add(\"I\", \"I\"));\n }\n\n #[test]\n fn add_i_ii() {\n assert_eq!(\"III\", add(\"I\", \"II\"));\n }\n\n #[test]\n fn add_ii_iii() {\n assert_eq!(\"V\", add(\"II\", \"III\"));\n\n }\n\n #[test]\n fn add_v_i() {\n assert_eq!(\"VI\", add(\"V\", \"I\"));\n }\n\n #[test]\n fn add_i_v() {\n assert_eq!(\"VI\", add(\"I\", \"V\"));\n }\n\n #[test]\n fn compress_iiiii() {\n assert_eq!(\"V\", compress(\"IIIII\"));\n }\n\n #[test]\n fn compress_iiii() {\n assert_eq!(\"IV\", compress(\"IIII\"));\n }\n}\n<|endoftext|>"} {"text":"<commit_before>use latch::{CountLatch, Latch};\n#[allow(warnings)]\nuse log::Event::*;\nuse futures::{Async, Future, Poll};\nuse futures::future::CatchUnwind;\nuse futures::task::{self, Spawn, Task, Unpark};\nuse job::{Job, JobRef};\nuse std::any::Any;\nuse std::panic::AssertUnwindSafe;\nuse std::mem;\nuse std::sync::Arc;\nuse std::sync::atomic::AtomicUsize;\nuse 
std::sync::atomic::Ordering::*;\nuse std::sync::Mutex;\nuse thread_pool::{Registry, WorkerThread};\nuse unwind;\n\nconst STATE_PARKED: usize = 0;\nconst STATE_UNPARKED: usize = 1;\nconst STATE_EXECUTING: usize = 2;\nconst STATE_EXECUTING_UNPARKED: usize = 3;\nconst STATE_COMPLETE: usize = 4;\n\n\/\/ Warning: Public end-user API.\npub struct RayonFuture<T, E> {\n inner: Arc<ScopeFutureTrait<Result<T, E>, Box<Any + Send + 'static>>>\n}\n\n\/\/\/ This is a free fn so that we can expose `RayonFuture` as public API.\npub unsafe fn new_rayon_future<F>(future: F,\n counter: *const CountLatch)\n -> RayonFuture<F::Item, F::Error>\n where F: Future + Send\n{\n let inner = ScopeFuture::spawn(future, counter);\n RayonFuture { inner: hide_lifetime(inner) }\n}\n\nunsafe fn hide_lifetime<'l, T, E>(x: Arc<ScopeFutureTrait<T, E> + 'l>)\n -> Arc<ScopeFutureTrait<T, E>> {\n mem::transmute(x)\n}\n\nimpl<T, E> Future for RayonFuture<T, E> {\n type Item = T;\n type Error = E;\n\n fn poll(&mut self) -> Poll<T, E> {\n match self.inner.poll() {\n Ok(Async::Ready(Ok(v))) => Ok(Async::Ready(v)),\n Ok(Async::Ready(Err(e))) => Err(e),\n Ok(Async::NotReady) => Ok(Async::NotReady),\n Err(e) => unwind::resume_unwinding(e),\n }\n }\n}\n\nimpl<T, E> Drop for RayonFuture<T, E> {\n fn drop(&mut self) {\n self.inner.cancel();\n }\n}\n\n\/\/\/ \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nstruct ScopeFuture<F: Future + Send> {\n state: AtomicUsize,\n registry: Arc<Registry>,\n contents: Mutex<ScopeFutureContents<F>>,\n}\n\ntype CU<F> = CatchUnwind<AssertUnwindSafe<F>>;\n\nstruct ScopeFutureContents<F: Future + Send> {\n spawn: Option<Spawn<CU<F>>>,\n unpark: Option<Arc<Unpark>>,\n\n \/\/ Pointer to ourselves. We `None` this out when we are finished\n \/\/ executing, but it's convenient to keep around normally.\n this: Option<Arc<ScopeFuture<F>>>,\n\n \/\/ the counter in the scope; since the scope doesn't terminate until\n \/\/ counter reaches zero, and we hold a ref in this counter, we are\n \/\/ assured that this pointer remains valid\n counter: *const CountLatch,\n\n waiting_task: Option<Task>,\n result: Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error>,\n}\n\ntrait Ping: Send + Sync {\n fn ping(&self);\n}\n\n\/\/ Assert that the `*const` is safe to transmit between threads:\nunsafe impl<F: Future + Send> Send for ScopeFuture<F> {}\nunsafe impl<F: Future + Send> Sync for ScopeFuture<F> {}\n\nimpl<F: Future + Send> ScopeFuture<F> {\n \/\/ Unsafe: Caller asserts that `future` and `counter` will remain\n \/\/ valid until we invoke `counter.set()`.\n unsafe fn spawn(future: F,\n counter: *const CountLatch)\n -> Arc<Self> {\n let worker_thread = WorkerThread::current();\n debug_assert!(!worker_thread.is_null());\n\n \/\/ Using `AssertUnwindSafe` is valid here because (a) the data\n \/\/ is `Send + Sync`, which is our usual boundary and (b)\n \/\/ panics will be propagated when the `RayonFuture` is polled.\n let spawn = task::spawn(AssertUnwindSafe(future).catch_unwind());\n\n let future: Arc<Self> = Arc::new(ScopeFuture::<F> {\n state: AtomicUsize::new(STATE_PARKED),\n registry: (*worker_thread).registry().clone(),\n contents: Mutex::new(ScopeFutureContents {\n spawn: None,\n unpark: None,\n this: None,\n counter: counter,\n waiting_task: None,\n result: Ok(Async::NotReady),\n }),\n });\n\n \/\/ Make the two self-cycles. 
Note that these imply the future\n \/\/ cannot be freed until these fields are set to `None` (which\n \/\/ occurs when it is finished executing).\n {\n let mut contents = future.contents.try_lock().unwrap();\n contents.spawn = Some(spawn);\n contents.unpark = Some(Self::make_unpark(&future));\n contents.this = Some(future.clone());\n }\n\n future.ping();\n\n future\n }\n\n \/\/\/ Creates a `JobRef` from this job -- note that this hides all\n \/\/\/ lifetimes, so it is up to you to ensure that this JobRef\n \/\/\/ doesn't outlive any data that it closes over.\n unsafe fn into_job_ref(this: Arc<Self>) -> JobRef {\n let this: *const Self = mem::transmute(this);\n JobRef::new(this)\n }\n\n fn make_unpark(this: &Arc<Self>) -> Arc<Unpark> {\n \/\/ Hide any lifetimes in `self`. This is safe because, until\n \/\/ `self` is dropped, the counter is not decremented, and so\n \/\/ the `'scope` lifetimes cannot end.\n \/\/\n \/\/ Unfortunately, as `Unpark` currently requires `'static`, we\n \/\/ have to do an indirection and this ultimately requires a\n \/\/ fresh allocation.\n unsafe {\n let ping: PingUnpark = PingUnpark::new(this.clone());\n let ping: PingUnpark<'static> = mem::transmute(ping);\n Arc::new(ping)\n }\n }\n\n fn unpark_inherent(&self) {\n loop {\n let state = self.state.load(Acquire);\n if {\n state == STATE_PARKED &&\n self.state\n .compare_exchange_weak(state, STATE_UNPARKED, Release, Relaxed)\n .is_ok()\n } {\n \/\/ Contention here is unlikely but possible: a\n \/\/ previous execution might have moved us to the\n \/\/ PARKED state but not yet released the lock.\n let contents = self.contents.lock().unwrap();\n unsafe {\n let job_ref = Self::into_job_ref(contents.this.clone().unwrap());\n self.registry.inject(&[job_ref]);\n }\n return;\n } else if {\n state == STATE_EXECUTING &&\n self.state\n .compare_exchange_weak(state, STATE_EXECUTING_UNPARKED, Release, Relaxed)\n .is_ok()\n } {\n return;\n } else {\n debug_assert!(state == STATE_UNPARKED || state == STATE_EXECUTING_UNPARKED ||\n state == STATE_COMPLETE);\n return;\n }\n }\n }\n\n fn begin_execute_state(&self) {\n \/\/ When we are put into the unparked state, we are enqueued in\n \/\/ a worker thread. We should then be executed exactly once,\n \/\/ at which point we transiition to STATE_EXECUTING. Nobody\n \/\/ should be contending with us to change the state here.\n let state = self.state.load(Acquire);\n debug_assert_eq!(state, STATE_UNPARKED);\n let result = self.state.compare_exchange(state, STATE_EXECUTING, Release, Relaxed);\n debug_assert_eq!(result, Ok(STATE_UNPARKED));\n }\n\n fn end_execute_state(&self) -> bool {\n loop {\n let state = self.state.load(Acquire);\n if state == STATE_EXECUTING {\n if {\n self.state\n .compare_exchange_weak(state, STATE_PARKED, Release, Relaxed)\n .is_ok()\n } {\n \/\/ We put ourselves into parked state, no need to\n \/\/ re-execute. We'll just wait for the Unpark.\n return true;\n }\n } else {\n debug_assert_eq!(state, STATE_EXECUTING_UNPARKED);\n if {\n self.state\n .compare_exchange_weak(state, STATE_EXECUTING, Release, Relaxed)\n .is_ok()\n } {\n \/\/ We finished executing, but an unpark request\n \/\/ came in the meantime. We need to execute\n \/\/ again. 
Return false as we failed to end the\n \/\/ execution phase.\n return false;\n }\n }\n }\n }\n}\n\nimpl<F: Future + Send> Ping for ScopeFuture<F> {\n fn ping(&self) {\n self.unpark_inherent();\n }\n}\n\nimpl<F: Future + Send> Job for ScopeFuture<F> {\n unsafe fn execute(this: *const Self) {\n let this: Arc<Self> = mem::transmute(this);\n\n \/\/ *generally speaking* there should be no contention for the\n \/\/ lock, but it is possible -- we can end execution, get re-enqeueud,\n \/\/ and re-executed, before we have time to return from this fn\n let mut contents = this.contents.lock().unwrap();\n\n log!(FutureExecute { state: this.state.load(Relaxed) });\n\n this.begin_execute_state();\n loop {\n match contents.poll() {\n Ok(Async::Ready(v)) => {\n log!(FutureExecuteReady);\n return contents.complete(Ok(Async::Ready(v)));\n }\n Ok(Async::NotReady) => {\n log!(FutureExecuteNotReady);\n if this.end_execute_state() {\n return;\n }\n }\n Err(err) => {\n log!(FutureExecuteErr);\n return contents.complete(Err(err));\n }\n }\n }\n }\n}\n\nimpl<F: Future + Send> ScopeFutureContents<F> {\n fn poll(&mut self) -> Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error> {\n let unpark = self.unpark.clone().unwrap();\n self.spawn.as_mut().unwrap().poll_future(unpark)\n }\n\n fn complete(&mut self, value: Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error>) {\n log!(FutureComplete);\n\n \/\/ So, this is subtle. We know that the type `F` may have some\n \/\/ data which is only valid until the end of the scope, and we\n \/\/ also know that the scope doesn't end until `self.counter`\n \/\/ is decremented below. So we want to be sure to drop\n \/\/ `self.future` first, lest its dtor try to access some of\n \/\/ that state or something!\n self.spawn.take().unwrap();\n\n self.unpark = None;\n self.result = value;\n let this = self.this.take().unwrap();\n if cfg!(debug_assertions) {\n let state = this.state.load(Relaxed);\n debug_assert!(state == STATE_EXECUTING || state == STATE_EXECUTING_UNPARKED,\n \"cannot complete when not executing (state = {})\",\n state);\n }\n this.state.store(STATE_COMPLETE, Release);\n\n if let Some(waiting_task) = self.waiting_task.take() {\n log!(FutureUnparkWaitingTask);\n waiting_task.unpark();\n }\n\n \/\/ allow the enclosing scope to end\n unsafe {\n (*self.counter).set();\n }\n }\n}\n\nstruct PingUnpark<'l> {\n ping: Arc<Ping + 'l>,\n}\n\nimpl<'l> PingUnpark<'l> {\n fn new(ping: Arc<Ping + 'l>) -> PingUnpark {\n PingUnpark { ping: ping }\n }\n}\n\nimpl Unpark for PingUnpark<'static> {\n fn unpark(&self) {\n self.ping.ping()\n }\n}\n\npub trait ScopeFutureTrait<T, E>: Send + Sync {\n fn poll(&self) -> Poll<T, E>;\n fn cancel(&self);\n}\n\nimpl<F> ScopeFutureTrait<<CU<F> as Future>::Item, <CU<F> as Future>::Error> for ScopeFuture<F>\n where F: Future + Send\n{\n fn poll(&self) -> Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error> {\n \/\/ Important: due to transmute hackery, not all the fields are\n \/\/ truly known to be valid at this point. In particular, the\n \/\/ type F is erased. 
But the `state` and `result` fields\n \/\/ should be valid.\n let mut contents = self.contents.lock().unwrap();\n let state = self.state.load(Relaxed);\n if state == STATE_COMPLETE {\n let r = mem::replace(&mut contents.result, Ok(Async::NotReady));\n return r;\n } else {\n assert!(contents.waiting_task.is_none());\n log!(FutureInstallWaitingTask { state: state });\n contents.waiting_task = Some(task::park());\n Ok(Async::NotReady)\n }\n }\n\n fn cancel(&self) {\n loop {\n let state = self.state.load(Relaxed);\n if state == STATE_COMPLETE {\n \/\/ no need to do anything\n return;\n } else {\n log!(FutureCancel { state: state });\n let mut contents = self.contents.lock().unwrap();\n if self.state.compare_exchange_weak(state, STATE_COMPLETE, Release, Relaxed).is_ok() {\n contents.complete(Ok(Async::NotReady));\n return;\n }\n }\n }\n }\n}\n\n<commit_msg>cleanup the compare-exchange loops<commit_after>use latch::{CountLatch, Latch};\n#[allow(warnings)]\nuse log::Event::*;\nuse futures::{Async, Future, Poll};\nuse futures::future::CatchUnwind;\nuse futures::task::{self, Spawn, Task, Unpark};\nuse job::{Job, JobRef};\nuse std::any::Any;\nuse std::panic::AssertUnwindSafe;\nuse std::mem;\nuse std::sync::Arc;\nuse std::sync::atomic::AtomicUsize;\nuse std::sync::atomic::Ordering::*;\nuse std::sync::Mutex;\nuse thread_pool::{Registry, WorkerThread};\nuse unwind;\n\nconst STATE_PARKED: usize = 0;\nconst STATE_UNPARKED: usize = 1;\nconst STATE_EXECUTING: usize = 2;\nconst STATE_EXECUTING_UNPARKED: usize = 3;\nconst STATE_COMPLETE: usize = 4;\n\n\/\/ Warning: Public end-user API.\npub struct RayonFuture<T, E> {\n inner: Arc<ScopeFutureTrait<Result<T, E>, Box<Any + Send + 'static>>>,\n}\n\n\/\/\/ This is a free fn so that we can expose `RayonFuture` as public API.\npub unsafe fn new_rayon_future<F>(future: F,\n counter: *const CountLatch)\n -> RayonFuture<F::Item, F::Error>\n where F: Future + Send\n{\n let inner = ScopeFuture::spawn(future, counter);\n RayonFuture { inner: hide_lifetime(inner) }\n}\n\nunsafe fn hide_lifetime<'l, T, E>(x: Arc<ScopeFutureTrait<T, E> + 'l>)\n -> Arc<ScopeFutureTrait<T, E>> {\n mem::transmute(x)\n}\n\nimpl<T, E> Future for RayonFuture<T, E> {\n type Item = T;\n type Error = E;\n\n fn poll(&mut self) -> Poll<T, E> {\n match self.inner.poll() {\n Ok(Async::Ready(Ok(v))) => Ok(Async::Ready(v)),\n Ok(Async::Ready(Err(e))) => Err(e),\n Ok(Async::NotReady) => Ok(Async::NotReady),\n Err(e) => unwind::resume_unwinding(e),\n }\n }\n}\n\nimpl<T, E> Drop for RayonFuture<T, E> {\n fn drop(&mut self) {\n self.inner.cancel();\n }\n}\n\n\/\/\/ \/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\n\nstruct ScopeFuture<F: Future + Send> {\n state: AtomicUsize,\n registry: Arc<Registry>,\n contents: Mutex<ScopeFutureContents<F>>,\n}\n\ntype CU<F> = CatchUnwind<AssertUnwindSafe<F>>;\n\nstruct ScopeFutureContents<F: Future + Send> {\n spawn: Option<Spawn<CU<F>>>,\n unpark: Option<Arc<Unpark>>,\n\n \/\/ Pointer to ourselves. 
We `None` this out when we are finished\n \/\/ executing, but it's convenient to keep around normally.\n this: Option<Arc<ScopeFuture<F>>>,\n\n \/\/ the counter in the scope; since the scope doesn't terminate until\n \/\/ counter reaches zero, and we hold a ref in this counter, we are\n \/\/ assured that this pointer remains valid\n counter: *const CountLatch,\n\n waiting_task: Option<Task>,\n result: Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error>,\n}\n\ntrait Ping: Send + Sync {\n fn ping(&self);\n}\n\n\/\/ Assert that the `*const` is safe to transmit between threads:\nunsafe impl<F: Future + Send> Send for ScopeFuture<F> {}\nunsafe impl<F: Future + Send> Sync for ScopeFuture<F> {}\n\nimpl<F: Future + Send> ScopeFuture<F> {\n \/\/ Unsafe: Caller asserts that `future` and `counter` will remain\n \/\/ valid until we invoke `counter.set()`.\n unsafe fn spawn(future: F, counter: *const CountLatch) -> Arc<Self> {\n let worker_thread = WorkerThread::current();\n debug_assert!(!worker_thread.is_null());\n\n \/\/ Using `AssertUnwindSafe` is valid here because (a) the data\n \/\/ is `Send + Sync`, which is our usual boundary and (b)\n \/\/ panics will be propagated when the `RayonFuture` is polled.\n let spawn = task::spawn(AssertUnwindSafe(future).catch_unwind());\n\n let future: Arc<Self> = Arc::new(ScopeFuture::<F> {\n state: AtomicUsize::new(STATE_PARKED),\n registry: (*worker_thread).registry().clone(),\n contents: Mutex::new(ScopeFutureContents {\n spawn: None,\n unpark: None,\n this: None,\n counter: counter,\n waiting_task: None,\n result: Ok(Async::NotReady),\n }),\n });\n\n \/\/ Make the two self-cycles. Note that these imply the future\n \/\/ cannot be freed until these fields are set to `None` (which\n \/\/ occurs when it is finished executing).\n {\n let mut contents = future.contents.try_lock().unwrap();\n contents.spawn = Some(spawn);\n contents.unpark = Some(Self::make_unpark(&future));\n contents.this = Some(future.clone());\n }\n\n future.ping();\n\n future\n }\n\n \/\/\/ Creates a `JobRef` from this job -- note that this hides all\n \/\/\/ lifetimes, so it is up to you to ensure that this JobRef\n \/\/\/ doesn't outlive any data that it closes over.\n unsafe fn into_job_ref(this: Arc<Self>) -> JobRef {\n let this: *const Self = mem::transmute(this);\n JobRef::new(this)\n }\n\n fn make_unpark(this: &Arc<Self>) -> Arc<Unpark> {\n \/\/ Hide any lifetimes in `self`. 
This is safe because, until\n \/\/ `self` is dropped, the counter is not decremented, and so\n \/\/ the `'scope` lifetimes cannot end.\n \/\/\n \/\/ Unfortunately, as `Unpark` currently requires `'static`, we\n \/\/ have to do an indirection and this ultimately requires a\n \/\/ fresh allocation.\n unsafe {\n let ping: PingUnpark = PingUnpark::new(this.clone());\n let ping: PingUnpark<'static> = mem::transmute(ping);\n Arc::new(ping)\n }\n }\n\n fn unpark_inherent(&self) {\n loop {\n match self.state.load(Relaxed) {\n STATE_PARKED => {\n if {\n self.state\n .compare_exchange_weak(STATE_PARKED, STATE_UNPARKED, Release, Relaxed)\n .is_ok()\n } {\n \/\/ Contention here is unlikely but possible: a\n \/\/ previous execution might have moved us to the\n \/\/ PARKED state but not yet released the lock.\n let contents = self.contents.lock().unwrap();\n unsafe {\n let job_ref = Self::into_job_ref(contents.this.clone().unwrap());\n self.registry.inject(&[job_ref]);\n }\n return;\n }\n }\n\n STATE_EXECUTING => {\n if {\n self.state\n .compare_exchange_weak(STATE_EXECUTING,\n STATE_EXECUTING_UNPARKED,\n Release,\n Relaxed)\n .is_ok()\n } {\n return;\n }\n }\n\n state => {\n debug_assert!(state == STATE_UNPARKED || state == STATE_EXECUTING_UNPARKED ||\n state == STATE_COMPLETE);\n return;\n }\n }\n }\n }\n\n fn begin_execute_state(&self) {\n \/\/ When we are put into the unparked state, we are enqueued in\n \/\/ a worker thread. We should then be executed exactly once,\n \/\/ at which point we transiition to STATE_EXECUTING. Nobody\n \/\/ should be contending with us to change the state here.\n let state = self.state.load(Acquire);\n debug_assert_eq!(state, STATE_UNPARKED);\n let result = self.state.compare_exchange(state, STATE_EXECUTING, Release, Relaxed);\n debug_assert_eq!(result, Ok(STATE_UNPARKED));\n }\n\n fn end_execute_state(&self) -> bool {\n loop {\n match self.state.load(Relaxed) {\n STATE_EXECUTING => {\n if {\n self.state\n .compare_exchange_weak(STATE_EXECUTING, STATE_PARKED, Release, Relaxed)\n .is_ok()\n } {\n \/\/ We put ourselves into parked state, no need to\n \/\/ re-execute. We'll just wait for the Unpark.\n return true;\n }\n }\n\n state => {\n debug_assert_eq!(state, STATE_EXECUTING_UNPARKED);\n if {\n self.state\n .compare_exchange_weak(state, STATE_EXECUTING, Release, Relaxed)\n .is_ok()\n } {\n \/\/ We finished executing, but an unpark request\n \/\/ came in the meantime. We need to execute\n \/\/ again. 
Return false as we failed to end the\n \/\/ execution phase.\n return false;\n }\n }\n }\n }\n }\n}\n\nimpl<F: Future + Send> Ping for ScopeFuture<F> {\n fn ping(&self) {\n self.unpark_inherent();\n }\n}\n\nimpl<F: Future + Send> Job for ScopeFuture<F> {\n unsafe fn execute(this: *const Self) {\n let this: Arc<Self> = mem::transmute(this);\n\n \/\/ *generally speaking* there should be no contention for the\n \/\/ lock, but it is possible -- we can end execution, get re-enqeueud,\n \/\/ and re-executed, before we have time to return from this fn\n let mut contents = this.contents.lock().unwrap();\n\n log!(FutureExecute { state: this.state.load(Relaxed) });\n\n this.begin_execute_state();\n loop {\n match contents.poll() {\n Ok(Async::Ready(v)) => {\n log!(FutureExecuteReady);\n return contents.complete(Ok(Async::Ready(v)));\n }\n Ok(Async::NotReady) => {\n log!(FutureExecuteNotReady);\n if this.end_execute_state() {\n return;\n }\n }\n Err(err) => {\n log!(FutureExecuteErr);\n return contents.complete(Err(err));\n }\n }\n }\n }\n}\n\nimpl<F: Future + Send> ScopeFutureContents<F> {\n fn poll(&mut self) -> Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error> {\n let unpark = self.unpark.clone().unwrap();\n self.spawn.as_mut().unwrap().poll_future(unpark)\n }\n\n fn complete(&mut self, value: Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error>) {\n log!(FutureComplete);\n\n \/\/ So, this is subtle. We know that the type `F` may have some\n \/\/ data which is only valid until the end of the scope, and we\n \/\/ also know that the scope doesn't end until `self.counter`\n \/\/ is decremented below. So we want to be sure to drop\n \/\/ `self.future` first, lest its dtor try to access some of\n \/\/ that state or something!\n self.spawn.take().unwrap();\n\n self.unpark = None;\n self.result = value;\n let this = self.this.take().unwrap();\n if cfg!(debug_assertions) {\n let state = this.state.load(Relaxed);\n debug_assert!(state == STATE_EXECUTING || state == STATE_EXECUTING_UNPARKED,\n \"cannot complete when not executing (state = {})\",\n state);\n }\n this.state.store(STATE_COMPLETE, Release);\n\n if let Some(waiting_task) = self.waiting_task.take() {\n log!(FutureUnparkWaitingTask);\n waiting_task.unpark();\n }\n\n \/\/ allow the enclosing scope to end\n unsafe {\n (*self.counter).set();\n }\n }\n}\n\nstruct PingUnpark<'l> {\n ping: Arc<Ping + 'l>,\n}\n\nimpl<'l> PingUnpark<'l> {\n fn new(ping: Arc<Ping + 'l>) -> PingUnpark {\n PingUnpark { ping: ping }\n }\n}\n\nimpl Unpark for PingUnpark<'static> {\n fn unpark(&self) {\n self.ping.ping()\n }\n}\n\npub trait ScopeFutureTrait<T, E>: Send + Sync {\n fn poll(&self) -> Poll<T, E>;\n fn cancel(&self);\n}\n\nimpl<F> ScopeFutureTrait<<CU<F> as Future>::Item, <CU<F> as Future>::Error> for ScopeFuture<F>\n where F: Future + Send\n{\n fn poll(&self) -> Poll<<CU<F> as Future>::Item, <CU<F> as Future>::Error> {\n \/\/ Important: due to transmute hackery, not all the fields are\n \/\/ truly known to be valid at this point. In particular, the\n \/\/ type F is erased. 
But the `state` and `result` fields\n \/\/ should be valid.\n let mut contents = self.contents.lock().unwrap();\n let state = self.state.load(Relaxed);\n if state == STATE_COMPLETE {\n let r = mem::replace(&mut contents.result, Ok(Async::NotReady));\n return r;\n } else {\n assert!(contents.waiting_task.is_none());\n log!(FutureInstallWaitingTask { state: state });\n contents.waiting_task = Some(task::park());\n Ok(Async::NotReady)\n }\n }\n\n fn cancel(&self) {\n \/\/ Fast-path: check if this is already complete and return if\n \/\/ so. A relaxed load suffices since we are not going to\n \/\/ access any data as a result of this action.\n if self.state.load(Relaxed) == STATE_COMPLETE {\n return;\n }\n\n \/\/ Slow-path. Get the lock and everything.\n let mut contents = self.contents.lock().unwrap();\n loop {\n match self.state.load(Relaxed) {\n STATE_COMPLETE => {\n return;\n }\n\n state => {\n log!(FutureCancel { state: state });\n if {\n self.state\n .compare_exchange_weak(state, STATE_COMPLETE, Release, Relaxed)\n .is_ok()\n } {\n contents.complete(Ok(Async::NotReady));\n return;\n }\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright (c) 2018-2021, The rav1e contributors. All rights reserved\n\/\/\n\/\/ This source code is subject to the terms of the BSD 2 Clause License and\n\/\/ the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License\n\/\/ was not distributed with this source code in the LICENSE file, you can\n\/\/ obtain it at www.aomedia.org\/license\/software. If the Alliance for Open\n\/\/ Media Patent License 1.0 was not distributed with this source code in the\n\/\/ PATENTS file, you can obtain it at www.aomedia.org\/license\/patent.\n\nuse crate::context::*;\nuse crate::header::PRIMARY_REF_NONE;\nuse crate::partition::BlockSize;\nuse crate::rdo::spatiotemporal_scale;\nuse crate::rdo::DistortionScale;\nuse crate::tiling::TileStateMut;\nuse crate::util::Pixel;\nuse crate::FrameInvariants;\nuse crate::FrameState;\n\npub const MAX_SEGMENTS: usize = 8;\n\npub fn segmentation_optimize<T: Pixel>(\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>,\n) {\n assert!(fi.enable_segmentation);\n fs.segmentation.enabled = true;\n\n if fs.segmentation.enabled {\n fs.segmentation.update_map = true;\n\n \/\/ We don't change the values between frames.\n fs.segmentation.update_data = fi.primary_ref_frame == PRIMARY_REF_NONE;\n\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n \/\/ Because base_q_idx changes more frequently than the segmentation\n \/\/ data, it is still possible for a segment to enter lossless, so\n \/\/ enforcement elsewhere is needed.\n let offset_lower_limit = 1 - fi.base_q_idx as i16;\n\n if !fs.segmentation.update_data {\n let mut min_segment = MAX_SEGMENTS;\n for i in 0..MAX_SEGMENTS {\n if fs.segmentation.features[i][SegLvl::SEG_LVL_ALT_Q as usize]\n && fs.segmentation.data[i][SegLvl::SEG_LVL_ALT_Q as usize]\n >= offset_lower_limit\n {\n min_segment = i;\n break;\n }\n }\n assert_ne!(min_segment, MAX_SEGMENTS);\n fs.segmentation.min_segment = min_segment as u8;\n fs.segmentation.update_threshold(fi.base_q_idx, fi.config.bit_depth);\n return;\n }\n\n segmentation_optimize_inner(fi, fs, offset_lower_limit);\n\n \/* Figure out parameters *\/\n fs.segmentation.preskip = false;\n fs.segmentation.last_active_segid = 0;\n for i in 0..MAX_SEGMENTS {\n for j in 0..SegLvl::SEG_LVL_MAX as usize {\n if fs.segmentation.features[i][j] {\n fs.segmentation.last_active_segid = i as u8;\n if j >= SegLvl::SEG_LVL_REF_FRAME as 
usize {\n fs.segmentation.preskip = true;\n }\n }\n }\n }\n }\n}\n\n\/\/ Select target quantizers for each segment by fitting to log(scale).\nfn segmentation_optimize_inner<T: Pixel>(\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>, offset_lower_limit: i16,\n) {\n use crate::quantize::{ac_q, select_ac_qi};\n use crate::util::kmeans;\n use arrayvec::ArrayVec;\n\n \/\/ Minimize the total distance from a small set of values to all scales.\n \/\/ Find k-means of log(spatiotemporal scale), k in 3..=8\n let c: ([_; 8], [_; 7], [_; 6], [_; 5], [_; 4], [_; 3]) = {\n let spatiotemporal_scores =\n &fi.coded_frame_data.as_ref().unwrap().spatiotemporal_scores;\n let mut log2_scale_q11 = Vec::with_capacity(spatiotemporal_scores.len());\n log2_scale_q11.extend(spatiotemporal_scores.iter().map(|&s| s.blog16()));\n log2_scale_q11.sort_unstable();\n let l = &log2_scale_q11;\n (kmeans(l), kmeans(l), kmeans(l), kmeans(l), kmeans(l), kmeans(l))\n };\n\n \/\/ Find variance in spacing between successive log(scale)\n let var = |c: &[i16]| {\n let delta = ArrayVec::<_, MAX_SEGMENTS>::from_iter(\n c.iter().skip(1).zip(c).map(|(&a, &b)| b as i64 - a as i64),\n );\n let mean = delta.iter().sum::<i64>() \/ delta.len() as i64;\n delta.iter().map(|&d| (d - mean).pow(2)).sum::<i64>() as u64\n };\n let variance =\n [var(&c.0), var(&c.1), var(&c.2), var(&c.3), var(&c.4), var(&c.5)];\n\n \/\/ Choose the k value with minimal variance in spacing\n let min_variance = *variance.iter().min().unwrap();\n let position = variance.iter().rposition(|&v| v == min_variance).unwrap();\n\n \/\/ For the selected centroids, derive a target quantizer:\n \/\/ scale Q'^2 = Q^2\n \/\/ See `distortion_scale_for` for more information.\n let compute_delta = |centroids: &[i16]| {\n use crate::util::{bexp64, blog64};\n let log2_base_ac_q_q57 =\n blog64(ac_q(fi.base_q_idx, 0, fi.config.bit_depth).into());\n centroids\n .iter()\n .rev()\n \/\/ Rewrite in log form and exponentiate:\n \/\/ scale Q'^2 = Q^2\n \/\/ Q' = Q \/ sqrt(scale)\n \/\/ log(Q') = log(Q) - 0.5 log(scale)\n .map(|&log2_scale_q11| {\n bexp64(log2_base_ac_q_q57 - ((log2_scale_q11 as i64) << (57 - 11 - 1)))\n })\n \/\/ Find the index of the nearest quantizer to the target,\n \/\/ and take the delta from the base quantizer index.\n .map(|q| {\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n select_ac_qi(q, fi.config.bit_depth).max(1) as i16\n - fi.base_q_idx as i16\n })\n .collect::<ArrayVec<_, MAX_SEGMENTS>>()\n };\n\n \/\/ Compute segment deltas for best value of k\n let seg_delta = match position {\n 0 => compute_delta(&c.0),\n 1 => compute_delta(&c.1),\n 2 => compute_delta(&c.2),\n 3 => compute_delta(&c.3),\n 4 => compute_delta(&c.4),\n _ => compute_delta(&c.5),\n };\n\n \/\/ Update the segmentation data\n fs.segmentation.max_segment = seg_delta.len() as u8 - 1;\n for (&delta, (features, data)) in seg_delta\n .iter()\n .zip(fs.segmentation.features.iter_mut().zip(&mut fs.segmentation.data))\n {\n features[SegLvl::SEG_LVL_ALT_Q as usize] = true;\n data[SegLvl::SEG_LVL_ALT_Q as usize] = delta.max(offset_lower_limit);\n }\n\n fs.segmentation.update_threshold(fi.base_q_idx, fi.config.bit_depth);\n}\n\npub fn select_segment<T: Pixel>(\n fi: &FrameInvariants<T>, ts: &TileStateMut<'_, T>, tile_bo: TileBlockOffset,\n bsize: BlockSize, skip: bool,\n) -> std::ops::RangeInclusive<u8> {\n \/\/ If skip is true or segmentation is turned off, sidx is not coded.\n if skip || !fi.enable_segmentation {\n return 0..=0;\n }\n\n use crate::api::SegmentationLevel;\n if 
fi.config.speed_settings.segmentation == SegmentationLevel::Full {\n return ts.segmentation.min_segment..=ts.segmentation.max_segment;\n }\n\n let frame_bo = ts.to_frame_block_offset(tile_bo);\n let scale = spatiotemporal_scale(fi, frame_bo, bsize);\n\n let sidx = segment_idx_from_distortion(&ts.segmentation.threshold, scale);\n\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n let sidx = sidx.max(ts.segmentation.min_segment);\n\n sidx..=sidx\n}\n\nfn segment_idx_from_distortion(\n threshold: &[DistortionScale; MAX_SEGMENTS - 1], s: DistortionScale,\n) -> u8 {\n threshold.partition_point(|&t| s.0 < t.0) as u8\n}\n<commit_msg>segmentation: Reset min_segment on data update<commit_after>\/\/ Copyright (c) 2018-2021, The rav1e contributors. All rights reserved\n\/\/\n\/\/ This source code is subject to the terms of the BSD 2 Clause License and\n\/\/ the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License\n\/\/ was not distributed with this source code in the LICENSE file, you can\n\/\/ obtain it at www.aomedia.org\/license\/software. If the Alliance for Open\n\/\/ Media Patent License 1.0 was not distributed with this source code in the\n\/\/ PATENTS file, you can obtain it at www.aomedia.org\/license\/patent.\n\nuse crate::context::*;\nuse crate::header::PRIMARY_REF_NONE;\nuse crate::partition::BlockSize;\nuse crate::rdo::spatiotemporal_scale;\nuse crate::rdo::DistortionScale;\nuse crate::tiling::TileStateMut;\nuse crate::util::Pixel;\nuse crate::FrameInvariants;\nuse crate::FrameState;\n\npub const MAX_SEGMENTS: usize = 8;\n\npub fn segmentation_optimize<T: Pixel>(\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>,\n) {\n assert!(fi.enable_segmentation);\n fs.segmentation.enabled = true;\n\n if fs.segmentation.enabled {\n fs.segmentation.update_map = true;\n\n \/\/ We don't change the values between frames.\n fs.segmentation.update_data = fi.primary_ref_frame == PRIMARY_REF_NONE;\n\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n \/\/ Because base_q_idx changes more frequently than the segmentation\n \/\/ data, it is still possible for a segment to enter lossless, so\n \/\/ enforcement elsewhere is needed.\n let offset_lower_limit = 1 - fi.base_q_idx as i16;\n\n if !fs.segmentation.update_data {\n let mut min_segment = MAX_SEGMENTS;\n for i in 0..MAX_SEGMENTS {\n if fs.segmentation.features[i][SegLvl::SEG_LVL_ALT_Q as usize]\n && fs.segmentation.data[i][SegLvl::SEG_LVL_ALT_Q as usize]\n >= offset_lower_limit\n {\n min_segment = i;\n break;\n }\n }\n assert_ne!(min_segment, MAX_SEGMENTS);\n fs.segmentation.min_segment = min_segment as u8;\n fs.segmentation.update_threshold(fi.base_q_idx, fi.config.bit_depth);\n return;\n }\n\n segmentation_optimize_inner(fi, fs, offset_lower_limit);\n\n \/* Figure out parameters *\/\n fs.segmentation.preskip = false;\n fs.segmentation.last_active_segid = 0;\n for i in 0..MAX_SEGMENTS {\n for j in 0..SegLvl::SEG_LVL_MAX as usize {\n if fs.segmentation.features[i][j] {\n fs.segmentation.last_active_segid = i as u8;\n if j >= SegLvl::SEG_LVL_REF_FRAME as usize {\n fs.segmentation.preskip = true;\n }\n }\n }\n }\n }\n}\n\n\/\/ Select target quantizers for each segment by fitting to log(scale).\nfn segmentation_optimize_inner<T: Pixel>(\n fi: &FrameInvariants<T>, fs: &mut FrameState<T>, offset_lower_limit: i16,\n) {\n use crate::quantize::{ac_q, select_ac_qi};\n use crate::util::kmeans;\n use arrayvec::ArrayVec;\n\n \/\/ Minimize the total distance from a small set of values to all scales.\n \/\/ 
Find k-means of log(spatiotemporal scale), k in 3..=8\n let c: ([_; 8], [_; 7], [_; 6], [_; 5], [_; 4], [_; 3]) = {\n let spatiotemporal_scores =\n &fi.coded_frame_data.as_ref().unwrap().spatiotemporal_scores;\n let mut log2_scale_q11 = Vec::with_capacity(spatiotemporal_scores.len());\n log2_scale_q11.extend(spatiotemporal_scores.iter().map(|&s| s.blog16()));\n log2_scale_q11.sort_unstable();\n let l = &log2_scale_q11;\n (kmeans(l), kmeans(l), kmeans(l), kmeans(l), kmeans(l), kmeans(l))\n };\n\n \/\/ Find variance in spacing between successive log(scale)\n let var = |c: &[i16]| {\n let delta = ArrayVec::<_, MAX_SEGMENTS>::from_iter(\n c.iter().skip(1).zip(c).map(|(&a, &b)| b as i64 - a as i64),\n );\n let mean = delta.iter().sum::<i64>() \/ delta.len() as i64;\n delta.iter().map(|&d| (d - mean).pow(2)).sum::<i64>() as u64\n };\n let variance =\n [var(&c.0), var(&c.1), var(&c.2), var(&c.3), var(&c.4), var(&c.5)];\n\n \/\/ Choose the k value with minimal variance in spacing\n let min_variance = *variance.iter().min().unwrap();\n let position = variance.iter().rposition(|&v| v == min_variance).unwrap();\n\n \/\/ For the selected centroids, derive a target quantizer:\n \/\/ scale Q'^2 = Q^2\n \/\/ See `distortion_scale_for` for more information.\n let compute_delta = |centroids: &[i16]| {\n use crate::util::{bexp64, blog64};\n let log2_base_ac_q_q57 =\n blog64(ac_q(fi.base_q_idx, 0, fi.config.bit_depth).into());\n centroids\n .iter()\n .rev()\n \/\/ Rewrite in log form and exponentiate:\n \/\/ scale Q'^2 = Q^2\n \/\/ Q' = Q \/ sqrt(scale)\n \/\/ log(Q') = log(Q) - 0.5 log(scale)\n .map(|&log2_scale_q11| {\n bexp64(log2_base_ac_q_q57 - ((log2_scale_q11 as i64) << (57 - 11 - 1)))\n })\n \/\/ Find the index of the nearest quantizer to the target,\n \/\/ and take the delta from the base quantizer index.\n .map(|q| {\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n select_ac_qi(q, fi.config.bit_depth).max(1) as i16\n - fi.base_q_idx as i16\n })\n .collect::<ArrayVec<_, MAX_SEGMENTS>>()\n };\n\n \/\/ Compute segment deltas for best value of k\n let seg_delta = match position {\n 0 => compute_delta(&c.0),\n 1 => compute_delta(&c.1),\n 2 => compute_delta(&c.2),\n 3 => compute_delta(&c.3),\n 4 => compute_delta(&c.4),\n _ => compute_delta(&c.5),\n };\n\n \/\/ Update the segmentation data\n fs.segmentation.min_segment = 0;\n fs.segmentation.max_segment = seg_delta.len() as u8 - 1;\n for (&delta, (features, data)) in seg_delta\n .iter()\n .zip(fs.segmentation.features.iter_mut().zip(&mut fs.segmentation.data))\n {\n features[SegLvl::SEG_LVL_ALT_Q as usize] = true;\n data[SegLvl::SEG_LVL_ALT_Q as usize] = delta.max(offset_lower_limit);\n }\n\n fs.segmentation.update_threshold(fi.base_q_idx, fi.config.bit_depth);\n}\n\npub fn select_segment<T: Pixel>(\n fi: &FrameInvariants<T>, ts: &TileStateMut<'_, T>, tile_bo: TileBlockOffset,\n bsize: BlockSize, skip: bool,\n) -> std::ops::RangeInclusive<u8> {\n \/\/ If skip is true or segmentation is turned off, sidx is not coded.\n if skip || !fi.enable_segmentation {\n return 0..=0;\n }\n\n use crate::api::SegmentationLevel;\n if fi.config.speed_settings.segmentation == SegmentationLevel::Full {\n return ts.segmentation.min_segment..=ts.segmentation.max_segment;\n }\n\n let frame_bo = ts.to_frame_block_offset(tile_bo);\n let scale = spatiotemporal_scale(fi, frame_bo, bsize);\n\n let sidx = segment_idx_from_distortion(&ts.segmentation.threshold, scale);\n\n \/\/ Avoid going into lossless mode by never bringing qidx below 1.\n let sidx = 
sidx.max(ts.segmentation.min_segment);\n\n sidx..=sidx\n}\n\nfn segment_idx_from_distortion(\n threshold: &[DistortionScale; MAX_SEGMENTS - 1], s: DistortionScale,\n) -> u8 {\n threshold.partition_point(|&t| s.0 < t.0) as u8\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/! Utilities and shortcuts for working with IRC servers.\n#![experimental]\n\nuse std::io::IoResult;\nuse data::{Command, Config, User};\nuse data::command::Command::{INVITE, JOIN, KILL, MODE, NICK, KICK};\nuse data::command::Command::{OPER, PONG, PRIVMSG, SAMODE, SANICK, TOPIC, USER};\nuse data::kinds::IrcStream;\nuse server::{Server, ServerIterator};\n\n\/\/\/ Functionality-providing wrapper for Server.\n#[experimental]\npub struct Wrapper<'a, T> where T: IrcStream {\n server: &'a Server<'a, T> + 'a\n}\n\nimpl<'a, T> Server<'a, T> for Wrapper<'a, T> where T: IrcStream {\n fn config(&self) -> &Config {\n self.server.config()\n }\n\n fn send(&self, command: Command) -> IoResult<()> {\n self.server.send(command)\n }\n\n fn iter(&'a self) -> ServerIterator<'a, T> {\n self.server.iter()\n }\n\n fn list_users(&self, chan: &str) -> Option<Vec<User>> {\n self.server.list_users(chan)\n }\n}\n\nimpl<'a, T> Wrapper<'a, T> where T: IrcStream {\n \/\/\/ Creates a new Wrapper from the given Server.\n #[experimental]\n pub fn new(server: &'a Server<'a, T>) -> Wrapper<'a, T> {\n Wrapper { server: server }\n }\n\n \/\/\/ Sends a NICK and USER to identify.\n #[experimental]\n pub fn identify(&self) -> IoResult<()> {\n try!(self.server.send(NICK(self.server.config().nickname[])));\n self.server.send(USER(self.server.config().username[], \"0\",\n self.server.config().realname[]))\n }\n\n \/\/\/ Sends a PONG with the specified message.\n #[experimental]\n pub fn send_pong(&self, msg: &str) -> IoResult<()> {\n self.server.send(PONG(msg, None))\n }\n\n \/\/\/ Joins the specified channel or chanlist.\n #[experimental]\n pub fn send_join(&self, chanlist: &str) -> IoResult<()> {\n self.server.send(JOIN(chanlist, None))\n }\n\n \/\/\/ Attempts to oper up using the specified username and password.\n #[experimental]\n pub fn send_oper(&self, username: &str, password: &str) -> IoResult<()> {\n self.server.send(OPER(username, password))\n }\n\n \/\/\/ Sends a message to the specified target.\n #[experimental]\n pub fn send_privmsg(&self, target: &str, message: &str) -> IoResult<()> {\n for line in message.split_str(\"\\r\\n\") {\n try!(self.server.send(PRIVMSG(target, line)))\n }\n Ok(())\n }\n\n \/\/\/ Sets the topic of a channel or requests the current one.\n \/\/\/ If `topic` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_topic(&self, channel: &str, topic: &str) -> IoResult<()> {\n self.server.send(TOPIC(channel, if topic.len() == 0 {\n None\n } else {\n Some(topic)\n }))\n }\n\n \/\/\/ Kills the target with the provided message.\n #[experimental]\n pub fn send_kill(&self, target: &str, message: &str) -> IoResult<()> {\n self.server.send(KILL(target, message))\n }\n\n \/\/\/ Kicks the listed nicknames from the listed channels with a comment.\n \/\/\/ If `message` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_kick(&self, chanlist: &str, nicklist: &str, message: &str) -> IoResult<()> {\n self.server.send(KICK(chanlist, nicklist, if message.len() == 0 {\n None\n } else {\n Some(message)\n }))\n }\n\n \/\/\/ Changes the mode of the target.\n \/\/\/ If `modeparmas` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_mode(&self, 
target: &str, mode: &str, modeparams: &str) -> IoResult<()> {\n self.server.send(MODE(target, mode, if modeparams.len() == 0 {\n None\n } else {\n Some(modeparams)\n }))\n }\n\n \/\/\/ Changes the mode of the target by force.\n \/\/\/ If `modeparams` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_samode(&self, target: &str, mode: &str, modeparams: &str) -> IoResult<()> {\n self.server.send(SAMODE(target, mode, if modeparams.len() == 0 {\n None\n } else {\n Some(modeparams)\n }))\n }\n\n \/\/\/ Forces a user to change from the old nickname to the new nickname.\n #[experimental]\n pub fn send_sanick(&self, old_nick: &str, new_nick: &str) -> IoResult<()> {\n self.server.send(SANICK(old_nick, new_nick))\n }\n\n \/\/\/ Invites a user to the specified channel.\n #[experimental]\n pub fn send_invite(&self, nick: &str, chan: &str) -> IoResult<()> {\n self.server.send(INVITE(nick, chan))\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::Wrapper;\n use std::io::MemWriter;\n use std::io::util::NullReader;\n use conn::{Connection, IoStream};\n use server::IrcServer;\n use server::test::{get_server_value, test_config};\n\n #[test]\n fn identify() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.identify().unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"NICK :test\\r\\nUSER test 0 * :test\\r\\n\");\n }\n\n #[test]\n fn send_pong() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_pong(\"irc.test.net\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"PONG :irc.test.net\\r\\n\");\n }\n\n #[test]\n fn send_join() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_join(\"#test,#test2,#test3\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"JOIN #test,#test2,#test3\\r\\n\");\n }\n\n #[test]\n fn send_oper() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_oper(\"test\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"OPER test :test\\r\\n\");\n }\n\n #[test]\n fn send_privmsg() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_privmsg(\"#test\", \"Hi, everybody!\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"PRIVMSG #test :Hi, everybody!\\r\\n\");\n }\n\n #[test]\n fn send_topic_no_topic() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_topic(\"#test\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"TOPIC #test\\r\\n\");\n }\n\n #[test]\n fn send_topic() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_topic(\"#test\", \"Testing stuff.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"TOPIC #test :Testing stuff.\\r\\n\");\n }\n\n #[test]\n fn send_kill() {\n let 
server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kill(\"test\", \"Testing kills.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KILL test :Testing kills.\\r\\n\");\n }\n\n #[test]\n fn send_kick_no_message() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kick(\"#test\", \"test\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KICK #test test\\r\\n\");\n }\n\n #[test]\n fn send_kick() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kick(\"#test\", \"test\", \"Testing kicks.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KICK #test test :Testing kicks.\\r\\n\");\n }\n\n #[test]\n fn send_mode_no_modeparams() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_mode(\"#test\", \"+i\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"MODE #test +i\\r\\n\");\n }\n\n #[test]\n fn send_mode() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_mode(\"#test\", \"+o\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"MODE #test +o test\\r\\n\");\n }\n\n #[test]\n fn send_samode_no_modeparams() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_samode(\"#test\", \"+i\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SAMODE #test +i\\r\\n\");\n }\n\n #[test]\n fn send_samode() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_samode(\"#test\", \"+o\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SAMODE #test +o test\\r\\n\");\n }\n\n #[test]\n fn send_sanick() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_sanick(\"test\", \"test2\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SANICK test test2\\r\\n\");\n }\n\n #[test]\n fn send_invite() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_invite(\"test\", \"#test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"INVITE test #test\\r\\n\");\n }\n}\n<commit_msg>Added send_notice(...) to Wrappers.<commit_after>\/\/! 
Utilities and shortcuts for working with IRC servers.\n#![experimental]\n\nuse std::io::IoResult;\nuse data::{Command, Config, User};\nuse data::command::Command::{INVITE, JOIN, KILL, MODE, NICK, NOTICE, KICK};\nuse data::command::Command::{OPER, PONG, PRIVMSG, SAMODE, SANICK, TOPIC, USER};\nuse data::kinds::IrcStream;\nuse server::{Server, ServerIterator};\n\n\/\/\/ Functionality-providing wrapper for Server.\n#[experimental]\npub struct Wrapper<'a, T> where T: IrcStream {\n server: &'a Server<'a, T> + 'a\n}\n\nimpl<'a, T> Server<'a, T> for Wrapper<'a, T> where T: IrcStream {\n fn config(&self) -> &Config {\n self.server.config()\n }\n\n fn send(&self, command: Command) -> IoResult<()> {\n self.server.send(command)\n }\n\n fn iter(&'a self) -> ServerIterator<'a, T> {\n self.server.iter()\n }\n\n fn list_users(&self, chan: &str) -> Option<Vec<User>> {\n self.server.list_users(chan)\n }\n}\n\nimpl<'a, T> Wrapper<'a, T> where T: IrcStream {\n \/\/\/ Creates a new Wrapper from the given Server.\n #[experimental]\n pub fn new(server: &'a Server<'a, T>) -> Wrapper<'a, T> {\n Wrapper { server: server }\n }\n\n \/\/\/ Sends a NICK and USER to identify.\n #[experimental]\n pub fn identify(&self) -> IoResult<()> {\n try!(self.server.send(NICK(self.server.config().nickname[])));\n self.server.send(USER(self.server.config().username[], \"0\",\n self.server.config().realname[]))\n }\n\n \/\/\/ Sends a PONG with the specified message.\n #[experimental]\n pub fn send_pong(&self, msg: &str) -> IoResult<()> {\n self.server.send(PONG(msg, None))\n }\n\n \/\/\/ Joins the specified channel or chanlist.\n #[experimental]\n pub fn send_join(&self, chanlist: &str) -> IoResult<()> {\n self.server.send(JOIN(chanlist, None))\n }\n\n \/\/\/ Attempts to oper up using the specified username and password.\n #[experimental]\n pub fn send_oper(&self, username: &str, password: &str) -> IoResult<()> {\n self.server.send(OPER(username, password))\n }\n\n \/\/\/ Sends a message to the specified target.\n #[experimental]\n pub fn send_privmsg(&self, target: &str, message: &str) -> IoResult<()> {\n for line in message.split_str(\"\\r\\n\") {\n try!(self.server.send(PRIVMSG(target, line)))\n }\n Ok(())\n }\n\n \/\/\/ Sends a notice to the specified target.\n #[experimental]\n pub fn send_notice(&self, target: &str, message: &str) -> IoResult<()> {\n for line in message.split_str(\"\\r\\n\") {\n try!(self.server.send(NOTICE(target, line)))\n }\n Ok(())\n }\n\n \/\/\/ Sets the topic of a channel or requests the current one.\n \/\/\/ If `topic` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_topic(&self, channel: &str, topic: &str) -> IoResult<()> {\n self.server.send(TOPIC(channel, if topic.len() == 0 {\n None\n } else {\n Some(topic)\n }))\n }\n\n \/\/\/ Kills the target with the provided message.\n #[experimental]\n pub fn send_kill(&self, target: &str, message: &str) -> IoResult<()> {\n self.server.send(KILL(target, message))\n }\n\n \/\/\/ Kicks the listed nicknames from the listed channels with a comment.\n \/\/\/ If `message` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_kick(&self, chanlist: &str, nicklist: &str, message: &str) -> IoResult<()> {\n self.server.send(KICK(chanlist, nicklist, if message.len() == 0 {\n None\n } else {\n Some(message)\n }))\n }\n\n \/\/\/ Changes the mode of the target.\n \/\/\/ If `modeparmas` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_mode(&self, target: &str, 
mode: &str, modeparams: &str) -> IoResult<()> {\n self.server.send(MODE(target, mode, if modeparams.len() == 0 {\n None\n } else {\n Some(modeparams)\n }))\n }\n\n \/\/\/ Changes the mode of the target by force.\n \/\/\/ If `modeparams` is an empty string, it won't be included in the message.\n #[experimental]\n pub fn send_samode(&self, target: &str, mode: &str, modeparams: &str) -> IoResult<()> {\n self.server.send(SAMODE(target, mode, if modeparams.len() == 0 {\n None\n } else {\n Some(modeparams)\n }))\n }\n\n \/\/\/ Forces a user to change from the old nickname to the new nickname.\n #[experimental]\n pub fn send_sanick(&self, old_nick: &str, new_nick: &str) -> IoResult<()> {\n self.server.send(SANICK(old_nick, new_nick))\n }\n\n \/\/\/ Invites a user to the specified channel.\n #[experimental]\n pub fn send_invite(&self, nick: &str, chan: &str) -> IoResult<()> {\n self.server.send(INVITE(nick, chan))\n }\n}\n\n#[cfg(test)]\nmod test {\n use super::Wrapper;\n use std::io::MemWriter;\n use std::io::util::NullReader;\n use conn::{Connection, IoStream};\n use server::IrcServer;\n use server::test::{get_server_value, test_config};\n\n #[test]\n fn identify() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.identify().unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"NICK :test\\r\\nUSER test 0 * :test\\r\\n\");\n }\n\n #[test]\n fn send_pong() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_pong(\"irc.test.net\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"PONG :irc.test.net\\r\\n\");\n }\n\n #[test]\n fn send_join() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_join(\"#test,#test2,#test3\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"JOIN #test,#test2,#test3\\r\\n\");\n }\n\n #[test]\n fn send_oper() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_oper(\"test\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"OPER test :test\\r\\n\");\n }\n\n #[test]\n fn send_privmsg() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_privmsg(\"#test\", \"Hi, everybody!\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"PRIVMSG #test :Hi, everybody!\\r\\n\");\n }\n\n #[test]\n fn send_notice() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_notice(\"#test\", \"Hi, everybody!\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"NOTICE #test :Hi, everybody!\\r\\n\");\n }\n\n #[test]\n fn send_topic_no_topic() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_topic(\"#test\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"TOPIC #test\\r\\n\");\n }\n\n #[test]\n fn send_topic() {\n let server = 
IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_topic(\"#test\", \"Testing stuff.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"TOPIC #test :Testing stuff.\\r\\n\");\n }\n\n #[test]\n fn send_kill() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kill(\"test\", \"Testing kills.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KILL test :Testing kills.\\r\\n\");\n }\n\n #[test]\n fn send_kick_no_message() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kick(\"#test\", \"test\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KICK #test test\\r\\n\");\n }\n\n #[test]\n fn send_kick() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_kick(\"#test\", \"test\", \"Testing kicks.\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"KICK #test test :Testing kicks.\\r\\n\");\n }\n\n #[test]\n fn send_mode_no_modeparams() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_mode(\"#test\", \"+i\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"MODE #test +i\\r\\n\");\n }\n\n #[test]\n fn send_mode() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_mode(\"#test\", \"+o\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"MODE #test +o test\\r\\n\");\n }\n\n #[test]\n fn send_samode_no_modeparams() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_samode(\"#test\", \"+i\", \"\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SAMODE #test +i\\r\\n\");\n }\n\n #[test]\n fn send_samode() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_samode(\"#test\", \"+o\", \"test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SAMODE #test +o test\\r\\n\");\n }\n\n #[test]\n fn send_sanick() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_sanick(\"test\", \"test2\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"SANICK test test2\\r\\n\");\n }\n\n #[test]\n fn send_invite() {\n let server = IrcServer::from_connection(test_config(),\n Connection::new(IoStream::new(MemWriter::new(), NullReader)));\n {\n let wrapper = Wrapper::new(&server);\n wrapper.send_invite(\"test\", \"#test\").unwrap();\n }\n assert_eq!(get_server_value(server)[],\n \"INVITE test #test\\r\\n\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#[cfg(target_os = \"linux\")]\nmod platform {\n fn runmain(f: fn()) {\n f()\n }\n}\n\n#[cfg(target_os = \"macos\")]\nmod 
platform {\n use cocoa;\n import cocoa::base::*;\n\n mod NSApplication {\n fn sharedApplication() -> id {\n let klass = str::as_c_str(\"NSApplication\") { |s|\n objc::objc_getClass(s)\n };\n\n let sel = str::as_c_str(\"sharedApplication\") { |s|\n objc::sel_registerName(s)\n };\n\n let nsapp = objc::objc_msgSend(klass, sel);\n #debug(\"nsapp: %d\", (nsapp as int));\n\n\t ret nsapp;\n }\n }\n\n mod NSAutoreleasePool {\n fn alloc() -> id {\n let klass = str::as_c_str(\"NSAutoreleasePool\") { |s|\n objc::objc_getClass(s)\n };\n let sel = str::as_c_str(\"alloc\") { |s|\n objc::sel_registerName(s)\n };\n let pool = objc::objc_msgSend(klass, sel);\n #debug(\"autorelease pool: %?\", pool);\n ret pool;\n }\n fn init(pool: id) {\n let sel = str::as_c_str(\"init\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(pool, sel);\n }\n fn release(pool: id) {\n let sel = str::as_c_str(\"release\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(pool, sel);\n }\n }\n\n mod NSApp {\n fn setDelegate(nsapp: id, main: id) {\n\t #debug(\"NSApp::setDelegate\");\n\t let sel = str::as_c_str(\"setDelegate:\") { |s|\n\t objc::sel_registerName(s)\n\t };\n\t cocoa::msgSend1Id(nsapp, sel, main);\n }\n\n fn run(nsapp: id) {\n\t #debug(\"NSApp::run\");\n let sel = str::as_c_str(\"run\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(nsapp, sel);\n }\n }\n\n mod MainObj {\n crust fn applicationDidFinishLaunching(this: id, _sel: SEL) {\n\t #debug(\"applicationDidFinishLaunching\");\n\n\t let fptr: *fn() = ptr::null();\n\t str::as_c_str(\"fptr\") { |name|\n\t let outValue = unsafe { unsafe::reinterpret_cast(ptr::addr_of(fptr)) };\n #debug(\"*fptr %?\", outValue);\n objc::object_getInstanceVariable(this, name, outValue)\n };\n\n\t #debug(\"getting osmain fptr: %?\", fptr);\n\n\t unsafe {\n\t \/\/ FIXME: We probably don't want to run the main routine in a crust function\n (*fptr)();\n }\n\t }\n\n \t fn create(f: fn()) -> id {\n let NSObject = str::as_c_str(\"NSObject\") { |s|\n\t objc::objc_getClass(s)\n };\n\t let MainObj = str::as_c_str(\"MainObj\") { |s|\n\t objc::objc_allocateClassPair(NSObject, s, 0 as libc::size_t)\n\t };\n\n \/\/ Add a field to our class to contain a pointer to a rust closure\n\t let res = str::as_c_str(\"fptr\") { |name|\n str::as_c_str(\"^i\") { |types|\n objc::class_addIvar(MainObj, name,\n sys::size_of::<libc::uintptr_t>() as libc::size_t,\n 16u8, types)\n }\n };\n \t assert res == true;\n\n\t let launchfn = str::as_c_str(\"applicationDidFinishLaunching:\") { |s|\n\t objc::sel_registerName(s)\n\t };\n\t let _ = str::as_c_str(\"@@:\") { |types|\n\t objc::class_addMethod(MainObj, launchfn, applicationDidFinishLaunching, types)\n\t };\n\n\t objc::objc_registerClassPair(MainObj);\n\n let sel = str::as_c_str(\"alloc\") { |s|\n objc::sel_registerName(s)\n };\n let mainobj = objc::objc_msgSend(MainObj, sel);\n\n let sel = str::as_c_str(\"init\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(mainobj, sel);\n\n\t let fptr = ptr::addr_of(f);\n\t str::as_c_str(\"fptr\") { |name|\n\t #debug(\"setting osmain fptr: %?\", fptr);\n\t\t let value = unsafe { unsafe::reinterpret_cast(fptr) };\n #debug(\"*fptr: %?\", value);\n objc::object_setInstanceVariable(mainobj, name, value)\n };\n\n\t ret mainobj;\n\t }\n\t fn release(mainobj: id) {\n let sel = str::as_c_str(\"release\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(mainobj, sel);\n\t }\n }\n\n fn runmain(f: fn()) {\n\tlet pool = NSAutoreleasePool::alloc();\n\tNSAutoreleasePool::init(pool);\n let 
NSApp = NSApplication::sharedApplication();\n\n let mainobj = MainObj::create(f);\n\tNSApp::setDelegate(NSApp, mainobj);\n\tNSApp::run(NSApp);\n\t\n\tMainObj::release(mainobj);\t\n\tNSAutoreleasePool::release(pool);\n }\n}\n\nenum msg {\n get_draw_target(comm::chan<AzDrawTargetRef>),\n add_key_handler(comm::chan<()>),\n draw(comm::chan<()>),\n exit\n}\n\nfn osmain() -> comm::chan<msg> {\n on_osmain::<msg> {|po|\n platform::runmain {||\n #debug(\"preparing to enter main loop\");\n\t mainloop(po);\n }\n }\n}\n\n\/\/ A function for spawning into the platform's main thread\nfn on_osmain<T: send>(f: fn~(comm::port<T>)) -> comm::chan<T> {\n let builder = task::builder();\n let opts = {\n sched: some({\n mode: task::osmain,\n native_stack_size: none\n })\n with task::get_opts(builder)\n };\n task::set_opts(builder, opts);\n ret task::run_listener(builder, f);\n}\n\nfn mainloop(po: comm::port<msg>) {\n\n let mut key_handlers = [];\n\n sdl::init([\n sdl::init_video\n ]);\n let screen = sdl::video::set_video_mode(\n 800, 600, 32,\n [sdl::video::swsurface],\n [sdl::video::doublebuf]);\n assert !ptr::is_null(screen);\n let sdl_surf = sdl::video::create_rgb_surface(\n [sdl::video::swsurface],\n 800, 600, 32,\n 0x00FF0000u32,\n 0x0000FF00u32,\n 0x000000FFu32,\n 0x00000000u32\n );\n assert !ptr::is_null(sdl_surf);\n sdl::video::lock_surface(sdl_surf);\n let cairo_surf = unsafe {\n cairo_image_surface_create_for_data(\n unsafe::reinterpret_cast((*sdl_surf).pixels),\n cairo::CAIRO_FORMAT_RGB24,\n (*sdl_surf).w,\n (*sdl_surf).h,\n (*sdl_surf).pitch as libc::c_int\n )\n };\n assert !ptr::is_null(cairo_surf);\n let azure_target = AzCreateDrawTargetForCairoSurface(cairo_surf);\n assert !ptr::is_null(azure_target);\n\n loop {\n sdl::event::poll_event {|event|\n alt event {\n sdl::event::keydown_event(_) {\n key_handlers.iter {|key_ch|\n comm::send(key_ch, ())\n }\n }\n _ { }\n }\n }\n\n \/\/ Handle messages\n if comm::peek(po) {\n alt check comm::recv(po) {\n add_key_handler(key_ch) {\n key_handlers += [key_ch];\n }\n get_draw_target(response_ch) {\n comm::send(response_ch, azure_target);\n }\n draw(response_ch) {\n sdl::video::unlock_surface(sdl_surf);\n sdl::video::blit_surface(sdl_surf, ptr::null(),\n screen, ptr::null());\n sdl::video::lock_surface(sdl_surf);\n sdl::video::flip(screen);\n comm::send(response_ch, ());\n }\n exit { break; }\n }\n }\n }\n AzReleaseDrawTarget(azure_target);\n cairo_surface_destroy(cairo_surf);\n sdl::video::unlock_surface(sdl_surf);\n sdl::quit();\n}<commit_msg>Factor out some code in osmain<commit_after>import azure::cairo::cairo_surface_t;\n\n#[cfg(target_os = \"linux\")]\nmod platform {\n fn runmain(f: fn()) {\n f()\n }\n}\n\n#[cfg(target_os = \"macos\")]\nmod platform {\n use cocoa;\n import cocoa::base::*;\n\n mod NSApplication {\n fn sharedApplication() -> id {\n let klass = str::as_c_str(\"NSApplication\") { |s|\n objc::objc_getClass(s)\n };\n\n let sel = str::as_c_str(\"sharedApplication\") { |s|\n objc::sel_registerName(s)\n };\n\n let nsapp = objc::objc_msgSend(klass, sel);\n #debug(\"nsapp: %d\", (nsapp as int));\n\n\t ret nsapp;\n }\n }\n\n mod NSAutoreleasePool {\n fn alloc() -> id {\n let klass = str::as_c_str(\"NSAutoreleasePool\") { |s|\n objc::objc_getClass(s)\n };\n let sel = str::as_c_str(\"alloc\") { |s|\n objc::sel_registerName(s)\n };\n let pool = objc::objc_msgSend(klass, sel);\n #debug(\"autorelease pool: %?\", pool);\n ret pool;\n }\n fn init(pool: id) {\n let sel = str::as_c_str(\"init\") { |s|\n objc::sel_registerName(s)\n };\n 
objc::objc_msgSend(pool, sel);\n }\n fn release(pool: id) {\n let sel = str::as_c_str(\"release\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(pool, sel);\n }\n }\n\n mod NSApp {\n fn setDelegate(nsapp: id, main: id) {\n\t #debug(\"NSApp::setDelegate\");\n\t let sel = str::as_c_str(\"setDelegate:\") { |s|\n\t objc::sel_registerName(s)\n\t };\n\t cocoa::msgSend1Id(nsapp, sel, main);\n }\n\n fn run(nsapp: id) {\n\t #debug(\"NSApp::run\");\n let sel = str::as_c_str(\"run\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(nsapp, sel);\n }\n }\n\n mod MainObj {\n crust fn applicationDidFinishLaunching(this: id, _sel: SEL) {\n\t #debug(\"applicationDidFinishLaunching\");\n\n\t let fptr: *fn() = ptr::null();\n\t str::as_c_str(\"fptr\") { |name|\n\t let outValue = unsafe { unsafe::reinterpret_cast(ptr::addr_of(fptr)) };\n #debug(\"*fptr %?\", outValue);\n objc::object_getInstanceVariable(this, name, outValue)\n };\n\n\t #debug(\"getting osmain fptr: %?\", fptr);\n\n\t unsafe {\n\t \/\/ FIXME: We probably don't want to run the main routine in a crust function\n (*fptr)();\n }\n\t }\n\n \t fn create(f: fn()) -> id {\n let NSObject = str::as_c_str(\"NSObject\") { |s|\n\t objc::objc_getClass(s)\n };\n\t let MainObj = str::as_c_str(\"MainObj\") { |s|\n\t objc::objc_allocateClassPair(NSObject, s, 0 as libc::size_t)\n\t };\n\n \/\/ Add a field to our class to contain a pointer to a rust closure\n\t let res = str::as_c_str(\"fptr\") { |name|\n str::as_c_str(\"^i\") { |types|\n objc::class_addIvar(MainObj, name,\n sys::size_of::<libc::uintptr_t>() as libc::size_t,\n 16u8, types)\n }\n };\n \t assert res == true;\n\n\t let launchfn = str::as_c_str(\"applicationDidFinishLaunching:\") { |s|\n\t objc::sel_registerName(s)\n\t };\n\t let _ = str::as_c_str(\"@@:\") { |types|\n\t objc::class_addMethod(MainObj, launchfn, applicationDidFinishLaunching, types)\n\t };\n\n\t objc::objc_registerClassPair(MainObj);\n\n let sel = str::as_c_str(\"alloc\") { |s|\n objc::sel_registerName(s)\n };\n let mainobj = objc::objc_msgSend(MainObj, sel);\n\n let sel = str::as_c_str(\"init\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(mainobj, sel);\n\n\t let fptr = ptr::addr_of(f);\n\t str::as_c_str(\"fptr\") { |name|\n\t #debug(\"setting osmain fptr: %?\", fptr);\n\t\t let value = unsafe { unsafe::reinterpret_cast(fptr) };\n #debug(\"*fptr: %?\", value);\n objc::object_setInstanceVariable(mainobj, name, value)\n };\n\n\t ret mainobj;\n\t }\n\t fn release(mainobj: id) {\n let sel = str::as_c_str(\"release\") { |s|\n objc::sel_registerName(s)\n };\n objc::objc_msgSend(mainobj, sel);\n\t }\n }\n\n fn runmain(f: fn()) {\n\tlet pool = NSAutoreleasePool::alloc();\n\tNSAutoreleasePool::init(pool);\n let NSApp = NSApplication::sharedApplication();\n\n let mainobj = MainObj::create(f);\n\tNSApp::setDelegate(NSApp, mainobj);\n\tNSApp::run(NSApp);\n\t\n\tMainObj::release(mainobj);\t\n\tNSAutoreleasePool::release(pool);\n }\n}\n\nenum msg {\n get_draw_target(comm::chan<AzDrawTargetRef>),\n add_key_handler(comm::chan<()>),\n draw(comm::chan<()>),\n exit\n}\n\nfn osmain() -> comm::chan<msg> {\n on_osmain::<msg> {|po|\n platform::runmain {||\n #debug(\"preparing to enter main loop\");\n\t mainloop(po);\n }\n }\n}\n\n\/\/ A function for spawning into the platform's main thread\nfn on_osmain<T: send>(f: fn~(comm::port<T>)) -> comm::chan<T> {\n let builder = task::builder();\n let opts = {\n sched: some({\n mode: task::osmain,\n native_stack_size: none\n })\n with task::get_opts(builder)\n };\n 
task::set_opts(builder, opts);\n ret task::run_listener(builder, f);\n}\n\nfn mainloop(po: comm::port<msg>) {\n\n let mut key_handlers = [];\n\n sdl::init([\n sdl::init_video\n ]);\n\n let screen = sdl::video::set_video_mode(\n 800, 600, 32,\n [sdl::video::swsurface],\n [sdl::video::doublebuf]);\n assert !ptr::is_null(screen);\n\n let surface = mk_surface();\n\n loop {\n sdl::event::poll_event {|event|\n alt event {\n sdl::event::keydown_event(_) {\n key_handlers.iter {|key_ch|\n comm::send(key_ch, ())\n }\n }\n _ { }\n }\n }\n\n \/\/ Handle messages\n if comm::peek(po) {\n alt check comm::recv(po) {\n add_key_handler(key_ch) {\n key_handlers += [key_ch];\n }\n get_draw_target(response_ch) {\n comm::send(response_ch, copy(surface.az_target));\n }\n draw(response_ch) {\n sdl::video::unlock_surface(surface.sdl_surf);\n sdl::video::blit_surface(surface.sdl_surf, ptr::null(),\n screen, ptr::null());\n sdl::video::lock_surface(surface.sdl_surf);\n sdl::video::flip(screen);\n comm::send(response_ch, ());\n }\n exit { break; }\n }\n }\n }\n destroy_surface(surface);\n sdl::quit();\n}\n\ntype surface = {\n sdl_surf: *sdl::video::surface,\n cairo_surf: *cairo_surface_t,\n az_target: AzDrawTargetRef\n};\n\nfn mk_surface() -> surface {\n let sdl_surf = sdl::video::create_rgb_surface(\n [sdl::video::swsurface],\n 800, 600, 32,\n 0x00FF0000u32,\n 0x0000FF00u32,\n 0x000000FFu32,\n 0x00000000u32\n );\n assert !ptr::is_null(sdl_surf);\n sdl::video::lock_surface(sdl_surf);\n let cairo_surf = unsafe {\n cairo_image_surface_create_for_data(\n unsafe::reinterpret_cast((*sdl_surf).pixels),\n cairo::CAIRO_FORMAT_RGB24,\n (*sdl_surf).w,\n (*sdl_surf).h,\n (*sdl_surf).pitch as libc::c_int\n )\n };\n assert !ptr::is_null(cairo_surf);\n\n let azure_target = AzCreateDrawTargetForCairoSurface(cairo_surf);\n assert !ptr::is_null(azure_target);\n\n {\n sdl_surf: sdl_surf,\n cairo_surf: cairo_surf,\n az_target: azure_target\n }\n}\n\nfn destroy_surface(surface: surface) {\n AzReleaseDrawTarget(surface.az_target);\n cairo_surface_destroy(surface.cairo_surf);\n sdl::video::unlock_surface(surface.sdl_surf);\n sdl::video::free_surface(surface.sdl_surf);\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>add test illustrating current \"coerce to `!`\" behavior<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(never_type)]\n\nfn foo(x: usize, y: !, z: usize) { }\n\nfn call_foo_a() {\n \/\/ FIXME(#40800) -- accepted beacuse divergence happens **before**\n \/\/ the coercion to `!`, but within same expression. Not clear that\n \/\/ these are the rules we want.\n foo(return, 22, 44);\n}\n\nfn call_foo_b() {\n \/\/ Divergence happens in the argument itself, definitely ok.\n foo(22, return, 44);\n}\n\nfn call_foo_c() {\n \/\/ This test fails because the divergence happens **after** the\n \/\/ coercion to `!`:\n foo(22, 44, return); \/\/~ ERROR mismatched types\n}\n\nfn call_foo_d() {\n \/\/ This test passes because `a` has type `!`:\n let a: ! = return;\n let b = 22;\n let c = 44;\n foo(a, b, c); \/\/ ... 
and hence a reference to `a` is expected to diverge.\n}\n\nfn call_foo_e() {\n \/\/ This test probably could pass but we don't *know* that `a`\n \/\/ has type `!` so we don't let it work.\n let a = return;\n let b = 22;\n let c = 44;\n foo(a, b, c); \/\/~ ERROR mismatched types\n}\n\nfn call_foo_f() {\n \/\/ This fn fails because `a` has type `usize`, and hence a\n \/\/ reference to is it **not** considered to diverge.\n let a: usize = return;\n let b = 22;\n let c = 44;\n foo(a, b, c); \/\/~ ERROR mismatched types\n}\n\nfn array_a() {\n \/\/ Accepted: return is coerced to `!` just fine, and then `22` can be\n \/\/ because we already diverged.\n let x: [!; 2] = [return, 22];\n}\n\nfn array_b() {\n \/\/ Error: divergence has not yet occurred.\n let x: [!; 2] = [22, return]; \/\/~ ERROR mismatched types\n}\n\nfn tuple_a() {\n \/\/ No divergence at all.\n let x: (usize, !, usize) = (22, 44, 66); \/\/~ ERROR mismatched types\n}\n\nfn tuple_b() {\n \/\/ Divergence happens before coercion: OK\n let x: (usize, !, usize) = (return, 44, 66);\n}\n\nfn tuple_c() {\n \/\/ Divergence happens before coercion: OK\n let x: (usize, !, usize) = (22, return, 66);\n}\n\nfn tuple_d() {\n \/\/ Error: divergence happens too late\n let x: (usize, !, usize) = (22, 44, return); \/\/~ ERROR mismatched types\n}\n\nfn main() { }\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #89975 - JohnTitor:gats-tests-85921, r=jackh726<commit_after>\/\/ check-pass\n\n#![feature(generic_associated_types)]\n\ntrait Trait {\n type Assoc<'a>;\n\n fn with_assoc(f: impl FnOnce(Self::Assoc<'_>));\n}\n\nimpl Trait for () {\n type Assoc<'a> = i32;\n\n fn with_assoc(f: impl FnOnce(Self::Assoc<'_>)) {\n f(5i32)\n }\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #26668 - frewsxcv:regression-test-17756, r=alexcrichton<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nconst count : usize = 2 as usize;\nfn main() {\n let larger : [usize; count*2];\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add tests for macro-based impls<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#[derive(Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Array<T> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n\/\/ FIXME(#7622): merge with `Array` once `[T; N]: Clone` where `T: Clone`\n#[derive(Clone, Copy)]\nstruct CopyArray<T: Copy> {\n f00: [T; 00],\n f01: [T; 01],\n f02: [T; 02],\n f03: [T; 03],\n f04: [T; 04],\n f05: [T; 05],\n f06: [T; 06],\n f07: [T; 07],\n f08: [T; 08],\n f09: [T; 09],\n f10: [T; 10],\n f11: [T; 11],\n f12: [T; 12],\n f13: [T; 13],\n f14: [T; 14],\n f15: [T; 15],\n f16: [T; 16],\n f17: [T; 17],\n f18: [T; 18],\n f19: [T; 19],\n f20: [T; 20],\n f21: [T; 21],\n f22: [T; 22],\n f23: [T; 23],\n f24: [T; 24],\n f25: [T; 25],\n f26: [T; 26],\n f27: [T; 27],\n f28: [T; 28],\n f29: [T; 29],\n f30: [T; 30],\n f31: [T; 31],\n f32: [T; 32],\n}\n\n#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Fn<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: fn(),\n f01: fn(A),\n f02: fn(A, B),\n f03: fn(A, B, C),\n f04: fn(A, B, C, D),\n f05: fn(A, B, C, D, E),\n f06: fn(A, B, C, D, E, F),\n f07: fn(A, B, C, D, E, F, G),\n f08: fn(A, B, C, D, E, F, G, H),\n f09: fn(A, B, C, D, E, F, G, H, I),\n f10: fn(A, B, C, D, E, F, G, H, I, J),\n f11: fn(A, B, C, D, E, F, G, H, I, J, K),\n f12: fn(A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\n#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]\nstruct Tuple<A, B, C, D, E, F, G, H, I, J, K, L> {\n f00: (),\n f01: (A),\n f02: (A, B),\n f03: (A, B, C),\n f04: (A, B, C, D),\n f05: (A, B, C, D, E),\n f06: (A, B, C, D, E, F),\n f07: (A, B, C, D, E, F, G),\n f08: (A, B, C, D, E, F, G, H),\n f09: (A, B, C, D, E, F, G, H, I),\n f10: (A, B, C, D, E, F, G, H, I, J),\n f11: (A, B, C, D, E, F, G, H, I, J, K),\n f12: (A, B, C, D, E, F, G, H, I, J, K, L),\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #50505 - Aaronepower:add-test, r=oli-obk<commit_after>\/\/ Copyright 2018 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\ntype Func = fn(usize, usize) -> usize;\n\nfn foo(a: usize, b: usize) -> usize { a + b }\nfn bar(a: usize, b: usize) -> usize { a * b }\nfn test(x: usize) -> Func {\n if x % 2 == 0 { foo }\n else { bar }\n}\n\nconst FOO: Func = foo;\nconst BAR: Func = bar;\n\nfn main() {\n match test(std::env::consts::ARCH.len()) {\n FOO => println!(\"foo\"),\n BAR => println!(\"bar\"),\n _ => unreachable!(),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>there can never be too many tests<commit_after>#[repr(C)]\npub enum Foo {\n A, B, C, D\n}\n\nfn main() {\n let f = unsafe { std::mem::transmute::<i32, Foo>(42) };\n match f {\n Foo::A => {}, \/\/~ ERROR invalid enum discriminant value read\n Foo::B => {},\n Foo::C => {},\n Foo::D => {},\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>basic structure<commit_after>extern crate core;\n\nuse core::num::FromPrimitive;\nuse core::fmt;\n\nuse Direction::{SpinUp, SpinDown, SpinSuper};\nuse Plan::{Trival, OddBall};\n\n#[derive(Copy)]\n#[derive(Show)]\nenum Direction {\n SpinUp,\n SpinDown,\n SpinSuper,\n}\n\nimpl fmt::String for Direction {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n let r = match self {\n &SpinUp => \"SpinUp\",\n &SpinDown => \"SpinDown\",\n &SpinSuper => \"SpinSuper\"\n };\n write!(f, \"{}\", r)\n }\n}\n\nstruct Pair<T> {\n lhs: T,\n rhs: T\n}\n\nstruct Particle {\n spin: Direction\n}\n\nenum Plan {\n Trivial, \/\/ up-up-up -> down-down-down\n OddBall \/\/ up-down-up -> down-up-down\n}\n\nenum Detector {\n D12, \/\/ 12 o'clock\n D3, \/\/ 3 o'clock\n D9, \/\/ 9 o' clock\n}\n\nimpl Particle {\n fn new_pair () -> Pair<Particle> {\n let d1 = SpinSuper;\n let d2 = SpinSuper;\n let p1 = Particle{spin: d1};\n let p2 = Particle{spin: d2};\n\n return Pair{lhs: p1, rhs: p2};\n }\n\n pub fn measure (&mut self, theta: int) {\n \/\/ TODO theta = 60degrees use 3\/4th and 1\/4th\n \/\/ TODO theta = 0 SpinUp\n \/\/ TODO theta = 180 SpinDown\n }\n\n \/\/ NOTE on spooky and premeditated only if measured in the same\n \/\/ direction must the spins be opposite\n\n \/\/ measure with with a message\n pub fn spooky (&mut self, friend: &mut Particle) -> Pair<Direction> {\n\n let spin = match self.spin {\n SpinUp => SpinDown,\n SpinDown => SpinUp,\n SpinSuper => SpinUp\n };\n friend.spin = spin;\n\n let spin = match friend.spin {\n SpinUp => SpinDown,\n SpinDown => SpinUp,\n _ => panic!(\"broke the universe\")\n };\n self.spin = spin;\n\n return Pair{lhs: self.spin, rhs: friend.spin};\n }\n\n pub fn premeditated (&mut self, friend: &mut Particle, theta: int, plan: Plan) -> Pair<Direction> {\n\n \/\/ TODO: Use plan\n friend.spin = SpinUp;\n \n let spin = match friend.spin {\n SpinUp => SpinDown,\n SpinDown => SpinUp,\n _ => panic!(\"broke the universe\")\n };\n self.spin = spin;\n\n return Pair{lhs: self.spin, rhs: friend.spin};\n }\n}\n\n\nfn main () {\n\n let particles = Particle::new_pair();\n\n let mut lhs = particles.lhs;\n let mut rhs = particles.rhs;\n\n lhs.spooky(&mut rhs);\n\n println!(\"lhs.spin {}, rhs.spin {}\", lhs.spin, rhs.spin);\n\n \/\/ premeditated should give +55.6% difference, spooky would give 50%\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Still insert items one at a time, but at least parse in a seperate thread.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Added a repeater example demonstrating message throttling.<commit_after>extern crate irc;\n\nuse std::default::Default;\nuse 
irc::client::prelude::*;\n\nfn main() {\n let config = Config {\n nickname: Some(\"repeater\".to_owned()),\n alt_nicks: Some(vec![\"blaster\".to_owned(), \"smg\".to_owned()]),\n server: Some(\"irc.mozilla.org\".to_owned()),\n use_ssl: Some(true),\n channels: Some(vec![\"#rust-spam\".to_owned()]),\n burst_window_length: Some(4),\n max_messages_in_burst: Some(4),\n ..Default::default()\n };\n\n let server = IrcServer::from_config(config).unwrap();\n server.identify().unwrap();\n\n server.for_each_incoming(|message| {\n print!(\"{}\", message);\n match message.command {\n Command::PRIVMSG(ref target, ref msg) => {\n if msg.starts_with(server.current_nickname()) {\n let tokens: Vec<_> = msg.split(\" \").collect();\n if tokens.len() > 2 {\n let n = tokens[0].len() + tokens[1].len() + 2;\n if let Ok(count) = tokens[1].parse::<u8>() {\n for _ in 0..count {\n server.send_privmsg(\n message.response_target().unwrap_or(target),\n &msg[n..]\n ).unwrap();\n }\n }\n }\n }\n }\n _ => (),\n }\n }).unwrap()\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add a test for #38273<commit_after>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ compile-flags: --emit=metadata\n\/\/ no-prefer-dynamic\n\/\/ must-compile-successfully\n\n#[deny(warnings)]\n\n\/\/ Test that we don't get warnings for non-pub main when only emitting metadata.\n\/\/ (#38273)\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>the minimal test for two-phase borrows: the core example from niko's blog post on it.<commit_after>\/\/ Copyright 2017 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ revisions: lxl nll\n\/\/[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows\n\/\/[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll\n\n\/\/ This is the \"goto example\" for why we want two phase borrows.\n\nfn main() {\n let mut v = vec![0, 1, 2];\n v.push(v.len());\n assert_eq!(v, [0, 1, 2, 3]);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>regression test for issue #88583.<commit_after>\/\/ check-pass\n\n#![allow(non_camel_case_types)]\n\nstruct union;\n\nimpl union {\n pub fn new() -> Self {\n union { }\n }\n}\n\nfn main() {\n let _u = union::new();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Benchmark pattern and exception parsing<commit_after>#![feature(test)]\n\n#[macro_use]\nextern crate lazy_static;\nextern crate test;\n\nuse std::path::PathBuf;\nuse test::Bencher;\n\nextern crate hyphenation;\nuse hyphenation::{load, Language};\nuse hyphenation::exception::{Exceptions};\nuse hyphenation::pattern::{Patterns};\n\n\nlazy_static! 
{\n static ref DATAPATH: PathBuf = {\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n path.push(\"patterns\");\n\n path\n };\n}\n\n\n#[bench]\nfn parse_patterns_en_us(bench: &mut Bencher) {\n hyphenation::set_pattern_folder(DATAPATH.as_path());\n\n let by_line = load::patterns(Language::English_US).unwrap();\n let v: Vec<_> = by_line.collect();\n\n let mut ps = Patterns::empty();\n bench.iter(|| {\n for p in &v {\n for val in p { ps.insert(&*val) };\n }\n });\n}\n\n#[bench]\nfn parse_exceptions_en_us(bench: &mut Bencher) {\n hyphenation::set_pattern_folder(DATAPATH.as_path());\n\n let by_line = load::exceptions(Language::English_US).unwrap();\n let v: Vec<_> = by_line.collect();\n\n let mut exs = Exceptions::empty();\n bench.iter(|| {\n for ex in &v {\n for val in ex { exs.insert(&*val) };\n }\n });\n}\n<|endoftext|>"} {"text":"<commit_before>use std::cell::RefCell;\nuse std::iter::Iterator;\nuse std::rc::Rc;\nuse std::ops::Deref;\n\nuse storage::file::File;\n\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, Rc<RefCell<File>>);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n for file in files {\n self.print_file(file);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n info!(\"{}\", f(file).join(\" \"));\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n for file in files {\n self.print_file_custom(file, f);\n }\n }\n\n}\n\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f(file).join(\" \"));\n }\n }\n\n}\n\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", &*f.deref().borrow());\n } else {\n info!(\"[File]: {}\", f.deref().borrow().id());\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n let s = f(file).join(\" \");\n if self.debug {\n debug!(\"{:?}\", s);\n } else if self.verbose {\n info!(\"{}\", s);\n } else {\n info!(\"[File]: {}\", s);\n }\n }\n\n}\n\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n 
tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"#\", \"Module\", \"ID\", \"...\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n\n let mut row = Row::new(vec![cell_i, cell_o, cell_id]);\n\n for cell in f(file).iter() {\n debug!(\"Adding custom cell: {:?}\", cell);\n row.add_cell(Cell::new(&cell[..]))\n }\n\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n}\n<commit_msg>Add docs to ui\/file.rs<commit_after>use std::cell::RefCell;\nuse std::iter::Iterator;\nuse std::rc::Rc;\nuse std::ops::Deref;\n\nuse storage::file::File;\n\n\/**\n * Trait for a printer which can be used to print data from files\n *\/\npub trait FilePrinter {\n\n fn new(verbose: bool, debug: bool) -> Self;\n\n \/*\n * Print a single file\n *\/\n fn print_file(&self, Rc<RefCell<File>>);\n\n \/*\n * Print a list of files\n *\/\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n for file in files {\n self.print_file(file);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n info!(\"{}\", f(file).join(\" \"));\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n for file in files {\n self.print_file_custom(file, f);\n }\n }\n\n}\n\n\/**\n * Printer which prints in debug mode if enabled\n *\/\nstruct DebugPrinter {\n debug: bool,\n}\n\nimpl FilePrinter for DebugPrinter {\n\n fn new(_: bool, debug: bool) -> DebugPrinter {\n DebugPrinter {\n debug: debug,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f);\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n if self.debug {\n debug!(\"[DebugPrinter] ->\\n{:?}\", f(file).join(\" \"));\n }\n }\n\n}\n\n\/**\n * Simple printer, which just uses the info!() macro or debug!() macro if in debug mode.\n *\/\nstruct SimplePrinter {\n verbose: bool,\n debug: bool,\n}\n\nimpl FilePrinter for SimplePrinter {\n\n fn new(verbose: bool, debug: bool) -> SimplePrinter {\n SimplePrinter {\n debug: debug,\n verbose: verbose,\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n if self.debug {\n 
debug!(\"{:?}\", f);\n } else if self.verbose {\n info!(\"{}\", &*f.deref().borrow());\n } else {\n info!(\"[File]: {}\", f.deref().borrow().id());\n }\n }\n\n fn print_file_custom<F>(&self, file: Rc<RefCell<File>>, f: &F)\n where F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n let s = f(file).join(\" \");\n if self.debug {\n debug!(\"{:?}\", s);\n } else if self.verbose {\n info!(\"{}\", s);\n } else {\n info!(\"[File]: {}\", s);\n }\n }\n\n}\n\n\/**\n * Table printer to print file information in a nice ASCII-table\n *\/\npub struct TablePrinter {\n verbose: bool,\n debug: bool,\n sp: SimplePrinter,\n}\n\nimpl FilePrinter for TablePrinter {\n\n fn new(verbose: bool, debug: bool) -> TablePrinter {\n TablePrinter {\n debug: debug,\n verbose: verbose,\n sp: SimplePrinter::new(verbose, debug),\n }\n }\n\n fn print_file(&self, f: Rc<RefCell<File>>) {\n self.sp.print_file(f);\n }\n\n fn print_files<I: Iterator<Item = Rc<RefCell<File>>>>(&self, files: I) {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"File#\", \"Owner\", \"ID\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n let row = Row::new(vec![cell_i, cell_o, cell_id]);\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n fn print_files_custom<F, I>(&self, files: I, f: &F)\n where I: Iterator<Item = Rc<RefCell<File>>>,\n F: Fn(Rc<RefCell<File>>) -> Vec<String>\n {\n use prettytable::Table;\n use prettytable::row::Row;\n use prettytable::cell::Cell;\n\n let titles = row![\"#\", \"Module\", \"ID\", \"...\"];\n\n let mut tab = Table::new();\n tab.set_titles(titles);\n\n let mut i = 0;\n for file in files {\n debug!(\"Printing file: {:?}\", file);\n i += 1;\n let cell_i = Cell::new(&format!(\"{}\", i)[..]);\n let cell_o = Cell::new(&format!(\"{}\", file.deref().borrow().owner_name())[..]);\n\n let id : String = file.deref().borrow().id().clone().into();\n let cell_id = Cell::new(&id[..]);\n\n let mut row = Row::new(vec![cell_i, cell_o, cell_id]);\n\n for cell in f(file).iter() {\n debug!(\"Adding custom cell: {:?}\", cell);\n row.add_cell(Cell::new(&cell[..]))\n }\n\n tab.add_row(row);\n }\n\n if i != 0 {\n debug!(\"Printing {} table entries\", i);\n tab.printstd();\n } else {\n debug!(\"Not printing table because there are zero entries\");\n }\n }\n\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add test for changing pub inherent method body<commit_after>\/\/ Copyright 2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test where we change the body of a private method in an impl.\n\/\/ We then test what sort of functions must be rebuilt as a result.\n\n\/\/ revisions:rpass1 rpass2\n\/\/ compile-flags: -Z query-dep-graph\n\n#![feature(rustc_attrs)]\n#![feature(stmt_expr_attributes)]\n#![allow(dead_code)]\n\n#![rustc_partition_translated(module=\"struct_point-point\", cfg=\"rpass2\")]\n\n\/\/ FIXME(#35078) -- this gets recompiled because we don't separate sig from body\n#![rustc_partition_translated(module=\"struct_point-fn_calls_changed_method\", cfg=\"rpass2\")]\n\/\/ FIXME(#36349) -- this gets recompiled because we don't separate items from impl\n#![rustc_partition_translated(module=\"struct_point-fn_calls_another_method\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_make_struct\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_read_field\", cfg=\"rpass2\")]\n#![rustc_partition_reused(module=\"struct_point-fn_write_field\", cfg=\"rpass2\")]\n\nmod point {\n pub struct Point {\n pub x: f32,\n pub y: f32,\n }\n\n impl Point {\n pub fn distance_from_origin(&self) -> f32 {\n #[cfg(rpass1)]\n return self.x * self.x + self.y * self.y;\n\n #[cfg(rpass2)]\n return (self.x * self.x + self.y * self.y).sqrt();\n }\n\n pub fn x(&self) -> f32 {\n self.x\n }\n }\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from the same impl which changed\nmod fn_calls_changed_method {\n use point::Point;\n\n \/\/ FIXME(#35078) -- this gets recompiled because we don't separate sig from body\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let p = Point { x: 2.0, y: 2.0 };\n p.distance_from_origin();\n }\n}\n\n\/\/\/ A fn item that calls (public) methods on `Point` from the same impl which changed\nmod fn_calls_another_method {\n use point::Point;\n\n \/\/ FIXME(#36349) -- this gets recompiled because we don't separate items from impl\n #[rustc_dirty(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn check() {\n let p = Point { x: 2.0, y: 2.0 };\n p.x();\n }\n}\n\n\/\/\/ A fn item that makes an instance of `Point` but does not invoke methods\nmod fn_make_struct {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn make_origin() -> Point {\n Point { x: 2.0, y: 2.0 }\n }\n}\n\n\/\/\/ A fn item that reads fields from `Point` but does not invoke methods\nmod fn_read_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn get_x(p: Point) -> f32 {\n p.x\n }\n}\n\n\/\/\/ A fn item that writes to a field of `Point` but does not invoke methods\nmod fn_write_field {\n use point::Point;\n\n #[rustc_clean(label=\"TypeckItemBody\", cfg=\"rpass2\")]\n pub fn inc_x(p: &mut Point) {\n p.x += 1.0;\n }\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Refactor: cleanup printing<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::WebSocketBinding;\nuse dom::bindings::codegen::Bindings::WebSocketBinding::WebSocketMethods;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::codegen::InheritTypes::EventCast;\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::error::Error::{InvalidAccess, Syntax};\nuse dom::bindings::global::{GlobalField, GlobalRef};\nuse dom::bindings::js::Root;\nuse dom::bindings::refcounted::Trusted;\nuse dom::bindings::str::USVString;\nuse dom::bindings::trace::JSTraceable;\nuse dom::bindings::utils::reflect_dom_object;\nuse dom::closeevent::CloseEvent;\nuse dom::event::{Event, EventBubbles, EventCancelable, EventHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse script_task::Runnable;\nuse script_task::ScriptMsg;\nuse std::cell::{Cell, RefCell};\nuse std::borrow::ToOwned;\nuse util::str::DOMString;\n\nuse hyper::header::Host;\nuse websocket::Message;\nuse websocket::ws::sender::Sender as Sender_Object;\nuse websocket::client::sender::Sender;\nuse websocket::client::receiver::Receiver;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\nuse websocket::Client;\nuse websocket::header::Origin;\nuse websocket::result::WebSocketResult;\nuse websocket::ws::util::url::parse_url;\n\n#[derive(JSTraceable, PartialEq, Copy, Clone)]\nenum WebSocketRequestState {\n Connecting = 0,\n Open = 1,\n Closing = 2,\n Closed = 3,\n}\n\nno_jsmanaged_fields!(Sender<WebSocketStream>);\nno_jsmanaged_fields!(Receiver<WebSocketStream>);\n\n#[dom_struct]\npub struct WebSocket {\n eventtarget: EventTarget,\n url: Url,\n global: GlobalField,\n ready_state: Cell<WebSocketRequestState>,\n sender: RefCell<Option<Sender<WebSocketStream>>>,\n receiver: RefCell<Option<Receiver<WebSocketStream>>>,\n failed: Cell<bool>, \/\/Flag to tell if websocket was closed due to failure\n full: Cell<bool>, \/\/Flag to tell if websocket queue is full\n clean_close: Cell<bool>, \/\/Flag to tell if the websocket closed cleanly (not due to full or fail)\n code: Cell<u16>, \/\/Closing code\n reason: DOMRefCell<DOMString>, \/\/Closing reason\n data: DOMRefCell<DOMString>, \/\/Data from send - TODO: Remove after buffer is added.\n sendCloseFrame: Cell<bool>\n}\n\n\/\/\/ *Establish a WebSocket Connection* as defined in RFC 6455.\nfn establish_a_websocket_connection(url: (Host, String, bool), origin: String)\n -> WebSocketResult<(Sender<WebSocketStream>, Receiver<WebSocketStream>)> {\n let mut request = try!(Client::connect(url));\n request.headers.set(Origin(origin));\n\n let response = try!(request.send());\n try!(response.validate());\n\n Ok(response.begin().split())\n}\n\n\nimpl WebSocket {\n pub fn new_inherited(global: GlobalRef, url: Url) -> WebSocket {\n WebSocket {\n eventtarget: EventTarget::new_inherited(EventTargetTypeId::WebSocket),\n url: url,\n global: GlobalField::from_rooted(&global),\n ready_state: Cell::new(WebSocketRequestState::Connecting),\n failed: Cell::new(false),\n sender: RefCell::new(None),\n receiver: RefCell::new(None),\n full: Cell::new(false),\n clean_close: Cell::new(true),\n code: Cell::new(0),\n reason: DOMRefCell::new(\"\".to_owned()),\n data: DOMRefCell::new(\"\".to_owned()),\n sendCloseFrame: Cell::new(false)\n }\n\n }\n\n pub fn new(global: GlobalRef, url: DOMString) -> Fallible<Root<WebSocket>> {\n \/\/ Step 1.\n let parsed_url = 
try!(Url::parse(&url).map_err(|_| Error::Syntax));\n let url = try!(parse_url(&parsed_url).map_err(|_| Error::Syntax));\n\n \/*TODO: This constructor is only a prototype, it does not accomplish the specs\n defined here:\n http:\/\/html.spec.whatwg.org\n The remaining 8 items must be satisfied.\n TODO: This constructor should be responsible for spawning a thread for the\n receive loop after ws.r().Open() - See comment\n *\/\n let ws = reflect_dom_object(box WebSocket::new_inherited(global, parsed_url),\n global,\n WebSocketBinding::Wrap);\n\n let channel = establish_a_websocket_connection(url, global.get_url().serialize());\n let (temp_sender, temp_receiver) = match channel {\n Ok(channel) => channel,\n Err(e) => {\n debug!(\"Failed to establish a WebSocket connection: {:?}\", e);\n let global_root = ws.r().global.root();\n let address = Trusted::new(global_root.r().get_cx(), ws.r(), global_root.r().script_chan().clone());\n let task = box WebSocketTaskHandler::new(address, WebSocketTask::Close);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(task)).unwrap();\n return Ok(ws);\n }\n };\n\n *ws.r().sender.borrow_mut() = Some(temp_sender);\n *ws.r().receiver.borrow_mut() = Some(temp_receiver);\n\n \/\/Create everything necessary for starting the open asynchronous task, then begin the task.\n let global_root = ws.r().global.root();\n let addr: Trusted<WebSocket> =\n Trusted::new(global_root.r().get_cx(), ws.r(), global_root.r().script_chan().clone());\n let open_task = box WebSocketTaskHandler::new(addr, WebSocketTask::ConnectionEstablished);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(open_task)).unwrap();\n \/\/TODO: Spawn thread here for receive loop\n \/*TODO: Add receive loop here and make new thread run this\n Receive is an infinite loop \"similiar\" the one shown here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop however does need to follow the spec. These are outlined here\n under \"WebSocket message has been received\" items 1-5:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop also needs to dispatch an asynchronous event as stated here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: When the receive loop receives a close message from the server,\n it confirms the websocket is now closed. 
This requires the close event\n to be fired (dispatch_close fires the close event - see implementation below)\n *\/\n Ok(ws)\n }\n\n pub fn Constructor(global: GlobalRef, url: DOMString) -> Fallible<Root<WebSocket>> {\n WebSocket::new(global, url)\n }\n}\n\nimpl<'a> WebSocketMethods for &'a WebSocket {\n event_handler!(open, GetOnopen, SetOnopen);\n event_handler!(close, GetOnclose, SetOnclose);\n event_handler!(error, GetOnerror, SetOnerror);\n\n fn Url(self) -> DOMString {\n self.url.serialize()\n }\n\n fn ReadyState(self) -> u16 {\n self.ready_state.get() as u16\n }\n\n fn Send(self, data: Option<USVString>) -> Fallible<()> {\n if self.ready_state.get() == WebSocketRequestState::Connecting {\n return Err(Error::InvalidState);\n }\n\n \/*TODO: This is not up to spec see http:\/\/html.spec.whatwg.org\/multipage\/comms.html search for\n \"If argument is a string\"\n TODO: Need to buffer data\n TODO: bufferedAmount attribute returns the size of the buffer in bytes -\n this is a required attribute defined in the websocket.webidl file\n TODO: The send function needs to flag when full by using the following\n self.full.set(true). This needs to be done when the buffer is full\n *\/\n let mut other_sender = self.sender.borrow_mut();\n let my_sender = other_sender.as_mut().unwrap();\n if self.sendCloseFrame.get() { \/\/TODO: Also check if the buffer is full\n self.sendCloseFrame.set(false);\n let _ = my_sender.send_message(Message::Close(None));\n return Ok(());\n }\n let _ = my_sender.send_message(Message::Text(data.unwrap().0));\n return Ok(())\n }\n\n fn Close(self, code: Option<u16>, reason: Option<USVString>) -> Fallible<()>{\n if let Some(code) = code {\n \/\/Check code is NOT 1000 NOR in the range of 3000-4999 (inclusive)\n if code != 1000 && (code < 3000 || code > 4999) {\n return Err(Error::InvalidAccess);\n }\n }\n if let Some(ref reason) = reason {\n if reason.0.as_bytes().len() > 123 { \/\/reason cannot be larger than 123 bytes\n return Err(Error::Syntax);\n }\n }\n\n match self.ready_state.get() {\n WebSocketRequestState::Closing | WebSocketRequestState::Closed => {} \/\/Do nothing\n WebSocketRequestState::Connecting => { \/\/Connection is not yet established\n \/*By setting the state to closing, the open function\n will abort connecting the websocket*\/\n self.ready_state.set(WebSocketRequestState::Closing);\n self.failed.set(true);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n \/\/TODO: Sending here is just empty string, though no string is really needed. 
Another send, empty\n \/\/ send, could be used.\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and\n \/\/ must fire a close event\n }\n WebSocketRequestState::Open => {\n \/\/Closing handshake not started - still in open\n \/\/Start the closing by setting the code and reason if they exist\n if let Some(code) = code {\n self.code.set(code);\n }\n if let Some(reason) = reason {\n *self.reason.borrow_mut() = reason.0;\n }\n self.ready_state.set(WebSocketRequestState::Closing);\n self.sendCloseFrame.set(true);\n \/\/Dispatch send task to send close frame\n let _ = self.Send(None);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and\n \/\/ must fire a close event\n }\n }\n Ok(()) \/\/Return Ok\n }\n}\n\n\npub enum WebSocketTask {\n \/\/\/ Task queued when *the WebSocket connection is established*.\n ConnectionEstablished,\n Close,\n}\n\npub struct WebSocketTaskHandler {\n addr: Trusted<WebSocket>,\n task: WebSocketTask,\n}\n\nimpl WebSocketTaskHandler {\n pub fn new(addr: Trusted<WebSocket>, task: WebSocketTask) -> WebSocketTaskHandler {\n WebSocketTaskHandler {\n addr: addr,\n task: task,\n }\n }\n\n fn connection_established(&self) {\n \/*TODO: Items 1, 3, 4, & 5 under \"WebSocket connection is established\" as specified here:\n https:\/\/html.spec.whatwg.org\/multipage\/#feedback-from-the-protocol\n *\/\n let ws = self.addr.root();\n\n \/\/ Step 1: Protocols.\n\n \/\/ Step 2.\n ws.ready_state.set(WebSocketRequestState::Open);\n\n \/\/ Step 3: Extensions.\n \/\/ Step 4: Protocols.\n \/\/ Step 5: Cookies.\n\n \/\/ Step 6.\n let global = ws.global.root();\n let event = Event::new(global.r(), \"open\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable);\n event.fire(EventTargetCast::from_ref(ws.r()));\n }\n\n fn dispatch_close(&self) {\n let ws = self.addr.root();\n let ws = ws.r();\n let global = ws.global.root();\n ws.ready_state.set(WebSocketRequestState::Closed);\n \/\/If failed or full, fire error event\n if ws.failed.get() || ws.full.get() {\n ws.failed.set(false);\n ws.full.set(false);\n \/\/A Bad close\n ws.clean_close.set(false);\n let event = Event::new(global.r(),\n \"error\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::Cancelable);\n let target = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n let rsn = ws.reason.borrow();\n let rsn_clone = rsn.clone();\n \/*In addition, we also have to fire a close even if error event fired\n https:\/\/html.spec.whatwg.org\/multipage\/#closeWebSocket\n *\/\n let close_event = CloseEvent::new(global.r(),\n \"close\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable,\n ws.clean_close.get(),\n ws.code.get(),\n rsn_clone);\n let target = EventTargetCast::from_ref(ws);\n let event = EventCast::from_ref(close_event.r());\n event.fire(target);\n }\n}\n\nimpl Runnable for WebSocketTaskHandler {\n fn handler(self: Box<WebSocketTaskHandler>) {\n match self.task {\n WebSocketTask::ConnectionEstablished => {\n self.connection_established();\n }\n WebSocketTask::Close => {\n self.dispatch_close();\n }\n }\n }\n}\n\n<commit_msg>Remove WebSocket::sendCloseFrame.<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. 
*\/\n\nuse dom::bindings::cell::DOMRefCell;\nuse dom::bindings::codegen::Bindings::WebSocketBinding;\nuse dom::bindings::codegen::Bindings::WebSocketBinding::WebSocketMethods;\nuse dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;\nuse dom::bindings::codegen::InheritTypes::EventTargetCast;\nuse dom::bindings::codegen::InheritTypes::EventCast;\nuse dom::bindings::error::{Error, Fallible};\nuse dom::bindings::error::Error::{InvalidAccess, Syntax};\nuse dom::bindings::global::{GlobalField, GlobalRef};\nuse dom::bindings::js::Root;\nuse dom::bindings::refcounted::Trusted;\nuse dom::bindings::str::USVString;\nuse dom::bindings::trace::JSTraceable;\nuse dom::bindings::utils::reflect_dom_object;\nuse dom::closeevent::CloseEvent;\nuse dom::event::{Event, EventBubbles, EventCancelable, EventHelpers};\nuse dom::eventtarget::{EventTarget, EventTargetHelpers, EventTargetTypeId};\nuse script_task::Runnable;\nuse script_task::ScriptMsg;\nuse std::cell::{Cell, RefCell};\nuse std::borrow::ToOwned;\nuse util::str::DOMString;\n\nuse hyper::header::Host;\nuse websocket::Message;\nuse websocket::ws::sender::Sender as Sender_Object;\nuse websocket::client::sender::Sender;\nuse websocket::client::receiver::Receiver;\nuse websocket::stream::WebSocketStream;\nuse websocket::client::request::Url;\nuse websocket::Client;\nuse websocket::header::Origin;\nuse websocket::result::WebSocketResult;\nuse websocket::ws::util::url::parse_url;\n\n#[derive(JSTraceable, PartialEq, Copy, Clone)]\nenum WebSocketRequestState {\n Connecting = 0,\n Open = 1,\n Closing = 2,\n Closed = 3,\n}\n\nno_jsmanaged_fields!(Sender<WebSocketStream>);\nno_jsmanaged_fields!(Receiver<WebSocketStream>);\n\n#[dom_struct]\npub struct WebSocket {\n eventtarget: EventTarget,\n url: Url,\n global: GlobalField,\n ready_state: Cell<WebSocketRequestState>,\n sender: RefCell<Option<Sender<WebSocketStream>>>,\n receiver: RefCell<Option<Receiver<WebSocketStream>>>,\n failed: Cell<bool>, \/\/Flag to tell if websocket was closed due to failure\n full: Cell<bool>, \/\/Flag to tell if websocket queue is full\n clean_close: Cell<bool>, \/\/Flag to tell if the websocket closed cleanly (not due to full or fail)\n code: Cell<u16>, \/\/Closing code\n reason: DOMRefCell<DOMString>, \/\/Closing reason\n data: DOMRefCell<DOMString>, \/\/Data from send - TODO: Remove after buffer is added.\n}\n\n\/\/\/ *Establish a WebSocket Connection* as defined in RFC 6455.\nfn establish_a_websocket_connection(url: (Host, String, bool), origin: String)\n -> WebSocketResult<(Sender<WebSocketStream>, Receiver<WebSocketStream>)> {\n let mut request = try!(Client::connect(url));\n request.headers.set(Origin(origin));\n\n let response = try!(request.send());\n try!(response.validate());\n\n Ok(response.begin().split())\n}\n\n\nimpl WebSocket {\n pub fn new_inherited(global: GlobalRef, url: Url) -> WebSocket {\n WebSocket {\n eventtarget: EventTarget::new_inherited(EventTargetTypeId::WebSocket),\n url: url,\n global: GlobalField::from_rooted(&global),\n ready_state: Cell::new(WebSocketRequestState::Connecting),\n failed: Cell::new(false),\n sender: RefCell::new(None),\n receiver: RefCell::new(None),\n full: Cell::new(false),\n clean_close: Cell::new(true),\n code: Cell::new(0),\n reason: DOMRefCell::new(\"\".to_owned()),\n data: DOMRefCell::new(\"\".to_owned()),\n }\n\n }\n\n pub fn new(global: GlobalRef, url: DOMString) -> Fallible<Root<WebSocket>> {\n \/\/ Step 1.\n let parsed_url = try!(Url::parse(&url).map_err(|_| Error::Syntax));\n let url = 
try!(parse_url(&parsed_url).map_err(|_| Error::Syntax));\n\n \/*TODO: This constructor is only a prototype, it does not accomplish the specs\n defined here:\n http:\/\/html.spec.whatwg.org\n The remaining 8 items must be satisfied.\n TODO: This constructor should be responsible for spawning a thread for the\n receive loop after ws.r().Open() - See comment\n *\/\n let ws = reflect_dom_object(box WebSocket::new_inherited(global, parsed_url),\n global,\n WebSocketBinding::Wrap);\n\n let channel = establish_a_websocket_connection(url, global.get_url().serialize());\n let (temp_sender, temp_receiver) = match channel {\n Ok(channel) => channel,\n Err(e) => {\n debug!(\"Failed to establish a WebSocket connection: {:?}\", e);\n let global_root = ws.r().global.root();\n let address = Trusted::new(global_root.r().get_cx(), ws.r(), global_root.r().script_chan().clone());\n let task = box WebSocketTaskHandler::new(address, WebSocketTask::Close);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(task)).unwrap();\n return Ok(ws);\n }\n };\n\n *ws.r().sender.borrow_mut() = Some(temp_sender);\n *ws.r().receiver.borrow_mut() = Some(temp_receiver);\n\n \/\/Create everything necessary for starting the open asynchronous task, then begin the task.\n let global_root = ws.r().global.root();\n let addr: Trusted<WebSocket> =\n Trusted::new(global_root.r().get_cx(), ws.r(), global_root.r().script_chan().clone());\n let open_task = box WebSocketTaskHandler::new(addr, WebSocketTask::ConnectionEstablished);\n global_root.r().script_chan().send(ScriptMsg::RunnableMsg(open_task)).unwrap();\n \/\/TODO: Spawn thread here for receive loop\n \/*TODO: Add receive loop here and make new thread run this\n Receive is an infinite loop \"similiar\" the one shown here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop however does need to follow the spec. These are outlined here\n under \"WebSocket message has been received\" items 1-5:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: The receive loop also needs to dispatch an asynchronous event as stated here:\n https:\/\/github.com\/cyderize\/rust-websocket\/blob\/master\/examples\/client.rs#L64\n TODO: When the receive loop receives a close message from the server,\n it confirms the websocket is now closed. This requires the close event\n to be fired (dispatch_close fires the close event - see implementation below)\n *\/\n Ok(ws)\n }\n\n pub fn Constructor(global: GlobalRef, url: DOMString) -> Fallible<Root<WebSocket>> {\n WebSocket::new(global, url)\n }\n}\n\nimpl<'a> WebSocketMethods for &'a WebSocket {\n event_handler!(open, GetOnopen, SetOnopen);\n event_handler!(close, GetOnclose, SetOnclose);\n event_handler!(error, GetOnerror, SetOnerror);\n\n fn Url(self) -> DOMString {\n self.url.serialize()\n }\n\n fn ReadyState(self) -> u16 {\n self.ready_state.get() as u16\n }\n\n fn Send(self, data: Option<USVString>) -> Fallible<()> {\n if self.ready_state.get() == WebSocketRequestState::Connecting {\n return Err(Error::InvalidState);\n }\n\n \/*TODO: This is not up to spec see http:\/\/html.spec.whatwg.org\/multipage\/comms.html search for\n \"If argument is a string\"\n TODO: Need to buffer data\n TODO: bufferedAmount attribute returns the size of the buffer in bytes -\n this is a required attribute defined in the websocket.webidl file\n TODO: The send function needs to flag when full by using the following\n self.full.set(true). 
This needs to be done when the buffer is full\n *\/\n let mut other_sender = self.sender.borrow_mut();\n let my_sender = other_sender.as_mut().unwrap();\n let _ = my_sender.send_message(Message::Text(data.unwrap().0));\n return Ok(())\n }\n\n fn Close(self, code: Option<u16>, reason: Option<USVString>) -> Fallible<()>{\n fn send_close(this: &WebSocket) {\n this.ready_state.set(WebSocketRequestState::Closing);\n\n let mut sender = this.sender.borrow_mut();\n \/\/TODO: Also check if the buffer is full\n let _ = sender.as_mut().unwrap().send_message(Message::Close(None));\n }\n\n\n if let Some(code) = code {\n \/\/Check code is NOT 1000 NOR in the range of 3000-4999 (inclusive)\n if code != 1000 && (code < 3000 || code > 4999) {\n return Err(Error::InvalidAccess);\n }\n }\n if let Some(ref reason) = reason {\n if reason.0.as_bytes().len() > 123 { \/\/reason cannot be larger than 123 bytes\n return Err(Error::Syntax);\n }\n }\n\n match self.ready_state.get() {\n WebSocketRequestState::Closing | WebSocketRequestState::Closed => {} \/\/Do nothing\n WebSocketRequestState::Connecting => { \/\/Connection is not yet established\n \/*By setting the state to closing, the open function\n will abort connecting the websocket*\/\n self.failed.set(true);\n send_close(self);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and\n \/\/ must fire a close event\n }\n WebSocketRequestState::Open => {\n \/\/Closing handshake not started - still in open\n \/\/Start the closing by setting the code and reason if they exist\n if let Some(code) = code {\n self.code.set(code);\n }\n if let Some(reason) = reason {\n *self.reason.borrow_mut() = reason.0;\n }\n send_close(self);\n \/\/Note: After sending the close message, the receive loop confirms a close message from the server and\n \/\/ must fire a close event\n }\n }\n Ok(()) \/\/Return Ok\n }\n}\n\n\npub enum WebSocketTask {\n \/\/\/ Task queued when *the WebSocket connection is established*.\n ConnectionEstablished,\n Close,\n}\n\npub struct WebSocketTaskHandler {\n addr: Trusted<WebSocket>,\n task: WebSocketTask,\n}\n\nimpl WebSocketTaskHandler {\n pub fn new(addr: Trusted<WebSocket>, task: WebSocketTask) -> WebSocketTaskHandler {\n WebSocketTaskHandler {\n addr: addr,\n task: task,\n }\n }\n\n fn connection_established(&self) {\n \/*TODO: Items 1, 3, 4, & 5 under \"WebSocket connection is established\" as specified here:\n https:\/\/html.spec.whatwg.org\/multipage\/#feedback-from-the-protocol\n *\/\n let ws = self.addr.root();\n\n \/\/ Step 1: Protocols.\n\n \/\/ Step 2.\n ws.ready_state.set(WebSocketRequestState::Open);\n\n \/\/ Step 3: Extensions.\n \/\/ Step 4: Protocols.\n \/\/ Step 5: Cookies.\n\n \/\/ Step 6.\n let global = ws.global.root();\n let event = Event::new(global.r(), \"open\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable);\n event.fire(EventTargetCast::from_ref(ws.r()));\n }\n\n fn dispatch_close(&self) {\n let ws = self.addr.root();\n let ws = ws.r();\n let global = ws.global.root();\n ws.ready_state.set(WebSocketRequestState::Closed);\n \/\/If failed or full, fire error event\n if ws.failed.get() || ws.full.get() {\n ws.failed.set(false);\n ws.full.set(false);\n \/\/A Bad close\n ws.clean_close.set(false);\n let event = Event::new(global.r(),\n \"error\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::Cancelable);\n let target = EventTargetCast::from_ref(ws);\n event.r().fire(target);\n }\n let rsn = ws.reason.borrow();\n let rsn_clone = 
rsn.clone();\n \/*In addition, we also have to fire a close even if error event fired\n https:\/\/html.spec.whatwg.org\/multipage\/#closeWebSocket\n *\/\n let close_event = CloseEvent::new(global.r(),\n \"close\".to_owned(),\n EventBubbles::DoesNotBubble,\n EventCancelable::NotCancelable,\n ws.clean_close.get(),\n ws.code.get(),\n rsn_clone);\n let target = EventTargetCast::from_ref(ws);\n let event = EventCast::from_ref(close_event.r());\n event.fire(target);\n }\n}\n\nimpl Runnable for WebSocketTaskHandler {\n fn handler(self: Box<WebSocketTaskHandler>) {\n match self.task {\n WebSocketTask::ConnectionEstablished => {\n self.connection_established();\n }\n WebSocketTask::Close => {\n self.dispatch_close();\n }\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>test: Add an interior-vec test case, XFAIL'd for now because of leaks in the compiler<commit_after>\/\/ xfail-stage0\n\/\/ xfail-stage1\n\/\/ xfail-stage2\n\/\/ xfail-stage3\n\n\/\/ works, but leaks in the compiler :(\n\nimport rusti::ivec_len;\n\nnative \"rust-intrinsic\" mod rusti {\n fn ivec_len[T](&T[] v) -> uint;\n}\n\nfn main() {\n let int[] v = ~[];\n assert (ivec_len(v) == 0u); \/\/ zero-length\n auto x = ~[ 1, 2 ];\n assert (ivec_len(x) == 2u); \/\/ on stack\n auto y = ~[ 1, 2, 3, 4, 5 ];\n assert (ivec_len(y) == 5u); \/\/ on heap\n\n v += ~[];\n assert (ivec_len(v) == 0u); \/\/ zero-length append\n x += ~[ 3 ];\n assert (ivec_len(x) == 3u); \/\/ on-stack append\n y += ~[ 6, 7, 8, 9 ];\n assert (ivec_len(y) == 9u); \/\/ on-heap append\n\n auto vv = v + v;\n assert (ivec_len(vv) == 0u); \/\/ zero-length add\n auto xx = x + ~[ 4 ];\n assert (ivec_len(xx) == 4u); \/\/ on-stack add\n auto yy = y + ~[ 10, 11 ];\n assert (ivec_len(yy) == 11u); \/\/ on-heap add\n}\n\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse infer::InferCtxt;\nuse infer::lexical_region_resolve::RegionResolutionError;\nuse infer::lexical_region_resolve::RegionResolutionError::*;\nuse syntax::codemap::Span;\nuse ty::{self, TyCtxt};\nuse util::common::ErrorReported;\n\nmod different_lifetimes;\nmod find_anon_type;\nmod named_anon_conflict;\nmod util;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n pub fn try_report_nice_region_error(&self, error: &RegionResolutionError<'tcx>) -> bool {\n let (span, sub, sup) = match *error {\n ConcreteFailure(ref origin, sub, sup) => (origin.span(), sub, sup),\n SubSupConflict(_, ref origin, sub, _, sup) => (origin.span(), sub, sup),\n _ => return false, \/\/ inapplicable\n };\n\n if let Some(tables) = self.in_progress_tables {\n let tables = tables.borrow();\n NiceRegionError::new(self.tcx, span, sub, sup, Some(&tables)).try_report().is_some()\n } else {\n NiceRegionError::new(self.tcx, span, sub, sup, None).try_report().is_some()\n }\n }\n}\n\npub struct NiceRegionError<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n span: Span,\n sub: ty::Region<'tcx>,\n sup: ty::Region<'tcx>,\n tables: Option<&'cx ty::TypeckTables<'tcx>>,\n}\n\nimpl<'cx, 'gcx, 'tcx> NiceRegionError<'cx, 'gcx, 'tcx> {\n pub fn new(\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n span: Span,\n sub: ty::Region<'tcx>,\n sup: ty::Region<'tcx>,\n tables: Option<&'cx ty::TypeckTables<'tcx>>,\n ) -> Self {\n Self { tcx, span, sub, sup, tables }\n }\n\n pub fn try_report(&self) -> Option<ErrorReported> {\n self.try_report_anon_anon_conflict()\n .or_else(|| self.try_report_named_anon_conflict())\n }\n}\n<commit_msg>give precedence to `try_report_named_anon_conflict` method<commit_after>\/\/ Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nuse infer::InferCtxt;\nuse infer::lexical_region_resolve::RegionResolutionError;\nuse infer::lexical_region_resolve::RegionResolutionError::*;\nuse syntax::codemap::Span;\nuse ty::{self, TyCtxt};\nuse util::common::ErrorReported;\n\nmod different_lifetimes;\nmod find_anon_type;\nmod named_anon_conflict;\nmod util;\n\nimpl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {\n pub fn try_report_nice_region_error(&self, error: &RegionResolutionError<'tcx>) -> bool {\n let (span, sub, sup) = match *error {\n ConcreteFailure(ref origin, sub, sup) => (origin.span(), sub, sup),\n SubSupConflict(_, ref origin, sub, _, sup) => (origin.span(), sub, sup),\n _ => return false, \/\/ inapplicable\n };\n\n if let Some(tables) = self.in_progress_tables {\n let tables = tables.borrow();\n NiceRegionError::new(self.tcx, span, sub, sup, Some(&tables)).try_report().is_some()\n } else {\n NiceRegionError::new(self.tcx, span, sub, sup, None).try_report().is_some()\n }\n }\n}\n\npub struct NiceRegionError<'cx, 'gcx: 'tcx, 'tcx: 'cx> {\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n span: Span,\n sub: ty::Region<'tcx>,\n sup: ty::Region<'tcx>,\n tables: Option<&'cx ty::TypeckTables<'tcx>>,\n}\n\nimpl<'cx, 'gcx, 'tcx> NiceRegionError<'cx, 'gcx, 'tcx> {\n pub fn new(\n tcx: TyCtxt<'cx, 'gcx, 'tcx>,\n span: Span,\n sub: ty::Region<'tcx>,\n sup: ty::Region<'tcx>,\n tables: Option<&'cx ty::TypeckTables<'tcx>>,\n ) -> Self {\n Self { tcx, span, sub, sup, tables }\n }\n\n pub fn try_report(&self) -> Option<ErrorReported> {\n self.try_report_named_anon_conflict()\n .or_else(|| self.try_report_anon_anon_conflict())\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #88194 - spastorino:test-tait-assoc-impl-trait, r=oli-obk<commit_after>\/\/ check-pass\n\n#![feature(type_alias_impl_trait)]\n#![allow(dead_code)]\n\ntype Foo = impl Iterator<Item = impl Send>;\n\nfn make_foo() -> Foo {\n vec![1, 2].into_iter()\n}\n\ntype Bar = impl Send;\ntype Baz = impl Iterator<Item = Bar>;\n\nfn make_baz() -> Baz {\n vec![\"1\", \"2\"].into_iter()\n}\n\nfn main() {}\n<|endoftext|>"} {"text":"<commit_before>use std::io;\n\nuse docopt;\nuse tabwriter::TabWriter;\n\nuse types::{CliError, CsvConfig, Delimiter};\nuse util;\n\ndocopt!(Args, \"\nPrints flattened records such that fields are labeled separated\nby a new line. This mode is particularly useful for viewing one\nrecord at a time.\n\nThere is also a condensed view (-c or --condensed) that will shorten\nthe contents of each field to provide a summary view.\n\nUsage:\n xcsv flatten [options] [<input>]\n\nflatten options:\n -c, --condensed <arg> Limits the length (in bytes) of each field to the\n value specified.\n -s, --separator <arg> A string of characters to write after each record.\n When non-empty, a new line is automatically\n appended to the separator.\n [default: #]\n\nCommon options:\n -h, --help Display this message\n -n, --no-headers When set, the first row will not be interpreted\n as headers. (i.e., They are not searched, analyzed,\n sliced, etc.)\n -d, --delimiter <arg> The field delimiter for reading CSV data.\n Must be a single character. 
[default: ,]\n\", arg_input: Option<String>, flag_delimiter: Delimiter)\n\npub fn main() -> Result<(), CliError> {\n let args: Args = try!(util::get_args());\n let rconfig = CsvConfig::new(args.arg_input.clone())\n .delimiter(args.flag_delimiter)\n .no_headers(args.flag_no_headers);\n let mut rdr = try!(io| rconfig.reader());\n let headers = try!(csv| rdr.byte_headers());\n\n let mut wtr: Box<Writer> =\n if false {\n box io::stdout() as Box<Writer>\n } else {\n box TabWriter::new(io::stdout()) as Box<Writer>\n };\n let mut first = true;\n for r in rdr.byte_records() {\n if !first && !args.flag_separator.is_empty() {\n try!(io| wtr.write_str(args.flag_separator.as_slice()));\n try!(io| wtr.write_u8(b'\\n'));\n }\n first = false;\n for (header, field) in headers.iter().zip(try!(csv| r).into_iter()) {\n try!(io| wtr.write(header[]));\n try!(io| wtr.write_u8(b'\\t'));\n try!(io| wtr.write(field[]));\n try!(io| wtr.write_u8(b'\\n'));\n }\n }\n try!(io| wtr.flush());\n Ok(())\n}\n<commit_msg>Add \"condense\" feature to flatten command.<commit_after>use std::io;\n\nuse docopt;\nuse tabwriter::TabWriter;\n\nuse types::{CliError, CsvConfig, Delimiter};\nuse util;\n\ndocopt!(Args, \"\nPrints flattened records such that fields are labeled separated\nby a new line. This mode is particularly useful for viewing one\nrecord at a time.\n\nThere is also a condensed view (-c or --condensed) that will shorten\nthe contents of each field to provide a summary view.\n\nUsage:\n xcsv flatten [options] [<input>]\n\nflatten options:\n -c, --condensed <arg> Limits the length (in bytes) of each field to the\n value specified.\n -s, --separator <arg> A string of characters to write after each record.\n When non-empty, a new line is automatically\n appended to the separator.\n [default: #]\n\nCommon options:\n -h, --help Display this message\n -n, --no-headers When set, the first row will not be interpreted\n as headers. (i.e., They are not searched, analyzed,\n sliced, etc.)\n -d, --delimiter <arg> The field delimiter for reading CSV data.\n Must be a single character. [default: ,]\n\", arg_input: Option<String>, flag_delimiter: Delimiter,\n flag_condensed: Option<uint>)\n\npub fn main() -> Result<(), CliError> {\n let args: Args = try!(util::get_args());\n let rconfig = CsvConfig::new(args.arg_input.clone())\n .delimiter(args.flag_delimiter)\n .no_headers(args.flag_no_headers);\n let mut rdr = try!(io| rconfig.reader());\n let headers = try!(csv| rdr.byte_headers());\n\n let mut wtr: Box<Writer> =\n if false {\n box io::stdout() as Box<Writer>\n } else {\n box TabWriter::new(io::stdout()) as Box<Writer>\n };\n let mut first = true;\n for r in rdr.byte_records() {\n if !first && !args.flag_separator.is_empty() {\n try!(io| wtr.write_str(args.flag_separator.as_slice()));\n try!(io| wtr.write_u8(b'\\n'));\n }\n first = false;\n for (header, field) in headers.iter().zip(try!(csv| r).into_iter()) {\n try!(io| wtr.write(header[]));\n try!(io| wtr.write_u8(b'\\t'));\n match args.flag_condensed {\n Some(n) if n < field.len() => {\n try!(io| wtr.write(field[..n]));\n try!(io| wtr.write_str(\"...\"));\n }\n _ => try!(io| wtr.write(field[])),\n }\n try!(io| wtr.write_u8(b'\\n'));\n }\n }\n try!(io| wtr.flush());\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>rollup merge of #17062 : nathantypanski\/generic-lifetime-trait-impl<commit_after>\/\/ Copyright 2012 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ This code used to produce an ICE on the definition of trait Bar\n\/\/ with the following message:\n\/\/\n\/\/ Type parameter out of range when substituting in region 'a (root\n\/\/ type=fn(Self) -> 'astr) (space=FnSpace, index=0)\n\/\/\n\/\/ Regression test for issue #16218.\n\ntrait Bar<'a> {}\n\ntrait Foo<'a> {\n fn bar<'a, T: Bar<'a>>(self) -> &'a str;\n}\n\nimpl<'a> Foo<'a> for &'a str {\n fn bar<T: Bar<'a>>(self) -> &'a str { fail!() } \/\/~ ERROR lifetime\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add ICE regression test with unboxed closures<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ This code used to produce the following ICE:\n\/\/\n\/\/ error: internal compiler error: get_unique_type_id_of_type() -\n\/\/ unexpected type: closure,\n\/\/ ty_unboxed_closure(syntax::ast::DefId{krate: 0u32, node: 66u32},\n\/\/ ReScope(63u32))\n\/\/\n\/\/ This is a regression test for issue #17021.\n\n#![feature(unboxed_closures, overloaded_calls)]\n\nuse std::ptr;\n\npub fn replace_map<'a, T, F>(src: &mut T, prod: F)\nwhere F: |: T| -> T {\n unsafe { *src = prod(ptr::read(src as *mut T as *const T)); }\n}\n\npub fn main() {\n let mut a = 7u;\n let b = &mut a;\n replace_map(b, |: x: uint| x * 2);\n assert_eq!(*b, 14u);\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2016 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ run-pass\n\n#[repr(u32)]\nenum Tag { I, F }\n\n#[repr(C)]\nunion U {\n i: i32,\n f: f32,\n}\n\n#[repr(C)]\nstruct Value {\n tag: Tag,\n u: U,\n}\n\nfn is_zero(v: Value) -> bool {\n unsafe {\n match v {\n Value { tag: Tag::I, u: U { i: 0 } } => true,\n Value { tag: Tag::F, u: U { f: 0.0 } } => true,\n _ => false,\n }\n }\n}\n\nunion W {\n a: u8,\n b: u8,\n}\n\nfn refut(w: W) {\n unsafe {\n match w {\n W { a: 10 } => {\n panic!();\n }\n W { b } => {\n assert_eq!(b, 11);\n }\n }\n }\n}\n\nfn main() {\n let v = Value { tag: Tag::I, u: U { i: 1 } };\n assert_eq!(is_zero(v), false);\n\n let w = W { a: 11 };\n refut(w);\n}\n<commit_msg>Allow illegal_floating_point_literal_pattern. These will need to be updated at some point.<commit_after>\/\/ Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ run-pass\n#![allow(illegal_floating_point_literal_pattern)]\n\n#[repr(u32)]\nenum Tag { I, F }\n\n#[repr(C)]\nunion U {\n i: i32,\n f: f32,\n}\n\n#[repr(C)]\nstruct Value {\n tag: Tag,\n u: U,\n}\n\nfn is_zero(v: Value) -> bool {\n unsafe {\n match v {\n Value { tag: Tag::I, u: U { i: 0 } } => true,\n Value { tag: Tag::F, u: U { f: 0.0 } } => true,\n _ => false,\n }\n }\n}\n\nunion W {\n a: u8,\n b: u8,\n}\n\nfn refut(w: W) {\n unsafe {\n match w {\n W { a: 10 } => {\n panic!();\n }\n W { b } => {\n assert_eq!(b, 11);\n }\n }\n }\n}\n\nfn main() {\n let v = Value { tag: Tag::I, u: U { i: 1 } };\n assert_eq!(is_zero(v), false);\n\n let w = W { a: 11 };\n refut(w);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Write to db from thread pool.<commit_after><|endoftext|>"} {"text":"<commit_before>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! A windowing implementation using GLFW.\n\nuse windowing::{ApplicationMethods, WindowEvent, WindowMethods};\nuse windowing::{IdleWindowEvent, ResizeWindowEvent, LoadUrlWindowEvent, MouseWindowEventClass, MouseWindowMoveEventClass};\nuse windowing::{ScrollWindowEvent, ZoomWindowEvent, NavigationWindowEvent, FinishedWindowEvent};\nuse windowing::{QuitWindowEvent, MouseWindowClickEvent, MouseWindowMouseDownEvent, MouseWindowMouseUpEvent};\nuse windowing::RefreshWindowEvent;\nuse windowing::{Forward, Back};\n\nuse alert::{Alert, AlertMethods};\nuse extra::time::Timespec;\nuse extra::time;\nuse std::cell::RefCell;\nuse std::libc::{exit, c_int};\nuse std::local_data;\nuse std::rc::Rc;\n\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse servo_msg::compositor_msg::{IdleRenderState, RenderState, RenderingRenderState};\nuse servo_msg::compositor_msg::{FinishedLoading, Blank, Loading, PerformingLayout, ReadyState};\n\nuse glfw;\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n fn new() -> Application {\n \/\/ Per GLFW docs it's safe to set the error callback before calling\n \/\/ glfwInit(), and this way we notice errors from init too.\n glfw::set_error_callback(~glfw::LogErrorHandler);\n\n if glfw::init().is_err() {\n \/\/ handles things like inability to connect to X\n \/\/ cannot simply fail, since the runtime isn't up yet (causes a nasty abort)\n println!(\"GLFW initialization failed\");\n unsafe { exit(1); }\n }\n\n Application\n }\n}\n\nimpl Drop for Application {\n fn drop(&mut self) {\n drop_local_window();\n glfw::terminate();\n }\n}\n\nmacro_rules! 
glfw_callback(\n (\n $callback:path ($($arg:ident: $arg_ty:ty),*) $block:expr\n ) => ({\n struct GlfwCallback;\n impl $callback for GlfwCallback {\n fn call(&self $(, $arg: $arg_ty)*) {\n $block\n }\n }\n ~GlfwCallback\n });\n\n (\n [$($state:ident: $state_ty:ty),*],\n $callback:path ($($arg:ident: $arg_ty:ty),*) $block:expr\n ) => ({\n struct GlfwCallback {\n $($state: $state_ty,)*\n }\n impl $callback for GlfwCallback {\n fn call(&self $(, $arg: $arg_ty)*) {\n $block\n }\n }\n ~GlfwCallback {\n $($state: $state,)*\n }\n });\n)\n\n\n\/\/\/ The type of a window.\npub struct Window {\n glfw_window: glfw::Window,\n\n event_queue: RefCell<~[WindowEvent]>,\n\n drag_origin: Point2D<c_int>,\n\n mouse_down_button: RefCell<Option<glfw::MouseButton>>,\n mouse_down_point: RefCell<Point2D<c_int>>,\n\n ready_state: RefCell<ReadyState>,\n render_state: RefCell<RenderState>,\n\n last_title_set_time: RefCell<Timespec>,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n fn new(_: &Application) -> Rc<Window> {\n \/\/ Create the GLFW window.\n let glfw_window = glfw::Window::create(800, 600, \"Servo\", glfw::Windowed)\n .expect(\"Failed to create GLFW window\");\n glfw_window.make_context_current();\n\n \/\/ Create our window object.\n let window = Window {\n glfw_window: glfw_window,\n\n event_queue: RefCell::new(~[]),\n\n drag_origin: Point2D(0 as c_int, 0),\n\n mouse_down_button: RefCell::new(None),\n mouse_down_point: RefCell::new(Point2D(0 as c_int, 0)),\n\n ready_state: RefCell::new(Blank),\n render_state: RefCell::new(IdleRenderState),\n\n last_title_set_time: RefCell::new(Timespec::new(0, 0)),\n };\n\n \/\/ Register event handlers.\n window.glfw_window.set_framebuffer_size_callback(\n glfw_callback!(glfw::FramebufferSizeCallback(_win: &glfw::Window, width: i32, height: i32) {\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(ResizeWindowEvent(width as uint, height as uint)));\n }));\n window.glfw_window.set_refresh_callback(\n glfw_callback!(glfw::WindowRefreshCallback(_win: &glfw::Window) {\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(RefreshWindowEvent));\n }));\n window.glfw_window.set_key_callback(\n glfw_callback!(glfw::KeyCallback(_win: &glfw::Window, key: glfw::Key, _scancode: c_int,\n action: glfw::Action, mods: glfw::Modifiers) {\n if action == glfw::Press {\n let tmp = local_window();\n tmp.borrow().handle_key(key, mods)\n }\n }));\n window.glfw_window.set_mouse_button_callback(\n glfw_callback!(glfw::MouseButtonCallback(win: &glfw::Window, button: glfw::MouseButton,\n action: glfw::Action, _mods: glfw::Modifiers) {\n let (x, y) = win.get_cursor_pos();\n \/\/handle hidpi displays, since GLFW returns non-hi-def coordinates.\n let (backing_size, _) = win.get_framebuffer_size();\n let (window_size, _) = win.get_size();\n let hidpi = (backing_size as f32) \/ (window_size as f32);\n let x = x as f32 * hidpi;\n let y = y as f32 * hidpi;\n if button == glfw::MouseButtonLeft || button == glfw::MouseButtonRight {\n let tmp = local_window();\n tmp.borrow().handle_mouse(button, action, x as i32, y as i32);\n }\n }));\n window.glfw_window.set_cursor_pos_callback(\n glfw_callback!(glfw::CursorPosCallback(_win: &glfw::Window, xpos: f64, ypos: f64) {\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(MouseWindowMoveEventClass(Point2D(xpos as f32, ypos as f32))));\n }));\n window.glfw_window.set_scroll_callback(\n glfw_callback!(glfw::ScrollCallback(win: &glfw::Window, 
xpos: f64, ypos: f64) {\n let dx = (xpos as f32) * 30.0;\n let dy = (ypos as f32) * 30.0;\n\n let (x, y) = win.get_cursor_pos();\n \/\/handle hidpi displays, since GLFW returns non-hi-def coordinates.\n let (backing_size, _) = win.get_framebuffer_size();\n let (window_size, _) = win.get_size();\n let hidpi = (backing_size as f32) \/ (window_size as f32);\n let x = x as f32 * hidpi;\n let y = y as f32 * hidpi;\n\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(ScrollWindowEvent(Point2D(dx, dy), Point2D(x as i32, y as i32))));\n }));\n\n let wrapped_window = Rc::from_send(window);\n\n install_local_window(wrapped_window.clone());\n\n wrapped_window\n }\n\n \/\/\/ Returns the size of the window.\n fn size(&self) -> Size2D<f32> {\n let (width, height) = self.glfw_window.get_framebuffer_size();\n Size2D(width as f32, height as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n fn present(&self) {\n self.glfw_window.swap_buffers();\n }\n\n fn recv(&self) -> WindowEvent {\n if !self.event_queue.with_mut(|queue| queue.is_empty()) {\n return self.event_queue.with_mut(|queue| queue.shift())\n }\n glfw::poll_events();\n\n if self.glfw_window.should_close() {\n QuitWindowEvent\n } else if !self.event_queue.with_mut(|queue| queue.is_empty()) {\n self.event_queue.with_mut(|queue| queue.shift())\n } else {\n IdleWindowEvent\n }\n }\n\n \/\/\/ Sets the ready state.\n fn set_ready_state(&self, ready_state: ReadyState) {\n self.ready_state.set(ready_state);\n self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n fn set_render_state(&self, render_state: RenderState) {\n if self.ready_state.get() == FinishedLoading &&\n self.render_state.get() == RenderingRenderState &&\n render_state == IdleRenderState {\n \/\/ page loaded\n self.event_queue.with_mut(|queue| queue.push(FinishedWindowEvent));\n }\n\n self.render_state.set(render_state);\n self.update_window_title()\n }\n\n fn hidpi_factor(&self) -> f32 {\n let (backing_size, _) = self.glfw_window.get_framebuffer_size();\n let (window_size, _) = self.glfw_window.get_size();\n (backing_size as f32) \/ (window_size as f32)\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n fn update_window_title(&self) {\n let now = time::get_time();\n if now.sec == self.last_title_set_time.get().sec {\n return\n }\n self.last_title_set_time.set(now);\n\n match self.ready_state.get() {\n Blank => {\n self.glfw_window.set_title(\"blank — Servo\")\n }\n Loading => {\n self.glfw_window.set_title(\"Loading — Servo\")\n }\n PerformingLayout => {\n self.glfw_window.set_title(\"Performing Layout — Servo\")\n }\n FinishedLoading => {\n match self.render_state.get() {\n RenderingRenderState => {\n self.glfw_window.set_title(\"Rendering — Servo\")\n }\n IdleRenderState => {\n self.glfw_window.set_title(\"Servo\")\n }\n }\n }\n }\n }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: glfw::Key, mods: glfw::Modifiers) {\n match key {\n glfw::KeyEscape => self.glfw_window.set_should_close(true),\n glfw::KeyL if mods.contains(glfw::Control) => self.load_url(), \/\/ Ctrl+L\n glfw::KeyEqual if mods.contains(glfw::Control) => { \/\/ Ctrl-+\n self.event_queue.with_mut(|queue| queue.push(ZoomWindowEvent(1.1)));\n }\n glfw::KeyMinus if mods.contains(glfw::Control) => { \/\/ Ctrl--\n self.event_queue.with_mut(|queue| queue.push(ZoomWindowEvent(0.90909090909)));\n }\n glfw::KeyBackspace if mods.contains(glfw::Shift) => { \/\/ 
Shift-Backspace\n self.event_queue.with_mut(|queue| queue.push(NavigationWindowEvent(Forward)));\n }\n glfw::KeyBackspace => { \/\/ Backspace\n self.event_queue.with_mut(|queue| queue.push(NavigationWindowEvent(Back)));\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a click\n fn handle_mouse(&self, button: glfw::MouseButton, action: glfw::Action, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f64;\n let event = match action {\n glfw::Press => {\n self.mouse_down_point.set(Point2D(x, y));\n self.mouse_down_button.set(Some(button));\n MouseWindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32))\n }\n glfw::Release => {\n match self.mouse_down_button.get() {\n None => (),\n Some(but) if button == but => {\n let pixel_dist = self.mouse_down_point.get() - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as f64).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = MouseWindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n self.event_queue.with_mut(|queue| queue.push(MouseWindowEventClass(click_event)));\n }\n }\n Some(_) => (),\n }\n MouseWindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32))\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. :-(\")\n };\n self.event_queue.with_mut(|queue| queue.push(MouseWindowEventClass(event)));\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n self.event_queue.with_mut(|queue| queue.push(LoadUrlWindowEvent(~\"http:\/\/purple.com\/\")))\n } else {\n self.event_queue.with_mut(|queue| queue.push(LoadUrlWindowEvent(value.clone())))\n }\n }\n}\n\nstatic TLS_KEY: local_data::Key<Rc<Window>> = &local_data::Key;\n\nfn install_local_window(window: Rc<Window>) {\n local_data::set(TLS_KEY, window);\n}\n\nfn drop_local_window() {\n local_data::pop(TLS_KEY);\n}\n\nfn local_window() -> Rc<Window> {\n local_data::get(TLS_KEY, |v| v.unwrap().clone())\n}\n<commit_msg>RefCell->Cell<commit_after>\/* This Source Code Form is subject to the terms of the Mozilla Public\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n * file, You can obtain one at http:\/\/mozilla.org\/MPL\/2.0\/. *\/\n\n\/\/! 
A windowing implementation using GLFW.\n\nuse windowing::{ApplicationMethods, WindowEvent, WindowMethods};\nuse windowing::{IdleWindowEvent, ResizeWindowEvent, LoadUrlWindowEvent, MouseWindowEventClass, MouseWindowMoveEventClass};\nuse windowing::{ScrollWindowEvent, ZoomWindowEvent, NavigationWindowEvent, FinishedWindowEvent};\nuse windowing::{QuitWindowEvent, MouseWindowClickEvent, MouseWindowMouseDownEvent, MouseWindowMouseUpEvent};\nuse windowing::RefreshWindowEvent;\nuse windowing::{Forward, Back};\n\nuse alert::{Alert, AlertMethods};\nuse extra::time::Timespec;\nuse extra::time;\nuse std::cell::{Cell, RefCell};\nuse std::libc::{exit, c_int};\nuse std::local_data;\nuse std::rc::Rc;\n\nuse geom::point::Point2D;\nuse geom::size::Size2D;\nuse servo_msg::compositor_msg::{IdleRenderState, RenderState, RenderingRenderState};\nuse servo_msg::compositor_msg::{FinishedLoading, Blank, Loading, PerformingLayout, ReadyState};\n\nuse glfw;\n\n\/\/\/ A structure responsible for setting up and tearing down the entire windowing system.\npub struct Application;\n\nimpl ApplicationMethods for Application {\n fn new() -> Application {\n \/\/ Per GLFW docs it's safe to set the error callback before calling\n \/\/ glfwInit(), and this way we notice errors from init too.\n glfw::set_error_callback(~glfw::LogErrorHandler);\n\n if glfw::init().is_err() {\n \/\/ handles things like inability to connect to X\n \/\/ cannot simply fail, since the runtime isn't up yet (causes a nasty abort)\n println!(\"GLFW initialization failed\");\n unsafe { exit(1); }\n }\n\n Application\n }\n}\n\nimpl Drop for Application {\n fn drop(&mut self) {\n drop_local_window();\n glfw::terminate();\n }\n}\n\nmacro_rules! glfw_callback(\n (\n $callback:path ($($arg:ident: $arg_ty:ty),*) $block:expr\n ) => ({\n struct GlfwCallback;\n impl $callback for GlfwCallback {\n fn call(&self $(, $arg: $arg_ty)*) {\n $block\n }\n }\n ~GlfwCallback\n });\n\n (\n [$($state:ident: $state_ty:ty),*],\n $callback:path ($($arg:ident: $arg_ty:ty),*) $block:expr\n ) => ({\n struct GlfwCallback {\n $($state: $state_ty,)*\n }\n impl $callback for GlfwCallback {\n fn call(&self $(, $arg: $arg_ty)*) {\n $block\n }\n }\n ~GlfwCallback {\n $($state: $state,)*\n }\n });\n)\n\n\n\/\/\/ The type of a window.\npub struct Window {\n glfw_window: glfw::Window,\n\n event_queue: RefCell<~[WindowEvent]>,\n\n drag_origin: Point2D<c_int>,\n\n mouse_down_button: Cell<Option<glfw::MouseButton>>,\n mouse_down_point: Cell<Point2D<c_int>>,\n\n ready_state: Cell<ReadyState>,\n render_state: Cell<RenderState>,\n\n last_title_set_time: Cell<Timespec>,\n}\n\nimpl WindowMethods<Application> for Window {\n \/\/\/ Creates a new window.\n fn new(_: &Application) -> Rc<Window> {\n \/\/ Create the GLFW window.\n let glfw_window = glfw::Window::create(800, 600, \"Servo\", glfw::Windowed)\n .expect(\"Failed to create GLFW window\");\n glfw_window.make_context_current();\n\n \/\/ Create our window object.\n let window = Window {\n glfw_window: glfw_window,\n\n event_queue: RefCell::new(~[]),\n\n drag_origin: Point2D(0 as c_int, 0),\n\n mouse_down_button: Cell::new(None),\n mouse_down_point: Cell::new(Point2D(0 as c_int, 0)),\n\n ready_state: Cell::new(Blank),\n render_state: Cell::new(IdleRenderState),\n\n last_title_set_time: Cell::new(Timespec::new(0, 0)),\n };\n\n \/\/ Register event handlers.\n window.glfw_window.set_framebuffer_size_callback(\n glfw_callback!(glfw::FramebufferSizeCallback(_win: &glfw::Window, width: i32, height: i32) {\n let tmp = local_window();\n 
tmp.borrow().event_queue.with_mut(|queue| queue.push(ResizeWindowEvent(width as uint, height as uint)));\n }));\n window.glfw_window.set_refresh_callback(\n glfw_callback!(glfw::WindowRefreshCallback(_win: &glfw::Window) {\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(RefreshWindowEvent));\n }));\n window.glfw_window.set_key_callback(\n glfw_callback!(glfw::KeyCallback(_win: &glfw::Window, key: glfw::Key, _scancode: c_int,\n action: glfw::Action, mods: glfw::Modifiers) {\n if action == glfw::Press {\n let tmp = local_window();\n tmp.borrow().handle_key(key, mods)\n }\n }));\n window.glfw_window.set_mouse_button_callback(\n glfw_callback!(glfw::MouseButtonCallback(win: &glfw::Window, button: glfw::MouseButton,\n action: glfw::Action, _mods: glfw::Modifiers) {\n let (x, y) = win.get_cursor_pos();\n \/\/handle hidpi displays, since GLFW returns non-hi-def coordinates.\n let (backing_size, _) = win.get_framebuffer_size();\n let (window_size, _) = win.get_size();\n let hidpi = (backing_size as f32) \/ (window_size as f32);\n let x = x as f32 * hidpi;\n let y = y as f32 * hidpi;\n if button == glfw::MouseButtonLeft || button == glfw::MouseButtonRight {\n let tmp = local_window();\n tmp.borrow().handle_mouse(button, action, x as i32, y as i32);\n }\n }));\n window.glfw_window.set_cursor_pos_callback(\n glfw_callback!(glfw::CursorPosCallback(_win: &glfw::Window, xpos: f64, ypos: f64) {\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(MouseWindowMoveEventClass(Point2D(xpos as f32, ypos as f32))));\n }));\n window.glfw_window.set_scroll_callback(\n glfw_callback!(glfw::ScrollCallback(win: &glfw::Window, xpos: f64, ypos: f64) {\n let dx = (xpos as f32) * 30.0;\n let dy = (ypos as f32) * 30.0;\n\n let (x, y) = win.get_cursor_pos();\n \/\/handle hidpi displays, since GLFW returns non-hi-def coordinates.\n let (backing_size, _) = win.get_framebuffer_size();\n let (window_size, _) = win.get_size();\n let hidpi = (backing_size as f32) \/ (window_size as f32);\n let x = x as f32 * hidpi;\n let y = y as f32 * hidpi;\n\n let tmp = local_window();\n tmp.borrow().event_queue.with_mut(|queue| queue.push(ScrollWindowEvent(Point2D(dx, dy), Point2D(x as i32, y as i32))));\n }));\n\n let wrapped_window = Rc::from_send(window);\n\n install_local_window(wrapped_window.clone());\n\n wrapped_window\n }\n\n \/\/\/ Returns the size of the window.\n fn size(&self) -> Size2D<f32> {\n let (width, height) = self.glfw_window.get_framebuffer_size();\n Size2D(width as f32, height as f32)\n }\n\n \/\/\/ Presents the window to the screen (perhaps by page flipping).\n fn present(&self) {\n self.glfw_window.swap_buffers();\n }\n\n fn recv(&self) -> WindowEvent {\n if !self.event_queue.with_mut(|queue| queue.is_empty()) {\n return self.event_queue.with_mut(|queue| queue.shift())\n }\n glfw::poll_events();\n\n if self.glfw_window.should_close() {\n QuitWindowEvent\n } else if !self.event_queue.with_mut(|queue| queue.is_empty()) {\n self.event_queue.with_mut(|queue| queue.shift())\n } else {\n IdleWindowEvent\n }\n }\n\n \/\/\/ Sets the ready state.\n fn set_ready_state(&self, ready_state: ReadyState) {\n self.ready_state.set(ready_state);\n self.update_window_title()\n }\n\n \/\/\/ Sets the render state.\n fn set_render_state(&self, render_state: RenderState) {\n if self.ready_state.get() == FinishedLoading &&\n self.render_state.get() == RenderingRenderState &&\n render_state == IdleRenderState {\n \/\/ page loaded\n self.event_queue.with_mut(|queue| 
queue.push(FinishedWindowEvent));\n }\n\n self.render_state.set(render_state);\n self.update_window_title()\n }\n\n fn hidpi_factor(&self) -> f32 {\n let (backing_size, _) = self.glfw_window.get_framebuffer_size();\n let (window_size, _) = self.glfw_window.get_size();\n (backing_size as f32) \/ (window_size as f32)\n }\n}\n\nimpl Window {\n \/\/\/ Helper function to set the window title in accordance with the ready state.\n fn update_window_title(&self) {\n let now = time::get_time();\n if now.sec == self.last_title_set_time.get().sec {\n return\n }\n self.last_title_set_time.set(now);\n\n match self.ready_state.get() {\n Blank => {\n self.glfw_window.set_title(\"blank — Servo\")\n }\n Loading => {\n self.glfw_window.set_title(\"Loading — Servo\")\n }\n PerformingLayout => {\n self.glfw_window.set_title(\"Performing Layout — Servo\")\n }\n FinishedLoading => {\n match self.render_state.get() {\n RenderingRenderState => {\n self.glfw_window.set_title(\"Rendering — Servo\")\n }\n IdleRenderState => {\n self.glfw_window.set_title(\"Servo\")\n }\n }\n }\n }\n }\n\n \/\/\/ Helper function to handle keyboard events.\n fn handle_key(&self, key: glfw::Key, mods: glfw::Modifiers) {\n match key {\n glfw::KeyEscape => self.glfw_window.set_should_close(true),\n glfw::KeyL if mods.contains(glfw::Control) => self.load_url(), \/\/ Ctrl+L\n glfw::KeyEqual if mods.contains(glfw::Control) => { \/\/ Ctrl-+\n self.event_queue.with_mut(|queue| queue.push(ZoomWindowEvent(1.1)));\n }\n glfw::KeyMinus if mods.contains(glfw::Control) => { \/\/ Ctrl--\n self.event_queue.with_mut(|queue| queue.push(ZoomWindowEvent(0.90909090909)));\n }\n glfw::KeyBackspace if mods.contains(glfw::Shift) => { \/\/ Shift-Backspace\n self.event_queue.with_mut(|queue| queue.push(NavigationWindowEvent(Forward)));\n }\n glfw::KeyBackspace => { \/\/ Backspace\n self.event_queue.with_mut(|queue| queue.push(NavigationWindowEvent(Back)));\n }\n _ => {}\n }\n }\n\n \/\/\/ Helper function to handle a click\n fn handle_mouse(&self, button: glfw::MouseButton, action: glfw::Action, x: c_int, y: c_int) {\n \/\/ FIXME(tkuehn): max pixel dist should be based on pixel density\n let max_pixel_dist = 10f64;\n let event = match action {\n glfw::Press => {\n self.mouse_down_point.set(Point2D(x, y));\n self.mouse_down_button.set(Some(button));\n MouseWindowMouseDownEvent(button as uint, Point2D(x as f32, y as f32))\n }\n glfw::Release => {\n match self.mouse_down_button.get() {\n None => (),\n Some(but) if button == but => {\n let pixel_dist = self.mouse_down_point.get() - Point2D(x, y);\n let pixel_dist = ((pixel_dist.x * pixel_dist.x +\n pixel_dist.y * pixel_dist.y) as f64).sqrt();\n if pixel_dist < max_pixel_dist {\n let click_event = MouseWindowClickEvent(button as uint,\n Point2D(x as f32, y as f32));\n self.event_queue.with_mut(|queue| queue.push(MouseWindowEventClass(click_event)));\n }\n }\n Some(_) => (),\n }\n MouseWindowMouseUpEvent(button as uint, Point2D(x as f32, y as f32))\n }\n _ => fail!(\"I cannot recognize the type of mouse action that occured. 
:-(\")\n };\n self.event_queue.with_mut(|queue| queue.push(MouseWindowEventClass(event)));\n }\n\n \/\/\/ Helper function to pop up an alert box prompting the user to load a URL.\n fn load_url(&self) {\n let mut alert: Alert = AlertMethods::new(\"Navigate to:\");\n alert.add_prompt();\n alert.run();\n let value = alert.prompt_value();\n if \"\" == value { \/\/ To avoid crashing on Linux.\n self.event_queue.with_mut(|queue| queue.push(LoadUrlWindowEvent(~\"http:\/\/purple.com\/\")))\n } else {\n self.event_queue.with_mut(|queue| queue.push(LoadUrlWindowEvent(value.clone())))\n }\n }\n}\n\nstatic TLS_KEY: local_data::Key<Rc<Window>> = &local_data::Key;\n\nfn install_local_window(window: Rc<Window>) {\n local_data::set(TLS_KEY, window);\n}\n\nfn drop_local_window() {\n local_data::pop(TLS_KEY);\n}\n\nfn local_window() -> Rc<Window> {\n local_data::get(TLS_KEY, |v| v.unwrap().clone())\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>chore: OO<commit_after><|endoftext|>"} {"text":"<commit_before>use std::fs::File;\nuse std::io::Cursor;\nuse std::io::prelude::*;\n\nuse cargo::util::process;\nuse flate2::read::GzDecoder;\nuse git2;\nuse tar::Archive;\n\nuse support::{project, execs, cargo_dir, paths, git};\nuse support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING};\nuse hamcrest::{assert_that, existing_file};\n\nfn setup() {\n}\n\ntest!(simple {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n license = \"MIT\"\n description = \"foo\"\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"src\/bar.txt\", \"\"); \/\/ should be ignored when packaging\n\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n assert_that(&p.root().join(\"target\/package\/foo-0.0.1.crate\"), existing_file());\n assert_that(p.cargo(\"package\").arg(\"-l\"),\n execs().with_status(0).with_stdout(\"\\\nCargo.toml\nsrc[..]main.rs\n\"));\n assert_that(p.cargo(\"package\"),\n execs().with_status(0).with_stdout(\"\"));\n\n let f = File::open(&p.root().join(\"target\/package\/foo-0.0.1.crate\")).unwrap();\n let mut rdr = GzDecoder::new(f).unwrap();\n let mut contents = Vec::new();\n rdr.read_to_end(&mut contents).unwrap();\n let ar = Archive::new(Cursor::new(contents));\n for f in ar.files().unwrap() {\n let f = f.unwrap();\n let fname = f.filename_bytes();\n assert!(fname == b\"foo-0.0.1\/Cargo.toml\" ||\n fname == b\"foo-0.0.1\/src\/main.rs\",\n \"unexpected filename: {:?}\", f.filename())\n }\n});\n\ntest!(metadata_warning {\n let p = project(\"all\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url()))\n .with_stderr(\"\\\nwarning: manifest has no description, license, license-file, documentation, \\\nhomepage or repository. 
See \\\nhttp:\/\/doc.crates.io\/manifest.html#package-metadata for more info.\"));\n\n let p = project(\"one\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n license = \"MIT\"\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url()))\n .with_stderr(\"\\\nwarning: manifest has no description, documentation, homepage or repository. See \\\nhttp:\/\/doc.crates.io\/manifest.html#package-metadata for more info.\"));\n\n let p = project(\"all\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n license = \"MIT\"\n description = \"foo\"\n repository = \"bar\"\n \"#))\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n});\n\ntest!(package_verbose {\n let root = paths::root().join(\"all\");\n let p = git::repo(&root)\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#)\n .file(\"a\/Cargo.toml\", r#\"\n [project]\n name = \"a\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"a\/src\/lib.rs\", \"\");\n p.build();\n let mut cargo = process(&cargo_dir().join(\"cargo\")).unwrap();\n cargo.cwd(&root).env(\"HOME\", &paths::home());\n assert_that(cargo.clone().arg(\"build\"), execs().with_status(0));\n assert_that(cargo.arg(\"package\").arg(\"-v\").arg(\"--no-verify\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\",\n packaging = PACKAGING,\n archiving = ARCHIVING)));\n});\n\ntest!(package_verification {\n let p = project(\"all\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"build\"),\n execs().with_status(0));\n assert_that(p.cargo(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n});\n\ntest!(exclude {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"bar.txt\", \"\")\n .file(\"src\/bar.txt\", \"\");\n\n assert_that(p.cargo_process(\"package\").arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving = ARCHIVING)));\n});\n\ntest!(include {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n include = [\"foo.txt\", \"**\/*.rs\", \"Cargo.toml\"]\n \"#)\n .file(\"foo.txt\", 
\"\")\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"src\/bar.txt\", \"\"); \/\/ should be ignored when packaging\n\n assert_that(p.cargo_process(\"package\").arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving = ARCHIVING)));\n});\n\ntest!(package_lib_with_bin {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n extern crate foo;\n fn main() {}\n \"#)\n .file(\"src\/lib.rs\", \"\");\n\n assert_that(p.cargo_process(\"package\").arg(\"-v\"),\n execs().with_status(0));\n});\n\ntest!(package_new_git_repo {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n \"#)\n .file(\"src\/main.rs\", \"fn main() {}\");\n p.build();\n git2::Repository::init(&p.root()).unwrap();\n\n assert_that(p.process(cargo_dir().join(\"cargo\")).arg(\"package\")\n .arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving = ARCHIVING)));\n});\n<commit_msg>tests: add a test for submodule packaging<commit_after>use std::fs::File;\nuse std::io::Cursor;\nuse std::io::prelude::*;\nuse std::path::Path;\n\nuse cargo::util::process;\nuse flate2::read::GzDecoder;\nuse git2;\nuse tar::Archive;\n\nuse support::{project, execs, cargo_dir, paths, git, path2url};\nuse support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING};\nuse hamcrest::{assert_that, existing_file};\n\nfn setup() {\n}\n\ntest!(simple {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n license = \"MIT\"\n description = \"foo\"\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"src\/bar.txt\", \"\"); \/\/ should be ignored when packaging\n\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n assert_that(&p.root().join(\"target\/package\/foo-0.0.1.crate\"), existing_file());\n assert_that(p.cargo(\"package\").arg(\"-l\"),\n execs().with_status(0).with_stdout(\"\\\nCargo.toml\nsrc[..]main.rs\n\"));\n assert_that(p.cargo(\"package\"),\n execs().with_status(0).with_stdout(\"\"));\n\n let f = File::open(&p.root().join(\"target\/package\/foo-0.0.1.crate\")).unwrap();\n let mut rdr = GzDecoder::new(f).unwrap();\n let mut contents = Vec::new();\n rdr.read_to_end(&mut contents).unwrap();\n let ar = Archive::new(Cursor::new(contents));\n for f in ar.files().unwrap() {\n let f = f.unwrap();\n let fname = f.filename_bytes();\n assert!(fname == b\"foo-0.0.1\/Cargo.toml\" ||\n fname == b\"foo-0.0.1\/src\/main.rs\",\n \"unexpected filename: {:?}\", f.filename())\n }\n});\n\ntest!(metadata_warning {\n let p = project(\"all\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo 
v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url()))\n .with_stderr(\"\\\nwarning: manifest has no description, license, license-file, documentation, \\\nhomepage or repository. See \\\nhttp:\/\/doc.crates.io\/manifest.html#package-metadata for more info.\"));\n\n let p = project(\"one\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n license = \"MIT\"\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url()))\n .with_stderr(\"\\\nwarning: manifest has no description, documentation, homepage or repository. See \\\nhttp:\/\/doc.crates.io\/manifest.html#package-metadata for more info.\"));\n\n let p = project(\"all\")\n .file(\"Cargo.toml\", &format!(r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n license = \"MIT\"\n description = \"foo\"\n repository = \"bar\"\n \"#))\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n});\n\ntest!(package_verbose {\n let root = paths::root().join(\"all\");\n let p = git::repo(&root)\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#)\n .file(\"a\/Cargo.toml\", r#\"\n [project]\n name = \"a\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"a\/src\/lib.rs\", \"\");\n p.build();\n let mut cargo = process(&cargo_dir().join(\"cargo\")).unwrap();\n cargo.cwd(&root).env(\"HOME\", &paths::home());\n assert_that(cargo.clone().arg(\"build\"), execs().with_status(0));\n assert_that(cargo.arg(\"package\").arg(\"-v\").arg(\"--no-verify\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\",\n packaging = PACKAGING,\n archiving = ARCHIVING)));\n});\n\ntest!(package_verification {\n let p = project(\"all\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() {}\n \"#);\n assert_that(p.cargo_process(\"build\"),\n execs().with_status(0));\n assert_that(p.cargo(\"package\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ({dir})\n{verifying} foo v0.0.1 ({dir})\n{compiling} foo v0.0.1 ({dir}[..])\n\",\n packaging = PACKAGING,\n verifying = VERIFYING,\n compiling = COMPILING,\n dir = p.url())));\n});\n\ntest!(exclude {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n \"#)\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"bar.txt\", \"\")\n .file(\"src\/bar.txt\", \"\");\n\n assert_that(p.cargo_process(\"package\").arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving 
= ARCHIVING)));\n});\n\ntest!(include {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n exclude = [\"*.txt\"]\n include = [\"foo.txt\", \"**\/*.rs\", \"Cargo.toml\"]\n \"#)\n .file(\"foo.txt\", \"\")\n .file(\"src\/main.rs\", r#\"\n fn main() { println!(\"hello\"); }\n \"#)\n .file(\"src\/bar.txt\", \"\"); \/\/ should be ignored when packaging\n\n assert_that(p.cargo_process(\"package\").arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving = ARCHIVING)));\n});\n\ntest!(package_lib_with_bin {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = []\n \"#)\n .file(\"src\/main.rs\", r#\"\n extern crate foo;\n fn main() {}\n \"#)\n .file(\"src\/lib.rs\", \"\");\n\n assert_that(p.cargo_process(\"package\").arg(\"-v\"),\n execs().with_status(0));\n});\n\ntest!(package_new_git_repo {\n let p = project(\"foo\")\n .file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n \"#)\n .file(\"src\/main.rs\", \"fn main() {}\");\n p.build();\n git2::Repository::init(&p.root()).unwrap();\n\n assert_that(p.process(cargo_dir().join(\"cargo\")).arg(\"package\")\n .arg(\"--no-verify\").arg(\"-v\"),\n execs().with_status(0).with_stdout(&format!(\"\\\n{packaging} foo v0.0.1 ([..])\n{archiving} [..]\n{archiving} [..]\n\", packaging = PACKAGING, archiving = ARCHIVING)));\n});\n\ntest!(package_git_submodule {\n use std::str::from_utf8;\n\n let project = git::new(\"foo\", |project| {\n project.file(\"Cargo.toml\", r#\"\n [project]\n name = \"foo\"\n version = \"0.0.1\"\n authors = [\"foo@example.com\"]\n license = \"MIT\"\n description = \"foo\"\n repository = \"foo\"\n \"#)\n .file(\"src\/lib.rs\", \"pub fn foo() {}\")\n }).unwrap();\n let library = git::new(\"bar\", |library| {\n library.file(\"Makefile\", \"all:\")\n }).unwrap();\n\n let repository = git2::Repository::open(&project.root()).unwrap();\n let url = path2url(library.root()).to_string();\n git::add_submodule(&repository, &url, Path::new(\"bar\"));\n git::commit(&repository);\n\n let repository = git2::Repository::open(&project.root().join(\"bar\")).unwrap();\n repository.reset(&repository.revparse_single(\"HEAD\").unwrap(),\n git2::ResetType::Hard, None).unwrap();\n\n let result = project.cargo(\"package\").arg(\"--no-verify\").arg(\"-v\").exec_with_output().unwrap();\n assert!(result.status.success());\n assert!(from_utf8(&result.stdout).unwrap().contains(&format!(\"{} bar\/Makefile\", ARCHIVING)));\n});\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add admin level variable for maybe future use<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Bind vertex buffers and descriptor sets once per pass, not every draw call<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add a regression test for issue-72793<commit_after>\/\/ build-pass\n\n\/\/ Regression test for #72793.\n\/\/ FIXME: This still shows ICE with `-Zmir-opt-level=2`.\n\n#![feature(type_alias_impl_trait)]\n\ntrait T { type Item; }\n\ntype Alias<'a> = impl T<Item = &'a ()>;\n\nstruct S;\nimpl<'a> T for &'a S {\n type Item = &'a ();\n}\n\nfn filter_positive<'a>() -> Alias<'a> {\n &S\n}\n\nfn with_positive(fun: impl Fn(Alias<'_>)) {\n fun(filter_positive());\n}\n\nfn main() {\n with_positive(|_| ());\n}\n<|endoftext|>"} 
{"text":"<commit_before><commit_msg>utopian tree<commit_after>use std::io;\nuse std::io::prelude::*;\n\nfn height(num_cycles: u32) -> u32 {\n let mut height: u32 = 1;\n for i in 0..num_cycles {\n if i % 2 == 0 {\n height *= 2;\n } else {\n height += 1;\n }\n }\n height\n}\n\nfn main() {\n let stdin = io::stdin();\n\n let count: usize = stdin.lock().lines() \/\/iterator over lines in stdin\n .next().unwrap().unwrap() \/\/finally it's a string\n .trim().parse().unwrap(); \/\/and then parsing count value...But we don't need it :) lol\n\n for line in stdin.lock().lines() {\n let num_cycles: u32 = line.unwrap().trim().parse().unwrap();\n \n println!(\"{}\", height(num_cycles));\n }\n\n}<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(box_syntax)]\nuse std::any::Any;\n\nfn main()\n{\n fn h(x:i32) -> i32 {3*x}\n let mut vfnfer:Vec<Box<Any>> = vec![];\n vfnfer.push(box h);\n println!(\"{:?}\",(vfnfer[0] as Fn)(3));\n \/\/~^ ERROR the precise format of `Fn`-family traits'\n \/\/~| ERROR E0243\n \/\/~| NOTE expected 1 type arguments, found 0\n \/\/~| ERROR the value of the associated type `Output` (from the trait `std::ops::FnOnce`)\n \/\/~| NOTE in this expansion of println!\n \/\/~| NOTE in this expansion of println!\n \/\/~| NOTE in this expansion of println!\n \/\/~| NOTE in this expansion of println!\n}\n<commit_msg>Remove the NOTE tests for now. We'll move to UI tests later<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![feature(box_syntax)]\nuse std::any::Any;\n\nfn main()\n{\n fn h(x:i32) -> i32 {3*x}\n let mut vfnfer:Vec<Box<Any>> = vec![];\n vfnfer.push(box h);\n println!(\"{:?}\",(vfnfer[0] as Fn)(3));\n \/\/~^ ERROR the precise format of `Fn`-family traits'\n \/\/~| ERROR E0243\n \/\/~| ERROR the value of the associated type `Output` (from the trait `std::ops::FnOnce`)\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate rustc_serialize;\nextern crate docopt;\nextern crate ssh_bookmarker;\n#[macro_use]\nextern crate error_chain;\n\nuse std::path::Path;\nuse docopt::Docopt;\n\nuse ssh_bookmarker::process;\nuse ssh_bookmarker::{ssh_config, known_hosts};\n\nuse ssh_bookmarker::errors::*;\n\n\/\/ use quick_error::ResultExt;\n\nconst USAGE: &'static str = \"\nCreate SSH bookmarks from known_hosts and ssh_config files.\n\nUsage:\n ssh_bookmarker create [-v...] [-c FILE...] [-k FILE...] 
<output>\n\nOptions:\n -h --help Show this screen.\n -v --verbose Log verbosely.\n -c --config FILE ssh_config(5) file to read.\n -k --known-hosts FILE known_hosts file to read.\n\";\n\n#[derive(Debug, RustcDecodable)]\nstruct Args {\n flag_verbose: isize,\n cmd_create: bool,\n arg_output: String,\n flag_config: Vec<String>,\n flag_known_hosts: Vec<String>,\n}\n\nquick_main!(run);\nfn run() -> Result<()> {\n let args: Args = Docopt::new(USAGE)\n .and_then(|d| d.decode())\n .unwrap_or_else(|e| e.exit());\n if args.cmd_create {\n let mut hosts = process::<known_hosts::KnownHosts>(args.flag_known_hosts)?;\n hosts.extend(process::<ssh_config::SSHConfigFile>(args.flag_config)?);\n hosts.sort();\n hosts.dedup();\n\n let output = Path::new(&args.arg_output);\n std::fs::remove_dir_all(output)\n .chain_err(|| format!(\"Could not clear output directory {:?}\", output))?;\n std::fs::create_dir_all(output)\n .chain_err(|| format!(\"Couldn't re-create output directory {:?}\", output))?;\n\n for kh in hosts {\n if kh.ineligible() {\n continue;\n }\n kh.write_bookmark(output).chain_err(|| format!(\"Couldn't write bookmark {:?}\", kh))?;\n }\n Ok(())\n } else {\n bail!(\"Don't know what to do!\");\n }\n}\n<commit_msg>Document --help invocation<commit_after>extern crate rustc_serialize;\nextern crate docopt;\nextern crate ssh_bookmarker;\n#[macro_use]\nextern crate error_chain;\n\nuse std::path::Path;\nuse docopt::Docopt;\n\nuse ssh_bookmarker::process;\nuse ssh_bookmarker::{ssh_config, known_hosts};\n\nuse ssh_bookmarker::errors::*;\n\n\/\/ use quick_error::ResultExt;\n\nconst USAGE: &'static str = \"\nCreate SSH bookmarks from known_hosts and ssh_config files.\n\nUsage:\n ssh_bookmarker create [-v...] [-c FILE...] [-k FILE...] <output>\n ssh_bookmarker --help\n\nOptions:\n -h --help Show this screen.\n -v --verbose Log verbosely.\n -c --config FILE ssh_config(5) file to read.\n -k --known-hosts FILE known_hosts file to read.\n\";\n\n#[derive(Debug, RustcDecodable)]\nstruct Args {\n flag_verbose: isize,\n cmd_create: bool,\n arg_output: String,\n flag_config: Vec<String>,\n flag_known_hosts: Vec<String>,\n}\n\nquick_main!(run);\nfn run() -> Result<()> {\n let args: Args = Docopt::new(USAGE)\n .and_then(|d| d.decode())\n .unwrap_or_else(|e| e.exit());\n if args.cmd_create {\n let mut hosts = process::<known_hosts::KnownHosts>(args.flag_known_hosts)?;\n hosts.extend(process::<ssh_config::SSHConfigFile>(args.flag_config)?);\n hosts.sort();\n hosts.dedup();\n\n let output = Path::new(&args.arg_output);\n std::fs::remove_dir_all(output)\n .chain_err(|| format!(\"Could not clear output directory {:?}\", output))?;\n std::fs::create_dir_all(output)\n .chain_err(|| format!(\"Couldn't re-create output directory {:?}\", output))?;\n\n for kh in hosts {\n if kh.ineligible() {\n continue;\n }\n kh.write_bookmark(output).chain_err(|| format!(\"Couldn't write bookmark {:?}\", kh))?;\n }\n Ok(())\n } else {\n bail!(\"Don't know what to do!\");\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate liner;\n#[macro_use]\nextern crate serde_derive;\nextern crate toml;\n\nuse liner::Context;\nuse liner::KeyBindings;\nuse std::collections::HashMap;\nuse std::collections::hash_map::Keys;\nuse std::fs::File;\nuse std::io::{ErrorKind, Read};\nuse std::mem;\nuse std::path::Path;\n\ntype Data = HashMap<String, String>;\n\n#[derive(Deserialize)]\nstruct CacheConfig {\n ttl: usize,\n}\n\n#[derive(Deserialize)]\nstruct Config {\n cache: CacheConfig,\n}\n\nstruct Cache {\n data: Box<Data>,\n config: Config,\n bytes: 
usize\n}\n\nfn get_size_of_string(s: &String) -> usize {\n return s.len() * std::mem::size_of::<u8>();\n}\n\nimpl Cache {\n fn clear(&mut self) {\n self.data.clear();\n self.bytes = 0;\n }\n\n fn get(&mut self, key: &String) -> Option<&String> {\n self.data.get(key)\n }\n\n fn set(&mut self, key: String, value: String) {\n if let Some(previous_value) = self.data.get(&key) {\n self.bytes -= get_size_of_string(previous_value);\n }\n else {\n self.bytes += get_size_of_string(&key);\n }\n self.bytes += get_size_of_string(&value);\n\n self.data.insert(key, value);\n }\n\n fn delete(&mut self, key: &String) {\n if let Some(value) = self.data.remove(key) {\n self.bytes -= get_size_of_string(&value) + get_size_of_string(key);\n }\n }\n\n fn keys(&self) -> Keys<String, String> {\n self.data.keys()\n }\n\n fn size(&self) -> usize {\n self.bytes\n }\n}\n\nfn main() {\n \/\/ Open and read configuration\n let config_path = Path::new(\"etc\/carpool.toml\");\n let mut config_file = match File::open(&config_path) {\n Err(_) => panic!(\"couldn't open {:?}\", config_path),\n Ok(file) => file,\n };\n\n let mut config_content = String::new();\n let _ = config_file.read_to_string(&mut config_content);\n\n let config: Config = toml::from_str(config_content.as_str()).unwrap();\n\n \/\/ Set up cache\n let mut cache = Cache {\n config: config,\n data: Box::new(HashMap::new()),\n bytes: 0,\n };\n\n let empty_value = String::from(\"\");\n\n \/\/ Start REPL\n let mut con = Context::new();\n\n loop {\n let res = con.read_line(\"> \", &mut |_| {});\n\n match res {\n Ok(res) => {\n match res.as_str() {\n s if s.trim() == \"\" => {}\n s if s.starts_with(\"get \") => {\n let (_, key) = s.split_at(4);\n let key_trimmed = String::from(key.trim());\n if key_trimmed.find(' ').is_some() {\n println!(\"key cannot contain space\");\n continue\n }\n println!(\"{}\", cache.get(&key_trimmed).unwrap_or(&empty_value));\n }\n s if s.starts_with(\"del \") => {\n let (_, key) = s.split_at(4);\n let key_trimmed = String::from(key.trim());\n if key_trimmed.find(' ').is_some() {\n println!(\"key cannot contain space\");\n continue\n }\n let value = cache.delete(&key_trimmed);\n }\n s if s.starts_with(\"set \") => {\n let (_, key_value) = s.split_at(4);\n match key_value.find(' ') {\n Some(i) => {\n let (key, value) = key_value.split_at(i);\n let key_trimmed = String::from(key.trim());\n let value_trimmed = String::from(value.trim());\n cache.set(key_trimmed, value_trimmed);\n }\n None => println!(\"no value specified\")\n }\n }\n \"size\" => {\n println!(\"{} bytes\", cache.size());\n }\n \"keys\" => {\n for key in cache.keys() {\n println!(\"{}\", key);\n }\n }\n \"emacs\" => {\n con.key_bindings = KeyBindings::Emacs;\n println!(\"emacs mode\");\n }\n \"vi\" | \"vim\" => {\n con.key_bindings = KeyBindings::Vi;\n println!(\"vi mode\");\n }\n \"exit\" => {\n break;\n }\n _ => {\n println!(\"operation not defined\")\n }\n }\n\n con.history.push(res.into()).unwrap();\n }\n Err(e) => {\n match e.kind() {\n \/\/ ctrl-c pressed\n ErrorKind::Interrupted => {}\n \/\/ ctrl-d pressed\n ErrorKind::UnexpectedEof => {\n break;\n }\n _ => panic!(\"error: {:?}\", e),\n }\n }\n }\n }\n}\n<commit_msg>Add count operation<commit_after>extern crate liner;\n#[macro_use]\nextern crate serde_derive;\nextern crate toml;\n\nuse liner::Context;\nuse liner::KeyBindings;\nuse std::collections::HashMap;\nuse std::collections::hash_map::Keys;\nuse std::fs::File;\nuse std::io::{ErrorKind, Read};\nuse std::mem;\nuse std::path::Path;\n\ntype Data = HashMap<String, 
String>;\n\n#[derive(Deserialize)]\nstruct CacheConfig {\n ttl: usize,\n}\n\n#[derive(Deserialize)]\nstruct Config {\n cache: CacheConfig,\n}\n\nstruct Cache {\n data: Box<Data>,\n config: Config,\n bytes: usize\n}\n\nfn get_size_of_string(s: &String) -> usize {\n return s.len() * std::mem::size_of::<u8>();\n}\n\nimpl Cache {\n fn clear(&mut self) {\n self.data.clear();\n self.bytes = 0;\n }\n\n fn get(&mut self, key: &String) -> Option<&String> {\n self.data.get(key)\n }\n\n fn set(&mut self, key: String, value: String) {\n if let Some(previous_value) = self.data.get(&key) {\n self.bytes -= get_size_of_string(previous_value);\n }\n else {\n self.bytes += get_size_of_string(&key);\n }\n self.bytes += get_size_of_string(&value);\n\n self.data.insert(key, value);\n }\n\n fn delete(&mut self, key: &String) {\n if let Some(value) = self.data.remove(key) {\n self.bytes -= get_size_of_string(&value) + get_size_of_string(key);\n }\n }\n\n fn keys(&self) -> Keys<String, String> {\n self.data.keys()\n }\n\n fn count(&self) -> usize {\n self.data.len()\n }\n\n fn size(&self) -> usize {\n self.bytes\n }\n}\n\nfn main() {\n \/\/ Open and read configuration\n let config_path = Path::new(\"etc\/carpool.toml\");\n let mut config_file = match File::open(&config_path) {\n Err(_) => panic!(\"couldn't open {:?}\", config_path),\n Ok(file) => file,\n };\n\n let mut config_content = String::new();\n let _ = config_file.read_to_string(&mut config_content);\n\n let config: Config = toml::from_str(config_content.as_str()).unwrap();\n\n \/\/ Set up cache\n let mut cache = Cache {\n config: config,\n data: Box::new(HashMap::new()),\n bytes: 0,\n };\n\n let empty_value = String::from(\"\");\n\n \/\/ Start REPL\n let mut con = Context::new();\n\n loop {\n let res = con.read_line(\"> \", &mut |_| {});\n\n match res {\n Ok(res) => {\n match res.as_str() {\n s if s.trim() == \"\" => {}\n s if s.starts_with(\"get \") => {\n let (_, key) = s.split_at(4);\n let key_trimmed = String::from(key.trim());\n if key_trimmed.find(' ').is_some() {\n println!(\"key cannot contain space\");\n continue\n }\n println!(\"{}\", cache.get(&key_trimmed).unwrap_or(&empty_value));\n }\n s if s.starts_with(\"del \") => {\n let (_, key) = s.split_at(4);\n let key_trimmed = String::from(key.trim());\n if key_trimmed.find(' ').is_some() {\n println!(\"key cannot contain space\");\n continue\n }\n let value = cache.delete(&key_trimmed);\n }\n s if s.starts_with(\"set \") => {\n let (_, key_value) = s.split_at(4);\n match key_value.find(' ') {\n Some(i) => {\n let (key, value) = key_value.split_at(i);\n let key_trimmed = String::from(key.trim());\n let value_trimmed = String::from(value.trim());\n cache.set(key_trimmed, value_trimmed);\n }\n None => println!(\"no value specified\")\n }\n }\n \"count\" => {\n println!(\"{}\", cache.count());\n }\n \"size\" => {\n println!(\"{} bytes\", cache.size());\n }\n \"keys\" => {\n for key in cache.keys() {\n println!(\"{}\", key);\n }\n }\n \"emacs\" => {\n con.key_bindings = KeyBindings::Emacs;\n println!(\"emacs mode\");\n }\n \"vi\" | \"vim\" => {\n con.key_bindings = KeyBindings::Vi;\n println!(\"vi mode\");\n }\n \"exit\" => {\n break;\n }\n _ => {\n println!(\"operation not defined\")\n }\n }\n\n con.history.push(res.into()).unwrap();\n }\n Err(e) => {\n match e.kind() {\n \/\/ ctrl-c pressed\n ErrorKind::Interrupted => {}\n \/\/ ctrl-d pressed\n ErrorKind::UnexpectedEof => {\n break;\n }\n _ => panic!(\"error: {:?}\", e),\n }\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>extern crate 
num_cpus;\n\nuse std::env;\nuse std::io::{self, Write, StderrLock};\nuse std::process::{Command, exit};\nuse std::thread::{self, JoinHandle};\nuse std::sync::Arc;\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\n\/* TODO: Functionality can be increased to accept the following syntaxes from GNU Parallel:\n - Stdin support is currently missing.\n - Use a tokenizer for building commands instead of string replacements.\n - {N}, {N.}, etc.\n - parallel command {1} {2} {3} ::: 1 2 3 ::: 4 5 6 ::: 7 8 9\n - parallel command ::: * instead of parallel command {} ::: *\n - parallel ::: \"command 1\" \"command 2\"\n - paralllel command ::: a b c :::+ 1 2 3 ::: d e f :::+ 4 5 6\n*\/\n\n\/\/\/ A JobThread allows for the manipulation of content within.\nstruct JobThread {\n \/\/\/ Allows us to know when a thread has completed all of it's tasks.\n handle: JoinHandle<()>,\n}\n\n\/\/\/ Contains the parameters that each thread will acquire and manipulate.\nstruct Inputs {\n \/\/\/ The values that each thread will copy values from.\n values: Vec<String>\n}\n\nstruct Flags {\n \/\/\/ The number of jobs to create for processing inputs.\n ncores: usize,\n}\n\nfn main() {\n let stderr = io::stderr();\n let mut flags = Flags {\n ncores: num_cpus::get()\n };\n let mut command = String::new();\n let mut inputs = Inputs { values: Vec::new() };\n\n \/\/ Let's collect all parameters that we need from the program's arguments.\n \/\/ If an error is returned, this will handle that error as efficiently as possible.\n if let Err(why) = parse_arguments(&mut flags, &mut command, &mut inputs.values) {\n let mut stderr = stderr.lock();\n let _ = stderr.write(b\"parallel: parsing error: \");\n match why {\n ParseErr::JobsNaN(value) => {\n let _ = stderr.write(b\"jobs parameter, '\");\n let _ = stderr.write(value.as_bytes());\n let _ = stderr.write(b\"', is not a number.\\n\");\n },\n _ => {\n let message: &[u8] = match why {\n ParseErr::InputVarsNotDefined => b\"input variables were not defined.\\n\",\n ParseErr::JobsNoValue => b\"no jobs parameter was defined.\\n\",\n _ => unreachable!()\n };\n let _ = stderr.write(message);\n }\n };\n exit(1);\n }\n\n \/\/ It will be useful to know the number of inputs, to know when to quit.\n let num_inputs = inputs.values.len();\n\n \/\/ Stores the next input to be processed\n let shared_counter = Arc::new(AtomicUsize::new(0));\n\n \/\/ We will share the same list of inputs with each thread.\n let shared_input = Arc::new(inputs);\n\n \/\/ First we will create as many threads as `flags.ncores` specifies.\n \/\/ The `threads` vector will contain the thread handles needed to\n \/\/ know when to quit the program.\n let mut threads: Vec<JobThread> = Vec::with_capacity(flags.ncores);\n for slot in 1..flags.ncores+1 {\n \/\/ The command that each input variable will be sent to.\n let command = command.clone();\n \/\/ Allow the thread to gain access to the list of inputs.\n let input = shared_input.clone();\n \/\/ Allow the thread to access the current command counter\n let counter = shared_counter.clone();\n \/\/ Allow the thread to know when it's time to stop.\n let num_inputs = num_inputs.clone();\n\n \/\/ The actual thread where the work will happen on incoming data.\n let handle: JoinHandle<()> = thread::spawn(move || {\n let slot_number = slot;\n let stderr = io::stderr();\n loop {\n \/\/ Obtain the Nth item and it's job ID from the list of inputs.\n let (input_var, job_id) = {\n \/\/ Atomically increment the counter\n let old_counter = counter.fetch_add(1, Ordering::SeqCst);\n if 
old_counter >= num_inputs {\n break\n } else {\n let input_var = &input.values[old_counter];\n let job_id = old_counter + 1;\n (input_var, job_id)\n }\n };\n\n \/\/ Now the input can be processed with the command.\n if let Err(cmd_err) = cmd_builder(&input_var, &command, slot_number, job_id) {\n let mut stderr = stderr.lock();\n cmd_err.handle(&mut stderr);\n }\n }\n });\n\n \/\/ After the thread has been created, add the important pieces needed by the\n \/\/ main thread to the `threads` vector.\n threads.push(JobThread {\n handle: handle, \/\/ Gives the main thread access to using the thread's `join()` method.\n });\n }\n\n for thread in threads.into_iter() { thread.handle.join().unwrap(); }\n}\n\nenum CommandErr {\n NoCommandSpecified,\n Failed(String, Vec<String>)\n}\n\nimpl CommandErr {\n fn handle(self, stderr: &mut StderrLock) {\n let _ = stderr.write(b\"parallel: command error: \");\n match self {\n CommandErr::NoCommandSpecified => {\n let _ = stderr.write(b\"no command specified.\\n\");\n },\n CommandErr::Failed(command, arguments) => {\n let _ = stderr.write(command.as_bytes());\n for arg in &arguments {\n let _ = stderr.write(b\" \");\n let _ = stderr.write(arg.as_bytes());\n }\n let _ = stderr.write(b\"\\n\");\n }\n }\n }\n}\n\n\/\/\/ Builds the command and executes it\nfn cmd_builder(input: &str, template: &str, slot_id: usize, job_id: usize) -> Result<(), CommandErr> {\n \/\/ TODO: Use a tokenizer for building the command from the template.\n let mut iterator = template.split_whitespace();\n let command = match iterator.next() {\n Some(command) => command,\n None => return Err(CommandErr::NoCommandSpecified)\n };\n let mut arguments = Vec::new();\n for arg in iterator {\n if arg.contains(\"{}\") {\n arguments.push(arg.replace(\"{}\", input));\n } else if arg.contains(\"{.}\") {\n arguments.push(arg.replace(\"{.}\", remove_extension(input)));\n } else if arg.contains(\"{\/}\") {\n arguments.push(arg.replace(\"{\/}\", basename(input)));\n } else if arg.contains(\"{\/\/}\") {\n arguments.push(arg.replace(\"{\/\/}\", dirname(input)));\n } else if arg.contains(\"{\/.}\") {\n arguments.push(arg.replace(\"{\/.}\", basename(remove_extension(input))));\n } else if arg.contains(\"{#}\") {\n arguments.push(arg.replace(\"{#}\", &job_id.to_string()));\n } else if arg.contains(\"{%}\") {\n arguments.push(arg.replace(\"{%}\", &slot_id.to_string()));\n } else {\n arguments.push(arg.to_owned());\n }\n }\n\n if let Err(_) = Command::new(&command).args(&arguments).status() {\n return Err(CommandErr::Failed(String::from(command), arguments));\n }\n Ok(())\n}\n\n\/\/\/ Removes the extension of a given input\nfn remove_extension<'a>(input: &'a str) -> &'a str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '.' { index = id; }\n }\n if index == 0 { input } else { &input[0..index] }\n}\n\nfn basename<'a>(input: &'a str) -> &'a str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '\/' { index = id; }\n }\n if index == 0 { input } else { &input[index+1..] 
}\n}\n\nfn dirname<'a>(input: &'a str) -> &'a str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '\/' { index = id; }\n }\n if index == 0 { input } else { &input[0..index] }\n}\n\nenum ParseErr {\n JobsNaN(String),\n JobsNoValue,\n InputVarsNotDefined,\n}\n\n\/\/ Parses input arguments and stores their values into their associated variabless.\nfn parse_arguments(flags: &mut Flags, command: &mut String, input_variables: &mut Vec<String>)\n -> Result<(), ParseErr>\n{\n let mut parsing_arguments = true;\n let mut command_is_set = false;\n let mut raw_args = env::args().skip(1).peekable();\n while let Some(argument) = raw_args.next() {\n if parsing_arguments {\n match argument.as_str() {\n \/\/ Defines the number of jobs to run in parallel.\n \"-j\" => {\n match raw_args.peek() {\n Some(val) => match val.parse::<usize>() {\n Ok(val) => flags.ncores = val,\n Err(_) => return Err(ParseErr::JobsNaN(val.clone()))\n },\n None => return Err(ParseErr::JobsNoValue)\n }\n let _ = raw_args.next();\n },\n \/\/ Arguments after `:::` are input values.\n \":::\" => parsing_arguments = false,\n _ => {\n if command_is_set {\n command.push(' ');\n command.push_str(&argument);\n } else {\n command.push_str(&argument);\n command_is_set = true;\n }\n\n }\n }\n } else {\n input_variables.push(argument);\n }\n }\n\n if input_variables.is_empty() { return Err(ParseErr::InputVarsNotDefined) }\n Ok(())\n}\n<commit_msg>Some style fixes<commit_after>extern crate num_cpus;\n\nuse std::env;\nuse std::io::{self, Write, StderrLock};\nuse std::process::{Command, exit};\nuse std::thread::{self, JoinHandle};\nuse std::sync::Arc;\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\n\/* TODO: Functionality can be increased to accept the following syntaxes from GNU Parallel:\n - Stdin support is currently missing.\n - Use a tokenizer for building commands instead of string replacements.\n - {N}, {N.}, etc.\n - parallel command {1} {2} {3} ::: 1 2 3 ::: 4 5 6 ::: 7 8 9\n - parallel command ::: * instead of parallel command {} ::: *\n - parallel ::: \"command 1\" \"command 2\"\n - paralllel command ::: a b c :::+ 1 2 3 ::: d e f :::+ 4 5 6\n*\/\n\n\/\/\/ A `JobThread` allows for the manipulation of content within.\nstruct JobThread {\n \/\/\/ Allows us to know when a thread has completed all of it's tasks.\n handle: JoinHandle<()>,\n}\n\n\/\/\/ Contains the parameters that each thread will acquire and manipulate.\nstruct Inputs {\n \/\/\/ The values that each thread will copy values from.\n values: Vec<String>\n}\n\nstruct Flags {\n \/\/\/ The number of jobs to create for processing inputs.\n ncores: usize,\n}\n\nfn main() {\n let stderr = io::stderr();\n let mut flags = Flags {\n ncores: num_cpus::get()\n };\n let mut command = String::new();\n let mut inputs = Inputs { values: Vec::new() };\n\n \/\/ Let's collect all parameters that we need from the program's arguments.\n \/\/ If an error is returned, this will handle that error as efficiently as possible.\n if let Err(why) = parse_arguments(&mut flags, &mut command, &mut inputs.values) {\n let mut stderr = stderr.lock();\n let _ = stderr.write(b\"parallel: parsing error: \");\n match why {\n ParseErr::JobsNaN(value) => {\n let _ = stderr.write(b\"jobs parameter, '\");\n let _ = stderr.write(value.as_bytes());\n let _ = stderr.write(b\"', is not a number.\\n\");\n },\n _ => {\n let message: &[u8] = match why {\n ParseErr::InputVarsNotDefined => b\"input variables were not defined.\\n\",\n ParseErr::JobsNoValue => b\"no jobs parameter 
was defined.\\n\",\n _ => unreachable!()\n };\n let _ = stderr.write(message);\n }\n };\n exit(1);\n }\n\n \/\/ It will be useful to know the number of inputs, to know when to quit.\n let num_inputs = inputs.values.len();\n\n \/\/ Stores the next input to be processed\n let shared_counter = Arc::new(AtomicUsize::new(0));\n\n \/\/ We will share the same list of inputs with each thread.\n let shared_input = Arc::new(inputs);\n\n \/\/ First we will create as many threads as `flags.ncores` specifies.\n \/\/ The `threads` vector will contain the thread handles needed to\n \/\/ know when to quit the program.\n let mut threads: Vec<JobThread> = Vec::with_capacity(flags.ncores);\n for slot in 1..flags.ncores+1 {\n \/\/ The command that each input variable will be sent to.\n let command = command.clone();\n \/\/ Allow the thread to gain access to the list of inputs.\n let input = shared_input.clone();\n \/\/ Allow the thread to access the current command counter\n let counter = shared_counter.clone();\n \/\/ Allow the thread to know when it's time to stop.\n let num_inputs = num_inputs;\n\n \/\/ The actual thread where the work will happen on incoming data.\n let handle: JoinHandle<()> = thread::spawn(move || {\n let slot_number = slot;\n let stderr = io::stderr();\n loop {\n \/\/ Obtain the Nth item and it's job ID from the list of inputs.\n let (input_var, job_id) = {\n \/\/ Atomically increment the counter\n let old_counter = counter.fetch_add(1, Ordering::SeqCst);\n if old_counter >= num_inputs {\n break\n } else {\n let input_var = &input.values[old_counter];\n let job_id = old_counter + 1;\n (input_var, job_id)\n }\n };\n\n \/\/ Now the input can be processed with the command.\n if let Err(cmd_err) = cmd_builder(input_var, &command, slot_number, job_id) {\n let mut stderr = stderr.lock();\n cmd_err.handle(&mut stderr);\n }\n }\n });\n\n \/\/ After the thread has been created, add the important pieces needed by the\n \/\/ main thread to the `threads` vector.\n threads.push(JobThread {\n handle: handle, \/\/ Gives the main thread access to using the thread's `join()` method.\n });\n }\n\n for thread in threads.into_iter() { thread.handle.join().unwrap(); }\n}\n\nenum CommandErr {\n NoCommandSpecified,\n Failed(String, Vec<String>)\n}\n\nimpl CommandErr {\n fn handle(self, stderr: &mut StderrLock) {\n let _ = stderr.write(b\"parallel: command error: \");\n match self {\n CommandErr::NoCommandSpecified => {\n let _ = stderr.write(b\"no command specified.\\n\");\n },\n CommandErr::Failed(command, arguments) => {\n let _ = stderr.write(command.as_bytes());\n for arg in &arguments {\n let _ = stderr.write(b\" \");\n let _ = stderr.write(arg.as_bytes());\n }\n let _ = stderr.write(b\"\\n\");\n }\n }\n }\n}\n\n\/\/\/ Builds the command and executes it\nfn cmd_builder(input: &str, template: &str, slot_id: usize, job_id: usize) -> Result<(), CommandErr> {\n \/\/ TODO: Use a tokenizer for building the command from the template.\n let mut iterator = template.split_whitespace();\n let command = match iterator.next() {\n Some(command) => command,\n None => return Err(CommandErr::NoCommandSpecified)\n };\n let mut arguments = Vec::new();\n for arg in iterator {\n if arg.contains(\"{}\") {\n arguments.push(arg.replace(\"{}\", input));\n } else if arg.contains(\"{.}\") {\n arguments.push(arg.replace(\"{.}\", remove_extension(input)));\n } else if arg.contains(\"{\/}\") {\n arguments.push(arg.replace(\"{\/}\", basename(input)));\n } else if arg.contains(\"{\/\/}\") {\n arguments.push(arg.replace(\"{\/\/}\", 
dirname(input)));\n } else if arg.contains(\"{\/.}\") {\n arguments.push(arg.replace(\"{\/.}\", basename(remove_extension(input))));\n } else if arg.contains(\"{#}\") {\n arguments.push(arg.replace(\"{#}\", &job_id.to_string()));\n } else if arg.contains(\"{%}\") {\n arguments.push(arg.replace(\"{%}\", &slot_id.to_string()));\n } else {\n arguments.push(arg.to_owned());\n }\n }\n\n if let Err(_) = Command::new(&command).args(&arguments).status() {\n return Err(CommandErr::Failed(String::from(command), arguments));\n }\n Ok(())\n}\n\n\/\/\/ Removes the extension of a given input\nfn remove_extension(input: &str) -> &str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '.' { index = id; }\n }\n if index == 0 { input } else { &input[0..index] }\n}\n\nfn basename(input: &str) -> &str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '\/' { index = id; }\n }\n if index == 0 { input } else { &input[index+1..] }\n}\n\nfn dirname(input: &str) -> &str {\n let mut index = 0;\n for (id, character) in input.chars().enumerate() {\n if character == '\/' { index = id; }\n }\n if index == 0 { input } else { &input[0..index] }\n}\n\nenum ParseErr {\n JobsNaN(String),\n JobsNoValue,\n InputVarsNotDefined,\n}\n\n\/\/ Parses input arguments and stores their values into their associated variabless.\nfn parse_arguments(flags: &mut Flags, command: &mut String, input_variables: &mut Vec<String>)\n -> Result<(), ParseErr>\n{\n let mut parsing_arguments = true;\n let mut command_is_set = false;\n let mut raw_args = env::args().skip(1).peekable();\n while let Some(argument) = raw_args.next() {\n if parsing_arguments {\n match argument.as_str() {\n \/\/ Defines the number of jobs to run in parallel.\n \"-j\" => {\n match raw_args.peek() {\n Some(val) => match val.parse::<usize>() {\n Ok(val) => flags.ncores = val,\n Err(_) => return Err(ParseErr::JobsNaN(val.clone()))\n },\n None => return Err(ParseErr::JobsNoValue)\n }\n let _ = raw_args.next();\n },\n \/\/ Arguments after `:::` are input values.\n \":::\" => parsing_arguments = false,\n _ => {\n if command_is_set {\n command.push(' ');\n command.push_str(&argument);\n } else {\n command.push_str(&argument);\n command_is_set = true;\n }\n\n }\n }\n } else {\n input_variables.push(argument);\n }\n }\n\n if input_variables.is_empty() { return Err(ParseErr::InputVarsNotDefined) }\n Ok(())\n}\n<|endoftext|>"} {"text":"<commit_before>use instr::Instr;\nuse cpu::Cpu;\nuse time::precise_time_ns;\nuse std::fs::File;\nuse std::io::Read;\nuse std::ops::Index;\n\nuse glium::DisplayBuild;\nuse glium::Surface;\nuse glium::SwapBuffersError;\nuse glium::glutin::Api;\nuse glium::glutin::GlRequest;\nuse glium::glutin::Event;\n\nextern crate time;\nextern crate getopts;\n#[macro_use]\nextern crate glium;\nextern crate nalgebra;\n\nmod instr;\nmod cpu;\nmod render;\n\n#[derive(Copy, Clone)]\npub enum MemSection {\n Vram,\n RomBank0,\n}\n\npub struct AddressSpace {\n data: [u8; 0x10000],\n}\n\nimpl AddressSpace {\n\n pub fn new() -> AddressSpace {\n AddressSpace { data: [0; 0x10000] }\n }\n\n pub fn read(&self, addr: u16) -> u8 {\n self.data[addr as usize]\n }\n\n pub fn read_u16(&self, addr: u16) -> u16 {\n self.data[addr as usize] as u16 | ((self.data[addr as usize + 1] as u16) << 8)\n }\n\n pub fn read_slice(&self, addr: u16, bytes: u16) -> &[u8] {\n &self.data[addr as usize .. 
(addr + bytes) as usize]\n }\n\n pub fn write(&mut self, addr: u16, data: u8) {\n self.data[addr as usize] = data;\n }\n\n pub fn write_u16(&mut self, addr: u16, data: u16) {\n let lo = (data & 0xFF) as u8;\n let hi = ((data & 0xFF00) >> 8) as u8;\n self.data[addr as usize] = lo;\n self.data[addr as usize + 1] = hi;\n }\n\n pub fn load_rom(&mut self, rom: &mut File) -> std::io::Result<()> {\n \/\/ Read in header first\n try!(rom.read(&mut self.data[0x000..0x150]));\n \/\/ Then read in remaining cart data\n try!(rom.read(&mut self.data[0x0150..0x8000]));\n Ok(())\n }\n\n}\n\nimpl Index<u16> for AddressSpace {\n type Output = u8;\n\n fn index(&self, idx: u16) -> &u8 {\n &self.data[idx as usize]\n }\n}\n\n#[derive(Copy, Clone)]\npub enum RegFlag {\n Zero,\n Subtract,\n HalfCarry,\n Carry,\n}\n\n#[derive(Copy, Clone)]\npub enum Register {\n A,\n B,\n C,\n D,\n E,\n F,\n H,\n L,\n AF,\n BC,\n DE,\n HL,\n SP,\n PC,\n Flag,\n}\n\npub struct RegData {\n a: u8,\n b: u8,\n c: u8,\n d: u8,\n e: u8,\n f: u8,\n h: u8,\n l: u8,\n sp: u16,\n pc: u16,\n flag: u8,\n}\n\nimpl RegData {\n\n pub fn new() -> RegData {\n RegData {\n a: 0,\n b: 0,\n c: 0,\n d: 0,\n e: 0,\n f: 0,\n h: 0,\n l: 0,\n sp: 0xFFFE,\n pc: 0x100,\n flag: 0,\n }\n }\n\n pub fn read(&self, reg: Register) -> u8 {\n match reg {\n Register::A => self.a,\n Register::B => self.b,\n Register::C => self.c,\n Register::D => self.d,\n Register::E => self.e,\n Register::F => self.f,\n Register::H => self.h,\n Register::L => self.l,\n Register::Flag => self.flag,\n _ => panic!(\"Register not available for 8-bit read\"),\n }\n }\n\n pub fn read_u16(&self, reg: Register) -> u16 {\n match reg {\n Register::AF => (self.a as u16) << 8 | self.f as u16,\n Register::BC => (self.b as u16) << 8 | self.c as u16,\n Register::DE => (self.d as u16) << 8 | self.e as u16,\n Register::HL => (self.h as u16) << 8 | self.l as u16,\n Register::SP => self.sp,\n Register::PC => self.pc,\n _ => panic!(\"Register not available for 16-bit read\"),\n }\n }\n\n pub fn write(&mut self, reg: Register, data: u8) {\n match reg {\n Register::A => self.a = data,\n Register::B => self.b = data,\n Register::C => self.c = data,\n Register::D => self.d = data,\n Register::E => self.e = data,\n Register::F => self.f = data,\n Register::H => self.h = data,\n Register::L => self.l = data,\n _ => panic!(\"Register not available for 8-bit write\"),\n }\n }\n\n pub fn write_u16(&mut self, reg: Register, data: u16) {\n let (hi, lo) = ((data & 0xFF00 >> 8) as u8, (data & 0xFF) as u8);\n match reg {\n Register::AF => {\n self.a = hi;\n self.f = lo;\n },\n Register::BC => {\n self.b = hi;\n self.c = lo;\n },\n Register::DE => {\n self.d = hi;\n self.c = lo;\n },\n Register::HL => {\n self.h = hi;\n self.l = lo;\n },\n Register::SP => self.sp = data,\n Register::PC => self.pc = data,\n _ => panic!(\"Register not available for 16-bit write\"),\n }\n }\n\n pub fn copy(&mut self, dst: Register, src: Register) {\n let data = self.read(src);\n self.write(dst, data);\n }\n\n pub fn copy_u16(&mut self, dst: Register, src: Register) {\n let data = self.read_u16(src);\n self.write_u16(dst, data);\n }\n\n pub fn set_flag(&mut self, flag: RegFlag, on: bool) {\n let bit = match flag {\n RegFlag::Zero => 0x80,\n RegFlag::Subtract => 0x40,\n RegFlag::HalfCarry => 0x20,\n RegFlag::Carry => 0x10,\n };\n if (on) {\n self.flag |= bit;\n } else {\n self.flag &= bit ^ 0xFF;\n }\n }\n\n pub fn get_flag(&self, flag: RegFlag) -> bool {\n let bit = match flag {\n RegFlag::Zero => 0x80,\n RegFlag::Subtract => 0x40,\n 
RegFlag::HalfCarry => 0x20,\n RegFlag::Carry => 0x10,\n };\n self.flag & bit != 0\n }\n\n pub fn advance_pc(&mut self) -> u16 {\n let pc = self.pc;\n self.pc += 1;\n pc\n }\n\n pub fn set_pc(&mut self, addr: u16) -> u16 {\n let pc = self.pc;\n self.pc = addr;\n pc\n }\n\n pub fn add_pc(&mut self, n: i8) -> u16 {\n let pc = self.pc;\n self.pc = ((self.pc as i32) + (n as i32)) as u16;\n pc\n }\n\n}\n\nfn main() {\n \/\/ Gather command line args\n let args: Vec<String> = std::env::args().collect();\n let mut opts = getopts::Options::new();\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => { m },\n Err(e) => panic!(\"Error: {}\", e),\n };\n let input = if !matches.free.is_empty() {\n matches.free[0].clone()\n } else {\n println!(\"No input ROM\");\n return;\n };\n\n \/\/ Build graphics context and window\n let display = glium::glutin::WindowBuilder::new()\n .with_title(\"Gameboy Rust\".to_string())\n .with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))\n .build_glium()\n .unwrap();\n\n \/\/ Do machine initialization\n let mut cpu = Cpu::new();\n {\n let mut ram = cpu.get_ram();\n let mut romfile = match File::open(std::path::Path::new(&input)) {\n Ok(f) => { f },\n Err(e) => {\n println!(\"Error opening file: {}\", e);\n return;\n }\n };\n if let Err(e) = ram.load_rom(&mut romfile) {\n println!(\"Error loading rom data: {}\", e);\n return;\n }\n }\n\n \/\/ Initialize virtual LCD\n let mut lcd = render::GbDisplay::new(&display);\n\n let mut viewport = {\n let window = display.get_window();\n let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();\n render::calculate_viewport(width, height)\n };\n\n \/\/ Simulate CPU\n 'main: loop {\n \/\/ Collect user input\n for ev in display.poll_events() {\n match ev {\n Event::Closed => {\n break 'main;\n },\n Event::Resized(..) => {\n let window = display.get_window();\n let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();\n viewport = render::calculate_viewport(width, height);\n },\n _ => (),\n }\n }\n\n \/\/ Redraw screen\n let mut target = display.draw();\n target.clear_color(0.0, 0.0, 0.0, 0.0);\n lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 1.0));\n match target.finish().err() {\n Some(SwapBuffersError::ContextLost) => {\n panic!(\"OpenGL contetxt lost!\");\n },\n Some(SwapBuffersError::AlreadySwapped) => {\n println!(\"Warning: OpenGL buffer already swapped\");\n },\n None => (),\n }\n }\n}\n<commit_msg>Make background fill non-transparent<commit_after>use instr::Instr;\nuse cpu::Cpu;\nuse time::precise_time_ns;\nuse std::fs::File;\nuse std::io::Read;\nuse std::ops::Index;\n\nuse glium::DisplayBuild;\nuse glium::Surface;\nuse glium::SwapBuffersError;\nuse glium::glutin::Api;\nuse glium::glutin::GlRequest;\nuse glium::glutin::Event;\n\nextern crate time;\nextern crate getopts;\n#[macro_use]\nextern crate glium;\nextern crate nalgebra;\n\nmod instr;\nmod cpu;\nmod render;\n\n#[derive(Copy, Clone)]\npub enum MemSection {\n Vram,\n RomBank0,\n}\n\npub struct AddressSpace {\n data: [u8; 0x10000],\n}\n\nimpl AddressSpace {\n\n pub fn new() -> AddressSpace {\n AddressSpace { data: [0; 0x10000] }\n }\n\n pub fn read(&self, addr: u16) -> u8 {\n self.data[addr as usize]\n }\n\n pub fn read_u16(&self, addr: u16) -> u16 {\n self.data[addr as usize] as u16 | ((self.data[addr as usize + 1] as u16) << 8)\n }\n\n pub fn read_slice(&self, addr: u16, bytes: u16) -> &[u8] {\n &self.data[addr as usize .. 
(addr + bytes) as usize]\n }\n\n pub fn write(&mut self, addr: u16, data: u8) {\n self.data[addr as usize] = data;\n }\n\n pub fn write_u16(&mut self, addr: u16, data: u16) {\n let lo = (data & 0xFF) as u8;\n let hi = ((data & 0xFF00) >> 8) as u8;\n self.data[addr as usize] = lo;\n self.data[addr as usize + 1] = hi;\n }\n\n pub fn load_rom(&mut self, rom: &mut File) -> std::io::Result<()> {\n \/\/ Read in header first\n try!(rom.read(&mut self.data[0x000..0x150]));\n \/\/ Then read in remaining cart data\n try!(rom.read(&mut self.data[0x0150..0x8000]));\n Ok(())\n }\n\n}\n\nimpl Index<u16> for AddressSpace {\n type Output = u8;\n\n fn index(&self, idx: u16) -> &u8 {\n &self.data[idx as usize]\n }\n}\n\n#[derive(Copy, Clone)]\npub enum RegFlag {\n Zero,\n Subtract,\n HalfCarry,\n Carry,\n}\n\n#[derive(Copy, Clone)]\npub enum Register {\n A,\n B,\n C,\n D,\n E,\n F,\n H,\n L,\n AF,\n BC,\n DE,\n HL,\n SP,\n PC,\n Flag,\n}\n\npub struct RegData {\n a: u8,\n b: u8,\n c: u8,\n d: u8,\n e: u8,\n f: u8,\n h: u8,\n l: u8,\n sp: u16,\n pc: u16,\n flag: u8,\n}\n\nimpl RegData {\n\n pub fn new() -> RegData {\n RegData {\n a: 0,\n b: 0,\n c: 0,\n d: 0,\n e: 0,\n f: 0,\n h: 0,\n l: 0,\n sp: 0xFFFE,\n pc: 0x100,\n flag: 0,\n }\n }\n\n pub fn read(&self, reg: Register) -> u8 {\n match reg {\n Register::A => self.a,\n Register::B => self.b,\n Register::C => self.c,\n Register::D => self.d,\n Register::E => self.e,\n Register::F => self.f,\n Register::H => self.h,\n Register::L => self.l,\n Register::Flag => self.flag,\n _ => panic!(\"Register not available for 8-bit read\"),\n }\n }\n\n pub fn read_u16(&self, reg: Register) -> u16 {\n match reg {\n Register::AF => (self.a as u16) << 8 | self.f as u16,\n Register::BC => (self.b as u16) << 8 | self.c as u16,\n Register::DE => (self.d as u16) << 8 | self.e as u16,\n Register::HL => (self.h as u16) << 8 | self.l as u16,\n Register::SP => self.sp,\n Register::PC => self.pc,\n _ => panic!(\"Register not available for 16-bit read\"),\n }\n }\n\n pub fn write(&mut self, reg: Register, data: u8) {\n match reg {\n Register::A => self.a = data,\n Register::B => self.b = data,\n Register::C => self.c = data,\n Register::D => self.d = data,\n Register::E => self.e = data,\n Register::F => self.f = data,\n Register::H => self.h = data,\n Register::L => self.l = data,\n _ => panic!(\"Register not available for 8-bit write\"),\n }\n }\n\n pub fn write_u16(&mut self, reg: Register, data: u16) {\n let (hi, lo) = ((data & 0xFF00 >> 8) as u8, (data & 0xFF) as u8);\n match reg {\n Register::AF => {\n self.a = hi;\n self.f = lo;\n },\n Register::BC => {\n self.b = hi;\n self.c = lo;\n },\n Register::DE => {\n self.d = hi;\n self.c = lo;\n },\n Register::HL => {\n self.h = hi;\n self.l = lo;\n },\n Register::SP => self.sp = data,\n Register::PC => self.pc = data,\n _ => panic!(\"Register not available for 16-bit write\"),\n }\n }\n\n pub fn copy(&mut self, dst: Register, src: Register) {\n let data = self.read(src);\n self.write(dst, data);\n }\n\n pub fn copy_u16(&mut self, dst: Register, src: Register) {\n let data = self.read_u16(src);\n self.write_u16(dst, data);\n }\n\n pub fn set_flag(&mut self, flag: RegFlag, on: bool) {\n let bit = match flag {\n RegFlag::Zero => 0x80,\n RegFlag::Subtract => 0x40,\n RegFlag::HalfCarry => 0x20,\n RegFlag::Carry => 0x10,\n };\n if (on) {\n self.flag |= bit;\n } else {\n self.flag &= bit ^ 0xFF;\n }\n }\n\n pub fn get_flag(&self, flag: RegFlag) -> bool {\n let bit = match flag {\n RegFlag::Zero => 0x80,\n RegFlag::Subtract => 0x40,\n 
RegFlag::HalfCarry => 0x20,\n RegFlag::Carry => 0x10,\n };\n self.flag & bit != 0\n }\n\n pub fn advance_pc(&mut self) -> u16 {\n let pc = self.pc;\n self.pc += 1;\n pc\n }\n\n pub fn set_pc(&mut self, addr: u16) -> u16 {\n let pc = self.pc;\n self.pc = addr;\n pc\n }\n\n pub fn add_pc(&mut self, n: i8) -> u16 {\n let pc = self.pc;\n self.pc = ((self.pc as i32) + (n as i32)) as u16;\n pc\n }\n\n}\n\nfn main() {\n \/\/ Gather command line args\n let args: Vec<String> = std::env::args().collect();\n let mut opts = getopts::Options::new();\n let matches = match opts.parse(&args[1..]) {\n Ok(m) => { m },\n Err(e) => panic!(\"Error: {}\", e),\n };\n let input = if !matches.free.is_empty() {\n matches.free[0].clone()\n } else {\n println!(\"No input ROM\");\n return;\n };\n\n \/\/ Build graphics context and window\n let display = glium::glutin::WindowBuilder::new()\n .with_title(\"Gameboy Rust\".to_string())\n .with_gl(GlRequest::Specific(Api::OpenGl, (3, 2)))\n .build_glium()\n .unwrap();\n\n \/\/ Do machine initialization\n let mut cpu = Cpu::new();\n {\n let mut ram = cpu.get_ram();\n let mut romfile = match File::open(std::path::Path::new(&input)) {\n Ok(f) => { f },\n Err(e) => {\n println!(\"Error opening file: {}\", e);\n return;\n }\n };\n if let Err(e) = ram.load_rom(&mut romfile) {\n println!(\"Error loading rom data: {}\", e);\n return;\n }\n }\n\n \/\/ Initialize virtual LCD\n let mut lcd = render::GbDisplay::new(&display);\n\n let mut viewport = {\n let window = display.get_window();\n let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();\n render::calculate_viewport(width, height)\n };\n\n \/\/ Simulate CPU\n 'main: loop {\n \/\/ Collect user input\n for ev in display.poll_events() {\n match ev {\n Event::Closed => {\n break 'main;\n },\n Event::Resized(..) 
=> {\n let window = display.get_window();\n let (width, height) = window.unwrap().get_inner_size_pixels().unwrap();\n viewport = render::calculate_viewport(width, height);\n },\n _ => (),\n }\n }\n\n \/\/ Redraw screen\n let mut target = display.draw();\n target.clear_color(0.0, 0.0, 0.0, 0.0);\n lcd.clear_viewport(&mut target, viewport, (1.0, 1.0, 1.0, 0.0));\n match target.finish().err() {\n Some(SwapBuffersError::ContextLost) => {\n panic!(\"OpenGL contetxt lost!\");\n },\n Some(SwapBuffersError::AlreadySwapped) => {\n println!(\"Warning: OpenGL buffer already swapped\");\n },\n None => (),\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before>#[macro_use]\nextern crate glium;\nuse glium::glutin;\n\nuse std::collections::HashMap;\n\n#[macro_use]\nmod support;\n\nenum Shape {\n Clear([f32; 4]),\n Rectangle(f32, f32, f32, f32, [f32; 4]),\n Triangle(f32, f32, f32, f32, f32, f32, [f32; 4]),\n Polygon(Vec<(f32, f32)>, [f32; 4]),\n Line(f32, f32, f32, f32, [f32; 4]),\n}\n\npub struct Window {\n display: glium::Display,\n events_loop: glium::glutin::EventsLoop,\n color: [f32; 4],\n shapes: Vec<Shape>,\n keys: HashMap<glutin::VirtualKeyCode, bool>,\n}\n\nimpl Window {\n pub fn new(title: &str, width: u32, height: u32) -> Self {\n let events_loop = glutin::EventsLoop::new();\n let window = glutin::WindowBuilder::new()\n .with_title(title)\n .with_dimensions(width, height);\n let context = glutin::ContextBuilder::new();\n let display = glium::Display::new(window, context, &events_loop).unwrap();\n\n Window {\n display: display,\n events_loop: events_loop,\n color: [1.0, 1.0, 1.0, 1.0],\n shapes: Vec::new(),\n keys: HashMap::new(),\n }\n }\n\n pub fn start_loop<F>(&mut self, mut callback: F)\n where\n F: FnMut(&mut Window, f32) -> support::Action,\n {\n let program = support::shaders::load_program(&self.display).unwrap();\n let params = glium::DrawParameters {\n blend: glium::draw_parameters::Blend::alpha_blending(),\n ..Default::default()\n };\n\n use glium::Surface;\n support::start_loop(|delta| {\n\n let mut target = self.display.draw();\n\n let mut rtn = callback(self, delta);\n for shape in self.shapes.iter() {\n match shape {\n &Shape::Clear(color) => {\n target.clear_color(color[0], color[1], color[2], color[3])\n }\n &Shape::Rectangle(x1, y1, x2, y2, color) => {\n let vert_buff = support::buffer::rectangle_vert_buff(\n &self.display,\n x1,\n y1,\n x2,\n y2,\n color,\n ).unwrap();\n let indices =\n glium::index::NoIndices(glium::index::PrimitiveType::TriangleStrip);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n &Shape::Line(x1, y1, x2, y2, color) => {\n let vert_buff =\n support::buffer::line_vert_buff(&self.display, x1, y1, x2, y2, color)\n .unwrap();\n let indices =\n glium::index::NoIndices(glium::index::PrimitiveType::LineStrip);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n &Shape::Triangle(x1, y1, x2, y2, x3, y3, color) => {\n let vert_buff = support::buffer::triangle_vert_buff(\n &self.display,\n x1,\n y1,\n x2,\n y2,\n x3,\n y3,\n color,\n ).unwrap();\n let indices =\n glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n &Shape::Polygon(ref points, color) => {\n let vert_buff =\n support::buffer::poly_vert_buffer(&self.display, &points, color)\n .unwrap();\n let indices =\n 
glium::index::NoIndices(glium::index::PrimitiveType::TriangleFan);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n }\n }\n\n self.shapes.clear();\n\n target.finish().unwrap();\n\n let mut keys: HashMap<glutin::VirtualKeyCode, bool> = HashMap::new();\n self.events_loop.poll_events(|ev| match ev {\n glium::glutin::Event::WindowEvent { event, .. } => {\n match event {\n glutin::WindowEvent::Closed => rtn = support::Action::Stop,\n glutin::WindowEvent::KeyboardInput { input, .. } => {\n match input.virtual_keycode {\n Some(key) => {\n keys.insert(\n key,\n match input.state {\n glutin::ElementState::Pressed => true,\n glutin::ElementState::Released => false,\n },\n );\n }\n None => (),\n }\n }\n _ => (),\n }\n }\n _ => (),\n });\n\n for (key, value) in keys {\n self.keys.insert(key, value);\n }\n\n rtn\n });\n }\n\n pub fn clear_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {\n let shape = Shape::Clear([red, green, blue, alpha]);\n self.shapes.push(shape);\n }\n\n pub fn set_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {\n self.color = [red, green, blue, alpha];\n }\n\n pub fn draw_rect(&mut self, x: f32, y: f32, width: f32, height: f32) {\n let shape = Shape::Rectangle(x, y, x + width, y + height, self.color);\n self.shapes.push(shape);\n }\n\n pub fn draw_triangle(&mut self, x1: f32, y1: f32, x2: f32, y2: f32, x3: f32, y3: f32) {\n let shape = Shape::Triangle(x1, y1, x2, y2, x3, y3, self.color);\n self.shapes.push(shape);\n }\n\n pub fn draw_line(&mut self, x1: f32, y1: f32, x2: f32, y2: f32) {\n let shape = Shape::Line(x1, y1, x2, y2, self.color);\n self.shapes.push(shape);\n }\n\n pub fn draw(&mut self, points: &[(f32, f32)]) {\n let shape = Shape::Polygon(Vec::from(points), self.color);\n self.shapes.push(shape);\n }\n}\n\nfn main() {\n let mut window = Window::new(\"Hello World\", 640, 480);\n\n window.start_loop(|app, delta| {\n app.clear_color(0.1, 0.3, 0.2, 1.0);\n\n app.set_color(0.3, 0.1, 0.2, 1.0);\n app.draw_rect(0.0, 0.0, 10.0, 10.0);\n\n app.set_color(1.0, 0.0, 0.0, 1.0);\n app.draw_line(0.0, 0.0, 1.0, 1.0);\n\n app.set_color(0.0, 0.0, 1.0, 0.3);\n app.draw_triangle(0.0, 0.0, 1.0, 1.0, 1.0, 0.0);\n\n app.set_color(0.0, 1.0, 0.0, 0.3);\n app.draw(&[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0)]);\n\n let circle: Vec<(f32, f32)> = (0..360)\n .map(|d| {\n let r = (d as f32).to_radians();\n (r.cos(), r.sin())\n })\n .collect();\n app.set_color(1.0, 0.0, 0.0, 0.3);\n app.draw(&circle);\n\n support::Action::Continue\n });\n}\n<commit_msg>Removed rectangle and trignale draw<commit_after>#[macro_use]\nextern crate glium;\nuse glium::glutin;\n\nuse std::collections::HashMap;\n\n#[macro_use]\nmod support;\n\nenum Shape {\n Clear([f32; 4]),\n Polygon(Vec<(f32, f32)>, [f32; 4]),\n Line(f32, f32, f32, f32, [f32; 4]),\n}\n\npub struct Window {\n display: glium::Display,\n events_loop: glium::glutin::EventsLoop,\n color: [f32; 4],\n shapes: Vec<Shape>,\n keys: HashMap<glutin::VirtualKeyCode, bool>,\n}\n\nimpl Window {\n pub fn new(title: &str, width: u32, height: u32) -> Self {\n let events_loop = glutin::EventsLoop::new();\n let window = glutin::WindowBuilder::new()\n .with_title(title)\n .with_dimensions(width, height);\n let context = glutin::ContextBuilder::new();\n let display = glium::Display::new(window, context, &events_loop).unwrap();\n\n Window {\n display: display,\n events_loop: events_loop,\n color: [1.0, 1.0, 1.0, 1.0],\n shapes: Vec::new(),\n keys: HashMap::new(),\n }\n }\n\n pub fn 
start_loop<F>(&mut self, mut callback: F)\n where\n F: FnMut(&mut Window, f32) -> support::Action,\n {\n let program = support::shaders::load_program(&self.display).unwrap();\n let params = glium::DrawParameters {\n blend: glium::draw_parameters::Blend::alpha_blending(),\n ..Default::default()\n };\n\n use glium::Surface;\n support::start_loop(|delta| {\n\n let mut target = self.display.draw();\n\n let mut rtn = callback(self, delta);\n for shape in self.shapes.iter() {\n match shape {\n &Shape::Clear(color) => {\n target.clear_color(color[0], color[1], color[2], color[3])\n }\n &Shape::Line(x1, y1, x2, y2, color) => {\n let vert_buff =\n support::buffer::line_vert_buff(&self.display, x1, y1, x2, y2, color)\n .unwrap();\n let indices =\n glium::index::NoIndices(glium::index::PrimitiveType::LineStrip);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n &Shape::Polygon(ref points, color) => {\n let vert_buff =\n support::buffer::poly_vert_buffer(&self.display, &points, color)\n .unwrap();\n let indices =\n glium::index::NoIndices(glium::index::PrimitiveType::TriangleFan);\n target\n .draw(\n &vert_buff,\n &indices,\n &program,\n &glium::uniforms::EmptyUniforms,\n ¶ms,\n )\n .unwrap();\n }\n }\n }\n\n self.shapes.clear();\n\n target.finish().unwrap();\n\n let mut keys: HashMap<glutin::VirtualKeyCode, bool> = HashMap::new();\n self.events_loop.poll_events(|ev| match ev {\n glium::glutin::Event::WindowEvent { event, .. } => {\n match event {\n glutin::WindowEvent::Closed => rtn = support::Action::Stop,\n glutin::WindowEvent::KeyboardInput { input, .. } => {\n match input.virtual_keycode {\n Some(key) => {\n keys.insert(\n key,\n match input.state {\n glutin::ElementState::Pressed => true,\n glutin::ElementState::Released => false,\n },\n );\n }\n None => (),\n }\n }\n _ => (),\n }\n }\n _ => (),\n });\n\n for (key, value) in keys {\n self.keys.insert(key, value);\n }\n\n rtn\n });\n }\n\n pub fn clear_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {\n let shape = Shape::Clear([red, green, blue, alpha]);\n self.shapes.push(shape);\n }\n\n pub fn set_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {\n self.color = [red, green, blue, alpha];\n }\n\n pub fn draw_line(&mut self, x1: f32, y1: f32, x2: f32, y2: f32) {\n let shape = Shape::Line(x1, y1, x2, y2, self.color);\n self.shapes.push(shape);\n }\n\n pub fn draw(&mut self, points: &[(f32, f32)]) {\n let shape = Shape::Polygon(Vec::from(points), self.color);\n self.shapes.push(shape);\n }\n}\n\nfn main() {\n let mut window = Window::new(\"Hello World\", 640, 480);\n\n window.start_loop(|app, delta| {\n app.clear_color(0.1, 0.3, 0.2, 1.0);\n\n app.set_color(0.3, 0.1, 0.2, 1.0);\n app.draw(&[(0.0, 0.0), (10.0, 10.0)]);\n\n app.set_color(1.0, 0.0, 0.0, 1.0);\n app.draw_line(0.0, 0.0, 1.0, 1.0);\n\n app.set_color(0.0, 0.0, 1.0, 0.3);\n app.draw(&[(0.0, 0.0), (1.0, 1.0), (1.0, 0.0)]);\n\n app.set_color(0.0, 1.0, 0.0, 0.3);\n app.draw(&[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0)]);\n\n let circle: Vec<(f32, f32)> = (0..360)\n .map(|d| {\n let r = (d as f32).to_radians();\n (r.cos(), r.sin())\n })\n .collect();\n app.set_color(1.0, 0.0, 0.0, 0.3);\n app.draw(&circle);\n\n support::Action::Continue\n });\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>build: use Default for unwrap_or, dont use format!<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Auto merge of #10453 - notriddle:notriddle\/vec-extend, 
r=weihanglo<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>finish rust version<commit_after>use std::collections::HashMap;\n\nfn find_shortest_subarray(nums: Vec<u32>) -> u32 {\n \/\/tuple of top degree, and which numbers has this degree.\n let mut nums_degree: (u32, Vec<u32>) = (0, vec![]);\n \/\/key is number, value is degree and index of first and last index of this number\n let mut nums_index: HashMap<u32, (u32, (usize, usize))> = HashMap::new();\n\n for (ind, num) in nums.iter().enumerate() {\n if nums_index.contains_key(&num) {\n let mut val = nums_index.get_mut(num).unwrap();\n val.0 += 1;\n (val.1).1 = ind;\n } else {\n nums_index.insert(*num, (1, (ind, 0)));\n }\n }\n\n \/\/update nums_degree\n for (num, detls) in &nums_index {\n if detls.0 < nums_degree.0 {\n continue;\n }\n\n if detls.0 > nums_degree.0 {\n nums_degree = (detls.0, vec![*num]);\n } else if detls.0 == nums_degree.0 {\n nums_degree.1.push(*num);\n }\n }\n\n \/\/println!(\"{:?}\", nums_degree);\n\n let mut result: Vec<_> = nums_degree\n .1\n .into_iter()\n .map(|x| {\n let a = nums_index[&x].1;\n \/\/println!(\"{:?}\", a);\n return a.1 - a.0;\n })\n .collect();\n\n result.sort();\n result[0] as u32 + 1\n}\n\nfn main() {\n let testcase0 = vec![1, 2, 2, 3, 1];\n let testcase1 = vec![1, 2, 2, 3, 1, 4, 2];\n\n println!(\"{:?}\", find_shortest_subarray(testcase0));\n println!(\"{:?}\", find_shortest_subarray(testcase1));\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add id reporting in imag-create<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>add testcase for issue-70818<commit_after>\/\/ edition 2018\n\nfn d<T: Sized>(t: T) -> impl std::future::Future<Output = T> + Send { \/\/~ Error `T` cannot be sent between threads safely\n async { t }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>lexer: slightly simplify code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rename example<commit_after>\nuse native_windows_gui as nwg;\nuse native_windows_derive as nwd;\n\nuse nwd::{NwgUi, NwgPartial};\nuse nwg::NativeUi;\n\n\n#[derive(Default, NwgUi)]\npub struct ConfigDlg {\n #[nwg_control(size: (500, 400), position: (300, 300), title: \"DynLayout\")]\n #[nwg_events(OnInit: [ConfigDlg::init], OnResize: [ConfigDlg::size], OnWindowClose: [ConfigDlg::exit])]\n window: nwg::Window,\n\n #[nwg_layout(parent: window)]\n layout: nwg::DynLayout,\n\n #[nwg_control(position: (10, 30), size: (220, 330), collection: vec![\"People\"])]\n list: nwg::ListBox<&'static str>,\n\n #[nwg_control(text: \"Cancel\", position: (10, 350), size: (100, 25))]\n cancel_btn: nwg::Button,\n\n #[nwg_control(text: \"Ok\", position: (120, 350), size: (100, 25))]\n ok_btn: nwg::Button,\n\n #[nwg_control(text: \"Config\", position: (380, 350), size: (100, 25))]\n config_btn: nwg::Button,\n\n #[nwg_control(position: (240, 30), size: (240, 300))]\n frame: nwg::Frame,\n\n #[nwg_partial(parent: frame)]\n #[nwg_events((save_btn, OnButtonClick): [ConfigDlg::save])]\n controls: Controls,\n}\n\nimpl ConfigDlg {\n fn init(&self) {\n self.frame.set_visible(true);\n\n self.layout.add_child((0, 0), (50, 100), &self.list);\n self.layout.add_child((0, 100), (0, 0), &self.ok_btn);\n self.layout.add_child((0, 100), (0, 0), &self.cancel_btn);\n self.layout.add_child((100, 100), (0, 0), &self.config_btn);\n\n self.layout.add_child((50, 0), (50, 100), &self.frame);\n\n self.controls.init(&self.frame);\n\n self.layout.fit();\n }\n\n fn size(&self) {\n self.layout.fit();\n }\n\n fn save(&self) {\n 
nwg::simple_message(\"Saved!\", \"Data saved!\");\n }\n\n fn exit(&self) {\n nwg::stop_thread_dispatch();\n }\n}\n\n#[derive(Default, NwgPartial)]\npub struct Controls {\n #[nwg_layout]\n layout: nwg::DynLayout,\n\n #[nwg_control(text: \"Name:\", h_align: HTextAlign::Right, position: (10, 10), size: (100, 20))]\n label1: nwg::Label,\n\n #[nwg_control(text: \"Age:\", h_align: HTextAlign::Right, position: (10, 40), size: (100, 20))]\n label2: nwg::Label,\n\n #[nwg_control(text: \"Job:\", h_align: HTextAlign::Right, position: (10, 70), size: (100, 20))]\n label3: nwg::Label,\n\n #[nwg_control(text: \"John Doe\", position: (120, 10), size: (100, 20))]\n #[nwg_events(OnChar: [print_char(EVT_DATA)])]\n name_input: nwg::TextInput,\n\n #[nwg_control(text: \"75\", flags: \"NUMBER|VISIBLE\", position: (120, 40), size: (100, 20))]\n age_input: nwg::TextInput,\n\n #[nwg_control(text: \"Programmer\", position: (120, 70), size: (100, 25))]\n job_input: nwg::TextInput,\n\n #[nwg_control(text: \"Save\", position: (10, 250), size: (100, 25))]\n save_btn: nwg::Button,\n}\n\nimpl Controls {\n fn init(&self, frame: &nwg::Frame) {\n self.layout.parent(frame);\n\n self.layout.add_child((0, 0), (0, 0), &self.label1);\n self.layout.add_child((0, 0), (0, 0), &self.label2);\n self.layout.add_child((0, 0), (0, 0), &self.label3);\n\n self.layout.add_child((0, 0), (100, 0), &self.name_input);\n self.layout.add_child((0, 0), (100, 0), &self.age_input);\n self.layout.add_child((0, 0), (100, 0), &self.job_input);\n\n self.layout.add_child((0, 100), (0, 0), &self.save_btn);\n }\n}\n\nfn print_char(data: &nwg::EventData) {\n println!(\"{:?}\", data.on_char());\n}\n\nfn main() {\n nwg::init().expect(\"Failed to init Native Windows GUI\");\n \/\/nwg::Font::set_global_family(\"Segoe UI\").expect(\"Failed to set default font\");\n let mut font = nwg::Font::default();\n nwg::Font::builder()\n .family(\"MS Shell Dlg\")\n .size(15)\n .build(&mut font)\n .expect(\"Failed to build font\");\n nwg::Font::set_global_default(Some(font));\n\n let _ui = ConfigDlg::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n nwg::dispatch_thread_events();\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>vec: add a by-move iterator<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Make Cons a tuple struct<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>TODO: Metric prefixes and split up service module<commit_after><|endoftext|>"} {"text":"<commit_before>\nuse std;\nuse piston::{\n Event,\n Input,\n Update,\n UpdateArgs,\n};\nuse piston::input;\nuse {\n Action,\n AlwaysSucceed,\n Behavior,\n Failure,\n If,\n Not,\n Pressed,\n Released,\n Running,\n Select,\n Sequence,\n Status,\n Success,\n Wait,\n WaitForever,\n WhenAll,\n While,\n};\n\n\/\/\/ Keeps track of a behavior.\n#[deriving(Clone)]\npub enum State<A> {\n \/\/\/ Returns `Success` when button is pressed.\n PressedState(input::Button),\n \/\/\/ Returns `Success` when button is released.\n ReleasedState(input::Button),\n \/\/\/ Executes an action.\n ActionState(A),\n \/\/\/ Converts `Success` into `Failure` and vice versa.\n NotState(Box<State<A>>),\n \/\/\/ Ignores failures and always return `Success`.\n AlwaysSucceedState(Box<State<A>>),\n \/\/\/ Number of seconds we should wait and seconds we have waited.\n WaitState(f64, f64),\n \/\/\/ Waits forever.\n WaitForeverState,\n \/\/\/ Keeps track of an `If` behavior.\n \/\/\/ If status is `Running`, then it evaluates the condition.\n \/\/\/ If status is `Success`, then it evaluates the success 
behavior.\n \/\/\/ If status is `Failure`, then it evaluates the failure behavior.\n IfState(Box<Behavior<A>>, Box<Behavior<A>>, Status, Box<State<A>>),\n \/\/\/ Keeps track of a `Select` behavior.\n SelectState(Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of an `Sequence` behavior.\n SequenceState(Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of a `While` behavior.\n WhileState(Box<State<A>>, Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of an `WhenAll` behavior.\n WhenAllState(Vec<Option<State<A>>>),\n}\n\nimpl<A: Clone> State<A> {\n \/\/\/ Creates a state from a behavior.\n pub fn new(behavior: Behavior<A>) -> State<A> {\n match behavior {\n Pressed(button) => PressedState(button),\n Released(button) => ReleasedState(button),\n Action(action) => ActionState(action),\n Not(ev) => NotState(box State::new(*ev)),\n AlwaysSucceed(ev) => AlwaysSucceedState(box State::new(*ev)),\n Wait(dt) => WaitState(dt, 0.0),\n WaitForever => WaitForeverState,\n If(condition, success, failure) => {\n let state = State::new(*condition);\n IfState(success, failure, Running, box state)\n }\n Select(sel) => {\n let state = State::new(sel[0].clone());\n SelectState(sel, 0, box state)\n }\n Sequence(seq) => {\n let state = State::new(seq[0].clone());\n SequenceState(seq, 0, box state)\n }\n While(ev, rep) => {\n let state = State::new(rep[0].clone());\n WhileState(box State::new(*ev), rep, 0, box state)\n }\n WhenAll(all)\n => WhenAllState(all.move_iter().map(\n |ev| Some(State::new(ev))).collect()),\n }\n }\n\n \/\/\/ Updates the cursor that tracks an event.\n \/\/\/\n \/\/\/ The action need to return status and remaining delta time.\n \/\/\/ Returns status and the remaining delta time.\n pub fn update(\n &mut self,\n e: &Event,\n f: |dt: f64, action: &A| -> (Status, f64)\n ) -> (Status, f64) {\n match (e, self) {\n (&Input(input::Press(button_pressed)), &PressedState(button))\n if button_pressed == button => {\n \/\/ Button press is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Input(input::Release(button_released)), &ReleasedState(button))\n if button_released == button => {\n \/\/ Button release is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Update(UpdateArgs { dt }), &ActionState(ref action)) => {\n \/\/ Execute action.\n f(dt, action)\n }\n (_, &NotState(ref mut cur)) => {\n match cur.update(e, f) {\n (Running, dt) => (Running, dt),\n (Failure, dt) => (Success, dt),\n (Success, dt) => (Failure, dt),\n }\n }\n (_, &AlwaysSucceedState(ref mut cur)) => {\n match cur.update(e, f) {\n (Running, dt) => (Running, dt),\n (_, dt) => (Success, dt),\n }\n }\n (&Update(UpdateArgs { dt }), &WaitState(wait_t, ref mut t)) => {\n if *t + dt >= wait_t {\n let remaining_dt = *t + dt - wait_t;\n *t = wait_t;\n (Success, remaining_dt)\n } else {\n *t += dt;\n (Running, 0.0)\n }\n }\n (_, &IfState(ref success, ref failure,\n ref mut status, ref mut state)) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n \/\/ Run in a loop to evaluate success or failure with\n \/\/ remaining delta time after condition.\n loop {\n *status = match *status {\n Running => {\n match state.update(e, |dt, a| f(dt, a)) {\n (Running, dt) => { return (Running, dt); },\n (Success, dt) => {\n **state = State::new((**success).clone());\n remaining_dt = dt;\n Success\n }\n (Failure, dt) 
=> {\n **state = State::new((**failure).clone());\n remaining_dt = dt;\n Failure\n }\n }\n }\n _ => {\n return state.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n }, |dt, a| f(dt, a));\n }\n }\n }\n }\n (_, &SelectState(\n ref seq,\n ref mut i,\n ref mut cursor\n )) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cursor.update(\n match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Success, x) => { return (Success, x) }\n (Running, _) => { break }\n (Failure, new_dt) => { remaining_dt = new_dt }\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (Failure, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cursor = State::new(seq[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &SequenceState(\n ref seq,\n ref mut i,\n ref mut cursor\n )) => {\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cur.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n \/\/ If this is the last event, then the sequence succeeded.\n _ => if *i == seq.len() - 1 {\n return (Success, new_dt)\n } else {\n return (Running, 0.0)\n }\n }\n }\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (Success, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(seq[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &WhileState(\n ref mut ev_cursor,\n ref rep,\n ref mut i,\n ref mut cursor\n )) => {\n \/\/ If the event terminates, do not execute the loop.\n match ev_cursor.update(e, |dt, a| f(dt, a)) {\n (Running, _) => {}\n x => return x,\n };\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n loop {\n match cur.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n _ => return (Running, 0.0)\n }\n }\n };\n *i += 1;\n \/\/ If end of repeated events,\n \/\/ start over from the first one.\n if *i >= rep.len() { *i = 0; }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(rep[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &WhenAllState(ref mut cursors)) => {\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n \/\/ Count number of terminated events.\n let mut terminated = 0;\n for cur in cursors.mut_iter() {\n match *cur {\n None => 
terminated += 1,\n Some(ref mut cur) => {\n match cur.update(e, |dt, a| f(dt, a)) {\n (Running, _) => {},\n (Failure, new_dt) => return (Failure, new_dt),\n (Success, new_dt) => {\n min_dt = min_dt.min(new_dt);\n terminated += 1;\n }\n }\n }\n }\n }\n match terminated {\n \/\/ If there are no events, there is a whole 'dt' left.\n 0 if cursors.len() == 0 => (Success, match *e {\n Update(UpdateArgs { dt }) => dt,\n \/\/ Other kind of events happen instantly.\n _ => 0.0\n }),\n \/\/ If all events terminated, the least delta time is left.\n n if cursors.len() == n => (Success, min_dt),\n _ => (Running, 0.0)\n }\n }\n _ => (Running, 0.0)\n }\n }\n}\n<commit_msg>Fixed arguments<commit_after>\nuse std;\nuse piston::{\n Event,\n Input,\n Update,\n UpdateArgs,\n};\nuse piston::input;\nuse {\n Action,\n AlwaysSucceed,\n Behavior,\n Failure,\n If,\n Not,\n Pressed,\n Released,\n Running,\n Select,\n Sequence,\n Status,\n Success,\n Wait,\n WaitForever,\n WhenAll,\n While,\n};\n\n\/\/\/ Keeps track of a behavior.\n#[deriving(Clone)]\npub enum State<A> {\n \/\/\/ Returns `Success` when button is pressed.\n PressedState(input::Button),\n \/\/\/ Returns `Success` when button is released.\n ReleasedState(input::Button),\n \/\/\/ Executes an action.\n ActionState(A),\n \/\/\/ Converts `Success` into `Failure` and vice versa.\n NotState(Box<State<A>>),\n \/\/\/ Ignores failures and always return `Success`.\n AlwaysSucceedState(Box<State<A>>),\n \/\/\/ Number of seconds we should wait and seconds we have waited.\n WaitState(f64, f64),\n \/\/\/ Waits forever.\n WaitForeverState,\n \/\/\/ Keeps track of an `If` behavior.\n \/\/\/ If status is `Running`, then it evaluates the condition.\n \/\/\/ If status is `Success`, then it evaluates the success behavior.\n \/\/\/ If status is `Failure`, then it evaluates the failure behavior.\n IfState(Box<Behavior<A>>, Box<Behavior<A>>, Status, Box<State<A>>),\n \/\/\/ Keeps track of a `Select` behavior.\n SelectState(Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of an `Sequence` behavior.\n SequenceState(Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of a `While` behavior.\n WhileState(Box<State<A>>, Vec<Behavior<A>>, uint, Box<State<A>>),\n \/\/\/ Keeps track of an `WhenAll` behavior.\n WhenAllState(Vec<Option<State<A>>>),\n}\n\nimpl<A: Clone> State<A> {\n \/\/\/ Creates a state from a behavior.\n pub fn new(behavior: Behavior<A>) -> State<A> {\n match behavior {\n Pressed(button) => PressedState(button),\n Released(button) => ReleasedState(button),\n Action(action) => ActionState(action),\n Not(ev) => NotState(box State::new(*ev)),\n AlwaysSucceed(ev) => AlwaysSucceedState(box State::new(*ev)),\n Wait(dt) => WaitState(dt, 0.0),\n WaitForever => WaitForeverState,\n If(condition, success, failure) => {\n let state = State::new(*condition);\n IfState(success, failure, Running, box state)\n }\n Select(sel) => {\n let state = State::new(sel[0].clone());\n SelectState(sel, 0, box state)\n }\n Sequence(seq) => {\n let state = State::new(seq[0].clone());\n SequenceState(seq, 0, box state)\n }\n While(ev, rep) => {\n let state = State::new(rep[0].clone());\n WhileState(box State::new(*ev), rep, 0, box state)\n }\n WhenAll(all)\n => WhenAllState(all.move_iter().map(\n |ev| Some(State::new(ev))).collect()),\n }\n }\n\n \/\/\/ Updates the cursor that tracks an event.\n \/\/\/\n \/\/\/ The action need to return status and remaining delta time.\n \/\/\/ Returns status and the remaining delta time.\n pub fn update(\n &mut self,\n e: &Event,\n f: |dt: f64, 
action: &A| -> (Status, f64)\n ) -> (Status, f64) {\n match (e, self) {\n (&Input(input::Press(button_pressed)), &PressedState(button))\n if button_pressed == button => {\n \/\/ Button press is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Input(input::Release(button_released)), &ReleasedState(button))\n if button_released == button => {\n \/\/ Button release is considered to happen instantly.\n \/\/ There is no remaining delta time because this is input event.\n (Success, 0.0)\n }\n (&Update(UpdateArgs { dt }), &ActionState(ref action)) => {\n \/\/ Execute action.\n f(dt, action)\n }\n (_, &NotState(ref mut cur)) => {\n match cur.update(e, f) {\n (Running, dt) => (Running, dt),\n (Failure, dt) => (Success, dt),\n (Success, dt) => (Failure, dt),\n }\n }\n (_, &AlwaysSucceedState(ref mut cur)) => {\n match cur.update(e, f) {\n (Running, dt) => (Running, dt),\n (_, dt) => (Success, dt),\n }\n }\n (&Update(UpdateArgs { dt }), &WaitState(wait_t, ref mut t)) => {\n if *t + dt >= wait_t {\n let remaining_dt = *t + dt - wait_t;\n *t = wait_t;\n (Success, remaining_dt)\n } else {\n *t += dt;\n (Running, 0.0)\n }\n }\n (_, &IfState(ref success, ref failure,\n ref mut status, ref mut state)) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n \/\/ Run in a loop to evaluate success or failure with\n \/\/ remaining delta time after condition.\n loop {\n *status = match *status {\n Running => {\n match state.update(e, |dt, a| f(dt, a)) {\n (Running, dt) => { return (Running, dt); },\n (Success, dt) => {\n **state = State::new((**success).clone());\n remaining_dt = dt;\n Success\n }\n (Failure, dt) => {\n **state = State::new((**failure).clone());\n remaining_dt = dt;\n Failure\n }\n }\n }\n _ => {\n return state.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n }, |dt, a| f(dt, a));\n }\n }\n }\n }\n (_, &SelectState(\n ref seq,\n ref mut i,\n ref mut cursor\n )) => {\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cursor.update(\n match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Success, x) => { return (Success, x) }\n (Running, _) => { break }\n (Failure, new_dt) => { remaining_dt = new_dt }\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (Failure, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cursor = State::new(seq[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &SequenceState(ref seq, ref mut i, ref mut cursor)) => {\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n while *i < seq.len() {\n match cur.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n \/\/ If this is the last event, then the sequence succeeded.\n _ => if *i == 
seq.len() - 1 {\n return (Success, new_dt)\n } else {\n return (Running, 0.0)\n }\n }\n }\n };\n *i += 1;\n \/\/ If end of sequence,\n \/\/ return the 'dt' that is left.\n if *i >= seq.len() { return (Success, remaining_dt); }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(seq[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &WhileState(ref mut ev_cursor, ref rep, ref mut i,\n ref mut cursor)) => {\n \/\/ If the event terminates, do not execute the loop.\n match ev_cursor.update(e, |dt, a| f(dt, a)) {\n (Running, _) => {}\n x => return x,\n };\n let cur = cursor;\n let mut remaining_dt = match *e {\n Update(UpdateArgs { dt }) => dt,\n _ => 0.0,\n };\n let mut remaining_e;\n loop {\n match cur.update(match *e {\n Update(_) => {\n remaining_e = Update(UpdateArgs {\n dt: remaining_dt\n });\n &remaining_e\n }\n _ => e\n },\n |dt, a| f(dt, a)) {\n (Failure, x) => return (Failure, x),\n (Running, _) => { break },\n (Success, new_dt) => {\n remaining_dt = match *e {\n \/\/ Change update event with remaining delta time.\n Update(_) => new_dt,\n \/\/ Other events are 'consumed' and not passed to next.\n _ => return (Running, 0.0)\n }\n }\n };\n *i += 1;\n \/\/ If end of repeated events,\n \/\/ start over from the first one.\n if *i >= rep.len() { *i = 0; }\n \/\/ Create a new cursor for next event.\n \/\/ Use the same pointer to avoid allocation.\n **cur = State::new(rep[*i].clone());\n }\n (Running, 0.0)\n }\n (_, &WhenAllState(ref mut cursors)) => {\n \/\/ Get the least delta time left over.\n let mut min_dt = std::f64::MAX_VALUE;\n \/\/ Count number of terminated events.\n let mut terminated = 0;\n for cur in cursors.mut_iter() {\n match *cur {\n None => terminated += 1,\n Some(ref mut cur) => {\n match cur.update(e, |dt, a| f(dt, a)) {\n (Running, _) => {},\n (Failure, new_dt) => return (Failure, new_dt),\n (Success, new_dt) => {\n min_dt = min_dt.min(new_dt);\n terminated += 1;\n }\n }\n }\n }\n }\n match terminated {\n \/\/ If there are no events, there is a whole 'dt' left.\n 0 if cursors.len() == 0 => (Success, match *e {\n Update(UpdateArgs { dt }) => dt,\n \/\/ Other kind of events happen instantly.\n _ => 0.0\n }),\n \/\/ If all events terminated, the least delta time is left.\n n if cursors.len() == n => (Success, min_dt),\n _ => (Running, 0.0)\n }\n }\n _ => (Running, 0.0)\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Fix(trash): fix send to trash on windows<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Get started with importing the tcp scheme to userspace<commit_after>use redox::Box;\nuse redox::fs::file::File;\nuse redox::io::{Read, Write, Seek, SeekFrom};\nuse redox::mem;\nuse redox::net::*;\nuse redox::ptr;\nuse redox::rand;\nuse redox::slice;\nuse redox::str;\nuse redox::{String, ToString};\nuse redox::to_num::*;\nuse redox::Vec;\n\n\/\/\/ TCP resource\npub struct Resource {\n ip: Box<Resource>,\n peer_addr: IPv4Addr,\n peer_port: u16,\n host_port: u16,\n sequence: u32,\n acknowledge: u32,\n}\n\nimpl Resource {\n pub fn dup(&self) -> Option<Box<Self>> {\n match self.ip.dup() {\n Some(ip) => Some(box Resource {\n ip: ip,\n peer_addr: self.peer_addr,\n peer_port: self.peer_port,\n host_port: self.host_port,\n sequence: self.sequence,\n acknowledge: self.acknowledge,\n }),\n None => None\n }\n }\n\n pub fn path(&self, buf: &mut [u8]) -> Option<usize> {\n let path = format!(\"tcp:\/\/{}:{}\/{}\", self.peer_addr.to_string(), self.peer_port, self.host_port);\n\n let mut i = 0;\n for b in 
path.bytes() {\n if i < buf.len() {\n buf[i] = b;\n i += 1;\n } else {\n break;\n }\n }\n\n Some(i)\n }\n\n pub fn read(&mut self, buf: &mut [u8]) -> Option<usize> {\n \/*\n loop {\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(segment) = TCP::from_bytes(bytes) {\n if (segment.header.flags.get() & (TCP_PSH | TCP_SYN | TCP_ACK)) ==\n (TCP_PSH | TCP_ACK) &&\n segment.header.dst.get() == self.host_port &&\n segment.header.src.get() == self.peer_port {\n \/\/Send ACK\n self.sequence = segment.header.ack_num.get();\n self.acknowledge = segment.header.sequence.get() +\n segment.data.len() as u32;\n let mut tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new(((mem::size_of::<TCPHeader>() << 10) & 0xF000) as u16 | TCP_ACK),\n window_size: n16::new(65535),\n checksum: Checksum {\n data: 0\n },\n urgent_pointer: n16::new(0)\n },\n options: Vec::new(),\n data: Vec::new()\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len = n16::new((mem::size_of::<TCPHeader>() + tcp.options.len() + tcp.data.len()) as u16);\n tcp.header.checksum.data = Checksum::compile(\n Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize, mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize, mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize, mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len())\n );\n }\n\n self.ip.write(&tcp.to_bytes().as_slice());\n\n vec.push_all(&segment.data);\n return Some(segment.data.len());\n }\n }\n }\n None => return None,\n }\n }\n *\/\n None\n }\n\n pub fn write(&mut self, buf: &[u8]) -> Option<usize> {\n let tcp_data = unsafe { Vec::from_raw_buf(buf.as_ptr(), buf.len()) };\n\n let mut tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new((((mem::size_of::<TCPHeader>()) << 10) & 0xF000) as u16 | TCP_PSH |\n TCP_ACK),\n window_size: n16::new(65535),\n checksum: Checksum { data: 0 },\n urgent_pointer: n16::new(0),\n },\n options: Vec::new(),\n data: tcp_data,\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len = n16::new((mem::size_of::<TCPHeader>() + tcp.data.len()) as u16);\n tcp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize,\n mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len()));\n }\n\n match self.ip.write(&tcp.to_bytes().as_slice()) {\n Some(size) => loop { \/\/ Wait for ACK\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(segment) = 
TCP::from_bytes(bytes) {\n if segment.header.dst.get() == self.host_port &&\n segment.header.src.get() == self.peer_port {\n if (segment.header.flags.get() & (TCP_PSH | TCP_SYN | TCP_ACK)) ==\n TCP_ACK {\n self.sequence = segment.header.ack_num.get();\n self.acknowledge = segment.header.sequence.get();\n return Some(size);\n } else {\n return None;\n }\n }\n }\n }\n None => return None,\n }\n },\n None => return None,\n }\n }\n\n pub fn seek(&mut self, pos: SeekFrom) -> Option<usize> {\n None\n }\n\n pub fn sync(&mut self) -> bool {\n self.ip.sync()\n }\n}\n\n\/*\nimpl Resource {\n \/\/\/ Etablish client\n pub fn client_establish(&mut self) -> bool {\n \/\/ Send SYN\n let mut tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new(((mem::size_of::<TCPHeader>() << 10) & 0xF000) as u16 | TCP_SYN),\n window_size: n16::new(65535),\n checksum: Checksum { data: 0 },\n urgent_pointer: n16::new(0),\n },\n options: Vec::new(),\n data: Vec::new(),\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len =\n n16::new((mem::size_of::<TCPHeader>() + tcp.options.len() + tcp.data.len()) as u16);\n tcp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize,\n mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len()));\n }\n\n match self.ip.write(&tcp.to_bytes().as_slice()) {\n Some(_) => loop { \/\/ Wait for SYN-ACK\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(segment) = TCP::from_bytes(bytes) {\n if segment.header.dst.get() == self.host_port &&\n segment.header.src.get() == self.peer_port {\n if (segment.header.flags.get() & (TCP_PSH | TCP_SYN | TCP_ACK)) ==\n (TCP_SYN | TCP_ACK) {\n self.sequence = segment.header.ack_num.get();\n self.acknowledge = segment.header.sequence.get();\n\n self.acknowledge += 1;\n tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new(((mem::size_of::<TCPHeader>() << 10) & 0xF000) as u16 | TCP_ACK),\n window_size: n16::new(65535),\n checksum: Checksum {\n data: 0\n },\n urgent_pointer: n16::new(0)\n },\n options: Vec::new(),\n data: Vec::new()\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len = n16::new((mem::size_of::<TCPHeader>() + tcp.options.len() + tcp.data.len()) as u16);\n tcp.header.checksum.data = Checksum::compile(\n Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize, mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize, mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize, mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize, mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize, mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n 
Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len())\n );\n }\n\n self.ip.write(&tcp.to_bytes().as_slice());\n\n return true;\n } else {\n return false;\n }\n }\n }\n }\n None => return false,\n }\n },\n None => return false,\n }\n }\n\n \/\/\/ Try to establish a server connection\n pub fn server_establish(&mut self, syn: TCP) -> bool {\n \/\/Send SYN-ACK\n self.acknowledge += 1;\n let mut tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new(((mem::size_of::<TCPHeader>() << 10) & 0xF000) as u16 | TCP_SYN |\n TCP_ACK),\n window_size: n16::new(65535),\n checksum: Checksum { data: 0 },\n urgent_pointer: n16::new(0),\n },\n options: Vec::new(),\n data: Vec::new(),\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len =\n n16::new((mem::size_of::<TCPHeader>() + tcp.options.len() + tcp.data.len()) as u16);\n tcp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize,\n mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len()));\n }\n\n match self.ip.write(&tcp.to_bytes().as_slice()) {\n Some(_) => loop { \/\/ Wait for ACK\n let mut bytes: Vec<u8> = Vec::new();\n match self.ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(segment) = TCP::from_bytes(bytes) {\n if segment.header.dst.get() == self.host_port &&\n segment.header.src.get() == self.peer_port {\n if (segment.header.flags.get() & (TCP_PSH | TCP_SYN | TCP_ACK)) ==\n TCP_ACK {\n self.sequence = segment.header.ack_num.get();\n self.acknowledge = segment.header.sequence.get();\n return true;\n } else {\n return false;\n }\n }\n }\n }\n None => return false,\n }\n },\n None => return false,\n }\n }\n}\n\nimpl Drop for Resource {\n fn drop(&mut self) {\n \/\/Send FIN-ACK\n let mut tcp = TCP {\n header: TCPHeader {\n src: n16::new(self.host_port),\n dst: n16::new(self.peer_port),\n sequence: n32::new(self.sequence),\n ack_num: n32::new(self.acknowledge),\n flags: n16::new((((mem::size_of::<TCPHeader>()) << 10) & 0xF000) as u16 | TCP_FIN |\n TCP_ACK),\n window_size: n16::new(65535),\n checksum: Checksum { data: 0 },\n urgent_pointer: n16::new(0),\n },\n options: Vec::new(),\n data: Vec::new(),\n };\n\n unsafe {\n let proto = n16::new(0x06);\n let segment_len =\n n16::new((mem::size_of::<TCPHeader>() + tcp.options.len() + tcp.data.len()) as u16);\n tcp.header.checksum.data =\n Checksum::compile(Checksum::sum((&IP_ADDR as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&self.peer_addr as *const IPv4Addr) as usize,\n mem::size_of::<IPv4Addr>()) +\n Checksum::sum((&proto as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&segment_len as *const n16) as usize,\n mem::size_of::<n16>()) +\n Checksum::sum((&tcp.header as *const TCPHeader) as usize,\n mem::size_of::<TCPHeader>()) +\n Checksum::sum(tcp.options.as_ptr() as usize, tcp.options.len()) +\n Checksum::sum(tcp.data.as_ptr() as usize, tcp.data.len()));\n }\n\n self.ip.write(&tcp.to_bytes().as_slice());\n 
}\n}\n*\/\n\n\/\/\/ A TCP scheme\npub struct Scheme;\n\nimpl Scheme {\n pub fn new() -> Box<Self> {\n box Scheme\n }\n\n pub fn open(&mut self, url: &str) -> Option<Box<Resource>> {\n if url.host().len() > 0 && url.port().len() > 0 {\n let peer_addr = IPv4Addr::from_string(&url.host());\n let peer_port = url.port().to_num() as u16;\n let host_port = (random::rand() % 32768 + 32768) as u16;\n\n if let Some(ip) = File::open(&(\"ip:\/\/\".to_string() + peer_addr.to_string() + \"\/6\")) {\n let mut ret = box TCPResource {\n ip: ip,\n peer_addr: peer_addr,\n peer_port: peer_port,\n host_port: host_port,\n sequence: random::rand() as u32,\n acknowledge: 0,\n };\n\n if ret.client_establish() {\n return Some(ret);\n }\n }\n } else if url.path().len() > 0 {\n let host_port = url.path().to_num() as u16;\n\n while let Some(mut ip) = File::open(\"ip:\/\/\/6\") {\n let mut bytes: Vec<u8> = Vec::new();\n match ip.read_to_end(&mut bytes) {\n Some(_) => {\n if let Some(segment) = TCP::from_bytes(bytes) {\n if segment.header.dst.get() == host_port &&\n (segment.header.flags.get() & (TCP_PSH | TCP_SYN | TCP_ACK)) ==\n TCP_SYN {\n let peer_addr = IPv4Addr::from_string(&ip.url().host());\n\n let mut ret = box TCPResource {\n ip: ip,\n peer_addr: peer_addr,\n peer_port: segment.header.src.get(),\n host_port: host_port,\n sequence: random::rand() as u32,\n acknowledge: segment.header.sequence.get(),\n };\n\n if ret.server_establish(segment) {\n return Some(ret);\n }\n }\n }\n }\n None => break,\n }\n }\n } else {\n \/*\n debug::d(\"TCP: No remote endpoint or local port provided\\n\");\n *\/\n }\n\n None\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add long body tests<commit_after>extern crate http_parser;\n\nuse http_parser::{HttpParserType, HttpParser, HttpParserCallback};\n\nmod helper;\n\n#[test]\nfn test_no_overflow_long_body_request() {\n test_no_overflow_long_body(HttpParserType::HttpRequest, 1000);\n test_no_overflow_long_body(HttpParserType::HttpRequest, 100000);\n}\n\n#[test]\nfn test_no_overflow_long_body_response() {\n test_no_overflow_long_body(HttpParserType::HttpResponse, 1000);\n test_no_overflow_long_body(HttpParserType::HttpResponse, 100000);\n}\n\nfn test_no_overflow_long_body(tp: HttpParserType, length: u64) {\n let mut hp = HttpParser::new(tp);\n let cb = helper::CallbackEmpty;\n \n let line = if tp == HttpParserType::HttpRequest {\n \"POST \/ HTTP\/1.0\"\n } else {\n \"HTTP\/1.0 200 OK\"\n };\n\n let headers = format!(\"{}\\r\\nConnection: Keep-Alive\\r\\nContent-Length: {}\\r\\n\\r\\n\",\n line, length);\n\n let mut parsed = hp.execute(cb, headers.as_bytes());\n assert_eq!(parsed, headers.len() as u64); \n\n for i in range(0, length) {\n parsed = hp.execute(cb, [b'a'].as_slice());\n assert_eq!(parsed, 1 as u64);\n }\n\n parsed = hp.execute(cb, headers.as_bytes());\n assert_eq!(parsed, headers.len() as u64);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic API tests<commit_after>extern crate cryptocurrency;\nextern crate exonum;\nextern crate exonum_harness;\nextern crate iron;\n\nuse exonum::crypto::{self, PublicKey, SecretKey};\nuse exonum::messages::Message;\nuse exonum_harness::{TestHarness, HarnessApi};\n\nuse cryptocurrency::{TxCreateWallet, TxTransfer, TransactionResponse, Wallet, blockchain};\n\nfn create_wallet(api: &HarnessApi, name: &str) -> (TxCreateWallet, SecretKey) {\n let (pubkey, key) = crypto::gen_keypair();\n \/\/ Create a presigned transaction\n let tx = TxCreateWallet::new(&pubkey, name, &key);\n\n let tx_info: TransactionResponse = 
api.post(\"cryptocurrency\", \"v1\/wallets\/transaction\", &tx);\n assert_eq!(tx_info.tx_hash, tx.hash());\n\n (tx, key)\n}\n\nfn get_wallet(api: &HarnessApi, pubkey: &PublicKey) -> Wallet {\n api.get(\n \"cryptocurrency\",\n &format!(\"v1\/wallet\/{}\", pubkey.to_string()),\n )\n}\n\n#[test]\nfn test_create_wallet() {\n let mut harness = TestHarness::new(blockchain());\n let api = harness.api();\n let (tx, _) = create_wallet(&api, \"Alice\");\n\n harness.create_block();\n\n \/\/ Check that the user indeed is persisted by the service\n let wallet = get_wallet(&api, tx.pub_key());\n assert_eq!(wallet.pub_key(), tx.pub_key());\n assert_eq!(wallet.name(), tx.name());\n assert_eq!(wallet.balance(), 100);\n}\n\n#[test]\nfn test_transfer() {\n let mut harness = TestHarness::new(blockchain());\n let api = harness.api();\n let (tx_alice, key_alice) = create_wallet(&api, \"Alice\");\n let (tx_bob, _) = create_wallet(&api, \"Bob\");\n\n harness.create_block();\n\n \/\/ Check that the initial Alice's and Bob's balances persisted by the service\n let wallet = get_wallet(&api, tx_alice.pub_key());\n assert_eq!(wallet.balance(), 100);\n let wallet = get_wallet(&api, tx_bob.pub_key());\n assert_eq!(wallet.balance(), 100);\n\n \/\/ Transfer funds\n let tx = TxTransfer::new(\n tx_alice.pub_key(),\n tx_bob.pub_key(),\n 10, \/\/ amount\n 0, \/\/ seed\n &key_alice,\n );\n let tx_info: TransactionResponse = api.post(\"cryptocurrency\", \"v1\/wallets\/transaction\", &tx);\n assert_eq!(tx_info.tx_hash, tx.hash());\n\n harness.create_block();\n\n let wallet = get_wallet(&api, tx_alice.pub_key());\n assert_eq!(wallet.balance(), 90);\n let wallet = get_wallet(&api, tx_bob.pub_key());\n assert_eq!(wallet.balance(), 110);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>fix: Adapt to new test requirements<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>practice<commit_after>fn main() {\n println!(\"practice\")\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add basic gpio driver<commit_after>#![allow(dead_code)]\npub const PORTF_BASE: *const u32 = 0x40025000 as (*const u32);\n\npub const PIN0: u32 = (1 << 0);\npub const PIN1: u32 = (1 << 1);\npub const PIN2: u32 = (1 << 2);\npub const PIN3: u32 = (1 << 3);\npub const PIN4: u32 = (1 << 4);\npub const PIN5: u32 = (1 << 5);\npub const PIN6: u32 = (1 << 6);\npub const PIN7: u32 = (1 << 7);\n\nextern {\n fn GPIOPinTypeGPIOOutput(base: *const u32, mask: u32);\n fn GPIOPinWrite(base: *const u32, mask: u32, value: u32);\n}\n\npub fn make_output(base: *const u32, mask: u32) {\n unsafe {\n GPIOPinTypeGPIOOutput(base, mask);\n }\n}\n\npub fn write(base: *const u32, mask: u32, value: u32) {\n unsafe {\n GPIOPinWrite(base, mask, value);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add the file I've been forgetting to add last two commits<commit_after>use std::cell::{RefCell, UnsafeCell};\r\nuse std::collections::HashMap;\r\nuse std::collections::hash_map::Entry;\r\nuse std::fmt::{Display, Error, Formatter};\r\nuse std::rc::Rc;\r\nuse super::parser::{Builder, ParserError, ShowType};\r\n\r\ntype RcCell<T> = Rc<RefCell<T>>;\r\n\r\nstruct DocBuilder;\r\nimpl Builder for DocBuilder {\r\n type Table = Table;\r\n type Array = Array;\r\n type Key = Key;\r\n type Value = Value;\r\n\r\n fn table(lead_ws: &str)-> Table { Table::new(lead_ws.to_string()) }\r\n fn array(vals: Vec<Value>, lead_ws: &str) -> Array { unimplemented!() }\r\n fn key(val: String, lead_ws: &str) -> Key { Key::new(val) }\r\n fn value_string(parsed: String, raw: &str, 
lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_integer(parsed: i64, raw: &str, lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_float(parsed: f64, raw: &str, lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_boolean(parsed: bool, raw: &str, lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_datetime(parsed: String, raw: &str, lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_array(parsed: Array, raw: &str, lead_ws: &str) -> Value { unimplemented!() }\r\n fn value_table(parsed: Table, raw: &str, lead_ws: &str) -> Value { Value::Table(parsed) }\r\n fn insert(table: &mut Table, key: Key, v: Value) -> bool {\r\n table.insert(key, Box::new(UnsafeCell::new(v)))\r\n }\r\n fn insert_container<'b>(mut cur: &'b mut Table, keys: &[String],\r\n key_lo: usize, key_hi: usize)\r\n -> Result<&'b mut Table, ParserError> {\r\n for part in keys {\r\n let tmp = cur;\r\n\r\n if tmp.map.contains_key(part) {\r\n match unsafe { &mut *tmp.map.get_mut(part).unwrap().get() } {\r\n &mut Value::Table(ref mut table) => {\r\n cur = table;\r\n continue\r\n }\r\n &mut Value::Array(ref mut array) => {\r\n match array.vec.last_mut() {\r\n Some(&mut Value::Table(ref mut table)) => cur = table,\r\n _ => {\r\n return Err(ParserError {\r\n lo: key_lo,\r\n hi: key_hi,\r\n desc: format!(\"array `{}` does not contain \\\r\n tables\", part)\r\n });\r\n }\r\n }\r\n continue\r\n }\r\n _ => {\r\n return Err(ParserError {\r\n lo: key_lo,\r\n hi: key_hi,\r\n desc: format!(\"key `{}` was not previously a table\",\r\n part)\r\n });\r\n }\r\n }\r\n }\r\n\r\n \/\/ Initialize an empty table as part of this sub-key\r\n tmp.insert(DocBuilder::key(part.clone(), \"\"), Box::new(UnsafeCell::new(Value::Table(Table::null()))));\r\n match unsafe { &mut *tmp.map.get_mut(part).unwrap().get() } {\r\n &mut Value::Table(ref mut inner) => cur = inner,\r\n _ => unreachable!(),\r\n }\r\n }\r\n Ok(cur)\r\n\r\n }\r\n fn get_table<'b>(t: &'b mut Table, key: &'b str) -> Option<&'b mut Table> { \r\n t.map.get_mut::<'b>(key).and_then(|x| match unsafe { &mut *x.get() } { &mut Value::Table(ref mut s) => Some(s), _ => None })\r\n }\r\n fn get_array<'b>(t: &'b mut Table, key: &'b str) -> Option<&'b mut Array> { unimplemented!() }\r\n fn contains_table(t: &Table) -> bool { \r\n t.map.values().any(|v| if let &mut Value::Table(_) = unsafe { &mut *v.get() } { true } else { false })\r\n }\r\n fn merge(table: &mut Table, value: Table) -> Result<(), String> {\r\n for (k, v) in value.map.into_iter() {\r\n if !table.insert(DocBuilder::key(k.clone(), \"\"), v) {\r\n return Err(k);\r\n }\r\n }\r\n Ok(())\r\n }\r\n fn push<'b>(vec: &'b mut Array, value: Value) -> Result<(), &'b str> { unimplemented!() }\r\n fn set_trailing_aux(table: &mut Table, aux: &str) {\r\n table.trail = aux.to_string();\r\n }\r\n}\r\n\r\npub struct Table {\r\n map: HashMap<String, Box<UnsafeCell<Value>>>,\r\n vec: Vec<(Key, *mut UnsafeCell<Value>)>,\r\n lead: Option<String>, \/\/ anonymous tables contain no text\r\n trail: String\r\n}\r\n\r\nimpl Table {\r\n fn new(s: String) -> Table {\r\n Table {\r\n map: HashMap::new(),\r\n vec: Vec::new(),\r\n lead: Some(s),\r\n trail: String::new()\r\n }\r\n }\r\n\r\n fn null() -> Table {\r\n Table {\r\n map: HashMap::new(),\r\n vec: Vec::new(),\r\n lead: None,\r\n trail: String::new()\r\n }\r\n }\r\n\r\n fn insert(&mut self, k: Key, mut value: Box<UnsafeCell<Value>>) -> bool {\r\n let key_string = k.text.clone();\r\n match self.map.entry(key_string) {\r\n Entry::Vacant(entry) => {\r\n self.vec.push((k, &mut *value));\r\n 
entry.insert(value);\r\n true\r\n },\r\n Entry::Occupied(_) => false\r\n }\r\n }\r\n\r\n fn print(&self, buf: &mut String) {\r\n buf.push_str(self.lead.as_ref().unwrap());\r\n for &(ref k, ref v) in self.vec.iter() {\r\n buf.push_str(\"[\");\r\n k.print(buf);\r\n buf.push_str(\"]\");\r\n unsafe { &*(**v).get() }.print(buf);\r\n }\r\n buf.push_str(&*self.trail);\r\n }\r\n}\r\n\r\npub struct Array {\r\n vec: Vec<Value>\r\n}\r\n\r\n\r\n#[derive(PartialEq, Eq, Hash)]\r\nenum KeyKind {\r\n Plain,\r\n Table\r\n}\r\n#[derive(PartialEq, Eq, Hash)]\r\npub struct Key {\r\n kind: KeyKind,\r\n text: String,\r\n lead: String,\r\n trail: String\r\n}\r\nimpl Key {\r\n fn new(s: String) -> Key {\r\n Key {\r\n kind: KeyKind::Plain,\r\n text: s,\r\n lead: String::new(),\r\n trail: String::new()\r\n }\r\n }\r\n fn new_table(s: String) -> Key {\r\n Key {\r\n kind: KeyKind::Table,\r\n text: s,\r\n lead: String::new(),\r\n trail: String::new()\r\n }\r\n }\r\n\r\n fn print(&self, buf: &mut String) {\r\n buf.push_str(&*self.lead);\r\n buf.push_str(&*self.text);\r\n buf.push_str(&*self.trail);\r\n }\r\n}\r\n\r\nimpl Display for Key {\r\n fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {\r\n unimplemented!()\r\n }\r\n}\r\n\r\npub enum Value {\r\n String(String),\r\n Integer(i64),\r\n Float(f64),\r\n Boolean(bool),\r\n Datetime(String),\r\n Array(Array),\r\n Table(Table),\r\n}\r\nimpl ShowType for Value {\r\n fn type_str(&self) -> &'static str {\r\n unimplemented!()\r\n }\r\n}\r\nimpl Value {\r\n fn print(&self, buf: &mut String) {\r\n match self {\r\n &Value::Table(ref t) => t.print(buf),\r\n _ => unimplemented!()\r\n }\r\n }\r\n}\r\n\r\n#[cfg(test)]\r\nmod test {\r\n use super::DocBuilder;\r\n use super::super::parser::ParseSession;\r\n\r\n macro_rules! round_trip {\r\n ($text: expr) => ({\r\n let mut p = ParseSession::new($text, DocBuilder);\r\n let table = p.parse().unwrap();\r\n let mut buf = String::new();\r\n table.print(&mut buf);\r\n println!(\"\\\"{}\\\"\", buf);\r\n assert!($text == buf);\r\n })\r\n }\r\n\r\n #[test]\r\n fn empty() { round_trip!(\" #asd \\n \") }\r\n #[test]\r\n fn single_table() { round_trip!(\" #asd\\t \\n [a] \\t \\n\\n #asdasdad\\n \") }\r\n\r\n}<|endoftext|>"} {"text":"<commit_before><commit_msg>GDT code<commit_after>use vmx;\nuse cli;\n\n#[repr(packed)]\nstruct GDTEntry {\n limit_low: u16,\n base_low: u16,\n base_middle: u8,\n flags: u8,\n more_flags: u8,\n base_high: u8,\n}\n\nconst GDT: [GDTEntry; 3] = [\n GDTEntry{\n limit_low: 0,\n base_low: 0,\n base_middle: 0,\n flags: 0,\n more_flags: 0,\n base_high: 0,\n },\n GDTEntry{\n limit_low: 0xffff,\n base_low: 0,\n base_middle: 0,\n flags: 0b10101001,\n more_flags: 0b11110111,\n base_high: 0,\n },\n GDTEntry{\n limit_low: 0xffff,\n base_low: 0,\n base_middle: 0,\n flags: 0b00101001,\n more_flags: 0b11110111,\n base_high: 0,\n },\n];\n\npub fn new_host_descriptor() -> vmx::CPUTableDescriptor {\n vmx::CPUTableDescriptor{ limit: 0xffff, base: GDT.as_ptr() as u64 }\n}\n\npub fn test_load() {\n cli::ClearLocalInterruptsGuard::new();\n let gdt_desc = new_host_descriptor();\n let mut orig_gdt_desc: vmx::CPUTableDescriptor = Default::default();\n vmx::sgdt(&mut orig_gdt_desc);\n vmx::lgdt(&gdt_desc);\n vmx::lgdt(&orig_gdt_desc);\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove unnecessary feature attribute<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>chore: fixes features declarations<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Minor fixes<commit_after><|endoftext|>"} 
{"text":"<commit_before><commit_msg>git work, not working yet<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Updated docs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Dummy lib for root<commit_after>\/\/ Dummy lib file so we can refer to the root Calx crate\n<|endoftext|>"} {"text":"<commit_before><commit_msg>First insertion implementation, so far without root splitting<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Changed the recv() method to have it return appropriate FunnelErrors and also catch an attempted out-of-bounds read on the sources<commit_after><|endoftext|>"} {"text":"<commit_before>\/\/! A fast, low-level IO library for Rust focusing on non-blocking APIs, event\n\/\/! notification, and other useful utilities for building high performance IO\n\/\/! apps.\n\/\/!\n\/\/! # Goals\n\/\/!\n\/\/! * Fast - minimal overhead over the equivalent OS facilities (epoll, kqueue, etc...)\n\/\/! * Zero allocations\n\/\/! * A scalable readiness-based API, similar to epoll on Linux\n\/\/! * Design to allow for stack allocated buffers when possible (avoid double buffering).\n\/\/! * Provide utilities such as a timers, a notification channel, buffer abstractions, and a slab.\n\/\/!\n\/\/! # Usage\n\/\/!\n\/\/! Using mio starts by creating an [EventLoop](struct.EventLoop.html), which\n\/\/! handles receiving events from the OS and dispatching them to a supplied\n\/\/! [Handler](handler\/trait.Handler.html).\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use mio::*;\n\/\/! use mio::tcp::TcpListener;\n\/\/! use std::net::SocketAddr;\n\/\/! use std::str::FromStr;\n\/\/!\n\/\/! \/\/ Setup some tokens to allow us to identify which event is\n\/\/! \/\/ for which socket.\n\/\/! const SERVER: Token = Token(0);\n\/\/! const CLIENT: Token = Token(1);\n\/\/!\n\/\/! let addr = FromStr::from_str(\"127.0.0.1:13265\").unwrap();\n\/\/!\n\/\/! \/\/ Setup the server socket\n\/\/! let server = tcp::listen(&addr).unwrap();\n\/\/!\n\/\/! \/\/ Create an event loop\n\/\/! let mut event_loop = EventLoop::new().unwrap();\n\/\/!\n\/\/! \/\/ Start listening for incoming connections\n\/\/! event_loop.register(&server, SERVER).unwrap();\n\/\/!\n\/\/! \/\/ Setup the client socket\n\/\/! let (sock, _) = tcp::connect(&addr).unwrap();\n\/\/!\n\/\/! \/\/ Register the socket\n\/\/! event_loop.register(&sock, CLIENT).unwrap();\n\/\/!\n\/\/! \/\/ Define a handler to process the events\n\/\/! struct MyHandler(NonBlock<TcpListener>);\n\/\/!\n\/\/! impl Handler for MyHandler {\n\/\/! type Timeout = ();\n\/\/! type Message = ();\n\/\/!\n\/\/! fn readable(&mut self, event_loop: &mut EventLoop<MyHandler>, token: Token, _: ReadHint) {\n\/\/! match token {\n\/\/! SERVER => {\n\/\/! let MyHandler(ref mut server) = *self;\n\/\/! \/\/ Accept and drop the socket immediately, this will close\n\/\/! \/\/ the socket and notify the client of the EOF.\n\/\/! let _ = server.accept();\n\/\/! }\n\/\/! CLIENT => {\n\/\/! \/\/ The server just shuts down the socket, let's just\n\/\/! \/\/ shutdown the event loop\n\/\/! event_loop.shutdown();\n\/\/! }\n\/\/! _ => panic!(\"unexpected token\"),\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ Start handling events\n\/\/! event_loop.run(&mut MyHandler(server)).unwrap();\n\/\/!\n\/\/! 
```\n\n#![crate_name = \"mio\"]\n\n\/\/ mio is still in rapid development\n#![unstable]\n\n#![feature(alloc, core, io, libc, net, path, std_misc, unsafe_destructor)]\n\nextern crate alloc;\nextern crate bytes;\nextern crate nix;\nextern crate time;\n\n#[macro_use]\nextern crate log;\n\npub mod util;\n\nmod event_loop;\nmod handler;\nmod io;\nmod net;\nmod nonblock;\nmod notify;\nmod os;\nmod poll;\nmod timer;\n\npub use buf::{\n Buf,\n MutBuf,\n};\npub use event_loop::{\n EventLoop,\n EventLoopConfig,\n EventLoopSender,\n};\npub use handler::{\n Handler,\n};\npub use io::{\n pipe,\n FromFd,\n Io,\n TryRead,\n TryWrite,\n Evented,\n PipeReader,\n PipeWriter,\n};\npub use net::{\n tcp,\n udp,\n unix,\n Socket,\n};\npub use nonblock::{\n IntoNonBlock,\n NonBlock,\n};\npub use os::token::{\n Token,\n};\npub use os::event::{\n PollOpt,\n Interest,\n ReadHint,\n};\npub use poll::{\n Poll\n};\npub use timer::{\n Timeout,\n TimerError,\n TimerResult\n};\n\npub mod prelude {\n pub use super::{\n EventLoop,\n TryRead,\n TryWrite,\n };\n}\n\n\/\/ Re-export bytes\npub mod buf {\n pub use bytes::{\n Buf,\n MutBuf,\n ByteBuf,\n MutByteBuf,\n RingBuf,\n SliceBuf,\n MutSliceBuf,\n };\n}\n<commit_msg>Track Rust master (fix warning)<commit_after>\/\/! A fast, low-level IO library for Rust focusing on non-blocking APIs, event\n\/\/! notification, and other useful utilities for building high performance IO\n\/\/! apps.\n\/\/!\n\/\/! # Goals\n\/\/!\n\/\/! * Fast - minimal overhead over the equivalent OS facilities (epoll, kqueue, etc...)\n\/\/! * Zero allocations\n\/\/! * A scalable readiness-based API, similar to epoll on Linux\n\/\/! * Design to allow for stack allocated buffers when possible (avoid double buffering).\n\/\/! * Provide utilities such as a timers, a notification channel, buffer abstractions, and a slab.\n\/\/!\n\/\/! # Usage\n\/\/!\n\/\/! Using mio starts by creating an [EventLoop](struct.EventLoop.html), which\n\/\/! handles receiving events from the OS and dispatching them to a supplied\n\/\/! [Handler](handler\/trait.Handler.html).\n\/\/!\n\/\/! # Example\n\/\/!\n\/\/! ```\n\/\/! use mio::*;\n\/\/! use mio::tcp::TcpListener;\n\/\/! use std::net::SocketAddr;\n\/\/! use std::str::FromStr;\n\/\/!\n\/\/! \/\/ Setup some tokens to allow us to identify which event is\n\/\/! \/\/ for which socket.\n\/\/! const SERVER: Token = Token(0);\n\/\/! const CLIENT: Token = Token(1);\n\/\/!\n\/\/! let addr = FromStr::from_str(\"127.0.0.1:13265\").unwrap();\n\/\/!\n\/\/! \/\/ Setup the server socket\n\/\/! let server = tcp::listen(&addr).unwrap();\n\/\/!\n\/\/! \/\/ Create an event loop\n\/\/! let mut event_loop = EventLoop::new().unwrap();\n\/\/!\n\/\/! \/\/ Start listening for incoming connections\n\/\/! event_loop.register(&server, SERVER).unwrap();\n\/\/!\n\/\/! \/\/ Setup the client socket\n\/\/! let (sock, _) = tcp::connect(&addr).unwrap();\n\/\/!\n\/\/! \/\/ Register the socket\n\/\/! event_loop.register(&sock, CLIENT).unwrap();\n\/\/!\n\/\/! \/\/ Define a handler to process the events\n\/\/! struct MyHandler(NonBlock<TcpListener>);\n\/\/!\n\/\/! impl Handler for MyHandler {\n\/\/! type Timeout = ();\n\/\/! type Message = ();\n\/\/!\n\/\/! fn readable(&mut self, event_loop: &mut EventLoop<MyHandler>, token: Token, _: ReadHint) {\n\/\/! match token {\n\/\/! SERVER => {\n\/\/! let MyHandler(ref mut server) = *self;\n\/\/! \/\/ Accept and drop the socket immediately, this will close\n\/\/! \/\/ the socket and notify the client of the EOF.\n\/\/! let _ = server.accept();\n\/\/! }\n\/\/! CLIENT => {\n\/\/! 
\/\/ The server just shuts down the socket, let's just\n\/\/! \/\/ shutdown the event loop\n\/\/! event_loop.shutdown();\n\/\/! }\n\/\/! _ => panic!(\"unexpected token\"),\n\/\/! }\n\/\/! }\n\/\/! }\n\/\/!\n\/\/! \/\/ Start handling events\n\/\/! event_loop.run(&mut MyHandler(server)).unwrap();\n\/\/!\n\/\/! ```\n\n#![crate_name = \"mio\"]\n\n\/\/ mio is still in rapid development\n#![unstable]\n\n#![feature(alloc, core, io, libc, net, std_misc, unsafe_destructor)]\n\nextern crate alloc;\nextern crate bytes;\nextern crate nix;\nextern crate time;\n\n#[macro_use]\nextern crate log;\n\npub mod util;\n\nmod event_loop;\nmod handler;\nmod io;\nmod net;\nmod nonblock;\nmod notify;\nmod os;\nmod poll;\nmod timer;\n\npub use buf::{\n Buf,\n MutBuf,\n};\npub use event_loop::{\n EventLoop,\n EventLoopConfig,\n EventLoopSender,\n};\npub use handler::{\n Handler,\n};\npub use io::{\n pipe,\n FromFd,\n Io,\n TryRead,\n TryWrite,\n Evented,\n PipeReader,\n PipeWriter,\n};\npub use net::{\n tcp,\n udp,\n unix,\n Socket,\n};\npub use nonblock::{\n IntoNonBlock,\n NonBlock,\n};\npub use os::token::{\n Token,\n};\npub use os::event::{\n PollOpt,\n Interest,\n ReadHint,\n};\npub use poll::{\n Poll\n};\npub use timer::{\n Timeout,\n TimerError,\n TimerResult\n};\n\npub mod prelude {\n pub use super::{\n EventLoop,\n TryRead,\n TryWrite,\n };\n}\n\n\/\/ Re-export bytes\npub mod buf {\n pub use bytes::{\n Buf,\n MutBuf,\n ByteBuf,\n MutByteBuf,\n RingBuf,\n SliceBuf,\n MutSliceBuf,\n };\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>add error method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Formatting<commit_after><|endoftext|>"} {"text":"<commit_before>use std::process::Command;\n\nuse self::grammar::pipelines;\nuse glob::glob;\n\n#[derive(Debug, PartialEq, Clone)]\npub struct Pipeline {\n pub jobs: Vec<Job>,\n pub stdout_file: Option<String>,\n pub stdin_file: Option<String>,\n}\n\nimpl Pipeline {\n\n pub fn new(jobs: Vec<Job>, stdin: Option<String>, stdout: Option<String>) -> Self {\n Pipeline {\n jobs: jobs,\n stdin_file: stdin,\n stdout_file: stdout,\n }\n }\n\n pub fn expand_globs(&mut self) {\n let jobs = self.jobs.drain(..).map(|mut job| {\n job.expand_globs();\n job\n }).collect();\n self.jobs = jobs;\n }\n}\n\n#[derive(Debug, PartialEq, Clone)]\npub struct Job {\n pub command: String,\n pub args: Vec<String>,\n pub background: bool,\n}\n\nimpl Job {\n\n pub fn new(args: Vec<String>, background: bool) -> Self {\n let command = args[0].clone();\n Job {\n command: command,\n args: args,\n background: background,\n }\n }\n\n pub fn expand_globs(&mut self) {\n let mut new_args: Vec<String> = vec![];\n for arg in self.args.drain(..) {\n let mut pushed_glob = false;\n if let Ok(expanded) = glob(&arg) {\n for path in expanded.filter_map(Result::ok) {\n pushed_glob = true;\n new_args.push(path.to_string_lossy().into_owned());\n }\n }\n if !pushed_glob {\n new_args.push(arg);\n }\n }\n self.args = new_args;\n }\n\n pub fn build_command(&self) -> Command {\n let mut command = Command::new(&self.command);\n for i in 1..self.args.len() {\n if let Some(arg) = self.args.get(i) {\n command.arg(arg);\n }\n }\n command\n }\n}\n\npub fn parse(code: &str) -> Vec<Pipeline> {\n pipelines(code).unwrap_or(vec![])\n}\n\npeg! grammar(r#\"\nuse super::Pipeline;\nuse super::Job;\n\n\n#[pub]\npipelines -> Vec<Pipeline>\n = (unused* newline)* pipelines:pipeline ++ ((job_ending+ unused*)+) (newline unused*)* { pipelines }\n \/ (unused*) ** newline { vec![] }\n\npipeline -> Pipeline\n = whitespace? 
res:job ++ pipeline_sep whitespace? redir:redirection whitespace? comment? { Pipeline::new(res, redir.0, redir.1) }\n\njob -> Job\n = args:word ++ whitespace background:background_token? { \n Job::new(args.iter().map(|arg|arg.to_string()).collect(), background.is_some())\n }\n\nredirection -> (Option<String>, Option<String>)\n = stdin:redirect_stdin whitespace? stdout:redirect_stdout? { (Some(stdin), stdout) }\n \/ stdout:redirect_stdout whitespace? stdin:redirect_stdin? { (stdin, Some(stdout)) }\n \/ { (None, None) }\n\nredirect_stdin -> String\n = [<] whitespace? file:word { file.to_string() }\n\nredirect_stdout -> String\n = [>] whitespace? file:word { file.to_string() }\n\npipeline_sep -> ()\n = (whitespace? [|] whitespace?) { }\n\nbackground_token -> ()\n = [&]\n \/ whitespace [&]\n\nword -> &'input str\n = double_quoted_word\n \/ single_quoted_word\n \/ [^ \\t\\r\\n#;&|<>]+ { match_str }\n\ndouble_quoted_word -> &'input str\n = [\"] word:_double_quoted_word [\"] { word }\n\n_double_quoted_word -> &'input str\n = [^\"]+ { match_str }\n\nsingle_quoted_word -> &'input str\n = ['] word:_single_quoted_word ['] { word }\n\n_single_quoted_word -> &'input str\n = [^']+ { match_str }\n\nunused -> ()\n = whitespace comment? { () }\n \/ comment { () }\n\ncomment -> ()\n = [#] [^\\r\\n]*\n\nwhitespace -> ()\n = [ \\t]+\n\njob_ending -> ()\n = [;]\n \/ newline\n \/ newline\n\nnewline -> ()\n = [\\r\\n]\n\"#);\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::grammar::*;\n\n #[test]\n fn single_job_no_args() {\n let jobs = parse(\"cat\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"cat\", jobs[0].command);\n assert_eq!(1, jobs[0].args.len());\n }\n\n #[test]\n fn single_job_with_args() {\n let jobs = parse(\"ls -al dir\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"ls\", jobs[0].command);\n assert_eq!(\"-al\", jobs[0].args[1]);\n assert_eq!(\"dir\", jobs[0].args[2]);\n }\n\n #[test]\n fn multiple_jobs_with_args() {\n let pipelines = parse(\"ls -al;cat tmp.txt\");\n assert_eq!(2, pipelines.len());\n assert_eq!(\"ls\", pipelines[0].jobs[0].command);\n assert_eq!(\"-al\", pipelines[0].jobs[0].args[1]);\n assert_eq!(\"cat\", pipelines[1].jobs[0].command);\n assert_eq!(\"tmp.txt\", pipelines[1].jobs[0].args[1]);\n }\n\n #[test]\n fn parse_empty_string() {\n let pipelines = parse(\"\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn multiple_white_space_between_words() {\n let jobs = parse(\"ls \\t -al\\t\\tdir\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"ls\", jobs[0].command);\n assert_eq!(\"-al\", jobs[0].args[1]);\n assert_eq!(\"dir\", jobs[0].args[2]);\n }\n\n #[test]\n fn trailing_whitespace() {\n let pipelines = parse(\"ls -al\\t \");\n assert_eq!(1, pipelines.len());\n assert_eq!(\"ls\", pipelines[0].jobs[0].command);\n assert_eq!(\"-al\", pipelines[0].jobs[0].args[1]);\n }\n\n #[test]\n fn double_quoting() {\n let jobs = parse(\"echo \\\"Hello World\\\"\").remove(0).jobs;\n assert_eq!(2, jobs[0].args.len());\n assert_eq!(\"Hello World\", jobs[0].args[1]);\n }\n\n #[test]\n fn all_whitespace() {\n let pipelines = parse(\" \\t \");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn not_background_job() {\n let jobs = parse(\"echo hello world\").remove(0).jobs;\n assert_eq!(false, jobs[0].background);\n }\n\n #[test]\n fn background_job() {\n let jobs = parse(\"echo hello world&\").remove(0).jobs;\n assert_eq!(true, jobs[0].background);\n }\n\n #[test]\n fn background_job_with_space() {\n let jobs = parse(\"echo hello world 
&\").remove(0).jobs;\n assert_eq!(true, jobs[0].background);\n }\n\n #[test]\n fn lone_comment() {\n let pipelines = parse(\"# ; \\t as!!+dfa\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn command_followed_by_comment() {\n let pipelines = parse(\"cat # ; \\t as!!+dfa\");\n assert_eq!(1, pipelines.len());\n assert_eq!(1, pipelines[0].jobs[0].args.len());\n }\n\n #[test]\n fn comments_in_multiline_script() {\n let pipelines = parse(\"echo\\n# a comment;\\necho#asfasdf\");\n assert_eq!(2, pipelines.len());\n }\n\n #[test]\n fn multiple_newlines() {\n let pipelines = parse(\"echo\\n\\ncat\");\n assert_eq!(2, pipelines.len());\n }\n\n #[test]\n fn leading_whitespace() {\n let jobs = parse(\" \\techo\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"echo\", jobs[0].command);\n }\n\n #[test]\n fn indentation_on_multiple_lines() {\n let pipelines = parse(\"echo\\n cat\");\n assert_eq!(2, pipelines.len());\n assert_eq!(\"echo\", pipelines[0].jobs[0].command);\n assert_eq!(\"cat\", pipelines[1].jobs[0].command);\n }\n\n #[test]\n fn single_quoting() {\n let jobs = parse(\"echo '#!!;\\\"\\\\'\").remove(0).jobs;\n assert_eq!(\"#!!;\\\"\\\\\", jobs[0].args[1]);\n }\n\n #[test]\n fn mixed_quoted_and_unquoted() {\n let jobs = parse(\"echo '#!!;\\\"\\\\' and \\t some \\\"more' 'stuff\\\"\").remove(0).jobs;\n assert_eq!(\"#!!;\\\"\\\\\", jobs[0].args[1]);\n assert_eq!(\"and\", jobs[0].args[2]);\n assert_eq!(\"some\", jobs[0].args[3]);\n assert_eq!(\"more' 'stuff\", jobs[0].args[4]);\n }\n\n #[test]\n fn several_blank_lines() {\n let pipelines = parse(\"\\n\\n\\n\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn pipelines_with_redirection() {\n let pipelines = parse(\"cat | echo hello | cat < stuff > other\");\n assert_eq!(3, pipelines[0].jobs.len());\n assert_eq!(Some(\"stuff\".to_string()), pipelines[0].stdin_file);\n assert_eq!(Some(\"other\".to_string()), pipelines[0].stdout_file);\n }\n\n #[test]\n fn pipelines_with_redirection_reverse_order() {\n let pipelines = parse(\"cat | echo hello | cat > stuff < other\");\n assert_eq!(3, pipelines[0].jobs.len());\n assert_eq!(Some(\"other\".to_string()), pipelines[0].stdin_file);\n assert_eq!(Some(\"stuff\".to_string()), pipelines[0].stdout_file);\n }\n\n #[test]\n fn full_script() {\n pipelines(r#\"if a == a\n echo true a == a\n\n if b != b\n echo true b != b\n else\n echo false b != b\n\n if 3 > 2\n echo true 3 > 2\n else\n echo false 3 > 2\n fi\n fi\nelse\n echo false a == a\nfi\n\"#)\n .unwrap(); \/\/ Make sure it parses\n }\n\n #[test]\n fn leading_and_trailing_junk() {\n pipelines(r#\"\n\n# comment\n # comment\n \n\n if a == a \n echo true a == a # Line ending commment\n\n if b != b\n echo true b != b\n else\n echo false b != b\n\n if 3 > 2\n echo true 3 > 2\n else\n echo false 3 > 2\n fi\n fi\nelse\n echo false a == a\n fi \n\n# comment\n\n\"#).unwrap(); \/\/ Make sure it parses\n }\n}\n<commit_msg>Only expand globs if they contain a metacharater<commit_after>use std::process::Command;\n\nuse self::grammar::pipelines;\nuse glob::glob;\n\n#[derive(Debug, PartialEq, Clone)]\npub struct Pipeline {\n pub jobs: Vec<Job>,\n pub stdout_file: Option<String>,\n pub stdin_file: Option<String>,\n}\n\nimpl Pipeline {\n\n pub fn new(jobs: Vec<Job>, stdin: Option<String>, stdout: Option<String>) -> Self {\n Pipeline {\n jobs: jobs,\n stdin_file: stdin,\n stdout_file: stdout,\n }\n }\n\n pub fn expand_globs(&mut self) {\n let jobs = self.jobs.drain(..).map(|mut job| {\n job.expand_globs();\n job\n }).collect();\n self.jobs = jobs;\n 
}\n}\n\n#[derive(Debug, PartialEq, Clone)]\npub struct Job {\n pub command: String,\n pub args: Vec<String>,\n pub background: bool,\n}\n\nimpl Job {\n\n pub fn new(args: Vec<String>, background: bool) -> Self {\n let command = args[0].clone();\n Job {\n command: command,\n args: args,\n background: background,\n }\n }\n\n pub fn expand_globs(&mut self) {\n let mut new_args: Vec<String> = vec![];\n for arg in self.args.drain(..) {\n let mut pushed_glob = false;\n if arg.contains(|chr| chr == '?' || chr == '*' || chr == '[') {\n if let Ok(expanded) = glob(&arg) {\n for path in expanded.filter_map(Result::ok) {\n pushed_glob = true;\n new_args.push(path.to_string_lossy().into_owned());\n }\n }\n }\n if !pushed_glob {\n new_args.push(arg);\n }\n }\n self.args = new_args;\n }\n\n pub fn build_command(&self) -> Command {\n let mut command = Command::new(&self.command);\n for i in 1..self.args.len() {\n if let Some(arg) = self.args.get(i) {\n command.arg(arg);\n }\n }\n command\n }\n}\n\npub fn parse(code: &str) -> Vec<Pipeline> {\n pipelines(code).unwrap_or(vec![])\n}\n\npeg! grammar(r#\"\nuse super::Pipeline;\nuse super::Job;\n\n\n#[pub]\npipelines -> Vec<Pipeline>\n = (unused* newline)* pipelines:pipeline ++ ((job_ending+ unused*)+) (newline unused*)* { pipelines }\n \/ (unused*) ** newline { vec![] }\n\npipeline -> Pipeline\n = whitespace? res:job ++ pipeline_sep whitespace? redir:redirection whitespace? comment? { Pipeline::new(res, redir.0, redir.1) }\n\njob -> Job\n = args:word ++ whitespace background:background_token? { \n Job::new(args.iter().map(|arg|arg.to_string()).collect(), background.is_some())\n }\n\nredirection -> (Option<String>, Option<String>)\n = stdin:redirect_stdin whitespace? stdout:redirect_stdout? { (Some(stdin), stdout) }\n \/ stdout:redirect_stdout whitespace? stdin:redirect_stdin? { (stdin, Some(stdout)) }\n \/ { (None, None) }\n\nredirect_stdin -> String\n = [<] whitespace? file:word { file.to_string() }\n\nredirect_stdout -> String\n = [>] whitespace? file:word { file.to_string() }\n\npipeline_sep -> ()\n = (whitespace? [|] whitespace?) { }\n\nbackground_token -> ()\n = [&]\n \/ whitespace [&]\n\nword -> &'input str\n = double_quoted_word\n \/ single_quoted_word\n \/ [^ \\t\\r\\n#;&|<>]+ { match_str }\n\ndouble_quoted_word -> &'input str\n = [\"] word:_double_quoted_word [\"] { word }\n\n_double_quoted_word -> &'input str\n = [^\"]+ { match_str }\n\nsingle_quoted_word -> &'input str\n = ['] word:_single_quoted_word ['] { word }\n\n_single_quoted_word -> &'input str\n = [^']+ { match_str }\n\nunused -> ()\n = whitespace comment? 
{ () }\n \/ comment { () }\n\ncomment -> ()\n = [#] [^\\r\\n]*\n\nwhitespace -> ()\n = [ \\t]+\n\njob_ending -> ()\n = [;]\n \/ newline\n \/ newline\n\nnewline -> ()\n = [\\r\\n]\n\"#);\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use super::grammar::*;\n\n #[test]\n fn single_job_no_args() {\n let jobs = parse(\"cat\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"cat\", jobs[0].command);\n assert_eq!(1, jobs[0].args.len());\n }\n\n #[test]\n fn single_job_with_args() {\n let jobs = parse(\"ls -al dir\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"ls\", jobs[0].command);\n assert_eq!(\"-al\", jobs[0].args[1]);\n assert_eq!(\"dir\", jobs[0].args[2]);\n }\n\n #[test]\n fn multiple_jobs_with_args() {\n let pipelines = parse(\"ls -al;cat tmp.txt\");\n assert_eq!(2, pipelines.len());\n assert_eq!(\"ls\", pipelines[0].jobs[0].command);\n assert_eq!(\"-al\", pipelines[0].jobs[0].args[1]);\n assert_eq!(\"cat\", pipelines[1].jobs[0].command);\n assert_eq!(\"tmp.txt\", pipelines[1].jobs[0].args[1]);\n }\n\n #[test]\n fn parse_empty_string() {\n let pipelines = parse(\"\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn multiple_white_space_between_words() {\n let jobs = parse(\"ls \\t -al\\t\\tdir\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"ls\", jobs[0].command);\n assert_eq!(\"-al\", jobs[0].args[1]);\n assert_eq!(\"dir\", jobs[0].args[2]);\n }\n\n #[test]\n fn trailing_whitespace() {\n let pipelines = parse(\"ls -al\\t \");\n assert_eq!(1, pipelines.len());\n assert_eq!(\"ls\", pipelines[0].jobs[0].command);\n assert_eq!(\"-al\", pipelines[0].jobs[0].args[1]);\n }\n\n #[test]\n fn double_quoting() {\n let jobs = parse(\"echo \\\"Hello World\\\"\").remove(0).jobs;\n assert_eq!(2, jobs[0].args.len());\n assert_eq!(\"Hello World\", jobs[0].args[1]);\n }\n\n #[test]\n fn all_whitespace() {\n let pipelines = parse(\" \\t \");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn not_background_job() {\n let jobs = parse(\"echo hello world\").remove(0).jobs;\n assert_eq!(false, jobs[0].background);\n }\n\n #[test]\n fn background_job() {\n let jobs = parse(\"echo hello world&\").remove(0).jobs;\n assert_eq!(true, jobs[0].background);\n }\n\n #[test]\n fn background_job_with_space() {\n let jobs = parse(\"echo hello world &\").remove(0).jobs;\n assert_eq!(true, jobs[0].background);\n }\n\n #[test]\n fn lone_comment() {\n let pipelines = parse(\"# ; \\t as!!+dfa\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn command_followed_by_comment() {\n let pipelines = parse(\"cat # ; \\t as!!+dfa\");\n assert_eq!(1, pipelines.len());\n assert_eq!(1, pipelines[0].jobs[0].args.len());\n }\n\n #[test]\n fn comments_in_multiline_script() {\n let pipelines = parse(\"echo\\n# a comment;\\necho#asfasdf\");\n assert_eq!(2, pipelines.len());\n }\n\n #[test]\n fn multiple_newlines() {\n let pipelines = parse(\"echo\\n\\ncat\");\n assert_eq!(2, pipelines.len());\n }\n\n #[test]\n fn leading_whitespace() {\n let jobs = parse(\" \\techo\").remove(0).jobs;\n assert_eq!(1, jobs.len());\n assert_eq!(\"echo\", jobs[0].command);\n }\n\n #[test]\n fn indentation_on_multiple_lines() {\n let pipelines = parse(\"echo\\n cat\");\n assert_eq!(2, pipelines.len());\n assert_eq!(\"echo\", pipelines[0].jobs[0].command);\n assert_eq!(\"cat\", pipelines[1].jobs[0].command);\n }\n\n #[test]\n fn single_quoting() {\n let jobs = parse(\"echo '#!!;\\\"\\\\'\").remove(0).jobs;\n assert_eq!(\"#!!;\\\"\\\\\", jobs[0].args[1]);\n }\n\n #[test]\n fn mixed_quoted_and_unquoted() 
{\n let jobs = parse(\"echo '#!!;\\\"\\\\' and \\t some \\\"more' 'stuff\\\"\").remove(0).jobs;\n assert_eq!(\"#!!;\\\"\\\\\", jobs[0].args[1]);\n assert_eq!(\"and\", jobs[0].args[2]);\n assert_eq!(\"some\", jobs[0].args[3]);\n assert_eq!(\"more' 'stuff\", jobs[0].args[4]);\n }\n\n #[test]\n fn several_blank_lines() {\n let pipelines = parse(\"\\n\\n\\n\");\n assert_eq!(0, pipelines.len());\n }\n\n #[test]\n fn pipelines_with_redirection() {\n let pipelines = parse(\"cat | echo hello | cat < stuff > other\");\n assert_eq!(3, pipelines[0].jobs.len());\n assert_eq!(Some(\"stuff\".to_string()), pipelines[0].stdin_file);\n assert_eq!(Some(\"other\".to_string()), pipelines[0].stdout_file);\n }\n\n #[test]\n fn pipelines_with_redirection_reverse_order() {\n let pipelines = parse(\"cat | echo hello | cat > stuff < other\");\n assert_eq!(3, pipelines[0].jobs.len());\n assert_eq!(Some(\"other\".to_string()), pipelines[0].stdin_file);\n assert_eq!(Some(\"stuff\".to_string()), pipelines[0].stdout_file);\n }\n\n #[test]\n fn full_script() {\n pipelines(r#\"if a == a\n echo true a == a\n\n if b != b\n echo true b != b\n else\n echo false b != b\n\n if 3 > 2\n echo true 3 > 2\n else\n echo false 3 > 2\n fi\n fi\nelse\n echo false a == a\nfi\n\"#)\n .unwrap(); \/\/ Make sure it parses\n }\n\n #[test]\n fn leading_and_trailing_junk() {\n pipelines(r#\"\n\n# comment\n # comment\n \n\n if a == a \n echo true a == a # Line ending commment\n\n if b != b\n echo true b != b\n else\n echo false b != b\n\n if 3 > 2\n echo true 3 > 2\n else\n echo false 3 > 2\n fi\n fi\nelse\n echo false a == a\n fi \n\n# comment\n\n\"#).unwrap(); \/\/ Make sure it parses\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>[projecteuler] Add solution for problem 12 using rust.<commit_after>\/\/! # Problem 12 from projecteuler.net\n\/\/!\n\/\/! You can find problem description [here][problem].\n\/\/!\n\/\/! 
[problem]: https:\/\/projecteuler.net\/problem=12\n\n\nmod factorization {\n\nuse std::collections::HashMap;\n\nfn ferma_factorization(number: u64) -> (u64, u64)\n{\n assert!(number % 2 != 0);\n let mut x = (number as f64).sqrt().ceil() as u64;\n\n if x == number {\n return (x, 1);\n }\n\n loop {\n let y = ((x * x - number) as f64).sqrt();\n if y == y.round() {\n return (x - y as u64, x + y as u64);\n }\n x += 1;\n }\n}\n\nfn map_incr(map: &mut HashMap<u64, u64>, key: u64) {\n if map.contains_key(&key) {\n *map.get_mut(&key) += 1;\n } else {\n map.insert(key, 1);\n }\n}\n\nfn recoursive_factorization(map: &mut HashMap<u64, u64>, mut num: u64) {\n while num % 2 == 0 {\n map_incr(map, 2);\n num \/= 2;\n }\n\n if num == 1 {\n return;\n }\n\n let factorization = ferma_factorization(num);\n if num == factorization.val0() {\n map_incr(map, num);\n } else {\n recoursive_factorization(map, factorization.val0());\n }\n\n if num == factorization.val1() {\n map_incr(map, num);\n } else {\n recoursive_factorization(map, factorization.val1());\n }\n}\n\npub fn fast_number_of_divisors(number: u64) -> u64\n{\n let mut map: HashMap<u64, u64> = HashMap::new();\n\n recoursive_factorization(&mut map, number);\n let mut result = 1u64;\n\n for val in map.values() {\n result *= val + 1;\n }\n return result;\n}\n\nfn number_of_divisors(value: u64) -> u64\n{\n let mut result = 1;\n let mut current = 1;\n\n while current < value {\n if value % current == 0 {\n result += 1;\n }\n current += 1;\n }\n\n return result;\n}\n\n#[allow(dead_code)]\npub fn stress_test(n: u64) { \n for n in range(1u64, n) {\n let fast = fast_number_of_divisors(n);\n let slow = number_of_divisors(n);\n if fast != slow {\n println!(\"n = {}\\tgot {}\\texpect {}\", n, fast, slow);\n }\n }\n println!(\"TEST OK!\");\n}\n\n} \/\/ mod factorization\n\n\nfn calc_triangular(num: u64) -> u64\n{\n return num * (num + 1) \/ 2;\n}\n\nfn triangular_number_from_divisors(num_divisors: u64) -> u64\n{\n let mut current: u64 = 1;\n loop {\n let triangular = calc_triangular(current);\n let triangular_divisors = factorization::fast_number_of_divisors(triangular);\n\n if triangular_divisors > num_divisors {\n return triangular;\n }\n\n current += 1;\n }\n}\n\nfn main()\n{\n factorization::stress_test(500);\n \n println!(\"{}\", triangular_number_from_divisors(500));\n}\n<|endoftext|>"} {"text":"<commit_before>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate filetime;\n\nuse std::{fs, env};\nuse std::fs::File;\nuse std::process::{Command, Stdio};\nuse std::path::{Path, PathBuf};\n\nuse filetime::FileTime;\n\n\/\/\/ A helper macro to `unwrap` a result except also print out details like:\n\/\/\/\n\/\/\/ * The file\/line of the panic\n\/\/\/ * The expression that failed\n\/\/\/ * The error itself\n\/\/\/\n\/\/\/ This is currently used judiciously throughout the build system rather than\n\/\/\/ using a `Result` with `try!`, but this may change one day...\n#[macro_export]\nmacro_rules! 
t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\npub fn run(cmd: &mut Command) {\n println!(\"running: {:?}\", cmd);\n run_silent(cmd);\n}\n\npub fn run_silent(cmd: &mut Command) {\n let status = match cmd.status() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n status));\n }\n}\n\npub fn run_suppressed(cmd: &mut Command) {\n let output = match cmd.output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\\n\\n\\\n stdout ----\\n{}\\n\\\n stderr ----\\n{}\\n\",\n cmd,\n output.status,\n String::from_utf8_lossy(&output.stdout),\n String::from_utf8_lossy(&output.stderr)));\n }\n}\n\npub fn gnu_target(target: &str) -> String {\n match target {\n \"i686-pc-windows-msvc\" => \"i686-pc-win32\".to_string(),\n \"x86_64-pc-windows-msvc\" => \"x86_64-pc-win32\".to_string(),\n \"i686-pc-windows-gnu\" => \"i686-w64-mingw32\".to_string(),\n \"x86_64-pc-windows-gnu\" => \"x86_64-w64-mingw32\".to_string(),\n s => s.to_string(),\n }\n}\n\npub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {\n if target.contains(\"msvc\") {\n None\n } else if target.contains(\"musl\") {\n Some(PathBuf::from(\"ar\"))\n } else if target.contains(\"openbsd\") {\n Some(PathBuf::from(\"ar\"))\n } else {\n let parent = cc.parent().unwrap();\n let file = cc.file_name().unwrap().to_str().unwrap();\n for suffix in &[\"gcc\", \"cc\", \"clang\"] {\n if let Some(idx) = file.rfind(suffix) {\n let mut file = file[..idx].to_owned();\n file.push_str(\"ar\");\n return Some(parent.join(&file));\n }\n }\n Some(parent.join(file))\n }\n}\n\npub fn make(host: &str) -> PathBuf {\n if host.contains(\"bitrig\") || host.contains(\"dragonfly\") ||\n host.contains(\"freebsd\") || host.contains(\"netbsd\") ||\n host.contains(\"openbsd\") {\n PathBuf::from(\"gmake\")\n } else {\n PathBuf::from(\"make\")\n }\n}\n\npub fn output(cmd: &mut Command) -> String {\n let output = match cmd.stderr(Stdio::inherit()).output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n panic!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n output.status);\n }\n String::from_utf8(output.stdout).unwrap()\n}\n\npub fn rerun_if_changed_anything_in_dir(dir: &Path) {\n let mut stack = dir.read_dir().unwrap()\n .map(|e| e.unwrap())\n .filter(|e| &*e.file_name() != \".git\")\n .collect::<Vec<_>>();\n while let Some(entry) = stack.pop() {\n let path = entry.path();\n if entry.file_type().unwrap().is_dir() {\n stack.extend(path.read_dir().unwrap().map(|e| e.unwrap()));\n } else {\n println!(\"cargo:rerun-if-changed={}\", path.display());\n }\n }\n}\n\n\/\/\/ Returns the last-modified time for `path`, or zero if it doesn't exist.\npub fn mtime(path: &Path) -> FileTime {\n fs::metadata(path).map(|f| {\n FileTime::from_last_modification_time(&f)\n }).unwrap_or(FileTime::zero())\n}\n\n\/\/\/ Returns whether `dst` is up to date given that the file or files in `src`\n\/\/\/ are used to generate it.\n\/\/\/\n\/\/\/ Uses last-modified time checks to verify this.\npub fn up_to_date(src: 
&Path, dst: &Path) -> bool {\n let threshold = mtime(dst);\n let meta = match fs::metadata(src) {\n Ok(meta) => meta,\n Err(e) => panic!(\"source {:?} failed to get metadata: {}\", src, e),\n };\n if meta.is_dir() {\n dir_up_to_date(src, &threshold)\n } else {\n FileTime::from_last_modification_time(&meta) <= threshold\n }\n}\n\n#[must_use]\npub struct NativeLibBoilerplate {\n pub src_dir: PathBuf,\n pub out_dir: PathBuf,\n}\n\nimpl Drop for NativeLibBoilerplate {\n fn drop(&mut self) {\n t!(File::create(self.out_dir.join(\"rustbuild.timestamp\")));\n }\n}\n\n\/\/ Perform standard preparations for native libraries that are build only once for all stages.\n\/\/ Emit rerun-if-changed and linking attributes for Cargo, check if any source files are\n\/\/ updated, calculate paths used later in actual build with CMake\/make or C\/C++ compiler.\n\/\/ If Err is returned, then everything is up-to-date and further build actions can be skipped.\n\/\/ Timestamps are created automatically when the result of `native_lib_boilerplate` goes out\n\/\/ of scope, so all the build actions should be completed until then.\npub fn native_lib_boilerplate(src_name: &str,\n out_name: &str,\n link_name: &str,\n search_subdir: &str)\n -> Result<NativeLibBoilerplate, ()> {\n let current_dir = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n let src_dir = current_dir.join(\"..\").join(src_name);\n rerun_if_changed_anything_in_dir(&src_dir);\n\n let out_dir = env::var_os(\"RUSTBUILD_NATIVE_DIR\").unwrap_or(env::var_os(\"OUT_DIR\").unwrap());\n let out_dir = PathBuf::from(out_dir).join(out_name);\n let _ = fs::create_dir_all(&out_dir);\n println!(\"cargo:rustc-link-lib=static={}\", link_name);\n println!(\"cargo:rustc-link-search=native={}\", out_dir.join(search_subdir).display());\n\n let timestamp = out_dir.join(\"rustbuild.timestamp\");\n if !up_to_date(Path::new(\"build.rs\"), ×tamp) || !up_to_date(&src_dir, ×tamp) {\n Ok(NativeLibBoilerplate { src_dir: src_dir, out_dir: out_dir })\n } else {\n Err(())\n }\n}\n\nfn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {\n t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {\n let meta = t!(e.metadata());\n if meta.is_dir() {\n dir_up_to_date(&e.path(), threshold)\n } else {\n FileTime::from_last_modification_time(&meta) < *threshold\n }\n })\n}\n\nfn fail(s: &str) -> ! {\n println!(\"\\n\\n{}\\n\\n\", s);\n std::process::exit(1);\n}\n<commit_msg>Auto merge of #40337 - alexcrichton:racy-dirs, r=brson<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n#![deny(warnings)]\n\nextern crate filetime;\n\nuse std::fs::File;\nuse std::io;\nuse std::path::{Path, PathBuf};\nuse std::process::{Command, Stdio};\nuse std::{fs, env};\n\nuse filetime::FileTime;\n\n\/\/\/ A helper macro to `unwrap` a result except also print out details like:\n\/\/\/\n\/\/\/ * The file\/line of the panic\n\/\/\/ * The expression that failed\n\/\/\/ * The error itself\n\/\/\/\n\/\/\/ This is currently used judiciously throughout the build system rather than\n\/\/\/ using a `Result` with `try!`, but this may change one day...\n#[macro_export]\nmacro_rules! t {\n ($e:expr) => (match $e {\n Ok(e) => e,\n Err(e) => panic!(\"{} failed with {}\", stringify!($e), e),\n })\n}\n\npub fn run(cmd: &mut Command) {\n println!(\"running: {:?}\", cmd);\n run_silent(cmd);\n}\n\npub fn run_silent(cmd: &mut Command) {\n let status = match cmd.status() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n status));\n }\n}\n\npub fn run_suppressed(cmd: &mut Command) {\n let output = match cmd.output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n fail(&format!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\\n\\n\\\n stdout ----\\n{}\\n\\\n stderr ----\\n{}\\n\",\n cmd,\n output.status,\n String::from_utf8_lossy(&output.stdout),\n String::from_utf8_lossy(&output.stderr)));\n }\n}\n\npub fn gnu_target(target: &str) -> String {\n match target {\n \"i686-pc-windows-msvc\" => \"i686-pc-win32\".to_string(),\n \"x86_64-pc-windows-msvc\" => \"x86_64-pc-win32\".to_string(),\n \"i686-pc-windows-gnu\" => \"i686-w64-mingw32\".to_string(),\n \"x86_64-pc-windows-gnu\" => \"x86_64-w64-mingw32\".to_string(),\n s => s.to_string(),\n }\n}\n\npub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {\n if target.contains(\"msvc\") {\n None\n } else if target.contains(\"musl\") {\n Some(PathBuf::from(\"ar\"))\n } else if target.contains(\"openbsd\") {\n Some(PathBuf::from(\"ar\"))\n } else {\n let parent = cc.parent().unwrap();\n let file = cc.file_name().unwrap().to_str().unwrap();\n for suffix in &[\"gcc\", \"cc\", \"clang\"] {\n if let Some(idx) = file.rfind(suffix) {\n let mut file = file[..idx].to_owned();\n file.push_str(\"ar\");\n return Some(parent.join(&file));\n }\n }\n Some(parent.join(file))\n }\n}\n\npub fn make(host: &str) -> PathBuf {\n if host.contains(\"bitrig\") || host.contains(\"dragonfly\") ||\n host.contains(\"freebsd\") || host.contains(\"netbsd\") ||\n host.contains(\"openbsd\") {\n PathBuf::from(\"gmake\")\n } else {\n PathBuf::from(\"make\")\n }\n}\n\npub fn output(cmd: &mut Command) -> String {\n let output = match cmd.stderr(Stdio::inherit()).output() {\n Ok(status) => status,\n Err(e) => fail(&format!(\"failed to execute command: {:?}\\nerror: {}\",\n cmd, e)),\n };\n if !output.status.success() {\n panic!(\"command did not execute successfully: {:?}\\n\\\n expected success, got: {}\",\n cmd,\n output.status);\n }\n String::from_utf8(output.stdout).unwrap()\n}\n\npub fn rerun_if_changed_anything_in_dir(dir: &Path) {\n let mut stack = dir.read_dir().unwrap()\n .map(|e| e.unwrap())\n .filter(|e| &*e.file_name() != \".git\")\n .collect::<Vec<_>>();\n while let 
Some(entry) = stack.pop() {\n let path = entry.path();\n if entry.file_type().unwrap().is_dir() {\n stack.extend(path.read_dir().unwrap().map(|e| e.unwrap()));\n } else {\n println!(\"cargo:rerun-if-changed={}\", path.display());\n }\n }\n}\n\n\/\/\/ Returns the last-modified time for `path`, or zero if it doesn't exist.\npub fn mtime(path: &Path) -> FileTime {\n fs::metadata(path).map(|f| {\n FileTime::from_last_modification_time(&f)\n }).unwrap_or(FileTime::zero())\n}\n\n\/\/\/ Returns whether `dst` is up to date given that the file or files in `src`\n\/\/\/ are used to generate it.\n\/\/\/\n\/\/\/ Uses last-modified time checks to verify this.\npub fn up_to_date(src: &Path, dst: &Path) -> bool {\n let threshold = mtime(dst);\n let meta = match fs::metadata(src) {\n Ok(meta) => meta,\n Err(e) => panic!(\"source {:?} failed to get metadata: {}\", src, e),\n };\n if meta.is_dir() {\n dir_up_to_date(src, &threshold)\n } else {\n FileTime::from_last_modification_time(&meta) <= threshold\n }\n}\n\n#[must_use]\npub struct NativeLibBoilerplate {\n pub src_dir: PathBuf,\n pub out_dir: PathBuf,\n}\n\nimpl Drop for NativeLibBoilerplate {\n fn drop(&mut self) {\n t!(File::create(self.out_dir.join(\"rustbuild.timestamp\")));\n }\n}\n\n\/\/ Perform standard preparations for native libraries that are build only once for all stages.\n\/\/ Emit rerun-if-changed and linking attributes for Cargo, check if any source files are\n\/\/ updated, calculate paths used later in actual build with CMake\/make or C\/C++ compiler.\n\/\/ If Err is returned, then everything is up-to-date and further build actions can be skipped.\n\/\/ Timestamps are created automatically when the result of `native_lib_boilerplate` goes out\n\/\/ of scope, so all the build actions should be completed until then.\npub fn native_lib_boilerplate(src_name: &str,\n out_name: &str,\n link_name: &str,\n search_subdir: &str)\n -> Result<NativeLibBoilerplate, ()> {\n let current_dir = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n let src_dir = current_dir.join(\"..\").join(src_name);\n rerun_if_changed_anything_in_dir(&src_dir);\n\n let out_dir = env::var_os(\"RUSTBUILD_NATIVE_DIR\").unwrap_or(env::var_os(\"OUT_DIR\").unwrap());\n let out_dir = PathBuf::from(out_dir).join(out_name);\n t!(create_dir_racy(&out_dir));\n println!(\"cargo:rustc-link-lib=static={}\", link_name);\n println!(\"cargo:rustc-link-search=native={}\", out_dir.join(search_subdir).display());\n\n let timestamp = out_dir.join(\"rustbuild.timestamp\");\n if !up_to_date(Path::new(\"build.rs\"), ×tamp) || !up_to_date(&src_dir, ×tamp) {\n Ok(NativeLibBoilerplate { src_dir: src_dir, out_dir: out_dir })\n } else {\n Err(())\n }\n}\n\nfn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {\n t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {\n let meta = t!(e.metadata());\n if meta.is_dir() {\n dir_up_to_date(&e.path(), threshold)\n } else {\n FileTime::from_last_modification_time(&meta) < *threshold\n }\n })\n}\n\nfn fail(s: &str) -> ! 
{\n println!(\"\\n\\n{}\\n\\n\", s);\n std::process::exit(1);\n}\n\nfn create_dir_racy(path: &Path) -> io::Result<()> {\n match fs::create_dir(path) {\n Ok(()) => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}\n Err(e) => return Err(e),\n }\n match path.parent() {\n Some(p) => try!(create_dir_racy(p)),\n None => return Err(io::Error::new(io::ErrorKind::Other, \"failed to create whole tree\")),\n }\n match fs::create_dir(path) {\n Ok(()) => Ok(()),\n Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()),\n Err(e) => Err(e),\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>remove insert sentinel code<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>beginnings of a bulk classifier<commit_after>extern crate classifier;\nuse classifier::Classifier;\nuse diesel::pg::PgConnection;\nuse dotenv::dotenv;\nuse self::models::Ad;\n\n\/\/ For now we grab all the ads in the database and classify and resave them not\n\/\/ ideal in the future, but we're launching next week.\nfn main() {\n dotenv().ok();\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n let conn = PgConnection::establish(&database_url).expect(\"could not connect to the databas\");\n let ads: Vec<Ad> = diesel::select\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>#24468 call update_entry API from process_response_eof<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add additional test case for superregion<commit_after>\/\/ Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\n\/\/ Test that when a `..` impl applies, we also check that any\n\/\/ supertrait conditions are met.\n\n#![feature(optin_builtin_traits)]\n\nuse std::marker::MarkerTrait;\n\ntrait MyTrait : 'static {}\n\nimpl MyTrait for .. {}\n\nfn foo<T:MyTrait>() { }\n\nfn bar<'a>() {\n foo::<&'a ()>(); \/\/~ ERROR does not fulfill the required lifetime\n}\n\nfn main() {\n}\n<|endoftext|>"} {"text":"<commit_before>use expression::Expression;\nuse query_builder::AsQuery;\nuse query_source::QuerySource;\n\n\/\/\/ Sets the order clause of a query. If there was already a order clause, it\n\/\/\/ will be overridden. The expression passed to `order` must actually be valid\n\/\/\/ for the query. 
See also:\n\/\/\/ [`.desc()`](..\/expression\/expression_methods\/global_expression_methods\/trait.ExpressionMethods.html#method.desc)\n\/\/\/ and [`.asc()`](..\/expression\/expression_methods\/global_expression_methods\/trait.ExpressionMethods.html#method.asc)\n\/\/\/\n\/\/\/ This is automatically implemented for the various query builder types.\npub trait OrderDsl<Expr: Expression>: AsQuery {\n type Output: AsQuery<SqlType=Self::SqlType>;\n\n fn order(self, expr: Expr) -> Self::Output;\n}\n\nimpl<T, Expr, ST> OrderDsl<Expr> for T where\n Expr: Expression,\n T: QuerySource + AsQuery<SqlType=ST>,\n T::Query: OrderDsl<Expr, SqlType=ST>,\n{\n type Output = <T::Query as OrderDsl<Expr>>::Output;\n\n fn order(self, expr: Expr) -> Self::Output {\n self.as_query().order(expr)\n }\n}\n<commit_msg>fix broken doc links<commit_after>use expression::Expression;\nuse query_builder::AsQuery;\nuse query_source::QuerySource;\n\n\/\/\/ Sets the order clause of a query. If there was already a order clause, it\n\/\/\/ will be overridden. The expression passed to `order` must actually be valid\n\/\/\/ for the query. See also:\n\/\/\/ [`.desc()`](..\/..\/expression\/expression_methods\/global_expression_methods\/trait.ExpressionMethods.html#method.desc)\n\/\/\/ and [`.asc()`](..\/..\/expression\/expression_methods\/global_expression_methods\/trait.ExpressionMethods.html#method.asc)\n\/\/\/\n\/\/\/ This is automatically implemented for the various query builder types.\npub trait OrderDsl<Expr: Expression>: AsQuery {\n type Output: AsQuery<SqlType=Self::SqlType>;\n\n fn order(self, expr: Expr) -> Self::Output;\n}\n\nimpl<T, Expr, ST> OrderDsl<Expr> for T where\n Expr: Expression,\n T: QuerySource + AsQuery<SqlType=ST>,\n T::Query: OrderDsl<Expr, SqlType=ST>,\n{\n type Output = <T::Query as OrderDsl<Expr>>::Output;\n\n fn order(self, expr: Expr) -> Self::Output {\n self.as_query().order(expr)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>added another example of reversing a singleton vector<commit_after>extern crate quickcheck;\n\nuse quickcheck::{TestResult, quickcheck};\n\nfn reverse<T: Clone>(xs: &[T]) -> ~[T] {\n let mut rev = ~[];\n for x in xs.iter() {\n rev.unshift(x.clone())\n }\n rev\n}\n\nfn main() {\n fn prop(xs: ~[int]) -> ~TestResult {\n if xs.len() != 1 {\n return TestResult::discard()\n }\n TestResult::from_bool(xs == reverse(xs))\n }\n quickcheck(prop);\n}\n<|endoftext|>"} {"text":"<commit_before>use redox::Vec;\n\npub struct AvlNode<T> {\n value: T,\n left: Option<AvlNodeId>, \/\/ ID for left node\n right: Option<AvlNodeId>, \/\/ ID for right node\n}\n\n#[derive(Copy, Clone)]\npub struct AvlNodeId {\n index: usize,\n time_stamp: u64,\n}\n\nimpl AvlNodeId {\n pub fn get<'a, T: PartialOrd>(&self, avl: &'a AvlTree<T>) -> &'a AvlNode<T> {\n let ref slot = avl.nodes[self.index];\n if slot.time_stamp == self.time_stamp {\n slot.node.as_ref().unwrap()\n } else {\n panic!(\"AvlNodeId had invalid time_stamp\");\n }\n }\n\n pub fn try_get<'a, T: PartialOrd>(&self, avl: &'a AvlTree<T>) -> Option<&'a AvlNode<T>> {\n avl.nodes\n .get(self.index)\n .and_then(|slot| {\n if slot.time_stamp == self.time_stamp {\n slot.node.as_ref()\n } else {\n None\n }\n })\n }\n\n pub fn get_mut<'a, T: PartialOrd>(&self, avl: &'a mut AvlTree<T>) -> &'a mut AvlNode<T> {\n let ref mut slot = avl.nodes[self.index];\n if slot.time_stamp == self.time_stamp {\n slot.node.as_mut().unwrap()\n } else {\n panic!(\"AvlNodeId had invalid time_stamp\");\n }\n }\n\n pub fn try_get_mut<'a, T: PartialOrd>(&self, avl: &'a mut 
AvlTree<T>) -> Option<&'a mut AvlNode<T>> {\n avl.nodes\n .get_mut(self.index)\n .and_then(|slot| {\n if slot.time_stamp == self.time_stamp {\n slot.node.as_mut()\n } else {\n None\n }\n })\n }\n}\n\npub struct AvlTree<T: PartialOrd> {\n root: Option<AvlNodeId>, \/\/ Index of the root node\n nodes: Vec<AvlSlot<T>>,\n free_list: Vec<usize>,\n}\n\nimpl<T: PartialOrd> AvlTree<T> {\n pub fn new() -> Self {\n AvlTree {\n root: None,\n nodes: Vec::new(),\n free_list: Vec::new(),\n }\n }\n\n \/\/ Inserts a value into the tree, keeping it balanced. Lesser values will be stored on\n \/\/ the left, while greater values will be stored on the right. No duplicates are allowed.\n pub fn insert(&mut self, value: T) {\n let root = self.root;\n self.root = Some(self._insert(value, root));\n }\n\n pub fn in_order<F: Fn(&AvlNode<T>)>(&self, f: &F) {\n if let Some(root) = self.root {\n self._in_order(f, root);\n }\n }\n\n \/\/ Implementation of insert\n fn _insert(&mut self, value: T, node_index: Option<AvlNodeId>) -> AvlNodeId {\n let node =\n match node_index {\n Some(node) => {\n \/\/ Node exists, check which way to branch.\n if value == node.get(self).value {\n return node;\n } else if value < node.get(self).value {\n let l = node.get(self).left;\n node.get_mut(self).left = Some(self._insert(value, l));\n } else if value > node.get(self).value {\n let r = node.get(self).right;\n node.get_mut(self).right = Some(self._insert(value, r));\n }\n\n node\n },\n None => {\n \/\/ The node doesn't exist, create it here.\n self.allocate_node(value)\n },\n };\n\n self.rebalance(node)\n }\n\n pub fn _in_order<F: Fn(&AvlNode<T>)>(&self, f: &F, node: AvlNodeId) {\n if let Some(l) = node.get(self).left {\n self._in_order(f, l);\n }\n f(node.get(self));\n if let Some(r) = node.get(self).right {\n self._in_order(f, r);\n }\n }\n\n \/\/ Performs a left rotation on a tree\/subtree.\n \/\/ Returns the replace the specified node with\n fn rotate_left(&mut self, node: AvlNodeId) -> AvlNodeId {\n \/\/ Keep track of the original node positions\n \/\/ For a rotate left, the right child node must exist\n let r = node.get(self).right.unwrap();\n let rl = r.get(self).left;\n\n let ret = r; \n node.get_mut(self).right = rl;\n ret.get_mut(self).left = Some(node);\n\n ret\n }\n\n \/\/ Performs a right rotation on a tree\/subtree.\n \/\/ Returns the replace the specified node with\n fn rotate_right(&mut self, node: AvlNodeId) -> AvlNodeId {\n \/\/ Keep track of the original node positions\n \/\/ For a rotate right, the left child node must exist\n let l = node.get(self).left.unwrap();\n let lr = l.get(self).right;\n\n let ret = l;\n node.get_mut(self).left = lr;\n ret.get_mut(self).right = Some(node);\n\n ret\n }\n\n \/\/ performs a left-right double rotation on a tree\/subtree.\n fn rotate_leftright(&mut self, node: AvlNodeId) -> AvlNodeId {\n let l = node.get(self).left.unwrap();\n let new_l = self.rotate_left(l); \/\/ Left node needs to exist\n node.get_mut(self).left = Some(new_l);\n self.rotate_right(node)\n }\n\n \/\/ performs a right-left double rotation on a tree\/subtree.\n fn rotate_rightleft(&mut self, node: AvlNodeId) -> AvlNodeId {\n let r = node.get(self).right.unwrap();\n let new_r = self.rotate_right(r); \/\/ Right node needs to exist\n node.get_mut(self).right = Some(new_r);\n self.rotate_left(node)\n }\n\n \/\/ _rebalance rebalances the provided node\n fn rebalance(&mut self, node: AvlNodeId) -> AvlNodeId {\n let balance = self.height(node.get(self).left) - self.height(node.get(self).right);\n if balance == 2 { \/\/ 
left\n let lbalance = self.height(node.get(self).left.unwrap().get(self).left) -\n self.height(node.get(self).left.unwrap().get(self).right);\n if lbalance == 0 || lbalance == 1 { \/\/ left left - need to rotate right\n return self.rotate_right(node);\n } else if lbalance == -1 { \/\/ left right\n return self.rotate_leftright(node); \/\/ function name is just a coincidence\n }\n } else if balance == -2 { \/\/ right\n let rbalance = self.height(node.get(self).right.unwrap().get(self).left) -\n self.height(node.get(self).right.unwrap().get(self).right);\n if rbalance == 1 { \/\/ right left\n return self.rotate_rightleft(node); \/\/ function name is just a coincidence\n } else if rbalance == 0 || rbalance == -1 { \/\/ right right - need to rotate left\n return self.rotate_left(node);\n }\n }\n\n node\n }\n\n \/\/ height gets the height of a tree or subtree\n fn height(&self, node: Option<AvlNodeId>) -> i64 {\n match node {\n Some(node) => {\n let left_height = self.height(node.get(self).left);\n let right_height = self.height(node.get(self).right);\n\n if left_height > right_height {\n left_height+1\n } else {\n right_height+1\n }\n },\n None => { -1 },\n }\n }\n\n fn allocate_node(&mut self, value: T) -> AvlNodeId {\n match self.free_list.pop() {\n Some(index) => {\n AvlNodeId { time_stamp: self.nodes[index].time_stamp+1, index: index }\n },\n None => {\n \/\/ No free slots, create a new one\n let id = AvlNodeId { index: self.nodes.len(), time_stamp: 0 };\n self.nodes.push(AvlSlot { time_stamp: 0,\n node: Some(AvlNode { value: value, left: None, right: None }) });\n id\n },\n }\n }\n\n fn free_node(&mut self, id: AvlNodeId) -> AvlNode<T> {\n self.free_list.push(id.index);\n \n \/\/ NOTE: We unwrap here, because we trust that `id` points to a valid node, because\n \/\/ only we can create and free AvlNodes and their AvlNodeIds\n self.nodes[id.index].node.take().unwrap()\n }\n}\n\nstruct AvlSlot<T> {\n time_stamp: u64,\n node: Option<AvlNode<T>>,\n}\n<commit_msg>AvlTree now uses usize instead of AvlNodeId internally<commit_after>use redox::Vec;\n\npub struct AvlNode<T> {\n value: T,\n left: Option<usize>, \/\/ ID for left node\n right: Option<usize>, \/\/ ID for right node\n}\n\nimpl<T: PartialOrd> AvlNode<T> {\n pub fn value(&self) -> &T { &self.value }\n pub fn left(&self, tree: &AvlTree<T>) -> Option<AvlNodeId> {\n self.left.map(|l| AvlNodeId { index: l, time_stamp: tree.nodes[l].time_stamp })\n }\n pub fn right(&self, tree: &AvlTree<T>) -> Option<AvlNodeId> {\n self.right.map(|r| AvlNodeId { index: r, time_stamp: tree.nodes[r].time_stamp })\n }\n}\n\n#[derive(Copy, Clone)]\npub struct AvlNodeId {\n index: usize,\n time_stamp: u64,\n}\n\nimpl AvlNodeId {\n pub fn get<'a, T: PartialOrd>(&self, avl: &'a AvlTree<T>) -> &'a AvlNode<T> {\n let ref slot = avl.nodes[self.index];\n if slot.time_stamp == self.time_stamp {\n slot.node.as_ref().unwrap()\n } else {\n panic!(\"AvlNodeId had invalid time_stamp\");\n }\n }\n\n pub fn try_get<'a, T: PartialOrd>(&self, avl: &'a AvlTree<T>) -> Option<&'a AvlNode<T>> {\n avl.nodes\n .get(self.index)\n .and_then(|slot| {\n if slot.time_stamp == self.time_stamp {\n slot.node.as_ref()\n } else {\n None\n }\n })\n }\n\n pub fn get_mut<'a, T: PartialOrd>(&self, avl: &'a mut AvlTree<T>) -> &'a mut AvlNode<T> {\n let ref mut slot = avl.nodes[self.index];\n if slot.time_stamp == self.time_stamp {\n slot.node.as_mut().unwrap()\n } else {\n panic!(\"AvlNodeId had invalid time_stamp\");\n }\n }\n\n pub fn try_get_mut<'a, T: PartialOrd>(&self, avl: &'a mut AvlTree<T>) -> 
Option<&'a mut AvlNode<T>> {\n avl.nodes\n .get_mut(self.index)\n .and_then(|slot| {\n if slot.time_stamp == self.time_stamp {\n slot.node.as_mut()\n } else {\n None\n }\n })\n }\n}\n\npub struct AvlTree<T: PartialOrd> {\n root: Option<usize>, \/\/ Index of the root node\n nodes: Vec<AvlSlot<T>>,\n free_list: Vec<usize>,\n}\n\nimpl<T: PartialOrd> AvlTree<T> {\n pub fn new() -> Self {\n AvlTree {\n root: None,\n nodes: Vec::new(),\n free_list: Vec::new(),\n }\n }\n\n \/\/ Inserts a value into the tree, keeping it balanced. Lesser values will be stored on\n \/\/ the left, while greater values will be stored on the right. No duplicates are allowed.\n pub fn insert(&mut self, value: T) {\n let root = self.root;\n self.root = Some(self._insert(value, root));\n }\n\n pub fn in_order<F: Fn(&AvlNode<T>)>(&self, f: &F) {\n if let Some(root) = self.root {\n self._in_order(f, root);\n }\n }\n\n \/\/ Implementation of insert\n fn _insert(&mut self, value: T, node: Option<usize>) -> usize {\n let node =\n match node{\n Some(node) => {\n \/\/ Node exists, check which way to branch.\n if value == self.node(node).value {\n return node;\n } else if value < self.node(node).value {\n let l = self.node(node).left;\n self.node_mut(node).left = Some(self._insert(value, l));\n } else if value > self.node(node).value {\n let r = self.node(node).right;\n self.node_mut(node).right = Some(self._insert(value, r));\n }\n\n node\n },\n None => {\n \/\/ The node doesn't exist, create it here.\n self.allocate_node(value)\n },\n };\n\n self.rebalance(node)\n }\n\n pub fn _in_order<F: Fn(&AvlNode<T>)>(&self, f: &F, node: usize) {\n if let Some(l) = self.node(node).left {\n self._in_order(f, l);\n }\n f(self.node(node));\n if let Some(r) = self.node(node).right {\n self._in_order(f, r);\n }\n }\n\n \/\/ Performs a left rotation on a tree\/subtree.\n \/\/ Returns the replace the specified node with\n fn rotate_left(&mut self, node: usize) -> usize {\n \/\/ Keep track of the original node positions\n \/\/ For a rotate left, the right child node must exist\n let r = self.node(node).right.unwrap();\n let rl = self.node(r).left;\n\n let ret = r; \n self.node_mut(node).right = rl;\n self.node_mut(ret).left = Some(node);\n\n ret\n }\n\n \/\/ Performs a right rotation on a tree\/subtree.\n \/\/ Returns the replace the specified node with\n fn rotate_right(&mut self, node: usize) -> usize {\n \/\/ Keep track of the original node positions\n \/\/ For a rotate right, the left child node must exist\n let l = self.node(node).left.unwrap();\n let lr = self.node(l).right;\n\n let ret = l;\n self.node_mut(node).left = lr;\n self.node_mut(ret).right = Some(node);\n\n ret\n }\n\n \/\/ performs a left-right double rotation on a tree\/subtree.\n fn rotate_leftright(&mut self, node: usize) -> usize {\n let l = self.node(node).left.unwrap();\n let new_l = self.rotate_left(l); \/\/ Left node needs to exist\n self.node_mut(node).left = Some(new_l);\n self.rotate_right(node)\n }\n\n \/\/ performs a right-left double rotation on a tree\/subtree.\n fn rotate_rightleft(&mut self, node: usize) -> usize {\n let r = self.node(node).right.unwrap();\n let new_r = self.rotate_right(r); \/\/ Right node needs to exist\n self.node_mut(node).right = Some(new_r);\n self.rotate_left(node)\n }\n\n \/\/ _rebalance rebalances the provided node\n fn rebalance(&mut self, node: usize) -> usize {\n let balance = self.height(self.node(node).left) - self.height(self.node(node).right);\n if balance == 2 { \/\/ left\n let lbalance = 
self.height(self.node(self.node(node).left.unwrap()).left) -\n self.height(self.node(self.node(node).left.unwrap()).right);\n if lbalance == 0 || lbalance == 1 { \/\/ left left - need to rotate right\n return self.rotate_right(node);\n } else if lbalance == -1 { \/\/ left right\n return self.rotate_leftright(node); \/\/ function name is just a coincidence\n }\n } else if balance == -2 { \/\/ right\n let rbalance = self.height(self.node(self.node(node).right.unwrap()).left) -\n self.height(self.node(self.node(node).right.unwrap()).right);\n if rbalance == 1 { \/\/ right left\n return self.rotate_rightleft(node); \/\/ function name is just a coincidence\n } else if rbalance == 0 || rbalance == -1 { \/\/ right right - need to rotate left\n return self.rotate_left(node);\n }\n }\n\n node\n }\n\n \/\/ height gets the height of a tree or subtree\n fn height(&self, node: Option<usize>) -> i64 {\n match node {\n Some(node) => {\n let left_height = self.height(self.node(node).left);\n let right_height = self.height(self.node(node).right);\n\n if left_height > right_height {\n left_height+1\n } else {\n right_height+1\n }\n },\n None => { -1 },\n }\n }\n\n fn allocate_node(&mut self, value: T) -> usize {\n match self.free_list.pop() {\n Some(index) => {\n self.nodes[index].time_stamp += 1;\n index\n },\n None => {\n \/\/ No free slots, create a new one\n let index = self.nodes.len();\n self.nodes.push(AvlSlot { time_stamp: 0,\n node: Some(AvlNode { value: value, left: None, right: None }) });\n index\n },\n }\n }\n\n fn free_node(&mut self, id: AvlNodeId) -> AvlNode<T> {\n self.free_list.push(id.index);\n \n \/\/ NOTE: We unwrap here, because we trust that `id` points to a valid node, because\n \/\/ only we can create and free AvlNodes and their AvlNodeIds\n self.nodes[id.index].node.take().unwrap()\n }\n\n fn node(&self, index: usize) -> &AvlNode<T> {\n self.nodes[index].node.as_ref().unwrap()\n }\n\n fn node_mut(&mut self, index: usize) -> &mut AvlNode<T> {\n self.nodes[index].node.as_mut().unwrap()\n }\n}\n\nstruct AvlSlot<T> {\n time_stamp: u64,\n node: Option<AvlNode<T>>,\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>tuples for rectangle area<commit_after>fn main() {\n let rectangle = (50, 30);\n\n println!(\"Area of Rectangle with length {}cm and bredth {}cm is {}cm^2\", rectangle.0, rectangle.1, area(rectangle) );\n\n}\n\nfn area(dimensions: (u32, u32)) -> u32 {\n dimensions.0 * dimensions.1\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Rollup merge of #81430 - lcnr:elaborate-const-eval, r=oli-obk<commit_after>\/\/ run-pass\n\/\/ Test that we use the elaborated predicates from traits\n\/\/ to satisfy const evaluatable predicates.\n#![feature(const_generics, const_evaluatable_checked)]\n#![allow(incomplete_features)]\nuse std::mem::size_of;\n\ntrait Foo: Sized\nwhere\n [(); size_of::<Self>()]: Sized,\n{\n}\n\nimpl Foo for u64 {}\nimpl Foo for u32 {}\n\nfn foo<T: Foo>() -> [u8; size_of::<T>()] {\n [0; size_of::<T>()]\n}\n\nfn main() {\n assert_eq!(foo::<u32>(), [0; 4]);\n assert_eq!(foo::<u64>(), [0; 8]);\n}\n<|endoftext|>"} {"text":"<commit_before>use common::context::*;\nuse common::elf::*;\nuse common::memory::*;\nuse common::scheduler::*;\n\nuse programs::common::*;\n\npub struct Executor;\n\nimpl Executor {\n pub fn new() -> Executor {\n Executor\n }\n}\n\nimpl SessionItem for Executor {\n fn main(&mut self, url: URL){\n unsafe{\n let mut physical_address = 0;\n let virtual_address = LOAD_ADDR;\n let mut virtual_size = 0;\n\n let mut entry = 0;\n {\n let mut resource = 
url.open();\n drop(url);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n drop(resource);\n\n let executable = ELF::from_data(vec.as_ptr() as usize);\n drop(vec);\n\n if executable.data > 0 {\n virtual_size = alloc_size(executable.data) - 4096;\n physical_address = alloc(virtual_size);\n ptr::copy((executable.data + 4096) as *const u8, physical_address as *mut u8, virtual_size);\n entry = executable.entry();\n }\n drop(executable);\n }\n\n if physical_address > 0 && virtual_address > 0 && virtual_size > 0 && entry >= virtual_address && entry < virtual_address + virtual_size {\n let reenable = start_no_ints();\n\n let contexts = &mut *(*contexts_ptr);\n\n match contexts.get(context_i) {\n Option::Some(mut current) => {\n current.memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size\n });\n current.map();\n },\n Option::None => ()\n }\n\n end_no_ints(reenable);\n\n \/\/TODO: Free this\n asm!(\n \"push 0\n push edx\n push 0\n push ecx\n push ebx\n jmp eax\"\n :\n : \"{eax}\"(entry), \"{ebx}\"(1), \"{ecx}\"(\"Test\".to_string().to_c_str()), \"{edx}\"(\"test=test\".to_string().to_c_str())\n : \"memory\"\n : \"intel\", \"volatile\"\n )\n }else if physical_address > 0{\n unalloc(physical_address);\n }\n }\n }\n}\n<commit_msg>Executor will pass argument<commit_after>use common::context::*;\nuse common::elf::*;\nuse common::memory::*;\nuse common::scheduler::*;\n\nuse programs::common::*;\n\npub struct Executor;\n\nimpl Executor {\n pub fn new() -> Executor {\n Executor\n }\n}\n\nimpl SessionItem for Executor {\n fn main(&mut self, url: URL){\n unsafe{\n let mut physical_address = 0;\n let virtual_address = LOAD_ADDR;\n let mut virtual_size = 0;\n let url_c_str = url.string.to_c_str();\n\n let mut entry = 0;\n {\n let mut resource = url.open();\n drop(url);\n\n let mut vec: Vec<u8> = Vec::new();\n resource.read_to_end(&mut vec);\n drop(resource);\n\n let executable = ELF::from_data(vec.as_ptr() as usize);\n drop(vec);\n\n if executable.data > 0 {\n virtual_size = alloc_size(executable.data) - 4096;\n physical_address = alloc(virtual_size);\n ptr::copy((executable.data + 4096) as *const u8, physical_address as *mut u8, virtual_size);\n entry = executable.entry();\n }\n drop(executable);\n }\n\n if physical_address > 0 && virtual_address > 0 && virtual_size > 0 && entry >= virtual_address && entry < virtual_address + virtual_size {\n let reenable = start_no_ints();\n\n let contexts = &mut *(*contexts_ptr);\n\n match contexts.get(context_i) {\n Option::Some(mut current) => {\n current.memory.push(ContextMemory {\n physical_address: physical_address,\n virtual_address: virtual_address,\n virtual_size: virtual_size\n });\n current.map();\n },\n Option::None => ()\n }\n\n end_no_ints(reenable);\n\n \/\/TODO: Free this, show environment\n asm!(\n \"push 0\n push 0\n push ecx\n push ebx\n jmp eax\"\n :\n : \"{eax}\"(entry), \"{ebx}\"(1), \"{ecx}\"(url_c_str)\n : \"memory\"\n : \"intel\", \"volatile\"\n )\n }else if physical_address > 0{\n unalloc(physical_address);\n }\n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg> removed filter for testing<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>empty message<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Updated render_task.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>This barely works<commit_after>use std::io::net::tcp::TcpStream;\nuse std::str::from_utf8;\n\nstruct IRC {\n stream: 
TcpStream,\n connected: bool\n}\n\nimpl IRC {\n fn connect() -> IRC {\n let mut irc: IRC;\n let mut stream = TcpStream::connect(\"irc.mozilla.org\",6667).unwrap();\n stream.write(\"NICK homura-bot\\n\".as_bytes());\n stream.write(\"USER homura-bot 0 * :Best\\n\".as_bytes());\n stream.flush();\n let connected = false;\n return IRC { stream: stream, connected: connected };\n }\n fn read_line(&mut self) -> String {\n let mut buf = Vec::new();\n loop {\n let c = self.stream.read_byte().unwrap();\n if c == 0x0A {\n break;\n }\n buf.push(c);\n }\n let s = from_utf8(buf.as_slice()).unwrap();\n return s.to_string();\n }\n fn get_message(&mut self) -> String {\n loop {\n let line = self.read_line();\n let s = line.as_slice();\n println!(\"{}\",s);\n if s.contains(\"PING\") {\n println!(\"sending PONG\");\n let split: Vec<&str> = s.as_slice().split(':').collect();\n self.stream.write(\"PONG \".as_bytes());\n self.stream.write(split[1].as_bytes());\n self.stream.write(\"\\n\".as_bytes());\n self.stream.flush();\n } else {\n return line.clone();\n }\n }\n }\n fn write(&mut self, s: &str) {\n self.stream.write(s.as_bytes());\n }\n fn flush(&mut self) {\n self.stream.flush();\n }\n fn msg(&mut self, channel: &str, msg: &str) {\n self.write(\"PRIVMSG \");\n self.write(channel);\n self.write(\" :\");\n self.write(msg);\n self.write(\"\\n\");\n self.flush();\n }\n}\n\nfn main() {\n let mut irc = IRC::connect();\n loop {\n let s = irc.get_message();\n if s.as_slice().contains(\":homura-chan MODE\") {\n println!(\"Registered!\");\n irc.write(\"JOIN #interns\\n\");\n }\n if s.as_slice().contains(\"ben-せんぱい\") {\n println!(\"Triggered\");\n irc.msg(\"#interns\",\"かわいいです\"); \n }\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add run-pass\/never_coercions.rs test<commit_after>\/\/ Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\nfn main() {\n let v: Vec<u32> = Vec::new();\n match 0u32 {\n 0 => &v,\n 1 => return,\n _ => &v[..],\n };\n}\n\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Remove use of Itertools<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>rollup merge of #17093 : nathantypanski\/17021<commit_after>\/\/ Copyright 2012 The Rust Project Developers. See the COPYRIGHT\n\/\/ file at the top-level directory of this distribution and at\n\/\/ http:\/\/rust-lang.org\/COPYRIGHT.\n\/\/\n\/\/ Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\/\/ http:\/\/www.apache.org\/licenses\/LICENSE-2.0> or the MIT license\n\/\/ <LICENSE-MIT or http:\/\/opensource.org\/licenses\/MIT>, at your\n\/\/ option. 
This file may not be copied, modified, or distributed\n\/\/ except according to those terms.\n\/\/\n\/\/ This code used to produce the following ICE:\n\/\/\n\/\/ error: internal compiler error: get_unique_type_id_of_type() -\n\/\/ unexpected type: closure,\n\/\/ ty_unboxed_closure(syntax::ast::DefId{krate: 0u32, node: 66u32},\n\/\/ ReScope(63u32))\n\/\/\n\/\/ This is a regression test for issue #17021.\n\n#![feature(unboxed_closures, overloaded_calls)]\n\nuse std::ptr;\n\npub fn replace_map<'a, T, F>(src: &mut T, prod: F)\nwhere F: |: T| -> T {\n unsafe { *src = prod(ptr::read(src as *mut T as *const T)); }\n}\n\npub fn main() {\n let mut a = 7u;\n let b = &mut a;\n replace_map(b, |: x: uint| x * 2);\n assert_eq!(*b, 14u);\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io;\n\nuse futures::{Poll, Task, Future};\n\nuse WriteTask;\n\n\/\/\/ A future used to fully flush an I\/O object.\n\/\/\/\n\/\/\/ Resolves to the underlying I\/O object once the flush operation is complete.\n\/\/\/\n\/\/\/ Created by the `flush` function.\npub struct Flush<A> {\n a: Option<A>,\n}\n\n\/\/\/ Creates a future which will entirely flush an I\/O object and then yield the\n\/\/\/ object itself.\n\/\/\/\n\/\/\/ This function will consume the object provided if an error happens, and\n\/\/\/ otherwise it will repeatedly call `flush` until it sees `Ok(())`, scheduling\n\/\/\/ a retry if `WouldBlock` is seen along the way.\npub fn flush<A>(a: A) -> Flush<A>\n where A: WriteTask,\n{\n Flush {\n a: Some(a),\n }\n}\n\nimpl<A> Future for Flush<A>\n where A: WriteTask,\n{\n type Item = A;\n type Error = io::Error;\n\n fn poll(&mut self, task: &mut Task) -> Poll<A, io::Error> {\n match self.a.as_mut().unwrap().flush(task) {\n Ok(()) => Poll::Ok(self.a.take().unwrap()),\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {\n Poll::NotReady\n }\n Err(e) => Poll::Err(e),\n }\n }\n\n fn schedule(&mut self, task: &mut Task) {\n if let Some(ref mut a) = self.a {\n a.schedule(task);\n }\n }\n}\n\n<commit_msg>Wait for write readiness on Flush as well<commit_after>use std::io;\n\nuse futures::{Poll, Task, Future};\n\nuse WriteTask;\n\n\/\/\/ A future used to fully flush an I\/O object.\n\/\/\/\n\/\/\/ Resolves to the underlying I\/O object once the flush operation is complete.\n\/\/\/\n\/\/\/ Created by the `flush` function.\npub struct Flush<A> {\n a: Option<A>,\n first: bool,\n}\n\n\/\/\/ Creates a future which will entirely flush an I\/O object and then yield the\n\/\/\/ object itself.\n\/\/\/\n\/\/\/ This function will consume the object provided if an error happens, and\n\/\/\/ otherwise it will repeatedly call `flush` until it sees `Ok(())`, scheduling\n\/\/\/ a retry if `WouldBlock` is seen along the way.\npub fn flush<A>(a: A) -> Flush<A>\n where A: WriteTask,\n{\n Flush {\n a: Some(a),\n first: true,\n }\n}\n\nimpl<A> Future for Flush<A>\n where A: WriteTask,\n{\n type Item = A;\n type Error = io::Error;\n\n fn poll(&mut self, task: &mut Task) -> Poll<A, io::Error> {\n if self.first {\n self.first = false;\n } else {\n match try_poll!(self.a.as_mut().unwrap().poll(task)) {\n Ok(Some(ref r)) if r.is_write() => {}\n Ok(Some(_)) => return Poll::NotReady,\n Ok(None) => panic!(\"need flush but can't write\"),\n Err(e) => return Poll::Err(e)\n }\n }\n match self.a.as_mut().unwrap().flush(task) {\n Ok(()) => Poll::Ok(self.a.take().unwrap()),\n Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {\n Poll::NotReady\n }\n Err(e) => Poll::Err(e),\n }\n }\n\n fn schedule(&mut self, task: &mut Task) {\n if let Some(ref mut 
a) = self.a {\n a.schedule(task);\n }\n }\n}\n\n<|endoftext|>"} {"text":"<commit_before>#![cfg_attr(feature = \"dev\", feature(plugin))]\n#![cfg_attr(feature = \"dev\", plugin(clippy))]\n#![feature(test)]\n\nextern crate test;\nextern crate time;\nextern crate fruently;\nextern crate rust_fluent;\n\n#[cfg(test)]\nmod tests {\n use test;\n use test::Bencher;\n use time;\n use fruently::fluent::Fluent;\n use std::collections::HashMap;\n\n #[bench]\n fn benchmark_rust_fluent_tcp(b: &mut Bencher) {\n use rust_fluent::tcp;\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"json\".to_string(), \"value\".to_string());\n\n b.iter(|| {\n let fluentd = tcp::Fluentd::new(\"0.0.0.0:24224\", \"test\");\n let _ = fluentd.write(&obj);\n });\n }\n\n #[bench]\n fn benchmark_json_forwardable(b: &mut Bencher) {\n use fruently::forwardable::JsonForwardable;\n\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"json\".to_string(), \"fruently\".to_string());\n b.iter(|| {\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(&obj);\n });\n }\n\n #[bench]\n fn benchmark_msgpack_forwardable(b: &mut Bencher) {\n use fruently::forwardable::MsgpackForwardable;\n\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"msgp\".to_string(), \"fruently\".to_string());\n b.iter(|| {\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(&obj);\n });\n }\n\n #[bench]\n fn benchmark_forwardable(b: &mut Bencher) {\n use fruently::forwardable::Forwardable;\n\n let mut obj = HashMap::new();\n let mut hmap = HashMap::new();\n let time = time::now().to_timespec().sec;\n b.iter(|| {\n let n = test::black_box(1000);\n let thmap = (0..n).fold(&mut obj, |mut acc, _| {\n {\n acc.insert(\"fwd\", \"fruently\".to_string());\n }\n acc\n });\n hmap = thmap.clone();\n });\n let entry = (time, hmap);\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(vec![(entry)]);\n }\n}\n<commit_msg>bench: Use EventTime in forward mode<commit_after>#![cfg_attr(feature = \"dev\", feature(plugin))]\n#![cfg_attr(feature = \"dev\", plugin(clippy))]\n#![feature(test)]\n\nextern crate test;\nextern crate time;\nextern crate fruently;\nextern crate rust_fluent;\n\n#[cfg(test)]\nmod tests {\n use test;\n use test::Bencher;\n use time;\n use fruently::fluent::Fluent;\n use std::collections::HashMap;\n\n #[bench]\n fn benchmark_rust_fluent_tcp(b: &mut Bencher) {\n use rust_fluent::tcp;\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"json\".to_string(), \"value\".to_string());\n\n b.iter(|| {\n let fluentd = tcp::Fluentd::new(\"0.0.0.0:24224\", \"test\");\n let _ = fluentd.write(&obj);\n });\n }\n\n #[bench]\n fn benchmark_json_forwardable(b: &mut Bencher) {\n use fruently::forwardable::JsonForwardable;\n\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"json\".to_string(), \"fruently\".to_string());\n b.iter(|| {\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(&obj);\n });\n }\n\n #[bench]\n fn benchmark_msgpack_forwardable(b: &mut Bencher) {\n use fruently::forwardable::MsgpackForwardable;\n\n let mut obj: HashMap<String, String> = HashMap::new();\n obj.insert(\"msgp\".to_string(), \"fruently\".to_string());\n b.iter(|| {\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(&obj);\n });\n }\n\n #[bench]\n fn benchmark_forwardable(b: &mut Bencher) {\n use fruently::forwardable::Forwardable;\n use 
fruently::event_time::EventTime;\n\n let mut obj = HashMap::new();\n let mut hmap = HashMap::new();\n let time = time::now();\n b.iter(|| {\n let n = test::black_box(1000);\n let thmap = (0..n).fold(&mut obj, |mut acc, _| {\n {\n acc.insert(\"fwd\", \"fruently\".to_string());\n }\n acc\n });\n hmap = thmap.clone();\n });\n let entry = (EventTime::new(time), hmap);\n let fruently = Fluent::new(\"0.0.0.0:24224\", \"test\");\n let _ = fruently.post(vec![(entry)]);\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add (incomplete) rules for instruction selectoin<commit_after>rules!{\n \/\/ %(foo) refers to a register\n \/\/ 0(foo) refers to an immediate value\n \/\/ @(foo) refers to a static variable\n \/\/ FIXME: `load` -> handle statics\n\n \/\/ --- Arithmetics & binary operations -------------------------------------\n\n \/\/ Addition\n [%(dst) = add %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n add $dst, $rhs;\n },\n [%(dst) = add %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n add $dst, $rhs;\n },\n [%(dst) = add 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n add $dst, $rhs;\n },\n\n \/\/ Subtraction\n [%(dst) = sub %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n sub $dst, $rhs;\n },\n [%(dst) = sub %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n sub $dst, $rhs;\n },\n [%(dst) = sub 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n sub $dst, $rhs;\n },\n\n \/\/ Multiplication\n [%(dst) = mul %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n imul $dst, $rhs;\n },\n [%(dst) = mul %(lhs), 0(rhs); ..] => {\n \/\/ Use the three-operand form\n imu $dst, $lhs, $rhs;\n },\n [%(dst) = mul 0(lhs), %(rhs)]; ..] => {\n \/\/ Use the three-operand form\n imu $dst, $rhs, $lhs;\n },\n\n \/\/ Integer division\n [%(dst) = sub %(lhs), %(rhs); ..] => {\n xor rdx, rdx;\n mov rax, $lhs;\n idiv $rhs;\n mov $dst, rax;\n },\n [%(dst) = sub %(lhs), 0(rhs); ..] => {\n mov %(tmp), $rhs; \/\/ Create a temporary virtual register\n xor rdx, rdx;\n mov rax, $lhs;\n idiv $tmp;\n mov $dst, rax;\n },\n [%(dst) = sub 0(lhs), %(rhs)]; ..] => {\n xor rdx, rdx;\n mov rax, $lhs;\n idiv rhs;\n mov $dst, rax;\n },\n\n \/\/ Note: pow will be implemented as an intrinsic\n\n \/\/ Modulo\n \/\/ Like div but use the remainder of the division\n [%(dst) = sub %(lhs), %(rhs); ..] => {\n xor rdx, rdx;\n mov rax, $lhs;\n idiv $rhs;\n mov $dst, rdx;\n },\n [%(dst) = sub %(lhs), 0(rhs); ..] => {\n mov %(tmp), $rhs; \/\/ Create a temporary virtual register\n xor rdx, rdx;\n mov rax, $lhs;\n idiv $tmp;\n mov $dst, rdx;\n },\n [%(dst) = sub 0(lhs), %(rhs)]; ..] => {\n xor rdx, rdx;\n mov rax, $lhs;\n idiv rhs;\n mov $dst, rdx;\n },\n\n \/\/ Shift left\n [%(dst) = shl %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n mov rcx, $rhs;\n sal $dst, cl;\n },\n [%(dst) = shl %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n sal $dst, $rhs;\n },\n [%(dst) = shl 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n mov rcx, $rhs;\n sal $dst, cl;\n },\n\n \/\/ Shift right\n [%(dst) = shr %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n mov rcx, $rhs;\n sar $dst, cl;\n },\n [%(dst) = shr %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n sar $dst, $rhs;\n },\n [%(dst) = shr 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n mov rcx, $rhs;\n sar $dst, cl;\n },\n\n \/\/ And\n [%(dst) = and %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n and $dst, $rhs;\n },\n [%(dst) = and %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n and $dst, $rhs;\n },\n [%(dst) = and 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n and $dst, $rhs;\n },\n\n \/\/ Or\n [%(dst) = or %(lhs), %(rhs); ..] 
=> {\n mov $dst, $lhs;\n or $dst, $rhs;\n },\n [%(dst) = or %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n or $dst, $rhs;\n },\n [%(dst) = or 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n or $dst, $rhs;\n },\n\n \/\/ Xor\n [%(dst) = xor %(lhs), %(rhs); ..] => {\n mov $dst, $lhs;\n xor $dst, $rhs;\n },\n [%(dst) = xor %(lhs), 0(rhs); ..] => {\n mov $dst, $lhs;\n xor $dst, $rhs;\n },\n [%(dst) = xor 0(lhs), %(rhs)]; ..] => {\n mov $dst, $lhs;\n xor $dst, $rhs;\n },\n\n \/\/ Unary negation\n [%(dst) = not %(item); ..] => {\n mov $dst $item;\n not $dst;\n },\n\n \/\/ --- Comparisons ----------------------------------------------------------\n\n \/\/ Lower than: With branch\n [%(dst) = cmp lt %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jl $conseq;\n jmp $altern;\n },\n [%(dst) = cmp lt %(lhs), 0(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jl $conseq;\n jmp $altern;\n },\n [%(dst) = cmp lt 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n jge $altern;\n jmp $conseq;\n },\n\n \/\/ Lower than: Without branch\n [%(dst) = cmp lt %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n setl cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp lt %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n setl cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp lt 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n setge cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ Lower than or equal: With branch\n [%(dst) = cmp le %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jle $conseq;\n jmp $altern;\n },\n [%(dst) = cmp le %(lhs), 0(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jle $conseq;\n jmp $altern;\n },\n [%(dst) = cmp le 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n jg $altern;\n jmp $conseq;\n },\n\n \/\/ Lower than or equal: Without branch\n [%(dst) = cmp le %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n setle cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp le %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n setle cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp le 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n setg cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ Greater than or equal: With branch\n [%(dst) = cmp ge %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jge $conseq;\n jmp $altern;\n },\n [%(dst) = cmp ge %(lhs), 0(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jge $conseq;\n jmp $altern;\n },\n [%(dst) = cmp ge 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n jl $altern;\n jmp $conseq;\n },\n\n \/\/ Greater than or equal: Without branch\n [%(dst) = cmp ge %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n setge cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp ge %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n setge cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp ge 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n setl cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ Greater than: With branch\n [%(dst) = cmp gt %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jg $conseq;\n jmp $altern;\n },\n [%(dst) = cmp gt %(lhs), 0(rhs); br %(cond), conseq, altern; ..] 
=> {\n cmp $lhs, $rhs;\n jg $conseq;\n jmp $altern;\n },\n [%(dst) = cmp gt 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n jle $altern;\n jmp $conseq;\n },\n\n \/\/ Greater than: Without branch\n [%(dst) = cmp gt %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n setg cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp gt %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n setg cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp gt 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n setle cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ Equality: With branch\n [%(dst) = cmp eq %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n je $conseq;\n jmp $altern;\n },\n [%(dst) = cmp eq %(lhs), 0(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n je $conseq;\n jmp $altern;\n },\n [%(dst) = cmp eq 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n jne $altern;\n jmp $conseq;\n },\n\n \/\/ Equality: Without branch\n [%(dst) = cmp eq %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n sete cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp eq %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n sete cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp eq 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n setne cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ Inequality: With branch\n [%(dst) = cmp eq %(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jne $conseq;\n jmp $altern;\n },\n [%(dst) = cmp eq %(lhs), 0(rhs); br %(cond), conseq, altern; ..] => {\n cmp $lhs, $rhs;\n jne $conseq;\n jmp $altern;\n },\n [%(dst) = cmp eq 0(lhs), %(rhs); br %(cond), conseq, altern; ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n je $altern;\n jmp $conseq;\n },\n\n \/\/ Inequality: Without branch\n [%(dst) = cmp eq %(lhs), %(rhs); ..] => {\n cmp $lhs, $rhs;\n setne cl;\n and cl, 1; \/\/ Truncate to first bit\n movzx $dst, cl; \/\/ Move with zero extension\n },\n [%(dst) = cmp eq %(lhs), 0(rhs); ..] => {\n cmp $lhs, $rhs;\n setne cl;\n and cl, 1;\n movzx $dst, cl;\n },\n [%(dst) = cmp eq 0(lhs), %(rhs); ..] => {\n \/\/ Inverted cmp\n cmp $rhs, $lhs;\n sete cl;\n and cl, 1;\n movzx $dst, cl;\n },\n\n \/\/ TODO: Special case: a == 0 => jz\n\n \/\/ --- Alloca\/load\/store ----------------------------------------------------\n\n [%(dst) = alloca; ..] => {\n \/\/ TODO: Collect allocas and replace usage with indirect arguments\n mov $dst, rsp;\n sub rsi, 4;\n },\n [%(dst) = load %(src); ..] => {\n mov $dst, qword ptr [src];\n },\n [store %(val), %(dst); ..] => {\n mov qword ptr [dst], $val;\n },\n [store 0(val), %(dst); ..] => {\n mov qword ptr [dst], $val;\n },\n\n \/\/ --- Call -----------------------------------------------------------------\n\n [%dst = call func [args ...]] -> {\n \/\/\n }\n\n \/\/ TODO: How are phi nodes handeled?\n \/\/ TODO: return\/branch\/jump\n \/\/ FIXME: How is neg handled?\n}\n\n\/*\nResult:\n\npub fn trans_instr(instr: &mut [ir::Instruction],\n last: &ir::ControlFlowInstruction,\n code: &mut machine::MachineCode)\n -> usize\n{\n match instr {\n ...\n }\n}<|endoftext|>"} {"text":"<commit_before>\/\/! Server Responses\n\/\/!\n\/\/! These are responses sent by a `hyper::Server` to clients, after\n\/\/! 
receiving a request.\nuse header;\nuse http;\nuse status::StatusCode;\nuse version;\n\n\n\/\/\/ The outgoing half for a Tcp connection, created by a `Server` and given to a `Handler`.\n\/\/\/\n\/\/\/ The default `StatusCode` for a `Response` is `200 OK`.\n#[derive(Debug)]\npub struct Response<'a> {\n head: &'a mut http::MessageHead<StatusCode>,\n}\n\nimpl<'a> Response<'a> {\n \/\/\/ The headers of this response.\n #[inline]\n pub fn headers(&self) -> &header::Headers { &self.head.headers }\n\n \/\/\/ The status of this response.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.head.subject\n }\n\n \/\/\/ The HTTP version of this response.\n #[inline]\n pub fn version(&self) -> &version::HttpVersion { &self.head.version }\n\n \/\/\/ Get a mutable reference to the Headers.\n #[inline]\n pub fn headers_mut(&mut self) -> &mut header::Headers { &mut self.head.headers }\n\n \/\/\/ Get a mutable reference to the status.\n #[inline]\n pub fn set_status(&mut self, status: StatusCode) {\n self.head.subject = status;\n }\n}\n\n\/\/\/ Creates a new Response that can be used to write to a network stream.\npub fn new(head: &mut http::MessageHead<StatusCode>) -> Response {\n Response {\n head: head\n }\n}\n<commit_msg>docs(server): fix Response::set_status() documentation<commit_after>\/\/! Server Responses\n\/\/!\n\/\/! These are responses sent by a `hyper::Server` to clients, after\n\/\/! receiving a request.\nuse header;\nuse http;\nuse status::StatusCode;\nuse version;\n\n\n\/\/\/ The outgoing half for a Tcp connection, created by a `Server` and given to a `Handler`.\n\/\/\/\n\/\/\/ The default `StatusCode` for a `Response` is `200 OK`.\n#[derive(Debug)]\npub struct Response<'a> {\n head: &'a mut http::MessageHead<StatusCode>,\n}\n\nimpl<'a> Response<'a> {\n \/\/\/ The headers of this response.\n #[inline]\n pub fn headers(&self) -> &header::Headers { &self.head.headers }\n\n \/\/\/ The status of this response.\n #[inline]\n pub fn status(&self) -> &StatusCode {\n &self.head.subject\n }\n\n \/\/\/ The HTTP version of this response.\n #[inline]\n pub fn version(&self) -> &version::HttpVersion { &self.head.version }\n\n \/\/\/ Get a mutable reference to the Headers.\n #[inline]\n pub fn headers_mut(&mut self) -> &mut header::Headers { &mut self.head.headers }\n\n \/\/\/ Set the status of this response.\n #[inline]\n pub fn set_status(&mut self, status: StatusCode) {\n self.head.subject = status;\n }\n}\n\n\/\/\/ Creates a new Response that can be used to write to a network stream.\npub fn new(head: &mut http::MessageHead<StatusCode>) -> Response {\n Response {\n head: head\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>account.unbanUser method<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>added some more hashing algorithms<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Initial Work On Ping Format<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Clarify maths a little<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Revert support for router_id in ZMsgExtended. 
Better achieved by using `ZMsg::push<type>()`.<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Test if IoError is properly returned as cause()<commit_after>extern crate yaml;\n\nuse std::error::Error;\nuse std::io::{IoError, IoResult};\nuse std::io::IoErrorKind;\n\nstruct MockReader {\n _data: ()\n}\n\nimpl MockReader {\n pub fn new() -> MockReader {\n MockReader { _data: () }\n }\n}\n\nimpl Reader for MockReader {\n fn read(&mut self, _buf: &mut [u8]) -> IoResult<uint> {\n Err(IoError {\n kind: IoErrorKind::OtherIoError,\n desc: \"\",\n detail: Some(\"mock reader\".to_string())\n })\n }\n}\n\n#[test]\nfn error_cause_test() {\n let mut mock_reader = MockReader::new();\n match yaml::parse_io_utf8(&mut mock_reader) {\n Ok(_) => panic!(\"Should return an error\"),\n Err(e) => assert_eq!(e.cause().and_then(|ioe| ioe.detail()), Some(\"mock reader\".to_string()))\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>languages\/rust\/rust-book\/3-chapter\/13-code.rs<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>(test) created an example for helper gt<commit_after>extern crate handlebars;\n#[macro_use]\nextern crate serde_json;\n\nuse handlebars::{Handlebars, RenderError, Helper, RenderContext};\n\n#[test]\nfn test_subexpression() {\n let mut hbs = Handlebars::new();\n\n hbs.register_helper(\n \"gt\",\n Box::new(|h: &Helper,\n _: &Handlebars,\n rc: &mut RenderContext|\n -> Result<(), RenderError> {\n let p1 = try!(h.param(0).and_then(|v| v.value().as_i64()).ok_or(\n RenderError::new(\"Param 0 with i64 type is required for gt helper.\"),\n ));\n let p2 = try!(h.param(1).and_then(|v| v.value().as_i64()).ok_or(\n RenderError::new(\"Param 1 with i64 type is required for gt helper.\"),\n ));\n\n if p1 > p2 {\n rc.writer.write(\"true\".as_bytes())?;\n }\n\n Ok(())\n }),\n );\n\n assert_eq!(\n hbs.template_render(\n \"{{#if (gt a b)}}Success{{else}}Failed{{\/if}}\",\n &json!({\"a\": 1, \"b\": 0}),\n ).unwrap(),\n \"Success\"\n );\n}\n<|endoftext|>"} {"text":"<commit_before>use std::io::{self, Read};\n\nuse hyper::client::IntoUrl;\nuse hyper::header::{Headers, ContentType, Location, Referer, UserAgent};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse hyper::{Url};\n\nuse serde::Serialize;\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\npub struct Client {\n inner: ::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: client\n })\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Convenience method to make a `HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> 
RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self,\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\n#[cfg(not(feature = \"tls\"))]\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n Ok(::hyper::Client::new())\n}\n\n#[cfg(feature = \"tls\")]\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use tls::TlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(try!(TlsClient::new()))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder<'a> {\n client: &'a Client,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl<'a> RequestBuilder<'a> {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + ::header::HeaderFormat>(mut self, header: H) -> RequestBuilder<'a> {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder<'a> {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder<'a> {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder<'a> {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder<'a> {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n 
self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut redirect_count = 0;\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.inner.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n body.take();\n\n match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found => {\n\n \/\/TODO: turn this into self.redirect_policy.check()\n if redirect_count > 10 {\n return Err(::Error::TooManyRedirects);\n }\n redirect_count += 1;\n\n method = match method {\n Method::Post | Method::Put => Method::Get,\n m => m\n };\n\n headers.set(Referer(url.to_string()));\n\n let loc = {\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(u) => u,\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to '{}'\", url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n\n },\n _ => {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n<commit_msg>add 303 status code to redirect policy<commit_after>use std::io::{self, Read};\n\nuse hyper::client::IntoUrl;\nuse hyper::header::{Headers, ContentType, Location, Referer, UserAgent};\nuse hyper::method::Method;\nuse hyper::status::StatusCode;\nuse hyper::version::HttpVersion;\nuse hyper::{Url};\n\nuse serde::Serialize;\nuse serde_json;\nuse serde_urlencoded;\n\nuse ::body::{self, Body};\n\nstatic DEFAULT_USER_AGENT: &'static str = concat!(env!(\"CARGO_PKG_NAME\"), \"\/\", env!(\"CARGO_PKG_VERSION\"));\n\n\/\/\/ A `Client` to make Requests with.\n\/\/\/\n\/\/\/ The Client has various configuration values to tweak, but the defaults\n\/\/\/ are set to what is usually the most commonly desired value.\n\/\/\/\n\/\/\/ The `Client` holds a connection pool internally, so it is advised that\n\/\/\/ you create one and reuse it.\npub struct Client {\n inner: ::hyper::Client,\n}\n\nimpl Client {\n \/\/\/ Constructs a new `Client`.\n pub fn new() -> ::Result<Client> {\n let mut client = try!(new_hyper_client());\n client.set_redirect_policy(::hyper::client::RedirectPolicy::FollowNone);\n Ok(Client {\n inner: client\n })\n }\n\n \/\/\/ Convenience method to make a `GET` request to a URL.\n pub 
fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Get, url)\n }\n\n \/\/\/ Convenience method to make a `POST` request to a URL.\n pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Post, url)\n }\n\n \/\/\/ Convenience method to make a `HEAD` request to a URL.\n pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {\n self.request(Method::Head, url)\n }\n\n \/\/\/ Start building a `Request` with the `Method` and `Url`.\n \/\/\/\n \/\/\/ Returns a `RequestBuilder`, which will allow setting headers and\n \/\/\/ request body before sending.\n pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {\n let url = url.into_url();\n RequestBuilder {\n client: self,\n method: method,\n url: url,\n _version: HttpVersion::Http11,\n headers: Headers::new(),\n\n body: None,\n }\n }\n}\n\n#[cfg(not(feature = \"tls\"))]\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n Ok(::hyper::Client::new())\n}\n\n#[cfg(feature = \"tls\")]\nfn new_hyper_client() -> ::Result<::hyper::Client> {\n use tls::TlsClient;\n Ok(::hyper::Client::with_connector(\n ::hyper::client::Pool::with_connector(\n Default::default(),\n ::hyper::net::HttpsConnector::new(try!(TlsClient::new()))\n )\n ))\n}\n\n\n\/\/\/ A builder to construct the properties of a `Request`.\npub struct RequestBuilder<'a> {\n client: &'a Client,\n\n method: Method,\n url: Result<Url, ::UrlError>,\n _version: HttpVersion,\n headers: Headers,\n\n body: Option<::Result<Body>>,\n}\n\nimpl<'a> RequestBuilder<'a> {\n \/\/\/ Add a `Header` to this Request.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ use reqwest::header::UserAgent;\n \/\/\/ let client = reqwest::Client::new().expect(\"client failed to construct\");\n \/\/\/\n \/\/\/ let res = client.get(\"https:\/\/www.rust-lang.org\")\n \/\/\/ .header(UserAgent(\"foo\".to_string()))\n \/\/\/ .send();\n \/\/\/ ```\n pub fn header<H: ::header::Header + ::header::HeaderFormat>(mut self, header: H) -> RequestBuilder<'a> {\n self.headers.set(header);\n self\n }\n \/\/\/ Add a set of Headers to the existing ones on this Request.\n \/\/\/\n \/\/\/ The headers will be merged in to any already set.\n pub fn headers(mut self, headers: ::header::Headers) -> RequestBuilder<'a> {\n self.headers.extend(headers.iter());\n self\n }\n\n \/\/\/ Set the request body.\n pub fn body<T: Into<Body>>(mut self, body: T) -> RequestBuilder<'a> {\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Send a form body.\n \/\/\/\n \/\/\/ Sets the body to the url encoded serialization of the passed value,\n \/\/\/ and also sets the `Content-Type: application\/www-form-url-encoded`\n \/\/\/ header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut params = HashMap::new();\n \/\/\/ params.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .form(¶ms)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn form<T: Serialize>(mut self, form: &T) -> RequestBuilder<'a> {\n let body = serde_urlencoded::to_string(form).map_err(::Error::from);\n self.headers.set(ContentType::form_url_encoded());\n self.body = Some(body.map(|b| b.into()));\n self\n }\n\n \/\/\/ Send a JSON body.\n \/\/\/\n \/\/\/ Sets the body to the JSON serialization of the passed value, and\n \/\/\/ also sets the `Content-Type: application\/json` header.\n \/\/\/\n \/\/\/ ```no_run\n \/\/\/ # use std::collections::HashMap;\n \/\/\/ let mut map = HashMap::new();\n \/\/\/ 
map.insert(\"lang\", \"rust\");\n \/\/\/\n \/\/\/ let client = reqwest::Client::new().unwrap();\n \/\/\/ let res = client.post(\"http:\/\/httpbin.org\")\n \/\/\/ .json(&map)\n \/\/\/ .send();\n \/\/\/ ```\n pub fn json<T: Serialize>(mut self, json: &T) -> RequestBuilder<'a> {\n let body = serde_json::to_vec(json).expect(\"serde to_vec cannot fail\");\n self.headers.set(ContentType::json());\n self.body = Some(Ok(body.into()));\n self\n }\n\n \/\/\/ Constructs the Request and sends it the target URL, returning a Response.\n pub fn send(mut self) -> ::Result<Response> {\n if !self.headers.has::<UserAgent>() {\n self.headers.set(UserAgent(DEFAULT_USER_AGENT.to_owned()));\n }\n\n let client = self.client;\n let mut method = self.method;\n let mut url = try!(self.url);\n let mut headers = self.headers;\n let mut body = match self.body {\n Some(b) => Some(try!(b)),\n None => None,\n };\n\n let mut redirect_count = 0;\n\n loop {\n let res = {\n debug!(\"request {:?} \\\"{}\\\"\", method, url);\n let mut req = client.inner.request(method.clone(), url.clone())\n .headers(headers.clone());\n\n if let Some(ref mut b) = body {\n let body = body::as_hyper_body(b);\n req = req.body(body);\n }\n\n try!(req.send())\n };\n body.take();\n\n match res.status {\n StatusCode::MovedPermanently |\n StatusCode::Found |\n StatusCode::SeeOther => {\n\n \/\/TODO: turn this into self.redirect_policy.check()\n if redirect_count > 10 {\n return Err(::Error::TooManyRedirects);\n }\n redirect_count += 1;\n\n method = match method {\n Method::Post | Method::Put => Method::Get,\n m => m\n };\n\n headers.set(Referer(url.to_string()));\n\n let loc = {\n let loc = res.headers.get::<Location>().map(|loc| url.join(loc));\n if let Some(loc) = loc {\n loc\n } else {\n return Ok(Response {\n inner: res\n });\n }\n };\n\n url = match loc {\n Ok(u) => u,\n Err(e) => {\n debug!(\"Location header had invalid URI: {:?}\", e);\n return Ok(Response {\n inner: res\n })\n }\n };\n\n debug!(\"redirecting to '{}'\", url);\n\n \/\/TODO: removeSensitiveHeaders(&mut headers, &url);\n\n },\n _ => {\n return Ok(Response {\n inner: res\n });\n }\n }\n }\n }\n}\n\n\/\/\/ A Response to a submitted `Request`.\npub struct Response {\n inner: ::hyper::client::Response,\n}\n\nimpl Response {\n \/\/\/ Get the `StatusCode`.\n pub fn status(&self) -> &StatusCode {\n &self.inner.status\n }\n\n \/\/\/ Get the `Headers`.\n pub fn headers(&self) -> &Headers {\n &self.inner.headers\n }\n\n \/\/\/ Get the `HttpVersion`.\n pub fn version(&self) -> &HttpVersion {\n &self.inner.version\n }\n}\n\n\/\/\/ Read the body of the Response.\nimpl Read for Response {\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n self.inner.read(buf)\n }\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add debug print to chunk<commit_after><|endoftext|>"} {"text":"<commit_before><commit_msg>Add example how to make the main window transparent<commit_after>\/\/! # Transparent main window example\n\/\/!\n\/\/! 
This example demonstrates how to create a main window with a transparent background.\n\nextern crate gtk;\nextern crate gdk;\nextern crate cairo;\n\nuse gtk::prelude::*;\nuse gtk::{Window, WindowType, Fixed, Button};\nuse gdk::ScreenExt; \/\/import get_rgba_visual\n\nfn main() {\n if gtk::init().is_err() {\n println!(\"Failed to initialize GTK.\");\n return;\n }\n\n let window = Window::new(WindowType::Toplevel);\n set_visual(&window, &None);\n\n window.connect_delete_event(quit);\n window.connect_screen_changed(set_visual);\n window.connect_draw(draw);\n\n window.set_title(\"Alpha Demo\");\n window.set_default_size(500, 500);\n window.set_app_paintable(true); \/\/crucial for transparency\n\n let fixed = Fixed::new();\n window.add(&fixed);\n let button = Button::new_with_label(\"Dummy\");\n button.set_size_request(100, 30);\n fixed.add(&button);\n\n window.show_all();\n gtk::main();\n}\n\nfn set_visual(window: &Window, _screen: &Option<gdk::Screen>) {\n if let Some(screen) = window.get_screen() {\n if let Some(visual) = screen.get_rgba_visual() {\n window.set_visual(&visual); \/\/crucial for transparency\n }\n }\n}\n\nfn draw(_window: &Window, ctx: &cairo::Context) -> Inhibit {\n \/\/crucial for transparency\n ctx.set_source_rgba(1.0, 0.0, 0.0, 0.4);\n ctx.set_operator(cairo::enums::Operator::Screen);\n ctx.paint();\n Inhibit(false)\n}\n\nfn quit(_window: &Window, _event: &gdk::Event) -> Inhibit {\n gtk::main_quit();\n Inhibit(false)\n}\n<|endoftext|>"} {"text":"<commit_before><commit_msg>Add regression test for #65348<commit_after>\/\/ check-pass\n\nstruct Generic<T>(T);\n\nimpl<T> Generic<T> {\n const ARRAY: [T; 0] = [];\n const NEWTYPE_ARRAY: Generic<[T; 0]> = Generic([]);\n const ARRAY_FIELD: Generic<(i32, [T; 0])> = Generic((0, []));\n}\n\npub const fn array<T>() -> &'static T {\n &Generic::<T>::ARRAY[0]\n}\n\npub const fn newtype_array<T>() -> &'static T {\n &Generic::<T>::NEWTYPE_ARRAY.0[0]\n}\n\npub const fn array_field<T>() -> &'static T {\n &(Generic::<T>::ARRAY_FIELD.0).1[0]\n}\n\nfn main() {}\n<|endoftext|>"}